Merge V8 5.5.372.32

Test: Manual, built and ran D8
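
For reference, a typical upstream workflow for that manual check
(illustrative only; assumes a plain GN checkout of V8, whereas the
Android build goes through the Android.*.mk files updated below):

    gn gen out/x64.release --args='is_debug=false target_cpu="x64"'
    ninja -C out/x64.release d8
    out/x64.release/d8 -e 'print("hello from d8")'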

Change-Id: I831a5491f74342c2675bb6fe1e24a2258e493758
diff --git a/.gn b/.gn
index a1c0ff8..aee1752 100644
--- a/.gn
+++ b/.gn
@@ -14,8 +14,7 @@
 # matching these patterns (see "gn help label_pattern" for format) will have
 # their includes checked for proper dependencies when you run either
 # "gn check" or "gn gen --check".
-check_targets = [
-]
+check_targets = []
 
 # These are the list of GN files that run exec_script. This whitelist exists
 # to force additional review for new uses of exec_script, which is strongly
@@ -45,7 +44,5 @@
   "//build/toolchain/win/BUILD.gn",
   "//build/util/branding.gni",
   "//build/util/version.gni",
-  "//test/cctest/BUILD.gn",
   "//test/test262/BUILD.gn",
-  "//test/unittests/BUILD.gn",
 ]
diff --git a/.landmines b/.landmines
index 77142a4..db301ff 100644
--- a/.landmines
+++ b/.landmines
@@ -13,4 +13,3 @@
 Clobber after Android NDK update.
 Clober to fix windows build problems.
 Clober again to fix windows build problems.
-Clobber to possibly resolve failure on win-32 bot.
diff --git a/AUTHORS b/AUTHORS
index d70be77..0229c92 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -63,6 +63,7 @@
 Filipe David Manana <fdmanana@gmail.com>
 Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
 Geoffrey Garside <ggarside@gmail.com>
+Gwang Yoon Hwang <ryumiel@company100.net>
 Han Choongwoo <cwhan.tunz@gmail.com>
 Hirofumi Mako <mkhrfm@gmail.com>
 Honggyu Kim <honggyu.kp@gmail.com>
@@ -95,14 +96,17 @@
 Milton Chiang <milton.chiang@mediatek.com>
 Myeong-bo Shim <m0609.shim@samsung.com>
 Nicolas Antonius Ernst Leopold Maria Kaiser <nikai@nikai.net>
+Noj Vek <nojvek@gmail.com>
 Oleksandr Chekhovskyi <oleksandr.chekhovskyi@gmail.com>
 Paolo Giarrusso <p.giarrusso@gmail.com>
 Patrick Gansterer <paroga@paroga.com>
+Peter Rybin <peter.rybin@gmail.com>
 Peter Varga <pvarga@inf.u-szeged.hu>
 Paul Lind <plind44@gmail.com>
 Rafal Krypa <rafal@krypa.net>
 Refael Ackermann <refack@gmail.com>
 Rene Rebe <rene@exactcode.de>
+Rob Wu <rob@robwu.nl>
 Robert Mustacchi <rm@fingolfin.org>
 Robert Nagy <robert.nagy@gmail.com>
 Ryan Dahl <ry@tinyclouds.org>
diff --git a/Android.base.mk b/Android.base.mk
index 475cd9d..5e72c6d 100644
--- a/Android.base.mk
+++ b/Android.base.mk
@@ -1,17 +1,16 @@
 LOCAL_PATH := $(call my-dir)
 
 v8_base_common_src := \
-	src/base/accounting-allocator.cc \
 	src/base/bits.cc \
 	src/base/cpu.cc \
 	src/base/debug/stack_trace.cc \
+	src/base/debug/stack_trace_android.cc \
 	src/base/division-by-constant.cc \
 	src/base/file-utils.cc \
 	src/base/functional.cc \
 	src/base/ieee754.cc \
 	src/base/logging.cc \
 	src/base/once.cc \
-	src/base/debug/stack_trace_android.cc \
 	src/base/platform/condition-variable.cc \
 	src/base/platform/mutex.cc \
 	src/base/platform/platform-posix.cc \
@@ -53,10 +52,12 @@
 
 # Host may be linux or darwin.
 ifeq ($(HOST_OS),linux)
-LOCAL_SRC_FILES += src/base/platform/platform-linux.cc
+LOCAL_SRC_FILES += \
+	src/base/platform/platform-linux.cc
 endif
 ifeq ($(HOST_OS),darwin)
-LOCAL_SRC_FILES += src/base/platform/platform-macos.cc
+LOCAL_SRC_FILES += \
+	src/base/platform/platform-macos.cc
 endif
 
 ifeq ($(HOST_ARCH),x86)
diff --git a/Android.platform.mk b/Android.platform.mk
index d38736a..94a5c6e 100644
--- a/Android.platform.mk
+++ b/Android.platform.mk
@@ -17,7 +17,9 @@
 	src/libplatform/tracing/tracing-controller.cc \
 	src/libplatform/worker-thread.cc
 
-LOCAL_C_INCLUDES := $(LOCAL_PATH)/src
+LOCAL_C_INCLUDES := \
+	$(LOCAL_PATH)/src \
+	$(LOCAL_PATH)/include
 
 include $(BUILD_STATIC_LIBRARY)
 
diff --git a/Android.v8.mk b/Android.v8.mk
index 6d5296c..b048eb5 100644
--- a/Android.v8.mk
+++ b/Android.v8.mk
@@ -27,7 +27,9 @@
 	src/ast/ast-expression-rewriter.cc \
 	src/ast/ast-literal-reindexer.cc \
 	src/ast/ast-numbering.cc \
+	src/ast/ast-types.cc \
 	src/ast/ast-value-factory.cc \
+	src/ast/compile-time-value.cc \
 	src/ast/context-slot-cache.cc \
 	src/ast/modules.cc \
 	src/ast/prettyprinter.cc \
@@ -59,12 +61,14 @@
 	src/builtins/builtins-handler.cc \
 	src/builtins/builtins-internal.cc \
 	src/builtins/builtins-interpreter.cc \
+	src/builtins/builtins-iterator.cc \
 	src/builtins/builtins-json.cc \
 	src/builtins/builtins-math.cc \
 	src/builtins/builtins-number.cc \
 	src/builtins/builtins-object.cc \
 	src/builtins/builtins-proxy.cc \
 	src/builtins/builtins-reflect.cc \
+	src/builtins/builtins-regexp.cc \
 	src/builtins/builtins-sharedarraybuffer.cc \
 	src/builtins/builtins-string.cc \
 	src/builtins/builtins-symbol.cc \
@@ -79,6 +83,7 @@
 	src/code-stubs-hydrogen.cc \
 	src/compilation-cache.cc \
 	src/compilation-dependencies.cc \
+	src/compilation-info.cc \
 	src/compilation-statistics.cc \
 	src/compiler/access-builder.cc \
 	src/compiler/access-info.cc \
@@ -142,6 +147,7 @@
 	src/compiler/loop-analysis.cc \
 	src/compiler/loop-peeling.cc \
 	src/compiler/loop-variable-optimizer.cc \
+	src/compiler/machine-graph-verifier.cc \
 	src/compiler/machine-operator.cc \
 	src/compiler/machine-operator-reducer.cc \
 	src/compiler/memory-optimizer.cc \
@@ -173,9 +179,11 @@
 	src/compiler/state-values-utils.cc \
 	src/compiler/store-store-elimination.cc \
 	src/compiler/tail-call-optimization.cc \
+	src/compiler/type-cache.cc \
+	src/compiler/typed-optimization.cc \
 	src/compiler/type-hint-analyzer.cc \
-	src/compiler/type-hints.cc \
 	src/compiler/typer.cc \
+	src/compiler/types.cc \
 	src/compiler/value-numbering-reducer.cc \
 	src/compiler/verifier.cc \
 	src/compiler/wasm-compiler.cc \
@@ -284,9 +292,9 @@
 	src/interpreter/bytecode-flags.cc \
 	src/interpreter/bytecode-generator.cc \
 	src/interpreter/bytecode-label.cc \
+	src/interpreter/bytecode-operands.cc \
 	src/interpreter/bytecode-peephole-optimizer.cc \
 	src/interpreter/bytecode-pipeline.cc \
-	src/interpreter/bytecode-register-allocator.cc \
 	src/interpreter/bytecode-register.cc \
 	src/interpreter/bytecode-register-optimizer.cc \
 	src/interpreter/bytecodes.cc \
@@ -303,6 +311,7 @@
 	src/layout-descriptor.cc \
 	src/log.cc \
 	src/log-utils.cc \
+	src/lookup-cache.cc \
 	src/lookup.cc \
 	src/machine-type.cc \
 	src/messages.cc \
@@ -310,6 +319,7 @@
 	src/objects-debug.cc \
 	src/objects-printer.cc \
 	src/ostreams.cc \
+	src/parsing/duplicate-finder.cc \
 	src/parsing/func-name-inferrer.cc \
 	src/parsing/parameter-initializer-rewriter.cc \
 	src/parsing/parse-info.cc \
@@ -332,6 +342,7 @@
 	src/profiler/sampling-heap-profiler.cc \
 	src/profiler/strings-storage.cc \
 	src/profiler/tick-sample.cc \
+	src/profiler/tracing-cpu-profiler.cc \
 	src/property.cc \
 	src/property-descriptor.cc \
 	src/regexp/interpreter-irregexp.cc \
@@ -392,10 +403,9 @@
 	src/strtod.cc \
 	src/tracing/trace-event.cc \
 	src/transitions.cc \
-	src/type-cache.cc \
 	src/type-feedback-vector.cc \
+	src/type-hints.cc \
 	src/type-info.cc \
-	src/types.cc \
 	src/unicode.cc \
 	src/unicode-decoder.cc \
 	src/uri.cc \
@@ -405,7 +415,6 @@
 	src/value-serializer.cc \
 	src/version.cc \
 	src/wasm/ast-decoder.cc \
-	src/wasm/encoder.cc \
 	src/wasm/module-decoder.cc \
 	src/wasm/switch-logic.cc \
 	src/wasm/wasm-debug.cc \
@@ -413,10 +422,13 @@
 	src/wasm/wasm-function-name-table.cc \
 	src/wasm/wasm-interpreter.cc \
 	src/wasm/wasm-js.cc \
+	src/wasm/wasm-module-builder.cc \
 	src/wasm/wasm-module.cc \
 	src/wasm/wasm-opcodes.cc \
 	src/wasm/wasm-result.cc \
-	src/zone.cc
+	src/zone/accounting-allocator.cc \
+	src/zone/zone.cc \
+	src/zone/zone-segment.cc
 
 LOCAL_SRC_FILES_arm += \
 	src/arm/assembler-arm.cc \
@@ -557,6 +569,7 @@
 	src/ia32/frames-ia32.cc \
 	src/ia32/interface-descriptors-ia32.cc \
 	src/ia32/macro-assembler-ia32.cc \
+	src/ia32/simulator-ia32.cc \
 	src/ic/ia32/access-compiler-ia32.cc \
 	src/ic/ia32/handler-compiler-ia32.cc \
 	src/ic/ia32/ic-compiler-ia32.cc \
@@ -590,7 +603,8 @@
 	src/x64/eh-frame-x64.cc \
 	src/x64/frames-x64.cc \
 	src/x64/interface-descriptors-x64.cc \
-	src/x64/macro-assembler-x64.cc
+	src/x64/macro-assembler-x64.cc \
+	src/x64/simulator-x64.cc
 
 LOCAL_SRC_FILES += \
 	src/snapshot/snapshot-empty.cc \
diff --git a/Android.v8gen.mk b/Android.v8gen.mk
index 2b3456e..87b3b03 100644
--- a/Android.v8gen.mk
+++ b/Android.v8gen.mk
@@ -23,29 +23,25 @@
 	src/js/regexp.js \
 	src/js/arraybuffer.js \
 	src/js/typedarray.js \
-	src/js/iterator-prototype.js \
 	src/js/collection.js \
 	src/js/weak-collection.js \
 	src/js/collection-iterator.js \
 	src/js/promise.js \
 	src/js/messages.js \
 	src/js/array-iterator.js \
-	src/js/string-iterator.js \
 	src/js/templates.js \
 	src/js/spread.js \
 	src/js/proxy.js \
 	src/debug/mirrors.js \
 	src/debug/debug.js \
-	src/debug/liveedit.js \
-	src/js/i18n.js
+	src/debug/liveedit.js
 
 V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := \
 	src/js/macros.py \
 	src/messages.h \
 	src/js/harmony-atomics.js \
 	src/js/harmony-simd.js \
-	src/js/harmony-string-padding.js \
-	src/js/harmony-async-await.js
+	src/js/harmony-string-padding.js
 
 LOCAL_JS_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_LIBRARY_FILES))
 LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES))
diff --git a/BUILD.gn b/BUILD.gn
index dcefe37..06870b6 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -62,6 +62,9 @@
   # tools/gen-postmortem-metadata.py for details.
   v8_postmortem_support = false
 
+  # Switches off inlining in V8.
+  v8_no_inline = false
+
   # Similar to vfp but on MIPS.
   v8_can_use_fpu_instructions = true
 
@@ -94,6 +97,11 @@
   v8_enable_disassembler = is_debug && !v8_optimized_debug
 }
 
+# Specifies if the target build is a simulator build. Comparing target cpu
+# with v8 target cpu to not affect simulator builds for making cross-compile
+# snapshots.
+is_target_simulator = target_cpu != v8_target_cpu
+
 v8_generated_peephole_source = "$target_gen_dir/bytecode-peephole-table.cc"
 v8_random_seed = "314159265"
 v8_toolset_for_shell = "host"
@@ -107,10 +115,7 @@
   include_dirs = [ "." ]
 
   if (is_component_build) {
-    defines = [
-      "V8_SHARED",
-      "BUILDING_V8_SHARED",
-    ]
+    defines = [ "BUILDING_V8_SHARED" ]
   }
 }
 
@@ -134,12 +139,12 @@
 # itself.
 config("external_config") {
   if (is_component_build) {
-    defines = [
-      "V8_SHARED",
-      "USING_V8_SHARED",
-    ]
+    defines = [ "USING_V8_SHARED" ]
   }
   include_dirs = [ "include" ]
+  if (v8_enable_inspector_override) {
+    include_dirs += [ "$target_gen_dir/include" ]
+  }
   libs = []
   if (is_android && current_toolchain != host_toolchain) {
     libs += [ "log" ]
@@ -200,7 +205,7 @@
 
   if (v8_current_cpu == "arm") {
     defines += [ "V8_TARGET_ARCH_ARM" ]
-    if (arm_version == 7) {
+    if (arm_version >= 7) {
       defines += [ "CAN_USE_ARMV7_INSTRUCTIONS" ]
     }
     if (arm_fpu == "vfpv3-d16") {
@@ -233,6 +238,12 @@
     defines += [ "V8_TARGET_ARCH_ARM64" ]
   }
 
+  # Mips64el/mipsel simulators.
+  if (is_target_simulator &&
+      (v8_current_cpu == "mipsel" || v8_current_cpu == "mips64el")) {
+    defines += [ "_MIPS_TARGET_SIMULATOR" ]
+  }
+
   # TODO(jochen): Add support for mips.
   if (v8_current_cpu == "mipsel") {
     defines += [ "V8_TARGET_ARCH_MIPS" ]
@@ -343,6 +354,13 @@
   } else if (dcheck_always_on) {
     defines += [ "DEBUG" ]
   }
+
+  if (v8_no_inline) {
+    cflags += [
+      "-fno-inline-functions",
+      "-fno-inline",
+    ]
+  }
 }
 
 ###############################################################################
@@ -374,17 +392,16 @@
     "src/js/regexp.js",
     "src/js/arraybuffer.js",
     "src/js/typedarray.js",
-    "src/js/iterator-prototype.js",
     "src/js/collection.js",
     "src/js/weak-collection.js",
     "src/js/collection-iterator.js",
     "src/js/promise.js",
     "src/js/messages.js",
     "src/js/array-iterator.js",
-    "src/js/string-iterator.js",
     "src/js/templates.js",
     "src/js/spread.js",
     "src/js/proxy.js",
+    "src/js/async-await.js",
     "src/debug/mirrors.js",
     "src/debug/debug.js",
     "src/debug/liveedit.js",
@@ -427,7 +444,6 @@
   sources = [
     "src/js/macros.py",
     "src/messages.h",
-    "src/js/harmony-async-await.js",
     "src/js/harmony-atomics.js",
     "src/js/harmony-simd.js",
     "src/js/harmony-string-padding.js",
@@ -439,8 +455,8 @@
 
   if (v8_enable_i18n_support) {
     sources += [
+      "src/js/datetime-format-to-parts.js",
       "src/js/icu-case-mapping.js",
-      "src/js/intl-extra.js",
     ]
   }
 
@@ -809,6 +825,8 @@
 
   sources = [
     "//base/trace_event/common/trace_event_common.h",
+
+    ### gcmole(all) ###
     "include/v8-debug.h",
     "include/v8-experimental.h",
     "include/v8-platform.h",
@@ -857,10 +875,14 @@
     "src/ast/ast-numbering.h",
     "src/ast/ast-traversal-visitor.h",
     "src/ast/ast-type-bounds.h",
+    "src/ast/ast-types.cc",
+    "src/ast/ast-types.h",
     "src/ast/ast-value-factory.cc",
     "src/ast/ast-value-factory.h",
     "src/ast/ast.cc",
     "src/ast/ast.h",
+    "src/ast/compile-time-value.cc",
+    "src/ast/compile-time-value.h",
     "src/ast/context-slot-cache.cc",
     "src/ast/context-slot-cache.h",
     "src/ast/modules.cc",
@@ -868,7 +890,6 @@
     "src/ast/prettyprinter.cc",
     "src/ast/prettyprinter.h",
     "src/ast/scopeinfo.cc",
-    "src/ast/scopeinfo.h",
     "src/ast/scopes.cc",
     "src/ast/scopes.h",
     "src/ast/variables.cc",
@@ -904,12 +925,14 @@
     "src/builtins/builtins-handler.cc",
     "src/builtins/builtins-internal.cc",
     "src/builtins/builtins-interpreter.cc",
+    "src/builtins/builtins-iterator.cc",
     "src/builtins/builtins-json.cc",
     "src/builtins/builtins-math.cc",
     "src/builtins/builtins-number.cc",
     "src/builtins/builtins-object.cc",
     "src/builtins/builtins-proxy.cc",
     "src/builtins/builtins-reflect.cc",
+    "src/builtins/builtins-regexp.cc",
     "src/builtins/builtins-sharedarraybuffer.cc",
     "src/builtins/builtins-string.cc",
     "src/builtins/builtins-symbol.cc",
@@ -940,6 +963,8 @@
     "src/compilation-cache.h",
     "src/compilation-dependencies.cc",
     "src/compilation-dependencies.h",
+    "src/compilation-info.cc",
+    "src/compilation-info.h",
     "src/compilation-statistics.cc",
     "src/compilation-statistics.h",
     "src/compiler-dispatcher/compiler-dispatcher-job.cc",
@@ -1069,6 +1094,8 @@
     "src/compiler/loop-peeling.h",
     "src/compiler/loop-variable-optimizer.cc",
     "src/compiler/loop-variable-optimizer.h",
+    "src/compiler/machine-graph-verifier.cc",
+    "src/compiler/machine-graph-verifier.h",
     "src/compiler/machine-operator-reducer.cc",
     "src/compiler/machine-operator-reducer.h",
     "src/compiler/machine-operator.cc",
@@ -1132,12 +1159,16 @@
     "src/compiler/store-store-elimination.h",
     "src/compiler/tail-call-optimization.cc",
     "src/compiler/tail-call-optimization.h",
+    "src/compiler/type-cache.cc",
+    "src/compiler/type-cache.h",
     "src/compiler/type-hint-analyzer.cc",
     "src/compiler/type-hint-analyzer.h",
-    "src/compiler/type-hints.cc",
-    "src/compiler/type-hints.h",
+    "src/compiler/typed-optimization.cc",
+    "src/compiler/typed-optimization.h",
     "src/compiler/typer.cc",
     "src/compiler/typer.h",
+    "src/compiler/types.cc",
+    "src/compiler/types.h",
     "src/compiler/unwinding-info-writer.h",
     "src/compiler/value-numbering-reducer.cc",
     "src/compiler/value-numbering-reducer.h",
@@ -1216,6 +1247,7 @@
     "src/crankshaft/lithium-allocator.h",
     "src/crankshaft/lithium-codegen.cc",
     "src/crankshaft/lithium-codegen.h",
+    "src/crankshaft/lithium-inl.h",
     "src/crankshaft/lithium.cc",
     "src/crankshaft/lithium.h",
     "src/crankshaft/typing.cc",
@@ -1314,6 +1346,7 @@
     "src/heap/heap-inl.h",
     "src/heap/heap.cc",
     "src/heap/heap.h",
+    "src/heap/incremental-marking-inl.h",
     "src/heap/incremental-marking-job.cc",
     "src/heap/incremental-marking-job.h",
     "src/heap/incremental-marking.cc",
@@ -1351,6 +1384,7 @@
     "src/ic/call-optimization.h",
     "src/ic/handler-compiler.cc",
     "src/ic/handler-compiler.h",
+    "src/ic/handler-configuration.h",
     "src/ic/ic-compiler.cc",
     "src/ic/ic-compiler.h",
     "src/ic/ic-inl.h",
@@ -1382,12 +1416,13 @@
     "src/interpreter/bytecode-generator.h",
     "src/interpreter/bytecode-label.cc",
     "src/interpreter/bytecode-label.h",
+    "src/interpreter/bytecode-operands.cc",
+    "src/interpreter/bytecode-operands.h",
     "src/interpreter/bytecode-peephole-optimizer.cc",
     "src/interpreter/bytecode-peephole-optimizer.h",
     "src/interpreter/bytecode-peephole-table.h",
     "src/interpreter/bytecode-pipeline.cc",
     "src/interpreter/bytecode-pipeline.h",
-    "src/interpreter/bytecode-register-allocator.cc",
     "src/interpreter/bytecode-register-allocator.h",
     "src/interpreter/bytecode-register-optimizer.cc",
     "src/interpreter/bytecode-register-optimizer.h",
@@ -1422,11 +1457,16 @@
     "src/layout-descriptor.h",
     "src/list-inl.h",
     "src/list.h",
+    "src/locked-queue-inl.h",
+    "src/locked-queue.h",
     "src/log-inl.h",
     "src/log-utils.cc",
     "src/log-utils.h",
     "src/log.cc",
     "src/log.h",
+    "src/lookup-cache-inl.h",
+    "src/lookup-cache.cc",
+    "src/lookup-cache.h",
     "src/lookup.cc",
     "src/lookup.h",
     "src/machine-type.cc",
@@ -1444,6 +1484,8 @@
     "src/objects.h",
     "src/ostreams.cc",
     "src/ostreams.h",
+    "src/parsing/duplicate-finder.cc",
+    "src/parsing/duplicate-finder.h",
     "src/parsing/expression-classifier.h",
     "src/parsing/func-name-inferrer.cc",
     "src/parsing/func-name-inferrer.h",
@@ -1495,6 +1537,8 @@
     "src/profiler/strings-storage.h",
     "src/profiler/tick-sample.cc",
     "src/profiler/tick-sample.h",
+    "src/profiler/tracing-cpu-profiler.cc",
+    "src/profiler/tracing-cpu-profiler.h",
     "src/profiler/unbound-queue-inl.h",
     "src/profiler/unbound-queue.h",
     "src/property-descriptor.cc",
@@ -1601,15 +1645,13 @@
     "src/transitions-inl.h",
     "src/transitions.cc",
     "src/transitions.h",
-    "src/type-cache.cc",
-    "src/type-cache.h",
     "src/type-feedback-vector-inl.h",
     "src/type-feedback-vector.cc",
     "src/type-feedback-vector.h",
+    "src/type-hints.cc",
+    "src/type-hints.h",
     "src/type-info.cc",
     "src/type-info.h",
-    "src/types.cc",
-    "src/types.h",
     "src/unicode-cache-inl.h",
     "src/unicode-cache.h",
     "src/unicode-decoder.cc",
@@ -1629,6 +1671,7 @@
     "src/v8threads.h",
     "src/value-serializer.cc",
     "src/value-serializer.h",
+    "src/vector.h",
     "src/version.cc",
     "src/version.h",
     "src/vm-state-inl.h",
@@ -1636,8 +1679,6 @@
     "src/wasm/ast-decoder.cc",
     "src/wasm/ast-decoder.h",
     "src/wasm/decoder.h",
-    "src/wasm/encoder.cc",
-    "src/wasm/encoder.h",
     "src/wasm/leb-helper.h",
     "src/wasm/module-decoder.cc",
     "src/wasm/module-decoder.h",
@@ -1654,20 +1695,27 @@
     "src/wasm/wasm-js.cc",
     "src/wasm/wasm-js.h",
     "src/wasm/wasm-macro-gen.h",
+    "src/wasm/wasm-module-builder.cc",
+    "src/wasm/wasm-module-builder.h",
     "src/wasm/wasm-module.cc",
     "src/wasm/wasm-module.h",
     "src/wasm/wasm-opcodes.cc",
     "src/wasm/wasm-opcodes.h",
     "src/wasm/wasm-result.cc",
     "src/wasm/wasm-result.h",
-    "src/zone-allocator.h",
-    "src/zone-containers.h",
-    "src/zone.cc",
-    "src/zone.h",
+    "src/zone/accounting-allocator.cc",
+    "src/zone/accounting-allocator.h",
+    "src/zone/zone-allocator.h",
+    "src/zone/zone-allocator.h",
+    "src/zone/zone-containers.h",
+    "src/zone/zone-segment.cc",
+    "src/zone/zone-segment.h",
+    "src/zone/zone.cc",
+    "src/zone/zone.h",
   ]
 
   if (v8_current_cpu == "x86") {
-    sources += [
+    sources += [  ### gcmole(arch:ia32) ###
       "src/builtins/ia32/builtins-ia32.cc",
       "src/compiler/ia32/code-generator-ia32.cc",
       "src/compiler/ia32/instruction-codes-ia32.h",
@@ -1696,6 +1744,8 @@
       "src/ia32/interface-descriptors-ia32.cc",
       "src/ia32/macro-assembler-ia32.cc",
       "src/ia32/macro-assembler-ia32.h",
+      "src/ia32/simulator-ia32.cc",
+      "src/ia32/simulator-ia32.h",
       "src/ic/ia32/access-compiler-ia32.cc",
       "src/ic/ia32/handler-compiler-ia32.cc",
       "src/ic/ia32/ic-compiler-ia32.cc",
@@ -1705,7 +1755,7 @@
       "src/regexp/ia32/regexp-macro-assembler-ia32.h",
     ]
   } else if (v8_current_cpu == "x64") {
-    sources += [
+    sources += [  ### gcmole(arch:x64) ###
       "src/builtins/x64/builtins-x64.cc",
       "src/compiler/x64/code-generator-x64.cc",
       "src/compiler/x64/instruction-codes-x64.h",
@@ -1728,6 +1778,7 @@
       "src/ic/x64/stub-cache-x64.cc",
       "src/regexp/x64/regexp-macro-assembler-x64.cc",
       "src/regexp/x64/regexp-macro-assembler-x64.h",
+      "src/third_party/valgrind/valgrind.h",
       "src/x64/assembler-x64-inl.h",
       "src/x64/assembler-x64.cc",
       "src/x64/assembler-x64.h",
@@ -1744,9 +1795,12 @@
       "src/x64/interface-descriptors-x64.cc",
       "src/x64/macro-assembler-x64.cc",
       "src/x64/macro-assembler-x64.h",
+      "src/x64/simulator-x64.cc",
+      "src/x64/simulator-x64.h",
+      "src/x64/sse-instr.h",
     ]
   } else if (v8_current_cpu == "arm") {
-    sources += [
+    sources += [  ### gcmole(arch:arm) ###
       "src/arm/assembler-arm-inl.h",
       "src/arm/assembler-arm.cc",
       "src/arm/assembler-arm.h",
@@ -1792,7 +1846,7 @@
       "src/regexp/arm/regexp-macro-assembler-arm.h",
     ]
   } else if (v8_current_cpu == "arm64") {
-    sources += [
+    sources += [  ### gcmole(arch:arm64) ###
       "src/arm64/assembler-arm64-inl.h",
       "src/arm64/assembler-arm64.cc",
       "src/arm64/assembler-arm64.h",
@@ -1850,8 +1904,8 @@
       "src/regexp/arm64/regexp-macro-assembler-arm64.cc",
       "src/regexp/arm64/regexp-macro-assembler-arm64.h",
     ]
-  } else if (v8_current_cpu == "mipsel") {
-    sources += [
+  } else if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel") {
+    sources += [  ### gcmole(arch:mipsel) ###
       "src/builtins/mips/builtins-mips.cc",
       "src/compiler/mips/code-generator-mips.cc",
       "src/compiler/mips/instruction-codes-mips.h",
@@ -1892,8 +1946,8 @@
       "src/regexp/mips/regexp-macro-assembler-mips.cc",
       "src/regexp/mips/regexp-macro-assembler-mips.h",
     ]
-  } else if (v8_current_cpu == "mips64el") {
-    sources += [
+  } else if (v8_current_cpu == "mips64" || v8_current_cpu == "mips64el") {
+    sources += [  ### gcmole(arch:mips64el) ###
       "src/builtins/mips64/builtins-mips64.cc",
       "src/compiler/mips64/code-generator-mips64.cc",
       "src/compiler/mips64/instruction-codes-mips64.h",
@@ -1934,8 +1988,50 @@
       "src/regexp/mips64/regexp-macro-assembler-mips64.cc",
       "src/regexp/mips64/regexp-macro-assembler-mips64.h",
     ]
+  } else if (v8_current_cpu == "ppc" || v8_current_cpu == "ppc64") {
+    sources += [  ### gcmole(arch:ppc) ###
+      "src/builtins/ppc/builtins-ppc.cc",
+      "src/compiler/ppc/code-generator-ppc.cc",
+      "src/compiler/ppc/instruction-codes-ppc.h",
+      "src/compiler/ppc/instruction-scheduler-ppc.cc",
+      "src/compiler/ppc/instruction-selector-ppc.cc",
+      "src/crankshaft/ppc/lithium-codegen-ppc.cc",
+      "src/crankshaft/ppc/lithium-codegen-ppc.h",
+      "src/crankshaft/ppc/lithium-gap-resolver-ppc.cc",
+      "src/crankshaft/ppc/lithium-gap-resolver-ppc.h",
+      "src/crankshaft/ppc/lithium-ppc.cc",
+      "src/crankshaft/ppc/lithium-ppc.h",
+      "src/debug/ppc/debug-ppc.cc",
+      "src/full-codegen/ppc/full-codegen-ppc.cc",
+      "src/ic/ppc/access-compiler-ppc.cc",
+      "src/ic/ppc/handler-compiler-ppc.cc",
+      "src/ic/ppc/ic-compiler-ppc.cc",
+      "src/ic/ppc/ic-ppc.cc",
+      "src/ic/ppc/stub-cache-ppc.cc",
+      "src/ppc/assembler-ppc-inl.h",
+      "src/ppc/assembler-ppc.cc",
+      "src/ppc/assembler-ppc.h",
+      "src/ppc/code-stubs-ppc.cc",
+      "src/ppc/code-stubs-ppc.h",
+      "src/ppc/codegen-ppc.cc",
+      "src/ppc/codegen-ppc.h",
+      "src/ppc/constants-ppc.cc",
+      "src/ppc/constants-ppc.h",
+      "src/ppc/cpu-ppc.cc",
+      "src/ppc/deoptimizer-ppc.cc",
+      "src/ppc/disasm-ppc.cc",
+      "src/ppc/frames-ppc.cc",
+      "src/ppc/frames-ppc.h",
+      "src/ppc/interface-descriptors-ppc.cc",
+      "src/ppc/macro-assembler-ppc.cc",
+      "src/ppc/macro-assembler-ppc.h",
+      "src/ppc/simulator-ppc.cc",
+      "src/ppc/simulator-ppc.h",
+      "src/regexp/ppc/regexp-macro-assembler-ppc.cc",
+      "src/regexp/ppc/regexp-macro-assembler-ppc.h",
+    ]
   } else if (v8_current_cpu == "s390" || v8_current_cpu == "s390x") {
-    sources += [
+    sources += [  ### gcmole(arch:s390) ###
       "src/builtins/s390/builtins-s390.cc",
       "src/compiler/s390/code-generator-s390.cc",
       "src/compiler/s390/instruction-codes-s390.h",
@@ -1976,6 +2072,46 @@
       "src/s390/simulator-s390.cc",
       "src/s390/simulator-s390.h",
     ]
+  } else if (v8_current_cpu == "x87") {
+    sources += [  ### gcmole(arch:x87) ###
+      "src/builtins/x87/builtins-x87.cc",
+      "src/compiler/x87/code-generator-x87.cc",
+      "src/compiler/x87/instruction-codes-x87.h",
+      "src/compiler/x87/instruction-scheduler-x87.cc",
+      "src/compiler/x87/instruction-selector-x87.cc",
+      "src/crankshaft/x87/lithium-codegen-x87.cc",
+      "src/crankshaft/x87/lithium-codegen-x87.h",
+      "src/crankshaft/x87/lithium-gap-resolver-x87.cc",
+      "src/crankshaft/x87/lithium-gap-resolver-x87.h",
+      "src/crankshaft/x87/lithium-x87.cc",
+      "src/crankshaft/x87/lithium-x87.h",
+      "src/debug/x87/debug-x87.cc",
+      "src/full-codegen/x87/full-codegen-x87.cc",
+      "src/ic/x87/access-compiler-x87.cc",
+      "src/ic/x87/handler-compiler-x87.cc",
+      "src/ic/x87/ic-compiler-x87.cc",
+      "src/ic/x87/ic-x87.cc",
+      "src/ic/x87/stub-cache-x87.cc",
+      "src/regexp/x87/regexp-macro-assembler-x87.cc",
+      "src/regexp/x87/regexp-macro-assembler-x87.h",
+      "src/x87/assembler-x87-inl.h",
+      "src/x87/assembler-x87.cc",
+      "src/x87/assembler-x87.h",
+      "src/x87/code-stubs-x87.cc",
+      "src/x87/code-stubs-x87.h",
+      "src/x87/codegen-x87.cc",
+      "src/x87/codegen-x87.h",
+      "src/x87/cpu-x87.cc",
+      "src/x87/deoptimizer-x87.cc",
+      "src/x87/disasm-x87.cc",
+      "src/x87/frames-x87.cc",
+      "src/x87/frames-x87.h",
+      "src/x87/interface-descriptors-x87.cc",
+      "src/x87/macro-assembler-x87.cc",
+      "src/x87/macro-assembler-x87.h",
+      "src/x87/simulator-x87.cc",
+      "src/x87/simulator-x87.h",
+    ]
   }
 
   configs = [ ":internal_config" ]
@@ -2010,14 +2146,16 @@
     sources += [ "$target_gen_dir/debug-support.cc" ]
     deps += [ ":postmortem-metadata" ]
   }
+
+  if (v8_enable_inspector_override) {
+    deps += [ "src/inspector:inspector" ]
+  }
 }
 
 v8_source_set("v8_libbase") {
   visibility = [ ":*" ]  # Only targets in this file can depend on this.
 
   sources = [
-    "src/base/accounting-allocator.cc",
-    "src/base/accounting-allocator.h",
     "src/base/adapters.h",
     "src/base/atomic-utils.h",
     "src/base/atomicops.h",
@@ -2035,6 +2173,7 @@
     "src/base/bits.cc",
     "src/base/bits.h",
     "src/base/build_config.h",
+    "src/base/compiler-specific.h",
     "src/base/cpu.cc",
     "src/base/cpu.h",
     "src/base/debug/stack_trace.cc",
@@ -2048,6 +2187,7 @@
     "src/base/free_deleter.h",
     "src/base/functional.cc",
     "src/base/functional.h",
+    "src/base/hashmap-entry.h",
     "src/base/hashmap.h",
     "src/base/ieee754.cc",
     "src/base/ieee754.h",
@@ -2200,6 +2340,27 @@
   configs = [ ":internal_config_base" ]
 
   deps = [
+    ":v8",
+  ]
+
+  public_deps = [
+    ":v8_libplatform",
+  ]
+}
+
+# Used by fuzzers that would require exposing too many symbols for a proper
+# component build.
+v8_source_set("fuzzer_support_nocomponent") {
+  visibility = [ ":*" ]  # Only targets in this file can depend on this.
+
+  sources = [
+    "test/fuzzer/fuzzer-support.cc",
+    "test/fuzzer/fuzzer-support.h",
+  ]
+
+  configs = [ ":internal_config_base" ]
+
+  deps = [
     ":v8_maybe_snapshot",
   ]
 
@@ -2247,7 +2408,10 @@
   visibility = [ ":*" ]  # Only targets in this file can depend on this.
 
   sources = [
+    "src/interpreter/bytecode-operands.cc",
+    "src/interpreter/bytecode-operands.h",
     "src/interpreter/bytecode-peephole-optimizer.h",
+    "src/interpreter/bytecode-traits.h",
     "src/interpreter/bytecodes.cc",
     "src/interpreter/bytecodes.h",
     "src/interpreter/mkpeephole.cc",
@@ -2336,6 +2500,7 @@
 
 v8_executable("d8") {
   sources = [
+    "$target_gen_dir/d8-js.cc",
     "src/d8.cc",
     "src/d8.h",
   ]
@@ -2363,9 +2528,6 @@
     sources += [ "src/d8-windows.cc" ]
   }
 
-  if (!is_component_build) {
-    sources += [ "$target_gen_dir/d8-js.cc" ]
-  }
   if (v8_enable_i18n_support) {
     deps += [ "//third_party/icu" ]
   }
@@ -2516,7 +2678,10 @@
     ":fuzzer_support",
   ]
 
-  configs = [ ":internal_config" ]
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
 }
 
 v8_fuzzer("json_fuzzer") {
@@ -2528,10 +2693,13 @@
   ]
 
   deps = [
-    ":fuzzer_support",
+    ":fuzzer_support_nocomponent",
   ]
 
-  configs = [ ":internal_config" ]
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
 }
 
 v8_fuzzer("parser_fuzzer") {
@@ -2546,12 +2714,38 @@
     ":fuzzer_support",
   ]
 
-  configs = [ ":internal_config" ]
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
 }
 
 v8_fuzzer("regexp_fuzzer") {
 }
 
+v8_source_set("wasm_module_runner") {
+  sources = [
+    "test/common/wasm/wasm-module-runner.cc",
+    "test/common/wasm/wasm-module-runner.h",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_source_set("wasm_test_signatures") {
+  sources = [
+    "test/common/wasm/test-signatures.h",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
 v8_source_set("wasm_fuzzer") {
   sources = [
     "test/fuzzer/wasm.cc",
@@ -2559,9 +2753,13 @@
 
   deps = [
     ":fuzzer_support",
+    ":wasm_module_runner",
   ]
 
-  configs = [ ":internal_config" ]
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
 }
 
 v8_fuzzer("wasm_fuzzer") {
@@ -2574,10 +2772,186 @@
 
   deps = [
     ":fuzzer_support",
+    ":wasm_module_runner",
   ]
 
-  configs = [ ":internal_config" ]
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
 }
 
 v8_fuzzer("wasm_asmjs_fuzzer") {
 }
+
+v8_source_set("wasm_code_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-code.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":wasm_module_runner",
+    ":wasm_test_signatures",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_code_fuzzer") {
+}
+
+v8_source_set("lib_wasm_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-section-fuzzers.cc",
+    "test/fuzzer/wasm-section-fuzzers.h",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_source_set("wasm_types_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-types-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_types_section_fuzzer") {
+}
+
+v8_source_set("wasm_names_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-names-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_names_section_fuzzer") {
+}
+
+v8_source_set("wasm_globals_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-globals-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_globals_section_fuzzer") {
+}
+
+v8_source_set("wasm_imports_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-imports-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_imports_section_fuzzer") {
+}
+
+v8_source_set("wasm_function_sigs_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-function-sigs-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_function_sigs_section_fuzzer") {
+}
+
+v8_source_set("wasm_memory_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-memory-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_memory_section_fuzzer") {
+}
+
+v8_source_set("wasm_data_section_fuzzer") {
+  sources = [
+    "test/fuzzer/wasm-data-section.cc",
+  ]
+
+  deps = [
+    ":fuzzer_support",
+    ":lib_wasm_section_fuzzer",
+    ":wasm_module_runner",
+  ]
+
+  configs = [
+    ":external_config",
+    ":internal_config_base",
+  ]
+}
+
+v8_fuzzer("wasm_data_section_fuzzer") {
+}
diff --git a/ChangeLog b/ChangeLog
index b2a43a1..40c8537 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,1987 @@
+2016-10-05: Version 5.5.372
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-05: Version 5.5.371
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-05: Version 5.5.370
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-05: Version 5.5.369
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-05: Version 5.5.368
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-05: Version 5.5.367
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.366
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.365
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.364
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.363
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.362
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.361
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.360
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.359
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.358
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.357
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.356
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.355
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.354
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.353
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-04: Version 5.5.352
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.351
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.350
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.349
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.348
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.347
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-03: Version 5.5.346
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-02: Version 5.5.345
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-02: Version 5.5.344
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-02: Version 5.5.343
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-02: Version 5.5.342
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-01: Version 5.5.341
+
+        Performance and stability improvements on all platforms.
+
+
+2016-10-01: Version 5.5.340
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-30: Version 5.5.339
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-30: Version 5.5.338
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-30: Version 5.5.337
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-30: Version 5.5.336
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-30: Version 5.5.335
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.334
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.333
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.332
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.331
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.330
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.329
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.328
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.327
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.326
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.325
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.324
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.323
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.322
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.321
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.320
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.319
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.318
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-29: Version 5.5.317
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.316
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.315
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.314
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.313
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.312
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.311
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.310
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-28: Version 5.5.309
+
+        [wasm] Master CL for Binary 0xC changes (Chromium issue 575167).
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.308
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.307
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.306
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.305
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.304
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.303
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.302
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.301
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.300
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.299
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.298
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.297
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-27: Version 5.5.296
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.295
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.294
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.293
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.292
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.291
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.290
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.289
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.288
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.287
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.286
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.285
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.284
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.283
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-26: Version 5.5.282
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-24: Version 5.5.281
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-24: Version 5.5.280
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-24: Version 5.5.279
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-24: Version 5.5.278
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-23: Version 5.5.277
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-23: Version 5.5.276
+
+        [wasm] Master CL for Binary 0xC changes (Chromium issue 575167).
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-23: Version 5.5.275
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.274
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.273
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.272
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.271
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.270
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.269
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.268
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.267
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.266
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.265
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.264
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.263
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.262
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.261
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.260
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.259
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-22: Version 5.5.258
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.257
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.256
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.255
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.254
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.253
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.252
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.251
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.250
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.249
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.248
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.247
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.246
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.245
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.244
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.243
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.242
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.241
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.240
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.239
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-21: Version 5.5.238
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.237
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.236
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.235
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.234
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.233
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.232
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.231
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-20: Version 5.5.230
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-19: Version 5.5.229
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-19: Version 5.5.228
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-19: Version 5.5.227
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.226
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.225
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.224
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.223
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.222
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.221
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.220
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.219
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-16: Version 5.5.218
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.217
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.216
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.215
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.214
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.213
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.212
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.211
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.210
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.209
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.208
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-15: Version 5.5.207
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.206
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.205
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.204
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.203
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.202
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.201
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.200
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.199
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.198
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.197
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.196
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.195
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.194
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.193
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.192
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.191
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.190
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.189
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.188
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.187
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-14: Version 5.5.186
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.185
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.184
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.183
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.182
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.181
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.180
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.179
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.178
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.177
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.176
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.175
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.174
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.173
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.172
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.171
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-13: Version 5.5.170
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.169
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.168
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.167
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.166
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.165
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.164
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.163
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.162
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.161
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.160
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.159
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.158
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.157
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.156
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.155
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.154
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.153
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.152
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.151
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-12: Version 5.5.150
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-10: Version 5.5.149
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.148
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.147
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.146
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.145
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.144
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.143
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.142
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.141
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.140
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.139
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.138
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.137
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.136
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.135
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.134
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-09: Version 5.5.133
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.132
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.131
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.130
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.129
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.128
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.127
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.126
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.125
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.124
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-08: Version 5.5.123
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.122
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.121
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.120
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.119
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.118
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.117
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.116
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.115
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.114
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.113
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.112
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-07: Version 5.5.111
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.110
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.109
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.108
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.107
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.106
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.105
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.104
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.103
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.102
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-06: Version 5.5.101
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.100
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.99
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.98
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.97
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.96
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.95
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.94
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.93
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.92
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.91
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.90
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.89
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.88
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.87
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-05: Version 5.5.86
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-04: Version 5.5.85
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-04: Version 5.5.84
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-04: Version 5.5.83
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.82
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.81
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.80
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.79
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.78
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.77
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.76
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.75
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.74
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.73
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.72
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-02: Version 5.5.71
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.70
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.69
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.68
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.67
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.66
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.65
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.64
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.63
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.62
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.61
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.60
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.59
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.58
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.57
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.56
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.55
+
+        Performance and stability improvements on all platforms.
+
+
+2016-09-01: Version 5.5.54
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.53
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.52
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.51
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.50
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.49
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.48
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.47
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.46
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.45
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.44
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.43
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.42
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.41
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.40
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.39
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.38
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-31: Version 5.5.37
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.36
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.35
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.34
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.33
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.32
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.31
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.30
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.29
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.28
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.27
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.26
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.25
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.24
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.23
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-30: Version 5.5.22
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.21
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.20
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.19
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.18
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.17
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.16
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.15
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.14
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.13
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.12
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.11
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.10
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.9
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.8
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.7
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-29: Version 5.5.6
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-28: Version 5.5.5
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-28: Version 5.5.4
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-28: Version 5.5.3
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-28: Version 5.5.2
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-26: Version 5.5.1
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.524
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.523
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.522
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.521
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.520
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.519
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-25: Version 5.4.518
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.517
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.516
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.515
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.514
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.513
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.512
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.511
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.510
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.509
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.508
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.507
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-24: Version 5.4.506
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-23: Version 5.4.505
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-23: Version 5.4.504
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-23: Version 5.4.503
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-23: Version 5.4.502
+
+        Performance and stability improvements on all platforms.
+
+
+2016-08-23: Version 5.4.501
+
+        Performance and stability improvements on all platforms.
+
+
 2016-08-23: Version 5.4.500
 
         Performance and stability improvements on all platforms.
diff --git a/DEPS b/DEPS
index 6cac01d..058cd8b 100644
--- a/DEPS
+++ b/DEPS
@@ -3,61 +3,62 @@
 # all paths in here must match this assumption.
 
 vars = {
-  "git_url": "https://chromium.googlesource.com",
+  "chromium_url": "https://chromium.googlesource.com",
 }
 
 deps = {
   "v8/build":
-    Var("git_url") + "/chromium/src/build.git" + "@" + "59daf502c36f20b5c9292f4bd9af85791f8a5884",
+    Var("chromium_url") + "/chromium/src/build.git" + "@" + "475d5b37ded6589c9f8a0d19ced54ddf2e6d14a0",
   "v8/tools/gyp":
-    Var("git_url") + "/external/gyp.git" + "@" + "702ac58e477214c635d9b541932e75a95d349352",
+    Var("chromium_url") + "/external/gyp.git" + "@" + "e7079f0e0e14108ab0dba58728ff219637458563",
   "v8/third_party/icu":
-    Var("git_url") + "/chromium/deps/icu.git" + "@" + "2341038bf72869a5683a893a2b319a48ffec7f62",
+    Var("chromium_url") + "/chromium/deps/icu.git" + "@" + "b0bd3ee50bc2e768d7a17cbc60d87f517f024dbe",
   "v8/third_party/instrumented_libraries":
-    Var("git_url") + "/chromium/src/third_party/instrumented_libraries.git" + "@" + "f15768d7fdf68c0748d20738184120c8ab2e6db7",
+    Var("chromium_url") + "/chromium/src/third_party/instrumented_libraries.git" + "@" + "45f5814b1543e41ea0be54c771e3840ea52cca4a",
   "v8/buildtools":
-    Var("git_url") + "/chromium/buildtools.git" + "@" + "adb8bf4e8fc92aa1717bf151b862d58e6f27c4f2",
+    Var("chromium_url") + "/chromium/buildtools.git" + "@" + "5fd66957f08bb752dca714a591c84587c9d70762",
   "v8/base/trace_event/common":
-    Var("git_url") + "/chromium/src/base/trace_event/common.git" + "@" + "315bf1e2d45be7d53346c31cfcc37424a32c30c8",
+    Var("chromium_url") + "/chromium/src/base/trace_event/common.git" + "@" + "e0fa02a02f61430dae2bddfd89a334ea4389f495",
   "v8/third_party/WebKit/Source/platform/inspector_protocol":
-    Var("git_url") + "/chromium/src/third_party/WebKit/Source/platform/inspector_protocol.git" + "@" + "547960151fb364dd9a382fa79ffc9abfb184e3d1",
+    Var("chromium_url") + "/chromium/src/third_party/WebKit/Source/platform/inspector_protocol.git" + "@" + "3280c57c4c575ce82ccd13e4a403492fb4ca624b",
   "v8/third_party/jinja2":
-    Var("git_url") + "/chromium/src/third_party/jinja2.git" + "@" + "2222b31554f03e62600cd7e383376a7c187967a1",
+    Var("chromium_url") + "/chromium/src/third_party/jinja2.git" + "@" + "b61a2c009a579593a259c1b300e0ad02bf48fd78",
   "v8/third_party/markupsafe":
-    Var("git_url") + "/chromium/src/third_party/markupsafe.git" + "@" + "484a5661041cac13bfc688a26ec5434b05d18961",
-  "v8/tools/mb":
-    Var('git_url') + '/chromium/src/tools/mb.git' + '@' + "99788b8b516c44d7db25cfb68695bc234fdee5ed",
+    Var("chromium_url") + "/chromium/src/third_party/markupsafe.git" + "@" + "484a5661041cac13bfc688a26ec5434b05d18961",
   "v8/tools/swarming_client":
-    Var('git_url') + '/external/swarming.client.git' + '@' + "e4288c3040a32f2e7ad92f957668f2ee3d36e5a6",
+    Var('chromium_url') + '/external/swarming.client.git' + '@' + "380e32662312eb107f06fcba6409b0409f8fef72",
   "v8/testing/gtest":
-    Var("git_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
+    Var("chromium_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
   "v8/testing/gmock":
-    Var("git_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be",
+    Var("chromium_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be",
   "v8/test/benchmarks/data":
-    Var("git_url") + "/v8/deps/third_party/benchmarks.git" + "@" + "05d7188267b4560491ff9155c5ee13e207ecd65f",
+    Var("chromium_url") + "/v8/deps/third_party/benchmarks.git" + "@" + "05d7188267b4560491ff9155c5ee13e207ecd65f",
   "v8/test/mozilla/data":
-    Var("git_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
-  "v8/test/simdjs/data": Var("git_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "baf493985cb9ea7cdbd0d68704860a8156de9556",
+    Var("chromium_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
+  "v8/test/simdjs/data": Var("chromium_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "baf493985cb9ea7cdbd0d68704860a8156de9556",
   "v8/test/test262/data":
-    Var("git_url") + "/external/github.com/tc39/test262.git" + "@" + "88bc7fe7586f161201c5f14f55c9c489f82b1b67",
+    Var("chromium_url") + "/external/github.com/tc39/test262.git" + "@" + "29c23844494a7cc2fbebc6948d2cb0bcaddb24e7",
   "v8/test/test262/harness":
-    Var("git_url") + "/external/github.com/test262-utils/test262-harness-py.git" + "@" + "cbd968f54f7a95c6556d53ba852292a4c49d11d8",
+    Var("chromium_url") + "/external/github.com/test262-utils/test262-harness-py.git" + "@" + "cbd968f54f7a95c6556d53ba852292a4c49d11d8",
   "v8/tools/clang":
-    Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "3afb04a8153e40ff00f9eaa14337851c3ab4a368",
+    Var("chromium_url") + "/chromium/src/tools/clang.git" + "@" + "1f92f999fc374a479e98a189ebdfe25c09484486",
 }
 
 deps_os = {
   "android": {
     "v8/third_party/android_tools":
-      Var("git_url") + "/android_tools.git" + "@" + "af1c5a4cd6329ccdcf8c2bc93d9eea02f9d74869",
+      Var("chromium_url") + "/android_tools.git" + "@" + "25d57ead05d3dfef26e9c19b13ed10b0a69829cf",
   },
   "win": {
     "v8/third_party/cygwin":
-      Var("git_url") + "/chromium/deps/cygwin.git" + "@" + "c89e446b273697fadf3a10ff1007a97c0b7de6df",
+      Var("chromium_url") + "/chromium/deps/cygwin.git" + "@" + "c89e446b273697fadf3a10ff1007a97c0b7de6df",
   }
 }
 
-recursedeps = [ 'v8/third_party/android_tools' ]
+recursedeps = [
+  "v8/buildtools",
+  "v8/third_party/android_tools",
+]
 
 include_rules = [
   # Everybody can use some things.
@@ -204,6 +205,39 @@
     ],
   },
   {
+    "name": "wasm_fuzzer",
+    "pattern": ".",
+    "action": [ "download_from_google_storage",
+                "--no_resume",
+                "--no_auth",
+                "-u",
+                "--bucket", "v8-wasm-fuzzer",
+                "-s", "v8/test/fuzzer/wasm.tar.gz.sha1",
+    ],
+  },
+  {
+    "name": "wasm_asmjs_fuzzer",
+    "pattern": ".",
+    "action": [ "download_from_google_storage",
+                "--no_resume",
+                "--no_auth",
+                "-u",
+                "--bucket", "v8-wasm-asmjs-fuzzer",
+                "-s", "v8/test/fuzzer/wasm_asmjs.tar.gz.sha1",
+    ],
+  },
+  {
+    "name": "closure_compiler",
+    "pattern": ".",
+    "action": [ "download_from_google_storage",
+                "--no_resume",
+                "--no_auth",
+                "-u",
+                "--bucket", "chromium-v8-closure-compiler",
+                "-s", "v8/src/inspector/build/closure-compiler.tar.gz.sha1",
+    ],
+  },
+  {
     # Downloads the current stable linux sysroot to build/linux/ if needed.
     # This sysroot updates at about the same rate that the chrome build deps
     # change.
@@ -259,6 +293,6 @@
   {
     # A change to a .gyp, .gypi, or to GYP itself should run the generator.
     "pattern": ".",
-    "action": ["python", "v8/gypfiles/gyp_v8"],
+    "action": ["python", "v8/gypfiles/gyp_v8", "--running-as-hook"],
   },
 ]
diff --git a/OWNERS b/OWNERS
index 26701ee..028f4ff 100644
--- a/OWNERS
+++ b/OWNERS
@@ -22,7 +22,6 @@
 mvstanton@chromium.org
 mythria@chromium.org
 neis@chromium.org
-oth@chromium.org
 rmcilroy@chromium.org
 rossberg@chromium.org
 titzer@chromium.org
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 5255ca1..78e7482 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -216,6 +216,38 @@
     return []
 
 
+def _CheckMissingFiles(input_api, output_api):
+  """Runs verify_source_deps.py to ensure no files were added that are not in
+  GN.
+  """
+  # We need to wait until we have an input_api object and use this
+  # roundabout construct to import checkdeps because this file is
+  # eval-ed and thus doesn't have __file__.
+  original_sys_path = sys.path
+  try:
+    sys.path = sys.path + [input_api.os_path.join(
+        input_api.PresubmitLocalPath(), 'tools')]
+    from verify_source_deps import missing_gn_files, missing_gyp_files
+  finally:
+    # Restore sys.path to what it was before.
+    sys.path = original_sys_path
+
+  gn_files = missing_gn_files()
+  gyp_files = missing_gyp_files()
+  results = []
+  if gn_files:
+    results.append(output_api.PresubmitError(
+        "You added one or more source files but didn't update the\n"
+        "corresponding BUILD.gn files:\n",
+        gn_files))
+  if gyp_files:
+    results.append(output_api.PresubmitError(
+        "You added one or more source files but didn't update the\n"
+        "corresponding gyp files:\n",
+        gyp_files))
+  return results
+
+
 def _CommonChecks(input_api, output_api):
   """Checks common to both upload and commit."""
   results = []
@@ -231,6 +263,7 @@
       _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
   results.extend(
       _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api))
+  results.extend(_CheckMissingFiles(input_api, output_api))
   return results
 
 
diff --git a/V8_MERGE_REVISION b/V8_MERGE_REVISION
index 239f125..c253eac 100644
--- a/V8_MERGE_REVISION
+++ b/V8_MERGE_REVISION
@@ -1,2 +1,2 @@
-v8 5.4.500.40
-https://chromium.googlesource.com/v8/v8/+/5.4.500.40
+v8 5.5.372.32
+https://chromium.googlesource.com/v8/v8/+/5.5.372.32
diff --git a/base/trace_event/common/trace_event_common.h b/base/trace_event/common/trace_event_common.h
index e87665b..0db9269 100644
--- a/base/trace_event/common/trace_event_common.h
+++ b/base/trace_event/common/trace_event_common.h
@@ -223,6 +223,49 @@
                                             flow_flags, arg1_name, arg1_val, \
                                             arg2_name, arg2_val)
 
+// UNSHIPPED_TRACE_EVENT* are like TRACE_EVENT* except that they are not
+// included in official builds.
+
+#if OFFICIAL_BUILD
+#undef TRACING_IS_OFFICIAL_BUILD
+#define TRACING_IS_OFFICIAL_BUILD 1
+#elif !defined(TRACING_IS_OFFICIAL_BUILD)
+#define TRACING_IS_OFFICIAL_BUILD 0
+#endif
+
+#if TRACING_IS_OFFICIAL_BUILD
+#define UNSHIPPED_TRACE_EVENT0(category_group, name) (void)0
+#define UNSHIPPED_TRACE_EVENT1(category_group, name, arg1_name, arg1_val) \
+  (void)0
+#define UNSHIPPED_TRACE_EVENT2(category_group, name, arg1_name, arg1_val, \
+                               arg2_name, arg2_val)                       \
+  (void)0
+#define UNSHIPPED_TRACE_EVENT_INSTANT0(category_group, name, scope) (void)0
+#define UNSHIPPED_TRACE_EVENT_INSTANT1(category_group, name, scope, arg1_name, \
+                                       arg1_val)                               \
+  (void)0
+#define UNSHIPPED_TRACE_EVENT_INSTANT2(category_group, name, scope, arg1_name, \
+                                       arg1_val, arg2_name, arg2_val)          \
+  (void)0
+#else
+#define UNSHIPPED_TRACE_EVENT0(category_group, name) \
+  TRACE_EVENT0(category_group, name)
+#define UNSHIPPED_TRACE_EVENT1(category_group, name, arg1_name, arg1_val) \
+  TRACE_EVENT1(category_group, name, arg1_name, arg1_val)
+#define UNSHIPPED_TRACE_EVENT2(category_group, name, arg1_name, arg1_val, \
+                               arg2_name, arg2_val)                       \
+  TRACE_EVENT2(category_group, name, arg1_name, arg1_val, arg2_name, arg2_val)
+#define UNSHIPPED_TRACE_EVENT_INSTANT0(category_group, name, scope) \
+  TRACE_EVENT_INSTANT0(category_group, name, scope)
+#define UNSHIPPED_TRACE_EVENT_INSTANT1(category_group, name, scope, arg1_name, \
+                                       arg1_val)                               \
+  TRACE_EVENT_INSTANT1(category_group, name, scope, arg1_name, arg1_val)
+#define UNSHIPPED_TRACE_EVENT_INSTANT2(category_group, name, scope, arg1_name, \
+                                       arg1_val, arg2_name, arg2_val)          \
+  TRACE_EVENT_INSTANT2(category_group, name, scope, arg1_name, arg1_val,       \
+                       arg2_name, arg2_val)
+#endif
+
 // Records a single event called "name" immediately, with 0, 1 or 2
 // associated arguments. If the category is not enabled, then this
 // does nothing.
@@ -258,6 +301,16 @@
       TRACE_EVENT_PHASE_INSTANT, category_group, name, timestamp,        \
       TRACE_EVENT_FLAG_NONE | scope)
 
+// Syntactic sugars for the sampling tracing in the main thread.
+#define TRACE_EVENT_SCOPED_SAMPLING_STATE(category, name) \
+  TRACE_EVENT_SCOPED_SAMPLING_STATE_FOR_BUCKET(0, category, name)
+#define TRACE_EVENT_GET_SAMPLING_STATE() \
+  TRACE_EVENT_GET_SAMPLING_STATE_FOR_BUCKET(0)
+#define TRACE_EVENT_SET_SAMPLING_STATE(category, name) \
+  TRACE_EVENT_SET_SAMPLING_STATE_FOR_BUCKET(0, category, name)
+#define TRACE_EVENT_SET_NONCONST_SAMPLING_STATE(category_and_name) \
+  TRACE_EVENT_SET_NONCONST_SAMPLING_STATE_FOR_BUCKET(0, category_and_name)
+
 // Records a single BEGIN event called "name" immediately, with 0, 1 or 2
 // associated arguments. If the category is not enabled, then this
 // does nothing.
@@ -953,15 +1006,15 @@
   INTERNAL_TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context)
 
 // Macro to specify that two trace IDs are identical. For example,
-// TRACE_LINK_IDS(
+// TRACE_BIND_IDS(
 //     "category", "name",
 //     TRACE_ID_WITH_SCOPE("net::URLRequest", 0x1000),
 //     TRACE_ID_WITH_SCOPE("blink::ResourceFetcher::FetchRequest", 0x2000))
 // tells the trace consumer that events with ID ("net::URLRequest", 0x1000) from
 // the current process have the same ID as events with ID
 // ("blink::ResourceFetcher::FetchRequest", 0x2000).
-#define TRACE_LINK_IDS(category_group, name, id, linked_id) \
-  INTERNAL_TRACE_EVENT_ADD_LINK_IDS(category_group, name, id, linked_id);
+#define TRACE_BIND_IDS(category_group, name, id, bind_id) \
+  INTERNAL_TRACE_EVENT_ADD_BIND_IDS(category_group, name, id, bind_id);
 
 // Macro to efficiently determine if a given category group is enabled.
 #define TRACE_EVENT_CATEGORY_GROUP_ENABLED(category_group, ret)             \
@@ -1028,7 +1081,7 @@
 #define TRACE_EVENT_PHASE_CLOCK_SYNC ('c')
 #define TRACE_EVENT_PHASE_ENTER_CONTEXT ('(')
 #define TRACE_EVENT_PHASE_LEAVE_CONTEXT (')')
-#define TRACE_EVENT_PHASE_LINK_IDS ('=')
+#define TRACE_EVENT_PHASE_BIND_IDS ('=')
 
 // Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT.
 #define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0))
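
The trace_event_common.h hunks above rename TRACE_LINK_IDS to TRACE_BIND_IDS and add the UNSHIPPED_TRACE_EVENT* family, which compiles to (void)0 in official builds. A minimal usage sketch in C++, assuming the embedder's trace_event.h wrapper that supplies the INTERNAL_TRACE_EVENT_* machinery; the function, category, and ID values are illustrative only:

    // Sketch only: RecordFetch and the "loader" category are hypothetical.
    #include <stdint.h>
    #include "base/trace_event/trace_event.h"  // assumed embedder wrapper

    void RecordFetch(uint64_t request_id, uint64_t fetch_id) {
      // Dropped entirely when TRACING_IS_OFFICIAL_BUILD is 1.
      UNSHIPPED_TRACE_EVENT0("loader", "RecordFetch");

      // Declares that the two scoped IDs refer to the same logical operation,
      // mirroring the example in the TRACE_BIND_IDS comment above.
      TRACE_BIND_IDS("loader", "RequestBinding",
                     TRACE_ID_WITH_SCOPE("net::URLRequest", request_id),
                     TRACE_ID_WITH_SCOPE("blink::ResourceFetcher::FetchRequest",
                                         fetch_id));
    }

Call sites still using TRACE_LINK_IDS need the corresponding rename when picking up this header.
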
diff --git a/build_overrides/build.gni b/build_overrides/build.gni
index da6d3e0..6b8a4ff 100644
--- a/build_overrides/build.gni
+++ b/build_overrides/build.gni
@@ -16,3 +16,11 @@
 
 # Some non-Chromium builds don't support building java targets.
 enable_java_templates = false
+
+# Some non-Chromium builds don't use Chromium's third_party/binutils.
+linux_use_bundled_binutils_override = true
+
+# Allows different projects to specify their own suppressions files.
+asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
+lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
+tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
diff --git a/build_overrides/v8.gni b/build_overrides/v8.gni
index fc4a70e..09ea457 100644
--- a/build_overrides/v8.gni
+++ b/build_overrides/v8.gni
@@ -11,10 +11,8 @@
   import("//build/config/android/config.gni")
 }
 
-if (((v8_current_cpu == "x86" ||
-      v8_current_cpu == "x64" ||
-      v8_current_cpu=="x87") &&
-     (is_linux || is_mac)) ||
+if (((v8_current_cpu == "x86" || v8_current_cpu == "x64" ||
+      v8_current_cpu == "x87") && (is_linux || is_mac)) ||
     (v8_current_cpu == "ppc64" && is_linux)) {
   v8_enable_gdbjit_default = true
 }
@@ -23,4 +21,12 @@
 
 # Add simple extras solely for the purpose of the cctests.
 v8_extra_library_files = [ "//test/cctest/test-extra.js" ]
-v8_experimental_extra_library_files = [ "//test/cctest/test-experimental-extra.js" ]
+v8_experimental_extra_library_files =
+    [ "//test/cctest/test-experimental-extra.js" ]
+
+declare_args() {
+  # Enable inspector. See include/v8-inspector.h.
+  v8_enable_inspector = false
+}
+
+v8_enable_inspector_override = v8_enable_inspector
diff --git a/gypfiles/config/win/msvs_dependencies.isolate b/gypfiles/config/win/msvs_dependencies.isolate
deleted file mode 100644
index ff92227..0000000
--- a/gypfiles/config/win/msvs_dependencies.isolate
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2015 the V8 project authors. All rights reserved.
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'conditions': [
-    # Copy the VS runtime DLLs into the isolate so that they
-    # don't have to be preinstalled on the target machine.
-    #
-    # VS2013 runtimes
-    ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/x64/msvcp120d.dll',
-          '<(PRODUCT_DIR)/x64/msvcr120d.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/x64/msvcp120.dll',
-          '<(PRODUCT_DIR)/x64/msvcr120.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/msvcp120d.dll',
-          '<(PRODUCT_DIR)/msvcr120d.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/msvcp120.dll',
-          '<(PRODUCT_DIR)/msvcr120.dll',
-        ],
-      },
-    }],
-    # VS2015 runtimes
-    ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/x64/msvcp140d.dll',
-          '<(PRODUCT_DIR)/x64/vccorlib140d.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/x64/msvcp140.dll',
-          '<(PRODUCT_DIR)/x64/vccorlib140.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/msvcp140d.dll',
-          '<(PRODUCT_DIR)/vccorlib140d.dll',
-        ],
-      },
-    }],
-    ['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/msvcp140.dll',
-          '<(PRODUCT_DIR)/vccorlib140.dll',
-        ],
-      },
-    }],
-  ],
-}
\ No newline at end of file
diff --git a/gypfiles/get_landmines.py b/gypfiles/get_landmines.py
index 9fcca4b..432dfd7 100755
--- a/gypfiles/get_landmines.py
+++ b/gypfiles/get_landmines.py
@@ -28,6 +28,8 @@
   print 'Clobbering to hopefully resolve problem with mksnapshot'
   print 'Clobber after ICU roll.'
   print 'Clobber after Android NDK update.'
+  print 'Clober to fix windows build problems.'
+  print 'Clober again to fix windows build problems.'
   return 0
 
 
diff --git a/gypfiles/gyp_v8 b/gypfiles/gyp_v8
index 8be39d9..b8b5f74 100755
--- a/gypfiles/gyp_v8
+++ b/gypfiles/gyp_v8
@@ -118,10 +118,22 @@
 if __name__ == '__main__':
   args = sys.argv[1:]
 
-  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+  gyp_chromium_no_action = os.environ.get('GYP_CHROMIUM_NO_ACTION')
+  if gyp_chromium_no_action == '1':
     print 'Skipping gyp_v8 due to GYP_CHROMIUM_NO_ACTION env var.'
     sys.exit(0)
 
+  running_as_hook = '--running-as-hook'
+  if running_as_hook in args and gyp_chromium_no_action != '0':
+    print 'GYP is now disabled by default in runhooks.\n'
+    print 'If you really want to run this, either run '
+    print '`python gypfiles/gyp_v8` explicitly by hand '
+    print 'or set the environment variable GYP_CHROMIUM_NO_ACTION=0.'
+    sys.exit(0)
+
+  if running_as_hook in args:
+    args.remove(running_as_hook)
+
   gyp_environment.set_environment()
 
   # This could give false positives since it doesn't actually do real option
diff --git a/gypfiles/standalone.gypi b/gypfiles/standalone.gypi
index 6599bb8..7e41ce8 100644
--- a/gypfiles/standalone.gypi
+++ b/gypfiles/standalone.gypi
@@ -46,6 +46,7 @@
     'msvs_multi_core_compile%': '1',
     'mac_deployment_target%': '10.7',
     'release_extra_cflags%': '',
+    'v8_enable_inspector%': 0,
     'variables': {
       'variables': {
         'variables': {
@@ -319,7 +320,7 @@
             'android_ndk_root%': '<(base_dir)/third_party/android_tools/ndk/',
             'android_host_arch%': "<!(uname -m | sed -e 's/i[3456]86/x86/')",
             # Version of the NDK. Used to ensure full rebuilds on NDK rolls.
-            'android_ndk_version%': 'r11c',
+            'android_ndk_version%': 'r12b',
             'host_os%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')",
             'os_folder_name%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/darwin/')",
           },
@@ -378,6 +379,9 @@
         'arm_version%': '<(arm_version)',
         'host_os%': '<(host_os)',
 
+        # Print to stdout on Android.
+        'v8_android_log_stdout%': 1,
+
         'conditions': [
           ['android_ndk_root==""', {
             'variables': {
diff --git a/include/DEPS b/include/DEPS
new file mode 100644
index 0000000..ca60f84
--- /dev/null
+++ b/include/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+  # v8-inspector-protocol.h depends on generated files under include/inspector.
+  "+inspector",
+]
diff --git a/include/OWNERS b/include/OWNERS
index efa3b93..07f8a61 100644
--- a/include/OWNERS
+++ b/include/OWNERS
@@ -1,2 +1,7 @@
 danno@chromium.org
 jochen@chromium.org
+
+per-file v8-inspector.h=dgozman@chromium.org
+per-file v8-inspector.h=pfeldman@chromium.org
+per-file v8-inspector-protocol.h=dgozman@chromium.org
+per-file v8-inspector-protocol.h=pfeldman@chromium.org
diff --git a/include/libplatform/v8-tracing.h b/include/libplatform/v8-tracing.h
index 7646ea5..e9f4941 100644
--- a/include/libplatform/v8-tracing.h
+++ b/include/libplatform/v8-tracing.h
@@ -7,9 +7,17 @@
 
 #include <fstream>
 #include <memory>
+#include <unordered_set>
 #include <vector>
 
+#include "v8-platform.h"  // NOLINT(build/include)
+
 namespace v8 {
+
+namespace base {
+class Mutex;
+}  // namespace base
+
 namespace platform {
 namespace tracing {
 
@@ -28,19 +36,22 @@
 
   TraceObject() {}
   ~TraceObject();
-  void Initialize(char phase, const uint8_t* category_enabled_flag,
-                  const char* name, const char* scope, uint64_t id,
-                  uint64_t bind_id, int num_args, const char** arg_names,
-                  const uint8_t* arg_types, const uint64_t* arg_values,
-                  unsigned int flags);
+  void Initialize(
+      char phase, const uint8_t* category_enabled_flag, const char* name,
+      const char* scope, uint64_t id, uint64_t bind_id, int num_args,
+      const char** arg_names, const uint8_t* arg_types,
+      const uint64_t* arg_values,
+      std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+      unsigned int flags);
   void UpdateDuration();
-  void InitializeForTesting(char phase, const uint8_t* category_enabled_flag,
-                            const char* name, const char* scope, uint64_t id,
-                            uint64_t bind_id, int num_args,
-                            const char** arg_names, const uint8_t* arg_types,
-                            const uint64_t* arg_values, unsigned int flags,
-                            int pid, int tid, int64_t ts, int64_t tts,
-                            uint64_t duration, uint64_t cpu_duration);
+  void InitializeForTesting(
+      char phase, const uint8_t* category_enabled_flag, const char* name,
+      const char* scope, uint64_t id, uint64_t bind_id, int num_args,
+      const char** arg_names, const uint8_t* arg_types,
+      const uint64_t* arg_values,
+      std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+      unsigned int flags, int pid, int tid, int64_t ts, int64_t tts,
+      uint64_t duration, uint64_t cpu_duration);
 
   int pid() const { return pid_; }
   int tid() const { return tid_; }
@@ -56,6 +67,9 @@
   const char** arg_names() { return arg_names_; }
   uint8_t* arg_types() { return arg_types_; }
   ArgValue* arg_values() { return arg_values_; }
+  std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables() {
+    return arg_convertables_;
+  }
   unsigned int flags() const { return flags_; }
   int64_t ts() { return ts_; }
   int64_t tts() { return tts_; }
@@ -71,10 +85,12 @@
   const uint8_t* category_enabled_flag_;
   uint64_t id_;
   uint64_t bind_id_;
-  int num_args_;
+  int num_args_ = 0;
   const char* arg_names_[kTraceMaxNumArgs];
   uint8_t arg_types_[kTraceMaxNumArgs];
   ArgValue arg_values_[kTraceMaxNumArgs];
+  std::unique_ptr<v8::ConvertableToTraceFormat>
+      arg_convertables_[kTraceMaxNumArgs];
   char* parameter_copy_storage_ = nullptr;
   unsigned int flags_;
   int64_t ts_;
@@ -217,21 +233,27 @@
     ENABLED_FOR_ETW_EXPORT = 1 << 3
   };
 
-  TracingController() {}
+  TracingController();
+  ~TracingController();
   void Initialize(TraceBuffer* trace_buffer);
   const uint8_t* GetCategoryGroupEnabled(const char* category_group);
   static const char* GetCategoryGroupName(const uint8_t* category_enabled_flag);
-  uint64_t AddTraceEvent(char phase, const uint8_t* category_enabled_flag,
-                         const char* name, const char* scope, uint64_t id,
-                         uint64_t bind_id, int32_t num_args,
-                         const char** arg_names, const uint8_t* arg_types,
-                         const uint64_t* arg_values, unsigned int flags);
+  uint64_t AddTraceEvent(
+      char phase, const uint8_t* category_enabled_flag, const char* name,
+      const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
+      const char** arg_names, const uint8_t* arg_types,
+      const uint64_t* arg_values,
+      std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+      unsigned int flags);
   void UpdateTraceEventDuration(const uint8_t* category_enabled_flag,
                                 const char* name, uint64_t handle);
 
   void StartTracing(TraceConfig* trace_config);
   void StopTracing();
 
+  void AddTraceStateObserver(Platform::TraceStateObserver* observer);
+  void RemoveTraceStateObserver(Platform::TraceStateObserver* observer);
+
  private:
   const uint8_t* GetCategoryGroupEnabledInternal(const char* category_group);
   void UpdateCategoryGroupEnabledFlag(size_t category_index);
@@ -239,6 +261,8 @@
 
   std::unique_ptr<TraceBuffer> trace_buffer_;
   std::unique_ptr<TraceConfig> trace_config_;
+  std::unique_ptr<base::Mutex> mutex_;
+  std::unordered_set<Platform::TraceStateObserver*> observers_;
   Mode mode_ = DISABLED;
 
   // Disallow copy and assign
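
With the TracingController changes above, embedders can observe tracing state transitions via the Platform::TraceStateObserver hooks added in v8-platform.h further down. A hedged sketch; the observer class, function, and include paths are assumptions, not part of the header:

    #include "libplatform/v8-tracing.h"  // assumed include path
    #include "v8-platform.h"             // assumed include path

    // Hypothetical observer; only the OnTraceEnabled/OnTraceDisabled
    // overrides come from the TraceStateObserver interface.
    class LogTraceState : public v8::Platform::TraceStateObserver {
     public:
      void OnTraceEnabled() override { /* e.g. reset per-session counters */ }
      void OnTraceDisabled() override { /* e.g. flush buffered output */ }
    };

    void WatchTracing(v8::platform::tracing::TracingController* controller) {
      static LogTraceState observer;
      controller->AddTraceStateObserver(&observer);
      // Pair with controller->RemoveTraceStateObserver(&observer) before the
      // controller is destroyed.
    }
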
diff --git a/include/v8-inspector-protocol.h b/include/v8-inspector-protocol.h
new file mode 100644
index 0000000..612a2eb
--- /dev/null
+++ b/include/v8-inspector-protocol.h
@@ -0,0 +1,13 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_V8_INSPECTOR_PROTOCOL_H_
+#define V8_V8_INSPECTOR_PROTOCOL_H_
+
+#include "inspector/Debugger.h"  // NOLINT(build/include)
+#include "inspector/Runtime.h"   // NOLINT(build/include)
+#include "inspector/Schema.h"    // NOLINT(build/include)
+#include "v8-inspector.h"        // NOLINT(build/include)
+
+#endif  // V8_V8_INSPECTOR_PROTOCOL_H_
diff --git a/include/v8-inspector.h b/include/v8-inspector.h
new file mode 100644
index 0000000..0855ac1
--- /dev/null
+++ b/include/v8-inspector.h
@@ -0,0 +1,267 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_V8_INSPECTOR_H_
+#define V8_V8_INSPECTOR_H_
+
+#include <stdint.h>
+#include <cctype>
+
+#include <memory>
+
+#include "v8.h"  // NOLINT(build/include)
+
+namespace v8_inspector {
+
+namespace protocol {
+namespace Debugger {
+namespace API {
+class SearchMatch;
+}
+}
+namespace Runtime {
+namespace API {
+class RemoteObject;
+class StackTrace;
+}
+}
+namespace Schema {
+namespace API {
+class Domain;
+}
+}
+}  // namespace protocol
+
+class V8_EXPORT StringView {
+ public:
+  StringView() : m_is8Bit(true), m_length(0), m_characters8(nullptr) {}
+
+  StringView(const uint8_t* characters, size_t length)
+      : m_is8Bit(true), m_length(length), m_characters8(characters) {}
+
+  StringView(const uint16_t* characters, size_t length)
+      : m_is8Bit(false), m_length(length), m_characters16(characters) {}
+
+  bool is8Bit() const { return m_is8Bit; }
+  size_t length() const { return m_length; }
+
+  // TODO(dgozman): add DCHECK(m_is8Bit) to accessors once platform can be used
+  // here.
+  const uint8_t* characters8() const { return m_characters8; }
+  const uint16_t* characters16() const { return m_characters16; }
+
+ private:
+  bool m_is8Bit;
+  size_t m_length;
+  union {
+    const uint8_t* m_characters8;
+    const uint16_t* m_characters16;
+  };
+};
+
+class V8_EXPORT StringBuffer {
+ public:
+  virtual ~StringBuffer() {}
+  virtual const StringView& string() = 0;
+  // This method copies contents.
+  static std::unique_ptr<StringBuffer> create(const StringView&);
+};
+
+class V8_EXPORT V8ContextInfo {
+ public:
+  V8ContextInfo(v8::Local<v8::Context> context, int contextGroupId,
+                const StringView& humanReadableName)
+      : context(context),
+        contextGroupId(contextGroupId),
+        humanReadableName(humanReadableName),
+        hasMemoryOnConsole(false) {}
+
+  v8::Local<v8::Context> context;
+  // Each v8::Context is a part of a group. The group id must be non-zero.
+  int contextGroupId;
+  StringView humanReadableName;
+  StringView origin;
+  StringView auxData;
+  bool hasMemoryOnConsole;
+
+ private:
+  // Disallow copying and allocating this one.
+  enum NotNullTagEnum { NotNullLiteral };
+  void* operator new(size_t) = delete;
+  void* operator new(size_t, NotNullTagEnum, void*) = delete;
+  void* operator new(size_t, void*) = delete;
+  V8ContextInfo(const V8ContextInfo&) = delete;
+  V8ContextInfo& operator=(const V8ContextInfo&) = delete;
+};
+
+class V8_EXPORT V8StackTrace {
+ public:
+  virtual bool isEmpty() const = 0;
+  virtual StringView topSourceURL() const = 0;
+  virtual int topLineNumber() const = 0;
+  virtual int topColumnNumber() const = 0;
+  virtual StringView topScriptId() const = 0;
+  virtual StringView topFunctionName() const = 0;
+
+  virtual ~V8StackTrace() {}
+  virtual std::unique_ptr<protocol::Runtime::API::StackTrace>
+  buildInspectorObject() const = 0;
+  virtual std::unique_ptr<StringBuffer> toString() const = 0;
+
+  // Safe to pass between threads, drops async chain.
+  virtual std::unique_ptr<V8StackTrace> clone() = 0;
+};
+
+class V8_EXPORT V8InspectorSession {
+ public:
+  virtual ~V8InspectorSession() {}
+
+  // Cross-context inspectable values (DOM nodes in different worlds, etc.).
+  class V8_EXPORT Inspectable {
+   public:
+    virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
+    virtual ~Inspectable() {}
+  };
+  virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;
+
+  // Dispatching protocol messages.
+  static bool canDispatchMethod(const StringView& method);
+  virtual void dispatchProtocolMessage(const StringView& message) = 0;
+  virtual std::unique_ptr<StringBuffer> stateJSON() = 0;
+  virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
+  supportedDomains() = 0;
+
+  // Debugger actions.
+  virtual void schedulePauseOnNextStatement(const StringView& breakReason,
+                                            const StringView& breakDetails) = 0;
+  virtual void cancelPauseOnNextStatement() = 0;
+  virtual void breakProgram(const StringView& breakReason,
+                            const StringView& breakDetails) = 0;
+  virtual void setSkipAllPauses(bool) = 0;
+  virtual void resume() = 0;
+  virtual void stepOver() = 0;
+  virtual std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>>
+  searchInTextByLines(const StringView& text, const StringView& query,
+                      bool caseSensitive, bool isRegex) = 0;
+
+  // Remote objects.
+  virtual std::unique_ptr<protocol::Runtime::API::RemoteObject> wrapObject(
+      v8::Local<v8::Context>, v8::Local<v8::Value>,
+      const StringView& groupName) = 0;
+  virtual bool unwrapObject(std::unique_ptr<StringBuffer>* error,
+                            const StringView& objectId, v8::Local<v8::Value>*,
+                            v8::Local<v8::Context>*,
+                            std::unique_ptr<StringBuffer>* objectGroup) = 0;
+  virtual void releaseObjectGroup(const StringView&) = 0;
+};
+
+enum class V8ConsoleAPIType { kClear, kDebug, kLog, kInfo, kWarning, kError };
+
+class V8_EXPORT V8InspectorClient {
+ public:
+  virtual ~V8InspectorClient() {}
+
+  virtual void runMessageLoopOnPause(int contextGroupId) {}
+  virtual void quitMessageLoopOnPause() {}
+  virtual void runIfWaitingForDebugger(int contextGroupId) {}
+
+  virtual void muteMetrics(int contextGroupId) {}
+  virtual void unmuteMetrics(int contextGroupId) {}
+
+  virtual void beginUserGesture() {}
+  virtual void endUserGesture() {}
+
+  virtual std::unique_ptr<StringBuffer> valueSubtype(v8::Local<v8::Value>) {
+    return nullptr;
+  }
+  virtual bool formatAccessorsAsProperties(v8::Local<v8::Value>) {
+    return false;
+  }
+  virtual bool isInspectableHeapObject(v8::Local<v8::Object>) { return true; }
+
+  virtual v8::Local<v8::Context> ensureDefaultContextInGroup(
+      int contextGroupId) {
+    return v8::Local<v8::Context>();
+  }
+  virtual void beginEnsureAllContextsInGroup(int contextGroupId) {}
+  virtual void endEnsureAllContextsInGroup(int contextGroupId) {}
+
+  virtual void installAdditionalCommandLineAPI(v8::Local<v8::Context>,
+                                               v8::Local<v8::Object>) {}
+  virtual void consoleAPIMessage(int contextGroupId, V8ConsoleAPIType,
+                                 const StringView& message,
+                                 const StringView& url, unsigned lineNumber,
+                                 unsigned columnNumber, V8StackTrace*) {}
+  virtual v8::MaybeLocal<v8::Value> memoryInfo(v8::Isolate*,
+                                               v8::Local<v8::Context>) {
+    return v8::MaybeLocal<v8::Value>();
+  }
+
+  virtual void consoleTime(const StringView& title) {}
+  virtual void consoleTimeEnd(const StringView& title) {}
+  virtual void consoleTimeStamp(const StringView& title) {}
+  virtual double currentTimeMS() { return 0; }
+  typedef void (*TimerCallback)(void*);
+  virtual void startRepeatingTimer(double, TimerCallback, void* data) {}
+  virtual void cancelTimer(void* data) {}
+
+  // TODO(dgozman): this was added to support service worker shadow page. We
+  // should not connect at all.
+  virtual bool canExecuteScripts(int contextGroupId) { return true; }
+};
+
+class V8_EXPORT V8Inspector {
+ public:
+  static std::unique_ptr<V8Inspector> create(v8::Isolate*, V8InspectorClient*);
+  virtual ~V8Inspector() {}
+
+  // Contexts instrumentation.
+  virtual void contextCreated(const V8ContextInfo&) = 0;
+  virtual void contextDestroyed(v8::Local<v8::Context>) = 0;
+  virtual void resetContextGroup(int contextGroupId) = 0;
+
+  // Various instrumentation.
+  virtual void willExecuteScript(v8::Local<v8::Context>, int scriptId) = 0;
+  virtual void didExecuteScript(v8::Local<v8::Context>) = 0;
+  virtual void idleStarted() = 0;
+  virtual void idleFinished() = 0;
+
+  // Async stack traces instrumentation.
+  virtual void asyncTaskScheduled(const StringView& taskName, void* task,
+                                  bool recurring) = 0;
+  virtual void asyncTaskCanceled(void* task) = 0;
+  virtual void asyncTaskStarted(void* task) = 0;
+  virtual void asyncTaskFinished(void* task) = 0;
+  virtual void allAsyncTasksCanceled() = 0;
+
+  // Exceptions instrumentation.
+  virtual unsigned exceptionThrown(
+      v8::Local<v8::Context>, const StringView& message,
+      v8::Local<v8::Value> exception, const StringView& detailedMessage,
+      const StringView& url, unsigned lineNumber, unsigned columnNumber,
+      std::unique_ptr<V8StackTrace>, int scriptId) = 0;
+  virtual void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
+                                const StringView& message) = 0;
+
+  // Connection.
+  class V8_EXPORT Channel {
+   public:
+    virtual ~Channel() {}
+    virtual void sendProtocolResponse(int callId,
+                                      const StringView& message) = 0;
+    virtual void sendProtocolNotification(const StringView& message) = 0;
+    virtual void flushProtocolNotifications() = 0;
+  };
+  virtual std::unique_ptr<V8InspectorSession> connect(
+      int contextGroupId, Channel*, const StringView& state) = 0;
+
+  // API methods.
+  virtual std::unique_ptr<V8StackTrace> createStackTrace(
+      v8::Local<v8::StackTrace>) = 0;
+  virtual std::unique_ptr<V8StackTrace> captureStackTrace(bool fullStack) = 0;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_V8_INSPECTOR_H_
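
include/v8-inspector.h is the new public entry point for the embedded inspector. A minimal wiring sketch under stated assumptions: the harness struct, the context-group id of 1, the "demo-context" name, and the include path are illustrative, and protocol traffic is stubbed out:

    #include <cstring>
    #include <memory>
    #include "v8-inspector.h"  // assumed include path

    // Discards all protocol traffic; a real embedder forwards it to a
    // DevTools frontend.
    class NullChannel : public v8_inspector::V8Inspector::Channel {
     public:
      void sendProtocolResponse(int, const v8_inspector::StringView&) override {}
      void sendProtocolNotification(const v8_inspector::StringView&) override {}
      void flushProtocolNotifications() override {}
    };

    struct InspectorHarness {
      NullChannel channel;
      v8_inspector::V8InspectorClient client;  // default hooks are no-ops
      std::unique_ptr<v8_inspector::V8Inspector> inspector;
      std::unique_ptr<v8_inspector::V8InspectorSession> session;

      void Start(v8::Isolate* isolate, v8::Local<v8::Context> context) {
        inspector = v8_inspector::V8Inspector::create(isolate, &client);
        const char* name = "demo-context";
        v8_inspector::StringView name_view(
            reinterpret_cast<const uint8_t*>(name), strlen(name));
        inspector->contextCreated(
            v8_inspector::V8ContextInfo(context, /*contextGroupId=*/1,
                                        name_view));
        session = inspector->connect(/*contextGroupId=*/1, &channel,
                                     v8_inspector::StringView());
      }
    };

Member order matters: the session is declared after the inspector so it is destroyed first.
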
diff --git a/include/v8-platform.h b/include/v8-platform.h
index 4023a5b..e115674 100644
--- a/include/v8-platform.h
+++ b/include/v8-platform.h
@@ -7,6 +7,8 @@
 
 #include <stddef.h>
 #include <stdint.h>
+#include <memory>
+#include <string>
 
 namespace v8 {
 
@@ -17,24 +19,38 @@
  */
 class Task {
  public:
-  virtual ~Task() {}
+  virtual ~Task() = default;
 
   virtual void Run() = 0;
 };
 
-
 /**
-* An IdleTask represents a unit of work to be performed in idle time.
-* The Run method is invoked with an argument that specifies the deadline in
-* seconds returned by MonotonicallyIncreasingTime().
-* The idle task is expected to complete by this deadline.
-*/
+ * An IdleTask represents a unit of work to be performed in idle time.
+ * The Run method is invoked with an argument that specifies the deadline in
+ * seconds returned by MonotonicallyIncreasingTime().
+ * The idle task is expected to complete by this deadline.
+ */
 class IdleTask {
  public:
-  virtual ~IdleTask() {}
+  virtual ~IdleTask() = default;
   virtual void Run(double deadline_in_seconds) = 0;
 };
 
+/**
+ * The interface represents complex arguments to trace events.
+ */
+class ConvertableToTraceFormat {
+ public:
+  virtual ~ConvertableToTraceFormat() = default;
+
+  /**
+   * Append the class info to the provided |out| string. The appended
+   * data must be a valid JSON object. Strings must be properly quoted, and
+   * escaped. There is no processing applied to the content after it is
+   * appended.
+   */
+  virtual void AppendAsTraceFormat(std::string* out) const = 0;
+};
 
 /**
  * V8 Platform abstraction layer.
@@ -54,7 +70,7 @@
     kLongRunningTask
   };
 
-  virtual ~Platform() {}
+  virtual ~Platform() = default;
 
   /**
    * Gets the number of threads that are used to execute background tasks. Is
@@ -159,11 +175,43 @@
   }
 
   /**
+   * Adds a trace event to the platform tracing system. This function call is
+   * usually the result of a TRACE_* macro from trace_event_common.h when
+   * tracing and the category of the particular trace are enabled. It is not
+   * advisable to call this function on its own; it is really only meant to be
+   * used by the trace macros. The returned handle can be used by
+   * UpdateTraceEventDuration to update the duration of COMPLETE events.
+   */
+  virtual uint64_t AddTraceEvent(
+      char phase, const uint8_t* category_enabled_flag, const char* name,
+      const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
+      const char** arg_names, const uint8_t* arg_types,
+      const uint64_t* arg_values,
+      std::unique_ptr<ConvertableToTraceFormat>* arg_convertables,
+      unsigned int flags) {
+    return AddTraceEvent(phase, category_enabled_flag, name, scope, id, bind_id,
+                         num_args, arg_names, arg_types, arg_values, flags);
+  }
+
+  /**
    * Sets the duration field of a COMPLETE trace event. It must be called with
    * the handle returned from AddTraceEvent().
    **/
   virtual void UpdateTraceEventDuration(const uint8_t* category_enabled_flag,
                                         const char* name, uint64_t handle) {}
+
+  class TraceStateObserver {
+   public:
+    virtual ~TraceStateObserver() = default;
+    virtual void OnTraceEnabled() = 0;
+    virtual void OnTraceDisabled() = 0;
+  };
+
+  /** Adds tracing state change observer. */
+  virtual void AddTraceStateObserver(TraceStateObserver*) {}
+
+  /** Removes tracing state change observer. */
+  virtual void RemoveTraceStateObserver(TraceStateObserver*) {}
 };
 
 }  // namespace v8
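The v8-platform.h additions give embedders two hooks: ConvertableToTraceFormat for complex trace arguments and Platform::TraceStateObserver for tracing state changes. A minimal sketch under those interfaces; CounterArg and TracingObserver are illustrative names only:

#include <memory>
#include <string>

#include "v8-platform.h"

// A trace argument that serializes itself as a JSON object, as required by
// ConvertableToTraceFormat::AppendAsTraceFormat.
class CounterArg : public v8::ConvertableToTraceFormat {
 public:
  explicit CounterArg(int value) : value_(value) {}
  void AppendAsTraceFormat(std::string* out) const override {
    // The appended data must be valid JSON; no post-processing is applied.
    out->append("{\"value\":" + std::to_string(value_) + "}");
  }

 private:
  int value_;
};

// Observer that is told when tracing is turned on or off.
class TracingObserver : public v8::Platform::TraceStateObserver {
 public:
  void OnTraceEnabled() override { enabled_ = true; }
  void OnTraceDisabled() override { enabled_ = false; }
  bool enabled() const { return enabled_; }

 private:
  bool enabled_ = false;
};

void RegisterObserver(v8::Platform* platform, TracingObserver* observer) {
  // The default implementation is a no-op; tracing-aware platforms keep the
  // observer and invoke OnTraceEnabled/OnTraceDisabled on state changes.
  platform->AddTraceStateObserver(observer);
}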
diff --git a/include/v8-profiler.h b/include/v8-profiler.h
index bcb69f3..6ee0340 100644
--- a/include/v8-profiler.h
+++ b/include/v8-profiler.h
@@ -46,6 +46,20 @@
 
 namespace v8 {
 
+/**
+ * TracingCpuProfiler monitors tracing being enabled or disabled
+ * and emits CpuProfile trace events once the v8.cpu_profile2 tracing category
+ * is enabled. It has no overhead unless the category is enabled.
+ */
+class V8_EXPORT TracingCpuProfiler {
+ public:
+  static std::unique_ptr<TracingCpuProfiler> Create(Isolate*);
+  virtual ~TracingCpuProfiler() = default;
+
+ protected:
+  TracingCpuProfiler() = default;
+};
+
 // TickSample captures the information collected for each sample.
 struct TickSample {
   // Internal profiling (with --prof + tools/$OS-tick-processor) wants to
@@ -131,6 +145,13 @@
   /** Returns function name (empty string for anonymous functions.) */
   Local<String> GetFunctionName() const;
 
+  /**
+   * Returns the function name (empty string for anonymous functions).
+   * The string ownership is *not* passed to the caller. It stays valid until
+   * the profile is deleted. The function is thread safe.
+   */
+  const char* GetFunctionNameStr() const;
+
   /** Returns id of the script where function is located. */
   int GetScriptId() const;
 
@@ -138,6 +159,13 @@
   Local<String> GetScriptResourceName() const;
 
   /**
+   * Returns the resource name of the script from which the function
+   * originates. The string ownership is *not* passed to the caller. It stays
+   * valid until the profile is deleted. The function is thread safe.
+   */
+  const char* GetScriptResourceNameStr() const;
+
+  /**
    * Returns the number, 1-based, of the line where the function originates.
    * kNoLineNumberInfo if no line number information is available.
    */
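TracingCpuProfiler is created per isolate and kept alive for as long as CPU profiles should be emitted into the tracing system; it stays idle until the v8.cpu_profile2 category is enabled. A minimal usage sketch:

#include <memory>

#include "v8-profiler.h"

// Keep the returned profiler alive while profiles should be emitted;
// destroying it detaches it from the isolate.
std::unique_ptr<v8::TracingCpuProfiler> StartTracingProfiler(
    v8::Isolate* isolate) {
  return v8::TracingCpuProfiler::Create(isolate);
}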
diff --git a/include/v8-util.h b/include/v8-util.h
index 8133fdd..99c59fe 100644
--- a/include/v8-util.h
+++ b/include/v8-util.h
@@ -206,14 +206,19 @@
   }
 
   /**
-   * Call V8::RegisterExternallyReferencedObject with the map value for given
-   * key.
+   * Deprecated. Call V8::RegisterExternallyReferencedObject with the map value
+   * for the given key.
+   * TODO(hlopko): Remove once the migration to the reporter is finished.
    */
-  void RegisterExternallyReferencedObject(K& key) {
+  void RegisterExternallyReferencedObject(K& key) {}
+
+  /**
+   * Use EmbedderReachableReferenceReporter with the map value for the given key.
+   */
+  void RegisterExternallyReferencedObject(
+      EmbedderReachableReferenceReporter* reporter, K& key) {
     DCHECK(Contains(key));
-    V8::RegisterExternallyReferencedObject(
-        reinterpret_cast<internal::Object**>(FromVal(Traits::Get(&impl_, key))),
-        reinterpret_cast<internal::Isolate*>(GetIsolate()));
+    reporter->ReportExternalReference(FromVal(Traits::Get(&impl_, key)));
   }
 
   /**
diff --git a/include/v8-version.h b/include/v8-version.h
index 1347fb6..16133c3 100644
--- a/include/v8-version.h
+++ b/include/v8-version.h
@@ -9,9 +9,9 @@
 // NOTE these macros are used by some of the tool scripts and the build
 // system so their names cannot be changed without changing the scripts.
 #define V8_MAJOR_VERSION 5
-#define V8_MINOR_VERSION 4
-#define V8_BUILD_NUMBER 500
-#define V8_PATCH_LEVEL 40
+#define V8_MINOR_VERSION 5
+#define V8_BUILD_NUMBER 372
+#define V8_PATCH_LEVEL 32
 
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/include/v8.h b/include/v8.h
index d7e39ad..36edf53 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -51,7 +51,7 @@
 #else  // V8_OS_WIN
 
 // Setup for Linux shared library export.
-#if V8_HAS_ATTRIBUTE_VISIBILITY && defined(V8_SHARED)
+#if V8_HAS_ATTRIBUTE_VISIBILITY
 # ifdef BUILDING_V8_SHARED
 #  define V8_EXPORT __attribute__ ((visibility("default")))
 # else
@@ -70,6 +70,7 @@
 
 class AccessorSignature;
 class Array;
+class ArrayBuffer;
 class Boolean;
 class BooleanObject;
 class Context;
@@ -95,6 +96,7 @@
 class Platform;
 class Primitive;
 class Promise;
+class PropertyDescriptor;
 class Proxy;
 class RawOperationDescriptor;
 class Script;
@@ -341,7 +343,7 @@
 
 
 #if !defined(V8_IMMINENT_DEPRECATION_WARNINGS)
-// Local is an alias for Local for historical reasons.
+// Handle is an alias for Local for historical reasons.
 template <class T>
 using Handle = Local<T>;
 #endif
@@ -466,6 +468,16 @@
 enum class WeakCallbackType { kParameter, kInternalFields, kFinalizer };
 
 /**
+ * A reporter class that the embedder uses to report reachable references found
+ * by EmbedderHeapTracer.
+ */
+class V8_EXPORT EmbedderReachableReferenceReporter {
+ public:
+  virtual void ReportExternalReference(Value* object) = 0;
+  virtual ~EmbedderReachableReferenceReporter() = default;
+};
+
+/**
  * An object reference that is independent of any handle scope.  Where
  * a Local handle only lives as long as the HandleScope in which it was
  * allocated, a PersistentBase handle remains valid until it is explicitly
@@ -562,11 +574,18 @@
   V8_INLINE void ClearWeak() { ClearWeak<void>(); }
 
   /**
+   * Deprecated.
+   * TODO(hlopko): remove once migration to reporter is finished.
+   */
+  V8_INLINE void RegisterExternalReference(Isolate* isolate) const {}
+
+  /**
    * Allows the embedder to tell the v8 garbage collector that a certain object
    * is alive. Only allowed when the embedder is asked to trace its heap by
    * EmbedderHeapTracer.
    */
-  V8_INLINE void RegisterExternalReference(Isolate* isolate) const;
+  V8_INLINE void RegisterExternalReference(
+      EmbedderReachableReferenceReporter* reporter) const;
 
   /**
    * Marks the reference to this object independent. Garbage collector is free
@@ -615,6 +634,9 @@
    */
   V8_INLINE uint16_t WrapperClassId() const;
 
+  PersistentBase(const PersistentBase& other) = delete;  // NOLINT
+  void operator=(const PersistentBase&) = delete;
+
  private:
   friend class Isolate;
   friend class Utils;
@@ -630,8 +652,6 @@
   friend class Object;
 
   explicit V8_INLINE PersistentBase(T* val) : val_(val) {}
-  PersistentBase(const PersistentBase& other) = delete;  // NOLINT
-  void operator=(const PersistentBase&) = delete;
   V8_INLINE static T* New(Isolate* isolate, T* that);
 
   T* val_;
@@ -835,11 +855,12 @@
    */
   typedef void MoveOnlyTypeForCPP03;
 
+  Global(const Global&) = delete;
+  void operator=(const Global&) = delete;
+
  private:
   template <class F>
   friend class ReturnValue;
-  Global(const Global&) = delete;
-  void operator=(const Global&) = delete;
   V8_INLINE T* operator*() const { return this->val_; }
 };
 
@@ -878,6 +899,11 @@
     return reinterpret_cast<Isolate*>(isolate_);
   }
 
+  HandleScope(const HandleScope&) = delete;
+  void operator=(const HandleScope&) = delete;
+  void* operator new(size_t size) = delete;
+  void operator delete(void*, size_t) = delete;
+
  protected:
   V8_INLINE HandleScope() {}
 
@@ -891,13 +917,6 @@
   static internal::Object** CreateHandle(internal::HeapObject* heap_object,
                                          internal::Object* value);
 
-  // Make it hard to create heap-allocated or illegal handle scopes by
-  // disallowing certain operations.
-  HandleScope(const HandleScope&);
-  void operator=(const HandleScope&);
-  void* operator new(size_t size);
-  void operator delete(void*, size_t);
-
   internal::Isolate* isolate_;
   internal::Object** prev_next_;
   internal::Object** prev_limit_;
@@ -932,16 +951,13 @@
     return Local<T>(reinterpret_cast<T*>(slot));
   }
 
+  EscapableHandleScope(const EscapableHandleScope&) = delete;
+  void operator=(const EscapableHandleScope&) = delete;
+  void* operator new(size_t size) = delete;
+  void operator delete(void*, size_t) = delete;
+
  private:
   internal::Object** Escape(internal::Object** escape_value);
-
-  // Make it hard to create heap-allocated or illegal handle scopes by
-  // disallowing certain operations.
-  EscapableHandleScope(const EscapableHandleScope&);
-  void operator=(const EscapableHandleScope&);
-  void* operator new(size_t size);
-  void operator delete(void*, size_t);
-
   internal::Object** escape_slot_;
 };
 
@@ -950,14 +966,12 @@
   SealHandleScope(Isolate* isolate);
   ~SealHandleScope();
 
- private:
-  // Make it hard to create heap-allocated or illegal handle scopes by
-  // disallowing certain operations.
-  SealHandleScope(const SealHandleScope&);
-  void operator=(const SealHandleScope&);
-  void* operator new(size_t size);
-  void operator delete(void*, size_t);
+  SealHandleScope(const SealHandleScope&) = delete;
+  void operator=(const SealHandleScope&) = delete;
+  void* operator new(size_t size) = delete;
+  void operator delete(void*, size_t) = delete;
 
+ private:
   internal::Isolate* const isolate_;
   internal::Object** prev_limit_;
   int prev_sealed_level_;
@@ -1073,6 +1087,47 @@
   static const int kNoScriptId = 0;
 };
 
+/**
+ * This is an unfinished experimental feature, and is only exposed
+ * here for internal testing purposes. DO NOT USE.
+ *
+ * A compiled JavaScript module.
+ */
+class V8_EXPORT Module {
+ public:
+  /**
+   * Returns the number of modules requested by this module.
+   */
+  int GetModuleRequestsLength() const;
+
+  /**
+   * Returns the ith module specifier in this module.
+   * i must be < GetModuleRequestsLength() and >= 0.
+   */
+  Local<String> GetModuleRequest(int i) const;
+
+  void SetEmbedderData(Local<Value> data);
+  Local<Value> GetEmbedderData() const;
+
+  typedef MaybeLocal<Module> (*ResolveCallback)(Local<Context> context,
+                                                Local<String> specifier,
+                                                Local<Module> referrer,
+                                                Local<Value> data);
+
+  /**
+   * ModuleDeclarationInstantiation
+   *
+   * Returns false if an exception occurred during instantiation.
+   */
+  V8_WARN_UNUSED_RESULT bool Instantiate(
+      Local<Context> context, ResolveCallback callback,
+      Local<Value> callback_data = Local<Value>());
+
+  /**
+   * ModuleEvaluation
+   */
+  V8_WARN_UNUSED_RESULT MaybeLocal<Value> Evaluate(Local<Context> context);
+};
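Together with the ScriptCompiler::CompileModule change further down in this header, the experimental flow is compile, Instantiate with a resolver callback, then Evaluate. A rough sketch only, subject to the same DO NOT USE caveat; the resolver below is a hypothetical stub that resolves nothing:

#include "v8.h"

// Hypothetical resolver: a real embedder would map the specifier to another
// compiled Module. An empty MaybeLocal signals a resolution failure.
v8::MaybeLocal<v8::Module> ResolveNothing(v8::Local<v8::Context> context,
                                          v8::Local<v8::String> specifier,
                                          v8::Local<v8::Module> referrer,
                                          v8::Local<v8::Value> data) {
  return v8::MaybeLocal<v8::Module>();
}

v8::MaybeLocal<v8::Value> CompileAndRunModule(
    v8::Isolate* isolate, v8::Local<v8::Context> context,
    v8::Local<v8::String> source_text) {
  v8::ScriptCompiler::Source source(source_text);
  v8::Local<v8::Module> module;
  if (!v8::ScriptCompiler::CompileModule(isolate, &source).ToLocal(&module)) {
    return v8::MaybeLocal<v8::Value>();  // Compilation failed.
  }
  // Instantiate resolves module requests via the callback; false means an
  // exception was thrown during instantiation.
  if (!module->Instantiate(context, ResolveNothing)) {
    return v8::MaybeLocal<v8::Value>();
  }
  return module->Evaluate(context);
}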
 
 /**
  * A compiled JavaScript script, tied to a Context which was active when the
@@ -1148,10 +1203,9 @@
     bool rejected;
     BufferPolicy buffer_policy;
 
-   private:
-    // Prevent copying. Not implemented.
-    CachedData(const CachedData&);
-    CachedData& operator=(const CachedData&);
+    // Prevent copying.
+    CachedData(const CachedData&) = delete;
+    CachedData& operator=(const CachedData&) = delete;
   };
 
   /**
@@ -1171,11 +1225,12 @@
     // alive.
     V8_INLINE const CachedData* GetCachedData() const;
 
+    // Prevent copying.
+    Source(const Source&) = delete;
+    Source& operator=(const Source&) = delete;
+
    private:
     friend class ScriptCompiler;
-    // Prevent copying. Not implemented.
-    Source(const Source&);
-    Source& operator=(const Source&);
 
     Local<String> source_string;
 
@@ -1258,11 +1313,11 @@
 
     internal::StreamedSource* impl() const { return impl_; }
 
-   private:
-    // Prevent copying. Not implemented.
-    StreamedSource(const StreamedSource&);
-    StreamedSource& operator=(const StreamedSource&);
+    // Prevent copying.
+    StreamedSource(const StreamedSource&) = delete;
+    StreamedSource& operator=(const StreamedSource&) = delete;
 
+   private:
     internal::StreamedSource* impl_;
   };
 
@@ -1376,18 +1431,17 @@
   static uint32_t CachedDataVersionTag();
 
   /**
-   * Compile an ES6 module.
-   *
    * This is an unfinished experimental feature, and is only exposed
-   * here for internal testing purposes.
-   * Only parsing works at the moment. Do not use.
+   * here for internal testing purposes. DO NOT USE.
    *
-   * TODO(adamk): Script is likely the wrong return value for this;
-   * should return some new Module type.
+   * Compile an ES module, returning a Module that encapsulates
+   * the compiled code.
+   *
+   * Corresponds to the ParseModule abstract operation in the
+   * ECMAScript specification.
    */
-  static V8_WARN_UNUSED_RESULT MaybeLocal<Script> CompileModule(
-      Local<Context> context, Source* source,
-      CompileOptions options = kNoCompileOptions);
+  static V8_WARN_UNUSED_RESULT MaybeLocal<Module> CompileModule(
+      Isolate* isolate, Source* source);
 
   /**
    * Compile a function for a given context. This is equivalent to running
@@ -1664,6 +1718,174 @@
       Local<String> gap = Local<String>());
 };
 
+/**
+ * Value serialization compatible with the HTML structured clone algorithm.
+ * The format is backward-compatible (i.e. safe to store to disk).
+ *
+ * WARNING: This API is under development, and changes (including incompatible
+ * changes to the API or wire format) may occur without notice until this
+ * warning is removed.
+ */
+class V8_EXPORT ValueSerializer {
+ public:
+  class V8_EXPORT Delegate {
+   public:
+    virtual ~Delegate() {}
+
+    /*
+     * Handles the case where a DataCloneError would be thrown in the structured
+     * clone spec. Other V8 embedders may throw some other appropriate exception
+     * type.
+     */
+    virtual void ThrowDataCloneError(Local<String> message) = 0;
+
+    /*
+     * The embedder overrides this method to write some kind of host object, if
+     * possible. If not, a suitable exception should be thrown and
+     * Nothing<bool>() returned.
+     */
+    virtual Maybe<bool> WriteHostObject(Isolate* isolate, Local<Object> object);
+  };
+
+  explicit ValueSerializer(Isolate* isolate);
+  ValueSerializer(Isolate* isolate, Delegate* delegate);
+  ~ValueSerializer();
+
+  /*
+   * Writes out a header, which includes the format version.
+   */
+  void WriteHeader();
+
+  /*
+   * Serializes a JavaScript value into the buffer.
+   */
+  V8_WARN_UNUSED_RESULT Maybe<bool> WriteValue(Local<Context> context,
+                                               Local<Value> value);
+
+  /*
+   * Returns the stored data. This serializer should not be used once the buffer
+   * is released. The contents are undefined if a previous write has failed.
+   */
+  std::vector<uint8_t> ReleaseBuffer();
+
+  /*
+   * Marks an ArrayBuffer as having its contents transferred out of band.
+   * Pass the corresponding JSArrayBuffer in the deserializing context to
+   * ValueDeserializer::TransferArrayBuffer.
+   */
+  void TransferArrayBuffer(uint32_t transfer_id,
+                           Local<ArrayBuffer> array_buffer);
+
+  /*
+   * Similar to TransferArrayBuffer, but for SharedArrayBuffer.
+   */
+  void TransferSharedArrayBuffer(uint32_t transfer_id,
+                                 Local<SharedArrayBuffer> shared_array_buffer);
+
+  /*
+   * Write raw data in various common formats to the buffer.
+   * Note that integer types are written in base-128 varint format, not with a
+   * binary copy. For use during an override of Delegate::WriteHostObject.
+   */
+  void WriteUint32(uint32_t value);
+  void WriteUint64(uint64_t value);
+  void WriteDouble(double value);
+  void WriteRawBytes(const void* source, size_t length);
+
+ private:
+  ValueSerializer(const ValueSerializer&) = delete;
+  void operator=(const ValueSerializer&) = delete;
+
+  struct PrivateData;
+  PrivateData* private_;
+};
+
+/**
+ * Deserializes values from data written with ValueSerializer, or a compatible
+ * implementation.
+ *
+ * WARNING: This API is under development, and changes (including incompatible
+ * changes to the API or wire format) may occur without notice until this
+ * warning is removed.
+ */
+class V8_EXPORT ValueDeserializer {
+ public:
+  class V8_EXPORT Delegate {
+   public:
+    virtual ~Delegate() {}
+
+    /*
+     * The embedder overrides this method to read some kind of host object, if
+     * possible. If not, a suitable exception should be thrown and
+     * MaybeLocal<Object>() returned.
+     */
+    virtual MaybeLocal<Object> ReadHostObject(Isolate* isolate);
+  };
+
+  ValueDeserializer(Isolate* isolate, const uint8_t* data, size_t size);
+  ValueDeserializer(Isolate* isolate, const uint8_t* data, size_t size,
+                    Delegate* delegate);
+  ~ValueDeserializer();
+
+  /*
+   * Reads and validates a header (including the format version).
+   * May, for example, reject an invalid or unsupported wire format.
+   */
+  V8_WARN_UNUSED_RESULT Maybe<bool> ReadHeader(Local<Context> context);
+  V8_DEPRECATE_SOON("Use Local<Context> version", Maybe<bool> ReadHeader());
+
+  /*
+   * Deserializes a JavaScript value from the buffer.
+   */
+  V8_WARN_UNUSED_RESULT MaybeLocal<Value> ReadValue(Local<Context> context);
+
+  /*
+   * Accepts the array buffer corresponding to the one passed previously to
+   * ValueSerializer::TransferArrayBuffer.
+   */
+  void TransferArrayBuffer(uint32_t transfer_id,
+                           Local<ArrayBuffer> array_buffer);
+
+  /*
+   * Similar to TransferArrayBuffer, but for SharedArrayBuffer.
+   * transfer_id exists in the same namespace as unshared ArrayBuffer objects.
+   */
+  void TransferSharedArrayBuffer(uint32_t transfer_id,
+                                 Local<SharedArrayBuffer> shared_array_buffer);
+
+  /*
+   * Must be called before ReadHeader to enable support for reading the legacy
+   * wire format (i.e., which predates this being shipped).
+   *
+   * Don't use this unless you need to read data written by previous versions of
+   * blink::ScriptValueSerializer.
+   */
+  void SetSupportsLegacyWireFormat(bool supports_legacy_wire_format);
+
+  /*
+   * Reads the underlying wire format version. Likely mostly to be useful to
+   * legacy code reading old wire format versions. Must be called after
+   * ReadHeader.
+   */
+  uint32_t GetWireFormatVersion() const;
+
+  /*
+   * Reads raw data in various common formats from the buffer.
+   * Note that integer types are read in base-128 varint format, not with a
+   * binary copy. For use during an override of Delegate::ReadHostObject.
+   */
+  V8_WARN_UNUSED_RESULT bool ReadUint32(uint32_t* value);
+  V8_WARN_UNUSED_RESULT bool ReadUint64(uint64_t* value);
+  V8_WARN_UNUSED_RESULT bool ReadDouble(double* value);
+  V8_WARN_UNUSED_RESULT bool ReadRawBytes(size_t length, const void** data);
+
+ private:
+  ValueDeserializer(const ValueDeserializer&) = delete;
+  void operator=(const ValueDeserializer&) = delete;
+
+  struct PrivateData;
+  PrivateData* private_;
+};
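A round trip through the two classes above, with the caveat repeated from their comments that the wire format may still change; error handling is reduced to Maybe/MaybeLocal checks:

#include <cstdint>
#include <vector>

#include "v8.h"

// Serialize a value to bytes, then read it back in (possibly another) context.
v8::MaybeLocal<v8::Value> RoundTrip(v8::Isolate* isolate,
                                    v8::Local<v8::Context> context,
                                    v8::Local<v8::Value> value) {
  v8::ValueSerializer serializer(isolate);
  serializer.WriteHeader();
  if (!serializer.WriteValue(context, value).FromMaybe(false)) {
    return v8::MaybeLocal<v8::Value>();  // DataCloneError or similar.
  }
  std::vector<uint8_t> buffer = serializer.ReleaseBuffer();

  v8::ValueDeserializer deserializer(isolate, buffer.data(), buffer.size());
  if (!deserializer.ReadHeader(context).FromMaybe(false)) {
    return v8::MaybeLocal<v8::Value>();  // Invalid or unsupported format.
  }
  return deserializer.ReadValue(context);
}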
 
 /**
  * A map whose keys are referenced weakly. It is similar to JavaScript WeakMap
@@ -1811,6 +2033,11 @@
   bool IsRegExp() const;
 
   /**
+   * Returns true if this value is an async function.
+   */
+  bool IsAsyncFunction() const;
+
+  /**
    * Returns true if this value is a Generator function.
    * This is an experimental feature.
    */
@@ -2207,11 +2434,11 @@
      */
     virtual void Dispose() { delete this; }
 
-   private:
     // Disallow copying and assigning.
-    ExternalStringResourceBase(const ExternalStringResourceBase&);
-    void operator=(const ExternalStringResourceBase&);
+    ExternalStringResourceBase(const ExternalStringResourceBase&) = delete;
+    void operator=(const ExternalStringResourceBase&) = delete;
 
+   private:
     friend class v8::internal::Heap;
   };
 
@@ -2413,13 +2640,14 @@
     char* operator*() { return str_; }
     const char* operator*() const { return str_; }
     int length() const { return length_; }
+
+    // Disallow copying and assigning.
+    Utf8Value(const Utf8Value&) = delete;
+    void operator=(const Utf8Value&) = delete;
+
    private:
     char* str_;
     int length_;
-
-    // Disallow copying and assigning.
-    Utf8Value(const Utf8Value&);
-    void operator=(const Utf8Value&);
   };
 
   /**
@@ -2435,13 +2663,14 @@
     uint16_t* operator*() { return str_; }
     const uint16_t* operator*() const { return str_; }
     int length() const { return length_; }
+
+    // Disallow copying and assigning.
+    Value(const Value&) = delete;
+    void operator=(const Value&) = delete;
+
    private:
     uint16_t* str_;
     int length_;
-
-    // Disallow copying and assigning.
-    Value(const Value&);
-    void operator=(const Value&);
   };
 
  private:
@@ -2575,11 +2804,17 @@
   static void CheckCast(v8::Value* obj);
 };
 
-
+/**
+ * PropertyAttribute.
+ */
 enum PropertyAttribute {
-  None       = 0,
-  ReadOnly   = 1 << 0,
-  DontEnum   = 1 << 1,
+  /** None. **/
+  None = 0,
+  /** ReadOnly, i.e., not writable. **/
+  ReadOnly = 1 << 0,
+  /** DontEnum, i.e., not enumerable. **/
+  DontEnum = 1 << 1,
+  /** DontDelete, i.e., not configurable. **/
   DontDelete = 1 << 2
 };
 
@@ -2693,6 +2928,22 @@
       Local<Context> context, Local<Name> key, Local<Value> value,
       PropertyAttribute attributes = None);
 
+  // Implements Object.defineProperty(O, P, Attributes); see ECMA-262 19.1.2.4.
+  //
+  // The defineProperty function is used to add an own property or
+  // update the attributes of an existing own property of an object.
+  //
+  // Both data and accessor descriptors can be used.
+  //
+  // In general, CreateDataProperty is faster; however, it does not allow
+  // specifying attributes or an accessor descriptor.
+  //
+  // The PropertyDescriptor can change when redefining a property.
+  //
+  // Returns true on success.
+  V8_WARN_UNUSED_RESULT Maybe<bool> DefineProperty(
+      Local<Context> context, Local<Name> key, PropertyDescriptor& descriptor);
+
   // Sets an own property on this object bypassing interceptors and
   // overriding accessors or read-only properties.
   //
@@ -2736,6 +2987,21 @@
       Local<Context> context, Local<String> key);
 
   V8_DEPRECATE_SOON("Use maybe version", bool Has(Local<Value> key));
+  /**
+   * Object::Has() calls the abstract operation HasProperty(O, P) described
+   * in ECMA-262, 7.3.10. Has() returns
+   * true if the object has the property, either own or on the prototype chain.
+   * Interceptors, i.e., PropertyQueryCallbacks, are called if present.
+   *
+   * Has() has the same side effects as JavaScript's `variable in object`.
+   * For example, calling Has() on a revoked proxy will throw an exception.
+   *
+   * \note Has() converts the key to a name, which possibly calls back into
+   * JavaScript.
+   *
+   * See also v8::Object::HasOwnProperty() and
+   * v8::Object::HasRealNamedProperty().
+   */
   V8_WARN_UNUSED_RESULT Maybe<bool> Has(Local<Context> context,
                                         Local<Value> key);
 
@@ -2900,12 +3166,31 @@
 
   // Testers for local properties.
   V8_DEPRECATED("Use maybe version", bool HasOwnProperty(Local<String> key));
+
+  /**
+   * HasOwnProperty() is like JavaScript's Object.prototype.hasOwnProperty().
+   *
+   * See also v8::Object::Has() and v8::Object::HasRealNamedProperty().
+   */
   V8_WARN_UNUSED_RESULT Maybe<bool> HasOwnProperty(Local<Context> context,
                                                    Local<Name> key);
   V8_WARN_UNUSED_RESULT Maybe<bool> HasOwnProperty(Local<Context> context,
                                                    uint32_t index);
   V8_DEPRECATE_SOON("Use maybe version",
                     bool HasRealNamedProperty(Local<String> key));
+  /**
+   * Use HasRealNamedProperty() if you want to check if an object has an own
+   * property without causing side effects, i.e., without calling interceptors.
+   *
+   * This function is similar to v8::Object::HasOwnProperty(), but it does not
+   * call interceptors.
+   *
+   * \note Consider using non-masking interceptors, i.e., the interceptors are
+   * not called if the receiver has the real named property. See
+   * `v8::PropertyHandlerFlags::kNonMasking`.
+   *
+   * See also v8::Object::Has().
+   */
   V8_WARN_UNUSED_RESULT Maybe<bool> HasRealNamedProperty(Local<Context> context,
                                                          Local<Name> key);
   V8_DEPRECATE_SOON("Use maybe version",
@@ -2988,6 +3273,12 @@
    */
   Local<Context> CreationContext();
 
+  /** Same as above, but works for Persistents */
+  V8_INLINE static Local<Context> CreationContext(
+      const PersistentBase<Object>& object) {
+    return object.val_->CreationContext();
+  }
+
   /**
    * Checks whether a callback is set by the
    * ObjectTemplate::SetCallAsFunctionHandler method.
@@ -3236,12 +3527,91 @@
 template<typename T>
 class PropertyCallbackInfo {
  public:
+  /**
+   * \return The isolate of the property access.
+   */
   V8_INLINE Isolate* GetIsolate() const;
+
+  /**
+   * \return The data set in the configuration, i.e., in
+   * `NamedPropertyHandlerConfiguration` or
+   * `IndexedPropertyHandlerConfiguration`.
+   */
   V8_INLINE Local<Value> Data() const;
+
+  /**
+   * \return The receiver. In many cases, this is the object on which the
+   * property access was intercepted. When using
+   * `Reflect.get`, `Function.prototype.call`, or similar functions, it is the
+   * object passed in as receiver or thisArg.
+   *
+   * \code
+   *  void GetterCallback(Local<Name> name,
+   *                      const v8::PropertyCallbackInfo<v8::Value>& info) {
+   *     auto context = info.GetIsolate()->GetCurrentContext();
+   *
+   *     v8::Local<v8::Value> a_this =
+   *         info.This()
+   *             ->GetRealNamedProperty(context, v8_str("a"))
+   *             .ToLocalChecked();
+   *     v8::Local<v8::Value> a_holder =
+   *         info.Holder()
+   *             ->GetRealNamedProperty(context, v8_str("a"))
+   *             .ToLocalChecked();
+   *
+   *    CHECK(v8_str("r")->Equals(context, a_this).FromJust());
+   *    CHECK(v8_str("obj")->Equals(context, a_holder).FromJust());
+   *
+   *    info.GetReturnValue().Set(name);
+   *  }
+   *
+   *  v8::Local<v8::FunctionTemplate> templ =
+   *  v8::FunctionTemplate::New(isolate);
+   *  templ->InstanceTemplate()->SetHandler(
+   *      v8::NamedPropertyHandlerConfiguration(GetterCallback));
+   *  LocalContext env;
+   *  env->Global()
+   *      ->Set(env.local(), v8_str("obj"), templ->GetFunction(env.local())
+   *                                           .ToLocalChecked()
+   *                                           ->NewInstance(env.local())
+   *                                           .ToLocalChecked())
+   *      .FromJust();
+   *
+   *  CompileRun("obj.a = 'obj'; var r = {a: 'r'}; Reflect.get(obj, 'x', r)");
+   * \endcode
+   */
   V8_INLINE Local<Object> This() const;
+
+  /**
+   * \return The object in the prototype chain of the receiver that has the
+   * interceptor. Suppose you have `x` and its prototype is `y`, and `y`
+   * has an interceptor. Then `info.This()` is `x` and `info.Holder()` is `y`.
+   * The Holder() could be a hidden object (the global object, rather
+   * than the global proxy).
+   *
+   * \note For security reasons, do not pass the object back into the runtime.
+   */
   V8_INLINE Local<Object> Holder() const;
+
+  /**
+   * \return The return value of the callback.
+   * Can be changed by calling Set().
+   * \code
+   * info.GetReturnValue().Set(...)
+   * \endcode
+   *
+   */
   V8_INLINE ReturnValue<T> GetReturnValue() const;
+
+  /**
+   * \return True if the intercepted function should throw if an error occurs.
+   * Usually, `true` corresponds to `'use strict'`.
+   *
+   * \note Always `false` when intercepting `Reflect.set()`
+   * independent of the language mode.
+   */
   V8_INLINE bool ShouldThrowOnError() const;
+
   // This shouldn't be public, but the arm compiler needs it.
   static const int kArgsLength = 7;
 
@@ -3431,6 +3801,78 @@
   static void CheckCast(Value* obj);
 };
 
+/**
+ * An instance of a Property Descriptor, see Ecma-262 6.2.4.
+ *
+ * Properties in a descriptor are present or absent. If you do not set
+ * `enumerable`, `configurable`, and `writable`, they are absent. If `value`,
+ * `get`, or `set` should be absent but must be passed to the constructor, use
+ * empty handles.
+ *
+ * Accessors `get` and `set` must be callable or undefined if they are present.
+ *
+ * \note Only query properties if they are present, i.e., call `x()` only if
+ * `has_x()` returns true.
+ *
+ * \code
+ * // var desc = {writable: false}
+ * v8::PropertyDescriptor d(Local<Value>(), false);
+ * d.value(); // error, value not set
+ * if (d.has_writable()) {
+ *   d.writable(); // false
+ * }
+ *
+ * // var desc = {value: undefined}
+ * v8::PropertyDescriptor d(v8::Undefined(isolate));
+ *
+ * // var desc = {get: undefined}
+ * v8::PropertyDescriptor d(v8::Undefined(isolate), Local<Value>());
+ * \endcode
+ */
+class V8_EXPORT PropertyDescriptor {
+ public:
+  // GenericDescriptor
+  PropertyDescriptor();
+
+  // DataDescriptor
+  PropertyDescriptor(Local<Value> value);
+
+  // DataDescriptor with writable property
+  PropertyDescriptor(Local<Value> value, bool writable);
+
+  // AccessorDescriptor
+  PropertyDescriptor(Local<Value> get, Local<Value> set);
+
+  ~PropertyDescriptor();
+
+  Local<Value> value() const;
+  bool has_value() const;
+
+  Local<Value> get() const;
+  bool has_get() const;
+  Local<Value> set() const;
+  bool has_set() const;
+
+  void set_enumerable(bool enumerable);
+  bool enumerable() const;
+  bool has_enumerable() const;
+
+  void set_configurable(bool configurable);
+  bool configurable() const;
+  bool has_configurable() const;
+
+  bool writable() const;
+  bool has_writable() const;
+
+  struct PrivateData;
+  PrivateData* get_private() const { return private_; }
+
+  PropertyDescriptor(const PropertyDescriptor&) = delete;
+  void operator=(const PropertyDescriptor&) = delete;
+
+ private:
+  PrivateData* private_;
+};
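The descriptor above feeds the new Object::DefineProperty declared earlier in this header. A short sketch that defines a read-only, non-enumerable data property, roughly Object.defineProperty(obj, 'answer', {value: 42, writable: false, enumerable: false}) in JavaScript; DefineAnswer is an illustrative helper:

#include "v8.h"

// Define obj.answer = 42 as non-writable and non-enumerable via a data
// descriptor.
v8::Maybe<bool> DefineAnswer(v8::Local<v8::Context> context,
                             v8::Local<v8::Object> obj) {
  v8::Isolate* isolate = context->GetIsolate();
  v8::Local<v8::Name> key =
      v8::String::NewFromUtf8(isolate, "answer", v8::NewStringType::kNormal)
          .ToLocalChecked();
  // Data descriptor: {value: 42, writable: false}; enumerable set explicitly.
  v8::PropertyDescriptor desc(v8::Integer::New(isolate, 42), false);
  desc.set_enumerable(false);
  return obj->DefineProperty(context, key, desc);
}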
 
 /**
  * An instance of the built-in Proxy constructor (ECMA-262, 6th Edition,
@@ -3444,7 +3886,7 @@
   void Revoke();
 
   /**
-   * Creates a new empty Map.
+   * Creates a new Proxy for the target object.
    */
   static MaybeLocal<Proxy> New(Local<Context> context,
                                Local<Object> local_target,
@@ -4296,36 +4738,115 @@
 
 // TODO(dcarney): Deprecate and remove previous typedefs, and replace
 // GenericNamedPropertyFooCallback with just NamedPropertyFooCallback.
+
 /**
- * GenericNamedProperty[Getter|Setter] are used as interceptors on object.
- * See ObjectTemplate::SetNamedPropertyHandler.
+ * Interceptor for get requests on an object.
+ *
+ * Use `info.GetReturnValue().Set()` to set the return value of the
+ * intercepted get request.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * \code
+ *  void GetterCallback(
+ *    Local<Name> name,
+ *    const v8::PropertyCallbackInfo<v8::Value>& info) {
+ *      info.GetReturnValue().Set(v8_num(42));
+ *  }
+ *
+ *  v8::Local<v8::FunctionTemplate> templ =
+ *      v8::FunctionTemplate::New(isolate);
+ *  templ->InstanceTemplate()->SetHandler(
+ *      v8::NamedPropertyHandlerConfiguration(GetterCallback));
+ *  LocalContext env;
+ *  env->Global()
+ *      ->Set(env.local(), v8_str("obj"), templ->GetFunction(env.local())
+ *                                             .ToLocalChecked()
+ *                                             ->NewInstance(env.local())
+ *                                             .ToLocalChecked())
+ *      .FromJust();
+ *  v8::Local<v8::Value> result = CompileRun("obj.a = 17; obj.a");
+ *  CHECK(v8_num(42)->Equals(env.local(), result).FromJust());
+ * \endcode
+ *
+ * See also `ObjectTemplate::SetHandler`.
  */
 typedef void (*GenericNamedPropertyGetterCallback)(
     Local<Name> property, const PropertyCallbackInfo<Value>& info);
 
-
 /**
- * Returns the value if the setter intercepts the request.
- * Otherwise, returns an empty handle.
+ * Interceptor for set requests on an object.
+ *
+ * Use `info.GetReturnValue()` to indicate whether the request was intercepted
+ * or not. If the setter successfully intercepts the request, i.e., if the
+ * request should not be further executed, call
+ * `info.GetReturnValue().Set(value)`. If the setter
+ * did not intercept the request, i.e., if the request should be handled as
+ * if no interceptor is present, do not call `Set()`.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param value The value which the property will have if the request
+ * is not intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * See also
+ * `ObjectTemplate::SetHandler`.
  */
 typedef void (*GenericNamedPropertySetterCallback)(
     Local<Name> property, Local<Value> value,
     const PropertyCallbackInfo<Value>& info);
 
-
 /**
- * Returns a non-empty handle if the interceptor intercepts the request.
- * The result is an integer encoding property attributes (like v8::None,
- * v8::DontEnum, etc.)
+ * Intercepts all requests that query the attributes of the
+ * property, e.g., getOwnPropertyDescriptor(), propertyIsEnumerable(), and
+ * defineProperty().
+ *
+ * Use `info.GetReturnValue().Set(value)` to set the property attributes. The
+ * value is an integer encoding a `v8::PropertyAttribute`.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * \note Some functions query the property attributes internally, even though
+ * they do not return the attributes. For example, `hasOwnProperty()` can
+ * trigger this interceptor depending on the state of the object.
+ *
+ * See also
+ * `ObjectTemplate::SetHandler`.
  */
 typedef void (*GenericNamedPropertyQueryCallback)(
     Local<Name> property, const PropertyCallbackInfo<Integer>& info);
 
-
 /**
- * Returns a non-empty handle if the deleter intercepts the request.
- * The return value is true if the property could be deleted and false
- * otherwise.
+ * Interceptor for delete requests on an object.
+ *
+ * Use `info.GetReturnValue()` to indicate whether the request was intercepted
+ * or not. If the deleter successfully intercepts the request, i.e., if the
+ * request should not be further executed, call
+ * `info.GetReturnValue().Set(value)` with a boolean `value`. The `value` is
+ * used as the return value of `delete`.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * \note If you need to mimic the behavior of `delete`, i.e., throw in strict
+ * mode instead of returning false, use `info.ShouldThrowOnError()` to determine
+ * if you are in strict mode.
+ *
+ * See also `ObjectTemplate::SetHandler`.
  */
 typedef void (*GenericNamedPropertyDeleterCallback)(
     Local<Name> property, const PropertyCallbackInfo<Boolean>& info);
@@ -4338,52 +4859,99 @@
 typedef void (*GenericNamedPropertyEnumeratorCallback)(
     const PropertyCallbackInfo<Array>& info);
 
+/**
+ * Interceptor for defineProperty requests on an object.
+ *
+ * Use `info.GetReturnValue()` to indicate whether the request was intercepted
+ * or not. If the definer successfully intercepts the request, i.e., if the
+ * request should not be further executed, call
+ * `info.GetReturnValue().Set(value)`. If the definer
+ * did not intercept the request, i.e., if the request should be handled as
+ * if no interceptor is present, do not call `Set()`.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param desc The property descriptor which is used to define the
+ * property if the request is not intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * See also `ObjectTemplate::SetHandler`.
+ */
+typedef void (*GenericNamedPropertyDefinerCallback)(
+    Local<Name> property, const PropertyDescriptor& desc,
+    const PropertyCallbackInfo<Value>& info);
 
 /**
- * Returns the value of the property if the getter intercepts the
- * request.  Otherwise, returns an empty handle.
+ * Interceptor for getOwnPropertyDescriptor requests on an object.
+ *
+ * Use `info.GetReturnValue().Set()` to set the return value of the
+ * intercepted request. The return value must be an object that
+ * can be converted to a PropertyDescriptor, e.g., a `v8::Value` returned from
+ * `v8::Object::GetOwnPropertyDescriptor`.
+ *
+ * \param property The name of the property for which the request was
+ * intercepted.
+ * \param info Information about the intercepted request, such as
+ * isolate, receiver, return value, or whether running in `'use strict'` mode.
+ * See `PropertyCallbackInfo`.
+ *
+ * \note If GetOwnPropertyDescriptor is intercepted, it will
+ * always return true, i.e., indicate that the property was found.
+ *
+ * See also `ObjectTemplate::SetHandler`.
+ */
+typedef void (*GenericNamedPropertyDescriptorCallback)(
+    Local<Name> property, const PropertyCallbackInfo<Value>& info);
+
+/**
+ * See `v8::GenericNamedPropertyGetterCallback`.
  */
 typedef void (*IndexedPropertyGetterCallback)(
     uint32_t index,
     const PropertyCallbackInfo<Value>& info);
 
-
 /**
- * Returns the value if the setter intercepts the request.
- * Otherwise, returns an empty handle.
+ * See `v8::GenericNamedPropertySetterCallback`.
  */
 typedef void (*IndexedPropertySetterCallback)(
     uint32_t index,
     Local<Value> value,
     const PropertyCallbackInfo<Value>& info);
 
-
 /**
- * Returns a non-empty handle if the interceptor intercepts the request.
- * The result is an integer encoding property attributes.
+ * See `v8::GenericNamedPropertyQueryCallback`.
  */
 typedef void (*IndexedPropertyQueryCallback)(
     uint32_t index,
     const PropertyCallbackInfo<Integer>& info);
 
-
 /**
- * Returns a non-empty handle if the deleter intercepts the request.
- * The return value is true if the property could be deleted and false
- * otherwise.
+ * See `v8::GenericNamedPropertyDeleterCallback`.
  */
 typedef void (*IndexedPropertyDeleterCallback)(
     uint32_t index,
     const PropertyCallbackInfo<Boolean>& info);
 
-
 /**
- * Returns an array containing the indices of the properties the
- * indexed property getter intercepts.
+ * See `v8::GenericNamedPropertyEnumeratorCallback`.
  */
 typedef void (*IndexedPropertyEnumeratorCallback)(
     const PropertyCallbackInfo<Array>& info);
 
+/**
+ * See `v8::GenericNamedPropertyDefinerCallback`.
+ */
+typedef void (*IndexedPropertyDefinerCallback)(
+    uint32_t index, const PropertyDescriptor& desc,
+    const PropertyCallbackInfo<Value>& info);
+
+/**
+ * See `v8::GenericNamedPropertyDescriptorCallback`.
+ */
+typedef void (*IndexedPropertyDescriptorCallback)(
+    uint32_t index, const PropertyCallbackInfo<Value>& info);
 
 /**
  * Access type specification.
@@ -4617,23 +5185,37 @@
   friend class ObjectTemplate;
 };
 
-
+/**
+ * Configuration flags for v8::NamedPropertyHandlerConfiguration or
+ * v8::IndexedPropertyHandlerConfiguration.
+ */
 enum class PropertyHandlerFlags {
+  /**
+   * None.
+   */
   kNone = 0,
-  // See ALL_CAN_READ above.
+
+  /**
+   * See ALL_CAN_READ above.
+   */
   kAllCanRead = 1,
-  // Will not call into interceptor for properties on the receiver or prototype
-  // chain.  Currently only valid for named interceptors.
+
+  /**
+   * Will not call into interceptor for properties on the receiver or prototype
+   * chain, i.e., only call into interceptor for properties that do not exist.
+   * Currently only valid for named interceptors.
+   */
   kNonMasking = 1 << 1,
-  // Will not call into interceptor for symbol lookup.  Only meaningful for
-  // named interceptors.
+
+  /**
+   * Will not call into interceptor for symbol lookup.  Only meaningful for
+   * named interceptors.
+   */
   kOnlyInterceptStrings = 1 << 2,
 };
 
-
 struct NamedPropertyHandlerConfiguration {
   NamedPropertyHandlerConfiguration(
-      /** Note: getter is required **/
+      /** Note: getter is required */
       GenericNamedPropertyGetterCallback getter = 0,
       GenericNamedPropertySetterCallback setter = 0,
       GenericNamedPropertyQueryCallback query = 0,
@@ -4646,6 +5228,27 @@
         query(query),
         deleter(deleter),
         enumerator(enumerator),
+        definer(0),
+        descriptor(0),
+        data(data),
+        flags(flags) {}
+
+  NamedPropertyHandlerConfiguration(
+      GenericNamedPropertyGetterCallback getter,
+      GenericNamedPropertySetterCallback setter,
+      GenericNamedPropertyDescriptorCallback descriptor,
+      GenericNamedPropertyDeleterCallback deleter,
+      GenericNamedPropertyEnumeratorCallback enumerator,
+      GenericNamedPropertyDefinerCallback definer,
+      Local<Value> data = Local<Value>(),
+      PropertyHandlerFlags flags = PropertyHandlerFlags::kNone)
+      : getter(getter),
+        setter(setter),
+        query(0),
+        deleter(deleter),
+        enumerator(enumerator),
+        definer(definer),
+        descriptor(descriptor),
         data(data),
         flags(flags) {}
 
@@ -4654,6 +5257,8 @@
   GenericNamedPropertyQueryCallback query;
   GenericNamedPropertyDeleterCallback deleter;
   GenericNamedPropertyEnumeratorCallback enumerator;
+  GenericNamedPropertyDefinerCallback definer;
+  GenericNamedPropertyDescriptorCallback descriptor;
   Local<Value> data;
   PropertyHandlerFlags flags;
 };
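The six-argument constructor above is how an embedder opts into the new definer and descriptor interceptors; note that its parameter order (getter, setter, descriptor, deleter, enumerator, definer) differs from the member order. A sketch wiring placeholder callbacks onto an ObjectTemplate:

#include "v8.h"

// Placeholder interceptors: not calling GetReturnValue().Set() means the
// request is not intercepted and proceeds as if no handler were installed.
void Getter(v8::Local<v8::Name> property,
            const v8::PropertyCallbackInfo<v8::Value>& info) {}
void Setter(v8::Local<v8::Name> property, v8::Local<v8::Value> value,
            const v8::PropertyCallbackInfo<v8::Value>& info) {}
void Descriptor(v8::Local<v8::Name> property,
                const v8::PropertyCallbackInfo<v8::Value>& info) {}
void Deleter(v8::Local<v8::Name> property,
             const v8::PropertyCallbackInfo<v8::Boolean>& info) {}
void Enumerator(const v8::PropertyCallbackInfo<v8::Array>& info) {}
void Definer(v8::Local<v8::Name> property, const v8::PropertyDescriptor& desc,
             const v8::PropertyCallbackInfo<v8::Value>& info) {}

v8::Local<v8::ObjectTemplate> MakeTemplate(v8::Isolate* isolate) {
  v8::Local<v8::ObjectTemplate> templ = v8::ObjectTemplate::New(isolate);
  templ->SetHandler(v8::NamedPropertyHandlerConfiguration(
      Getter, Setter, Descriptor, Deleter, Enumerator, Definer));
  return templ;
}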
@@ -4661,7 +5266,7 @@
 
 struct IndexedPropertyHandlerConfiguration {
   IndexedPropertyHandlerConfiguration(
-      /** Note: getter is required **/
+      /** Note: getter is required */
       IndexedPropertyGetterCallback getter = 0,
       IndexedPropertySetterCallback setter = 0,
       IndexedPropertyQueryCallback query = 0,
@@ -4674,6 +5279,27 @@
         query(query),
         deleter(deleter),
         enumerator(enumerator),
+        definer(0),
+        descriptor(0),
+        data(data),
+        flags(flags) {}
+
+  IndexedPropertyHandlerConfiguration(
+      IndexedPropertyGetterCallback getter,
+      IndexedPropertySetterCallback setter,
+      IndexedPropertyDescriptorCallback descriptor,
+      IndexedPropertyDeleterCallback deleter,
+      IndexedPropertyEnumeratorCallback enumerator,
+      IndexedPropertyDefinerCallback definer,
+      Local<Value> data = Local<Value>(),
+      PropertyHandlerFlags flags = PropertyHandlerFlags::kNone)
+      : getter(getter),
+        setter(setter),
+        query(0),
+        deleter(deleter),
+        enumerator(enumerator),
+        definer(definer),
+        descriptor(descriptor),
         data(data),
         flags(flags) {}
 
@@ -4682,6 +5308,8 @@
   IndexedPropertyQueryCallback query;
   IndexedPropertyDeleterCallback deleter;
   IndexedPropertyEnumeratorCallback enumerator;
+  IndexedPropertyDefinerCallback definer;
+  IndexedPropertyDescriptorCallback descriptor;
   Local<Value> data;
   PropertyHandlerFlags flags;
 };
@@ -4977,6 +5605,10 @@
   void set_auto_enable(bool value) { auto_enable_ = value; }
   bool auto_enable() { return auto_enable_; }
 
+  // Disallow copying and assigning.
+  Extension(const Extension&) = delete;
+  void operator=(const Extension&) = delete;
+
  private:
   const char* name_;
   size_t source_length_;  // expected to initialize before source_
@@ -4984,10 +5616,6 @@
   int dep_count_;
   const char** deps_;
   bool auto_enable_;
-
-  // Disallow copying and assigning.
-  Extension(const Extension&);
-  void operator=(const Extension&);
 };
 
 
@@ -5213,13 +5841,13 @@
    */
   static bool IsRunningMicrotasks(Isolate* isolate);
 
+  // Prevent copying.
+  MicrotasksScope(const MicrotasksScope&) = delete;
+  MicrotasksScope& operator=(const MicrotasksScope&) = delete;
+
  private:
   internal::Isolate* const isolate_;
   bool run_;
-
-  // Prevent copying.
-  MicrotasksScope(const MicrotasksScope&);
-  MicrotasksScope& operator=(const MicrotasksScope&);
 };
 
 
@@ -5520,8 +6148,8 @@
  * Interface for tracing through the embedder heap. During the v8 garbage
  * collection, v8 collects hidden fields of all potential wrappers, and at the
  * end of its marking phase iterates the collection and asks the embedder to
- * trace through its heap and call PersistentBase::RegisterExternalReference on
- * each js object reachable from any of the given wrappers.
+ * trace through its heap and use reporter to report each js object reachable
+ * from any of the given wrappers.
  *
  * Before the first call to the TraceWrappersFrom function TracePrologue will be
  * called. When the garbage collection cycle is finished, TraceEpilogue will be
@@ -5530,36 +6158,49 @@
 class V8_EXPORT EmbedderHeapTracer {
  public:
   enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };
+
   struct AdvanceTracingActions {
     explicit AdvanceTracingActions(ForceCompletionAction force_completion_)
         : force_completion(force_completion_) {}
 
     ForceCompletionAction force_completion;
   };
+
   /**
-   * V8 will call this method with internal fields of found wrappers.
-   * Embedder is expected to store them in it's marking deque and trace
-   * reachable wrappers from them when asked by AdvanceTracing method.
+   * V8 will call this method with internal fields of found wrappers. The
+   * embedder is expected to store them in its marking deque and trace
+   * reachable wrappers from them when called through |AdvanceTracing|.
    */
   virtual void RegisterV8References(
       const std::vector<std::pair<void*, void*> >& internal_fields) = 0;
+
   /**
-   * V8 will call this method at the beginning of the gc cycle.
+   * Deprecated.
+   * TODO(hlopko) Remove once the migration to reporter is finished.
    */
-  virtual void TracePrologue() = 0;
+  virtual void TracePrologue() {}
+
+  /**
+   * V8 will call this method at the beginning of a GC cycle. Embedder is
+   * expected to use EmbedderReachableReferenceReporter for reporting all
+   * reachable v8 objects.
+   */
+  virtual void TracePrologue(EmbedderReachableReferenceReporter* reporter) {}
+
   /**
    * Embedder is expected to trace its heap starting from wrappers reported by
-   * RegisterV8References method, and call
-   * PersistentBase::RegisterExternalReference() on all reachable wrappers.
+   * RegisterV8References, and use the reporter for all reachable wrappers.
    * Embedder is expected to stop tracing by the given deadline.
    *
    * Returns true if there is still work to do.
    */
   virtual bool AdvanceTracing(double deadline_in_ms,
                               AdvanceTracingActions actions) = 0;
+
   /**
-   * V8 will call this method at the end of the gc cycle. Allocation is *not*
-   * allowed in the TraceEpilogue.
+   * V8 will call this method at the end of a GC cycle.
+   *
+   * Note that allocation is *not* allowed within |TraceEpilogue|.
    */
   virtual void TraceEpilogue() = 0;
 
@@ -5574,6 +6215,11 @@
    */
   virtual void AbortTracing() {}
 
+  /**
+   * Returns the number of wrappers that are still to be traced by the embedder.
+   */
+  virtual size_t NumberOfWrappersToTrace() { return 0; }
+
  protected:
   virtual ~EmbedderHeapTracer() = default;
 };
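Under the reporter-based flow above, the embedder receives an EmbedderReachableReferenceReporter in TracePrologue and reports each reachable wrapper through it. A skeletal tracer with the wrapper bookkeeping left abstract; SketchHeapTracer is an illustrative name:

#include <utility>
#include <vector>

#include "v8.h"

// Skeletal tracer: stores the reporter handed over in TracePrologue and would
// use it while tracing wrappers registered by V8.
class SketchHeapTracer : public v8::EmbedderHeapTracer {
 public:
  void TracePrologue(
      v8::EmbedderReachableReferenceReporter* reporter) override {
    reporter_ = reporter;
  }

  void RegisterV8References(
      const std::vector<std::pair<void*, void*>>& internal_fields) override {
    // A real embedder would push these wrapper fields onto a marking deque.
    pending_.insert(pending_.end(), internal_fields.begin(),
                    internal_fields.end());
  }

  bool AdvanceTracing(double deadline_in_ms,
                      AdvanceTracingActions actions) override {
    // A real implementation traces from pending_ and calls
    // reporter_->ReportExternalReference(...) for each reachable v8::Value*.
    pending_.clear();
    return false;  // No more work to do.
  }

  void TraceEpilogue() override { reporter_ = nullptr; }

  size_t NumberOfWrappersToTrace() override { return pending_.size(); }

 private:
  v8::EmbedderReachableReferenceReporter* reporter_ = nullptr;
  std::vector<std::pair<void*, void*>> pending_;
};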
@@ -5671,12 +6317,12 @@
 
     ~Scope() { isolate_->Exit(); }
 
+    // Prevent copying of Scope objects.
+    Scope(const Scope&) = delete;
+    Scope& operator=(const Scope&) = delete;
+
    private:
     Isolate* const isolate_;
-
-    // Prevent copying of Scope objects.
-    Scope(const Scope&);
-    Scope& operator=(const Scope&);
   };
 
 
@@ -5690,14 +6336,15 @@
     DisallowJavascriptExecutionScope(Isolate* isolate, OnFailure on_failure);
     ~DisallowJavascriptExecutionScope();
 
+    // Prevent copying of Scope objects.
+    DisallowJavascriptExecutionScope(const DisallowJavascriptExecutionScope&) =
+        delete;
+    DisallowJavascriptExecutionScope& operator=(
+        const DisallowJavascriptExecutionScope&) = delete;
+
    private:
     bool on_failure_;
     void* internal_;
-
-    // Prevent copying of Scope objects.
-    DisallowJavascriptExecutionScope(const DisallowJavascriptExecutionScope&);
-    DisallowJavascriptExecutionScope& operator=(
-        const DisallowJavascriptExecutionScope&);
   };
 
 
@@ -5709,14 +6356,15 @@
     explicit AllowJavascriptExecutionScope(Isolate* isolate);
     ~AllowJavascriptExecutionScope();
 
+    // Prevent copying of Scope objects.
+    AllowJavascriptExecutionScope(const AllowJavascriptExecutionScope&) =
+        delete;
+    AllowJavascriptExecutionScope& operator=(
+        const AllowJavascriptExecutionScope&) = delete;
+
    private:
     void* internal_throws_;
     void* internal_assert_;
-
-    // Prevent copying of Scope objects.
-    AllowJavascriptExecutionScope(const AllowJavascriptExecutionScope&);
-    AllowJavascriptExecutionScope& operator=(
-        const AllowJavascriptExecutionScope&);
   };
 
   /**
@@ -5728,13 +6376,14 @@
     explicit SuppressMicrotaskExecutionScope(Isolate* isolate);
     ~SuppressMicrotaskExecutionScope();
 
+    // Prevent copying of Scope objects.
+    SuppressMicrotaskExecutionScope(const SuppressMicrotaskExecutionScope&) =
+        delete;
+    SuppressMicrotaskExecutionScope& operator=(
+        const SuppressMicrotaskExecutionScope&) = delete;
+
    private:
     internal::Isolate* const isolate_;
-
-    // Prevent copying of Scope objects.
-    SuppressMicrotaskExecutionScope(const SuppressMicrotaskExecutionScope&);
-    SuppressMicrotaskExecutionScope& operator=(
-        const SuppressMicrotaskExecutionScope&);
   };
 
   /**
@@ -6460,17 +7109,17 @@
    */
   bool IsInUse();
 
+  Isolate() = delete;
+  ~Isolate() = delete;
+  Isolate(const Isolate&) = delete;
+  Isolate& operator=(const Isolate&) = delete;
+  void* operator new(size_t size) = delete;
+  void operator delete(void*, size_t) = delete;
+
  private:
   template <class K, class V, class Traits>
   friend class PersistentValueMapBase;
 
-  Isolate();
-  Isolate(const Isolate&);
-  ~Isolate();
-  Isolate& operator=(const Isolate&);
-  void* operator new(size_t size);
-  void operator delete(void*, size_t);
-
   void SetObjectGroupId(internal::Object** object, UniqueId id);
   void SetReferenceFromGroup(UniqueId id, internal::Object** object);
   void SetReference(internal::Object** parent, internal::Object** child);
@@ -6857,8 +7506,6 @@
                          int* index);
   static Local<Value> GetEternal(Isolate* isolate, int index);
 
-  static void RegisterExternallyReferencedObject(internal::Object** object,
-                                                 internal::Isolate* isolate);
   template <class K, class V, class T>
   friend class PersistentValueMapBase;
 
@@ -6925,12 +7572,12 @@
    */
   StartupData CreateBlob(FunctionCodeHandling function_code_handling);
 
+  // Disallow copying and assigning.
+  SnapshotCreator(const SnapshotCreator&) = delete;
+  void operator=(const SnapshotCreator&) = delete;
+
  private:
   void* data_;
-
-  // Disallow copying and assigning.
-  SnapshotCreator(const SnapshotCreator&);
-  void operator=(const SnapshotCreator&);
 };
 
 /**
@@ -7134,15 +7781,14 @@
     return handler->js_stack_comparable_address_;
   }
 
+  TryCatch(const TryCatch&) = delete;
+  void operator=(const TryCatch&) = delete;
+  void* operator new(size_t size) = delete;
+  void operator delete(void*, size_t) = delete;
+
  private:
   void ResetInternal();
 
-  // Make it hard to create heap-allocated TryCatch blocks.
-  TryCatch(const TryCatch&);
-  void operator=(const TryCatch&);
-  void* operator new(size_t size);
-  void operator delete(void*, size_t);
-
   v8::internal::Isolate* isolate_;
   v8::TryCatch* next_;
   void* exception_;
@@ -7498,16 +8144,16 @@
    */
   static bool IsActive();
 
+  // Disallow copying and assigning.
+  Locker(const Locker&) = delete;
+  void operator=(const Locker&) = delete;
+
  private:
   void Initialize(Isolate* isolate);
 
   bool has_lock_;
   bool top_level_;
   internal::Isolate* isolate_;
-
-  // Disallow copying and assigning.
-  Locker(const Locker&);
-  void operator=(const Locker&);
 };
 
 
@@ -7643,8 +8289,8 @@
   static const int kNodeIsPartiallyDependentShift = 4;
   static const int kNodeIsActiveShift = 4;
 
-  static const int kJSObjectType = 0xb7;
-  static const int kJSApiObjectType = 0xb6;
+  static const int kJSObjectType = 0xb9;
+  static const int kJSApiObjectType = 0xb8;
   static const int kFirstNonstringType = 0x80;
   static const int kOddballType = 0x83;
   static const int kForeignType = 0x87;
@@ -7920,11 +8566,10 @@
 }
 
 template <class T>
-void PersistentBase<T>::RegisterExternalReference(Isolate* isolate) const {
+void PersistentBase<T>::RegisterExternalReference(
+    EmbedderReachableReferenceReporter* reporter) const {
   if (IsEmpty()) return;
-  V8::RegisterExternallyReferencedObject(
-      reinterpret_cast<internal::Object**>(this->val_),
-      reinterpret_cast<internal::Isolate*>(isolate));
+  reporter->ReportExternalReference(this->val_);
 }
 
 template <class T>
diff --git a/infra/mb/mb_config.pyl b/infra/mb/mb_config.pyl
index edfd254..2747be5 100644
--- a/infra/mb/mb_config.pyl
+++ b/infra/mb/mb_config.pyl
@@ -9,12 +9,18 @@
   # Bots are ordered by appearance on waterfall.
   'masters': {
     'developer_default': {
+      'arm.debug': 'default_debug_arm',
+      'arm.optdebug': 'default_optdebug_arm',
+      'arm.release': 'default_release_arm',
+      'arm64.debug': 'default_debug_arm64',
+      'arm64.optdebug': 'default_optdebug_arm64',
+      'arm64.release': 'default_release_arm64',
+      'ia32.debug': 'default_debug_x86',
+      'ia32.optdebug': 'default_optdebug_x86',
+      'ia32.release': 'default_release_x86',
       'x64.debug': 'default_debug_x64',
       'x64.optdebug': 'default_optdebug_x64',
       'x64.release': 'default_release_x64',
-      'x86.debug': 'default_debug_x86',
-      'x86.optdebug': 'default_optdebug_x86',
-      'x86.release': 'default_release_x86',
     },
 
     'client.dart.fyi': {
@@ -32,7 +38,7 @@
       'V8 Linux - nosnap builder': 'gn_release_x86_no_snap',
       'V8 Linux - nosnap debug builder': 'gn_debug_x86_no_snap',
       'V8 Linux - shared': 'gn_release_x86_shared_verify_heap',
-      'V8 Linux - noi18n - debug': 'gyp_debug_x86_no_i18n',
+      'V8 Linux - noi18n - debug': 'gn_debug_x86_no_i18n',
       # Linux64.
       'V8 Linux64 - builder': 'gn_release_x64',
       'V8 Linux64 - debug builder': 'gn_debug_x64_valgrind',
@@ -40,34 +46,35 @@
       'V8 Linux64 - internal snapshot': 'gn_release_x64_internal',
       'V8 Linux64 - gyp': 'gyp_release_x64',
       # Windows.
-      'V8 Win32 - builder': 'gyp_release_x86_minimal_symbols',
-      'V8 Win32 - debug builder': 'gyp_debug_x86_minimal_symbols',
+      'V8 Win32 - builder': 'gn_release_x86_minimal_symbols',
+      'V8 Win32 - debug builder': 'gn_debug_x86_minimal_symbols',
       'V8 Win32 - nosnap - shared':
-        'gyp_release_x86_no_snap_shared_minimal_symbols',
-      'V8 Win64': 'gyp_release_x64_minimal_symbols',
-      'V8 Win64 - debug': 'gyp_debug_x64_minimal_symbols',
-      'V8 Win64 - clang': 'gyp_release_x64_clang',
+        'gn_release_x86_no_snap_shared_minimal_symbols',
+      'V8 Win64': 'gn_release_x64_minimal_symbols',
+      'V8 Win64 - debug': 'gn_debug_x64_minimal_symbols',
+      # TODO(machenbach): Switch plugins on when errors are fixed.
+      'V8 Win64 - clang': 'gn_release_x64_clang',
       # Mac.
       'V8 Mac': 'gn_release_x86',
       'V8 Mac - debug': 'gn_debug_x86',
       'V8 Mac64': 'gn_release_x64',
       'V8 Mac64 - debug': 'gn_debug_x64',
       'V8 Mac GC Stress': 'gn_debug_x86',
-      'V8 Mac64 ASAN': 'gyp_release_x64_asan',
+      'V8 Mac64 ASAN': 'gn_release_x64_asan_no_lsan',
       # Sanitizers.
-      'V8 Linux64 ASAN': 'gyp_release_x64_asan',
+      'V8 Linux64 ASAN': 'gn_release_x64_asan',
       'V8 Linux64 TSAN': 'gn_release_x64_tsan',
       'V8 Linux - arm64 - sim - MSAN': 'gn_release_simulate_arm64_msan',
       # Clusterfuzz.
       'V8 Linux64 ASAN no inline - release builder':
-          'gyp_release_x64_asan_symbolized_edge_verify_heap',
-      'V8 Linux64 ASAN - debug builder': 'gyp_debug_x64_asan_edge',
+          'gn_release_x64_asan_symbolized_edge_verify_heap',
+      'V8 Linux64 ASAN - debug builder': 'gn_debug_x64_asan_edge',
       'V8 Linux64 ASAN arm64 - debug builder':
-          'gyp_debug_simulate_arm64_asan_edge',
+          'gn_debug_simulate_arm64_asan_edge',
       'V8 Linux ASAN arm - debug builder':
-          'gyp_debug_simulate_arm_asan_edge',
+          'gn_debug_simulate_arm_asan_edge',
       'V8 Linux ASAN mipsel - debug builder':
-          'gyp_debug_simulate_mipsel_asan_edge',
+          'gn_debug_simulate_mipsel_asan_edge',
       # Misc.
       'V8 Linux gcc 4.8': 'gn_release_x86_gcc',
       # FYI.
@@ -86,13 +93,13 @@
 
     'client.v8.ports': {
       # Arm.
-      'V8 Arm - builder': 'gyp_release_arm',
-      'V8 Arm - debug builder': 'gyp_debug_arm',
-      'V8 Android Arm - builder': 'gyp_release_android_arm',
-      'V8 Linux - arm - sim': 'gyp_release_simulate_arm',
-      'V8 Linux - arm - sim - debug': 'gyp_debug_simulate_arm',
+      'V8 Arm - builder': 'gn_release_arm',
+      'V8 Arm - debug builder': 'gn_debug_arm',
+      'V8 Android Arm - builder': 'gn_release_android_arm',
+      'V8 Linux - arm - sim': 'gn_release_simulate_arm',
+      'V8 Linux - arm - sim - debug': 'gn_debug_simulate_arm',
       # Arm64.
-      'V8 Android Arm64 - builder': 'gyp_release_android_arm64',
+      'V8 Android Arm64 - builder': 'gn_release_android_arm64',
       'V8 Linux - arm64 - sim': 'gn_release_simulate_arm64',
       'V8 Linux - arm64 - sim - debug': 'gn_debug_simulate_arm64',
       'V8 Linux - arm64 - sim - nosnap - debug':
@@ -100,8 +107,8 @@
       'V8 Linux - arm64 - sim - gc stress': 'gn_debug_simulate_arm64',
       # Mips.
       'V8 Mips - builder': 'gyp_release_mips_no_snap_no_i18n',
-      'V8 Linux - mipsel - sim - builder': 'gyp_release_simulate_mipsel',
-      'V8 Linux - mips64el - sim - builder': 'gyp_release_simulate_mips64el',
+      'V8 Linux - mipsel - sim - builder': 'gn_release_simulate_mipsel',
+      'V8 Linux - mips64el - sim - builder': 'gn_release_simulate_mips64el',
       # PPC.
       'V8 Linux - ppc - sim': 'gyp_release_simulate_ppc',
       'V8 Linux - ppc64 - sim': 'gyp_release_simulate_ppc64',
@@ -117,18 +124,18 @@
       'V8 Linux - beta branch - debug': 'gn_debug_x86',
       'V8 Linux - stable branch': 'gn_release_x86',
       'V8 Linux - stable branch - debug': 'gn_debug_x86',
-      'V8 Linux64 - beta branch': 'gyp_release_x64',
+      'V8 Linux64 - beta branch': 'gn_release_x64',
       'V8 Linux64 - beta branch - debug': 'gn_debug_x64',
       'V8 Linux64 - stable branch': 'gn_release_x64',
       'V8 Linux64 - stable branch - debug': 'gn_debug_x64',
-      'V8 arm - sim - beta branch': 'gyp_release_simulate_arm',
-      'V8 arm - sim - beta branch - debug': 'gyp_debug_simulate_arm',
-      'V8 arm - sim - stable branch': 'gyp_release_simulate_arm',
-      'V8 arm - sim - stable branch - debug': 'gyp_debug_simulate_arm',
-      'V8 mips64el - sim - beta branch': 'gyp_release_simulate_mips64el',
-      'V8 mips64el - sim - stable branch': 'gyp_release_simulate_mips64el',
-      'V8 mipsel - sim - beta branch': 'gyp_release_simulate_mipsel',
-      'V8 mipsel - sim - stable branch': 'gyp_release_simulate_mipsel',
+      'V8 arm - sim - beta branch': 'gn_release_simulate_arm',
+      'V8 arm - sim - beta branch - debug': 'gn_debug_simulate_arm',
+      'V8 arm - sim - stable branch': 'gn_release_simulate_arm',
+      'V8 arm - sim - stable branch - debug': 'gn_debug_simulate_arm',
+      'V8 mips64el - sim - beta branch': 'gn_release_simulate_mips64el',
+      'V8 mips64el - sim - stable branch': 'gn_release_simulate_mips64el',
+      'V8 mipsel - sim - beta branch': 'gn_release_simulate_mipsel',
+      'V8 mipsel - sim - stable branch': 'gn_release_simulate_mipsel',
       'V8 ppc - sim - beta branch': 'gyp_release_simulate_ppc',
       'V8 ppc - sim - stable branch': 'gyp_release_simulate_ppc',
       'V8 ppc64 - sim - beta branch': 'gyp_release_simulate_ppc64',
@@ -143,8 +150,8 @@
       'v8_linux_avx2_dbg': 'gn_debug_x86_trybot',
       'v8_linux_nodcheck_rel_ng': 'gn_release_x86_minimal_symbols',
       'v8_linux_dbg_ng': 'gn_debug_x86_trybot',
-      'v8_linux_noi18n_rel_ng': 'gyp_release_x86_no_i18n_trybot',
-      'v8_linux_gc_stress_dbg': 'gyp_debug_x86_trybot',
+      'v8_linux_noi18n_rel_ng': 'gn_release_x86_no_i18n_trybot',
+      'v8_linux_gc_stress_dbg': 'gn_debug_x86_trybot',
       'v8_linux_nosnap_rel': 'gn_release_x86_no_snap_trybot',
       'v8_linux_nosnap_dbg': 'gn_debug_x86_no_snap_trybot',
       'v8_linux_gcc_compile_rel': 'gn_release_x86_gcc_minimal_symbols',
@@ -153,34 +160,34 @@
       'v8_linux64_gyp_rel_ng': 'gyp_release_x64',
       'v8_linux64_avx2_rel_ng': 'gn_release_x64_trybot',
       'v8_linux64_avx2_dbg': 'gn_debug_x64_trybot',
-      'v8_linux64_asan_rel_ng': 'gyp_release_x64_asan_minimal_symbols',
+      'v8_linux64_asan_rel_ng': 'gn_release_x64_asan_minimal_symbols',
       'v8_linux64_msan_rel': 'gn_release_simulate_arm64_msan_minimal_symbols',
       'v8_linux64_sanitizer_coverage_rel':
           'gyp_release_x64_asan_minimal_symbols_coverage',
       'v8_linux64_tsan_rel': 'gn_release_x64_tsan_minimal_symbols',
-      'v8_win_dbg': 'gyp_debug_x86_trybot',
-      'v8_win_compile_dbg': 'gyp_debug_x86_trybot',
-      'v8_win_rel_ng': 'gyp_release_x86_trybot',
+      'v8_win_dbg': 'gn_debug_x86_trybot',
+      'v8_win_compile_dbg': 'gn_debug_x86_trybot',
+      'v8_win_rel_ng': 'gn_release_x86_trybot',
       'v8_win_nosnap_shared_rel_ng':
-        'gyp_release_x86_no_snap_shared_minimal_symbols',
-      'v8_win64_dbg': 'gyp_debug_x64_minimal_symbols',
-      'v8_win64_rel_ng': 'gyp_release_x64_trybot',
+        'gn_release_x86_no_snap_shared_minimal_symbols',
+      'v8_win64_dbg': 'gn_debug_x64_minimal_symbols',
+      'v8_win64_rel_ng': 'gn_release_x64_trybot',
       'v8_mac_rel_ng': 'gn_release_x86_trybot',
       'v8_mac_dbg': 'gn_debug_x86_trybot',
       'v8_mac_gc_stress_dbg': 'gn_debug_x86_trybot',
       'v8_mac64_rel': 'gn_release_x64_trybot',
       'v8_mac64_dbg': 'gn_debug_x64_minimal_symbols',
-      'v8_mac64_asan_rel': 'gyp_release_x64_asan',
-      'v8_linux_arm_rel_ng': 'gyp_release_simulate_arm_trybot',
-      'v8_linux_arm_dbg': 'gyp_debug_simulate_arm',
-      'v8_linux_arm_armv8a_rel': 'gyp_release_simulate_arm_trybot',
-      'v8_linux_arm_armv8a_dbg': 'gyp_debug_simulate_arm',
+      'v8_mac64_asan_rel': 'gn_release_x64_asan_no_lsan',
+      'v8_linux_arm_rel_ng': 'gn_release_simulate_arm_trybot',
+      'v8_linux_arm_dbg': 'gn_debug_simulate_arm',
+      'v8_linux_arm_armv8a_rel': 'gn_release_simulate_arm_trybot',
+      'v8_linux_arm_armv8a_dbg': 'gn_debug_simulate_arm',
       'v8_linux_arm64_rel_ng': 'gn_release_simulate_arm64_trybot',
       'v8_linux_arm64_dbg': 'gn_debug_simulate_arm64',
       'v8_linux_arm64_gc_stress_dbg': 'gn_debug_simulate_arm64',
-      'v8_linux_mipsel_compile_rel': 'gyp_release_simulate_mipsel',
-      'v8_linux_mips64el_compile_rel': 'gyp_release_simulate_mips64el',
-      'v8_android_arm_compile_rel': 'gyp_release_android_arm',
+      'v8_linux_mipsel_compile_rel': 'gn_release_simulate_mipsel',
+      'v8_linux_mips64el_compile_rel': 'gn_release_simulate_mips64el',
+      'v8_android_arm_compile_rel': 'gn_release_android_arm',
     },
   },
 
@@ -189,6 +196,20 @@
   # gyp/gn, release/debug, arch type, other values alphabetically.
   'configs': {
     # Developer default configs.
+    'default_debug_arm': [
+      'gn', 'debug', 'simulate_arm', 'v8_enable_slow_dchecks',
+      'v8_full_debug'],
+    'default_optdebug_arm': [
+      'gn', 'debug', 'simulate_arm', 'v8_enable_slow_dchecks'],
+    'default_release_arm': [
+      'gn', 'release', 'simulate_arm'],
+    'default_debug_arm64': [
+      'gn', 'debug', 'simulate_arm64', 'v8_enable_slow_dchecks',
+      'v8_full_debug'],
+    'default_optdebug_arm64': [
+      'gn', 'debug', 'simulate_arm64', 'v8_enable_slow_dchecks'],
+    'default_release_arm64': [
+      'gn', 'release', 'simulate_arm64'],
     'default_debug_x64': [
       'gn', 'debug', 'x64', 'v8_enable_slow_dchecks', 'v8_full_debug'],
     'default_optdebug_x64': [
@@ -204,12 +225,24 @@
 
 
     # GN debug configs for simulators.
+    'gn_debug_simulate_arm': [
+      'gn', 'debug_bot', 'simulate_arm', 'swarming'],
+    'gn_debug_simulate_arm_asan_edge': [
+      'gn', 'debug_bot', 'simulate_arm', 'asan', 'edge'],
     'gn_debug_simulate_arm64': [
       'gn', 'debug_bot', 'simulate_arm64', 'swarming'],
+    'gn_debug_simulate_arm64_asan_edge': [
+      'gn', 'debug_bot', 'simulate_arm64', 'asan', 'lsan', 'edge'],
     'gn_debug_simulate_arm64_no_snap': [
       'gn', 'debug_bot', 'simulate_arm64', 'swarming', 'v8_snapshot_none'],
+    'gn_debug_simulate_mipsel_asan_edge': [
+      'gn', 'debug_bot', 'simulate_mipsel', 'asan', 'edge'],
 
     # GN release configs for simulators.
+    'gn_release_simulate_arm': [
+      'gn', 'release_bot', 'simulate_arm', 'swarming'],
+    'gn_release_simulate_arm_trybot': [
+      'gn', 'release_trybot', 'simulate_arm', 'swarming'],
     'gn_release_simulate_arm64': [
       'gn', 'release_bot', 'simulate_arm64', 'swarming'],
     'gn_release_simulate_arm64_msan': [
@@ -219,12 +252,44 @@
       'swarming'],
     'gn_release_simulate_arm64_trybot': [
       'gn', 'release_trybot', 'simulate_arm64', 'swarming'],
+    'gn_release_simulate_mipsel': [
+      'gn', 'release_bot', 'simulate_mipsel', 'swarming'],
+    'gn_release_simulate_mips64el': [
+      'gn', 'release_bot', 'simulate_mips64el', 'swarming'],
+
+    # GN debug configs for arm.
+    'gn_debug_arm': [
+      'gn', 'debug_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
+
+    # GN release configs for arm.
+    'gn_release_arm': [
+      'gn', 'release_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
+    'gn_release_android_arm': [
+      'gn', 'release_bot', 'arm', 'android', 'crosscompile',
+      'minimal_symbols', 'swarming'],
+    'gn_release_android_arm64': [
+      'gn', 'release_bot', 'arm64', 'android', 'crosscompile',
+      'minimal_symbols', 'swarming'],
 
     # GN release configs for x64.
     'gn_release_x64': [
       'gn', 'release_bot', 'x64', 'swarming'],
+    'gn_release_x64_asan': [
+      'gn', 'release_bot', 'x64', 'asan', 'lsan', 'swarming'],
+    'gn_release_x64_asan_minimal_symbols': [
+      'gn', 'release_bot', 'x64', 'asan', 'lsan', 'minimal_symbols',
+      'swarming'],
+    'gn_release_x64_asan_no_lsan': [
+      'gn', 'release_bot', 'x64', 'asan', 'swarming'],
+    'gn_release_x64_asan_symbolized_edge_verify_heap': [
+      'gn', 'release_bot', 'x64', 'asan', 'edge', 'lsan', 'symbolized',
+      'v8_verify_heap'],
+    'gn_release_x64_clang': [
+      'gn', 'release_bot', 'x64', 'clang', 'swarming'],
     'gn_release_x64_internal': [
       'gn', 'release_bot', 'x64', 'swarming', 'v8_snapshot_internal'],
+    'gn_release_x64_minimal_symbols': [
+      'gn', 'release_bot', 'x64', 'minimal_symbols', 'swarming'],
     'gn_release_x64_trybot': [
       'gn', 'release_trybot', 'x64', 'swarming'],
     'gn_release_x64_tsan': [
@@ -235,6 +300,8 @@
     # GN debug configs for x64.
     'gn_debug_x64': [
       'gn', 'debug_bot', 'x64', 'swarming'],
+    'gn_debug_x64_asan_edge': [
+      'gn', 'debug_bot', 'x64', 'asan', 'lsan', 'edge'],
     'gn_debug_x64_custom': [
       'gn', 'debug_bot', 'x64', 'swarming', 'v8_snapshot_custom'],
     'gn_debug_x64_minimal_symbols': [
@@ -247,6 +314,10 @@
     # GN debug configs for x86.
     'gn_debug_x86': [
       'gn', 'debug_bot', 'x86', 'swarming'],
+    'gn_debug_x86_minimal_symbols': [
+      'gn', 'debug_bot', 'x86', 'minimal_symbols', 'swarming'],
+    'gn_debug_x86_no_i18n': [
+      'gn', 'debug_bot', 'x86', 'v8_no_i18n'],
     'gn_debug_x86_no_snap': [
       'gn', 'debug_bot', 'x86', 'swarming', 'v8_snapshot_none'],
     'gn_debug_x86_no_snap_trybot': [
@@ -267,8 +338,13 @@
       'gn', 'release_trybot', 'x86', 'gcmole', 'swarming'],
     'gn_release_x86_minimal_symbols': [
       'gn', 'release_bot', 'x86', 'minimal_symbols', 'swarming'],
+    'gn_release_x86_no_i18n_trybot': [
+      'gn', 'release_trybot', 'x86', 'swarming', 'v8_no_i18n'],
     'gn_release_x86_no_snap': [
       'gn', 'release_bot', 'x86', 'swarming', 'v8_snapshot_none'],
+    'gn_release_x86_no_snap_shared_minimal_symbols': [
+      'gn', 'release', 'x86', 'goma', 'minimal_symbols', 'shared', 'swarming',
+      'v8_snapshot_none'],
     'gn_release_x86_no_snap_trybot': [
       'gn', 'release_trybot', 'x86', 'swarming', 'v8_snapshot_none'],
     'gn_release_x86_shared_verify_heap': [
@@ -276,65 +352,25 @@
     'gn_release_x86_trybot': [
       'gn', 'release_trybot', 'x86', 'swarming'],
 
-    # Gyp debug configs for arm.
-    'gyp_debug_arm': [
-      'gyp', 'debug_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
-
     # Gyp debug configs for simulators.
-    'gyp_debug_simulate_arm': [
-      'gyp', 'debug_bot', 'simulate_arm', 'swarming'],
-    'gyp_debug_simulate_arm_asan_edge': [
-      'gyp', 'debug_bot', 'simulate_arm', 'asan', 'edge'],
-    'gyp_debug_simulate_arm64_asan_edge': [
-      'gyp', 'debug_bot', 'simulate_arm64', 'asan', 'lsan', 'edge'],
-    'gyp_debug_simulate_mipsel_asan_edge': [
-      'gyp', 'debug_bot', 'simulate_mipsel', 'asan', 'edge'],
     'gyp_debug_simulate_x87_no_snap': [
       'gyp', 'debug_bot', 'simulate_x87', 'swarming', 'v8_snapshot_none'],
 
-    # Gyp debug configs for x64.
-    'gyp_debug_x64_asan_edge': [
-      'gyp', 'debug_bot', 'x64', 'asan', 'lsan', 'edge'],
-    'gyp_debug_x64_minimal_symbols': [
-      'gyp', 'debug_bot', 'x64', 'minimal_symbols', 'swarming'],
-
     # Gyp debug configs for x86.
     'gyp_debug_x86': [
       'gyp', 'debug_bot', 'x86', 'swarming'],
-    'gyp_debug_x86_minimal_symbols': [
-      'gyp', 'debug_bot', 'x86', 'minimal_symbols', 'swarming'],
-    'gyp_debug_x86_trybot': [
-      'gyp', 'debug_trybot', 'x86', 'swarming'],
-    'gyp_debug_x86_no_i18n': [
-      'gyp', 'debug_bot', 'x86', 'v8_no_i18n'],
     'gyp_debug_x86_vtunejit': [
       'gyp', 'debug_bot', 'x86', 'v8_enable_vtunejit'],
     'gyp_full_debug_x86': [
       'gyp', 'debug', 'x86', 'goma', 'static', 'v8_enable_slow_dchecks',
       'v8_full_debug'],
 
-    # Gyp release configs for arm.
-    'gyp_release_arm': [
-      'gyp', 'release_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
-    'gyp_release_android_arm': [
-      'gyp', 'release_bot', 'arm', 'android', 'crosscompile', 'swarming'],
-    'gyp_release_android_arm64': [
-      'gyp', 'release_bot', 'arm64', 'android', 'crosscompile', 'swarming'],
-
     # Gyp release configs for mips.
     'gyp_release_mips_no_snap_no_i18n': [
       'gyp', 'release', 'mips', 'crosscompile', 'static', 'v8_no_i18n',
       'v8_snapshot_none'],
 
     # Gyp release configs for simulators.
-    'gyp_release_simulate_arm': [
-      'gyp', 'release_bot', 'simulate_arm', 'swarming'],
-    'gyp_release_simulate_arm_trybot': [
-      'gyp', 'release_trybot', 'simulate_arm', 'swarming'],
-    'gyp_release_simulate_mipsel': [
-      'gyp', 'release_bot', 'simulate_mipsel', 'swarming'],
-    'gyp_release_simulate_mips64el': [
-      'gyp', 'release_bot', 'simulate_mips64el', 'swarming'],
     'gyp_release_simulate_ppc': [
       'gyp', 'release_bot', 'simulate_ppc', 'swarming'],
     'gyp_release_simulate_ppc64': [
@@ -347,44 +383,21 @@
     # Gyp release configs for x64.
     'gyp_release_x64': [
       'gyp', 'release_bot', 'x64', 'swarming'],
-    'gyp_release_x64_asan': [
-      'gyp', 'release_bot', 'x64', 'asan', 'lsan', 'swarming'],
-    'gyp_release_x64_asan_minimal_symbols': [
-      'gyp', 'release_bot', 'x64', 'asan', 'lsan', 'minimal_symbols',
-      'swarming'],
     'gyp_release_x64_asan_minimal_symbols_coverage': [
       'gyp', 'release_bot', 'x64', 'asan', 'bb', 'coverage', 'lsan',
       'minimal_symbols', 'swarming'],
-    'gyp_release_x64_asan_symbolized_edge_verify_heap': [
-      'gyp', 'release_bot', 'x64', 'asan', 'edge', 'lsan', 'symbolized',
-      'v8_verify_heap'],
     'gyp_release_x64_cfi_symbolized': [
       'gyp', 'release_bot', 'x64', 'cfi', 'swarming', 'symbolized'],
-    'gyp_release_x64_clang': [
-      'gyp', 'release_bot', 'x64', 'clang', 'swarming'],
     'gyp_release_x64_gcc_coverage': [
       'gyp', 'release_bot', 'x64', 'coverage', 'gcc'],
-    'gyp_release_x64_minimal_symbols': [
-      'gyp', 'release_bot', 'x64', 'minimal_symbols', 'swarming'],
-    'gyp_release_x64_trybot': [
-      'gyp', 'release_trybot', 'x64', 'swarming'],
 
     # Gyp release configs for x86.
     'gyp_release_x86_disassembler': [
       'gyp', 'release_bot', 'x86', 'v8_enable_disassembler'],
     'gyp_release_x86_interpreted_regexp': [
       'gyp', 'release_bot', 'x86', 'v8_interpreted_regexp'],
-    'gyp_release_x86_minimal_symbols': [
-      'gyp', 'release_bot', 'x86', 'minimal_symbols', 'swarming'],
-    'gyp_release_x86_no_i18n_trybot': [
-      'gyp', 'release_trybot', 'x86', 'swarming', 'v8_no_i18n'],
-    'gyp_release_x86_no_snap_shared_minimal_symbols': [
-      'gyp', 'release', 'x86', 'goma', 'minimal_symbols', 'shared', 'swarming',
-      'v8_snapshot_none'],
     'gyp_release_x86_predictable': [
       'gyp', 'release_bot', 'x86', 'v8_enable_verify_predictable'],
-    'gyp_release_x86_trybot': [
-      'gyp', 'release_trybot', 'x86', 'swarming'],
   },
 
   'mixins': {
@@ -444,7 +457,7 @@
 
     'debug_bot': {
       'mixins': [
-        'debug', 'static', 'goma', 'v8_enable_slow_dchecks',
+        'debug', 'shared', 'goma', 'v8_enable_slow_dchecks',
         'v8_optimized_debug'],
     },
 
@@ -533,7 +546,8 @@
     },
 
     'simulate_mipsel': {
-      'gn_args': 'target_cpu="x86" v8_target_cpu="mipsel"',
+      'gn_args':
+          'target_cpu="x86" v8_target_cpu="mipsel" mips_arch_variant="r2"',
       'gyp_defines': 'target_arch=ia32 v8_target_arch=mipsel',
     },
 
@@ -577,9 +591,9 @@
       'gyp_defines': 'test_isolation_mode=prepare',
     },
 
-    # TODO(machenbach): Remove the symbolized config after the bots are  gone.
+    # TODO(machenbach): Remove the symbolized config after the bots are gone.
     'symbolized': {
-      'gn_args': 'symbolized=true',
+      'gn_args': 'v8_no_inline=true',
       'gyp_defines':
         'release_extra_cflags="-fno-inline-functions -fno-inline"',
     },
@@ -595,8 +609,8 @@
     },
 
     'v8_no_i18n': {
-      'gn_args': 'v8_enable_i18n_support=false',
-      'gyp_defines': 'v8_enable_i18n_support=0',
+      'gn_args': 'v8_enable_i18n_support=false icu_use_data_file=false',
+      'gyp_defines': 'v8_enable_i18n_support=0 icu_use_data_file_flag=0',
     },
 
     'v8_enable_disassembler': {
diff --git a/src/address-map.cc b/src/address-map.cc
index 61292bf..3122b33 100644
--- a/src/address-map.cc
+++ b/src/address-map.cc
@@ -13,7 +13,7 @@
 RootIndexMap::RootIndexMap(Isolate* isolate) {
   map_ = isolate->root_index_map();
   if (map_ != NULL) return;
-  map_ = new base::HashMap(base::HashMap::PointersMatch);
+  map_ = new base::HashMap();
   for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
     Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
     Object* root = isolate->heap()->root(root_index);
diff --git a/src/address-map.h b/src/address-map.h
index 0ce93d2..95e9cb0 100644
--- a/src/address-map.h
+++ b/src/address-map.h
@@ -189,9 +189,7 @@
 class SerializerReferenceMap : public AddressMapBase {
  public:
   SerializerReferenceMap()
-      : no_allocation_(),
-        map_(base::HashMap::PointersMatch),
-        attached_reference_index_(0) {}
+      : no_allocation_(), map_(), attached_reference_index_(0) {}
 
   SerializerReference Lookup(HeapObject* obj) {
     base::HashMap::Entry* entry = LookupEntry(&map_, obj, false);
diff --git a/src/allocation.h b/src/allocation.h
index 8581cc9..a92b71f 100644
--- a/src/allocation.h
+++ b/src/allocation.h
@@ -13,10 +13,10 @@
 // Called when allocation routines fail to allocate.
 // This function should not return, but should terminate the current
 // processing.
-void FatalProcessOutOfMemory(const char* message);
+V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(const char* message);
 
 // Superclass for classes managed with new & delete.
-class Malloced {
+class V8_EXPORT_PRIVATE Malloced {
  public:
   void* operator new(size_t size) { return New(size); }
   void  operator delete(void* p) { Delete(p); }
@@ -72,7 +72,7 @@
 // The normal strdup functions use malloc.  These versions of StrDup
 // and StrNDup uses new and calls the FatalProcessOutOfMemory handler
 // if allocation fails.
-char* StrDup(const char* str);
+V8_EXPORT_PRIVATE char* StrDup(const char* str);
 char* StrNDup(const char* str, int n);
 
 
diff --git a/src/api-arguments-inl.h b/src/api-arguments-inl.h
index eefdf35..bf72fc4 100644
--- a/src/api-arguments-inl.h
+++ b/src/api-arguments-inl.h
@@ -20,8 +20,6 @@
                                                          Handle<Name> name) { \
     Isolate* isolate = this->isolate();                                       \
     RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Function);        \
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(                            \
-        isolate, &tracing::TraceEventStatsTable::Function);                   \
     VMState<EXTERNAL> state(isolate);                                         \
     ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));              \
     PropertyCallbackInfo<ApiReturn> info(begin());                            \
@@ -46,8 +44,6 @@
                                                          uint32_t index) { \
     Isolate* isolate = this->isolate();                                    \
     RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Function);     \
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(                         \
-        isolate, &tracing::TraceEventStatsTable::Function);                \
     VMState<EXTERNAL> state(isolate);                                      \
     ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));           \
     PropertyCallbackInfo<ApiReturn> info(begin());                         \
@@ -68,9 +64,6 @@
   Isolate* isolate = this->isolate();
   RuntimeCallTimerScope timer(
       isolate, &RuntimeCallStats::GenericNamedPropertySetterCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate,
-      &tracing::TraceEventStatsTable::GenericNamedPropertySetterCallback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
   PropertyCallbackInfo<v8::Value> info(begin());
@@ -80,14 +73,27 @@
   return GetReturnValue<Object>(isolate);
 }
 
+Handle<Object> PropertyCallbackArguments::Call(
+    GenericNamedPropertyDefinerCallback f, Handle<Name> name,
+    const v8::PropertyDescriptor& desc) {
+  Isolate* isolate = this->isolate();
+  RuntimeCallTimerScope timer(
+      isolate, &RuntimeCallStats::GenericNamedPropertyDefinerCallback);
+  VMState<EXTERNAL> state(isolate);
+  ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
+  PropertyCallbackInfo<v8::Value> info(begin());
+  LOG(isolate,
+      ApiNamedPropertyAccess("interceptor-named-define", holder(), *name));
+  f(v8::Utils::ToLocal(name), desc, info);
+  return GetReturnValue<Object>(isolate);
+}
+
 Handle<Object> PropertyCallbackArguments::Call(IndexedPropertySetterCallback f,
                                                uint32_t index,
                                                Handle<Object> value) {
   Isolate* isolate = this->isolate();
   RuntimeCallTimerScope timer(isolate,
                               &RuntimeCallStats::IndexedPropertySetterCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::IndexedPropertySetterCallback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
   PropertyCallbackInfo<v8::Value> info(begin());
@@ -97,13 +103,26 @@
   return GetReturnValue<Object>(isolate);
 }
 
+Handle<Object> PropertyCallbackArguments::Call(
+    IndexedPropertyDefinerCallback f, uint32_t index,
+    const v8::PropertyDescriptor& desc) {
+  Isolate* isolate = this->isolate();
+  RuntimeCallTimerScope timer(
+      isolate, &RuntimeCallStats::IndexedPropertyDefinerCallback);
+  VMState<EXTERNAL> state(isolate);
+  ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
+  PropertyCallbackInfo<v8::Value> info(begin());
+  LOG(isolate,
+      ApiIndexedPropertyAccess("interceptor-indexed-define", holder(), index));
+  f(index, desc, info);
+  return GetReturnValue<Object>(isolate);
+}
+
 void PropertyCallbackArguments::Call(AccessorNameSetterCallback f,
                                      Handle<Name> name, Handle<Object> value) {
   Isolate* isolate = this->isolate();
   RuntimeCallTimerScope timer(isolate,
                               &RuntimeCallStats::AccessorNameSetterCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::AccessorNameSetterCallback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
   PropertyCallbackInfo<void> info(begin());
diff --git a/src/api-arguments.cc b/src/api-arguments.cc
index 6e347c7..f8d6c8f 100644
--- a/src/api-arguments.cc
+++ b/src/api-arguments.cc
@@ -13,8 +13,6 @@
 Handle<Object> FunctionCallbackArguments::Call(FunctionCallback f) {
   Isolate* isolate = this->isolate();
   RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::FunctionCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &internal::tracing::TraceEventStatsTable::FunctionCallback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
   FunctionCallbackInfo<v8::Value> info(begin(), argv_, argc_);
@@ -26,8 +24,6 @@
     IndexedPropertyEnumeratorCallback f) {
   Isolate* isolate = this->isolate();
   RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::PropertyCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &internal::tracing::TraceEventStatsTable::PropertyCallback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
   PropertyCallbackInfo<v8::Array> info(begin());
diff --git a/src/api-arguments.h b/src/api-arguments.h
index 0dfe618..9e01f3a 100644
--- a/src/api-arguments.h
+++ b/src/api-arguments.h
@@ -119,9 +119,16 @@
   inline Handle<Object> Call(GenericNamedPropertySetterCallback f,
                              Handle<Name> name, Handle<Object> value);
 
+  inline Handle<Object> Call(GenericNamedPropertyDefinerCallback f,
+                             Handle<Name> name,
+                             const v8::PropertyDescriptor& desc);
+
   inline Handle<Object> Call(IndexedPropertySetterCallback f, uint32_t index,
                              Handle<Object> value);
 
+  inline Handle<Object> Call(IndexedPropertyDefinerCallback f, uint32_t index,
+                             const v8::PropertyDescriptor& desc);
+
   inline void Call(AccessorNameSetterCallback f, Handle<Name> name,
                    Handle<Object> value);
 
diff --git a/src/api-natives.cc b/src/api-natives.cc
index 0f3c3b6..ea2cce5 100644
--- a/src/api-natives.cc
+++ b/src/api-natives.cc
@@ -17,42 +17,39 @@
 
 class InvokeScope {
  public:
-  explicit InvokeScope(Isolate* isolate) : save_context_(isolate) {}
+  explicit InvokeScope(Isolate* isolate)
+      : isolate_(isolate), save_context_(isolate) {}
   ~InvokeScope() {
-    Isolate* isolate = save_context_.isolate();
-    bool has_exception = isolate->has_pending_exception();
+    bool has_exception = isolate_->has_pending_exception();
     if (has_exception) {
-      isolate->ReportPendingMessages();
+      isolate_->ReportPendingMessages();
     } else {
-      isolate->clear_pending_message();
+      isolate_->clear_pending_message();
     }
   }
 
  private:
+  Isolate* isolate_;
   SaveContext save_context_;
 };
 
-enum class CacheCheck { kCheck, kSkip };
+MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
+                                        Handle<ObjectTemplateInfo> data,
+                                        Handle<JSReceiver> new_target,
+                                        bool is_hidden_prototype);
 
-MaybeHandle<JSObject> InstantiateObject(
-    Isolate* isolate, Handle<ObjectTemplateInfo> data,
-    Handle<JSReceiver> new_target, CacheCheck cache_check = CacheCheck::kCheck,
-    bool is_hidden_prototype = false);
-
-MaybeHandle<JSFunction> InstantiateFunction(
-    Isolate* isolate, Handle<FunctionTemplateInfo> data,
-    CacheCheck cache_check = CacheCheck::kCheck,
-    Handle<Name> name = Handle<Name>());
+MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
+                                            Handle<FunctionTemplateInfo> data,
+                                            Handle<Name> name = Handle<Name>());
 
 MaybeHandle<Object> Instantiate(Isolate* isolate, Handle<Object> data,
                                 Handle<Name> name = Handle<Name>()) {
   if (data->IsFunctionTemplateInfo()) {
     return InstantiateFunction(isolate,
-                               Handle<FunctionTemplateInfo>::cast(data),
-                               CacheCheck::kCheck, name);
+                               Handle<FunctionTemplateInfo>::cast(data), name);
   } else if (data->IsObjectTemplateInfo()) {
     return InstantiateObject(isolate, Handle<ObjectTemplateInfo>::cast(data),
-                             Handle<JSReceiver>());
+                             Handle<JSReceiver>(), false);
   } else {
     return data;
   }
@@ -199,15 +196,14 @@
     Handle<FixedArray> array =
         isolate->factory()->NewFixedArray(max_number_of_properties);
 
-    info = *data;
-    while (info != nullptr) {
+    for (Handle<TemplateInfoT> temp(*data); *temp != nullptr;
+         temp = handle(temp->GetParent(isolate), isolate)) {
       // Accumulate accessors.
-      Object* maybe_properties = info->property_accessors();
+      Object* maybe_properties = temp->property_accessors();
       if (!maybe_properties->IsUndefined(isolate)) {
         valid_descriptors = AccessorInfo::AppendUnique(
             handle(maybe_properties, isolate), array, valid_descriptors);
       }
-      info = info->GetParent(isolate);
     }
 
     // Install accumulated accessors.
@@ -339,17 +335,9 @@
   return fun->context()->native_context() == isolate->raw_native_context();
 }
 
-MaybeHandle<JSObject> InstantiateObjectWithInvokeScope(
-    Isolate* isolate, Handle<ObjectTemplateInfo> info,
-    Handle<JSReceiver> new_target) {
-  InvokeScope invoke_scope(isolate);
-  return InstantiateObject(isolate, info, new_target, CacheCheck::kSkip);
-}
-
 MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
                                         Handle<ObjectTemplateInfo> info,
                                         Handle<JSReceiver> new_target,
-                                        CacheCheck cache_check,
                                         bool is_hidden_prototype) {
   Handle<JSFunction> constructor;
   int serial_number = Smi::cast(info->serial_number())->value();
@@ -363,7 +351,7 @@
   }
   // Fast path.
   Handle<JSObject> result;
-  if (serial_number && cache_check == CacheCheck::kCheck) {
+  if (serial_number) {
     if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
       return isolate->factory()->CopyJSObject(result);
     }
@@ -397,6 +385,7 @@
   if (info->immutable_proto()) {
     JSObject::SetImmutableProto(object);
   }
+  // TODO(dcarney): is this necessary?
   JSObject::MigrateSlowToFast(result, 0, "ApiNatives::InstantiateObject");
 
   if (serial_number) {
@@ -406,18 +395,12 @@
   return result;
 }
 
-MaybeHandle<JSFunction> InstantiateFunctionWithInvokeScope(
-    Isolate* isolate, Handle<FunctionTemplateInfo> info) {
-  InvokeScope invoke_scope(isolate);
-  return InstantiateFunction(isolate, info, CacheCheck::kSkip);
-}
 
 MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
                                             Handle<FunctionTemplateInfo> data,
-                                            CacheCheck cache_check,
                                             Handle<Name> name) {
   int serial_number = Smi::cast(data->serial_number())->value();
-  if (serial_number && cache_check == CacheCheck::kCheck) {
+  if (serial_number) {
     Handle<JSObject> result;
     if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
       return Handle<JSFunction>::cast(result);
@@ -434,8 +417,7 @@
           InstantiateObject(
               isolate,
               handle(ObjectTemplateInfo::cast(prototype_templ), isolate),
-              Handle<JSReceiver>(), CacheCheck::kCheck,
-              data->hidden_prototype()),
+              Handle<JSReceiver>(), data->hidden_prototype()),
           JSFunction);
     }
     Object* parent = data->parent_template();
@@ -505,31 +487,17 @@
 }  // namespace
 
 MaybeHandle<JSFunction> ApiNatives::InstantiateFunction(
-    Handle<FunctionTemplateInfo> info) {
-  Isolate* isolate = info->GetIsolate();
-  int serial_number = Smi::cast(info->serial_number())->value();
-  if (serial_number) {
-    Handle<JSObject> result;
-    if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
-      return Handle<JSFunction>::cast(result);
-    }
-  }
-  return InstantiateFunctionWithInvokeScope(isolate, info);
+    Handle<FunctionTemplateInfo> data) {
+  Isolate* isolate = data->GetIsolate();
+  InvokeScope invoke_scope(isolate);
+  return ::v8::internal::InstantiateFunction(isolate, data);
 }
 
 MaybeHandle<JSObject> ApiNatives::InstantiateObject(
-    Handle<ObjectTemplateInfo> info, Handle<JSReceiver> new_target) {
-  Isolate* isolate = info->GetIsolate();
-  int serial_number = Smi::cast(info->serial_number())->value();
-  if (serial_number && !new_target.is_null() &&
-      IsSimpleInstantiation(isolate, *info, *new_target)) {
-    // Fast path.
-    Handle<JSObject> result;
-    if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
-      return isolate->factory()->CopyJSObject(result);
-    }
-  }
-  return InstantiateObjectWithInvokeScope(isolate, info, new_target);
+    Handle<ObjectTemplateInfo> data, Handle<JSReceiver> new_target) {
+  Isolate* isolate = data->GetIsolate();
+  InvokeScope invoke_scope(isolate);
+  return ::v8::internal::InstantiateObject(isolate, data, new_target, false);
 }
 
 MaybeHandle<JSObject> ApiNatives::InstantiateRemoteObject(
diff --git a/src/api.cc b/src/api.cc
index 6858a32..44933b9 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -24,6 +24,7 @@
 #include "src/base/functional.h"
 #include "src/base/platform/platform.h"
 #include "src/base/platform/time.h"
+#include "src/base/safe_conversions.h"
 #include "src/base/utils/random-number-generator.h"
 #include "src/bootstrapper.h"
 #include "src/char-predicates-inl.h"
@@ -68,6 +69,7 @@
 #include "src/unicode-inl.h"
 #include "src/v8.h"
 #include "src/v8threads.h"
+#include "src/value-serializer.h"
 #include "src/version.h"
 #include "src/vm-state-inl.h"
 #include "src/wasm/wasm-module.h"
@@ -77,9 +79,6 @@
 #define LOG_API(isolate, class_name, function_name)                       \
   i::RuntimeCallTimerScope _runtime_timer(                                \
       isolate, &i::RuntimeCallStats::API_##class_name##_##function_name); \
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(                          \
-      isolate, &internal::tracing::TraceEventStatsTable::                 \
-                   API_##class_name##_##function_name);                   \
   LOG(isolate, ApiEntryCall("v8::" #class_name "::" #function_name))
 
 #define ENTER_V8(isolate) i::VMState<v8::OTHER> __state__((isolate))
@@ -105,6 +104,16 @@
   PREPARE_FOR_EXECUTION_GENERIC(isolate, context, class_name, function_name,   \
                                 bailout_value, HandleScopeClass, do_callback);
 
+#define PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(      \
+    category, name, context, class_name, function_name, bailout_value,       \
+    HandleScopeClass, do_callback)                                           \
+  auto isolate = context.IsEmpty()                                           \
+                     ? i::Isolate::Current()                                 \
+                     : reinterpret_cast<i::Isolate*>(context->GetIsolate()); \
+  TRACE_EVENT_CALL_STATS_SCOPED(isolate, category, name);                    \
+  PREPARE_FOR_EXECUTION_GENERIC(isolate, context, class_name, function_name, \
+                                bailout_value, HandleScopeClass, do_callback);
+
 #define PREPARE_FOR_EXECUTION_WITH_ISOLATE(isolate, class_name, function_name, \
                                            T)                                  \
   PREPARE_FOR_EXECUTION_GENERIC(isolate, Local<Context>(), class_name,         \
@@ -126,6 +135,10 @@
   PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name,       \
                                      Nothing<T>(), i::HandleScope, false)
 
+#define PREPARE_FOR_EXECUTION_BOOL(context, class_name, function_name)   \
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \
+                                     false, i::HandleScope, false)
+
 #define EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, value) \
   do {                                                 \
     if (has_pending_exception) {                       \
@@ -142,6 +155,8 @@
 #define RETURN_ON_FAILED_EXECUTION_PRIMITIVE(T) \
   EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, Nothing<T>())
 
+#define RETURN_ON_FAILED_EXECUTION_BOOL() \
+  EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, false)
 
 #define RETURN_TO_LOCAL_UNCHECKED(maybe_local, T) \
   return maybe_local.FromMaybe(Local<T>());
@@ -513,7 +528,8 @@
 
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of the context.
-  isolate->heap()->CollectAllAvailableGarbage("mksnapshot");
+  isolate->heap()->CollectAllAvailableGarbage(
+      i::GarbageCollectionReason::kSnapshotCreator);
   isolate->heap()->CompactWeakFixedArrays();
 
   i::DisallowHeapAllocation no_gc_from_here_on;
@@ -770,11 +786,6 @@
   return result.location();
 }
 
-void V8::RegisterExternallyReferencedObject(i::Object** object,
-                                            i::Isolate* isolate) {
-  isolate->heap()->RegisterExternallyReferencedObject(object);
-}
-
 void V8::MakeWeak(i::Object** location, void* parameter,
                   int internal_field_index1, int internal_field_index2,
                   WeakCallbackInfo<void>::Callback weak_callback) {
@@ -1503,12 +1514,17 @@
                       signature, i::FLAG_disable_old_api_accessors);
 }
 
-template <typename Getter, typename Setter, typename Query, typename Deleter,
-          typename Enumerator>
+template <typename Getter, typename Setter, typename Query, typename Descriptor,
+          typename Deleter, typename Enumerator, typename Definer>
 static i::Handle<i::InterceptorInfo> CreateInterceptorInfo(
     i::Isolate* isolate, Getter getter, Setter setter, Query query,
-    Deleter remover, Enumerator enumerator, Local<Value> data,
-    PropertyHandlerFlags flags) {
+    Descriptor descriptor, Deleter remover, Enumerator enumerator,
+    Definer definer, Local<Value> data, PropertyHandlerFlags flags) {
+  DCHECK(query == nullptr ||
+         descriptor == nullptr);  // Either intercept attributes or descriptor.
+  DCHECK(query == nullptr ||
+         definer == nullptr);  // A definer must be paired with the
+                               // descriptor callback, not with query.
   auto obj = i::Handle<i::InterceptorInfo>::cast(
       isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE));
   obj->set_flags(0);
@@ -1516,8 +1532,10 @@
   if (getter != 0) SET_FIELD_WRAPPED(obj, set_getter, getter);
   if (setter != 0) SET_FIELD_WRAPPED(obj, set_setter, setter);
   if (query != 0) SET_FIELD_WRAPPED(obj, set_query, query);
+  if (descriptor != 0) SET_FIELD_WRAPPED(obj, set_descriptor, descriptor);
   if (remover != 0) SET_FIELD_WRAPPED(obj, set_deleter, remover);
   if (enumerator != 0) SET_FIELD_WRAPPED(obj, set_enumerator, enumerator);
+  if (definer != 0) SET_FIELD_WRAPPED(obj, set_definer, definer);
   obj->set_can_intercept_symbols(
       !(static_cast<int>(flags) &
         static_cast<int>(PropertyHandlerFlags::kOnlyInterceptStrings)));
@@ -1533,40 +1551,37 @@
   return obj;
 }
 
-template <typename Getter, typename Setter, typename Query, typename Deleter,
-          typename Enumerator>
-static void ObjectTemplateSetNamedPropertyHandler(ObjectTemplate* templ,
-                                                  Getter getter, Setter setter,
-                                                  Query query, Deleter remover,
-                                                  Enumerator enumerator,
-                                                  Local<Value> data,
-                                                  PropertyHandlerFlags flags) {
+template <typename Getter, typename Setter, typename Query, typename Descriptor,
+          typename Deleter, typename Enumerator, typename Definer>
+static void ObjectTemplateSetNamedPropertyHandler(
+    ObjectTemplate* templ, Getter getter, Setter setter, Query query,
+    Descriptor descriptor, Deleter remover, Enumerator enumerator,
+    Definer definer, Local<Value> data, PropertyHandlerFlags flags) {
   i::Isolate* isolate = Utils::OpenHandle(templ)->GetIsolate();
   ENTER_V8(isolate);
   i::HandleScope scope(isolate);
   auto cons = EnsureConstructor(isolate, templ);
   EnsureNotInstantiated(cons, "ObjectTemplateSetNamedPropertyHandler");
-  auto obj = CreateInterceptorInfo(isolate, getter, setter, query, remover,
-                                   enumerator, data, flags);
+  auto obj = CreateInterceptorInfo(isolate, getter, setter, query, descriptor,
+                                   remover, enumerator, definer, data, flags);
   cons->set_named_property_handler(*obj);
 }
 
-
 void ObjectTemplate::SetNamedPropertyHandler(
     NamedPropertyGetterCallback getter, NamedPropertySetterCallback setter,
     NamedPropertyQueryCallback query, NamedPropertyDeleterCallback remover,
     NamedPropertyEnumeratorCallback enumerator, Local<Value> data) {
   ObjectTemplateSetNamedPropertyHandler(
-      this, getter, setter, query, remover, enumerator, data,
+      this, getter, setter, query, nullptr, remover, enumerator, nullptr, data,
       PropertyHandlerFlags::kOnlyInterceptStrings);
 }
 
-
 void ObjectTemplate::SetHandler(
     const NamedPropertyHandlerConfiguration& config) {
   ObjectTemplateSetNamedPropertyHandler(
-      this, config.getter, config.setter, config.query, config.deleter,
-      config.enumerator, config.data, config.flags);
+      this, config.getter, config.setter, config.query, config.descriptor,
+      config.deleter, config.enumerator, config.definer, config.data,
+      config.flags);
 }
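
SetHandler now forwards the descriptor and definer callbacks as well. A hedged sketch of installing a definer interceptor (callback and function names are illustrative; the member assignment is used for brevity, and the interception convention noted below is an assumption):

    #include <v8.h>  // adjust the include path to your build

    // Invoked for Object.defineProperty and friends on instances of the
    // template; the signature matches the Call(...) overload added above.
    void DefineInterceptor(v8::Local<v8::Name> property,
                           const v8::PropertyDescriptor& desc,
                           const v8::PropertyCallbackInfo<v8::Value>& info) {
      // Assumption: setting any return value signals that the definition was
      // intercepted; returning without setting one falls through to V8.
      info.GetReturnValue().Set(true);
    }

    void InstallDefiner(v8::Local<v8::ObjectTemplate> templ) {
      v8::NamedPropertyHandlerConfiguration config;
      config.definer = DefineInterceptor;  // cannot be combined with `query`
      templ->SetHandler(config);
    }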
 
 
@@ -1626,13 +1641,14 @@
   SET_FIELD_WRAPPED(info, set_callback, callback);
   auto named_interceptor = CreateInterceptorInfo(
       isolate, named_handler.getter, named_handler.setter, named_handler.query,
-      named_handler.deleter, named_handler.enumerator, named_handler.data,
-      named_handler.flags);
+      named_handler.descriptor, named_handler.deleter, named_handler.enumerator,
+      named_handler.definer, named_handler.data, named_handler.flags);
   info->set_named_interceptor(*named_interceptor);
   auto indexed_interceptor = CreateInterceptorInfo(
       isolate, indexed_handler.getter, indexed_handler.setter,
-      indexed_handler.query, indexed_handler.deleter,
-      indexed_handler.enumerator, indexed_handler.data, indexed_handler.flags);
+      indexed_handler.query, indexed_handler.descriptor,
+      indexed_handler.deleter, indexed_handler.enumerator,
+      indexed_handler.definer, indexed_handler.data, indexed_handler.flags);
   info->set_indexed_interceptor(*indexed_interceptor);
 
   if (data.IsEmpty()) {
@@ -1651,9 +1667,10 @@
   i::HandleScope scope(isolate);
   auto cons = EnsureConstructor(isolate, this);
   EnsureNotInstantiated(cons, "v8::ObjectTemplate::SetHandler");
-  auto obj = CreateInterceptorInfo(
-      isolate, config.getter, config.setter, config.query, config.deleter,
-      config.enumerator, config.data, config.flags);
+  auto obj = CreateInterceptorInfo(isolate, config.getter, config.setter,
+                                   config.query, config.descriptor,
+                                   config.deleter, config.enumerator,
+                                   config.definer, config.data, config.flags);
   cons->set_indexed_property_handler(*obj);
 }
 
@@ -1834,17 +1851,19 @@
 
 
 MaybeLocal<Value> Script::Run(Local<Context> context) {
-  PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Script, Run, Value)
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Script, Run, MaybeLocal<Value>(),
+      InternalEscapableScope, true);
   i::HistogramTimerScope execute_timer(isolate->counters()->execute(), true);
   i::AggregatingHistogramTimerScope timer(isolate->counters()->compile_lazy());
   i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
-  TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
   auto fun = i::Handle<i::JSFunction>::cast(Utils::OpenHandle(this));
+
   i::Handle<i::Object> receiver = isolate->global_proxy();
   Local<Value> result;
-  has_pending_exception =
-      !ToLocal<Value>(i::Execution::Call(isolate, fun, receiver, 0, NULL),
-                      &result);
+  has_pending_exception = !ToLocal<Value>(
+      i::Execution::Call(isolate, fun, receiver, 0, nullptr), &result);
+
   RETURN_ON_FAILED_EXECUTION(Value);
   RETURN_ESCAPED(result);
 }
@@ -1866,6 +1885,58 @@
       i::Handle<i::SharedFunctionInfo>(i::JSFunction::cast(*obj)->shared()));
 }
 
+int Module::GetModuleRequestsLength() const {
+  i::Handle<i::Module> self = Utils::OpenHandle(this);
+  return self->info()->module_requests()->length();
+}
+
+Local<String> Module::GetModuleRequest(int i) const {
+  CHECK_GE(i, 0);
+  i::Handle<i::Module> self = Utils::OpenHandle(this);
+  i::Isolate* isolate = self->GetIsolate();
+  i::Handle<i::FixedArray> module_requests(self->info()->module_requests(),
+                                           isolate);
+  CHECK_LT(i, module_requests->length());
+  return ToApiHandle<String>(i::handle(module_requests->get(i), isolate));
+}
+
+void Module::SetEmbedderData(Local<Value> data) {
+  Utils::OpenHandle(this)->set_embedder_data(*Utils::OpenHandle(*data));
+}
+
+Local<Value> Module::GetEmbedderData() const {
+  auto self = Utils::OpenHandle(this);
+  return ToApiHandle<Value>(
+      i::handle(self->embedder_data(), self->GetIsolate()));
+}
+
+bool Module::Instantiate(Local<Context> context,
+                         Module::ResolveCallback callback,
+                         Local<Value> callback_data) {
+  PREPARE_FOR_EXECUTION_BOOL(context, Module, Instantiate);
+  has_pending_exception = !i::Module::Instantiate(
+      Utils::OpenHandle(this), context, callback, callback_data);
+  RETURN_ON_FAILED_EXECUTION_BOOL();
+  return true;
+}
+
+MaybeLocal<Value> Module::Evaluate(Local<Context> context) {
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Module, Evaluate, MaybeLocal<Value>(),
+      InternalEscapableScope, true);
+  i::HistogramTimerScope execute_timer(isolate->counters()->execute(), true);
+  i::AggregatingHistogramTimerScope timer(isolate->counters()->compile_lazy());
+  i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
+
+  i::Handle<i::Module> self = Utils::OpenHandle(this);
+  // It's an API error to call Evaluate before Instantiate.
+  CHECK(self->code()->IsJSFunction());
+
+  Local<Value> result;
+  has_pending_exception = !ToLocal(i::Module::Evaluate(self), &result);
+  RETURN_ON_FAILED_EXECUTION(Value);
+  RETURN_ESCAPED(result);
+}
 
 MaybeLocal<UnboundScript> ScriptCompiler::CompileUnboundInternal(
     Isolate* v8_isolate, Source* source, CompileOptions options,
@@ -1976,16 +2047,16 @@
   RETURN_TO_LOCAL_UNCHECKED(Compile(context, source, options), Script);
 }
 
+MaybeLocal<Module> ScriptCompiler::CompileModule(Isolate* isolate,
+                                                 Source* source) {
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
 
-MaybeLocal<Script> ScriptCompiler::CompileModule(Local<Context> context,
-                                                 Source* source,
-                                                 CompileOptions options) {
-  auto isolate = context->GetIsolate();
-  auto maybe = CompileUnboundInternal(isolate, source, options, true);
-  Local<UnboundScript> generic;
-  if (!maybe.ToLocal(&generic)) return MaybeLocal<Script>();
-  v8::Context::Scope scope(context);
-  return generic->BindToCurrentContext();
+  auto maybe = CompileUnboundInternal(isolate, source, kNoCompileOptions, true);
+  Local<UnboundScript> unbound;
+  if (!maybe.ToLocal(&unbound)) return MaybeLocal<Module>();
+
+  i::Handle<i::SharedFunctionInfo> shared = Utils::OpenHandle(*unbound);
+  return ToApiHandle<Module>(i_isolate->factory()->NewModule(shared));
 }
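
Together with the Module methods added above, the embedder flow becomes: compile the module source, instantiate it with a resolve callback, then evaluate. A hedged sketch; the ResolveCallback parameter list is an assumption about this API revision, while CompileModule, GetModuleRequestsLength, GetModuleRequest, Instantiate and Evaluate are taken from the diff:

    #include <v8.h>  // adjust the include path to your build

    // Assumed ResolveCallback shape: map an import specifier, relative to the
    // referrer module, onto the module it resolves to.
    v8::MaybeLocal<v8::Module> ResolveToImport(v8::Local<v8::Context> context,
                                               v8::Local<v8::String> specifier,
                                               v8::Local<v8::Module> referrer,
                                               v8::Local<v8::Value> data) {
      // A real embedder would consult its own specifier-to-module map here.
      return v8::MaybeLocal<v8::Module>();
    }

    v8::MaybeLocal<v8::Value> RunModule(v8::Isolate* isolate,
                                        v8::Local<v8::Context> context,
                                        v8::ScriptCompiler::Source* source) {
      v8::Local<v8::Module> module;
      if (!v8::ScriptCompiler::CompileModule(isolate, source).ToLocal(&module))
        return v8::MaybeLocal<v8::Value>();
      // Import specifiers are available before instantiation.
      for (int i = 0; i < module->GetModuleRequestsLength(); ++i) {
        v8::Local<v8::String> request = module->GetModuleRequest(i);
        (void)request;  // e.g. prefetch or validate the dependency
      }
      // Evaluate may only be called after a successful Instantiate (CHECKed
      // in Module::Evaluate above).
      if (!module->Instantiate(context, ResolveToImport, v8::Local<v8::Value>()))
        return v8::MaybeLocal<v8::Value>();
      return module->Evaluate(context);
    }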
 
 
@@ -2084,7 +2155,13 @@
         Utils::OpenHandle(*context_extensions[i]);
     if (!extension->IsJSObject()) return Local<Function>();
     i::Handle<i::JSFunction> closure(context->closure(), isolate);
-    context = factory->NewWithContext(closure, context, extension);
+    context = factory->NewWithContext(
+        closure, context,
+        i::ScopeInfo::CreateForWithScope(
+            isolate, context->IsNativeContext()
+                         ? i::Handle<i::ScopeInfo>::null()
+                         : i::Handle<i::ScopeInfo>(context->scope_info())),
+        extension);
   }
 
   i::Handle<i::Object> name_obj;
@@ -2138,6 +2215,9 @@
 
 ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreamingScript(
     Isolate* v8_isolate, StreamedSource* source, CompileOptions options) {
+  if (!i::FLAG_script_streaming) {
+    return nullptr;
+  }
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
   return new i::BackgroundParsingTask(source->impl(), options,
                                       i::FLAG_stack_size, isolate);
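
StartStreamingScript can now return nullptr when script streaming is disabled, so callers must null-check before posting the task to a worker. A minimal hedged sketch (the fallback path is left to the embedder):

    #include <v8.h>  // adjust the include path to your build

    void MaybeStartStreaming(v8::Isolate* isolate,
                             v8::ScriptCompiler::StreamedSource* source) {
      v8::ScriptCompiler::ScriptStreamingTask* task =
          v8::ScriptCompiler::StartStreamingScript(
              isolate, source, v8::ScriptCompiler::kNoCompileOptions);
      if (task == nullptr) {
        // Streaming is off (FLAG_script_streaming is false); fall back to a
        // synchronous ScriptCompiler::Compile on the main thread.
        return;
      }
      // ...otherwise post `task` to a background thread as before.
    }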
@@ -2171,17 +2251,19 @@
   }
 
   source->info->set_script(script);
-  source->info->set_context(isolate->native_context());
 
-  // Create a canonical handle scope before internalizing parsed values if
-  // compiling bytecode. This is required for off-thread bytecode generation.
-  std::unique_ptr<i::CanonicalHandleScope> canonical;
-  if (i::FLAG_ignition) canonical.reset(new i::CanonicalHandleScope(isolate));
+  {
+    // Create a canonical handle scope if compiling ignition bytecode. This is
+    // required by the constant array builder to de-duplicate objects without
+    // dereferencing handles.
+    std::unique_ptr<i::CanonicalHandleScope> canonical;
+    if (i::FLAG_ignition) canonical.reset(new i::CanonicalHandleScope(isolate));
 
-  // Do the parsing tasks which need to be done on the main thread. This will
-  // also handle parse errors.
-  source->parser->Internalize(isolate, script,
-                              source->info->literal() == nullptr);
+    // Do the parsing tasks which need to be done on the main thread. This will
+    // also handle parse errors.
+    source->parser->Internalize(isolate, script,
+                                source->info->literal() == nullptr);
+  }
   source->parser->HandleSourceURLComments(isolate, script);
 
   i::Handle<i::SharedFunctionInfo> result;
@@ -2192,9 +2274,10 @@
   }
   has_pending_exception = result.is_null();
   if (has_pending_exception) isolate->ReportPendingMessages();
-  RETURN_ON_FAILED_EXECUTION(Script);
 
-  source->info->clear_script();  // because script goes out of scope.
+  source->Release();
+
+  RETURN_ON_FAILED_EXECUTION(Script);
 
   Local<UnboundScript> generic = ToApiHandle<UnboundScript>(result);
   if (generic.IsEmpty()) return Local<Script>();
@@ -2263,8 +2346,8 @@
   ResetInternal();
   // Special handling for simulators which have a separate JS stack.
   js_stack_comparable_address_ =
-      reinterpret_cast<void*>(v8::internal::SimulatorStack::RegisterCTryCatch(
-          isolate_, v8::internal::GetCurrentStackPosition()));
+      reinterpret_cast<void*>(i::SimulatorStack::RegisterCTryCatch(
+          isolate_, i::GetCurrentStackPosition()));
   isolate_->RegisterTryCatchHandler(this);
 }
 
@@ -2280,8 +2363,8 @@
   ResetInternal();
   // Special handling for simulators which have a separate JS stack.
   js_stack_comparable_address_ =
-      reinterpret_cast<void*>(v8::internal::SimulatorStack::RegisterCTryCatch(
-          isolate_, v8::internal::GetCurrentStackPosition()));
+      reinterpret_cast<void*>(i::SimulatorStack::RegisterCTryCatch(
+          isolate_, i::GetCurrentStackPosition()));
   isolate_->RegisterTryCatchHandler(this);
 }
 
@@ -2300,7 +2383,7 @@
       isolate_->RestorePendingMessageFromTryCatch(this);
     }
     isolate_->UnregisterTryCatchHandler(this);
-    v8::internal::SimulatorStack::UnregisterCTryCatch(isolate_);
+    i::SimulatorStack::UnregisterCTryCatch(isolate_);
     reinterpret_cast<Isolate*>(isolate_)->ThrowException(exc);
     DCHECK(!isolate_->thread_local_top()->rethrowing_message_);
   } else {
@@ -2311,7 +2394,7 @@
       isolate_->CancelScheduledExceptionFromTryCatch(this);
     }
     isolate_->UnregisterTryCatchHandler(this);
-    v8::internal::SimulatorStack::UnregisterCTryCatch(isolate_);
+    i::SimulatorStack::UnregisterCTryCatch(isolate_);
   }
 }
 
@@ -2832,6 +2915,205 @@
   RETURN_ESCAPED(result);
 }
 
+// --- V a l u e   S e r i a l i z a t i o n ---
+
+Maybe<bool> ValueSerializer::Delegate::WriteHostObject(Isolate* v8_isolate,
+                                                       Local<Object> object) {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
+  isolate->ScheduleThrow(*isolate->factory()->NewError(
+      isolate->error_function(), i::MessageTemplate::kDataCloneError,
+      Utils::OpenHandle(*object)));
+  return Nothing<bool>();
+}
+
+struct ValueSerializer::PrivateData {
+  explicit PrivateData(i::Isolate* i, ValueSerializer::Delegate* delegate)
+      : isolate(i), serializer(i, delegate) {}
+  i::Isolate* isolate;
+  i::ValueSerializer serializer;
+};
+
+ValueSerializer::ValueSerializer(Isolate* isolate)
+    : ValueSerializer(isolate, nullptr) {}
+
+ValueSerializer::ValueSerializer(Isolate* isolate, Delegate* delegate)
+    : private_(
+          new PrivateData(reinterpret_cast<i::Isolate*>(isolate), delegate)) {}
+
+ValueSerializer::~ValueSerializer() { delete private_; }
+
+void ValueSerializer::WriteHeader() { private_->serializer.WriteHeader(); }
+
+Maybe<bool> ValueSerializer::WriteValue(Local<Context> context,
+                                        Local<Value> value) {
+  PREPARE_FOR_EXECUTION_PRIMITIVE(context, ValueSerializer, WriteValue, bool);
+  i::Handle<i::Object> object = Utils::OpenHandle(*value);
+  Maybe<bool> result = private_->serializer.WriteObject(object);
+  has_pending_exception = result.IsNothing();
+  RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+  return result;
+}
+
+std::vector<uint8_t> ValueSerializer::ReleaseBuffer() {
+  return private_->serializer.ReleaseBuffer();
+}
+
+void ValueSerializer::TransferArrayBuffer(uint32_t transfer_id,
+                                          Local<ArrayBuffer> array_buffer) {
+  private_->serializer.TransferArrayBuffer(transfer_id,
+                                           Utils::OpenHandle(*array_buffer));
+}
+
+void ValueSerializer::TransferSharedArrayBuffer(
+    uint32_t transfer_id, Local<SharedArrayBuffer> shared_array_buffer) {
+  private_->serializer.TransferArrayBuffer(
+      transfer_id, Utils::OpenHandle(*shared_array_buffer));
+}
+
+void ValueSerializer::WriteUint32(uint32_t value) {
+  private_->serializer.WriteUint32(value);
+}
+
+void ValueSerializer::WriteUint64(uint64_t value) {
+  private_->serializer.WriteUint64(value);
+}
+
+void ValueSerializer::WriteDouble(double value) {
+  private_->serializer.WriteDouble(value);
+}
+
+void ValueSerializer::WriteRawBytes(const void* source, size_t length) {
+  private_->serializer.WriteRawBytes(source, length);
+}
+
+MaybeLocal<Object> ValueDeserializer::Delegate::ReadHostObject(
+    Isolate* v8_isolate) {
+  i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
+  isolate->ScheduleThrow(*isolate->factory()->NewError(
+      isolate->error_function(),
+      i::MessageTemplate::kDataCloneDeserializationError));
+  return MaybeLocal<Object>();
+}
+
+struct ValueDeserializer::PrivateData {
+  PrivateData(i::Isolate* i, i::Vector<const uint8_t> data, Delegate* delegate)
+      : isolate(i), deserializer(i, data, delegate) {}
+  i::Isolate* isolate;
+  i::ValueDeserializer deserializer;
+  bool has_aborted = false;
+  bool supports_legacy_wire_format = false;
+};
+
+ValueDeserializer::ValueDeserializer(Isolate* isolate, const uint8_t* data,
+                                     size_t size)
+    : ValueDeserializer(isolate, data, size, nullptr) {}
+
+ValueDeserializer::ValueDeserializer(Isolate* isolate, const uint8_t* data,
+                                     size_t size, Delegate* delegate) {
+  if (base::IsValueInRangeForNumericType<int>(size)) {
+    private_ = new PrivateData(
+        reinterpret_cast<i::Isolate*>(isolate),
+        i::Vector<const uint8_t>(data, static_cast<int>(size)), delegate);
+  } else {
+    private_ = new PrivateData(reinterpret_cast<i::Isolate*>(isolate),
+                               i::Vector<const uint8_t>(nullptr, 0), nullptr);
+    private_->has_aborted = true;
+  }
+}
+
+ValueDeserializer::~ValueDeserializer() { delete private_; }
+
+Maybe<bool> ValueDeserializer::ReadHeader(Local<Context> context) {
+  PREPARE_FOR_EXECUTION_PRIMITIVE(context, ValueDeserializer, ReadHeader, bool);
+
+  // We could have aborted during the constructor.
+  // If so, ReadHeader is where we report it.
+  if (private_->has_aborted) {
+    isolate->Throw(*isolate->factory()->NewError(
+        i::MessageTemplate::kDataCloneDeserializationError));
+    has_pending_exception = true;
+    RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+  }
+
+  bool read_header = false;
+  has_pending_exception = !private_->deserializer.ReadHeader().To(&read_header);
+  RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+  DCHECK(read_header);
+
+  // TODO(jbroman): Today, all wire formats are "legacy". When a more supported
+  // format is added, compare the version of the internal serializer to the
+  // minimum non-legacy version number.
+  if (!private_->supports_legacy_wire_format) {
+    isolate->Throw(*isolate->factory()->NewError(
+        i::MessageTemplate::kDataCloneDeserializationVersionError));
+    has_pending_exception = true;
+    RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+  }
+
+  return Just(true);
+}
+
+Maybe<bool> ValueDeserializer::ReadHeader() {
+  Isolate* isolate = reinterpret_cast<Isolate*>(private_->isolate);
+  return ReadHeader(isolate->GetEnteredContext());
+}
+
+void ValueDeserializer::SetSupportsLegacyWireFormat(
+    bool supports_legacy_wire_format) {
+  private_->supports_legacy_wire_format = supports_legacy_wire_format;
+}
+
+uint32_t ValueDeserializer::GetWireFormatVersion() const {
+  CHECK(!private_->has_aborted);
+  return private_->deserializer.GetWireFormatVersion();
+}
+
+MaybeLocal<Value> ValueDeserializer::ReadValue(Local<Context> context) {
+  CHECK(!private_->has_aborted);
+  PREPARE_FOR_EXECUTION(context, ValueDeserializer, ReadValue, Value);
+  i::MaybeHandle<i::Object> result;
+  if (GetWireFormatVersion() > 0) {
+    result = private_->deserializer.ReadObject();
+  } else {
+    result =
+        private_->deserializer.ReadObjectUsingEntireBufferForLegacyFormat();
+  }
+  Local<Value> value;
+  has_pending_exception = !ToLocal(result, &value);
+  RETURN_ON_FAILED_EXECUTION(Value);
+  RETURN_ESCAPED(value);
+}
+
+void ValueDeserializer::TransferArrayBuffer(uint32_t transfer_id,
+                                            Local<ArrayBuffer> array_buffer) {
+  CHECK(!private_->has_aborted);
+  private_->deserializer.TransferArrayBuffer(transfer_id,
+                                             Utils::OpenHandle(*array_buffer));
+}
+
+void ValueDeserializer::TransferSharedArrayBuffer(
+    uint32_t transfer_id, Local<SharedArrayBuffer> shared_array_buffer) {
+  CHECK(!private_->has_aborted);
+  private_->deserializer.TransferArrayBuffer(
+      transfer_id, Utils::OpenHandle(*shared_array_buffer));
+}
+
+bool ValueDeserializer::ReadUint32(uint32_t* value) {
+  return private_->deserializer.ReadUint32(value);
+}
+
+bool ValueDeserializer::ReadUint64(uint64_t* value) {
+  return private_->deserializer.ReadUint64(value);
+}
+
+bool ValueDeserializer::ReadDouble(double* value) {
+  return private_->deserializer.ReadDouble(value);
+}
+
+bool ValueDeserializer::ReadRawBytes(size_t length, const void** data) {
+  return private_->deserializer.ReadRawBytes(length, data);
+}
+
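// A minimal usage sketch (not introduced by this patch) of the public
// ValueSerializer / ValueDeserializer API implemented above. It assumes the
// embedder already holds a live Isolate and Context; the RoundTrip helper and
// its error handling are illustrative only.
#include <vector>
#include "include/v8.h"  // Include path is an assumption about the embedder's setup.

v8::MaybeLocal<v8::Value> RoundTrip(v8::Isolate* isolate,
                                    v8::Local<v8::Context> context,
                                    v8::Local<v8::Value> value) {
  // Serialize: header first, then the value; a failed write has already
  // scheduled an exception on the isolate.
  v8::ValueSerializer serializer(isolate);
  serializer.WriteHeader();
  if (serializer.WriteValue(context, value).IsNothing()) {
    return v8::MaybeLocal<v8::Value>();
  }
  std::vector<uint8_t> wire = serializer.ReleaseBuffer();

  // Deserialize from the wire bytes produced above.
  v8::ValueDeserializer deserializer(isolate, wire.data(), wire.size());
  if (deserializer.ReadHeader(context).IsNothing()) {
    return v8::MaybeLocal<v8::Value>();
  }
  return deserializer.ReadValue(context);
}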
 // --- D a t a ---
 
 bool Value::FullIsUndefined() const {
@@ -3019,12 +3301,18 @@
   return obj->IsJSRegExp();
 }
 
+bool Value::IsAsyncFunction() const {
+  i::Handle<i::Object> obj = Utils::OpenHandle(this);
+  if (!obj->IsJSFunction()) return false;
+  i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
+  return i::IsAsyncFunction(func->shared()->kind());
+}
 
 bool Value::IsGeneratorFunction() const {
   i::Handle<i::Object> obj = Utils::OpenHandle(this);
   if (!obj->IsJSFunction()) return false;
   i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
-  return func->shared()->is_generator();
+  return i::IsGeneratorFunction(func->shared()->kind());
 }
 
 
@@ -3662,6 +3950,98 @@
   return result;
 }
 
+struct v8::PropertyDescriptor::PrivateData {
+  PrivateData() : desc() {}
+  i::PropertyDescriptor desc;
+};
+
+v8::PropertyDescriptor::PropertyDescriptor() : private_(new PrivateData()) {}
+
+// DataDescriptor
+v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> value)
+    : private_(new PrivateData()) {
+  private_->desc.set_value(Utils::OpenHandle(*value, true));
+}
+
+// DataDescriptor with writable field
+v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> value,
+                                           bool writable)
+    : private_(new PrivateData()) {
+  private_->desc.set_value(Utils::OpenHandle(*value, true));
+  private_->desc.set_writable(writable);
+}
+
+// AccessorDescriptor
+v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> get,
+                                           v8::Local<v8::Value> set)
+    : private_(new PrivateData()) {
+  DCHECK(get.IsEmpty() || get->IsUndefined() || get->IsFunction());
+  DCHECK(set.IsEmpty() || set->IsUndefined() || set->IsFunction());
+  private_->desc.set_get(Utils::OpenHandle(*get, true));
+  private_->desc.set_set(Utils::OpenHandle(*set, true));
+}
+
+v8::PropertyDescriptor::~PropertyDescriptor() { delete private_; }
+
+v8::Local<Value> v8::PropertyDescriptor::value() const {
+  DCHECK(private_->desc.has_value());
+  return Utils::ToLocal(private_->desc.value());
+}
+
+v8::Local<Value> v8::PropertyDescriptor::get() const {
+  DCHECK(private_->desc.has_get());
+  return Utils::ToLocal(private_->desc.get());
+}
+
+v8::Local<Value> v8::PropertyDescriptor::set() const {
+  DCHECK(private_->desc.has_set());
+  return Utils::ToLocal(private_->desc.set());
+}
+
+bool v8::PropertyDescriptor::has_value() const {
+  return private_->desc.has_value();
+}
+bool v8::PropertyDescriptor::has_get() const {
+  return private_->desc.has_get();
+}
+bool v8::PropertyDescriptor::has_set() const {
+  return private_->desc.has_set();
+}
+
+bool v8::PropertyDescriptor::writable() const {
+  DCHECK(private_->desc.has_writable());
+  return private_->desc.writable();
+}
+
+bool v8::PropertyDescriptor::has_writable() const {
+  return private_->desc.has_writable();
+}
+
+void v8::PropertyDescriptor::set_enumerable(bool enumerable) {
+  private_->desc.set_enumerable(enumerable);
+}
+
+bool v8::PropertyDescriptor::enumerable() const {
+  DCHECK(private_->desc.has_enumerable());
+  return private_->desc.enumerable();
+}
+
+bool v8::PropertyDescriptor::has_enumerable() const {
+  return private_->desc.has_enumerable();
+}
+
+void v8::PropertyDescriptor::set_configurable(bool configurable) {
+  private_->desc.set_configurable(configurable);
+}
+
+bool v8::PropertyDescriptor::configurable() const {
+  DCHECK(private_->desc.has_configurable());
+  return private_->desc.configurable();
+}
+
+bool v8::PropertyDescriptor::has_configurable() const {
+  return private_->desc.has_configurable();
+}
 
 Maybe<bool> v8::Object::DefineOwnProperty(v8::Local<v8::Context> context,
                                           v8::Local<Name> key,
@@ -3672,13 +4052,6 @@
   i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
   i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
 
-  if (self->IsAccessCheckNeeded() &&
-      !isolate->MayAccess(handle(isolate->context()),
-                          i::Handle<i::JSObject>::cast(self))) {
-    isolate->ReportFailedAccessCheck(i::Handle<i::JSObject>::cast(self));
-    return Nothing<bool>();
-  }
-
   i::PropertyDescriptor desc;
   desc.set_writable(!(attributes & v8::ReadOnly));
   desc.set_enumerable(!(attributes & v8::DontEnum));
@@ -3691,6 +4064,19 @@
   return success;
 }
 
+Maybe<bool> v8::Object::DefineProperty(v8::Local<v8::Context> context,
+                                       v8::Local<Name> key,
+                                       PropertyDescriptor& descriptor) {
+  PREPARE_FOR_EXECUTION_PRIMITIVE(context, Object, DefineProperty, bool);
+  i::Handle<i::JSReceiver> self = Utils::OpenHandle(this);
+  i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
+
+  Maybe<bool> success = i::JSReceiver::DefineOwnProperty(
+      isolate, self, key_obj, &descriptor.get_private()->desc,
+      i::Object::DONT_THROW);
+  RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
+  return success;
+}
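// A short sketch (not part of this change) of the new Object::DefineProperty
// entry point together with the v8::PropertyDescriptor wrapper defined above.
// The DefineReadOnlyProperty helper is hypothetical; object, key and value are
// assumed to come from the embedder.
bool DefineReadOnlyProperty(v8::Local<v8::Context> context,
                            v8::Local<v8::Object> object,
                            v8::Local<v8::Name> key,
                            v8::Local<v8::Value> value) {
  // Data descriptor: non-writable, enumerable, non-configurable.
  v8::PropertyDescriptor descriptor(value, /*writable=*/false);
  descriptor.set_enumerable(true);
  descriptor.set_configurable(false);
  return object->DefineProperty(context, key, descriptor).FromMaybe(false);
}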
 
 MUST_USE_RESULT
 static i::MaybeHandle<i::Object> DefineObjectProperty(
@@ -4408,9 +4794,10 @@
 MaybeLocal<Value> Object::CallAsFunction(Local<Context> context,
                                          Local<Value> recv, int argc,
                                          Local<Value> argv[]) {
-  PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Object, CallAsFunction, Value);
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Object, CallAsFunction, MaybeLocal<Value>(),
+      InternalEscapableScope, true);
   i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
-  TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
   auto self = Utils::OpenHandle(this);
   auto recv_obj = Utils::OpenHandle(*recv);
   STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@@ -4434,10 +4821,10 @@
 
 MaybeLocal<Value> Object::CallAsConstructor(Local<Context> context, int argc,
                                             Local<Value> argv[]) {
-  PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Object, CallAsConstructor,
-                                      Value);
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Object, CallAsConstructor,
+      MaybeLocal<Value>(), InternalEscapableScope, true);
   i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
-  TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
   auto self = Utils::OpenHandle(this);
   STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
   i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@@ -4485,9 +4872,10 @@
 
 MaybeLocal<Object> Function::NewInstance(Local<Context> context, int argc,
                                          v8::Local<v8::Value> argv[]) const {
-  PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Function, NewInstance, Object);
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Function, NewInstance, MaybeLocal<Object>(),
+      InternalEscapableScope, true);
   i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
-  TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
   auto self = Utils::OpenHandle(this);
   STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
   i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@@ -4509,9 +4897,10 @@
 MaybeLocal<v8::Value> Function::Call(Local<Context> context,
                                      v8::Local<v8::Value> recv, int argc,
                                      v8::Local<v8::Value> argv[]) {
-  PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Function, Call, Value);
+  PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
+      "v8", "V8.Execute", context, Function, Call, MaybeLocal<Value>(),
+      InternalEscapableScope, true);
   i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
-  TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
   auto self = Utils::OpenHandle(this);
   i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv);
   STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@@ -5708,8 +6097,8 @@
                           v8::MaybeLocal<Value> global_object,
                           size_t context_snapshot_index) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external_isolate);
-  LOG_API(isolate, Context, New);
   TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.NewContext");
+  LOG_API(isolate, Context, New);
   i::HandleScope scope(isolate);
   ExtensionConfiguration no_extensions;
   if (extensions == NULL) extensions = &no_extensions;
@@ -6820,8 +7209,9 @@
   if (!maybe_compiled_part.ToHandle(&compiled_part)) {
     return MaybeLocal<WasmCompiledModule>();
   }
-  return Local<WasmCompiledModule>::Cast(Utils::ToLocal(
-      i::wasm::CreateCompiledModuleObject(i_isolate, compiled_part)));
+  return Local<WasmCompiledModule>::Cast(
+      Utils::ToLocal(i::wasm::CreateCompiledModuleObject(
+          i_isolate, compiled_part, i::wasm::ModuleOrigin::kWasmOrigin)));
 }
 
 // static
@@ -7234,8 +7624,7 @@
 void Isolate::ReportExternalAllocationLimitReached() {
   i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
   if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
-  heap->ReportExternalMemoryPressure(
-      "external memory allocation limit reached.");
+  heap->ReportExternalMemoryPressure();
 }
 
 
@@ -7303,27 +7692,24 @@
 void Isolate::SetObjectGroupId(internal::Object** object, UniqueId id) {
   i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
   internal_isolate->global_handles()->SetObjectGroupId(
-      v8::internal::Handle<v8::internal::Object>(object).location(),
-      id);
+      i::Handle<i::Object>(object).location(), id);
 }
 
 
 void Isolate::SetReferenceFromGroup(UniqueId id, internal::Object** object) {
   i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
   internal_isolate->global_handles()->SetReferenceFromGroup(
-      id,
-      v8::internal::Handle<v8::internal::Object>(object).location());
+      id, i::Handle<i::Object>(object).location());
 }
 
 
 void Isolate::SetReference(internal::Object** parent,
                            internal::Object** child) {
   i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
-  i::Object** parent_location =
-      v8::internal::Handle<v8::internal::Object>(parent).location();
+  i::Object** parent_location = i::Handle<i::Object>(parent).location();
   internal_isolate->global_handles()->SetReference(
       reinterpret_cast<i::HeapObject**>(parent_location),
-      v8::internal::Handle<v8::internal::Object>(child).location());
+      i::Handle<i::Object>(child).location());
 }
 
 
@@ -7398,13 +7784,13 @@
   CHECK(i::FLAG_expose_gc);
   if (type == kMinorGarbageCollection) {
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectGarbage(
-        i::NEW_SPACE, "Isolate::RequestGarbageCollection",
+        i::NEW_SPACE, i::GarbageCollectionReason::kTesting,
         kGCCallbackFlagForced);
   } else {
     DCHECK_EQ(kFullGarbageCollection, type);
     reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage(
         i::Heap::kAbortIncrementalMarkingMask,
-        "Isolate::RequestGarbageCollection", kGCCallbackFlagForced);
+        i::GarbageCollectionReason::kTesting, kGCCallbackFlagForced);
   }
 }
 
@@ -7833,7 +8219,8 @@
     i::HistogramTimerScope idle_notification_scope(
         isolate->counters()->gc_low_memory_notification());
     TRACE_EVENT0("v8", "V8.GCLowMemoryNotification");
-    isolate->heap()->CollectAllAvailableGarbage("low memory notification");
+    isolate->heap()->CollectAllAvailableGarbage(
+        i::GarbageCollectionReason::kLowMemoryNotification);
   }
 }
 
@@ -7857,8 +8244,7 @@
 
 void Isolate::MemoryPressureNotification(MemoryPressureLevel level) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  return isolate->heap()->MemoryPressureNotification(level,
-                                                     Locker::IsLocked(this));
+  isolate->heap()->MemoryPressureNotification(level, Locker::IsLocked(this));
 }
 
 void Isolate::SetRAILMode(RAILMode rail_mode) {
@@ -8325,6 +8711,10 @@
   }
 }
 
+const char* CpuProfileNode::GetFunctionNameStr() const {
+  const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
+  return node->entry()->name();
+}
 
 int CpuProfileNode::GetScriptId() const {
   const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
@@ -8332,7 +8722,6 @@
   return entry->script_id();
 }
 
-
 Local<String> CpuProfileNode::GetScriptResourceName() const {
   const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
   i::Isolate* isolate = node->isolate();
@@ -8340,6 +8729,10 @@
       node->entry()->resource_name()));
 }
 
+const char* CpuProfileNode::GetScriptResourceNameStr() const {
+  const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
+  return node->entry()->resource_name();
+}
 
 int CpuProfileNode::GetLineNumber() const {
   return reinterpret_cast<const i::ProfileNode*>(this)->entry()->line_number();
@@ -8966,9 +9359,6 @@
   Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
   RuntimeCallTimerScope timer(isolate,
                               &RuntimeCallStats::AccessorGetterCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate,
-      &internal::tracing::TraceEventStatsTable::AccessorGetterCallback);
   Address getter_address = reinterpret_cast<Address>(reinterpret_cast<intptr_t>(
       getter));
   VMState<EXTERNAL> state(isolate);
@@ -8982,9 +9372,6 @@
   Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
   RuntimeCallTimerScope timer(isolate,
                               &RuntimeCallStats::InvokeFunctionCallback);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate,
-      &internal::tracing::TraceEventStatsTable::InvokeFunctionCallback);
   Address callback_address =
       reinterpret_cast<Address>(reinterpret_cast<intptr_t>(callback));
   VMState<EXTERNAL> state(isolate);
diff --git a/src/api.h b/src/api.h
index ede7ba9..22c10dd 100644
--- a/src/api.h
+++ b/src/api.h
@@ -69,7 +69,6 @@
   static RegisteredExtension* first_extension_;
 };
 
-
 #define OPEN_HANDLE_LIST(V)                  \
   V(Template, TemplateInfo)                  \
   V(FunctionTemplate, FunctionTemplateInfo)  \
@@ -101,6 +100,7 @@
   V(Symbol, Symbol)                          \
   V(Script, JSFunction)                      \
   V(UnboundScript, SharedFunctionInfo)       \
+  V(Module, Module)                          \
   V(Function, JSReceiver)                    \
   V(Message, JSMessageObject)                \
   V(Context, Context)                        \
@@ -124,6 +124,8 @@
       v8::internal::Handle<v8::internal::Context> obj);
   static inline Local<Value> ToLocal(
       v8::internal::Handle<v8::internal::Object> obj);
+  static inline Local<Module> ToLocal(
+      v8::internal::Handle<v8::internal::Module> obj);
   static inline Local<Name> ToLocal(
       v8::internal::Handle<v8::internal::Name> obj);
   static inline Local<String> ToLocal(
@@ -136,6 +138,8 @@
       v8::internal::Handle<v8::internal::JSReceiver> obj);
   static inline Local<Object> ToLocal(
       v8::internal::Handle<v8::internal::JSObject> obj);
+  static inline Local<Function> ToLocal(
+      v8::internal::Handle<v8::internal::JSFunction> obj);
   static inline Local<Array> ToLocal(
       v8::internal::Handle<v8::internal::JSArray> obj);
   static inline Local<Map> ToLocal(
@@ -284,12 +288,14 @@
 
 MAKE_TO_LOCAL(ToLocal, Context, Context)
 MAKE_TO_LOCAL(ToLocal, Object, Value)
+MAKE_TO_LOCAL(ToLocal, Module, Module)
 MAKE_TO_LOCAL(ToLocal, Name, Name)
 MAKE_TO_LOCAL(ToLocal, String, String)
 MAKE_TO_LOCAL(ToLocal, Symbol, Symbol)
 MAKE_TO_LOCAL(ToLocal, JSRegExp, RegExp)
 MAKE_TO_LOCAL(ToLocal, JSReceiver, Object)
 MAKE_TO_LOCAL(ToLocal, JSObject, Object)
+MAKE_TO_LOCAL(ToLocal, JSFunction, Function)
 MAKE_TO_LOCAL(ToLocal, JSArray, Array)
 MAKE_TO_LOCAL(ToLocal, JSMap, Map)
 MAKE_TO_LOCAL(ToLocal, JSSet, Set)
diff --git a/src/arguments.h b/src/arguments.h
index 9c629ce..92c7075 100644
--- a/src/arguments.h
+++ b/src/arguments.h
@@ -81,21 +81,20 @@
 
 // TODO(cbruni): add global flag to check whether any tracing events have been
 // enabled.
-// TODO(cbruni): Convert the IsContext CHECK back to a DCHECK.
 #define RUNTIME_FUNCTION_RETURNS_TYPE(Type, Name)                             \
   static INLINE(Type __RT_impl_##Name(Arguments args, Isolate* isolate));     \
                                                                               \
   V8_NOINLINE static Type Stats_##Name(int args_length, Object** args_object, \
                                        Isolate* isolate) {                    \
     RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Name);            \
+    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"),                     \
+                 "V8.Runtime_" #Name);                                        \
     Arguments args(args_length, args_object);                                 \
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(                            \
-        isolate, &tracing::TraceEventStatsTable::Name);                       \
     return __RT_impl_##Name(args, isolate);                                   \
   }                                                                           \
                                                                               \
   Type Name(int args_length, Object** args_object, Isolate* isolate) {        \
-    CHECK(isolate->context() == nullptr || isolate->context()->IsContext());  \
+    DCHECK(isolate->context() == nullptr || isolate->context()->IsContext()); \
     CLOBBER_DOUBLE_REGISTERS();                                               \
     if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||       \
                     FLAG_runtime_call_stats)) {                               \
diff --git a/src/arm/OWNERS b/src/arm/OWNERS
deleted file mode 100644
index 906a5ce..0000000
--- a/src/arm/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-rmcilroy@chromium.org
diff --git a/src/arm/assembler-arm-inl.h b/src/arm/assembler-arm-inl.h
index b1f33e0..bc501b1 100644
--- a/src/arm/assembler-arm-inl.h
+++ b/src/arm/assembler-arm-inl.h
@@ -46,7 +46,7 @@
 namespace v8 {
 namespace internal {
 
-bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
+bool CpuFeatures::SupportsCrankshaft() { return true; }
 
 bool CpuFeatures::SupportsSimd128() { return false; }
 
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 78ffe25..ee02027 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -46,97 +46,203 @@
 namespace v8 {
 namespace internal {
 
-// Get the CPU features enabled by the build. For cross compilation the
-// preprocessor symbols CAN_USE_ARMV7_INSTRUCTIONS and CAN_USE_VFP3_INSTRUCTIONS
-// can be defined to enable ARMv7 and VFPv3 instructions when building the
-// snapshot.
-static unsigned CpuFeaturesImpliedByCompiler() {
-  unsigned answer = 0;
-#ifdef CAN_USE_ARMV8_INSTRUCTIONS
-  if (FLAG_enable_armv8) {
-    answer |= 1u << ARMv8;
-    // ARMv8 always features VFP and NEON.
-    answer |= 1u << ARMv7 | 1u << VFP3 | 1u << NEON | 1u << VFP32DREGS;
-    answer |= 1u << SUDIV;
-  }
-#endif  // CAN_USE_ARMV8_INSTRUCTIONS
-#ifdef CAN_USE_ARMV7_INSTRUCTIONS
-  if (FLAG_enable_armv7) answer |= 1u << ARMv7;
-#endif  // CAN_USE_ARMV7_INSTRUCTIONS
-#ifdef CAN_USE_VFP3_INSTRUCTIONS
-  if (FLAG_enable_vfp3) answer |= 1u << VFP3 | 1u << ARMv7;
-#endif  // CAN_USE_VFP3_INSTRUCTIONS
-#ifdef CAN_USE_VFP32DREGS
-  if (FLAG_enable_32dregs) answer |= 1u << VFP32DREGS;
-#endif  // CAN_USE_VFP32DREGS
-#ifdef CAN_USE_NEON
-  if (FLAG_enable_neon) answer |= 1u << NEON;
-#endif  // CAN_USE_VFP32DREGS
+static const unsigned kArmv6 = 0u;
+static const unsigned kArmv7 = kArmv6 | (1u << ARMv7);
+static const unsigned kArmv7WithSudiv = kArmv7 | (1u << ARMv7_SUDIV);
+static const unsigned kArmv8 = kArmv7WithSudiv | (1u << ARMv8);
 
-  return answer;
+static unsigned CpuFeaturesFromCommandLine() {
+  unsigned result;
+  if (strcmp(FLAG_arm_arch, "armv8") == 0) {
+    result = kArmv8;
+  } else if (strcmp(FLAG_arm_arch, "armv7+sudiv") == 0) {
+    result = kArmv7WithSudiv;
+  } else if (strcmp(FLAG_arm_arch, "armv7") == 0) {
+    result = kArmv7;
+  } else if (strcmp(FLAG_arm_arch, "armv6") == 0) {
+    result = kArmv6;
+  } else {
+    fprintf(stderr, "Error: unrecognised value for --arm-arch ('%s').\n",
+            FLAG_arm_arch);
+    fprintf(stderr,
+            "Supported values are:  armv8\n"
+            "                       armv7+sudiv\n"
+            "                       armv7\n"
+            "                       armv6\n");
+    CHECK(false);
+  }
+
+  // If any of the old (deprecated) flags are specified, print a warning, but
+  // otherwise try to respect them for now.
+  // TODO(jbramley): When all the old bots have been updated, remove this.
+  if (FLAG_enable_armv7.has_value || FLAG_enable_vfp3.has_value ||
+      FLAG_enable_32dregs.has_value || FLAG_enable_neon.has_value ||
+      FLAG_enable_sudiv.has_value || FLAG_enable_armv8.has_value) {
+    // As an approximation of the old behaviour, set the default values from the
+    // arm_arch setting, then apply the flags over the top.
+    bool enable_armv7 = (result & (1u << ARMv7)) != 0;
+    bool enable_vfp3 = (result & (1u << ARMv7)) != 0;
+    bool enable_32dregs = (result & (1u << ARMv7)) != 0;
+    bool enable_neon = (result & (1u << ARMv7)) != 0;
+    bool enable_sudiv = (result & (1u << ARMv7_SUDIV)) != 0;
+    bool enable_armv8 = (result & (1u << ARMv8)) != 0;
+    if (FLAG_enable_armv7.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_armv7 is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_armv7 = FLAG_enable_armv7.value;
+    }
+    if (FLAG_enable_vfp3.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_vfp3 is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_vfp3 = FLAG_enable_vfp3.value;
+    }
+    if (FLAG_enable_32dregs.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_32dregs is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_32dregs = FLAG_enable_32dregs.value;
+    }
+    if (FLAG_enable_neon.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_neon is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_neon = FLAG_enable_neon.value;
+    }
+    if (FLAG_enable_sudiv.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_sudiv is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_sudiv = FLAG_enable_sudiv.value;
+    }
+    if (FLAG_enable_armv8.has_value) {
+      fprintf(stderr,
+              "Warning: --enable_armv8 is deprecated. "
+              "Use --arm_arch instead.\n");
+      enable_armv8 = FLAG_enable_armv8.value;
+    }
+    // Emulate the old implications.
+    if (enable_armv8) {
+      enable_vfp3 = true;
+      enable_neon = true;
+      enable_32dregs = true;
+      enable_sudiv = true;
+    }
+    // Select the best available configuration.
+    if (enable_armv7 && enable_vfp3 && enable_32dregs && enable_neon) {
+      if (enable_sudiv) {
+        if (enable_armv8) {
+          result = kArmv8;
+        } else {
+          result = kArmv7WithSudiv;
+        }
+      } else {
+        result = kArmv7;
+      }
+    } else {
+      result = kArmv6;
+    }
+  }
+  return result;
+}
+
+// Get the CPU features enabled by the build.
+// For cross compilation the preprocessor symbols such as
+// CAN_USE_ARMV7_INSTRUCTIONS and CAN_USE_VFP3_INSTRUCTIONS can be used to
+// enable ARMv7 and VFPv3 instructions when building the snapshot. However,
+// these flags should be consistent with a supported ARM configuration:
+//  "armv6":       ARMv6 + VFPv2
+//  "armv7":       ARMv7 + VFPv3-D32 + NEON
+//  "armv7+sudiv": ARMv7 + VFPv4-D32 + NEON + SUDIV
+//  "armv8":       ARMv8 (+ all of the above)
+static constexpr unsigned CpuFeaturesFromCompiler() {
+// TODO(jbramley): Once the build flags are simplified, these tests should
+// also be simplified.
+
+// Check *architectural* implications.
+#if defined(CAN_USE_ARMV8_INSTRUCTIONS) && !defined(CAN_USE_ARMV7_INSTRUCTIONS)
+#error "CAN_USE_ARMV8_INSTRUCTIONS should imply CAN_USE_ARMV7_INSTRUCTIONS"
+#endif
+#if defined(CAN_USE_ARMV8_INSTRUCTIONS) && !defined(CAN_USE_SUDIV)
+#error "CAN_USE_ARMV8_INSTRUCTIONS should imply CAN_USE_SUDIV"
+#endif
+#if defined(CAN_USE_ARMV7_INSTRUCTIONS) != defined(CAN_USE_VFP3_INSTRUCTIONS)
+// V8 requires VFP, and all ARMv7 devices with VFP have VFPv3. Similarly,
+// VFPv3 isn't available before ARMv7.
+#error "CAN_USE_ARMV7_INSTRUCTIONS should match CAN_USE_VFP3_INSTRUCTIONS"
+#endif
+#if defined(CAN_USE_NEON) && !defined(CAN_USE_ARMV7_INSTRUCTIONS)
+#error "CAN_USE_NEON should imply CAN_USE_ARMV7_INSTRUCTIONS"
+#endif
+
+// Find compiler-implied features.
+#if defined(CAN_USE_ARMV8_INSTRUCTIONS) &&                           \
+    defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_SUDIV) && \
+    defined(CAN_USE_NEON) && defined(CAN_USE_VFP3_INSTRUCTIONS)
+  return kArmv8;
+#elif defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_SUDIV) && \
+    defined(CAN_USE_NEON) && defined(CAN_USE_VFP3_INSTRUCTIONS)
+  return kArmv7WithSudiv;
+#elif defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_NEON) && \
+    defined(CAN_USE_VFP3_INSTRUCTIONS)
+  return kArmv7;
+#else
+  return kArmv6;
+#endif
 }
 
 
 void CpuFeatures::ProbeImpl(bool cross_compile) {
-  supported_ |= CpuFeaturesImpliedByCompiler();
   dcache_line_size_ = 64;
 
+  unsigned command_line = CpuFeaturesFromCommandLine();
   // Only use statically determined features for cross compile (snapshot).
-  if (cross_compile) return;
+  if (cross_compile) {
+    supported_ |= command_line & CpuFeaturesFromCompiler();
+    return;
+  }
 
 #ifndef __arm__
   // For the simulator build, use whatever the flags specify.
-  if (FLAG_enable_armv8) {
-    supported_ |= 1u << ARMv8;
-    // ARMv8 always features VFP and NEON.
-    supported_ |= 1u << ARMv7 | 1u << VFP3 | 1u << NEON | 1u << VFP32DREGS;
-    supported_ |= 1u << SUDIV;
-    if (FLAG_enable_movw_movt) supported_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
-  }
-  if (FLAG_enable_armv7) {
-    supported_ |= 1u << ARMv7;
-    if (FLAG_enable_vfp3) supported_ |= 1u << VFP3;
-    if (FLAG_enable_neon) supported_ |= 1u << NEON | 1u << VFP32DREGS;
-    if (FLAG_enable_sudiv) supported_ |= 1u << SUDIV;
-    if (FLAG_enable_movw_movt) supported_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
-    if (FLAG_enable_32dregs) supported_ |= 1u << VFP32DREGS;
-  }
+  supported_ |= command_line;
 
 #else  // __arm__
   // Probe for additional features at runtime.
   base::CPU cpu;
-  if (FLAG_enable_vfp3 && cpu.has_vfp3()) {
-    // This implementation also sets the VFP flags if runtime
-    // detection of VFP returns true. VFPv3 implies ARMv7, see ARM DDI
-    // 0406B, page A1-6.
-    supported_ |= 1u << VFP3 | 1u << ARMv7;
-  }
-
-  if (FLAG_enable_neon && cpu.has_neon()) supported_ |= 1u << NEON;
-  if (FLAG_enable_sudiv && cpu.has_idiva()) supported_ |= 1u << SUDIV;
-
-  if (cpu.architecture() >= 7) {
-    if (FLAG_enable_armv7) supported_ |= 1u << ARMv7;
-    if (FLAG_enable_armv8 && cpu.architecture() >= 8) {
-      supported_ |= 1u << ARMv8;
-    }
-    // Use movw/movt for QUALCOMM ARMv7 cores.
-    if (FLAG_enable_movw_movt && cpu.implementer() == base::CPU::QUALCOMM) {
-      supported_ |= 1u << MOVW_MOVT_IMMEDIATE_LOADS;
+  // Runtime detection is slightly fuzzy, and some inferences are necessary.
+  unsigned runtime = kArmv6;
+  // NEON and VFPv3 imply at least ARMv7-A.
+  if (cpu.has_neon() && cpu.has_vfp3_d32()) {
+    DCHECK(cpu.has_vfp3());
+    runtime |= kArmv7;
+    if (cpu.has_idiva()) {
+      runtime |= kArmv7WithSudiv;
+      if (cpu.architecture() >= 8) {
+        runtime |= kArmv8;
+      }
     }
   }
 
+  // Use the best of the features found by CPU detection and those inferred from
+  // the build system. In both cases, restrict available features using the
+  // command-line. Note that the command-line flags are very permissive (kArmv8)
+  // by default.
+  supported_ |= command_line & CpuFeaturesFromCompiler();
+  supported_ |= command_line & runtime;
+
+  // Additional tuning options.
+
   // ARM Cortex-A9 and Cortex-A5 have 32 byte cachelines.
   if (cpu.implementer() == base::CPU::ARM &&
       (cpu.part() == base::CPU::ARM_CORTEX_A5 ||
        cpu.part() == base::CPU::ARM_CORTEX_A9)) {
     dcache_line_size_ = 32;
   }
-
-  if (FLAG_enable_32dregs && cpu.has_vfp3_d32()) supported_ |= 1u << VFP32DREGS;
 #endif
 
-  DCHECK(!IsSupported(VFP3) || IsSupported(ARMv7));
+  DCHECK_IMPLIES(IsSupported(ARMv7_SUDIV), IsSupported(ARMv7));
+  DCHECK_IMPLIES(IsSupported(ARMv8), IsSupported(ARMv7_SUDIV));
 }
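// The feature-selection rule used by the hardware (__arm__) path of ProbeImpl
// above, restated as a standalone sketch. The CombineArmFeatures name is
// hypothetical; the kArmv* masks are the ones defined earlier in this file.
// --arm-arch acts as a cap on both the compiler-implied and the
// runtime-detected feature sets.
constexpr unsigned CombineArmFeatures(unsigned command_line, unsigned compiler,
                                      unsigned runtime) {
  return (command_line & compiler) | (command_line & runtime);
}
// For example, CombineArmFeatures(kArmv7, kArmv6, kArmv8) == kArmv7: a build
// whose compile-time flags only imply ARMv6, running on an ARMv8 CPU, is still
// driven as ARMv7 when --arm-arch=armv7 is passed.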
 
 
@@ -195,13 +301,10 @@
 
 
 void CpuFeatures::PrintFeatures() {
-  printf(
-      "ARMv8=%d ARMv7=%d VFP3=%d VFP32DREGS=%d NEON=%d SUDIV=%d "
-      "MOVW_MOVT_IMMEDIATE_LOADS=%d",
-      CpuFeatures::IsSupported(ARMv8), CpuFeatures::IsSupported(ARMv7),
-      CpuFeatures::IsSupported(VFP3), CpuFeatures::IsSupported(VFP32DREGS),
-      CpuFeatures::IsSupported(NEON), CpuFeatures::IsSupported(SUDIV),
-      CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS));
+  printf("ARMv8=%d ARMv7=%d VFPv3=%d VFP32DREGS=%d NEON=%d SUDIV=%d",
+         CpuFeatures::IsSupported(ARMv8), CpuFeatures::IsSupported(ARMv7),
+         CpuFeatures::IsSupported(VFPv3), CpuFeatures::IsSupported(VFP32DREGS),
+         CpuFeatures::IsSupported(NEON), CpuFeatures::IsSupported(SUDIV));
 #ifdef __arm__
   bool eabi_hardfloat = base::OS::ArmUsingHardFloat();
 #elif USE_EABI_HARDFLOAT
@@ -209,7 +312,7 @@
 #else
   bool eabi_hardfloat = false;
 #endif
-    printf(" USE_EABI_HARDFLOAT=%d\n", eabi_hardfloat);
+  printf(" USE_EABI_HARDFLOAT=%d\n", eabi_hardfloat);
 }
 
 
@@ -481,6 +584,12 @@
   first_const_pool_64_use_ = -1;
   last_bound_pos_ = 0;
   ClearRecordedAstId();
+  if (CpuFeatures::IsSupported(VFP32DREGS)) {
+    // Register objects tend to be abstracted and survive between scopes, so
+    // it's awkward to use CpuFeatures::VFP32DREGS with CpuFeatureScope. To make
+    // its use consistent with other features, we always enable it if we can.
+    EnableCpuFeature(VFP32DREGS);
+  }
 }
 
 
@@ -860,10 +969,12 @@
         if (target16_1 == 0) {
           CodePatcher patcher(isolate(), reinterpret_cast<byte*>(buffer_ + pos),
                               1, CodePatcher::DONT_FLUSH);
+          CpuFeatureScope scope(patcher.masm(), ARMv7);
           patcher.masm()->movw(dst, target16_0);
         } else {
           CodePatcher patcher(isolate(), reinterpret_cast<byte*>(buffer_ + pos),
                               2, CodePatcher::DONT_FLUSH);
+          CpuFeatureScope scope(patcher.masm(), ARMv7);
           patcher.masm()->movw(dst, target16_0);
           patcher.masm()->movt(dst, target16_1);
         }
@@ -1075,13 +1186,10 @@
 
 static bool use_mov_immediate_load(const Operand& x,
                                    const Assembler* assembler) {
-  if (FLAG_enable_embedded_constant_pool && assembler != NULL &&
+  DCHECK(assembler != nullptr);
+  if (FLAG_enable_embedded_constant_pool &&
       !assembler->is_constant_pool_available()) {
     return true;
-  } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
-             (assembler == NULL || !assembler->predictable_code_size())) {
-    // Prefer movw / movt to constant pool if it is more efficient on the CPU.
-    return true;
   } else if (x.must_output_reloc_info(assembler)) {
     // Prefer constant pool if data is likely to be patched.
     return false;
@@ -1094,6 +1202,7 @@
 
 int Operand::instructions_required(const Assembler* assembler,
                                    Instr instr) const {
+  DCHECK(assembler != nullptr);
   if (rm_.is_valid()) return 1;
   uint32_t dummy1, dummy2;
   if (must_output_reloc_info(assembler) ||
@@ -1105,8 +1214,7 @@
     if (use_mov_immediate_load(*this, assembler)) {
       // A movw / movt or mov / orr immediate load.
       instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
-    } else if (assembler != NULL &&
-               assembler->ConstantPoolAccessIsInOverflow()) {
+    } else if (assembler->ConstantPoolAccessIsInOverflow()) {
       // An overflowed constant pool load.
       instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
     } else {
@@ -1140,6 +1248,7 @@
   if (use_mov_immediate_load(x, this)) {
     Register target = rd.code() == pc.code() ? ip : rd;
     if (CpuFeatures::IsSupported(ARMv7)) {
+      CpuFeatureScope scope(this, ARMv7);
       if (!FLAG_enable_embedded_constant_pool &&
           x.must_output_reloc_info(this)) {
         // Make sure the movw/movt doesn't get separated.
@@ -1166,6 +1275,7 @@
       Register target = rd.code() == pc.code() ? ip : rd;
       // Emit instructions to load constant pool offset.
       if (CpuFeatures::IsSupported(ARMv7)) {
+        CpuFeatureScope scope(this, ARMv7);
         movw(target, 0, cond);
         movt(target, 0, cond);
       } else {
@@ -1376,8 +1486,7 @@
   emit(cond | B27 | B25 | B24 | (imm24 & kImm24Mask));
 }
 
-
-void Assembler::blx(int branch_offset) {  // v5 and above
+void Assembler::blx(int branch_offset) {
   DCHECK((branch_offset & 1) == 0);
   int h = ((branch_offset & 2) >> 1)*B24;
   int imm24 = branch_offset >> 2;
@@ -1385,14 +1494,12 @@
   emit(kSpecialCondition | B27 | B25 | h | (imm24 & kImm24Mask));
 }
 
-
-void Assembler::blx(Register target, Condition cond) {  // v5 and above
+void Assembler::blx(Register target, Condition cond) {
   DCHECK(!target.is(pc));
   emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BLX | target.code());
 }
 
-
-void Assembler::bx(Register target, Condition cond) {  // v5 and above, plus v4t
+void Assembler::bx(Register target, Condition cond) {
   DCHECK(!target.is(pc));  // use of pc is actually allowed, but discouraged
   emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BX | target.code());
 }
@@ -1548,13 +1655,13 @@
 
 
 void Assembler::movw(Register reg, uint32_t immediate, Condition cond) {
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   emit(cond | 0x30*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate));
 }
 
 
 void Assembler::movt(Register reg, uint32_t immediate, Condition cond) {
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   emit(cond | 0x34*B20 | reg.code()*B12 | EncodeMovwImmediate(immediate));
 }
 
@@ -1684,7 +1791,6 @@
 
 // Miscellaneous arithmetic instructions.
 void Assembler::clz(Register dst, Register src, Condition cond) {
-  // v5 and above.
   DCHECK(!dst.is(pc) && !src.is(pc));
   emit(cond | B24 | B22 | B21 | 15*B16 | dst.code()*B12 |
        15*B8 | CLZ | src.code());
@@ -1724,8 +1830,7 @@
                      int lsb,
                      int width,
                      Condition cond) {
-  // v7 and above.
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   DCHECK(!dst.is(pc) && !src.is(pc));
   DCHECK((lsb >= 0) && (lsb <= 31));
   DCHECK((width >= 1) && (width <= (32 - lsb)));
@@ -1744,8 +1849,7 @@
                      int lsb,
                      int width,
                      Condition cond) {
-  // v7 and above.
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   DCHECK(!dst.is(pc) && !src.is(pc));
   DCHECK((lsb >= 0) && (lsb <= 31));
   DCHECK((width >= 1) && (width <= (32 - lsb)));
@@ -1759,8 +1863,7 @@
 // to zero, preserving the value of the other bits.
 //   bfc dst, #lsb, #width
 void Assembler::bfc(Register dst, int lsb, int width, Condition cond) {
-  // v7 and above.
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   DCHECK(!dst.is(pc));
   DCHECK((lsb >= 0) && (lsb <= 31));
   DCHECK((width >= 1) && (width <= (32 - lsb)));
@@ -1778,8 +1881,7 @@
                     int lsb,
                     int width,
                     Condition cond) {
-  // v7 and above.
-  DCHECK(CpuFeatures::IsSupported(ARMv7));
+  DCHECK(IsEnabled(ARMv7));
   DCHECK(!dst.is(pc) && !src.is(pc));
   DCHECK((lsb >= 0) && (lsb <= 31));
   DCHECK((width >= 1) && (width <= (32 - lsb)));
@@ -2176,8 +2278,7 @@
 #endif  // def __arm__
 }
 
-
-void Assembler::bkpt(uint32_t imm16) {  // v5 and above
+void Assembler::bkpt(uint32_t imm16) {
   DCHECK(is_uint16(imm16));
   emit(al | B24 | B21 | (imm16 >> 4)*B8 | BKPT | (imm16 & 0xf));
 }
@@ -2190,17 +2291,38 @@
 
 
 void Assembler::dmb(BarrierOption option) {
-  emit(kSpecialCondition | 0x57ff*B12 | 5*B4 | option);
+  if (CpuFeatures::IsSupported(ARMv7)) {
+    // Details available in ARM DDI 0406C.b, A8-378.
+    emit(kSpecialCondition | 0x57ff * B12 | 5 * B4 | option);
+  } else {
+    // Details available in ARM DDI 0406C.b, B3-1750.
+    // CP15DMB: CRn=c7, opc1=0, CRm=c10, opc2=5, Rt is ignored.
+    mcr(p15, 0, r0, cr7, cr10, 5);
+  }
 }
 
 
 void Assembler::dsb(BarrierOption option) {
-  emit(kSpecialCondition | 0x57ff*B12 | 4*B4 | option);
+  if (CpuFeatures::IsSupported(ARMv7)) {
+    // Details available in ARM DDI 0406C.b, A8-380.
+    emit(kSpecialCondition | 0x57ff * B12 | 4 * B4 | option);
+  } else {
+    // Details available in ARM DDI 0406C.b, B3-1750.
+    // CP15DSB: CRn=c7, opc1=0, CRm=c10, opc2=4, Rt is ignored.
+    mcr(p15, 0, r0, cr7, cr10, 4);
+  }
 }
 
 
 void Assembler::isb(BarrierOption option) {
-  emit(kSpecialCondition | 0x57ff*B12 | 6*B4 | option);
+  if (CpuFeatures::IsSupported(ARMv7)) {
+    // Details available in ARM DDI 0406C.b, A8-389.
+    emit(kSpecialCondition | 0x57ff * B12 | 6 * B4 | option);
+  } else {
+    // Details available in ARM DDI 0406C.b, B3-1750.
+    // CP15ISB: CRn=c7, opc1=0, CRm=c5, opc2=4, Rt is ignored.
+    mcr(p15, 0, r0, cr7, cr5, 4);
+  }
 }
 
 
@@ -2217,13 +2339,8 @@
        crd.code()*B12 | coproc*B8 | (opcode_2 & 7)*B5 | crm.code());
 }
 
-
-void Assembler::cdp2(Coprocessor coproc,
-                     int opcode_1,
-                     CRegister crd,
-                     CRegister crn,
-                     CRegister crm,
-                     int opcode_2) {  // v5 and above
+void Assembler::cdp2(Coprocessor coproc, int opcode_1, CRegister crd,
+                     CRegister crn, CRegister crm, int opcode_2) {
   cdp(coproc, opcode_1, crd, crn, crm, opcode_2, kSpecialCondition);
 }
 
@@ -2240,13 +2357,8 @@
        rd.code()*B12 | coproc*B8 | (opcode_2 & 7)*B5 | B4 | crm.code());
 }
 
-
-void Assembler::mcr2(Coprocessor coproc,
-                     int opcode_1,
-                     Register rd,
-                     CRegister crn,
-                     CRegister crm,
-                     int opcode_2) {  // v5 and above
+void Assembler::mcr2(Coprocessor coproc, int opcode_1, Register rd,
+                     CRegister crn, CRegister crm, int opcode_2) {
   mcr(coproc, opcode_1, rd, crn, crm, opcode_2, kSpecialCondition);
 }
 
@@ -2263,13 +2375,8 @@
        rd.code()*B12 | coproc*B8 | (opcode_2 & 7)*B5 | B4 | crm.code());
 }
 
-
-void Assembler::mrc2(Coprocessor coproc,
-                     int opcode_1,
-                     Register rd,
-                     CRegister crn,
-                     CRegister crm,
-                     int opcode_2) {  // v5 and above
+void Assembler::mrc2(Coprocessor coproc, int opcode_1, Register rd,
+                     CRegister crn, CRegister crm, int opcode_2) {
   mrc(coproc, opcode_1, rd, crn, crm, opcode_2, kSpecialCondition);
 }
 
@@ -2295,20 +2402,13 @@
        coproc*B8 | (option & 255));
 }
 
-
-void Assembler::ldc2(Coprocessor coproc,
-                     CRegister crd,
-                     const MemOperand& src,
-                     LFlag l) {  // v5 and above
+void Assembler::ldc2(Coprocessor coproc, CRegister crd, const MemOperand& src,
+                     LFlag l) {
   ldc(coproc, crd, src, l, kSpecialCondition);
 }
 
-
-void Assembler::ldc2(Coprocessor coproc,
-                     CRegister crd,
-                     Register rn,
-                     int option,
-                     LFlag l) {  // v5 and above
+void Assembler::ldc2(Coprocessor coproc, CRegister crd, Register rn, int option,
+                     LFlag l) {
   ldc(coproc, crd, rn, option, l, kSpecialCondition);
 }
 
@@ -2323,6 +2423,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-924.
   // cond(31-28) | 1101(27-24)| U(23) | D(22) | 01(21-20) | Rbase(19-16) |
   // Vd(15-12) | 1011(11-8) | offset
+  DCHECK(VfpRegisterIsAvailable(dst));
   int u = 1;
   if (offset < 0) {
     CHECK(offset != kMinInt);
@@ -2353,6 +2454,7 @@
 void Assembler::vldr(const DwVfpRegister dst,
                      const MemOperand& operand,
                      const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(dst));
   DCHECK(operand.am_ == Offset);
   if (operand.rm().is_valid()) {
     add(ip, operand.rn(),
@@ -2420,6 +2522,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-1082.
   // cond(31-28) | 1101(27-24)| U(23) | D(22) | 00(21-20) | Rbase(19-16) |
   // Vd(15-12) | 1011(11-8) | (offset/4)
+  DCHECK(VfpRegisterIsAvailable(src));
   int u = 1;
   if (offset < 0) {
     CHECK(offset != kMinInt);
@@ -2450,6 +2553,7 @@
 void Assembler::vstr(const DwVfpRegister src,
                      const MemOperand& operand,
                      const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(src));
   DCHECK(operand.am_ == Offset);
   if (operand.rm().is_valid()) {
     add(ip, operand.rn(),
@@ -2508,16 +2612,13 @@
   }
 }
 
-
-void  Assembler::vldm(BlockAddrMode am,
-                      Register base,
-                      DwVfpRegister first,
-                      DwVfpRegister last,
-                      Condition cond) {
+void Assembler::vldm(BlockAddrMode am, Register base, DwVfpRegister first,
+                     DwVfpRegister last, Condition cond) {
   // Instruction details available in ARM DDI 0406C.b, A8-922.
   // cond(31-28) | 110(27-25)| PUDW1(24-20) | Rbase(19-16) |
   // first(15-12) | 1011(11-8) | (count * 2)
   DCHECK_LE(first.code(), last.code());
+  DCHECK(VfpRegisterIsAvailable(last));
   DCHECK(am == ia || am == ia_w || am == db_w);
   DCHECK(!base.is(pc));
 
@@ -2529,16 +2630,13 @@
        0xB*B8 | count*2);
 }
 
-
-void  Assembler::vstm(BlockAddrMode am,
-                      Register base,
-                      DwVfpRegister first,
-                      DwVfpRegister last,
-                      Condition cond) {
+void Assembler::vstm(BlockAddrMode am, Register base, DwVfpRegister first,
+                     DwVfpRegister last, Condition cond) {
   // Instruction details available in ARM DDI 0406C.b, A8-1080.
   // cond(31-28) | 110(27-25)| PUDW0(24-20) | Rbase(19-16) |
   // first(15-12) | 1011(11-8) | (count * 2)
   DCHECK_LE(first.code(), last.code());
+  DCHECK(VfpRegisterIsAvailable(last));
   DCHECK(am == ia || am == ia_w || am == db_w);
   DCHECK(!base.is(pc));
 
@@ -2550,11 +2648,8 @@
        0xB*B8 | count*2);
 }
 
-void  Assembler::vldm(BlockAddrMode am,
-                      Register base,
-                      SwVfpRegister first,
-                      SwVfpRegister last,
-                      Condition cond) {
+void Assembler::vldm(BlockAddrMode am, Register base, SwVfpRegister first,
+                     SwVfpRegister last, Condition cond) {
   // Instruction details available in ARM DDI 0406A, A8-626.
   // cond(31-28) | 110(27-25)| PUDW1(24-20) | Rbase(19-16) |
   // first(15-12) | 1010(11-8) | (count/2)
@@ -2569,12 +2664,8 @@
        0xA*B8 | count);
 }
 
-
-void  Assembler::vstm(BlockAddrMode am,
-                      Register base,
-                      SwVfpRegister first,
-                      SwVfpRegister last,
-                      Condition cond) {
+void Assembler::vstm(BlockAddrMode am, Register base, SwVfpRegister first,
+                     SwVfpRegister last, Condition cond) {
   // Instruction details available in ARM DDI 0406A, A8-784.
   // cond(31-28) | 110(27-25)| PUDW0(24-20) | Rbase(19-16) |
   // first(15-12) | 1011(11-8) | (count/2)
@@ -2602,8 +2693,6 @@
 // Only works for little endian floating point formats.
 // We don't support VFP on the mixed endian floating point platform.
 static bool FitsVmovFPImmediate(double d, uint32_t* encoding) {
-  DCHECK(CpuFeatures::IsSupported(VFP3));
-
   // VMOV can accept an immediate of the form:
   //
   //  +/- m * 2^(-n) where 16 <= m <= 31 and 0 <= n <= 7
@@ -2652,7 +2741,8 @@
 
 void Assembler::vmov(const SwVfpRegister dst, float imm) {
   uint32_t enc;
-  if (CpuFeatures::IsSupported(VFP3) && FitsVmovFPImmediate(imm, &enc)) {
+  if (CpuFeatures::IsSupported(VFPv3) && FitsVmovFPImmediate(imm, &enc)) {
+    CpuFeatureScope scope(this, VFPv3);
     // The float can be encoded in the instruction.
     //
     // Sd = immediate
@@ -2672,6 +2762,8 @@
 void Assembler::vmov(const DwVfpRegister dst,
                      double imm,
                      const Register scratch) {
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(!scratch.is(ip));
   uint32_t enc;
   // If the embedded constant pool is disabled, we can use the normal, inline
   // constant pool. If the embedded constant pool is enabled (via
@@ -2679,7 +2771,8 @@
   // pointer (pp) is valid.
   bool can_use_pool =
       !FLAG_enable_embedded_constant_pool || is_constant_pool_available();
-  if (CpuFeatures::IsSupported(VFP3) && FitsVmovFPImmediate(imm, &enc)) {
+  if (CpuFeatures::IsSupported(VFPv3) && FitsVmovFPImmediate(imm, &enc)) {
+    CpuFeatureScope scope(this, VFPv3);
     // The double can be encoded in the instruction.
     //
     // Dd = immediate
@@ -2689,7 +2782,9 @@
     int vd, d;
     dst.split_code(&vd, &d);
     emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
-  } else if (FLAG_enable_vldr_imm && can_use_pool) {
+  } else if (CpuFeatures::IsSupported(ARMv7) && FLAG_enable_vldr_imm &&
+             can_use_pool) {
+    CpuFeatureScope scope(this, ARMv7);
     // TODO(jfb) Temporarily turned off until we have constant blinding or
     //           some equivalent mitigation: an attacker can otherwise control
     //           generated data which also happens to be executable, a Very Bad
@@ -2732,6 +2827,7 @@
       vmov(dst, VmovIndexLo, ip);
       if (((lo & 0xffff) == (hi & 0xffff)) &&
           CpuFeatures::IsSupported(ARMv7)) {
+        CpuFeatureScope scope(this, ARMv7);
         movt(ip, hi >> 16);
       } else {
         mov(ip, Operand(hi));
@@ -2767,6 +2863,8 @@
   // Instruction details available in ARM DDI 0406C.b, A8-938.
   // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0000(19-16) | Vd(15-12) |
   // 101(11-9) | sz=1(8) | 0(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -2784,6 +2882,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-940.
   // cond(31-28) | 1110(27-24) | 0(23) | opc1=0index(22-21) | 0(20) |
   // Vd(19-16) | Rt(15-12) | 1011(11-8) | D(7) | opc2=00(6-5) | 1(4) | 0000(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
   DCHECK(index.index == 0 || index.index == 1);
   int vd, d;
   dst.split_code(&vd, &d);
@@ -2800,6 +2899,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8.8.342.
   // cond(31-28) | 1110(27-24) | U=0(23) | opc1=0index(22-21) | 1(20) |
   // Vn(19-16) | Rt(15-12) | 1011(11-8) | N(7) | opc2=00(6-5) | 1(4) | 0000(3-0)
+  DCHECK(VfpRegisterIsAvailable(src));
   DCHECK(index.index == 0 || index.index == 1);
   int vn, n;
   src.split_code(&vn, &n);
@@ -2816,6 +2916,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-948.
   // cond(31-28) | 1100(27-24)| 010(23-21) | op=0(20) | Rt2(19-16) |
   // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
+  DCHECK(VfpRegisterIsAvailable(dst));
   DCHECK(!src1.is(pc) && !src2.is(pc));
   int vm, m;
   dst.split_code(&vm, &m);
@@ -2832,6 +2933,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-948.
   // cond(31-28) | 1100(27-24)| 010(23-21) | op=1(20) | Rt2(19-16) |
   // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
+  DCHECK(VfpRegisterIsAvailable(src));
   DCHECK(!dst1.is(pc) && !dst2.is(pc));
   int vm, m;
   src.split_code(&vm, &m);
@@ -2985,6 +3087,7 @@
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(dst));
   emit(EncodeVCVT(F64, dst.code(), S32, src.code(), mode, cond));
 }
 
@@ -3001,6 +3104,7 @@
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(dst));
   emit(EncodeVCVT(F64, dst.code(), U32, src.code(), mode, cond));
 }
 
@@ -3027,6 +3131,7 @@
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(src));
   emit(EncodeVCVT(S32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -3035,6 +3140,7 @@
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(src));
   emit(EncodeVCVT(U32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -3043,6 +3149,7 @@
                              const SwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(dst));
   emit(EncodeVCVT(F64, dst.code(), F32, src.code(), mode, cond));
 }
 
@@ -3051,6 +3158,7 @@
                              const DwVfpRegister src,
                              VFPConversionMode mode,
                              const Condition cond) {
+  DCHECK(VfpRegisterIsAvailable(src));
   emit(EncodeVCVT(F32, dst.code(), F64, src.code(), mode, cond));
 }
 
@@ -3061,8 +3169,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-874.
   // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 1010(19-16) | Vd(15-12) |
   // 101(11-9) | sf=1(8) | sx=1(7) | 1(6) | i(5) | 0(4) | imm4(3-0)
+  DCHECK(IsEnabled(VFPv3));
+  DCHECK(VfpRegisterIsAvailable(dst));
   DCHECK(fraction_bits > 0 && fraction_bits <= 32);
-  DCHECK(CpuFeatures::IsSupported(VFP3));
   int vd, d;
   dst.split_code(&vd, &d);
   int imm5 = 32 - fraction_bits;
@@ -3079,6 +3188,8 @@
   // Instruction details available in ARM DDI 0406C.b, A8-968.
   // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0001(19-16) | Vd(15-12) |
   // 101(11-9) | sz=1(8) | 0(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3110,6 +3221,8 @@
   // Instruction details available in ARM DDI 0406C.b, A8-524.
   // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | 0000(19-16) | Vd(15-12) |
   // 101(11-9) | sz=1(8) | 1(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3142,6 +3255,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-830.
   // cond(31-28) | 11100(27-23)| D(22) | 11(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3180,6 +3296,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-1086.
   // cond(31-28) | 11100(27-23)| D(22) | 11(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3218,6 +3337,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-960.
   // cond(31-28) | 11100(27-23)| D(22) | 10(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3254,6 +3376,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-932.
   // cond(31-28) | 11100(27-23) | D(22) | 00(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | op=0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3288,6 +3413,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-932.
   // cond(31-28) | 11100(27-23) | D(22) | 00(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | op=1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3324,6 +3452,9 @@
   // Instruction details available in ARM DDI 0406C.b, A8-882.
   // cond(31-28) | 11101(27-23)| D(22) | 00(21-20) | Vn(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3360,6 +3491,8 @@
   // Instruction details available in ARM DDI 0406C.b, A8-864.
   // cond(31-28) | 11101(27-23)| D(22) | 11(21-20) | 0100(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | E=0(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(src1));
+  DCHECK(VfpRegisterIsAvailable(src2));
   int vd, d;
   src1.split_code(&vd, &d);
   int vm, m;
@@ -3391,6 +3524,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8-864.
   // cond(31-28) | 11101(27-23)| D(22) | 11(21-20) | 0101(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | E=0(7) | 1(6) | 0(5) | 0(4) | 0000(3-0)
+  DCHECK(VfpRegisterIsAvailable(src1));
   DCHECK(src2 == 0.0);
   int vd, d;
   src1.split_code(&vd, &d);
@@ -3411,12 +3545,76 @@
        0x5 * B9 | B6);
 }
 
+void Assembler::vmaxnm(const DwVfpRegister dst, const DwVfpRegister src1,
+                       const DwVfpRegister src2) {
+  // kSpecialCondition(31-28) | 11101(27-23) | D(22) | 00(21-20) | Vn(19-16) |
+  // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(IsEnabled(ARMv8));
+  int vd, d;
+  dst.split_code(&vd, &d);
+  int vn, n;
+  src1.split_code(&vn, &n);
+  int vm, m;
+  src2.split_code(&vm, &m);
+
+  emit(kSpecialCondition | 0x1D * B23 | d * B22 | vn * B16 | vd * B12 |
+       0x5 * B9 | B8 | n * B7 | m * B5 | vm);
+}
+
+void Assembler::vmaxnm(const SwVfpRegister dst, const SwVfpRegister src1,
+                       const SwVfpRegister src2) {
+  // kSpecialCondition(31-28) | 11101(27-23) | D(22) | 00(21-20) | Vn(19-16) |
+  // Vd(15-12) | 101(11-9) | sz=0(8) | N(7) | 0(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(IsEnabled(ARMv8));
+  int vd, d;
+  dst.split_code(&vd, &d);
+  int vn, n;
+  src1.split_code(&vn, &n);
+  int vm, m;
+  src2.split_code(&vm, &m);
+
+  emit(kSpecialCondition | 0x1D * B23 | d * B22 | vn * B16 | vd * B12 |
+       0x5 * B9 | n * B7 | m * B5 | vm);
+}
+
+void Assembler::vminnm(const DwVfpRegister dst, const DwVfpRegister src1,
+                       const DwVfpRegister src2) {
+  // kSpecialCondition(31-28) | 11101(27-23) | D(22) | 00(21-20) | Vn(19-16) |
+  // Vd(15-12) | 101(11-9) | sz=1(8) | N(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(IsEnabled(ARMv8));
+  int vd, d;
+  dst.split_code(&vd, &d);
+  int vn, n;
+  src1.split_code(&vn, &n);
+  int vm, m;
+  src2.split_code(&vm, &m);
+
+  emit(kSpecialCondition | 0x1D * B23 | d * B22 | vn * B16 | vd * B12 |
+       0x5 * B9 | B8 | n * B7 | B6 | m * B5 | vm);
+}
+
+void Assembler::vminnm(const SwVfpRegister dst, const SwVfpRegister src1,
+                       const SwVfpRegister src2) {
+  // kSpecialCondition(31-28) | 11101(27-23) | D(22) | 00(21-20) | Vn(19-16) |
+  // Vd(15-12) | 101(11-9) | sz=0(8) | N(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(IsEnabled(ARMv8));
+  int vd, d;
+  dst.split_code(&vd, &d);
+  int vn, n;
+  src1.split_code(&vn, &n);
+  int vm, m;
+  src2.split_code(&vm, &m);
+
+  emit(kSpecialCondition | 0x1D * B23 | d * B22 | vn * B16 | vd * B12 |
+       0x5 * B9 | n * B7 | B6 | m * B5 | vm);
+}
+
 void Assembler::vsel(Condition cond, const DwVfpRegister dst,
                      const DwVfpRegister src1, const DwVfpRegister src2) {
   // cond=kSpecialCondition(31-28) | 11100(27-23) | D(22) |
   // vsel_cond=XX(21-20) | Vn(19-16) | Vd(15-12) | 101(11-9) | sz=1(8) | N(7) |
   // 0(6) | M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3448,7 +3646,7 @@
   // cond=kSpecialCondition(31-28) | 11100(27-23) | D(22) |
   // vsel_cond=XX(21-20) | Vn(19-16) | Vd(15-12) | 101(11-9) | sz=0(8) | N(7) |
   // 0(6) | M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vn, n;
@@ -3481,6 +3679,8 @@
   // Instruction details available in ARM DDI 0406C.b, A8-1058.
   // cond(31-28) | 11101(27-23)| D(22) | 11(21-20) | 0001(19-16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | 11(7-6) | M(5) | 0(4) | Vm(3-0)
+  DCHECK(VfpRegisterIsAvailable(dst));
+  DCHECK(VfpRegisterIsAvailable(src));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3524,7 +3724,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=00(17-16) |  Vd(15-12) | 101(11-9) | sz=0(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3538,7 +3738,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=00(17-16) |  Vd(15-12) | 101(11-9) | sz=1(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3552,7 +3752,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=01(17-16) |  Vd(15-12) | 101(11-9) | sz=0(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3566,7 +3766,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=01(17-16) |  Vd(15-12) | 101(11-9) | sz=1(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3580,7 +3780,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=10(17-16) |  Vd(15-12) | 101(11-9) | sz=0(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3594,7 +3794,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=10(17-16) |  Vd(15-12) | 101(11-9) | sz=1(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3608,7 +3808,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=11(17-16) |  Vd(15-12) | 101(11-9) | sz=0(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3622,7 +3822,7 @@
   // cond=kSpecialCondition(31-28) | 11101(27-23)| D(22) | 11(21-20) |
   // 10(19-18) | RM=11(17-16) |  Vd(15-12) | 101(11-9) | sz=1(8) | 01(7-6) |
   // M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3636,7 +3836,7 @@
                        const Condition cond) {
   // cond(31-28) | 11101(27-23)| D(22) | 11(21-20) | 011(19-17) | 0(16) |
   // Vd(15-12) | 101(11-9) | sz=0(8) | op=1(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3650,7 +3850,7 @@
                        const Condition cond) {
   // cond(31-28) | 11101(27-23)| D(22) | 11(21-20) | 011(19-17) | 0(16) |
   // Vd(15-12) | 101(11-9) | sz=1(8) | op=1(7) | 1(6) | M(5) | 0(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(ARMv8));
+  DCHECK(IsEnabled(ARMv8));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3668,7 +3868,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8.8.320.
   // 1111(31-28) | 01000(27-23) | D(22) | 10(21-20) | Rn(19-16) |
   // Vd(15-12) | type(11-8) | size(7-6) | align(5-4) | Rm(3-0)
-  DCHECK(CpuFeatures::IsSupported(NEON));
+  DCHECK(IsEnabled(NEON));
   int vd, d;
   dst.base().split_code(&vd, &d);
   emit(0xFU*B28 | 4*B24 | d*B22 | 2*B20 | src.rn().code()*B16 | vd*B12 |
@@ -3682,7 +3882,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8.8.404.
   // 1111(31-28) | 01000(27-23) | D(22) | 00(21-20) | Rn(19-16) |
   // Vd(15-12) | type(11-8) | size(7-6) | align(5-4) | Rm(3-0)
-  DCHECK(CpuFeatures::IsSupported(NEON));
+  DCHECK(IsEnabled(NEON));
   int vd, d;
   src.base().split_code(&vd, &d);
   emit(0xFU*B28 | 4*B24 | d*B22 | dst.rn().code()*B16 | vd*B12 | src.type()*B8 |
@@ -3694,7 +3894,7 @@
   // Instruction details available in ARM DDI 0406C.b, A8.8.346.
   // 1111(31-28) | 001(27-25) | U(24) | 1(23) | D(22) | imm3(21-19) |
   // 000(18-16) | Vd(15-12) | 101000(11-6) | M(5) | 1(4) | Vm(3-0)
-  DCHECK(CpuFeatures::IsSupported(NEON));
+  DCHECK(IsEnabled(NEON));
   int vd, d;
   dst.split_code(&vd, &d);
   int vm, m;
@@ -3703,6 +3903,29 @@
         (dt & NeonDataTypeSizeMask)*B19 | vd*B12 | 0xA*B8 | m*B5 | B4 | vm);
 }
 
+void Assembler::vswp(DwVfpRegister srcdst0, DwVfpRegister srcdst1) {
+  DCHECK(VfpRegisterIsAvailable(srcdst0));
+  DCHECK(VfpRegisterIsAvailable(srcdst1));
+  DCHECK(!srcdst0.is(kScratchDoubleReg));
+  DCHECK(!srcdst1.is(kScratchDoubleReg));
+
+  if (srcdst0.is(srcdst1)) return;  // Swapping aliased registers emits nothing.
+
+  if (CpuFeatures::IsSupported(NEON)) {
+    // Instruction details available in ARM DDI 0406C.b, A8.8.418.
+    // 1111(31-28) | 00111(27-23) | D(22) | 110010(21-16) |
+    // Vd(15-12) | 000000(11-6) | M(5) | 0(4) | Vm(3-0)
+    int vd, d;
+    srcdst0.split_code(&vd, &d);
+    int vm, m;
+    srcdst1.split_code(&vm, &m);
+    emit(0xFU * B28 | 7 * B23 | d * B22 | 0x32 * B16 | vd * B12 | m * B5 | vm);
+  } else {
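+    // Without NEON, swap through the scratch register using three vmovs.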
+    vmov(kScratchDoubleReg, srcdst0);
+    vmov(srcdst0, srcdst1);
+    vmov(srcdst1, kScratchDoubleReg);
+  }
+}
 
 // Pseudo instructions.
 void Assembler::nop(int type) {
@@ -4208,6 +4431,7 @@
   Instr instr = instr_at(pc);
   if (access == ConstantPoolEntry::OVERFLOWED) {
     if (CpuFeatures::IsSupported(ARMv7)) {
+      CpuFeatureScope scope(this, ARMv7);
       // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0]'.
       Instr next_instr = instr_at(pc + kInstrSize);
       DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 0b9cd91..e5448f7 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -1022,7 +1022,8 @@
   void bkpt(uint32_t imm16);  // v5 and above
   void svc(uint32_t imm24, Condition cond = al);
 
-  // Synchronization instructions
+  // Synchronization instructions.
+  // On ARMv6, an equivalent CP15 operation will be used.
   void dmb(BarrierOption option);
   void dsb(BarrierOption option);
   void isb(BarrierOption option);
@@ -1258,6 +1259,19 @@
   void vcmp(const SwVfpRegister src1, const float src2,
             const Condition cond = al);
 
+  void vmaxnm(const DwVfpRegister dst,
+              const DwVfpRegister src1,
+              const DwVfpRegister src2);
+  void vmaxnm(const SwVfpRegister dst,
+              const SwVfpRegister src1,
+              const SwVfpRegister src2);
+  void vminnm(const DwVfpRegister dst,
+              const DwVfpRegister src1,
+              const DwVfpRegister src2);
+  void vminnm(const SwVfpRegister dst,
+              const SwVfpRegister src1,
+              const SwVfpRegister src2);
+
   // VSEL supports cond in {eq, ne, ge, lt, gt, le, vs, vc}.
   void vsel(const Condition cond,
             const DwVfpRegister dst,
@@ -1289,8 +1303,8 @@
               const Condition cond = al);
 
   // Support for NEON.
-  // All these APIs support D0 to D31 and Q0 to Q15.
 
+  // All these APIs support D0 to D31 and Q0 to Q15.
   void vld1(NeonSize size,
             const NeonListOperand& dst,
             const NeonMemOperand& src);
@@ -1299,6 +1313,9 @@
             const NeonMemOperand& dst);
   void vmovl(NeonDataType dt, QwNeonRegister dst, DwVfpRegister src);
 
+  // Currently, vswp supports only D0 to D31.
+  void vswp(DwVfpRegister srcdst0, DwVfpRegister srcdst1);
+
   // Pseudo instructions
 
   // Different nop operations are used by the code generator to detect certain
@@ -1586,6 +1603,12 @@
            (pc_offset() < no_const_pool_before_);
   }
 
+  bool VfpRegisterIsAvailable(DwVfpRegister reg) {
+    DCHECK(reg.is_valid());
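+    // d0-d15 are always encodable; d16-d31 additionally require VFP32DREGS.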
+    return IsEnabled(VFP32DREGS) ||
+           (reg.reg_code < LowDwVfpRegister::kMaxNumLowRegisters);
+  }
+
  private:
   int next_buffer_check_;  // pc offset of next buffer check
 
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 264f24f..de6803f 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -553,17 +553,14 @@
   // 3) Fall through to both_loaded_as_doubles.
   // 4) Jump to lhs_not_nan.
   // In cases 3 and 4 we have found out we were dealing with a number-number
-  // comparison.  If VFP3 is supported the double values of the numbers have
-  // been loaded into d7 and d6.  Otherwise, the double values have been loaded
-  // into r0, r1, r2, and r3.
+  // comparison. The double values of the numbers have been loaded into d7 (lhs)
+  // and d6 (rhs).
   EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());
 
   __ bind(&both_loaded_as_doubles);
-  // The arguments have been converted to doubles and stored in d6 and d7, if
-  // VFP3 is supported, or in r0, r1, r2, and r3.
+  // The arguments have been converted to doubles and stored in d6 and d7.
   __ bind(&lhs_not_nan);
   Label no_nan;
-  // ARMv7 VFP3 instructions to implement double precision comparison.
   __ VFPCompareAndSetFlags(d7, d6);
   Label nan;
   __ b(vs, &nan);
@@ -1646,7 +1643,6 @@
   // r2 : feedback vector
   // r3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1666,7 +1662,7 @@
   Register weak_value = r9;
   __ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
   __ cmp(r1, weak_value);
-  __ b(eq, &done_increment_count);
+  __ b(eq, &done);
   __ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
   __ b(eq, &done);
   __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
@@ -1689,7 +1685,7 @@
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
   __ cmp(r1, r5);
   __ b(ne, &megamorphic);
-  __ jmp(&done_increment_count);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -1718,32 +1714,22 @@
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ b(&done_initialize_count);
+  __ b(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ Move(r5, Operand(Smi::FromInt(1)));
-  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
-  __ b(&done);
+  __ bind(&done);
 
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
   __ add(r5, r5, Operand(FixedArray::kHeaderSize + kPointerSize));
   __ ldr(r4, FieldMemOperand(r5, 0));
   __ add(r4, r4, Operand(Smi::FromInt(1)));
   __ str(r4, FieldMemOperand(r5, 0));
-
-  __ bind(&done);
 }
 
-
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // r0 : number of arguments
   // r1 : the function to call
@@ -1785,6 +1771,17 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
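+  // The call count is stored one pointer past the feedback slot, hence the
+  // extra kPointerSize below.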
+  __ add(feedback_vector, feedback_vector,
+         Operand::PointerOffsetFromSmiKey(slot));
+  __ add(feedback_vector, feedback_vector,
+         Operand(FixedArray::kHeaderSize + kPointerSize));
+  __ ldr(slot, FieldMemOperand(feedback_vector, 0));
+  __ add(slot, slot, Operand(Smi::FromInt(1)));
+  __ str(slot, FieldMemOperand(feedback_vector, 0));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r1 - function
@@ -1798,11 +1795,7 @@
   __ mov(r0, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
-  __ ldr(r3, FieldMemOperand(r2, 0));
-  __ add(r3, r3, Operand(Smi::FromInt(1)));
-  __ str(r3, FieldMemOperand(r2, 0));
+  IncrementCallCount(masm, r2, r3);
 
   __ mov(r2, r4);
   __ mov(r3, r1);
@@ -1815,7 +1808,7 @@
   // r1 - function
   // r3 - slot id (Smi)
   // r2 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1845,14 +1838,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(r1, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
-  __ ldr(r3, FieldMemOperand(r2, 0));
-  __ add(r3, r3, Operand(Smi::FromInt(1)));
-  __ str(r3, FieldMemOperand(r2, 0));
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, r2, r3);
+
   __ mov(r0, Operand(argc));
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1893,6 +1883,11 @@
   __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, r2, r3);
+
+  __ bind(&call_count_incremented);
   __ mov(r0, Operand(argc));
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -1919,11 +1914,6 @@
   __ cmp(r4, ip);
   __ b(ne, &miss);
 
-  // Initialize the call counter.
-  __ Move(r5, Operand(Smi::FromInt(1)));
-  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
-  __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // r2 - vector
   // r3 - slot
@@ -1931,9 +1921,13 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(r2);
+    __ Push(r3);
     __ Push(cp, r1);
     __ CallStub(&create_stub);
     __ Pop(cp, r1);
+    __ Pop(r3);
+    __ Pop(r2);
   }
 
   __ jmp(&call_function);
@@ -1943,7 +1937,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ jmp(&call);
+  __ jmp(&call_count_incremented);
 }
 
 
@@ -2131,291 +2125,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  lr: return address
-  //  sp[0]: to
-  //  sp[4]: from
-  //  sp[8]: string
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length.
-  // If any of these assumptions fail, we call the runtime system.
-
-  const int kToOffset = 0 * kPointerSize;
-  const int kFromOffset = 1 * kPointerSize;
-  const int kStringOffset = 2 * kPointerSize;
-
-  __ Ldrd(r2, r3, MemOperand(sp, kToOffset));
-  STATIC_ASSERT(kFromOffset == kToOffset + 4);
-  STATIC_ASSERT(kSmiTag == 0);
-  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
-
-  // Arithmetic shift right by one un-smi-tags. In this case we rotate right
-  // instead because we bail out on non-smi values: ROR and ASR are equivalent
-  // for smis but they set the flags in a way that's easier to optimize.
-  __ mov(r2, Operand(r2, ROR, 1), SetCC);
-  __ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
-  // If either to or from had the smi tag bit set, then C is set now, and N
-  // has the same value: we rotated by 1, so the bottom bit is now the top bit.
-  // We want to bailout to runtime here if From is negative.  In that case, the
-  // next instruction is not executed and we fall through to bailing out to
-  // runtime.
-  // Executed if both r2 and r3 are untagged integers.
-  __ sub(r2, r2, Operand(r3), SetCC, cc);
-  // One of the above un-smis or the above SUB could have set N==1.
-  __ b(mi, &runtime);  // Either "from" or "to" is not an smi, or from > to.
-
-  // Make sure first argument is a string.
-  __ ldr(r0, MemOperand(sp, kStringOffset));
-  __ JumpIfSmi(r0, &runtime);
-  Condition is_string = masm->IsObjectStringType(r0, r1);
-  __ b(NegateCondition(is_string), &runtime);
-
-  Label single_char;
-  __ cmp(r2, Operand(1));
-  __ b(eq, &single_char);
-
-  // Short-cut for the case of trivial substring.
-  Label return_r0;
-  // r0: original string
-  // r2: result string length
-  __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
-  __ cmp(r2, Operand(r4, ASR, 1));
-  // Return original string.
-  __ b(eq, &return_r0);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ b(hi, &runtime);
-  // Shorter than original string's length: an actual substring.
-
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into r5.
-  // r0: original string
-  // r1: instance type
-  // r2: length
-  // r3: from index (untagged)
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ tst(r1, Operand(kIsIndirectStringMask));
-  __ b(eq, &seq_or_external_string);
-
-  __ tst(r1, Operand(kSlicedNotConsMask));
-  __ b(ne, &sliced_string);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
-  __ CompareRoot(r5, Heap::kempty_stringRootIndex);
-  __ b(ne, &runtime);
-  __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
-  // Update instance type.
-  __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
-  __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
-  __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
-  __ add(r3, r3, Operand(r4, ASR, 1));  // Add offset to index.
-  // Update instance type.
-  __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
-  __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mov(r5, r0);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // r5: underlying subject string
-    // r1: instance type of underlying subject string
-    // r2: length
-    // r3: adjusted start index (untagged)
-    __ cmp(r2, Operand(SlicedString::kMinLength));
-    // Short slice.  Copy instead of slicing.
-    __ b(lt, &copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ tst(r1, Operand(kStringEncodingMask));
-    __ b(eq, &two_byte_slice);
-    __ AllocateOneByteSlicedString(r0, r2, r6, r4, &runtime);
-    __ jmp(&set_slice_header);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(r0, r2, r6, r4, &runtime);
-    __ bind(&set_slice_header);
-    __ mov(r3, Operand(r3, LSL, 1));
-    __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
-    __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
-    __ jmp(&return_r0);
-
-    __ bind(&copy_routine);
-  }
-
-  // r5: underlying subject string
-  // r1: instance type of underlying subject string
-  // r2: length
-  // r3: adjusted start index (untagged)
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ tst(r1, Operand(kExternalStringTag));
-  __ b(eq, &sequential_string);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ tst(r1, Operand(kShortExternalStringTag));
-  __ b(ne, &runtime);
-  __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
-  // r5 already points to the first character of underlying string.
-  __ jmp(&allocate_result);
-
-  __ bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ add(r5, r5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&allocate_result);
-  // Sequential acii string.  Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ tst(r1, Operand(kStringEncodingMask));
-  __ b(eq, &two_byte_sequential);
-
-  // Allocate and copy the resulting one-byte string.
-  __ AllocateOneByteString(r0, r2, r4, r6, r1, &runtime);
-
-  // Locate first character of substring to copy.
-  __ add(r5, r5, r3);
-  // Locate first character of result.
-  __ add(r1, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  // r0: result string
-  // r1: first character of result string
-  // r2: result string length
-  // r5: first character of substring to copy
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, r1, r5, r2, r3, String::ONE_BYTE_ENCODING);
-  __ jmp(&return_r0);
-
-  // Allocate and copy the resulting two-byte string.
-  __ bind(&two_byte_sequential);
-  __ AllocateTwoByteString(r0, r2, r4, r6, r1, &runtime);
-
-  // Locate first character of substring to copy.
-  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
-  __ add(r5, r5, Operand(r3, LSL, 1));
-  // Locate first character of result.
-  __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  // r0: result string.
-  // r1: first character of result.
-  // r2: result length.
-  // r5: first character of substring to copy.
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
-
-  __ bind(&return_r0);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
-  __ Drop(3);
-  __ Ret();
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // r0: original string
-  // r1: instance type
-  // r2: length
-  // r3: from index (untagged)
-  __ SmiTag(r3, r3);
-  StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ Drop(3);
-  __ Ret();
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in r0.
-  Label is_number;
-  __ JumpIfSmi(r0, &is_number);
-
-  __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
-  // r0: receiver
-  // r1: receiver instance type
-  __ Ret(lo);
-
-  Label not_heap_number;
-  __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
-  __ b(ne, &not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ cmp(r1, Operand(ODDBALL_TYPE));
-  __ b(ne, &not_oddball);
-  __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in r0.
-  Label is_number;
-  __ JumpIfSmi(r0, &is_number);
-
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
-  // r0: receiver
-  // r1: receiver instance type
-  __ Ret(ls);
-
-  Label not_heap_number;
-  __ cmp(r1, Operand(HEAP_NUMBER_TYPE));
-  __ b(ne, &not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ cmp(r1, Operand(ODDBALL_TYPE));
-  __ b(ne, &not_oddball);
-  __ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
@@ -3275,16 +2984,6 @@
   Label need_incremental;
   Label need_incremental_pop_scratch;
 
-  __ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
-  __ ldr(regs_.scratch1(),
-         MemOperand(regs_.scratch0(),
-                    MemoryChunk::kWriteBarrierCounterOffset));
-  __ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
-  __ str(regs_.scratch1(),
-         MemOperand(regs_.scratch0(),
-                    MemoryChunk::kWriteBarrierCounterOffset));
-  __ b(mi, &need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -3712,7 +3411,7 @@
   __ ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
 
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ mov(feedback, too_far);
 
   __ add(pc, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4425,7 +4124,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
+    __ cmp(r6, Operand(kMaxRegularHeapObjectSize));
     __ b(gt, &too_big_for_new_space);
     {
       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
@@ -4763,7 +4462,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
+  __ cmp(r6, Operand(kMaxRegularHeapObjectSize));
   __ b(gt, &too_big_for_new_space);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 7580145..e63da5c 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -39,6 +39,7 @@
   Label less_4;
 
   if (CpuFeatures::IsSupported(NEON)) {
+    CpuFeatureScope scope(&masm, NEON);
     Label loop, less_256, less_128, less_64, less_32, _16_or_less, _8_or_less;
     Label size_less_than_8;
     __ pld(MemOperand(src, 0));
@@ -193,6 +194,7 @@
   Register src = r1;
   Register chars = r2;
   if (CpuFeatures::IsSupported(NEON)) {
+    CpuFeatureScope scope(&masm, NEON);
     Register temp = r3;
     Label loop;
 
diff --git a/src/arm/constants-arm.h b/src/arm/constants-arm.h
index a162051..2bade20 100644
--- a/src/arm/constants-arm.h
+++ b/src/arm/constants-arm.h
@@ -477,40 +477,42 @@
     *reinterpret_cast<Instr*>(this) = value;
   }
 
-  // Read one particular bit out of the instruction bits.
+  // Extract a single bit from the instruction bits and return it as bit 0 in
+  // the result.
   inline int Bit(int nr) const {
     return (InstructionBits() >> nr) & 1;
   }
 
-  // Read a bit field's value out of the instruction bits.
+  // Extract a bit field <hi:lo> from the instruction bits and return it in the
+  // least-significant bits of the result.
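+  // For example, Bits(7, 4) of 0xA5 yields 0xA.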
   inline int Bits(int hi, int lo) const {
     return (InstructionBits() >> lo) & ((2 << (hi - lo)) - 1);
   }
 
-  // Read a bit field out of the instruction bits.
+  // Read a bit field <hi:lo>, leaving its position unchanged in the result.
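+  // For example, BitField(7, 4) of 0xA5 yields 0xA0.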
   inline int BitField(int hi, int lo) const {
     return InstructionBits() & (((2 << (hi - lo)) - 1) << lo);
   }
 
   // Static support.
 
-  // Read one particular bit out of the instruction bits.
+  // Extract a single bit from the instruction bits and return it as bit 0 in
+  // the result.
   static inline int Bit(Instr instr, int nr) {
     return (instr >> nr) & 1;
   }
 
-  // Read the value of a bit field out of the instruction bits.
+  // Extract a bit field <hi:lo> from the instruction bits and return it in the
+  // least-significant bits of the result.
   static inline int Bits(Instr instr, int hi, int lo) {
     return (instr >> lo) & ((2 << (hi - lo)) - 1);
   }
 
-
-  // Read a bit field out of the instruction bits.
+  // Read a bit field <hi:lo>, leaving its position unchanged in the result.
   static inline int BitField(Instr instr, int hi, int lo) {
     return instr & (((2 << (hi - lo)) - 1) << lo);
   }
 
-
   // Accessors for the different named fields used in the ARM encoding.
   // The naming of these accessor corresponds to figure A3-1.
   //
@@ -525,13 +527,11 @@
 
 
   // Generally applicable fields
-  inline Condition ConditionValue() const {
-    return static_cast<Condition>(Bits(31, 28));
-  }
+  inline int ConditionValue() const { return Bits(31, 28); }
   inline Condition ConditionField() const {
     return static_cast<Condition>(BitField(31, 28));
   }
-  DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionValue);
+  DECLARE_STATIC_TYPED_ACCESSOR(int, ConditionValue);
   DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionField);
 
   inline int TypeValue() const { return Bits(27, 25); }
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index c569e66..e49fed9 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -119,14 +119,20 @@
   DCHECK(kDoubleRegZero.code() == 14);
   DCHECK(kScratchDoubleReg.code() == 15);
 
-  // Check CPU flags for number of registers, setting the Z condition flag.
-  __ CheckFor32DRegs(ip);
+  {
+    // We use a run-time check for VFP32DREGS.
+    CpuFeatureScope scope(masm(), VFP32DREGS,
+                          CpuFeatureScope::kDontCheckSupported);
 
-  // Push registers d0-d15, and possibly d16-d31, on the stack.
-  // If d16-d31 are not pushed, decrease the stack pointer instead.
-  __ vstm(db_w, sp, d16, d31, ne);
-  __ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq);
-  __ vstm(db_w, sp, d0, d15);
+    // Check CPU flags for number of registers, setting the Z condition flag.
+    __ CheckFor32DRegs(ip);
+
+    // Push registers d0-d15, and possibly d16-d31, on the stack.
+    // If d16-d31 are not pushed, decrease the stack pointer instead.
+    __ vstm(db_w, sp, d16, d31, ne);
+    __ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq);
+    __ vstm(db_w, sp, d0, d15);
+  }
 
   // Push all 16 registers (needed to populate FrameDescription::registers_).
   // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
@@ -259,9 +265,6 @@
   __ cmp(r4, r1);
   __ b(lt, &outer_push_loop);
 
-  // Check CPU flags for number of registers, setting the Z condition flag.
-  __ CheckFor32DRegs(ip);
-
   __ ldr(r1, MemOperand(r0, Deoptimizer::input_offset()));
   for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
     int code = config->GetAllocatableDoubleCode(i);
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index 1e1c75d..e408e85 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -105,6 +105,8 @@
   void DecodeType6(Instruction* instr);
   // Type 7 includes special Debugger instructions.
   int DecodeType7(Instruction* instr);
+  // CP15 coprocessor instructions.
+  void DecodeTypeCP15(Instruction* instr);
   // For VFP support.
   void DecodeTypeVFP(Instruction* instr);
   void DecodeType6CoprocessorIns(Instruction* instr);
@@ -1279,18 +1281,16 @@
           break;
         }
       }
-      if (FLAG_enable_sudiv) {
-        if (instr->Bits(5, 4) == 0x1) {
-          if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
-            if (instr->Bit(21) == 0x1) {
-              // UDIV (in V8 notation matching ARM ISA format) rn = rm/rs
-              Format(instr, "udiv'cond'b 'rn, 'rm, 'rs");
-            } else {
-              // SDIV (in V8 notation matching ARM ISA format) rn = rm/rs
-              Format(instr, "sdiv'cond'b 'rn, 'rm, 'rs");
-            }
-            break;
+      if (instr->Bits(5, 4) == 0x1) {
+        if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
+          if (instr->Bit(21) == 0x1) {
+            // UDIV (in V8 notation matching ARM ISA format) rn = rm/rs
+            Format(instr, "udiv'cond'b 'rn, 'rm, 'rs");
+          } else {
+            // SDIV (in V8 notation matching ARM ISA format) rn = rm/rs
+            Format(instr, "sdiv'cond'b 'rn, 'rm, 'rs");
           }
+          break;
         }
       }
       Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
@@ -1374,7 +1374,18 @@
       Format(instr, "svc'cond 'svc");
     }
   } else {
-    DecodeTypeVFP(instr);
+    switch (instr->CoprocessorValue()) {
+      case 10:  // Fall through.
+      case 11:
+        DecodeTypeVFP(instr);
+        break;
+      case 15:
+        DecodeTypeCP15(instr);
+        break;
+      default:
+        Unknown(instr);
+        break;
+    }
   }
   return Instruction::kInstrSize;
 }
@@ -1556,6 +1567,34 @@
   }
 }
 
+void Decoder::DecodeTypeCP15(Instruction* instr) {
+  VERIFY((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0));
+  VERIFY(instr->CoprocessorValue() == 15);
+
+  if (instr->Bit(4) == 1) {
+    int crn = instr->Bits(19, 16);
+    int crm = instr->Bits(3, 0);
+    int opc1 = instr->Bits(23, 21);
+    int opc2 = instr->Bits(7, 5);
+    if ((opc1 == 0) && (crn == 7)) {
+      // ARMv6 memory barrier operations.
+      // Details available in ARM DDI 0406C.b, B3-1750.
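+      // For example, "mcr p15, 0, rt, c7, c10, 5" is the CP15 encoding of DMB.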
+      if ((crm == 10) && (opc2 == 5)) {
+        Format(instr, "mcr'cond (CP15DMB)");
+      } else if ((crm == 10) && (opc2 == 4)) {
+        Format(instr, "mcr'cond (CP15DSB)");
+      } else if ((crm == 5) && (opc2 == 4)) {
+        Format(instr, "mcr'cond (CP15ISB)");
+      } else {
+        Unknown(instr);
+      }
+    } else {
+      Unknown(instr);
+    }
+  } else {
+    Unknown(instr);
+  }
+}
 
 void Decoder::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
     Instruction* instr) {
@@ -1786,6 +1825,13 @@
         int imm3 = instr->Bits(21, 19);
         out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
                                     "vmovl.u%d q%d, d%d", imm3*8, Vd, Vm);
+      } else if ((instr->Bits(21, 16) == 0x32) && (instr->Bits(11, 7) == 0) &&
+                 (instr->Bit(4) == 0)) {
+        int Vd = instr->VFPDRegValue(kDoublePrecision);
+        int Vm = instr->VFPMRegValue(kDoublePrecision);
+        char rtype = (instr->Bit(6) == 0) ? 'd' : 'q';
+        out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
+                                    "vswp %c%d, %c%d", rtype, Vd, rtype, Vm);
       } else {
         Unknown(instr);
       }
@@ -1898,6 +1944,22 @@
             UNREACHABLE();  // Case analysis is exhaustive.
             break;
         }
+      } else if ((instr->Opc1Value() == 0x4) && (instr->Bits(11, 9) == 0x5) &&
+                 (instr->Bit(4) == 0x0)) {
+        // VMAXNM, VMINNM (floating-point)
+        if (instr->SzValue() == 0x1) {
+          if (instr->Bit(6) == 0x1) {
+            Format(instr, "vminnm.f64 'Dd, 'Dn, 'Dm");
+          } else {
+            Format(instr, "vmaxnm.f64 'Dd, 'Dn, 'Dm");
+          }
+        } else {
+          if (instr->Bit(6) == 0x1) {
+            Format(instr, "vminnm.f32 'Sd, 'Sn, 'Sm");
+          } else {
+            Format(instr, "vmaxnm.f32 'Sd, 'Sn, 'Sm");
+          }
+        }
       } else {
         Unknown(instr);
       }
diff --git a/src/arm/interface-descriptors-arm.cc b/src/arm/interface-descriptors-arm.cc
index d26804a..a002b8d 100644
--- a/src/arm/interface-descriptors-arm.cc
+++ b/src/arm/interface-descriptors-arm.cc
@@ -42,13 +42,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return r3; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return r4; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return r3; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return r5; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return r3; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return r4; }
+const Register StoreTransitionDescriptor::VectorRegister() { return r3; }
+const Register StoreTransitionDescriptor::MapRegister() { return r5; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; }
@@ -375,7 +371,7 @@
                                    &default_descriptor);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   static PlatformInterfaceDescriptor default_descriptor =
       PlatformInterfaceDescriptor(CAN_INLINE_TARGET_ADDRESS);
@@ -414,7 +410,19 @@
       r0,  // argument count (not including receiver)
       r3,  // new target
       r1,  // constructor to call
-      r2   // address of the first argument
+      r2,  // allocation site feedback if available, undefined otherwise
+      r4   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      r0,  // argument count (not including receiver)
+      r1,  // target to call checked to be Array function
+      r2,  // allocation site feedback if available, undefined otherwise
+      r3   // address of the first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index a08673d..00f8ab5 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -250,15 +250,17 @@
   }
 }
 
-void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src) {
+void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src,
+                          Condition cond) {
   if (!dst.is(src)) {
-    vmov(dst, src);
+    vmov(dst, src, cond);
   }
 }
 
-void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
+void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src,
+                          Condition cond) {
   if (!dst.is(src)) {
-    vmov(dst, src);
+    vmov(dst, src, cond);
   }
 }
 
@@ -285,6 +287,7 @@
              !src2.must_output_reloc_info(this) &&
              CpuFeatures::IsSupported(ARMv7) &&
              base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
+    CpuFeatureScope scope(this, ARMv7);
     ubfx(dst, src1, 0,
         WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
   } else {
@@ -303,6 +306,7 @@
       mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
     }
   } else {
+    CpuFeatureScope scope(this, ARMv7);
     ubfx(dst, src1, lsb, width, cond);
   }
 }
@@ -323,6 +327,7 @@
       mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
     }
   } else {
+    CpuFeatureScope scope(this, ARMv7);
     sbfx(dst, src1, lsb, width, cond);
   }
 }
@@ -346,6 +351,7 @@
     mov(scratch, Operand(scratch, LSL, lsb));
     orr(dst, dst, scratch);
   } else {
+    CpuFeatureScope scope(this, ARMv7);
     bfi(dst, src, lsb, width, cond);
   }
 }
@@ -358,6 +364,7 @@
     int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
     bic(dst, src, Operand(mask));
   } else {
+    CpuFeatureScope scope(this, ARMv7);
     Move(dst, src, cond);
     bfc(dst, lsb, width, cond);
   }
@@ -404,15 +411,6 @@
 void MacroAssembler::LoadRoot(Register destination,
                               Heap::RootListIndex index,
                               Condition cond) {
-  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
-      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
-      !predictable_code_size()) {
-    // The CPU supports fast immediate values, and this root will never
-    // change. We will load it as a relocatable immediate value.
-    Handle<Object> root = isolate()->heap()->root_handle(index);
-    mov(destination, Operand(root), LeaveCC, cond);
-    return;
-  }
   ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
 }
 
@@ -430,9 +428,7 @@
                                 Condition cond,
                                 Label* branch) {
   DCHECK(cond == eq || cond == ne);
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cond, branch);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cond, branch);
 }
 
 
@@ -1054,6 +1050,7 @@
     vmov(dst, VmovIndexLo, src);
   }
 }
+
 void MacroAssembler::LslPair(Register dst_low, Register dst_high,
                              Register src_low, Register src_high,
                              Register scratch, Register shift) {
@@ -1971,7 +1968,7 @@
                               Register scratch2,
                               Label* gc_required,
                               AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
@@ -2049,7 +2046,6 @@
   // point, so we cannot just use add().
   DCHECK(object_size > 0);
   Register source = result;
-  Condition cond = al;
   int shift = 0;
   while (object_size != 0) {
     if (((object_size >> shift) & 0x03) == 0) {
@@ -2060,9 +2056,8 @@
       shift += 8;
       Operand bits_operand(bits);
       DCHECK(bits_operand.instructions_required(this) == 1);
-      add(result_end, source, bits_operand, LeaveCC, cond);
+      add(result_end, source, bits_operand);
       source = result_end;
-      cond = cc;
     }
   }
 
@@ -2226,7 +2221,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK(!AreAliased(result, scratch1, scratch2, ip));
 
   // Make object size into bytes.
@@ -2261,7 +2256,6 @@
   // this point, so we cannot just use add().
   DCHECK(object_size > 0);
   Register source = result;
-  Condition cond = al;
   int shift = 0;
   while (object_size != 0) {
     if (((object_size >> shift) & 0x03) == 0) {
@@ -2272,9 +2266,8 @@
       shift += 8;
       Operand bits_operand(bits);
       DCHECK(bits_operand.instructions_required(this) == 1);
-      add(result_end, source, bits_operand, LeaveCC, cond);
+      add(result_end, source, bits_operand);
       source = result_end;
-      cond = cc;
     }
   }
 
@@ -2650,7 +2643,8 @@
 
 
 void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
-  if (CpuFeatures::IsSupported(VFP3)) {
+  if (CpuFeatures::IsSupported(VFPv3)) {
+    CpuFeatureScope scope(this, VFPv3);
     vmov(value.low(), smi);
     vcvt_f64_s32(value, 1);
   } else {
@@ -2807,6 +2801,7 @@
                                          Register src,
                                          int num_least_bits) {
   if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
+    CpuFeatureScope scope(this, ARMv7);
     ubfx(dst, src, kSmiTagSize, num_least_bits);
   } else {
     SmiUntag(dst, src);
@@ -3416,6 +3411,7 @@
 
 
 void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
+  CpuFeatureScope scope(this, VFP32DREGS, CpuFeatureScope::kDontCheckSupported);
   CheckFor32DRegs(scratch);
   vstm(db_w, location, d16, d31, ne);
   sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
@@ -3424,12 +3420,151 @@
 
 
 void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
+  CpuFeatureScope scope(this, VFP32DREGS, CpuFeatureScope::kDontCheckSupported);
   CheckFor32DRegs(scratch);
   vldm(ia_w, location, d0, d15);
   vldm(ia_w, location, d16, d31, ne);
   add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
 }
 
+template <typename T>
+void MacroAssembler::FloatMaxHelper(T result, T left, T right,
+                                    Label* out_of_line) {
+  // This trivial case is caught sooner, so that the out-of-line code can be
+  // completely avoided.
+  DCHECK(!left.is(right));
+
+  if (CpuFeatures::IsSupported(ARMv8)) {
+    CpuFeatureScope scope(this, ARMv8);
+    VFPCompareAndSetFlags(left, right);
+    b(vs, out_of_line);
+    vmaxnm(result, left, right);
+  } else {
+    Label done;
+    VFPCompareAndSetFlags(left, right);
+    b(vs, out_of_line);
+    // Avoid a conditional instruction if the result register is unique.
+    bool aliased_result_reg = result.is(left) || result.is(right);
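+    // Here mi means left < right and gt means left > right (NaNs were
+    // handled above).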
+    Move(result, right, aliased_result_reg ? mi : al);
+    Move(result, left, gt);
+    b(ne, &done);
+    // Left and right are equal, but check for +/-0.
+    VFPCompareAndSetFlags(left, 0.0);
+    b(eq, out_of_line);
+    // The arguments are equal and not zero, so it doesn't matter which input we
+    // pick. We have already moved one input into the result (if it didn't
+    // already alias) so there's nothing more to do.
+    bind(&done);
+  }
+}
+
+template <typename T>
+void MacroAssembler::FloatMaxOutOfLineHelper(T result, T left, T right) {
+  DCHECK(!left.is(right));
+
+  // ARMv8: At least one of left and right is a NaN.
+  // Anything else: At least one of left and right is a NaN, or both left and
+  // right are zeroes with unknown sign.
+
+  // If left and right are +/-0, select the one with the most positive sign.
+  // If left or right are NaN, vadd propagates the appropriate one.
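+  // For example, max(+0, -0) reaches here and (+0) + (-0) = +0, as required.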
+  vadd(result, left, right);
+}
+
+template <typename T>
+void MacroAssembler::FloatMinHelper(T result, T left, T right,
+                                    Label* out_of_line) {
+  // This trivial case is caught sooner, so that the out-of-line code can be
+  // completely avoided.
+  DCHECK(!left.is(right));
+
+  if (CpuFeatures::IsSupported(ARMv8)) {
+    CpuFeatureScope scope(this, ARMv8);
+    VFPCompareAndSetFlags(left, right);
+    b(vs, out_of_line);
+    vminnm(result, left, right);
+  } else {
+    Label done;
+    VFPCompareAndSetFlags(left, right);
+    b(vs, out_of_line);
+    // Avoid a conditional instruction if the result register is unique.
+    bool aliased_result_reg = result.is(left) || result.is(right);
+    Move(result, left, aliased_result_reg ? mi : al);
+    Move(result, right, gt);
+    b(ne, &done);
+    // Left and right are equal, but check for +/-0.
+    VFPCompareAndSetFlags(left, 0.0);
+    // If the arguments are equal and not zero, it doesn't matter which input we
+    // pick. We have already moved one input into the result (if it didn't
+    // already alias) so there's nothing more to do.
+    b(ne, &done);
+    // At this point, both left and right are either 0 or -0.
+    // We could use a single 'vorr' instruction here if we had NEON support.
+    // The algorithm used is -((-L) + (-R)), which is most efficiently expressed
+    // as -((-L) - R).
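+    // For example, min(+0, -0): -((-0) - (-0)) = -(+0) = -0, as required.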
+    if (left.is(result)) {
+      DCHECK(!right.is(result));
+      vneg(result, left);
+      vsub(result, result, right);
+      vneg(result, result);
+    } else {
+      DCHECK(!left.is(result));
+      vneg(result, right);
+      vsub(result, result, left);
+      vneg(result, result);
+    }
+    bind(&done);
+  }
+}
+
+template <typename T>
+void MacroAssembler::FloatMinOutOfLineHelper(T result, T left, T right) {
+  DCHECK(!left.is(right));
+
+  // At least one of left and right is a NaN. Use vadd to propagate the NaN
+  // appropriately. +/-0 is handled inline.
+  vadd(result, left, right);
+}
+
+void MacroAssembler::FloatMax(SwVfpRegister result, SwVfpRegister left,
+                              SwVfpRegister right, Label* out_of_line) {
+  FloatMaxHelper(result, left, right, out_of_line);
+}
+
+void MacroAssembler::FloatMin(SwVfpRegister result, SwVfpRegister left,
+                              SwVfpRegister right, Label* out_of_line) {
+  FloatMinHelper(result, left, right, out_of_line);
+}
+
+void MacroAssembler::FloatMax(DwVfpRegister result, DwVfpRegister left,
+                              DwVfpRegister right, Label* out_of_line) {
+  FloatMaxHelper(result, left, right, out_of_line);
+}
+
+void MacroAssembler::FloatMin(DwVfpRegister result, DwVfpRegister left,
+                              DwVfpRegister right, Label* out_of_line) {
+  FloatMinHelper(result, left, right, out_of_line);
+}
+
+void MacroAssembler::FloatMaxOutOfLine(SwVfpRegister result, SwVfpRegister left,
+                                       SwVfpRegister right) {
+  FloatMaxOutOfLineHelper(result, left, right);
+}
+
+void MacroAssembler::FloatMinOutOfLine(SwVfpRegister result, SwVfpRegister left,
+                                       SwVfpRegister right) {
+  FloatMinOutOfLineHelper(result, left, right);
+}
+
+void MacroAssembler::FloatMaxOutOfLine(DwVfpRegister result, DwVfpRegister left,
+                                       DwVfpRegister right) {
+  FloatMaxOutOfLineHelper(result, left, right);
+}
+
+void MacroAssembler::FloatMinOutOfLine(DwVfpRegister result, DwVfpRegister left,
+                                       DwVfpRegister right) {
+  FloatMinOutOfLineHelper(result, left, right);
+}
 
 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
     Register first, Register second, Register scratch1, Register scratch2,
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 2f1b3c2..d524d84 100644
--- a/src/arm/macro-assembler-arm.h
+++ b/src/arm/macro-assembler-arm.h
@@ -123,6 +123,18 @@
   void CallDeoptimizer(Address target);
   static int CallDeoptimizerSize();
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack
+  // into |reg|, according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count, Condition cond = al);
@@ -170,8 +182,8 @@
       mov(dst, src, sbit, cond);
     }
   }
-  void Move(SwVfpRegister dst, SwVfpRegister src);
-  void Move(DwVfpRegister dst, DwVfpRegister src);
+  void Move(SwVfpRegister dst, SwVfpRegister src, Condition cond = al);
+  void Move(DwVfpRegister dst, DwVfpRegister src, Condition cond = al);
 
   void Load(Register dst, const MemOperand& src, Representation r);
   void Store(Register src, const MemOperand& dst, Representation r);
@@ -1082,6 +1094,32 @@
   // values to location, restoring [d0..(d15|d31)].
   void RestoreFPRegs(Register location, Register scratch);
 
+  // Perform a floating-point min or max operation with the
+  // (IEEE-754-compatible) semantics of ARM64's fmin/fmax. Some cases, typically
+  // NaNs or +/-0.0, are expected to be rare and are handled in out-of-line
+  // code. The specific behaviour depends on supported instructions.
+  //
+  // These functions assume (and assert) that !left.is(right). It is permitted
+  // for the result to alias either input register.
+  void FloatMax(SwVfpRegister result, SwVfpRegister left, SwVfpRegister right,
+                Label* out_of_line);
+  void FloatMin(SwVfpRegister result, SwVfpRegister left, SwVfpRegister right,
+                Label* out_of_line);
+  void FloatMax(DwVfpRegister result, DwVfpRegister left, DwVfpRegister right,
+                Label* out_of_line);
+  void FloatMin(DwVfpRegister result, DwVfpRegister left, DwVfpRegister right,
+                Label* out_of_line);
+
+  // Generate out-of-line cases for the macros above.
+  void FloatMaxOutOfLine(SwVfpRegister result, SwVfpRegister left,
+                         SwVfpRegister right);
+  void FloatMinOutOfLine(SwVfpRegister result, SwVfpRegister left,
+                         SwVfpRegister right);
+  void FloatMaxOutOfLine(DwVfpRegister result, DwVfpRegister left,
+                         DwVfpRegister right);
+  void FloatMinOutOfLine(DwVfpRegister result, DwVfpRegister left,
+                         DwVfpRegister right);
+
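A hypothetical call-site sketch (the label wiring and register choices are assumptions, not prescribed by this header) of how the inline and out-of-line halves pair up:

// Sketch only: the fast path falls through; NaNs (and +/-0 on pre-ARMv8)
// branch to the out-of-line block.
Label out_of_line, done;
FloatMax(d0, d1, d2, &out_of_line);
b(&done);
bind(&out_of_line);
FloatMaxOutOfLine(d0, d1, d2);
bind(&done);
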
   // ---------------------------------------------------------------------------
   // Runtime calls
 
@@ -1513,6 +1551,16 @@
   MemOperand SafepointRegisterSlot(Register reg);
   MemOperand SafepointRegistersAndDoublesSlot(Register reg);
 
+  // Implementation helpers for FloatMin and FloatMax.
+  template <typename T>
+  void FloatMaxHelper(T result, T left, T right, Label* out_of_line);
+  template <typename T>
+  void FloatMinHelper(T result, T left, T right, Label* out_of_line);
+  template <typename T>
+  void FloatMaxOutOfLineHelper(T result, T left, T right);
+  template <typename T>
+  void FloatMinOutOfLineHelper(T result, T left, T right);
+
   bool generating_stub_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
diff --git a/src/arm/simulator-arm.cc b/src/arm/simulator-arm.cc
index cfcc5b1..331a7e9 100644
--- a/src/arm/simulator-arm.cc
+++ b/src/arm/simulator-arm.cc
@@ -575,8 +575,8 @@
   last_debugger_input_ = input;
 }
 
-void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
-                            size_t size) {
+void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
+                            void* start_addr, size_t size) {
   intptr_t start = reinterpret_cast<intptr_t>(start_addr);
   int intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -596,7 +596,8 @@
   }
 }
 
-CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
+CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                   void* page) {
   base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
@@ -607,7 +608,8 @@
 
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
+void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
+                             intptr_t start, int size) {
   DCHECK(size <= CachePage::kPageSize);
   DCHECK(AllOnOnePage(start, size - 1));
   DCHECK((start & CachePage::kLineMask) == 0);
@@ -619,7 +621,8 @@
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
-void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
+void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
+                            Instruction* instr) {
   intptr_t address = reinterpret_cast<intptr_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@@ -652,7 +655,7 @@
 Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
   i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
-    i_cache_ = new base::HashMap(&ICacheMatch);
+    i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
     isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize(isolate);
@@ -783,7 +786,8 @@
 
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
   if (i_cache != nullptr) {
     for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
@@ -2886,26 +2890,24 @@
           return;
         }
       }
-      if (FLAG_enable_sudiv) {
-        if (instr->Bits(5, 4) == 0x1) {
-          if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
-            // (s/u)div (in V8 notation matching ARM ISA format) rn = rm/rs
-            // Format(instr, "'(s/u)div'cond'b 'rn, 'rm, 'rs);
-            int rm = instr->RmValue();
-            int32_t rm_val = get_register(rm);
-            int rs = instr->RsValue();
-            int32_t rs_val = get_register(rs);
-            int32_t ret_val = 0;
-            // udiv
-            if (instr->Bit(21) == 0x1) {
-              ret_val = bit_cast<int32_t>(base::bits::UnsignedDiv32(
-                  bit_cast<uint32_t>(rm_val), bit_cast<uint32_t>(rs_val)));
-            } else {
-              ret_val = base::bits::SignedDiv32(rm_val, rs_val);
-            }
-            set_register(rn, ret_val);
-            return;
+      if (instr->Bits(5, 4) == 0x1) {
+        if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
+          // (s/u)div (in V8 notation matching ARM ISA format) rn = rm/rs
+          // Format(instr, "'(s/u)div'cond'b 'rn, 'rm, 'rs);
+          int rm = instr->RmValue();
+          int32_t rm_val = get_register(rm);
+          int rs = instr->RsValue();
+          int32_t rs_val = get_register(rs);
+          int32_t ret_val = 0;
+          // udiv
+          if (instr->Bit(21) == 0x1) {
+            ret_val = bit_cast<int32_t>(base::bits::UnsignedDiv32(
+                bit_cast<uint32_t>(rm_val), bit_cast<uint32_t>(rs_val)));
+          } else {
+            ret_val = base::bits::SignedDiv32(rm_val, rs_val);
           }
+          set_register(rn, ret_val);
+          return;
         }
       }
       // Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
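For reference, the sdiv/udiv results computed above follow ARM's integer-division rules; a standalone sketch (assumed reference, not V8's actual base::bits implementation) of the signed case:

#include <cstdint>
#include <limits>

// ARM sdiv semantics: division by zero yields 0, and INT32_MIN / -1 wraps
// back to INT32_MIN instead of trapping.
int32_t ArmSignedDiv(int32_t lhs, int32_t rhs) {
  if (rhs == 0) return 0;
  if (lhs == std::numeric_limits<int32_t>::min() && rhs == -1) return lhs;
  return lhs / rhs;
}
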
@@ -3026,7 +3028,17 @@
   if (instr->Bit(24) == 1) {
     SoftwareInterrupt(instr);
   } else {
-    DecodeTypeVFP(instr);
+    switch (instr->CoprocessorValue()) {
+      case 10:  // Fall through.
+      case 11:
+        DecodeTypeVFP(instr);
+        break;
+      case 15:
+        DecodeTypeCP15(instr);
+        break;
+      default:
+        UNIMPLEMENTED();
+    }
   }
 }
 
@@ -3335,6 +3347,31 @@
   }
 }
 
+void Simulator::DecodeTypeCP15(Instruction* instr) {
+  DCHECK((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0));
+  DCHECK(instr->CoprocessorValue() == 15);
+
+  if (instr->Bit(4) == 1) {
+    // mcr
+    int crn = instr->Bits(19, 16);
+    int crm = instr->Bits(3, 0);
+    int opc1 = instr->Bits(23, 21);
+    int opc2 = instr->Bits(7, 5);
+    if ((opc1 == 0) && (crn == 7)) {
+      // ARMv6 memory barrier operations.
+      // Details available in ARM DDI 0406C.b, B3-1750.
+      if (((crm == 10) && (opc2 == 5)) ||  // CP15DMB
+          ((crm == 10) && (opc2 == 4)) ||  // CP15DSB
+          ((crm == 5) && (opc2 == 4))) {   // CP15ISB
+        // These are ignored by the simulator for now.
+      } else {
+        UNIMPLEMENTED();
+      }
+    }
+  } else {
+    UNIMPLEMENTED();
+  }
+}
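A standalone sketch (hypothetical helper, not part of the patch) of the crn/crm/opc1/opc2 test above, annotated with the corresponding ARMv6 mcr barrier forms:

// Recognizes the three ARMv6 CP15 barrier encodings the simulator ignores.
bool IsCP15Barrier(int crn, int crm, int opc1, int opc2) {
  if (opc1 != 0 || crn != 7) return false;
  return (crm == 10 && opc2 == 5) ||  // CP15DMB: mcr p15, 0, <Rt>, c7, c10, 5
         (crm == 10 && opc2 == 4) ||  // CP15DSB: mcr p15, 0, <Rt>, c7, c10, 4
         (crm == 5 && opc2 == 4);     // CP15ISB: mcr p15, 0, <Rt>, c7, c5, 4
}
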
 
 void Simulator::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
     Instruction* instr) {
@@ -3750,6 +3787,21 @@
           e++;
         }
         set_q_register(Vd, reinterpret_cast<uint64_t*>(to));
+      } else if ((instr->Bits(21, 16) == 0x32) && (instr->Bits(11, 7) == 0) &&
+                 (instr->Bit(4) == 0)) {
+        int vd = instr->VFPDRegValue(kDoublePrecision);
+        int vm = instr->VFPMRegValue(kDoublePrecision);
+        if (instr->Bit(6) == 0) {
+          // vswp Dd, Dm.
+          uint64_t dval, mval;
+          get_d_register(vd, &dval);
+          get_d_register(vm, &mval);
+          set_d_register(vm, &dval);
+          set_d_register(vd, &mval);
+        } else {
+          // Q register vswp unimplemented.
+          UNIMPLEMENTED();
+        }
       } else {
         UNIMPLEMENTED();
       }
@@ -3848,6 +3900,7 @@
       } else if (instr->SpecialValue() == 0xA && instr->Bits(22, 20) == 7) {
         // dsb, dmb, isb: ignore instruction for now.
         // TODO(binji): implement
+        // Also refer to the ARMv6 CP15 equivalents in DecodeTypeCP15.
       } else {
         UNIMPLEMENTED();
       }
@@ -3908,6 +3961,69 @@
           sd_value = canonicalizeNaN(sd_value);
           set_s_register_from_float(d, sd_value);
         }
+      } else if ((instr->Opc1Value() == 0x4) && (instr->Bits(11, 9) == 0x5) &&
+                 (instr->Bit(4) == 0x0)) {
+        if (instr->SzValue() == 0x1) {
+          int m = instr->VFPMRegValue(kDoublePrecision);
+          int n = instr->VFPNRegValue(kDoublePrecision);
+          int d = instr->VFPDRegValue(kDoublePrecision);
+          double dn_value = get_double_from_d_register(n);
+          double dm_value = get_double_from_d_register(m);
+          double dd_value;
+          if (instr->Bit(6) == 0x1) {  // vminnm
+            if ((dn_value < dm_value) || std::isnan(dm_value)) {
+              dd_value = dn_value;
+            } else if ((dm_value < dn_value) || std::isnan(dn_value)) {
+              dd_value = dm_value;
+            } else {
+              DCHECK_EQ(dn_value, dm_value);
+              // Make sure that we pick the most negative sign for +/-0.
+              dd_value = std::signbit(dn_value) ? dn_value : dm_value;
+            }
+          } else {  // vmaxnm
+            if ((dn_value > dm_value) || std::isnan(dm_value)) {
+              dd_value = dn_value;
+            } else if ((dm_value > dn_value) || std::isnan(dn_value)) {
+              dd_value = dm_value;
+            } else {
+              DCHECK_EQ(dn_value, dm_value);
+              // Make sure that we pick the most positive sign for +/-0.
+              dd_value = std::signbit(dn_value) ? dm_value : dn_value;
+            }
+          }
+          dd_value = canonicalizeNaN(dd_value);
+          set_d_register_from_double(d, dd_value);
+        } else {
+          int m = instr->VFPMRegValue(kSinglePrecision);
+          int n = instr->VFPNRegValue(kSinglePrecision);
+          int d = instr->VFPDRegValue(kSinglePrecision);
+          float sn_value = get_float_from_s_register(n);
+          float sm_value = get_float_from_s_register(m);
+          float sd_value;
+          if (instr->Bit(6) == 0x1) {  // vminnm
+            if ((sn_value < sm_value) || std::isnan(sm_value)) {
+              sd_value = sn_value;
+            } else if ((sm_value < sn_value) || std::isnan(sn_value)) {
+              sd_value = sm_value;
+            } else {
+              DCHECK_EQ(sn_value, sm_value);
+              // Make sure that we pick the most negative sign for +/-0.
+              sd_value = std::signbit(sn_value) ? sn_value : sm_value;
+            }
+          } else {  // vmaxnm
+            if ((sn_value > sm_value) || std::isnan(sm_value)) {
+              sd_value = sn_value;
+            } else if ((sm_value > sn_value) || std::isnan(sn_value)) {
+              sd_value = sm_value;
+            } else {
+              DCHECK_EQ(sn_value, sm_value);
+              // Make sure that we pick the most positive sign for +/-0.
+              sd_value = std::signbit(sn_value) ? sm_value : sn_value;
+            }
+          }
+          sd_value = canonicalizeNaN(sd_value);
+          set_s_register_from_float(d, sd_value);
+        }
       } else {
         UNIMPLEMENTED();
       }
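The vminnm emulation above reduces to a simple scalar rule; a compact reference sketch (plain C++, mirroring the simulator's logic rather than the full ARMv8 specification):

#include <cmath>

// A lone NaN operand is ignored in favour of the numeric one; for equal
// operands the more negative zero wins (vmaxnm mirrors this with > and the
// more positive zero).
double MinNM(double n, double m) {
  if (n < m || std::isnan(m)) return n;
  if (m < n || std::isnan(n)) return m;
  return std::signbit(n) ? n : m;  // n == m here.
}
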
diff --git a/src/arm/simulator-arm.h b/src/arm/simulator-arm.h
index 71b8e40..7435b77 100644
--- a/src/arm/simulator-arm.h
+++ b/src/arm/simulator-arm.h
@@ -200,7 +200,7 @@
   // Call on program start.
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   // V8 generally calls into generated JS code with 5 parameters and into
   // generated RegExp code with 7 parameters. This is a convenience function,
@@ -222,7 +222,8 @@
   char* last_debugger_input() { return last_debugger_input_; }
 
   // ICache checking.
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_lr, end_sim_pc).
@@ -327,6 +328,9 @@
   void DecodeType6(Instruction* instr);
   void DecodeType7(Instruction* instr);
 
+  // CP15 coprocessor instructions.
+  void DecodeTypeCP15(Instruction* instr);
+
   // Support for VFP.
   void DecodeTypeVFP(Instruction* instr);
   void DecodeType6CoprocessorIns(Instruction* instr);
@@ -341,9 +345,12 @@
   void InstructionDecode(Instruction* instr);
 
   // ICache.
-  static void CheckICache(base::HashMap* i_cache, Instruction* instr);
-  static void FlushOnePage(base::HashMap* i_cache, intptr_t start, int size);
-  static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
+  static void CheckICache(base::CustomMatcherHashMap* i_cache,
+                          Instruction* instr);
+  static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
+                           int size);
+  static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                 void* page);
 
   // Runtime call support.
   static void* RedirectExternalReference(
@@ -403,7 +410,7 @@
   char* last_debugger_input_;
 
   // Icache simulation
-  base::HashMap* i_cache_;
+  base::CustomMatcherHashMap* i_cache_;
 
   // Registered breakpoints.
   Instruction* break_pc_;
diff --git a/src/arm64/OWNERS b/src/arm64/OWNERS
deleted file mode 100644
index 906a5ce..0000000
--- a/src/arm64/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-rmcilroy@chromium.org
diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
index 5f103bc..ca5ea80 100644
--- a/src/arm64/code-stubs-arm64.cc
+++ b/src/arm64/code-stubs-arm64.cc
@@ -1089,6 +1089,7 @@
   __ Ldr(cp, MemOperand(cp));
   __ Mov(jssp, Operand(pending_handler_sp_address));
   __ Ldr(jssp, MemOperand(jssp));
+  __ Mov(csp, jssp);
   __ Mov(fp, Operand(pending_handler_fp_address));
   __ Ldr(fp, MemOperand(fp));
 
@@ -1845,7 +1846,6 @@
   //  feedback_vector : the feedback vector
   //  index :           slot in feedback vector (smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1868,7 +1868,7 @@
   Label check_allocation_site;
   __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset));
   __ Cmp(function, feedback_value);
-  __ B(eq, &done_increment_count);
+  __ B(eq, &done);
   __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
   __ B(eq, &done);
   __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset));
@@ -1890,7 +1890,7 @@
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
   __ Cmp(function, scratch1);
   __ B(ne, &megamorphic);
-  __ B(&done_increment_count);
+  __ B(&done);
 
   __ Bind(&miss);
 
@@ -1921,33 +1921,22 @@
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub, argc, function,
                              feedback_vector, index, new_target);
-  __ B(&done_initialize_count);
+  __ B(&done);
 
   __ Bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
                              feedback_vector, index, new_target);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ Mov(scratch1, Operand(Smi::FromInt(1)));
-  __ Adds(scratch2, feedback_vector,
-          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Str(scratch1,
-         FieldMemOperand(scratch2, FixedArray::kHeaderSize + kPointerSize));
-  __ b(&done);
+  __ Bind(&done);
 
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ Add(scratch1, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Add(scratch1, scratch1, Operand(FixedArray::kHeaderSize + kPointerSize));
   __ Ldr(scratch2, FieldMemOperand(scratch1, 0));
   __ Add(scratch2, scratch2, Operand(Smi::FromInt(1)));
   __ Str(scratch2, FieldMemOperand(scratch1, 0));
-
-  __ Bind(&done);
 }
 
 
@@ -1995,6 +1984,17 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ Add(feedback_vector, feedback_vector,
+         Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
+  __ Add(feedback_vector, feedback_vector,
+         Operand(FixedArray::kHeaderSize + kPointerSize));
+  __ Ldr(slot, FieldMemOperand(feedback_vector, 0));
+  __ Add(slot, slot, Operand(Smi::FromInt(1)));
+  __ Str(slot, FieldMemOperand(feedback_vector, 0));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // x1 - function
@@ -2014,13 +2014,7 @@
   __ Mov(x0, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  __ Add(feedback_vector, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Add(feedback_vector, feedback_vector,
-         Operand(FixedArray::kHeaderSize + kPointerSize));
-  __ Ldr(index, FieldMemOperand(feedback_vector, 0));
-  __ Add(index, index, Operand(Smi::FromInt(1)));
-  __ Str(index, FieldMemOperand(feedback_vector, 0));
+  IncrementCallCount(masm, feedback_vector, index);
 
   // Set up arguments for the array constructor stub.
   Register allocation_site_arg = feedback_vector;
@@ -2038,7 +2032,7 @@
   // x1 - function
   // x3 - slot id (Smi)
   // x2 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -2073,16 +2067,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(function, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ Add(feedback_vector, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Add(feedback_vector, feedback_vector,
-         Operand(FixedArray::kHeaderSize + kPointerSize));
-  __ Ldr(index, FieldMemOperand(feedback_vector, 0));
-  __ Add(index, index, Operand(Smi::FromInt(1)));
-  __ Str(index, FieldMemOperand(feedback_vector, 0));
-
   __ Bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, feedback_vector, index);
+
   __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -2106,6 +2095,7 @@
     __ jmp(&miss);
   }
 
+  // TODO(mvstanton): the code below is effectively disabled. Investigate.
   __ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss);
 
   // We are going megamorphic. If the feedback is a JSFunction, it is fine
@@ -2118,6 +2108,11 @@
   __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
 
   __ Bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, feedback_vector, index);
+
+  __ Bind(&call_count_incremented);
   __ Mov(x0, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -2143,12 +2138,6 @@
   __ Cmp(x4, x5);
   __ B(ne, &miss);
 
-  // Initialize the call counter.
-  __ Mov(x5, Smi::FromInt(1));
-  __ Adds(x4, feedback_vector,
-          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize + kPointerSize));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // x2 - vector
   // x3 - slot
@@ -2156,9 +2145,13 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(feedback_vector, index);
+
     __ Push(cp, function);
     __ CallStub(&create_stub);
     __ Pop(cp, function);
+
+    __ Pop(feedback_vector, index);
   }
 
   __ B(&call_function);
@@ -2168,7 +2161,8 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ B(&call);
+  // The runtime increments the call count in the vector for us.
+  __ B(&call_count_incremented);
 }
 
 
@@ -2681,321 +2675,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  ASM_LOCATION("SubStringStub::Generate");
-  Label runtime;
-
-  // Stack frame on entry.
-  //  lr: return address
-  //  jssp[0]:  substring "to" offset
-  //  jssp[8]:  substring "from" offset
-  //  jssp[16]: pointer to string object
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length (in debug mode.)
-  // If any of these assumptions fail, we call the runtime system.
-
-  static const int kToOffset = 0 * kPointerSize;
-  static const int kFromOffset = 1 * kPointerSize;
-  static const int kStringOffset = 2 * kPointerSize;
-
-  Register to = x0;
-  Register from = x15;
-  Register input_string = x10;
-  Register input_length = x11;
-  Register input_type = x12;
-  Register result_string = x0;
-  Register result_length = x1;
-  Register temp = x3;
-
-  __ Peek(to, kToOffset);
-  __ Peek(from, kFromOffset);
-
-  // Check that both from and to are smis. If not, jump to runtime.
-  __ JumpIfEitherNotSmi(from, to, &runtime);
-  __ SmiUntag(from);
-  __ SmiUntag(to);
-
-  // Calculate difference between from and to. If to < from, branch to runtime.
-  __ Subs(result_length, to, from);
-  __ B(mi, &runtime);
-
-  // Check from is positive.
-  __ Tbnz(from, kWSignBit, &runtime);
-
-  // Make sure first argument is a string.
-  __ Peek(input_string, kStringOffset);
-  __ JumpIfSmi(input_string, &runtime);
-  __ IsObjectJSStringType(input_string, input_type, &runtime);
-
-  Label single_char;
-  __ Cmp(result_length, 1);
-  __ B(eq, &single_char);
-
-  // Short-cut for the case of trivial substring.
-  Label return_x0;
-  __ Ldrsw(input_length,
-           UntagSmiFieldMemOperand(input_string, String::kLengthOffset));
-
-  __ Cmp(result_length, input_length);
-  __ CmovX(x0, input_string, eq);
-  // Return original string.
-  __ B(eq, &return_x0);
-
-  // Longer than original string's length or negative: unsafe arguments.
-  __ B(hi, &runtime);
-
-  // Shorter than original string's length: an actual substring.
-
-  //   x0   to               substring end character offset
-  //   x1   result_length    length of substring result
-  //   x10  input_string     pointer to input string object
-  //   x10  unpacked_string  pointer to unpacked string object
-  //   x11  input_length     length of input string
-  //   x12  input_type       instance type of input string
-  //   x15  from             substring start character offset
-
-  // Deal with different string types: update the index if necessary and put
-  // the underlying string into register unpacked_string.
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  Label update_instance_type;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-
-  // Test for string types, and branch/fall through to appropriate unpacking
-  // code.
-  __ Tst(input_type, kIsIndirectStringMask);
-  __ B(eq, &seq_or_external_string);
-  __ Tst(input_type, kSlicedNotConsMask);
-  __ B(ne, &sliced_string);
-
-  Register unpacked_string = input_string;
-
-  // Cons string. Check whether it is flat, then fetch first part.
-  __ Ldr(temp, FieldMemOperand(input_string, ConsString::kSecondOffset));
-  __ JumpIfNotRoot(temp, Heap::kempty_stringRootIndex, &runtime);
-  __ Ldr(unpacked_string,
-         FieldMemOperand(input_string, ConsString::kFirstOffset));
-  __ B(&update_instance_type);
-
-  __ Bind(&sliced_string);
-  // Sliced string. Fetch parent and correct start index by offset.
-  __ Ldrsw(temp,
-           UntagSmiFieldMemOperand(input_string, SlicedString::kOffsetOffset));
-  __ Add(from, from, temp);
-  __ Ldr(unpacked_string,
-         FieldMemOperand(input_string, SlicedString::kParentOffset));
-
-  __ Bind(&update_instance_type);
-  __ Ldr(temp, FieldMemOperand(unpacked_string, HeapObject::kMapOffset));
-  __ Ldrb(input_type, FieldMemOperand(temp, Map::kInstanceTypeOffset));
-  // Now control must go to &underlying_unpacked. Since the no code is generated
-  // before then we fall through instead of generating a useless branch.
-
-  __ Bind(&seq_or_external_string);
-  // Sequential or external string. Registers unpacked_string and input_string
-  // alias, so there's nothing to do here.
-  // Note that if code is added here, the above code must be updated.
-
-  //   x0   result_string    pointer to result string object (uninit)
-  //   x1   result_length    length of substring result
-  //   x10  unpacked_string  pointer to unpacked string object
-  //   x11  input_length     length of input string
-  //   x12  input_type       instance type of input string
-  //   x15  from             substring start character offset
-  __ Bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    __ Cmp(result_length, SlicedString::kMinLength);
-    // Short slice. Copy instead of slicing.
-    __ B(lt, &copy_routine);
-    // Allocate new sliced string. At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string. It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyway due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ Tbz(input_type, MaskToBit(kStringEncodingMask), &two_byte_slice);
-    __ AllocateOneByteSlicedString(result_string, result_length, x3, x4,
-                                   &runtime);
-    __ B(&set_slice_header);
-
-    __ Bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(result_string, result_length, x3, x4,
-                                   &runtime);
-
-    __ Bind(&set_slice_header);
-    __ SmiTag(from);
-    __ Str(from, FieldMemOperand(result_string, SlicedString::kOffsetOffset));
-    __ Str(unpacked_string,
-           FieldMemOperand(result_string, SlicedString::kParentOffset));
-    __ B(&return_x0);
-
-    __ Bind(&copy_routine);
-  }
-
-  //   x0   result_string    pointer to result string object (uninit)
-  //   x1   result_length    length of substring result
-  //   x10  unpacked_string  pointer to unpacked string object
-  //   x11  input_length     length of input string
-  //   x12  input_type       instance type of input string
-  //   x13  unpacked_char0   pointer to first char of unpacked string (uninit)
-  //   x13  substring_char0  pointer to first char of substring (uninit)
-  //   x14  result_char0     pointer to first char of result (uninit)
-  //   x15  from             substring start character offset
-  Register unpacked_char0 = x13;
-  Register substring_char0 = x13;
-  Register result_char0 = x14;
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-
-  __ Tst(input_type, kExternalStringTag);
-  __ B(eq, &sequential_string);
-
-  __ Tst(input_type, kShortExternalStringTag);
-  __ B(ne, &runtime);
-  __ Ldr(unpacked_char0,
-         FieldMemOperand(unpacked_string, ExternalString::kResourceDataOffset));
-  // unpacked_char0 points to the first character of the underlying string.
-  __ B(&allocate_result);
-
-  __ Bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ Add(unpacked_char0, unpacked_string,
-         SeqOneByteString::kHeaderSize - kHeapObjectTag);
-
-  __ Bind(&allocate_result);
-  // Sequential one-byte string. Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ Tbz(input_type, MaskToBit(kStringEncodingMask), &two_byte_sequential);
-
-  // Allocate and copy the resulting one-byte string.
-  __ AllocateOneByteString(result_string, result_length, x3, x4, x5, &runtime);
-
-  // Locate first character of substring to copy.
-  __ Add(substring_char0, unpacked_char0, from);
-
-  // Locate first character of result.
-  __ Add(result_char0, result_string,
-         SeqOneByteString::kHeaderSize - kHeapObjectTag);
-
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
-  __ B(&return_x0);
-
-  // Allocate and copy the resulting two-byte string.
-  __ Bind(&two_byte_sequential);
-  __ AllocateTwoByteString(result_string, result_length, x3, x4, x5, &runtime);
-
-  // Locate first character of substring to copy.
-  __ Add(substring_char0, unpacked_char0, Operand(from, LSL, 1));
-
-  // Locate first character of result.
-  __ Add(result_char0, result_string,
-         SeqTwoByteString::kHeaderSize - kHeapObjectTag);
-
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  __ Add(result_length, result_length, result_length);
-  __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
-
-  __ Bind(&return_x0);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
-  __ Drop(3);
-  __ Ret();
-
-  __ Bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // x1: result_length
-  // x10: input_string
-  // x12: input_type
-  // x15: from (untagged)
-  __ SmiTag(from);
-  StringCharAtGenerator generator(input_string, from, result_length, x0,
-                                  &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ Drop(3);
-  __ Ret();
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in x0.
-  Label is_number;
-  __ JumpIfSmi(x0, &is_number);
-
-  Label not_string;
-  __ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs);
-  // x0: receiver
-  // x1: receiver instance type
-  __ Ret();
-  __ Bind(&not_string);
-
-  Label not_heap_number;
-  __ Cmp(x1, HEAP_NUMBER_TYPE);
-  __ B(ne, &not_heap_number);
-  __ Bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ Bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Cmp(x1, ODDBALL_TYPE);
-  __ B(ne, &not_oddball);
-  __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset));
-  __ Ret();
-  __ Bind(&not_oddball);
-
-  __ Push(x0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in x0.
-  Label is_number;
-  __ JumpIfSmi(x0, &is_number);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ JumpIfObjectType(x0, x1, x1, LAST_NAME_TYPE, &not_name, hi);
-  // x0: receiver
-  // x1: receiver instance type
-  __ Ret();
-  __ Bind(&not_name);
-
-  Label not_heap_number;
-  __ Cmp(x1, HEAP_NUMBER_TYPE);
-  __ B(ne, &not_heap_number);
-  __ Bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ Bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Cmp(x1, ODDBALL_TYPE);
-  __ B(ne, &not_oddball);
-  __ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset));
-  __ Ret();
-  __ Bind(&not_oddball);
-
-  __ Push(x0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
@@ -3195,16 +2874,6 @@
   Label need_incremental;
   Label need_incremental_pop_scratch;
 
-  Register mem_chunk = regs_.scratch0();
-  Register counter = regs_.scratch1();
-  __ Bic(mem_chunk, regs_.object(), Page::kPageAlignmentMask);
-  __ Ldr(counter,
-         MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset));
-  __ Subs(counter, counter, 1);
-  __ Str(counter,
-         MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset));
-  __ B(mi, &need_incremental);
-
   // If the object is not black we don't have to inform the incremental marker.
   __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
 
@@ -3655,7 +3324,7 @@
 
   __ Ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ mov(feedback, too_far);
   __ Add(receiver_map, receiver_map, Code::kHeaderSize - kHeapObjectTag);
   __ Jump(receiver_map);
@@ -4673,7 +4342,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ Bind(&allocate);
-    __ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
+    __ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
     __ B(gt, &too_big_for_new_space);
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
@@ -5093,7 +4762,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ Bind(&allocate);
-  __ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
+  __ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
   __ B(gt, &too_big_for_new_space);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
diff --git a/src/arm64/interface-descriptors-arm64.cc b/src/arm64/interface-descriptors-arm64.cc
index 881d2d8..d7bc3de 100644
--- a/src/arm64/interface-descriptors-arm64.cc
+++ b/src/arm64/interface-descriptors-arm64.cc
@@ -42,13 +42,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return x3; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return x4; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return x3; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return x5; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return x3; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return x4; }
+const Register StoreTransitionDescriptor::VectorRegister() { return x3; }
+const Register StoreTransitionDescriptor::MapRegister() { return x5; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; }
@@ -407,7 +403,7 @@
                                    &default_descriptor);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   static PlatformInterfaceDescriptor default_descriptor =
       PlatformInterfaceDescriptor(CAN_INLINE_TARGET_ADDRESS);
@@ -446,7 +442,19 @@
       x0,  // argument count (not including receiver)
       x3,  // new target
       x1,  // constructor to call
-      x2   // address of the first argument
+      x2,  // allocation site feedback if available, undefined otherwise
+      x4   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      x0,  // argument count (not including receiver)
+      x1,  // target to call checked to be Array function
+      x2,  // allocation site feedback if available, undefined otherwise
+      x3   // address of the first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc
index f674dd5..87ea1eb 100644
--- a/src/arm64/macro-assembler-arm64.cc
+++ b/src/arm64/macro-assembler-arm64.cc
@@ -1571,9 +1571,8 @@
                                 Label* branch) {
   DCHECK(cond == eq || cond == ne);
   UseScratchRegisterScope temps(this);
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, temps.AcquireSameSizeAs(object), mask, cond, branch);
+  CheckPageFlag(object, temps.AcquireSameSizeAs(object),
+                MemoryChunk::kIsInNewSpaceMask, cond, branch);
 }
 
 
@@ -3037,7 +3036,7 @@
                               Register scratch2,
                               Label* gc_required,
                               AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
@@ -3196,7 +3195,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
 
   DCHECK(!AreAliased(result, scratch1, scratch2));
   DCHECK(result.Is64Bits() && scratch1.Is64Bits() && scratch2.Is64Bits());
diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h
index 06e9a1d..37e9926 100644
--- a/src/arm64/macro-assembler-arm64.h
+++ b/src/arm64/macro-assembler-arm64.h
@@ -742,6 +742,18 @@
   // csp must be aligned to 16 bytes.
   void PeekPair(const CPURegister& dst1, const CPURegister& dst2, int offset);
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack
+  // into |reg|, according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Claim or drop stack space without actually accessing memory.
   //
   // In debug mode, both of these will write invalid data into the claimed or
diff --git a/src/arm64/simulator-arm64.cc b/src/arm64/simulator-arm64.cc
index f5595a8..83b4cf7 100644
--- a/src/arm64/simulator-arm64.cc
+++ b/src/arm64/simulator-arm64.cc
@@ -524,7 +524,8 @@
 
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
 }
 
diff --git a/src/arm64/simulator-arm64.h b/src/arm64/simulator-arm64.h
index d490109..c8c715a 100644
--- a/src/arm64/simulator-arm64.h
+++ b/src/arm64/simulator-arm64.h
@@ -151,7 +151,8 @@
 
 class Simulator : public DecoderVisitor {
  public:
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size) {
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size) {
     USE(i_cache);
     USE(start);
     USE(size);
@@ -167,7 +168,7 @@
 
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   static Simulator* current(v8::internal::Isolate* isolate);
 
diff --git a/src/asmjs/asm-js.cc b/src/asmjs/asm-js.cc
index e94d917..a1af1af 100644
--- a/src/asmjs/asm-js.cc
+++ b/src/asmjs/asm-js.cc
@@ -16,9 +16,9 @@
 #include "src/objects.h"
 #include "src/parsing/parse-info.h"
 
-#include "src/wasm/encoder.h"
 #include "src/wasm/module-decoder.h"
 #include "src/wasm/wasm-js.h"
+#include "src/wasm/wasm-module-builder.h"
 #include "src/wasm/wasm-module.h"
 #include "src/wasm/wasm-result.h"
 
@@ -30,29 +30,6 @@
 namespace internal {
 
 namespace {
-i::MaybeHandle<i::FixedArray> CompileModule(
-    i::Isolate* isolate, const byte* start, const byte* end,
-    ErrorThrower* thrower,
-    internal::wasm::ModuleOrigin origin = i::wasm::kWasmOrigin) {
-  // Decode but avoid a redundant pass over function bodies for verification.
-  // Verification will happen during compilation.
-  i::Zone zone(isolate->allocator());
-  internal::wasm::ModuleResult result = internal::wasm::DecodeWasmModule(
-      isolate, &zone, start, end, false, origin);
-
-  i::MaybeHandle<i::FixedArray> compiled_module;
-  if (result.failed() && origin == internal::wasm::kAsmJsOrigin) {
-    thrower->Error("Asm.js converted module failed to decode");
-  } else if (result.failed()) {
-    thrower->Failed("", result);
-  } else {
-    compiled_module = result.val->CompileFunctions(isolate, thrower);
-  }
-
-  if (result.val) delete result.val;
-  return compiled_module;
-}
-
 Handle<i::Object> StdlibMathMember(i::Isolate* isolate,
                                    Handle<JSReceiver> stdlib,
                                    Handle<Name> name) {
@@ -187,9 +164,9 @@
   i::Handle<i::FixedArray> foreign_globals;
   auto module = builder.Run(&foreign_globals);
 
-  i::MaybeHandle<i::FixedArray> compiled =
-      CompileModule(info->isolate(), module->begin(), module->end(), &thrower,
-                    internal::wasm::kAsmJsOrigin);
+  i::MaybeHandle<i::JSObject> compiled = wasm::CreateModuleObjectFromBytes(
+      info->isolate(), module->begin(), module->end(), &thrower,
+      internal::wasm::kAsmJsOrigin);
   DCHECK(!compiled.is_null());
 
   wasm::AsmTyper::StdlibSet uses = typer.StdlibUses();
@@ -223,24 +200,25 @@
                                               Handle<FixedArray> wasm_data,
                                               Handle<JSArrayBuffer> memory,
                                               Handle<JSReceiver> foreign) {
-  i::Handle<i::FixedArray> compiled(i::FixedArray::cast(wasm_data->get(0)));
+  i::Handle<i::JSObject> module(i::JSObject::cast(wasm_data->get(0)));
   i::Handle<i::FixedArray> foreign_globals(
       i::FixedArray::cast(wasm_data->get(1)));
 
   ErrorThrower thrower(isolate, "Asm.js -> WebAssembly instantiation");
 
   i::MaybeHandle<i::JSObject> maybe_module_object =
-      i::wasm::WasmModule::Instantiate(isolate, compiled, foreign, memory);
+      i::wasm::WasmModule::Instantiate(isolate, &thrower, module, foreign,
+                                       memory);
   if (maybe_module_object.is_null()) {
     return MaybeHandle<Object>();
   }
 
-  i::Handle<i::Name> name(isolate->factory()->InternalizeOneByteString(
-      STATIC_CHAR_VECTOR("__foreign_init__")));
+  i::Handle<i::Name> init_name(isolate->factory()->InternalizeUtf8String(
+      wasm::AsmWasmBuilder::foreign_init_name));
 
   i::Handle<i::Object> module_object = maybe_module_object.ToHandleChecked();
   i::MaybeHandle<i::Object> maybe_init =
-      i::Object::GetProperty(module_object, name);
+      i::Object::GetProperty(module_object, init_name);
   DCHECK(!maybe_init.is_null());
 
   i::Handle<i::Object> init = maybe_init.ToHandleChecked();
@@ -265,10 +243,18 @@
   i::MaybeHandle<i::Object> retval = i::Execution::Call(
       isolate, init, undefined, foreign_globals->length(), foreign_args_array);
   delete[] foreign_args_array;
-
   DCHECK(!retval.is_null());
 
-  return maybe_module_object;
+  i::Handle<i::Name> single_function_name(
+      isolate->factory()->InternalizeUtf8String(
+          wasm::AsmWasmBuilder::single_function_name));
+  i::MaybeHandle<i::Object> single_function =
+      i::Object::GetProperty(module_object, single_function_name);
+  if (!single_function.is_null() &&
+      !single_function.ToHandleChecked()->IsUndefined(isolate)) {
+    return single_function;
+  }
+  return module_object;
 }
 
 }  // namespace internal
diff --git a/src/asmjs/asm-js.h b/src/asmjs/asm-js.h
index 44bf04d..a2c5cec 100644
--- a/src/asmjs/asm-js.h
+++ b/src/asmjs/asm-js.h
@@ -5,24 +5,21 @@
 #ifndef V8_ASMJS_ASM_JS_H_
 #define V8_ASMJS_ASM_JS_H_
 
-#ifndef V8_SHARED
-#include "src/allocation.h"
-#include "src/base/hashmap.h"
-#else
-#include "include/v8.h"
-#include "src/base/compiler-specific.h"
-#endif  // !V8_SHARED
-#include "src/parsing/parser.h"
+#include "src/globals.h"
 
 namespace v8 {
 namespace internal {
+
+class JSArrayBuffer;
+class ParseInfo;
+
 // Interface to compile and instantiate for asmjs.
 class AsmJs {
  public:
-  static MaybeHandle<FixedArray> ConvertAsmToWasm(i::ParseInfo* info);
-  static bool IsStdlibValid(i::Isolate* isolate, Handle<FixedArray> wasm_data,
+  static MaybeHandle<FixedArray> ConvertAsmToWasm(ParseInfo* info);
+  static bool IsStdlibValid(Isolate* isolate, Handle<FixedArray> wasm_data,
                             Handle<JSReceiver> stdlib);
-  static MaybeHandle<Object> InstantiateAsmWasm(i::Isolate* isolate,
+  static MaybeHandle<Object> InstantiateAsmWasm(Isolate* isolate,
                                                 Handle<FixedArray> wasm_data,
                                                 Handle<JSArrayBuffer> memory,
                                                 Handle<JSReceiver> foreign);
diff --git a/src/asmjs/asm-typer.cc b/src/asmjs/asm-typer.cc
index 1d070a0..94cc4db 100644
--- a/src/asmjs/asm-typer.cc
+++ b/src/asmjs/asm-typer.cc
@@ -17,7 +17,6 @@
 #include "src/base/bits.h"
 #include "src/codegen.h"
 #include "src/globals.h"
-#include "src/type-cache.h"
 #include "src/utils.h"
 
 #define FAIL(node, msg)                                        \
@@ -129,14 +128,13 @@
       script_(script),
       root_(root),
       forward_definitions_(zone),
+      ffi_use_signatures_(zone),
       stdlib_types_(zone),
       stdlib_math_types_(zone),
       module_info_(VariableInfo::ForSpecialSymbol(zone_, kModule)),
-      global_scope_(ZoneHashMap::PointersMatch,
-                    ZoneHashMap::kDefaultHashMapCapacity,
+      global_scope_(ZoneHashMap::kDefaultHashMapCapacity,
                     ZoneAllocationPolicy(zone)),
-      local_scope_(ZoneHashMap::PointersMatch,
-                   ZoneHashMap::kDefaultHashMapCapacity,
+      local_scope_(ZoneHashMap::kDefaultHashMapCapacity,
                    ZoneAllocationPolicy(zone)),
       stack_limit_(isolate->stack_guard()->real_climit()),
       node_types_(zone_),
@@ -330,8 +328,8 @@
   return i->second;
 }
 
-AsmTyper::VariableInfo* AsmTyper::Lookup(Variable* variable) {
-  ZoneHashMap* scope = in_function_ ? &local_scope_ : &global_scope_;
+AsmTyper::VariableInfo* AsmTyper::Lookup(Variable* variable) const {
+  const ZoneHashMap* scope = in_function_ ? &local_scope_ : &global_scope_;
   ZoneHashMap::Entry* entry =
       scope->Lookup(variable, ComputePointerHash(variable));
   if (entry == nullptr && in_function_) {
@@ -424,6 +422,8 @@
   return AsmType::None();
 }
 
+AsmType* AsmTyper::TypeOf(Variable* v) const { return Lookup(v)->type(); }
+
 AsmTyper::StandardMember AsmTyper::VariableAsStandardMember(Variable* var) {
   auto* var_info = Lookup(var);
   if (var_info == nullptr) {
@@ -606,8 +606,10 @@
   if (estatement != nullptr) {
     Assignment* assignment = estatement->expression()->AsAssignment();
     if (assignment != nullptr && assignment->target()->IsVariableProxy() &&
-        assignment->target()->AsVariableProxy()->var()->mode() ==
-            CONST_LEGACY) {
+        assignment->target()
+            ->AsVariableProxy()
+            ->var()
+            ->is_sloppy_function_name()) {
       use_asm_directive = iter.Next();
     }
   }
@@ -760,7 +762,7 @@
   bool global_variable = false;
   if (value->IsLiteral() || value->IsCall()) {
     AsmType* type = nullptr;
-    RECURSE(type = VariableTypeAnnotations(value));
+    RECURSE(type = VariableTypeAnnotations(value, true));
     target_info = new (zone_) VariableInfo(type);
     target_info->set_mutability(VariableInfo::kMutableGlobal);
     global_variable = true;
@@ -1509,7 +1511,7 @@
 }
 
 namespace {
-bool IsNegate(BinaryOperation* binop) {
+bool IsInvert(BinaryOperation* binop) {
   if (binop->op() != Token::BIT_XOR) {
     return false;
   }
@@ -1524,7 +1526,7 @@
 }
 
 bool IsUnaryMinus(BinaryOperation* binop) {
-  // *VIOLATION* The parser replaces uses of +x with x*1.0.
+  // *VIOLATION* The parser replaces uses of -x with x*-1.
   if (binop->op() != Token::MUL) {
     return false;
   }
@@ -1570,7 +1572,7 @@
       }
 
       if (IsUnaryMinus(expr)) {
-        // *VIOLATION* the parser converts -x to x * -1.0.
+        // *VIOLATION* the parser converts -x to x * -1.
         AsmType* left_type;
         RECURSE(left_type = ValidateExpression(expr->left()));
         SetTypeOf(expr->right(), left_type);
@@ -1595,11 +1597,11 @@
     case Token::BIT_AND:
       return ValidateBitwiseANDExpression(expr);
     case Token::BIT_XOR:
-      if (IsNegate(expr)) {
+      if (IsInvert(expr)) {
         auto* left = expr->left();
         auto* left_as_binop = left->AsBinaryOperation();
 
-        if (left_as_binop != nullptr && IsNegate(left_as_binop)) {
+        if (left_as_binop != nullptr && IsInvert(left_as_binop)) {
           // This is the special ~~ operator.
           AsmType* left_type;
           RECURSE(left_type = ValidateExpression(left_as_binop->left()));
@@ -1660,6 +1662,12 @@
     return AsmType::Double();
   }
 
+  // The parser collapses expressions like !0 and !123 to true/false.
+  // We therefore need to permit these as alternate versions of 0 / 1.
+  if (literal->raw_value()->IsTrue() || literal->raw_value()->IsFalse()) {
+    return AsmType::Int();
+  }
+
   uint32_t value;
   if (!literal->value()->ToUint32(&value)) {
     int32_t value;
@@ -2305,9 +2313,20 @@
       FAIL(call, "Calling something that's not a function.");
     }
 
-    if (callee_type->AsFFIType() != nullptr &&
-        return_type == AsmType::Float()) {
-      FAIL(call, "Foreign functions can't return float.");
+    if (callee_type->AsFFIType() != nullptr) {
+      if (return_type == AsmType::Float()) {
+        FAIL(call, "Foreign functions can't return float.");
+      }
+      // Record FFI use signature, since the asm->wasm translator must know
+      // all uses up-front.
+      ffi_use_signatures_.emplace_back(
+          FFIUseSignature(call_var_proxy->var(), zone_));
+      FFIUseSignature* sig = &ffi_use_signatures_.back();
+      sig->return_type_ = return_type;
+      sig->arg_types_.reserve(args.size());
+      for (size_t i = 0; i < args.size(); ++i) {
+        sig->arg_types_.emplace_back(args[i]);
+      }
     }
 
     if (!callee_type->CanBeInvokedWith(return_type, args)) {
@@ -2662,7 +2681,8 @@
 
 // 5.4 VariableTypeAnnotations
 // Also used for 5.5 GlobalVariableTypeAnnotations
-AsmType* AsmTyper::VariableTypeAnnotations(Expression* initializer) {
+AsmType* AsmTyper::VariableTypeAnnotations(Expression* initializer,
+                                           bool global) {
   if (auto* literal = initializer->AsLiteral()) {
     if (literal->raw_value()->ContainsDot()) {
       SetTypeOf(initializer, AsmType::Double());
@@ -2703,10 +2723,13 @@
          "to fround.");
   }
 
-  if (!src_expr->raw_value()->ContainsDot()) {
-    FAIL(initializer,
-         "Invalid float type annotation - expected literal argument to be a "
-         "floating point literal.");
+  // Float constants must contain dots in locals, but not in globals.
+  if (!global) {
+    if (!src_expr->raw_value()->ContainsDot()) {
+      FAIL(initializer,
+           "Invalid float type annotation - expected literal argument to be a "
+           "floating point literal.");
+    }
   }
 
   return AsmType::Float();
diff --git a/src/asmjs/asm-typer.h b/src/asmjs/asm-typer.h
index 6b9c70c..942ca21 100644
--- a/src/asmjs/asm-typer.h
+++ b/src/asmjs/asm-typer.h
@@ -12,12 +12,12 @@
 #include "src/allocation.h"
 #include "src/asmjs/asm-types.h"
 #include "src/ast/ast-type-bounds.h"
+#include "src/ast/ast-types.h"
 #include "src/ast/ast.h"
 #include "src/effects.h"
 #include "src/type-info.h"
-#include "src/types.h"
-#include "src/zone-containers.h"
-#include "src/zone.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -73,12 +73,26 @@
   const char* error_message() const { return error_message_; }
 
   AsmType* TypeOf(AstNode* node) const;
+  AsmType* TypeOf(Variable* v) const;
   StandardMember VariableAsStandardMember(Variable* var);
 
   typedef std::unordered_set<StandardMember, std::hash<int> > StdlibSet;
 
   StdlibSet StdlibUses() const { return stdlib_uses_; }
 
+  // Each FFI import has a usage-site signature associated with it.
+  struct FFIUseSignature {
+    Variable* var;
+    ZoneVector<AsmType*> arg_types_;
+    AsmType* return_type_;
+    FFIUseSignature(Variable* v, Zone* zone)
+        : var(v), arg_types_(zone), return_type_(nullptr) {}
+  };
+
+  const ZoneVector<FFIUseSignature>& FFIUseSignatures() {
+    return ffi_use_signatures_;
+  }
+
  private:
   friend class v8::internal::wasm::AsmTyperHarnessBuilder;
 
@@ -192,7 +206,7 @@
   //   Lookup(Delta, Gamma, x)
   //
   // Delta is the global_scope_ member, and Gamma, local_scope_.
-  VariableInfo* Lookup(Variable* variable);
+  VariableInfo* Lookup(Variable* variable) const;
 
   // All of the ValidateXXX methods below return AsmType::None() in case of
   // validation failure.
@@ -292,8 +306,9 @@
   // 5.2 ReturnTypeAnnotations
   AsmType* ReturnTypeAnnotations(ReturnStatement* statement);
   // 5.4 VariableTypeAnnotations
-  AsmType* VariableTypeAnnotations(Expression* initializer);
   // 5.5 GlobalVariableTypeAnnotations
+  AsmType* VariableTypeAnnotations(Expression* initializer,
+                                   bool global = false);
   AsmType* ImportExpression(Property* import);
   AsmType* NewHeapView(CallNew* new_heap_view);
 
@@ -306,6 +321,7 @@
   AsmType* return_type_ = nullptr;
 
   ZoneVector<VariableInfo*> forward_definitions_;
+  ZoneVector<FFIUseSignature> ffi_use_signatures_;
   ObjectTypeMap stdlib_types_;
   ObjectTypeMap stdlib_math_types_;
 
diff --git a/src/asmjs/asm-types.h b/src/asmjs/asm-types.h
index c307bf5..6fe4201 100644
--- a/src/asmjs/asm-types.h
+++ b/src/asmjs/asm-types.h
@@ -8,8 +8,8 @@
 #include <string>
 
 #include "src/base/macros.h"
-#include "src/zone-containers.h"
-#include "src/zone.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/asmjs/asm-wasm-builder.cc b/src/asmjs/asm-wasm-builder.cc
index 6419459..091f793 100644
--- a/src/asmjs/asm-wasm-builder.cc
+++ b/src/asmjs/asm-wasm-builder.cc
@@ -32,6 +32,7 @@
   } while (false)
 
 enum AsmScope { kModuleScope, kInitScope, kFuncScope, kExportScope };
+enum ValueFate { kDrop, kLeaveOnStack };
 
 struct ForeignVariable {
   Handle<Name> name;
@@ -43,14 +44,11 @@
  public:
   AsmWasmBuilderImpl(Isolate* isolate, Zone* zone, FunctionLiteral* literal,
                      AsmTyper* typer)
-      : local_variables_(base::HashMap::PointersMatch,
-                         ZoneHashMap::kDefaultHashMapCapacity,
+      : local_variables_(ZoneHashMap::kDefaultHashMapCapacity,
                          ZoneAllocationPolicy(zone)),
-        functions_(base::HashMap::PointersMatch,
-                   ZoneHashMap::kDefaultHashMapCapacity,
+        functions_(ZoneHashMap::kDefaultHashMapCapacity,
                    ZoneAllocationPolicy(zone)),
-        global_variables_(base::HashMap::PointersMatch,
-                          ZoneHashMap::kDefaultHashMapCapacity,
+        global_variables_(ZoneHashMap::kDefaultHashMapCapacity,
                           ZoneAllocationPolicy(zone)),
         scope_(kModuleScope),
         builder_(new (zone) WasmModuleBuilder(zone)),
@@ -61,46 +59,43 @@
         typer_(typer),
         breakable_blocks_(zone),
         foreign_variables_(zone),
-        init_function_index_(0),
-        foreign_init_function_index_(0),
+        init_function_(nullptr),
+        foreign_init_function_(nullptr),
         next_table_index_(0),
-        function_tables_(base::HashMap::PointersMatch,
-                         ZoneHashMap::kDefaultHashMapCapacity,
+        function_tables_(ZoneHashMap::kDefaultHashMapCapacity,
                          ZoneAllocationPolicy(zone)),
         imported_function_table_(this) {
     InitializeAstVisitor(isolate);
   }
 
   void InitializeInitFunction() {
-    init_function_index_ = builder_->AddFunction();
     FunctionSig::Builder b(zone(), 0, 0);
-    current_function_builder_ = builder_->FunctionAt(init_function_index_);
-    current_function_builder_->SetSignature(b.Build());
-    builder_->MarkStartFunction(init_function_index_);
-    current_function_builder_ = nullptr;
+    init_function_ = builder_->AddFunction(b.Build());
+    builder_->MarkStartFunction(init_function_);
   }
 
   void BuildForeignInitFunction() {
-    foreign_init_function_index_ = builder_->AddFunction();
+    foreign_init_function_ = builder_->AddFunction();
     FunctionSig::Builder b(zone(), 0, foreign_variables_.size());
     for (auto i = foreign_variables_.begin(); i != foreign_variables_.end();
          ++i) {
       b.AddParam(i->type);
     }
-    current_function_builder_ =
-        builder_->FunctionAt(foreign_init_function_index_);
-    current_function_builder_->SetExported();
+    foreign_init_function_->SetExported();
     std::string raw_name = "__foreign_init__";
-    current_function_builder_->SetName(raw_name.data(),
-                                       static_cast<int>(raw_name.size()));
-    current_function_builder_->SetSignature(b.Build());
+    foreign_init_function_->SetName(
+        AsmWasmBuilder::foreign_init_name,
+        static_cast<int>(strlen(AsmWasmBuilder::foreign_init_name)));
+
+    foreign_init_function_->SetName(raw_name.data(),
+                                    static_cast<int>(raw_name.size()));
+    foreign_init_function_->SetSignature(b.Build());
     for (size_t pos = 0; pos < foreign_variables_.size(); ++pos) {
-      current_function_builder_->EmitGetLocal(static_cast<uint32_t>(pos));
+      foreign_init_function_->EmitGetLocal(static_cast<uint32_t>(pos));
       ForeignVariable* fv = &foreign_variables_[pos];
       uint32_t index = LookupOrInsertGlobal(fv->var, fv->type);
-      current_function_builder_->EmitWithVarInt(kExprSetGlobal, index);
+      foreign_init_function_->EmitWithVarInt(kExprSetGlobal, index);
     }
-    current_function_builder_ = nullptr;
   }
 
   i::Handle<i::FixedArray> GetForeignArgs() {
@@ -124,8 +119,7 @@
   void VisitFunctionDeclaration(FunctionDeclaration* decl) {
     DCHECK_EQ(kModuleScope, scope_);
     DCHECK_NULL(current_function_builder_);
-    uint32_t index = LookupOrInsertFunction(decl->proxy()->var());
-    current_function_builder_ = builder_->FunctionAt(index);
+    current_function_builder_ = LookupOrInsertFunction(decl->proxy()->var());
     scope_ = kFuncScope;
     RECURSE(Visit(decl->fun()));
     scope_ = kModuleScope;
@@ -157,8 +151,7 @@
       }
     }
     if (scope_ == kFuncScope) {
-      BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock,
-                           false);
+      BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock);
       RECURSE(VisitStatements(stmt->statements()));
     } else {
       RECURSE(VisitStatements(stmt->statements()));
@@ -171,10 +164,12 @@
 
    public:
     BlockVisitor(AsmWasmBuilderImpl* builder, BreakableStatement* stmt,
-                 WasmOpcode opcode, bool is_loop)
+                 WasmOpcode opcode)
         : builder_(builder) {
-      builder_->breakable_blocks_.push_back(std::make_pair(stmt, is_loop));
-      builder_->current_function_builder_->Emit(opcode);
+      builder_->breakable_blocks_.push_back(
+          std::make_pair(stmt, opcode == kExprLoop));
+      // Blocks and loops have a type immediate.
+      builder_->current_function_builder_->EmitWithU8(opcode, kLocalVoid);
     }
     ~BlockVisitor() {
       builder_->current_function_builder_->Emit(kExprEnd);
@@ -183,7 +178,32 @@
   };
 
   void VisitExpressionStatement(ExpressionStatement* stmt) {
-    RECURSE(Visit(stmt->expression()));
+    VisitForEffect(stmt->expression());
+  }
+
+  void VisitForEffect(Expression* expr) {
+    if (expr->IsAssignment()) {
+      // Don't emit drops for assignments. Instead use SetLocal/GetLocal.
+      VisitAssignment(expr->AsAssignment(), kDrop);
+      return;
+    }
+    if (expr->IsCall()) {
+      // Only emit a drop if the call has a non-void return value.
+      if (VisitCallExpression(expr->AsCall()) && scope_ == kFuncScope) {
+        current_function_builder_->Emit(kExprDrop);
+      }
+      return;
+    }
+    if (expr->IsBinaryOperation()) {
+      BinaryOperation* binop = expr->AsBinaryOperation();
+      if (binop->op() == Token::COMMA) {
+        VisitForEffect(binop->left());
+        VisitForEffect(binop->right());
+        return;
+      }
+    }
+    RECURSE(Visit(expr));
+    if (scope_ == kFuncScope) current_function_builder_->Emit(kExprDrop);
   }
 
   void VisitEmptyStatement(EmptyStatement* stmt) {}
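
VisitForEffect above is the central place that keeps the wasm value stack balanced for expression statements: anything that leaves a value gets a drop, while assignments and void calls are emitted in a form that leaves nothing behind. A standalone sketch of the decision (illustrative names and printed opcodes, not V8 code):

// Standalone sketch of the drop-insertion rule used when an expression is
// evaluated purely for its effect.
#include <cstdio>

enum class ExprKind { kAssignment, kVoidCall, kValueCall, kOtherValue };

void VisitForEffect(ExprKind kind) {
  switch (kind) {
    case ExprKind::kAssignment:
      std::printf("set_local       ; store without leaving a value\n");
      break;
    case ExprKind::kVoidCall:
      std::printf("call            ; no result, nothing to drop\n");
      break;
    case ExprKind::kValueCall:
      std::printf("call\ndrop      ; discard the unused result\n");
      break;
    case ExprKind::kOtherValue:
      std::printf("<expr>\ndrop    ; discard the unused result\n");
      break;
  }
}

int main() {
  VisitForEffect(ExprKind::kAssignment);  // e.g. `x = f();` as a statement
  VisitForEffect(ExprKind::kValueCall);   // e.g. `f();` where f returns i32
  return 0;
}
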
@@ -193,7 +213,7 @@
   void VisitIfStatement(IfStatement* stmt) {
     DCHECK_EQ(kFuncScope, scope_);
     RECURSE(Visit(stmt->condition()));
-    current_function_builder_->Emit(kExprIf);
+    current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
     // WASM ifs come with implement blocks for both arms.
     breakable_blocks_.push_back(std::make_pair(nullptr, false));
     if (stmt->HasThenStatement()) {
@@ -207,48 +227,26 @@
     breakable_blocks_.pop_back();
   }
 
-  void VisitContinueStatement(ContinueStatement* stmt) {
+  void DoBreakOrContinue(BreakableStatement* target, bool is_continue) {
     DCHECK_EQ(kFuncScope, scope_);
-    DCHECK_NOT_NULL(stmt->target());
-    int i = static_cast<int>(breakable_blocks_.size()) - 1;
-    int block_distance = 0;
-    for (; i >= 0; i--) {
+    for (int i = static_cast<int>(breakable_blocks_.size()) - 1; i >= 0; --i) {
       auto elem = breakable_blocks_.at(i);
-      if (elem.first == stmt->target()) {
-        DCHECK(elem.second);
-        break;
-      } else if (elem.second) {
-        block_distance += 2;
-      } else {
-        block_distance += 1;
+      if (elem.first == target && elem.second == is_continue) {
+        int block_distance = static_cast<int>(breakable_blocks_.size() - i - 1);
+        current_function_builder_->Emit(kExprBr);
+        current_function_builder_->EmitVarInt(block_distance);
+        return;
       }
     }
-    DCHECK(i >= 0);
-    current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
-    current_function_builder_->EmitVarInt(block_distance);
+    UNREACHABLE();  // statement not found
+  }
+
+  void VisitContinueStatement(ContinueStatement* stmt) {
+    DoBreakOrContinue(stmt->target(), true);
   }
 
   void VisitBreakStatement(BreakStatement* stmt) {
-    DCHECK_EQ(kFuncScope, scope_);
-    DCHECK_NOT_NULL(stmt->target());
-    int i = static_cast<int>(breakable_blocks_.size()) - 1;
-    int block_distance = 0;
-    for (; i >= 0; i--) {
-      auto elem = breakable_blocks_.at(i);
-      if (elem.first == stmt->target()) {
-        if (elem.second) {
-          block_distance++;
-        }
-        break;
-      } else if (elem.second) {
-        block_distance += 2;
-      } else {
-        block_distance += 1;
-      }
-    }
-    DCHECK(i >= 0);
-    current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
-    current_function_builder_->EmitVarInt(block_distance);
+    DoBreakOrContinue(stmt->target(), false);
   }
 
   void VisitReturnStatement(ReturnStatement* stmt) {
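
DoBreakOrContinue above replaces the two hand-rolled loops with a single relative-depth computation: a wasm br names its target by how many labels sit between it and the branch. A standalone sketch of that computation (Entry is a stand-in for the pairs kept on breakable_blocks_):

// Standalone sketch: compute the wasm br depth for a break/continue target
// from a stack of enclosing labels, innermost last.
#include <cassert>
#include <utility>
#include <vector>

using Entry = std::pair<const void*, bool>;  // (target statement, is continue target)

int BrDepth(const std::vector<Entry>& blocks, const void* target,
            bool is_continue) {
  for (int i = static_cast<int>(blocks.size()) - 1; i >= 0; --i) {
    if (blocks[i].first == target && blocks[i].second == is_continue) {
      return static_cast<int>(blocks.size()) - i - 1;  // labels in between
    }
  }
  return -1;  // not found (the real code treats this as unreachable)
}

int main() {
  int while_stmt = 0;
  // A while statement pushes an outer block (break target) and an inner loop
  // (continue target), matching the lowering later in this file.
  std::vector<Entry> blocks = {{&while_stmt, false}, {&while_stmt, true}};
  assert(BrDepth(blocks, &while_stmt, true) == 0);   // continue -> inner loop
  assert(BrDepth(blocks, &while_stmt, false) == 1);  // break -> outer block
  return 0;
}
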
@@ -258,9 +256,7 @@
       scope_ = kModuleScope;
     } else if (scope_ == kFuncScope) {
       RECURSE(Visit(stmt->expression()));
-      uint8_t arity =
-          TypeOf(stmt->expression()) == kAstStmt ? ARITY_0 : ARITY_1;
-      current_function_builder_->EmitWithU8(kExprReturn, arity);
+      current_function_builder_->Emit(kExprReturn);
     } else {
       UNREACHABLE();
     }
@@ -276,7 +272,7 @@
       VisitVariableProxy(tag);
       current_function_builder_->EmitI32Const(node->begin);
       current_function_builder_->Emit(kExprI32LtS);
-      current_function_builder_->Emit(kExprIf);
+      current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
       if_depth++;
       breakable_blocks_.push_back(std::make_pair(nullptr, false));
       HandleCase(node->left, case_to_block, tag, default_block, if_depth);
@@ -286,7 +282,7 @@
       VisitVariableProxy(tag);
       current_function_builder_->EmitI32Const(node->end);
       current_function_builder_->Emit(kExprI32GtS);
-      current_function_builder_->Emit(kExprIf);
+      current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
       if_depth++;
       breakable_blocks_.push_back(std::make_pair(nullptr, false));
       HandleCase(node->right, case_to_block, tag, default_block, if_depth);
@@ -296,9 +292,9 @@
       VisitVariableProxy(tag);
       current_function_builder_->EmitI32Const(node->begin);
       current_function_builder_->Emit(kExprI32Eq);
-      current_function_builder_->Emit(kExprIf);
+      current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
       DCHECK(case_to_block.find(node->begin) != case_to_block.end());
-      current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
+      current_function_builder_->Emit(kExprBr);
       current_function_builder_->EmitVarInt(1 + if_depth +
                                             case_to_block[node->begin]);
       current_function_builder_->Emit(kExprEnd);
@@ -310,22 +306,22 @@
       } else {
         VisitVariableProxy(tag);
       }
-      current_function_builder_->EmitWithU8(kExprBrTable, ARITY_0);
+      current_function_builder_->Emit(kExprBrTable);
       current_function_builder_->EmitVarInt(node->end - node->begin + 1);
-      for (int v = node->begin; v <= node->end; v++) {
+      for (int v = node->begin; v <= node->end; ++v) {
         if (case_to_block.find(v) != case_to_block.end()) {
-          byte break_code[] = {BR_TARGET(if_depth + case_to_block[v])};
-          current_function_builder_->EmitCode(break_code, sizeof(break_code));
+          uint32_t target = if_depth + case_to_block[v];
+          current_function_builder_->EmitVarInt(target);
         } else {
-          byte break_code[] = {BR_TARGET(if_depth + default_block)};
-          current_function_builder_->EmitCode(break_code, sizeof(break_code));
+          uint32_t target = if_depth + default_block;
+          current_function_builder_->EmitVarInt(target);
         }
         if (v == kMaxInt) {
           break;
         }
       }
-      byte break_code[] = {BR_TARGET(if_depth + default_block)};
-      current_function_builder_->EmitCode(break_code, sizeof(break_code));
+      uint32_t target = if_depth + default_block;
+      current_function_builder_->EmitVarInt(target);
     }
 
     while (if_depth-- != prev_if_depth) {
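
The br_table targets above are now written with EmitVarInt, i.e. as the unsigned LEB128 varints the wasm binary format uses for immediates. A minimal standalone encoder for reference (an assumption about the encoding, not V8's implementation):

// Unsigned LEB128 encoding: 7 data bits per byte, high bit set while more
// bytes follow.
#include <cassert>
#include <cstdint>
#include <vector>

void EmitVarUint(std::vector<uint8_t>* out, uint32_t value) {
  do {
    uint8_t byte = value & 0x7f;
    value >>= 7;
    if (value != 0) byte |= 0x80;  // continuation bit
    out->push_back(byte);
  } while (value != 0);
}

int main() {
  std::vector<uint8_t> out;
  EmitVarUint(&out, 3);    // small branch depths fit in a single byte
  EmitVarUint(&out, 300);  // 300 encodes as 0xAC 0x02
  assert(out.size() == 3 && out[0] == 0x03 && out[1] == 0xAC && out[2] == 0x02);
  return 0;
}
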
@@ -342,14 +338,14 @@
     if (case_count == 0) {
       return;
     }
-    BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock, false);
+    BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock);
     ZoneVector<BlockVisitor*> blocks(zone_);
     ZoneVector<int32_t> cases(zone_);
     ZoneMap<int, unsigned int> case_to_block(zone_);
     bool has_default = false;
-    for (int i = case_count - 1; i >= 0; i--) {
+    for (int i = case_count - 1; i >= 0; --i) {
       CaseClause* clause = clauses->at(i);
-      blocks.push_back(new BlockVisitor(this, nullptr, kExprBlock, false));
+      blocks.push_back(new BlockVisitor(this, nullptr, kExprBlock));
       if (!clause->is_default()) {
         Literal* label = clause->label()->AsLiteral();
         Handle<Object> value = label->value();
@@ -366,12 +362,12 @@
     }
     if (!has_default || case_count > 1) {
       int default_block = has_default ? case_count - 1 : case_count;
-      BlockVisitor switch_logic_block(this, nullptr, kExprBlock, false);
+      BlockVisitor switch_logic_block(this, nullptr, kExprBlock);
       CaseNode* root = OrderCases(&cases, zone_);
       HandleCase(root, case_to_block, tag, default_block, 0);
       if (root->left != nullptr || root->right != nullptr ||
           root->begin == root->end) {
-        current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
+        current_function_builder_->Emit(kExprBr);
         current_function_builder_->EmitVarInt(default_block);
       }
     }
@@ -388,22 +384,24 @@
 
   void VisitDoWhileStatement(DoWhileStatement* stmt) {
     DCHECK_EQ(kFuncScope, scope_);
-    BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
+    BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
+    BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
     RECURSE(Visit(stmt->body()));
     RECURSE(Visit(stmt->cond()));
-    current_function_builder_->Emit(kExprIf);
-    current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 1);
+    current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
+    current_function_builder_->EmitWithU8(kExprBr, 1);
     current_function_builder_->Emit(kExprEnd);
   }
 
   void VisitWhileStatement(WhileStatement* stmt) {
     DCHECK_EQ(kFuncScope, scope_);
-    BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
+    BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
+    BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
     RECURSE(Visit(stmt->cond()));
     breakable_blocks_.push_back(std::make_pair(nullptr, false));
-    current_function_builder_->Emit(kExprIf);
+    current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
     RECURSE(Visit(stmt->body()));
-    current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 1);
+    current_function_builder_->EmitWithU8(kExprBr, 1);
     current_function_builder_->Emit(kExprEnd);
     breakable_blocks_.pop_back();
   }
@@ -413,13 +411,13 @@
     if (stmt->init() != nullptr) {
       RECURSE(Visit(stmt->init()));
     }
-    BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
+    BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
+    BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
     if (stmt->cond() != nullptr) {
       RECURSE(Visit(stmt->cond()));
       current_function_builder_->Emit(kExprI32Eqz);
-      current_function_builder_->Emit(kExprIf);
-      current_function_builder_->Emit(kExprNop);
-      current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 2);
+      current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
+      current_function_builder_->EmitWithU8(kExprBr, 2);
       current_function_builder_->Emit(kExprEnd);
     }
     if (stmt->body() != nullptr) {
@@ -428,8 +426,7 @@
     if (stmt->next() != nullptr) {
       RECURSE(Visit(stmt->next()));
     }
-    current_function_builder_->Emit(kExprNop);
-    current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 0);
+    current_function_builder_->EmitWithU8(kExprBr, 0);
   }
 
   void VisitForInStatement(ForInStatement* stmt) { UNREACHABLE(); }
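
The do-while, while, and for lowerings above all share one shape: an outer block as the break target wrapping a loop as the continue target, with explicitly typed (void) ifs and plain br depths instead of the old ARITY-prefixed branches. A standalone sketch that just prints the sequence a for-loop produces (opcode names as text, for illustration only):

// Shape of the for-loop lowering above:
//   block            ;; break target
//     loop           ;; continue target
//       <cond> i32.eqz
//       if void  br 2  end   ;; leave loop and block when cond is false
//       <body> <next>
//       br 0                 ;; back to the loop header
//     end
//   end
#include <cstdio>

int main() {
  const char* ops[] = {"block",   "loop", "<cond>", "i32.eqz", "if void",
                       "br 2",    "end",  "<body>", "<next>",  "br 0",
                       "end",     "end"};
  for (const char* op : ops) std::printf("%s\n", op);
  return 0;
}
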
@@ -446,19 +443,13 @@
     DeclarationScope* scope = expr->scope();
     if (scope_ == kFuncScope) {
       if (auto* func_type = typer_->TypeOf(expr)->AsFunctionType()) {
-        // Build the signature for the function.
-        LocalType return_type = TypeFrom(func_type->ReturnType());
+        // Add the parameters for the function.
         const auto& arguments = func_type->Arguments();
-        FunctionSig::Builder b(zone(), return_type == kAstStmt ? 0 : 1,
-                               arguments.size());
-        if (return_type != kAstStmt) b.AddReturn(return_type);
         for (int i = 0; i < expr->parameter_count(); ++i) {
           LocalType type = TypeFrom(arguments[i]);
           DCHECK_NE(kAstStmt, type);
-          b.AddParam(type);
           InsertParameter(scope->parameter(i), type, i);
         }
-        current_function_builder_->SetSignature(b.Build());
       } else {
         UNREACHABLE();
       }
@@ -476,7 +467,24 @@
     RECURSE(Visit(expr->condition()));
     // WASM ifs come with implicit blocks for both arms.
     breakable_blocks_.push_back(std::make_pair(nullptr, false));
-    current_function_builder_->Emit(kExprIf);
+    LocalTypeCode type;
+    switch (TypeOf(expr)) {
+      case kAstI32:
+        type = kLocalI32;
+        break;
+      case kAstI64:
+        type = kLocalI64;
+        break;
+      case kAstF32:
+        type = kLocalF32;
+        break;
+      case kAstF64:
+        type = kLocalF64;
+        break;
+      default:
+        UNREACHABLE();
+    }
+    current_function_builder_->EmitWithU8(kExprIf, type);
     RECURSE(Visit(expr->then_expression()));
     current_function_builder_->Emit(kExprElse);
     RECURSE(Visit(expr->else_expression()));
@@ -551,12 +559,22 @@
         current_function_builder_->EmitGetLocal(
             LookupOrInsertLocal(var, var_type));
       }
+    } else if (scope_ == kExportScope) {
+      Variable* var = expr->var();
+      DCHECK(var->is_function());
+      WasmFunctionBuilder* function = LookupOrInsertFunction(var);
+      function->SetExported();
+      function->SetName(
+          AsmWasmBuilder::single_function_name,
+          static_cast<int>(strlen(AsmWasmBuilder::single_function_name)));
     }
   }
 
   void VisitLiteral(Literal* expr) {
     Handle<Object> value = expr->value();
-    if (!value->IsNumber() || (scope_ != kFuncScope && scope_ != kInitScope)) {
+    if (!(value->IsNumber() || expr->raw_value()->IsTrue() ||
+          expr->raw_value()->IsFalse()) ||
+        (scope_ != kFuncScope && scope_ != kInitScope)) {
       return;
     }
     AsmType* type = typer_->TypeOf(expr);
@@ -577,10 +595,40 @@
       int32_t i = static_cast<int32_t>(u);
       byte code[] = {WASM_I32V(i)};
       current_function_builder_->EmitCode(code, sizeof(code));
+    } else if (type->IsA(AsmType::Int())) {
+      // The parser can collapse !0, !1 etc to true / false.
+      // Allow these as int literals.
+      if (expr->raw_value()->IsTrue()) {
+        byte code[] = {WASM_I32V(1)};
+        current_function_builder_->EmitCode(code, sizeof(code));
+      } else if (expr->raw_value()->IsFalse()) {
+        byte code[] = {WASM_I32V(0)};
+        current_function_builder_->EmitCode(code, sizeof(code));
+      } else if (expr->raw_value()->IsNumber()) {
+        // This can happen when -x becomes x * -1 (due to the parser).
+        int32_t i = 0;
+        if (!value->ToInt32(&i) || i != -1) {
+          UNREACHABLE();
+        }
+        byte code[] = {WASM_I32V(i)};
+        current_function_builder_->EmitCode(code, sizeof(code));
+      } else {
+        UNREACHABLE();
+      }
     } else if (type->IsA(AsmType::Double())) {
+      // TODO(bradnelson): Pattern match the case where negation occurs and
+      // emit f64.neg instead.
       double val = expr->raw_value()->AsNumber();
       byte code[] = {WASM_F64(val)};
       current_function_builder_->EmitCode(code, sizeof(code));
+    } else if (type->IsA(AsmType::Float())) {
+      // This can happen when -fround(x) becomes fround(x) * -1.0[float]
+      // (due to the parser).
+      // TODO(bradnelson): Pattern match this and emit f32.neg instead.
+      double val = expr->raw_value()->AsNumber();
+      DCHECK_EQ(-1.0, val);
+      byte code[] = {WASM_F32(val)};
+      current_function_builder_->EmitCode(code, sizeof(code));
     } else {
       UNREACHABLE();
     }
@@ -601,11 +649,10 @@
       DCHECK(name->IsPropertyName());
       const AstRawString* raw_name = name->AsRawPropertyName();
       if (var->is_function()) {
-        uint32_t index = LookupOrInsertFunction(var);
-        builder_->FunctionAt(index)->SetExported();
-        builder_->FunctionAt(index)->SetName(
-            reinterpret_cast<const char*>(raw_name->raw_data()),
-            raw_name->length());
+        WasmFunctionBuilder* function = LookupOrInsertFunction(var);
+        function->SetExported();
+        function->SetName(reinterpret_cast<const char*>(raw_name->raw_data()),
+                          raw_name->length());
       }
     }
   }
@@ -613,7 +660,7 @@
   void VisitArrayLiteral(ArrayLiteral* expr) { UNREACHABLE(); }
 
   void LoadInitFunction() {
-    current_function_builder_ = builder_->FunctionAt(init_function_index_);
+    current_function_builder_ = init_function_;
     scope_ = kInitScope;
   }
 
@@ -642,7 +689,8 @@
     for (int i = 0; i < funcs->values()->length(); ++i) {
       VariableProxy* func = funcs->values()->at(i)->AsVariableProxy();
       DCHECK_NOT_NULL(func);
-      builder_->AddIndirectFunction(LookupOrInsertFunction(func->var()));
+      builder_->AddIndirectFunction(
+          LookupOrInsertFunction(func->var())->func_index());
     }
   }
 
@@ -684,20 +732,20 @@
 
    public:
     explicit ImportedFunctionTable(AsmWasmBuilderImpl* builder)
-        : table_(base::HashMap::PointersMatch,
-                 ZoneHashMap::kDefaultHashMapCapacity,
+        : table_(ZoneHashMap::kDefaultHashMapCapacity,
                  ZoneAllocationPolicy(builder->zone())),
           builder_(builder) {}
 
     void AddImport(Variable* v, const char* name, int name_length) {
       ImportedFunctionIndices* indices = new (builder_->zone())
           ImportedFunctionIndices(name, name_length, builder_->zone());
-      ZoneHashMap::Entry* entry = table_.LookupOrInsert(
+      auto* entry = table_.LookupOrInsert(
           v, ComputePointerHash(v), ZoneAllocationPolicy(builder_->zone()));
       entry->value = indices;
     }
 
-    uint32_t GetFunctionIndex(Variable* v, FunctionSig* sig) {
+    // Get a function's index (or allocate if new).
+    uint32_t LookupOrInsertImport(Variable* v, FunctionSig* sig) {
       ZoneHashMap::Entry* entry = table_.Lookup(v, ComputePointerHash(v));
       DCHECK_NOT_NULL(entry);
       ImportedFunctionIndices* indices =
@@ -774,7 +822,7 @@
     RECURSE(Visit(value));
   }
 
-  void EmitAssignment(Assignment* expr, MachineType type) {
+  void EmitAssignment(Assignment* expr, MachineType type, ValueFate fate) {
     // Match the left hand side of the assignment.
     VariableProxy* target_var = expr->target()->AsVariableProxy();
     if (target_var != nullptr) {
@@ -783,11 +831,19 @@
       LocalType var_type = TypeOf(expr);
       DCHECK_NE(kAstStmt, var_type);
       if (var->IsContextSlot()) {
-        current_function_builder_->EmitWithVarInt(
-            kExprSetGlobal, LookupOrInsertGlobal(var, var_type));
+        uint32_t index = LookupOrInsertGlobal(var, var_type);
+        current_function_builder_->EmitWithVarInt(kExprSetGlobal, index);
+        if (fate == kLeaveOnStack) {
+          current_function_builder_->EmitWithVarInt(kExprGetGlobal, index);
+        }
       } else {
-        current_function_builder_->EmitSetLocal(
-            LookupOrInsertLocal(var, var_type));
+        if (fate == kDrop) {
+          current_function_builder_->EmitSetLocal(
+              LookupOrInsertLocal(var, var_type));
+        } else {
+          current_function_builder_->EmitTeeLocal(
+              LookupOrInsertLocal(var, var_type));
+        }
       }
     }
 
@@ -799,6 +855,7 @@
               ->IsA(AsmType::Float32Array())) {
         current_function_builder_->Emit(kExprF32ConvertF64);
       }
+      // Note that unlike StoreMem, AsmjsStoreMem ignores out-of-bounds writes.
       WasmOpcode opcode;
       if (type == MachineType::Int8()) {
         opcode = kExprI32AsmjsStoreMem8;
@@ -820,6 +877,10 @@
         UNREACHABLE();
       }
       current_function_builder_->Emit(opcode);
+      if (fate == kDrop) {
+        // Asm.js stores to memory leave their result on the stack.
+        current_function_builder_->Emit(kExprDrop);
+      }
     }
 
     if (target_var == nullptr && target_prop == nullptr) {
@@ -828,12 +889,16 @@
   }
 
   void VisitAssignment(Assignment* expr) {
+    VisitAssignment(expr, kLeaveOnStack);
+  }
+
+  void VisitAssignment(Assignment* expr, ValueFate fate) {
     bool as_init = false;
     if (scope_ == kModuleScope) {
       // Skip extra assignment inserted by the parser when in this form:
       // (function Module(a, b, c) {... })
       if (expr->target()->IsVariableProxy() &&
-          expr->target()->AsVariableProxy()->var()->mode() == CONST_LEGACY) {
+          expr->target()->AsVariableProxy()->var()->is_sloppy_function_name()) {
         return;
       }
       Property* prop = expr->value()->AsProperty();
@@ -873,12 +938,12 @@
     }
 
     if (as_init) LoadInitFunction();
-    MachineType mtype;
+    MachineType mtype = MachineType::None();
     bool is_nop = false;
     EmitAssignmentLhs(expr->target(), &mtype);
     EmitAssignmentRhs(expr->target(), expr->value(), &is_nop);
     if (!is_nop) {
-      EmitAssignment(expr, mtype);
+      EmitAssignment(expr, mtype, fate);
     }
     if (as_init) UnLoadInitFunction();
   }
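
The ValueFate parameter threaded through VisitAssignment above decides between set_local and tee_local (and, for globals, whether to re-read the global after the store). A standalone sketch of that choice (printed opcodes, illustrative only):

// set_local consumes the value; tee_local stores it and leaves it on the
// stack for the surrounding expression.
#include <cstdio>

enum class ValueFate { kDrop, kLeaveOnStack };

void EmitLocalAssignment(int local_index, ValueFate fate) {
  if (fate == ValueFate::kDrop) {
    std::printf("set_local %d\n", local_index);
  } else {
    std::printf("tee_local %d\n", local_index);
  }
}

int main() {
  EmitLocalAssignment(0, ValueFate::kDrop);          // `x = f();` as a statement
  EmitLocalAssignment(0, ValueFate::kLeaveOnStack);  // `y = (x = f());`
  return 0;
}
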
@@ -1099,24 +1164,24 @@
       }
       case AsmTyper::kMathAbs: {
         if (call_type == kAstI32) {
-          uint32_t tmp = current_function_builder_->AddLocal(kAstI32);
+          WasmTemporary tmp(current_function_builder_, kAstI32);
 
           // if set_local(tmp, x) < 0
           Visit(call->arguments()->at(0));
-          current_function_builder_->EmitSetLocal(tmp);
+          current_function_builder_->EmitTeeLocal(tmp.index());
           byte code[] = {WASM_I8(0)};
           current_function_builder_->EmitCode(code, sizeof(code));
           current_function_builder_->Emit(kExprI32LtS);
-          current_function_builder_->Emit(kExprIf);
+          current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
 
           // then (0 - tmp)
           current_function_builder_->EmitCode(code, sizeof(code));
-          current_function_builder_->EmitGetLocal(tmp);
+          current_function_builder_->EmitGetLocal(tmp.index());
           current_function_builder_->Emit(kExprI32Sub);
 
           // else tmp
           current_function_builder_->Emit(kExprElse);
-          current_function_builder_->EmitGetLocal(tmp);
+          current_function_builder_->EmitGetLocal(tmp.index());
           // end
           current_function_builder_->Emit(kExprEnd);
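
For the integer Math.abs lowering above, both arms of the if now produce an i32 (hence the kLocalI32 type immediate). A standalone C++ mirror of the emitted shape, useful as a sanity check of the logic (not generated code):

// Mirrors: (tee_local tmp x) (i32.const 0) i32.lt_s
//          if i32  (i32.const 0) (get_local tmp) i32.sub
//          else    (get_local tmp)
//          end
#include <cassert>
#include <cstdint>

int32_t AsmJsIntAbs(int32_t x) {
  int32_t tmp = x;  // tee_local tmp
  if (tmp < 0) {
    // i32.sub wraps on overflow, so do the subtraction in unsigned arithmetic;
    // INT32_MIN maps to itself, matching the wasm semantics.
    return static_cast<int32_t>(0u - static_cast<uint32_t>(tmp));
  }
  return tmp;
}

int main() {
  assert(AsmJsIntAbs(-5) == 5);
  assert(AsmJsIntAbs(7) == 7);
  return 0;
}
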
 
@@ -1134,25 +1199,25 @@
       case AsmTyper::kMathMin: {
         // TODO(bradnelson): Change wasm to match Math.min in asm.js mode.
         if (call_type == kAstI32) {
-          uint32_t tmp_x = current_function_builder_->AddLocal(kAstI32);
-          uint32_t tmp_y = current_function_builder_->AddLocal(kAstI32);
+          WasmTemporary tmp_x(current_function_builder_, kAstI32);
+          WasmTemporary tmp_y(current_function_builder_, kAstI32);
 
           // if set_local(tmp_x, x) < set_local(tmp_y, y)
           Visit(call->arguments()->at(0));
-          current_function_builder_->EmitSetLocal(tmp_x);
+          current_function_builder_->EmitTeeLocal(tmp_x.index());
 
           Visit(call->arguments()->at(1));
-          current_function_builder_->EmitSetLocal(tmp_y);
+          current_function_builder_->EmitTeeLocal(tmp_y.index());
 
           current_function_builder_->Emit(kExprI32LeS);
-          current_function_builder_->Emit(kExprIf);
+          current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
 
           // then tmp_x
-          current_function_builder_->EmitGetLocal(tmp_x);
+          current_function_builder_->EmitGetLocal(tmp_x.index());
 
           // else tmp_y
           current_function_builder_->Emit(kExprElse);
-          current_function_builder_->EmitGetLocal(tmp_y);
+          current_function_builder_->EmitGetLocal(tmp_y.index());
           current_function_builder_->Emit(kExprEnd);
 
         } else if (call_type == kAstF32) {
@@ -1169,26 +1234,26 @@
       case AsmTyper::kMathMax: {
         // TODO(bradnelson): Change wasm to match Math.max in asm.js mode.
         if (call_type == kAstI32) {
-          uint32_t tmp_x = current_function_builder_->AddLocal(kAstI32);
-          uint32_t tmp_y = current_function_builder_->AddLocal(kAstI32);
+          WasmTemporary tmp_x(current_function_builder_, kAstI32);
+          WasmTemporary tmp_y(current_function_builder_, kAstI32);
 
           // if set_local(tmp_x, x) < set_local(tmp_y, y)
           Visit(call->arguments()->at(0));
 
-          current_function_builder_->EmitSetLocal(tmp_x);
+          current_function_builder_->EmitTeeLocal(tmp_x.index());
 
           Visit(call->arguments()->at(1));
-          current_function_builder_->EmitSetLocal(tmp_y);
+          current_function_builder_->EmitTeeLocal(tmp_y.index());
 
           current_function_builder_->Emit(kExprI32LeS);
-          current_function_builder_->Emit(kExprIf);
+          current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
 
           // then tmp_y
-          current_function_builder_->EmitGetLocal(tmp_y);
+          current_function_builder_->EmitGetLocal(tmp_y.index());
 
           // else tmp_x
           current_function_builder_->Emit(kExprElse);
-          current_function_builder_->EmitGetLocal(tmp_x);
+          current_function_builder_->EmitGetLocal(tmp_x.index());
           current_function_builder_->Emit(kExprEnd);
 
         } else if (call_type == kAstF32) {
@@ -1267,18 +1332,23 @@
     }
   }
 
-  void VisitCall(Call* expr) {
+  void VisitCall(Call* expr) { VisitCallExpression(expr); }
+
+  bool VisitCallExpression(Call* expr) {
     Call::CallType call_type = expr->GetCallType();
+    bool returns_value = true;
     switch (call_type) {
       case Call::OTHER_CALL: {
-        DCHECK_EQ(kFuncScope, scope_);
         VariableProxy* proxy = expr->expression()->AsVariableProxy();
         if (proxy != nullptr) {
+          DCHECK(kFuncScope == scope_ ||
+                 typer_->VariableAsStandardMember(proxy->var()) ==
+                     AsmTyper::kMathFround);
           if (VisitStdlibFunction(expr, proxy)) {
-            return;
+            return true;
           }
         }
-        uint32_t index;
+        DCHECK(kFuncScope == scope_);
         VariableProxy* vp = expr->expression()->AsVariableProxy();
         DCHECK_NOT_NULL(vp);
         if (typer_->TypeOf(vp)->AsFFIType() != nullptr) {
@@ -1288,22 +1358,24 @@
                                    args->length());
           if (return_type != kAstStmt) {
             sig.AddReturn(return_type);
+          } else {
+            returns_value = false;
           }
           for (int i = 0; i < args->length(); ++i) {
             sig.AddParam(TypeOf(args->at(i)));
           }
-          index =
-              imported_function_table_.GetFunctionIndex(vp->var(), sig.Build());
-          VisitCallArgs(expr);
-          current_function_builder_->Emit(kExprCallImport);
-          current_function_builder_->EmitVarInt(expr->arguments()->length());
-          current_function_builder_->EmitVarInt(index);
-        } else {
-          index = LookupOrInsertFunction(vp->var());
+          uint32_t index = imported_function_table_.LookupOrInsertImport(
+              vp->var(), sig.Build());
           VisitCallArgs(expr);
           current_function_builder_->Emit(kExprCallFunction);
-          current_function_builder_->EmitVarInt(expr->arguments()->length());
           current_function_builder_->EmitVarInt(index);
+        } else {
+          WasmFunctionBuilder* function = LookupOrInsertFunction(vp->var());
+          VisitCallArgs(expr);
+          current_function_builder_->Emit(kExprCallFunction);
+          current_function_builder_->EmitDirectCallIndex(
+              function->func_index());
+          returns_value = function->signature()->return_count() > 0;
         }
         break;
       }
@@ -1314,18 +1386,28 @@
         VariableProxy* var = p->obj()->AsVariableProxy();
         DCHECK_NOT_NULL(var);
         FunctionTableIndices* indices = LookupFunctionTable(var->var());
-        RECURSE(Visit(p->key()));
+        Visit(p->key());  // TODO(titzer): should use RECURSE()
+
+        // We have to use a temporary for the correct order of evaluation.
         current_function_builder_->EmitI32Const(indices->start_index);
         current_function_builder_->Emit(kExprI32Add);
+        WasmTemporary tmp(current_function_builder_, kAstI32);
+        current_function_builder_->EmitSetLocal(tmp.index());
+
         VisitCallArgs(expr);
+
+        current_function_builder_->EmitGetLocal(tmp.index());
         current_function_builder_->Emit(kExprCallIndirect);
-        current_function_builder_->EmitVarInt(expr->arguments()->length());
         current_function_builder_->EmitVarInt(indices->signature_index);
+        returns_value =
+            builder_->GetSignature(indices->signature_index)->return_count() >
+            0;
         break;
       }
       default:
         UNREACHABLE();
     }
+    return returns_value;
   }
 
   void VisitCallNew(CallNew* expr) { UNREACHABLE(); }
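
The temporary introduced above for indirect calls preserves JavaScript evaluation order: the table index expression must be evaluated before the arguments, but call_indirect consumes the index last, so it is parked in a local in between. A sketch of the emitted order (printed opcodes, illustrative):

#include <cstdio>

int main() {
  std::printf("<index expr> i32.const <table start> i32.add\n");
  std::printf("set_local tmp        ; evaluate the index first, park it\n");
  std::printf("<arg 0> ... <arg n>  ; then evaluate the call arguments\n");
  std::printf("get_local tmp        ; recall the index\n");
  std::printf("call_indirect <sig>  ; index is consumed after the arguments\n");
  return 0;
}
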
@@ -1511,16 +1593,13 @@
       RECURSE(Visit(GetLeft(expr)));
     } else {
       if (expr->op() == Token::COMMA) {
-        current_function_builder_->Emit(kExprBlock);
+        RECURSE(VisitForEffect(expr->left()));
+        RECURSE(Visit(expr->right()));
+        return;
       }
-
       RECURSE(Visit(expr->left()));
       RECURSE(Visit(expr->right()));
 
-      if (expr->op() == Token::COMMA) {
-        current_function_builder_->Emit(kExprEnd);
-      }
-
       switch (expr->op()) {
         BINOP_CASE(Token::ADD, Add, NON_SIGNED_BINOP, true);
         BINOP_CASE(Token::SUB, Sub, NON_SIGNED_BINOP, true);
@@ -1720,18 +1799,33 @@
     return (reinterpret_cast<IndexContainer*>(entry->value))->index;
   }
 
-  uint32_t LookupOrInsertFunction(Variable* v) {
+  WasmFunctionBuilder* LookupOrInsertFunction(Variable* v) {
     DCHECK_NOT_NULL(builder_);
     ZoneHashMap::Entry* entry = functions_.Lookup(v, ComputePointerHash(v));
     if (entry == nullptr) {
-      uint32_t index = builder_->AddFunction();
-      IndexContainer* container = new (zone()) IndexContainer();
-      container->index = index;
+      auto* func_type = typer_->TypeOf(v)->AsFunctionType();
+      DCHECK_NOT_NULL(func_type);
+      // Build the signature for the function.
+      LocalType return_type = TypeFrom(func_type->ReturnType());
+      const auto& arguments = func_type->Arguments();
+      FunctionSig::Builder b(zone(), return_type == kAstStmt ? 0 : 1,
+                             arguments.size());
+      if (return_type != kAstStmt) b.AddReturn(return_type);
+      for (int i = 0; i < static_cast<int>(arguments.size()); ++i) {
+        LocalType type = TypeFrom(arguments[i]);
+        DCHECK_NE(kAstStmt, type);
+        b.AddParam(type);
+      }
+
+      WasmFunctionBuilder* function = builder_->AddFunction(b.Build());
       entry = functions_.LookupOrInsert(v, ComputePointerHash(v),
                                         ZoneAllocationPolicy(zone()));
-      entry->value = container;
+      function->SetName(
+          reinterpret_cast<const char*>(v->raw_name()->raw_data()),
+          v->raw_name()->length());
+      entry->value = function;
     }
-    return (reinterpret_cast<IndexContainer*>(entry->value))->index;
+    return (reinterpret_cast<WasmFunctionBuilder*>(entry->value));
   }
 
   LocalType TypeOf(Expression* expr) { return TypeFrom(typer_->TypeOf(expr)); }
@@ -1766,8 +1860,8 @@
   AsmTyper* typer_;
   ZoneVector<std::pair<BreakableStatement*, bool>> breakable_blocks_;
   ZoneVector<ForeignVariable> foreign_variables_;
-  uint32_t init_function_index_;
-  uint32_t foreign_init_function_index_;
+  WasmFunctionBuilder* init_function_;
+  WasmFunctionBuilder* foreign_init_function_;
   uint32_t next_table_index_;
   ZoneHashMap function_tables_;
   ImportedFunctionTable imported_function_table_;
@@ -1792,6 +1886,10 @@
   impl.builder_->WriteTo(*buffer);
   return buffer;
 }
+
+const char* AsmWasmBuilder::foreign_init_name = "__foreign_init__";
+const char* AsmWasmBuilder::single_function_name = "__single_function__";
+
 }  // namespace wasm
 }  // namespace internal
 }  // namespace v8
diff --git a/src/asmjs/asm-wasm-builder.h b/src/asmjs/asm-wasm-builder.h
index 3276c88..9f85dfa 100644
--- a/src/asmjs/asm-wasm-builder.h
+++ b/src/asmjs/asm-wasm-builder.h
@@ -8,8 +8,8 @@
 #include "src/allocation.h"
 #include "src/asmjs/asm-typer.h"
 #include "src/objects.h"
-#include "src/wasm/encoder.h"
-#include "src/zone.h"
+#include "src/wasm/wasm-module-builder.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -24,6 +24,9 @@
                           AsmTyper* typer);
   ZoneBuffer* Run(Handle<FixedArray>* foreign_args);
 
+  static const char* foreign_init_name;
+  static const char* single_function_name;
+
  private:
   Isolate* isolate_;
   Zone* zone_;
diff --git a/src/assembler.cc b/src/assembler.cc
index 83dbbe8..b44bc06 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -120,7 +120,7 @@
 double one_half;
 double minus_one_half;
 double negative_infinity;
-double the_hole_nan;
+uint64_t the_hole_nan;
 double uint32_bias;
 };
 
@@ -190,6 +190,7 @@
   if (size == 0) return;
 
 #if defined(USE_SIMULATOR)
+  base::LockGuard<base::Mutex> lock_guard(isolate->simulator_i_cache_mutex());
   Simulator::FlushICache(isolate->simulator_i_cache(), start, size);
 #else
   CpuFeatures::FlushICache(start, size);
@@ -233,22 +234,14 @@
 // Implementation of CpuFeatureScope
 
 #ifdef DEBUG
-CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
+CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
+                                 CheckPolicy check)
     : assembler_(assembler) {
-  DCHECK(CpuFeatures::IsSupported(f));
+  DCHECK_IMPLIES(check == kCheckSupported, CpuFeatures::IsSupported(f));
   old_enabled_ = assembler_->enabled_cpu_features();
-  uint64_t mask = static_cast<uint64_t>(1) << f;
-  // TODO(svenpanne) This special case below doesn't belong here!
-#if V8_TARGET_ARCH_ARM
-  // ARMv7 is implied by VFP3.
-  if (f == VFP3) {
-    mask |= static_cast<uint64_t>(1) << ARMv7;
-  }
-#endif
-  assembler_->set_enabled_cpu_features(old_enabled_ | mask);
+  assembler_->EnableCpuFeature(f);
 }
 
-
 CpuFeatureScope::~CpuFeatureScope() {
   assembler_->set_enabled_cpu_features(old_enabled_);
 }
@@ -350,19 +343,18 @@
   DCHECK(IsWasmMemoryReference(rmode_) || IsWasmMemorySizeReference(rmode_));
   if (IsWasmMemoryReference(rmode_)) {
     Address updated_reference;
-    DCHECK(old_size == 0 || Memory::IsAddressInRange(
-                                old_base, wasm_memory_reference(), old_size));
+    DCHECK_GE(wasm_memory_reference(), old_base);
     updated_reference = new_base + (wasm_memory_reference() - old_base);
-    DCHECK(new_size == 0 ||
-           Memory::IsAddressInRange(new_base, updated_reference, new_size));
+    // The reference is not checked here but at runtime. Validity of references
+    // may change over time.
     unchecked_update_wasm_memory_reference(updated_reference,
                                            icache_flush_mode);
   } else if (IsWasmMemorySizeReference(rmode_)) {
-    uint32_t updated_size_reference;
-    DCHECK(old_size == 0 || wasm_memory_size_reference() <= old_size);
-    updated_size_reference =
-        new_size + (wasm_memory_size_reference() - old_size);
-    DCHECK(updated_size_reference <= new_size);
+    uint32_t current_size_reference = wasm_memory_size_reference();
+    DCHECK(old_size == 0 || current_size_reference <= old_size);
+    uint32_t offset = old_size - current_size_reference;
+    DCHECK_GE(new_size, offset);
+    uint32_t updated_size_reference = new_size - offset;
     unchecked_update_wasm_memory_size(updated_size_reference,
                                       icache_flush_mode);
   } else {
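
The size-reference update above keeps the encoded constant at a fixed distance below the memory size, so after a heap resize the new value is new_size minus that distance. A standalone check of the arithmetic:

#include <cassert>
#include <cstdint>

uint32_t UpdatedSizeReference(uint32_t old_size, uint32_t new_size,
                              uint32_t current_reference) {
  uint32_t offset = old_size - current_reference;  // distance below the end
  assert(new_size >= offset);
  return new_size - offset;
}

int main() {
  // A reference to "size - 16" is still 16 below the end after growing.
  assert(UpdatedSizeReference(0x10000, 0x20000, 0x10000 - 16) == 0x20000 - 16);
  return 0;
}
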
@@ -930,7 +922,7 @@
   double_constants.min_int = kMinInt;
   double_constants.one_half = 0.5;
   double_constants.minus_one_half = -0.5;
-  double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
+  double_constants.the_hole_nan = kHoleNanInt64;
   double_constants.negative_infinity = -V8_INFINITY;
   double_constants.uint32_bias =
     static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
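
Storing the_hole_nan as raw uint64_t bits rather than as a double presumably avoids any chance of the NaN payload being altered while the value passes through floating-point registers or arithmetic; a bit-for-bit integer copy is always exact. A standalone illustration (the payload below is arbitrary, not V8's kHoleNanInt64):

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const uint64_t hole_bits = 0x7FF4000000000001ull;  // some NaN bit pattern
  double as_double;
  std::memcpy(&as_double, &hole_bits, sizeof(as_double));

  uint64_t round_tripped;
  std::memcpy(&round_tripped, &as_double, sizeof(round_tripped));
  // A memcpy round trip preserves the payload; routing the value through
  // floating-point operations is not guaranteed to.
  assert(round_tripped == hole_bits);
  return 0;
}
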
@@ -1601,17 +1593,6 @@
 }
 
 
-ExternalReference ExternalReference::virtual_handler_register(
-    Isolate* isolate) {
-  return ExternalReference(isolate->virtual_handler_register_address());
-}
-
-
-ExternalReference ExternalReference::virtual_slot_register(Isolate* isolate) {
-  return ExternalReference(isolate->virtual_slot_register_address());
-}
-
-
 ExternalReference ExternalReference::runtime_function_table_address(
     Isolate* isolate) {
   return ExternalReference(
diff --git a/src/assembler.h b/src/assembler.h
index 77beac1..a925032 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -80,9 +80,14 @@
   void set_enabled_cpu_features(uint64_t features) {
     enabled_cpu_features_ = features;
   }
+  // Features are usually enabled by CpuFeatureScope, which also asserts that
+  // the features are supported before they are enabled.
   bool IsEnabled(CpuFeature f) {
     return (enabled_cpu_features_ & (static_cast<uint64_t>(1) << f)) != 0;
   }
+  void EnableCpuFeature(CpuFeature f) {
+    enabled_cpu_features_ |= (static_cast<uint64_t>(1) << f);
+  }
 
   bool is_constant_pool_available() const {
     if (FLAG_enable_embedded_constant_pool) {
@@ -184,15 +189,22 @@
 // Enable a specified feature within a scope.
 class CpuFeatureScope BASE_EMBEDDED {
  public:
+  enum CheckPolicy {
+    kCheckSupported,
+    kDontCheckSupported,
+  };
+
 #ifdef DEBUG
-  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f);
+  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
+                  CheckPolicy check = kCheckSupported);
   ~CpuFeatureScope();
 
  private:
   AssemblerBase* assembler_;
   uint64_t old_enabled_;
 #else
-  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f) {}
+  CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
+                  CheckPolicy check = kCheckSupported) {}
 #endif
 };
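
CpuFeatureScope, with the new optional CheckPolicy above, is an RAII helper: it saves the assembler's enabled-feature bitmask, ORs in the requested feature, and restores the old mask when the scope ends. A standalone sketch of that pattern with a stand-in Assembler, not V8's class:

#include <cassert>
#include <cstdint>

struct Assembler {
  uint64_t enabled_features = 0;
  bool IsEnabled(int feature) const {
    return (enabled_features & (uint64_t{1} << feature)) != 0;
  }
};

class FeatureScope {
 public:
  FeatureScope(Assembler* assembler, int feature)
      : assembler_(assembler), old_enabled_(assembler->enabled_features) {
    assembler_->enabled_features |= uint64_t{1} << feature;
  }
  ~FeatureScope() { assembler_->enabled_features = old_enabled_; }

 private:
  Assembler* assembler_;
  uint64_t old_enabled_;
};

int main() {
  Assembler masm;
  {
    FeatureScope scope(&masm, 3);  // enable feature bit 3 for this scope
    assert(masm.IsEnabled(3));
  }
  assert(!masm.IsEnabled(3));  // restored on scope exit
  return 0;
}
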
 
@@ -1035,9 +1047,6 @@
   static ExternalReference invoke_function_callback(Isolate* isolate);
   static ExternalReference invoke_accessor_getter_callback(Isolate* isolate);
 
-  static ExternalReference virtual_handler_register(Isolate* isolate);
-  static ExternalReference virtual_slot_register(Isolate* isolate);
-
   static ExternalReference runtime_function_table_address(Isolate* isolate);
 
   Address address() const { return reinterpret_cast<Address>(address_); }
diff --git a/src/assert-scope.h b/src/assert-scope.h
index 84e6990..fde49f8 100644
--- a/src/assert-scope.h
+++ b/src/assert-scope.h
@@ -7,6 +7,7 @@
 
 #include <stdint.h>
 #include "src/base/macros.h"
+#include "src/globals.h"
 
 namespace v8 {
 namespace internal {
@@ -33,14 +34,13 @@
   COMPILATION_ASSERT
 };
 
-
 template <PerThreadAssertType kType, bool kAllow>
 class PerThreadAssertScope {
  public:
-  PerThreadAssertScope();
-  ~PerThreadAssertScope();
+  V8_EXPORT_PRIVATE PerThreadAssertScope();
+  V8_EXPORT_PRIVATE ~PerThreadAssertScope();
 
-  static bool IsAllowed();
+  V8_EXPORT_PRIVATE static bool IsAllowed();
 
  private:
   PerThreadAssertData* data_;
diff --git a/src/ast/OWNERS b/src/ast/OWNERS
index 65a00bc..b4e1473 100644
--- a/src/ast/OWNERS
+++ b/src/ast/OWNERS
@@ -3,6 +3,7 @@
 adamk@chromium.org
 bmeurer@chromium.org
 littledan@chromium.org
+marja@chromium.org
 mstarzinger@chromium.org
 rossberg@chromium.org
 verwaest@chromium.org
diff --git a/src/ast/ast-expression-rewriter.cc b/src/ast/ast-expression-rewriter.cc
index 7bb8f08..c4fa71b 100644
--- a/src/ast/ast-expression-rewriter.cc
+++ b/src/ast/ast-expression-rewriter.cc
@@ -201,11 +201,10 @@
   AST_REWRITE_PROPERTY(FunctionLiteral, node, constructor);
   ZoneList<typename ClassLiteral::Property*>* properties = node->properties();
   for (int i = 0; i < properties->length(); i++) {
-    VisitObjectLiteralProperty(properties->at(i));
+    VisitLiteralProperty(properties->at(i));
   }
 }
 
-
 void AstExpressionRewriter::VisitNativeFunctionLiteral(
     NativeFunctionLiteral* node) {
   REWRITE_THIS(node);
@@ -243,13 +242,11 @@
   REWRITE_THIS(node);
   ZoneList<typename ObjectLiteral::Property*>* properties = node->properties();
   for (int i = 0; i < properties->length(); i++) {
-    VisitObjectLiteralProperty(properties->at(i));
+    VisitLiteralProperty(properties->at(i));
   }
 }
 
-
-void AstExpressionRewriter::VisitObjectLiteralProperty(
-    ObjectLiteralProperty* property) {
+void AstExpressionRewriter::VisitLiteralProperty(LiteralProperty* property) {
   if (property == nullptr) return;
   AST_REWRITE_PROPERTY(Expression, property, key);
   AST_REWRITE_PROPERTY(Expression, property, value);
diff --git a/src/ast/ast-expression-rewriter.h b/src/ast/ast-expression-rewriter.h
index ac45d76..dfed3e1 100644
--- a/src/ast/ast-expression-rewriter.h
+++ b/src/ast/ast-expression-rewriter.h
@@ -9,7 +9,7 @@
 #include "src/ast/ast.h"
 #include "src/ast/scopes.h"
 #include "src/type-info.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -33,7 +33,7 @@
   virtual void VisitStatements(ZoneList<Statement*>* statements);
   virtual void VisitExpressions(ZoneList<Expression*>* expressions);
 
-  virtual void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
+  virtual void VisitLiteralProperty(LiteralProperty* property);
 
  protected:
   virtual bool RewriteExpression(Expression* expr) = 0;
diff --git a/src/ast/ast-literal-reindexer.cc b/src/ast/ast-literal-reindexer.cc
index a349ae0..81a5225 100644
--- a/src/ast/ast-literal-reindexer.cc
+++ b/src/ast/ast-literal-reindexer.cc
@@ -249,21 +249,18 @@
     VisitVariableProxy(node->class_variable_proxy());
   }
   for (int i = 0; i < node->properties()->length(); i++) {
-    VisitObjectLiteralProperty(node->properties()->at(i));
+    VisitLiteralProperty(node->properties()->at(i));
   }
 }
 
-
 void AstLiteralReindexer::VisitObjectLiteral(ObjectLiteral* node) {
   UpdateIndex(node);
   for (int i = 0; i < node->properties()->length(); i++) {
-    VisitObjectLiteralProperty(node->properties()->at(i));
+    VisitLiteralProperty(node->properties()->at(i));
   }
 }
 
-
-void AstLiteralReindexer::VisitObjectLiteralProperty(
-    ObjectLiteralProperty* node) {
+void AstLiteralReindexer::VisitLiteralProperty(LiteralProperty* node) {
   Visit(node->key());
   Visit(node->value());
 }
diff --git a/src/ast/ast-literal-reindexer.h b/src/ast/ast-literal-reindexer.h
index b33e0c5..4e0ca6b 100644
--- a/src/ast/ast-literal-reindexer.h
+++ b/src/ast/ast-literal-reindexer.h
@@ -26,7 +26,7 @@
   void VisitStatements(ZoneList<Statement*>* statements);
   void VisitDeclarations(ZoneList<Declaration*>* declarations);
   void VisitArguments(ZoneList<Expression*>* arguments);
-  void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
+  void VisitLiteralProperty(LiteralProperty* property);
 
   void UpdateIndex(MaterializedLiteral* literal) {
     literal->literal_index_ = next_index_++;
diff --git a/src/ast/ast-numbering.cc b/src/ast/ast-numbering.cc
index 1b9905a..e1b11f6 100644
--- a/src/ast/ast-numbering.cc
+++ b/src/ast/ast-numbering.cc
@@ -39,7 +39,7 @@
   void VisitStatements(ZoneList<Statement*>* statements);
   void VisitDeclarations(ZoneList<Declaration*>* declarations);
   void VisitArguments(ZoneList<Expression*>* arguments);
-  void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
+  void VisitLiteralProperty(LiteralProperty* property);
 
   int ReserveIdRange(int n) {
     int tmp = next_id_;
@@ -233,14 +233,6 @@
 void AstNumberingVisitor::VisitBlock(Block* node) {
   IncrementNodeCount();
   node->set_base_id(ReserveIdRange(Block::num_ids()));
-
-  if (FLAG_ignition && node->scope() != nullptr &&
-      node->scope()->NeedsContext()) {
-    // Create ScopeInfo while on the main thread to avoid allocation during
-    // potentially concurrent bytecode generation.
-    node->scope()->GetScopeInfo(isolate_);
-  }
-
   if (node->scope() != NULL) VisitDeclarations(node->scope()->declarations());
   VisitStatements(node->statements());
 }
@@ -257,6 +249,27 @@
   IncrementNodeCount();
   node->set_base_id(ReserveIdRange(CallRuntime::num_ids()));
   VisitArguments(node->arguments());
+  // To support catch prediction within async/await:
+  //
+  // The AstNumberingVisitor is where catch prediction currently occurs, and it
+  // is the only common point that has access to this information. The parser
+  // just doesn't know yet. Take the following two cases of catch prediction:
+  //
+  // try { await fn(); } catch (e) { }
+  // try { await fn(); } finally { }
+  //
+  // When parsing the await that we want to mark as caught or uncaught, it's
+  // not yet known whether it will be followed by a 'finally' or a 'catch'.
+  // The AstNumberingVisitor is what learns whether it is caught. To make
+  // the information available later to the runtime, the AstNumberingVisitor
+  // has to stash it somewhere. Changing the runtime function into another
+  // one in ast-numbering seemed like a simple and straightforward solution to
+  // that problem.
+  if (node->is_jsruntime() &&
+      node->context_index() == Context::ASYNC_FUNCTION_AWAIT_CAUGHT_INDEX &&
+      catch_prediction_ == HandlerTable::ASYNC_AWAIT) {
+    node->set_context_index(Context::ASYNC_FUNCTION_AWAIT_UNCAUGHT_INDEX);
+  }
 }
 
 
@@ -370,6 +383,7 @@
   node->set_base_id(ReserveIdRange(CompareOperation::num_ids()));
   Visit(node->left());
   Visit(node->right());
+  ReserveFeedbackSlots(node);
 }
 
 
@@ -444,6 +458,7 @@
   node->set_base_id(ReserveIdRange(CaseClause::num_ids()));
   if (!node->is_default()) Visit(node->label());
   VisitStatements(node->statements());
+  ReserveFeedbackSlots(node);
 }
 
 
@@ -470,7 +485,7 @@
     VisitVariableProxy(node->class_variable_proxy());
   }
   for (int i = 0; i < node->properties()->length(); i++) {
-    VisitObjectLiteralProperty(node->properties()->at(i));
+    VisitLiteralProperty(node->properties()->at(i));
   }
   ReserveFeedbackSlots(node);
 }
@@ -480,7 +495,7 @@
   IncrementNodeCount();
   node->set_base_id(ReserveIdRange(node->num_ids()));
   for (int i = 0; i < node->properties()->length(); i++) {
-    VisitObjectLiteralProperty(node->properties()->at(i));
+    VisitLiteralProperty(node->properties()->at(i));
   }
   node->BuildConstantProperties(isolate_);
   // Mark all computed expressions that are bound to a key that
@@ -490,15 +505,12 @@
   ReserveFeedbackSlots(node);
 }
 
-
-void AstNumberingVisitor::VisitObjectLiteralProperty(
-    ObjectLiteralProperty* node) {
+void AstNumberingVisitor::VisitLiteralProperty(LiteralProperty* node) {
   if (node->is_computed_name()) DisableCrankshaft(kComputedPropertyName);
   Visit(node->key());
   Visit(node->value());
 }
 
-
 void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
   IncrementNodeCount();
   node->set_base_id(ReserveIdRange(node->num_ids()));
@@ -570,27 +582,22 @@
 bool AstNumberingVisitor::Renumber(FunctionLiteral* node) {
   DeclarationScope* scope = node->scope();
   if (scope->new_target_var()) DisableCrankshaft(kSuperReference);
-  if (scope->calls_eval()) DisableOptimization(kFunctionCallsEval);
+  if (scope->calls_eval()) DisableCrankshaft(kFunctionCallsEval);
   if (scope->arguments() != NULL && !scope->arguments()->IsStackAllocated()) {
     DisableCrankshaft(kContextAllocatedArguments);
   }
 
-  int rest_index;
-  if (scope->rest_parameter(&rest_index)) {
+  if (scope->rest_parameter() != nullptr) {
     DisableCrankshaft(kRestParameter);
   }
 
-  if (FLAG_ignition && scope->NeedsContext() && scope->is_script_scope()) {
-    // Create ScopeInfo while on the main thread to avoid allocation during
-    // potentially concurrent bytecode generation.
-    node->scope()->GetScopeInfo(isolate_);
-  }
-
   if (IsGeneratorFunction(node->kind()) || IsAsyncFunction(node->kind())) {
-    // TODO(neis): We may want to allow Turbofan optimization here if
-    // --turbo-from-bytecode is set and we know that Ignition is used.
-    // Unfortunately we can't express that here.
-    DisableOptimization(kGenerator);
+    // Generators can be optimized if --turbo-from-bytecode is set.
+    if (FLAG_turbo_from_bytecode) {
+      DisableCrankshaft(kGenerator);
+    } else {
+      DisableOptimization(kGenerator);
+    }
   }
 
   VisitDeclarations(scope->declarations());
diff --git a/src/ast/ast-traversal-visitor.h b/src/ast/ast-traversal-visitor.h
index 0f2976c..e0f88e1 100644
--- a/src/ast/ast-traversal-visitor.h
+++ b/src/ast/ast-traversal-visitor.h
@@ -447,9 +447,9 @@
     RECURSE_EXPRESSION(Visit(expr->extends()));
   }
   RECURSE_EXPRESSION(Visit(expr->constructor()));
-  ZoneList<ObjectLiteralProperty*>* props = expr->properties();
+  ZoneList<ClassLiteralProperty*>* props = expr->properties();
   for (int i = 0; i < props->length(); ++i) {
-    ObjectLiteralProperty* prop = props->at(i);
+    ClassLiteralProperty* prop = props->at(i);
     if (!prop->key()->IsLiteral()) {
       RECURSE_EXPRESSION(Visit(prop->key()));
     }
diff --git a/src/ast/ast-type-bounds.h b/src/ast/ast-type-bounds.h
index ec26fdf..0d1a3c8 100644
--- a/src/ast/ast-type-bounds.h
+++ b/src/ast/ast-type-bounds.h
@@ -7,8 +7,8 @@
 #ifndef V8_AST_AST_TYPE_BOUNDS_H_
 #define V8_AST_AST_TYPE_BOUNDS_H_
 
-#include "src/types.h"
-#include "src/zone-containers.h"
+#include "src/ast/ast-types.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -20,18 +20,18 @@
   explicit AstTypeBounds(Zone* zone) : bounds_map_(zone) {}
   ~AstTypeBounds() {}
 
-  Bounds get(Expression* expression) const {
-    ZoneMap<Expression*, Bounds>::const_iterator i =
+  AstBounds get(Expression* expression) const {
+    ZoneMap<Expression*, AstBounds>::const_iterator i =
         bounds_map_.find(expression);
-    return (i != bounds_map_.end()) ? i->second : Bounds::Unbounded();
+    return (i != bounds_map_.end()) ? i->second : AstBounds::Unbounded();
   }
 
-  void set(Expression* expression, Bounds bounds) {
+  void set(Expression* expression, AstBounds bounds) {
     bounds_map_[expression] = bounds;
   }
 
  private:
-  ZoneMap<Expression*, Bounds> bounds_map_;
+  ZoneMap<Expression*, AstBounds> bounds_map_;
 };
 
 }  // namespace internal
diff --git a/src/ast/ast-types.cc b/src/ast/ast-types.cc
new file mode 100644
index 0000000..a075e8e
--- /dev/null
+++ b/src/ast/ast-types.cc
@@ -0,0 +1,1270 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iomanip>
+
+#include "src/ast/ast-types.h"
+
+#include "src/handles-inl.h"
+#include "src/ostreams.h"
+
+namespace v8 {
+namespace internal {
+
+// NOTE: If code is marked as being a "shortcut", this means that removing
+// the code won't affect the semantics of the surrounding function definition.
+
+// static
+bool AstType::IsInteger(i::Object* x) {
+  return x->IsNumber() && AstType::IsInteger(x->Number());
+}
+
+// -----------------------------------------------------------------------------
+// Range-related helper functions.
+
+bool AstRangeType::Limits::IsEmpty() { return this->min > this->max; }
+
+AstRangeType::Limits AstRangeType::Limits::Intersect(Limits lhs, Limits rhs) {
+  DisallowHeapAllocation no_allocation;
+  Limits result(lhs);
+  if (lhs.min < rhs.min) result.min = rhs.min;
+  if (lhs.max > rhs.max) result.max = rhs.max;
+  return result;
+}
+
+AstRangeType::Limits AstRangeType::Limits::Union(Limits lhs, Limits rhs) {
+  DisallowHeapAllocation no_allocation;
+  if (lhs.IsEmpty()) return rhs;
+  if (rhs.IsEmpty()) return lhs;
+  Limits result(lhs);
+  if (lhs.min > rhs.min) result.min = rhs.min;
+  if (lhs.max < rhs.max) result.max = rhs.max;
+  return result;
+}
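+
+// For example (illustrative): with lhs = Limits(0, 10) and rhs = Limits(5, 20),
+// Intersect(lhs, rhs) yields [5, 10] and Union(lhs, rhs) yields [0, 20];
+// intersecting disjoint limits such as Limits(0, 1) and Limits(5, 6) leaves
+// min > max, which IsEmpty() reports as the empty interval.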
+
+bool AstType::Overlap(AstRangeType* lhs, AstRangeType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return !AstRangeType::Limits::Intersect(AstRangeType::Limits(lhs),
+                                          AstRangeType::Limits(rhs))
+              .IsEmpty();
+}
+
+bool AstType::Contains(AstRangeType* lhs, AstRangeType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return lhs->Min() <= rhs->Min() && rhs->Max() <= lhs->Max();
+}
+
+bool AstType::Contains(AstRangeType* lhs, AstConstantType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return IsInteger(*rhs->Value()) && lhs->Min() <= rhs->Value()->Number() &&
+         rhs->Value()->Number() <= lhs->Max();
+}
+
+bool AstType::Contains(AstRangeType* range, i::Object* val) {
+  DisallowHeapAllocation no_allocation;
+  return IsInteger(val) && range->Min() <= val->Number() &&
+         val->Number() <= range->Max();
+}
+
+// -----------------------------------------------------------------------------
+// Min and Max computation.
+
+double AstType::Min() {
+  DCHECK(this->SemanticIs(Number()));
+  if (this->IsBitset()) return AstBitsetType::Min(this->AsBitset());
+  if (this->IsUnion()) {
+    double min = +V8_INFINITY;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      min = std::min(min, this->AsUnion()->Get(i)->Min());
+    }
+    return min;
+  }
+  if (this->IsRange()) return this->AsRange()->Min();
+  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
+  UNREACHABLE();
+  return 0;
+}
+
+double AstType::Max() {
+  DCHECK(this->SemanticIs(Number()));
+  if (this->IsBitset()) return AstBitsetType::Max(this->AsBitset());
+  if (this->IsUnion()) {
+    double max = -V8_INFINITY;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      max = std::max(max, this->AsUnion()->Get(i)->Max());
+    }
+    return max;
+  }
+  if (this->IsRange()) return this->AsRange()->Max();
+  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
+  UNREACHABLE();
+  return 0;
+}
+
+// -----------------------------------------------------------------------------
+// Glb and lub computation.
+
+// The largest bitset subsumed by this type.
+AstType::bitset AstBitsetType::Glb(AstType* type) {
+  DisallowHeapAllocation no_allocation;
+  // Fast case.
+  if (IsBitset(type)) {
+    return type->AsBitset();
+  } else if (type->IsUnion()) {
+    SLOW_DCHECK(type->AsUnion()->Wellformed());
+    return type->AsUnion()->Get(0)->BitsetGlb() |
+           AST_SEMANTIC(type->AsUnion()->Get(1)->BitsetGlb());  // Shortcut.
+  } else if (type->IsRange()) {
+    bitset glb = AST_SEMANTIC(
+        AstBitsetType::Glb(type->AsRange()->Min(), type->AsRange()->Max()));
+    return glb | AST_REPRESENTATION(type->BitsetLub());
+  } else {
+    return type->Representation();
+  }
+}
+
+// The smallest bitset subsuming this type, possibly not a proper one.
+AstType::bitset AstBitsetType::Lub(AstType* type) {
+  DisallowHeapAllocation no_allocation;
+  if (IsBitset(type)) return type->AsBitset();
+  if (type->IsUnion()) {
+    // Take the representation from the first element, which is always
+    // a bitset.
+    int bitset = type->AsUnion()->Get(0)->BitsetLub();
+    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
+      // Other elements only contribute their semantic part.
+      bitset |= AST_SEMANTIC(type->AsUnion()->Get(i)->BitsetLub());
+    }
+    return bitset;
+  }
+  if (type->IsClass()) return type->AsClass()->Lub();
+  if (type->IsConstant()) return type->AsConstant()->Lub();
+  if (type->IsRange()) return type->AsRange()->Lub();
+  if (type->IsContext()) return kOtherInternal & kTaggedPointer;
+  if (type->IsArray()) return kOtherObject;
+  if (type->IsFunction()) return kFunction;
+  if (type->IsTuple()) return kOtherInternal;
+  UNREACHABLE();
+  return kNone;
+}
+
+AstType::bitset AstBitsetType::Lub(i::Map* map) {
+  DisallowHeapAllocation no_allocation;
+  switch (map->instance_type()) {
+    case STRING_TYPE:
+    case ONE_BYTE_STRING_TYPE:
+    case CONS_STRING_TYPE:
+    case CONS_ONE_BYTE_STRING_TYPE:
+    case SLICED_STRING_TYPE:
+    case SLICED_ONE_BYTE_STRING_TYPE:
+    case EXTERNAL_STRING_TYPE:
+    case EXTERNAL_ONE_BYTE_STRING_TYPE:
+    case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+    case SHORT_EXTERNAL_STRING_TYPE:
+    case SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE:
+    case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+      return kOtherString;
+    case INTERNALIZED_STRING_TYPE:
+    case ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
+    case SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE:
+    case SHORT_EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
+      return kInternalizedString;
+    case SYMBOL_TYPE:
+      return kSymbol;
+    case ODDBALL_TYPE: {
+      Heap* heap = map->GetHeap();
+      if (map == heap->undefined_map()) return kUndefined;
+      if (map == heap->null_map()) return kNull;
+      if (map == heap->boolean_map()) return kBoolean;
+      if (map == heap->the_hole_map()) return kHole;
+      DCHECK(map == heap->uninitialized_map() ||
+             map == heap->no_interceptor_result_sentinel_map() ||
+             map == heap->termination_exception_map() ||
+             map == heap->arguments_marker_map() ||
+             map == heap->optimized_out_map() ||
+             map == heap->stale_register_map());
+      return kOtherInternal & kTaggedPointer;
+    }
+    case HEAP_NUMBER_TYPE:
+      return kNumber & kTaggedPointer;
+    case SIMD128_VALUE_TYPE:
+      return kSimd;
+    case JS_OBJECT_TYPE:
+    case JS_ARGUMENTS_TYPE:
+    case JS_ERROR_TYPE:
+    case JS_GLOBAL_OBJECT_TYPE:
+    case JS_GLOBAL_PROXY_TYPE:
+    case JS_API_OBJECT_TYPE:
+    case JS_SPECIAL_API_OBJECT_TYPE:
+      if (map->is_undetectable()) return kOtherUndetectable;
+      return kOtherObject;
+    case JS_VALUE_TYPE:
+    case JS_MESSAGE_OBJECT_TYPE:
+    case JS_DATE_TYPE:
+    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_GENERATOR_OBJECT_TYPE:
+    case JS_ARRAY_BUFFER_TYPE:
+    case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:  // TODO(rossberg): there should be a RegExp type.
+    case JS_TYPED_ARRAY_TYPE:
+    case JS_DATA_VIEW_TYPE:
+    case JS_SET_TYPE:
+    case JS_MAP_TYPE:
+    case JS_SET_ITERATOR_TYPE:
+    case JS_MAP_ITERATOR_TYPE:
+    case JS_STRING_ITERATOR_TYPE:
+    case JS_WEAK_MAP_TYPE:
+    case JS_WEAK_SET_TYPE:
+    case JS_PROMISE_TYPE:
+    case JS_BOUND_FUNCTION_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kOtherObject;
+    case JS_FUNCTION_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kFunction;
+    case JS_PROXY_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kProxy;
+    case MAP_TYPE:
+    case ALLOCATION_SITE_TYPE:
+    case ACCESSOR_INFO_TYPE:
+    case SHARED_FUNCTION_INFO_TYPE:
+    case ACCESSOR_PAIR_TYPE:
+    case FIXED_ARRAY_TYPE:
+    case FIXED_DOUBLE_ARRAY_TYPE:
+    case BYTE_ARRAY_TYPE:
+    case BYTECODE_ARRAY_TYPE:
+    case TRANSITION_ARRAY_TYPE:
+    case FOREIGN_TYPE:
+    case SCRIPT_TYPE:
+    case CODE_TYPE:
+    case PROPERTY_CELL_TYPE:
+    case MODULE_TYPE:
+      return kOtherInternal & kTaggedPointer;
+
+    // Remaining instance types are unsupported for now. If any of them do
+    // require bit set types, they should get kOtherInternal & kTaggedPointer.
+    case MUTABLE_HEAP_NUMBER_TYPE:
+    case FREE_SPACE_TYPE:
+#define FIXED_TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+  case FIXED_##TYPE##_ARRAY_TYPE:
+
+      TYPED_ARRAYS(FIXED_TYPED_ARRAY_CASE)
+#undef FIXED_TYPED_ARRAY_CASE
+    case FILLER_TYPE:
+    case ACCESS_CHECK_INFO_TYPE:
+    case INTERCEPTOR_INFO_TYPE:
+    case CALL_HANDLER_INFO_TYPE:
+    case PROMISE_CONTAINER_TYPE:
+    case FUNCTION_TEMPLATE_INFO_TYPE:
+    case OBJECT_TEMPLATE_INFO_TYPE:
+    case SIGNATURE_INFO_TYPE:
+    case TYPE_SWITCH_INFO_TYPE:
+    case ALLOCATION_MEMENTO_TYPE:
+    case TYPE_FEEDBACK_INFO_TYPE:
+    case ALIASED_ARGUMENTS_ENTRY_TYPE:
+    case BOX_TYPE:
+    case DEBUG_INFO_TYPE:
+    case BREAK_POINT_INFO_TYPE:
+    case CELL_TYPE:
+    case WEAK_CELL_TYPE:
+    case PROTOTYPE_INFO_TYPE:
+    case CONTEXT_EXTENSION_TYPE:
+      UNREACHABLE();
+      return kNone;
+  }
+  UNREACHABLE();
+  return kNone;
+}
+
+AstType::bitset AstBitsetType::Lub(i::Object* value) {
+  DisallowHeapAllocation no_allocation;
+  if (value->IsNumber()) {
+    return Lub(value->Number()) &
+           (value->IsSmi() ? kTaggedSigned : kTaggedPointer);
+  }
+  return Lub(i::HeapObject::cast(value)->map());
+}
+
+AstType::bitset AstBitsetType::Lub(double value) {
+  DisallowHeapAllocation no_allocation;
+  if (i::IsMinusZero(value)) return kMinusZero;
+  if (std::isnan(value)) return kNaN;
+  if (IsUint32Double(value) || IsInt32Double(value)) return Lub(value, value);
+  return kOtherNumber;
+}
+
+// Minimum values of plain numeric bitsets.
+const AstBitsetType::Boundary AstBitsetType::BoundariesArray[] = {
+    {kOtherNumber, kPlainNumber, -V8_INFINITY},
+    {kOtherSigned32, kNegative32, kMinInt},
+    {kNegative31, kNegative31, -0x40000000},
+    {kUnsigned30, kUnsigned30, 0},
+    {kOtherUnsigned31, kUnsigned31, 0x40000000},
+    {kOtherUnsigned32, kUnsigned32, 0x80000000},
+    {kOtherNumber, kPlainNumber, static_cast<double>(kMaxUInt32) + 1}};
+
+const AstBitsetType::Boundary* AstBitsetType::Boundaries() {
+  return BoundariesArray;
+}
+
+size_t AstBitsetType::BoundariesSize() {
+  // Windows doesn't like arraysize here.
+  // return arraysize(BoundariesArray);
+  return 7;
+}
+
+AstType::bitset AstBitsetType::ExpandInternals(AstType::bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  if (!(bits & AST_SEMANTIC(kPlainNumber))) return bits;  // Shortcut.
+  const Boundary* boundaries = Boundaries();
+  for (size_t i = 0; i < BoundariesSize(); ++i) {
+    DCHECK(AstBitsetType::Is(boundaries[i].internal, boundaries[i].external));
+    if (bits & AST_SEMANTIC(boundaries[i].internal))
+      bits |= AST_SEMANTIC(boundaries[i].external);
+  }
+  return bits;
+}
+
+AstType::bitset AstBitsetType::Lub(double min, double max) {
+  DisallowHeapAllocation no_allocation;
+  int lub = kNone;
+  const Boundary* mins = Boundaries();
+
+  for (size_t i = 1; i < BoundariesSize(); ++i) {
+    if (min < mins[i].min) {
+      lub |= mins[i - 1].internal;
+      if (max < mins[i].min) return lub;
+    }
+  }
+  return lub | mins[BoundariesSize() - 1].internal;
+}
+
+AstType::bitset AstBitsetType::NumberBits(bitset bits) {
+  return AST_SEMANTIC(bits & kPlainNumber);
+}
+
+AstType::bitset AstBitsetType::Glb(double min, double max) {
+  DisallowHeapAllocation no_allocation;
+  int glb = kNone;
+  const Boundary* mins = Boundaries();
+
+  // If the range does not touch 0, the bound is empty.
+  if (max < -1 || min > 0) return glb;
+
+  for (size_t i = 1; i + 1 < BoundariesSize(); ++i) {
+    if (min <= mins[i].min) {
+      if (max + 1 < mins[i + 1].min) break;
+      glb |= mins[i].external;
+    }
+  }
+  // OtherNumber also contains float numbers, so it can never be
+  // in the greatest lower bound.
+  return glb & ~(AST_SEMANTIC(kOtherNumber));
+}
+
+double AstBitsetType::Min(bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  DCHECK(Is(AST_SEMANTIC(bits), kNumber));
+  const Boundary* mins = Boundaries();
+  bool mz = AST_SEMANTIC(bits & kMinusZero);
+  for (size_t i = 0; i < BoundariesSize(); ++i) {
+    if (Is(AST_SEMANTIC(mins[i].internal), bits)) {
+      return mz ? std::min(0.0, mins[i].min) : mins[i].min;
+    }
+  }
+  if (mz) return 0;
+  return std::numeric_limits<double>::quiet_NaN();
+}
+
+double AstBitsetType::Max(bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  DCHECK(Is(AST_SEMANTIC(bits), kNumber));
+  const Boundary* mins = Boundaries();
+  bool mz = AST_SEMANTIC(bits & kMinusZero);
+  if (AstBitsetType::Is(AST_SEMANTIC(mins[BoundariesSize() - 1].internal),
+                        bits)) {
+    return +V8_INFINITY;
+  }
+  for (size_t i = BoundariesSize() - 1; i-- > 0;) {
+    if (Is(AST_SEMANTIC(mins[i].internal), bits)) {
+      return mz ? std::max(0.0, mins[i + 1].min - 1) : mins[i + 1].min - 1;
+    }
+  }
+  if (mz) return 0;
+  return std::numeric_limits<double>::quiet_NaN();
+}
+
+// -----------------------------------------------------------------------------
+// Predicates.
+
+bool AstType::SimplyEquals(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsClass()) {
+    return that->IsClass() &&
+           *this->AsClass()->Map() == *that->AsClass()->Map();
+  }
+  if (this->IsConstant()) {
+    return that->IsConstant() &&
+           *this->AsConstant()->Value() == *that->AsConstant()->Value();
+  }
+  if (this->IsContext()) {
+    return that->IsContext() &&
+           this->AsContext()->Outer()->Equals(that->AsContext()->Outer());
+  }
+  if (this->IsArray()) {
+    return that->IsArray() &&
+           this->AsArray()->Element()->Equals(that->AsArray()->Element());
+  }
+  if (this->IsFunction()) {
+    if (!that->IsFunction()) return false;
+    AstFunctionType* this_fun = this->AsFunction();
+    AstFunctionType* that_fun = that->AsFunction();
+    if (this_fun->Arity() != that_fun->Arity() ||
+        !this_fun->Result()->Equals(that_fun->Result()) ||
+        !this_fun->Receiver()->Equals(that_fun->Receiver())) {
+      return false;
+    }
+    for (int i = 0, n = this_fun->Arity(); i < n; ++i) {
+      if (!this_fun->Parameter(i)->Equals(that_fun->Parameter(i))) return false;
+    }
+    return true;
+  }
+  if (this->IsTuple()) {
+    if (!that->IsTuple()) return false;
+    AstTupleType* this_tuple = this->AsTuple();
+    AstTupleType* that_tuple = that->AsTuple();
+    if (this_tuple->Arity() != that_tuple->Arity()) {
+      return false;
+    }
+    for (int i = 0, n = this_tuple->Arity(); i < n; ++i) {
+      if (!this_tuple->Element(i)->Equals(that_tuple->Element(i))) return false;
+    }
+    return true;
+  }
+  UNREACHABLE();
+  return false;
+}
+
+AstType::bitset AstType::Representation() {
+  return AST_REPRESENTATION(this->BitsetLub());
+}
+
+// Check if [this] <= [that].
+bool AstType::SlowIs(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+
+  // Fast bitset cases
+  if (that->IsBitset()) {
+    return AstBitsetType::Is(this->BitsetLub(), that->AsBitset());
+  }
+
+  if (this->IsBitset()) {
+    return AstBitsetType::Is(this->AsBitset(), that->BitsetGlb());
+  }
+
+  // Check the representations.
+  if (!AstBitsetType::Is(Representation(), that->Representation())) {
+    return false;
+  }
+
+  // Check the semantic part.
+  return SemanticIs(that);
+}
+
+// Check if AST_SEMANTIC([this]) <= AST_SEMANTIC([that]). The result of the
+// method should be independent of the representation axis of the types.
+bool AstType::SemanticIs(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+
+  if (this == that) return true;
+
+  if (that->IsBitset()) {
+    return AstBitsetType::Is(AST_SEMANTIC(this->BitsetLub()), that->AsBitset());
+  }
+  if (this->IsBitset()) {
+    return AstBitsetType::Is(AST_SEMANTIC(this->AsBitset()), that->BitsetGlb());
+  }
+
+  // (T1 \/ ... \/ Tn) <= T  if  (T1 <= T) /\ ... /\ (Tn <= T)
+  if (this->IsUnion()) {
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (!this->AsUnion()->Get(i)->SemanticIs(that)) return false;
+    }
+    return true;
+  }
+
+  // T <= (T1 \/ ... \/ Tn)  if  (T <= T1) \/ ... \/ (T <= Tn)
+  if (that->IsUnion()) {
+    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
+      if (this->SemanticIs(that->AsUnion()->Get(i))) return true;
+      if (i > 1 && this->IsRange()) return false;  // Shortcut.
+    }
+    return false;
+  }
+
+  if (that->IsRange()) {
+    return (this->IsRange() && Contains(that->AsRange(), this->AsRange())) ||
+           (this->IsConstant() &&
+            Contains(that->AsRange(), this->AsConstant()));
+  }
+  if (this->IsRange()) return false;
+
+  return this->SimplyEquals(that);
+}
+
+// Most precise _current_ type of a value (usually its class).
+AstType* AstType::NowOf(i::Object* value, Zone* zone) {
+  if (value->IsSmi() ||
+      i::HeapObject::cast(value)->map()->instance_type() == HEAP_NUMBER_TYPE) {
+    return Of(value, zone);
+  }
+  return Class(i::handle(i::HeapObject::cast(value)->map()), zone);
+}
+
+bool AstType::NowContains(i::Object* value) {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsAny()) return true;
+  if (value->IsHeapObject()) {
+    i::Map* map = i::HeapObject::cast(value)->map();
+    for (Iterator<i::Map> it = this->Classes(); !it.Done(); it.Advance()) {
+      if (*it.Current() == map) return true;
+    }
+  }
+  return this->Contains(value);
+}
+
+bool AstType::NowIs(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+
+  // TODO(rossberg): this is incorrect for
+  //   Union(Constant(V), T)->NowIs(Class(M))
+  // but fuzzing does not cover that!
+  if (this->IsConstant()) {
+    i::Object* object = *this->AsConstant()->Value();
+    if (object->IsHeapObject()) {
+      i::Map* map = i::HeapObject::cast(object)->map();
+      for (Iterator<i::Map> it = that->Classes(); !it.Done(); it.Advance()) {
+        if (*it.Current() == map) return true;
+      }
+    }
+  }
+  return this->Is(that);
+}
+
+// Check if [this] contains only (currently) stable classes.
+bool AstType::NowStable() {
+  DisallowHeapAllocation no_allocation;
+  return !this->IsClass() || this->AsClass()->Map()->is_stable();
+}
+
+// Check if [this] and [that] overlap.
+bool AstType::Maybe(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+
+  // Take care of the representation part (and also approximate
+  // the semantic part).
+  if (!AstBitsetType::IsInhabited(this->BitsetLub() & that->BitsetLub()))
+    return false;
+
+  return SemanticMaybe(that);
+}
+
+bool AstType::SemanticMaybe(AstType* that) {
+  DisallowHeapAllocation no_allocation;
+
+  // (T1 \/ ... \/ Tn) overlaps T  if  (T1 overlaps T) \/ ... \/ (Tn overlaps T)
+  if (this->IsUnion()) {
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (this->AsUnion()->Get(i)->SemanticMaybe(that)) return true;
+    }
+    return false;
+  }
+
+  // T overlaps (T1 \/ ... \/ Tn)  if  (T overlaps T1) \/ ... \/ (T overlaps Tn)
+  if (that->IsUnion()) {
+    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
+      if (this->SemanticMaybe(that->AsUnion()->Get(i))) return true;
+    }
+    return false;
+  }
+
+  if (!AstBitsetType::SemanticIsInhabited(this->BitsetLub() &
+                                          that->BitsetLub()))
+    return false;
+
+  if (this->IsBitset() && that->IsBitset()) return true;
+
+  if (this->IsClass() != that->IsClass()) return true;
+
+  if (this->IsRange()) {
+    if (that->IsConstant()) {
+      return Contains(this->AsRange(), that->AsConstant());
+    }
+    if (that->IsRange()) {
+      return Overlap(this->AsRange(), that->AsRange());
+    }
+    if (that->IsBitset()) {
+      bitset number_bits = AstBitsetType::NumberBits(that->AsBitset());
+      if (number_bits == AstBitsetType::kNone) {
+        return false;
+      }
+      double min = std::max(AstBitsetType::Min(number_bits), this->Min());
+      double max = std::min(AstBitsetType::Max(number_bits), this->Max());
+      return min <= max;
+    }
+  }
+  if (that->IsRange()) {
+    return that->SemanticMaybe(this);  // This case is handled above.
+  }
+
+  if (this->IsBitset() || that->IsBitset()) return true;
+
+  return this->SimplyEquals(that);
+}
+
+// Return the range in [this], or [NULL].
+AstType* AstType::GetRange() {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsRange()) return this;
+  if (this->IsUnion() && this->AsUnion()->Get(1)->IsRange()) {
+    return this->AsUnion()->Get(1);
+  }
+  return NULL;
+}
+
+bool AstType::Contains(i::Object* value) {
+  DisallowHeapAllocation no_allocation;
+  for (Iterator<i::Object> it = this->Constants(); !it.Done(); it.Advance()) {
+    if (*it.Current() == value) return true;
+  }
+  if (IsInteger(value)) {
+    AstType* range = this->GetRange();
+    if (range != NULL && Contains(range->AsRange(), value)) return true;
+  }
+  return AstBitsetType::New(AstBitsetType::Lub(value))->Is(this);
+}
+
+bool AstUnionType::Wellformed() {
+  DisallowHeapAllocation no_allocation;
+  // This checks the invariants of the union representation:
+  // 1. There are at least two elements.
+  // 2. The first element is a bitset, no other element is a bitset.
+  // 3. At most one element is a range, and it must be the second one.
+  // 4. No element is itself a union.
+  // 5. No element (except the bitset) is a subtype of any other.
+  // 6. If there is a range, then the bitset type does not contain
+  //    plain number bits.
+  DCHECK(this->Length() >= 2);       // (1)
+  DCHECK(this->Get(0)->IsBitset());  // (2a)
+
+  for (int i = 0; i < this->Length(); ++i) {
+    if (i != 0) DCHECK(!this->Get(i)->IsBitset());  // (2b)
+    if (i != 1) DCHECK(!this->Get(i)->IsRange());   // (3)
+    DCHECK(!this->Get(i)->IsUnion());               // (4)
+    for (int j = 0; j < this->Length(); ++j) {
+      if (i != j && i != 0)
+        DCHECK(!this->Get(i)->SemanticIs(this->Get(j)));  // (5)
+    }
+  }
+  DCHECK(!this->Get(1)->IsRange() ||
+         (AstBitsetType::NumberBits(this->Get(0)->AsBitset()) ==
+          AstBitsetType::kNone));  // (6)
+  return true;
+}
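+
+// Taken together, a well-formed union always has the shape
+//   (bitset, [range,] structured, structured, ...),
+// for example (Symbol \/ Range(0, 10) \/ Class(m)) -- an illustrative sketch:
+// the bitset comes first, an optional range second (with the plain number
+// bits moved out of the bitset), and the remaining structured elements are
+// mutually non-subsuming.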
+
+// -----------------------------------------------------------------------------
+// Union and intersection
+
+static bool AddIsSafe(int x, int y) {
+  return x >= 0 ? y <= std::numeric_limits<int>::max() - x
+                : y >= std::numeric_limits<int>::min() - x;
+}
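+
+// For example, AddIsSafe(INT_MAX, 1) is false while AddIsSafe(INT_MAX, 0) and
+// AddIsSafe(INT_MIN, 1) are true, so the size computations below can reject
+// unions whose combined length would overflow an int.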
+
+AstType* AstType::Intersect(AstType* type1, AstType* type2, Zone* zone) {
+  // Fast case: bit sets.
+  if (type1->IsBitset() && type2->IsBitset()) {
+    return AstBitsetType::New(type1->AsBitset() & type2->AsBitset());
+  }
+
+  // Fast case: top or bottom types.
+  if (type1->IsNone() || type2->IsAny()) return type1;  // Shortcut.
+  if (type2->IsNone() || type1->IsAny()) return type2;  // Shortcut.
+
+  // Semi-fast case.
+  if (type1->Is(type2)) return type1;
+  if (type2->Is(type1)) return type2;
+
+  // Slow case: create union.
+
+  // Figure out the representation of the result first.
+  // The rest of the method should not change this representation and
+  // it should not make any decisions based on representations (i.e.,
+  // it should only use the semantic part of types).
+  const bitset representation =
+      type1->Representation() & type2->Representation();
+
+  // Semantic subtyping check - this is needed for consistency with the
+  // semi-fast case above - we should behave the same way regardless of
+  // representations. Intersection with a universal bitset should only update
+  // the representations.
+  if (type1->SemanticIs(type2)) {
+    type2 = Any();
+  } else if (type2->SemanticIs(type1)) {
+    type1 = Any();
+  }
+
+  bitset bits =
+      AST_SEMANTIC(type1->BitsetGlb() & type2->BitsetGlb()) | representation;
+  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
+  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
+  if (!AddIsSafe(size1, size2)) return Any();
+  int size = size1 + size2;
+  if (!AddIsSafe(size, 2)) return Any();
+  size += 2;
+  AstType* result_type = AstUnionType::New(size, zone);
+  AstUnionType* result = result_type->AsUnion();
+  size = 0;
+
+  // Deal with bitsets.
+  result->Set(size++, AstBitsetType::New(bits));
+
+  AstRangeType::Limits lims = AstRangeType::Limits::Empty();
+  size = IntersectAux(type1, type2, result, size, &lims, zone);
+
+  // If the range is not empty, then insert it into the union and
+  // remove the number bits from the bitset.
+  if (!lims.IsEmpty()) {
+    size = UpdateRange(AstRangeType::New(lims, representation, zone), result,
+                       size, zone);
+
+    // Remove the number bits.
+    bitset number_bits = AstBitsetType::NumberBits(bits);
+    bits &= ~number_bits;
+    result->Set(0, AstBitsetType::New(bits));
+  }
+  return NormalizeUnion(result_type, size, zone);
+}
+
+int AstType::UpdateRange(AstType* range, AstUnionType* result, int size,
+                         Zone* zone) {
+  if (size == 1) {
+    result->Set(size++, range);
+  } else {
+    // Make space for the range.
+    result->Set(size++, result->Get(1));
+    result->Set(1, range);
+  }
+
+  // Remove any components that just got subsumed.
+  for (int i = 2; i < size;) {
+    if (result->Get(i)->SemanticIs(range)) {
+      result->Set(i, result->Get(--size));
+    } else {
+      ++i;
+    }
+  }
+  return size;
+}
+
+AstRangeType::Limits AstType::ToLimits(bitset bits, Zone* zone) {
+  bitset number_bits = AstBitsetType::NumberBits(bits);
+
+  if (number_bits == AstBitsetType::kNone) {
+    return AstRangeType::Limits::Empty();
+  }
+
+  return AstRangeType::Limits(AstBitsetType::Min(number_bits),
+                              AstBitsetType::Max(number_bits));
+}
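+
+// For example (illustrative): if the only number bit in [bits] is kUnsigned30,
+// ToLimits yields the interval [0, 2^30 - 1]; if [bits] carries no number bits
+// at all, it yields the empty Limits.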
+
+AstRangeType::Limits AstType::IntersectRangeAndBitset(AstType* range,
+                                                      AstType* bitset,
+                                                      Zone* zone) {
+  AstRangeType::Limits range_lims(range->AsRange());
+  AstRangeType::Limits bitset_lims = ToLimits(bitset->AsBitset(), zone);
+  return AstRangeType::Limits::Intersect(range_lims, bitset_lims);
+}
+
+int AstType::IntersectAux(AstType* lhs, AstType* rhs, AstUnionType* result,
+                          int size, AstRangeType::Limits* lims, Zone* zone) {
+  if (lhs->IsUnion()) {
+    for (int i = 0, n = lhs->AsUnion()->Length(); i < n; ++i) {
+      size =
+          IntersectAux(lhs->AsUnion()->Get(i), rhs, result, size, lims, zone);
+    }
+    return size;
+  }
+  if (rhs->IsUnion()) {
+    for (int i = 0, n = rhs->AsUnion()->Length(); i < n; ++i) {
+      size =
+          IntersectAux(lhs, rhs->AsUnion()->Get(i), result, size, lims, zone);
+    }
+    return size;
+  }
+
+  if (!AstBitsetType::SemanticIsInhabited(lhs->BitsetLub() &
+                                          rhs->BitsetLub())) {
+    return size;
+  }
+
+  if (lhs->IsRange()) {
+    if (rhs->IsBitset()) {
+      AstRangeType::Limits lim = IntersectRangeAndBitset(lhs, rhs, zone);
+
+      if (!lim.IsEmpty()) {
+        *lims = AstRangeType::Limits::Union(lim, *lims);
+      }
+      return size;
+    }
+    if (rhs->IsClass()) {
+      *lims = AstRangeType::Limits::Union(AstRangeType::Limits(lhs->AsRange()),
+                                          *lims);
+    }
+    if (rhs->IsConstant() && Contains(lhs->AsRange(), rhs->AsConstant())) {
+      return AddToUnion(rhs, result, size, zone);
+    }
+    if (rhs->IsRange()) {
+      AstRangeType::Limits lim =
+          AstRangeType::Limits::Intersect(AstRangeType::Limits(lhs->AsRange()),
+                                          AstRangeType::Limits(rhs->AsRange()));
+      if (!lim.IsEmpty()) {
+        *lims = AstRangeType::Limits::Union(lim, *lims);
+      }
+    }
+    return size;
+  }
+  if (rhs->IsRange()) {
+    // This case is handled symmetrically above.
+    return IntersectAux(rhs, lhs, result, size, lims, zone);
+  }
+  if (lhs->IsBitset() || rhs->IsBitset()) {
+    return AddToUnion(lhs->IsBitset() ? rhs : lhs, result, size, zone);
+  }
+  if (lhs->IsClass() != rhs->IsClass()) {
+    return AddToUnion(lhs->IsClass() ? rhs : lhs, result, size, zone);
+  }
+  if (lhs->SimplyEquals(rhs)) {
+    return AddToUnion(lhs, result, size, zone);
+  }
+  return size;
+}
+
+// Make sure that we produce a well-formed range and bitset:
+// If the range is non-empty, the number bits in the bitset should be
+// clear. Moreover, if we have a canonical range (such as Signed32),
+// we want to produce a bitset rather than a range.
+AstType* AstType::NormalizeRangeAndBitset(AstType* range, bitset* bits,
+                                          Zone* zone) {
+  // Fast path: If the bitset does not mention numbers, we can just keep the
+  // range.
+  bitset number_bits = AstBitsetType::NumberBits(*bits);
+  if (number_bits == 0) {
+    return range;
+  }
+
+  // If the range is semantically contained within the bitset, return None and
+  // leave the bitset untouched.
+  bitset range_lub = AST_SEMANTIC(range->BitsetLub());
+  if (AstBitsetType::Is(range_lub, *bits)) {
+    return None();
+  }
+
+  // Slow path: reconcile the bitset range and the range.
+  double bitset_min = AstBitsetType::Min(number_bits);
+  double bitset_max = AstBitsetType::Max(number_bits);
+
+  double range_min = range->Min();
+  double range_max = range->Max();
+
+  // Remove the number bits from the bitset, they would just confuse us now.
+  // NOTE: bits contains OtherNumber iff bits contains PlainNumber, in which
+  // case we already returned after the subtype check above.
+  *bits &= ~number_bits;
+
+  if (range_min <= bitset_min && range_max >= bitset_max) {
+    // Bitset is contained within the range, just return the range.
+    return range;
+  }
+
+  if (bitset_min < range_min) {
+    range_min = bitset_min;
+  }
+  if (bitset_max > range_max) {
+    range_max = bitset_max;
+  }
+  return AstRangeType::New(range_min, range_max, AstBitsetType::kNone, zone);
+}
+
+AstType* AstType::Union(AstType* type1, AstType* type2, Zone* zone) {
+  // Fast case: bit sets.
+  if (type1->IsBitset() && type2->IsBitset()) {
+    return AstBitsetType::New(type1->AsBitset() | type2->AsBitset());
+  }
+
+  // Fast case: top or bottom types.
+  if (type1->IsAny() || type2->IsNone()) return type1;
+  if (type2->IsAny() || type1->IsNone()) return type2;
+
+  // Semi-fast case.
+  if (type1->Is(type2)) return type2;
+  if (type2->Is(type1)) return type1;
+
+  // Figure out the representation of the result.
+  // The rest of the method should not change this representation and
+  // it should not make any decisions based on representations (i.e.,
+  // it should only use the semantic part of types).
+  const bitset representation =
+      type1->Representation() | type2->Representation();
+
+  // Slow case: create union.
+  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
+  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
+  if (!AddIsSafe(size1, size2)) return Any();
+  int size = size1 + size2;
+  if (!AddIsSafe(size, 2)) return Any();
+  size += 2;
+  AstType* result_type = AstUnionType::New(size, zone);
+  AstUnionType* result = result_type->AsUnion();
+  size = 0;
+
+  // Compute the new bitset.
+  bitset new_bitset = AST_SEMANTIC(type1->BitsetGlb() | type2->BitsetGlb());
+
+  // Deal with ranges.
+  AstType* range = None();
+  AstType* range1 = type1->GetRange();
+  AstType* range2 = type2->GetRange();
+  if (range1 != NULL && range2 != NULL) {
+    AstRangeType::Limits lims =
+        AstRangeType::Limits::Union(AstRangeType::Limits(range1->AsRange()),
+                                    AstRangeType::Limits(range2->AsRange()));
+    AstType* union_range = AstRangeType::New(lims, representation, zone);
+    range = NormalizeRangeAndBitset(union_range, &new_bitset, zone);
+  } else if (range1 != NULL) {
+    range = NormalizeRangeAndBitset(range1, &new_bitset, zone);
+  } else if (range2 != NULL) {
+    range = NormalizeRangeAndBitset(range2, &new_bitset, zone);
+  }
+  new_bitset = AST_SEMANTIC(new_bitset) | representation;
+  AstType* bits = AstBitsetType::New(new_bitset);
+  result->Set(size++, bits);
+  if (!range->IsNone()) result->Set(size++, range);
+
+  size = AddToUnion(type1, result, size, zone);
+  size = AddToUnion(type2, result, size, zone);
+  return NormalizeUnion(result_type, size, zone);
+}
+
+// Add [type] to [result] unless [type] is bitset, range, or already subsumed.
+// Return new size of [result].
+int AstType::AddToUnion(AstType* type, AstUnionType* result, int size,
+                        Zone* zone) {
+  if (type->IsBitset() || type->IsRange()) return size;
+  if (type->IsUnion()) {
+    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
+      size = AddToUnion(type->AsUnion()->Get(i), result, size, zone);
+    }
+    return size;
+  }
+  for (int i = 0; i < size; ++i) {
+    if (type->SemanticIs(result->Get(i))) return size;
+  }
+  result->Set(size++, type);
+  return size;
+}
+
+AstType* AstType::NormalizeUnion(AstType* union_type, int size, Zone* zone) {
+  AstUnionType* unioned = union_type->AsUnion();
+  DCHECK(size >= 1);
+  DCHECK(unioned->Get(0)->IsBitset());
+  // If the union has just one element, return it.
+  if (size == 1) {
+    return unioned->Get(0);
+  }
+  bitset bits = unioned->Get(0)->AsBitset();
+  // If the union only consists of a range, we can get rid of the union.
+  if (size == 2 && AST_SEMANTIC(bits) == AstBitsetType::kNone) {
+    bitset representation = AST_REPRESENTATION(bits);
+    if (representation == unioned->Get(1)->Representation()) {
+      return unioned->Get(1);
+    }
+    if (unioned->Get(1)->IsRange()) {
+      return AstRangeType::New(unioned->Get(1)->AsRange()->Min(),
+                               unioned->Get(1)->AsRange()->Max(),
+                               unioned->Get(0)->AsBitset(), zone);
+    }
+  }
+  unioned->Shrink(size);
+  SLOW_DCHECK(unioned->Wellformed());
+  return union_type;
+}
+
+// -----------------------------------------------------------------------------
+// Component extraction
+
+// static
+AstType* AstType::Representation(AstType* t, Zone* zone) {
+  return AstBitsetType::New(t->Representation());
+}
+
+// static
+AstType* AstType::Semantic(AstType* t, Zone* zone) {
+  return Intersect(t, AstBitsetType::New(AstBitsetType::kSemantic), zone);
+}
+
+// -----------------------------------------------------------------------------
+// Iteration.
+
+int AstType::NumClasses() {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsClass()) {
+    return 1;
+  } else if (this->IsUnion()) {
+    int result = 0;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (this->AsUnion()->Get(i)->IsClass()) ++result;
+    }
+    return result;
+  } else {
+    return 0;
+  }
+}
+
+int AstType::NumConstants() {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsConstant()) {
+    return 1;
+  } else if (this->IsUnion()) {
+    int result = 0;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (this->AsUnion()->Get(i)->IsConstant()) ++result;
+    }
+    return result;
+  } else {
+    return 0;
+  }
+}
+
+template <class T>
+AstType* AstType::Iterator<T>::get_type() {
+  DCHECK(!Done());
+  return type_->IsUnion() ? type_->AsUnion()->Get(index_) : type_;
+}
+
+// C++ cannot specialise nested templates, so we have to go through this
+// contortion with an auxiliary template to simulate it.
+template <class T>
+struct TypeImplIteratorAux {
+  static bool matches(AstType* type);
+  static i::Handle<T> current(AstType* type);
+};
+
+template <>
+struct TypeImplIteratorAux<i::Map> {
+  static bool matches(AstType* type) { return type->IsClass(); }
+  static i::Handle<i::Map> current(AstType* type) {
+    return type->AsClass()->Map();
+  }
+};
+
+template <>
+struct TypeImplIteratorAux<i::Object> {
+  static bool matches(AstType* type) { return type->IsConstant(); }
+  static i::Handle<i::Object> current(AstType* type) {
+    return type->AsConstant()->Value();
+  }
+};
+
+template <class T>
+bool AstType::Iterator<T>::matches(AstType* type) {
+  return TypeImplIteratorAux<T>::matches(type);
+}
+
+template <class T>
+i::Handle<T> AstType::Iterator<T>::Current() {
+  return TypeImplIteratorAux<T>::current(get_type());
+}
+
+template <class T>
+void AstType::Iterator<T>::Advance() {
+  DisallowHeapAllocation no_allocation;
+  ++index_;
+  if (type_->IsUnion()) {
+    for (int n = type_->AsUnion()->Length(); index_ < n; ++index_) {
+      if (matches(type_->AsUnion()->Get(index_))) return;
+    }
+  } else if (index_ == 0 && matches(type_)) {
+    return;
+  }
+  index_ = -1;
+}
+
+// -----------------------------------------------------------------------------
+// Printing.
+
+const char* AstBitsetType::Name(bitset bits) {
+  switch (bits) {
+    case AST_REPRESENTATION(kAny):
+      return "Any";
+#define RETURN_NAMED_REPRESENTATION_TYPE(type, value) \
+  case AST_REPRESENTATION(k##type):                   \
+    return #type;
+      AST_REPRESENTATION_BITSET_TYPE_LIST(RETURN_NAMED_REPRESENTATION_TYPE)
+#undef RETURN_NAMED_REPRESENTATION_TYPE
+
+#define RETURN_NAMED_SEMANTIC_TYPE(type, value) \
+  case AST_SEMANTIC(k##type):                   \
+    return #type;
+      AST_SEMANTIC_BITSET_TYPE_LIST(RETURN_NAMED_SEMANTIC_TYPE)
+      AST_INTERNAL_BITSET_TYPE_LIST(RETURN_NAMED_SEMANTIC_TYPE)
+#undef RETURN_NAMED_SEMANTIC_TYPE
+
+    default:
+      return NULL;
+  }
+}
+
+void AstBitsetType::Print(std::ostream& os,  // NOLINT
+                          bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  const char* name = Name(bits);
+  if (name != NULL) {
+    os << name;
+    return;
+  }
+
+  // clang-format off
+  static const bitset named_bitsets[] = {
+#define BITSET_CONSTANT(type, value) AST_REPRESENTATION(k##type),
+    AST_REPRESENTATION_BITSET_TYPE_LIST(BITSET_CONSTANT)
+#undef BITSET_CONSTANT
+
+#define BITSET_CONSTANT(type, value) AST_SEMANTIC(k##type),
+    AST_INTERNAL_BITSET_TYPE_LIST(BITSET_CONSTANT)
+    AST_SEMANTIC_BITSET_TYPE_LIST(BITSET_CONSTANT)
+#undef BITSET_CONSTANT
+  };
+  // clang-format on
+
+  bool is_first = true;
+  os << "(";
+  for (int i(arraysize(named_bitsets) - 1); bits != 0 && i >= 0; --i) {
+    bitset subset = named_bitsets[i];
+    if ((bits & subset) == subset) {
+      if (!is_first) os << " | ";
+      is_first = false;
+      os << Name(subset);
+      bits -= subset;
+    }
+  }
+  DCHECK(bits == 0);
+  os << ")";
+}
+
+void AstType::PrintTo(std::ostream& os, PrintDimension dim) {
+  DisallowHeapAllocation no_allocation;
+  if (dim != REPRESENTATION_DIM) {
+    if (this->IsBitset()) {
+      AstBitsetType::Print(os, AST_SEMANTIC(this->AsBitset()));
+    } else if (this->IsClass()) {
+      os << "Class(" << static_cast<void*>(*this->AsClass()->Map()) << " < ";
+      AstBitsetType::New(AstBitsetType::Lub(this))->PrintTo(os, dim);
+      os << ")";
+    } else if (this->IsConstant()) {
+      os << "Constant(" << Brief(*this->AsConstant()->Value()) << ")";
+    } else if (this->IsRange()) {
+      std::ostream::fmtflags saved_flags = os.setf(std::ios::fixed);
+      std::streamsize saved_precision = os.precision(0);
+      os << "Range(" << this->AsRange()->Min() << ", " << this->AsRange()->Max()
+         << ")";
+      os.flags(saved_flags);
+      os.precision(saved_precision);
+    } else if (this->IsContext()) {
+      os << "Context(";
+      this->AsContext()->Outer()->PrintTo(os, dim);
+      os << ")";
+    } else if (this->IsUnion()) {
+      os << "(";
+      for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+        AstType* type_i = this->AsUnion()->Get(i);
+        if (i > 0) os << " | ";
+        type_i->PrintTo(os, dim);
+      }
+      os << ")";
+    } else if (this->IsArray()) {
+      os << "Array(";
+      AsArray()->Element()->PrintTo(os, dim);
+      os << ")";
+    } else if (this->IsFunction()) {
+      if (!this->AsFunction()->Receiver()->IsAny()) {
+        this->AsFunction()->Receiver()->PrintTo(os, dim);
+        os << ".";
+      }
+      os << "(";
+      for (int i = 0; i < this->AsFunction()->Arity(); ++i) {
+        if (i > 0) os << ", ";
+        this->AsFunction()->Parameter(i)->PrintTo(os, dim);
+      }
+      os << ")->";
+      this->AsFunction()->Result()->PrintTo(os, dim);
+    } else if (this->IsTuple()) {
+      os << "<";
+      for (int i = 0, n = this->AsTuple()->Arity(); i < n; ++i) {
+        AstType* type_i = this->AsTuple()->Element(i);
+        if (i > 0) os << ", ";
+        type_i->PrintTo(os, dim);
+      }
+      os << ">";
+    } else {
+      UNREACHABLE();
+    }
+  }
+  if (dim == BOTH_DIMS) os << "/";
+  if (dim != SEMANTIC_DIM) {
+    AstBitsetType::Print(os, AST_REPRESENTATION(this->BitsetLub()));
+  }
+}
+
+#ifdef DEBUG
+void AstType::Print() {
+  OFStream os(stdout);
+  PrintTo(os);
+  os << std::endl;
+}
+void AstBitsetType::Print(bitset bits) {
+  OFStream os(stdout);
+  Print(os, bits);
+  os << std::endl;
+}
+#endif
+
+AstBitsetType::bitset AstBitsetType::SignedSmall() {
+  return i::SmiValuesAre31Bits() ? kSigned31 : kSigned32;
+}
+
+AstBitsetType::bitset AstBitsetType::UnsignedSmall() {
+  return i::SmiValuesAre31Bits() ? kUnsigned30 : kUnsigned31;
+}
+
+#define CONSTRUCT_SIMD_TYPE(NAME, Name, name, lane_count, lane_type) \
+  AstType* AstType::Name(Isolate* isolate, Zone* zone) {             \
+    return Class(i::handle(isolate->heap()->name##_map()), zone);    \
+  }
+SIMD128_TYPES(CONSTRUCT_SIMD_TYPE)
+#undef CONSTRUCT_SIMD_TYPE
+
+// -----------------------------------------------------------------------------
+// Instantiations.
+
+template class AstType::Iterator<i::Map>;
+template class AstType::Iterator<i::Object>;
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/ast/ast-types.h b/src/ast/ast-types.h
new file mode 100644
index 0000000..0b6e23f
--- /dev/null
+++ b/src/ast/ast-types.h
@@ -0,0 +1,1024 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_AST_AST_TYPES_H_
+#define V8_AST_AST_TYPES_H_
+
+#include "src/conversions.h"
+#include "src/handles.h"
+#include "src/objects.h"
+#include "src/ostreams.h"
+
+namespace v8 {
+namespace internal {
+
+// SUMMARY
+//
+// A simple type system for compiler-internal use. It is based entirely on
+// union types, and all subtyping hence amounts to set inclusion. Besides the
+// obvious primitive types and some predefined unions, the type language also
+// can express class types (a.k.a. specific maps) and singleton types (i.e.,
+// concrete constants).
+//
+// Types consist of two dimensions: semantic (value range) and representation.
+// Both are related through subtyping.
+//
+//
+// SEMANTIC DIMENSION
+//
+// The following equations and inequations hold for the semantic axis:
+//
+//   None <= T
+//   T <= Any
+//
+//   Number = Signed32 \/ Unsigned32 \/ Double
+//   Smi <= Signed32
+//   Name = String \/ Symbol
+//   UniqueName = InternalizedString \/ Symbol
+//   InternalizedString < String
+//
+//   Receiver = Object \/ Proxy
+//   Array < Object
+//   Function < Object
+//   RegExp < Object
+//   OtherUndetectable < Object
+//   DetectableReceiver = Receiver - OtherUndetectable
+//
+//   Class(map) < T   iff instance_type(map) < T
+//   Constant(x) < T  iff instance_type(map(x)) < T
+//   Array(T) < Array
+//   Function(R, S, T0, T1, ...) < Function
+//   Context(T) < Internal
+//
+// Both structural Array and Function types are invariant in all parameters;
+// relaxing this would make Union and Intersect operations more involved.
+// There is no subtyping relation between Array, Function, or Context types
+// and respective Constant types, since these types cannot be reconstructed
+// for arbitrary heap values.
+// Note also that Constant(x) < Class(map(x)) does _not_ hold, since x's map can
+// change! (Its instance type cannot, however.)
+// TODO(rossberg): the latter is not currently true for proxies, because of fix,
+// but will hold once we implement direct proxies.
+// However, we also define a 'temporal' variant of the subtyping relation that
+// considers the _current_ state only, i.e., Constant(x) <_now Class(map(x)).
+//
+//
+// REPRESENTATIONAL DIMENSION
+//
+// For the representation axis, the following holds:
+//
+//   None <= R
+//   R <= Any
+//
+//   UntaggedInt = UntaggedInt1 \/ UntaggedInt8 \/
+//                 UntaggedInt16 \/ UntaggedInt32
+//   UntaggedFloat = UntaggedFloat32 \/ UntaggedFloat64
+//   UntaggedNumber = UntaggedInt \/ UntaggedFloat
+//   Untagged = UntaggedNumber \/ UntaggedPtr
+//   Tagged = TaggedInt \/ TaggedPtr
+//
+// Subtyping relates the two dimensions, for example:
+//
+//   Number <= Tagged \/ UntaggedNumber
+//   Object <= TaggedPtr \/ UntaggedPtr
+//
+// That holds because the semantic type constructors defined by the API create
+// types that allow for all possible representations, and dually, the ones for
+// representation types initially include all semantic ranges. Representations
+// can then e.g. be narrowed for a given semantic type using intersection:
+//
+//   SignedSmall /\ TaggedInt       (a 'smi')
+//   Number /\ TaggedPtr            (a heap number)
+//
+//
+// RANGE TYPES
+//
+// A range type represents a continuous integer interval by its minimum and
+// maximum value.  Either value may be an infinity, in which case that infinity
+// itself is also included in the range.   A range never contains NaN or -0.
+//
+// If a value v happens to be an integer n, then Constant(v) is considered a
+// subtype of Range(n, n) (and therefore also a subtype of any larger range).
+// In order to avoid large unions, however, it is usually a good idea to use
+// Range rather than Constant.
+//
+//
+// PREDICATES
+//
+// There are two main functions for testing types:
+//
+//   T1->Is(T2)     -- tests whether T1 is included in T2 (i.e., T1 <= T2)
+//   T1->Maybe(T2)  -- tests whether T1 and T2 overlap (i.e., T1 /\ T2 =/= 0)
+//
+// Typically, the former is to be used to select representations (e.g., via
+// T->Is(SignedSmall())), and the latter to check whether a specific case needs
+// handling (e.g., via T->Maybe(Number())).
+//
+// There is no functionality to discover whether a type is a leaf in the
+// lattice. That is intentional. It should always be possible to refine the
+// lattice (e.g., splitting up number types further) without invalidating any
+// existing assumptions or tests.
+// Consequently, do not normally use Equals for type tests; always use Is!
+//
+// The NowIs operator implements state-sensitive subtyping, as described above.
+// Any compilation decision based on such temporary properties requires runtime
+// guarding!
+//
+//
+// PROPERTIES
+//
+// Various formal properties hold for constructors, operators, and predicates
+// over types. For example, constructors are injective and subtyping is a
+// complete partial order.
+//
+// See test/cctest/test-types.cc for a comprehensive executable specification,
+// especially with respect to the properties of the more exotic 'temporal'
+// constructors and predicates (those prefixed 'Now').
+//
+//
+// IMPLEMENTATION
+//
+// Internally, all 'primitive' types, and their unions, are represented as
+// bitsets. Bit 0 is reserved for tagging. Class is a heap pointer to the
+// respective map. Only structured types require allocation.
+// Note that the bitset representation is closed under both Union and Intersect.
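+
+// Illustrative usage sketch (assuming a Zone* named zone and the bitset
+// constructors SignedSmall() and Number() mentioned above):
+//
+//   AstType* small = AstType::SignedSmall();
+//   AstType* number = AstType::Number();
+//   small->Is(number);     // true: SignedSmall <= Number.
+//   number->Is(small);     // false: Number is strictly larger.
+//   number->Maybe(small);  // true: the two types overlap.
+//   AstType* t = AstType::Intersect(number, small, zone);
+//   t->Is(small);          // true: the intersection collapses to SignedSmall.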
+
+// -----------------------------------------------------------------------------
+// Values for bitset types
+
+// clang-format off
+
+#define AST_MASK_BITSET_TYPE_LIST(V) \
+  V(Representation, 0xffc00000u) \
+  V(Semantic,       0x003ffffeu)
+
+#define AST_REPRESENTATION(k) ((k) & AstBitsetType::kRepresentation)
+#define AST_SEMANTIC(k)       ((k) & AstBitsetType::kSemantic)
+
+#define AST_REPRESENTATION_BITSET_TYPE_LIST(V)    \
+  V(None,               0)                    \
+  V(UntaggedBit,        1u << 22 | kSemantic) \
+  V(UntaggedIntegral8,  1u << 23 | kSemantic) \
+  V(UntaggedIntegral16, 1u << 24 | kSemantic) \
+  V(UntaggedIntegral32, 1u << 25 | kSemantic) \
+  V(UntaggedFloat32,    1u << 26 | kSemantic) \
+  V(UntaggedFloat64,    1u << 27 | kSemantic) \
+  V(UntaggedSimd128,    1u << 28 | kSemantic) \
+  V(UntaggedPointer,    1u << 29 | kSemantic) \
+  V(TaggedSigned,       1u << 30 | kSemantic) \
+  V(TaggedPointer,      1u << 31 | kSemantic) \
+  \
+  V(UntaggedIntegral,   kUntaggedBit | kUntaggedIntegral8 |        \
+                        kUntaggedIntegral16 | kUntaggedIntegral32) \
+  V(UntaggedFloat,      kUntaggedFloat32 | kUntaggedFloat64)       \
+  V(UntaggedNumber,     kUntaggedIntegral | kUntaggedFloat)        \
+  V(Untagged,           kUntaggedNumber | kUntaggedPointer)        \
+  V(Tagged,             kTaggedSigned | kTaggedPointer)
+
+#define AST_INTERNAL_BITSET_TYPE_LIST(V)                                      \
+  V(OtherUnsigned31, 1u << 1 | AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(OtherUnsigned32, 1u << 2 | AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(OtherSigned32,   1u << 3 | AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(OtherNumber,     1u << 4 | AST_REPRESENTATION(kTagged | kUntaggedNumber))
+
+#define AST_SEMANTIC_BITSET_TYPE_LIST(V)                                \
+  V(Negative31,          1u << 5  |                                     \
+                         AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(Null,                1u << 6  | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Undefined,           1u << 7  | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Boolean,             1u << 8  | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Unsigned30,          1u << 9  |                                     \
+                         AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(MinusZero,           1u << 10 |                                     \
+                         AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(NaN,                 1u << 11 |                                     \
+                         AST_REPRESENTATION(kTagged | kUntaggedNumber)) \
+  V(Symbol,              1u << 12 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(InternalizedString,  1u << 13 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(OtherString,         1u << 14 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Simd,                1u << 15 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(OtherObject,         1u << 17 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(OtherUndetectable,   1u << 16 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Proxy,               1u << 18 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Function,            1u << 19 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(Hole,                1u << 20 | AST_REPRESENTATION(kTaggedPointer)) \
+  V(OtherInternal,       1u << 21 |                                     \
+                         AST_REPRESENTATION(kTagged | kUntagged))       \
+  \
+  V(Signed31,                   kUnsigned30 | kNegative31) \
+  V(Signed32,                   kSigned31 | kOtherUnsigned31 |          \
+                                kOtherSigned32)                         \
+  V(Signed32OrMinusZero,        kSigned32 | kMinusZero) \
+  V(Signed32OrMinusZeroOrNaN,   kSigned32 | kMinusZero | kNaN) \
+  V(Negative32,                 kNegative31 | kOtherSigned32) \
+  V(Unsigned31,                 kUnsigned30 | kOtherUnsigned31) \
+  V(Unsigned32,                 kUnsigned30 | kOtherUnsigned31 | \
+                                kOtherUnsigned32) \
+  V(Unsigned32OrMinusZero,      kUnsigned32 | kMinusZero) \
+  V(Unsigned32OrMinusZeroOrNaN, kUnsigned32 | kMinusZero | kNaN) \
+  V(Integral32,                 kSigned32 | kUnsigned32) \
+  V(PlainNumber,                kIntegral32 | kOtherNumber) \
+  V(OrderedNumber,              kPlainNumber | kMinusZero) \
+  V(MinusZeroOrNaN,             kMinusZero | kNaN) \
+  V(Number,                     kOrderedNumber | kNaN) \
+  V(String,                     kInternalizedString | kOtherString) \
+  V(UniqueName,                 kSymbol | kInternalizedString) \
+  V(Name,                       kSymbol | kString) \
+  V(BooleanOrNumber,            kBoolean | kNumber) \
+  V(BooleanOrNullOrNumber,      kBooleanOrNumber | kNull) \
+  V(BooleanOrNullOrUndefined,   kBoolean | kNull | kUndefined) \
+  V(NullOrNumber,               kNull | kNumber) \
+  V(NullOrUndefined,            kNull | kUndefined) \
+  V(Undetectable,               kNullOrUndefined | kOtherUndetectable) \
+  V(NumberOrOddball,            kNumber | kNullOrUndefined | kBoolean | kHole) \
+  V(NumberOrSimdOrString,       kNumber | kSimd | kString) \
+  V(NumberOrString,             kNumber | kString) \
+  V(NumberOrUndefined,          kNumber | kUndefined) \
+  V(PlainPrimitive,             kNumberOrString | kBoolean | kNullOrUndefined) \
+  V(Primitive,                  kSymbol | kSimd | kPlainPrimitive) \
+  V(DetectableReceiver,         kFunction | kOtherObject | kProxy) \
+  V(Object,                     kFunction | kOtherObject | kOtherUndetectable) \
+  V(Receiver,                   kObject | kProxy) \
+  V(StringOrReceiver,           kString | kReceiver) \
+  V(Unique,                     kBoolean | kUniqueName | kNull | kUndefined | \
+                                kReceiver) \
+  V(Internal,                   kHole | kOtherInternal) \
+  V(NonInternal,                kPrimitive | kReceiver) \
+  V(NonNumber,                  kUnique | kString | kInternal) \
+  V(Any,                        0xfffffffeu)
+
+// clang-format on
+
+/*
+ * The following diagrams show how integers (in the mathematical sense) are
+ * divided among the different atomic numerical types.
+ *
+ *   ON    OS32     N31     U30     OU31    OU32     ON
+ * ______[_______[_______[_______[_______[_______[_______
+ *     -2^31   -2^30     0      2^30    2^31    2^32
+ *
+ * E.g., OtherUnsigned32 (OU32) covers all integers from 2^31 to 2^32-1.
+ *
+ * Some of the atomic numerical bitsets are internal only (see
+ * INTERNAL_BITSET_TYPE_LIST).  To a types user, they should only occur in
+ * union with certain other bitsets.  For instance, OtherNumber should only
+ * occur as part of PlainNumber.
+ */
+
+#define AST_PROPER_BITSET_TYPE_LIST(V)   \
+  AST_REPRESENTATION_BITSET_TYPE_LIST(V) \
+  AST_SEMANTIC_BITSET_TYPE_LIST(V)
+
+#define AST_BITSET_TYPE_LIST(V)          \
+  AST_MASK_BITSET_TYPE_LIST(V)           \
+  AST_REPRESENTATION_BITSET_TYPE_LIST(V) \
+  AST_INTERNAL_BITSET_TYPE_LIST(V)       \
+  AST_SEMANTIC_BITSET_TYPE_LIST(V)
+
+class AstType;
+
+// -----------------------------------------------------------------------------
+// Bitset types (internal).
+
+class AstBitsetType {
+ public:
+  typedef uint32_t bitset;  // Internal
+
+  enum : uint32_t {
+#define DECLARE_TYPE(type, value) k##type = (value),
+    AST_BITSET_TYPE_LIST(DECLARE_TYPE)
+#undef DECLARE_TYPE
+        kUnusedEOL = 0
+  };
+
+  static bitset SignedSmall();
+  static bitset UnsignedSmall();
+
+  bitset Bitset() {
+    return static_cast<bitset>(reinterpret_cast<uintptr_t>(this) ^ 1u);
+  }
+
+  static bool IsInhabited(bitset bits) {
+    return AST_SEMANTIC(bits) != kNone && AST_REPRESENTATION(bits) != kNone;
+  }
+
+  static bool SemanticIsInhabited(bitset bits) {
+    return AST_SEMANTIC(bits) != kNone;
+  }
+
+  static bool Is(bitset bits1, bitset bits2) {
+    return (bits1 | bits2) == bits2;
+  }
+
+  static double Min(bitset);
+  static double Max(bitset);
+
+  static bitset Glb(AstType* type);  // greatest lower bound that's a bitset
+  static bitset Glb(double min, double max);
+  static bitset Lub(AstType* type);  // least upper bound that's a bitset
+  static bitset Lub(i::Map* map);
+  static bitset Lub(i::Object* value);
+  static bitset Lub(double value);
+  static bitset Lub(double min, double max);
+  static bitset ExpandInternals(bitset bits);
+
+  static const char* Name(bitset);
+  static void Print(std::ostream& os, bitset);  // NOLINT
+#ifdef DEBUG
+  static void Print(bitset);
+#endif
+
+  static bitset NumberBits(bitset bits);
+
+  static bool IsBitset(AstType* type) {
+    return reinterpret_cast<uintptr_t>(type) & 1;
+  }
+
+  static AstType* NewForTesting(bitset bits) { return New(bits); }
+
+ private:
+  friend class AstType;
+
+  static AstType* New(bitset bits) {
+    return reinterpret_cast<AstType*>(static_cast<uintptr_t>(bits | 1u));
+  }
+
+  struct Boundary {
+    bitset internal;
+    bitset external;
+    double min;
+  };
+  static const Boundary BoundariesArray[];
+  static inline const Boundary* Boundaries();
+  static inline size_t BoundariesSize();
+};
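
AstBitsetType never exists as a heap object: New() packs the 32-bit mask directly into an AstType* with the low bit set (aligned zone pointers always have that bit clear), IsBitset() tests the bit, and Bitset() strips it again. A compact sketch of the same tagging idea, using stand-in names:

    #include <cassert>
    #include <stdint.h>

    struct ToyType {};  // stand-in for AstType

    // Pack a 32-bit mask into a pointer value with the low bit set.
    static ToyType* NewBitset(uint32_t bits) {
      return reinterpret_cast<ToyType*>(static_cast<uintptr_t>(bits | 1u));
    }
    // A tagged pointer with its low bit set is a bitset, not a real object.
    static bool IsBitset(ToyType* t) {
      return (reinterpret_cast<uintptr_t>(t) & 1) != 0;
    }
    // Clear the tag bit to recover the mask.
    static uint32_t BitsOf(ToyType* t) {
      return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(t) ^ 1u);
    }

    int main() {
      ToyType* t = NewBitset(0x40u);  // some arbitrary mask
      assert(IsBitset(t));
      assert(BitsOf(t) == 0x40u);
      // Subtyping between bitsets is subset inclusion, as in Is() above:
      assert((0x40u | 0xC0u) == 0xC0u);
    }
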
+
+// -----------------------------------------------------------------------------
+// Superclass for non-bitset types (internal).
+class AstTypeBase {
+ protected:
+  friend class AstType;
+
+  enum Kind {
+    kClass,
+    kConstant,
+    kContext,
+    kArray,
+    kFunction,
+    kTuple,
+    kUnion,
+    kRange
+  };
+
+  Kind kind() const { return kind_; }
+  explicit AstTypeBase(Kind kind) : kind_(kind) {}
+
+  static bool IsKind(AstType* type, Kind kind) {
+    if (AstBitsetType::IsBitset(type)) return false;
+    AstTypeBase* base = reinterpret_cast<AstTypeBase*>(type);
+    return base->kind() == kind;
+  }
+
+  // The hacky conversion to/from AstType*.
+  static AstType* AsType(AstTypeBase* type) {
+    return reinterpret_cast<AstType*>(type);
+  }
+  static AstTypeBase* FromType(AstType* type) {
+    return reinterpret_cast<AstTypeBase*>(type);
+  }
+
+ private:
+  Kind kind_;
+};
+
+// -----------------------------------------------------------------------------
+// Class types.
+
+class AstClassType : public AstTypeBase {
+ public:
+  i::Handle<i::Map> Map() { return map_; }
+
+ private:
+  friend class AstType;
+  friend class AstBitsetType;
+
+  static AstType* New(i::Handle<i::Map> map, Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstClassType)))
+                      AstClassType(AstBitsetType::Lub(*map), map));
+  }
+
+  static AstClassType* cast(AstType* type) {
+    DCHECK(IsKind(type, kClass));
+    return static_cast<AstClassType*>(FromType(type));
+  }
+
+  AstClassType(AstBitsetType::bitset bitset, i::Handle<i::Map> map)
+      : AstTypeBase(kClass), bitset_(bitset), map_(map) {}
+
+  AstBitsetType::bitset Lub() { return bitset_; }
+
+  AstBitsetType::bitset bitset_;
+  Handle<i::Map> map_;
+};
+
+// -----------------------------------------------------------------------------
+// Constant types.
+
+class AstConstantType : public AstTypeBase {
+ public:
+  i::Handle<i::Object> Value() { return object_; }
+
+ private:
+  friend class AstType;
+  friend class AstBitsetType;
+
+  static AstType* New(i::Handle<i::Object> value, Zone* zone) {
+    AstBitsetType::bitset bitset = AstBitsetType::Lub(*value);
+    return AsType(new (zone->New(sizeof(AstConstantType)))
+                      AstConstantType(bitset, value));
+  }
+
+  static AstConstantType* cast(AstType* type) {
+    DCHECK(IsKind(type, kConstant));
+    return static_cast<AstConstantType*>(FromType(type));
+  }
+
+  AstConstantType(AstBitsetType::bitset bitset, i::Handle<i::Object> object)
+      : AstTypeBase(kConstant), bitset_(bitset), object_(object) {}
+
+  AstBitsetType::bitset Lub() { return bitset_; }
+
+  AstBitsetType::bitset bitset_;
+  Handle<i::Object> object_;
+};
+// TODO(neis): Also cache value if numerical.
+// TODO(neis): Allow restricting the representation.
+
+// -----------------------------------------------------------------------------
+// Range types.
+
+class AstRangeType : public AstTypeBase {
+ public:
+  struct Limits {
+    double min;
+    double max;
+    Limits(double min, double max) : min(min), max(max) {}
+    explicit Limits(AstRangeType* range)
+        : min(range->Min()), max(range->Max()) {}
+    bool IsEmpty();
+    static Limits Empty() { return Limits(1, 0); }
+    static Limits Intersect(Limits lhs, Limits rhs);
+    static Limits Union(Limits lhs, Limits rhs);
+  };
+
+  double Min() { return limits_.min; }
+  double Max() { return limits_.max; }
+
+ private:
+  friend class AstType;
+  friend class AstBitsetType;
+  friend class AstUnionType;
+
+  static AstType* New(double min, double max,
+                      AstBitsetType::bitset representation, Zone* zone) {
+    return New(Limits(min, max), representation, zone);
+  }
+
+  static bool IsInteger(double x) {
+    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
+  }
+
+  static AstType* New(Limits lim, AstBitsetType::bitset representation,
+                      Zone* zone) {
+    DCHECK(IsInteger(lim.min) && IsInteger(lim.max));
+    DCHECK(lim.min <= lim.max);
+    DCHECK(AST_REPRESENTATION(representation) == representation);
+    AstBitsetType::bitset bits =
+        AST_SEMANTIC(AstBitsetType::Lub(lim.min, lim.max)) | representation;
+
+    return AsType(new (zone->New(sizeof(AstRangeType)))
+                      AstRangeType(bits, lim));
+  }
+
+  static AstRangeType* cast(AstType* type) {
+    DCHECK(IsKind(type, kRange));
+    return static_cast<AstRangeType*>(FromType(type));
+  }
+
+  AstRangeType(AstBitsetType::bitset bitset, Limits limits)
+      : AstTypeBase(kRange), bitset_(bitset), limits_(limits) {}
+
+  AstBitsetType::bitset Lub() { return bitset_; }
+
+  AstBitsetType::bitset bitset_;
+  Limits limits_;
+};
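
Limits only declares IsEmpty, Intersect and Union here; one plausible reading, consistent with Empty() being Limits(1, 0) (min above max), is plain interval arithmetic, sketched below with a toy struct:

    #include <algorithm>
    #include <cassert>

    // Toy interval limits: empty exactly when min > max, as Limits(1, 0) suggests.
    struct ToyLimits {
      double min, max;
      bool IsEmpty() const { return min > max; }
    };

    static ToyLimits Intersect(ToyLimits a, ToyLimits b) {
      // Overlap of two intervals: tighten both ends.
      return ToyLimits{std::max(a.min, b.min), std::min(a.max, b.max)};
    }
    static ToyLimits Union(ToyLimits a, ToyLimits b) {
      // Smallest interval covering both (an over-approximation when disjoint);
      // an empty side simply yields the other side.
      if (a.IsEmpty()) return b;
      if (b.IsEmpty()) return a;
      return ToyLimits{std::min(a.min, b.min), std::max(a.max, b.max)};
    }

    int main() {
      ToyLimits a{0, 10}, b{5, 20}, c{30, 40};
      assert(!Intersect(a, b).IsEmpty() && Intersect(a, b).min == 5);
      assert(Intersect(a, c).IsEmpty());   // disjoint -> empty
      assert(Union(a, c).max == 40);       // convex hull of both
    }
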
+
+// -----------------------------------------------------------------------------
+// Context types.
+
+class AstContextType : public AstTypeBase {
+ public:
+  AstType* Outer() { return outer_; }
+
+ private:
+  friend class AstType;
+
+  static AstType* New(AstType* outer, Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstContextType)))
+                      AstContextType(outer));  // NOLINT
+  }
+
+  static AstContextType* cast(AstType* type) {
+    DCHECK(IsKind(type, kContext));
+    return static_cast<AstContextType*>(FromType(type));
+  }
+
+  explicit AstContextType(AstType* outer)
+      : AstTypeBase(kContext), outer_(outer) {}
+
+  AstType* outer_;
+};
+
+// -----------------------------------------------------------------------------
+// Array types.
+
+class AstArrayType : public AstTypeBase {
+ public:
+  AstType* Element() { return element_; }
+
+ private:
+  friend class AstType;
+
+  explicit AstArrayType(AstType* element)
+      : AstTypeBase(kArray), element_(element) {}
+
+  static AstType* New(AstType* element, Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstArrayType))) AstArrayType(element));
+  }
+
+  static AstArrayType* cast(AstType* type) {
+    DCHECK(IsKind(type, kArray));
+    return static_cast<AstArrayType*>(FromType(type));
+  }
+
+  AstType* element_;
+};
+
+// -----------------------------------------------------------------------------
+// Superclass for types with variable number of type fields.
+class AstStructuralType : public AstTypeBase {
+ public:
+  int LengthForTesting() { return Length(); }
+
+ protected:
+  friend class AstType;
+
+  int Length() { return length_; }
+
+  AstType* Get(int i) {
+    DCHECK(0 <= i && i < this->Length());
+    return elements_[i];
+  }
+
+  void Set(int i, AstType* type) {
+    DCHECK(0 <= i && i < this->Length());
+    elements_[i] = type;
+  }
+
+  void Shrink(int length) {
+    DCHECK(2 <= length && length <= this->Length());
+    length_ = length;
+  }
+
+  AstStructuralType(Kind kind, int length, i::Zone* zone)
+      : AstTypeBase(kind), length_(length) {
+    elements_ =
+        reinterpret_cast<AstType**>(zone->New(sizeof(AstType*) * length));
+  }
+
+ private:
+  int length_;
+  AstType** elements_;
+};
+
+// -----------------------------------------------------------------------------
+// Function types.
+
+class AstFunctionType : public AstStructuralType {
+ public:
+  int Arity() { return this->Length() - 2; }
+  AstType* Result() { return this->Get(0); }
+  AstType* Receiver() { return this->Get(1); }
+  AstType* Parameter(int i) { return this->Get(2 + i); }
+
+  void InitParameter(int i, AstType* type) { this->Set(2 + i, type); }
+
+ private:
+  friend class AstType;
+
+  AstFunctionType(AstType* result, AstType* receiver, int arity, Zone* zone)
+      : AstStructuralType(kFunction, 2 + arity, zone) {
+    Set(0, result);
+    Set(1, receiver);
+  }
+
+  static AstType* New(AstType* result, AstType* receiver, int arity,
+                      Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstFunctionType)))
+                      AstFunctionType(result, receiver, arity, zone));
+  }
+
+  static AstFunctionType* cast(AstType* type) {
+    DCHECK(IsKind(type, kFunction));
+    return static_cast<AstFunctionType*>(FromType(type));
+  }
+};
+
+// -----------------------------------------------------------------------------
+// Tuple types.
+
+class AstTupleType : public AstStructuralType {
+ public:
+  int Arity() { return this->Length(); }
+  AstType* Element(int i) { return this->Get(i); }
+
+  void InitElement(int i, AstType* type) { this->Set(i, type); }
+
+ private:
+  friend class AstType;
+
+  AstTupleType(int length, Zone* zone)
+      : AstStructuralType(kTuple, length, zone) {}
+
+  static AstType* New(int length, Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstTupleType)))
+                      AstTupleType(length, zone));
+  }
+
+  static AstTupleType* cast(AstType* type) {
+    DCHECK(IsKind(type, kTuple));
+    return static_cast<AstTupleType*>(FromType(type));
+  }
+};
+
+// -----------------------------------------------------------------------------
+// Union types (internal).
+// A union is a structured type with the following invariants:
+// - its length is at least 2
+// - at most one field is a bitset, and it must go into index 0
+// - no field is a union
+// - no field is a subtype of any other field
+class AstUnionType : public AstStructuralType {
+ private:
+  friend AstType;
+  friend AstBitsetType;
+
+  AstUnionType(int length, Zone* zone)
+      : AstStructuralType(kUnion, length, zone) {}
+
+  static AstType* New(int length, Zone* zone) {
+    return AsType(new (zone->New(sizeof(AstUnionType)))
+                      AstUnionType(length, zone));
+  }
+
+  static AstUnionType* cast(AstType* type) {
+    DCHECK(IsKind(type, kUnion));
+    return static_cast<AstUnionType*>(FromType(type));
+  }
+
+  bool Wellformed();
+};
+
+class AstType {
+ public:
+  typedef AstBitsetType::bitset bitset;  // Internal
+
+// Constructors.
+#define DEFINE_TYPE_CONSTRUCTOR(type, value) \
+  static AstType* type() { return AstBitsetType::New(AstBitsetType::k##type); }
+  AST_PROPER_BITSET_TYPE_LIST(DEFINE_TYPE_CONSTRUCTOR)
+#undef DEFINE_TYPE_CONSTRUCTOR
+
+  static AstType* SignedSmall() {
+    return AstBitsetType::New(AstBitsetType::SignedSmall());
+  }
+  static AstType* UnsignedSmall() {
+    return AstBitsetType::New(AstBitsetType::UnsignedSmall());
+  }
+
+  static AstType* Class(i::Handle<i::Map> map, Zone* zone) {
+    return AstClassType::New(map, zone);
+  }
+  static AstType* Constant(i::Handle<i::Object> value, Zone* zone) {
+    return AstConstantType::New(value, zone);
+  }
+  static AstType* Range(double min, double max, Zone* zone) {
+    return AstRangeType::New(min, max,
+                             AST_REPRESENTATION(AstBitsetType::kTagged |
+                                                AstBitsetType::kUntaggedNumber),
+                             zone);
+  }
+  static AstType* Context(AstType* outer, Zone* zone) {
+    return AstContextType::New(outer, zone);
+  }
+  static AstType* Array(AstType* element, Zone* zone) {
+    return AstArrayType::New(element, zone);
+  }
+  static AstType* Function(AstType* result, AstType* receiver, int arity,
+                           Zone* zone) {
+    return AstFunctionType::New(result, receiver, arity, zone);
+  }
+  static AstType* Function(AstType* result, Zone* zone) {
+    return Function(result, Any(), 0, zone);
+  }
+  static AstType* Function(AstType* result, AstType* param0, Zone* zone) {
+    AstType* function = Function(result, Any(), 1, zone);
+    function->AsFunction()->InitParameter(0, param0);
+    return function;
+  }
+  static AstType* Function(AstType* result, AstType* param0, AstType* param1,
+                           Zone* zone) {
+    AstType* function = Function(result, Any(), 2, zone);
+    function->AsFunction()->InitParameter(0, param0);
+    function->AsFunction()->InitParameter(1, param1);
+    return function;
+  }
+  static AstType* Function(AstType* result, AstType* param0, AstType* param1,
+                           AstType* param2, Zone* zone) {
+    AstType* function = Function(result, Any(), 3, zone);
+    function->AsFunction()->InitParameter(0, param0);
+    function->AsFunction()->InitParameter(1, param1);
+    function->AsFunction()->InitParameter(2, param2);
+    return function;
+  }
+  static AstType* Function(AstType* result, int arity, AstType** params,
+                           Zone* zone) {
+    AstType* function = Function(result, Any(), arity, zone);
+    for (int i = 0; i < arity; ++i) {
+      function->AsFunction()->InitParameter(i, params[i]);
+    }
+    return function;
+  }
+  static AstType* Tuple(AstType* first, AstType* second, AstType* third,
+                        Zone* zone) {
+    AstType* tuple = AstTupleType::New(3, zone);
+    tuple->AsTuple()->InitElement(0, first);
+    tuple->AsTuple()->InitElement(1, second);
+    tuple->AsTuple()->InitElement(2, third);
+    return tuple;
+  }
+
+#define CONSTRUCT_SIMD_TYPE(NAME, Name, name, lane_count, lane_type) \
+  static AstType* Name(Isolate* isolate, Zone* zone);
+  SIMD128_TYPES(CONSTRUCT_SIMD_TYPE)
+#undef CONSTRUCT_SIMD_TYPE
+
+  static AstType* Union(AstType* type1, AstType* type2, Zone* zone);
+  static AstType* Intersect(AstType* type1, AstType* type2, Zone* zone);
+
+  static AstType* Of(double value, Zone* zone) {
+    return AstBitsetType::New(
+        AstBitsetType::ExpandInternals(AstBitsetType::Lub(value)));
+  }
+  static AstType* Of(i::Object* value, Zone* zone) {
+    return AstBitsetType::New(
+        AstBitsetType::ExpandInternals(AstBitsetType::Lub(value)));
+  }
+  static AstType* Of(i::Handle<i::Object> value, Zone* zone) {
+    return Of(*value, zone);
+  }
+
+  static AstType* For(i::Map* map) {
+    return AstBitsetType::New(
+        AstBitsetType::ExpandInternals(AstBitsetType::Lub(map)));
+  }
+  static AstType* For(i::Handle<i::Map> map) { return For(*map); }
+
+  // Extraction of components.
+  static AstType* Representation(AstType* t, Zone* zone);
+  static AstType* Semantic(AstType* t, Zone* zone);
+
+  // Predicates.
+  bool IsInhabited() { return AstBitsetType::IsInhabited(this->BitsetLub()); }
+
+  bool Is(AstType* that) { return this == that || this->SlowIs(that); }
+  bool Maybe(AstType* that);
+  bool Equals(AstType* that) { return this->Is(that) && that->Is(this); }
+
+  // Equivalent to Constant(val)->Is(this), but avoiding allocation.
+  bool Contains(i::Object* val);
+  bool Contains(i::Handle<i::Object> val) { return this->Contains(*val); }
+
+  // State-dependent versions of the above that consider subtyping between
+  // a constant and its map class.
+  static AstType* NowOf(i::Object* value, Zone* zone);
+  static AstType* NowOf(i::Handle<i::Object> value, Zone* zone) {
+    return NowOf(*value, zone);
+  }
+  bool NowIs(AstType* that);
+  bool NowContains(i::Object* val);
+  bool NowContains(i::Handle<i::Object> val) { return this->NowContains(*val); }
+
+  bool NowStable();
+
+  // Inspection.
+  bool IsRange() { return IsKind(AstTypeBase::kRange); }
+  bool IsClass() { return IsKind(AstTypeBase::kClass); }
+  bool IsConstant() { return IsKind(AstTypeBase::kConstant); }
+  bool IsContext() { return IsKind(AstTypeBase::kContext); }
+  bool IsArray() { return IsKind(AstTypeBase::kArray); }
+  bool IsFunction() { return IsKind(AstTypeBase::kFunction); }
+  bool IsTuple() { return IsKind(AstTypeBase::kTuple); }
+
+  AstClassType* AsClass() { return AstClassType::cast(this); }
+  AstConstantType* AsConstant() { return AstConstantType::cast(this); }
+  AstRangeType* AsRange() { return AstRangeType::cast(this); }
+  AstContextType* AsContext() { return AstContextType::cast(this); }
+  AstArrayType* AsArray() { return AstArrayType::cast(this); }
+  AstFunctionType* AsFunction() { return AstFunctionType::cast(this); }
+  AstTupleType* AsTuple() { return AstTupleType::cast(this); }
+
+  // Minimum and maximum of a numeric type.
+  // These functions do not distinguish between -0 and +0.  If the type equals
+  // kNaN, they return NaN; otherwise kNaN is ignored.  Only call these
+  // functions on subtypes of Number.
+  double Min();
+  double Max();
+
+  // Extracts a range from the type: if the type is a range or a union
+  // containing a range, that range is returned; otherwise, NULL is returned.
+  AstType* GetRange();
+
+  static bool IsInteger(i::Object* x);
+  static bool IsInteger(double x) {
+    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
+  }
+
+  int NumClasses();
+  int NumConstants();
+
+  template <class T>
+  class Iterator {
+   public:
+    bool Done() const { return index_ < 0; }
+    i::Handle<T> Current();
+    void Advance();
+
+   private:
+    friend class AstType;
+
+    Iterator() : index_(-1) {}
+    explicit Iterator(AstType* type) : type_(type), index_(-1) { Advance(); }
+
+    inline bool matches(AstType* type);
+    inline AstType* get_type();
+
+    AstType* type_;
+    int index_;
+  };
+
+  Iterator<i::Map> Classes() {
+    if (this->IsBitset()) return Iterator<i::Map>();
+    return Iterator<i::Map>(this);
+  }
+  Iterator<i::Object> Constants() {
+    if (this->IsBitset()) return Iterator<i::Object>();
+    return Iterator<i::Object>(this);
+  }
+
+  // Printing.
+
+  enum PrintDimension { BOTH_DIMS, SEMANTIC_DIM, REPRESENTATION_DIM };
+
+  void PrintTo(std::ostream& os, PrintDimension dim = BOTH_DIMS);  // NOLINT
+
+#ifdef DEBUG
+  void Print();
+#endif
+
+  // Helpers for testing.
+  bool IsBitsetForTesting() { return IsBitset(); }
+  bool IsUnionForTesting() { return IsUnion(); }
+  bitset AsBitsetForTesting() { return AsBitset(); }
+  AstUnionType* AsUnionForTesting() { return AsUnion(); }
+
+ private:
+  // Friends.
+  template <class>
+  friend class Iterator;
+  friend AstBitsetType;
+  friend AstUnionType;
+
+  // Internal inspection.
+  bool IsKind(AstTypeBase::Kind kind) {
+    return AstTypeBase::IsKind(this, kind);
+  }
+
+  bool IsNone() { return this == None(); }
+  bool IsAny() { return this == Any(); }
+  bool IsBitset() { return AstBitsetType::IsBitset(this); }
+  bool IsUnion() { return IsKind(AstTypeBase::kUnion); }
+
+  bitset AsBitset() {
+    DCHECK(this->IsBitset());
+    return reinterpret_cast<AstBitsetType*>(this)->Bitset();
+  }
+  AstUnionType* AsUnion() { return AstUnionType::cast(this); }
+
+  bitset Representation();
+
+  // Auxiliary functions.
+  bool SemanticMaybe(AstType* that);
+
+  bitset BitsetGlb() { return AstBitsetType::Glb(this); }
+  bitset BitsetLub() { return AstBitsetType::Lub(this); }
+
+  bool SlowIs(AstType* that);
+  bool SemanticIs(AstType* that);
+
+  static bool Overlap(AstRangeType* lhs, AstRangeType* rhs);
+  static bool Contains(AstRangeType* lhs, AstRangeType* rhs);
+  static bool Contains(AstRangeType* range, AstConstantType* constant);
+  static bool Contains(AstRangeType* range, i::Object* val);
+
+  static int UpdateRange(AstType* type, AstUnionType* result, int size,
+                         Zone* zone);
+
+  static AstRangeType::Limits IntersectRangeAndBitset(AstType* range,
+                                                      AstType* bits,
+                                                      Zone* zone);
+  static AstRangeType::Limits ToLimits(bitset bits, Zone* zone);
+
+  bool SimplyEquals(AstType* that);
+
+  static int AddToUnion(AstType* type, AstUnionType* result, int size,
+                        Zone* zone);
+  static int IntersectAux(AstType* type, AstType* other, AstUnionType* result,
+                          int size, AstRangeType::Limits* limits, Zone* zone);
+  static AstType* NormalizeUnion(AstType* unioned, int size, Zone* zone);
+  static AstType* NormalizeRangeAndBitset(AstType* range, bitset* bits,
+                                          Zone* zone);
+};
+
+// -----------------------------------------------------------------------------
+// Type bounds. A simple struct to represent a pair of lower/upper types.
+
+struct AstBounds {
+  AstType* lower;
+  AstType* upper;
+
+  AstBounds()
+      :  // Make sure accessing uninitialized bounds crashes big-time.
+        lower(nullptr),
+        upper(nullptr) {}
+  explicit AstBounds(AstType* t) : lower(t), upper(t) {}
+  AstBounds(AstType* l, AstType* u) : lower(l), upper(u) {
+    DCHECK(lower->Is(upper));
+  }
+
+  // Unrestricted bounds.
+  static AstBounds Unbounded() {
+    return AstBounds(AstType::None(), AstType::Any());
+  }
+
+  // Meet: both b1 and b2 are known to hold.
+  static AstBounds Both(AstBounds b1, AstBounds b2, Zone* zone) {
+    AstType* lower = AstType::Union(b1.lower, b2.lower, zone);
+    AstType* upper = AstType::Intersect(b1.upper, b2.upper, zone);
+    // Lower bounds are considered approximate, correct as necessary.
+    if (!lower->Is(upper)) lower = upper;
+    return AstBounds(lower, upper);
+  }
+
+  // Join: either b1 or b2 is known to hold.
+  static AstBounds Either(AstBounds b1, AstBounds b2, Zone* zone) {
+    AstType* lower = AstType::Intersect(b1.lower, b2.lower, zone);
+    AstType* upper = AstType::Union(b1.upper, b2.upper, zone);
+    return AstBounds(lower, upper);
+  }
+
+  static AstBounds NarrowLower(AstBounds b, AstType* t, Zone* zone) {
+    AstType* lower = AstType::Union(b.lower, t, zone);
+    // Lower bounds are considered approximate, correct as necessary.
+    if (!lower->Is(b.upper)) lower = b.upper;
+    return AstBounds(lower, b.upper);
+  }
+  static AstBounds NarrowUpper(AstBounds b, AstType* t, Zone* zone) {
+    AstType* lower = b.lower;
+    AstType* upper = AstType::Intersect(b.upper, t, zone);
+    // Lower bounds are considered approximate, correct as necessary.
+    if (!lower->Is(upper)) lower = upper;
+    return AstBounds(lower, upper);
+  }
+
+  bool Narrows(AstBounds that) {
+    return that.lower->Is(this->lower) && this->upper->Is(that.upper);
+  }
+};
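
Both() is the meet of the bounds lattice (accumulate lower bounds, intersect upper bounds, then snap the approximate lower back under the upper) and Either() is the join (keep only the common lower bound, widen the upper to cover both alternatives). With bitset masks standing in for types, subset inclusion for Is, | for Union and & for Intersect, the shape of both operations looks like this sketch:

    #include <cassert>
    #include <stdint.h>

    // Toy bounds over bitset "types": lower is always a subset of upper.
    struct ToyBounds {
      uint32_t lower, upper;
    };

    static bool Is(uint32_t a, uint32_t b) { return (a | b) == b; }  // subset

    // Meet: both b1 and b2 hold, so lowers accumulate and uppers intersect.
    static ToyBounds Both(ToyBounds b1, ToyBounds b2) {
      uint32_t lower = b1.lower | b2.lower;
      uint32_t upper = b1.upper & b2.upper;
      if (!Is(lower, upper)) lower = upper;  // lowers are only approximate
      return ToyBounds{lower, upper};
    }

    // Join: either b1 or b2 holds, so only the common lower bound survives
    // and the upper bound must cover both alternatives.
    static ToyBounds Either(ToyBounds b1, ToyBounds b2) {
      return ToyBounds{b1.lower & b2.lower, b1.upper | b2.upper};
    }

    int main() {
      ToyBounds a{0x01, 0x0F}, b{0x02, 0x33};
      ToyBounds m = Both(a, b);
      assert(m.upper == 0x03 && Is(m.lower, m.upper));
      ToyBounds j = Either(a, b);
      assert(j.lower == 0x00 && j.upper == 0x3F);
    }
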
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_AST_AST_TYPES_H_
diff --git a/src/ast/ast-value-factory.cc b/src/ast/ast-value-factory.cc
index a271751..33ccec7 100644
--- a/src/ast/ast-value-factory.cc
+++ b/src/ast/ast-value-factory.cc
@@ -237,28 +237,14 @@
 
 
 const AstRawString* AstValueFactory::GetString(Handle<String> literal) {
-  // For the FlatContent to stay valid, we shouldn't do any heap
-  // allocation. Make sure we won't try to internalize the string in GetString.
   AstRawString* result = NULL;
-  Isolate* saved_isolate = isolate_;
-  isolate_ = NULL;
-  {
-    DisallowHeapAllocation no_gc;
-    String::FlatContent content = literal->GetFlatContent();
-    if (content.IsOneByte()) {
-      result = GetOneByteStringInternal(content.ToOneByteVector());
-    } else {
-      DCHECK(content.IsTwoByte());
-      result = GetTwoByteStringInternal(content.ToUC16Vector());
-    }
-  }
-  isolate_ = saved_isolate;
-  if (strings_ != nullptr && isolate_) {
-    // Only the string we are creating is uninternalized at this point.
-    DCHECK_EQ(result, strings_);
-    DCHECK_NULL(strings_->next());
-    result->Internalize(isolate_);
-    ResetStrings();
+  DisallowHeapAllocation no_gc;
+  String::FlatContent content = literal->GetFlatContent();
+  if (content.IsOneByte()) {
+    result = GetOneByteStringInternal(content.ToOneByteVector());
+  } else {
+    DCHECK(content.IsTwoByte());
+    result = GetTwoByteStringInternal(content.ToUC16Vector());
   }
   return result;
 }
@@ -274,15 +260,40 @@
   return new_string;
 }
 
+const AstRawString* AstValueFactory::ConcatStrings(const AstRawString* left,
+                                                   const AstRawString* right) {
+  int left_length = left->length();
+  int right_length = right->length();
+  const unsigned char* left_data = left->raw_data();
+  const unsigned char* right_data = right->raw_data();
+  if (left->is_one_byte() && right->is_one_byte()) {
+    uint8_t* buffer = zone_->NewArray<uint8_t>(left_length + right_length);
+    memcpy(buffer, left_data, left_length);
+    memcpy(buffer + left_length, right_data, right_length);
+    Vector<const uint8_t> literal(buffer, left_length + right_length);
+    return GetOneByteStringInternal(literal);
+  } else {
+    uint16_t* buffer = zone_->NewArray<uint16_t>(left_length + right_length);
+    if (left->is_one_byte()) {
+      for (int i = 0; i < left_length; ++i) {
+        buffer[i] = left_data[i];
+      }
+    } else {
+      memcpy(buffer, left_data, 2 * left_length);
+    }
+    if (right->is_one_byte()) {
+      for (int i = 0; i < right_length; ++i) {
+        buffer[i + left_length] = right_data[i];
+      }
+    } else {
+      memcpy(buffer + left_length, right_data, 2 * right_length);
+    }
+    Vector<const uint16_t> literal(buffer, left_length + right_length);
+    return GetTwoByteStringInternal(literal);
+  }
+}
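
ConcatStrings keeps the result in the narrowest encoding that fits: two one-byte inputs stay one byte, while any two-byte input forces a two-byte result, with one-byte data widened unit by unit. A self-contained sketch of that widening rule over plain std::vector buffers (none of the zone or AstRawString machinery):

    #include <cassert>
    #include <stdint.h>
    #include <vector>

    // Widen-on-demand concatenation: Latin-1 (one-byte) data is promoted to
    // two-byte code units only because the other side already needs two bytes.
    static std::vector<uint16_t> ConcatWidened(const std::vector<uint8_t>& left,
                                               const std::vector<uint16_t>& right) {
      std::vector<uint16_t> out;
      out.reserve(left.size() + right.size());
      for (uint8_t c : left) out.push_back(c);             // zero-extend each unit
      out.insert(out.end(), right.begin(), right.end());   // already two-byte
      return out;
    }

    int main() {
      std::vector<uint8_t> one_byte = {'a', 'b'};
      std::vector<uint16_t> two_byte = {0x263A};  // a non-Latin-1 code unit
      std::vector<uint16_t> joined = ConcatWidened(one_byte, two_byte);
      assert(joined.size() == 3);
      assert(joined[0] == 'a' && joined[2] == 0x263A);
    }
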
 
 void AstValueFactory::Internalize(Isolate* isolate) {
-  if (isolate_) {
-    DCHECK_NULL(strings_);
-    DCHECK_NULL(values_);
-    // Everything is already internalized.
-    return;
-  }
-
   // Strings need to be internalized before values, because values refer to
   // strings.
   for (AstString* current = strings_; current != nullptr;) {
@@ -295,7 +306,6 @@
     current->Internalize(isolate);
     current = next;
   }
-  isolate_ = isolate;
   ResetStrings();
   values_ = nullptr;
 }
diff --git a/src/ast/ast-value-factory.h b/src/ast/ast-value-factory.h
index da209e1..bc3eca2 100644
--- a/src/ast/ast-value-factory.h
+++ b/src/ast/ast-value-factory.h
@@ -283,8 +283,8 @@
   F(default, "default")                         \
   F(done, "done")                               \
   F(dot, ".")                                   \
+  F(dot_class_field_init, ".class-field-init")  \
   F(dot_for, ".for")                            \
-  F(dot_generator, ".generator")                \
   F(dot_generator_object, ".generator_object")  \
   F(dot_iterator, ".iterator")                  \
   F(dot_result, ".result")                      \
@@ -326,7 +326,6 @@
         values_(nullptr),
         strings_end_(&strings_),
         zone_(zone),
-        isolate_(NULL),
         hash_seed_(hash_seed) {
     ResetStrings();
 #define F(name, str) name##_string_ = NULL;
@@ -352,11 +351,10 @@
   const AstRawString* GetString(Handle<String> literal);
   const AstConsString* NewConsString(const AstString* left,
                                      const AstString* right);
+  const AstRawString* ConcatStrings(const AstRawString* left,
+                                    const AstRawString* right);
 
   void Internalize(Isolate* isolate);
-  bool IsInternalized() {
-    return isolate_ != NULL;
-  }
 
 #define F(name, str)                                                    \
   const AstRawString* name##_string() {                                 \
@@ -384,21 +382,13 @@
 
  private:
   AstValue* AddValue(AstValue* value) {
-    if (isolate_) {
-      value->Internalize(isolate_);
-    } else {
-      value->set_next(values_);
-      values_ = value;
-    }
+    value->set_next(values_);
+    values_ = value;
     return value;
   }
   AstString* AddString(AstString* string) {
-    if (isolate_) {
-      string->Internalize(isolate_);
-    } else {
-      *strings_end_ = string;
-      strings_end_ = string->next_location();
-    }
+    *strings_end_ = string;
+    strings_end_ = string->next_location();
     return string;
   }
   void ResetStrings() {
@@ -413,7 +403,7 @@
   static bool AstRawStringCompare(void* a, void* b);
 
   // All strings are copied here, one after another (no NULLs inbetween).
-  base::HashMap string_table_;
+  base::CustomMatcherHashMap string_table_;
   // For keeping track of all AstValues and AstRawStrings we've created (so that
   // they can be internalized later).
   AstValue* values_;
@@ -422,7 +412,6 @@
   AstString* strings_;
   AstString** strings_end_;
   Zone* zone_;
-  Isolate* isolate_;
 
   uint32_t hash_seed_;
 
diff --git a/src/ast/ast.cc b/src/ast/ast.cc
index 06037f4..97d1f9d 100644
--- a/src/ast/ast.cc
+++ b/src/ast/ast.cc
@@ -6,6 +6,7 @@
 
 #include <cmath>  // For isfinite.
 
+#include "src/ast/compile-time-value.h"
 #include "src/ast/prettyprinter.h"
 #include "src/ast/scopes.h"
 #include "src/base/hashmap.h"
@@ -13,7 +14,6 @@
 #include "src/code-stubs.h"
 #include "src/contexts.h"
 #include "src/conversions.h"
-#include "src/parsing/parser.h"
 #include "src/property-details.h"
 #include "src/property.h"
 #include "src/string-stream.h"
@@ -83,18 +83,14 @@
 }
 
 bool Expression::IsUndefinedLiteral() const {
-  if (IsLiteral()) {
-    if (AsLiteral()->raw_value()->IsUndefined()) {
-      return true;
-    }
-  }
+  if (IsLiteral() && AsLiteral()->raw_value()->IsUndefined()) return true;
 
   const VariableProxy* var_proxy = AsVariableProxy();
-  if (var_proxy == NULL) return false;
+  if (var_proxy == nullptr) return false;
   Variable* var = var_proxy->var();
   // The global identifier "undefined" is immutable. Everything
   // else could be reassigned.
-  return var != NULL && var->IsUnallocatedOrGlobalSlot() &&
+  return var != NULL && var->IsUnallocated() &&
          var_proxy->raw_name()->IsOneByteEqualTo("undefined");
 }
 
@@ -166,36 +162,32 @@
 VariableProxy::VariableProxy(Variable* var, int start_position,
                              int end_position)
     : Expression(start_position, kVariableProxy),
-      bit_field_(IsThisField::encode(var->is_this()) |
-                 IsAssignedField::encode(false) |
-                 IsResolvedField::encode(false)),
       end_position_(end_position),
       raw_name_(var->raw_name()),
       next_unresolved_(nullptr) {
+  bit_field_ |= IsThisField::encode(var->is_this()) |
+                IsAssignedField::encode(false) | IsResolvedField::encode(false);
   BindTo(var);
 }
 
 VariableProxy::VariableProxy(const AstRawString* name,
-                             Variable::Kind variable_kind, int start_position,
+                             VariableKind variable_kind, int start_position,
                              int end_position)
     : Expression(start_position, kVariableProxy),
-      bit_field_(IsThisField::encode(variable_kind == Variable::THIS) |
-                 IsAssignedField::encode(false) |
-                 IsResolvedField::encode(false)),
       end_position_(end_position),
       raw_name_(name),
-      next_unresolved_(nullptr) {}
+      next_unresolved_(nullptr) {
+  bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) |
+                IsAssignedField::encode(false) | IsResolvedField::encode(false);
+}
 
 VariableProxy::VariableProxy(const VariableProxy* copy_from)
     : Expression(copy_from->position(), kVariableProxy),
-      bit_field_(copy_from->bit_field_),
       end_position_(copy_from->end_position_),
       next_unresolved_(nullptr) {
-  if (copy_from->is_resolved()) {
-    var_ = copy_from->var_;
-  } else {
-    raw_name_ = copy_from->raw_name_;
-  }
+  bit_field_ = copy_from->bit_field_;
+  DCHECK(!copy_from->is_resolved());
+  raw_name_ = copy_from->raw_name_;
 }
 
 void VariableProxy::BindTo(Variable* var) {
@@ -253,12 +245,13 @@
 Assignment::Assignment(Token::Value op, Expression* target, Expression* value,
                        int pos)
     : Expression(pos, kAssignment),
-      bit_field_(
-          IsUninitializedField::encode(false) | KeyTypeField::encode(ELEMENT) |
-          StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)),
       target_(target),
       value_(value),
-      binary_operation_(NULL) {}
+      binary_operation_(NULL) {
+  bit_field_ |= IsUninitializedField::encode(false) |
+                KeyTypeField::encode(ELEMENT) |
+                StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op);
+}
 
 void Assignment::AssignFeedbackVectorSlots(Isolate* isolate,
                                            FeedbackVectorSpec* spec,
@@ -273,7 +266,7 @@
   AssignVectorSlots(expression(), spec, &slot_);
   // Assign a slot to collect feedback about binary operations. Used only in
   // ignition. Fullcodegen uses AstId to record type feedback.
-  binary_operation_slot_ = spec->AddGeneralSlot();
+  binary_operation_slot_ = spec->AddInterpreterBinaryOpICSlot();
 }
 
 
@@ -320,6 +313,7 @@
   return scope()->language_mode();
 }
 
+FunctionKind FunctionLiteral::kind() const { return scope()->function_kind(); }
 
 bool FunctionLiteral::NeedsHomeObject(Expression* expr) {
   if (expr == nullptr || !expr->IsFunctionLiteral()) return false;
@@ -327,27 +321,16 @@
   return expr->AsFunctionLiteral()->scope()->NeedsHomeObject();
 }
 
-
 ObjectLiteralProperty::ObjectLiteralProperty(Expression* key, Expression* value,
-                                             Kind kind, bool is_static,
-                                             bool is_computed_name)
-    : key_(key),
-      value_(value),
+                                             Kind kind, bool is_computed_name)
+    : LiteralProperty(key, value, is_computed_name),
       kind_(kind),
-      emit_store_(true),
-      is_static_(is_static),
-      is_computed_name_(is_computed_name) {}
-
+      emit_store_(true) {}
 
 ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
                                              Expression* key, Expression* value,
-                                             bool is_static,
                                              bool is_computed_name)
-    : key_(key),
-      value_(value),
-      emit_store_(true),
-      is_static_(is_static),
-      is_computed_name_(is_computed_name) {
+    : LiteralProperty(key, value, is_computed_name), emit_store_(true) {
   if (!is_computed_name &&
       key->AsLiteral()->raw_value()->EqualsString(
           ast_value_factory->proto_string())) {
@@ -361,13 +344,20 @@
   }
 }
 
-bool ObjectLiteralProperty::NeedsSetFunctionName() const {
+bool LiteralProperty::NeedsSetFunctionName() const {
   return is_computed_name_ &&
          (value_->IsAnonymousFunctionDefinition() ||
           (value_->IsFunctionLiteral() &&
            IsConciseMethod(value_->AsFunctionLiteral()->kind())));
 }
 
+ClassLiteralProperty::ClassLiteralProperty(Expression* key, Expression* value,
+                                           Kind kind, bool is_static,
+                                           bool is_computed_name)
+    : LiteralProperty(key, value, is_computed_name),
+      kind_(kind),
+      is_static_(is_static) {}
+
 void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
                                              FeedbackVectorSpec* spec,
                                              FeedbackVectorSlotCache* cache) {
@@ -379,7 +369,7 @@
   }
 
   for (int i = 0; i < properties()->length(); i++) {
-    ObjectLiteral::Property* property = properties()->at(i);
+    ClassLiteral::Property* property = properties()->at(i);
     Expression* value = property->value();
     if (FunctionLiteral::NeedsHomeObject(value)) {
       property->SetSlot(spec->AddStoreICSlot());
@@ -387,8 +377,7 @@
   }
 }
 
-
-bool ObjectLiteral::Property::IsCompileTimeValue() {
+bool ObjectLiteral::Property::IsCompileTimeValue() const {
   return kind_ == CONSTANT ||
       (kind_ == MATERIALIZED_LITERAL &&
        CompileTimeValue::IsCompileTimeValue(value_));
@@ -399,11 +388,7 @@
   emit_store_ = emit_store;
 }
 
-
-bool ObjectLiteral::Property::emit_store() {
-  return emit_store_;
-}
-
+bool ObjectLiteral::Property::emit_store() const { return emit_store_; }
 
 void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
                                               FeedbackVectorSpec* spec,
@@ -473,8 +458,8 @@
 
   ZoneAllocationPolicy allocator(zone);
 
-  ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
-                    allocator);
+  CustomMatcherZoneHashMap table(
+      Literal::Match, ZoneHashMap::kDefaultHashMapCapacity, allocator);
   for (int i = properties()->length() - 1; i >= 0; i--) {
     ObjectLiteral::Property* property = properties()->at(i);
     if (property->is_computed_name()) continue;
@@ -551,7 +536,7 @@
     // TODO(verwaest): Remove once we can store them inline.
     if (FLAG_track_double_fields &&
         (value->IsNumber() || value->IsUninitialized(isolate))) {
-      may_store_doubles_ = true;
+      bit_field_ = MayStoreDoublesField::update(bit_field_, true);
     }
 
     is_simple = is_simple && !value->IsUninitialized(isolate);
@@ -578,9 +563,11 @@
   }
 
   constant_properties_ = constant_properties;
-  fast_elements_ =
-      (max_element_index <= 32) || ((2 * elements) >= max_element_index);
-  has_elements_ = elements > 0;
+  bit_field_ = FastElementsField::update(
+      bit_field_,
+      (max_element_index <= 32) || ((2 * elements) >= max_element_index));
+  bit_field_ = HasElementsField::update(bit_field_, elements > 0);
+
   set_is_simple(is_simple);
   set_depth(depth_acc);
 }
@@ -662,8 +649,7 @@
                                              FeedbackVectorSlotCache* cache) {
   // This logic that computes the number of slots needed for vector store
   // ics must mirror FullCodeGenerator::VisitArrayLiteral.
-  int array_index = 0;
-  for (; array_index < values()->length(); array_index++) {
+  for (int array_index = 0; array_index < values()->length(); array_index++) {
     Expression* subexpr = values()->at(array_index);
     DCHECK(!subexpr->IsSpread());
     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
@@ -731,7 +717,7 @@
     case Token::OR:
       return;
     default:
-      type_feedback_slot_ = spec->AddGeneralSlot();
+      type_feedback_slot_ = spec->AddInterpreterBinaryOpICSlot();
       return;
   }
 }
@@ -741,6 +727,20 @@
   return maybe_unary != NULL && maybe_unary->op() == Token::TYPEOF;
 }
 
+void CompareOperation::AssignFeedbackVectorSlots(
+    Isolate* isolate, FeedbackVectorSpec* spec,
+    FeedbackVectorSlotCache* cache_) {
+  // Feedback vector slot is only used by interpreter for binary operations.
+  // Full-codegen uses AstId to record type feedback.
+  switch (op()) {
+    // instanceof and in do not collect type feedback.
+    case Token::INSTANCEOF:
+    case Token::IN:
+      return;
+    default:
+      type_feedback_slot_ = spec->AddInterpreterCompareICSlot();
+  }
+}
 
 // Check for the pattern: typeof <expression> equals <string literal>.
 static bool MatchLiteralCompareTypeof(Expression* left,
@@ -759,8 +759,8 @@
 
 bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
                                               Handle<String>* check) {
-  return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
-      MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
+  return MatchLiteralCompareTypeof(left_, op(), right_, expr, check) ||
+         MatchLiteralCompareTypeof(right_, op(), left_, expr, check);
 }
 
 
@@ -790,8 +790,8 @@
 }
 
 bool CompareOperation::IsLiteralCompareUndefined(Expression** expr) {
-  return MatchLiteralCompareUndefined(left_, op_, right_, expr) ||
-         MatchLiteralCompareUndefined(right_, op_, left_, expr);
+  return MatchLiteralCompareUndefined(left_, op(), right_, expr) ||
+         MatchLiteralCompareUndefined(right_, op(), left_, expr);
 }
 
 
@@ -809,8 +809,8 @@
 
 
 bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
-  return MatchLiteralCompareNull(left_, op_, right_, expr) ||
-      MatchLiteralCompareNull(right_, op_, left_, expr);
+  return MatchLiteralCompareNull(left_, op(), right_, expr) ||
+         MatchLiteralCompareNull(right_, op(), left_, expr);
 }
 
 
@@ -913,7 +913,7 @@
   if (proxy != NULL) {
     if (is_possibly_eval()) {
       return POSSIBLY_EVAL_CALL;
-    } else if (proxy->var()->IsUnallocatedOrGlobalSlot()) {
+    } else if (proxy->var()->IsUnallocated()) {
       return GLOBAL_CALL;
     } else if (proxy->var()->IsLookupSlot()) {
       return LOOKUP_SLOT_CALL;
@@ -940,7 +940,13 @@
     : Expression(pos, kCaseClause),
       label_(label),
       statements_(statements),
-      compare_type_(Type::None()) {}
+      compare_type_(AstType::None()) {}
+
+void CaseClause::AssignFeedbackVectorSlots(Isolate* isolate,
+                                           FeedbackVectorSpec* spec,
+                                           FeedbackVectorSlotCache* cache) {
+  type_feedback_slot_ = spec->AddInterpreterCompareICSlot();
+}
 
 uint32_t Literal::Hash() {
   return raw_value()->IsString()
diff --git a/src/ast/ast.h b/src/ast/ast.h
index 1b80d3f..a6661be 100644
--- a/src/ast/ast.h
+++ b/src/ast/ast.h
@@ -5,6 +5,7 @@
 #ifndef V8_AST_AST_H_
 #define V8_AST_AST_H_
 
+#include "src/ast/ast-types.h"
 #include "src/ast/ast-value-factory.h"
 #include "src/ast/modules.h"
 #include "src/ast/variables.h"
@@ -17,7 +18,6 @@
 #include "src/parsing/token.h"
 #include "src/runtime/runtime.h"
 #include "src/small-pointer-list.h"
-#include "src/types.h"
 #include "src/utils.h"
 
 namespace v8 {
@@ -126,17 +126,11 @@
 #undef DEF_FORWARD_DECLARATION
 
 
-// Typedef only introduced to avoid unreadable code.
-typedef ZoneList<Handle<String>> ZoneStringList;
-typedef ZoneList<Handle<Object>> ZoneObjectList;
-
-
 class FeedbackVectorSlotCache {
  public:
   explicit FeedbackVectorSlotCache(Zone* zone)
       : zone_(zone),
-        hash_map_(base::HashMap::PointersMatch,
-                  ZoneHashMap::kDefaultHashMapCapacity,
+        hash_map_(ZoneHashMap::kDefaultHashMapCapacity,
                   ZoneAllocationPolicy(zone)) {}
 
   void Put(Variable* variable, FeedbackVectorSlot slot) {
@@ -192,7 +186,7 @@
 
   void* operator new(size_t size, Zone* zone) { return zone->New(size); }
 
-  NodeType node_type() const { return node_type_; }
+  NodeType node_type() const { return NodeTypeField::decode(bit_field_); }
   int position() const { return position_; }
 
 #ifdef DEBUG
@@ -211,19 +205,20 @@
   IterationStatement* AsIterationStatement();
   MaterializedLiteral* AsMaterializedLiteral();
 
- protected:
-  AstNode(int position, NodeType type)
-      : position_(position), node_type_(type) {}
-
  private:
   // Hidden to prevent accidental usage. It would have to load the
   // current zone from the TLS.
   void* operator new(size_t size);
 
   int position_;
-  NodeType node_type_;
-  // Ends with NodeType which is uint8_t sized. Deriving classes in turn begin
-  // sub-int32_t-sized fields for optimum packing efficiency.
+  class NodeTypeField : public BitField<NodeType, 0, 6> {};
+
+ protected:
+  uint32_t bit_field_;
+  static const uint8_t kNextBitFieldIndex = NodeTypeField::kNext;
+
+  AstNode(int position, NodeType type)
+      : position_(position), bit_field_(NodeTypeField::encode(type)) {}
 };
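
The refactoring that runs through the rest of this file folds per-class bools and enums into the shared bit_field_ introduced here: each class declares its fields as BitField specializations and exports kNextBitFieldIndex (the kNext of its last field) so derived classes append bits without overlapping. A simplified stand-in for V8's BitField helper, showing how encode/decode/update and the kNext chaining fit together:

    #include <cassert>
    #include <stdint.h>

    // Simplified stand-in for V8's BitField: a value of type T occupying
    // `size` bits starting at bit `shift` of a shared uint32_t field.
    template <class T, int shift, int size>
    struct ToyBitField {
      static constexpr int kNext = shift + size;  // where the next field starts
      static constexpr uint32_t kMask = ((1u << size) - 1u) << shift;
      static uint32_t encode(T value) {
        return static_cast<uint32_t>(value) << shift;
      }
      static T decode(uint32_t field) {
        return static_cast<T>((field & kMask) >> shift);
      }
      static uint32_t update(uint32_t field, T value) {
        return (field & ~kMask) | encode(value);
      }
    };

    // Chaining as in the patch: the base claims bits [0, 6) for the node type,
    // and a later field continues at kNext instead of a hard-coded shift.
    enum ToyNodeType { kToyBlock = 5, kToyLiteral = 17 };
    using ToyNodeTypeField = ToyBitField<ToyNodeType, 0, 6>;
    using ToyIgnoreCompletionField = ToyBitField<bool, ToyNodeTypeField::kNext, 1>;

    int main() {
      uint32_t bits = ToyNodeTypeField::encode(kToyLiteral);
      bits = ToyIgnoreCompletionField::update(bits, true);
      assert(ToyNodeTypeField::decode(bits) == kToyLiteral);
      assert(ToyIgnoreCompletionField::decode(bits) == true);
    }
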
 
 
@@ -234,6 +229,8 @@
 
  protected:
   Statement(int position, NodeType type) : AstNode(position, type) {}
+
+  static const uint8_t kNextBitFieldIndex = AstNode::kNextBitFieldIndex;
 };
 
 
@@ -349,11 +346,18 @@
   BailoutId id() const { return BailoutId(local_id(0)); }
   TypeFeedbackId test_id() const { return TypeFeedbackId(local_id(1)); }
 
+ private:
+  int local_id(int n) const { return base_id() + parent_num_ids() + n; }
+
+  int base_id_;
+  class ToBooleanTypesField
+      : public BitField<uint16_t, AstNode::kNextBitFieldIndex, 9> {};
+
  protected:
   Expression(int pos, NodeType type)
-      : AstNode(pos, type),
-        bit_field_(0),
-        base_id_(BailoutId::None().ToInt()) {}
+      : AstNode(pos, type), base_id_(BailoutId::None().ToInt()) {
+    bit_field_ = ToBooleanTypesField::update(bit_field_, 0);
+  }
 
   static int parent_num_ids() { return 0; }
   void set_to_boolean_types(uint16_t types) {
@@ -364,12 +368,7 @@
     return base_id_;
   }
 
- private:
-  int local_id(int n) const { return base_id() + parent_num_ids() + n; }
-
-  uint16_t bit_field_;
-  int base_id_;
-  class ToBooleanTypesField : public BitField16<uint16_t, 0, 9> {};
+  static const uint8_t kNextBitFieldIndex = ToBooleanTypesField::kNext;
 };
 
 
@@ -389,7 +388,7 @@
 
   // Testers.
   bool is_target_for_anonymous() const {
-    return breakable_type_ == TARGET_FOR_ANONYMOUS;
+    return BreakableTypeField::decode(bit_field_) == TARGET_FOR_ANONYMOUS;
   }
 
   void set_base_id(int id) { base_id_ = id; }
@@ -397,14 +396,28 @@
   BailoutId EntryId() const { return BailoutId(local_id(0)); }
   BailoutId ExitId() const { return BailoutId(local_id(1)); }
 
+ private:
+  int local_id(int n) const { return base_id() + parent_num_ids() + n; }
+
+  BreakableType breakableType() const {
+    return BreakableTypeField::decode(bit_field_);
+  }
+
+  int base_id_;
+  Label break_target_;
+  ZoneList<const AstRawString*>* labels_;
+
+  class BreakableTypeField
+      : public BitField<BreakableType, Statement::kNextBitFieldIndex, 1> {};
+
  protected:
   BreakableStatement(ZoneList<const AstRawString*>* labels,
                      BreakableType breakable_type, int position, NodeType type)
       : Statement(position, type),
-        breakable_type_(breakable_type),
         base_id_(BailoutId::None().ToInt()),
         labels_(labels) {
     DCHECK(labels == NULL || labels->length() > 0);
+    bit_field_ |= BreakableTypeField::encode(breakable_type);
   }
   static int parent_num_ids() { return 0; }
 
@@ -413,20 +426,16 @@
     return base_id_;
   }
 
- private:
-  int local_id(int n) const { return base_id() + parent_num_ids() + n; }
-
-  BreakableType breakable_type_;
-  int base_id_;
-  Label break_target_;
-  ZoneList<const AstRawString*>* labels_;
+  static const uint8_t kNextBitFieldIndex = BreakableTypeField::kNext;
 };
 
 
 class Block final : public BreakableStatement {
  public:
   ZoneList<Statement*>* statements() { return &statements_; }
-  bool ignore_completion_value() const { return ignore_completion_value_; }
+  bool ignore_completion_value() const {
+    return IgnoreCompletionField::decode(bit_field_);
+  }
 
   static int num_ids() { return parent_num_ids() + 1; }
   BailoutId DeclsId() const { return BailoutId(local_id(0)); }
@@ -446,14 +455,20 @@
         bool ignore_completion_value, int pos)
       : BreakableStatement(labels, TARGET_FOR_NAMED_ONLY, pos, kBlock),
         statements_(capacity, zone),
-        ignore_completion_value_(ignore_completion_value),
-        scope_(NULL) {}
+        scope_(NULL) {
+    bit_field_ |= IgnoreCompletionField::encode(ignore_completion_value);
+  }
   static int parent_num_ids() { return BreakableStatement::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
   ZoneList<Statement*> statements_;
-  bool ignore_completion_value_;
   Scope* scope_;
+
+  class IgnoreCompletionField
+      : public BitField<bool, BreakableStatement::kNextBitFieldIndex, 1> {};
+
+ protected:
+  static const uint8_t kNextBitFieldIndex = IgnoreCompletionField::kNext;
 };
 
 
@@ -469,6 +484,9 @@
   }
   bool IsAnonymousFunctionDefinition() const;
 
+ protected:
+  static const uint8_t kNextBitFieldIndex = Expression::kNextBitFieldIndex;
+
  private:
   friend class AstNodeFactory;
 
@@ -498,6 +516,8 @@
   Declaration(VariableProxy* proxy, Scope* scope, int pos, NodeType type)
       : AstNode(pos, type), proxy_(proxy), scope_(scope) {}
 
+  static const uint8_t kNextBitFieldIndex = AstNode::kNextBitFieldIndex;
+
  private:
   VariableProxy* proxy_;
 
@@ -561,6 +581,9 @@
   static int parent_num_ids() { return BreakableStatement::num_ids(); }
   void Initialize(Statement* body) { body_ = body; }
 
+  static const uint8_t kNextBitFieldIndex =
+      BreakableStatement::kNextBitFieldIndex;
+
  private:
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
@@ -715,16 +738,19 @@
   }
 
   enum ForInType { FAST_FOR_IN, SLOW_FOR_IN };
-  ForInType for_in_type() const { return for_in_type_; }
-  void set_for_in_type(ForInType type) { for_in_type_ = type; }
+  ForInType for_in_type() const { return ForInTypeField::decode(bit_field_); }
+  void set_for_in_type(ForInType type) {
+    bit_field_ = ForInTypeField::update(bit_field_, type);
+  }
 
-  static int num_ids() { return parent_num_ids() + 6; }
+  static int num_ids() { return parent_num_ids() + 7; }
   BailoutId BodyId() const { return BailoutId(local_id(0)); }
   BailoutId EnumId() const { return BailoutId(local_id(1)); }
   BailoutId ToObjectId() const { return BailoutId(local_id(2)); }
   BailoutId PrepareId() const { return BailoutId(local_id(3)); }
   BailoutId FilterId() const { return BailoutId(local_id(4)); }
   BailoutId AssignmentId() const { return BailoutId(local_id(5)); }
+  BailoutId IncrementId() const { return BailoutId(local_id(6)); }
   BailoutId ContinueId() const { return EntryId(); }
   BailoutId StackCheckId() const { return BodyId(); }
 
@@ -734,16 +760,23 @@
   ForInStatement(ZoneList<const AstRawString*>* labels, int pos)
       : ForEachStatement(labels, pos, kForInStatement),
         each_(nullptr),
-        subject_(nullptr),
-        for_in_type_(SLOW_FOR_IN) {}
+        subject_(nullptr) {
+    bit_field_ = ForInTypeField::update(bit_field_, SLOW_FOR_IN);
+  }
+
   static int parent_num_ids() { return ForEachStatement::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
   Expression* each_;
   Expression* subject_;
-  ForInType for_in_type_;
   FeedbackVectorSlot each_slot_;
   FeedbackVectorSlot for_in_feedback_slot_;
+
+  class ForInTypeField
+      : public BitField<ForInType, ForEachStatement::kNextBitFieldIndex, 1> {};
+
+ protected:
+  static const uint8_t kNextBitFieldIndex = ForInTypeField::kNext;
 };
 
 
@@ -938,8 +971,18 @@
   BailoutId EntryId() const { return BailoutId(local_id(0)); }
   TypeFeedbackId CompareId() { return TypeFeedbackId(local_id(1)); }
 
-  Type* compare_type() { return compare_type_; }
-  void set_compare_type(Type* type) { compare_type_ = type; }
+  AstType* compare_type() { return compare_type_; }
+  void set_compare_type(AstType* type) { compare_type_ = type; }
+
+  // CaseClause will have both a slot in the feedback vector and the
+  // TypeFeedbackId to record the type information. TypeFeedbackId is used by
+  // full codegen and the feedback vector slot is used by interpreter.
+  void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
+                                 FeedbackVectorSlotCache* cache);
+
+  FeedbackVectorSlot CompareOperationFeedbackSlot() {
+    return type_feedback_slot_;
+  }
 
  private:
   friend class AstNodeFactory;
@@ -951,7 +994,8 @@
   Expression* label_;
   Label body_target_;
   ZoneList<Statement*>* statements_;
-  Type* compare_type_;
+  AstType* compare_type_;
+  FeedbackVectorSlot type_feedback_slot_;
 };
 
 
@@ -1241,17 +1285,27 @@
     return depth_;
   }
 
+ private:
+  int depth_ : 31;
+  int literal_index_;
+
+  friend class AstLiteralReindexer;
+
+  class IsSimpleField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
+
  protected:
   MaterializedLiteral(int literal_index, int pos, NodeType type)
-      : Expression(pos, type),
-        is_simple_(false),
-        depth_(0),
-        literal_index_(literal_index) {}
+      : Expression(pos, type), depth_(0), literal_index_(literal_index) {
+    bit_field_ |= IsSimpleField::encode(false);
+  }
 
   // A materialized literal is simple if the values consist of only
   // constants and simple object and array literals.
-  bool is_simple() const { return is_simple_; }
-  void set_is_simple(bool is_simple) { is_simple_ = is_simple; }
+  bool is_simple() const { return IsSimpleField::decode(bit_field_); }
+  void set_is_simple(bool is_simple) {
+    bit_field_ = IsSimpleField::update(bit_field_, is_simple);
+  }
   friend class CompileTimeValue;
 
   void set_depth(int depth) {
@@ -1271,19 +1325,45 @@
   // in the object literal boilerplate.
   Handle<Object> GetBoilerplateValue(Expression* expression, Isolate* isolate);
 
- private:
-  bool is_simple_ : 1;
-  int depth_ : 31;
-  int literal_index_;
-
-  friend class AstLiteralReindexer;
+  static const uint8_t kNextBitFieldIndex = IsSimpleField::kNext;
 };
 
+// Common supertype for ObjectLiteralProperty and ClassLiteralProperty
+class LiteralProperty : public ZoneObject {
+ public:
+  Expression* key() const { return key_; }
+  Expression* value() const { return value_; }
+  void set_key(Expression* e) { key_ = e; }
+  void set_value(Expression* e) { value_ = e; }
+
+  bool is_computed_name() const { return is_computed_name_; }
+
+  FeedbackVectorSlot GetSlot(int offset = 0) const {
+    DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
+    return slots_[offset];
+  }
+
+  void SetSlot(FeedbackVectorSlot slot, int offset = 0) {
+    DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
+    slots_[offset] = slot;
+  }
+
+  bool NeedsSetFunctionName() const;
+
+ protected:
+  LiteralProperty(Expression* key, Expression* value, bool is_computed_name)
+      : key_(key), value_(value), is_computed_name_(is_computed_name) {}
+
+  Expression* key_;
+  Expression* value_;
+  FeedbackVectorSlot slots_[2];
+  bool is_computed_name_;
+};
 
 // Property is used for passing information
 // about an object literal's properties from the parser
 // to the code generator.
-class ObjectLiteralProperty final : public ZoneObject {
+class ObjectLiteralProperty final : public LiteralProperty {
  public:
   enum Kind : uint8_t {
     CONSTANT,              // Property with constant value (compile time).
@@ -1294,54 +1374,29 @@
     PROTOTYPE  // Property is __proto__.
   };
 
-  Expression* key() { return key_; }
-  Expression* value() { return value_; }
-  Kind kind() { return kind_; }
-
-  void set_key(Expression* e) { key_ = e; }
-  void set_value(Expression* e) { value_ = e; }
+  Kind kind() const { return kind_; }
 
   // Type feedback information.
-  bool IsMonomorphic() { return !receiver_type_.is_null(); }
-  Handle<Map> GetReceiverType() { return receiver_type_; }
+  bool IsMonomorphic() const { return !receiver_type_.is_null(); }
+  Handle<Map> GetReceiverType() const { return receiver_type_; }
 
-  bool IsCompileTimeValue();
+  bool IsCompileTimeValue() const;
 
   void set_emit_store(bool emit_store);
-  bool emit_store();
-
-  bool is_static() const { return is_static_; }
-  bool is_computed_name() const { return is_computed_name_; }
-
-  FeedbackVectorSlot GetSlot(int offset = 0) const {
-    DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
-    return slots_[offset];
-  }
-  void SetSlot(FeedbackVectorSlot slot, int offset = 0) {
-    DCHECK_LT(offset, static_cast<int>(arraysize(slots_)));
-    slots_[offset] = slot;
-  }
+  bool emit_store() const;
 
   void set_receiver_type(Handle<Map> map) { receiver_type_ = map; }
 
-  bool NeedsSetFunctionName() const;
-
  private:
   friend class AstNodeFactory;
 
   ObjectLiteralProperty(Expression* key, Expression* value, Kind kind,
-                        bool is_static, bool is_computed_name);
-  ObjectLiteralProperty(AstValueFactory* ast_value_factory, Expression* key,
-                        Expression* value, bool is_static,
                         bool is_computed_name);
+  ObjectLiteralProperty(AstValueFactory* ast_value_factory, Expression* key,
+                        Expression* value, bool is_computed_name);
 
-  Expression* key_;
-  Expression* value_;
-  FeedbackVectorSlot slots_[2];
   Kind kind_;
   bool emit_store_;
-  bool is_static_;
-  bool is_computed_name_;
   Handle<Map> receiver_type_;
 };
 
@@ -1357,9 +1412,11 @@
   }
   int properties_count() const { return boilerplate_properties_; }
   ZoneList<Property*>* properties() const { return properties_; }
-  bool fast_elements() const { return fast_elements_; }
-  bool may_store_doubles() const { return may_store_doubles_; }
-  bool has_elements() const { return has_elements_; }
+  bool fast_elements() const { return FastElementsField::decode(bit_field_); }
+  bool may_store_doubles() const {
+    return MayStoreDoublesField::decode(bit_field_);
+  }
+  bool has_elements() const { return HasElementsField::decode(bit_field_); }
   bool has_shallow_properties() const {
     return depth() == 1 && !has_elements() && !may_store_doubles();
   }
@@ -1429,33 +1486,42 @@
                 uint32_t boilerplate_properties, int pos)
       : MaterializedLiteral(literal_index, pos, kObjectLiteral),
         boilerplate_properties_(boilerplate_properties),
-        fast_elements_(false),
-        has_elements_(false),
-        may_store_doubles_(false),
-        properties_(properties) {}
+        properties_(properties) {
+    bit_field_ |= FastElementsField::encode(false) |
+                  HasElementsField::encode(false) |
+                  MayStoreDoublesField::encode(false);
+  }
 
   static int parent_num_ids() { return MaterializedLiteral::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  uint32_t boilerplate_properties_ : 29;
-  bool fast_elements_ : 1;
-  bool has_elements_ : 1;
-  bool may_store_doubles_ : 1;
+  uint32_t boilerplate_properties_;
   FeedbackVectorSlot slot_;
   Handle<FixedArray> constant_properties_;
   ZoneList<Property*>* properties_;
+
+  class FastElementsField
+      : public BitField<bool, MaterializedLiteral::kNextBitFieldIndex, 1> {};
+  class HasElementsField : public BitField<bool, FastElementsField::kNext, 1> {
+  };
+  class MayStoreDoublesField
+      : public BitField<bool, HasElementsField::kNext, 1> {};
+
+ protected:
+  static const uint8_t kNextBitFieldIndex = MayStoreDoublesField::kNext;
 };
 
 
 // A map from property names to getter/setter pairs allocated in the zone.
 class AccessorTable
     : public base::TemplateHashMap<Literal, ObjectLiteral::Accessors,
+                                   bool (*)(void*, void*),
                                    ZoneAllocationPolicy> {
  public:
   explicit AccessorTable(Zone* zone)
       : base::TemplateHashMap<Literal, ObjectLiteral::Accessors,
-                              ZoneAllocationPolicy>(Literal::Match,
-                                                    ZoneAllocationPolicy(zone)),
+                              bool (*)(void*, void*), ZoneAllocationPolicy>(
+            Literal::Match, ZoneAllocationPolicy(zone)),
         zone_(zone) {}
 
   Iterator lookup(Literal* literal) {
@@ -1628,19 +1694,19 @@
   friend class AstNodeFactory;
 
   VariableProxy(Variable* var, int start_position, int end_position);
-  VariableProxy(const AstRawString* name, Variable::Kind variable_kind,
+  VariableProxy(const AstRawString* name, VariableKind variable_kind,
                 int start_position, int end_position);
   explicit VariableProxy(const VariableProxy* copy_from);
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  class IsThisField : public BitField8<bool, 0, 1> {};
-  class IsAssignedField : public BitField8<bool, 1, 1> {};
-  class IsResolvedField : public BitField8<bool, 2, 1> {};
-  class IsNewTargetField : public BitField8<bool, 3, 1> {};
+  class IsThisField : public BitField<bool, Expression::kNextBitFieldIndex, 1> {
+  };
+  class IsAssignedField : public BitField<bool, IsThisField::kNext, 1> {};
+  class IsResolvedField : public BitField<bool, IsAssignedField::kNext, 1> {};
+  class IsNewTargetField : public BitField<bool, IsResolvedField::kNext, 1> {};
 
-  uint8_t bit_field_;
   // Position is stored in the AstNode superclass, but VariableProxy needs to
   // know its end position too (for error messages). It cannot be inferred from
   // the variable name length because it can contain escapes.
@@ -1737,22 +1803,24 @@
   friend class AstNodeFactory;
 
   Property(Expression* obj, Expression* key, int pos)
-      : Expression(pos, kProperty),
-        bit_field_(IsForCallField::encode(false) |
-                   IsStringAccessField::encode(false) |
-                   InlineCacheStateField::encode(UNINITIALIZED)),
-        obj_(obj),
-        key_(key) {}
+      : Expression(pos, kProperty), obj_(obj), key_(key) {
+    bit_field_ |= IsForCallField::encode(false) |
+                  IsStringAccessField::encode(false) |
+                  InlineCacheStateField::encode(UNINITIALIZED);
+  }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  class IsForCallField : public BitField8<bool, 0, 1> {};
-  class IsStringAccessField : public BitField8<bool, 1, 1> {};
-  class KeyTypeField : public BitField8<IcCheckType, 2, 1> {};
-  class InlineCacheStateField : public BitField8<InlineCacheState, 3, 4> {};
+  class IsForCallField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
+  class IsStringAccessField : public BitField<bool, IsForCallField::kNext, 1> {
+  };
+  class KeyTypeField
+      : public BitField<IcCheckType, IsStringAccessField::kNext, 1> {};
+  class InlineCacheStateField
+      : public BitField<InlineCacheState, KeyTypeField::kNext, 4> {};
 
-  uint8_t bit_field_;
   FeedbackVectorSlot property_feedback_slot_;
   Expression* obj_;
   Expression* key_;
@@ -1789,15 +1857,6 @@
     return !target_.is_null();
   }
 
-  bool global_call() const {
-    VariableProxy* proxy = expression_->AsVariableProxy();
-    return proxy != NULL && proxy->var()->IsUnallocatedOrGlobalSlot();
-  }
-
-  bool known_global_function() const {
-    return global_call() && !target_.is_null();
-  }
-
   Handle<JSFunction> target() { return target_; }
 
   Handle<AllocationSite> allocation_site() { return allocation_site_; }
@@ -1867,11 +1926,12 @@
   Call(Expression* expression, ZoneList<Expression*>* arguments, int pos,
        PossiblyEval possibly_eval)
       : Expression(pos, kCall),
-        bit_field_(
-            IsUninitializedField::encode(false) |
-            IsPossiblyEvalField::encode(possibly_eval == IS_POSSIBLY_EVAL)),
         expression_(expression),
         arguments_(arguments) {
+    bit_field_ |=
+        IsUninitializedField::encode(false) |
+        IsPossiblyEvalField::encode(possibly_eval == IS_POSSIBLY_EVAL);
+
     if (expression->IsProperty()) {
       expression->AsProperty()->mark_for_call();
     }
@@ -1880,11 +1940,11 @@
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  class IsUninitializedField : public BitField8<bool, 0, 1> {};
-  class IsTailField : public BitField8<bool, 1, 1> {};
-  class IsPossiblyEvalField : public BitField8<bool, 2, 1> {};
+  class IsUninitializedField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
+  class IsTailField : public BitField<bool, IsUninitializedField::kNext, 1> {};
+  class IsPossiblyEvalField : public BitField<bool, IsTailField::kNext, 1> {};
 
-  uint8_t bit_field_;
   FeedbackVectorSlot ic_slot_;
   FeedbackVectorSlot stub_slot_;
   Expression* expression_;
@@ -1904,10 +1964,9 @@
   // Type feedback information.
   void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
                                  FeedbackVectorSlotCache* cache) {
-    callnew_feedback_slot_ = spec->AddGeneralSlot();
-    // Construct calls have two slots, one right after the other.
-    // The second slot stores the call count for monomorphic calls.
-    spec->AddGeneralSlot();
+    // CallNew stores feedback in the exact same way as Call. We can
+    // piggyback on the type feedback infrastructure for calls.
+    callnew_feedback_slot_ = spec->AddCallICSlot();
   }
 
   FeedbackVectorSlot CallNewFeedbackSlot() {
@@ -1915,7 +1974,7 @@
     return callnew_feedback_slot_;
   }
 
-  bool IsMonomorphic() const { return is_monomorphic_; }
+  bool IsMonomorphic() const { return IsMonomorphicField::decode(bit_field_); }
   Handle<JSFunction> target() const { return target_; }
   Handle<AllocationSite> allocation_site() const {
     return allocation_site_;
@@ -1928,11 +1987,13 @@
   void set_allocation_site(Handle<AllocationSite> site) {
     allocation_site_ = site;
   }
-  void set_is_monomorphic(bool monomorphic) { is_monomorphic_ = monomorphic; }
+  void set_is_monomorphic(bool monomorphic) {
+    bit_field_ = IsMonomorphicField::update(bit_field_, monomorphic);
+  }
   void set_target(Handle<JSFunction> target) { target_ = target; }
   void SetKnownGlobalTarget(Handle<JSFunction> target) {
     target_ = target;
-    is_monomorphic_ = true;
+    set_is_monomorphic(true);
   }
 
  private:
@@ -1940,19 +2001,22 @@
 
   CallNew(Expression* expression, ZoneList<Expression*>* arguments, int pos)
       : Expression(pos, kCallNew),
-        is_monomorphic_(false),
         expression_(expression),
-        arguments_(arguments) {}
+        arguments_(arguments) {
+    bit_field_ |= IsMonomorphicField::encode(false);
+  }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  bool is_monomorphic_;
   FeedbackVectorSlot callnew_feedback_slot_;
   Expression* expression_;
   ZoneList<Expression*>* arguments_;
   Handle<JSFunction> target_;
   Handle<AllocationSite> allocation_site_;
+
+  class IsMonomorphicField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
 };
 
 
@@ -1969,6 +2033,10 @@
     DCHECK(is_jsruntime());
     return context_index_;
   }
+  void set_context_index(int index) {
+    DCHECK(is_jsruntime());
+    context_index_ = index;
+  }
   const Runtime::Function* function() const {
     DCHECK(!is_jsruntime());
     return function_;
@@ -2006,7 +2074,7 @@
 
 class UnaryOperation final : public Expression {
  public:
-  Token::Value op() const { return op_; }
+  Token::Value op() const { return OperatorField::decode(bit_field_); }
   Expression* expression() const { return expression_; }
   void set_expression(Expression* e) { expression_ = e; }
 
@@ -2022,21 +2090,24 @@
   friend class AstNodeFactory;
 
   UnaryOperation(Token::Value op, Expression* expression, int pos)
-      : Expression(pos, kUnaryOperation), op_(op), expression_(expression) {
+      : Expression(pos, kUnaryOperation), expression_(expression) {
+    bit_field_ |= OperatorField::encode(op);
     DCHECK(Token::IsUnaryOp(op));
   }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  Token::Value op_;
   Expression* expression_;
+
+  class OperatorField
+      : public BitField<Token::Value, Expression::kNextBitFieldIndex, 7> {};
 };
 
 
 class BinaryOperation final : public Expression {
  public:
-  Token::Value op() const { return static_cast<Token::Value>(op_); }
+  Token::Value op() const { return OperatorField::decode(bit_field_); }
   Expression* left() const { return left_; }
   void set_left(Expression* e) { left_ = e; }
   Expression* right() const { return right_; }
@@ -2090,18 +2161,17 @@
 
   BinaryOperation(Token::Value op, Expression* left, Expression* right, int pos)
       : Expression(pos, kBinaryOperation),
-        op_(static_cast<byte>(op)),
         has_fixed_right_arg_(false),
         fixed_right_arg_value_(0),
         left_(left),
         right_(right) {
+    bit_field_ |= OperatorField::encode(op);
     DCHECK(Token::IsBinaryOp(op));
   }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  const byte op_;  // actually Token::Value
   // TODO(rossberg): the fixed arg should probably be represented as a Constant
   // type for the RHS. Currently it's actually a Maybe<int>
   bool has_fixed_right_arg_;
@@ -2110,6 +2180,9 @@
   Expression* right_;
   Handle<AllocationSite> allocation_site_;
   FeedbackVectorSlot type_feedback_slot_;
+
+  class OperatorField
+      : public BitField<Token::Value, Expression::kNextBitFieldIndex, 7> {};
 };
 
 
@@ -2132,14 +2205,14 @@
   KeyedAccessStoreMode GetStoreMode() const {
     return StoreModeField::decode(bit_field_);
   }
-  Type* type() const { return type_; }
+  AstType* type() const { return type_; }
   void set_key_type(IcCheckType type) {
     bit_field_ = KeyTypeField::update(bit_field_, type);
   }
   void set_store_mode(KeyedAccessStoreMode mode) {
     bit_field_ = StoreModeField::update(bit_field_, mode);
   }
-  void set_type(Type* type) { type_ = type; }
+  void set_type(AstType* type) { type_ = type; }
 
   static int num_ids() { return parent_num_ids() + 4; }
   BailoutId AssignmentId() const { return BailoutId(local_id(0)); }
@@ -2164,27 +2237,25 @@
   friend class AstNodeFactory;
 
   CountOperation(Token::Value op, bool is_prefix, Expression* expr, int pos)
-      : Expression(pos, kCountOperation),
-        bit_field_(
-            IsPrefixField::encode(is_prefix) | KeyTypeField::encode(ELEMENT) |
-            StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)),
-        type_(NULL),
-        expression_(expr) {}
+      : Expression(pos, kCountOperation), type_(NULL), expression_(expr) {
+    bit_field_ |=
+        IsPrefixField::encode(is_prefix) | KeyTypeField::encode(ELEMENT) |
+        StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op);
+  }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  class IsPrefixField : public BitField16<bool, 0, 1> {};
-  class KeyTypeField : public BitField16<IcCheckType, 1, 1> {};
-  class StoreModeField : public BitField16<KeyedAccessStoreMode, 2, 3> {};
-  class TokenField : public BitField16<Token::Value, 5, 8> {};
+  class IsPrefixField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
+  class KeyTypeField : public BitField<IcCheckType, IsPrefixField::kNext, 1> {};
+  class StoreModeField
+      : public BitField<KeyedAccessStoreMode, KeyTypeField::kNext, 3> {};
+  class TokenField : public BitField<Token::Value, StoreModeField::kNext, 7> {};
 
-  // Starts with 16-bit field, which should get packed together with
-  // Expression's trailing 16-bit field.
-  uint16_t bit_field_;
   FeedbackVectorSlot slot_;
   FeedbackVectorSlot binary_operation_slot_;
-  Type* type_;
+  AstType* type_;
   Expression* expression_;
   SmallMapList receiver_types_;
 };
@@ -2192,7 +2263,7 @@
 
 class CompareOperation final : public Expression {
  public:
-  Token::Value op() const { return op_; }
+  Token::Value op() const { return OperatorField::decode(bit_field_); }
   Expression* left() const { return left_; }
   Expression* right() const { return right_; }
 
@@ -2204,8 +2275,18 @@
   TypeFeedbackId CompareOperationFeedbackId() const {
     return TypeFeedbackId(local_id(0));
   }
-  Type* combined_type() const { return combined_type_; }
-  void set_combined_type(Type* type) { combined_type_ = type; }
+  AstType* combined_type() const { return combined_type_; }
+  void set_combined_type(AstType* type) { combined_type_ = type; }
+
+  // CompareOperation will have both a slot in the feedback vector and the
+  // TypeFeedbackId to record the type information. TypeFeedbackId is used
+  // by full codegen and the feedback vector slot is used by interpreter.
+  void AssignFeedbackVectorSlots(Isolate* isolate, FeedbackVectorSpec* spec,
+                                 FeedbackVectorSlotCache* cache);
+
+  FeedbackVectorSlot CompareOperationFeedbackSlot() const {
+    return type_feedback_slot_;
+  }
 
   // Match special cases.
   bool IsLiteralCompareTypeof(Expression** expr, Handle<String>* check);
@@ -2218,21 +2299,23 @@
   CompareOperation(Token::Value op, Expression* left, Expression* right,
                    int pos)
       : Expression(pos, kCompareOperation),
-        op_(op),
         left_(left),
         right_(right),
-        combined_type_(Type::None()) {
+        combined_type_(AstType::None()) {
+    bit_field_ |= OperatorField::encode(op);
     DCHECK(Token::IsCompareOp(op));
   }
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  Token::Value op_;
   Expression* left_;
   Expression* right_;
 
-  Type* combined_type_;
+  AstType* combined_type_;
+  FeedbackVectorSlot type_feedback_slot_;
+  class OperatorField
+      : public BitField<Token::Value, Expression::kNextBitFieldIndex, 7> {};
 };
 
 
@@ -2356,17 +2439,14 @@
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  class IsUninitializedField : public BitField16<bool, 0, 1> {};
+  class IsUninitializedField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
   class KeyTypeField
-      : public BitField16<IcCheckType, IsUninitializedField::kNext, 1> {};
+      : public BitField<IcCheckType, IsUninitializedField::kNext, 1> {};
   class StoreModeField
-      : public BitField16<KeyedAccessStoreMode, KeyTypeField::kNext, 3> {};
-  class TokenField : public BitField16<Token::Value, StoreModeField::kNext, 8> {
-  };
+      : public BitField<KeyedAccessStoreMode, KeyTypeField::kNext, 3> {};
+  class TokenField : public BitField<Token::Value, StoreModeField::kNext, 7> {};
 
-  // Starts with 16-bit field, which should get packed together with
-  // Expression's trailing 16-bit field.
-  uint16_t bit_field_;
   FeedbackVectorSlot slot_;
   Expression* target_;
   Expression* value_;
@@ -2393,14 +2473,14 @@
 class RewritableExpression final : public Expression {
  public:
   Expression* expression() const { return expr_; }
-  bool is_rewritten() const { return is_rewritten_; }
+  bool is_rewritten() const { return IsRewrittenField::decode(bit_field_); }
 
   void Rewrite(Expression* new_expression) {
     DCHECK(!is_rewritten());
     DCHECK_NOT_NULL(new_expression);
     DCHECK(!new_expression->IsRewritableExpression());
     expr_ = new_expression;
-    is_rewritten_ = true;
+    bit_field_ = IsRewrittenField::update(bit_field_, true);
   }
 
   static int num_ids() { return parent_num_ids(); }
@@ -2410,15 +2490,17 @@
 
   explicit RewritableExpression(Expression* expression)
       : Expression(expression->position(), kRewritableExpression),
-        is_rewritten_(false),
         expr_(expression) {
+    bit_field_ |= IsRewrittenField::encode(false);
     DCHECK(!expression->IsRewritableExpression());
   }
 
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
 
-  bool is_rewritten_;
   Expression* expr_;
+
+  class IsRewrittenField
+      : public BitField<bool, Expression::kNextBitFieldIndex, 1> {};
 };
 
 // Our Yield is different from the JS yield in that it "returns" its argument as
@@ -2430,8 +2512,11 @@
 
   Expression* generator_object() const { return generator_object_; }
   Expression* expression() const { return expression_; }
+  OnException on_exception() const {
+    return OnExceptionField::decode(bit_field_);
+  }
   bool rethrow_on_exception() const {
-    return on_exception_ == kOnExceptionRethrow;
+    return on_exception() == kOnExceptionRethrow;
   }
   int yield_id() const { return yield_id_; }
 
@@ -2445,15 +2530,18 @@
   Yield(Expression* generator_object, Expression* expression, int pos,
         OnException on_exception)
       : Expression(pos, kYield),
-        on_exception_(on_exception),
         yield_id_(-1),
         generator_object_(generator_object),
-        expression_(expression) {}
+        expression_(expression) {
+    bit_field_ |= OnExceptionField::encode(on_exception);
+  }
 
-  OnException on_exception_;
   int yield_id_;
   Expression* generator_object_;
   Expression* expression_;
+
+  class OnExceptionField
+      : public BitField<OnException, Expression::kNextBitFieldIndex, 1> {};
 };
 
 
@@ -2547,14 +2635,14 @@
     inferred_name_ = Handle<String>();
   }
 
-  bool pretenure() const { return Pretenure::decode(bitfield_); }
-  void set_pretenure() { bitfield_ = Pretenure::update(bitfield_, true); }
+  bool pretenure() const { return Pretenure::decode(bit_field_); }
+  void set_pretenure() { bit_field_ = Pretenure::update(bit_field_, true); }
 
   bool has_duplicate_parameters() const {
-    return HasDuplicateParameters::decode(bitfield_);
+    return HasDuplicateParameters::decode(bit_field_);
   }
 
-  bool is_function() const { return IsFunction::decode(bitfield_); }
+  bool is_function() const { return IsFunction::decode(bit_field_); }
 
   // This is used as a heuristic on when to eagerly compile a function
   // literal. We consider the following constructs as hints that the
@@ -2562,25 +2650,25 @@
   // - (function() { ... })();
   // - var x = function() { ... }();
   bool should_eager_compile() const {
-    return ShouldEagerCompile::decode(bitfield_);
+    return ShouldEagerCompile::decode(bit_field_);
   }
   void set_should_eager_compile() {
-    bitfield_ = ShouldEagerCompile::update(bitfield_, true);
+    bit_field_ = ShouldEagerCompile::update(bit_field_, true);
   }
 
   // A hint that we expect this function to be called (exactly) once,
   // i.e. we suspect it's an initialization function.
   bool should_be_used_once_hint() const {
-    return ShouldBeUsedOnceHint::decode(bitfield_);
+    return ShouldNotBeUsedOnceHintField::decode(bit_field_);
   }
   void set_should_be_used_once_hint() {
-    bitfield_ = ShouldBeUsedOnceHint::update(bitfield_, true);
+    bit_field_ = ShouldNotBeUsedOnceHintField::update(bit_field_, true);
   }
 
   FunctionType function_type() const {
-    return FunctionTypeBits::decode(bitfield_);
+    return FunctionTypeBits::decode(bit_field_);
   }
-  FunctionKind kind() const { return FunctionKindBits::decode(bitfield_); }
+  FunctionKind kind() const;
 
   int ast_node_count() { return ast_properties_.node_count(); }
   AstProperties::Flags flags() const { return ast_properties_.flags(); }
@@ -2590,10 +2678,12 @@
   const FeedbackVectorSpec* feedback_vector_spec() const {
     return ast_properties_.get_spec();
   }
-  bool dont_optimize() { return dont_optimize_reason_ != kNoReason; }
-  BailoutReason dont_optimize_reason() { return dont_optimize_reason_; }
+  bool dont_optimize() { return dont_optimize_reason() != kNoReason; }
+  BailoutReason dont_optimize_reason() {
+    return DontOptimizeReasonField::decode(bit_field_);
+  }
   void set_dont_optimize_reason(BailoutReason reason) {
-    dont_optimize_reason_ = reason;
+    bit_field_ = DontOptimizeReasonField::update(bit_field_, reason);
   }
 
   bool IsAnonymousFunctionDefinition() const {
@@ -2603,6 +2693,21 @@
   int yield_count() { return yield_count_; }
   void set_yield_count(int yield_count) { yield_count_ = yield_count; }
 
+  bool requires_class_field_init() {
+    return RequiresClassFieldInit::decode(bit_field_);
+  }
+  void set_requires_class_field_init(bool requires_class_field_init) {
+    bit_field_ =
+        RequiresClassFieldInit::update(bit_field_, requires_class_field_init);
+  }
+  bool is_class_field_initializer() {
+    return IsClassFieldInitializer::decode(bit_field_);
+  }
+  void set_is_class_field_initializer(bool is_class_field_initializer) {
+    bit_field_ =
+        IsClassFieldInitializer::update(bit_field_, is_class_field_initializer);
+  }
+
  private:
   friend class AstNodeFactory;
 
@@ -2612,10 +2717,9 @@
                   int expected_property_count, int parameter_count,
                   FunctionType function_type,
                   ParameterFlag has_duplicate_parameters,
-                  EagerCompileHint eager_compile_hint, FunctionKind kind,
-                  int position, bool is_function)
+                  EagerCompileHint eager_compile_hint, int position,
+                  bool is_function)
       : Expression(position, kFunctionLiteral),
-        dont_optimize_reason_(kNoReason),
         materialized_literal_count_(materialized_literal_count),
         expected_property_count_(expected_property_count),
         parameter_count_(parameter_count),
@@ -2626,29 +2730,32 @@
         body_(body),
         raw_inferred_name_(ast_value_factory->empty_string()),
         ast_properties_(zone) {
-    bitfield_ =
+    bit_field_ |=
         FunctionTypeBits::encode(function_type) | Pretenure::encode(false) |
         HasDuplicateParameters::encode(has_duplicate_parameters ==
                                        kHasDuplicateParameters) |
         IsFunction::encode(is_function) |
         ShouldEagerCompile::encode(eager_compile_hint == kShouldEagerCompile) |
-        FunctionKindBits::encode(kind) | ShouldBeUsedOnceHint::encode(false);
-    DCHECK(IsValidFunctionKind(kind));
+        RequiresClassFieldInit::encode(false) |
+        ShouldNotBeUsedOnceHintField::encode(false) |
+        DontOptimizeReasonField::encode(kNoReason) |
+        IsClassFieldInitializer::encode(false);
   }
 
-  class FunctionTypeBits : public BitField16<FunctionType, 0, 2> {};
-  class Pretenure : public BitField16<bool, 2, 1> {};
-  class HasDuplicateParameters : public BitField16<bool, 3, 1> {};
-  class IsFunction : public BitField16<bool, 4, 1> {};
-  class ShouldEagerCompile : public BitField16<bool, 5, 1> {};
-  class ShouldBeUsedOnceHint : public BitField16<bool, 6, 1> {};
-  class FunctionKindBits : public BitField16<FunctionKind, 7, 9> {};
-
-  // Start with 16-bit field, which should get packed together
-  // with Expression's trailing 16-bit field.
-  uint16_t bitfield_;
-
-  BailoutReason dont_optimize_reason_;
+  class FunctionTypeBits
+      : public BitField<FunctionType, Expression::kNextBitFieldIndex, 2> {};
+  class Pretenure : public BitField<bool, FunctionTypeBits::kNext, 1> {};
+  class HasDuplicateParameters : public BitField<bool, Pretenure::kNext, 1> {};
+  class IsFunction : public BitField<bool, HasDuplicateParameters::kNext, 1> {};
+  class ShouldEagerCompile : public BitField<bool, IsFunction::kNext, 1> {};
+  class ShouldNotBeUsedOnceHintField
+      : public BitField<bool, ShouldEagerCompile::kNext, 1> {};
+  class RequiresClassFieldInit
+      : public BitField<bool, ShouldNotBeUsedOnceHintField::kNext, 1> {};
+  class IsClassFieldInitializer
+      : public BitField<bool, RequiresClassFieldInit::kNext, 1> {};
+  class DontOptimizeReasonField
+      : public BitField<BailoutReason, IsClassFieldInitializer::kNext, 8> {};
 
   int materialized_literal_count_;
   int expected_property_count_;
@@ -2664,10 +2771,29 @@
   AstProperties ast_properties_;
 };
 
+// Property is used for passing information
+// about a class literal's properties from the parser to the code generator.
+class ClassLiteralProperty final : public LiteralProperty {
+ public:
+  enum Kind : uint8_t { METHOD, GETTER, SETTER, FIELD };
+
+  Kind kind() const { return kind_; }
+
+  bool is_static() const { return is_static_; }
+
+ private:
+  friend class AstNodeFactory;
+
+  ClassLiteralProperty(Expression* key, Expression* value, Kind kind,
+                       bool is_static, bool is_computed_name);
+
+  Kind kind_;
+  bool is_static_;
+};
 
 class ClassLiteral final : public Expression {
  public:
-  typedef ObjectLiteralProperty Property;
+  typedef ClassLiteralProperty Property;
 
   VariableProxy* class_variable_proxy() const { return class_variable_proxy_; }
   Expression* extends() const { return extends_; }
@@ -2678,6 +2804,13 @@
   int start_position() const { return position(); }
   int end_position() const { return end_position_; }
 
+  VariableProxy* static_initializer_proxy() const {
+    return static_initializer_proxy_;
+  }
+  void set_static_initializer_proxy(VariableProxy* proxy) {
+    static_initializer_proxy_ = proxy;
+  }
+
   BailoutId CreateLiteralId() const { return BailoutId(local_id(0)); }
   BailoutId PrototypeId() { return BailoutId(local_id(1)); }
 
@@ -2712,7 +2845,8 @@
         class_variable_proxy_(class_variable_proxy),
         extends_(extends),
         constructor_(constructor),
-        properties_(properties) {}
+        properties_(properties),
+        static_initializer_proxy_(nullptr) {}
 
   static int parent_num_ids() { return Expression::num_ids(); }
   int local_id(int n) const { return base_id() + parent_num_ids() + n; }
@@ -2724,6 +2858,7 @@
   Expression* extends_;
   FunctionLiteral* constructor_;
   ZoneList<Property*>* properties_;
+  VariableProxy* static_initializer_proxy_;
 };
 
 
@@ -3097,6 +3232,16 @@
         try_block, scope, variable, catch_block, HandlerTable::DESUGARING, pos);
   }
 
+  TryCatchStatement* NewTryCatchStatementForAsyncAwait(Block* try_block,
+                                                       Scope* scope,
+                                                       Variable* variable,
+                                                       Block* catch_block,
+                                                       int pos) {
+    return new (zone_)
+        TryCatchStatement(try_block, scope, variable, catch_block,
+                          HandlerTable::ASYNC_AWAIT, pos);
+  }
+
   TryFinallyStatement* NewTryFinallyStatement(Block* try_block,
                                               Block* finally_block, int pos) {
     return new (zone_) TryFinallyStatement(try_block, finally_block, pos);
@@ -3110,9 +3255,9 @@
     return new (zone_) EmptyStatement(pos);
   }
 
-  SloppyBlockFunctionStatement* NewSloppyBlockFunctionStatement(
-      Statement* statement, Scope* scope) {
-    return new (zone_) SloppyBlockFunctionStatement(statement, scope);
+  SloppyBlockFunctionStatement* NewSloppyBlockFunctionStatement(Scope* scope) {
+    return new (zone_) SloppyBlockFunctionStatement(
+        NewEmptyStatement(kNoSourcePosition), scope);
   }
 
   CaseClause* NewCaseClause(
@@ -3163,17 +3308,16 @@
 
   ObjectLiteral::Property* NewObjectLiteralProperty(
       Expression* key, Expression* value, ObjectLiteralProperty::Kind kind,
-      bool is_static, bool is_computed_name) {
+      bool is_computed_name) {
     return new (zone_)
-        ObjectLiteral::Property(key, value, kind, is_static, is_computed_name);
+        ObjectLiteral::Property(key, value, kind, is_computed_name);
   }
 
   ObjectLiteral::Property* NewObjectLiteralProperty(Expression* key,
                                                     Expression* value,
-                                                    bool is_static,
                                                     bool is_computed_name) {
     return new (zone_) ObjectLiteral::Property(ast_value_factory_, key, value,
-                                               is_static, is_computed_name);
+                                               is_computed_name);
   }
 
   RegExpLiteral* NewRegExpLiteral(const AstRawString* pattern, int flags,
@@ -3201,7 +3345,7 @@
   }
 
   VariableProxy* NewVariableProxy(const AstRawString* name,
-                                  Variable::Kind variable_kind,
+                                  VariableKind variable_kind,
                                   int start_position = kNoSourcePosition,
                                   int end_position = kNoSourcePosition) {
     DCHECK_NOT_NULL(name);
@@ -3318,13 +3462,12 @@
       int expected_property_count, int parameter_count,
       FunctionLiteral::ParameterFlag has_duplicate_parameters,
       FunctionLiteral::FunctionType function_type,
-      FunctionLiteral::EagerCompileHint eager_compile_hint, FunctionKind kind,
-      int position) {
-    return new (zone_) FunctionLiteral(
-        zone_, name, ast_value_factory_, scope, body,
-        materialized_literal_count, expected_property_count, parameter_count,
-        function_type, has_duplicate_parameters, eager_compile_hint, kind,
-        position, true);
+      FunctionLiteral::EagerCompileHint eager_compile_hint, int position) {
+    return new (zone_) FunctionLiteral(zone_, name, ast_value_factory_, scope,
+                                       body, materialized_literal_count,
+                                       expected_property_count, parameter_count,
+                                       function_type, has_duplicate_parameters,
+                                       eager_compile_hint, position, true);
   }
 
   // Creates a FunctionLiteral representing a top-level script, the
@@ -3332,19 +3475,26 @@
   // the Function constructor.
   FunctionLiteral* NewScriptOrEvalFunctionLiteral(
       DeclarationScope* scope, ZoneList<Statement*>* body,
-      int materialized_literal_count, int expected_property_count) {
+      int materialized_literal_count, int expected_property_count,
+      int parameter_count) {
     return new (zone_) FunctionLiteral(
         zone_, ast_value_factory_->empty_string(), ast_value_factory_, scope,
-        body, materialized_literal_count, expected_property_count, 0,
-        FunctionLiteral::kAnonymousExpression,
+        body, materialized_literal_count, expected_property_count,
+        parameter_count, FunctionLiteral::kAnonymousExpression,
         FunctionLiteral::kNoDuplicateParameters,
-        FunctionLiteral::kShouldLazyCompile, FunctionKind::kNormalFunction, 0,
-        false);
+        FunctionLiteral::kShouldLazyCompile, 0, false);
+  }
+
+  ClassLiteral::Property* NewClassLiteralProperty(
+      Expression* key, Expression* value, ClassLiteralProperty::Kind kind,
+      bool is_static, bool is_computed_name) {
+    return new (zone_)
+        ClassLiteral::Property(key, value, kind, is_static, is_computed_name);
   }
 
   ClassLiteral* NewClassLiteral(VariableProxy* proxy, Expression* extends,
                                 FunctionLiteral* constructor,
-                                ZoneList<ObjectLiteral::Property*>* properties,
+                                ZoneList<ClassLiteral::Property*>* properties,
                                 int start_position, int end_position) {
     return new (zone_) ClassLiteral(proxy, extends, constructor, properties,
                                     start_position, end_position);
@@ -3396,7 +3546,8 @@
       }
     }
 
-    ~BodyScope() { factory_->zone_ = prev_zone_; }
+    void Reset() { factory_->zone_ = prev_zone_; }
+    ~BodyScope() { Reset(); }
 
    private:
     AstNodeFactory* factory_;
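
The ast.h hunks above all apply the same refactoring: each node class drops its private uint8_t/uint16_t bit_field_ and its fixed-position BitField8/BitField16 helpers, and instead chains BitField specializations off the parent's kNextBitFieldIndex so every flag lives in the single bit_field_ word inherited from the AST base class. A minimal standalone sketch of that chaining pattern (the BitField stand-in and the node/field names below are illustrative, not V8's actual definitions):

#include <cassert>
#include <cstdint>

// Minimal stand-in for V8's BitField template: packs a value of type T into
// `size` bits starting at bit `shift`, and exposes kNext so the next field
// can continue where this one ends.
template <class T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((1u << size) - 1u) << shift;
  static constexpr int kNext = shift + size;
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t bits) {
    return static_cast<T>((bits & kMask) >> shift);
  }
  static uint32_t update(uint32_t bits, T value) {
    return (bits & ~kMask) | encode(value);
  }
};

// The base class owns the single bit_field_ word and publishes where subclass
// bits may start, mirroring Expression::kNextBitFieldIndex in the hunks above.
struct ExpressionLike {
  class IsParenthesizedField : public BitField<bool, 0, 1> {};

 protected:
  static constexpr int kNextBitFieldIndex = IsParenthesizedField::kNext;
  uint32_t bit_field_ = 0;
};

// A subclass chains its fields off the parent's kNextBitFieldIndex instead of
// hard-coding bit positions in a private uint8_t of its own.
struct CallLike : ExpressionLike {
  class IsTailField : public BitField<bool, kNextBitFieldIndex, 1> {};
  class IsPossiblyEvalField : public BitField<bool, IsTailField::kNext, 1> {};

  explicit CallLike(bool possibly_eval) {
    bit_field_ |= IsTailField::encode(false) |
                  IsPossiblyEvalField::encode(possibly_eval);
  }
  bool possibly_eval() const { return IsPossiblyEvalField::decode(bit_field_); }
};

int main() {
  CallLike call(true);
  assert(call.possibly_eval());
  return 0;
}
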
diff --git a/src/ast/compile-time-value.cc b/src/ast/compile-time-value.cc
new file mode 100644
index 0000000..eda536b
--- /dev/null
+++ b/src/ast/compile-time-value.cc
@@ -0,0 +1,56 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/ast/compile-time-value.h"
+
+#include "src/ast/ast.h"
+#include "src/factory.h"
+#include "src/handles-inl.h"
+#include "src/isolate.h"
+#include "src/objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+bool CompileTimeValue::IsCompileTimeValue(Expression* expression) {
+  if (expression->IsLiteral()) return true;
+  MaterializedLiteral* lit = expression->AsMaterializedLiteral();
+  return lit != NULL && lit->is_simple();
+}
+
+Handle<FixedArray> CompileTimeValue::GetValue(Isolate* isolate,
+                                              Expression* expression) {
+  Factory* factory = isolate->factory();
+  DCHECK(IsCompileTimeValue(expression));
+  Handle<FixedArray> result = factory->NewFixedArray(2, TENURED);
+  ObjectLiteral* object_literal = expression->AsObjectLiteral();
+  if (object_literal != NULL) {
+    DCHECK(object_literal->is_simple());
+    if (object_literal->fast_elements()) {
+      result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_FAST_ELEMENTS));
+    } else {
+      result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_SLOW_ELEMENTS));
+    }
+    result->set(kElementsSlot, *object_literal->constant_properties());
+  } else {
+    ArrayLiteral* array_literal = expression->AsArrayLiteral();
+    DCHECK(array_literal != NULL && array_literal->is_simple());
+    result->set(kLiteralTypeSlot, Smi::FromInt(ARRAY_LITERAL));
+    result->set(kElementsSlot, *array_literal->constant_elements());
+  }
+  return result;
+}
+
+CompileTimeValue::LiteralType CompileTimeValue::GetLiteralType(
+    Handle<FixedArray> value) {
+  Smi* literal_type = Smi::cast(value->get(kLiteralTypeSlot));
+  return static_cast<LiteralType>(literal_type->value());
+}
+
+Handle<FixedArray> CompileTimeValue::GetElements(Handle<FixedArray> value) {
+  return Handle<FixedArray>(FixedArray::cast(value->get(kElementsSlot)));
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/ast/compile-time-value.h b/src/ast/compile-time-value.h
new file mode 100644
index 0000000..27351b7
--- /dev/null
+++ b/src/ast/compile-time-value.h
@@ -0,0 +1,45 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_AST_COMPILE_TIME_VALUE
+#define V8_AST_COMPILE_TIME_VALUE
+
+#include "src/allocation.h"
+#include "src/globals.h"
+
+namespace v8 {
+namespace internal {
+
+class Expression;
+
+// Support for handling complex values (array and object literals) that
+// can be fully handled at compile time.
+class CompileTimeValue : public AllStatic {
+ public:
+  enum LiteralType {
+    OBJECT_LITERAL_FAST_ELEMENTS,
+    OBJECT_LITERAL_SLOW_ELEMENTS,
+    ARRAY_LITERAL
+  };
+
+  static bool IsCompileTimeValue(Expression* expression);
+
+  // Get the value as a compile time value.
+  static Handle<FixedArray> GetValue(Isolate* isolate, Expression* expression);
+
+  // Get the type of a compile time value returned by GetValue().
+  static LiteralType GetLiteralType(Handle<FixedArray> value);
+
+  // Get the elements array of a compile time value returned by GetValue().
+  static Handle<FixedArray> GetElements(Handle<FixedArray> value);
+
+ private:
+  static const int kLiteralTypeSlot = 0;
+  static const int kElementsSlot = 1;
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_AST_COMPILE_TIME_VALUE
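
CompileTimeValue above packs a fully-constant object or array literal into a two-slot FixedArray (literal type plus constant elements). A hypothetical caller-side sketch of the API it declares; it assumes the V8-internal Isolate/Expression/Handle<FixedArray> types from this file, and the function name UseCompileTimeValue is made up for illustration rather than taken from this CL:

// Hypothetical consumer of the CompileTimeValue API declared above.
void UseCompileTimeValue(Isolate* isolate, Expression* expr) {
  if (!CompileTimeValue::IsCompileTimeValue(expr)) return;
  Handle<FixedArray> packed = CompileTimeValue::GetValue(isolate, expr);
  switch (CompileTimeValue::GetLiteralType(packed)) {
    case CompileTimeValue::OBJECT_LITERAL_FAST_ELEMENTS:
    case CompileTimeValue::OBJECT_LITERAL_SLOW_ELEMENTS:
      // GetElements() returns the object literal's constant properties.
      break;
    case CompileTimeValue::ARRAY_LITERAL:
      // GetElements() returns the array literal's constant elements.
      break;
  }
  Handle<FixedArray> elements = CompileTimeValue::GetElements(packed);
  (void)elements;
}
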
diff --git a/src/ast/context-slot-cache.cc b/src/ast/context-slot-cache.cc
index 43bd6d6..b1387e1 100644
--- a/src/ast/context-slot-cache.cc
+++ b/src/ast/context-slot-cache.cc
@@ -8,6 +8,13 @@
 
 #include "src/ast/scopes.h"
 #include "src/bootstrapper.h"
+// FIXME(mstarzinger, marja): This is weird, but required because of the missing
+// (disallowed) include: src/factory.h -> src/objects-inl.h
+#include "src/objects-inl.h"
+// FIXME(mstarzinger, marja): This is weird, but required because of the missing
+// (disallowed) include: src/type-feedback-vector.h ->
+// src/type-feedback-vector-inl.h
+#include "src/type-feedback-vector-inl.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/ast/context-slot-cache.h b/src/ast/context-slot-cache.h
index 8e9d1f7..4345a65 100644
--- a/src/ast/context-slot-cache.h
+++ b/src/ast/context-slot-cache.h
@@ -7,7 +7,6 @@
 
 #include "src/allocation.h"
 #include "src/ast/modules.h"
-#include "src/ast/variables.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/ast/modules.cc b/src/ast/modules.cc
index cd47c00..2d28d55 100644
--- a/src/ast/modules.cc
+++ b/src/ast/modules.cc
@@ -12,49 +12,35 @@
 void ModuleDescriptor::AddImport(
     const AstRawString* import_name, const AstRawString* local_name,
     const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
-  DCHECK_NOT_NULL(import_name);
-  DCHECK_NOT_NULL(local_name);
-  DCHECK_NOT_NULL(module_request);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
+  Entry* entry = new (zone) Entry(loc);
   entry->local_name = local_name;
   entry->import_name = import_name;
-  entry->module_request = module_request;
-  regular_imports_.insert(std::make_pair(entry->local_name, entry));
-  // We don't care if there's already an entry for this local name, as in that
-  // case we will report an error when declaring the variable.
+  entry->module_request = AddModuleRequest(module_request);
+  AddRegularImport(entry);
 }
 
 
 void ModuleDescriptor::AddStarImport(
     const AstRawString* local_name, const AstRawString* module_request,
     Scanner::Location loc, Zone* zone) {
-  DCHECK_NOT_NULL(local_name);
-  DCHECK_NOT_NULL(module_request);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
+  Entry* entry = new (zone) Entry(loc);
   entry->local_name = local_name;
-  entry->module_request = module_request;
-  special_imports_.Add(entry, zone);
+  entry->module_request = AddModuleRequest(module_request);
+  AddNamespaceImport(entry, zone);
 }
 
-
-void ModuleDescriptor::AddEmptyImport(
-    const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
-  DCHECK_NOT_NULL(module_request);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
-  entry->module_request = module_request;
-  special_imports_.Add(entry, zone);
+void ModuleDescriptor::AddEmptyImport(const AstRawString* module_request) {
+  AddModuleRequest(module_request);
 }
 
 
 void ModuleDescriptor::AddExport(
     const AstRawString* local_name, const AstRawString* export_name,
     Scanner::Location loc, Zone* zone) {
-  DCHECK_NOT_NULL(local_name);
-  DCHECK_NOT_NULL(export_name);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
+  Entry* entry = new (zone) Entry(loc);
   entry->export_name = export_name;
   entry->local_name = local_name;
-  exports_.Add(entry, zone);
+  AddRegularExport(entry);
 }
 
 
@@ -63,40 +49,186 @@
     const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
   DCHECK_NOT_NULL(import_name);
   DCHECK_NOT_NULL(export_name);
-  DCHECK_NOT_NULL(module_request);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
+  Entry* entry = new (zone) Entry(loc);
   entry->export_name = export_name;
   entry->import_name = import_name;
-  entry->module_request = module_request;
-  exports_.Add(entry, zone);
+  entry->module_request = AddModuleRequest(module_request);
+  AddSpecialExport(entry, zone);
 }
 
 
 void ModuleDescriptor::AddStarExport(
     const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
-  DCHECK_NOT_NULL(module_request);
-  ModuleEntry* entry = new (zone) ModuleEntry(loc);
-  entry->module_request = module_request;
-  exports_.Add(entry, zone);
+  Entry* entry = new (zone) Entry(loc);
+  entry->module_request = AddModuleRequest(module_request);
+  AddSpecialExport(entry, zone);
 }
 
-void ModuleDescriptor::MakeIndirectExportsExplicit() {
-  for (auto entry : exports_) {
-    if (entry->export_name == nullptr) continue;
-    if (entry->import_name != nullptr) continue;
-    DCHECK_NOT_NULL(entry->local_name);
-    auto it = regular_imports_.find(entry->local_name);
-    if (it != regular_imports_.end()) {
-      // Found an indirect export.
-      DCHECK_NOT_NULL(it->second->module_request);
-      DCHECK_NOT_NULL(it->second->import_name);
-      entry->import_name = it->second->import_name;
-      entry->module_request = it->second->module_request;
-      entry->local_name = nullptr;
+namespace {
+
+Handle<Object> ToStringOrUndefined(Isolate* isolate, const AstRawString* s) {
+  return (s == nullptr)
+             ? Handle<Object>::cast(isolate->factory()->undefined_value())
+             : Handle<Object>::cast(s->string());
+}
+
+const AstRawString* FromStringOrUndefined(Isolate* isolate,
+                                          AstValueFactory* avfactory,
+                                          Handle<Object> object) {
+  if (object->IsUndefined(isolate)) return nullptr;
+  return avfactory->GetString(Handle<String>::cast(object));
+}
+
+}  // namespace
+
+Handle<ModuleInfoEntry> ModuleDescriptor::Entry::Serialize(
+    Isolate* isolate) const {
+  CHECK(Smi::IsValid(module_request));  // TODO(neis): Check earlier?
+  return ModuleInfoEntry::New(
+      isolate, ToStringOrUndefined(isolate, export_name),
+      ToStringOrUndefined(isolate, local_name),
+      ToStringOrUndefined(isolate, import_name),
+      Handle<Object>(Smi::FromInt(module_request), isolate));
+}
+
+ModuleDescriptor::Entry* ModuleDescriptor::Entry::Deserialize(
+    Isolate* isolate, AstValueFactory* avfactory,
+    Handle<ModuleInfoEntry> entry) {
+  Entry* result = new (avfactory->zone()) Entry(Scanner::Location::invalid());
+  result->export_name = FromStringOrUndefined(
+      isolate, avfactory, handle(entry->export_name(), isolate));
+  result->local_name = FromStringOrUndefined(
+      isolate, avfactory, handle(entry->local_name(), isolate));
+  result->import_name = FromStringOrUndefined(
+      isolate, avfactory, handle(entry->import_name(), isolate));
+  result->module_request = Smi::cast(entry->module_request())->value();
+  return result;
+}
+
+Handle<FixedArray> ModuleDescriptor::SerializeRegularExports(Isolate* isolate,
+                                                             Zone* zone) const {
+  // We serialize regular exports in a way that lets us later iterate over their
+  // local names and for each local name immediately access all its export
+  // names.  (Regular exports have neither import name nor module request.)
+
+  ZoneVector<Handle<Object>> data(zone);
+  data.reserve(2 * regular_exports_.size());
+
+  for (auto it = regular_exports_.begin(); it != regular_exports_.end();) {
+    // Find out how many export names this local name has.
+    auto next = it;
+    int size = 0;
+    do {
+      ++next;
+      ++size;
+    } while (next != regular_exports_.end() && next->first == it->first);
+
+    Handle<FixedArray> export_names = isolate->factory()->NewFixedArray(size);
+    data.push_back(it->second->local_name->string());
+    data.push_back(export_names);
+
+    // Collect the export names.
+    int i = 0;
+    for (; it != next; ++it) {
+      export_names->set(i++, *it->second->export_name->string());
+    }
+    DCHECK_EQ(i, size);
+
+    // Continue with the next distinct key.
+    DCHECK(it == next);
+  }
+
+  // We cannot create the FixedArray earlier because we only now know the
+  // precise size (the number of unique keys in regular_exports).
+  int size = static_cast<int>(data.size());
+  Handle<FixedArray> result = isolate->factory()->NewFixedArray(size);
+  for (int i = 0; i < size; ++i) {
+    result->set(i, *data[i]);
+  }
+  return result;
+}
+
+void ModuleDescriptor::DeserializeRegularExports(Isolate* isolate,
+                                                 AstValueFactory* avfactory,
+                                                 Handle<FixedArray> data) {
+  for (int i = 0, length_i = data->length(); i < length_i;) {
+    Handle<String> local_name(String::cast(data->get(i++)), isolate);
+    Handle<FixedArray> export_names(FixedArray::cast(data->get(i++)), isolate);
+
+    for (int j = 0, length_j = export_names->length(); j < length_j; ++j) {
+      Handle<String> export_name(String::cast(export_names->get(j)), isolate);
+
+      Entry* entry =
+          new (avfactory->zone()) Entry(Scanner::Location::invalid());
+      entry->local_name = avfactory->GetString(local_name);
+      entry->export_name = avfactory->GetString(export_name);
+
+      AddRegularExport(entry);
     }
   }
 }
 
+void ModuleDescriptor::MakeIndirectExportsExplicit(Zone* zone) {
+  for (auto it = regular_exports_.begin(); it != regular_exports_.end();) {
+    Entry* entry = it->second;
+    DCHECK_NOT_NULL(entry->local_name);
+    auto import = regular_imports_.find(entry->local_name);
+    if (import != regular_imports_.end()) {
+      // Found an indirect export.  Patch export entry and move it from regular
+      // to special.
+      DCHECK_NULL(entry->import_name);
+      DCHECK_LT(entry->module_request, 0);
+      DCHECK_NOT_NULL(import->second->import_name);
+      DCHECK_LE(0, import->second->module_request);
+      DCHECK_LT(import->second->module_request,
+                static_cast<int>(module_requests_.size()));
+      entry->import_name = import->second->import_name;
+      entry->module_request = import->second->module_request;
+      entry->local_name = nullptr;
+      AddSpecialExport(entry, zone);
+      it = regular_exports_.erase(it);
+    } else {
+      it++;
+    }
+  }
+}
+
+namespace {
+
+const ModuleDescriptor::Entry* BetterDuplicate(
+    const ModuleDescriptor::Entry* candidate,
+    ZoneMap<const AstRawString*, const ModuleDescriptor::Entry*>& export_names,
+    const ModuleDescriptor::Entry* current_duplicate) {
+  DCHECK_NOT_NULL(candidate->export_name);
+  DCHECK(candidate->location.IsValid());
+  auto insert_result =
+      export_names.insert(std::make_pair(candidate->export_name, candidate));
+  if (insert_result.second) return current_duplicate;
+  if (current_duplicate == nullptr) {
+    current_duplicate = insert_result.first->second;
+  }
+  return (candidate->location.beg_pos > current_duplicate->location.beg_pos)
+             ? candidate
+             : current_duplicate;
+}
+
+}  // namespace
+
+const ModuleDescriptor::Entry* ModuleDescriptor::FindDuplicateExport(
+    Zone* zone) const {
+  const ModuleDescriptor::Entry* duplicate = nullptr;
+  ZoneMap<const AstRawString*, const ModuleDescriptor::Entry*> export_names(
+      zone);
+  for (const auto& elem : regular_exports_) {
+    duplicate = BetterDuplicate(elem.second, export_names, duplicate);
+  }
+  for (auto entry : special_exports_) {
+    if (entry->export_name == nullptr) continue;  // Star export.
+    duplicate = BetterDuplicate(entry, export_names, duplicate);
+  }
+  return duplicate;
+}
+
 bool ModuleDescriptor::Validate(ModuleScope* module_scope,
                                 PendingCompilationErrorHandler* error_handler,
                                 Zone* zone) {
@@ -105,29 +237,19 @@
 
   // Report error iff there are duplicate exports.
   {
-    ZoneAllocationPolicy allocator(zone);
-    ZoneHashMap* export_names = new (zone->New(sizeof(ZoneHashMap)))
-        ZoneHashMap(ZoneHashMap::PointersMatch,
-                    ZoneHashMap::kDefaultHashMapCapacity, allocator);
-    for (auto entry : exports_) {
-      if (entry->export_name == nullptr) continue;
-      AstRawString* key = const_cast<AstRawString*>(entry->export_name);
-      ZoneHashMap::Entry* p =
-          export_names->LookupOrInsert(key, key->hash(), allocator);
-      DCHECK_NOT_NULL(p);
-      if (p->value != nullptr) {
-        error_handler->ReportMessageAt(
-            entry->location.beg_pos, entry->location.end_pos,
-            MessageTemplate::kDuplicateExport, entry->export_name);
-        return false;
-      }
-      p->value = key;  // Anything but nullptr.
+    const Entry* entry = FindDuplicateExport(zone);
+    if (entry != nullptr) {
+      error_handler->ReportMessageAt(
+          entry->location.beg_pos, entry->location.end_pos,
+          MessageTemplate::kDuplicateExport, entry->export_name);
+      return false;
     }
   }
 
   // Report error iff there are exports of non-existent local names.
-  for (auto entry : exports_) {
-    if (entry->local_name == nullptr) continue;
+  for (const auto& elem : regular_exports_) {
+    const Entry* entry = elem.second;
+    DCHECK_NOT_NULL(entry->local_name);
     if (module_scope->LookupLocal(entry->local_name) == nullptr) {
       error_handler->ReportMessageAt(
           entry->location.beg_pos, entry->location.end_pos,
@@ -136,7 +258,7 @@
     }
   }
 
-  MakeIndirectExportsExplicit();
+  MakeIndirectExportsExplicit(zone);
   return true;
 }
 
diff --git a/src/ast/modules.h b/src/ast/modules.h
index c8f7aa3..4d36735 100644
--- a/src/ast/modules.h
+++ b/src/ast/modules.h
@@ -7,19 +7,26 @@
 
 #include "src/parsing/scanner.h"  // Only for Scanner::Location.
 #include "src/pending-compilation-error-handler.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
 
 
 class AstRawString;
-
+class ModuleInfoEntry;
 
 class ModuleDescriptor : public ZoneObject {
  public:
   explicit ModuleDescriptor(Zone* zone)
-      : exports_(1, zone), special_imports_(1, zone), regular_imports_(zone) {}
+      : module_requests_(zone),
+        special_exports_(1, zone),
+        namespace_imports_(1, zone),
+        regular_exports_(zone),
+        regular_imports_(zone) {}
+
+  // The following Add* methods are high-level convenience functions for use by
+  // the parser.
 
   // import x from "foo.js";
   // import {x} from "foo.js";
@@ -37,9 +44,7 @@
   // import "foo.js";
   // import {} from "foo.js";
   // export {} from "foo.js";  (sic!)
-  void AddEmptyImport(
-      const AstRawString* module_request, const Scanner::Location loc,
-      Zone* zone);
+  void AddEmptyImport(const AstRawString* module_request);
 
   // export {x};
   // export {x as y};
@@ -67,38 +72,107 @@
   bool Validate(ModuleScope* module_scope,
                 PendingCompilationErrorHandler* error_handler, Zone* zone);
 
-  struct ModuleEntry : public ZoneObject {
+  struct Entry : public ZoneObject {
     const Scanner::Location location;
     const AstRawString* export_name;
     const AstRawString* local_name;
     const AstRawString* import_name;
-    const AstRawString* module_request;
+    // The module_request value records the order in which modules are
+    // requested. It also functions as an index into the ModuleInfo's array of
+    // module specifiers and into the Module's array of requested modules.  A
+    // negative value means no module request.
+    int module_request;
 
-    explicit ModuleEntry(Scanner::Location loc)
+    // TODO(neis): Remove local_name component?
+    explicit Entry(Scanner::Location loc)
         : location(loc),
           export_name(nullptr),
           local_name(nullptr),
           import_name(nullptr),
-          module_request(nullptr) {}
+          module_request(-1) {}
+
+    // (De-)serialization support.
+    // Note that the location value is not preserved as it's only needed by the
+    // parser.  (A Deserialize'd entry has an invalid location.)
+    Handle<ModuleInfoEntry> Serialize(Isolate* isolate) const;
+    static Entry* Deserialize(Isolate* isolate, AstValueFactory* avfactory,
+                              Handle<ModuleInfoEntry> entry);
   };
 
-  const ZoneList<ModuleEntry*>& exports() const { return exports_; }
+  // Module requests.
+  const ZoneMap<const AstRawString*, int>& module_requests() const {
+    return module_requests_;
+  }
 
-  // Empty imports and namespace imports.
-  const ZoneList<const ModuleEntry*>& special_imports() const {
-    return special_imports_;
+  // Namespace imports.
+  const ZoneList<const Entry*>& namespace_imports() const {
+    return namespace_imports_;
   }
 
   // All the remaining imports, indexed by local name.
-  const ZoneMap<const AstRawString*, const ModuleEntry*>& regular_imports()
-      const {
+  const ZoneMap<const AstRawString*, const Entry*>& regular_imports() const {
     return regular_imports_;
   }
 
+  // Star exports and explicitly indirect exports.
+  const ZoneList<const Entry*>& special_exports() const {
+    return special_exports_;
+  }
+
+  // All the remaining exports, indexed by local name.
+  // After canonicalization (see Validate), these are exactly the local exports.
+  const ZoneMultimap<const AstRawString*, Entry*>& regular_exports() const {
+    return regular_exports_;
+  }
+
+  void AddRegularExport(Entry* entry) {
+    DCHECK_NOT_NULL(entry->export_name);
+    DCHECK_NOT_NULL(entry->local_name);
+    DCHECK_NULL(entry->import_name);
+    DCHECK_LT(entry->module_request, 0);
+    regular_exports_.insert(std::make_pair(entry->local_name, entry));
+  }
+
+  void AddSpecialExport(const Entry* entry, Zone* zone) {
+    DCHECK_NULL(entry->local_name);
+    DCHECK_LE(0, entry->module_request);
+    special_exports_.Add(entry, zone);
+  }
+
+  void AddRegularImport(const Entry* entry) {
+    DCHECK_NOT_NULL(entry->import_name);
+    DCHECK_NOT_NULL(entry->local_name);
+    DCHECK_NULL(entry->export_name);
+    DCHECK_LE(0, entry->module_request);
+    regular_imports_.insert(std::make_pair(entry->local_name, entry));
+    // We don't care if there's already an entry for this local name, as in that
+    // case we will report an error when declaring the variable.
+  }
+
+  void AddNamespaceImport(const Entry* entry, Zone* zone) {
+    DCHECK_NULL(entry->import_name);
+    DCHECK_NULL(entry->export_name);
+    DCHECK_NOT_NULL(entry->local_name);
+    DCHECK_LE(0, entry->module_request);
+    namespace_imports_.Add(entry, zone);
+  }
+
+  Handle<FixedArray> SerializeRegularExports(Isolate* isolate,
+                                             Zone* zone) const;
+  void DeserializeRegularExports(Isolate* isolate, AstValueFactory* avfactory,
+                                 Handle<FixedArray> data);
+
  private:
-  ZoneList<ModuleEntry*> exports_;
-  ZoneList<const ModuleEntry*> special_imports_;
-  ZoneMap<const AstRawString*, const ModuleEntry*> regular_imports_;
+  // TODO(neis): Use an STL data structure instead of ZoneList?
+  ZoneMap<const AstRawString*, int> module_requests_;
+  ZoneList<const Entry*> special_exports_;
+  ZoneList<const Entry*> namespace_imports_;
+  ZoneMultimap<const AstRawString*, Entry*> regular_exports_;
+  ZoneMap<const AstRawString*, const Entry*> regular_imports_;
+
+  // If there are multiple export entries with the same export name, return the
+  // last of them (in source order).  Otherwise return nullptr.
+  const Entry* FindDuplicateExport(Zone* zone) const;
 
   // Find any implicitly indirect exports and make them explicit.
   //
@@ -116,7 +190,15 @@
   // into:
   //   import {a as b} from "X"; export {a as c} from "X";
   // (The import entry is never deleted.)
-  void MakeIndirectExportsExplicit();
+  void MakeIndirectExportsExplicit(Zone* zone);
+
+  int AddModuleRequest(const AstRawString* specifier) {
+    DCHECK_NOT_NULL(specifier);
+    auto it = module_requests_
+                  .insert(std::make_pair(specifier, module_requests_.size()))
+                  .first;
+    return it->second;
+  }
 };
 
 }  // namespace internal
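
AddModuleRequest above leans on map insertion semantics to deduplicate specifiers: insert() is a no-op for an existing key, so every specifier keeps the dense index it was assigned on first sight. The following is a minimal standalone sketch of the same idiom, with std::map and std::string standing in for V8's ZoneMap and AstRawString (those substitutions are assumptions for the sake of a self-contained example):

#include <cassert>
#include <map>
#include <string>

// Assigns a dense, first-come index to each distinct specifier. A duplicate
// insert() keeps the existing element, so repeated specifiers get back the
// index they were originally given.
int AddModuleRequest(std::map<std::string, int>* requests,
                     const std::string& specifier) {
  auto it = requests->insert({specifier, static_cast<int>(requests->size())})
                .first;
  return it->second;
}

int main() {
  std::map<std::string, int> requests;
  assert(AddModuleRequest(&requests, "./a.js") == 0);
  assert(AddModuleRequest(&requests, "./b.js") == 1);
  assert(AddModuleRequest(&requests, "./a.js") == 0);  // deduplicated
  return 0;
}

The indices double as positions in the serialized module requests array, which is why ModuleInfo::New in scopeinfo.cc can write each specifier string at slot elem.second.
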
diff --git a/src/ast/prettyprinter.cc b/src/ast/prettyprinter.cc
index f19ee23..874c159 100644
--- a/src/ast/prettyprinter.cc
+++ b/src/ast/prettyprinter.cc
@@ -9,6 +9,7 @@
 #include "src/ast/ast-value-factory.h"
 #include "src/ast/scopes.h"
 #include "src/base/platform/platform.h"
+#include "src/globals.h"
 
 namespace v8 {
 namespace internal {
@@ -603,8 +604,8 @@
     PrintLiteralIndented(info, value, true);
   } else {
     EmbeddedVector<char, 256> buf;
-    int pos = SNPrintF(buf, "%s (mode = %s", info,
-                       Variable::Mode2String(var->mode()));
+    int pos =
+        SNPrintF(buf, "%s (mode = %s", info, VariableMode2String(var->mode()));
     SNPrintF(buf + pos, ")");
     PrintLiteralIndented(buf.start(), value, true);
   }
@@ -870,6 +871,9 @@
     case HandlerTable::DESUGARING:
       prediction = "DESUGARING";
       break;
+    case HandlerTable::ASYNC_AWAIT:
+      prediction = "ASYNC_AWAIT";
+      break;
   }
   Print(" %s\n", prediction);
 }
@@ -897,34 +901,27 @@
   if (node->extends() != nullptr) {
     PrintIndentedVisit("EXTENDS", node->extends());
   }
-  PrintProperties(node->properties());
+  PrintClassProperties(node->properties());
 }
 
-
-void AstPrinter::PrintProperties(
-    ZoneList<ObjectLiteral::Property*>* properties) {
+void AstPrinter::PrintClassProperties(
+    ZoneList<ClassLiteral::Property*>* properties) {
   for (int i = 0; i < properties->length(); i++) {
-    ObjectLiteral::Property* property = properties->at(i);
+    ClassLiteral::Property* property = properties->at(i);
     const char* prop_kind = nullptr;
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-        prop_kind = "CONSTANT";
+      case ClassLiteral::Property::METHOD:
+        prop_kind = "METHOD";
         break;
-      case ObjectLiteral::Property::COMPUTED:
-        prop_kind = "COMPUTED";
-        break;
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-        prop_kind = "MATERIALIZED_LITERAL";
-        break;
-      case ObjectLiteral::Property::PROTOTYPE:
-        prop_kind = "PROTOTYPE";
-        break;
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         prop_kind = "GETTER";
         break;
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         prop_kind = "SETTER";
         break;
+      case ClassLiteral::Property::FIELD:
+        prop_kind = "FIELD";
+        break;
     }
     EmbeddedVector<char, 128> buf;
     SNPrintF(buf, "PROPERTY%s - %s", property->is_static() ? " - STATIC" : "",
@@ -986,7 +983,40 @@
   EmbeddedVector<char, 128> buf;
   SNPrintF(buf, "literal_index = %d\n", node->literal_index());
   PrintIndented(buf.start());
-  PrintProperties(node->properties());
+  PrintObjectProperties(node->properties());
+}
+
+void AstPrinter::PrintObjectProperties(
+    ZoneList<ObjectLiteral::Property*>* properties) {
+  for (int i = 0; i < properties->length(); i++) {
+    ObjectLiteral::Property* property = properties->at(i);
+    const char* prop_kind = nullptr;
+    switch (property->kind()) {
+      case ObjectLiteral::Property::CONSTANT:
+        prop_kind = "CONSTANT";
+        break;
+      case ObjectLiteral::Property::COMPUTED:
+        prop_kind = "COMPUTED";
+        break;
+      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
+        prop_kind = "MATERIALIZED_LITERAL";
+        break;
+      case ObjectLiteral::Property::PROTOTYPE:
+        prop_kind = "PROTOTYPE";
+        break;
+      case ObjectLiteral::Property::GETTER:
+        prop_kind = "GETTER";
+        break;
+      case ObjectLiteral::Property::SETTER:
+        prop_kind = "SETTER";
+        break;
+    }
+    EmbeddedVector<char, 128> buf;
+    SNPrintF(buf, "PROPERTY - %s", prop_kind);
+    IndentedScope prop(this, buf.start());
+    PrintIndentedVisit("KEY", properties->at(i)->key());
+    PrintIndentedVisit("VALUE", properties->at(i)->value());
+  }
 }
 
 
@@ -1028,9 +1058,6 @@
       case VariableLocation::CONTEXT:
         SNPrintF(buf + pos, " context[%d]", var->index());
         break;
-      case VariableLocation::GLOBAL:
-        SNPrintF(buf + pos, " global[%d]", var->index());
-        break;
       case VariableLocation::LOOKUP:
         SNPrintF(buf + pos, " lookup");
         break;
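
Both new printers initialize prop_kind to nullptr and switch over their respective Property::Kind without a default case, so a compiler with -Wswitch enabled points at every switch that misses a branch when a new kind (such as the class FIELD kind) is introduced. A small sketch of that pattern with a hypothetical enum, not V8's actual types:

#include <cstdio>

// Hypothetical stand-in for ClassLiteral::Property::Kind.
enum class PropertyKind { kMethod, kGetter, kSetter, kField };

// No default case: with -Wswitch enabled, adding a new enumerator makes the
// compiler flag this switch until a branch for it is added.
const char* PropertyKindToString(PropertyKind kind) {
  switch (kind) {
    case PropertyKind::kMethod:
      return "METHOD";
    case PropertyKind::kGetter:
      return "GETTER";
    case PropertyKind::kSetter:
      return "SETTER";
    case PropertyKind::kField:
      return "FIELD";
  }
  return nullptr;  // Unreachable, but keeps -Wreturn-type quiet.
}

int main() {
  std::printf("%s\n", PropertyKindToString(PropertyKind::kField));
  return 0;
}
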
diff --git a/src/ast/prettyprinter.h b/src/ast/prettyprinter.h
index 9b0e22a..2d553ba 100644
--- a/src/ast/prettyprinter.h
+++ b/src/ast/prettyprinter.h
@@ -93,7 +93,8 @@
                                     Variable* var,
                                     Handle<Object> value);
   void PrintLabelsIndented(ZoneList<const AstRawString*>* labels);
-  void PrintProperties(ZoneList<ObjectLiteral::Property*>* properties);
+  void PrintObjectProperties(ZoneList<ObjectLiteral::Property*>* properties);
+  void PrintClassProperties(ZoneList<ClassLiteral::Property*>* properties);
   void PrintTryStatement(TryStatement* try_statement);
 
   void inc_indent() { indent_++; }
diff --git a/src/ast/scopeinfo.cc b/src/ast/scopeinfo.cc
index 7189de3..5354b8d 100644
--- a/src/ast/scopeinfo.cc
+++ b/src/ast/scopeinfo.cc
@@ -2,33 +2,92 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "src/ast/scopeinfo.h"
-
 #include <stdlib.h>
 
 #include "src/ast/context-slot-cache.h"
 #include "src/ast/scopes.h"
+#include "src/ast/variables.h"
 #include "src/bootstrapper.h"
 
 namespace v8 {
 namespace internal {
 
+// An entry in ModuleVariableEntries consists of several slots:
+enum ModuleVariableEntryOffset {
+  kModuleVariableNameOffset,
+  kModuleVariableIndexOffset,
+  kModuleVariablePropertiesOffset,
+  kModuleVariableEntryLength  // Sentinel value.
+};
 
-Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
-                                    Scope* scope) {
-  // Collect stack and context locals.
-  ZoneList<Variable*> stack_locals(scope->StackLocalCount(), zone);
-  ZoneList<Variable*> context_locals(scope->ContextLocalCount(), zone);
-  ZoneList<Variable*> context_globals(scope->ContextGlobalCount(), zone);
+#ifdef DEBUG
+bool ScopeInfo::Equals(ScopeInfo* other) const {
+  if (length() != other->length()) return false;
+  for (int index = 0; index < length(); ++index) {
+    Object* entry = get(index);
+    Object* other_entry = other->get(index);
+    if (entry->IsSmi()) {
+      if (entry != other_entry) return false;
+    } else {
+      if (HeapObject::cast(entry)->map()->instance_type() !=
+          HeapObject::cast(other_entry)->map()->instance_type()) {
+        return false;
+      }
+      if (entry->IsString()) {
+        if (!String::cast(entry)->Equals(String::cast(other_entry))) {
+          return false;
+        }
+      } else if (entry->IsScopeInfo()) {
+        if (!ScopeInfo::cast(entry)->Equals(ScopeInfo::cast(other_entry))) {
+          return false;
+        }
+      } else if (entry->IsModuleInfo()) {
+        if (!ModuleInfo::cast(entry)->Equals(ModuleInfo::cast(other_entry))) {
+          return false;
+        }
+      } else {
+        UNREACHABLE();
+        return false;
+      }
+    }
+  }
+  return true;
+}
+#endif
 
-  scope->CollectStackAndContextLocals(&stack_locals, &context_locals,
-                                      &context_globals);
-  const int stack_local_count = stack_locals.length();
-  const int context_local_count = context_locals.length();
-  const int context_global_count = context_globals.length();
+Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
+                                    MaybeHandle<ScopeInfo> outer_scope) {
+  // Collect variables.
+  ZoneList<Variable*>* locals = scope->locals();
+  int stack_local_count = 0;
+  int context_local_count = 0;
+  int module_vars_count = 0;
+  // Stack-allocated block scope variables are allocated in the parent
+  // declaration scope but are recorded in the block scope's scope info. The
+  // first slot index indicates at which offset a particular scope starts in
+  // its parent declaration scope.
+  int first_slot_index = 0;
+  for (int i = 0; i < locals->length(); i++) {
+    Variable* var = locals->at(i);
+    switch (var->location()) {
+      case VariableLocation::LOCAL:
+        if (stack_local_count == 0) first_slot_index = var->index();
+        stack_local_count++;
+        break;
+      case VariableLocation::CONTEXT:
+        context_local_count++;
+        break;
+      case VariableLocation::MODULE:
+        module_vars_count++;
+        break;
+      default:
+        break;
+    }
+  }
+  DCHECK(module_vars_count == 0 || scope->is_module_scope());
+
   // Make sure we allocate the correct amount.
   DCHECK_EQ(scope->ContextLocalCount(), context_local_count);
-  DCHECK_EQ(scope->ContextGlobalCount(), context_global_count);
 
   // Determine use and location of the "this" binding if it is present.
   VariableAllocationInfo receiver_info;
@@ -53,7 +112,6 @@
 
   // Determine use and location of the function variable if it is present.
   VariableAllocationInfo function_name_info;
-  VariableMode function_variable_mode;
   if (scope->is_function_scope() &&
       scope->AsDeclarationScope()->function_var() != nullptr) {
     Variable* var = scope->AsDeclarationScope()->function_var();
@@ -65,20 +123,21 @@
       DCHECK(var->IsStackLocal());
       function_name_info = STACK;
     }
-    function_variable_mode = var->mode();
   } else {
     function_name_info = NONE;
-    function_variable_mode = VAR;
   }
-  DCHECK(context_global_count == 0 || scope->scope_type() == SCRIPT_SCOPE);
 
   const bool has_function_name = function_name_info != NONE;
   const bool has_receiver = receiver_info == STACK || receiver_info == CONTEXT;
   const int parameter_count = scope->num_parameters();
+  const bool has_outer_scope_info = !outer_scope.is_null();
   const int length = kVariablePartIndex + parameter_count +
                      (1 + stack_local_count) + 2 * context_local_count +
-                     2 * context_global_count +
-                     (has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
+                     (has_receiver ? 1 : 0) + (has_function_name ? 2 : 0) +
+                     (has_outer_scope_info ? 1 : 0) +
+                     (scope->is_module_scope()
+                          ? 2 + kModuleVariableEntryLength * module_vars_count
+                          : 0);
 
   Factory* factory = isolate->factory();
   Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
@@ -96,27 +155,29 @@
   }
 
   // Encode the flags.
-  int flags = ScopeTypeField::encode(scope->scope_type()) |
-              CallsEvalField::encode(scope->calls_eval()) |
-              LanguageModeField::encode(scope->language_mode()) |
-              DeclarationScopeField::encode(scope->is_declaration_scope()) |
-              ReceiverVariableField::encode(receiver_info) |
-              HasNewTargetField::encode(has_new_target) |
-              FunctionVariableField::encode(function_name_info) |
-              FunctionVariableMode::encode(function_variable_mode) |
-              AsmModuleField::encode(asm_module) |
-              AsmFunctionField::encode(asm_function) |
-              HasSimpleParametersField::encode(has_simple_parameters) |
-              FunctionKindField::encode(function_kind);
+  int flags =
+      ScopeTypeField::encode(scope->scope_type()) |
+      CallsEvalField::encode(scope->calls_eval()) |
+      LanguageModeField::encode(scope->language_mode()) |
+      DeclarationScopeField::encode(scope->is_declaration_scope()) |
+      ReceiverVariableField::encode(receiver_info) |
+      HasNewTargetField::encode(has_new_target) |
+      FunctionVariableField::encode(function_name_info) |
+      AsmModuleField::encode(asm_module) |
+      AsmFunctionField::encode(asm_function) |
+      HasSimpleParametersField::encode(has_simple_parameters) |
+      FunctionKindField::encode(function_kind) |
+      HasOuterScopeInfoField::encode(has_outer_scope_info) |
+      IsDebugEvaluateScopeField::encode(scope->is_debug_evaluate_scope());
   scope_info->SetFlags(flags);
+
   scope_info->SetParameterCount(parameter_count);
   scope_info->SetStackLocalCount(stack_local_count);
   scope_info->SetContextLocalCount(context_local_count);
-  scope_info->SetContextGlobalCount(context_global_count);
 
   int index = kVariablePartIndex;
   // Add parameters.
-  DCHECK(index == scope_info->ParameterEntriesIndex());
+  DCHECK_EQ(index, scope_info->ParameterNamesIndex());
   if (scope->is_declaration_scope()) {
     for (int i = 0; i < parameter_count; ++i) {
       scope_info->set(index++,
@@ -124,68 +185,66 @@
     }
   }
 
-  // Add stack locals' names. We are assuming that the stack locals'
-  // slots are allocated in increasing order, so we can simply add
-  // them to the ScopeInfo object.
-  int first_slot_index;
-  if (stack_local_count > 0) {
-    first_slot_index = stack_locals[0]->index();
-  } else {
-    first_slot_index = 0;
-  }
-  DCHECK(index == scope_info->StackLocalFirstSlotIndex());
+  // Add stack locals' names, context locals' names and info, module variables'
+  // names and info. We are assuming that the stack locals' slots are allocated
+  // in increasing order, so we can simply add them to the ScopeInfo object.
+  // Context locals are added using their index.
+  DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
   scope_info->set(index++, Smi::FromInt(first_slot_index));
-  DCHECK(index == scope_info->StackLocalEntriesIndex());
-  for (int i = 0; i < stack_local_count; ++i) {
-    DCHECK(stack_locals[i]->index() == first_slot_index + i);
-    scope_info->set(index++, *stack_locals[i]->name());
+  DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
+
+  int stack_local_base = index;
+  int context_local_base = stack_local_base + stack_local_count;
+  int context_local_info_base = context_local_base + context_local_count;
+  int module_var_entry = scope_info->ModuleVariablesIndex();
+
+  for (int i = 0; i < locals->length(); ++i) {
+    Variable* var = locals->at(i);
+    switch (var->location()) {
+      case VariableLocation::LOCAL: {
+        int local_index = var->index() - first_slot_index;
+        DCHECK_LE(0, local_index);
+        DCHECK_LT(local_index, stack_local_count);
+        scope_info->set(stack_local_base + local_index, *var->name());
+        break;
+      }
+      case VariableLocation::CONTEXT: {
+        // Due to duplicate parameters, context locals aren't guaranteed to come
+        // in order.
+        int local_index = var->index() - Context::MIN_CONTEXT_SLOTS;
+        DCHECK_LE(0, local_index);
+        DCHECK_LT(local_index, context_local_count);
+        uint32_t info = VariableModeField::encode(var->mode()) |
+                        InitFlagField::encode(var->initialization_flag()) |
+                        MaybeAssignedFlagField::encode(var->maybe_assigned());
+        scope_info->set(context_local_base + local_index, *var->name());
+        scope_info->set(context_local_info_base + local_index,
+                        Smi::FromInt(info));
+        break;
+      }
+      case VariableLocation::MODULE: {
+        scope_info->set(module_var_entry + kModuleVariableNameOffset,
+                        *var->name());
+        scope_info->set(module_var_entry + kModuleVariableIndexOffset,
+                        Smi::FromInt(var->index()));
+        uint32_t properties =
+            VariableModeField::encode(var->mode()) |
+            InitFlagField::encode(var->initialization_flag()) |
+            MaybeAssignedFlagField::encode(var->maybe_assigned());
+        scope_info->set(module_var_entry + kModuleVariablePropertiesOffset,
+                        Smi::FromInt(properties));
+        module_var_entry += kModuleVariableEntryLength;
+        break;
+      }
+      default:
+        break;
+    }
   }
 
-  // Due to usage analysis, context-allocated locals are not necessarily in
-  // increasing order: Some of them may be parameters which are allocated before
-  // the non-parameter locals. When the non-parameter locals are sorted
-  // according to usage, the allocated slot indices may not be in increasing
-  // order with the variable list anymore. Thus, we first need to sort them by
-  // context slot index before adding them to the ScopeInfo object.
-  context_locals.Sort(&Variable::CompareIndex);
-
-  // Add context locals' names.
-  DCHECK(index == scope_info->ContextLocalNameEntriesIndex());
-  for (int i = 0; i < context_local_count; ++i) {
-    scope_info->set(index++, *context_locals[i]->name());
-  }
-
-  // Add context globals' names.
-  DCHECK(index == scope_info->ContextGlobalNameEntriesIndex());
-  for (int i = 0; i < context_global_count; ++i) {
-    scope_info->set(index++, *context_globals[i]->name());
-  }
-
-  // Add context locals' info.
-  DCHECK(index == scope_info->ContextLocalInfoEntriesIndex());
-  for (int i = 0; i < context_local_count; ++i) {
-    Variable* var = context_locals[i];
-    uint32_t value =
-        ContextLocalMode::encode(var->mode()) |
-        ContextLocalInitFlag::encode(var->initialization_flag()) |
-        ContextLocalMaybeAssignedFlag::encode(var->maybe_assigned());
-    scope_info->set(index++, Smi::FromInt(value));
-  }
-
-  // Add context globals' info.
-  DCHECK(index == scope_info->ContextGlobalInfoEntriesIndex());
-  for (int i = 0; i < context_global_count; ++i) {
-    Variable* var = context_globals[i];
-    // TODO(ishell): do we need this kind of info for globals here?
-    uint32_t value =
-        ContextLocalMode::encode(var->mode()) |
-        ContextLocalInitFlag::encode(var->initialization_flag()) |
-        ContextLocalMaybeAssignedFlag::encode(var->maybe_assigned());
-    scope_info->set(index++, Smi::FromInt(value));
-  }
+  index += stack_local_count + 2 * context_local_count;
 
   // If the receiver is allocated, add its index.
-  DCHECK(index == scope_info->ReceiverEntryIndex());
+  DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
   if (has_receiver) {
     int var_index = scope->AsDeclarationScope()->receiver()->index();
     scope_info->set(index++, Smi::FromInt(var_index));
@@ -194,7 +253,7 @@
   }
 
   // If present, add the function variable name and its index.
-  DCHECK(index == scope_info->FunctionNameEntryIndex());
+  DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
   if (has_function_name) {
     int var_index = scope->AsDeclarationScope()->function_var()->index();
     scope_info->set(index++,
@@ -204,75 +263,130 @@
            var_index == scope_info->ContextLength() - 1);
   }
 
-  DCHECK(index == scope_info->length());
-  DCHECK(scope->num_parameters() == scope_info->ParameterCount());
-  DCHECK(scope->num_heap_slots() == scope_info->ContextLength() ||
-         (scope->num_heap_slots() == kVariablePartIndex &&
-          scope_info->ContextLength() == 0));
+  // If present, add the outer scope info.
+  DCHECK(index == scope_info->OuterScopeInfoIndex());
+  if (has_outer_scope_info) {
+    scope_info->set(index++, *outer_scope.ToHandleChecked());
+  }
+
+  // Module-specific information (only for module scopes).
+  if (scope->is_module_scope()) {
+    Handle<ModuleInfo> module_info =
+        ModuleInfo::New(isolate, zone, scope->AsModuleScope()->module());
+    DCHECK_EQ(index, scope_info->ModuleInfoIndex());
+    scope_info->set(index++, *module_info);
+    DCHECK_EQ(index, scope_info->ModuleVariableCountIndex());
+    scope_info->set(index++, Smi::FromInt(module_vars_count));
+    DCHECK_EQ(index, scope_info->ModuleVariablesIndex());
+    // The variable entries themselves have already been written above.
+    index += kModuleVariableEntryLength * module_vars_count;
+  }
+
+  DCHECK_EQ(index, scope_info->length());
+  DCHECK_EQ(scope->num_parameters(), scope_info->ParameterCount());
+  DCHECK_EQ(scope->num_heap_slots(), scope_info->ContextLength());
   return scope_info;
 }
 
+Handle<ScopeInfo> ScopeInfo::CreateForWithScope(
+    Isolate* isolate, MaybeHandle<ScopeInfo> outer_scope) {
+  const bool has_outer_scope_info = !outer_scope.is_null();
+  const int length = kVariablePartIndex + 1 + (has_outer_scope_info ? 1 : 0);
+
+  Factory* factory = isolate->factory();
+  Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
+
+  // Encode the flags.
+  int flags =
+      ScopeTypeField::encode(WITH_SCOPE) | CallsEvalField::encode(false) |
+      LanguageModeField::encode(SLOPPY) | DeclarationScopeField::encode(false) |
+      ReceiverVariableField::encode(NONE) | HasNewTargetField::encode(false) |
+      FunctionVariableField::encode(NONE) | AsmModuleField::encode(false) |
+      AsmFunctionField::encode(false) | HasSimpleParametersField::encode(true) |
+      FunctionKindField::encode(kNormalFunction) |
+      HasOuterScopeInfoField::encode(has_outer_scope_info) |
+      IsDebugEvaluateScopeField::encode(false);
+  scope_info->SetFlags(flags);
+
+  scope_info->SetParameterCount(0);
+  scope_info->SetStackLocalCount(0);
+  scope_info->SetContextLocalCount(0);
+
+  int index = kVariablePartIndex;
+  DCHECK_EQ(index, scope_info->ParameterNamesIndex());
+  DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
+  scope_info->set(index++, Smi::FromInt(0));
+  DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
+  DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
+  DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
+  DCHECK(index == scope_info->OuterScopeInfoIndex());
+  if (has_outer_scope_info) {
+    scope_info->set(index++, *outer_scope.ToHandleChecked());
+  }
+  DCHECK_EQ(index, scope_info->length());
+  DCHECK_EQ(0, scope_info->ParameterCount());
+  DCHECK_EQ(Context::MIN_CONTEXT_SLOTS, scope_info->ContextLength());
+  return scope_info;
+}
 
 Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
   DCHECK(isolate->bootstrapper()->IsActive());
 
   const int stack_local_count = 0;
   const int context_local_count = 1;
-  const int context_global_count = 0;
   const bool has_simple_parameters = true;
   const VariableAllocationInfo receiver_info = CONTEXT;
   const VariableAllocationInfo function_name_info = NONE;
-  const VariableMode function_variable_mode = VAR;
   const bool has_function_name = false;
   const bool has_receiver = true;
+  const bool has_outer_scope_info = false;
   const int parameter_count = 0;
   const int length = kVariablePartIndex + parameter_count +
                      (1 + stack_local_count) + 2 * context_local_count +
-                     2 * context_global_count +
-                     (has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
+                     (has_receiver ? 1 : 0) + (has_function_name ? 2 : 0) +
+                     (has_outer_scope_info ? 1 : 0);
 
   Factory* factory = isolate->factory();
   Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
 
   // Encode the flags.
-  int flags = ScopeTypeField::encode(SCRIPT_SCOPE) |
-              CallsEvalField::encode(false) |
-              LanguageModeField::encode(SLOPPY) |
-              DeclarationScopeField::encode(true) |
-              ReceiverVariableField::encode(receiver_info) |
-              FunctionVariableField::encode(function_name_info) |
-              FunctionVariableMode::encode(function_variable_mode) |
-              AsmModuleField::encode(false) | AsmFunctionField::encode(false) |
-              HasSimpleParametersField::encode(has_simple_parameters) |
-              FunctionKindField::encode(FunctionKind::kNormalFunction);
+  int flags =
+      ScopeTypeField::encode(SCRIPT_SCOPE) | CallsEvalField::encode(false) |
+      LanguageModeField::encode(SLOPPY) | DeclarationScopeField::encode(true) |
+      ReceiverVariableField::encode(receiver_info) |
+      FunctionVariableField::encode(function_name_info) |
+      AsmModuleField::encode(false) | AsmFunctionField::encode(false) |
+      HasSimpleParametersField::encode(has_simple_parameters) |
+      FunctionKindField::encode(FunctionKind::kNormalFunction) |
+      HasOuterScopeInfoField::encode(has_outer_scope_info) |
+      IsDebugEvaluateScopeField::encode(false);
   scope_info->SetFlags(flags);
   scope_info->SetParameterCount(parameter_count);
   scope_info->SetStackLocalCount(stack_local_count);
   scope_info->SetContextLocalCount(context_local_count);
-  scope_info->SetContextGlobalCount(context_global_count);
 
   int index = kVariablePartIndex;
   const int first_slot_index = 0;
-  DCHECK(index == scope_info->StackLocalFirstSlotIndex());
+  DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
   scope_info->set(index++, Smi::FromInt(first_slot_index));
-  DCHECK(index == scope_info->StackLocalEntriesIndex());
+  DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
 
   // Here we add info for context-allocated "this".
-  DCHECK(index == scope_info->ContextLocalNameEntriesIndex());
+  DCHECK_EQ(index, scope_info->ContextLocalNamesIndex());
   scope_info->set(index++, *isolate->factory()->this_string());
-  DCHECK(index == scope_info->ContextLocalInfoEntriesIndex());
-  const uint32_t value = ContextLocalMode::encode(CONST) |
-                         ContextLocalInitFlag::encode(kCreatedInitialized) |
-                         ContextLocalMaybeAssignedFlag::encode(kNotAssigned);
+  DCHECK_EQ(index, scope_info->ContextLocalInfosIndex());
+  const uint32_t value = VariableModeField::encode(CONST) |
+                         InitFlagField::encode(kCreatedInitialized) |
+                         MaybeAssignedFlagField::encode(kNotAssigned);
   scope_info->set(index++, Smi::FromInt(value));
 
   // And here we record that this scopeinfo binds a receiver.
-  DCHECK(index == scope_info->ReceiverEntryIndex());
+  DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
   const int receiver_index = Context::MIN_CONTEXT_SLOTS + 0;
   scope_info->set(index++, Smi::FromInt(receiver_index));
 
-  DCHECK(index == scope_info->FunctionNameEntryIndex());
-
+  DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
+  DCHECK_EQ(index, scope_info->OuterScopeInfoIndex());
   DCHECK_EQ(index, scope_info->length());
   DCHECK_EQ(scope_info->ParameterCount(), 0);
   DCHECK_EQ(scope_info->ContextLength(), Context::MIN_CONTEXT_SLOTS + 1);
@@ -282,12 +396,12 @@
 
 
 ScopeInfo* ScopeInfo::Empty(Isolate* isolate) {
-  return reinterpret_cast<ScopeInfo*>(isolate->heap()->empty_fixed_array());
+  return isolate->heap()->empty_scope_info();
 }
 
 
 ScopeType ScopeInfo::scope_type() {
-  DCHECK(length() > 0);
+  DCHECK_LT(0, length());
   return ScopeTypeField::decode(Flags());
 }
 
@@ -325,19 +439,17 @@
 int ScopeInfo::ContextLength() {
   if (length() > 0) {
     int context_locals = ContextLocalCount();
-    int context_globals = ContextGlobalCount();
     bool function_name_context_slot =
         FunctionVariableField::decode(Flags()) == CONTEXT;
-    bool has_context = context_locals > 0 || context_globals > 0 ||
-                       function_name_context_slot ||
+    bool has_context = context_locals > 0 || function_name_context_slot ||
                        scope_type() == WITH_SCOPE ||
                        (scope_type() == BLOCK_SCOPE && CallsSloppyEval() &&
-                           is_declaration_scope()) ||
+                        is_declaration_scope()) ||
                        (scope_type() == FUNCTION_SCOPE && CallsSloppyEval()) ||
                        scope_type() == MODULE_SCOPE;
 
     if (has_context) {
-      return Context::MIN_CONTEXT_SLOTS + context_locals + context_globals +
+      return Context::MIN_CONTEXT_SLOTS + context_locals +
              (function_name_context_slot ? 1 : 0);
     }
   }
@@ -375,6 +487,30 @@
   }
 }
 
+bool ScopeInfo::HasOuterScopeInfo() {
+  if (length() > 0) {
+    return HasOuterScopeInfoField::decode(Flags());
+  } else {
+    return false;
+  }
+}
+
+bool ScopeInfo::IsDebugEvaluateScope() {
+  if (length() > 0) {
+    return IsDebugEvaluateScopeField::decode(Flags());
+  } else {
+    return false;
+  }
+}
+
+void ScopeInfo::SetIsDebugEvaluateScope() {
+  if (length() > 0) {
+    DCHECK_EQ(scope_type(), WITH_SCOPE);
+    SetFlags(Flags() | IsDebugEvaluateScopeField::encode(true));
+  } else {
+    UNREACHABLE();
+  }
+}
 
 bool ScopeInfo::HasHeapAllocatedLocals() {
   if (length() > 0) {
@@ -392,68 +528,85 @@
 
 String* ScopeInfo::FunctionName() {
   DCHECK(HasFunctionName());
-  return String::cast(get(FunctionNameEntryIndex()));
+  return String::cast(get(FunctionNameInfoIndex()));
 }
 
+ScopeInfo* ScopeInfo::OuterScopeInfo() {
+  DCHECK(HasOuterScopeInfo());
+  return ScopeInfo::cast(get(OuterScopeInfoIndex()));
+}
+
+ModuleInfo* ScopeInfo::ModuleDescriptorInfo() {
+  DCHECK(scope_type() == MODULE_SCOPE);
+  return ModuleInfo::cast(get(ModuleInfoIndex()));
+}
 
 String* ScopeInfo::ParameterName(int var) {
-  DCHECK(0 <= var && var < ParameterCount());
-  int info_index = ParameterEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, ParameterCount());
+  int info_index = ParameterNamesIndex() + var;
   return String::cast(get(info_index));
 }
 
 
 String* ScopeInfo::LocalName(int var) {
-  DCHECK(0 <= var && var < LocalCount());
-  DCHECK(StackLocalEntriesIndex() + StackLocalCount() ==
-         ContextLocalNameEntriesIndex());
-  int info_index = StackLocalEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, LocalCount());
+  DCHECK(StackLocalNamesIndex() + StackLocalCount() ==
+         ContextLocalNamesIndex());
+  int info_index = StackLocalNamesIndex() + var;
   return String::cast(get(info_index));
 }
 
 
 String* ScopeInfo::StackLocalName(int var) {
-  DCHECK(0 <= var && var < StackLocalCount());
-  int info_index = StackLocalEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, StackLocalCount());
+  int info_index = StackLocalNamesIndex() + var;
   return String::cast(get(info_index));
 }
 
 
 int ScopeInfo::StackLocalIndex(int var) {
-  DCHECK(0 <= var && var < StackLocalCount());
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, StackLocalCount());
   int first_slot_index = Smi::cast(get(StackLocalFirstSlotIndex()))->value();
   return first_slot_index + var;
 }
 
 
 String* ScopeInfo::ContextLocalName(int var) {
-  DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
-  int info_index = ContextLocalNameEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, ContextLocalCount());
+  int info_index = ContextLocalNamesIndex() + var;
   return String::cast(get(info_index));
 }
 
 
 VariableMode ScopeInfo::ContextLocalMode(int var) {
-  DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
-  int info_index = ContextLocalInfoEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, ContextLocalCount());
+  int info_index = ContextLocalInfosIndex() + var;
   int value = Smi::cast(get(info_index))->value();
-  return ContextLocalMode::decode(value);
+  return VariableModeField::decode(value);
 }
 
 
 InitializationFlag ScopeInfo::ContextLocalInitFlag(int var) {
-  DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
-  int info_index = ContextLocalInfoEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, ContextLocalCount());
+  int info_index = ContextLocalInfosIndex() + var;
   int value = Smi::cast(get(info_index))->value();
-  return ContextLocalInitFlag::decode(value);
+  return InitFlagField::decode(value);
 }
 
 
 MaybeAssignedFlag ScopeInfo::ContextLocalMaybeAssignedFlag(int var) {
-  DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
-  int info_index = ContextLocalInfoEntriesIndex() + var;
+  DCHECK_LE(0, var);
+  DCHECK_LT(var, ContextLocalCount());
+  int info_index = ContextLocalInfosIndex() + var;
   int value = Smi::cast(get(info_index))->value();
-  return ContextLocalMaybeAssignedFlag::decode(value);
+  return MaybeAssignedFlagField::decode(value);
 }
 
 bool ScopeInfo::VariableIsSynthetic(String* name) {
@@ -470,8 +623,8 @@
   DCHECK(name->IsInternalizedString());
   if (length() > 0) {
     int first_slot_index = Smi::cast(get(StackLocalFirstSlotIndex()))->value();
-    int start = StackLocalEntriesIndex();
-    int end = StackLocalEntriesIndex() + StackLocalCount();
+    int start = StackLocalNamesIndex();
+    int end = start + StackLocalCount();
     for (int i = start; i < end; ++i) {
       if (name == get(i)) {
         return i - start + first_slot_index;
@@ -481,27 +634,54 @@
   return -1;
 }
 
+int ScopeInfo::ModuleIndex(Handle<String> name, VariableMode* mode,
+                           InitializationFlag* init_flag,
+                           MaybeAssignedFlag* maybe_assigned_flag) {
+  DCHECK_EQ(scope_type(), MODULE_SCOPE);
+  DCHECK(name->IsInternalizedString());
+  DCHECK_NOT_NULL(mode);
+  DCHECK_NOT_NULL(init_flag);
+  DCHECK_NOT_NULL(maybe_assigned_flag);
+
+  int module_vars_count = Smi::cast(get(ModuleVariableCountIndex()))->value();
+  int entry = ModuleVariablesIndex();
+  for (int i = 0; i < module_vars_count; ++i) {
+    if (*name == get(entry + kModuleVariableNameOffset)) {
+      int index = Smi::cast(get(entry + kModuleVariableIndexOffset))->value();
+      int properties =
+          Smi::cast(get(entry + kModuleVariablePropertiesOffset))->value();
+      *mode = VariableModeField::decode(properties);
+      *init_flag = InitFlagField::decode(properties);
+      *maybe_assigned_flag = MaybeAssignedFlagField::decode(properties);
+      return index;
+    }
+    entry += kModuleVariableEntryLength;
+  }
+
+  return -1;
+}
 
 int ScopeInfo::ContextSlotIndex(Handle<ScopeInfo> scope_info,
                                 Handle<String> name, VariableMode* mode,
                                 InitializationFlag* init_flag,
                                 MaybeAssignedFlag* maybe_assigned_flag) {
   DCHECK(name->IsInternalizedString());
-  DCHECK(mode != NULL);
-  DCHECK(init_flag != NULL);
+  DCHECK_NOT_NULL(mode);
+  DCHECK_NOT_NULL(init_flag);
+  DCHECK_NOT_NULL(maybe_assigned_flag);
+
   if (scope_info->length() > 0) {
     ContextSlotCache* context_slot_cache =
         scope_info->GetIsolate()->context_slot_cache();
     int result = context_slot_cache->Lookup(*scope_info, *name, mode, init_flag,
                                             maybe_assigned_flag);
     if (result != ContextSlotCache::kNotFound) {
-      DCHECK(result < scope_info->ContextLength());
+      DCHECK_LT(result, scope_info->ContextLength());
       return result;
     }
 
-    int start = scope_info->ContextLocalNameEntriesIndex();
-    int end = scope_info->ContextLocalNameEntriesIndex() +
-              scope_info->ContextLocalCount();
+    int start = scope_info->ContextLocalNamesIndex();
+    int end = start + scope_info->ContextLocalCount();
     for (int i = start; i < end; ++i) {
       if (*name == scope_info->get(i)) {
         int var = i - start;
@@ -512,7 +692,7 @@
 
         context_slot_cache->Update(scope_info, name, *mode, *init_flag,
                                    *maybe_assigned_flag, result);
-        DCHECK(result < scope_info->ContextLength());
+        DCHECK_LT(result, scope_info->ContextLength());
         return result;
       }
     }
@@ -520,46 +700,14 @@
     context_slot_cache->Update(scope_info, name, TEMPORARY,
                                kNeedsInitialization, kNotAssigned, -1);
   }
+
   return -1;
 }
 
-
-int ScopeInfo::ContextGlobalSlotIndex(Handle<ScopeInfo> scope_info,
-                                      Handle<String> name, VariableMode* mode,
-                                      InitializationFlag* init_flag,
-                                      MaybeAssignedFlag* maybe_assigned_flag) {
-  DCHECK(name->IsInternalizedString());
-  DCHECK(mode != NULL);
-  DCHECK(init_flag != NULL);
-  if (scope_info->length() > 0) {
-    // This is to ensure that ContextLocalMode() and co. queries would work.
-    DCHECK_EQ(scope_info->ContextGlobalNameEntriesIndex(),
-              scope_info->ContextLocalNameEntriesIndex() +
-                  scope_info->ContextLocalCount());
-    int base = scope_info->ContextLocalNameEntriesIndex();
-    int start = scope_info->ContextGlobalNameEntriesIndex();
-    int end = scope_info->ContextGlobalNameEntriesIndex() +
-              scope_info->ContextGlobalCount();
-    for (int i = start; i < end; ++i) {
-      if (*name == scope_info->get(i)) {
-        int var = i - base;
-        *mode = scope_info->ContextLocalMode(var);
-        *init_flag = scope_info->ContextLocalInitFlag(var);
-        *maybe_assigned_flag = scope_info->ContextLocalMaybeAssignedFlag(var);
-        int result = Context::MIN_CONTEXT_SLOTS + var;
-        DCHECK(result < scope_info->ContextLength());
-        return result;
-      }
-    }
-  }
-  return -1;
-}
-
-
 String* ScopeInfo::ContextSlotName(int slot_index) {
   int const var = slot_index - Context::MIN_CONTEXT_SLOTS;
   DCHECK_LE(0, var);
-  DCHECK_LT(var, ContextLocalCount() + ContextGlobalCount());
+  DCHECK_LT(var, ContextLocalCount());
   return ContextLocalName(var);
 }
 
@@ -572,8 +720,8 @@
     // last declaration of that parameter is used
     // inside a function (and thus we need to look
     // at the last index). Was bug# 1110337.
-    int start = ParameterEntriesIndex();
-    int end = ParameterEntriesIndex() + ParameterCount();
+    int start = ParameterNamesIndex();
+    int end = start + ParameterCount();
     for (int i = end - 1; i >= start; --i) {
       if (name == get(i)) {
         return i - start;
@@ -586,19 +734,16 @@
 
 int ScopeInfo::ReceiverContextSlotIndex() {
   if (length() > 0 && ReceiverVariableField::decode(Flags()) == CONTEXT)
-    return Smi::cast(get(ReceiverEntryIndex()))->value();
+    return Smi::cast(get(ReceiverInfoIndex()))->value();
   return -1;
 }
 
-
-int ScopeInfo::FunctionContextSlotIndex(String* name, VariableMode* mode) {
+int ScopeInfo::FunctionContextSlotIndex(String* name) {
   DCHECK(name->IsInternalizedString());
-  DCHECK(mode != NULL);
   if (length() > 0) {
     if (FunctionVariableField::decode(Flags()) == CONTEXT &&
         FunctionName() == name) {
-      *mode = FunctionVariableMode::decode(Flags());
-      return Smi::cast(get(FunctionNameEntryIndex() + 1))->value();
+      return Smi::cast(get(FunctionNameInfoIndex() + 1))->value();
     }
   }
   return -1;
@@ -609,51 +754,45 @@
   return FunctionKindField::decode(Flags());
 }
 
-
-int ScopeInfo::ParameterEntriesIndex() {
-  DCHECK(length() > 0);
+int ScopeInfo::ParameterNamesIndex() {
+  DCHECK_LT(0, length());
   return kVariablePartIndex;
 }
 
 
 int ScopeInfo::StackLocalFirstSlotIndex() {
-  return ParameterEntriesIndex() + ParameterCount();
+  return ParameterNamesIndex() + ParameterCount();
 }
 
+int ScopeInfo::StackLocalNamesIndex() { return StackLocalFirstSlotIndex() + 1; }
 
-int ScopeInfo::StackLocalEntriesIndex() {
-  return StackLocalFirstSlotIndex() + 1;
+int ScopeInfo::ContextLocalNamesIndex() {
+  return StackLocalNamesIndex() + StackLocalCount();
 }
 
-
-int ScopeInfo::ContextLocalNameEntriesIndex() {
-  return StackLocalEntriesIndex() + StackLocalCount();
+int ScopeInfo::ContextLocalInfosIndex() {
+  return ContextLocalNamesIndex() + ContextLocalCount();
 }
 
-
-int ScopeInfo::ContextGlobalNameEntriesIndex() {
-  return ContextLocalNameEntriesIndex() + ContextLocalCount();
+int ScopeInfo::ReceiverInfoIndex() {
+  return ContextLocalInfosIndex() + ContextLocalCount();
 }
 
-
-int ScopeInfo::ContextLocalInfoEntriesIndex() {
-  return ContextGlobalNameEntriesIndex() + ContextGlobalCount();
+int ScopeInfo::FunctionNameInfoIndex() {
+  return ReceiverInfoIndex() + (HasAllocatedReceiver() ? 1 : 0);
 }
 
-
-int ScopeInfo::ContextGlobalInfoEntriesIndex() {
-  return ContextLocalInfoEntriesIndex() + ContextLocalCount();
+int ScopeInfo::OuterScopeInfoIndex() {
+  return FunctionNameInfoIndex() + (HasFunctionName() ? 2 : 0);
 }
 
-
-int ScopeInfo::ReceiverEntryIndex() {
-  return ContextGlobalInfoEntriesIndex() + ContextGlobalCount();
+int ScopeInfo::ModuleInfoIndex() {
+  return OuterScopeInfoIndex() + (HasOuterScopeInfo() ? 1 : 0);
 }
 
+int ScopeInfo::ModuleVariableCountIndex() { return ModuleInfoIndex() + 1; }
 
-int ScopeInfo::FunctionNameEntryIndex() {
-  return ReceiverEntryIndex() + (HasAllocatedReceiver() ? 1 : 0);
-}
+int ScopeInfo::ModuleVariablesIndex() { return ModuleVariableCountIndex() + 1; }
 
 #ifdef DEBUG
 
@@ -686,19 +825,84 @@
   PrintF("{");
 
   if (length() > 0) {
-    PrintList("parameters", 0, ParameterEntriesIndex(),
-              ParameterEntriesIndex() + ParameterCount(), this);
-    PrintList("stack slots", 0, StackLocalEntriesIndex(),
-              StackLocalEntriesIndex() + StackLocalCount(), this);
+    PrintList("parameters", 0, ParameterNamesIndex(),
+              ParameterNamesIndex() + ParameterCount(), this);
+    PrintList("stack slots", 0, StackLocalNamesIndex(),
+              StackLocalNamesIndex() + StackLocalCount(), this);
     PrintList("context slots", Context::MIN_CONTEXT_SLOTS,
-              ContextLocalNameEntriesIndex(),
-              ContextLocalNameEntriesIndex() + ContextLocalCount(), this);
+              ContextLocalNamesIndex(),
+              ContextLocalNamesIndex() + ContextLocalCount(), this);
+    // TODO(neis): Print module stuff if present.
   }
 
   PrintF("}\n");
 }
 #endif  // DEBUG
 
+Handle<ModuleInfoEntry> ModuleInfoEntry::New(Isolate* isolate,
+                                             Handle<Object> export_name,
+                                             Handle<Object> local_name,
+                                             Handle<Object> import_name,
+                                             Handle<Object> module_request) {
+  Handle<ModuleInfoEntry> result = isolate->factory()->NewModuleInfoEntry();
+  result->set(kExportNameIndex, *export_name);
+  result->set(kLocalNameIndex, *local_name);
+  result->set(kImportNameIndex, *import_name);
+  result->set(kModuleRequestIndex, *module_request);
+  return result;
+}
+
+Handle<ModuleInfo> ModuleInfo::New(Isolate* isolate, Zone* zone,
+                                   ModuleDescriptor* descr) {
+  // Serialize module requests.
+  Handle<FixedArray> module_requests = isolate->factory()->NewFixedArray(
+      static_cast<int>(descr->module_requests().size()));
+  for (const auto& elem : descr->module_requests()) {
+    module_requests->set(elem.second, *elem.first->string());
+  }
+
+  // Serialize special exports.
+  Handle<FixedArray> special_exports =
+      isolate->factory()->NewFixedArray(descr->special_exports().length());
+  {
+    int i = 0;
+    for (auto entry : descr->special_exports()) {
+      special_exports->set(i++, *entry->Serialize(isolate));
+    }
+  }
+
+  // Serialize namespace imports.
+  Handle<FixedArray> namespace_imports =
+      isolate->factory()->NewFixedArray(descr->namespace_imports().length());
+  {
+    int i = 0;
+    for (auto entry : descr->namespace_imports()) {
+      namespace_imports->set(i++, *entry->Serialize(isolate));
+    }
+  }
+
+  // Serialize regular exports.
+  Handle<FixedArray> regular_exports =
+      descr->SerializeRegularExports(isolate, zone);
+
+  // Serialize regular imports.
+  Handle<FixedArray> regular_imports = isolate->factory()->NewFixedArray(
+      static_cast<int>(descr->regular_imports().size()));
+  {
+    int i = 0;
+    for (const auto& elem : descr->regular_imports()) {
+      regular_imports->set(i++, *elem.second->Serialize(isolate));
+    }
+  }
+
+  Handle<ModuleInfo> result = isolate->factory()->NewModuleInfo();
+  result->set(kModuleRequestsIndex, *module_requests);
+  result->set(kSpecialExportsIndex, *special_exports);
+  result->set(kRegularExportsIndex, *regular_exports);
+  result->set(kNamespaceImportsIndex, *namespace_imports);
+  result->set(kRegularImportsIndex, *regular_imports);
+  return result;
+}
 
 }  // namespace internal
 }  // namespace v8
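
The *Index() accessors at the end of scopeinfo.cc describe one flat array: each section's start is the previous section's start plus that section's length, and the module variables section is a run of fixed-length [name, index, properties] triples that ModuleIndex scans linearly. Below is a simplified sketch of that layout over a plain std::vector; the Slot struct and FlatScopeInfo class are invented for illustration and assume nothing about V8's tagged-value representation:

#include <cassert>
#include <string>
#include <vector>

// Grossly simplified "slot": either a string payload or a small integer,
// standing in for the tagged values a real ScopeInfo stores.
struct Slot {
  std::string str;
  int smi = 0;
};

// Fixed-stride module variable entry, mirroring kModuleVariableNameOffset,
// kModuleVariableIndexOffset and kModuleVariablePropertiesOffset above.
constexpr int kNameOffset = 0;
constexpr int kIndexOffset = 1;
constexpr int kPropertiesOffset = 2;
constexpr int kEntryLength = 3;

class FlatScopeInfo {
 public:
  FlatScopeInfo(std::vector<std::string> parameter_names,
                std::vector<std::string> stack_local_names)
      : parameter_count_(static_cast<int>(parameter_names.size())),
        stack_local_count_(static_cast<int>(stack_local_names.size())) {
    for (const auto& name : parameter_names) slots_.push_back({name, 0});
    for (const auto& name : stack_local_names) slots_.push_back({name, 0});
  }

  // Chained section offsets, in the style of ScopeInfo::*Index().
  int ParameterNamesIndex() const { return 0; }
  int StackLocalNamesIndex() const {
    return ParameterNamesIndex() + parameter_count_;
  }
  int ModuleVariablesIndex() const {
    return StackLocalNamesIndex() + stack_local_count_;
  }

  void AddModuleVariable(const std::string& name, int index, int properties) {
    int entry = static_cast<int>(slots_.size());
    slots_.resize(entry + kEntryLength);
    slots_[entry + kNameOffset] = {name, 0};
    slots_[entry + kIndexOffset] = {"", index};
    slots_[entry + kPropertiesOffset] = {"", properties};
  }

  // Linear scan over fixed-length entries, as in ScopeInfo::ModuleIndex.
  int ModuleIndex(const std::string& name) const {
    for (int entry = ModuleVariablesIndex();
         entry + kEntryLength <= static_cast<int>(slots_.size());
         entry += kEntryLength) {
      if (slots_[entry + kNameOffset].str == name) {
        return slots_[entry + kIndexOffset].smi;
      }
    }
    return -1;
  }

 private:
  int parameter_count_;
  int stack_local_count_;
  std::vector<Slot> slots_;
};

int main() {
  FlatScopeInfo info({"p0", "p1"}, {"tmp"});
  info.AddModuleVariable("exported", 7, /*properties=*/0);
  assert(info.ModuleVariablesIndex() == 3);
  assert(info.ModuleIndex("exported") == 7);
  assert(info.ModuleIndex("missing") == -1);
  return 0;
}
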
diff --git a/src/ast/scopeinfo.h b/src/ast/scopeinfo.h
deleted file mode 100644
index 515c88b..0000000
--- a/src/ast/scopeinfo.h
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_AST_SCOPEINFO_H_
-#define V8_AST_SCOPEINFO_H_
-
-#include "src/allocation.h"
-#include "src/ast/modules.h"
-#include "src/ast/variables.h"
-
-namespace v8 {
-namespace internal {
-
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_AST_SCOPEINFO_H_
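
ScopeInfo packs its per-scope booleans and enums into a single flags word through the *Field::encode/decode helpers used in Create, CreateForWithScope and CreateGlobalThisBinding. A self-contained sketch of that bit-field style follows; the BitField template is hand-rolled rather than V8's actual class, and the field widths are made up:

#include <cassert>
#include <cstdint>

// Hand-rolled stand-in for the helpers behind ScopeTypeField, CallsEvalField,
// HasOuterScopeInfoField, IsDebugEvaluateScopeField, etc.
template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((uint32_t{1} << kSize) - 1) << kShift;
  static uint32_t encode(T value) {
    uint32_t raw = static_cast<uint32_t>(value);
    assert((raw & ~((uint32_t{1} << kSize) - 1)) == 0);  // value must fit
    return raw << kShift;
  }
  static T decode(uint32_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
};

enum ScopeType { SCRIPT_SCOPE, MODULE_SCOPE, FUNCTION_SCOPE, WITH_SCOPE };

// Invented field layout for illustration only.
using ScopeTypeField = BitField<ScopeType, 0, 4>;
using CallsEvalField = BitField<bool, 4, 1>;
using HasOuterScopeInfoField = BitField<bool, 5, 1>;
using IsDebugEvaluateScopeField = BitField<bool, 6, 1>;

int main() {
  uint32_t flags = ScopeTypeField::encode(WITH_SCOPE) |
                   CallsEvalField::encode(false) |
                   HasOuterScopeInfoField::encode(true) |
                   IsDebugEvaluateScopeField::encode(false);
  assert(ScopeTypeField::decode(flags) == WITH_SCOPE);
  assert(HasOuterScopeInfoField::decode(flags));
  // SetIsDebugEvaluateScope above simply ORs the single bit in afterwards:
  flags |= IsDebugEvaluateScopeField::encode(true);
  assert(IsDebugEvaluateScopeField::decode(flags));
  return 0;
}
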
diff --git a/src/ast/scopes.cc b/src/ast/scopes.cc
index 7689786..c531ef5 100644
--- a/src/ast/scopes.cc
+++ b/src/ast/scopes.cc
@@ -7,6 +7,7 @@
 #include <set>
 
 #include "src/accessors.h"
+#include "src/ast/ast.h"
 #include "src/bootstrapper.h"
 #include "src/messages.h"
 #include "src/parsing/parse-info.h"
@@ -24,11 +25,11 @@
 //       this is ensured.
 
 VariableMap::VariableMap(Zone* zone)
-    : ZoneHashMap(ZoneHashMap::PointersMatch, 8, ZoneAllocationPolicy(zone)) {}
+    : ZoneHashMap(8, ZoneAllocationPolicy(zone)) {}
 
 Variable* VariableMap::Declare(Zone* zone, Scope* scope,
                                const AstRawString* name, VariableMode mode,
-                               Variable::Kind kind,
+                               VariableKind kind,
                                InitializationFlag initialization_flag,
                                MaybeAssignedFlag maybe_assigned_flag,
                                bool* added) {
@@ -41,13 +42,27 @@
   if (added) *added = p->value == nullptr;
   if (p->value == nullptr) {
     // The variable has not been declared yet -> insert it.
-    DCHECK(p->key == name);
+    DCHECK_EQ(name, p->key);
     p->value = new (zone) Variable(scope, name, mode, kind, initialization_flag,
                                    maybe_assigned_flag);
   }
   return reinterpret_cast<Variable*>(p->value);
 }
 
+void VariableMap::Remove(Variable* var) {
+  const AstRawString* name = var->raw_name();
+  ZoneHashMap::Remove(const_cast<AstRawString*>(name), name->hash());
+}
+
+void VariableMap::Add(Zone* zone, Variable* var) {
+  const AstRawString* name = var->raw_name();
+  Entry* p =
+      ZoneHashMap::LookupOrInsert(const_cast<AstRawString*>(name), name->hash(),
+                                  ZoneAllocationPolicy(zone));
+  DCHECK_NULL(p->value);
+  DCHECK_EQ(name, p->key);
+  p->value = var;
+}
 
 Variable* VariableMap::Lookup(const AstRawString* name) {
   Entry* p = ZoneHashMap::Lookup(const_cast<AstRawString*>(name), name->hash());
@@ -60,7 +75,7 @@
 }
 
 SloppyBlockFunctionMap::SloppyBlockFunctionMap(Zone* zone)
-    : ZoneHashMap(ZoneHashMap::PointersMatch, 8, ZoneAllocationPolicy(zone)) {}
+    : ZoneHashMap(8, ZoneAllocationPolicy(zone)) {}
 
 void SloppyBlockFunctionMap::Declare(Zone* zone, const AstRawString* name,
                                      SloppyBlockFunctionStatement* stmt) {
@@ -81,7 +96,7 @@
     : zone_(zone),
       outer_scope_(nullptr),
       variables_(zone),
-      ordered_variables_(4, zone),
+      locals_(4, zone),
       decls_(4, zone),
       scope_type_(SCRIPT_SCOPE) {
   SetDefaults();
@@ -91,7 +106,7 @@
     : zone_(zone),
       outer_scope_(outer_scope),
       variables_(zone),
-      ordered_variables_(4, zone),
+      locals_(4, zone),
       decls_(4, zone),
       scope_type_(scope_type) {
   DCHECK_NE(SCRIPT_SCOPE, scope_type);
@@ -106,15 +121,21 @@
     : outer_scope_(scope),
       top_inner_scope_(scope->inner_scope_),
       top_unresolved_(scope->unresolved_),
-      top_temp_(scope->GetClosureScope()->temps()->length()) {}
+      top_local_(scope->GetClosureScope()->locals_.length()),
+      top_decl_(scope->GetClosureScope()->decls_.length()) {}
 
-DeclarationScope::DeclarationScope(Zone* zone)
+DeclarationScope::DeclarationScope(Zone* zone,
+                                   AstValueFactory* ast_value_factory)
     : Scope(zone),
       function_kind_(kNormalFunction),
-      temps_(4, zone),
       params_(4, zone),
       sloppy_block_function_map_(zone) {
+  DCHECK_EQ(scope_type_, SCRIPT_SCOPE);
   SetDefaults();
+
+  // Make sure that if we don't find the global 'this', it won't be declared as
+  // a regular dynamic global by predeclaring it with the right variable kind.
+  DeclareDynamicGlobal(ast_value_factory->this_string(), THIS_VARIABLE);
 }
 
 DeclarationScope::DeclarationScope(Zone* zone, Scope* outer_scope,
@@ -122,73 +143,117 @@
                                    FunctionKind function_kind)
     : Scope(zone, outer_scope, scope_type),
       function_kind_(function_kind),
-      temps_(4, zone),
       params_(4, zone),
       sloppy_block_function_map_(zone) {
+  DCHECK_NE(scope_type, SCRIPT_SCOPE);
   SetDefaults();
   asm_function_ = outer_scope_->IsAsmModule();
 }
 
-ModuleScope::ModuleScope(Zone* zone, DeclarationScope* script_scope,
+ModuleScope::ModuleScope(DeclarationScope* script_scope,
                          AstValueFactory* ast_value_factory)
-    : DeclarationScope(zone, script_scope, MODULE_SCOPE) {
+    : DeclarationScope(ast_value_factory->zone(), script_scope, MODULE_SCOPE,
+                       kModule) {
+  Zone* zone = ast_value_factory->zone();
   module_descriptor_ = new (zone) ModuleDescriptor(zone);
   set_language_mode(STRICT);
   DeclareThis(ast_value_factory);
 }
 
-Scope::Scope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
-             Handle<ScopeInfo> scope_info)
+ModuleScope::ModuleScope(Isolate* isolate, Handle<ScopeInfo> scope_info,
+                         AstValueFactory* avfactory)
+    : DeclarationScope(avfactory->zone(), MODULE_SCOPE, scope_info) {
+  Zone* zone = avfactory->zone();
+  ModuleInfo* module_info = scope_info->ModuleDescriptorInfo();
+
+  set_language_mode(STRICT);
+  module_descriptor_ = new (zone) ModuleDescriptor(zone);
+
+  // Deserialize special exports.
+  Handle<FixedArray> special_exports(module_info->special_exports(), isolate);
+  for (int i = 0, n = special_exports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> serialized_entry(
+        ModuleInfoEntry::cast(special_exports->get(i)), isolate);
+    module_descriptor_->AddSpecialExport(
+        ModuleDescriptor::Entry::Deserialize(isolate, avfactory,
+                                             serialized_entry),
+        avfactory->zone());
+  }
+
+  // Deserialize regular exports.
+  Handle<FixedArray> regular_exports(module_info->regular_exports(), isolate);
+  module_descriptor_->DeserializeRegularExports(isolate, avfactory,
+                                                regular_exports);
+
+  // Deserialize namespace imports.
+  Handle<FixedArray> namespace_imports(module_info->namespace_imports(),
+                                       isolate);
+  for (int i = 0, n = namespace_imports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> serialized_entry(
+        ModuleInfoEntry::cast(namespace_imports->get(i)), isolate);
+    module_descriptor_->AddNamespaceImport(
+        ModuleDescriptor::Entry::Deserialize(isolate, avfactory,
+                                             serialized_entry),
+        avfactory->zone());
+  }
+
+  // Deserialize regular imports.
+  Handle<FixedArray> regular_imports(module_info->regular_imports(), isolate);
+  for (int i = 0, n = regular_imports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> serialized_entry(
+        ModuleInfoEntry::cast(regular_imports->get(i)), isolate);
+    module_descriptor_->AddRegularImport(ModuleDescriptor::Entry::Deserialize(
+        isolate, avfactory, serialized_entry));
+  }
+}
+
+Scope::Scope(Zone* zone, ScopeType scope_type, Handle<ScopeInfo> scope_info)
     : zone_(zone),
       outer_scope_(nullptr),
       variables_(zone),
-      ordered_variables_(0, zone),
+      locals_(0, zone),
       decls_(0, zone),
       scope_info_(scope_info),
       scope_type_(scope_type) {
+  DCHECK(!scope_info.is_null());
   SetDefaults();
 #ifdef DEBUG
   already_resolved_ = true;
 #endif
-  if (scope_type == WITH_SCOPE) {
-    DCHECK(scope_info.is_null());
-  } else {
-    if (scope_info->CallsEval()) RecordEvalCall();
-    set_language_mode(scope_info->language_mode());
-    num_heap_slots_ = scope_info->ContextLength();
-  }
+  if (scope_info->CallsEval()) RecordEvalCall();
+  set_language_mode(scope_info->language_mode());
+  num_heap_slots_ = scope_info->ContextLength();
   DCHECK_LE(Context::MIN_CONTEXT_SLOTS, num_heap_slots_);
-
-  if (inner_scope != nullptr) AddInnerScope(inner_scope);
 }
 
-DeclarationScope::DeclarationScope(Zone* zone, Scope* inner_scope,
-                                   ScopeType scope_type,
+DeclarationScope::DeclarationScope(Zone* zone, ScopeType scope_type,
                                    Handle<ScopeInfo> scope_info)
-    : Scope(zone, inner_scope, scope_type, scope_info),
+    : Scope(zone, scope_type, scope_info),
       function_kind_(scope_info->function_kind()),
-      temps_(0, zone),
       params_(0, zone),
       sloppy_block_function_map_(zone) {
+  DCHECK_NE(scope_type, SCRIPT_SCOPE);
   SetDefaults();
 }
 
-Scope::Scope(Zone* zone, Scope* inner_scope,
-             const AstRawString* catch_variable_name)
+Scope::Scope(Zone* zone, const AstRawString* catch_variable_name,
+             Handle<ScopeInfo> scope_info)
     : zone_(zone),
       outer_scope_(nullptr),
       variables_(zone),
-      ordered_variables_(0, zone),
+      locals_(0, zone),
       decls_(0, zone),
+      scope_info_(scope_info),
       scope_type_(CATCH_SCOPE) {
   SetDefaults();
 #ifdef DEBUG
   already_resolved_ = true;
 #endif
-  if (inner_scope != nullptr) AddInnerScope(inner_scope);
-  Variable* variable =
-      variables_.Declare(zone, this, catch_variable_name, VAR, Variable::NORMAL,
-                         kCreatedInitialized);
+  // Cache the catch variable, even though it's also available via the
+  // scope_info, as the parser expects that a catch scope always has the catch
+  // variable as its first and only variable.
+  Variable* variable = Declare(zone, this, catch_variable_name, VAR,
+                               NORMAL_VARIABLE, kCreatedInitialized);
   AllocateHeapSlot(variable);
 }
 
@@ -200,31 +265,30 @@
   force_eager_compilation_ = false;
   has_arguments_parameter_ = false;
   scope_uses_super_property_ = false;
+  has_rest_ = false;
   receiver_ = nullptr;
   new_target_ = nullptr;
   function_ = nullptr;
   arguments_ = nullptr;
   this_function_ = nullptr;
   arity_ = 0;
-  rest_index_ = -1;
 }
 
 void Scope::SetDefaults() {
 #ifdef DEBUG
   scope_name_ = nullptr;
   already_resolved_ = false;
+  needs_migration_ = false;
 #endif
   inner_scope_ = nullptr;
   sibling_ = nullptr;
   unresolved_ = nullptr;
-  dynamics_ = nullptr;
 
   start_position_ = kNoSourcePosition;
   end_position_ = kNoSourcePosition;
 
   num_stack_slots_ = 0;
   num_heap_slots_ = Context::MIN_CONTEXT_SLOTS;
-  num_global_slots_ = 0;
 
   set_language_mode(SLOPPY);
 
@@ -237,6 +301,8 @@
   force_context_allocation_ = false;
 
   is_declaration_scope_ = false;
+
+  is_lazily_parsed_ = false;
 }
 
 bool Scope::HasSimpleParameters() {
@@ -244,6 +310,16 @@
   return !scope->is_function_scope() || scope->has_simple_parameters();
 }
 
+void DeclarationScope::set_asm_module() {
+  asm_module_ = true;
+  // Mark any existing inner function scopes as asm function scopes.
+  for (Scope* inner = inner_scope_; inner != nullptr; inner = inner->sibling_) {
+    if (inner->is_function_scope()) {
+      inner->AsDeclarationScope()->set_asm_function();
+    }
+  }
+}
+
 bool Scope::IsAsmModule() const {
   return is_function_scope() && AsDeclarationScope()->asm_module();
 }
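
DeclarationScope::set_asm_module above walks the intrusive child list directly (the parent's inner_scope_ pointer plus each child's sibling_ link) instead of a separate container, and only touches direct children. The same traversal shape, sketched with a hypothetical node type rather than V8's Scope class:

#include <cassert>

// Hypothetical scope node with an intrusive child list: a parent points at
// its first child via inner_, and children are chained through sibling_.
struct Node {
  Node* inner_ = nullptr;
  Node* sibling_ = nullptr;
  bool is_function_ = false;
  bool asm_function_ = false;
};

// Marks every direct child that is a function scope, as set_asm_module does.
void MarkInnerFunctionsAsAsm(Node* scope) {
  for (Node* inner = scope->inner_; inner != nullptr; inner = inner->sibling_) {
    if (inner->is_function_) inner->asm_function_ = true;
  }
}

int main() {
  Node parent, f1, block, f2;
  f1.is_function_ = true;
  f2.is_function_ = true;
  // Child list: parent -> f1 -> block -> f2.
  parent.inner_ = &f1;
  f1.sibling_ = &block;
  block.sibling_ = &f2;
  MarkInnerFunctionsAsAsm(&parent);
  assert(f1.asm_function_ && f2.asm_function_ && !block.asm_function_);
  return 0;
}
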
@@ -253,137 +329,77 @@
 }
 
 Scope* Scope::DeserializeScopeChain(Isolate* isolate, Zone* zone,
-                                    Context* context,
+                                    ScopeInfo* scope_info,
                                     DeclarationScope* script_scope,
                                     AstValueFactory* ast_value_factory,
                                     DeserializationMode deserialization_mode) {
   // Reconstruct the outer scope chain from a closure's context chain.
   Scope* current_scope = nullptr;
   Scope* innermost_scope = nullptr;
-  while (!context->IsNativeContext()) {
-    if (context->IsWithContext() || context->IsDebugEvaluateContext()) {
+  Scope* outer_scope = nullptr;
+  while (scope_info) {
+    if (scope_info->scope_type() == WITH_SCOPE) {
       // For scope analysis, debug-evaluate is equivalent to a with scope.
-      Scope* with_scope = new (zone)
-          Scope(zone, current_scope, WITH_SCOPE, Handle<ScopeInfo>());
+      outer_scope = new (zone) Scope(zone, WITH_SCOPE, handle(scope_info));
+
       // TODO(yangguo): Remove once debug-evaluate properly keeps track of the
       // function scope in which we are evaluating.
-      if (context->IsDebugEvaluateContext()) {
-        with_scope->set_is_debug_evaluate_scope();
+      if (scope_info->IsDebugEvaluateScope()) {
+        outer_scope->set_is_debug_evaluate_scope();
       }
-      current_scope = with_scope;
-    } else if (context->IsScriptContext()) {
-      Handle<ScopeInfo> scope_info(context->scope_info(), isolate);
-      DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
-      current_scope = new (zone)
-          DeclarationScope(zone, current_scope, SCRIPT_SCOPE, scope_info);
-    } else if (context->IsFunctionContext()) {
-      Handle<ScopeInfo> scope_info(context->closure()->shared()->scope_info(),
-                                   isolate);
+    } else if (scope_info->scope_type() == SCRIPT_SCOPE) {
+      // If we reach a script scope, it's the outermost scope. Install the
+      // scope info of this script context onto the existing script scope to
+      // avoid nesting script scopes.
+      if (deserialization_mode == DeserializationMode::kIncludingVariables) {
+        script_scope->SetScriptScopeInfo(handle(scope_info));
+      }
+      DCHECK(!scope_info->HasOuterScopeInfo());
+      break;
+    } else if (scope_info->scope_type() == FUNCTION_SCOPE ||
+               scope_info->scope_type() == EVAL_SCOPE) {
       // TODO(neis): For an eval scope, we currently create an ordinary function
       // context.  This is wrong and needs to be fixed.
       // https://bugs.chromium.org/p/v8/issues/detail?id=5295
-      DCHECK(scope_info->scope_type() == FUNCTION_SCOPE ||
-             scope_info->scope_type() == EVAL_SCOPE);
-      DeclarationScope* function_scope = new (zone)
-          DeclarationScope(zone, current_scope, FUNCTION_SCOPE, scope_info);
-      if (scope_info->IsAsmFunction()) function_scope->set_asm_function();
-      if (scope_info->IsAsmModule()) function_scope->set_asm_module();
-      current_scope = function_scope;
-    } else if (context->IsBlockContext()) {
-      Handle<ScopeInfo> scope_info(context->scope_info(), isolate);
-      DCHECK_EQ(scope_info->scope_type(), BLOCK_SCOPE);
+      outer_scope =
+          new (zone) DeclarationScope(zone, FUNCTION_SCOPE, handle(scope_info));
+      if (scope_info->IsAsmFunction())
+        outer_scope->AsDeclarationScope()->set_asm_function();
+      if (scope_info->IsAsmModule())
+        outer_scope->AsDeclarationScope()->set_asm_module();
+    } else if (scope_info->scope_type() == BLOCK_SCOPE) {
       if (scope_info->is_declaration_scope()) {
-        current_scope = new (zone)
-            DeclarationScope(zone, current_scope, BLOCK_SCOPE, scope_info);
+        outer_scope =
+            new (zone) DeclarationScope(zone, BLOCK_SCOPE, handle(scope_info));
       } else {
-        current_scope =
-            new (zone) Scope(zone, current_scope, BLOCK_SCOPE, scope_info);
+        outer_scope = new (zone) Scope(zone, BLOCK_SCOPE, handle(scope_info));
       }
+    } else if (scope_info->scope_type() == MODULE_SCOPE) {
+      outer_scope = new (zone)
+          ModuleScope(isolate, handle(scope_info), ast_value_factory);
     } else {
-      DCHECK(context->IsCatchContext());
-      String* name = context->catch_name();
-      current_scope =
-          new (zone) Scope(zone, current_scope,
-                           ast_value_factory->GetString(handle(name, isolate)));
+      DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
+      DCHECK_EQ(scope_info->LocalCount(), 1);
+      String* name = scope_info->LocalName(0);
+      outer_scope = new (zone)
+          Scope(zone, ast_value_factory->GetString(handle(name, isolate)),
+                handle(scope_info));
     }
-    if (deserialization_mode == DeserializationMode::kDeserializeOffHeap) {
-      current_scope->DeserializeScopeInfo(isolate, ast_value_factory);
+    if (deserialization_mode == DeserializationMode::kScopesOnly) {
+      outer_scope->scope_info_ = Handle<ScopeInfo>::null();
     }
+    if (current_scope != nullptr) {
+      outer_scope->AddInnerScope(current_scope);
+    }
+    current_scope = outer_scope;
     if (innermost_scope == nullptr) innermost_scope = current_scope;
-    context = context->previous();
+    scope_info = scope_info->HasOuterScopeInfo() ? scope_info->OuterScopeInfo()
+                                                 : nullptr;
   }
 
+  if (innermost_scope == nullptr) return script_scope;
   script_scope->AddInnerScope(current_scope);
-  script_scope->PropagateScopeInfo();
-  return (innermost_scope == NULL) ? script_scope : innermost_scope;
-}
-
-void Scope::DeserializeScopeInfo(Isolate* isolate,
-                                 AstValueFactory* ast_value_factory) {
-  if (scope_info_.is_null()) return;
-
-  DCHECK(ThreadId::Current().Equals(isolate->thread_id()));
-
-  std::set<const AstRawString*> names_seen;
-  // Internalize context local & globals variables.
-  for (int var = 0; var < scope_info_->ContextLocalCount() +
-                              scope_info_->ContextGlobalCount();
-       ++var) {
-    Handle<String> name_handle(scope_info_->ContextLocalName(var), isolate);
-    const AstRawString* name = ast_value_factory->GetString(name_handle);
-    if (!names_seen.insert(name).second) continue;
-    int index = Context::MIN_CONTEXT_SLOTS + var;
-    VariableMode mode = scope_info_->ContextLocalMode(var);
-    InitializationFlag init_flag = scope_info_->ContextLocalInitFlag(var);
-    MaybeAssignedFlag maybe_assigned_flag =
-        scope_info_->ContextLocalMaybeAssignedFlag(var);
-    VariableLocation location = var < scope_info_->ContextLocalCount()
-                                    ? VariableLocation::CONTEXT
-                                    : VariableLocation::GLOBAL;
-    Variable::Kind kind = Variable::NORMAL;
-    if (index == scope_info_->ReceiverContextSlotIndex()) {
-      kind = Variable::THIS;
-    }
-
-    Variable* result = variables_.Declare(zone(), this, name, mode, kind,
-                                          init_flag, maybe_assigned_flag);
-    result->AllocateTo(location, index);
-  }
-
-  // We must read parameters from the end since for multiply declared
-  // parameters the value of the last declaration of that parameter is used
-  // inside a function (and thus we need to look at the last index). Was bug#
-  // 1110337.
-  for (int index = scope_info_->ParameterCount() - 1; index >= 0; --index) {
-    Handle<String> name_handle(scope_info_->ParameterName(index), isolate);
-    const AstRawString* name = ast_value_factory->GetString(name_handle);
-    if (!names_seen.insert(name).second) continue;
-
-    VariableMode mode = DYNAMIC;
-    InitializationFlag init_flag = kCreatedInitialized;
-    MaybeAssignedFlag maybe_assigned_flag = kMaybeAssigned;
-    VariableLocation location = VariableLocation::LOOKUP;
-    Variable::Kind kind = Variable::NORMAL;
-
-    Variable* result = variables_.Declare(zone(), this, name, mode, kind,
-                                          init_flag, maybe_assigned_flag);
-    result->AllocateTo(location, index);
-  }
-
-  // Internalize function proxy for this scope.
-  if (scope_info_->HasFunctionName()) {
-    Handle<String> name_handle(scope_info_->FunctionName(), isolate);
-    const AstRawString* name = ast_value_factory->GetString(name_handle);
-    VariableMode mode;
-    int index = scope_info_->FunctionContextSlotIndex(*name_handle, &mode);
-    if (index >= 0) {
-      Variable* result = AsDeclarationScope()->DeclareFunctionVar(name);
-      DCHECK_EQ(mode, result->mode());
-      result->AllocateTo(VariableLocation::CONTEXT, index);
-    }
-  }
-
-  scope_info_ = Handle<ScopeInfo>::null();
+  return innermost_scope;
 }
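// --- Editor's illustrative sketch (not part of this patch) -----------------
// DeserializeScopeChain() above now walks a ScopeInfo chain from the
// innermost info outwards (via OuterScopeInfo), materializes one scope per
// info, links each newly created outer scope to the scopes built so far, and
// finally hangs the reconstructed chain off the script scope, returning the
// innermost scope.  The control flow in isolation, with simplified stand-in
// types (ChainInfo, ChainScope and RebuildChain are hypothetical names):
struct ChainInfo {
  ChainInfo* outer = nullptr;
};
struct ChainScope {
  ChainScope* inner = nullptr;
  void AddInner(ChainScope* scope) { inner = scope; }
};

inline ChainScope* RebuildChain(ChainInfo* info, ChainScope* script_scope) {
  ChainScope* current = nullptr;    // innermost of the scopes built so far
  ChainScope* innermost = nullptr;  // scope created for the first info seen
  while (info != nullptr) {
    // One scope per ScopeInfo (zone-allocated in V8; plain new for brevity).
    ChainScope* outer = new ChainScope();
    if (current != nullptr) outer->AddInner(current);
    current = outer;
    if (innermost == nullptr) innermost = current;
    info = info->outer;
  }
  if (innermost == nullptr) return script_scope;
  script_scope->AddInner(current);
  return innermost;
}
// ---------------------------------------------------------------------------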
 
 DeclarationScope* Scope::AsDeclarationScope() {
@@ -410,10 +426,124 @@
   return is_declaration_scope() ? AsDeclarationScope()->num_parameters() : 0;
 }
 
-void Scope::Analyze(ParseInfo* info) {
+void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) {
+  DCHECK(is_sloppy(language_mode()));
+  DCHECK(is_function_scope() || is_eval_scope() || is_script_scope() ||
+         (is_block_scope() && outer_scope()->is_function_scope()));
+  DCHECK(HasSimpleParameters() || is_block_scope());
+  bool has_simple_parameters = HasSimpleParameters();
+  // For each variable that is used as a function declaration in a sloppy
+  // block:
+  SloppyBlockFunctionMap* map = sloppy_block_function_map();
+  for (ZoneHashMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) {
+    AstRawString* name = static_cast<AstRawString*>(p->key);
+
+    // The variable is only hoisted if it wouldn't conflict with a lexical
+    // declaration or a parameter.
+
+    // Check if there's a conflict with a parameter.
+    // This depends on the fact that functions always have a scope solely to
+    // hold complex parameters, and the names local to that scope are
+    // precisely the names of the parameters. IsDeclaredParameter(name) does
+    // not hold for names declared by complex parameters, nor are those
+    // bindings necessarily declared lexically, so we have to check for them
+    // explicitly. On the other hand, if there are no complex parameters,
+    // it is sufficient to just check IsDeclaredParameter.
+    if (!has_simple_parameters) {
+      if (outer_scope_->LookupLocal(name) != nullptr) {
+        continue;
+      }
+    } else {
+      if (IsDeclaredParameter(name)) {
+        continue;
+      }
+    }
+
+    bool var_created = false;
+
+    // Write in assignments to var for each block-scoped function declaration
+    auto delegates = static_cast<SloppyBlockFunctionStatement*>(p->value);
+
+    DeclarationScope* decl_scope = this;
+    while (decl_scope->is_eval_scope()) {
+      decl_scope = decl_scope->outer_scope()->GetDeclarationScope();
+    }
+    Scope* outer_scope = decl_scope->outer_scope();
+
+    for (SloppyBlockFunctionStatement* delegate = delegates;
+         delegate != nullptr; delegate = delegate->next()) {
+      // Check if there's a conflict with a lexical declaration
+      Scope* query_scope = delegate->scope()->outer_scope();
+      Variable* var = nullptr;
+      bool should_hoist = true;
+
+      // Note that we perform this loop for each delegate named 'name',
+      // which may duplicate work if those delegates share scopes.
+      // It is not sufficient to just do a Lookup on query_scope: for
+      // example, that does not prevent hoisting of the function in
+      // `{ let e; try {} catch (e) { function e(){} } }`
+      do {
+        var = query_scope->LookupLocal(name);
+        if (var != nullptr && IsLexicalVariableMode(var->mode())) {
+          should_hoist = false;
+          break;
+        }
+        query_scope = query_scope->outer_scope();
+      } while (query_scope != outer_scope);
+
+      if (!should_hoist) continue;
+
+      // Declare a var-style binding for the function in the outer scope
+      if (!var_created) {
+        var_created = true;
+        VariableProxy* proxy = factory->NewVariableProxy(name, NORMAL_VARIABLE);
+        Declaration* declaration =
+            factory->NewVariableDeclaration(proxy, this, kNoSourcePosition);
+        // Based on the preceding check, it doesn't matter what we pass as
+        // allow_harmony_restrictive_generators and
+        // sloppy_mode_block_scope_function_redefinition.
+        bool ok = true;
+        DeclareVariable(declaration, VAR,
+                        Variable::DefaultInitializationFlag(VAR), false,
+                        nullptr, &ok);
+        CHECK(ok);  // Based on the preceding check, this should not fail
+      }
+
+      Expression* assignment = factory->NewAssignment(
+          Token::ASSIGN, NewUnresolved(factory, name),
+          delegate->scope()->NewUnresolved(factory, name), kNoSourcePosition);
+      Statement* statement =
+          factory->NewExpressionStatement(assignment, kNoSourcePosition);
+      delegate->set_statement(statement);
+    }
+  }
+}
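// --- Editor's illustrative sketch (not part of this patch) -----------------
// The hoisting check above walks every scope between the block holding the
// sloppy-mode function declaration and the declaration scope that would
// receive the var binding; a lexical binding with the same name anywhere on
// that path blocks the hoist (cf. the `{ let e; try {} catch (e) { ... } }`
// comment).  A stand-alone sketch of that walk; HoistScope and ShouldHoist
// are hypothetical names:
#include <set>
#include <string>

struct HoistScope {
  HoistScope* outer = nullptr;
  std::set<std::string> lexical_names;  // let/const/class bindings
};

// |delegate_scope| is the block containing the declaration, |hoist_target|
// the declaration scope that would receive the var binding.
inline bool ShouldHoist(const std::string& name, HoistScope* delegate_scope,
                        HoistScope* hoist_target) {
  for (HoistScope* s = delegate_scope->outer; s != hoist_target->outer;
       s = s->outer) {
    if (s->lexical_names.count(name) != 0) return false;
  }
  return true;
}
// ---------------------------------------------------------------------------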
+
+void DeclarationScope::Analyze(ParseInfo* info, AnalyzeMode mode) {
   DCHECK(info->literal() != NULL);
   DeclarationScope* scope = info->literal()->scope();
 
+  Handle<ScopeInfo> outer_scope_info;
+  if (info->maybe_outer_scope_info().ToHandle(&outer_scope_info)) {
+    if (scope->outer_scope()) {
+      DeclarationScope* script_scope = new (info->zone())
+          DeclarationScope(info->zone(), info->ast_value_factory());
+      info->set_script_scope(script_scope);
+      scope->ReplaceOuterScope(Scope::DeserializeScopeChain(
+          info->isolate(), info->zone(), *outer_scope_info, script_scope,
+          info->ast_value_factory(),
+          Scope::DeserializationMode::kIncludingVariables));
+    } else {
+      DCHECK_EQ(outer_scope_info->scope_type(), SCRIPT_SCOPE);
+      scope->SetScriptScopeInfo(outer_scope_info);
+    }
+  }
+
+  if (scope->is_eval_scope() && is_sloppy(scope->language_mode())) {
+    AstNodeFactory factory(info->ast_value_factory());
+    scope->HoistSloppyBlockFunctions(&factory);
+  }
+
   // We are compiling one of three cases:
   // 1) top-level code,
   // 2) a function/eval/module on the top-level
@@ -422,10 +552,13 @@
          scope->outer_scope()->scope_type() == SCRIPT_SCOPE ||
          scope->outer_scope()->already_resolved_);
 
-  // Allocate the variables.
-  {
-    AstNodeFactory ast_node_factory(info->ast_value_factory());
-    scope->AllocateVariables(info, &ast_node_factory);
+  scope->AllocateVariables(info, mode);
+
+  // Ensuring that the outer script scope has a scope info avoids having a
+  // special case for native contexts vs. other contexts.
+  if (info->script_scope()->scope_info_.is_null()) {
+    info->script_scope()->scope_info_ =
+        handle(ScopeInfo::Empty(info->isolate()));
   }
 
 #ifdef DEBUG
@@ -446,41 +579,91 @@
   bool subclass_constructor = IsSubclassConstructor(function_kind_);
   Variable* var = Declare(
       zone(), this, ast_value_factory->this_string(),
-      subclass_constructor ? CONST : VAR, Variable::THIS,
+      subclass_constructor ? CONST : VAR, THIS_VARIABLE,
       subclass_constructor ? kNeedsInitialization : kCreatedInitialized);
   receiver_ = var;
 }
 
+void DeclarationScope::DeclareArguments(AstValueFactory* ast_value_factory) {
+  DCHECK(is_function_scope());
+  DCHECK(!is_arrow_scope());
+
+  arguments_ = LookupLocal(ast_value_factory->arguments_string());
+  if (arguments_ == nullptr) {
+    // Declare the 'arguments' variable, which exists in all non-arrow
+    // functions. Note that it might never be accessed, in which case it
+    // won't be allocated during variable allocation.
+    arguments_ = Declare(zone(), this, ast_value_factory->arguments_string(),
+                         VAR, NORMAL_VARIABLE, kCreatedInitialized);
+  } else if (IsLexicalVariableMode(arguments_->mode())) {
+    // Check if there's a lexically declared variable named 'arguments', to
+    // avoid redeclaration. See ES#sec-functiondeclarationinstantiation,
+    // step 20.
+    arguments_ = nullptr;
+  }
+}
+
 void DeclarationScope::DeclareDefaultFunctionVariables(
     AstValueFactory* ast_value_factory) {
   DCHECK(is_function_scope());
   DCHECK(!is_arrow_scope());
-  // Declare 'arguments' variable which exists in all non arrow functions.
-  // Note that it might never be accessed, in which case it won't be
-  // allocated during variable allocation.
-  arguments_ = Declare(zone(), this, ast_value_factory->arguments_string(), VAR,
-                       Variable::ARGUMENTS, kCreatedInitialized);
 
+  DeclareThis(ast_value_factory);
   new_target_ = Declare(zone(), this, ast_value_factory->new_target_string(),
-                        CONST, Variable::NORMAL, kCreatedInitialized);
+                        CONST, NORMAL_VARIABLE, kCreatedInitialized);
 
   if (IsConciseMethod(function_kind_) || IsClassConstructor(function_kind_) ||
       IsAccessorFunction(function_kind_)) {
     this_function_ =
         Declare(zone(), this, ast_value_factory->this_function_string(), CONST,
-                Variable::NORMAL, kCreatedInitialized);
+                NORMAL_VARIABLE, kCreatedInitialized);
   }
 }
 
 Variable* DeclarationScope::DeclareFunctionVar(const AstRawString* name) {
   DCHECK(is_function_scope());
   DCHECK_NULL(function_);
-  VariableMode mode = is_strict(language_mode()) ? CONST : CONST_LEGACY;
-  function_ = new (zone())
-      Variable(this, name, mode, Variable::NORMAL, kCreatedInitialized);
+  DCHECK_NULL(variables_.Lookup(name));
+  VariableKind kind = is_sloppy(language_mode()) ? SLOPPY_FUNCTION_NAME_VARIABLE
+                                                 : NORMAL_VARIABLE;
+  function_ =
+      new (zone()) Variable(this, name, CONST, kind, kCreatedInitialized);
+  if (calls_sloppy_eval()) {
+    NonLocal(name, DYNAMIC);
+  } else {
+    variables_.Add(zone(), function_);
+  }
   return function_;
 }
 
+bool Scope::HasBeenRemoved() const {
+  // TODO(neis): Store this information somewhere instead of calculating it.
+
+  if (!is_block_scope()) return false;  // Shortcut.
+
+  Scope* parent = outer_scope();
+  if (parent == nullptr) {
+    DCHECK(is_script_scope());
+    return false;
+  }
+
+  Scope* sibling = parent->inner_scope();
+  for (; sibling != nullptr; sibling = sibling->sibling()) {
+    if (sibling == this) return false;
+  }
+
+  DCHECK_NULL(inner_scope_);
+  return true;
+}
+
+Scope* Scope::GetUnremovedScope() {
+  Scope* scope = this;
+  while (scope != nullptr && scope->HasBeenRemoved()) {
+    scope = scope->outer_scope();
+  }
+  DCHECK_NOT_NULL(scope);
+  return scope;
+}
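// --- Editor's illustrative sketch (not part of this patch) -----------------
// A block scope that FinalizeBlockScope() folded away is detached from its
// parent: it no longer appears in the parent's inner_scope_/sibling_ list,
// which is exactly what HasBeenRemoved() tests, and GetUnremovedScope()
// simply walks outwards until it reaches a scope that is still attached.
// Stand-alone sketch; TreeScope, IsAttached and NearestAttached are
// hypothetical names:
struct TreeScope {
  TreeScope* outer = nullptr;
  TreeScope* inner = nullptr;    // first child
  TreeScope* sibling = nullptr;  // next sibling
};

inline bool IsAttached(const TreeScope* scope) {
  if (scope->outer == nullptr) return true;  // the script scope stays put
  for (const TreeScope* child = scope->outer->inner; child != nullptr;
       child = child->sibling) {
    if (child == scope) return true;
  }
  return false;
}

inline TreeScope* NearestAttached(TreeScope* scope) {
  while (scope != nullptr && !IsAttached(scope)) scope = scope->outer;
  return scope;
}
// ---------------------------------------------------------------------------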
+
 Scope* Scope::FinalizeBlockScope() {
   DCHECK(is_block_scope());
 
@@ -530,7 +713,7 @@
   DCHECK_EQ(new_parent, new_parent->GetClosureScope());
   DCHECK_NULL(new_parent->inner_scope_);
   DCHECK_NULL(new_parent->unresolved_);
-  DCHECK_EQ(0, new_parent->temps()->length());
+  DCHECK_EQ(0, new_parent->locals_.length());
   Scope* inner_scope = new_parent->sibling_;
   if (inner_scope != top_inner_scope_) {
     for (; inner_scope->sibling() != top_inner_scope_;
@@ -557,25 +740,31 @@
     outer_scope_->unresolved_ = top_unresolved_;
   }
 
-  if (outer_scope_->GetClosureScope()->temps()->length() != top_temp_) {
-    ZoneList<Variable*>* temps = outer_scope_->GetClosureScope()->temps();
-    for (int i = top_temp_; i < temps->length(); i++) {
-      Variable* temp = temps->at(i);
-      DCHECK_EQ(temp->scope(), temp->scope()->GetClosureScope());
-      DCHECK_NE(temp->scope(), new_parent);
-      temp->set_scope(new_parent);
-      new_parent->AddTemporary(temp);
+  // TODO(verwaest): This currently only moves do-expression-declared
+  // variables in default arguments that weren't already declared with the
+  // same name in the closure scope. See
+  // test/mjsunit/harmony/default-parameter-do-expression.js.
+  DeclarationScope* outer_closure = outer_scope_->GetClosureScope();
+  for (int i = top_local_; i < outer_closure->locals_.length(); i++) {
+    Variable* local = outer_closure->locals_.at(i);
+    DCHECK(local->mode() == TEMPORARY || local->mode() == VAR);
+    DCHECK_EQ(local->scope(), local->scope()->GetClosureScope());
+    DCHECK_NE(local->scope(), new_parent);
+    local->set_scope(new_parent);
+    new_parent->AddLocal(local);
+    if (local->mode() == VAR) {
+      outer_closure->variables_.Remove(local);
+      new_parent->variables_.Add(new_parent->zone(), local);
     }
-    temps->Rewind(top_temp_);
   }
+  outer_closure->locals_.Rewind(top_local_);
+  outer_closure->decls_.Rewind(top_decl_);
 }
 
 void Scope::ReplaceOuterScope(Scope* outer) {
   DCHECK_NOT_NULL(outer);
   DCHECK_NOT_NULL(outer_scope_);
   DCHECK(!already_resolved_);
-  DCHECK(!outer->already_resolved_);
-  DCHECK(!outer_scope_->already_resolved_);
   outer_scope_->RemoveInnerScope(this);
   outer->AddInnerScope(this);
   outer_scope_ = outer;
@@ -589,57 +778,44 @@
   if (calls_eval()) other->RecordEvalCall();
 }
 
-
-Variable* Scope::LookupLocal(const AstRawString* name) {
-  Variable* result = variables_.Lookup(name);
-  if (result != NULL || scope_info_.is_null()) {
-    return result;
-  }
+Variable* Scope::LookupInScopeInfo(const AstRawString* name) {
   Handle<String> name_handle = name->string();
   // The Scope is backed up by ScopeInfo. This means it cannot operate in a
   // heap-independent mode, and all strings must be internalized immediately. So
   // it's ok to get the Handle<String> here.
   // If we have a serialized scope info, we might find the variable there.
   // There should be no local slot with the given name.
-  DCHECK(scope_info_->StackSlotIndex(*name_handle) < 0);
+  DCHECK_LT(scope_info_->StackSlotIndex(*name_handle), 0);
 
-  // Check context slot lookup.
   VariableMode mode;
-  VariableLocation location = VariableLocation::CONTEXT;
   InitializationFlag init_flag;
   MaybeAssignedFlag maybe_assigned_flag;
+
+  VariableLocation location = VariableLocation::CONTEXT;
   int index = ScopeInfo::ContextSlotIndex(scope_info_, name_handle, &mode,
                                           &init_flag, &maybe_assigned_flag);
-  if (index < 0) {
-    location = VariableLocation::GLOBAL;
-    index = ScopeInfo::ContextGlobalSlotIndex(scope_info_, name_handle, &mode,
-                                              &init_flag, &maybe_assigned_flag);
-  }
-  if (index < 0) {
-    // Check parameters.
-    index = scope_info_->ParameterIndex(*name_handle);
-    if (index < 0) return NULL;
-
-    mode = DYNAMIC;
-    location = VariableLocation::LOOKUP;
-    init_flag = kCreatedInitialized;
-    // Be conservative and flag parameters as maybe assigned. Better information
-    // would require ScopeInfo to serialize the maybe_assigned bit also for
-    // parameters.
-    maybe_assigned_flag = kMaybeAssigned;
-  } else {
-    DCHECK(location != VariableLocation::GLOBAL ||
-           (is_script_scope() && IsDeclaredVariableMode(mode) &&
-            !IsLexicalVariableMode(mode)));
+  if (index < 0 && scope_type() == MODULE_SCOPE) {
+    location = VariableLocation::MODULE;
+    index = scope_info_->ModuleIndex(name_handle, &mode, &init_flag,
+                                     &maybe_assigned_flag);
   }
 
-  Variable::Kind kind = Variable::NORMAL;
+  if (index < 0) {
+    index = scope_info_->FunctionContextSlotIndex(*name_handle);
+    if (index < 0) return nullptr;  // Nowhere found.
+    Variable* var = AsDeclarationScope()->DeclareFunctionVar(name);
+    DCHECK_EQ(CONST, var->mode());
+    var->AllocateTo(VariableLocation::CONTEXT, index);
+    return variables_.Lookup(name);
+  }
+
+  VariableKind kind = NORMAL_VARIABLE;
   if (location == VariableLocation::CONTEXT &&
       index == scope_info_->ReceiverContextSlotIndex()) {
-    kind = Variable::THIS;
+    kind = THIS_VARIABLE;
   }
   // TODO(marja, rossberg): Correctly declare FUNCTION, CLASS, NEW_TARGET, and
-  // ARGUMENTS bindings as their corresponding Variable::Kind.
+  // ARGUMENTS bindings as their corresponding VariableKind.
 
   Variable* var = variables_.Declare(zone(), this, name, mode, kind, init_flag,
                                      maybe_assigned_flag);
@@ -647,24 +823,6 @@
   return var;
 }
 
-Variable* DeclarationScope::LookupFunctionVar(const AstRawString* name) {
-  if (function_ != nullptr && function_->raw_name() == name) {
-    return function_;
-  } else if (!scope_info_.is_null()) {
-    // If we are backed by a scope info, try to lookup the variable there.
-    VariableMode mode;
-    int index = scope_info_->FunctionContextSlotIndex(*(name->string()), &mode);
-    if (index < 0) return nullptr;
-    Variable* var = DeclareFunctionVar(name);
-    DCHECK_EQ(mode, var->mode());
-    var->AllocateTo(VariableLocation::CONTEXT, index);
-    return var;
-  } else {
-    return nullptr;
-  }
-}
-
-
 Variable* Scope::Lookup(const AstRawString* name) {
   for (Scope* scope = this;
        scope != NULL;
@@ -679,21 +837,22 @@
     const AstRawString* name, VariableMode mode, bool is_optional, bool is_rest,
     bool* is_duplicate, AstValueFactory* ast_value_factory) {
   DCHECK(!already_resolved_);
-  DCHECK(is_function_scope());
+  DCHECK(is_function_scope() || is_module_scope());
+  DCHECK(!has_rest_);
   DCHECK(!is_optional || !is_rest);
   Variable* var;
   if (mode == TEMPORARY) {
     var = NewTemporary(name);
   } else {
-    var = Declare(zone(), this, name, mode, Variable::NORMAL,
-                  kCreatedInitialized);
+    var =
+        Declare(zone(), this, name, mode, NORMAL_VARIABLE, kCreatedInitialized);
     // TODO(wingo): Avoid O(n^2) check.
     *is_duplicate = IsDeclaredParameter(name);
   }
   if (!is_optional && !is_rest && arity_ == params_.length()) {
     ++arity_;
   }
-  if (is_rest) rest_index_ = num_parameters();
+  has_rest_ = is_rest;
   params_.Add(var, zone());
   if (name == ast_value_factory->arguments_string()) {
     has_arguments_parameter_ = true;
@@ -702,7 +861,7 @@
 }
 
 Variable* Scope::DeclareLocal(const AstRawString* name, VariableMode mode,
-                              InitializationFlag init_flag, Variable::Kind kind,
+                              InitializationFlag init_flag, VariableKind kind,
                               MaybeAssignedFlag maybe_assigned_flag) {
   DCHECK(!already_resolved_);
   // This function handles VAR, LET, and CONST modes.  DYNAMIC variables are
@@ -713,10 +872,138 @@
                  maybe_assigned_flag);
 }
 
+Variable* Scope::DeclareVariable(
+    Declaration* declaration, VariableMode mode, InitializationFlag init,
+    bool allow_harmony_restrictive_generators,
+    bool* sloppy_mode_block_scope_function_redefinition, bool* ok) {
+  DCHECK(IsDeclaredVariableMode(mode));
+  DCHECK(!already_resolved_);
+
+  if (mode == VAR && !is_declaration_scope()) {
+    return GetDeclarationScope()->DeclareVariable(
+        declaration, mode, init, allow_harmony_restrictive_generators,
+        sloppy_mode_block_scope_function_redefinition, ok);
+  }
+  DCHECK(!is_catch_scope());
+  DCHECK(!is_with_scope());
+  DCHECK(is_declaration_scope() ||
+         (IsLexicalVariableMode(mode) && is_block_scope()));
+
+  VariableProxy* proxy = declaration->proxy();
+  DCHECK(proxy->raw_name() != NULL);
+  const AstRawString* name = proxy->raw_name();
+  bool is_function_declaration = declaration->IsFunctionDeclaration();
+
+  Variable* var = nullptr;
+  if (is_eval_scope() && is_sloppy(language_mode()) && mode == VAR) {
+    // A var binding in a sloppy direct eval pollutes the enclosing scope.
+    // To achieve that, the proxy is bound to a lookup variable, forcing a
+    // dynamic declaration via the DeclareEvalVar or DeclareEvalFunction
+    // runtime functions.
+    VariableKind kind = NORMAL_VARIABLE;
+    // TODO(sigurds) figure out if kNotAssigned is OK here
+    var = new (zone()) Variable(this, name, mode, kind, init, kNotAssigned);
+    var->AllocateTo(VariableLocation::LOOKUP, -1);
+  } else {
+    // Declare the variable in the declaration scope.
+    var = LookupLocal(name);
+    if (var == NULL) {
+      // Declare the name.
+      VariableKind kind = NORMAL_VARIABLE;
+      if (is_function_declaration) {
+        kind = FUNCTION_VARIABLE;
+      }
+      var = DeclareLocal(name, mode, init, kind, kNotAssigned);
+    } else if (IsLexicalVariableMode(mode) ||
+               IsLexicalVariableMode(var->mode())) {
+      // Allow duplicate function decls for web compat, see bug 4693.
+      bool duplicate_allowed = false;
+      if (is_sloppy(language_mode()) && is_function_declaration &&
+          var->is_function()) {
+        DCHECK(IsLexicalVariableMode(mode) &&
+               IsLexicalVariableMode(var->mode()));
+        // If the duplication is allowed, then the var will show up
+        // in the SloppyBlockFunctionMap and the new FunctionKind
+        // will be a permitted duplicate.
+        FunctionKind function_kind =
+            declaration->AsFunctionDeclaration()->fun()->kind();
+        duplicate_allowed =
+            GetDeclarationScope()->sloppy_block_function_map()->Lookup(
+                const_cast<AstRawString*>(name), name->hash()) != nullptr &&
+            !IsAsyncFunction(function_kind) &&
+            !(allow_harmony_restrictive_generators &&
+              IsGeneratorFunction(function_kind));
+      }
+      if (duplicate_allowed) {
+        *sloppy_mode_block_scope_function_redefinition = true;
+      } else {
+        // The name was declared in this scope before; check for conflicting
+        // re-declarations. We have a conflict if either of the declarations
+        // is not a var (in script scope, we also have to ignore legacy const
+        // for compatibility). There is similar code in runtime.cc in the
+        // Declare functions. The function CheckConflictingVarDeclarations
+        // checks for var and let bindings from different scopes whereas this
+        // is a check for conflicting declarations within the same scope. This
+        // check also covers the special case
+        //
+        // function () { let x; { var x; } }
+        //
+        // because the var declaration is hoisted to the function scope where
+        // 'x' is already bound.
+        DCHECK(IsDeclaredVariableMode(var->mode()));
+        // In harmony we treat re-declarations as early errors. See
+        // ES5 16 for a definition of early errors.
+        *ok = false;
+        return nullptr;
+      }
+    } else if (mode == VAR) {
+      var->set_maybe_assigned();
+    }
+  }
+  DCHECK_NOT_NULL(var);
+
+  // We add a declaration node for every declaration. The compiler
+  // will only generate code if necessary. In particular, declarations
+  // for inner local variables that do not represent functions won't
+  // result in any generated code.
+  //
+  // This will lead to multiple declaration nodes for the
+  // same variable if it is declared several times. This is not a
+  // semantic issue, but it may be a performance issue since it may
+  // lead to repeated DeclareEvalVar or DeclareEvalFunction calls.
+  decls_.Add(declaration, zone());
+  proxy->BindTo(var);
+  return var;
+}
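// --- Editor's illustrative sketch (not part of this patch) -----------------
// The early `mode == VAR && !is_declaration_scope()` redirect above is what
// hoists `var` bindings to the closest enclosing declaration scope
// (function/eval/module/script), while lexical bindings stay in the block
// scope that syntactically contains them.  A stand-alone sketch of that
// choice; DeclScope, ToyMode and HoistTarget are hypothetical names:
enum class ToyMode { kVar, kLexical };

struct DeclScope {
  DeclScope* outer = nullptr;
  bool is_declaration_scope = false;
};

// Assumes the outermost (script) scope is a declaration scope, as in V8.
inline DeclScope* HoistTarget(DeclScope* scope, ToyMode mode) {
  if (mode != ToyMode::kVar) return scope;  // let/const: declare in place
  while (!scope->is_declaration_scope) scope = scope->outer;  // var: hoist
  return scope;
}
// ---------------------------------------------------------------------------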
+
+VariableProxy* Scope::NewUnresolved(AstNodeFactory* factory,
+                                    const AstRawString* name,
+                                    int start_position, int end_position,
+                                    VariableKind kind) {
+  // Note that we must not share the unresolved variables with
+  // the same name because they may be removed selectively via
+  // RemoveUnresolved().
+  DCHECK(!already_resolved_);
+  DCHECK_EQ(!needs_migration_, factory->zone() == zone());
+  VariableProxy* proxy =
+      factory->NewVariableProxy(name, kind, start_position, end_position);
+  proxy->set_next_unresolved(unresolved_);
+  unresolved_ = proxy;
+  return proxy;
+}
+
+void Scope::AddUnresolved(VariableProxy* proxy) {
+  DCHECK(!already_resolved_);
+  DCHECK(!proxy->is_resolved());
+  proxy->set_next_unresolved(unresolved_);
+  unresolved_ = proxy;
+}
+
 Variable* DeclarationScope::DeclareDynamicGlobal(const AstRawString* name,
-                                                 Variable::Kind kind) {
+                                                 VariableKind kind) {
   DCHECK(is_script_scope());
-  return Declare(zone(), this, name, DYNAMIC_GLOBAL, kind, kCreatedInitialized);
+  return variables_.Declare(zone(), this, name, DYNAMIC_GLOBAL, kind,
+                            kCreatedInitialized);
 }
 
 
@@ -739,24 +1026,34 @@
   return false;
 }
 
+bool Scope::RemoveUnresolved(const AstRawString* name) {
+  if (unresolved_->raw_name() == name) {
+    VariableProxy* removed = unresolved_;
+    unresolved_ = unresolved_->next_unresolved();
+    removed->set_next_unresolved(nullptr);
+    return true;
+  }
+  VariableProxy* current = unresolved_;
+  while (current != nullptr) {
+    VariableProxy* next = current->next_unresolved();
+    if (next != nullptr && next->raw_name() == name) {
+      current->set_next_unresolved(next->next_unresolved());
+      next->set_next_unresolved(nullptr);
+      return true;
+    }
+    current = next;
+  }
+  return false;
+}
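// --- Editor's illustrative sketch (not part of this patch) -----------------
// Unresolved VariableProxys form an intrusive, singly linked list:
// NewUnresolved()/AddUnresolved() push at the head, and RemoveUnresolved()
// splices a node back out by name.  The same operation on a stand-in node
// type, using a pointer-to-pointer walk instead of the head special case
// above (ProxyNode and RemoveByName are hypothetical names):
#include <string>

struct ProxyNode {
  std::string name;
  ProxyNode* next = nullptr;
};

inline bool RemoveByName(ProxyNode** head, const std::string& name) {
  for (ProxyNode** link = head; *link != nullptr; link = &(*link)->next) {
    if ((*link)->name == name) {
      ProxyNode* removed = *link;
      *link = removed->next;  // splice the node out of the list
      removed->next = nullptr;
      return true;
    }
  }
  return false;
}
// ---------------------------------------------------------------------------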
 
 Variable* Scope::NewTemporary(const AstRawString* name) {
   DeclarationScope* scope = GetClosureScope();
-  Variable* var = new(zone()) Variable(scope,
-                                       name,
-                                       TEMPORARY,
-                                       Variable::NORMAL,
-                                       kCreatedInitialized);
-  scope->AddTemporary(var);
+  Variable* var = new (zone())
+      Variable(scope, name, TEMPORARY, NORMAL_VARIABLE, kCreatedInitialized);
+  scope->AddLocal(var);
   return var;
 }
 
-void Scope::AddDeclaration(Declaration* declaration) {
-  DCHECK(!already_resolved_);
-  decls_.Add(declaration, zone());
-}
-
-
 Declaration* Scope::CheckConflictingVarDeclarations() {
   int length = decls_.length();
   for (int i = 0; i < length; i++) {
@@ -806,63 +1103,34 @@
   return nullptr;
 }
 
-void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
-                                         ZoneList<Variable*>* context_locals,
-                                         ZoneList<Variable*>* context_globals) {
-  DCHECK(stack_locals != NULL);
-  DCHECK(context_locals != NULL);
-  DCHECK(context_globals != NULL);
-
-  // Collect temporaries which are always allocated on the stack, unless the
-  // context as a whole has forced context allocation.
-  if (is_declaration_scope()) {
-    ZoneList<Variable*>* temps = AsDeclarationScope()->temps();
-    for (int i = 0; i < temps->length(); i++) {
-      Variable* var = (*temps)[i];
-      if (var->is_used()) {
-        if (var->IsContextSlot()) {
-          DCHECK(has_forced_context_allocation());
-          context_locals->Add(var, zone());
-        } else if (var->IsStackLocal()) {
-          stack_locals->Add(var, zone());
-        } else {
-          DCHECK(var->IsParameter());
-        }
-      }
-    }
-  }
-
-  for (int i = 0; i < ordered_variables_.length(); i++) {
-    Variable* var = ordered_variables_[i];
-    if (var->IsStackLocal()) {
-      stack_locals->Add(var, zone());
-    } else if (var->IsContextSlot()) {
-      context_locals->Add(var, zone());
-    } else if (var->IsGlobalSlot()) {
-      context_globals->Add(var, zone());
-    }
-  }
-}
-
-void DeclarationScope::AllocateVariables(ParseInfo* info,
-                                         AstNodeFactory* factory) {
-  // 1) Propagate scope information.
-  PropagateScopeInfo();
-
-  // 2) Resolve variables.
-  ResolveVariablesRecursively(info, factory);
-
-  // 3) Allocate variables.
+void DeclarationScope::AllocateVariables(ParseInfo* info, AnalyzeMode mode) {
+  ResolveVariablesRecursively(info);
   AllocateVariablesRecursively();
+
+  MaybeHandle<ScopeInfo> outer_scope;
+  for (const Scope* s = outer_scope_; s != nullptr; s = s->outer_scope_) {
+    if (s->scope_info_.is_null()) continue;
+    outer_scope = s->scope_info_;
+    break;
+  }
+  AllocateScopeInfosRecursively(info->isolate(), mode, outer_scope);
+  // The debugger expects all shared function infos to contain a scope info.
+  // Since the top-most scope will end up in a shared function info, make sure
+  // it has one, even if it doesn't need a scope info.
+  // TODO(jochen|yangguo): Remove this requirement.
+  if (scope_info_.is_null()) {
+    scope_info_ = ScopeInfo::Create(info->isolate(), zone(), this, outer_scope);
+  }
 }
 
-
-bool Scope::AllowsLazyParsing() const {
-  // If we are inside a block scope, we must parse eagerly to find out how
-  // to allocate variables on the block scope. At this point, declarations may
-  // not have yet been parsed.
+bool Scope::AllowsLazyParsingWithoutUnresolvedVariables() const {
+  // If we are inside a block scope, we must find unresolved variables in the
+  // inner scopes to find out how to allocate variables on the block scope. At
+  // this point, declarations may not have yet been parsed.
   for (const Scope* s = this; s != nullptr; s = s->outer_scope_) {
     if (s->is_block_scope()) return false;
+    // TODO(marja): Refactor parsing modes: also add s->is_function_scope()
+    // here.
   }
   return true;
 }
@@ -932,6 +1200,16 @@
   return scope->AsDeclarationScope();
 }
 
+ModuleScope* Scope::GetModuleScope() {
+  Scope* scope = this;
+  DCHECK(!scope->is_script_scope());
+  while (!scope->is_module_scope()) {
+    scope = scope->outer_scope();
+    DCHECK_NOT_NULL(scope);
+  }
+  return scope->AsModuleScope();
+}
+
 DeclarationScope* Scope::GetReceiverScope() {
   Scope* scope = this;
   while (!scope->is_script_scope() &&
@@ -942,18 +1220,17 @@
   return scope->AsDeclarationScope();
 }
 
-
-
-Handle<ScopeInfo> Scope::GetScopeInfo(Isolate* isolate) {
-  if (scope_info_.is_null()) {
-    scope_info_ = ScopeInfo::Create(isolate, zone(), this);
+Scope* Scope::GetOuterScopeWithContext() {
+  Scope* scope = outer_scope_;
+  while (scope && !scope->NeedsContext()) {
+    scope = scope->outer_scope();
   }
-  return scope_info_;
+  return scope;
 }
 
 Handle<StringSet> DeclarationScope::CollectNonLocals(
     ParseInfo* info, Handle<StringSet> non_locals) {
-  VariableProxy* free_variables = FetchFreeVariables(this, info);
+  VariableProxy* free_variables = FetchFreeVariables(this, true, info);
   for (VariableProxy* proxy = free_variables; proxy != nullptr;
        proxy = proxy->next_unresolved()) {
     non_locals = StringSet::Add(non_locals, proxy->name());
@@ -961,38 +1238,73 @@
   return non_locals;
 }
 
-void DeclarationScope::AnalyzePartially(DeclarationScope* migrate_to,
-                                        AstNodeFactory* ast_node_factory) {
-  // Gather info from inner scopes.
-  PropagateScopeInfo();
+void DeclarationScope::ResetAfterPreparsing(AstValueFactory* ast_value_factory,
+                                            bool aborted) {
+  DCHECK(is_function_scope());
 
-  // Try to resolve unresolved variables for this Scope and migrate those which
-  // cannot be resolved inside. It doesn't make sense to try to resolve them in
-  // the outer Scopes here, because they are incomplete.
-  for (VariableProxy* proxy = FetchFreeVariables(this); proxy != nullptr;
-       proxy = proxy->next_unresolved()) {
-    DCHECK(!proxy->is_resolved());
-    VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy);
-    migrate_to->AddUnresolved(copy);
+  // Reset all non-trivial members.
+  decls_.Rewind(0);
+  locals_.Rewind(0);
+  sloppy_block_function_map_.Clear();
+  variables_.Clear();
+  // Make sure we won't walk the scope tree from here on.
+  inner_scope_ = nullptr;
+  unresolved_ = nullptr;
+
+  // TODO(verwaest): We should properly preparse the parameters (no declarations
+  // should be created), and reparse on abort.
+  if (aborted) {
+    if (!IsArrowFunction(function_kind_)) {
+      DeclareDefaultFunctionVariables(ast_value_factory);
+    }
+    // Recreate declarations for parameters.
+    for (int i = 0; i < params_.length(); i++) {
+      Variable* var = params_[i];
+      if (var->mode() == TEMPORARY) {
+        locals_.Add(var, zone());
+      } else if (variables_.Lookup(var->raw_name()) == nullptr) {
+        variables_.Add(zone(), var);
+        locals_.Add(var, zone());
+      }
+    }
+  } else {
+    params_.Rewind(0);
   }
 
-  // Push scope data up to migrate_to. Note that migrate_to and this Scope
-  // describe the same Scope, just in different Zones.
-  PropagateUsageFlagsToScope(migrate_to);
-  if (scope_uses_super_property_) migrate_to->scope_uses_super_property_ = true;
-  if (inner_scope_calls_eval_) migrate_to->inner_scope_calls_eval_ = true;
+#ifdef DEBUG
+  needs_migration_ = false;
+#endif
+
+  is_lazily_parsed_ = !aborted;
+}
+
+void DeclarationScope::AnalyzePartially(AstNodeFactory* ast_node_factory) {
   DCHECK(!force_eager_compilation_);
-  migrate_to->set_start_position(start_position_);
-  migrate_to->set_end_position(end_position_);
-  migrate_to->set_language_mode(language_mode());
-  migrate_to->arity_ = arity_;
-  migrate_to->force_context_allocation_ = force_context_allocation_;
-  outer_scope_->RemoveInnerScope(this);
-  DCHECK_EQ(outer_scope_, migrate_to->outer_scope_);
-  DCHECK_EQ(outer_scope_->zone(), migrate_to->zone());
-  DCHECK_EQ(NeedsHomeObject(), migrate_to->NeedsHomeObject());
-  DCHECK_EQ(asm_function_, migrate_to->asm_function_);
-  DCHECK_EQ(arguments() != nullptr, migrate_to->arguments() != nullptr);
+  VariableProxy* unresolved = nullptr;
+
+  if (!outer_scope_->is_script_scope()) {
+    // Try to resolve unresolved variables for this Scope and migrate those
+    // which cannot be resolved inside. It doesn't make sense to try to resolve
+    // them in the outer Scopes here, because they are incomplete.
+    for (VariableProxy* proxy =
+             FetchFreeVariables(this, !FLAG_lazy_inner_functions);
+         proxy != nullptr; proxy = proxy->next_unresolved()) {
+      DCHECK(!proxy->is_resolved());
+      VariableProxy* copy = ast_node_factory->CopyVariableProxy(proxy);
+      copy->set_next_unresolved(unresolved);
+      unresolved = copy;
+    }
+
+    // Clear arguments_ if unused. This is used as a signal for optimization.
+    if (arguments_ != nullptr &&
+        !(MustAllocate(arguments_) && !has_arguments_parameter_)) {
+      arguments_ = nullptr;
+    }
+  }
+
+  ResetAfterPreparsing(ast_node_factory->ast_value_factory(), false);
+
+  unresolved_ = unresolved;
 }
 
 #ifdef DEBUG
@@ -1040,9 +1352,6 @@
     case VariableLocation::CONTEXT:
       PrintF("context[%d]", var->index());
       break;
-    case VariableLocation::GLOBAL:
-      PrintF("global[%d]", var->index());
-      break;
     case VariableLocation::LOOKUP:
       PrintF("lookup");
       break;
@@ -1055,7 +1364,7 @@
 
 static void PrintVar(int indent, Variable* var) {
   if (var->is_used() || !var->IsUnallocated()) {
-    Indent(indent, Variable::Mode2String(var->mode()));
+    Indent(indent, VariableMode2String(var->mode()));
     PrintF(" ");
     if (var->raw_name()->IsEmpty())
       PrintF(".%p", reinterpret_cast<void*>(var));
@@ -1077,14 +1386,16 @@
   }
 }
 
-
-static void PrintMap(int indent, VariableMap* map) {
-  for (VariableMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+static void PrintMap(int indent, VariableMap* map, bool locals) {
+  for (VariableMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) {
     Variable* var = reinterpret_cast<Variable*>(p->value);
-    if (var == NULL) {
-      Indent(indent, "<?>\n");
-    } else {
-      PrintVar(indent, var);
+    bool local = !IsDynamicVariableMode(var->mode());
+    if (locals ? local : !local) {
+      if (var == nullptr) {
+        Indent(indent, "<?>\n");
+      } else {
+        PrintVar(indent, var);
+      }
     }
   }
 }
@@ -1143,14 +1454,14 @@
     Indent(n1, "// scope uses 'super' property\n");
   }
   if (inner_scope_calls_eval_) Indent(n1, "// inner scope calls 'eval'\n");
+  if (is_lazily_parsed_) Indent(n1, "// lazily parsed\n");
   if (num_stack_slots_ > 0) {
     Indent(n1, "// ");
     PrintF("%d stack slots\n", num_stack_slots_);
   }
   if (num_heap_slots_ > 0) {
     Indent(n1, "// ");
-    PrintF("%d heap slots (including %d global slots)\n", num_heap_slots_,
-           num_global_slots_);
+    PrintF("%d heap slots\n", num_heap_slots_);
   }
 
   // Print locals.
@@ -1159,28 +1470,12 @@
     PrintVar(n1, function);
   }
 
-  if (is_declaration_scope()) {
-    bool printed_header = false;
-    ZoneList<Variable*>* temps = AsDeclarationScope()->temps();
-    for (int i = 0; i < temps->length(); i++) {
-      if (!printed_header) {
-        printed_header = true;
-        Indent(n1, "// temporary vars:\n");
-      }
-      PrintVar(n1, (*temps)[i]);
-    }
-  }
-
   if (variables_.Start() != NULL) {
     Indent(n1, "// local vars:\n");
-    PrintMap(n1, &variables_);
-  }
+    PrintMap(n1, &variables_, true);
 
-  if (dynamics_ != NULL) {
     Indent(n1, "// dynamic vars:\n");
-    PrintMap(n1, dynamics_->GetMap(DYNAMIC));
-    PrintMap(n1, dynamics_->GetMap(DYNAMIC_LOCAL));
-    PrintMap(n1, dynamics_->GetMap(DYNAMIC_GLOBAL));
+    PrintMap(n1, &variables_, false);
   }
 
   // Print inner scopes (disable by providing negative n).
@@ -1208,34 +1503,26 @@
 }
 
 void Scope::CheckZones() {
+  DCHECK(!needs_migration_);
   for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
     CHECK_EQ(scope->zone(), zone());
+    scope->CheckZones();
   }
 }
 #endif  // DEBUG
 
 Variable* Scope::NonLocal(const AstRawString* name, VariableMode mode) {
-  if (dynamics_ == NULL) dynamics_ = new (zone()) DynamicScopePart(zone());
-  VariableMap* map = dynamics_->GetMap(mode);
-  Variable* var = map->Lookup(name);
-  if (var == NULL) {
-    // Declare a new non-local.
-    DCHECK(!IsLexicalVariableMode(mode));
-    var = map->Declare(zone(), NULL, name, mode, Variable::NORMAL,
-                       kCreatedInitialized);
-    // Allocate it by giving it a dynamic lookup.
-    var->AllocateTo(VariableLocation::LOOKUP, -1);
-  }
+  // Declare a new non-local.
+  DCHECK(IsDynamicVariableMode(mode));
+  Variable* var = variables_.Declare(zone(), NULL, name, mode, NORMAL_VARIABLE,
+                                     kCreatedInitialized);
+  // Allocate it by giving it a dynamic lookup.
+  var->AllocateTo(VariableLocation::LOOKUP, -1);
   return var;
 }
 
-Variable* Scope::LookupRecursive(VariableProxy* proxy,
-                                 BindingKind* binding_kind,
-                                 AstNodeFactory* factory,
-                                 Scope* outer_scope_end) {
+Variable* Scope::LookupRecursive(VariableProxy* proxy, Scope* outer_scope_end) {
   DCHECK_NE(outer_scope_end, this);
-  DCHECK_NOT_NULL(binding_kind);
-  DCHECK_EQ(UNBOUND, *binding_kind);
   // Short-cut: whenever we find a debug-evaluate scope, just look everything up
   // dynamically. Debug-evaluate doesn't properly create scope info for the
   // lookups it does. It may not have a valid 'this' declaration, and anything
@@ -1243,10 +1530,7 @@
   // variables.
   // TODO(yangguo): Remove once debug-evaluate creates proper ScopeInfo for the
   // scopes in which it's evaluating.
-  if (is_debug_evaluate_scope_) {
-    *binding_kind = DYNAMIC_LOOKUP;
-    return nullptr;
-  }
+  if (is_debug_evaluate_scope_) return NonLocal(proxy->raw_name(), DYNAMIC);
 
   // Try to find the variable in this scope.
   Variable* var = LookupLocal(proxy->raw_name());
@@ -1254,54 +1538,49 @@
   // We found a variable and we are done. (Even if there is an 'eval' in this
   // scope which introduces the same variable again, the resulting variable
   // remains the same.)
-  if (var != nullptr) {
-    *binding_kind = BOUND;
-    return var;
+  if (var != nullptr) return var;
+
+  if (outer_scope_ == outer_scope_end) {
+    // We may just be trying to find all free variables. In that case, don't
+    // declare them in the outer scope.
+    if (!is_script_scope()) return nullptr;
+    // No binding has been found. Declare a variable on the global object.
+    return AsDeclarationScope()->DeclareDynamicGlobal(proxy->raw_name(),
+                                                      NORMAL_VARIABLE);
   }
 
-  // We did not find a variable locally. Check against the function variable, if
-  // any.
-  if (is_function_scope()) {
-    var = AsDeclarationScope()->LookupFunctionVar(proxy->raw_name());
-    if (var != nullptr) {
-      *binding_kind = calls_sloppy_eval() ? BOUND_EVAL_SHADOWED : BOUND;
-      return var;
-    }
-  }
+  DCHECK(!is_script_scope());
 
-  if (outer_scope_ != outer_scope_end) {
-    var = outer_scope_->LookupRecursive(proxy, binding_kind, factory,
-                                        outer_scope_end);
-    if (*binding_kind == BOUND && is_function_scope()) {
+  var = outer_scope_->LookupRecursive(proxy, outer_scope_end);
+
+  // The variable could not be resolved statically.
+  if (var == nullptr) return var;
+
+  if (is_function_scope() && !var->is_dynamic()) {
+    var->ForceContextAllocation();
+  }
+  // "this" can't be shadowed by "eval"-introduced bindings or by "with"
+  // scopes.
+  // TODO(wingo): There are other variables in this category; add them.
+  if (var->is_this()) return var;
+
+  if (is_with_scope()) {
+    // The current scope is a with scope, so the variable binding can not be
+    // statically resolved. However, note that it was necessary to do a lookup
+    // in the outer scope anyway, because if a binding exists in an outer
+    // scope, the associated variable has to be marked as potentially being
+    // accessed from inside of an inner with scope (the property may not be in
+    // the 'with' object).
+    if (!var->is_dynamic() && var->IsUnallocated()) {
+      DCHECK(!already_resolved_);
+      var->set_is_used();
       var->ForceContextAllocation();
+      if (proxy->is_assigned()) var->set_maybe_assigned();
     }
-    // "this" can't be shadowed by "eval"-introduced bindings or by "with"
-    // scopes.
-    // TODO(wingo): There are other variables in this category; add them.
-    if (var != nullptr && var->is_this()) return var;
-
-    if (is_with_scope()) {
-      // The current scope is a with scope, so the variable binding can not be
-      // statically resolved. However, note that it was necessary to do a lookup
-      // in the outer scope anyway, because if a binding exists in an outer
-      // scope, the associated variable has to be marked as potentially being
-      // accessed from inside of an inner with scope (the property may not be in
-      // the 'with' object).
-      if (var != nullptr && var->IsUnallocated()) {
-        DCHECK(!already_resolved_);
-        var->set_is_used();
-        var->ForceContextAllocation();
-        if (proxy->is_assigned()) var->set_maybe_assigned();
-      }
-      *binding_kind = DYNAMIC_LOOKUP;
-      return nullptr;
-    }
-  } else {
-    DCHECK(!is_with_scope());
-    DCHECK(is_function_scope() || is_script_scope() || is_eval_scope());
+    return NonLocal(proxy->raw_name(), DYNAMIC);
   }
 
-  if (calls_sloppy_eval() && is_declaration_scope() && !is_script_scope()) {
+  if (calls_sloppy_eval() && is_declaration_scope()) {
     // A variable binding may have been found in an outer scope, but the current
     // scope makes a sloppy 'eval' call, so the found variable may not be the
     // correct one (the 'eval' may introduce a binding with the same name). In
@@ -1309,40 +1588,58 @@
     // scopes that can host var bindings (declaration scopes) need be considered
     // here (this excludes block and catch scopes), and variable lookups at
     // script scope are always dynamic.
-    if (*binding_kind == BOUND) {
-      *binding_kind = BOUND_EVAL_SHADOWED;
-    } else if (*binding_kind == UNBOUND) {
-      *binding_kind = UNBOUND_EVAL_SHADOWED;
+    if (var->IsGlobalObjectProperty()) {
+      return NonLocal(proxy->raw_name(), DYNAMIC_GLOBAL);
     }
+
+    if (var->is_dynamic()) return var;
+
+    Variable* invalidated = var;
+    var = NonLocal(proxy->raw_name(), DYNAMIC_LOCAL);
+    var->set_local_if_not_shadowed(invalidated);
   }
 
   return var;
 }
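// --- Editor's illustrative sketch (not part of this patch) -----------------
// After this rewrite, LookupRecursive() no longer reports a BindingKind to
// its caller; on the way back out of the recursion it either returns the
// statically found variable or directly declares a DYNAMIC / DYNAMIC_LOCAL /
// DYNAMIC_GLOBAL non-local.  Roughly (ignoring the `this` exemption and
// already-dynamic variables), the decision looks like the sketch below;
// ToyBinding and ClassifyBinding are hypothetical names:
enum class ToyBinding {
  kStatic,        // use the variable that was found
  kDynamic,       // with scope / debug-evaluate: fully dynamic lookup
  kDynamicLocal,  // sloppy eval may shadow the local binding that was found
  kDynamicGlobal  // sloppy eval may shadow a global object property
};

inline ToyBinding ClassifyBinding(bool in_with_scope, bool calls_sloppy_eval,
                                  bool found_is_global_property) {
  if (in_with_scope) return ToyBinding::kDynamic;
  if (calls_sloppy_eval) {
    return found_is_global_property ? ToyBinding::kDynamicGlobal
                                    : ToyBinding::kDynamicLocal;
  }
  return ToyBinding::kStatic;
}
// ---------------------------------------------------------------------------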
 
-void Scope::ResolveVariable(ParseInfo* info, VariableProxy* proxy,
-                            AstNodeFactory* factory) {
+void Scope::ResolveVariable(ParseInfo* info, VariableProxy* proxy) {
   DCHECK(info->script_scope()->is_script_scope());
+  DCHECK(!proxy->is_resolved());
+  Variable* var = LookupRecursive(proxy, nullptr);
+  ResolveTo(info, proxy, var);
 
-  // If the proxy is already resolved there's nothing to do
-  // (functions and consts may be resolved by the parser).
-  if (proxy->is_resolved()) return;
-
-  // Otherwise, try to resolve the variable.
-  BindingKind binding_kind = UNBOUND;
-  Variable* var = LookupRecursive(proxy, &binding_kind, factory);
-
-  ResolveTo(info, binding_kind, proxy, var);
+  if (FLAG_lazy_inner_functions) {
+    if (info != nullptr && info->is_native()) return;
+    // Pessimistically force context allocation for all variables to which
+    // inner scope variables could potentially resolve.
+    Scope* scope = GetClosureScope()->outer_scope_;
+    while (scope != nullptr && scope->scope_info_.is_null()) {
+      var = scope->LookupLocal(proxy->raw_name());
+      if (var != nullptr) {
+        // Since we don't lazy parse inner arrow functions, inner functions
+        // cannot refer to the outer "this".
+        if (!var->is_dynamic() && !var->is_this() &&
+            !var->has_forced_context_allocation()) {
+          var->ForceContextAllocation();
+          var->set_is_used();
+          // We don't know what the (potentially lazily parsed) inner function
+          // does with the variable; pessimistically assume that it's assigned.
+          var->set_maybe_assigned();
+        }
+      }
+      scope = scope->outer_scope_;
+    }
+  }
 }
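// --- Editor's illustrative sketch (not part of this patch) -----------------
// Under --lazy-inner-functions the resolver cannot see which outer variables
// a skipped inner function will actually touch, so ResolveVariable() above
// walks the outer scopes that have no ScopeInfo yet and pessimistically
// context-allocates (and marks used / maybe-assigned) every binding that
// merely shares the proxy's name.  A stand-alone sketch of that walk;
// LazyScope, LazyVar and ForceAllocateCandidates are hypothetical names:
#include <map>
#include <string>

struct LazyVar {
  bool forced_context = false;
  bool used = false;
  bool maybe_assigned = false;
};

struct LazyScope {
  LazyScope* outer = nullptr;
  bool has_scope_info = false;  // deserialized scopes are already finalized
  std::map<std::string, LazyVar> locals;
};

inline void ForceAllocateCandidates(LazyScope* closure_outer,
                                    const std::string& name) {
  for (LazyScope* s = closure_outer; s != nullptr && !s->has_scope_info;
       s = s->outer) {
    auto it = s->locals.find(name);
    if (it == s->locals.end()) continue;
    it->second.forced_context = true;  // might be captured by the lazy fn
    it->second.used = true;
    it->second.maybe_assigned = true;  // assume the worst about assignments
  }
}
// ---------------------------------------------------------------------------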
 
-void Scope::ResolveTo(ParseInfo* info, BindingKind binding_kind,
-                      VariableProxy* proxy, Variable* var) {
+void Scope::ResolveTo(ParseInfo* info, VariableProxy* proxy, Variable* var) {
 #ifdef DEBUG
   if (info->script_is_native()) {
     // To avoid polluting the global object in native scripts
     //  - Variables must not be allocated to the global scope.
     CHECK_NOT_NULL(outer_scope());
     //  - Variables must be bound locally or unallocated.
-    if (BOUND != binding_kind) {
+    if (var->IsGlobalObjectProperty()) {
       // The following variable name may be minified. If so, disable
       // minification in js2c.py for better output.
       Handle<String> name = proxy->raw_name()->string();
@@ -1357,85 +1654,44 @@
   }
 #endif
 
-  switch (binding_kind) {
-    case BOUND:
-      break;
-
-    case BOUND_EVAL_SHADOWED:
-      // We either found a variable binding that might be shadowed by eval  or
-      // gave up on it (e.g. by encountering a local with the same in the outer
-      // scope which was not promoted to a context, this can happen if we use
-      // debugger to evaluate arbitrary expressions at a break point).
-      if (var->IsGlobalObjectProperty()) {
-        var = NonLocal(proxy->raw_name(), DYNAMIC_GLOBAL);
-      } else if (var->is_dynamic()) {
-        var = NonLocal(proxy->raw_name(), DYNAMIC);
-      } else {
-        Variable* invalidated = var;
-        var = NonLocal(proxy->raw_name(), DYNAMIC_LOCAL);
-        var->set_local_if_not_shadowed(invalidated);
-      }
-      break;
-
-    case UNBOUND:
-      // No binding has been found. Declare a variable on the global object.
-      var = info->script_scope()->DeclareDynamicGlobal(proxy->raw_name(),
-                                                       Variable::NORMAL);
-      break;
-
-    case UNBOUND_EVAL_SHADOWED:
-      // No binding has been found. But some scope makes a sloppy 'eval' call.
-      var = NonLocal(proxy->raw_name(), DYNAMIC_GLOBAL);
-      break;
-
-    case DYNAMIC_LOOKUP:
-      // The variable could not be resolved statically.
-      var = NonLocal(proxy->raw_name(), DYNAMIC);
-      break;
-  }
-
-  DCHECK(var != NULL);
+  DCHECK_NOT_NULL(var);
   if (proxy->is_assigned()) var->set_maybe_assigned();
-
   proxy->BindTo(var);
 }
 
-void Scope::ResolveVariablesRecursively(ParseInfo* info,
-                                        AstNodeFactory* factory) {
+void Scope::ResolveVariablesRecursively(ParseInfo* info) {
   DCHECK(info->script_scope()->is_script_scope());
 
   // Resolve unresolved variables for this scope.
   for (VariableProxy* proxy = unresolved_; proxy != nullptr;
        proxy = proxy->next_unresolved()) {
-    ResolveVariable(info, proxy, factory);
+    ResolveVariable(info, proxy);
   }
 
   // Resolve unresolved variables for inner scopes.
   for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
-    scope->ResolveVariablesRecursively(info, factory);
+    scope->ResolveVariablesRecursively(info);
   }
 }
 
 VariableProxy* Scope::FetchFreeVariables(DeclarationScope* max_outer_scope,
-                                         ParseInfo* info,
+                                         bool try_to_resolve, ParseInfo* info,
                                          VariableProxy* stack) {
   for (VariableProxy *proxy = unresolved_, *next = nullptr; proxy != nullptr;
        proxy = next) {
     next = proxy->next_unresolved();
-    if (proxy->is_resolved()) continue;
-    // Note that we pass nullptr as AstNodeFactory: this phase should not create
-    // any new AstNodes, since none of the Scopes involved are backed up by
-    // ScopeInfo.
-    BindingKind binding_kind = UNBOUND;
-    Variable* var = LookupRecursive(proxy, &binding_kind, nullptr,
-                                    max_outer_scope->outer_scope());
+    DCHECK(!proxy->is_resolved());
+    Variable* var = nullptr;
+    if (try_to_resolve) {
+      var = LookupRecursive(proxy, max_outer_scope->outer_scope());
+    }
     if (var == nullptr) {
       proxy->set_next_unresolved(stack);
       stack = proxy;
     } else if (info != nullptr) {
-      DCHECK_NE(UNBOUND, binding_kind);
-      DCHECK_NE(UNBOUND_EVAL_SHADOWED, binding_kind);
-      ResolveTo(info, binding_kind, proxy, var);
+      ResolveTo(info, proxy, var);
+    } else {
+      var->set_is_used();
     }
   }
 
@@ -1443,22 +1699,13 @@
   unresolved_ = nullptr;
 
   for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
-    stack = scope->FetchFreeVariables(max_outer_scope, info, stack);
+    stack =
+        scope->FetchFreeVariables(max_outer_scope, try_to_resolve, info, stack);
   }
 
   return stack;
 }
 
-void Scope::PropagateScopeInfo() {
-  for (Scope* inner = inner_scope_; inner != nullptr; inner = inner->sibling_) {
-    inner->PropagateScopeInfo();
-    if (IsAsmModule() && inner->is_function_scope()) {
-      inner->AsDeclarationScope()->set_asm_function();
-    }
-  }
-}
-
-
 bool Scope::MustAllocate(Variable* var) {
   DCHECK(var->location() != VariableLocation::MODULE);
   // Give var a read/write use if there is a chance it might be accessed
@@ -1511,8 +1758,8 @@
 
   bool uses_sloppy_arguments = false;
 
-  // Functions have 'arguments' declared implicitly in all non arrow functions.
   if (arguments_ != nullptr) {
+    DCHECK(!is_arrow_scope());
     // 'arguments' is used. Unless there is also a parameter called
     // 'arguments', we must be conservative and allocate all parameters to
     // the context assuming they will be captured by the arguments object.
@@ -1533,21 +1780,18 @@
       // allocate the arguments object by nulling out arguments_.
       arguments_ = nullptr;
     }
-
-  } else {
-    DCHECK(is_arrow_scope());
   }
 
   // The same parameter may occur multiple times in the parameters_ list.
   // If it does, and if it is not copied into the context object, it must
   // receive the highest parameter index for that parameter; thus iteration
   // order is relevant!
-  for (int i = params_.length() - 1; i >= 0; --i) {
-    if (i == rest_index_) continue;
+  for (int i = num_parameters() - 1; i >= 0; --i) {
     Variable* var = params_[i];
-
-    DCHECK(var->scope() == this);
+    DCHECK(!has_rest_ || var != rest_parameter());
+    DCHECK_EQ(this, var->scope());
     if (uses_sloppy_arguments) {
+      var->set_is_used();
       var->ForceContextAllocation();
     }
     AllocateParameter(var, i);
@@ -1567,8 +1811,6 @@
         var->AllocateTo(VariableLocation::PARAMETER, index);
       }
     }
-  } else {
-    DCHECK(!var->IsGlobalSlot());
   }
 }
 
@@ -1590,38 +1832,9 @@
   }
 }
 
-void Scope::AllocateDeclaredGlobal(Variable* var) {
-  DCHECK(var->scope() == this);
-  if (var->IsUnallocated()) {
-    if (var->IsStaticGlobalObjectProperty()) {
-      DCHECK_EQ(-1, var->index());
-      DCHECK(var->name()->IsString());
-      var->AllocateTo(VariableLocation::GLOBAL, num_heap_slots_++);
-      num_global_slots_++;
-    } else {
-      // There must be only DYNAMIC_GLOBAL in the script scope.
-      DCHECK(!is_script_scope() || DYNAMIC_GLOBAL == var->mode());
-    }
-  }
-}
-
 void Scope::AllocateNonParameterLocalsAndDeclaredGlobals() {
-  // All variables that have no rewrite yet are non-parameter locals.
-  if (is_declaration_scope()) {
-    ZoneList<Variable*>* temps = AsDeclarationScope()->temps();
-    for (int i = 0; i < temps->length(); i++) {
-      AllocateNonParameterLocal((*temps)[i]);
-    }
-  }
-
-  for (int i = 0; i < ordered_variables_.length(); i++) {
-    AllocateNonParameterLocal(ordered_variables_[i]);
-  }
-
-  if (FLAG_global_var_shortcuts) {
-    for (int i = 0; i < ordered_variables_.length(); i++) {
-      AllocateDeclaredGlobal(ordered_variables_[i]);
-    }
+  for (int i = 0; i < locals_.length(); i++) {
+    AllocateNonParameterLocal(locals_[i]);
   }
 
   if (is_declaration_scope()) {
@@ -1638,8 +1851,8 @@
     AllocateNonParameterLocal(function_);
   }
 
-  DCHECK(!has_rest_parameter() || !MustAllocate(params_[rest_index_]) ||
-         !params_[rest_index_]->IsUnallocated());
+  DCHECK(!has_rest_ || !MustAllocate(rest_parameter()) ||
+         !rest_parameter()->IsUnallocated());
 
   if (new_target_ != nullptr && !MustAllocate(new_target_)) {
     new_target_ = nullptr;
@@ -1651,23 +1864,23 @@
 }
 
 void ModuleScope::AllocateModuleVariables() {
-  for (auto it = module()->regular_imports().begin();
-       it != module()->regular_imports().end(); ++it) {
-    Variable* var = LookupLocal(it->second->local_name);
+  for (const auto& it : module()->regular_imports()) {
+    Variable* var = LookupLocal(it.first);
     // TODO(neis): Use a meaningful index.
     var->AllocateTo(VariableLocation::MODULE, 42);
   }
 
-  for (auto entry : module()->exports()) {
-    if (entry->local_name == nullptr) continue;
-    Variable* var = LookupLocal(entry->local_name);
-    var->AllocateTo(VariableLocation::MODULE, 42);
+  for (const auto& it : module()->regular_exports()) {
+    Variable* var = LookupLocal(it.first);
+    var->AllocateTo(VariableLocation::MODULE, 0);
   }
 }
 
 void Scope::AllocateVariablesRecursively() {
   DCHECK(!already_resolved_);
   DCHECK_EQ(0, num_stack_slots_);
+  // Don't allocate variables of preparsed scopes.
+  if (is_lazily_parsed_) return;
 
   // Allocate variables for inner scopes.
   for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
@@ -1708,6 +1921,23 @@
   DCHECK(num_heap_slots_ == 0 || num_heap_slots_ >= Context::MIN_CONTEXT_SLOTS);
 }
 
+void Scope::AllocateScopeInfosRecursively(Isolate* isolate, AnalyzeMode mode,
+                                          MaybeHandle<ScopeInfo> outer_scope) {
+  DCHECK(scope_info_.is_null());
+  if (mode == AnalyzeMode::kDebugger || NeedsScopeInfo()) {
+    scope_info_ = ScopeInfo::Create(isolate, zone(), this, outer_scope);
+  }
+
+  // The ScopeInfo chain should mirror the context chain, so we only link to
+  // the next outer scope that needs a context.
+  MaybeHandle<ScopeInfo> next_outer_scope = outer_scope;
+  if (NeedsContext()) next_outer_scope = scope_info_;
+
+  // Allocate ScopeInfos for inner scopes.
+  for (Scope* scope = inner_scope_; scope != nullptr; scope = scope->sibling_) {
+    scope->AllocateScopeInfosRecursively(isolate, mode, next_outer_scope);
+  }
+}
 
 int Scope::StackLocalCount() const {
   Variable* function =
@@ -1723,12 +1953,9 @@
       is_function_scope() ? AsDeclarationScope()->function_var() : nullptr;
   bool is_function_var_in_context =
       function != nullptr && function->IsContextSlot();
-  return num_heap_slots() - Context::MIN_CONTEXT_SLOTS - num_global_slots() -
+  return num_heap_slots() - Context::MIN_CONTEXT_SLOTS -
          (is_function_var_in_context ? 1 : 0);
 }
 
-
-int Scope::ContextGlobalCount() const { return num_global_slots(); }
-
 }  // namespace internal
 }  // namespace v8
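
The new Scope::AllocateScopeInfosRecursively above only hands a scope's own ScopeInfo down to its children when that scope actually needs a context, so the ScopeInfo chain ends up mirroring the context chain. A minimal standalone sketch of that recursion pattern, using made-up Node/Info types rather than V8's Scope and ScopeInfo:

#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-ins for Scope/ScopeInfo, for illustration only.
struct Info {
  std::string name;
  const Info* outer;  // nearest enclosing scope that has a context
};

struct Node {
  std::string name;
  bool needs_context = false;
  std::vector<std::unique_ptr<Node>> inner;
  std::unique_ptr<Info> info;

  void AllocateInfosRecursively(const Info* outer) {
    if (needs_context) info.reset(new Info{name, outer});
    // Only a context-bearing node becomes the new "outer" for its children,
    // so the Info chain mirrors the would-be context chain.
    const Info* next_outer = needs_context ? info.get() : outer;
    for (auto& child : inner) child->AllocateInfosRecursively(next_outer);
  }
};
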
diff --git a/src/ast/scopes.h b/src/ast/scopes.h
index 8c00927..0acff8a 100644
--- a/src/ast/scopes.h
+++ b/src/ast/scopes.h
@@ -5,15 +5,22 @@
 #ifndef V8_AST_SCOPES_H_
 #define V8_AST_SCOPES_H_
 
-#include "src/ast/ast.h"
 #include "src/base/hashmap.h"
 #include "src/globals.h"
-#include "src/zone.h"
+#include "src/objects.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
 
+class AstNodeFactory;
+class AstValueFactory;
+class AstRawString;
+class Declaration;
 class ParseInfo;
+class SloppyBlockFunctionStatement;
+class StringSet;
+class VariableProxy;
 
 // A hash map to support fast variable declaration and lookup.
 class VariableMap: public ZoneHashMap {
@@ -21,34 +28,14 @@
   explicit VariableMap(Zone* zone);
 
   Variable* Declare(Zone* zone, Scope* scope, const AstRawString* name,
-                    VariableMode mode, Variable::Kind kind,
+                    VariableMode mode, VariableKind kind,
                     InitializationFlag initialization_flag,
                     MaybeAssignedFlag maybe_assigned_flag = kNotAssigned,
                     bool* added = nullptr);
 
   Variable* Lookup(const AstRawString* name);
-};
-
-
-// The dynamic scope part holds hash maps for the variables that will
-// be looked up dynamically from within eval and with scopes. The objects
-// are allocated on-demand from Scope::NonLocal to avoid wasting memory
-// and setup time for scopes that don't need them.
-class DynamicScopePart : public ZoneObject {
- public:
-  explicit DynamicScopePart(Zone* zone) {
-    for (int i = 0; i < 3; i++)
-      maps_[i] = new(zone->New(sizeof(VariableMap))) VariableMap(zone);
-  }
-
-  VariableMap* GetMap(VariableMode mode) {
-    int index = mode - DYNAMIC;
-    DCHECK(index >= 0 && index < 3);
-    return maps_[index];
-  }
-
- private:
-  VariableMap *maps_[3];
+  void Remove(Variable* var);
+  void Add(Zone* zone, Variable* var);
 };
 
 
@@ -60,6 +47,7 @@
                SloppyBlockFunctionStatement* statement);
 };
 
+enum class AnalyzeMode { kRegular, kDebugger };
 
 // Global invariants after AST construction: Each reference (i.e. identifier)
 // to a JavaScript variable (including global properties) is represented by a
@@ -86,6 +74,7 @@
   void SetScopeName(const AstRawString* scope_name) {
     scope_name_ = scope_name;
   }
+  void set_needs_migration() { needs_migration_ = true; }
 #endif
 
   // TODO(verwaest): Is this needed on Scope?
@@ -106,18 +95,14 @@
     Scope* outer_scope_;
     Scope* top_inner_scope_;
     VariableProxy* top_unresolved_;
-    int top_temp_;
+    int top_local_;
+    int top_decl_;
   };
 
-  // Compute top scope and allocate variables. For lazy compilation the top
-  // scope only contains the single lazily compiled function, so this
-  // doesn't re-allocate variables repeatedly.
-  static void Analyze(ParseInfo* info);
-
-  enum class DeserializationMode { kDeserializeOffHeap, kKeepScopeInfo };
+  enum class DeserializationMode { kIncludingVariables, kScopesOnly };
 
   static Scope* DeserializeScopeChain(Isolate* isolate, Zone* zone,
-                                      Context* context,
+                                      ScopeInfo* scope_info,
                                       DeclarationScope* script_scope,
                                       AstValueFactory* ast_value_factory,
                                       DeserializationMode deserialization_mode);
@@ -127,6 +112,11 @@
   // tree and its children are reparented.
   Scope* FinalizeBlockScope();
 
+  bool HasBeenRemoved() const;
+
+  // Find the first scope that hasn't been removed.
+  Scope* GetUnremovedScope();
+
   // Inserts outer_scope into this scope's scope chain (and removes this
   // from the current outer_scope_'s inner scope list).
   // Assumes outer_scope_ is non-null.
@@ -142,7 +132,13 @@
   // Declarations
 
   // Lookup a variable in this scope. Returns the variable or NULL if not found.
-  Variable* LookupLocal(const AstRawString* name);
+  Variable* LookupLocal(const AstRawString* name) {
+    Variable* result = variables_.Lookup(name);
+    if (result != nullptr || scope_info_.is_null()) return result;
+    return LookupInScopeInfo(name);
+  }
+
+  Variable* LookupInScopeInfo(const AstRawString* name);
 
   // Lookup a variable in this scope or outer scopes.
   // Returns the variable or NULL if not found.
@@ -151,36 +147,28 @@
   // Declare a local variable in this scope. If the variable has been
   // declared before, the previously declared variable is returned.
   Variable* DeclareLocal(const AstRawString* name, VariableMode mode,
-                         InitializationFlag init_flag, Variable::Kind kind,
+                         InitializationFlag init_flag, VariableKind kind,
                          MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
 
+  Variable* DeclareVariable(Declaration* declaration, VariableMode mode,
+                            InitializationFlag init,
+                            bool allow_harmony_restrictive_generators,
+                            bool* sloppy_mode_block_scope_function_redefinition,
+                            bool* ok);
+
   // Declarations list.
   ZoneList<Declaration*>* declarations() { return &decls_; }
 
+  ZoneList<Variable*>* locals() { return &locals_; }
+
   // Create a new unresolved variable.
   VariableProxy* NewUnresolved(AstNodeFactory* factory,
                                const AstRawString* name,
                                int start_position = kNoSourcePosition,
                                int end_position = kNoSourcePosition,
-                               Variable::Kind kind = Variable::NORMAL) {
-    // Note that we must not share the unresolved variables with
-    // the same name because they may be removed selectively via
-    // RemoveUnresolved().
-    DCHECK(!already_resolved_);
-    DCHECK_EQ(factory->zone(), zone());
-    VariableProxy* proxy =
-        factory->NewVariableProxy(name, kind, start_position, end_position);
-    proxy->set_next_unresolved(unresolved_);
-    unresolved_ = proxy;
-    return proxy;
-  }
+                               VariableKind kind = NORMAL_VARIABLE);
 
-  void AddUnresolved(VariableProxy* proxy) {
-    DCHECK(!already_resolved_);
-    DCHECK(!proxy->is_resolved());
-    proxy->set_next_unresolved(unresolved_);
-    unresolved_ = proxy;
-  }
+  void AddUnresolved(VariableProxy* proxy);
 
   // Remove an unresolved variable. During parsing, an unresolved variable
   // may have been added optimistically, but then only the variable name
@@ -189,6 +177,7 @@
   // allocated globally as a "ghost" variable. RemoveUnresolved removes
   // such a variable again if it was added; otherwise this is a no-op.
   bool RemoveUnresolved(VariableProxy* var);
+  bool RemoveUnresolved(const AstRawString* name);
 
   // Creates a new temporary variable in this scope's TemporaryScope.  The
   // name is only used for printing and cannot be used to find the variable.
@@ -198,11 +187,6 @@
   // TODO(verwaest): Move to DeclarationScope?
   Variable* NewTemporary(const AstRawString* name);
 
-  // Adds the specific declaration node to the list of declarations in
-  // this scope. The declarations are processed as part of entering
-  // the scope; see codegen.cc:ProcessDeclarations.
-  void AddDeclaration(Declaration* declaration);
-
   // ---------------------------------------------------------------------------
   // Illegal redeclaration support.
 
@@ -223,10 +207,15 @@
   // Scope-specific info.
 
   // Inform the scope and outer scopes that the corresponding code contains an
-  // eval call.
+  // eval call. We don't record eval calls from inner scopes in the outermost
+  // script scope, as we only see those when parsing eagerly. If we recorded the
+  // calls then, the outermost script scope would look different depending on
+  // whether we parsed eagerly or not, which is undesirable.
   void RecordEvalCall() {
     scope_calls_eval_ = true;
-    for (Scope* scope = this; scope != nullptr; scope = scope->outer_scope()) {
+    inner_scope_calls_eval_ = true;
+    for (Scope* scope = outer_scope(); scope && !scope->is_script_scope();
+         scope = scope->outer_scope()) {
       scope->inner_scope_calls_eval_ = true;
     }
   }
@@ -353,24 +342,16 @@
   // ---------------------------------------------------------------------------
   // Variable allocation.
 
-  // Collect stack and context allocated local variables in this scope. Note
-  // that the function variable - if present - is not collected and should be
-  // handled separately.
-  void CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
-                                    ZoneList<Variable*>* context_locals,
-                                    ZoneList<Variable*>* context_globals);
-
   // Result of variable allocation.
   int num_stack_slots() const { return num_stack_slots_; }
   int num_heap_slots() const { return num_heap_slots_; }
-  int num_global_slots() const { return num_global_slots_; }
 
   int StackLocalCount() const;
   int ContextLocalCount() const;
-  int ContextGlobalCount() const;
 
-  // Determine if we can parse a function literal in this scope lazily.
-  bool AllowsLazyParsing() const;
+  // Determine if we can parse a function literal in this scope lazily without
+  // caring about the unresolved variables within.
+  bool AllowsLazyParsingWithoutUnresolvedVariables() const;
 
   // The number of contexts between this and scope; zero if this == scope.
   int ContextChainLength(Scope* scope) const;
@@ -398,10 +379,13 @@
   // 'this' is bound, and what determines the function kind.
   DeclarationScope* GetReceiverScope();
 
-  // Creates a scope info if it doesn't already exist.
-  Handle<ScopeInfo> GetScopeInfo(Isolate* isolate);
+  // Find the module scope, assuming there is one.
+  ModuleScope* GetModuleScope();
 
-  // GetScopeInfo() must have been called once to create the ScopeInfo.
+  // Find the innermost outer scope that needs a context.
+  Scope* GetOuterScopeWithContext();
+
+  // Analyze() must have been called once to create the ScopeInfo.
   Handle<ScopeInfo> scope_info() {
     DCHECK(!scope_info_.is_null());
     return scope_info_;
@@ -436,9 +420,11 @@
   // Retrieve `IsSimpleParameterList` of current or outer function.
   bool HasSimpleParameters();
   void set_is_debug_evaluate_scope() { is_debug_evaluate_scope_ = true; }
+  bool is_debug_evaluate_scope() const { return is_debug_evaluate_scope_; }
+
+  bool is_lazily_parsed() const { return is_lazily_parsed_; }
 
  protected:
-  // Creates a script scope.
   explicit Scope(Zone* zone);
 
   void set_language_mode(LanguageMode language_mode) {
@@ -447,16 +433,32 @@
 
  private:
   Variable* Declare(Zone* zone, Scope* scope, const AstRawString* name,
-                    VariableMode mode, Variable::Kind kind,
+                    VariableMode mode, VariableKind kind,
                     InitializationFlag initialization_flag,
                     MaybeAssignedFlag maybe_assigned_flag = kNotAssigned) {
     bool added;
     Variable* var =
         variables_.Declare(zone, scope, name, mode, kind, initialization_flag,
                            maybe_assigned_flag, &added);
-    if (added) ordered_variables_.Add(var, zone);
+    if (added) locals_.Add(var, zone);
     return var;
   }
+
+  // This method should only be invoked on scopes created during parsing (i.e.,
+  // not deserialized from a context). Also, since NeedsContext() only returns
+  // a valid result after variables are resolved, NeedsScopeInfo()
+  // should also be invoked after resolution.
+  bool NeedsScopeInfo() const {
+    DCHECK(!already_resolved_);
+    // A lazily parsed scope doesn't contain enough information to create a
+    // ScopeInfo from it.
+    if (is_lazily_parsed_) return false;
+    // The debugger expects all functions to have scope infos.
+    // TODO(jochen|yangguo): Remove this requirement.
+    if (is_function_scope()) return true;
+    return NeedsContext();
+  }
+
   Zone* zone_;
 
   // Scope tree.
@@ -473,9 +475,7 @@
   // In case of non-scopeinfo-backed scopes, this contains the variables of the
   // map above in order of addition.
   // TODO(verwaest): Thread through Variable.
-  ZoneList<Variable*> ordered_variables_;
-  // Variables that must be looked up dynamically.
-  DynamicScopePart* dynamics_;
+  ZoneList<Variable*> locals_;
   // Unresolved variables referred to from this scope. The proxies themselves
   // form a linked list of all unresolved proxies.
   VariableProxy* unresolved_;
@@ -490,7 +490,10 @@
 
   // True if it doesn't need scope resolution (e.g., if the scope was
   // constructed based on a serialized scope info or a catch context).
-  bool already_resolved_ : 1;
+  bool already_resolved_;
+  // True if this scope may contain objects from a temp zone that need to be
+  // fixed up.
+  bool needs_migration_;
 #endif
 
   // Source positions.
@@ -500,7 +503,6 @@
   // Computed via AllocateVariables.
   int num_stack_slots_;
   int num_heap_slots_;
-  int num_global_slots_;
 
   // The scope type.
   const ScopeType scope_type_;
@@ -525,79 +527,30 @@
   // True if it holds 'var' declarations.
   bool is_declaration_scope_ : 1;
 
+  bool is_lazily_parsed_ : 1;
+
   // Create a non-local variable with a given name.
   // These variables are looked up dynamically at runtime.
   Variable* NonLocal(const AstRawString* name, VariableMode mode);
 
   // Variable resolution.
-  // Possible results of a recursive variable lookup telling if and how a
-  // variable is bound. These are returned in the output parameter *binding_kind
-  // of the LookupRecursive function.
-  enum BindingKind {
-    // The variable reference could be statically resolved to a variable binding
-    // which is returned. There is no 'with' statement between the reference and
-    // the binding and no scope between the reference scope (inclusive) and
-    // binding scope (exclusive) makes a sloppy 'eval' call.
-    BOUND,
-
-    // The variable reference could be statically resolved to a variable binding
-    // which is returned. There is no 'with' statement between the reference and
-    // the binding, but some scope between the reference scope (inclusive) and
-    // binding scope (exclusive) makes a sloppy 'eval' call, that might
-    // possibly introduce variable bindings shadowing the found one. Thus the
-    // found variable binding is just a guess.
-    BOUND_EVAL_SHADOWED,
-
-    // The variable reference could not be statically resolved to any binding
-    // and thus should be considered referencing a global variable. NULL is
-    // returned. The variable reference is not inside any 'with' statement and
-    // no scope between the reference scope (inclusive) and script scope
-    // (exclusive) makes a sloppy 'eval' call.
-    UNBOUND,
-
-    // The variable reference could not be statically resolved to any binding
-    // NULL is returned. The variable reference is not inside any 'with'
-    // statement, but some scope between the reference scope (inclusive) and
-    // script scope (exclusive) makes a sloppy 'eval' call, that might
-    // possibly introduce a variable binding. Thus the reference should be
-    // considered referencing a global variable unless it is shadowed by an
-    // 'eval' introduced binding.
-    UNBOUND_EVAL_SHADOWED,
-
-    // The variable could not be statically resolved and needs to be looked up
-    // dynamically. NULL is returned. There are two possible reasons:
-    // * A 'with' statement has been encountered and there is no variable
-    //   binding for the name between the variable reference and the 'with'.
-    //   The variable potentially references a property of the 'with' object.
-    // * The code is being executed as part of a call to 'eval' and the calling
-    //   context chain contains either a variable binding for the name or it
-    //   contains a 'with' context.
-    DYNAMIC_LOOKUP
-  };
-
   // Lookup a variable reference given by name recursively starting with this
   // scope, and stopping when reaching the outer_scope_end scope. If the code is
   // executed because of a call to 'eval', the context parameter should be set
   // to the calling context of 'eval'.
-  Variable* LookupRecursive(VariableProxy* proxy, BindingKind* binding_kind,
-                            AstNodeFactory* factory,
-                            Scope* outer_scope_end = nullptr);
-  void ResolveTo(ParseInfo* info, BindingKind binding_kind,
-                 VariableProxy* proxy, Variable* var);
-  void ResolveVariable(ParseInfo* info, VariableProxy* proxy,
-                       AstNodeFactory* factory);
-  void ResolveVariablesRecursively(ParseInfo* info, AstNodeFactory* factory);
+  Variable* LookupRecursive(VariableProxy* proxy, Scope* outer_scope_end);
+  void ResolveTo(ParseInfo* info, VariableProxy* proxy, Variable* var);
+  void ResolveVariable(ParseInfo* info, VariableProxy* proxy);
+  void ResolveVariablesRecursively(ParseInfo* info);
 
   // Finds free variables of this scope. This mutates the unresolved variables
   // list along the way, so full resolution cannot be done afterwards.
   // If a ParseInfo* is passed, non-free variables will be resolved.
   VariableProxy* FetchFreeVariables(DeclarationScope* max_outer_scope,
+                                    bool try_to_resolve = true,
                                     ParseInfo* info = nullptr,
                                     VariableProxy* stack = nullptr);
 
-  // Scope analysis.
-  void PropagateScopeInfo();
-
   // Predicates.
   bool MustAllocate(Variable* var);
   bool MustAllocateInContext(Variable* var);
@@ -610,15 +563,18 @@
   void AllocateNonParameterLocalsAndDeclaredGlobals();
   void AllocateVariablesRecursively();
 
+  void AllocateScopeInfosRecursively(Isolate* isolate, AnalyzeMode mode,
+                                     MaybeHandle<ScopeInfo> outer_scope);
+
   // Construct a scope based on the scope info.
-  Scope(Zone* zone, Scope* inner_scope, ScopeType type,
-        Handle<ScopeInfo> scope_info);
+  Scope(Zone* zone, ScopeType type, Handle<ScopeInfo> scope_info);
 
   // Construct a catch scope with a binding for the name.
-  Scope(Zone* zone, Scope* inner_scope,
-        const AstRawString* catch_variable_name);
+  Scope(Zone* zone, const AstRawString* catch_variable_name,
+        Handle<ScopeInfo> scope_info);
 
   void AddInnerScope(Scope* inner_scope) {
+    DCHECK_EQ(!needs_migration_, inner_scope->zone() == zone());
     inner_scope->sibling_ = inner_scope_;
     inner_scope_ = inner_scope;
     inner_scope->outer_scope_ = this;
@@ -641,9 +597,6 @@
 
   void SetDefaults();
 
-  void DeserializeScopeInfo(Isolate* isolate,
-                            AstValueFactory* ast_value_factory);
-
   friend class DeclarationScope;
 };
 
@@ -651,10 +604,10 @@
  public:
   DeclarationScope(Zone* zone, Scope* outer_scope, ScopeType scope_type,
                    FunctionKind function_kind = kNormalFunction);
-  DeclarationScope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
+  DeclarationScope(Zone* zone, ScopeType scope_type,
                    Handle<ScopeInfo> scope_info);
   // Creates a script scope.
-  explicit DeclarationScope(Zone* zone);
+  DeclarationScope(Zone* zone, AstValueFactory* ast_value_factory);
 
   bool IsDeclaredParameter(const AstRawString* name) {
     // If IsSimpleParameterList is false, duplicate parameters are not allowed,
@@ -681,23 +634,29 @@
                                         IsClassConstructor(function_kind())));
   }
 
+  void SetScriptScopeInfo(Handle<ScopeInfo> scope_info) {
+    DCHECK(is_script_scope());
+    DCHECK(scope_info_.is_null());
+    scope_info_ = scope_info;
+  }
+
   bool asm_module() const { return asm_module_; }
-  void set_asm_module() { asm_module_ = true; }
+  void set_asm_module();
   bool asm_function() const { return asm_function_; }
   void set_asm_function() { asm_module_ = true; }
 
   void DeclareThis(AstValueFactory* ast_value_factory);
+  void DeclareArguments(AstValueFactory* ast_value_factory);
   void DeclareDefaultFunctionVariables(AstValueFactory* ast_value_factory);
 
-  // This lookup corresponds to a lookup in the "intermediate" scope sitting
-  // between this scope and the outer scope. (ECMA-262, 3rd., requires that
-  // the name of named function literal is kept in an intermediate scope
-  // in between this scope and the next outer scope.)
-  Variable* LookupFunctionVar(const AstRawString* name);
-
   // Declare the function variable for a function literal. This variable
   // is in an intermediate scope between this function scope and the
   // outer scope. Only possible for function scopes; at most one variable.
+  //
+  // This function needs to be called after all other variables have been
+  // declared in the scope. It will add a variable for {name} to {variables_};
+  // either the function variable itself, or a non-local in case the function
+  // calls sloppy eval.
   Variable* DeclareFunctionVar(const AstRawString* name);
 
   // Declare a parameter in this scope.  When there are duplicated
@@ -712,7 +671,7 @@
   // scope) by a reference to an unresolved variable with no intervening
   // with statements or eval calls.
   Variable* DeclareDynamicGlobal(const AstRawString* name,
-                                 Variable::Kind variable_kind);
+                                 VariableKind variable_kind);
 
   // The variable corresponding to the 'this' value.
   Variable* receiver() {
@@ -739,43 +698,36 @@
   }
 
   // Parameters. The left-most parameter has index 0.
-  // Only valid for function scopes.
+  // Only valid for function and module scopes.
   Variable* parameter(int index) const {
-    DCHECK(is_function_scope());
+    DCHECK(is_function_scope() || is_module_scope());
     return params_[index];
   }
 
   // Returns the default function arity excluding default or rest parameters.
-  int default_function_length() const { return arity_; }
+  // This will be used to set the length of the function, by default.
+  // Class field initializers use this property to indicate the number of
+  // fields being initialized.
+  int arity() const { return arity_; }
 
-  // Returns the number of formal parameters, up to but not including the
-  // rest parameter index (if the function has rest parameters), i.e. it
-  // says 2 for
-  //
-  //   function foo(a, b) { ... }
-  //
-  // and
-  //
-  //   function foo(a, b, ...c) { ... }
-  //
-  // but for
-  //
-  //   function foo(a, b, c = 1) { ... }
-  //
-  // we return 3 here.
+  // Normal code should not need to call this. Class field initializers use this
+  // property to indicate the number of fields being initialized.
+  void set_arity(int arity) { arity_ = arity; }
+
+  // Returns the number of formal parameters, excluding a possible rest
+  // parameter.  Examples:
+  //   function foo(a, b) {}         ==> 2
+  //   function foo(a, b, ...c) {}   ==> 2
+  //   function foo(a, b, c = 1) {}  ==> 3
   int num_parameters() const {
-    return has_rest_parameter() ? params_.length() - 1 : params_.length();
+    return has_rest_ ? params_.length() - 1 : params_.length();
   }
 
-  // A function can have at most one rest parameter. Returns Variable* or NULL.
-  Variable* rest_parameter(int* index) const {
-    *index = rest_index_;
-    if (rest_index_ < 0) return nullptr;
-    return params_[rest_index_];
+  // The function's rest parameter (nullptr if there is none).
+  Variable* rest_parameter() const {
+    return has_rest_ ? params_[params_.length() - 1] : nullptr;
   }
 
-  bool has_rest_parameter() const { return rest_index_ >= 0; }
-
   bool has_simple_parameters() const { return has_simple_parameters_; }
 
   // TODO(caitp): manage this state in a better way. PreParser must be able to
@@ -803,44 +755,40 @@
     return this_function_;
   }
 
-  // Adds a temporary variable in this scope's TemporaryScope. This is for
-  // adjusting the scope of temporaries used when desugaring parameter
+  // Adds a local variable in this scope's locals list. This is for adjusting
+  // the scope of temporaries and do-expression vars when desugaring parameter
   // initializers.
-  void AddTemporary(Variable* var) {
+  void AddLocal(Variable* var) {
     DCHECK(!already_resolved_);
     // Temporaries are only placed in ClosureScopes.
     DCHECK_EQ(GetClosureScope(), this);
-    temps_.Add(var, zone());
+    locals_.Add(var, zone());
   }
 
-  ZoneList<Variable*>* temps() { return &temps_; }
-
   void DeclareSloppyBlockFunction(const AstRawString* name,
                                   SloppyBlockFunctionStatement* statement) {
     sloppy_block_function_map_.Declare(zone(), name, statement);
   }
 
+  // Go through sloppy_block_function_map_ and hoist into this scope those
+  // functions that should be hoisted.
+  void HoistSloppyBlockFunctions(AstNodeFactory* factory);
+
   SloppyBlockFunctionMap* sloppy_block_function_map() {
     return &sloppy_block_function_map_;
   }
 
-  // Resolve and fill in the allocation information for all variables
-  // in this scopes. Must be called *after* all scopes have been
-  // processed (parsed) to ensure that unresolved variables can be
-  // resolved properly.
-  //
-  // In the case of code compiled and run using 'eval', the context
-  // parameter is the context in which eval was called.  In all other
-  // cases the context parameter is an empty handle.
-  void AllocateVariables(ParseInfo* info, AstNodeFactory* factory);
+  // Compute top scope and allocate variables. For lazy compilation the top
+  // scope only contains the single lazily compiled function, so this
+  // doesn't re-allocate variables repeatedly.
+  static void Analyze(ParseInfo* info, AnalyzeMode mode);
 
   // To be called during parsing. Do just enough scope analysis that we can
   // discard the Scope for lazily compiled functions. In particular, this
   // records variables which cannot be resolved inside the Scope (we don't yet
   // know what they will resolve to since the outer Scopes are incomplete) and
   // migrates them into migrate_to.
-  void AnalyzePartially(DeclarationScope* migrate_to,
-                        AstNodeFactory* ast_node_factory);
+  void AnalyzePartially(AstNodeFactory* ast_node_factory);
 
   Handle<StringSet> CollectNonLocals(ParseInfo* info,
                                      Handle<StringSet> non_locals);
@@ -868,9 +816,21 @@
   void AllocateParameterLocals();
   void AllocateReceiver();
 
+  void ResetAfterPreparsing(AstValueFactory* ast_value_factory, bool aborted);
+
  private:
   void AllocateParameter(Variable* var, int index);
 
+  // Resolve and fill in the allocation information for all variables
+  // in this scope. Must be called *after* all scopes have been
+  // processed (parsed) to ensure that unresolved variables can be
+  // resolved properly.
+  //
+  // In the case of code compiled and run using 'eval', the context
+  // parameter is the context in which eval was called.  In all other
+  // cases the context parameter is an empty handle.
+  void AllocateVariables(ParseInfo* info, AnalyzeMode mode);
+
   void SetDefaults();
 
   // If the scope is a function scope, this is the function kind.
@@ -882,6 +842,8 @@
   // This scope's outer context is an asm module.
   bool asm_function_ : 1;
   bool force_eager_compilation_ : 1;
+  // This function scope has a rest parameter.
+  bool has_rest_ : 1;
   // This scope has a parameter called "arguments".
   bool has_arguments_parameter_ : 1;
   // This scope uses "super" property ('super.foo').
@@ -889,9 +851,6 @@
 
   // Info about the parameter list of a function.
   int arity_;
-  int rest_index_;
-  // Compiler-allocated (user-invisible) temporaries.
-  ZoneList<Variable*> temps_;
   // Parameter list in source order.
   ZoneList<Variable*> params_;
   // Map of function names to lists of functions defined in sloppy blocks
@@ -910,7 +869,14 @@
 
 class ModuleScope final : public DeclarationScope {
  public:
-  ModuleScope(Zone* zone, DeclarationScope* script_scope,
+  ModuleScope(DeclarationScope* script_scope,
+              AstValueFactory* ast_value_factory);
+
+  // Deserialization.
+  // The generated ModuleDescriptor does not preserve all information.  In
+  // particular, its module_requests map will be empty because we no longer need
+  // the map after parsing.
+  ModuleScope(Isolate* isolate, Handle<ScopeInfo> scope_info,
               AstValueFactory* ast_value_factory);
 
   ModuleDescriptor* module() const {
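
With has_rest_ replacing rest_index_ above, the rest parameter (when present) is always the last element of params_, and num_parameters() simply excludes it. A small standalone sketch of that convention, using a hypothetical ParamList rather than the real DeclarationScope:

#include <string>
#include <vector>

// Hypothetical simplified parameter list mirroring the has_rest_/params_ idea.
class ParamList {
 public:
  void AddParam(const std::string& name) { params_.push_back(name); }
  void AddRestParam(const std::string& name) {
    params_.push_back(name);  // the rest parameter goes last
    has_rest_ = true;
  }
  // Formal parameter count, excluding a possible trailing rest parameter.
  int num_parameters() const {
    return static_cast<int>(params_.size()) - (has_rest_ ? 1 : 0);
  }
  const std::string* rest_parameter() const {
    return has_rest_ ? &params_.back() : nullptr;
  }

 private:
  std::vector<std::string> params_;
  bool has_rest_ = false;
};

// function foo(a, b, ...c) {}  ==> num_parameters() == 2, rest_parameter() is "c".
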
diff --git a/src/ast/variables.cc b/src/ast/variables.cc
index 0541f94..cc269cd 100644
--- a/src/ast/variables.cc
+++ b/src/ast/variables.cc
@@ -13,36 +13,20 @@
 // ----------------------------------------------------------------------------
 // Implementation Variable.
 
-const char* Variable::Mode2String(VariableMode mode) {
-  switch (mode) {
-    case VAR: return "VAR";
-    case CONST_LEGACY: return "CONST_LEGACY";
-    case LET: return "LET";
-    case CONST: return "CONST";
-    case DYNAMIC: return "DYNAMIC";
-    case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL";
-    case DYNAMIC_LOCAL: return "DYNAMIC_LOCAL";
-    case TEMPORARY: return "TEMPORARY";
-  }
-  UNREACHABLE();
-  return NULL;
-}
-
 Variable::Variable(Scope* scope, const AstRawString* name, VariableMode mode,
-                   Kind kind, InitializationFlag initialization_flag,
+                   VariableKind kind, InitializationFlag initialization_flag,
                    MaybeAssignedFlag maybe_assigned_flag)
     : scope_(scope),
       name_(name),
-      mode_(mode),
-      kind_(kind),
-      location_(VariableLocation::UNALLOCATED),
+      local_if_not_shadowed_(nullptr),
       index_(-1),
       initializer_position_(kNoSourcePosition),
-      local_if_not_shadowed_(NULL),
-      force_context_allocation_(false),
-      is_used_(false),
-      initialization_flag_(initialization_flag),
-      maybe_assigned_(maybe_assigned_flag) {
+      bit_field_(MaybeAssignedFlagField::encode(maybe_assigned_flag) |
+                 InitializationFlagField::encode(initialization_flag) |
+                 VariableModeField::encode(mode) | IsUsedField::encode(false) |
+                 ForceContextAllocationField::encode(false) |
+                 LocationField::encode(VariableLocation::UNALLOCATED) |
+                 VariableKindField::encode(kind)) {
   // Var declared variables never need initialization.
   DCHECK(!(mode == VAR && initialization_flag == kNeedsInitialization));
 }
@@ -51,8 +35,8 @@
 bool Variable::IsGlobalObjectProperty() const {
   // Temporaries are never global, they must always be allocated in the
   // activation frame.
-  return (IsDynamicVariableMode(mode_) ||
-          (IsDeclaredVariableMode(mode_) && !IsLexicalVariableMode(mode_))) &&
+  return (IsDynamicVariableMode(mode()) ||
+          (IsDeclaredVariableMode(mode()) && !IsLexicalVariableMode(mode()))) &&
          scope_ != NULL && scope_->is_script_scope();
 }
 
@@ -60,17 +44,10 @@
 bool Variable::IsStaticGlobalObjectProperty() const {
   // Temporaries are never global, they must always be allocated in the
   // activation frame.
-  return (IsDeclaredVariableMode(mode_) && !IsLexicalVariableMode(mode_)) &&
+  return (IsDeclaredVariableMode(mode()) && !IsLexicalVariableMode(mode())) &&
          scope_ != NULL && scope_->is_script_scope();
 }
 
 
-int Variable::CompareIndex(Variable* const* v, Variable* const* w) {
-  int x = (*v)->index();
-  int y = (*w)->index();
-  // Consider sorting them according to type as well?
-  return x - y;
-}
-
 }  // namespace internal
 }  // namespace v8
diff --git a/src/ast/variables.h b/src/ast/variables.h
index f1f63b8..5bc7869 100644
--- a/src/ast/variables.h
+++ b/src/ast/variables.h
@@ -6,7 +6,8 @@
 #define V8_AST_VARIABLES_H_
 
 #include "src/ast/ast-value-factory.h"
-#include "src/zone.h"
+#include "src/globals.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -17,15 +18,10 @@
 // after binding and variable allocation.
 class Variable final : public ZoneObject {
  public:
-  enum Kind { NORMAL, FUNCTION, THIS, ARGUMENTS };
-
-  Variable(Scope* scope, const AstRawString* name, VariableMode mode, Kind kind,
-           InitializationFlag initialization_flag,
+  Variable(Scope* scope, const AstRawString* name, VariableMode mode,
+           VariableKind kind, InitializationFlag initialization_flag,
            MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
 
-  // Printing support
-  static const char* Mode2String(VariableMode mode);
-
   // The source code for an eval() call may refer to a variable that is
   // in an outer scope about which we don't know anything (it may not
   // be the script scope). scope() is NULL in that case. Currently the
@@ -38,51 +34,56 @@
 
   Handle<String> name() const { return name_->string(); }
   const AstRawString* raw_name() const { return name_; }
-  VariableMode mode() const { return mode_; }
+  VariableMode mode() const { return VariableModeField::decode(bit_field_); }
   bool has_forced_context_allocation() const {
-    return force_context_allocation_;
+    return ForceContextAllocationField::decode(bit_field_);
   }
   void ForceContextAllocation() {
-    DCHECK(IsUnallocated() || IsContextSlot());
-    force_context_allocation_ = true;
+    DCHECK(IsUnallocated() || IsContextSlot() ||
+           location() == VariableLocation::MODULE);
+    bit_field_ = ForceContextAllocationField::update(bit_field_, true);
   }
-  bool is_used() { return is_used_; }
-  void set_is_used() { is_used_ = true; }
-  MaybeAssignedFlag maybe_assigned() const { return maybe_assigned_; }
-  void set_maybe_assigned() { maybe_assigned_ = kMaybeAssigned; }
+  bool is_used() { return IsUsedField::decode(bit_field_); }
+  void set_is_used() { bit_field_ = IsUsedField::update(bit_field_, true); }
+  MaybeAssignedFlag maybe_assigned() const {
+    return MaybeAssignedFlagField::decode(bit_field_);
+  }
+  void set_maybe_assigned() {
+    bit_field_ = MaybeAssignedFlagField::update(bit_field_, kMaybeAssigned);
+  }
 
   int initializer_position() { return initializer_position_; }
   void set_initializer_position(int pos) { initializer_position_ = pos; }
 
   bool IsUnallocated() const {
-    return location_ == VariableLocation::UNALLOCATED;
+    return location() == VariableLocation::UNALLOCATED;
   }
-  bool IsParameter() const { return location_ == VariableLocation::PARAMETER; }
-  bool IsStackLocal() const { return location_ == VariableLocation::LOCAL; }
+  bool IsParameter() const { return location() == VariableLocation::PARAMETER; }
+  bool IsStackLocal() const { return location() == VariableLocation::LOCAL; }
   bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
-  bool IsContextSlot() const { return location_ == VariableLocation::CONTEXT; }
-  bool IsGlobalSlot() const { return location_ == VariableLocation::GLOBAL; }
-  bool IsUnallocatedOrGlobalSlot() const {
-    return IsUnallocated() || IsGlobalSlot();
-  }
-  bool IsLookupSlot() const { return location_ == VariableLocation::LOOKUP; }
+  bool IsContextSlot() const { return location() == VariableLocation::CONTEXT; }
+  bool IsLookupSlot() const { return location() == VariableLocation::LOOKUP; }
   bool IsGlobalObjectProperty() const;
   bool IsStaticGlobalObjectProperty() const;
 
-  bool is_dynamic() const { return IsDynamicVariableMode(mode_); }
-  bool is_const_mode() const { return IsImmutableVariableMode(mode_); }
+  bool is_dynamic() const { return IsDynamicVariableMode(mode()); }
   bool binding_needs_init() const {
-    DCHECK(initialization_flag_ != kNeedsInitialization ||
-           IsLexicalVariableMode(mode_));
-    return initialization_flag_ == kNeedsInitialization;
+    DCHECK(initialization_flag() != kNeedsInitialization ||
+           IsLexicalVariableMode(mode()));
+    return initialization_flag() == kNeedsInitialization;
+  }
+  bool throw_on_const_assignment(LanguageMode language_mode) const {
+    return kind() != SLOPPY_FUNCTION_NAME_VARIABLE || is_strict(language_mode);
   }
 
-  bool is_function() const { return kind_ == FUNCTION; }
-  bool is_this() const { return kind_ == THIS; }
-  bool is_arguments() const { return kind_ == ARGUMENTS; }
+  bool is_function() const { return kind() == FUNCTION_VARIABLE; }
+  bool is_this() const { return kind() == THIS_VARIABLE; }
+  bool is_sloppy_function_name() const {
+    return kind() == SLOPPY_FUNCTION_NAME_VARIABLE;
+  }
 
   Variable* local_if_not_shadowed() const {
-    DCHECK(mode_ == DYNAMIC_LOCAL && local_if_not_shadowed_ != NULL);
+    DCHECK(mode() == DYNAMIC_LOCAL && local_if_not_shadowed_ != NULL);
     return local_if_not_shadowed_;
   }
 
@@ -90,40 +91,61 @@
     local_if_not_shadowed_ = local;
   }
 
-  VariableLocation location() const { return location_; }
-  int index() const { return index_; }
+  VariableLocation location() const {
+    return LocationField::decode(bit_field_);
+  }
+  VariableKind kind() const { return VariableKindField::decode(bit_field_); }
   InitializationFlag initialization_flag() const {
-    return initialization_flag_;
+    return InitializationFlagField::decode(bit_field_);
+  }
+
+  int index() const { return index_; }
+
+  bool IsExport() const {
+    DCHECK(location() == VariableLocation::MODULE);
+    return index() == 0;
   }
 
   void AllocateTo(VariableLocation location, int index) {
-    DCHECK(IsUnallocated() || (location_ == location && index_ == index));
-    location_ = location;
+    DCHECK(IsUnallocated() ||
+           (this->location() == location && this->index() == index));
+    bit_field_ = LocationField::update(bit_field_, location);
+    DCHECK_EQ(location, this->location());
     index_ = index;
   }
 
-  static int CompareIndex(Variable* const* v, Variable* const* w);
+  static InitializationFlag DefaultInitializationFlag(VariableMode mode) {
+    DCHECK(IsDeclaredVariableMode(mode));
+    return mode == VAR ? kCreatedInitialized : kNeedsInitialization;
+  }
 
  private:
   Scope* scope_;
   const AstRawString* name_;
-  VariableMode mode_;
-  Kind kind_;
-  VariableLocation location_;
-  int index_;
-  int initializer_position_;
 
   // If this field is set, this variable references the stored locally bound
   // variable, but it might be shadowed by variable bindings introduced by
   // sloppy 'eval' calls between the reference scope (inclusive) and the
   // binding scope (exclusive).
   Variable* local_if_not_shadowed_;
+  int index_;
+  int initializer_position_;
+  uint16_t bit_field_;
 
-  // Usage info.
-  bool force_context_allocation_;  // set by variable resolver
-  bool is_used_;
-  InitializationFlag initialization_flag_;
-  MaybeAssignedFlag maybe_assigned_;
+  class VariableModeField : public BitField16<VariableMode, 0, 3> {};
+  class VariableKindField
+      : public BitField16<VariableKind, VariableModeField::kNext, 3> {};
+  class LocationField
+      : public BitField16<VariableLocation, VariableKindField::kNext, 3> {};
+  class ForceContextAllocationField
+      : public BitField16<bool, LocationField::kNext, 1> {};
+  class IsUsedField
+      : public BitField16<bool, ForceContextAllocationField::kNext, 1> {};
+  class InitializationFlagField
+      : public BitField16<InitializationFlag, IsUsedField::kNext, 2> {};
+  class MaybeAssignedFlagField
+      : public BitField16<MaybeAssignedFlag, InitializationFlagField::kNext,
+                          2> {};
 };
 }  // namespace internal
 }  // namespace v8
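
The variables.h rewrite above folds Variable's mode, kind, location and flags into one uint16_t via BitField16. A rough standalone sketch of that bit-packing technique, with a hand-rolled Field helper and illustrative enum values standing in for V8's BitField16 and VariableMode:

#include <cstdint>

// Simplified stand-in for V8's BitField16: a value of |size| bits stored at
// offset |shift| inside a uint16_t.
template <typename T, int shift, int size>
struct Field {
  static constexpr uint16_t kMask =
      static_cast<uint16_t>(((1u << size) - 1) << shift);
  static uint16_t encode(T value) {
    return static_cast<uint16_t>(static_cast<uint32_t>(value) << shift);
  }
  static T decode(uint16_t bits) {
    return static_cast<T>((bits & kMask) >> shift);
  }
  static uint16_t update(uint16_t bits, T value) {
    return static_cast<uint16_t>((bits & ~kMask) | encode(value));
  }
};

enum Mode : uint8_t { kVar, kLet, kConst };  // illustrative, not VariableMode
using ModeField = Field<Mode, 0, 3>;
using IsUsedField = Field<bool, 3, 1>;

// uint16_t bits = ModeField::encode(kLet) | IsUsedField::encode(false);
// bits = IsUsedField::update(bits, true);
// Mode m = ModeField::decode(bits);        // kLet
// bool used = IsUsedField::decode(bits);   // true

The trade-off is a mask-and-shift on each accessor in exchange for a noticeably smaller Variable object.
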
diff --git a/src/background-parsing-task.cc b/src/background-parsing-task.cc
index 5df46c8..83075c1 100644
--- a/src/background-parsing-task.cc
+++ b/src/background-parsing-task.cc
@@ -3,11 +3,19 @@
 // found in the LICENSE file.
 
 #include "src/background-parsing-task.h"
+
 #include "src/debug/debug.h"
+#include "src/parsing/parser.h"
 
 namespace v8 {
 namespace internal {
 
+void StreamedSource::Release() {
+  parser.reset();
+  info.reset();
+  zone.reset();
+}
+
 BackgroundParsingTask::BackgroundParsingTask(
     StreamedSource* source, ScriptCompiler::CompileOptions options,
     int stack_size, Isolate* isolate)
@@ -42,9 +50,8 @@
   // Parser needs to stay alive for finalizing the parsing on the main
   // thread.
   source_->parser.reset(new Parser(source_->info.get()));
-  source_->parser->DeserializeScopeChain(
-      source_->info.get(), Handle<Context>::null(),
-      Scope::DeserializationMode::kDeserializeOffHeap);
+  source_->parser->DeserializeScopeChain(source_->info.get(),
+                                         MaybeHandle<ScopeInfo>());
 }
 
 
@@ -55,8 +62,7 @@
 
   // Reset the stack limit of the parser to reflect correctly that we're on a
   // background thread.
-  uintptr_t stack_limit =
-      reinterpret_cast<uintptr_t>(&stack_limit) - stack_size_ * KB;
+  uintptr_t stack_limit = GetCurrentStackPosition() - stack_size_ * KB;
   source_->parser->set_stack_limit(stack_limit);
 
   // Nullify the Isolate temporarily so that the background parser doesn't
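
The stack-limit change above replaces the address-of-a-local trick with GetCurrentStackPosition(). A minimal standalone sketch of the underlying idea, assuming a downward-growing stack and approximating the current position with a local's address, which is essentially what the removed code did:

#include <cstddef>
#include <cstdint>

// Rough approximation: the address of a fresh local is close to the top of the
// current thread's stack. Production code (e.g. GetCurrentStackPosition) tends
// to use a compiler builtin such as __builtin_frame_address(0) instead.
inline uintptr_t ApproximateStackPosition() {
  int dummy = 0;
  return reinterpret_cast<uintptr_t>(&dummy);
}

// Leave |budget_bytes| of headroom below the current position; once the stack
// pointer drops under this limit, the parser should bail out rather than risk
// a real stack overflow.
inline uintptr_t ComputeStackLimit(size_t budget_bytes) {
  return ApproximateStackPosition() - budget_bytes;
}

// Usage: uintptr_t limit = ComputeStackLimit(stack_size_in_kb * 1024);
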
diff --git a/src/background-parsing-task.h b/src/background-parsing-task.h
index 1bf9d74..d7fe6ba 100644
--- a/src/background-parsing-task.h
+++ b/src/background-parsing-task.h
@@ -7,15 +7,16 @@
 
 #include <memory>
 
+#include "include/v8.h"
 #include "src/base/platform/platform.h"
 #include "src/base/platform/semaphore.h"
-#include "src/compiler.h"
 #include "src/parsing/parse-info.h"
-#include "src/parsing/parser.h"
+#include "src/unicode-cache.h"
 
 namespace v8 {
 namespace internal {
 
+class Parser;
 class ScriptData;
 
 // Internal representation of v8::ScriptCompiler::StreamedSource. Contains all
@@ -26,6 +27,8 @@
                  ScriptCompiler::StreamedSource::Encoding encoding)
       : source_stream(source_stream), encoding(encoding) {}
 
+  void Release();
+
   // Internal implementation of v8::ScriptCompiler::StreamedSource.
   std::unique_ptr<ScriptCompiler::ExternalSourceStream> source_stream;
   ScriptCompiler::StreamedSource::Encoding encoding;
@@ -39,10 +42,9 @@
   std::unique_ptr<ParseInfo> info;
   std::unique_ptr<Parser> parser;
 
- private:
-  // Prevent copying. Not implemented.
-  StreamedSource(const StreamedSource&);
-  StreamedSource& operator=(const StreamedSource&);
+  // Prevent copying.
+  StreamedSource(const StreamedSource&) = delete;
+  StreamedSource& operator=(const StreamedSource&) = delete;
 };
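
StreamedSource above now deletes its copy operations instead of declaring them private and leaving them unimplemented, turning an accidental copy into a compile-time error rather than a link-time one. A tiny standalone example of the idiom with a hypothetical type:

// Copying is rejected by the compiler at the call site.
class NonCopyable {
 public:
  NonCopyable() = default;
  NonCopyable(const NonCopyable&) = delete;
  NonCopyable& operator=(const NonCopyable&) = delete;
};

// NonCopyable a;
// NonCopyable b = a;  // error: use of deleted copy constructor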
 
 
diff --git a/src/bailout-reason.h b/src/bailout-reason.h
index df47eb8..6b7da16 100644
--- a/src/bailout-reason.h
+++ b/src/bailout-reason.h
@@ -20,7 +20,6 @@
   V(kArgumentsObjectValueInATestContext,                                       \
     "Arguments object value in a test context")                                \
   V(kArrayIndexConstantValueTooBig, "Array index constant value too big")      \
-  V(kAssignmentToArguments, "Assignment to arguments")                         \
   V(kAssignmentToLetVariableBeforeInitialization,                              \
     "Assignment to let variable before initialization")                        \
   V(kAssignmentToLOOKUPVariable, "Assignment to LOOKUP variable")              \
@@ -64,6 +63,8 @@
   V(kEval, "eval")                                                             \
   V(kExpectedAllocationSite, "Expected allocation site")                       \
   V(kExpectedBooleanValue, "Expected boolean value")                           \
+  V(kExpectedFixedDoubleArrayMap,                                              \
+    "Expected a fixed double array map in fast shallow clone array literal")   \
   V(kExpectedFunctionObject, "Expected function object in register")           \
   V(kExpectedHeapNumber, "Expected HeapNumber")                                \
   V(kExpectedJSReceiver, "Expected object to have receiver type")              \
@@ -242,10 +243,6 @@
   V(kUnexpectedTypeForRegExpDataFixedArrayExpected,                            \
     "Unexpected type for RegExp data, FixedArray expected")                    \
   V(kUnexpectedValue, "Unexpected value")                                      \
-  V(kUnsupportedConstCompoundAssignment,                                       \
-    "Unsupported const compound assignment")                                   \
-  V(kUnsupportedCountOperationWithConst,                                       \
-    "Unsupported count operation with const")                                  \
   V(kUnsupportedDoubleImmediate, "Unsupported double immediate")               \
   V(kUnsupportedLetCompoundAssignment, "Unsupported let compound assignment")  \
   V(kUnsupportedLookupSlotInDeclaration,                                       \
@@ -268,9 +265,7 @@
   V(kWrongArgumentCountForInvokeIntrinsic,                                     \
     "Wrong number of arguments for intrinsic")                                 \
   V(kShouldNotDirectlyEnterOsrFunction,                                        \
-    "Should not directly enter OSR-compiled function")                         \
-  V(kConversionFromImpossibleValue,                                            \
-    "Reached conversion from value with empty type (i.e., impossible type)")
+    "Should not directly enter OSR-compiled function")
 
 #define ERROR_MESSAGES_CONSTANTS(C, T) C,
 enum BailoutReason {
diff --git a/src/base.isolate b/src/base.isolate
index a9cfc89..c457f00 100644
--- a/src/base.isolate
+++ b/src/base.isolate
@@ -4,7 +4,6 @@
 {
   'includes': [
     '../third_party/icu/icu.isolate',
-    '../gypfiles/config/win/msvs_dependencies.isolate',
   ],
   'conditions': [
     ['v8_use_snapshot=="true" and v8_use_external_startup_data==1', {
@@ -15,13 +14,6 @@
         ],
       },
     }],
-    ['OS=="mac" and asan==1', {
-      'variables': {
-        'files': [
-          '<(PRODUCT_DIR)/libclang_rt.asan_osx_dynamic.dylib',
-        ],
-      },
-    }],
     ['tsan==1', {
       'variables': {
         'files': [
diff --git a/src/base/accounting-allocator.cc b/src/base/accounting-allocator.cc
deleted file mode 100644
index c56f037..0000000
--- a/src/base/accounting-allocator.cc
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/base/accounting-allocator.h"
-
-#include <cstdlib>
-
-#if V8_LIBC_BIONIC
-#include <malloc.h>  // NOLINT
-#endif
-
-namespace v8 {
-namespace base {
-
-void* AccountingAllocator::Allocate(size_t bytes) {
-  void* memory = malloc(bytes);
-  if (memory) {
-    AtomicWord current =
-        NoBarrier_AtomicIncrement(&current_memory_usage_, bytes);
-    AtomicWord max = NoBarrier_Load(&max_memory_usage_);
-    while (current > max) {
-      max = NoBarrier_CompareAndSwap(&max_memory_usage_, max, current);
-    }
-  }
-  return memory;
-}
-
-void AccountingAllocator::Free(void* memory, size_t bytes) {
-  free(memory);
-  NoBarrier_AtomicIncrement(&current_memory_usage_,
-                            -static_cast<AtomicWord>(bytes));
-}
-
-size_t AccountingAllocator::GetCurrentMemoryUsage() const {
-  return NoBarrier_Load(&current_memory_usage_);
-}
-
-size_t AccountingAllocator::GetMaxMemoryUsage() const {
-  return NoBarrier_Load(&max_memory_usage_);
-}
-
-}  // namespace base
-}  // namespace v8
diff --git a/src/base/accounting-allocator.h b/src/base/accounting-allocator.h
deleted file mode 100644
index 4e1baf1..0000000
--- a/src/base/accounting-allocator.h
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_BASE_ACCOUNTING_ALLOCATOR_H_
-#define V8_BASE_ACCOUNTING_ALLOCATOR_H_
-
-#include "src/base/atomicops.h"
-#include "src/base/macros.h"
-
-namespace v8 {
-namespace base {
-
-class AccountingAllocator {
- public:
-  AccountingAllocator() = default;
-  virtual ~AccountingAllocator() = default;
-
-  // Returns nullptr on failed allocation.
-  virtual void* Allocate(size_t bytes);
-  virtual void Free(void* memory, size_t bytes);
-
-  size_t GetCurrentMemoryUsage() const;
-  size_t GetMaxMemoryUsage() const;
-
- private:
-  AtomicWord current_memory_usage_ = 0;
-  AtomicWord max_memory_usage_ = 0;
-
-  DISALLOW_COPY_AND_ASSIGN(AccountingAllocator);
-};
-
-}  // namespace base
-}  // namespace v8
-
-#endif  // V8_BASE_ACCOUNTING_ALLOCATOR_H_
diff --git a/src/base/atomic-utils.h b/src/base/atomic-utils.h
index e19385d..31db603 100644
--- a/src/base/atomic-utils.h
+++ b/src/base/atomic-utils.h
@@ -72,6 +72,22 @@
            cast_helper<T>::to_storage_type(old_value);
   }
 
+  V8_INLINE void SetBits(T bits, T mask) {
+    DCHECK_EQ(bits & ~mask, 0);
+    T old_value;
+    T new_value;
+    do {
+      old_value = Value();
+      new_value = (old_value & ~mask) | bits;
+    } while (!TrySetValue(old_value, new_value));
+  }
+
+  V8_INLINE void SetBit(int bit) {
+    SetBits(static_cast<T>(1) << bit, static_cast<T>(1) << bit);
+  }
+
+  V8_INLINE void ClearBit(int bit) { SetBits(0, 1 << bit); }
+
   V8_INLINE void SetValue(T new_value) {
     base::Release_Store(&value_, cast_helper<T>::to_storage_type(new_value));
   }
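
For reference (illustration only, not part of this patch): the new SetBits/SetBit/ClearBit helpers update a masked subset of bits with a retry loop over TrySetValue. Below is a minimal standalone sketch of the same read-modify-CAS pattern, written against std::atomic rather than the V8 class so it compiles on its own.

#include <atomic>
#include <cassert>
#include <cstdint>

// Atomically replaces the bits selected by |mask| with |bits|, leaving all
// other bits untouched even under concurrent updates (same loop shape as
// AtomicValue::SetBits in the hunk above).
void SetBits(std::atomic<uint32_t>& value, uint32_t bits, uint32_t mask) {
  assert((bits & ~mask) == 0);
  uint32_t old_value = value.load();
  uint32_t new_value;
  do {
    new_value = (old_value & ~mask) | bits;
    // On failure, compare_exchange_weak reloads old_value and we retry.
  } while (!value.compare_exchange_weak(old_value, new_value));
}

int main() {
  std::atomic<uint32_t> flags{0};
  SetBits(flags, 1u << 3, 1u << 3);  // equivalent of SetBit(3)
  SetBits(flags, 0, 1u << 3);        // equivalent of ClearBit(3)
  return flags.load() == 0 ? 0 : 1;
}
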
diff --git a/src/base/build_config.h b/src/base/build_config.h
index e033134..d113c2a 100644
--- a/src/base/build_config.h
+++ b/src/base/build_config.h
@@ -55,13 +55,21 @@
     defined(__ARM_ARCH_7R__) || \
     defined(__ARM_ARCH_7__)
 # define CAN_USE_ARMV7_INSTRUCTIONS 1
+#ifdef __ARM_ARCH_EXT_IDIV__
+#define CAN_USE_SUDIV 1
+#endif
 # ifndef CAN_USE_VFP3_INSTRUCTIONS
-#  define CAN_USE_VFP3_INSTRUCTIONS
+#define CAN_USE_VFP3_INSTRUCTIONS 1
 # endif
 #endif
 
 #if defined(__ARM_ARCH_8A__)
+#define CAN_USE_ARMV7_INSTRUCTIONS 1
+#define CAN_USE_SUDIV 1
 # define CAN_USE_ARMV8_INSTRUCTIONS 1
+#ifndef CAN_USE_VFP3_INSTRUCTIONS
+#define CAN_USE_VFP3_INSTRUCTIONS 1
+#endif
 #endif
 
 
@@ -196,11 +204,6 @@
 
 // Number of bits to represent the page size for paged spaces. The value of 20
 // gives 1Mb bytes per page.
-#if V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
-// Bump up for Power Linux due to larger (64K) page size.
-const int kPageSizeBits = 22;
-#else
-const int kPageSizeBits = 20;
-#endif
+const int kPageSizeBits = 19;
 
 #endif  // V8_BASE_BUILD_CONFIG_H_
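
Note that the new unconditional value of 19 replaces both the generic 20 (1 MB pages) and the PPC/Linux-specific 22, shrinking V8 heap pages to 512 KB. A throwaway snippet showing the derived page size (illustration only, not part of this patch):

#include <cstddef>
#include <cstdio>

constexpr int kPageSizeBits = 19;                         // value set above
constexpr size_t kPageSize = size_t{1} << kPageSizeBits;  // 524288 bytes = 512 KB

int main() { std::printf("%zu\n", kPageSize); }
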
diff --git a/src/base/hashmap-entry.h b/src/base/hashmap-entry.h
new file mode 100644
index 0000000..629e734
--- /dev/null
+++ b/src/base/hashmap-entry.h
@@ -0,0 +1,54 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_BASE_HASHMAP_ENTRY_H_
+#define V8_BASE_HASHMAP_ENTRY_H_
+
+#include <cstdint>
+
+namespace v8 {
+namespace base {
+
+// HashMap entries are (key, value, hash) triplets, with a boolean indicating if
+// they are an empty entry. Some clients may not need to use the value slot
+// (e.g. implementers of sets, where the key is the value).
+template <typename Key, typename Value>
+struct TemplateHashMapEntry {
+  Key key;
+  Value value;
+  uint32_t hash;  // The full hash value for key
+
+  TemplateHashMapEntry(Key key, Value value, uint32_t hash)
+      : key(key), value(value), hash(hash), exists_(true) {}
+
+  bool exists() const { return exists_; }
+
+  void clear() { exists_ = false; }
+
+ private:
+  bool exists_;
+};
+
+// Specialization for pointer-valued keys
+template <typename Key, typename Value>
+struct TemplateHashMapEntry<Key*, Value> {
+  Key* key;
+  Value value;
+  uint32_t hash;  // The full hash value for key
+
+  TemplateHashMapEntry(Key* key, Value value, uint32_t hash)
+      : key(key), value(value), hash(hash) {}
+
+  bool exists() const { return key != nullptr; }
+
+  void clear() { key = nullptr; }
+};
+
+// TODO(leszeks): There could be a specialisation for void values (e.g. for
+// sets), which omits the value field
+
+}  // namespace base
+}  // namespace v8
+
+#endif  // V8_BASE_HASHMAP_ENTRY_H_
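
Illustration only (not part of this patch): the pointer-keyed specialization encodes emptiness in the key itself, so clear() just nulls the key instead of maintaining a separate flag. A standalone sketch of that behaviour, mirroring the specialized struct above:

#include <cassert>
#include <cstdint>

struct Entry {  // mirrors TemplateHashMapEntry<Key*, Value> from the new header
  int* key;     // nullptr <=> entry is empty
  int value;
  uint32_t hash;

  Entry(int* key, int value, uint32_t hash)
      : key(key), value(value), hash(hash) {}
  bool exists() const { return key != nullptr; }
  void clear() { key = nullptr; }
};

int main() {
  int k = 42;
  Entry e(&k, 7, 0xdeadbeefu);
  assert(e.exists());
  e.clear();
  assert(!e.exists());
  return 0;
}
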
diff --git a/src/base/hashmap.h b/src/base/hashmap.h
index e3c47de..54038c5 100644
--- a/src/base/hashmap.h
+++ b/src/base/hashmap.h
@@ -12,6 +12,7 @@
 #include <stdlib.h>
 
 #include "src/base/bits.h"
+#include "src/base/hashmap-entry.h"
 #include "src/base/logging.h"
 
 namespace v8 {
@@ -23,10 +24,10 @@
   V8_INLINE static void Delete(void* p) { free(p); }
 };
 
-template <class AllocationPolicy>
+template <typename Key, typename Value, class MatchFun, class AllocationPolicy>
 class TemplateHashMapImpl {
  public:
-  typedef bool (*MatchFun)(void* key1, void* key2);
+  typedef TemplateHashMapEntry<Key, Value> Entry;
 
   // The default capacity.  This is used by the call sites which want
   // to pass in a non-default AllocationPolicy but want to use the
@@ -35,38 +36,36 @@
 
   // initial_capacity is the size of the initial hash map;
   // it must be a power of 2 (and thus must not be 0).
-  TemplateHashMapImpl(MatchFun match,
-                      uint32_t capacity = kDefaultHashMapCapacity,
+  TemplateHashMapImpl(uint32_t capacity = kDefaultHashMapCapacity,
+                      MatchFun match = MatchFun(),
                       AllocationPolicy allocator = AllocationPolicy());
 
   ~TemplateHashMapImpl();
 
-  // HashMap entries are (key, value, hash) triplets.
-  // Some clients may not need to use the value slot
-  // (e.g. implementers of sets, where the key is the value).
-  struct Entry {
-    void* key;
-    void* value;
-    uint32_t hash;  // The full hash value for key
-  };
-
   // If an entry with matching key is found, returns that entry.
-  // Otherwise, NULL is returned.
-  Entry* Lookup(void* key, uint32_t hash) const;
+  // Otherwise, nullptr is returned.
+  Entry* Lookup(const Key& key, uint32_t hash) const;
 
   // If an entry with matching key is found, returns that entry.
   // If no matching entry is found, a new entry is inserted with
-  // corresponding key, key hash, and NULL value.
-  Entry* LookupOrInsert(void* key, uint32_t hash,
+  // corresponding key, key hash, and default initialized value.
+  Entry* LookupOrInsert(const Key& key, uint32_t hash,
                         AllocationPolicy allocator = AllocationPolicy());
 
-  Entry* InsertNew(void* key, uint32_t hash,
+  // If an entry with matching key is found, returns that entry.
+  // If no matching entry is found, a new entry is inserted with
+  // corresponding key, key hash, and value created by func.
+  template <typename Func>
+  Entry* LookupOrInsert(const Key& key, uint32_t hash, const Func& value_func,
+                        AllocationPolicy allocator = AllocationPolicy());
+
+  Entry* InsertNew(const Key& key, uint32_t hash,
                    AllocationPolicy allocator = AllocationPolicy());
 
   // Removes the entry with matching key.
   // It returns the value of the deleted entry
   // or null if there is no value for such key.
-  void* Remove(void* key, uint32_t hash);
+  Value Remove(const Key& key, uint32_t hash);
 
   // Empties the hash map (occupancy() == 0).
   void Clear();
@@ -81,97 +80,101 @@
 
   // Iteration
   //
-  // for (Entry* p = map.Start(); p != NULL; p = map.Next(p)) {
+  // for (Entry* p = map.Start(); p != nullptr; p = map.Next(p)) {
   //   ...
   // }
   //
   // If entries are inserted during iteration, the effect of
   // calling Next() is undefined.
   Entry* Start() const;
-  Entry* Next(Entry* p) const;
-
-  // Some match functions defined for convenience.
-  static bool PointersMatch(void* key1, void* key2) { return key1 == key2; }
+  Entry* Next(Entry* entry) const;
 
  private:
-  MatchFun match_;
   Entry* map_;
   uint32_t capacity_;
   uint32_t occupancy_;
+  // TODO(leszeks): This takes up space even if it has no state, maybe replace
+  // with something that does the empty base optimisation e.g. std::tuple
+  MatchFun match_;
 
   Entry* map_end() const { return map_ + capacity_; }
-  Entry* Probe(void* key, uint32_t hash) const;
+  Entry* Probe(const Key& key, uint32_t hash) const;
+  Entry* FillEmptyEntry(Entry* entry, const Key& key, const Value& value,
+                        uint32_t hash,
+                        AllocationPolicy allocator = AllocationPolicy());
   void Initialize(uint32_t capacity, AllocationPolicy allocator);
   void Resize(AllocationPolicy allocator);
 };
-
-typedef TemplateHashMapImpl<DefaultAllocationPolicy> HashMap;
-
-template <class AllocationPolicy>
-TemplateHashMapImpl<AllocationPolicy>::TemplateHashMapImpl(
-    MatchFun match, uint32_t initial_capacity, AllocationPolicy allocator) {
-  match_ = match;
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::
+    TemplateHashMapImpl(uint32_t initial_capacity, MatchFun match,
+                        AllocationPolicy allocator)
+    : match_(match) {
   Initialize(initial_capacity, allocator);
 }
 
-template <class AllocationPolicy>
-TemplateHashMapImpl<AllocationPolicy>::~TemplateHashMapImpl() {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+TemplateHashMapImpl<Key, Value, MatchFun,
+                    AllocationPolicy>::~TemplateHashMapImpl() {
   AllocationPolicy::Delete(map_);
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::Lookup(void* key, uint32_t hash) const {
-  Entry* p = Probe(key, hash);
-  return p->key != NULL ? p : NULL;
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Lookup(
+    const Key& key, uint32_t hash) const {
+  Entry* entry = Probe(key, hash);
+  return entry->exists() ? entry : nullptr;
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::LookupOrInsert(
-    void* key, uint32_t hash, AllocationPolicy allocator) {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::LookupOrInsert(
+    const Key& key, uint32_t hash, AllocationPolicy allocator) {
+  return LookupOrInsert(key, hash, []() { return Value(); }, allocator);
+}
+
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+template <typename Func>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::LookupOrInsert(
+    const Key& key, uint32_t hash, const Func& value_func,
+    AllocationPolicy allocator) {
   // Find a matching entry.
-  Entry* p = Probe(key, hash);
-  if (p->key != NULL) {
-    return p;
+  Entry* entry = Probe(key, hash);
+  if (entry->exists()) {
+    return entry;
   }
 
-  return InsertNew(key, hash, allocator);
+  return FillEmptyEntry(entry, key, value_func(), hash, allocator);
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::InsertNew(void* key, uint32_t hash,
-                                                 AllocationPolicy allocator) {
-  // Find a matching entry.
-  Entry* p = Probe(key, hash);
-  DCHECK(p->key == NULL);
-
-  // No entry found; insert one.
-  p->key = key;
-  p->value = NULL;
-  p->hash = hash;
-  occupancy_++;
-
-  // Grow the map if we reached >= 80% occupancy.
-  if (occupancy_ + occupancy_ / 4 >= capacity_) {
-    Resize(allocator);
-    p = Probe(key, hash);
-  }
-
-  return p;
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::InsertNew(
+    const Key& key, uint32_t hash, AllocationPolicy allocator) {
+  Entry* entry = Probe(key, hash);
+  return FillEmptyEntry(entry, key, Value(), hash, allocator);
 }
 
-template <class AllocationPolicy>
-void* TemplateHashMapImpl<AllocationPolicy>::Remove(void* key, uint32_t hash) {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+Value TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Remove(
+    const Key& key, uint32_t hash) {
   // Lookup the entry for the key to remove.
   Entry* p = Probe(key, hash);
-  if (p->key == NULL) {
+  if (!p->exists()) {
     // Key not found, nothing to remove.
-    return NULL;
+    return nullptr;
   }
 
-  void* value = p->value;
+  Value value = p->value;
   // To remove an entry we need to ensure that it does not create an empty
   // entry that will cause the search for another entry to stop too soon. If all
   // the entries between the entry to remove and the next empty slot have their
@@ -200,7 +203,7 @@
     // All entries between p and q have their initial position between p and q
     // and the entry p can be cleared without breaking the search for these
     // entries.
-    if (q->key == NULL) {
+    if (!q->exists()) {
       break;
     }
 
@@ -217,67 +220,92 @@
   }
 
   // Clear the entry which is allowed to be emptied.
-  p->key = NULL;
+  p->clear();
   occupancy_--;
   return value;
 }
 
-template <class AllocationPolicy>
-void TemplateHashMapImpl<AllocationPolicy>::Clear() {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+void TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Clear() {
   // Mark all entries as empty.
   const Entry* end = map_end();
-  for (Entry* p = map_; p < end; p++) {
-    p->key = NULL;
+  for (Entry* entry = map_; entry < end; entry++) {
+    entry->clear();
   }
   occupancy_ = 0;
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::Start() const {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Start() const {
   return Next(map_ - 1);
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::Next(Entry* p) const {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Next(
+    Entry* entry) const {
   const Entry* end = map_end();
-  DCHECK(map_ - 1 <= p && p < end);
-  for (p++; p < end; p++) {
-    if (p->key != NULL) {
-      return p;
+  DCHECK(map_ - 1 <= entry && entry < end);
+  for (entry++; entry < end; entry++) {
+    if (entry->exists()) {
+      return entry;
     }
   }
-  return NULL;
+  return nullptr;
 }
 
-template <class AllocationPolicy>
-typename TemplateHashMapImpl<AllocationPolicy>::Entry*
-TemplateHashMapImpl<AllocationPolicy>::Probe(void* key, uint32_t hash) const {
-  DCHECK(key != NULL);
-
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Probe(
+    const Key& key, uint32_t hash) const {
   DCHECK(base::bits::IsPowerOfTwo32(capacity_));
-  Entry* p = map_ + (hash & (capacity_ - 1));
+  Entry* entry = map_ + (hash & (capacity_ - 1));
   const Entry* end = map_end();
-  DCHECK(map_ <= p && p < end);
+  DCHECK(map_ <= entry && entry < end);
 
   DCHECK(occupancy_ < capacity_);  // Guarantees loop termination.
-  while (p->key != NULL && (hash != p->hash || !match_(key, p->key))) {
-    p++;
-    if (p >= end) {
-      p = map_;
+  while (entry->exists() && !match_(hash, entry->hash, key, entry->key)) {
+    entry++;
+    if (entry >= end) {
+      entry = map_;
     }
   }
 
-  return p;
+  return entry;
 }
 
-template <class AllocationPolicy>
-void TemplateHashMapImpl<AllocationPolicy>::Initialize(
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+typename TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Entry*
+TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::FillEmptyEntry(
+    Entry* entry, const Key& key, const Value& value, uint32_t hash,
+    AllocationPolicy allocator) {
+  DCHECK(!entry->exists());
+
+  new (entry) Entry(key, value, hash);
+  occupancy_++;
+
+  // Grow the map if we reached >= 80% occupancy.
+  if (occupancy_ + occupancy_ / 4 >= capacity_) {
+    Resize(allocator);
+    entry = Probe(key, hash);
+  }
+
+  return entry;
+}
+
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+void TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Initialize(
     uint32_t capacity, AllocationPolicy allocator) {
   DCHECK(base::bits::IsPowerOfTwo32(capacity));
   map_ = reinterpret_cast<Entry*>(allocator.New(capacity * sizeof(Entry)));
-  if (map_ == NULL) {
+  if (map_ == nullptr) {
     FATAL("Out of memory: HashMap::Initialize");
     return;
   }
@@ -285,8 +313,10 @@
   Clear();
 }
 
-template <class AllocationPolicy>
-void TemplateHashMapImpl<AllocationPolicy>::Resize(AllocationPolicy allocator) {
+template <typename Key, typename Value, typename MatchFun,
+          class AllocationPolicy>
+void TemplateHashMapImpl<Key, Value, MatchFun, AllocationPolicy>::Resize(
+    AllocationPolicy allocator) {
   Entry* map = map_;
   uint32_t n = occupancy_;
 
@@ -294,10 +324,11 @@
   Initialize(capacity_ * 2, allocator);
 
   // Rehash all current entries.
-  for (Entry* p = map; n > 0; p++) {
-    if (p->key != NULL) {
-      Entry* entry = LookupOrInsert(p->key, p->hash, allocator);
-      entry->value = p->value;
+  for (Entry* entry = map; n > 0; entry++) {
+    if (entry->exists()) {
+      Entry* new_entry = Probe(entry->key, entry->hash);
+      new_entry = FillEmptyEntry(new_entry, entry->key, entry->value,
+                                 entry->hash, allocator);
       n--;
     }
   }
@@ -306,9 +337,83 @@
   AllocationPolicy::Delete(map);
 }
 
+// Match function which compares hashes before executing a (potentially
+// expensive) key comparison.
+template <typename Key, typename MatchFun>
+struct HashEqualityThenKeyMatcher {
+  explicit HashEqualityThenKeyMatcher(MatchFun match) : match_(match) {}
+
+  bool operator()(uint32_t hash1, uint32_t hash2, const Key& key1,
+                  const Key& key2) const {
+    return hash1 == hash2 && match_(key1, key2);
+  }
+
+ private:
+  MatchFun match_;
+};
+
+// Hashmap<void*, void*> which takes a custom key comparison function pointer.
+template <typename AllocationPolicy>
+class CustomMatcherTemplateHashMapImpl
+    : public TemplateHashMapImpl<
+          void*, void*,
+          HashEqualityThenKeyMatcher<void*, bool (*)(void*, void*)>,
+          AllocationPolicy> {
+  typedef TemplateHashMapImpl<
+      void*, void*, HashEqualityThenKeyMatcher<void*, bool (*)(void*, void*)>,
+      AllocationPolicy>
+      Base;
+
+ public:
+  typedef bool (*MatchFun)(void*, void*);
+
+  CustomMatcherTemplateHashMapImpl(
+      MatchFun match, uint32_t capacity = Base::kDefaultHashMapCapacity,
+      AllocationPolicy allocator = AllocationPolicy())
+      : Base(capacity, HashEqualityThenKeyMatcher<void*, MatchFun>(match),
+             allocator) {}
+};
+
+typedef CustomMatcherTemplateHashMapImpl<DefaultAllocationPolicy>
+    CustomMatcherHashMap;
+
+// Match function which compares keys directly by equality.
+template <typename Key>
+struct KeyEqualityMatcher {
+  bool operator()(uint32_t hash1, uint32_t hash2, const Key& key1,
+                  const Key& key2) const {
+    return key1 == key2;
+  }
+};
+
+// Hashmap<void*, void*> which compares the key pointers directly.
+template <typename AllocationPolicy>
+class PointerTemplateHashMapImpl
+    : public TemplateHashMapImpl<void*, void*, KeyEqualityMatcher<void*>,
+                                 AllocationPolicy> {
+  typedef TemplateHashMapImpl<void*, void*, KeyEqualityMatcher<void*>,
+                              AllocationPolicy>
+      Base;
+
+ public:
+  PointerTemplateHashMapImpl(uint32_t capacity = Base::kDefaultHashMapCapacity,
+                             AllocationPolicy allocator = AllocationPolicy())
+      : Base(capacity, KeyEqualityMatcher<void*>(), allocator) {}
+};
+
+typedef PointerTemplateHashMapImpl<DefaultAllocationPolicy> HashMap;
+
 // A hash map for pointer keys and values with an STL-like interface.
-template <class Key, class Value, class AllocationPolicy>
-class TemplateHashMap : private TemplateHashMapImpl<AllocationPolicy> {
+template <class Key, class Value, class MatchFun, class AllocationPolicy>
+class TemplateHashMap
+    : private TemplateHashMapImpl<void*, void*,
+                                  HashEqualityThenKeyMatcher<void*, MatchFun>,
+                                  AllocationPolicy> {
+  typedef TemplateHashMapImpl<void*, void*,
+                              HashEqualityThenKeyMatcher<void*, MatchFun>,
+                              AllocationPolicy>
+      Base;
+
  public:
   STATIC_ASSERT(sizeof(Key*) == sizeof(void*));    // NOLINT
   STATIC_ASSERT(sizeof(Value*) == sizeof(void*));  // NOLINT
@@ -328,26 +433,22 @@
     bool operator!=(const Iterator& other) { return entry_ != other.entry_; }
 
    private:
-    Iterator(const TemplateHashMapImpl<AllocationPolicy>* map,
-             typename TemplateHashMapImpl<AllocationPolicy>::Entry* entry)
+    Iterator(const Base* map, typename Base::Entry* entry)
         : map_(map), entry_(entry) {}
 
-    const TemplateHashMapImpl<AllocationPolicy>* map_;
-    typename TemplateHashMapImpl<AllocationPolicy>::Entry* entry_;
+    const Base* map_;
+    typename Base::Entry* entry_;
 
     friend class TemplateHashMap;
   };
 
-  TemplateHashMap(
-      typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match,
-      AllocationPolicy allocator = AllocationPolicy())
-      : TemplateHashMapImpl<AllocationPolicy>(
-            match,
-            TemplateHashMapImpl<AllocationPolicy>::kDefaultHashMapCapacity,
-            allocator) {}
+  TemplateHashMap(MatchFun match,
+                  AllocationPolicy allocator = AllocationPolicy())
+      : Base(Base::kDefaultHashMapCapacity,
+             HashEqualityThenKeyMatcher<void*, MatchFun>(match), allocator) {}
 
   Iterator begin() const { return Iterator(this, this->Start()); }
-  Iterator end() const { return Iterator(this, NULL); }
+  Iterator end() const { return Iterator(this, nullptr); }
   Iterator find(Key* key, bool insert = false,
                 AllocationPolicy allocator = AllocationPolicy()) {
     if (insert) {
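
For reference, a hypothetical caller-side sketch of the reworked hashmap API (not part of this patch; it only compiles inside the V8 tree, and the hash computation is arbitrary). HashMap now defaults to pointer-equality matching, CustomMatcherHashMap keeps the old bool(void*, void*) matcher, and LookupOrInsert gains an overload that builds the value lazily:

#include <cstdint>
#include "src/base/hashmap.h"

using v8::base::CustomMatcherHashMap;
using v8::base::HashMap;

// Callers that still need an explicit matcher define their own now that the
// static PointersMatch helper has been removed from the map class.
static bool PointersMatch(void* a, void* b) { return a == b; }

void Example() {
  int key_storage = 0;
  void* key = &key_storage;
  // Any stable hash works; a truncated pointer value keeps the sketch short.
  uint32_t hash = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key) >> 2);

  // Pointer-equality map: no match function argument any more.
  HashMap map;
  HashMap::Entry* entry = map.LookupOrInsert(key, hash);
  entry->value = &key_storage;

  // New overload: the value is created only when the key is missing.
  map.LookupOrInsert(key, hash, []() -> void* { return nullptr; });

  // Old-style map with an explicit matcher, via the new wrapper type.
  CustomMatcherHashMap custom(PointersMatch);
  custom.LookupOrInsert(key, hash);

  void* removed = map.Remove(key, hash);  // returns the stored value
  (void)removed;
}
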
diff --git a/src/base/macros.h b/src/base/macros.h
index 822c887..e386617 100644
--- a/src/base/macros.h
+++ b/src/base/macros.h
@@ -21,12 +21,6 @@
 // The expression is a compile-time constant, and therefore can be
 // used in defining new arrays, for example.  If you use arraysize on
 // a pointer by mistake, you will get a compile-time error.
-//
-// One caveat is that arraysize() doesn't accept any array of an
-// anonymous type or a type defined inside a function.  In these rare
-// cases, you have to use the unsafe ARRAYSIZE_UNSAFE() macro below.  This is
-// due to a limitation in C++'s template system.  The limitation might
-// eventually be removed, but it hasn't happened yet.
 #define arraysize(array) (sizeof(ArraySizeHelper(array)))
 
 
diff --git a/src/base/platform/platform-macos.cc b/src/base/platform/platform-macos.cc
index b75bc47..69c1816 100644
--- a/src/base/platform/platform-macos.cc
+++ b/src/base/platform/platform-macos.cc
@@ -250,10 +250,7 @@
   return munmap(address, size) == 0;
 }
 
-
-bool VirtualMemory::HasLazyCommits() {
-  return false;
-}
+bool VirtualMemory::HasLazyCommits() { return true; }
 
 }  // namespace base
 }  // namespace v8
diff --git a/src/basic-block-profiler.h b/src/basic-block-profiler.h
index 2e7ac9c..c3c8b64 100644
--- a/src/basic-block-profiler.h
+++ b/src/basic-block-profiler.h
@@ -11,6 +11,7 @@
 #include <vector>
 
 #include "src/base/macros.h"
+#include "src/globals.h"
 
 namespace v8 {
 namespace internal {
@@ -58,15 +59,16 @@
   const DataList* data_list() { return &data_list_; }
 
  private:
-  friend std::ostream& operator<<(std::ostream& os,
-                                  const BasicBlockProfiler& s);
+  friend V8_EXPORT_PRIVATE std::ostream& operator<<(
+      std::ostream& os, const BasicBlockProfiler& s);
 
   DataList data_list_;
 
   DISALLOW_COPY_AND_ASSIGN(BasicBlockProfiler);
 };
 
-std::ostream& operator<<(std::ostream& os, const BasicBlockProfiler& s);
+V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
+                                           const BasicBlockProfiler& s);
 std::ostream& operator<<(std::ostream& os, const BasicBlockProfiler::Data& s);
 
 }  // namespace internal
diff --git a/src/bit-vector.h b/src/bit-vector.h
index 3703f28..13f9e97 100644
--- a/src/bit-vector.h
+++ b/src/bit-vector.h
@@ -6,7 +6,7 @@
 #define V8_DATAFLOW_H_
 
 #include "src/allocation.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 5142817..62cebfb 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -210,7 +210,6 @@
   HARMONY_INPROGRESS(DECLARE_FEATURE_INITIALIZATION)
   HARMONY_STAGED(DECLARE_FEATURE_INITIALIZATION)
   HARMONY_SHIPPING(DECLARE_FEATURE_INITIALIZATION)
-  DECLARE_FEATURE_INITIALIZATION(intl_extra, "")
 #undef DECLARE_FEATURE_INITIALIZATION
 
   Handle<JSFunction> InstallArrayBuffer(Handle<JSObject> target,
@@ -661,6 +660,16 @@
   // Create iterator-related meta-objects.
   Handle<JSObject> iterator_prototype =
       factory()->NewJSObject(isolate()->object_function(), TENURED);
+
+  Handle<JSFunction> iterator_prototype_iterator = SimpleCreateFunction(
+      isolate(), factory()->NewStringFromAsciiChecked("[Symbol.iterator]"),
+      Builtins::kIteratorPrototypeIterator, 0, true);
+  iterator_prototype_iterator->shared()->set_native(true);
+
+  JSObject::AddProperty(iterator_prototype, factory()->iterator_symbol(),
+                        iterator_prototype_iterator, DONT_ENUM);
+  native_context()->set_initial_iterator_prototype(*iterator_prototype);
+
   Handle<JSObject> generator_object_prototype =
       factory()->NewJSObject(isolate()->object_function(), TENURED);
   native_context()->set_initial_generator_prototype(
@@ -694,6 +703,12 @@
   SimpleInstallFunction(generator_object_prototype, "throw",
                         Builtins::kGeneratorPrototypeThrow, 1, true);
 
+  // Internal version of generator_prototype_next, flagged as non-native.
+  Handle<JSFunction> generator_next_internal =
+      SimpleCreateFunction(isolate(), factory()->next_string(),
+                           Builtins::kGeneratorPrototypeNext, 1, true);
+  native_context()->set_generator_next_internal(*generator_next_internal);
+
   // Create maps for generator functions and their prototypes.  Store those
   // maps in the native context. The "prototype" property descriptor is
   // writable, non-enumerable, and non-configurable (as per ES6 draft
@@ -991,13 +1006,10 @@
   error_fun->shared()->set_construct_stub(
       *isolate->builtins()->ErrorConstructor());
   error_fun->shared()->set_length(1);
-  error_fun->shared()->set_native(true);
 
   if (context_index == Context::ERROR_FUNCTION_INDEX) {
-    Handle<JSFunction> capture_stack_trace_fun =
-        SimpleInstallFunction(error_fun, "captureStackTrace",
-                              Builtins::kErrorCaptureStackTrace, 2, false);
-    capture_stack_trace_fun->shared()->set_native(true);
+    SimpleInstallFunction(error_fun, "captureStackTrace",
+                          Builtins::kErrorCaptureStackTrace, 2, false);
   }
 
   InstallWithIntrinsicDefaultProto(isolate, error_fun, context_index);
@@ -1016,7 +1028,6 @@
       Handle<JSFunction> to_string_fun =
           SimpleInstallFunction(prototype, factory->toString_string(),
                                 Builtins::kErrorPrototypeToString, 0, true);
-      to_string_fun->shared()->set_native(true);
       isolate->native_context()->set_error_to_string(*to_string_fun);
     } else {
       DCHECK(context_index != Context::ERROR_FUNCTION_INDEX);
@@ -1206,6 +1217,8 @@
         JSObject::kHeaderSize, MaybeHandle<JSObject>(),
         Builtins::kFunctionPrototypeHasInstance,
         static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY));
+    has_instance->shared()->set_builtin_function_id(kFunctionHasInstance);
+    native_context()->set_function_has_instance(*has_instance);
 
     // Set the expected parameters for @@hasInstance to 1; required by builtin.
     has_instance->shared()->set_internal_formal_parameter_count(1);
@@ -1303,6 +1316,15 @@
     // Install i18n fallback functions.
     SimpleInstallFunction(prototype, "toLocaleString",
                           Builtins::kNumberPrototypeToLocaleString, 0, false);
+
+    // Install the Number functions.
+    SimpleInstallFunction(number_fun, "isFinite", Builtins::kNumberIsFinite, 1,
+                          true);
+    SimpleInstallFunction(number_fun, "isInteger", Builtins::kNumberIsInteger,
+                          1, true);
+    SimpleInstallFunction(number_fun, "isNaN", Builtins::kNumberIsNaN, 1, true);
+    SimpleInstallFunction(number_fun, "isSafeInteger",
+                          Builtins::kNumberIsSafeInteger, 1, true);
   }
 
   {  // --- B o o l e a n ---
@@ -1384,6 +1406,16 @@
                           1, true);
     SimpleInstallFunction(prototype, "charCodeAt",
                           Builtins::kStringPrototypeCharCodeAt, 1, true);
+    SimpleInstallFunction(prototype, "lastIndexOf",
+                          Builtins::kStringPrototypeLastIndexOf, 1, false);
+    SimpleInstallFunction(prototype, "localeCompare",
+                          Builtins::kStringPrototypeLocaleCompare, 1, true);
+    SimpleInstallFunction(prototype, "normalize",
+                          Builtins::kStringPrototypeNormalize, 0, false);
+    SimpleInstallFunction(prototype, "substr", Builtins::kStringPrototypeSubstr,
+                          2, true);
+    SimpleInstallFunction(prototype, "substring",
+                          Builtins::kStringPrototypeSubstring, 2, true);
     SimpleInstallFunction(prototype, "toString",
                           Builtins::kStringPrototypeToString, 0, true);
     SimpleInstallFunction(prototype, "trim", Builtins::kStringPrototypeTrim, 0,
@@ -1394,6 +1426,47 @@
                           Builtins::kStringPrototypeTrimRight, 0, false);
     SimpleInstallFunction(prototype, "valueOf",
                           Builtins::kStringPrototypeValueOf, 0, true);
+
+    Handle<JSFunction> iterator = SimpleCreateFunction(
+        isolate, factory->NewStringFromAsciiChecked("[Symbol.iterator]"),
+        Builtins::kStringPrototypeIterator, 0, true);
+    iterator->shared()->set_native(true);
+    JSObject::AddProperty(prototype, factory->iterator_symbol(), iterator,
+                          static_cast<PropertyAttributes>(DONT_ENUM));
+  }
+
+  {  // --- S t r i n g I t e r a t o r ---
+    Handle<JSObject> iterator_prototype(
+        native_context()->initial_iterator_prototype());
+
+    Handle<JSObject> string_iterator_prototype =
+        factory->NewJSObject(isolate->object_function(), TENURED);
+    JSObject::ForceSetPrototype(string_iterator_prototype, iterator_prototype);
+
+    JSObject::AddProperty(
+        string_iterator_prototype, factory->to_string_tag_symbol(),
+        factory->NewStringFromAsciiChecked("String Iterator"),
+        static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY));
+
+    Handle<JSFunction> next =
+        InstallFunction(string_iterator_prototype, "next", JS_OBJECT_TYPE,
+                        JSObject::kHeaderSize, MaybeHandle<JSObject>(),
+                        Builtins::kStringIteratorPrototypeNext);
+    next->shared()->set_builtin_function_id(kStringIteratorNext);
+
+    // Set the expected parameters for %StringIteratorPrototype%.next to 0 (not
+    // including the receiver), as required by the builtin.
+    next->shared()->set_internal_formal_parameter_count(0);
+
+    // Set the length for the function to satisfy ECMA-262.
+    next->shared()->set_length(0);
+
+    Handle<JSFunction> string_iterator_function = CreateFunction(
+        isolate, factory->NewStringFromAsciiChecked("StringIterator"),
+        JS_STRING_ITERATOR_TYPE, JSStringIterator::kSize,
+        string_iterator_prototype, Builtins::kIllegal);
+    native_context()->set_string_iterator_map(
+        string_iterator_function->initial_map());
   }
 
   {
@@ -1575,14 +1648,28 @@
 
   {  // -- R e g E x p
     // Builtin functions for RegExp.prototype.
+    Handle<JSObject> prototype =
+        factory->NewJSObject(isolate->object_function(), TENURED);
     Handle<JSFunction> regexp_fun =
         InstallFunction(global, "RegExp", JS_REGEXP_TYPE, JSRegExp::kSize,
-                        isolate->initial_object_prototype(),
-                        Builtins::kIllegal);
+                        prototype, Builtins::kRegExpConstructor);
     InstallWithIntrinsicDefaultProto(isolate, regexp_fun,
                                      Context::REGEXP_FUNCTION_INDEX);
-    regexp_fun->shared()->SetConstructStub(
-        *isolate->builtins()->JSBuiltinsConstructStub());
+
+    Handle<SharedFunctionInfo> shared(regexp_fun->shared(), isolate);
+    shared->SetConstructStub(*isolate->builtins()->RegExpConstructor());
+    shared->set_instance_class_name(isolate->heap()->RegExp_string());
+    shared->DontAdaptArguments();
+    shared->set_length(2);
+
+    // RegExp.prototype setup.
+
+    // Install the "constructor" property on the {prototype}.
+    JSObject::AddProperty(prototype, factory->constructor_string(), regexp_fun,
+                          DONT_ENUM);
+
+    SimpleInstallFunction(prototype, "exec", Builtins::kRegExpPrototypeExec, 1,
+                          true, DONT_ENUM);
 
     DCHECK(regexp_fun->has_initial_map());
     Handle<Map> initial_map(regexp_fun->initial_map());
@@ -1840,6 +1927,39 @@
     SimpleInstallGetter(prototype, factory->byte_offset_string(),
                         Builtins::kDataViewPrototypeGetByteOffset, false,
                         kDataViewByteOffset);
+
+    SimpleInstallFunction(prototype, "getInt8",
+                          Builtins::kDataViewPrototypeGetInt8, 1, false);
+    SimpleInstallFunction(prototype, "setInt8",
+                          Builtins::kDataViewPrototypeSetInt8, 2, false);
+    SimpleInstallFunction(prototype, "getUint8",
+                          Builtins::kDataViewPrototypeGetUint8, 1, false);
+    SimpleInstallFunction(prototype, "setUint8",
+                          Builtins::kDataViewPrototypeSetUint8, 2, false);
+    SimpleInstallFunction(prototype, "getInt16",
+                          Builtins::kDataViewPrototypeGetInt16, 1, false);
+    SimpleInstallFunction(prototype, "setInt16",
+                          Builtins::kDataViewPrototypeSetInt16, 2, false);
+    SimpleInstallFunction(prototype, "getUint16",
+                          Builtins::kDataViewPrototypeGetUint16, 1, false);
+    SimpleInstallFunction(prototype, "setUint16",
+                          Builtins::kDataViewPrototypeSetUint16, 2, false);
+    SimpleInstallFunction(prototype, "getInt32",
+                          Builtins::kDataViewPrototypeGetInt32, 1, false);
+    SimpleInstallFunction(prototype, "setInt32",
+                          Builtins::kDataViewPrototypeSetInt32, 2, false);
+    SimpleInstallFunction(prototype, "getUint32",
+                          Builtins::kDataViewPrototypeGetUint32, 1, false);
+    SimpleInstallFunction(prototype, "setUint32",
+                          Builtins::kDataViewPrototypeSetUint32, 2, false);
+    SimpleInstallFunction(prototype, "getFloat32",
+                          Builtins::kDataViewPrototypeGetFloat32, 1, false);
+    SimpleInstallFunction(prototype, "setFloat32",
+                          Builtins::kDataViewPrototypeSetFloat32, 2, false);
+    SimpleInstallFunction(prototype, "getFloat64",
+                          Builtins::kDataViewPrototypeGetFloat64, 1, false);
+    SimpleInstallFunction(prototype, "setFloat64",
+                          Builtins::kDataViewPrototypeSetFloat64, 2, false);
   }
 
   {  // -- M a p
@@ -2177,7 +2297,6 @@
   HARMONY_INPROGRESS(FEATURE_INITIALIZE_GLOBAL)
   HARMONY_STAGED(FEATURE_INITIALIZE_GLOBAL)
   HARMONY_SHIPPING(FEATURE_INITIALIZE_GLOBAL)
-  FEATURE_INITIALIZE_GLOBAL(intl_extra, "")
 #undef FEATURE_INITIALIZE_GLOBAL
 }
 
@@ -2423,17 +2542,12 @@
     native_context->set_object_to_string(*to_string);
   }
 
-  Handle<JSObject> iterator_prototype;
+  Handle<JSObject> iterator_prototype(
+      native_context->initial_iterator_prototype());
 
-  {
-    PrototypeIterator iter(native_context->generator_object_prototype_map());
-    iter.Advance();  // Advance to the prototype of generator_object_prototype.
-    iterator_prototype = Handle<JSObject>(iter.GetCurrent<JSObject>());
-
-    JSObject::AddProperty(container,
-                          factory->InternalizeUtf8String("IteratorPrototype"),
-                          iterator_prototype, NONE);
-  }
+  JSObject::AddProperty(container,
+                        factory->InternalizeUtf8String("IteratorPrototype"),
+                        iterator_prototype, NONE);
 
   {
     PrototypeIterator iter(native_context->sloppy_generator_function_map());
@@ -2686,8 +2800,6 @@
         container, "CallSite", JS_OBJECT_TYPE, JSObject::kHeaderSize,
         isolate->initial_object_prototype(), Builtins::kUnsupportedThrower);
     callsite_fun->shared()->DontAdaptArguments();
-    callsite_fun->shared()->set_native(true);
-
     isolate->native_context()->set_callsite_function(*callsite_fun);
 
     {
@@ -2725,8 +2837,7 @@
 
       Handle<JSFunction> fun;
       for (const FunctionInfo& info : infos) {
-        fun = SimpleInstallFunction(proto, info.name, info.id, 0, true, attrs);
-        fun->shared()->set_native(true);
+        SimpleInstallFunction(proto, info.name, info.id, 0, true, attrs);
       }
 
       Accessors::FunctionSetPrototype(callsite_fun, proto).Assert();
@@ -2739,6 +2850,7 @@
                                                  Handle<JSObject> container) {
   HandleScope scope(isolate);
 
+#ifdef V8_I18N_SUPPORT
 #define INITIALIZE_FLAG(FLAG)                                         \
   {                                                                   \
     Handle<String> name =                                             \
@@ -2747,9 +2859,8 @@
                           isolate->factory()->ToBoolean(FLAG), NONE); \
   }
 
-  INITIALIZE_FLAG(FLAG_intl_extra)
-
 #undef INITIALIZE_FLAG
+#endif
 }
 
 
@@ -2762,17 +2873,17 @@
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexp_named_captures)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_regexp_property)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_function_sent)
-EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(intl_extra)
-EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_explicit_tailcalls)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_tailcalls)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_restrictive_declarations)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_string_padding)
 #ifdef V8_I18N_SUPPORT
+EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(datetime_format_to_parts)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(icu_case_mapping)
 #endif
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_async_await)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_restrictive_generators)
 EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_trailing_commas)
+EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_class_fields)
 
 void InstallPublicSymbol(Factory* factory, Handle<Context> native_context,
                          const char* name, Handle<Symbol> value) {
@@ -3106,6 +3217,14 @@
     native_context()->set_global_eval_fun(*eval);
   }
 
+  // Install Global.isFinite
+  SimpleInstallFunction(global_object, "isFinite", Builtins::kGlobalIsFinite, 1,
+                        true, kGlobalIsFinite);
+
+  // Install Global.isNaN
+  SimpleInstallFunction(global_object, "isNaN", Builtins::kGlobalIsNaN, 1, true,
+                        kGlobalIsNaN);
+
   // Install Array.prototype.concat
   {
     Handle<JSFunction> array_constructor(native_context()->array_function());
@@ -3336,7 +3455,6 @@
 
 
 bool Genesis::InstallExperimentalNatives() {
-  static const char* harmony_explicit_tailcalls_natives[] = {nullptr};
   static const char* harmony_tailcalls_natives[] = {nullptr};
   static const char* harmony_sharedarraybuffer_natives[] = {
       "native harmony-atomics.js", NULL};
@@ -3349,7 +3467,6 @@
   static const char* harmony_regexp_named_captures_natives[] = {nullptr};
   static const char* harmony_regexp_property_natives[] = {nullptr};
   static const char* harmony_function_sent_natives[] = {nullptr};
-  static const char* intl_extra_natives[] = {"native intl-extra.js", nullptr};
   static const char* harmony_object_values_entries_natives[] = {nullptr};
   static const char* harmony_object_own_property_descriptors_natives[] = {
       nullptr};
@@ -3359,11 +3476,13 @@
 #ifdef V8_I18N_SUPPORT
   static const char* icu_case_mapping_natives[] = {"native icu-case-mapping.js",
                                                    nullptr};
+  static const char* datetime_format_to_parts_natives[] = {
+      "native datetime-format-to-parts.js", nullptr};
 #endif
-  static const char* harmony_async_await_natives[] = {
-      "native harmony-async-await.js", nullptr};
+  static const char* harmony_async_await_natives[] = {nullptr};
   static const char* harmony_restrictive_generators_natives[] = {nullptr};
   static const char* harmony_trailing_commas_natives[] = {nullptr};
+  static const char* harmony_class_fields_natives[] = {nullptr};
 
   for (int i = ExperimentalNatives::GetDebuggerCount();
        i < ExperimentalNatives::GetBuiltinsCount(); i++) {
@@ -3382,7 +3501,6 @@
     HARMONY_INPROGRESS(INSTALL_EXPERIMENTAL_NATIVES);
     HARMONY_STAGED(INSTALL_EXPERIMENTAL_NATIVES);
     HARMONY_SHIPPING(INSTALL_EXPERIMENTAL_NATIVES);
-    INSTALL_EXPERIMENTAL_NATIVES(intl_extra, "");
 #undef INSTALL_EXPERIMENTAL_NATIVES
   }
 
@@ -3547,8 +3665,7 @@
   return v8::internal::ComputePointerHash(extension);
 }
 
-Genesis::ExtensionStates::ExtensionStates()
-    : map_(base::HashMap::PointersMatch, 8) {}
+Genesis::ExtensionStates::ExtensionStates() : map_(8) {}
 
 Genesis::ExtensionTraversalState Genesis::ExtensionStates::get_state(
     RegisteredExtension* extension) {
@@ -4003,9 +4120,7 @@
 
   // Check that the script context table is empty except for the 'this' binding.
   // We do not need script contexts for native scripts.
-  if (!FLAG_global_var_shortcuts) {
-    DCHECK_EQ(1, native_context()->script_context_table()->used());
-  }
+  DCHECK_EQ(1, native_context()->script_context_table()->used());
 
   result_ = native_context();
 }
diff --git a/src/builtins/arm/builtins-arm.cc b/src/builtins/arm/builtins-arm.cc
index 1b643d4..2c0bef2 100644
--- a/src/builtins/arm/builtins-arm.cc
+++ b/src/builtins/arm/builtins-arm.cc
@@ -387,10 +387,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(r2);
     __ EnterBuiltinFrame(cp, r1, r2);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, r1, r2);
     __ SmiUntag(r2);
   }
@@ -449,12 +448,11 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(r6);
       __ EnterBuiltinFrame(cp, r1, r6);
       __ Push(r3);
       __ Move(r0, r2);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Move(r2, r0);
       __ Pop(r3);
       __ LeaveBuiltinFrame(cp, r1, r6);
@@ -1060,6 +1058,17 @@
   __ cmp(r0, Operand(masm->CodeObject()));  // Self-reference to this code.
   __ b(ne, &switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ ldr(r2, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
+  __ ldr(r2, FieldMemOperand(r2, LiteralsArray::kFeedbackVectorOffset));
+  __ ldr(r9, FieldMemOperand(
+                 r2, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                         TypeFeedbackVector::kHeaderSize));
+  __ add(r9, r9, Operand(Smi::FromInt(1)));
+  __ str(r9, FieldMemOperand(
+                 r2, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                         TypeFeedbackVector::kHeaderSize));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ SmiTst(kInterpreterBytecodeArrayRegister);
@@ -1162,8 +1171,33 @@
   __ Jump(lr);
 }
 
-static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
-                                         Register limit, Register scratch) {
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ sub(scratch, sp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
+  __ b(le, stack_overflow);  // Signed comparison.
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register limit, Register scratch,
+                                         Label* stack_overflow) {
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);
+
+  // Find the address of the last argument.
+  __ mov(limit, num_args);
+  __ mov(limit, Operand(limit, LSL, kPointerSizeLog2));
+  __ sub(limit, index, limit);
+
   Label loop_header, loop_check;
   __ b(al, &loop_check);
   __ bind(&loop_header);
@@ -1185,14 +1219,12 @@
   //          they are to be pushed onto the stack.
   //  -- r1 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
-  // Find the address of the last argument.
   __ add(r3, r0, Operand(1));  // Add one for receiver.
-  __ mov(r3, Operand(r3, LSL, kPointerSizeLog2));
-  __ sub(r3, r2, r3);
 
-  // Push the arguments.
-  Generate_InterpreterPushArgs(masm, r2, r3, r4);
+  // Push the arguments. r2, r4, r5 will be modified.
+  Generate_InterpreterPushArgs(masm, r3, r2, r4, r5, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1205,30 +1237,88 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- r0 : argument count (not including receiver)
   // -- r3 : new target
   // -- r1 : constructor to call
-  // -- r2 : address of the first argument
+  // -- r2 : allocation site feedback if available, undefined otherwise.
+  // -- r4 : address of the first argument
   // -----------------------------------
-
-  // Find the address of the last argument.
-  __ mov(r4, Operand(r0, LSL, kPointerSizeLog2));
-  __ sub(r4, r2, r4);
+  Label stack_overflow;
 
   // Push a slot for the receiver to be constructed.
   __ mov(ip, Operand::Zero());
   __ push(ip);
 
-  // Push the arguments.
-  Generate_InterpreterPushArgs(masm, r2, r4, r5);
+  // Push the arguments. r5, r4, r6 will be modified.
+  Generate_InterpreterPushArgs(masm, r0, r4, r5, r6, &stack_overflow);
 
-  // Call the constructor with r0, r1, and r3 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(r2, r5);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(r1);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+    __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
+    // Jump to the construct function.
+    __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
+
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with r0, r1, and r3 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- r0 : argument count (not including receiver)
+  // -- r1 : target to call verified to be Array function
+  // -- r2 : allocation site feedback if available, undefined otherwise.
+  // -- r3 : address of the first argument
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ add(r4, r0, Operand(1));  // Add one for receiver.
+
+  // TODO(mythria): Add a stack check before pushing arguments.
+  // Push the arguments. r3, r5, r6 will be modified.
+  Generate_InterpreterPushArgs(masm, r4, r3, r5, r6, &stack_overflow);
+
+  // Array constructor expects constructor in r3. It is same as r1 here.
+  __ mov(r3, r1);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1816,61 +1906,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- r0    : number of arguments
-  //  -- r1    : function
-  //  -- cp    : context
-  //  -- lr    : return address
-  //  -- sp[0] : receiver
-  // -----------------------------------
-
-  // 1. Pop receiver into r0 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(r0);
-    __ JumpIfSmi(r0, &receiver_not_date);
-    __ CompareObjectType(r0, r2, r3, JS_DATE_TYPE);
-    __ b(ne, &receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ ldr(r0, FieldMemOperand(r0, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ mov(r1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
-      __ ldr(r1, MemOperand(r1));
-      __ ldr(ip, FieldMemOperand(r0, JSDate::kCacheStampOffset));
-      __ cmp(r1, ip);
-      __ b(ne, &stamp_mismatch);
-      __ ldr(r0, FieldMemOperand(
-                     r0, JSDate::kValueOffset + field_index * kPointerSize));
-      __ Ret();
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, r1);
-    __ mov(r1, Operand(Smi::FromInt(field_index)));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Push(r0);
-    __ Move(r0, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, r1, r0);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0    : argc
@@ -2101,26 +2136,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- r0 : actual number of arguments
-  //  -- r1 : function (passed through to callee)
-  //  -- r2 : expected number of arguments
-  //  -- r3 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
-  // Make r5 the space we have left. The stack might already be overflowed
-  // here which will cause r5 to become negative.
-  __ sub(r5, sp, r5);
-  // Check if the arguments will overflow the stack.
-  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
-  __ b(le, stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ SmiTag(r0);
   __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
@@ -2786,21 +2801,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in r0.
-  STATIC_ASSERT(kSmiTag == 0);
-  __ tst(r0, Operand(kSmiTagMask));
-  __ Ret(eq);
-
-  __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
-  // r0: receiver
-  // r1: receiver instance type
-  __ Ret(eq);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r0 : actual number of arguments
@@ -2820,7 +2820,7 @@
   {  // Enough parameters: actual >= expected
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);
 
     // Calculate copy start address into r0 and copy end address into r4.
     // r0: actual number of arguments as a smi
@@ -2853,7 +2853,7 @@
   {  // Too few parameters: Actual < expected
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);
 
     // Calculate copy start address into r0 and copy end address is fp.
     // r0: actual number of arguments as a smi
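
Illustration only (not part of this patch): the arithmetic that the new Generate_StackOverflowCheck emits, restated as plain C++. "Space left" is sp minus the real stack limit, and the comparison is signed, so an already-overflowed stack (negative space) also takes the overflow path:

#include <cstdint>

constexpr int kPointerSizeLog2 = 2;  // 2 on the 32-bit ARM target of this file

bool WouldOverflow(intptr_t sp, intptr_t real_stack_limit, intptr_t num_args) {
  intptr_t space_left = sp - real_stack_limit;      // may be negative
  intptr_t needed = num_args << kPointerSizeLog2;   // num_args * kPointerSize
  return space_left <= needed;                      // "b le, stack_overflow"
}
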
diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
index 57395d8..48551de 100644
--- a/src/builtins/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -379,10 +379,9 @@
   __ Bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(x2);
     __ EnterBuiltinFrame(cp, x1, x2);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, x1, x2);
     __ SmiUntag(x2);
   }
@@ -442,12 +441,11 @@
     __ Bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(x6);
       __ EnterBuiltinFrame(cp, x1, x6);
       __ Push(x3);
       __ Move(x0, x2);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Move(x2, x0);
       __ Pop(x3);
       __ LeaveBuiltinFrame(cp, x1, x6);
@@ -1065,6 +1063,17 @@
   __ Cmp(x0, Operand(masm->CodeObject()));  // Self-reference to this code.
   __ B(ne, &switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ Ldr(x11, FieldMemOperand(x1, JSFunction::kLiteralsOffset));
+  __ Ldr(x11, FieldMemOperand(x11, LiteralsArray::kFeedbackVectorOffset));
+  __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+  __ Add(x10, x10, Operand(Smi::FromInt(1)));
+  __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
@@ -1171,6 +1180,50 @@
   __ Ret();
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow.
+  // We are not trying to catch interruptions (e.g. debug break and
+  // preemption) here, so the "real stack limit" is checked.
+  Label enough_stack_space;
+  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  // Make scratch the space we have left. The stack might already have
+  // overflowed here, which will cause scratch to become negative.
+  __ Sub(scratch, jssp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
+  __ B(le, stack_overflow);
+}
+
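Generate_StackOverflowCheck above compares the space left between jssp and the real stack limit against the space the arguments will occupy. A minimal standalone sketch of that arithmetic, assuming a 64-bit target (plain C++, not the MacroAssembler API):

#include <cstdint>

// Sketch of the check emitted above: sp and real_stack_limit stand in for
// jssp and the Heap::kRealStackLimit root; kPointerSizeLog2 == 3 assumes
// 8-byte stack slots.
constexpr int kPointerSizeLog2 = 3;

bool WouldOverflowStack(uintptr_t sp, uintptr_t real_stack_limit,
                        uintptr_t num_args) {
  // Space we have left; the stack may already have overflowed, so treat the
  // difference as signed (this is why the builtin branches on "le").
  intptr_t remaining = static_cast<intptr_t>(sp - real_stack_limit);
  intptr_t needed = static_cast<intptr_t>(num_args << kPointerSizeLog2);
  return remaining <= needed;  // would branch to the stack_overflow label
}
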
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register last_arg, Register stack_addr,
+                                         Register scratch,
+                                         Label* stack_overflow) {
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);
+
+  __ Mov(scratch, num_args);
+  __ lsl(scratch, scratch, kPointerSizeLog2);
+  __ sub(last_arg, index, scratch);
+
+  // Set stack pointer and where to stop.
+  __ Mov(stack_addr, jssp);
+  __ Claim(scratch, 1);
+
+  // Push the arguments.
+  Label loop_header, loop_check;
+  __ B(&loop_check);
+  __ Bind(&loop_header);
+  // TODO(rmcilroy): Push two at a time once we ensure the stack stays aligned.
+  __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
+  __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex));
+  __ Bind(&loop_check);
+  __ Cmp(index, last_arg);
+  __ B(gt, &loop_header);
+}
+
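Generate_InterpreterPushArgs then copies num_args slots onto the JS stack, reading downward from the address of the first argument and pre-decrementing the destination, matching the Ldr(PostIndex)/Str(PreIndex) pair above. A rough C++ model of the copy loop (plain pointers instead of registers, 8-byte slots assumed):

#include <cstddef>
#include <cstdint>

// first_arg points at the first slot to copy; the remaining slots sit at
// successively lower addresses. stack_ptr is the downward-growing stack top.
// Returns the new stack top.
uint64_t* PushArgs(const uint64_t* first_arg, size_t num_args,
                   uint64_t* stack_ptr) {
  const uint64_t* index = first_arg;
  const uint64_t* last_arg = first_arg - num_args;
  while (index > last_arg) {   // loop_check: Cmp(index, last_arg); B(gt)
    uint64_t value = *index;   // Ldr with PostIndex: load, then step down
    --index;
    --stack_ptr;               // Str with PreIndex: step down, then store
    *stack_ptr = value;
  }
  return stack_ptr;
}
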
 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
     MacroAssembler* masm, TailCallMode tail_call_mode,
@@ -1182,24 +1235,13 @@
   //          they are to be pushed onto the stack.
   //  -- x1 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
-  // Find the address of the last argument.
-  __ add(x3, x0, Operand(1));  // Add one for receiver.
-  __ lsl(x3, x3, kPointerSizeLog2);
-  __ sub(x4, x2, x3);
+  // Add one for the receiver.
+  __ add(x3, x0, Operand(1));
 
-  // Push the arguments.
-  Label loop_header, loop_check;
-  __ Mov(x5, jssp);
-  __ Claim(x3, 1);
-  __ B(&loop_check);
-  __ Bind(&loop_header);
-  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
-  __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
-  __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
-  __ Bind(&loop_check);
-  __ Cmp(x2, x4);
-  __ B(gt, &loop_header);
+  // Push the arguments. x2, x4, x5, x6 will be modified.
+  Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1212,42 +1254,82 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ Unreachable();
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- x0 : argument count (not including receiver)
   // -- x3 : new target
   // -- x1 : constructor to call
-  // -- x2 : address of the first argument
+  // -- x2 : allocation site feedback if available, undefined otherwise
+  // -- x4 : address of the first argument
   // -----------------------------------
-
-  // Find the address of the last argument.
-  __ add(x5, x0, Operand(1));  // Add one for receiver (to be constructed).
-  __ lsl(x5, x5, kPointerSizeLog2);
-
-  // Set stack pointer and where to stop.
-  __ Mov(x6, jssp);
-  __ Claim(x5, 1);
-  __ sub(x4, x6, x5);
+  Label stack_overflow;
 
   // Push a slot for the receiver.
-  __ Str(xzr, MemOperand(x6, -kPointerSize, PreIndex));
+  __ Push(xzr);
 
-  Label loop_header, loop_check;
-  // Push the arguments.
-  __ B(&loop_check);
-  __ Bind(&loop_header);
-  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
-  __ Ldr(x5, MemOperand(x2, -kPointerSize, PostIndex));
-  __ Str(x5, MemOperand(x6, -kPointerSize, PreIndex));
-  __ Bind(&loop_check);
-  __ Cmp(x6, x4);
-  __ B(gt, &loop_header);
+  // Push the arguments. x5, x4, x6, x7 will be modified.
+  Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7, &stack_overflow);
 
-  // Call the constructor with x0, x1, and x3 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(x2, x6);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(x1);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+    __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
+    __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
+    __ Br(x4);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with x0, x1, and x3 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ Unreachable();
+  }
+}
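In the kJSFunction path above, the tagged Code pointer loaded from the shared function info's construct stub slot becomes a jump target via Code::kHeaderSize - kHeapObjectTag, i.e. untag and skip the object header in one add. A toy model of that pointer arithmetic (both constants below are illustrative placeholders):

#include <cstdint>

// Placeholder constants: kCodeHeaderSize is illustrative only; kHeapObjectTag
// models the low tag carried by heap-object pointers.
constexpr uintptr_t kHeapObjectTag = 1;
constexpr uintptr_t kCodeHeaderSize = 0x60;

uintptr_t CodeEntry(uintptr_t tagged_code_pointer) {
  // Untag the pointer and skip the Code header in a single add, as the
  // builtin does before the Br.
  return tagged_code_pointer + kCodeHeaderSize - kHeapObjectTag;
}
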
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- x0 : argument count (not including receiver)
+  // -- x1 : target to call verified to be Array function
+  // -- x2 : allocation site feedback if available, undefined otherwise.
+  // -- x3 : address of the first argument
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ add(x4, x0, Operand(1));  // Add one for the receiver.
+
+  // Push the arguments. x3, x5, x6, x7 will be modified.
+  Generate_InterpreterPushArgs(masm, x4, x3, x5, x6, x7, &stack_overflow);
+
+  // Array constructor expects constructor in x3. It equals the call target.
+  __ mov(x3, x1);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ Unreachable();
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1820,60 +1902,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- x0      : number of arguments
-  //  -- x1      : function
-  //  -- cp      : context
-  //  -- lr      : return address
-  //  -- jssp[0] : receiver
-  // -----------------------------------
-  ASM_LOCATION("Builtins::Generate_DatePrototype_GetField");
-
-  // 1. Pop receiver into x0 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(x0);
-    __ JumpIfSmi(x0, &receiver_not_date);
-    __ JumpIfNotObjectType(x0, x2, x3, JS_DATE_TYPE, &receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ Ldr(x0, FieldMemOperand(x0, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ Mov(x1, ExternalReference::date_cache_stamp(masm->isolate()));
-      __ Ldr(x1, MemOperand(x1));
-      __ Ldr(x2, FieldMemOperand(x0, JSDate::kCacheStampOffset));
-      __ Cmp(x1, x2);
-      __ B(ne, &stamp_mismatch);
-      __ Ldr(x0, FieldMemOperand(
-                     x0, JSDate::kValueOffset + field_index * kPointerSize));
-      __ Ret();
-      __ Bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ Mov(x1, Smi::FromInt(field_index));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ Bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Push(x0);
-    __ Mov(x0, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, x1, x0);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- x0       : argc
@@ -2162,27 +2190,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- x0 : actual number of arguments
-  //  -- x1 : function (passed through to callee)
-  //  -- x2 : expected number of arguments
-  //  -- x3 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow.
-  // We are not trying to catch interruptions (e.g. debug break and
-  // preemption) here, so the "real stack limit" is checked.
-  Label enough_stack_space;
-  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
-  // Make x10 the space we have left. The stack might already be overflowed
-  // here which will cause x10 to become negative.
-  __ Sub(x10, jssp, x10);
-  // Check if the arguments will overflow the stack.
-  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
-  __ B(le, stack_overflow);
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ SmiTag(x10, x0);
   __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
@@ -2451,11 +2458,9 @@
   Label class_constructor;
   __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestAndBranchIfAnySet(
-      w3, (1 << SharedFunctionInfo::kIsDefaultConstructor) |
-              (1 << SharedFunctionInfo::kIsSubclassConstructor) |
-              (1 << SharedFunctionInfo::kIsBaseConstructor),
-      &class_constructor);
+  __ TestAndBranchIfAnySet(w3, FunctionKind::kClassConstructor
+                                   << SharedFunctionInfo::kFunctionKindShift,
+                           &class_constructor);
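The single TestAndBranchIfAnySet above replaces three individually listed compiler-hint bits: FunctionKind::kClassConstructor combines the constructor-kind bits, so shifting it into the function-kind field tests the same set in one mask. A sketch of that pattern with made-up bit positions (the real layout lives in SharedFunctionInfo and FunctionKind):

#include <cstdint>

// Illustrative bit layout only.
constexpr uint32_t kFunctionKindShift = 8;
constexpr uint32_t kBaseConstructorBit = 1u << 0;
constexpr uint32_t kSubclassConstructorBit = 1u << 1;
constexpr uint32_t kDefaultConstructorBit = 1u << 2;
constexpr uint32_t kClassConstructorMask =
    kBaseConstructorBit | kSubclassConstructorBit | kDefaultConstructorBit;

bool IsClassConstructor(uint32_t compiler_hints) {
  // "Branch if any set": a non-zero AND with the shifted mask means at least
  // one of the constructor-kind bits is set.
  return (compiler_hints & (kClassConstructorMask << kFunctionKindShift)) != 0;
}
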
 
   // Enter the context of the function; ToObject has to run in the function
   // context, and we also need to take the global proxy from the function
@@ -2873,26 +2878,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in x0.
-  Label not_smi;
-  __ JumpIfNotSmi(x0, &not_smi);
-  __ Ret();
-  __ Bind(&not_smi);
-
-  Label not_heap_number;
-  __ CompareObjectType(x0, x1, x1, HEAP_NUMBER_TYPE);
-  // x0: receiver
-  // x1: receiver instance type
-  __ B(ne, &not_heap_number);
-  __ Ret();
-  __ Bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
   // ----------- S t a t e -------------
@@ -2917,7 +2902,7 @@
 
   {  // Enough parameters: actual >= expected
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);
 
     Register copy_start = x10;
     Register copy_end = x11;
@@ -2964,7 +2949,7 @@
     Register scratch1 = x13, scratch2 = x14;
 
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, x2, x10, &stack_overflow);
 
     __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
     __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);
diff --git a/src/builtins/builtins-array.cc b/src/builtins/builtins-array.cc
index 09ee4cc..b4969f1 100644
--- a/src/builtins/builtins-array.cc
+++ b/src/builtins/builtins-array.cc
@@ -1269,24 +1269,24 @@
   Node* start_from = assembler->Parameter(2);
   Node* context = assembler->Parameter(3 + 2);
 
-  Node* int32_zero = assembler->Int32Constant(0);
-  Node* int32_one = assembler->Int32Constant(1);
+  Node* intptr_zero = assembler->IntPtrConstant(0);
+  Node* intptr_one = assembler->IntPtrConstant(1);
 
   Node* the_hole = assembler->TheHoleConstant();
   Node* undefined = assembler->UndefinedConstant();
   Node* heap_number_map = assembler->HeapNumberMapConstant();
 
-  Variable len_var(assembler, MachineRepresentation::kWord32),
-      index_var(assembler, MachineRepresentation::kWord32),
-      start_from_var(assembler, MachineRepresentation::kWord32);
+  Variable len_var(assembler, MachineType::PointerRepresentation()),
+      index_var(assembler, MachineType::PointerRepresentation()),
+      start_from_var(assembler, MachineType::PointerRepresentation());
 
   Label init_k(assembler), return_true(assembler), return_false(assembler),
       call_runtime(assembler);
 
   Label init_len(assembler);
 
-  index_var.Bind(int32_zero);
-  len_var.Bind(int32_zero);
+  index_var.Bind(intptr_zero);
+  len_var.Bind(intptr_zero);
 
   // Take slow path if not a JSArray, if retrieving elements requires
   // traversing prototype, or if access checks are required.
@@ -1299,7 +1299,7 @@
     assembler->GotoUnless(assembler->WordIsSmi(len), &call_runtime);
 
     len_var.Bind(assembler->SmiToWord(len));
-    assembler->Branch(assembler->Word32Equal(len_var.value(), int32_zero),
+    assembler->Branch(assembler->WordEqual(len_var.value(), intptr_zero),
                       &return_false, &init_k);
   }
 
@@ -1307,31 +1307,32 @@
   {
     Label done(assembler), init_k_smi(assembler), init_k_heap_num(assembler),
         init_k_zero(assembler), init_k_n(assembler);
-    Callable call_to_integer = CodeFactory::ToInteger(assembler->isolate());
-    Node* tagged_n = assembler->CallStub(call_to_integer, context, start_from);
+    Node* tagged_n = assembler->ToInteger(context, start_from);
 
     assembler->Branch(assembler->WordIsSmi(tagged_n), &init_k_smi,
                       &init_k_heap_num);
 
     assembler->Bind(&init_k_smi);
     {
-      start_from_var.Bind(assembler->SmiToWord32(tagged_n));
+      start_from_var.Bind(assembler->SmiUntag(tagged_n));
       assembler->Goto(&init_k_n);
     }
 
     assembler->Bind(&init_k_heap_num);
     {
       Label do_return_false(assembler);
-      Node* fp_len = assembler->ChangeInt32ToFloat64(len_var.value());
+      // This conversion is lossless for all valid lengths.
+      Node* fp_len = assembler->RoundIntPtrToFloat64(len_var.value());
       Node* fp_n = assembler->LoadHeapNumberValue(tagged_n);
       assembler->GotoIf(assembler->Float64GreaterThanOrEqual(fp_n, fp_len),
                         &do_return_false);
-      start_from_var.Bind(assembler->TruncateFloat64ToWord32(fp_n));
+      start_from_var.Bind(assembler->ChangeInt32ToIntPtr(
+          assembler->TruncateFloat64ToWord32(fp_n)));
       assembler->Goto(&init_k_n);
 
       assembler->Bind(&do_return_false);
       {
-        index_var.Bind(int32_zero);
+        index_var.Bind(intptr_zero);
         assembler->Goto(&return_false);
       }
     }
@@ -1340,7 +1341,7 @@
     {
       Label if_positive(assembler), if_negative(assembler), done(assembler);
       assembler->Branch(
-          assembler->Int32LessThan(start_from_var.value(), int32_zero),
+          assembler->IntPtrLessThan(start_from_var.value(), intptr_zero),
           &if_negative, &if_positive);
 
       assembler->Bind(&if_positive);
@@ -1352,15 +1353,15 @@
       assembler->Bind(&if_negative);
       {
         index_var.Bind(
-            assembler->Int32Add(len_var.value(), start_from_var.value()));
+            assembler->IntPtrAdd(len_var.value(), start_from_var.value()));
         assembler->Branch(
-            assembler->Int32LessThan(index_var.value(), int32_zero),
+            assembler->IntPtrLessThan(index_var.value(), intptr_zero),
             &init_k_zero, &done);
       }
 
       assembler->Bind(&init_k_zero);
       {
-        index_var.Bind(int32_zero);
+        index_var.Bind(intptr_zero);
         assembler->Goto(&done);
       }
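The init_k hunks above compute the untagged start index from the fromIndex argument: ToInteger it, bail out early when it is at least the length (do_return_false), and take a negative value relative to the length, clamped at zero. A plain C++ model of that computation, not the CodeStubAssembler API (assumes fromIndex already went through ToInteger, so it is a finite number):

#include <cstdint>

// Returns the start index k, or -1 when the search can give up immediately
// because from_index >= length.
intptr_t ComputeStartIndex(intptr_t length, double from_index) {
  if (from_index >= static_cast<double>(length)) return -1;
  if (from_index < -static_cast<double>(length)) return 0;  // clamp at the start
  intptr_t k = static_cast<intptr_t>(from_index);  // truncate toward zero
  if (k < 0) k += length;  // negative fromIndex counts back from the end
  return k;
}
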
 
@@ -1380,9 +1381,7 @@
                                     &if_packed_doubles, &if_holey_doubles};
 
   Node* map = assembler->LoadMap(array);
-  Node* bit_field2 = assembler->LoadMapBitField2(map);
-  Node* elements_kind =
-      assembler->BitFieldDecode<Map::ElementsKindBits>(bit_field2);
+  Node* elements_kind = assembler->LoadMapElementsKind(map);
   Node* elements = assembler->LoadElements(array);
   assembler->Switch(elements_kind, &return_false, kElementsKind,
                     element_kind_handlers, arraysize(kElementsKind));
@@ -1411,43 +1410,41 @@
 
     assembler->Bind(&not_heap_num);
     Node* search_type = assembler->LoadMapInstanceType(map);
+    assembler->GotoIf(assembler->IsStringInstanceType(search_type),
+                      &string_loop);
     assembler->GotoIf(
-        assembler->Int32LessThan(
-            search_type, assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-        &string_loop);
-    assembler->GotoIf(
-        assembler->WordEqual(search_type,
-                             assembler->Int32Constant(SIMD128_VALUE_TYPE)),
+        assembler->Word32Equal(search_type,
+                               assembler->Int32Constant(SIMD128_VALUE_TYPE)),
         &simd_loop);
     assembler->Goto(&ident_loop);
 
     assembler->Bind(&ident_loop);
     {
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordEqual(element_k, search_element),
                         &return_true);
 
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&ident_loop);
     }
 
     assembler->Bind(&undef_loop);
     {
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordEqual(element_k, undefined),
                         &return_true);
       assembler->GotoIf(assembler->WordEqual(element_k, the_hole),
                         &return_true);
 
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&undef_loop);
     }
 
@@ -1462,10 +1459,11 @@
       {
         Label continue_loop(assembler), not_smi(assembler);
         assembler->GotoUnless(
-            assembler->Int32LessThan(index_var.value(), len_var.value()),
+            assembler->UintPtrLessThan(index_var.value(), len_var.value()),
             &return_false);
-        Node* element_k =
-            assembler->LoadFixedArrayElement(elements, index_var.value());
+        Node* element_k = assembler->LoadFixedArrayElement(
+            elements, index_var.value(), 0,
+            CodeStubAssembler::INTPTR_PARAMETERS);
         assembler->GotoUnless(assembler->WordIsSmi(element_k), &not_smi);
         assembler->Branch(
             assembler->Float64Equal(search_num.value(),
@@ -1481,7 +1479,7 @@
             &return_true, &continue_loop);
 
         assembler->Bind(&continue_loop);
-        index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+        index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
         assembler->Goto(&not_nan_loop);
       }
 
@@ -1489,10 +1487,11 @@
       {
         Label continue_loop(assembler);
         assembler->GotoUnless(
-            assembler->Int32LessThan(index_var.value(), len_var.value()),
+            assembler->UintPtrLessThan(index_var.value(), len_var.value()),
             &return_false);
-        Node* element_k =
-            assembler->LoadFixedArrayElement(elements, index_var.value());
+        Node* element_k = assembler->LoadFixedArrayElement(
+            elements, index_var.value(), 0,
+            CodeStubAssembler::INTPTR_PARAMETERS);
         assembler->GotoIf(assembler->WordIsSmi(element_k), &continue_loop);
         assembler->GotoIf(assembler->WordNotEqual(assembler->LoadMap(element_k),
                                                   heap_number_map),
@@ -1502,7 +1501,7 @@
             &continue_loop);
 
         assembler->Bind(&continue_loop);
-        index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+        index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
         assembler->Goto(&nan_loop);
       }
     }
@@ -1511,14 +1510,13 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordIsSmi(element_k), &continue_loop);
-      assembler->GotoUnless(assembler->Int32LessThan(
-                                assembler->LoadInstanceType(element_k),
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+      assembler->GotoUnless(assembler->IsStringInstanceType(
+                                assembler->LoadInstanceType(element_k)),
                             &continue_loop);
 
       // TODO(bmeurer): Consider inlining the StringEqual logic here.
@@ -1530,7 +1528,7 @@
           &return_true, &continue_loop);
 
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&string_loop);
     }
 
@@ -1543,11 +1541,11 @@
       assembler->Goto(&loop_body);
       assembler->Bind(&loop_body);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
 
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordIsSmi(element_k), &continue_loop);
 
       Node* map_k = assembler->LoadMap(element_k);
@@ -1555,7 +1553,7 @@
                                       &return_true, &continue_loop);
 
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&loop_body);
     }
   }
@@ -1585,14 +1583,15 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->BranchIfFloat64Equal(element_k, search_num.value(),
                                       &return_true, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&not_nan_loop);
     }
 
@@ -1601,13 +1600,14 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->BranchIfFloat64IsNaN(element_k, &return_true, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&nan_loop);
     }
   }
@@ -1639,31 +1639,18 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
 
-      if (kPointerSize == kDoubleSize) {
-        Node* element = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint64());
-        Node* the_hole = assembler->Int64Constant(kHoleNanInt64);
-        assembler->GotoIf(assembler->Word64Equal(element, the_hole),
-                          &continue_loop);
-      } else {
-        Node* element_upper = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint32(),
-            kIeeeDoubleExponentWordOffset);
-        assembler->GotoIf(
-            assembler->Word32Equal(element_upper,
-                                   assembler->Int32Constant(kHoleNanUpper32)),
-            &continue_loop);
-      }
-
+      // Load the double value or continue if it contains a double hole.
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
+
       assembler->BranchIfFloat64Equal(element_k, search_num.value(),
                                       &return_true, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&not_nan_loop);
     }
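The branches deleted above (and in the matching hunks below) open-coded the hole check for holey double arrays: when a pointer is as wide as a double, compare the whole 64-bit pattern against the hole NaN, otherwise compare only the word at kIeeeDoubleExponentWordOffset; the new LoadFixedDoubleArrayElement overload takes an if_hole label and does this internally. A standalone model of the shape of that check, with the hole bit pattern left as a placeholder (the real constants are kHoleNanInt64 and kHoleNanUpper32):

#include <cstdint>
#include <cstring>

// Placeholder pattern only: V8 marks holes in FixedDoubleArrays with a
// dedicated NaN bit pattern; the value below is not the real one.
constexpr uint64_t kHoleBits64 = 0x7FF4DEADDEADBEEFull;
constexpr uint32_t kHoleUpper32 = static_cast<uint32_t>(kHoleBits64 >> 32);

bool IsDoubleHole(double element) {
  uint64_t bits;
  std::memcpy(&bits, &element, sizeof bits);
  if (sizeof(void*) == sizeof(double)) {
    return bits == kHoleBits64;  // 64-bit: compare the whole word
  }
  return static_cast<uint32_t>(bits >> 32) == kHoleUpper32;  // 32-bit: upper word
}
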
 
@@ -1672,30 +1659,17 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
 
-      if (kPointerSize == kDoubleSize) {
-        Node* element = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint64());
-        Node* the_hole = assembler->Int64Constant(kHoleNanInt64);
-        assembler->GotoIf(assembler->Word64Equal(element, the_hole),
-                          &continue_loop);
-      } else {
-        Node* element_upper = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint32(),
-            kIeeeDoubleExponentWordOffset);
-        assembler->GotoIf(
-            assembler->Word32Equal(element_upper,
-                                   assembler->Int32Constant(kHoleNanUpper32)),
-            &continue_loop);
-      }
-
+      // Load the double value or continue if it contains a double hole.
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
+
       assembler->BranchIfFloat64IsNaN(element_k, &return_true, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&nan_loop);
     }
 
@@ -1703,26 +1677,15 @@
     assembler->Bind(&hole_loop);
     {
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_false);
 
-      if (kPointerSize == kDoubleSize) {
-        Node* element = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint64());
-        Node* the_hole = assembler->Int64Constant(kHoleNanInt64);
-        assembler->GotoIf(assembler->Word64Equal(element, the_hole),
-                          &return_true);
-      } else {
-        Node* element_upper = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint32(),
-            kIeeeDoubleExponentWordOffset);
-        assembler->GotoIf(
-            assembler->Word32Equal(element_upper,
-                                   assembler->Int32Constant(kHoleNanUpper32)),
-            &return_true);
-      }
+      // Check if the element is a double hole, but don't load it.
+      assembler->LoadFixedDoubleArrayElement(
+          elements, index_var.value(), MachineType::None(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS, &return_true);
 
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&hole_loop);
     }
   }
@@ -1749,23 +1712,23 @@
   Node* start_from = assembler->Parameter(2);
   Node* context = assembler->Parameter(3 + 2);
 
-  Node* int32_zero = assembler->Int32Constant(0);
-  Node* int32_one = assembler->Int32Constant(1);
+  Node* intptr_zero = assembler->IntPtrConstant(0);
+  Node* intptr_one = assembler->IntPtrConstant(1);
 
   Node* undefined = assembler->UndefinedConstant();
   Node* heap_number_map = assembler->HeapNumberMapConstant();
 
-  Variable len_var(assembler, MachineRepresentation::kWord32),
-      index_var(assembler, MachineRepresentation::kWord32),
-      start_from_var(assembler, MachineRepresentation::kWord32);
+  Variable len_var(assembler, MachineType::PointerRepresentation()),
+      index_var(assembler, MachineType::PointerRepresentation()),
+      start_from_var(assembler, MachineType::PointerRepresentation());
 
   Label init_k(assembler), return_found(assembler), return_not_found(assembler),
       call_runtime(assembler);
 
   Label init_len(assembler);
 
-  index_var.Bind(int32_zero);
-  len_var.Bind(int32_zero);
+  index_var.Bind(intptr_zero);
+  len_var.Bind(intptr_zero);
 
   // Take slow path if not a JSArray, if retrieving elements requires
   // traversing prototype, or if access checks are required.
@@ -1778,7 +1741,7 @@
     assembler->GotoUnless(assembler->WordIsSmi(len), &call_runtime);
 
     len_var.Bind(assembler->SmiToWord(len));
-    assembler->Branch(assembler->Word32Equal(len_var.value(), int32_zero),
+    assembler->Branch(assembler->WordEqual(len_var.value(), intptr_zero),
                       &return_not_found, &init_k);
   }
 
@@ -1786,31 +1749,32 @@
   {
     Label done(assembler), init_k_smi(assembler), init_k_heap_num(assembler),
         init_k_zero(assembler), init_k_n(assembler);
-    Callable call_to_integer = CodeFactory::ToInteger(assembler->isolate());
-    Node* tagged_n = assembler->CallStub(call_to_integer, context, start_from);
+    Node* tagged_n = assembler->ToInteger(context, start_from);
 
     assembler->Branch(assembler->WordIsSmi(tagged_n), &init_k_smi,
                       &init_k_heap_num);
 
     assembler->Bind(&init_k_smi);
     {
-      start_from_var.Bind(assembler->SmiToWord32(tagged_n));
+      start_from_var.Bind(assembler->SmiUntag(tagged_n));
       assembler->Goto(&init_k_n);
     }
 
     assembler->Bind(&init_k_heap_num);
     {
       Label do_return_not_found(assembler);
-      Node* fp_len = assembler->ChangeInt32ToFloat64(len_var.value());
+      // This conversion is lossless for all valid lengths.
+      Node* fp_len = assembler->RoundIntPtrToFloat64(len_var.value());
       Node* fp_n = assembler->LoadHeapNumberValue(tagged_n);
       assembler->GotoIf(assembler->Float64GreaterThanOrEqual(fp_n, fp_len),
                         &do_return_not_found);
-      start_from_var.Bind(assembler->TruncateFloat64ToWord32(fp_n));
+      start_from_var.Bind(assembler->ChangeInt32ToIntPtr(
+          assembler->TruncateFloat64ToWord32(fp_n)));
       assembler->Goto(&init_k_n);
 
       assembler->Bind(&do_return_not_found);
       {
-        index_var.Bind(int32_zero);
+        index_var.Bind(intptr_zero);
         assembler->Goto(&return_not_found);
       }
     }
@@ -1819,7 +1783,7 @@
     {
       Label if_positive(assembler), if_negative(assembler), done(assembler);
       assembler->Branch(
-          assembler->Int32LessThan(start_from_var.value(), int32_zero),
+          assembler->IntPtrLessThan(start_from_var.value(), intptr_zero),
           &if_negative, &if_positive);
 
       assembler->Bind(&if_positive);
@@ -1831,15 +1795,15 @@
       assembler->Bind(&if_negative);
       {
         index_var.Bind(
-            assembler->Int32Add(len_var.value(), start_from_var.value()));
+            assembler->IntPtrAdd(len_var.value(), start_from_var.value()));
         assembler->Branch(
-            assembler->Int32LessThan(index_var.value(), int32_zero),
+            assembler->IntPtrLessThan(index_var.value(), intptr_zero),
             &init_k_zero, &done);
       }
 
       assembler->Bind(&init_k_zero);
       {
-        index_var.Bind(int32_zero);
+        index_var.Bind(intptr_zero);
         assembler->Goto(&done);
       }
 
@@ -1859,9 +1823,7 @@
                                     &if_packed_doubles, &if_holey_doubles};
 
   Node* map = assembler->LoadMap(array);
-  Node* bit_field2 = assembler->LoadMapBitField2(map);
-  Node* elements_kind =
-      assembler->BitFieldDecode<Map::ElementsKindBits>(bit_field2);
+  Node* elements_kind = assembler->LoadMapElementsKind(map);
   Node* elements = assembler->LoadElements(array);
   assembler->Switch(elements_kind, &return_not_found, kElementsKind,
                     element_kind_handlers, arraysize(kElementsKind));
@@ -1890,41 +1852,39 @@
 
     assembler->Bind(&not_heap_num);
     Node* search_type = assembler->LoadMapInstanceType(map);
+    assembler->GotoIf(assembler->IsStringInstanceType(search_type),
+                      &string_loop);
     assembler->GotoIf(
-        assembler->Int32LessThan(
-            search_type, assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-        &string_loop);
-    assembler->GotoIf(
-        assembler->WordEqual(search_type,
-                             assembler->Int32Constant(SIMD128_VALUE_TYPE)),
+        assembler->Word32Equal(search_type,
+                               assembler->Int32Constant(SIMD128_VALUE_TYPE)),
         &simd_loop);
     assembler->Goto(&ident_loop);
 
     assembler->Bind(&ident_loop);
     {
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordEqual(element_k, search_element),
                         &return_found);
 
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&ident_loop);
     }
 
     assembler->Bind(&undef_loop);
     {
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordEqual(element_k, undefined),
                         &return_found);
 
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&undef_loop);
     }
 
@@ -1938,10 +1898,11 @@
       {
         Label continue_loop(assembler), not_smi(assembler);
         assembler->GotoUnless(
-            assembler->Int32LessThan(index_var.value(), len_var.value()),
+            assembler->UintPtrLessThan(index_var.value(), len_var.value()),
             &return_not_found);
-        Node* element_k =
-            assembler->LoadFixedArrayElement(elements, index_var.value());
+        Node* element_k = assembler->LoadFixedArrayElement(
+            elements, index_var.value(), 0,
+            CodeStubAssembler::INTPTR_PARAMETERS);
         assembler->GotoUnless(assembler->WordIsSmi(element_k), &not_smi);
         assembler->Branch(
             assembler->Float64Equal(search_num.value(),
@@ -1957,7 +1918,7 @@
             &return_found, &continue_loop);
 
         assembler->Bind(&continue_loop);
-        index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+        index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
         assembler->Goto(&not_nan_loop);
       }
     }
@@ -1966,14 +1927,13 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordIsSmi(element_k), &continue_loop);
-      assembler->GotoUnless(assembler->Int32LessThan(
-                                assembler->LoadInstanceType(element_k),
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+      assembler->GotoUnless(assembler->IsStringInstanceType(
+                                assembler->LoadInstanceType(element_k)),
                             &continue_loop);
 
       // TODO(bmeurer): Consider inlining the StringEqual logic here.
@@ -1985,7 +1945,7 @@
           &return_found, &continue_loop);
 
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&string_loop);
     }
 
@@ -1998,11 +1958,11 @@
       assembler->Goto(&loop_body);
       assembler->Bind(&loop_body);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
 
-      Node* element_k =
-          assembler->LoadFixedArrayElement(elements, index_var.value());
+      Node* element_k = assembler->LoadFixedArrayElement(
+          elements, index_var.value(), 0, CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->GotoIf(assembler->WordIsSmi(element_k), &continue_loop);
 
       Node* map_k = assembler->LoadMap(element_k);
@@ -2010,7 +1970,7 @@
                                       &return_found, &continue_loop);
 
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&loop_body);
     }
   }
@@ -2039,14 +1999,15 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS);
       assembler->BranchIfFloat64Equal(element_k, search_num.value(),
                                       &return_found, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&not_nan_loop);
     }
   }
@@ -2075,31 +2036,18 @@
     {
       Label continue_loop(assembler);
       assembler->GotoUnless(
-          assembler->Int32LessThan(index_var.value(), len_var.value()),
+          assembler->UintPtrLessThan(index_var.value(), len_var.value()),
           &return_not_found);
 
-      if (kPointerSize == kDoubleSize) {
-        Node* element = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint64());
-        Node* the_hole = assembler->Int64Constant(kHoleNanInt64);
-        assembler->GotoIf(assembler->Word64Equal(element, the_hole),
-                          &continue_loop);
-      } else {
-        Node* element_upper = assembler->LoadFixedDoubleArrayElement(
-            elements, index_var.value(), MachineType::Uint32(),
-            kIeeeDoubleExponentWordOffset);
-        assembler->GotoIf(
-            assembler->Word32Equal(element_upper,
-                                   assembler->Int32Constant(kHoleNanUpper32)),
-            &continue_loop);
-      }
-
+      // Load the double value or continue if it contains a double hole.
       Node* element_k = assembler->LoadFixedDoubleArrayElement(
-          elements, index_var.value(), MachineType::Float64());
+          elements, index_var.value(), MachineType::Float64(), 0,
+          CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
+
       assembler->BranchIfFloat64Equal(element_k, search_num.value(),
                                       &return_found, &continue_loop);
       assembler->Bind(&continue_loop);
-      index_var.Bind(assembler->Int32Add(index_var.value(), int32_one));
+      index_var.Bind(assembler->IntPtrAdd(index_var.value(), intptr_one));
       assembler->Goto(&not_nan_loop);
     }
   }
diff --git a/src/builtins/builtins-callsite.cc b/src/builtins/builtins-callsite.cc
index 7fc2f98..ae9c76d 100644
--- a/src/builtins/builtins-callsite.cc
+++ b/src/builtins/builtins-callsite.cc
@@ -14,7 +14,7 @@
 #define CHECK_CALLSITE(recv, method)                                          \
   CHECK_RECEIVER(JSObject, recv, method);                                     \
   if (!JSReceiver::HasOwnProperty(                                            \
-           recv, isolate->factory()->call_site_position_symbol())             \
+           recv, isolate->factory()->call_site_frame_array_symbol())          \
            .FromMaybe(false)) {                                               \
     THROW_NEW_ERROR_RETURN_FAILURE(                                           \
         isolate,                                                              \
@@ -29,172 +29,152 @@
   return isolate->heap()->null_value();
 }
 
+Handle<FrameArray> GetFrameArray(Isolate* isolate, Handle<JSObject> object) {
+  Handle<Object> frame_array_obj = JSObject::GetDataProperty(
+      object, isolate->factory()->call_site_frame_array_symbol());
+  return Handle<FrameArray>::cast(frame_array_obj);
+}
+
+int GetFrameIndex(Isolate* isolate, Handle<JSObject> object) {
+  Handle<Object> frame_index_obj = JSObject::GetDataProperty(
+      object, isolate->factory()->call_site_frame_index_symbol());
+  return Smi::cast(*frame_index_obj)->value();
+}
+
 }  // namespace
 
 BUILTIN(CallSitePrototypeGetColumnNumber) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getColumnNumber");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return PositiveNumberOrNull(call_site.GetColumnNumber(), isolate);
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return PositiveNumberOrNull(it.Frame()->GetColumnNumber(), isolate);
 }
 
 BUILTIN(CallSitePrototypeGetEvalOrigin) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getEvalOrigin");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetEvalOrigin();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetEvalOrigin();
 }
 
 BUILTIN(CallSitePrototypeGetFileName) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getFileName");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetFileName();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetFileName();
 }
 
-namespace {
-
-bool CallSiteIsStrict(Isolate* isolate, Handle<JSObject> receiver) {
-  Handle<Object> strict;
-  Handle<Symbol> symbol = isolate->factory()->call_site_strict_symbol();
-  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, strict,
-                                     JSObject::GetProperty(receiver, symbol));
-  return strict->BooleanValue();
-}
-
-}  // namespace
-
 BUILTIN(CallSitePrototypeGetFunction) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getFunction");
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
 
-  if (CallSiteIsStrict(isolate, recv))
-    return *isolate->factory()->undefined_value();
-
-  Handle<Symbol> symbol = isolate->factory()->call_site_function_symbol();
-  RETURN_RESULT_OR_FAILURE(isolate, JSObject::GetProperty(recv, symbol));
+  StackFrameBase* frame = it.Frame();
+  if (frame->IsStrict()) return isolate->heap()->undefined_value();
+  return *frame->GetFunction();
 }
 
 BUILTIN(CallSitePrototypeGetFunctionName) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getFunctionName");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetFunctionName();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetFunctionName();
 }
 
 BUILTIN(CallSitePrototypeGetLineNumber) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getLineNumber");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-
-  int line_number = call_site.IsWasm() ? call_site.wasm_func_index()
-                                       : call_site.GetLineNumber();
-  return PositiveNumberOrNull(line_number, isolate);
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return PositiveNumberOrNull(it.Frame()->GetLineNumber(), isolate);
 }
 
 BUILTIN(CallSitePrototypeGetMethodName) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getMethodName");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetMethodName();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetMethodName();
 }
 
 BUILTIN(CallSitePrototypeGetPosition) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getPosition");
-
-  Handle<Symbol> symbol = isolate->factory()->call_site_position_symbol();
-  RETURN_RESULT_OR_FAILURE(isolate, JSObject::GetProperty(recv, symbol));
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return Smi::FromInt(it.Frame()->GetPosition());
 }
 
 BUILTIN(CallSitePrototypeGetScriptNameOrSourceURL) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getScriptNameOrSourceUrl");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetScriptNameOrSourceUrl();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetScriptNameOrSourceUrl();
 }
 
 BUILTIN(CallSitePrototypeGetThis) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getThis");
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
 
-  if (CallSiteIsStrict(isolate, recv))
-    return *isolate->factory()->undefined_value();
-
-  Handle<Object> receiver;
-  Handle<Symbol> symbol = isolate->factory()->call_site_receiver_symbol();
-  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, receiver,
-                                     JSObject::GetProperty(recv, symbol));
-
-  if (*receiver == isolate->heap()->call_site_constructor_symbol())
-    return *isolate->factory()->undefined_value();
-
-  return *receiver;
+  StackFrameBase* frame = it.Frame();
+  if (frame->IsStrict()) return isolate->heap()->undefined_value();
+  return *frame->GetReceiver();
 }
 
 BUILTIN(CallSitePrototypeGetTypeName) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "getTypeName");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return *call_site.GetTypeName();
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return *it.Frame()->GetTypeName();
 }
 
 BUILTIN(CallSitePrototypeIsConstructor) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "isConstructor");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return isolate->heap()->ToBoolean(call_site.IsConstructor());
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return isolate->heap()->ToBoolean(it.Frame()->IsConstructor());
 }
 
 BUILTIN(CallSitePrototypeIsEval) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "isEval");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return isolate->heap()->ToBoolean(call_site.IsEval());
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return isolate->heap()->ToBoolean(it.Frame()->IsEval());
 }
 
 BUILTIN(CallSitePrototypeIsNative) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "isNative");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return isolate->heap()->ToBoolean(call_site.IsNative());
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return isolate->heap()->ToBoolean(it.Frame()->IsNative());
 }
 
 BUILTIN(CallSitePrototypeIsToplevel) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "isToplevel");
-
-  CallSite call_site(isolate, recv);
-  CHECK(call_site.IsJavaScript() || call_site.IsWasm());
-  return isolate->heap()->ToBoolean(call_site.IsToplevel());
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  return isolate->heap()->ToBoolean(it.Frame()->IsToplevel());
 }
 
 BUILTIN(CallSitePrototypeToString) {
   HandleScope scope(isolate);
   CHECK_CALLSITE(recv, "toString");
-  RETURN_RESULT_OR_FAILURE(isolate, CallSiteUtils::ToString(isolate, recv));
+  FrameArrayIterator it(isolate, GetFrameArray(isolate, recv),
+                        GetFrameIndex(isolate, recv));
+  RETURN_RESULT_OR_FAILURE(isolate, it.Frame()->ToString());
 }
 
 #undef CHECK_CALLSITE
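The rewrite of builtins-callsite.cc above replaces the per-property CallSite representation (separate symbols for position, receiver, function, strictness) with two data properties, a shared frame array plus an index into it, which FrameArrayIterator resolves to a StackFrameBase on each accessor call. A rough C++ model of that data layout (illustrative types, not the V8 classes):

#include <string>
#include <vector>

// Illustrative stand-ins for V8's FrameArray / StackFrameBase.
struct Frame {
  std::string function_name;
  int line = 0;
  int column = 0;
  bool is_strict = false;
};

// A call site no longer stores each field on itself; it stores the shared
// frame array and its own index, and every accessor goes through the frame.
struct CallSite {
  const std::vector<Frame>* frames;  // call_site_frame_array_symbol
  int frame_index;                   // call_site_frame_index_symbol

  const Frame& frame() const { return (*frames)[frame_index]; }
  int GetLineNumber() const { return frame().line; }
  int GetColumnNumber() const { return frame().column; }
  std::string GetFunctionName() const { return frame().function_name; }
};
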
diff --git a/src/builtins/builtins-conversion.cc b/src/builtins/builtins-conversion.cc
index 0d04a02..7fbe4f8 100644
--- a/src/builtins/builtins-conversion.cc
+++ b/src/builtins/builtins-conversion.cc
@@ -110,133 +110,99 @@
 }
 
 void Builtins::Generate_StringToNumber(CodeStubAssembler* assembler) {
-  typedef CodeStubAssembler::Label Label;
   typedef compiler::Node Node;
   typedef TypeConversionDescriptor Descriptor;
 
   Node* input = assembler->Parameter(Descriptor::kArgument);
   Node* context = assembler->Parameter(Descriptor::kContext);
 
-  Label runtime(assembler);
+  assembler->Return(assembler->StringToNumber(context, input));
+}
 
-  // Check if string has a cached array index.
-  Node* hash = assembler->LoadNameHashField(input);
-  Node* bit = assembler->Word32And(
-      hash, assembler->Int32Constant(String::kContainsCachedArrayIndexMask));
-  assembler->GotoIf(assembler->Word32NotEqual(bit, assembler->Int32Constant(0)),
-                    &runtime);
+void Builtins::Generate_ToName(CodeStubAssembler* assembler) {
+  typedef compiler::Node Node;
+  typedef TypeConversionDescriptor Descriptor;
 
-  assembler->Return(assembler->SmiTag(
-      assembler->BitFieldDecode<String::ArrayIndexValueBits>(hash)));
+  Node* input = assembler->Parameter(Descriptor::kArgument);
+  Node* context = assembler->Parameter(Descriptor::kContext);
 
-  assembler->Bind(&runtime);
-  {
-    // Note: We cannot tail call to the runtime here, as js-to-wasm
-    // trampolines also use this code currently, and they declare all
-    // outgoing parameters as untagged, while we would push a tagged
-    // object here.
-    Node* result =
-        assembler->CallRuntime(Runtime::kStringToNumber, context, input);
-    assembler->Return(result);
-  }
+  assembler->Return(assembler->ToName(context, input));
+}
+
+// static
+void Builtins::Generate_NonNumberToNumber(CodeStubAssembler* assembler) {
+  typedef compiler::Node Node;
+  typedef TypeConversionDescriptor Descriptor;
+
+  Node* input = assembler->Parameter(Descriptor::kArgument);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  assembler->Return(assembler->NonNumberToNumber(context, input));
 }
 
 // ES6 section 7.1.3 ToNumber ( argument )
-void Builtins::Generate_NonNumberToNumber(CodeStubAssembler* assembler) {
-  typedef CodeStubAssembler::Label Label;
+void Builtins::Generate_ToNumber(CodeStubAssembler* assembler) {
   typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
   typedef TypeConversionDescriptor Descriptor;
 
   Node* input = assembler->Parameter(Descriptor::kArgument);
   Node* context = assembler->Parameter(Descriptor::kContext);
 
-  // We might need to loop once here due to ToPrimitive conversions.
-  Variable var_input(assembler, MachineRepresentation::kTagged);
-  Label loop(assembler, &var_input);
-  var_input.Bind(input);
-  assembler->Goto(&loop);
-  assembler->Bind(&loop);
+  assembler->Return(assembler->ToNumber(context, input));
+}
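Generate_ToNumber and Generate_NonNumberToNumber above now just delegate to the CodeStubAssembler helpers; the hand-written dispatch they replace is still visible in the deleted lines below. A plain C++ model of that dispatch (illustrative value type, heavily simplified string handling):

#include <cmath>
#include <string>

// Illustrative stand-in for a tagged JS value; not a V8 type.
struct Value {
  enum Kind { kSmi, kHeapNumber, kString, kOddball, kReceiver } kind;
  double number = 0.0;               // payload for kSmi / kHeapNumber / kOddball
  std::string string;                // payload for kString
  const Value* primitive = nullptr;  // ToPrimitive(Number) result for kReceiver
};

double ToNumberModel(const Value& v) {
  switch (v.kind) {
    case Value::kSmi:
    case Value::kHeapNumber:
      return v.number;               // already a number
    case Value::kString:
      return std::stod(v.string);    // StringToNumber, much simplified
    case Value::kOddball:
      return v.number;               // cached Oddball::kToNumberOffset value
    case Value::kReceiver:
      // ToPrimitive first, then re-dispatch on the primitive result.
      return v.primitive ? ToNumberModel(*v.primitive) : std::nan("");
  }
  return std::nan("");
}
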
+
+void Builtins::Generate_ToString(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef TypeConversionDescriptor Descriptor;
+
+  Node* input = assembler->Parameter(Descriptor::kArgument);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label is_number(assembler);
+  Label runtime(assembler);
+
+  assembler->GotoIf(assembler->WordIsSmi(input), &is_number);
+
+  Node* input_map = assembler->LoadMap(input);
+  Node* input_instance_type = assembler->LoadMapInstanceType(input_map);
+
+  Label not_string(assembler);
+  assembler->GotoUnless(assembler->IsStringInstanceType(input_instance_type),
+                        &not_string);
+  assembler->Return(input);
+
+  Label not_heap_number(assembler);
+
+  assembler->Bind(&not_string);
   {
-    // Load the current {input} value (known to be a HeapObject).
-    Node* input = var_input.value();
+    assembler->GotoUnless(
+        assembler->WordEqual(input_map, assembler->HeapNumberMapConstant()),
+        &not_heap_number);
+    assembler->Goto(&is_number);
+  }
 
-    // Dispatch on the {input} instance type.
-    Node* input_instance_type = assembler->LoadInstanceType(input);
-    Label if_inputisstring(assembler), if_inputisoddball(assembler),
-        if_inputisreceiver(assembler, Label::kDeferred),
-        if_inputisother(assembler, Label::kDeferred);
-    assembler->GotoIf(assembler->Int32LessThan(
-                          input_instance_type,
-                          assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-                      &if_inputisstring);
+  assembler->Bind(&is_number);
+  {
+    // TODO(tebbi): inline as soon as NumberToString is in the CodeStubAssembler
+    Callable callable = CodeFactory::NumberToString(assembler->isolate());
+    assembler->Return(assembler->CallStub(callable, context, input));
+  }
+
+  assembler->Bind(&not_heap_number);
+  {
     assembler->GotoIf(
-        assembler->Word32Equal(input_instance_type,
-                               assembler->Int32Constant(ODDBALL_TYPE)),
-        &if_inputisoddball);
-    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
-    assembler->Branch(assembler->Int32GreaterThanOrEqual(
-                          input_instance_type,
-                          assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE)),
-                      &if_inputisreceiver, &if_inputisother);
+        assembler->Word32NotEqual(input_instance_type,
+                                  assembler->Int32Constant(ODDBALL_TYPE)),
+        &runtime);
+    assembler->Return(
+        assembler->LoadObjectField(input, Oddball::kToStringOffset));
+  }
 
-    assembler->Bind(&if_inputisstring);
-    {
-      // The {input} is a String, use the fast stub to convert it to a Number.
-      // TODO(bmeurer): Consider inlining the StringToNumber logic here.
-      Callable callable = CodeFactory::StringToNumber(assembler->isolate());
-      assembler->TailCallStub(callable, context, input);
-    }
-
-    assembler->Bind(&if_inputisoddball);
-    {
-      // The {input} is an Oddball, we just need to the Number value of it.
-      Node* result =
-          assembler->LoadObjectField(input, Oddball::kToNumberOffset);
-      assembler->Return(result);
-    }
-
-    assembler->Bind(&if_inputisreceiver);
-    {
-      // The {input} is a JSReceiver, we need to convert it to a Primitive first
-      // using the ToPrimitive type conversion, preferably yielding a Number.
-      Callable callable = CodeFactory::NonPrimitiveToPrimitive(
-          assembler->isolate(), ToPrimitiveHint::kNumber);
-      Node* result = assembler->CallStub(callable, context, input);
-
-      // Check if the {result} is already a Number.
-      Label if_resultisnumber(assembler), if_resultisnotnumber(assembler);
-      assembler->GotoIf(assembler->WordIsSmi(result), &if_resultisnumber);
-      Node* result_map = assembler->LoadMap(result);
-      assembler->Branch(
-          assembler->WordEqual(result_map, assembler->HeapNumberMapConstant()),
-          &if_resultisnumber, &if_resultisnotnumber);
-
-      assembler->Bind(&if_resultisnumber);
-      {
-        // The ToPrimitive conversion already gave us a Number, so we're done.
-        assembler->Return(result);
-      }
-
-      assembler->Bind(&if_resultisnotnumber);
-      {
-        // We now have a Primitive {result}, but it's not yet a Number.
-        var_input.Bind(result);
-        assembler->Goto(&loop);
-      }
-    }
-
-    assembler->Bind(&if_inputisother);
-    {
-      // The {input} is something else (i.e. Symbol or Simd128Value), let the
-      // runtime figure out the correct exception.
-      // Note: We cannot tail call to the runtime here, as js-to-wasm
-      // trampolines also use this code currently, and they declare all
-      // outgoing parameters as untagged, while we would push a tagged
-      // object here.
-      Node* result = assembler->CallRuntime(Runtime::kToNumber, context, input);
-      assembler->Return(result);
-    }
+  assembler->Bind(&runtime);
+  {
+    assembler->Return(
+        assembler->CallRuntime(Runtime::kToString, context, input));
   }
 }
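
The new Generate_ToString above dispatches on the input's shape: Smis and HeapNumbers take the number-to-string path, strings are returned unchanged, oddballs return their cached string, and everything else falls through to Runtime::kToString. A minimal standalone C++17 sketch of that dispatch order follows; all type and helper names are invented for illustration and this is not V8 code.

#include <iostream>
#include <string>
#include <variant>

struct Oddball { std::string cached_to_string; };  // e.g. "undefined", "null", "true"
struct Receiver {};                                // stand-in for any other heap object
using TaggedValue = std::variant<int, double, std::string, Oddball, Receiver>;

std::string NumberToString(double v) { return std::to_string(v); }          // assumed helper
std::string RuntimeToString(const Receiver&) { return "[object Object]"; }  // assumed helper

std::string ToString(const TaggedValue& input) {
  if (auto smi = std::get_if<int>(&input)) return NumberToString(*smi);       // number path
  if (auto str = std::get_if<std::string>(&input)) return *str;               // string fast path
  if (auto num = std::get_if<double>(&input)) return NumberToString(*num);    // number path
  if (auto odd = std::get_if<Oddball>(&input)) return odd->cached_to_string;  // cached string
  return RuntimeToString(std::get<Receiver>(input));                          // runtime fallback
}

int main() {
  std::cout << ToString(TaggedValue{42}) << "\n";                    // "42.000000"
  std::cout << ToString(TaggedValue{Oddball{"undefined"}}) << "\n";  // "undefined"
}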
 
diff --git a/src/builtins/builtins-dataview.cc b/src/builtins/builtins-dataview.cc
index 32c5a83..3d14e31 100644
--- a/src/builtins/builtins-dataview.cc
+++ b/src/builtins/builtins-dataview.cc
@@ -129,5 +129,209 @@
   return data_view->byte_offset();
 }
 
+namespace {
+
+bool NeedToFlipBytes(bool is_little_endian) {
+#ifdef V8_TARGET_LITTLE_ENDIAN
+  return !is_little_endian;
+#else
+  return is_little_endian;
+#endif
+}
+
+template <size_t n>
+void CopyBytes(uint8_t* target, uint8_t const* source) {
+  for (size_t i = 0; i < n; i++) {
+    *(target++) = *(source++);
+  }
+}
+
+template <size_t n>
+void FlipBytes(uint8_t* target, uint8_t const* source) {
+  source = source + (n - 1);
+  for (size_t i = 0; i < n; i++) {
+    *(target++) = *(source--);
+  }
+}
+
+// ES6 section 24.2.1.1 GetViewValue (view, requestIndex, isLittleEndian, type)
+template <typename T>
+MaybeHandle<Object> GetViewValue(Isolate* isolate, Handle<JSDataView> data_view,
+                                 Handle<Object> request_index,
+                                 bool is_little_endian) {
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, request_index,
+      Object::ToIndex(isolate, request_index,
+                      MessageTemplate::kInvalidDataViewAccessorOffset),
+      Object);
+  size_t get_index = 0;
+  if (!TryNumberToSize(*request_index, &get_index)) {
+    THROW_NEW_ERROR(
+        isolate, NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset),
+        Object);
+  }
+  Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(data_view->buffer()),
+                               isolate);
+  size_t const data_view_byte_offset = NumberToSize(data_view->byte_offset());
+  size_t const data_view_byte_length = NumberToSize(data_view->byte_length());
+  if (get_index + sizeof(T) > data_view_byte_length ||
+      get_index + sizeof(T) < get_index) {  // overflow
+    THROW_NEW_ERROR(
+        isolate, NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset),
+        Object);
+  }
+  union {
+    T data;
+    uint8_t bytes[sizeof(T)];
+  } v;
+  size_t const buffer_offset = data_view_byte_offset + get_index;
+  DCHECK_GE(NumberToSize(buffer->byte_length()), buffer_offset + sizeof(T));
+  uint8_t const* const source =
+      static_cast<uint8_t*>(buffer->backing_store()) + buffer_offset;
+  if (NeedToFlipBytes(is_little_endian)) {
+    FlipBytes<sizeof(T)>(v.bytes, source);
+  } else {
+    CopyBytes<sizeof(T)>(v.bytes, source);
+  }
+  return isolate->factory()->NewNumber(v.data);
+}
+
+template <typename T>
+T DataViewConvertValue(double value);
+
+template <>
+int8_t DataViewConvertValue<int8_t>(double value) {
+  return static_cast<int8_t>(DoubleToInt32(value));
+}
+
+template <>
+int16_t DataViewConvertValue<int16_t>(double value) {
+  return static_cast<int16_t>(DoubleToInt32(value));
+}
+
+template <>
+int32_t DataViewConvertValue<int32_t>(double value) {
+  return DoubleToInt32(value);
+}
+
+template <>
+uint8_t DataViewConvertValue<uint8_t>(double value) {
+  return static_cast<uint8_t>(DoubleToUint32(value));
+}
+
+template <>
+uint16_t DataViewConvertValue<uint16_t>(double value) {
+  return static_cast<uint16_t>(DoubleToUint32(value));
+}
+
+template <>
+uint32_t DataViewConvertValue<uint32_t>(double value) {
+  return DoubleToUint32(value);
+}
+
+template <>
+float DataViewConvertValue<float>(double value) {
+  return static_cast<float>(value);
+}
+
+template <>
+double DataViewConvertValue<double>(double value) {
+  return value;
+}
+
+// ES6 section 24.2.1.2 SetViewValue (view, requestIndex, isLittleEndian, type,
+//                                    value)
+template <typename T>
+MaybeHandle<Object> SetViewValue(Isolate* isolate, Handle<JSDataView> data_view,
+                                 Handle<Object> request_index,
+                                 bool is_little_endian, Handle<Object> value) {
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, request_index,
+      Object::ToIndex(isolate, request_index,
+                      MessageTemplate::kInvalidDataViewAccessorOffset),
+      Object);
+  ASSIGN_RETURN_ON_EXCEPTION(isolate, value, Object::ToNumber(value), Object);
+  size_t get_index = 0;
+  if (!TryNumberToSize(*request_index, &get_index)) {
+    THROW_NEW_ERROR(
+        isolate, NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset),
+        Object);
+  }
+  Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(data_view->buffer()),
+                               isolate);
+  size_t const data_view_byte_offset = NumberToSize(data_view->byte_offset());
+  size_t const data_view_byte_length = NumberToSize(data_view->byte_length());
+  if (get_index + sizeof(T) > data_view_byte_length ||
+      get_index + sizeof(T) < get_index) {  // overflow
+    THROW_NEW_ERROR(
+        isolate, NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset),
+        Object);
+  }
+  union {
+    T data;
+    uint8_t bytes[sizeof(T)];
+  } v;
+  v.data = DataViewConvertValue<T>(value->Number());
+  size_t const buffer_offset = data_view_byte_offset + get_index;
+  DCHECK(NumberToSize(buffer->byte_length()) >= buffer_offset + sizeof(T));
+  uint8_t* const target =
+      static_cast<uint8_t*>(buffer->backing_store()) + buffer_offset;
+  if (NeedToFlipBytes(is_little_endian)) {
+    FlipBytes<sizeof(T)>(target, v.bytes);
+  } else {
+    CopyBytes<sizeof(T)>(target, v.bytes);
+  }
+  return isolate->factory()->undefined_value();
+}
+
+}  // namespace
+
+#define DATA_VIEW_PROTOTYPE_GET(Type, type)                                \
+  BUILTIN(DataViewPrototypeGet##Type) {                                    \
+    HandleScope scope(isolate);                                            \
+    CHECK_RECEIVER(JSDataView, data_view, "DataView.prototype.get" #Type); \
+    Handle<Object> byte_offset = args.atOrUndefined(isolate, 1);           \
+    Handle<Object> is_little_endian = args.atOrUndefined(isolate, 2);      \
+    Handle<Object> result;                                                 \
+    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(                                    \
+        isolate, result,                                                   \
+        GetViewValue<type>(isolate, data_view, byte_offset,                \
+                           is_little_endian->BooleanValue()));             \
+    return *result;                                                        \
+  }
+DATA_VIEW_PROTOTYPE_GET(Int8, int8_t)
+DATA_VIEW_PROTOTYPE_GET(Uint8, uint8_t)
+DATA_VIEW_PROTOTYPE_GET(Int16, int16_t)
+DATA_VIEW_PROTOTYPE_GET(Uint16, uint16_t)
+DATA_VIEW_PROTOTYPE_GET(Int32, int32_t)
+DATA_VIEW_PROTOTYPE_GET(Uint32, uint32_t)
+DATA_VIEW_PROTOTYPE_GET(Float32, float)
+DATA_VIEW_PROTOTYPE_GET(Float64, double)
+#undef DATA_VIEW_PROTOTYPE_GET
+
+#define DATA_VIEW_PROTOTYPE_SET(Type, type)                                \
+  BUILTIN(DataViewPrototypeSet##Type) {                                    \
+    HandleScope scope(isolate);                                            \
+    CHECK_RECEIVER(JSDataView, data_view, "DataView.prototype.set" #Type); \
+    Handle<Object> byte_offset = args.atOrUndefined(isolate, 1);           \
+    Handle<Object> value = args.atOrUndefined(isolate, 2);                 \
+    Handle<Object> is_little_endian = args.atOrUndefined(isolate, 3);      \
+    Handle<Object> result;                                                 \
+    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(                                    \
+        isolate, result,                                                   \
+        SetViewValue<type>(isolate, data_view, byte_offset,                \
+                           is_little_endian->BooleanValue(), value));      \
+    return *result;                                                        \
+  }
+DATA_VIEW_PROTOTYPE_SET(Int8, int8_t)
+DATA_VIEW_PROTOTYPE_SET(Uint8, uint8_t)
+DATA_VIEW_PROTOTYPE_SET(Int16, int16_t)
+DATA_VIEW_PROTOTYPE_SET(Uint16, uint16_t)
+DATA_VIEW_PROTOTYPE_SET(Int32, int32_t)
+DATA_VIEW_PROTOTYPE_SET(Uint32, uint32_t)
+DATA_VIEW_PROTOTYPE_SET(Float32, float)
+DATA_VIEW_PROTOTYPE_SET(Float64, double)
+#undef DATA_VIEW_PROTOTYPE_SET
+
 }  // namespace internal
 }  // namespace v8
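
GetViewValue and SetViewValue added above move raw bytes through a union and either copy them straight through or reverse them, depending on whether the requested endianness matches the target's. A standalone sketch of the same byte handling, assuming a little-endian host; the names and values here are illustrative, not V8 API.

#include <cstdint>
#include <cstdio>
#include <cstring>

template <size_t n>
void FlipBytes(uint8_t* target, const uint8_t* source) {
  // Reverse byte order, mirroring the FlipBytes helper in the diff.
  for (size_t i = 0; i < n; i++) target[i] = source[n - 1 - i];
}

int main() {
  // A DataView-style read of a big-endian uint32 from a raw byte buffer.
  const uint8_t buffer[] = {0x12, 0x34, 0x56, 0x78};
  union { uint32_t data; uint8_t bytes[sizeof(uint32_t)]; } v;
  bool is_little_endian = false;      // caller requested big-endian access
  bool host_is_little_endian = true;  // assumption for this sketch
  if (is_little_endian != host_is_little_endian) {
    FlipBytes<sizeof(uint32_t)>(v.bytes, buffer);
  } else {
    std::memcpy(v.bytes, buffer, sizeof(uint32_t));
  }
  std::printf("0x%08x\n", static_cast<unsigned>(v.data));  // 0x12345678 on a little-endian host
}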
diff --git a/src/builtins/builtins-date.cc b/src/builtins/builtins-date.cc
index d5c3476..205c8c9 100644
--- a/src/builtins/builtins-date.cc
+++ b/src/builtins/builtins-date.cc
@@ -909,93 +909,156 @@
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetDate(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kDay);
+void Builtins::Generate_DatePrototype_GetField(CodeStubAssembler* assembler,
+                                               int field_index) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(0);
+  Node* context = assembler->Parameter(3);
+
+  Label receiver_not_date(assembler, Label::kDeferred);
+
+  assembler->GotoIf(assembler->WordIsSmi(receiver), &receiver_not_date);
+  Node* receiver_instance_type = assembler->LoadInstanceType(receiver);
+  assembler->GotoIf(
+      assembler->Word32NotEqual(receiver_instance_type,
+                                assembler->Int32Constant(JS_DATE_TYPE)),
+      &receiver_not_date);
+
+  // Load the specified date field, falling back to the runtime as necessary.
+  if (field_index == JSDate::kDateValue) {
+    assembler->Return(
+        assembler->LoadObjectField(receiver, JSDate::kValueOffset));
+  } else {
+    if (field_index < JSDate::kFirstUncachedField) {
+      Label stamp_mismatch(assembler, Label::kDeferred);
+      Node* date_cache_stamp = assembler->Load(
+          MachineType::AnyTagged(),
+          assembler->ExternalConstant(
+              ExternalReference::date_cache_stamp(assembler->isolate())));
+
+      Node* cache_stamp =
+          assembler->LoadObjectField(receiver, JSDate::kCacheStampOffset);
+      assembler->GotoIf(assembler->WordNotEqual(date_cache_stamp, cache_stamp),
+                        &stamp_mismatch);
+      assembler->Return(assembler->LoadObjectField(
+          receiver, JSDate::kValueOffset + field_index * kPointerSize));
+
+      assembler->Bind(&stamp_mismatch);
+    }
+
+    Node* field_index_smi = assembler->SmiConstant(Smi::FromInt(field_index));
+    Node* function = assembler->ExternalConstant(
+        ExternalReference::get_date_field_function(assembler->isolate()));
+    Node* result = assembler->CallCFunction2(
+        MachineType::AnyTagged(), MachineType::Pointer(),
+        MachineType::AnyTagged(), function, receiver, field_index_smi);
+    assembler->Return(result);
+  }
+
+  // Raise a TypeError if the receiver is not a date.
+  assembler->Bind(&receiver_not_date);
+  {
+    Node* result = assembler->CallRuntime(Runtime::kThrowNotDateError, context);
+    assembler->Return(result);
+  }
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetDay(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kWeekday);
+void Builtins::Generate_DatePrototypeGetDate(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kDay);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetFullYear(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kYear);
+void Builtins::Generate_DatePrototypeGetDay(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kWeekday);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetHours(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kHour);
+void Builtins::Generate_DatePrototypeGetFullYear(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kYear);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetMilliseconds(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMillisecond);
+void Builtins::Generate_DatePrototypeGetHours(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kHour);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetMinutes(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMinute);
+void Builtins::Generate_DatePrototypeGetMilliseconds(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMillisecond);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetMonth(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMonth);
+void Builtins::Generate_DatePrototypeGetMinutes(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMinute);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetSeconds(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kSecond);
+void Builtins::Generate_DatePrototypeGetMonth(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMonth);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetTime(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kDateValue);
+void Builtins::Generate_DatePrototypeGetSeconds(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kSecond);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetTimezoneOffset(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kTimezoneOffset);
+void Builtins::Generate_DatePrototypeGetTime(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kDateValue);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCDate(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kDayUTC);
+void Builtins::Generate_DatePrototypeGetTimezoneOffset(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kTimezoneOffset);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCDay(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kWeekdayUTC);
+void Builtins::Generate_DatePrototypeGetUTCDate(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kDayUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCFullYear(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kYearUTC);
+void Builtins::Generate_DatePrototypeGetUTCDay(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kWeekdayUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCHours(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kHourUTC);
+void Builtins::Generate_DatePrototypeGetUTCFullYear(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kYearUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCMilliseconds(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMillisecondUTC);
+void Builtins::Generate_DatePrototypeGetUTCHours(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kHourUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCMinutes(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMinuteUTC);
+void Builtins::Generate_DatePrototypeGetUTCMilliseconds(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMillisecondUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCMonth(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kMonthUTC);
+void Builtins::Generate_DatePrototypeGetUTCMinutes(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMinuteUTC);
 }
 
 // static
-void Builtins::Generate_DatePrototypeGetUTCSeconds(MacroAssembler* masm) {
-  Generate_DatePrototype_GetField(masm, JSDate::kSecondUTC);
+void Builtins::Generate_DatePrototypeGetUTCMonth(CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kMonthUTC);
+}
+
+// static
+void Builtins::Generate_DatePrototypeGetUTCSeconds(
+    CodeStubAssembler* assembler) {
+  Generate_DatePrototype_GetField(assembler, JSDate::kSecondUTC);
 }
 
 }  // namespace internal
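
The CodeStubAssembler port of Generate_DatePrototype_GetField above keeps the existing caching scheme: a cached date field is only trusted while the object's cache stamp equals the global date_cache_stamp, otherwise the getter falls back to the C++ date-field function. A standalone sketch of that stamp-validated cache; the year computation and field layout are simplified placeholders, not V8's.

#include <cstdio>

static int global_date_cache_stamp = 0;

struct DateObject {
  double value = 1700000000000.0;  // ms since epoch (example value)
  int cache_stamp = -1;            // stamp at the time fields were cached
  int cached_year = 0;
};

int ComputeYearSlow(double ms) {
  // Rough year estimate, good enough for the sketch.
  return 1970 + static_cast<int>(ms / 3.15576e10);
}

int GetYear(DateObject* d) {
  if (d->cache_stamp == global_date_cache_stamp) return d->cached_year;  // fast path
  d->cached_year = ComputeYearSlow(d->value);  // slow path: recompute and re-stamp
  d->cache_stamp = global_date_cache_stamp;
  return d->cached_year;
}

int main() {
  DateObject d;
  std::printf("%d\n", GetYear(&d));  // slow path (stamp mismatch)
  std::printf("%d\n", GetYear(&d));  // fast path (stamp matches)
  ++global_date_cache_stamp;         // e.g. the timezone changed
  std::printf("%d\n", GetYear(&d));  // slow path again
}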
diff --git a/src/builtins/builtins-global.cc b/src/builtins/builtins-global.cc
index d99a553..2205788 100644
--- a/src/builtins/builtins-global.cc
+++ b/src/builtins/builtins-global.cc
@@ -5,6 +5,7 @@
 #include "src/builtins/builtins.h"
 #include "src/builtins/builtins-utils.h"
 
+#include "src/code-factory.h"
 #include "src/compiler.h"
 #include "src/uri.h"
 
@@ -99,5 +100,113 @@
       Execution::Call(isolate, function, target_global_proxy, 0, nullptr));
 }
 
+// ES6 section 18.2.2 isFinite ( number )
+void Builtins::Generate_GlobalIsFinite(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Node* context = assembler->Parameter(4);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // We might need to loop once for ToNumber conversion.
+  Variable var_num(assembler, MachineRepresentation::kTagged);
+  Label loop(assembler, &var_num);
+  var_num.Bind(assembler->Parameter(1));
+  assembler->Goto(&loop);
+  assembler->Bind(&loop);
+  {
+    // Load the current {num} value.
+    Node* num = var_num.value();
+
+    // Check if {num} is a Smi or a HeapObject.
+    assembler->GotoIf(assembler->WordIsSmi(num), &return_true);
+
+    // Check if {num} is a HeapNumber.
+    Label if_numisheapnumber(assembler),
+        if_numisnotheapnumber(assembler, Label::kDeferred);
+    assembler->Branch(assembler->WordEqual(assembler->LoadMap(num),
+                                           assembler->HeapNumberMapConstant()),
+                      &if_numisheapnumber, &if_numisnotheapnumber);
+
+    assembler->Bind(&if_numisheapnumber);
+    {
+      // Check if {num} contains a finite, non-NaN value.
+      Node* num_value = assembler->LoadHeapNumberValue(num);
+      assembler->BranchIfFloat64IsNaN(
+          assembler->Float64Sub(num_value, num_value), &return_false,
+          &return_true);
+    }
+
+    assembler->Bind(&if_numisnotheapnumber);
+    {
+      // Need to convert {num} to a Number first.
+      Callable callable = CodeFactory::NonNumberToNumber(assembler->isolate());
+      var_num.Bind(assembler->CallStub(callable, context, num));
+      assembler->Goto(&loop);
+    }
+  }
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
+// ES6 section 18.2.3 isNaN ( number )
+void Builtins::Generate_GlobalIsNaN(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Node* context = assembler->Parameter(4);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // We might need to loop once for ToNumber conversion.
+  Variable var_num(assembler, MachineRepresentation::kTagged);
+  Label loop(assembler, &var_num);
+  var_num.Bind(assembler->Parameter(1));
+  assembler->Goto(&loop);
+  assembler->Bind(&loop);
+  {
+    // Load the current {num} value.
+    Node* num = var_num.value();
+
+    // Check if {num} is a Smi or a HeapObject.
+    assembler->GotoIf(assembler->WordIsSmi(num), &return_false);
+
+    // Check if {num} is a HeapNumber.
+    Label if_numisheapnumber(assembler),
+        if_numisnotheapnumber(assembler, Label::kDeferred);
+    assembler->Branch(assembler->WordEqual(assembler->LoadMap(num),
+                                           assembler->HeapNumberMapConstant()),
+                      &if_numisheapnumber, &if_numisnotheapnumber);
+
+    assembler->Bind(&if_numisheapnumber);
+    {
+      // Check if {num} contains a NaN.
+      Node* num_value = assembler->LoadHeapNumberValue(num);
+      assembler->BranchIfFloat64IsNaN(num_value, &return_true, &return_false);
+    }
+
+    assembler->Bind(&if_numisnotheapnumber);
+    {
+      // Need to convert {num} to a Number first.
+      Callable callable = CodeFactory::NonNumberToNumber(assembler->isolate());
+      var_num.Bind(assembler->CallStub(callable, context, num));
+      assembler->Goto(&loop);
+    }
+  }
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
 }  // namespace internal
 }  // namespace v8
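
Generate_GlobalIsFinite and Generate_GlobalIsNaN above classify a HeapNumber with a single subtraction: x - x is 0.0 for every finite x and NaN for NaN and the infinities, so checking whether the difference is NaN answers finiteness. A standalone sketch of that check; purely illustrative, not V8 code.

#include <cmath>
#include <cstdio>
#include <limits>

bool IsFiniteViaSub(double x) {
  double diff = x - x;       // 0.0 for finite x, NaN for NaN and +/-infinity
  return !(diff != diff);    // NaN is the only value that compares unequal to itself
}

int main() {
  std::printf("%d\n", IsFiniteViaSub(1.5));                                      // 1
  std::printf("%d\n", IsFiniteViaSub(std::numeric_limits<double>::infinity()));  // 0
  std::printf("%d\n", IsFiniteViaSub(std::nan("")));                             // 0
}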
diff --git a/src/builtins/builtins-handler.cc b/src/builtins/builtins-handler.cc
index 8b3df79..ebbc978 100644
--- a/src/builtins/builtins-handler.cc
+++ b/src/builtins/builtins-handler.cc
@@ -14,6 +14,21 @@
   KeyedLoadIC::GenerateMegamorphic(masm);
 }
 
+void Builtins::Generate_KeyedLoadIC_Megamorphic_TF(
+    CodeStubAssembler* assembler) {
+  typedef compiler::Node Node;
+  typedef LoadWithVectorDescriptor Descriptor;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  CodeStubAssembler::LoadICParameters p(context, receiver, name, slot, vector);
+  assembler->KeyedLoadICGeneric(&p);
+}
+
 void Builtins::Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
   KeyedLoadIC::GenerateMiss(masm);
 }
@@ -34,7 +49,7 @@
 }
 
 void Builtins::Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
-  ElementHandlerCompiler::GenerateStoreSlow(masm);
+  KeyedStoreIC::GenerateSlow(masm);
 }
 
 void Builtins::Generate_LoadGlobalIC_Miss(CodeStubAssembler* assembler) {
@@ -105,8 +120,8 @@
   Node* vector = assembler->Parameter(Descriptor::kVector);
   Node* context = assembler->Parameter(Descriptor::kContext);
 
-  assembler->TailCallRuntime(Runtime::kStoreIC_Miss, context, receiver, name,
-                             value, slot, vector);
+  assembler->TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot,
+                             vector, receiver, name);
 }
 
 void Builtins::Generate_StoreIC_Normal(MacroAssembler* masm) {
diff --git a/src/builtins/builtins-internal.cc b/src/builtins/builtins-internal.cc
index 87c5dd5..bec6ff3 100644
--- a/src/builtins/builtins-internal.cc
+++ b/src/builtins/builtins-internal.cc
@@ -64,12 +64,9 @@
   // Load the {object}s elements.
   Node* source = assembler->LoadObjectField(object, JSObject::kElementsOffset);
 
-  CodeStubAssembler::ParameterMode mode =
-      assembler->Is64() ? CodeStubAssembler::INTEGER_PARAMETERS
-                        : CodeStubAssembler::SMI_PARAMETERS;
-  Node* length = (mode == CodeStubAssembler::INTEGER_PARAMETERS)
-                     ? assembler->LoadAndUntagFixedArrayBaseLength(source)
-                     : assembler->LoadFixedArrayBaseLength(source);
+  CodeStubAssembler::ParameterMode mode = assembler->OptimalParameterMode();
+  Node* length = assembler->UntagParameter(
+      assembler->LoadFixedArrayBaseLength(source), mode);
 
   // Check if we can allocate in new space.
   ElementsKind kind = FAST_ELEMENTS;
@@ -111,9 +108,8 @@
 
   Label runtime(assembler, CodeStubAssembler::Label::kDeferred);
   Node* elements = assembler->LoadElements(object);
-  elements = assembler->CheckAndGrowElementsCapacity(
-      context, elements, FAST_DOUBLE_ELEMENTS, key, &runtime);
-  assembler->StoreObjectField(object, JSObject::kElementsOffset, elements);
+  elements = assembler->TryGrowElementsCapacity(
+      object, elements, FAST_DOUBLE_ELEMENTS, key, &runtime);
   assembler->Return(elements);
 
   assembler->Bind(&runtime);
@@ -132,9 +128,8 @@
 
   Label runtime(assembler, CodeStubAssembler::Label::kDeferred);
   Node* elements = assembler->LoadElements(object);
-  elements = assembler->CheckAndGrowElementsCapacity(
-      context, elements, FAST_ELEMENTS, key, &runtime);
-  assembler->StoreObjectField(object, JSObject::kElementsOffset, elements);
+  elements = assembler->TryGrowElementsCapacity(object, elements, FAST_ELEMENTS,
+                                                key, &runtime);
   assembler->Return(elements);
 
   assembler->Bind(&runtime);
diff --git a/src/builtins/builtins-interpreter.cc b/src/builtins/builtins-interpreter.cc
index 900172f..1609184 100644
--- a/src/builtins/builtins-interpreter.cc
+++ b/src/builtins/builtins-interpreter.cc
@@ -50,5 +50,27 @@
                                                  CallableType::kJSFunction);
 }
 
+Handle<Code> Builtins::InterpreterPushArgsAndConstruct(
+    CallableType function_type) {
+  switch (function_type) {
+    case CallableType::kJSFunction:
+      return InterpreterPushArgsAndConstructFunction();
+    case CallableType::kAny:
+      return InterpreterPushArgsAndConstruct();
+  }
+  UNREACHABLE();
+  return Handle<Code>::null();
+}
+
+void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+  return Generate_InterpreterPushArgsAndConstructImpl(masm, CallableType::kAny);
+}
+
+void Builtins::Generate_InterpreterPushArgsAndConstructFunction(
+    MacroAssembler* masm) {
+  return Generate_InterpreterPushArgsAndConstructImpl(
+      masm, CallableType::kJSFunction);
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/builtins/builtins-iterator.cc b/src/builtins/builtins-iterator.cc
new file mode 100644
index 0000000..7b91e36
--- /dev/null
+++ b/src/builtins/builtins-iterator.cc
@@ -0,0 +1,17 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/builtins/builtins.h"
+#include "src/builtins/builtins-utils.h"
+
+namespace v8 {
+namespace internal {
+
+void Builtins::Generate_IteratorPrototypeIterator(
+    CodeStubAssembler* assembler) {
+  assembler->Return(assembler->Parameter(0));
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/builtins/builtins-number.cc b/src/builtins/builtins-number.cc
index c2af0fd..1762844 100644
--- a/src/builtins/builtins-number.cc
+++ b/src/builtins/builtins-number.cc
@@ -11,6 +11,144 @@
 // -----------------------------------------------------------------------------
 // ES6 section 20.1 Number Objects
 
+// ES6 section 20.1.2.2 Number.isFinite ( number )
+void Builtins::Generate_NumberIsFinite(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* number = assembler->Parameter(1);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // Check if {number} is a Smi.
+  assembler->GotoIf(assembler->WordIsSmi(number), &return_true);
+
+  // Check if {number} is a HeapNumber.
+  assembler->GotoUnless(
+      assembler->WordEqual(assembler->LoadMap(number),
+                           assembler->HeapNumberMapConstant()),
+      &return_false);
+
+  // Check if {number} contains a finite, non-NaN value.
+  Node* number_value = assembler->LoadHeapNumberValue(number);
+  assembler->BranchIfFloat64IsNaN(
+      assembler->Float64Sub(number_value, number_value), &return_false,
+      &return_true);
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
+// ES6 section 20.1.2.3 Number.isInteger ( number )
+void Builtins::Generate_NumberIsInteger(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* number = assembler->Parameter(1);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // Check if {number} is a Smi.
+  assembler->GotoIf(assembler->WordIsSmi(number), &return_true);
+
+  // Check if {number} is a HeapNumber.
+  assembler->GotoUnless(
+      assembler->WordEqual(assembler->LoadMap(number),
+                           assembler->HeapNumberMapConstant()),
+      &return_false);
+
+  // Load the actual value of {number}.
+  Node* number_value = assembler->LoadHeapNumberValue(number);
+
+  // Truncate the value of {number} to an integer (or an infinity).
+  Node* integer = assembler->Float64Trunc(number_value);
+
+  // Check if {number}'s value matches the integer (ruling out the infinities).
+  assembler->BranchIfFloat64Equal(assembler->Float64Sub(number_value, integer),
+                                  assembler->Float64Constant(0.0), &return_true,
+                                  &return_false);
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
+// ES6 section 20.1.2.4 Number.isNaN ( number )
+void Builtins::Generate_NumberIsNaN(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* number = assembler->Parameter(1);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // Check if {number} is a Smi.
+  assembler->GotoIf(assembler->WordIsSmi(number), &return_false);
+
+  // Check if {number} is a HeapNumber.
+  assembler->GotoUnless(
+      assembler->WordEqual(assembler->LoadMap(number),
+                           assembler->HeapNumberMapConstant()),
+      &return_false);
+
+  // Check if {number} contains a NaN value.
+  Node* number_value = assembler->LoadHeapNumberValue(number);
+  assembler->BranchIfFloat64IsNaN(number_value, &return_true, &return_false);
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
+// ES6 section 20.1.2.5 Number.isSafeInteger ( number )
+void Builtins::Generate_NumberIsSafeInteger(CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* number = assembler->Parameter(1);
+
+  Label return_true(assembler), return_false(assembler);
+
+  // Check if {number} is a Smi.
+  assembler->GotoIf(assembler->WordIsSmi(number), &return_true);
+
+  // Check if {number} is a HeapNumber.
+  assembler->GotoUnless(
+      assembler->WordEqual(assembler->LoadMap(number),
+                           assembler->HeapNumberMapConstant()),
+      &return_false);
+
+  // Load the actual value of {number}.
+  Node* number_value = assembler->LoadHeapNumberValue(number);
+
+  // Truncate the value of {number} to an integer (or an infinity).
+  Node* integer = assembler->Float64Trunc(number_value);
+
+  // Check if {number}'s value matches the integer (ruling out the infinities).
+  assembler->GotoUnless(
+      assembler->Float64Equal(assembler->Float64Sub(number_value, integer),
+                              assembler->Float64Constant(0.0)),
+      &return_false);
+
+  // Check if the {integer} value is in safe integer range.
+  assembler->BranchIfFloat64LessThanOrEqual(
+      assembler->Float64Abs(integer),
+      assembler->Float64Constant(kMaxSafeInteger), &return_true, &return_false);
+
+  assembler->Bind(&return_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&return_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
 // ES6 section 20.1.3.2 Number.prototype.toExponential ( fractionDigits )
 BUILTIN(NumberPrototypeToExponential) {
   HandleScope scope(isolate);
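
Generate_NumberIsSafeInteger above combines two float checks: the value must equal its own truncation (which rules out fractions, NaN and the infinities, since trunc(inf) - inf is NaN) and the truncated magnitude must not exceed 2^53 - 1. A standalone sketch of the same predicate; illustrative only, not V8 code.

#include <cmath>
#include <cstdio>

bool IsSafeInteger(double v) {
  const double kMaxSafeInteger = 9007199254740991.0;  // 2^53 - 1
  double integer = std::trunc(v);
  if (!(v - integer == 0.0)) return false;  // fails for fractions, NaN and +/-infinity
  return std::fabs(integer) <= kMaxSafeInteger;
}

int main() {
  std::printf("%d %d %d\n", IsSafeInteger(3.0), IsSafeInteger(3.5),
              IsSafeInteger(9007199254740992.0));  // 1 0 0
}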
diff --git a/src/builtins/builtins-object.cc b/src/builtins/builtins-object.cc
index c422145..78df2d6 100644
--- a/src/builtins/builtins-object.cc
+++ b/src/builtins/builtins-object.cc
@@ -35,7 +35,7 @@
   Node* map = assembler->LoadMap(object);
   Node* instance_type = assembler->LoadMapInstanceType(map);
 
-  Variable var_index(assembler, MachineRepresentation::kWord32);
+  Variable var_index(assembler, MachineType::PointerRepresentation());
 
   Label keyisindex(assembler), if_iskeyunique(assembler);
   assembler->TryToName(key, &keyisindex, &var_index, &if_iskeyunique,
@@ -230,10 +230,8 @@
   {
     Node* instance_type = assembler->LoadInstanceType(object);
 
-    assembler->Branch(
-        assembler->Int32LessThan(
-            instance_type, assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-        if_string, if_notstring);
+    assembler->Branch(assembler->IsStringInstanceType(instance_type), if_string,
+                      if_notstring);
   }
 }
 
@@ -259,10 +257,8 @@
                        CodeStubAssembler::Label* return_string,
                        CodeStubAssembler::Label* return_boolean,
                        CodeStubAssembler::Label* return_number) {
-  assembler->GotoIf(
-      assembler->Int32LessThan(instance_type,
-                               assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-      return_string);
+  assembler->GotoIf(assembler->IsStringInstanceType(instance_type),
+                    return_string);
 
   assembler->GotoIf(assembler->Word32Equal(
                         instance_type, assembler->Int32Constant(ODDBALL_TYPE)),
@@ -910,5 +906,18 @@
   return *object;
 }
 
+// ES6 section 7.3.19 OrdinaryHasInstance ( C, O )
+void Builtins::Generate_OrdinaryHasInstance(CodeStubAssembler* assembler) {
+  typedef compiler::Node Node;
+  typedef CompareDescriptor Descriptor;
+
+  Node* constructor = assembler->Parameter(Descriptor::kLeft);
+  Node* object = assembler->Parameter(Descriptor::kRight);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  assembler->Return(
+      assembler->OrdinaryHasInstance(context, constructor, object));
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/builtins/builtins-regexp.cc b/src/builtins/builtins-regexp.cc
new file mode 100644
index 0000000..371221f
--- /dev/null
+++ b/src/builtins/builtins-regexp.cc
@@ -0,0 +1,441 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/builtins/builtins-utils.h"
+#include "src/builtins/builtins.h"
+
+#include "src/code-factory.h"
+#include "src/regexp/jsregexp.h"
+
+namespace v8 {
+namespace internal {
+
+// -----------------------------------------------------------------------------
+// ES6 section 21.2 RegExp Objects
+
+namespace {
+
+// ES#sec-isregexp IsRegExp ( argument )
+Maybe<bool> IsRegExp(Isolate* isolate, Handle<Object> object) {
+  if (!object->IsJSReceiver()) return Just(false);
+
+  Handle<JSReceiver> receiver = Handle<JSReceiver>::cast(object);
+
+  if (isolate->regexp_function()->initial_map() == receiver->map()) {
+    // Fast-path for unmodified JSRegExp instances.
+    return Just(true);
+  }
+
+  Handle<Object> match;
+  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+      isolate, match,
+      JSObject::GetProperty(receiver, isolate->factory()->match_symbol()),
+      Nothing<bool>());
+
+  if (!match->IsUndefined(isolate)) return Just(match->BooleanValue());
+  return Just(object->IsJSRegExp());
+}
+
+Handle<String> PatternFlags(Isolate* isolate, Handle<JSRegExp> regexp) {
+  static const int kMaxFlagsLength = 5 + 1;  // 5 flags and '\0';
+  char flags_string[kMaxFlagsLength];
+  int i = 0;
+
+  const JSRegExp::Flags flags = regexp->GetFlags();
+
+  if ((flags & JSRegExp::kGlobal) != 0) flags_string[i++] = 'g';
+  if ((flags & JSRegExp::kIgnoreCase) != 0) flags_string[i++] = 'i';
+  if ((flags & JSRegExp::kMultiline) != 0) flags_string[i++] = 'm';
+  if ((flags & JSRegExp::kUnicode) != 0) flags_string[i++] = 'u';
+  if ((flags & JSRegExp::kSticky) != 0) flags_string[i++] = 'y';
+
+  DCHECK_LT(i, kMaxFlagsLength);
+  memset(&flags_string[i], '\0', kMaxFlagsLength - i);
+
+  return isolate->factory()->NewStringFromAsciiChecked(flags_string);
+}
+
+// ES#sec-regexpinitialize
+// Runtime Semantics: RegExpInitialize ( obj, pattern, flags )
+MaybeHandle<JSRegExp> RegExpInitialize(Isolate* isolate,
+                                       Handle<JSRegExp> regexp,
+                                       Handle<Object> pattern,
+                                       Handle<Object> flags) {
+  Handle<String> pattern_string;
+  if (pattern->IsUndefined(isolate)) {
+    pattern_string = isolate->factory()->empty_string();
+  } else {
+    ASSIGN_RETURN_ON_EXCEPTION(isolate, pattern_string,
+                               Object::ToString(isolate, pattern), JSRegExp);
+  }
+
+  Handle<String> flags_string;
+  if (flags->IsUndefined(isolate)) {
+    flags_string = isolate->factory()->empty_string();
+  } else {
+    ASSIGN_RETURN_ON_EXCEPTION(isolate, flags_string,
+                               Object::ToString(isolate, flags), JSRegExp);
+  }
+
+  // TODO(jgruber): We could avoid the flags back and forth conversions.
+  RETURN_RESULT(isolate,
+                JSRegExp::Initialize(regexp, pattern_string, flags_string),
+                JSRegExp);
+}
+
+}  // namespace
+
+// ES#sec-regexp-pattern-flags
+// RegExp ( pattern, flags )
+BUILTIN(RegExpConstructor) {
+  HandleScope scope(isolate);
+
+  Handle<HeapObject> new_target = args.new_target();
+  Handle<Object> pattern = args.atOrUndefined(isolate, 1);
+  Handle<Object> flags = args.atOrUndefined(isolate, 2);
+
+  Handle<JSFunction> target = isolate->regexp_function();
+
+  bool pattern_is_regexp;
+  {
+    Maybe<bool> maybe_pattern_is_regexp = IsRegExp(isolate, pattern);
+    if (maybe_pattern_is_regexp.IsNothing()) {
+      DCHECK(isolate->has_pending_exception());
+      return isolate->heap()->exception();
+    }
+    pattern_is_regexp = maybe_pattern_is_regexp.FromJust();
+  }
+
+  if (new_target->IsUndefined(isolate)) {
+    new_target = target;
+
+    // ES6 section 21.2.3.1 step 3.b
+    if (pattern_is_regexp && flags->IsUndefined(isolate)) {
+      Handle<Object> pattern_constructor;
+      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
+          isolate, pattern_constructor,
+          Object::GetProperty(pattern,
+                              isolate->factory()->constructor_string()));
+
+      if (pattern_constructor.is_identical_to(new_target)) {
+        return *pattern;
+      }
+    }
+  }
+
+  if (pattern->IsJSRegExp()) {
+    Handle<JSRegExp> regexp_pattern = Handle<JSRegExp>::cast(pattern);
+
+    if (flags->IsUndefined(isolate)) {
+      flags = PatternFlags(isolate, regexp_pattern);
+    }
+    pattern = handle(regexp_pattern->source(), isolate);
+  } else if (pattern_is_regexp) {
+    Handle<Object> pattern_source;
+    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
+        isolate, pattern_source,
+        Object::GetProperty(pattern, isolate->factory()->source_string()));
+
+    if (flags->IsUndefined(isolate)) {
+      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
+          isolate, flags,
+          Object::GetProperty(pattern, isolate->factory()->flags_string()));
+    }
+    pattern = pattern_source;
+  }
+
+  Handle<JSReceiver> new_target_receiver = Handle<JSReceiver>::cast(new_target);
+
+  // TODO(jgruber): Fast-path for target == new_target == unmodified JSRegExp.
+
+  Handle<JSObject> object;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
+      isolate, object, JSObject::New(target, new_target_receiver));
+  Handle<JSRegExp> regexp = Handle<JSRegExp>::cast(object);
+
+  RETURN_RESULT_OR_FAILURE(isolate,
+                           RegExpInitialize(isolate, regexp, pattern, flags));
+}
+
+namespace {
+
+compiler::Node* LoadLastIndex(CodeStubAssembler* a, compiler::Node* context,
+                              compiler::Node* has_initialmap,
+                              compiler::Node* regexp) {
+  typedef CodeStubAssembler::Variable Variable;
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Variable var_value(a, MachineRepresentation::kTagged);
+
+  Label out(a), if_unmodified(a), if_modified(a, Label::kDeferred);
+  a->Branch(has_initialmap, &if_unmodified, &if_modified);
+
+  a->Bind(&if_unmodified);
+  {
+    // Load the in-object field.
+    static const int field_offset =
+        JSRegExp::kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
+    var_value.Bind(a->LoadObjectField(regexp, field_offset));
+    a->Goto(&out);
+  }
+
+  a->Bind(&if_modified);
+  {
+    // Load through the GetProperty stub.
+    Node* const name =
+        a->HeapConstant(a->isolate()->factory()->last_index_string());
+    Callable getproperty_callable = CodeFactory::GetProperty(a->isolate());
+    var_value.Bind(a->CallStub(getproperty_callable, context, regexp, name));
+    a->Goto(&out);
+  }
+
+  a->Bind(&out);
+  return var_value.value();
+}
+
+void StoreLastIndex(CodeStubAssembler* a, compiler::Node* context,
+                    compiler::Node* has_initialmap, compiler::Node* regexp,
+                    compiler::Node* value) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Label out(a), if_unmodified(a), if_modified(a, Label::kDeferred);
+  a->Branch(has_initialmap, &if_unmodified, &if_modified);
+
+  a->Bind(&if_unmodified);
+  {
+    // Store the in-object field.
+    static const int field_offset =
+        JSRegExp::kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
+    a->StoreObjectField(regexp, field_offset, value);
+    a->Goto(&out);
+  }
+
+  a->Bind(&if_modified);
+  {
+    // Store through runtime.
+    // TODO(ishell): Use SetPropertyStub here once available.
+    Node* const name =
+        a->HeapConstant(a->isolate()->factory()->last_index_string());
+    Node* const language_mode = a->SmiConstant(Smi::FromInt(STRICT));
+    a->CallRuntime(Runtime::kSetProperty, context, regexp, name, value,
+                   language_mode);
+    a->Goto(&out);
+  }
+
+  a->Bind(&out);
+}
+
+compiler::Node* ConstructNewResultFromMatchInfo(Isolate* isolate,
+                                                CodeStubAssembler* a,
+                                                compiler::Node* context,
+                                                compiler::Node* match_elements,
+                                                compiler::Node* string) {
+  typedef CodeStubAssembler::Variable Variable;
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Label out(a);
+
+  CodeStubAssembler::ParameterMode mode = CodeStubAssembler::INTPTR_PARAMETERS;
+  Node* const num_indices = a->SmiUntag(a->LoadFixedArrayElement(
+      match_elements, a->IntPtrConstant(RegExpImpl::kLastCaptureCount), 0,
+      mode));
+  Node* const num_results = a->SmiTag(a->WordShr(num_indices, 1));
+  Node* const start = a->LoadFixedArrayElement(
+      match_elements, a->IntPtrConstant(RegExpImpl::kFirstCapture), 0, mode);
+  Node* const end = a->LoadFixedArrayElement(
+      match_elements, a->IntPtrConstant(RegExpImpl::kFirstCapture + 1), 0,
+      mode);
+
+  // Calculate the substring of the first match before creating the result array
+  // to avoid an unnecessary write barrier storing the first result.
+  Node* const first = a->SubString(context, string, start, end);
+
+  Node* const result =
+      a->AllocateRegExpResult(context, num_results, start, string);
+  Node* const result_elements = a->LoadElements(result);
+
+  a->StoreFixedArrayElement(result_elements, a->IntPtrConstant(0), first,
+                            SKIP_WRITE_BARRIER);
+
+  a->GotoIf(a->SmiEqual(num_results, a->SmiConstant(Smi::FromInt(1))), &out);
+
+  // Store all remaining captures.
+  Node* const limit =
+      a->IntPtrAdd(a->IntPtrConstant(RegExpImpl::kFirstCapture), num_indices);
+
+  Variable var_from_cursor(a, MachineType::PointerRepresentation());
+  Variable var_to_cursor(a, MachineType::PointerRepresentation());
+
+  var_from_cursor.Bind(a->IntPtrConstant(RegExpImpl::kFirstCapture + 2));
+  var_to_cursor.Bind(a->IntPtrConstant(1));
+
+  Variable* vars[] = {&var_from_cursor, &var_to_cursor};
+  Label loop(a, 2, vars);
+
+  a->Goto(&loop);
+  a->Bind(&loop);
+  {
+    Node* const from_cursor = var_from_cursor.value();
+    Node* const to_cursor = var_to_cursor.value();
+    Node* const start = a->LoadFixedArrayElement(match_elements, from_cursor);
+
+    Label next_iter(a);
+    a->GotoIf(a->SmiEqual(start, a->SmiConstant(Smi::FromInt(-1))), &next_iter);
+
+    Node* const from_cursor_plus1 =
+        a->IntPtrAdd(from_cursor, a->IntPtrConstant(1));
+    Node* const end =
+        a->LoadFixedArrayElement(match_elements, from_cursor_plus1);
+
+    Node* const capture = a->SubString(context, string, start, end);
+    a->StoreFixedArrayElement(result_elements, to_cursor, capture);
+    a->Goto(&next_iter);
+
+    a->Bind(&next_iter);
+    var_from_cursor.Bind(a->IntPtrAdd(from_cursor, a->IntPtrConstant(2)));
+    var_to_cursor.Bind(a->IntPtrAdd(to_cursor, a->IntPtrConstant(1)));
+    a->Branch(a->UintPtrLessThan(var_from_cursor.value(), limit), &loop, &out);
+  }
+
+  a->Bind(&out);
+  return result;
+}
+
+}  // namespace
+
+// ES#sec-regexp.prototype.exec
+// RegExp.prototype.exec ( string )
+void Builtins::Generate_RegExpPrototypeExec(CodeStubAssembler* a) {
+  typedef CodeStubAssembler::Variable Variable;
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Isolate* const isolate = a->isolate();
+
+  Node* const receiver = a->Parameter(0);
+  Node* const maybe_string = a->Parameter(1);
+  Node* const context = a->Parameter(4);
+
+  Node* const null = a->NullConstant();
+  Node* const int_zero = a->IntPtrConstant(0);
+  Node* const smi_zero = a->SmiConstant(Smi::FromInt(0));
+
+  // Ensure {receiver} is a JSRegExp.
+  Node* const regexp_map = a->ThrowIfNotInstanceType(
+      context, receiver, JS_REGEXP_TYPE, "RegExp.prototype.exec");
+  Node* const regexp = receiver;
+
+  // Check whether the regexp instance is unmodified.
+  Node* const native_context = a->LoadNativeContext(context);
+  Node* const regexp_fun =
+      a->LoadContextElement(native_context, Context::REGEXP_FUNCTION_INDEX);
+  Node* const initial_map =
+      a->LoadObjectField(regexp_fun, JSFunction::kPrototypeOrInitialMapOffset);
+  Node* const has_initialmap = a->WordEqual(regexp_map, initial_map);
+
+  // Convert {maybe_string} to a string.
+  Callable tostring_callable = CodeFactory::ToString(isolate);
+  Node* const string = a->CallStub(tostring_callable, context, maybe_string);
+  Node* const string_length = a->LoadStringLength(string);
+
+  // Check whether the regexp is global or sticky, which determines whether we
+  // update last index later on.
+  Node* const flags = a->LoadObjectField(regexp, JSRegExp::kFlagsOffset);
+  Node* const is_global_or_sticky =
+      a->WordAnd(a->SmiUntag(flags),
+                 a->IntPtrConstant(JSRegExp::kGlobal | JSRegExp::kSticky));
+  Node* const should_update_last_index =
+      a->WordNotEqual(is_global_or_sticky, int_zero);
+
+  // Grab and possibly update last index.
+  Label run_exec(a);
+  Variable var_lastindex(a, MachineRepresentation::kTagged);
+  {
+    Label if_doupdate(a), if_dontupdate(a);
+    a->Branch(should_update_last_index, &if_doupdate, &if_dontupdate);
+
+    a->Bind(&if_doupdate);
+    {
+      Node* const regexp_lastindex =
+          LoadLastIndex(a, context, has_initialmap, regexp);
+
+      Callable tolength_callable = CodeFactory::ToLength(isolate);
+      Node* const lastindex =
+          a->CallStub(tolength_callable, context, regexp_lastindex);
+      var_lastindex.Bind(lastindex);
+
+      Label if_isoob(a, Label::kDeferred);
+      a->GotoUnless(a->WordIsSmi(lastindex), &if_isoob);
+      a->GotoUnless(a->SmiLessThanOrEqual(lastindex, string_length), &if_isoob);
+      a->Goto(&run_exec);
+
+      a->Bind(&if_isoob);
+      {
+        StoreLastIndex(a, context, has_initialmap, regexp, smi_zero);
+        a->Return(null);
+      }
+    }
+
+    a->Bind(&if_dontupdate);
+    {
+      var_lastindex.Bind(smi_zero);
+      a->Goto(&run_exec);
+    }
+  }
+
+  Node* match_indices;
+  Label successful_match(a);
+  a->Bind(&run_exec);
+  {
+    // Get last match info from the context.
+    Node* const last_match_info = a->LoadContextElement(
+        native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX);
+
+    // Call the exec stub.
+    Callable exec_callable = CodeFactory::RegExpExec(isolate);
+    match_indices = a->CallStub(exec_callable, context, regexp, string,
+                                var_lastindex.value(), last_match_info);
+
+    // {match_indices} is either null or the RegExpLastMatchInfo array.
+    // Return early if exec failed, possibly updating last index.
+    a->GotoUnless(a->WordEqual(match_indices, null), &successful_match);
+
+    Label return_null(a);
+    a->GotoUnless(should_update_last_index, &return_null);
+
+    StoreLastIndex(a, context, has_initialmap, regexp, smi_zero);
+    a->Goto(&return_null);
+
+    a->Bind(&return_null);
+    a->Return(null);
+  }
+
+  Label construct_result(a);
+  a->Bind(&successful_match);
+  {
+    Node* const match_elements = a->LoadElements(match_indices);
+
+    a->GotoUnless(should_update_last_index, &construct_result);
+
+    // Update the new last index from {match_indices}.
+    Node* const new_lastindex = a->LoadFixedArrayElement(
+        match_elements, a->IntPtrConstant(RegExpImpl::kFirstCapture + 1));
+
+    StoreLastIndex(a, context, has_initialmap, regexp, new_lastindex);
+    a->Goto(&construct_result);
+
+    a->Bind(&construct_result);
+    {
+      Node* result = ConstructNewResultFromMatchInfo(isolate, a, context,
+                                                     match_elements, string);
+      a->Return(result);
+    }
+  }
+}
+
+}  // namespace internal
+}  // namespace v8
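
PatternFlags above rebuilds a regexp's flag string by appending one letter per set flag in the fixed order g, i, m, u, y into a buffer sized for five flags plus the terminator. A standalone sketch of that construction; the enum bit values below are invented for the example and need not match JSRegExp::Flags.

#include <cstdio>
#include <cstring>

enum Flags { kGlobal = 1, kIgnoreCase = 2, kMultiline = 4, kUnicode = 8, kSticky = 16 };

void BuildFlagsString(int flags, char out[6]) {
  int i = 0;
  if (flags & kGlobal) out[i++] = 'g';
  if (flags & kIgnoreCase) out[i++] = 'i';
  if (flags & kMultiline) out[i++] = 'm';
  if (flags & kUnicode) out[i++] = 'u';
  if (flags & kSticky) out[i++] = 'y';
  std::memset(&out[i], '\0', 6 - i);  // zero the remainder, including the terminator
}

int main() {
  char buf[6];
  BuildFlagsString(kGlobal | kSticky, buf);
  std::printf("%s\n", buf);  // gy
}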
diff --git a/src/builtins/builtins-sharedarraybuffer.cc b/src/builtins/builtins-sharedarraybuffer.cc
index 23d4f43..6aad4da 100644
--- a/src/builtins/builtins-sharedarraybuffer.cc
+++ b/src/builtins/builtins-sharedarraybuffer.cc
@@ -141,6 +141,7 @@
   using namespace compiler;
   // Check if the index is in bounds. If not, throw RangeError.
   CodeStubAssembler::Label if_inbounds(a), if_notinbounds(a);
+  // TODO(jkummerow): Use unsigned comparison instead of "i<0 || i>length".
   a->Branch(
       a->WordOr(a->Int32LessThan(index_word, a->Int32Constant(0)),
                 a->Int32GreaterThanOrEqual(index_word, array_length_word)),
@@ -227,8 +228,7 @@
   ValidateAtomicIndex(a, index_word32, array_length_word32, context);
   Node* index_word = a->ChangeUint32ToWord(index_word32);
 
-  Callable to_integer = CodeFactory::ToInteger(a->isolate());
-  Node* value_integer = a->CallStub(to_integer, context, value);
+  Node* value_integer = a->ToInteger(context, value);
   Node* value_word32 = a->TruncateTaggedToWord32(context, value_integer);
 
   CodeStubAssembler::Label u8(a), u16(a), u32(a), other(a);
@@ -248,8 +248,8 @@
   a->Return(value_integer);
 
   a->Bind(&u16);
-  a->SmiTag(a->AtomicStore(MachineRepresentation::kWord16, backing_store,
-                           a->WordShl(index_word, 1), value_word32));
+  a->AtomicStore(MachineRepresentation::kWord16, backing_store,
+                 a->WordShl(index_word, 1), value_word32);
   a->Return(value_integer);
 
   a->Bind(&u32);
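
The TODO(jkummerow) added above notes that the two-branch signed bounds check can collapse into one unsigned comparison: for a non-negative length, casting a negative index to uint32 yields a value at least as large as the length, so a single unsigned "index < length" test covers both sides. A standalone sketch, not V8 code.

#include <cstdint>
#include <cstdio>

bool InBounds(int32_t index, int32_t length) {
  // Equivalent to (index >= 0 && index < length) when length >= 0.
  return static_cast<uint32_t>(index) < static_cast<uint32_t>(length);
}

int main() {
  std::printf("%d %d %d\n", InBounds(3, 8), InBounds(-1, 8), InBounds(8, 8));  // 1 0 0
}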
diff --git a/src/builtins/builtins-string.cc b/src/builtins/builtins-string.cc
index d38f6b0..68d2bd0 100644
--- a/src/builtins/builtins-string.cc
+++ b/src/builtins/builtins-string.cc
@@ -10,6 +10,408 @@
 namespace v8 {
 namespace internal {
 
+namespace {
+
+enum ResultMode { kDontNegateResult, kNegateResult };
+
+void GenerateStringEqual(CodeStubAssembler* assembler, ResultMode mode) {
+  // Here's pseudo-code for the algorithm below in case of kDontNegateResult
+  // mode; for kNegateResult mode we properly negate the result.
+  //
+  // if (lhs == rhs) return true;
+  // if (lhs->length() != rhs->length()) return false;
+  // if (lhs->IsInternalizedString() && rhs->IsInternalizedString()) {
+  //   return false;
+  // }
+  // if (lhs->IsSeqOneByteString() && rhs->IsSeqOneByteString()) {
+  //   for (i = 0; i != lhs->length(); ++i) {
+  //     if (lhs[i] != rhs[i]) return false;
+  //   }
+  //   return true;
+  // }
+  // return %StringEqual(lhs, rhs);
+
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Node* lhs = assembler->Parameter(0);
+  Node* rhs = assembler->Parameter(1);
+  Node* context = assembler->Parameter(2);
+
+  Label if_equal(assembler), if_notequal(assembler);
+
+  // Fast check to see if {lhs} and {rhs} refer to the same String object.
+  Label if_same(assembler), if_notsame(assembler);
+  assembler->Branch(assembler->WordEqual(lhs, rhs), &if_same, &if_notsame);
+
+  assembler->Bind(&if_same);
+  assembler->Goto(&if_equal);
+
+  assembler->Bind(&if_notsame);
+  {
+    // The {lhs} and {rhs} don't refer to the exact same String object.
+
+    // Load the length of {lhs} and {rhs}.
+    Node* lhs_length = assembler->LoadStringLength(lhs);
+    Node* rhs_length = assembler->LoadStringLength(rhs);
+
+    // Check if the lengths of {lhs} and {rhs} are equal.
+    Label if_lengthisequal(assembler), if_lengthisnotequal(assembler);
+    assembler->Branch(assembler->WordEqual(lhs_length, rhs_length),
+                      &if_lengthisequal, &if_lengthisnotequal);
+
+    assembler->Bind(&if_lengthisequal);
+    {
+      // Load instance types of {lhs} and {rhs}.
+      Node* lhs_instance_type = assembler->LoadInstanceType(lhs);
+      Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
+
+      // Combine the instance types into a single 16-bit value, so we can check
+      // both of them at once.
+      Node* both_instance_types = assembler->Word32Or(
+          lhs_instance_type,
+          assembler->Word32Shl(rhs_instance_type, assembler->Int32Constant(8)));
+
+      // Check if both {lhs} and {rhs} are internalized.
+      int const kBothInternalizedMask =
+          kIsNotInternalizedMask | (kIsNotInternalizedMask << 8);
+      int const kBothInternalizedTag =
+          kInternalizedTag | (kInternalizedTag << 8);
+      Label if_bothinternalized(assembler), if_notbothinternalized(assembler);
+      assembler->Branch(assembler->Word32Equal(
+                            assembler->Word32And(both_instance_types,
+                                                 assembler->Int32Constant(
+                                                     kBothInternalizedMask)),
+                            assembler->Int32Constant(kBothInternalizedTag)),
+                        &if_bothinternalized, &if_notbothinternalized);
+
+      assembler->Bind(&if_bothinternalized);
+      {
+        // Fast negative check for internalized-to-internalized equality.
+        assembler->Goto(&if_notequal);
+      }
+
+      assembler->Bind(&if_notbothinternalized);
+      {
+        // Check that both {lhs} and {rhs} are flat one-byte strings.
+        int const kBothSeqOneByteStringMask =
+            kStringEncodingMask | kStringRepresentationMask |
+            ((kStringEncodingMask | kStringRepresentationMask) << 8);
+        int const kBothSeqOneByteStringTag =
+            kOneByteStringTag | kSeqStringTag |
+            ((kOneByteStringTag | kSeqStringTag) << 8);
+        Label if_bothonebyteseqstrings(assembler),
+            if_notbothonebyteseqstrings(assembler);
+        assembler->Branch(
+            assembler->Word32Equal(
+                assembler->Word32And(
+                    both_instance_types,
+                    assembler->Int32Constant(kBothSeqOneByteStringMask)),
+                assembler->Int32Constant(kBothSeqOneByteStringTag)),
+            &if_bothonebyteseqstrings, &if_notbothonebyteseqstrings);
+
+        assembler->Bind(&if_bothonebyteseqstrings);
+        {
+          // Compute the effective offset of the first character.
+          Node* begin = assembler->IntPtrConstant(
+              SeqOneByteString::kHeaderSize - kHeapObjectTag);
+
+          // Compute the first offset after the string from the length.
+          Node* end =
+              assembler->IntPtrAdd(begin, assembler->SmiUntag(lhs_length));
+
+          // Loop over the {lhs} and {rhs} strings to see if they are equal.
+          Variable var_offset(assembler, MachineType::PointerRepresentation());
+          Label loop(assembler, &var_offset);
+          var_offset.Bind(begin);
+          assembler->Goto(&loop);
+          assembler->Bind(&loop);
+          {
+            // Check if {offset} equals {end}.
+            Node* offset = var_offset.value();
+            Label if_done(assembler), if_notdone(assembler);
+            assembler->Branch(assembler->WordEqual(offset, end), &if_done,
+                              &if_notdone);
+
+            assembler->Bind(&if_notdone);
+            {
+              // Load the next characters from {lhs} and {rhs}.
+              Node* lhs_value =
+                  assembler->Load(MachineType::Uint8(), lhs, offset);
+              Node* rhs_value =
+                  assembler->Load(MachineType::Uint8(), rhs, offset);
+
+              // Check if the characters match.
+              Label if_valueissame(assembler), if_valueisnotsame(assembler);
+              assembler->Branch(assembler->Word32Equal(lhs_value, rhs_value),
+                                &if_valueissame, &if_valueisnotsame);
+
+              assembler->Bind(&if_valueissame);
+              {
+                // Advance to next character.
+                var_offset.Bind(
+                    assembler->IntPtrAdd(offset, assembler->IntPtrConstant(1)));
+              }
+              assembler->Goto(&loop);
+
+              assembler->Bind(&if_valueisnotsame);
+              assembler->Goto(&if_notequal);
+            }
+
+            assembler->Bind(&if_done);
+            assembler->Goto(&if_equal);
+          }
+        }
+
+        assembler->Bind(&if_notbothonebyteseqstrings);
+        {
+          // TODO(bmeurer): Add fast case support for flattened cons strings;
+          // also add support for two byte string equality checks.
+          Runtime::FunctionId function_id = (mode == kDontNegateResult)
+                                                ? Runtime::kStringEqual
+                                                : Runtime::kStringNotEqual;
+          assembler->TailCallRuntime(function_id, context, lhs, rhs);
+        }
+      }
+    }
+
+    assembler->Bind(&if_lengthisnotequal);
+    {
+      // Mismatch in length of {lhs} and {rhs}, cannot be equal.
+      assembler->Goto(&if_notequal);
+    }
+  }
+
+  assembler->Bind(&if_equal);
+  assembler->Return(assembler->BooleanConstant(mode == kDontNegateResult));
+
+  assembler->Bind(&if_notequal);
+  assembler->Return(assembler->BooleanConstant(mode == kNegateResult));
+}
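The pseudo-code at the top of GenerateStringEqual lays out the fast-path ladder: identity check, length check, the internalized-string negative check, then a byte-wise scan of flat one-byte strings. A rough standalone C++ sketch of the same ladder on plain data (hypothetical helper, not part of the patch; the internalized check has no analogue outside the VM and is omitted):

#include <cstddef>
#include <string>

// Mirrors the order of checks in the builtin: same object, then length,
// then a character-by-character scan; anything fancier falls back elsewhere.
bool StringsEqualSketch(const std::string& lhs, const std::string& rhs) {
  if (&lhs == &rhs) return true;               // {lhs} == {rhs}
  if (lhs.size() != rhs.size()) return false;  // length mismatch
  for (std::size_t i = 0; i != lhs.size(); ++i) {
    if (lhs[i] != rhs[i]) return false;        // character mismatch
  }
  return true;
}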
+
+enum RelationalComparisonMode {
+  kLessThan,
+  kLessThanOrEqual,
+  kGreaterThan,
+  kGreaterThanOrEqual
+};
+
+void GenerateStringRelationalComparison(CodeStubAssembler* assembler,
+                                        RelationalComparisonMode mode) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Node* lhs = assembler->Parameter(0);
+  Node* rhs = assembler->Parameter(1);
+  Node* context = assembler->Parameter(2);
+
+  Label if_less(assembler), if_equal(assembler), if_greater(assembler);
+
+  // Fast check to see if {lhs} and {rhs} refer to the same String object.
+  Label if_same(assembler), if_notsame(assembler);
+  assembler->Branch(assembler->WordEqual(lhs, rhs), &if_same, &if_notsame);
+
+  assembler->Bind(&if_same);
+  assembler->Goto(&if_equal);
+
+  assembler->Bind(&if_notsame);
+  {
+    // Load instance types of {lhs} and {rhs}.
+    Node* lhs_instance_type = assembler->LoadInstanceType(lhs);
+    Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
+
+    // Combine the instance types into a single 16-bit value, so we can check
+    // both of them at once.
+    Node* both_instance_types = assembler->Word32Or(
+        lhs_instance_type,
+        assembler->Word32Shl(rhs_instance_type, assembler->Int32Constant(8)));
+
+    // Check that both {lhs} and {rhs} are flat one-byte strings.
+    int const kBothSeqOneByteStringMask =
+        kStringEncodingMask | kStringRepresentationMask |
+        ((kStringEncodingMask | kStringRepresentationMask) << 8);
+    int const kBothSeqOneByteStringTag =
+        kOneByteStringTag | kSeqStringTag |
+        ((kOneByteStringTag | kSeqStringTag) << 8);
+    Label if_bothonebyteseqstrings(assembler),
+        if_notbothonebyteseqstrings(assembler);
+    assembler->Branch(assembler->Word32Equal(
+                          assembler->Word32And(both_instance_types,
+                                               assembler->Int32Constant(
+                                                   kBothSeqOneByteStringMask)),
+                          assembler->Int32Constant(kBothSeqOneByteStringTag)),
+                      &if_bothonebyteseqstrings, &if_notbothonebyteseqstrings);
+
+    assembler->Bind(&if_bothonebyteseqstrings);
+    {
+      // Load the length of {lhs} and {rhs}.
+      Node* lhs_length = assembler->LoadStringLength(lhs);
+      Node* rhs_length = assembler->LoadStringLength(rhs);
+
+      // Determine the minimum length.
+      Node* length = assembler->SmiMin(lhs_length, rhs_length);
+
+      // Compute the effective offset of the first character.
+      Node* begin = assembler->IntPtrConstant(SeqOneByteString::kHeaderSize -
+                                              kHeapObjectTag);
+
+      // Compute the first offset after the string from the length.
+      Node* end = assembler->IntPtrAdd(begin, assembler->SmiUntag(length));
+
+      // Loop over the {lhs} and {rhs} strings to see if they are equal.
+      Variable var_offset(assembler, MachineType::PointerRepresentation());
+      Label loop(assembler, &var_offset);
+      var_offset.Bind(begin);
+      assembler->Goto(&loop);
+      assembler->Bind(&loop);
+      {
+        // Check if {offset} equals {end}.
+        Node* offset = var_offset.value();
+        Label if_done(assembler), if_notdone(assembler);
+        assembler->Branch(assembler->WordEqual(offset, end), &if_done,
+                          &if_notdone);
+
+        assembler->Bind(&if_notdone);
+        {
+          // Load the next characters from {lhs} and {rhs}.
+          Node* lhs_value = assembler->Load(MachineType::Uint8(), lhs, offset);
+          Node* rhs_value = assembler->Load(MachineType::Uint8(), rhs, offset);
+
+          // Check if the characters match.
+          Label if_valueissame(assembler), if_valueisnotsame(assembler);
+          assembler->Branch(assembler->Word32Equal(lhs_value, rhs_value),
+                            &if_valueissame, &if_valueisnotsame);
+
+          assembler->Bind(&if_valueissame);
+          {
+            // Advance to next character.
+            var_offset.Bind(
+                assembler->IntPtrAdd(offset, assembler->IntPtrConstant(1)));
+          }
+          assembler->Goto(&loop);
+
+          assembler->Bind(&if_valueisnotsame);
+          assembler->BranchIf(assembler->Uint32LessThan(lhs_value, rhs_value),
+                              &if_less, &if_greater);
+        }
+
+        assembler->Bind(&if_done);
+        {
+          // All characters up to the min length are equal, decide based on
+          // string length.
+          Label if_lengthisequal(assembler), if_lengthisnotequal(assembler);
+          assembler->Branch(assembler->SmiEqual(lhs_length, rhs_length),
+                            &if_lengthisequal, &if_lengthisnotequal);
+
+          assembler->Bind(&if_lengthisequal);
+          assembler->Goto(&if_equal);
+
+          assembler->Bind(&if_lengthisnotequal);
+          assembler->BranchIfSmiLessThan(lhs_length, rhs_length, &if_less,
+                                         &if_greater);
+        }
+      }
+    }
+
+    assembler->Bind(&if_notbothonebyteseqstrings);
+    {
+      // TODO(bmeurer): Add fast case support for flattened cons strings;
+      // also add support for two byte string relational comparisons.
+      switch (mode) {
+        case kLessThan:
+          assembler->TailCallRuntime(Runtime::kStringLessThan, context, lhs,
+                                     rhs);
+          break;
+        case kLessThanOrEqual:
+          assembler->TailCallRuntime(Runtime::kStringLessThanOrEqual, context,
+                                     lhs, rhs);
+          break;
+        case kGreaterThan:
+          assembler->TailCallRuntime(Runtime::kStringGreaterThan, context, lhs,
+                                     rhs);
+          break;
+        case kGreaterThanOrEqual:
+          assembler->TailCallRuntime(Runtime::kStringGreaterThanOrEqual,
+                                     context, lhs, rhs);
+          break;
+      }
+    }
+  }
+
+  assembler->Bind(&if_less);
+  switch (mode) {
+    case kLessThan:
+    case kLessThanOrEqual:
+      assembler->Return(assembler->BooleanConstant(true));
+      break;
+
+    case kGreaterThan:
+    case kGreaterThanOrEqual:
+      assembler->Return(assembler->BooleanConstant(false));
+      break;
+  }
+
+  assembler->Bind(&if_equal);
+  switch (mode) {
+    case kLessThan:
+    case kGreaterThan:
+      assembler->Return(assembler->BooleanConstant(false));
+      break;
+
+    case kLessThanOrEqual:
+    case kGreaterThanOrEqual:
+      assembler->Return(assembler->BooleanConstant(true));
+      break;
+  }
+
+  assembler->Bind(&if_greater);
+  switch (mode) {
+    case kLessThan:
+    case kLessThanOrEqual:
+      assembler->Return(assembler->BooleanConstant(false));
+      break;
+
+    case kGreaterThan:
+    case kGreaterThanOrEqual:
+      assembler->Return(assembler->BooleanConstant(true));
+      break;
+  }
+}
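The comparison loop above walks both flat one-byte strings up to the shorter length and, only when every character matches, decides on string length; in other words, ordinary lexicographic order over unsigned character values. A standalone sketch of that ordering (hypothetical helper, not part of the patch):

#include <cstddef>
#include <cstdint>
#include <string>

// Returns -1, 0 or +1, corresponding to the if_less / if_equal / if_greater
// exits above: compare unsigned character values up to the minimum length,
// then break ties by length.
int CompareStringsSketch(const std::string& lhs, const std::string& rhs) {
  std::size_t min_length = lhs.size() < rhs.size() ? lhs.size() : rhs.size();
  for (std::size_t i = 0; i != min_length; ++i) {
    uint8_t a = static_cast<uint8_t>(lhs[i]);
    uint8_t b = static_cast<uint8_t>(rhs[i]);
    if (a != b) return a < b ? -1 : 1;
  }
  if (lhs.size() == rhs.size()) return 0;
  return lhs.size() < rhs.size() ? -1 : 1;
}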
+
+}  // namespace
+
+// static
+void Builtins::Generate_StringEqual(CodeStubAssembler* assembler) {
+  GenerateStringEqual(assembler, kDontNegateResult);
+}
+
+// static
+void Builtins::Generate_StringNotEqual(CodeStubAssembler* assembler) {
+  GenerateStringEqual(assembler, kNegateResult);
+}
+
+// static
+void Builtins::Generate_StringLessThan(CodeStubAssembler* assembler) {
+  GenerateStringRelationalComparison(assembler, kLessThan);
+}
+
+// static
+void Builtins::Generate_StringLessThanOrEqual(CodeStubAssembler* assembler) {
+  GenerateStringRelationalComparison(assembler, kLessThanOrEqual);
+}
+
+// static
+void Builtins::Generate_StringGreaterThan(CodeStubAssembler* assembler) {
+  GenerateStringRelationalComparison(assembler, kGreaterThan);
+}
+
+// static
+void Builtins::Generate_StringGreaterThanOrEqual(CodeStubAssembler* assembler) {
+  GenerateStringRelationalComparison(assembler, kGreaterThanOrEqual);
+}
+
 // -----------------------------------------------------------------------------
 // ES6 section 21.1 String Objects
 
@@ -294,7 +696,6 @@
 void Builtins::Generate_StringPrototypeCharAt(CodeStubAssembler* assembler) {
   typedef CodeStubAssembler::Label Label;
   typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
 
   Node* receiver = assembler->Parameter(0);
   Node* position = assembler->Parameter(1);
@@ -306,73 +707,24 @@
 
   // Convert the {position} to a Smi and check that it's in bounds of the
   // {receiver}.
-  // TODO(bmeurer): Find an abstraction for this!
   {
-    // Check if the {position} is already a Smi.
-    Variable var_position(assembler, MachineRepresentation::kTagged);
-    var_position.Bind(position);
-    Label if_positionissmi(assembler),
-        if_positionisnotsmi(assembler, Label::kDeferred);
-    assembler->Branch(assembler->WordIsSmi(position), &if_positionissmi,
-                      &if_positionisnotsmi);
-    assembler->Bind(&if_positionisnotsmi);
-    {
-      // Convert the {position} to an Integer via the ToIntegerStub.
-      Callable callable = CodeFactory::ToInteger(assembler->isolate());
-      Node* index = assembler->CallStub(callable, context, position);
-
-      // Check if the resulting {index} is now a Smi.
-      Label if_indexissmi(assembler, Label::kDeferred),
-          if_indexisnotsmi(assembler, Label::kDeferred);
-      assembler->Branch(assembler->WordIsSmi(index), &if_indexissmi,
-                        &if_indexisnotsmi);
-
-      assembler->Bind(&if_indexissmi);
-      {
-        var_position.Bind(index);
-        assembler->Goto(&if_positionissmi);
-      }
-
-      assembler->Bind(&if_indexisnotsmi);
-      {
-        // The ToIntegerStub canonicalizes everything in Smi range to Smi
-        // representation, so any HeapNumber returned is not in Smi range.
-        // The only exception here is -0.0, which we treat as 0.
-        Node* index_value = assembler->LoadHeapNumberValue(index);
-        Label if_indexiszero(assembler, Label::kDeferred),
-            if_indexisnotzero(assembler, Label::kDeferred);
-        assembler->Branch(assembler->Float64Equal(
-                              index_value, assembler->Float64Constant(0.0)),
-                          &if_indexiszero, &if_indexisnotzero);
-
-        assembler->Bind(&if_indexiszero);
-        {
-          var_position.Bind(assembler->SmiConstant(Smi::FromInt(0)));
-          assembler->Goto(&if_positionissmi);
-        }
-
-        assembler->Bind(&if_indexisnotzero);
-        {
-          // The {index} is some other integral Number, that is definitely
-          // neither -0.0 nor in Smi range.
-          assembler->Return(assembler->EmptyStringConstant());
-        }
-      }
-    }
-    assembler->Bind(&if_positionissmi);
-    position = var_position.value();
+    Label return_emptystring(assembler, Label::kDeferred);
+    position = assembler->ToInteger(context, position,
+                                    CodeStubAssembler::kTruncateMinusZero);
+    assembler->GotoUnless(assembler->WordIsSmi(position), &return_emptystring);
 
     // Determine the actual length of the {receiver} String.
     Node* receiver_length =
         assembler->LoadObjectField(receiver, String::kLengthOffset);
 
     // Return "" if the Smi {position} is outside the bounds of the {receiver}.
-    Label if_positioninbounds(assembler),
-        if_positionnotinbounds(assembler, Label::kDeferred);
+    Label if_positioninbounds(assembler);
     assembler->Branch(assembler->SmiAboveOrEqual(position, receiver_length),
-                      &if_positionnotinbounds, &if_positioninbounds);
-    assembler->Bind(&if_positionnotinbounds);
+                      &return_emptystring, &if_positioninbounds);
+
+    assembler->Bind(&return_emptystring);
     assembler->Return(assembler->EmptyStringConstant());
+
     assembler->Bind(&if_positioninbounds);
   }
 
@@ -389,7 +741,6 @@
     CodeStubAssembler* assembler) {
   typedef CodeStubAssembler::Label Label;
   typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
 
   Node* receiver = assembler->Parameter(0);
   Node* position = assembler->Parameter(1);
@@ -401,73 +752,24 @@
 
   // Convert the {position} to a Smi and check that it's in bounds of the
   // {receiver}.
-  // TODO(bmeurer): Find an abstraction for this!
   {
-    // Check if the {position} is already a Smi.
-    Variable var_position(assembler, MachineRepresentation::kTagged);
-    var_position.Bind(position);
-    Label if_positionissmi(assembler),
-        if_positionisnotsmi(assembler, Label::kDeferred);
-    assembler->Branch(assembler->WordIsSmi(position), &if_positionissmi,
-                      &if_positionisnotsmi);
-    assembler->Bind(&if_positionisnotsmi);
-    {
-      // Convert the {position} to an Integer via the ToIntegerStub.
-      Callable callable = CodeFactory::ToInteger(assembler->isolate());
-      Node* index = assembler->CallStub(callable, context, position);
-
-      // Check if the resulting {index} is now a Smi.
-      Label if_indexissmi(assembler, Label::kDeferred),
-          if_indexisnotsmi(assembler, Label::kDeferred);
-      assembler->Branch(assembler->WordIsSmi(index), &if_indexissmi,
-                        &if_indexisnotsmi);
-
-      assembler->Bind(&if_indexissmi);
-      {
-        var_position.Bind(index);
-        assembler->Goto(&if_positionissmi);
-      }
-
-      assembler->Bind(&if_indexisnotsmi);
-      {
-        // The ToIntegerStub canonicalizes everything in Smi range to Smi
-        // representation, so any HeapNumber returned is not in Smi range.
-        // The only exception here is -0.0, which we treat as 0.
-        Node* index_value = assembler->LoadHeapNumberValue(index);
-        Label if_indexiszero(assembler, Label::kDeferred),
-            if_indexisnotzero(assembler, Label::kDeferred);
-        assembler->Branch(assembler->Float64Equal(
-                              index_value, assembler->Float64Constant(0.0)),
-                          &if_indexiszero, &if_indexisnotzero);
-
-        assembler->Bind(&if_indexiszero);
-        {
-          var_position.Bind(assembler->SmiConstant(Smi::FromInt(0)));
-          assembler->Goto(&if_positionissmi);
-        }
-
-        assembler->Bind(&if_indexisnotzero);
-        {
-          // The {index} is some other integral Number, that is definitely
-          // neither -0.0 nor in Smi range.
-          assembler->Return(assembler->NaNConstant());
-        }
-      }
-    }
-    assembler->Bind(&if_positionissmi);
-    position = var_position.value();
+    Label return_nan(assembler, Label::kDeferred);
+    position = assembler->ToInteger(context, position,
+                                    CodeStubAssembler::kTruncateMinusZero);
+    assembler->GotoUnless(assembler->WordIsSmi(position), &return_nan);
 
     // Determine the actual length of the {receiver} String.
     Node* receiver_length =
         assembler->LoadObjectField(receiver, String::kLengthOffset);
 
     // Return NaN if the Smi {position} is outside the bounds of the {receiver}.
-    Label if_positioninbounds(assembler),
-        if_positionnotinbounds(assembler, Label::kDeferred);
+    Label if_positioninbounds(assembler);
     assembler->Branch(assembler->SmiAboveOrEqual(position, receiver_length),
-                      &if_positionnotinbounds, &if_positioninbounds);
-    assembler->Bind(&if_positionnotinbounds);
+                      &return_nan, &if_positioninbounds);
+
+    assembler->Bind(&return_nan);
     assembler->Return(assembler->NaNConstant());
+
     assembler->Bind(&if_positioninbounds);
   }
 
@@ -477,6 +779,333 @@
   assembler->Return(result);
 }
 
+// ES6 section 21.1.3.9
+// String.prototype.lastIndexOf ( searchString [ , position ] )
+BUILTIN(StringPrototypeLastIndexOf) {
+  HandleScope handle_scope(isolate);
+  return String::LastIndexOf(isolate, args.receiver(),
+                             args.atOrUndefined(isolate, 1),
+                             args.atOrUndefined(isolate, 2));
+}
+
+// ES6 section 21.1.3.10 String.prototype.localeCompare ( that )
+//
+// This function is implementation specific.  For now, we do not
+// do anything locale specific.
+// If internationalization is enabled, then i18n.js will override this function
+// and provide the proper functionality, so this is just a fallback.
+BUILTIN(StringPrototypeLocaleCompare) {
+  HandleScope handle_scope(isolate);
+  DCHECK_EQ(2, args.length());
+
+  TO_THIS_STRING(str1, "String.prototype.localeCompare");
+  Handle<String> str2;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
+      isolate, str2, Object::ToString(isolate, args.at<Object>(1)));
+
+  if (str1.is_identical_to(str2)) return Smi::FromInt(0);  // Equal.
+  int str1_length = str1->length();
+  int str2_length = str2->length();
+
+  // Decide trivial cases without flattening.
+  if (str1_length == 0) {
+    if (str2_length == 0) return Smi::FromInt(0);  // Equal.
+    return Smi::FromInt(-str2_length);
+  } else {
+    if (str2_length == 0) return Smi::FromInt(str1_length);
+  }
+
+  int end = str1_length < str2_length ? str1_length : str2_length;
+
+  // No need to flatten if we are going to find the answer on the first
+  // character. At this point we know there is at least one character
+  // in each string, due to the trivial case handling above.
+  int d = str1->Get(0) - str2->Get(0);
+  if (d != 0) return Smi::FromInt(d);
+
+  str1 = String::Flatten(str1);
+  str2 = String::Flatten(str2);
+
+  DisallowHeapAllocation no_gc;
+  String::FlatContent flat1 = str1->GetFlatContent();
+  String::FlatContent flat2 = str2->GetFlatContent();
+
+  for (int i = 0; i < end; i++) {
+    if (flat1.Get(i) != flat2.Get(i)) {
+      return Smi::FromInt(flat1.Get(i) - flat2.Get(i));
+    }
+  }
+
+  return Smi::FromInt(str1_length - str2_length);
+}
+
+// ES6 section 21.1.3.12 String.prototype.normalize ( [form] )
+//
+// Simply checks the argument is valid and returns the string itself.
+// If internationalization is enabled, then i18n.js will override this function
+// and provide the proper functionality, so this is just a fallback.
+BUILTIN(StringPrototypeNormalize) {
+  HandleScope handle_scope(isolate);
+  TO_THIS_STRING(string, "String.prototype.normalize");
+
+  Handle<Object> form_input = args.atOrUndefined(isolate, 1);
+  if (form_input->IsUndefined(isolate)) return *string;
+
+  Handle<String> form;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, form,
+                                     Object::ToString(isolate, form_input));
+
+  if (!(String::Equals(form,
+                       isolate->factory()->NewStringFromStaticChars("NFC")) ||
+        String::Equals(form,
+                       isolate->factory()->NewStringFromStaticChars("NFD")) ||
+        String::Equals(form,
+                       isolate->factory()->NewStringFromStaticChars("NFKC")) ||
+        String::Equals(form,
+                       isolate->factory()->NewStringFromStaticChars("NFKD")))) {
+    Handle<String> valid_forms =
+        isolate->factory()->NewStringFromStaticChars("NFC, NFD, NFKC, NFKD");
+    THROW_NEW_ERROR_RETURN_FAILURE(
+        isolate,
+        NewRangeError(MessageTemplate::kNormalizationForm, valid_forms));
+  }
+
+  return *string;
+}
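StringPrototypeNormalize only validates the requested form and returns the receiver unchanged; the real normalization is supplied by i18n.js when internationalization is enabled. A tiny standalone sketch of the accepted-forms check (hypothetical helper, not part of the patch):

#include <string>

// Only the four Unicode normalization form names are accepted; anything else
// results in a RangeError in the builtin above.
bool IsValidNormalizationForm(const std::string& form) {
  return form == "NFC" || form == "NFD" || form == "NFKC" || form == "NFKD";
}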
+
+// ES6 section B.2.3.1 String.prototype.substr ( start, length )
+void Builtins::Generate_StringPrototypeSubstr(CodeStubAssembler* a) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Label out(a), handle_length(a);
+
+  Variable var_start(a, MachineRepresentation::kTagged);
+  Variable var_length(a, MachineRepresentation::kTagged);
+
+  Node* const receiver = a->Parameter(0);
+  Node* const start = a->Parameter(1);
+  Node* const length = a->Parameter(2);
+  Node* const context = a->Parameter(5);
+
+  Node* const zero = a->SmiConstant(Smi::FromInt(0));
+
+  // Check that {receiver} is coercible to Object and convert it to a String.
+  Node* const string =
+      a->ToThisString(context, receiver, "String.prototype.substr");
+
+  Node* const string_length = a->LoadStringLength(string);
+
+  // Conversions and bounds-checks for {start}.
+  {
+    Node* const start_int =
+        a->ToInteger(context, start, CodeStubAssembler::kTruncateMinusZero);
+
+    Label if_issmi(a), if_isheapnumber(a, Label::kDeferred);
+    a->Branch(a->WordIsSmi(start_int), &if_issmi, &if_isheapnumber);
+
+    a->Bind(&if_issmi);
+    {
+      Node* const length_plus_start = a->SmiAdd(string_length, start_int);
+      var_start.Bind(a->Select(a->SmiLessThan(start_int, zero),
+                               a->SmiMax(length_plus_start, zero), start_int));
+      a->Goto(&handle_length);
+    }
+
+    a->Bind(&if_isheapnumber);
+    {
+      // If {start} is a heap number, it is definitely out of bounds. If it is
+      // negative, {start} = max({string_length} + {start}, 0) = 0. If it is
+      // positive, set {start} to {string_length} which ultimately results in
+      // returning an empty string.
+      Node* const float_zero = a->Float64Constant(0.);
+      Node* const start_float = a->LoadHeapNumberValue(start_int);
+      var_start.Bind(a->Select(a->Float64LessThan(start_float, float_zero),
+                               zero, string_length));
+      a->Goto(&handle_length);
+    }
+  }
+
+  // Conversions and bounds-checks for {length}.
+  a->Bind(&handle_length);
+  {
+    Label if_issmi(a), if_isheapnumber(a, Label::kDeferred);
+
+    // Default to {string_length} if {length} is undefined.
+    {
+      Label if_isundefined(a, Label::kDeferred), if_isnotundefined(a);
+      a->Branch(a->WordEqual(length, a->UndefinedConstant()), &if_isundefined,
+                &if_isnotundefined);
+
+      a->Bind(&if_isundefined);
+      var_length.Bind(string_length);
+      a->Goto(&if_issmi);
+
+      a->Bind(&if_isnotundefined);
+      var_length.Bind(
+          a->ToInteger(context, length, CodeStubAssembler::kTruncateMinusZero));
+    }
+
+    a->Branch(a->WordIsSmi(var_length.value()), &if_issmi, &if_isheapnumber);
+
+    // Set {length} to min(max({length}, 0), {string_length} - {start}).
+    a->Bind(&if_issmi);
+    {
+      Node* const positive_length = a->SmiMax(var_length.value(), zero);
+
+      Node* const minimal_length = a->SmiSub(string_length, var_start.value());
+      var_length.Bind(a->SmiMin(positive_length, minimal_length));
+
+      a->GotoUnless(a->SmiLessThanOrEqual(var_length.value(), zero), &out);
+      a->Return(a->EmptyStringConstant());
+    }
+
+    a->Bind(&if_isheapnumber);
+    {
+      // If {length} is a heap number, it is definitely out of bounds. There are
+      // two cases according to the spec: if it is negative, "" is returned; if
+      // it is positive, then length is set to {string_length} - {start}.
+
+      a->Assert(a->WordEqual(a->LoadMap(var_length.value()),
+                             a->HeapNumberMapConstant()));
+
+      Label if_isnegative(a), if_ispositive(a);
+      Node* const float_zero = a->Float64Constant(0.);
+      Node* const length_float = a->LoadHeapNumberValue(var_length.value());
+      a->Branch(a->Float64LessThan(length_float, float_zero), &if_isnegative,
+                &if_ispositive);
+
+      a->Bind(&if_isnegative);
+      a->Return(a->EmptyStringConstant());
+
+      a->Bind(&if_ispositive);
+      {
+        var_length.Bind(a->SmiSub(string_length, var_start.value()));
+        a->GotoUnless(a->SmiLessThanOrEqual(var_length.value(), zero), &out);
+        a->Return(a->EmptyStringConstant());
+      }
+    }
+  }
+
+  a->Bind(&out);
+  {
+    Node* const end = a->SmiAdd(var_start.value(), var_length.value());
+    Node* const result = a->SubString(context, string, var_start.value(), end);
+    a->Return(result);
+  }
+}
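Generate_StringPrototypeSubstr above is mostly clamping: a negative {start} counts back from the end, {length} defaults to the remaining characters and is clamped into what is actually available, and out-of-Smi-range values collapse to the nearest bound. A behavioral standalone sketch of the same clamping (hypothetical helper, not part of the patch; the inputs stand for the already-converted ToInteger results, and the caller passes the string length when {length} was undefined):

#include <algorithm>
#include <cstddef>
#include <string>

// Same observable behaviour as the builtin's Smi and heap-number paths:
// clamp {start} into [0, size], clamp {length} into [0, size - start],
// and return "" when nothing remains.
std::string SubstrSketch(const std::string& s, double start, double length) {
  const double size = static_cast<double>(s.size());
  double begin =
      start < 0 ? std::max(size + start, 0.0) : std::min(start, size);
  double count = std::min(std::max(length, 0.0), size - begin);
  if (count <= 0) return std::string();
  return s.substr(static_cast<std::size_t>(begin),
                  static_cast<std::size_t>(count));
}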
+
+namespace {
+
+compiler::Node* ToSmiBetweenZeroAnd(CodeStubAssembler* a,
+                                    compiler::Node* context,
+                                    compiler::Node* value,
+                                    compiler::Node* limit) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Label out(a);
+  Variable var_result(a, MachineRepresentation::kTagged);
+
+  Node* const value_int =
+      a->ToInteger(context, value, CodeStubAssembler::kTruncateMinusZero);
+
+  Label if_issmi(a), if_isnotsmi(a, Label::kDeferred);
+  a->Branch(a->WordIsSmi(value_int), &if_issmi, &if_isnotsmi);
+
+  a->Bind(&if_issmi);
+  {
+    Label if_isinbounds(a), if_isoutofbounds(a, Label::kDeferred);
+    a->Branch(a->SmiAbove(value_int, limit), &if_isoutofbounds, &if_isinbounds);
+
+    a->Bind(&if_isinbounds);
+    {
+      var_result.Bind(value_int);
+      a->Goto(&out);
+    }
+
+    a->Bind(&if_isoutofbounds);
+    {
+      Node* const zero = a->SmiConstant(Smi::FromInt(0));
+      var_result.Bind(a->Select(a->SmiLessThan(value_int, zero), zero, limit));
+      a->Goto(&out);
+    }
+  }
+
+  a->Bind(&if_isnotsmi);
+  {
+    // {value} is a heap number - in this case, it is definitely out of bounds.
+    a->Assert(a->WordEqual(a->LoadMap(value_int), a->HeapNumberMapConstant()));
+
+    Node* const float_zero = a->Float64Constant(0.);
+    Node* const smi_zero = a->SmiConstant(Smi::FromInt(0));
+    Node* const value_float = a->LoadHeapNumberValue(value_int);
+    var_result.Bind(a->Select(a->Float64LessThan(value_float, float_zero),
+                              smi_zero, limit));
+    a->Goto(&out);
+  }
+
+  a->Bind(&out);
+  return var_result.value();
+}
+
+}  // namespace
+
+// ES6 section 21.1.3.19 String.prototype.substring ( start, end )
+void Builtins::Generate_StringPrototypeSubstring(CodeStubAssembler* a) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Label out(a);
+
+  Variable var_start(a, MachineRepresentation::kTagged);
+  Variable var_end(a, MachineRepresentation::kTagged);
+
+  Node* const receiver = a->Parameter(0);
+  Node* const start = a->Parameter(1);
+  Node* const end = a->Parameter(2);
+  Node* const context = a->Parameter(5);
+
+  // Check that {receiver} is coercible to Object and convert it to a String.
+  Node* const string =
+      a->ToThisString(context, receiver, "String.prototype.substring");
+
+  Node* const length = a->LoadStringLength(string);
+
+  // Conversion and bounds-checks for {start}.
+  var_start.Bind(ToSmiBetweenZeroAnd(a, context, start, length));
+
+  // Conversion and bounds-checks for {end}.
+  {
+    var_end.Bind(length);
+    a->GotoIf(a->WordEqual(end, a->UndefinedConstant()), &out);
+
+    var_end.Bind(ToSmiBetweenZeroAnd(a, context, end, length));
+
+    Label if_endislessthanstart(a);
+    a->Branch(a->SmiLessThan(var_end.value(), var_start.value()),
+              &if_endislessthanstart, &out);
+
+    a->Bind(&if_endislessthanstart);
+    {
+      Node* const tmp = var_end.value();
+      var_end.Bind(var_start.value());
+      var_start.Bind(tmp);
+      a->Goto(&out);
+    }
+  }
+
+  a->Bind(&out);
+  {
+    Node* result =
+        a->SubString(context, string, var_start.value(), var_end.value());
+    a->Return(result);
+  }
+}
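Generate_StringPrototypeSubstring reuses ToSmiBetweenZeroAnd to clamp both indices into [0, length] ({end} defaulting to the string length when undefined) and swaps them when they arrive reversed. A standalone sketch of that behaviour (hypothetical helper, not part of the patch):

#include <algorithm>
#include <cstddef>
#include <string>
#include <utility>

// Clamp both indices into [0, size] (the ToSmiBetweenZeroAnd step), swap if
// end < start, then take the slice in between.
std::string SubstringSketch(const std::string& s, long start, long end) {
  const long size = static_cast<long>(s.size());
  long from = std::min(std::max(start, 0L), size);
  long to = std::min(std::max(end, 0L), size);
  if (to < from) std::swap(from, to);
  return s.substr(static_cast<std::size_t>(from),
                  static_cast<std::size_t>(to - from));
}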
+
 // ES6 section 21.1.3.25 String.prototype.toString ()
 void Builtins::Generate_StringPrototypeToString(CodeStubAssembler* assembler) {
   typedef compiler::Node Node;
@@ -522,5 +1151,203 @@
   assembler->Return(result);
 }
 
+void Builtins::Generate_StringPrototypeIterator(CodeStubAssembler* assembler) {
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(0);
+  Node* context = assembler->Parameter(3);
+
+  Node* string = assembler->ToThisString(context, receiver,
+                                         "String.prototype[Symbol.iterator]");
+
+  Node* native_context = assembler->LoadNativeContext(context);
+  Node* map = assembler->LoadFixedArrayElement(
+      native_context,
+      assembler->IntPtrConstant(Context::STRING_ITERATOR_MAP_INDEX), 0,
+      CodeStubAssembler::INTPTR_PARAMETERS);
+  Node* iterator = assembler->Allocate(JSStringIterator::kSize);
+  assembler->StoreMapNoWriteBarrier(iterator, map);
+  assembler->StoreObjectFieldRoot(iterator, JSValue::kPropertiesOffset,
+                                  Heap::kEmptyFixedArrayRootIndex);
+  assembler->StoreObjectFieldRoot(iterator, JSObject::kElementsOffset,
+                                  Heap::kEmptyFixedArrayRootIndex);
+  assembler->StoreObjectFieldNoWriteBarrier(
+      iterator, JSStringIterator::kStringOffset, string);
+  Node* index = assembler->SmiConstant(Smi::FromInt(0));
+  assembler->StoreObjectFieldNoWriteBarrier(
+      iterator, JSStringIterator::kNextIndexOffset, index);
+  assembler->Return(iterator);
+}
+
+namespace {
+
+// Return the |word32| codepoint at {index}. Supports SeqStrings and
+// ExternalStrings.
+compiler::Node* LoadSurrogatePairInternal(CodeStubAssembler* assembler,
+                                          compiler::Node* string,
+                                          compiler::Node* length,
+                                          compiler::Node* index,
+                                          UnicodeEncoding encoding) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+  Label handle_surrogate_pair(assembler), return_result(assembler);
+  Variable var_result(assembler, MachineRepresentation::kWord32);
+  Variable var_trail(assembler, MachineRepresentation::kWord16);
+  var_result.Bind(assembler->StringCharCodeAt(string, index));
+  var_trail.Bind(assembler->Int32Constant(0));
+
+  assembler->GotoIf(assembler->Word32NotEqual(
+                        assembler->Word32And(var_result.value(),
+                                             assembler->Int32Constant(0xFC00)),
+                        assembler->Int32Constant(0xD800)),
+                    &return_result);
+  Node* next_index =
+      assembler->SmiAdd(index, assembler->SmiConstant(Smi::FromInt(1)));
+
+  assembler->GotoUnless(assembler->SmiLessThan(next_index, length),
+                        &return_result);
+  var_trail.Bind(assembler->StringCharCodeAt(string, next_index));
+  assembler->Branch(assembler->Word32Equal(
+                        assembler->Word32And(var_trail.value(),
+                                             assembler->Int32Constant(0xFC00)),
+                        assembler->Int32Constant(0xDC00)),
+                    &handle_surrogate_pair, &return_result);
+
+  assembler->Bind(&handle_surrogate_pair);
+  {
+    Node* lead = var_result.value();
+    Node* trail = var_trail.value();
+#ifdef ENABLE_SLOW_DCHECKS
+    // Check that this path is only taken if a surrogate pair is found
+    assembler->Assert(assembler->Uint32GreaterThanOrEqual(
+        lead, assembler->Int32Constant(0xD800)));
+    assembler->Assert(
+        assembler->Uint32LessThan(lead, assembler->Int32Constant(0xDC00)));
+    assembler->Assert(assembler->Uint32GreaterThanOrEqual(
+        trail, assembler->Int32Constant(0xDC00)));
+    assembler->Assert(
+        assembler->Uint32LessThan(trail, assembler->Int32Constant(0xE000)));
+#endif
+
+    switch (encoding) {
+      case UnicodeEncoding::UTF16:
+        var_result.Bind(assembler->WordOr(
+// Need to swap the order for big-endian platforms
+#if V8_TARGET_BIG_ENDIAN
+            assembler->WordShl(lead, assembler->Int32Constant(16)), trail));
+#else
+            assembler->WordShl(trail, assembler->Int32Constant(16)), lead));
+#endif
+        break;
+
+      case UnicodeEncoding::UTF32: {
+        // Convert UTF16 surrogate pair into |word32| code point, encoded as
+        // UTF32.
+        Node* surrogate_offset =
+            assembler->Int32Constant(0x10000 - (0xD800 << 10) - 0xDC00);
+
+        // (lead << 10) + trail + SURROGATE_OFFSET
+        var_result.Bind(assembler->Int32Add(
+            assembler->WordShl(lead, assembler->Int32Constant(10)),
+            assembler->Int32Add(trail, surrogate_offset)));
+        break;
+      }
+    }
+    assembler->Goto(&return_result);
+  }
+
+  assembler->Bind(&return_result);
+  return var_result.value();
+}
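LoadSurrogatePairInternal recognizes a lead surrogate in [0xD800, 0xDC00) followed by a trail surrogate in [0xDC00, 0xE000) and, for UTF32, combines them as (lead << 10) + trail + SURROGATE_OFFSET with SURROGATE_OFFSET = 0x10000 - (0xD800 << 10) - 0xDC00. A standalone sketch of that arithmetic (hypothetical helpers, not part of the patch):

#include <cstdint>

// Same constant as the UTF32 branch above; it folds the subtraction of both
// surrogate bases and the addition of 0x10000 into one term.
constexpr int32_t kSurrogateOffset = 0x10000 - (0xD800 << 10) - 0xDC00;

bool IsLeadSurrogate(uint16_t unit) { return (unit & 0xFC00) == 0xD800; }
bool IsTrailSurrogate(uint16_t unit) { return (unit & 0xFC00) == 0xDC00; }

// (lead << 10) + trail + SURROGATE_OFFSET, e.g. 0xD800/0xDC00 -> U+10000.
uint32_t CombineSurrogatePair(uint16_t lead, uint16_t trail) {
  return static_cast<uint32_t>((lead << 10) + trail + kSurrogateOffset);
}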
+
+compiler::Node* LoadSurrogatePairAt(CodeStubAssembler* assembler,
+                                    compiler::Node* string,
+                                    compiler::Node* length,
+                                    compiler::Node* index) {
+  return LoadSurrogatePairInternal(assembler, string, length, index,
+                                   UnicodeEncoding::UTF16);
+}
+
+}  // namespace
+
+void Builtins::Generate_StringIteratorPrototypeNext(
+    CodeStubAssembler* assembler) {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Variable var_value(assembler, MachineRepresentation::kTagged);
+  Variable var_done(assembler, MachineRepresentation::kTagged);
+
+  var_value.Bind(assembler->UndefinedConstant());
+  var_done.Bind(assembler->BooleanConstant(true));
+
+  Label throw_bad_receiver(assembler), next_codepoint(assembler),
+      return_result(assembler);
+
+  Node* iterator = assembler->Parameter(0);
+  Node* context = assembler->Parameter(3);
+
+  assembler->GotoIf(assembler->WordIsSmi(iterator), &throw_bad_receiver);
+  assembler->GotoUnless(
+      assembler->WordEqual(assembler->LoadInstanceType(iterator),
+                           assembler->Int32Constant(JS_STRING_ITERATOR_TYPE)),
+      &throw_bad_receiver);
+
+  Node* string =
+      assembler->LoadObjectField(iterator, JSStringIterator::kStringOffset);
+  Node* position =
+      assembler->LoadObjectField(iterator, JSStringIterator::kNextIndexOffset);
+  Node* length = assembler->LoadObjectField(string, String::kLengthOffset);
+
+  assembler->Branch(assembler->SmiLessThan(position, length), &next_codepoint,
+                    &return_result);
+
+  assembler->Bind(&next_codepoint);
+  {
+    Node* ch = LoadSurrogatePairAt(assembler, string, length, position);
+    Node* value = assembler->StringFromCodePoint(ch, UnicodeEncoding::UTF16);
+    var_value.Bind(value);
+    Node* length = assembler->LoadObjectField(value, String::kLengthOffset);
+    assembler->StoreObjectFieldNoWriteBarrier(
+        iterator, JSStringIterator::kNextIndexOffset,
+        assembler->SmiAdd(position, length));
+    var_done.Bind(assembler->BooleanConstant(false));
+    assembler->Goto(&return_result);
+  }
+
+  assembler->Bind(&return_result);
+  {
+    Node* native_context = assembler->LoadNativeContext(context);
+    Node* map = assembler->LoadFixedArrayElement(
+        native_context,
+        assembler->IntPtrConstant(Context::ITERATOR_RESULT_MAP_INDEX), 0,
+        CodeStubAssembler::INTPTR_PARAMETERS);
+    Node* result = assembler->Allocate(JSIteratorResult::kSize);
+    assembler->StoreMapNoWriteBarrier(result, map);
+    assembler->StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOffset,
+                                    Heap::kEmptyFixedArrayRootIndex);
+    assembler->StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
+                                    Heap::kEmptyFixedArrayRootIndex);
+    assembler->StoreObjectFieldNoWriteBarrier(
+        result, JSIteratorResult::kValueOffset, var_value.value());
+    assembler->StoreObjectFieldNoWriteBarrier(
+        result, JSIteratorResult::kDoneOffset, var_done.value());
+    assembler->Return(result);
+  }
+
+  assembler->Bind(&throw_bad_receiver);
+  {
+    // The {receiver} is not a valid JSStringIterator.
+    Node* result = assembler->CallRuntime(
+        Runtime::kThrowIncompatibleMethodReceiver, context,
+        assembler->HeapConstant(assembler->factory()->NewStringFromAsciiChecked(
+            "String Iterator.prototype.next", TENURED)),
+        iterator);
+    assembler->Return(result);  // Never reached.
+  }
+}
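Generate_StringIteratorPrototypeNext returns a one-code-point string and advances the iterator's index by that string's length, i.e. by one code unit, or by two when a surrogate pair was consumed. A standalone sketch of the same stepping over a raw UTF-16 buffer (hypothetical helper, not part of the patch; it reports only how far to advance and whether iteration is done):

#include <cstddef>
#include <cstdint>
#include <vector>

struct IterationStep {
  std::size_t next_position;  // mirrors the updated kNextIndexOffset
  bool done;                  // mirrors the "done" slot of the result object
};

// Consume one code point at {position}: two code units if a well-formed
// surrogate pair starts there, otherwise one; past the end, report done.
IterationStep NextSketch(const std::vector<uint16_t>& units,
                         std::size_t position) {
  if (position >= units.size()) return {position, true};
  bool is_pair = (units[position] & 0xFC00) == 0xD800 &&
                 position + 1 < units.size() &&
                 (units[position + 1] & 0xFC00) == 0xDC00;
  return {position + (is_pair ? 2 : 1), false};
}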
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/builtins/builtins-utils.h b/src/builtins/builtins-utils.h
index 90b58c7..ca1786c 100644
--- a/src/builtins/builtins-utils.h
+++ b/src/builtins/builtins-utils.h
@@ -76,32 +76,31 @@
 // through the BuiltinArguments object args.
 // TODO(cbruni): add global flag to check whether any tracing events have been
 // enabled.
-// TODO(cbruni): Convert the IsContext CHECK back to a DCHECK.
-#define BUILTIN(name)                                                        \
-  MUST_USE_RESULT static Object* Builtin_Impl_##name(BuiltinArguments args,  \
-                                                     Isolate* isolate);      \
-                                                                             \
-  V8_NOINLINE static Object* Builtin_Impl_Stats_##name(                      \
-      int args_length, Object** args_object, Isolate* isolate) {             \
-    BuiltinArguments args(args_length, args_object);                         \
-    RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Builtin_##name); \
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(                           \
-        isolate, &tracing::TraceEventStatsTable::Builtin_##name);            \
-    return Builtin_Impl_##name(args, isolate);                               \
-  }                                                                          \
-                                                                             \
-  MUST_USE_RESULT Object* Builtin_##name(                                    \
-      int args_length, Object** args_object, Isolate* isolate) {             \
-    CHECK(isolate->context() == nullptr || isolate->context()->IsContext()); \
-    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||      \
-                    FLAG_runtime_call_stats)) {                              \
-      return Builtin_Impl_Stats_##name(args_length, args_object, isolate);   \
-    }                                                                        \
-    BuiltinArguments args(args_length, args_object);                         \
-    return Builtin_Impl_##name(args, isolate);                               \
-  }                                                                          \
-                                                                             \
-  MUST_USE_RESULT static Object* Builtin_Impl_##name(BuiltinArguments args,  \
+#define BUILTIN(name)                                                         \
+  MUST_USE_RESULT static Object* Builtin_Impl_##name(BuiltinArguments args,   \
+                                                     Isolate* isolate);       \
+                                                                              \
+  V8_NOINLINE static Object* Builtin_Impl_Stats_##name(                       \
+      int args_length, Object** args_object, Isolate* isolate) {              \
+    BuiltinArguments args(args_length, args_object);                          \
+    RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Builtin_##name);  \
+    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"),                     \
+                 "V8.Builtin_" #name);                                        \
+    return Builtin_Impl_##name(args, isolate);                                \
+  }                                                                           \
+                                                                              \
+  MUST_USE_RESULT Object* Builtin_##name(                                     \
+      int args_length, Object** args_object, Isolate* isolate) {              \
+    DCHECK(isolate->context() == nullptr || isolate->context()->IsContext()); \
+    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||       \
+                    FLAG_runtime_call_stats)) {                               \
+      return Builtin_Impl_Stats_##name(args_length, args_object, isolate);    \
+    }                                                                         \
+    BuiltinArguments args(args_length, args_object);                          \
+    return Builtin_Impl_##name(args, isolate);                                \
+  }                                                                           \
+                                                                              \
+  MUST_USE_RESULT static Object* Builtin_Impl_##name(BuiltinArguments args,   \
                                                      Isolate* isolate)
 
 // ----------------------------------------------------------------------------
diff --git a/src/builtins/builtins.h b/src/builtins/builtins.h
index f8ce2e6..3579f3c 100644
--- a/src/builtins/builtins.h
+++ b/src/builtins/builtins.h
@@ -49,27 +49,6 @@
 //      Args: name
 #define BUILTIN_LIST(CPP, API, TFJ, TFS, ASM, ASH, DBG)                       \
   ASM(Abort)                                                                  \
-  /* Handlers */                                                              \
-  ASH(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, kNoExtraICState)                \
-  ASM(KeyedLoadIC_Miss)                                                       \
-  ASH(KeyedLoadIC_Slow, HANDLER, Code::KEYED_LOAD_IC)                         \
-  ASH(KeyedStoreIC_Megamorphic, KEYED_STORE_IC, kNoExtraICState)              \
-  ASH(KeyedStoreIC_Megamorphic_Strict, KEYED_STORE_IC,                        \
-      StoreICState::kStrictModeState)                                         \
-  ASM(KeyedStoreIC_Miss)                                                      \
-  ASH(KeyedStoreIC_Slow, HANDLER, Code::KEYED_STORE_IC)                       \
-  TFS(LoadGlobalIC_Miss, BUILTIN, kNoExtraICState, LoadGlobalWithVector)      \
-  TFS(LoadGlobalIC_Slow, HANDLER, Code::LOAD_GLOBAL_IC, LoadGlobalWithVector) \
-  ASH(LoadIC_Getter_ForDeopt, LOAD_IC, kNoExtraICState)                       \
-  TFS(LoadIC_Miss, BUILTIN, kNoExtraICState, LoadWithVector)                  \
-  ASH(LoadIC_Normal, HANDLER, Code::LOAD_IC)                                  \
-  TFS(LoadIC_Slow, HANDLER, Code::LOAD_IC, LoadWithVector)                    \
-  TFS(StoreIC_Miss, BUILTIN, kNoExtraICState, StoreWithVector)                \
-  ASH(StoreIC_Normal, HANDLER, Code::STORE_IC)                                \
-  ASH(StoreIC_Setter_ForDeopt, STORE_IC, StoreICState::kStrictModeState)      \
-  TFS(StoreIC_SlowSloppy, HANDLER, Code::STORE_IC, StoreWithVector)           \
-  TFS(StoreIC_SlowStrict, HANDLER, Code::STORE_IC, StoreWithVector)           \
-                                                                              \
   /* Code aging */                                                            \
   CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, ASM)                       \
                                                                               \
@@ -118,14 +97,24 @@
   ASM(InterruptCheck)                                                         \
   ASM(StackCheck)                                                             \
                                                                               \
+  /* String helpers */                                                        \
+  TFS(StringEqual, BUILTIN, kNoExtraICState, Compare)                         \
+  TFS(StringNotEqual, BUILTIN, kNoExtraICState, Compare)                      \
+  TFS(StringLessThan, BUILTIN, kNoExtraICState, Compare)                      \
+  TFS(StringLessThanOrEqual, BUILTIN, kNoExtraICState, Compare)               \
+  TFS(StringGreaterThan, BUILTIN, kNoExtraICState, Compare)                   \
+  TFS(StringGreaterThanOrEqual, BUILTIN, kNoExtraICState, Compare)            \
+                                                                              \
   /* Interpreter */                                                           \
   ASM(InterpreterEntryTrampoline)                                             \
   ASM(InterpreterMarkBaselineOnReturn)                                        \
   ASM(InterpreterPushArgsAndCall)                                             \
   ASM(InterpreterPushArgsAndCallFunction)                                     \
-  ASM(InterpreterPushArgsAndConstruct)                                        \
   ASM(InterpreterPushArgsAndTailCall)                                         \
   ASM(InterpreterPushArgsAndTailCallFunction)                                 \
+  ASM(InterpreterPushArgsAndConstruct)                                        \
+  ASM(InterpreterPushArgsAndConstructFunction)                                \
+  ASM(InterpreterPushArgsAndConstructArray)                                   \
   ASM(InterpreterEnterBytecodeDispatch)                                       \
   ASM(InterpreterOnStackReplacement)                                          \
                                                                               \
@@ -162,6 +151,7 @@
   TFS(GrowFastDoubleElements, BUILTIN, kNoExtraICState, GrowArrayElements)    \
   TFS(GrowFastSmiOrObjectElements, BUILTIN, kNoExtraICState,                  \
       GrowArrayElements)                                                      \
+  TFS(OrdinaryHasInstance, BUILTIN, kNoExtraICState, Compare)                 \
                                                                               \
   /* Debugger */                                                              \
   DBG(FrameDropper_LiveEdit)                                                  \
@@ -179,8 +169,33 @@
   TFS(NonPrimitiveToPrimitive_String, BUILTIN, kNoExtraICState,               \
       TypeConversion)                                                         \
   TFS(StringToNumber, BUILTIN, kNoExtraICState, TypeConversion)               \
+  TFS(ToName, BUILTIN, kNoExtraICState, TypeConversion)                       \
   TFS(NonNumberToNumber, BUILTIN, kNoExtraICState, TypeConversion)            \
-  ASM(ToNumber)                                                               \
+  TFS(ToNumber, BUILTIN, kNoExtraICState, TypeConversion)                     \
+  TFS(ToString, BUILTIN, kNoExtraICState, TypeConversion)                     \
+                                                                              \
+  /* Handlers */                                                              \
+  ASH(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, kNoExtraICState)                \
+  TFS(KeyedLoadIC_Megamorphic_TF, KEYED_LOAD_IC, kNoExtraICState,             \
+      LoadWithVector)                                                         \
+  ASM(KeyedLoadIC_Miss)                                                       \
+  ASH(KeyedLoadIC_Slow, HANDLER, Code::KEYED_LOAD_IC)                         \
+  ASH(KeyedStoreIC_Megamorphic, KEYED_STORE_IC, kNoExtraICState)              \
+  ASH(KeyedStoreIC_Megamorphic_Strict, KEYED_STORE_IC,                        \
+      StoreICState::kStrictModeState)                                         \
+  ASM(KeyedStoreIC_Miss)                                                      \
+  ASH(KeyedStoreIC_Slow, HANDLER, Code::KEYED_STORE_IC)                       \
+  TFS(LoadGlobalIC_Miss, BUILTIN, kNoExtraICState, LoadGlobalWithVector)      \
+  TFS(LoadGlobalIC_Slow, HANDLER, Code::LOAD_GLOBAL_IC, LoadGlobalWithVector) \
+  ASH(LoadIC_Getter_ForDeopt, LOAD_IC, kNoExtraICState)                       \
+  TFS(LoadIC_Miss, BUILTIN, kNoExtraICState, LoadWithVector)                  \
+  ASH(LoadIC_Normal, HANDLER, Code::LOAD_IC)                                  \
+  TFS(LoadIC_Slow, HANDLER, Code::LOAD_IC, LoadWithVector)                    \
+  TFS(StoreIC_Miss, BUILTIN, kNoExtraICState, StoreWithVector)                \
+  ASH(StoreIC_Normal, HANDLER, Code::STORE_IC)                                \
+  ASH(StoreIC_Setter_ForDeopt, STORE_IC, StoreICState::kStrictModeState)      \
+  TFS(StoreIC_SlowSloppy, HANDLER, Code::STORE_IC, StoreWithVector)           \
+  TFS(StoreIC_SlowStrict, HANDLER, Code::STORE_IC, StoreWithVector)           \
                                                                               \
   /* Built-in functions for Javascript */                                     \
   /* Special internal builtins */                                             \
@@ -244,46 +259,62 @@
   CPP(DataViewPrototypeGetBuffer)                                             \
   CPP(DataViewPrototypeGetByteLength)                                         \
   CPP(DataViewPrototypeGetByteOffset)                                         \
+  CPP(DataViewPrototypeGetInt8)                                               \
+  CPP(DataViewPrototypeSetInt8)                                               \
+  CPP(DataViewPrototypeGetUint8)                                              \
+  CPP(DataViewPrototypeSetUint8)                                              \
+  CPP(DataViewPrototypeGetInt16)                                              \
+  CPP(DataViewPrototypeSetInt16)                                              \
+  CPP(DataViewPrototypeGetUint16)                                             \
+  CPP(DataViewPrototypeSetUint16)                                             \
+  CPP(DataViewPrototypeGetInt32)                                              \
+  CPP(DataViewPrototypeSetInt32)                                              \
+  CPP(DataViewPrototypeGetUint32)                                             \
+  CPP(DataViewPrototypeSetUint32)                                             \
+  CPP(DataViewPrototypeGetFloat32)                                            \
+  CPP(DataViewPrototypeSetFloat32)                                            \
+  CPP(DataViewPrototypeGetFloat64)                                            \
+  CPP(DataViewPrototypeSetFloat64)                                            \
                                                                               \
   /* Date */                                                                  \
   CPP(DateConstructor)                                                        \
   CPP(DateConstructor_ConstructStub)                                          \
   /* ES6 section 20.3.4.2 Date.prototype.getDate ( ) */                       \
-  ASM(DatePrototypeGetDate)                                                   \
+  TFJ(DatePrototypeGetDate, 1)                                                \
   /* ES6 section 20.3.4.3 Date.prototype.getDay ( ) */                        \
-  ASM(DatePrototypeGetDay)                                                    \
+  TFJ(DatePrototypeGetDay, 1)                                                 \
   /* ES6 section 20.3.4.4 Date.prototype.getFullYear ( ) */                   \
-  ASM(DatePrototypeGetFullYear)                                               \
+  TFJ(DatePrototypeGetFullYear, 1)                                            \
   /* ES6 section 20.3.4.5 Date.prototype.getHours ( ) */                      \
-  ASM(DatePrototypeGetHours)                                                  \
+  TFJ(DatePrototypeGetHours, 1)                                               \
   /* ES6 section 20.3.4.6 Date.prototype.getMilliseconds ( ) */               \
-  ASM(DatePrototypeGetMilliseconds)                                           \
+  TFJ(DatePrototypeGetMilliseconds, 1)                                        \
   /* ES6 section 20.3.4.7 Date.prototype.getMinutes ( ) */                    \
-  ASM(DatePrototypeGetMinutes)                                                \
+  TFJ(DatePrototypeGetMinutes, 1)                                             \
   /* ES6 section 20.3.4.8 Date.prototype.getMonth */                          \
-  ASM(DatePrototypeGetMonth)                                                  \
+  TFJ(DatePrototypeGetMonth, 1)                                               \
   /* ES6 section 20.3.4.9 Date.prototype.getSeconds ( ) */                    \
-  ASM(DatePrototypeGetSeconds)                                                \
+  TFJ(DatePrototypeGetSeconds, 1)                                             \
   /* ES6 section 20.3.4.10 Date.prototype.getTime ( ) */                      \
-  ASM(DatePrototypeGetTime)                                                   \
+  TFJ(DatePrototypeGetTime, 1)                                                \
   /* ES6 section 20.3.4.11 Date.prototype.getTimezoneOffset ( ) */            \
-  ASM(DatePrototypeGetTimezoneOffset)                                         \
+  TFJ(DatePrototypeGetTimezoneOffset, 1)                                      \
   /* ES6 section 20.3.4.12 Date.prototype.getUTCDate ( ) */                   \
-  ASM(DatePrototypeGetUTCDate)                                                \
+  TFJ(DatePrototypeGetUTCDate, 1)                                             \
   /* ES6 section 20.3.4.13 Date.prototype.getUTCDay ( ) */                    \
-  ASM(DatePrototypeGetUTCDay)                                                 \
+  TFJ(DatePrototypeGetUTCDay, 1)                                              \
   /* ES6 section 20.3.4.14 Date.prototype.getUTCFullYear ( ) */               \
-  ASM(DatePrototypeGetUTCFullYear)                                            \
+  TFJ(DatePrototypeGetUTCFullYear, 1)                                         \
   /* ES6 section 20.3.4.15 Date.prototype.getUTCHours ( ) */                  \
-  ASM(DatePrototypeGetUTCHours)                                               \
+  TFJ(DatePrototypeGetUTCHours, 1)                                            \
   /* ES6 section 20.3.4.16 Date.prototype.getUTCMilliseconds ( ) */           \
-  ASM(DatePrototypeGetUTCMilliseconds)                                        \
+  TFJ(DatePrototypeGetUTCMilliseconds, 1)                                     \
   /* ES6 section 20.3.4.17 Date.prototype.getUTCMinutes ( ) */                \
-  ASM(DatePrototypeGetUTCMinutes)                                             \
+  TFJ(DatePrototypeGetUTCMinutes, 1)                                          \
   /* ES6 section 20.3.4.18 Date.prototype.getUTCMonth ( ) */                  \
-  ASM(DatePrototypeGetUTCMonth)                                               \
+  TFJ(DatePrototypeGetUTCMonth, 1)                                            \
   /* ES6 section 20.3.4.19 Date.prototype.getUTCSeconds ( ) */                \
-  ASM(DatePrototypeGetUTCSeconds)                                             \
+  TFJ(DatePrototypeGetUTCSeconds, 1)                                          \
   CPP(DatePrototypeGetYear)                                                   \
   CPP(DatePrototypeSetYear)                                                   \
   CPP(DateNow)                                                                \
@@ -342,16 +373,21 @@
   TFJ(GeneratorPrototypeThrow, 2)                                             \
   CPP(AsyncFunctionConstructor)                                               \
                                                                               \
-  /* Encode and decode */                                                     \
+  /* Global object */                                                         \
   CPP(GlobalDecodeURI)                                                        \
   CPP(GlobalDecodeURIComponent)                                               \
   CPP(GlobalEncodeURI)                                                        \
   CPP(GlobalEncodeURIComponent)                                               \
   CPP(GlobalEscape)                                                           \
   CPP(GlobalUnescape)                                                         \
-                                                                              \
-  /* Eval */                                                                  \
   CPP(GlobalEval)                                                             \
+  /* ES6 section 18.2.2 isFinite ( number ) */                                \
+  TFJ(GlobalIsFinite, 2)                                                      \
+  /* ES6 section 18.2.3 isNaN ( number ) */                                   \
+  TFJ(GlobalIsNaN, 2)                                                         \
+                                                                              \
+  /* ES6 #sec-%iteratorprototype%-@@iterator */                               \
+  TFJ(IteratorPrototypeIterator, 1)                                           \
                                                                               \
   /* JSON */                                                                  \
   CPP(JsonParse)                                                              \
@@ -432,6 +468,14 @@
   ASM(NumberConstructor)                                                      \
   /* ES6 section 20.1.1.1 Number ( [ value ] ) for the [[Construct]] case */  \
   ASM(NumberConstructor_ConstructStub)                                        \
+  /* ES6 section 20.1.2.2 Number.isFinite ( number ) */                       \
+  TFJ(NumberIsFinite, 2)                                                      \
+  /* ES6 section 20.1.2.3 Number.isInteger ( number ) */                      \
+  TFJ(NumberIsInteger, 2)                                                     \
+  /* ES6 section 20.1.2.4 Number.isNaN ( number ) */                          \
+  TFJ(NumberIsNaN, 2)                                                         \
+  /* ES6 section 20.1.2.5 Number.isSafeInteger ( number ) */                  \
+  TFJ(NumberIsSafeInteger, 2)                                                 \
   CPP(NumberPrototypeToExponential)                                           \
   CPP(NumberPrototypeToFixed)                                                 \
   CPP(NumberPrototypeToLocaleString)                                          \
@@ -489,6 +533,10 @@
   CPP(ReflectSet)                                                             \
   CPP(ReflectSetPrototypeOf)                                                  \
                                                                               \
+  /* RegExp */                                                                \
+  CPP(RegExpConstructor)                                                      \
+  TFJ(RegExpPrototypeExec, 2)                                                 \
+                                                                              \
   /* SharedArrayBuffer */                                                     \
   CPP(SharedArrayBufferPrototypeGetByteLength)                                \
   TFJ(AtomicsLoad, 3)                                                         \
@@ -504,6 +552,17 @@
   TFJ(StringPrototypeCharAt, 2)                                               \
   /* ES6 section 21.1.3.2 String.prototype.charCodeAt ( pos ) */              \
   TFJ(StringPrototypeCharCodeAt, 2)                                           \
+  /* ES6 section 21.1.3.9 */                                                  \
+  /* String.prototype.lastIndexOf ( searchString [ , position ] ) */          \
+  CPP(StringPrototypeLastIndexOf)                                             \
+  /* ES6 section 21.1.3.10 String.prototype.localeCompare ( that ) */         \
+  CPP(StringPrototypeLocaleCompare)                                           \
+  /* ES6 section 21.1.3.12 String.prototype.normalize ( [form] ) */           \
+  CPP(StringPrototypeNormalize)                                               \
+  /* ES6 section B.2.3.1 String.prototype.substr ( start, length ) */         \
+  TFJ(StringPrototypeSubstr, 3)                                               \
+  /* ES6 section 21.1.3.19 String.prototype.substring ( start, end ) */       \
+  TFJ(StringPrototypeSubstring, 3)                                            \
   /* ES6 section 21.1.3.25 String.prototype.toString () */                    \
   TFJ(StringPrototypeToString, 1)                                             \
   CPP(StringPrototypeTrim)                                                    \
@@ -511,6 +570,11 @@
   CPP(StringPrototypeTrimRight)                                               \
   /* ES6 section 21.1.3.28 String.prototype.valueOf () */                     \
   TFJ(StringPrototypeValueOf, 1)                                              \
+  /* ES6 #sec-string.prototype-@@iterator */                                  \
+  TFJ(StringPrototypeIterator, 1)                                             \
+                                                                              \
+  /* StringIterator */                                                        \
+  TFJ(StringIteratorPrototypeNext, 1)                                         \
                                                                               \
   /* Symbol */                                                                \
   CPP(SymbolConstructor)                                                      \
@@ -590,6 +654,7 @@
   Handle<Code> InterpreterPushArgsAndCall(
       TailCallMode tail_call_mode,
       CallableType function_type = CallableType::kAny);
+  Handle<Code> InterpreterPushArgsAndConstruct(CallableType function_type);
 
   Code* builtin(Name name) {
     // Code::cast cannot be used here since we access builtins
@@ -643,7 +708,10 @@
       MacroAssembler* masm, TailCallMode tail_call_mode,
       CallableType function_type);
 
-  static void Generate_DatePrototype_GetField(MacroAssembler* masm,
+  static void Generate_InterpreterPushArgsAndConstructImpl(
+      MacroAssembler* masm, CallableType function_type);
+
+  static void Generate_DatePrototype_GetField(CodeStubAssembler* masm,
                                               int field_index);
 
   enum class MathMaxMinKind { kMax, kMin };
diff --git a/src/builtins/ia32/builtins-ia32.cc b/src/builtins/ia32/builtins-ia32.cc
index f31ba6f..9dd621f 100644
--- a/src/builtins/ia32/builtins-ia32.cc
+++ b/src/builtins/ia32/builtins-ia32.cc
@@ -590,6 +590,13 @@
   __ cmp(ecx, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
   __ j(not_equal, &switch_to_different_code_kind);
 
+  // Increment the invocation count for the function.
+  __ EmitLoadTypeFeedbackVector(ecx);
+  __ add(FieldOperand(ecx,
+                      TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize),
+         Immediate(Smi::FromInt(1)));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
@@ -703,20 +710,47 @@
   __ ret(0);
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow,
+                                        bool include_receiver = false) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  ExternalReference real_stack_limit =
+      ExternalReference::address_of_real_stack_limit(masm->isolate());
+  __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
+  // Make scratch2 the space we have left. The stack might already be overflowed
+  // here which will cause scratch2 to become negative.
+  __ mov(scratch2, esp);
+  __ sub(scratch2, scratch1);
+  // Make scratch1 the space we need for the array when it is unrolled onto the
+  // stack.
+  __ mov(scratch1, num_args);
+  if (include_receiver) {
+    __ add(scratch1, Immediate(1));
+  }
+  __ shl(scratch1, kPointerSizeLog2);
+  // Check if the arguments will overflow the stack.
+  __ cmp(scratch2, scratch1);
+  __ j(less_equal, stack_overflow);  // Signed comparison.
+}
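For reference, the stack check emitted above compares the space left below the "real stack limit" with the space the unrolled arguments (plus, optionally, the receiver slot) will occupy. A minimal C++ sketch of that arithmetic, illustrative only and not part of this patch (kPointerSize is a stand-in constant and WouldOverflowStack is a made-up name):

#include <cstdint>

constexpr std::uintptr_t kPointerSize = sizeof(void*);  // stand-in for V8's constant

// Illustrative model of Generate_StackOverflowCheck: returns true when the
// pushed arguments would run past the "real stack limit".
bool WouldOverflowStack(std::uintptr_t sp, std::uintptr_t real_stack_limit,
                        std::uintptr_t num_args, bool include_receiver) {
  // Space we have left; this may already be "negative" if the stack is
  // overflowed, which is why the generated code uses a signed comparison.
  std::intptr_t remaining = static_cast<std::intptr_t>(sp - real_stack_limit);
  // Space we need for the unrolled arguments (and the receiver slot, if any).
  std::intptr_t needed = static_cast<std::intptr_t>(
      (num_args + (include_receiver ? 1 : 0)) * kPointerSize);
  return remaining <= needed;  // mirrors j(less_equal, stack_overflow)
}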
+
 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
-                                         Register array_limit) {
+                                         Register array_limit,
+                                         Register start_address) {
   // ----------- S t a t e -------------
-  //  -- ebx : Pointer to the last argument in the args array.
+  //  -- start_address : Pointer to the last argument in the args array.
   //  -- array_limit : Pointer to one before the first argument in the
   //                   args array.
   // -----------------------------------
   Label loop_header, loop_check;
   __ jmp(&loop_check);
   __ bind(&loop_header);
-  __ Push(Operand(ebx, 0));
-  __ sub(ebx, Immediate(kPointerSize));
+  __ Push(Operand(start_address, 0));
+  __ sub(start_address, Immediate(kPointerSize));
   __ bind(&loop_check);
-  __ cmp(ebx, array_limit);
+  __ cmp(start_address, array_limit);
   __ j(greater, &loop_header, Label::kNear);
 }
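The push loop above walks start_address down to (but not including) array_limit, pushing one pointer-sized slot per iteration. A roughly equivalent C++ sketch, illustrative only (PushInterpreterArgs is a made-up name and the std::vector stands in for the machine stack the generated code pushes onto):

#include <cstdint>
#include <vector>

// Sketch of the Generate_InterpreterPushArgs copy loop: start_address points
// at the last argument, array_limit at one slot before the first argument.
void PushInterpreterArgs(const std::intptr_t* start_address,
                         const std::intptr_t* array_limit,
                         std::vector<std::intptr_t>* stack) {
  for (const std::intptr_t* p = start_address; p > array_limit; --p) {
    stack->push_back(*p);  // __ Push(Operand(start_address, 0))
  }
}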
 
@@ -731,18 +765,26 @@
   //           they are to be pushed onto the stack.
   //  -- edi : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
+  // Compute the expected number of arguments.
+  __ mov(ecx, eax);
+  __ add(ecx, Immediate(1));  // Add one for receiver.
+
+  // Add a stack check before pushing the arguments. We need an extra register
+  // for the check, so push edi onto the stack temporarily. This push itself
+  // might overflow the stack, but that will be detected by the check.
+  __ Push(edi);
+  Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
+  __ Pop(edi);
 
   // Pop return address to allow tail-call after pushing arguments.
   __ Pop(edx);
 
   // Find the address of the last argument.
-  __ mov(ecx, eax);
-  __ add(ecx, Immediate(1));  // Add one for receiver.
   __ shl(ecx, kPointerSizeLog2);
   __ neg(ecx);
   __ add(ecx, ebx);
-
-  Generate_InterpreterPushArgs(masm, ecx);
+  Generate_InterpreterPushArgs(masm, ecx, ebx);
 
   // Call the target.
   __ Push(edx);  // Re-push return address.
@@ -757,43 +799,210 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary register so the return address is back on top.
+    __ Pop(edi);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
+namespace {
+
+// This function modifies start_addr and only reads the contents of the
+// num_args register. scratch1 and scratch2 are used as temporary registers;
+// callers save and restore their original values around the call if needed.
+void Generate_InterpreterPushArgsAndReturnAddress(
+    MacroAssembler* masm, Register num_args, Register start_addr,
+    Register scratch1, Register scratch2, bool receiver_in_args,
+    int num_slots_above_ret_addr, Label* stack_overflow) {
+  // We have to move the return address and the temporary registers above it
+  // before we can copy the arguments onto the stack. To achieve this:
+  // Step 1: Grow the stack by num_args + 1 slots (one extra for the receiver).
+  // Step 2: Move the return address and the values above it to the new top.
+  // Step 3: Copy the arguments into the correct locations.
+  //  current stack    =====>    required stack layout
+  // |             |            | scratch1      | (2) <-- esp(1)
+  // |             |            | ....          | (2)
+  // |             |            | scratch-n     | (2)
+  // |             |            | return addr   | (2)
+  // |             |            | arg N         | (3)
+  // | scratch1    | <-- esp    | ....          |
+  // | ....        |            | arg 1         |
+  // | scratch-n   |            | arg 0         |
+  // | return addr |            | receiver slot |
+
+  // Check for stack overflow before we increment the stack pointer.
+  Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
+                              stack_overflow, true);
+
+// Step 1 - Update the stack pointer. scratch1 already contains the required
+// increment to the stack, i.e. the size of num_args + 1 stack slots, as
+// computed by Generate_StackOverflowCheck above.
+
+#ifdef _MSC_VER
+  // TODO(mythria): Move it to macro assembler.
+  // On Windows, we cannot grow the stack by more than one page (the minimum
+  // page size is 4KB) without touching at least one byte on each intermediate
+  // page. See:
+  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
+  const int page_size = 4 * 1024;
+  Label check_offset, update_stack_pointer;
+  __ bind(&check_offset);
+  __ cmp(scratch1, page_size);
+  __ j(less, &update_stack_pointer);
+  __ sub(esp, Immediate(page_size));
+  // Touch the page before growing the stack any further.
+  __ mov(Operand(esp, 0), Immediate(0));
+  __ sub(scratch1, Immediate(page_size));
+  __ jmp(&check_offset);
+  __ bind(&update_stack_pointer);
+#endif
+
+  __ sub(esp, scratch1);
+
+  // Step 2: Move the return address and the slots above it to their new
+  // locations. Copy from top to bottom so we do not clobber slots when
+  // num_args is 0 or 1, i.e. when source and destination overlap. There is
+  // always at least one extra slot for the receiver, so no extra checks needed.
+  for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
+    __ mov(scratch1,
+           Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
+    __ mov(Operand(esp, i * kPointerSize), scratch1);
+  }
+
+  // Step 3: Copy the arguments to their correct locations.
+  if (receiver_in_args) {
+    __ mov(scratch1, num_args);
+    __ add(scratch1, Immediate(1));
+  } else {
+    // The slot meant for the receiver still contains the return address.
+    // Reset it so that it is not mistaken for an object.
+    __ mov(Operand(esp, num_args, times_pointer_size,
+                   (num_slots_above_ret_addr + 1) * kPointerSize),
+           Immediate(0));
+    __ mov(scratch1, num_args);
+  }
+
+  Label loop_header, loop_check;
+  __ jmp(&loop_check);
+  __ bind(&loop_header);
+  __ mov(scratch2, Operand(start_addr, 0));
+  __ mov(Operand(esp, scratch1, times_pointer_size,
+                 num_slots_above_ret_addr * kPointerSize),
+         scratch2);
+  __ sub(start_addr, Immediate(kPointerSize));
+  __ sub(scratch1, Immediate(1));
+  __ bind(&loop_check);
+  __ cmp(scratch1, Immediate(0));
+  __ j(greater, &loop_header, Label::kNear);
+}
+
+}  // end anonymous namespace
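To make the three steps above concrete, the following hedged C++ model mirrors the data movement performed by Generate_InterpreterPushArgsAndReturnAddress. It is not V8 code: the std::vector models the machine stack with index 0 as the current top of stack, args holds the slots in the order the generated loop reads them from start_addr downward, the function name is made up, and args is assumed to already include the receiver when receiver_in_args is true.

#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical model: on entry the top of 'stack' holds
// num_slots_above_ret_addr scratch slots followed by the return address,
// exactly as in the generated code.
void PushArgsAndReturnAddressModel(std::vector<std::intptr_t>* stack,
                                   const std::vector<std::intptr_t>& args,
                                   std::size_t num_slots_above_ret_addr,
                                   bool receiver_in_args) {
  const std::size_t n = num_slots_above_ret_addr;
  const std::size_t num_args =
      receiver_in_args ? args.size() - 1 : args.size();

  // Step 1: grow the stack by num_args + 1 slots (one extra for the receiver).
  stack->insert(stack->begin(), num_args + 1, std::intptr_t{0});

  // Step 2: slide the scratch slots and the return address to the new top.
  // Copying from the top downwards never clobbers an unread source slot.
  for (std::size_t i = 0; i < n + 1; ++i) {
    (*stack)[i] = (*stack)[i + num_args + 1];
  }

  // Step 3: fill the freed slots. If the receiver is not part of 'args', the
  // deepest freed slot (which still holds a stale copy of the return address)
  // is cleared first, matching the Immediate(0) store in the generated code.
  if (!receiver_in_args) (*stack)[n + num_args + 1] = 0;
  std::size_t write = n + args.size();
  for (std::size_t read = 0; read < args.size(); ++read, --write) {
    (*stack)[write] = args[read];
  }
}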
+
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   //  -- eax : the number of arguments (not including the receiver)
   //  -- edx : the new target
   //  -- edi : the constructor
-  //  -- ebx : the address of the first argument to be pushed. Subsequent
+  //  -- ebx : allocation site feedback if available, undefined otherwise
+  //  -- ecx : the address of the first argument to be pushed. Subsequent
   //           arguments should be consecutive above this, in the same order as
   //           they are to be pushed onto the stack.
   // -----------------------------------
-
-  // Pop return address to allow tail-call after pushing arguments.
-  __ Pop(ecx);
-
-  // Push edi in the slot meant for receiver. We need an extra register
-  // so store edi temporarily on stack.
+  Label stack_overflow;
+  // We need two scratch registers. Push edi and edx onto the stack.
   __ Push(edi);
+  __ Push(edx);
 
-  // Find the address of the last argument.
-  __ mov(edi, eax);
-  __ neg(edi);
-  __ shl(edi, kPointerSizeLog2);
-  __ add(edi, ebx);
+  // Push arguments and move the return address to the top of the stack.
+  // The eax register is read-only. The ecx register will be modified. The edx
+  // and edi registers will be modified but restored to their original values.
+  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
+                                               2, &stack_overflow);
 
-  Generate_InterpreterPushArgs(masm, edi);
+  // Restore edx and edi.
+  __ Pop(edx);
+  __ Pop(edi);
 
-  // Restore the constructor from slot on stack. It was pushed at the slot
-  // meant for receiver.
-  __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
+  __ AssertUndefinedOrAllocationSite(ebx);
+  if (construct_type == CallableType::kJSFunction) {
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ AssertFunction(edi);
 
-  // Re-push return address.
-  __ Push(ecx);
+    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
+    __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
+    __ jmp(ecx);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
 
-  // Call the constructor with unmodified eax, edi, ebi values.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+    // Call the constructor with unmodified eax, edi, edx values.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary registers so the return address is back on top.
+    __ Pop(edx);
+    __ Pop(edi);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax : the number of arguments (not including the receiver)
+  //  -- edx : the target to call checked to be Array function.
+  //  -- ebx : the allocation site feedback
+  //  -- ecx : the address of the first argument to be pushed. Subsequent
+  //           arguments should be consecutive above this, in the same order as
+  //           they are to be pushed onto the stack.
+  // -----------------------------------
+  Label stack_overflow;
+  // We need two scratch registers. Register edi is available; push edx onto
+  // the stack.
+  __ Push(edx);
+
+  // Push arguments and move the return address to the top of the stack.
+  // The eax register is read-only. The ecx register will be modified. The edx
+  // register is saved and restored; edi is used as a scratch register.
+  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, true,
+                                               1, &stack_overflow);
+
+  // Restore edx.
+  __ Pop(edx);
+
+  // The Array constructor expects the constructor in edi, i.e. edx here.
+  __ Move(edi, edx);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary register so the return address is back on top.
+    __ Pop(edx);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1222,61 +1431,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- eax    : number of arguments
-  //  -- edi    : function
-  //  -- esi    : context
-  //  -- esp[0] : return address
-  //  -- esp[4] : receiver
-  // -----------------------------------
-
-  // 1. Load receiver into eax and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ mov(eax, Operand(esp, kPointerSize));
-    __ JumpIfSmi(eax, &receiver_not_date);
-    __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
-    __ j(not_equal, &receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ mov(edx, Operand::StaticVariable(
-                      ExternalReference::date_cache_stamp(masm->isolate())));
-      __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
-      __ j(not_equal, &stamp_mismatch, Label::kNear);
-      __ mov(eax, FieldOperand(
-                      eax, JSDate::kValueOffset + field_index * kPointerSize));
-      __ ret(1 * kPointerSize);
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, ebx);
-    __ mov(Operand(esp, 0), eax);
-    __ mov(Operand(esp, 1 * kPointerSize),
-           Immediate(Smi::FromInt(field_index)));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ ret(1 * kPointerSize);
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Move(ebx, Immediate(0));
-    __ EnterBuiltinFrame(esi, edi, ebx);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax     : argc
@@ -1887,10 +2041,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(ebx);
     __ EnterBuiltinFrame(esi, edi, ebx);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(esi, edi, ebx);
     __ SmiUntag(ebx);
   }
@@ -1954,11 +2107,10 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(ebx);
       __ EnterBuiltinFrame(esi, edi, ebx);
       __ Push(edx);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Pop(edx);
       __ LeaveBuiltinFrame(esi, edi, ebx);
       __ SmiUntag(ebx);
@@ -2009,32 +2161,6 @@
   }
 }
 
-static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
-                                       Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- eax : actual number of arguments
-  //  -- ebx : expected number of arguments
-  //  -- edx : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  ExternalReference real_stack_limit =
-      ExternalReference::address_of_real_stack_limit(masm->isolate());
-  __ mov(edi, Operand::StaticVariable(real_stack_limit));
-  // Make ecx the space we have left. The stack might already be overflowed
-  // here which will cause ecx to become negative.
-  __ mov(ecx, esp);
-  __ sub(ecx, edi);
-  // Make edi the space we need for the array when it is unrolled onto the
-  // stack.
-  __ mov(edi, ebx);
-  __ shl(edi, kPointerSizeLog2);
-  // Check if the arguments will overflow the stack.
-  __ cmp(ecx, edi);
-  __ j(less_equal, stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ push(ebp);
   __ mov(ebp, esp);
@@ -2743,24 +2869,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in eax.
-  Label not_smi;
-  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
-  __ Ret();
-  __ bind(&not_smi);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ Ret();
-  __ bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax : actual number of arguments
@@ -2781,7 +2889,9 @@
   {  // Enough parameters: Actual >= expected.
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // edi is used as a scratch register. It should be restored from the frame
+    // when needed.
+    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
 
     // Copy receiver and all expected arguments.
     const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -2802,7 +2912,9 @@
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // edi is used as a scratch register. It should be restored from the frame
+    // when needed.
+    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
 
     // Remember expected arguments in ecx.
     __ mov(ecx, ebx);
diff --git a/src/builtins/mips/builtins-mips.cc b/src/builtins/mips/builtins-mips.cc
index 003eeb2..a2b6bea 100644
--- a/src/builtins/mips/builtins-mips.cc
+++ b/src/builtins/mips/builtins-mips.cc
@@ -395,10 +395,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(t0);
     __ EnterBuiltinFrame(cp, a1, t0);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, a1, t0);
     __ SmiUntag(t0);
   }
@@ -459,11 +458,10 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(t0);
       __ EnterBuiltinFrame(cp, a1, t0);
       __ Push(a3);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Move(a0, v0);
       __ Pop(a3);
       __ LeaveBuiltinFrame(cp, a1, t0);
@@ -1051,6 +1049,17 @@
   __ Branch(&switch_to_different_code_kind, ne, a0,
             Operand(masm->CodeObject()));  // Self-reference to this code.
 
+  // Increment invocation count for the function.
+  __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
+  __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
+  __ lw(t0, FieldMemOperand(
+                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize));
+  __ Addu(t0, t0, Operand(Smi::FromInt(1)));
+  __ sw(t0, FieldMemOperand(
+                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
@@ -1160,6 +1169,45 @@
   __ Jump(ra);
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
+  // Make scratch1 the space we have left. The stack might already be overflowed
+  // here which will cause scratch1 to become negative.
+  __ subu(scratch1, sp, scratch1);
+  // Check if the arguments will overflow the stack.
+  __ sll(scratch2, num_args, kPointerSizeLog2);
+  // Signed comparison.
+  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register scratch, Register scratch2,
+                                         Label* stack_overflow) {
+  Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
+                              stack_overflow);
+
+  // Find the address of the last argument.
+  __ mov(scratch2, num_args);
+  __ sll(scratch2, scratch2, kPointerSizeLog2);
+  __ Subu(scratch2, index, Operand(scratch2));
+
+  // Push the arguments.
+  Label loop_header, loop_check;
+  __ Branch(&loop_check);
+  __ bind(&loop_header);
+  __ lw(scratch, MemOperand(index));
+  __ Addu(index, index, Operand(-kPointerSize));
+  __ push(scratch);
+  __ bind(&loop_check);
+  __ Branch(&loop_header, gt, index, Operand(scratch2));
+}
+
 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
     MacroAssembler* masm, TailCallMode tail_call_mode,
@@ -1171,21 +1219,12 @@
   //          they are to be pushed onto the stack.
   //  -- a1 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
-  // Find the address of the last argument.
-  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
-  __ sll(a3, a3, kPointerSizeLog2);
-  __ Subu(a3, a2, Operand(a3));
+  __ Addu(t0, a0, Operand(1));  // Add one for receiver.
 
-  // Push the arguments.
-  Label loop_header, loop_check;
-  __ Branch(&loop_check);
-  __ bind(&loop_header);
-  __ lw(t0, MemOperand(a2));
-  __ Addu(a2, a2, Operand(-kPointerSize));
-  __ push(t0);
-  __ bind(&loop_check);
-  __ Branch(&loop_header, gt, a2, Operand(a3));
+  // This function modifies a2, t4 and t1.
+  Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1198,36 +1237,87 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- a0 : argument count (not including receiver)
   // -- a3 : new target
   // -- a1 : constructor to call
-  // -- a2 : address of the first argument
+  // -- a2 : allocation site feedback if available, undefined otherwise.
+  // -- t4 : address of the first argument
   // -----------------------------------
-
-  // Find the address of the last argument.
-  __ sll(t0, a0, kPointerSizeLog2);
-  __ Subu(t0, a2, Operand(t0));
+  Label stack_overflow;
 
   // Push a slot for the receiver.
   __ push(zero_reg);
 
-  // Push the arguments.
-  Label loop_header, loop_check;
-  __ Branch(&loop_check);
-  __ bind(&loop_header);
-  __ lw(t1, MemOperand(a2));
-  __ Addu(a2, a2, Operand(-kPointerSize));
-  __ push(t1);
-  __ bind(&loop_check);
-  __ Branch(&loop_header, gt, a2, Operand(t0));
+  // This function modifies t4, t1 and t0.
+  Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);
 
-  // Call the constructor with a0, a1, and a3 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(a2, t0);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(a1);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+    __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
+    __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Jump(at);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with a0, a1, and a3 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : the number of arguments (not including the receiver)
+  //  -- a1 : the target to call checked to be Array function.
+  //  -- a2 : allocation site feedback.
+  //  -- a3 : the address of the first argument to be pushed. Subsequent
+  //          arguments should be consecutive above this, in the same order as
+  //          they are to be pushed onto the stack.
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ Addu(t0, a0, Operand(1));  // Add one for receiver.
+
+  // This function modifies a3, t4, and t1.
+  Generate_InterpreterPushArgs(masm, t0, a3, t1, t4, &stack_overflow);
+
+  // ArrayConstructor stub expects constructor in a3. Set it here.
+  __ mov(a3, a1);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1805,61 +1895,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- a0    : number of arguments
-  //  -- a1    : function
-  //  -- cp    : context
-  //  -- sp[0] : receiver
-  // -----------------------------------
-
-  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(a0);
-    __ JumpIfSmi(a0, &receiver_not_date);
-    __ GetObjectType(a0, t0, t0);
-    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ Ret(USE_DELAY_SLOT);
-    __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
-      __ lw(a1, MemOperand(a1));
-      __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
-      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
-      __ Ret(USE_DELAY_SLOT);
-      __ lw(v0, FieldMemOperand(
-                    a0, JSDate::kValueOffset +
-                            field_index * kPointerSize));  // In delay slot.
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, t0);
-    __ li(a1, Operand(Smi::FromInt(field_index)));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Push(a0);
-    __ Move(a0, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, a1, a0);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0    : argc
@@ -2115,27 +2150,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- a0 : actual number of arguments
-  //  -- a1 : function (passed through to callee)
-  //  -- a2 : expected number of arguments
-  //  -- a3 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
-  // Make t1 the space we have left. The stack might already be overflowed
-  // here which will cause t1 to become negative.
-  __ subu(t1, sp, t1);
-  // Check if the arguments will overflow the stack.
-  __ sll(at, a2, kPointerSizeLog2);
-  // Signed comparison.
-  __ Branch(stack_overflow, le, t1, Operand(at));
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ sll(a0, a0, kSmiTagSize);
   __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
@@ -2854,28 +2868,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in a0.
-  Label not_smi;
-  __ JumpIfNotSmi(a0, &not_smi);
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_smi);
-
-  Label not_heap_number;
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // State setup as expected by MacroAssembler::InvokePrologue.
   // ----------- S t a t e -------------
@@ -2900,7 +2892,7 @@
     // a3: new target (passed through to callee)
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);
 
     // Calculate copy start address into a0 and copy end address into t1.
     __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
@@ -2930,7 +2922,7 @@
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);
 
     // Calculate copy start address into a0 and copy end address into t3.
     // a0: actual number of arguments as a smi
diff --git a/src/builtins/mips64/builtins-mips64.cc b/src/builtins/mips64/builtins-mips64.cc
index cbdb5c3..f7225f0 100644
--- a/src/builtins/mips64/builtins-mips64.cc
+++ b/src/builtins/mips64/builtins-mips64.cc
@@ -394,10 +394,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(t0);
     __ EnterBuiltinFrame(cp, a1, t0);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, a1, t0);
     __ SmiUntag(t0);
   }
@@ -458,11 +457,10 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(t0);
       __ EnterBuiltinFrame(cp, a1, t0);
       __ Push(a3);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Move(a0, v0);
       __ Pop(a3);
       __ LeaveBuiltinFrame(cp, a1, t0);
@@ -1043,6 +1041,17 @@
   __ Branch(&switch_to_different_code_kind, ne, a0,
             Operand(masm->CodeObject()));  // Self-reference to this code.
 
+  // Increment invocation count for the function.
+  __ ld(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
+  __ ld(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
+  __ ld(a4, FieldMemOperand(
+                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize));
+  __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
+  __ sd(a4, FieldMemOperand(
+                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
@@ -1152,6 +1161,45 @@
   __ Jump(ra);
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
+  // Make scratch1 the space we have left. The stack might already be overflowed
+  // here which will cause scratch1 to become negative.
+  __ dsubu(scratch1, sp, scratch1);
+  // Check if the arguments will overflow the stack.
+  __ dsll(scratch2, num_args, kPointerSizeLog2);
+  // Signed comparison.
+  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register scratch, Register scratch2,
+                                         Label* stack_overflow) {
+  Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
+                              stack_overflow);
+
+  // Find the address of the last argument.
+  __ mov(scratch2, num_args);
+  __ dsll(scratch2, scratch2, kPointerSizeLog2);
+  __ Dsubu(scratch2, index, Operand(scratch2));
+
+  // Push the arguments.
+  Label loop_header, loop_check;
+  __ Branch(&loop_check);
+  __ bind(&loop_header);
+  __ ld(scratch, MemOperand(index));
+  __ Daddu(index, index, Operand(-kPointerSize));
+  __ push(scratch);
+  __ bind(&loop_check);
+  __ Branch(&loop_header, gt, index, Operand(scratch2));
+}
+
 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
     MacroAssembler* masm, TailCallMode tail_call_mode,
@@ -1163,21 +1211,12 @@
   //          they are to be pushed onto the stack.
   //  -- a1 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
-  // Find the address of the last argument.
   __ Daddu(a3, a0, Operand(1));  // Add one for receiver.
-  __ dsll(a3, a3, kPointerSizeLog2);
-  __ Dsubu(a3, a2, Operand(a3));
 
-  // Push the arguments.
-  Label loop_header, loop_check;
-  __ Branch(&loop_check);
-  __ bind(&loop_header);
-  __ ld(t0, MemOperand(a2));
-  __ Daddu(a2, a2, Operand(-kPointerSize));
-  __ push(t0);
-  __ bind(&loop_check);
-  __ Branch(&loop_header, gt, a2, Operand(a3));
+  // This function modifies a2, t0 and a4.
+  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1190,36 +1229,87 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- a0 : argument count (not including receiver)
   // -- a3 : new target
   // -- a1 : constructor to call
-  // -- a2 : address of the first argument
+  // -- a2 : allocation site feedback if available, undefined otherwise.
+  // -- a4 : address of the first argument
   // -----------------------------------
-
-  // Find the address of the last argument.
-  __ dsll(t0, a0, kPointerSizeLog2);
-  __ Dsubu(t0, a2, Operand(t0));
+  Label stack_overflow;
 
   // Push a slot for the receiver.
   __ push(zero_reg);
 
-  // Push the arguments.
-  Label loop_header, loop_check;
-  __ Branch(&loop_check);
-  __ bind(&loop_header);
-  __ ld(t1, MemOperand(a2));
-  __ Daddu(a2, a2, Operand(-kPointerSize));
-  __ push(t1);
-  __ bind(&loop_check);
-  __ Branch(&loop_header, gt, a2, Operand(t0));
+  // This function modifies t0, a4 and a5.
+  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow);
 
-  // Call the constructor with a0, a1, and a3 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(a2, t0);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(a1);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+    __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
+    __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Jump(at);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with a0, a1, and a3 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : the number of arguments (not including the receiver)
+  //  -- a1 : the target to call checked to be Array function.
+  //  -- a2 : allocation site feedback.
+  //  -- a3 : the address of the first argument to be pushed. Subsequent
+  //          arguments should be consecutive above this, in the same order as
+  //          they are to be pushed onto the stack.
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ Daddu(a4, a0, Operand(1));  // Add one for receiver.
+
+  // This function modifies a3, a5 and a6.
+  Generate_InterpreterPushArgs(masm, a4, a3, a5, a6, &stack_overflow);
+
+  // ArrayConstructor stub expects constructor in a3. Set it here.
+  __ mov(a3, a1);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ break_(0xCC);
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1799,61 +1889,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- a0                 : number of arguments
-  //  -- a1                 : function
-  //  -- cp                 : context
-  //  -- sp[0] : receiver
-  // -----------------------------------
-
-  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(a0);
-    __ JumpIfSmi(a0, &receiver_not_date);
-    __ GetObjectType(a0, t0, t0);
-    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ Ret(USE_DELAY_SLOT);
-    __ ld(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
-      __ ld(a1, MemOperand(a1));
-      __ ld(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
-      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
-      __ Ret(USE_DELAY_SLOT);
-      __ ld(v0, FieldMemOperand(
-                    a0, JSDate::kValueOffset +
-                            field_index * kPointerSize));  // In delay slot.
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, t0);
-    __ li(a1, Operand(Smi::FromInt(field_index)));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Push(a0);
-    __ Move(a0, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, a1, a0);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0    : argc
@@ -2109,27 +2144,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- a0 : actual number of arguments
-  //  -- a1 : function (passed through to callee)
-  //  -- a2 : expected number of arguments
-  //  -- a3 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
-  // Make a5 the space we have left. The stack might already be overflowed
-  // here which will cause a5 to become negative.
-  __ dsubu(a5, sp, a5);
-  // Check if the arguments will overflow the stack.
-  __ dsll(at, a2, kPointerSizeLog2);
-  // Signed comparison.
-  __ Branch(stack_overflow, le, a5, Operand(at));
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   // __ sll(a0, a0, kSmiTagSize);
   __ dsll32(a0, a0, 0);
@@ -2847,28 +2861,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in a0.
-  Label not_smi;
-  __ JumpIfNotSmi(a0, &not_smi);
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_smi);
-
-  Label not_heap_number;
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // State setup as expected by MacroAssembler::InvokePrologue.
   // ----------- S t a t e -------------
@@ -2893,7 +2885,7 @@
     // a3: new target (passed through to callee)
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);
 
     // Calculate copy start address into a0 and copy end address into a4.
     __ SmiScale(a0, a0, kPointerSizeLog2);
@@ -2924,7 +2916,7 @@
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);
 
     // Calculate copy start address into a0 and copy end address into a7.
     // a0: actual number of arguments as a smi
diff --git a/src/builtins/ppc/builtins-ppc.cc b/src/builtins/ppc/builtins-ppc.cc
index dfea83f..7e2b82c 100644
--- a/src/builtins/ppc/builtins-ppc.cc
+++ b/src/builtins/ppc/builtins-ppc.cc
@@ -398,10 +398,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(r5);
     __ EnterBuiltinFrame(cp, r4, r5);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, r4, r5);
     __ SmiUntag(r5);
   }
@@ -462,12 +461,11 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(r9);
       __ EnterBuiltinFrame(cp, r4, r9);
       __ Push(r6);
       __ mr(r3, r5);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ mr(r5, r3);
       __ Pop(r6);
       __ LeaveBuiltinFrame(cp, r4, r9);
@@ -1084,6 +1082,18 @@
   __ cmp(r3, ip);
   __ bne(&switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
+  __ LoadP(r7, FieldMemOperand(r7, LiteralsArray::kFeedbackVectorOffset));
+  __ LoadP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+  __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
+  __ StoreP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
+                                            kPointerSize +
+                                        TypeFeedbackVector::kHeaderSize),
+            r0);
+
   // Check function data field is actually a BytecodeArray object.
 
   if (FLAG_debug_code) {
@@ -1187,8 +1197,29 @@
   __ blr();
 }
 
-static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
-                                         Register count, Register scratch) {
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ sub(scratch, sp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
+  __ cmp(scratch, r0);
+  __ ble(stack_overflow);  // Signed comparison.
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register count, Register scratch,
+                                         Label* stack_overflow) {
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);
+
   Label loop;
   __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
   __ mtctr(count);
@@ -1209,12 +1240,13 @@
   //          they are to be pushed onto the stack.
   //  -- r4 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
   // Calculate number of arguments (add one for receiver).
   __ addi(r6, r3, Operand(1));
 
-  // Push the arguments.
-  Generate_InterpreterPushArgs(masm, r5, r6, r7);
+  // Push the arguments. r5, r6, r7 will be modified.
+  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1227,16 +1259,26 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- r3 : argument count (not including receiver)
   // -- r6 : new target
   // -- r4 : constructor to call
-  // -- r5 : address of the first argument
+  // -- r5 : allocation site feedback if available, undefined otherwise.
+  // -- r7 : address of the first argument
   // -----------------------------------
+  Label stack_overflow;
 
   // Push a slot for the receiver to be constructed.
   __ li(r0, Operand::Zero());
@@ -1246,11 +1288,64 @@
   Label skip;
   __ cmpi(r3, Operand::Zero());
   __ beq(&skip);
-  Generate_InterpreterPushArgs(masm, r5, r3, r7);
+  // Push the arguments. r7 and r8 will be modified.
+  Generate_InterpreterPushArgs(masm, r3, r7, r3, r8, &stack_overflow);
   __ bind(&skip);
 
-  // Call the constructor with r3, r4, and r6 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(r5, r8);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(r4);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+    __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
+    // Jump to the construct function.
+    __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Jump(ip);
+
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with r3, r4, and r6 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- r3 : argument count (not including receiver)
+  // -- r4 : target to call verified to be Array function
+  // -- r5 : allocation site feedback if available, undefined otherwise.
+  // -- r6 : address of the first argument
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ addi(r7, r3, Operand(1));  // Add one for receiver.
+
+  // Push the arguments. r6 and r8 will be modified.
+  Generate_InterpreterPushArgs(masm, r7, r6, r7, r8, &stack_overflow);
+
+  // Array constructor expects constructor in r6. It is the same as r4 here.
+  __ mr(r6, r4);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1842,61 +1937,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- r3    : number of arguments
-  //  -- r4    : function
-  //  -- cp    : context
-  //  -- lr    : return address
-  //  -- sp[0] : receiver
-  // -----------------------------------
-
-  // 1. Pop receiver into r3 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(r3);
-    __ JumpIfSmi(r3, &receiver_not_date);
-    __ CompareObjectType(r3, r5, r6, JS_DATE_TYPE);
-    __ bne(&receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
-      __ LoadP(r4, MemOperand(r4));
-      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
-      __ cmp(r4, ip);
-      __ bne(&stamp_mismatch);
-      __ LoadP(r3, FieldMemOperand(
-                       r3, JSDate::kValueOffset + field_index * kPointerSize));
-      __ Ret();
-      __ bind(&stamp_mismatch);
-    }
-    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, r4);
-    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ push(r3);
-    __ LoadSmiLiteral(r3, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, r4, r3);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r3    : argc
@@ -2151,27 +2191,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- r3 : actual number of arguments
-  //  -- r4 : function (passed through to callee)
-  //  -- r5 : expected number of arguments
-  //  -- r6 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
-  // Make r8 the space we have left. The stack might already be overflowed
-  // here which will cause r8 to become negative.
-  __ sub(r8, sp, r8);
-  // Check if the arguments will overflow the stack.
-  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
-  __ cmp(r8, r0);
-  __ ble(stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ SmiTag(r3);
   __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
@@ -2433,7 +2452,9 @@
   Label class_constructor;
   __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
+  __ TestBitMask(r6, FunctionKind::kClassConstructor
+                         << SharedFunctionInfo::kFunctionKindShift,
+                 r0);
   __ bne(&class_constructor, cr0);
 
   // Enter the context of the function; ToObject has to run in the function
@@ -2861,22 +2882,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in r3.
-  STATIC_ASSERT(kSmiTag == 0);
-  __ TestIfSmi(r3, r0);
-  __ Ret(eq, cr0);
-
-  __ CompareObjectType(r3, r4, r4, HEAP_NUMBER_TYPE);
-  // r3: receiver
-  // r4: receiver instance type
-  __ Ret(eq);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r3 : actual number of arguments
@@ -2897,7 +2902,7 @@
   {  // Enough parameters: actual >= expected
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
 
     // Calculate copy start address into r3 and copy end address into r7.
     // r3: actual number of arguments as a smi
@@ -2935,7 +2940,7 @@
     __ bind(&too_few);
 
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
 
     // Calculate copy start address into r0 and copy end address is fp.
     // r3: actual number of arguments as a smi
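
The InterpreterEntryTrampoline changes above make each invocation bump a Smi counter stored in the function's type feedback vector (reached through the literals array). A small stand-alone sketch of that read-modify-write on a tagged slot, using a simplified tag shift and slot index rather than V8's real layout:

#include <cstdint>
#include <vector>

// Sketch only: kSmiShift and the slot index are simplified placeholders,
// not V8's actual tagging scheme or TypeFeedbackVector layout.
constexpr int kSmiShift = 1;
constexpr int kInvocationCountIndex = 0;

inline intptr_t ToSmi(intptr_t value) { return value << kSmiShift; }

// Models the emitted sequence: load the slot, add Smi::FromInt(1), store it.
void IncrementInvocationCount(std::vector<intptr_t>& feedback_vector) {
  intptr_t count = feedback_vector[kInvocationCountIndex];
  feedback_vector[kInvocationCountIndex] = count + ToSmi(1);
}
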
diff --git a/src/builtins/s390/builtins-s390.cc b/src/builtins/s390/builtins-s390.cc
index c68fcc3..91ae2c0 100644
--- a/src/builtins/s390/builtins-s390.cc
+++ b/src/builtins/s390/builtins-s390.cc
@@ -396,10 +396,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(r4);
     __ EnterBuiltinFrame(cp, r3, r4);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(cp, r3, r4);
     __ SmiUntag(r4);
   }
@@ -459,12 +458,11 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(r8);
       __ EnterBuiltinFrame(cp, r3, r8);
       __ Push(r5);
       __ LoadRR(r2, r4);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ LoadRR(r4, r2);
       __ Pop(r5);
       __ LeaveBuiltinFrame(cp, r3, r8);
@@ -1087,6 +1085,17 @@
   __ CmpP(r2, Operand(masm->CodeObject()));  // Self-reference to this code.
   __ bne(&switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+  __ LoadP(r6, FieldMemOperand(r6, LiteralsArray::kFeedbackVectorOffset));
+  __ LoadP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+  __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
+  __ StoreP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
+                                            kPointerSize +
+                                        TypeFeedbackVector::kHeaderSize));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ TestIfSmi(kInterpreterBytecodeArrayRegister);
@@ -1191,8 +1200,29 @@
   __ Ret();
 }
 
-static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
-                                         Register count, Register scratch) {
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ SubP(scratch, sp, scratch);
+  // Check if the arguments will overflow the stack.
+  __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2));
+  __ CmpP(scratch, r0);
+  __ ble(stack_overflow);  // Signed comparison.
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+                                         Register num_args, Register index,
+                                         Register count, Register scratch,
+                                         Label* stack_overflow) {
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);
+
   Label loop;
   __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU
   __ LoadRR(r0, count);
@@ -1215,12 +1245,13 @@
   //          they are to be pushed onto the stack.
   //  -- r3 : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
 
   // Calculate number of arguments (add one for receiver).
   __ AddP(r5, r2, Operand(1));
 
   // Push the arguments.
-  Generate_InterpreterPushArgs(masm, r4, r5, r6);
+  Generate_InterpreterPushArgs(masm, r5, r4, r5, r6, &stack_overflow);
 
   // Call the target.
   if (function_type == CallableType::kJSFunction) {
@@ -1233,16 +1264,26 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   // -- r2 : argument count (not including receiver)
   // -- r5 : new target
   // -- r3 : constructor to call
-  // -- r4 : address of the first argument
+  // -- r4 : allocation site feedback if available, undefined otherwise.
+  // -- r6 : address of the first argument
   // -----------------------------------
+  Label stack_overflow;
 
   // Push a slot for the receiver to be constructed.
   __ LoadImmP(r0, Operand::Zero());
@@ -1252,11 +1293,63 @@
   Label skip;
   __ CmpP(r2, Operand::Zero());
   __ beq(&skip);
-  Generate_InterpreterPushArgs(masm, r4, r2, r6);
+  Generate_InterpreterPushArgs(masm, r2, r6, r2, r7, &stack_overflow);
   __ bind(&skip);
 
-  // Call the constructor with r2, r3, and r5 unmodified.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(r4, r7);
+  if (construct_type == CallableType::kJSFunction) {
+    __ AssertFunction(r3);
+
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
+    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
+    // Jump to the construct function.
+    __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
+    __ Jump(ip);
+
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor with r2, r3, and r5 unmodified.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- r2 : argument count (not including receiver)
+  // -- r3 : target to call verified to be Array function
+  // -- r4 : allocation site feedback if available, undefined otherwise.
+  // -- r5 : address of the first argument
+  // -----------------------------------
+  Label stack_overflow;
+
+  __ AddP(r6, r2, Operand(1));  // Add one for receiver.
+
+  // Push the arguments. r5 and r7 will be modified.
+  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);
+
+  // Array constructor expects constructor in r5. It is the same as r3 here.
+  __ LoadRR(r5, r3);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // Unreachable code.
+    __ bkpt(0);
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1844,62 +1937,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- r2    : number of arguments
-  //  -- r3    : function
-  //  -- cp    : context
-
-  //  -- lr    : return address
-  //  -- sp[0] : receiver
-  // -----------------------------------
-
-  // 1. Pop receiver into r2 and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ Pop(r2);
-    __ JumpIfSmi(r2, &receiver_not_date);
-    __ CompareObjectType(r2, r4, r5, JS_DATE_TYPE);
-    __ bne(&receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ LoadP(r2, FieldMemOperand(r2, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ mov(r3, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
-      __ LoadP(r3, MemOperand(r3));
-      __ LoadP(ip, FieldMemOperand(r2, JSDate::kCacheStampOffset));
-      __ CmpP(r3, ip);
-      __ bne(&stamp_mismatch);
-      __ LoadP(r2, FieldMemOperand(
-                       r2, JSDate::kValueOffset + field_index * kPointerSize));
-      __ Ret();
-      __ bind(&stamp_mismatch);
-    }
-    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, r3);
-    __ LoadSmiLiteral(r3, Smi::FromInt(field_index));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ Ret();
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ push(r2);
-    __ LoadSmiLiteral(r2, Smi::FromInt(0));
-    __ EnterBuiltinFrame(cp, r3, r2);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r2    : argc
@@ -2154,27 +2191,6 @@
   }
 }
 
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
-                                      Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- r2 : actual number of arguments
-  //  -- r3 : function (passed through to callee)
-  //  -- r4 : expected number of arguments
-  //  -- r5 : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(r7, Heap::kRealStackLimitRootIndex);
-  // Make r7 the space we have left. The stack might already be overflowed
-  // here which will cause r7 to become negative.
-  __ SubP(r7, sp, r7);
-  // Check if the arguments will overflow the stack.
-  __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
-  __ CmpP(r7, r0);
-  __ ble(stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ SmiTag(r2);
   __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
@@ -2445,7 +2461,9 @@
   Label class_constructor;
   __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
   __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBitMask(r5, SharedFunctionInfo::kClassConstructorBits, r0);
+  __ TestBitMask(r5, FunctionKind::kClassConstructor
+                         << SharedFunctionInfo::kFunctionKindShift,
+                 r0);
   __ bne(&class_constructor);
 
   // Enter the context of the function; ToObject has to run in the function
@@ -2875,22 +2893,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in r2.
-  STATIC_ASSERT(kSmiTag == 0);
-  __ TestIfSmi(r2);
-  __ Ret(eq);
-
-  __ CompareObjectType(r2, r3, r3, HEAP_NUMBER_TYPE);
-  // r2: receiver
-  // r3: receiver instance type
-  __ Ret(eq);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- r2 : actual number of arguments
@@ -2911,7 +2913,7 @@
   {  // Enough parameters: actual >= expected
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
 
     // Calculate copy start address into r2 and copy end address into r6.
     // r2: actual number of arguments as a smi
@@ -2949,7 +2951,7 @@
     __ bind(&too_few);
 
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentAdaptorStackCheck(masm, &stack_overflow);
+    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
 
     // Calculate copy start address into r0 and copy end address is fp.
     // r2: actual number of arguments as a smi
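
In the ppc and s390 Call builtins above, the class-constructor rejection now tests FunctionKind::kClassConstructor shifted into position within the SharedFunctionInfo compiler hints, instead of the dedicated kClassConstructorBits constant. A sketch of that bit test with made-up shift and kind values:

#include <cstdint>

// Sketch only: placeholder encodings; the real shift and kind bits are
// defined in V8's object headers, not here.
constexpr uint32_t kFunctionKindShift = 5;
constexpr uint32_t kClassConstructorKinds = 0x3;  // placeholder bit pattern

bool IsClassConstructor(uint32_t compiler_hints) {
  // Mirrors: TestBitMask(hints, kClassConstructor << kFunctionKindShift).
  return (compiler_hints & (kClassConstructorKinds << kFunctionKindShift)) != 0;
}
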
diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
index 1536604..beae2d2 100644
--- a/src/builtins/x64/builtins-x64.cc
+++ b/src/builtins/x64/builtins-x64.cc
@@ -672,6 +672,15 @@
   __ cmpp(rcx, FieldOperand(rax, SharedFunctionInfo::kCodeOffset));
   __ j(not_equal, &switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+  __ movp(rcx, FieldOperand(rcx, LiteralsArray::kFeedbackVectorOffset));
+  __ SmiAddConstant(
+      FieldOperand(rcx,
+                   TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                       TypeFeedbackVector::kHeaderSize),
+      Smi::FromInt(1));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
@@ -782,33 +791,44 @@
   __ ret(0);
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
+  __ movp(scratch2, rsp);
+  // Make scratch2 the space we have left. The stack might already be overflowed
+  // here which will cause scratch2 to become negative.
+  __ subp(scratch2, scratch1);
+  // Make scratch1 the space we need for the arguments when they are unrolled
+  // onto the stack.
+  __ movp(scratch1, num_args);
+  __ shlp(scratch1, Immediate(kPointerSizeLog2));
+  // Check if the arguments will overflow the stack.
+  __ cmpp(scratch2, scratch1);
+  __ j(less_equal, stack_overflow);  // Signed comparison.
+}
+
 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
-                                         bool push_receiver) {
-  // ----------- S t a t e -------------
-  //  -- rax : the number of arguments (not including the receiver)
-  //  -- rbx : the address of the first argument to be pushed. Subsequent
-  //           arguments should be consecutive above this, in the same order as
-  //           they are to be pushed onto the stack.
-  // -----------------------------------
-
+                                         Register num_args,
+                                         Register start_address,
+                                         Register scratch) {
   // Find the address of the last argument.
-  __ movp(rcx, rax);
-  if (push_receiver) {
-    __ addp(rcx, Immediate(1));  // Add one for receiver.
-  }
-
-  __ shlp(rcx, Immediate(kPointerSizeLog2));
-  __ negp(rcx);
-  __ addp(rcx, rbx);
+  __ Move(scratch, num_args);
+  __ shlp(scratch, Immediate(kPointerSizeLog2));
+  __ negp(scratch);
+  __ addp(scratch, start_address);
 
   // Push the arguments.
   Label loop_header, loop_check;
   __ j(always, &loop_check);
   __ bind(&loop_header);
-  __ Push(Operand(rbx, 0));
-  __ subp(rbx, Immediate(kPointerSize));
+  __ Push(Operand(start_address, 0));
+  __ subp(start_address, Immediate(kPointerSize));
   __ bind(&loop_check);
-  __ cmpp(rbx, rcx);
+  __ cmpp(start_address, scratch);
   __ j(greater, &loop_header, Label::kNear);
 }
 
@@ -823,11 +843,20 @@
   //           they are to be pushed onto the stack.
   //  -- rdi : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
+
+  // Number of values to be pushed.
+  __ Move(rcx, rax);
+  __ addp(rcx, Immediate(1));  // Add one for receiver.
+
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, rcx, rdx, r8, &stack_overflow);
 
   // Pop return address to allow tail-call after pushing arguments.
   __ PopReturnAddressTo(kScratchRegister);
 
-  Generate_InterpreterPushArgs(masm, true);
+  // rbx and rdx will be modified.
+  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
 
   // Call the target.
   __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
@@ -842,19 +871,33 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  // Throw stack overflow exception.
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   //  -- rax : the number of arguments (not including the receiver)
   //  -- rdx : the new target (either the same as the constructor or
   //           the JSFunction on which new was invoked initially)
   //  -- rdi : the constructor to call (can be any Object)
-  //  -- rbx : the address of the first argument to be pushed. Subsequent
+  //  -- rbx : the allocation site feedback if available, undefined otherwise
+  //  -- rcx : the address of the first argument to be pushed. Subsequent
   //           arguments should be consecutive above this, in the same order as
   //           they are to be pushed onto the stack.
   // -----------------------------------
+  Label stack_overflow;
+
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, rax, r8, r9, &stack_overflow);
 
   // Pop return address to allow tail-call after pushing arguments.
   __ PopReturnAddressTo(kScratchRegister);
@@ -862,13 +905,80 @@
   // Push slot for the receiver to be constructed.
   __ Push(Immediate(0));
 
-  Generate_InterpreterPushArgs(masm, false);
+  // rcx and r8 will be modified.
+  Generate_InterpreterPushArgs(masm, rax, rcx, r8);
 
   // Push return address in preparation for the tail-call.
   __ PushReturnAddressFrom(kScratchRegister);
 
-  // Call the constructor (rax, rdx, rdi passed on).
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  __ AssertUndefinedOrAllocationSite(rbx);
+  if (construct_type == CallableType::kJSFunction) {
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ AssertFunction(rdi);
+
+    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
+    __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
+    // Jump to the constructor function (rax, rbx, rdx passed on).
+    __ jmp(rcx);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
+    // Call the constructor (rax, rdx, rdi passed on).
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  // Throw stack overflow exception.
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // This should be unreachable.
+    __ int3();
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rax : the number of arguments (not including the receiver)
+  //  -- rdx : the target to call checked to be Array function.
+  //  -- rbx : the allocation site feedback
+  //  -- rcx : the address of the first argument to be pushed. Subsequent
+  //           arguments should be consecutive above this, in the same order as
+  //           they are to be pushed onto the stack.
+  // -----------------------------------
+  Label stack_overflow;
+
+  // Number of values to be pushed.
+  __ Move(r8, rax);
+  __ addp(r8, Immediate(1));  // Add one for receiver.
+
+  // Add a stack check before pushing arguments.
+  Generate_StackOverflowCheck(masm, r8, rdi, r9, &stack_overflow);
+
+  // Pop return address to allow tail-call after pushing arguments.
+  __ PopReturnAddressTo(kScratchRegister);
+
+  // rcx and rdi will be modified.
+  Generate_InterpreterPushArgs(masm, r8, rcx, rdi);
+
+  // Push return address in preparation for the tail-call.
+  __ PushReturnAddressFrom(kScratchRegister);
+
+  // Array constructor expects constructor in rdi. It is the same as rdx here.
+  __ Move(rdi, rdx);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  // Throw stack overflow exception.
+  __ bind(&stack_overflow);
+  {
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1275,60 +1385,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- rax    : number of arguments
-  //  -- rdi    : function
-  //  -- rsi    : context
-  //  -- rsp[0] : return address
-  //  -- rsp[8] : receiver
-  // -----------------------------------
-
-  // 1. Load receiver into rax and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    StackArgumentsAccessor args(rsp, 0);
-    __ movp(rax, args.GetReceiverOperand());
-    __ JumpIfSmi(rax, &receiver_not_date);
-    __ CmpObjectType(rax, JS_DATE_TYPE, rbx);
-    __ j(not_equal, &receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ movp(rax, FieldOperand(rax, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ Load(rdx, ExternalReference::date_cache_stamp(masm->isolate()));
-      __ cmpp(rdx, FieldOperand(rax, JSDate::kCacheStampOffset));
-      __ j(not_equal, &stamp_mismatch, Label::kNear);
-      __ movp(rax, FieldOperand(
-                       rax, JSDate::kValueOffset + field_index * kPointerSize));
-      __ ret(1 * kPointerSize);
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2);
-    __ Move(arg_reg_1, rax);
-    __ Move(arg_reg_2, Smi::FromInt(field_index));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ ret(1 * kPointerSize);
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Move(rbx, Smi::FromInt(0));
-    __ EnterBuiltinFrame(rsi, rdi, rbx);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax     : argc
@@ -1948,9 +2004,8 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ EnterBuiltinFrame(rsi, rdi, r8);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(rsi, rdi, r8);
   }
   __ jmp(&drop_frame_and_ret, Label::kNear);
@@ -2017,11 +2072,10 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ EnterBuiltinFrame(rsi, rdi, r8);
       __ Push(rdx);
       __ Move(rax, rbx);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Move(rbx, rax);
       __ Pop(rdx);
       __ LeaveBuiltinFrame(rsi, rdi, r8);
@@ -2061,32 +2115,6 @@
   }
 }
 
-static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
-                                       Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- rax : actual number of arguments
-  //  -- rbx : expected number of arguments
-  //  -- rdx : new target (passed through to callee)
-  //  -- rdi : function (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  Label okay;
-  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
-  __ movp(rcx, rsp);
-  // Make rcx the space we have left. The stack might already be overflowed
-  // here which will cause rcx to become negative.
-  __ subp(rcx, r8);
-  // Make r8 the space we need for the array when it is unrolled onto the
-  // stack.
-  __ movp(r8, rbx);
-  __ shlp(r8, Immediate(kPointerSizeLog2));
-  // Check if the arguments will overflow the stack.
-  __ cmpp(rcx, r8);
-  __ j(less_equal, stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ pushq(rbp);
   __ movp(rbp, rsp);
@@ -2161,25 +2189,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in rax.
-  Label not_smi;
-  __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
-  __ Ret();
-  __ bind(&not_smi);
-
-  Label not_heap_number;
-  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
-                 Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ Ret();
-  __ bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rax : actual number of arguments
@@ -2201,7 +2210,8 @@
   {  // Enough parameters: Actual >= expected.
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // The registers rcx and r8 will be modified. The register rbx is only read.
+    Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
 
     // Copy receiver and all expected arguments.
     const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -2222,7 +2232,8 @@
     __ bind(&too_few);
 
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // The registers rcx and r8 will be modified. The register rbx is only read.
+    Generate_StackOverflowCheck(masm, rbx, rcx, r8, &stack_overflow);
 
     // Copy receiver and all actual arguments.
     const int offset = StandardFrameConstants::kCallerSPOffset;
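
The reworked x64 Generate_InterpreterPushArgs above takes the argument count, start address and a scratch register explicitly: it computes a limit num_args slots below the start address and pushes while walking down to it. The same loop sketched on the host, with a std::vector standing in for the machine stack:

#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch only: `start` points at the first slot to push and the remaining
// slots sit at successively lower addresses, mirroring the builtin's walk.
void PushArgs(const intptr_t* start, std::size_t num_args,
              std::vector<intptr_t>& stack) {
  const intptr_t* limit = start - num_args;  // one slot below the last push
  for (const intptr_t* p = start; p > limit; --p) {
    stack.push_back(*p);  // __ Push(Operand(start_address, 0)) in the builtin
  }
}
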
diff --git a/src/builtins/x87/builtins-x87.cc b/src/builtins/x87/builtins-x87.cc
index 9c46f20..8e096a3 100644
--- a/src/builtins/x87/builtins-x87.cc
+++ b/src/builtins/x87/builtins-x87.cc
@@ -591,6 +591,13 @@
   __ cmp(ecx, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
   __ j(not_equal, &switch_to_different_code_kind);
 
+  // Increment invocation count for the function.
+  __ EmitLoadTypeFeedbackVector(ecx);
+  __ add(FieldOperand(ecx,
+                      TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize),
+         Immediate(Smi::FromInt(1)));
+
   // Check function data field is actually a BytecodeArray object.
   if (FLAG_debug_code) {
     __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
@@ -704,20 +711,47 @@
   __ ret(0);
 }
 
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+                                        Register scratch1, Register scratch2,
+                                        Label* stack_overflow,
+                                        bool include_receiver = false) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  ExternalReference real_stack_limit =
+      ExternalReference::address_of_real_stack_limit(masm->isolate());
+  __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
+  // Make scratch2 the space we have left. The stack might already be overflowed
+  // here which will cause scratch2 to become negative.
+  __ mov(scratch2, esp);
+  __ sub(scratch2, scratch1);
+  // Make scratch1 the space we need for the arguments when they are unrolled
+  // onto the stack.
+  __ mov(scratch1, num_args);
+  if (include_receiver) {
+    __ add(scratch1, Immediate(1));
+  }
+  __ shl(scratch1, kPointerSizeLog2);
+  // Check if the arguments will overflow the stack.
+  __ cmp(scratch2, scratch1);
+  __ j(less_equal, stack_overflow);  // Signed comparison.
+}
+
 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
-                                         Register array_limit) {
+                                         Register array_limit,
+                                         Register start_address) {
   // ----------- S t a t e -------------
-  //  -- ebx : Pointer to the last argument in the args array.
+  //  -- start_address : Pointer to the last argument in the args array.
   //  -- array_limit : Pointer to one before the first argument in the
   //                   args array.
   // -----------------------------------
   Label loop_header, loop_check;
   __ jmp(&loop_check);
   __ bind(&loop_header);
-  __ Push(Operand(ebx, 0));
-  __ sub(ebx, Immediate(kPointerSize));
+  __ Push(Operand(start_address, 0));
+  __ sub(start_address, Immediate(kPointerSize));
   __ bind(&loop_check);
-  __ cmp(ebx, array_limit);
+  __ cmp(start_address, array_limit);
   __ j(greater, &loop_header, Label::kNear);
 }
 
@@ -732,18 +766,26 @@
   //           they are to be pushed onto the stack.
   //  -- edi : the target to call (can be any Object).
   // -----------------------------------
+  Label stack_overflow;
+  // Number of values to be pushed.
+  __ mov(ecx, eax);
+  __ add(ecx, Immediate(1));  // Add one for receiver.
+
+  // Add a stack check before pushing the arguments. We need an extra register
+  // to perform the check, so push edi onto the stack temporarily. This push
+  // might itself overflow the stack, but that is detected by the check below.
+  __ Push(edi);
+  Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
+  __ Pop(edi);
 
   // Pop return address to allow tail-call after pushing arguments.
   __ Pop(edx);
 
   // Find the address of the last argument.
-  __ mov(ecx, eax);
-  __ add(ecx, Immediate(1));  // Add one for receiver.
   __ shl(ecx, kPointerSizeLog2);
   __ neg(ecx);
   __ add(ecx, ebx);
-
-  Generate_InterpreterPushArgs(masm, ecx);
+  Generate_InterpreterPushArgs(masm, ecx, ebx);
 
   // Call the target.
   __ Push(edx);  // Re-push return address.
@@ -758,43 +800,210 @@
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
   }
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary register, so that return address is on top of stack.
+    __ Pop(edi);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
+namespace {
+
+// This function modifies start_addr, and only reads the contents of the
+// num_args register. scratch1 and scratch2 are used as temporary registers;
+// their original values are restored after use.
+void Generate_InterpreterPushArgsAndReturnAddress(
+    MacroAssembler* masm, Register num_args, Register start_addr,
+    Register scratch1, Register scratch2, bool receiver_in_args,
+    int num_slots_above_ret_addr, Label* stack_overflow) {
+  // We have to move return address and the temporary registers above it
+  // before we can copy arguments onto the stack. To achieve this:
+  // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
+  // Step 2: Move the return address and values above it to the top of stack.
+  // Step 3: Copy the arguments into the correct locations.
+  //  current stack    =====>    required stack layout
+  // |             |            | scratch1      | (2) <-- esp(1)
+  // |             |            | ....          | (2)
+  // |             |            | scratch-n     | (2)
+  // |             |            | return addr   | (2)
+  // |             |            | arg N         | (3)
+  // | scratch1    | <-- esp    | ....          |
+  // | ....        |            | arg 1         |
+  // | scratch-n   |            | arg 0         |
+  // | return addr |            | receiver slot |
+
+  // Check for stack overflow before we increment the stack pointer.
+  Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
+                              stack_overflow, true);
+
+// Step 1 - Update the stack pointer. scratch1 already contains the required
+// increment to the stack, i.e. num_args + 1 stack slots. This was computed in
+// Generate_StackOverflowCheck above.
+
+#ifdef _MSC_VER
+  // TODO(mythria): Move it to macro assembler.
+  // In windows, we cannot increment the stack size by more than one page
+  // (mimimum page size is 4KB) without accessing at least one byte on the
+  // page. Check this:
+  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
+  const int page_size = 4 * 1024;
+  Label check_offset, update_stack_pointer;
+  __ bind(&check_offset);
+  __ cmp(scratch1, page_size);
+  __ j(less, &update_stack_pointer);
+  __ sub(esp, Immediate(page_size));
+  // Just to touch the page, before we increment further.
+  __ mov(Operand(esp, 0), Immediate(0));
+  __ sub(scratch1, Immediate(page_size));
+  __ jmp(&check_offset);
+  __ bind(&update_stack_pointer);
+#endif
+
+  __ sub(esp, scratch1);
+
+  // Step 2: Move the return address and the slots above it to their correct
+  // locations. Move from top to bottom, otherwise we may overwrite entries
+  // when num_args is 0 or 1, i.e. when the source and destination overlap. We
+  // always have at least one extra slot for the receiver, so no extra checks
+  // are required to avoid overwriting during the copy.
+  for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
+    __ mov(scratch1,
+           Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
+    __ mov(Operand(esp, i * kPointerSize), scratch1);
+  }
+
+  // Step 3: Copy the arguments into their correct locations.
+  if (receiver_in_args) {
+    __ mov(scratch1, num_args);
+    __ add(scratch1, Immediate(1));
+  } else {
+    // Slot meant for receiver contains return address. Reset it so that
+    // we will not incorrectly interpret return address as an object.
+    __ mov(Operand(esp, num_args, times_pointer_size,
+                   (num_slots_above_ret_addr + 1) * kPointerSize),
+           Immediate(0));
+    __ mov(scratch1, num_args);
+  }
+
+  Label loop_header, loop_check;
+  __ jmp(&loop_check);
+  __ bind(&loop_header);
+  __ mov(scratch2, Operand(start_addr, 0));
+  __ mov(Operand(esp, scratch1, times_pointer_size,
+                 num_slots_above_ret_addr * kPointerSize),
+         scratch2);
+  __ sub(start_addr, Immediate(kPointerSize));
+  __ sub(scratch1, Immediate(1));
+  __ bind(&loop_check);
+  __ cmp(scratch1, Immediate(0));
+  __ j(greater, &loop_header, Label::kNear);
+}
+
+}  // end anonymous namespace
+
 // static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+    MacroAssembler* masm, CallableType construct_type) {
   // ----------- S t a t e -------------
   //  -- eax : the number of arguments (not including the receiver)
   //  -- edx : the new target
   //  -- edi : the constructor
-  //  -- ebx : the address of the first argument to be pushed. Subsequent
+  //  -- ebx : allocation site feedback (if available or undefined)
+  //  -- ecx : the address of the first argument to be pushed. Subsequent
   //           arguments should be consecutive above this, in the same order as
   //           they are to be pushed onto the stack.
   // -----------------------------------
-
-  // Pop return address to allow tail-call after pushing arguments.
-  __ Pop(ecx);
-
-  // Push edi in the slot meant for receiver. We need an extra register
-  // so store edi temporarily on stack.
+  Label stack_overflow;
+  // We need two scratch registers. Push edi and edx onto the stack.
   __ Push(edi);
+  __ Push(edx);
 
-  // Find the address of the last argument.
-  __ mov(edi, eax);
-  __ neg(edi);
-  __ shl(edi, kPointerSizeLog2);
-  __ add(edi, ebx);
+  // Push arguments and move return address to the top of stack.
+  // The eax register is readonly. The ecx register will be modified. The edx
+  // and edi registers will be modified but restored to their original values.
+  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
+                                               2, &stack_overflow);
 
-  Generate_InterpreterPushArgs(masm, edi);
+  // Restore edi and edx
+  __ Pop(edx);
+  __ Pop(edi);
 
-  // Restore the constructor from slot on stack. It was pushed at the slot
-  // meant for receiver.
-  __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
+  __ AssertUndefinedOrAllocationSite(ebx);
+  if (construct_type == CallableType::kJSFunction) {
+    // Tail call to the function-specific construct stub (still in the caller
+    // context at this point).
+    __ AssertFunction(edi);
 
-  // Re-push return address.
-  __ Push(ecx);
+    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
+    __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
+    __ jmp(ecx);
+  } else {
+    DCHECK_EQ(construct_type, CallableType::kAny);
 
-  // Call the constructor with unmodified eax, edi, ebi values.
-  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+    // Call the constructor with unmodified eax, edi, edx values.
+    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+  }
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary registers, so that return address is on top of stack.
+    __ Pop(edx);
+    __ Pop(edi);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax : the number of arguments (not including the receiver)
+  //  -- edx : the target to call checked to be Array function.
+  //  -- ebx : the allocation site feedback
+  //  -- ecx : the address of the first argument to be pushed. Subsequent
+  //           arguments should be consecutive above this, in the same order as
+  //           they are to be pushed onto the stack.
+  // -----------------------------------
+  Label stack_overflow;
+  // We need two scratch registers. Register edi is available; push edx onto
+  // the stack.
+  __ Push(edx);
+
+  // Push arguments and move return address to the top of stack.
+  // The eax register is readonly. The ecx register will be modified. The edx
+  // and edi registers will be modified but restored to their original values.
+  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, true,
+                                               1, &stack_overflow);
+
+  // Restore edx.
+  __ Pop(edx);
+
+  // Array constructor expects constructor in edi. It is the same as edx here.
+  __ Move(edi, edx);
+
+  ArrayConstructorStub stub(masm->isolate());
+  __ TailCallStub(&stub);
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary register, so that return address is on top of stack.
+    __ Pop(edx);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
 }
 
 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1223,61 +1432,6 @@
 }
 
 // static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
-                                               int field_index) {
-  // ----------- S t a t e -------------
-  //  -- eax    : number of arguments
-  //  -- edi    : function
-  //  -- esi    : context
-  //  -- esp[0] : return address
-  //  -- esp[4] : receiver
-  // -----------------------------------
-
-  // 1. Load receiver into eax and check that it's actually a JSDate object.
-  Label receiver_not_date;
-  {
-    __ mov(eax, Operand(esp, kPointerSize));
-    __ JumpIfSmi(eax, &receiver_not_date);
-    __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
-    __ j(not_equal, &receiver_not_date);
-  }
-
-  // 2. Load the specified date field, falling back to the runtime as necessary.
-  if (field_index == JSDate::kDateValue) {
-    __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
-  } else {
-    if (field_index < JSDate::kFirstUncachedField) {
-      Label stamp_mismatch;
-      __ mov(edx, Operand::StaticVariable(
-                      ExternalReference::date_cache_stamp(masm->isolate())));
-      __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
-      __ j(not_equal, &stamp_mismatch, Label::kNear);
-      __ mov(eax, FieldOperand(
-                      eax, JSDate::kValueOffset + field_index * kPointerSize));
-      __ ret(1 * kPointerSize);
-      __ bind(&stamp_mismatch);
-    }
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ PrepareCallCFunction(2, ebx);
-    __ mov(Operand(esp, 0), eax);
-    __ mov(Operand(esp, 1 * kPointerSize),
-           Immediate(Smi::FromInt(field_index)));
-    __ CallCFunction(
-        ExternalReference::get_date_field_function(masm->isolate()), 2);
-  }
-  __ ret(1 * kPointerSize);
-
-  // 3. Raise a TypeError if the receiver is not a date.
-  __ bind(&receiver_not_date);
-  {
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Move(ebx, Immediate(0));
-    __ EnterBuiltinFrame(esi, edi, ebx);
-    __ CallRuntime(Runtime::kThrowNotDateError);
-  }
-}
-
-// static
 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax     : argc
@@ -1904,10 +2058,9 @@
   __ bind(&to_string);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    ToStringStub stub(masm->isolate());
     __ SmiTag(ebx);
     __ EnterBuiltinFrame(esi, edi, ebx);
-    __ CallStub(&stub);
+    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
     __ LeaveBuiltinFrame(esi, edi, ebx);
     __ SmiUntag(ebx);
   }
@@ -1971,11 +2124,10 @@
     __ bind(&convert);
     {
       FrameScope scope(masm, StackFrame::MANUAL);
-      ToStringStub stub(masm->isolate());
       __ SmiTag(ebx);
       __ EnterBuiltinFrame(esi, edi, ebx);
       __ Push(edx);
-      __ CallStub(&stub);
+      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
       __ Pop(edx);
       __ LeaveBuiltinFrame(esi, edi, ebx);
       __ SmiUntag(ebx);
@@ -2026,32 +2178,6 @@
   }
 }
 
-static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
-                                       Label* stack_overflow) {
-  // ----------- S t a t e -------------
-  //  -- eax : actual number of arguments
-  //  -- ebx : expected number of arguments
-  //  -- edx : new target (passed through to callee)
-  // -----------------------------------
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  ExternalReference real_stack_limit =
-      ExternalReference::address_of_real_stack_limit(masm->isolate());
-  __ mov(edi, Operand::StaticVariable(real_stack_limit));
-  // Make ecx the space we have left. The stack might already be overflowed
-  // here which will cause ecx to become negative.
-  __ mov(ecx, esp);
-  __ sub(ecx, edi);
-  // Make edi the space we need for the array when it is unrolled onto the
-  // stack.
-  __ mov(edi, ebx);
-  __ shl(edi, kPointerSizeLog2);
-  // Check if the arguments will overflow the stack.
-  __ cmp(ecx, edi);
-  __ j(less_equal, stack_overflow);  // Signed comparison.
-}
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ push(ebp);
   __ mov(ebp, esp);
@@ -2767,24 +2893,6 @@
   __ TailCallRuntime(Runtime::kAbort);
 }
 
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
-  // The ToNumber stub takes one argument in eax.
-  Label not_smi;
-  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
-  __ Ret();
-  __ bind(&not_smi);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ Ret();
-  __ bind(&not_heap_number);
-
-  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
-          RelocInfo::CODE_TARGET);
-}
-
 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax : actual number of arguments
@@ -2805,7 +2913,9 @@
   {  // Enough parameters: Actual >= expected.
     __ bind(&enough);
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // edi is used as a scratch register. It should be restored from the frame
+    // when needed.
+    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
 
     // Copy receiver and all expected arguments.
     const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -2825,9 +2935,10 @@
 
   {  // Too few parameters: Actual < expected.
     __ bind(&too_few);
-
     EnterArgumentsAdaptorFrame(masm);
-    ArgumentsAdaptorStackCheck(masm, &stack_overflow);
+    // edi is used as a scratch register. It should be restored from the frame
+    // when needed.
+    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
 
     // Remember expected arguments in ecx.
     __ mov(ecx, ebx);
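
The removed ArgumentsAdaptorStackCheck (and, by its name, the shared Generate_StackOverflowCheck that replaces it) checks how much room is left between the stack pointer and the "real" stack limit versus how much the expected arguments will occupy. A minimal standalone sketch of that comparison in C++, with stand-in types rather than V8 code:

  #include <cstddef>
  #include <cstdint>

  constexpr size_t kPointerSize = sizeof(void*);  // matches the kPointerSizeLog2 shift

  // Returns true if pushing `expected_argc` pointer-sized arguments would cross
  // the "real" stack limit (interruptions are deliberately not considered).
  bool WouldOverflowStack(uintptr_t stack_pointer, uintptr_t real_stack_limit,
                          size_t expected_argc) {
    // Space left on the stack; it may already be overflowed, so the value can
    // be negative and the comparison must be signed, like j(less_equal, ...).
    intptr_t space_left = static_cast<intptr_t>(stack_pointer) -
                          static_cast<intptr_t>(real_stack_limit);
    intptr_t space_needed = static_cast<intptr_t>(expected_argc * kPointerSize);
    return space_left <= space_needed;
  }
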
diff --git a/src/checks.h b/src/checks.h
index 80404e8..0d7eed3 100644
--- a/src/checks.h
+++ b/src/checks.h
@@ -7,6 +7,7 @@
 
 #include "include/v8.h"
 #include "src/base/logging.h"
+#include "src/globals.h"
 
 namespace v8 {
 
@@ -17,10 +18,10 @@
 #ifdef ENABLE_SLOW_DCHECKS
 #define SLOW_DCHECK(condition) \
   CHECK(!v8::internal::FLAG_enable_slow_asserts || (condition))
-extern bool FLAG_enable_slow_asserts;
+V8_EXPORT_PRIVATE extern bool FLAG_enable_slow_asserts;
 #else
 #define SLOW_DCHECK(condition) ((void) 0)
-const bool FLAG_enable_slow_asserts = false;
+static const bool FLAG_enable_slow_asserts = false;
 #endif
 
 }  // namespace internal
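
With this header change, the non-ENABLE_SLOW_DCHECKS configuration sees a static const false flag, so SLOW_DCHECK can be folded away entirely, while the slow-check build exports the real flag. A tiny standalone sketch of the same flag-gated assertion shape (the flag and CHECK below are placeholders, not the V8 definitions):

  #include <cassert>

  static bool FLAG_enable_slow_asserts_demo = false;  // stand-in flag

  #define CHECK_DEMO(cond) assert(cond)
  // Short-circuit: when the flag is off the condition is never evaluated and
  // the whole expression is trivially true, so the optimizer can drop it.
  #define SLOW_DCHECK_DEMO(condition) \
    CHECK_DEMO(!FLAG_enable_slow_asserts_demo || (condition))

  int main() {
    SLOW_DCHECK_DEMO(2 + 2 == 4);  // no-op while the flag is off
    FLAG_enable_slow_asserts_demo = true;
    SLOW_DCHECK_DEMO(2 + 2 == 4);  // now actually evaluated
  }
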
diff --git a/src/code-events.h b/src/code-events.h
index 9ae1cae..94f7dbd 100644
--- a/src/code-events.h
+++ b/src/code-events.h
@@ -7,6 +7,7 @@
 
 #include <unordered_set>
 
+#include "src/base/platform/mutex.h"
 #include "src/globals.h"
 
 namespace v8 {
@@ -114,13 +115,16 @@
   CodeEventDispatcher() {}
 
   bool AddListener(CodeEventListener* listener) {
+    base::LockGuard<base::Mutex> guard(&mutex_);
     return listeners_.insert(listener).second;
   }
   void RemoveListener(CodeEventListener* listener) {
+    base::LockGuard<base::Mutex> guard(&mutex_);
     listeners_.erase(listener);
   }
 
-#define CODE_EVENT_DISPATCH(code) \
+#define CODE_EVENT_DISPATCH(code)              \
+  base::LockGuard<base::Mutex> guard(&mutex_); \
   for (auto it = listeners_.begin(); it != listeners_.end(); ++it) (*it)->code
 
   void CodeCreateEvent(LogEventsAndTags tag, AbstractCode* code,
@@ -173,6 +177,7 @@
 
  private:
   std::unordered_set<CodeEventListener*> listeners_;
+  base::Mutex mutex_;
 
   DISALLOW_COPY_AND_ASSIGN(CodeEventDispatcher);
 };
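
The new base::Mutex together with base::LockGuard makes AddListener, RemoveListener, and every CODE_EVENT_DISPATCH expansion take the same lock. The same guarded-set pattern expressed with the standard library (Listener is a placeholder interface, not V8's CodeEventListener):

  #include <mutex>
  #include <unordered_set>

  struct Listener {
    virtual void CodeCreateEvent() = 0;
    virtual ~Listener() = default;
  };

  class Dispatcher {
   public:
    bool AddListener(Listener* listener) {
      std::lock_guard<std::mutex> guard(mutex_);
      return listeners_.insert(listener).second;
    }
    void RemoveListener(Listener* listener) {
      std::lock_guard<std::mutex> guard(mutex_);
      listeners_.erase(listener);
    }
    void CodeCreateEvent() {
      // Dispatch holds the lock too, mirroring the guard added to the macro.
      std::lock_guard<std::mutex> guard(mutex_);
      for (Listener* listener : listeners_) listener->CodeCreateEvent();
    }

   private:
    std::unordered_set<Listener*> listeners_;
    std::mutex mutex_;
  };
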
diff --git a/src/code-factory.cc b/src/code-factory.cc
index 018f21d..7448591 100644
--- a/src/code-factory.cc
+++ b/src/code-factory.cc
@@ -82,6 +82,10 @@
 
 // static
 Callable CodeFactory::KeyedLoadIC_Megamorphic(Isolate* isolate) {
+  if (FLAG_tf_load_ic_stub) {
+    return Callable(isolate->builtins()->KeyedLoadIC_Megamorphic_TF(),
+                    LoadWithVectorDescriptor(isolate));
+  }
   return Callable(isolate->builtins()->KeyedLoadIC_Megamorphic(),
                   LoadWithVectorDescriptor(isolate));
 }
@@ -104,6 +108,10 @@
 
 // static
 Callable CodeFactory::StoreIC(Isolate* isolate, LanguageMode language_mode) {
+  if (FLAG_tf_store_ic_stub) {
+    StoreICTrampolineTFStub stub(isolate, StoreICState(language_mode));
+    return make_callable(stub);
+  }
   StoreICTrampolineStub stub(isolate, StoreICState(language_mode));
   return make_callable(stub);
 }
@@ -111,6 +119,10 @@
 // static
 Callable CodeFactory::StoreICInOptimizedCode(Isolate* isolate,
                                              LanguageMode language_mode) {
+  if (FLAG_tf_store_ic_stub) {
+    StoreICTFStub stub(isolate, StoreICState(language_mode));
+    return make_callable(stub);
+  }
   StoreICStub stub(isolate, StoreICState(language_mode));
   return make_callable(stub);
 }
@@ -179,14 +191,14 @@
 
 // static
 Callable CodeFactory::ToString(Isolate* isolate) {
-  ToStringStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->ToString(),
+                  TypeConversionDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::ToName(Isolate* isolate) {
-  ToNameStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->ToName(),
+                  TypeConversionDescriptor(isolate));
 }
 
 // static
@@ -228,6 +240,12 @@
 }
 
 // static
+Callable CodeFactory::OrdinaryHasInstance(Isolate* isolate) {
+  return Callable(isolate->builtins()->OrdinaryHasInstance(),
+                  CompareDescriptor(isolate));
+}
+
+// static
 Callable CodeFactory::RegExpConstructResult(Isolate* isolate) {
   RegExpConstructResultStub stub(isolate);
   return make_callable(stub);
@@ -398,38 +416,38 @@
 
 // static
 Callable CodeFactory::StringEqual(Isolate* isolate) {
-  StringEqualStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringEqual(),
+                  CompareDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::StringNotEqual(Isolate* isolate) {
-  StringNotEqualStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringNotEqual(),
+                  CompareDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::StringLessThan(Isolate* isolate) {
-  StringLessThanStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringLessThan(),
+                  CompareDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::StringLessThanOrEqual(Isolate* isolate) {
-  StringLessThanOrEqualStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringLessThanOrEqual(),
+                  CompareDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::StringGreaterThan(Isolate* isolate) {
-  StringGreaterThanStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringGreaterThan(),
+                  CompareDescriptor(isolate));
 }
 
 // static
 Callable CodeFactory::StringGreaterThanOrEqual(Isolate* isolate) {
-  StringGreaterThanOrEqualStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->StringGreaterThanOrEqual(),
+                  CompareDescriptor(isolate));
 }
 
 // static
@@ -594,9 +612,17 @@
 }
 
 // static
-Callable CodeFactory::InterpreterPushArgsAndConstruct(Isolate* isolate) {
-  return Callable(isolate->builtins()->InterpreterPushArgsAndConstruct(),
-                  InterpreterPushArgsAndConstructDescriptor(isolate));
+Callable CodeFactory::InterpreterPushArgsAndConstruct(
+    Isolate* isolate, CallableType function_type) {
+  return Callable(
+      isolate->builtins()->InterpreterPushArgsAndConstruct(function_type),
+      InterpreterPushArgsAndConstructDescriptor(isolate));
+}
+
+// static
+Callable CodeFactory::InterpreterPushArgsAndConstructArray(Isolate* isolate) {
+  return Callable(isolate->builtins()->InterpreterPushArgsAndConstructArray(),
+                  InterpreterPushArgsAndConstructArrayDescriptor(isolate));
 }
 
 // static
diff --git a/src/code-factory.h b/src/code-factory.h
index 40b1ea4..59f069e 100644
--- a/src/code-factory.h
+++ b/src/code-factory.h
@@ -84,6 +84,8 @@
                                       OrdinaryToPrimitiveHint hint);
   static Callable NumberToString(Isolate* isolate);
 
+  static Callable OrdinaryHasInstance(Isolate* isolate);
+
   static Callable RegExpConstructResult(Isolate* isolate);
   static Callable RegExpExec(Isolate* isolate);
 
@@ -160,7 +162,9 @@
   static Callable InterpreterPushArgsAndCall(
       Isolate* isolate, TailCallMode tail_call_mode,
       CallableType function_type = CallableType::kAny);
-  static Callable InterpreterPushArgsAndConstruct(Isolate* isolate);
+  static Callable InterpreterPushArgsAndConstruct(
+      Isolate* isolate, CallableType function_type = CallableType::kAny);
+  static Callable InterpreterPushArgsAndConstructArray(Isolate* isolate);
   static Callable InterpreterCEntry(Isolate* isolate, int result_size = 1);
   static Callable InterpreterOnStackReplacement(Isolate* isolate);
 };
diff --git a/src/code-stub-assembler.cc b/src/code-stub-assembler.cc
index 06552ba..016814c 100644
--- a/src/code-stub-assembler.cc
+++ b/src/code-stub-assembler.cc
@@ -38,41 +38,23 @@
 #endif
 }
 
-Node* CodeStubAssembler::BooleanMapConstant() {
-  return HeapConstant(isolate()->factory()->boolean_map());
-}
-
-Node* CodeStubAssembler::EmptyStringConstant() {
-  return LoadRoot(Heap::kempty_stringRootIndex);
-}
-
-Node* CodeStubAssembler::HeapNumberMapConstant() {
-  return HeapConstant(isolate()->factory()->heap_number_map());
-}
-
 Node* CodeStubAssembler::NoContextConstant() {
   return SmiConstant(Smi::FromInt(0));
 }
 
-Node* CodeStubAssembler::MinusZeroConstant() {
-  return LoadRoot(Heap::kMinusZeroValueRootIndex);
-}
+#define HEAP_CONSTANT_ACCESSOR(rootName, name)     \
+  Node* CodeStubAssembler::name##Constant() {      \
+    return LoadRoot(Heap::k##rootName##RootIndex); \
+  }
+HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
+#undef HEAP_CONSTANT_ACCESSOR
 
-Node* CodeStubAssembler::NanConstant() {
-  return LoadRoot(Heap::kNanValueRootIndex);
-}
-
-Node* CodeStubAssembler::NullConstant() {
-  return LoadRoot(Heap::kNullValueRootIndex);
-}
-
-Node* CodeStubAssembler::UndefinedConstant() {
-  return LoadRoot(Heap::kUndefinedValueRootIndex);
-}
-
-Node* CodeStubAssembler::TheHoleConstant() {
-  return LoadRoot(Heap::kTheHoleValueRootIndex);
-}
+#define HEAP_CONSTANT_TEST(rootName, name)         \
+  Node* CodeStubAssembler::Is##name(Node* value) { \
+    return WordEqual(value, name##Constant());     \
+  }
+HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
+#undef HEAP_CONSTANT_TEST
 
 Node* CodeStubAssembler::HashSeed() {
   return LoadAndUntagToWord32Root(Heap::kHashSeedRootIndex);
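
The HEAP_CONSTANT_ACCESSOR/HEAP_CONSTANT_TEST pair above expands one list macro twice, once into root-loading accessors and once into equality predicates. The same X-macro technique over a made-up list (COLOR_LIST and the names below are illustrative, not part of V8):

  #include <iostream>
  #include <string>

  #define COLOR_LIST(V) \
    V(RedRoot, Red)     \
    V(GreenRoot, Green)

  class Palette {
   public:
    std::string LoadRoot(const std::string& root_name) { return root_name; }

  // First expansion: one <Name>Constant() accessor per list entry.
  #define ACCESSOR(rootName, name) \
    std::string name##Constant() { return LoadRoot(#rootName); }
    COLOR_LIST(ACCESSOR)
  #undef ACCESSOR

  // Second expansion: one Is<Name>() predicate per list entry.
  #define TEST(rootName, name) \
    bool Is##name(const std::string& value) { return value == name##Constant(); }
    COLOR_LIST(TEST)
  #undef TEST
  };

  int main() {
    Palette palette;
    std::cout << palette.RedConstant() << " "           // prints "RedRoot"
              << palette.IsGreen("GreenRoot") << "\n";  // prints 1
  }
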
@@ -86,7 +68,7 @@
   if (mode == SMI_PARAMETERS) {
     return SmiConstant(Smi::FromInt(value));
   } else {
-    DCHECK_EQ(INTEGER_PARAMETERS, mode);
+    DCHECK(mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS);
     return IntPtrConstant(value);
   }
 }
@@ -284,7 +266,7 @@
 
 Node* CodeStubAssembler::SmiFromWord32(Node* value) {
   value = ChangeInt32ToIntPtr(value);
-  return WordShl(value, SmiShiftBitsConstant());
+  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
 }
 
 Node* CodeStubAssembler::SmiTag(Node* value) {
@@ -292,15 +274,15 @@
   if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
     return SmiConstant(Smi::FromInt(constant_value));
   }
-  return WordShl(value, SmiShiftBitsConstant());
+  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
 }
 
 Node* CodeStubAssembler::SmiUntag(Node* value) {
-  return WordSar(value, SmiShiftBitsConstant());
+  return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant());
 }
 
 Node* CodeStubAssembler::SmiToWord32(Node* value) {
-  Node* result = WordSar(value, SmiShiftBitsConstant());
+  Node* result = SmiUntag(value);
   if (Is64()) {
     result = TruncateInt64ToInt32(result);
   }
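
SmiTag and SmiUntag above remain simple shifts; the change only wraps them in explicit bitcasts between the tagged and word representations. The underlying arithmetic in plain integers (kSmiShiftBits shown with the 32-bit value of 1; 64-bit V8 uses a larger shift):

  #include <cstdint>

  constexpr int kSmiShiftBits = 1;  // assumed 32-bit layout for illustration

  intptr_t SmiTag(intptr_t value) {
    // WordShl: shift left so the low tag bit ends up zero. Done on the
    // unsigned type to sidestep signed-overflow rules.
    return static_cast<intptr_t>(static_cast<uintptr_t>(value) << kSmiShiftBits);
  }

  intptr_t SmiUntag(intptr_t tagged) {
    // WordSar: arithmetic right shift, preserving the sign of the payload.
    return tagged >> kSmiShiftBits;
  }
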
@@ -325,10 +307,18 @@
 
 Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }
 
+Node* CodeStubAssembler::SmiAbove(Node* a, Node* b) {
+  return UintPtrGreaterThan(a, b);
+}
+
 Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
   return UintPtrGreaterThanOrEqual(a, b);
 }
 
+Node* CodeStubAssembler::SmiBelow(Node* a, Node* b) {
+  return UintPtrLessThan(a, b);
+}
+
 Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
   return IntPtrLessThan(a, b);
 }
@@ -337,19 +327,12 @@
   return IntPtrLessThanOrEqual(a, b);
 }
 
+Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
+  return Select(SmiLessThan(a, b), b, a);
+}
+
 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
-  // TODO(bmeurer): Consider using Select once available.
-  Variable min(this, MachineRepresentation::kTagged);
-  Label if_a(this), if_b(this), join(this);
-  BranchIfSmiLessThan(a, b, &if_a, &if_b);
-  Bind(&if_a);
-  min.Bind(a);
-  Goto(&join);
-  Bind(&if_b);
-  min.Bind(b);
-  Goto(&join);
-  Bind(&join);
-  return min.value();
+  return Select(SmiLessThan(a, b), a, b);
 }
 
 Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
@@ -485,80 +468,6 @@
                    IntPtrConstant(0));
 }
 
-void CodeStubAssembler::BranchIfSameValueZero(Node* a, Node* b, Node* context,
-                                              Label* if_true, Label* if_false) {
-  Node* number_map = HeapNumberMapConstant();
-  Label a_isnumber(this), a_isnotnumber(this), b_isnumber(this), a_isnan(this),
-      float_not_equal(this);
-  // If register A and register B are identical, goto `if_true`
-  GotoIf(WordEqual(a, b), if_true);
-  // If either register A or B are Smis, goto `if_false`
-  GotoIf(Word32Or(WordIsSmi(a), WordIsSmi(b)), if_false);
-  // GotoIf(WordIsSmi(b), if_false);
-
-  Node* a_map = LoadMap(a);
-  Node* b_map = LoadMap(b);
-  Branch(WordEqual(a_map, number_map), &a_isnumber, &a_isnotnumber);
-
-  // If both register A and B are HeapNumbers, return true if they are equal,
-  // or if both are NaN
-  Bind(&a_isnumber);
-  {
-    Branch(WordEqual(b_map, number_map), &b_isnumber, if_false);
-
-    Bind(&b_isnumber);
-    Node* a_value = LoadHeapNumberValue(a);
-    Node* b_value = LoadHeapNumberValue(b);
-    BranchIfFloat64Equal(a_value, b_value, if_true, &float_not_equal);
-
-    Bind(&float_not_equal);
-    BranchIfFloat64IsNaN(a_value, &a_isnan, if_false);
-
-    Bind(&a_isnan);
-    BranchIfFloat64IsNaN(a_value, if_true, if_false);
-  }
-
-  Bind(&a_isnotnumber);
-  {
-    Label a_isstring(this), a_isnotstring(this);
-    Node* a_instance_type = LoadMapInstanceType(a_map);
-
-    Branch(Int32LessThan(a_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
-           &a_isstring, &a_isnotstring);
-
-    Bind(&a_isstring);
-    {
-      Label b_isstring(this), b_isnotstring(this);
-      Node* b_instance_type = LoadInstanceType(b_map);
-
-      Branch(
-          Int32LessThan(b_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
-          &b_isstring, if_false);
-
-      Bind(&b_isstring);
-      {
-        Callable callable = CodeFactory::StringEqual(isolate());
-        Node* result = CallStub(callable, context, a, b);
-        Branch(WordEqual(BooleanConstant(true), result), if_true, if_false);
-      }
-    }
-
-    Bind(&a_isnotstring);
-    {
-      // Check if {lhs} is a Simd128Value.
-      Label a_issimd128value(this);
-      Branch(Word32Equal(a_instance_type, Int32Constant(SIMD128_VALUE_TYPE)),
-             &a_issimd128value, if_false);
-
-      Bind(&a_issimd128value);
-      {
-        // Load the map of {rhs}.
-        BranchIfSimd128Equal(a, a_map, b, b_map, if_true, if_false);
-      }
-    }
-  }
-}
-
 void CodeStubAssembler::BranchIfSimd128Equal(Node* lhs, Node* lhs_map,
                                              Node* rhs, Node* rhs_map,
                                              Label* if_equal,
@@ -630,69 +539,61 @@
   Goto(if_notequal);
 }
 
+void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
+    Node* receiver_map, Label* definitely_no_elements,
+    Label* possibly_elements) {
+  Variable var_map(this, MachineRepresentation::kTagged);
+  var_map.Bind(receiver_map);
+  Label loop_body(this, &var_map);
+  Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
+  Goto(&loop_body);
+
+  Bind(&loop_body);
+  {
+    Node* map = var_map.value();
+    Node* prototype = LoadMapPrototype(map);
+    GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
+    Node* prototype_map = LoadMap(prototype);
+    // Pessimistically assume elements if a Proxy, Special API Object,
+    // or JSValue wrapper is found on the prototype chain. After this
+    // instance type check, it's not necessary to check for interceptors or
+    // access checks.
+    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(prototype_map),
+                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
+           possibly_elements);
+    GotoIf(WordNotEqual(LoadElements(prototype), empty_elements),
+           possibly_elements);
+    var_map.Bind(prototype_map);
+    Goto(&loop_body);
+  }
+}
+
 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
                                             Label* if_true, Label* if_false) {
-  Node* int32_zero = Int32Constant(0);
-  Node* int32_one = Int32Constant(1);
-
-  Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
-
-  Variable last_map(this, MachineRepresentation::kTagged);
-  Label check_prototype(this);
-
-  // Bailout if Smi
+  // Bailout if receiver is a Smi.
   GotoIf(WordIsSmi(object), if_false);
 
   Node* map = LoadMap(object);
-  last_map.Bind(map);
 
-  // Bailout if instance type is not JS_ARRAY_TYPE
+  // Bailout if instance type is not JS_ARRAY_TYPE.
   GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
          if_false);
 
   Node* bit_field2 = LoadMapBitField2(map);
   Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);
 
-  // Bailout if slow receiver elements
+  // Bailout if receiver has slow elements.
   GotoIf(
       Int32GreaterThan(elements_kind, Int32Constant(LAST_FAST_ELEMENTS_KIND)),
       if_false);
 
+  // Check prototype chain if receiver does not have packed elements.
   STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
   STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
   STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
-
-  // Check prototype chain if receiver does not have packed elements
-  Node* holey_elements = Word32And(elements_kind, int32_one);
-  Branch(Word32Equal(holey_elements, int32_zero), if_true, &check_prototype);
-
-  Bind(&check_prototype);
-  {
-    Label loop_body(this, &last_map);
-    Goto(&loop_body);
-    Bind(&loop_body);
-    Node* current_map = last_map.value();
-    Node* proto = LoadObjectField(current_map, Map::kPrototypeOffset);
-
-    // End loop
-    GotoIf(WordEqual(proto, NullConstant()), if_true);
-
-    // ASSERT: proto->IsHeapObject()
-    Node* proto_map = LoadMap(proto);
-
-    // Bailout if a Proxy, API Object, or JSValue wrapper found in prototype
-    // Because of this bailout, it's not necessary to check for interceptors or
-    // access checks on the prototype chain.
-    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(proto_map),
-                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
-           if_false);
-
-    // Bailout if prototype contains non-empty elements
-    GotoUnless(WordEqual(LoadElements(proto), empty_elements), if_false);
-
-    last_map.Bind(proto_map);
-    Goto(&loop_body);
-  }
+  Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
+  GotoIf(Word32Equal(holey_elements, Int32Constant(0)), if_true);
+  BranchIfPrototypesHaveNoElements(map, if_true, if_false);
 }
 
 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
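
BranchIfPrototypesHaveNoElements above walks the prototype chain of maps, giving up as soon as it sees a custom-elements receiver or a prototype with non-empty elements, and succeeding when it reaches null. The same walk as ordinary C++ over placeholder structs (not V8's object model):

  struct Map;
  struct HeapObject { Map* map; };
  struct Map {
    HeapObject* prototype;             // null terminates the chain
    bool is_custom_elements_receiver;  // Proxy, special API object, JSValue
    bool has_empty_elements;
  };

  // True when no prototype on the chain can contribute elements.
  bool PrototypesHaveNoElements(Map* receiver_map) {
    for (Map* map = receiver_map;;) {
      HeapObject* prototype = map->prototype;
      if (prototype == nullptr) return true;  // definitely_no_elements
      Map* prototype_map = prototype->map;
      if (prototype_map->is_custom_elements_receiver) return false;  // possibly_elements
      if (!prototype_map->has_empty_elements) return false;          // possibly_elements
      map = prototype_map;
    }
  }
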
@@ -859,9 +760,8 @@
     // types, the HeapNumber type and everything else.
     GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
            &if_valueisheapnumber);
-    Branch(
-        Int32LessThan(value_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
-        &if_valueisstring, &if_valueisother);
+    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
+           &if_valueisother);
 
     Bind(&if_valueisstring);
     {
@@ -1008,6 +908,10 @@
   return LoadObjectField(object, JSObject::kElementsOffset);
 }
 
+Node* CodeStubAssembler::LoadJSArrayLength(compiler::Node* array) {
+  return LoadObjectField(array, JSArray::kLengthOffset);
+}
+
 Node* CodeStubAssembler::LoadFixedArrayBaseLength(compiler::Node* array) {
   return LoadObjectField(array, FixedArrayBase::kLengthOffset);
 }
@@ -1032,6 +936,11 @@
   return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
 }
 
+Node* CodeStubAssembler::LoadMapElementsKind(Node* map) {
+  Node* bit_field2 = LoadMapBitField2(map);
+  return BitFieldDecode<Map::ElementsKindBits>(bit_field2);
+}
+
 Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
   return LoadObjectField(map, Map::kDescriptorsOffset);
 }
@@ -1041,7 +950,8 @@
 }
 
 Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
-  return LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8());
+  return ChangeUint32ToWord(
+      LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
 }
 
 Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
@@ -1049,9 +959,19 @@
   STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
   Assert(Int32GreaterThanOrEqual(LoadMapInstanceType(map),
                                  Int32Constant(FIRST_JS_OBJECT_TYPE)));
-  return LoadObjectField(
+  return ChangeUint32ToWord(LoadObjectField(
       map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
-      MachineType::Uint8());
+      MachineType::Uint8()));
+}
+
+Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
+  // See Map::GetConstructorFunctionIndex() for details.
+  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
+  Assert(Int32LessThanOrEqual(LoadMapInstanceType(map),
+                              Int32Constant(LAST_PRIMITIVE_TYPE)));
+  return ChangeUint32ToWord(LoadObjectField(
+      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
+      MachineType::Uint8()));
 }
 
 Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
@@ -1081,7 +1001,7 @@
 Node* CodeStubAssembler::LoadNameHash(Node* name, Label* if_hash_not_computed) {
   Node* hash_field = LoadNameHashField(name);
   if (if_hash_not_computed != nullptr) {
-    GotoIf(WordEqual(
+    GotoIf(Word32Equal(
                Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
                Int32Constant(0)),
            if_hash_not_computed);
@@ -1105,19 +1025,6 @@
   return value;
 }
 
-Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) {
-  Node* header_size = IntPtrConstant(FixedArray::kHeaderSize);
-  Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2));
-  Node* total_size = IntPtrAdd(data_size, header_size);
-
-  Node* result = Allocate(total_size, kNone);
-  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex));
-  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
-      SmiTag(length));
-
-  return result;
-}
-
 Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
                                                int additional_offset,
                                                ParameterMode parameter_mode) {
@@ -1149,29 +1056,57 @@
 
 Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
     Node* object, Node* index_node, MachineType machine_type,
-    int additional_offset, ParameterMode parameter_mode) {
+    int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
   int32_t header_size =
       FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
   Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
                                         parameter_mode, header_size);
-  return Load(machine_type, object, offset);
+  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
+}
+
+Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
+                                                 Label* if_hole,
+                                                 MachineType machine_type) {
+  if (if_hole) {
+    // TODO(ishell): Compare only the upper part for the hole once the
+    // compiler is able to fold addition of already complex |offset| with
+    // |kIeeeDoubleExponentWordOffset| into one addressing mode.
+    if (Is64()) {
+      Node* element = Load(MachineType::Uint64(), base, offset);
+      GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
+    } else {
+      Node* element_upper = Load(
+          MachineType::Uint32(), base,
+          IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
+      GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
+             if_hole);
+    }
+  }
+  if (machine_type.IsNone()) {
+    // This means the actual value is not needed.
+    return nullptr;
+  }
+  return Load(machine_type, base, offset);
+}
+
+Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) {
+  int offset = Context::SlotOffset(slot_index);
+  return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset));
 }
 
 Node* CodeStubAssembler::LoadNativeContext(Node* context) {
-  return LoadFixedArrayElement(context,
-                               Int32Constant(Context::NATIVE_CONTEXT_INDEX));
+  return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX);
 }
 
 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
                                                 Node* native_context) {
   return LoadFixedArrayElement(native_context,
-                               Int32Constant(Context::ArrayMapIndex(kind)));
+                               IntPtrConstant(Context::ArrayMapIndex(kind)));
 }
 
 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
-  return StoreNoWriteBarrier(
-      MachineRepresentation::kFloat64, object,
-      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
+  return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
+                                        MachineRepresentation::kFloat64);
 }
 
 Node* CodeStubAssembler::StoreObjectField(
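
LoadDoubleWithHoleCheck above detects the hole by its raw bit pattern rather than by comparing doubles, because the hole is a signalling NaN and an ordinary double load could quieten it. A standalone sketch of the bit test; the constant has the structure the code relies on (equal upper and lower 32 bits) but is only a stand-in, not necessarily V8's exact value:

  #include <cstdint>
  #include <cstring>

  constexpr uint32_t kHoleNanUpper32 = 0xFFF7FFFF;  // assumed value
  constexpr uint64_t kHoleNanInt64 =
      (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanUpper32;

  bool IsTheHole(const void* element_address) {
    uint64_t bits;
    std::memcpy(&bits, element_address, sizeof(bits));  // read raw bits, not a double
    // On 32-bit targets only the upper word needs checking, which is what the
    // Word32Equal branch above does.
    return bits == kHoleNanInt64;
  }
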
@@ -1180,12 +1115,32 @@
                IntPtrConstant(offset - kHeapObjectTag), value);
 }
 
+Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
+                                          Node* value) {
+  int const_offset;
+  if (ToInt32Constant(offset, const_offset)) {
+    return StoreObjectField(object, const_offset, value);
+  }
+  return Store(MachineRepresentation::kTagged, object,
+               IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
+}
+
 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
     Node* object, int offset, Node* value, MachineRepresentation rep) {
   return StoreNoWriteBarrier(rep, object,
                              IntPtrConstant(offset - kHeapObjectTag), value);
 }
 
+Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
+    Node* object, Node* offset, Node* value, MachineRepresentation rep) {
+  int const_offset;
+  if (ToInt32Constant(offset, const_offset)) {
+    return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
+  }
+  return StoreNoWriteBarrier(
+      rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
+}
+
 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
   return StoreNoWriteBarrier(
       MachineRepresentation::kTagged, object,
@@ -1227,14 +1182,19 @@
   return StoreNoWriteBarrier(rep, object, offset, value);
 }
 
-Node* CodeStubAssembler::AllocateHeapNumber() {
+Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
   Node* result = Allocate(HeapNumber::kSize, kNone);
-  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
+  Heap::RootListIndex heap_map_index =
+      mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
+                        : Heap::kMutableHeapNumberMapRootIndex;
+  Node* map = LoadRoot(heap_map_index);
+  StoreMapNoWriteBarrier(result, map);
   return result;
 }
 
-Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
-  Node* result = AllocateHeapNumber();
+Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
+                                                     MutableMode mode) {
+  Node* result = AllocateHeapNumber(mode);
   StoreHeapNumberValue(result, value);
   return result;
 }
@@ -1261,8 +1221,7 @@
           IntPtrAdd(length, IntPtrConstant(SeqOneByteString::kHeaderSize)),
           IntPtrConstant(kObjectAlignmentMask)),
       IntPtrConstant(~kObjectAlignmentMask));
-  Branch(IntPtrLessThanOrEqual(size,
-                               IntPtrConstant(Page::kMaxRegularHeapObjectSize)),
+  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);
 
   Bind(&if_sizeissmall);
@@ -1314,8 +1273,7 @@
                           IntPtrConstant(SeqTwoByteString::kHeaderSize)),
                 IntPtrConstant(kObjectAlignmentMask)),
       IntPtrConstant(~kObjectAlignmentMask));
-  Branch(IntPtrLessThanOrEqual(size,
-                               IntPtrConstant(Page::kMaxRegularHeapObjectSize)),
+  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
          &if_sizeissmall, &if_notsizeissmall);
 
   Bind(&if_sizeissmall);
@@ -1345,51 +1303,166 @@
   return var_result.value();
 }
 
-Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
-                                         Node* capacity_node, Node* length_node,
-                                         compiler::Node* allocation_site,
-                                         ParameterMode mode) {
-  bool is_double = IsFastDoubleElementsKind(kind);
-  int base_size = JSArray::kSize + FixedArray::kHeaderSize;
-  int elements_offset = JSArray::kSize;
+Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
+                                                     Node* offset) {
+  Node* result = Allocate(SlicedString::kSize);
+  Node* map = LoadRoot(Heap::kSlicedOneByteStringMapRootIndex);
+  StoreMapNoWriteBarrier(result, map);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
+                                 MachineRepresentation::kTagged);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
+                                 Int32Constant(String::kEmptyHashField),
+                                 MachineRepresentation::kWord32);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
+                                 MachineRepresentation::kTagged);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
+                                 MachineRepresentation::kTagged);
+  return result;
+}
 
-  Comment("begin allocation of JSArray");
+Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
+                                                     Node* offset) {
+  Node* result = Allocate(SlicedString::kSize);
+  Node* map = LoadRoot(Heap::kSlicedStringMapRootIndex);
+  StoreMapNoWriteBarrier(result, map);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
+                                 MachineRepresentation::kTagged);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
+                                 Int32Constant(String::kEmptyHashField),
+                                 MachineRepresentation::kWord32);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
+                                 MachineRepresentation::kTagged);
+  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
+                                 MachineRepresentation::kTagged);
+  return result;
+}
+
+Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
+                                              Node* index, Node* input) {
+  Node* const max_length =
+      SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
+  Assert(SmiLessThanOrEqual(length, max_length));
+
+  // Allocate the JSRegExpResult.
+  // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
+  // unneeded store of elements.
+  Node* const result = Allocate(JSRegExpResult::kSize);
+
+  // TODO(jgruber): Store map as Heap constant?
+  Node* const native_context = LoadNativeContext(context);
+  Node* const map =
+      LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
+  StoreMapNoWriteBarrier(result, map);
+
+  // Initialize the header before allocating the elements.
+  Node* const empty_array = EmptyFixedArrayConstant();
+  DCHECK(Heap::RootIsImmortalImmovable(Heap::kEmptyFixedArrayRootIndex));
+  StoreObjectFieldNoWriteBarrier(result, JSArray::kPropertiesOffset,
+                                 empty_array);
+  StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, empty_array);
+  StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset, length);
+
+  StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kIndexOffset, index);
+  StoreObjectField(result, JSRegExpResult::kInputOffset, input);
+
+  Node* const zero = IntPtrConstant(0);
+  Node* const length_intptr = SmiUntag(length);
+  const ElementsKind elements_kind = FAST_ELEMENTS;
+  const ParameterMode parameter_mode = INTPTR_PARAMETERS;
+
+  Node* const elements =
+      AllocateFixedArray(elements_kind, length_intptr, parameter_mode);
+  StoreObjectField(result, JSArray::kElementsOffset, elements);
+
+  // Fill in the elements with undefined.
+  FillFixedArrayWithValue(elements_kind, elements, zero, length_intptr,
+                          Heap::kUndefinedValueRootIndex, parameter_mode);
+
+  return result;
+}
+
+Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
+    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
+  Comment("begin allocation of JSArray without elements");
+  int base_size = JSArray::kSize;
+  if (allocation_site != nullptr) {
+    base_size += AllocationMemento::kSize;
+  }
+
+  Node* size = IntPtrConstant(base_size);
+  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
+                                             allocation_site, size);
+  return array;
+}
+
+std::pair<Node*, Node*>
+CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
+    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
+    Node* capacity, ParameterMode capacity_mode) {
+  Comment("begin allocation of JSArray with elements");
+  int base_size = JSArray::kSize;
 
   if (allocation_site != nullptr) {
     base_size += AllocationMemento::kSize;
-    elements_offset += AllocationMemento::kSize;
   }
 
-  Node* total_size =
-      ElementOffsetFromIndex(capacity_node, kind, mode, base_size);
+  int elements_offset = base_size;
 
-  // Allocate both array and elements object, and initialize the JSArray.
-  Heap* heap = isolate()->heap();
-  Node* array = Allocate(total_size);
+  // Compute space for elements
+  base_size += FixedArray::kHeaderSize;
+  Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
+
+  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
+                                             allocation_site, size);
+
+  Node* elements = InnerAllocate(array, elements_offset);
+  StoreObjectField(array, JSObject::kElementsOffset, elements);
+
+  return {array, elements};
+}
+
+Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
+                                                      Node* array_map,
+                                                      Node* length,
+                                                      Node* allocation_site,
+                                                      Node* size_in_bytes) {
+  Node* array = Allocate(size_in_bytes);
+
+  Comment("write JSArray headers");
   StoreMapNoWriteBarrier(array, array_map);
-  Node* empty_properties = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
-  StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset,
-                                 empty_properties);
-  StoreObjectFieldNoWriteBarrier(
-      array, JSArray::kLengthOffset,
-      mode == SMI_PARAMETERS ? length_node : SmiTag(length_node));
+
+  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
+
+  StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
 
   if (allocation_site != nullptr) {
     InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
   }
+  return array;
+}
 
+Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
+                                         Node* capacity, Node* length,
+                                         Node* allocation_site,
+                                         ParameterMode capacity_mode) {
+  bool is_double = IsFastDoubleElementsKind(kind);
+
+  // Allocate both array and elements object, and initialize the JSArray.
+  Node *array, *elements;
+  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
+      kind, array_map, length, allocation_site, capacity, capacity_mode);
   // Setup elements object.
-  Node* elements = InnerAllocate(array, elements_offset);
-  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
+  Heap* heap = isolate()->heap();
   Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
                                      : heap->fixed_array_map());
   StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
-  StoreObjectFieldNoWriteBarrier(
-      elements, FixedArray::kLengthOffset,
-      mode == SMI_PARAMETERS ? capacity_node : SmiTag(capacity_node));
+  StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
+                                 TagParameter(capacity, capacity_mode));
 
-  FillFixedArrayWithHole(kind, elements, IntPtrConstant(0), capacity_node,
-                         mode);
+  // Fill in the elements with holes.
+  FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity,
+                          Heap::kTheHoleValueRootIndex, capacity_mode);
 
   return array;
 }
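
AllocateUninitializedJSArrayWithElements above folds the JSArray and its elements store into a single allocation and then carves the elements out of the tail with InnerAllocate. The layout idea as a plain C++ sketch over a raw buffer (the header structs and sizes are stand-ins, and error handling is omitted):

  #include <cstddef>
  #include <cstdlib>
  #include <new>
  #include <utility>

  struct ArrayHeader { size_t length; void* elements; };
  struct ElementsHeader { size_t capacity; };

  // One block, two "objects": the array header at offset 0 and the elements
  // store immediately behind it, like Allocate() followed by InnerAllocate().
  std::pair<ArrayHeader*, ElementsHeader*> AllocateArrayWithElements(size_t capacity) {
    size_t elements_size = sizeof(ElementsHeader) + capacity * sizeof(void*);
    char* block = static_cast<char*>(std::malloc(sizeof(ArrayHeader) + elements_size));
    ArrayHeader* array = new (block) ArrayHeader{0, nullptr};
    ElementsHeader* elements =
        new (block + sizeof(ArrayHeader)) ElementsHeader{capacity};
    array->elements = elements;
    return {array, elements};
  }
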
@@ -1398,7 +1471,7 @@
                                             Node* capacity_node,
                                             ParameterMode mode,
                                             AllocationFlags flags) {
-  Node* total_size = GetFixedAarrayAllocationSize(capacity_node, kind, mode);
+  Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
 
   // Allocate both array and elements object, and initialize the JSArray.
   Node* array = Allocate(total_size, flags);
@@ -1411,24 +1484,24 @@
   } else {
     StoreMapNoWriteBarrier(array, HeapConstant(map));
   }
-  StoreObjectFieldNoWriteBarrier(
-      array, FixedArray::kLengthOffset,
-      mode == INTEGER_PARAMETERS ? SmiTag(capacity_node) : capacity_node);
+  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
+                                 TagParameter(capacity_node, mode));
   return array;
 }
 
-void CodeStubAssembler::FillFixedArrayWithHole(ElementsKind kind,
-                                               compiler::Node* array,
-                                               compiler::Node* from_node,
-                                               compiler::Node* to_node,
-                                               ParameterMode mode) {
-  int const first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
-  Heap* heap = isolate()->heap();
-  Node* hole = HeapConstant(Handle<HeapObject>(heap->the_hole_value()));
+void CodeStubAssembler::FillFixedArrayWithValue(
+    ElementsKind kind, Node* array, Node* from_node, Node* to_node,
+    Heap::RootListIndex value_root_index, ParameterMode mode) {
+  bool is_double = IsFastDoubleElementsKind(kind);
+  DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
+         value_root_index == Heap::kUndefinedValueRootIndex);
+  DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
+  STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
   Node* double_hole =
       Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
-  DCHECK_EQ(kHoleNanLower32, kHoleNanUpper32);
-  bool is_double = IsFastDoubleElementsKind(kind);
+  Node* value = LoadRoot(value_root_index);
+
+  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
   int32_t to;
   bool constant_to = ToInt32Constant(to_node, to);
   int32_t from;
@@ -1436,8 +1509,9 @@
   if (constant_to && constant_from &&
       (to - from) <= kElementLoopUnrollThreshold) {
     for (int i = from; i < to; ++i) {
+      Node* index = IntPtrConstant(i);
       if (is_double) {
-        Node* offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
+        Node* offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
                                               first_element_offset);
         // Don't use doubles to store the hole double, since manipulating the
         // signaling NaN used for the hole in C++, e.g. with bit_cast, will
@@ -1453,14 +1527,14 @@
         } else {
           StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                               double_hole);
-          offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
+          offset = ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
                                           first_element_offset + kPointerSize);
           StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                               double_hole);
         }
       } else {
-        StoreFixedArrayElement(array, Int32Constant(i), hole,
-                               SKIP_WRITE_BARRIER);
+        StoreFixedArrayElement(array, index, value, SKIP_WRITE_BARRIER,
+                               INTPTR_PARAMETERS);
       }
     }
   } else {
@@ -1477,8 +1551,8 @@
     Bind(&decrement);
     current.Bind(IntPtrSub(
         current.value(),
-        Int32Constant(IsFastDoubleElementsKind(kind) ? kDoubleSize
-                                                     : kPointerSize)));
+        IntPtrConstant(IsFastDoubleElementsKind(kind) ? kDoubleSize
+                                                      : kPointerSize)));
     if (is_double) {
       // Don't use doubles to store the hole double, since manipulating the
       // signaling NaN used for the hole in C++, e.g. with bit_cast, will
@@ -1494,15 +1568,13 @@
       } else {
         StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
                             Int32Constant(first_element_offset), double_hole);
-        StoreNoWriteBarrier(
-            MachineRepresentation::kWord32,
-            IntPtrAdd(current.value(),
-                      Int32Constant(kPointerSize + first_element_offset)),
-            double_hole);
+        StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
+                            Int32Constant(kPointerSize + first_element_offset),
+                            double_hole);
       }
     } else {
-      StoreNoWriteBarrier(MachineRepresentation::kTagged, current.value(),
-                          IntPtrConstant(first_element_offset), hole);
+      StoreNoWriteBarrier(MachineType::PointerRepresentation(), current.value(),
+                          IntPtrConstant(first_element_offset), value);
     }
     Node* compare = WordNotEqual(current.value(), limit);
     Branch(compare, &decrement, &done);
@@ -1511,50 +1583,236 @@
   }
 }
 
-void CodeStubAssembler::CopyFixedArrayElements(ElementsKind kind,
-                                               compiler::Node* from_array,
-                                               compiler::Node* to_array,
-                                               compiler::Node* element_count,
-                                               WriteBarrierMode barrier_mode,
-                                               ParameterMode mode) {
-  Label test(this);
-  Label done(this);
-  bool double_elements = IsFastDoubleElementsKind(kind);
-  bool needs_write_barrier =
-      barrier_mode == UPDATE_WRITE_BARRIER && IsFastObjectElementsKind(kind);
-  Node* limit_offset = ElementOffsetFromIndex(
-      IntPtrConstant(0), kind, mode, FixedArray::kHeaderSize - kHeapObjectTag);
-  Variable current_offset(this, MachineType::PointerRepresentation());
-  current_offset.Bind(ElementOffsetFromIndex(
-      element_count, kind, mode, FixedArray::kHeaderSize - kHeapObjectTag));
-  Label decrement(this, &current_offset);
+void CodeStubAssembler::CopyFixedArrayElements(
+    ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
+    Node* to_array, Node* element_count, Node* capacity,
+    WriteBarrierMode barrier_mode, ParameterMode mode) {
+  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
+  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
+  Comment("[ CopyFixedArrayElements");
 
-  Branch(WordEqual(current_offset.value(), limit_offset), &done, &decrement);
+  // Typed array elements are not supported.
+  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
+  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
+
+  Label done(this);
+  bool from_double_elements = IsFastDoubleElementsKind(from_kind);
+  bool to_double_elements = IsFastDoubleElementsKind(to_kind);
+  bool element_size_matches =
+      Is64() ||
+      IsFastDoubleElementsKind(from_kind) == IsFastDoubleElementsKind(to_kind);
+  bool doubles_to_objects_conversion =
+      IsFastDoubleElementsKind(from_kind) && IsFastObjectElementsKind(to_kind);
+  bool needs_write_barrier =
+      doubles_to_objects_conversion || (barrier_mode == UPDATE_WRITE_BARRIER &&
+                                        IsFastObjectElementsKind(to_kind));
+  Node* double_hole =
+      Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
+
+  if (doubles_to_objects_conversion) {
+    // If the copy might trigger a GC, pre-initialize the FixedArray with
+    // holes so that it is always in a consistent state while the copy is
+    // in progress.
+    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
+                            capacity, Heap::kTheHoleValueRootIndex, mode);
+  } else if (element_count != capacity) {
+    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
+                            Heap::kTheHoleValueRootIndex, mode);
+  }
+
+  Node* limit_offset = ElementOffsetFromIndex(
+      IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
+  Variable var_from_offset(this, MachineType::PointerRepresentation());
+  var_from_offset.Bind(ElementOffsetFromIndex(element_count, from_kind, mode,
+                                              first_element_offset));
+  // This second variable is used only when the element sizes of source and
+  // destination arrays do not match.
+  Variable var_to_offset(this, MachineType::PointerRepresentation());
+  if (element_size_matches) {
+    var_to_offset.Bind(var_from_offset.value());
+  } else {
+    var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
+                                              first_element_offset));
+  }
+
+  Variable* vars[] = {&var_from_offset, &var_to_offset};
+  Label decrement(this, 2, vars);
+
+  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
 
   Bind(&decrement);
   {
-    current_offset.Bind(IntPtrSub(
-        current_offset.value(),
-        IntPtrConstant(double_elements ? kDoubleSize : kPointerSize)));
+    Node* from_offset = IntPtrSub(
+        var_from_offset.value(),
+        IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
+    var_from_offset.Bind(from_offset);
 
-    Node* value =
-        Load(double_elements ? MachineType::Float64() : MachineType::Pointer(),
-             from_array, current_offset.value());
+    Node* to_offset;
+    if (element_size_matches) {
+      to_offset = from_offset;
+    } else {
+      to_offset = IntPtrSub(
+          var_to_offset.value(),
+          IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
+      var_to_offset.Bind(to_offset);
+    }
+
+    Label next_iter(this), store_double_hole(this);
+    Label* if_hole;
+    if (doubles_to_objects_conversion) {
+      // The target elements array is already preinitialized with holes, so we
+      // can just proceed with the next iteration.
+      if_hole = &next_iter;
+    } else if (IsFastDoubleElementsKind(to_kind)) {
+      if_hole = &store_double_hole;
+    } else {
+      // In all the other cases don't check for holes and copy the data as is.
+      if_hole = nullptr;
+    }
+
+    Node* value = LoadElementAndPrepareForStore(
+        from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
+
     if (needs_write_barrier) {
-      Store(MachineRepresentation::kTagged, to_array,
-            current_offset.value(), value);
-    } else if (double_elements) {
-      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array,
-                          current_offset.value(), value);
+      Store(MachineRepresentation::kTagged, to_array, to_offset, value);
+    } else if (to_double_elements) {
+      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
+                          value);
     } else {
       StoreNoWriteBarrier(MachineType::PointerRepresentation(), to_array,
-                          current_offset.value(), value);
+                          to_offset, value);
     }
-    Node* compare = WordNotEqual(current_offset.value(), limit_offset);
+    Goto(&next_iter);
+
+    if (if_hole == &store_double_hole) {
+      Bind(&store_double_hole);
+      // Don't use doubles to store the hole double, since manipulating the
+      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
+      // change its value on ia32 (the x87 stack is used to return values
+      // and stores to the stack silently clear the signalling bit).
+      //
+      // TODO(danno): When we have a Float32/Float64 wrapper class that
+      // preserves double bits during manipulation, remove this code/change
+      // this to an indexed Float64 store.
+      if (Is64()) {
+        StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array, to_offset,
+                            double_hole);
+      } else {
+        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array, to_offset,
+                            double_hole);
+        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array,
+                            IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
+                            double_hole);
+      }
+      Goto(&next_iter);
+    }
+
+    Bind(&next_iter);
+    Node* compare = WordNotEqual(from_offset, limit_offset);
     Branch(compare, &decrement, &done);
   }
 
   Bind(&done);
+  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
+  Comment("] CopyFixedArrayElements");
+}
+
+void CodeStubAssembler::CopyStringCharacters(compiler::Node* from_string,
+                                             compiler::Node* to_string,
+                                             compiler::Node* from_index,
+                                             compiler::Node* character_count,
+                                             String::Encoding encoding) {
+  Label out(this);
+
+  // Nothing to do for zero characters.
+
+  GotoIf(SmiLessThanOrEqual(character_count, SmiConstant(Smi::FromInt(0))),
+         &out);
+
+  // Calculate offsets into the strings.
+
+  Node* from_offset;
+  Node* limit_offset;
+  Node* to_offset;
+
+  {
+    Node* byte_count = SmiUntag(character_count);
+    Node* from_byte_index = SmiUntag(from_index);
+    if (encoding == String::ONE_BYTE_ENCODING) {
+      const int offset = SeqOneByteString::kHeaderSize - kHeapObjectTag;
+      from_offset = IntPtrAdd(IntPtrConstant(offset), from_byte_index);
+      limit_offset = IntPtrAdd(from_offset, byte_count);
+      to_offset = IntPtrConstant(offset);
+    } else {
+      STATIC_ASSERT(2 == sizeof(uc16));
+      byte_count = WordShl(byte_count, 1);
+      from_byte_index = WordShl(from_byte_index, 1);
+
+      const int offset = SeqTwoByteString::kHeaderSize - kHeapObjectTag;
+      from_offset = IntPtrAdd(IntPtrConstant(offset), from_byte_index);
+      limit_offset = IntPtrAdd(from_offset, byte_count);
+      to_offset = IntPtrConstant(offset);
+    }
+  }
+
+  Variable var_from_offset(this, MachineType::PointerRepresentation());
+  Variable var_to_offset(this, MachineType::PointerRepresentation());
+
+  var_from_offset.Bind(from_offset);
+  var_to_offset.Bind(to_offset);
+
+  Variable* vars[] = {&var_from_offset, &var_to_offset};
+  Label decrement(this, 2, vars);
+
+  Label loop(this, 2, vars);
+  Goto(&loop);
+  Bind(&loop);
+  {
+    from_offset = var_from_offset.value();
+    to_offset = var_to_offset.value();
+
+    // TODO(jgruber): We could make this faster through larger copy unit sizes.
+    Node* value = Load(MachineType::Uint8(), from_string, from_offset);
+    StoreNoWriteBarrier(MachineRepresentation::kWord8, to_string, to_offset,
+                        value);
+
+    Node* new_from_offset = IntPtrAdd(from_offset, IntPtrConstant(1));
+    var_from_offset.Bind(new_from_offset);
+    var_to_offset.Bind(IntPtrAdd(to_offset, IntPtrConstant(1)));
+
+    Branch(WordNotEqual(new_from_offset, limit_offset), &loop, &out);
+  }
+
+  Bind(&out);
+}
+
+Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
+                                                       Node* offset,
+                                                       ElementsKind from_kind,
+                                                       ElementsKind to_kind,
+                                                       Label* if_hole) {
+  if (IsFastDoubleElementsKind(from_kind)) {
+    Node* value =
+        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
+    if (!IsFastDoubleElementsKind(to_kind)) {
+      value = AllocateHeapNumberWithValue(value);
+    }
+    return value;
+
+  } else {
+    Node* value = Load(MachineType::Pointer(), array, offset);
+    if (if_hole) {
+      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
+    }
+    if (IsFastDoubleElementsKind(to_kind)) {
+      if (IsFastSmiElementsKind(from_kind)) {
+        value = SmiToFloat64(value);
+      } else {
+        value = LoadHeapNumberValue(value);
+      }
+    }
+    return value;
+  }
 }
 
 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
@@ -1563,7 +1821,7 @@
   Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity);
   Node* unconditioned_result =
       IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode));
-  if (mode == INTEGER_PARAMETERS) {
+  if (mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS) {
     return unconditioned_result;
   } else {
     int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
@@ -1572,50 +1830,64 @@
   }
 }
 
-Node* CodeStubAssembler::CheckAndGrowElementsCapacity(Node* context,
-                                                      Node* elements,
-                                                      ElementsKind kind,
-                                                      Node* key, Label* fail) {
+Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
+                                                 ElementsKind kind, Node* key,
+                                                 Label* bailout) {
   Node* capacity = LoadFixedArrayBaseLength(elements);
 
-  // On 32-bit platforms, there is a slight performance advantage to doing all
-  // of the arithmetic for the new backing store with SMIs, since it's possible
-  // to save a few tag/untag operations without paying an extra expense when
-  // calculating array offset (the smi math can be folded away) and there are
-  // fewer live ranges. Thus only convert |capacity| and |key| to untagged value
-  // on 64-bit platforms.
-  ParameterMode mode = Is64() ? INTEGER_PARAMETERS : SMI_PARAMETERS;
-  if (mode == INTEGER_PARAMETERS) {
-    capacity = SmiUntag(capacity);
-    key = SmiUntag(key);
-  }
+  ParameterMode mode = OptimalParameterMode();
+  capacity = UntagParameter(capacity, mode);
+  key = UntagParameter(key, mode);
+
+  return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
+                                 bailout);
+}
+
+Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
+                                                 ElementsKind kind, Node* key,
+                                                 Node* capacity,
+                                                 ParameterMode mode,
+                                                 Label* bailout) {
+  Comment("TryGrowElementsCapacity");
 
   // If the gap growth is too big, fall back to the runtime.
   Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
   Node* max_capacity = IntPtrAdd(capacity, max_gap);
-  GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), fail);
+  GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), bailout);
 
-  // Calculate the capacity of the new backing tore
+  // Calculate the capacity of the new backing store.
   Node* new_capacity = CalculateNewElementsCapacity(
       IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode);
+  return GrowElementsCapacity(object, elements, kind, kind, capacity,
+                              new_capacity, mode, bailout);
+}
 
+Node* CodeStubAssembler::GrowElementsCapacity(
+    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
+    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
+  Comment("[ GrowElementsCapacity");
   // If size of the allocation for the new capacity doesn't fit in a page
-  // that we can bump-pointer allocate from, fall back to the runtime,
-  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind);
+  // that we can bump-pointer allocate from, fall back to the runtime.
+  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
   GotoIf(UintPtrGreaterThanOrEqual(new_capacity,
                                    IntPtrOrSmiConstant(max_size, mode)),
-         fail);
+         bailout);
 
   // Allocate the new backing store.
-  Node* new_elements = AllocateFixedArray(kind, new_capacity, mode);
+  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
 
   // Fill in the added capacity in the new store with holes.
-  FillFixedArrayWithHole(kind, new_elements, capacity, new_capacity, mode);
+  FillFixedArrayWithValue(to_kind, new_elements, capacity, new_capacity,
+                          Heap::kTheHoleValueRootIndex, mode);
 
   // Copy the elements from the old elements store to the new.
-  CopyFixedArrayElements(kind, elements, new_elements, capacity,
-                         SKIP_WRITE_BARRIER, mode);
+  // The size-check above guarantees that the |new_elements| is allocated
+  // in new space so we can skip the write barrier.
+  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
+                         new_capacity, SKIP_WRITE_BARRIER, mode);
 
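+  // Install the new backing store on the object.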
+  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
+  Comment("] GrowElementsCapacity");
   return new_elements;
 }
 
@@ -1874,9 +2146,8 @@
 
     // Check if the {value} is already String.
     Label if_valueisnotstring(this, Label::kDeferred);
-    Branch(
-        Int32LessThan(value_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
-        &if_valueisstring, &if_valueisnotstring);
+    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
+           &if_valueisnotstring);
     Bind(&if_valueisnotstring);
     {
       // Check if the {value} is null.
@@ -1969,9 +2240,7 @@
               &done_loop);
           break;
         case PrimitiveType::kString:
-          GotoIf(Int32LessThan(value_instance_type,
-                               Int32Constant(FIRST_NONSTRING_TYPE)),
-                 &done_loop);
+          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
           break;
         case PrimitiveType::kSymbol:
           GotoIf(Word32Equal(value_instance_type, Int32Constant(SYMBOL_TYPE)),
@@ -1995,6 +2264,45 @@
   return var_value.value();
 }
 
+Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
+                                                InstanceType instance_type,
+                                                char const* method_name) {
+  Label out(this), throw_exception(this, Label::kDeferred);
+  Variable var_value_map(this, MachineRepresentation::kTagged);
+
+  GotoIf(WordIsSmi(value), &throw_exception);
+
+  // Load the instance type of the {value}.
+  var_value_map.Bind(LoadMap(value));
+  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
+
+  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
+         &throw_exception);
+
+  // The {value} is not a compatible receiver for this method.
+  Bind(&throw_exception);
+  CallRuntime(
+      Runtime::kThrowIncompatibleMethodReceiver, context,
+      HeapConstant(factory()->NewStringFromAsciiChecked(method_name, TENURED)),
+      value);
+  var_value_map.Bind(UndefinedConstant());
+  Goto(&out);  // Never reached.
+
+  Bind(&out);
+  return var_value_map.value();
+}
+
+Node* CodeStubAssembler::IsStringInstanceType(Node* instance_type) {
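+  // String instance types occupy the low end of the instance type range, so a
+  // single comparison against FIRST_NONSTRING_TYPE suffices.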
+  STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
+  return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
+}
+
+Node* CodeStubAssembler::IsJSReceiverInstanceType(Node* instance_type) {
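+  // JSReceiver instance types occupy the high end of the range, so a single
+  // comparison against FIRST_JS_RECEIVER_TYPE suffices.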
+  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
+  return Int32GreaterThanOrEqual(instance_type,
+                                 Int32Constant(FIRST_JS_RECEIVER_TYPE));
+}
+
 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
   // Translate the {index} into a Word.
   index = SmiToWord(index);
@@ -2102,14 +2410,14 @@
         Bind(&if_stringisexternal);
         {
           // Check if the {string} is a short external string.
-          Label if_stringisshort(this),
-              if_stringisnotshort(this, Label::kDeferred);
+          Label if_stringisnotshort(this),
+              if_stringisshort(this, Label::kDeferred);
           Branch(Word32Equal(Word32And(string_instance_type,
                                        Int32Constant(kShortExternalStringMask)),
                              Int32Constant(0)),
-                 &if_stringisshort, &if_stringisnotshort);
+                 &if_stringisnotshort, &if_stringisshort);
 
-          Bind(&if_stringisshort);
+          Bind(&if_stringisnotshort);
           {
             // Load the actual resource data from the {string}.
             Node* string_resource_data =
@@ -2139,7 +2447,7 @@
             }
           }
 
-          Bind(&if_stringisnotshort);
+          Bind(&if_stringisshort);
           {
             // The {string} might be compressed, call the runtime.
             var_result.Bind(SmiToWord32(
@@ -2224,6 +2532,586 @@
   return var_result.value();
 }
 
+namespace {
+
+// A wrapper around CopyStringCharacters which determines the correct string
+// encoding, allocates a corresponding sequential string, and then copies the
+// given character range using CopyStringCharacters.
+// |from_string| must be a sequential string. |from_index| and
+// |character_count| must be Smis such that
+// 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
+Node* AllocAndCopyStringCharacters(CodeStubAssembler* a, Node* context,
+                                   Node* from, Node* from_instance_type,
+                                   Node* from_index, Node* character_count) {
+  typedef CodeStubAssembler::Label Label;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Label end(a), two_byte_sequential(a);
+  Variable var_result(a, MachineRepresentation::kTagged);
+
+  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
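+  // A cleared encoding bit means the string is two-byte.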
+  a->GotoIf(a->Word32Equal(a->Word32And(from_instance_type,
+                                        a->Int32Constant(kStringEncodingMask)),
+                           a->Int32Constant(0)),
+            &two_byte_sequential);
+
+  // The subject string is a sequential one-byte string.
+  {
+    Node* result =
+        a->AllocateSeqOneByteString(context, a->SmiToWord(character_count));
+    a->CopyStringCharacters(from, result, from_index, character_count,
+                            String::ONE_BYTE_ENCODING);
+    var_result.Bind(result);
+
+    a->Goto(&end);
+  }
+
+  // The subject string is a sequential two-byte string.
+  a->Bind(&two_byte_sequential);
+  {
+    Node* result =
+        a->AllocateSeqTwoByteString(context, a->SmiToWord(character_count));
+    a->CopyStringCharacters(from, result, from_index, character_count,
+                            String::TWO_BYTE_ENCODING);
+    var_result.Bind(result);
+
+    a->Goto(&end);
+  }
+
+  a->Bind(&end);
+  return var_result.value();
+}
+
+}  // namespace
+
+Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
+                                   Node* to) {
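+  // Fast path for substring extraction: dispatches on the substring length and
+  // the underlying string representation (cons, sliced, external, sequential),
+  // falling back to Runtime::kSubString for anything it cannot handle inline.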
+  Label end(this);
+  Label runtime(this);
+
+  Variable var_instance_type(this, MachineRepresentation::kWord8);  // Int32.
+  Variable var_result(this, MachineRepresentation::kTagged);        // String.
+  Variable var_from(this, MachineRepresentation::kTagged);          // Smi.
+  Variable var_string(this, MachineRepresentation::kTagged);        // String.
+
+  var_instance_type.Bind(Int32Constant(0));
+  var_string.Bind(string);
+  var_from.Bind(from);
+
+  // Make sure the first argument is a string.
+
+  // Bailout if receiver is a Smi.
+  GotoIf(WordIsSmi(string), &runtime);
+
+  // Load the instance type of the {string}.
+  Node* const instance_type = LoadInstanceType(string);
+  var_instance_type.Bind(instance_type);
+
+  // Check if {string} is a String.
+  GotoUnless(IsStringInstanceType(instance_type), &runtime);
+
+  // Make sure that both from and to are non-negative smis.
+
+  GotoUnless(WordIsPositiveSmi(from), &runtime);
+  GotoUnless(WordIsPositiveSmi(to), &runtime);
+
+  Node* const substr_length = SmiSub(to, from);
+  Node* const string_length = LoadStringLength(string);
+
+  // Begin dispatching based on substring length.
+
+  Label original_string_or_invalid_length(this);
+  GotoIf(SmiAboveOrEqual(substr_length, string_length),
+         &original_string_or_invalid_length);
+
+  // A real substring (substr_length < string_length).
+
+  Label single_char(this);
+  GotoIf(SmiEqual(substr_length, SmiConstant(Smi::FromInt(1))), &single_char);
+
+  // TODO(jgruber): Add an additional case for substring of length == 0?
+
+  // Deal with different string types: update the index if necessary
+  // and put the underlying string into var_string.
+
+  // If the string is not indirect, it can only be sequential or external.
+  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+  STATIC_ASSERT(kIsIndirectStringMask != 0);
+  Label underlying_unpacked(this);
+  GotoIf(Word32Equal(
+             Word32And(instance_type, Int32Constant(kIsIndirectStringMask)),
+             Int32Constant(0)),
+         &underlying_unpacked);
+
+  // The subject string is either a sliced or cons string.
+
+  Label sliced_string(this);
+  GotoIf(Word32NotEqual(
+             Word32And(instance_type, Int32Constant(kSlicedNotConsMask)),
+             Int32Constant(0)),
+         &sliced_string);
+
+  // Cons string.  Check whether it is flat, then fetch first part.
+  // Flat cons strings have an empty second part.
+  {
+    GotoIf(WordNotEqual(LoadObjectField(string, ConsString::kSecondOffset),
+                        EmptyStringConstant()),
+           &runtime);
+
+    Node* first_string_part = LoadObjectField(string, ConsString::kFirstOffset);
+    var_string.Bind(first_string_part);
+    var_instance_type.Bind(LoadInstanceType(first_string_part));
+
+    Goto(&underlying_unpacked);
+  }
+
+  Bind(&sliced_string);
+  {
+    // Fetch parent and correct start index by offset.
+    Node* sliced_offset = LoadObjectField(string, SlicedString::kOffsetOffset);
+    var_from.Bind(SmiAdd(from, sliced_offset));
+
+    Node* slice_parent = LoadObjectField(string, SlicedString::kParentOffset);
+    var_string.Bind(slice_parent);
+
+    Node* slice_parent_instance_type = LoadInstanceType(slice_parent);
+    var_instance_type.Bind(slice_parent_instance_type);
+
+    Goto(&underlying_unpacked);
+  }
+
+  // The subject string can only be external or sequential string of either
+  // encoding at this point.
+  Label external_string(this);
+  Bind(&underlying_unpacked);
+  {
+    if (FLAG_string_slices) {
+      Label copy_routine(this);
+
+      // Short slice.  Copy instead of slicing.
+      GotoIf(SmiLessThan(substr_length,
+                         SmiConstant(Smi::FromInt(SlicedString::kMinLength))),
+             &copy_routine);
+
+      // Allocate new sliced string.
+
+      Label two_byte_slice(this);
+      STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
+      STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+
+      Counters* counters = isolate()->counters();
+      IncrementCounter(counters->sub_string_native(), 1);
+
+      GotoIf(Word32Equal(Word32And(var_instance_type.value(),
+                                   Int32Constant(kStringEncodingMask)),
+                         Int32Constant(0)),
+             &two_byte_slice);
+
+      var_result.Bind(AllocateSlicedOneByteString(
+          substr_length, var_string.value(), var_from.value()));
+      Goto(&end);
+
+      Bind(&two_byte_slice);
+
+      var_result.Bind(AllocateSlicedTwoByteString(
+          substr_length, var_string.value(), var_from.value()));
+      Goto(&end);
+
+      Bind(&copy_routine);
+    }
+
+    // The subject string can only be external or sequential string of either
+    // encoding at this point.
+    STATIC_ASSERT(kExternalStringTag != 0);
+    STATIC_ASSERT(kSeqStringTag == 0);
+    GotoUnless(Word32Equal(Word32And(var_instance_type.value(),
+                                     Int32Constant(kExternalStringTag)),
+                           Int32Constant(0)),
+               &external_string);
+
+    var_result.Bind(AllocAndCopyStringCharacters(
+        this, context, var_string.value(), var_instance_type.value(),
+        var_from.value(), substr_length));
+
+    Counters* counters = isolate()->counters();
+    IncrementCounter(counters->sub_string_native(), 1);
+
+    Goto(&end);
+  }
+
+  // Handle external string.
+  Bind(&external_string);
+  {
+    // Rule out short external strings.
+    STATIC_ASSERT(kShortExternalStringTag != 0);
+    GotoIf(Word32NotEqual(Word32And(var_instance_type.value(),
+                                    Int32Constant(kShortExternalStringMask)),
+                          Int32Constant(0)),
+           &runtime);
+
+    // Move the pointer so that offset-wise, it looks like a sequential string.
+    STATIC_ASSERT(SeqTwoByteString::kHeaderSize ==
+                  SeqOneByteString::kHeaderSize);
+
+    Node* resource_data = LoadObjectField(var_string.value(),
+                                          ExternalString::kResourceDataOffset);
+    Node* const fake_sequential_string = IntPtrSub(
+        resource_data,
+        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+
+    var_result.Bind(AllocAndCopyStringCharacters(
+        this, context, fake_sequential_string, var_instance_type.value(),
+        var_from.value(), substr_length));
+
+    Counters* counters = isolate()->counters();
+    IncrementCounter(counters->sub_string_native(), 1);
+
+    Goto(&end);
+  }
+
+  // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
+  Bind(&single_char);
+  {
+    Node* char_code = StringCharCodeAt(var_string.value(), var_from.value());
+    var_result.Bind(StringFromCharCode(char_code));
+    Goto(&end);
+  }
+
+  Bind(&original_string_or_invalid_length);
+  {
+    // Longer than original string's length or negative: unsafe arguments.
+    GotoIf(SmiAbove(substr_length, string_length), &runtime);
+
+    // Equal length: check whether {from, to} == {0, str.length}.
+    GotoIf(SmiAbove(from, SmiConstant(Smi::FromInt(0))), &runtime);
+
+    // Return the original string (substr_length == string_length).
+
+    Counters* counters = isolate()->counters();
+    IncrementCounter(counters->sub_string_native(), 1);
+
+    var_result.Bind(string);
+    Goto(&end);
+  }
+
+  // Fall back to a runtime call.
+  Bind(&runtime);
+  {
+    var_result.Bind(
+        CallRuntime(Runtime::kSubString, context, string, from, to));
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::StringFromCodePoint(compiler::Node* codepoint,
+                                             UnicodeEncoding encoding) {
+  Variable var_result(this, MachineRepresentation::kTagged);
+  var_result.Bind(EmptyStringConstant());
+
+  Label if_isword16(this), if_isword32(this), return_result(this);
+
+  Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
+         &if_isword32);
+
+  Bind(&if_isword16);
+  {
+    var_result.Bind(StringFromCharCode(codepoint));
+    Goto(&return_result);
+  }
+
+  Bind(&if_isword32);
+  {
+    switch (encoding) {
+      case UnicodeEncoding::UTF16:
+        break;
+      case UnicodeEncoding::UTF32: {
+        // Convert the UTF32 code point to UTF16 code units and store them as
+        // a 32-bit word.
+        Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
+
+        // lead = (codepoint >> 10) + LEAD_OFFSET
+        Node* lead =
+            Int32Add(WordShr(codepoint, Int32Constant(10)), lead_offset);
+
+        // trail = (codepoint & 0x3FF) + 0xDC00;
+        Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
+                               Int32Constant(0xDC00));
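+        // Example: U+1F600 yields lead 0xD83D and trail 0xDE00.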
+
+        // codepoint = (trail << 16) | lead;
+        codepoint = Word32Or(WordShl(trail, Int32Constant(16)), lead);
+        break;
+      }
+    }
+
+    Node* value = AllocateSeqTwoByteString(2);
+    StoreNoWriteBarrier(
+        MachineRepresentation::kWord32, value,
+        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
+        codepoint);
+    var_result.Bind(value);
+    Goto(&return_result);
+  }
+
+  Bind(&return_result);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
+  Label runtime(this, Label::kDeferred);
+  Label end(this);
+
+  Variable var_result(this, MachineRepresentation::kTagged);
+
+  // Check if string has a cached array index.
+  Node* hash = LoadNameHashField(input);
+  Node* bit =
+      Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
+  GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);
+
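+  // The cached array index lives in the hash field; decode it and return it
+  // as a Smi.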
+  var_result.Bind(SmiTag(BitFieldDecode<String::ArrayIndexValueBits>(hash)));
+  Goto(&end);
+
+  Bind(&runtime);
+  {
+    var_result.Bind(CallRuntime(Runtime::kStringToNumber, context, input));
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::ToName(Node* context, Node* value) {
+  typedef CodeStubAssembler::Label Label;
+  typedef CodeStubAssembler::Variable Variable;
+
+  Label end(this);
+  Variable var_result(this, MachineRepresentation::kTagged);
+
+  Label is_number(this);
+  GotoIf(WordIsSmi(value), &is_number);
+
+  Label not_name(this);
+  Node* value_instance_type = LoadInstanceType(value);
+  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
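+  // Names (strings and symbols) have instance types in the range
+  // [FIRST_TYPE, LAST_NAME_TYPE].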
+  GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
+         &not_name);
+
+  var_result.Bind(value);
+  Goto(&end);
+
+  Bind(&is_number);
+  {
+    Callable callable = CodeFactory::NumberToString(isolate());
+    var_result.Bind(CallStub(callable, context, value));
+    Goto(&end);
+  }
+
+  Bind(&not_name);
+  {
+    GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
+           &is_number);
+
+    Label not_oddball(this);
+    GotoIf(Word32NotEqual(value_instance_type, Int32Constant(ODDBALL_TYPE)),
+           &not_oddball);
+
+    var_result.Bind(LoadObjectField(value, Oddball::kToStringOffset));
+    Goto(&end);
+
+    Bind(&not_oddball);
+    {
+      var_result.Bind(CallRuntime(Runtime::kToName, context, value));
+      Goto(&end);
+    }
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
+  // Assert that the input is neither a Smi nor a HeapNumber.
+  Assert(Word32BinaryNot(WordIsSmi(input)));
+  Assert(Word32NotEqual(LoadMap(input), HeapNumberMapConstant()));
+
+  // We might need to loop once here due to ToPrimitive conversions.
+  Variable var_input(this, MachineRepresentation::kTagged);
+  Variable var_result(this, MachineRepresentation::kTagged);
+  Label loop(this, &var_input);
+  Label end(this);
+  var_input.Bind(input);
+  Goto(&loop);
+  Bind(&loop);
+  {
+    // Load the current {input} value (known to be a HeapObject).
+    Node* input = var_input.value();
+
+    // Dispatch on the {input} instance type.
+    Node* input_instance_type = LoadInstanceType(input);
+    Label if_inputisstring(this), if_inputisoddball(this),
+        if_inputisreceiver(this, Label::kDeferred),
+        if_inputisother(this, Label::kDeferred);
+    GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
+    GotoIf(Word32Equal(input_instance_type, Int32Constant(ODDBALL_TYPE)),
+           &if_inputisoddball);
+    Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
+           &if_inputisother);
+
+    Bind(&if_inputisstring);
+    {
+      // The {input} is a String, use the fast stub to convert it to a Number.
+      var_result.Bind(StringToNumber(context, input));
+      Goto(&end);
+    }
+
+    Bind(&if_inputisoddball);
+    {
+      // The {input} is an Oddball; we just need to load its Number value.
+      var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
+      Goto(&end);
+    }
+
+    Bind(&if_inputisreceiver);
+    {
+      // The {input} is a JSReceiver, we need to convert it to a Primitive first
+      // using the ToPrimitive type conversion, preferably yielding a Number.
+      Callable callable = CodeFactory::NonPrimitiveToPrimitive(
+          isolate(), ToPrimitiveHint::kNumber);
+      Node* result = CallStub(callable, context, input);
+
+      // Check if the {result} is already a Number.
+      Label if_resultisnumber(this), if_resultisnotnumber(this);
+      GotoIf(WordIsSmi(result), &if_resultisnumber);
+      Node* result_map = LoadMap(result);
+      Branch(WordEqual(result_map, HeapNumberMapConstant()), &if_resultisnumber,
+             &if_resultisnotnumber);
+
+      Bind(&if_resultisnumber);
+      {
+        // The ToPrimitive conversion already gave us a Number, so we're done.
+        var_result.Bind(result);
+        Goto(&end);
+      }
+
+      Bind(&if_resultisnotnumber);
+      {
+        // We now have a Primitive {result}, but it's not yet a Number.
+        var_input.Bind(result);
+        Goto(&loop);
+      }
+    }
+
+    Bind(&if_inputisother);
+    {
+      // The {input} is something else (i.e. a Symbol or Simd128Value); let the
+      // runtime figure out the correct exception.
+      // Note: We cannot tail call to the runtime here, as js-to-wasm
+      // trampolines also use this code currently, and they declare all
+      // outgoing parameters as untagged, while we would push a tagged
+      // object here.
+      var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
+      Goto(&end);
+    }
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
+  Variable var_result(this, MachineRepresentation::kTagged);
+  Label end(this);
+
+  Label not_smi(this, Label::kDeferred);
+  GotoUnless(WordIsSmi(input), &not_smi);
+  var_result.Bind(input);
+  Goto(&end);
+
+  Bind(&not_smi);
+  {
+    Label not_heap_number(this, Label::kDeferred);
+    Node* input_map = LoadMap(input);
+    GotoIf(Word32NotEqual(input_map, HeapNumberMapConstant()),
+           &not_heap_number);
+
+    var_result.Bind(input);
+    Goto(&end);
+
+    Bind(&not_heap_number);
+    {
+      var_result.Bind(NonNumberToNumber(context, input));
+      Goto(&end);
+    }
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
+Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
+                                   ToIntegerTruncationMode mode) {
+  // We might need to loop once for ToNumber conversion.
+  Variable var_arg(this, MachineRepresentation::kTagged);
+  Label loop(this, &var_arg), out(this);
+  var_arg.Bind(input);
+  Goto(&loop);
+  Bind(&loop);
+  {
+    // Shared entry points.
+    Label return_zero(this, Label::kDeferred);
+
+    // Load the current {arg} value.
+    Node* arg = var_arg.value();
+
+    // Check if {arg} is a Smi.
+    GotoIf(WordIsSmi(arg), &out);
+
+    // Check if {arg} is a HeapNumber.
+    Label if_argisheapnumber(this),
+        if_argisnotheapnumber(this, Label::kDeferred);
+    Branch(WordEqual(LoadMap(arg), HeapNumberMapConstant()),
+           &if_argisheapnumber, &if_argisnotheapnumber);
+
+    Bind(&if_argisheapnumber);
+    {
+      // Load the floating-point value of {arg}.
+      Node* arg_value = LoadHeapNumberValue(arg);
+
+      // Check if {arg} is NaN.
+      GotoUnless(Float64Equal(arg_value, arg_value), &return_zero);
+
+      // Truncate {arg} towards zero.
+      Node* value = Float64Trunc(arg_value);
+
+      if (mode == kTruncateMinusZero) {
+        // Truncate -0.0 to 0.
+        GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
+      }
+
+      var_arg.Bind(ChangeFloat64ToTagged(value));
+      Goto(&out);
+    }
+
+    Bind(&if_argisnotheapnumber);
+    {
+      // Need to convert {arg} to a Number first.
+      Callable callable = CodeFactory::NonNumberToNumber(isolate());
+      var_arg.Bind(CallStub(callable, context, arg));
+      Goto(&loop);
+    }
+
+    Bind(&return_zero);
+    var_arg.Bind(SmiConstant(Smi::FromInt(0)));
+    Goto(&out);
+  }
+
+  Bind(&out);
+  return var_arg.value();
+}
+
 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                         uint32_t mask) {
   return Word32Shr(Word32And(word32, Int32Constant(mask)),
@@ -2265,54 +3153,49 @@
 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
                                   Variable* var_index, Label* if_keyisunique,
                                   Label* if_bailout) {
-  DCHECK_EQ(MachineRepresentation::kWord32, var_index->rep());
+  DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
   Comment("TryToName");
 
-  Label if_keyissmi(this), if_keyisnotsmi(this);
-  Branch(WordIsSmi(key), &if_keyissmi, &if_keyisnotsmi);
-  Bind(&if_keyissmi);
-  {
-    // Negative smi keys are named properties. Handle in the runtime.
-    GotoUnless(WordIsPositiveSmi(key), if_bailout);
+  Label if_hascachedindex(this), if_keyisnotindex(this);
+  // Handle Smi and HeapNumber keys.
+  var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
+  Goto(if_keyisindex);
 
-    var_index->Bind(SmiToWord32(key));
-    Goto(if_keyisindex);
-  }
-
-  Bind(&if_keyisnotsmi);
-
+  Bind(&if_keyisnotindex);
   Node* key_instance_type = LoadInstanceType(key);
   // Symbols are unique.
   GotoIf(Word32Equal(key_instance_type, Int32Constant(SYMBOL_TYPE)),
          if_keyisunique);
-
-  Label if_keyisinternalized(this);
-  Node* bits =
-      WordAnd(key_instance_type,
-              Int32Constant(kIsNotStringMask | kIsNotInternalizedMask));
-  Branch(Word32Equal(bits, Int32Constant(kStringTag | kInternalizedTag)),
-         &if_keyisinternalized, if_bailout);
-  Bind(&if_keyisinternalized);
-
-  // Check whether the key is an array index passed in as string. Handle
-  // uniform with smi keys if so.
-  // TODO(verwaest): Also support non-internalized strings.
+  // Miss if |key| is not a String.
+  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
+  GotoUnless(IsStringInstanceType(key_instance_type), if_bailout);
+  // |key| is a String. Check if it has a cached array index.
   Node* hash = LoadNameHashField(key);
-  Node* bit = Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
-  GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique);
-  // Key is an index. Check if it is small enough to be encoded in the
-  // hash_field. Handle too big array index in runtime.
-  bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
-  GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout);
+  Node* contains_index =
+      Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
+  GotoIf(Word32Equal(contains_index, Int32Constant(0)), &if_hascachedindex);
+  // No cached array index. If the string knows that it contains an index,
+  // then it must be an uncacheable index. Handle this case in the runtime.
+  Node* not_an_index =
+      Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
+  GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout);
+  // Finally, check if |key| is internalized.
+  STATIC_ASSERT(kNotInternalizedTag != 0);
+  Node* not_internalized =
+      Word32And(key_instance_type, Int32Constant(kIsNotInternalizedMask));
+  GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout);
+  Goto(if_keyisunique);
+
+  Bind(&if_hascachedindex);
   var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash));
   Goto(if_keyisindex);
 }
 
 template <typename Dictionary>
 Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
-  Node* entry_index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize));
-  return Int32Add(entry_index,
-                  Int32Constant(Dictionary::kElementsStartIndex + field_index));
+  Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
+  return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
+                                               field_index));
 }
 
 template <typename Dictionary>
@@ -2321,34 +3204,36 @@
                                              Variable* var_name_index,
                                              Label* if_not_found,
                                              int inlined_probes) {
-  DCHECK_EQ(MachineRepresentation::kWord32, var_name_index->rep());
+  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
   Comment("NameDictionaryLookup");
 
-  Node* capacity = LoadAndUntagToWord32FixedArrayElement(
-      dictionary, Int32Constant(Dictionary::kCapacityIndex));
-  Node* mask = Int32Sub(capacity, Int32Constant(1));
-  Node* hash = LoadNameHash(unique_name);
+  Node* capacity = SmiUntag(LoadFixedArrayElement(
+      dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
+      INTPTR_PARAMETERS));
+  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
+  Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));
 
   // See Dictionary::FirstProbe().
-  Node* count = Int32Constant(0);
-  Node* entry = Word32And(hash, mask);
+  Node* count = IntPtrConstant(0);
+  Node* entry = WordAnd(hash, mask);
 
   for (int i = 0; i < inlined_probes; i++) {
     Node* index = EntryToIndex<Dictionary>(entry);
     var_name_index->Bind(index);
 
-    Node* current = LoadFixedArrayElement(dictionary, index);
+    Node* current =
+        LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
     GotoIf(WordEqual(current, unique_name), if_found);
 
     // See Dictionary::NextProbe().
-    count = Int32Constant(i + 1);
-    entry = Word32And(Int32Add(entry, count), mask);
+    count = IntPtrConstant(i + 1);
+    entry = WordAnd(IntPtrAdd(entry, count), mask);
   }
 
   Node* undefined = UndefinedConstant();
 
-  Variable var_count(this, MachineRepresentation::kWord32);
-  Variable var_entry(this, MachineRepresentation::kWord32);
+  Variable var_count(this, MachineType::PointerRepresentation());
+  Variable var_entry(this, MachineType::PointerRepresentation());
   Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
   Label loop(this, 3, loop_vars);
   var_count.Bind(count);
@@ -2362,13 +3247,14 @@
     Node* index = EntryToIndex<Dictionary>(entry);
     var_name_index->Bind(index);
 
-    Node* current = LoadFixedArrayElement(dictionary, index);
+    Node* current =
+        LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
     GotoIf(WordEqual(current, undefined), if_not_found);
     GotoIf(WordEqual(current, unique_name), if_found);
 
     // See Dictionary::NextProbe().
-    count = Int32Add(count, Int32Constant(1));
-    entry = Word32And(Int32Add(entry, count), mask);
+    count = IntPtrAdd(count, IntPtrConstant(1));
+    entry = WordAnd(IntPtrAdd(entry, count), mask);
 
     var_count.Bind(count);
     var_entry.Bind(entry);
@@ -2397,34 +3283,36 @@
 }
 
 template <typename Dictionary>
-void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary, Node* key,
+void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
+                                               Node* intptr_index,
                                                Label* if_found,
                                                Variable* var_entry,
                                                Label* if_not_found) {
-  DCHECK_EQ(MachineRepresentation::kWord32, var_entry->rep());
+  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
   Comment("NumberDictionaryLookup");
 
-  Node* capacity = LoadAndUntagToWord32FixedArrayElement(
-      dictionary, Int32Constant(Dictionary::kCapacityIndex));
-  Node* mask = Int32Sub(capacity, Int32Constant(1));
+  Node* capacity = SmiUntag(LoadFixedArrayElement(
+      dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
+      INTPTR_PARAMETERS));
+  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
 
-  Node* seed;
+  Node* int32_seed;
   if (Dictionary::ShapeT::UsesSeed) {
-    seed = HashSeed();
+    int32_seed = HashSeed();
   } else {
-    seed = Int32Constant(kZeroHashSeed);
+    int32_seed = Int32Constant(kZeroHashSeed);
   }
-  Node* hash = ComputeIntegerHash(key, seed);
-  Node* key_as_float64 = ChangeUint32ToFloat64(key);
+  Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
+  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
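+  // HeapNumber keys in the dictionary are compared against this float64 value.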
 
   // See Dictionary::FirstProbe().
-  Node* count = Int32Constant(0);
-  Node* entry = Word32And(hash, mask);
+  Node* count = IntPtrConstant(0);
+  Node* entry = WordAnd(hash, mask);
 
   Node* undefined = UndefinedConstant();
   Node* the_hole = TheHoleConstant();
 
-  Variable var_count(this, MachineRepresentation::kWord32);
+  Variable var_count(this, MachineType::PointerRepresentation());
   Variable* loop_vars[] = {&var_count, var_entry};
   Label loop(this, 2, loop_vars);
   var_count.Bind(count);
@@ -2436,7 +3324,8 @@
     Node* entry = var_entry->value();
 
     Node* index = EntryToIndex<Dictionary>(entry);
-    Node* current = LoadFixedArrayElement(dictionary, index);
+    Node* current =
+        LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
     GotoIf(WordEqual(current, undefined), if_not_found);
     Label next_probe(this);
     {
@@ -2444,8 +3333,8 @@
       Branch(WordIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
       Bind(&if_currentissmi);
       {
-        Node* current_value = SmiToWord32(current);
-        Branch(Word32Equal(current_value, key), if_found, &next_probe);
+        Node* current_value = SmiUntag(current);
+        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
       }
       Bind(&if_currentisnotsmi);
       {
@@ -2459,8 +3348,8 @@
 
     Bind(&next_probe);
     // See Dictionary::NextProbe().
-    count = Int32Add(count, Int32Constant(1));
-    entry = Word32And(Int32Add(entry, count), mask);
+    count = IntPtrAdd(count, IntPtrConstant(1));
+    entry = WordAnd(IntPtrAdd(entry, count), mask);
 
     var_count.Bind(count);
     var_entry->Bind(entry);
@@ -2468,13 +3357,39 @@
   }
 }
 
+void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
+                                               Node* descriptors, Node* nof,
+                                               Label* if_found,
+                                               Variable* var_name_index,
+                                               Label* if_not_found) {
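+  // Linearly scan the first |nof| descriptors of |descriptors| for
+  // |unique_name|, binding the key's array index into |var_name_index|.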
+  Variable var_descriptor(this, MachineType::PointerRepresentation());
+  Label loop(this, &var_descriptor);
+  var_descriptor.Bind(IntPtrConstant(0));
+  Goto(&loop);
+
+  Bind(&loop);
+  {
+    Node* index = var_descriptor.value();
+    Node* name_offset = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
+    Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
+    GotoIf(WordEqual(index, nof), if_not_found);
+    Node* name_index = IntPtrAdd(name_offset, IntPtrMul(index, factor));
+    Node* candidate_name =
+        LoadFixedArrayElement(descriptors, name_index, 0, INTPTR_PARAMETERS);
+    var_name_index->Bind(name_index);
+    GotoIf(WordEqual(candidate_name, unique_name), if_found);
+    var_descriptor.Bind(IntPtrAdd(index, IntPtrConstant(1)));
+    Goto(&loop);
+  }
+}
+
 void CodeStubAssembler::TryLookupProperty(
     Node* object, Node* map, Node* instance_type, Node* unique_name,
     Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
     Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
     Label* if_bailout) {
   DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
-  DCHECK_EQ(MachineRepresentation::kWord32, var_name_index->rep());
+  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
 
   Label if_objectisspecial(this);
   STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
@@ -2494,36 +3409,18 @@
   Bind(&if_isfastmap);
   {
     Comment("DescriptorArrayLookup");
-    Node* nof = BitFieldDecode<Map::NumberOfOwnDescriptorsBits>(bit_field3);
+    Node* nof = BitFieldDecodeWord<Map::NumberOfOwnDescriptorsBits>(bit_field3);
     // Bail out to the runtime for large numbers of own descriptors. The stub
     // only does linear search, which becomes too expensive in that case.
     {
       static const int32_t kMaxLinear = 210;
-      GotoIf(Int32GreaterThan(nof, Int32Constant(kMaxLinear)), if_bailout);
+      GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), if_bailout);
     }
     Node* descriptors = LoadMapDescriptors(map);
     var_meta_storage->Bind(descriptors);
 
-    Variable var_descriptor(this, MachineRepresentation::kWord32);
-    Label loop(this, &var_descriptor);
-    var_descriptor.Bind(Int32Constant(0));
-    Goto(&loop);
-    Bind(&loop);
-    {
-      Node* index = var_descriptor.value();
-      Node* name_offset = Int32Constant(DescriptorArray::ToKeyIndex(0));
-      Node* factor = Int32Constant(DescriptorArray::kDescriptorSize);
-      GotoIf(Word32Equal(index, nof), if_not_found);
-
-      Node* name_index = Int32Add(name_offset, Int32Mul(index, factor));
-      Node* name = LoadFixedArrayElement(descriptors, name_index);
-
-      var_name_index->Bind(name_index);
-      GotoIf(WordEqual(name, unique_name), if_found_fast);
-
-      var_descriptor.Bind(Int32Add(index, Int32Constant(1)));
-      Goto(&loop);
-    }
+    DescriptorLookupLinear(unique_name, descriptors, nof, if_found_fast,
+                           var_name_index, if_not_found);
   }
   Bind(&if_isslowmap);
   {
@@ -2562,7 +3459,7 @@
                                           Label* if_bailout) {
   Comment("TryHasOwnProperty");
   Variable var_meta_storage(this, MachineRepresentation::kTagged);
-  Variable var_name_index(this, MachineRepresentation::kWord32);
+  Variable var_name_index(this, MachineType::PointerRepresentation());
 
   Label if_found_global(this);
   TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
@@ -2608,7 +3505,7 @@
   Bind(&if_in_field);
   {
     Node* field_index =
-        BitFieldDecode<PropertyDetails::FieldIndexField>(details);
+        BitFieldDecodeWord<PropertyDetails::FieldIndexField>(details);
     Node* representation =
         BitFieldDecode<PropertyDetails::RepresentationField>(details);
 
@@ -2617,15 +3514,15 @@
     Label if_inobject(this), if_backing_store(this);
     Variable var_double_value(this, MachineRepresentation::kFloat64);
     Label rebox_double(this, &var_double_value);
-    BranchIfInt32LessThan(field_index, inobject_properties, &if_inobject,
-                          &if_backing_store);
+    BranchIfUintPtrLessThan(field_index, inobject_properties, &if_inobject,
+                            &if_backing_store);
     Bind(&if_inobject);
     {
       Comment("if_inobject");
-      Node* field_offset = ChangeInt32ToIntPtr(
-          Int32Mul(Int32Sub(LoadMapInstanceSize(map),
-                            Int32Sub(inobject_properties, field_index)),
-                   Int32Constant(kPointerSize)));
+      Node* field_offset =
+          IntPtrMul(IntPtrSub(LoadMapInstanceSize(map),
+                              IntPtrSub(inobject_properties, field_index)),
+                    IntPtrConstant(kPointerSize));
 
       Label if_double(this), if_tagged(this);
       BranchIfWord32NotEqual(representation,
@@ -2652,7 +3549,7 @@
     {
       Comment("if_backing_store");
       Node* properties = LoadProperties(object);
-      field_index = Int32Sub(field_index, inobject_properties);
+      field_index = IntPtrSub(field_index, inobject_properties);
       Node* value = LoadFixedArrayElement(properties, field_index);
 
       Label if_double(this), if_tagged(this);
@@ -2739,6 +3636,52 @@
   Comment("] LoadPropertyFromGlobalDictionary");
 }
 
+// |value| is the property backing store's contents, which is either a value
+// or an accessor pair, as specified by |details|.
+// Returns either the original value, or the result of the getter call.
+Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
+                                              Node* context, Node* receiver,
+                                              Label* if_bailout) {
+  Variable var_value(this, MachineRepresentation::kTagged);
+  var_value.Bind(value);
+  Label done(this);
+
+  Node* kind = BitFieldDecode<PropertyDetails::KindField>(details);
+  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
+
+  // Accessor case.
+  {
+    Node* accessor_pair = value;
+    GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
+                       Int32Constant(ACCESSOR_INFO_TYPE)),
+           if_bailout);
+    AssertInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE);
+    Node* getter = LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
+    Node* getter_map = LoadMap(getter);
+    Node* instance_type = LoadMapInstanceType(getter_map);
+    // FunctionTemplateInfo getters are not supported yet.
+    GotoIf(
+        Word32Equal(instance_type, Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
+        if_bailout);
+
+    // Return undefined if the {getter} is not callable.
+    var_value.Bind(UndefinedConstant());
+    GotoIf(Word32Equal(Word32And(LoadMapBitField(getter_map),
+                                 Int32Constant(1 << Map::kIsCallable)),
+                       Int32Constant(0)),
+           &done);
+
+    // Call the accessor.
+    Callable callable = CodeFactory::Call(isolate());
+    Node* result = CallJS(callable, context, getter, receiver);
+    var_value.Bind(result);
+    Goto(&done);
+  }
+
+  Bind(&done);
+  return var_value.value();
+}
+
 void CodeStubAssembler::TryGetOwnProperty(
     Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
     Node* unique_name, Label* if_found_value, Variable* var_value,
@@ -2747,7 +3690,7 @@
   Comment("TryGetOwnProperty");
 
   Variable var_meta_storage(this, MachineRepresentation::kTagged);
-  Variable var_entry(this, MachineRepresentation::kWord32);
+  Variable var_entry(this, MachineType::PointerRepresentation());
 
   Label if_found_fast(this), if_found_dict(this), if_found_global(this);
 
@@ -2786,59 +3729,28 @@
   // Here we have details and value which could be an accessor.
   Bind(&if_found);
   {
-    Node* details = var_details.value();
-    Node* kind = BitFieldDecode<PropertyDetails::KindField>(details);
-
-    Label if_accessor(this);
-    Branch(Word32Equal(kind, Int32Constant(kData)), if_found_value,
-           &if_accessor);
-    Bind(&if_accessor);
-    {
-      Node* accessor_pair = var_value->value();
-      GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
-                         Int32Constant(ACCESSOR_INFO_TYPE)),
-             if_bailout);
-      AssertInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE);
-      Node* getter =
-          LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
-      Node* getter_map = LoadMap(getter);
-      Node* instance_type = LoadMapInstanceType(getter_map);
-      // FunctionTemplateInfo getters are not supported yet.
-      GotoIf(Word32Equal(instance_type,
-                         Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
-             if_bailout);
-
-      // Return undefined if the {getter} is not callable.
-      var_value->Bind(UndefinedConstant());
-      GotoIf(Word32Equal(Word32And(LoadMapBitField(getter_map),
-                                   Int32Constant(1 << Map::kIsCallable)),
-                         Int32Constant(0)),
-             if_found_value);
-
-      // Call the accessor.
-      Callable callable = CodeFactory::Call(isolate());
-      Node* result = CallJS(callable, context, getter, receiver);
-      var_value->Bind(result);
-      Goto(if_found_value);
-    }
+    Node* value = CallGetterIfAccessor(var_value->value(), var_details.value(),
+                                       context, receiver, if_bailout);
+    var_value->Bind(value);
+    Goto(if_found_value);
   }
 }
 
 void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
-                                         Node* instance_type, Node* index,
-                                         Label* if_found, Label* if_not_found,
+                                         Node* instance_type,
+                                         Node* intptr_index, Label* if_found,
+                                         Label* if_not_found,
                                          Label* if_bailout) {
   // Handle special objects in runtime.
   GotoIf(Int32LessThanOrEqual(instance_type,
                               Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
          if_bailout);
 
-  Node* bit_field2 = LoadMapBitField2(map);
-  Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);
+  Node* elements_kind = LoadMapElementsKind(map);
 
   // TODO(verwaest): Support other elements kinds as well.
   Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
-      if_isfaststringwrapper(this), if_isslowstringwrapper(this);
+      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this);
   // clang-format off
   int32_t values[] = {
       // Handled by {if_isobjectorsmi}.
@@ -2873,9 +3785,10 @@
     Node* elements = LoadElements(object);
     Node* length = LoadAndUntagFixedArrayBaseLength(elements);
 
-    GotoUnless(Uint32LessThan(index, length), if_not_found);
+    GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
 
-    Node* element = LoadFixedArrayElement(elements, index);
+    Node* element =
+        LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
     Node* the_hole = TheHoleConstant();
     Branch(WordEqual(element, the_hole), if_not_found, if_found);
   }
@@ -2884,48 +3797,45 @@
     Node* elements = LoadElements(object);
     Node* length = LoadAndUntagFixedArrayBaseLength(elements);
 
-    GotoUnless(Uint32LessThan(index, length), if_not_found);
+    GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
 
-    if (kPointerSize == kDoubleSize) {
-      Node* element =
-          LoadFixedDoubleArrayElement(elements, index, MachineType::Uint64());
-      Node* the_hole = Int64Constant(kHoleNanInt64);
-      Branch(Word64Equal(element, the_hole), if_not_found, if_found);
-    } else {
-      Node* element_upper =
-          LoadFixedDoubleArrayElement(elements, index, MachineType::Uint32(),
-                                      kIeeeDoubleExponentWordOffset);
-      Branch(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
-             if_not_found, if_found);
-    }
+    // Check if the element is a double hole, but don't load it.
+    LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
+                                INTPTR_PARAMETERS, if_not_found);
+    Goto(if_found);
   }
   Bind(&if_isdictionary);
   {
-    Variable var_entry(this, MachineRepresentation::kWord32);
+    Variable var_entry(this, MachineType::PointerRepresentation());
     Node* elements = LoadElements(object);
-    NumberDictionaryLookup<SeededNumberDictionary>(elements, index, if_found,
-                                                   &var_entry, if_not_found);
+    NumberDictionaryLookup<SeededNumberDictionary>(
+        elements, intptr_index, if_found, &var_entry, if_not_found);
   }
   Bind(&if_isfaststringwrapper);
   {
     AssertInstanceType(object, JS_VALUE_TYPE);
     Node* string = LoadJSValueValue(object);
-    Assert(Int32LessThan(LoadInstanceType(string),
-                         Int32Constant(FIRST_NONSTRING_TYPE)));
+    Assert(IsStringInstanceType(LoadInstanceType(string)));
     Node* length = LoadStringLength(string);
-    GotoIf(Uint32LessThan(index, SmiToWord32(length)), if_found);
+    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
     Goto(&if_isobjectorsmi);
   }
   Bind(&if_isslowstringwrapper);
   {
     AssertInstanceType(object, JS_VALUE_TYPE);
     Node* string = LoadJSValueValue(object);
-    Assert(Int32LessThan(LoadInstanceType(string),
-                         Int32Constant(FIRST_NONSTRING_TYPE)));
+    Assert(IsStringInstanceType(LoadInstanceType(string)));
     Node* length = LoadStringLength(string);
-    GotoIf(Uint32LessThan(index, SmiToWord32(length)), if_found);
+    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
     Goto(&if_isdictionary);
   }
+  Bind(&if_oob);
+  {
+    // Positive out-of-bounds indices mean "not found"; negative indices must
+    // be converted to property names.
+    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
+    Goto(if_not_found);
+  }
 }
 
 // Instantiate template methods to workaround GCC compilation issue.
@@ -2955,7 +3865,7 @@
     Bind(&if_objectisreceiver);
   }
 
-  Variable var_index(this, MachineRepresentation::kWord32);
+  Variable var_index(this, MachineType::PointerRepresentation());
 
   Label if_keyisindex(this), if_iskeyunique(this);
   TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, if_bailout);
@@ -3183,19 +4093,22 @@
                                                           ElementsKind kind,
                                                           ParameterMode mode,
                                                           int base_size) {
-  bool is_double = IsFastDoubleElementsKind(kind);
-  int element_size_shift = is_double ? kDoubleSizeLog2 : kPointerSizeLog2;
+  int element_size_shift = ElementsKindToShiftSize(kind);
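+  // The offset is base_size + index * element_size; for Smi-tagged indices the
+  // Smi shift is folded into |element_size_shift| below.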
   int element_size = 1 << element_size_shift;
   int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
-  int32_t index = 0;
+  intptr_t index = 0;
   bool constant_index = false;
   if (mode == SMI_PARAMETERS) {
     element_size_shift -= kSmiShiftBits;
-    intptr_t temp = 0;
-    constant_index = ToIntPtrConstant(index_node, temp);
-    index = temp >> kSmiShiftBits;
+    constant_index = ToIntPtrConstant(index_node, index);
+    index = index >> kSmiShiftBits;
+  } else if (mode == INTEGER_PARAMETERS) {
+    int32_t temp = 0;
+    constant_index = ToInt32Constant(index_node, temp);
+    index = static_cast<intptr_t>(temp);
   } else {
-    constant_index = ToInt32Constant(index_node, index);
+    DCHECK(mode == INTPTR_PARAMETERS);
+    constant_index = ToIntPtrConstant(index_node, index);
   }
   if (constant_index) {
     return IntPtrConstant(base_size + element_size * index);
@@ -3225,32 +4138,16 @@
 void CodeStubAssembler::UpdateFeedback(compiler::Node* feedback,
                                        compiler::Node* type_feedback_vector,
                                        compiler::Node* slot_id) {
-  Label combine_feedback(this), record_feedback(this), end(this);
-
+  // This method is used for binary op and compare feedback. These
+  // vector nodes are initialized with a smi 0, so we can simply OR
+  // our new feedback in place.
+  // TODO(interpreter): Consider passing the feedback as Smi already to avoid
+  // the tagging completely.
   Node* previous_feedback =
       LoadFixedArrayElement(type_feedback_vector, slot_id);
-  Node* is_uninitialized = WordEqual(
-      previous_feedback,
-      HeapConstant(TypeFeedbackVector::UninitializedSentinel(isolate())));
-  BranchIf(is_uninitialized, &record_feedback, &combine_feedback);
-
-  Bind(&record_feedback);
-  {
-    StoreFixedArrayElement(type_feedback_vector, slot_id, SmiTag(feedback),
-                           SKIP_WRITE_BARRIER);
-    Goto(&end);
-  }
-
-  Bind(&combine_feedback);
-  {
-    Node* untagged_previous_feedback = SmiUntag(previous_feedback);
-    Node* combined_feedback = Word32Or(untagged_previous_feedback, feedback);
-    StoreFixedArrayElement(type_feedback_vector, slot_id,
-                           SmiTag(combined_feedback), SKIP_WRITE_BARRIER);
-    Goto(&end);
-  }
-
-  Bind(&end);
+  Node* combined_feedback = SmiOr(previous_feedback, SmiFromWord32(feedback));
+  StoreFixedArrayElement(type_feedback_vector, slot_id, combined_feedback,
+                         SKIP_WRITE_BARRIER);
 }
 
 compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
@@ -3275,23 +4172,23 @@
 }
 
 compiler::Node* CodeStubAssembler::TryMonomorphicCase(
-    const LoadICParameters* p, compiler::Node* receiver_map, Label* if_handler,
-    Variable* var_handler, Label* if_miss) {
+    compiler::Node* slot, compiler::Node* vector, compiler::Node* receiver_map,
+    Label* if_handler, Variable* var_handler, Label* if_miss) {
   DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
 
   // TODO(ishell): add helper class that hides offset computations for a series
   // of loads.
   int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag;
-  Node* offset = ElementOffsetFromIndex(p->slot, FAST_HOLEY_ELEMENTS,
+  Node* offset = ElementOffsetFromIndex(slot, FAST_HOLEY_ELEMENTS,
                                         SMI_PARAMETERS, header_size);
-  Node* feedback = Load(MachineType::AnyTagged(), p->vector, offset);
+  Node* feedback = Load(MachineType::AnyTagged(), vector, offset);
 
   // Try to quickly handle the monomorphic case without knowing for sure
   // if we have a weak cell in feedback. We do know it's safe to look
   // at WeakCell::kValueOffset.
   GotoUnless(WordEqual(receiver_map, LoadWeakCellValue(feedback)), if_miss);
 
-  Node* handler = Load(MachineType::AnyTagged(), p->vector,
+  Node* handler = Load(MachineType::AnyTagged(), vector,
                        IntPtrAdd(offset, IntPtrConstant(kPointerSize)));
 
   var_handler->Bind(handler);
@@ -3300,9 +4197,8 @@
 }
 
 void CodeStubAssembler::HandlePolymorphicCase(
-    const LoadICParameters* p, compiler::Node* receiver_map,
-    compiler::Node* feedback, Label* if_handler, Variable* var_handler,
-    Label* if_miss, int unroll_count) {
+    compiler::Node* receiver_map, compiler::Node* feedback, Label* if_handler,
+    Variable* var_handler, Label* if_miss, int unroll_count) {
   DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
 
   // Iterate {feedback} array.
@@ -3310,13 +4206,13 @@
 
   for (int i = 0; i < unroll_count; i++) {
     Label next_entry(this);
-    Node* cached_map = LoadWeakCellValue(
-        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize)));
+    Node* cached_map = LoadWeakCellValue(LoadFixedArrayElement(
+        feedback, IntPtrConstant(i * kEntrySize), 0, INTPTR_PARAMETERS));
     GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
 
     // Found, now call handler.
-    Node* handler =
-        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize + 1));
+    Node* handler = LoadFixedArrayElement(
+        feedback, IntPtrConstant(i * kEntrySize + 1), 0, INTPTR_PARAMETERS);
     var_handler->Bind(handler);
     Goto(if_handler);
 
@@ -3325,28 +4221,29 @@
   Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
 
   // Loop from {unroll_count}*kEntrySize to {length}.
-  Variable var_index(this, MachineRepresentation::kWord32);
+  Variable var_index(this, MachineType::PointerRepresentation());
   Label loop(this, &var_index);
-  var_index.Bind(Int32Constant(unroll_count * kEntrySize));
+  var_index.Bind(IntPtrConstant(unroll_count * kEntrySize));
   Goto(&loop);
   Bind(&loop);
   {
     Node* index = var_index.value();
-    GotoIf(Int32GreaterThanOrEqual(index, length), if_miss);
+    GotoIf(UintPtrGreaterThanOrEqual(index, length), if_miss);
 
-    Node* cached_map =
-        LoadWeakCellValue(LoadFixedArrayElement(feedback, index));
+    Node* cached_map = LoadWeakCellValue(
+        LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
 
     Label next_entry(this);
     GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
 
     // Found, now call handler.
-    Node* handler = LoadFixedArrayElement(feedback, index, kPointerSize);
+    Node* handler =
+        LoadFixedArrayElement(feedback, index, kPointerSize, INTPTR_PARAMETERS);
     var_handler->Bind(handler);
     Goto(if_handler);
 
     Bind(&next_entry);
-    var_index.Bind(Int32Add(index, Int32Constant(kEntrySize)));
+    var_index.Bind(IntPtrAdd(index, IntPtrConstant(kEntrySize)));
     Goto(&loop);
   }
 }
@@ -3357,7 +4254,7 @@
   STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
   // Compute the hash of the name (use entire hash field).
   Node* hash_field = LoadNameHashField(name);
-  Assert(WordEqual(
+  Assert(Word32Equal(
       Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
       Int32Constant(0)));
 
@@ -3369,7 +4266,7 @@
   hash = Word32Xor(hash, Int32Constant(StubCache::kPrimaryMagic));
   uint32_t mask = (StubCache::kPrimaryTableSize - 1)
                   << StubCache::kCacheIndexShift;
-  return Word32And(hash, Int32Constant(mask));
+  return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
 }
 
 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
@@ -3381,7 +4278,7 @@
   hash = Int32Add(hash, Int32Constant(StubCache::kSecondaryMagic));
   int32_t mask = (StubCache::kSecondaryTableSize - 1)
                  << StubCache::kCacheIndexShift;
-  return Word32And(hash, Int32Constant(mask));
+  return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
 }
 
 enum CodeStubAssembler::StubCacheTable : int {
@@ -3406,7 +4303,7 @@
   // The {table_offset} holds the entry offset times four (due to masking
   // and shifting optimizations).
   const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift;
-  entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier));
+  entry_offset = IntPtrMul(entry_offset, IntPtrConstant(kMultiplier));
 
   // Check that the key in the entry matches the name.
   Node* key_base =
@@ -3419,13 +4316,13 @@
                                   stub_cache->key_reference(table).address());
   Node* entry_map =
       Load(MachineType::Pointer(), key_base,
-           Int32Add(entry_offset, Int32Constant(kPointerSize * 2)));
+           IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize * 2)));
   GotoIf(WordNotEqual(map, entry_map), if_miss);
 
   DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
                               stub_cache->key_reference(table).address());
   Node* code = Load(MachineType::Pointer(), key_base,
-                    Int32Add(entry_offset, Int32Constant(kPointerSize)));
+                    IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize)));
 
   // We found the handler.
   var_handler->Bind(code);
@@ -3489,41 +4386,43 @@
   return var_intptr_key.value();
 }
 
-// |is_jsarray| should be non-zero for JSArrays.
-void CodeStubAssembler::EmitBoundsCheck(Node* object, Node* elements,
-                                        Node* intptr_key, Node* is_jsarray,
-                                        Label* miss) {
-  Variable var_length(this, MachineRepresentation::kTagged);
+void CodeStubAssembler::EmitFastElementsBoundsCheck(Node* object,
+                                                    Node* elements,
+                                                    Node* intptr_index,
+                                                    Node* is_jsarray_condition,
+                                                    Label* miss) {
+  Variable var_length(this, MachineType::PointerRepresentation());
   Label if_array(this), length_loaded(this, &var_length);
-  GotoUnless(WordEqual(is_jsarray, IntPtrConstant(0)), &if_array);
+  GotoIf(is_jsarray_condition, &if_array);
   {
     var_length.Bind(SmiUntag(LoadFixedArrayBaseLength(elements)));
     Goto(&length_loaded);
   }
   Bind(&if_array);
   {
-    var_length.Bind(SmiUntag(LoadObjectField(object, JSArray::kLengthOffset)));
+    var_length.Bind(SmiUntag(LoadJSArrayLength(object)));
     Goto(&length_loaded);
   }
   Bind(&length_loaded);
-  GotoUnless(UintPtrLessThan(intptr_key, var_length.value()), miss);
+  GotoUnless(UintPtrLessThan(intptr_index, var_length.value()), miss);
 }
 
-// |key| should be untagged (int32).
 void CodeStubAssembler::EmitElementLoad(Node* object, Node* elements,
-                                        Node* elements_kind, Node* key,
+                                        Node* elements_kind, Node* intptr_index,
+                                        Node* is_jsarray_condition,
                                         Label* if_hole, Label* rebox_double,
                                         Variable* var_double_value,
-                                        Label* miss) {
+                                        Label* unimplemented_elements_kind,
+                                        Label* out_of_bounds, Label* miss) {
   Label if_typed_array(this), if_fast_packed(this), if_fast_holey(this),
-      if_fast_double(this), if_fast_holey_double(this),
-      unimplemented_elements_kind(this);
-  STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
+      if_fast_double(this), if_fast_holey_double(this), if_nonfast(this),
+      if_dictionary(this), unreachable(this);
   GotoIf(
-      IntPtrGreaterThanOrEqual(
-          elements_kind, IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
-      &if_typed_array);
+      IntPtrGreaterThan(elements_kind, IntPtrConstant(LAST_FAST_ELEMENTS_KIND)),
+      &if_nonfast);
 
+  EmitFastElementsBoundsCheck(object, elements, intptr_index,
+                              is_jsarray_condition, out_of_bounds);
   int32_t kinds[] = {// Handled by if_fast_packed.
                      FAST_SMI_ELEMENTS, FAST_ELEMENTS,
                      // Handled by if_fast_holey.
@@ -3540,28 +4439,20 @@
                      &if_fast_double,
                      // FAST_HOLEY_DOUBLE_ELEMENTS
                      &if_fast_holey_double};
-  Switch(elements_kind, &unimplemented_elements_kind, kinds, labels,
+  Switch(elements_kind, unimplemented_elements_kind, kinds, labels,
          arraysize(kinds));
-  Bind(&unimplemented_elements_kind);
-  {
-    // Crash if we get here.
-    DebugBreak();
-    Goto(miss);
-  }
 
   Bind(&if_fast_packed);
   {
     Comment("fast packed elements");
-    // TODO(jkummerow): The Load*Element helpers add movsxlq instructions
-    // on x64 which we don't need here, because |key| is an IntPtr already.
-    // Do something about that.
-    Return(LoadFixedArrayElement(elements, key));
+    Return(LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS));
   }
 
   Bind(&if_fast_holey);
   {
     Comment("fast holey elements");
-    Node* element = LoadFixedArrayElement(elements, key);
+    Node* element =
+        LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
     GotoIf(WordEqual(element, TheHoleConstant()), if_hole);
     Return(element);
   }
@@ -3569,30 +4460,56 @@
   Bind(&if_fast_double);
   {
     Comment("packed double elements");
-    var_double_value->Bind(
-        LoadFixedDoubleArrayElement(elements, key, MachineType::Float64()));
+    var_double_value->Bind(LoadFixedDoubleArrayElement(
+        elements, intptr_index, MachineType::Float64(), 0, INTPTR_PARAMETERS));
     Goto(rebox_double);
   }
 
   Bind(&if_fast_holey_double);
   {
     Comment("holey double elements");
-    if (kPointerSize == kDoubleSize) {
-      Node* raw_element =
-          LoadFixedDoubleArrayElement(elements, key, MachineType::Uint64());
-      Node* the_hole = Int64Constant(kHoleNanInt64);
-      GotoIf(Word64Equal(raw_element, the_hole), if_hole);
-    } else {
-      Node* element_upper = LoadFixedDoubleArrayElement(
-          elements, key, MachineType::Uint32(), kIeeeDoubleExponentWordOffset);
-      GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
-             if_hole);
-    }
-    var_double_value->Bind(
-        LoadFixedDoubleArrayElement(elements, key, MachineType::Float64()));
+    Node* value = LoadFixedDoubleArrayElement(elements, intptr_index,
+                                              MachineType::Float64(), 0,
+                                              INTPTR_PARAMETERS, if_hole);
+    var_double_value->Bind(value);
     Goto(rebox_double);
   }
 
+  Bind(&if_nonfast);
+  {
+    STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
+    GotoIf(IntPtrGreaterThanOrEqual(
+               elements_kind,
+               IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
+           &if_typed_array);
+    GotoIf(IntPtrEqual(elements_kind, IntPtrConstant(DICTIONARY_ELEMENTS)),
+           &if_dictionary);
+    Goto(unimplemented_elements_kind);
+  }
+
+  Bind(&if_dictionary);
+  {
+    Comment("dictionary elements");
+    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), out_of_bounds);
+    Variable var_entry(this, MachineType::PointerRepresentation());
+    Label if_found(this);
+    NumberDictionaryLookup<SeededNumberDictionary>(
+        elements, intptr_index, &if_found, &var_entry, if_hole);
+    Bind(&if_found);
+    // Check that the value is a data property.
+    Node* details_index = EntryToIndex<SeededNumberDictionary>(
+        var_entry.value(), SeededNumberDictionary::kEntryDetailsIndex);
+    Node* details = SmiToWord32(
+        LoadFixedArrayElement(elements, details_index, 0, INTPTR_PARAMETERS));
+    Node* kind = BitFieldDecode<PropertyDetails::KindField>(details);
+    // TODO(jkummerow): Support accessors without missing?
+    GotoUnless(Word32Equal(kind, Int32Constant(kData)), miss);
+    // Finally, load the value.
+    Node* value_index = EntryToIndex<SeededNumberDictionary>(
+        var_entry.value(), SeededNumberDictionary::kEntryValueIndex);
+    Return(LoadFixedArrayElement(elements, value_index, 0, INTPTR_PARAMETERS));
+  }
+
   Bind(&if_typed_array);
   {
     Comment("typed elements");
@@ -3603,6 +4520,12 @@
     Node* neutered_bit =
         Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
     GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), miss);
+
+    // Bounds check.
+    Node* length =
+        SmiUntag(LoadObjectField(object, JSTypedArray::kLengthOffset));
+    GotoUnless(UintPtrLessThan(intptr_index, length), out_of_bounds);
+
     // Backing store = external_pointer + base_pointer.
     Node* external_pointer =
         LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
@@ -3632,43 +4555,43 @@
     Bind(&uint8_elements);
     {
       Comment("UINT8_ELEMENTS");  // Handles UINT8_CLAMPED_ELEMENTS too.
-      Return(SmiTag(Load(MachineType::Uint8(), backing_store, key)));
+      Return(SmiTag(Load(MachineType::Uint8(), backing_store, intptr_index)));
     }
     Bind(&int8_elements);
     {
       Comment("INT8_ELEMENTS");
-      Return(SmiTag(Load(MachineType::Int8(), backing_store, key)));
+      Return(SmiTag(Load(MachineType::Int8(), backing_store, intptr_index)));
     }
     Bind(&uint16_elements);
     {
       Comment("UINT16_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(1));
+      Node* index = WordShl(intptr_index, IntPtrConstant(1));
       Return(SmiTag(Load(MachineType::Uint16(), backing_store, index)));
     }
     Bind(&int16_elements);
     {
       Comment("INT16_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(1));
+      Node* index = WordShl(intptr_index, IntPtrConstant(1));
       Return(SmiTag(Load(MachineType::Int16(), backing_store, index)));
     }
     Bind(&uint32_elements);
     {
       Comment("UINT32_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(2));
+      Node* index = WordShl(intptr_index, IntPtrConstant(2));
       Node* element = Load(MachineType::Uint32(), backing_store, index);
       Return(ChangeUint32ToTagged(element));
     }
     Bind(&int32_elements);
     {
       Comment("INT32_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(2));
+      Node* index = WordShl(intptr_index, IntPtrConstant(2));
       Node* element = Load(MachineType::Int32(), backing_store, index);
       Return(ChangeInt32ToTagged(element));
     }
     Bind(&float32_elements);
     {
       Comment("FLOAT32_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(2));
+      Node* index = WordShl(intptr_index, IntPtrConstant(2));
       Node* element = Load(MachineType::Float32(), backing_store, index);
       var_double_value->Bind(ChangeFloat32ToFloat64(element));
       Goto(rebox_double);
@@ -3676,7 +4599,7 @@
     Bind(&float64_elements);
     {
       Comment("FLOAT64_ELEMENTS");
-      Node* index = WordShl(key, IntPtrConstant(3));
+      Node* index = WordShl(intptr_index, IntPtrConstant(3));
       Node* element = Load(MachineType::Float64(), backing_store, index);
       var_double_value->Bind(element);
       Goto(rebox_double);
@@ -3707,17 +4630,26 @@
           &property);
 
       Comment("element_load");
-      Node* key = TryToIntptr(p->name, miss);
+      Node* intptr_index = TryToIntptr(p->name, miss);
       Node* elements = LoadElements(p->receiver);
       Node* is_jsarray =
           WordAnd(handler_word, IntPtrConstant(KeyedLoadIsJsArray::kMask));
-      EmitBoundsCheck(p->receiver, elements, key, is_jsarray, miss);
-      Label if_hole(this);
-
+      Node* is_jsarray_condition = WordNotEqual(is_jsarray, IntPtrConstant(0));
       Node* elements_kind = BitFieldDecode<KeyedLoadElementsKind>(handler_word);
+      Label if_hole(this), unimplemented_elements_kind(this);
+      Label* out_of_bounds = miss;
+      EmitElementLoad(p->receiver, elements, elements_kind, intptr_index,
+                      is_jsarray_condition, &if_hole, &rebox_double,
+                      &var_double_value, &unimplemented_elements_kind,
+                      out_of_bounds, miss);
 
-      EmitElementLoad(p->receiver, elements, elements_kind, key, &if_hole,
-                      &rebox_double, &var_double_value, miss);
+      Bind(&unimplemented_elements_kind);
+      {
+        // Smi handlers should only be installed for supported elements kinds.
+        // Crash if we get here.
+        DebugBreak();
+        Goto(miss);
+      }
 
       Bind(&if_hole);
       {
@@ -3799,8 +4731,9 @@
   Node* receiver_map = LoadReceiverMap(p->receiver);
 
   // Check monomorphic case.
-  Node* feedback = TryMonomorphicCase(p, receiver_map, &if_handler,
-                                      &var_handler, &try_polymorphic);
+  Node* feedback =
+      TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
+                         &var_handler, &try_polymorphic);
   Bind(&if_handler);
   {
     HandleLoadICHandlerCase(p, var_handler.value(), &miss);
@@ -3810,10 +4743,9 @@
   {
     // Check polymorphic case.
     Comment("LoadIC_try_polymorphic");
-    GotoUnless(
-        WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
-        &try_megamorphic);
-    HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
+    GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
+               &try_megamorphic);
+    HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
                           &miss, 2);
   }
 
@@ -3845,8 +4777,9 @@
   Node* receiver_map = LoadReceiverMap(p->receiver);
 
   // Check monomorphic case.
-  Node* feedback = TryMonomorphicCase(p, receiver_map, &if_handler,
-                                      &var_handler, &try_polymorphic);
+  Node* feedback =
+      TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
+                         &var_handler, &try_polymorphic);
   Bind(&if_handler);
   {
     HandleLoadICHandlerCase(p, var_handler.value(), &miss, kSupportElements);
@@ -3856,10 +4789,9 @@
   {
     // Check polymorphic case.
     Comment("KeyedLoadIC_try_polymorphic");
-    GotoUnless(
-        WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
-        &try_megamorphic);
-    HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
+    GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
+               &try_megamorphic);
+    HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
                           &miss, 2);
   }
 
@@ -3885,8 +4817,8 @@
         p->slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS,
         FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag);
     Node* array = Load(MachineType::AnyTagged(), p->vector, offset);
-    HandlePolymorphicCase(p, receiver_map, array, &if_handler, &var_handler,
-                          &miss, 1);
+    HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
+                          1);
   }
   Bind(&miss);
   {
@@ -3896,6 +4828,210 @@
   }
 }
 
+void CodeStubAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
+  Variable var_index(this, MachineType::PointerRepresentation());
+  Variable var_details(this, MachineRepresentation::kWord32);
+  Variable var_value(this, MachineRepresentation::kTagged);
+  Label if_index(this), if_unique_name(this), if_element_hole(this),
+      if_oob(this), slow(this), stub_cache_miss(this),
+      if_property_dictionary(this), if_found_on_receiver(this);
+
+  Node* receiver = p->receiver;
+  GotoIf(WordIsSmi(receiver), &slow);
+  Node* receiver_map = LoadMap(receiver);
+  Node* instance_type = LoadMapInstanceType(receiver_map);
+  // Receivers requiring non-standard element accesses (interceptors, access
+  // checks, strings and string wrappers, proxies) are handled in the runtime.
+  GotoIf(Int32LessThanOrEqual(instance_type,
+                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
+         &slow);
+
+  Node* key = p->name;
+  TryToName(key, &if_index, &var_index, &if_unique_name, &slow);
+
+  Bind(&if_index);
+  {
+    Comment("integer index");
+    Node* index = var_index.value();
+    Node* elements = LoadElements(receiver);
+    Node* elements_kind = LoadMapElementsKind(receiver_map);
+    Node* is_jsarray_condition =
+        Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE));
+    Variable var_double_value(this, MachineRepresentation::kFloat64);
+    Label rebox_double(this, &var_double_value);
+
+    // Unimplemented elements kinds fall back to a runtime call.
+    Label* unimplemented_elements_kind = &slow;
+    IncrementCounter(isolate()->counters()->ic_keyed_load_generic_smi(), 1);
+    EmitElementLoad(receiver, elements, elements_kind, index,
+                    is_jsarray_condition, &if_element_hole, &rebox_double,
+                    &var_double_value, unimplemented_elements_kind, &if_oob,
+                    &slow);
+
+    Bind(&rebox_double);
+    Return(AllocateHeapNumberWithValue(var_double_value.value()));
+  }
+
+  Bind(&if_oob);
+  {
+    Comment("out of bounds");
+    Node* index = var_index.value();
+    // Negative keys can't take the fast OOB path.
+    GotoIf(IntPtrLessThan(index, IntPtrConstant(0)), &slow);
+    // Positive OOB indices are effectively the same as hole loads.
+    Goto(&if_element_hole);
+  }
+
+  Bind(&if_element_hole);
+  {
+    Comment("found the hole");
+    Label return_undefined(this);
+    BranchIfPrototypesHaveNoElements(receiver_map, &return_undefined, &slow);
+
+    Bind(&return_undefined);
+    Return(UndefinedConstant());
+  }
+
+  Node* properties = nullptr;
+  Bind(&if_unique_name);
+  {
+    Comment("key is unique name");
+    // Check if the receiver has fast or slow properties.
+    properties = LoadProperties(receiver);
+    Node* properties_map = LoadMap(properties);
+    GotoIf(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
+           &if_property_dictionary);
+
+    // Try looking up the property on the receiver; if unsuccessful, look
+    // for a handler in the stub cache.
+    Comment("DescriptorArray lookup");
+
+    // Skip linear search if there are too many descriptors.
+    // TODO(jkummerow): Consider implementing binary search.
+    // See also TryLookupProperty() which has the same limitation.
+    const int32_t kMaxLinear = 210;
+    Label stub_cache(this);
+    Node* bitfield3 = LoadMapBitField3(receiver_map);
+    Node* nof = BitFieldDecodeWord<Map::NumberOfOwnDescriptorsBits>(bitfield3);
+    GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), &stub_cache);
+    Node* descriptors = LoadMapDescriptors(receiver_map);
+    Variable var_name_index(this, MachineType::PointerRepresentation());
+    Label if_descriptor_found(this);
+    DescriptorLookupLinear(key, descriptors, nof, &if_descriptor_found,
+                           &var_name_index, &stub_cache);
+
+    Bind(&if_descriptor_found);
+    {
+      LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
+                                 var_name_index.value(), &var_details,
+                                 &var_value);
+      Goto(&if_found_on_receiver);
+    }
+
+    Bind(&stub_cache);
+    {
+      Comment("stub cache probe for fast property load");
+      Variable var_handler(this, MachineRepresentation::kTagged);
+      Label found_handler(this, &var_handler), stub_cache_miss(this);
+      TryProbeStubCache(isolate()->load_stub_cache(), receiver, key,
+                        &found_handler, &var_handler, &stub_cache_miss);
+      Bind(&found_handler);
+      { HandleLoadICHandlerCase(p, var_handler.value(), &slow); }
+
+      Bind(&stub_cache_miss);
+      {
+        Comment("KeyedLoadGeneric_miss");
+        TailCallRuntime(Runtime::kKeyedLoadIC_Miss, p->context, p->receiver,
+                        p->name, p->slot, p->vector);
+      }
+    }
+  }
+
+  Bind(&if_property_dictionary);
+  {
+    Comment("dictionary property load");
+    // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
+    // seeing global objects here (which would need special handling).
+
+    Variable var_name_index(this, MachineType::PointerRepresentation());
+    Label dictionary_found(this, &var_name_index);
+    NameDictionaryLookup<NameDictionary>(properties, key, &dictionary_found,
+                                         &var_name_index, &slow);
+    Bind(&dictionary_found);
+    {
+      LoadPropertyFromNameDictionary(properties, var_name_index.value(),
+                                     &var_details, &var_value);
+      Goto(&if_found_on_receiver);
+    }
+  }
+
+  Bind(&if_found_on_receiver);
+  {
+    Node* value = CallGetterIfAccessor(var_value.value(), var_details.value(),
+                                       p->context, receiver, &slow);
+    IncrementCounter(isolate()->counters()->ic_keyed_load_generic_symbol(), 1);
+    Return(value);
+  }
+
+  Bind(&slow);
+  {
+    Comment("KeyedLoadGeneric_slow");
+    IncrementCounter(isolate()->counters()->ic_keyed_load_generic_slow(), 1);
+    // TODO(jkummerow): Should we use the GetProperty TF stub instead?
+    TailCallRuntime(Runtime::kKeyedGetProperty, p->context, p->receiver,
+                    p->name);
+  }
+}
+
+void CodeStubAssembler::StoreIC(const StoreICParameters* p) {
+  Variable var_handler(this, MachineRepresentation::kTagged);
+  // TODO(ishell): defer blocks when it works.
+  Label if_handler(this, &var_handler), try_polymorphic(this),
+      try_megamorphic(this /*, Label::kDeferred*/),
+      miss(this /*, Label::kDeferred*/);
+
+  Node* receiver_map = LoadReceiverMap(p->receiver);
+
+  // Check monomorphic case.
+  Node* feedback =
+      TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
+                         &var_handler, &try_polymorphic);
+  Bind(&if_handler);
+  {
+    Comment("StoreIC_if_handler");
+    StoreWithVectorDescriptor descriptor(isolate());
+    TailCallStub(descriptor, var_handler.value(), p->context, p->receiver,
+                 p->name, p->value, p->slot, p->vector);
+  }
+
+  Bind(&try_polymorphic);
+  {
+    // Check polymorphic case.
+    Comment("StoreIC_try_polymorphic");
+    GotoUnless(
+        WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
+        &try_megamorphic);
+    HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
+                          &miss, 2);
+  }
+
+  Bind(&try_megamorphic);
+  {
+    // Check megamorphic case.
+    GotoUnless(
+        WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
+        &miss);
+
+    TryProbeStubCache(isolate()->store_stub_cache(), p->receiver, p->name,
+                      &if_handler, &var_handler, &miss);
+  }
+  Bind(&miss);
+  {
+    TailCallRuntime(Runtime::kStoreIC_Miss, p->context, p->value, p->slot,
+                    p->vector, p->receiver, p->name);
+  }
+}
+
 void CodeStubAssembler::LoadGlobalIC(const LoadICParameters* p) {
   Label try_handler(this), miss(this);
   Node* weak_cell =
@@ -3921,8 +5057,8 @@
     AssertInstanceType(handler, CODE_TYPE);
     LoadWithVectorDescriptor descriptor(isolate());
     Node* native_context = LoadNativeContext(p->context);
-    Node* receiver = LoadFixedArrayElement(
-        native_context, Int32Constant(Context::EXTENSION_INDEX));
+    Node* receiver =
+        LoadContextElement(native_context, Context::EXTENSION_INDEX);
     Node* fake_name = IntPtrConstant(0);
     TailCallStub(descriptor, handler, p->context, receiver, fake_name, p->slot,
                  p->vector);
@@ -3934,6 +5070,573 @@
   }
 }
 
+void CodeStubAssembler::ExtendPropertiesBackingStore(compiler::Node* object) {
+  Node* properties = LoadProperties(object);
+  Node* length = LoadFixedArrayBaseLength(properties);
+
+  ParameterMode mode = OptimalParameterMode();
+  length = UntagParameter(length, mode);
+
+  Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode);
+  Node* new_capacity = IntPtrAdd(length, delta);
+
+  // Grow properties array.
+  ElementsKind kind = FAST_ELEMENTS;
+  DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded <
+         FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind));
+  // The size of a new properties backing store is guaranteed to be small
+  // enough that the new backing store will be allocated in new space.
+  Assert(UintPtrLessThan(new_capacity, IntPtrConstant(kMaxNumberOfDescriptors +
+                                                      JSObject::kFieldsAdded)));
+
+  Node* new_properties = AllocateFixedArray(kind, new_capacity, mode);
+
+  FillFixedArrayWithValue(kind, new_properties, length, new_capacity,
+                          Heap::kUndefinedValueRootIndex, mode);
+
+  // |new_properties| is guaranteed to be in new space, so we can skip
+  // the write barrier.
+  CopyFixedArrayElements(kind, properties, new_properties, length,
+                         SKIP_WRITE_BARRIER, mode);
+
+  StoreObjectField(object, JSObject::kPropertiesOffset, new_properties);
+}
+
+Node* CodeStubAssembler::PrepareValueForWrite(Node* value,
+                                              Representation representation,
+                                              Label* bailout) {
+  if (representation.IsDouble()) {
+    Variable var_value(this, MachineRepresentation::kFloat64);
+    Label if_smi(this), if_heap_object(this), done(this);
+    Branch(WordIsSmi(value), &if_smi, &if_heap_object);
+    Bind(&if_smi);
+    {
+      var_value.Bind(SmiToFloat64(value));
+      Goto(&done);
+    }
+    Bind(&if_heap_object);
+    {
+      GotoUnless(
+          Word32Equal(LoadInstanceType(value), Int32Constant(HEAP_NUMBER_TYPE)),
+          bailout);
+      var_value.Bind(LoadHeapNumberValue(value));
+      Goto(&done);
+    }
+    Bind(&done);
+    value = var_value.value();
+  } else if (representation.IsHeapObject()) {
+    // The field type is checked by the handler; here we only check that the
+    // value is a heap object.
+    GotoIf(WordIsSmi(value), bailout);
+  } else if (representation.IsSmi()) {
+    GotoUnless(WordIsSmi(value), bailout);
+  } else {
+    DCHECK(representation.IsTagged());
+  }
+  return value;
+}
+
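The double-representation branch of PrepareValueForWrite above widens a Smi to a float64 and unboxes a HeapNumber, bailing out for any other input. A minimal standalone C++ sketch of that policy (illustrative only; the Value struct and names are hypothetical, not V8 API):

#include <optional>

// Hypothetical tagged-value model, for illustration only.
struct Value {
  bool is_smi = false;          // small integer stored inline
  bool is_heap_number = false;  // boxed double
  int smi = 0;
  double number = 0.0;
};

// Mirrors the Representation::Double() path above: Smi -> float64,
// HeapNumber -> unboxed value, anything else -> bailout (modeled as nullopt).
std::optional<double> PrepareDoubleValue(const Value& v) {
  if (v.is_smi) return static_cast<double>(v.smi);
  if (v.is_heap_number) return v.number;
  return std::nullopt;
}
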
+void CodeStubAssembler::StoreNamedField(Node* object, FieldIndex index,
+                                        Representation representation,
+                                        Node* value, bool transition_to_field) {
+  DCHECK_EQ(index.is_double(), representation.IsDouble());
+
+  StoreNamedField(object, IntPtrConstant(index.offset()), index.is_inobject(),
+                  representation, value, transition_to_field);
+}
+
+void CodeStubAssembler::StoreNamedField(Node* object, Node* offset,
+                                        bool is_inobject,
+                                        Representation representation,
+                                        Node* value, bool transition_to_field) {
+  bool store_value_as_double = representation.IsDouble();
+  Node* property_storage = object;
+  if (!is_inobject) {
+    property_storage = LoadProperties(object);
+  }
+
+  if (representation.IsDouble()) {
+    if (!FLAG_unbox_double_fields || !is_inobject) {
+      if (transition_to_field) {
+        Node* heap_number = AllocateHeapNumberWithValue(value, MUTABLE);
+        // Store the new mutable heap number into the object.
+        value = heap_number;
+        store_value_as_double = false;
+      } else {
+        // Load the heap number.
+        property_storage = LoadObjectField(property_storage, offset);
+        // Store the double value into it.
+        offset = IntPtrConstant(HeapNumber::kValueOffset);
+      }
+    }
+  }
+
+  if (store_value_as_double) {
+    StoreObjectFieldNoWriteBarrier(property_storage, offset, value,
+                                   MachineRepresentation::kFloat64);
+  } else if (representation.IsSmi()) {
+    StoreObjectFieldNoWriteBarrier(property_storage, offset, value);
+  } else {
+    StoreObjectField(property_storage, offset, value);
+  }
+}
+
+Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
+                                                  Node* value, Label* bailout) {
+  // Mapped arguments are actual arguments. Unmapped arguments are values added
+  // to the arguments object after it was created for the call. Mapped arguments
+  // are stored in the context at indexes given by elements[key + 2]. Unmapped
+  // arguments are stored as regular indexed properties in the arguments array,
+  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
+  // look at argument object construction.
+  //
+  // The sloppy arguments elements array has a special format:
+  //
+  // 0: context
+  // 1: unmapped arguments array
+  // 2: mapped_index0,
+  // 3: mapped_index1,
+  // ...
+  //
+  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
+  // If key + 2 >= elements.length then attempt to look in the unmapped
+  // arguments array (given by elements[1]) and return the value at key, missing
+  // to the runtime if the unmapped arguments array is not a fixed array or if
+  // key >= unmapped_arguments_array.length.
+  //
+  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
+  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
+  // index into the context array given at elements[0]. Return the value at
+  // context[t].
+
+  bool is_load = value == nullptr;
+
+  GotoUnless(WordIsSmi(key), bailout);
+  key = SmiUntag(key);
+  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
+
+  Node* elements = LoadElements(receiver);
+  Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);
+
+  Variable var_result(this, MachineRepresentation::kTagged);
+  if (!is_load) {
+    var_result.Bind(value);
+  }
+  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
+  Node* intptr_two = IntPtrConstant(2);
+  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
+
+  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
+
+  Node* mapped_index = LoadFixedArrayElement(
+      elements, IntPtrAdd(key, intptr_two), 0, INTPTR_PARAMETERS);
+  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
+
+  Bind(&if_mapped);
+  {
+    Assert(WordIsSmi(mapped_index));
+    mapped_index = SmiUntag(mapped_index);
+    Node* the_context = LoadFixedArrayElement(elements, IntPtrConstant(0), 0,
+                                              INTPTR_PARAMETERS);
+    // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
+    // methods for accessing Context.
+    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
+    DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
+              FixedArray::OffsetOfElementAt(0));
+    if (is_load) {
+      Node* result = LoadFixedArrayElement(the_context, mapped_index, 0,
+                                           INTPTR_PARAMETERS);
+      Assert(WordNotEqual(result, TheHoleConstant()));
+      var_result.Bind(result);
+    } else {
+      StoreFixedArrayElement(the_context, mapped_index, value,
+                             UPDATE_WRITE_BARRIER, INTPTR_PARAMETERS);
+    }
+    Goto(&end);
+  }
+
+  Bind(&if_unmapped);
+  {
+    Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0,
+                                                INTPTR_PARAMETERS);
+    GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
+           bailout);
+
+    Node* backing_store_length =
+        LoadAndUntagFixedArrayBaseLength(backing_store);
+    GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
+
+    // The key falls into the unmapped range.
+    if (is_load) {
+      Node* result =
+          LoadFixedArrayElement(backing_store, key, 0, INTPTR_PARAMETERS);
+      GotoIf(WordEqual(result, TheHoleConstant()), bailout);
+      var_result.Bind(result);
+    } else {
+      StoreFixedArrayElement(backing_store, key, value, UPDATE_WRITE_BARRIER,
+                             INTPTR_PARAMETERS);
+    }
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return var_result.value();
+}
+
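The layout comment at the top of EmitKeyedSloppyArguments above describes how a key is dispatched either to a context slot (mapped) or to a regular backing array (unmapped). A minimal C++ sketch of that lookup, assuming the hole is modeled as a sentinel and the context and unmapped array are passed in separately (names are hypothetical, not V8 API):

#include <cstddef>
#include <optional>
#include <vector>

constexpr int kTheHole = -1;  // stand-in for the_hole sentinel (assumption)

// |mapped| models elements[2..]: context indices or kTheHole.
// |context| models the context referenced by elements[0]; |unmapped| models
// the arguments backing store held at elements[1].
std::optional<int> SloppyArgumentsLoad(const std::vector<int>& context,
                                       const std::vector<int>& mapped,
                                       const std::vector<int>& unmapped,
                                       std::size_t key) {
  if (key < mapped.size() && mapped[key] != kTheHole) {
    // Mapped: the stored value is an index into the context.
    return context[static_cast<std::size_t>(mapped[key])];
  }
  if (key < unmapped.size() && unmapped[key] != kTheHole) {
    // Unmapped: a regular indexed property in the arguments array.
    return unmapped[key];
  }
  return std::nullopt;  // miss: would defer to the runtime
}
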
+Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) {
+  Node* native_context = LoadNativeContext(context);
+  Node* script_context_table =
+      LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX);
+
+  int offset =
+      ScriptContextTable::GetContextOffset(context_index) - kHeapObjectTag;
+  return Load(MachineType::AnyTagged(), script_context_table,
+              IntPtrConstant(offset));
+}
+
+Node* CodeStubAssembler::ClampedToUint8(Node* int32_value) {
+  Label done(this);
+  Node* int32_zero = Int32Constant(0);
+  Node* int32_255 = Int32Constant(255);
+  Variable var_value(this, MachineRepresentation::kWord32);
+  var_value.Bind(int32_value);
+  GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
+  var_value.Bind(int32_zero);
+  GotoIf(Int32LessThan(int32_value, int32_zero), &done);
+  var_value.Bind(int32_255);
+  Goto(&done);
+  Bind(&done);
+  return var_value.value();
+}
+
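ClampedToUint8 above performs the UINT8_CLAMPED store conversion for an int32 input using an unsigned-compare fast path. The equivalent plain C++ clamp (illustrative only):

// Clamp a signed 32-bit value into [0, 255], as needed for
// UINT8_CLAMPED_ELEMENTS stores of int32 values.
static inline int ClampInt32ToUint8(int value) {
  if (value < 0) return 0;
  if (value > 255) return 255;
  return value;
}
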
+namespace {
+
+// Converts a typed array elements kind to a machine representation.
+MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
+  switch (kind) {
+    case UINT8_CLAMPED_ELEMENTS:
+    case UINT8_ELEMENTS:
+    case INT8_ELEMENTS:
+      return MachineRepresentation::kWord8;
+    case UINT16_ELEMENTS:
+    case INT16_ELEMENTS:
+      return MachineRepresentation::kWord16;
+    case UINT32_ELEMENTS:
+    case INT32_ELEMENTS:
+      return MachineRepresentation::kWord32;
+    case FLOAT32_ELEMENTS:
+      return MachineRepresentation::kFloat32;
+    case FLOAT64_ELEMENTS:
+      return MachineRepresentation::kFloat64;
+    default:
+      UNREACHABLE();
+      return MachineRepresentation::kNone;
+  }
+}
+
+}  // namespace
+
+void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
+                                     Node* index, Node* value,
+                                     ParameterMode mode) {
+  if (IsFixedTypedArrayElementsKind(kind)) {
+    if (kind == UINT8_CLAMPED_ELEMENTS) {
+      value = ClampedToUint8(value);
+    }
+    Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
+    MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
+    StoreNoWriteBarrier(rep, elements, offset, value);
+    return;
+  }
+
+  WriteBarrierMode barrier_mode =
+      IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
+  if (IsFastDoubleElementsKind(kind)) {
+    // Make sure we do not store signalling NaNs into double arrays.
+    value = Float64SilenceNaN(value);
+    StoreFixedDoubleArrayElement(elements, index, value, mode);
+  } else {
+    StoreFixedArrayElement(elements, index, value, barrier_mode, mode);
+  }
+}
+
+void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
+                                         bool is_jsarray,
+                                         ElementsKind elements_kind,
+                                         KeyedAccessStoreMode store_mode,
+                                         Label* bailout) {
+  Node* elements = LoadElements(object);
+  if (IsFastSmiOrObjectElementsKind(elements_kind) &&
+      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
+    // Bailout in case of COW elements.
+    GotoIf(WordNotEqual(LoadMap(elements),
+                        LoadRoot(Heap::kFixedArrayMapRootIndex)),
+           bailout);
+  }
+  // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
+  ParameterMode parameter_mode = INTPTR_PARAMETERS;
+  key = TryToIntptr(key, bailout);
+
+  if (IsFixedTypedArrayElementsKind(elements_kind)) {
+    Label done(this);
+    // TODO(ishell): call ToNumber() on value and don't bail out, but be careful
+    // to call it only once if we decide to bail out because of bounds checks.
+
+    if (IsFixedFloatElementsKind(elements_kind)) {
+      // TODO(ishell): move float32 truncation into PrepareValueForWrite.
+      value = PrepareValueForWrite(value, Representation::Double(), bailout);
+      if (elements_kind == FLOAT32_ELEMENTS) {
+        value = TruncateFloat64ToFloat32(value);
+      }
+    } else {
+      // TODO(ishell): It's fine for word8/16/32 to truncate the result.
+      value = TryToIntptr(value, bailout);
+    }
+
+    // There must be no allocations between the buffer load and the actual
+    // store to the backing store, because the GC may decide that the buffer
+    // is not alive or may move the elements.
+    // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
+
+    // Check if buffer has been neutered.
+    Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
+    Node* bitfield = LoadObjectField(buffer, JSArrayBuffer::kBitFieldOffset,
+                                     MachineType::Uint32());
+    Node* neutered_bit =
+        Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
+    GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), bailout);
+
+    // Bounds check.
+    Node* length = UntagParameter(
+        LoadObjectField(object, JSTypedArray::kLengthOffset), parameter_mode);
+
+    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
+      // Skip the store if we write beyond the length.
+      GotoUnless(IntPtrLessThan(key, length), &done);
+      // ... but bail out if the key is negative.
+    } else {
+      DCHECK_EQ(STANDARD_STORE, store_mode);
+    }
+    GotoUnless(UintPtrLessThan(key, length), bailout);
+
+    // Backing store = external_pointer + base_pointer.
+    Node* external_pointer =
+        LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
+                        MachineType::Pointer());
+    Node* base_pointer =
+        LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
+    Node* backing_store = IntPtrAdd(external_pointer, base_pointer);
+    StoreElement(backing_store, elements_kind, key, value, parameter_mode);
+    Goto(&done);
+
+    Bind(&done);
+    return;
+  }
+  DCHECK(IsFastSmiOrObjectElementsKind(elements_kind) ||
+         IsFastDoubleElementsKind(elements_kind));
+
+  Node* length = is_jsarray ? LoadObjectField(object, JSArray::kLengthOffset)
+                            : LoadFixedArrayBaseLength(elements);
+  length = UntagParameter(length, parameter_mode);
+
+  // If the value is stored into a fast smi array, ensure that it is a smi
+  // before manipulating the backing store. Otherwise the backing store may
+  // be left in an invalid state.
+  if (IsFastSmiElementsKind(elements_kind)) {
+    GotoUnless(WordIsSmi(value), bailout);
+  } else if (IsFastDoubleElementsKind(elements_kind)) {
+    value = PrepareValueForWrite(value, Representation::Double(), bailout);
+  }
+
+  if (IsGrowStoreMode(store_mode)) {
+    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
+                                    key, parameter_mode, is_jsarray, bailout);
+  } else {
+    GotoUnless(UintPtrLessThan(key, length), bailout);
+
+    if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
+        IsFastSmiOrObjectElementsKind(elements_kind)) {
+      elements = CopyElementsOnWrite(object, elements, elements_kind, length,
+                                     parameter_mode, bailout);
+    }
+  }
+  StoreElement(elements, elements_kind, key, value, parameter_mode);
+}
+
+Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
+                                              ElementsKind kind, Node* length,
+                                              Node* key, ParameterMode mode,
+                                              bool is_js_array,
+                                              Label* bailout) {
+  Variable checked_elements(this, MachineRepresentation::kTagged);
+  Label grow_case(this), no_grow_case(this), done(this);
+
+  Node* condition;
+  if (IsHoleyElementsKind(kind)) {
+    condition = UintPtrGreaterThanOrEqual(key, length);
+  } else {
+    condition = WordEqual(key, length);
+  }
+  Branch(condition, &grow_case, &no_grow_case);
+
+  Bind(&grow_case);
+  {
+    Node* current_capacity =
+        UntagParameter(LoadFixedArrayBaseLength(elements), mode);
+
+    checked_elements.Bind(elements);
+
+    Label fits_capacity(this);
+    GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
+    {
+      Node* new_elements = TryGrowElementsCapacity(
+          object, elements, kind, key, current_capacity, mode, bailout);
+
+      checked_elements.Bind(new_elements);
+      Goto(&fits_capacity);
+    }
+    Bind(&fits_capacity);
+
+    if (is_js_array) {
+      Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
+      StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
+                                     TagParameter(new_length, mode));
+    }
+    Goto(&done);
+  }
+
+  Bind(&no_grow_case);
+  {
+    GotoUnless(UintPtrLessThan(key, length), bailout);
+    checked_elements.Bind(elements);
+    Goto(&done);
+  }
+
+  Bind(&done);
+  return checked_elements.value();
+}
+
+Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
+                                             ElementsKind kind, Node* length,
+                                             ParameterMode mode,
+                                             Label* bailout) {
+  Variable new_elements_var(this, MachineRepresentation::kTagged);
+  Label done(this);
+
+  new_elements_var.Bind(elements);
+  GotoUnless(
+      WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
+      &done);
+  {
+    Node* capacity = UntagParameter(LoadFixedArrayBaseLength(elements), mode);
+    Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
+                                              length, capacity, mode, bailout);
+
+    new_elements_var.Bind(new_elements);
+    Goto(&done);
+  }
+
+  Bind(&done);
+  return new_elements_var.value();
+}
+
+void CodeStubAssembler::TransitionElementsKind(
+    compiler::Node* object, compiler::Node* map, ElementsKind from_kind,
+    ElementsKind to_kind, bool is_jsarray, Label* bailout) {
+  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
+         IsFastHoleyElementsKind(to_kind));
+  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
+    TrapAllocationMemento(object, bailout);
+  }
+
+  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
+    Comment("Non-simple map transition");
+    Node* elements = LoadElements(object);
+
+    Node* empty_fixed_array =
+        HeapConstant(isolate()->factory()->empty_fixed_array());
+
+    Label done(this);
+    GotoIf(WordEqual(elements, empty_fixed_array), &done);
+
+    // TODO(ishell): Use OptimalParameterMode().
+    ParameterMode mode = INTPTR_PARAMETERS;
+    Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
+    Node* array_length =
+        is_jsarray ? SmiUntag(LoadObjectField(object, JSArray::kLengthOffset))
+                   : elements_length;
+
+    GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
+                         elements_length, mode, bailout);
+    Goto(&done);
+    Bind(&done);
+  }
+
+  StoreObjectField(object, JSObject::kMapOffset, map);
+}
+
+void CodeStubAssembler::TrapAllocationMemento(Node* object,
+                                              Label* memento_found) {
+  Comment("[ TrapAllocationMemento");
+  Label no_memento_found(this);
+  Label top_check(this), map_check(this);
+
+  Node* new_space_top_address = ExternalConstant(
+      ExternalReference::new_space_allocation_top_address(isolate()));
+  const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
+  const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;
+
+  // Bail out if the object is not in new space.
+  Node* object_page = PageFromAddress(object);
+  {
+    const int mask =
+        (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
+    Node* page_flags = Load(MachineType::IntPtr(), object_page);
+    GotoIf(
+        WordEqual(WordAnd(page_flags, IntPtrConstant(mask)), IntPtrConstant(0)),
+        &no_memento_found);
+  }
+
+  Node* memento_end = IntPtrAdd(object, IntPtrConstant(kMementoEndOffset));
+  Node* memento_end_page = PageFromAddress(memento_end);
+
+  Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
+  Node* new_space_top_page = PageFromAddress(new_space_top);
+
+  // If the object is in new space, we need to check whether it and the
+  // respective potential memento object are on the same page as the current
+  // top.
+  GotoIf(WordEqual(memento_end_page, new_space_top_page), &top_check);
+
+  // The object is on a different page than the allocation top. Bail out if
+  // the object sits on the page boundary, as no memento can follow and we
+  // cannot touch the memory following it.
+  Branch(WordEqual(object_page, memento_end_page), &map_check,
+         &no_memento_found);
+
+  // If top is on the same page as the current object, we need to check whether
+  // we are below top.
+  Bind(&top_check);
+  {
+    Branch(UintPtrGreaterThan(memento_end, new_space_top), &no_memento_found,
+           &map_check);
+  }
+
+  // Memento map check.
+  Bind(&map_check);
+  {
+    Node* memento_map = LoadObjectField(object, kMementoMapOffset);
+    Branch(
+        WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
+        memento_found, &no_memento_found);
+  }
+  Bind(&no_memento_found);
+  Comment("] TrapAllocationMemento");
+}
+
+Node* CodeStubAssembler::PageFromAddress(Node* address) {
+  return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
+}
+
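PageFromAddress relies on pages being power-of-two aligned: clearing the low alignment bits of any address inside a page yields the page start. A standalone C++ illustration (the 512 KB page size below is an assumption made for the example, not necessarily V8's actual value):

#include <cstdint>

// Assumed page size for the example only: 512 KB, power-of-two aligned.
constexpr std::uintptr_t kPageSize = std::uintptr_t{1} << 19;
constexpr std::uintptr_t kPageAlignmentMask = kPageSize - 1;

std::uintptr_t PageFromAddress(std::uintptr_t address) {
  // Clearing the alignment bits maps every address within a page to its start.
  return address & ~kPageAlignmentMask;
}
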
 Node* CodeStubAssembler::EnumLength(Node* map) {
   Node* bitfield_3 = LoadMapBitField3(map);
   Node* enum_length = BitFieldDecode<Map::EnumLengthBits>(bitfield_3);
@@ -4001,6 +5704,52 @@
   }
 }
 
+Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
+    Node* feedback_vector, Node* slot) {
+  Node* size = IntPtrConstant(AllocationSite::kSize);
+  Node* site = Allocate(size, CodeStubAssembler::kPretenured);
+
+  // Store the map
+  StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
+                       Heap::kAllocationSiteMapRootIndex);
+  Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
+  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
+                                 kind);
+
+  // Unlike literals, constructed arrays don't have nested sites
+  Node* zero = IntPtrConstant(0);
+  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
+
+  // Pretenuring calculation field.
+  StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
+                                 zero);
+
+  // Pretenuring memento creation count field.
+  StoreObjectFieldNoWriteBarrier(
+      site, AllocationSite::kPretenureCreateCountOffset, zero);
+
+  // Store an empty fixed array for the code dependency.
+  StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
+                       Heap::kEmptyFixedArrayRootIndex);
+
+  // Link the object to the allocation site list
+  Node* site_list = ExternalConstant(
+      ExternalReference::allocation_sites_list_address(isolate()));
+  Node* next_site = LoadBufferObject(site_list, 0);
+
+  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
+  // mark as such in order to skip the write barrier, once we have a unified
+  // system for weakness. For now we decided to keep it like this because having
+  // an initial write barrier backed store makes this pointer strong until the
+  // next GC, and allocation sites are designed to survive several GCs anyway.
+  StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
+  StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
+
+  StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER,
+                         CodeStubAssembler::SMI_PARAMETERS);
+  return site;
+}
+
 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
                                                         Node* slot,
                                                         Node* value) {
diff --git a/src/code-stub-assembler.h b/src/code-stub-assembler.h
index 4bad541..c55f48c 100644
--- a/src/code-stub-assembler.h
+++ b/src/code-stub-assembler.h
@@ -19,6 +19,20 @@
 
 enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
 
+#define HEAP_CONSTANT_LIST(V)                 \
+  V(BooleanMap, BooleanMap)                   \
+  V(empty_string, EmptyString)                \
+  V(EmptyFixedArray, EmptyFixedArray)         \
+  V(FixedArrayMap, FixedArrayMap)             \
+  V(FixedCOWArrayMap, FixedCOWArrayMap)       \
+  V(FixedDoubleArrayMap, FixedDoubleArrayMap) \
+  V(HeapNumberMap, HeapNumberMap)             \
+  V(MinusZeroValue, MinusZero)                \
+  V(NanValue, Nan)                            \
+  V(NullValue, Null)                          \
+  V(TheHoleValue, TheHole)                    \
+  V(UndefinedValue, Undefined)
+
 // Provides JavaScript-specific "macro-assembler" functionality on top of the
 // CodeAssembler. By factoring the JavaScript-isms out of the CodeAssembler,
 // it's possible to add JavaScript-specific useful CodeAssembler "macros"
@@ -46,17 +60,40 @@
 
   typedef base::Flags<AllocationFlag> AllocationFlags;
 
-  enum ParameterMode { INTEGER_PARAMETERS, SMI_PARAMETERS };
+  // TODO(ishell): Fix all loads/stores from arrays by int32 offsets/indices
+  // and eventually remove INTEGER_PARAMETERS in favour of INTPTR_PARAMETERS.
+  enum ParameterMode { INTEGER_PARAMETERS, SMI_PARAMETERS, INTPTR_PARAMETERS };
 
-  compiler::Node* BooleanMapConstant();
-  compiler::Node* EmptyStringConstant();
-  compiler::Node* HeapNumberMapConstant();
+  // On 32-bit platforms, there is a slight performance advantage to doing all
+  // of the array offset/index arithmetic with SMIs, since it's possible
+  // to save a few tag/untag operations without paying an extra expense when
+  // calculating the array offset (the smi math can be folded away) and there
+  // are fewer live ranges. Thus indices are only converted to untagged values
+  // on 64-bit platforms.
+  ParameterMode OptimalParameterMode() const {
+    return Is64() ? INTPTR_PARAMETERS : SMI_PARAMETERS;
+  }
+
+  compiler::Node* UntagParameter(compiler::Node* value, ParameterMode mode) {
+    if (mode != SMI_PARAMETERS) value = SmiUntag(value);
+    return value;
+  }
+
+  compiler::Node* TagParameter(compiler::Node* value, ParameterMode mode) {
+    if (mode != SMI_PARAMETERS) value = SmiTag(value);
+    return value;
+  }
+
   compiler::Node* NoContextConstant();
-  compiler::Node* NanConstant();
-  compiler::Node* NullConstant();
-  compiler::Node* MinusZeroConstant();
-  compiler::Node* UndefinedConstant();
-  compiler::Node* TheHoleConstant();
+#define HEAP_CONSTANT_ACCESSOR(rootName, name) compiler::Node* name##Constant();
+  HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR)
+#undef HEAP_CONSTANT_ACCESSOR
+
+#define HEAP_CONSTANT_TEST(rootName, name) \
+  compiler::Node* Is##name(compiler::Node* value);
+  HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST)
+#undef HEAP_CONSTANT_TEST
+
   compiler::Node* HashSeed();
   compiler::Node* StaleRegisterConstant();
 
@@ -86,14 +123,20 @@
   compiler::Node* SmiSub(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiSubWithOverflow(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiEqual(compiler::Node* a, compiler::Node* b);
+  compiler::Node* SmiAbove(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiAboveOrEqual(compiler::Node* a, compiler::Node* b);
+  compiler::Node* SmiBelow(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiLessThan(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiLessThanOrEqual(compiler::Node* a, compiler::Node* b);
+  compiler::Node* SmiMax(compiler::Node* a, compiler::Node* b);
   compiler::Node* SmiMin(compiler::Node* a, compiler::Node* b);
   // Computes a % b for Smi inputs a and b; result is not necessarily a Smi.
   compiler::Node* SmiMod(compiler::Node* a, compiler::Node* b);
   // Computes a * b for Smi inputs a and b; result is not necessarily a Smi.
   compiler::Node* SmiMul(compiler::Node* a, compiler::Node* b);
+  compiler::Node* SmiOr(compiler::Node* a, compiler::Node* b) {
+    return WordOr(a, b);
+  }
 
   // Allocate an object of the given size.
   compiler::Node* Allocate(compiler::Node* size, AllocationFlags flags = kNone);
@@ -106,7 +149,7 @@
 
   // Check a value for smi-ness
   compiler::Node* WordIsSmi(compiler::Node* a);
-  // Check that the value is a positive smi.
+  // Check that the value is a non-negative smi.
   compiler::Node* WordIsPositiveSmi(compiler::Node* a);
 
   void BranchIfSmiEqual(compiler::Node* a, compiler::Node* b, Label* if_true,
@@ -143,10 +186,6 @@
                          if_notequal);
   }
 
-  void BranchIfSameValueZero(compiler::Node* a, compiler::Node* b,
-                             compiler::Node* context, Label* if_true,
-                             Label* if_false);
-
   void BranchIfFastJSArray(compiler::Node* object, compiler::Node* context,
                            Label* if_true, Label* if_false);
 
@@ -188,6 +227,8 @@
   compiler::Node* LoadProperties(compiler::Node* object);
   // Load the elements backing store of a JSObject.
   compiler::Node* LoadElements(compiler::Node* object);
+  // Load the length of a JSArray instance.
+  compiler::Node* LoadJSArrayLength(compiler::Node* array);
   // Load the length of a fixed array base instance.
   compiler::Node* LoadFixedArrayBaseLength(compiler::Node* array);
   // Load the length of a fixed array base instance.
@@ -200,6 +241,8 @@
   compiler::Node* LoadMapBitField3(compiler::Node* map);
   // Load the instance type of a map.
   compiler::Node* LoadMapInstanceType(compiler::Node* map);
+  // Load the ElementsKind of a map.
+  compiler::Node* LoadMapElementsKind(compiler::Node* map);
   // Load the instance descriptors of a map.
   compiler::Node* LoadMapDescriptors(compiler::Node* map);
   // Load the prototype of a map.
@@ -208,13 +251,16 @@
   compiler::Node* LoadMapInstanceSize(compiler::Node* map);
   // Load the inobject properties count of a Map (valid only for JSObjects).
   compiler::Node* LoadMapInobjectProperties(compiler::Node* map);
+  // Load the constructor function index of a Map (only for primitive maps).
+  compiler::Node* LoadMapConstructorFunctionIndex(compiler::Node* map);
   // Load the constructor of a Map (equivalent to Map::GetConstructor()).
   compiler::Node* LoadMapConstructor(compiler::Node* map);
 
-  // Load the hash field of a name.
+  // Load the hash field of a name as a uint32 value.
   compiler::Node* LoadNameHashField(compiler::Node* name);
-  // Load the hash value of a name. If {if_hash_not_computed} label
-  // is specified then it also checks if hash is actually computed.
+  // Load the hash value of a name as a uint32 value.
+  // If {if_hash_not_computed} label is specified then it also checks if
+  // hash is actually computed.
   compiler::Node* LoadNameHash(compiler::Node* name,
                                Label* if_hash_not_computed = nullptr);
 
@@ -226,25 +272,30 @@
   compiler::Node* LoadWeakCellValue(compiler::Node* weak_cell,
                                     Label* if_cleared = nullptr);
 
-  compiler::Node* AllocateUninitializedFixedArray(compiler::Node* length);
-
   // Load an array element from a FixedArray.
   compiler::Node* LoadFixedArrayElement(
-      compiler::Node* object, compiler::Node* int32_index,
-      int additional_offset = 0,
+      compiler::Node* object, compiler::Node* index, int additional_offset = 0,
       ParameterMode parameter_mode = INTEGER_PARAMETERS);
   // Load an array element from a FixedArray, untag it and return it as Word32.
   compiler::Node* LoadAndUntagToWord32FixedArrayElement(
-      compiler::Node* object, compiler::Node* int32_index,
-      int additional_offset = 0,
+      compiler::Node* object, compiler::Node* index, int additional_offset = 0,
       ParameterMode parameter_mode = INTEGER_PARAMETERS);
   // Load an array element from a FixedDoubleArray.
   compiler::Node* LoadFixedDoubleArrayElement(
-      compiler::Node* object, compiler::Node* int32_index,
-      MachineType machine_type, int additional_offset = 0,
-      ParameterMode parameter_mode = INTEGER_PARAMETERS);
+      compiler::Node* object, compiler::Node* index, MachineType machine_type,
+      int additional_offset = 0,
+      ParameterMode parameter_mode = INTEGER_PARAMETERS,
+      Label* if_hole = nullptr);
+
+  // Load a Float64 value at address |base| + |offset|. If the value is a
+  // double hole then jump to |if_hole|. If |machine_type| is None then only
+  // the hole check is generated.
+  compiler::Node* LoadDoubleWithHoleCheck(
+      compiler::Node* base, compiler::Node* offset, Label* if_hole,
+      MachineType machine_type = MachineType::Float64());
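The hole check does not need to materialize a full Float64: holes in double arrays are encoded as a dedicated NaN bit pattern, so comparing the upper 32 bits of the raw value against that sentinel suffices. A minimal scalar sketch of the idea; the sentinel constant below is an assumption of this note, not read from the patch:

#include <cstdint>
#include <cstring>

constexpr uint32_t kHoleNanUpper32 = 0xFFF7FFFF;  // assumed hole-NaN signature

bool IsHoleDouble(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));  // reinterpret the double without UB
  return static_cast<uint32_t>(bits >> 32) == kHoleNanUpper32;
}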
 
   // Context manipulation
+  compiler::Node* LoadContextElement(compiler::Node* context, int slot_index);
   compiler::Node* LoadNativeContext(compiler::Node* context);
 
   compiler::Node* LoadJSArrayElementsMap(ElementsKind kind,
@@ -256,9 +307,15 @@
   // Store a field to an object on the heap.
   compiler::Node* StoreObjectField(
       compiler::Node* object, int offset, compiler::Node* value);
+  compiler::Node* StoreObjectField(compiler::Node* object,
+                                   compiler::Node* offset,
+                                   compiler::Node* value);
   compiler::Node* StoreObjectFieldNoWriteBarrier(
       compiler::Node* object, int offset, compiler::Node* value,
       MachineRepresentation rep = MachineRepresentation::kTagged);
+  compiler::Node* StoreObjectFieldNoWriteBarrier(
+      compiler::Node* object, compiler::Node* offset, compiler::Node* value,
+      MachineRepresentation rep = MachineRepresentation::kTagged);
   // Store the Map of an HeapObject.
   compiler::Node* StoreMapNoWriteBarrier(compiler::Node* object,
                                          compiler::Node* map);
@@ -275,9 +332,10 @@
       ParameterMode parameter_mode = INTEGER_PARAMETERS);
 
   // Allocate a HeapNumber without initializing its value.
-  compiler::Node* AllocateHeapNumber();
+  compiler::Node* AllocateHeapNumber(MutableMode mode = IMMUTABLE);
   // Allocate a HeapNumber with a specific value.
-  compiler::Node* AllocateHeapNumberWithValue(compiler::Node* value);
+  compiler::Node* AllocateHeapNumberWithValue(compiler::Node* value,
+                                              MutableMode mode = IMMUTABLE);
   // Allocate a SeqOneByteString with the given length.
   compiler::Node* AllocateSeqOneByteString(int length);
   compiler::Node* AllocateSeqOneByteString(compiler::Node* context,
@@ -286,37 +344,122 @@
   compiler::Node* AllocateSeqTwoByteString(int length);
   compiler::Node* AllocateSeqTwoByteString(compiler::Node* context,
                                            compiler::Node* length);
-  // Allocated an JSArray
-  compiler::Node* AllocateJSArray(ElementsKind kind, compiler::Node* array_map,
-                                  compiler::Node* capacity,
-                                  compiler::Node* length,
-                                  compiler::Node* allocation_site = nullptr,
-                                  ParameterMode mode = INTEGER_PARAMETERS);
+
+  // Allocate a SlicedOneByteString with the given length, parent and offset.
+  // |length| and |offset| are expected to be tagged.
+  compiler::Node* AllocateSlicedOneByteString(compiler::Node* length,
+                                              compiler::Node* parent,
+                                              compiler::Node* offset);
+  // Allocate a SlicedTwoByteString with the given length, parent and offset.
+  // |length| and |offset| are expected to be tagged.
+  compiler::Node* AllocateSlicedTwoByteString(compiler::Node* length,
+                                              compiler::Node* parent,
+                                              compiler::Node* offset);
+
+  // Allocate a RegExpResult with the given length (the number of captures,
+  // including the match itself), index (the index where the match starts),
+  // and input string. |length| and |index| are expected to be tagged, and
+  // |input| must be a string.
+  compiler::Node* AllocateRegExpResult(compiler::Node* context,
+                                       compiler::Node* length,
+                                       compiler::Node* index,
+                                       compiler::Node* input);
+
+  // Allocate a JSArray without elements and initialize the header fields.
+  compiler::Node* AllocateUninitializedJSArrayWithoutElements(
+      ElementsKind kind, compiler::Node* array_map, compiler::Node* length,
+      compiler::Node* allocation_site);
+  // Allocate and return a JSArray with initialized header fields and its
+  // uninitialized elements.
+  // The ParameterMode argument is only used for the capacity parameter.
+  std::pair<compiler::Node*, compiler::Node*>
+  AllocateUninitializedJSArrayWithElements(
+      ElementsKind kind, compiler::Node* array_map, compiler::Node* length,
+      compiler::Node* allocation_site, compiler::Node* capacity,
+      ParameterMode capacity_mode = INTEGER_PARAMETERS);
+  // Allocate a JSArray and fill elements with the hole.
+  // The ParameterMode argument is only used for the capacity parameter.
+  compiler::Node* AllocateJSArray(
+      ElementsKind kind, compiler::Node* array_map, compiler::Node* capacity,
+      compiler::Node* length, compiler::Node* allocation_site = nullptr,
+      ParameterMode capacity_mode = INTEGER_PARAMETERS);
 
   compiler::Node* AllocateFixedArray(ElementsKind kind,
                                      compiler::Node* capacity,
                                      ParameterMode mode = INTEGER_PARAMETERS,
                                      AllocationFlags flags = kNone);
 
-  void FillFixedArrayWithHole(ElementsKind kind, compiler::Node* array,
-                              compiler::Node* from_index,
-                              compiler::Node* to_index,
-                              ParameterMode mode = INTEGER_PARAMETERS);
+  void FillFixedArrayWithValue(ElementsKind kind, compiler::Node* array,
+                               compiler::Node* from_index,
+                               compiler::Node* to_index,
+                               Heap::RootListIndex value_root_index,
+                               ParameterMode mode = INTEGER_PARAMETERS);
 
+  // Copies all |length| elements from |from_array| to |to_array| of the same
+  // size, respecting the elements kind.
   void CopyFixedArrayElements(
       ElementsKind kind, compiler::Node* from_array, compiler::Node* to_array,
-      compiler::Node* element_count,
+      compiler::Node* length,
+      WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
+      ParameterMode mode = INTEGER_PARAMETERS) {
+    CopyFixedArrayElements(kind, from_array, kind, to_array, length, length,
+                           barrier_mode, mode);
+  }
+
+  // Copies |element_count| elements from |from_array| to |to_array| of
+  // |capacity| size, respecting both arrays' elements kinds.
+  void CopyFixedArrayElements(
+      ElementsKind from_kind, compiler::Node* from_array, ElementsKind to_kind,
+      compiler::Node* to_array, compiler::Node* element_count,
+      compiler::Node* capacity,
       WriteBarrierMode barrier_mode = UPDATE_WRITE_BARRIER,
       ParameterMode mode = INTEGER_PARAMETERS);
 
+  // Copies |character_count| characters from |from_string| to |to_string|
+  // starting at the |from_index|'th character. |from_index| and
+  // |character_count| must be Smis such that
+  // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
+  void CopyStringCharacters(compiler::Node* from_string,
+                            compiler::Node* to_string,
+                            compiler::Node* from_index,
+                            compiler::Node* character_count,
+                            String::Encoding encoding);
+
+  // Loads an element from |array| of |from_kind| elements at the given
+  // |offset| (NOTE: not index!), does a hole check if |if_hole| is provided,
+  // and converts the value so that it is ready to be stored into an array of
+  // |to_kind| elements.
+  compiler::Node* LoadElementAndPrepareForStore(compiler::Node* array,
+                                                compiler::Node* offset,
+                                                ElementsKind from_kind,
+                                                ElementsKind to_kind,
+                                                Label* if_hole);
+
   compiler::Node* CalculateNewElementsCapacity(
       compiler::Node* old_capacity, ParameterMode mode = INTEGER_PARAMETERS);
 
-  compiler::Node* CheckAndGrowElementsCapacity(compiler::Node* context,
-                                               compiler::Node* elements,
-                                               ElementsKind kind,
-                                               compiler::Node* key,
-                                               Label* fail);
+  // Tries to grow the |elements| array of the given |object| to store the
+  // |key|, or bails out if the growing gap is too big. Returns the new elements.
+  compiler::Node* TryGrowElementsCapacity(compiler::Node* object,
+                                          compiler::Node* elements,
+                                          ElementsKind kind,
+                                          compiler::Node* key, Label* bailout);
+
+  // Tries to grow the |capacity|-length |elements| array of the given
+  // |object| to store the |key|, or bails out if the growing gap is too big.
+  // Returns the new elements.
+  compiler::Node* TryGrowElementsCapacity(compiler::Node* object,
+                                          compiler::Node* elements,
+                                          ElementsKind kind,
+                                          compiler::Node* key,
+                                          compiler::Node* capacity,
+                                          ParameterMode mode, Label* bailout);
+
+  // Grows the elements capacity of the given object. Returns the new elements.
+  compiler::Node* GrowElementsCapacity(
+      compiler::Node* object, compiler::Node* elements, ElementsKind from_kind,
+      ElementsKind to_kind, compiler::Node* capacity,
+      compiler::Node* new_capacity, ParameterMode mode, Label* bailout);
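The grow path first derives a target capacity from the old one via CalculateNewElementsCapacity, copies the old elements across, and bails out to the runtime when the requested index is too far past the current capacity. The classic growth formula is roughly 1.5x plus a small constant; a scalar sketch, with the exact slack value treated as an assumption here:

// Grow by ~1.5x plus a small slack so that small arrays do not reallocate on every store.
int CalculateNewElementsCapacity(int old_capacity) {
  return old_capacity + (old_capacity >> 1) + 16;  // the "+ 16" is assumed, not read from the patch
}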
 
   // Allocation site manipulation
   void InitializeAllocationMemento(compiler::Node* base_allocation,
@@ -347,19 +490,67 @@
                               PrimitiveType primitive_type,
                               char const* method_name);
 
+  // Throws a TypeError for {method_name} if {value} is not of the given
+  // instance type. Returns {value}'s map.
+  compiler::Node* ThrowIfNotInstanceType(compiler::Node* context,
+                                         compiler::Node* value,
+                                         InstanceType instance_type,
+                                         char const* method_name);
+
+  // Type checks.
+  compiler::Node* IsStringInstanceType(compiler::Node* instance_type);
+  compiler::Node* IsJSReceiverInstanceType(compiler::Node* instance_type);
+
   // String helpers.
   // Load a character from a String (might flatten a ConsString).
   compiler::Node* StringCharCodeAt(compiler::Node* string,
                                    compiler::Node* smi_index);
   // Return the single character string with only {code}.
   compiler::Node* StringFromCharCode(compiler::Node* code);
+  // Return a new string object which holds a substring containing the range
+  // [from, to) of |string|. |from| and |to| are expected to be tagged.
+  compiler::Node* SubString(compiler::Node* context, compiler::Node* string,
+                            compiler::Node* from, compiler::Node* to);
 
-  // Returns a node that is true if the given bit is set in |word32|.
+  compiler::Node* StringFromCodePoint(compiler::Node* codepoint,
+                                      UnicodeEncoding encoding);
+
+  // Type conversion helpers.
+  // Convert a String to a Number.
+  compiler::Node* StringToNumber(compiler::Node* context,
+                                 compiler::Node* input);
+  // Convert an object to a name.
+  compiler::Node* ToName(compiler::Node* context, compiler::Node* input);
+  // Convert a non-Number object to a Number.
+  compiler::Node* NonNumberToNumber(compiler::Node* context,
+                                    compiler::Node* input);
+  // Convert any object to a Number.
+  compiler::Node* ToNumber(compiler::Node* context, compiler::Node* input);
+
+  enum ToIntegerTruncationMode {
+    kNoTruncation,
+    kTruncateMinusZero,
+  };
+
+  // Convert any object to an Integer.
+  compiler::Node* ToInteger(compiler::Node* context, compiler::Node* input,
+                            ToIntegerTruncationMode mode = kNoTruncation);
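ToInteger follows the spec-style conversion (NaN becomes 0, the fractional part is truncated); reading the flag name, kTruncateMinusZero additionally folds a -0 result into +0 so callers that never distinguish the two can keep the result as a smi. A scalar sketch under that reading, which is an assumption of this note:

#include <cmath>

enum TruncationMode { kKeepMinusZero, kFoldMinusZero };  // illustrative names

double ToIntegerValue(double input, TruncationMode mode) {
  if (std::isnan(input)) return 0.0;    // NaN -> +0
  double result = std::trunc(input);    // drop the fractional part, keep the sign
  if (mode == kFoldMinusZero && result == 0.0) {
    result = 0.0;                       // -0.0 compares equal to 0.0; overwrite with +0
  }
  return result;
}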
+
+  // Returns a node that contains a decoded (unsigned!) value of a bit
+  // field |T| in |word32|. Returns the result as a uint32 node.
   template <typename T>
   compiler::Node* BitFieldDecode(compiler::Node* word32) {
     return BitFieldDecode(word32, T::kShift, T::kMask);
   }
 
+  // Returns a node that contains a decoded (unsigned!) value of a bit
+  // field |T| in |word32|. Returns the result as a word-size node.
+  template <typename T>
+  compiler::Node* BitFieldDecodeWord(compiler::Node* word32) {
+    return ChangeUint32ToWord(BitFieldDecode<T>(word32));
+  }
+
+  // Decodes an unsigned (!) value from |word32| to a uint32 node.
   compiler::Node* BitFieldDecode(compiler::Node* word32, uint32_t shift,
                                  uint32_t mask);
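Both decode helpers boil down to the usual mask-then-shift on a packed word; BitFieldDecodeWord merely widens the uint32 result to word size. A stand-alone sketch of the scalar operation, with an illustrative field layout:

#include <cstdint>

// Keep the field's bits, then move them down to bit zero: (word32 & mask) >> shift.
uint32_t DecodeBitField(uint32_t word32, uint32_t shift, uint32_t mask) {
  return (word32 & mask) >> shift;
}

// Example layout (illustrative only): a 3-bit field at bit 4 uses shift = 4, mask = 0x7 << 4.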
 
@@ -399,9 +590,9 @@
   compiler::Node* ComputeIntegerHash(compiler::Node* key, compiler::Node* seed);
 
   template <typename Dictionary>
-  void NumberDictionaryLookup(compiler::Node* dictionary, compiler::Node* key,
-                              Label* if_found, Variable* var_entry,
-                              Label* if_not_found);
+  void NumberDictionaryLookup(compiler::Node* dictionary,
+                              compiler::Node* intptr_index, Label* if_found,
+                              Variable* var_entry, Label* if_not_found);
 
   // Tries to check if {object} has own {unique_name} property.
   void TryHasOwnProperty(compiler::Node* object, compiler::Node* map,
@@ -454,9 +645,9 @@
                          Label* if_not_found, Label* if_bailout);
 
   void TryLookupElement(compiler::Node* object, compiler::Node* map,
-                        compiler::Node* instance_type, compiler::Node* index,
-                        Label* if_found, Label* if_not_found,
-                        Label* if_bailout);
+                        compiler::Node* instance_type,
+                        compiler::Node* intptr_index, Label* if_found,
+                        Label* if_not_found, Label* if_bailout);
 
   // This is a type of a lookup in holder generator function. In case of a
   // property lookup the {key} is guaranteed to be a unique name and in case of
@@ -484,7 +675,7 @@
                                       compiler::Node* callable,
                                       compiler::Node* object);
 
-  // LoadIC helpers.
+  // Load/StoreIC helpers.
   struct LoadICParameters {
     LoadICParameters(compiler::Node* context, compiler::Node* receiver,
                      compiler::Node* name, compiler::Node* slot,
@@ -502,6 +693,15 @@
     compiler::Node* vector;
   };
 
+  struct StoreICParameters : public LoadICParameters {
+    StoreICParameters(compiler::Node* context, compiler::Node* receiver,
+                      compiler::Node* name, compiler::Node* value,
+                      compiler::Node* slot, compiler::Node* vector)
+        : LoadICParameters(context, receiver, name, slot, vector),
+          value(value) {}
+    compiler::Node* value;
+  };
+
   // Load type feedback vector from the stub caller's frame.
   compiler::Node* LoadTypeFeedbackVectorForStub();
 
@@ -513,12 +713,12 @@
   compiler::Node* LoadReceiverMap(compiler::Node* receiver);
 
   // Checks monomorphic case. Returns {feedback} entry of the vector.
-  compiler::Node* TryMonomorphicCase(const LoadICParameters* p,
+  compiler::Node* TryMonomorphicCase(compiler::Node* slot,
+                                     compiler::Node* vector,
                                      compiler::Node* receiver_map,
                                      Label* if_handler, Variable* var_handler,
                                      Label* if_miss);
-  void HandlePolymorphicCase(const LoadICParameters* p,
-                             compiler::Node* receiver_map,
+  void HandlePolymorphicCase(compiler::Node* receiver_map,
                              compiler::Node* feedback, Label* if_handler,
                              Variable* var_handler, Label* if_miss,
                              int unroll_count);
@@ -543,9 +743,75 @@
                          compiler::Node* name, Label* if_handler,
                          Variable* var_handler, Label* if_miss);
 
+  // Extends the properties backing store by JSObject::kFieldsAdded elements.
+  void ExtendPropertiesBackingStore(compiler::Node* object);
+
+  compiler::Node* PrepareValueForWrite(compiler::Node* value,
+                                       Representation representation,
+                                       Label* bailout);
+
+  void StoreNamedField(compiler::Node* object, FieldIndex index,
+                       Representation representation, compiler::Node* value,
+                       bool transition_to_field);
+
+  void StoreNamedField(compiler::Node* object, compiler::Node* offset,
+                       bool is_inobject, Representation representation,
+                       compiler::Node* value, bool transition_to_field);
+
+  // Emits a keyed sloppy arguments load. Returns the loaded value.
+  compiler::Node* LoadKeyedSloppyArguments(compiler::Node* receiver,
+                                           compiler::Node* key,
+                                           Label* bailout) {
+    return EmitKeyedSloppyArguments(receiver, key, nullptr, bailout);
+  }
+
+  // Emits a keyed sloppy arguments store.
+  void StoreKeyedSloppyArguments(compiler::Node* receiver, compiler::Node* key,
+                                 compiler::Node* value, Label* bailout) {
+    DCHECK_NOT_NULL(value);
+    EmitKeyedSloppyArguments(receiver, key, value, bailout);
+  }
+
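Both wrappers funnel into one code path because mapped and unmapped sloppy arguments differ only in where the value lives: slot 0 of the elements holds the context, slot 1 the unmapped backing FixedArray, and the following slots hold either a context index or the hole (see the layout comment removed from code-stubs-hydrogen.cc further down in this patch). A simplified load-path sketch over plain vectors; the hole is modeled as a sentinel and all names are illustrative:

#include <cstdint>
#include <stdexcept>
#include <vector>

constexpr int32_t kTheHole = INT32_MIN;  // stand-in for the hole marker

int32_t LoadSloppyArgument(const std::vector<int32_t>& context,
                           const std::vector<int32_t>& unmapped_arguments,
                           const std::vector<int32_t>& mapped_slots,  // hole or a context slot index
                           int32_t key) {
  if (key < 0) throw std::out_of_range("negative key");
  if (key < static_cast<int32_t>(mapped_slots.size()) &&
      mapped_slots[key] != kTheHole) {
    return context[mapped_slots[key]];  // mapped: the argument aliases a context slot
  }
  return unmapped_arguments.at(key);    // unmapped: a regular indexed property
}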
+  // Loads script context from the script context table.
+  compiler::Node* LoadScriptContext(compiler::Node* context, int context_index);
+
+  compiler::Node* ClampedToUint8(compiler::Node* int32_value);
+
+  // Store a value to an elements array with the given elements kind.
+  void StoreElement(compiler::Node* elements, ElementsKind kind,
+                    compiler::Node* index, compiler::Node* value,
+                    ParameterMode mode);
+
+  void EmitElementStore(compiler::Node* object, compiler::Node* key,
+                        compiler::Node* value, bool is_jsarray,
+                        ElementsKind elements_kind,
+                        KeyedAccessStoreMode store_mode, Label* bailout);
+
+  compiler::Node* CheckForCapacityGrow(compiler::Node* object,
+                                       compiler::Node* elements,
+                                       ElementsKind kind,
+                                       compiler::Node* length,
+                                       compiler::Node* key, ParameterMode mode,
+                                       bool is_js_array, Label* bailout);
+
+  compiler::Node* CopyElementsOnWrite(compiler::Node* object,
+                                      compiler::Node* elements,
+                                      ElementsKind kind, compiler::Node* length,
+                                      ParameterMode mode, Label* bailout);
+
   void LoadIC(const LoadICParameters* p);
   void LoadGlobalIC(const LoadICParameters* p);
   void KeyedLoadIC(const LoadICParameters* p);
+  void KeyedLoadICGeneric(const LoadICParameters* p);
+  void StoreIC(const StoreICParameters* p);
+
+  void TransitionElementsKind(compiler::Node* object, compiler::Node* map,
+                              ElementsKind from_kind, ElementsKind to_kind,
+                              bool is_jsarray, Label* bailout);
+
+  void TrapAllocationMemento(compiler::Node* object, Label* memento_found);
+
+  compiler::Node* PageFromAddress(compiler::Node* address);
 
   // Get the enumerable length from |map| and return the result as a Smi.
   compiler::Node* EnumLength(compiler::Node* map);
@@ -562,9 +828,13 @@
       compiler::Node* feedback_vector, compiler::Node* slot,
       compiler::Node* value);
 
-  compiler::Node* GetFixedAarrayAllocationSize(compiler::Node* element_count,
-                                               ElementsKind kind,
-                                               ParameterMode mode) {
+  // Create a new AllocationSite and install it into a feedback vector.
+  compiler::Node* CreateAllocationSiteInFeedbackVector(
+      compiler::Node* feedback_vector, compiler::Node* slot);
+
+  compiler::Node* GetFixedArrayAllocationSize(compiler::Node* element_count,
+                                              ElementsKind kind,
+                                              ParameterMode mode) {
     return ElementOffsetFromIndex(element_count, kind, mode,
                                   FixedArray::kHeaderSize);
   }
@@ -572,17 +842,34 @@
  private:
   enum ElementSupport { kOnlyProperties, kSupportElements };
 
+  void DescriptorLookupLinear(compiler::Node* unique_name,
+                              compiler::Node* descriptors, compiler::Node* nof,
+                              Label* if_found, Variable* var_name_index,
+                              Label* if_not_found);
+  compiler::Node* CallGetterIfAccessor(compiler::Node* value,
+                                       compiler::Node* details,
+                                       compiler::Node* context,
+                                       compiler::Node* receiver,
+                                       Label* if_bailout);
+
   void HandleLoadICHandlerCase(
       const LoadICParameters* p, compiler::Node* handler, Label* miss,
       ElementSupport support_elements = kOnlyProperties);
   compiler::Node* TryToIntptr(compiler::Node* key, Label* miss);
-  void EmitBoundsCheck(compiler::Node* object, compiler::Node* elements,
-                       compiler::Node* intptr_key, compiler::Node* is_jsarray,
-                       Label* miss);
+  void EmitFastElementsBoundsCheck(compiler::Node* object,
+                                   compiler::Node* elements,
+                                   compiler::Node* intptr_index,
+                                   compiler::Node* is_jsarray_condition,
+                                   Label* miss);
   void EmitElementLoad(compiler::Node* object, compiler::Node* elements,
                        compiler::Node* elements_kind, compiler::Node* key,
-                       Label* if_hole, Label* rebox_double,
-                       Variable* var_double_value, Label* miss);
+                       compiler::Node* is_jsarray_condition, Label* if_hole,
+                       Label* rebox_double, Variable* var_double_value,
+                       Label* unimplemented_elements_kind, Label* out_of_bounds,
+                       Label* miss);
+  void BranchIfPrototypesHaveNoElements(compiler::Node* receiver_map,
+                                        Label* definitely_no_elements,
+                                        Label* possibly_elements);
 
   compiler::Node* ElementOffsetFromIndex(compiler::Node* index,
                                          ElementsKind kind, ParameterMode mode,
@@ -596,9 +883,23 @@
                                        AllocationFlags flags,
                                        compiler::Node* top_adddress,
                                        compiler::Node* limit_address);
+  // Allocate and return a JSArray of the given total size in bytes with its
+  // header fields initialized.
+  compiler::Node* AllocateUninitializedJSArray(ElementsKind kind,
+                                               compiler::Node* array_map,
+                                               compiler::Node* length,
+                                               compiler::Node* allocation_site,
+                                               compiler::Node* size_in_bytes);
 
   compiler::Node* SmiShiftBitsConstant();
 
+  // Emits a keyed sloppy arguments load if |value| is nullptr, or a store
+  // otherwise. Returns either the loaded value or |value|.
+  compiler::Node* EmitKeyedSloppyArguments(compiler::Node* receiver,
+                                           compiler::Node* key,
+                                           compiler::Node* value,
+                                           Label* bailout);
+
   static const int kElementLoopUnrollThreshold = 8;
 };
 
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index fa7a49e..a294d56 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -7,6 +7,7 @@
 #include <memory>
 
 #include "src/bailout-reason.h"
+#include "src/code-factory.h"
 #include "src/crankshaft/hydrogen.h"
 #include "src/crankshaft/lithium.h"
 #include "src/field-index.h"
@@ -37,7 +38,7 @@
 class CodeStubGraphBuilderBase : public HGraphBuilder {
  public:
   explicit CodeStubGraphBuilderBase(CompilationInfo* info, CodeStub* code_stub)
-      : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor()),
+      : HGraphBuilder(info, code_stub->GetCallInterfaceDescriptor(), false),
         arguments_length_(NULL),
         info_(info),
         code_stub_(code_stub),
@@ -59,7 +60,8 @@
     return parameters_[parameter];
   }
   Representation GetParameterRepresentation(int parameter) {
-    return RepresentationFromType(descriptor_.GetParameterType(parameter));
+    return RepresentationFromMachineType(
+        descriptor_.GetParameterType(parameter));
   }
   bool IsParameterCountRegister(int index) const {
     return descriptor_.GetRegisterParameter(index)
@@ -83,10 +85,6 @@
   HValue* BuildPushElement(HValue* object, HValue* argc,
                            HValue* argument_elements, ElementsKind kind);
 
-  HValue* UnmappedCase(HValue* elements, HValue* key, HValue* value);
-  HValue* EmitKeyedSloppyArguments(HValue* receiver, HValue* key,
-                                   HValue* value);
-
   HValue* BuildToString(HValue* input, bool convert);
   HValue* BuildToPrimitive(HValue* input, HValue* input_map);
 
@@ -129,8 +127,8 @@
                               HParameter::STACK_PARAMETER, r);
     } else {
       param = Add<HParameter>(i, HParameter::REGISTER_PARAMETER, r);
-      start_environment->Bind(i, param);
     }
+    start_environment->Bind(i, param);
     parameters_[i] = param;
     if (i < register_param_count && IsParameterCountRegister(i)) {
       param->set_type(HType::Smi());
@@ -334,7 +332,7 @@
 HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
   info()->MarkAsSavesCallerDoubles();
   HValue* number = GetParameter(Descriptor::kArgument);
-  return BuildNumberToString(number, Type::Number());
+  return BuildNumberToString(number, AstType::Number());
 }
 
 
@@ -342,119 +340,6 @@
   return DoGenerateCode(this);
 }
 
-
-template <>
-HValue* CodeStubGraphBuilder<FastCloneShallowArrayStub>::BuildCodeStub() {
-  Factory* factory = isolate()->factory();
-  HValue* undefined = graph()->GetConstantUndefined();
-  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
-  HValue* closure = GetParameter(Descriptor::kClosure);
-  HValue* literal_index = GetParameter(Descriptor::kLiteralIndex);
-
-  // TODO(turbofan): This codestub has regressed to need a frame on ia32 at some
-  // point and wasn't caught since it wasn't built in the snapshot. We should
-  // probably just replace with a TurboFan stub rather than fixing it.
-#if !(V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87)
-  // This stub is very performance sensitive, the generated code must be tuned
-  // so that it doesn't build and eager frame.
-  info()->MarkMustNotHaveEagerFrame();
-#endif
-
-  HValue* literals_array = Add<HLoadNamedField>(
-      closure, nullptr, HObjectAccess::ForLiteralsPointer());
-
-  HInstruction* allocation_site = Add<HLoadKeyed>(
-      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
-      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
-  IfBuilder checker(this);
-  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
-                                                    undefined);
-  checker.Then();
-
-  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
-      AllocationSite::kTransitionInfoOffset);
-  HInstruction* boilerplate =
-      Add<HLoadNamedField>(allocation_site, nullptr, access);
-  HValue* elements = AddLoadElements(boilerplate);
-  HValue* capacity = AddLoadFixedArrayLength(elements);
-  IfBuilder zero_capacity(this);
-  zero_capacity.If<HCompareNumericAndBranch>(capacity, graph()->GetConstant0(),
-                                           Token::EQ);
-  zero_capacity.Then();
-  Push(BuildCloneShallowArrayEmpty(boilerplate,
-                                   allocation_site,
-                                   alloc_site_mode));
-  zero_capacity.Else();
-  IfBuilder if_fixed_cow(this);
-  if_fixed_cow.If<HCompareMap>(elements, factory->fixed_cow_array_map());
-  if_fixed_cow.Then();
-  Push(BuildCloneShallowArrayCow(boilerplate,
-                                 allocation_site,
-                                 alloc_site_mode,
-                                 FAST_ELEMENTS));
-  if_fixed_cow.Else();
-  IfBuilder if_fixed(this);
-  if_fixed.If<HCompareMap>(elements, factory->fixed_array_map());
-  if_fixed.Then();
-  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
-                                      allocation_site,
-                                      alloc_site_mode,
-                                      FAST_ELEMENTS));
-
-  if_fixed.Else();
-  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
-                                      allocation_site,
-                                      alloc_site_mode,
-                                      FAST_DOUBLE_ELEMENTS));
-  if_fixed.End();
-  if_fixed_cow.End();
-  zero_capacity.End();
-
-  checker.ElseDeopt(DeoptimizeReason::kUninitializedBoilerplateLiterals);
-  checker.End();
-
-  return environment()->Pop();
-}
-
-
-Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-template <>
-HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
-  int context_index = casted_stub()->context_index();
-  int slot_index = casted_stub()->slot_index();
-
-  HValue* script_context = BuildGetScriptContext(context_index);
-  return Add<HLoadNamedField>(script_context, nullptr,
-                              HObjectAccess::ForContextSlot(slot_index));
-}
-
-
-Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
-  int context_index = casted_stub()->context_index();
-  int slot_index = casted_stub()->slot_index();
-
-  HValue* script_context = BuildGetScriptContext(context_index);
-  Add<HStoreNamedField>(script_context,
-                        HObjectAccess::ForContextSlot(slot_index),
-                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
-  // TODO(ishell): Remove this unused stub.
-  return GetParameter(2);
-}
-
-
-Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
 HValue* CodeStubGraphBuilderBase::BuildPushElement(HValue* object, HValue* argc,
                                                    HValue* argument_elements,
                                                    ElementsKind kind) {
@@ -505,6 +390,7 @@
 HValue* CodeStubGraphBuilder<FastArrayPushStub>::BuildCodeStub() {
   // TODO(verwaest): Fix deoptimizer messages.
   HValue* argc = GetArgumentsLength();
+
   HInstruction* argument_elements = Add<HArgumentsElements>(false, false);
   HInstruction* object = Add<HAccessArgumentsAt>(argument_elements, argc,
                                                  graph()->GetConstantMinus1());
@@ -904,155 +790,6 @@
 Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }
 
 
-HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
-                                               HValue* value) {
-  HValue* result = NULL;
-  HInstruction* backing_store =
-      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
-                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);
-  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
-  HValue* backing_store_length = Add<HLoadNamedField>(
-      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
-  IfBuilder in_unmapped_range(this);
-  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
-                                                 Token::LT);
-  in_unmapped_range.Then();
-  {
-    if (value == NULL) {
-      result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
-                               FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
-    } else {
-      Add<HStoreKeyed>(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
-    }
-  }
-  in_unmapped_range.ElseDeopt(DeoptimizeReason::kOutsideOfRange);
-  in_unmapped_range.End();
-  return result;
-}
-
-
-HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
-                                                           HValue* key,
-                                                           HValue* value) {
-  // Mapped arguments are actual arguments. Unmapped arguments are values added
-  // to the arguments object after it was created for the call. Mapped arguments
-  // are stored in the context at indexes given by elements[key + 2]. Unmapped
-  // arguments are stored as regular indexed properties in the arguments array,
-  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
-  // look at argument object construction.
-  //
-  // The sloppy arguments elements array has a special format:
-  //
-  // 0: context
-  // 1: unmapped arguments array
-  // 2: mapped_index0,
-  // 3: mapped_index1,
-  // ...
-  //
-  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
-  // If key + 2 >= elements.length then attempt to look in the unmapped
-  // arguments array (given by elements[1]) and return the value at key, missing
-  // to the runtime if the unmapped arguments array is not a fixed array or if
-  // key >= unmapped_arguments_array.length.
-  //
-  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
-  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
-  // index into the context array given at elements[0]. Return the value at
-  // context[t].
-
-  bool is_load = value == NULL;
-
-  key = AddUncasted<HForceRepresentation>(key, Representation::Smi());
-  IfBuilder positive_smi(this);
-  positive_smi.If<HCompareNumericAndBranch>(key, graph()->GetConstant0(),
-                                            Token::LT);
-  positive_smi.ThenDeopt(DeoptimizeReason::kKeyIsNegative);
-  positive_smi.End();
-
-  HValue* constant_two = Add<HConstant>(2);
-  HValue* elements = AddLoadElements(receiver, nullptr);
-  HValue* elements_length = Add<HLoadNamedField>(
-      elements, nullptr, HObjectAccess::ForFixedArrayLength());
-  HValue* adjusted_length = AddUncasted<HSub>(elements_length, constant_two);
-  IfBuilder in_range(this);
-  in_range.If<HCompareNumericAndBranch>(key, adjusted_length, Token::LT);
-  in_range.Then();
-  {
-    HValue* index = AddUncasted<HAdd>(key, constant_two);
-    HInstruction* mapped_index =
-        Add<HLoadKeyed>(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
-                        ALLOW_RETURN_HOLE);
-
-    IfBuilder is_valid(this);
-    is_valid.IfNot<HCompareObjectEqAndBranch>(mapped_index,
-                                              graph()->GetConstantHole());
-    is_valid.Then();
-    {
-      // TODO(mvstanton): I'd like to assert from this point, that if the
-      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
-      // smi check is being emitted.
-      HValue* the_context = Add<HLoadKeyed>(elements, graph()->GetConstant0(),
-                                            nullptr, nullptr, FAST_ELEMENTS);
-      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
-      if (is_load) {
-        HValue* result =
-            Add<HLoadKeyed>(the_context, mapped_index, nullptr, nullptr,
-                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);
-        environment()->Push(result);
-      } else {
-        DCHECK(value != NULL);
-        Add<HStoreKeyed>(the_context, mapped_index, value, nullptr,
-                         FAST_ELEMENTS);
-        environment()->Push(value);
-      }
-    }
-    is_valid.Else();
-    {
-      HValue* result = UnmappedCase(elements, key, value);
-      environment()->Push(is_load ? result : value);
-    }
-    is_valid.End();
-  }
-  in_range.Else();
-  {
-    HValue* result = UnmappedCase(elements, key, value);
-    environment()->Push(is_load ? result : value);
-  }
-  in_range.End();
-
-  return environment()->Pop();
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<KeyedLoadSloppyArgumentsStub>::BuildCodeStub() {
-  HValue* receiver = GetParameter(Descriptor::kReceiver);
-  HValue* key = GetParameter(Descriptor::kName);
-
-  return EmitKeyedSloppyArguments(receiver, key, NULL);
-}
-
-
-Handle<Code> KeyedLoadSloppyArgumentsStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<KeyedStoreSloppyArgumentsStub>::BuildCodeStub() {
-  HValue* receiver = GetParameter(Descriptor::kReceiver);
-  HValue* key = GetParameter(Descriptor::kName);
-  HValue* value = GetParameter(Descriptor::kValue);
-
-  return EmitKeyedSloppyArguments(receiver, key, value);
-}
-
-
-Handle<Code> KeyedStoreSloppyArgumentsStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
 void CodeStubGraphBuilderBase::BuildStoreNamedField(
     HValue* object, HValue* value, FieldIndex index,
     Representation representation, bool transition_to_field) {
@@ -1099,99 +836,6 @@
 
 
 template <>
-HValue* CodeStubGraphBuilder<StoreFieldStub>::BuildCodeStub() {
-  BuildStoreNamedField(GetParameter(Descriptor::kReceiver),
-                       GetParameter(Descriptor::kValue), casted_stub()->index(),
-                       casted_stub()->representation(), false);
-  return GetParameter(Descriptor::kValue);
-}
-
-
-Handle<Code> StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }
-
-
-template <>
-HValue* CodeStubGraphBuilder<StoreTransitionStub>::BuildCodeStub() {
-  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
-  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
-  StoreTransitionStub::StoreMode store_mode = casted_stub()->store_mode();
-
-  if (store_mode != StoreTransitionStub::StoreMapOnly) {
-    value = GetParameter(StoreTransitionHelper::ValueIndex());
-    Representation representation = casted_stub()->representation();
-    if (representation.IsDouble()) {
-      // In case we are storing a double, assure that the value is a double
-      // before manipulating the properties backing store. Otherwise the actual
-      // store may deopt, leaving the backing store in an overallocated state.
-      value = AddUncasted<HForceRepresentation>(value, representation);
-    }
-  }
-
-  switch (store_mode) {
-    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
-      HValue* properties = Add<HLoadNamedField>(
-          object, nullptr, HObjectAccess::ForPropertiesPointer());
-      HValue* length = AddLoadFixedArrayLength(properties);
-      HValue* delta =
-          Add<HConstant>(static_cast<int32_t>(JSObject::kFieldsAdded));
-      HValue* new_capacity = AddUncasted<HAdd>(length, delta);
-
-      // Grow properties array.
-      ElementsKind kind = FAST_ELEMENTS;
-      Add<HBoundsCheck>(new_capacity,
-                        Add<HConstant>((Page::kMaxRegularHeapObjectSize -
-                                        FixedArray::kHeaderSize) >>
-                                       ElementsKindToShiftSize(kind)));
-
-      // Reuse this code for properties backing store allocation.
-      HValue* new_properties =
-          BuildAllocateAndInitializeArray(kind, new_capacity);
-
-      BuildCopyProperties(properties, new_properties, length, new_capacity);
-
-      Add<HStoreNamedField>(object, HObjectAccess::ForPropertiesPointer(),
-                            new_properties);
-    }
-    // Fall through.
-    case StoreTransitionStub::StoreMapAndValue:
-      // Store the new value into the "extended" object.
-      BuildStoreNamedField(object, value, casted_stub()->index(),
-                           casted_stub()->representation(), true);
-    // Fall through.
-
-    case StoreTransitionStub::StoreMapOnly:
-      // And finally update the map.
-      Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
-                            GetParameter(StoreTransitionHelper::MapIndex()));
-      break;
-  }
-  return value;
-}
-
-
-Handle<Code> StoreTransitionStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
-  BuildUncheckedMonomorphicElementAccess(
-      GetParameter(Descriptor::kReceiver), GetParameter(Descriptor::kName),
-      GetParameter(Descriptor::kValue), casted_stub()->is_js_array(),
-      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
-      casted_stub()->store_mode());
-
-  return GetParameter(Descriptor::kValue);
-}
-
-
-Handle<Code> StoreFastElementStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
 HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
   ElementsKind const from_kind = casted_stub()->from_kind();
   ElementsKind const to_kind = casted_stub()->to_kind();
@@ -1262,26 +906,26 @@
   HValue* left = GetParameter(Descriptor::kLeft);
   HValue* right = GetParameter(Descriptor::kRight);
 
-  Type* left_type = state.GetLeftType();
-  Type* right_type = state.GetRightType();
-  Type* result_type = state.GetResultType();
+  AstType* left_type = state.GetLeftType();
+  AstType* right_type = state.GetRightType();
+  AstType* result_type = state.GetResultType();
 
-  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
-         (state.HasSideEffects() || !result_type->Is(Type::None())));
+  DCHECK(!left_type->Is(AstType::None()) && !right_type->Is(AstType::None()) &&
+         (state.HasSideEffects() || !result_type->Is(AstType::None())));
 
   HValue* result = NULL;
   HAllocationMode allocation_mode(NOT_TENURED);
-  if (state.op() == Token::ADD &&
-      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
-      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
+  if (state.op() == Token::ADD && (left_type->Maybe(AstType::String()) ||
+                                   right_type->Maybe(AstType::String())) &&
+      !left_type->Is(AstType::String()) && !right_type->Is(AstType::String())) {
     // For the generic add stub a fast case for string addition is performance
     // critical.
-    if (left_type->Maybe(Type::String())) {
+    if (left_type->Maybe(AstType::String())) {
       IfBuilder if_leftisstring(this);
       if_leftisstring.If<HIsStringAndBranch>(left);
       if_leftisstring.Then();
       {
-        Push(BuildBinaryOperation(state.op(), left, right, Type::String(),
+        Push(BuildBinaryOperation(state.op(), left, right, AstType::String(),
                                   right_type, result_type,
                                   state.fixed_right_arg(), allocation_mode));
       }
@@ -1299,7 +943,7 @@
       if_rightisstring.Then();
       {
         Push(BuildBinaryOperation(state.op(), left, right, left_type,
-                                  Type::String(), result_type,
+                                  AstType::String(), result_type,
                                   state.fixed_right_arg(), allocation_mode));
       }
       if_rightisstring.Else();
@@ -1340,9 +984,9 @@
   HValue* left = GetParameter(Descriptor::kLeft);
   HValue* right = GetParameter(Descriptor::kRight);
 
-  Type* left_type = state.GetLeftType();
-  Type* right_type = state.GetRightType();
-  Type* result_type = state.GetResultType();
+  AstType* left_type = state.GetLeftType();
+  AstType* right_type = state.GetRightType();
+  AstType* result_type = state.GetResultType();
   HAllocationMode allocation_mode(allocation_site);
 
   return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
@@ -1363,7 +1007,7 @@
   if_inputissmi.Then();
   {
     // Convert the input smi to a string.
-    Push(BuildNumberToString(input, Type::SignedSmall()));
+    Push(BuildNumberToString(input, AstType::SignedSmall()));
   }
   if_inputissmi.Else();
   {
@@ -1399,10 +1043,10 @@
       }
       if_inputisprimitive.End();
       // Convert the primitive to a string value.
-      ToStringStub stub(isolate());
       HValue* values[] = {context(), Pop()};
-      Push(AddUncasted<HCallWithDescriptor>(Add<HConstant>(stub.GetCode()), 0,
-                                            stub.GetCallInterfaceDescriptor(),
+      Callable toString = CodeFactory::ToString(isolate());
+      Push(AddUncasted<HCallWithDescriptor>(Add<HConstant>(toString.code()), 0,
+                                            toString.descriptor(),
                                             ArrayVector(values)));
     }
     if_inputisstring.End();
@@ -1531,134 +1175,6 @@
 Handle<Code> ToBooleanICStub::GenerateCode() { return DoGenerateCode(this); }
 
 template <>
-HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
-  StoreGlobalStub* stub = casted_stub();
-  HParameter* value = GetParameter(Descriptor::kValue);
-  if (stub->check_global()) {
-    // Check that the map of the global has not changed: use a placeholder map
-    // that will be replaced later with the global object's map.
-    HParameter* proxy = GetParameter(Descriptor::kReceiver);
-    HValue* proxy_map =
-        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
-    HValue* global =
-        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
-    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
-        StoreGlobalStub::global_map_placeholder(isolate())));
-    HValue* expected_map = Add<HLoadNamedField>(
-        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
-    HValue* map =
-        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
-    IfBuilder map_check(this);
-    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
-    map_check.ThenDeopt(DeoptimizeReason::kUnknownMap);
-    map_check.End();
-  }
-
-  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
-      StoreGlobalStub::property_cell_placeholder(isolate())));
-  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
-                                      HObjectAccess::ForWeakCellValue());
-  Add<HCheckHeapObject>(cell);
-  HObjectAccess access = HObjectAccess::ForPropertyCellValue();
-  // Load the payload of the global parameter cell. A hole indicates that the
-  // cell has been invalidated and that the store must be handled by the
-  // runtime.
-  HValue* cell_contents = Add<HLoadNamedField>(cell, nullptr, access);
-
-  auto cell_type = stub->cell_type();
-  if (cell_type == PropertyCellType::kConstant ||
-      cell_type == PropertyCellType::kUndefined) {
-    // This is always valid for all states a cell can be in.
-    IfBuilder builder(this);
-    builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
-    builder.Then();
-    builder.ElseDeopt(
-        DeoptimizeReason::kUnexpectedCellContentsInConstantGlobalStore);
-    builder.End();
-  } else {
-    IfBuilder builder(this);
-    HValue* hole_value = graph()->GetConstantHole();
-    builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
-    builder.Then();
-    builder.Deopt(DeoptimizeReason::kUnexpectedCellContentsInGlobalStore);
-    builder.Else();
-    // When dealing with constant types, the type may be allowed to change, as
-    // long as optimized code remains valid.
-    if (cell_type == PropertyCellType::kConstantType) {
-      switch (stub->constant_type()) {
-        case PropertyCellConstantType::kSmi:
-          access = access.WithRepresentation(Representation::Smi());
-          break;
-        case PropertyCellConstantType::kStableMap: {
-          // It is sufficient here to check that the value and cell contents
-          // have identical maps, no matter if they are stable or not or if they
-          // are the maps that were originally in the cell or not. If optimized
-          // code will deopt when a cell has a unstable map and if it has a
-          // dependency on a stable map, it will deopt if the map destabilizes.
-          Add<HCheckHeapObject>(value);
-          Add<HCheckHeapObject>(cell_contents);
-          HValue* expected_map = Add<HLoadNamedField>(cell_contents, nullptr,
-                                                      HObjectAccess::ForMap());
-          HValue* map =
-              Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
-          IfBuilder map_check(this);
-          map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
-          map_check.ThenDeopt(DeoptimizeReason::kUnknownMap);
-          map_check.End();
-          access = access.WithRepresentation(Representation::HeapObject());
-          break;
-        }
-      }
-    }
-    Add<HStoreNamedField>(cell, access, value);
-    builder.End();
-  }
-
-  return value;
-}
-
-
-Handle<Code> StoreGlobalStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
-HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
-  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());
-  HValue* key = GetParameter(StoreTransitionHelper::NameIndex());
-  HValue* value = GetParameter(StoreTransitionHelper::ValueIndex());
-  HValue* map = GetParameter(StoreTransitionHelper::MapIndex());
-
-  if (FLAG_trace_elements_transitions) {
-    // Tracing elements transitions is the job of the runtime.
-    Add<HDeoptimize>(DeoptimizeReason::kTracingElementsTransitions,
-                     Deoptimizer::EAGER);
-  } else {
-    info()->MarkAsSavesCallerDoubles();
-
-    BuildTransitionElementsKind(object, map,
-                                casted_stub()->from_kind(),
-                                casted_stub()->to_kind(),
-                                casted_stub()->is_jsarray());
-
-    BuildUncheckedMonomorphicElementAccess(object, key, value,
-                                           casted_stub()->is_jsarray(),
-                                           casted_stub()->to_kind(),
-                                           STORE, ALLOW_RETURN_HOLE,
-                                           casted_stub()->store_mode());
-  }
-
-  return value;
-}
-
-
-Handle<Code> ElementsTransitionAndStoreStub::GenerateCode() {
-  return DoGenerateCode(this);
-}
-
-
-template <>
 HValue* CodeStubGraphBuilder<LoadDictionaryElementStub>::BuildCodeStub() {
   HValue* receiver = GetParameter(Descriptor::kReceiver);
   HValue* key = GetParameter(Descriptor::kName);
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 2b71716..b899943 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -6,6 +6,7 @@
 
 #include <sstream>
 
+#include "src/ast/ast.h"
 #include "src/bootstrapper.h"
 #include "src/code-factory.h"
 #include "src/code-stub-assembler.h"
@@ -14,7 +15,6 @@
 #include "src/ic/handler-compiler.h"
 #include "src/ic/ic.h"
 #include "src/macro-assembler.h"
-#include "src/parsing/parser.h"
 
 namespace v8 {
 namespace internal {
@@ -498,6 +498,140 @@
   assembler->KeyedLoadIC(&p);
 }
 
+void StoreICTrampolineTFStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+  Node* vector = assembler->LoadTypeFeedbackVectorForStub();
+
+  CodeStubAssembler::StoreICParameters p(context, receiver, name, value, slot,
+                                         vector);
+  assembler->StoreIC(&p);
+}
+
+void StoreICTFStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  CodeStubAssembler::StoreICParameters p(context, receiver, name, value, slot,
+                                         vector);
+  assembler->StoreIC(&p);
+}
+
+void StoreMapStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* map = assembler->Parameter(Descriptor::kMap);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+
+  assembler->StoreObjectField(receiver, JSObject::kMapOffset, map);
+  assembler->Return(value);
+}
+
+void StoreTransitionStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* offset =
+      assembler->SmiUntag(assembler->Parameter(Descriptor::kFieldOffset));
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* map = assembler->Parameter(Descriptor::kMap);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
+  Representation representation = this->representation();
+  assembler->Comment("StoreTransitionStub: is_inobject: %d: representation: %s",
+                     is_inobject(), representation.Mnemonic());
+
+  Node* prepared_value =
+      assembler->PrepareValueForWrite(value, representation, &miss);
+
+  if (store_mode() == StoreTransitionStub::ExtendStorageAndStoreMapAndValue) {
+    assembler->Comment("Extend storage");
+    assembler->ExtendPropertiesBackingStore(receiver);
+  } else {
+    DCHECK(store_mode() == StoreTransitionStub::StoreMapAndValue);
+  }
+
+  // Store the new value into the "extended" object.
+  assembler->Comment("Store value");
+  assembler->StoreNamedField(receiver, offset, is_inobject(), representation,
+                             prepared_value, true);
+
+  // And finally update the map.
+  assembler->Comment("Store map");
+  assembler->StoreObjectField(receiver, JSObject::kMapOffset, map);
+  assembler->Return(value);
+
+  // Only stores to tagged fields never bail out.
+  if (!representation.IsTagged()) {
+    assembler->Bind(&miss);
+    {
+      assembler->Comment("Miss");
+      assembler->TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot,
+                                 vector, receiver, name);
+    }
+  }
+}
+
+void ElementsTransitionAndStoreStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* key = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* map = assembler->Parameter(Descriptor::kMap);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  assembler->Comment(
+      "ElementsTransitionAndStoreStub: from_kind=%s, to_kind=%s,"
+      " is_jsarray=%d, store_mode=%d",
+      ElementsKindToString(from_kind()), ElementsKindToString(to_kind()),
+      is_jsarray(), store_mode());
+
+  Label miss(assembler);
+
+  if (FLAG_trace_elements_transitions) {
+    // Tracing elements transitions is the job of the runtime.
+    assembler->Goto(&miss);
+  } else {
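+    // Attempt the elements-kind transition and the element store inline;
+    // either step jumps to |miss| (and thus the runtime) if it cannot be
+    // handled here.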
+    assembler->TransitionElementsKind(receiver, map, from_kind(), to_kind(),
+                                      is_jsarray(), &miss);
+    assembler->EmitElementStore(receiver, key, value, is_jsarray(), to_kind(),
+                                store_mode(), &miss);
+    assembler->Return(value);
+  }
+
+  assembler->Bind(&miss);
+  {
+    assembler->Comment("Miss");
+    assembler->TailCallRuntime(Runtime::kElementsTransitionAndStoreIC_Miss,
+                               context, receiver, key, value, map, slot,
+                               vector);
+  }
+}
+
 void AllocateHeapNumberStub::GenerateAssembly(
     CodeStubAssembler* assembler) const {
   typedef compiler::Node Node;
@@ -599,9 +733,8 @@
         // Check if the {rhs} is a HeapNumber.
         Label if_rhsisnumber(assembler),
             if_rhsisnotnumber(assembler, Label::kDeferred);
-        Node* number_map = assembler->HeapNumberMapConstant();
-        assembler->Branch(assembler->WordEqual(rhs_map, number_map),
-                          &if_rhsisnumber, &if_rhsisnotnumber);
+        assembler->Branch(assembler->IsHeapNumberMap(rhs_map), &if_rhsisnumber,
+                          &if_rhsisnotnumber);
 
         assembler->Bind(&if_rhsisnumber);
         {
@@ -618,9 +751,7 @@
           // Check if the {rhs} is a String.
           Label if_rhsisstring(assembler, Label::kDeferred),
               if_rhsisnotstring(assembler, Label::kDeferred);
-          assembler->Branch(assembler->Int32LessThan(
-                                rhs_instance_type,
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+          assembler->Branch(assembler->IsStringInstanceType(rhs_instance_type),
                             &if_rhsisstring, &if_rhsisnotstring);
 
           assembler->Bind(&if_rhsisstring);
@@ -636,9 +767,7 @@
             Label if_rhsisreceiver(assembler, Label::kDeferred),
                 if_rhsisnotreceiver(assembler, Label::kDeferred);
             assembler->Branch(
-                assembler->Int32LessThanOrEqual(
-                    assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                    rhs_instance_type),
+                assembler->IsJSReceiverInstanceType(rhs_instance_type),
                 &if_rhsisreceiver, &if_rhsisnotreceiver);
 
             assembler->Bind(&if_rhsisreceiver);
@@ -670,9 +799,7 @@
 
       // Check if {lhs} is a String.
       Label if_lhsisstring(assembler), if_lhsisnotstring(assembler);
-      assembler->Branch(assembler->Int32LessThan(
-                            lhs_instance_type,
-                            assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+      assembler->Branch(assembler->IsStringInstanceType(lhs_instance_type),
                         &if_lhsisstring, &if_lhsisnotstring);
 
       assembler->Bind(&if_lhsisstring);
@@ -714,9 +841,7 @@
             Label if_lhsisreceiver(assembler, Label::kDeferred),
                 if_lhsisnotreceiver(assembler, Label::kDeferred);
             assembler->Branch(
-                assembler->Int32LessThanOrEqual(
-                    assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                    lhs_instance_type),
+                assembler->IsJSReceiverInstanceType(lhs_instance_type),
                 &if_lhsisreceiver, &if_lhsisnotreceiver);
 
             assembler->Bind(&if_lhsisreceiver);
@@ -746,9 +871,7 @@
 
           // Check if {rhs} is a String.
           Label if_rhsisstring(assembler), if_rhsisnotstring(assembler);
-          assembler->Branch(assembler->Int32LessThan(
-                                rhs_instance_type,
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+          assembler->Branch(assembler->IsStringInstanceType(rhs_instance_type),
                             &if_rhsisstring, &if_rhsisnotstring);
 
           assembler->Bind(&if_rhsisstring);
@@ -791,9 +914,7 @@
                 Label if_rhsisreceiver(assembler, Label::kDeferred),
                     if_rhsisnotreceiver(assembler, Label::kDeferred);
                 assembler->Branch(
-                    assembler->Int32LessThanOrEqual(
-                        assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                        rhs_instance_type),
+                    assembler->IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);
 
                 assembler->Bind(&if_rhsisreceiver);
@@ -822,9 +943,7 @@
               Label if_lhsisreceiver(assembler, Label::kDeferred),
                   if_lhsisnotreceiver(assembler);
               assembler->Branch(
-                  assembler->Int32LessThanOrEqual(
-                      assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                      lhs_instance_type),
+                  assembler->IsJSReceiverInstanceType(lhs_instance_type),
                   &if_lhsisreceiver, &if_lhsisnotreceiver);
 
               assembler->Bind(&if_lhsisreceiver);
@@ -842,9 +961,7 @@
                 Label if_rhsisreceiver(assembler, Label::kDeferred),
                     if_rhsisnotreceiver(assembler, Label::kDeferred);
                 assembler->Branch(
-                    assembler->Int32LessThanOrEqual(
-                        assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                        rhs_instance_type),
+                    assembler->IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);
 
                 assembler->Bind(&if_rhsisreceiver);
@@ -917,7 +1034,7 @@
 
   // Shared entry for floating point addition.
   Label do_fadd(assembler), end(assembler),
-      call_add_stub(assembler, Label::kDeferred);
+      do_add_any(assembler, Label::kDeferred), call_add_stub(assembler);
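+  // do_add_any records generic (kAny) type feedback before falling through
+  // to the Add stub call; the string fast path below records kString
+  // feedback and jumps straight to call_add_stub.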
   Variable var_fadd_lhs(assembler, MachineRepresentation::kFloat64),
       var_fadd_rhs(assembler, MachineRepresentation::kFloat64),
       var_type_feedback(assembler, MachineRepresentation::kWord32),
@@ -965,9 +1082,7 @@
       Node* rhs_map = assembler->LoadMap(rhs);
 
       // Check if the {rhs} is a HeapNumber.
-      assembler->GotoUnless(
-          assembler->WordEqual(rhs_map, assembler->HeapNumberMapConstant()),
-          &call_add_stub);
+      assembler->GotoUnless(assembler->IsHeapNumberMap(rhs_map), &do_add_any);
 
       var_fadd_lhs.Bind(assembler->SmiToFloat64(lhs));
       var_fadd_rhs.Bind(assembler->LoadHeapNumberValue(rhs));
@@ -977,14 +1092,14 @@
 
   assembler->Bind(&if_lhsisnotsmi);
   {
+    Label check_string(assembler);
+
     // Load the map of {lhs}.
     Node* lhs_map = assembler->LoadMap(lhs);
 
     // Check if {lhs} is a HeapNumber.
     Label if_lhsisnumber(assembler), if_lhsisnotnumber(assembler);
-    assembler->GotoUnless(
-        assembler->WordEqual(lhs_map, assembler->HeapNumberMapConstant()),
-        &call_add_stub);
+    assembler->GotoUnless(assembler->IsHeapNumberMap(lhs_map), &check_string);
 
     // Check if the {rhs} is Smi.
     Label if_rhsissmi(assembler), if_rhsisnotsmi(assembler);
@@ -1003,14 +1118,34 @@
       Node* rhs_map = assembler->LoadMap(rhs);
 
       // Check if the {rhs} is a HeapNumber.
-      Node* number_map = assembler->HeapNumberMapConstant();
-      assembler->GotoUnless(assembler->WordEqual(rhs_map, number_map),
-                            &call_add_stub);
+      assembler->GotoUnless(assembler->IsHeapNumberMap(rhs_map), &do_add_any);
 
       var_fadd_lhs.Bind(assembler->LoadHeapNumberValue(lhs));
       var_fadd_rhs.Bind(assembler->LoadHeapNumberValue(rhs));
       assembler->Goto(&do_fadd);
     }
+
+    assembler->Bind(&check_string);
+    {
+      // Check if the {rhs} is a Smi, and exit the string check early if it is.
+      assembler->GotoIf(assembler->WordIsSmi(rhs), &do_add_any);
+
+      Node* lhs_instance_type = assembler->LoadMapInstanceType(lhs_map);
+
+      // Exit unless {lhs} is a String.
+      assembler->GotoUnless(assembler->IsStringInstanceType(lhs_instance_type),
+                            &do_add_any);
+
+      Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
+
+      // Exit unless {rhs} is a String.
+      assembler->GotoUnless(assembler->IsStringInstanceType(rhs_instance_type),
+                            &do_add_any);
+
+      var_type_feedback.Bind(
+          assembler->Int32Constant(BinaryOperationFeedback::kString));
+      assembler->Goto(&call_add_stub);
+    }
   }
 
   assembler->Bind(&do_fadd);
@@ -1024,10 +1159,15 @@
     assembler->Goto(&end);
   }
 
-  assembler->Bind(&call_add_stub);
+  assembler->Bind(&do_add_any);
   {
     var_type_feedback.Bind(
         assembler->Int32Constant(BinaryOperationFeedback::kAny));
+    assembler->Goto(&call_add_stub);
+  }
+
+  assembler->Bind(&call_add_stub);
+  {
     Callable callable = CodeFactory::Add(assembler->isolate());
     var_result.Bind(assembler->CallStub(callable, context, lhs, rhs));
     assembler->Goto(&end);
@@ -1111,9 +1251,8 @@
         // Check if {rhs} is a HeapNumber.
         Label if_rhsisnumber(assembler),
             if_rhsisnotnumber(assembler, Label::kDeferred);
-        Node* number_map = assembler->HeapNumberMapConstant();
-        assembler->Branch(assembler->WordEqual(rhs_map, number_map),
-                          &if_rhsisnumber, &if_rhsisnotnumber);
+        assembler->Branch(assembler->IsHeapNumberMap(rhs_map), &if_rhsisnumber,
+                          &if_rhsisnotnumber);
 
         assembler->Bind(&if_rhsisnumber);
         {
@@ -1274,9 +1413,8 @@
       Node* rhs_map = assembler->LoadMap(rhs);
 
       // Check if {rhs} is a HeapNumber.
-      assembler->GotoUnless(
-          assembler->WordEqual(rhs_map, assembler->HeapNumberMapConstant()),
-          &call_subtract_stub);
+      assembler->GotoUnless(assembler->IsHeapNumberMap(rhs_map),
+                            &call_subtract_stub);
 
       // Perform a floating point subtraction.
       var_fsub_lhs.Bind(assembler->SmiToFloat64(lhs));
@@ -1291,9 +1429,8 @@
     Node* lhs_map = assembler->LoadMap(lhs);
 
     // Check if the {lhs} is a HeapNumber.
-    assembler->GotoUnless(
-        assembler->WordEqual(lhs_map, assembler->HeapNumberMapConstant()),
-        &call_subtract_stub);
+    assembler->GotoUnless(assembler->IsHeapNumberMap(lhs_map),
+                          &call_subtract_stub);
 
     // Check if the {rhs} is a Smi.
     Label if_rhsissmi(assembler), if_rhsisnotsmi(assembler);
@@ -1313,9 +1450,8 @@
       Node* rhs_map = assembler->LoadMap(rhs);
 
       // Check if the {rhs} is a HeapNumber.
-      assembler->GotoUnless(
-          assembler->WordEqual(rhs_map, assembler->HeapNumberMapConstant()),
-          &call_subtract_stub);
+      assembler->GotoUnless(assembler->IsHeapNumberMap(rhs_map),
+                            &call_subtract_stub);
 
       // Perform a floating point subtraction.
       var_fsub_lhs.Bind(assembler->LoadHeapNumberValue(lhs));
@@ -1713,7 +1849,7 @@
         Node* untagged_result =
             assembler->Int32Div(untagged_dividend, untagged_divisor);
         Node* truncated =
-            assembler->IntPtrMul(untagged_result, untagged_divisor);
+            assembler->Int32Mul(untagged_result, untagged_divisor);
         // Do floating point division if the remainder is not 0.
         assembler->GotoIf(
             assembler->Word32NotEqual(untagged_dividend, truncated), &bailout);
@@ -1916,7 +2052,7 @@
 
       Node* untagged_result =
           assembler->Int32Div(untagged_dividend, untagged_divisor);
-      Node* truncated = assembler->IntPtrMul(untagged_result, untagged_divisor);
+      Node* truncated = assembler->Int32Mul(untagged_result, untagged_divisor);
       // Do floating point division if the remainder is not 0.
       assembler->GotoIf(assembler->Word32NotEqual(untagged_dividend, truncated),
                         &bailout);
@@ -2441,8 +2577,7 @@
       Label if_valueisnumber(assembler),
           if_valuenotnumber(assembler, Label::kDeferred);
       Node* value_map = assembler->LoadMap(value);
-      Node* number_map = assembler->HeapNumberMapConstant();
-      assembler->Branch(assembler->WordEqual(value_map, number_map),
+      assembler->Branch(assembler->IsHeapNumberMap(value_map),
                         &if_valueisnumber, &if_valuenotnumber);
 
       assembler->Bind(&if_valueisnumber);
@@ -2545,8 +2680,7 @@
       Label if_valueisnumber(assembler),
           if_valuenotnumber(assembler, Label::kDeferred);
       Node* value_map = assembler->LoadMap(value);
-      Node* number_map = assembler->HeapNumberMapConstant();
-      assembler->Branch(assembler->WordEqual(value_map, number_map),
+      assembler->Branch(assembler->IsHeapNumberMap(value_map),
                         &if_valueisnumber, &if_valuenotnumber);
 
       assembler->Bind(&if_valueisnumber);
@@ -2587,6 +2721,15 @@
   return result_var.value();
 }
 
+// ES6 section 21.1.3.19 String.prototype.substring ( start, end )
+compiler::Node* SubStringStub::Generate(CodeStubAssembler* assembler,
+                                        compiler::Node* string,
+                                        compiler::Node* from,
+                                        compiler::Node* to,
+                                        compiler::Node* context) {
+  return assembler->SubString(context, string, from, to);
+}
+
 // ES6 section 7.1.13 ToObject (argument)
 void ToObjectStub::GenerateAssembly(CodeStubAssembler* assembler) const {
   typedef compiler::Node Node;
@@ -2601,43 +2744,38 @@
   Node* context = assembler->Parameter(Descriptor::kContext);
 
   Variable constructor_function_index_var(assembler,
-                                          MachineRepresentation::kWord32);
+                                          MachineType::PointerRepresentation());
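+  // The constructor function index is now tracked as an untagged IntPtr so
+  // it can feed LoadMapConstructorFunctionIndex and the INTPTR_PARAMETERS
+  // fixed-array load below directly.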
 
   assembler->Branch(assembler->WordIsSmi(object), &if_number, &if_notsmi);
 
   assembler->Bind(&if_notsmi);
   Node* map = assembler->LoadMap(object);
 
-  assembler->GotoIf(
-      assembler->WordEqual(map, assembler->HeapNumberMapConstant()),
-      &if_number);
+  assembler->GotoIf(assembler->IsHeapNumberMap(map), &if_number);
 
   Node* instance_type = assembler->LoadMapInstanceType(map);
-  assembler->GotoIf(
-      assembler->Int32GreaterThanOrEqual(
-          instance_type, assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE)),
-      &if_jsreceiver);
+  assembler->GotoIf(assembler->IsJSReceiverInstanceType(instance_type),
+                    &if_jsreceiver);
 
-  Node* constructor_function_index = assembler->LoadObjectField(
-      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
-      MachineType::Uint8());
-  assembler->GotoIf(
-      assembler->Word32Equal(
-          constructor_function_index,
-          assembler->Int32Constant(Map::kNoConstructorFunctionIndex)),
-      &if_noconstructor);
+  Node* constructor_function_index =
+      assembler->LoadMapConstructorFunctionIndex(map);
+  assembler->GotoIf(assembler->WordEqual(constructor_function_index,
+                                         assembler->IntPtrConstant(
+                                             Map::kNoConstructorFunctionIndex)),
+                    &if_noconstructor);
   constructor_function_index_var.Bind(constructor_function_index);
   assembler->Goto(&if_wrapjsvalue);
 
   assembler->Bind(&if_number);
   constructor_function_index_var.Bind(
-      assembler->Int32Constant(Context::NUMBER_FUNCTION_INDEX));
+      assembler->IntPtrConstant(Context::NUMBER_FUNCTION_INDEX));
   assembler->Goto(&if_wrapjsvalue);
 
   assembler->Bind(&if_wrapjsvalue);
   Node* native_context = assembler->LoadNativeContext(context);
   Node* constructor = assembler->LoadFixedArrayElement(
-      native_context, constructor_function_index_var.value());
+      native_context, constructor_function_index_var.value(), 0,
+      CodeStubAssembler::INTPTR_PARAMETERS);
   Node* initial_map = assembler->LoadObjectField(
       constructor, JSFunction::kPrototypeOrInitialMapOffset);
   Node* js_value = assembler->Allocate(JSValue::kSize);
@@ -2679,9 +2817,7 @@
 
   Node* map = assembler->LoadMap(value);
 
-  assembler->GotoIf(
-      assembler->WordEqual(map, assembler->HeapNumberMapConstant()),
-      &return_number);
+  assembler->GotoIf(assembler->IsHeapNumberMap(map), &return_number);
 
   Node* instance_type = assembler->LoadMapInstanceType(map);
 
@@ -2703,15 +2839,11 @@
                                                assembler->Int32Constant(0)),
                         &return_undefined);
 
-  assembler->GotoIf(
-      assembler->Int32GreaterThanOrEqual(
-          instance_type, assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE)),
-      &return_object);
+  assembler->GotoIf(assembler->IsJSReceiverInstanceType(instance_type),
+                    &return_object);
 
-  assembler->GotoIf(
-      assembler->Int32LessThan(instance_type,
-                               assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
-      &return_string);
+  assembler->GotoIf(assembler->IsStringInstanceType(instance_type),
+                    &return_string);
 
 #define SIMD128_BRANCH(TYPE, Type, type, lane_count, lane_type)    \
   Label return_##type(assembler);                                  \
@@ -2908,11 +3040,10 @@
         Node* rhs_map = assembler->LoadMap(rhs);
 
         // Check if the {rhs} is a HeapNumber.
-        Node* number_map = assembler->HeapNumberMapConstant();
         Label if_rhsisnumber(assembler),
             if_rhsisnotnumber(assembler, Label::kDeferred);
-        assembler->Branch(assembler->WordEqual(rhs_map, number_map),
-                          &if_rhsisnumber, &if_rhsisnotnumber);
+        assembler->Branch(assembler->IsHeapNumberMap(rhs_map), &if_rhsisnumber,
+                          &if_rhsisnotnumber);
 
         assembler->Bind(&if_rhsisnumber);
         {
@@ -3028,9 +3159,7 @@
           // Check if {lhs} is a String.
           Label if_lhsisstring(assembler),
               if_lhsisnotstring(assembler, Label::kDeferred);
-          assembler->Branch(assembler->Int32LessThan(
-                                lhs_instance_type,
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+          assembler->Branch(assembler->IsStringInstanceType(lhs_instance_type),
                             &if_lhsisstring, &if_lhsisnotstring);
 
           assembler->Bind(&if_lhsisstring);
@@ -3041,10 +3170,9 @@
             // Check if {rhs} is also a String.
             Label if_rhsisstring(assembler, Label::kDeferred),
                 if_rhsisnotstring(assembler, Label::kDeferred);
-            assembler->Branch(assembler->Int32LessThan(
-                                  rhs_instance_type, assembler->Int32Constant(
-                                                         FIRST_NONSTRING_TYPE)),
-                              &if_rhsisstring, &if_rhsisnotstring);
+            assembler->Branch(
+                assembler->IsStringInstanceType(rhs_instance_type),
+                &if_rhsisstring, &if_rhsisnotstring);
 
             assembler->Bind(&if_rhsisstring);
             {
@@ -3088,9 +3216,7 @@
               Label if_rhsisreceiver(assembler, Label::kDeferred),
                   if_rhsisnotreceiver(assembler, Label::kDeferred);
               assembler->Branch(
-                  assembler->Int32LessThanOrEqual(
-                      assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                      rhs_instance_type),
+                  assembler->IsJSReceiverInstanceType(rhs_instance_type),
                   &if_rhsisreceiver, &if_rhsisnotreceiver);
 
               assembler->Bind(&if_rhsisreceiver);
@@ -3122,9 +3248,7 @@
             Label if_lhsisreceiver(assembler, Label::kDeferred),
                 if_lhsisnotreceiver(assembler, Label::kDeferred);
             assembler->Branch(
-                assembler->Int32LessThanOrEqual(
-                    assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                    lhs_instance_type),
+                assembler->IsJSReceiverInstanceType(lhs_instance_type),
                 &if_lhsisreceiver, &if_lhsisnotreceiver);
 
             assembler->Bind(&if_lhsisreceiver);
@@ -3218,10 +3342,9 @@
     Node* value_map = assembler->LoadMap(value);
 
     // Check if {value} (and therefore {rhs}) is a HeapNumber.
-    Node* number_map = assembler->HeapNumberMapConstant();
     Label if_valueisnumber(assembler), if_valueisnotnumber(assembler);
-    assembler->Branch(assembler->WordEqual(value_map, number_map),
-                      &if_valueisnumber, &if_valueisnotnumber);
+    assembler->Branch(assembler->IsHeapNumberMap(value_map), &if_valueisnumber,
+                      &if_valueisnotnumber);
 
     assembler->Bind(&if_valueisnumber);
     {
@@ -3342,10 +3465,9 @@
             // Check if the {rhs} is a String.
             Label if_rhsisstring(assembler, Label::kDeferred),
                 if_rhsisnotstring(assembler);
-            assembler->Branch(assembler->Int32LessThan(
-                                  rhs_instance_type, assembler->Int32Constant(
-                                                         FIRST_NONSTRING_TYPE)),
-                              &if_rhsisstring, &if_rhsisnotstring);
+            assembler->Branch(
+                assembler->IsStringInstanceType(rhs_instance_type),
+                &if_rhsisstring, &if_rhsisnotstring);
 
             assembler->Bind(&if_rhsisstring);
             {
@@ -3358,9 +3480,8 @@
             assembler->Bind(&if_rhsisnotstring);
             {
               // Check if the {rhs} is a Boolean.
-              Node* boolean_map = assembler->BooleanMapConstant();
               Label if_rhsisboolean(assembler), if_rhsisnotboolean(assembler);
-              assembler->Branch(assembler->WordEqual(rhs_map, boolean_map),
+              assembler->Branch(assembler->IsBooleanMap(rhs_map),
                                 &if_rhsisboolean, &if_rhsisnotboolean);
 
               assembler->Bind(&if_rhsisboolean);
@@ -3378,9 +3499,7 @@
                 Label if_rhsisreceiver(assembler, Label::kDeferred),
                     if_rhsisnotreceiver(assembler);
                 assembler->Branch(
-                    assembler->Int32LessThanOrEqual(
-                        assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                        rhs_instance_type),
+                    assembler->IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);
 
                 assembler->Bind(&if_rhsisreceiver);
@@ -3462,10 +3581,9 @@
             // Check if {rhs} is also a String.
             Label if_rhsisstring(assembler, Label::kDeferred),
                 if_rhsisnotstring(assembler);
-            assembler->Branch(assembler->Int32LessThan(
-                                  rhs_instance_type, assembler->Int32Constant(
-                                                         FIRST_NONSTRING_TYPE)),
-                              &if_rhsisstring, &if_rhsisnotstring);
+            assembler->Branch(
+                assembler->IsStringInstanceType(rhs_instance_type),
+                &if_rhsisstring, &if_rhsisnotstring);
 
             assembler->Bind(&if_rhsisstring);
             {
@@ -3514,9 +3632,7 @@
               Label if_rhsisstring(assembler, Label::kDeferred),
                   if_rhsisnotstring(assembler);
               assembler->Branch(
-                  assembler->Int32LessThan(
-                      rhs_instance_type,
-                      assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+                  assembler->IsStringInstanceType(rhs_instance_type),
                   &if_rhsisstring, &if_rhsisnotstring);
 
               assembler->Bind(&if_rhsisstring);
@@ -3534,9 +3650,7 @@
                     if_rhsisnotreceiver(assembler);
                 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
                 assembler->Branch(
-                    assembler->Int32LessThanOrEqual(
-                        assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                        rhs_instance_type),
+                    assembler->IsJSReceiverInstanceType(rhs_instance_type),
                     &if_rhsisreceiver, &if_rhsisnotreceiver);
 
                 assembler->Bind(&if_rhsisreceiver);
@@ -3556,8 +3670,7 @@
                   // Check if {rhs} is a Boolean.
                   Label if_rhsisboolean(assembler),
                       if_rhsisnotboolean(assembler);
-                  Node* boolean_map = assembler->BooleanMapConstant();
-                  assembler->Branch(assembler->WordEqual(rhs_map, boolean_map),
+                  assembler->Branch(assembler->IsBooleanMap(rhs_map),
                                     &if_rhsisboolean, &if_rhsisnotboolean);
 
                   assembler->Bind(&if_rhsisboolean);
@@ -3625,9 +3738,7 @@
             Label if_rhsisreceiver(assembler), if_rhsisnotreceiver(assembler);
             STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
             assembler->Branch(
-                assembler->Int32LessThanOrEqual(
-                    assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                    rhs_instance_type),
+                assembler->IsJSReceiverInstanceType(rhs_instance_type),
                 &if_rhsisreceiver, &if_rhsisnotreceiver);
 
             assembler->Bind(&if_rhsisreceiver);
@@ -3672,9 +3783,7 @@
               Label if_rhsisreceiver(assembler), if_rhsisnotreceiver(assembler);
               STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
               assembler->Branch(
-                  assembler->Int32LessThanOrEqual(
-                      assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                      rhs_instance_type),
+                  assembler->IsJSReceiverInstanceType(rhs_instance_type),
                   &if_rhsisreceiver, &if_rhsisnotreceiver);
 
               assembler->Bind(&if_rhsisreceiver);
@@ -3702,9 +3811,7 @@
             Label if_rhsisreceiver(assembler), if_rhsisnotreceiver(assembler);
             STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
             assembler->Branch(
-                assembler->Int32LessThanOrEqual(
-                    assembler->Int32Constant(FIRST_JS_RECEIVER_TYPE),
-                    rhs_instance_type),
+                assembler->IsJSReceiverInstanceType(rhs_instance_type),
                 &if_rhsisreceiver, &if_rhsisnotreceiver);
 
             assembler->Bind(&if_rhsisreceiver);
@@ -3940,9 +4047,7 @@
 
           // Check if {lhs} is a String.
           Label if_lhsisstring(assembler), if_lhsisnotstring(assembler);
-          assembler->Branch(assembler->Int32LessThan(
-                                lhs_instance_type,
-                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+          assembler->Branch(assembler->IsStringInstanceType(lhs_instance_type),
                             &if_lhsisstring, &if_lhsisnotstring);
 
           assembler->Bind(&if_lhsisstring);
@@ -3953,10 +4058,9 @@
             // Check if {rhs} is also a String.
             Label if_rhsisstring(assembler, Label::kDeferred),
                 if_rhsisnotstring(assembler);
-            assembler->Branch(assembler->Int32LessThan(
-                                  rhs_instance_type, assembler->Int32Constant(
-                                                         FIRST_NONSTRING_TYPE)),
-                              &if_rhsisstring, &if_rhsisnotstring);
+            assembler->Branch(
+                assembler->IsStringInstanceType(rhs_instance_type),
+                &if_rhsisstring, &if_rhsisnotstring);
 
             assembler->Bind(&if_rhsisstring);
             {
@@ -4057,365 +4161,6 @@
   return result.value();
 }
 
-void GenerateStringRelationalComparison(CodeStubAssembler* assembler,
-                                        RelationalComparisonMode mode) {
-  typedef CodeStubAssembler::Label Label;
-  typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
-
-  Node* lhs = assembler->Parameter(0);
-  Node* rhs = assembler->Parameter(1);
-  Node* context = assembler->Parameter(2);
-
-  Label if_less(assembler), if_equal(assembler), if_greater(assembler);
-
-  // Fast check to see if {lhs} and {rhs} refer to the same String object.
-  Label if_same(assembler), if_notsame(assembler);
-  assembler->Branch(assembler->WordEqual(lhs, rhs), &if_same, &if_notsame);
-
-  assembler->Bind(&if_same);
-  assembler->Goto(&if_equal);
-
-  assembler->Bind(&if_notsame);
-  {
-    // Load instance types of {lhs} and {rhs}.
-    Node* lhs_instance_type = assembler->LoadInstanceType(lhs);
-    Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
-
-    // Combine the instance types into a single 16-bit value, so we can check
-    // both of them at once.
-    Node* both_instance_types = assembler->Word32Or(
-        lhs_instance_type,
-        assembler->Word32Shl(rhs_instance_type, assembler->Int32Constant(8)));
-
-    // Check that both {lhs} and {rhs} are flat one-byte strings.
-    int const kBothSeqOneByteStringMask =
-        kStringEncodingMask | kStringRepresentationMask |
-        ((kStringEncodingMask | kStringRepresentationMask) << 8);
-    int const kBothSeqOneByteStringTag =
-        kOneByteStringTag | kSeqStringTag |
-        ((kOneByteStringTag | kSeqStringTag) << 8);
-    Label if_bothonebyteseqstrings(assembler),
-        if_notbothonebyteseqstrings(assembler);
-    assembler->Branch(assembler->Word32Equal(
-                          assembler->Word32And(both_instance_types,
-                                               assembler->Int32Constant(
-                                                   kBothSeqOneByteStringMask)),
-                          assembler->Int32Constant(kBothSeqOneByteStringTag)),
-                      &if_bothonebyteseqstrings, &if_notbothonebyteseqstrings);
-
-    assembler->Bind(&if_bothonebyteseqstrings);
-    {
-      // Load the length of {lhs} and {rhs}.
-      Node* lhs_length = assembler->LoadStringLength(lhs);
-      Node* rhs_length = assembler->LoadStringLength(rhs);
-
-      // Determine the minimum length.
-      Node* length = assembler->SmiMin(lhs_length, rhs_length);
-
-      // Compute the effective offset of the first character.
-      Node* begin = assembler->IntPtrConstant(SeqOneByteString::kHeaderSize -
-                                              kHeapObjectTag);
-
-      // Compute the first offset after the string from the length.
-      Node* end = assembler->IntPtrAdd(begin, assembler->SmiUntag(length));
-
-      // Loop over the {lhs} and {rhs} strings to see if they are equal.
-      Variable var_offset(assembler, MachineType::PointerRepresentation());
-      Label loop(assembler, &var_offset);
-      var_offset.Bind(begin);
-      assembler->Goto(&loop);
-      assembler->Bind(&loop);
-      {
-        // Check if {offset} equals {end}.
-        Node* offset = var_offset.value();
-        Label if_done(assembler), if_notdone(assembler);
-        assembler->Branch(assembler->WordEqual(offset, end), &if_done,
-                          &if_notdone);
-
-        assembler->Bind(&if_notdone);
-        {
-          // Load the next characters from {lhs} and {rhs}.
-          Node* lhs_value = assembler->Load(MachineType::Uint8(), lhs, offset);
-          Node* rhs_value = assembler->Load(MachineType::Uint8(), rhs, offset);
-
-          // Check if the characters match.
-          Label if_valueissame(assembler), if_valueisnotsame(assembler);
-          assembler->Branch(assembler->Word32Equal(lhs_value, rhs_value),
-                            &if_valueissame, &if_valueisnotsame);
-
-          assembler->Bind(&if_valueissame);
-          {
-            // Advance to next character.
-            var_offset.Bind(
-                assembler->IntPtrAdd(offset, assembler->IntPtrConstant(1)));
-          }
-          assembler->Goto(&loop);
-
-          assembler->Bind(&if_valueisnotsame);
-          assembler->BranchIf(assembler->Uint32LessThan(lhs_value, rhs_value),
-                              &if_less, &if_greater);
-        }
-
-        assembler->Bind(&if_done);
-        {
-          // All characters up to the min length are equal, decide based on
-          // string length.
-          Label if_lengthisequal(assembler), if_lengthisnotequal(assembler);
-          assembler->Branch(assembler->SmiEqual(lhs_length, rhs_length),
-                            &if_lengthisequal, &if_lengthisnotequal);
-
-          assembler->Bind(&if_lengthisequal);
-          assembler->Goto(&if_equal);
-
-          assembler->Bind(&if_lengthisnotequal);
-          assembler->BranchIfSmiLessThan(lhs_length, rhs_length, &if_less,
-                                         &if_greater);
-        }
-      }
-    }
-
-    assembler->Bind(&if_notbothonebyteseqstrings);
-    {
-      // TODO(bmeurer): Add fast case support for flattened cons strings;
-      // also add support for two byte string relational comparisons.
-      switch (mode) {
-        case kLessThan:
-          assembler->TailCallRuntime(Runtime::kStringLessThan, context, lhs,
-                                     rhs);
-          break;
-        case kLessThanOrEqual:
-          assembler->TailCallRuntime(Runtime::kStringLessThanOrEqual, context,
-                                     lhs, rhs);
-          break;
-        case kGreaterThan:
-          assembler->TailCallRuntime(Runtime::kStringGreaterThan, context, lhs,
-                                     rhs);
-          break;
-        case kGreaterThanOrEqual:
-          assembler->TailCallRuntime(Runtime::kStringGreaterThanOrEqual,
-                                     context, lhs, rhs);
-          break;
-      }
-    }
-  }
-
-  assembler->Bind(&if_less);
-  switch (mode) {
-    case kLessThan:
-    case kLessThanOrEqual:
-      assembler->Return(assembler->BooleanConstant(true));
-      break;
-
-    case kGreaterThan:
-    case kGreaterThanOrEqual:
-      assembler->Return(assembler->BooleanConstant(false));
-      break;
-  }
-
-  assembler->Bind(&if_equal);
-  switch (mode) {
-    case kLessThan:
-    case kGreaterThan:
-      assembler->Return(assembler->BooleanConstant(false));
-      break;
-
-    case kLessThanOrEqual:
-    case kGreaterThanOrEqual:
-      assembler->Return(assembler->BooleanConstant(true));
-      break;
-  }
-
-  assembler->Bind(&if_greater);
-  switch (mode) {
-    case kLessThan:
-    case kLessThanOrEqual:
-      assembler->Return(assembler->BooleanConstant(false));
-      break;
-
-    case kGreaterThan:
-    case kGreaterThanOrEqual:
-      assembler->Return(assembler->BooleanConstant(true));
-      break;
-  }
-}
-
-void GenerateStringEqual(CodeStubAssembler* assembler, ResultMode mode) {
-  // Here's pseudo-code for the algorithm below in case of kDontNegateResult
-  // mode; for kNegateResult mode we properly negate the result.
-  //
-  // if (lhs == rhs) return true;
-  // if (lhs->length() != rhs->length()) return false;
-  // if (lhs->IsInternalizedString() && rhs->IsInternalizedString()) {
-  //   return false;
-  // }
-  // if (lhs->IsSeqOneByteString() && rhs->IsSeqOneByteString()) {
-  //   for (i = 0; i != lhs->length(); ++i) {
-  //     if (lhs[i] != rhs[i]) return false;
-  //   }
-  //   return true;
-  // }
-  // return %StringEqual(lhs, rhs);
-
-  typedef CodeStubAssembler::Label Label;
-  typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
-
-  Node* lhs = assembler->Parameter(0);
-  Node* rhs = assembler->Parameter(1);
-  Node* context = assembler->Parameter(2);
-
-  Label if_equal(assembler), if_notequal(assembler);
-
-  // Fast check to see if {lhs} and {rhs} refer to the same String object.
-  Label if_same(assembler), if_notsame(assembler);
-  assembler->Branch(assembler->WordEqual(lhs, rhs), &if_same, &if_notsame);
-
-  assembler->Bind(&if_same);
-  assembler->Goto(&if_equal);
-
-  assembler->Bind(&if_notsame);
-  {
-    // The {lhs} and {rhs} don't refer to the exact same String object.
-
-    // Load the length of {lhs} and {rhs}.
-    Node* lhs_length = assembler->LoadStringLength(lhs);
-    Node* rhs_length = assembler->LoadStringLength(rhs);
-
-    // Check if the lengths of {lhs} and {rhs} are equal.
-    Label if_lengthisequal(assembler), if_lengthisnotequal(assembler);
-    assembler->Branch(assembler->WordEqual(lhs_length, rhs_length),
-                      &if_lengthisequal, &if_lengthisnotequal);
-
-    assembler->Bind(&if_lengthisequal);
-    {
-      // Load instance types of {lhs} and {rhs}.
-      Node* lhs_instance_type = assembler->LoadInstanceType(lhs);
-      Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
-
-      // Combine the instance types into a single 16-bit value, so we can check
-      // both of them at once.
-      Node* both_instance_types = assembler->Word32Or(
-          lhs_instance_type,
-          assembler->Word32Shl(rhs_instance_type, assembler->Int32Constant(8)));
-
-      // Check if both {lhs} and {rhs} are internalized.
-      int const kBothInternalizedMask =
-          kIsNotInternalizedMask | (kIsNotInternalizedMask << 8);
-      int const kBothInternalizedTag =
-          kInternalizedTag | (kInternalizedTag << 8);
-      Label if_bothinternalized(assembler), if_notbothinternalized(assembler);
-      assembler->Branch(assembler->Word32Equal(
-                            assembler->Word32And(both_instance_types,
-                                                 assembler->Int32Constant(
-                                                     kBothInternalizedMask)),
-                            assembler->Int32Constant(kBothInternalizedTag)),
-                        &if_bothinternalized, &if_notbothinternalized);
-
-      assembler->Bind(&if_bothinternalized);
-      {
-        // Fast negative check for internalized-to-internalized equality.
-        assembler->Goto(&if_notequal);
-      }
-
-      assembler->Bind(&if_notbothinternalized);
-      {
-        // Check that both {lhs} and {rhs} are flat one-byte strings.
-        int const kBothSeqOneByteStringMask =
-            kStringEncodingMask | kStringRepresentationMask |
-            ((kStringEncodingMask | kStringRepresentationMask) << 8);
-        int const kBothSeqOneByteStringTag =
-            kOneByteStringTag | kSeqStringTag |
-            ((kOneByteStringTag | kSeqStringTag) << 8);
-        Label if_bothonebyteseqstrings(assembler),
-            if_notbothonebyteseqstrings(assembler);
-        assembler->Branch(
-            assembler->Word32Equal(
-                assembler->Word32And(
-                    both_instance_types,
-                    assembler->Int32Constant(kBothSeqOneByteStringMask)),
-                assembler->Int32Constant(kBothSeqOneByteStringTag)),
-            &if_bothonebyteseqstrings, &if_notbothonebyteseqstrings);
-
-        assembler->Bind(&if_bothonebyteseqstrings);
-        {
-          // Compute the effective offset of the first character.
-          Node* begin = assembler->IntPtrConstant(
-              SeqOneByteString::kHeaderSize - kHeapObjectTag);
-
-          // Compute the first offset after the string from the length.
-          Node* end =
-              assembler->IntPtrAdd(begin, assembler->SmiUntag(lhs_length));
-
-          // Loop over the {lhs} and {rhs} strings to see if they are equal.
-          Variable var_offset(assembler, MachineType::PointerRepresentation());
-          Label loop(assembler, &var_offset);
-          var_offset.Bind(begin);
-          assembler->Goto(&loop);
-          assembler->Bind(&loop);
-          {
-            // Check if {offset} equals {end}.
-            Node* offset = var_offset.value();
-            Label if_done(assembler), if_notdone(assembler);
-            assembler->Branch(assembler->WordEqual(offset, end), &if_done,
-                              &if_notdone);
-
-            assembler->Bind(&if_notdone);
-            {
-              // Load the next characters from {lhs} and {rhs}.
-              Node* lhs_value =
-                  assembler->Load(MachineType::Uint8(), lhs, offset);
-              Node* rhs_value =
-                  assembler->Load(MachineType::Uint8(), rhs, offset);
-
-              // Check if the characters match.
-              Label if_valueissame(assembler), if_valueisnotsame(assembler);
-              assembler->Branch(assembler->Word32Equal(lhs_value, rhs_value),
-                                &if_valueissame, &if_valueisnotsame);
-
-              assembler->Bind(&if_valueissame);
-              {
-                // Advance to next character.
-                var_offset.Bind(
-                    assembler->IntPtrAdd(offset, assembler->IntPtrConstant(1)));
-              }
-              assembler->Goto(&loop);
-
-              assembler->Bind(&if_valueisnotsame);
-              assembler->Goto(&if_notequal);
-            }
-
-            assembler->Bind(&if_done);
-            assembler->Goto(&if_equal);
-          }
-        }
-
-        assembler->Bind(&if_notbothonebyteseqstrings);
-        {
-          // TODO(bmeurer): Add fast case support for flattened cons strings;
-          // also add support for two byte string equality checks.
-          Runtime::FunctionId function_id = (mode == kDontNegateResult)
-                                                ? Runtime::kStringEqual
-                                                : Runtime::kStringNotEqual;
-          assembler->TailCallRuntime(function_id, context, lhs, rhs);
-        }
-      }
-    }
-
-    assembler->Bind(&if_lengthisnotequal);
-    {
-      // Mismatch in length of {lhs} and {rhs}, cannot be equal.
-      assembler->Goto(&if_notequal);
-    }
-  }
-
-  assembler->Bind(&if_equal);
-  assembler->Return(assembler->BooleanConstant(mode == kDontNegateResult));
-
-  assembler->Bind(&if_notequal);
-  assembler->Return(assembler->BooleanConstant(mode == kNegateResult));
-}
-
 }  // namespace
 
 void LoadApiGetterStub::GenerateAssembly(CodeStubAssembler* assembler) const {
@@ -4427,13 +4172,226 @@
   Node* holder = receiver;
   Node* map = assembler->LoadMap(receiver);
   Node* descriptors = assembler->LoadMapDescriptors(map);
-  Node* offset =
-      assembler->Int32Constant(DescriptorArray::ToValueIndex(index()));
-  Node* callback = assembler->LoadFixedArrayElement(descriptors, offset);
+  Node* value_index =
+      assembler->IntPtrConstant(DescriptorArray::ToValueIndex(index()));
+  Node* callback = assembler->LoadFixedArrayElement(
+      descriptors, value_index, 0, CodeStubAssembler::INTPTR_PARAMETERS);
   assembler->TailCallStub(CodeFactory::ApiGetter(isolate()), context, receiver,
                           holder, callback);
 }
 
+void StoreFieldStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  FieldIndex index = this->index();
+  Representation representation = this->representation();
+
+  assembler->Comment("StoreFieldStub: inobject=%d, offset=%d, rep=%s",
+                     index.is_inobject(), index.offset(),
+                     representation.Mnemonic());
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
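+  // As in StoreTransitionStub, preparing the value can only bail out to
+  // |miss| for non-tagged representations, so the miss handler is emitted
+  // conditionally below.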
+  Node* prepared_value =
+      assembler->PrepareValueForWrite(value, representation, &miss);
+  assembler->StoreNamedField(receiver, index, representation, prepared_value,
+                             false);
+  assembler->Return(value);
+
+  // Only stores to tagged fields never bail out.
+  if (!representation.IsTagged()) {
+    assembler->Bind(&miss);
+    {
+      assembler->Comment("Miss");
+      assembler->TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot,
+                                 vector, receiver, name);
+    }
+  }
+}
+
+void StoreGlobalStub::GenerateAssembly(CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  assembler->Comment(
+      "StoreGlobalStub: cell_type=%d, constant_type=%d, check_global=%d",
+      cell_type(), PropertyCellType::kConstantType == cell_type()
+                       ? static_cast<int>(constant_type())
+                       : -1,
+      check_global());
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* name = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
+  if (check_global()) {
+    // Check that the map of the global has not changed: use a placeholder map
+    // that will be replaced later with the global object's map.
+    Node* proxy_map = assembler->LoadMap(receiver);
+    Node* global = assembler->LoadObjectField(proxy_map, Map::kPrototypeOffset);
+    Node* map_cell = assembler->HeapConstant(isolate()->factory()->NewWeakCell(
+        StoreGlobalStub::global_map_placeholder(isolate())));
+    Node* expected_map = assembler->LoadWeakCellValue(map_cell);
+    Node* map = assembler->LoadMap(global);
+    assembler->GotoIf(assembler->WordNotEqual(expected_map, map), &miss);
+  }
+
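+  // The property cell is likewise referenced via a placeholder weak cell
+  // that will be replaced later with the cell for the actual property; if
+  // the weak cell has been cleared, the Smi check below goes to the runtime.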
+  Node* weak_cell = assembler->HeapConstant(isolate()->factory()->NewWeakCell(
+      StoreGlobalStub::property_cell_placeholder(isolate())));
+  Node* cell = assembler->LoadWeakCellValue(weak_cell);
+  assembler->GotoIf(assembler->WordIsSmi(cell), &miss);
+
+  // Load the payload of the global parameter cell. A hole indicates that the
+  // cell has been invalidated and that the store must be handled by the
+  // runtime.
+  Node* cell_contents =
+      assembler->LoadObjectField(cell, PropertyCell::kValueOffset);
+
+  PropertyCellType cell_type = this->cell_type();
+  if (cell_type == PropertyCellType::kConstant ||
+      cell_type == PropertyCellType::kUndefined) {
+    // This is always valid for all states a cell can be in.
+    assembler->GotoIf(assembler->WordNotEqual(cell_contents, value), &miss);
+  } else {
+    assembler->GotoIf(assembler->IsTheHole(cell_contents), &miss);
+
+    // When dealing with constant types, the type may be allowed to change, as
+    // long as optimized code remains valid.
+    bool value_is_smi = false;
+    if (cell_type == PropertyCellType::kConstantType) {
+      switch (constant_type()) {
+        case PropertyCellConstantType::kSmi:
+          assembler->GotoUnless(assembler->WordIsSmi(value), &miss);
+          value_is_smi = true;
+          break;
+        case PropertyCellConstantType::kStableMap: {
+          // It is sufficient here to check that the value and cell contents
+          // have identical maps, no matter if they are stable or not or if they
+          // are the maps that were originally in the cell or not. Optimized
+          // code deopts when a cell has an unstable map, and if it has a
+          // dependency on a stable map, it deopts if that map destabilizes.
+          assembler->GotoIf(assembler->WordIsSmi(value), &miss);
+          assembler->GotoIf(assembler->WordIsSmi(cell_contents), &miss);
+          Node* expected_map = assembler->LoadMap(cell_contents);
+          Node* map = assembler->LoadMap(value);
+          assembler->GotoIf(assembler->WordNotEqual(expected_map, map), &miss);
+          break;
+        }
+      }
+    }
+    if (value_is_smi) {
+      assembler->StoreObjectFieldNoWriteBarrier(
+          cell, PropertyCell::kValueOffset, value);
+    } else {
+      assembler->StoreObjectField(cell, PropertyCell::kValueOffset, value);
+    }
+  }
+
+  assembler->Return(value);
+
+  assembler->Bind(&miss);
+  {
+    assembler->Comment("Miss");
+    assembler->TailCallRuntime(Runtime::kStoreIC_Miss, context, value, slot,
+                               vector, receiver, name);
+  }
+}
+
+void KeyedLoadSloppyArgumentsStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* key = assembler->Parameter(Descriptor::kName);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
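+  // LoadKeyedSloppyArguments bails out to |miss| whenever the key cannot be
+  // handled inline, deferring to Runtime::kKeyedLoadIC_Miss.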
+  Node* result = assembler->LoadKeyedSloppyArguments(receiver, key, &miss);
+  assembler->Return(result);
+
+  assembler->Bind(&miss);
+  {
+    assembler->Comment("Miss");
+    assembler->TailCallRuntime(Runtime::kKeyedLoadIC_Miss, context, receiver,
+                               key, slot, vector);
+  }
+}
+
+void KeyedStoreSloppyArgumentsStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* key = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
+  assembler->StoreKeyedSloppyArguments(receiver, key, value, &miss);
+  assembler->Return(value);
+
+  assembler->Bind(&miss);
+  {
+    assembler->Comment("Miss");
+    assembler->TailCallRuntime(Runtime::kKeyedStoreIC_Miss, context, value,
+                               slot, vector, receiver, key);
+  }
+}
+
+void LoadScriptContextFieldStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+
+  assembler->Comment("LoadScriptContextFieldStub: context_index=%d, slot=%d",
+                     context_index(), slot_index());
+
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
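+  // Resolve the script context identified by context_index() and read the
+  // slot at slot_index() directly with an untagged index.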
+  Node* script_context = assembler->LoadScriptContext(context, context_index());
+  Node* result = assembler->LoadFixedArrayElement(
+      script_context, assembler->IntPtrConstant(slot_index()), 0,
+      CodeStubAssembler::INTPTR_PARAMETERS);
+  assembler->Return(result);
+}
+
+void StoreScriptContextFieldStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+
+  assembler->Comment("StoreScriptContextFieldStub: context_index=%d, slot=%d",
+                     context_index(), slot_index());
+
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Node* script_context = assembler->LoadScriptContext(context, context_index());
+  assembler->StoreFixedArrayElement(
+      script_context, assembler->IntPtrConstant(slot_index()), value,
+      UPDATE_WRITE_BARRIER, CodeStubAssembler::INTPTR_PARAMETERS);
+  assembler->Return(value);
+}
+
 // static
 compiler::Node* LessThanStub::Generate(CodeStubAssembler* assembler,
                                        compiler::Node* lhs, compiler::Node* rhs,
@@ -4499,33 +4457,6 @@
   return GenerateStrictEqual(assembler, kNegateResult, lhs, rhs, context);
 }
 
-void StringEqualStub::GenerateAssembly(CodeStubAssembler* assembler) const {
-  GenerateStringEqual(assembler, kDontNegateResult);
-}
-
-void StringNotEqualStub::GenerateAssembly(CodeStubAssembler* assembler) const {
-  GenerateStringEqual(assembler, kNegateResult);
-}
-
-void StringLessThanStub::GenerateAssembly(CodeStubAssembler* assembler) const {
-  GenerateStringRelationalComparison(assembler, kLessThan);
-}
-
-void StringLessThanOrEqualStub::GenerateAssembly(
-    CodeStubAssembler* assembler) const {
-  GenerateStringRelationalComparison(assembler, kLessThanOrEqual);
-}
-
-void StringGreaterThanStub::GenerateAssembly(
-    CodeStubAssembler* assembler) const {
-  GenerateStringRelationalComparison(assembler, kGreaterThan);
-}
-
-void StringGreaterThanOrEqualStub::GenerateAssembly(
-    CodeStubAssembler* assembler) const {
-  GenerateStringRelationalComparison(assembler, kGreaterThanOrEqual);
-}
-
 void ToLengthStub::GenerateAssembly(CodeStubAssembler* assembler) const {
   typedef CodeStubAssembler::Label Label;
   typedef compiler::Node Node;
@@ -4557,8 +4488,7 @@
     // Check if {len} is a HeapNumber.
     Label if_lenisheapnumber(assembler),
         if_lenisnotheapnumber(assembler, Label::kDeferred);
-    assembler->Branch(assembler->WordEqual(assembler->LoadMap(len),
-                                           assembler->HeapNumberMapConstant()),
+    assembler->Branch(assembler->IsHeapNumberMap(assembler->LoadMap(len)),
                       &if_lenisheapnumber, &if_lenisnotheapnumber);
 
     assembler->Bind(&if_lenisheapnumber);
@@ -4603,64 +4533,12 @@
 }
 
 void ToIntegerStub::GenerateAssembly(CodeStubAssembler* assembler) const {
-  typedef CodeStubAssembler::Label Label;
   typedef compiler::Node Node;
-  typedef CodeStubAssembler::Variable Variable;
 
-  Node* context = assembler->Parameter(1);
+  Node* input = assembler->Parameter(Descriptor::kArgument);
+  Node* context = assembler->Parameter(Descriptor::kContext);
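+  // The Smi/HeapNumber truncation loop previously open-coded here now lives
+  // in CodeStubAssembler::ToInteger.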
 
-  // We might need to loop once for ToNumber conversion.
-  Variable var_arg(assembler, MachineRepresentation::kTagged);
-  Label loop(assembler, &var_arg);
-  var_arg.Bind(assembler->Parameter(0));
-  assembler->Goto(&loop);
-  assembler->Bind(&loop);
-  {
-    // Shared entry points.
-    Label return_arg(assembler), return_zero(assembler, Label::kDeferred);
-
-    // Load the current {arg} value.
-    Node* arg = var_arg.value();
-
-    // Check if {arg} is a Smi.
-    assembler->GotoIf(assembler->WordIsSmi(arg), &return_arg);
-
-    // Check if {arg} is a HeapNumber.
-    Label if_argisheapnumber(assembler),
-        if_argisnotheapnumber(assembler, Label::kDeferred);
-    assembler->Branch(assembler->WordEqual(assembler->LoadMap(arg),
-                                           assembler->HeapNumberMapConstant()),
-                      &if_argisheapnumber, &if_argisnotheapnumber);
-
-    assembler->Bind(&if_argisheapnumber);
-    {
-      // Load the floating-point value of {arg}.
-      Node* arg_value = assembler->LoadHeapNumberValue(arg);
-
-      // Check if {arg} is NaN.
-      assembler->GotoUnless(assembler->Float64Equal(arg_value, arg_value),
-                            &return_zero);
-
-      // Truncate {arg} towards zero.
-      Node* value = assembler->Float64Trunc(arg_value);
-      var_arg.Bind(assembler->ChangeFloat64ToTagged(value));
-      assembler->Goto(&return_arg);
-    }
-
-    assembler->Bind(&if_argisnotheapnumber);
-    {
-      // Need to convert {arg} to a Number first.
-      Callable callable = CodeFactory::NonNumberToNumber(assembler->isolate());
-      var_arg.Bind(assembler->CallStub(callable, context, arg));
-      assembler->Goto(&loop);
-    }
-
-    assembler->Bind(&return_arg);
-    assembler->Return(var_arg.value());
-
-    assembler->Bind(&return_zero);
-    assembler->Return(assembler->SmiConstant(Smi::FromInt(0)));
-  }
+  assembler->Return(assembler->ToInteger(context, input));
 }
 
 void StoreInterceptorStub::GenerateAssembly(
@@ -4727,15 +4605,13 @@
   typedef compiler::CodeAssembler::Label Label;
   typedef compiler::CodeAssembler::Variable Variable;
 
-  Node* undefined = assembler->UndefinedConstant();
   Node* literals_array =
       assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
   Node* allocation_site = assembler->LoadFixedArrayElement(
       literals_array, literals_index,
       LiteralsArray::kFirstLiteralIndex * kPointerSize,
       CodeStubAssembler::SMI_PARAMETERS);
-  assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
-                    call_runtime);
+  assembler->GotoIf(assembler->IsUndefined(allocation_site), call_runtime);
 
   // Calculate the object and allocation size based on the properties count.
   Node* object_size = assembler->IntPtrAdd(
@@ -4886,14 +4762,10 @@
 
 
 void HandlerStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
-  if (kind() == Code::STORE_IC) {
-    descriptor->Initialize(FUNCTION_ADDR(Runtime_StoreIC_MissFromStubFailure));
-  } else if (kind() == Code::KEYED_LOAD_IC) {
+  DCHECK(kind() == Code::LOAD_IC || kind() == Code::KEYED_LOAD_IC);
+  if (kind() == Code::KEYED_LOAD_IC) {
     descriptor->Initialize(
         FUNCTION_ADDR(Runtime_KeyedLoadIC_MissFromStubFailure));
-  } else if (kind() == Code::KEYED_STORE_IC) {
-    descriptor->Initialize(
-        FUNCTION_ADDR(Runtime_KeyedStoreIC_MissFromStubFailure));
   }
 }
 
@@ -4908,39 +4780,12 @@
 }
 
 
-void StoreFastElementStub::InitializeDescriptor(
-    CodeStubDescriptor* descriptor) {
-  descriptor->Initialize(
-      FUNCTION_ADDR(Runtime_KeyedStoreIC_MissFromStubFailure));
-}
-
-
-void ElementsTransitionAndStoreStub::InitializeDescriptor(
-    CodeStubDescriptor* descriptor) {
-  descriptor->Initialize(
-      FUNCTION_ADDR(Runtime_ElementsTransitionAndStoreIC_Miss));
-}
-
-void StoreTransitionStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
-  descriptor->Initialize(
-      FUNCTION_ADDR(Runtime_TransitionStoreIC_MissFromStubFailure));
-}
-
 void NumberToStringStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
   descriptor->Initialize(
       Runtime::FunctionForId(Runtime::kNumberToString)->entry);
   descriptor->SetMissHandler(Runtime::kNumberToString);
 }
 
-
-void FastCloneShallowArrayStub::InitializeDescriptor(
-    CodeStubDescriptor* descriptor) {
-  FastCloneShallowArrayDescriptor call_descriptor(isolate());
-  descriptor->Initialize(
-      Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry);
-  descriptor->SetMissHandler(Runtime::kCreateArrayLiteralStubBailout);
-}
-
 void RegExpConstructResultStub::InitializeDescriptor(
     CodeStubDescriptor* descriptor) {
   descriptor->Initialize(
@@ -5088,9 +4933,7 @@
 
   assembler->Bind(&return_to_name);
   {
-    // TODO(cbruni): inline ToName here.
-    Callable callable = CodeFactory::ToName(assembler->isolate());
-    var_result.Bind(assembler->CallStub(callable, context, key));
+    var_result.Bind(assembler->ToName(context, key));
     assembler->Goto(&end);
   }
 
@@ -5193,43 +5036,49 @@
   Label if_normal(assembler), if_generator(assembler), if_async(assembler),
       if_class_constructor(assembler), if_function_without_prototype(assembler),
       load_map(assembler);
-  Variable map_index(assembler, MachineRepresentation::kTagged);
+  Variable map_index(assembler, MachineType::PointerRepresentation());
 
+  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
   Node* is_not_normal = assembler->Word32And(
       compiler_hints,
-      assembler->Int32Constant(SharedFunctionInfo::kFunctionKindMaskBits));
+      assembler->Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
   assembler->GotoUnless(is_not_normal, &if_normal);
 
   Node* is_generator = assembler->Word32And(
       compiler_hints,
-      assembler->Int32Constant(1 << SharedFunctionInfo::kIsGeneratorBit));
+      assembler->Int32Constant(FunctionKind::kGeneratorFunction
+                               << SharedFunctionInfo::kFunctionKindShift));
   assembler->GotoIf(is_generator, &if_generator);
 
   Node* is_async = assembler->Word32And(
       compiler_hints,
-      assembler->Int32Constant(1 << SharedFunctionInfo::kIsAsyncFunctionBit));
+      assembler->Int32Constant(FunctionKind::kAsyncFunction
+                               << SharedFunctionInfo::kFunctionKindShift));
   assembler->GotoIf(is_async, &if_async);
 
   Node* is_class_constructor = assembler->Word32And(
       compiler_hints,
-      assembler->Int32Constant(SharedFunctionInfo::kClassConstructorBits));
+      assembler->Int32Constant(FunctionKind::kClassConstructor
+                               << SharedFunctionInfo::kFunctionKindShift));
   assembler->GotoIf(is_class_constructor, &if_class_constructor);
 
   if (FLAG_debug_code) {
     // Function must be a function without a prototype.
     assembler->Assert(assembler->Word32And(
-        compiler_hints, assembler->Int32Constant(
-                            SharedFunctionInfo::kAccessorFunctionBits |
-                            (1 << SharedFunctionInfo::kIsArrowBit) |
-                            (1 << SharedFunctionInfo::kIsConciseMethodBit))));
+        compiler_hints,
+        assembler->Int32Constant((FunctionKind::kAccessorFunction |
+                                  FunctionKind::kArrowFunction |
+                                  FunctionKind::kConciseMethod)
+                                 << SharedFunctionInfo::kFunctionKindShift)));
   }
   assembler->Goto(&if_function_without_prototype);
 
   assembler->Bind(&if_normal);
   {
     map_index.Bind(assembler->Select(
-        is_strict, assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX),
-        assembler->Int32Constant(Context::SLOPPY_FUNCTION_MAP_INDEX)));
+        is_strict,
+        assembler->IntPtrConstant(Context::STRICT_FUNCTION_MAP_INDEX),
+        assembler->IntPtrConstant(Context::SLOPPY_FUNCTION_MAP_INDEX)));
     assembler->Goto(&load_map);
   }
 
@@ -5237,8 +5086,8 @@
   {
     map_index.Bind(assembler->Select(
         is_strict,
-        assembler->Int32Constant(Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX),
-        assembler->Int32Constant(
+        assembler->IntPtrConstant(Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX),
+        assembler->IntPtrConstant(
             Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX)));
     assembler->Goto(&load_map);
   }
@@ -5247,21 +5096,21 @@
   {
     map_index.Bind(assembler->Select(
         is_strict,
-        assembler->Int32Constant(Context::STRICT_ASYNC_FUNCTION_MAP_INDEX),
-        assembler->Int32Constant(Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX)));
+        assembler->IntPtrConstant(Context::STRICT_ASYNC_FUNCTION_MAP_INDEX),
+        assembler->IntPtrConstant(Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX)));
     assembler->Goto(&load_map);
   }
 
   assembler->Bind(&if_class_constructor);
   {
     map_index.Bind(
-        assembler->Int32Constant(Context::STRICT_FUNCTION_MAP_INDEX));
+        assembler->IntPtrConstant(Context::STRICT_FUNCTION_MAP_INDEX));
     assembler->Goto(&load_map);
   }
 
   assembler->Bind(&if_function_without_prototype);
   {
-    map_index.Bind(assembler->Int32Constant(
+    map_index.Bind(assembler->IntPtrConstant(
         Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
     assembler->Goto(&load_map);
   }
@@ -5272,7 +5121,8 @@
   // as the map of the allocated object.
   Node* native_context = assembler->LoadNativeContext(context);
   Node* map_slot_value =
-      assembler->LoadFixedArrayElement(native_context, map_index.value());
+      assembler->LoadFixedArrayElement(native_context, map_index.value(), 0,
+                                       CodeStubAssembler::INTPTR_PARAMETERS);
   assembler->StoreMapNoWriteBarrier(result, map_slot_value);
 
   // Initialize the rest of the function.
@@ -5405,15 +5255,13 @@
 
   Variable result(assembler, MachineRepresentation::kTagged);
 
-  Node* undefined = assembler->UndefinedConstant();
   Node* literals_array =
       assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
   Node* boilerplate = assembler->LoadFixedArrayElement(
       literals_array, literal_index,
       LiteralsArray::kFirstLiteralIndex * kPointerSize,
       CodeStubAssembler::SMI_PARAMETERS);
-  assembler->GotoIf(assembler->WordEqual(boilerplate, undefined),
-                    &call_runtime);
+  assembler->GotoIf(assembler->IsUndefined(boilerplate), &call_runtime);
 
   {
     int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
@@ -5449,6 +5297,191 @@
       Generate(assembler, closure, literal_index, pattern, flags, context));
 }
 
+namespace {
+
+compiler::Node* NonEmptyShallowClone(CodeStubAssembler* assembler,
+                                     compiler::Node* boilerplate,
+                                     compiler::Node* boilerplate_map,
+                                     compiler::Node* boilerplate_elements,
+                                     compiler::Node* allocation_site,
+                                     compiler::Node* capacity,
+                                     ElementsKind kind) {
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::ParameterMode ParameterMode;
+
+  ParameterMode param_mode = CodeStubAssembler::SMI_PARAMETERS;
+
+  Node* length = assembler->LoadJSArrayLength(boilerplate);
+
+  if (assembler->Is64()) {
+    capacity = assembler->SmiUntag(capacity);
+    param_mode = CodeStubAssembler::INTEGER_PARAMETERS;
+  }
+
+  Node *array, *elements;
+  std::tie(array, elements) =
+      assembler->AllocateUninitializedJSArrayWithElements(
+          kind, boilerplate_map, length, allocation_site, capacity, param_mode);
+
+  assembler->Comment("copy elements header");
+  for (int offset = 0; offset < FixedArrayBase::kHeaderSize;
+       offset += kPointerSize) {
+    Node* value = assembler->LoadObjectField(boilerplate_elements, offset);
+    assembler->StoreObjectField(elements, offset, value);
+  }
+
+  if (assembler->Is64()) {
+    length = assembler->SmiUntag(length);
+  }
+
+  assembler->Comment("copy boilerplate elements");
+  assembler->CopyFixedArrayElements(kind, boilerplate_elements, elements,
+                                    length, SKIP_WRITE_BARRIER, param_mode);
+  assembler->IncrementCounter(
+      assembler->isolate()->counters()->inlined_copied_elements(), 1);
+
+  return array;
+}
+
+}  // namespace
+
+// static
+compiler::Node* FastCloneShallowArrayStub::Generate(
+    CodeStubAssembler* assembler, compiler::Node* closure,
+    compiler::Node* literal_index, compiler::Node* context,
+    CodeStubAssembler::Label* call_runtime,
+    AllocationSiteMode allocation_site_mode) {
+  typedef CodeStubAssembler::Label Label;
+  typedef CodeStubAssembler::Variable Variable;
+  typedef compiler::Node Node;
+
+  Label zero_capacity(assembler), cow_elements(assembler),
+      fast_elements(assembler), return_result(assembler);
+  Variable result(assembler, MachineRepresentation::kTagged);
+
+  Node* literals_array =
+      assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
+  Node* allocation_site = assembler->LoadFixedArrayElement(
+      literals_array, literal_index,
+      LiteralsArray::kFirstLiteralIndex * kPointerSize,
+      CodeStubAssembler::SMI_PARAMETERS);
+
+  assembler->GotoIf(assembler->IsUndefined(allocation_site), call_runtime);
+  allocation_site = assembler->LoadFixedArrayElement(
+      literals_array, literal_index,
+      LiteralsArray::kFirstLiteralIndex * kPointerSize,
+      CodeStubAssembler::SMI_PARAMETERS);
+
+  Node* boilerplate = assembler->LoadObjectField(
+      allocation_site, AllocationSite::kTransitionInfoOffset);
+  Node* boilerplate_map = assembler->LoadMap(boilerplate);
+  Node* boilerplate_elements = assembler->LoadElements(boilerplate);
+  Node* capacity = assembler->LoadFixedArrayBaseLength(boilerplate_elements);
+  allocation_site =
+      allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;
+
+  Node* zero = assembler->SmiConstant(Smi::FromInt(0));
+  assembler->GotoIf(assembler->SmiEqual(capacity, zero), &zero_capacity);
+
+  Node* elements_map = assembler->LoadMap(boilerplate_elements);
+  assembler->GotoIf(assembler->IsFixedCOWArrayMap(elements_map), &cow_elements);
+
+  assembler->GotoIf(assembler->IsFixedArrayMap(elements_map), &fast_elements);
+  {
+    assembler->Comment("fast double elements path");
+    if (FLAG_debug_code) {
+      Label correct_elements_map(assembler), abort(assembler, Label::kDeferred);
+      assembler->BranchIf(assembler->IsFixedDoubleArrayMap(elements_map),
+                          &correct_elements_map, &abort);
+
+      assembler->Bind(&abort);
+      {
+        Node* abort_id = assembler->SmiConstant(
+            Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
+        assembler->TailCallRuntime(Runtime::kAbort, context, abort_id);
+      }
+      assembler->Bind(&correct_elements_map);
+    }
+
+    Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
+                                       boilerplate_elements, allocation_site,
+                                       capacity, FAST_DOUBLE_ELEMENTS);
+    result.Bind(array);
+    assembler->Goto(&return_result);
+  }
+
+  assembler->Bind(&fast_elements);
+  {
+    assembler->Comment("fast elements path");
+    Node* array = NonEmptyShallowClone(assembler, boilerplate, boilerplate_map,
+                                       boilerplate_elements, allocation_site,
+                                       capacity, FAST_ELEMENTS);
+    result.Bind(array);
+    assembler->Goto(&return_result);
+  }
+
+  Variable length(assembler, MachineRepresentation::kTagged),
+      elements(assembler, MachineRepresentation::kTagged);
+  Label allocate_without_elements(assembler);
+
+  assembler->Bind(&cow_elements);
+  {
+    assembler->Comment("fixed cow path");
+    length.Bind(assembler->LoadJSArrayLength(boilerplate));
+    elements.Bind(boilerplate_elements);
+
+    assembler->Goto(&allocate_without_elements);
+  }
+
+  assembler->Bind(&zero_capacity);
+  {
+    assembler->Comment("zero capacity path");
+    length.Bind(zero);
+    elements.Bind(assembler->LoadRoot(Heap::kEmptyFixedArrayRootIndex));
+
+    assembler->Goto(&allocate_without_elements);
+  }
+
+  assembler->Bind(&allocate_without_elements);
+  {
+    Node* array = assembler->AllocateUninitializedJSArrayWithoutElements(
+        FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
+    assembler->StoreObjectField(array, JSObject::kElementsOffset,
+                                elements.value());
+    result.Bind(array);
+    assembler->Goto(&return_result);
+  }
+
+  assembler->Bind(&return_result);
+  return result.value();
+}
+
+void FastCloneShallowArrayStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef compiler::Node Node;
+  typedef CodeStubAssembler::Label Label;
+  Node* closure = assembler->Parameter(Descriptor::kClosure);
+  Node* literal_index = assembler->Parameter(Descriptor::kLiteralIndex);
+  Node* constant_elements = assembler->Parameter(Descriptor::kConstantElements);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+  Label call_runtime(assembler, Label::kDeferred);
+  assembler->Return(Generate(assembler, closure, literal_index, context,
+                             &call_runtime, allocation_site_mode()));
+
+  assembler->Bind(&call_runtime);
+  {
+    assembler->Comment("call runtime");
+    Node* flags = assembler->SmiConstant(
+        Smi::FromInt(ArrayLiteral::kShallowElements |
+                     (allocation_site_mode() == TRACK_ALLOCATION_SITE
+                          ? 0
+                          : ArrayLiteral::kDisableMementos)));
+    assembler->Return(assembler->CallRuntime(Runtime::kCreateArrayLiteral,
+                                             context, closure, literal_index,
+                                             constant_elements, flags));
+  }
+}
+
 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
   CreateAllocationSiteStub stub(isolate);
   stub.GetCode();
@@ -5463,9 +5496,38 @@
 
 void StoreElementStub::Generate(MacroAssembler* masm) {
   DCHECK_EQ(DICTIONARY_ELEMENTS, elements_kind());
-  ElementHandlerCompiler::GenerateStoreSlow(masm);
+  KeyedStoreIC::GenerateSlow(masm);
 }
 
+void StoreFastElementStub::GenerateAssembly(
+    CodeStubAssembler* assembler) const {
+  typedef CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  assembler->Comment(
+      "StoreFastElementStub: js_array=%d, elements_kind=%s, store_mode=%d",
+      is_js_array(), ElementsKindToString(elements_kind()), store_mode());
+
+  Node* receiver = assembler->Parameter(Descriptor::kReceiver);
+  Node* key = assembler->Parameter(Descriptor::kName);
+  Node* value = assembler->Parameter(Descriptor::kValue);
+  Node* slot = assembler->Parameter(Descriptor::kSlot);
+  Node* vector = assembler->Parameter(Descriptor::kVector);
+  Node* context = assembler->Parameter(Descriptor::kContext);
+
+  Label miss(assembler);
+
+  assembler->EmitElementStore(receiver, key, value, is_js_array(),
+                              elements_kind(), store_mode(), &miss);
+  assembler->Return(value);
+
+  assembler->Bind(&miss);
+  {
+    assembler->Comment("Miss");
+    assembler->TailCallRuntime(Runtime::kKeyedStoreIC_Miss, context, value,
+                               slot, vector, receiver, key);
+  }
+}
 
 // static
 void StoreFastElementStub::GenerateAheadOfTime(Isolate* isolate) {
@@ -5597,58 +5659,9 @@
 
 void CreateAllocationSiteStub::GenerateAssembly(
     CodeStubAssembler* assembler) const {
-  typedef compiler::Node Node;
-  Node* size = assembler->IntPtrConstant(AllocationSite::kSize);
-  Node* site = assembler->Allocate(size, CodeStubAssembler::kPretenured);
-
-  // Store the map
-  assembler->StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
-                                  Heap::kAllocationSiteMapRootIndex);
-
-  Node* kind =
-      assembler->SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
-  assembler->StoreObjectFieldNoWriteBarrier(
-      site, AllocationSite::kTransitionInfoOffset, kind);
-
-  // Unlike literals, constructed arrays don't have nested sites
-  Node* zero = assembler->IntPtrConstant(0);
-  assembler->StoreObjectFieldNoWriteBarrier(
-      site, AllocationSite::kNestedSiteOffset, zero);
-
-  // Pretenuring calculation field.
-  assembler->StoreObjectFieldNoWriteBarrier(
-      site, AllocationSite::kPretenureDataOffset, zero);
-
-  // Pretenuring memento creation count field.
-  assembler->StoreObjectFieldNoWriteBarrier(
-      site, AllocationSite::kPretenureCreateCountOffset, zero);
-
-  // Store an empty fixed array for the code dependency.
-  assembler->StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
-                                  Heap::kEmptyFixedArrayRootIndex);
-
-  // Link the object to the allocation site list
-  Node* site_list = assembler->ExternalConstant(
-      ExternalReference::allocation_sites_list_address(isolate()));
-  Node* next_site = assembler->LoadBufferObject(site_list, 0);
-
-  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
-  // mark as such in order to skip the write barrier, once we have a unified
-  // system for weakness. For now we decided to keep it like this because having
-  // an initial write barrier backed store makes this pointer strong until the
-  // next GC, and allocation sites are designed to survive several GCs anyway.
-  assembler->StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
-  assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list,
-                                 site);
-
-  Node* feedback_vector = assembler->Parameter(Descriptor::kVector);
-  Node* slot = assembler->Parameter(Descriptor::kSlot);
-
-  assembler->StoreFixedArrayElement(feedback_vector, slot, site,
-                                    UPDATE_WRITE_BARRIER,
-                                    CodeStubAssembler::SMI_PARAMETERS);
-
-  assembler->Return(site);
+  assembler->Return(assembler->CreateAllocationSiteInFeedbackVector(
+      assembler->Parameter(Descriptor::kVector),
+      assembler->Parameter(Descriptor::kSlot)));
 }
 
 void CreateWeakCellStub::GenerateAssembly(CodeStubAssembler* assembler) const {
@@ -5674,7 +5687,7 @@
   Node* array = assembler->AllocateJSArray(
       elements_kind(), array_map,
       assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
-      assembler->IntPtrConstant(0), allocation_site);
+      assembler->SmiConstant(Smi::FromInt(0)), allocation_site);
   assembler->Return(array);
 }
 
@@ -5687,7 +5700,7 @@
   Node* array = assembler->AllocateJSArray(
       elements_kind(), array_map,
       assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
-      assembler->IntPtrConstant(0), nullptr);
+      assembler->SmiConstant(Smi::FromInt(0)), nullptr);
   assembler->Return(array);
 }
 
@@ -5727,8 +5740,8 @@
     int element_size =
         IsFastDoubleElementsKind(elements_kind) ? kDoubleSize : kPointerSize;
     int max_fast_elements =
-        (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize -
-         JSArray::kSize - AllocationMemento::kSize) /
+        (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize - JSArray::kSize -
+         AllocationMemento::kSize) /
         element_size;
     assembler->Branch(
         assembler->SmiAboveOrEqual(
@@ -5796,9 +5809,8 @@
   ElementsKind kind = elements_kind();
 
   Node* elements = assembler->LoadElements(object);
-  Node* new_elements = assembler->CheckAndGrowElementsCapacity(
-      context, elements, kind, key, &runtime);
-  assembler->StoreObjectField(object, JSObject::kElementsOffset, new_elements);
+  Node* new_elements =
+      assembler->TryGrowElementsCapacity(object, elements, kind, key, &runtime);
   assembler->Return(new_elements);
 
   assembler->Bind(&runtime);
@@ -5837,20 +5849,19 @@
 InternalArrayConstructorStub::InternalArrayConstructorStub(Isolate* isolate)
     : PlatformCodeStub(isolate) {}
 
-Representation RepresentationFromType(Type* type) {
-  if (type->Is(Type::UntaggedIntegral())) {
+Representation RepresentationFromMachineType(MachineType type) {
+  if (type == MachineType::Int32()) {
     return Representation::Integer32();
   }
 
-  if (type->Is(Type::TaggedSigned())) {
+  if (type == MachineType::TaggedSigned()) {
     return Representation::Smi();
   }
 
-  if (type->Is(Type::UntaggedPointer())) {
+  if (type == MachineType::Pointer()) {
     return Representation::External();
   }
 
-  DCHECK(!type->Is(Type::Untagged()));
   return Representation::Tagged();
 }
 
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 4793d74..5c83fde 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -18,6 +18,8 @@
 namespace v8 {
 namespace internal {
 
+class ObjectLiteral;
+
 // List of code stubs used on all platforms.
 #define CODE_STUB_LIST_ALL_PLATFORMS(V)       \
   /* --- PlatformCodeStubs --- */             \
@@ -41,8 +43,6 @@
   V(StoreBufferOverflow)                      \
   V(StoreElement)                             \
   V(SubString)                                \
-  V(ToString)                                 \
-  V(ToName)                                   \
   V(StoreIC)                                  \
   V(KeyedStoreIC)                             \
   V(KeyedLoadIC)                              \
@@ -66,12 +66,8 @@
   V(KeyedStoreICTrampoline)                   \
   V(StoreICTrampoline)                        \
   /* --- HydrogenCodeStubs --- */             \
-  V(ElementsTransitionAndStore)               \
-  V(FastCloneShallowArray)                    \
   V(NumberToString)                           \
   V(StringAdd)                                \
-  V(ToObject)                                 \
-  V(Typeof)                                   \
   /* These builtins w/ JS linkage are */      \
   /* just fast-cases of C++ builtins. They */ \
   /* require varg support from TF */          \
@@ -81,18 +77,10 @@
   /* as part of the new IC system, ask */     \
   /* ishell before doing anything  */         \
   V(KeyedLoadGeneric)                         \
-  V(KeyedLoadSloppyArguments)                 \
-  V(KeyedStoreSloppyArguments)                \
   V(LoadConstant)                             \
   V(LoadDictionaryElement)                    \
   V(LoadFastElement)                          \
   V(LoadField)                                \
-  V(LoadScriptContextField)                   \
-  V(StoreFastElement)                         \
-  V(StoreField)                               \
-  V(StoreGlobal)                              \
-  V(StoreScriptContextField)                  \
-  V(StoreTransition)                          \
   /* These should never be ported to TF */    \
   /* because they are either used only by */  \
   /* FCG/Crankshaft or are deprecated */      \
@@ -140,8 +128,10 @@
   V(InternalArrayNoArgumentConstructor)       \
   V(InternalArraySingleArgumentConstructor)   \
   V(Dec)                                      \
-  V(FastCloneShallowObject)                   \
+  V(ElementsTransitionAndStore)               \
   V(FastCloneRegExp)                          \
+  V(FastCloneShallowArray)                    \
+  V(FastCloneShallowObject)                   \
   V(FastNewClosure)                           \
   V(FastNewFunctionContext)                   \
   V(InstanceOf)                               \
@@ -151,14 +141,12 @@
   V(GreaterThanOrEqual)                       \
   V(Equal)                                    \
   V(NotEqual)                                 \
+  V(KeyedLoadSloppyArguments)                 \
+  V(KeyedStoreSloppyArguments)                \
+  V(LoadScriptContextField)                   \
+  V(StoreScriptContextField)                  \
   V(StrictEqual)                              \
   V(StrictNotEqual)                           \
-  V(StringEqual)                              \
-  V(StringNotEqual)                           \
-  V(StringLessThan)                           \
-  V(StringLessThanOrEqual)                    \
-  V(StringGreaterThan)                        \
-  V(StringGreaterThanOrEqual)                 \
   V(ToInteger)                                \
   V(ToLength)                                 \
   V(HasProperty)                              \
@@ -166,16 +154,25 @@
   V(GetProperty)                              \
   V(LoadICTF)                                 \
   V(KeyedLoadICTF)                            \
+  V(StoreFastElement)                         \
+  V(StoreField)                               \
+  V(StoreGlobal)                              \
+  V(StoreICTF)                                \
   V(StoreInterceptor)                         \
+  V(StoreMap)                                 \
+  V(StoreTransition)                          \
   V(LoadApiGetter)                            \
   V(LoadIndexedInterceptor)                   \
   V(GrowArrayElements)                        \
+  V(ToObject)                                 \
+  V(Typeof)                                   \
   /* These are only called from FGC and */    \
   /* can be removed when we use ignition */   \
   /* only */                                  \
   V(LoadICTrampolineTF)                       \
   V(LoadGlobalICTrampoline)                   \
-  V(KeyedLoadICTrampolineTF)
+  V(KeyedLoadICTrampolineTF)                  \
+  V(StoreICTrampolineTF)
 
 // List of code stubs only used on ARM 32 bits platforms.
 #if V8_TARGET_ARCH_ARM
@@ -487,12 +484,6 @@
     return Descriptor(isolate());                                       \
   }
 
-#define DEFINE_ON_STACK_CALL_INTERFACE_DESCRIPTOR(PARAMETER_COUNT)         \
- public:                                                                   \
-  CallInterfaceDescriptor GetCallInterfaceDescriptor() const override {    \
-    return OnStackArgsDescriptorBase::ForArgs(isolate(), PARAMETER_COUNT); \
-  }
-
 // There are some code stubs we just can't describe right now with a
 // CallInterfaceDescriptor. Isolate behavior for those cases with this macro.
 // An attempt to retrieve a descriptor will fail.
@@ -564,7 +555,7 @@
     return call_descriptor().GetRegisterParameter(index);
   }
 
-  Type* GetParameterType(int index) const {
+  MachineType GetParameterType(int index) const {
     return call_descriptor().GetParameterType(index);
   }
 
@@ -993,57 +984,6 @@
   DEFINE_TURBOFAN_BINARY_OP_CODE_STUB(StrictNotEqual, TurboFanCodeStub);
 };
 
-class StringEqualStub final : public TurboFanCodeStub {
- public:
-  explicit StringEqualStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringEqual, TurboFanCodeStub);
-};
-
-class StringNotEqualStub final : public TurboFanCodeStub {
- public:
-  explicit StringNotEqualStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringNotEqual, TurboFanCodeStub);
-};
-
-class StringLessThanStub final : public TurboFanCodeStub {
- public:
-  explicit StringLessThanStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringLessThan, TurboFanCodeStub);
-};
-
-class StringLessThanOrEqualStub final : public TurboFanCodeStub {
- public:
-  explicit StringLessThanOrEqualStub(Isolate* isolate)
-      : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringLessThanOrEqual, TurboFanCodeStub);
-};
-
-class StringGreaterThanStub final : public TurboFanCodeStub {
- public:
-  explicit StringGreaterThanStub(Isolate* isolate)
-      : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringGreaterThan, TurboFanCodeStub);
-};
-
-class StringGreaterThanOrEqualStub final : public TurboFanCodeStub {
- public:
-  explicit StringGreaterThanOrEqualStub(Isolate* isolate)
-      : TurboFanCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
-  DEFINE_TURBOFAN_CODE_STUB(StringGreaterThanOrEqual, TurboFanCodeStub);
-};
-
 class ToIntegerStub final : public TurboFanCodeStub {
  public:
   explicit ToIntegerStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
@@ -1174,7 +1114,7 @@
   // FastNewFunctionContextStub can only allocate closures which fit in the
   // new space.
   STATIC_ASSERT(((kMaximumSlots + Context::MIN_CONTEXT_SLOTS) * kPointerSize +
-                 FixedArray::kHeaderSize) < Page::kMaxRegularHeapObjectSize);
+                 FixedArray::kHeaderSize) < kMaxRegularHeapObjectSize);
 
   DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewFunctionContext);
   DEFINE_TURBOFAN_CODE_STUB(FastNewFunctionContext, TurboFanCodeStub);
@@ -1269,24 +1209,30 @@
   DEFINE_TURBOFAN_CODE_STUB(FastCloneRegExp, TurboFanCodeStub);
 };
 
-
-class FastCloneShallowArrayStub : public HydrogenCodeStub {
+class FastCloneShallowArrayStub : public TurboFanCodeStub {
  public:
   FastCloneShallowArrayStub(Isolate* isolate,
                             AllocationSiteMode allocation_site_mode)
-      : HydrogenCodeStub(isolate) {
-    set_sub_minor_key(AllocationSiteModeBits::encode(allocation_site_mode));
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = AllocationSiteModeBits::encode(allocation_site_mode);
   }
 
+  static compiler::Node* Generate(CodeStubAssembler* assembler,
+                                  compiler::Node* closure,
+                                  compiler::Node* literal_index,
+                                  compiler::Node* context,
+                                  CodeStubAssembler::Label* call_runtime,
+                                  AllocationSiteMode allocation_site_mode);
+
   AllocationSiteMode allocation_site_mode() const {
-    return AllocationSiteModeBits::decode(sub_minor_key());
+    return AllocationSiteModeBits::decode(minor_key_);
   }
 
  private:
   class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
 
   DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowArray);
-  DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowArray, HydrogenCodeStub);
+  DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowArray, TurboFanCodeStub);
 };
 
 class FastCloneShallowObjectStub : public TurboFanCodeStub {
@@ -1556,35 +1502,36 @@
   DEFINE_HANDLER_CODE_STUB(LoadField, HandlerStub);
 };
 
-
-class KeyedLoadSloppyArgumentsStub : public HandlerStub {
+class KeyedLoadSloppyArgumentsStub : public TurboFanCodeStub {
  public:
   explicit KeyedLoadSloppyArgumentsStub(Isolate* isolate)
-      : HandlerStub(isolate) {}
+      : TurboFanCodeStub(isolate) {}
+
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::LOAD_IC; }
 
  protected:
-  Code::Kind kind() const override { return Code::KEYED_LOAD_IC; }
-
   DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadWithVector);
-  DEFINE_HANDLER_CODE_STUB(KeyedLoadSloppyArguments, HandlerStub);
+  DEFINE_TURBOFAN_CODE_STUB(KeyedLoadSloppyArguments, TurboFanCodeStub);
 };
 
 
 class CommonStoreModeBits : public BitField<KeyedAccessStoreMode, 0, 3> {};
 
-class KeyedStoreSloppyArgumentsStub : public HandlerStub {
+class KeyedStoreSloppyArgumentsStub : public TurboFanCodeStub {
  public:
   explicit KeyedStoreSloppyArgumentsStub(Isolate* isolate,
                                          KeyedAccessStoreMode mode)
-      : HandlerStub(isolate) {
-    set_sub_minor_key(CommonStoreModeBits::encode(mode));
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = CommonStoreModeBits::encode(mode);
   }
 
- protected:
-  Code::Kind kind() const override { return Code::KEYED_STORE_IC; }
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
 
+ protected:
   DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
-  DEFINE_HANDLER_CODE_STUB(KeyedStoreSloppyArguments, HandlerStub);
+  DEFINE_TURBOFAN_CODE_STUB(KeyedStoreSloppyArguments, TurboFanCodeStub);
 };
 
 
@@ -1637,161 +1584,107 @@
   DEFINE_TURBOFAN_CODE_STUB(LoadApiGetter, TurboFanCodeStub);
 };
 
-class StoreFieldStub : public HandlerStub {
+class StoreFieldStub : public TurboFanCodeStub {
  public:
   StoreFieldStub(Isolate* isolate, FieldIndex index,
                  Representation representation)
-      : HandlerStub(isolate) {
+      : TurboFanCodeStub(isolate) {
     int property_index_key = index.GetFieldAccessStubKey();
-    uint8_t repr = PropertyDetails::EncodeRepresentation(representation);
-    set_sub_minor_key(StoreFieldByIndexBits::encode(property_index_key) |
-                      RepresentationBits::encode(repr));
+    minor_key_ = StoreFieldByIndexBits::encode(property_index_key) |
+                 RepresentationBits::encode(representation.kind());
   }
 
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
+
   FieldIndex index() const {
-    int property_index_key = StoreFieldByIndexBits::decode(sub_minor_key());
+    int property_index_key = StoreFieldByIndexBits::decode(minor_key_);
     return FieldIndex::FromFieldAccessStubKey(property_index_key);
   }
 
-  Representation representation() {
-    uint8_t repr = RepresentationBits::decode(sub_minor_key());
-    return PropertyDetails::DecodeRepresentation(repr);
+  Representation representation() const {
+    return Representation::FromKind(RepresentationBits::decode(minor_key_));
   }
 
- protected:
-  Code::Kind kind() const override { return Code::STORE_IC; }
-
  private:
   class StoreFieldByIndexBits : public BitField<int, 0, 13> {};
-  class RepresentationBits : public BitField<uint8_t, 13, 4> {};
+  class RepresentationBits
+      : public BitField<Representation::Kind, StoreFieldByIndexBits::kNext, 4> {
+  };
+  STATIC_ASSERT(Representation::kNumRepresentations - 1 <
+                RepresentationBits::kMax);
 
-  // TODO(ishell): The stub uses only kReceiver and kValue parameters.
   DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
-  DEFINE_HANDLER_CODE_STUB(StoreField, HandlerStub);
+  DEFINE_TURBOFAN_CODE_STUB(StoreField, TurboFanCodeStub);
 };
 
-
-// Register and parameter access methods are specified here instead of in
-// the CallInterfaceDescriptor because the stub uses a different descriptor
-// if FLAG_vector_stores is on.
-class StoreTransitionHelper {
+class StoreMapStub : public TurboFanCodeStub {
  public:
-  static Register ReceiverRegister() {
-    return StoreTransitionDescriptor::ReceiverRegister();
-  }
+  explicit StoreMapStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
 
-  static Register NameRegister() {
-    return StoreTransitionDescriptor::NameRegister();
-  }
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
 
-  static Register ValueRegister() {
-    return StoreTransitionDescriptor::ValueRegister();
-  }
-
-  static Register SlotRegister() {
-    return VectorStoreTransitionDescriptor::SlotRegister();
-  }
-
-  static Register VectorRegister() {
-    return VectorStoreTransitionDescriptor::VectorRegister();
-  }
-
-  static Register MapRegister() {
-    return VectorStoreTransitionDescriptor::MapRegister();
-  }
-
-  static int ReceiverIndex() { return StoreTransitionDescriptor::kReceiver; }
-
-  static int NameIndex() { return StoreTransitionDescriptor::kReceiver; }
-
-  static int ValueIndex() { return StoreTransitionDescriptor::kValue; }
-
-  static int MapIndex() {
-    DCHECK(static_cast<int>(VectorStoreTransitionDescriptor::kMap) ==
-           static_cast<int>(StoreTransitionDescriptor::kMap));
-    return StoreTransitionDescriptor::kMap;
-  }
-
-  static int VectorIndex() {
-    if (HasVirtualSlotArg()) {
-      return VectorStoreTransitionDescriptor::kVirtualSlotVector;
-    }
-    return VectorStoreTransitionDescriptor::kVector;
-  }
-
-  // Some platforms don't have a slot arg.
-  static bool HasVirtualSlotArg() {
-    return SlotRegister().is(no_reg);
-  }
+ private:
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreTransition);
+  DEFINE_TURBOFAN_CODE_STUB(StoreMap, TurboFanCodeStub);
 };
 
-
-class StoreTransitionStub : public HandlerStub {
+class StoreTransitionStub : public TurboFanCodeStub {
  public:
   enum StoreMode {
-    StoreMapOnly,
     StoreMapAndValue,
     ExtendStorageAndStoreMapAndValue
   };
 
-  explicit StoreTransitionStub(Isolate* isolate) : HandlerStub(isolate) {
-    set_sub_minor_key(StoreModeBits::encode(StoreMapOnly));
-  }
-
-  StoreTransitionStub(Isolate* isolate, FieldIndex index,
+  StoreTransitionStub(Isolate* isolate, bool is_inobject,
                       Representation representation, StoreMode store_mode)
-      : HandlerStub(isolate) {
-    DCHECK(store_mode != StoreMapOnly);
-    int property_index_key = index.GetFieldAccessStubKey();
-    uint8_t repr = PropertyDetails::EncodeRepresentation(representation);
-    set_sub_minor_key(StoreFieldByIndexBits::encode(property_index_key) |
-                      RepresentationBits::encode(repr) |
-                      StoreModeBits::encode(store_mode));
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = IsInobjectBits::encode(is_inobject) |
+                 RepresentationBits::encode(representation.kind()) |
+                 StoreModeBits::encode(store_mode);
   }
 
-  FieldIndex index() const {
-    DCHECK(store_mode() != StoreMapOnly);
-    int property_index_key = StoreFieldByIndexBits::decode(sub_minor_key());
-    return FieldIndex::FromFieldAccessStubKey(property_index_key);
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
+
+  bool is_inobject() const { return IsInobjectBits::decode(minor_key_); }
+
+  Representation representation() const {
+    return Representation::FromKind(RepresentationBits::decode(minor_key_));
   }
 
-  Representation representation() {
-    DCHECK(store_mode() != StoreMapOnly);
-    uint8_t repr = RepresentationBits::decode(sub_minor_key());
-    return PropertyDetails::DecodeRepresentation(repr);
-  }
-
-  StoreMode store_mode() const {
-    return StoreModeBits::decode(sub_minor_key());
-  }
-
- protected:
-  Code::Kind kind() const override { return Code::STORE_IC; }
-  void InitializeDescriptor(CodeStubDescriptor* descriptor) override;
+  StoreMode store_mode() const { return StoreModeBits::decode(minor_key_); }
 
  private:
-  class StoreFieldByIndexBits : public BitField<int, 0, 13> {};
-  class RepresentationBits : public BitField<uint8_t, 13, 4> {};
-  class StoreModeBits : public BitField<StoreMode, 17, 2> {};
+  class IsInobjectBits : public BitField<bool, 0, 1> {};
+  class RepresentationBits
+      : public BitField<Representation::Kind, IsInobjectBits::kNext, 4> {};
+  STATIC_ASSERT(Representation::kNumRepresentations - 1 <
+                RepresentationBits::kMax);
+  class StoreModeBits
+      : public BitField<StoreMode, RepresentationBits::kNext, 1> {};
 
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(VectorStoreTransition);
-  DEFINE_HANDLER_CODE_STUB(StoreTransition, HandlerStub);
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreNamedTransition);
+  DEFINE_TURBOFAN_CODE_STUB(StoreTransition, TurboFanCodeStub);
 };
 
-
-class StoreGlobalStub : public HandlerStub {
+class StoreGlobalStub : public TurboFanCodeStub {
  public:
   StoreGlobalStub(Isolate* isolate, PropertyCellType type,
                   Maybe<PropertyCellConstantType> constant_type,
                   bool check_global)
-      : HandlerStub(isolate) {
+      : TurboFanCodeStub(isolate) {
     PropertyCellConstantType encoded_constant_type =
         constant_type.FromMaybe(PropertyCellConstantType::kSmi);
-    set_sub_minor_key(CellTypeBits::encode(type) |
-                      ConstantTypeBits::encode(encoded_constant_type) |
-                      CheckGlobalBits::encode(check_global));
+    minor_key_ = CellTypeBits::encode(type) |
+                 ConstantTypeBits::encode(encoded_constant_type) |
+                 CheckGlobalBits::encode(check_global);
   }
 
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
+
   static Handle<HeapObject> property_cell_placeholder(Isolate* isolate) {
     return isolate->factory()->uninitialized_value();
   }
@@ -1812,37 +1705,25 @@
     return CodeStub::GetCodeCopy(pattern);
   }
 
-  Code::Kind kind() const override { return Code::STORE_IC; }
-
   PropertyCellType cell_type() const {
-    return CellTypeBits::decode(sub_minor_key());
+    return CellTypeBits::decode(minor_key_);
   }
 
   PropertyCellConstantType constant_type() const {
     DCHECK(PropertyCellType::kConstantType == cell_type());
-    return ConstantTypeBits::decode(sub_minor_key());
+    return ConstantTypeBits::decode(minor_key_);
   }
 
-  bool check_global() const { return CheckGlobalBits::decode(sub_minor_key()); }
-
-  Representation representation() {
-    return Representation::FromKind(
-        RepresentationBits::decode(sub_minor_key()));
-  }
-
-  void set_representation(Representation r) {
-    set_sub_minor_key(RepresentationBits::update(sub_minor_key(), r.kind()));
-  }
+  bool check_global() const { return CheckGlobalBits::decode(minor_key_); }
 
  private:
   class CellTypeBits : public BitField<PropertyCellType, 0, 2> {};
-  class ConstantTypeBits : public BitField<PropertyCellConstantType, 2, 2> {};
-  class RepresentationBits : public BitField<Representation::Kind, 4, 8> {};
-  class CheckGlobalBits : public BitField<bool, 12, 1> {};
+  class ConstantTypeBits
+      : public BitField<PropertyCellConstantType, CellTypeBits::kNext, 2> {};
+  class CheckGlobalBits : public BitField<bool, ConstantTypeBits::kNext, 1> {};
 
-  // TODO(ishell): The stub uses only kValue parameter.
   DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
-  DEFINE_HANDLER_CODE_STUB(StoreGlobal, HandlerStub);
+  DEFINE_TURBOFAN_CODE_STUB(StoreGlobal, TurboFanCodeStub);
 };
 
 // TODO(ishell): remove, once StoreGlobalIC is implemented.
@@ -1889,10 +1770,6 @@
       : CallApiCallbackStub(isolate, argc, false, call_data_undefined,
                             is_lazy) {}
 
-  CallInterfaceDescriptor GetCallInterfaceDescriptor() const override {
-    return ApiCallbackDescriptorBase::ForArgs(isolate(), argc());
-  }
-
  private:
   CallApiCallbackStub(Isolate* isolate, int argc, bool is_store,
                       bool call_data_undefined, bool is_lazy)
@@ -1916,6 +1793,7 @@
   class ArgumentBits : public BitField<int, 2, kArgBits> {};
   class IsLazyAccessorBits : public BitField<bool, 3 + kArgBits, 1> {};
 
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(ApiCallback);
   DEFINE_PLATFORM_CODE_STUB(CallApiCallback, PlatformCodeStub);
 };
 
@@ -2195,11 +2073,11 @@
  public:
   explicit RegExpExecStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
-  DEFINE_ON_STACK_CALL_INTERFACE_DESCRIPTOR(4);
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(RegExpExec);
   DEFINE_PLATFORM_CODE_STUB(RegExpExec, PlatformCodeStub);
 };
 
-
+// TODO(jgruber): Remove this once all uses in regexp.js have been removed.
 class RegExpConstructResultStub final : public HydrogenCodeStub {
  public:
   explicit RegExpConstructResultStub(Isolate* isolate)
@@ -2490,15 +2368,34 @@
   }
 
  protected:
-  StoreICState state() const {
-    return StoreICState(static_cast<ExtraICState>(minor_key_));
-  }
+  StoreICState state() const { return StoreICState(GetExtraICState()); }
 
  private:
   DEFINE_CALL_INTERFACE_DESCRIPTOR(Store);
   DEFINE_PLATFORM_CODE_STUB(StoreICTrampoline, PlatformCodeStub);
 };
 
+class StoreICTrampolineTFStub : public TurboFanCodeStub {
+ public:
+  StoreICTrampolineTFStub(Isolate* isolate, const StoreICState& state)
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = state.GetExtraICState();
+  }
+
+  void GenerateAssembly(CodeStubAssembler* assembler) const override;
+
+  Code::Kind GetCodeKind() const override { return Code::STORE_IC; }
+  ExtraICState GetExtraICState() const final {
+    return static_cast<ExtraICState>(minor_key_);
+  }
+
+ protected:
+  StoreICState state() const { return StoreICState(GetExtraICState()); }
+
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(Store);
+  DEFINE_CODE_STUB(StoreICTrampolineTF, TurboFanCodeStub);
+};
+
 class KeyedStoreICTrampolineStub : public StoreICTrampolineStub {
  public:
   KeyedStoreICTrampolineStub(Isolate* isolate, const StoreICState& state)
@@ -2627,6 +2524,24 @@
   void GenerateImpl(MacroAssembler* masm, bool in_frame);
 };
 
+class StoreICTFStub : public TurboFanCodeStub {
+ public:
+  StoreICTFStub(Isolate* isolate, const StoreICState& state)
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = state.GetExtraICState();
+  }
+
+  void GenerateAssembly(CodeStubAssembler* assembler) const override;
+
+  Code::Kind GetCodeKind() const override { return Code::STORE_IC; }
+  ExtraICState GetExtraICState() const final {
+    return static_cast<ExtraICState>(minor_key_);
+  }
+
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
+  DEFINE_CODE_STUB(StoreICTF, TurboFanCodeStub);
+};
+
 class KeyedStoreICStub : public PlatformCodeStub {
  public:
   KeyedStoreICStub(Isolate* isolate, const StoreICState& state)
@@ -2696,23 +2611,21 @@
   DEFINE_PLATFORM_CODE_STUB(DoubleToI, PlatformCodeStub);
 };
 
-
-class ScriptContextFieldStub : public HandlerStub {
+class ScriptContextFieldStub : public TurboFanCodeStub {
  public:
   ScriptContextFieldStub(Isolate* isolate,
                          const ScriptContextTable::LookupResult* lookup_result)
-      : HandlerStub(isolate) {
+      : TurboFanCodeStub(isolate) {
     DCHECK(Accepted(lookup_result));
-    STATIC_ASSERT(kContextIndexBits + kSlotIndexBits <= kSubMinorKeyBits);
-    set_sub_minor_key(ContextIndexBits::encode(lookup_result->context_index) |
-                      SlotIndexBits::encode(lookup_result->slot_index));
+    minor_key_ = ContextIndexBits::encode(lookup_result->context_index) |
+                 SlotIndexBits::encode(lookup_result->slot_index);
   }
 
-  int context_index() const {
-    return ContextIndexBits::decode(sub_minor_key());
-  }
+  Code::Kind GetCodeKind() const override { return Code::HANDLER; }
 
-  int slot_index() const { return SlotIndexBits::decode(sub_minor_key()); }
+  int context_index() const { return ContextIndexBits::decode(minor_key_); }
+
+  int slot_index() const { return SlotIndexBits::decode(minor_key_); }
 
   static bool Accepted(const ScriptContextTable::LookupResult* lookup_result) {
     return ContextIndexBits::is_valid(lookup_result->context_index) &&
@@ -2726,7 +2639,7 @@
   class SlotIndexBits
       : public BitField<int, kContextIndexBits, kSlotIndexBits> {};
 
-  DEFINE_CODE_STUB_BASE(ScriptContextFieldStub, HandlerStub);
+  DEFINE_CODE_STUB_BASE(ScriptContextFieldStub, TurboFanCodeStub);
 };
 
 
@@ -2736,11 +2649,11 @@
       Isolate* isolate, const ScriptContextTable::LookupResult* lookup_result)
       : ScriptContextFieldStub(isolate, lookup_result) {}
 
- private:
-  Code::Kind kind() const override { return Code::LOAD_IC; }
+  ExtraICState GetExtraICState() const override { return Code::LOAD_IC; }
 
+ private:
   DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadWithVector);
-  DEFINE_HANDLER_CODE_STUB(LoadScriptContextField, ScriptContextFieldStub);
+  DEFINE_TURBOFAN_CODE_STUB(LoadScriptContextField, ScriptContextFieldStub);
 };
 
 
@@ -2750,11 +2663,11 @@
       Isolate* isolate, const ScriptContextTable::LookupResult* lookup_result)
       : ScriptContextFieldStub(isolate, lookup_result) {}
 
- private:
-  Code::Kind kind() const override { return Code::STORE_IC; }
+  ExtraICState GetExtraICState() const override { return Code::STORE_IC; }
 
+ private:
   DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
-  DEFINE_HANDLER_CODE_STUB(StoreScriptContextField, ScriptContextFieldStub);
+  DEFINE_TURBOFAN_CODE_STUB(StoreScriptContextField, ScriptContextFieldStub);
 };
 
 
@@ -2790,38 +2703,38 @@
   DEFINE_HANDLER_CODE_STUB(LoadFastElement, HandlerStub);
 };
 
-
-class StoreFastElementStub : public HydrogenCodeStub {
+class StoreFastElementStub : public TurboFanCodeStub {
  public:
   StoreFastElementStub(Isolate* isolate, bool is_js_array,
                        ElementsKind elements_kind, KeyedAccessStoreMode mode)
-      : HydrogenCodeStub(isolate) {
-    set_sub_minor_key(CommonStoreModeBits::encode(mode) |
-                      ElementsKindBits::encode(elements_kind) |
-                      IsJSArrayBits::encode(is_js_array));
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = CommonStoreModeBits::encode(mode) |
+                 ElementsKindBits::encode(elements_kind) |
+                 IsJSArrayBits::encode(is_js_array);
   }
 
   static void GenerateAheadOfTime(Isolate* isolate);
 
-  bool is_js_array() const { return IsJSArrayBits::decode(sub_minor_key()); }
+  bool is_js_array() const { return IsJSArrayBits::decode(minor_key_); }
 
   ElementsKind elements_kind() const {
-    return ElementsKindBits::decode(sub_minor_key());
+    return ElementsKindBits::decode(minor_key_);
   }
 
   KeyedAccessStoreMode store_mode() const {
-    return CommonStoreModeBits::decode(sub_minor_key());
+    return CommonStoreModeBits::decode(minor_key_);
   }
 
   Code::Kind GetCodeKind() const override { return Code::HANDLER; }
   ExtraICState GetExtraICState() const override { return Code::KEYED_STORE_IC; }
 
  private:
-  class ElementsKindBits : public BitField<ElementsKind, 3, 8> {};
-  class IsJSArrayBits : public BitField<bool, 11, 1> {};
+  class ElementsKindBits
+      : public BitField<ElementsKind, CommonStoreModeBits::kNext, 8> {};
+  class IsJSArrayBits : public BitField<bool, ElementsKindBits::kNext, 1> {};
 
   DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
-  DEFINE_HYDROGEN_CODE_STUB(StoreFastElement, HydrogenCodeStub);
+  DEFINE_TURBOFAN_CODE_STUB(StoreFastElement, TurboFanCodeStub);
 };
 
 
@@ -3008,10 +2921,6 @@
                  CommonStoreModeBits::encode(mode);
   }
 
-  CallInterfaceDescriptor GetCallInterfaceDescriptor() const override {
-    return StoreWithVectorDescriptor(isolate());
-  }
-
   Code::Kind GetCodeKind() const override { return Code::HANDLER; }
   ExtraICState GetExtraICState() const override { return Code::KEYED_STORE_IC; }
 
@@ -3020,8 +2929,10 @@
     return ElementsKindBits::decode(minor_key_);
   }
 
-  class ElementsKindBits : public BitField<ElementsKind, 3, 8> {};
+  class ElementsKindBits
+      : public BitField<ElementsKind, CommonStoreModeBits::kNext, 8> {};
 
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreWithVector);
   DEFINE_PLATFORM_CODE_STUB(StoreElement, PlatformCodeStub);
 };
 
@@ -3098,34 +3009,35 @@
 
 std::ostream& operator<<(std::ostream& os, const ToBooleanICStub::Types& t);
 
-class ElementsTransitionAndStoreStub : public HydrogenCodeStub {
+class ElementsTransitionAndStoreStub : public TurboFanCodeStub {
  public:
   ElementsTransitionAndStoreStub(Isolate* isolate, ElementsKind from_kind,
                                  ElementsKind to_kind, bool is_jsarray,
                                  KeyedAccessStoreMode store_mode)
-      : HydrogenCodeStub(isolate) {
-    set_sub_minor_key(CommonStoreModeBits::encode(store_mode) |
-                      FromBits::encode(from_kind) | ToBits::encode(to_kind) |
-                      IsJSArrayBits::encode(is_jsarray));
+      : TurboFanCodeStub(isolate) {
+    minor_key_ = CommonStoreModeBits::encode(store_mode) |
+                 FromBits::encode(from_kind) | ToBits::encode(to_kind) |
+                 IsJSArrayBits::encode(is_jsarray);
   }
 
-  ElementsKind from_kind() const { return FromBits::decode(sub_minor_key()); }
-  ElementsKind to_kind() const { return ToBits::decode(sub_minor_key()); }
-  bool is_jsarray() const { return IsJSArrayBits::decode(sub_minor_key()); }
+  ElementsKind from_kind() const { return FromBits::decode(minor_key_); }
+  ElementsKind to_kind() const { return ToBits::decode(minor_key_); }
+  bool is_jsarray() const { return IsJSArrayBits::decode(minor_key_); }
   KeyedAccessStoreMode store_mode() const {
-    return CommonStoreModeBits::decode(sub_minor_key());
+    return CommonStoreModeBits::decode(minor_key_);
   }
 
   Code::Kind GetCodeKind() const override { return Code::HANDLER; }
   ExtraICState GetExtraICState() const override { return Code::KEYED_STORE_IC; }
 
  private:
-  class FromBits : public BitField<ElementsKind, 3, 8> {};
+  class FromBits
+      : public BitField<ElementsKind, CommonStoreModeBits::kNext, 8> {};
   class ToBits : public BitField<ElementsKind, 11, 8> {};
   class IsJSArrayBits : public BitField<bool, 19, 1> {};
 
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(VectorStoreTransition);
-  DEFINE_HYDROGEN_CODE_STUB(ElementsTransitionAndStore, HydrogenCodeStub);
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreTransition);
+  DEFINE_TURBOFAN_CODE_STUB(ElementsTransitionAndStore, TurboFanCodeStub);
 };
 
 
@@ -3191,29 +3103,24 @@
   DEFINE_PLATFORM_CODE_STUB(StoreBufferOverflow, PlatformCodeStub);
 };
 
-
-class SubStringStub : public PlatformCodeStub {
+class SubStringStub : public TurboFanCodeStub {
  public:
-  explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
+  explicit SubStringStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
 
-  DEFINE_ON_STACK_CALL_INTERFACE_DESCRIPTOR(3);
-  DEFINE_PLATFORM_CODE_STUB(SubString, PlatformCodeStub);
-};
+  static compiler::Node* Generate(CodeStubAssembler* assembler,
+                                  compiler::Node* string, compiler::Node* from,
+                                  compiler::Node* to, compiler::Node* context);
 
-class ToStringStub final : public PlatformCodeStub {
- public:
-  explicit ToStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
+  void GenerateAssembly(CodeStubAssembler* assembler) const override {
+    assembler->Return(Generate(assembler,
+                               assembler->Parameter(Descriptor::kString),
+                               assembler->Parameter(Descriptor::kFrom),
+                               assembler->Parameter(Descriptor::kTo),
+                               assembler->Parameter(Descriptor::kContext)));
+  }
 
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(TypeConversion);
-  DEFINE_PLATFORM_CODE_STUB(ToString, PlatformCodeStub);
-};
-
-class ToNameStub final : public PlatformCodeStub {
- public:
-  explicit ToNameStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(TypeConversion);
-  DEFINE_PLATFORM_CODE_STUB(ToName, PlatformCodeStub);
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(SubString);
+  DEFINE_CODE_STUB(SubString, TurboFanCodeStub);
 };
 
 class ToObjectStub final : public TurboFanCodeStub {
@@ -3231,7 +3138,7 @@
 #undef DEFINE_CODE_STUB
 #undef DEFINE_CODE_STUB_BASE
 
-extern Representation RepresentationFromType(Type* type);
+extern Representation RepresentationFromMachineType(MachineType type);
 
 }  // namespace internal
 }  // namespace v8
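
Note (illustration only, not part of the patch): the ElementsTransitionAndStoreStub conversion above packs the store mode, the source and target elements kinds, and the JSArray bit into the single 32-bit minor_key_ through chained BitField specializations. The standalone sketch below shows the encode/decode round trip that idiom relies on; the BitField here is a simplified stand-in for V8's template in src/utils.h, and the field names are hypothetical.

#include <cassert>
#include <cstdint>

// Simplified stand-in for V8's BitField<T, shift, size> template.
template <class T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((1u << size) - 1) << shift;
  static constexpr int kNext = shift + size;  // First bit after this field.
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << shift) & kMask;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key & kMask) >> shift);
  }
};

// Hypothetical fields chained via kNext, mirroring how CommonStoreModeBits,
// FromBits and ToBits are laid out above.
using StoreModeBits = BitField<int, 0, 3>;
using FromKindBits = BitField<int, StoreModeBits::kNext, 8>;
using ToKindBits = BitField<int, FromKindBits::kNext, 8>;

int main() {
  // Pack three values into one key, then decode them back.
  uint32_t minor_key = StoreModeBits::encode(5) | FromKindBits::encode(7) |
                       ToKindBits::encode(11);
  assert(StoreModeBits::decode(minor_key) == 5);
  assert(FromKindBits::decode(minor_key) == 7);
  assert(ToKindBits::decode(minor_key) == 11);
  return 0;
}
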
diff --git a/src/codegen.cc b/src/codegen.cc
index e47db10..afd8a6f 100644
--- a/src/codegen.cc
+++ b/src/codegen.cc
@@ -12,10 +12,9 @@
 
 #include "src/ast/prettyprinter.h"
 #include "src/bootstrapper.h"
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/debug/debug.h"
 #include "src/eh-frame.h"
-#include "src/parsing/parser.h"
 #include "src/runtime/runtime.h"
 
 namespace v8 {
@@ -147,7 +146,8 @@
       isolate->bootstrapper()->IsActive()
           ? FLAG_print_builtin_code
           : (FLAG_print_code || (info->IsStub() && FLAG_print_code_stubs) ||
-             (info->IsOptimizing() && FLAG_print_opt_code));
+             (info->IsOptimizing() && FLAG_print_opt_code &&
+              info->shared_info()->PassesFilter(FLAG_print_opt_code_filter)));
   if (print_code) {
     std::unique_ptr<char[]> debug_name = info->GetDebugName();
     CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
diff --git a/src/collector.h b/src/collector.h
index 8454aae..abb2fbb 100644
--- a/src/collector.h
+++ b/src/collector.h
@@ -6,7 +6,7 @@
 #define V8_COLLECTOR_H_
 
 #include "src/checks.h"
-#include "src/list.h"
+#include "src/list-inl.h"
 #include "src/vector.h"
 
 namespace v8 {
diff --git a/src/compilation-dependencies.cc b/src/compilation-dependencies.cc
index 96b3859..dfd7cfe 100644
--- a/src/compilation-dependencies.cc
+++ b/src/compilation-dependencies.cc
@@ -8,7 +8,7 @@
 #include "src/handles-inl.h"
 #include "src/isolate.h"
 #include "src/objects-inl.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compilation-info.cc b/src/compilation-info.cc
new file mode 100644
index 0000000..2e0934a
--- /dev/null
+++ b/src/compilation-info.cc
@@ -0,0 +1,214 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compilation-info.h"
+
+#include "src/api.h"
+#include "src/ast/ast.h"
+#include "src/ast/scopes.h"
+#include "src/isolate.h"
+#include "src/parsing/parse-info.h"
+
+namespace v8 {
+namespace internal {
+
+#define PARSE_INFO_GETTER(type, name)  \
+  type CompilationInfo::name() const { \
+    CHECK(parse_info());               \
+    return parse_info()->name();       \
+  }
+
+#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
+  type CompilationInfo::name() const {                  \
+    return parse_info() ? parse_info()->name() : def;   \
+  }
+
+PARSE_INFO_GETTER(Handle<Script>, script)
+PARSE_INFO_GETTER(FunctionLiteral*, literal)
+PARSE_INFO_GETTER_WITH_DEFAULT(DeclarationScope*, scope, nullptr)
+PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)
+
+#undef PARSE_INFO_GETTER
+#undef PARSE_INFO_GETTER_WITH_DEFAULT
+
+bool CompilationInfo::has_shared_info() const {
+  return parse_info_ && !parse_info_->shared_info().is_null();
+}
+
+CompilationInfo::CompilationInfo(ParseInfo* parse_info,
+                                 Handle<JSFunction> closure)
+    : CompilationInfo(parse_info, {}, Code::ComputeFlags(Code::FUNCTION), BASE,
+                      parse_info->isolate(), parse_info->zone()) {
+  closure_ = closure;
+
+  // Compiling for the snapshot typically results in different code than
+  // compiling later on. This means that code recompiled with deoptimization
+  // support won't be "equivalent" (as defined by SharedFunctionInfo::
+  // EnableDeoptimizationSupport), so it will replace the old code and all
+  // its type feedback. To avoid this, always compile functions in the snapshot
+  // with deoptimization support.
+  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();
+
+  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
+  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
+  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
+}
+
+CompilationInfo::CompilationInfo(Vector<const char> debug_name,
+                                 Isolate* isolate, Zone* zone,
+                                 Code::Flags code_flags)
+    : CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}
+
+CompilationInfo::CompilationInfo(ParseInfo* parse_info,
+                                 Vector<const char> debug_name,
+                                 Code::Flags code_flags, Mode mode,
+                                 Isolate* isolate, Zone* zone)
+    : parse_info_(parse_info),
+      isolate_(isolate),
+      flags_(0),
+      code_flags_(code_flags),
+      mode_(mode),
+      osr_ast_id_(BailoutId::None()),
+      zone_(zone),
+      deferred_handles_(nullptr),
+      dependencies_(isolate, zone),
+      bailout_reason_(kNoReason),
+      prologue_offset_(Code::kPrologueOffsetNotSet),
+      parameter_count_(0),
+      optimization_id_(-1),
+      osr_expr_stack_height_(-1),
+      debug_name_(debug_name) {}
+
+CompilationInfo::~CompilationInfo() {
+  if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
+    shared_info()->DisableOptimization(bailout_reason());
+  }
+  dependencies()->Rollback();
+  delete deferred_handles_;
+}
+
+int CompilationInfo::num_parameters() const {
+  return !IsStub() ? scope()->num_parameters() : parameter_count_;
+}
+
+int CompilationInfo::num_parameters_including_this() const {
+  return num_parameters() + (is_this_defined() ? 1 : 0);
+}
+
+bool CompilationInfo::is_this_defined() const { return !IsStub(); }
+
+// Primitive functions are unlikely to be picked up by the stack-walking
+// profiler, so they trigger their own optimization when they're called
+// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
+bool CompilationInfo::ShouldSelfOptimize() {
+  return FLAG_crankshaft &&
+         !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
+         !literal()->dont_optimize() &&
+         literal()->scope()->AllowsLazyCompilation() &&
+         !shared_info()->optimization_disabled();
+}
+
+void CompilationInfo::ReopenHandlesInNewHandleScope() {
+  closure_ = Handle<JSFunction>(*closure_);
+}
+
+bool CompilationInfo::has_simple_parameters() {
+  return scope()->has_simple_parameters();
+}
+
+std::unique_ptr<char[]> CompilationInfo::GetDebugName() const {
+  if (parse_info() && parse_info()->literal()) {
+    AllowHandleDereference allow_deref;
+    return parse_info()->literal()->debug_name()->ToCString();
+  }
+  if (parse_info() && !parse_info()->shared_info().is_null()) {
+    return parse_info()->shared_info()->DebugName()->ToCString();
+  }
+  Vector<const char> name_vec = debug_name_;
+  if (name_vec.is_empty()) name_vec = ArrayVector("unknown");
+  std::unique_ptr<char[]> name(new char[name_vec.length() + 1]);
+  memcpy(name.get(), name_vec.start(), name_vec.length());
+  name[name_vec.length()] = '\0';
+  return name;
+}
+
+StackFrame::Type CompilationInfo::GetOutputStackFrameType() const {
+  switch (output_code_kind()) {
+    case Code::STUB:
+    case Code::BYTECODE_HANDLER:
+    case Code::HANDLER:
+    case Code::BUILTIN:
+#define CASE_KIND(kind) case Code::kind:
+      IC_KIND_LIST(CASE_KIND)
+#undef CASE_KIND
+      return StackFrame::STUB;
+    case Code::WASM_FUNCTION:
+      return StackFrame::WASM;
+    case Code::JS_TO_WASM_FUNCTION:
+      return StackFrame::JS_TO_WASM;
+    case Code::WASM_TO_JS_FUNCTION:
+      return StackFrame::WASM_TO_JS;
+    default:
+      UNIMPLEMENTED();
+      return StackFrame::NONE;
+  }
+}
+
+int CompilationInfo::GetDeclareGlobalsFlags() const {
+  DCHECK(DeclareGlobalsLanguageMode::is_valid(parse_info()->language_mode()));
+  return DeclareGlobalsEvalFlag::encode(parse_info()->is_eval()) |
+         DeclareGlobalsNativeFlag::encode(parse_info()->is_native()) |
+         DeclareGlobalsLanguageMode::encode(parse_info()->language_mode());
+}
+
+SourcePositionTableBuilder::RecordingMode
+CompilationInfo::SourcePositionRecordingMode() const {
+  return parse_info() && parse_info()->is_native()
+             ? SourcePositionTableBuilder::OMIT_SOURCE_POSITIONS
+             : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS;
+}
+
+bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
+  return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
+}
+
+bool CompilationInfo::has_context() const { return !closure().is_null(); }
+
+Context* CompilationInfo::context() const {
+  return has_context() ? closure()->context() : nullptr;
+}
+
+bool CompilationInfo::has_native_context() const {
+  return !closure().is_null() && (closure()->native_context() != nullptr);
+}
+
+Context* CompilationInfo::native_context() const {
+  return has_native_context() ? closure()->native_context() : nullptr;
+}
+
+bool CompilationInfo::has_global_object() const { return has_native_context(); }
+
+JSGlobalObject* CompilationInfo::global_object() const {
+  return has_global_object() ? native_context()->global_object() : nullptr;
+}
+
+void CompilationInfo::SetOptimizing() {
+  DCHECK(has_shared_info());
+  SetMode(OPTIMIZE);
+  optimization_id_ = isolate()->NextOptimizationId();
+  code_flags_ = Code::KindField::update(code_flags_, Code::OPTIMIZED_FUNCTION);
+}
+
+void CompilationInfo::AddInlinedFunction(
+    Handle<SharedFunctionInfo> inlined_function) {
+  inlined_functions_.push_back(InlinedFunctionHolder(
+      inlined_function, handle(inlined_function->code())));
+}
+
+Code::Kind CompilationInfo::output_code_kind() const {
+  return Code::ExtractKindFromFlags(code_flags_);
+}
+
+}  // namespace internal
+}  // namespace v8
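
Reading aid (not part of the patch): the PARSE_INFO_GETTER macros at the top of compilation-info.cc simply forward to the attached ParseInfo. For example, PARSE_INFO_GETTER(Handle<Script>, script) expands to roughly:

Handle<Script> CompilationInfo::script() const {
  CHECK(parse_info());            // A CompilationInfo without a ParseInfo
  return parse_info()->script();  // has no script to report.
}

PARSE_INFO_GETTER_WITH_DEFAULT additionally tolerates a missing ParseInfo by returning the supplied default, e.g. nullptr for scope().
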
diff --git a/src/compilation-info.h b/src/compilation-info.h
new file mode 100644
index 0000000..88477ae
--- /dev/null
+++ b/src/compilation-info.h
@@ -0,0 +1,400 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILATION_INFO_H_
+#define V8_COMPILATION_INFO_H_
+
+#include <memory>
+
+#include "src/compilation-dependencies.h"
+#include "src/frames.h"
+#include "src/handles.h"
+#include "src/objects.h"
+#include "src/source-position-table.h"
+#include "src/utils.h"
+#include "src/vector.h"
+
+namespace v8 {
+namespace internal {
+
+class DeclarationScope;
+class DeferredHandles;
+class FunctionLiteral;
+class JavaScriptFrame;
+class ParseInfo;
+class Isolate;
+class Zone;
+
+// CompilationInfo encapsulates some information known at compile time. It is
+// constructed based on the resources available at compile time.
+class CompilationInfo final {
+ public:
+  // Various configuration flags for a compilation, as well as some properties
+  // of the compiled code produced by a compilation.
+  enum Flag {
+    kDeferredCalling = 1 << 0,
+    kNonDeferredCalling = 1 << 1,
+    kSavesCallerDoubles = 1 << 2,
+    kRequiresFrame = 1 << 3,
+    kMustNotHaveEagerFrame = 1 << 4,
+    kDeoptimizationSupport = 1 << 5,
+    kDebug = 1 << 6,
+    kSerializing = 1 << 7,
+    kFunctionContextSpecializing = 1 << 8,
+    kFrameSpecializing = 1 << 9,
+    kNativeContextSpecializing = 1 << 10,
+    kInliningEnabled = 1 << 11,
+    kDisableFutureOptimization = 1 << 12,
+    kSplittingEnabled = 1 << 13,
+    kDeoptimizationEnabled = 1 << 14,
+    kSourcePositionsEnabled = 1 << 15,
+    kBailoutOnUninitialized = 1 << 16,
+    kOptimizeFromBytecode = 1 << 17,
+    kTypeFeedbackEnabled = 1 << 18,
+    kAccessorInliningEnabled = 1 << 19,
+  };
+
+  CompilationInfo(ParseInfo* parse_info, Handle<JSFunction> closure);
+  CompilationInfo(Vector<const char> debug_name, Isolate* isolate, Zone* zone,
+                  Code::Flags code_flags);
+  ~CompilationInfo();
+
+  ParseInfo* parse_info() const { return parse_info_; }
+
+  // -----------------------------------------------------------
+  // TODO(titzer): inline and delete accessors of ParseInfo
+  // -----------------------------------------------------------
+  Handle<Script> script() const;
+  FunctionLiteral* literal() const;
+  DeclarationScope* scope() const;
+  Handle<SharedFunctionInfo> shared_info() const;
+  bool has_shared_info() const;
+  // -----------------------------------------------------------
+
+  Isolate* isolate() const { return isolate_; }
+  Zone* zone() { return zone_; }
+  bool is_osr() const { return !osr_ast_id_.IsNone(); }
+  Handle<JSFunction> closure() const { return closure_; }
+  Handle<Code> code() const { return code_; }
+  Code::Flags code_flags() const { return code_flags_; }
+  BailoutId osr_ast_id() const { return osr_ast_id_; }
+  JavaScriptFrame* osr_frame() const { return osr_frame_; }
+  int num_parameters() const;
+  int num_parameters_including_this() const;
+  bool is_this_defined() const;
+
+  void set_parameter_count(int parameter_count) {
+    DCHECK(IsStub());
+    parameter_count_ = parameter_count;
+  }
+
+  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
+  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }
+
+  bool is_calling() const {
+    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
+  }
+
+  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }
+
+  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }
+
+  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }
+
+  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }
+
+  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }
+
+  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }
+
+  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }
+
+  bool requires_frame() const { return GetFlag(kRequiresFrame); }
+
+  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }
+
+  bool GetMustNotHaveEagerFrame() const {
+    return GetFlag(kMustNotHaveEagerFrame);
+  }
+
+  // Compilations marked as debug produce unoptimized code with debug break
+  // slots. Inner functions that cannot be compiled without a context are
+  // compiled eagerly. Always include deoptimization support to avoid having
+  // to recompile again.
+  void MarkAsDebug() {
+    SetFlag(kDebug);
+    SetFlag(kDeoptimizationSupport);
+  }
+
+  bool is_debug() const { return GetFlag(kDebug); }
+
+  void PrepareForSerializing() { SetFlag(kSerializing); }
+
+  bool will_serialize() const { return GetFlag(kSerializing); }
+
+  void MarkAsFunctionContextSpecializing() {
+    SetFlag(kFunctionContextSpecializing);
+  }
+
+  bool is_function_context_specializing() const {
+    return GetFlag(kFunctionContextSpecializing);
+  }
+
+  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }
+
+  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }
+
+  void MarkAsNativeContextSpecializing() {
+    SetFlag(kNativeContextSpecializing);
+  }
+
+  bool is_native_context_specializing() const {
+    return GetFlag(kNativeContextSpecializing);
+  }
+
+  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }
+
+  bool is_deoptimization_enabled() const {
+    return GetFlag(kDeoptimizationEnabled);
+  }
+
+  void MarkAsTypeFeedbackEnabled() { SetFlag(kTypeFeedbackEnabled); }
+
+  bool is_type_feedback_enabled() const {
+    return GetFlag(kTypeFeedbackEnabled);
+  }
+
+  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
+
+  bool is_accessor_inlining_enabled() const {
+    return GetFlag(kAccessorInliningEnabled);
+  }
+
+  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
+
+  bool is_source_positions_enabled() const {
+    return GetFlag(kSourcePositionsEnabled);
+  }
+
+  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
+
+  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
+
+  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
+
+  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
+
+  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
+
+  bool is_bailout_on_uninitialized() const {
+    return GetFlag(kBailoutOnUninitialized);
+  }
+
+  void MarkAsOptimizeFromBytecode() { SetFlag(kOptimizeFromBytecode); }
+
+  bool is_optimizing_from_bytecode() const {
+    return GetFlag(kOptimizeFromBytecode);
+  }
+
+  bool GeneratePreagedPrologue() const {
+    // Generate a pre-aged prologue if we are optimizing for size, which
+    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
+    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
+    return FLAG_optimize_for_size && FLAG_age_code && !is_debug() &&
+           output_code_kind() == Code::FUNCTION;
+  }
+
+  void SetCode(Handle<Code> code) { code_ = code; }
+
+  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
+    bytecode_array_ = bytecode_array;
+  }
+
+  bool ShouldTrapOnDeopt() const {
+    return (FLAG_trap_on_deopt && IsOptimizing()) ||
+           (FLAG_trap_on_stub_deopt && IsStub());
+  }
+
+  bool has_context() const;
+  Context* context() const;
+
+  bool has_native_context() const;
+  Context* native_context() const;
+
+  bool has_global_object() const;
+  JSGlobalObject* global_object() const;
+
+  // Accessors for the different compilation modes.
+  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
+  bool IsStub() const { return mode_ == STUB; }
+  void SetOptimizing();
+  void SetOptimizingForOsr(BailoutId osr_ast_id, JavaScriptFrame* osr_frame) {
+    SetOptimizing();
+    osr_ast_id_ = osr_ast_id;
+    osr_frame_ = osr_frame;
+  }
+
+  // Deoptimization support.
+  bool HasDeoptimizationSupport() const {
+    return GetFlag(kDeoptimizationSupport);
+  }
+  void EnableDeoptimizationSupport() {
+    DCHECK_EQ(BASE, mode_);
+    SetFlag(kDeoptimizationSupport);
+  }
+  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }
+
+  bool ExpectsJSReceiverAsReceiver();
+
+  // Determines whether or not to insert a self-optimization header.
+  bool ShouldSelfOptimize();
+
+  void set_deferred_handles(DeferredHandles* deferred_handles) {
+    DCHECK(deferred_handles_ == NULL);
+    deferred_handles_ = deferred_handles;
+  }
+
+  void ReopenHandlesInNewHandleScope();
+
+  void AbortOptimization(BailoutReason reason) {
+    DCHECK(reason != kNoReason);
+    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
+    SetFlag(kDisableFutureOptimization);
+  }
+
+  void RetryOptimization(BailoutReason reason) {
+    DCHECK(reason != kNoReason);
+    if (GetFlag(kDisableFutureOptimization)) return;
+    bailout_reason_ = reason;
+  }
+
+  BailoutReason bailout_reason() const { return bailout_reason_; }
+
+  int prologue_offset() const {
+    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
+    return prologue_offset_;
+  }
+
+  void set_prologue_offset(int prologue_offset) {
+    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
+    prologue_offset_ = prologue_offset;
+  }
+
+  CompilationDependencies* dependencies() { return &dependencies_; }
+
+  int optimization_id() const { return optimization_id_; }
+
+  int osr_expr_stack_height() { return osr_expr_stack_height_; }
+  void set_osr_expr_stack_height(int height) {
+    DCHECK(height >= 0);
+    osr_expr_stack_height_ = height;
+  }
+
+  bool has_simple_parameters();
+
+  struct InlinedFunctionHolder {
+    Handle<SharedFunctionInfo> shared_info;
+
+    // Root that holds the unoptimized code of the inlined function alive
+    // (and out of reach of code flushing) until we finish compilation.
+    // Do not remove.
+    Handle<Code> inlined_code_object_root;
+
+    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
+                          Handle<Code> inlined_code_object_root)
+        : shared_info(inlined_shared_info),
+          inlined_code_object_root(inlined_code_object_root) {}
+  };
+
+  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
+  InlinedFunctionList const& inlined_functions() const {
+    return inlined_functions_;
+  }
+
+  void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function);
+
+  std::unique_ptr<char[]> GetDebugName() const;
+
+  Code::Kind output_code_kind() const;
+
+  StackFrame::Type GetOutputStackFrameType() const;
+
+  int GetDeclareGlobalsFlags() const;
+
+  SourcePositionTableBuilder::RecordingMode SourcePositionRecordingMode() const;
+
+ private:
+  // Compilation mode.
+  // BASE is generated by the full codegen, optionally prepared for bailouts.
+  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
+  enum Mode { BASE, OPTIMIZE, STUB };
+
+  CompilationInfo(ParseInfo* parse_info, Vector<const char> debug_name,
+                  Code::Flags code_flags, Mode mode, Isolate* isolate,
+                  Zone* zone);
+
+  ParseInfo* parse_info_;
+  Isolate* isolate_;
+
+  void SetMode(Mode mode) { mode_ = mode; }
+
+  void SetFlag(Flag flag) { flags_ |= flag; }
+
+  void SetFlag(Flag flag, bool value) {
+    flags_ = value ? flags_ | flag : flags_ & ~flag;
+  }
+
+  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
+
+  unsigned flags_;
+
+  Code::Flags code_flags_;
+
+  Handle<JSFunction> closure_;
+
+  // The compiled code.
+  Handle<Code> code_;
+
+  // Compilation mode flag and whether deoptimization is allowed.
+  Mode mode_;
+  BailoutId osr_ast_id_;
+
+  // Holds the bytecode array generated by the interpreter.
+  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
+  // refactored so we no longer need to carry the BytecodeArray around.
+  Handle<BytecodeArray> bytecode_array_;
+
+  // The zone from which the compilation pipeline working on this
+  // CompilationInfo allocates.
+  Zone* zone_;
+
+  DeferredHandles* deferred_handles_;
+
+  // Dependencies for this compilation, e.g. stable maps.
+  CompilationDependencies dependencies_;
+
+  BailoutReason bailout_reason_;
+
+  int prologue_offset_;
+
+  InlinedFunctionList inlined_functions_;
+
+  // Number of parameters used for compilation of stubs that require arguments.
+  int parameter_count_;
+
+  int optimization_id_;
+
+  int osr_expr_stack_height_;
+
+  // The current OSR frame for specialization or {nullptr}.
+  JavaScriptFrame* osr_frame_ = nullptr;
+
+  Vector<const char> debug_name_;
+
+  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILATION_INFO_H_
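
Note (illustration only): the Flag enum above gives each compilation property its own bit, flags_ stores their union, and SetFlag/GetFlag are plain bit operations. A minimal standalone sketch of that pattern follows; the class and flag names here are hypothetical, not V8 API.

#include <cassert>

class FlagSet {
 public:
  // Each flag occupies a distinct bit, as in CompilationInfo::Flag.
  enum Flag {
    kDebug = 1 << 0,
    kSerializing = 1 << 1,
    kInliningEnabled = 1 << 2,
  };

  void SetFlag(Flag flag) { flags_ |= flag; }
  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

 private:
  unsigned flags_ = 0;
};

int main() {
  FlagSet flags;
  flags.SetFlag(FlagSet::kDebug);
  flags.SetFlag(FlagSet::kInliningEnabled, true);
  flags.SetFlag(FlagSet::kInliningEnabled, false);  // Clears the bit again.
  assert(flags.GetFlag(FlagSet::kDebug));
  assert(!flags.GetFlag(FlagSet::kInliningEnabled));
  return 0;
}
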
diff --git a/src/compiler-dispatcher/compiler-dispatcher-job.cc b/src/compiler-dispatcher/compiler-dispatcher-job.cc
index 9237936..96956ae 100644
--- a/src/compiler-dispatcher/compiler-dispatcher-job.cc
+++ b/src/compiler-dispatcher/compiler-dispatcher-job.cc
@@ -5,6 +5,8 @@
 #include "src/compiler-dispatcher/compiler-dispatcher-job.h"
 
 #include "src/assert-scope.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/global-handles.h"
 #include "src/isolate.h"
 #include "src/objects-inl.h"
@@ -12,21 +14,22 @@
 #include "src/parsing/parser.h"
 #include "src/parsing/scanner-character-streams.h"
 #include "src/unicode-cache.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
 
 CompilerDispatcherJob::CompilerDispatcherJob(Isolate* isolate,
-                                             Handle<JSFunction> function,
+                                             Handle<SharedFunctionInfo> shared,
                                              size_t max_stack_size)
     : isolate_(isolate),
-      function_(Handle<JSFunction>::cast(
-          isolate_->global_handles()->Create(*function))),
-      max_stack_size_(max_stack_size) {
+      shared_(Handle<SharedFunctionInfo>::cast(
+          isolate_->global_handles()->Create(*shared))),
+      max_stack_size_(max_stack_size),
+      can_compile_on_background_thread_(false) {
   HandleScope scope(isolate_);
-  Handle<SharedFunctionInfo> shared(function_->shared(), isolate_);
-  Handle<Script> script(Script::cast(shared->script()), isolate_);
+  DCHECK(!shared_->outer_scope_info()->IsTheHole(isolate_));
+  Handle<Script> script(Script::cast(shared_->script()), isolate_);
   Handle<String> source(String::cast(script->source()), isolate_);
   can_parse_on_background_thread_ =
       source->IsExternalTwoByteString() || source->IsExternalOneByteString();
@@ -36,7 +39,7 @@
   DCHECK(ThreadId::Current().Equals(isolate_->thread_id()));
   DCHECK(status_ == CompileJobStatus::kInitial ||
          status_ == CompileJobStatus::kDone);
-  i::GlobalHandles::Destroy(Handle<Object>::cast(function_).location());
+  i::GlobalHandles::Destroy(Handle<Object>::cast(shared_).location());
 }
 
 void CompilerDispatcherJob::PrepareToParseOnMainThread() {
@@ -45,46 +48,42 @@
   HandleScope scope(isolate_);
   unicode_cache_.reset(new UnicodeCache());
   zone_.reset(new Zone(isolate_->allocator()));
-  Handle<SharedFunctionInfo> shared(function_->shared(), isolate_);
-  Handle<Script> script(Script::cast(shared->script()), isolate_);
+  Handle<Script> script(Script::cast(shared_->script()), isolate_);
   DCHECK(script->type() != Script::TYPE_NATIVE);
 
   Handle<String> source(String::cast(script->source()), isolate_);
-  if (source->IsExternalTwoByteString()) {
-    character_stream_.reset(new ExternalTwoByteStringUtf16CharacterStream(
-        Handle<ExternalTwoByteString>::cast(source), shared->start_position(),
-        shared->end_position()));
-  } else if (source->IsExternalOneByteString()) {
-    character_stream_.reset(new ExternalOneByteStringUtf16CharacterStream(
-        Handle<ExternalOneByteString>::cast(source), shared->start_position(),
-        shared->end_position()));
+  if (source->IsExternalTwoByteString() || source->IsExternalOneByteString()) {
+    character_stream_.reset(ScannerStream::For(
+        source, shared_->start_position(), shared_->end_position()));
   } else {
     source = String::Flatten(source);
     // Have to globalize the reference here, so it survives between function
     // calls.
     source_ = Handle<String>::cast(isolate_->global_handles()->Create(*source));
-    character_stream_.reset(new GenericStringUtf16CharacterStream(
-        source_, shared->start_position(), shared->end_position()));
+    character_stream_.reset(ScannerStream::For(
+        source_, shared_->start_position(), shared_->end_position()));
   }
   parse_info_.reset(new ParseInfo(zone_.get()));
   parse_info_->set_isolate(isolate_);
   parse_info_->set_character_stream(character_stream_.get());
   parse_info_->set_lazy();
   parse_info_->set_hash_seed(isolate_->heap()->HashSeed());
-  parse_info_->set_is_named_expression(shared->is_named_expression());
-  parse_info_->set_calls_eval(shared->scope_info()->CallsEval());
-  parse_info_->set_compiler_hints(shared->compiler_hints());
-  parse_info_->set_start_position(shared->start_position());
-  parse_info_->set_end_position(shared->end_position());
+  parse_info_->set_is_named_expression(shared_->is_named_expression());
+  parse_info_->set_compiler_hints(shared_->compiler_hints());
+  parse_info_->set_start_position(shared_->start_position());
+  parse_info_->set_end_position(shared_->end_position());
   parse_info_->set_unicode_cache(unicode_cache_.get());
-  parse_info_->set_language_mode(shared->language_mode());
+  parse_info_->set_language_mode(shared_->language_mode());
 
   parser_.reset(new Parser(parse_info_.get()));
-  parser_->DeserializeScopeChain(
-      parse_info_.get(), handle(function_->context(), isolate_),
-      Scope::DeserializationMode::kDeserializeOffHeap);
+  Handle<ScopeInfo> outer_scope_info(
+      handle(ScopeInfo::cast(shared_->outer_scope_info())));
+  parser_->DeserializeScopeChain(parse_info_.get(),
+                                 outer_scope_info->length() > 0
+                                     ? MaybeHandle<ScopeInfo>(outer_scope_info)
+                                     : MaybeHandle<ScopeInfo>());
 
-  Handle<String> name(String::cast(shared->name()));
+  Handle<String> name(String::cast(shared_->name()));
   parse_info_->set_function_name(
       parse_info_->ast_value_factory()->GetString(name));
   status_ = CompileJobStatus::kReadyToParse;
@@ -108,8 +107,7 @@
   // use it.
   parse_info_->set_isolate(nullptr);
 
-  uintptr_t stack_limit =
-      reinterpret_cast<uintptr_t>(&stack_limit) - max_stack_size_ * KB;
+  uintptr_t stack_limit = GetCurrentStackPosition() - max_stack_size_ * KB;
 
   parser_->set_stack_limit(stack_limit);
   parser_->ParseOnBackground(parse_info_.get());
@@ -131,25 +129,32 @@
   if (parse_info_->literal() == nullptr) {
     status_ = CompileJobStatus::kFailed;
   } else {
-    status_ = CompileJobStatus::kReadyToCompile;
+    status_ = CompileJobStatus::kReadyToAnalyse;
   }
 
   DeferredHandleScope scope(isolate_);
   {
-    // Create a canonical handle scope before internalizing parsed values if
-    // compiling bytecode. This is required for off-thread bytecode generation.
-    std::unique_ptr<CanonicalHandleScope> canonical;
-    if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate_));
-
-    Handle<SharedFunctionInfo> shared(function_->shared(), isolate_);
-    Handle<Script> script(Script::cast(shared->script()), isolate_);
+    Handle<Script> script(Script::cast(shared_->script()), isolate_);
 
     parse_info_->set_script(script);
-    parse_info_->set_context(handle(function_->context(), isolate_));
+    Handle<ScopeInfo> outer_scope_info(
+        handle(ScopeInfo::cast(shared_->outer_scope_info())));
+    if (outer_scope_info->length() > 0) {
+      parse_info_->set_outer_scope_info(outer_scope_info);
+    }
+    parse_info_->set_shared_info(shared_);
 
-    // Do the parsing tasks which need to be done on the main thread. This will
-    // also handle parse errors.
-    parser_->Internalize(isolate_, script, parse_info_->literal() == nullptr);
+    {
+      // Create a canonical handle scope if compiling ignition bytecode. This is
+      // required by the constant array builder to de-duplicate objects without
+      // dereferencing handles.
+      std::unique_ptr<CanonicalHandleScope> canonical;
+      if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate_));
+
+      // Do the parsing tasks which need to be done on the main thread. This
+      // will also handle parse errors.
+      parser_->Internalize(isolate_, script, parse_info_->literal() == nullptr);
+    }
     parser_->HandleSourceURLComments(isolate_, script);
 
     parse_info_->set_character_stream(nullptr);
@@ -163,6 +168,72 @@
   return status_ != CompileJobStatus::kFailed;
 }
 
+bool CompilerDispatcherJob::PrepareToCompileOnMainThread() {
+  DCHECK(ThreadId::Current().Equals(isolate_->thread_id()));
+  DCHECK(status() == CompileJobStatus::kReadyToAnalyse);
+
+  compile_info_.reset(
+      new CompilationInfo(parse_info_.get(), Handle<JSFunction>::null()));
+
+  DeferredHandleScope scope(isolate_);
+  if (Compiler::Analyze(parse_info_.get())) {
+    compile_job_.reset(
+        Compiler::PrepareUnoptimizedCompilationJob(compile_info_.get()));
+  }
+  compile_info_->set_deferred_handles(scope.Detach());
+
+  if (!compile_job_.get()) {
+    if (!isolate_->has_pending_exception()) isolate_->StackOverflow();
+    status_ = CompileJobStatus::kFailed;
+    return false;
+  }
+
+  can_compile_on_background_thread_ =
+      compile_job_->can_execute_on_background_thread();
+  status_ = CompileJobStatus::kReadyToCompile;
+  return true;
+}
+
+void CompilerDispatcherJob::Compile() {
+  DCHECK(status() == CompileJobStatus::kReadyToCompile);
+  DCHECK(can_compile_on_background_thread_ ||
+         ThreadId::Current().Equals(isolate_->thread_id()));
+
+  // Disallowing of handle dereference and heap access dealt with in
+  // CompilationJob::ExecuteJob.
+
+  uintptr_t stack_limit = GetCurrentStackPosition() - max_stack_size_ * KB;
+  compile_job_->set_stack_limit(stack_limit);
+
+  CompilationJob::Status status = compile_job_->ExecuteJob();
+  USE(status);
+
+  // Always transition to kCompiled - errors will be reported by
+  // FinalizeCompilingOnMainThread.
+  status_ = CompileJobStatus::kCompiled;
+}
+
+bool CompilerDispatcherJob::FinalizeCompilingOnMainThread() {
+  DCHECK(ThreadId::Current().Equals(isolate_->thread_id()));
+  DCHECK(status() == CompileJobStatus::kCompiled);
+
+  if (compile_job_->state() == CompilationJob::State::kFailed ||
+      !Compiler::FinalizeCompilationJob(compile_job_.release())) {
+    if (!isolate_->has_pending_exception()) isolate_->StackOverflow();
+    status_ = CompileJobStatus::kFailed;
+    return false;
+  }
+
+  zone_.reset();
+  parse_info_.reset();
+  compile_info_.reset();
+  compile_job_.reset();
+  handles_from_parsing_.reset();
+
+  status_ = CompileJobStatus::kDone;
+  return true;
+}
+
 void CompilerDispatcherJob::ResetOnMainThread() {
   DCHECK(ThreadId::Current().Equals(isolate_->thread_id()));
 
@@ -172,6 +243,8 @@
   parse_info_.reset();
   zone_.reset();
   handles_from_parsing_.reset();
+  compile_info_.reset();
+  compile_job_.reset();
 
   if (!source_.is_null()) {
     i::GlobalHandles::Destroy(Handle<Object>::cast(source_).location());
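
One detail worth calling out in compiler-dispatcher-job.cc: both Parse() and Compile() now derive their stack limit as GetCurrentStackPosition() minus max_stack_size_ * KB, replacing the earlier trick of taking the address of a local variable. A standalone sketch of that computation follows (illustration only; ApproximateStackPosition is a hypothetical stand-in for V8's GetCurrentStackPosition, and the stack is assumed to grow downwards).

#include <cstddef>
#include <cstdint>
#include <cstdio>

static const size_t KB = 1024;

// Hypothetical stand-in: approximate the current stack position with the
// address of a local variable.
uintptr_t ApproximateStackPosition() {
  int probe = 0;
  return reinterpret_cast<uintptr_t>(&probe);
}

int main() {
  const size_t max_stack_size = 64;  // Budget in KB, as in the dispatcher job.
  // The limit lies max_stack_size KB below the current position, since the
  // stack grows towards lower addresses.
  uintptr_t stack_limit = ApproximateStackPosition() - max_stack_size * KB;
  std::printf("stack limit: %p\n", reinterpret_cast<void*>(stack_limit));
  return 0;
}
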
diff --git a/src/compiler-dispatcher/compiler-dispatcher-job.h b/src/compiler-dispatcher/compiler-dispatcher-job.h
index 50414af..f3aaf93 100644
--- a/src/compiler-dispatcher/compiler-dispatcher-job.h
+++ b/src/compiler-dispatcher/compiler-dispatcher-job.h
@@ -15,10 +15,11 @@
 namespace internal {
 
 class CompilationInfo;
+class CompilationJob;
 class Isolate;
-class JSFunction;
 class ParseInfo;
 class Parser;
+class SharedFunctionInfo;
 class String;
 class UnicodeCache;
 class Utf16CharacterStream;
@@ -28,14 +29,16 @@
   kInitial,
   kReadyToParse,
   kParsed,
+  kReadyToAnalyse,
   kReadyToCompile,
+  kCompiled,
   kFailed,
   kDone,
 };
 
 class CompilerDispatcherJob {
  public:
-  CompilerDispatcherJob(Isolate* isolate, Handle<JSFunction> function,
+  CompilerDispatcherJob(Isolate* isolate, Handle<SharedFunctionInfo> shared,
                         size_t max_stack_size);
   ~CompilerDispatcherJob();
 
@@ -43,6 +46,11 @@
   bool can_parse_on_background_thread() const {
     return can_parse_on_background_thread_;
   }
+  // Should only be called after kReadyToCompile.
+  bool can_compile_on_background_thread() const {
+    DCHECK(compile_job_.get());
+    return can_compile_on_background_thread_;
+  }
 
   // Transition from kInitial to kReadyToParse.
   void PrepareToParseOnMainThread();
@@ -50,10 +58,21 @@
   // Transition from kReadyToParse to kParsed.
   void Parse();
 
-  // Transition from kParsed to kReadyToCompile (or kFailed). Returns false
+  // Transition from kParsed to kReadyToAnalyse (or kFailed). Returns false
   // when transitioning to kFailed. In that case, an exception is pending.
   bool FinalizeParsingOnMainThread();
 
+  // Transition from kReadyToAnalyse to kReadyToCompile (or kFailed). Returns
+  // false when transitioning to kFailed. In that case, an exception is pending.
+  bool PrepareToCompileOnMainThread();
+
+  // Transition from kReadyToCompile to kCompiled.
+  void Compile();
+
+  // Transition from kCompiled to kDone (or kFailed). Returns false when
+  // transitioning to kFailed. In that case, an exception is pending.
+  bool FinalizeCompilingOnMainThread();
+
   // Transition from any state to kInitial and free all resources.
   void ResetOnMainThread();
 
@@ -62,7 +81,7 @@
 
   CompileJobStatus status_ = CompileJobStatus::kInitial;
   Isolate* isolate_;
-  Handle<JSFunction> function_;  // Global handle.
+  Handle<SharedFunctionInfo> shared_;  // Global handle.
   Handle<String> source_;        // Global handle.
   size_t max_stack_size_;
 
@@ -74,7 +93,12 @@
   std::unique_ptr<Parser> parser_;
   std::unique_ptr<DeferredHandles> handles_from_parsing_;
 
+  // Members required for compiling.
+  std::unique_ptr<CompilationInfo> compile_info_;
+  std::unique_ptr<CompilationJob> compile_job_;
+
   bool can_parse_on_background_thread_;
+  bool can_compile_on_background_thread_;
 
   DISALLOW_COPY_AND_ASSIGN(CompilerDispatcherJob);
 };
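
Putting the new states and methods together, a job now walks kInitial -> kReadyToParse -> kParsed -> kReadyToAnalyse -> kReadyToCompile -> kCompiled -> kDone, with kFailed as the error sink. A hedged sketch of how a dispatcher might drive one job to completion (illustration only, not buildable outside the V8 tree; the main-thread steps must run on the isolate's thread, while Parse() and Compile() may run on a background thread when the matching can_*_on_background_thread() predicate holds):

void RunJobToCompletion(CompilerDispatcherJob* job) {
  job->PrepareToParseOnMainThread();           // kInitial        -> kReadyToParse
  job->Parse();                                // kReadyToParse   -> kParsed
  if (!job->FinalizeParsingOnMainThread()) {   // kParsed         -> kReadyToAnalyse or kFailed
    return;  // An exception is pending on the isolate.
  }
  if (!job->PrepareToCompileOnMainThread()) {  // kReadyToAnalyse -> kReadyToCompile or kFailed
    return;
  }
  job->Compile();                              // kReadyToCompile -> kCompiled
  job->FinalizeCompilingOnMainThread();        // kCompiled       -> kDone or kFailed
}
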
diff --git a/src/compiler-dispatcher/optimizing-compile-dispatcher.cc b/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
index be81047..75c50ee 100644
--- a/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
+++ b/src/compiler-dispatcher/optimizing-compile-dispatcher.cc
@@ -5,6 +5,8 @@
 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
 
 #include "src/base/atomicops.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/full-codegen/full-codegen.h"
 #include "src/isolate.h"
 #include "src/tracing/trace-event.h"
diff --git a/src/compiler.cc b/src/compiler.cc
index 9a5afe9..ec402fa 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -40,33 +40,11 @@
 namespace internal {
 
 
-#define PARSE_INFO_GETTER(type, name)  \
-  type CompilationInfo::name() const { \
-    CHECK(parse_info());               \
-    return parse_info()->name();       \
-  }
-
-
-#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
-  type CompilationInfo::name() const {                  \
-    return parse_info() ? parse_info()->name() : def;   \
-  }
-
-
-PARSE_INFO_GETTER(Handle<Script>, script)
-PARSE_INFO_GETTER(FunctionLiteral*, literal)
-PARSE_INFO_GETTER_WITH_DEFAULT(DeclarationScope*, scope, nullptr)
-PARSE_INFO_GETTER_WITH_DEFAULT(Handle<Context>, context,
-                               Handle<Context>::null())
-PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)
-
-#undef PARSE_INFO_GETTER
-#undef PARSE_INFO_GETTER_WITH_DEFAULT
 
 // A wrapper around a CompilationInfo that detaches the Handles from
 // the underlying DeferredHandleScope and stores them in info_ on
 // destruction.
-class CompilationHandleScope BASE_EMBEDDED {
+class CompilationHandleScope final {
  public:
   explicit CompilationHandleScope(CompilationInfo* info)
       : deferred_(info->isolate()), info_(info) {}
@@ -91,154 +69,6 @@
 };
 
 // ----------------------------------------------------------------------------
-// Implementation of CompilationInfo
-
-bool CompilationInfo::has_shared_info() const {
-  return parse_info_ && !parse_info_->shared_info().is_null();
-}
-
-CompilationInfo::CompilationInfo(ParseInfo* parse_info,
-                                 Handle<JSFunction> closure)
-    : CompilationInfo(parse_info, {}, Code::ComputeFlags(Code::FUNCTION), BASE,
-                      parse_info->isolate(), parse_info->zone()) {
-  closure_ = closure;
-
-  // Compiling for the snapshot typically results in different code than
-  // compiling later on. This means that code recompiled with deoptimization
-  // support won't be "equivalent" (as defined by SharedFunctionInfo::
-  // EnableDeoptimizationSupport), so it will replace the old code and all
-  // its type feedback. To avoid this, always compile functions in the snapshot
-  // with deoptimization support.
-  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();
-
-  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
-  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
-  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
-  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
-}
-
-CompilationInfo::CompilationInfo(Vector<const char> debug_name,
-                                 Isolate* isolate, Zone* zone,
-                                 Code::Flags code_flags)
-    : CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}
-
-CompilationInfo::CompilationInfo(ParseInfo* parse_info,
-                                 Vector<const char> debug_name,
-                                 Code::Flags code_flags, Mode mode,
-                                 Isolate* isolate, Zone* zone)
-    : parse_info_(parse_info),
-      isolate_(isolate),
-      flags_(0),
-      code_flags_(code_flags),
-      mode_(mode),
-      osr_ast_id_(BailoutId::None()),
-      zone_(zone),
-      deferred_handles_(nullptr),
-      dependencies_(isolate, zone),
-      bailout_reason_(kNoReason),
-      prologue_offset_(Code::kPrologueOffsetNotSet),
-      track_positions_(FLAG_hydrogen_track_positions ||
-                       isolate->is_profiling()),
-      parameter_count_(0),
-      optimization_id_(-1),
-      osr_expr_stack_height_(0),
-      debug_name_(debug_name) {}
-
-CompilationInfo::~CompilationInfo() {
-  if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
-    shared_info()->DisableOptimization(bailout_reason());
-  }
-  dependencies()->Rollback();
-  delete deferred_handles_;
-}
-
-
-int CompilationInfo::num_parameters() const {
-  return !IsStub() ? scope()->num_parameters() : parameter_count_;
-}
-
-
-int CompilationInfo::num_parameters_including_this() const {
-  return num_parameters() + (is_this_defined() ? 1 : 0);
-}
-
-
-bool CompilationInfo::is_this_defined() const { return !IsStub(); }
-
-
-// Primitive functions are unlikely to be picked up by the stack-walking
-// profiler, so they trigger their own optimization when they're called
-// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
-bool CompilationInfo::ShouldSelfOptimize() {
-  return FLAG_crankshaft &&
-         !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
-         !literal()->dont_optimize() &&
-         literal()->scope()->AllowsLazyCompilation() &&
-         !shared_info()->optimization_disabled();
-}
-
-
-bool CompilationInfo::has_simple_parameters() {
-  return scope()->has_simple_parameters();
-}
-
-std::unique_ptr<char[]> CompilationInfo::GetDebugName() const {
-  if (parse_info() && parse_info()->literal()) {
-    AllowHandleDereference allow_deref;
-    return parse_info()->literal()->debug_name()->ToCString();
-  }
-  if (parse_info() && !parse_info()->shared_info().is_null()) {
-    return parse_info()->shared_info()->DebugName()->ToCString();
-  }
-  Vector<const char> name_vec = debug_name_;
-  if (name_vec.is_empty()) name_vec = ArrayVector("unknown");
-  std::unique_ptr<char[]> name(new char[name_vec.length() + 1]);
-  memcpy(name.get(), name_vec.start(), name_vec.length());
-  name[name_vec.length()] = '\0';
-  return name;
-}
-
-StackFrame::Type CompilationInfo::GetOutputStackFrameType() const {
-  switch (output_code_kind()) {
-    case Code::STUB:
-    case Code::BYTECODE_HANDLER:
-    case Code::HANDLER:
-    case Code::BUILTIN:
-#define CASE_KIND(kind) case Code::kind:
-      IC_KIND_LIST(CASE_KIND)
-#undef CASE_KIND
-      return StackFrame::STUB;
-    case Code::WASM_FUNCTION:
-      return StackFrame::WASM;
-    case Code::JS_TO_WASM_FUNCTION:
-      return StackFrame::JS_TO_WASM;
-    case Code::WASM_TO_JS_FUNCTION:
-      return StackFrame::WASM_TO_JS;
-    default:
-      UNIMPLEMENTED();
-      return StackFrame::NONE;
-  }
-}
-
-int CompilationInfo::GetDeclareGlobalsFlags() const {
-  DCHECK(DeclareGlobalsLanguageMode::is_valid(parse_info()->language_mode()));
-  return DeclareGlobalsEvalFlag::encode(parse_info()->is_eval()) |
-         DeclareGlobalsNativeFlag::encode(parse_info()->is_native()) |
-         DeclareGlobalsLanguageMode::encode(parse_info()->language_mode());
-}
-
-SourcePositionTableBuilder::RecordingMode
-CompilationInfo::SourcePositionRecordingMode() const {
-  return parse_info() && parse_info()->is_native()
-             ? SourcePositionTableBuilder::OMIT_SOURCE_POSITIONS
-             : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS;
-}
-
-bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
-  return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
-}
-
-// ----------------------------------------------------------------------------
 // Implementation of CompilationJob
 
 CompilationJob::Status CompilationJob::PrepareJob() {
@@ -260,10 +90,18 @@
 }
 
 CompilationJob::Status CompilationJob::ExecuteJob() {
-  DisallowHeapAllocation no_allocation;
-  DisallowHandleAllocation no_handles;
-  DisallowHandleDereference no_deref;
-  DisallowCodeDependencyChange no_dependency_change;
+  std::unique_ptr<DisallowHeapAllocation> no_allocation;
+  std::unique_ptr<DisallowHandleAllocation> no_handles;
+  std::unique_ptr<DisallowHandleDereference> no_deref;
+  std::unique_ptr<DisallowCodeDependencyChange> no_dependency_change;
+  if (can_execute_on_background_thread()) {
+    no_allocation.reset(new DisallowHeapAllocation());
+    no_handles.reset(new DisallowHandleAllocation());
+    no_deref.reset(new DisallowHandleDereference());
+    no_dependency_change.reset(new DisallowCodeDependencyChange());
+  } else {
+    DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
+  }
 
   // Delegate to the underlying implementation.
   DCHECK(state() == State::kReadyToExecute);
@@ -283,6 +121,73 @@
   return UpdateState(FinalizeJobImpl(), State::kSucceeded);
 }
 
+CompilationJob::Status CompilationJob::RetryOptimization(BailoutReason reason) {
+  DCHECK(info_->IsOptimizing());
+  info_->RetryOptimization(reason);
+  state_ = State::kFailed;
+  return FAILED;
+}
+
+CompilationJob::Status CompilationJob::AbortOptimization(BailoutReason reason) {
+  DCHECK(info_->IsOptimizing());
+  info_->AbortOptimization(reason);
+  state_ = State::kFailed;
+  return FAILED;
+}
+
+void CompilationJob::RecordUnoptimizedCompilationStats() const {
+  int code_size;
+  if (info()->has_bytecode_array()) {
+    code_size = info()->bytecode_array()->SizeIncludingMetadata();
+  } else {
+    code_size = info()->code()->SizeIncludingMetadata();
+  }
+
+  Counters* counters = isolate()->counters();
+  // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
+  counters->total_baseline_code_size()->Increment(code_size);
+  counters->total_baseline_compile_count()->Increment(1);
+
+  // TODO(5203): Add timers for each phase of compilation.
+}
+
+void CompilationJob::RecordOptimizedCompilationStats() const {
+  DCHECK(info()->IsOptimizing());
+  Handle<JSFunction> function = info()->closure();
+  if (!function->IsOptimized()) {
+    // Concurrent recompilation and OSR may race.  Increment only once.
+    int opt_count = function->shared()->opt_count();
+    function->shared()->set_opt_count(opt_count + 1);
+  }
+  double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
+  double ms_optimize = time_taken_to_execute_.InMillisecondsF();
+  double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
+  if (FLAG_trace_opt) {
+    PrintF("[optimizing ");
+    function->ShortPrint();
+    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
+           ms_codegen);
+  }
+  if (FLAG_trace_opt_stats) {
+    static double compilation_time = 0.0;
+    static int compiled_functions = 0;
+    static int code_size = 0;
+
+    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
+    compiled_functions++;
+    code_size += function->shared()->SourceSize();
+    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
+           compiled_functions, code_size, compilation_time);
+  }
+  if (FLAG_hydrogen_stats) {
+    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
+                                                    time_taken_to_execute_,
+                                                    time_taken_to_finalize_);
+  }
+}
+
+Isolate* CompilationJob::isolate() const { return info()->isolate(); }
+
 namespace {
 
 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
@@ -341,41 +246,6 @@
   code->set_can_have_weak_objects(true);
 }
 
-void CompilationJob::RecordOptimizationStats() {
-  DCHECK(info()->IsOptimizing());
-  Handle<JSFunction> function = info()->closure();
-  if (!function->IsOptimized()) {
-    // Concurrent recompilation and OSR may race.  Increment only once.
-    int opt_count = function->shared()->opt_count();
-    function->shared()->set_opt_count(opt_count + 1);
-  }
-  double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
-  double ms_optimize = time_taken_to_execute_.InMillisecondsF();
-  double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
-  if (FLAG_trace_opt) {
-    PrintF("[optimizing ");
-    function->ShortPrint();
-    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
-           ms_codegen);
-  }
-  if (FLAG_trace_opt_stats) {
-    static double compilation_time = 0.0;
-    static int compiled_functions = 0;
-    static int code_size = 0;
-
-    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
-    compiled_functions++;
-    code_size += function->shared()->SourceSize();
-    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
-           compiled_functions, code_size, compilation_time);
-  }
-  if (FLAG_hydrogen_stats) {
-    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
-                                                    time_taken_to_execute_,
-                                                    time_taken_to_finalize_);
-  }
-}
-
 // ----------------------------------------------------------------------------
 // Local helper methods that make up the compilation pipeline.
 
@@ -387,6 +257,16 @@
              Script::COMPILATION_TYPE_EVAL;
 }
 
+bool Parse(ParseInfo* info) {
+  // Create a canonical handle scope if compiling ignition bytecode. This is
+  // required by the constant array builder to de-duplicate objects without
+  // dereferencing handles.
+  std::unique_ptr<CanonicalHandleScope> canonical;
+  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));
+
+  return Parser::ParseStatic(info);
+}
+
 void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
                                CompilationInfo* info) {
   // Log the code generation. If source information is available include
@@ -466,18 +346,24 @@
   return info->shared_info()->PassesFilter(FLAG_ignition_filter);
 }
 
-int CodeAndMetadataSize(CompilationInfo* info) {
-  if (info->has_bytecode_array()) {
-    return info->bytecode_array()->SizeIncludingMetadata();
+CompilationJob* GetUnoptimizedCompilationJob(CompilationInfo* info) {
+  // Function should have been parsed and analyzed before creating a compilation
+  // job.
+  DCHECK_NOT_NULL(info->literal());
+  DCHECK_NOT_NULL(info->scope());
+
+  EnsureFeedbackMetadata(info);
+  if (ShouldUseIgnition(info)) {
+    return interpreter::Interpreter::NewCompilationJob(info);
+  } else {
+    return FullCodeGenerator::NewCompilationJob(info);
   }
-  return info->code()->SizeIncludingMetadata();
 }
 
 bool GenerateUnoptimizedCode(CompilationInfo* info) {
-  bool success;
-  EnsureFeedbackMetadata(info);
   if (FLAG_validate_asm && info->scope()->asm_module() &&
       !info->shared_info()->is_asm_wasm_broken()) {
+    EnsureFeedbackMetadata(info);
     MaybeHandle<FixedArray> wasm_data;
     wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info());
     if (!wasm_data.is_null()) {
@@ -486,19 +372,13 @@
       return true;
     }
   }
-  if (ShouldUseIgnition(info)) {
-    success = interpreter::Interpreter::MakeBytecode(info);
-  } else {
-    success = FullCodeGenerator::MakeCode(info);
-  }
-  if (success) {
-    Isolate* isolate = info->isolate();
-    Counters* counters = isolate->counters();
-    // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
-    counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
-    counters->total_baseline_compile_count()->Increment(1);
-  }
-  return success;
+
+  std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
+  if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
+  if (job->ExecuteJob() != CompilationJob::SUCCEEDED) return false;
+  if (job->FinalizeJob() != CompilationJob::SUCCEEDED) return false;
+  job->RecordUnoptimizedCompilationStats();
+  return true;
 }
 
 bool CompileUnoptimizedCode(CompilationInfo* info) {
@@ -514,8 +394,12 @@
 
 void InstallSharedScopeInfo(CompilationInfo* info,
                             Handle<SharedFunctionInfo> shared) {
-  Handle<ScopeInfo> scope_info = info->scope()->GetScopeInfo(info->isolate());
+  Handle<ScopeInfo> scope_info = info->scope()->scope_info();
   shared->set_scope_info(*scope_info);
+  Scope* outer_scope = info->scope()->GetOuterScopeWithContext();
+  if (outer_scope) {
+    shared->set_outer_scope_info(*outer_scope->scope_info());
+  }
 }
 
 void InstallSharedCompilationResult(CompilationInfo* info,
@@ -534,22 +418,8 @@
   }
 }
 
-MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
-  VMState<COMPILER> state(info->isolate());
-  PostponeInterruptsScope postpone(info->isolate());
-
-  // Create a canonical handle scope before internalizing parsed values if
-  // compiling bytecode. This is required for off-thread bytecode generation.
-  std::unique_ptr<CanonicalHandleScope> canonical;
-  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));
-
-  // Parse and update CompilationInfo with the results.
-  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
+void InstallUnoptimizedCode(CompilationInfo* info) {
   Handle<SharedFunctionInfo> shared = info->shared_info();
-  DCHECK_EQ(shared->language_mode(), info->literal()->language_mode());
-
-  // Compile either unoptimized code or bytecode for the interpreter.
-  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
 
   // Update the shared function info with the scope info.
   InstallSharedScopeInfo(info, shared);
@@ -559,10 +429,35 @@
 
   // Record the function compilation event.
   RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
+}
+
+MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
+  VMState<COMPILER> state(info->isolate());
+  PostponeInterruptsScope postpone(info->isolate());
+
+  // Parse and update CompilationInfo with the results.
+  if (!Parse(info->parse_info())) return MaybeHandle<Code>();
+  DCHECK_EQ(info->shared_info()->language_mode(),
+            info->literal()->language_mode());
+
+  // Compile either unoptimized code or bytecode for the interpreter.
+  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
+
+  InstallUnoptimizedCode(info);
 
   return info->code();
 }
 
+CompilationJob::Status FinalizeUnoptimizedCompilationJob(CompilationJob* job) {
+  CompilationJob::Status status = job->FinalizeJob();
+  if (status == CompilationJob::SUCCEEDED) {
+    DCHECK(!job->info()->shared_info()->is_compiled());
+    InstallUnoptimizedCode(job->info());
+    job->RecordUnoptimizedCompilationStats();
+  }
+  return status;
+}
+
 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
   Handle<SharedFunctionInfo> shared(function->shared());
@@ -615,6 +510,14 @@
 }
 
 bool Renumber(ParseInfo* parse_info) {
+  // Create a canonical handle scope if compiling ignition bytecode. This is
+  // required by the constant array builder to de-duplicate objects without
+  // dereferencing handles.
+  std::unique_ptr<CanonicalHandleScope> canonical;
+  if (FLAG_ignition) {
+    canonical.reset(new CanonicalHandleScope(parse_info->isolate()));
+  }
+
   if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
                               parse_info->literal())) {
     return false;
@@ -669,8 +572,8 @@
   TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::RecompileSynchronous);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+               "V8.RecompileSynchronous");
 
   if (job->PrepareJob() != CompilationJob::SUCCEEDED ||
       job->ExecuteJob() != CompilationJob::SUCCEEDED ||
@@ -684,7 +587,7 @@
   }
 
   // Success!
-  job->RecordOptimizationStats();
+  job->RecordOptimizedCompilationStats();
   DCHECK(!isolate->has_pending_exception());
   InsertCodeIntoOptimizedCodeMap(info);
   RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
@@ -713,10 +616,6 @@
     return false;
   }
 
-  // All handles below this point will be allocated in a deferred handle scope
-  // that is detached and handed off to the background thread when we return.
-  CompilationHandleScope handle_scope(info);
-
   // Parsing is not required when optimizing from existing bytecode.
   if (!info->is_optimizing_from_bytecode()) {
     if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
@@ -725,15 +624,11 @@
 
   JSFunction::EnsureLiterals(info->closure());
 
-  // Reopen handles in the new CompilationHandleScope.
-  info->ReopenHandlesInNewHandleScope();
-  info->parse_info()->ReopenHandlesInNewHandleScope();
-
   TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
   RuntimeCallTimerScope runtimeTimer(info->isolate(),
                                      &RuntimeCallStats::RecompileSynchronous);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+               "V8.RecompileSynchronous");
 
   if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
   isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);
@@ -808,14 +703,13 @@
     return MaybeHandle<Code>();
   }
 
-  CanonicalHandleScope canonical(isolate);
   TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
   RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::OptimizeCode);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeCode");
 
   // TurboFan can optimize directly from existing bytecode.
   if (FLAG_turbo_from_bytecode && use_turbofan && ShouldUseIgnition(info)) {
+    if (info->is_osr() && !ignition_osr) return MaybeHandle<Code>();
     if (!Compiler::EnsureBytecode(info)) {
       if (isolate->has_pending_exception()) isolate->clear_pending_exception();
       return MaybeHandle<Code>();
@@ -831,6 +725,32 @@
     parse_info->set_lazy(false);
   }
 
+  // Verify that OSR compilations are delegated to the correct graph builder.
+  // Depending on the underlying frame the semantics of the {BailoutId} differ
+  // and the various graph builders hard-code a certain semantic:
+  //  - Interpreter : The BailoutId represents a bytecode offset.
+  //  - FullCodegen : The BailoutId represents the id of an AST node.
+  DCHECK_IMPLIES(info->is_osr() && ignition_osr,
+                 info->is_optimizing_from_bytecode());
+  DCHECK_IMPLIES(info->is_osr() && !ignition_osr,
+                 !info->is_optimizing_from_bytecode());
+
+  // In case of concurrent recompilation, all handles below this point will be
+  // allocated in a deferred handle scope that is detached and handed off to
+  // the background thread when we return.
+  std::unique_ptr<CompilationHandleScope> compilation;
+  if (mode == Compiler::CONCURRENT) {
+    compilation.reset(new CompilationHandleScope(info));
+  }
+
+  // In case of TurboFan, all handles below will be canonicalized.
+  std::unique_ptr<CanonicalHandleScope> canonical;
+  if (use_turbofan) canonical.reset(new CanonicalHandleScope(info->isolate()));
+
+  // Reopen handles in the new CompilationHandleScope.
+  info->ReopenHandlesInNewHandleScope();
+  parse_info->ReopenHandlesInNewHandleScope();
+
   if (mode == Compiler::CONCURRENT) {
     if (GetOptimizedCodeLater(job.get())) {
       job.release();  // The background recompile job owns this now.
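
In the concurrent path the main thread allocates the handles in a detachable scope and then gives up ownership of the job (job.release()) once it is queued for the background recompile thread. A rough sketch of that ownership hand-off, using std::thread in place of V8's dispatcher (all names invented):

#include <iostream>
#include <memory>
#include <thread>

struct Job {
  int id;
  void Execute() { std::cout << "executing job " << id << "\n"; }
};

// The "dispatcher" takes ownership of the raw pointer it is handed.
void RunOnBackgroundThread(Job* raw_job) {
  std::thread worker([raw_job] {
    std::unique_ptr<Job> job(raw_job);  // background thread owns the job now
    job->Execute();
  });
  worker.join();  // joined here only to keep the sketch deterministic
}

int main() {
  std::unique_ptr<Job> job(new Job{1});
  RunOnBackgroundThread(job.release());  // main thread relinquishes ownership
}
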
@@ -844,6 +764,60 @@
   return MaybeHandle<Code>();
 }
 
+CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) {
+  CompilationInfo* info = job->info();
+  Isolate* isolate = info->isolate();
+
+  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
+  RuntimeCallTimerScope runtimeTimer(isolate,
+                                     &RuntimeCallStats::RecompileSynchronous);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+               "V8.RecompileSynchronous");
+
+  Handle<SharedFunctionInfo> shared = info->shared_info();
+  shared->code()->set_profiler_ticks(0);
+
+  DCHECK(!shared->HasDebugInfo());
+
+  // 1) Optimization on the concurrent thread may have failed.
+  // 2) The function may have already been optimized by OSR. Simply continue,
+  //    unless OSR already disabled optimization for some reason.
+  // 3) The code may have already been invalidated due to dependency change.
+  // 4) Code generation may have failed.
+  if (job->state() == CompilationJob::State::kReadyToFinalize) {
+    if (shared->optimization_disabled()) {
+      job->RetryOptimization(kOptimizationDisabled);
+    } else if (info->dependencies()->HasAborted()) {
+      job->RetryOptimization(kBailedOutDueToDependencyChange);
+    } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
+      job->RecordOptimizedCompilationStats();
+      RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
+      if (shared
+              ->SearchOptimizedCodeMap(info->context()->native_context(),
+                                       info->osr_ast_id())
+              .code == nullptr) {
+        InsertCodeIntoOptimizedCodeMap(info);
+      }
+      if (FLAG_trace_opt) {
+        PrintF("[completed optimizing ");
+        info->closure()->ShortPrint();
+        PrintF("]\n");
+      }
+      info->closure()->ReplaceCode(*info->code());
+      return CompilationJob::SUCCEEDED;
+    }
+  }
+
+  DCHECK(job->state() == CompilationJob::State::kFailed);
+  if (FLAG_trace_opt) {
+    PrintF("[aborted optimizing ");
+    info->closure()->ShortPrint();
+    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
+  }
+  info->closure()->ReplaceCode(shared->code());
+  return CompilationJob::FAILED;
+}
+
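
FinalizeOptimizedCompilationJob separates transient bailouts (optimization disabled, dependency change) from a successful finalize that installs optimized code, with everything else leaving the closure on its existing shared code. A condensed, standalone view of that branching with invented names (the real code reports the transient cases as a failed job too, but keeps future optimization enabled):

#include <iostream>

enum class Status { kSucceeded, kFailed };

struct FakeJob {
  bool ready_to_finalize;
  bool optimization_disabled;  // transient reason to bail out
  bool dependencies_aborted;   // transient reason to bail out
  bool codegen_ok;
  bool retry_later;            // set when the failure is only transient
};

Status Finalize(FakeJob* job) {
  if (job->ready_to_finalize) {
    if (job->optimization_disabled || job->dependencies_aborted) {
      job->retry_later = true;    // keep future optimization enabled
    } else if (job->codegen_ok) {
      return Status::kSucceeded;  // install the optimized code
    }
  }
  return Status::kFailed;         // caller keeps the unoptimized code
}

int main() {
  FakeJob job{true, false, false, true, false};
  std::cout << (Finalize(&job) == Status::kSucceeded ? "optimized\n"
                                                     : "kept baseline\n");
}
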
 class InterpreterActivationsFinder : public ThreadVisitor,
                                      public OptimizedFunctionVisitor {
  public:
@@ -942,7 +916,7 @@
   // baseline code because there might be suspended activations stored in
   // generator objects on the heap. We could eventually go directly to
   // TurboFan in this case.
-  if (function->shared()->is_resumable()) {
+  if (IsResumableFunction(function->shared()->kind())) {
     return MaybeHandle<Code>();
   }
 
@@ -978,7 +952,7 @@
   }
 
   // Parse and update CompilationInfo with the results.
-  if (!Parser::ParseStatic(info.parse_info())) return MaybeHandle<Code>();
+  if (!Parse(info.parse_info())) return MaybeHandle<Code>();
   Handle<SharedFunctionInfo> shared = info.shared_info();
   DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());
 
@@ -1014,22 +988,19 @@
   TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::CompileCodeLazy);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::CompileCodeLazy);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
   AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
 
-  if (FLAG_turbo_cache_shared_code) {
-    Handle<Code> cached_code;
-    if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
-            .ToHandle(&cached_code)) {
-      if (FLAG_trace_opt) {
-        PrintF("[found optimized code for ");
-        function->ShortPrint();
-        PrintF(" during unoptimized compile]\n");
-      }
-      DCHECK(function->shared()->is_compiled());
-      return cached_code;
+  Handle<Code> cached_code;
+  if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
+          .ToHandle(&cached_code)) {
+    if (FLAG_trace_opt) {
+      PrintF("[found optimized code for ");
+      function->ShortPrint();
+      PrintF(" during unoptimized compile]\n");
     }
+    DCHECK(function->shared()->is_compiled());
+    return cached_code;
   }
 
   if (function->shared()->is_compiled()) {
@@ -1076,18 +1047,12 @@
   Isolate* isolate = info->isolate();
   TimerEventScope<TimerEventCompileCode> timer(isolate);
   RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::CompileCode);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
   PostponeInterruptsScope postpone(isolate);
   DCHECK(!isolate->native_context().is_null());
   ParseInfo* parse_info = info->parse_info();
   Handle<Script> script = parse_info->script();
 
-  // Create a canonical handle scope before internalizing parsed values if
-  // compiling bytecode. This is required for off-thread bytecode generation.
-  std::unique_ptr<CanonicalHandleScope> canonical;
-  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate));
-
   // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
   FixedArray* array = isolate->native_context()->embedder_data();
   script->set_context_data(array->get(v8::Context::kDebugIdIndex));
@@ -1131,7 +1096,7 @@
         parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
       }
 
-      if (!Parser::ParseStatic(parse_info)) {
+      if (!Parse(parse_info)) {
         return Handle<SharedFunctionInfo>::null();
       }
     }
@@ -1150,10 +1115,8 @@
                                ? info->isolate()->counters()->compile_eval()
                                : info->isolate()->counters()->compile();
     HistogramTimerScope timer(rate);
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-        isolate,
-        (parse_info->is_eval() ? &tracing::TraceEventStatsTable::CompileEval
-                               : &tracing::TraceEventStatsTable::Compile));
+    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+                 parse_info->is_eval() ? "V8.CompileEval" : "V8.Compile");
 
     // Allocate a shared function info object.
     DCHECK_EQ(kNoSourcePosition, lit->function_token_position());
@@ -1203,14 +1166,14 @@
 bool Compiler::Analyze(ParseInfo* info) {
   DCHECK_NOT_NULL(info->literal());
   if (!Rewriter::Rewrite(info)) return false;
-  Scope::Analyze(info);
+  DeclarationScope::Analyze(info, AnalyzeMode::kRegular);
   if (!Renumber(info)) return false;
   DCHECK_NOT_NULL(info->scope());
   return true;
 }
 
 bool Compiler::ParseAndAnalyze(ParseInfo* info) {
-  if (!Parser::ParseStatic(info)) return false;
+  if (!Parse(info)) return false;
   if (!Compiler::Analyze(info)) return false;
   DCHECK_NOT_NULL(info->literal());
   DCHECK_NOT_NULL(info->scope());
@@ -1390,10 +1353,18 @@
 }
 
 bool Compiler::EnsureBytecode(CompilationInfo* info) {
-  DCHECK(ShouldUseIgnition(info));
+  if (!ShouldUseIgnition(info)) return false;
   if (!info->shared_info()->HasBytecodeArray()) {
-    DCHECK(!info->shared_info()->is_compiled());
+    Handle<Code> original_code(info->shared_info()->code());
     if (GetUnoptimizedCode(info).is_null()) return false;
+    if (info->shared_info()->HasAsmWasmData()) return false;
+    DCHECK(info->shared_info()->is_compiled());
+    if (original_code->kind() == Code::FUNCTION) {
+      // Generating bytecode will install the {InterpreterEntryTrampoline} as
+      // shared code on the function. To avoid an implicit tier-down we restore
+      // the original baseline code if it existed beforehand.
+      info->shared_info()->ReplaceCode(*original_code);
+    }
   }
   DCHECK(info->shared_info()->HasBytecodeArray());
   return true;
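
EnsureBytecode now snapshots the code object before generating bytecode and reinstalls it afterwards when it was baseline (Code::FUNCTION) code, so requesting bytecode does not silently tier the function down. The general save-and-restore shape, reduced to a standalone sketch with invented types:

#include <cassert>
#include <string>

enum class Kind { kBuiltin, kBaseline };

struct FakeShared {
  Kind kind;
  std::string code;
  bool has_bytecode;
};

// Generating bytecode also swaps in a generic interpreter entry stub.
void GenerateBytecode(FakeShared* shared) {
  shared->has_bytecode = true;
  shared->kind = Kind::kBuiltin;
  shared->code = "interpreter-entry";
}

bool EnsureBytecode(FakeShared* shared) {
  if (!shared->has_bytecode) {
    // Snapshot whatever code was installed before.
    FakeShared original = *shared;
    GenerateBytecode(shared);
    // Restore baseline code so the function is not implicitly tiered down.
    if (original.kind == Kind::kBaseline) {
      shared->kind = original.kind;
      shared->code = original.code;
    }
  }
  return shared->has_bytecode;
}

int main() {
  FakeShared f{Kind::kBaseline, "baseline-code", false};
  assert(EnsureBytecode(&f) && f.has_bytecode && f.code == "baseline-code");
}
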
@@ -1414,7 +1385,7 @@
     // baseline code because there might be suspended activations stored in
     // generator objects on the heap. We could eventually go directly to
     // TurboFan in this case.
-    if (shared->is_resumable()) return false;
+    if (IsResumableFunction(shared->kind())) return false;
 
     // TODO(4280): For now we disable switching to baseline code in the presence
     // of interpreter activations of the given function. The reasons is that the
@@ -1513,7 +1484,9 @@
     if (context->IsNativeContext()) parse_info.set_global();
     parse_info.set_language_mode(language_mode);
     parse_info.set_parse_restriction(restriction);
-    parse_info.set_context(context);
+    if (!context->IsNativeContext()) {
+      parse_info.set_outer_scope_info(handle(context->scope_info()));
+    }
 
     shared_info = CompileToplevel(&info);
 
@@ -1629,8 +1602,8 @@
       HistogramTimerScope timer(isolate->counters()->compile_deserialize());
       RuntimeCallTimerScope runtimeTimer(isolate,
                                          &RuntimeCallStats::CompileDeserialize);
-      TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-          isolate, &tracing::TraceEventStatsTable::CompileDeserialize);
+      TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+                   "V8.CompileDeserialize");
       Handle<SharedFunctionInfo> result;
       if (CodeSerializer::Deserialize(isolate, *cached_data, source)
               .ToHandle(&result)) {
@@ -1686,7 +1659,9 @@
     }
     parse_info.set_compile_options(compile_options);
     parse_info.set_extension(extension);
-    parse_info.set_context(context);
+    if (!context->IsNativeContext()) {
+      parse_info.set_outer_scope_info(handle(context->scope_info()));
+    }
     if (FLAG_serialize_toplevel &&
         compile_options == ScriptCompiler::kProduceCodeCache) {
       info.PrepareForSerializing();
@@ -1703,8 +1678,8 @@
             isolate->counters()->compile_serialize());
         RuntimeCallTimerScope runtimeTimer(isolate,
                                            &RuntimeCallStats::CompileSerialize);
-        TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-            isolate, &tracing::TraceEventStatsTable::CompileSerialize);
+        TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
+                     "V8.CompileSerialize");
         *cached_data = CodeSerializer::Serialize(isolate, result, source);
         if (FLAG_profile_deserialization) {
           PrintF("[Compiling and serializing took %0.3f ms]\n",
@@ -1822,17 +1797,14 @@
   // Generate code
   TimerEventScope<TimerEventCompileCode> timer(isolate);
   RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::CompileCode);
-
-  // Create a canonical handle scope if compiling ignition bytecode. This is
-  // required by the constant array builder to de-duplicate common objects
-  // without dereferencing handles.
-  std::unique_ptr<CanonicalHandleScope> canonical;
-  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info.isolate()));
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
 
   if (lazy) {
     info.SetCode(isolate->builtins()->CompileLazy());
+    Scope* outer_scope = literal->scope()->GetOuterScopeWithContext();
+    if (outer_scope) {
+      result->set_outer_scope_info(*outer_scope->scope_info());
+    }
   } else if (Renumber(info.parse_info()) && GenerateUnoptimizedCode(&info)) {
     // Code generation will ensure that the feedback vector is present and
     // appropriately sized.
@@ -1876,6 +1848,7 @@
   Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
       name, fun->shared()->num_literals(), FunctionKind::kNormalFunction, code,
       Handle<ScopeInfo>(fun->shared()->scope_info()));
+  shared->set_outer_scope_info(fun->shared()->outer_scope_info());
   shared->SetConstructStub(*construct_stub);
   shared->set_feedback_metadata(fun->shared()->feedback_metadata());
 
@@ -1895,58 +1868,28 @@
   return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
 }
 
-void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
+CompilationJob* Compiler::PrepareUnoptimizedCompilationJob(
+    CompilationInfo* info) {
+  VMState<COMPILER> state(info->isolate());
+  std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
+  if (job->PrepareJob() != CompilationJob::SUCCEEDED) {
+    return nullptr;
+  }
+  return job.release();
+}
+
+bool Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
   // Take ownership of compilation job.  Deleting job also tears down the zone.
   std::unique_ptr<CompilationJob> job(raw_job);
-  CompilationInfo* info = job->info();
-  Isolate* isolate = info->isolate();
 
-  VMState<COMPILER> state(isolate);
-  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
-  RuntimeCallTimerScope runtimeTimer(isolate,
-                                     &RuntimeCallStats::RecompileSynchronous);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
-
-  Handle<SharedFunctionInfo> shared = info->shared_info();
-  shared->code()->set_profiler_ticks(0);
-
-  DCHECK(!shared->HasDebugInfo());
-
-  // 1) Optimization on the concurrent thread may have failed.
-  // 2) The function may have already been optimized by OSR.  Simply continue.
-  //    Except when OSR already disabled optimization for some reason.
-  // 3) The code may have already been invalidated due to dependency change.
-  // 4) Code generation may have failed.
-  if (job->state() == CompilationJob::State::kReadyToFinalize) {
-    if (shared->optimization_disabled()) {
-      job->RetryOptimization(kOptimizationDisabled);
-    } else if (info->dependencies()->HasAborted()) {
-      job->RetryOptimization(kBailedOutDueToDependencyChange);
-    } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
-      job->RecordOptimizationStats();
-      RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
-      if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
-                                         info->osr_ast_id()).code == nullptr) {
-        InsertCodeIntoOptimizedCodeMap(info);
-      }
-      if (FLAG_trace_opt) {
-        PrintF("[completed optimizing ");
-        info->closure()->ShortPrint();
-        PrintF("]\n");
-      }
-      info->closure()->ReplaceCode(*info->code());
-      return;
-    }
+  VMState<COMPILER> state(job->info()->isolate());
+  if (job->info()->IsOptimizing()) {
+    return FinalizeOptimizedCompilationJob(job.get()) ==
+           CompilationJob::SUCCEEDED;
+  } else {
+    return FinalizeUnoptimizedCompilationJob(job.get()) ==
+           CompilationJob::SUCCEEDED;
   }
-
-  DCHECK(job->state() == CompilationJob::State::kFailed);
-  if (FLAG_trace_opt) {
-    PrintF("[aborted optimizing ");
-    info->closure()->ShortPrint();
-    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
-  }
-  info->closure()->ReplaceCode(shared->code());
 }
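
FinalizeCompilationJob now just takes ownership of the raw job pointer, dispatches to the optimized or unoptimized finalizer, and reports success as a bool. The ownership-plus-dispatch shape in isolation (invented names, not the V8 classes):

#include <iostream>
#include <memory>

struct FakeJob {
  bool is_optimizing;
};

bool FinalizeOptimized(FakeJob*) { return true; }
bool FinalizeUnoptimized(FakeJob*) { return true; }

bool FinalizeCompilationJob(FakeJob* raw_job) {
  // Take ownership; the job (and everything it owns) dies when we return.
  std::unique_ptr<FakeJob> job(raw_job);
  return job->is_optimizing ? FinalizeOptimized(job.get())
                            : FinalizeUnoptimized(job.get());
}

int main() {
  std::cout << FinalizeCompilationJob(new FakeJob{true}) << "\n";
}
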
 
 void Compiler::PostInstantiation(Handle<JSFunction> function,
diff --git a/src/compiler.h b/src/compiler.h
index 5521573..bfeaa8e 100644
--- a/src/compiler.h
+++ b/src/compiler.h
@@ -9,14 +9,9 @@
 
 #include "src/allocation.h"
 #include "src/bailout-reason.h"
-#include "src/compilation-dependencies.h"
 #include "src/contexts.h"
-#include "src/frames.h"
 #include "src/isolate.h"
-#include "src/objects-inl.h"
-#include "src/source-position-table.h"
-#include "src/source-position.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -57,8 +52,12 @@
   static bool CompileDebugCode(Handle<SharedFunctionInfo> shared);
   static MaybeHandle<JSArray> CompileForLiveEdit(Handle<Script> script);
 
+  // Prepare a compilation job for unoptimized code. Requires ParseAndAnalyze.
+  static CompilationJob* PrepareUnoptimizedCompilationJob(
+      CompilationInfo* info);
+
   // Generate and install code from previously queued compilation job.
-  static void FinalizeCompilationJob(CompilationJob* job);
+  static bool FinalizeCompilationJob(CompilationJob* job);
 
   // Give the compiler a chance to perform low-latency initialization tasks of
   // the given {function} on its instantiation. Note that only the runtime will
@@ -138,405 +137,6 @@
       JavaScriptFrame* osr_frame);
 };
 
-
-// CompilationInfo encapsulates some information known at compile time.  It
-// is constructed based on the resources available at compile-time.
-class CompilationInfo final {
- public:
-  // Various configuration flags for a compilation, as well as some properties
-  // of the compiled code produced by a compilation.
-  enum Flag {
-    kDeferredCalling = 1 << 0,
-    kNonDeferredCalling = 1 << 1,
-    kSavesCallerDoubles = 1 << 2,
-    kRequiresFrame = 1 << 3,
-    kMustNotHaveEagerFrame = 1 << 4,
-    kDeoptimizationSupport = 1 << 5,
-    kDebug = 1 << 6,
-    kSerializing = 1 << 7,
-    kFunctionContextSpecializing = 1 << 8,
-    kFrameSpecializing = 1 << 9,
-    kNativeContextSpecializing = 1 << 10,
-    kInliningEnabled = 1 << 11,
-    kDisableFutureOptimization = 1 << 12,
-    kSplittingEnabled = 1 << 13,
-    kDeoptimizationEnabled = 1 << 14,
-    kSourcePositionsEnabled = 1 << 15,
-    kBailoutOnUninitialized = 1 << 16,
-    kOptimizeFromBytecode = 1 << 17,
-    kTypeFeedbackEnabled = 1 << 18,
-    kAccessorInliningEnabled = 1 << 19,
-  };
-
-  CompilationInfo(ParseInfo* parse_info, Handle<JSFunction> closure);
-  CompilationInfo(Vector<const char> debug_name, Isolate* isolate, Zone* zone,
-                  Code::Flags code_flags = Code::ComputeFlags(Code::STUB));
-  ~CompilationInfo();
-
-  ParseInfo* parse_info() const { return parse_info_; }
-
-  // -----------------------------------------------------------
-  // TODO(titzer): inline and delete accessors of ParseInfo
-  // -----------------------------------------------------------
-  Handle<Script> script() const;
-  FunctionLiteral* literal() const;
-  DeclarationScope* scope() const;
-  Handle<Context> context() const;
-  Handle<SharedFunctionInfo> shared_info() const;
-  bool has_shared_info() const;
-  // -----------------------------------------------------------
-
-  Isolate* isolate() const {
-    return isolate_;
-  }
-  Zone* zone() { return zone_; }
-  bool is_osr() const { return !osr_ast_id_.IsNone(); }
-  Handle<JSFunction> closure() const { return closure_; }
-  Handle<Code> code() const { return code_; }
-  Code::Flags code_flags() const { return code_flags_; }
-  BailoutId osr_ast_id() const { return osr_ast_id_; }
-  JavaScriptFrame* osr_frame() const { return osr_frame_; }
-  int num_parameters() const;
-  int num_parameters_including_this() const;
-  bool is_this_defined() const;
-
-  void set_parameter_count(int parameter_count) {
-    DCHECK(IsStub());
-    parameter_count_ = parameter_count;
-  }
-
-  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
-  Handle<BytecodeArray> bytecode_array() const { return bytecode_array_; }
-
-  bool is_tracking_positions() const { return track_positions_; }
-
-  bool is_calling() const {
-    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
-  }
-
-  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }
-
-  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }
-
-  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }
-
-  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }
-
-  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }
-
-  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }
-
-  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }
-
-  bool requires_frame() const { return GetFlag(kRequiresFrame); }
-
-  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }
-
-  bool GetMustNotHaveEagerFrame() const {
-    return GetFlag(kMustNotHaveEagerFrame);
-  }
-
-  // Compiles marked as debug produce unoptimized code with debug break slots.
-  // Inner functions that cannot be compiled w/o context are compiled eagerly.
-  // Always include deoptimization support to avoid having to recompile again.
-  void MarkAsDebug() {
-    SetFlag(kDebug);
-    SetFlag(kDeoptimizationSupport);
-  }
-
-  bool is_debug() const { return GetFlag(kDebug); }
-
-  void PrepareForSerializing() { SetFlag(kSerializing); }
-
-  bool will_serialize() const { return GetFlag(kSerializing); }
-
-  void MarkAsFunctionContextSpecializing() {
-    SetFlag(kFunctionContextSpecializing);
-  }
-
-  bool is_function_context_specializing() const {
-    return GetFlag(kFunctionContextSpecializing);
-  }
-
-  void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }
-
-  bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }
-
-  void MarkAsNativeContextSpecializing() {
-    SetFlag(kNativeContextSpecializing);
-  }
-
-  bool is_native_context_specializing() const {
-    return GetFlag(kNativeContextSpecializing);
-  }
-
-  void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }
-
-  bool is_deoptimization_enabled() const {
-    return GetFlag(kDeoptimizationEnabled);
-  }
-
-  void MarkAsTypeFeedbackEnabled() { SetFlag(kTypeFeedbackEnabled); }
-
-  bool is_type_feedback_enabled() const {
-    return GetFlag(kTypeFeedbackEnabled);
-  }
-
-  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
-
-  bool is_accessor_inlining_enabled() const {
-    return GetFlag(kAccessorInliningEnabled);
-  }
-
-  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
-
-  bool is_source_positions_enabled() const {
-    return GetFlag(kSourcePositionsEnabled);
-  }
-
-  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
-
-  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
-
-  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
-
-  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
-
-  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
-
-  bool is_bailout_on_uninitialized() const {
-    return GetFlag(kBailoutOnUninitialized);
-  }
-
-  void MarkAsOptimizeFromBytecode() { SetFlag(kOptimizeFromBytecode); }
-
-  bool is_optimizing_from_bytecode() const {
-    return GetFlag(kOptimizeFromBytecode);
-  }
-
-  bool GeneratePreagedPrologue() const {
-    // Generate a pre-aged prologue if we are optimizing for size, which
-    // will make code flushing more aggressive. Only apply to Code::FUNCTION,
-    // since StaticMarkingVisitor::IsFlushable only flushes proper functions.
-    return FLAG_optimize_for_size && FLAG_age_code && !is_debug() &&
-           output_code_kind() == Code::FUNCTION;
-  }
-
-  void SetCode(Handle<Code> code) { code_ = code; }
-
-  void SetBytecodeArray(Handle<BytecodeArray> bytecode_array) {
-    bytecode_array_ = bytecode_array;
-  }
-
-  bool ShouldTrapOnDeopt() const {
-    return (FLAG_trap_on_deopt && IsOptimizing()) ||
-        (FLAG_trap_on_stub_deopt && IsStub());
-  }
-
-  bool has_native_context() const {
-    return !closure().is_null() && (closure()->native_context() != nullptr);
-  }
-
-  Context* native_context() const {
-    return has_native_context() ? closure()->native_context() : nullptr;
-  }
-
-  bool has_global_object() const { return has_native_context(); }
-
-  JSGlobalObject* global_object() const {
-    return has_global_object() ? native_context()->global_object() : nullptr;
-  }
-
-  // Accessors for the different compilation modes.
-  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
-  bool IsStub() const { return mode_ == STUB; }
-  void SetOptimizing() {
-    DCHECK(has_shared_info());
-    SetMode(OPTIMIZE);
-    optimization_id_ = isolate()->NextOptimizationId();
-    code_flags_ =
-        Code::KindField::update(code_flags_, Code::OPTIMIZED_FUNCTION);
-  }
-  void SetOptimizingForOsr(BailoutId osr_ast_id, JavaScriptFrame* osr_frame) {
-    SetOptimizing();
-    osr_ast_id_ = osr_ast_id;
-    osr_frame_ = osr_frame;
-  }
-
-  // Deoptimization support.
-  bool HasDeoptimizationSupport() const {
-    return GetFlag(kDeoptimizationSupport);
-  }
-  void EnableDeoptimizationSupport() {
-    DCHECK_EQ(BASE, mode_);
-    SetFlag(kDeoptimizationSupport);
-  }
-  bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }
-
-  bool ExpectsJSReceiverAsReceiver();
-
-  // Determines whether or not to insert a self-optimization header.
-  bool ShouldSelfOptimize();
-
-  void set_deferred_handles(DeferredHandles* deferred_handles) {
-    DCHECK(deferred_handles_ == NULL);
-    deferred_handles_ = deferred_handles;
-  }
-
-  void ReopenHandlesInNewHandleScope() {
-    closure_ = Handle<JSFunction>(*closure_);
-  }
-
-  void AbortOptimization(BailoutReason reason) {
-    DCHECK(reason != kNoReason);
-    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
-    SetFlag(kDisableFutureOptimization);
-  }
-
-  void RetryOptimization(BailoutReason reason) {
-    DCHECK(reason != kNoReason);
-    if (GetFlag(kDisableFutureOptimization)) return;
-    bailout_reason_ = reason;
-  }
-
-  BailoutReason bailout_reason() const { return bailout_reason_; }
-
-  int prologue_offset() const {
-    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
-    return prologue_offset_;
-  }
-
-  void set_prologue_offset(int prologue_offset) {
-    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
-    prologue_offset_ = prologue_offset;
-  }
-
-  CompilationDependencies* dependencies() { return &dependencies_; }
-
-  int optimization_id() const { return optimization_id_; }
-
-  int osr_expr_stack_height() { return osr_expr_stack_height_; }
-  void set_osr_expr_stack_height(int height) {
-    DCHECK(height >= 0);
-    osr_expr_stack_height_ = height;
-  }
-
-  bool has_simple_parameters();
-
-  struct InlinedFunctionHolder {
-    Handle<SharedFunctionInfo> shared_info;
-
-    // Root that holds the unoptimized code of the inlined function alive
-    // (and out of reach of code flushing) until we finish compilation.
-    // Do not remove.
-    Handle<Code> inlined_code_object_root;
-
-    explicit InlinedFunctionHolder(
-        Handle<SharedFunctionInfo> inlined_shared_info)
-        : shared_info(inlined_shared_info),
-          inlined_code_object_root(inlined_shared_info->code()) {}
-  };
-
-  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
-  InlinedFunctionList const& inlined_functions() const {
-    return inlined_functions_;
-  }
-
-  void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function) {
-    inlined_functions_.push_back(InlinedFunctionHolder(inlined_function));
-  }
-
-  std::unique_ptr<char[]> GetDebugName() const;
-
-  Code::Kind output_code_kind() const {
-    return Code::ExtractKindFromFlags(code_flags_);
-  }
-
-  StackFrame::Type GetOutputStackFrameType() const;
-
-  int GetDeclareGlobalsFlags() const;
-
-  SourcePositionTableBuilder::RecordingMode SourcePositionRecordingMode() const;
-
- private:
-  // Compilation mode.
-  // BASE is generated by the full codegen, optionally prepared for bailouts.
-  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
-  enum Mode {
-    BASE,
-    OPTIMIZE,
-    STUB
-  };
-
-  CompilationInfo(ParseInfo* parse_info, Vector<const char> debug_name,
-                  Code::Flags code_flags, Mode mode, Isolate* isolate,
-                  Zone* zone);
-
-  ParseInfo* parse_info_;
-  Isolate* isolate_;
-
-  void SetMode(Mode mode) {
-    mode_ = mode;
-  }
-
-  void SetFlag(Flag flag) { flags_ |= flag; }
-
-  void SetFlag(Flag flag, bool value) {
-    flags_ = value ? flags_ | flag : flags_ & ~flag;
-  }
-
-  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
-
-  unsigned flags_;
-
-  Code::Flags code_flags_;
-
-  Handle<JSFunction> closure_;
-
-  // The compiled code.
-  Handle<Code> code_;
-
-  // Compilation mode flag and whether deoptimization is allowed.
-  Mode mode_;
-  BailoutId osr_ast_id_;
-
-  // Holds the bytecode array generated by the interpreter.
-  // TODO(rmcilroy/mstarzinger): Temporary work-around until compiler.cc is
-  // refactored to avoid us needing to carry the BytcodeArray around.
-  Handle<BytecodeArray> bytecode_array_;
-
-  // The zone from which the compilation pipeline working on this
-  // CompilationInfo allocates.
-  Zone* zone_;
-
-  DeferredHandles* deferred_handles_;
-
-  // Dependencies for this compilation, e.g. stable maps.
-  CompilationDependencies dependencies_;
-
-  BailoutReason bailout_reason_;
-
-  int prologue_offset_;
-
-  bool track_positions_;
-
-  InlinedFunctionList inlined_functions_;
-
-  // Number of parameters used for compilation of stubs that require arguments.
-  int parameter_count_;
-
-  int optimization_id_;
-
-  int osr_expr_stack_height_;
-
-  // The current OSR frame for specialization or {nullptr}.
-  JavaScriptFrame* osr_frame_ = nullptr;
-
-  Vector<const char> debug_name_;
-
-  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
-};
-
 // A base class for compilation jobs intended to run concurrent to the main
 // thread. The job is split into three phases which are called in sequence on
 // different threads and with different limitations:
@@ -557,15 +157,20 @@
     kFailed,
   };
 
-  explicit CompilationJob(CompilationInfo* info, const char* compiler_name,
-                          State initial_state = State::kReadyToPrepare)
-      : info_(info), compiler_name_(compiler_name), state_(initial_state) {}
+  CompilationJob(Isolate* isolate, CompilationInfo* info,
+                 const char* compiler_name,
+                 State initial_state = State::kReadyToPrepare)
+      : info_(info),
+        compiler_name_(compiler_name),
+        state_(initial_state),
+        stack_limit_(isolate->stack_guard()->real_climit()) {}
   virtual ~CompilationJob() {}
 
   // Prepare the compile job. Must be called on the main thread.
   MUST_USE_RESULT Status PrepareJob();
 
-  // Executes the compile job. Can be called off the main thread.
+  // Executes the compile job. Can be called on a background thread if
+  // can_execute_on_background_thread() returns true.
   MUST_USE_RESULT Status ExecuteJob();
 
   // Finalizes the compile job. Must be called on the main thread.
@@ -573,27 +178,23 @@
 
   // Report a transient failure, try again next time. Should only be called on
   // optimization compilation jobs.
-  Status RetryOptimization(BailoutReason reason) {
-    DCHECK(info_->IsOptimizing());
-    info_->RetryOptimization(reason);
-    state_ = State::kFailed;
-    return FAILED;
-  }
+  Status RetryOptimization(BailoutReason reason);
 
   // Report a persistent failure, disable future optimization on the function.
   // Should only be called on optimization compilation jobs.
-  Status AbortOptimization(BailoutReason reason) {
-    DCHECK(info_->IsOptimizing());
-    info_->AbortOptimization(reason);
-    state_ = State::kFailed;
-    return FAILED;
-  }
+  Status AbortOptimization(BailoutReason reason);
 
-  void RecordOptimizationStats();
+  void RecordOptimizedCompilationStats() const;
+  void RecordUnoptimizedCompilationStats() const;
+
+  virtual bool can_execute_on_background_thread() const { return true; }
+
+  void set_stack_limit(uintptr_t stack_limit) { stack_limit_ = stack_limit; }
+  uintptr_t stack_limit() const { return stack_limit_; }
 
   State state() const { return state_; }
   CompilationInfo* info() const { return info_; }
-  Isolate* isolate() const { return info()->isolate(); }
+  Isolate* isolate() const;
 
  protected:
   // Overridden by the actual implementation.
@@ -612,6 +213,7 @@
   base::TimeDelta time_taken_to_finalize_;
   const char* compiler_name_;
   State state_;
+  uintptr_t stack_limit_;
 
   MUST_USE_RESULT Status UpdateState(Status status, State next_state) {
     if (status == SUCCEEDED) {
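
The compiler.h changes make the three-phase contract explicit: PrepareJob and FinalizeJob must run on the main thread, ExecuteJob may run on a background thread when can_execute_on_background_thread() returns true, and the constructor snapshots a stack limit for the executing thread. A compact standalone sketch of such a phased job with a state machine driven by an UpdateState-style helper (this class is illustrative only, not the V8 CompilationJob):

#include <cstdint>
#include <iostream>

class PhasedJob {
 public:
  enum class Status { kSucceeded, kFailed };
  enum class State {
    kReadyToPrepare, kReadyToExecute, kReadyToFinalize, kSucceeded, kFailed
  };

  explicit PhasedJob(uintptr_t stack_limit) : stack_limit_(stack_limit) {}

  // Main thread only.
  Status PrepareJob() { return Update(Status::kSucceeded, State::kReadyToExecute); }
  // May run on a background thread; must respect stack_limit_.
  Status ExecuteJob() { return Update(Status::kSucceeded, State::kReadyToFinalize); }
  // Main thread only.
  Status FinalizeJob() { return Update(Status::kSucceeded, State::kSucceeded); }

  State state() const { return state_; }
  uintptr_t stack_limit() const { return stack_limit_; }

 private:
  Status Update(Status status, State next) {
    state_ = (status == Status::kSucceeded) ? next : State::kFailed;
    return status;
  }

  State state_ = State::kReadyToPrepare;
  uintptr_t stack_limit_;
};

int main() {
  PhasedJob job(/*stack_limit=*/0);
  if (job.PrepareJob() == PhasedJob::Status::kSucceeded &&
      job.ExecuteJob() == PhasedJob::Status::kSucceeded &&
      job.FinalizeJob() == PhasedJob::Status::kSucceeded) {
    std::cout << "job done\n";
  }
}
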
diff --git a/src/compiler/access-builder.cc b/src/compiler/access-builder.cc
index c43a53f..5301434 100644
--- a/src/compiler/access-builder.cc
+++ b/src/compiler/access-builder.cc
@@ -4,21 +4,29 @@
 
 #include "src/compiler/access-builder.h"
 
+#include "src/compiler/type-cache.h"
 #include "src/contexts.h"
 #include "src/frames.h"
 #include "src/handles-inl.h"
 #include "src/heap/heap.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
 namespace compiler {
 
 // static
+FieldAccess AccessBuilder::ForExternalDoubleValue() {
+  FieldAccess access = {kUntaggedBase,          0,
+                        MaybeHandle<Name>(),    Type::Number(),
+                        MachineType::Float64(), kNoWriteBarrier};
+  return access;
+}
+
+// static
 FieldAccess AccessBuilder::ForMap() {
   FieldAccess access = {
-      kTaggedBase,           HeapObject::kMapOffset,   MaybeHandle<Name>(),
-      Type::OtherInternal(), MachineType::AnyTagged(), kMapWriteBarrier};
+      kTaggedBase,           HeapObject::kMapOffset,       MaybeHandle<Name>(),
+      Type::OtherInternal(), MachineType::TaggedPointer(), kMapWriteBarrier};
   return access;
 }
 
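
access-builder.cc follows one pattern throughout: each static helper aggregate-initializes a FieldAccess descriptor pairing an offset with its base kind, type, machine representation, and write-barrier kind, and most of this file's hunks simply narrow AnyTagged machine types to TaggedPointer or TaggedSigned. A toy version of the descriptor-builder pattern, with an invented struct rather than the real FieldAccess:

#include <cstddef>
#include <iostream>

enum class BaseKind { kTagged, kUntagged };
enum class MachineRep { kAnyTagged, kTaggedPointer, kTaggedSigned, kFloat64 };
enum class WriteBarrier { kNone, kPointer, kFull };

struct FieldDesc {
  BaseKind base;
  size_t offset;
  MachineRep rep;
  WriteBarrier barrier;
};

struct AccessFactory {
  // One static builder per field, so call sites never spell out raw offsets.
  static FieldDesc ForMap() {
    return {BaseKind::kTagged, 0, MachineRep::kTaggedPointer,
            WriteBarrier::kPointer};
  }
  static FieldDesc ForArrayLength() {
    return {BaseKind::kTagged, 8, MachineRep::kTaggedSigned,
            WriteBarrier::kNone};
  }
};

int main() {
  FieldDesc map = AccessFactory::ForMap();
  std::cout << "map field at offset " << map.offset << "\n";
}
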
@@ -38,8 +46,8 @@
 // static
 FieldAccess AccessBuilder::ForJSObjectProperties() {
   FieldAccess access = {
-      kTaggedBase,      JSObject::kPropertiesOffset, MaybeHandle<Name>(),
-      Type::Internal(), MachineType::AnyTagged(),    kPointerWriteBarrier};
+      kTaggedBase,      JSObject::kPropertiesOffset,  MaybeHandle<Name>(),
+      Type::Internal(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
@@ -47,8 +55,8 @@
 // static
 FieldAccess AccessBuilder::ForJSObjectElements() {
   FieldAccess access = {
-      kTaggedBase,      JSObject::kElementsOffset, MaybeHandle<Name>(),
-      Type::Internal(), MachineType::AnyTagged(),  kPointerWriteBarrier};
+      kTaggedBase,      JSObject::kElementsOffset,    MaybeHandle<Name>(),
+      Type::Internal(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
@@ -60,7 +68,7 @@
   FieldAccess access = {kTaggedBase,
                         offset,
                         MaybeHandle<Name>(),
-                        Type::Tagged(),
+                        Type::NonInternal(),
                         MachineType::AnyTagged(),
                         kFullWriteBarrier};
   return access;
@@ -93,7 +101,7 @@
                         JSFunction::kSharedFunctionInfoOffset,
                         Handle<Name>(),
                         Type::OtherInternal(),
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -101,19 +109,16 @@
 // static
 FieldAccess AccessBuilder::ForJSFunctionLiterals() {
   FieldAccess access = {
-      kTaggedBase,      JSFunction::kLiteralsOffset, Handle<Name>(),
-      Type::Internal(), MachineType::AnyTagged(),    kPointerWriteBarrier};
+      kTaggedBase,      JSFunction::kLiteralsOffset,  Handle<Name>(),
+      Type::Internal(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
 // static
 FieldAccess AccessBuilder::ForJSFunctionCodeEntry() {
-  FieldAccess access = {kTaggedBase,
-                        JSFunction::kCodeEntryOffset,
-                        Handle<Name>(),
-                        Type::UntaggedPointer(),
-                        MachineType::Pointer(),
-                        kNoWriteBarrier};
+  FieldAccess access = {
+      kTaggedBase,           JSFunction::kCodeEntryOffset, Handle<Name>(),
+      Type::OtherInternal(), MachineType::Pointer(),       kNoWriteBarrier};
   return access;
 }
 
@@ -134,7 +139,7 @@
                         JSGeneratorObject::kContextOffset,
                         Handle<Name>(),
                         Type::Internal(),
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -146,7 +151,7 @@
                         JSGeneratorObject::kContinuationOffset,
                         Handle<Name>(),
                         type_cache.kSmi,
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedSigned(),
                         kNoWriteBarrier};
   return access;
 }
@@ -176,12 +181,9 @@
 // static
 FieldAccess AccessBuilder::ForJSGeneratorObjectResumeMode() {
   TypeCache const& type_cache = TypeCache::Get();
-  FieldAccess access = {kTaggedBase,
-                        JSGeneratorObject::kResumeModeOffset,
-                        Handle<Name>(),
-                        type_cache.kSmi,
-                        MachineType::AnyTagged(),
-                        kNoWriteBarrier};
+  FieldAccess access = {
+      kTaggedBase,     JSGeneratorObject::kResumeModeOffset, Handle<Name>(),
+      type_cache.kSmi, MachineType::TaggedSigned(),          kNoWriteBarrier};
   return access;
 }
 
@@ -192,7 +194,7 @@
                         JSArray::kLengthOffset,
                         Handle<Name>(),
                         type_cache.kJSArrayLengthType,
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedSigned(),
                         kFullWriteBarrier};
   if (IsFastDoubleElementsKind(elements_kind)) {
     access.type = type_cache.kFixedDoubleArrayLengthType;
@@ -210,7 +212,7 @@
   FieldAccess access = {kTaggedBase,
                         JSArrayBuffer::kBackingStoreOffset,
                         MaybeHandle<Name>(),
-                        Type::UntaggedPointer(),
+                        Type::OtherInternal(),
                         MachineType::Pointer(),
                         kNoWriteBarrier};
   return access;
@@ -229,8 +231,8 @@
   FieldAccess access = {kTaggedBase,
                         JSArrayBufferView::kBufferOffset,
                         MaybeHandle<Name>(),
-                        Type::TaggedPointer(),
-                        MachineType::AnyTagged(),
+                        Type::OtherInternal(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -263,12 +265,23 @@
                         JSTypedArray::kLengthOffset,
                         MaybeHandle<Name>(),
                         TypeCache::Get().kJSTypedArrayLengthType,
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedSigned(),
                         kNoWriteBarrier};
   return access;
 }
 
 // static
+FieldAccess AccessBuilder::ForJSDateValue() {
+  FieldAccess access = {kTaggedBase,
+                        JSDate::kValueOffset,
+                        MaybeHandle<Name>(),
+                        TypeCache::Get().kJSDateValueType,
+                        MachineType::AnyTagged(),
+                        kFullWriteBarrier};
+  return access;
+}
+
+// static
 FieldAccess AccessBuilder::ForJSDateField(JSDate::FieldIndex index) {
   FieldAccess access = {kTaggedBase,
                         JSDate::kValueOffset + index * kPointerSize,
@@ -301,8 +314,8 @@
 // static
 FieldAccess AccessBuilder::ForJSRegExpFlags() {
   FieldAccess access = {
-      kTaggedBase,    JSRegExp::kFlagsOffset,   MaybeHandle<Name>(),
-      Type::Tagged(), MachineType::AnyTagged(), kFullWriteBarrier};
+      kTaggedBase,         JSRegExp::kFlagsOffset,   MaybeHandle<Name>(),
+      Type::NonInternal(), MachineType::AnyTagged(), kFullWriteBarrier};
   return access;
 }
 
@@ -310,8 +323,8 @@
 // static
 FieldAccess AccessBuilder::ForJSRegExpSource() {
   FieldAccess access = {
-      kTaggedBase,    JSRegExp::kSourceOffset,  MaybeHandle<Name>(),
-      Type::Tagged(), MachineType::AnyTagged(), kFullWriteBarrier};
+      kTaggedBase,         JSRegExp::kSourceOffset,  MaybeHandle<Name>(),
+      Type::NonInternal(), MachineType::AnyTagged(), kFullWriteBarrier};
   return access;
 }
 
@@ -322,7 +335,7 @@
                         FixedArray::kLengthOffset,
                         MaybeHandle<Name>(),
                         TypeCache::Get().kFixedArrayLengthType,
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedSigned(),
                         kNoWriteBarrier};
   return access;
 }
@@ -332,7 +345,7 @@
   FieldAccess access = {kTaggedBase,
                         FixedTypedArrayBase::kBasePointerOffset,
                         MaybeHandle<Name>(),
-                        Type::Tagged(),
+                        Type::OtherInternal(),
                         MachineType::AnyTagged(),
                         kPointerWriteBarrier};
   return access;
@@ -343,7 +356,7 @@
   FieldAccess access = {kTaggedBase,
                         FixedTypedArrayBase::kExternalPointerOffset,
                         MaybeHandle<Name>(),
-                        Type::UntaggedPointer(),
+                        Type::OtherInternal(),
                         MachineType::Pointer(),
                         kNoWriteBarrier};
   return access;
@@ -354,8 +367,8 @@
   FieldAccess access = {kTaggedBase,
                         DescriptorArray::kEnumCacheOffset,
                         Handle<Name>(),
-                        Type::TaggedPointer(),
-                        MachineType::AnyTagged(),
+                        Type::OtherInternal(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -366,8 +379,8 @@
   FieldAccess access = {kTaggedBase,
                         DescriptorArray::kEnumCacheBridgeCacheOffset,
                         Handle<Name>(),
-                        Type::TaggedPointer(),
-                        MachineType::AnyTagged(),
+                        Type::OtherInternal(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -393,9 +406,12 @@
 
 // static
 FieldAccess AccessBuilder::ForMapDescriptors() {
-  FieldAccess access = {
-      kTaggedBase,           Map::kDescriptorsOffset,  Handle<Name>(),
-      Type::TaggedPointer(), MachineType::AnyTagged(), kPointerWriteBarrier};
+  FieldAccess access = {kTaggedBase,
+                        Map::kDescriptorsOffset,
+                        Handle<Name>(),
+                        Type::OtherInternal(),
+                        MachineType::TaggedPointer(),
+                        kPointerWriteBarrier};
   return access;
 }
 
@@ -412,8 +428,8 @@
 // static
 FieldAccess AccessBuilder::ForMapPrototype() {
   FieldAccess access = {
-      kTaggedBase,           Map::kPrototypeOffset,    Handle<Name>(),
-      Type::TaggedPointer(), MachineType::AnyTagged(), kPointerWriteBarrier};
+      kTaggedBase, Map::kPrototypeOffset,        Handle<Name>(),
+      Type::Any(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
@@ -432,7 +448,7 @@
                         String::kLengthOffset,
                         Handle<Name>(),
                         TypeCache::Get().kStringLengthType,
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedSigned(),
                         kNoWriteBarrier};
   return access;
 }
@@ -440,16 +456,16 @@
 // static
 FieldAccess AccessBuilder::ForConsStringFirst() {
   FieldAccess access = {
-      kTaggedBase,    ConsString::kFirstOffset, Handle<Name>(),
-      Type::String(), MachineType::AnyTagged(), kPointerWriteBarrier};
+      kTaggedBase,    ConsString::kFirstOffset,     Handle<Name>(),
+      Type::String(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
 // static
 FieldAccess AccessBuilder::ForConsStringSecond() {
   FieldAccess access = {
-      kTaggedBase,    ConsString::kSecondOffset, Handle<Name>(),
-      Type::String(), MachineType::AnyTagged(),  kPointerWriteBarrier};
+      kTaggedBase,    ConsString::kSecondOffset,    Handle<Name>(),
+      Type::String(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
@@ -457,15 +473,15 @@
 FieldAccess AccessBuilder::ForSlicedStringOffset() {
   FieldAccess access = {
       kTaggedBase,         SlicedString::kOffsetOffset, Handle<Name>(),
-      Type::SignedSmall(), MachineType::AnyTagged(),    kNoWriteBarrier};
+      Type::SignedSmall(), MachineType::TaggedSigned(), kNoWriteBarrier};
   return access;
 }
 
 // static
 FieldAccess AccessBuilder::ForSlicedStringParent() {
   FieldAccess access = {
-      kTaggedBase,    SlicedString::kParentOffset, Handle<Name>(),
-      Type::String(), MachineType::AnyTagged(),    kPointerWriteBarrier};
+      kTaggedBase,    SlicedString::kParentOffset,  Handle<Name>(),
+      Type::String(), MachineType::TaggedPointer(), kPointerWriteBarrier};
   return access;
 }
 
@@ -474,7 +490,7 @@
   FieldAccess access = {kTaggedBase,
                         ExternalString::kResourceDataOffset,
                         Handle<Name>(),
-                        Type::UntaggedPointer(),
+                        Type::OtherInternal(),
                         MachineType::Pointer(),
                         kNoWriteBarrier};
   return access;
@@ -516,7 +532,7 @@
                         JSGlobalObject::kGlobalProxyOffset,
                         Handle<Name>(),
                         Type::Receiver(),
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
@@ -527,11 +543,29 @@
                         JSGlobalObject::kNativeContextOffset,
                         Handle<Name>(),
                         Type::Internal(),
-                        MachineType::AnyTagged(),
+                        MachineType::TaggedPointer(),
                         kPointerWriteBarrier};
   return access;
 }
 
+// static
+FieldAccess AccessBuilder::ForJSStringIteratorString() {
+  FieldAccess access = {
+      kTaggedBase,    JSStringIterator::kStringOffset, Handle<Name>(),
+      Type::String(), MachineType::TaggedPointer(),    kPointerWriteBarrier};
+  return access;
+}
+
+// static
+FieldAccess AccessBuilder::ForJSStringIteratorIndex() {
+  FieldAccess access = {kTaggedBase,
+                        JSStringIterator::kNextIndexOffset,
+                        Handle<Name>(),
+                        TypeCache::Get().kStringLengthType,
+                        MachineType::TaggedSigned(),
+                        kNoWriteBarrier};
+  return access;
+}
 
 // static
 FieldAccess AccessBuilder::ForValue() {
@@ -590,24 +624,28 @@
   return access;
 }
 
-
 // static
-FieldAccess AccessBuilder::ForPropertyCellValue() {
-  return ForPropertyCellValue(Type::Tagged());
+FieldAccess AccessBuilder::ForContextExtensionScopeInfo() {
+  FieldAccess access = {kTaggedBase,
+                        ContextExtension::kScopeInfoOffset,
+                        Handle<Name>(),
+                        Type::OtherInternal(),
+                        MachineType::AnyTagged(),
+                        kFullWriteBarrier};
+  return access;
 }
 
-
 // static
-FieldAccess AccessBuilder::ForPropertyCellValue(Type* type) {
+FieldAccess AccessBuilder::ForContextExtensionExtension() {
   FieldAccess access = {
-      kTaggedBase, PropertyCell::kValueOffset, Handle<Name>(),
-      type,        MachineType::AnyTagged(),   kFullWriteBarrier};
+      kTaggedBase, ContextExtension::kExtensionOffset, Handle<Name>(),
+      Type::Any(), MachineType::AnyTagged(),           kFullWriteBarrier};
   return access;
 }
 
 // static
 ElementAccess AccessBuilder::ForFixedArrayElement() {
-  ElementAccess access = {kTaggedBase, FixedArray::kHeaderSize, Type::Tagged(),
+  ElementAccess access = {kTaggedBase, FixedArray::kHeaderSize, Type::Any(),
                           MachineType::AnyTagged(), kFullWriteBarrier};
   return access;
 }
@@ -619,6 +657,7 @@
   switch (kind) {
     case FAST_SMI_ELEMENTS:
       access.type = TypeCache::Get().kSmi;
+      access.machine_type = MachineType::TaggedSigned();
       access.write_barrier_kind = kNoWriteBarrier;
       break;
     case FAST_HOLEY_SMI_ELEMENTS:
diff --git a/src/compiler/access-builder.h b/src/compiler/access-builder.h
index caaf8f8..96f3200 100644
--- a/src/compiler/access-builder.h
+++ b/src/compiler/access-builder.h
@@ -18,6 +18,12 @@
 class AccessBuilder final : public AllStatic {
  public:
   // ===========================================================================
+  // Access to external values (based on external references).
+
+  // Provides access to a double field identified by an external reference.
+  static FieldAccess ForExternalDoubleValue();
+
+  // ===========================================================================
   // Access to heap object fields and elements (based on tagged pointer).
 
   // Provides access to HeapObject::map() field.
@@ -89,6 +95,9 @@
   // Provides access to JSTypedArray::length() field.
   static FieldAccess ForJSTypedArrayLength();
 
+  // Provides access to JSDate::value() field.
+  static FieldAccess ForJSDateValue();
+
   // Provides access to JSDate fields.
   static FieldAccess ForJSDateField(JSDate::FieldIndex index);
 
@@ -173,6 +182,12 @@
   // Provides access to JSGlobalObject::native_context() field.
   static FieldAccess ForJSGlobalObjectNativeContext();
 
+  // Provides access to JSStringIterator::string() field.
+  static FieldAccess ForJSStringIteratorString();
+
+  // Provides access to JSStringIterator::index() field.
+  static FieldAccess ForJSStringIteratorIndex();
+
   // Provides access to JSValue::value() field.
   static FieldAccess ForValue();
 
@@ -186,9 +201,9 @@
   // Provides access to Context slots.
   static FieldAccess ForContextSlot(size_t index);
 
-  // Provides access to PropertyCell::value() field.
-  static FieldAccess ForPropertyCellValue();
-  static FieldAccess ForPropertyCellValue(Type* type);
+  // Provides access to ContextExtension fields.
+  static FieldAccess ForContextExtensionScopeInfo();
+  static FieldAccess ForContextExtensionExtension();
 
   // Provides access to FixedArray elements.
   static ElementAccess ForFixedArrayElement();
diff --git a/src/compiler/access-info.cc b/src/compiler/access-info.cc
index 97de25b..329cb93 100644
--- a/src/compiler/access-info.cc
+++ b/src/compiler/access-info.cc
@@ -7,10 +7,10 @@
 #include "src/accessors.h"
 #include "src/compilation-dependencies.h"
 #include "src/compiler/access-info.h"
+#include "src/compiler/type-cache.h"
 #include "src/field-index-inl.h"
 #include "src/field-type.h"
 #include "src/objects-inl.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -79,9 +79,12 @@
 
 // static
 PropertyAccessInfo PropertyAccessInfo::DataField(
-    MapList const& receiver_maps, FieldIndex field_index, Type* field_type,
-    MaybeHandle<JSObject> holder, MaybeHandle<Map> transition_map) {
-  return PropertyAccessInfo(holder, transition_map, field_index, field_type,
+    MapList const& receiver_maps, FieldIndex field_index,
+    MachineRepresentation field_representation, Type* field_type,
+    MaybeHandle<Map> field_map, MaybeHandle<JSObject> holder,
+    MaybeHandle<Map> transition_map) {
+  return PropertyAccessInfo(holder, transition_map, field_index,
+                            field_representation, field_type, field_map,
                             receiver_maps);
 }
 
@@ -93,13 +96,16 @@
 }
 
 PropertyAccessInfo::PropertyAccessInfo()
-    : kind_(kInvalid), field_type_(Type::None()) {}
+    : kind_(kInvalid),
+      field_representation_(MachineRepresentation::kNone),
+      field_type_(Type::None()) {}
 
 PropertyAccessInfo::PropertyAccessInfo(MaybeHandle<JSObject> holder,
                                        MapList const& receiver_maps)
     : kind_(kNotFound),
       receiver_maps_(receiver_maps),
       holder_(holder),
+      field_representation_(MachineRepresentation::kNone),
       field_type_(Type::None()) {}
 
 PropertyAccessInfo::PropertyAccessInfo(Kind kind, MaybeHandle<JSObject> holder,
@@ -109,18 +115,21 @@
       receiver_maps_(receiver_maps),
       constant_(constant),
       holder_(holder),
+      field_representation_(MachineRepresentation::kNone),
       field_type_(Type::Any()) {}
 
-PropertyAccessInfo::PropertyAccessInfo(MaybeHandle<JSObject> holder,
-                                       MaybeHandle<Map> transition_map,
-                                       FieldIndex field_index, Type* field_type,
-                                       MapList const& receiver_maps)
+PropertyAccessInfo::PropertyAccessInfo(
+    MaybeHandle<JSObject> holder, MaybeHandle<Map> transition_map,
+    FieldIndex field_index, MachineRepresentation field_representation,
+    Type* field_type, MaybeHandle<Map> field_map, MapList const& receiver_maps)
     : kind_(kDataField),
       receiver_maps_(receiver_maps),
       transition_map_(transition_map),
       holder_(holder),
       field_index_(field_index),
-      field_type_(field_type) {}
+      field_representation_(field_representation),
+      field_type_(field_type),
+      field_map_(field_map) {}
 
 bool PropertyAccessInfo::Merge(PropertyAccessInfo const* that) {
   if (this->kind_ != that->kind_) return false;
@@ -138,7 +147,8 @@
       if (this->transition_map_.address() == that->transition_map_.address() &&
           this->field_index_ == that->field_index_ &&
           this->field_type_->Is(that->field_type_) &&
-          that->field_type_->Is(this->field_type_)) {
+          that->field_type_->Is(this->field_type_) &&
+          this->field_representation_ == that->field_representation_) {
         this->receiver_maps_.insert(this->receiver_maps_.end(),
                                     that->receiver_maps_.begin(),
                                     that->receiver_maps_.end());
@@ -283,41 +293,45 @@
         }
         case DATA: {
           int index = descriptors->GetFieldIndex(number);
-          Representation field_representation = details.representation();
+          Representation details_representation = details.representation();
           FieldIndex field_index = FieldIndex::ForPropertyIndex(
-              *map, index, field_representation.IsDouble());
-          Type* field_type = Type::Tagged();
-          if (field_representation.IsSmi()) {
+              *map, index, details_representation.IsDouble());
+          Type* field_type = Type::NonInternal();
+          MachineRepresentation field_representation =
+              MachineRepresentation::kTagged;
+          MaybeHandle<Map> field_map;
+          if (details_representation.IsSmi()) {
             field_type = type_cache_.kSmi;
-          } else if (field_representation.IsDouble()) {
+            field_representation = MachineRepresentation::kTaggedSigned;
+          } else if (details_representation.IsDouble()) {
             field_type = type_cache_.kFloat64;
-          } else if (field_representation.IsHeapObject()) {
+            field_representation = MachineRepresentation::kFloat64;
+          } else if (details_representation.IsHeapObject()) {
             // Extract the field type from the property details (make sure its
             // representation is TaggedPointer to reflect the heap object case).
-            field_type = Type::Intersect(
-                descriptors->GetFieldType(number)->Convert(zone()),
-                Type::TaggedPointer(), zone());
-            if (field_type->Is(Type::None())) {
+            field_representation = MachineRepresentation::kTaggedPointer;
+            Handle<FieldType> descriptors_field_type(
+                descriptors->GetFieldType(number), isolate());
+            if (descriptors_field_type->IsNone()) {
               // Store is not safe if the field type was cleared.
               if (access_mode == AccessMode::kStore) return false;
 
               // The field type was cleared by the GC, so we don't know anything
               // about the contents now.
-              // TODO(bmeurer): It would be awesome to make this saner in the
-              // runtime/GC interaction.
-              field_type = Type::TaggedPointer();
-            } else if (!Type::Any()->Is(field_type)) {
+            } else if (descriptors_field_type->IsClass()) {
               // Add proper code dependencies in case of stable field map(s).
               Handle<Map> field_owner_map(map->FindFieldOwner(number),
                                           isolate());
               dependencies()->AssumeFieldType(field_owner_map);
-            }
-            if (access_mode == AccessMode::kLoad) {
-              field_type = Type::Any();
+
+              // Remember the field map, and try to infer a useful type.
+              field_type = Type::For(descriptors_field_type->AsClass());
+              field_map = descriptors_field_type->AsClass();
             }
           }
           *access_info = PropertyAccessInfo::DataField(
-              MapList{receiver_map}, field_index, field_type, holder);
+              MapList{receiver_map}, field_index, field_representation,
+              field_type, field_map, holder);
           return true;
         }
         case ACCESSOR_CONSTANT: {
@@ -423,12 +437,14 @@
   int offset;
   if (Accessors::IsJSObjectFieldAccessor(map, name, &offset)) {
     FieldIndex field_index = FieldIndex::ForInObjectOffset(offset);
-    Type* field_type = Type::Tagged();
+    Type* field_type = Type::NonInternal();
+    MachineRepresentation field_representation = MachineRepresentation::kTagged;
     if (map->IsStringMap()) {
       DCHECK(Name::Equals(factory()->length_string(), name));
       // The String::length property is always a smi in the range
       // [0, String::kMaxLength].
       field_type = type_cache_.kStringLengthType;
+      field_representation = MachineRepresentation::kTaggedSigned;
     } else if (map->IsJSArrayMap()) {
       DCHECK(Name::Equals(factory()->length_string(), name));
       // The JSArray::length property is a smi in the range
@@ -438,14 +454,16 @@
       // case of other arrays.
       if (IsFastDoubleElementsKind(map->elements_kind())) {
         field_type = type_cache_.kFixedDoubleArrayLengthType;
+        field_representation = MachineRepresentation::kTaggedSigned;
       } else if (IsFastElementsKind(map->elements_kind())) {
         field_type = type_cache_.kFixedArrayLengthType;
+        field_representation = MachineRepresentation::kTaggedSigned;
       } else {
         field_type = type_cache_.kJSArrayLengthType;
       }
     }
-    *access_info =
-        PropertyAccessInfo::DataField(MapList{map}, field_index, field_type);
+    *access_info = PropertyAccessInfo::DataField(
+        MapList{map}, field_index, field_representation, field_type);
     return true;
   }
   return false;
@@ -468,35 +486,43 @@
     // TODO(bmeurer): Handle transition to data constant?
     if (details.type() != DATA) return false;
     int const index = details.field_index();
-    Representation field_representation = details.representation();
+    Representation details_representation = details.representation();
     FieldIndex field_index = FieldIndex::ForPropertyIndex(
-        *transition_map, index, field_representation.IsDouble());
-    Type* field_type = Type::Tagged();
-    if (field_representation.IsSmi()) {
+        *transition_map, index, details_representation.IsDouble());
+    Type* field_type = Type::NonInternal();
+    MaybeHandle<Map> field_map;
+    MachineRepresentation field_representation = MachineRepresentation::kTagged;
+    if (details_representation.IsSmi()) {
       field_type = type_cache_.kSmi;
-    } else if (field_representation.IsDouble()) {
+      field_representation = MachineRepresentation::kTaggedSigned;
+    } else if (details_representation.IsDouble()) {
       field_type = type_cache_.kFloat64;
-    } else if (field_representation.IsHeapObject()) {
+      field_representation = MachineRepresentation::kFloat64;
+    } else if (details_representation.IsHeapObject()) {
       // Extract the field type from the property details (make sure its
       // representation is TaggedPointer to reflect the heap object case).
-      field_type = Type::Intersect(
-          transition_map->instance_descriptors()->GetFieldType(number)->Convert(
-              zone()),
-          Type::TaggedPointer(), zone());
-      if (field_type->Is(Type::None())) {
+      field_representation = MachineRepresentation::kTaggedPointer;
+      Handle<FieldType> descriptors_field_type(
+          transition_map->instance_descriptors()->GetFieldType(number),
+          isolate());
+      if (descriptors_field_type->IsNone()) {
         // Store is not safe if the field type was cleared.
         return false;
-      } else if (!Type::Any()->Is(field_type)) {
+      } else if (descriptors_field_type->IsClass()) {
         // Add proper code dependencies in case of stable field map(s).
         Handle<Map> field_owner_map(transition_map->FindFieldOwner(number),
                                     isolate());
         dependencies()->AssumeFieldType(field_owner_map);
+
+        // Remember the field map, and try to infer a useful type.
+        field_type = Type::For(descriptors_field_type->AsClass());
+        field_map = descriptors_field_type->AsClass();
       }
-      DCHECK(field_type->Is(Type::TaggedPointer()));
     }
     dependencies()->AssumeMapNotDeprecated(transition_map);
     *access_info = PropertyAccessInfo::DataField(
-        MapList{map}, field_index, field_type, holder, transition_map);
+        MapList{map}, field_index, field_representation, field_type, field_map,
+        holder, transition_map);
     return true;
   }
   return false;
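// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): both access-info.cc hunks above now
// pick a MachineRepresentation, a Type and an optional field map from the
// property details. The standalone C++ below mirrors that decision tree with
// plain enums; DetailsRep, FieldTypeKind and FieldInfo are illustrative
// stand-ins, not V8 types.
#include <cassert>

enum class DetailsRep { kSmi, kDouble, kHeapObject, kTagged };
enum class MachineRep { kTagged, kTaggedSigned, kTaggedPointer, kFloat64 };
enum class FieldTypeKind { kNone, kClass, kAny };

struct FieldInfo {
  MachineRep rep = MachineRep::kTagged;
  bool has_field_map = false;  // set only when the descriptor type is a class
  bool store_unsafe = false;   // a cleared field type makes stores unsafe
};

FieldInfo SelectFieldInfo(DetailsRep details, FieldTypeKind descriptor_type) {
  FieldInfo info;
  switch (details) {
    case DetailsRep::kSmi:
      info.rep = MachineRep::kTaggedSigned;
      break;
    case DetailsRep::kDouble:
      info.rep = MachineRep::kFloat64;
      break;
    case DetailsRep::kHeapObject:
      info.rep = MachineRep::kTaggedPointer;
      if (descriptor_type == FieldTypeKind::kNone) {
        info.store_unsafe = true;  // field type was cleared by the GC
      } else if (descriptor_type == FieldTypeKind::kClass) {
        info.has_field_map = true;  // remember the map, infer a precise type
      }
      break;
    case DetailsRep::kTagged:
      break;  // generic tagged value, nothing more to infer
  }
  return info;
}

int main() {
  assert(SelectFieldInfo(DetailsRep::kSmi, FieldTypeKind::kAny).rep ==
         MachineRep::kTaggedSigned);
  assert(SelectFieldInfo(DetailsRep::kHeapObject, FieldTypeKind::kClass)
             .has_field_map);
  assert(SelectFieldInfo(DetailsRep::kHeapObject, FieldTypeKind::kNone)
             .store_unsafe);
  return 0;
}
// ---------------------------------------------------------------------------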
diff --git a/src/compiler/access-info.h b/src/compiler/access-info.h
index daa8722..ac186fb 100644
--- a/src/compiler/access-info.h
+++ b/src/compiler/access-info.h
@@ -8,8 +8,9 @@
 #include <iosfwd>
 
 #include "src/field-index.h"
+#include "src/machine-type.h"
 #include "src/objects.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -17,10 +18,13 @@
 // Forward declarations.
 class CompilationDependencies;
 class Factory;
-class TypeCache;
 
 namespace compiler {
 
+// Forward declarations.
+class Type;
+class TypeCache;
+
 // Whether we are loading a property or storing to a property.
 enum class AccessMode { kLoad, kStore };
 
@@ -66,7 +70,9 @@
                                          Handle<Object> constant,
                                          MaybeHandle<JSObject> holder);
   static PropertyAccessInfo DataField(
-      MapList const& receiver_maps, FieldIndex field_index, Type* field_type,
+      MapList const& receiver_maps, FieldIndex field_index,
+      MachineRepresentation field_representation, Type* field_type,
+      MaybeHandle<Map> field_map = MaybeHandle<Map>(),
       MaybeHandle<JSObject> holder = MaybeHandle<JSObject>(),
       MaybeHandle<Map> transition_map = MaybeHandle<Map>());
   static PropertyAccessInfo AccessorConstant(MapList const& receiver_maps,
@@ -90,6 +96,10 @@
   Handle<Object> constant() const { return constant_; }
   FieldIndex field_index() const { return field_index_; }
   Type* field_type() const { return field_type_; }
+  MachineRepresentation field_representation() const {
+    return field_representation_;
+  }
+  MaybeHandle<Map> field_map() const { return field_map_; }
   MapList const& receiver_maps() const { return receiver_maps_; }
 
  private:
@@ -99,7 +109,9 @@
                      Handle<Object> constant, MapList const& receiver_maps);
   PropertyAccessInfo(MaybeHandle<JSObject> holder,
                      MaybeHandle<Map> transition_map, FieldIndex field_index,
-                     Type* field_type, MapList const& receiver_maps);
+                     MachineRepresentation field_representation,
+                     Type* field_type, MaybeHandle<Map> field_map,
+                     MapList const& receiver_maps);
 
   Kind kind_;
   MapList receiver_maps_;
@@ -107,7 +119,9 @@
   MaybeHandle<Map> transition_map_;
   MaybeHandle<JSObject> holder_;
   FieldIndex field_index_;
+  MachineRepresentation field_representation_;
   Type* field_type_;
+  MaybeHandle<Map> field_map_;
 };
 
 
diff --git a/src/compiler/all-nodes.cc b/src/compiler/all-nodes.cc
index 8040897..eada0cf 100644
--- a/src/compiler/all-nodes.cc
+++ b/src/compiler/all-nodes.cc
@@ -14,13 +14,26 @@
     : reachable(local_zone),
       is_reachable_(graph->NodeCount(), false, local_zone),
       only_inputs_(only_inputs) {
-  Node* end = graph->end();
+  Mark(local_zone, graph->end(), graph);
+}
+
+AllNodes::AllNodes(Zone* local_zone, Node* end, const Graph* graph,
+                   bool only_inputs)
+    : reachable(local_zone),
+      is_reachable_(graph->NodeCount(), false, local_zone),
+      only_inputs_(only_inputs) {
+  Mark(local_zone, end, graph);
+}
+
+void AllNodes::Mark(Zone* local_zone, Node* end, const Graph* graph) {
+  DCHECK_LT(end->id(), graph->NodeCount());
   is_reachable_[end->id()] = true;
   reachable.push_back(end);
-  // Find all nodes reachable from end.
+  // Find all nodes reachable from {end}.
   for (size_t i = 0; i < reachable.size(); i++) {
-    for (Node* input : reachable[i]->inputs()) {
-      if (input == nullptr || input->id() >= graph->NodeCount()) {
+    for (Node* const input : reachable[i]->inputs()) {
+      if (input == nullptr) {
+        // TODO(titzer): print a warning.
         continue;
       }
       if (!is_reachable_[input->id()]) {
@@ -28,7 +41,7 @@
         reachable.push_back(input);
       }
     }
-    if (!only_inputs) {
+    if (!only_inputs_) {
       for (Node* use : reachable[i]->uses()) {
         if (use == nullptr || use->id() >= graph->NodeCount()) {
           continue;
diff --git a/src/compiler/all-nodes.h b/src/compiler/all-nodes.h
index 36f02e9..7c70bf7 100644
--- a/src/compiler/all-nodes.h
+++ b/src/compiler/all-nodes.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_ALL_NODES_H_
 
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -16,9 +16,13 @@
 // from end.
 class AllNodes {
  public:
-  // Constructor. Traverses the graph and builds the {reachable} sets. When
-  // {only_inputs} is true, find the nodes reachable through input edges;
-  // these are all live nodes.
+  // Constructor. Traverses the graph and builds the {reachable} set of nodes
+  // reachable from {end}. When {only_inputs} is true, find the nodes
+  // reachable through input edges; these are all live nodes.
+  AllNodes(Zone* local_zone, Node* end, const Graph* graph,
+           bool only_inputs = true);
+  // Constructor. Traverses the graph and builds the {reachable} set of nodes
+  // reachable from the End node.
   AllNodes(Zone* local_zone, const Graph* graph, bool only_inputs = true);
 
   bool IsLive(Node* node) {
@@ -35,6 +39,8 @@
   NodeVector reachable;  // Nodes reachable from end.
 
  private:
+  void Mark(Zone* local_zone, Node* end, const Graph* graph);
+
   BoolVector is_reachable_;
   const bool only_inputs_;
 };
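// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): AllNodes::Mark above walks the
// graph from {end} by scanning the growing {reachable} vector, which doubles
// as the worklist. A minimal standalone version of that traversal over a
// plain adjacency list (names are illustrative, not compiler types):
#include <cstddef>
#include <vector>

std::vector<int> Reachable(const std::vector<std::vector<int>>& inputs,
                           int end) {
  std::vector<bool> is_reachable(inputs.size(), false);
  std::vector<int> reachable;
  is_reachable[end] = true;
  reachable.push_back(end);
  // Find all nodes reachable from {end}; newly discovered nodes are appended
  // and later scanned by the same outer loop.
  for (size_t i = 0; i < reachable.size(); i++) {
    for (int input : inputs[reachable[i]]) {
      if (!is_reachable[input]) {
        is_reachable[input] = true;
        reachable.push_back(input);
      }
    }
  }
  return reachable;
}

int main() {
  // Node 2 uses node 1, which uses node 0; node 3 is dead.
  std::vector<std::vector<int>> inputs = {{}, {0}, {1}, {0}};
  return Reachable(inputs, 2).size() == 3 ? 0 : 1;
}
// ---------------------------------------------------------------------------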
diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc
index 4ae282a..dbe1828 100644
--- a/src/compiler/arm/code-generator-arm.cc
+++ b/src/compiler/arm/code-generator-arm.cc
@@ -5,7 +5,7 @@
 #include "src/compiler/code-generator.h"
 
 #include "src/arm/macro-assembler-arm.h"
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -271,6 +271,37 @@
   UnwindingInfoWriter* const unwinding_info_writer_;
 };
 
+template <typename T>
+class OutOfLineFloatMin final : public OutOfLineCode {
+ public:
+  OutOfLineFloatMin(CodeGenerator* gen, T result, T left, T right)
+      : OutOfLineCode(gen), result_(result), left_(left), right_(right) {}
+
+  void Generate() final { __ FloatMinOutOfLine(result_, left_, right_); }
+
+ private:
+  T const result_;
+  T const left_;
+  T const right_;
+};
+typedef OutOfLineFloatMin<SwVfpRegister> OutOfLineFloat32Min;
+typedef OutOfLineFloatMin<DwVfpRegister> OutOfLineFloat64Min;
+
+template <typename T>
+class OutOfLineFloatMax final : public OutOfLineCode {
+ public:
+  OutOfLineFloatMax(CodeGenerator* gen, T result, T left, T right)
+      : OutOfLineCode(gen), result_(result), left_(left), right_(right) {}
+
+  void Generate() final { __ FloatMaxOutOfLine(result_, left_, right_); }
+
+ private:
+  T const result_;
+  T const left_;
+  T const right_;
+};
+typedef OutOfLineFloatMax<SwVfpRegister> OutOfLineFloat32Max;
+typedef OutOfLineFloatMax<DwVfpRegister> OutOfLineFloat64Max;
 
 Condition FlagsConditionToCondition(FlagsCondition condition) {
   switch (condition) {
@@ -707,9 +738,6 @@
     case kArchDebugBreak:
       __ stop("kArchDebugBreak");
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchComment: {
       Address comment_string = i.InputExternalReference(0).address();
       __ RecordComment(reinterpret_cast<const char*>(comment_string));
@@ -725,8 +753,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1199,33 +1227,51 @@
     case kArmVnegF64:
       __ vneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
-    case kArmVrintmF32:
+    case kArmVrintmF32: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
       break;
-    case kArmVrintmF64:
+    }
+    case kArmVrintmF64: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
-    case kArmVrintpF32:
+    }
+    case kArmVrintpF32: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
       break;
-    case kArmVrintpF64:
+    }
+    case kArmVrintpF64: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
-    case kArmVrintzF32:
+    }
+    case kArmVrintzF32: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
       break;
-    case kArmVrintzF64:
+    }
+    case kArmVrintzF64: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
-    case kArmVrintaF64:
+    }
+    case kArmVrintaF64: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
-    case kArmVrintnF32:
+    }
+    case kArmVrintnF32: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
       break;
-    case kArmVrintnF64:
+    }
+    case kArmVrintnF64: {
+      CpuFeatureScope scope(masm(), ARMv8);
       __ vrintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
       break;
+    }
     case kArmVcvtF32F64: {
       __ vcvt_f32_f64(i.OutputFloat32Register(), i.InputDoubleRegister(0));
       DCHECK_EQ(LeaveCC, i.OutputSBit());
@@ -1380,145 +1426,59 @@
       DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     case kArmFloat32Max: {
-      FloatRegister left_reg = i.InputFloat32Register(0);
-      FloatRegister right_reg = i.InputFloat32Register(1);
-      FloatRegister result_reg = i.OutputFloat32Register();
-      Label result_is_nan, return_left, return_right, check_zero, done;
-      __ VFPCompareAndSetFlags(left_reg, right_reg);
-      __ b(mi, &return_right);
-      __ b(gt, &return_left);
-      __ b(vs, &result_is_nan);
-      // Left equals right => check for -0.
-      __ VFPCompareAndSetFlags(left_reg, 0.0);
-      if (left_reg.is(result_reg) || right_reg.is(result_reg)) {
-        __ b(ne, &done);  // left == right != 0.
+      SwVfpRegister result = i.OutputFloat32Register();
+      SwVfpRegister left = i.InputFloat32Register(0);
+      SwVfpRegister right = i.InputFloat32Register(1);
+      if (left.is(right)) {
+        __ Move(result, left);
       } else {
-        __ b(ne, &return_left);  // left == right != 0.
+        auto ool = new (zone()) OutOfLineFloat32Max(this, result, left, right);
+        __ FloatMax(result, left, right, ool->entry());
+        __ bind(ool->exit());
       }
-      // At this point, both left and right are either 0 or -0.
-      // Since we operate on +0 and/or -0, vadd and vand have the same effect;
-      // the decision for vadd is easy because vand is a NEON instruction.
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&result_is_nan);
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&return_right);
-      __ Move(result_reg, right_reg);
-      if (!left_reg.is(result_reg)) __ b(&done);
-      __ bind(&return_left);
-      __ Move(result_reg, left_reg);
-      __ bind(&done);
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
     case kArmFloat64Max: {
-      DwVfpRegister left_reg = i.InputDoubleRegister(0);
-      DwVfpRegister right_reg = i.InputDoubleRegister(1);
-      DwVfpRegister result_reg = i.OutputDoubleRegister();
-      Label result_is_nan, return_left, return_right, check_zero, done;
-      __ VFPCompareAndSetFlags(left_reg, right_reg);
-      __ b(mi, &return_right);
-      __ b(gt, &return_left);
-      __ b(vs, &result_is_nan);
-      // Left equals right => check for -0.
-      __ VFPCompareAndSetFlags(left_reg, 0.0);
-      if (left_reg.is(result_reg) || right_reg.is(result_reg)) {
-        __ b(ne, &done);  // left == right != 0.
+      DwVfpRegister result = i.OutputDoubleRegister();
+      DwVfpRegister left = i.InputDoubleRegister(0);
+      DwVfpRegister right = i.InputDoubleRegister(1);
+      if (left.is(right)) {
+        __ Move(result, left);
       } else {
-        __ b(ne, &return_left);  // left == right != 0.
+        auto ool = new (zone()) OutOfLineFloat64Max(this, result, left, right);
+        __ FloatMax(result, left, right, ool->entry());
+        __ bind(ool->exit());
       }
-      // At this point, both left and right are either 0 or -0.
-      // Since we operate on +0 and/or -0, vadd and vand have the same effect;
-      // the decision for vadd is easy because vand is a NEON instruction.
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&result_is_nan);
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&return_right);
-      __ Move(result_reg, right_reg);
-      if (!left_reg.is(result_reg)) __ b(&done);
-      __ bind(&return_left);
-      __ Move(result_reg, left_reg);
-      __ bind(&done);
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
     case kArmFloat32Min: {
-      FloatRegister left_reg = i.InputFloat32Register(0);
-      FloatRegister right_reg = i.InputFloat32Register(1);
-      FloatRegister result_reg = i.OutputFloat32Register();
-      Label result_is_nan, return_left, return_right, check_zero, done;
-      __ VFPCompareAndSetFlags(left_reg, right_reg);
-      __ b(mi, &return_left);
-      __ b(gt, &return_right);
-      __ b(vs, &result_is_nan);
-      // Left equals right => check for -0.
-      __ VFPCompareAndSetFlags(left_reg, 0.0);
-      if (left_reg.is(result_reg) || right_reg.is(result_reg)) {
-        __ b(ne, &done);  // left == right != 0.
+      SwVfpRegister result = i.OutputFloat32Register();
+      SwVfpRegister left = i.InputFloat32Register(0);
+      SwVfpRegister right = i.InputFloat32Register(1);
+      if (left.is(right)) {
+        __ Move(result, left);
       } else {
-        __ b(ne, &return_left);  // left == right != 0.
+        auto ool = new (zone()) OutOfLineFloat32Min(this, result, left, right);
+        __ FloatMin(result, left, right, ool->entry());
+        __ bind(ool->exit());
       }
-      // At this point, both left and right are either 0 or -0.
-      // We could use a single 'vorr' instruction here if we had NEON support.
-      // The algorithm is: -((-L) + (-R)), which in case of L and R being
-      // different registers is most efficiently expressed as -((-L) - R).
-      __ vneg(left_reg, left_reg);
-      if (left_reg.is(right_reg)) {
-        __ vadd(result_reg, left_reg, right_reg);
-      } else {
-        __ vsub(result_reg, left_reg, right_reg);
-      }
-      __ vneg(result_reg, result_reg);
-      __ b(&done);
-      __ bind(&result_is_nan);
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&return_right);
-      __ Move(result_reg, right_reg);
-      if (!left_reg.is(result_reg)) __ b(&done);
-      __ bind(&return_left);
-      __ Move(result_reg, left_reg);
-      __ bind(&done);
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
     case kArmFloat64Min: {
-      DwVfpRegister left_reg = i.InputDoubleRegister(0);
-      DwVfpRegister right_reg = i.InputDoubleRegister(1);
-      DwVfpRegister result_reg = i.OutputDoubleRegister();
-      Label result_is_nan, return_left, return_right, check_zero, done;
-      __ VFPCompareAndSetFlags(left_reg, right_reg);
-      __ b(mi, &return_left);
-      __ b(gt, &return_right);
-      __ b(vs, &result_is_nan);
-      // Left equals right => check for -0.
-      __ VFPCompareAndSetFlags(left_reg, 0.0);
-      if (left_reg.is(result_reg) || right_reg.is(result_reg)) {
-        __ b(ne, &done);  // left == right != 0.
+      DwVfpRegister result = i.OutputDoubleRegister();
+      DwVfpRegister left = i.InputDoubleRegister(0);
+      DwVfpRegister right = i.InputDoubleRegister(1);
+      if (left.is(right)) {
+        __ Move(result, left);
       } else {
-        __ b(ne, &return_left);  // left == right != 0.
+        auto ool = new (zone()) OutOfLineFloat64Min(this, result, left, right);
+        __ FloatMin(result, left, right, ool->entry());
+        __ bind(ool->exit());
       }
-      // At this point, both left and right are either 0 or -0.
-      // We could use a single 'vorr' instruction here if we had NEON support.
-      // The algorithm is: -((-L) + (-R)), which in case of L and R being
-      // different registers is most efficiently expressed as -((-L) - R).
-      __ vneg(left_reg, left_reg);
-      if (left_reg.is(right_reg)) {
-        __ vadd(result_reg, left_reg, right_reg);
-      } else {
-        __ vsub(result_reg, left_reg, right_reg);
-      }
-      __ vneg(result_reg, result_reg);
-      __ b(&done);
-      __ bind(&result_is_nan);
-      __ vadd(result_reg, left_reg, right_reg);
-      __ b(&done);
-      __ bind(&return_right);
-      __ Move(result_reg, right_reg);
-      if (!left_reg.is(result_reg)) __ b(&done);
-      __ bind(&return_left);
-      __ Move(result_reg, left_reg);
-      __ bind(&done);
+      DCHECK_EQ(LeaveCC, i.OutputSBit());
       break;
     }
     case kArmFloat64SilenceNaN: {
@@ -1679,7 +1639,8 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   // TODO(turbofan): We should be able to generate better code by sharing the
@@ -1688,7 +1649,7 @@
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   __ CheckConstPool(false, false);
   return kSuccess;
@@ -1967,33 +1928,31 @@
     __ vstr(temp_1, src);
   } else if (source->IsFPRegister()) {
     LowDwVfpRegister temp = kScratchDoubleReg;
-      DwVfpRegister src = g.ToDoubleRegister(source);
-      if (destination->IsFPRegister()) {
-        DwVfpRegister dst = g.ToDoubleRegister(destination);
-        __ Move(temp, src);
-        __ Move(src, dst);
-        __ Move(dst, temp);
-      } else {
-        DCHECK(destination->IsFPStackSlot());
-        MemOperand dst = g.ToMemOperand(destination);
-        __ Move(temp, src);
-        __ vldr(src, dst);
-        __ vstr(temp, dst);
-      }
+    DwVfpRegister src = g.ToDoubleRegister(source);
+    if (destination->IsFPRegister()) {
+      DwVfpRegister dst = g.ToDoubleRegister(destination);
+      __ vswp(src, dst);
+    } else {
+      DCHECK(destination->IsFPStackSlot());
+      MemOperand dst = g.ToMemOperand(destination);
+      __ Move(temp, src);
+      __ vldr(src, dst);
+      __ vstr(temp, dst);
+    }
   } else if (source->IsFPStackSlot()) {
     DCHECK(destination->IsFPStackSlot());
     Register temp_0 = kScratchReg;
     LowDwVfpRegister temp_1 = kScratchDoubleReg;
     MemOperand src0 = g.ToMemOperand(source);
     MemOperand dst0 = g.ToMemOperand(destination);
-      MemOperand src1(src0.rn(), src0.offset() + kPointerSize);
-      MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize);
-      __ vldr(temp_1, dst0);  // Save destination in temp_1.
-      __ ldr(temp_0, src0);   // Then use temp_0 to copy source to destination.
-      __ str(temp_0, dst0);
-      __ ldr(temp_0, src1);
-      __ str(temp_0, dst1);
-      __ vstr(temp_1, src0);
+    MemOperand src1(src0.rn(), src0.offset() + kPointerSize);
+    MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize);
+    __ vldr(temp_1, dst0);  // Save destination in temp_1.
+    __ ldr(temp_0, src0);   // Then use temp_0 to copy source to destination.
+    __ str(temp_0, dst0);
+    __ ldr(temp_0, src1);
+    __ str(temp_0, dst1);
+    __ vstr(temp_1, src0);
   } else {
     // No other combinations are possible.
     UNREACHABLE();
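// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the kArmFloat{32,64}{Min,Max}
// cases above now handle only the trivial left == right case inline and leave
// everything else to FloatMax/FloatMin plus the out-of-line helpers. The
// replaced inline sequences had to get NaN and signed zero right: for Max,
// equal zeros are combined with vadd so that max(+0, -0) is +0, and any NaN
// input yields NaN. A standalone model of that Max behaviour, which the new
// path presumably has to preserve (illustrative only):
#include <cassert>
#include <cmath>

double FloatMaxModel(double left, double right) {
  if (std::isnan(left) || std::isnan(right)) return left + right;  // NaN
  if (left == 0.0 && right == 0.0) return left + right;  // max(+0, -0) == +0
  return left >= right ? left : right;
}

int main() {
  assert(!std::signbit(FloatMaxModel(0.0, -0.0)));        // +0
  assert(std::signbit(FloatMaxModel(-0.0, -0.0)));        // -0
  assert(std::isnan(FloatMaxModel(1.0, std::nan(""))));   // NaN propagates
  assert(FloatMaxModel(2.0, 3.0) == 3.0);
  return 0;
}
// ---------------------------------------------------------------------------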
diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc
index 4b0b6af..ceb5b25 100644
--- a/src/compiler/arm/instruction-selector-arm.cc
+++ b/src/compiler/arm/instruction-selector-arm.cc
@@ -252,14 +252,7 @@
     inputs[input_count++] = g.Label(cont->false_block());
   }
 
-  if (cont->IsDeoptimize()) {
-    // If we can deoptimize as a result of the binop, we need to make sure that
-    // the deopt inputs are not overwritten by the binop result. One way
-    // to achieve that is to declare the output register as same-as-first.
-    outputs[output_count++] = g.DefineSameAsFirst(node);
-  } else {
-    outputs[output_count++] = g.DefineAsRegister(node);
-  }
+  outputs[output_count++] = g.DefineAsRegister(node);
   if (cont->IsSet()) {
     outputs[output_count++] = g.DefineAsRegister(cont->result());
   }
@@ -419,6 +412,10 @@
   EmitLoad(this, opcode, &output, base, index);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   ArmOperandGenerator g(this);
@@ -431,7 +428,7 @@
   MachineRepresentation rep = store_rep.representation();
 
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     AddressingMode addressing_mode;
     InstructionOperand inputs[3];
     size_t input_count = 0;
@@ -1516,46 +1513,55 @@
 
 
 void InstructionSelector::VisitFloat32RoundDown(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintmF32, node);
 }
 
 
 void InstructionSelector::VisitFloat64RoundDown(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintmF64, node);
 }
 
 
 void InstructionSelector::VisitFloat32RoundUp(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintpF32, node);
 }
 
 
 void InstructionSelector::VisitFloat64RoundUp(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintpF64, node);
 }
 
 
 void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintzF32, node);
 }
 
 
 void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintzF64, node);
 }
 
 
 void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintaF64, node);
 }
 
 
 void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintnF32, node);
 }
 
 
 void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
+  DCHECK(CpuFeatures::IsSupported(ARMv8));
   VisitRR(this, kArmVrintnF64, node);
 }
 
@@ -1965,6 +1971,10 @@
     break;
   }
 
+  if (user->opcode() == IrOpcode::kWord32Equal) {
+    return VisitWordCompare(selector, user, cont);
+  }
+
   // Continuation could not be combined with a compare, emit compare against 0.
   ArmOperandGenerator g(selector);
   InstructionCode const opcode =
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index 35f7e43..f543b18 100644
--- a/src/compiler/arm64/code-generator-arm64.cc
+++ b/src/compiler/arm64/code-generator-arm64.cc
@@ -6,7 +6,7 @@
 
 #include "src/arm64/frames-arm64.h"
 #include "src/arm64/macro-assembler-arm64.h"
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -766,9 +766,6 @@
     case kArchDebugBreak:
       __ Debug("kArchDebugBreak", 0, BREAK);
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchComment: {
       Address comment_string = i.InputExternalReference(0).address();
       __ RecordComment(reinterpret_cast<const char*>(comment_string));
@@ -783,8 +780,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1755,13 +1752,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
@@ -1956,10 +1954,14 @@
         __ Fmov(dst, src.ToFloat32());
       } else {
         DCHECK(destination->IsFPStackSlot());
-        UseScratchRegisterScope scope(masm());
-        FPRegister temp = scope.AcquireS();
-        __ Fmov(temp, src.ToFloat32());
-        __ Str(temp, g.ToMemOperand(destination, masm()));
+        if (bit_cast<int32_t>(src.ToFloat32()) == 0) {
+          __ Str(wzr, g.ToMemOperand(destination, masm()));
+        } else {
+          UseScratchRegisterScope scope(masm());
+          FPRegister temp = scope.AcquireS();
+          __ Fmov(temp, src.ToFloat32());
+          __ Str(temp, g.ToMemOperand(destination, masm()));
+        }
       }
     } else {
       DCHECK_EQ(Constant::kFloat64, src.type());
@@ -1968,10 +1970,14 @@
         __ Fmov(dst, src.ToFloat64());
       } else {
         DCHECK(destination->IsFPStackSlot());
-        UseScratchRegisterScope scope(masm());
-        FPRegister temp = scope.AcquireD();
-        __ Fmov(temp, src.ToFloat64());
-        __ Str(temp, g.ToMemOperand(destination, masm()));
+        if (bit_cast<int64_t>(src.ToFloat64()) == 0) {
+          __ Str(xzr, g.ToMemOperand(destination, masm()));
+        } else {
+          UseScratchRegisterScope scope(masm());
+          FPRegister temp = scope.AcquireD();
+          __ Fmov(temp, src.ToFloat64());
+          __ Str(temp, g.ToMemOperand(destination, masm()));
+        }
       }
     }
   } else if (source->IsFPRegister()) {
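// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the hunks above store a float
// constant through wzr/xzr only when its bit pattern is exactly zero, which
// matches +0.0 but not -0.0 (whose sign bit is set). A standalone
// illustration of that bit-pattern test (illustrative helper, not V8 code):
#include <cassert>
#include <cstdint>
#include <cstring>

bool HasAllZeroBits(double value) {
  int64_t bits;
  static_assert(sizeof(bits) == sizeof(value), "size mismatch");
  std::memcpy(&bits, &value, sizeof(bits));  // same idea as bit_cast<int64_t>
  return bits == 0;
}

int main() {
  assert(HasAllZeroBits(0.0));    // can be stored via xzr
  assert(!HasAllZeroBits(-0.0));  // sign bit set, needs an FP register
  assert(!HasAllZeroBits(1.0));
  return 0;
}
// ---------------------------------------------------------------------------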
diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc
index 9bc5385..da27be8 100644
--- a/src/compiler/arm64/instruction-selector-arm64.cc
+++ b/src/compiler/arm64/instruction-selector-arm64.cc
@@ -434,24 +434,18 @@
   } else if (TryMatchAnyShift(selector, node, right_node, &opcode,
                               !is_add_sub)) {
     Matcher m_shift(right_node);
-    inputs[input_count++] = cont->IsDeoptimize()
-                                ? g.UseRegister(left_node)
-                                : g.UseRegisterOrImmediateZero(left_node);
+    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
     inputs[input_count++] = g.UseRegister(m_shift.left().node());
     inputs[input_count++] = g.UseImmediate(m_shift.right().node());
   } else if (can_commute && TryMatchAnyShift(selector, node, left_node, &opcode,
                                              !is_add_sub)) {
     if (must_commute_cond) cont->Commute();
     Matcher m_shift(left_node);
-    inputs[input_count++] = cont->IsDeoptimize()
-                                ? g.UseRegister(right_node)
-                                : g.UseRegisterOrImmediateZero(right_node);
+    inputs[input_count++] = g.UseRegisterOrImmediateZero(right_node);
     inputs[input_count++] = g.UseRegister(m_shift.left().node());
     inputs[input_count++] = g.UseImmediate(m_shift.right().node());
   } else {
-    inputs[input_count++] = cont->IsDeoptimize()
-                                ? g.UseRegister(left_node)
-                                : g.UseRegisterOrImmediateZero(left_node);
+    inputs[input_count++] = g.UseRegisterOrImmediateZero(left_node);
     inputs[input_count++] = g.UseRegister(right_node);
   }
 
@@ -461,14 +455,7 @@
   }
 
   if (!IsComparisonField::decode(properties)) {
-    if (cont->IsDeoptimize()) {
-      // If we can deoptimize as a result of the binop, we need to make sure
-      // that the deopt inputs are not overwritten by the binop result. One way
-      // to achieve that is to declare the output register as same-as-first.
-      outputs[output_count++] = g.DefineSameAsFirst(node);
-    } else {
-      outputs[output_count++] = g.DefineAsRegister(node);
-    }
+    outputs[output_count++] = g.DefineAsRegister(node);
   }
 
   if (cont->IsSet()) {
@@ -606,6 +593,10 @@
   EmitLoad(this, node, opcode, immediate_mode, rep);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   Arm64OperandGenerator g(this);
@@ -619,7 +610,7 @@
 
   // TODO(arm64): I guess this could be done in a better way.
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     AddressingMode addressing_mode;
     InstructionOperand inputs[3];
     size_t input_count = 0;
@@ -2128,11 +2119,101 @@
   }
 }
 
+// Map {cond} to kEqual or kNotEqual, so that we can select
+// either TBZ or TBNZ when generating code for:
+// (x cmp 0), b.{cond}
+FlagsCondition MapForTbz(FlagsCondition cond) {
+  switch (cond) {
+    case kSignedLessThan:  // generate TBNZ
+      return kNotEqual;
+    case kSignedGreaterThanOrEqual:  // generate TBZ
+      return kEqual;
+    default:
+      UNREACHABLE();
+      return cond;
+  }
+}
+
+// Map {cond} to kEqual or kNotEqual, so that we can select
+// either CBZ or CBNZ when generating code for:
+// (x cmp 0), b.{cond}
+FlagsCondition MapForCbz(FlagsCondition cond) {
+  switch (cond) {
+    case kEqual:     // generate CBZ
+    case kNotEqual:  // generate CBNZ
+      return cond;
+    case kUnsignedLessThanOrEqual:  // generate CBZ
+      return kEqual;
+    case kUnsignedGreaterThan:  // generate CBNZ
+      return kNotEqual;
+    default:
+      UNREACHABLE();
+      return cond;
+  }
+}
+
+// Try to emit TBZ, TBNZ, CBZ or CBNZ for certain comparisons of {node}
+// against zero, depending on the condition.
+bool TryEmitCbzOrTbz(InstructionSelector* selector, Node* node, Node* user,
+                     FlagsCondition cond, FlagsContinuation* cont) {
+  Int32BinopMatcher m_user(user);
+  USE(m_user);
+  DCHECK(m_user.right().Is(0) || m_user.left().Is(0));
+
+  // Only handle branches.
+  if (!cont->IsBranch()) return false;
+
+  switch (cond) {
+    case kSignedLessThan:
+    case kSignedGreaterThanOrEqual: {
+      Arm64OperandGenerator g(selector);
+      cont->Overwrite(MapForTbz(cond));
+      Int32Matcher m(node);
+      if (m.IsFloat64ExtractHighWord32() && selector->CanCover(user, node)) {
+        // SignedLessThan(Float64ExtractHighWord32(x), 0) and
+        // SignedGreaterThanOrEqual(Float64ExtractHighWord32(x), 0) essentially
+        // check the sign bit of a 64-bit floating point value.
+        InstructionOperand temp = g.TempRegister();
+        selector->Emit(kArm64U64MoveFloat64, temp,
+                       g.UseRegister(node->InputAt(0)));
+        selector->Emit(cont->Encode(kArm64TestAndBranch), g.NoOutput(), temp,
+                       g.TempImmediate(63), g.Label(cont->true_block()),
+                       g.Label(cont->false_block()));
+        return true;
+      }
+      selector->Emit(cont->Encode(kArm64TestAndBranch32), g.NoOutput(),
+                     g.UseRegister(node), g.TempImmediate(31),
+                     g.Label(cont->true_block()), g.Label(cont->false_block()));
+      return true;
+    }
+    case kEqual:
+    case kNotEqual:
+    case kUnsignedLessThanOrEqual:
+    case kUnsignedGreaterThan: {
+      Arm64OperandGenerator g(selector);
+      cont->Overwrite(MapForCbz(cond));
+      selector->Emit(cont->Encode(kArm64CompareAndBranch32), g.NoOutput(),
+                     g.UseRegister(node), g.Label(cont->true_block()),
+                     g.Label(cont->false_block()));
+      return true;
+    }
+    default:
+      return false;
+  }
+}
+
 void VisitWord32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
   Int32BinopMatcher m(node);
   ArchOpcode opcode = kArm64Cmp32;
   FlagsCondition cond = cont->condition();
+  if (m.right().Is(0)) {
+    if (TryEmitCbzOrTbz(selector, m.left().node(), node, cond, cont)) return;
+  } else if (m.left().Is(0)) {
+    FlagsCondition commuted_cond = CommuteFlagsCondition(cond);
+    if (TryEmitCbzOrTbz(selector, m.right().node(), node, commuted_cond, cont))
+      return;
+  }
   ImmediateMode immediate_mode = kArithmeticImm;
   if (m.right().Is(0) && (m.left().IsInt32Add() || m.left().IsWord32And())) {
     // Emit flag setting add/and instructions for comparisons against zero.
@@ -2145,14 +2226,18 @@
              (m.right().IsInt32Add() || m.right().IsWord32And())) {
     // Same as above, but we need to commute the condition before we
     // continue with the rest of the checks.
-    cond = CommuteFlagsCondition(cond);
-    if (CanUseFlagSettingBinop(cond)) {
+    FlagsCondition commuted_cond = CommuteFlagsCondition(cond);
+    if (CanUseFlagSettingBinop(commuted_cond)) {
       Node* binop = m.right().node();
       MaybeReplaceCmpZeroWithFlagSettingBinop(selector, &node, binop, &opcode,
-                                              cond, cont, &immediate_mode);
+                                              commuted_cond, cont,
+                                              &immediate_mode);
     }
-  } else if (m.right().IsInt32Sub()) {
+  } else if (m.right().IsInt32Sub() && (cond == kEqual || cond == kNotEqual)) {
     // Select negated compare for comparisons with negated right input.
+    // Only do this for kEqual and kNotEqual, which do not depend on the
+    // C and V flags, as those flags will be different with CMN when the
+    // right-hand side of the original subtraction is INT_MIN.
     Node* sub = m.right().node();
     Int32BinopMatcher msub(sub);
     if (msub.left().Is(0)) {
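// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): TryEmitCbzOrTbz above rewrites
// "x < 0" / "x >= 0" branches into a test-and-branch on the sign bit (bit 31,
// or bit 63 after moving out the double in the Float64ExtractHighWord32
// case), and plain (in)equality with zero into CBZ/CBNZ. The CMN restriction
// to kEqual/kNotEqual exists because negating INT_MIN overflows, so the C and
// V flags would differ from those of a true comparison. A standalone check of
// the sign-bit identity the TBZ/TBNZ path relies on (illustrative only):
#include <cassert>
#include <cstdint>

bool SignBitSet(int32_t x) {
  return ((static_cast<uint32_t>(x) >> 31) & 1) != 0;  // what TBNZ #31 tests
}

int main() {
  const int32_t samples[] = {0, 1, -1, INT32_MIN, INT32_MAX};
  for (int32_t x : samples) {
    assert(SignBitSet(x) == (x < 0));  // kSignedLessThan(x, 0) == sign bit
  }
  return 0;
}
// ---------------------------------------------------------------------------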
diff --git a/src/compiler/ast-graph-builder.cc b/src/compiler/ast-graph-builder.cc
index 0f1fb29..b292a2e 100644
--- a/src/compiler/ast-graph-builder.cc
+++ b/src/compiler/ast-graph-builder.cc
@@ -4,7 +4,9 @@
 
 #include "src/compiler/ast-graph-builder.h"
 
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/compiler/ast-loop-assignment-analyzer.h"
 #include "src/compiler/control-builders.h"
@@ -16,7 +18,6 @@
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/state-values-utils.h"
 #include "src/compiler/type-hint-analyzer.h"
-#include "src/parsing/parser.h"
 
 namespace v8 {
 namespace internal {
@@ -410,14 +411,15 @@
   TryFinallyBuilder* control_;
 };
 
-
 AstGraphBuilder::AstGraphBuilder(Zone* local_zone, CompilationInfo* info,
-                                 JSGraph* jsgraph, LoopAssignmentAnalysis* loop,
+                                 JSGraph* jsgraph, float invocation_frequency,
+                                 LoopAssignmentAnalysis* loop,
                                  TypeHintAnalysis* type_hint_analysis)
     : isolate_(info->isolate()),
       local_zone_(local_zone),
       info_(info),
       jsgraph_(jsgraph),
+      invocation_frequency_(invocation_frequency),
       environment_(nullptr),
       ast_context_(nullptr),
       globals_(0, local_zone),
@@ -535,12 +537,11 @@
   // TODO(mstarzinger): For now we cannot assume that the {this} parameter is
   // not {the_hole}, because for derived classes {this} has a TDZ and the
   // JSConstructStubForDerived magically passes {the_hole} as a receiver.
-  if (scope->has_this_declaration() && scope->receiver()->is_const_mode()) {
+  if (scope->has_this_declaration() && scope->receiver()->mode() == CONST) {
     env.RawParameterBind(0, jsgraph()->TheHoleConstant());
   }
 
-  // Build local context only if there are context allocated variables.
-  if (scope->num_heap_slots() > 0) {
+  if (scope->NeedsContext()) {
     // Push a new inner context scope for the current activation.
     Node* inner_context = BuildLocalActivationContext(GetFunctionContext());
     ContextScope top_context(this, scope, inner_context);
@@ -573,9 +574,8 @@
   BuildArgumentsObject(scope->arguments());
 
   // Build rest arguments array if it is used.
-  int rest_index;
-  Variable* rest_parameter = scope->rest_parameter(&rest_index);
-  BuildRestArgumentsArray(rest_parameter, rest_index);
+  Variable* rest_parameter = scope->rest_parameter();
+  BuildRestArgumentsArray(rest_parameter);
 
   // Build assignment to {.this_function} variable if it is used.
   BuildThisFunctionVariable(scope->this_function_var());
@@ -629,8 +629,7 @@
 // Gets the bailout id just before reading a variable proxy, but only for
 // unallocated variables.
 static BailoutId BeforeId(VariableProxy* proxy) {
-  return proxy->var()->IsUnallocatedOrGlobalSlot() ? proxy->BeforeId()
-                                                   : BailoutId::None();
+  return proxy->var()->IsUnallocated() ? proxy->BeforeId() : BailoutId::None();
 }
 
 static const char* GetDebugParameterName(Zone* zone, DeclarationScope* scope,
@@ -788,6 +787,10 @@
   return env;
 }
 
+AstGraphBuilder::Environment* AstGraphBuilder::Environment::CopyForOsrEntry() {
+  return new (zone())
+      Environment(this, builder_->liveness_analyzer()->NewBlock());
+}
 
 AstGraphBuilder::Environment*
 AstGraphBuilder::Environment::CopyAndShareLiveness() {
@@ -802,8 +805,15 @@
 
 AstGraphBuilder::Environment* AstGraphBuilder::Environment::CopyForLoop(
     BitVector* assigned, bool is_osr) {
-  PrepareForLoop(assigned, is_osr);
-  return CopyAndShareLiveness();
+  PrepareForLoop(assigned);
+  Environment* loop = CopyAndShareLiveness();
+  if (is_osr) {
+    // Create and merge the OSR entry if necessary.
+    Environment* osr_env = CopyForOsrEntry();
+    osr_env->PrepareForOsrEntry();
+    loop->Merge(osr_env);
+  }
+  return loop;
 }
 
 
@@ -1085,7 +1095,6 @@
 void AstGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
   Variable* variable = decl->proxy()->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = decl->proxy()->VariableFeedbackSlot();
@@ -1125,7 +1134,6 @@
 void AstGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) {
   Variable* variable = decl->proxy()->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
           decl->fun(), info()->script(), info());
@@ -1240,7 +1248,8 @@
   VisitForValue(stmt->expression());
   Node* value = environment()->Pop();
   Node* object = BuildToObject(value, stmt->ToObjectId());
-  const Operator* op = javascript()->CreateWithContext();
+  Handle<ScopeInfo> scope_info = stmt->scope()->scope_info();
+  const Operator* op = javascript()->CreateWithContext(scope_info);
   Node* context = NewNode(op, object, GetFunctionClosureForContext());
   PrepareFrameState(context, stmt->EntryId());
   VisitInScope(stmt->statement(), stmt->scope(), context);
@@ -1394,9 +1403,14 @@
       Node* cache_type = environment()->Peek(3);
       Node* object = environment()->Peek(4);
 
-      // Check loop termination condition.
-      Node* exit_cond = NewNode(javascript()->ForInDone(), index, cache_length);
-      for_loop.BreakWhen(exit_cond);
+      // Check loop termination condition (we know that the {index} is always
+      // in Smi range, so we can just set the hint on the comparison below).
+      PrepareEagerCheckpoint(stmt->EntryId());
+      Node* exit_cond =
+          NewNode(javascript()->LessThan(CompareOperationHint::kSignedSmall),
+                  index, cache_length);
+      PrepareFrameState(exit_cond, BailoutId::None());
+      for_loop.BreakUnless(exit_cond);
 
       // Compute the next enumerated value.
       Node* value = NewNode(javascript()->ForInNext(), object, cache_array,
@@ -1424,9 +1438,13 @@
       test_value.End();
       for_loop.EndBody();
 
-      // Increment counter and continue.
+      // Increment counter and continue (we know that the {index} is always
+      // in Smi range, so we can just set the hint on the increment below).
       index = environment()->Peek(0);
-      index = NewNode(javascript()->ForInStep(), index);
+      PrepareEagerCheckpoint(stmt->IncrementId());
+      index = NewNode(javascript()->Add(BinaryOperationHint::kSignedSmall),
+                      index, jsgraph()->OneConstant());
+      PrepareFrameState(index, BailoutId::None());
       environment()->Poke(0, index);
     }
     for_loop.EndLoop();
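// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the two for-in hunks above swap
// the dedicated ForInDone/ForInStep operators for a SignedSmall-hinted
// compare and add, so the enumeration is driven like an ordinary counting
// loop over the enumeration cache. A trivial standalone sketch of that loop
// shape (the cache and ForInNext details are simplified away):
#include <cassert>
#include <string>
#include <vector>

int main() {
  const std::vector<std::string> cache = {"a", "b", "c"};  // enumeration cache
  const int cache_length = static_cast<int>(cache.size());
  int visited = 0;
  // {index} stays in Smi range, so both "index < cache_length" and
  // "index + 1" can carry a small-integer feedback hint.
  for (int index = 0; index < cache_length; index = index + 1) {
    const std::string& key = cache[index];  // stand-in for ForInNext
    (void)key;
    visited++;
  }
  assert(visited == cache_length);
  return 0;
}
// ---------------------------------------------------------------------------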
@@ -1475,7 +1493,8 @@
   // Create a catch scope that binds the exception.
   Node* exception = try_control.GetExceptionNode();
   Handle<String> name = stmt->variable()->name();
-  const Operator* op = javascript()->CreateCatchContext(name);
+  Handle<ScopeInfo> scope_info = stmt->scope()->scope_info();
+  const Operator* op = javascript()->CreateCatchContext(name, scope_info);
   Node* context = NewNode(op, exception, GetFunctionClosureForContext());
 
   // Evaluate the catch-block.
@@ -1595,7 +1614,7 @@
 
   // Create nodes to store method values into the literal.
   for (int i = 0; i < expr->properties()->length(); i++) {
-    ObjectLiteral::Property* property = expr->properties()->at(i);
+    ClassLiteral::Property* property = expr->properties()->at(i);
     environment()->Push(environment()->Peek(property->is_static() ? 1 : 0));
 
     VisitForValue(property->key());
@@ -1620,11 +1639,7 @@
     BuildSetHomeObject(value, receiver, property);
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED: {
+      case ClassLiteral::Property::METHOD: {
         Node* attr = jsgraph()->Constant(DONT_ENUM);
         Node* set_function_name =
             jsgraph()->Constant(property->NeedsSetFunctionName());
@@ -1634,20 +1649,24 @@
         PrepareFrameState(call, BailoutId::None());
         break;
       }
-      case ObjectLiteral::Property::GETTER: {
+      case ClassLiteral::Property::GETTER: {
         Node* attr = jsgraph()->Constant(DONT_ENUM);
         const Operator* op = javascript()->CallRuntime(
             Runtime::kDefineGetterPropertyUnchecked, 4);
         NewNode(op, receiver, key, value, attr);
         break;
       }
-      case ObjectLiteral::Property::SETTER: {
+      case ClassLiteral::Property::SETTER: {
         Node* attr = jsgraph()->Constant(DONT_ENUM);
         const Operator* op = javascript()->CallRuntime(
             Runtime::kDefineSetterPropertyUnchecked, 4);
         NewNode(op, receiver, key, value, attr);
         break;
       }
+      case ClassLiteral::Property::FIELD: {
+        UNREACHABLE();
+        break;
+      }
     }
   }
 
@@ -1945,8 +1964,8 @@
 
   // Create nodes to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < expr->values()->length(); array_index++) {
+  for (int array_index = 0; array_index < expr->values()->length();
+       array_index++) {
     Expression* subexpr = expr->values()->at(array_index);
     DCHECK(!subexpr->IsSpread());
     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
@@ -1961,26 +1980,6 @@
                       OutputFrameStateCombine::Ignore());
   }
 
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  for (; array_index < expr->values()->length(); array_index++) {
-    Expression* subexpr = expr->values()->at(array_index);
-    DCHECK(!subexpr->IsSpread());
-
-    VisitForValue(subexpr);
-    {
-      Node* value = environment()->Pop();
-      Node* array = environment()->Pop();
-      const Operator* op = javascript()->CallRuntime(Runtime::kAppendElement);
-      Node* result = NewNode(op, array, value);
-      PrepareFrameState(result, expr->GetIdForElement(array_index));
-      environment()->Push(result);
-    }
-  }
-
   ast_context()->ProduceValue(expr, environment()->Pop());
 }
 
@@ -2447,12 +2446,17 @@
   }
 
   // Create node to perform the function call.
+  float const frequency = ComputeCallFrequency(expr->CallFeedbackICSlot());
   VectorSlotPair feedback = CreateVectorSlotPair(expr->CallFeedbackICSlot());
-  const Operator* call = javascript()->CallFunction(
-      args->length() + 2, feedback, receiver_hint, expr->tail_call_mode());
+  const Operator* call =
+      javascript()->CallFunction(args->length() + 2, frequency, feedback,
+                                 receiver_hint, expr->tail_call_mode());
   PrepareEagerCheckpoint(possibly_eval ? expr->EvalId() : expr->CallId());
   Node* value = ProcessArguments(call, args->length() + 2);
-  environment()->Push(value->InputAt(0));  // The callee passed to the call.
+  // The callee passed to the call; we just need to push something here to
+  // satisfy the bailout location contract. The fullcodegen code will not
+  // ever look at this value, so we just push optimized_out here.
+  environment()->Push(jsgraph()->OptimizedOutConstant());
   PrepareFrameState(value, expr->ReturnId(), OutputFrameStateCombine::Push());
   environment()->Drop(1);
   ast_context()->ProduceValue(expr, value);
@@ -2480,7 +2484,7 @@
 
   // Create node to perform the super call.
   const Operator* call =
-      javascript()->CallConstruct(args->length() + 2, VectorSlotPair());
+      javascript()->CallConstruct(args->length() + 2, 0.0f, VectorSlotPair());
   Node* value = ProcessArguments(call, args->length() + 2);
   PrepareFrameState(value, expr->ReturnId(), OutputFrameStateCombine::Push());
   ast_context()->ProduceValue(expr, value);
@@ -2498,9 +2502,10 @@
   environment()->Push(environment()->Peek(args->length()));
 
   // Create node to perform the construct call.
+  float const frequency = ComputeCallFrequency(expr->CallNewFeedbackSlot());
   VectorSlotPair feedback = CreateVectorSlotPair(expr->CallNewFeedbackSlot());
   const Operator* call =
-      javascript()->CallConstruct(args->length() + 2, feedback);
+      javascript()->CallConstruct(args->length() + 2, frequency, feedback);
   Node* value = ProcessArguments(call, args->length() + 2);
   PrepareFrameState(value, expr->ReturnId(), OutputFrameStateCombine::Push());
   ast_context()->ProduceValue(expr, value);
@@ -3086,7 +3091,7 @@
   DCHECK_EQ(DYNAMIC_GLOBAL, variable->mode());
   uint32_t check_depths = 0;
   for (Scope* s = current_scope(); s != nullptr; s = s->outer_scope()) {
-    if (s->num_heap_slots() <= 0) continue;
+    if (!s->NeedsContext()) continue;
     if (!s->calls_sloppy_eval()) continue;
     int depth = current_scope()->ContextChainLength(s);
     if (depth > kMaxCheckDepth) return kFullCheckRequired;
@@ -3100,7 +3105,7 @@
   DCHECK_EQ(DYNAMIC_LOCAL, variable->mode());
   uint32_t check_depths = 0;
   for (Scope* s = current_scope(); s != nullptr; s = s->outer_scope()) {
-    if (s->num_heap_slots() <= 0) continue;
+    if (!s->NeedsContext()) continue;
     if (!s->calls_sloppy_eval() && s != variable->scope()) continue;
     int depth = current_scope()->ContextChainLength(s);
     if (depth > kMaxCheckDepth) return kFullCheckRequired;
@@ -3110,6 +3115,13 @@
   return check_depths;
 }
 
+float AstGraphBuilder::ComputeCallFrequency(FeedbackVectorSlot slot) const {
+  if (slot.IsInvalid()) return 0.0f;
+  Handle<TypeFeedbackVector> feedback_vector(
+      info()->closure()->feedback_vector(), isolate());
+  CallICNexus nexus(feedback_vector, slot);
+  return nexus.ComputeCallFrequency() * invocation_frequency_;
+}
 
 Node* AstGraphBuilder::ProcessArguments(const Operator* op, int arity) {
   DCHECK(environment()->stack_height() >= arity);
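
As a side note on the ComputeCallFrequency helper added above: the per-call-site frequency reported by the CallICNexus is scaled by the caller's own invocation frequency, and an invalid feedback slot yields 0.0f. Below is a minimal standalone sketch of that arithmetic only; CallSiteFeedback and the sample numbers are hypothetical stand-ins, not V8 API.

    #include <cstdio>

    // Hypothetical stand-in for what a CallICNexus would report for one slot.
    struct CallSiteFeedback {
      bool valid;            // corresponds to !slot.IsInvalid()
      float slot_frequency;  // what nexus.ComputeCallFrequency() would return
    };

    // Mirrors the scaling done by ComputeCallFrequency above.
    float ScaledCallFrequency(const CallSiteFeedback& feedback,
                              float invocation_frequency) {
      if (!feedback.valid) return 0.0f;
      return feedback.slot_frequency * invocation_frequency;
    }

    int main() {
      const CallSiteFeedback hot{true, 12.0f};   // call site seen often by the IC
      const CallSiteFeedback none{false, 0.0f};  // no feedback slot available
      std::printf("%.1f\n", ScaledCallFrequency(hot, 3.0f));   // 36.0
      std::printf("%.1f\n", ScaledCallFrequency(none, 3.0f));  // 0.0
      return 0;
    }
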
@@ -3171,7 +3183,7 @@
   DCHECK(scope->is_script_scope());
 
   // Allocate a new local context.
-  Handle<ScopeInfo> scope_info = scope->GetScopeInfo(isolate());
+  Handle<ScopeInfo> scope_info = scope->scope_info();
   const Operator* op = javascript()->CreateScriptContext(scope_info);
   Node* local_context = NewNode(op, GetFunctionClosure());
   PrepareFrameState(local_context, BailoutId::ScriptContext(),
@@ -3185,7 +3197,7 @@
   DCHECK(scope->is_block_scope());
 
   // Allocate a new local context.
-  Handle<ScopeInfo> scope_info = scope->GetScopeInfo(isolate());
+  Handle<ScopeInfo> scope_info = scope->scope_info();
   const Operator* op = javascript()->CreateBlockContext(scope_info);
   Node* local_context = NewNode(op, GetFunctionClosureForContext());
 
@@ -3213,8 +3225,7 @@
   return object;
 }
 
-
-Node* AstGraphBuilder::BuildRestArgumentsArray(Variable* rest, int index) {
+Node* AstGraphBuilder::BuildRestArgumentsArray(Variable* rest) {
   if (rest == nullptr) return nullptr;
 
   // Allocate and initialize a new arguments object.
@@ -3321,7 +3332,6 @@
                                          TypeofMode typeof_mode) {
   Node* the_hole = jsgraph()->TheHoleConstant();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       // Global var, const, or let variable.
       Handle<Name> name = variable->name();
@@ -3383,7 +3393,6 @@
                                            BailoutId bailout_id,
                                            OutputFrameStateCombine combine) {
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       // Global var, const, or let variable.
       Node* global = BuildLoadGlobalObject();
@@ -3422,7 +3431,6 @@
   Node* the_hole = jsgraph()->TheHoleConstant();
   VariableMode mode = variable->mode();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       // Global var, const, or let variable.
       Handle<Name> name = variable->name();
@@ -3433,15 +3441,7 @@
     case VariableLocation::PARAMETER:
     case VariableLocation::LOCAL:
       // Local var, const, or let variable.
-      if (mode == CONST_LEGACY && op != Token::INIT) {
-        // Non-initializing assignment to legacy const is
-        // - exception in strict mode.
-        // - ignored in sloppy mode.
-        if (is_strict(language_mode())) {
-          return BuildThrowConstAssignError(bailout_id);
-        }
-        return value;
-      } else if (mode == LET && op == Token::INIT) {
+      if (mode == LET && op == Token::INIT) {
         // No initialization check needed because scoping guarantees it. Note
         // that we still perform a lookup to keep the variable live, because
         // baseline code might contain debug code that inspects the variable.
@@ -3464,6 +3464,16 @@
         if (current->op() != the_hole->op() && variable->is_this()) {
           value = BuildHoleCheckElseThrow(current, variable, value, bailout_id);
         }
+      } else if (mode == CONST && op != Token::INIT &&
+                 variable->is_sloppy_function_name()) {
+        // Non-initializing assignment to sloppy function names is
+        // - exception in strict mode.
+        // - ignored in sloppy mode.
+        DCHECK(!variable->binding_needs_init());
+        if (variable->throw_on_const_assignment(language_mode())) {
+          return BuildThrowConstAssignError(bailout_id);
+        }
+        return value;
       } else if (mode == CONST && op != Token::INIT) {
         if (variable->binding_needs_init()) {
           Node* current = environment()->Lookup(variable);
@@ -3481,16 +3491,7 @@
     case VariableLocation::CONTEXT: {
       // Context variable (potentially up the context chain).
       int depth = current_scope()->ContextChainLength(variable->scope());
-      if (mode == CONST_LEGACY && op != Token::INIT) {
-        // Non-initializing assignment to legacy const is
-        // - exception in strict mode.
-        // - ignored in sloppy mode.
-        if (is_strict(language_mode())) {
-          return BuildThrowConstAssignError(bailout_id);
-        }
-        return value;
-      } else if (mode == LET && op != Token::INIT &&
-                 variable->binding_needs_init()) {
+      if (mode == LET && op != Token::INIT && variable->binding_needs_init()) {
         // Perform an initialization check for let declared variables.
         const Operator* op =
             javascript()->LoadContext(depth, variable->index(), false);
@@ -3506,6 +3507,16 @@
           Node* current = NewNode(op, current_context());
           value = BuildHoleCheckElseThrow(current, variable, value, bailout_id);
         }
+      } else if (mode == CONST && op != Token::INIT &&
+                 variable->is_sloppy_function_name()) {
+        // Non-initializing assignment to sloppy function names is
+        // - exception in strict mode.
+        // - ignored in sloppy mode.
+        DCHECK(!variable->binding_needs_init());
+        if (variable->throw_on_const_assignment(language_mode())) {
+          return BuildThrowConstAssignError(bailout_id);
+        }
+        return value;
       } else if (mode == CONST && op != Token::INIT) {
         if (variable->binding_needs_init()) {
           const Operator* op =
@@ -3688,9 +3699,8 @@
   return object;
 }
 
-
 Node* AstGraphBuilder::BuildSetHomeObject(Node* value, Node* home_object,
-                                          ObjectLiteralProperty* property,
+                                          LiteralProperty* property,
                                           int slot_number) {
   Expression* expr = property->value();
   if (!FunctionLiteral::NeedsHomeObject(expr)) return value;
@@ -3989,8 +3999,8 @@
 
 bool AstGraphBuilder::CheckOsrEntry(IterationStatement* stmt) {
   if (info()->osr_ast_id() == stmt->OsrEntryId()) {
-    info()->set_osr_expr_stack_height(std::max(
-        environment()->stack_height(), info()->osr_expr_stack_height()));
+    DCHECK_EQ(-1, info()->osr_expr_stack_height());
+    info()->set_osr_expr_stack_height(environment()->stack_height());
     return true;
   }
   return false;
@@ -4183,9 +4193,39 @@
   }
 }
 
+void AstGraphBuilder::Environment::PrepareForOsrEntry() {
+  int size = static_cast<int>(values()->size());
+  Graph* graph = builder_->graph();
 
-void AstGraphBuilder::Environment::PrepareForLoop(BitVector* assigned,
-                                                  bool is_osr) {
+  // Set the control and effect to the OSR loop entry.
+  Node* osr_loop_entry = graph->NewNode(builder_->common()->OsrLoopEntry(),
+                                        graph->start(), graph->start());
+  UpdateControlDependency(osr_loop_entry);
+  UpdateEffectDependency(osr_loop_entry);
+  // Set OSR values.
+  for (int i = 0; i < size; ++i) {
+    values()->at(i) =
+        graph->NewNode(builder_->common()->OsrValue(i), osr_loop_entry);
+  }
+
+  // Set the contexts.
+  // The innermost context is the OSR value, and the outer contexts are
+  // reconstructed by dynamically walking up the context chain.
+  Node* osr_context = nullptr;
+  const Operator* op =
+      builder_->javascript()->LoadContext(0, Context::PREVIOUS_INDEX, true);
+  const Operator* op_inner =
+      builder_->common()->OsrValue(Linkage::kOsrContextSpillSlotIndex);
+  int last = static_cast<int>(contexts()->size() - 1);
+  for (int i = last; i >= 0; i--) {
+    osr_context = (i == last) ? graph->NewNode(op_inner, osr_loop_entry)
+                              : graph->NewNode(op, osr_context, osr_context,
+                                               osr_loop_entry);
+    contexts()->at(i) = osr_context;
+  }
+}
+
+void AstGraphBuilder::Environment::PrepareForLoop(BitVector* assigned) {
   int size = static_cast<int>(values()->size());
 
   Node* control = builder_->NewLoop();
@@ -4220,40 +4260,6 @@
       contexts()->at(i) = builder_->NewPhi(1, context, control);
     }
   }
-
-  if (is_osr) {
-    // Merge OSR values as inputs to the phis of the loop.
-    Graph* graph = builder_->graph();
-    Node* osr_loop_entry = builder_->graph()->NewNode(
-        builder_->common()->OsrLoopEntry(), graph->start(), graph->start());
-
-    builder_->MergeControl(control, osr_loop_entry);
-    builder_->MergeEffect(effect, osr_loop_entry, control);
-
-    for (int i = 0; i < size; ++i) {
-      Node* value = values()->at(i);
-      Node* osr_value =
-          graph->NewNode(builder_->common()->OsrValue(i), osr_loop_entry);
-      values()->at(i) = builder_->MergeValue(value, osr_value, control);
-    }
-
-    // Rename all the contexts in the environment.
-    // The innermost context is the OSR value, and the outer contexts are
-    // reconstructed by dynamically walking up the context chain.
-    Node* osr_context = nullptr;
-    const Operator* op =
-        builder_->javascript()->LoadContext(0, Context::PREVIOUS_INDEX, true);
-    const Operator* op_inner =
-        builder_->common()->OsrValue(Linkage::kOsrContextSpillSlotIndex);
-    int last = static_cast<int>(contexts()->size() - 1);
-    for (int i = last; i >= 0; i--) {
-      Node* context = contexts()->at(i);
-      osr_context = (i == last) ? graph->NewNode(op_inner, osr_loop_entry)
-                                : graph->NewNode(op, osr_context, osr_context,
-                                                 osr_loop_entry);
-      contexts()->at(i) = builder_->MergeValue(context, osr_context, control);
-    }
-  }
 }
 
 
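
The context handling in PrepareForOsrEntry above is worth a small illustration: only the innermost context arrives as an OSR value, and every outer context is recovered by loading the previous-context slot from the context one level below it. The toy program below mirrors that loop with strings standing in for graph nodes; the labels are illustrative only, not V8 operators.

    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
      // Pretend environment with three context slots, innermost last.
      std::vector<std::string> contexts(3);
      const int last = static_cast<int>(contexts.size()) - 1;
      std::string osr_context;
      // Walk from the innermost context outwards, as in the loop above.
      for (int i = last; i >= 0; i--) {
        osr_context = (i == last)
                          ? std::string("OsrValue(context_spill_slot)")
                          : "LoadContext(PREVIOUS_INDEX, " + osr_context + ")";
        contexts[i] = osr_context;
      }
      for (const std::string& c : contexts) std::printf("%s\n", c.c_str());
      return 0;
    }
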
diff --git a/src/compiler/ast-graph-builder.h b/src/compiler/ast-graph-builder.h
index bd307ba..27f2c9b 100644
--- a/src/compiler/ast-graph-builder.h
+++ b/src/compiler/ast-graph-builder.h
@@ -37,6 +37,7 @@
 class AstGraphBuilder : public AstVisitor<AstGraphBuilder> {
  public:
   AstGraphBuilder(Zone* local_zone, CompilationInfo* info, JSGraph* jsgraph,
+                  float invocation_frequency,
                   LoopAssignmentAnalysis* loop_assignment = nullptr,
                   TypeHintAnalysis* type_hint_analysis = nullptr);
   virtual ~AstGraphBuilder() {}
@@ -80,6 +81,7 @@
   Zone* local_zone_;
   CompilationInfo* info_;
   JSGraph* jsgraph_;
+  float const invocation_frequency_;
   Environment* environment_;
   AstContext* ast_context_;
 
@@ -264,6 +266,9 @@
   uint32_t ComputeBitsetForDynamicGlobal(Variable* variable);
   uint32_t ComputeBitsetForDynamicContext(Variable* variable);
 
+  // Computes the frequency for JSCallFunction and JSCallConstruct nodes.
+  float ComputeCallFrequency(FeedbackVectorSlot slot) const;
+
   // ===========================================================================
   // The following build methods all generate graph fragments and return one
   // resulting node. The operand stack height remains the same, variables and
@@ -278,8 +283,8 @@
   // Builder to create an arguments object if it is used.
   Node* BuildArgumentsObject(Variable* arguments);
 
-  // Builder to create an array of rest parameters if used
-  Node* BuildRestArgumentsArray(Variable* rest, int index);
+  // Builder to create an array of rest parameters if used.
+  Node* BuildRestArgumentsArray(Variable* rest);
 
   // Builder that assigns to the {.this_function} internal variable if needed.
   Node* BuildThisFunctionVariable(Variable* this_function_var);
@@ -342,8 +347,7 @@
   // Builder for adding the [[HomeObject]] to a value if the value came from a
   // function literal and needs a home object. Do nothing otherwise.
   Node* BuildSetHomeObject(Node* value, Node* home_object,
-                           ObjectLiteralProperty* property,
-                           int slot_number = 0);
+                           LiteralProperty* property, int slot_number = 0);
 
   // Builders for error reporting at runtime.
   Node* BuildThrowError(Node* exception, BailoutId bailout_id);
@@ -575,6 +579,11 @@
   // Copies this environment at a loop header control-flow point.
   Environment* CopyForLoop(BitVector* assigned, bool is_osr = false);
 
+  // Copies this environment for OSR entry. This only produces an environment
+  // of the right shape; the caller is responsible for filling in the right
+  // values and dependencies.
+  Environment* CopyForOsrEntry();
+
  private:
   AstGraphBuilder* builder_;
   int parameters_count_;
@@ -604,7 +613,8 @@
   bool IsLivenessBlockConsistent();
 
   // Prepare environment to be used as loop header.
-  void PrepareForLoop(BitVector* assigned, bool is_osr = false);
+  void PrepareForLoop(BitVector* assigned);
+  void PrepareForOsrEntry();
 };
 
 }  // namespace compiler
diff --git a/src/compiler/ast-loop-assignment-analyzer.cc b/src/compiler/ast-loop-assignment-analyzer.cc
index f1469f7..82eaeb2 100644
--- a/src/compiler/ast-loop-assignment-analyzer.cc
+++ b/src/compiler/ast-loop-assignment-analyzer.cc
@@ -3,8 +3,8 @@
 // found in the LICENSE file.
 
 #include "src/compiler/ast-loop-assignment-analyzer.h"
-#include "src/compiler.h"
-#include "src/parsing/parser.h"
+#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 
 namespace v8 {
 namespace internal {
@@ -122,7 +122,7 @@
 void ALAA::VisitClassLiteral(ClassLiteral* e) {
   VisitIfNotNull(e->extends());
   VisitIfNotNull(e->constructor());
-  ZoneList<ObjectLiteralProperty*>* properties = e->properties();
+  ZoneList<ClassLiteralProperty*>* properties = e->properties();
   for (int i = 0; i < properties->length(); i++) {
     Visit(properties->at(i)->key());
     Visit(properties->at(i)->value());
diff --git a/src/compiler/ast-loop-assignment-analyzer.h b/src/compiler/ast-loop-assignment-analyzer.h
index 0893fd1..44ad7be 100644
--- a/src/compiler/ast-loop-assignment-analyzer.h
+++ b/src/compiler/ast-loop-assignment-analyzer.h
@@ -7,7 +7,7 @@
 
 #include "src/ast/ast.h"
 #include "src/bit-vector.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/basic-block-instrumentor.cc b/src/compiler/basic-block-instrumentor.cc
index a966a5b..40f0a29 100644
--- a/src/compiler/basic-block-instrumentor.cc
+++ b/src/compiler/basic-block-instrumentor.cc
@@ -6,13 +6,14 @@
 
 #include <sstream>
 
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/compiler/common-operator.h"
 #include "src/compiler/graph.h"
 #include "src/compiler/machine-operator.h"
 #include "src/compiler/node.h"
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/schedule.h"
+#include "src/objects-inl.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/bytecode-graph-builder.cc b/src/compiler/bytecode-graph-builder.cc
index a17947a..d26ff93 100644
--- a/src/compiler/bytecode-graph-builder.cc
+++ b/src/compiler/bytecode-graph-builder.cc
@@ -4,10 +4,14 @@
 
 #include "src/compiler/bytecode-graph-builder.h"
 
+#include "src/ast/ast.h"
+#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/bytecode-branch-analysis.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/operator-properties.h"
 #include "src/interpreter/bytecodes.h"
+#include "src/objects-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -26,6 +30,7 @@
 
   Node* LookupAccumulator() const;
   Node* LookupRegister(interpreter::Register the_register) const;
+  void MarkAllRegistersLive();
 
   void BindAccumulator(Node* node, FrameStateBeforeAndAfter* states = nullptr);
   void BindRegister(interpreter::Register the_register, Node* node,
@@ -42,7 +47,8 @@
 
   // Preserve a checkpoint of the environment for the IR graph. Any
   // further mutation of the environment will not affect checkpoints.
-  Node* Checkpoint(BailoutId bytecode_offset, OutputFrameStateCombine combine);
+  Node* Checkpoint(BailoutId bytecode_offset, OutputFrameStateCombine combine,
+                   bool owner_has_exception);
 
   // Returns true if the state values are up to date with the current
   // environment.
@@ -57,27 +63,36 @@
   Node* Context() const { return context_; }
   void SetContext(Node* new_context) { context_ = new_context; }
 
-  Environment* CopyForConditional() const;
+  Environment* CopyForConditional();
   Environment* CopyForLoop();
+  Environment* CopyForOsrEntry();
   void Merge(Environment* other);
-  void PrepareForOsr();
+  void PrepareForOsrEntry();
 
   void PrepareForLoopExit(Node* loop);
 
  private:
-  explicit Environment(const Environment* copy);
+  Environment(const Environment* copy, LivenessAnalyzerBlock* liveness_block);
   void PrepareForLoop();
+
+  enum { kNotCached, kCached };
+
   bool StateValuesAreUpToDate(Node** state_values, int offset, int count,
-                              int output_poke_start, int output_poke_end);
+                              int output_poke_start, int output_poke_end,
+                              int cached = kNotCached);
   bool StateValuesRequireUpdate(Node** state_values, int offset, int count);
   void UpdateStateValues(Node** state_values, int offset, int count);
+  void UpdateStateValuesWithCache(Node** state_values, int offset, int count);
 
   int RegisterToValuesIndex(interpreter::Register the_register) const;
 
+  bool IsLivenessBlockConsistent() const;
+
   Zone* zone() const { return builder_->local_zone(); }
   Graph* graph() const { return builder_->graph(); }
   CommonOperatorBuilder* common() const { return builder_->common(); }
   BytecodeGraphBuilder* builder() const { return builder_; }
+  LivenessAnalyzerBlock* liveness_block() const { return liveness_block_; }
   const NodeVector* values() const { return &values_; }
   NodeVector* values() { return &values_; }
   int register_base() const { return register_base_; }
@@ -86,6 +101,7 @@
   BytecodeGraphBuilder* builder_;
   int register_count_;
   int parameter_count_;
+  LivenessAnalyzerBlock* liveness_block_;
   Node* context_;
   Node* control_dependency_;
   Node* effect_dependency_;
@@ -109,7 +125,7 @@
         output_poke_count_(0) {
     BailoutId id_before(builder->bytecode_iterator().current_offset());
     frame_state_before_ = builder_->environment()->Checkpoint(
-        id_before, OutputFrameStateCombine::Ignore());
+        id_before, OutputFrameStateCombine::Ignore(), false);
     id_after_ = BailoutId(id_before.ToInt() +
                           builder->bytecode_iterator().current_bytecode_size());
     // Create an explicit checkpoint node for before the operation.
@@ -136,8 +152,9 @@
       // Add the frame state for after the operation.
       DCHECK_EQ(IrOpcode::kDead,
                 NodeProperties::GetFrameStateInput(node)->opcode());
-      Node* frame_state_after =
-          builder_->environment()->Checkpoint(id_after_, combine);
+      bool has_exception = NodeProperties::IsExceptionalCall(node);
+      Node* frame_state_after = builder_->environment()->Checkpoint(
+          id_after_, combine, has_exception);
       NodeProperties::ReplaceFrameStateInput(node, frame_state_after);
     }
 
@@ -171,6 +188,9 @@
     : builder_(builder),
       register_count_(register_count),
       parameter_count_(parameter_count),
+      liveness_block_(builder->is_liveness_analysis_enabled_
+                          ? builder_->liveness_analyzer()->NewBlock()
+                          : nullptr),
       context_(context),
       control_dependency_(control_dependency),
       effect_dependency_(control_dependency),
@@ -204,12 +224,13 @@
   values()->push_back(undefined_constant);
 }
 
-
 BytecodeGraphBuilder::Environment::Environment(
-    const BytecodeGraphBuilder::Environment* other)
+    const BytecodeGraphBuilder::Environment* other,
+    LivenessAnalyzerBlock* liveness_block)
     : builder_(other->builder_),
       register_count_(other->register_count_),
       parameter_count_(other->parameter_count_),
+      liveness_block_(liveness_block),
       context_(other->context_),
       control_dependency_(other->control_dependency_),
       effect_dependency_(other->effect_dependency_),
@@ -232,6 +253,10 @@
   }
 }
 
+bool BytecodeGraphBuilder::Environment::IsLivenessBlockConsistent() const {
+  return !builder_->IsLivenessAnalysisEnabled() ==
+         (liveness_block() == nullptr);
+}
 
 Node* BytecodeGraphBuilder::Environment::LookupAccumulator() const {
   return values()->at(accumulator_base_);
@@ -248,10 +273,22 @@
     return builder()->GetNewTarget();
   } else {
     int values_index = RegisterToValuesIndex(the_register);
+    if (liveness_block() != nullptr && !the_register.is_parameter()) {
+      DCHECK(IsLivenessBlockConsistent());
+      liveness_block()->Lookup(the_register.index());
+    }
     return values()->at(values_index);
   }
 }
 
+void BytecodeGraphBuilder::Environment::MarkAllRegistersLive() {
+  DCHECK(IsLivenessBlockConsistent());
+  if (liveness_block() != nullptr) {
+    for (int i = 0; i < register_count(); ++i) {
+      liveness_block()->Lookup(i);
+    }
+  }
+}
 
 void BytecodeGraphBuilder::Environment::BindAccumulator(
     Node* node, FrameStateBeforeAndAfter* states) {
@@ -271,6 +308,10 @@
                                                             values_index));
   }
   values()->at(values_index) = node;
+  if (liveness_block() != nullptr && !the_register.is_parameter()) {
+    DCHECK(IsLivenessBlockConsistent());
+    liveness_block()->Bind(the_register.index());
+  }
 }
 
 
@@ -298,18 +339,41 @@
 BytecodeGraphBuilder::Environment*
 BytecodeGraphBuilder::Environment::CopyForLoop() {
   PrepareForLoop();
-  return new (zone()) Environment(this);
+  if (liveness_block() != nullptr) {
+    // Finish the current block before copying.
+    liveness_block_ = builder_->liveness_analyzer()->NewBlock(liveness_block());
+  }
+  return new (zone()) Environment(this, liveness_block());
 }
 
+BytecodeGraphBuilder::Environment*
+BytecodeGraphBuilder::Environment::CopyForOsrEntry() {
+  return new (zone())
+      Environment(this, builder_->liveness_analyzer()->NewBlock());
+}
 
 BytecodeGraphBuilder::Environment*
-BytecodeGraphBuilder::Environment::CopyForConditional() const {
-  return new (zone()) Environment(this);
+BytecodeGraphBuilder::Environment::CopyForConditional() {
+  LivenessAnalyzerBlock* copy_liveness_block = nullptr;
+  if (liveness_block() != nullptr) {
+    copy_liveness_block =
+        builder_->liveness_analyzer()->NewBlock(liveness_block());
+    liveness_block_ = builder_->liveness_analyzer()->NewBlock(liveness_block());
+  }
+  return new (zone()) Environment(this, copy_liveness_block);
 }
 
 
 void BytecodeGraphBuilder::Environment::Merge(
     BytecodeGraphBuilder::Environment* other) {
+  if (builder_->is_liveness_analysis_enabled_) {
+    if (GetControlDependency()->opcode() != IrOpcode::kLoop) {
+      liveness_block_ =
+          builder()->liveness_analyzer()->NewBlock(liveness_block());
+    }
+    liveness_block()->AddPredecessor(other->liveness_block());
+  }
+
   // Create a merge of the control dependencies of both environments and update
   // the current environment's control dependency accordingly.
   Node* control = builder()->MergeControl(GetControlDependency(),
@@ -352,34 +416,27 @@
   builder()->exit_controls_.push_back(terminate);
 }
 
-void BytecodeGraphBuilder::Environment::PrepareForOsr() {
+void BytecodeGraphBuilder::Environment::PrepareForOsrEntry() {
   DCHECK_EQ(IrOpcode::kLoop, GetControlDependency()->opcode());
   DCHECK_EQ(1, GetControlDependency()->InputCount());
+
   Node* start = graph()->start();
 
-  // Create a control node for the OSR entry point and merge it into the loop
-  // header. Update the current environment's control dependency accordingly.
+  // Create a control node for the OSR entry point and update the current
+  // environment's dependencies accordingly.
   Node* entry = graph()->NewNode(common()->OsrLoopEntry(), start, start);
-  Node* control = builder()->MergeControl(GetControlDependency(), entry);
-  UpdateControlDependency(control);
+  UpdateControlDependency(entry);
+  UpdateEffectDependency(entry);
 
-  // Create a merge of the effect from the OSR entry and the existing effect
-  // dependency. Update the current environment's effect dependency accordingly.
-  Node* effect = builder()->MergeEffect(GetEffectDependency(), entry, control);
-  UpdateEffectDependency(effect);
-
-  // Rename all values in the environment which will extend or introduce Phi
-  // nodes to contain the OSR values available at the entry point.
-  Node* osr_context = graph()->NewNode(
-      common()->OsrValue(Linkage::kOsrContextSpillSlotIndex), entry);
-  context_ = builder()->MergeValue(context_, osr_context, control);
+  // Create OSR values for each environment value.
+  SetContext(graph()->NewNode(
+      common()->OsrValue(Linkage::kOsrContextSpillSlotIndex), entry));
   int size = static_cast<int>(values()->size());
   for (int i = 0; i < size; i++) {
     int idx = i;  // Indexing scheme follows {StandardFrame}, adapt accordingly.
     if (i >= register_base()) idx += InterpreterFrameConstants::kExtraSlotCount;
     if (i >= accumulator_base()) idx = Linkage::kOsrAccumulatorRegisterIndex;
-    Node* osr_value = graph()->NewNode(common()->OsrValue(idx), entry);
-    values_[i] = builder()->MergeValue(values_[i], osr_value, control);
+    values()->at(i) = graph()->NewNode(common()->OsrValue(idx), entry);
   }
 }
 
@@ -434,13 +491,19 @@
   }
 }
 
+void BytecodeGraphBuilder::Environment::UpdateStateValuesWithCache(
+    Node** state_values, int offset, int count) {
+  Node** env_values = (count == 0) ? nullptr : &values()->at(offset);
+  *state_values = builder_->state_values_cache_.GetNodeForValues(
+      env_values, static_cast<size_t>(count));
+}
 
 Node* BytecodeGraphBuilder::Environment::Checkpoint(
-    BailoutId bailout_id, OutputFrameStateCombine combine) {
-  // TODO(rmcilroy): Consider using StateValuesCache for some state values.
+    BailoutId bailout_id, OutputFrameStateCombine combine,
+    bool owner_has_exception) {
   UpdateStateValues(&parameters_state_values_, 0, parameter_count());
-  UpdateStateValues(&registers_state_values_, register_base(),
-                    register_count());
+  UpdateStateValuesWithCache(&registers_state_values_, register_base(),
+                             register_count());
   UpdateStateValues(&accumulator_state_values_, accumulator_base(), 1);
 
   const Operator* op = common()->FrameState(
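
UpdateStateValuesWithCache above routes the register portion of each checkpoint through the builder's state_values_cache_, so identical register snapshots can share a single state-values node instead of allocating a fresh one per checkpoint. A rough standalone model of that sharing follows, with a plain map standing in for the real StateValuesCache; names and types here are illustrative.

    #include <cstdio>
    #include <map>
    #include <vector>

    // Toy node cache: identical register snapshots map to the same node id.
    class StateValuesCacheSketch {
     public:
      int GetNodeForValues(const std::vector<int>& values) {
        auto it = cache_.find(values);
        if (it != cache_.end()) return it->second;  // reuse the existing node
        const int id = next_id_++;
        cache_.emplace(values, id);
        return id;
      }

     private:
      std::map<std::vector<int>, int> cache_;
      int next_id_ = 0;
    };

    int main() {
      StateValuesCacheSketch cache;
      const int a = cache.GetNodeForValues({1, 2, 3});
      const int b = cache.GetNodeForValues({1, 2, 3});  // same snapshot, same node
      const int c = cache.GetNodeForValues({1, 2, 4});  // new snapshot, new node
      std::printf("%d %d %d\n", a, b, c);  // 0 0 1
      return 0;
    }
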
@@ -450,20 +513,43 @@
       accumulator_state_values_, Context(), builder()->GetFunctionClosure(),
       builder()->graph()->start());
 
+  if (liveness_block() != nullptr) {
+    // If the owning node has an exception, register the checkpoint with the
+    // predecessor so that the checkpoint is used for both the normal and the
+    // exceptional paths. Yes, this is a terrible hack and we might want
+    // to use an explicit frame state for the exceptional path.
+    if (owner_has_exception) {
+      liveness_block()->GetPredecessor()->Checkpoint(result);
+    } else {
+      liveness_block()->Checkpoint(result);
+    }
+  }
+
   return result;
 }
 
-
 bool BytecodeGraphBuilder::Environment::StateValuesAreUpToDate(
     Node** state_values, int offset, int count, int output_poke_start,
-    int output_poke_end) {
+    int output_poke_end, int cached) {
   DCHECK_LE(static_cast<size_t>(offset + count), values()->size());
-  for (int i = 0; i < count; i++, offset++) {
-    if (offset < output_poke_start || offset >= output_poke_end) {
-      if ((*state_values)->InputAt(i) != values()->at(offset)) {
-        return false;
+  if (cached == kNotCached) {
+    for (int i = 0; i < count; i++, offset++) {
+      if (offset < output_poke_start || offset >= output_poke_end) {
+        if ((*state_values)->InputAt(i) != values()->at(offset)) {
+          return false;
+        }
       }
     }
+  } else {
+    for (StateValuesAccess::TypedNode state_value :
+         StateValuesAccess(*state_values)) {
+      if (offset < output_poke_start || offset >= output_poke_end) {
+        if (state_value.node != values()->at(offset)) {
+          return false;
+        }
+      }
+      ++offset;
+    }
   }
   return true;
 }
@@ -478,16 +564,18 @@
                                 output_poke_start, output_poke_end) &&
          StateValuesAreUpToDate(&registers_state_values_, register_base(),
                                 register_count(), output_poke_start,
-                                output_poke_end) &&
+                                output_poke_end, kCached) &&
          StateValuesAreUpToDate(&accumulator_state_values_, accumulator_base(),
                                 1, output_poke_start, output_poke_end);
 }
 
 BytecodeGraphBuilder::BytecodeGraphBuilder(Zone* local_zone,
                                            CompilationInfo* info,
-                                           JSGraph* jsgraph)
+                                           JSGraph* jsgraph,
+                                           float invocation_frequency)
     : local_zone_(local_zone),
       jsgraph_(jsgraph),
+      invocation_frequency_(invocation_frequency),
       bytecode_array_(handle(info->shared_info()->bytecode_array())),
       exception_handler_table_(
           handle(HandlerTable::cast(bytecode_array()->handler_table()))),
@@ -502,7 +590,13 @@
       current_exception_handler_(0),
       input_buffer_size_(0),
       input_buffer_(nullptr),
-      exit_controls_(local_zone) {}
+      exit_controls_(local_zone),
+      is_liveness_analysis_enabled_(FLAG_analyze_environment_liveness &&
+                                    info->is_deoptimization_enabled()),
+      state_values_cache_(jsgraph),
+      liveness_analyzer_(
+          static_cast<size_t>(bytecode_array()->register_count()), local_zone) {
+}
 
 Node* BytecodeGraphBuilder::GetNewTarget() {
   if (!new_target_.is_set()) {
@@ -556,10 +650,6 @@
 }
 
 bool BytecodeGraphBuilder::CreateGraph() {
-  // Set up the basic structure of the graph. Outputs for {Start} are
-  // the formal parameters (including the receiver) plus context and
-  // closure.
-
   // Set up the basic structure of the graph. Outputs for {Start} are the formal
   // parameters (including the receiver) plus new target, number of arguments,
   // context and closure.
@@ -571,10 +661,6 @@
                   GetFunctionContext());
   set_environment(&env);
 
-  // For OSR add an {OsrNormalEntry} as the start of the top-level environment.
-  // It will be replaced with {Dead} after typing and optimizations.
-  if (!osr_ast_id_.IsNone()) NewNode(common()->OsrNormalEntry());
-
   VisitBytecodes();
 
   // Finish the basic structure of the graph.
@@ -584,9 +670,25 @@
   Node* end = graph()->NewNode(common()->End(input_count), input_count, inputs);
   graph()->SetEnd(end);
 
+  ClearNonLiveSlotsInFrameStates();
+
   return true;
 }
 
+void BytecodeGraphBuilder::ClearNonLiveSlotsInFrameStates() {
+  if (!IsLivenessAnalysisEnabled()) {
+    return;
+  }
+  NonLiveFrameStateSlotReplacer replacer(
+      &state_values_cache_, jsgraph()->OptimizedOutConstant(),
+      liveness_analyzer()->local_count(), local_zone());
+  liveness_analyzer()->Run(&replacer);
+  if (FLAG_trace_environment_liveness) {
+    OFStream os(stdout);
+    liveness_analyzer()->Print(os);
+  }
+}
+
 void BytecodeGraphBuilder::VisitBytecodes() {
   BytecodeBranchAnalysis analysis(bytecode_array(), local_zone());
   BytecodeLoopAnalysis loop_analysis(bytecode_array(), &analysis, local_zone());
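
ClearNonLiveSlotsInFrameStates above runs the liveness analyzer and lets NonLiveFrameStateSlotReplacer overwrite register slots that are provably dead at a checkpoint with the optimized-out constant, so the deoptimizer does not keep dead values alive. Below is a self-contained sketch of that replacement step under the simplifying assumption that per-register liveness is already known; it is not the V8 implementation.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Replace register slots that are dead at a checkpoint with a sentinel,
    // in the spirit of NonLiveFrameStateSlotReplacer.
    void ClearNonLiveSlots(std::vector<std::string>* frame_state,
                           const std::vector<bool>& is_live,
                           const std::string& optimized_out) {
      for (size_t i = 0; i < frame_state->size(); ++i) {
        if (!is_live[i]) (*frame_state)[i] = optimized_out;
      }
    }

    int main() {
      std::vector<std::string> frame_state = {"r0: x", "r1: y", "r2: tmp"};
      std::vector<bool> is_live = {true, false, true};  // r1 is dead here
      ClearNonLiveSlots(&frame_state, is_live, "optimized_out");
      for (const std::string& slot : frame_state)
        std::printf("%s\n", slot.c_str());
      return 0;
    }
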
@@ -596,12 +698,14 @@
   set_loop_analysis(&loop_analysis);
   interpreter::BytecodeArrayIterator iterator(bytecode_array());
   set_bytecode_iterator(&iterator);
+  BuildOSRNormalEntryPoint();
   while (!iterator.done()) {
     int current_offset = iterator.current_offset();
     EnterAndExitExceptionHandlers(current_offset);
     SwitchToMergeEnvironment(current_offset);
     if (environment() != nullptr) {
       BuildLoopHeaderEnvironment(current_offset);
+      BuildOSRLoopEntryPoint(current_offset);
 
       switch (iterator.current_bytecode()) {
 #define BYTECODE_CASE(name, ...)       \
@@ -682,9 +786,9 @@
   environment()->BindRegister(bytecode_iterator().GetRegisterOperand(1), value);
 }
 
-Node* BytecodeGraphBuilder::BuildLoadGlobal(TypeofMode typeof_mode) {
-  VectorSlotPair feedback =
-      CreateVectorSlotPair(bytecode_iterator().GetIndexOperand(0));
+Node* BytecodeGraphBuilder::BuildLoadGlobal(uint32_t feedback_slot_index,
+                                            TypeofMode typeof_mode) {
+  VectorSlotPair feedback = CreateVectorSlotPair(feedback_slot_index);
   DCHECK_EQ(FeedbackVectorSlotKind::LOAD_GLOBAL_IC,
             feedback_vector()->GetKind(feedback.slot()));
   Handle<Name> name(feedback_vector()->GetName(feedback.slot()));
@@ -694,20 +798,23 @@
 
 void BytecodeGraphBuilder::VisitLdaGlobal() {
   FrameStateBeforeAndAfter states(this);
-  Node* node = BuildLoadGlobal(TypeofMode::NOT_INSIDE_TYPEOF);
+  Node* node = BuildLoadGlobal(bytecode_iterator().GetIndexOperand(0),
+                               TypeofMode::NOT_INSIDE_TYPEOF);
   environment()->BindAccumulator(node, &states);
 }
 
 void BytecodeGraphBuilder::VisitLdrGlobal() {
   FrameStateBeforeAndAfter states(this);
-  Node* node = BuildLoadGlobal(TypeofMode::NOT_INSIDE_TYPEOF);
+  Node* node = BuildLoadGlobal(bytecode_iterator().GetIndexOperand(0),
+                               TypeofMode::NOT_INSIDE_TYPEOF);
   environment()->BindRegister(bytecode_iterator().GetRegisterOperand(1), node,
                               &states);
 }
 
 void BytecodeGraphBuilder::VisitLdaGlobalInsideTypeof() {
   FrameStateBeforeAndAfter states(this);
-  Node* node = BuildLoadGlobal(TypeofMode::INSIDE_TYPEOF);
+  Node* node = BuildLoadGlobal(bytecode_iterator().GetIndexOperand(0),
+                               TypeofMode::INSIDE_TYPEOF);
   environment()->BindAccumulator(node, &states);
 }
 
@@ -733,14 +840,12 @@
 }
 
 Node* BytecodeGraphBuilder::BuildLoadContextSlot() {
-  // TODO(mythria): LoadContextSlots are unrolled by the required depth when
-  // generating bytecode. Hence the value of depth is always 0. Update this
-  // code, when the implementation changes.
   // TODO(mythria): immutable flag is also set to false. This information is
   // not available in the bytecode array. Update this code when the
   // implementation changes.
   const Operator* op = javascript()->LoadContext(
-      0, bytecode_iterator().GetIndexOperand(1), false);
+      bytecode_iterator().GetUnsignedImmediateOperand(2),
+      bytecode_iterator().GetIndexOperand(1), false);
   Node* context =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
   return NewNode(op, context);
@@ -753,15 +858,13 @@
 
 void BytecodeGraphBuilder::VisitLdrContextSlot() {
   Node* node = BuildLoadContextSlot();
-  environment()->BindRegister(bytecode_iterator().GetRegisterOperand(2), node);
+  environment()->BindRegister(bytecode_iterator().GetRegisterOperand(3), node);
 }
 
 void BytecodeGraphBuilder::VisitStaContextSlot() {
-  // TODO(mythria): LoadContextSlots are unrolled by the required depth when
-  // generating bytecode. Hence the value of depth is always 0. Update this
-  // code, when the implementation changes.
-  const Operator* op =
-      javascript()->StoreContext(0, bytecode_iterator().GetIndexOperand(1));
+  const Operator* op = javascript()->StoreContext(
+      bytecode_iterator().GetUnsignedImmediateOperand(2),
+      bytecode_iterator().GetIndexOperand(1));
   Node* context =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
   Node* value = environment()->LookupAccumulator();
@@ -788,6 +891,150 @@
   BuildLdaLookupSlot(TypeofMode::INSIDE_TYPEOF);
 }
 
+BytecodeGraphBuilder::Environment* BytecodeGraphBuilder::CheckContextExtensions(
+    uint32_t depth) {
+  // Output environment where the context has an extension.
+  Environment* slow_environment = nullptr;
+
+  // We only need to check up to the last-but-one depth, because an eval in
+  // the same scope as the variable itself has no way of shadowing it.
+  for (uint32_t d = 0; d < depth; d++) {
+    Node* extension_slot =
+        NewNode(javascript()->LoadContext(d, Context::EXTENSION_INDEX, false),
+                environment()->Context());
+
+    Node* check_no_extension =
+        NewNode(javascript()->StrictEqual(CompareOperationHint::kAny),
+                extension_slot, jsgraph()->TheHoleConstant());
+
+    NewBranch(check_no_extension);
+    Environment* true_environment = environment()->CopyForConditional();
+
+    {
+      NewIfFalse();
+      // If there is an extension, merge into the slow path.
+      if (slow_environment == nullptr) {
+        slow_environment = environment();
+        NewMerge();
+      } else {
+        slow_environment->Merge(environment());
+      }
+    }
+
+    {
+      set_environment(true_environment);
+      NewIfTrue();
+      // Do nothing if there is no extension, eventually falling through to
+      // the fast path.
+    }
+  }
+
+  // The depth can be zero, in which case no slow-path checks are built, and
+  // the slow-path environment can be null.
+  DCHECK(depth == 0 || slow_environment != nullptr);
+
+  return slow_environment;
+}
+
+void BytecodeGraphBuilder::BuildLdaLookupContextSlot(TypeofMode typeof_mode) {
+  uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(2);
+
+  // Check if any context in the depth has an extension.
+  Environment* slow_environment = CheckContextExtensions(depth);
+
+  // Fast path, do a context load.
+  {
+    uint32_t slot_index = bytecode_iterator().GetIndexOperand(1);
+
+    const Operator* op = javascript()->LoadContext(depth, slot_index, false);
+    Node* context = environment()->Context();
+    environment()->BindAccumulator(NewNode(op, context));
+  }
+
+  // Only build the slow path if there were any slow-path checks.
+  if (slow_environment != nullptr) {
+    // Add a merge to the fast environment.
+    NewMerge();
+    Environment* fast_environment = environment();
+
+    // Slow path, do a runtime load lookup.
+    set_environment(slow_environment);
+    {
+      FrameStateBeforeAndAfter states(this);
+
+      Node* name = jsgraph()->Constant(
+          bytecode_iterator().GetConstantForIndexOperand(0));
+
+      const Operator* op =
+          javascript()->CallRuntime(typeof_mode == TypeofMode::NOT_INSIDE_TYPEOF
+                                        ? Runtime::kLoadLookupSlot
+                                        : Runtime::kLoadLookupSlotInsideTypeof);
+      Node* value = NewNode(op, name);
+      environment()->BindAccumulator(value, &states);
+    }
+
+    fast_environment->Merge(environment());
+    set_environment(fast_environment);
+  }
+}
+
+void BytecodeGraphBuilder::VisitLdaLookupContextSlot() {
+  BuildLdaLookupContextSlot(TypeofMode::NOT_INSIDE_TYPEOF);
+}
+
+void BytecodeGraphBuilder::VisitLdaLookupContextSlotInsideTypeof() {
+  BuildLdaLookupContextSlot(TypeofMode::INSIDE_TYPEOF);
+}
+
+void BytecodeGraphBuilder::BuildLdaLookupGlobalSlot(TypeofMode typeof_mode) {
+  uint32_t depth = bytecode_iterator().GetUnsignedImmediateOperand(2);
+
+  // Check if any context in the depth has an extension.
+  Environment* slow_environment = CheckContextExtensions(depth);
+
+  // Fast path, do a global load.
+  {
+    FrameStateBeforeAndAfter states(this);
+    Node* node =
+        BuildLoadGlobal(bytecode_iterator().GetIndexOperand(1), typeof_mode);
+    environment()->BindAccumulator(node, &states);
+  }
+
+  // Only build the slow path if there were any slow-path checks.
+  if (slow_environment != nullptr) {
+    // Add a merge to the fast environment.
+    NewMerge();
+    Environment* fast_environment = environment();
+
+    // Slow path, do a runtime load lookup.
+    set_environment(slow_environment);
+    {
+      FrameStateBeforeAndAfter states(this);
+
+      Node* name = jsgraph()->Constant(
+          bytecode_iterator().GetConstantForIndexOperand(0));
+
+      const Operator* op =
+          javascript()->CallRuntime(typeof_mode == TypeofMode::NOT_INSIDE_TYPEOF
+                                        ? Runtime::kLoadLookupSlot
+                                        : Runtime::kLoadLookupSlotInsideTypeof);
+      Node* value = NewNode(op, name);
+      environment()->BindAccumulator(value, &states);
+    }
+
+    fast_environment->Merge(environment());
+    set_environment(fast_environment);
+  }
+}
+
+void BytecodeGraphBuilder::VisitLdaLookupGlobalSlot() {
+  BuildLdaLookupGlobalSlot(TypeofMode::NOT_INSIDE_TYPEOF);
+}
+
+void BytecodeGraphBuilder::VisitLdaLookupGlobalSlotInsideTypeof() {
+  BuildLdaLookupGlobalSlot(TypeofMode::INSIDE_TYPEOF);
+}
+
 void BytecodeGraphBuilder::BuildStaLookupSlot(LanguageMode language_mode) {
   FrameStateBeforeAndAfter states(this);
   Node* value = environment()->LookupAccumulator();
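
CheckContextExtensions above emits one extension check per context level up to, but not including, the variable's own depth; if any level carries an extension (installed by a sloppy eval), control merges into a slow path that falls back to a runtime lookup, otherwise the fast context load is used. The toy below models that decision with plain data; the Context struct and the returned strings are illustrative, not V8 types.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Toy context chain entry: an "extension" object would be installed by a
    // sloppy eval and could shadow the variable we want to load.
    struct Context {
      bool has_extension;
    };

    // Returns the fast-path value when no context up to (but not including)
    // depth has an extension; otherwise falls back to a slow runtime lookup.
    std::string LookupContextSlot(const std::vector<Context>& chain, int depth,
                                  const std::string& fast_value) {
      for (int d = 0; d < depth; d++) {  // last-but-one depth is enough
        if (chain[d].has_extension) return "Runtime::kLoadLookupSlot";  // slow
      }
      return fast_value;  // fast path: a direct context load
    }

    int main() {
      std::vector<Context> clean = {{false}, {false}, {false}};
      std::vector<Context> shadowed = {{false}, {true}, {false}};
      std::printf("%s\n",
                  LookupContextSlot(clean, 2, "LoadContext(2, slot)").c_str());
      std::printf("%s\n",
                  LookupContextSlot(shadowed, 2, "LoadContext(2, slot)").c_str());
      return 0;
    }
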
@@ -920,7 +1167,10 @@
   Handle<SharedFunctionInfo> shared_info = Handle<SharedFunctionInfo>::cast(
       bytecode_iterator().GetConstantForIndexOperand(0));
   PretenureFlag tenured =
-      bytecode_iterator().GetFlagOperand(1) ? TENURED : NOT_TENURED;
+      interpreter::CreateClosureFlags::PretenuredBit::decode(
+          bytecode_iterator().GetFlagOperand(1))
+          ? TENURED
+          : NOT_TENURED;
   const Operator* op = javascript()->CreateClosure(shared_info, tenured);
   Node* closure = NewNode(op);
   environment()->BindAccumulator(closure);
@@ -936,7 +1186,7 @@
 }
 
 void BytecodeGraphBuilder::VisitCreateFunctionContext() {
-  uint32_t slots = bytecode_iterator().GetIndexOperand(0);
+  uint32_t slots = bytecode_iterator().GetUnsignedImmediateOperand(0);
   const Operator* op = javascript()->CreateFunctionContext(slots);
   Node* context = NewNode(op, GetFunctionClosure());
   environment()->BindAccumulator(context);
@@ -947,9 +1197,11 @@
   Node* exception = environment()->LookupRegister(reg);
   Handle<String> name =
       Handle<String>::cast(bytecode_iterator().GetConstantForIndexOperand(1));
+  Handle<ScopeInfo> scope_info = Handle<ScopeInfo>::cast(
+      bytecode_iterator().GetConstantForIndexOperand(2));
   Node* closure = environment()->LookupAccumulator();
 
-  const Operator* op = javascript()->CreateCatchContext(name);
+  const Operator* op = javascript()->CreateCatchContext(name, scope_info);
   Node* context = NewNode(op, exception, closure);
   environment()->BindAccumulator(context);
 }
@@ -957,8 +1209,10 @@
 void BytecodeGraphBuilder::VisitCreateWithContext() {
   Node* object =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
+  Handle<ScopeInfo> scope_info = Handle<ScopeInfo>::cast(
+      bytecode_iterator().GetConstantForIndexOperand(1));
 
-  const Operator* op = javascript()->CreateWithContext();
+  const Operator* op = javascript()->CreateWithContext(scope_info);
   Node* context = NewNode(op, object, environment()->LookupAccumulator());
   environment()->BindAccumulator(context);
 }
@@ -1003,6 +1257,11 @@
       bytecode_iterator().GetConstantForIndexOperand(0));
   int literal_index = bytecode_iterator().GetIndexOperand(1);
   int literal_flags = bytecode_iterator().GetFlagOperand(2);
+  // Disable allocation site mementos. Only unoptimized code will collect
+  // feedback about allocation sites. Once the code is optimized, we expect
+  // the data to converge, so we disable allocation site mementos in optimized
+  // code. We can revisit this when we have data to the contrary.
+  literal_flags |= ArrayLiteral::kDisableMementos;
   int number_of_elements = constant_elements->length();
   const Operator* op = javascript()->CreateLiteralArray(
       constant_elements, literal_flags, literal_index, number_of_elements);
@@ -1054,11 +1313,12 @@
   // A slot index of 0 is used to indicate that no feedback slot is available.
   // Assert the assumption that slot index 0 is never a valid feedback slot.
   STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
-  VectorSlotPair feedback =
-      CreateVectorSlotPair(bytecode_iterator().GetIndexOperand(3));
+  int const slot_id = bytecode_iterator().GetIndexOperand(3);
+  VectorSlotPair feedback = CreateVectorSlotPair(slot_id);
 
+  float const frequency = ComputeCallFrequency(slot_id);
   const Operator* call = javascript()->CallFunction(
-      arg_count + 1, feedback, receiver_hint, tail_call_mode);
+      arg_count + 1, frequency, feedback, receiver_hint, tail_call_mode);
   Node* value = ProcessCallArguments(call, callee, receiver, arg_count + 1);
   environment()->BindAccumulator(value, &states);
 }
@@ -1142,13 +1402,13 @@
     const Operator* call_new_op, Node* callee, Node* new_target,
     interpreter::Register first_arg, size_t arity) {
   Node** all = local_zone()->NewArray<Node*>(arity);
-  all[0] = new_target;
+  all[0] = callee;
   int first_arg_index = first_arg.index();
   for (int i = 1; i < static_cast<int>(arity) - 1; ++i) {
     all[i] = environment()->LookupRegister(
         interpreter::Register(first_arg_index + i - 1));
   }
-  all[arity - 1] = callee;
+  all[arity - 1] = new_target;
   Node* value = MakeNode(call_new_op, static_cast<int>(arity), all, false);
   return value;
 }
@@ -1158,12 +1418,18 @@
   interpreter::Register callee_reg = bytecode_iterator().GetRegisterOperand(0);
   interpreter::Register first_arg = bytecode_iterator().GetRegisterOperand(1);
   size_t arg_count = bytecode_iterator().GetRegisterCountOperand(2);
+  // A slot index of 0 is used to indicate that no feedback slot is available.
+  // Assert the assumption that slot index 0 is never a valid feedback slot.
+  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
+  int const slot_id = bytecode_iterator().GetIndexOperand(3);
+  VectorSlotPair feedback = CreateVectorSlotPair(slot_id);
 
   Node* new_target = environment()->LookupAccumulator();
   Node* callee = environment()->LookupRegister(callee_reg);
-  // TODO(turbofan): Pass the feedback here.
+
+  float const frequency = ComputeCallFrequency(slot_id);
   const Operator* call = javascript()->CallConstruct(
-      static_cast<int>(arg_count) + 2, VectorSlotPair());
+      static_cast<int>(arg_count) + 2, frequency, feedback);
   Node* value = ProcessCallNewArguments(call, callee, new_target, first_arg,
                                         arg_count + 2);
   environment()->BindAccumulator(value, &states);
@@ -1207,13 +1473,33 @@
     int operand_index) {
   FeedbackVectorSlot slot = feedback_vector()->ToSlot(
       bytecode_iterator().GetIndexOperand(operand_index));
-  DCHECK_EQ(FeedbackVectorSlotKind::GENERAL, feedback_vector()->GetKind(slot));
-  Object* feedback = feedback_vector()->Get(slot);
-  BinaryOperationHint hint = BinaryOperationHint::kAny;
-  if (feedback->IsSmi()) {
-    hint = BinaryOperationHintFromFeedback((Smi::cast(feedback))->value());
+  DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC,
+            feedback_vector()->GetKind(slot));
+  BinaryOpICNexus nexus(feedback_vector(), slot);
+  return nexus.GetBinaryOperationFeedback();
+}
+
+// Helper function to create compare operation hint from the recorded type
+// feedback.
+CompareOperationHint BytecodeGraphBuilder::GetCompareOperationHint() {
+  int slot_index = bytecode_iterator().GetIndexOperand(1);
+  if (slot_index == 0) {
+    return CompareOperationHint::kAny;
   }
-  return hint;
+  FeedbackVectorSlot slot =
+      feedback_vector()->ToSlot(bytecode_iterator().GetIndexOperand(1));
+  DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC,
+            feedback_vector()->GetKind(slot));
+  CompareICNexus nexus(feedback_vector(), slot);
+  return nexus.GetCompareOperationFeedback();
+}
+
+float BytecodeGraphBuilder::ComputeCallFrequency(int slot_id) const {
+  if (slot_id >= TypeFeedbackVector::kReservedIndexCount) {
+    CallICNexus nexus(feedback_vector(), feedback_vector()->ToSlot(slot_id));
+    return nexus.ComputeCallFrequency() * invocation_frequency_;
+  }
+  return 0.0f;
 }
 
 void BytecodeGraphBuilder::VisitAdd() {
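
GetCompareOperationHint above treats feedback slot 0 as a reserved sentinel meaning "no feedback available" and answers kAny for it; otherwise it reads the recorded compare feedback from the vector. A minimal sketch of that lookup convention follows, with a std::map standing in for the feedback vector and strings for the hint enum; these are assumptions for illustration only.

    #include <cstdio>
    #include <map>
    #include <string>

    // Hypothetical feedback table; slot 0 is reserved and never holds feedback.
    std::string CompareHintForSlot(const std::map<int, std::string>& feedback,
                                   int slot_index) {
      if (slot_index == 0) return "kAny";  // reserved slot: no feedback recorded
      const auto it = feedback.find(slot_index);
      return it == feedback.end() ? std::string("kAny") : it->second;
    }

    int main() {
      const std::map<int, std::string> feedback = {{3, "kSignedSmall"}};
      std::printf("%s\n", CompareHintForSlot(feedback, 0).c_str());  // kAny
      std::printf("%s\n", CompareHintForSlot(feedback, 3).c_str());  // kSignedSmall
      return 0;
    }
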
@@ -1379,38 +1665,31 @@
 }
 
 void BytecodeGraphBuilder::VisitTestEqual() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->Equal(hint));
+  BuildCompareOp(javascript()->Equal(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestNotEqual() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->NotEqual(hint));
+  BuildCompareOp(javascript()->NotEqual(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestEqualStrict() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->StrictEqual(hint));
+  BuildCompareOp(javascript()->StrictEqual(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestLessThan() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->LessThan(hint));
+  BuildCompareOp(javascript()->LessThan(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestGreaterThan() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->GreaterThan(hint));
+  BuildCompareOp(javascript()->GreaterThan(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestLessThanOrEqual() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->LessThanOrEqual(hint));
+  BuildCompareOp(javascript()->LessThanOrEqual(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestGreaterThanOrEqual() {
-  CompareOperationHint hint = CompareOperationHint::kAny;
-  BuildCompareOp(javascript()->GreaterThanOrEqual(hint));
+  BuildCompareOp(javascript()->GreaterThanOrEqual(GetCompareOperationHint()));
 }
 
 void BytecodeGraphBuilder::VisitTestIn() {
@@ -1444,37 +1723,28 @@
 
 void BytecodeGraphBuilder::VisitJumpConstant() { BuildJump(); }
 
+void BytecodeGraphBuilder::VisitJumpIfTrue() { BuildJumpIfTrue(); }
 
-void BytecodeGraphBuilder::VisitJumpIfTrue() {
-  BuildJumpIfEqual(jsgraph()->TrueConstant());
-}
+void BytecodeGraphBuilder::VisitJumpIfTrueConstant() { BuildJumpIfTrue(); }
 
-void BytecodeGraphBuilder::VisitJumpIfTrueConstant() {
-  BuildJumpIfEqual(jsgraph()->TrueConstant());
-}
+void BytecodeGraphBuilder::VisitJumpIfFalse() { BuildJumpIfFalse(); }
 
-void BytecodeGraphBuilder::VisitJumpIfFalse() {
-  BuildJumpIfEqual(jsgraph()->FalseConstant());
-}
-
-void BytecodeGraphBuilder::VisitJumpIfFalseConstant() {
-  BuildJumpIfEqual(jsgraph()->FalseConstant());
-}
+void BytecodeGraphBuilder::VisitJumpIfFalseConstant() { BuildJumpIfFalse(); }
 
 void BytecodeGraphBuilder::VisitJumpIfToBooleanTrue() {
-  BuildJumpIfToBooleanEqual(jsgraph()->TrueConstant());
+  BuildJumpIfToBooleanTrue();
 }
 
 void BytecodeGraphBuilder::VisitJumpIfToBooleanTrueConstant() {
-  BuildJumpIfToBooleanEqual(jsgraph()->TrueConstant());
+  BuildJumpIfToBooleanTrue();
 }
 
 void BytecodeGraphBuilder::VisitJumpIfToBooleanFalse() {
-  BuildJumpIfToBooleanEqual(jsgraph()->FalseConstant());
+  BuildJumpIfToBooleanFalse();
 }
 
 void BytecodeGraphBuilder::VisitJumpIfToBooleanFalseConstant() {
-  BuildJumpIfToBooleanEqual(jsgraph()->FalseConstant());
+  BuildJumpIfToBooleanFalse();
 }
 
 void BytecodeGraphBuilder::VisitJumpIfNotHole() { BuildJumpIfNotHole(); }
@@ -1499,21 +1769,14 @@
   BuildJumpIfEqual(jsgraph()->UndefinedConstant());
 }
 
+void BytecodeGraphBuilder::VisitJumpLoop() { BuildJump(); }
+
 void BytecodeGraphBuilder::VisitStackCheck() {
   FrameStateBeforeAndAfter states(this);
   Node* node = NewNode(javascript()->StackCheck());
   environment()->RecordAfterState(node, &states);
 }
 
-void BytecodeGraphBuilder::VisitOsrPoll() {
-  // TODO(4764): This should be moved into the {VisitBytecodes} once we merge
-  // the polling with existing bytecode. This will also guarantee that we are
-  // not missing the OSR entry point, which we wouldn't catch right now.
-  if (osr_ast_id_.ToInt() == bytecode_iterator().current_offset()) {
-    environment()->PrepareForOsr();
-  }
-}
-
 void BytecodeGraphBuilder::VisitReturn() {
   BuildLoopExitsForFunctionExit();
   Node* control =
@@ -1526,6 +1789,7 @@
   Node* call =
       NewNode(javascript()->CallRuntime(Runtime::kHandleDebuggerStatement));
   environment()->BindAccumulator(call, &states);
+  environment()->MarkAllRegistersLive();
 }
 
 // We cannot create a graph from the debugger copy of the bytecode array.
@@ -1545,13 +1809,15 @@
 
 void BytecodeGraphBuilder::VisitForInPrepare() { BuildForInPrepare(); }
 
-void BytecodeGraphBuilder::VisitForInDone() {
+void BytecodeGraphBuilder::VisitForInContinue() {
   FrameStateBeforeAndAfter states(this);
   Node* index =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
   Node* cache_length =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1));
-  Node* exit_cond = NewNode(javascript()->ForInDone(), index, cache_length);
+  Node* exit_cond =
+      NewNode(javascript()->LessThan(CompareOperationHint::kSignedSmall), index,
+              cache_length);
   environment()->BindAccumulator(exit_cond, &states);
 }
 
@@ -1578,7 +1844,8 @@
   FrameStateBeforeAndAfter states(this);
   Node* index =
       environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0));
-  index = NewNode(javascript()->ForInStep(), index);
+  index = NewNode(javascript()->Add(BinaryOperationHint::kSignedSmall), index,
+                  jsgraph()->OneConstant());
   environment()->BindAccumulator(index, &states);
 }
 
@@ -1681,6 +1948,28 @@
   set_environment(nullptr);
 }
 
+void BytecodeGraphBuilder::BuildOSRLoopEntryPoint(int current_offset) {
+  if (!osr_ast_id_.IsNone() && osr_ast_id_.ToInt() == current_offset) {
+    // For OSR add a special {OsrLoopEntry} node into the current loop header.
+    // It will be turned into a usable entry by the OSR deconstruction.
+    Environment* loop_env = merge_environments_[current_offset];
+    Environment* osr_env = loop_env->CopyForOsrEntry();
+    osr_env->PrepareForOsrEntry();
+    loop_env->Merge(osr_env);
+  }
+}
+
+void BytecodeGraphBuilder::BuildOSRNormalEntryPoint() {
+  if (!osr_ast_id_.IsNone()) {
+    // For OSR add an {OsrNormalEntry} as the top-level environment start.
+    // It will be replaced with {Dead} by the OSR deconstruction.
+    NewNode(common()->OsrNormalEntry());
+    // Note that the requested OSR entry point must be the target of a backward
+    // branch; otherwise there will not be a proper loop header available.
+    DCHECK(branch_analysis()->backward_branches_target(osr_ast_id_.ToInt()));
+  }
+}
+
 void BytecodeGraphBuilder::BuildLoopExitsForBranch(int target_offset) {
   int origin_offset = bytecode_iterator().current_offset();
   // Only build loop exits for forward edges.
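
BuildOSRNormalEntryPoint above asserts that the requested OSR offset is the target of a backward branch, because only a loop header gives the OSR deconstruction something to attach the entry to. A small standalone sketch of that invariant, using a made-up list of (jump offset, target offset) pairs rather than real bytecode:

    #include <cassert>
    #include <cstdio>
    #include <set>
    #include <utility>
    #include <vector>

    int main() {
      // Hypothetical branches: (offset of the jump, offset it targets).
      std::vector<std::pair<int, int>> branches = {{10, 40}, {55, 20}, {60, 70}};

      // Collect targets of backward branches (target precedes the jump), i.e.
      // the loop headers that OSR could enter.
      std::set<int> backward_targets;
      for (const auto& branch : branches) {
        if (branch.second < branch.first) backward_targets.insert(branch.second);
      }

      const int osr_offset = 20;  // requested OSR entry point
      // Mirrors the DCHECK above: the OSR offset must be a back-branch target,
      // otherwise there is no loop header to turn into an entry.
      assert(backward_targets.count(osr_offset) == 1);
      std::printf("offset %d is a loop header\n", osr_offset);
      return 0;
    }
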
@@ -1707,8 +1996,7 @@
   MergeIntoSuccessorEnvironment(bytecode_iterator().GetJumpTargetOffset());
 }
 
-
-void BytecodeGraphBuilder::BuildConditionalJump(Node* condition) {
+void BytecodeGraphBuilder::BuildJumpIf(Node* condition) {
   NewBranch(condition);
   Environment* if_false_environment = environment()->CopyForConditional();
   NewIfTrue();
@@ -1717,24 +2005,43 @@
   NewIfFalse();
 }
 
+void BytecodeGraphBuilder::BuildJumpIfNot(Node* condition) {
+  NewBranch(condition);
+  Environment* if_true_environment = environment()->CopyForConditional();
+  NewIfFalse();
+  MergeIntoSuccessorEnvironment(bytecode_iterator().GetJumpTargetOffset());
+  set_environment(if_true_environment);
+  NewIfTrue();
+}
 
 void BytecodeGraphBuilder::BuildJumpIfEqual(Node* comperand) {
   Node* accumulator = environment()->LookupAccumulator();
   Node* condition =
       NewNode(javascript()->StrictEqual(CompareOperationHint::kAny),
               accumulator, comperand);
-  BuildConditionalJump(condition);
+  BuildJumpIf(condition);
 }
 
+void BytecodeGraphBuilder::BuildJumpIfFalse() {
+  BuildJumpIfNot(environment()->LookupAccumulator());
+}
 
-void BytecodeGraphBuilder::BuildJumpIfToBooleanEqual(Node* comperand) {
+void BytecodeGraphBuilder::BuildJumpIfTrue() {
+  BuildJumpIf(environment()->LookupAccumulator());
+}
+
+void BytecodeGraphBuilder::BuildJumpIfToBooleanTrue() {
   Node* accumulator = environment()->LookupAccumulator();
-  Node* to_boolean =
-      NewNode(javascript()->ToBoolean(ToBooleanHint::kAny), accumulator);
   Node* condition =
-      NewNode(javascript()->StrictEqual(CompareOperationHint::kAny), to_boolean,
-              comperand);
-  BuildConditionalJump(condition);
+      NewNode(javascript()->ToBoolean(ToBooleanHint::kAny), accumulator);
+  BuildJumpIf(condition);
+}
+
+void BytecodeGraphBuilder::BuildJumpIfToBooleanFalse() {
+  Node* accumulator = environment()->LookupAccumulator();
+  Node* condition =
+      NewNode(javascript()->ToBoolean(ToBooleanHint::kAny), accumulator);
+  BuildJumpIfNot(condition);
 }
 
 void BytecodeGraphBuilder::BuildJumpIfNotHole() {
@@ -1742,10 +2049,7 @@
   Node* condition =
       NewNode(javascript()->StrictEqual(CompareOperationHint::kAny),
               accumulator, jsgraph()->TheHoleConstant());
-  Node* node =
-      NewNode(common()->Select(MachineRepresentation::kTagged), condition,
-              jsgraph()->FalseConstant(), jsgraph()->TrueConstant());
-  BuildConditionalJump(node);
+  BuildJumpIfNot(condition);
 }
 
 Node** BytecodeGraphBuilder::EnsureInputBufferSize(int size) {
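
The BuildJumpIfNotHole hunk above removes the intermediate Select node: selecting false in the true case and true in the false case is just a boolean negation, so branching on the negated condition via BuildJumpIfNot is equivalent. A minimal check of that identity (illustrative only; SelectFalseTrue is an assumed helper, not V8 code):

  #include <cassert>
  #include <initializer_list>

  // Select(condition, false, true) is simply !condition.
  bool SelectFalseTrue(bool condition) { return condition ? false : true; }

  int main() {
    for (bool c : {false, true}) assert(SelectFalseTrue(c) == !c);
    return 0;
  }
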
diff --git a/src/compiler/bytecode-graph-builder.h b/src/compiler/bytecode-graph-builder.h
index 2f3acc1..53582f7 100644
--- a/src/compiler/bytecode-graph-builder.h
+++ b/src/compiler/bytecode-graph-builder.h
@@ -5,10 +5,11 @@
 #ifndef V8_COMPILER_BYTECODE_GRAPH_BUILDER_H_
 #define V8_COMPILER_BYTECODE_GRAPH_BUILDER_H_
 
-#include "src/compiler.h"
 #include "src/compiler/bytecode-branch-analysis.h"
 #include "src/compiler/bytecode-loop-analysis.h"
 #include "src/compiler/js-graph.h"
+#include "src/compiler/liveness-analyzer.h"
+#include "src/compiler/state-values-utils.h"
 #include "src/compiler/type-hint-analyzer.h"
 #include "src/interpreter/bytecode-array-iterator.h"
 #include "src/interpreter/bytecode-flags.h"
@@ -16,6 +17,9 @@
 
 namespace v8 {
 namespace internal {
+
+class CompilationInfo;
+
 namespace compiler {
 
 // The BytecodeGraphBuilder produces a high-level IR graph based on
@@ -23,7 +27,7 @@
 class BytecodeGraphBuilder {
  public:
   BytecodeGraphBuilder(Zone* local_zone, CompilationInfo* info,
-                       JSGraph* jsgraph);
+                       JSGraph* jsgraph, float invocation_frequency);
 
   // Creates a graph by visiting bytecodes.
   bool CreateGraph();
@@ -113,16 +117,22 @@
                                     interpreter::Register first_arg,
                                     size_t arity);
 
+  // Computes register liveness and replaces dead ones in frame states with the
+  // undefined values.
+  void ClearNonLiveSlotsInFrameStates();
+
   void BuildCreateLiteral(const Operator* op);
   void BuildCreateArguments(CreateArgumentsType type);
   Node* BuildLoadContextSlot();
-  Node* BuildLoadGlobal(TypeofMode typeof_mode);
+  Node* BuildLoadGlobal(uint32_t feedback_slot_index, TypeofMode typeof_mode);
   void BuildStoreGlobal(LanguageMode language_mode);
   Node* BuildNamedLoad();
   void BuildNamedStore(LanguageMode language_mode);
   Node* BuildKeyedLoad();
   void BuildKeyedStore(LanguageMode language_mode);
   void BuildLdaLookupSlot(TypeofMode typeof_mode);
+  void BuildLdaLookupContextSlot(TypeofMode typeof_mode);
+  void BuildLdaLookupGlobalSlot(TypeofMode typeof_mode);
   void BuildStaLookupSlot(LanguageMode language_mode);
   void BuildCall(TailCallMode tail_call_mode);
   void BuildThrow();
@@ -135,15 +145,30 @@
   void BuildForInNext();
   void BuildInvokeIntrinsic();
 
+  // Check the context chain for extensions, for lookup fast paths.
+  Environment* CheckContextExtensions(uint32_t depth);
+
   // Helper function to create binary operation hint from the recorded
   // type feedback.
   BinaryOperationHint GetBinaryOperationHint(int operand_index);
 
+  // Helper function to create compare operation hint from the recorded
+  // type feedback.
+  CompareOperationHint GetCompareOperationHint();
+
+  // Helper function to compute call frequency from the recorded type
+  // feedback.
+  float ComputeCallFrequency(int slot_id) const;
+
   // Control flow plumbing.
   void BuildJump();
-  void BuildConditionalJump(Node* condition);
+  void BuildJumpIf(Node* condition);
+  void BuildJumpIfNot(Node* condition);
   void BuildJumpIfEqual(Node* comperand);
-  void BuildJumpIfToBooleanEqual(Node* boolean_comperand);
+  void BuildJumpIfTrue();
+  void BuildJumpIfFalse();
+  void BuildJumpIfToBooleanTrue();
+  void BuildJumpIfToBooleanFalse();
   void BuildJumpIfNotHole();
 
   // Simulates control flow by forward-propagating environments.
@@ -154,6 +179,10 @@
   // Simulates control flow that exits the function body.
   void MergeControlToLeaveFunction(Node* exit);
 
+  // Builds entry points that are used by OSR deconstruction.
+  void BuildOSRLoopEntryPoint(int current_offset);
+  void BuildOSRNormalEntryPoint();
+
   // Builds loop exit nodes for every exited loop between the current bytecode
   // offset and {target_offset}.
   void BuildLoopExitsForBranch(int target_offset);
@@ -221,12 +250,19 @@
     loop_analysis_ = loop_analysis;
   }
 
+  LivenessAnalyzer* liveness_analyzer() { return &liveness_analyzer_; }
+
+  bool IsLivenessAnalysisEnabled() const {
+    return this->is_liveness_analysis_enabled_;
+  }
+
 #define DECLARE_VISIT_BYTECODE(name, ...) void Visit##name();
   BYTECODE_LIST(DECLARE_VISIT_BYTECODE)
 #undef DECLARE_VISIT_BYTECODE
 
   Zone* local_zone_;
   JSGraph* jsgraph_;
+  float const invocation_frequency_;
   Handle<BytecodeArray> bytecode_array_;
   Handle<HandlerTable> exception_handler_table_;
   Handle<TypeFeedbackVector> feedback_vector_;
@@ -258,6 +294,13 @@
   // Control nodes that exit the function body.
   ZoneVector<Node*> exit_controls_;
 
+  bool const is_liveness_analysis_enabled_;
+
+  StateValuesCache state_values_cache_;
+
+  // Analyzer of register liveness.
+  LivenessAnalyzer liveness_analyzer_;
+
   static int const kBinaryOperationHintIndex = 1;
   static int const kCountOperationHintIndex = 0;
   static int const kBinaryOperationSmiHintIndex = 2;
diff --git a/src/compiler/bytecode-loop-analysis.h b/src/compiler/bytecode-loop-analysis.h
index 59fabce..1a86d7b 100644
--- a/src/compiler/bytecode-loop-analysis.h
+++ b/src/compiler/bytecode-loop-analysis.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_BYTECODE_LOOP_ANALYSIS_H_
 
 #include "src/handles.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/c-linkage.cc b/src/compiler/c-linkage.cc
index f79497a..690a52b 100644
--- a/src/compiler/c-linkage.cc
+++ b/src/compiler/c-linkage.cc
@@ -7,7 +7,7 @@
 
 #include "src/compiler/linkage.h"
 
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/code-assembler.cc b/src/compiler/code-assembler.cc
index 4dd7e79..ff7ef31 100644
--- a/src/compiler/code-assembler.cc
+++ b/src/compiler/code-assembler.cc
@@ -20,7 +20,7 @@
 #include "src/machine-type.h"
 #include "src/macro-assembler.h"
 #include "src/utils.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -234,6 +234,13 @@
   return value;
 }
 
+Node* CodeAssembler::RoundIntPtrToFloat64(Node* value) {
+  if (raw_assembler_->machine()->Is64()) {
+    return raw_assembler_->RoundInt64ToFloat64(value);
+  }
+  return raw_assembler_->ChangeInt32ToFloat64(value);
+}
+
 #define DEFINE_CODE_ASSEMBLER_UNARY_OP(name) \
   Node* CodeAssembler::name(Node* a) { return raw_assembler_->name(a); }
 CODE_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_ASSEMBLER_UNARY_OP)
@@ -299,6 +306,10 @@
                              IntPtrConstant(root_index * kPointerSize), value);
 }
 
+Node* CodeAssembler::Retain(Node* value) {
+  return raw_assembler_->Retain(value);
+}
+
 Node* CodeAssembler::Projection(int index, Node* value) {
   return raw_assembler_->Projection(index, value);
 }
@@ -425,6 +436,14 @@
                                           arg5, context);
 }
 
+Node* CodeAssembler::TailCallRuntime(Runtime::FunctionId function_id,
+                                     Node* context, Node* arg1, Node* arg2,
+                                     Node* arg3, Node* arg4, Node* arg5,
+                                     Node* arg6) {
+  return raw_assembler_->TailCallRuntime6(function_id, arg1, arg2, arg3, arg4,
+                                          arg5, arg6, context);
+}
+
 Node* CodeAssembler::CallStub(Callable const& callable, Node* context,
                               Node* arg1, size_t result_size) {
   Node* target = HeapConstant(callable.code());
@@ -446,6 +465,14 @@
                   result_size);
 }
 
+Node* CodeAssembler::CallStub(Callable const& callable, Node* context,
+                              Node* arg1, Node* arg2, Node* arg3, Node* arg4,
+                              size_t result_size) {
+  Node* target = HeapConstant(callable.code());
+  return CallStub(callable.descriptor(), target, context, arg1, arg2, arg3,
+                  arg4, result_size);
+}
+
 Node* CodeAssembler::CallStubN(Callable const& callable, Node** args,
                                size_t result_size) {
   Node* target = HeapConstant(callable.code());
@@ -638,9 +665,11 @@
 }
 
 Node* CodeAssembler::CallStubN(const CallInterfaceDescriptor& descriptor,
-                               Node* target, Node** args, size_t result_size) {
+                               int js_parameter_count, Node* target,
+                               Node** args, size_t result_size) {
   CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
+      isolate(), zone(), descriptor,
+      descriptor.GetStackParameterCount() + js_parameter_count,
       CallDescriptor::kNoFlags, Operator::kNoProperties,
       MachineType::AnyTagged(), result_size);
 
@@ -745,6 +774,26 @@
 }
 
 Node* CodeAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
+                                  Node* target, Node* context, Node* arg1,
+                                  Node* arg2, Node* arg3, Node* arg4,
+                                  Node* arg5, size_t result_size) {
+  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
+      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
+      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
+      MachineType::AnyTagged(), result_size);
+
+  Node** args = zone()->NewArray<Node*>(6);
+  args[0] = arg1;
+  args[1] = arg2;
+  args[2] = arg3;
+  args[3] = arg4;
+  args[4] = arg5;
+  args[5] = context;
+
+  return raw_assembler_->TailCallN(call_descriptor, target, args);
+}
+
+Node* CodeAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
                                   Node* target, Node* context, const Arg& arg1,
                                   const Arg& arg2, const Arg& arg3,
                                   const Arg& arg4, size_t result_size) {
@@ -803,10 +852,6 @@
                             Node* function, Node* receiver,
                             size_t result_size) {
   const int argc = 0;
-  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), callable.descriptor(), argc + 1,
-      CallDescriptor::kNoFlags, Operator::kNoProperties,
-      MachineType::AnyTagged(), result_size);
   Node* target = HeapConstant(callable.code());
 
   Node** args = zone()->NewArray<Node*>(argc + 4);
@@ -815,17 +860,13 @@
   args[2] = receiver;
   args[3] = context;
 
-  return CallN(call_descriptor, target, args);
+  return CallStubN(callable.descriptor(), argc + 1, target, args, result_size);
 }
 
 Node* CodeAssembler::CallJS(Callable const& callable, Node* context,
                             Node* function, Node* receiver, Node* arg1,
                             size_t result_size) {
   const int argc = 1;
-  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), callable.descriptor(), argc + 1,
-      CallDescriptor::kNoFlags, Operator::kNoProperties,
-      MachineType::AnyTagged(), result_size);
   Node* target = HeapConstant(callable.code());
 
   Node** args = zone()->NewArray<Node*>(argc + 4);
@@ -835,17 +876,13 @@
   args[3] = arg1;
   args[4] = context;
 
-  return CallN(call_descriptor, target, args);
+  return CallStubN(callable.descriptor(), argc + 1, target, args, result_size);
 }
 
 Node* CodeAssembler::CallJS(Callable const& callable, Node* context,
                             Node* function, Node* receiver, Node* arg1,
                             Node* arg2, size_t result_size) {
   const int argc = 2;
-  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), callable.descriptor(), argc + 1,
-      CallDescriptor::kNoFlags, Operator::kNoProperties,
-      MachineType::AnyTagged(), result_size);
   Node* target = HeapConstant(callable.code());
 
   Node** args = zone()->NewArray<Node*>(argc + 4);
@@ -856,7 +893,15 @@
   args[4] = arg2;
   args[5] = context;
 
-  return CallN(call_descriptor, target, args);
+  return CallStubN(callable.descriptor(), argc + 1, target, args, result_size);
+}
+
+Node* CodeAssembler::CallCFunction2(MachineType return_type,
+                                    MachineType arg0_type,
+                                    MachineType arg1_type, Node* function,
+                                    Node* arg0, Node* arg1) {
+  return raw_assembler_->CallCFunction2(return_type, arg0_type, arg1_type,
+                                        function, arg0, arg1);
 }
 
 void CodeAssembler::Goto(CodeAssembler::Label* label) {
diff --git a/src/compiler/code-assembler.h b/src/compiler/code-assembler.h
index bea999b..8372334 100644
--- a/src/compiler/code-assembler.h
+++ b/src/compiler/code-assembler.h
@@ -15,7 +15,7 @@
 #include "src/heap/heap.h"
 #include "src/machine-type.h"
 #include "src/runtime/runtime.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -54,8 +54,10 @@
   V(IntPtrGreaterThanOrEqual)                    \
   V(IntPtrEqual)                                 \
   V(Uint32LessThan)                              \
+  V(Uint32LessThanOrEqual)                       \
   V(Uint32GreaterThanOrEqual)                    \
   V(UintPtrLessThan)                             \
+  V(UintPtrGreaterThan)                          \
   V(UintPtrGreaterThanOrEqual)                   \
   V(WordEqual)                                   \
   V(WordNotEqual)                                \
@@ -133,7 +135,9 @@
   V(Float64Tanh)                        \
   V(Float64ExtractLowWord32)            \
   V(Float64ExtractHighWord32)           \
+  V(BitcastTaggedToWord)                \
   V(BitcastWordToTagged)                \
+  V(BitcastWordToTaggedSigned)          \
   V(TruncateFloat64ToFloat32)           \
   V(TruncateFloat64ToWord32)            \
   V(TruncateInt64ToInt32)               \
@@ -144,10 +148,12 @@
   V(ChangeUint32ToFloat64)              \
   V(ChangeUint32ToUint64)               \
   V(RoundFloat64ToInt32)                \
+  V(Float64SilenceNaN)                  \
   V(Float64RoundDown)                   \
   V(Float64RoundUp)                     \
   V(Float64RoundTruncate)               \
-  V(Word32Clz)
+  V(Word32Clz)                          \
+  V(Word32BinaryNot)
 
 // A "public" interface used by components outside of compiler directory to
 // create code objects with TurboFan's backend. This class is mostly a thin shim
@@ -283,11 +289,19 @@
   CODE_ASSEMBLER_UNARY_OP_LIST(DECLARE_CODE_ASSEMBLER_UNARY_OP)
 #undef DECLARE_CODE_ASSEMBLER_UNARY_OP
 
+  // Changes an intptr_t to a double, e.g. for storing an element index
+  // outside Smi range in a HeapNumber. Lossless on 32-bit,
+  // rounds on 64-bit (which doesn't affect valid element indices).
+  Node* RoundIntPtrToFloat64(Node* value);
   // No-op on 32-bit, otherwise zero extend.
   Node* ChangeUint32ToWord(Node* value);
   // No-op on 32-bit, otherwise sign extend.
   Node* ChangeInt32ToIntPtr(Node* value);
 
+  // No-op that guarantees that the value is kept alive till this point even
+  // if GC happens.
+  Node* Retain(Node* value);
+
   // Projections
   Node* Projection(int index, Node* value);
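
A quick numeric check of the claim in the RoundIntPtrToFloat64 comment above: valid element indices stay far below 2^53, the largest integer a double represents exactly, so the int64-to-float64 conversion used on 64-bit targets never actually rounds for them. Illustrative only, not V8 code:

  #include <cassert>
  #include <cstdint>

  int main() {
    int64_t max_array_index = (int64_t{1} << 32) - 2;  // largest valid JS array index
    double as_double = static_cast<double>(max_array_index);
    assert(static_cast<int64_t>(as_double) == max_array_index);  // exact round-trip
    return 0;
  }
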
 
@@ -315,6 +329,9 @@
   Node* TailCallRuntime(Runtime::FunctionId function_id, Node* context,
                         Node* arg1, Node* arg2, Node* arg3, Node* arg4,
                         Node* arg5);
+  Node* TailCallRuntime(Runtime::FunctionId function_id, Node* context,
+                        Node* arg1, Node* arg2, Node* arg3, Node* arg4,
+                        Node* arg5, Node* arg6);
 
   // A pair of a zero-based argument index and a value.
   // It helps writing arguments order independent code.
@@ -331,6 +348,8 @@
                  Node* arg2, size_t result_size = 1);
   Node* CallStub(Callable const& callable, Node* context, Node* arg1,
                  Node* arg2, Node* arg3, size_t result_size = 1);
+  Node* CallStub(Callable const& callable, Node* context, Node* arg1,
+                 Node* arg2, Node* arg3, Node* arg4, size_t result_size = 1);
   Node* CallStubN(Callable const& callable, Node** args,
                   size_t result_size = 1);
 
@@ -364,8 +383,13 @@
                  const Arg& arg3, const Arg& arg4, const Arg& arg5,
                  size_t result_size = 1);
 
+  Node* CallStubN(const CallInterfaceDescriptor& descriptor,
+                  int js_parameter_count, Node* target, Node** args,
+                  size_t result_size = 1);
   Node* CallStubN(const CallInterfaceDescriptor& descriptor, Node* target,
-                  Node** args, size_t result_size = 1);
+                  Node** args, size_t result_size = 1) {
+    return CallStubN(descriptor, 0, target, args, result_size);
+  }
 
   Node* TailCallStub(Callable const& callable, Node* context, Node* arg1,
                      size_t result_size = 1);
@@ -387,6 +411,9 @@
   Node* TailCallStub(const CallInterfaceDescriptor& descriptor, Node* target,
                      Node* context, Node* arg1, Node* arg2, Node* arg3,
                      Node* arg4, size_t result_size = 1);
+  Node* TailCallStub(const CallInterfaceDescriptor& descriptor, Node* target,
+                     Node* context, Node* arg1, Node* arg2, Node* arg3,
+                     Node* arg4, Node* arg5, size_t result_size = 1);
 
   Node* TailCallStub(const CallInterfaceDescriptor& descriptor, Node* target,
                      Node* context, const Arg& arg1, const Arg& arg2,
@@ -406,6 +433,11 @@
   Node* CallJS(Callable const& callable, Node* context, Node* function,
                Node* receiver, Node* arg1, Node* arg2, size_t result_size = 1);
 
+  // Call to a C function with two arguments.
+  Node* CallCFunction2(MachineType return_type, MachineType arg0_type,
+                       MachineType arg1_type, Node* function, Node* arg0,
+                       Node* arg1);
+
   // Exception handling support.
   void GotoIfException(Node* node, Label* if_exception,
                        Variable* exception_var = nullptr);
diff --git a/src/compiler/code-generator-impl.h b/src/compiler/code-generator-impl.h
index 4dccdc9..8bf3a9e 100644
--- a/src/compiler/code-generator-impl.h
+++ b/src/compiler/code-generator-impl.h
@@ -170,15 +170,17 @@
 // Eager deoptimization exit.
 class DeoptimizationExit : public ZoneObject {
  public:
-  explicit DeoptimizationExit(int deoptimization_id)
-      : deoptimization_id_(deoptimization_id) {}
+  explicit DeoptimizationExit(int deoptimization_id, SourcePosition pos)
+      : deoptimization_id_(deoptimization_id), pos_(pos) {}
 
   int deoptimization_id() const { return deoptimization_id_; }
   Label* label() { return &label_; }
+  SourcePosition pos() const { return pos_; }
 
  private:
   int const deoptimization_id_;
   Label label_;
+  SourcePosition const pos_;
 };
 
 // Generator for out-of-line code that is emitted after the main code is done.
diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc
index 03136a7..043582b 100644
--- a/src/compiler/code-generator.cc
+++ b/src/compiler/code-generator.cc
@@ -6,6 +6,7 @@
 
 #include "src/address-map.h"
 #include "src/base/adapters.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/pipeline.h"
@@ -63,6 +64,8 @@
   CreateFrameAccessState(frame);
 }
 
+Isolate* CodeGenerator::isolate() const { return info_->isolate(); }
+
 void CodeGenerator::CreateFrameAccessState(Frame* frame) {
   FinishFrame(frame);
   frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
@@ -185,7 +188,8 @@
   // Assemble all eager deoptimization exits.
   for (DeoptimizationExit* exit : deoptimization_exits_) {
     masm()->bind(exit->label());
-    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
+    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER,
+                            exit->pos());
   }
 
   // Ensure there is space for lazy deoptimization in the code.
@@ -805,7 +809,7 @@
     } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
                type == MachineType::Uint32()) {
       translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
-    } else if (type.representation() == MachineRepresentation::kTagged) {
+    } else if (IsAnyTagged(type.representation())) {
       translation->StoreStackSlot(LocationOperand::cast(op)->index());
     } else {
       CHECK(false);
@@ -827,7 +831,7 @@
     } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
                type == MachineType::Uint32()) {
       translation->StoreUint32Register(converter.ToRegister(op));
-    } else if (type.representation() == MachineRepresentation::kTagged) {
+    } else if (IsAnyTagged(type.representation())) {
       translation->StoreRegister(converter.ToRegister(op));
     } else {
       CHECK(false);
@@ -846,7 +850,8 @@
     Handle<Object> constant_object;
     switch (constant.type()) {
       case Constant::kInt32:
-        if (type.representation() == MachineRepresentation::kTagged) {
+        if (type.representation() == MachineRepresentation::kTagged ||
+            type.representation() == MachineRepresentation::kTaggedSigned) {
           // When pointers are 4 bytes, we can use int32 constants to represent
           // Smis.
           DCHECK_EQ(4, kPointerSize);
@@ -868,24 +873,33 @@
       case Constant::kInt64:
         // When pointers are 8 bytes, we can use int64 constants to represent
         // Smis.
-        DCHECK_EQ(type.representation(), MachineRepresentation::kTagged);
+        DCHECK(type.representation() == MachineRepresentation::kTagged ||
+               type.representation() == MachineRepresentation::kTaggedSigned);
         DCHECK_EQ(8, kPointerSize);
         constant_object =
             handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
         DCHECK(constant_object->IsSmi());
         break;
       case Constant::kFloat32:
-        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
-               type.representation() == MachineRepresentation::kTagged);
+        if (type.representation() == MachineRepresentation::kTaggedSigned) {
+          DCHECK(IsSmiDouble(constant.ToFloat32()));
+        } else {
+          DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
+                 CanBeTaggedPointer(type.representation()));
+        }
         constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
         break;
       case Constant::kFloat64:
-        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
-               type.representation() == MachineRepresentation::kTagged);
+        if (type.representation() == MachineRepresentation::kTaggedSigned) {
+          DCHECK(IsSmiDouble(constant.ToFloat64()));
+        } else {
+          DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
+                 CanBeTaggedPointer(type.representation()));
+        }
         constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
         break;
       case Constant::kHeapObject:
-        DCHECK(type.representation() == MachineRepresentation::kTagged);
+        DCHECK(CanBeTaggedPointer(type.representation()));
         constant_object = constant.ToHeapObject();
         break;
       default:
@@ -911,8 +925,8 @@
     Instruction* instr, size_t frame_state_offset) {
   int const deoptimization_id = BuildTranslation(
       instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
-  DeoptimizationExit* const exit =
-      new (zone()) DeoptimizationExit(deoptimization_id);
+  DeoptimizationExit* const exit = new (zone())
+      DeoptimizationExit(deoptimization_id, current_source_position_);
   deoptimization_exits_.push_back(exit);
   return exit;
 }
diff --git a/src/compiler/code-generator.h b/src/compiler/code-generator.h
index 21c13f8..3032163 100644
--- a/src/compiler/code-generator.h
+++ b/src/compiler/code-generator.h
@@ -5,7 +5,6 @@
 #ifndef V8_COMPILER_CODE_GENERATOR_H_
 #define V8_COMPILER_CODE_GENERATOR_H_
 
-#include "src/compiler.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/instruction.h"
 #include "src/compiler/unwinding-info-writer.h"
@@ -16,6 +15,9 @@
 
 namespace v8 {
 namespace internal {
+
+class CompilationInfo;
+
 namespace compiler {
 
 // Forward declarations.
@@ -58,7 +60,7 @@
   InstructionSequence* code() const { return code_; }
   FrameAccessState* frame_access_state() const { return frame_access_state_; }
   const Frame* frame() const { return frame_access_state_->frame(); }
-  Isolate* isolate() const { return info_->isolate(); }
+  Isolate* isolate() const;
   Linkage* linkage() const { return linkage_; }
 
   Label* GetLabel(RpoNumber rpo) { return &labels_[rpo.ToSize()]; }
@@ -118,7 +120,8 @@
   void AssembleArchTableSwitch(Instruction* instr);
 
   CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
-                                        Deoptimizer::BailoutType bailout_type);
+                                        Deoptimizer::BailoutType bailout_type,
+                                        SourcePosition pos);
 
   // Generates an architecture-specific, descriptor-specific prologue
   // to set up a stack frame.
diff --git a/src/compiler/common-operator-reducer.cc b/src/compiler/common-operator-reducer.cc
index 9527c75..c5ced20 100644
--- a/src/compiler/common-operator-reducer.cc
+++ b/src/compiler/common-operator-reducer.cc
@@ -77,8 +77,12 @@
   // Swap IfTrue/IfFalse on {branch} if {cond} is a BooleanNot and use the input
   // to BooleanNot as new condition for {branch}. Note we assume that {cond} was
   // already properly optimized before we get here (as guaranteed by the graph
-  // reduction logic).
-  if (cond->opcode() == IrOpcode::kBooleanNot) {
+  // reduction logic). The same applies if {cond} is a Select acting as boolean
+  // not (i.e. true being returned in the false case and vice versa).
+  if (cond->opcode() == IrOpcode::kBooleanNot ||
+      (cond->opcode() == IrOpcode::kSelect &&
+       DecideCondition(cond->InputAt(1)) == Decision::kFalse &&
+       DecideCondition(cond->InputAt(2)) == Decision::kTrue)) {
     for (Node* const use : node->uses()) {
       switch (use->opcode()) {
         case IrOpcode::kIfTrue:
diff --git a/src/compiler/common-operator.cc b/src/compiler/common-operator.cc
index f732375..e57160a 100644
--- a/src/compiler/common-operator.cc
+++ b/src/compiler/common-operator.cc
@@ -10,7 +10,7 @@
 #include "src/compiler/opcodes.h"
 #include "src/compiler/operator.h"
 #include "src/handles-inl.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/common-operator.h b/src/compiler/common-operator.h
index 9e4d259..2db0bfa 100644
--- a/src/compiler/common-operator.h
+++ b/src/compiler/common-operator.h
@@ -9,7 +9,7 @@
 #include "src/compiler/frame-states.h"
 #include "src/deoptimize-reason.h"
 #include "src/machine-type.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -19,7 +19,7 @@
 class CallDescriptor;
 struct CommonOperatorGlobalCache;
 class Operator;
-
+class Type;
 
 // Prediction hint for branches.
 enum class BranchHint : uint8_t { kNone, kTrue, kFalse };
diff --git a/src/compiler/control-equivalence.h b/src/compiler/control-equivalence.h
index 478e48b..4fb9c27 100644
--- a/src/compiler/control-equivalence.h
+++ b/src/compiler/control-equivalence.h
@@ -7,7 +7,7 @@
 
 #include "src/compiler/graph.h"
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/control-flow-optimizer.h b/src/compiler/control-flow-optimizer.h
index f72fa58..61785a0 100644
--- a/src/compiler/control-flow-optimizer.h
+++ b/src/compiler/control-flow-optimizer.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_CONTROL_FLOW_OPTIMIZER_H_
 
 #include "src/compiler/node-marker.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/effect-control-linearizer.cc b/src/compiler/effect-control-linearizer.cc
index 9cc6ddc..4e53e5d 100644
--- a/src/compiler/effect-control-linearizer.cc
+++ b/src/compiler/effect-control-linearizer.cc
@@ -265,7 +265,6 @@
     Node* phi_false = graph->NewNode(phi->op(), input_count + 1, inputs);
     if (phi->UseCount() == 0) {
       DCHECK_EQ(phi->opcode(), IrOpcode::kEffectPhi);
-      DCHECK_EQ(input_count, block->SuccessorCount());
     } else {
       for (Edge edge : phi->use_edges()) {
         Node* control = NodeProperties::GetControlInput(edge.from());
@@ -616,6 +615,9 @@
     case IrOpcode::kChangeTaggedToFloat64:
       state = LowerChangeTaggedToFloat64(node, *effect, *control);
       break;
+    case IrOpcode::kTruncateTaggedToBit:
+      state = LowerTruncateTaggedToBit(node, *effect, *control);
+      break;
     case IrOpcode::kTruncateTaggedToFloat64:
       state = LowerTruncateTaggedToFloat64(node, *effect, *control);
       break;
@@ -634,11 +636,8 @@
     case IrOpcode::kCheckIf:
       state = LowerCheckIf(node, frame_state, *effect, *control);
       break;
-    case IrOpcode::kCheckTaggedPointer:
-      state = LowerCheckTaggedPointer(node, frame_state, *effect, *control);
-      break;
-    case IrOpcode::kCheckTaggedSigned:
-      state = LowerCheckTaggedSigned(node, frame_state, *effect, *control);
+    case IrOpcode::kCheckHeapObject:
+      state = LowerCheckHeapObject(node, frame_state, *effect, *control);
       break;
     case IrOpcode::kCheckedInt32Add:
       state = LowerCheckedInt32Add(node, frame_state, *effect, *control);
@@ -661,9 +660,17 @@
     case IrOpcode::kCheckedInt32Mul:
       state = LowerCheckedInt32Mul(node, frame_state, *effect, *control);
       break;
+    case IrOpcode::kCheckedInt32ToTaggedSigned:
+      state =
+          LowerCheckedInt32ToTaggedSigned(node, frame_state, *effect, *control);
+      break;
     case IrOpcode::kCheckedUint32ToInt32:
       state = LowerCheckedUint32ToInt32(node, frame_state, *effect, *control);
       break;
+    case IrOpcode::kCheckedUint32ToTaggedSigned:
+      state = LowerCheckedUint32ToTaggedSigned(node, frame_state, *effect,
+                                               *control);
+      break;
     case IrOpcode::kCheckedFloat64ToInt32:
       state = LowerCheckedFloat64ToInt32(node, frame_state, *effect, *control);
       break;
@@ -677,6 +684,10 @@
     case IrOpcode::kCheckedTaggedToFloat64:
       state = LowerCheckedTaggedToFloat64(node, frame_state, *effect, *control);
       break;
+    case IrOpcode::kCheckedTaggedToTaggedSigned:
+      state = LowerCheckedTaggedToTaggedSigned(node, frame_state, *effect,
+                                               *control);
+      break;
     case IrOpcode::kTruncateTaggedToWord32:
       state = LowerTruncateTaggedToWord32(node, *effect, *control);
       break;
@@ -702,12 +713,27 @@
     case IrOpcode::kObjectIsUndetectable:
       state = LowerObjectIsUndetectable(node, *effect, *control);
       break;
+    case IrOpcode::kArrayBufferWasNeutered:
+      state = LowerArrayBufferWasNeutered(node, *effect, *control);
+      break;
     case IrOpcode::kStringFromCharCode:
       state = LowerStringFromCharCode(node, *effect, *control);
       break;
+    case IrOpcode::kStringFromCodePoint:
+      state = LowerStringFromCodePoint(node, *effect, *control);
+      break;
     case IrOpcode::kStringCharCodeAt:
       state = LowerStringCharCodeAt(node, *effect, *control);
       break;
+    case IrOpcode::kStringEqual:
+      state = LowerStringEqual(node, *effect, *control);
+      break;
+    case IrOpcode::kStringLessThan:
+      state = LowerStringLessThan(node, *effect, *control);
+      break;
+    case IrOpcode::kStringLessThanOrEqual:
+      state = LowerStringLessThanOrEqual(node, *effect, *control);
+      break;
     case IrOpcode::kCheckFloat64Hole:
       state = LowerCheckFloat64Hole(node, frame_state, *effect, *control);
       break;
@@ -762,75 +788,8 @@
 EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerChangeFloat64ToTagged(Node* node, Node* effect,
                                                     Node* control) {
-  CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
   Node* value = node->InputAt(0);
-
-  Node* value32 = graph()->NewNode(machine()->RoundFloat64ToInt32(), value);
-  Node* check_same = graph()->NewNode(
-      machine()->Float64Equal(), value,
-      graph()->NewNode(machine()->ChangeInt32ToFloat64(), value32));
-  Node* branch_same = graph()->NewNode(common()->Branch(), check_same, control);
-
-  Node* if_smi = graph()->NewNode(common()->IfTrue(), branch_same);
-  Node* vsmi;
-  Node* if_box = graph()->NewNode(common()->IfFalse(), branch_same);
-
-  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
-    // Check if {value} is -0.
-    Node* check_zero = graph()->NewNode(machine()->Word32Equal(), value32,
-                                        jsgraph()->Int32Constant(0));
-    Node* branch_zero = graph()->NewNode(common()->Branch(BranchHint::kFalse),
-                                         check_zero, if_smi);
-
-    Node* if_zero = graph()->NewNode(common()->IfTrue(), branch_zero);
-    Node* if_notzero = graph()->NewNode(common()->IfFalse(), branch_zero);
-
-    // In case of 0, we need to check the high bits for the IEEE -0 pattern.
-    Node* check_negative = graph()->NewNode(
-        machine()->Int32LessThan(),
-        graph()->NewNode(machine()->Float64ExtractHighWord32(), value),
-        jsgraph()->Int32Constant(0));
-    Node* branch_negative = graph()->NewNode(
-        common()->Branch(BranchHint::kFalse), check_negative, if_zero);
-
-    Node* if_negative = graph()->NewNode(common()->IfTrue(), branch_negative);
-    Node* if_notnegative =
-        graph()->NewNode(common()->IfFalse(), branch_negative);
-
-    // We need to create a box for negative 0.
-    if_smi = graph()->NewNode(common()->Merge(2), if_notzero, if_notnegative);
-    if_box = graph()->NewNode(common()->Merge(2), if_box, if_negative);
-  }
-
-  // On 64-bit machines we can just wrap the 32-bit integer in a smi, for 32-bit
-  // machines we need to deal with potential overflow and fallback to boxing.
-  if (machine()->Is64()) {
-    vsmi = ChangeInt32ToSmi(value32);
-  } else {
-    Node* smi_tag = graph()->NewNode(machine()->Int32AddWithOverflow(), value32,
-                                     value32, if_smi);
-
-    Node* check_ovf =
-        graph()->NewNode(common()->Projection(1), smi_tag, if_smi);
-    Node* branch_ovf = graph()->NewNode(common()->Branch(BranchHint::kFalse),
-                                        check_ovf, if_smi);
-
-    Node* if_ovf = graph()->NewNode(common()->IfTrue(), branch_ovf);
-    if_box = graph()->NewNode(common()->Merge(2), if_ovf, if_box);
-
-    if_smi = graph()->NewNode(common()->IfFalse(), branch_ovf);
-    vsmi = graph()->NewNode(common()->Projection(0), smi_tag, if_smi);
-  }
-
-  // Allocate the box for the {value}.
-  ValueEffectControl box = AllocateHeapNumberWithValue(value, effect, if_box);
-
-  control = graph()->NewNode(common()->Merge(2), if_smi, box.control);
-  value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
-                           vsmi, box.value, control);
-  effect =
-      graph()->NewNode(common()->EffectPhi(2), effect, box.effect, control);
-  return ValueEffectControl(value, effect, control);
+  return AllocateHeapNumberWithValue(value, effect, control);
 }
 
 EffectControlLinearizer::ValueEffectControl
@@ -939,6 +898,157 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerTruncateTaggedToBit(Node* node, Node* effect,
+                                                  Node* control) {
+  Node* value = node->InputAt(0);
+  Node* one = jsgraph()->Int32Constant(1);
+  Node* zero = jsgraph()->Int32Constant(0);
+  Node* fzero = jsgraph()->Float64Constant(0.0);
+
+  // Collect effect/control/value triples.
+  int count = 0;
+  Node* values[7];
+  Node* effects[7];
+  Node* controls[6];
+
+  // Check if {value} is a Smi.
+  Node* check_smi = ObjectIsSmi(value);
+  Node* branch_smi = graph()->NewNode(common()->Branch(BranchHint::kFalse),
+                                      check_smi, control);
+
+  // If {value} is a Smi, then we only need to check that it's not zero.
+  Node* if_smi = graph()->NewNode(common()->IfTrue(), branch_smi);
+  Node* esmi = effect;
+  {
+    controls[count] = if_smi;
+    effects[count] = esmi;
+    values[count] =
+        graph()->NewNode(machine()->Word32Equal(),
+                         graph()->NewNode(machine()->WordEqual(), value,
+                                          jsgraph()->ZeroConstant()),
+                         zero);
+    count++;
+  }
+  control = graph()->NewNode(common()->IfFalse(), branch_smi);
+
+  // Load the map instance type of {value}.
+  Node* value_map = effect = graph()->NewNode(
+      simplified()->LoadField(AccessBuilder::ForMap()), value, effect, control);
+  Node* value_instance_type = effect = graph()->NewNode(
+      simplified()->LoadField(AccessBuilder::ForMapInstanceType()), value_map,
+      effect, control);
+
+  // Check if {value} is an Oddball.
+  Node* check_oddball =
+      graph()->NewNode(machine()->Word32Equal(), value_instance_type,
+                       jsgraph()->Int32Constant(ODDBALL_TYPE));
+  Node* branch_oddball = graph()->NewNode(common()->Branch(BranchHint::kTrue),
+                                          check_oddball, control);
+
+  // The only Oddball {value} that is trueish is true itself.
+  Node* if_oddball = graph()->NewNode(common()->IfTrue(), branch_oddball);
+  Node* eoddball = effect;
+  {
+    controls[count] = if_oddball;
+    effects[count] = eoddball;
+    values[count] = graph()->NewNode(machine()->WordEqual(), value,
+                                     jsgraph()->TrueConstant());
+    count++;
+  }
+  control = graph()->NewNode(common()->IfFalse(), branch_oddball);
+
+  // Check if {value} is a String.
+  Node* check_string =
+      graph()->NewNode(machine()->Int32LessThan(), value_instance_type,
+                       jsgraph()->Int32Constant(FIRST_NONSTRING_TYPE));
+  Node* branch_string =
+      graph()->NewNode(common()->Branch(), check_string, control);
+
+  // For String {value}, we need to check that the length is not zero.
+  Node* if_string = graph()->NewNode(common()->IfTrue(), branch_string);
+  Node* estring = effect;
+  {
+    // Load the {value} length.
+    Node* value_length = estring = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForStringLength()), value,
+        estring, if_string);
+
+    controls[count] = if_string;
+    effects[count] = estring;
+    values[count] =
+        graph()->NewNode(machine()->Word32Equal(),
+                         graph()->NewNode(machine()->WordEqual(), value_length,
+                                          jsgraph()->ZeroConstant()),
+                         zero);
+    count++;
+  }
+  control = graph()->NewNode(common()->IfFalse(), branch_string);
+
+  // Check if {value} is a HeapNumber.
+  Node* check_heapnumber =
+      graph()->NewNode(machine()->Word32Equal(), value_instance_type,
+                       jsgraph()->Int32Constant(HEAP_NUMBER_TYPE));
+  Node* branch_heapnumber =
+      graph()->NewNode(common()->Branch(), check_heapnumber, control);
+
+  // For HeapNumber {value}, just check that its value is not 0.0, -0.0 or NaN.
+  Node* if_heapnumber = graph()->NewNode(common()->IfTrue(), branch_heapnumber);
+  Node* eheapnumber = effect;
+  {
+    // Load the raw value of {value}.
+    Node* value_value = eheapnumber = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForHeapNumberValue()), value,
+        eheapnumber, if_heapnumber);
+
+    // Check if {value} is either less than 0.0 or greater than 0.0.
+    Node* check =
+        graph()->NewNode(machine()->Float64LessThan(), fzero, value_value);
+    Node* branch = graph()->NewNode(common()->Branch(), check, if_heapnumber);
+
+    controls[count] = graph()->NewNode(common()->IfTrue(), branch);
+    effects[count] = eheapnumber;
+    values[count] = one;
+    count++;
+
+    controls[count] = graph()->NewNode(common()->IfFalse(), branch);
+    effects[count] = eheapnumber;
+    values[count] =
+        graph()->NewNode(machine()->Float64LessThan(), value_value, fzero);
+    count++;
+  }
+  control = graph()->NewNode(common()->IfFalse(), branch_heapnumber);
+
+  // The {value} is either a JSReceiver, a Symbol or some Simd128Value. In
+  // those cases we can just check the undetectable bit on the map, which
+  // will only be set for certain JSReceivers, i.e. document.all.
+  {
+    // Load the {value} map bit field.
+    Node* value_map_bitfield = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForMapBitField()), value_map,
+        effect, control);
+
+    controls[count] = control;
+    effects[count] = effect;
+    values[count] = graph()->NewNode(
+        machine()->Word32Equal(),
+        graph()->NewNode(machine()->Word32And(), value_map_bitfield,
+                         jsgraph()->Int32Constant(1 << Map::kIsUndetectable)),
+        zero);
+    count++;
+  }
+
+  // Merge the different controls.
+  control = graph()->NewNode(common()->Merge(count), count, controls);
+  effects[count] = control;
+  effect = graph()->NewNode(common()->EffectPhi(count), count + 1, effects);
+  values[count] = control;
+  value = graph()->NewNode(common()->Phi(MachineRepresentation::kBit, count),
+                           count + 1, values);
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerChangeTaggedToInt32(Node* node, Node* effect,
                                                   Node* control) {
   Node* value = node->InputAt(0);
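
The LowerTruncateTaggedToBit cases above mirror JavaScript ToBoolean: a non-zero Smi, the oddball true, a non-empty string, a HeapNumber that is neither 0, -0 nor NaN, and any non-undetectable receiver are trueish. A hedged sketch of that truth table; TaggedSketch and its fields are assumed stand-ins for V8's tagged values, not real API:

  #include <cstddef>

  struct TaggedSketch {
    enum Kind { kSmi, kOddball, kString, kHeapNumber, kOtherReceiver } kind;
    double number = 0;             // kSmi / kHeapNumber payload
    bool is_true_oddball = false;  // kOddball: only `true` is trueish
    size_t string_length = 0;      // kString
    bool undetectable = false;     // kOtherReceiver, e.g. document.all
  };

  bool TruncateToBitSketch(const TaggedSketch& v) {
    switch (v.kind) {
      case TaggedSketch::kSmi:           return v.number != 0;
      case TaggedSketch::kOddball:       return v.is_true_oddball;
      case TaggedSketch::kString:        return v.string_length != 0;
      case TaggedSketch::kHeapNumber:    return v.number < 0 || v.number > 0;  // false for 0, -0, NaN
      case TaggedSketch::kOtherReceiver: return !v.undetectable;
    }
    return false;
  }
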
@@ -1164,8 +1274,8 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
-EffectControlLinearizer::LowerCheckTaggedPointer(Node* node, Node* frame_state,
-                                                 Node* effect, Node* control) {
+EffectControlLinearizer::LowerCheckHeapObject(Node* node, Node* frame_state,
+                                              Node* effect, Node* control) {
   Node* value = node->InputAt(0);
 
   Node* check = ObjectIsSmi(value);
@@ -1177,19 +1287,6 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
-EffectControlLinearizer::LowerCheckTaggedSigned(Node* node, Node* frame_state,
-                                                Node* effect, Node* control) {
-  Node* value = node->InputAt(0);
-
-  Node* check = ObjectIsSmi(value);
-  control = effect =
-      graph()->NewNode(common()->DeoptimizeUnless(DeoptimizeReason::kNotASmi),
-                       check, frame_state, effect, control);
-
-  return ValueEffectControl(value, effect, control);
-}
-
-EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerCheckedInt32Add(Node* node, Node* frame_state,
                                               Node* effect, Node* control) {
   Node* lhs = node->InputAt(0);
@@ -1515,6 +1612,27 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerCheckedInt32ToTaggedSigned(Node* node,
+                                                         Node* frame_state,
+                                                         Node* effect,
+                                                         Node* control) {
+  DCHECK(SmiValuesAre31Bits());
+  Node* value = node->InputAt(0);
+
+  Node* add = graph()->NewNode(machine()->Int32AddWithOverflow(), value, value,
+                               control);
+
+  Node* check = graph()->NewNode(common()->Projection(1), add, control);
+  control = effect =
+      graph()->NewNode(common()->DeoptimizeIf(DeoptimizeReason::kOverflow),
+                       check, frame_state, effect, control);
+
+  value = graph()->NewNode(common()->Projection(0), add, control);
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerCheckedUint32ToInt32(Node* node,
                                                    Node* frame_state,
                                                    Node* effect,
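
LowerCheckedInt32ToTaggedSigned above relies on the 31-bit Smi encoding: tagging is a left shift by one, and adding the value to itself performs that shift while the overflow projection catches values outside the Smi range, which then deoptimize. A hedged sketch using a GCC/Clang builtin; TryTagSmi is an assumed helper, not V8 code:

  #include <cstdint>

  // value + value == value << 1, and the add overflows exactly when the
  // 31-bit Smi range is exceeded.
  bool TryTagSmi(int32_t value, int32_t* tagged_out) {
    int32_t result;
    if (__builtin_add_overflow(value, value, &result)) return false;  // would deoptimize
    *tagged_out = result;  // Smi-tagged representation on 31-bit-Smi builds
    return true;
  }
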
@@ -1531,6 +1649,22 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerCheckedUint32ToTaggedSigned(Node* node,
+                                                          Node* frame_state,
+                                                          Node* effect,
+                                                          Node* control) {
+  Node* value = node->InputAt(0);
+  Node* check = graph()->NewNode(machine()->Uint32LessThanOrEqual(), value,
+                                 SmiMaxValueConstant());
+  control = effect = graph()->NewNode(
+      common()->DeoptimizeUnless(DeoptimizeReason::kLostPrecision), check,
+      frame_state, effect, control);
+  value = ChangeUint32ToSmi(value);
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::BuildCheckedFloat64ToInt32(CheckForMinusZeroMode mode,
                                                     Node* value,
                                                     Node* frame_state,
@@ -1667,8 +1801,8 @@
       break;
     }
     case CheckTaggedInputMode::kNumberOrOddball: {
-      Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
-                                      check_number, control);
+      Node* branch =
+          graph()->NewNode(common()->Branch(), check_number, control);
 
       Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
       Node* etrue = effect;
@@ -1710,8 +1844,7 @@
   Node* value = node->InputAt(0);
 
   Node* check = ObjectIsSmi(value);
-  Node* branch =
-      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
+  Node* branch = graph()->NewNode(common()->Branch(), check, control);
 
   // In the Smi case, just convert to int32 and then float64.
   Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
@@ -1736,6 +1869,21 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerCheckedTaggedToTaggedSigned(Node* node,
+                                                          Node* frame_state,
+                                                          Node* effect,
+                                                          Node* control) {
+  Node* value = node->InputAt(0);
+
+  Node* check = ObjectIsSmi(value);
+  control = effect =
+      graph()->NewNode(common()->DeoptimizeUnless(DeoptimizeReason::kNotASmi),
+                       check, frame_state, effect, control);
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerTruncateTaggedToWord32(Node* node, Node* effect,
                                                      Node* control) {
   Node* value = node->InputAt(0);
@@ -1996,6 +2144,26 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerArrayBufferWasNeutered(Node* node, Node* effect,
+                                                     Node* control) {
+  Node* value = node->InputAt(0);
+
+  Node* value_bit_field = effect = graph()->NewNode(
+      simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()), value,
+      effect, control);
+  value = graph()->NewNode(
+      machine()->Word32Equal(),
+      graph()->NewNode(machine()->Word32Equal(),
+                       graph()->NewNode(machine()->Word32And(), value_bit_field,
+                                        jsgraph()->Int32Constant(
+                                            JSArrayBuffer::WasNeutered::kMask)),
+                       jsgraph()->Int32Constant(0)),
+      jsgraph()->Int32Constant(0));
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerStringCharCodeAt(Node* node, Node* effect,
                                                Node* control) {
   Node* subject = node->InputAt(0);
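
In LowerArrayBufferWasNeutered above, the two nested Word32Equal-with-zero nodes compute "(bit_field & WasNeutered::kMask) != 0" as a 0/1 value. A small check of that idiom; the mask bit chosen here is an assumption purely for illustration:

  #include <cassert>
  #include <cstdint>
  #include <initializer_list>

  int main() {
    const uint32_t kWasNeuteredMask = 1u << 3;  // assumed bit position
    for (uint32_t bit_field : {0u, kWasNeuteredMask, 0xFFu}) {
      uint32_t was_neutered = ((bit_field & kWasNeuteredMask) == 0u) == 0u;
      assert((was_neutered != 0) == ((bit_field & kWasNeuteredMask) != 0));
    }
    return 0;
  }
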
@@ -2382,6 +2550,236 @@
 }
 
 EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerStringFromCodePoint(Node* node, Node* effect,
+                                                  Node* control) {
+  Node* value = node->InputAt(0);
+  Node* code = value;
+
+  Node* etrue0 = effect;
+  Node* vtrue0;
+
+  // Check if the {code} is a single code unit
+  Node* check0 = graph()->NewNode(machine()->Uint32LessThanOrEqual(), code,
+                                  jsgraph()->Uint32Constant(0xFFFF));
+  Node* branch0 =
+      graph()->NewNode(common()->Branch(BranchHint::kTrue), check0, control);
+
+  Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
+  {
+    // Check if the {code} is a one byte character
+    Node* check1 = graph()->NewNode(
+        machine()->Uint32LessThanOrEqual(), code,
+        jsgraph()->Uint32Constant(String::kMaxOneByteCharCode));
+    Node* branch1 =
+        graph()->NewNode(common()->Branch(BranchHint::kTrue), check1, if_true0);
+
+    Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
+    Node* etrue1 = etrue0;
+    Node* vtrue1;
+    {
+      // Load the isolate wide single character string cache.
+      Node* cache =
+          jsgraph()->HeapConstant(factory()->single_character_string_cache());
+
+      // Compute the {cache} index for {code}.
+      Node* index =
+          machine()->Is32()
+              ? code
+              : graph()->NewNode(machine()->ChangeUint32ToUint64(), code);
+
+      // Check if we have an entry for the {code} in the single character string
+      // cache already.
+      Node* entry = etrue1 = graph()->NewNode(
+          simplified()->LoadElement(AccessBuilder::ForFixedArrayElement()),
+          cache, index, etrue1, if_true1);
+
+      Node* check2 = graph()->NewNode(machine()->WordEqual(), entry,
+                                      jsgraph()->UndefinedConstant());
+      Node* branch2 = graph()->NewNode(common()->Branch(BranchHint::kFalse),
+                                       check2, if_true1);
+
+      Node* if_true2 = graph()->NewNode(common()->IfTrue(), branch2);
+      Node* etrue2 = etrue1;
+      Node* vtrue2;
+      {
+        // Allocate a new SeqOneByteString for {code}.
+        vtrue2 = etrue2 = graph()->NewNode(
+            simplified()->Allocate(NOT_TENURED),
+            jsgraph()->Int32Constant(SeqOneByteString::SizeFor(1)), etrue2,
+            if_true2);
+        etrue2 = graph()->NewNode(
+            simplified()->StoreField(AccessBuilder::ForMap()), vtrue2,
+            jsgraph()->HeapConstant(factory()->one_byte_string_map()), etrue2,
+            if_true2);
+        etrue2 = graph()->NewNode(
+            simplified()->StoreField(AccessBuilder::ForNameHashField()), vtrue2,
+            jsgraph()->IntPtrConstant(Name::kEmptyHashField), etrue2, if_true2);
+        etrue2 = graph()->NewNode(
+            simplified()->StoreField(AccessBuilder::ForStringLength()), vtrue2,
+            jsgraph()->SmiConstant(1), etrue2, if_true2);
+        etrue2 = graph()->NewNode(
+            machine()->Store(StoreRepresentation(MachineRepresentation::kWord8,
+                                                 kNoWriteBarrier)),
+            vtrue2, jsgraph()->IntPtrConstant(SeqOneByteString::kHeaderSize -
+                                              kHeapObjectTag),
+            code, etrue2, if_true2);
+
+        // Remember it in the {cache}.
+        etrue2 = graph()->NewNode(
+            simplified()->StoreElement(AccessBuilder::ForFixedArrayElement()),
+            cache, index, vtrue2, etrue2, if_true2);
+      }
+
+      // Use the {entry} from the {cache}.
+      Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
+      Node* efalse2 = etrue0;
+      Node* vfalse2 = entry;
+
+      if_true1 = graph()->NewNode(common()->Merge(2), if_true2, if_false2);
+      etrue1 =
+          graph()->NewNode(common()->EffectPhi(2), etrue2, efalse2, if_true1);
+      vtrue1 =
+          graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
+                           vtrue2, vfalse2, if_true1);
+    }
+
+    Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
+    Node* efalse1 = effect;
+    Node* vfalse1;
+    {
+      // Allocate a new SeqTwoByteString for {code}.
+      vfalse1 = efalse1 = graph()->NewNode(
+          simplified()->Allocate(NOT_TENURED),
+          jsgraph()->Int32Constant(SeqTwoByteString::SizeFor(1)), efalse1,
+          if_false1);
+      efalse1 = graph()->NewNode(
+          simplified()->StoreField(AccessBuilder::ForMap()), vfalse1,
+          jsgraph()->HeapConstant(factory()->string_map()), efalse1, if_false1);
+      efalse1 = graph()->NewNode(
+          simplified()->StoreField(AccessBuilder::ForNameHashField()), vfalse1,
+          jsgraph()->IntPtrConstant(Name::kEmptyHashField), efalse1, if_false1);
+      efalse1 = graph()->NewNode(
+          simplified()->StoreField(AccessBuilder::ForStringLength()), vfalse1,
+          jsgraph()->SmiConstant(1), efalse1, if_false1);
+      efalse1 = graph()->NewNode(
+          machine()->Store(StoreRepresentation(MachineRepresentation::kWord16,
+                                               kNoWriteBarrier)),
+          vfalse1, jsgraph()->IntPtrConstant(SeqTwoByteString::kHeaderSize -
+                                             kHeapObjectTag),
+          code, efalse1, if_false1);
+    }
+
+    if_true0 = graph()->NewNode(common()->Merge(2), if_true1, if_false1);
+    etrue0 =
+        graph()->NewNode(common()->EffectPhi(2), etrue1, efalse1, if_true0);
+    vtrue0 = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
+                              vtrue1, vfalse1, if_true0);
+  }
+
+  // Generate surrogate pair string
+  Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
+  Node* efalse0 = effect;
+  Node* vfalse0;
+  {
+    switch (UnicodeEncodingOf(node->op())) {
+      case UnicodeEncoding::UTF16:
+        break;
+
+      case UnicodeEncoding::UTF32: {
+        // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
+        Node* lead_offset = jsgraph()->Int32Constant(0xD800 - (0x10000 >> 10));
+
+        // lead = (codepoint >> 10) + LEAD_OFFSET
+        Node* lead =
+            graph()->NewNode(machine()->Int32Add(),
+                             graph()->NewNode(machine()->Word32Shr(), code,
+                                              jsgraph()->Int32Constant(10)),
+                             lead_offset);
+
+        // trail = (codepoint & 0x3FF) + 0xDC00;
+        Node* trail =
+            graph()->NewNode(machine()->Int32Add(),
+                             graph()->NewNode(machine()->Word32And(), code,
+                                              jsgraph()->Int32Constant(0x3FF)),
+                             jsgraph()->Int32Constant(0xDC00));
+
+        // codepoint = (trail << 16) | lead;
+        code = graph()->NewNode(machine()->Word32Or(),
+                                graph()->NewNode(machine()->Word32Shl(), trail,
+                                                 jsgraph()->Int32Constant(16)),
+                                lead);
+        break;
+      }
+    }
+
+    // Allocate a new SeqTwoByteString for {code}.
+    vfalse0 = efalse0 =
+        graph()->NewNode(simplified()->Allocate(NOT_TENURED),
+                         jsgraph()->Int32Constant(SeqTwoByteString::SizeFor(2)),
+                         efalse0, if_false0);
+    efalse0 = graph()->NewNode(
+        simplified()->StoreField(AccessBuilder::ForMap()), vfalse0,
+        jsgraph()->HeapConstant(factory()->string_map()), efalse0, if_false0);
+    efalse0 = graph()->NewNode(
+        simplified()->StoreField(AccessBuilder::ForNameHashField()), vfalse0,
+        jsgraph()->IntPtrConstant(Name::kEmptyHashField), efalse0, if_false0);
+    efalse0 = graph()->NewNode(
+        simplified()->StoreField(AccessBuilder::ForStringLength()), vfalse0,
+        jsgraph()->SmiConstant(2), efalse0, if_false0);
+    efalse0 = graph()->NewNode(
+        machine()->Store(StoreRepresentation(MachineRepresentation::kWord32,
+                                             kNoWriteBarrier)),
+        vfalse0, jsgraph()->IntPtrConstant(SeqTwoByteString::kHeaderSize -
+                                           kHeapObjectTag),
+        code, efalse0, if_false0);
+  }
+
+  control = graph()->NewNode(common()->Merge(2), if_true0, if_false0);
+  effect = graph()->NewNode(common()->EffectPhi(2), etrue0, efalse0, control);
+  value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
+                           vtrue0, vfalse0, control);
+
+  return ValueEffectControl(value, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerStringComparison(Callable const& callable,
+                                               Node* node, Node* effect,
+                                               Node* control) {
+  Operator::Properties properties = Operator::kEliminatable;
+  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
+  CallDescriptor* desc = Linkage::GetStubCallDescriptor(
+      isolate(), graph()->zone(), callable.descriptor(), 0, flags, properties);
+  node->InsertInput(graph()->zone(), 0,
+                    jsgraph()->HeapConstant(callable.code()));
+  node->AppendInput(graph()->zone(), jsgraph()->NoContextConstant());
+  node->AppendInput(graph()->zone(), effect);
+  NodeProperties::ChangeOp(node, common()->Call(desc));
+  return ValueEffectControl(node, node, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerStringEqual(Node* node, Node* effect,
+                                          Node* control) {
+  return LowerStringComparison(CodeFactory::StringEqual(isolate()), node,
+                               effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerStringLessThan(Node* node, Node* effect,
+                                             Node* control) {
+  return LowerStringComparison(CodeFactory::StringLessThan(isolate()), node,
+                               effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
+EffectControlLinearizer::LowerStringLessThanOrEqual(Node* node, Node* effect,
+                                                    Node* control) {
+  return LowerStringComparison(CodeFactory::StringLessThanOrEqual(isolate()),
+                               node, effect, control);
+}
+
+EffectControlLinearizer::ValueEffectControl
 EffectControlLinearizer::LowerCheckFloat64Hole(Node* node, Node* frame_state,
                                                Node* effect, Node* control) {
   // If we reach this point w/o eliminating the {node} that's marked
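
Note on the lowering above: the UTF32 branch uses the standard surrogate-pair identities lead = (cp >> 10) + (0xD800 - (0x10000 >> 10)) and trail = (cp & 0x3FF) + 0xDC00, then packs both 16-bit units into a single word. The following standalone C++ sketch (illustration only, not part of this change and not V8 API) reproduces that arithmetic for U+1F600:

// Standalone sketch (not V8 code): UTF-32 to UTF-16 surrogate-pair arithmetic.
#include <cstdio>

int main() {
  const unsigned code_point = 0x1F600;                    // Example input: U+1F600.
  const unsigned kLeadOffset = 0xD800 - (0x10000 >> 10);  // 0xD7C0
  unsigned lead = (code_point >> 10) + kLeadOffset;       // 0xD83D
  unsigned trail = (code_point & 0x3FF) + 0xDC00;         // 0xDE00
  // The lowered code stores both units as one 32-bit word with the trail
  // surrogate in the upper half, matching a little-endian two-byte string.
  unsigned packed = (trail << 16) | lead;
  std::printf("lead=%04X trail=%04X packed=%08X\n", lead, trail, packed);
  return 0;
}

Running it prints lead=D83D trail=DE00 packed=DE00D83D, which matches the UTF-16 encoding of U+1F600.
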
diff --git a/src/compiler/effect-control-linearizer.h b/src/compiler/effect-control-linearizer.h
index 98f08c7..0199fd0 100644
--- a/src/compiler/effect-control-linearizer.h
+++ b/src/compiler/effect-control-linearizer.h
@@ -12,6 +12,8 @@
 namespace v8 {
 namespace internal {
 
+// Forward declarations.
+class Callable;
 class Zone;
 
 namespace compiler {
@@ -71,10 +73,8 @@
                                       Node* effect, Node* control);
   ValueEffectControl LowerCheckIf(Node* node, Node* frame_state, Node* effect,
                                   Node* control);
-  ValueEffectControl LowerCheckTaggedPointer(Node* node, Node* frame_state,
-                                             Node* effect, Node* control);
-  ValueEffectControl LowerCheckTaggedSigned(Node* node, Node* frame_state,
-                                            Node* effect, Node* control);
+  ValueEffectControl LowerCheckHeapObject(Node* node, Node* frame_state,
+                                          Node* effect, Node* control);
   ValueEffectControl LowerCheckedInt32Add(Node* node, Node* frame_state,
                                           Node* effect, Node* control);
   ValueEffectControl LowerCheckedInt32Sub(Node* node, Node* frame_state,
@@ -89,8 +89,16 @@
                                            Node* effect, Node* control);
   ValueEffectControl LowerCheckedInt32Mul(Node* node, Node* frame_state,
                                           Node* effect, Node* control);
+  ValueEffectControl LowerCheckedInt32ToTaggedSigned(Node* node,
+                                                     Node* frame_state,
+                                                     Node* effect,
+                                                     Node* control);
   ValueEffectControl LowerCheckedUint32ToInt32(Node* node, Node* frame_state,
                                                Node* effect, Node* control);
+  ValueEffectControl LowerCheckedUint32ToTaggedSigned(Node* node,
+                                                      Node* frame_state,
+                                                      Node* effect,
+                                                      Node* control);
   ValueEffectControl LowerCheckedFloat64ToInt32(Node* node, Node* frame_state,
                                                 Node* effect, Node* control);
   ValueEffectControl LowerCheckedTaggedSignedToInt32(Node* node,
@@ -101,8 +109,14 @@
                                                Node* effect, Node* control);
   ValueEffectControl LowerCheckedTaggedToFloat64(Node* node, Node* frame_state,
                                                  Node* effect, Node* control);
+  ValueEffectControl LowerCheckedTaggedToTaggedSigned(Node* node,
+                                                      Node* frame_state,
+                                                      Node* effect,
+                                                      Node* control);
   ValueEffectControl LowerChangeTaggedToFloat64(Node* node, Node* effect,
                                                 Node* control);
+  ValueEffectControl LowerTruncateTaggedToBit(Node* node, Node* effect,
+                                              Node* control);
   ValueEffectControl LowerTruncateTaggedToFloat64(Node* node, Node* effect,
                                                   Node* control);
   ValueEffectControl LowerTruncateTaggedToWord32(Node* node, Node* effect,
@@ -122,10 +136,19 @@
                                          Node* control);
   ValueEffectControl LowerObjectIsUndetectable(Node* node, Node* effect,
                                                Node* control);
+  ValueEffectControl LowerArrayBufferWasNeutered(Node* node, Node* effect,
+                                                 Node* control);
   ValueEffectControl LowerStringCharCodeAt(Node* node, Node* effect,
                                            Node* control);
   ValueEffectControl LowerStringFromCharCode(Node* node, Node* effect,
                                              Node* control);
+  ValueEffectControl LowerStringFromCodePoint(Node* node, Node* effect,
+                                              Node* control);
+  ValueEffectControl LowerStringEqual(Node* node, Node* effect, Node* control);
+  ValueEffectControl LowerStringLessThan(Node* node, Node* effect,
+                                         Node* control);
+  ValueEffectControl LowerStringLessThanOrEqual(Node* node, Node* effect,
+                                                Node* control);
   ValueEffectControl LowerCheckFloat64Hole(Node* node, Node* frame_state,
                                            Node* effect, Node* control);
   ValueEffectControl LowerCheckTaggedHole(Node* node, Node* frame_state,
@@ -165,6 +188,8 @@
   ValueEffectControl BuildCheckedHeapNumberOrOddballToFloat64(
       CheckTaggedInputMode mode, Node* value, Node* frame_state, Node* effect,
       Node* control);
+  ValueEffectControl LowerStringComparison(Callable const& callable, Node* node,
+                                           Node* effect, Node* control);
 
   Node* ChangeInt32ToSmi(Node* value);
   Node* ChangeUint32ToSmi(Node* value);
diff --git a/src/compiler/escape-analysis-reducer.cc b/src/compiler/escape-analysis-reducer.cc
index c69b86c..d997813 100644
--- a/src/compiler/escape-analysis-reducer.cc
+++ b/src/compiler/escape-analysis-reducer.cc
@@ -97,6 +97,22 @@
   return NoChange();
 }
 
+namespace {
+
+Node* MaybeGuard(JSGraph* jsgraph, Node* original, Node* replacement) {
+  // We might need to guard the replacement if the type of the {replacement}
+  // node is not in a sub-type relation to the type of the {original} node.
+  Type* const replacement_type = NodeProperties::GetType(replacement);
+  Type* const original_type = NodeProperties::GetType(original);
+  if (!replacement_type->Is(original_type)) {
+    Node* const control = NodeProperties::GetControlInput(original);
+    replacement = jsgraph->graph()->NewNode(
+        jsgraph->common()->TypeGuard(original_type), replacement, control);
+  }
+  return replacement;
+}
+
+}  // namespace
 
 Reduction EscapeAnalysisReducer::ReduceLoad(Node* node) {
   DCHECK(node->opcode() == IrOpcode::kLoadField ||
@@ -104,12 +120,15 @@
   if (node->id() < static_cast<NodeId>(fully_reduced_.length())) {
     fully_reduced_.Add(node->id());
   }
-  if (Node* rep = escape_analysis()->GetReplacement(node)) {
-    isolate()->counters()->turbo_escape_loads_replaced()->Increment();
-    TRACE("Replaced #%d (%s) with #%d (%s)\n", node->id(),
-          node->op()->mnemonic(), rep->id(), rep->op()->mnemonic());
-    ReplaceWithValue(node, rep);
-    return Replace(rep);
+  if (escape_analysis()->IsVirtual(NodeProperties::GetValueInput(node, 0))) {
+    if (Node* rep = escape_analysis()->GetReplacement(node)) {
+      isolate()->counters()->turbo_escape_loads_replaced()->Increment();
+      TRACE("Replaced #%d (%s) with #%d (%s)\n", node->id(),
+            node->op()->mnemonic(), rep->id(), rep->op()->mnemonic());
+      rep = MaybeGuard(jsgraph(), node, rep);
+      ReplaceWithValue(node, rep);
+      return Replace(rep);
+    }
   }
   return NoChange();
 }
@@ -305,6 +324,11 @@
   if (input->opcode() == IrOpcode::kFinishRegion ||
       input->opcode() == IrOpcode::kAllocate) {
     if (escape_analysis()->IsVirtual(input)) {
+      if (escape_analysis()->IsCyclicObjectState(effect, input)) {
+        // TODO(mstarzinger): Represent cyclic object states differently to
+        // ensure the scheduler can properly handle such object states.
+        FATAL("Cyclic object state detected by escape analysis.");
+      }
       if (Node* object_state =
               escape_analysis()->GetOrCreateObjectState(effect, input)) {
         if (node_multiused || (multiple_users && !already_cloned)) {
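
Note on MaybeGuard above: a load is only rewired to its tracked value when the replacement's type is a subtype of the original's; otherwise the value is narrowed with a TypeGuard. A minimal standalone sketch of that subtype check (illustration only, using a made-up toy lattice rather than V8's Type system):

// Sketch (toy type lattice, not V8's Type system): keep a replacement only if
// its type fits the original; otherwise narrow it, as the TypeGuard does.
#include <cstdio>

enum class Type { SignedSmall, Number, String, Any };

// True if a is a subtype of b in this toy lattice.
bool Is(Type a, Type b) {
  if (a == b || b == Type::Any) return true;
  return a == Type::SignedSmall && b == Type::Number;
}

struct Node { Type type; };

Node MaybeGuard(const Node& original, Node replacement) {
  if (!Is(replacement.type, original.type)) {
    // In the reducer this inserts a TypeGuard node; here we just narrow.
    replacement.type = original.type;
  }
  return replacement;
}

int main() {
  Node original{Type::Number};
  Node replacement{Type::Any};
  std::printf("narrowed to Number: %d\n",
              MaybeGuard(original, replacement).type == Type::Number);
  return 0;
}
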
diff --git a/src/compiler/escape-analysis.cc b/src/compiler/escape-analysis.cc
index 437c01f..3f889cc 100644
--- a/src/compiler/escape-analysis.cc
+++ b/src/compiler/escape-analysis.cc
@@ -12,13 +12,13 @@
 #include "src/compiler/common-operator.h"
 #include "src/compiler/graph-reducer.h"
 #include "src/compiler/js-operator.h"
-#include "src/compiler/node.h"
 #include "src/compiler/node-matchers.h"
 #include "src/compiler/node-properties.h"
+#include "src/compiler/node.h"
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
 #include "src/objects-inl.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -795,8 +795,16 @@
       case IrOpcode::kSelect:
       // TODO(mstarzinger): The following list of operators will eventually be
       // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
+      case IrOpcode::kStringEqual:
+      case IrOpcode::kStringLessThan:
+      case IrOpcode::kStringLessThanOrEqual:
+      case IrOpcode::kPlainPrimitiveToNumber:
+      case IrOpcode::kPlainPrimitiveToWord32:
+      case IrOpcode::kPlainPrimitiveToFloat64:
+      case IrOpcode::kStringCharCodeAt:
       case IrOpcode::kObjectIsCallable:
       case IrOpcode::kObjectIsNumber:
+      case IrOpcode::kObjectIsReceiver:
       case IrOpcode::kObjectIsString:
       case IrOpcode::kObjectIsUndetectable:
         if (SetEscaped(rep)) {
@@ -853,6 +861,7 @@
       status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
       virtual_states_(zone),
       replacements_(zone),
+      cycle_detection_(zone),
       cache_(nullptr) {}
 
 EscapeAnalysis::~EscapeAnalysis() {}
@@ -1456,13 +1465,13 @@
     int offset = OffsetForFieldAccess(node);
     if (static_cast<size_t>(offset) >= object->field_count()) return;
     Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
-    // TODO(mstarzinger): The following is a workaround to not track the code
-    // entry field in virtual JSFunction objects. We only ever store the inner
-    // pointer into the compile lazy stub in this field and the deoptimizer has
-    // this assumption hard-coded in {TranslatedState::MaterializeAt} as well.
+    // TODO(mstarzinger): The following is a workaround to not track some well
+    // known raw fields. We only ever store default initial values into these
+    // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
     if (val->opcode() == IrOpcode::kInt32Constant ||
         val->opcode() == IrOpcode::kInt64Constant) {
-      DCHECK_EQ(JSFunction::kCodeEntryOffset, FieldAccessOf(node->op()).offset);
+      DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
+             FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
       val = slot_not_analyzed_;
     }
     if (object->GetField(offset) != val) {
@@ -1557,6 +1566,27 @@
   return nullptr;
 }
 
+bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
+  if ((node->opcode() == IrOpcode::kFinishRegion ||
+       node->opcode() == IrOpcode::kAllocate) &&
+      IsVirtual(node)) {
+    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
+                                               ResolveReplacement(node))) {
+      if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
+      cycle_detection_.insert(vobj);
+      bool cycle_detected = false;
+      for (size_t i = 0; i < vobj->field_count(); ++i) {
+        if (Node* field = vobj->GetField(i)) {
+          if (IsCyclicObjectState(effect, field)) cycle_detected = true;
+        }
+      }
+      cycle_detection_.erase(vobj);
+      return cycle_detected;
+    }
+  }
+  return false;
+}
+
 void EscapeAnalysis::DebugPrintState(VirtualState* state) {
   PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
   for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
diff --git a/src/compiler/escape-analysis.h b/src/compiler/escape-analysis.h
index 839e54c..ec5154e 100644
--- a/src/compiler/escape-analysis.h
+++ b/src/compiler/escape-analysis.h
@@ -32,6 +32,7 @@
   bool IsEscaped(Node* node);
   bool CompareVirtualObjects(Node* left, Node* right);
   Node* GetOrCreateObjectState(Node* effect, Node* node);
+  bool IsCyclicObjectState(Node* effect, Node* node);
   bool ExistsVirtualAllocate();
 
  private:
@@ -75,6 +76,7 @@
   EscapeStatusAnalysis* status_analysis_;
   ZoneVector<VirtualState*> virtual_states_;
   ZoneVector<Node*> replacements_;
+  ZoneSet<VirtualObject*> cycle_detection_;
   MergeCache* cache_;
 
   DISALLOW_COPY_AND_ASSIGN(EscapeAnalysis);
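
Note on IsCyclicObjectState above: it is a depth-first walk over virtual-object fields with a path-local visiting set (cycle_detection_), so only cycles reachable on the current path are reported. A standalone sketch of the same pattern (illustration only, not V8 code):

// Sketch (not V8 code): DFS with a path-local visiting set to detect cycles.
#include <cstdio>
#include <set>
#include <vector>

struct Obj {
  std::vector<Obj*> fields;
};

bool HasCycle(Obj* obj, std::set<Obj*>* visiting) {
  if (visiting->count(obj)) return true;  // Back edge on the current path.
  visiting->insert(obj);
  bool found = false;
  for (Obj* field : obj->fields) {
    if (field != nullptr && HasCycle(field, visiting)) found = true;
  }
  visiting->erase(obj);  // Path-local, like cycle_detection_ above.
  return found;
}

int main() {
  Obj a, b;
  a.fields = {&b};
  b.fields = {&a};  // a -> b -> a forms a cycle.
  std::set<Obj*> visiting;
  std::printf("cyclic: %d\n", HasCycle(&a, &visiting));
  return 0;
}
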
diff --git a/src/compiler/graph-reducer.h b/src/compiler/graph-reducer.h
index 2ac60a6..a089c12 100644
--- a/src/compiler/graph-reducer.h
+++ b/src/compiler/graph-reducer.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_GRAPH_REDUCER_H_
 
 #include "src/compiler/node-marker.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/graph-visualizer.cc b/src/compiler/graph-visualizer.cc
index 9fd80ea..d810c37 100644
--- a/src/compiler/graph-visualizer.cc
+++ b/src/compiler/graph-visualizer.cc
@@ -9,7 +9,7 @@
 #include <string>
 
 #include "src/code-stubs.h"
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/compiler/all-nodes.h"
 #include "src/compiler/graph.h"
 #include "src/compiler/node-properties.h"
@@ -239,7 +239,7 @@
 
 
 std::ostream& operator<<(std::ostream& os, const AsJSON& ad) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone tmp_zone(&allocator);
   os << "{\n\"nodes\":[";
   JSONGraphNodeWriter(os, &tmp_zone, &ad.graph, ad.positions).Print();
@@ -629,7 +629,7 @@
 
 
 std::ostream& operator<<(std::ostream& os, const AsC1VCompilation& ac) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone tmp_zone(&allocator);
   GraphC1Visualizer(os, &tmp_zone).PrintCompilation(ac.info_);
   return os;
@@ -637,7 +637,7 @@
 
 
 std::ostream& operator<<(std::ostream& os, const AsC1V& ac) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone tmp_zone(&allocator);
   GraphC1Visualizer(os, &tmp_zone)
       .PrintSchedule(ac.phase_, ac.schedule_, ac.positions_, ac.instructions_);
@@ -647,7 +647,7 @@
 
 std::ostream& operator<<(std::ostream& os,
                          const AsC1VRegisterAllocationData& ac) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone tmp_zone(&allocator);
   GraphC1Visualizer(os, &tmp_zone).PrintLiveRanges(ac.phase_, ac.data_);
   return os;
@@ -658,7 +658,7 @@
 const int kVisited = 2;
 
 std::ostream& operator<<(std::ostream& os, const AsRPO& ar) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone local_zone(&allocator);
 
   // Do a post-order depth-first search on the RPO graph. For every node,
diff --git a/src/compiler/graph.h b/src/compiler/graph.h
index a694a0b..1d9e85e 100644
--- a/src/compiler/graph.h
+++ b/src/compiler/graph.h
@@ -5,8 +5,8 @@
 #ifndef V8_COMPILER_GRAPH_H_
 #define V8_COMPILER_GRAPH_H_
 
-#include "src/zone.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/ia32/code-generator-ia32.cc b/src/compiler/ia32/code-generator-ia32.cc
index ad1a992..428570a 100644
--- a/src/compiler/ia32/code-generator-ia32.cc
+++ b/src/compiler/ia32/code-generator-ia32.cc
@@ -4,7 +4,7 @@
 
 #include "src/compiler/code-generator.h"
 
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -637,9 +637,6 @@
     case kArchDebugBreak:
       __ int3();
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchNop:
     case kArchThrowTerminator:
       // don't emit code for nops.
@@ -649,8 +646,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1786,13 +1783,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
diff --git a/src/compiler/ia32/instruction-scheduler-ia32.cc b/src/compiler/ia32/instruction-scheduler-ia32.cc
index 1c62de5..ad7535c 100644
--- a/src/compiler/ia32/instruction-scheduler-ia32.cc
+++ b/src/compiler/ia32/instruction-scheduler-ia32.cc
@@ -28,8 +28,6 @@
     case kIA32Imul:
     case kIA32ImulHigh:
     case kIA32UmulHigh:
-    case kIA32Idiv:
-    case kIA32Udiv:
     case kIA32Not:
     case kIA32Neg:
     case kIA32Shl:
@@ -103,6 +101,12 @@
           ? kNoOpcodeFlags
           : kIsLoadOperation | kHasSideEffect;
 
+    case kIA32Idiv:
+    case kIA32Udiv:
+      return (instr->addressing_mode() == kMode_None)
+                 ? kMayNeedDeoptCheck
+                 : kMayNeedDeoptCheck | kIsLoadOperation | kHasSideEffect;
+
     case kIA32Movsxbl:
     case kIA32Movzxbl:
     case kIA32Movb:
diff --git a/src/compiler/ia32/instruction-selector-ia32.cc b/src/compiler/ia32/instruction-selector-ia32.cc
index 4a1e19b..7e98023 100644
--- a/src/compiler/ia32/instruction-selector-ia32.cc
+++ b/src/compiler/ia32/instruction-selector-ia32.cc
@@ -250,6 +250,10 @@
   Emit(code, 1, outputs, input_count, inputs);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   IA32OperandGenerator g(this);
@@ -262,7 +266,7 @@
   MachineRepresentation rep = store_rep.representation();
 
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     AddressingMode addressing_mode;
     InstructionOperand inputs[3];
     size_t input_count = 0;
diff --git a/src/compiler/instruction-codes.h b/src/compiler/instruction-codes.h
index c6689d8..22279fe 100644
--- a/src/compiler/instruction-codes.h
+++ b/src/compiler/instruction-codes.h
@@ -57,7 +57,6 @@
   V(ArchTableSwitch)                      \
   V(ArchNop)                              \
   V(ArchDebugBreak)                       \
-  V(ArchImpossible)                       \
   V(ArchComment)                          \
   V(ArchThrowTerminator)                  \
   V(ArchDeoptimize)                       \
diff --git a/src/compiler/instruction-scheduler.cc b/src/compiler/instruction-scheduler.cc
index 2e10794..c7fd1cc 100644
--- a/src/compiler/instruction-scheduler.cc
+++ b/src/compiler/instruction-scheduler.cc
@@ -11,11 +11,16 @@
 namespace internal {
 namespace compiler {
 
-// Compare the two nodes and return true if node1 is a better candidate than
-// node2 (i.e. node1 should be scheduled before node2).
-bool InstructionScheduler::CriticalPathFirstQueue::CompareNodes(
-    ScheduleGraphNode *node1, ScheduleGraphNode *node2) const {
-  return node1->total_latency() > node2->total_latency();
+void InstructionScheduler::SchedulingQueueBase::AddNode(
+    ScheduleGraphNode* node) {
+  // We keep the ready list sorted by total latency so that we can quickly find
+  // the next best candidate to schedule.
+  auto it = nodes_.begin();
+  while ((it != nodes_.end()) &&
+         ((*it)->total_latency() >= node->total_latency())) {
+    ++it;
+  }
+  nodes_.insert(it, node);
 }
 
 
@@ -24,12 +29,10 @@
   DCHECK(!IsEmpty());
   auto candidate = nodes_.end();
   for (auto iterator = nodes_.begin(); iterator != nodes_.end(); ++iterator) {
-    // We only consider instructions that have all their operands ready and
-    // we try to schedule the critical path first.
+    // We only consider instructions that have all their operands ready.
     if (cycle >= (*iterator)->start_cycle()) {
-      if ((candidate == nodes_.end()) || CompareNodes(*iterator, *candidate)) {
-        candidate = iterator;
-      }
+      candidate = iterator;
+      break;
     }
   }
 
@@ -133,9 +136,9 @@
       last_live_in_reg_marker_->AddSuccessor(new_node);
     }
 
-    // Make sure that new instructions are not scheduled before the last
-    // deoptimization point.
-    if (last_deopt_ != nullptr) {
+    // Make sure that instructions are not scheduled before the last
+    // deoptimization point when they depend on it.
+    if ((last_deopt_ != nullptr) && DependsOnDeoptimization(instr)) {
       last_deopt_->AddSuccessor(new_node);
     }
 
@@ -242,7 +245,6 @@
     case kArchTruncateDoubleToI:
     case kArchStackSlot:
     case kArchDebugBreak:
-    case kArchImpossible:
     case kArchComment:
     case kIeee754Float64Acos:
     case kIeee754Float64Acosh:
diff --git a/src/compiler/instruction-scheduler.h b/src/compiler/instruction-scheduler.h
index 271aa0d..7660520 100644
--- a/src/compiler/instruction-scheduler.h
+++ b/src/compiler/instruction-scheduler.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_INSTRUCTION_SCHEDULER_H_
 
 #include "src/compiler/instruction.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -21,9 +21,12 @@
   kHasSideEffect = 2,      // The instruction has some side effects (memory
                            // store, function call...)
   kIsLoadOperation = 4,    // The instruction is a memory load.
+  kMayNeedDeoptCheck = 8,  // The instruction might be associated with a deopt
+                           // check. This is the case for instructions that can
+                           // blow up with particular inputs (e.g. division by
+                           // zero on Intel platforms).
 };
 
-
 class InstructionScheduler final : public ZoneObject {
  public:
   InstructionScheduler(Zone* zone, InstructionSequence* sequence);
@@ -101,9 +104,7 @@
         nodes_(scheduler->zone()) {
     }
 
-    void AddNode(ScheduleGraphNode* node) {
-      nodes_.push_back(node);
-    }
+    void AddNode(ScheduleGraphNode* node);
 
     bool IsEmpty() const {
       return nodes_.empty();
@@ -125,11 +126,6 @@
     // Look for the best candidate to schedule, remove it from the queue and
     // return it.
     ScheduleGraphNode* PopBestCandidate(int cycle);
-
-   private:
-    // Compare the two nodes and return true if node1 is a better candidate than
-    // node2 (i.e. node1 should be scheduled before node2).
-    bool CompareNodes(ScheduleGraphNode *node1, ScheduleGraphNode *node2) const;
   };
 
   // A queue which pops a random node from the queue to perform stress tests on
@@ -162,12 +158,25 @@
   // Check whether the given instruction has side effects (e.g. function call,
   // memory store).
   bool HasSideEffect(const Instruction* instr) const {
-    return GetInstructionFlags(instr) & kHasSideEffect;
+    return (GetInstructionFlags(instr) & kHasSideEffect) != 0;
   }
 
   // Return true if the instruction is a memory load.
   bool IsLoadOperation(const Instruction* instr) const {
-    return GetInstructionFlags(instr) & kIsLoadOperation;
+    return (GetInstructionFlags(instr) & kIsLoadOperation) != 0;
+  }
+
+  // Return true if this instruction is usually associated with a deopt check
+  // to validate its input.
+  bool MayNeedDeoptCheck(const Instruction* instr) const {
+    return (GetInstructionFlags(instr) & kMayNeedDeoptCheck) != 0;
+  }
+
+  // Return true if the instruction cannot be moved before the last deopt
+  // point we encountered.
+  bool DependsOnDeoptimization(const Instruction* instr) const {
+    return MayNeedDeoptCheck(instr) || instr->IsDeoptimizeCall() ||
+           HasSideEffect(instr) || IsLoadOperation(instr);
   }
 
   // Identify nops used as a definition point for live-in registers at
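
Note on the scheduling change above: the ready list is now kept sorted by descending total latency at insertion time, so PopBestCandidate can take the first entry whose operands are ready instead of scanning for the critical path. A standalone sketch of that policy (illustration only, not V8 code):

// Sketch (not V8 code): a ready list sorted by descending total latency.
#include <cstdio>
#include <list>

struct Node {
  int total_latency;
  int start_cycle;
};

void AddSorted(std::list<Node*>* ready, Node* node) {
  auto it = ready->begin();
  while (it != ready->end() && (*it)->total_latency >= node->total_latency) ++it;
  ready->insert(it, node);
}

Node* PopBest(std::list<Node*>* ready, int cycle) {
  for (auto it = ready->begin(); it != ready->end(); ++it) {
    if (cycle >= (*it)->start_cycle) {  // Operands ready; list order picks the
      Node* best = *it;                 // highest-latency (critical-path) node.
      ready->erase(it);
      return best;
    }
  }
  return nullptr;
}

int main() {
  Node a{5, 0}, b{9, 0}, c{7, 2};
  std::list<Node*> ready;
  AddSorted(&ready, &a);
  AddSorted(&ready, &b);
  AddSorted(&ready, &c);
  std::printf("best latency at cycle 0: %d\n", PopBest(&ready, 0)->total_latency);
  return 0;
}
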
diff --git a/src/compiler/instruction-selector-impl.h b/src/compiler/instruction-selector-impl.h
index 25d8a99..673d1b0 100644
--- a/src/compiler/instruction-selector-impl.h
+++ b/src/compiler/instruction-selector-impl.h
@@ -90,6 +90,12 @@
                                         GetVReg(node)));
   }
 
+  InstructionOperand UseAnyAtEnd(Node* node) {
+    return Use(node, UnallocatedOperand(UnallocatedOperand::ANY,
+                                        UnallocatedOperand::USED_AT_END,
+                                        GetVReg(node)));
+  }
+
   InstructionOperand UseAny(Node* node) {
     return Use(node, UnallocatedOperand(UnallocatedOperand::ANY,
                                         UnallocatedOperand::USED_AT_START,
diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc
index ac8e64a..b150725 100644
--- a/src/compiler/instruction-selector.cc
+++ b/src/compiler/instruction-selector.cc
@@ -22,7 +22,9 @@
     Zone* zone, size_t node_count, Linkage* linkage,
     InstructionSequence* sequence, Schedule* schedule,
     SourcePositionTable* source_positions, Frame* frame,
-    SourcePositionMode source_position_mode, Features features)
+    SourcePositionMode source_position_mode, Features features,
+    EnableScheduling enable_scheduling,
+    EnableSerialization enable_serialization)
     : zone_(zone),
       linkage_(linkage),
       sequence_(sequence),
@@ -37,13 +39,16 @@
       effect_level_(node_count, 0, zone),
       virtual_registers_(node_count,
                          InstructionOperand::kInvalidVirtualRegister, zone),
+      virtual_register_rename_(zone),
       scheduler_(nullptr),
-      frame_(frame) {
+      enable_scheduling_(enable_scheduling),
+      enable_serialization_(enable_serialization),
+      frame_(frame),
+      instruction_selection_failed_(false) {
   instructions_.reserve(node_count);
 }
 
-
-void InstructionSelector::SelectInstructions() {
+bool InstructionSelector::SelectInstructions() {
   // Mark the inputs of all phis in loop headers as used.
   BasicBlockVector* blocks = schedule()->rpo_order();
   for (auto const block : *blocks) {
@@ -62,22 +67,26 @@
   // Visit each basic block in post order.
   for (auto i = blocks->rbegin(); i != blocks->rend(); ++i) {
     VisitBlock(*i);
+    if (instruction_selection_failed()) return false;
   }
 
   // Schedule the selected instructions.
-  if (FLAG_turbo_instruction_scheduling &&
-      InstructionScheduler::SchedulerSupported()) {
+  if (UseInstructionScheduling()) {
     scheduler_ = new (zone()) InstructionScheduler(zone(), sequence());
   }
 
   for (auto const block : *blocks) {
     InstructionBlock* instruction_block =
         sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
+    for (size_t i = 0; i < instruction_block->phis().size(); i++) {
+      UpdateRenamesInPhi(instruction_block->PhiAt(i));
+    }
     size_t end = instruction_block->code_end();
     size_t start = instruction_block->code_start();
     DCHECK_LE(end, start);
     StartBlock(RpoNumber::FromInt(block->rpo_number()));
     while (start-- > end) {
+      UpdateRenames(instructions_[start]);
       AddInstruction(instructions_[start]);
     }
     EndBlock(RpoNumber::FromInt(block->rpo_number()));
@@ -85,11 +94,11 @@
 #if DEBUG
   sequence()->ValidateSSA();
 #endif
+  return true;
 }
 
 void InstructionSelector::StartBlock(RpoNumber rpo) {
-  if (FLAG_turbo_instruction_scheduling &&
-      InstructionScheduler::SchedulerSupported()) {
+  if (UseInstructionScheduling()) {
     DCHECK_NOT_NULL(scheduler_);
     scheduler_->StartBlock(rpo);
   } else {
@@ -99,8 +108,7 @@
 
 
 void InstructionSelector::EndBlock(RpoNumber rpo) {
-  if (FLAG_turbo_instruction_scheduling &&
-      InstructionScheduler::SchedulerSupported()) {
+  if (UseInstructionScheduling()) {
     DCHECK_NOT_NULL(scheduler_);
     scheduler_->EndBlock(rpo);
   } else {
@@ -110,8 +118,7 @@
 
 
 void InstructionSelector::AddInstruction(Instruction* instr) {
-  if (FLAG_turbo_instruction_scheduling &&
-      InstructionScheduler::SchedulerSupported()) {
+  if (UseInstructionScheduling()) {
     DCHECK_NOT_NULL(scheduler_);
     scheduler_->AddInstruction(instr);
   } else {
@@ -206,6 +213,13 @@
     InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
     size_t input_count, InstructionOperand* inputs, size_t temp_count,
     InstructionOperand* temps) {
+  if (output_count >= Instruction::kMaxOutputCount ||
+      input_count >= Instruction::kMaxInputCount ||
+      temp_count >= Instruction::kMaxTempCount) {
+    set_instruction_selection_failed();
+    return nullptr;
+  }
+
   Instruction* instr =
       Instruction::New(instruction_zone(), opcode, output_count, outputs,
                        input_count, inputs, temp_count, temps);
@@ -255,6 +269,53 @@
   return true;
 }
 
+void InstructionSelector::UpdateRenames(Instruction* instruction) {
+  for (size_t i = 0; i < instruction->InputCount(); i++) {
+    TryRename(instruction->InputAt(i));
+  }
+}
+
+void InstructionSelector::UpdateRenamesInPhi(PhiInstruction* phi) {
+  for (size_t i = 0; i < phi->operands().size(); i++) {
+    int vreg = phi->operands()[i];
+    int renamed = GetRename(vreg);
+    if (vreg != renamed) {
+      phi->RenameInput(i, renamed);
+    }
+  }
+}
+
+int InstructionSelector::GetRename(int virtual_register) {
+  int rename = virtual_register;
+  while (true) {
+    if (static_cast<size_t>(rename) >= virtual_register_rename_.size()) break;
+    int next = virtual_register_rename_[rename];
+    if (next == InstructionOperand::kInvalidVirtualRegister) {
+      break;
+    }
+    rename = next;
+  }
+  return rename;
+}
+
+void InstructionSelector::TryRename(InstructionOperand* op) {
+  if (!op->IsUnallocated()) return;
+  int vreg = UnallocatedOperand::cast(op)->virtual_register();
+  int rename = GetRename(vreg);
+  if (rename != vreg) {
+    UnallocatedOperand::cast(op)->set_virtual_register(rename);
+  }
+}
+
+void InstructionSelector::SetRename(const Node* node, const Node* rename) {
+  int vreg = GetVirtualRegister(node);
+  if (static_cast<size_t>(vreg) >= virtual_register_rename_.size()) {
+    int invalid = InstructionOperand::kInvalidVirtualRegister;
+    virtual_register_rename_.resize(vreg + 1, invalid);
+  }
+  virtual_register_rename_[vreg] = GetVirtualRegister(rename);
+}
+
 int InstructionSelector::GetVirtualRegister(const Node* node) {
   DCHECK_NOT_NULL(node);
   size_t const id = node->id();
@@ -330,6 +391,12 @@
   effect_level_[id] = effect_level;
 }
 
+bool InstructionSelector::CanAddressRelativeToRootsRegister() const {
+  return (enable_serialization_ == kDisableSerialization &&
+          (linkage()->GetIncomingDescriptor()->flags() &
+           CallDescriptor::kCanUseRoots));
+}
+
 void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
                                                const InstructionOperand& op) {
   UnallocatedOperand unalloc = UnallocatedOperand::cast(op);
@@ -350,6 +417,10 @@
 InstructionOperand OperandForDeopt(OperandGenerator* g, Node* input,
                                    FrameStateInputKind kind,
                                    MachineRepresentation rep) {
+  if (rep == MachineRepresentation::kNone) {
+    return g->TempImmediate(FrameStateDescriptor::kImpossibleValue);
+  }
+
   switch (input->opcode()) {
     case IrOpcode::kInt32Constant:
     case IrOpcode::kInt64Constant:
@@ -362,15 +433,13 @@
       UNREACHABLE();
       break;
     default:
-      if (rep == MachineRepresentation::kNone) {
-        return g->TempImmediate(FrameStateDescriptor::kImpossibleValue);
-      } else {
-        switch (kind) {
-          case FrameStateInputKind::kStackSlot:
-            return g->UseUniqueSlot(input);
-          case FrameStateInputKind::kAny:
-            return g->UseAny(input);
-        }
+      switch (kind) {
+        case FrameStateInputKind::kStackSlot:
+          return g->UseUniqueSlot(input);
+        case FrameStateInputKind::kAny:
+          // Currently deopts "wrap" other operations, so the deopt's inputs
+          // are potentially needed until the end of the deoptimizing code.
+          return g->UseAnyAtEnd(input);
       }
   }
   UNREACHABLE();
@@ -716,7 +785,6 @@
   }
 }
 
-
 void InstructionSelector::VisitBlock(BasicBlock* block) {
   DCHECK(!current_block_);
   current_block_ = block;
@@ -753,6 +821,7 @@
     // up".
     size_t current_node_end = instructions_.size();
     VisitNode(node);
+    if (instruction_selection_failed()) return;
     std::reverse(instructions_.begin() + current_node_end, instructions_.end());
     if (instructions_.size() == current_node_end) continue;
     // Mark source position on first instruction emitted.
@@ -1053,8 +1122,14 @@
       return VisitUint64LessThanOrEqual(node);
     case IrOpcode::kUint64Mod:
       return MarkAsWord64(node), VisitUint64Mod(node);
+    case IrOpcode::kBitcastTaggedToWord:
+      return MarkAsRepresentation(MachineType::PointerRepresentation(), node),
+             VisitBitcastTaggedToWord(node);
     case IrOpcode::kBitcastWordToTagged:
       return MarkAsReference(node), VisitBitcastWordToTagged(node);
+    case IrOpcode::kBitcastWordToTaggedSigned:
+      return MarkAsRepresentation(MachineRepresentation::kTaggedSigned, node),
+             EmitIdentity(node);
     case IrOpcode::kChangeFloat32ToFloat64:
       return MarkAsFloat64(node), VisitChangeFloat32ToFloat64(node);
     case IrOpcode::kChangeInt32ToFloat64:
@@ -1065,19 +1140,6 @@
       return MarkAsWord32(node), VisitChangeFloat64ToInt32(node);
     case IrOpcode::kChangeFloat64ToUint32:
       return MarkAsWord32(node), VisitChangeFloat64ToUint32(node);
-    case IrOpcode::kImpossibleToWord32:
-      return MarkAsWord32(node), VisitImpossibleToWord32(node);
-    case IrOpcode::kImpossibleToWord64:
-      return MarkAsWord64(node), VisitImpossibleToWord64(node);
-    case IrOpcode::kImpossibleToFloat32:
-      return MarkAsFloat32(node), VisitImpossibleToFloat32(node);
-    case IrOpcode::kImpossibleToFloat64:
-      return MarkAsFloat64(node), VisitImpossibleToFloat64(node);
-    case IrOpcode::kImpossibleToTagged:
-      MarkAsRepresentation(MachineType::PointerRepresentation(), node);
-      return VisitImpossibleToTagged(node);
-    case IrOpcode::kImpossibleToBit:
-      return MarkAsWord32(node), VisitImpossibleToBit(node);
     case IrOpcode::kFloat64SilenceNaN:
       MarkAsFloat64(node);
       if (CanProduceSignalingNaN(node->InputAt(0))) {
@@ -1304,9 +1366,15 @@
     }
     case IrOpcode::kAtomicStore:
       return VisitAtomicStore(node);
+    case IrOpcode::kProtectedLoad:
+      return VisitProtectedLoad(node);
     case IrOpcode::kUnsafePointerAdd:
       MarkAsRepresentation(MachineType::PointerRepresentation(), node);
       return VisitUnsafePointerAdd(node);
+    case IrOpcode::kCreateInt32x4:
+      return MarkAsSimd128(node), VisitCreateInt32x4(node);
+    case IrOpcode::kInt32x4ExtractLane:
+      return MarkAsWord32(node), VisitInt32x4ExtractLane(node);
     default:
       V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d",
                node->opcode(), node->op()->mnemonic(), node->id());
@@ -1314,42 +1382,6 @@
   }
 }
 
-void InstructionSelector::VisitImpossibleToWord32(Node* node) {
-  OperandGenerator g(this);
-  Emit(kArchImpossible, g.DefineAsConstant(node, Constant(0)));
-}
-
-void InstructionSelector::VisitImpossibleToWord64(Node* node) {
-  OperandGenerator g(this);
-  Emit(kArchImpossible,
-       g.DefineAsConstant(node, Constant(static_cast<int64_t>(0))));
-}
-
-void InstructionSelector::VisitImpossibleToFloat32(Node* node) {
-  OperandGenerator g(this);
-  Emit(kArchImpossible, g.DefineAsConstant(node, Constant(0.0f)));
-}
-
-void InstructionSelector::VisitImpossibleToFloat64(Node* node) {
-  OperandGenerator g(this);
-  Emit(kArchImpossible, g.DefineAsConstant(node, Constant(0.0)));
-}
-
-void InstructionSelector::VisitImpossibleToBit(Node* node) {
-  OperandGenerator g(this);
-  Emit(kArchImpossible, g.DefineAsConstant(node, Constant(0)));
-}
-
-void InstructionSelector::VisitImpossibleToTagged(Node* node) {
-  OperandGenerator g(this);
-#if V8_TARGET_ARCH_64_BIT
-  Emit(kArchImpossible,
-       g.DefineAsConstant(node, Constant(static_cast<int64_t>(0))));
-#else   // V8_TARGET_ARCH_64_BIT
-  Emit(kArchImpossible, g.DefineAsConstant(node, Constant(0)));
-#endif  // V8_TARGET_ARCH_64_BIT
-}
-
 void InstructionSelector::VisitLoadStackPointer(Node* node) {
   OperandGenerator g(this);
   Emit(kArchStackPointer, g.DefineAsRegister(node));
@@ -1493,8 +1525,14 @@
        sequence()->AddImmediate(Constant(slot)), 0, nullptr);
 }
 
+void InstructionSelector::VisitBitcastTaggedToWord(Node* node) {
+  OperandGenerator g(this);
+  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(node->InputAt(0)));
+}
+
 void InstructionSelector::VisitBitcastWordToTagged(Node* node) {
-  EmitIdentity(node);
+  OperandGenerator g(this);
+  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(node->InputAt(0)));
 }
 
 // 32 bit targets do not implement the following instructions.
@@ -1647,7 +1685,6 @@
 void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
   UNIMPLEMENTED();
 }
-
 #endif  // V8_TARGET_ARCH_32_BIT
 
 // 64 bit targets do not implement the following instructions.
@@ -1665,6 +1702,14 @@
 void InstructionSelector::VisitWord32PairSar(Node* node) { UNIMPLEMENTED(); }
 #endif  // V8_TARGET_ARCH_64_BIT
 
+#if !V8_TARGET_ARCH_X64
+void InstructionSelector::VisitCreateInt32x4(Node* node) { UNIMPLEMENTED(); }
+
+void InstructionSelector::VisitInt32x4ExtractLane(Node* node) {
+  UNIMPLEMENTED();
+}
+#endif  // !V8_TARGET_ARCH_X64
+
 void InstructionSelector::VisitFinishRegion(Node* node) { EmitIdentity(node); }
 
 void InstructionSelector::VisitParameter(Node* node) {
@@ -1680,13 +1725,17 @@
   Emit(kArchNop, op);
 }
 
+namespace {
+LinkageLocation ExceptionLocation() {
+  return LinkageLocation::ForRegister(kReturnRegister0.code(),
+                                      MachineType::IntPtr());
+}
+}  // namespace
 
 void InstructionSelector::VisitIfException(Node* node) {
   OperandGenerator g(this);
-  Node* call = node->InputAt(1);
-  DCHECK_EQ(IrOpcode::kCall, call->opcode());
-  const CallDescriptor* descriptor = CallDescriptorOf(call->op());
-  Emit(kArchNop, g.DefineAsLocation(node, descriptor->GetReturnLocation(0)));
+  DCHECK_EQ(IrOpcode::kCall, node->InputAt(1)->opcode());
+  Emit(kArchNop, g.DefineAsLocation(node, ExceptionLocation()));
 }
 
 
@@ -1812,9 +1861,11 @@
   // Emit the call instruction.
   size_t const output_count = buffer.outputs.size();
   auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
-  Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
-       &buffer.instruction_args.front())
-      ->MarkAsCall();
+  Instruction* call_instr =
+      Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+           &buffer.instruction_args.front());
+  if (instruction_selection_failed()) return;
+  call_instr->MarkAsCall();
 }
 
 
@@ -1920,9 +1971,11 @@
     // Emit the call instruction.
     size_t output_count = buffer.outputs.size();
     auto* outputs = &buffer.outputs.front();
-    Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
-         &buffer.instruction_args.front())
-        ->MarkAsCall();
+    Instruction* call_instr =
+        Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
+             &buffer.instruction_args.front());
+    if (instruction_selection_failed()) return;
+    call_instr->MarkAsCall();
     Emit(kArchRet, 0, nullptr, output_count, outputs);
   }
 }
@@ -1984,8 +2037,8 @@
 
 void InstructionSelector::EmitIdentity(Node* node) {
   OperandGenerator g(this);
-  Node* value = node->InputAt(0);
-  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
+  MarkAsUsed(node->InputAt(0));
+  SetRename(node, node->InputAt(0));
 }
 
 void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind,
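
Note on the renaming change above: EmitIdentity now records a virtual-register rename instead of emitting a nop move, and GetRename follows the rename chain to the final register when instructions and phis are updated. A standalone sketch of that chain lookup (illustration only, not V8 code):

// Sketch (not V8 code): following a virtual-register rename chain.
#include <cstdio>
#include <vector>

const int kInvalid = -1;

void SetRename(std::vector<int>* table, int vreg, int target) {
  if (static_cast<size_t>(vreg) >= table->size()) table->resize(vreg + 1, kInvalid);
  (*table)[vreg] = target;
}

int GetRename(const std::vector<int>& table, int vreg) {
  int rename = vreg;
  while (static_cast<size_t>(rename) < table.size() &&
         table[rename] != kInvalid) {
    rename = table[rename];  // Follow the chain to the final name.
  }
  return rename;
}

int main() {
  std::vector<int> table;
  SetRename(&table, 5, 3);  // v5 is an identity of v3.
  SetRename(&table, 3, 1);  // v3 is an identity of v1.
  std::printf("v5 -> v%d\n", GetRename(table, 5));  // Prints v5 -> v1.
  return 0;
}
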
diff --git a/src/compiler/instruction-selector.h b/src/compiler/instruction-selector.h
index f9f43e9..2981f90 100644
--- a/src/compiler/instruction-selector.h
+++ b/src/compiler/instruction-selector.h
@@ -8,11 +8,11 @@
 #include <map>
 
 #include "src/compiler/common-operator.h"
-#include "src/compiler/instruction.h"
 #include "src/compiler/instruction-scheduler.h"
+#include "src/compiler/instruction.h"
 #include "src/compiler/machine-operator.h"
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -48,16 +48,22 @@
   class Features;
 
   enum SourcePositionMode { kCallSourcePositions, kAllSourcePositions };
+  enum EnableScheduling { kDisableScheduling, kEnableScheduling };
+  enum EnableSerialization { kDisableSerialization, kEnableSerialization };
 
   InstructionSelector(
       Zone* zone, size_t node_count, Linkage* linkage,
       InstructionSequence* sequence, Schedule* schedule,
       SourcePositionTable* source_positions, Frame* frame,
       SourcePositionMode source_position_mode = kCallSourcePositions,
-      Features features = SupportedFeatures());
+      Features features = SupportedFeatures(),
+      EnableScheduling enable_scheduling = FLAG_turbo_instruction_scheduling
+                                               ? kEnableScheduling
+                                               : kDisableScheduling,
+      EnableSerialization enable_serialization = kDisableSerialization);
 
   // Visit code for the entire graph with the included schedule.
-  void SelectInstructions();
+  bool SelectInstructions();
 
   void StartBlock(RpoNumber rpo);
   void EndBlock(RpoNumber rpo);
@@ -194,15 +200,31 @@
   int GetVirtualRegister(const Node* node);
   const std::map<NodeId, int> GetVirtualRegistersForTesting() const;
 
+  // Check if we can generate loads and stores of ExternalConstants relative
+  // to the roots register, i.e. if both a root register is available for this
+  // compilation unit and the serializer is disabled.
+  bool CanAddressRelativeToRootsRegister() const;
+
   Isolate* isolate() const { return sequence()->isolate(); }
 
  private:
   friend class OperandGenerator;
 
+  bool UseInstructionScheduling() const {
+    return (enable_scheduling_ == kEnableScheduling) &&
+           InstructionScheduler::SchedulerSupported();
+  }
+
   void EmitTableSwitch(const SwitchInfo& sw, InstructionOperand& index_operand);
   void EmitLookupSwitch(const SwitchInfo& sw,
                         InstructionOperand& value_operand);
 
+  void TryRename(InstructionOperand* op);
+  int GetRename(int virtual_register);
+  void SetRename(const Node* node, const Node* rename);
+  void UpdateRenames(Instruction* instruction);
+  void UpdateRenamesInPhi(PhiInstruction* phi);
+
   // Inform the instruction selection that {node} was just defined.
   void MarkAsDefined(Node* node);
 
@@ -228,6 +250,9 @@
   void MarkAsFloat64(Node* node) {
     MarkAsRepresentation(MachineRepresentation::kFloat64, node);
   }
+  void MarkAsSimd128(Node* node) {
+    MarkAsRepresentation(MachineRepresentation::kSimd128, node);
+  }
   void MarkAsReference(Node* node) {
     MarkAsRepresentation(MachineRepresentation::kTagged, node);
   }
@@ -276,6 +301,8 @@
 
 #define DECLARE_GENERATOR(x) void Visit##x(Node* node);
   MACHINE_OP_LIST(DECLARE_GENERATOR)
+  MACHINE_SIMD_RETURN_NUM_OP_LIST(DECLARE_GENERATOR)
+  MACHINE_SIMD_RETURN_SIMD_OP_LIST(DECLARE_GENERATOR)
 #undef DECLARE_GENERATOR
 
   void VisitFinishRegion(Node* node);
@@ -312,6 +339,11 @@
   Zone* instruction_zone() const { return sequence()->zone(); }
   Zone* zone() const { return zone_; }
 
+  void set_instruction_selection_failed() {
+    instruction_selection_failed_ = true;
+  }
+  bool instruction_selection_failed() { return instruction_selection_failed_; }
+
   // ===========================================================================
 
   Zone* const zone_;
@@ -327,8 +359,12 @@
   BoolVector used_;
   IntVector effect_level_;
   IntVector virtual_registers_;
+  IntVector virtual_register_rename_;
   InstructionScheduler* scheduler_;
+  EnableScheduling enable_scheduling_;
+  EnableSerialization enable_serialization_;
   Frame* frame_;
+  bool instruction_selection_failed_;
 };
 
 }  // namespace compiler
diff --git a/src/compiler/instruction.cc b/src/compiler/instruction.cc
index 615b644..0df7ca0 100644
--- a/src/compiler/instruction.cc
+++ b/src/compiler/instruction.cc
@@ -314,7 +314,6 @@
   return true;
 }
 
-
 void Instruction::Print(const RegisterConfiguration* config) const {
   OFStream os(stdout);
   PrintableInstruction wrapper;
@@ -569,6 +568,10 @@
   operands_[offset] = virtual_register;
 }
 
+void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
+  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
+  operands_[offset] = virtual_register;
+}
 
 InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                    RpoNumber loop_header, RpoNumber loop_end,
@@ -631,6 +634,58 @@
   return instr_block;
 }
 
+std::ostream& operator<<(std::ostream& os,
+                         PrintableInstructionBlock& printable_block) {
+  const InstructionBlock* block = printable_block.block_;
+  const RegisterConfiguration* config = printable_block.register_configuration_;
+  const InstructionSequence* code = printable_block.code_;
+
+  os << "B" << block->rpo_number();
+  os << ": AO#" << block->ao_number();
+  if (block->IsDeferred()) os << " (deferred)";
+  if (!block->needs_frame()) os << " (no frame)";
+  if (block->must_construct_frame()) os << " (construct frame)";
+  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
+  if (block->IsLoopHeader()) {
+    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
+       << ")";
+  }
+  os << "  instructions: [" << block->code_start() << ", " << block->code_end()
+     << ")" << std::endl
+     << " predecessors:";
+
+  for (RpoNumber pred : block->predecessors()) {
+    os << " B" << pred.ToInt();
+  }
+  os << std::endl;
+
+  for (const PhiInstruction* phi : block->phis()) {
+    PrintableInstructionOperand printable_op = {config, phi->output()};
+    os << "     phi: " << printable_op << " =";
+    for (int input : phi->operands()) {
+      os << " v" << input;
+    }
+    os << std::endl;
+  }
+
+  ScopedVector<char> buf(32);
+  PrintableInstruction printable_instr;
+  printable_instr.register_configuration_ = config;
+  for (int j = block->first_instruction_index();
+       j <= block->last_instruction_index(); j++) {
+    // TODO(svenpanne) Add some basic formatting to our streams.
+    SNPrintF(buf, "%5d", j);
+    printable_instr.instr_ = code->InstructionAt(j);
+    os << "   " << buf.start() << ": " << printable_instr << std::endl;
+  }
+
+  for (RpoNumber succ : block->successors()) {
+    os << " B" << succ.ToInt();
+  }
+  os << std::endl;
+  return os;
+}
+
 InstructionBlocks* InstructionSequence::InstructionBlocksFor(
     Zone* zone, const Schedule* schedule) {
   InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
@@ -874,7 +929,6 @@
   source_positions_.insert(std::make_pair(instr, value));
 }
 
-
 void InstructionSequence::Print(const RegisterConfiguration* config) const {
   OFStream os(stdout);
   PrintableInstructionSequence wrapper;
@@ -891,49 +945,8 @@
   RpoNumber rpo = RpoNumber::FromInt(block_id);
   const InstructionBlock* block = InstructionBlockAt(rpo);
   CHECK(block->rpo_number() == rpo);
-
-  os << "B" << block->rpo_number();
-  os << ": AO#" << block->ao_number();
-  if (block->IsDeferred()) os << " (deferred)";
-  if (!block->needs_frame()) os << " (no frame)";
-  if (block->must_construct_frame()) os << " (construct frame)";
-  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
-  if (block->IsLoopHeader()) {
-    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
-       << ")";
-  }
-  os << "  instructions: [" << block->code_start() << ", " << block->code_end()
-     << ")\n  predecessors:";
-
-  for (RpoNumber pred : block->predecessors()) {
-    os << " B" << pred.ToInt();
-  }
-  os << "\n";
-
-  for (const PhiInstruction* phi : block->phis()) {
-    PrintableInstructionOperand printable_op = {config, phi->output()};
-    os << "     phi: " << printable_op << " =";
-    for (int input : phi->operands()) {
-      os << " v" << input;
-    }
-    os << "\n";
-  }
-
-  ScopedVector<char> buf(32);
-  PrintableInstruction printable_instr;
-  printable_instr.register_configuration_ = config;
-  for (int j = block->first_instruction_index();
-       j <= block->last_instruction_index(); j++) {
-    // TODO(svenpanne) Add some basic formatting to our streams.
-    SNPrintF(buf, "%5d", j);
-    printable_instr.instr_ = InstructionAt(j);
-    os << "   " << buf.start() << ": " << printable_instr << "\n";
-  }
-
-  for (RpoNumber succ : block->successors()) {
-    os << " B" << succ.ToInt();
-  }
-  os << "\n";
+  PrintableInstructionBlock printable_block = {config, block, this};
+  os << printable_block << std::endl;
 }
 
 void InstructionSequence::PrintBlock(int block_id) const {
@@ -1020,8 +1033,11 @@
        it != code.constants_.end(); ++i, ++it) {
     os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
   }
+  PrintableInstructionBlock printable_block = {
+      printable.register_configuration_, nullptr, printable.sequence_};
   for (int i = 0; i < code.InstructionBlockCount(); i++) {
-    printable.sequence_->PrintBlock(printable.register_configuration_, i);
+    printable_block.block_ = code.InstructionBlockAt(RpoNumber::FromInt(i));
+    os << printable_block;
   }
   return os;
 }
diff --git a/src/compiler/instruction.h b/src/compiler/instruction.h
index b5aea70..b5c5914 100644
--- a/src/compiler/instruction.h
+++ b/src/compiler/instruction.h
@@ -17,7 +17,7 @@
 #include "src/compiler/source-position.h"
 #include "src/macro-assembler.h"
 #include "src/register-configuration.h"
-#include "src/zone-allocator.h"
+#include "src/zone/zone-allocator.h"
 
 namespace v8 {
 namespace internal {
@@ -33,7 +33,17 @@
 
   // TODO(dcarney): recover bit. INVALID can be represented as UNALLOCATED with
   // kInvalidVirtualRegister and some DCHECKS.
-  enum Kind { INVALID, UNALLOCATED, CONSTANT, IMMEDIATE, EXPLICIT, ALLOCATED };
+  enum Kind {
+    INVALID,
+    UNALLOCATED,
+    CONSTANT,
+    IMMEDIATE,
+    // Location operand kinds.
+    EXPLICIT,
+    ALLOCATED,
+    FIRST_LOCATION_OPERAND_KIND = EXPLICIT
+    // Location operand kinds must be last.
+  };
 
   InstructionOperand() : InstructionOperand(INVALID) {}
 
@@ -64,12 +74,16 @@
   INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
 #undef INSTRUCTION_OPERAND_PREDICATE
 
+  inline bool IsAnyLocationOperand() const;
+  inline bool IsLocationOperand() const;
+  inline bool IsFPLocationOperand() const;
   inline bool IsAnyRegister() const;
   inline bool IsRegister() const;
   inline bool IsFPRegister() const;
   inline bool IsFloatRegister() const;
   inline bool IsDoubleRegister() const;
   inline bool IsSimd128Register() const;
+  inline bool IsAnyStackSlot() const;
   inline bool IsStackSlot() const;
   inline bool IsFPStackSlot() const;
   inline bool IsFloatStackSlot() const;
@@ -105,6 +119,7 @@
 
   bool InterferesWith(const InstructionOperand& that) const;
 
+  // APIs to aid debugging. For general-stream APIs, use operator<<
   void Print(const RegisterConfiguration* config) const;
   void Print() const;
 
@@ -481,17 +496,17 @@
   }
 
   static LocationOperand* cast(InstructionOperand* op) {
-    DCHECK(ALLOCATED == op->kind() || EXPLICIT == op->kind());
+    DCHECK(op->IsAnyLocationOperand());
     return static_cast<LocationOperand*>(op);
   }
 
   static const LocationOperand* cast(const InstructionOperand* op) {
-    DCHECK(ALLOCATED == op->kind() || EXPLICIT == op->kind());
+    DCHECK(op->IsAnyLocationOperand());
     return static_cast<const LocationOperand*>(op);
   }
 
   static LocationOperand cast(const InstructionOperand& op) {
-    DCHECK(ALLOCATED == op.kind() || EXPLICIT == op.kind());
+    DCHECK(op.IsAnyLocationOperand());
     return *static_cast<const LocationOperand*>(&op);
   }
 
@@ -531,9 +546,22 @@
 
 #undef INSTRUCTION_OPERAND_CASTS
 
+bool InstructionOperand::IsAnyLocationOperand() const {
+  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
+}
+
+bool InstructionOperand::IsLocationOperand() const {
+  return IsAnyLocationOperand() &&
+         !IsFloatingPoint(LocationOperand::cast(this)->representation());
+}
+
+bool InstructionOperand::IsFPLocationOperand() const {
+  return IsAnyLocationOperand() &&
+         IsFloatingPoint(LocationOperand::cast(this)->representation());
+}
 
 bool InstructionOperand::IsAnyRegister() const {
-  return (IsAllocated() || IsExplicit()) &&
+  return IsAnyLocationOperand() &&
          LocationOperand::cast(this)->location_kind() ==
              LocationOperand::REGISTER;
 }
@@ -567,22 +595,24 @@
              MachineRepresentation::kSimd128;
 }
 
-bool InstructionOperand::IsStackSlot() const {
-  return (IsAllocated() || IsExplicit()) &&
+bool InstructionOperand::IsAnyStackSlot() const {
+  return IsAnyLocationOperand() &&
          LocationOperand::cast(this)->location_kind() ==
-             LocationOperand::STACK_SLOT &&
+             LocationOperand::STACK_SLOT;
+}
+
+bool InstructionOperand::IsStackSlot() const {
+  return IsAnyStackSlot() &&
          !IsFloatingPoint(LocationOperand::cast(this)->representation());
 }
 
 bool InstructionOperand::IsFPStackSlot() const {
-  return (IsAllocated() || IsExplicit()) &&
-         LocationOperand::cast(this)->location_kind() ==
-             LocationOperand::STACK_SLOT &&
+  return IsAnyStackSlot() &&
          IsFloatingPoint(LocationOperand::cast(this)->representation());
 }
 
 bool InstructionOperand::IsFloatStackSlot() const {
-  return (IsAllocated() || IsExplicit()) &&
+  return IsAnyLocationOperand() &&
          LocationOperand::cast(this)->location_kind() ==
              LocationOperand::STACK_SLOT &&
          LocationOperand::cast(this)->representation() ==
@@ -590,7 +620,7 @@
 }
 
 bool InstructionOperand::IsDoubleStackSlot() const {
-  return (IsAllocated() || IsExplicit()) &&
+  return IsAnyLocationOperand() &&
          LocationOperand::cast(this)->location_kind() ==
              LocationOperand::STACK_SLOT &&
          LocationOperand::cast(this)->representation() ==
@@ -598,7 +628,7 @@
 }
 
 bool InstructionOperand::IsSimd128StackSlot() const {
-  return (IsAllocated() || IsExplicit()) &&
+  return IsAnyLocationOperand() &&
          LocationOperand::cast(this)->location_kind() ==
              LocationOperand::STACK_SLOT &&
          LocationOperand::cast(this)->representation() ==
@@ -606,7 +636,7 @@
 }
 
 uint64_t InstructionOperand::GetCanonicalizedValue() const {
-  if (IsAllocated() || IsExplicit()) {
+  if (IsAnyLocationOperand()) {
     MachineRepresentation canonical = MachineRepresentation::kNone;
     if (IsFPRegister()) {
       // We treat all FP register operands the same for simple aliasing.
@@ -672,6 +702,7 @@
     return source_.IsInvalid();
   }
 
+  // APIs to aid debugging. For general-stream APIs, use operator<<
   void Print(const RegisterConfiguration* config) const;
   void Print() const;
 
@@ -856,10 +887,7 @@
     reference_map_ = nullptr;
   }
 
-  bool IsNop() const {
-    return arch_opcode() == kArchNop && InputCount() == 0 &&
-           OutputCount() == 0 && TempCount() == 0;
-  }
+  bool IsNop() const { return arch_opcode() == kArchNop; }
 
   bool IsDeoptimizeCall() const {
     return arch_opcode() == ArchOpcode::kArchDeoptimize ||
@@ -915,9 +943,18 @@
     block_ = block;
   }
 
+  // APIs to aid debugging. For general-stream APIs, use operator<<
   void Print(const RegisterConfiguration* config) const;
   void Print() const;
 
+  typedef BitField<size_t, 0, 8> OutputCountField;
+  typedef BitField<size_t, 8, 16> InputCountField;
+  typedef BitField<size_t, 24, 6> TempCountField;
+
+  static const size_t kMaxOutputCount = OutputCountField::kMax;
+  static const size_t kMaxInputCount = InputCountField::kMax;
+  static const size_t kMaxTempCount = TempCountField::kMax;
+
  private:
   explicit Instruction(InstructionCode opcode);
 
@@ -926,9 +963,6 @@
               InstructionOperand* inputs, size_t temp_count,
               InstructionOperand* temps);
 
-  typedef BitField<size_t, 0, 8> OutputCountField;
-  typedef BitField<size_t, 8, 16> InputCountField;
-  typedef BitField<size_t, 24, 6> TempCountField;
   typedef BitField<bool, 30, 1> IsCallField;
 
   InstructionCode opcode_;
@@ -1184,6 +1218,7 @@
   PhiInstruction(Zone* zone, int virtual_register, size_t input_count);
 
   void SetInput(size_t offset, int virtual_register);
+  void RenameInput(size_t offset, int virtual_register);
 
   int virtual_register() const { return virtual_register_; }
   const IntVector& operands() const { return operands_; }
@@ -1251,6 +1286,7 @@
 
   typedef ZoneVector<PhiInstruction*> PhiInstructions;
   const PhiInstructions& phis() const { return phis_; }
+  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
   void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }
 
   void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }
@@ -1285,6 +1321,17 @@
   RpoNumber last_deferred_;
 };
 
+class InstructionSequence;
+
+struct PrintableInstructionBlock {
+  const RegisterConfiguration* register_configuration_;
+  const InstructionBlock* block_;
+  const InstructionSequence* code_;
+};
+
+std::ostream& operator<<(std::ostream& os,
+                         const PrintableInstructionBlock& printable_block);
+
 typedef ZoneDeque<Constant> ConstantDeque;
 typedef std::map<int, Constant, std::less<int>,
                  zone_allocator<std::pair<const int, Constant> > > ConstantMap;
@@ -1343,8 +1390,7 @@
   void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);
 
   bool IsReference(int virtual_register) const {
-    return GetRepresentation(virtual_register) ==
-           MachineRepresentation::kTagged;
+    return CanBeTaggedPointer(GetRepresentation(virtual_register));
   }
   bool IsFP(int virtual_register) const {
     return IsFloatingPoint(GetRepresentation(virtual_register));
@@ -1445,6 +1491,8 @@
     }
     return false;
   }
+
+  // APIs to aid debugging. For general-stream APIs, use operator<<
   void Print(const RegisterConfiguration* config) const;
   void Print() const;
 
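Aside (illustrative only, not part of the patch): the instruction.h hunk above promotes the OutputCountField/InputCountField/TempCountField typedefs to the public section and exposes their maxima. A minimal standalone C++ sketch of that bit layout, using hand-rolled shifts and masks rather than V8's actual BitField template, with the positions and widths taken from the typedefs shown above:

#include <cassert>
#include <cstdint>

// Outputs occupy bits [0,8), inputs bits [8,24), temps bits [24,30),
// mirroring BitField<size_t, 0, 8>, BitField<size_t, 8, 16> and
// BitField<size_t, 24, 6>. With these widths the maxima are 255, 65535
// and 63, presumably what kMaxOutputCount/kMaxInputCount/kMaxTempCount
// evaluate to.
constexpr uint32_t PackCounts(uint32_t outputs, uint32_t inputs,
                              uint32_t temps) {
  return (outputs & 0xFFu) | ((inputs & 0xFFFFu) << 8) |
         ((temps & 0x3Fu) << 24);
}

int main() {
  uint32_t bit_field = PackCounts(2, 300, 5);
  assert((bit_field & 0xFFu) == 2);             // OutputCountField
  assert(((bit_field >> 8) & 0xFFFFu) == 300);  // InputCountField
  assert(((bit_field >> 24) & 0x3Fu) == 5);     // TempCountField
  return 0;
}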
diff --git a/src/compiler/int64-lowering.cc b/src/compiler/int64-lowering.cc
index 737947a..539a372 100644
--- a/src/compiler/int64-lowering.cc
+++ b/src/compiler/int64-lowering.cc
@@ -13,7 +13,7 @@
 
 #include "src/compiler/node.h"
 #include "src/wasm/wasm-module.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -778,6 +778,18 @@
       }
       break;
     }
+    case IrOpcode::kProjection: {
+      Node* call = node->InputAt(0);
+      DCHECK_EQ(IrOpcode::kCall, call->opcode());
+      CallDescriptor* descriptor =
+          const_cast<CallDescriptor*>(CallDescriptorOf(call->op()));
+      for (size_t i = 0; i < descriptor->ReturnCount(); i++) {
+        if (descriptor->GetReturnType(i) == MachineType::Int64()) {
+          UNREACHABLE();  // TODO(titzer): implement multiple i64 returns.
+        }
+      }
+      break;
+    }
     case IrOpcode::kWord64ReverseBytes: {
       Node* input = node->InputAt(0);
       ReplaceNode(node, graph()->NewNode(machine()->Word32ReverseBytes().op(),
diff --git a/src/compiler/int64-lowering.h b/src/compiler/int64-lowering.h
index 4ec4e82..084c07a 100644
--- a/src/compiler/int64-lowering.h
+++ b/src/compiler/int64-lowering.h
@@ -9,7 +9,7 @@
 #include "src/compiler/graph.h"
 #include "src/compiler/machine-operator.h"
 #include "src/compiler/node-marker.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/js-builtin-reducer.cc b/src/compiler/js-builtin-reducer.cc
index 926bd3f..41d4a00 100644
--- a/src/compiler/js-builtin-reducer.cc
+++ b/src/compiler/js-builtin-reducer.cc
@@ -10,9 +10,9 @@
 #include "src/compiler/node-matchers.h"
 #include "src/compiler/node-properties.h"
 #include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
+#include "src/compiler/types.h"
 #include "src/objects-inl.h"
-#include "src/type-cache.h"
-#include "src/types.h"
 
 namespace v8 {
 namespace internal {
@@ -275,8 +275,8 @@
     // here is to learn on deopt, i.e. disable Array.prototype.push inlining
     // for this function.
     if (IsFastSmiElementsKind(receiver_map->elements_kind())) {
-      value = effect = graph()->NewNode(simplified()->CheckTaggedSigned(),
-                                        value, effect, control);
+      value = effect =
+          graph()->NewNode(simplified()->CheckSmi(), value, effect, control);
     } else if (IsFastDoubleElementsKind(receiver_map->elements_kind())) {
       value = effect =
           graph()->NewNode(simplified()->CheckNumber(), value, effect, control);
@@ -323,6 +323,123 @@
   return NoChange();
 }
 
+namespace {
+
+bool HasInstanceTypeWitness(Node* receiver, Node* effect,
+                            InstanceType instance_type) {
+  for (Node* dominator = effect;;) {
+    if (dominator->opcode() == IrOpcode::kCheckMaps &&
+        dominator->InputAt(0) == receiver) {
+      // Check if all maps have the given {instance_type}.
+      for (int i = 1; i < dominator->op()->ValueInputCount(); ++i) {
+        Node* const map = NodeProperties::GetValueInput(dominator, i);
+        Type* const map_type = NodeProperties::GetType(map);
+        if (!map_type->IsConstant()) return false;
+        Handle<Map> const map_value =
+            Handle<Map>::cast(map_type->AsConstant()->Value());
+        if (map_value->instance_type() != instance_type) return false;
+      }
+      return true;
+    }
+    switch (dominator->opcode()) {
+      case IrOpcode::kStoreField: {
+        FieldAccess const& access = FieldAccessOf(dominator->op());
+        if (access.base_is_tagged == kTaggedBase &&
+            access.offset == HeapObject::kMapOffset) {
+          return false;
+        }
+        break;
+      }
+      case IrOpcode::kStoreElement:
+      case IrOpcode::kStoreTypedElement:
+        break;
+      default: {
+        DCHECK_EQ(1, dominator->op()->EffectOutputCount());
+        if (dominator->op()->EffectInputCount() != 1 ||
+            !dominator->op()->HasProperty(Operator::kNoWrite)) {
+          // Didn't find any appropriate CheckMaps node.
+          return false;
+        }
+        break;
+      }
+    }
+    dominator = NodeProperties::GetEffectInput(dominator);
+  }
+}
+
+}  // namespace
+
+// ES6 section 20.3.4.10 Date.prototype.getTime ( )
+Reduction JSBuiltinReducer::ReduceDateGetTime(Node* node) {
+  Node* receiver = NodeProperties::GetValueInput(node, 1);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+  if (HasInstanceTypeWitness(receiver, effect, JS_DATE_TYPE)) {
+    Node* value = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForJSDateValue()), receiver,
+        effect, control);
+    ReplaceWithValue(node, value, effect, control);
+    return Replace(value);
+  }
+  return NoChange();
+}
+
+// ES6 section 19.2.3.6 Function.prototype [ @@hasInstance ] ( V )
+Reduction JSBuiltinReducer::ReduceFunctionHasInstance(Node* node) {
+  Node* receiver = NodeProperties::GetValueInput(node, 1);
+  Node* object = (node->op()->ValueInputCount() >= 3)
+                     ? NodeProperties::GetValueInput(node, 2)
+                     : jsgraph()->UndefinedConstant();
+  Node* context = NodeProperties::GetContextInput(node);
+  Node* frame_state = NodeProperties::GetFrameStateInput(node);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+
+  // TODO(turbofan): If JSOrdinaryToInstance raises an exception, the
+  // stack trace doesn't contain the @@hasInstance call; we have the
+  // corresponding bug in the baseline case. Some massaging of the frame
+  // state would be necessary here.
+
+  // Morph this {node} into a JSOrdinaryHasInstance node.
+  node->ReplaceInput(0, receiver);
+  node->ReplaceInput(1, object);
+  node->ReplaceInput(2, context);
+  node->ReplaceInput(3, frame_state);
+  node->ReplaceInput(4, effect);
+  node->ReplaceInput(5, control);
+  node->TrimInputCount(6);
+  NodeProperties::ChangeOp(node, javascript()->OrdinaryHasInstance());
+  return Changed(node);
+}
+
+// ES6 section 18.2.2 isFinite ( number )
+Reduction JSBuiltinReducer::ReduceGlobalIsFinite(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(Type::PlainPrimitive())) {
+    // isFinite(a:plain-primitive) -> NumberEqual(a', a')
+    // where a' = NumberSubtract(ToNumber(a), ToNumber(a))
+    Node* input = ToNumber(r.GetJSCallInput(0));
+    Node* diff = graph()->NewNode(simplified()->NumberSubtract(), input, input);
+    Node* value = graph()->NewNode(simplified()->NumberEqual(), diff, diff);
+    return Replace(value);
+  }
+  return NoChange();
+}
+
+// ES6 section 18.2.3 isNaN ( number )
+Reduction JSBuiltinReducer::ReduceGlobalIsNaN(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(Type::PlainPrimitive())) {
+    // isNaN(a:plain-primitive) -> BooleanNot(NumberEqual(a', a'))
+    // where a' = ToNumber(a)
+    Node* input = ToNumber(r.GetJSCallInput(0));
+    Node* check = graph()->NewNode(simplified()->NumberEqual(), input, input);
+    Node* value = graph()->NewNode(simplified()->BooleanNot(), check);
+    return Replace(value);
+  }
+  return NoChange();
+}
+
 // ES6 section 20.2.2.1 Math.abs ( x )
 Reduction JSBuiltinReducer::ReduceMathAbs(Node* node) {
   JSCallReduction r(node);
@@ -737,6 +854,60 @@
   return NoChange();
 }
 
+// ES6 section 20.1.2.2 Number.isFinite ( number )
+Reduction JSBuiltinReducer::ReduceNumberIsFinite(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(Type::Number())) {
+    // Number.isFinite(a:number) -> NumberEqual(a', a')
+    // where a' = NumberSubtract(a, a)
+    Node* input = r.GetJSCallInput(0);
+    Node* diff = graph()->NewNode(simplified()->NumberSubtract(), input, input);
+    Node* value = graph()->NewNode(simplified()->NumberEqual(), diff, diff);
+    return Replace(value);
+  }
+  return NoChange();
+}
+
+// ES6 section 20.1.2.3 Number.isInteger ( number )
+Reduction JSBuiltinReducer::ReduceNumberIsInteger(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(Type::Number())) {
+    // Number.isInteger(x:number) -> NumberEqual(NumberSubtract(x, x'), #0)
+    // where x' = NumberTrunc(x)
+    Node* input = r.GetJSCallInput(0);
+    Node* trunc = graph()->NewNode(simplified()->NumberTrunc(), input);
+    Node* diff = graph()->NewNode(simplified()->NumberSubtract(), input, trunc);
+    Node* value = graph()->NewNode(simplified()->NumberEqual(), diff,
+                                   jsgraph()->ZeroConstant());
+    return Replace(value);
+  }
+  return NoChange();
+}
+
+// ES6 section 20.1.2.4 Number.isNaN ( number )
+Reduction JSBuiltinReducer::ReduceNumberIsNaN(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(Type::Number())) {
+    // Number.isNaN(a:number) -> BooleanNot(NumberEqual(a, a))
+    Node* input = r.GetJSCallInput(0);
+    Node* check = graph()->NewNode(simplified()->NumberEqual(), input, input);
+    Node* value = graph()->NewNode(simplified()->BooleanNot(), check);
+    return Replace(value);
+  }
+  return NoChange();
+}
+
+// ES6 section 20.1.2.5 Number.isSafeInteger ( number )
+Reduction JSBuiltinReducer::ReduceNumberIsSafeInteger(Node* node) {
+  JSCallReduction r(node);
+  if (r.InputsMatchOne(type_cache_.kSafeInteger)) {
+    // Number.isInteger(x:safe-integer) -> #true
+    Node* value = jsgraph()->TrueConstant();
+    return Replace(value);
+  }
+  return NoChange();
+}
+
 // ES6 section 20.1.2.13 Number.parseInt ( string, radix )
 Reduction JSBuiltinReducer::ReduceNumberParseInt(Node* node) {
   JSCallReduction r(node);
@@ -887,51 +1058,146 @@
   return NoChange();
 }
 
-namespace {
+Reduction JSBuiltinReducer::ReduceStringIteratorNext(Node* node) {
+  Node* receiver = NodeProperties::GetValueInput(node, 1);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+  Node* context = NodeProperties::GetContextInput(node);
+  if (HasInstanceTypeWitness(receiver, effect, JS_STRING_ITERATOR_TYPE)) {
+    Node* string = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForJSStringIteratorString()),
+        receiver, effect, control);
+    Node* index = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForJSStringIteratorIndex()),
+        receiver, effect, control);
+    Node* length = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForStringLength()), string,
+        effect, control);
 
-bool HasInstanceTypeWitness(Node* receiver, Node* effect,
-                            InstanceType instance_type) {
-  for (Node* dominator = effect;;) {
-    if (dominator->opcode() == IrOpcode::kCheckMaps &&
-        dominator->InputAt(0) == receiver) {
-      // Check if all maps have the given {instance_type}.
-      for (int i = 1; i < dominator->op()->ValueInputCount(); ++i) {
-        Node* const map = NodeProperties::GetValueInput(dominator, i);
-        Type* const map_type = NodeProperties::GetType(map);
-        if (!map_type->IsConstant()) return false;
-        Handle<Map> const map_value =
-            Handle<Map>::cast(map_type->AsConstant()->Value());
-        if (map_value->instance_type() != instance_type) return false;
-      }
-      return true;
-    }
-    switch (dominator->opcode()) {
-      case IrOpcode::kStoreField: {
-        FieldAccess const& access = FieldAccessOf(dominator->op());
-        if (access.base_is_tagged == kTaggedBase &&
-            access.offset == HeapObject::kMapOffset) {
-          return false;
+    // branch0: if (index < length)
+    Node* check0 =
+        graph()->NewNode(simplified()->NumberLessThan(), index, length);
+    Node* branch0 =
+        graph()->NewNode(common()->Branch(BranchHint::kTrue), check0, control);
+
+    Node* etrue0 = effect;
+    Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
+    Node* done_true;
+    Node* vtrue0;
+    {
+      done_true = jsgraph()->FalseConstant();
+      Node* lead = graph()->NewNode(simplified()->StringCharCodeAt(), string,
+                                    index, if_true0);
+
+      // branch1: if ((lead & 0xFC00) === 0xD800)
+      Node* check1 = graph()->NewNode(
+          simplified()->NumberEqual(),
+          graph()->NewNode(simplified()->NumberBitwiseAnd(), lead,
+                           jsgraph()->Int32Constant(0xFC00)),
+          jsgraph()->Int32Constant(0xD800));
+      Node* branch1 = graph()->NewNode(common()->Branch(BranchHint::kFalse),
+                                       check1, if_true0);
+      Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
+      Node* vtrue1;
+      {
+        Node* next_index = graph()->NewNode(simplified()->NumberAdd(), index,
+                                            jsgraph()->OneConstant());
+        // branch2: if ((index + 1) < length)
+        Node* check2 = graph()->NewNode(simplified()->NumberLessThan(),
+                                        next_index, length);
+        Node* branch2 = graph()->NewNode(common()->Branch(BranchHint::kTrue),
+                                         check2, if_true1);
+        Node* if_true2 = graph()->NewNode(common()->IfTrue(), branch2);
+        Node* vtrue2;
+        {
+          Node* trail = graph()->NewNode(simplified()->StringCharCodeAt(),
+                                         string, next_index, if_true2);
+          // branch3: if ((trail & 0xFC00) === 0xDC00)
+          Node* check3 = graph()->NewNode(
+              simplified()->NumberEqual(),
+              graph()->NewNode(simplified()->NumberBitwiseAnd(), trail,
+                               jsgraph()->Int32Constant(0xFC00)),
+              jsgraph()->Int32Constant(0xDC00));
+          Node* branch3 = graph()->NewNode(common()->Branch(BranchHint::kTrue),
+                                           check3, if_true2);
+          Node* if_true3 = graph()->NewNode(common()->IfTrue(), branch3);
+          Node* vtrue3;
+          {
+            vtrue3 = graph()->NewNode(
+                simplified()->NumberBitwiseOr(),
+// Need to swap the order for big-endian platforms
+#if V8_TARGET_BIG_ENDIAN
+                graph()->NewNode(simplified()->NumberShiftLeft(), lead,
+                                 jsgraph()->Int32Constant(16)),
+                trail);
+#else
+                graph()->NewNode(simplified()->NumberShiftLeft(), trail,
+                                 jsgraph()->Int32Constant(16)),
+                lead);
+#endif
+          }
+
+          Node* if_false3 = graph()->NewNode(common()->IfFalse(), branch3);
+          Node* vfalse3 = lead;
+          if_true2 = graph()->NewNode(common()->Merge(2), if_true3, if_false3);
+          vtrue2 =
+              graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
+                               vtrue3, vfalse3, if_true2);
         }
-        break;
+
+        Node* if_false2 = graph()->NewNode(common()->IfFalse(), branch2);
+        Node* vfalse2 = lead;
+        if_true1 = graph()->NewNode(common()->Merge(2), if_true2, if_false2);
+        vtrue1 =
+            graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
+                             vtrue2, vfalse2, if_true1);
       }
-      case IrOpcode::kStoreElement:
-        break;
-      default: {
-        DCHECK_EQ(1, dominator->op()->EffectOutputCount());
-        if (dominator->op()->EffectInputCount() != 1 ||
-            !dominator->op()->HasProperty(Operator::kNoWrite)) {
-          // Didn't find any appropriate CheckMaps node.
-          return false;
-        }
-        break;
-      }
+
+      Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch1);
+      Node* vfalse1 = lead;
+      if_true0 = graph()->NewNode(common()->Merge(2), if_true1, if_false1);
+      vtrue0 =
+          graph()->NewNode(common()->Phi(MachineRepresentation::kWord32, 2),
+                           vtrue1, vfalse1, if_true0);
+      vtrue0 = graph()->NewNode(
+          simplified()->StringFromCodePoint(UnicodeEncoding::UTF16), vtrue0);
+
+      // Update iterator.[[NextIndex]]
+      Node* char_length = etrue0 = graph()->NewNode(
+          simplified()->LoadField(AccessBuilder::ForStringLength()), vtrue0,
+          etrue0, if_true0);
+      index = graph()->NewNode(simplified()->NumberAdd(), index, char_length);
+      etrue0 = graph()->NewNode(
+          simplified()->StoreField(AccessBuilder::ForJSStringIteratorIndex()),
+          receiver, index, etrue0, if_true0);
     }
-    dominator = NodeProperties::GetEffectInput(dominator);
+
+    Node* if_false0 = graph()->NewNode(common()->IfFalse(), branch0);
+    Node* done_false;
+    Node* vfalse0;
+    {
+      vfalse0 = jsgraph()->UndefinedConstant();
+      done_false = jsgraph()->TrueConstant();
+    }
+
+    control = graph()->NewNode(common()->Merge(2), if_true0, if_false0);
+    effect = graph()->NewNode(common()->EffectPhi(2), etrue0, effect, control);
+    Node* value =
+        graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
+                         vtrue0, vfalse0, control);
+    Node* done =
+        graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
+                         done_true, done_false, control);
+
+    value = effect = graph()->NewNode(javascript()->CreateIterResultObject(),
+                                      value, done, context, effect);
+
+    ReplaceWithValue(node, value, effect, control);
+    return Replace(value);
   }
+  return NoChange();
 }
 
-}  // namespace
-
 Reduction JSBuiltinReducer::ReduceArrayBufferViewAccessor(
     Node* node, InstanceType instance_type, FieldAccess const& access) {
   Node* receiver = NodeProperties::GetValueInput(node, 1);
@@ -939,27 +1205,21 @@
   Node* control = NodeProperties::GetControlInput(node);
   if (HasInstanceTypeWitness(receiver, effect, instance_type)) {
     // Load the {receiver}s field.
-    Node* receiver_length = effect = graph()->NewNode(
+    Node* receiver_value = effect = graph()->NewNode(
         simplified()->LoadField(access), receiver, effect, control);
 
     // Check if the {receiver}s buffer was neutered.
     Node* receiver_buffer = effect = graph()->NewNode(
         simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
         receiver, effect, control);
-    Node* receiver_buffer_bitfield = effect = graph()->NewNode(
-        simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()),
-        receiver_buffer, effect, control);
-    Node* check = graph()->NewNode(
-        simplified()->NumberEqual(),
-        graph()->NewNode(
-            simplified()->NumberBitwiseAnd(), receiver_buffer_bitfield,
-            jsgraph()->Constant(JSArrayBuffer::WasNeutered::kMask)),
-        jsgraph()->ZeroConstant());
+    Node* check = effect =
+        graph()->NewNode(simplified()->ArrayBufferWasNeutered(),
+                         receiver_buffer, effect, control);
 
     // Default to zero if the {receiver}s buffer was neutered.
     Node* value = graph()->NewNode(
-        common()->Select(MachineRepresentation::kTagged, BranchHint::kTrue),
-        check, receiver_length, jsgraph()->ZeroConstant());
+        common()->Select(MachineRepresentation::kTagged, BranchHint::kFalse),
+        check, jsgraph()->ZeroConstant(), receiver_value);
 
     ReplaceWithValue(node, value, effect, control);
     return Replace(value);
@@ -978,6 +1238,17 @@
       return ReduceArrayPop(node);
     case kArrayPush:
       return ReduceArrayPush(node);
+    case kDateGetTime:
+      return ReduceDateGetTime(node);
+    case kFunctionHasInstance:
+      return ReduceFunctionHasInstance(node);
+      break;
+    case kGlobalIsFinite:
+      reduction = ReduceGlobalIsFinite(node);
+      break;
+    case kGlobalIsNaN:
+      reduction = ReduceGlobalIsNaN(node);
+      break;
     case kMathAbs:
       reduction = ReduceMathAbs(node);
       break;
@@ -1077,6 +1348,18 @@
     case kMathTrunc:
       reduction = ReduceMathTrunc(node);
       break;
+    case kNumberIsFinite:
+      reduction = ReduceNumberIsFinite(node);
+      break;
+    case kNumberIsInteger:
+      reduction = ReduceNumberIsInteger(node);
+      break;
+    case kNumberIsNaN:
+      reduction = ReduceNumberIsNaN(node);
+      break;
+    case kNumberIsSafeInteger:
+      reduction = ReduceNumberIsSafeInteger(node);
+      break;
     case kNumberParseInt:
       reduction = ReduceNumberParseInt(node);
       break;
@@ -1087,6 +1370,8 @@
       return ReduceStringCharAt(node);
     case kStringCharCodeAt:
       return ReduceStringCharCodeAt(node);
+    case kStringIteratorNext:
+      return ReduceStringIteratorNext(node);
     case kDataViewByteLength:
       return ReduceArrayBufferViewAccessor(
           node, JS_DATA_VIEW_TYPE,
@@ -1146,6 +1431,10 @@
   return jsgraph()->simplified();
 }
 
+JSOperatorBuilder* JSBuiltinReducer::javascript() const {
+  return jsgraph()->javascript();
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
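Aside (illustrative only, not part of the patch): the new ReduceGlobalIsFinite/ReduceGlobalIsNaN and ReduceNumberIs* reductions above rewrite the builtins into plain IEEE-754 identities, as stated in the comments inside each reducer. A small standalone C++ sketch of those identities, for reference:

#include <cassert>
#include <cmath>
#include <limits>

int main() {
  const double nan = std::numeric_limits<double>::quiet_NaN();
  const double inf = std::numeric_limits<double>::infinity();

  // isNaN(a) -> !(a == a): NaN is the only value not equal to itself.
  assert(!(nan == nan));
  assert(1.5 == 1.5);

  // isFinite(a) -> (a - a) == (a - a): a - a is NaN for NaN and for
  // +/-Infinity, and 0 for every finite double.
  assert(!((inf - inf) == (inf - inf)));
  assert((1.5 - 1.5) == (1.5 - 1.5));

  // Number.isInteger(x) -> NumberEqual(NumberSubtract(x, trunc(x)), 0).
  assert((3.0 - std::trunc(3.0)) == 0.0);
  assert(!((3.5 - std::trunc(3.5)) == 0.0));
  return 0;
}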
diff --git a/src/compiler/js-builtin-reducer.h b/src/compiler/js-builtin-reducer.h
index 2da8347..524d006 100644
--- a/src/compiler/js-builtin-reducer.h
+++ b/src/compiler/js-builtin-reducer.h
@@ -14,7 +14,6 @@
 // Forward declarations.
 class CompilationDependencies;
 class Factory;
-class TypeCache;
 
 namespace compiler {
 
@@ -22,8 +21,9 @@
 class CommonOperatorBuilder;
 struct FieldAccess;
 class JSGraph;
+class JSOperatorBuilder;
 class SimplifiedOperatorBuilder;
-
+class TypeCache;
 
 class JSBuiltinReducer final : public AdvancedReducer {
  public:
@@ -43,6 +43,10 @@
  private:
   Reduction ReduceArrayPop(Node* node);
   Reduction ReduceArrayPush(Node* node);
+  Reduction ReduceDateGetTime(Node* node);
+  Reduction ReduceFunctionHasInstance(Node* node);
+  Reduction ReduceGlobalIsFinite(Node* node);
+  Reduction ReduceGlobalIsNaN(Node* node);
   Reduction ReduceMathAbs(Node* node);
   Reduction ReduceMathAcos(Node* node);
   Reduction ReduceMathAcosh(Node* node);
@@ -76,10 +80,15 @@
   Reduction ReduceMathTan(Node* node);
   Reduction ReduceMathTanh(Node* node);
   Reduction ReduceMathTrunc(Node* node);
+  Reduction ReduceNumberIsFinite(Node* node);
+  Reduction ReduceNumberIsInteger(Node* node);
+  Reduction ReduceNumberIsNaN(Node* node);
+  Reduction ReduceNumberIsSafeInteger(Node* node);
   Reduction ReduceNumberParseInt(Node* node);
   Reduction ReduceStringCharAt(Node* node);
   Reduction ReduceStringCharCodeAt(Node* node);
   Reduction ReduceStringFromCharCode(Node* node);
+  Reduction ReduceStringIteratorNext(Node* node);
   Reduction ReduceArrayBufferViewAccessor(Node* node,
                                           InstanceType instance_type,
                                           FieldAccess const& access);
@@ -94,6 +103,7 @@
   Isolate* isolate() const;
   CommonOperatorBuilder* common() const;
   SimplifiedOperatorBuilder* simplified() const;
+  JSOperatorBuilder* javascript() const;
   CompilationDependencies* dependencies() const { return dependencies_; }
 
   CompilationDependencies* const dependencies_;
diff --git a/src/compiler/js-call-reducer.cc b/src/compiler/js-call-reducer.cc
index e390214..dd8f064 100644
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -14,30 +14,6 @@
 namespace internal {
 namespace compiler {
 
-namespace {
-
-VectorSlotPair CallCountFeedback(VectorSlotPair p) {
-  // Extract call count from {p}.
-  if (!p.IsValid()) return VectorSlotPair();
-  CallICNexus n(p.vector(), p.slot());
-  int const call_count = n.ExtractCallCount();
-  if (call_count <= 0) return VectorSlotPair();
-
-  // Create megamorphic CallIC feedback with the given {call_count}.
-  StaticFeedbackVectorSpec spec;
-  FeedbackVectorSlot slot = spec.AddCallICSlot();
-  Handle<TypeFeedbackMetadata> metadata =
-      TypeFeedbackMetadata::New(n.GetIsolate(), &spec);
-  Handle<TypeFeedbackVector> vector =
-      TypeFeedbackVector::New(n.GetIsolate(), metadata);
-  CallICNexus nexus(vector, slot);
-  nexus.ConfigureMegamorphic(call_count);
-  return VectorSlotPair(vector, slot);
-}
-
-}  // namespace
-
-
 Reduction JSCallReducer::Reduce(Node* node) {
   switch (node->opcode()) {
     case IrOpcode::kJSCallConstruct:
@@ -166,7 +142,7 @@
   }
   // Change {node} to the new {JSCallFunction} operator.
   NodeProperties::ChangeOp(
-      node, javascript()->CallFunction(arity, CallCountFeedback(p.feedback()),
+      node, javascript()->CallFunction(arity, p.frequency(), VectorSlotPair(),
                                        convert_mode, p.tail_call_mode()));
   // Change context of {node} to the Function.prototype.apply context,
   // to ensure any exception is thrown in the correct context.
@@ -206,7 +182,7 @@
     --arity;
   }
   NodeProperties::ChangeOp(
-      node, javascript()->CallFunction(arity, CallCountFeedback(p.feedback()),
+      node, javascript()->CallFunction(arity, p.frequency(), VectorSlotPair(),
                                        convert_mode, p.tail_call_mode()));
   // Try to further reduce the JSCallFunction {node}.
   Reduction const reduction = ReduceJSCallFunction(node);
@@ -287,7 +263,7 @@
         arity++;
       }
       NodeProperties::ChangeOp(node, javascript()->CallFunction(
-                                         arity, CallCountFeedback(p.feedback()),
+                                         arity, p.frequency(), VectorSlotPair(),
                                          convert_mode, p.tail_call_mode()));
       // Try to further reduce the JSCallFunction {node}.
       Reduction const reduction = ReduceJSCallFunction(node);
@@ -305,6 +281,20 @@
   // Extract feedback from the {node} using the CallICNexus.
   if (!p.feedback().IsValid()) return NoChange();
   CallICNexus nexus(p.feedback().vector(), p.feedback().slot());
+  if (nexus.IsUninitialized() && (flags() & kBailoutOnUninitialized)) {
+    Node* frame_state = NodeProperties::FindFrameStateBefore(node);
+    Node* deoptimize = graph()->NewNode(
+        common()->Deoptimize(
+            DeoptimizeKind::kSoft,
+            DeoptimizeReason::kInsufficientTypeFeedbackForCall),
+        frame_state, effect, control);
+    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
+    NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
+    Revisit(graph()->end());
+    node->TrimInputCount(0);
+    NodeProperties::ChangeOp(node, common()->Dead());
+    return Changed(node);
+  }
   Handle<Object> feedback(nexus.GetFeedback(), isolate());
   if (feedback->IsAllocationSite()) {
     // Retrieve the Array function from the {node}.
@@ -386,8 +376,8 @@
         // Check if we have an allocation site.
         Handle<AllocationSite> site;
         if (p.feedback().IsValid()) {
-          Handle<Object> feedback(
-              p.feedback().vector()->Get(p.feedback().slot()), isolate());
+          CallICNexus nexus(p.feedback().vector(), p.feedback().slot());
+          Handle<Object> feedback(nexus.GetFeedback(), isolate());
           if (feedback->IsAllocationSite()) {
             site = Handle<AllocationSite>::cast(feedback);
           }
@@ -412,10 +402,9 @@
   // Not much we can do if deoptimization support is disabled.
   if (!(flags() & kDeoptimizationEnabled)) return NoChange();
 
-  // TODO(mvstanton): Use ConstructICNexus here, once available.
-  Handle<Object> feedback;
   if (!p.feedback().IsValid()) return NoChange();
-  feedback = handle(p.feedback().vector()->Get(p.feedback().slot()), isolate());
+  CallICNexus nexus(p.feedback().vector(), p.feedback().slot());
+  Handle<Object> feedback(nexus.GetFeedback(), isolate());
   if (feedback->IsAllocationSite()) {
     // The feedback is an AllocationSite, which means we have called the
     // Array function and collected transition (and pretenuring) feedback
diff --git a/src/compiler/js-call-reducer.h b/src/compiler/js-call-reducer.h
index 8d9700a..0c3835c 100644
--- a/src/compiler/js-call-reducer.h
+++ b/src/compiler/js-call-reducer.h
@@ -20,18 +20,22 @@
 
 // Performs strength reduction on {JSCallConstruct} and {JSCallFunction} nodes,
 // which might allow inlining or other optimizations to be performed afterwards.
-class JSCallReducer final : public Reducer {
+class JSCallReducer final : public AdvancedReducer {
  public:
   // Flags that control the mode of operation.
   enum Flag {
     kNoFlags = 0u,
-    kDeoptimizationEnabled = 1u << 0,
+    kBailoutOnUninitialized = 1u << 0,
+    kDeoptimizationEnabled = 1u << 1
   };
   typedef base::Flags<Flag> Flags;
 
-  JSCallReducer(JSGraph* jsgraph, Flags flags,
+  JSCallReducer(Editor* editor, JSGraph* jsgraph, Flags flags,
                 MaybeHandle<Context> native_context)
-      : jsgraph_(jsgraph), flags_(flags), native_context_(native_context) {}
+      : AdvancedReducer(editor),
+        jsgraph_(jsgraph),
+        flags_(flags),
+        native_context_(native_context) {}
 
   Reduction Reduce(Node* node) final;
 
diff --git a/src/compiler/js-create-lowering.cc b/src/compiler/js-create-lowering.cc
index f2c5edd..b68bb70 100644
--- a/src/compiler/js-create-lowering.cc
+++ b/src/compiler/js-create-lowering.cc
@@ -722,16 +722,25 @@
   DCHECK_EQ(IrOpcode::kJSCreateIterResultObject, node->opcode());
   Node* value = NodeProperties::GetValueInput(node, 0);
   Node* done = NodeProperties::GetValueInput(node, 1);
-  Node* context = NodeProperties::GetContextInput(node);
   Node* effect = NodeProperties::GetEffectInput(node);
 
-  // Load the JSIteratorResult map for the {context}.
-  Node* native_context = effect = graph()->NewNode(
-      javascript()->LoadContext(0, Context::NATIVE_CONTEXT_INDEX, true),
-      context, context, effect);
-  Node* iterator_result_map = effect = graph()->NewNode(
-      javascript()->LoadContext(0, Context::ITERATOR_RESULT_MAP_INDEX, true),
-      native_context, native_context, effect);
+  Node* iterator_result_map;
+  Handle<Context> native_context;
+  if (GetSpecializationNativeContext(node).ToHandle(&native_context)) {
+    // Specialize to the constant JSIteratorResult map to enable map check
+    // elimination to eliminate subsequent checks in case of inlining.
+    iterator_result_map = jsgraph()->HeapConstant(
+        handle(native_context->iterator_result_map(), isolate()));
+  } else {
+    // Load the JSIteratorResult map for the {context}.
+    Node* context = NodeProperties::GetContextInput(node);
+    Node* native_context = effect = graph()->NewNode(
+        javascript()->LoadContext(0, Context::NATIVE_CONTEXT_INDEX, true),
+        context, context, effect);
+    iterator_result_map = effect = graph()->NewNode(
+        javascript()->LoadContext(0, Context::ITERATOR_RESULT_MAP_INDEX, true),
+        native_context, native_context, effect);
+  }
 
   // Emit code to allocate the JSIteratorResult instance.
   AllocationBuilder a(jsgraph(), effect, graph()->start());
@@ -815,6 +824,7 @@
 
 Reduction JSCreateLowering::ReduceJSCreateWithContext(Node* node) {
   DCHECK_EQ(IrOpcode::kJSCreateWithContext, node->opcode());
+  Handle<ScopeInfo> scope_info = OpParameter<Handle<ScopeInfo>>(node);
   Node* object = NodeProperties::GetValueInput(node, 0);
   Node* closure = NodeProperties::GetValueInput(node, 1);
   Node* effect = NodeProperties::GetEffectInput(node);
@@ -823,12 +833,20 @@
   Node* native_context = effect = graph()->NewNode(
       javascript()->LoadContext(0, Context::NATIVE_CONTEXT_INDEX, true),
       context, context, effect);
-  AllocationBuilder a(jsgraph(), effect, control);
+
+  AllocationBuilder aa(jsgraph(), effect, control);
+  aa.Allocate(ContextExtension::kSize);
+  aa.Store(AccessBuilder::ForMap(), factory()->context_extension_map());
+  aa.Store(AccessBuilder::ForContextExtensionScopeInfo(), scope_info);
+  aa.Store(AccessBuilder::ForContextExtensionExtension(), object);
+  Node* extension = aa.Finish();
+
+  AllocationBuilder a(jsgraph(), extension, control);
   STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == 4);  // Ensure fully covered.
   a.AllocateArray(Context::MIN_CONTEXT_SLOTS, factory()->with_context_map());
   a.Store(AccessBuilder::ForContextSlot(Context::CLOSURE_INDEX), closure);
   a.Store(AccessBuilder::ForContextSlot(Context::PREVIOUS_INDEX), context);
-  a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), object);
+  a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), extension);
   a.Store(AccessBuilder::ForContextSlot(Context::NATIVE_CONTEXT_INDEX),
           native_context);
   RelaxControls(node);
@@ -838,7 +856,8 @@
 
 Reduction JSCreateLowering::ReduceJSCreateCatchContext(Node* node) {
   DCHECK_EQ(IrOpcode::kJSCreateCatchContext, node->opcode());
-  Handle<String> name = OpParameter<Handle<String>>(node);
+  const CreateCatchContextParameters& parameters =
+      CreateCatchContextParametersOf(node->op());
   Node* exception = NodeProperties::GetValueInput(node, 0);
   Node* closure = NodeProperties::GetValueInput(node, 1);
   Node* effect = NodeProperties::GetEffectInput(node);
@@ -847,13 +866,23 @@
   Node* native_context = effect = graph()->NewNode(
       javascript()->LoadContext(0, Context::NATIVE_CONTEXT_INDEX, true),
       context, context, effect);
-  AllocationBuilder a(jsgraph(), effect, control);
+
+  AllocationBuilder aa(jsgraph(), effect, control);
+  aa.Allocate(ContextExtension::kSize);
+  aa.Store(AccessBuilder::ForMap(), factory()->context_extension_map());
+  aa.Store(AccessBuilder::ForContextExtensionScopeInfo(),
+           parameters.scope_info());
+  aa.Store(AccessBuilder::ForContextExtensionExtension(),
+           parameters.catch_name());
+  Node* extension = aa.Finish();
+
+  AllocationBuilder a(jsgraph(), extension, control);
   STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == 4);  // Ensure fully covered.
   a.AllocateArray(Context::MIN_CONTEXT_SLOTS + 1,
                   factory()->catch_context_map());
   a.Store(AccessBuilder::ForContextSlot(Context::CLOSURE_INDEX), closure);
   a.Store(AccessBuilder::ForContextSlot(Context::PREVIOUS_INDEX), context);
-  a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), name);
+  a.Store(AccessBuilder::ForContextSlot(Context::EXTENSION_INDEX), extension);
   a.Store(AccessBuilder::ForContextSlot(Context::NATIVE_CONTEXT_INDEX),
           native_context);
   a.Store(AccessBuilder::ForContextSlot(Context::THROWN_OBJECT_INDEX),
@@ -1013,10 +1042,17 @@
   ElementAccess access = IsFastDoubleElementsKind(elements_kind)
                              ? AccessBuilder::ForFixedDoubleArrayElement()
                              : AccessBuilder::ForFixedArrayElement();
-  Node* value =
-      IsFastDoubleElementsKind(elements_kind)
-          ? jsgraph()->Float64Constant(bit_cast<double>(kHoleNanInt64))
-          : jsgraph()->TheHoleConstant();
+  Node* value;
+  if (IsFastDoubleElementsKind(elements_kind)) {
+    // Load the hole NaN pattern from the canonical location.
+    value = effect = graph()->NewNode(
+        simplified()->LoadField(AccessBuilder::ForExternalDoubleValue()),
+        jsgraph()->ExternalConstant(
+            ExternalReference::address_of_the_hole_nan()),
+        effect, control);
+  } else {
+    value = jsgraph()->TheHoleConstant();
+  }
 
   // Actually allocate the backing store.
   AllocationBuilder a(jsgraph(), effect, control);
@@ -1065,8 +1101,8 @@
         boilerplate_map->instance_descriptors()->GetKey(i), isolate());
     FieldIndex index = FieldIndex::ForDescriptor(*boilerplate_map, i);
     FieldAccess access = {
-        kTaggedBase,    index.offset(),           property_name,
-        Type::Tagged(), MachineType::AnyTagged(), kFullWriteBarrier};
+        kTaggedBase, index.offset(),           property_name,
+        Type::Any(), MachineType::AnyTagged(), kFullWriteBarrier};
     Node* value;
     if (boilerplate->IsUnboxedDoubleField(index)) {
       access.machine_type = MachineType::Float64();
@@ -1169,18 +1205,18 @@
   if (elements_map->instance_type() == FIXED_DOUBLE_ARRAY_TYPE) {
     Handle<FixedDoubleArray> elements =
         Handle<FixedDoubleArray>::cast(boilerplate_elements);
+    Node* the_hole_value = nullptr;
     for (int i = 0; i < elements_length; ++i) {
       if (elements->is_the_hole(i)) {
-        // TODO(turbofan): We cannot currently safely pass thru the (signaling)
-        // hole NaN in C++ code, as the C++ compiler on Intel might use FPU
-        // instructions/registers for doubles and therefore make the NaN quiet.
-        // We should consider passing doubles in the compiler as raw int64
-        // values to prevent this.
-        elements_values[i] = effect =
-            graph()->NewNode(simplified()->LoadElement(
-                                 AccessBuilder::ForFixedDoubleArrayElement()),
-                             jsgraph()->HeapConstant(elements),
-                             jsgraph()->Constant(i), effect, control);
+        if (the_hole_value == nullptr) {
+          // Load the hole NaN pattern from the canonical location.
+          the_hole_value = effect = graph()->NewNode(
+              simplified()->LoadField(AccessBuilder::ForExternalDoubleValue()),
+              jsgraph()->ExternalConstant(
+                  ExternalReference::address_of_the_hole_nan()),
+              effect, control);
+        }
+        elements_values[i] = the_hole_value;
       } else {
         elements_values[i] = jsgraph()->Constant(elements->get_scalar(i));
       }
@@ -1244,6 +1280,13 @@
   return MaybeHandle<LiteralsArray>();
 }
 
+MaybeHandle<Context> JSCreateLowering::GetSpecializationNativeContext(
+    Node* node) {
+  Node* const context = NodeProperties::GetContextInput(node);
+  return NodeProperties::GetSpecializationNativeContext(context,
+                                                        native_context_);
+}
+
 Factory* JSCreateLowering::factory() const { return isolate()->factory(); }
 
 Graph* JSCreateLowering::graph() const { return jsgraph()->graph(); }
diff --git a/src/compiler/js-create-lowering.h b/src/compiler/js-create-lowering.h
index 2262e66..6248ca2 100644
--- a/src/compiler/js-create-lowering.h
+++ b/src/compiler/js-create-lowering.h
@@ -31,11 +31,12 @@
  public:
   JSCreateLowering(Editor* editor, CompilationDependencies* dependencies,
                    JSGraph* jsgraph, MaybeHandle<LiteralsArray> literals_array,
-                   Zone* zone)
+                   MaybeHandle<Context> native_context, Zone* zone)
       : AdvancedReducer(editor),
         dependencies_(dependencies),
         jsgraph_(jsgraph),
         literals_array_(literals_array),
+        native_context_(native_context),
         zone_(zone) {}
   ~JSCreateLowering() final {}
 
@@ -76,6 +77,8 @@
 
   // Infers the LiteralsArray to use for a given {node}.
   MaybeHandle<LiteralsArray> GetSpecializationLiterals(Node* node);
+  // Infers the native context to use for a given {node}.
+  MaybeHandle<Context> GetSpecializationNativeContext(Node* node);
 
   Factory* factory() const;
   Graph* graph() const;
@@ -91,6 +94,7 @@
   CompilationDependencies* const dependencies_;
   JSGraph* const jsgraph_;
   MaybeHandle<LiteralsArray> const literals_array_;
+  MaybeHandle<Context> const native_context_;
   Zone* const zone_;
 };
 
diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc
index 812d3e7..22d6c86 100644
--- a/src/compiler/js-generic-lowering.cc
+++ b/src/compiler/js-generic-lowering.cc
@@ -2,10 +2,12 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include "src/compiler/js-generic-lowering.h"
+
+#include "src/ast/ast.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/compiler/common-operator.h"
-#include "src/compiler/js-generic-lowering.h"
 #include "src/compiler/js-graph.h"
 #include "src/compiler/machine-operator.h"
 #include "src/compiler/node-matchers.h"
@@ -45,13 +47,6 @@
   }
   return Changed(node);
 }
-#define REPLACE_RUNTIME_CALL(op, fun)             \
-  void JSGenericLowering::Lower##op(Node* node) { \
-    ReplaceWithRuntimeCall(node, fun);            \
-  }
-REPLACE_RUNTIME_CALL(JSCreateWithContext, Runtime::kPushWithContext)
-REPLACE_RUNTIME_CALL(JSConvertReceiver, Runtime::kConvertReceiver)
-#undef REPLACE_RUNTIME_CALL
 
 #define REPLACE_STUB_CALL(Name)                                \
   void JSGenericLowering::LowerJS##Name(Node* node) {          \
@@ -93,8 +88,10 @@
 void JSGenericLowering::ReplaceWithStubCall(Node* node, Callable callable,
                                             CallDescriptor::Flags flags,
                                             Operator::Properties properties) {
+  const CallInterfaceDescriptor& descriptor = callable.descriptor();
   CallDescriptor* desc = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), callable.descriptor(), 0, flags, properties);
+      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(), flags,
+      properties);
   Node* stub_code = jsgraph()->HeapConstant(callable.code());
   node->InsertInput(zone(), 0, stub_code);
   NodeProperties::ChangeOp(node, common()->Call(desc));
@@ -346,6 +343,11 @@
   ReplaceWithStubCall(node, callable, flags);
 }
 
+void JSGenericLowering::LowerJSOrdinaryHasInstance(Node* node) {
+  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
+  Callable callable = CodeFactory::OrdinaryHasInstance(isolate());
+  ReplaceWithStubCall(node, callable, flags);
+}
 
 void JSGenericLowering::LowerJSLoadContext(Node* node) {
   const ContextAccess& access = ContextAccessOf(node->op());
@@ -513,11 +515,20 @@
 
 
 void JSGenericLowering::LowerJSCreateCatchContext(Node* node) {
-  Handle<String> name = OpParameter<Handle<String>>(node);
-  node->InsertInput(zone(), 0, jsgraph()->HeapConstant(name));
+  const CreateCatchContextParameters& parameters =
+      CreateCatchContextParametersOf(node->op());
+  node->InsertInput(zone(), 0,
+                    jsgraph()->HeapConstant(parameters.catch_name()));
+  node->InsertInput(zone(), 2,
+                    jsgraph()->HeapConstant(parameters.scope_info()));
   ReplaceWithRuntimeCall(node, Runtime::kPushCatchContext);
 }
 
+void JSGenericLowering::LowerJSCreateWithContext(Node* node) {
+  Handle<ScopeInfo> scope_info = OpParameter<Handle<ScopeInfo>>(node);
+  node->InsertInput(zone(), 1, jsgraph()->HeapConstant(scope_info));
+  ReplaceWithRuntimeCall(node, Runtime::kPushWithContext);
+}
 
 void JSGenericLowering::LowerJSCreateBlockContext(Node* node) {
   Handle<ScopeInfo> scope_info = OpParameter<Handle<ScopeInfo>>(node);
@@ -577,12 +588,10 @@
   ReplaceWithRuntimeCall(node, p.id(), static_cast<int>(p.arity()));
 }
 
-
-void JSGenericLowering::LowerJSForInDone(Node* node) {
-  ReplaceWithRuntimeCall(node, Runtime::kForInDone);
+void JSGenericLowering::LowerJSConvertReceiver(Node* node) {
+  ReplaceWithRuntimeCall(node, Runtime::kConvertReceiver);
 }
 
-
 void JSGenericLowering::LowerJSForInNext(Node* node) {
   ReplaceWithRuntimeCall(node, Runtime::kForInNext);
 }
@@ -592,12 +601,6 @@
   ReplaceWithRuntimeCall(node, Runtime::kForInPrepare);
 }
 
-
-void JSGenericLowering::LowerJSForInStep(Node* node) {
-  ReplaceWithRuntimeCall(node, Runtime::kForInStep);
-}
-
-
 void JSGenericLowering::LowerJSLoadMessage(Node* node) {
   ExternalReference message_address =
       ExternalReference::address_of_pending_message_obj(isolate());
diff --git a/src/compiler/js-global-object-specialization.cc b/src/compiler/js-global-object-specialization.cc
index 2b4bf1c..10130f4 100644
--- a/src/compiler/js-global-object-specialization.cc
+++ b/src/compiler/js-global-object-specialization.cc
@@ -11,9 +11,9 @@
 #include "src/compiler/js-operator.h"
 #include "src/compiler/node-properties.h"
 #include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
 #include "src/lookup.h"
 #include "src/objects-inl.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -48,6 +48,23 @@
   return NoChange();
 }
 
+namespace {
+
+FieldAccess ForPropertyCellValue(MachineRepresentation representation,
+                                 Type* type, Handle<Name> name) {
+  WriteBarrierKind kind = kFullWriteBarrier;
+  if (representation == MachineRepresentation::kTaggedSigned) {
+    kind = kNoWriteBarrier;
+  } else if (representation == MachineRepresentation::kTaggedPointer) {
+    kind = kPointerWriteBarrier;
+  }
+  MachineType r = MachineType::TypeForRepresentation(representation);
+  FieldAccess access = {kTaggedBase, PropertyCell::kValueOffset, name, type, r,
+                        kind};
+  return access;
+}
+}  // namespace
+
 Reduction JSGlobalObjectSpecialization::ReduceJSLoadGlobal(Node* node) {
   DCHECK_EQ(IrOpcode::kJSLoadGlobal, node->opcode());
   Handle<Name> name = LoadGlobalParametersOf(node->op()).name();
@@ -104,24 +121,31 @@
   }
 
   // Load from constant type cell can benefit from type feedback.
-  Type* property_cell_value_type = Type::Tagged();
+  Type* property_cell_value_type = Type::NonInternal();
+  MachineRepresentation representation = MachineRepresentation::kTagged;
   if (property_details.cell_type() == PropertyCellType::kConstantType) {
     // Compute proper type based on the current value in the cell.
     if (property_cell_value->IsSmi()) {
       property_cell_value_type = type_cache_.kSmi;
+      representation = MachineRepresentation::kTaggedSigned;
     } else if (property_cell_value->IsNumber()) {
+      // TODO(mvstanton): Remove kHeapNumber from type cache, it's just
+      // Type::Number().
       property_cell_value_type = type_cache_.kHeapNumber;
+      representation = MachineRepresentation::kTaggedPointer;
     } else {
+      // TODO(turbofan): Track the property_cell_value_map on the FieldAccess
+      // below and use it in LoadElimination to eliminate map checks.
       Handle<Map> property_cell_value_map(
           Handle<HeapObject>::cast(property_cell_value)->map(), isolate());
-      property_cell_value_type =
-          Type::Class(property_cell_value_map, graph()->zone());
+      property_cell_value_type = Type::For(property_cell_value_map);
+      representation = MachineRepresentation::kTaggedPointer;
     }
   }
-  Node* value = effect = graph()->NewNode(
-      simplified()->LoadField(
-          AccessBuilder::ForPropertyCellValue(property_cell_value_type)),
-      jsgraph()->HeapConstant(property_cell), effect, control);
+  Node* value = effect =
+      graph()->NewNode(simplified()->LoadField(ForPropertyCellValue(
+                           representation, property_cell_value_type, name)),
+                       jsgraph()->HeapConstant(property_cell), effect, control);
   ReplaceWithValue(node, value, effect, control);
   return Replace(value);
 }
@@ -180,6 +204,7 @@
       // values' type doesn't match the type of the previous value in the cell.
       dependencies()->AssumePropertyCell(property_cell);
       Type* property_cell_value_type;
+      MachineRepresentation representation = MachineRepresentation::kTagged;
       if (property_cell_value->IsHeapObject()) {
         // We cannot do anything if the {property_cell_value}s map is no
         // longer stable.
@@ -189,23 +214,25 @@
         dependencies()->AssumeMapStable(property_cell_value_map);
 
         // Check that the {value} is a HeapObject.
-        value = effect = graph()->NewNode(simplified()->CheckTaggedPointer(),
+        value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
                                           value, effect, control);
 
         // Check {value} map agains the {property_cell} map.
         effect = graph()->NewNode(
             simplified()->CheckMaps(1), value,
             jsgraph()->HeapConstant(property_cell_value_map), effect, control);
-        property_cell_value_type = Type::TaggedPointer();
+        property_cell_value_type = Type::OtherInternal();
+        representation = MachineRepresentation::kTaggedPointer;
       } else {
         // Check that the {value} is a Smi.
-        value = effect = graph()->NewNode(simplified()->CheckTaggedSigned(),
-                                          value, effect, control);
-        property_cell_value_type = Type::TaggedSigned();
+        value = effect =
+            graph()->NewNode(simplified()->CheckSmi(), value, effect, control);
+        property_cell_value_type = Type::SignedSmall();
+        representation = MachineRepresentation::kTaggedSigned;
       }
       effect = graph()->NewNode(
-          simplified()->StoreField(
-              AccessBuilder::ForPropertyCellValue(property_cell_value_type)),
+          simplified()->StoreField(ForPropertyCellValue(
+              representation, property_cell_value_type, name)),
           jsgraph()->HeapConstant(property_cell), value, effect, control);
       break;
     }
@@ -219,7 +246,8 @@
         dependencies()->AssumePropertyCell(property_cell);
       }
       effect = graph()->NewNode(
-          simplified()->StoreField(AccessBuilder::ForPropertyCellValue()),
+          simplified()->StoreField(ForPropertyCellValue(
+              MachineRepresentation::kTagged, Type::NonInternal(), name)),
           jsgraph()->HeapConstant(property_cell), value, effect, control);
       break;
     }
@@ -251,7 +279,7 @@
   Handle<Context> script_context = ScriptContextTable::GetContext(
       script_context_table, lookup_result.context_index);
   result->context = script_context;
-  result->immutable = IsImmutableVariableMode(lookup_result.mode);
+  result->immutable = lookup_result.mode == CONST;
   result->index = lookup_result.slot_index;
   return true;
 }
diff --git a/src/compiler/js-global-object-specialization.h b/src/compiler/js-global-object-specialization.h
index 3ffc67a..a6c511e 100644
--- a/src/compiler/js-global-object-specialization.h
+++ b/src/compiler/js-global-object-specialization.h
@@ -12,8 +12,6 @@
 
 // Forward declarations.
 class CompilationDependencies;
-class TypeCache;
-
 
 namespace compiler {
 
@@ -22,7 +20,7 @@
 class JSGraph;
 class JSOperatorBuilder;
 class SimplifiedOperatorBuilder;
-
+class TypeCache;
 
 // Specializes a given JSGraph to a given global object, potentially constant
 // folding some {JSLoadGlobal} nodes or strength reducing some {JSStoreGlobal}
diff --git a/src/compiler/js-inlining-heuristic.cc b/src/compiler/js-inlining-heuristic.cc
index ce7b33b..5c626d1 100644
--- a/src/compiler/js-inlining-heuristic.cc
+++ b/src/compiler/js-inlining-heuristic.cc
@@ -4,14 +4,63 @@
 
 #include "src/compiler/js-inlining-heuristic.h"
 
-#include "src/compiler.h"
+#include "src/compilation-info.h"
+#include "src/compiler/common-operator.h"
 #include "src/compiler/node-matchers.h"
+#include "src/compiler/simplified-operator.h"
 #include "src/objects-inl.h"
 
 namespace v8 {
 namespace internal {
 namespace compiler {
 
+#define TRACE(...)                                      \
+  do {                                                  \
+    if (FLAG_trace_turbo_inlining) PrintF(__VA_ARGS__); \
+  } while (false)
+
+namespace {
+
+int CollectFunctions(Node* node, Handle<JSFunction>* functions,
+                     int functions_size) {
+  DCHECK_NE(0u, functions_size);
+  HeapObjectMatcher m(node);
+  if (m.HasValue() && m.Value()->IsJSFunction()) {
+    functions[0] = Handle<JSFunction>::cast(m.Value());
+    return 1;
+  }
+  if (m.IsPhi()) {
+    int const value_input_count = m.node()->op()->ValueInputCount();
+    if (value_input_count > functions_size) return 0;
+    for (int n = 0; n < value_input_count; ++n) {
+      HeapObjectMatcher m(node->InputAt(n));
+      if (!m.HasValue() || !m.Value()->IsJSFunction()) return 0;
+      functions[n] = Handle<JSFunction>::cast(m.Value());
+    }
+    return value_input_count;
+  }
+  return 0;
+}
+
+bool CanInlineFunction(Handle<JSFunction> function) {
+  // Built-in functions are handled by the JSBuiltinReducer.
+  if (function->shared()->HasBuiltinFunctionId()) return false;
+
+  // Don't inline builtins.
+  if (function->shared()->IsBuiltin()) return false;
+
+  // Quick check on the size of the AST to avoid parsing large candidates.
+  if (function->shared()->ast_node_count() > FLAG_max_inlined_nodes) {
+    return false;
+  }
+
+  // Avoid inlining across the boundary of asm.js code.
+  if (function->shared()->asm_function()) return false;
+  return true;
+}
+
+}  // namespace
+
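
A rough standalone model of the target collection above: a call site contributes either one known function (a constant callee) or up to kMaxCallPolymorphism functions (a phi whose inputs are all constants); anything else yields zero candidates. The types here are simplified stand-ins, not V8's Node/HeapObjectMatcher:

#include <iostream>
#include <optional>
#include <vector>

// Simplified callee model: either a single known function id, or a phi whose
// inputs are each a possibly-known function id.
struct Callee {
  std::optional<int> constant_function;        // set for a constant callee
  std::vector<std::optional<int>> phi_inputs;  // set for a phi callee
};

constexpr int kMaxCallPolymorphism = 4;  // same bound as the heuristic above

// Returns the collected function ids, or an empty vector if the call site is
// not suitable (unknown callee, too many phi inputs, or a non-constant input).
std::vector<int> CollectFunctions(const Callee& callee) {
  if (callee.constant_function) return {*callee.constant_function};
  if (!callee.phi_inputs.empty()) {
    if (callee.phi_inputs.size() > static_cast<size_t>(kMaxCallPolymorphism)) {
      return {};
    }
    std::vector<int> functions;
    for (const auto& input : callee.phi_inputs) {
      if (!input) return {};  // every phi input must be a known function
      functions.push_back(*input);
    }
    return functions;
  }
  return {};
}

int main() {
  Callee monomorphic{42, {}};
  Callee polymorphic{std::nullopt, {{1}, {2}, {3}}};
  Callee unknown{std::nullopt, {{1}, std::nullopt}};
  std::cout << CollectFunctions(monomorphic).size() << " "   // 1
            << CollectFunctions(polymorphic).size() << " "   // 3
            << CollectFunctions(unknown).size() << "\n";     // 0
  return 0;
}
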
 Reduction JSInliningHeuristic::Reduce(Node* node) {
   if (!IrOpcode::IsInlineeOpcode(node->opcode())) return NoChange();
 
@@ -19,14 +68,61 @@
   if (seen_.find(node->id()) != seen_.end()) return NoChange();
   seen_.insert(node->id());
 
+  // Check if the {node} is an appropriate candidate for inlining.
   Node* callee = node->InputAt(0);
-  HeapObjectMatcher match(callee);
-  if (!match.HasValue() || !match.Value()->IsJSFunction()) return NoChange();
-  Handle<JSFunction> function = Handle<JSFunction>::cast(match.Value());
+  Candidate candidate;
+  candidate.node = node;
+  candidate.num_functions =
+      CollectFunctions(callee, candidate.functions, kMaxCallPolymorphism);
+  if (candidate.num_functions == 0) {
+    return NoChange();
+  } else if (candidate.num_functions > 1 && !FLAG_polymorphic_inlining) {
+    TRACE(
+        "Not considering call site #%d:%s, because polymorphic inlining "
+        "is disabled\n",
+        node->id(), node->op()->mnemonic());
+    return NoChange();
+  }
 
   // Functions marked with %SetForceInlineFlag are immediately inlined.
-  if (function->shared()->force_inline()) {
-    return inliner_.ReduceJSCall(node, function);
+  bool can_inline = false, force_inline = true;
+  for (int i = 0; i < candidate.num_functions; ++i) {
+    Handle<JSFunction> function = candidate.functions[i];
+    if (!function->shared()->force_inline()) {
+      force_inline = false;
+    }
+    if (CanInlineFunction(function)) {
+      can_inline = true;
+    }
+  }
+  if (force_inline) return InlineCandidate(candidate);
+  if (!can_inline) return NoChange();
+
+  // Stop inlining once the maximum allowed level is reached.
+  int level = 0;
+  for (Node* frame_state = NodeProperties::GetFrameStateInput(node);
+       frame_state->opcode() == IrOpcode::kFrameState;
+       frame_state = NodeProperties::GetFrameStateInput(frame_state)) {
+    FrameStateInfo const& frame_info = OpParameter<FrameStateInfo>(frame_state);
+    if (FrameStateFunctionInfo::IsJSFunctionType(frame_info.type())) {
+      if (++level > FLAG_max_inlining_levels) {
+        TRACE(
+            "Not considering call site #%d:%s, because inlining depth "
+            "%d exceeds maximum allowed level %d\n",
+            node->id(), node->op()->mnemonic(), level,
+            FLAG_max_inlining_levels);
+        return NoChange();
+      }
+    }
+  }
+
+  // Gather feedback on how often this call site has been hit before.
+  if (node->opcode() == IrOpcode::kJSCallFunction) {
+    CallFunctionParameters const p = CallFunctionParametersOf(node->op());
+    candidate.frequency = p.frequency();
+  } else {
+    CallConstructParameters const p = CallConstructParametersOf(node->op());
+    candidate.frequency = p.frequency();
   }
 
   // Handling of special inlining modes right away:
@@ -36,75 +132,16 @@
     case kRestrictedInlining:
       return NoChange();
     case kStressInlining:
-      return inliner_.ReduceJSCall(node, function);
+      return InlineCandidate(candidate);
     case kGeneralInlining:
       break;
   }
 
-  // ---------------------------------------------------------------------------
-  // Everything below this line is part of the inlining heuristic.
-  // ---------------------------------------------------------------------------
-
-  // Built-in functions are handled by the JSBuiltinReducer.
-  if (function->shared()->HasBuiltinFunctionId()) return NoChange();
-
-  // Don't inline builtins.
-  if (function->shared()->IsBuiltin()) return NoChange();
-
-  // Quick check on source code length to avoid parsing large candidate.
-  if (function->shared()->SourceSize() > FLAG_max_inlined_source_size) {
-    return NoChange();
-  }
-
-  // Quick check on the size of the AST to avoid parsing large candidate.
-  if (function->shared()->ast_node_count() > FLAG_max_inlined_nodes) {
-    return NoChange();
-  }
-
-  // Avoid inlining within or across the boundary of asm.js code.
-  if (info_->shared_info()->asm_function()) return NoChange();
-  if (function->shared()->asm_function()) return NoChange();
-
-  // Stop inlinining once the maximum allowed level is reached.
-  int level = 0;
-  for (Node* frame_state = NodeProperties::GetFrameStateInput(node);
-       frame_state->opcode() == IrOpcode::kFrameState;
-       frame_state = NodeProperties::GetFrameStateInput(frame_state)) {
-    if (++level > FLAG_max_inlining_levels) return NoChange();
-  }
-
-  // Gather feedback on how often this call site has been hit before.
-  int calls = -1;  // Same default as CallICNexus::ExtractCallCount.
-  if (node->opcode() == IrOpcode::kJSCallFunction) {
-    CallFunctionParameters p = CallFunctionParametersOf(node->op());
-    if (p.feedback().IsValid()) {
-      CallICNexus nexus(p.feedback().vector(), p.feedback().slot());
-      calls = nexus.ExtractCallCount();
-    }
-  } else {
-    DCHECK_EQ(IrOpcode::kJSCallConstruct, node->opcode());
-    CallConstructParameters p = CallConstructParametersOf(node->op());
-    if (p.feedback().IsValid()) {
-      int const extra_index =
-          p.feedback().vector()->GetIndex(p.feedback().slot()) + 1;
-      Handle<Object> feedback_extra(p.feedback().vector()->get(extra_index),
-                                    function->GetIsolate());
-      if (feedback_extra->IsSmi()) {
-        calls = Handle<Smi>::cast(feedback_extra)->value();
-      }
-    }
-  }
-
-  // ---------------------------------------------------------------------------
-  // Everything above this line is part of the inlining heuristic.
-  // ---------------------------------------------------------------------------
-
   // In the general case we remember the candidate for later.
-  candidates_.insert({function, node, calls});
+  candidates_.insert(candidate);
   return NoChange();
 }
 
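
The depth limit enforced in Reduce above walks the frame-state chain outward from the call and counts only JS-function frames before refusing deeper candidates. A compact standalone sketch of that walk (the FrameState type here is a stand-in, not V8's):

#include <iostream>

// Stand-in for the outer frame-state chain: each entry records whether the
// frame belongs to a JS function (construct-stub / adaptor frames do not).
struct FrameState {
  bool is_js_function_frame;
  const FrameState* outer;  // nullptr terminates the chain
};

// Returns true when the call sits deeper than max_levels JS frames, i.e. the
// heuristic above would not consider it.
bool ExceedsInliningDepth(const FrameState* frame_state, int max_levels) {
  int level = 0;
  for (; frame_state != nullptr; frame_state = frame_state->outer) {
    if (frame_state->is_js_function_frame && ++level > max_levels) return true;
  }
  return false;
}

int main() {
  FrameState outer{true, nullptr};
  FrameState middle{true, &outer};
  FrameState inner{true, &middle};
  std::cout << ExceedsInliningDepth(&inner, 5) << " "    // 0: depth 3 <= 5
            << ExceedsInliningDepth(&inner, 2) << "\n";  // 1: depth 3 > 2
  return 0;
}
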
-
 void JSInliningHeuristic::Finalize() {
   if (candidates_.empty()) return;  // Nothing to do without candidates.
   if (FLAG_trace_turbo_inlining) PrintCandidates();
@@ -120,36 +157,147 @@
     candidates_.erase(i);
     // Make sure we don't try to inline dead candidate nodes.
     if (!candidate.node->IsDead()) {
-      Reduction r = inliner_.ReduceJSCall(candidate.node, candidate.function);
-      if (r.Changed()) {
-        cumulative_count_ += candidate.function->shared()->ast_node_count();
-        return;
-      }
+      Reduction const reduction = InlineCandidate(candidate);
+      if (reduction.Changed()) return;
     }
   }
 }
 
+Reduction JSInliningHeuristic::InlineCandidate(Candidate const& candidate) {
+  int const num_calls = candidate.num_functions;
+  Node* const node = candidate.node;
+  if (num_calls == 1) {
+    Handle<JSFunction> function = candidate.functions[0];
+    Reduction const reduction = inliner_.ReduceJSCall(node, function);
+    if (reduction.Changed()) {
+      cumulative_count_ += function->shared()->ast_node_count();
+    }
+    return reduction;
+  }
+
+  // Expand the JSCallFunction/JSCallConstruct node to a subgraph first if
+  // we have multiple known target functions.
+  DCHECK_LT(1, num_calls);
+  Node* calls[kMaxCallPolymorphism + 1];
+  Node* if_successes[kMaxCallPolymorphism];
+  Node* callee = NodeProperties::GetValueInput(node, 0);
+  Node* fallthrough_control = NodeProperties::GetControlInput(node);
+
+  // Set up the inputs for the cloned call nodes.
+  int const input_count = node->InputCount();
+  Node** inputs = graph()->zone()->NewArray<Node*>(input_count);
+  for (int i = 0; i < input_count; ++i) {
+    inputs[i] = node->InputAt(i);
+  }
+
+  // Create the appropriate control flow to dispatch to the cloned calls.
+  for (int i = 0; i < num_calls; ++i) {
+    Node* target = jsgraph()->HeapConstant(candidate.functions[i]);
+    if (i != (num_calls - 1)) {
+      Node* check =
+          graph()->NewNode(simplified()->ReferenceEqual(), callee, target);
+      Node* branch =
+          graph()->NewNode(common()->Branch(), check, fallthrough_control);
+      fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
+      if_successes[i] = graph()->NewNode(common()->IfTrue(), branch);
+    } else {
+      if_successes[i] = fallthrough_control;
+    }
+
+    // The first input to the call is the actual target (which we specialize
+    // to the known {target}); the last input is the control dependency.
+    inputs[0] = target;
+    inputs[input_count - 1] = if_successes[i];
+    calls[i] = graph()->NewNode(node->op(), input_count, inputs);
+    if_successes[i] = graph()->NewNode(common()->IfSuccess(), calls[i]);
+  }
+
+  // Check if we have an exception projection for the call {node}.
+  Node* if_exception = nullptr;
+  for (Edge const edge : node->use_edges()) {
+    if (NodeProperties::IsControlEdge(edge) &&
+        edge.from()->opcode() == IrOpcode::kIfException) {
+      if_exception = edge.from();
+      break;
+    }
+  }
+  if (if_exception != nullptr) {
+    // Morph the {if_exception} projection into a join.
+    Node* if_exceptions[kMaxCallPolymorphism + 1];
+    for (int i = 0; i < num_calls; ++i) {
+      if_exceptions[i] =
+          graph()->NewNode(common()->IfException(), calls[i], calls[i]);
+    }
+    Node* exception_control =
+        graph()->NewNode(common()->Merge(num_calls), num_calls, if_exceptions);
+    if_exceptions[num_calls] = exception_control;
+    Node* exception_effect = graph()->NewNode(common()->EffectPhi(num_calls),
+                                              num_calls + 1, if_exceptions);
+    Node* exception_value = graph()->NewNode(
+        common()->Phi(MachineRepresentation::kTagged, num_calls), num_calls + 1,
+        if_exceptions);
+    ReplaceWithValue(if_exception, exception_value, exception_effect,
+                     exception_control);
+  }
+
+  // Morph the call site into the dispatched call sites.
+  Node* control =
+      graph()->NewNode(common()->Merge(num_calls), num_calls, if_successes);
+  calls[num_calls] = control;
+  Node* effect =
+      graph()->NewNode(common()->EffectPhi(num_calls), num_calls + 1, calls);
+  Node* value =
+      graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, num_calls),
+                       num_calls + 1, calls);
+  ReplaceWithValue(node, value, effect, control);
+
+  // Inline the individual, cloned call sites.
+  for (int i = 0; i < num_calls; ++i) {
+    Handle<JSFunction> function = candidate.functions[i];
+    Node* node = calls[i];
+    Reduction const reduction = inliner_.ReduceJSCall(node, function);
+    if (reduction.Changed()) {
+      cumulative_count_ += function->shared()->ast_node_count();
+    }
+  }
+
+  return Replace(value);
+}
 
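
InlineCandidate above clones the call once per known target and dispatches with a chain of ReferenceEqual branches; only the last target is taken on plain fallthrough control, since target collection guarantees the callee is one of the known functions. A standalone sketch of the resulting dispatch order (plain ints stand in for the cloned call nodes):

#include <cassert>
#include <iostream>
#include <vector>

// "Dispatch" walks the branch chain that the generated control flow encodes:
// compare against every target except the last, which is reached on
// fallthrough without an explicit check.
int DispatchCloneIndex(int callee, const std::vector<int>& targets) {
  assert(!targets.empty());
  for (size_t i = 0; i + 1 < targets.size(); ++i) {
    if (callee == targets[i]) return static_cast<int>(i);  // IfTrue branch
    // IfFalse: fall through to the next comparison.
  }
  return static_cast<int>(targets.size()) - 1;  // fallthrough clone
}

int main() {
  std::vector<int> targets = {7, 11, 13};
  std::cout << DispatchCloneIndex(11, targets) << " "    // 1
            << DispatchCloneIndex(13, targets) << "\n";  // 2 (no check emitted)
  return 0;
}
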
 bool JSInliningHeuristic::CandidateCompare::operator()(
     const Candidate& left, const Candidate& right) const {
-  if (left.calls != right.calls) {
-    return left.calls > right.calls;
+  if (left.frequency > right.frequency) {
+    return true;
+  } else if (left.frequency < right.frequency) {
+    return false;
+  } else {
+    return left.node->id() > right.node->id();
   }
-  return left.node < right.node;
 }
 
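
Candidates are now ordered by the feedback-derived call frequency rather than a raw call count, with the node id as a deterministic tie-breaker. A small standalone illustration of that ordering in a std::set (the Candidate fields are reduced to what the comparator reads):

#include <iostream>
#include <set>

struct Candidate {
  int node_id;
  float frequency;
};

// Mirrors CandidateCompare above: hotter call sites order first; ties are
// broken by node id so iteration order stays deterministic.
struct CandidateCompare {
  bool operator()(const Candidate& left, const Candidate& right) const {
    if (left.frequency > right.frequency) return true;
    if (left.frequency < right.frequency) return false;
    return left.node_id > right.node_id;
  }
};

int main() {
  std::set<Candidate, CandidateCompare> candidates;
  candidates.insert({10, 0.5f});
  candidates.insert({20, 2.0f});
  candidates.insert({30, 0.5f});
  for (const Candidate& c : candidates) {
    std::cout << "#" << c.node_id << " frequency:" << c.frequency << "\n";
  }
  // Prints #20 first (hottest), then #30, then #10 (same frequency,
  // higher node id first).
  return 0;
}
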
-
 void JSInliningHeuristic::PrintCandidates() {
   PrintF("Candidates for inlining (size=%zu):\n", candidates_.size());
   for (const Candidate& candidate : candidates_) {
-    PrintF("  id:%d, calls:%d, size[source]:%d, size[ast]:%d / %s\n",
-           candidate.node->id(), candidate.calls,
-           candidate.function->shared()->SourceSize(),
-           candidate.function->shared()->ast_node_count(),
-           candidate.function->shared()->DebugName()->ToCString().get());
+    PrintF("  #%d:%s, frequency:%g\n", candidate.node->id(),
+           candidate.node->op()->mnemonic(), candidate.frequency);
+    for (int i = 0; i < candidate.num_functions; ++i) {
+      Handle<JSFunction> function = candidate.functions[i];
+      PrintF("  - size:%d, name: %s\n", function->shared()->ast_node_count(),
+             function->shared()->DebugName()->ToCString().get());
+    }
   }
 }
 
+Graph* JSInliningHeuristic::graph() const { return jsgraph()->graph(); }
+
+CommonOperatorBuilder* JSInliningHeuristic::common() const {
+  return jsgraph()->common();
+}
+
+SimplifiedOperatorBuilder* JSInliningHeuristic::simplified() const {
+  return jsgraph()->simplified();
+}
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
diff --git a/src/compiler/js-inlining-heuristic.h b/src/compiler/js-inlining-heuristic.h
index 7f57747..367e35a 100644
--- a/src/compiler/js-inlining-heuristic.h
+++ b/src/compiler/js-inlining-heuristic.h
@@ -21,7 +21,7 @@
         inliner_(editor, local_zone, info, jsgraph),
         candidates_(local_zone),
         seen_(local_zone),
-        info_(info) {}
+        jsgraph_(jsgraph) {}
 
   Reduction Reduce(Node* node) final;
 
@@ -30,10 +30,15 @@
   void Finalize() final;
 
  private:
+  // This limit currently matches what Crankshaft does. We may want to
+  // re-evaluate and come up with a proper limit for TurboFan.
+  static const int kMaxCallPolymorphism = 4;
+
   struct Candidate {
-    Handle<JSFunction> function;  // The call target being inlined.
-    Node* node;                   // The call site at which to inline.
-    int calls;                    // Number of times the call site was hit.
+    Handle<JSFunction> functions[kMaxCallPolymorphism];
+    int num_functions;
+    Node* node = nullptr;    // The call site at which to inline.
+    float frequency = 0.0f;  // Relative frequency of this call site.
   };
 
   // Comparator for candidates.
@@ -46,12 +51,18 @@
 
   // Dumps candidates to console.
   void PrintCandidates();
+  Reduction InlineCandidate(Candidate const& candidate);
+
+  CommonOperatorBuilder* common() const;
+  Graph* graph() const;
+  JSGraph* jsgraph() const { return jsgraph_; }
+  SimplifiedOperatorBuilder* simplified() const;
 
   Mode const mode_;
   JSInliner inliner_;
   Candidates candidates_;
   ZoneSet<NodeId> seen_;
-  CompilationInfo* info_;
+  JSGraph* const jsgraph_;
   int cumulative_count_ = 0;
 };
 
diff --git a/src/compiler/js-inlining.cc b/src/compiler/js-inlining.cc
index 635daa4..58e5a27 100644
--- a/src/compiler/js-inlining.cc
+++ b/src/compiler/js-inlining.cc
@@ -6,10 +6,12 @@
 
 #include "src/ast/ast-numbering.h"
 #include "src/ast/ast.h"
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
+#include "src/compiler/all-nodes.h"
 #include "src/compiler/ast-graph-builder.h"
 #include "src/compiler/ast-loop-assignment-analyzer.h"
+#include "src/compiler/bytecode-graph-builder.h"
 #include "src/compiler/common-operator.h"
 #include "src/compiler/graph-reducer.h"
 #include "src/compiler/js-operator.h"
@@ -68,13 +70,20 @@
     return call_->op()->ValueInputCount() - 2;
   }
 
+  float frequency() const {
+    return (call_->opcode() == IrOpcode::kJSCallFunction)
+               ? CallFunctionParametersOf(call_->op()).frequency()
+               : CallConstructParametersOf(call_->op()).frequency();
+  }
+
  private:
   Node* call_;
 };
 
-
 Reduction JSInliner::InlineCall(Node* call, Node* new_target, Node* context,
-                                Node* frame_state, Node* start, Node* end) {
+                                Node* frame_state, Node* start, Node* end,
+                                Node* exception_target,
+                                const NodeVector& uncaught_subcalls) {
   // The scheduler is smart enough to place our code; we just ensure {control}
   // becomes the control input of the start of the inlinee, and {effect} becomes
   // the effect input of the start of the inlinee.
@@ -131,6 +140,44 @@
     }
   }
 
+  if (exception_target != nullptr) {
+    // Link uncaught calls in the inlinee to {exception_target}
+    int subcall_count = static_cast<int>(uncaught_subcalls.size());
+    if (subcall_count > 0) {
+      TRACE(
+          "Inlinee contains %d calls without IfException; "
+          "linking to existing IfException\n",
+          subcall_count);
+    }
+    NodeVector on_exception_nodes(local_zone_);
+    for (Node* subcall : uncaught_subcalls) {
+      Node* on_exception =
+          graph()->NewNode(common()->IfException(), subcall, subcall);
+      on_exception_nodes.push_back(on_exception);
+    }
+
+    DCHECK_EQ(subcall_count, static_cast<int>(on_exception_nodes.size()));
+    if (subcall_count > 0) {
+      Node* control_output =
+          graph()->NewNode(common()->Merge(subcall_count), subcall_count,
+                           &on_exception_nodes.front());
+      NodeVector values_effects(local_zone_);
+      values_effects = on_exception_nodes;
+      values_effects.push_back(control_output);
+      Node* value_output = graph()->NewNode(
+          common()->Phi(MachineRepresentation::kTagged, subcall_count),
+          subcall_count + 1, &values_effects.front());
+      Node* effect_output =
+          graph()->NewNode(common()->EffectPhi(subcall_count),
+                           subcall_count + 1, &values_effects.front());
+      ReplaceWithValue(exception_target, value_output, effect_output,
+                       control_output);
+    } else {
+      ReplaceWithValue(exception_target, exception_target, exception_target,
+                       jsgraph()->Dead());
+    }
+  }
+
   NodeVector values(local_zone_);
   NodeVector effects(local_zone_);
   NodeVector controls(local_zone_);
@@ -235,6 +282,56 @@
 
 namespace {
 
+// TODO(bmeurer): Unify this with the witness helper functions in the
+// js-builtin-reducer.cc once we have a better understanding of the
+// map tracking we want to do, and eventually changed the CheckMaps
+// operator to carry map constants on the operator instead of inputs.
+// I.e. if the CheckMaps has some kind of SmallMapSet as operator
+// parameter, then this could be changed to call a generic
+//
+//   SmallMapSet NodeProperties::CollectMapWitness(receiver, effect)
+//
+// function, which either returns the map set from the CheckMaps or
+// a singleton set from a StoreField.
+bool NeedsConvertReceiver(Node* receiver, Node* effect) {
+  for (Node* dominator = effect;;) {
+    if (dominator->opcode() == IrOpcode::kCheckMaps &&
+        dominator->InputAt(0) == receiver) {
+      // Check that all maps checked here are JSReceiver maps.
+      for (int i = 1; i < dominator->op()->ValueInputCount(); ++i) {
+        HeapObjectMatcher m(NodeProperties::GetValueInput(dominator, i));
+        if (!m.HasValue()) return true;
+        Handle<Map> const map = Handle<Map>::cast(m.Value());
+        if (!map->IsJSReceiverMap()) return true;
+      }
+      return false;
+    }
+    switch (dominator->opcode()) {
+      case IrOpcode::kStoreField: {
+        FieldAccess const& access = FieldAccessOf(dominator->op());
+        if (access.base_is_tagged == kTaggedBase &&
+            access.offset == HeapObject::kMapOffset) {
+          return true;
+        }
+        break;
+      }
+      case IrOpcode::kStoreElement:
+      case IrOpcode::kStoreTypedElement:
+        break;
+      default: {
+        DCHECK_EQ(1, dominator->op()->EffectOutputCount());
+        if (dominator->op()->EffectInputCount() != 1 ||
+            !dominator->op()->HasProperty(Operator::kNoWrite)) {
+          // Didn't find any appropriate CheckMaps node.
+          return true;
+        }
+        break;
+      }
+    }
+    dominator = NodeProperties::GetEffectInput(dominator);
+  }
+}
+
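
NeedsConvertReceiver above walks the effect chain backwards from the call, looking for a CheckMaps on the receiver whose maps are all JSReceiver maps; stores to the map slot or unknown effectful nodes end the search conservatively. A standalone sketch of that walk over a toy effect chain (the node model is made up for illustration and is not V8's API):

#include <iostream>

// Toy effect-chain node: just enough structure to show the walk.
enum class Op { kCheckMaps, kStoreMapField, kPureLoad, kOtherEffect };

struct EffectNode {
  Op op;
  bool checks_this_receiver = false;       // only meaningful for kCheckMaps
  bool all_maps_are_js_receiver = false;   // only meaningful for kCheckMaps
  const EffectNode* effect_input = nullptr;
};

// Conversion can be skipped only if we hit a CheckMaps on the receiver whose
// maps are all JSReceiver maps before anything that might change its map.
bool NeedsConvertReceiver(const EffectNode* effect) {
  for (const EffectNode* dominator = effect; dominator != nullptr;
       dominator = dominator->effect_input) {
    switch (dominator->op) {
      case Op::kCheckMaps:
        if (dominator->checks_this_receiver) {
          return !dominator->all_maps_are_js_receiver;
        }
        break;  // a CheckMaps on some other value: keep walking
      case Op::kStoreMapField:
        return true;  // the receiver's map may have changed
      case Op::kPureLoad:
        break;  // harmless, keep walking
      case Op::kOtherEffect:
        return true;  // unknown effectful node: be conservative
    }
  }
  return true;  // ran out of chain without finding a witness
}

int main() {
  EffectNode check{Op::kCheckMaps, true, true, nullptr};
  EffectNode load{Op::kPureLoad, false, false, &check};
  EffectNode store{Op::kStoreMapField, false, false, &check};
  std::cout << NeedsConvertReceiver(&load) << " "     // 0: witness found
            << NeedsConvertReceiver(&store) << "\n";  // 1: map may change
  return 0;
}
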
 // TODO(mstarzinger,verwaest): Move this predicate onto SharedFunctionInfo?
 bool NeedsImplicitReceiver(Handle<SharedFunctionInfo> shared_info) {
   DisallowHeapAllocation no_gc;
@@ -270,7 +367,6 @@
   return ReduceJSCall(node, function);
 }
 
-
 Reduction JSInliner::ReduceJSCall(Node* node, Handle<JSFunction> function) {
   DCHECK(IrOpcode::IsInlineeOpcode(node->opcode()));
   JSCallAccessor call(node);
@@ -344,12 +440,35 @@
     }
   }
 
-  // TODO(turbofan): Inlining into a try-block is not yet supported.
-  if (NodeProperties::IsExceptionalCall(node)) {
-    TRACE("Not inlining %s into %s because of surrounding try-block\n",
+  // Find the IfException node, if any.
+  Node* exception_target = nullptr;
+  for (Edge edge : node->use_edges()) {
+    if (NodeProperties::IsControlEdge(edge) &&
+        edge.from()->opcode() == IrOpcode::kIfException) {
+      DCHECK_NULL(exception_target);
+      exception_target = edge.from();
+    }
+  }
+
+  NodeVector uncaught_subcalls(local_zone_);
+
+  if (exception_target != nullptr) {
+    if (!FLAG_inline_into_try) {
+      TRACE(
+          "Try block surrounds #%d:%s and --no-inline-into-try active, so not "
+          "inlining %s into %s.\n",
+          exception_target->id(), exception_target->op()->mnemonic(),
           shared_info->DebugName()->ToCString().get(),
           info_->shared_info()->DebugName()->ToCString().get());
-    return NoChange();
+      return NoChange();
+    } else {
+      TRACE(
+          "Inlining %s into %s regardless of surrounding try-block to catcher "
+          "#%d:%s\n",
+          shared_info->DebugName()->ToCString().get(),
+          info_->shared_info()->DebugName()->ToCString().get(),
+          exception_target->id(), exception_target->op()->mnemonic());
+    }
   }
 
   Zone zone(info_->isolate()->allocator());
@@ -357,8 +476,20 @@
   CompilationInfo info(&parse_info, function);
   if (info_->is_deoptimization_enabled()) info.MarkAsDeoptimizationEnabled();
   if (info_->is_type_feedback_enabled()) info.MarkAsTypeFeedbackEnabled();
+  if (info_->is_optimizing_from_bytecode()) info.MarkAsOptimizeFromBytecode();
 
-  if (!Compiler::ParseAndAnalyze(info.parse_info())) {
+  if (info.is_optimizing_from_bytecode() && !Compiler::EnsureBytecode(&info)) {
+    TRACE("Not inlining %s into %s because bytecode generation failed\n",
+          shared_info->DebugName()->ToCString().get(),
+          info_->shared_info()->DebugName()->ToCString().get());
+    if (info_->isolate()->has_pending_exception()) {
+      info_->isolate()->clear_pending_exception();
+    }
+    return NoChange();
+  }
+
+  if (!info.is_optimizing_from_bytecode() &&
+      !Compiler::ParseAndAnalyze(info.parse_info())) {
     TRACE("Not inlining %s into %s because parsing failed\n",
           shared_info->DebugName()->ToCString().get(),
           info_->shared_info()->DebugName()->ToCString().get());
@@ -368,7 +499,8 @@
     return NoChange();
   }
 
-  if (!Compiler::EnsureDeoptimizationSupport(&info)) {
+  if (!info.is_optimizing_from_bytecode() &&
+      !Compiler::EnsureDeoptimizationSupport(&info)) {
     TRACE("Not inlining %s into %s because deoptimization support failed\n",
           shared_info->DebugName()->ToCString().get(),
           info_->shared_info()->DebugName()->ToCString().get());
@@ -388,13 +520,23 @@
         shared_info->DebugName()->ToCString().get(),
         info_->shared_info()->DebugName()->ToCString().get());
 
-  // If function was lazily compiled, it's literals array may not yet be set up.
+  // If function was lazily compiled, its literals array may not yet be set up.
   JSFunction::EnsureLiterals(function);
 
   // Create the subgraph for the inlinee.
   Node* start;
   Node* end;
-  {
+  if (info.is_optimizing_from_bytecode()) {
+    // Run the BytecodeGraphBuilder to create the subgraph.
+    Graph::SubgraphScope scope(graph());
+    BytecodeGraphBuilder graph_builder(&zone, &info, jsgraph(),
+                                       call.frequency());
+    graph_builder.CreateGraph();
+
+    // Extract the inlinee start/end nodes.
+    start = graph()->start();
+    end = graph()->end();
+  } else {
     // Run the loop assignment analyzer on the inlinee.
     AstLoopAssignmentAnalyzer loop_assignment_analyzer(&zone, &info);
     LoopAssignmentAnalysis* loop_assignment =
@@ -407,8 +549,8 @@
 
     // Run the AstGraphBuilder to create the subgraph.
     Graph::SubgraphScope scope(graph());
-    AstGraphBuilder graph_builder(&zone, &info, jsgraph(), loop_assignment,
-                                  type_hint_analysis);
+    AstGraphBuilder graph_builder(&zone, &info, jsgraph(), call.frequency(),
+                                  loop_assignment, type_hint_analysis);
     graph_builder.CreateGraph(false);
 
     // Extract the inlinee start/end nodes.
@@ -416,6 +558,29 @@
     end = graph()->end();
   }
 
+  if (exception_target != nullptr) {
+    // Find all uncaught 'calls' in the inlinee.
+    AllNodes inlined_nodes(local_zone_, end, graph());
+    for (Node* subnode : inlined_nodes.reachable) {
+      // Every possibly throwing node with an IfSuccess should get an
+      // IfException.
+      if (subnode->op()->HasProperty(Operator::kNoThrow)) {
+        continue;
+      }
+      bool hasIfException = false;
+      for (Node* use : subnode->uses()) {
+        if (use->opcode() == IrOpcode::kIfException) {
+          hasIfException = true;
+          break;
+        }
+      }
+      if (!hasIfException) {
+        DCHECK_EQ(2, subnode->op()->ControlOutputCount());
+        uncaught_subcalls.push_back(subnode);
+      }
+    }
+  }
+
   Node* frame_state = call.frame_state();
   Node* new_target = jsgraph()->UndefinedConstant();
 
@@ -475,15 +640,17 @@
   // in that frame state though, as the conversion of the receiver can be repeated
   // any number of times, it's not observable.
   if (node->opcode() == IrOpcode::kJSCallFunction &&
-      is_sloppy(parse_info.language_mode()) && !shared_info->native()) {
-    const CallFunctionParameters& p = CallFunctionParametersOf(node->op());
-    Node* frame_state_before = NodeProperties::FindFrameStateBefore(node);
+      is_sloppy(shared_info->language_mode()) && !shared_info->native()) {
     Node* effect = NodeProperties::GetEffectInput(node);
-    Node* convert = graph()->NewNode(
-        javascript()->ConvertReceiver(p.convert_mode()), call.receiver(),
-        context, frame_state_before, effect, start);
-    NodeProperties::ReplaceValueInput(node, convert, 1);
-    NodeProperties::ReplaceEffectInput(node, convert);
+    if (NeedsConvertReceiver(call.receiver(), effect)) {
+      const CallFunctionParameters& p = CallFunctionParametersOf(node->op());
+      Node* frame_state_before = NodeProperties::FindFrameStateBefore(node);
+      Node* convert = effect = graph()->NewNode(
+          javascript()->ConvertReceiver(p.convert_mode()), call.receiver(),
+          context, frame_state_before, effect, start);
+      NodeProperties::ReplaceValueInput(node, convert, 1);
+      NodeProperties::ReplaceEffectInput(node, effect);
+    }
   }
 
   // If we are inlining a JS call at tail position then we have to pop current
@@ -504,7 +671,7 @@
   // count (i.e. value outputs of start node minus target, receiver, new target,
   // arguments count and context) have to match the number of arguments passed
   // to the call.
-  int parameter_count = info.literal()->parameter_count();
+  int parameter_count = shared_info->internal_formal_parameter_count();
   DCHECK_EQ(parameter_count, start->op()->ValueOutputCount() - 5);
   if (call.formal_arguments() != parameter_count) {
     frame_state = CreateArtificialFrameState(
@@ -512,7 +679,8 @@
         FrameStateType::kArgumentsAdaptor, shared_info);
   }
 
-  return InlineCall(node, new_target, context, frame_state, start, end);
+  return InlineCall(node, new_target, context, frame_state, start, end,
+                    exception_target, uncaught_subcalls);
 }
 
 Graph* JSInliner::graph() const { return jsgraph()->graph(); }
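
When the call being inlined sits inside a try-block, the inliner above collects every possibly-throwing node in the inlinee that has no IfException use of its own, so those can be wired to the caller's handler. A standalone sketch of that scan (again over a toy node model, not V8's):

#include <iostream>
#include <vector>

// Toy node: whether it can throw, and whether one of its uses is already an
// IfException projection that catches inside the inlinee.
struct InlineeNode {
  int id;
  bool can_throw;
  bool has_if_exception_use;
};

// Mirrors the scan above: every throwing node without its own IfException
// projection must be linked to the caller's exception handler.
std::vector<int> FindUncaughtSubcalls(const std::vector<InlineeNode>& nodes) {
  std::vector<int> uncaught;
  for (const InlineeNode& node : nodes) {
    if (!node.can_throw) continue;            // kNoThrow operators are skipped
    if (node.has_if_exception_use) continue;  // already handled in the inlinee
    uncaught.push_back(node.id);
  }
  return uncaught;
}

int main() {
  std::vector<InlineeNode> nodes = {
      {1, true, false},   // uncaught call: must be linked to the outer handler
      {2, true, true},    // caught inside the inlinee
      {3, false, false},  // cannot throw
  };
  for (int id : FindUncaughtSubcalls(nodes)) std::cout << "#" << id << "\n";
  return 0;
}
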
diff --git a/src/compiler/js-inlining.h b/src/compiler/js-inlining.h
index 49487f5..323c3ae 100644
--- a/src/compiler/js-inlining.h
+++ b/src/compiler/js-inlining.h
@@ -54,7 +54,9 @@
   Node* CreateTailCallerFrameState(Node* node, Node* outer_frame_state);
 
   Reduction InlineCall(Node* call, Node* new_target, Node* context,
-                       Node* frame_state, Node* start, Node* end);
+                       Node* frame_state, Node* start, Node* end,
+                       Node* exception_target,
+                       const NodeVector& uncaught_subcalls);
 };
 
 }  // namespace compiler
diff --git a/src/compiler/js-intrinsic-lowering.cc b/src/compiler/js-intrinsic-lowering.cc
index 3324508..7fc50e5 100644
--- a/src/compiler/js-intrinsic-lowering.cc
+++ b/src/compiler/js-intrinsic-lowering.cc
@@ -302,10 +302,10 @@
 
 Reduction JSIntrinsicLowering::ReduceCall(Node* node) {
   size_t const arity = CallRuntimeParametersOf(node->op()).arity();
-  NodeProperties::ChangeOp(node,
-                           javascript()->CallFunction(arity, VectorSlotPair(),
-                                                      ConvertReceiverMode::kAny,
-                                                      TailCallMode::kDisallow));
+  NodeProperties::ChangeOp(
+      node, javascript()->CallFunction(arity, 0.0f, VectorSlotPair(),
+                                       ConvertReceiverMode::kAny,
+                                       TailCallMode::kDisallow));
   return Changed(node);
 }
 
diff --git a/src/compiler/js-native-context-specialization.cc b/src/compiler/js-native-context-specialization.cc
index b76744e..ab20d93 100644
--- a/src/compiler/js-native-context-specialization.cc
+++ b/src/compiler/js-native-context-specialization.cc
@@ -13,9 +13,9 @@
 #include "src/compiler/js-operator.h"
 #include "src/compiler/linkage.h"
 #include "src/compiler/node-matchers.h"
+#include "src/compiler/type-cache.h"
 #include "src/field-index-inl.h"
 #include "src/isolate-inl.h"
-#include "src/type-cache.h"
 #include "src/type-feedback-vector.h"
 
 namespace v8 {
@@ -70,6 +70,8 @@
 
 Reduction JSNativeContextSpecialization::Reduce(Node* node) {
   switch (node->opcode()) {
+    case IrOpcode::kJSInstanceOf:
+      return ReduceJSInstanceOf(node);
     case IrOpcode::kJSLoadContext:
       return ReduceJSLoadContext(node);
     case IrOpcode::kJSLoadNamed:
@@ -86,6 +88,99 @@
   return NoChange();
 }
 
+Reduction JSNativeContextSpecialization::ReduceJSInstanceOf(Node* node) {
+  DCHECK_EQ(IrOpcode::kJSInstanceOf, node->opcode());
+  Node* object = NodeProperties::GetValueInput(node, 0);
+  Node* constructor = NodeProperties::GetValueInput(node, 1);
+  Node* context = NodeProperties::GetContextInput(node);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+
+  // Retrieve the native context from the given {node}.
+  Handle<Context> native_context;
+  if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
+
+  // If deoptimization is disabled, we cannot optimize.
+  if (!(flags() & kDeoptimizationEnabled)) return NoChange();
+
+  // Check if the right hand side is a known {receiver}.
+  HeapObjectMatcher m(constructor);
+  if (!m.HasValue() || !m.Value()->IsJSObject()) return NoChange();
+  Handle<JSObject> receiver = Handle<JSObject>::cast(m.Value());
+  Handle<Map> receiver_map(receiver->map(), isolate());
+
+  // Compute property access info for @@hasInstance on {receiver}.
+  PropertyAccessInfo access_info;
+  AccessInfoFactory access_info_factory(dependencies(), native_context,
+                                        graph()->zone());
+  if (!access_info_factory.ComputePropertyAccessInfo(
+          receiver_map, factory()->has_instance_symbol(), AccessMode::kLoad,
+          &access_info)) {
+    return NoChange();
+  }
+
+  if (access_info.IsNotFound()) {
+    // If there's no @@hasInstance handler, the OrdinaryHasInstance operation
+    // takes over, but that requires the {receiver} to be callable.
+    if (receiver->IsCallable()) {
+      // Determine actual holder and perform prototype chain checks.
+      Handle<JSObject> holder;
+      if (access_info.holder().ToHandle(&holder)) {
+        AssumePrototypesStable(access_info.receiver_maps(), native_context,
+                               holder);
+      }
+
+      // Monomorphic property access.
+      effect =
+          BuildCheckMaps(constructor, effect, control, MapList{receiver_map});
+
+      // Lower to OrdinaryHasInstance(C, O).
+      NodeProperties::ReplaceValueInput(node, constructor, 0);
+      NodeProperties::ReplaceValueInput(node, object, 1);
+      NodeProperties::ReplaceEffectInput(node, effect);
+      NodeProperties::ChangeOp(node, javascript()->OrdinaryHasInstance());
+      return Changed(node);
+    }
+  } else if (access_info.IsDataConstant()) {
+    DCHECK(access_info.constant()->IsCallable());
+
+    // Determine actual holder and perform prototype chain checks.
+    Handle<JSObject> holder;
+    if (access_info.holder().ToHandle(&holder)) {
+      AssumePrototypesStable(access_info.receiver_maps(), native_context,
+                             holder);
+    }
+
+    // Monomorphic property access.
+    effect =
+        BuildCheckMaps(constructor, effect, control, MapList{receiver_map});
+
+    // Call the @@hasInstance handler.
+    Node* target = jsgraph()->Constant(access_info.constant());
+    node->InsertInput(graph()->zone(), 0, target);
+    node->ReplaceInput(1, constructor);
+    node->ReplaceInput(2, object);
+    node->ReplaceInput(5, effect);
+    NodeProperties::ChangeOp(
+        node,
+        javascript()->CallFunction(3, 0.0f, VectorSlotPair(),
+                                   ConvertReceiverMode::kNotNullOrUndefined));
+
+    // Rewire the value uses of {node} to ToBoolean conversion of the result.
+    Node* value = graph()->NewNode(javascript()->ToBoolean(ToBooleanHint::kAny),
+                                   node, context);
+    for (Edge edge : node->use_edges()) {
+      if (NodeProperties::IsValueEdge(edge) && edge.from() != value) {
+        edge.UpdateTo(value);
+        Revisit(edge.from());
+      }
+    }
+    return Changed(node);
+  }
+
+  return NoChange();
+}
+
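
The new ReduceJSInstanceOf picks between two lowerings once the right-hand side is a known object: with no @@hasInstance handler and a callable receiver it falls back to OrdinaryHasInstance, and with a constant callable handler it calls the handler and converts the result with ToBoolean. A standalone sketch of just that decision (struct and enum names are illustrative):

#include <iostream>

// Illustrative summary of the access info the reducer inspects.
struct HasInstanceLookup {
  bool handler_not_found;         // no @@hasInstance property at all
  bool handler_is_data_constant;  // a known, constant handler function
  bool receiver_is_callable;      // the constructor itself is callable
};

enum class InstanceOfLowering {
  kNoChange,                 // leave the generic JSInstanceOf in place
  kOrdinaryHasInstance,      // no handler: use the default algorithm
  kCallHandlerThenToBoolean  // call the constant handler, booleanize result
};

InstanceOfLowering ChooseLowering(const HasInstanceLookup& info) {
  if (info.handler_not_found) {
    return info.receiver_is_callable ? InstanceOfLowering::kOrdinaryHasInstance
                                     : InstanceOfLowering::kNoChange;
  }
  if (info.handler_is_data_constant) {
    return InstanceOfLowering::kCallHandlerThenToBoolean;
  }
  return InstanceOfLowering::kNoChange;
}

int main() {
  HasInstanceLookup plain_function{true, false, true};
  HasInstanceLookup custom_handler{false, true, true};
  std::cout << (ChooseLowering(plain_function) ==
                InstanceOfLowering::kOrdinaryHasInstance)
            << " "
            << (ChooseLowering(custom_handler) ==
                InstanceOfLowering::kCallHandlerThenToBoolean)
            << "\n";  // prints "1 1"
  return 0;
}
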
 Reduction JSNativeContextSpecialization::ReduceJSLoadContext(Node* node) {
   DCHECK_EQ(IrOpcode::kJSLoadContext, node->opcode());
   ContextAccess const& access = ContextAccessOf(node->op());
@@ -168,7 +263,7 @@
                                            receiver, effect, control);
     } else {
       // Monomorphic property access.
-      effect = BuildCheckTaggedPointer(receiver, effect, control);
+      effect = BuildCheckHeapObject(receiver, effect, control);
       effect = BuildCheckMaps(receiver, effect, control,
                               access_info.receiver_maps());
     }
@@ -206,7 +301,7 @@
       receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
       receiverissmi_effect = effect;
     } else {
-      effect = BuildCheckTaggedPointer(receiver, effect, control);
+      effect = BuildCheckHeapObject(receiver, effect, control);
     }
 
     // Load the {receiver} map. The resulting effect is the dominating effect
@@ -510,7 +605,7 @@
     }
 
     // Ensure that {receiver} is a heap object.
-    effect = BuildCheckTaggedPointer(receiver, effect, control);
+    effect = BuildCheckHeapObject(receiver, effect, control);
 
     // Check for the monomorphic case.
     if (access_infos.size() == 1) {
@@ -818,13 +913,14 @@
     DCHECK_EQ(AccessMode::kLoad, access_mode);
     value = jsgraph()->UndefinedConstant();
   } else if (access_info.IsDataConstant()) {
-    value = jsgraph()->Constant(access_info.constant());
+    Node* constant_value = jsgraph()->Constant(access_info.constant());
     if (access_mode == AccessMode::kStore) {
-      Node* check =
-          graph()->NewNode(simplified()->ReferenceEqual(), value, value);
+      Node* check = graph()->NewNode(simplified()->ReferenceEqual(), value,
+                                     constant_value);
       effect =
           graph()->NewNode(simplified()->CheckIf(), check, effect, control);
     }
+    value = constant_value;
   } else if (access_info.IsAccessorConstant()) {
     // TODO(bmeurer): Properly rewire the IfException edge here if there's any.
     Node* target = jsgraph()->Constant(access_info.constant());
@@ -849,7 +945,8 @@
         // Introduce the call to the getter function.
         value = effect = graph()->NewNode(
             javascript()->CallFunction(
-                2, VectorSlotPair(), ConvertReceiverMode::kNotNullOrUndefined),
+                2, 0.0f, VectorSlotPair(),
+                ConvertReceiverMode::kNotNullOrUndefined),
             target, receiver, context, frame_state0, effect, control);
         control = graph()->NewNode(common()->IfSuccess(), value);
         break;
@@ -869,10 +966,11 @@
             context, target, frame_state);
 
         // Introduce the call to the setter function.
-        effect = graph()->NewNode(
-            javascript()->CallFunction(
-                3, VectorSlotPair(), ConvertReceiverMode::kNotNullOrUndefined),
-            target, receiver, value, context, frame_state0, effect, control);
+        effect = graph()->NewNode(javascript()->CallFunction(
+                                      3, 0.0f, VectorSlotPair(),
+                                      ConvertReceiverMode::kNotNullOrUndefined),
+                                  target, receiver, value, context,
+                                  frame_state0, effect, control);
         control = graph()->NewNode(common()->IfSuccess(), effect);
         break;
       }
@@ -881,9 +979,25 @@
     DCHECK(access_info.IsDataField());
     FieldIndex const field_index = access_info.field_index();
     Type* const field_type = access_info.field_type();
-    if (access_mode == AccessMode::kLoad &&
-        access_info.holder().ToHandle(&holder)) {
-      receiver = jsgraph()->Constant(holder);
+    MachineRepresentation const field_representation =
+        access_info.field_representation();
+    if (access_mode == AccessMode::kLoad) {
+      if (access_info.holder().ToHandle(&holder)) {
+        receiver = jsgraph()->Constant(holder);
+      }
+      // Optimize immutable property loads.
+      HeapObjectMatcher m(receiver);
+      if (m.HasValue() && m.Value()->IsJSObject()) {
+        // TODO(turbofan): Given that we already have the field_index here, we
+        // might be smarter in the future and not rely on the LookupIterator,
+        // but for now let's just do what Crankshaft does.
+        LookupIterator it(m.Value(), name,
+                          LookupIterator::OWN_SKIP_INTERCEPTOR);
+        if (it.IsFound() && it.IsReadOnly() && !it.IsConfigurable()) {
+          Node* value = jsgraph()->Constant(JSReceiver::GetDataProperty(&it));
+          return ValueEffectControl(value, effect, control);
+        }
+      }
     }
     Node* storage = receiver;
     if (!field_index.is_inobject()) {
@@ -892,89 +1006,112 @@
           storage, effect, control);
     }
     FieldAccess field_access = {
-        kTaggedBase, field_index.offset(),     name,
-        field_type,  MachineType::AnyTagged(), kFullWriteBarrier};
+        kTaggedBase,
+        field_index.offset(),
+        name,
+        field_type,
+        MachineType::TypeForRepresentation(field_representation),
+        kFullWriteBarrier};
     if (access_mode == AccessMode::kLoad) {
-      if (field_type->Is(Type::UntaggedFloat64())) {
-        // TODO(turbofan): We remove the representation axis from the type to
-        // avoid uninhabited representation types. This is a workaround until
-        // the {PropertyAccessInfo} is using {MachineRepresentation} instead.
-        field_access.type = Type::Union(
-            field_type, Type::Representation(Type::Number(), zone()), zone());
+      if (field_representation == MachineRepresentation::kFloat64) {
         if (!field_index.is_inobject() || field_index.is_hidden_field() ||
             !FLAG_unbox_double_fields) {
-          storage = effect = graph()->NewNode(
-              simplified()->LoadField(field_access), storage, effect, control);
+          FieldAccess const storage_access = {kTaggedBase,
+                                              field_index.offset(),
+                                              name,
+                                              Type::OtherInternal(),
+                                              MachineType::TaggedPointer(),
+                                              kPointerWriteBarrier};
+          storage = effect =
+              graph()->NewNode(simplified()->LoadField(storage_access), storage,
+                               effect, control);
           field_access.offset = HeapNumber::kValueOffset;
           field_access.name = MaybeHandle<Name>();
         }
-        field_access.machine_type = MachineType::Float64();
       }
+      // TODO(turbofan): Track the field_map (if any) on the {field_access} and
+      // use it in LoadElimination to eliminate map checks.
       value = effect = graph()->NewNode(simplified()->LoadField(field_access),
                                         storage, effect, control);
     } else {
       DCHECK_EQ(AccessMode::kStore, access_mode);
-      if (field_type->Is(Type::UntaggedFloat64())) {
-        // TODO(turbofan): We remove the representation axis from the type to
-        // avoid uninhabited representation types. This is a workaround until
-        // the {PropertyAccessInfo} is using {MachineRepresentation} instead.
-        field_access.type = Type::Union(
-            field_type, Type::Representation(Type::Number(), zone()), zone());
-        value = effect = graph()->NewNode(simplified()->CheckNumber(), value,
-                                          effect, control);
+      switch (field_representation) {
+        case MachineRepresentation::kFloat64: {
+          value = effect = graph()->NewNode(simplified()->CheckNumber(), value,
+                                            effect, control);
+          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
+              !FLAG_unbox_double_fields) {
+            if (access_info.HasTransitionMap()) {
+              // Allocate a MutableHeapNumber for the new property.
+              effect = graph()->NewNode(
+                  common()->BeginRegion(RegionObservability::kNotObservable),
+                  effect);
+              Node* box = effect = graph()->NewNode(
+                  simplified()->Allocate(NOT_TENURED),
+                  jsgraph()->Constant(HeapNumber::kSize), effect, control);
+              effect = graph()->NewNode(
+                  simplified()->StoreField(AccessBuilder::ForMap()), box,
+                  jsgraph()->HeapConstant(factory()->mutable_heap_number_map()),
+                  effect, control);
+              effect = graph()->NewNode(
+                  simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
+                  box, value, effect, control);
+              value = effect =
+                  graph()->NewNode(common()->FinishRegion(), box, effect);
 
-        if (!field_index.is_inobject() || field_index.is_hidden_field() ||
-            !FLAG_unbox_double_fields) {
-          if (access_info.HasTransitionMap()) {
-            // Allocate a MutableHeapNumber for the new property.
-            effect = graph()->NewNode(
-                common()->BeginRegion(RegionObservability::kNotObservable),
-                effect);
-            Node* box = effect = graph()->NewNode(
-                simplified()->Allocate(NOT_TENURED),
-                jsgraph()->Constant(HeapNumber::kSize), effect, control);
-            effect = graph()->NewNode(
-                simplified()->StoreField(AccessBuilder::ForMap()), box,
-                jsgraph()->HeapConstant(factory()->mutable_heap_number_map()),
-                effect, control);
-            effect = graph()->NewNode(
-                simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
-                box, value, effect, control);
-            value = effect =
-                graph()->NewNode(common()->FinishRegion(), box, effect);
-
-            field_access.type = Type::TaggedPointer();
-          } else {
-            // We just store directly to the MutableHeapNumber.
-            storage = effect =
-                graph()->NewNode(simplified()->LoadField(field_access), storage,
-                                 effect, control);
-            field_access.offset = HeapNumber::kValueOffset;
-            field_access.name = MaybeHandle<Name>();
-            field_access.machine_type = MachineType::Float64();
+              field_access.type = Type::Any();
+              field_access.machine_type = MachineType::TaggedPointer();
+              field_access.write_barrier_kind = kPointerWriteBarrier;
+            } else {
+              // We just store directly to the MutableHeapNumber.
+              FieldAccess const storage_access = {kTaggedBase,
+                                                  field_index.offset(),
+                                                  name,
+                                                  Type::OtherInternal(),
+                                                  MachineType::TaggedPointer(),
+                                                  kPointerWriteBarrier};
+              storage = effect =
+                  graph()->NewNode(simplified()->LoadField(storage_access),
+                                   storage, effect, control);
+              field_access.offset = HeapNumber::kValueOffset;
+              field_access.name = MaybeHandle<Name>();
+              field_access.machine_type = MachineType::Float64();
+            }
           }
-        } else {
-          // Unboxed double field, we store directly to the field.
-          field_access.machine_type = MachineType::Float64();
+          break;
         }
-      } else if (field_type->Is(Type::TaggedSigned())) {
-        value = effect = graph()->NewNode(simplified()->CheckTaggedSigned(),
-                                          value, effect, control);
-      } else if (field_type->Is(Type::TaggedPointer())) {
-        // Ensure that {value} is a HeapObject.
-        value = effect = graph()->NewNode(simplified()->CheckTaggedPointer(),
-                                          value, effect, control);
-        if (field_type->NumClasses() == 1) {
-          // Emit a map check for the value.
-          Node* field_map =
-              jsgraph()->Constant(field_type->Classes().Current());
-          effect = graph()->NewNode(simplified()->CheckMaps(1), value,
-                                    field_map, effect, control);
-        } else {
-          DCHECK_EQ(0, field_type->NumClasses());
+        case MachineRepresentation::kTaggedSigned: {
+          value = effect = graph()->NewNode(simplified()->CheckSmi(), value,
+                                            effect, control);
+          field_access.write_barrier_kind = kNoWriteBarrier;
+          break;
         }
-      } else {
-        DCHECK(field_type->Is(Type::Tagged()));
+        case MachineRepresentation::kTaggedPointer: {
+          // Ensure that {value} is a HeapObject.
+          value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
+                                            value, effect, control);
+          Handle<Map> field_map;
+          if (access_info.field_map().ToHandle(&field_map)) {
+            // Emit a map check for the value.
+            effect = graph()->NewNode(simplified()->CheckMaps(1), value,
+                                      jsgraph()->HeapConstant(field_map),
+                                      effect, control);
+          }
+          field_access.write_barrier_kind = kPointerWriteBarrier;
+          break;
+        }
+        case MachineRepresentation::kTagged:
+          break;
+        case MachineRepresentation::kNone:
+        case MachineRepresentation::kBit:
+        case MachineRepresentation::kWord8:
+        case MachineRepresentation::kWord16:
+        case MachineRepresentation::kWord32:
+        case MachineRepresentation::kWord64:
+        case MachineRepresentation::kFloat32:
+        case MachineRepresentation::kSimd128:
+          UNREACHABLE();
+          break;
       }
       Handle<Map> transition_map;
       if (access_info.transition_map().ToHandle(&transition_map)) {
@@ -1048,20 +1185,13 @@
     Node* buffer = effect = graph()->NewNode(
         simplified()->LoadField(AccessBuilder::ForJSArrayBufferViewBuffer()),
         receiver, effect, control);
-    Node* buffer_bitfield = effect = graph()->NewNode(
-        simplified()->LoadField(AccessBuilder::ForJSArrayBufferBitField()),
-        buffer, effect, control);
-    Node* check = graph()->NewNode(
-        simplified()->NumberEqual(),
-        graph()->NewNode(
-            simplified()->NumberBitwiseAnd(), buffer_bitfield,
-            jsgraph()->Constant(JSArrayBuffer::WasNeutered::kMask)),
-        jsgraph()->ZeroConstant());
+    Node* check = effect = graph()->NewNode(
+        simplified()->ArrayBufferWasNeutered(), buffer, effect, control);
 
     // Default to zero if the {receiver}s buffer was neutered.
     length = graph()->NewNode(
-        common()->Select(MachineRepresentation::kTagged, BranchHint::kTrue),
-        check, length, jsgraph()->ZeroConstant());
+        common()->Select(MachineRepresentation::kTagged, BranchHint::kFalse),
+        check, jsgraph()->ZeroConstant(), length);
 
     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
       // Check that the {index} is a valid array index, we do the actual
@@ -1175,6 +1305,7 @@
       element_machine_type = MachineType::Float64();
     } else if (IsFastSmiElementsKind(elements_kind)) {
       element_type = type_cache_.kSmi;
+      element_machine_type = MachineType::TaggedSigned();
     }
     ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
                                     element_type, element_machine_type,
@@ -1188,6 +1319,7 @@
           elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
         element_access.type =
             Type::Union(element_type, Type::Hole(), graph()->zone());
+        element_access.machine_type = MachineType::AnyTagged();
       }
       // Perform the actual backing store access.
       value = effect =
@@ -1221,8 +1353,8 @@
     } else {
       DCHECK_EQ(AccessMode::kStore, access_mode);
       if (IsFastSmiElementsKind(elements_kind)) {
-        value = effect = graph()->NewNode(simplified()->CheckTaggedSigned(),
-                                          value, effect, control);
+        value = effect =
+            graph()->NewNode(simplified()->CheckSmi(), value, effect, control);
       } else if (IsFastDoubleElementsKind(elements_kind)) {
         value = effect = graph()->NewNode(simplified()->CheckNumber(), value,
                                           effect, control);
@@ -1293,9 +1425,9 @@
                           inputs);
 }
 
-Node* JSNativeContextSpecialization::BuildCheckTaggedPointer(Node* receiver,
-                                                             Node* effect,
-                                                             Node* control) {
+Node* JSNativeContextSpecialization::BuildCheckHeapObject(Node* receiver,
+                                                          Node* effect,
+                                                          Node* control) {
   switch (receiver->opcode()) {
     case IrOpcode::kHeapConstant:
     case IrOpcode::kJSCreate:
@@ -1314,8 +1446,8 @@
       return effect;
     }
     default: {
-      return graph()->NewNode(simplified()->CheckTaggedPointer(), receiver,
-                              effect, control);
+      return graph()->NewNode(simplified()->CheckHeapObject(), receiver, effect,
+                              control);
     }
   }
 }
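
The store lowering earlier in this file now switches on the field's machine representation to pick the value check and write barrier: Float64 fields get a CheckNumber (and possibly a MutableHeapNumber box), TaggedSigned fields get a CheckSmi and no write barrier, TaggedPointer fields get a CheckHeapObject plus an optional map check and a pointer write barrier, and plain Tagged fields get neither. A condensed standalone table of that mapping (enum names are stand-ins, and the Float64 boxing details are omitted):

#include <iostream>

enum class FieldRepresentation { kFloat64, kTaggedSigned, kTaggedPointer, kTagged };
enum class ValueCheck { kNone, kCheckNumber, kCheckSmi, kCheckHeapObject };
enum class WriteBarrier { kFull, kPointer, kNone };

struct StorePlan {
  ValueCheck check;
  WriteBarrier barrier;
};

// Condensed version of the representation switch in the store lowering above.
StorePlan PlanFieldStore(FieldRepresentation rep) {
  switch (rep) {
    case FieldRepresentation::kFloat64:
      // Barrier depends on whether the double ends up boxed; simplified here.
      return {ValueCheck::kCheckNumber, WriteBarrier::kFull};
    case FieldRepresentation::kTaggedSigned:
      return {ValueCheck::kCheckSmi, WriteBarrier::kNone};
    case FieldRepresentation::kTaggedPointer:
      return {ValueCheck::kCheckHeapObject, WriteBarrier::kPointer};
    case FieldRepresentation::kTagged:
      return {ValueCheck::kNone, WriteBarrier::kFull};
  }
  return {ValueCheck::kNone, WriteBarrier::kFull};  // unreachable
}

int main() {
  StorePlan smi = PlanFieldStore(FieldRepresentation::kTaggedSigned);
  std::cout << (smi.check == ValueCheck::kCheckSmi) << " "
            << (smi.barrier == WriteBarrier::kNone) << "\n";  // "1 1"
  return 0;
}
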
diff --git a/src/compiler/js-native-context-specialization.h b/src/compiler/js-native-context-specialization.h
index 549dc93..c015de0 100644
--- a/src/compiler/js-native-context-specialization.h
+++ b/src/compiler/js-native-context-specialization.h
@@ -16,8 +16,6 @@
 class CompilationDependencies;
 class Factory;
 class FeedbackNexus;
-class TypeCache;
-
 
 namespace compiler {
 
@@ -30,7 +28,7 @@
 class MachineOperatorBuilder;
 class PropertyAccessInfo;
 class SimplifiedOperatorBuilder;
-
+class TypeCache;
 
 // Specializes a given JSGraph to a given native context, potentially constant
 // folding some {LoadGlobal} nodes or strength reducing some {StoreGlobal}
@@ -55,6 +53,7 @@
   Reduction Reduce(Node* node) final;
 
  private:
+  Reduction ReduceJSInstanceOf(Node* node);
   Reduction ReduceJSLoadContext(Node* node);
   Reduction ReduceJSLoadNamed(Node* node);
   Reduction ReduceJSStoreNamed(Node* node);
@@ -120,7 +119,7 @@
                        std::vector<Handle<Map>> const& maps);
 
   // Construct an appropriate heap object check.
-  Node* BuildCheckTaggedPointer(Node* receiver, Node* effect, Node* control);
+  Node* BuildCheckHeapObject(Node* receiver, Node* effect, Node* control);
 
   // Adds stability dependencies on all prototypes of every class in
   // {receiver_type} up to (and including) the {holder}.
diff --git a/src/compiler/js-operator.cc b/src/compiler/js-operator.cc
index d19bb76..21e905a 100644
--- a/src/compiler/js-operator.cc
+++ b/src/compiler/js-operator.cc
@@ -54,7 +54,8 @@
 
 bool operator==(CallConstructParameters const& lhs,
                 CallConstructParameters const& rhs) {
-  return lhs.arity() == rhs.arity() && lhs.feedback() == rhs.feedback();
+  return lhs.arity() == rhs.arity() && lhs.frequency() == rhs.frequency() &&
+         lhs.feedback() == rhs.feedback();
 }
 
 
@@ -65,12 +66,12 @@
 
 
 size_t hash_value(CallConstructParameters const& p) {
-  return base::hash_combine(p.arity(), p.feedback());
+  return base::hash_combine(p.arity(), p.frequency(), p.feedback());
 }
 
 
 std::ostream& operator<<(std::ostream& os, CallConstructParameters const& p) {
-  return os << p.arity();
+  return os << p.arity() << ", " << p.frequency();
 }
 
 
@@ -81,7 +82,8 @@
 
 
 std::ostream& operator<<(std::ostream& os, CallFunctionParameters const& p) {
-  os << p.arity() << ", " << p.convert_mode() << ", " << p.tail_call_mode();
+  os << p.arity() << ", " << p.frequency() << ", " << p.convert_mode() << ", "
+     << p.tail_call_mode();
   return os;
 }
 
@@ -157,6 +159,37 @@
   return OpParameter<ContextAccess>(op);
 }
 
+CreateCatchContextParameters::CreateCatchContextParameters(
+    Handle<String> catch_name, Handle<ScopeInfo> scope_info)
+    : catch_name_(catch_name), scope_info_(scope_info) {}
+
+bool operator==(CreateCatchContextParameters const& lhs,
+                CreateCatchContextParameters const& rhs) {
+  return lhs.catch_name().location() == rhs.catch_name().location() &&
+         lhs.scope_info().location() == rhs.scope_info().location();
+}
+
+bool operator!=(CreateCatchContextParameters const& lhs,
+                CreateCatchContextParameters const& rhs) {
+  return !(lhs == rhs);
+}
+
+size_t hash_value(CreateCatchContextParameters const& parameters) {
+  return base::hash_combine(parameters.catch_name().location(),
+                            parameters.scope_info().location());
+}
+
+std::ostream& operator<<(std::ostream& os,
+                         CreateCatchContextParameters const& parameters) {
+  return os << Brief(*parameters.catch_name()) << ", "
+            << Brief(*parameters.scope_info());
+}
+
+CreateCatchContextParameters const& CreateCatchContextParametersOf(
+    Operator const* op) {
+  DCHECK_EQ(IrOpcode::kJSCreateCatchContext, op->opcode());
+  return OpParameter<CreateCatchContextParameters>(op);
+}
 
 bool operator==(NamedAccess const& lhs, NamedAccess const& rhs) {
   return lhs.name().location() == rhs.name().location() &&
@@ -376,7 +409,7 @@
   return OpParameter<CreateLiteralParameters>(op);
 }
 
-const BinaryOperationHint BinaryOperationHintOf(const Operator* op) {
+BinaryOperationHint BinaryOperationHintOf(const Operator* op) {
   DCHECK(op->opcode() == IrOpcode::kJSBitwiseOr ||
          op->opcode() == IrOpcode::kJSBitwiseXor ||
          op->opcode() == IrOpcode::kJSBitwiseAnd ||
@@ -391,7 +424,7 @@
   return OpParameter<BinaryOperationHint>(op);
 }
 
-const CompareOperationHint CompareOperationHintOf(const Operator* op) {
+CompareOperationHint CompareOperationHintOf(const Operator* op) {
   DCHECK(op->opcode() == IrOpcode::kJSEqual ||
          op->opcode() == IrOpcode::kJSNotEqual ||
          op->opcode() == IrOpcode::kJSStrictEqual ||
@@ -415,15 +448,13 @@
   V(HasProperty, Operator::kNoProperties, 2, 1)             \
   V(TypeOf, Operator::kPure, 1, 1)                          \
   V(InstanceOf, Operator::kNoProperties, 2, 1)              \
-  V(ForInDone, Operator::kPure, 2, 1)                       \
+  V(OrdinaryHasInstance, Operator::kNoProperties, 2, 1)     \
   V(ForInNext, Operator::kNoProperties, 4, 1)               \
   V(ForInPrepare, Operator::kNoProperties, 1, 3)            \
-  V(ForInStep, Operator::kPure, 1, 1)                       \
   V(LoadMessage, Operator::kNoThrow, 0, 1)                  \
   V(StoreMessage, Operator::kNoThrow, 1, 0)                 \
   V(GeneratorRestoreContinuation, Operator::kNoThrow, 1, 1) \
-  V(StackCheck, Operator::kNoWrite, 0, 0)                   \
-  V(CreateWithContext, Operator::kNoProperties, 2, 1)
+  V(StackCheck, Operator::kNoWrite, 0, 0)
 
 #define BINARY_OP_LIST(V) \
   V(BitwiseOr)            \
@@ -476,6 +507,7 @@
   Name##Operator<BinaryOperationHint::kSigned32> k##Name##Signed32Operator;   \
   Name##Operator<BinaryOperationHint::kNumberOrOddball>                       \
       k##Name##NumberOrOddballOperator;                                       \
+  Name##Operator<BinaryOperationHint::kString> k##Name##StringOperator;       \
   Name##Operator<BinaryOperationHint::kAny> k##Name##AnyOperator;
   BINARY_OP_LIST(BINARY_OP)
 #undef BINARY_OP
@@ -523,6 +555,8 @@
         return &cache_.k##Name##Signed32Operator;                     \
       case BinaryOperationHint::kNumberOrOddball:                     \
         return &cache_.k##Name##NumberOrOddballOperator;              \
+      case BinaryOperationHint::kString:                              \
+        return &cache_.k##Name##StringOperator;                       \
       case BinaryOperationHint::kAny:                                 \
         return &cache_.k##Name##AnyOperator;                          \
     }                                                                 \
@@ -562,9 +596,9 @@
 }
 
 const Operator* JSOperatorBuilder::CallFunction(
-    size_t arity, VectorSlotPair const& feedback,
+    size_t arity, float frequency, VectorSlotPair const& feedback,
     ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {
-  CallFunctionParameters parameters(arity, feedback, tail_call_mode,
+  CallFunctionParameters parameters(arity, frequency, feedback, tail_call_mode,
                                     convert_mode);
   return new (zone()) Operator1<CallFunctionParameters>(   // --
       IrOpcode::kJSCallFunction, Operator::kNoProperties,  // opcode
@@ -598,10 +632,9 @@
       parameters);                                        // parameter
 }
 
-
 const Operator* JSOperatorBuilder::CallConstruct(
-    size_t arity, VectorSlotPair const& feedback) {
-  CallConstructParameters parameters(arity, feedback);
+    uint32_t arity, float frequency, VectorSlotPair const& feedback) {
+  CallConstructParameters parameters(arity, frequency, feedback);
   return new (zone()) Operator1<CallConstructParameters>(   // --
       IrOpcode::kJSCallConstruct, Operator::kNoProperties,  // opcode
       "JSCallConstruct",                                    // name
@@ -811,16 +844,24 @@
       slot_count);                                                  // parameter
 }
 
-
 const Operator* JSOperatorBuilder::CreateCatchContext(
-    const Handle<String>& name) {
-  return new (zone()) Operator1<Handle<String>>(                 // --
+    const Handle<String>& name, const Handle<ScopeInfo>& scope_info) {
+  CreateCatchContextParameters parameters(name, scope_info);
+  return new (zone()) Operator1<CreateCatchContextParameters>(
       IrOpcode::kJSCreateCatchContext, Operator::kNoProperties,  // opcode
       "JSCreateCatchContext",                                    // name
       2, 1, 1, 1, 1, 2,                                          // counts
-      name);                                                     // parameter
+      parameters);                                               // parameter
 }
 
+const Operator* JSOperatorBuilder::CreateWithContext(
+    const Handle<ScopeInfo>& scope_info) {
+  return new (zone()) Operator1<Handle<ScopeInfo>>(
+      IrOpcode::kJSCreateWithContext, Operator::kNoProperties,  // opcode
+      "JSCreateWithContext",                                    // name
+      2, 1, 1, 1, 1, 2,                                         // counts
+      scope_info);                                              // parameter
+}
 
 const Operator* JSOperatorBuilder::CreateBlockContext(
     const Handle<ScopeInfo>& scpope_info) {
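
The js-operator changes above thread a static call frequency through CallConstructParameters and CallFunctionParameters; because operators are cached and keyed on these parameters, the frequency has to take part in both equality and hashing, or two call sites with different frequencies would share one cached operator. A minimal standalone sketch of that shape (plain C++, not V8 code; the VectorSlotPair stand-in and the hash mixing are assumptions for illustration):

#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>

struct VectorSlotPairStub {  // stand-in for compiler::VectorSlotPair
  int slot = -1;
  bool operator==(const VectorSlotPairStub& other) const {
    return slot == other.slot;
  }
};

class CallConstructParametersSketch {
 public:
  CallConstructParametersSketch(uint32_t arity, float frequency,
                                VectorSlotPairStub feedback)
      : arity_(arity), frequency_(frequency), feedback_(feedback) {}

  uint32_t arity() const { return arity_; }
  float frequency() const { return frequency_; }
  const VectorSlotPairStub& feedback() const { return feedback_; }

 private:
  uint32_t arity_;
  float frequency_;
  VectorSlotPairStub feedback_;
};

bool operator==(const CallConstructParametersSketch& lhs,
                const CallConstructParametersSketch& rhs) {
  // Mirrors the updated operator==: arity, frequency and feedback all count.
  return lhs.arity() == rhs.arity() && lhs.frequency() == rhs.frequency() &&
         lhs.feedback() == rhs.feedback();
}

size_t hash_value(const CallConstructParametersSketch& p) {
  // Rough stand-in for base::hash_combine(arity, frequency, feedback).
  size_t h = std::hash<uint32_t>{}(p.arity());
  h ^= std::hash<float>{}(p.frequency()) + 0x9e3779b9 + (h << 6) + (h >> 2);
  h ^= std::hash<int>{}(p.feedback().slot) + 0x9e3779b9 + (h << 6) + (h >> 2);
  return h;
}

int main() {
  CallConstructParametersSketch a(2, 0.5f, {});
  CallConstructParametersSketch b(2, 1.0f, {});
  std::cout << (a == b) << "\n";  // prints 0: same arity, different frequency
}
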
diff --git a/src/compiler/js-operator.h b/src/compiler/js-operator.h
index 19022fa..2374ae6 100644
--- a/src/compiler/js-operator.h
+++ b/src/compiler/js-operator.h
@@ -5,8 +5,8 @@
 #ifndef V8_COMPILER_JS_OPERATOR_H_
 #define V8_COMPILER_JS_OPERATOR_H_
 
-#include "src/compiler/type-hints.h"
 #include "src/runtime/runtime.h"
+#include "src/type-hints.h"
 
 namespace v8 {
 namespace internal {
@@ -55,14 +55,17 @@
 // used as a parameter by JSCallConstruct operators.
 class CallConstructParameters final {
  public:
-  CallConstructParameters(size_t arity, VectorSlotPair const& feedback)
-      : arity_(arity), feedback_(feedback) {}
+  CallConstructParameters(uint32_t arity, float frequency,
+                          VectorSlotPair const& feedback)
+      : arity_(arity), frequency_(frequency), feedback_(feedback) {}
 
-  size_t arity() const { return arity_; }
+  uint32_t arity() const { return arity_; }
+  float frequency() const { return frequency_; }
   VectorSlotPair const& feedback() const { return feedback_; }
 
  private:
-  size_t const arity_;
+  uint32_t const arity_;
+  float const frequency_;
   VectorSlotPair const feedback_;
 };
 
@@ -80,15 +83,18 @@
 // used as a parameter by JSCallFunction operators.
 class CallFunctionParameters final {
  public:
-  CallFunctionParameters(size_t arity, VectorSlotPair const& feedback,
+  CallFunctionParameters(size_t arity, float frequency,
+                         VectorSlotPair const& feedback,
                          TailCallMode tail_call_mode,
                          ConvertReceiverMode convert_mode)
       : bit_field_(ArityField::encode(arity) |
                    ConvertReceiverModeField::encode(convert_mode) |
                    TailCallModeField::encode(tail_call_mode)),
+        frequency_(frequency),
         feedback_(feedback) {}
 
   size_t arity() const { return ArityField::decode(bit_field_); }
+  float frequency() const { return frequency_; }
   ConvertReceiverMode convert_mode() const {
     return ConvertReceiverModeField::decode(bit_field_);
   }
@@ -99,6 +105,7 @@
 
   bool operator==(CallFunctionParameters const& that) const {
     return this->bit_field_ == that.bit_field_ &&
+           this->frequency_ == that.frequency_ &&
            this->feedback_ == that.feedback_;
   }
   bool operator!=(CallFunctionParameters const& that) const {
@@ -107,15 +114,16 @@
 
  private:
   friend size_t hash_value(CallFunctionParameters const& p) {
-    return base::hash_combine(p.bit_field_, p.feedback_);
+    return base::hash_combine(p.bit_field_, p.frequency_, p.feedback_);
   }
 
   typedef BitField<size_t, 0, 29> ArityField;
   typedef BitField<ConvertReceiverMode, 29, 2> ConvertReceiverModeField;
   typedef BitField<TailCallMode, 31, 1> TailCallModeField;
 
-  const uint32_t bit_field_;
-  const VectorSlotPair feedback_;
+  uint32_t const bit_field_;
+  float const frequency_;
+  VectorSlotPair const feedback_;
 };
 
 size_t hash_value(CallFunctionParameters const&);
@@ -178,6 +186,33 @@
 
 ContextAccess const& ContextAccessOf(Operator const*);
 
+// Defines the name and ScopeInfo for a new catch context. This is used as a
+// parameter by the JSCreateCatchContext operator.
+class CreateCatchContextParameters final {
+ public:
+  CreateCatchContextParameters(Handle<String> catch_name,
+                               Handle<ScopeInfo> scope_info);
+
+  Handle<String> catch_name() const { return catch_name_; }
+  Handle<ScopeInfo> scope_info() const { return scope_info_; }
+
+ private:
+  Handle<String> const catch_name_;
+  Handle<ScopeInfo> const scope_info_;
+};
+
+bool operator==(CreateCatchContextParameters const& lhs,
+                CreateCatchContextParameters const& rhs);
+bool operator!=(CreateCatchContextParameters const& lhs,
+                CreateCatchContextParameters const& rhs);
+
+size_t hash_value(CreateCatchContextParameters const& parameters);
+
+std::ostream& operator<<(std::ostream& os,
+                         CreateCatchContextParameters const& parameters);
+
+CreateCatchContextParameters const& CreateCatchContextParametersOf(
+    Operator const*);
 
 // Defines the property of an object for a named access. This is
 // used as a parameter by the JSLoadNamed and JSStoreNamed operators.
@@ -374,9 +409,9 @@
 
 const CreateLiteralParameters& CreateLiteralParametersOf(const Operator* op);
 
-const BinaryOperationHint BinaryOperationHintOf(const Operator* op);
+BinaryOperationHint BinaryOperationHintOf(const Operator* op);
 
-const CompareOperationHint CompareOperationHintOf(const Operator* op);
+CompareOperationHint CompareOperationHintOf(const Operator* op);
 
 // Interface for building JavaScript-level operators, e.g. directly from the
 // AST. Most operators have no parameters, thus can be globally shared for all
@@ -430,13 +465,15 @@
                                       int literal_flags, int literal_index);
 
   const Operator* CallFunction(
-      size_t arity, VectorSlotPair const& feedback = VectorSlotPair(),
+      size_t arity, float frequency = 0.0f,
+      VectorSlotPair const& feedback = VectorSlotPair(),
       ConvertReceiverMode convert_mode = ConvertReceiverMode::kAny,
       TailCallMode tail_call_mode = TailCallMode::kDisallow);
   const Operator* CallRuntime(Runtime::FunctionId id);
   const Operator* CallRuntime(Runtime::FunctionId id, size_t arity);
   const Operator* CallRuntime(const Runtime::Function* function, size_t arity);
-  const Operator* CallConstruct(size_t arity, VectorSlotPair const& feedback);
+  const Operator* CallConstruct(uint32_t arity, float frequency,
+                                VectorSlotPair const& feedback);
 
   const Operator* ConvertReceiver(ConvertReceiverMode convert_mode);
 
@@ -464,11 +501,10 @@
 
   const Operator* TypeOf();
   const Operator* InstanceOf();
+  const Operator* OrdinaryHasInstance();
 
-  const Operator* ForInDone();
   const Operator* ForInNext();
   const Operator* ForInPrepare();
-  const Operator* ForInStep();
 
   const Operator* LoadMessage();
   const Operator* StoreMessage();
@@ -483,8 +519,9 @@
   const Operator* StackCheck();
 
   const Operator* CreateFunctionContext(int slot_count);
-  const Operator* CreateCatchContext(const Handle<String>& name);
-  const Operator* CreateWithContext();
+  const Operator* CreateCatchContext(const Handle<String>& name,
+                                     const Handle<ScopeInfo>& scope_info);
+  const Operator* CreateWithContext(const Handle<ScopeInfo>& scope_info);
   const Operator* CreateBlockContext(const Handle<ScopeInfo>& scpope_info);
   const Operator* CreateModuleContext();
   const Operator* CreateScriptContext(const Handle<ScopeInfo>& scpope_info);
diff --git a/src/compiler/js-typed-lowering.cc b/src/compiler/js-typed-lowering.cc
index 89ab0de..82df4ed 100644
--- a/src/compiler/js-typed-lowering.cc
+++ b/src/compiler/js-typed-lowering.cc
@@ -13,8 +13,8 @@
 #include "src/compiler/node-matchers.h"
 #include "src/compiler/node-properties.h"
 #include "src/compiler/operator-properties.h"
-#include "src/type-cache.h"
-#include "src/types.h"
+#include "src/compiler/type-cache.h"
+#include "src/compiler/types.h"
 
 namespace v8 {
 namespace internal {
@@ -46,6 +46,7 @@
           return true;
         case BinaryOperationHint::kAny:
         case BinaryOperationHint::kNone:
+        case BinaryOperationHint::kString:
           break;
       }
     }
@@ -73,6 +74,37 @@
     return false;
   }
 
+  // Check if a string addition will definitely result in creating a ConsString,
+  // i.e. if the combined length of the operands meets or exceeds the ConsString
+  // minimum length.
+  bool ShouldCreateConsString() {
+    DCHECK_EQ(IrOpcode::kJSAdd, node_->opcode());
+    if (BothInputsAre(Type::String()) ||
+        ((lowering_->flags() & JSTypedLowering::kDeoptimizationEnabled) &&
+         BinaryOperationHintOf(node_->op()) == BinaryOperationHint::kString)) {
+      if (right_type()->IsConstant() &&
+          right_type()->AsConstant()->Value()->IsString()) {
+        Handle<String> right_string =
+            Handle<String>::cast(right_type()->AsConstant()->Value());
+        if (right_string->length() >= ConsString::kMinLength) return true;
+      }
+      if (left_type()->IsConstant() &&
+          left_type()->AsConstant()->Value()->IsString()) {
+        Handle<String> left_string =
+            Handle<String>::cast(left_type()->AsConstant()->Value());
+        if (left_string->length() >= ConsString::kMinLength) {
+          // The invariant for ConsString requires the left hand side to be
+          // a sequential or external string if the right hand side is the
+          // empty string. Since we don't know anything about the right hand
+          // side here, we must ensure that the left hand side satisfies the
+          // constraints independently of the right hand side.
+          return left_string->IsSeqString() || left_string->IsExternalString();
+        }
+      }
+    }
+    return false;
+  }
+
   void ConvertInputsToNumber() {
     // To convert the inputs to numbers, we have to provide frame states
     // for lazy bailouts in the ToNumber conversions.
@@ -430,8 +462,6 @@
       dependencies_(dependencies),
       flags_(flags),
       jsgraph_(jsgraph),
-      true_type_(Type::Constant(factory()->true_value(), graph()->zone())),
-      false_type_(Type::Constant(factory()->false_value(), graph()->zone())),
       the_hole_type_(
           Type::Constant(factory()->the_hole_value(), graph()->zone())),
       type_cache_(TypeCache::Get()) {
@@ -469,6 +499,9 @@
     return r.ChangeToPureOperator(simplified()->NumberAdd(), Type::Number());
   }
   if (r.OneInputIs(Type::String())) {
+    if (r.ShouldCreateConsString()) {
+      return ReduceCreateConsString(node);
+    }
     StringAddFlags flags = STRING_ADD_CHECK_NONE;
     if (!r.LeftInputIs(Type::String())) {
       flags = STRING_ADD_CONVERT_LEFT;
@@ -546,6 +579,123 @@
   return NoChange();
 }
 
+Reduction JSTypedLowering::ReduceCreateConsString(Node* node) {
+  Node* first = NodeProperties::GetValueInput(node, 0);
+  Node* second = NodeProperties::GetValueInput(node, 1);
+  Node* context = NodeProperties::GetContextInput(node);
+  Node* frame_state = NodeProperties::GetFrameStateInput(node);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
+
+  // Make sure {first} is actually a String.
+  Type* first_type = NodeProperties::GetType(first);
+  if (!first_type->Is(Type::String())) {
+    first = effect =
+        graph()->NewNode(simplified()->CheckString(), first, effect, control);
+    first_type = NodeProperties::GetType(first);
+  }
+
+  // Make sure {second} is actually a String.
+  Type* second_type = NodeProperties::GetType(second);
+  if (!second_type->Is(Type::String())) {
+    second = effect =
+        graph()->NewNode(simplified()->CheckString(), second, effect, control);
+    second_type = NodeProperties::GetType(second);
+  }
+
+  // Determine the {first} length.
+  Node* first_length =
+      first_type->IsConstant()
+          ? jsgraph()->Constant(
+                Handle<String>::cast(first_type->AsConstant()->Value())
+                    ->length())
+          : effect = graph()->NewNode(
+                simplified()->LoadField(AccessBuilder::ForStringLength()),
+                first, effect, control);
+
+  // Determine the {second} length.
+  Node* second_length =
+      second_type->IsConstant()
+          ? jsgraph()->Constant(
+                Handle<String>::cast(second_type->AsConstant()->Value())
+                    ->length())
+          : effect = graph()->NewNode(
+                simplified()->LoadField(AccessBuilder::ForStringLength()),
+                second, effect, control);
+
+  // Compute the resulting length.
+  Node* length =
+      graph()->NewNode(simplified()->NumberAdd(), first_length, second_length);
+
+  // Check if we would overflow the allowed maximum string length.
+  Node* check = graph()->NewNode(simplified()->NumberLessThanOrEqual(), length,
+                                 jsgraph()->Constant(String::kMaxLength));
+  Node* branch =
+      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
+  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
+  Node* efalse = effect;
+  {
+    // Throw a RangeError in case of overflow.
+    Node* vfalse = efalse = graph()->NewNode(
+        javascript()->CallRuntime(Runtime::kThrowInvalidStringLength), context,
+        frame_state, efalse, if_false);
+    if_false = graph()->NewNode(common()->IfSuccess(), vfalse);
+    if_false = graph()->NewNode(common()->Throw(), vfalse, efalse, if_false);
+    // TODO(bmeurer): This should be on the AdvancedReducer somehow.
+    NodeProperties::MergeControlToEnd(graph(), common(), if_false);
+    Revisit(graph()->end());
+
+    // Update potential {IfException} uses of {node} to point to the
+    // %ThrowInvalidStringLength runtime call node instead.
+    for (Edge edge : node->use_edges()) {
+      if (edge.from()->opcode() == IrOpcode::kIfException) {
+        DCHECK(NodeProperties::IsControlEdge(edge) ||
+               NodeProperties::IsEffectEdge(edge));
+        edge.UpdateTo(vfalse);
+        Revisit(edge.from());
+      }
+    }
+  }
+  control = graph()->NewNode(common()->IfTrue(), branch);
+
+  // Figure out the map for the resulting ConsString.
+  // TODO(turbofan): We currently just use the cons_string_map here for
+  // the sake of simplicity; we could also try to be smarter here and
+  // use the one_byte_cons_string_map instead when the resulting ConsString
+  // contains only one byte characters.
+  Node* value_map = jsgraph()->HeapConstant(factory()->cons_string_map());
+
+  // Allocate the resulting ConsString.
+  effect = graph()->NewNode(
+      common()->BeginRegion(RegionObservability::kNotObservable), effect);
+  Node* value = effect =
+      graph()->NewNode(simplified()->Allocate(NOT_TENURED),
+                       jsgraph()->Constant(ConsString::kSize), effect, control);
+  NodeProperties::SetType(value, Type::OtherString());
+  effect = graph()->NewNode(simplified()->StoreField(AccessBuilder::ForMap()),
+                            value, value_map, effect, control);
+  effect = graph()->NewNode(
+      simplified()->StoreField(AccessBuilder::ForNameHashField()), value,
+      jsgraph()->Uint32Constant(Name::kEmptyHashField), effect, control);
+  effect = graph()->NewNode(
+      simplified()->StoreField(AccessBuilder::ForStringLength()), value, length,
+      effect, control);
+  effect = graph()->NewNode(
+      simplified()->StoreField(AccessBuilder::ForConsStringFirst()), value,
+      first, effect, control);
+  effect = graph()->NewNode(
+      simplified()->StoreField(AccessBuilder::ForConsStringSecond()), value,
+      second, effect, control);
+
+  // Morph the {node} into a {FinishRegion}.
+  ReplaceWithValue(node, node, node, control);
+  node->ReplaceInput(0, value);
+  node->ReplaceInput(1, effect);
+  node->TrimInputCount(2);
+  NodeProperties::ChangeOp(node, common()->FinishRegion());
+  return Changed(node);
+}
+
 Reduction JSTypedLowering::ReduceJSComparison(Node* node) {
   JSBinopReduction r(this, node);
   if (r.BothInputsAre(Type::String())) {
@@ -779,22 +929,10 @@
     NodeProperties::ChangeOp(node, simplified()->BooleanNot());
     return Changed(node);
   } else if (input_type->Is(Type::Number())) {
-    // JSToBoolean(x:number) => NumberLessThan(#0,NumberAbs(x))
+    // JSToBoolean(x:number) => NumberToBoolean(x)
     RelaxEffectsAndControls(node);
-    node->ReplaceInput(0, jsgraph()->ZeroConstant());
-    node->ReplaceInput(1, graph()->NewNode(simplified()->NumberAbs(), input));
-    node->TrimInputCount(2);
-    NodeProperties::ChangeOp(node, simplified()->NumberLessThan());
-    return Changed(node);
-  } else if (input_type->Is(Type::String())) {
-    // JSToBoolean(x:string) => NumberLessThan(#0,x.length)
-    FieldAccess const access = AccessBuilder::ForStringLength();
-    Node* length = graph()->NewNode(simplified()->LoadField(access), input,
-                                    graph()->start(), graph()->start());
-    ReplaceWithValue(node, node, length);
-    node->ReplaceInput(0, jsgraph()->ZeroConstant());
-    node->ReplaceInput(1, length);
-    NodeProperties::ChangeOp(node, simplified()->NumberLessThan());
+    node->TrimInputCount(1);
+    NodeProperties::ChangeOp(node, simplified()->NumberToBoolean());
     return Changed(node);
   }
   return NoChange();
@@ -821,23 +959,12 @@
       input = jsgraph()->Constant(kMaxSafeInteger);
     } else {
       if (input_type->Min() <= 0.0) {
-        input = graph()->NewNode(
-            common()->Select(MachineRepresentation::kTagged),
-            graph()->NewNode(simplified()->NumberLessThanOrEqual(), input,
-                             jsgraph()->ZeroConstant()),
-            jsgraph()->ZeroConstant(), input);
-        input_type = Type::Range(0.0, input_type->Max(), graph()->zone());
-        NodeProperties::SetType(input, input_type);
+        input = graph()->NewNode(simplified()->NumberMax(),
+                                 jsgraph()->ZeroConstant(), input);
       }
       if (input_type->Max() > kMaxSafeInteger) {
-        input = graph()->NewNode(
-            common()->Select(MachineRepresentation::kTagged),
-            graph()->NewNode(simplified()->NumberLessThanOrEqual(),
-                             jsgraph()->Constant(kMaxSafeInteger), input),
-            jsgraph()->Constant(kMaxSafeInteger), input);
-        input_type =
-            Type::Range(input_type->Min(), kMaxSafeInteger, graph()->zone());
-        NodeProperties::SetType(input, input_type);
+        input = graph()->NewNode(simplified()->NumberMin(),
+                                 jsgraph()->Constant(kMaxSafeInteger), input);
       }
     }
     ReplaceWithValue(node, input);
@@ -1132,169 +1259,162 @@
   return NoChange();
 }
 
-Reduction JSTypedLowering::ReduceJSInstanceOf(Node* node) {
-  DCHECK_EQ(IrOpcode::kJSInstanceOf, node->opcode());
-  Node* const context = NodeProperties::GetContextInput(node);
-  Node* const frame_state = NodeProperties::GetFrameStateInput(node);
+Reduction JSTypedLowering::ReduceJSOrdinaryHasInstance(Node* node) {
+  DCHECK_EQ(IrOpcode::kJSOrdinaryHasInstance, node->opcode());
+  Node* constructor = NodeProperties::GetValueInput(node, 0);
+  Type* constructor_type = NodeProperties::GetType(constructor);
+  Node* object = NodeProperties::GetValueInput(node, 1);
+  Node* context = NodeProperties::GetContextInput(node);
+  Node* frame_state = NodeProperties::GetFrameStateInput(node);
+  Node* effect = NodeProperties::GetEffectInput(node);
+  Node* control = NodeProperties::GetControlInput(node);
 
-  // If deoptimization is disabled, we cannot optimize.
-  if (!(flags() & kDeoptimizationEnabled)) return NoChange();
-
-  // If we are in a try block, don't optimize since the runtime call
-  // in the proxy case can throw.
-  if (NodeProperties::IsExceptionalCall(node)) return NoChange();
-
-  JSBinopReduction r(this, node);
-  Node* effect = r.effect();
-  Node* control = r.control();
-
-  if (!r.right_type()->IsConstant() ||
-      !r.right_type()->AsConstant()->Value()->IsJSFunction()) {
+  // Check if the {constructor} is a (known) JSFunction.
+  if (!constructor_type->IsConstant() ||
+      !constructor_type->AsConstant()->Value()->IsJSFunction()) {
     return NoChange();
   }
-
   Handle<JSFunction> function =
-      Handle<JSFunction>::cast(r.right_type()->AsConstant()->Value());
-  Handle<SharedFunctionInfo> shared(function->shared(), isolate());
+      Handle<JSFunction>::cast(constructor_type->AsConstant()->Value());
 
-  // Make sure the prototype of {function} is the %FunctionPrototype%, and it
-  // already has a meaningful initial map (i.e. we constructed at least one
-  // instance using the constructor {function}).
-  if (function->map()->prototype() != function->native_context()->closure() ||
-      function->map()->has_non_instance_prototype() ||
-      !function->has_initial_map()) {
-    return NoChange();
-  }
+  // Check if the {function} already has an initial map (i.e. the
+  // {function} has been used as a constructor at least once).
+  if (!function->has_initial_map()) return NoChange();
 
-  // We can only use the fast case if @@hasInstance was not used so far.
-  if (!isolate()->IsHasInstanceLookupChainIntact()) return NoChange();
-  dependencies()->AssumePropertyCell(factory()->has_instance_protector());
+  // Check if the {function}s "prototype" is a JSReceiver.
+  if (!function->prototype()->IsJSReceiver()) return NoChange();
 
+  // Install a code dependency on the {function}s initial map.
   Handle<Map> initial_map(function->initial_map(), isolate());
   dependencies()->AssumeInitialMapCantChange(initial_map);
+
   Node* prototype =
       jsgraph()->Constant(handle(initial_map->prototype(), isolate()));
 
-  // If the left hand side is an object, no smi check is needed.
-  Node* is_smi = graph()->NewNode(simplified()->ObjectIsSmi(), r.left());
-  Node* branch_is_smi =
-      graph()->NewNode(common()->Branch(BranchHint::kFalse), is_smi, control);
-  Node* if_is_smi = graph()->NewNode(common()->IfTrue(), branch_is_smi);
-  Node* e_is_smi = effect;
-  control = graph()->NewNode(common()->IfFalse(), branch_is_smi);
+  Node* check0 = graph()->NewNode(simplified()->ObjectIsSmi(), object);
+  Node* branch0 =
+      graph()->NewNode(common()->Branch(BranchHint::kFalse), check0, control);
 
-  Node* object_map = effect =
-      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
-                       r.left(), effect, control);
+  Node* if_true0 = graph()->NewNode(common()->IfTrue(), branch0);
+  Node* etrue0 = effect;
+  Node* vtrue0 = jsgraph()->FalseConstant();
+
+  control = graph()->NewNode(common()->IfFalse(), branch0);
 
   // Loop through the {object}s prototype chain looking for the {prototype}.
   Node* loop = control = graph()->NewNode(common()->Loop(2), control, control);
-
-  Node* loop_effect = effect =
+  Node* eloop = effect =
       graph()->NewNode(common()->EffectPhi(2), effect, effect, loop);
+  Node* vloop = object = graph()->NewNode(
+      common()->Phi(MachineRepresentation::kTagged, 2), object, object, loop);
+  // TODO(jarin): This is a very ugly hack to work-around the super-smart
+  // implicit typing of the Phi, which goes completely nuts if the {object}
+  // is for example a HeapConstant.
+  NodeProperties::SetType(vloop, Type::NonInternal());
 
-  Node* loop_object_map =
-      graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
-                       object_map, r.left(), loop);
+  // Load the {object} map and instance type.
+  Node* object_map = effect =
+      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()), object,
+                       effect, control);
+  Node* object_instance_type = effect = graph()->NewNode(
+      simplified()->LoadField(AccessBuilder::ForMapInstanceType()), object_map,
+      effect, control);
 
-  // Check if the lhs needs access checks.
-  Node* map_bit_field = effect =
-      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMapBitField()),
-                       loop_object_map, loop_effect, control);
-  int is_access_check_needed_bit = 1 << Map::kIsAccessCheckNeeded;
-  Node* is_access_check_needed_num =
-      graph()->NewNode(simplified()->NumberBitwiseAnd(), map_bit_field,
-                       jsgraph()->Constant(is_access_check_needed_bit));
-  Node* is_access_check_needed =
-      graph()->NewNode(simplified()->NumberEqual(), is_access_check_needed_num,
-                       jsgraph()->Constant(is_access_check_needed_bit));
+  // Check if the {object} is a special receiver, because for special
+  // receivers, i.e. proxies or API objects that need access checks,
+  // we have to use the %HasInPrototypeChain runtime function instead.
+  Node* check1 = graph()->NewNode(
+      simplified()->NumberLessThanOrEqual(), object_instance_type,
+      jsgraph()->Constant(LAST_SPECIAL_RECEIVER_TYPE));
+  Node* branch1 =
+      graph()->NewNode(common()->Branch(BranchHint::kFalse), check1, control);
 
-  Node* branch_is_access_check_needed = graph()->NewNode(
-      common()->Branch(BranchHint::kFalse), is_access_check_needed, control);
-  Node* if_is_access_check_needed =
-      graph()->NewNode(common()->IfTrue(), branch_is_access_check_needed);
-  Node* e_is_access_check_needed = effect;
+  control = graph()->NewNode(common()->IfFalse(), branch1);
 
-  control =
-      graph()->NewNode(common()->IfFalse(), branch_is_access_check_needed);
+  Node* if_true1 = graph()->NewNode(common()->IfTrue(), branch1);
+  Node* etrue1 = effect;
+  Node* vtrue1;
 
-  // Check if the lhs is a proxy.
-  Node* map_instance_type = effect = graph()->NewNode(
-      simplified()->LoadField(AccessBuilder::ForMapInstanceType()),
-      loop_object_map, loop_effect, control);
-  Node* is_proxy =
-      graph()->NewNode(simplified()->NumberEqual(), map_instance_type,
-                       jsgraph()->Constant(JS_PROXY_TYPE));
-  Node* branch_is_proxy =
-      graph()->NewNode(common()->Branch(BranchHint::kFalse), is_proxy, control);
-  Node* if_is_proxy = graph()->NewNode(common()->IfTrue(), branch_is_proxy);
-  Node* e_is_proxy = effect;
+  // Check if the {object} is not a receiver at all.
+  Node* check10 =
+      graph()->NewNode(simplified()->NumberLessThan(), object_instance_type,
+                       jsgraph()->Constant(FIRST_JS_RECEIVER_TYPE));
+  Node* branch10 =
+      graph()->NewNode(common()->Branch(BranchHint::kTrue), check10, if_true1);
 
-  control = graph()->NewNode(common()->Merge(2), if_is_access_check_needed,
-                             if_is_proxy);
-  effect = graph()->NewNode(common()->EffectPhi(2), e_is_access_check_needed,
-                            e_is_proxy, control);
+  // A primitive value cannot match the {prototype} we're looking for.
+  if_true1 = graph()->NewNode(common()->IfTrue(), branch10);
+  vtrue1 = jsgraph()->FalseConstant();
 
-  // If we need an access check or the object is a Proxy, make a runtime call
-  // to finish the lowering.
-  Node* runtimecall = graph()->NewNode(
-      javascript()->CallRuntime(Runtime::kHasInPrototypeChain), r.left(),
-      prototype, context, frame_state, effect, control);
+  Node* if_false1 = graph()->NewNode(common()->IfFalse(), branch10);
+  Node* efalse1 = etrue1;
+  Node* vfalse1;
+  {
+    // Slow path, need to call the %HasInPrototypeChain runtime function.
+    vfalse1 = efalse1 = graph()->NewNode(
+        javascript()->CallRuntime(Runtime::kHasInPrototypeChain), object,
+        prototype, context, frame_state, efalse1, if_false1);
+    if_false1 = graph()->NewNode(common()->IfSuccess(), vfalse1);
 
-  Node* runtimecall_control =
-      graph()->NewNode(common()->IfSuccess(), runtimecall);
+    // Replace any potential IfException on {node} to catch exceptions
+    // from this %HasInPrototypeChain runtime call instead.
+    for (Edge edge : node->use_edges()) {
+      if (edge.from()->opcode() == IrOpcode::kIfException) {
+        edge.UpdateTo(vfalse1);
+        Revisit(edge.from());
+      }
+    }
+  }
 
-  control = graph()->NewNode(common()->IfFalse(), branch_is_proxy);
-
+  // Load the {object} prototype.
   Node* object_prototype = effect = graph()->NewNode(
-      simplified()->LoadField(AccessBuilder::ForMapPrototype()),
-      loop_object_map, loop_effect, control);
+      simplified()->LoadField(AccessBuilder::ForMapPrototype()), object_map,
+      effect, control);
 
-  // If not, check if object prototype is the null prototype.
-  Node* null_proto =
-      graph()->NewNode(simplified()->ReferenceEqual(), object_prototype,
-                       jsgraph()->NullConstant());
-  Node* branch_null_proto = graph()->NewNode(
-      common()->Branch(BranchHint::kFalse), null_proto, control);
-  Node* if_null_proto = graph()->NewNode(common()->IfTrue(), branch_null_proto);
-  Node* e_null_proto = effect;
+  // Check if we reached the end of {object}s prototype chain.
+  Node* check2 = graph()->NewNode(simplified()->ReferenceEqual(),
+                                  object_prototype, jsgraph()->NullConstant());
+  Node* branch2 = graph()->NewNode(common()->Branch(), check2, control);
 
-  control = graph()->NewNode(common()->IfFalse(), branch_null_proto);
+  Node* if_true2 = graph()->NewNode(common()->IfTrue(), branch2);
+  Node* etrue2 = effect;
+  Node* vtrue2 = jsgraph()->FalseConstant();
 
-  // Check if object prototype is equal to function prototype.
-  Node* eq_proto = graph()->NewNode(simplified()->ReferenceEqual(),
-                                    object_prototype, prototype);
-  Node* branch_eq_proto =
-      graph()->NewNode(common()->Branch(BranchHint::kFalse), eq_proto, control);
-  Node* if_eq_proto = graph()->NewNode(common()->IfTrue(), branch_eq_proto);
-  Node* e_eq_proto = effect;
+  control = graph()->NewNode(common()->IfFalse(), branch2);
 
-  control = graph()->NewNode(common()->IfFalse(), branch_eq_proto);
+  // Check if we reached the {prototype}.
+  Node* check3 = graph()->NewNode(simplified()->ReferenceEqual(),
+                                  object_prototype, prototype);
+  Node* branch3 = graph()->NewNode(common()->Branch(), check3, control);
 
-  Node* load_object_map = effect =
-      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
-                       object_prototype, effect, control);
+  Node* if_true3 = graph()->NewNode(common()->IfTrue(), branch3);
+  Node* etrue3 = effect;
+  Node* vtrue3 = jsgraph()->TrueConstant();
+
+  control = graph()->NewNode(common()->IfFalse(), branch3);
+
   // Close the loop.
-  loop_effect->ReplaceInput(1, effect);
-  loop_object_map->ReplaceInput(1, load_object_map);
+  vloop->ReplaceInput(1, object_prototype);
+  eloop->ReplaceInput(1, effect);
   loop->ReplaceInput(1, control);
 
-  control = graph()->NewNode(common()->Merge(3), runtimecall_control,
-                             if_eq_proto, if_null_proto);
-  effect = graph()->NewNode(common()->EffectPhi(3), runtimecall, e_eq_proto,
-                            e_null_proto, control);
+  control = graph()->NewNode(common()->Merge(5), if_true0, if_true1, if_true2,
+                             if_true3, if_false1);
+  effect = graph()->NewNode(common()->EffectPhi(5), etrue0, etrue1, etrue2,
+                            etrue3, efalse1, control);
 
-  Node* result = graph()->NewNode(
-      common()->Phi(MachineRepresentation::kTagged, 3), runtimecall,
-      jsgraph()->TrueConstant(), jsgraph()->FalseConstant(), control);
-
-  control = graph()->NewNode(common()->Merge(2), if_is_smi, control);
-  effect = graph()->NewNode(common()->EffectPhi(2), e_is_smi, effect, control);
-  result = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
-                            jsgraph()->FalseConstant(), result, control);
-
-  ReplaceWithValue(node, result, effect, control);
-  return Changed(result);
+  // Morph the {node} into an appropriate Phi.
+  ReplaceWithValue(node, node, effect, control);
+  node->ReplaceInput(0, vtrue0);
+  node->ReplaceInput(1, vtrue1);
+  node->ReplaceInput(2, vtrue2);
+  node->ReplaceInput(3, vtrue3);
+  node->ReplaceInput(4, vfalse1);
+  node->ReplaceInput(5, control);
+  node->TrimInputCount(6);
+  NodeProperties::ChangeOp(node,
+                           common()->Phi(MachineRepresentation::kTagged, 5));
+  return Changed(node);
 }
 
 Reduction JSTypedLowering::ReduceJSLoadContext(Node* node) {
@@ -1546,16 +1666,18 @@
   const int argc = arity + BuiltinArguments::kNumExtraArgsWithReceiver;
   Node* argc_node = jsgraph->Int32Constant(argc);
 
-  node->InsertInput(zone, arity + 2, argc_node);
-  node->InsertInput(zone, arity + 3, target);
-  node->InsertInput(zone, arity + 4, new_target);
+  static const int kStubAndReceiver = 2;
+  int cursor = arity + kStubAndReceiver;
+  node->InsertInput(zone, cursor++, argc_node);
+  node->InsertInput(zone, cursor++, target);
+  node->InsertInput(zone, cursor++, new_target);
 
   Address entry = Builtins::CppEntryOf(builtin_index);
   ExternalReference entry_ref(ExternalReference(entry, isolate));
   Node* entry_node = jsgraph->ExternalConstant(entry_ref);
 
-  node->InsertInput(zone, arity + 5, entry_node);
-  node->InsertInput(zone, arity + 6, argc_node);
+  node->InsertInput(zone, cursor++, entry_node);
+  node->InsertInput(zone, cursor++, argc_node);
 
   static const int kReturnCount = 1;
   const char* debug_name = Builtins::name(builtin_index);
@@ -1566,6 +1688,12 @@
   NodeProperties::ChangeOp(node, jsgraph->common()->Call(desc));
 }
 
+bool NeedsArgumentAdaptorFrame(Handle<SharedFunctionInfo> shared, int arity) {
+  static const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
+  const int num_decl_parms = shared->internal_formal_parameter_count();
+  return (num_decl_parms != arity && num_decl_parms != sentinel);
+}
+
 }  // namespace
 
 Reduction JSTypedLowering::ReduceJSCallConstruct(Node* node) {
@@ -1591,9 +1719,7 @@
     CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
 
     if (is_builtin && Builtins::HasCppImplementation(builtin_index) &&
-        (shared->internal_formal_parameter_count() == arity ||
-         shared->internal_formal_parameter_count() ==
-             SharedFunctionInfo::kDontAdaptArgumentsSentinel)) {
+        !NeedsArgumentAdaptorFrame(shared, arity)) {
       // Patch {node} to a direct CEntryStub call.
 
       // Load the context from the {target}.
@@ -1705,22 +1831,7 @@
 
     Node* new_target = jsgraph()->UndefinedConstant();
     Node* argument_count = jsgraph()->Int32Constant(arity);
-    if (is_builtin && Builtins::HasCppImplementation(builtin_index) &&
-        (shared->internal_formal_parameter_count() == arity ||
-         shared->internal_formal_parameter_count() ==
-             SharedFunctionInfo::kDontAdaptArgumentsSentinel)) {
-      // Patch {node} to a direct CEntryStub call.
-      ReduceBuiltin(isolate(), jsgraph(), node, builtin_index, arity, flags);
-    } else if (shared->internal_formal_parameter_count() == arity ||
-               shared->internal_formal_parameter_count() ==
-                   SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
-      // Patch {node} to a direct call.
-      node->InsertInput(graph()->zone(), arity + 2, new_target);
-      node->InsertInput(graph()->zone(), arity + 3, argument_count);
-      NodeProperties::ChangeOp(node,
-                               common()->Call(Linkage::GetJSCallDescriptor(
-                                   graph()->zone(), false, 1 + arity, flags)));
-    } else {
+    if (NeedsArgumentAdaptorFrame(shared, arity)) {
       // Patch {node} to an indirect call via the ArgumentsAdaptorTrampoline.
       Callable callable = CodeFactory::ArgumentAdaptor(isolate());
       node->InsertInput(graph()->zone(), 0,
@@ -1734,6 +1845,16 @@
           node, common()->Call(Linkage::GetStubCallDescriptor(
                     isolate(), graph()->zone(), callable.descriptor(),
                     1 + arity, flags)));
+    } else if (is_builtin && Builtins::HasCppImplementation(builtin_index)) {
+      // Patch {node} to a direct CEntryStub call.
+      ReduceBuiltin(isolate(), jsgraph(), node, builtin_index, arity, flags);
+    } else {
+      // Patch {node} to a direct call.
+      node->InsertInput(graph()->zone(), arity + 2, new_target);
+      node->InsertInput(graph()->zone(), arity + 3, argument_count);
+      NodeProperties::ChangeOp(node,
+                               common()->Call(Linkage::GetJSCallDescriptor(
+                                   graph()->zone(), false, 1 + arity, flags)));
     }
     return Changed(node);
   }
@@ -1761,8 +1882,8 @@
   // Maybe we did at least learn something about the {receiver}.
   if (p.convert_mode() != convert_mode) {
     NodeProperties::ChangeOp(
-        node, javascript()->CallFunction(p.arity(), p.feedback(), convert_mode,
-                                         p.tail_call_mode()));
+        node, javascript()->CallFunction(p.arity(), p.frequency(), p.feedback(),
+                                         convert_mode, p.tail_call_mode()));
     return Changed(node);
   }
 
@@ -1770,14 +1891,6 @@
 }
 
 
-Reduction JSTypedLowering::ReduceJSForInDone(Node* node) {
-  DCHECK_EQ(IrOpcode::kJSForInDone, node->opcode());
-  node->TrimInputCount(2);
-  NodeProperties::ChangeOp(node, machine()->Word32Equal());
-  return Changed(node);
-}
-
-
 Reduction JSTypedLowering::ReduceJSForInNext(Node* node) {
   DCHECK_EQ(IrOpcode::kJSForInNext, node->opcode());
   Node* receiver = NodeProperties::GetValueInput(node, 0);
@@ -1843,14 +1956,6 @@
   return Changed(node);
 }
 
-
-Reduction JSTypedLowering::ReduceJSForInStep(Node* node) {
-  DCHECK_EQ(IrOpcode::kJSForInStep, node->opcode());
-  node->ReplaceInput(1, jsgraph()->Int32Constant(1));
-  NodeProperties::ChangeOp(node, machine()->Int32Add());
-  return Changed(node);
-}
-
 Reduction JSTypedLowering::ReduceJSGeneratorStore(Node* node) {
   DCHECK_EQ(IrOpcode::kJSGeneratorStore, node->opcode());
   Node* generator = NodeProperties::GetValueInput(node, 0);
@@ -1930,174 +2035,7 @@
   return Changed(element);
 }
 
-Reduction JSTypedLowering::ReduceSelect(Node* node) {
-  DCHECK_EQ(IrOpcode::kSelect, node->opcode());
-  Node* const condition = NodeProperties::GetValueInput(node, 0);
-  Type* const condition_type = NodeProperties::GetType(condition);
-  Node* const vtrue = NodeProperties::GetValueInput(node, 1);
-  Type* const vtrue_type = NodeProperties::GetType(vtrue);
-  Node* const vfalse = NodeProperties::GetValueInput(node, 2);
-  Type* const vfalse_type = NodeProperties::GetType(vfalse);
-  if (condition_type->Is(true_type_)) {
-    // Select(condition:true, vtrue, vfalse) => vtrue
-    return Replace(vtrue);
-  }
-  if (condition_type->Is(false_type_)) {
-    // Select(condition:false, vtrue, vfalse) => vfalse
-    return Replace(vfalse);
-  }
-  if (vtrue_type->Is(true_type_) && vfalse_type->Is(false_type_)) {
-    // Select(condition, vtrue:true, vfalse:false) => condition
-    return Replace(condition);
-  }
-  if (vtrue_type->Is(false_type_) && vfalse_type->Is(true_type_)) {
-    // Select(condition, vtrue:false, vfalse:true) => BooleanNot(condition)
-    node->TrimInputCount(1);
-    NodeProperties::ChangeOp(node, simplified()->BooleanNot());
-    return Changed(node);
-  }
-  return NoChange();
-}
-
-namespace {
-
-MaybeHandle<Map> GetStableMapFromObjectType(Type* object_type) {
-  if (object_type->IsConstant() &&
-      object_type->AsConstant()->Value()->IsHeapObject()) {
-    Handle<Map> object_map(
-        Handle<HeapObject>::cast(object_type->AsConstant()->Value())->map());
-    if (object_map->is_stable()) return object_map;
-  } else if (object_type->IsClass()) {
-    Handle<Map> object_map = object_type->AsClass()->Map();
-    if (object_map->is_stable()) return object_map;
-  }
-  return MaybeHandle<Map>();
-}
-
-}  // namespace
-
-Reduction JSTypedLowering::ReduceCheckMaps(Node* node) {
-  // TODO(bmeurer): Find a better home for this thing!
-  // The CheckMaps(o, ...map...) can be eliminated if map is stable and
-  // either
-  //  (a) o has type Constant(object) and map == object->map, or
-  //  (b) o has type Class(map),
-  // and either
-  //  (1) map cannot transition further, or
-  //  (2) we can add a code dependency on the stability of map
-  //      (to guard the Constant type information).
-  Node* const object = NodeProperties::GetValueInput(node, 0);
-  Type* const object_type = NodeProperties::GetType(object);
-  Node* const effect = NodeProperties::GetEffectInput(node);
-  Handle<Map> object_map;
-  if (GetStableMapFromObjectType(object_type).ToHandle(&object_map)) {
-    for (int i = 1; i < node->op()->ValueInputCount(); ++i) {
-      Node* const map = NodeProperties::GetValueInput(node, i);
-      Type* const map_type = NodeProperties::GetType(map);
-      if (map_type->IsConstant() &&
-          map_type->AsConstant()->Value().is_identical_to(object_map)) {
-        if (object_map->CanTransition()) {
-          DCHECK(flags() & kDeoptimizationEnabled);
-          dependencies()->AssumeMapStable(object_map);
-        }
-        return Replace(effect);
-      }
-    }
-  }
-  return NoChange();
-}
-
-Reduction JSTypedLowering::ReduceCheckString(Node* node) {
-  // TODO(bmeurer): Find a better home for this thing!
-  Node* const input = NodeProperties::GetValueInput(node, 0);
-  Type* const input_type = NodeProperties::GetType(input);
-  if (input_type->Is(Type::String())) {
-    ReplaceWithValue(node, input);
-    return Replace(input);
-  }
-  return NoChange();
-}
-
-Reduction JSTypedLowering::ReduceLoadField(Node* node) {
-  // TODO(bmeurer): Find a better home for this thing!
-  Node* const object = NodeProperties::GetValueInput(node, 0);
-  Type* const object_type = NodeProperties::GetType(object);
-  FieldAccess const& access = FieldAccessOf(node->op());
-  if (access.base_is_tagged == kTaggedBase &&
-      access.offset == HeapObject::kMapOffset) {
-    // We can replace LoadField[Map](o) with map if is stable and either
-    //  (a) o has type Constant(object) and map == object->map, or
-    //  (b) o has type Class(map),
-    // and either
-    //  (1) map cannot transition further, or
-    //  (2) deoptimization is enabled and we can add a code dependency on the
-    //      stability of map (to guard the Constant type information).
-    Handle<Map> object_map;
-    if (GetStableMapFromObjectType(object_type).ToHandle(&object_map)) {
-      if (object_map->CanTransition()) {
-        if (flags() & kDeoptimizationEnabled) {
-          dependencies()->AssumeMapStable(object_map);
-        } else {
-          return NoChange();
-        }
-      }
-      Node* const value = jsgraph()->HeapConstant(object_map);
-      ReplaceWithValue(node, value);
-      return Replace(value);
-    }
-  }
-  return NoChange();
-}
-
-Reduction JSTypedLowering::ReduceNumberRoundop(Node* node) {
-  // TODO(bmeurer): Find a better home for this thing!
-  Node* const input = NodeProperties::GetValueInput(node, 0);
-  Type* const input_type = NodeProperties::GetType(input);
-  if (input_type->Is(type_cache_.kIntegerOrMinusZeroOrNaN)) {
-    return Replace(input);
-  }
-  return NoChange();
-}
-
 Reduction JSTypedLowering::Reduce(Node* node) {
-  // Check if the output type is a singleton.  In that case we already know the
-  // result value and can simply replace the node if it's eliminable.
-  if (!NodeProperties::IsConstant(node) && NodeProperties::IsTyped(node) &&
-      node->op()->HasProperty(Operator::kEliminatable)) {
-    // We can only constant-fold nodes here, that are known to not cause any
-    // side-effect, may it be a JavaScript observable side-effect or a possible
-    // eager deoptimization exit (i.e. {node} has an operator that doesn't have
-    // the Operator::kNoDeopt property).
-    Type* upper = NodeProperties::GetType(node);
-    if (upper->IsInhabited()) {
-      if (upper->IsConstant()) {
-        Node* replacement = jsgraph()->Constant(upper->AsConstant()->Value());
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      } else if (upper->Is(Type::MinusZero())) {
-        Node* replacement = jsgraph()->Constant(factory()->minus_zero_value());
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      } else if (upper->Is(Type::NaN())) {
-        Node* replacement = jsgraph()->NaNConstant();
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      } else if (upper->Is(Type::Null())) {
-        Node* replacement = jsgraph()->NullConstant();
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      } else if (upper->Is(Type::PlainNumber()) &&
-                 upper->Min() == upper->Max()) {
-        Node* replacement = jsgraph()->Constant(upper->Min());
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      } else if (upper->Is(Type::Undefined())) {
-        Node* replacement = jsgraph()->UndefinedConstant();
-        ReplaceWithValue(node, replacement);
-        return Changed(replacement);
-      }
-    }
-  }
   switch (node->opcode()) {
     case IrOpcode::kJSEqual:
       return ReduceJSEqual(node, false);
@@ -2128,6 +2066,8 @@
     case IrOpcode::kJSDivide:
     case IrOpcode::kJSModulus:
       return ReduceNumberBinop(node);
+    case IrOpcode::kJSOrdinaryHasInstance:
+      return ReduceJSOrdinaryHasInstance(node);
     case IrOpcode::kJSToBoolean:
       return ReduceJSToBoolean(node);
     case IrOpcode::kJSToInteger:
@@ -2146,8 +2086,6 @@
       return ReduceJSLoadProperty(node);
     case IrOpcode::kJSStoreProperty:
       return ReduceJSStoreProperty(node);
-    case IrOpcode::kJSInstanceOf:
-      return ReduceJSInstanceOf(node);
     case IrOpcode::kJSLoadContext:
       return ReduceJSLoadContext(node);
     case IrOpcode::kJSStoreContext:
@@ -2158,31 +2096,14 @@
       return ReduceJSCallConstruct(node);
     case IrOpcode::kJSCallFunction:
       return ReduceJSCallFunction(node);
-    case IrOpcode::kJSForInDone:
-      return ReduceJSForInDone(node);
     case IrOpcode::kJSForInNext:
       return ReduceJSForInNext(node);
-    case IrOpcode::kJSForInStep:
-      return ReduceJSForInStep(node);
     case IrOpcode::kJSGeneratorStore:
       return ReduceJSGeneratorStore(node);
     case IrOpcode::kJSGeneratorRestoreContinuation:
       return ReduceJSGeneratorRestoreContinuation(node);
     case IrOpcode::kJSGeneratorRestoreRegister:
       return ReduceJSGeneratorRestoreRegister(node);
-    case IrOpcode::kSelect:
-      return ReduceSelect(node);
-    case IrOpcode::kCheckMaps:
-      return ReduceCheckMaps(node);
-    case IrOpcode::kCheckString:
-      return ReduceCheckString(node);
-    case IrOpcode::kNumberCeil:
-    case IrOpcode::kNumberFloor:
-    case IrOpcode::kNumberRound:
-    case IrOpcode::kNumberTrunc:
-      return ReduceNumberRoundop(node);
-    case IrOpcode::kLoadField:
-      return ReduceLoadField(node);
     default:
       break;
   }
@@ -2208,10 +2129,6 @@
   return jsgraph()->common();
 }
 
-MachineOperatorBuilder* JSTypedLowering::machine() const {
-  return jsgraph()->machine();
-}
-
 SimplifiedOperatorBuilder* JSTypedLowering::simplified() const {
   return jsgraph()->simplified();
 }
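
The refactored ReduceJSCallFunction/ReduceJSCallConstruct paths above hinge on the new NeedsArgumentAdaptorFrame helper: an adaptor frame is needed only when the declared parameter count neither matches the call-site arity nor carries the "don't adapt" sentinel. A self-contained sketch under that reading (the sentinel value below is a placeholder, not the real SharedFunctionInfo constant):

#include <iostream>

// Placeholder for SharedFunctionInfo::kDontAdaptArgumentsSentinel.
constexpr int kDontAdaptArgumentsSentinel = -1;

// Mirrors the helper factored out of the call lowering: true only if the
// declared parameter count disagrees with the call-site arity and the
// function has not opted out of argument adaptation.
bool NeedsArgumentAdaptorFrame(int declared_parameter_count, int arity) {
  return declared_parameter_count != arity &&
         declared_parameter_count != kDontAdaptArgumentsSentinel;
}

int main() {
  std::cout << NeedsArgumentAdaptorFrame(2, 2) << "\n";                            // 0
  std::cout << NeedsArgumentAdaptorFrame(kDontAdaptArgumentsSentinel, 5) << "\n";  // 0
  std::cout << NeedsArgumentAdaptorFrame(3, 1) << "\n";                            // 1
}
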
diff --git a/src/compiler/js-typed-lowering.h b/src/compiler/js-typed-lowering.h
index 35c397f..b0cf1f4 100644
--- a/src/compiler/js-typed-lowering.h
+++ b/src/compiler/js-typed-lowering.h
@@ -15,8 +15,6 @@
 // Forward declarations.
 class CompilationDependencies;
 class Factory;
-class TypeCache;
-
 
 namespace compiler {
 
@@ -24,9 +22,8 @@
 class CommonOperatorBuilder;
 class JSGraph;
 class JSOperatorBuilder;
-class MachineOperatorBuilder;
 class SimplifiedOperatorBuilder;
-
+class TypeCache;
 
 // Lowers JS-level operators to simplified operators based on types.
 class JSTypedLowering final : public AdvancedReducer {
@@ -52,7 +49,7 @@
   Reduction ReduceJSLoadNamed(Node* node);
   Reduction ReduceJSLoadProperty(Node* node);
   Reduction ReduceJSStoreProperty(Node* node);
-  Reduction ReduceJSInstanceOf(Node* node);
+  Reduction ReduceJSOrdinaryHasInstance(Node* node);
   Reduction ReduceJSLoadContext(Node* node);
   Reduction ReduceJSStoreContext(Node* node);
   Reduction ReduceJSEqualTypeOf(Node* node, bool invert);
@@ -69,20 +66,14 @@
   Reduction ReduceJSConvertReceiver(Node* node);
   Reduction ReduceJSCallConstruct(Node* node);
   Reduction ReduceJSCallFunction(Node* node);
-  Reduction ReduceJSForInDone(Node* node);
   Reduction ReduceJSForInNext(Node* node);
-  Reduction ReduceJSForInStep(Node* node);
   Reduction ReduceJSGeneratorStore(Node* node);
   Reduction ReduceJSGeneratorRestoreContinuation(Node* node);
   Reduction ReduceJSGeneratorRestoreRegister(Node* node);
-  Reduction ReduceCheckMaps(Node* node);
-  Reduction ReduceCheckString(Node* node);
-  Reduction ReduceLoadField(Node* node);
-  Reduction ReduceNumberRoundop(Node* node);
-  Reduction ReduceSelect(Node* node);
   Reduction ReduceNumberBinop(Node* node);
   Reduction ReduceInt32Binop(Node* node);
   Reduction ReduceUI32Shift(Node* node, Signedness signedness);
+  Reduction ReduceCreateConsString(Node* node);
 
   Factory* factory() const;
   Graph* graph() const;
@@ -91,7 +82,6 @@
   JSOperatorBuilder* javascript() const;
   CommonOperatorBuilder* common() const;
   SimplifiedOperatorBuilder* simplified() const;
-  MachineOperatorBuilder* machine() const;
   CompilationDependencies* dependencies() const;
   Flags flags() const { return flags_; }
 
@@ -99,8 +89,6 @@
   Flags flags_;
   JSGraph* jsgraph_;
   Type* shifted_int32_ranges_[4];
-  Type* const true_type_;
-  Type* const false_type_;
   Type* const the_hole_type_;
   TypeCache const& type_cache_;
 };
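
ReduceCreateConsString, declared above and implemented in js-typed-lowering.cc, only fires when one operand is a known constant string whose length already reaches the ConsString minimum (and, for a constant left operand, is sequential or external so the ConsString invariant holds); the lowered code then still guards the combined length against the maximum string length and throws a RangeError on overflow. A rough standalone sketch of that decision (kConsMinLength and kMaxStringLength are placeholder values, not the real V8 constants):

#include <cstddef>
#include <cstdint>

constexpr size_t kConsMinLength = 13;                   // placeholder for ConsString::kMinLength
constexpr uint64_t kMaxStringLength = (1u << 28) - 16;  // placeholder for String::kMaxLength

struct StringOperandInfo {
  bool is_constant = false;  // operand type is a string constant
  size_t length = 0;         // known length if constant
  bool is_flat = false;      // sequential or external string
};

// Roughly mirrors JSBinopReduction::ShouldCreateConsString: decide up front
// whether the result of a string addition must be a ConsString.
bool ShouldCreateConsString(const StringOperandInfo& left,
                            const StringOperandInfo& right) {
  if (right.is_constant && right.length >= kConsMinLength) return true;
  if (left.is_constant && left.length >= kConsMinLength) {
    // The ConsString invariant requires a sequential or external left side
    // in case the right side turns out to be the empty string.
    return left.is_flat;
  }
  return false;
}

// The generated code still checks the combined length at runtime and calls
// Runtime::kThrowInvalidStringLength when it would overflow.
bool FitsMaxStringLength(uint64_t left_length, uint64_t right_length) {
  return left_length + right_length <= kMaxStringLength;
}

int main() {
  StringOperandInfo lhs{true, 20, true}, rhs{false, 0, false};
  return ShouldCreateConsString(lhs, rhs) ? 0 : 1;  // 0: constant left qualifies
}
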
diff --git a/src/compiler/linkage.cc b/src/compiler/linkage.cc
index e4df58d..523ce47 100644
--- a/src/compiler/linkage.cc
+++ b/src/compiler/linkage.cc
@@ -7,7 +7,7 @@
 #include "src/ast/scopes.h"
 #include "src/builtins/builtins-utils.h"
 #include "src/code-stubs.h"
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/compiler/common-operator.h"
 #include "src/compiler/frame.h"
 #include "src/compiler/node.h"
@@ -24,34 +24,6 @@
   return LinkageLocation::ForRegister(reg.code(), type);
 }
 
-MachineType reptyp(Representation representation) {
-  switch (representation.kind()) {
-    case Representation::kInteger8:
-      return MachineType::Int8();
-    case Representation::kUInteger8:
-      return MachineType::Uint8();
-    case Representation::kInteger16:
-      return MachineType::Int16();
-    case Representation::kUInteger16:
-      return MachineType::Uint16();
-    case Representation::kInteger32:
-      return MachineType::Int32();
-    case Representation::kSmi:
-    case Representation::kTagged:
-    case Representation::kHeapObject:
-      return MachineType::AnyTagged();
-    case Representation::kDouble:
-      return MachineType::Float64();
-    case Representation::kExternal:
-      return MachineType::Pointer();
-    case Representation::kNone:
-    case Representation::kNumRepresentations:
-      break;
-  }
-  UNREACHABLE();
-  return MachineType::None();
-}
-
 }  // namespace
 
 
@@ -152,17 +124,16 @@
 
 // static
 bool Linkage::NeedsFrameStateInput(Runtime::FunctionId function) {
-  // Most runtime functions need a FrameState. A few chosen ones that we know
-  // not to call into arbitrary JavaScript, not to throw, and not to deoptimize
-  // are blacklisted here and can be called without a FrameState.
   switch (function) {
+    // Most runtime functions need a FrameState. A few chosen ones that we
+    // know not to call into arbitrary JavaScript, not to throw, and not to
+    // deoptimize are whitelisted here and can be called without a
+    // FrameState.
     case Runtime::kAbort:
     case Runtime::kAllocateInTargetSpace:
     case Runtime::kCreateIterResultObject:
     case Runtime::kDefineGetterPropertyUnchecked:  // TODO(jarin): Is it safe?
     case Runtime::kDefineSetterPropertyUnchecked:  // TODO(jarin): Is it safe?
-    case Runtime::kForInDone:
-    case Runtime::kForInStep:
     case Runtime::kGeneratorGetContinuation:
     case Runtime::kGetSuperConstructor:
     case Runtime::kIsFunction:
@@ -183,29 +154,29 @@
     case Runtime::kTraceEnter:
     case Runtime::kTraceExit:
       return false;
-    case Runtime::kInlineCall:
-    case Runtime::kInlineDeoptimizeNow:
-    case Runtime::kInlineGetPrototype:
-    case Runtime::kInlineNewObject:
-    case Runtime::kInlineRegExpConstructResult:
-    case Runtime::kInlineRegExpExec:
-    case Runtime::kInlineSubString:
-    case Runtime::kInlineThrowNotDateError:
-    case Runtime::kInlineToInteger:
-    case Runtime::kInlineToLength:
-    case Runtime::kInlineToNumber:
-    case Runtime::kInlineToObject:
-    case Runtime::kInlineToString:
-      return true;
+
+    // Some inline intrinsics are also safe to call without a FrameState.
+    case Runtime::kInlineCreateIterResultObject:
+    case Runtime::kInlineFixedArrayGet:
+    case Runtime::kInlineFixedArraySet:
+    case Runtime::kInlineGeneratorClose:
+    case Runtime::kInlineGeneratorGetInputOrDebugPos:
+    case Runtime::kInlineGeneratorGetResumeMode:
+    case Runtime::kInlineGetSuperConstructor:
+    case Runtime::kInlineIsArray:
+    case Runtime::kInlineIsJSReceiver:
+    case Runtime::kInlineIsRegExp:
+    case Runtime::kInlineIsSmi:
+    case Runtime::kInlineIsTypedArray:
+    case Runtime::kInlineRegExpFlags:
+    case Runtime::kInlineRegExpSource:
+      return false;
+
     default:
       break;
   }
 
-  // Most inlined runtime functions (except the ones listed above) can be called
-  // without a FrameState or will be lowered by JSIntrinsicLowering internally.
-  const Runtime::Function* const f = Runtime::FunctionForId(function);
-  if (f->intrinsic_type == Runtime::IntrinsicType::INLINE) return false;
-
+  // For safety, default to needing a FrameState unless whitelisted.
   return true;
 }
 
@@ -382,8 +353,7 @@
     if (i < register_parameter_count) {
       // The first parameters go in registers.
       Register reg = descriptor.GetRegisterParameter(i);
-      MachineType type =
-          reptyp(RepresentationFromType(descriptor.GetParameterType(i)));
+      MachineType type = descriptor.GetParameterType(i);
       locations.AddParam(regloc(reg, type));
     } else {
       // The rest of the parameters go on the stack.
@@ -452,8 +422,7 @@
     if (i < register_parameter_count) {
       // The first parameters go in registers.
       Register reg = descriptor.GetRegisterParameter(i);
-      MachineType type =
-          reptyp(RepresentationFromType(descriptor.GetParameterType(i)));
+      MachineType type = descriptor.GetParameterType(i);
       locations.AddParam(regloc(reg, type));
     } else {
       // The rest of the parameters go on the stack.
diff --git a/src/compiler/linkage.h b/src/compiler/linkage.h
index 1c02508..6f302bc 100644
--- a/src/compiler/linkage.h
+++ b/src/compiler/linkage.h
@@ -11,7 +11,7 @@
 #include "src/frames.h"
 #include "src/machine-type.h"
 #include "src/runtime/runtime.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/live-range-separator.cc b/src/compiler/live-range-separator.cc
index e3cd0a3..db65593 100644
--- a/src/compiler/live-range-separator.cc
+++ b/src/compiler/live-range-separator.cc
@@ -58,6 +58,15 @@
   }
 }
 
+void SetSlotUse(TopLevelLiveRange *range) {
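+  // Recompute has_slot_use from scratch: clear the flag, then set it again
+  // if any use position requires a spill slot.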
+  range->set_has_slot_use(false);
+  for (const UsePosition *pos = range->first_pos();
+       !range->has_slot_use() && pos != nullptr; pos = pos->next()) {
+    if (pos->type() == UsePositionType::kRequiresSlot) {
+      range->set_has_slot_use(true);
+    }
+  }
+}
 
 void SplinterLiveRange(TopLevelLiveRange *range, RegisterAllocationData *data) {
   const InstructionSequence *code = data->code();
@@ -99,7 +108,14 @@
   if (first_cut.IsValid()) {
     CreateSplinter(range, data, first_cut, last_cut);
   }
+
+  // Recompute has_slot_use for both the original range and its splinter.
+  if (range->has_slot_use() && range->splinter() != nullptr) {
+    SetSlotUse(range);
+    SetSlotUse(range->splinter());
+  }
 }
+
 }  // namespace
 
 
diff --git a/src/compiler/live-range-separator.h b/src/compiler/live-range-separator.h
index 57bc982..6aaf6b6 100644
--- a/src/compiler/live-range-separator.h
+++ b/src/compiler/live-range-separator.h
@@ -5,8 +5,7 @@
 #ifndef V8_LIVE_RANGE_SEPARATOR_H_
 #define V8_LIVE_RANGE_SEPARATOR_H_
 
-
-#include <src/zone.h>
+#include "src/zone/zone.h"
 namespace v8 {
 namespace internal {
 
diff --git a/src/compiler/liveness-analyzer.h b/src/compiler/liveness-analyzer.h
index 9b09724..8a3d715 100644
--- a/src/compiler/liveness-analyzer.h
+++ b/src/compiler/liveness-analyzer.h
@@ -7,7 +7,7 @@
 
 #include "src/bit-vector.h"
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/load-elimination.cc b/src/compiler/load-elimination.cc
index ad787f8..93c24a0 100644
--- a/src/compiler/load-elimination.cc
+++ b/src/compiler/load-elimination.cc
@@ -4,6 +4,7 @@
 
 #include "src/compiler/load-elimination.h"
 
+#include "src/compiler/common-operator.h"
 #include "src/compiler/js-graph.h"
 #include "src/compiler/node-properties.h"
 #include "src/compiler/simplified-operator.h"
@@ -21,28 +22,38 @@
   if (!NodeProperties::GetType(a)->Maybe(NodeProperties::GetType(b))) {
     return kNoAlias;
   }
-  if (b->opcode() == IrOpcode::kAllocate) {
-    switch (a->opcode()) {
-      case IrOpcode::kAllocate:
-      case IrOpcode::kHeapConstant:
-      case IrOpcode::kParameter:
-        return kNoAlias;
-      case IrOpcode::kFinishRegion:
-        return QueryAlias(a->InputAt(0), b);
-      default:
-        break;
+  switch (b->opcode()) {
+    case IrOpcode::kAllocate: {
+      switch (a->opcode()) {
+        case IrOpcode::kAllocate:
+        case IrOpcode::kHeapConstant:
+        case IrOpcode::kParameter:
+          return kNoAlias;
+        default:
+          break;
+      }
+      break;
     }
+    case IrOpcode::kFinishRegion:
+      return QueryAlias(a, b->InputAt(0));
+    default:
+      break;
   }
-  if (a->opcode() == IrOpcode::kAllocate) {
-    switch (b->opcode()) {
-      case IrOpcode::kHeapConstant:
-      case IrOpcode::kParameter:
-        return kNoAlias;
-      case IrOpcode::kFinishRegion:
-        return QueryAlias(a, b->InputAt(0));
-      default:
-        break;
+  switch (a->opcode()) {
+    case IrOpcode::kAllocate: {
+      switch (b->opcode()) {
+        case IrOpcode::kHeapConstant:
+        case IrOpcode::kParameter:
+          return kNoAlias;
+        default:
+          break;
+      }
+      break;
     }
+    case IrOpcode::kFinishRegion:
+      return QueryAlias(a->InputAt(0), b);
+    default:
+      break;
   }
   return kMayAlias;
 }
@@ -54,7 +65,35 @@
 }  // namespace
 
 Reduction LoadElimination::Reduce(Node* node) {
+  if (FLAG_trace_turbo_load_elimination) {
+    if (node->op()->EffectInputCount() > 0) {
+      PrintF(" visit #%d:%s", node->id(), node->op()->mnemonic());
+      if (node->op()->ValueInputCount() > 0) {
+        PrintF("(");
+        for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
+          if (i > 0) PrintF(", ");
+          Node* const value = NodeProperties::GetValueInput(node, i);
+          PrintF("#%d:%s", value->id(), value->op()->mnemonic());
+        }
+        PrintF(")");
+      }
+      PrintF("\n");
+      for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
+        Node* const effect = NodeProperties::GetEffectInput(node, i);
+        if (AbstractState const* const state = node_states_.Get(effect)) {
+          PrintF("  state[%i]: #%d:%s\n", i, effect->id(),
+                 effect->op()->mnemonic());
+          state->Print();
+        } else {
+          PrintF("  no state[%i]: #%d:%s\n", i, effect->id(),
+                 effect->op()->mnemonic());
+        }
+      }
+    }
+  }
   switch (node->opcode()) {
+    case IrOpcode::kArrayBufferWasNeutered:
+      return ReduceArrayBufferWasNeutered(node);
     case IrOpcode::kCheckMaps:
       return ReduceCheckMaps(node);
     case IrOpcode::kEnsureWritableFastElements:
@@ -85,6 +124,73 @@
   return NoChange();
 }
 
+namespace {
+
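+// Two checks are considered compatible when they share the same operator
+// and every corresponding pair of value inputs must alias.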
+bool IsCompatibleCheck(Node const* a, Node const* b) {
+  if (a->op() != b->op()) return false;
+  for (int i = a->op()->ValueInputCount(); --i >= 0;) {
+    if (!MustAlias(a->InputAt(i), b->InputAt(i))) return false;
+  }
+  return true;
+}
+
+}  // namespace
+
+Node* LoadElimination::AbstractChecks::Lookup(Node* node) const {
+  for (Node* const check : nodes_) {
+    if (check && IsCompatibleCheck(check, node)) {
+      return check;
+    }
+  }
+  return nullptr;
+}
+
+bool LoadElimination::AbstractChecks::Equals(AbstractChecks const* that) const {
+  if (this == that) return true;
+  for (size_t i = 0; i < arraysize(nodes_); ++i) {
+    if (Node* this_node = this->nodes_[i]) {
+      for (size_t j = 0;; ++j) {
+        if (j == arraysize(nodes_)) return false;
+        if (that->nodes_[j] == this_node) break;
+      }
+    }
+  }
+  for (size_t i = 0; i < arraysize(nodes_); ++i) {
+    if (Node* that_node = that->nodes_[i]) {
+      for (size_t j = 0;; ++j) {
+        if (j == arraysize(nodes_)) return false;
+        if (this->nodes_[j] == that_node) break;
+      }
+    }
+  }
+  return true;
+}
+
+LoadElimination::AbstractChecks const* LoadElimination::AbstractChecks::Merge(
+    AbstractChecks const* that, Zone* zone) const {
+  if (this->Equals(that)) return this;
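+  // Otherwise keep only the checks tracked by both states; checks known to
+  // just one side are conservatively dropped.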
+  AbstractChecks* copy = new (zone) AbstractChecks(zone);
+  for (Node* const this_node : this->nodes_) {
+    if (this_node == nullptr) continue;
+    for (Node* const that_node : that->nodes_) {
+      if (this_node == that_node) {
+        copy->nodes_[copy->next_index_++] = this_node;
+        break;
+      }
+    }
+  }
+  copy->next_index_ %= arraysize(nodes_);
+  return copy;
+}
+
+void LoadElimination::AbstractChecks::Print() const {
+  for (Node* const node : nodes_) {
+    if (node != nullptr) {
+      PrintF("    #%d:%s\n", node->id(), node->op()->mnemonic());
+    }
+  }
+}
+
 Node* LoadElimination::AbstractElements::Lookup(Node* object,
                                                 Node* index) const {
   for (Element const element : elements_) {
@@ -110,7 +216,8 @@
         DCHECK_NOT_NULL(element.index);
         DCHECK_NOT_NULL(element.value);
         if (!MayAlias(object, element.object) ||
-            !MayAlias(index, element.index)) {
+            !NodeProperties::GetType(index)->Maybe(
+                NodeProperties::GetType(element.index))) {
           that->elements_[that->next_index_++] = element;
         }
       }
@@ -165,6 +272,7 @@
           this_element.index == that_element.index &&
           this_element.value == that_element.value) {
         copy->elements_[copy->next_index_++] = this_element;
+        break;
       }
     }
   }
@@ -172,6 +280,17 @@
   return copy;
 }
 
+void LoadElimination::AbstractElements::Print() const {
+  for (Element const& element : elements_) {
+    if (element.object) {
+      PrintF("    #%d:%s @ #%d:%s -> #%d:%s\n", element.object->id(),
+             element.object->op()->mnemonic(), element.index->id(),
+             element.index->op()->mnemonic(), element.value->id(),
+             element.value->op()->mnemonic());
+    }
+  }
+}
+
 Node* LoadElimination::AbstractField::Lookup(Node* object) const {
   for (auto pair : info_for_node_) {
     if (MustAlias(object, pair.first)) return pair.second;
@@ -193,7 +312,22 @@
   return this;
 }
 
+void LoadElimination::AbstractField::Print() const {
+  for (auto pair : info_for_node_) {
+    PrintF("    #%d:%s -> #%d:%s\n", pair.first->id(),
+           pair.first->op()->mnemonic(), pair.second->id(),
+           pair.second->op()->mnemonic());
+  }
+}
+
 bool LoadElimination::AbstractState::Equals(AbstractState const* that) const {
+  if (this->checks_) {
+    if (!that->checks_ || !that->checks_->Equals(this->checks_)) {
+      return false;
+    }
+  } else if (that->checks_) {
+    return false;
+  }
   if (this->elements_) {
     if (!that->elements_ || !that->elements_->Equals(this->elements_)) {
       return false;
@@ -215,13 +349,17 @@
 
 void LoadElimination::AbstractState::Merge(AbstractState const* that,
                                            Zone* zone) {
+  // Merge the information we have about the checks.
+  if (this->checks_) {
+    this->checks_ =
+        that->checks_ ? that->checks_->Merge(this->checks_, zone) : nullptr;
+  }
+
   // Merge the information we have about the elements.
   if (this->elements_) {
     this->elements_ = that->elements_
                           ? that->elements_->Merge(this->elements_, zone)
-                          : that->elements_;
-  } else {
-    this->elements_ = that->elements_;
+                          : nullptr;
   }
 
   // Merge the information we have about the fields.
@@ -236,6 +374,21 @@
   }
 }
 
+Node* LoadElimination::AbstractState::LookupCheck(Node* node) const {
+  return this->checks_ ? this->checks_->Lookup(node) : nullptr;
+}
+
+LoadElimination::AbstractState const* LoadElimination::AbstractState::AddCheck(
+    Node* node, Zone* zone) const {
+  AbstractState* that = new (zone) AbstractState(*this);
+  if (that->checks_) {
+    that->checks_ = that->checks_->Extend(node, zone);
+  } else {
+    that->checks_ = new (zone) AbstractChecks(node, zone);
+  }
+  return that;
+}
+
 Node* LoadElimination::AbstractState::LookupElement(Node* object,
                                                     Node* index) const {
   if (this->elements_) {
@@ -303,6 +456,23 @@
   return nullptr;
 }
 
+void LoadElimination::AbstractState::Print() const {
+  if (checks_) {
+    PrintF("   checks:\n");
+    checks_->Print();
+  }
+  if (elements_) {
+    PrintF("   elements:\n");
+    elements_->Print();
+  }
+  for (size_t i = 0; i < arraysize(fields_); ++i) {
+    if (AbstractField const* const field = fields_[i]) {
+      PrintF("   field %zu:\n", i);
+      field->Print();
+    }
+  }
+}
+
 LoadElimination::AbstractState const*
 LoadElimination::AbstractStateForEffectNodes::Get(Node* node) const {
   size_t const id = node->id();
@@ -317,13 +487,26 @@
   info_for_node_[id] = state;
 }
 
+Reduction LoadElimination::ReduceArrayBufferWasNeutered(Node* node) {
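+  // A neutering check is only invalidated by calls, so if an equivalent
+  // check is already recorded along this effect path we can reuse its
+  // result; otherwise remember this check in the abstract state.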
+  Node* const effect = NodeProperties::GetEffectInput(node);
+  AbstractState const* state = node_states_.Get(effect);
+  if (state == nullptr) return NoChange();
+  if (Node* const check = state->LookupCheck(node)) {
+    ReplaceWithValue(node, check, effect);
+    return Replace(check);
+  }
+  state = state->AddCheck(node, zone());
+  return UpdateState(node, state);
+}
+
 Reduction LoadElimination::ReduceCheckMaps(Node* node) {
   Node* const object = NodeProperties::GetValueInput(node, 0);
   Node* const effect = NodeProperties::GetEffectInput(node);
   AbstractState const* state = node_states_.Get(effect);
   if (state == nullptr) return NoChange();
   int const map_input_count = node->op()->ValueInputCount() - 1;
-  if (Node* const object_map = state->LookupField(object, 0)) {
+  if (Node* const object_map =
+          state->LookupField(object, FieldIndexOf(HeapObject::kMapOffset))) {
     for (int i = 0; i < map_input_count; ++i) {
       Node* map = NodeProperties::GetValueInput(node, 1 + i);
       if (map == object_map) return Replace(effect);
@@ -331,7 +514,8 @@
   }
   if (map_input_count == 1) {
     Node* const map0 = NodeProperties::GetValueInput(node, 1);
-    state = state->AddField(object, 0, map0, zone());
+    state = state->AddField(object, FieldIndexOf(HeapObject::kMapOffset), map0,
+                            zone());
   }
   return UpdateState(node, state);
 }
@@ -343,7 +527,8 @@
   AbstractState const* state = node_states_.Get(effect);
   if (state == nullptr) return NoChange();
   Node* fixed_array_map = jsgraph()->FixedArrayMapConstant();
-  if (Node* const elements_map = state->LookupField(elements, 0)) {
+  if (Node* const elements_map =
+          state->LookupField(elements, FieldIndexOf(HeapObject::kMapOffset))) {
     // Check if the {elements} already have the fixed array map.
     if (elements_map == fixed_array_map) {
       ReplaceWithValue(node, elements, effect);
@@ -351,11 +536,14 @@
     }
   }
   // We know that the resulting elements have the fixed array map.
-  state = state->AddField(node, 0, fixed_array_map, zone());
+  state = state->AddField(node, FieldIndexOf(HeapObject::kMapOffset),
+                          fixed_array_map, zone());
   // Kill the previous elements on {object}.
-  state = state->KillField(object, 2, zone());
+  state =
+      state->KillField(object, FieldIndexOf(JSObject::kElementsOffset), zone());
   // Add the new elements on {object}.
-  state = state->AddField(object, 2, node, zone());
+  state = state->AddField(object, FieldIndexOf(JSObject::kElementsOffset), node,
+                          zone());
   return UpdateState(node, state);
 }
 
@@ -368,20 +556,25 @@
   if (flags & GrowFastElementsFlag::kDoubleElements) {
     // We know that the resulting elements have the fixed double array map.
     Node* fixed_double_array_map = jsgraph()->FixedDoubleArrayMapConstant();
-    state = state->AddField(node, 0, fixed_double_array_map, zone());
+    state = state->AddField(node, FieldIndexOf(HeapObject::kMapOffset),
+                            fixed_double_array_map, zone());
   } else {
     // We know that the resulting elements have the fixed array map.
     Node* fixed_array_map = jsgraph()->FixedArrayMapConstant();
-    state = state->AddField(node, 0, fixed_array_map, zone());
+    state = state->AddField(node, FieldIndexOf(HeapObject::kMapOffset),
+                            fixed_array_map, zone());
   }
   if (flags & GrowFastElementsFlag::kArrayObject) {
     // Kill the previous Array::length on {object}.
-    state = state->KillField(object, 3, zone());
+    state =
+        state->KillField(object, FieldIndexOf(JSArray::kLengthOffset), zone());
   }
   // Kill the previous elements on {object}.
-  state = state->KillField(object, 2, zone());
+  state =
+      state->KillField(object, FieldIndexOf(JSObject::kElementsOffset), zone());
   // Add the new elements on {object}.
-  state = state->AddField(object, 2, node, zone());
+  state = state->AddField(object, FieldIndexOf(JSObject::kElementsOffset), node,
+                          zone());
   return UpdateState(node, state);
 }
 
@@ -392,18 +585,22 @@
   Node* const effect = NodeProperties::GetEffectInput(node);
   AbstractState const* state = node_states_.Get(effect);
   if (state == nullptr) return NoChange();
-  if (Node* const object_map = state->LookupField(object, 0)) {
+  if (Node* const object_map =
+          state->LookupField(object, FieldIndexOf(HeapObject::kMapOffset))) {
     if (target_map == object_map) {
       // The {object} already has the {target_map}, so this TransitionElements
       // {node} is fully redundant (independent of what {source_map} is).
       return Replace(effect);
     }
-    state = state->KillField(object, 0, zone());
+    state =
+        state->KillField(object, FieldIndexOf(HeapObject::kMapOffset), zone());
     if (source_map == object_map) {
-      state = state->AddField(object, 0, target_map, zone());
+      state = state->AddField(object, FieldIndexOf(HeapObject::kMapOffset),
+                              target_map, zone());
     }
   } else {
-    state = state->KillField(object, 0, zone());
+    state =
+        state->KillField(object, FieldIndexOf(HeapObject::kMapOffset), zone());
   }
   ElementsTransition transition = ElementsTransitionOf(node->op());
   switch (transition) {
@@ -411,7 +608,8 @@
       break;
     case ElementsTransition::kSlowTransition:
       // Kill the elements as well.
-      state = state->KillField(object, 2, zone());
+      state = state->KillField(object, FieldIndexOf(JSObject::kElementsOffset),
+                               zone());
       break;
   }
   return UpdateState(node, state);
@@ -421,16 +619,21 @@
   FieldAccess const& access = FieldAccessOf(node->op());
   Node* const object = NodeProperties::GetValueInput(node, 0);
   Node* const effect = NodeProperties::GetEffectInput(node);
+  Node* const control = NodeProperties::GetControlInput(node);
   AbstractState const* state = node_states_.Get(effect);
   if (state == nullptr) return NoChange();
   int field_index = FieldIndexOf(access);
   if (field_index >= 0) {
-    if (Node* const replacement = state->LookupField(object, field_index)) {
-      // Make sure the {replacement} has at least as good type
-      // as the original {node}.
-      if (!replacement->IsDead() &&
-          NodeProperties::GetType(replacement)
-              ->Is(NodeProperties::GetType(node))) {
+    if (Node* replacement = state->LookupField(object, field_index)) {
+      // Make sure we don't resurrect dead {replacement} nodes.
+      if (!replacement->IsDead()) {
+        // We might need to guard the {replacement} if the type of the
+        // {node} is more precise than the type of the {replacement}.
+        Type* const node_type = NodeProperties::GetType(node);
+        if (!NodeProperties::GetType(replacement)->Is(node_type)) {
+          replacement = graph()->NewNode(common()->TypeGuard(node_type),
+                                         replacement, control);
+        }
         ReplaceWithValue(node, replacement, effect);
         return Replace(replacement);
       }
@@ -468,14 +671,19 @@
   Node* const object = NodeProperties::GetValueInput(node, 0);
   Node* const index = NodeProperties::GetValueInput(node, 1);
   Node* const effect = NodeProperties::GetEffectInput(node);
+  Node* const control = NodeProperties::GetControlInput(node);
   AbstractState const* state = node_states_.Get(effect);
   if (state == nullptr) return NoChange();
-  if (Node* const replacement = state->LookupElement(object, index)) {
-    // Make sure the {replacement} has at least as good type
-    // as the original {node}.
-    if (!replacement->IsDead() &&
-        NodeProperties::GetType(replacement)
-            ->Is(NodeProperties::GetType(node))) {
+  if (Node* replacement = state->LookupElement(object, index)) {
+    // Make sure we don't resurrect dead {replacement} nodes.
+    if (!replacement->IsDead()) {
+      // We might need to guard the {replacement} if the type of the
+      // {node} is more precise than the type of the {replacement}.
+      Type* const node_type = NodeProperties::GetType(node);
+      if (!NodeProperties::GetType(replacement)->Is(node_type)) {
+        replacement = graph()->NewNode(common()->TypeGuard(node_type),
+                                       replacement, control);
+      }
       ReplaceWithValue(node, replacement, effect);
       return Replace(replacement);
     }
@@ -620,23 +828,28 @@
         switch (current->opcode()) {
           case IrOpcode::kEnsureWritableFastElements: {
             Node* const object = NodeProperties::GetValueInput(current, 0);
-            state = state->KillField(object, 2, zone());
+            state = state->KillField(
+                object, FieldIndexOf(JSObject::kElementsOffset), zone());
             break;
           }
           case IrOpcode::kMaybeGrowFastElements: {
             GrowFastElementsFlags flags =
                 GrowFastElementsFlagsOf(current->op());
             Node* const object = NodeProperties::GetValueInput(current, 0);
-            state = state->KillField(object, 2, zone());
+            state = state->KillField(
+                object, FieldIndexOf(JSObject::kElementsOffset), zone());
             if (flags & GrowFastElementsFlag::kArrayObject) {
-              state = state->KillField(object, 3, zone());
+              state = state->KillField(
+                  object, FieldIndexOf(JSArray::kLengthOffset), zone());
             }
             break;
           }
           case IrOpcode::kTransitionElementsKind: {
             Node* const object = NodeProperties::GetValueInput(current, 0);
-            state = state->KillField(object, 0, zone());
-            state = state->KillField(object, 2, zone());
+            state = state->KillField(
+                object, FieldIndexOf(HeapObject::kMapOffset), zone());
+            state = state->KillField(
+                object, FieldIndexOf(JSObject::kElementsOffset), zone());
             break;
           }
           case IrOpcode::kStoreField: {
@@ -671,6 +884,14 @@
 }
 
 // static
+int LoadElimination::FieldIndexOf(int offset) {
+  DCHECK_EQ(0, offset % kPointerSize);
+  int field_index = offset / kPointerSize;
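+  // Offsets past the tracked range return -1 and are ignored by load
+  // elimination.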
+  if (field_index >= static_cast<int>(kMaxTrackedFields)) return -1;
+  return field_index;
+}
+
+// static
 int LoadElimination::FieldIndexOf(FieldAccess const& access) {
   MachineRepresentation rep = access.machine_type.representation();
   switch (rep) {
@@ -699,12 +920,15 @@
       break;
   }
   DCHECK_EQ(kTaggedBase, access.base_is_tagged);
-  DCHECK_EQ(0, access.offset % kPointerSize);
-  int field_index = access.offset / kPointerSize;
-  if (field_index >= static_cast<int>(kMaxTrackedFields)) return -1;
-  return field_index;
+  return FieldIndexOf(access.offset);
 }
 
+CommonOperatorBuilder* LoadElimination::common() const {
+  return jsgraph()->common();
+}
+
+Graph* LoadElimination::graph() const { return jsgraph()->graph(); }
+
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
diff --git a/src/compiler/load-elimination.h b/src/compiler/load-elimination.h
index 2a4ee40..985e690 100644
--- a/src/compiler/load-elimination.h
+++ b/src/compiler/load-elimination.h
@@ -12,7 +12,9 @@
 namespace compiler {
 
 // Forward declarations.
+class CommonOperatorBuilder;
 struct FieldAccess;
+class Graph;
 class JSGraph;
 
 class LoadElimination final : public AdvancedReducer {
@@ -24,6 +26,39 @@
   Reduction Reduce(Node* node) final;
 
  private:
+  static const size_t kMaxTrackedChecks = 8;
+
+  // Abstract state to approximate the current state of checks that are
+  // only invalidated by calls, i.e. array buffer neutering checks, along
+  // the effect paths through the graph.
+  class AbstractChecks final : public ZoneObject {
+   public:
+    explicit AbstractChecks(Zone* zone) {
+      for (size_t i = 0; i < arraysize(nodes_); ++i) {
+        nodes_[i] = nullptr;
+      }
+    }
+    AbstractChecks(Node* node, Zone* zone) : AbstractChecks(zone) {
+      nodes_[next_index_++] = node;
+    }
+
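+    // Returns a copy with {node} added. The fixed-size buffer wraps around,
+    // so the oldest tracked check is evicted once kMaxTrackedChecks entries
+    // are in use.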
+    AbstractChecks const* Extend(Node* node, Zone* zone) const {
+      AbstractChecks* that = new (zone) AbstractChecks(*this);
+      that->nodes_[that->next_index_] = node;
+      that->next_index_ = (that->next_index_ + 1) % arraysize(nodes_);
+      return that;
+    }
+    Node* Lookup(Node* node) const;
+    bool Equals(AbstractChecks const* that) const;
+    AbstractChecks const* Merge(AbstractChecks const* that, Zone* zone) const;
+
+    void Print() const;
+
+   private:
+    Node* nodes_[kMaxTrackedChecks];
+    size_t next_index_ = 0;
+  };
+
   static const size_t kMaxTrackedElements = 8;
 
   // Abstract state to approximate the current state of an element along the
@@ -53,6 +88,8 @@
     AbstractElements const* Merge(AbstractElements const* that,
                                   Zone* zone) const;
 
+    void Print() const;
+
    private:
     struct Element {
       Element() {}
@@ -104,6 +141,8 @@
       return copy;
     }
 
+    void Print() const;
+
    private:
     ZoneMap<Node*, Node*> info_for_node_;
   };
@@ -133,7 +172,13 @@
                                      Zone* zone) const;
     Node* LookupElement(Node* object, Node* index) const;
 
+    AbstractState const* AddCheck(Node* node, Zone* zone) const;
+    Node* LookupCheck(Node* node) const;
+
+    void Print() const;
+
    private:
+    AbstractChecks const* checks_ = nullptr;
     AbstractElements const* elements_ = nullptr;
     AbstractField const* fields_[kMaxTrackedFields];
   };
@@ -150,6 +195,7 @@
     ZoneVector<AbstractState const*> info_for_node_;
   };
 
+  Reduction ReduceArrayBufferWasNeutered(Node* node);
   Reduction ReduceCheckMaps(Node* node);
   Reduction ReduceEnsureWritableFastElements(Node* node);
   Reduction ReduceMaybeGrowFastElements(Node* node);
@@ -168,9 +214,12 @@
   AbstractState const* ComputeLoopState(Node* node,
                                         AbstractState const* state) const;
 
+  static int FieldIndexOf(int offset);
   static int FieldIndexOf(FieldAccess const& access);
 
+  CommonOperatorBuilder* common() const;
   AbstractState const* empty_state() const { return &empty_state_; }
+  Graph* graph() const;
   JSGraph* jsgraph() const { return jsgraph_; }
   Zone* zone() const { return node_states_.zone(); }
 
diff --git a/src/compiler/loop-analysis.cc b/src/compiler/loop-analysis.cc
index 2a81aee..f3a7933 100644
--- a/src/compiler/loop-analysis.cc
+++ b/src/compiler/loop-analysis.cc
@@ -5,10 +5,10 @@
 #include "src/compiler/loop-analysis.h"
 
 #include "src/compiler/graph.h"
-#include "src/compiler/node.h"
 #include "src/compiler/node-marker.h"
 #include "src/compiler/node-properties.h"
-#include "src/zone.h"
+#include "src/compiler/node.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/loop-analysis.h b/src/compiler/loop-analysis.h
index a8c3bca..2d0f27b 100644
--- a/src/compiler/loop-analysis.h
+++ b/src/compiler/loop-analysis.h
@@ -8,7 +8,7 @@
 #include "src/base/iterator.h"
 #include "src/compiler/graph.h"
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/loop-peeling.cc b/src/compiler/loop-peeling.cc
index 9535df5..5f8857c 100644
--- a/src/compiler/loop-peeling.cc
+++ b/src/compiler/loop-peeling.cc
@@ -2,13 +2,13 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include "src/compiler/loop-peeling.h"
 #include "src/compiler/common-operator.h"
 #include "src/compiler/graph.h"
-#include "src/compiler/loop-peeling.h"
-#include "src/compiler/node.h"
 #include "src/compiler/node-marker.h"
 #include "src/compiler/node-properties.h"
-#include "src/zone.h"
+#include "src/compiler/node.h"
+#include "src/zone/zone.h"
 
 // Loop peeling is an optimization that copies the body of a loop, creating
 // a new copy of the body called the "peeled iteration" that represents the
diff --git a/src/compiler/loop-variable-optimizer.cc b/src/compiler/loop-variable-optimizer.cc
index 8331963..55cce26 100644
--- a/src/compiler/loop-variable-optimizer.cc
+++ b/src/compiler/loop-variable-optimizer.cc
@@ -9,8 +9,8 @@
 #include "src/compiler/node-marker.h"
 #include "src/compiler/node-properties.h"
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
-#include "src/zone.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -28,7 +28,7 @@
     : graph_(graph),
       common_(common),
       zone_(zone),
-      limits_(zone),
+      limits_(graph->NodeCount(), zone),
       induction_vars_(zone) {}
 
 void LoopVariableOptimizer::Run() {
@@ -40,14 +40,13 @@
     queue.pop();
     queued.Set(node, false);
 
-    DCHECK(limits_.find(node->id()) == limits_.end());
+    DCHECK_NULL(limits_[node->id()]);
     bool all_inputs_visited = true;
     int inputs_end = (node->opcode() == IrOpcode::kLoop)
                          ? kFirstBackedge
                          : node->op()->ControlInputCount();
     for (int i = 0; i < inputs_end; i++) {
-      auto input = limits_.find(NodeProperties::GetControlInput(node, i)->id());
-      if (input == limits_.end()) {
+      if (limits_[NodeProperties::GetControlInput(node, i)->id()] == nullptr) {
         all_inputs_visited = false;
         break;
       }
@@ -55,7 +54,7 @@
     if (!all_inputs_visited) continue;
 
     VisitNode(node);
-    DCHECK(limits_.find(node->id()) != limits_.end());
+    DCHECK_NOT_NULL(limits_[node->id()]);
 
     // Queue control outputs.
     for (Edge edge : node->use_edges()) {
diff --git a/src/compiler/loop-variable-optimizer.h b/src/compiler/loop-variable-optimizer.h
index a5c1ad4..8054ec1 100644
--- a/src/compiler/loop-variable-optimizer.h
+++ b/src/compiler/loop-variable-optimizer.h
@@ -5,7 +5,7 @@
 #ifndef V8_COMPILER_LOOP_VARIABLE_OPTIMIZER_H_
 #define V8_COMPILER_LOOP_VARIABLE_OPTIMIZER_H_
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -106,7 +106,7 @@
   Graph* graph_;
   CommonOperatorBuilder* common_;
   Zone* zone_;
-  ZoneMap<int, const VariableLimits*> limits_;
+  ZoneVector<const VariableLimits*> limits_;
   ZoneMap<int, InductionVariable*> induction_vars_;
 };
 
diff --git a/src/compiler/machine-graph-verifier.cc b/src/compiler/machine-graph-verifier.cc
new file mode 100644
index 0000000..d33ee4e
--- /dev/null
+++ b/src/compiler/machine-graph-verifier.cc
@@ -0,0 +1,667 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/machine-graph-verifier.h"
+
+#include "src/compiler/common-operator.h"
+#include "src/compiler/graph.h"
+#include "src/compiler/linkage.h"
+#include "src/compiler/machine-operator.h"
+#include "src/compiler/node-properties.h"
+#include "src/compiler/node.h"
+#include "src/compiler/schedule.h"
+#include "src/zone/zone.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+namespace {
+
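+// Infers, for every node in the scheduled graph, the machine representation
+// of the value it produces, so the checker below can validate the inputs of
+// each use against what the operator expects.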
+class MachineRepresentationInferrer {
+ public:
+  MachineRepresentationInferrer(Schedule const* schedule, Graph const* graph,
+                                Linkage* linkage, Zone* zone)
+      : schedule_(schedule),
+        linkage_(linkage),
+        representation_vector_(graph->NodeCount(), zone) {
+    Run();
+  }
+
+  MachineRepresentation GetRepresentation(Node const* node) const {
+    return representation_vector_.at(node->id());
+  }
+
+ private:
+  MachineRepresentation GetProjectionType(Node const* projection) {
+    size_t index = ProjectionIndexOf(projection->op());
+    Node* input = projection->InputAt(0);
+    switch (input->opcode()) {
+      case IrOpcode::kInt32AddWithOverflow:
+      case IrOpcode::kInt32SubWithOverflow:
+      case IrOpcode::kInt32MulWithOverflow:
+        CHECK_LE(index, static_cast<size_t>(1));
+        return index == 0 ? MachineRepresentation::kWord32
+                          : MachineRepresentation::kBit;
+      case IrOpcode::kInt64AddWithOverflow:
+      case IrOpcode::kInt64SubWithOverflow:
+        CHECK_LE(index, static_cast<size_t>(1));
+        return index == 0 ? MachineRepresentation::kWord64
+                          : MachineRepresentation::kBit;
+      case IrOpcode::kTryTruncateFloat32ToInt64:
+      case IrOpcode::kTryTruncateFloat64ToInt64:
+      case IrOpcode::kTryTruncateFloat32ToUint64:
+      case IrOpcode::kTryTruncateFloat64ToUint64:
+        CHECK_LE(index, static_cast<size_t>(1));
+        return index == 0 ? MachineRepresentation::kWord64
+                          : MachineRepresentation::kBit;
+      case IrOpcode::kCall: {
+        CallDescriptor const* desc = CallDescriptorOf(input->op());
+        return desc->GetReturnType(index).representation();
+      }
+      default:
+        return MachineRepresentation::kNone;
+    }
+  }
+
+  void Run() {
+    auto blocks = schedule_->all_blocks();
+    for (BasicBlock* block : *blocks) {
+      for (size_t i = 0; i <= block->NodeCount(); ++i) {
+        Node const* node =
+            i < block->NodeCount() ? block->NodeAt(i) : block->control_input();
+        if (node == nullptr) {
+          DCHECK_EQ(block->NodeCount(), i);
+          break;
+        }
+        switch (node->opcode()) {
+          case IrOpcode::kParameter:
+            representation_vector_[node->id()] =
+                linkage_->GetParameterType(ParameterIndexOf(node->op()))
+                    .representation();
+            break;
+          case IrOpcode::kProjection: {
+            representation_vector_[node->id()] = GetProjectionType(node);
+          } break;
+          case IrOpcode::kTypedStateValues:
+            representation_vector_[node->id()] = MachineRepresentation::kNone;
+            break;
+          case IrOpcode::kAtomicLoad:
+          case IrOpcode::kLoad:
+          case IrOpcode::kProtectedLoad:
+            representation_vector_[node->id()] =
+                LoadRepresentationOf(node->op()).representation();
+            break;
+          case IrOpcode::kCheckedLoad:
+            representation_vector_[node->id()] =
+                CheckedLoadRepresentationOf(node->op()).representation();
+            break;
+          case IrOpcode::kLoadStackPointer:
+          case IrOpcode::kLoadFramePointer:
+          case IrOpcode::kLoadParentFramePointer:
+            representation_vector_[node->id()] =
+                MachineType::PointerRepresentation();
+            break;
+          case IrOpcode::kPhi:
+            representation_vector_[node->id()] =
+                PhiRepresentationOf(node->op());
+            break;
+          case IrOpcode::kCall: {
+            CallDescriptor const* desc = CallDescriptorOf(node->op());
+            if (desc->ReturnCount() > 0) {
+              representation_vector_[node->id()] =
+                  desc->GetReturnType(0).representation();
+            } else {
+              representation_vector_[node->id()] =
+                  MachineRepresentation::kTagged;
+            }
+            break;
+          }
+          case IrOpcode::kUnalignedLoad:
+            representation_vector_[node->id()] =
+                UnalignedLoadRepresentationOf(node->op()).representation();
+            break;
+          case IrOpcode::kHeapConstant:
+          case IrOpcode::kNumberConstant:
+          case IrOpcode::kChangeBitToTagged:
+          case IrOpcode::kIfException:
+          case IrOpcode::kOsrValue:
+          case IrOpcode::kChangeInt32ToTagged:
+          case IrOpcode::kChangeUint32ToTagged:
+          case IrOpcode::kBitcastWordToTagged:
+            representation_vector_[node->id()] = MachineRepresentation::kTagged;
+            break;
+          case IrOpcode::kExternalConstant:
+            representation_vector_[node->id()] =
+                MachineType::PointerRepresentation();
+            break;
+          case IrOpcode::kBitcastTaggedToWord:
+            representation_vector_[node->id()] =
+                MachineType::PointerRepresentation();
+            break;
+          case IrOpcode::kBitcastWordToTaggedSigned:
+            representation_vector_[node->id()] =
+                MachineRepresentation::kTaggedSigned;
+            break;
+          case IrOpcode::kWord32Equal:
+          case IrOpcode::kInt32LessThan:
+          case IrOpcode::kInt32LessThanOrEqual:
+          case IrOpcode::kUint32LessThan:
+          case IrOpcode::kUint32LessThanOrEqual:
+          case IrOpcode::kWord64Equal:
+          case IrOpcode::kInt64LessThan:
+          case IrOpcode::kInt64LessThanOrEqual:
+          case IrOpcode::kUint64LessThan:
+          case IrOpcode::kUint64LessThanOrEqual:
+          case IrOpcode::kFloat32Equal:
+          case IrOpcode::kFloat32LessThan:
+          case IrOpcode::kFloat32LessThanOrEqual:
+          case IrOpcode::kFloat64Equal:
+          case IrOpcode::kFloat64LessThan:
+          case IrOpcode::kFloat64LessThanOrEqual:
+          case IrOpcode::kChangeTaggedToBit:
+            representation_vector_[node->id()] = MachineRepresentation::kBit;
+            break;
+#define LABEL(opcode) case IrOpcode::k##opcode:
+          case IrOpcode::kTruncateInt64ToInt32:
+          case IrOpcode::kTruncateFloat32ToInt32:
+          case IrOpcode::kTruncateFloat32ToUint32:
+          case IrOpcode::kBitcastFloat32ToInt32:
+          case IrOpcode::kInt32x4ExtractLane:
+          case IrOpcode::kInt32Constant:
+          case IrOpcode::kRelocatableInt32Constant:
+          case IrOpcode::kTruncateFloat64ToWord32:
+          case IrOpcode::kTruncateFloat64ToUint32:
+          case IrOpcode::kChangeFloat64ToInt32:
+          case IrOpcode::kChangeFloat64ToUint32:
+          case IrOpcode::kRoundFloat64ToInt32:
+          case IrOpcode::kFloat64ExtractLowWord32:
+          case IrOpcode::kFloat64ExtractHighWord32:
+            MACHINE_UNOP_32_LIST(LABEL)
+            MACHINE_BINOP_32_LIST(LABEL) {
+              representation_vector_[node->id()] =
+                  MachineRepresentation::kWord32;
+            }
+            break;
+          case IrOpcode::kChangeInt32ToInt64:
+          case IrOpcode::kChangeUint32ToUint64:
+          case IrOpcode::kInt64Constant:
+          case IrOpcode::kRelocatableInt64Constant:
+          case IrOpcode::kBitcastFloat64ToInt64:
+            MACHINE_BINOP_64_LIST(LABEL) {
+              representation_vector_[node->id()] =
+                  MachineRepresentation::kWord64;
+            }
+            break;
+          case IrOpcode::kRoundInt32ToFloat32:
+          case IrOpcode::kRoundUint32ToFloat32:
+          case IrOpcode::kRoundInt64ToFloat32:
+          case IrOpcode::kRoundUint64ToFloat32:
+          case IrOpcode::kFloat32Constant:
+          case IrOpcode::kTruncateFloat64ToFloat32:
+            MACHINE_FLOAT32_BINOP_LIST(LABEL)
+            MACHINE_FLOAT32_UNOP_LIST(LABEL) {
+              representation_vector_[node->id()] =
+                  MachineRepresentation::kFloat32;
+            }
+            break;
+          case IrOpcode::kRoundInt64ToFloat64:
+          case IrOpcode::kRoundUint64ToFloat64:
+          case IrOpcode::kChangeFloat32ToFloat64:
+          case IrOpcode::kChangeInt32ToFloat64:
+          case IrOpcode::kChangeUint32ToFloat64:
+          case IrOpcode::kFloat64Constant:
+          case IrOpcode::kFloat64SilenceNaN:
+            MACHINE_FLOAT64_BINOP_LIST(LABEL)
+            MACHINE_FLOAT64_UNOP_LIST(LABEL) {
+              representation_vector_[node->id()] =
+                  MachineRepresentation::kFloat64;
+            }
+            break;
+#undef LABEL
+          default:
+            break;
+        }
+      }
+    }
+  }
+
+  Schedule const* const schedule_;
+  Linkage const* const linkage_;
+  ZoneVector<MachineRepresentation> representation_vector_;
+};
+
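+// Walks the schedule and checks that every value input of every node has a
+// representation compatible with the operator consuming it, reporting a
+// fatal error with a diagnostic message otherwise.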
+class MachineRepresentationChecker {
+ public:
+  MachineRepresentationChecker(Schedule const* const schedule,
+                               MachineRepresentationInferrer const* const typer)
+      : schedule_(schedule), typer_(typer) {}
+
+  void Run() {
+    BasicBlockVector const* blocks = schedule_->all_blocks();
+    for (BasicBlock* block : *blocks) {
+      for (size_t i = 0; i <= block->NodeCount(); ++i) {
+        Node const* node =
+            i < block->NodeCount() ? block->NodeAt(i) : block->control_input();
+        if (node == nullptr) {
+          DCHECK_EQ(block->NodeCount(), i);
+          break;
+        }
+        switch (node->opcode()) {
+          case IrOpcode::kCall:
+          case IrOpcode::kTailCall:
+            CheckCallInputs(node);
+            break;
+          case IrOpcode::kChangeBitToTagged:
+            CHECK_EQ(MachineRepresentation::kBit,
+                     typer_->GetRepresentation(node->InputAt(0)));
+            break;
+          case IrOpcode::kChangeTaggedToBit:
+            CHECK_EQ(MachineRepresentation::kTagged,
+                     typer_->GetRepresentation(node->InputAt(0)));
+            break;
+          case IrOpcode::kRoundInt64ToFloat64:
+          case IrOpcode::kRoundUint64ToFloat64:
+          case IrOpcode::kRoundInt64ToFloat32:
+          case IrOpcode::kRoundUint64ToFloat32:
+          case IrOpcode::kTruncateInt64ToInt32:
+            CheckValueInputForInt64Op(node, 0);
+            break;
+          case IrOpcode::kBitcastWordToTagged:
+          case IrOpcode::kBitcastWordToTaggedSigned:
+            CheckValueInputRepresentationIs(
+                node, 0, MachineType::PointerRepresentation());
+            break;
+          case IrOpcode::kBitcastTaggedToWord:
+            CheckValueInputIsTagged(node, 0);
+            break;
+          case IrOpcode::kTruncateFloat64ToWord32:
+          case IrOpcode::kTruncateFloat64ToUint32:
+          case IrOpcode::kTruncateFloat64ToFloat32:
+          case IrOpcode::kChangeFloat64ToInt32:
+          case IrOpcode::kChangeFloat64ToUint32:
+          case IrOpcode::kRoundFloat64ToInt32:
+          case IrOpcode::kFloat64ExtractLowWord32:
+          case IrOpcode::kFloat64ExtractHighWord32:
+          case IrOpcode::kBitcastFloat64ToInt64:
+            CheckValueInputForFloat64Op(node, 0);
+            break;
+          case IrOpcode::kWord64Equal:
+            CheckValueInputIsTaggedOrPointer(node, 0);
+            CheckValueInputRepresentationIs(
+                node, 1, typer_->GetRepresentation(node->InputAt(0)));
+            break;
+          case IrOpcode::kInt64LessThan:
+          case IrOpcode::kInt64LessThanOrEqual:
+          case IrOpcode::kUint64LessThan:
+          case IrOpcode::kUint64LessThanOrEqual:
+            CheckValueInputForInt64Op(node, 0);
+            CheckValueInputForInt64Op(node, 1);
+            break;
+          case IrOpcode::kInt32x4ExtractLane:
+            CheckValueInputRepresentationIs(node, 0,
+                                            MachineRepresentation::kSimd128);
+            break;
+#define LABEL(opcode) case IrOpcode::k##opcode:
+          case IrOpcode::kChangeInt32ToTagged:
+          case IrOpcode::kChangeUint32ToTagged:
+          case IrOpcode::kChangeInt32ToFloat64:
+          case IrOpcode::kChangeUint32ToFloat64:
+          case IrOpcode::kRoundInt32ToFloat32:
+          case IrOpcode::kRoundUint32ToFloat32:
+          case IrOpcode::kChangeInt32ToInt64:
+          case IrOpcode::kChangeUint32ToUint64:
+            MACHINE_UNOP_32_LIST(LABEL) { CheckValueInputForInt32Op(node, 0); }
+            break;
+          case IrOpcode::kWord32Equal:
+          case IrOpcode::kInt32LessThan:
+          case IrOpcode::kInt32LessThanOrEqual:
+          case IrOpcode::kUint32LessThan:
+          case IrOpcode::kUint32LessThanOrEqual:
+            MACHINE_BINOP_32_LIST(LABEL) {
+              CheckValueInputForInt32Op(node, 0);
+              CheckValueInputForInt32Op(node, 1);
+            }
+            break;
+            MACHINE_BINOP_64_LIST(LABEL) {
+              CheckValueInputForInt64Op(node, 0);
+              CheckValueInputForInt64Op(node, 1);
+            }
+            break;
+          case IrOpcode::kFloat32Equal:
+          case IrOpcode::kFloat32LessThan:
+          case IrOpcode::kFloat32LessThanOrEqual:
+            MACHINE_FLOAT32_BINOP_LIST(LABEL) {
+              CheckValueInputForFloat32Op(node, 0);
+              CheckValueInputForFloat32Op(node, 1);
+            }
+            break;
+          case IrOpcode::kChangeFloat32ToFloat64:
+          case IrOpcode::kTruncateFloat32ToInt32:
+          case IrOpcode::kTruncateFloat32ToUint32:
+          case IrOpcode::kBitcastFloat32ToInt32:
+            MACHINE_FLOAT32_UNOP_LIST(LABEL) {
+              CheckValueInputForFloat32Op(node, 0);
+            }
+            break;
+          case IrOpcode::kFloat64Equal:
+          case IrOpcode::kFloat64LessThan:
+          case IrOpcode::kFloat64LessThanOrEqual:
+            MACHINE_FLOAT64_BINOP_LIST(LABEL) {
+              CheckValueInputForFloat64Op(node, 0);
+              CheckValueInputForFloat64Op(node, 1);
+            }
+            break;
+          case IrOpcode::kFloat64SilenceNaN:
+            MACHINE_FLOAT64_UNOP_LIST(LABEL) {
+              CheckValueInputForFloat64Op(node, 0);
+            }
+            break;
+#undef LABEL
+          case IrOpcode::kParameter:
+          case IrOpcode::kProjection:
+            break;
+          case IrOpcode::kLoad:
+          case IrOpcode::kAtomicLoad:
+            CheckValueInputIsTaggedOrPointer(node, 0);
+            CheckValueInputRepresentationIs(
+                node, 1, MachineType::PointerRepresentation());
+            break;
+          case IrOpcode::kStore:
+            CheckValueInputIsTaggedOrPointer(node, 0);
+            CheckValueInputRepresentationIs(
+                node, 1, MachineType::PointerRepresentation());
+            switch (StoreRepresentationOf(node->op()).representation()) {
+              case MachineRepresentation::kTagged:
+              case MachineRepresentation::kTaggedPointer:
+              case MachineRepresentation::kTaggedSigned:
+                CheckValueInputIsTagged(node, 2);
+                break;
+              default:
+                CheckValueInputRepresentationIs(
+                    node, 2,
+                    StoreRepresentationOf(node->op()).representation());
+            }
+            break;
+          case IrOpcode::kAtomicStore:
+            CheckValueInputIsTaggedOrPointer(node, 0);
+            CheckValueInputRepresentationIs(
+                node, 1, MachineType::PointerRepresentation());
+            switch (AtomicStoreRepresentationOf(node->op())) {
+              case MachineRepresentation::kTagged:
+              case MachineRepresentation::kTaggedPointer:
+              case MachineRepresentation::kTaggedSigned:
+                CheckValueInputIsTagged(node, 2);
+                break;
+              default:
+                CheckValueInputRepresentationIs(
+                    node, 2, AtomicStoreRepresentationOf(node->op()));
+            }
+            break;
+          case IrOpcode::kPhi:
+            switch (typer_->GetRepresentation(node)) {
+              case MachineRepresentation::kTagged:
+              case MachineRepresentation::kTaggedPointer:
+              case MachineRepresentation::kTaggedSigned:
+                for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
+                  CheckValueInputIsTagged(node, i);
+                }
+                break;
+              default:
+                for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
+                  CheckValueInputRepresentationIs(
+                      node, i, typer_->GetRepresentation(node));
+                }
+                break;
+            }
+            break;
+          case IrOpcode::kBranch:
+          case IrOpcode::kSwitch:
+            CheckValueInputForInt32Op(node, 0);
+            break;
+          case IrOpcode::kReturn:
+            // TODO(epertoso): use the linkage to determine which type we
+            // should have here.
+            break;
+          case IrOpcode::kTypedStateValues:
+          case IrOpcode::kFrameState:
+            break;
+          default:
+            if (node->op()->ValueInputCount() != 0) {
+              std::stringstream str;
+              str << "Node #" << node->id() << ":" << *node->op()
+                  << " in the machine graph is not being checked.";
+              FATAL(str.str().c_str());
+            }
+            break;
+        }
+      }
+    }
+  }
+
+ private:
+  void CheckValueInputRepresentationIs(Node const* node, int index,
+                                       MachineRepresentation representation) {
+    Node const* input = node->InputAt(index);
+    if (typer_->GetRepresentation(input) != representation) {
+      std::stringstream str;
+      str << "TypeError: node #" << node->id() << ":" << *node->op()
+          << " uses node #" << input->id() << ":" << *input->op()
+          << " which doesn't have a " << MachineReprToString(representation)
+          << " representation.";
+      FATAL(str.str().c_str());
+    }
+  }
+
+  void CheckValueInputIsTagged(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    switch (typer_->GetRepresentation(input)) {
+      case MachineRepresentation::kTagged:
+      case MachineRepresentation::kTaggedPointer:
+      case MachineRepresentation::kTaggedSigned:
+        return;
+      default:
+        break;
+    }
+    std::ostringstream str;
+    str << "TypeError: node #" << node->id() << ":" << *node->op()
+        << " uses node #" << input->id() << ":" << *input->op()
+        << " which doesn't have a tagged representation.";
+    FATAL(str.str().c_str());
+  }
+
+  void CheckValueInputIsTaggedOrPointer(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    switch (typer_->GetRepresentation(input)) {
+      case MachineRepresentation::kTagged:
+      case MachineRepresentation::kTaggedPointer:
+      case MachineRepresentation::kTaggedSigned:
+        return;
+      default:
+        break;
+    }
+    if (typer_->GetRepresentation(input) !=
+        MachineType::PointerRepresentation()) {
+      std::ostringstream str;
+      str << "TypeError: node #" << node->id() << ":" << *node->op()
+          << " uses node #" << input->id() << ":" << *input->op()
+          << " which doesn't have a tagged or pointer representation.";
+      FATAL(str.str().c_str());
+    }
+  }
+
+  void CheckValueInputForInt32Op(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    switch (typer_->GetRepresentation(input)) {
+      case MachineRepresentation::kBit:
+      case MachineRepresentation::kWord8:
+      case MachineRepresentation::kWord16:
+      case MachineRepresentation::kWord32:
+        return;
+      case MachineRepresentation::kNone: {
+        std::ostringstream str;
+        str << "TypeError: node #" << input->id() << ":" << *input->op()
+            << " is untyped.";
+        FATAL(str.str().c_str());
+        break;
+      }
+      default:
+        break;
+    }
+    std::ostringstream str;
+    str << "TypeError: node #" << node->id() << ":" << *node->op()
+        << " uses node #" << input->id() << ":" << *input->op()
+        << " which doesn't have an int32-compatible representation.";
+    FATAL(str.str().c_str());
+  }
+
+  void CheckValueInputForInt64Op(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    switch (typer_->GetRepresentation(input)) {
+      case MachineRepresentation::kWord64:
+        return;
+      case MachineRepresentation::kNone: {
+        std::ostringstream str;
+        str << "TypeError: node #" << input->id() << ":" << *input->op()
+            << " is untyped.";
+        FATAL(str.str().c_str());
+        break;
+      }
+
+      default:
+        break;
+    }
+    std::ostringstream str;
+    str << "TypeError: node #" << node->id() << ":" << *node->op()
+        << " uses node #" << input->id() << ":" << *input->op()
+        << " which doesn't have a kWord64 representation.";
+    FATAL(str.str().c_str());
+  }
+
+  void CheckValueInputForFloat32Op(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    if (MachineRepresentation::kFloat32 == typer_->GetRepresentation(input)) {
+      return;
+    }
+    std::ostringstream str;
+    str << "TypeError: node #" << node->id() << ":" << *node->op()
+        << " uses node #" << input->id() << ":" << *input->op()
+        << " which doesn't have a kFloat32 representation.";
+    FATAL(str.str().c_str());
+  }
+
+  void CheckValueInputForFloat64Op(Node const* node, int index) {
+    Node const* input = node->InputAt(index);
+    if (MachineRepresentation::kFloat64 == typer_->GetRepresentation(input)) {
+      return;
+    }
+    std::ostringstream str;
+    str << "TypeError: node #" << node->id() << ":" << *node->op()
+        << " uses node #" << input->id() << ":" << *input->op()
+        << " which doesn't have a kFloat64 representation.";
+    FATAL(str.str().c_str());
+  }
+
+  void CheckCallInputs(Node const* node) {
+    CallDescriptor const* desc = CallDescriptorOf(node->op());
+    std::ostringstream str;
+    bool should_log_error = false;
+    for (size_t i = 0; i < desc->InputCount(); ++i) {
+      Node const* input = node->InputAt(static_cast<int>(i));
+      MachineRepresentation const input_type = typer_->GetRepresentation(input);
+      MachineRepresentation const expected_input_type =
+          desc->GetInputType(i).representation();
+      if (!IsCompatible(expected_input_type, input_type)) {
+        if (!should_log_error) {
+          should_log_error = true;
+          str << "TypeError: node #" << node->id() << ":" << *node->op()
+              << " has wrong type for:" << std::endl;
+        } else {
+          str << std::endl;
+        }
+        str << " * input " << i << " (" << input->id() << ":" << *input->op()
+            << ") doesn't have a " << MachineReprToString(expected_input_type)
+            << " representation.";
+      }
+    }
+    if (should_log_error) {
+      FATAL(str.str().c_str());
+    }
+  }
+
+  bool Intersect(MachineRepresentation lhs, MachineRepresentation rhs) {
+    return (GetRepresentationProperties(lhs) &
+            GetRepresentationProperties(rhs)) != 0;
+  }
+
+  enum RepresentationProperties { kIsPointer = 1, kIsTagged = 2 };
+
+  int GetRepresentationProperties(MachineRepresentation representation) {
+    switch (representation) {
+      case MachineRepresentation::kTagged:
+      case MachineRepresentation::kTaggedPointer:
+        return kIsPointer | kIsTagged;
+      case MachineRepresentation::kTaggedSigned:
+        return kIsTagged;
+      case MachineRepresentation::kWord32:
+        return MachineRepresentation::kWord32 ==
+                       MachineType::PointerRepresentation()
+                   ? kIsPointer
+                   : 0;
+      case MachineRepresentation::kWord64:
+        return MachineRepresentation::kWord64 ==
+                       MachineType::PointerRepresentation()
+                   ? kIsPointer
+                   : 0;
+      default:
+        return 0;
+    }
+  }
+
+  bool IsCompatible(MachineRepresentation expected,
+                    MachineRepresentation actual) {
+    switch (expected) {
+      case MachineRepresentation::kTagged:
+        return (actual == MachineRepresentation::kTagged ||
+                actual == MachineRepresentation::kTaggedSigned ||
+                actual == MachineRepresentation::kTaggedPointer);
+      case MachineRepresentation::kTaggedSigned:
+      case MachineRepresentation::kTaggedPointer:
+      case MachineRepresentation::kFloat32:
+      case MachineRepresentation::kFloat64:
+      case MachineRepresentation::kSimd128:
+      case MachineRepresentation::kBit:
+      case MachineRepresentation::kWord8:
+      case MachineRepresentation::kWord16:
+      case MachineRepresentation::kWord64:
+        return expected == actual;
+        break;
+      case MachineRepresentation::kWord32:
+        return (actual == MachineRepresentation::kBit ||
+                actual == MachineRepresentation::kWord8 ||
+                actual == MachineRepresentation::kWord16 ||
+                actual == MachineRepresentation::kWord32);
+      case MachineRepresentation::kNone:
+        UNREACHABLE();
+    }
+    return false;
+  }
+
+  Schedule const* const schedule_;
+  MachineRepresentationInferrer const* const typer_;
+};
+
+}  // namespace
+
+void MachineGraphVerifier::Run(Graph* graph, Schedule const* const schedule,
+                               Linkage* linkage, Zone* temp_zone) {
+  MachineRepresentationInferrer representation_inferrer(schedule, graph,
+                                                        linkage, temp_zone);
+  MachineRepresentationChecker checker(schedule, &representation_inferrer);
+  checker.Run();
+}
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
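
The checker above boils down to one predicate, IsCompatible(expected, actual): an expected kTagged accepts any tagged flavour (kTagged, kTaggedSigned, kTaggedPointer), an expected kWord32 accepts any integer representation of at most 32 bits (kBit, kWord8, kWord16, kWord32), kNone means the input was never typed and is reported separately, and every other representation must match exactly. The standalone sketch below restates that decision table outside of V8; the enum is a trimmed stand-in for MachineRepresentation, not the real type.

#include <cassert>

// Trimmed stand-in for v8::internal::MachineRepresentation (illustration only).
enum class Rep {
  kNone, kBit, kWord8, kWord16, kWord32, kWord64,
  kFloat32, kFloat64, kSimd128,
  kTaggedSigned, kTaggedPointer, kTagged
};

// Mirrors the shape of MachineRepresentationChecker::IsCompatible above.
bool IsCompatible(Rep expected, Rep actual) {
  switch (expected) {
    case Rep::kTagged:  // any tagged value is acceptable
      return actual == Rep::kTagged || actual == Rep::kTaggedSigned ||
             actual == Rep::kTaggedPointer;
    case Rep::kWord32:  // narrower integer representations are acceptable
      return actual == Rep::kBit || actual == Rep::kWord8 ||
             actual == Rep::kWord16 || actual == Rep::kWord32;
    case Rep::kNone:    // untyped inputs are reported as a separate error
      assert(false);
      return false;
    default:            // everything else must match exactly
      return expected == actual;
  }
}

int main() {
  assert(IsCompatible(Rep::kTagged, Rep::kTaggedSigned));
  assert(IsCompatible(Rep::kWord32, Rep::kBit));
  assert(!IsCompatible(Rep::kWord64, Rep::kWord32));
  assert(!IsCompatible(Rep::kFloat64, Rep::kFloat32));
  return 0;
}
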
diff --git a/src/compiler/machine-graph-verifier.h b/src/compiler/machine-graph-verifier.h
new file mode 100644
index 0000000..b7d7b61
--- /dev/null
+++ b/src/compiler/machine-graph-verifier.h
@@ -0,0 +1,31 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_MACHINE_GRAPH_VERIFIER_H_
+#define V8_COMPILER_MACHINE_GRAPH_VERIFIER_H_
+
+#include "src/base/macros.h"
+
+namespace v8 {
+namespace internal {
+class Zone;
+namespace compiler {
+
+class Graph;
+class Linkage;
+class Schedule;
+
+// Verifies properties of a scheduled graph, such as that the nodes' inputs are
+// of the correct type.
+class MachineGraphVerifier {
+ public:
+  static void Run(Graph* graph, Schedule const* const schedule,
+                  Linkage* linkage, Zone* temp_zone);
+};
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_MACHINE_GRAPH_VERIFIER_H_
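
As the .cc above shows, the single static Run() entry point splits verification into two passes: a MachineRepresentationInferrer that records one MachineRepresentation per scheduled node, followed by a MachineRepresentationChecker that walks the schedule again and validates each use against that table. Below is a minimal standalone sketch of that infer-then-check shape with invented Node and Rep types; nothing in it is V8 API.

#include <cassert>
#include <unordered_map>
#include <vector>

enum class Rep { kWord32, kFloat64 };  // invented, minimal

struct Node {               // invented stand-in for a graph node
  int id;
  Rep produces;
  std::vector<int> inputs;  // ids of input nodes
  Rep expects_inputs;       // representation required of every input
};

int main() {
  // A two-node "graph": node 1 produces a float64, node 2 wants word32 inputs.
  std::vector<Node> schedule = {
      {1, Rep::kFloat64, {}, Rep::kWord32},
      {2, Rep::kWord32, {1}, Rep::kWord32}};

  // Pass 1: infer (here: simply record) the representation each node produces.
  std::unordered_map<int, Rep> inferred;
  for (const Node& n : schedule) inferred[n.id] = n.produces;

  // Pass 2: check every use against the inferred table.
  int errors = 0;
  for (const Node& n : schedule)
    for (int input : n.inputs)
      if (inferred.at(input) != n.expects_inputs) ++errors;

  assert(errors == 1);  // node 2 uses a float64 where it expects a word32
  return 0;
}
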
diff --git a/src/compiler/machine-operator-reducer.cc b/src/compiler/machine-operator-reducer.cc
index 99044aa..0ad20f0 100644
--- a/src/compiler/machine-operator-reducer.cc
+++ b/src/compiler/machine-operator-reducer.cc
@@ -150,21 +150,8 @@
       return ReduceWord32And(node);
     case IrOpcode::kWord32Or:
       return ReduceWord32Or(node);
-    case IrOpcode::kWord32Xor: {
-      Int32BinopMatcher m(node);
-      if (m.right().Is(0)) return Replace(m.left().node());  // x ^ 0 => x
-      if (m.IsFoldable()) {                                  // K ^ K => K
-        return ReplaceInt32(m.left().Value() ^ m.right().Value());
-      }
-      if (m.LeftEqualsRight()) return ReplaceInt32(0);  // x ^ x => 0
-      if (m.left().IsWord32Xor() && m.right().Is(-1)) {
-        Int32BinopMatcher mleft(m.left().node());
-        if (mleft.right().Is(-1)) {  // (x ^ -1) ^ -1 => x
-          return Replace(mleft.left().node());
-        }
-      }
-      break;
-    }
+    case IrOpcode::kWord32Xor:
+      return ReduceWord32Xor(node);
     case IrOpcode::kWord32Shl:
       return ReduceWord32Shl(node);
     case IrOpcode::kWord64Shl:
@@ -418,6 +405,11 @@
       if (m.IsFoldable()) {  // K * K => K
         return ReplaceFloat64(m.left().Value() * m.right().Value());
       }
+      if (m.right().Is(2)) {  // x * 2.0 => x + x
+        node->ReplaceInput(1, m.left().node());
+        NodeProperties::ChangeOp(node, machine()->Float64Add());
+        return Changed(node);
+      }
       break;
     }
     case IrOpcode::kFloat64Div: {
@@ -432,6 +424,19 @@
       if (m.IsFoldable()) {  // K / K => K
         return ReplaceFloat64(m.left().Value() / m.right().Value());
       }
+      if (m.right().Is(-1)) {  // x / -1.0 => -x
+        node->RemoveInput(1);
+        NodeProperties::ChangeOp(node, machine()->Float64Neg());
+        return Changed(node);
+      }
+      if (m.right().IsNormal() && m.right().IsPositiveOrNegativePowerOf2()) {
+        // All reciprocals of non-denormal powers of two can be represented
+        // exactly, so division by power of two can be reduced to
+        // multiplication by reciprocal, with the same result.
+        node->ReplaceInput(1, Float64Constant(1.0 / m.right().Value()));
+        NodeProperties::ChangeOp(node, machine()->Float64Mul());
+        return Changed(node);
+      }
       break;
     }
     case IrOpcode::kFloat64Mod: {
@@ -541,8 +546,9 @@
     }
     case IrOpcode::kFloat64Pow: {
       Float64BinopMatcher m(node);
-      // TODO(bmeurer): Constant fold once we have a unified pow implementation.
-      if (m.right().Is(0.0)) {  // x ** +-0.0 => 1.0
+      if (m.IsFoldable()) {
+        return ReplaceFloat64(Pow(m.left().Value(), m.right().Value()));
+      } else if (m.right().Is(0.0)) {  // x ** +-0.0 => 1.0
         return ReplaceFloat64(1.0);
       } else if (m.right().Is(-2.0)) {  // x ** -2.0 => 1 / (x * x)
         node->ReplaceInput(0, Float64Constant(1.0));
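
The new Float64Mul, Float64Div and Float64Pow cases above lean on identities that hold exactly in IEEE-754 double precision: x * 2.0 equals x + x, x / -1.0 equals -x, and dividing by a normal power of two equals multiplying by its reciprocal, because that reciprocal is itself exactly representable; Float64Pow is additionally constant-folded when both operands are constants. Below is a quick standalone check of the rewrite identities (plain C++, not part of the patch).

#include <cassert>
#include <cstdio>

int main() {
  const double samples[] = {0.0, -0.0, 1.5, -3.25, 1e308, 5e-324};
  const double power_of_two = 64.0;  // any normal power of two works
  for (double x : samples) {
    assert(x * 2.0 == x + x);  // x * 2.0  => x + x
    assert(x / -1.0 == -x);    // x / -1.0 => -x
    // 1.0 / 64.0 is exact, so division and multiplication round the same
    // mathematical value and agree exactly.
    assert(x / power_of_two == x * (1.0 / power_of_two));
  }
  std::printf("float64 strength-reduction identities hold for all samples\n");
  return 0;
}
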
@@ -1221,22 +1227,17 @@
   return NoChange();
 }
 
-
-Reduction MachineOperatorReducer::ReduceWord32Or(Node* node) {
-  DCHECK_EQ(IrOpcode::kWord32Or, node->opcode());
+Reduction MachineOperatorReducer::TryMatchWord32Ror(Node* node) {
+  DCHECK(IrOpcode::kWord32Or == node->opcode() ||
+         IrOpcode::kWord32Xor == node->opcode());
   Int32BinopMatcher m(node);
-  if (m.right().Is(0)) return Replace(m.left().node());    // x | 0  => x
-  if (m.right().Is(-1)) return Replace(m.right().node());  // x | -1 => -1
-  if (m.IsFoldable()) {                                    // K | K  => K
-    return ReplaceInt32(m.left().Value() | m.right().Value());
-  }
-  if (m.LeftEqualsRight()) return Replace(m.left().node());  // x | x => x
-
   Node* shl = nullptr;
   Node* shr = nullptr;
-  // Recognize rotation, we are matching either:
+  // Recognize rotation; we are matching:
   //  * x << y | x >>> (32 - y) => x ror (32 - y), i.e  x rol y
   //  * x << (32 - y) | x >>> y => x ror y
+  //  * x << y ^ x >>> (32 - y) => x ror (32 - y), i.e. x rol y
+  //  * x << (32 - y) ^ x >>> y => x ror y
   // as well as their commuted form.
   if (m.left().IsWord32Shl() && m.right().IsWord32Shr()) {
     shl = m.left().node();
@@ -1278,6 +1279,36 @@
   return Changed(node);
 }
 
+Reduction MachineOperatorReducer::ReduceWord32Or(Node* node) {
+  DCHECK_EQ(IrOpcode::kWord32Or, node->opcode());
+  Int32BinopMatcher m(node);
+  if (m.right().Is(0)) return Replace(m.left().node());    // x | 0  => x
+  if (m.right().Is(-1)) return Replace(m.right().node());  // x | -1 => -1
+  if (m.IsFoldable()) {                                    // K | K  => K
+    return ReplaceInt32(m.left().Value() | m.right().Value());
+  }
+  if (m.LeftEqualsRight()) return Replace(m.left().node());  // x | x => x
+
+  return TryMatchWord32Ror(node);
+}
+
+Reduction MachineOperatorReducer::ReduceWord32Xor(Node* node) {
+  DCHECK_EQ(IrOpcode::kWord32Xor, node->opcode());
+  Int32BinopMatcher m(node);
+  if (m.right().Is(0)) return Replace(m.left().node());  // x ^ 0 => x
+  if (m.IsFoldable()) {                                  // K ^ K => K
+    return ReplaceInt32(m.left().Value() ^ m.right().Value());
+  }
+  if (m.LeftEqualsRight()) return ReplaceInt32(0);  // x ^ x => 0
+  if (m.left().IsWord32Xor() && m.right().Is(-1)) {
+    Int32BinopMatcher mleft(m.left().node());
+    if (mleft.right().Is(-1)) {  // (x ^ -1) ^ -1 => x
+      return Replace(mleft.left().node());
+    }
+  }
+
+  return TryMatchWord32Ror(node);
+}
 
 Reduction MachineOperatorReducer::ReduceFloat64InsertLowWord32(Node* node) {
   DCHECK_EQ(IrOpcode::kFloat64InsertLowWord32, node->opcode());
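
TryMatchWord32Ror above factors the rotate recognition out of ReduceWord32Or so that the new ReduceWord32Xor can reuse it: for a 32-bit value, x << y and x >>> (32 - y) occupy disjoint bit positions, so combining them with either | or ^ yields the same rotation. ReduceWord32Xor also applies the algebraic identities spelled out in its comments (x ^ 0 => x, x ^ x => 0, (x ^ -1) ^ -1 => x). A standalone check of both facts follows; uint32_t stands in for Word32 values, and none of this is part of the patch.

#include <cassert>
#include <cstdint>
#include <cstdio>

// Rotate right by r for 1 <= r <= 31 (avoids the undefined shift by 32).
static uint32_t RotateRight(uint32_t x, unsigned r) {
  return (x >> r) | (x << (32 - r));
}

int main() {
  const uint32_t samples[] = {0u, 1u, 0x80000000u, 0xDEADBEEFu, 0x0F0F0F0Fu};
  for (uint32_t x : samples) {
    for (unsigned y = 1; y < 32; ++y) {
      const uint32_t shl = x << y;
      const uint32_t shr = x >> (32 - y);
      // The shifted halves are disjoint, so | and ^ both rebuild the rotation.
      assert((shl | shr) == RotateRight(x, 32 - y));
      assert((shl ^ shr) == RotateRight(x, 32 - y));
    }
    // Xor identities used by ReduceWord32Xor.
    assert((x ^ 0u) == x);
    assert((x ^ x) == 0u);
    assert(((x ^ ~0u) ^ ~0u) == x);  // (x ^ -1) ^ -1 => x
  }
  std::printf("rotation and xor identities hold for all samples\n");
  return 0;
}
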
diff --git a/src/compiler/machine-operator-reducer.h b/src/compiler/machine-operator-reducer.h
index 167bf7e..574f45c 100644
--- a/src/compiler/machine-operator-reducer.h
+++ b/src/compiler/machine-operator-reducer.h
@@ -87,7 +87,9 @@
   Reduction ReduceWord32Sar(Node* node);
   Reduction ReduceWord64Sar(Node* node);
   Reduction ReduceWord32And(Node* node);
+  Reduction TryMatchWord32Ror(Node* node);
   Reduction ReduceWord32Or(Node* node);
+  Reduction ReduceWord32Xor(Node* node);
   Reduction ReduceFloat64InsertLowWord32(Node* node);
   Reduction ReduceFloat64InsertHighWord32(Node* node);
   Reduction ReduceFloat64Compare(Node* node);
diff --git a/src/compiler/machine-operator.cc b/src/compiler/machine-operator.cc
index 43c6202..e36a61e 100644
--- a/src/compiler/machine-operator.cc
+++ b/src/compiler/machine-operator.cc
@@ -36,6 +36,7 @@
 
 LoadRepresentation LoadRepresentationOf(Operator const* op) {
   DCHECK(IrOpcode::kLoad == op->opcode() ||
+         IrOpcode::kProtectedLoad == op->opcode() ||
          IrOpcode::kAtomicLoad == op->opcode());
   return OpParameter<LoadRepresentation>(op);
 }
@@ -78,315 +79,317 @@
   return OpParameter<MachineRepresentation>(op);
 }
 
-#define PURE_OP_LIST(V)                                                      \
-  V(Word32And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
-  V(Word32Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Word32Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
-  V(Word32Shl, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word32Shr, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word32Sar, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word32Ror, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word32Equal, Operator::kCommutative, 2, 0, 1)                            \
-  V(Word32Clz, Operator::kNoProperties, 1, 0, 1)                             \
-  V(Word64And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
-  V(Word64Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Word64Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
-  V(Word64Shl, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word64Shr, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word64Sar, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word64Ror, Operator::kNoProperties, 2, 0, 1)                             \
-  V(Word64Clz, Operator::kNoProperties, 1, 0, 1)                             \
-  V(Word64Equal, Operator::kCommutative, 2, 0, 1)                            \
-  V(Int32Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Int32Sub, Operator::kNoProperties, 2, 0, 1)                              \
-  V(Int32Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Int32MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
-  V(Int32Div, Operator::kNoProperties, 2, 1, 1)                              \
-  V(Int32Mod, Operator::kNoProperties, 2, 1, 1)                              \
-  V(Int32LessThan, Operator::kNoProperties, 2, 0, 1)                         \
-  V(Int32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                  \
-  V(Uint32Div, Operator::kNoProperties, 2, 1, 1)                             \
-  V(Uint32LessThan, Operator::kNoProperties, 2, 0, 1)                        \
-  V(Uint32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                 \
-  V(Uint32Mod, Operator::kNoProperties, 2, 1, 1)                             \
-  V(Uint32MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
-  V(Int64Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Int64Sub, Operator::kNoProperties, 2, 0, 1)                              \
-  V(Int64Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)      \
-  V(Int64Div, Operator::kNoProperties, 2, 1, 1)                              \
-  V(Int64Mod, Operator::kNoProperties, 2, 1, 1)                              \
-  V(Int64LessThan, Operator::kNoProperties, 2, 0, 1)                         \
-  V(Int64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                  \
-  V(Uint64Div, Operator::kNoProperties, 2, 1, 1)                             \
-  V(Uint64Mod, Operator::kNoProperties, 2, 1, 1)                             \
-  V(Uint64LessThan, Operator::kNoProperties, 2, 0, 1)                        \
-  V(Uint64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                 \
-  V(BitcastWordToTagged, Operator::kNoProperties, 1, 0, 1)                   \
-  V(TruncateFloat64ToWord32, Operator::kNoProperties, 1, 0, 1)               \
-  V(ChangeFloat32ToFloat64, Operator::kNoProperties, 1, 0, 1)                \
-  V(ChangeFloat64ToInt32, Operator::kNoProperties, 1, 0, 1)                  \
-  V(ChangeFloat64ToUint32, Operator::kNoProperties, 1, 0, 1)                 \
-  V(TruncateFloat64ToUint32, Operator::kNoProperties, 1, 0, 1)               \
-  V(TruncateFloat32ToInt32, Operator::kNoProperties, 1, 0, 1)                \
-  V(TruncateFloat32ToUint32, Operator::kNoProperties, 1, 0, 1)               \
-  V(TryTruncateFloat32ToInt64, Operator::kNoProperties, 1, 0, 2)             \
-  V(TryTruncateFloat64ToInt64, Operator::kNoProperties, 1, 0, 2)             \
-  V(TryTruncateFloat32ToUint64, Operator::kNoProperties, 1, 0, 2)            \
-  V(TryTruncateFloat64ToUint64, Operator::kNoProperties, 1, 0, 2)            \
-  V(ChangeInt32ToFloat64, Operator::kNoProperties, 1, 0, 1)                  \
-  V(Float64SilenceNaN, Operator::kNoProperties, 1, 0, 1)                     \
-  V(RoundFloat64ToInt32, Operator::kNoProperties, 1, 0, 1)                   \
-  V(RoundInt32ToFloat32, Operator::kNoProperties, 1, 0, 1)                   \
-  V(RoundInt64ToFloat32, Operator::kNoProperties, 1, 0, 1)                   \
-  V(RoundInt64ToFloat64, Operator::kNoProperties, 1, 0, 1)                   \
-  V(RoundUint32ToFloat32, Operator::kNoProperties, 1, 0, 1)                  \
-  V(RoundUint64ToFloat32, Operator::kNoProperties, 1, 0, 1)                  \
-  V(RoundUint64ToFloat64, Operator::kNoProperties, 1, 0, 1)                  \
-  V(ChangeInt32ToInt64, Operator::kNoProperties, 1, 0, 1)                    \
-  V(ChangeUint32ToFloat64, Operator::kNoProperties, 1, 0, 1)                 \
-  V(ChangeUint32ToUint64, Operator::kNoProperties, 1, 0, 1)                  \
-  V(ImpossibleToWord32, Operator::kNoProperties, 1, 0, 1)                    \
-  V(ImpossibleToWord64, Operator::kNoProperties, 1, 0, 1)                    \
-  V(ImpossibleToFloat32, Operator::kNoProperties, 1, 0, 1)                   \
-  V(ImpossibleToFloat64, Operator::kNoProperties, 1, 0, 1)                   \
-  V(ImpossibleToTagged, Operator::kNoProperties, 1, 0, 1)                    \
-  V(ImpossibleToBit, Operator::kNoProperties, 1, 0, 1)                       \
-  V(TruncateFloat64ToFloat32, Operator::kNoProperties, 1, 0, 1)              \
-  V(TruncateInt64ToInt32, Operator::kNoProperties, 1, 0, 1)                  \
-  V(BitcastFloat32ToInt32, Operator::kNoProperties, 1, 0, 1)                 \
-  V(BitcastFloat64ToInt64, Operator::kNoProperties, 1, 0, 1)                 \
-  V(BitcastInt32ToFloat32, Operator::kNoProperties, 1, 0, 1)                 \
-  V(BitcastInt64ToFloat64, Operator::kNoProperties, 1, 0, 1)                 \
-  V(Float32Abs, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float32Add, Operator::kCommutative, 2, 0, 1)                             \
-  V(Float32Sub, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float32Mul, Operator::kCommutative, 2, 0, 1)                             \
-  V(Float32Div, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float32Neg, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float32Sqrt, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float32Max, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Float32Min, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Float64Abs, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Acos, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Acosh, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Asin, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Asinh, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Atan, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Atan2, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Float64Atanh, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Cbrt, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Cos, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Cosh, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Exp, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Expm1, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Log, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Log1p, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Log2, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Log10, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float64Max, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Float64Min, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Float64Neg, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Add, Operator::kCommutative, 2, 0, 1)                             \
-  V(Float64Sub, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float64Mul, Operator::kCommutative, 2, 0, 1)                             \
-  V(Float64Div, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float64Mod, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float64Pow, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Float64Sin, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Sinh, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Sqrt, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float64Tan, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Float64Tanh, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Float32Equal, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float32LessThan, Operator::kNoProperties, 2, 0, 1)                       \
-  V(Float32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
-  V(Float64Equal, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float64LessThan, Operator::kNoProperties, 2, 0, 1)                       \
-  V(Float64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
-  V(Float64ExtractLowWord32, Operator::kNoProperties, 1, 0, 1)               \
-  V(Float64ExtractHighWord32, Operator::kNoProperties, 1, 0, 1)              \
-  V(Float64InsertLowWord32, Operator::kNoProperties, 2, 0, 1)                \
-  V(Float64InsertHighWord32, Operator::kNoProperties, 2, 0, 1)               \
-  V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1)                      \
-  V(LoadFramePointer, Operator::kNoProperties, 0, 0, 1)                      \
-  V(LoadParentFramePointer, Operator::kNoProperties, 0, 0, 1)                \
-  V(Int32PairAdd, Operator::kNoProperties, 4, 0, 2)                          \
-  V(Int32PairSub, Operator::kNoProperties, 4, 0, 2)                          \
-  V(Int32PairMul, Operator::kNoProperties, 4, 0, 2)                          \
-  V(Word32PairShl, Operator::kNoProperties, 3, 0, 2)                         \
-  V(Word32PairShr, Operator::kNoProperties, 3, 0, 2)                         \
-  V(Word32PairSar, Operator::kNoProperties, 3, 0, 2)                         \
-  V(CreateFloat32x4, Operator::kNoProperties, 4, 0, 1)                       \
-  V(Float32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                  \
-  V(Float32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                  \
-  V(Float32x4Abs, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float32x4Neg, Operator::kNoProperties, 1, 0, 1)                          \
-  V(Float32x4Sqrt, Operator::kNoProperties, 1, 0, 1)                         \
-  V(Float32x4RecipApprox, Operator::kNoProperties, 1, 0, 1)                  \
-  V(Float32x4RecipSqrtApprox, Operator::kNoProperties, 1, 0, 1)              \
-  V(Float32x4Add, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float32x4Sub, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Float32x4Mul, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float32x4Div, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Float32x4Min, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float32x4Max, Operator::kCommutative, 2, 0, 1)                           \
-  V(Float32x4MinNum, Operator::kCommutative, 2, 0, 1)                        \
-  V(Float32x4MaxNum, Operator::kCommutative, 2, 0, 1)                        \
-  V(Float32x4Equal, Operator::kCommutative, 2, 0, 1)                         \
-  V(Float32x4NotEqual, Operator::kCommutative, 2, 0, 1)                      \
-  V(Float32x4LessThan, Operator::kNoProperties, 2, 0, 1)                     \
-  V(Float32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
-  V(Float32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                  \
-  V(Float32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)           \
-  V(Float32x4Select, Operator::kNoProperties, 3, 0, 1)                       \
-  V(Float32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                      \
-  V(Float32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                      \
-  V(Float32x4FromInt32x4, Operator::kNoProperties, 1, 0, 1)                  \
-  V(Float32x4FromUint32x4, Operator::kNoProperties, 1, 0, 1)                 \
-  V(CreateInt32x4, Operator::kNoProperties, 4, 0, 1)                         \
-  V(Int32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                    \
-  V(Int32x4Neg, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Int32x4Add, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int32x4Sub, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Int32x4Mul, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int32x4Min, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int32x4Max, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int32x4ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)              \
-  V(Int32x4ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int32x4Equal, Operator::kCommutative, 2, 0, 1)                           \
-  V(Int32x4NotEqual, Operator::kCommutative, 2, 0, 1)                        \
-  V(Int32x4LessThan, Operator::kNoProperties, 2, 0, 1)                       \
-  V(Int32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
-  V(Int32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int32x4Select, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Int32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                        \
-  V(Int32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                        \
-  V(Int32x4FromFloat32x4, Operator::kNoProperties, 1, 0, 1)                  \
-  V(Uint32x4Min, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint32x4Max, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint32x4ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Uint32x4ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)            \
-  V(Uint32x4LessThan, Operator::kNoProperties, 2, 0, 1)                      \
-  V(Uint32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)               \
-  V(Uint32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Uint32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)            \
-  V(Uint32x4FromFloat32x4, Operator::kNoProperties, 1, 0, 1)                 \
-  V(CreateBool32x4, Operator::kNoProperties, 4, 0, 1)                        \
-  V(Bool32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Bool32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                   \
-  V(Bool32x4And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool32x4Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Bool32x4Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool32x4Not, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Bool32x4AnyTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool32x4AllTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                       \
-  V(Bool32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                       \
-  V(Bool32x4Equal, Operator::kCommutative, 2, 0, 1)                          \
-  V(Bool32x4NotEqual, Operator::kCommutative, 2, 0, 1)                       \
-  V(CreateInt16x8, Operator::kNoProperties, 8, 0, 1)                         \
-  V(Int16x8ExtractLane, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int16x8ReplaceLane, Operator::kNoProperties, 3, 0, 1)                    \
-  V(Int16x8Neg, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Int16x8Add, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int16x8AddSaturate, Operator::kCommutative, 2, 0, 1)                     \
-  V(Int16x8Sub, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Int16x8SubSaturate, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int16x8Mul, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int16x8Min, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int16x8Max, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int16x8ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)              \
-  V(Int16x8ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int16x8Equal, Operator::kCommutative, 2, 0, 1)                           \
-  V(Int16x8NotEqual, Operator::kCommutative, 2, 0, 1)                        \
-  V(Int16x8LessThan, Operator::kNoProperties, 2, 0, 1)                       \
-  V(Int16x8LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
-  V(Int16x8GreaterThan, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int16x8GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int16x8Select, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Int16x8Swizzle, Operator::kNoProperties, 9, 0, 1)                        \
-  V(Int16x8Shuffle, Operator::kNoProperties, 10, 0, 1)                       \
-  V(Uint16x8AddSaturate, Operator::kCommutative, 2, 0, 1)                    \
-  V(Uint16x8SubSaturate, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Uint16x8Min, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint16x8Max, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint16x8ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Uint16x8ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)            \
-  V(Uint16x8LessThan, Operator::kNoProperties, 2, 0, 1)                      \
-  V(Uint16x8LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)               \
-  V(Uint16x8GreaterThan, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Uint16x8GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)            \
-  V(CreateBool16x8, Operator::kNoProperties, 8, 0, 1)                        \
-  V(Bool16x8ExtractLane, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Bool16x8ReplaceLane, Operator::kNoProperties, 3, 0, 1)                   \
-  V(Bool16x8And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool16x8Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Bool16x8Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool16x8Not, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Bool16x8AnyTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool16x8AllTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool16x8Swizzle, Operator::kNoProperties, 9, 0, 1)                       \
-  V(Bool16x8Shuffle, Operator::kNoProperties, 10, 0, 1)                      \
-  V(Bool16x8Equal, Operator::kCommutative, 2, 0, 1)                          \
-  V(Bool16x8NotEqual, Operator::kCommutative, 2, 0, 1)                       \
-  V(CreateInt8x16, Operator::kNoProperties, 16, 0, 1)                        \
-  V(Int8x16ExtractLane, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int8x16ReplaceLane, Operator::kNoProperties, 3, 0, 1)                    \
-  V(Int8x16Neg, Operator::kNoProperties, 1, 0, 1)                            \
-  V(Int8x16Add, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int8x16AddSaturate, Operator::kCommutative, 2, 0, 1)                     \
-  V(Int8x16Sub, Operator::kNoProperties, 2, 0, 1)                            \
-  V(Int8x16SubSaturate, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int8x16Mul, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int8x16Min, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int8x16Max, Operator::kCommutative, 2, 0, 1)                             \
-  V(Int8x16ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)              \
-  V(Int8x16ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int8x16Equal, Operator::kCommutative, 2, 0, 1)                           \
-  V(Int8x16NotEqual, Operator::kCommutative, 2, 0, 1)                        \
-  V(Int8x16LessThan, Operator::kNoProperties, 2, 0, 1)                       \
-  V(Int8x16LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
-  V(Int8x16GreaterThan, Operator::kNoProperties, 2, 0, 1)                    \
-  V(Int8x16GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
-  V(Int8x16Select, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Int8x16Swizzle, Operator::kNoProperties, 17, 0, 1)                       \
-  V(Int8x16Shuffle, Operator::kNoProperties, 18, 0, 1)                       \
-  V(Uint8x16AddSaturate, Operator::kCommutative, 2, 0, 1)                    \
-  V(Uint8x16SubSaturate, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Uint8x16Min, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint8x16Max, Operator::kCommutative, 2, 0, 1)                            \
-  V(Uint8x16ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)             \
-  V(Uint8x16ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)            \
-  V(Uint8x16LessThan, Operator::kNoProperties, 2, 0, 1)                      \
-  V(Uint8x16LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)               \
-  V(Uint8x16GreaterThan, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Uint8x16GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)            \
-  V(CreateBool8x16, Operator::kNoProperties, 16, 0, 1)                       \
-  V(Bool8x16ExtractLane, Operator::kNoProperties, 2, 0, 1)                   \
-  V(Bool8x16ReplaceLane, Operator::kNoProperties, 3, 0, 1)                   \
-  V(Bool8x16And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool8x16Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Bool8x16Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
-  V(Bool8x16Not, Operator::kNoProperties, 1, 0, 1)                           \
-  V(Bool8x16AnyTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool8x16AllTrue, Operator::kNoProperties, 1, 0, 1)                       \
-  V(Bool8x16Swizzle, Operator::kNoProperties, 17, 0, 1)                      \
-  V(Bool8x16Shuffle, Operator::kNoProperties, 18, 0, 1)                      \
-  V(Bool8x16Equal, Operator::kCommutative, 2, 0, 1)                          \
-  V(Bool8x16NotEqual, Operator::kCommutative, 2, 0, 1)                       \
-  V(Simd128Load, Operator::kNoProperties, 2, 0, 1)                           \
-  V(Simd128Load1, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Simd128Load2, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Simd128Load3, Operator::kNoProperties, 2, 0, 1)                          \
-  V(Simd128Store, Operator::kNoProperties, 3, 0, 1)                          \
-  V(Simd128Store1, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Simd128Store2, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Simd128Store3, Operator::kNoProperties, 3, 0, 1)                         \
-  V(Simd128And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
-  V(Simd128Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
-  V(Simd128Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
+#define PURE_BINARY_OP_LIST_32(V)                                           \
+  V(Word32And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
+  V(Word32Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
+  V(Word32Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)    \
+  V(Word32Shl, Operator::kNoProperties, 2, 0, 1)                            \
+  V(Word32Shr, Operator::kNoProperties, 2, 0, 1)                            \
+  V(Word32Sar, Operator::kNoProperties, 2, 0, 1)                            \
+  V(Word32Ror, Operator::kNoProperties, 2, 0, 1)                            \
+  V(Word32Equal, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int32Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
+  V(Int32Sub, Operator::kNoProperties, 2, 0, 1)                             \
+  V(Int32Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)     \
+  V(Int32MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Int32Div, Operator::kNoProperties, 2, 1, 1)                             \
+  V(Int32Mod, Operator::kNoProperties, 2, 1, 1)                             \
+  V(Int32LessThan, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Int32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint32Div, Operator::kNoProperties, 2, 1, 1)                            \
+  V(Uint32LessThan, Operator::kNoProperties, 2, 0, 1)                       \
+  V(Uint32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)                \
+  V(Uint32Mod, Operator::kNoProperties, 2, 1, 1)                            \
+  V(Uint32MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)
+
+#define PURE_BINARY_OP_LIST_64(V)                                        \
+  V(Word64And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Word64Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Word64Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Word64Shl, Operator::kNoProperties, 2, 0, 1)                         \
+  V(Word64Shr, Operator::kNoProperties, 2, 0, 1)                         \
+  V(Word64Sar, Operator::kNoProperties, 2, 0, 1)                         \
+  V(Word64Ror, Operator::kNoProperties, 2, 0, 1)                         \
+  V(Word64Equal, Operator::kCommutative, 2, 0, 1)                        \
+  V(Int64Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Int64Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Int64Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Int64Div, Operator::kNoProperties, 2, 1, 1)                          \
+  V(Int64Mod, Operator::kNoProperties, 2, 1, 1)                          \
+  V(Int64LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Int64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Uint64Div, Operator::kNoProperties, 2, 1, 1)                         \
+  V(Uint64Mod, Operator::kNoProperties, 2, 1, 1)                         \
+  V(Uint64LessThan, Operator::kNoProperties, 2, 0, 1)                    \
+  V(Uint64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)
+
+#define PURE_OP_LIST(V)                                                    \
+  PURE_BINARY_OP_LIST_32(V)                                                \
+  PURE_BINARY_OP_LIST_64(V)                                                \
+  V(Word32Clz, Operator::kNoProperties, 1, 0, 1)                           \
+  V(Word64Clz, Operator::kNoProperties, 1, 0, 1)                           \
+  V(BitcastTaggedToWord, Operator::kNoProperties, 1, 0, 1)                 \
+  V(BitcastWordToTagged, Operator::kNoProperties, 1, 0, 1)                 \
+  V(BitcastWordToTaggedSigned, Operator::kNoProperties, 1, 0, 1)           \
+  V(TruncateFloat64ToWord32, Operator::kNoProperties, 1, 0, 1)             \
+  V(ChangeFloat32ToFloat64, Operator::kNoProperties, 1, 0, 1)              \
+  V(ChangeFloat64ToInt32, Operator::kNoProperties, 1, 0, 1)                \
+  V(ChangeFloat64ToUint32, Operator::kNoProperties, 1, 0, 1)               \
+  V(TruncateFloat64ToUint32, Operator::kNoProperties, 1, 0, 1)             \
+  V(TruncateFloat32ToInt32, Operator::kNoProperties, 1, 0, 1)              \
+  V(TruncateFloat32ToUint32, Operator::kNoProperties, 1, 0, 1)             \
+  V(TryTruncateFloat32ToInt64, Operator::kNoProperties, 1, 0, 2)           \
+  V(TryTruncateFloat64ToInt64, Operator::kNoProperties, 1, 0, 2)           \
+  V(TryTruncateFloat32ToUint64, Operator::kNoProperties, 1, 0, 2)          \
+  V(TryTruncateFloat64ToUint64, Operator::kNoProperties, 1, 0, 2)          \
+  V(ChangeInt32ToFloat64, Operator::kNoProperties, 1, 0, 1)                \
+  V(Float64SilenceNaN, Operator::kNoProperties, 1, 0, 1)                   \
+  V(RoundFloat64ToInt32, Operator::kNoProperties, 1, 0, 1)                 \
+  V(RoundInt32ToFloat32, Operator::kNoProperties, 1, 0, 1)                 \
+  V(RoundInt64ToFloat32, Operator::kNoProperties, 1, 0, 1)                 \
+  V(RoundInt64ToFloat64, Operator::kNoProperties, 1, 0, 1)                 \
+  V(RoundUint32ToFloat32, Operator::kNoProperties, 1, 0, 1)                \
+  V(RoundUint64ToFloat32, Operator::kNoProperties, 1, 0, 1)                \
+  V(RoundUint64ToFloat64, Operator::kNoProperties, 1, 0, 1)                \
+  V(ChangeInt32ToInt64, Operator::kNoProperties, 1, 0, 1)                  \
+  V(ChangeUint32ToFloat64, Operator::kNoProperties, 1, 0, 1)               \
+  V(ChangeUint32ToUint64, Operator::kNoProperties, 1, 0, 1)                \
+  V(TruncateFloat64ToFloat32, Operator::kNoProperties, 1, 0, 1)            \
+  V(TruncateInt64ToInt32, Operator::kNoProperties, 1, 0, 1)                \
+  V(BitcastFloat32ToInt32, Operator::kNoProperties, 1, 0, 1)               \
+  V(BitcastFloat64ToInt64, Operator::kNoProperties, 1, 0, 1)               \
+  V(BitcastInt32ToFloat32, Operator::kNoProperties, 1, 0, 1)               \
+  V(BitcastInt64ToFloat64, Operator::kNoProperties, 1, 0, 1)               \
+  V(Float32Abs, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float32Add, Operator::kCommutative, 2, 0, 1)                           \
+  V(Float32Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float32Mul, Operator::kCommutative, 2, 0, 1)                           \
+  V(Float32Div, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float32Neg, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float32Sqrt, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float32Max, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Float32Min, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Float64Abs, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Acos, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Acosh, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Asin, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Asinh, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Atan, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Atan2, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Float64Atanh, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Cbrt, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Cos, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Cosh, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Exp, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Expm1, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Log, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Log1p, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Log2, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Log10, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float64Max, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Float64Min, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Float64Neg, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Add, Operator::kCommutative, 2, 0, 1)                           \
+  V(Float64Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float64Mul, Operator::kCommutative, 2, 0, 1)                           \
+  V(Float64Div, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float64Mod, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float64Pow, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Float64Sin, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Sinh, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Sqrt, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float64Tan, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Float64Tanh, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Float32Equal, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float32LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Float32LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Float64Equal, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float64LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Float64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Float64ExtractLowWord32, Operator::kNoProperties, 1, 0, 1)             \
+  V(Float64ExtractHighWord32, Operator::kNoProperties, 1, 0, 1)            \
+  V(Float64InsertLowWord32, Operator::kNoProperties, 2, 0, 1)              \
+  V(Float64InsertHighWord32, Operator::kNoProperties, 2, 0, 1)             \
+  V(LoadStackPointer, Operator::kNoProperties, 0, 0, 1)                    \
+  V(LoadFramePointer, Operator::kNoProperties, 0, 0, 1)                    \
+  V(LoadParentFramePointer, Operator::kNoProperties, 0, 0, 1)              \
+  V(Int32PairAdd, Operator::kNoProperties, 4, 0, 2)                        \
+  V(Int32PairSub, Operator::kNoProperties, 4, 0, 2)                        \
+  V(Int32PairMul, Operator::kNoProperties, 4, 0, 2)                        \
+  V(Word32PairShl, Operator::kNoProperties, 3, 0, 2)                       \
+  V(Word32PairShr, Operator::kNoProperties, 3, 0, 2)                       \
+  V(Word32PairSar, Operator::kNoProperties, 3, 0, 2)                       \
+  V(CreateFloat32x4, Operator::kNoProperties, 4, 0, 1)                     \
+  V(Float32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                \
+  V(Float32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                \
+  V(Float32x4Abs, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float32x4Neg, Operator::kNoProperties, 1, 0, 1)                        \
+  V(Float32x4Sqrt, Operator::kNoProperties, 1, 0, 1)                       \
+  V(Float32x4RecipApprox, Operator::kNoProperties, 1, 0, 1)                \
+  V(Float32x4RecipSqrtApprox, Operator::kNoProperties, 1, 0, 1)            \
+  V(Float32x4Add, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float32x4Sub, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Float32x4Mul, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float32x4Div, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Float32x4Min, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float32x4Max, Operator::kCommutative, 2, 0, 1)                         \
+  V(Float32x4MinNum, Operator::kCommutative, 2, 0, 1)                      \
+  V(Float32x4MaxNum, Operator::kCommutative, 2, 0, 1)                      \
+  V(Float32x4Equal, Operator::kCommutative, 2, 0, 1)                       \
+  V(Float32x4NotEqual, Operator::kCommutative, 2, 0, 1)                    \
+  V(Float32x4LessThan, Operator::kNoProperties, 2, 0, 1)                   \
+  V(Float32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)            \
+  V(Float32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                \
+  V(Float32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)         \
+  V(Float32x4Select, Operator::kNoProperties, 3, 0, 1)                     \
+  V(Float32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                    \
+  V(Float32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                    \
+  V(Float32x4FromInt32x4, Operator::kNoProperties, 1, 0, 1)                \
+  V(Float32x4FromUint32x4, Operator::kNoProperties, 1, 0, 1)               \
+  V(CreateInt32x4, Operator::kNoProperties, 4, 0, 1)                       \
+  V(Int32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                  \
+  V(Int32x4Neg, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Int32x4Add, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int32x4Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Int32x4Mul, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int32x4Min, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int32x4Max, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int32x4ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)            \
+  V(Int32x4ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int32x4Equal, Operator::kCommutative, 2, 0, 1)                         \
+  V(Int32x4NotEqual, Operator::kCommutative, 2, 0, 1)                      \
+  V(Int32x4LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Int32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Int32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int32x4Select, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Int32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                      \
+  V(Int32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                      \
+  V(Int32x4FromFloat32x4, Operator::kNoProperties, 1, 0, 1)                \
+  V(Uint32x4Min, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint32x4Max, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint32x4ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Uint32x4ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)          \
+  V(Uint32x4LessThan, Operator::kNoProperties, 2, 0, 1)                    \
+  V(Uint32x4LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
+  V(Uint32x4GreaterThan, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint32x4GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)          \
+  V(Uint32x4FromFloat32x4, Operator::kNoProperties, 1, 0, 1)               \
+  V(CreateBool32x4, Operator::kNoProperties, 4, 0, 1)                      \
+  V(Bool32x4ExtractLane, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Bool32x4ReplaceLane, Operator::kNoProperties, 3, 0, 1)                 \
+  V(Bool32x4And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool32x4Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Bool32x4Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool32x4Not, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Bool32x4AnyTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool32x4AllTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool32x4Swizzle, Operator::kNoProperties, 5, 0, 1)                     \
+  V(Bool32x4Shuffle, Operator::kNoProperties, 6, 0, 1)                     \
+  V(Bool32x4Equal, Operator::kCommutative, 2, 0, 1)                        \
+  V(Bool32x4NotEqual, Operator::kCommutative, 2, 0, 1)                     \
+  V(CreateInt16x8, Operator::kNoProperties, 8, 0, 1)                       \
+  V(Int16x8ExtractLane, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int16x8ReplaceLane, Operator::kNoProperties, 3, 0, 1)                  \
+  V(Int16x8Neg, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Int16x8Add, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int16x8AddSaturate, Operator::kCommutative, 2, 0, 1)                   \
+  V(Int16x8Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Int16x8SubSaturate, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int16x8Mul, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int16x8Min, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int16x8Max, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int16x8ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)            \
+  V(Int16x8ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int16x8Equal, Operator::kCommutative, 2, 0, 1)                         \
+  V(Int16x8NotEqual, Operator::kCommutative, 2, 0, 1)                      \
+  V(Int16x8LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Int16x8LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Int16x8GreaterThan, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int16x8GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int16x8Select, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Int16x8Swizzle, Operator::kNoProperties, 9, 0, 1)                      \
+  V(Int16x8Shuffle, Operator::kNoProperties, 10, 0, 1)                     \
+  V(Uint16x8AddSaturate, Operator::kCommutative, 2, 0, 1)                  \
+  V(Uint16x8SubSaturate, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint16x8Min, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint16x8Max, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint16x8ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Uint16x8ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)          \
+  V(Uint16x8LessThan, Operator::kNoProperties, 2, 0, 1)                    \
+  V(Uint16x8LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
+  V(Uint16x8GreaterThan, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint16x8GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)          \
+  V(CreateBool16x8, Operator::kNoProperties, 8, 0, 1)                      \
+  V(Bool16x8ExtractLane, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Bool16x8ReplaceLane, Operator::kNoProperties, 3, 0, 1)                 \
+  V(Bool16x8And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool16x8Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Bool16x8Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool16x8Not, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Bool16x8AnyTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool16x8AllTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool16x8Swizzle, Operator::kNoProperties, 9, 0, 1)                     \
+  V(Bool16x8Shuffle, Operator::kNoProperties, 10, 0, 1)                    \
+  V(Bool16x8Equal, Operator::kCommutative, 2, 0, 1)                        \
+  V(Bool16x8NotEqual, Operator::kCommutative, 2, 0, 1)                     \
+  V(CreateInt8x16, Operator::kNoProperties, 16, 0, 1)                      \
+  V(Int8x16ExtractLane, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int8x16ReplaceLane, Operator::kNoProperties, 3, 0, 1)                  \
+  V(Int8x16Neg, Operator::kNoProperties, 1, 0, 1)                          \
+  V(Int8x16Add, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int8x16AddSaturate, Operator::kCommutative, 2, 0, 1)                   \
+  V(Int8x16Sub, Operator::kNoProperties, 2, 0, 1)                          \
+  V(Int8x16SubSaturate, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int8x16Mul, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int8x16Min, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int8x16Max, Operator::kCommutative, 2, 0, 1)                           \
+  V(Int8x16ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)            \
+  V(Int8x16ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int8x16Equal, Operator::kCommutative, 2, 0, 1)                         \
+  V(Int8x16NotEqual, Operator::kCommutative, 2, 0, 1)                      \
+  V(Int8x16LessThan, Operator::kNoProperties, 2, 0, 1)                     \
+  V(Int8x16LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)              \
+  V(Int8x16GreaterThan, Operator::kNoProperties, 2, 0, 1)                  \
+  V(Int8x16GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)           \
+  V(Int8x16Select, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Int8x16Swizzle, Operator::kNoProperties, 17, 0, 1)                     \
+  V(Int8x16Shuffle, Operator::kNoProperties, 18, 0, 1)                     \
+  V(Uint8x16AddSaturate, Operator::kCommutative, 2, 0, 1)                  \
+  V(Uint8x16SubSaturate, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint8x16Min, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint8x16Max, Operator::kCommutative, 2, 0, 1)                          \
+  V(Uint8x16ShiftLeftByScalar, Operator::kNoProperties, 2, 0, 1)           \
+  V(Uint8x16ShiftRightByScalar, Operator::kNoProperties, 2, 0, 1)          \
+  V(Uint8x16LessThan, Operator::kNoProperties, 2, 0, 1)                    \
+  V(Uint8x16LessThanOrEqual, Operator::kNoProperties, 2, 0, 1)             \
+  V(Uint8x16GreaterThan, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Uint8x16GreaterThanOrEqual, Operator::kNoProperties, 2, 0, 1)          \
+  V(CreateBool8x16, Operator::kNoProperties, 16, 0, 1)                     \
+  V(Bool8x16ExtractLane, Operator::kNoProperties, 2, 0, 1)                 \
+  V(Bool8x16ReplaceLane, Operator::kNoProperties, 3, 0, 1)                 \
+  V(Bool8x16And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool8x16Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Bool8x16Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \
+  V(Bool8x16Not, Operator::kNoProperties, 1, 0, 1)                         \
+  V(Bool8x16AnyTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool8x16AllTrue, Operator::kNoProperties, 1, 0, 1)                     \
+  V(Bool8x16Swizzle, Operator::kNoProperties, 17, 0, 1)                    \
+  V(Bool8x16Shuffle, Operator::kNoProperties, 18, 0, 1)                    \
+  V(Bool8x16Equal, Operator::kCommutative, 2, 0, 1)                        \
+  V(Bool8x16NotEqual, Operator::kCommutative, 2, 0, 1)                     \
+  V(Simd128Load, Operator::kNoProperties, 2, 0, 1)                         \
+  V(Simd128Load1, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Simd128Load2, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Simd128Load3, Operator::kNoProperties, 2, 0, 1)                        \
+  V(Simd128Store, Operator::kNoProperties, 3, 0, 1)                        \
+  V(Simd128Store1, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Simd128Store2, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Simd128Store3, Operator::kNoProperties, 3, 0, 1)                       \
+  V(Simd128And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
+  V(Simd128Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)   \
+  V(Simd128Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1)  \
   V(Simd128Not, Operator::kNoProperties, 1, 0, 1)
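
Note on the V(...) lists above: machine-operator.cc consumes them with the
X-macro pattern, expanding each entry into a cached operator instance. A
minimal, self-contained sketch of that pattern (names, fields and the exact
parameter meaning here are illustrative, not V8's real expansion):

  #include <cstdio>

  // Each entry: V(Name, properties, value_inputs, control_inputs, outputs).
  #define DEMO_PURE_OP_LIST(V) \
    V(DemoAdd, 0, 2, 0, 1)     \
    V(DemoNot, 0, 1, 0, 1)

  struct DemoOperator {
    const char* mnemonic;
    int properties;
    int value_inputs;
    int control_inputs;
    int outputs;
  };

  // One expansion of the list defines a cached instance per entry.
  #define PURE(Name, properties, value_inputs, control_inputs, outputs)   \
    static const DemoOperator k##Name = {#Name, properties, value_inputs, \
                                         control_inputs, outputs};
  DEMO_PURE_OP_LIST(PURE)
  #undef PURE

  int main() {
    std::printf("%s: %d value input(s), %d output(s)\n", kDemoNot.mnemonic,
                kDemoNot.value_inputs, kDemoNot.outputs);
    return 0;
  }
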
 
 #define PURE_OPTIONAL_OP_LIST(V)                            \
@@ -428,6 +431,8 @@
   V(Int64)                   \
   V(Uint64)                  \
   V(Pointer)                 \
+  V(TaggedSigned)            \
+  V(TaggedPointer)           \
   V(AnyTagged)
 
 #define MACHINE_REPRESENTATION_LIST(V) \
@@ -504,9 +509,18 @@
               Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoWrite,  \
               "CheckedLoad", 3, 1, 1, 1, 1, 0, MachineType::Type()) {}       \
   };                                                                         \
+  struct ProtectedLoad##Type##Operator final                                 \
+      : public Operator1<ProtectedLoadRepresentation> {                      \
+    ProtectedLoad##Type##Operator()                                          \
+        : Operator1<ProtectedLoadRepresentation>(                            \
+              IrOpcode::kProtectedLoad,                                      \
+              Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoWrite,  \
+              "ProtectedLoad", 4, 1, 1, 1, 1, 0, MachineType::Type()) {}     \
+  };                                                                         \
   Load##Type##Operator kLoad##Type;                                          \
   UnalignedLoad##Type##Operator kUnalignedLoad##Type;                        \
-  CheckedLoad##Type##Operator kCheckedLoad##Type;
+  CheckedLoad##Type##Operator kCheckedLoad##Type;                            \
+  ProtectedLoad##Type##Operator kProtectedLoad##Type;
   MACHINE_TYPE_LIST(LOAD)
 #undef LOAD
 
@@ -701,6 +715,17 @@
   return nullptr;
 }
 
+const Operator* MachineOperatorBuilder::ProtectedLoad(LoadRepresentation rep) {
+#define LOAD(Type)                       \
+  if (rep == MachineType::Type()) {      \
+    return &cache_.kProtectedLoad##Type; \
+  }
+  MACHINE_TYPE_LIST(LOAD)
+#undef LOAD
+  UNREACHABLE();
+  return nullptr;
+}
+
 const Operator* MachineOperatorBuilder::StackSlot(MachineRepresentation rep) {
 #define STACKSLOT(Type)                              \
   if (rep == MachineType::Type().representation()) { \
diff --git a/src/compiler/machine-operator.h b/src/compiler/machine-operator.h
index 611846a..56cefc5 100644
--- a/src/compiler/machine-operator.h
+++ b/src/compiler/machine-operator.h
@@ -41,6 +41,7 @@
 
 // A Load needs a MachineType.
 typedef MachineType LoadRepresentation;
+typedef LoadRepresentation ProtectedLoadRepresentation;
 
 LoadRepresentation LoadRepresentationOf(Operator const*);
 
@@ -276,9 +277,15 @@
   const Operator* Uint64LessThanOrEqual();
   const Operator* Uint64Mod();
 
+  // This operator reinterprets the bits of a tagged pointer as word.
+  const Operator* BitcastTaggedToWord();
+
   // This operator reinterprets the bits of a word as tagged pointer.
   const Operator* BitcastWordToTagged();
 
+  // This operator reinterprets the bits of a word as a Smi.
+  const Operator* BitcastWordToTaggedSigned();
+
   // JavaScript float64 to int32/uint32 truncation.
   const Operator* TruncateFloat64ToWord32();
 
@@ -302,16 +309,6 @@
   const Operator* ChangeUint32ToFloat64();
   const Operator* ChangeUint32ToUint64();
 
-  // These are changes from impossible values (for example a smi-checked
-  // string).  They can safely emit an abort instruction, which should
-  // never be reached.
-  const Operator* ImpossibleToWord32();
-  const Operator* ImpossibleToWord64();
-  const Operator* ImpossibleToFloat32();
-  const Operator* ImpossibleToFloat64();
-  const Operator* ImpossibleToTagged();
-  const Operator* ImpossibleToBit();
-
   // These operators truncate or round numbers, both changing the representation
   // of the number and mapping multiple input values onto the same output value.
   const Operator* TruncateFloat64ToFloat32();
@@ -611,6 +608,7 @@
 
   // load [base + index]
   const Operator* Load(LoadRepresentation rep);
+  const Operator* ProtectedLoad(LoadRepresentation rep);
 
   // store [base + index], value
   const Operator* Store(StoreRepresentation rep);
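
BitcastTaggedToWord and BitcastWordToTaggedSigned only reinterpret bits, so
the interesting part is the Smi tagging they expose. A rough standalone model
(shift width and helper names are illustrative; V8's actual Smi encoding is
platform-dependent):

  #include <cassert>
  #include <cstdint>

  // Low tag bit clear == Smi; the payload is the small integer, shifted.
  constexpr uintptr_t kSmiTagMask = 1;

  uintptr_t SmiWordFromInt(intptr_t value) {
    return static_cast<uintptr_t>(value) << 1;  // tag bit ends up 0
  }

  intptr_t IntFromSmiWord(uintptr_t word) {
    assert((word & kSmiTagMask) == 0);  // only valid for Smi-tagged words
    return static_cast<intptr_t>(word) >> 1;
  }

  int main() {
    assert(IntFromSmiWord(SmiWordFromInt(-42)) == -42);
    assert(IntFromSmiWord(SmiWordFromInt(7)) == 7);
    return 0;
  }
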
diff --git a/src/compiler/memory-optimizer.cc b/src/compiler/memory-optimizer.cc
index 97c4362..66fcbb9 100644
--- a/src/compiler/memory-optimizer.cc
+++ b/src/compiler/memory-optimizer.cc
@@ -107,7 +107,38 @@
   Node* size = node->InputAt(0);
   Node* effect = node->InputAt(1);
   Node* control = node->InputAt(2);
-  PretenureFlag pretenure = OpParameter<PretenureFlag>(node->op());
+  PretenureFlag pretenure = PretenureFlagOf(node->op());
+
+  // Propagate tenuring from outer allocations to inner allocations, i.e.
+  // when we allocate an object in old space and store a newly allocated
+  // child object into the pretenured object, the child object should be
+  // pretenured to old space as well.
+  if (pretenure == TENURED) {
+    for (Edge const edge : node->use_edges()) {
+      Node* const user = edge.from();
+      if (user->opcode() == IrOpcode::kStoreField && edge.index() == 0) {
+        Node* const child = user->InputAt(1);
+        if (child->opcode() == IrOpcode::kAllocate &&
+            PretenureFlagOf(child->op()) == NOT_TENURED) {
+          NodeProperties::ChangeOp(child, node->op());
+          break;
+        }
+      }
+    }
+  } else {
+    DCHECK_EQ(NOT_TENURED, pretenure);
+    for (Edge const edge : node->use_edges()) {
+      Node* const user = edge.from();
+      if (user->opcode() == IrOpcode::kStoreField && edge.index() == 1) {
+        Node* const parent = user->InputAt(0);
+        if (parent->opcode() == IrOpcode::kAllocate &&
+            PretenureFlagOf(parent->op()) == TENURED) {
+          pretenure = TENURED;
+          break;
+        }
+      }
+    }
+  }
 
   // Determine the top/limit addresses.
   Node* top_address = jsgraph()->ExternalConstant(
@@ -122,9 +153,9 @@
   // Check if we can fold this allocation into a previous allocation represented
   // by the incoming {state}.
   Int32Matcher m(size);
-  if (m.HasValue() && m.Value() < Page::kMaxRegularHeapObjectSize) {
+  if (m.HasValue() && m.Value() < kMaxRegularHeapObjectSize) {
     int32_t const object_size = m.Value();
-    if (state->size() <= Page::kMaxRegularHeapObjectSize - object_size &&
+    if (state->size() <= kMaxRegularHeapObjectSize - object_size &&
         state->group()->pretenure() == pretenure) {
       // We can fold this Allocate {node} into the allocation {group}
       // represented by the given {state}. Compute the upper bound for
@@ -282,8 +313,9 @@
 
     control = graph()->NewNode(common()->Merge(2), if_true, if_false);
     effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
-    value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
-                             vtrue, vfalse, control);
+    value = graph()->NewNode(
+        common()->Phi(MachineRepresentation::kTaggedPointer, 2), vtrue, vfalse,
+        control);
 
     // Create an unfoldable allocation group.
     AllocationGroup* group =
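
The tenuring propagation added above walks the StoreField uses of an
allocation node. Reduced to a standalone sketch (simplified types, and
unlike the real pass it promotes every stored child rather than one use
edge at a time):

  #include <cassert>
  #include <vector>

  enum Pretenure { NOT_TENURED, TENURED };

  struct Allocation {
    Pretenure pretenure;
    // Allocations stored into this object's fields (the StoreField edges).
    std::vector<Allocation*> stored_children;
  };

  // If the outer allocation is tenured, promote freshly allocated children
  // stored into it, so parent and child end up in the same space.
  void PropagateTenuring(Allocation* outer) {
    if (outer->pretenure != TENURED) return;
    for (Allocation* child : outer->stored_children) {
      if (child->pretenure == NOT_TENURED) child->pretenure = TENURED;
    }
  }

  int main() {
    Allocation child{NOT_TENURED, {}};
    Allocation parent{TENURED, {&child}};
    PropagateTenuring(&parent);
    assert(child.pretenure == TENURED);
    return 0;
  }
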
diff --git a/src/compiler/memory-optimizer.h b/src/compiler/memory-optimizer.h
index f0cd546..ba1d6dd 100644
--- a/src/compiler/memory-optimizer.h
+++ b/src/compiler/memory-optimizer.h
@@ -5,7 +5,7 @@
 #ifndef V8_COMPILER_MEMORY_OPTIMIZER_H_
 #define V8_COMPILER_MEMORY_OPTIMIZER_H_
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/mips/code-generator-mips.cc b/src/compiler/mips/code-generator-mips.cc
index d06bc30..12ab4af 100644
--- a/src/compiler/mips/code-generator-mips.cc
+++ b/src/compiler/mips/code-generator-mips.cc
@@ -2,8 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "src/ast/scopes.h"
 #include "src/compiler/code-generator.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -693,9 +693,6 @@
     case kArchDebugBreak:
       __ stop("kArchDebugBreak");
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchComment: {
       Address comment_string = i.InputExternalReference(0).address();
       __ RecordComment(reinterpret_cast<const char*>(comment_string));
@@ -710,8 +707,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1121,6 +1118,38 @@
       __ sub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputDoubleRegister(1));
       break;
+    case kMipsMaddS:
+      __ madd_s(i.OutputFloatRegister(), i.InputFloatRegister(0),
+                i.InputFloatRegister(1), i.InputFloatRegister(2));
+      break;
+    case kMipsMaddD:
+      __ madd_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
+                i.InputDoubleRegister(1), i.InputDoubleRegister(2));
+      break;
+    case kMipsMaddfS:
+      __ maddf_s(i.OutputFloatRegister(), i.InputFloatRegister(1),
+                 i.InputFloatRegister(2));
+      break;
+    case kMipsMaddfD:
+      __ maddf_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
+                 i.InputDoubleRegister(2));
+      break;
+    case kMipsMsubS:
+      __ msub_s(i.OutputFloatRegister(), i.InputFloatRegister(0),
+                i.InputFloatRegister(1), i.InputFloatRegister(2));
+      break;
+    case kMipsMsubD:
+      __ msub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
+                i.InputDoubleRegister(1), i.InputDoubleRegister(2));
+      break;
+    case kMipsMsubfS:
+      __ msubf_s(i.OutputFloatRegister(), i.InputFloatRegister(1),
+                 i.InputFloatRegister(2));
+      break;
+    case kMipsMsubfD:
+      __ msubf_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
+                 i.InputDoubleRegister(2));
+      break;
     case kMipsMulD:
       // TODO(plind): add special case: right op is -1.0, see arm port.
       __ mul_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
@@ -1358,7 +1387,12 @@
       break;
 
     // ... more basic instructions ...
-
+    case kMipsSeb:
+      __ seb(i.OutputRegister(), i.InputRegister(0));
+      break;
+    case kMipsSeh:
+      __ seh(i.OutputRegister(), i.InputRegister(0));
+      break;
     case kMipsLbu:
       __ lbu(i.OutputRegister(), i.MemoryOperand());
       break;
@@ -1843,13 +1877,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
@@ -2028,9 +2063,14 @@
     } else if (src.type() == Constant::kFloat32) {
       if (destination->IsFPStackSlot()) {
         MemOperand dst = g.ToMemOperand(destination);
-        __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
-        __ sw(at, dst);
+        if (bit_cast<int32_t>(src.ToFloat32()) == 0) {
+          __ sw(zero_reg, dst);
+        } else {
+          __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
+          __ sw(at, dst);
+        }
       } else {
+        DCHECK(destination->IsFPRegister());
         FloatRegister dst = g.ToSingleRegister(destination);
         __ Move(dst, src.ToFloat32());
       }
diff --git a/src/compiler/mips/instruction-codes-mips.h b/src/compiler/mips/instruction-codes-mips.h
index 269ac0f..45ed041 100644
--- a/src/compiler/mips/instruction-codes-mips.h
+++ b/src/compiler/mips/instruction-codes-mips.h
@@ -69,6 +69,14 @@
   V(MipsAddPair)                   \
   V(MipsSubPair)                   \
   V(MipsMulPair)                   \
+  V(MipsMaddS)                     \
+  V(MipsMaddD)                     \
+  V(MipsMaddfS)                    \
+  V(MipsMaddfD)                    \
+  V(MipsMsubS)                     \
+  V(MipsMsubD)                     \
+  V(MipsMsubfS)                    \
+  V(MipsMsubfD)                    \
   V(MipsFloat32RoundDown)          \
   V(MipsFloat32RoundTruncate)      \
   V(MipsFloat32RoundUp)            \
@@ -126,7 +134,9 @@
   V(MipsPush)                      \
   V(MipsStoreToStackSlot)          \
   V(MipsByteSwap32)                \
-  V(MipsStackClaim)
+  V(MipsStackClaim)                \
+  V(MipsSeb)                       \
+  V(MipsSeh)
 
 // Addressing modes represent the "shape" of inputs to an instruction.
 // Many instructions support multiple addressing modes. Addressing modes
diff --git a/src/compiler/mips/instruction-selector-mips.cc b/src/compiler/mips/instruction-selector-mips.cc
index 4c35369..0a98930 100644
--- a/src/compiler/mips/instruction-selector-mips.cc
+++ b/src/compiler/mips/instruction-selector-mips.cc
@@ -185,6 +185,10 @@
   }
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   MipsOperandGenerator g(this);
@@ -198,7 +202,7 @@
 
   // TODO(mips): I guess this could be done in a better way.
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     InstructionOperand inputs[3];
     size_t input_count = 0;
     inputs[input_count++] = g.UseUniqueRegister(base);
@@ -403,6 +407,24 @@
 
 
 void InstructionSelector::VisitWord32Sar(Node* node) {
+  Int32BinopMatcher m(node);
+  if (m.left().IsWord32Shl() && CanCover(node, m.left().node())) {
+    Int32BinopMatcher mleft(m.left().node());
+    if (m.right().HasValue() && mleft.right().HasValue()) {
+      MipsOperandGenerator g(this);
+      uint32_t sar = m.right().Value();
+      uint32_t shl = mleft.right().Value();
+      if ((sar == shl) && (sar == 16)) {
+        Emit(kMipsSeh, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()));
+        return;
+      } else if ((sar == shl) && (sar == 24)) {
+        Emit(kMipsSeb, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()));
+        return;
+      }
+    }
+  }
   VisitRRO(this, kMipsSar, node);
 }
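
The shl/sar-by-equal-amounts pattern matched above is a narrow sign
extension, which MIPS32r2 and later provide as single instructions (seb for
8 bits, seh for 16 bits). The same operation as plain C++ (illustrative, not
compiler IR):

  #include <cassert>
  #include <cstdint>

  // (x << 24) >> 24 with arithmetic shifts sign-extends the low byte (seb);
  // (x << 16) >> 16 sign-extends the low half-word (seh).
  int32_t SignExtendByte(int32_t x) { return static_cast<int8_t>(x); }
  int32_t SignExtendHalf(int32_t x) { return static_cast<int16_t>(x); }

  int main() {
    assert(SignExtendByte(0x000000FF) == -1);
    assert(SignExtendHalf(0x00008000) == -32768);
    assert(SignExtendHalf(0x00001234) == 0x1234);
    return 0;
  }
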
 
@@ -759,20 +781,126 @@
 
 
 void InstructionSelector::VisitFloat32Add(Node* node) {
+  MipsOperandGenerator g(this);
+  Float32BinopMatcher m(node);
+  if (m.left().IsFloat32Mul() && CanCover(node, m.left().node())) {
+    // For Add.S(Mul.S(x, y), z):
+    Float32BinopMatcher mleft(m.left().node());
+    if (IsMipsArchVariant(kMips32r2)) {  // Select Madd.S(z, x, y).
+      Emit(kMipsMaddS, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    } else if (IsMipsArchVariant(kMips32r6)) {  // Select Maddf.S(z, x, y).
+      Emit(kMipsMaddfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  }
+  if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
+    // For Add.S(x, Mul.S(y, z)):
+    Float32BinopMatcher mright(m.right().node());
+    if (IsMipsArchVariant(kMips32r2)) {  // Select Madd.S(x, y, z).
+      Emit(kMipsMaddS, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
+           g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    } else if (IsMipsArchVariant(kMips32r6)) {  // Select Maddf.S(x, y, z).
+      Emit(kMipsMaddfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMipsAddS, node);
 }
 
 
 void InstructionSelector::VisitFloat64Add(Node* node) {
+  MipsOperandGenerator g(this);
+  Float64BinopMatcher m(node);
+  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
+    // For Add.D(Mul.D(x, y), z):
+    Float64BinopMatcher mleft(m.left().node());
+    if (IsMipsArchVariant(kMips32r2)) {  // Select Madd.D(z, x, y).
+      Emit(kMipsMaddD, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    } else if (IsMipsArchVariant(kMips32r6)) {  // Select Maddf.D(z, x, y).
+      Emit(kMipsMaddfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  }
+  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
+    // For Add.D(x, Mul.D(y, z)):
+    Float64BinopMatcher mright(m.right().node());
+    if (IsMipsArchVariant(kMips32r2)) {  // Select Madd.D(x, y, z).
+      Emit(kMipsMaddD, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
+           g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    } else if (IsMipsArchVariant(kMips32r6)) {  // Select Maddf.D(x, y, z).
+      Emit(kMipsMaddfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMipsAddD, node);
 }
 
 
 void InstructionSelector::VisitFloat32Sub(Node* node) {
+  MipsOperandGenerator g(this);
+  Float32BinopMatcher m(node);
+  if (m.left().IsFloat32Mul() && CanCover(node, m.left().node())) {
+    if (IsMipsArchVariant(kMips32r2)) {
+      // For Sub.S(Mul.S(x,y), z) select Msub.S(z, x, y).
+      Float32BinopMatcher mleft(m.left().node());
+      Emit(kMipsMsubS, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  } else if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
+    if (IsMipsArchVariant(kMips32r6)) {
+      // For Sub.S(x,Mul.S(y,z)) select Msubf.S(x, y, z).
+      Float32BinopMatcher mright(m.right().node());
+      Emit(kMipsMsubfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMipsSubS, node);
 }
 
 void InstructionSelector::VisitFloat64Sub(Node* node) {
+  MipsOperandGenerator g(this);
+  Float64BinopMatcher m(node);
+  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
+    if (IsMipsArchVariant(kMips32r2)) {
+      // For Sub.D(Mul.D(x, y), z) select Msub.D(z, x, y).
+      Float64BinopMatcher mleft(m.left().node());
+      Emit(kMipsMsubD, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  } else if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
+    if (IsMipsArchVariant(kMips32r6)) {
+      // For Sub.D(x, Mul.D(y, z)) select Msubf.D(x, y, z).
+      Float64BinopMatcher mright(m.right().node());
+      Emit(kMipsMsubfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMipsSubD, node);
 }
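
Operand order for the fused float selections above, written out as plain
arithmetic (a sketch of the value computed, mirroring the comments; rounding
behaviour of the actual madd/maddf instructions is not modelled):

  #include <cassert>

  // Madd(z, x, y) computes x * y + z; Msub(z, x, y) computes x * y - z.
  // So Add(Mul(x, y), z) and Add(z, Mul(x, y)) both become a Madd, while
  // Sub(Mul(x, y), z) becomes Msub(z, x, y).
  double MaddD(double z, double x, double y) { return x * y + z; }
  double MsubD(double z, double x, double y) { return x * y - z; }

  int main() {
    assert(MaddD(1.0, 2.0, 3.0) == 7.0);  // 2 * 3 + 1
    assert(MsubD(1.0, 2.0, 3.0) == 5.0);  // 2 * 3 - 1
    return 0;
  }
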
 
diff --git a/src/compiler/mips64/code-generator-mips64.cc b/src/compiler/mips64/code-generator-mips64.cc
index 3e2e8e2..9ed72ae 100644
--- a/src/compiler/mips64/code-generator-mips64.cc
+++ b/src/compiler/mips64/code-generator-mips64.cc
@@ -2,8 +2,8 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "src/ast/scopes.h"
 #include "src/compiler/code-generator.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -702,9 +702,6 @@
     case kArchDebugBreak:
       __ stop("kArchDebugBreak");
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchComment: {
       Address comment_string = i.InputExternalReference(0).address();
       __ RecordComment(reinterpret_cast<const char*>(comment_string));
@@ -719,8 +716,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1317,6 +1314,38 @@
       __ sub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                i.InputDoubleRegister(1));
       break;
+    case kMips64MaddS:
+      __ madd_s(i.OutputFloatRegister(), i.InputFloatRegister(0),
+                i.InputFloatRegister(1), i.InputFloatRegister(2));
+      break;
+    case kMips64MaddD:
+      __ madd_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
+                i.InputDoubleRegister(1), i.InputDoubleRegister(2));
+      break;
+    case kMips64MaddfS:
+      __ maddf_s(i.OutputFloatRegister(), i.InputFloatRegister(1),
+                 i.InputFloatRegister(2));
+      break;
+    case kMips64MaddfD:
+      __ maddf_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
+                 i.InputDoubleRegister(2));
+      break;
+    case kMips64MsubS:
+      __ msub_s(i.OutputFloatRegister(), i.InputFloatRegister(0),
+                i.InputFloatRegister(1), i.InputFloatRegister(2));
+      break;
+    case kMips64MsubD:
+      __ msub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
+                i.InputDoubleRegister(1), i.InputDoubleRegister(2));
+      break;
+    case kMips64MsubfS:
+      __ msubf_s(i.OutputFloatRegister(), i.InputFloatRegister(1),
+                 i.InputFloatRegister(2));
+      break;
+    case kMips64MsubfD:
+      __ msubf_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
+                 i.InputDoubleRegister(2));
+      break;
     case kMips64MulD:
       // TODO(plind): add special case: right op is -1.0, see arm port.
       __ mul_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
@@ -1644,6 +1673,12 @@
       break;
     // ... more basic instructions ...
 
+    case kMips64Seb:
+      __ seb(i.OutputRegister(), i.InputRegister(0));
+      break;
+    case kMips64Seh:
+      __ seh(i.OutputRegister(), i.InputRegister(0));
+      break;
     case kMips64Lbu:
       __ lbu(i.OutputRegister(), i.MemoryOperand());
       break;
@@ -2164,13 +2199,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
@@ -2350,9 +2386,14 @@
     } else if (src.type() == Constant::kFloat32) {
       if (destination->IsFPStackSlot()) {
         MemOperand dst = g.ToMemOperand(destination);
-        __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
-        __ sw(at, dst);
+        if (bit_cast<int32_t>(src.ToFloat32()) == 0) {
+          __ sw(zero_reg, dst);
+        } else {
+          __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
+          __ sw(at, dst);
+        }
       } else {
+        DCHECK(destination->IsFPRegister());
         FloatRegister dst = g.ToSingleRegister(destination);
         __ Move(dst, src.ToFloat32());
       }
diff --git a/src/compiler/mips64/instruction-codes-mips64.h b/src/compiler/mips64/instruction-codes-mips64.h
index e3dedd1..6a44434 100644
--- a/src/compiler/mips64/instruction-codes-mips64.h
+++ b/src/compiler/mips64/instruction-codes-mips64.h
@@ -85,6 +85,14 @@
   V(Mips64SqrtD)                    \
   V(Mips64MaxD)                     \
   V(Mips64MinD)                     \
+  V(Mips64MaddS)                    \
+  V(Mips64MaddD)                    \
+  V(Mips64MaddfS)                   \
+  V(Mips64MaddfD)                   \
+  V(Mips64MsubS)                    \
+  V(Mips64MsubD)                    \
+  V(Mips64MsubfS)                   \
+  V(Mips64MsubfD)                   \
   V(Mips64Float64RoundDown)         \
   V(Mips64Float64RoundTruncate)     \
   V(Mips64Float64RoundUp)           \
@@ -159,7 +167,9 @@
   V(Mips64StoreToStackSlot)         \
   V(Mips64ByteSwap64)               \
   V(Mips64ByteSwap32)               \
-  V(Mips64StackClaim)
+  V(Mips64StackClaim)               \
+  V(Mips64Seb)                      \
+  V(Mips64Seh)
 
 // Addressing modes represent the "shape" of inputs to an instruction.
 // Many instructions support multiple addressing modes. Addressing modes
diff --git a/src/compiler/mips64/instruction-selector-mips64.cc b/src/compiler/mips64/instruction-selector-mips64.cc
index 1167117..6e937e2 100644
--- a/src/compiler/mips64/instruction-selector-mips64.cc
+++ b/src/compiler/mips64/instruction-selector-mips64.cc
@@ -199,6 +199,10 @@
   EmitLoad(this, node, opcode);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   Mips64OperandGenerator g(this);
@@ -212,7 +216,7 @@
 
   // TODO(mips): I guess this could be done in a better way.
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     InstructionOperand inputs[3];
     size_t input_count = 0;
     inputs[input_count++] = g.UseUniqueRegister(base);
@@ -500,6 +504,28 @@
 
 
 void InstructionSelector::VisitWord32Sar(Node* node) {
+  Int32BinopMatcher m(node);
+  if (m.left().IsWord32Shl() && CanCover(node, m.left().node())) {
+    Int32BinopMatcher mleft(m.left().node());
+    if (m.right().HasValue() && mleft.right().HasValue()) {
+      Mips64OperandGenerator g(this);
+      uint32_t sar = m.right().Value();
+      uint32_t shl = mleft.right().Value();
+      if ((sar == shl) && (sar == 16)) {
+        Emit(kMips64Seh, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()));
+        return;
+      } else if ((sar == shl) && (sar == 24)) {
+        Emit(kMips64Seb, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()));
+        return;
+      } else if ((sar == shl) && (sar == 32)) {
+        Emit(kMips64Shl, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()), g.TempImmediate(0));
+        return;
+      }
+    }
+  }
   VisitRRO(this, kMips64Sar, node);
 }
 
@@ -1198,20 +1224,126 @@
 
 
 void InstructionSelector::VisitFloat32Add(Node* node) {
+  Mips64OperandGenerator g(this);
+  Float32BinopMatcher m(node);
+  if (m.left().IsFloat32Mul() && CanCover(node, m.left().node())) {
+    // For Add.S(Mul.S(x, y), z):
+    Float32BinopMatcher mleft(m.left().node());
+    if (kArchVariant == kMips64r2) {  // Select Madd.S(z, x, y).
+      Emit(kMips64MaddS, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    } else if (kArchVariant == kMips64r6) {  // Select Maddf.S(z, x, y).
+      Emit(kMips64MaddfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  }
+  if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
+    // For Add.S(x, Mul.S(y, z)):
+    Float32BinopMatcher mright(m.right().node());
+    if (kArchVariant == kMips64r2) {  // Select Madd.S(x, y, z).
+      Emit(kMips64MaddS, g.DefineAsRegister(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    } else if (kArchVariant == kMips64r6) {  // Select Maddf.S(x, y, z).
+      Emit(kMips64MaddfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMips64AddS, node);
 }
 
 
 void InstructionSelector::VisitFloat64Add(Node* node) {
+  Mips64OperandGenerator g(this);
+  Float64BinopMatcher m(node);
+  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
+    // For Add.D(Mul.D(x, y), z):
+    Float64BinopMatcher mleft(m.left().node());
+    if (kArchVariant == kMips64r2) {  // Select Madd.D(z, x, y).
+      Emit(kMips64MaddD, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    } else if (kArchVariant == kMips64r6) {  // Select Maddf.D(z, x, y).
+      Emit(kMips64MaddfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  }
+  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
+    // For Add.D(x, Mul.D(y, z)):
+    Float64BinopMatcher mright(m.right().node());
+    if (kArchVariant == kMips64r2) {  // Select Madd.D(x, y, z).
+      Emit(kMips64MaddD, g.DefineAsRegister(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    } else if (kArchVariant == kMips64r6) {  // Select Maddf.D(x, y, z).
+      Emit(kMips64MaddfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMips64AddD, node);
 }
 
 
 void InstructionSelector::VisitFloat32Sub(Node* node) {
+  Mips64OperandGenerator g(this);
+  Float32BinopMatcher m(node);
+  if (m.left().IsFloat32Mul() && CanCover(node, m.left().node())) {
+    if (kArchVariant == kMips64r2) {
+      // For Sub.S(Mul.S(x,y), z) select Msub.S(z, x, y).
+      Float32BinopMatcher mleft(m.left().node());
+      Emit(kMips64MsubS, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  } else if (m.right().IsFloat32Mul() && CanCover(node, m.right().node())) {
+    if (kArchVariant == kMips64r6) {
+      // For Sub.S(x,Mul.S(y,z)) select Msubf.S(x, y, z).
+      Float32BinopMatcher mright(m.right().node());
+      Emit(kMips64MsubfS, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMips64SubS, node);
 }
 
 void InstructionSelector::VisitFloat64Sub(Node* node) {
+  Mips64OperandGenerator g(this);
+  Float64BinopMatcher m(node);
+  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
+    if (kArchVariant == kMips64r2) {
+      // For Sub.D(Mul.D(x, y), z) select Msub.D(z, x, y).
+      Float64BinopMatcher mleft(m.left().node());
+      Emit(kMips64MsubD, g.DefineAsRegister(node),
+           g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
+           g.UseRegister(mleft.right().node()));
+      return;
+    }
+  } else if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
+    if (kArchVariant == kMips64r6) {
+      // For Sub.D(x, Mul.D(y, z)) select Msubf.D(x, y, z).
+      Float64BinopMatcher mright(m.right().node());
+      Emit(kMips64MsubfD, g.DefineSameAsFirst(node),
+           g.UseRegister(m.left().node()), g.UseRegister(mright.left().node()),
+           g.UseRegister(mright.right().node()));
+      return;
+    }
+  }
   VisitRRR(this, kMips64SubD, node);
 }
 
diff --git a/src/compiler/move-optimizer.cc b/src/compiler/move-optimizer.cc
index 482c254..d87ece3 100644
--- a/src/compiler/move-optimizer.cc
+++ b/src/compiler/move-optimizer.cc
@@ -424,7 +424,7 @@
 namespace {
 
 bool IsSlot(const InstructionOperand& op) {
-  return op.IsStackSlot() || op.IsDoubleStackSlot();
+  return op.IsStackSlot() || op.IsFPStackSlot();
 }
 
 
diff --git a/src/compiler/move-optimizer.h b/src/compiler/move-optimizer.h
index 8e932a0..ce26a7f 100644
--- a/src/compiler/move-optimizer.h
+++ b/src/compiler/move-optimizer.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_MOVE_OPTIMIZER_
 
 #include "src/compiler/instruction.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/node-aux-data.h b/src/compiler/node-aux-data.h
index 7a88292..b50ff38 100644
--- a/src/compiler/node-aux-data.h
+++ b/src/compiler/node-aux-data.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_NODE_AUX_DATA_H_
 
 #include "src/compiler/node.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/node-cache.cc b/src/compiler/node-cache.cc
index 061a3ae..0be6f81 100644
--- a/src/compiler/node-cache.cc
+++ b/src/compiler/node-cache.cc
@@ -6,8 +6,8 @@
 
 #include <cstring>
 
-#include "src/zone.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/node-matchers.h b/src/compiler/node-matchers.h
index 10aed51..6c283dc 100644
--- a/src/compiler/node-matchers.h
+++ b/src/compiler/node-matchers.h
@@ -11,6 +11,7 @@
 #include "src/assembler.h"
 #include "src/compiler/node.h"
 #include "src/compiler/operator.h"
+#include "src/double.h"
 
 namespace v8 {
 namespace internal {
@@ -161,6 +162,17 @@
   bool IsNegative() const { return this->HasValue() && this->Value() < 0.0; }
   bool IsNaN() const { return this->HasValue() && std::isnan(this->Value()); }
   bool IsZero() const { return this->Is(0.0) && !std::signbit(this->Value()); }
+  bool IsNormal() const {
+    return this->HasValue() && std::isnormal(this->Value());
+  }
+  bool IsPositiveOrNegativePowerOf2() const {
+    if (!this->HasValue() || (this->Value() == 0.0)) {
+      return false;
+    }
+    Double value = Double(this->Value());
+    return !value.IsInfinite() &&
+           base::bits::IsPowerOfTwo64(value.Significand());
+  }
 };
 
 typedef FloatMatcher<float, IrOpcode::kFloat32Constant> Float32Matcher;
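
IsPositiveOrNegativePowerOf2 presumably lets reducers treat +/-2^k constants
specially, e.g. replacing a division by such a constant with a multiplication
by its reciprocal, which is exact. A small illustration of why only powers of
two qualify (plain C++, not V8 code):

  #include <cassert>

  int main() {
    const double x = 3.141592653589793;

    // 8.0 and its reciprocal 0.125 are both exact binary floating-point
    // values, so dividing by one equals multiplying by the other.
    assert(x / 8.0 == x * 0.125);
    assert(x / -4.0 == x * -0.25);

    // 10.0 is not a power of two: 0.1 is not exactly representable, so the
    // same rewrite would not be value-preserving in general.
    return 0;
  }
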
diff --git a/src/compiler/node-properties.h b/src/compiler/node-properties.h
index 9812158..ed3c117 100644
--- a/src/compiler/node-properties.h
+++ b/src/compiler/node-properties.h
@@ -6,7 +6,7 @@
 #define V8_COMPILER_NODE_PROPERTIES_H_
 
 #include "src/compiler/node.h"
-#include "src/types.h"
+#include "src/compiler/types.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/node.h b/src/compiler/node.h
index 4935187..e940371 100644
--- a/src/compiler/node.h
+++ b/src/compiler/node.h
@@ -7,8 +7,8 @@
 
 #include "src/compiler/opcodes.h"
 #include "src/compiler/operator.h"
-#include "src/types.h"
-#include "src/zone-containers.h"
+#include "src/compiler/types.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/opcodes.h b/src/compiler/opcodes.h
index c1b5945..5ac2012 100644
--- a/src/compiler/opcodes.h
+++ b/src/compiler/opcodes.h
@@ -134,7 +134,8 @@
   V(JSStoreGlobal)            \
   V(JSDeleteProperty)         \
   V(JSHasProperty)            \
-  V(JSInstanceOf)
+  V(JSInstanceOf)             \
+  V(JSOrdinaryHasInstance)
 
 #define JS_CONTEXT_OP_LIST(V) \
   V(JSLoadContext)            \
@@ -150,10 +151,8 @@
   V(JSCallFunction)                 \
   V(JSCallRuntime)                  \
   V(JSConvertReceiver)              \
-  V(JSForInDone)                    \
   V(JSForInNext)                    \
   V(JSForInPrepare)                 \
-  V(JSForInStep)                    \
   V(JSLoadMessage)                  \
   V(JSStoreMessage)                 \
   V(JSGeneratorStore)               \
@@ -181,7 +180,8 @@
   V(ChangeTaggedToBit)               \
   V(ChangeBitToTagged)               \
   V(TruncateTaggedToWord32)          \
-  V(TruncateTaggedToFloat64)
+  V(TruncateTaggedToFloat64)         \
+  V(TruncateTaggedToBit)
 
 #define SIMPLIFIED_CHECKED_OP_LIST(V) \
   V(CheckedInt32Add)                  \
@@ -191,12 +191,15 @@
   V(CheckedUint32Div)                 \
   V(CheckedUint32Mod)                 \
   V(CheckedInt32Mul)                  \
+  V(CheckedInt32ToTaggedSigned)       \
   V(CheckedUint32ToInt32)             \
+  V(CheckedUint32ToTaggedSigned)      \
   V(CheckedFloat64ToInt32)            \
   V(CheckedTaggedSignedToInt32)       \
   V(CheckedTaggedToInt32)             \
   V(CheckedTruncateTaggedToWord32)    \
-  V(CheckedTaggedToFloat64)
+  V(CheckedTaggedToFloat64)           \
+  V(CheckedTaggedToTaggedSigned)
 
 #define SIMPLIFIED_COMPARE_BINOP_LIST(V) \
   V(NumberEqual)                         \
@@ -270,6 +273,7 @@
   V(NumberTan)                         \
   V(NumberTanh)                        \
   V(NumberTrunc)                       \
+  V(NumberToBoolean)                   \
   V(NumberToInt32)                     \
   V(NumberToUint32)                    \
   V(NumberSilenceNaN)
@@ -281,13 +285,14 @@
   V(BooleanNot)                     \
   V(StringCharCodeAt)               \
   V(StringFromCharCode)             \
+  V(StringFromCodePoint)            \
   V(CheckBounds)                    \
   V(CheckIf)                        \
   V(CheckMaps)                      \
   V(CheckNumber)                    \
   V(CheckString)                    \
-  V(CheckTaggedPointer)             \
-  V(CheckTaggedSigned)              \
+  V(CheckSmi)                       \
+  V(CheckHeapObject)                \
   V(CheckFloat64Hole)               \
   V(CheckTaggedHole)                \
   V(ConvertTaggedHoleToUndefined)   \
@@ -306,6 +311,7 @@
   V(ObjectIsSmi)                    \
   V(ObjectIsString)                 \
   V(ObjectIsUndetectable)           \
+  V(ArrayBufferWasNeutered)         \
   V(EnsureWritableFastElements)     \
   V(MaybeGrowFastElements)          \
   V(TransitionElementsKind)
@@ -338,59 +344,131 @@
   V(Float64LessThan)                  \
   V(Float64LessThanOrEqual)
 
+#define MACHINE_UNOP_32_LIST(V) \
+  V(Word32Clz)                  \
+  V(Word32Ctz)                  \
+  V(Word32ReverseBits)          \
+  V(Word32ReverseBytes)
+
+#define MACHINE_BINOP_32_LIST(V) \
+  V(Word32And)                   \
+  V(Word32Or)                    \
+  V(Word32Xor)                   \
+  V(Word32Shl)                   \
+  V(Word32Shr)                   \
+  V(Word32Sar)                   \
+  V(Word32Ror)                   \
+  V(Int32Add)                    \
+  V(Int32AddWithOverflow)        \
+  V(Int32Sub)                    \
+  V(Int32SubWithOverflow)        \
+  V(Int32Mul)                    \
+  V(Int32MulWithOverflow)        \
+  V(Int32MulHigh)                \
+  V(Int32Div)                    \
+  V(Int32Mod)                    \
+  V(Uint32Div)                   \
+  V(Uint32Mod)                   \
+  V(Uint32MulHigh)
+
+#define MACHINE_BINOP_64_LIST(V) \
+  V(Word64And)                   \
+  V(Word64Or)                    \
+  V(Word64Xor)                   \
+  V(Word64Shl)                   \
+  V(Word64Shr)                   \
+  V(Word64Sar)                   \
+  V(Word64Ror)                   \
+  V(Int64Add)                    \
+  V(Int64AddWithOverflow)        \
+  V(Int64Sub)                    \
+  V(Int64SubWithOverflow)        \
+  V(Int64Mul)                    \
+  V(Int64Div)                    \
+  V(Int64Mod)                    \
+  V(Uint64Div)                   \
+  V(Uint64Mod)
+
+#define MACHINE_FLOAT32_UNOP_LIST(V) \
+  V(Float32Abs)                      \
+  V(Float32Neg)                      \
+  V(Float32RoundDown)                \
+  V(Float32RoundTiesEven)            \
+  V(Float32RoundTruncate)            \
+  V(Float32RoundUp)                  \
+  V(Float32Sqrt)
+
+#define MACHINE_FLOAT32_BINOP_LIST(V) \
+  V(Float32Add)                       \
+  V(Float32Sub)                       \
+  V(Float32Mul)                       \
+  V(Float32Div)                       \
+  V(Float32Max)                       \
+  V(Float32Min)
+
+#define MACHINE_FLOAT64_UNOP_LIST(V) \
+  V(Float64Abs)                      \
+  V(Float64Acos)                     \
+  V(Float64Acosh)                    \
+  V(Float64Asin)                     \
+  V(Float64Asinh)                    \
+  V(Float64Atan)                     \
+  V(Float64Atanh)                    \
+  V(Float64Cbrt)                     \
+  V(Float64Cos)                      \
+  V(Float64Cosh)                     \
+  V(Float64Exp)                      \
+  V(Float64Expm1)                    \
+  V(Float64Log)                      \
+  V(Float64Log1p)                    \
+  V(Float64Log10)                    \
+  V(Float64Log2)                     \
+  V(Float64Neg)                      \
+  V(Float64RoundDown)                \
+  V(Float64RoundTiesAway)            \
+  V(Float64RoundTiesEven)            \
+  V(Float64RoundTruncate)            \
+  V(Float64RoundUp)                  \
+  V(Float64Sin)                      \
+  V(Float64Sinh)                     \
+  V(Float64Sqrt)                     \
+  V(Float64Tan)                      \
+  V(Float64Tanh)
+
+#define MACHINE_FLOAT64_BINOP_LIST(V) \
+  V(Float64Atan2)                     \
+  V(Float64Max)                       \
+  V(Float64Min)                       \
+  V(Float64Add)                       \
+  V(Float64Sub)                       \
+  V(Float64Mul)                       \
+  V(Float64Div)                       \
+  V(Float64Mod)                       \
+  V(Float64Pow)
+
 #define MACHINE_OP_LIST(V)      \
+  MACHINE_UNOP_32_LIST(V)       \
+  MACHINE_BINOP_32_LIST(V)      \
+  MACHINE_BINOP_64_LIST(V)      \
   MACHINE_COMPARE_BINOP_LIST(V) \
+  MACHINE_FLOAT32_BINOP_LIST(V) \
+  MACHINE_FLOAT32_UNOP_LIST(V)  \
+  MACHINE_FLOAT64_BINOP_LIST(V) \
+  MACHINE_FLOAT64_UNOP_LIST(V)  \
   V(DebugBreak)                 \
   V(Comment)                    \
   V(Load)                       \
   V(Store)                      \
   V(StackSlot)                  \
-  V(Word32And)                  \
-  V(Word32Or)                   \
-  V(Word32Xor)                  \
-  V(Word32Shl)                  \
-  V(Word32Shr)                  \
-  V(Word32Sar)                  \
-  V(Word32Ror)                  \
-  V(Word32Clz)                  \
-  V(Word32Ctz)                  \
-  V(Word32ReverseBits)          \
-  V(Word32ReverseBytes)         \
   V(Word32Popcnt)               \
   V(Word64Popcnt)               \
-  V(Word64And)                  \
-  V(Word64Or)                   \
-  V(Word64Xor)                  \
-  V(Word64Shl)                  \
-  V(Word64Shr)                  \
-  V(Word64Sar)                  \
-  V(Word64Ror)                  \
   V(Word64Clz)                  \
   V(Word64Ctz)                  \
   V(Word64ReverseBits)          \
   V(Word64ReverseBytes)         \
-  V(Int32Add)                   \
-  V(Int32AddWithOverflow)       \
-  V(Int32Sub)                   \
-  V(Int32SubWithOverflow)       \
-  V(Int32Mul)                   \
-  V(Int32MulWithOverflow)       \
-  V(Int32MulHigh)               \
-  V(Int32Div)                   \
-  V(Int32Mod)                   \
-  V(Uint32Div)                  \
-  V(Uint32Mod)                  \
-  V(Uint32MulHigh)              \
-  V(Int64Add)                   \
-  V(Int64AddWithOverflow)       \
-  V(Int64Sub)                   \
-  V(Int64SubWithOverflow)       \
-  V(Int64Mul)                   \
-  V(Int64Div)                   \
-  V(Int64Mod)                   \
-  V(Uint64Div)                  \
-  V(Uint64Mod)                  \
+  V(BitcastTaggedToWord)        \
   V(BitcastWordToTagged)        \
+  V(BitcastWordToTaggedSigned)  \
   V(TruncateFloat64ToWord32)    \
   V(ChangeFloat32ToFloat64)     \
   V(ChangeFloat64ToInt32)       \
@@ -407,12 +485,6 @@
   V(ChangeInt32ToInt64)         \
   V(ChangeUint32ToFloat64)      \
   V(ChangeUint32ToUint64)       \
-  V(ImpossibleToBit)            \
-  V(ImpossibleToWord32)         \
-  V(ImpossibleToWord64)         \
-  V(ImpossibleToFloat32)        \
-  V(ImpossibleToFloat64)        \
-  V(ImpossibleToTagged)         \
   V(TruncateFloat64ToFloat32)   \
   V(TruncateInt64ToInt32)       \
   V(RoundFloat64ToInt32)        \
@@ -426,55 +498,6 @@
   V(BitcastFloat64ToInt64)      \
   V(BitcastInt32ToFloat32)      \
   V(BitcastInt64ToFloat64)      \
-  V(Float32Add)                 \
-  V(Float32Sub)                 \
-  V(Float32Neg)                 \
-  V(Float32Mul)                 \
-  V(Float32Div)                 \
-  V(Float32Abs)                 \
-  V(Float32Sqrt)                \
-  V(Float32RoundDown)           \
-  V(Float32Max)                 \
-  V(Float32Min)                 \
-  V(Float64Add)                 \
-  V(Float64Sub)                 \
-  V(Float64Neg)                 \
-  V(Float64Mul)                 \
-  V(Float64Div)                 \
-  V(Float64Mod)                 \
-  V(Float64Max)                 \
-  V(Float64Min)                 \
-  V(Float64Abs)                 \
-  V(Float64Acos)                \
-  V(Float64Acosh)               \
-  V(Float64Asin)                \
-  V(Float64Asinh)               \
-  V(Float64Atan)                \
-  V(Float64Atanh)               \
-  V(Float64Atan2)               \
-  V(Float64Cbrt)                \
-  V(Float64Cos)                 \
-  V(Float64Cosh)                \
-  V(Float64Exp)                 \
-  V(Float64Expm1)               \
-  V(Float64Log)                 \
-  V(Float64Log1p)               \
-  V(Float64Log10)               \
-  V(Float64Log2)                \
-  V(Float64Pow)                 \
-  V(Float64Sin)                 \
-  V(Float64Sinh)                \
-  V(Float64Sqrt)                \
-  V(Float64Tan)                 \
-  V(Float64Tanh)                \
-  V(Float64RoundDown)           \
-  V(Float32RoundUp)             \
-  V(Float64RoundUp)             \
-  V(Float32RoundTruncate)       \
-  V(Float64RoundTruncate)       \
-  V(Float64RoundTiesAway)       \
-  V(Float32RoundTiesEven)       \
-  V(Float64RoundTiesEven)       \
   V(Float64ExtractLowWord32)    \
   V(Float64ExtractHighWord32)   \
   V(Float64InsertLowWord32)     \
@@ -492,6 +515,7 @@
   V(Word32PairShl)              \
   V(Word32PairShr)              \
   V(Word32PairSar)              \
+  V(ProtectedLoad)              \
   V(AtomicLoad)                 \
   V(AtomicStore)                \
   V(UnsafePointerAdd)
diff --git a/src/compiler/operation-typer.cc b/src/compiler/operation-typer.cc
index f3ef778..4295a22 100644
--- a/src/compiler/operation-typer.cc
+++ b/src/compiler/operation-typer.cc
@@ -5,10 +5,10 @@
 #include "src/compiler/operation-typer.h"
 
 #include "src/compiler/common-operator.h"
+#include "src/compiler/type-cache.h"
+#include "src/compiler/types.h"
 #include "src/factory.h"
 #include "src/isolate.h"
-#include "src/type-cache.h"
-#include "src/types.h"
 
 #include "src/objects-inl.h"
 
@@ -460,6 +460,16 @@
   return cache_.kIntegerOrMinusZeroOrNaN;
 }
 
+Type* OperationTyper::NumberToBoolean(Type* type) {
+  DCHECK(type->Is(Type::Number()));
+  if (!type->IsInhabited()) return Type::None();
+  if (type->Is(cache_.kZeroish)) return singleton_false_;
+  if (type->Is(Type::PlainNumber()) && (type->Max() < 0 || 0 < type->Min())) {
+    return singleton_true_;  // Ruled out NaN, -0 and +0.
+  }
+  return Type::Boolean();
+}
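
The typing rule above mirrors JavaScript ToBoolean on numbers: only NaN and
+/-0 are falsy. As a scalar sketch (not V8 code):

  #include <cassert>
  #include <cmath>

  // false exactly for NaN, +0 and -0; true for every other number,
  // including infinities.
  bool NumberToBoolean(double x) { return !(std::isnan(x) || x == 0.0); }

  int main() {
    assert(!NumberToBoolean(0.0));
    assert(!NumberToBoolean(-0.0));
    assert(!NumberToBoolean(std::nan("")));
    assert(NumberToBoolean(-1.5));
    assert(NumberToBoolean(HUGE_VAL));  // +infinity
    return 0;
  }
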
+
 Type* OperationTyper::NumberToInt32(Type* type) {
   DCHECK(type->Is(Type::Number()));
 
diff --git a/src/compiler/operation-typer.h b/src/compiler/operation-typer.h
index dcfe0c4..09f063c 100644
--- a/src/compiler/operation-typer.h
+++ b/src/compiler/operation-typer.h
@@ -11,15 +11,17 @@
 namespace v8 {
 namespace internal {
 
+// Forward declarations.
 class Isolate;
 class RangeType;
-class Type;
-class TypeCache;
 class Zone;
 
 namespace compiler {
 
+// Forward declarations.
 class Operator;
+class Type;
+class TypeCache;
 
 class OperationTyper {
  public:
diff --git a/src/compiler/operator-properties.cc b/src/compiler/operator-properties.cc
index 68d884d..0a9e644 100644
--- a/src/compiler/operator-properties.cc
+++ b/src/compiler/operator-properties.cc
@@ -61,6 +61,7 @@
     case IrOpcode::kJSLessThanOrEqual:
     case IrOpcode::kJSHasProperty:
     case IrOpcode::kJSInstanceOf:
+    case IrOpcode::kJSOrdinaryHasInstance:
 
     // Object operations
     case IrOpcode::kJSCreate:
diff --git a/src/compiler/operator.h b/src/compiler/operator.h
index b6ec2c6..8e3a9d1 100644
--- a/src/compiler/operator.h
+++ b/src/compiler/operator.h
@@ -10,7 +10,7 @@
 #include "src/base/flags.h"
 #include "src/base/functional.h"
 #include "src/handles.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/osr.cc b/src/compiler/osr.cc
index 187e612..6d61aff 100644
--- a/src/compiler/osr.cc
+++ b/src/compiler/osr.cc
@@ -2,22 +2,23 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include "src/compiler/osr.h"
 #include "src/ast/scopes.h"
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/compiler/all-nodes.h"
-#include "src/compiler/common-operator.h"
 #include "src/compiler/common-operator-reducer.h"
+#include "src/compiler/common-operator.h"
 #include "src/compiler/dead-code-elimination.h"
 #include "src/compiler/frame.h"
-#include "src/compiler/graph.h"
 #include "src/compiler/graph-reducer.h"
 #include "src/compiler/graph-trimmer.h"
 #include "src/compiler/graph-visualizer.h"
+#include "src/compiler/graph.h"
 #include "src/compiler/js-graph.h"
 #include "src/compiler/loop-analysis.h"
-#include "src/compiler/node.h"
 #include "src/compiler/node-marker.h"
-#include "src/compiler/osr.h"
+#include "src/compiler/node.h"
+#include "src/objects-inl.h"
 
 namespace v8 {
 namespace internal {
@@ -270,11 +271,8 @@
     }
   }
 
-  if (osr_loop_entry == nullptr) {
-    // No OSR entry found, do nothing.
-    CHECK(osr_normal_entry);
-    return;
-  }
+  CHECK_NOT_NULL(osr_normal_entry);  // Should have found the OSR normal entry.
+  CHECK_NOT_NULL(osr_loop_entry);    // Should have found the OSR loop entry.
 
   for (Node* use : osr_loop_entry->uses()) {
     if (use->opcode() == IrOpcode::kLoop) {
diff --git a/src/compiler/osr.h b/src/compiler/osr.h
index 89773f0..1f562c5 100644
--- a/src/compiler/osr.h
+++ b/src/compiler/osr.h
@@ -5,7 +5,7 @@
 #ifndef V8_COMPILER_OSR_H_
 #define V8_COMPILER_OSR_H_
 
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 // TurboFan structures OSR graphs in a way that separates almost all phases of
 // compilation from OSR implementation details. This is accomplished with
diff --git a/src/compiler/pipeline-statistics.cc b/src/compiler/pipeline-statistics.cc
index 5b97abe..a032c3d 100644
--- a/src/compiler/pipeline-statistics.cc
+++ b/src/compiler/pipeline-statistics.cc
@@ -4,9 +4,10 @@
 
 #include <memory>
 
-#include "src/compiler.h"
+#include "src/compilation-info.h"
 #include "src/compiler/pipeline-statistics.h"
 #include "src/compiler/zone-pool.h"
+#include "src/isolate.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/pipeline.cc b/src/compiler/pipeline.cc
index ba7aa96..805b687 100644
--- a/src/compiler/pipeline.cc
+++ b/src/compiler/pipeline.cc
@@ -10,6 +10,8 @@
 
 #include "src/base/adapters.h"
 #include "src/base/platform/elapsed-timer.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/compiler/ast-graph-builder.h"
 #include "src/compiler/ast-loop-assignment-analyzer.h"
 #include "src/compiler/basic-block-instrumentor.h"
@@ -46,6 +48,7 @@
 #include "src/compiler/loop-analysis.h"
 #include "src/compiler/loop-peeling.h"
 #include "src/compiler/loop-variable-optimizer.h"
+#include "src/compiler/machine-graph-verifier.h"
 #include "src/compiler/machine-operator-reducer.h"
 #include "src/compiler/memory-optimizer.h"
 #include "src/compiler/move-optimizer.h"
@@ -63,6 +66,7 @@
 #include "src/compiler/store-store-elimination.h"
 #include "src/compiler/tail-call-optimization.h"
 #include "src/compiler/type-hint-analyzer.h"
+#include "src/compiler/typed-optimization.h"
 #include "src/compiler/typer.h"
 #include "src/compiler/value-numbering-reducer.h"
 #include "src/compiler/verifier.h"
@@ -426,7 +430,8 @@
   }
   if (FLAG_trace_turbo_graph || FLAG_trace_turbo_scheduler) {
     AllowHandleDereference allow_deref;
-    OFStream os(stdout);
+    CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
+    OFStream os(tracing_scope.file());
     os << "-- Schedule --------------------------------------\n" << *schedule;
   }
 }
@@ -439,14 +444,14 @@
                                LoopAssignmentAnalysis* loop_assignment,
                                TypeHintAnalysis* type_hint_analysis,
                                SourcePositionTable* source_positions)
-      : AstGraphBuilder(local_zone, info, jsgraph, loop_assignment,
+      : AstGraphBuilder(local_zone, info, jsgraph, 1.0f, loop_assignment,
                         type_hint_analysis),
         source_positions_(source_positions),
         start_position_(info->shared_info()->start_position()) {}
 
-  bool CreateGraph(bool stack_check) {
+  bool CreateGraph() {
     SourcePositionTable::Scope pos_scope(source_positions_, start_position_);
-    return AstGraphBuilder::CreateGraph(stack_check);
+    return AstGraphBuilder::CreateGraph();
   }
 
 #define DEF_VISIT(type)                                               \
@@ -562,7 +567,7 @@
   PipelineCompilationJob(Isolate* isolate, Handle<JSFunction> function)
       // Note that the CompilationInfo is not initialized at the time we pass it
       // to the CompilationJob constructor, but it is not dereferenced there.
-      : CompilationJob(&info_, "TurboFan"),
+      : CompilationJob(isolate, &info_, "TurboFan"),
         zone_(isolate->allocator()),
         zone_pool_(isolate->allocator()),
         parse_info_(&zone_, function),
@@ -601,6 +606,9 @@
     if (FLAG_native_context_specialization) {
       info()->MarkAsNativeContextSpecializing();
     }
+    if (FLAG_turbo_inlining) {
+      info()->MarkAsInliningEnabled();
+    }
   }
   if (!info()->shared_info()->asm_function() || FLAG_turbo_asm_deoptimization) {
     info()->MarkAsDeoptimizationEnabled();
@@ -615,14 +623,6 @@
     if (!Compiler::EnsureDeoptimizationSupport(info())) return FAILED;
   }
 
-  // TODO(mstarzinger): Hack to ensure that certain call descriptors are
-  // initialized on the main thread, since it is needed off-thread by the
-  // effect control linearizer.
-  CodeFactory::CopyFastSmiOrObjectElements(info()->isolate());
-  CodeFactory::GrowFastDoubleElements(info()->isolate());
-  CodeFactory::GrowFastSmiOrObjectElements(info()->isolate());
-  CodeFactory::ToNumber(info()->isolate());
-
   linkage_ = new (&zone_) Linkage(Linkage::ComputeIncoming(&zone_, info()));
 
   if (!pipeline_.CreateGraph()) {
@@ -660,7 +660,8 @@
   explicit PipelineWasmCompilationJob(CompilationInfo* info, Graph* graph,
                                       CallDescriptor* descriptor,
                                       SourcePositionTable* source_positions)
-      : CompilationJob(info, "TurboFan", State::kReadyToExecute),
+      : CompilationJob(info->isolate(), info, "TurboFan",
+                       State::kReadyToExecute),
         zone_pool_(info->isolate()->allocator()),
         data_(&zone_pool_, info, graph, source_positions),
         pipeline_(&data_),
@@ -756,18 +757,17 @@
   static const char* phase_name() { return "graph builder"; }
 
   void Run(PipelineData* data, Zone* temp_zone) {
-    bool stack_check = !data->info()->IsStub();
     bool succeeded = false;
 
     if (data->info()->is_optimizing_from_bytecode()) {
       BytecodeGraphBuilder graph_builder(temp_zone, data->info(),
-                                         data->jsgraph());
+                                         data->jsgraph(), 1.0f);
       succeeded = graph_builder.CreateGraph();
     } else {
       AstGraphBuilderWithPositions graph_builder(
           temp_zone, data->info(), data->jsgraph(), data->loop_assignment(),
           data->type_hint_analysis(), data->source_positions());
-      succeeded = graph_builder.CreateGraph(stack_check);
+      succeeded = graph_builder.CreateGraph();
     }
 
     if (!succeeded) {
@@ -786,15 +786,19 @@
                                               data->common());
     CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                          data->common(), data->machine());
-    JSCallReducer call_reducer(data->jsgraph(),
-                               data->info()->is_deoptimization_enabled()
-                                   ? JSCallReducer::kDeoptimizationEnabled
-                                   : JSCallReducer::kNoFlags,
-                               data->native_context());
+    JSCallReducer::Flags call_reducer_flags = JSCallReducer::kNoFlags;
+    if (data->info()->is_bailout_on_uninitialized()) {
+      call_reducer_flags |= JSCallReducer::kBailoutOnUninitialized;
+    }
+    if (data->info()->is_deoptimization_enabled()) {
+      call_reducer_flags |= JSCallReducer::kDeoptimizationEnabled;
+    }
+    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(),
+                               call_reducer_flags, data->native_context());
     JSContextSpecialization context_specialization(
         &graph_reducer, data->jsgraph(),
         data->info()->is_function_context_specializing()
-            ? data->info()->context()
+            ? handle(data->info()->context())
             : MaybeHandle<Context>());
     JSFrameSpecialization frame_specialization(data->info()->osr_frame(),
                                                data->jsgraph());
@@ -837,9 +841,7 @@
     AddReducer(data, &graph_reducer, &context_specialization);
     AddReducer(data, &graph_reducer, &intrinsic_lowering);
     AddReducer(data, &graph_reducer, &call_reducer);
-    if (!data->info()->is_optimizing_from_bytecode()) {
-      AddReducer(data, &graph_reducer, &inlining);
-    }
+    AddReducer(data, &graph_reducer, &inlining);
     graph_reducer.ReduceGraph();
   }
 };
@@ -913,7 +915,7 @@
             : MaybeHandle<LiteralsArray>();
     JSCreateLowering create_lowering(
         &graph_reducer, data->info()->dependencies(), data->jsgraph(),
-        literals_array, temp_zone);
+        literals_array, data->native_context(), temp_zone);
     JSTypedLowering::Flags typed_lowering_flags = JSTypedLowering::kNoFlags;
     if (data->info()->is_deoptimization_enabled()) {
       typed_lowering_flags |= JSTypedLowering::kDeoptimizationEnabled;
@@ -921,6 +923,12 @@
     JSTypedLowering typed_lowering(&graph_reducer, data->info()->dependencies(),
                                    typed_lowering_flags, data->jsgraph(),
                                    temp_zone);
+    TypedOptimization typed_optimization(
+        &graph_reducer, data->info()->dependencies(),
+        data->info()->is_deoptimization_enabled()
+            ? TypedOptimization::kDeoptimizationEnabled
+            : TypedOptimization::kNoFlags,
+        data->jsgraph());
     SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
     CheckpointElimination checkpoint_elimination(&graph_reducer);
     CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
@@ -930,6 +938,7 @@
     if (data->info()->is_deoptimization_enabled()) {
       AddReducer(data, &graph_reducer, &create_lowering);
     }
+    AddReducer(data, &graph_reducer, &typed_optimization);
     AddReducer(data, &graph_reducer, &typed_lowering);
     AddReducer(data, &graph_reducer, &simple_reducer);
     AddReducer(data, &graph_reducer, &checkpoint_elimination);
@@ -1065,14 +1074,13 @@
 };
 
 // The store-store elimination greatly benefits from doing a common operator
-// reducer just before it, to eliminate conditional deopts with a constant
-// condition.
+// reducer and dead code elimination just before it, to eliminate conditional
+// deopts with a constant condition.
 
 struct DeadCodeEliminationPhase {
-  static const char* phase_name() { return "common operator reducer"; }
+  static const char* phase_name() { return "dead code elimination"; }
 
   void Run(PipelineData* data, Zone* temp_zone) {
-    // Run the common operator reducer.
     JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
     DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                               data->common());
@@ -1225,8 +1233,17 @@
         data->schedule(), data->source_positions(), data->frame(),
         data->info()->is_source_positions_enabled()
             ? InstructionSelector::kAllSourcePositions
-            : InstructionSelector::kCallSourcePositions);
-    selector.SelectInstructions();
+            : InstructionSelector::kCallSourcePositions,
+        InstructionSelector::SupportedFeatures(),
+        FLAG_turbo_instruction_scheduling
+            ? InstructionSelector::kEnableScheduling
+            : InstructionSelector::kDisableScheduling,
+        data->info()->will_serialize()
+            ? InstructionSelector::kEnableSerialization
+            : InstructionSelector::kDisableSerialization);
+    if (!selector.SelectInstructions()) {
+      data->set_compilation_failed();
+    }
   }
 };
 
@@ -1426,7 +1443,8 @@
 
     if (FLAG_trace_turbo_graph) {  // Simple textual RPO.
       AllowHandleDereference allow_deref;
-      OFStream os(stdout);
+      CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
+      OFStream os(tracing_scope.file());
       os << "-- Graph after " << phase << " -- " << std::endl;
       os << AsRPO(*graph);
     }
@@ -1459,7 +1477,8 @@
   data->BeginPhaseKind("graph creation");
 
   if (FLAG_trace_turbo) {
-    OFStream os(stdout);
+    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
+    OFStream os(tracing_scope.file());
     os << "---------------------------------------------------\n"
        << "Begin compiling method " << info()->GetDebugName().get()
        << " using Turbofan" << std::endl;
@@ -1585,7 +1604,7 @@
   RunPrintAndVerify("Effect and control linearized", true);
 
   Run<DeadCodeEliminationPhase>();
-  RunPrintAndVerify("Common operator reducer", true);
+  RunPrintAndVerify("Dead code elimination", true);
 
   if (FLAG_turbo_store_elimination) {
     Run<StoreStoreEliminationPhase>();
@@ -1623,6 +1642,7 @@
                                                Code::Flags flags,
                                                const char* debug_name) {
   CompilationInfo info(CStrVector(debug_name), isolate, graph->zone(), flags);
+  if (isolate->serializer_enabled()) info.PrepareForSerializing();
 
   // Construct a pipeline for scheduling and code generation.
   ZonePool zone_pool(isolate->allocator());
@@ -1717,7 +1737,7 @@
                                            InstructionSequence* sequence,
                                            bool run_verifier) {
   CompilationInfo info(ArrayVector("testing"), sequence->isolate(),
-                       sequence->zone());
+                       sequence->zone(), Code::ComputeFlags(Code::STUB));
   ZonePool zone_pool(sequence->isolate()->allocator());
   PipelineData data(&zone_pool, &info, sequence);
   PipelineImpl pipeline(&data);
@@ -1740,11 +1760,22 @@
         info(), data->graph(), data->schedule()));
   }
 
+  if (FLAG_turbo_verify_machine_graph) {
+    Zone temp_zone(data->isolate()->allocator());
+    MachineGraphVerifier::Run(data->graph(), data->schedule(), linkage,
+                              &temp_zone);
+  }
+
   data->InitializeInstructionSequence(call_descriptor);
 
   data->InitializeFrameData(call_descriptor);
   // Select and schedule instructions covering the scheduled graph.
   Run<InstructionSelectionPhase>(linkage);
+  if (data->compilation_failed()) {
+    info()->AbortOptimization(kCodeGenerationFailed);
+    data->EndPhaseKind();
+    return false;
+  }
 
   if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
     AllowHandleDereference allow_deref;
@@ -1825,7 +1856,8 @@
     json_of << data->source_position_output();
     json_of << "}";
 
-    OFStream os(stdout);
+    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
+    OFStream os(tracing_scope.file());
     os << "---------------------------------------------------\n"
        << "Finished compiling method " << info()->GetDebugName().get()
        << " using Turbofan" << std::endl;
@@ -1876,7 +1908,8 @@
   Run<BuildLiveRangesPhase>();
   if (FLAG_trace_turbo_graph) {
     AllowHandleDereference allow_deref;
-    OFStream os(stdout);
+    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
+    OFStream os(tracing_scope.file());
     os << "----- Instruction sequence before register allocation -----\n"
        << PrintableInstructionSequence({config, data->sequence()});
   }
@@ -1911,7 +1944,8 @@
 
   if (FLAG_trace_turbo_graph) {
     AllowHandleDereference allow_deref;
-    OFStream os(stdout);
+    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
+    OFStream os(tracing_scope.file());
     os << "----- Instruction sequence after register allocation -----\n"
        << PrintableInstructionSequence({config, data->sequence()});
   }
diff --git a/src/compiler/ppc/code-generator-ppc.cc b/src/compiler/ppc/code-generator-ppc.cc
index 9db36b4..f8f3099 100644
--- a/src/compiler/ppc/code-generator-ppc.cc
+++ b/src/compiler/ppc/code-generator-ppc.cc
@@ -4,7 +4,7 @@
 
 #include "src/compiler/code-generator.h"
 
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -1077,9 +1077,6 @@
     case kArchDebugBreak:
       __ stop("kArchDebugBreak");
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchNop:
     case kArchThrowTerminator:
       // don't emit code for nops.
@@ -1090,8 +1087,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -2071,7 +2068,8 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   // TODO(turbofan): We should be able to generate better code by sharing the
@@ -2080,7 +2078,7 @@
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
diff --git a/src/compiler/ppc/instruction-selector-ppc.cc b/src/compiler/ppc/instruction-selector-ppc.cc
index bad8ded..a2eb7b8 100644
--- a/src/compiler/ppc/instruction-selector-ppc.cc
+++ b/src/compiler/ppc/instruction-selector-ppc.cc
@@ -229,6 +229,10 @@
   }
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   PPCOperandGenerator g(this);
diff --git a/src/compiler/raw-machine-assembler.cc b/src/compiler/raw-machine-assembler.cc
index ae40f55..cdf45ab 100644
--- a/src/compiler/raw-machine-assembler.cc
+++ b/src/compiler/raw-machine-assembler.cc
@@ -402,6 +402,30 @@
   return tail_call;
 }
 
+Node* RawMachineAssembler::TailCallRuntime6(Runtime::FunctionId function,
+                                            Node* arg1, Node* arg2, Node* arg3,
+                                            Node* arg4, Node* arg5, Node* arg6,
+                                            Node* context) {
+  const int kArity = 6;
+  CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
+      zone(), function, kArity, Operator::kNoProperties,
+      CallDescriptor::kSupportsTailCalls);
+  int return_count = static_cast<int>(desc->ReturnCount());
+
+  Node* centry = HeapConstant(CEntryStub(isolate(), return_count).GetCode());
+  Node* ref = AddNode(
+      common()->ExternalConstant(ExternalReference(function, isolate())));
+  Node* arity = Int32Constant(kArity);
+
+  Node* nodes[] = {centry, arg1, arg2, arg3,  arg4,
+                   arg5,   arg6, ref,  arity, context};
+  Node* tail_call = MakeNode(common()->TailCall(desc), arraysize(nodes), nodes);
+
+  schedule()->AddTailCall(CurrentBlock(), tail_call);
+  current_block_ = nullptr;
+  return tail_call;
+}
+
 Node* RawMachineAssembler::CallCFunction0(MachineType return_type,
                                           Node* function) {
   MachineSignature::Builder builder(zone(), 1, 0);
diff --git a/src/compiler/raw-machine-assembler.h b/src/compiler/raw-machine-assembler.h
index c7d4236..cdd368c 100644
--- a/src/compiler/raw-machine-assembler.h
+++ b/src/compiler/raw-machine-assembler.h
@@ -135,6 +135,7 @@
     return AddNode(machine()->Store(StoreRepresentation(rep, write_barrier)),
                    base, index, value);
   }
+  Node* Retain(Node* value) { return AddNode(common()->Retain(), value); }
 
   // Unaligned memory operations
   Node* UnalignedLoad(MachineType rep, Node* base) {
@@ -531,9 +532,15 @@
   }
 
   // Conversions.
+  Node* BitcastTaggedToWord(Node* a) {
+    return AddNode(machine()->BitcastTaggedToWord(), a);
+  }
   Node* BitcastWordToTagged(Node* a) {
     return AddNode(machine()->BitcastWordToTagged(), a);
   }
+  Node* BitcastWordToTaggedSigned(Node* a) {
+    return AddNode(machine()->BitcastWordToTaggedSigned(), a);
+  }
   Node* TruncateFloat64ToWord32(Node* a) {
     return AddNode(machine()->TruncateFloat64ToWord32(), a);
   }
@@ -659,6 +666,9 @@
   Node* Float64InsertHighWord32(Node* a, Node* b) {
     return AddNode(machine()->Float64InsertHighWord32(), a, b);
   }
+  Node* Float64SilenceNaN(Node* a) {
+    return AddNode(machine()->Float64SilenceNaN(), a);
+  }
 
   // Stack operations.
   Node* LoadStackPointer() { return AddNode(machine()->LoadStackPointer()); }
@@ -744,6 +754,10 @@
   // Tail call to a runtime function with five arguments.
   Node* TailCallRuntime5(Runtime::FunctionId function, Node* arg1, Node* arg2,
                          Node* arg3, Node* arg4, Node* arg5, Node* context);
+  // Tail call to a runtime function with six arguments.
+  Node* TailCallRuntime6(Runtime::FunctionId function, Node* arg1, Node* arg2,
+                         Node* arg3, Node* arg4, Node* arg5, Node* arg6,
+                         Node* context);
 
   // ===========================================================================
   // The following utility methods deal with control flow, hence might switch
diff --git a/src/compiler/redundancy-elimination.cc b/src/compiler/redundancy-elimination.cc
index c671fc2..6dcf2bf 100644
--- a/src/compiler/redundancy-elimination.cc
+++ b/src/compiler/redundancy-elimination.cc
@@ -19,12 +19,12 @@
   switch (node->opcode()) {
     case IrOpcode::kCheckBounds:
     case IrOpcode::kCheckFloat64Hole:
+    case IrOpcode::kCheckHeapObject:
     case IrOpcode::kCheckIf:
     case IrOpcode::kCheckNumber:
+    case IrOpcode::kCheckSmi:
     case IrOpcode::kCheckString:
     case IrOpcode::kCheckTaggedHole:
-    case IrOpcode::kCheckTaggedPointer:
-    case IrOpcode::kCheckTaggedSigned:
     case IrOpcode::kCheckedFloat64ToInt32:
     case IrOpcode::kCheckedInt32Add:
     case IrOpcode::kCheckedInt32Sub:
diff --git a/src/compiler/register-allocator-verifier.h b/src/compiler/register-allocator-verifier.h
index 2db8af5..9a605d6 100644
--- a/src/compiler/register-allocator-verifier.h
+++ b/src/compiler/register-allocator-verifier.h
@@ -5,7 +5,7 @@
 #ifndef V8_REGISTER_ALLOCATOR_VERIFIER_H_
 #define V8_REGISTER_ALLOCATOR_VERIFIER_H_
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/register-allocator.cc b/src/compiler/register-allocator.cc
index 5b55b02..efcdcb4 100644
--- a/src/compiler/register-allocator.cc
+++ b/src/compiler/register-allocator.cc
@@ -1041,6 +1041,8 @@
 
   TopLevel()->UpdateParentForAllChildren(TopLevel());
   TopLevel()->UpdateSpillRangePostMerge(other);
+  TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
+                               other->has_slot_use());
 
 #if DEBUG
   Verify();
@@ -1113,9 +1115,9 @@
       first_interval_ = interval;
     } else {
       // Order of instruction's processing (see ProcessInstructions) guarantees
-      // that each new use interval either precedes or intersects with
-      // last added interval.
-      DCHECK(start < first_interval_->end());
+      // that each new use interval either precedes, intersects with or touches
+      // the last added interval.
+      DCHECK(start <= first_interval_->end());
       first_interval_->set_start(Min(start, first_interval_->start()));
       first_interval_->set_end(Max(end, first_interval_->end()));
     }
@@ -2383,17 +2385,15 @@
   return ret;
 }
 
-
-void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand(
-    bool operands_only) {
+void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
   size_t initial_range_count = data()->live_ranges().size();
   for (size_t i = 0; i < initial_range_count; ++i) {
     TopLevelLiveRange* range = data()->live_ranges()[i];
     if (!CanProcessRange(range)) continue;
-    if (range->HasNoSpillType() || (operands_only && range->HasSpillRange())) {
+    if (range->HasNoSpillType() ||
+        (range->HasSpillRange() && !range->has_slot_use())) {
       continue;
     }
-
     LifetimePosition start = range->Start();
     TRACE("Live range %d:%d is defined by a spill operand.\n",
           range->TopLevel()->vreg(), range->relative_id());
@@ -2571,8 +2571,7 @@
   DCHECK(active_live_ranges().empty());
   DCHECK(inactive_live_ranges().empty());
 
-  SplitAndSpillRangesDefinedByMemoryOperand(code()->VirtualRegisterCount() <=
-                                            num_allocatable_registers());
+  SplitAndSpillRangesDefinedByMemoryOperand();
 
   for (TopLevelLiveRange* range : data()->live_ranges()) {
     if (!CanProcessRange(range)) continue;
@@ -3273,8 +3272,8 @@
         spill_operand = range->GetSpillRangeOperand();
       }
       DCHECK(spill_operand.IsStackSlot());
-      DCHECK_EQ(MachineRepresentation::kTagged,
-                AllocatedOperand::cast(spill_operand).representation());
+      DCHECK(CanBeTaggedPointer(
+          AllocatedOperand::cast(spill_operand).representation()));
     }
 
     LiveRange* cur = range;
@@ -3336,8 +3335,8 @@
             safe_point);
         InstructionOperand operand = cur->GetAssignedOperand();
         DCHECK(!operand.IsStackSlot());
-        DCHECK_EQ(MachineRepresentation::kTagged,
-                  AllocatedOperand::cast(operand).representation());
+        DCHECK(CanBeTaggedPointer(
+            AllocatedOperand::cast(operand).representation()));
         map->RecordReference(AllocatedOperand::cast(operand));
       }
     }
diff --git a/src/compiler/register-allocator.h b/src/compiler/register-allocator.h
index 6bfc6c4..2089ea2 100644
--- a/src/compiler/register-allocator.h
+++ b/src/compiler/register-allocator.h
@@ -8,7 +8,7 @@
 #include "src/compiler/instruction.h"
 #include "src/ostreams.h"
 #include "src/register-configuration.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -965,7 +965,7 @@
 
   // Find the optimal split for ranges defined by a memory operand, e.g.
   // constants or function parameters passed on the stack.
-  void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);
+  void SplitAndSpillRangesDefinedByMemoryOperand();
 
   // Split the given range at the given position.
   // If range starts at or after the given position then the
diff --git a/src/compiler/representation-change.cc b/src/compiler/representation-change.cc
index 5427bdb..22d809b 100644
--- a/src/compiler/representation-change.cc
+++ b/src/compiler/representation-change.cc
@@ -142,10 +142,17 @@
 
   switch (use_info.representation()) {
     case MachineRepresentation::kTaggedSigned:
+      DCHECK(use_info.type_check() == TypeCheckKind::kNone ||
+             use_info.type_check() == TypeCheckKind::kSignedSmall);
+      return GetTaggedSignedRepresentationFor(node, output_rep, output_type,
+                                              use_node, use_info);
     case MachineRepresentation::kTaggedPointer:
+      DCHECK(use_info.type_check() == TypeCheckKind::kNone);
+      return GetTaggedPointerRepresentationFor(node, output_rep, output_type);
     case MachineRepresentation::kTagged:
       DCHECK(use_info.type_check() == TypeCheckKind::kNone);
-      return GetTaggedRepresentationFor(node, output_rep, output_type);
+      return GetTaggedRepresentationFor(node, output_rep, output_type,
+                                        use_info.truncation());
     case MachineRepresentation::kFloat32:
       DCHECK(use_info.type_check() == TypeCheckKind::kNone);
       return GetFloat32RepresentationFor(node, output_rep, output_type,
@@ -174,10 +181,132 @@
   return nullptr;
 }
 
-Node* RepresentationChanger::GetTaggedRepresentationFor(
+Node* RepresentationChanger::GetTaggedSignedRepresentationFor(
+    Node* node, MachineRepresentation output_rep, Type* output_type,
+    Node* use_node, UseInfo use_info) {
+  // Eagerly fold representation changes for constants.
+  switch (node->opcode()) {
+    case IrOpcode::kNumberConstant:
+      if (output_type->Is(Type::SignedSmall())) {
+        return node;
+      }
+      break;
+    default:
+      break;
+  }
+  // Select the correct X -> TaggedSigned operator.
+  const Operator* op;
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Constant(0);
+  } else if (IsWord(output_rep)) {
+    if (output_type->Is(Type::Signed31())) {
+      op = simplified()->ChangeInt31ToTaggedSigned();
+    } else if (output_type->Is(Type::Signed32())) {
+      if (SmiValuesAre32Bits()) {
+        op = simplified()->ChangeInt32ToTagged();
+      } else if (use_info.type_check() == TypeCheckKind::kSignedSmall) {
+        op = simplified()->CheckedInt32ToTaggedSigned();
+      } else {
+        return TypeError(node, output_rep, output_type,
+                         MachineRepresentation::kTaggedSigned);
+      }
+    } else if (output_type->Is(Type::Unsigned32()) &&
+               use_info.type_check() == TypeCheckKind::kSignedSmall) {
+      op = simplified()->CheckedUint32ToTaggedSigned();
+    } else {
+      return TypeError(node, output_rep, output_type,
+                       MachineRepresentation::kTaggedSigned);
+    }
+  } else if (output_rep == MachineRepresentation::kFloat64) {
+    if (output_type->Is(Type::Signed31())) {
+      // float64 -> int32 -> tagged signed
+      node = InsertChangeFloat64ToInt32(node);
+      op = simplified()->ChangeInt31ToTaggedSigned();
+    } else if (output_type->Is(Type::Signed32())) {
+      // float64 -> int32 -> tagged signed
+      node = InsertChangeFloat64ToInt32(node);
+      if (SmiValuesAre32Bits()) {
+        op = simplified()->ChangeInt32ToTagged();
+      } else if (use_info.type_check() == TypeCheckKind::kSignedSmall) {
+        op = simplified()->CheckedInt32ToTaggedSigned();
+      } else {
+        return TypeError(node, output_rep, output_type,
+                         MachineRepresentation::kTaggedSigned);
+      }
+    } else if (output_type->Is(Type::Unsigned32()) &&
+               use_info.type_check() == TypeCheckKind::kSignedSmall) {
+      // float64 -> uint32 -> tagged signed
+      node = InsertChangeFloat64ToUint32(node);
+      op = simplified()->CheckedUint32ToTaggedSigned();
+    } else if (use_info.type_check() == TypeCheckKind::kSignedSmall) {
+      op = simplified()->CheckedFloat64ToInt32(
+          output_type->Maybe(Type::MinusZero())
+              ? CheckForMinusZeroMode::kCheckForMinusZero
+              : CheckForMinusZeroMode::kDontCheckForMinusZero);
+      node = InsertConversion(node, op, use_node);
+      if (SmiValuesAre32Bits()) {
+        op = simplified()->ChangeInt32ToTagged();
+      } else {
+        op = simplified()->CheckedInt32ToTaggedSigned();
+      }
+    } else {
+      return TypeError(node, output_rep, output_type,
+                       MachineRepresentation::kTaggedSigned);
+    }
+  } else if (CanBeTaggedPointer(output_rep) &&
+             use_info.type_check() == TypeCheckKind::kSignedSmall) {
+    op = simplified()->CheckedTaggedToTaggedSigned();
+  } else if (output_rep == MachineRepresentation::kBit &&
+             use_info.type_check() == TypeCheckKind::kSignedSmall) {
+    // TODO(turbofan): Consider adding a Bailout operator that just deopts.
+    // Also use that for the MachineRepresentation::kPointer case above.
+    node = InsertChangeBitToTagged(node);
+    op = simplified()->CheckedTaggedToTaggedSigned();
+  } else {
+    return TypeError(node, output_rep, output_type,
+                     MachineRepresentation::kTaggedSigned);
+  }
+  return InsertConversion(node, op, use_node);
+}
+
+Node* RepresentationChanger::GetTaggedPointerRepresentationFor(
     Node* node, MachineRepresentation output_rep, Type* output_type) {
   // Eagerly fold representation changes for constants.
   switch (node->opcode()) {
+    case IrOpcode::kHeapConstant:
+      return node;  // No change necessary.
+    case IrOpcode::kInt32Constant:
+      if (output_type->Is(Type::Boolean())) {
+        return OpParameter<int32_t>(node) == 0 ? jsgraph()->FalseConstant()
+                                               : jsgraph()->TrueConstant();
+      } else {
+        return TypeError(node, output_rep, output_type,
+                         MachineRepresentation::kTaggedPointer);
+      }
+    case IrOpcode::kFloat64Constant:
+    case IrOpcode::kFloat32Constant:
+      return TypeError(node, output_rep, output_type,
+                       MachineRepresentation::kTaggedPointer);
+    default:
+      break;
+  }
+  // Select the correct X -> TaggedPointer operator.
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->TheHoleConstant();
+  }
+  return TypeError(node, output_rep, output_type,
+                   MachineRepresentation::kTaggedPointer);
+}
+
+Node* RepresentationChanger::GetTaggedRepresentationFor(
+    Node* node, MachineRepresentation output_rep, Type* output_type,
+    Truncation truncation) {
+  // Eagerly fold representation changes for constants.
+  switch (node->opcode()) {
     case IrOpcode::kNumberConstant:
     case IrOpcode::kHeapConstant:
       return node;  // No change necessary.
@@ -202,12 +331,17 @@
     default:
       break;
   }
+  if (output_rep == MachineRepresentation::kTaggedSigned ||
+      output_rep == MachineRepresentation::kTaggedPointer) {
+    // This is a no-op.
+    return node;
+  }
   // Select the correct X -> Tagged operator.
   const Operator* op;
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only asisgn this representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    op = machine()->ImpossibleToTagged();
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->TheHoleConstant();
   } else if (output_rep == MachineRepresentation::kBit) {
     if (output_type->Is(Type::Boolean())) {
       op = simplified()->ChangeBitToTagged();
@@ -220,7 +354,10 @@
       op = simplified()->ChangeInt31ToTaggedSigned();
     } else if (output_type->Is(Type::Signed32())) {
       op = simplified()->ChangeInt32ToTagged();
-    } else if (output_type->Is(Type::Unsigned32())) {
+    } else if (output_type->Is(Type::Unsigned32()) ||
+               truncation.IsUsedAsWord32()) {
+      // Either the output is uint32 or the uses only care about the
+      // low 32 bits (so we can pick uint32 safely).
       op = simplified()->ChangeUint32ToTagged();
     } else {
       return TypeError(node, output_rep, output_type,
@@ -229,10 +366,7 @@
   } else if (output_rep ==
              MachineRepresentation::kFloat32) {  // float32 -> float64 -> tagged
     node = InsertChangeFloat32ToFloat64(node);
-    op = simplified()->ChangeFloat64ToTagged(
-        output_type->Maybe(Type::MinusZero())
-            ? CheckForMinusZeroMode::kCheckForMinusZero
-            : CheckForMinusZeroMode::kDontCheckForMinusZero);
+    op = simplified()->ChangeFloat64ToTagged();
   } else if (output_rep == MachineRepresentation::kFloat64) {
     if (output_type->Is(Type::Signed31())) {  // float64 -> int32 -> tagged
       node = InsertChangeFloat64ToInt32(node);
@@ -246,10 +380,7 @@
       node = InsertChangeFloat64ToUint32(node);
       op = simplified()->ChangeUint32ToTagged();
     } else {
-      op = simplified()->ChangeFloat64ToTagged(
-          output_type->Maybe(Type::MinusZero())
-              ? CheckForMinusZeroMode::kCheckForMinusZero
-              : CheckForMinusZeroMode::kDontCheckForMinusZero);
+      op = simplified()->ChangeFloat64ToTagged();
     }
   } else {
     return TypeError(node, output_rep, output_type,
@@ -283,10 +414,10 @@
   }
   // Select the correct X -> Float32 operator.
   const Operator* op = nullptr;
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only use kNone representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    op = machine()->ImpossibleToFloat32();
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Float32Constant(0.0f);
   } else if (IsWord(output_rep)) {
     if (output_type->Is(Type::Signed32())) {
       // int32 -> float64 -> float32
@@ -303,7 +434,8 @@
       node = jsgraph()->graph()->NewNode(op, node);
       op = machine()->TruncateFloat64ToFloat32();
     }
-  } else if (output_rep == MachineRepresentation::kTagged) {
+  } else if (output_rep == MachineRepresentation::kTagged ||
+             output_rep == MachineRepresentation::kTaggedPointer) {
     if (output_type->Is(Type::NumberOrOddball())) {
       // tagged -> float64 -> float32
       if (output_type->Is(Type::Number())) {
@@ -352,10 +484,10 @@
   }
   // Select the correct X -> Float64 operator.
   const Operator* op = nullptr;
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only use kNone representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    op = machine()->ImpossibleToFloat64();
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Float64Constant(0.0);
   } else if (IsWord(output_rep)) {
     if (output_type->Is(Type::Signed32())) {
       op = machine()->ChangeInt32ToFloat64();
@@ -367,11 +499,14 @@
     }
   } else if (output_rep == MachineRepresentation::kBit) {
     op = machine()->ChangeUint32ToFloat64();
-  } else if (output_rep == MachineRepresentation::kTagged) {
+  } else if (output_rep == MachineRepresentation::kTagged ||
+             output_rep == MachineRepresentation::kTaggedSigned ||
+             output_rep == MachineRepresentation::kTaggedPointer) {
     if (output_type->Is(Type::Undefined())) {
       return jsgraph()->Float64Constant(
           std::numeric_limits<double>::quiet_NaN());
-    } else if (output_type->Is(Type::TaggedSigned())) {
+
+    } else if (output_rep == MachineRepresentation::kTaggedSigned) {
       node = InsertChangeTaggedSignedToInt32(node);
       op = machine()->ChangeInt32ToFloat64();
     } else if (output_type->Is(Type::Number())) {
@@ -435,10 +570,10 @@
 
   // Select the correct X -> Word32 operator.
   const Operator* op = nullptr;
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only use kNone representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    op = machine()->ImpossibleToWord32();
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Int32Constant(0);
   } else if (output_rep == MachineRepresentation::kBit) {
     return node;  // Sloppy comparison -> word32
   } else if (output_rep == MachineRepresentation::kFloat64) {
@@ -470,10 +605,19 @@
               ? CheckForMinusZeroMode::kCheckForMinusZero
               : CheckForMinusZeroMode::kDontCheckForMinusZero);
     }
-  } else if (output_rep == MachineRepresentation::kTagged) {
-    if (output_type->Is(Type::TaggedSigned())) {
+  } else if (output_rep == MachineRepresentation::kTaggedSigned) {
+    if (output_type->Is(Type::Signed32())) {
       op = simplified()->ChangeTaggedSignedToInt32();
-    } else if (output_type->Is(Type::Unsigned32())) {
+    } else if (use_info.truncation().IsUsedAsWord32()) {
+      if (use_info.type_check() != TypeCheckKind::kNone) {
+        op = simplified()->CheckedTruncateTaggedToWord32();
+      } else {
+        op = simplified()->TruncateTaggedToWord32();
+      }
+    }
+  } else if (output_rep == MachineRepresentation::kTagged ||
+             output_rep == MachineRepresentation::kTaggedPointer) {
+    if (output_type->Is(Type::Unsigned32())) {
       op = simplified()->ChangeTaggedToUint32();
     } else if (output_type->Is(Type::Signed32())) {
       op = simplified()->ChangeTaggedToInt32();
@@ -541,22 +685,43 @@
   switch (node->opcode()) {
     case IrOpcode::kHeapConstant: {
       Handle<HeapObject> value = OpParameter<Handle<HeapObject>>(node);
-      DCHECK(value.is_identical_to(factory()->true_value()) ||
-             value.is_identical_to(factory()->false_value()));
-      return jsgraph()->Int32Constant(
-          value.is_identical_to(factory()->true_value()) ? 1 : 0);
+      return jsgraph()->Int32Constant(value->BooleanValue() ? 1 : 0);
     }
     default:
       break;
   }
   // Select the correct X -> Bit operator.
   const Operator* op;
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only use kNone representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    op = machine()->ImpossibleToBit();
-  } else if (output_rep == MachineRepresentation::kTagged) {
-    op = simplified()->ChangeTaggedToBit();
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Int32Constant(0);
+  } else if (output_rep == MachineRepresentation::kTagged ||
+             output_rep == MachineRepresentation::kTaggedPointer) {
+    if (output_type->Is(Type::BooleanOrNullOrUndefined())) {
+      // true is the only trueish Oddball.
+      op = simplified()->ChangeTaggedToBit();
+    } else {
+      op = simplified()->TruncateTaggedToBit();
+    }
+  } else if (output_rep == MachineRepresentation::kTaggedSigned) {
+    node = jsgraph()->graph()->NewNode(machine()->WordEqual(), node,
+                                       jsgraph()->ZeroConstant());
+    return jsgraph()->graph()->NewNode(machine()->Word32Equal(), node,
+                                       jsgraph()->Int32Constant(0));
+  } else if (IsWord(output_rep)) {
+    node = jsgraph()->graph()->NewNode(machine()->Word32Equal(), node,
+                                       jsgraph()->Int32Constant(0));
+    return jsgraph()->graph()->NewNode(machine()->Word32Equal(), node,
+                                       jsgraph()->Int32Constant(0));
+  } else if (output_rep == MachineRepresentation::kFloat32) {
+    node = jsgraph()->graph()->NewNode(machine()->Float32Abs(), node);
+    return jsgraph()->graph()->NewNode(machine()->Float32LessThan(),
+                                       jsgraph()->Float32Constant(0.0), node);
+  } else if (output_rep == MachineRepresentation::kFloat64) {
+    node = jsgraph()->graph()->NewNode(machine()->Float64Abs(), node);
+    return jsgraph()->graph()->NewNode(machine()->Float64LessThan(),
+                                       jsgraph()->Float64Constant(0.0), node);
   } else {
     return TypeError(node, output_rep, output_type,
                      MachineRepresentation::kBit);
@@ -566,10 +731,10 @@
 
 Node* RepresentationChanger::GetWord64RepresentationFor(
     Node* node, MachineRepresentation output_rep, Type* output_type) {
-  if (output_rep == MachineRepresentation::kNone) {
-    // We should only use kNone representation if the type is empty.
-    CHECK(!output_type->IsInhabited());
-    return jsgraph()->graph()->NewNode(machine()->ImpossibleToFloat64(), node);
+  if (output_type->Is(Type::None())) {
+    // This is an impossible value; it should not be used at runtime.
+    // We just provide a dummy value here.
+    return jsgraph()->Int64Constant(0);
   } else if (output_rep == MachineRepresentation::kBit) {
     return node;  // Sloppy comparison -> word64
   }
@@ -787,7 +952,7 @@
   if (!testing_type_errors_) {
     std::ostringstream out_str;
     out_str << output_rep << " (";
-    output_type->PrintTo(out_str, Type::SEMANTIC_DIM);
+    output_type->PrintTo(out_str);
     out_str << ")";
 
     std::ostringstream use_str;
@@ -802,6 +967,9 @@
   return node;
 }
 
+Node* RepresentationChanger::InsertChangeBitToTagged(Node* node) {
+  return jsgraph()->graph()->NewNode(simplified()->ChangeBitToTagged(), node);
+}
 
 Node* RepresentationChanger::InsertChangeFloat32ToFloat64(Node* node) {
   return jsgraph()->graph()->NewNode(machine()->ChangeFloat32ToFloat64(), node);
diff --git a/src/compiler/representation-change.h b/src/compiler/representation-change.h
index fac3280..f27108e 100644
--- a/src/compiler/representation-change.h
+++ b/src/compiler/representation-change.h
@@ -29,6 +29,9 @@
 
   // Queries.
   bool IsUnused() const { return kind_ == TruncationKind::kNone; }
+  bool IsUsedAsBool() const {
+    return LessGeneral(kind_, TruncationKind::kBool);
+  }
   bool IsUsedAsWord32() const {
     return LessGeneral(kind_, TruncationKind::kWord32);
   }
@@ -139,8 +142,18 @@
   static UseInfo AnyTagged() {
     return UseInfo(MachineRepresentation::kTagged, Truncation::Any());
   }
+  static UseInfo TaggedSigned() {
+    return UseInfo(MachineRepresentation::kTaggedSigned, Truncation::Any());
+  }
+  static UseInfo TaggedPointer() {
+    return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any());
+  }
 
   // Possibly deoptimizing conversions.
+  static UseInfo CheckedSignedSmallAsTaggedSigned() {
+    return UseInfo(MachineRepresentation::kTaggedSigned, Truncation::Any(),
+                   TypeCheckKind::kSignedSmall);
+  }
   static UseInfo CheckedSignedSmallAsWord32() {
     return UseInfo(MachineRepresentation::kWord32, Truncation::Any(),
                    TypeCheckKind::kSignedSmall);
@@ -232,8 +245,15 @@
   bool testing_type_errors_;  // If {true}, don't abort on a type error.
   bool type_error_;           // Set when a type error is detected.
 
+  Node* GetTaggedSignedRepresentationFor(Node* node,
+                                         MachineRepresentation output_rep,
+                                         Type* output_type, Node* use_node,
+                                         UseInfo use_info);
+  Node* GetTaggedPointerRepresentationFor(Node* node,
+                                          MachineRepresentation output_rep,
+                                          Type* output_type);
   Node* GetTaggedRepresentationFor(Node* node, MachineRepresentation output_rep,
-                                   Type* output_type);
+                                   Type* output_type, Truncation truncation);
   Node* GetFloat32RepresentationFor(Node* node,
                                     MachineRepresentation output_rep,
                                     Type* output_type, Truncation truncation);
@@ -251,6 +271,7 @@
   Node* TypeError(Node* node, MachineRepresentation output_rep,
                   Type* output_type, MachineRepresentation use);
   Node* MakeTruncatedInt32Constant(double value);
+  Node* InsertChangeBitToTagged(Node* node);
   Node* InsertChangeFloat32ToFloat64(Node* node);
   Node* InsertChangeFloat64ToInt32(Node* node);
   Node* InsertChangeFloat64ToUint32(Node* node);
diff --git a/src/compiler/s390/code-generator-s390.cc b/src/compiler/s390/code-generator-s390.cc
index e69a7ac..284c3fc 100644
--- a/src/compiler/s390/code-generator-s390.cc
+++ b/src/compiler/s390/code-generator-s390.cc
@@ -4,7 +4,7 @@
 
 #include "src/compiler/code-generator.h"
 
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -980,9 +980,6 @@
     case kArchDebugBreak:
       __ stop("kArchDebugBreak");
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchNop:
     case kArchThrowTerminator:
       // don't emit code for nops.
@@ -992,8 +989,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -2195,7 +2192,8 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   // TODO(turbofan): We should be able to generate better code by sharing the
@@ -2204,7 +2202,7 @@
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
diff --git a/src/compiler/s390/instruction-selector-s390.cc b/src/compiler/s390/instruction-selector-s390.cc
index 6fc8a4d..f1aa332 100644
--- a/src/compiler/s390/instruction-selector-s390.cc
+++ b/src/compiler/s390/instruction-selector-s390.cc
@@ -327,6 +327,11 @@
   Emit(code, 1, outputs, input_count, inputs);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
+
 void InstructionSelector::VisitStore(Node* node) {
   S390OperandGenerator g(this);
   Node* base = node->InputAt(0);
@@ -1099,7 +1104,7 @@
   Node* right = m.right().node();
   if (g.CanBeImmediate(right, kInt32Imm) &&
       base::bits::IsPowerOfTwo64(g.GetImmediate(right))) {
-    int power = 31 - base::bits::CountLeadingZeros64(g.GetImmediate(right));
+    int power = 63 - base::bits::CountLeadingZeros64(g.GetImmediate(right));
     Emit(kS390_ShiftLeft64, g.DefineSameAsFirst(node), g.UseRegister(left),
          g.UseImmediate(power));
     return;
diff --git a/src/compiler/schedule.h b/src/compiler/schedule.h
index 74ba835..4fc0d0a 100644
--- a/src/compiler/schedule.h
+++ b/src/compiler/schedule.h
@@ -7,7 +7,7 @@
 
 #include <iosfwd>
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/scheduler.cc b/src/compiler/scheduler.cc
index 58c01cc..b4e74d9 100644
--- a/src/compiler/scheduler.cc
+++ b/src/compiler/scheduler.cc
@@ -11,10 +11,10 @@
 #include "src/compiler/common-operator.h"
 #include "src/compiler/control-equivalence.h"
 #include "src/compiler/graph.h"
-#include "src/compiler/node.h"
 #include "src/compiler/node-marker.h"
 #include "src/compiler/node-properties.h"
-#include "src/zone-containers.h"
+#include "src/compiler/node.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/scheduler.h b/src/compiler/scheduler.h
index 269c271..416ba5c 100644
--- a/src/compiler/scheduler.h
+++ b/src/compiler/scheduler.h
@@ -10,7 +10,7 @@
 #include "src/compiler/opcodes.h"
 #include "src/compiler/schedule.h"
 #include "src/compiler/zone-pool.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/simplified-lowering.cc b/src/compiler/simplified-lowering.cc
index de64de3..97aacd6 100644
--- a/src/compiler/simplified-lowering.cc
+++ b/src/compiler/simplified-lowering.cc
@@ -20,9 +20,9 @@
 #include "src/compiler/representation-change.h"
 #include "src/compiler/simplified-operator.h"
 #include "src/compiler/source-position.h"
+#include "src/compiler/type-cache.h"
 #include "src/conversions-inl.h"
 #include "src/objects.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -311,6 +311,9 @@
       bool updated = UpdateFeedbackType(node);
       TRACE(" visit #%d: %s\n", node->id(), node->op()->mnemonic());
       VisitNode(node, info->truncation(), nullptr);
+      TRACE("  ==> output ");
+      PrintOutputInfo(info);
+      TRACE("\n");
       if (updated) {
         for (Node* const user : node->uses()) {
           if (GetInfo(user)->visited()) {
@@ -330,6 +333,9 @@
       bool updated = UpdateFeedbackType(node);
       TRACE(" visit #%d: %s\n", node->id(), node->op()->mnemonic());
       VisitNode(node, info->truncation(), nullptr);
+      TRACE("  ==> output ");
+      PrintOutputInfo(info);
+      TRACE("\n");
       if (updated) {
         for (Node* const user : node->uses()) {
           if (GetInfo(user)->visited()) {
@@ -534,9 +540,6 @@
       TRACE(" visit #%d: %s (trunc: %s)\n", node->id(), node->op()->mnemonic(),
             info->truncation().description());
       VisitNode(node, info->truncation(), nullptr);
-      TRACE("  ==> output ");
-      PrintOutputInfo(info);
-      TRACE("\n");
     }
   }
 
@@ -804,41 +807,10 @@
     VisitBinop(node, UseInfo::TruncatingFloat64(),
                MachineRepresentation::kFloat64);
   }
-  void VisitInt32Binop(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord32(),
-               MachineRepresentation::kWord32);
-  }
   void VisitWord32TruncatingBinop(Node* node) {
     VisitBinop(node, UseInfo::TruncatingWord32(),
                MachineRepresentation::kWord32);
   }
-  void VisitUint32Binop(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord32(),
-               MachineRepresentation::kWord32);
-  }
-  void VisitInt64Binop(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord64(),
-               MachineRepresentation::kWord64);
-  }
-  void VisitUint64Binop(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord64(),
-               MachineRepresentation::kWord64);
-  }
-  void VisitFloat64Cmp(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingFloat64(), MachineRepresentation::kBit);
-  }
-  void VisitInt32Cmp(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord32(), MachineRepresentation::kBit);
-  }
-  void VisitUint32Cmp(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord32(), MachineRepresentation::kBit);
-  }
-  void VisitInt64Cmp(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord64(), MachineRepresentation::kBit);
-  }
-  void VisitUint64Cmp(Node* node) {
-    VisitBinop(node, UseInfo::TruncatingWord64(), MachineRepresentation::kBit);
-  }
 
   // Infer representation for phi-like nodes.
   // The {node} parameter is only used to decide on the int64 representation.
@@ -875,11 +847,13 @@
       bool is_word64 = GetInfo(node->InputAt(0))->representation() ==
                        MachineRepresentation::kWord64;
 #ifdef DEBUG
-      // Check that all the inputs agree on being Word64.
-      DCHECK_EQ(IrOpcode::kPhi, node->opcode());  // This only works for phis.
-      for (int i = 1; i < node->op()->ValueInputCount(); i++) {
-        DCHECK_EQ(is_word64, GetInfo(node->InputAt(i))->representation() ==
-                                 MachineRepresentation::kWord64);
+      if (node->opcode() != IrOpcode::kTypeGuard) {
+        // Check that all the inputs agree on being Word64.
+        DCHECK_EQ(IrOpcode::kPhi, node->opcode());  // This only works for phis.
+        for (int i = 1; i < node->op()->ValueInputCount(); i++) {
+          DCHECK_EQ(is_word64, GetInfo(node->InputAt(i))->representation() ==
+                                   MachineRepresentation::kWord64);
+        }
       }
 #endif
       return is_word64 ? MachineRepresentation::kWord64
@@ -937,6 +911,21 @@
     }
   }
 
+  void VisitObjectIs(Node* node, Type* type, SimplifiedLowering* lowering) {
+    Type* const input_type = TypeOf(node->InputAt(0));
+    if (input_type->Is(type)) {
+      VisitUnop(node, UseInfo::None(), MachineRepresentation::kBit);
+      if (lower()) {
+        DeferReplacement(node, lowering->jsgraph()->Int32Constant(1));
+      }
+    } else {
+      VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kBit);
+      if (lower() && !input_type->Maybe(type)) {
+        DeferReplacement(node, lowering->jsgraph()->Int32Constant(0));
+      }
+    }
+  }
+
   void VisitCall(Node* node, SimplifiedLowering* lowering) {
     const CallDescriptor* desc = CallDescriptorOf(node->op());
     int params = static_cast<int>(desc->ParameterCount());
@@ -986,8 +975,11 @@
       for (int i = 0; i < node->InputCount(); i++) {
         Node* input = node->InputAt(i);
         NodeInfo* input_info = GetInfo(input);
-        MachineType machine_type(input_info->representation(),
-                                 DeoptValueSemanticOf(TypeOf(input)));
+        Type* input_type = TypeOf(input);
+        MachineRepresentation rep = input_type->IsInhabited()
+                                        ? input_info->representation()
+                                        : MachineRepresentation::kNone;
+        MachineType machine_type(rep, DeoptValueSemanticOf(input_type));
         DCHECK(machine_type.representation() !=
                    MachineRepresentation::kWord32 ||
                machine_type.semantic() == MachineSemantic::kInt32 ||
@@ -1023,12 +1015,12 @@
   WriteBarrierKind WriteBarrierKindFor(
       BaseTaggedness base_taggedness,
       MachineRepresentation field_representation, Type* field_type,
-      Node* value) {
+      MachineRepresentation value_representation, Node* value) {
     if (base_taggedness == kTaggedBase &&
-        field_representation == MachineRepresentation::kTagged) {
+        CanBeTaggedPointer(field_representation)) {
       Type* value_type = NodeProperties::GetType(value);
-      if (field_type->Is(Type::TaggedSigned()) ||
-          value_type->Is(Type::TaggedSigned())) {
+      if (field_representation == MachineRepresentation::kTaggedSigned ||
+          value_representation == MachineRepresentation::kTaggedSigned) {
         // Write barriers are only for stores of heap objects.
         return kNoWriteBarrier;
       }
@@ -1054,8 +1046,8 @@
           return kMapWriteBarrier;
         }
       }
-      if (field_type->Is(Type::TaggedPointer()) ||
-          value_type->Is(Type::TaggedPointer())) {
+      if (field_representation == MachineRepresentation::kTaggedPointer ||
+          value_representation == MachineRepresentation::kTaggedPointer) {
         // Write barriers for heap objects are cheaper.
         return kPointerWriteBarrier;
       }
@@ -1076,13 +1068,14 @@
   WriteBarrierKind WriteBarrierKindFor(
       BaseTaggedness base_taggedness,
       MachineRepresentation field_representation, int field_offset,
-      Type* field_type, Node* value) {
+      Type* field_type, MachineRepresentation value_representation,
+      Node* value) {
     if (base_taggedness == kTaggedBase &&
         field_offset == HeapObject::kMapOffset) {
       return kMapWriteBarrier;
     }
     return WriteBarrierKindFor(base_taggedness, field_representation,
-                               field_type, value);
+                               field_type, value_representation, value);
   }
 
   Graph* graph() const { return jsgraph_->graph(); }
@@ -1169,6 +1162,110 @@
     return;
   }
 
+  void VisitSpeculativeNumberModulus(Node* node, Truncation truncation,
+                                     SimplifiedLowering* lowering) {
+    // ToNumber(x) can throw if x is either a Receiver or a Symbol, so we
+    // can only eliminate an unused speculative number operation if we know
+    // that the inputs are PlainPrimitive, which excludes everything that
+    // might have side effects or throw during a ToNumber conversion.
+    if (BothInputsAre(node, Type::PlainPrimitive())) {
+      if (truncation.IsUnused()) return VisitUnused(node);
+    }
+    if (BothInputsAre(node, Type::Unsigned32OrMinusZeroOrNaN()) &&
+        (truncation.IsUsedAsWord32() ||
+         NodeProperties::GetType(node)->Is(Type::Unsigned32()))) {
+      // => unsigned Uint32Mod
+      VisitWord32TruncatingBinop(node);
+      if (lower()) DeferReplacement(node, lowering->Uint32Mod(node));
+      return;
+    }
+    if (BothInputsAre(node, Type::Signed32OrMinusZeroOrNaN()) &&
+        (truncation.IsUsedAsWord32() ||
+         NodeProperties::GetType(node)->Is(Type::Signed32()))) {
+      // => signed Int32Mod
+      VisitWord32TruncatingBinop(node);
+      if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
+      return;
+    }
+
+    // Try to use type feedback.
+    NumberOperationHint hint = NumberOperationHintOf(node->op());
+
+    // Handle the case when no uint32 checks on inputs are necessary
+    // (but an overflow check is needed on the output).
+    if (BothInputsAreUnsigned32(node)) {
+      if (hint == NumberOperationHint::kSignedSmall ||
+          hint == NumberOperationHint::kSigned32) {
+        VisitBinop(node, UseInfo::TruncatingWord32(),
+                   MachineRepresentation::kWord32, Type::Unsigned32());
+        if (lower()) ChangeToUint32OverflowOp(node);
+        return;
+      }
+    }
+
+    // Handle the case when no int32 checks on inputs are necessary
+    // (but an overflow check is needed on the output).
+    if (BothInputsAre(node, Type::Signed32())) {
+      // If both the inputs and the feedback are int32, use the overflow op.
+      if (hint == NumberOperationHint::kSignedSmall ||
+          hint == NumberOperationHint::kSigned32) {
+        VisitBinop(node, UseInfo::TruncatingWord32(),
+                   MachineRepresentation::kWord32, Type::Signed32());
+        if (lower()) ChangeToInt32OverflowOp(node);
+        return;
+      }
+    }
+
+    if (hint == NumberOperationHint::kSignedSmall ||
+        hint == NumberOperationHint::kSigned32) {
+      // If the result is truncated, we only need to check the inputs.
+      if (truncation.IsUsedAsWord32()) {
+        VisitBinop(node, CheckedUseInfoAsWord32FromHint(hint),
+                   MachineRepresentation::kWord32);
+        if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
+      } else if (BothInputsAre(node, Type::Unsigned32OrMinusZeroOrNaN())) {
+        VisitBinop(node, CheckedUseInfoAsWord32FromHint(hint),
+                   MachineRepresentation::kWord32, Type::Unsigned32());
+        if (lower()) DeferReplacement(node, lowering->Uint32Mod(node));
+      } else {
+        VisitBinop(node, CheckedUseInfoAsWord32FromHint(hint),
+                   MachineRepresentation::kWord32, Type::Signed32());
+        if (lower()) ChangeToInt32OverflowOp(node);
+      }
+      return;
+    }
+
+    if (TypeOf(node->InputAt(0))->Is(Type::Unsigned32()) &&
+        TypeOf(node->InputAt(1))->Is(Type::Unsigned32()) &&
+        (truncation.IsUsedAsWord32() ||
+         NodeProperties::GetType(node)->Is(Type::Unsigned32()))) {
+      // We can only promise Float64 truncation here, as the decision is
+      // based on the feedback types of the inputs.
+      VisitBinop(node,
+                 UseInfo(MachineRepresentation::kWord32, Truncation::Float64()),
+                 MachineRepresentation::kWord32, Type::Number());
+      if (lower()) DeferReplacement(node, lowering->Uint32Mod(node));
+      return;
+    }
+    if (TypeOf(node->InputAt(0))->Is(Type::Signed32()) &&
+        TypeOf(node->InputAt(1))->Is(Type::Signed32()) &&
+        (truncation.IsUsedAsWord32() ||
+         NodeProperties::GetType(node)->Is(Type::Signed32()))) {
+      // We can only promise Float64 truncation here, as the decision is
+      // based on the feedback types of the inputs.
+      VisitBinop(node,
+                 UseInfo(MachineRepresentation::kWord32, Truncation::Float64()),
+                 MachineRepresentation::kWord32, Type::Number());
+      if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
+      return;
+    }
+    // default case => Float64Mod
+    VisitBinop(node, UseInfo::CheckedNumberOrOddballAsFloat64(),
+               MachineRepresentation::kFloat64, Type::Number());
+    if (lower()) ChangeToPureOp(node, Float64Op(node));
+    return;
+  }
+
   // Dispatching routine for visiting the node {node} with the usage {use}.
   // Depending on the operator, propagate new usage info to the inputs.
   void VisitNode(Node* node, Truncation truncation,
@@ -1195,22 +1292,13 @@
         // tho Start doesn't really produce a value, we have to say Tagged
         // here, otherwise the input conversion will fail.
         return VisitLeaf(node, MachineRepresentation::kTagged);
-      case IrOpcode::kDead:
-        return VisitLeaf(node, MachineRepresentation::kNone);
-      case IrOpcode::kParameter: {
+      case IrOpcode::kParameter:
         // TODO(titzer): use representation from linkage.
-        ProcessInput(node, 0, UseInfo::None());
-        SetOutput(node, MachineRepresentation::kTagged);
-        return;
-      }
+        return VisitUnop(node, UseInfo::None(), MachineRepresentation::kTagged);
       case IrOpcode::kInt32Constant:
         return VisitLeaf(node, MachineRepresentation::kWord32);
       case IrOpcode::kInt64Constant:
         return VisitLeaf(node, MachineRepresentation::kWord64);
-      case IrOpcode::kFloat32Constant:
-        return VisitLeaf(node, MachineRepresentation::kFloat32);
-      case IrOpcode::kFloat64Constant:
-        return VisitLeaf(node, MachineRepresentation::kFloat64);
       case IrOpcode::kExternalConstant:
         return VisitLeaf(node, MachineType::PointerRepresentation());
       case IrOpcode::kNumberConstant:
@@ -1218,12 +1306,6 @@
       case IrOpcode::kHeapConstant:
         return VisitLeaf(node, MachineRepresentation::kTagged);
 
-      case IrOpcode::kDeoptimizeIf:
-      case IrOpcode::kDeoptimizeUnless:
-        ProcessInput(node, 0, UseInfo::Bool());
-        ProcessInput(node, 1, UseInfo::AnyTagged());
-        ProcessRemainingInputs(node, 2);
-        return;
       case IrOpcode::kBranch:
         ProcessInput(node, 0, UseInfo::Bool());
         EnqueueInput(node, NodeProperties::FirstControlIndex(node));
@@ -1242,6 +1324,18 @@
       //------------------------------------------------------------------
       // JavaScript operators.
       //------------------------------------------------------------------
+      case IrOpcode::kJSToBoolean: {
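+        // If the value is only used as a boolean, consume the input as a bit
+        // and let lowering replace the node with that input; otherwise keep
+        // the node with a tagged output.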
+        if (truncation.IsUsedAsBool()) {
+          ProcessInput(node, 0, UseInfo::Bool());
+          ProcessInput(node, 1, UseInfo::None());
+          SetOutput(node, MachineRepresentation::kBit);
+          if (lower()) DeferReplacement(node, node->InputAt(0));
+        } else {
+          VisitInputs(node);
+          SetOutput(node, MachineRepresentation::kTagged);
+        }
+        return;
+      }
       case IrOpcode::kJSToNumber: {
         VisitInputs(node);
         // TODO(bmeurer): Optimize somewhat based on input type?
@@ -1268,6 +1362,8 @@
             node->AppendInput(jsgraph_->zone(), jsgraph_->Int32Constant(0));
             NodeProperties::ChangeOp(node, lowering->machine()->Word32Equal());
           } else {
+            DCHECK_EQ(input_info->representation(),
+                      MachineRepresentation::kTagged);
             // BooleanNot(x: kRepTagged) => WordEqual(x, #false)
             node->AppendInput(jsgraph_->zone(), jsgraph_->FalseConstant());
             NodeProperties::ChangeOp(node, lowering->machine()->WordEqual());
@@ -1289,7 +1385,8 @@
              rhs_type->Is(Type::Unsigned32OrMinusZeroOrNaN()) &&
              OneInputCannotBe(node, type_cache_.kZeroish))) {
           // => unsigned Int32Cmp
-          VisitUint32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) NodeProperties::ChangeOp(node, Uint32Op(node));
           return;
         }
@@ -1299,12 +1396,14 @@
              rhs_type->Is(Type::Signed32OrMinusZeroOrNaN()) &&
              OneInputCannotBe(node, type_cache_.kZeroish))) {
           // => signed Int32Cmp
-          VisitInt32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) NodeProperties::ChangeOp(node, Int32Op(node));
           return;
         }
         // => Float64Cmp
-        VisitFloat64Cmp(node);
+        VisitBinop(node, UseInfo::TruncatingFloat64(),
+                   MachineRepresentation::kBit);
         if (lower()) NodeProperties::ChangeOp(node, Float64Op(node));
         return;
       }
@@ -1314,16 +1413,19 @@
         if (TypeOf(node->InputAt(0))->Is(Type::Unsigned32()) &&
             TypeOf(node->InputAt(1))->Is(Type::Unsigned32())) {
           // => unsigned Int32Cmp
-          VisitUint32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) NodeProperties::ChangeOp(node, Uint32Op(node));
         } else if (TypeOf(node->InputAt(0))->Is(Type::Signed32()) &&
                    TypeOf(node->InputAt(1))->Is(Type::Signed32())) {
           // => signed Int32Cmp
-          VisitInt32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) NodeProperties::ChangeOp(node, Int32Op(node));
         } else {
           // => Float64Cmp
-          VisitFloat64Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingFloat64(),
+                     MachineRepresentation::kBit);
           if (lower()) NodeProperties::ChangeOp(node, Float64Op(node));
         }
         return;
@@ -1347,13 +1449,15 @@
         if (TypeOf(node->InputAt(0))->Is(Type::Unsigned32()) &&
             TypeOf(node->InputAt(1))->Is(Type::Unsigned32())) {
           // => unsigned Int32Cmp
-          VisitUint32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) ChangeToPureOp(node, Uint32Op(node));
           return;
         } else if (TypeOf(node->InputAt(0))->Is(Type::Signed32()) &&
                    TypeOf(node->InputAt(1))->Is(Type::Signed32())) {
           // => signed Int32Cmp
-          VisitInt32Cmp(node);
+          VisitBinop(node, UseInfo::TruncatingWord32(),
+                     MachineRepresentation::kBit);
           if (lower()) ChangeToPureOp(node, Int32Op(node));
           return;
         }
@@ -1490,10 +1594,10 @@
         }
         if (BothInputsAreSigned32(node)) {
           if (NodeProperties::GetType(node)->Is(Type::Signed32())) {
-          // => signed Int32Div
-          VisitInt32Binop(node);
-          if (lower()) DeferReplacement(node, lowering->Int32Div(node));
-          return;
+            // => signed Int32Div
+            VisitWord32TruncatingBinop(node);
+            if (lower()) DeferReplacement(node, lowering->Int32Div(node));
+            return;
           }
           if (truncation.IsUsedAsWord32()) {
             // => signed Int32Div
@@ -1562,7 +1666,7 @@
         if (BothInputsAreSigned32(node)) {
           if (NodeProperties::GetType(node)->Is(Type::Signed32())) {
             // => signed Int32Div
-            VisitInt32Binop(node);
+            VisitWord32TruncatingBinop(node);
             if (lower()) DeferReplacement(node, lowering->Int32Div(node));
             return;
           }
@@ -1574,116 +1678,12 @@
           }
         }
         // Number x Number => Float64Div
-        if (BothInputsAre(node, Type::NumberOrUndefined())) {
-          VisitFloat64Binop(node);
-          if (lower()) ChangeToPureOp(node, Float64Op(node));
-          return;
-        }
-        // Checked float64 x float64 => float64
-        DCHECK_EQ(IrOpcode::kSpeculativeNumberDivide, node->opcode());
-        VisitBinop(node, UseInfo::CheckedNumberOrOddballAsFloat64(),
-                   MachineRepresentation::kFloat64, Type::Number());
+        VisitFloat64Binop(node);
         if (lower()) ChangeToPureOp(node, Float64Op(node));
         return;
       }
-      case IrOpcode::kSpeculativeNumberModulus: {
-        // ToNumber(x) can throw if x is either a Receiver or a Symbol, so we
-        // can only eliminate an unused speculative number operation if we know
-        // that the inputs are PlainPrimitive, which excludes everything that's
-        // might have side effects or throws during a ToNumber conversion.
-        if (BothInputsAre(node, Type::PlainPrimitive())) {
-          if (truncation.IsUnused()) return VisitUnused(node);
-        }
-        if (BothInputsAre(node, Type::Unsigned32OrMinusZeroOrNaN()) &&
-            (truncation.IsUsedAsWord32() ||
-             NodeProperties::GetType(node)->Is(Type::Unsigned32()))) {
-          // => unsigned Uint32Mod
-          VisitWord32TruncatingBinop(node);
-          if (lower()) DeferReplacement(node, lowering->Uint32Mod(node));
-          return;
-        }
-        if (BothInputsAre(node, Type::Signed32OrMinusZeroOrNaN()) &&
-            (truncation.IsUsedAsWord32() ||
-             NodeProperties::GetType(node)->Is(Type::Signed32()))) {
-          // => signed Int32Mod
-          VisitWord32TruncatingBinop(node);
-          if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
-          return;
-        }
-
-        // Try to use type feedback.
-        NumberOperationHint hint = NumberOperationHintOf(node->op());
-
-        // Handle the case when no uint32 checks on inputs are necessary
-        // (but an overflow check is needed on the output).
-        if (BothInputsAreUnsigned32(node)) {
-          if (hint == NumberOperationHint::kSignedSmall ||
-              hint == NumberOperationHint::kSigned32) {
-            VisitBinop(node, UseInfo::TruncatingWord32(),
-                       MachineRepresentation::kWord32, Type::Unsigned32());
-            if (lower()) ChangeToUint32OverflowOp(node);
-            return;
-          }
-        }
-
-        // Handle the case when no int32 checks on inputs are necessary
-        // (but an overflow check is needed on the output).
-        if (BothInputsAre(node, Type::Signed32())) {
-          // If both the inputs the feedback are int32, use the overflow op.
-          if (hint == NumberOperationHint::kSignedSmall ||
-              hint == NumberOperationHint::kSigned32) {
-            VisitBinop(node, UseInfo::TruncatingWord32(),
-                       MachineRepresentation::kWord32, Type::Signed32());
-            if (lower()) ChangeToInt32OverflowOp(node);
-            return;
-          }
-        }
-
-        if (hint == NumberOperationHint::kSignedSmall ||
-            hint == NumberOperationHint::kSigned32) {
-          // If the result is truncated, we only need to check the inputs.
-          if (truncation.IsUsedAsWord32()) {
-            VisitBinop(node, CheckedUseInfoAsWord32FromHint(hint),
-                       MachineRepresentation::kWord32);
-            if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
-          } else {
-            VisitBinop(node, CheckedUseInfoAsWord32FromHint(hint),
-                       MachineRepresentation::kWord32, Type::Signed32());
-            if (lower()) ChangeToInt32OverflowOp(node);
-          }
-          return;
-        }
-
-        if (TypeOf(node->InputAt(0))->Is(Type::Unsigned32()) &&
-            TypeOf(node->InputAt(1))->Is(Type::Unsigned32()) &&
-            (truncation.IsUsedAsWord32() ||
-             NodeProperties::GetType(node)->Is(Type::Unsigned32()))) {
-          // We can only promise Float64 truncation here, as the decision is
-          // based on the feedback types of the inputs.
-          VisitBinop(node, UseInfo(MachineRepresentation::kWord32,
-                                   Truncation::Float64()),
-                     MachineRepresentation::kWord32);
-          if (lower()) DeferReplacement(node, lowering->Uint32Mod(node));
-          return;
-        }
-        if (TypeOf(node->InputAt(0))->Is(Type::Signed32()) &&
-            TypeOf(node->InputAt(1))->Is(Type::Signed32()) &&
-            (truncation.IsUsedAsWord32() ||
-             NodeProperties::GetType(node)->Is(Type::Signed32()))) {
-          // We can only promise Float64 truncation here, as the decision is
-          // based on the feedback types of the inputs.
-          VisitBinop(node, UseInfo(MachineRepresentation::kWord32,
-                                   Truncation::Float64()),
-                     MachineRepresentation::kWord32);
-          if (lower()) DeferReplacement(node, lowering->Int32Mod(node));
-          return;
-        }
-        // default case => Float64Mod
-        VisitBinop(node, UseInfo::CheckedNumberOrOddballAsFloat64(),
-                   MachineRepresentation::kFloat64, Type::Number());
-        if (lower()) ChangeToPureOp(node, Float64Op(node));
-        return;
-      }
+      case IrOpcode::kSpeculativeNumberModulus:
+        return VisitSpeculativeNumberModulus(node, truncation, lowering);
       case IrOpcode::kNumberModulus: {
         if (BothInputsAre(node, Type::Unsigned32OrMinusZeroOrNaN()) &&
             (truncation.IsUsedAsWord32() ||
@@ -1733,7 +1733,7 @@
       case IrOpcode::kNumberBitwiseOr:
       case IrOpcode::kNumberBitwiseXor:
       case IrOpcode::kNumberBitwiseAnd: {
-        VisitInt32Binop(node);
+        VisitWord32TruncatingBinop(node);
         if (lower()) NodeProperties::ChangeOp(node, Int32Op(node));
         return;
       }
@@ -1895,13 +1895,13 @@
       case IrOpcode::kNumberMax: {
         // TODO(turbofan): We should consider feedback types here as well.
         if (BothInputsAreUnsigned32(node)) {
-          VisitUint32Binop(node);
+          VisitWord32TruncatingBinop(node);
           if (lower()) {
             lowering->DoMax(node, lowering->machine()->Uint32LessThan(),
                             MachineRepresentation::kWord32);
           }
         } else if (BothInputsAreSigned32(node)) {
-          VisitInt32Binop(node);
+          VisitWord32TruncatingBinop(node);
           if (lower()) {
             lowering->DoMax(node, lowering->machine()->Int32LessThan(),
                             MachineRepresentation::kWord32);
@@ -1921,13 +1921,13 @@
       case IrOpcode::kNumberMin: {
         // TODO(turbofan): We should consider feedback types here as well.
         if (BothInputsAreUnsigned32(node)) {
-          VisitUint32Binop(node);
+          VisitWord32TruncatingBinop(node);
           if (lower()) {
             lowering->DoMin(node, lowering->machine()->Uint32LessThan(),
                             MachineRepresentation::kWord32);
           }
         } else if (BothInputsAreSigned32(node)) {
-          VisitInt32Binop(node);
+          VisitWord32TruncatingBinop(node);
           if (lower()) {
             lowering->DoMin(node, lowering->machine()->Int32LessThan(),
                             MachineRepresentation::kWord32);
@@ -2002,6 +2002,23 @@
         if (lower()) NodeProperties::ChangeOp(node, Float64Op(node));
         return;
       }
+      case IrOpcode::kNumberToBoolean: {
+        Type* const input_type = TypeOf(node->InputAt(0));
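+        // Dispatch on the input type: Integral32 inputs only need a word32
+        // zero test, ordered numbers a float64 zero test, and arbitrary
+        // numbers the NaN-aware lowering (see the Do*ToBit helpers).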
+        if (input_type->Is(Type::Integral32())) {
+          VisitUnop(node, UseInfo::TruncatingWord32(),
+                    MachineRepresentation::kBit);
+          if (lower()) lowering->DoIntegral32ToBit(node);
+        } else if (input_type->Is(Type::OrderedNumber())) {
+          VisitUnop(node, UseInfo::TruncatingFloat64(),
+                    MachineRepresentation::kBit);
+          if (lower()) lowering->DoOrderedNumberToBit(node);
+        } else {
+          VisitUnop(node, UseInfo::TruncatingFloat64(),
+                    MachineRepresentation::kBit);
+          if (lower()) lowering->DoNumberToBit(node);
+        }
+        return;
+      }
       case IrOpcode::kNumberToInt32: {
         // Just change representation if necessary.
         VisitUnop(node, UseInfo::TruncatingWord32(),
@@ -2023,62 +2040,11 @@
         }
         return;
       }
-      case IrOpcode::kStringEqual: {
-        VisitBinop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-        if (lower()) {
-          // StringEqual(x, y) => Call(StringEqualStub, x, y, no-context)
-          Operator::Properties properties =
-              Operator::kCommutative | Operator::kEliminatable;
-          Callable callable = CodeFactory::StringEqual(jsgraph_->isolate());
-          CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
-          CallDescriptor* desc = Linkage::GetStubCallDescriptor(
-              jsgraph_->isolate(), jsgraph_->zone(), callable.descriptor(), 0,
-              flags, properties);
-          node->InsertInput(jsgraph_->zone(), 0,
-                            jsgraph_->HeapConstant(callable.code()));
-          node->AppendInput(jsgraph_->zone(), jsgraph_->NoContextConstant());
-          node->AppendInput(jsgraph_->zone(), jsgraph_->graph()->start());
-          NodeProperties::ChangeOp(node, jsgraph_->common()->Call(desc));
-        }
-        return;
-      }
-      case IrOpcode::kStringLessThan: {
-        VisitBinop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-        if (lower()) {
-          // StringLessThan(x, y) => Call(StringLessThanStub, x, y, no-context)
-          Operator::Properties properties = Operator::kEliminatable;
-          Callable callable = CodeFactory::StringLessThan(jsgraph_->isolate());
-          CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
-          CallDescriptor* desc = Linkage::GetStubCallDescriptor(
-              jsgraph_->isolate(), jsgraph_->zone(), callable.descriptor(), 0,
-              flags, properties);
-          node->InsertInput(jsgraph_->zone(), 0,
-                            jsgraph_->HeapConstant(callable.code()));
-          node->AppendInput(jsgraph_->zone(), jsgraph_->NoContextConstant());
-          node->AppendInput(jsgraph_->zone(), jsgraph_->graph()->start());
-          NodeProperties::ChangeOp(node, jsgraph_->common()->Call(desc));
-        }
-        return;
-      }
+      case IrOpcode::kStringEqual:
+      case IrOpcode::kStringLessThan:
       case IrOpcode::kStringLessThanOrEqual: {
-        VisitBinop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-        if (lower()) {
-          // StringLessThanOrEqual(x, y)
-          //   => Call(StringLessThanOrEqualStub, x, y, no-context)
-          Operator::Properties properties = Operator::kEliminatable;
-          Callable callable =
-              CodeFactory::StringLessThanOrEqual(jsgraph_->isolate());
-          CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
-          CallDescriptor* desc = Linkage::GetStubCallDescriptor(
-              jsgraph_->isolate(), jsgraph_->zone(), callable.descriptor(), 0,
-              flags, properties);
-          node->InsertInput(jsgraph_->zone(), 0,
-                            jsgraph_->HeapConstant(callable.code()));
-          node->AppendInput(jsgraph_->zone(), jsgraph_->NoContextConstant());
-          node->AppendInput(jsgraph_->zone(), jsgraph_->graph()->start());
-          NodeProperties::ChangeOp(node, jsgraph_->common()->Call(desc));
-        }
-        return;
+        return VisitBinop(node, UseInfo::AnyTagged(),
+                          MachineRepresentation::kTagged);
       }
       case IrOpcode::kStringCharCodeAt: {
         VisitBinop(node, UseInfo::AnyTagged(), UseInfo::TruncatingWord32(),
@@ -2090,23 +2056,36 @@
                   MachineRepresentation::kTagged);
         return;
       }
+      case IrOpcode::kStringFromCodePoint: {
+        VisitUnop(node, UseInfo::TruncatingWord32(),
+                  MachineRepresentation::kTagged);
+        return;
+      }
 
       case IrOpcode::kCheckBounds: {
         Type* index_type = TypeOf(node->InputAt(0));
+        Type* length_type = TypeOf(node->InputAt(1));
         if (index_type->Is(Type::Unsigned32())) {
           VisitBinop(node, UseInfo::TruncatingWord32(),
                      MachineRepresentation::kWord32);
+          if (lower() && index_type->Max() < length_type->Min()) {
+            // The bounds check is redundant if we already know that
+            // the index is within the bounds of [0.0, length[.
+            DeferReplacement(node, node->InputAt(0));
+          }
         } else {
           VisitBinop(node, UseInfo::CheckedSigned32AsWord32(),
                      UseInfo::TruncatingWord32(),
                      MachineRepresentation::kWord32);
         }
-        if (lower()) {
-          // The bounds check is redundant if we already know that
-          // the index is within the bounds of [0.0, length[.
-          if (index_type->Is(NodeProperties::GetType(node))) {
-            DeferReplacement(node, node->InputAt(0));
-          }
+        return;
+      }
+      case IrOpcode::kCheckHeapObject: {
+        if (InputCannotBe(node, Type::SignedSmall())) {
+          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
+          if (lower()) DeferReplacement(node, node->InputAt(0));
+        } else {
+          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
         }
         return;
       }
@@ -2135,28 +2114,20 @@
         }
         return;
       }
-      case IrOpcode::kCheckString: {
-        if (InputIs(node, Type::String())) {
-          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-          if (lower()) DeferReplacement(node, node->InputAt(0));
-        } else {
-          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-        }
-        return;
-      }
-      case IrOpcode::kCheckTaggedPointer: {
-        if (InputCannotBe(node, Type::SignedSmall())) {
-          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-          if (lower()) DeferReplacement(node, node->InputAt(0));
-        } else {
-          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
-        }
-        return;
-      }
-      case IrOpcode::kCheckTaggedSigned: {
+      case IrOpcode::kCheckSmi: {
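+        // With 32-bit Smis, a use that only needs a word32 can take the
+        // checked value directly as a word32; otherwise keep it as a
+        // tagged-signed value. Either way the check is folded into the use
+        // info and the node is replaced by its input during lowering.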
         if (SmiValuesAre32Bits() && truncation.IsUsedAsWord32()) {
           VisitUnop(node, UseInfo::CheckedSignedSmallAsWord32(),
                     MachineRepresentation::kWord32);
+        } else {
+          VisitUnop(node, UseInfo::CheckedSignedSmallAsTaggedSigned(),
+                    MachineRepresentation::kTaggedSigned);
+        }
+        if (lower()) DeferReplacement(node, node->InputAt(0));
+        return;
+      }
+      case IrOpcode::kCheckString: {
+        if (InputIs(node, Type::String())) {
+          VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
           if (lower()) DeferReplacement(node, node->InputAt(0));
         } else {
           VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kTagged);
@@ -2175,15 +2146,16 @@
         FieldAccess access = FieldAccessOf(node->op());
         MachineRepresentation const representation =
             access.machine_type.representation();
-        // TODO(bmeurer): Introduce an appropriate tagged-signed machine rep.
         VisitUnop(node, UseInfoForBasePointer(access), representation);
         return;
       }
       case IrOpcode::kStoreField: {
         FieldAccess access = FieldAccessOf(node->op());
+        NodeInfo* input_info = GetInfo(node->InputAt(1));
         WriteBarrierKind write_barrier_kind = WriteBarrierKindFor(
             access.base_is_tagged, access.machine_type.representation(),
-            access.offset, access.type, node->InputAt(1));
+            access.offset, access.type, input_info->representation(),
+            node->InputAt(1));
         ProcessInput(node, 0, UseInfoForBasePointer(access));
         ProcessInput(node, 1, TruncatingUseInfoFromRepresentation(
                                   access.machine_type.representation()));
@@ -2255,9 +2227,10 @@
       }
       case IrOpcode::kStoreElement: {
         ElementAccess access = ElementAccessOf(node->op());
+        NodeInfo* input_info = GetInfo(node->InputAt(2));
         WriteBarrierKind write_barrier_kind = WriteBarrierKindFor(
             access.base_is_tagged, access.machine_type.representation(),
-            access.type, node->InputAt(2));
+            access.type, input_info->representation(), node->InputAt(2));
         ProcessInput(node, 0, UseInfoForBasePointer(access));  // base
         ProcessInput(node, 1, UseInfo::TruncatingWord32());    // index
         ProcessInput(node, 2,
@@ -2336,14 +2309,34 @@
         }
         return;
       }
-      case IrOpcode::kObjectIsCallable:
-      case IrOpcode::kObjectIsNumber:
-      case IrOpcode::kObjectIsReceiver:
-      case IrOpcode::kObjectIsSmi:
-      case IrOpcode::kObjectIsString:
+      case IrOpcode::kObjectIsCallable: {
+        // TODO(turbofan): Add Type::Callable to optimize this?
+        VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kBit);
+        return;
+      }
+      case IrOpcode::kObjectIsNumber: {
+        VisitObjectIs(node, Type::Number(), lowering);
+        return;
+      }
+      case IrOpcode::kObjectIsReceiver: {
+        VisitObjectIs(node, Type::Receiver(), lowering);
+        return;
+      }
+      case IrOpcode::kObjectIsSmi: {
+        // TODO(turbofan): Optimize based on input representation.
+        VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kBit);
+        return;
+      }
+      case IrOpcode::kObjectIsString: {
+        VisitObjectIs(node, Type::String(), lowering);
+        return;
+      }
       case IrOpcode::kObjectIsUndetectable: {
-        ProcessInput(node, 0, UseInfo::AnyTagged());
-        SetOutput(node, MachineRepresentation::kBit);
+        VisitObjectIs(node, Type::Undetectable(), lowering);
+        return;
+      }
+      case IrOpcode::kArrayBufferWasNeutered: {
+        VisitUnop(node, UseInfo::AnyTagged(), MachineRepresentation::kBit);
         return;
       }
       case IrOpcode::kCheckFloat64Hole: {
@@ -2403,158 +2396,11 @@
         return;
       }
 
-      //------------------------------------------------------------------
-      // Machine-level operators.
-      //------------------------------------------------------------------
-      case IrOpcode::kLoad: {
-        // TODO(jarin) Eventually, we should get rid of all machine stores
-        // from the high-level phases, then this becomes UNREACHABLE.
-        LoadRepresentation rep = LoadRepresentationOf(node->op());
-        ProcessInput(node, 0, UseInfo::AnyTagged());   // tagged pointer
-        ProcessInput(node, 1, UseInfo::PointerInt());  // index
-        ProcessRemainingInputs(node, 2);
-        return SetOutput(node, rep.representation());
-      }
-      case IrOpcode::kStore: {
-        // TODO(jarin) Eventually, we should get rid of all machine stores
-        // from the high-level phases, then this becomes UNREACHABLE.
-        StoreRepresentation rep = StoreRepresentationOf(node->op());
-        ProcessInput(node, 0, UseInfo::AnyTagged());   // tagged pointer
-        ProcessInput(node, 1, UseInfo::PointerInt());  // index
-        ProcessInput(node, 2,
-                     TruncatingUseInfoFromRepresentation(rep.representation()));
-        ProcessRemainingInputs(node, 3);
-        return SetOutput(node, MachineRepresentation::kNone);
-      }
-      case IrOpcode::kWord32Shr:
-        // We output unsigned int32 for shift right because JavaScript.
-        return VisitBinop(node, UseInfo::TruncatingWord32(),
-                          MachineRepresentation::kWord32);
-      case IrOpcode::kWord32And:
-      case IrOpcode::kWord32Or:
-      case IrOpcode::kWord32Xor:
-      case IrOpcode::kWord32Shl:
-      case IrOpcode::kWord32Sar:
-        // We use signed int32 as the output type for these word32 operations,
-        // though the machine bits are the same for either signed or unsigned,
-        // because JavaScript considers the result from these operations signed.
-        return VisitBinop(node, UseInfo::TruncatingWord32(),
-                          MachineRepresentation::kWord32);
-      case IrOpcode::kWord32Equal:
-        return VisitBinop(node, UseInfo::TruncatingWord32(),
-                          MachineRepresentation::kBit);
-
-      case IrOpcode::kWord32Clz:
-        return VisitUnop(node, UseInfo::TruncatingWord32(),
-                         MachineRepresentation::kWord32);
-
-      case IrOpcode::kInt32Add:
-      case IrOpcode::kInt32Sub:
-      case IrOpcode::kInt32Mul:
-      case IrOpcode::kInt32MulHigh:
-      case IrOpcode::kInt32Div:
-      case IrOpcode::kInt32Mod:
-        return VisitInt32Binop(node);
-      case IrOpcode::kUint32Div:
-      case IrOpcode::kUint32Mod:
-      case IrOpcode::kUint32MulHigh:
-        return VisitUint32Binop(node);
-      case IrOpcode::kInt32LessThan:
-      case IrOpcode::kInt32LessThanOrEqual:
-        return VisitInt32Cmp(node);
-
-      case IrOpcode::kUint32LessThan:
-      case IrOpcode::kUint32LessThanOrEqual:
-        return VisitUint32Cmp(node);
-
-      case IrOpcode::kInt64Add:
-      case IrOpcode::kInt64Sub:
-      case IrOpcode::kInt64Mul:
-      case IrOpcode::kInt64Div:
-      case IrOpcode::kInt64Mod:
-        return VisitInt64Binop(node);
-      case IrOpcode::kInt64LessThan:
-      case IrOpcode::kInt64LessThanOrEqual:
-        return VisitInt64Cmp(node);
-
-      case IrOpcode::kUint64LessThan:
-        return VisitUint64Cmp(node);
-
-      case IrOpcode::kUint64Div:
-      case IrOpcode::kUint64Mod:
-        return VisitUint64Binop(node);
-
-      case IrOpcode::kWord64And:
-      case IrOpcode::kWord64Or:
-      case IrOpcode::kWord64Xor:
-      case IrOpcode::kWord64Shl:
-      case IrOpcode::kWord64Shr:
-      case IrOpcode::kWord64Sar:
-        return VisitBinop(node, UseInfo::TruncatingWord64(),
-                          MachineRepresentation::kWord64);
-      case IrOpcode::kWord64Equal:
-        return VisitBinop(node, UseInfo::TruncatingWord64(),
-                          MachineRepresentation::kBit);
-
-      case IrOpcode::kChangeInt32ToInt64:
-        return VisitUnop(node, UseInfo::TruncatingWord32(),
-                         MachineRepresentation::kWord64);
-      case IrOpcode::kChangeUint32ToUint64:
-        return VisitUnop(node, UseInfo::TruncatingWord32(),
-                         MachineRepresentation::kWord64);
-      case IrOpcode::kTruncateFloat64ToFloat32:
-        return VisitUnop(node, UseInfo::TruncatingFloat64(),
-                         MachineRepresentation::kFloat32);
-      case IrOpcode::kTruncateFloat64ToWord32:
-        return VisitUnop(node, UseInfo::TruncatingFloat64(),
-                         MachineRepresentation::kWord32);
-
-      case IrOpcode::kChangeInt32ToFloat64:
-        return VisitUnop(node, UseInfo::TruncatingWord32(),
-                         MachineRepresentation::kFloat64);
-      case IrOpcode::kChangeUint32ToFloat64:
-        return VisitUnop(node, UseInfo::TruncatingWord32(),
-                         MachineRepresentation::kFloat64);
-      case IrOpcode::kFloat64Add:
-      case IrOpcode::kFloat64Sub:
-      case IrOpcode::kFloat64Mul:
-      case IrOpcode::kFloat64Div:
-      case IrOpcode::kFloat64Mod:
-      case IrOpcode::kFloat64Min:
-        return VisitFloat64Binop(node);
-      case IrOpcode::kFloat64Abs:
-      case IrOpcode::kFloat64Sqrt:
-      case IrOpcode::kFloat64RoundDown:
-      case IrOpcode::kFloat64RoundTruncate:
-      case IrOpcode::kFloat64RoundTiesAway:
-      case IrOpcode::kFloat64RoundUp:
-        return VisitUnop(node, UseInfo::TruncatingFloat64(),
-                         MachineRepresentation::kFloat64);
-      case IrOpcode::kFloat64SilenceNaN:
-        return VisitUnop(node, UseInfo::TruncatingFloat64(),
-                         MachineRepresentation::kFloat64);
-      case IrOpcode::kFloat64Equal:
-      case IrOpcode::kFloat64LessThan:
-      case IrOpcode::kFloat64LessThanOrEqual:
-        return VisitFloat64Cmp(node);
-      case IrOpcode::kFloat64ExtractLowWord32:
-      case IrOpcode::kFloat64ExtractHighWord32:
-        return VisitUnop(node, UseInfo::TruncatingFloat64(),
-                         MachineRepresentation::kWord32);
-      case IrOpcode::kFloat64InsertLowWord32:
-      case IrOpcode::kFloat64InsertHighWord32:
-        return VisitBinop(node, UseInfo::TruncatingFloat64(),
-                          UseInfo::TruncatingWord32(),
-                          MachineRepresentation::kFloat64);
       case IrOpcode::kNumberSilenceNaN:
         VisitUnop(node, UseInfo::TruncatingFloat64(),
                   MachineRepresentation::kFloat64);
         if (lower()) NodeProperties::ChangeOp(node, Float64Op(node));
         return;
-      case IrOpcode::kLoadStackPointer:
-      case IrOpcode::kLoadFramePointer:
-      case IrOpcode::kLoadParentFramePointer:
-        return VisitLeaf(node, MachineType::PointerRepresentation());
       case IrOpcode::kStateValues:
         return VisitStateValues(node);
       case IrOpcode::kTypeGuard: {
@@ -2563,37 +2409,55 @@
         // the sigma's type.
         MachineRepresentation output =
             GetOutputInfoForPhi(node, TypeOf(node->InputAt(0)), truncation);
-
         VisitUnop(node, UseInfo(output, truncation), output);
         if (lower()) DeferReplacement(node, node->InputAt(0));
         return;
       }
 
-      // The following opcodes are not produced before representation
-      // inference runs, so we do not have any real test coverage.
-      // Simply fail here.
-      case IrOpcode::kChangeFloat64ToInt32:
-      case IrOpcode::kChangeFloat64ToUint32:
-      case IrOpcode::kTruncateInt64ToInt32:
-      case IrOpcode::kChangeFloat32ToFloat64:
-      case IrOpcode::kCheckedInt32Add:
-      case IrOpcode::kCheckedInt32Sub:
-      case IrOpcode::kCheckedUint32ToInt32:
-      case IrOpcode::kCheckedFloat64ToInt32:
-      case IrOpcode::kCheckedTaggedToInt32:
-      case IrOpcode::kCheckedTaggedToFloat64:
-      case IrOpcode::kPlainPrimitiveToWord32:
-      case IrOpcode::kPlainPrimitiveToFloat64:
-      case IrOpcode::kLoopExit:
-      case IrOpcode::kLoopExitValue:
-      case IrOpcode::kLoopExitEffect:
-        FATAL("Representation inference: unsupported opcodes.");
-        break;
-
-      default:
+      // Operators with all inputs tagged and either no output or a tagged
+      // output have uniform handling.
+      case IrOpcode::kEnd:
+      case IrOpcode::kReturn:
+      case IrOpcode::kIfSuccess:
+      case IrOpcode::kIfException:
+      case IrOpcode::kIfTrue:
+      case IrOpcode::kIfFalse:
+      case IrOpcode::kDeoptimize:
+      case IrOpcode::kEffectPhi:
+      case IrOpcode::kTerminate:
+      case IrOpcode::kFrameState:
+      case IrOpcode::kCheckpoint:
+      case IrOpcode::kLoop:
+      case IrOpcode::kMerge:
+      case IrOpcode::kThrow:
+      case IrOpcode::kBeginRegion:
+      case IrOpcode::kFinishRegion:
+      case IrOpcode::kOsrValue:
+      case IrOpcode::kProjection:
+      case IrOpcode::kObjectState:
+// All JavaScript operators except JSToNumber and JSToBoolean have uniform
+// handling.
+#define OPCODE_CASE(name) case IrOpcode::k##name:
+        JS_SIMPLE_BINOP_LIST(OPCODE_CASE)
+        JS_OTHER_UNOP_LIST(OPCODE_CASE)
+        JS_OBJECT_OP_LIST(OPCODE_CASE)
+        JS_CONTEXT_OP_LIST(OPCODE_CASE)
+        JS_OTHER_OP_LIST(OPCODE_CASE)
+#undef OPCODE_CASE
+      case IrOpcode::kJSToInteger:
+      case IrOpcode::kJSToLength:
+      case IrOpcode::kJSToName:
+      case IrOpcode::kJSToObject:
+      case IrOpcode::kJSToString:
         VisitInputs(node);
         // Assume the output is tagged.
         return SetOutput(node, MachineRepresentation::kTagged);
+
+      default:
+        V8_Fatal(
+            __FILE__, __LINE__,
+            "Representation inference: unsupported opcode %i (%s), node #%i\n.",
+            node->opcode(), node->op()->mnemonic(), node->id());
+        break;
     }
     UNREACHABLE();
   }
@@ -2977,7 +2841,7 @@
       graph()->NewNode(
           common()->Select(MachineRepresentation::kFloat64),
           graph()->NewNode(machine()->Float64LessThan(), zero, input), one,
-          zero));
+          input));
 }
 
 Node* SimplifiedLowering::Int32Abs(Node* const node) {
@@ -3307,6 +3171,34 @@
   NodeProperties::ChangeOp(node, common()->Call(desc));
 }
 
+void SimplifiedLowering::DoIntegral32ToBit(Node* node) {
+  Node* const input = node->InputAt(0);
+  Node* const zero = jsgraph()->Int32Constant(0);
+  Operator const* const op = machine()->Word32Equal();
+
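+  // Lower to Word32Equal(Word32Equal(input, 0), 0), i.e. "input != 0".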
+  node->ReplaceInput(0, graph()->NewNode(op, input, zero));
+  node->AppendInput(graph()->zone(), zero);
+  NodeProperties::ChangeOp(node, op);
+}
+
+void SimplifiedLowering::DoOrderedNumberToBit(Node* node) {
+  Node* const input = node->InputAt(0);
+
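+  // The input is known not to be NaN, so Float64Equal suffices:
+  // Word32Equal(Float64Equal(input, 0.0), 0), i.e. "input != 0".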
+  node->ReplaceInput(0, graph()->NewNode(machine()->Float64Equal(), input,
+                                         jsgraph()->Float64Constant(0.0)));
+  node->AppendInput(graph()->zone(), jsgraph()->Int32Constant(0));
+  NodeProperties::ChangeOp(node, machine()->Word32Equal());
+}
+
+void SimplifiedLowering::DoNumberToBit(Node* node) {
+  Node* const input = node->InputAt(0);
+
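+  // Lower to Float64LessThan(0.0, Float64Abs(input)); this yields false for
+  // +0, -0 and NaN (comparisons involving NaN are false) and true otherwise.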
+  node->ReplaceInput(0, jsgraph()->Float64Constant(0.0));
+  node->AppendInput(graph()->zone(),
+                    graph()->NewNode(machine()->Float64Abs(), input));
+  NodeProperties::ChangeOp(node, machine()->Float64LessThan());
+}
+
 Node* SimplifiedLowering::ToNumberCode() {
   if (!to_number_code_.is_set()) {
     Callable callable = CodeFactory::ToNumber(isolate());
diff --git a/src/compiler/simplified-lowering.h b/src/compiler/simplified-lowering.h
index 18c7331..9e2a499 100644
--- a/src/compiler/simplified-lowering.h
+++ b/src/compiler/simplified-lowering.h
@@ -12,17 +12,13 @@
 
 namespace v8 {
 namespace internal {
-
-// Forward declarations.
-class TypeCache;
-
-
 namespace compiler {
 
 // Forward declarations.
 class RepresentationChanger;
 class RepresentationSelector;
 class SourcePositionTable;
+class TypeCache;
 
 class SimplifiedLowering final {
  public:
@@ -45,6 +41,9 @@
   void DoStoreBuffer(Node* node);
   void DoShift(Node* node, Operator const* op, Type* rhs_type);
   void DoStringToNumber(Node* node);
+  void DoIntegral32ToBit(Node* node);
+  void DoOrderedNumberToBit(Node* node);
+  void DoNumberToBit(Node* node);
 
  private:
   JSGraph* const jsgraph_;
diff --git a/src/compiler/simplified-operator-reducer.cc b/src/compiler/simplified-operator-reducer.cc
index d8bd1e0..d172adc 100644
--- a/src/compiler/simplified-operator-reducer.cc
+++ b/src/compiler/simplified-operator-reducer.cc
@@ -9,8 +9,8 @@
 #include "src/compiler/node-matchers.h"
 #include "src/compiler/operator-properties.h"
 #include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
 #include "src/conversions-inl.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -126,6 +126,14 @@
       }
       break;
     }
+    case IrOpcode::kCheckedTaggedSignedToInt32: {
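+      // ConvertTaggedHoleToUndefined only turns the hole into undefined, and
+      // neither value passes the Smi check, so the conversion can be skipped.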
+      NodeMatcher m(node->InputAt(0));
+      if (m.IsConvertTaggedHoleToUndefined()) {
+        node->ReplaceInput(0, m.InputAt(0));
+        return Changed(node);
+      }
+      break;
+    }
     case IrOpcode::kCheckIf: {
       HeapObjectMatcher m(node->InputAt(0));
       if (m.Is(factory()->true_value())) {
@@ -142,22 +150,30 @@
       }
       break;
     }
-    case IrOpcode::kCheckTaggedPointer: {
+    case IrOpcode::kCheckHeapObject: {
       Node* const input = node->InputAt(0);
       if (DecideObjectIsSmi(input) == Decision::kFalse) {
         ReplaceWithValue(node, input);
         return Replace(input);
       }
+      NodeMatcher m(input);
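+      // A CheckHeapObject on the result of another CheckHeapObject is
+      // redundant.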
+      if (m.IsCheckHeapObject()) {
+        ReplaceWithValue(node, input);
+        return Replace(input);
+      }
       break;
     }
-    case IrOpcode::kCheckTaggedSigned: {
+    case IrOpcode::kCheckSmi: {
       Node* const input = node->InputAt(0);
       if (DecideObjectIsSmi(input) == Decision::kTrue) {
         ReplaceWithValue(node, input);
         return Replace(input);
       }
       NodeMatcher m(input);
-      if (m.IsConvertTaggedHoleToUndefined()) {
+      if (m.IsCheckSmi()) {
+        ReplaceWithValue(node, input);
+        return Replace(input);
+      } else if (m.IsConvertTaggedHoleToUndefined()) {
         node->ReplaceInput(0, m.InputAt(0));
         return Changed(node);
       }
diff --git a/src/compiler/simplified-operator.cc b/src/compiler/simplified-operator.cc
index cf0c3de..400db97 100644
--- a/src/compiler/simplified-operator.cc
+++ b/src/compiler/simplified-operator.cc
@@ -7,7 +7,7 @@
 #include "src/base/lazy-instance.h"
 #include "src/compiler/opcodes.h"
 #include "src/compiler/operator.h"
-#include "src/types.h"
+#include "src/compiler/types.h"
 
 namespace v8 {
 namespace internal {
@@ -208,8 +208,7 @@
 }
 
 CheckForMinusZeroMode CheckMinusZeroModeOf(const Operator* op) {
-  DCHECK(op->opcode() == IrOpcode::kChangeFloat64ToTagged ||
-         op->opcode() == IrOpcode::kCheckedInt32Mul ||
+  DCHECK(op->opcode() == IrOpcode::kCheckedInt32Mul ||
          op->opcode() == IrOpcode::kCheckedFloat64ToInt32 ||
          op->opcode() == IrOpcode::kCheckedTaggedToInt32);
   return OpParameter<CheckForMinusZeroMode>(op);
@@ -332,6 +331,16 @@
   return OpParameter<NumberOperationHint>(op);
 }
 
+PretenureFlag PretenureFlagOf(const Operator* op) {
+  DCHECK_EQ(IrOpcode::kAllocate, op->opcode());
+  return OpParameter<PretenureFlag>(op);
+}
+
+UnicodeEncoding UnicodeEncodingOf(const Operator* op) {
+  DCHECK(op->opcode() == IrOpcode::kStringFromCodePoint);
+  return OpParameter<UnicodeEncoding>(op);
+}
+
 #define PURE_OP_LIST(V)                                          \
   V(BooleanNot, Operator::kNoProperties, 1, 0)                   \
   V(NumberEqual, Operator::kCommutative, 2, 0)                   \
@@ -381,6 +390,7 @@
   V(NumberTan, Operator::kNoProperties, 1, 0)                    \
   V(NumberTanh, Operator::kNoProperties, 1, 0)                   \
   V(NumberTrunc, Operator::kNoProperties, 1, 0)                  \
+  V(NumberToBoolean, Operator::kNoProperties, 1, 0)              \
   V(NumberToInt32, Operator::kNoProperties, 1, 0)                \
   V(NumberToUint32, Operator::kNoProperties, 1, 0)               \
   V(NumberSilenceNaN, Operator::kNoProperties, 1, 0)             \
@@ -393,11 +403,13 @@
   V(ChangeTaggedToInt32, Operator::kNoProperties, 1, 0)          \
   V(ChangeTaggedToUint32, Operator::kNoProperties, 1, 0)         \
   V(ChangeTaggedToFloat64, Operator::kNoProperties, 1, 0)        \
+  V(ChangeFloat64ToTagged, Operator::kNoProperties, 1, 0)        \
   V(ChangeInt31ToTaggedSigned, Operator::kNoProperties, 1, 0)    \
   V(ChangeInt32ToTagged, Operator::kNoProperties, 1, 0)          \
   V(ChangeUint32ToTagged, Operator::kNoProperties, 1, 0)         \
   V(ChangeTaggedToBit, Operator::kNoProperties, 1, 0)            \
   V(ChangeBitToTagged, Operator::kNoProperties, 1, 0)            \
+  V(TruncateTaggedToBit, Operator::kNoProperties, 1, 0)          \
   V(TruncateTaggedToWord32, Operator::kNoProperties, 1, 0)       \
   V(TruncateTaggedToFloat64, Operator::kNoProperties, 1, 0)      \
   V(ObjectIsCallable, Operator::kNoProperties, 1, 0)             \
@@ -418,22 +430,25 @@
   V(SpeculativeNumberLessThan)                \
   V(SpeculativeNumberLessThanOrEqual)
 
-#define CHECKED_OP_LIST(V)            \
-  V(CheckBounds, 2, 1)                \
-  V(CheckIf, 1, 0)                    \
-  V(CheckNumber, 1, 1)                \
-  V(CheckString, 1, 1)                \
-  V(CheckTaggedHole, 1, 1)            \
-  V(CheckTaggedPointer, 1, 1)         \
-  V(CheckTaggedSigned, 1, 1)          \
-  V(CheckedInt32Add, 2, 1)            \
-  V(CheckedInt32Sub, 2, 1)            \
-  V(CheckedInt32Div, 2, 1)            \
-  V(CheckedInt32Mod, 2, 1)            \
-  V(CheckedUint32Div, 2, 1)           \
-  V(CheckedUint32Mod, 2, 1)           \
-  V(CheckedUint32ToInt32, 1, 1)       \
-  V(CheckedTaggedSignedToInt32, 1, 1) \
+#define CHECKED_OP_LIST(V)             \
+  V(CheckBounds, 2, 1)                 \
+  V(CheckHeapObject, 1, 1)             \
+  V(CheckIf, 1, 0)                     \
+  V(CheckNumber, 1, 1)                 \
+  V(CheckSmi, 1, 1)                    \
+  V(CheckString, 1, 1)                 \
+  V(CheckTaggedHole, 1, 1)             \
+  V(CheckedInt32Add, 2, 1)             \
+  V(CheckedInt32Sub, 2, 1)             \
+  V(CheckedInt32Div, 2, 1)             \
+  V(CheckedInt32Mod, 2, 1)             \
+  V(CheckedUint32Div, 2, 1)            \
+  V(CheckedUint32Mod, 2, 1)            \
+  V(CheckedUint32ToInt32, 1, 1)        \
+  V(CheckedUint32ToTaggedSigned, 1, 1) \
+  V(CheckedInt32ToTaggedSigned, 1, 1)  \
+  V(CheckedTaggedSignedToInt32, 1, 1)  \
+  V(CheckedTaggedToTaggedSigned, 1, 1) \
   V(CheckedTruncateTaggedToWord32, 1, 1)
 
 struct SimplifiedOperatorGlobalCache final {
@@ -458,18 +473,24 @@
   CHECKED_OP_LIST(CHECKED)
 #undef CHECKED
 
-  template <CheckForMinusZeroMode kMode>
-  struct ChangeFloat64ToTaggedOperator final
-      : public Operator1<CheckForMinusZeroMode> {
-    ChangeFloat64ToTaggedOperator()
-        : Operator1<CheckForMinusZeroMode>(
-              IrOpcode::kChangeFloat64ToTagged, Operator::kPure,
-              "ChangeFloat64ToTagged", 1, 0, 0, 1, 0, 0, kMode) {}
+  template <UnicodeEncoding kEncoding>
+  struct StringFromCodePointOperator final : public Operator1<UnicodeEncoding> {
+    StringFromCodePointOperator()
+        : Operator1<UnicodeEncoding>(IrOpcode::kStringFromCodePoint,
+                                     Operator::kPure, "StringFromCodePoint", 1,
+                                     0, 0, 1, 0, 0, kEncoding) {}
   };
-  ChangeFloat64ToTaggedOperator<CheckForMinusZeroMode::kCheckForMinusZero>
-      kChangeFloat64ToTaggedCheckForMinusZeroOperator;
-  ChangeFloat64ToTaggedOperator<CheckForMinusZeroMode::kDontCheckForMinusZero>
-      kChangeFloat64ToTaggedDontCheckForMinusZeroOperator;
+  StringFromCodePointOperator<UnicodeEncoding::UTF16>
+      kStringFromCodePointOperatorUTF16;
+  StringFromCodePointOperator<UnicodeEncoding::UTF32>
+      kStringFromCodePointOperatorUTF32;
+
+  struct ArrayBufferWasNeuteredOperator final : public Operator {
+    ArrayBufferWasNeuteredOperator()
+        : Operator(IrOpcode::kArrayBufferWasNeutered, Operator::kEliminatable,
+                   "ArrayBufferWasNeutered", 1, 1, 1, 1, 1, 0) {}
+  };
+  ArrayBufferWasNeuteredOperator kArrayBufferWasNeutered;
 
   template <CheckForMinusZeroMode kMode>
   struct CheckedInt32MulOperator final
@@ -614,20 +635,9 @@
   const Operator* SimplifiedOperatorBuilder::Name() { return &cache_.k##Name; }
 PURE_OP_LIST(GET_FROM_CACHE)
 CHECKED_OP_LIST(GET_FROM_CACHE)
+GET_FROM_CACHE(ArrayBufferWasNeutered)
 #undef GET_FROM_CACHE
 
-const Operator* SimplifiedOperatorBuilder::ChangeFloat64ToTagged(
-    CheckForMinusZeroMode mode) {
-  switch (mode) {
-    case CheckForMinusZeroMode::kCheckForMinusZero:
-      return &cache_.kChangeFloat64ToTaggedCheckForMinusZeroOperator;
-    case CheckForMinusZeroMode::kDontCheckForMinusZero:
-      return &cache_.kChangeFloat64ToTaggedDontCheckForMinusZeroOperator;
-  }
-  UNREACHABLE();
-  return nullptr;
-}
-
 const Operator* SimplifiedOperatorBuilder::CheckedInt32Mul(
     CheckForMinusZeroMode mode) {
   switch (mode) {
@@ -761,6 +771,18 @@
   return nullptr;
 }
 
+const Operator* SimplifiedOperatorBuilder::StringFromCodePoint(
+    UnicodeEncoding encoding) {
+  switch (encoding) {
+    case UnicodeEncoding::UTF16:
+      return &cache_.kStringFromCodePointOperatorUTF16;
+    case UnicodeEncoding::UTF32:
+      return &cache_.kStringFromCodePointOperatorUTF32;
+  }
+  UNREACHABLE();
+  return nullptr;
+}
+
 #define SPECULATIVE_NUMBER_BINOP(Name)                                        \
   const Operator* SimplifiedOperatorBuilder::Name(NumberOperationHint hint) { \
     switch (hint) {                                                           \
diff --git a/src/compiler/simplified-operator.h b/src/compiler/simplified-operator.h
index 5e7fa75..a904391 100644
--- a/src/compiler/simplified-operator.h
+++ b/src/compiler/simplified-operator.h
@@ -8,6 +8,7 @@
 #include <iosfwd>
 
 #include "src/compiler/operator.h"
+#include "src/compiler/types.h"
 #include "src/handles.h"
 #include "src/machine-type.h"
 #include "src/objects.h"
@@ -16,10 +17,8 @@
 namespace internal {
 
 // Forward declarations.
-class Type;
 class Zone;
 
-
 namespace compiler {
 
 // Forward declarations.
@@ -184,6 +183,10 @@
 NumberOperationHint NumberOperationHintOf(const Operator* op)
     WARN_UNUSED_RESULT;
 
+PretenureFlag PretenureFlagOf(const Operator* op) WARN_UNUSED_RESULT;
+
+UnicodeEncoding UnicodeEncodingOf(const Operator*) WARN_UNUSED_RESULT;
+
 // Interface for building simplified operators, which represent the
 // medium-level operations of V8, including adding numbers, allocating objects,
 // indexing into objects and arrays, etc.
@@ -259,6 +262,7 @@
   const Operator* NumberTan();
   const Operator* NumberTanh();
   const Operator* NumberTrunc();
+  const Operator* NumberToBoolean();
   const Operator* NumberToInt32();
   const Operator* NumberToUint32();
 
@@ -287,6 +291,7 @@
   const Operator* StringLessThanOrEqual();
   const Operator* StringCharCodeAt();
   const Operator* StringFromCharCode();
+  const Operator* StringFromCodePoint(UnicodeEncoding encoding);
 
   const Operator* PlainPrimitiveToNumber();
   const Operator* PlainPrimitiveToWord32();
@@ -299,19 +304,21 @@
   const Operator* ChangeInt31ToTaggedSigned();
   const Operator* ChangeInt32ToTagged();
   const Operator* ChangeUint32ToTagged();
-  const Operator* ChangeFloat64ToTagged(CheckForMinusZeroMode);
+  const Operator* ChangeFloat64ToTagged();
   const Operator* ChangeTaggedToBit();
   const Operator* ChangeBitToTagged();
   const Operator* TruncateTaggedToWord32();
   const Operator* TruncateTaggedToFloat64();
+  const Operator* TruncateTaggedToBit();
 
   const Operator* CheckIf();
   const Operator* CheckBounds();
   const Operator* CheckMaps(int map_input_count);
+
+  const Operator* CheckHeapObject();
   const Operator* CheckNumber();
+  const Operator* CheckSmi();
   const Operator* CheckString();
-  const Operator* CheckTaggedPointer();
-  const Operator* CheckTaggedSigned();
 
   const Operator* CheckedInt32Add();
   const Operator* CheckedInt32Sub();
@@ -320,11 +327,14 @@
   const Operator* CheckedUint32Div();
   const Operator* CheckedUint32Mod();
   const Operator* CheckedInt32Mul(CheckForMinusZeroMode);
+  const Operator* CheckedInt32ToTaggedSigned();
   const Operator* CheckedUint32ToInt32();
+  const Operator* CheckedUint32ToTaggedSigned();
   const Operator* CheckedFloat64ToInt32(CheckForMinusZeroMode);
   const Operator* CheckedTaggedSignedToInt32();
   const Operator* CheckedTaggedToInt32(CheckForMinusZeroMode);
   const Operator* CheckedTaggedToFloat64(CheckTaggedInputMode);
+  const Operator* CheckedTaggedToTaggedSigned();
   const Operator* CheckedTruncateTaggedToWord32();
 
   const Operator* CheckFloat64Hole(CheckFloat64HoleMode);
@@ -338,6 +348,9 @@
   const Operator* ObjectIsString();
   const Operator* ObjectIsUndetectable();
 
+  // array-buffer-was-neutered buffer
+  const Operator* ArrayBufferWasNeutered();
+
   // ensure-writable-fast-elements object, elements
   const Operator* EnsureWritableFastElements();
 
diff --git a/src/compiler/state-values-utils.h b/src/compiler/state-values-utils.h
index 79550bd..704f5f6 100644
--- a/src/compiler/state-values-utils.h
+++ b/src/compiler/state-values-utils.h
@@ -55,7 +55,7 @@
   Zone* zone() { return graph()->zone(); }
 
   JSGraph* js_graph_;
-  ZoneHashMap hash_map_;
+  CustomMatcherZoneHashMap hash_map_;
   ZoneVector<NodeVector*> working_space_;  // One working space per level.
   Node* empty_state_values_;
 };
diff --git a/src/compiler/store-store-elimination.cc b/src/compiler/store-store-elimination.cc
index 98904b0..196cb0d 100644
--- a/src/compiler/store-store-elimination.cc
+++ b/src/compiler/store-store-elimination.cc
@@ -72,9 +72,7 @@
 
 namespace {
 
-// 16 bits was chosen fairly arbitrarily; it seems enough now. 8 bits is too
-// few.
-typedef uint16_t StoreOffset;
+typedef uint32_t StoreOffset;
 
 struct UnobservableStore {
   NodeId id_;
@@ -171,11 +169,11 @@
   const UnobservablesSet unobservables_visited_empty_;
 };
 
-// To safely cast an offset from a FieldAccess, which has a wider range
-// (namely int).
+// To safely cast an offset from a FieldAccess, which has a potentially wider
+// range (namely int).
 StoreOffset ToOffset(int offset) {
-  CHECK(0 <= offset && offset < (1 << 8 * sizeof(StoreOffset)));
-  return (StoreOffset)offset;
+  CHECK(0 <= offset);
+  return static_cast<StoreOffset>(offset);
 }
 
 StoreOffset ToOffset(const FieldAccess& access) {
@@ -405,11 +403,9 @@
     // Mark effect inputs for visiting.
     for (int i = 0; i < node->op()->EffectInputCount(); i++) {
       Node* input = NodeProperties::GetEffectInput(node, i);
-      if (!HasBeenVisited(input)) {
-        TRACE("    marking #%d:%s for revisit", input->id(),
-              input->op()->mnemonic());
-        MarkForRevisit(input);
-      }
+      TRACE("    marking #%d:%s for revisit", input->id(),
+            input->op()->mnemonic());
+      MarkForRevisit(input);
     }
   }
 }
diff --git a/src/compiler/store-store-elimination.h b/src/compiler/store-store-elimination.h
index 07ae2c2..cda7591 100644
--- a/src/compiler/store-store-elimination.h
+++ b/src/compiler/store-store-elimination.h
@@ -7,7 +7,7 @@
 
 #include "src/compiler/common-operator.h"
 #include "src/compiler/js-graph.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/compiler/type-cache.cc b/src/compiler/type-cache.cc
new file mode 100644
index 0000000..cd80dc3
--- /dev/null
+++ b/src/compiler/type-cache.cc
@@ -0,0 +1,24 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/type-cache.h"
+
+#include "src/base/lazy-instance.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+namespace {
+
+base::LazyInstance<TypeCache>::type kCache = LAZY_INSTANCE_INITIALIZER;
+
+}  // namespace
+
+// static
+TypeCache const& TypeCache::Get() { return kCache.Get(); }
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
diff --git a/src/compiler/type-cache.h b/src/compiler/type-cache.h
new file mode 100644
index 0000000..aa51dac
--- /dev/null
+++ b/src/compiler/type-cache.h
@@ -0,0 +1,157 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_TYPE_CACHE_H_
+#define V8_COMPILER_TYPE_CACHE_H_
+
+#include "src/compiler/types.h"
+#include "src/date.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+class TypeCache final {
+ private:
+  // This has to be first for the initialization magic to work.
+  AccountingAllocator allocator;
+  Zone zone_;
+
+ public:
+  static TypeCache const& Get();
+
+  TypeCache() : zone_(&allocator) {}
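+
+  // Typical usage (sketch): callers fetch the shared instance once and read
+  // the precomputed type constants below, e.g.
+  //   TypeCache const& cache = TypeCache::Get();
+  //   Type* int8_range = cache.kInt8;  // Range(-128, 127)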
+
+  Type* const kInt8 = CreateRange<int8_t>();
+  Type* const kUint8 = CreateRange<uint8_t>();
+  Type* const kUint8Clamped = kUint8;
+  Type* const kInt16 = CreateRange<int16_t>();
+  Type* const kUint16 = CreateRange<uint16_t>();
+  Type* const kInt32 = Type::Signed32();
+  Type* const kUint32 = Type::Unsigned32();
+  Type* const kFloat32 = Type::Number();
+  Type* const kFloat64 = Type::Number();
+
+  Type* const kSmi = Type::SignedSmall();
+  Type* const kHoleySmi = Type::Union(kSmi, Type::Hole(), zone());
+  Type* const kHeapNumber = Type::Number();
+
+  Type* const kSingletonZero = CreateRange(0.0, 0.0);
+  Type* const kSingletonOne = CreateRange(1.0, 1.0);
+  Type* const kSingletonTen = CreateRange(10.0, 10.0);
+  Type* const kSingletonMinusOne = CreateRange(-1.0, -1.0);
+  Type* const kZeroOrUndefined =
+      Type::Union(kSingletonZero, Type::Undefined(), zone());
+  Type* const kTenOrUndefined =
+      Type::Union(kSingletonTen, Type::Undefined(), zone());
+  Type* const kMinusOneOrZero = CreateRange(-1.0, 0.0);
+  Type* const kMinusOneToOneOrMinusZeroOrNaN = Type::Union(
+      Type::Union(CreateRange(-1.0, 1.0), Type::MinusZero(), zone()),
+      Type::NaN(), zone());
+  Type* const kZeroOrOne = CreateRange(0.0, 1.0);
+  Type* const kZeroOrOneOrNaN = Type::Union(kZeroOrOne, Type::NaN(), zone());
+  Type* const kZeroToThirtyOne = CreateRange(0.0, 31.0);
+  Type* const kZeroToThirtyTwo = CreateRange(0.0, 32.0);
+  Type* const kZeroish =
+      Type::Union(kSingletonZero, Type::MinusZeroOrNaN(), zone());
+  Type* const kInteger = CreateRange(-V8_INFINITY, V8_INFINITY);
+  Type* const kIntegerOrMinusZero =
+      Type::Union(kInteger, Type::MinusZero(), zone());
+  Type* const kIntegerOrMinusZeroOrNaN =
+      Type::Union(kIntegerOrMinusZero, Type::NaN(), zone());
+  Type* const kPositiveInteger = CreateRange(0.0, V8_INFINITY);
+  Type* const kPositiveIntegerOrMinusZero =
+      Type::Union(kPositiveInteger, Type::MinusZero(), zone());
+  Type* const kPositiveIntegerOrMinusZeroOrNaN =
+      Type::Union(kPositiveIntegerOrMinusZero, Type::NaN(), zone());
+
+  Type* const kAdditiveSafeInteger =
+      CreateRange(-4503599627370496.0, 4503599627370496.0);
+  Type* const kSafeInteger = CreateRange(-kMaxSafeInteger, kMaxSafeInteger);
+  Type* const kAdditiveSafeIntegerOrMinusZero =
+      Type::Union(kAdditiveSafeInteger, Type::MinusZero(), zone());
+  Type* const kSafeIntegerOrMinusZero =
+      Type::Union(kSafeInteger, Type::MinusZero(), zone());
+  Type* const kPositiveSafeInteger = CreateRange(0.0, kMaxSafeInteger);
+
+  // The FixedArray::length property always contains a smi in the range
+  // [0, FixedArray::kMaxLength].
+  Type* const kFixedArrayLengthType = CreateRange(0.0, FixedArray::kMaxLength);
+
+  // The FixedDoubleArray::length property always contains a smi in the range
+  // [0, FixedDoubleArray::kMaxLength].
+  Type* const kFixedDoubleArrayLengthType =
+      CreateRange(0.0, FixedDoubleArray::kMaxLength);
+
+  // The JSArray::length property always contains a tagged number in the range
+  // [0, kMaxUInt32].
+  Type* const kJSArrayLengthType = Type::Unsigned32();
+
+  // The JSTypedArray::length property always contains a tagged number in the
+  // range [0, kMaxSmiValue].
+  Type* const kJSTypedArrayLengthType = Type::UnsignedSmall();
+
+  // The String::length property always contains a smi in the range
+  // [0, String::kMaxLength].
+  Type* const kStringLengthType = CreateRange(0.0, String::kMaxLength);
+
+  // The JSDate::day property always contains a tagged number in the range
+  // [1, 31] or NaN.
+  Type* const kJSDateDayType =
+      Type::Union(CreateRange(1, 31.0), Type::NaN(), zone());
+
+  // The JSDate::hour property always contains a tagged number in the range
+  // [0, 23] or NaN.
+  Type* const kJSDateHourType =
+      Type::Union(CreateRange(0, 23.0), Type::NaN(), zone());
+
+  // The JSDate::minute property always contains a tagged number in the range
+  // [0, 59] or NaN.
+  Type* const kJSDateMinuteType =
+      Type::Union(CreateRange(0, 59.0), Type::NaN(), zone());
+
+  // The JSDate::month property always contains a tagged number in the range
+  // [0, 11] or NaN.
+  Type* const kJSDateMonthType =
+      Type::Union(CreateRange(0, 11.0), Type::NaN(), zone());
+
+  // The JSDate::second property always contains a tagged number in the range
+  // [0, 59] or NaN.
+  Type* const kJSDateSecondType = kJSDateMinuteType;
+
+  // The JSDate::value property always contains a tagged number in the range
+  // [-kMaxTimeInMs, kMaxTimeInMs] or NaN.
+  Type* const kJSDateValueType = Type::Union(
+      CreateRange(-DateCache::kMaxTimeInMs, DateCache::kMaxTimeInMs),
+      Type::NaN(), zone());
+
+  // The JSDate::weekday property always contains a tagged number in the range
+  // [0, 6] or NaN.
+  Type* const kJSDateWeekdayType =
+      Type::Union(CreateRange(0, 6.0), Type::NaN(), zone());
+
+  // The JSDate::year property always contains a tagged number in the signed
+  // small range or NaN.
+  Type* const kJSDateYearType =
+      Type::Union(Type::SignedSmall(), Type::NaN(), zone());
+
+ private:
+  template <typename T>
+  Type* CreateRange() {
+    return CreateRange(std::numeric_limits<T>::min(),
+                       std::numeric_limits<T>::max());
+  }
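+  // For example, CreateRange<uint8_t>() above yields Type::Range(0, 255,
+  // zone()).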
+
+  Type* CreateRange(double min, double max) {
+    return Type::Range(min, max, zone());
+  }
+
+  Zone* zone() { return &zone_; }
+};
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_TYPE_CACHE_H_
diff --git a/src/compiler/type-hint-analyzer.cc b/src/compiler/type-hint-analyzer.cc
index 8e7a0f3..a668a48 100644
--- a/src/compiler/type-hint-analyzer.cc
+++ b/src/compiler/type-hint-analyzer.cc
@@ -6,8 +6,8 @@
 
 #include "src/assembler.h"
 #include "src/code-stubs.h"
-#include "src/compiler/type-hints.h"
 #include "src/ic/ic-state.h"
+#include "src/type-hints.h"
 
 namespace v8 {
 namespace internal {
@@ -15,17 +15,21 @@
 
 namespace {
 
-BinaryOperationHint ToBinaryOperationHint(BinaryOpICState::Kind kind) {
+BinaryOperationHint ToBinaryOperationHint(Token::Value op,
+                                          BinaryOpICState::Kind kind) {
   switch (kind) {
     case BinaryOpICState::NONE:
       return BinaryOperationHint::kNone;
     case BinaryOpICState::SMI:
       return BinaryOperationHint::kSignedSmall;
     case BinaryOpICState::INT32:
-      return BinaryOperationHint::kSigned32;
+      return (Token::IsTruncatingBinaryOp(op) && SmiValuesAre31Bits())
+                 ? BinaryOperationHint::kNumberOrOddball
+                 : BinaryOperationHint::kSigned32;
     case BinaryOpICState::NUMBER:
       return BinaryOperationHint::kNumberOrOddball;
     case BinaryOpICState::STRING:
+      return BinaryOperationHint::kString;
     case BinaryOpICState::GENERIC:
       return BinaryOperationHint::kAny;
   }
@@ -66,7 +70,7 @@
   Handle<Code> code = i->second;
   DCHECK_EQ(Code::BINARY_OP_IC, code->kind());
   BinaryOpICState state(code->GetIsolate(), code->extra_ic_state());
-  *hint = ToBinaryOperationHint(state.kind());
+  *hint = ToBinaryOperationHint(state.op(), state.kind());
   return true;
 }
 
@@ -132,20 +136,6 @@
   return new (zone()) TypeHintAnalysis(infos, zone());
 }
 
-// Helper function to transform the feedback to BinaryOperationHint.
-BinaryOperationHint BinaryOperationHintFromFeedback(int type_feedback) {
-  switch (type_feedback) {
-    case BinaryOperationFeedback::kSignedSmall:
-      return BinaryOperationHint::kSignedSmall;
-    case BinaryOperationFeedback::kNumber:
-      return BinaryOperationHint::kNumberOrOddball;
-    case BinaryOperationFeedback::kAny:
-    default:
-      return BinaryOperationHint::kAny;
-  }
-  UNREACHABLE();
-  return BinaryOperationHint::kNone;
-}
 
 }  // namespace compiler
 }  // namespace internal
diff --git a/src/compiler/type-hint-analyzer.h b/src/compiler/type-hint-analyzer.h
index e48938a..354f894 100644
--- a/src/compiler/type-hint-analyzer.h
+++ b/src/compiler/type-hint-analyzer.h
@@ -5,9 +5,9 @@
 #ifndef V8_COMPILER_TYPE_HINT_ANALYZER_H_
 #define V8_COMPILER_TYPE_HINT_ANALYZER_H_
 
-#include "src/compiler/type-hints.h"
 #include "src/handles.h"
-#include "src/zone-containers.h"
+#include "src/type-hints.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -50,8 +50,6 @@
   DISALLOW_COPY_AND_ASSIGN(TypeHintAnalyzer);
 };
 
-BinaryOperationHint BinaryOperationHintFromFeedback(int type_feedback);
-
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
diff --git a/src/compiler/type-hints.cc b/src/compiler/type-hints.cc
deleted file mode 100644
index a07a870..0000000
--- a/src/compiler/type-hints.cc
+++ /dev/null
@@ -1,91 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/compiler/type-hints.h"
-
-namespace v8 {
-namespace internal {
-namespace compiler {
-
-std::ostream& operator<<(std::ostream& os, BinaryOperationHint hint) {
-  switch (hint) {
-    case BinaryOperationHint::kNone:
-      return os << "None";
-    case BinaryOperationHint::kSignedSmall:
-      return os << "SignedSmall";
-    case BinaryOperationHint::kSigned32:
-      return os << "Signed32";
-    case BinaryOperationHint::kNumberOrOddball:
-      return os << "NumberOrOddball";
-    case BinaryOperationHint::kAny:
-      return os << "Any";
-  }
-  UNREACHABLE();
-  return os;
-}
-
-std::ostream& operator<<(std::ostream& os, CompareOperationHint hint) {
-  switch (hint) {
-    case CompareOperationHint::kNone:
-      return os << "None";
-    case CompareOperationHint::kSignedSmall:
-      return os << "SignedSmall";
-    case CompareOperationHint::kNumber:
-      return os << "Number";
-    case CompareOperationHint::kNumberOrOddball:
-      return os << "NumberOrOddball";
-    case CompareOperationHint::kAny:
-      return os << "Any";
-  }
-  UNREACHABLE();
-  return os;
-}
-
-std::ostream& operator<<(std::ostream& os, ToBooleanHint hint) {
-  switch (hint) {
-    case ToBooleanHint::kNone:
-      return os << "None";
-    case ToBooleanHint::kUndefined:
-      return os << "Undefined";
-    case ToBooleanHint::kBoolean:
-      return os << "Boolean";
-    case ToBooleanHint::kNull:
-      return os << "Null";
-    case ToBooleanHint::kSmallInteger:
-      return os << "SmallInteger";
-    case ToBooleanHint::kReceiver:
-      return os << "Receiver";
-    case ToBooleanHint::kString:
-      return os << "String";
-    case ToBooleanHint::kSymbol:
-      return os << "Symbol";
-    case ToBooleanHint::kHeapNumber:
-      return os << "HeapNumber";
-    case ToBooleanHint::kSimdValue:
-      return os << "SimdValue";
-    case ToBooleanHint::kAny:
-      return os << "Any";
-  }
-  UNREACHABLE();
-  return os;
-}
-
-std::ostream& operator<<(std::ostream& os, ToBooleanHints hints) {
-  if (hints == ToBooleanHint::kAny) return os << "Any";
-  if (hints == ToBooleanHint::kNone) return os << "None";
-  bool first = true;
-  for (ToBooleanHints::mask_type i = 0; i < sizeof(i) * 8; ++i) {
-    ToBooleanHint const hint = static_cast<ToBooleanHint>(1u << i);
-    if (hints & hint) {
-      if (!first) os << "|";
-      first = false;
-      os << hint;
-    }
-  }
-  return os;
-}
-
-}  // namespace compiler
-}  // namespace internal
-}  // namespace v8
diff --git a/src/compiler/type-hints.h b/src/compiler/type-hints.h
deleted file mode 100644
index ad94491..0000000
--- a/src/compiler/type-hints.h
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_COMPILER_TYPE_HINTS_H_
-#define V8_COMPILER_TYPE_HINTS_H_
-
-#include "src/base/flags.h"
-#include "src/utils.h"
-
-namespace v8 {
-namespace internal {
-namespace compiler {
-
-// Type hints for an binary operation.
-enum class BinaryOperationHint : uint8_t {
-  kNone,
-  kSignedSmall,
-  kSigned32,
-  kNumberOrOddball,
-  kAny
-};
-
-inline size_t hash_value(BinaryOperationHint hint) {
-  return static_cast<unsigned>(hint);
-}
-
-std::ostream& operator<<(std::ostream&, BinaryOperationHint);
-
-// Type hints for an compare operation.
-enum class CompareOperationHint : uint8_t {
-  kNone,
-  kSignedSmall,
-  kNumber,
-  kNumberOrOddball,
-  kAny
-};
-
-inline size_t hash_value(CompareOperationHint hint) {
-  return static_cast<unsigned>(hint);
-}
-
-std::ostream& operator<<(std::ostream&, CompareOperationHint);
-
-// Type hints for the ToBoolean type conversion.
-enum class ToBooleanHint : uint16_t {
-  kNone = 0u,
-  kUndefined = 1u << 0,
-  kBoolean = 1u << 1,
-  kNull = 1u << 2,
-  kSmallInteger = 1u << 3,
-  kReceiver = 1u << 4,
-  kString = 1u << 5,
-  kSymbol = 1u << 6,
-  kHeapNumber = 1u << 7,
-  kSimdValue = 1u << 8,
-  kAny = kUndefined | kBoolean | kNull | kSmallInteger | kReceiver | kString |
-         kSymbol | kHeapNumber | kSimdValue
-};
-
-std::ostream& operator<<(std::ostream&, ToBooleanHint);
-
-typedef base::Flags<ToBooleanHint, uint16_t> ToBooleanHints;
-
-std::ostream& operator<<(std::ostream&, ToBooleanHints);
-
-DEFINE_OPERATORS_FOR_FLAGS(ToBooleanHints)
-
-}  // namespace compiler
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_COMPILER_TYPE_HINTS_H_
diff --git a/src/compiler/typed-optimization.cc b/src/compiler/typed-optimization.cc
new file mode 100644
index 0000000..c5e8648
--- /dev/null
+++ b/src/compiler/typed-optimization.cc
@@ -0,0 +1,253 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/typed-optimization.h"
+
+#include "src/compilation-dependencies.h"
+#include "src/compiler/js-graph.h"
+#include "src/compiler/node-properties.h"
+#include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
+#include "src/isolate-inl.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+TypedOptimization::TypedOptimization(Editor* editor,
+                                     CompilationDependencies* dependencies,
+                                     Flags flags, JSGraph* jsgraph)
+    : AdvancedReducer(editor),
+      dependencies_(dependencies),
+      flags_(flags),
+      jsgraph_(jsgraph),
+      true_type_(Type::Constant(factory()->true_value(), graph()->zone())),
+      false_type_(Type::Constant(factory()->false_value(), graph()->zone())),
+      type_cache_(TypeCache::Get()) {}
+
+TypedOptimization::~TypedOptimization() {}
+
+Reduction TypedOptimization::Reduce(Node* node) {
+  // Check if the output type is a singleton.  In that case we already know the
+  // result value and can simply replace the node if it's eliminable.
+  if (!NodeProperties::IsConstant(node) && NodeProperties::IsTyped(node) &&
+      node->op()->HasProperty(Operator::kEliminatable)) {
+    // TODO(v8:5303): We must not eliminate FinishRegion here. This special
+    // case can be removed once we have separate operators for value and
+    // effect regions.
+    if (node->opcode() == IrOpcode::kFinishRegion) return NoChange();
+    // We can only constant-fold nodes here that are known not to cause any
+    // side effect, be it a JavaScript-observable side effect or a possible
+    // eager deoptimization exit (i.e. {node} has an operator that doesn't have
+    // the Operator::kNoDeopt property).
+    Type* upper = NodeProperties::GetType(node);
+    if (upper->IsInhabited()) {
+      if (upper->IsConstant()) {
+        Node* replacement = jsgraph()->Constant(upper->AsConstant()->Value());
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      } else if (upper->Is(Type::MinusZero())) {
+        Node* replacement = jsgraph()->Constant(factory()->minus_zero_value());
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      } else if (upper->Is(Type::NaN())) {
+        Node* replacement = jsgraph()->NaNConstant();
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      } else if (upper->Is(Type::Null())) {
+        Node* replacement = jsgraph()->NullConstant();
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      } else if (upper->Is(Type::PlainNumber()) &&
+                 upper->Min() == upper->Max()) {
+        Node* replacement = jsgraph()->Constant(upper->Min());
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      } else if (upper->Is(Type::Undefined())) {
+        Node* replacement = jsgraph()->UndefinedConstant();
+        ReplaceWithValue(node, replacement);
+        return Changed(replacement);
+      }
+    }
+  }
+  switch (node->opcode()) {
+    case IrOpcode::kCheckMaps:
+      return ReduceCheckMaps(node);
+    case IrOpcode::kCheckString:
+      return ReduceCheckString(node);
+    case IrOpcode::kLoadField:
+      return ReduceLoadField(node);
+    case IrOpcode::kNumberCeil:
+    case IrOpcode::kNumberFloor:
+    case IrOpcode::kNumberRound:
+    case IrOpcode::kNumberTrunc:
+      return ReduceNumberRoundop(node);
+    case IrOpcode::kPhi:
+      return ReducePhi(node);
+    case IrOpcode::kSelect:
+      return ReduceSelect(node);
+    default:
+      break;
+  }
+  return NoChange();
+}
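+
+// Example of the constant folding in Reduce() above: an eliminable node whose
+// type is Range(42, 42) (a PlainNumber with Min() == Max()) is replaced by the
+// constant 42, and a node typed as Type::Null() is replaced by NullConstant().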
+
+namespace {
+
+MaybeHandle<Map> GetStableMapFromObjectType(Type* object_type) {
+  if (object_type->IsConstant() &&
+      object_type->AsConstant()->Value()->IsHeapObject()) {
+    Handle<Map> object_map(
+        Handle<HeapObject>::cast(object_type->AsConstant()->Value())->map());
+    if (object_map->is_stable()) return object_map;
+  }
+  return MaybeHandle<Map>();
+}
+
+}  // namespace
+
+Reduction TypedOptimization::ReduceCheckMaps(Node* node) {
+  // The CheckMaps(o, ...map...) can be eliminated if map is stable,
+  // o has type Constant(object) and map == object->map, and either
+  //  (1) map cannot transition further, or
+  //  (2) we can add a code dependency on the stability of map
+  //      (to guard the Constant type information).
+  Node* const object = NodeProperties::GetValueInput(node, 0);
+  Type* const object_type = NodeProperties::GetType(object);
+  Node* const effect = NodeProperties::GetEffectInput(node);
+  Handle<Map> object_map;
+  if (GetStableMapFromObjectType(object_type).ToHandle(&object_map)) {
+    for (int i = 1; i < node->op()->ValueInputCount(); ++i) {
+      Node* const map = NodeProperties::GetValueInput(node, i);
+      Type* const map_type = NodeProperties::GetType(map);
+      if (map_type->IsConstant() &&
+          map_type->AsConstant()->Value().is_identical_to(object_map)) {
+        if (object_map->CanTransition()) {
+          dependencies()->AssumeMapStable(object_map);
+        }
+        return Replace(effect);
+      }
+    }
+  }
+  return NoChange();
+}
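+
+// Example for ReduceCheckMaps above: CheckMaps(o, m) where o is a constant
+// heap object whose stable map is m folds away to its effect input; if m can
+// still transition, a map-stability dependency is recorded first.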
+
+Reduction TypedOptimization::ReduceCheckString(Node* node) {
+  Node* const input = NodeProperties::GetValueInput(node, 0);
+  Type* const input_type = NodeProperties::GetType(input);
+  if (input_type->Is(Type::String())) {
+    ReplaceWithValue(node, input);
+    return Replace(input);
+  }
+  return NoChange();
+}
+
+Reduction TypedOptimization::ReduceLoadField(Node* node) {
+  Node* const object = NodeProperties::GetValueInput(node, 0);
+  Type* const object_type = NodeProperties::GetType(object);
+  FieldAccess const& access = FieldAccessOf(node->op());
+  if (access.base_is_tagged == kTaggedBase &&
+      access.offset == HeapObject::kMapOffset) {
+    // We can replace LoadField[Map](o) with map if map is stable, and
+    // o has type Constant(object) and map == object->map, and either
+    //  (1) map cannot transition further, or
+    //  (2) deoptimization is enabled and we can add a code dependency on the
+    //      stability of map (to guard the Constant type information).
+    Handle<Map> object_map;
+    if (GetStableMapFromObjectType(object_type).ToHandle(&object_map)) {
+      if (object_map->CanTransition()) {
+        if (flags() & kDeoptimizationEnabled) {
+          dependencies()->AssumeMapStable(object_map);
+        } else {
+          return NoChange();
+        }
+      }
+      Node* const value = jsgraph()->HeapConstant(object_map);
+      ReplaceWithValue(node, value);
+      return Replace(value);
+    }
+  }
+  return NoChange();
+}
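+
+// Example for ReduceLoadField above: LoadField[Map](o) on a constant object
+// with a stable map folds to a HeapConstant of that map, recording a
+// map-stability dependency when the map can still transition (and
+// deoptimization is enabled).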
+
+Reduction TypedOptimization::ReduceNumberRoundop(Node* node) {
+  Node* const input = NodeProperties::GetValueInput(node, 0);
+  Type* const input_type = NodeProperties::GetType(input);
+  if (input_type->Is(type_cache_.kIntegerOrMinusZeroOrNaN)) {
+    return Replace(input);
+  }
+  return NoChange();
+}
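+
+// Example for ReduceNumberRoundop above: NumberFloor(x) with x typed as
+// Range(0, 10) (already an integer type) reduces to x, since rounding an
+// integer-valued input is a no-op.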
+
+Reduction TypedOptimization::ReducePhi(Node* node) {
+  // Try to narrow the type of the Phi {node}, which might be more precise now
+  // after lowering based on types, i.e. a SpeculativeNumberAdd has a more
+  // precise type than the JSAdd that was in the graph when the Typer was run.
+  DCHECK_EQ(IrOpcode::kPhi, node->opcode());
+  int arity = node->op()->ValueInputCount();
+  Type* type = NodeProperties::GetType(node->InputAt(0));
+  for (int i = 1; i < arity; ++i) {
+    type = Type::Union(type, NodeProperties::GetType(node->InputAt(i)),
+                       graph()->zone());
+  }
+  Type* const node_type = NodeProperties::GetType(node);
+  if (!node_type->Is(type)) {
+    type = Type::Intersect(node_type, type, graph()->zone());
+    NodeProperties::SetType(node, type);
+    return Changed(node);
+  }
+  return NoChange();
+}
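+
+// Example for ReducePhi above: a Phi typed as Number by the Typer whose inputs
+// are all typed SignedSmall after lowering is narrowed to SignedSmall.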
+
+Reduction TypedOptimization::ReduceSelect(Node* node) {
+  DCHECK_EQ(IrOpcode::kSelect, node->opcode());
+  Node* const condition = NodeProperties::GetValueInput(node, 0);
+  Type* const condition_type = NodeProperties::GetType(condition);
+  Node* const vtrue = NodeProperties::GetValueInput(node, 1);
+  Type* const vtrue_type = NodeProperties::GetType(vtrue);
+  Node* const vfalse = NodeProperties::GetValueInput(node, 2);
+  Type* const vfalse_type = NodeProperties::GetType(vfalse);
+  if (condition_type->Is(true_type_)) {
+    // Select(condition:true, vtrue, vfalse) => vtrue
+    return Replace(vtrue);
+  }
+  if (condition_type->Is(false_type_)) {
+    // Select(condition:false, vtrue, vfalse) => vfalse
+    return Replace(vfalse);
+  }
+  if (vtrue_type->Is(true_type_) && vfalse_type->Is(false_type_)) {
+    // Select(condition, vtrue:true, vfalse:false) => condition
+    return Replace(condition);
+  }
+  if (vtrue_type->Is(false_type_) && vfalse_type->Is(true_type_)) {
+    // Select(condition, vtrue:false, vfalse:true) => BooleanNot(condition)
+    node->TrimInputCount(1);
+    NodeProperties::ChangeOp(node, simplified()->BooleanNot());
+    return Changed(node);
+  }
+  // Try to narrow the type of the Select {node}, which might be more precise
+  // now after lowering based on types.
+  Type* type = Type::Union(vtrue_type, vfalse_type, graph()->zone());
+  Type* const node_type = NodeProperties::GetType(node);
+  if (!node_type->Is(type)) {
+    type = Type::Intersect(node_type, type, graph()->zone());
+    NodeProperties::SetType(node, type);
+    return Changed(node);
+  }
+  return NoChange();
+}
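+
+// Examples for ReduceSelect above: Select(c, true, false) reduces to c, and
+// Select(c, false, true) reduces to BooleanNot(c), provided the branch values
+// carry the singleton true/false types.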
+
+Factory* TypedOptimization::factory() const { return isolate()->factory(); }
+
+Graph* TypedOptimization::graph() const { return jsgraph()->graph(); }
+
+Isolate* TypedOptimization::isolate() const { return jsgraph()->isolate(); }
+
+SimplifiedOperatorBuilder* TypedOptimization::simplified() const {
+  return jsgraph()->simplified();
+}
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
diff --git a/src/compiler/typed-optimization.h b/src/compiler/typed-optimization.h
new file mode 100644
index 0000000..54d780c
--- /dev/null
+++ b/src/compiler/typed-optimization.h
@@ -0,0 +1,73 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_TYPED_OPTIMIZATION_H_
+#define V8_COMPILER_TYPED_OPTIMIZATION_H_
+
+#include "src/base/flags.h"
+#include "src/compiler/graph-reducer.h"
+
+namespace v8 {
+namespace internal {
+
+// Forward declarations.
+class CompilationDependencies;
+class Factory;
+class Isolate;
+
+namespace compiler {
+
+// Forward declarations.
+class JSGraph;
+class SimplifiedOperatorBuilder;
+class TypeCache;
+
+class TypedOptimization final : public AdvancedReducer {
+ public:
+  // Flags that control the mode of operation.
+  enum Flag {
+    kNoFlags = 0u,
+    kDeoptimizationEnabled = 1u << 0,
+  };
+  typedef base::Flags<Flag> Flags;
+
+  TypedOptimization(Editor* editor, CompilationDependencies* dependencies,
+                    Flags flags, JSGraph* jsgraph);
+  ~TypedOptimization();
+
+  Reduction Reduce(Node* node) final;
+
+ private:
+  Reduction ReduceCheckMaps(Node* node);
+  Reduction ReduceCheckString(Node* node);
+  Reduction ReduceLoadField(Node* node);
+  Reduction ReduceNumberRoundop(Node* node);
+  Reduction ReducePhi(Node* node);
+  Reduction ReduceSelect(Node* node);
+
+  CompilationDependencies* dependencies() const { return dependencies_; }
+  Factory* factory() const;
+  Flags flags() const { return flags_; }
+  Graph* graph() const;
+  Isolate* isolate() const;
+  JSGraph* jsgraph() const { return jsgraph_; }
+  SimplifiedOperatorBuilder* simplified() const;
+
+  CompilationDependencies* const dependencies_;
+  Flags const flags_;
+  JSGraph* const jsgraph_;
+  Type* const true_type_;
+  Type* const false_type_;
+  TypeCache const& type_cache_;
+
+  DISALLOW_COPY_AND_ASSIGN(TypedOptimization);
+};
+
+DEFINE_OPERATORS_FOR_FLAGS(TypedOptimization::Flags)
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_TYPED_OPTIMIZATION_H_
diff --git a/src/compiler/typer.cc b/src/compiler/typer.cc
index 0d07053..ec1197b 100644
--- a/src/compiler/typer.cc
+++ b/src/compiler/typer.cc
@@ -16,8 +16,8 @@
 #include "src/compiler/node.h"
 #include "src/compiler/operation-typer.h"
 #include "src/compiler/simplified-operator.h"
+#include "src/compiler/type-cache.h"
 #include "src/objects-inl.h"
-#include "src/type-cache.h"
 
 namespace v8 {
 namespace internal {
@@ -88,8 +88,6 @@
       COMMON_OP_LIST(DECLARE_CASE)
       SIMPLIFIED_COMPARE_BINOP_LIST(DECLARE_CASE)
       SIMPLIFIED_OTHER_OP_LIST(DECLARE_CASE)
-      MACHINE_OP_LIST(DECLARE_CASE)
-      MACHINE_SIMD_OP_LIST(DECLARE_CASE)
       JS_SIMPLE_UNOP_LIST(DECLARE_CASE)
       JS_OBJECT_OP_LIST(DECLARE_CASE)
       JS_CONTEXT_OP_LIST(DECLARE_CASE)
@@ -131,6 +129,8 @@
       DECLARE_CASE(End)
       SIMPLIFIED_CHANGE_OP_LIST(DECLARE_CASE)
       SIMPLIFIED_CHECKED_OP_LIST(DECLARE_CASE)
+      MACHINE_SIMD_OP_LIST(DECLARE_CASE)
+      MACHINE_OP_LIST(DECLARE_CASE)
 #undef DECLARE_CASE
       break;
     }
@@ -151,8 +151,6 @@
       COMMON_OP_LIST(DECLARE_CASE)
       SIMPLIFIED_COMPARE_BINOP_LIST(DECLARE_CASE)
       SIMPLIFIED_OTHER_OP_LIST(DECLARE_CASE)
-      MACHINE_OP_LIST(DECLARE_CASE)
-      MACHINE_SIMD_OP_LIST(DECLARE_CASE)
       JS_SIMPLE_UNOP_LIST(DECLARE_CASE)
       JS_OBJECT_OP_LIST(DECLARE_CASE)
       JS_CONTEXT_OP_LIST(DECLARE_CASE)
@@ -194,6 +192,8 @@
       DECLARE_CASE(End)
       SIMPLIFIED_CHANGE_OP_LIST(DECLARE_CASE)
       SIMPLIFIED_CHECKED_OP_LIST(DECLARE_CASE)
+      MACHINE_SIMD_OP_LIST(DECLARE_CASE)
+      MACHINE_OP_LIST(DECLARE_CASE)
 #undef DECLARE_CASE
       break;
     }
@@ -214,8 +214,6 @@
   COMMON_OP_LIST(DECLARE_METHOD)
   SIMPLIFIED_COMPARE_BINOP_LIST(DECLARE_METHOD)
   SIMPLIFIED_OTHER_OP_LIST(DECLARE_METHOD)
-  MACHINE_OP_LIST(DECLARE_METHOD)
-  MACHINE_SIMD_OP_LIST(DECLARE_METHOD)
   JS_OP_LIST(DECLARE_METHOD)
 #undef DECLARE_METHOD
 
@@ -229,7 +227,6 @@
     return TypeOrNone(operand_node);
   }
 
-  Type* WrapContextTypeForInput(Node* node);
   Type* Weaken(Node* node, Type* current_type, Type* previous_type);
 
   Zone* zone() { return typer_->zone(); }
@@ -298,6 +295,7 @@
 
   static Type* ReferenceEqualTyper(Type*, Type*, Typer*);
   static Type* StringFromCharCodeTyper(Type*, Typer*);
+  static Type* StringFromCodePointTyper(Type*, Typer*);
 
   Reduction UpdateType(Node* node, Type* current) {
     if (NodeProperties::IsTyped(node)) {
@@ -426,8 +424,8 @@
   if (type->Is(Type::Boolean())) return type;
   if (type->Is(t->falsish_)) return t->singleton_false_;
   if (type->Is(t->truish_)) return t->singleton_true_;
-  if (type->Is(Type::PlainNumber()) && (type->Max() < 0 || 0 < type->Min())) {
-    return t->singleton_true_;  // Ruled out nan, -0 and +0.
+  if (type->Is(Type::Number())) {
+    return t->operation_typer()->NumberToBoolean(type);
   }
   return Type::Boolean();
 }
@@ -519,8 +517,7 @@
 
 
 Type* Typer::Visitor::ObjectIsSmi(Type* type, Typer* t) {
-  if (type->Is(Type::TaggedSigned())) return t->singleton_true_;
-  if (type->Is(Type::TaggedPointer())) return t->singleton_false_;
+  if (!type->Maybe(Type::SignedSmall())) return t->singleton_false_;
   return Type::Boolean();
 }
 
@@ -554,11 +551,15 @@
 
 Type* Typer::Visitor::TypeOsrValue(Node* node) { return Type::Any(); }
 
+Type* Typer::Visitor::TypeRetain(Node* node) {
+  UNREACHABLE();
+  return nullptr;
+}
 
 Type* Typer::Visitor::TypeInt32Constant(Node* node) {
   double number = OpParameter<int32_t>(node);
   return Type::Intersect(Type::Range(number, number, zone()),
-                         Type::UntaggedIntegral32(), zone());
+                         Type::Integral32(), zone());
 }
 
 
@@ -567,24 +568,25 @@
   return Type::Internal();  // TODO(rossberg): Add int64 bitset type?
 }
 
-// TODO(gdeepti) : Fix this to do something meaningful.
 Type* Typer::Visitor::TypeRelocatableInt32Constant(Node* node) {
-  return Type::Internal();
+  UNREACHABLE();
+  return nullptr;
 }
 
 Type* Typer::Visitor::TypeRelocatableInt64Constant(Node* node) {
-  return Type::Internal();
+  UNREACHABLE();
+  return nullptr;
 }
 
 Type* Typer::Visitor::TypeFloat32Constant(Node* node) {
-  return Type::Intersect(Type::Of(OpParameter<float>(node), zone()),
-                         Type::UntaggedFloat32(), zone());
+  UNREACHABLE();
+  return nullptr;
 }
 
 
 Type* Typer::Visitor::TypeFloat64Constant(Node* node) {
-  return Type::Intersect(Type::Of(OpParameter<double>(node), zone()),
-                         Type::UntaggedFloat64(), zone());
+  UNREACHABLE();
+  return nullptr;
 }
 
 
@@ -633,16 +635,22 @@
   // do not apply and we cannot do anything).
   if (!initial_type->Is(typer_->cache_.kInteger) ||
       !increment_type->Is(typer_->cache_.kInteger)) {
-    // Fallback to normal phi typing.
-    Type* type = Operand(node, 0);
-    for (int i = 1; i < arity; ++i) {
+    // Fall back to normal phi typing, but ensure monotonicity.
+    // (Unfortunately, without baking in the previous type, monotonicity might
+    // be violated because we might not yet have retyped the incrementing
+    // operation even though the increment's type might already be reflected
+    // in the induction variable phi.)
+    Type* type = NodeProperties::IsTyped(node) ? NodeProperties::GetType(node)
+                                               : Type::None();
+    for (int i = 0; i < arity; ++i) {
       type = Type::Union(type, Operand(node, i), zone());
     }
     return type;
   }
   // If we do not have enough type information for the initial value or
   // the increment, just return the initial value's type.
-  if (!initial_type->IsInhabited() || !increment_type->IsInhabited()) {
+  if (!initial_type->IsInhabited() ||
+      increment_type->Is(typer_->cache_.kSingletonZero)) {
     return initial_type;
   }
 
@@ -1219,16 +1227,24 @@
 
 Type* Typer::Visitor::TypeJSInstanceOf(Node* node) { return Type::Boolean(); }
 
+Type* Typer::Visitor::TypeJSOrdinaryHasInstance(Node* node) {
+  return Type::Boolean();
+}
+
 // JS context operators.
 
 
 Type* Typer::Visitor::TypeJSLoadContext(Node* node) {
   ContextAccess const& access = ContextAccessOf(node->op());
-  if (access.index() == Context::EXTENSION_INDEX) {
-    return Type::TaggedPointer();
+  switch (access.index()) {
+    case Context::PREVIOUS_INDEX:
+    case Context::NATIVE_CONTEXT_INDEX:
+      return Type::OtherInternal();
+    case Context::CLOSURE_INDEX:
+      return Type::Function();
+    default:
+      return Type::Any();
   }
-  // Since contexts are mutable, we just return the top.
-  return Type::Any();
 }
 
 
@@ -1238,42 +1254,26 @@
 }
 
 
-Type* Typer::Visitor::WrapContextTypeForInput(Node* node) {
-  Type* outer = TypeOrNone(NodeProperties::GetContextInput(node));
-  if (outer->Is(Type::None())) {
-    return Type::None();
-  } else {
-    DCHECK(outer->Maybe(Type::OtherInternal()));
-    return Type::Context(outer, zone());
-  }
-}
-
-
 Type* Typer::Visitor::TypeJSCreateFunctionContext(Node* node) {
-  return WrapContextTypeForInput(node);
+  return Type::OtherInternal();
 }
 
-
 Type* Typer::Visitor::TypeJSCreateCatchContext(Node* node) {
-  return WrapContextTypeForInput(node);
+  return Type::OtherInternal();
 }
 
-
 Type* Typer::Visitor::TypeJSCreateWithContext(Node* node) {
-  return WrapContextTypeForInput(node);
+  return Type::OtherInternal();
 }
 
-
 Type* Typer::Visitor::TypeJSCreateBlockContext(Node* node) {
-  return WrapContextTypeForInput(node);
+  return Type::OtherInternal();
 }
 
-
 Type* Typer::Visitor::TypeJSCreateScriptContext(Node* node) {
-  return WrapContextTypeForInput(node);
+  return Type::OtherInternal();
 }
 
-
 // JS other operators.
 
 
@@ -1283,16 +1283,13 @@
 
 
 Type* Typer::Visitor::JSCallFunctionTyper(Type* fun, Typer* t) {
-  if (fun->IsFunction()) {
-    return fun->AsFunction()->Result();
-  }
   if (fun->IsConstant() && fun->AsConstant()->Value()->IsJSFunction()) {
     Handle<JSFunction> function =
         Handle<JSFunction>::cast(fun->AsConstant()->Value());
     if (function->shared()->HasBuiltinFunctionId()) {
       switch (function->shared()->builtin_function_id()) {
         case kMathRandom:
-          return Type::OrderedNumber();
+          return Type::PlainNumber();
         case kMathFloor:
         case kMathCeil:
         case kMathRound:
@@ -1321,7 +1318,7 @@
         case kMathTan:
           return Type::Number();
         case kMathSign:
-          return t->cache_.kMinusOneToOne;
+          return t->cache_.kMinusOneToOneOrMinusZeroOrNaN;
         // Binary math functions.
         case kMathAtan2:
         case kMathPow:
@@ -1332,7 +1329,32 @@
           return Type::Signed32();
         case kMathClz32:
           return t->cache_.kZeroToThirtyTwo;
+        // Date functions.
+        case kDateGetDate:
+          return t->cache_.kJSDateDayType;
+        case kDateGetDay:
+          return t->cache_.kJSDateWeekdayType;
+        case kDateGetFullYear:
+          return t->cache_.kJSDateYearType;
+        case kDateGetHours:
+          return t->cache_.kJSDateHourType;
+        case kDateGetMilliseconds:
+          return Type::Union(Type::Range(0.0, 999.0, t->zone()), Type::NaN(),
+                             t->zone());
+        case kDateGetMinutes:
+          return t->cache_.kJSDateMinuteType;
+        case kDateGetMonth:
+          return t->cache_.kJSDateMonthType;
+        case kDateGetSeconds:
+          return t->cache_.kJSDateSecondType;
+        case kDateGetTime:
+          return t->cache_.kJSDateValueType;
         // Number functions.
+        case kNumberIsFinite:
+        case kNumberIsInteger:
+        case kNumberIsNaN:
+        case kNumberIsSafeInteger:
+          return Type::Boolean();
         case kNumberParseInt:
           return t->cache_.kIntegerOrMinusZeroOrNaN;
         case kNumberToString:
@@ -1348,15 +1370,25 @@
         case kStringToLowerCase:
         case kStringToUpperCase:
           return Type::String();
+
+        case kStringIteratorNext:
+          return Type::OtherObject();
+
         // Array functions.
         case kArrayIndexOf:
         case kArrayLastIndexOf:
           return Type::Range(-1, kMaxSafeInteger, t->zone());
         case kArrayPush:
           return t->cache_.kPositiveSafeInteger;
+
         // Object functions.
         case kObjectHasOwnProperty:
           return Type::Boolean();
+
+        // Function functions.
+        case kFunctionHasInstance:
+          return Type::Boolean();
+
         // Global functions.
         case kGlobalDecodeURI:
         case kGlobalDecodeURIComponent:
@@ -1365,6 +1397,9 @@
         case kGlobalEscape:
         case kGlobalUnescape:
           return Type::String();
+        case kGlobalIsFinite:
+        case kGlobalIsNaN:
+          return Type::Boolean();
         default:
           break;
       }
@@ -1432,21 +1467,13 @@
 
 Type* Typer::Visitor::TypeJSForInPrepare(Node* node) {
   STATIC_ASSERT(Map::EnumLengthBits::kMax <= FixedArray::kMaxLength);
-  Factory* const f = isolate()->factory();
-  Type* const cache_type = Type::Union(
-      typer_->cache_.kSmi, Type::Class(f->meta_map(), zone()), zone());
-  Type* const cache_array = Type::Class(f->fixed_array_map(), zone());
+  Type* const cache_type =
+      Type::Union(typer_->cache_.kSmi, Type::OtherInternal(), zone());
+  Type* const cache_array = Type::OtherInternal();
   Type* const cache_length = typer_->cache_.kFixedArrayLengthType;
   return Type::Tuple(cache_type, cache_array, cache_length, zone());
 }
 
-Type* Typer::Visitor::TypeJSForInDone(Node* node) { return Type::Boolean(); }
-
-Type* Typer::Visitor::TypeJSForInStep(Node* node) {
-  STATIC_ASSERT(Map::EnumLengthBits::kMax <= FixedArray::kMaxLength);
-  return Type::Range(1, FixedArray::kMaxLength + 1, zone());
-}
-
 
 Type* Typer::Visitor::TypeJSLoadMessage(Node* node) { return Type::Any(); }
 
@@ -1541,6 +1568,19 @@
   return Type::String();
 }
 
+Type* Typer::Visitor::StringFromCodePointTyper(Type* type, Typer* t) {
+  type = NumberToUint32(ToNumber(type, t), t);
+  Factory* f = t->isolate()->factory();
+  double min = type->Min();
+  double max = type->Max();
+  if (min == max) {
+    uint32_t code = static_cast<uint32_t>(min) & String::kMaxUtf16CodeUnitU;
+    Handle<String> string = f->LookupSingleCharacterStringFromCode(code);
+    return Type::Constant(string, t->zone());
+  }
+  return Type::String();
+}
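+
+// Example for StringFromCodePointTyper above: a constant code point such as 65
+// yields the singleton type Constant("A"); a non-singleton input falls back to
+// Type::String().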
+
 Type* Typer::Visitor::TypeStringCharCodeAt(Node* node) {
   // TODO(bmeurer): We could do better here based on inputs.
   return Type::Range(0, kMaxUInt16, zone());
@@ -1550,17 +1590,31 @@
   return TypeUnaryOp(node, StringFromCharCodeTyper);
 }
 
+Type* Typer::Visitor::TypeStringFromCodePoint(Node* node) {
+  return TypeUnaryOp(node, StringFromCodePointTyper);
+}
+
 Type* Typer::Visitor::TypeCheckBounds(Node* node) {
   Type* index = Operand(node, 0);
   Type* length = Operand(node, 1);
   index = Type::Intersect(index, Type::Integral32(), zone());
   if (!index->IsInhabited() || !length->IsInhabited()) return Type::None();
   double min = std::max(index->Min(), 0.0);
-  double max = std::min(index->Max(), length->Min() - 1);
+  double max = std::min(index->Max(), length->Max() - 1);
   if (max < min) return Type::None();
   return Type::Range(min, max, zone());
 }
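+
+// Example for TypeCheckBounds above: index : Range(0, 100) checked against
+// length : Range(0, 10) yields Range(0, 9), i.e. the upper bound is clamped to
+// length->Max() - 1.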
 
+Type* Typer::Visitor::TypeCheckHeapObject(Node* node) {
+  Type* type = Operand(node, 0);
+  return type;
+}
+
+Type* Typer::Visitor::TypeCheckIf(Node* node) {
+  UNREACHABLE();
+  return nullptr;
+}
+
 Type* Typer::Visitor::TypeCheckMaps(Node* node) {
   UNREACHABLE();
   return nullptr;
@@ -1571,26 +1625,16 @@
   return Type::Intersect(arg, Type::Number(), zone());
 }
 
+Type* Typer::Visitor::TypeCheckSmi(Node* node) {
+  Type* arg = Operand(node, 0);
+  return Type::Intersect(arg, Type::SignedSmall(), zone());
+}
+
 Type* Typer::Visitor::TypeCheckString(Node* node) {
   Type* arg = Operand(node, 0);
   return Type::Intersect(arg, Type::String(), zone());
 }
 
-Type* Typer::Visitor::TypeCheckIf(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-Type* Typer::Visitor::TypeCheckTaggedPointer(Node* node) {
-  Type* arg = Operand(node, 0);
-  return Type::Intersect(arg, Type::TaggedPointer(), zone());
-}
-
-Type* Typer::Visitor::TypeCheckTaggedSigned(Node* node) {
-  Type* arg = Operand(node, 0);
-  return Type::Intersect(arg, typer_->cache_.kSmi, zone());
-}
-
 Type* Typer::Visitor::TypeCheckFloat64Hole(Node* node) {
   Type* type = Operand(node, 0);
   return type;
@@ -1612,7 +1656,7 @@
   return type;
 }
 
-Type* Typer::Visitor::TypeAllocate(Node* node) { return Type::TaggedPointer(); }
+Type* Typer::Visitor::TypeAllocate(Node* node) { return Type::Any(); }
 
 Type* Typer::Visitor::TypeLoadField(Node* node) {
   return FieldAccessOf(node->op()).type;
@@ -1697,652 +1741,13 @@
   return TypeUnaryOp(node, ObjectIsUndetectable);
 }
 
-
-// Machine operators.
-
-Type* Typer::Visitor::TypeDebugBreak(Node* node) { return Type::None(); }
-
-Type* Typer::Visitor::TypeComment(Node* node) { return Type::None(); }
-
-Type* Typer::Visitor::TypeRetain(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-Type* Typer::Visitor::TypeUnsafePointerAdd(Node* node) { return Type::None(); }
-
-Type* Typer::Visitor::TypeLoad(Node* node) { return Type::Any(); }
-
-Type* Typer::Visitor::TypeStackSlot(Node* node) { return Type::Any(); }
-
-Type* Typer::Visitor::TypeStore(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-
-Type* Typer::Visitor::TypeWord32And(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Or(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Xor(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Shl(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Shr(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Sar(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Ror(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Equal(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeWord32Clz(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32Ctz(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeWord32ReverseBits(Node* node) {
-  return Type::Integral32();
-}
-
-Type* Typer::Visitor::TypeWord32ReverseBytes(Node* node) {
-  return Type::Integral32();
-}
-
-Type* Typer::Visitor::TypeWord32Popcnt(Node* node) {
-  return Type::Integral32();
-}
-
-
-Type* Typer::Visitor::TypeWord64And(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Or(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Xor(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Shl(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Shr(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Sar(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Ror(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Clz(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Ctz(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64ReverseBits(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeWord64ReverseBytes(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeWord64Popcnt(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeWord64Equal(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeInt32Add(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeInt32AddWithOverflow(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeInt32Sub(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeInt32SubWithOverflow(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeInt32Mul(Node* node) { return Type::Integral32(); }
-
-Type* Typer::Visitor::TypeInt32MulWithOverflow(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeInt32MulHigh(Node* node) { return Type::Signed32(); }
-
-
-Type* Typer::Visitor::TypeInt32Div(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeInt32Mod(Node* node) { return Type::Integral32(); }
-
-
-Type* Typer::Visitor::TypeInt32LessThan(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeInt32LessThanOrEqual(Node* node) {
+Type* Typer::Visitor::TypeArrayBufferWasNeutered(Node* node) {
   return Type::Boolean();
 }
 
-
-Type* Typer::Visitor::TypeUint32Div(Node* node) { return Type::Unsigned32(); }
-
-
-Type* Typer::Visitor::TypeUint32LessThan(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeUint32LessThanOrEqual(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeUint32Mod(Node* node) { return Type::Unsigned32(); }
-
-
-Type* Typer::Visitor::TypeUint32MulHigh(Node* node) {
-  return Type::Unsigned32();
-}
-
-
-Type* Typer::Visitor::TypeInt64Add(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeInt64AddWithOverflow(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeInt64Sub(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeInt64SubWithOverflow(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeInt64Mul(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeInt64Div(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeInt64Mod(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeInt64LessThan(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeInt64LessThanOrEqual(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeUint64Div(Node* node) { return Type::Internal(); }
-
-
-Type* Typer::Visitor::TypeUint64LessThan(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeUint64LessThanOrEqual(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeUint64Mod(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeBitcastWordToTagged(Node* node) {
-  return Type::TaggedPointer();
-}
-
-Type* Typer::Visitor::TypeChangeFloat32ToFloat64(Node* node) {
-  return Type::Intersect(Type::Number(), Type::UntaggedFloat64(), zone());
-}
-
-
-Type* Typer::Visitor::TypeChangeFloat64ToInt32(Node* node) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedIntegral32(), zone());
-}
-
-Type* Typer::Visitor::TypeChangeFloat64ToUint32(Node* node) {
-  return Type::Intersect(Type::Unsigned32(), Type::UntaggedIntegral32(),
-                         zone());
-}
-
-Type* Typer::Visitor::TypeTruncateFloat64ToUint32(Node* node) {
-  return Type::Intersect(Type::Unsigned32(), Type::UntaggedIntegral32(),
-                         zone());
-}
-
-Type* Typer::Visitor::TypeTruncateFloat32ToInt32(Node* node) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedIntegral32(), zone());
-}
-
-
-Type* Typer::Visitor::TypeTruncateFloat32ToUint32(Node* node) {
-  return Type::Intersect(Type::Unsigned32(), Type::UntaggedIntegral32(),
-                         zone());
-}
-
-
-Type* Typer::Visitor::TypeTryTruncateFloat32ToInt64(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeTryTruncateFloat64ToInt64(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeTryTruncateFloat32ToUint64(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeTryTruncateFloat64ToUint64(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeChangeInt32ToFloat64(Node* node) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedFloat64(), zone());
-}
-
-Type* Typer::Visitor::TypeFloat64SilenceNaN(Node* node) {
-  return Type::UntaggedFloat64();
-}
-
-Type* Typer::Visitor::TypeChangeInt32ToInt64(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeChangeUint32ToFloat64(Node* node) {
-  return Type::Intersect(Type::Unsigned32(), Type::UntaggedFloat64(), zone());
-}
-
-Type* Typer::Visitor::TypeChangeUint32ToUint64(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeImpossibleToWord32(Node* node) {
-  return Type::None();
-}
-
-Type* Typer::Visitor::TypeImpossibleToWord64(Node* node) {
-  return Type::None();
-}
-
-Type* Typer::Visitor::TypeImpossibleToFloat32(Node* node) {
-  return Type::None();
-}
-
-Type* Typer::Visitor::TypeImpossibleToFloat64(Node* node) {
-  return Type::None();
-}
-
-Type* Typer::Visitor::TypeImpossibleToTagged(Node* node) {
-  return Type::None();
-}
-
-Type* Typer::Visitor::TypeImpossibleToBit(Node* node) { return Type::None(); }
-
-Type* Typer::Visitor::TypeTruncateFloat64ToFloat32(Node* node) {
-  return Type::Intersect(Type::Number(), Type::UntaggedFloat32(), zone());
-}
-
-Type* Typer::Visitor::TypeTruncateFloat64ToWord32(Node* node) {
-  return Type::Intersect(Type::Integral32(), Type::UntaggedIntegral32(),
-                         zone());
-}
-
-Type* Typer::Visitor::TypeTruncateInt64ToInt32(Node* node) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedIntegral32(), zone());
-}
-
-Type* Typer::Visitor::TypeRoundFloat64ToInt32(Node* node) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedIntegral32(), zone());
-}
-
-Type* Typer::Visitor::TypeRoundInt32ToFloat32(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat32(), zone());
-}
-
-
-Type* Typer::Visitor::TypeRoundInt64ToFloat32(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat32(), zone());
-}
-
-
-Type* Typer::Visitor::TypeRoundInt64ToFloat64(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat64(), zone());
-}
-
-
-Type* Typer::Visitor::TypeRoundUint32ToFloat32(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat32(), zone());
-}
-
-
-Type* Typer::Visitor::TypeRoundUint64ToFloat32(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat32(), zone());
-}
-
-
-Type* Typer::Visitor::TypeRoundUint64ToFloat64(Node* node) {
-  return Type::Intersect(Type::PlainNumber(), Type::UntaggedFloat64(), zone());
-}
-
-
-Type* Typer::Visitor::TypeBitcastFloat32ToInt32(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeBitcastFloat64ToInt64(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeBitcastInt32ToFloat32(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeBitcastInt64ToFloat64(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat32Add(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat32Sub(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat32Neg(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat32Mul(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat32Div(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat32Abs(Node* node) {
-  // TODO(turbofan): We should be able to infer a better type here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat32Sqrt(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat32Equal(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeFloat32LessThan(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeFloat32LessThanOrEqual(Node* node) {
-  return Type::Boolean();
-}
-
-Type* Typer::Visitor::TypeFloat32Max(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat32Min(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Add(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Sub(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Neg(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Mul(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Div(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Mod(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Max(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Min(Node* node) { return Type::Number(); }
-
-
-Type* Typer::Visitor::TypeFloat64Abs(Node* node) {
-  // TODO(turbofan): We should be able to infer a better type here.
-  return Type::Number();
-}
-
-Type* Typer::Visitor::TypeFloat64Acos(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Acosh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Asin(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Asinh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Atan(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Atanh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Atan2(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Cbrt(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Cos(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Cosh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Exp(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Expm1(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Log(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Log1p(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Log10(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Log2(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Pow(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Sin(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Sinh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Sqrt(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Tan(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Tanh(Node* node) { return Type::Number(); }
-
-Type* Typer::Visitor::TypeFloat64Equal(Node* node) { return Type::Boolean(); }
-
-
-Type* Typer::Visitor::TypeFloat64LessThan(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeFloat64LessThanOrEqual(Node* node) {
-  return Type::Boolean();
-}
-
-
-Type* Typer::Visitor::TypeFloat32RoundDown(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64RoundDown(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat32RoundUp(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64RoundUp(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat32RoundTruncate(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64RoundTruncate(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64RoundTiesAway(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat32RoundTiesEven(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64RoundTiesEven(Node* node) {
-  // TODO(sigurds): We could have a tighter bound here.
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64ExtractLowWord32(Node* node) {
-  return Type::Signed32();
-}
-
-
-Type* Typer::Visitor::TypeFloat64ExtractHighWord32(Node* node) {
-  return Type::Signed32();
-}
-
-
-Type* Typer::Visitor::TypeFloat64InsertLowWord32(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeFloat64InsertHighWord32(Node* node) {
-  return Type::Number();
-}
-
-
-Type* Typer::Visitor::TypeLoadStackPointer(Node* node) {
-  return Type::Internal();
-}
-
-
-Type* Typer::Visitor::TypeLoadFramePointer(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeLoadParentFramePointer(Node* node) {
-  return Type::Internal();
-}
-
-Type* Typer::Visitor::TypeUnalignedLoad(Node* node) { return Type::Any(); }
-
-Type* Typer::Visitor::TypeUnalignedStore(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-Type* Typer::Visitor::TypeCheckedLoad(Node* node) { return Type::Any(); }
-
-Type* Typer::Visitor::TypeCheckedStore(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-Type* Typer::Visitor::TypeAtomicLoad(Node* node) { return Type::Any(); }
-
-Type* Typer::Visitor::TypeAtomicStore(Node* node) {
-  UNREACHABLE();
-  return nullptr;
-}
-
-Type* Typer::Visitor::TypeInt32PairAdd(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeInt32PairSub(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeInt32PairMul(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeWord32PairShl(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeWord32PairShr(Node* node) { return Type::Internal(); }
-
-Type* Typer::Visitor::TypeWord32PairSar(Node* node) { return Type::Internal(); }
-
-// SIMD type methods.
-
-#define SIMD_RETURN_SIMD(Name) \
-  Type* Typer::Visitor::Type##Name(Node* node) { return Type::Simd(); }
-MACHINE_SIMD_RETURN_SIMD_OP_LIST(SIMD_RETURN_SIMD)
-MACHINE_SIMD_GENERIC_OP_LIST(SIMD_RETURN_SIMD)
-#undef SIMD_RETURN_SIMD
-
-#define SIMD_RETURN_NUM(Name) \
-  Type* Typer::Visitor::Type##Name(Node* node) { return Type::Number(); }
-MACHINE_SIMD_RETURN_NUM_OP_LIST(SIMD_RETURN_NUM)
-#undef SIMD_RETURN_NUM
-
-#define SIMD_RETURN_BOOL(Name) \
-  Type* Typer::Visitor::Type##Name(Node* node) { return Type::Boolean(); }
-MACHINE_SIMD_RETURN_BOOL_OP_LIST(SIMD_RETURN_BOOL)
-#undef SIMD_RETURN_BOOL
-
 // Heap constants.
 
 Type* Typer::Visitor::TypeConstant(Handle<Object> value) {
-  if (value->IsJSTypedArray()) {
-    switch (JSTypedArray::cast(*value)->type()) {
-#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
-  case kExternal##Type##Array:                          \
-    return typer_->cache_.k##Type##Array;
-      TYPED_ARRAYS(TYPED_ARRAY_CASE)
-#undef TYPED_ARRAY_CASE
-    }
-  }
   if (Type::IsInteger(*value)) {
     return Type::Range(value->Number(), value->Number(), zone());
   }
diff --git a/src/compiler/typer.h b/src/compiler/typer.h
index d4d5744..875b483 100644
--- a/src/compiler/typer.h
+++ b/src/compiler/typer.h
@@ -7,18 +7,13 @@
 
 #include "src/compiler/graph.h"
 #include "src/compiler/operation-typer.h"
-#include "src/types.h"
 
 namespace v8 {
 namespace internal {
-
-// Forward declarations.
-class TypeCache;
-
 namespace compiler {
 
+// Forward declarations.
 class LoopVariableOptimizer;
-class OperationTyper;
 
 class Typer {
  public:
diff --git a/src/compiler/types.cc b/src/compiler/types.cc
new file mode 100644
index 0000000..43d2f80
--- /dev/null
+++ b/src/compiler/types.cc
@@ -0,0 +1,961 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iomanip>
+
+#include "src/compiler/types.h"
+
+#include "src/handles-inl.h"
+#include "src/ostreams.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// NOTE: If code is marked as being a "shortcut", this means that removing
+// the code won't affect the semantics of the surrounding function definition.
+
+// static
+bool Type::IsInteger(i::Object* x) {
+  return x->IsNumber() && Type::IsInteger(x->Number());
+}
+
+// -----------------------------------------------------------------------------
+// Range-related helper functions.
+
+bool RangeType::Limits::IsEmpty() { return this->min > this->max; }
+
+RangeType::Limits RangeType::Limits::Intersect(Limits lhs, Limits rhs) {
+  DisallowHeapAllocation no_allocation;
+  Limits result(lhs);
+  if (lhs.min < rhs.min) result.min = rhs.min;
+  if (lhs.max > rhs.max) result.max = rhs.max;
+  return result;
+}
+
+RangeType::Limits RangeType::Limits::Union(Limits lhs, Limits rhs) {
+  DisallowHeapAllocation no_allocation;
+  if (lhs.IsEmpty()) return rhs;
+  if (rhs.IsEmpty()) return lhs;
+  Limits result(lhs);
+  if (lhs.min > rhs.min) result.min = rhs.min;
+  if (lhs.max < rhs.max) result.max = rhs.max;
+  return result;
+}
+
+bool Type::Overlap(RangeType* lhs, RangeType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return !RangeType::Limits::Intersect(RangeType::Limits(lhs),
+                                       RangeType::Limits(rhs))
+              .IsEmpty();
+}
+
+bool Type::Contains(RangeType* lhs, RangeType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return lhs->Min() <= rhs->Min() && rhs->Max() <= lhs->Max();
+}
+
+bool Type::Contains(RangeType* lhs, ConstantType* rhs) {
+  DisallowHeapAllocation no_allocation;
+  return IsInteger(*rhs->Value()) && lhs->Min() <= rhs->Value()->Number() &&
+         rhs->Value()->Number() <= lhs->Max();
+}
+
+bool Type::Contains(RangeType* range, i::Object* val) {
+  DisallowHeapAllocation no_allocation;
+  return IsInteger(val) && range->Min() <= val->Number() &&
+         val->Number() <= range->Max();
+}
+
+// -----------------------------------------------------------------------------
+// Min and Max computation.
+
+double Type::Min() {
+  DCHECK(this->Is(Number()));
+  if (this->IsBitset()) return BitsetType::Min(this->AsBitset());
+  if (this->IsUnion()) {
+    double min = +V8_INFINITY;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      min = std::min(min, this->AsUnion()->Get(i)->Min());
+    }
+    return min;
+  }
+  if (this->IsRange()) return this->AsRange()->Min();
+  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
+  UNREACHABLE();
+  return 0;
+}
+
+double Type::Max() {
+  DCHECK(this->Is(Number()));
+  if (this->IsBitset()) return BitsetType::Max(this->AsBitset());
+  if (this->IsUnion()) {
+    double max = -V8_INFINITY;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      max = std::max(max, this->AsUnion()->Get(i)->Max());
+    }
+    return max;
+  }
+  if (this->IsRange()) return this->AsRange()->Max();
+  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
+  UNREACHABLE();
+  return 0;
+}
+
+// -----------------------------------------------------------------------------
+// Glb and lub computation.
+
+// The largest bitset subsumed by this type.
+Type::bitset BitsetType::Glb(Type* type) {
+  DisallowHeapAllocation no_allocation;
+  // Fast case.
+  if (IsBitset(type)) {
+    return type->AsBitset();
+  } else if (type->IsUnion()) {
+    SLOW_DCHECK(type->AsUnion()->Wellformed());
+    return type->AsUnion()->Get(0)->BitsetGlb() |
+           type->AsUnion()->Get(1)->BitsetGlb();  // Shortcut.
+  } else if (type->IsRange()) {
+    bitset glb =
+        BitsetType::Glb(type->AsRange()->Min(), type->AsRange()->Max());
+    return glb;
+  } else {
+    return kNone;
+  }
+}
+
+// The smallest bitset subsuming this type, possibly not a proper one.
+Type::bitset BitsetType::Lub(Type* type) {
+  DisallowHeapAllocation no_allocation;
+  if (IsBitset(type)) return type->AsBitset();
+  if (type->IsUnion()) {
+    // Take the representation from the first element, which is always
+    // a bitset.
+    int bitset = type->AsUnion()->Get(0)->BitsetLub();
+    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
+      // Other elements only contribute their semantic part.
+      bitset |= type->AsUnion()->Get(i)->BitsetLub();
+    }
+    return bitset;
+  }
+  if (type->IsConstant()) return type->AsConstant()->Lub();
+  if (type->IsRange()) return type->AsRange()->Lub();
+  if (type->IsTuple()) return kOtherInternal;
+  UNREACHABLE();
+  return kNone;
+}
+
+Type::bitset BitsetType::Lub(i::Map* map) {
+  DisallowHeapAllocation no_allocation;
+  switch (map->instance_type()) {
+    case STRING_TYPE:
+    case ONE_BYTE_STRING_TYPE:
+    case CONS_STRING_TYPE:
+    case CONS_ONE_BYTE_STRING_TYPE:
+    case SLICED_STRING_TYPE:
+    case SLICED_ONE_BYTE_STRING_TYPE:
+    case EXTERNAL_STRING_TYPE:
+    case EXTERNAL_ONE_BYTE_STRING_TYPE:
+    case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+    case SHORT_EXTERNAL_STRING_TYPE:
+    case SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE:
+    case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
+      return kOtherString;
+    case INTERNALIZED_STRING_TYPE:
+    case ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
+    case SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE:
+    case SHORT_EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
+    case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
+      return kInternalizedString;
+    case SYMBOL_TYPE:
+      return kSymbol;
+    case ODDBALL_TYPE: {
+      Heap* heap = map->GetHeap();
+      if (map == heap->undefined_map()) return kUndefined;
+      if (map == heap->null_map()) return kNull;
+      if (map == heap->boolean_map()) return kBoolean;
+      if (map == heap->the_hole_map()) return kHole;
+      DCHECK(map == heap->uninitialized_map() ||
+             map == heap->no_interceptor_result_sentinel_map() ||
+             map == heap->termination_exception_map() ||
+             map == heap->arguments_marker_map() ||
+             map == heap->optimized_out_map() ||
+             map == heap->stale_register_map());
+      return kOtherInternal;
+    }
+    case HEAP_NUMBER_TYPE:
+      return kNumber;
+    case SIMD128_VALUE_TYPE:
+      return kSimd;
+    case JS_OBJECT_TYPE:
+    case JS_ARGUMENTS_TYPE:
+    case JS_ERROR_TYPE:
+    case JS_GLOBAL_OBJECT_TYPE:
+    case JS_GLOBAL_PROXY_TYPE:
+    case JS_API_OBJECT_TYPE:
+    case JS_SPECIAL_API_OBJECT_TYPE:
+      if (map->is_undetectable()) return kOtherUndetectable;
+      return kOtherObject;
+    case JS_VALUE_TYPE:
+    case JS_MESSAGE_OBJECT_TYPE:
+    case JS_DATE_TYPE:
+    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_GENERATOR_OBJECT_TYPE:
+    case JS_ARRAY_BUFFER_TYPE:
+    case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:  // TODO(rossberg): there should be a RegExp type.
+    case JS_TYPED_ARRAY_TYPE:
+    case JS_DATA_VIEW_TYPE:
+    case JS_SET_TYPE:
+    case JS_MAP_TYPE:
+    case JS_SET_ITERATOR_TYPE:
+    case JS_MAP_ITERATOR_TYPE:
+    case JS_STRING_ITERATOR_TYPE:
+    case JS_WEAK_MAP_TYPE:
+    case JS_WEAK_SET_TYPE:
+    case JS_PROMISE_TYPE:
+    case JS_BOUND_FUNCTION_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kOtherObject;
+    case JS_FUNCTION_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kFunction;
+    case JS_PROXY_TYPE:
+      DCHECK(!map->is_undetectable());
+      return kProxy;
+    case MAP_TYPE:
+    case ALLOCATION_SITE_TYPE:
+    case ACCESSOR_INFO_TYPE:
+    case SHARED_FUNCTION_INFO_TYPE:
+    case ACCESSOR_PAIR_TYPE:
+    case FIXED_ARRAY_TYPE:
+    case FIXED_DOUBLE_ARRAY_TYPE:
+    case BYTE_ARRAY_TYPE:
+    case BYTECODE_ARRAY_TYPE:
+    case TRANSITION_ARRAY_TYPE:
+    case FOREIGN_TYPE:
+    case SCRIPT_TYPE:
+    case CODE_TYPE:
+    case PROPERTY_CELL_TYPE:
+    case MODULE_TYPE:
+      return kOtherInternal;
+
+    // Remaining instance types are unsupported for now. If any of them do
+    // require bit set types, they should get kOtherInternal.
+    case MUTABLE_HEAP_NUMBER_TYPE:
+    case FREE_SPACE_TYPE:
+#define FIXED_TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+  case FIXED_##TYPE##_ARRAY_TYPE:
+
+      TYPED_ARRAYS(FIXED_TYPED_ARRAY_CASE)
+#undef FIXED_TYPED_ARRAY_CASE
+    case FILLER_TYPE:
+    case ACCESS_CHECK_INFO_TYPE:
+    case INTERCEPTOR_INFO_TYPE:
+    case CALL_HANDLER_INFO_TYPE:
+    case FUNCTION_TEMPLATE_INFO_TYPE:
+    case OBJECT_TEMPLATE_INFO_TYPE:
+    case SIGNATURE_INFO_TYPE:
+    case TYPE_SWITCH_INFO_TYPE:
+    case ALLOCATION_MEMENTO_TYPE:
+    case TYPE_FEEDBACK_INFO_TYPE:
+    case ALIASED_ARGUMENTS_ENTRY_TYPE:
+    case BOX_TYPE:
+    case PROMISE_CONTAINER_TYPE:
+    case DEBUG_INFO_TYPE:
+    case BREAK_POINT_INFO_TYPE:
+    case CELL_TYPE:
+    case WEAK_CELL_TYPE:
+    case PROTOTYPE_INFO_TYPE:
+    case CONTEXT_EXTENSION_TYPE:
+      UNREACHABLE();
+      return kNone;
+  }
+  UNREACHABLE();
+  return kNone;
+}
+
+Type::bitset BitsetType::Lub(i::Object* value) {
+  DisallowHeapAllocation no_allocation;
+  if (value->IsNumber()) {
+    return Lub(value->Number());
+  }
+  return Lub(i::HeapObject::cast(value)->map());
+}
+
+Type::bitset BitsetType::Lub(double value) {
+  DisallowHeapAllocation no_allocation;
+  if (i::IsMinusZero(value)) return kMinusZero;
+  if (std::isnan(value)) return kNaN;
+  if (IsUint32Double(value) || IsInt32Double(value)) return Lub(value, value);
+  return kOtherNumber;
+}
+
+// Minimum values of plain numeric bitsets.
+const BitsetType::Boundary BitsetType::BoundariesArray[] = {
+    {kOtherNumber, kPlainNumber, -V8_INFINITY},
+    {kOtherSigned32, kNegative32, kMinInt},
+    {kNegative31, kNegative31, -0x40000000},
+    {kUnsigned30, kUnsigned30, 0},
+    {kOtherUnsigned31, kUnsigned31, 0x40000000},
+    {kOtherUnsigned32, kUnsigned32, 0x80000000},
+    {kOtherNumber, kPlainNumber, static_cast<double>(kMaxUInt32) + 1}};
+
+const BitsetType::Boundary* BitsetType::Boundaries() { return BoundariesArray; }
+
+size_t BitsetType::BoundariesSize() {
+  // Windows doesn't like arraysize here.
+  // return arraysize(BoundariesArray);
+  return 7;
+}
+
+Type::bitset BitsetType::ExpandInternals(Type::bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  if (!(bits & kPlainNumber)) return bits;  // Shortcut.
+  const Boundary* boundaries = Boundaries();
+  for (size_t i = 0; i < BoundariesSize(); ++i) {
+    DCHECK(BitsetType::Is(boundaries[i].internal, boundaries[i].external));
+    if (bits & boundaries[i].internal) bits |= boundaries[i].external;
+  }
+  return bits;
+}
+
+Type::bitset BitsetType::Lub(double min, double max) {
+  DisallowHeapAllocation no_allocation;
+  int lub = kNone;
+  const Boundary* mins = Boundaries();
+
+  for (size_t i = 1; i < BoundariesSize(); ++i) {
+    if (min < mins[i].min) {
+      lub |= mins[i - 1].internal;
+      if (max < mins[i].min) return lub;
+    }
+  }
+  return lub | mins[BoundariesSize() - 1].internal;
+}
+
+Type::bitset BitsetType::NumberBits(bitset bits) { return bits & kPlainNumber; }
+
+Type::bitset BitsetType::Glb(double min, double max) {
+  DisallowHeapAllocation no_allocation;
+  int glb = kNone;
+  const Boundary* mins = Boundaries();
+
+  // If the range does not touch 0, the bound is empty.
+  if (max < -1 || min > 0) return glb;
+
+  for (size_t i = 1; i + 1 < BoundariesSize(); ++i) {
+    if (min <= mins[i].min) {
+      if (max + 1 < mins[i + 1].min) break;
+      glb |= mins[i].external;
+    }
+  }
+  // OtherNumber also contains float numbers, so it can never be
+  // in the greatest lower bound.
+  return glb & ~(kOtherNumber);
+}
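For orientation, a minimal sketch of what these two bounds yield on concrete intervals, following the boundary table above; the function name is illustrative only and not part of this patch.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void NumericBoundsSketch() {
    // Lub: the smallest union of atomic numeric bitsets covering the interval.
    CHECK(BitsetType::Lub(0, 10) == BitsetType::kUnsigned30);
    // Glb: only bitsets whose whole interval is covered survive; [0, 10] does
    // not cover all of Unsigned30, so nothing is subsumed.
    CHECK(BitsetType::Glb(0, 10) == BitsetType::kNone);
    // A range spanning the full [0, 2^30) interval does subsume Unsigned30.
    CHECK(BitsetType::Glb(0, 0x3fffffff) == BitsetType::kUnsigned30);
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8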
+
+double BitsetType::Min(bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  DCHECK(Is(bits, kNumber));
+  const Boundary* mins = Boundaries();
+  bool mz = bits & kMinusZero;
+  for (size_t i = 0; i < BoundariesSize(); ++i) {
+    if (Is(mins[i].internal, bits)) {
+      return mz ? std::min(0.0, mins[i].min) : mins[i].min;
+    }
+  }
+  if (mz) return 0;
+  return std::numeric_limits<double>::quiet_NaN();
+}
+
+double BitsetType::Max(bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  DCHECK(Is(bits, kNumber));
+  const Boundary* mins = Boundaries();
+  bool mz = bits & kMinusZero;
+  if (BitsetType::Is(mins[BoundariesSize() - 1].internal, bits)) {
+    return +V8_INFINITY;
+  }
+  for (size_t i = BoundariesSize() - 1; i-- > 0;) {
+    if (Is(mins[i].internal, bits)) {
+      return mz ? std::max(0.0, mins[i + 1].min - 1) : mins[i + 1].min - 1;
+    }
+  }
+  if (mz) return 0;
+  return std::numeric_limits<double>::quiet_NaN();
+}
+
+// -----------------------------------------------------------------------------
+// Predicates.
+
+bool Type::SimplyEquals(Type* that) {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsConstant()) {
+    return that->IsConstant() &&
+           *this->AsConstant()->Value() == *that->AsConstant()->Value();
+  }
+  if (this->IsTuple()) {
+    if (!that->IsTuple()) return false;
+    TupleType* this_tuple = this->AsTuple();
+    TupleType* that_tuple = that->AsTuple();
+    if (this_tuple->Arity() != that_tuple->Arity()) {
+      return false;
+    }
+    for (int i = 0, n = this_tuple->Arity(); i < n; ++i) {
+      if (!this_tuple->Element(i)->Equals(that_tuple->Element(i))) return false;
+    }
+    return true;
+  }
+  UNREACHABLE();
+  return false;
+}
+
+// Check if [this] <= [that].
+bool Type::SlowIs(Type* that) {
+  DisallowHeapAllocation no_allocation;
+
+  // Fast bitset cases
+  if (that->IsBitset()) {
+    return BitsetType::Is(this->BitsetLub(), that->AsBitset());
+  }
+
+  if (this->IsBitset()) {
+    return BitsetType::Is(this->AsBitset(), that->BitsetGlb());
+  }
+
+  // (T1 \/ ... \/ Tn) <= T  if  (T1 <= T) /\ ... /\ (Tn <= T)
+  if (this->IsUnion()) {
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (!this->AsUnion()->Get(i)->Is(that)) return false;
+    }
+    return true;
+  }
+
+  // T <= (T1 \/ ... \/ Tn)  if  (T <= T1) \/ ... \/ (T <= Tn)
+  if (that->IsUnion()) {
+    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
+      if (this->Is(that->AsUnion()->Get(i))) return true;
+      if (i > 1 && this->IsRange()) return false;  // Shortcut.
+    }
+    return false;
+  }
+
+  if (that->IsRange()) {
+    return (this->IsRange() && Contains(that->AsRange(), this->AsRange())) ||
+           (this->IsConstant() &&
+            Contains(that->AsRange(), this->AsConstant()));
+  }
+  if (this->IsRange()) return false;
+
+  return this->SimplyEquals(that);
+}
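A few concrete subtype queries through the public Is predicate may help; this is a sketch only, and the Zone* is assumed to be supplied by the surrounding compilation pass.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void SubtypingSketch(Zone* zone) {
    Type* small = Type::Range(0, 10, zone);
    // A range is below a bitset when its bitset lub is included.
    CHECK(small->Is(Type::Unsigned32()));
    // The converse fails: the range's bitset glb is None, so the whole
    // Unsigned32 bitset cannot be below it.
    CHECK(!Type::Unsigned32()->Is(small));
    // None and Any are the bottom and top of the lattice.
    CHECK(Type::None()->Is(small));
    CHECK(small->Is(Type::Any()));
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8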
+
+// Check if [this] and [that] overlap.
+bool Type::Maybe(Type* that) {
+  DisallowHeapAllocation no_allocation;
+
+  if (!BitsetType::IsInhabited(this->BitsetLub() & that->BitsetLub()))
+    return false;
+
+  // (T1 \/ ... \/ Tn) overlaps T  if  (T1 overlaps T) \/ ... \/ (Tn overlaps T)
+  if (this->IsUnion()) {
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (this->AsUnion()->Get(i)->Maybe(that)) return true;
+    }
+    return false;
+  }
+
+  // T overlaps (T1 \/ ... \/ Tn)  if  (T overlaps T1) \/ ... \/ (T overlaps Tn)
+  if (that->IsUnion()) {
+    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
+      if (this->Maybe(that->AsUnion()->Get(i))) return true;
+    }
+    return false;
+  }
+
+  if (this->IsBitset() && that->IsBitset()) return true;
+
+  if (this->IsRange()) {
+    if (that->IsConstant()) {
+      return Contains(this->AsRange(), that->AsConstant());
+    }
+    if (that->IsRange()) {
+      return Overlap(this->AsRange(), that->AsRange());
+    }
+    if (that->IsBitset()) {
+      bitset number_bits = BitsetType::NumberBits(that->AsBitset());
+      if (number_bits == BitsetType::kNone) {
+        return false;
+      }
+      double min = std::max(BitsetType::Min(number_bits), this->Min());
+      double max = std::min(BitsetType::Max(number_bits), this->Max());
+      return min <= max;
+    }
+  }
+  if (that->IsRange()) {
+    return that->Maybe(this);  // This case is handled above.
+  }
+
+  if (this->IsBitset() || that->IsBitset()) return true;
+
+  return this->SimplyEquals(that);
+}
+
+// Return the range in [this], or [NULL].
+Type* Type::GetRange() {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsRange()) return this;
+  if (this->IsUnion() && this->AsUnion()->Get(1)->IsRange()) {
+    return this->AsUnion()->Get(1);
+  }
+  return NULL;
+}
+
+bool UnionType::Wellformed() {
+  DisallowHeapAllocation no_allocation;
+  // This checks the invariants of the union representation:
+  // 1. There are at least two elements.
+  // 2. The first element is a bitset, no other element is a bitset.
+  // 3. At most one element is a range, and it must be the second one.
+  // 4. No element is itself a union.
+  // 5. No element (except the bitset) is a subtype of any other.
+  // 6. If there is a range, then the bitset type does not contain
+  //    plain number bits.
+  DCHECK(this->Length() >= 2);       // (1)
+  DCHECK(this->Get(0)->IsBitset());  // (2a)
+
+  for (int i = 0; i < this->Length(); ++i) {
+    if (i != 0) DCHECK(!this->Get(i)->IsBitset());  // (2b)
+    if (i != 1) DCHECK(!this->Get(i)->IsRange());   // (3)
+    DCHECK(!this->Get(i)->IsUnion());               // (4)
+    for (int j = 0; j < this->Length(); ++j) {
+      if (i != j && i != 0) DCHECK(!this->Get(i)->Is(this->Get(j)));  // (5)
+    }
+  }
+  DCHECK(!this->Get(1)->IsRange() ||
+         (BitsetType::NumberBits(this->Get(0)->AsBitset()) ==
+          BitsetType::kNone));  // (6)
+  return true;
+}
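To illustrate the invariants checked above, a union produced by the public constructors takes the following shape. This is a sketch under the assumption that the Zone* comes from the caller; the element layout is stated only in comments because Get() is not public.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void UnionShapeSketch(Zone* zone) {
    // MinusZero and Range(1, 5) do not subsume each other, so Union keeps both.
    Type* t = Type::Union(Type::MinusZero(), Type::Range(1, 5, zone), zone);
    CHECK(t->IsUnionForTesting());
    CHECK_EQ(2, t->AsUnionForTesting()->LengthForTesting());
    // Invariant (2): element 0 holds the bitset part (here MinusZero).
    // Invariant (3): the single range, if present, sits at element 1.
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8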
+
+// -----------------------------------------------------------------------------
+// Union and intersection
+
+static bool AddIsSafe(int x, int y) {
+  return x >= 0 ? y <= std::numeric_limits<int>::max() - x
+                : y >= std::numeric_limits<int>::min() - x;
+}
+
+Type* Type::Intersect(Type* type1, Type* type2, Zone* zone) {
+  // Fast case: bit sets.
+  if (type1->IsBitset() && type2->IsBitset()) {
+    return BitsetType::New(type1->AsBitset() & type2->AsBitset());
+  }
+
+  // Fast case: top or bottom types.
+  if (type1->IsNone() || type2->IsAny()) return type1;  // Shortcut.
+  if (type2->IsNone() || type1->IsAny()) return type2;  // Shortcut.
+
+  // Semi-fast case.
+  if (type1->Is(type2)) return type1;
+  if (type2->Is(type1)) return type2;
+
+  // Slow case: create union.
+
+  // Semantic subtyping check - this is needed for consistency with the
+  // semi-fast case above.
+  if (type1->Is(type2)) {
+    type2 = Any();
+  } else if (type2->Is(type1)) {
+    type1 = Any();
+  }
+
+  bitset bits = type1->BitsetGlb() & type2->BitsetGlb();
+  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
+  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
+  if (!AddIsSafe(size1, size2)) return Any();
+  int size = size1 + size2;
+  if (!AddIsSafe(size, 2)) return Any();
+  size += 2;
+  Type* result_type = UnionType::New(size, zone);
+  UnionType* result = result_type->AsUnion();
+  size = 0;
+
+  // Deal with bitsets.
+  result->Set(size++, BitsetType::New(bits));
+
+  RangeType::Limits lims = RangeType::Limits::Empty();
+  size = IntersectAux(type1, type2, result, size, &lims, zone);
+
+  // If the range is not empty, then insert it into the union and
+  // remove the number bits from the bitset.
+  if (!lims.IsEmpty()) {
+    size = UpdateRange(RangeType::New(lims, zone), result, size, zone);
+
+    // Remove the number bits.
+    bitset number_bits = BitsetType::NumberBits(bits);
+    bits &= ~number_bits;
+    result->Set(0, BitsetType::New(bits));
+  }
+  return NormalizeUnion(result_type, size, zone);
+}
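Two concrete intersections, as a hedged reading of the steps above; the Zone* is assumed to come from the caller and the function name is illustrative.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void IntersectSketch(Zone* zone) {
    // Semi-fast case: Range(0, 10) is already a subtype of Signed32, so the
    // range itself comes back.
    Type* a = Type::Intersect(Type::Range(0, 10, zone), Type::Signed32(), zone);
    CHECK(a->Equals(Type::Range(0, 10, zone)));
    // Slow case: the range is clipped against the numeric part of the bitset
    // and the result normalizes back to a plain range, here [0, 5].
    Type* b = Type::Intersect(Type::Range(-5, 5, zone), Type::Unsigned32(), zone);
    CHECK_EQ(0, b->Min());
    CHECK_EQ(5, b->Max());
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8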
+
+int Type::UpdateRange(Type* range, UnionType* result, int size, Zone* zone) {
+  if (size == 1) {
+    result->Set(size++, range);
+  } else {
+    // Make space for the range.
+    result->Set(size++, result->Get(1));
+    result->Set(1, range);
+  }
+
+  // Remove any components that just got subsumed.
+  for (int i = 2; i < size;) {
+    if (result->Get(i)->Is(range)) {
+      result->Set(i, result->Get(--size));
+    } else {
+      ++i;
+    }
+  }
+  return size;
+}
+
+RangeType::Limits Type::ToLimits(bitset bits, Zone* zone) {
+  bitset number_bits = BitsetType::NumberBits(bits);
+
+  if (number_bits == BitsetType::kNone) {
+    return RangeType::Limits::Empty();
+  }
+
+  return RangeType::Limits(BitsetType::Min(number_bits),
+                           BitsetType::Max(number_bits));
+}
+
+RangeType::Limits Type::IntersectRangeAndBitset(Type* range, Type* bitset,
+                                                Zone* zone) {
+  RangeType::Limits range_lims(range->AsRange());
+  RangeType::Limits bitset_lims = ToLimits(bitset->AsBitset(), zone);
+  return RangeType::Limits::Intersect(range_lims, bitset_lims);
+}
+
+int Type::IntersectAux(Type* lhs, Type* rhs, UnionType* result, int size,
+                       RangeType::Limits* lims, Zone* zone) {
+  if (lhs->IsUnion()) {
+    for (int i = 0, n = lhs->AsUnion()->Length(); i < n; ++i) {
+      size =
+          IntersectAux(lhs->AsUnion()->Get(i), rhs, result, size, lims, zone);
+    }
+    return size;
+  }
+  if (rhs->IsUnion()) {
+    for (int i = 0, n = rhs->AsUnion()->Length(); i < n; ++i) {
+      size =
+          IntersectAux(lhs, rhs->AsUnion()->Get(i), result, size, lims, zone);
+    }
+    return size;
+  }
+
+  if (!BitsetType::IsInhabited(lhs->BitsetLub() & rhs->BitsetLub())) {
+    return size;
+  }
+
+  if (lhs->IsRange()) {
+    if (rhs->IsBitset()) {
+      RangeType::Limits lim = IntersectRangeAndBitset(lhs, rhs, zone);
+
+      if (!lim.IsEmpty()) {
+        *lims = RangeType::Limits::Union(lim, *lims);
+      }
+      return size;
+    }
+    if (rhs->IsConstant() && Contains(lhs->AsRange(), rhs->AsConstant())) {
+      return AddToUnion(rhs, result, size, zone);
+    }
+    if (rhs->IsRange()) {
+      RangeType::Limits lim = RangeType::Limits::Intersect(
+          RangeType::Limits(lhs->AsRange()), RangeType::Limits(rhs->AsRange()));
+      if (!lim.IsEmpty()) {
+        *lims = RangeType::Limits::Union(lim, *lims);
+      }
+    }
+    return size;
+  }
+  if (rhs->IsRange()) {
+    // This case is handled symmetrically above.
+    return IntersectAux(rhs, lhs, result, size, lims, zone);
+  }
+  if (lhs->IsBitset() || rhs->IsBitset()) {
+    return AddToUnion(lhs->IsBitset() ? rhs : lhs, result, size, zone);
+  }
+  if (lhs->SimplyEquals(rhs)) {
+    return AddToUnion(lhs, result, size, zone);
+  }
+  return size;
+}
+
+// Make sure that we produce a well-formed range and bitset:
+// If the range is non-empty, the number bits in the bitset should be
+// clear. Moreover, if we have a canonical range (such as Signed32),
+// we want to produce a bitset rather than a range.
+Type* Type::NormalizeRangeAndBitset(Type* range, bitset* bits, Zone* zone) {
+  // Fast path: If the bitset does not mention numbers, we can just keep the
+  // range.
+  bitset number_bits = BitsetType::NumberBits(*bits);
+  if (number_bits == 0) {
+    return range;
+  }
+
+  // If the range is semantically contained within the bitset, return None and
+  // leave the bitset untouched.
+  bitset range_lub = range->BitsetLub();
+  if (BitsetType::Is(range_lub, *bits)) {
+    return None();
+  }
+
+  // Slow path: reconcile the bitset range and the range.
+  double bitset_min = BitsetType::Min(number_bits);
+  double bitset_max = BitsetType::Max(number_bits);
+
+  double range_min = range->Min();
+  double range_max = range->Max();
+
+  // Remove the number bits from the bitset, they would just confuse us now.
+  // NOTE: bits contains OtherNumber iff bits contains PlainNumber, in which
+  // case we already returned after the subtype check above.
+  *bits &= ~number_bits;
+
+  if (range_min <= bitset_min && range_max >= bitset_max) {
+    // Bitset is contained within the range, just return the range.
+    return range;
+  }
+
+  if (bitset_min < range_min) {
+    range_min = bitset_min;
+  }
+  if (bitset_max > range_max) {
+    range_max = bitset_max;
+  }
+  return RangeType::New(range_min, range_max, zone);
+}
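One observable consequence of this normalization, stated as a reading of the code rather than a guarantee: a range that is already canonical (its lub is contained in the accompanying bitset) disappears in favour of the bitset. The sketch assumes kMinInt/kMaxInt from src/globals.h and a caller-supplied Zone*.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void CanonicalRangeSketch(Zone* zone) {
    // Range(kMinInt, kMaxInt) is exactly the Signed32 interval, so unioning it
    // with NaN should come back as the bitset Signed32 | NaN, with no separate
    // range element left in the result.
    Type* t = Type::Union(Type::Range(kMinInt, kMaxInt, zone), Type::NaN(), zone);
    CHECK(t->IsBitsetForTesting());
    CHECK(Type::Signed32()->Is(t));
    CHECK(t->Is(Type::Number()));
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8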
+
+Type* Type::Union(Type* type1, Type* type2, Zone* zone) {
+  // Fast case: bit sets.
+  if (type1->IsBitset() && type2->IsBitset()) {
+    return BitsetType::New(type1->AsBitset() | type2->AsBitset());
+  }
+
+  // Fast case: top or bottom types.
+  if (type1->IsAny() || type2->IsNone()) return type1;
+  if (type2->IsAny() || type1->IsNone()) return type2;
+
+  // Semi-fast case.
+  if (type1->Is(type2)) return type2;
+  if (type2->Is(type1)) return type1;
+
+  // Slow case: create union.
+  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
+  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
+  if (!AddIsSafe(size1, size2)) return Any();
+  int size = size1 + size2;
+  if (!AddIsSafe(size, 2)) return Any();
+  size += 2;
+  Type* result_type = UnionType::New(size, zone);
+  UnionType* result = result_type->AsUnion();
+  size = 0;
+
+  // Compute the new bitset.
+  bitset new_bitset = type1->BitsetGlb() | type2->BitsetGlb();
+
+  // Deal with ranges.
+  Type* range = None();
+  Type* range1 = type1->GetRange();
+  Type* range2 = type2->GetRange();
+  if (range1 != NULL && range2 != NULL) {
+    RangeType::Limits lims =
+        RangeType::Limits::Union(RangeType::Limits(range1->AsRange()),
+                                 RangeType::Limits(range2->AsRange()));
+    Type* union_range = RangeType::New(lims, zone);
+    range = NormalizeRangeAndBitset(union_range, &new_bitset, zone);
+  } else if (range1 != NULL) {
+    range = NormalizeRangeAndBitset(range1, &new_bitset, zone);
+  } else if (range2 != NULL) {
+    range = NormalizeRangeAndBitset(range2, &new_bitset, zone);
+  }
+  Type* bits = BitsetType::New(new_bitset);
+  result->Set(size++, bits);
+  if (!range->IsNone()) result->Set(size++, range);
+
+  size = AddToUnion(type1, result, size, zone);
+  size = AddToUnion(type2, result, size, zone);
+  return NormalizeUnion(result_type, size, zone);
+}
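A sketch of the over-approximation this produces: because a union carries at most one range, unioning two disjoint ranges yields their convex hull (Zone* assumed from the caller).

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void UnionHullSketch(Zone* zone) {
    Type* t =
        Type::Union(Type::Range(0, 10, zone), Type::Range(20, 30, zone), zone);
    // The inputs are disjoint, but the result is the single range [0, 30].
    CHECK_EQ(0, t->Min());
    CHECK_EQ(30, t->Max());
    // The hull therefore admits values neither input contained, e.g. 15.
    CHECK(Type::Range(15, 15, zone)->Is(t));
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8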
+
+// Add [type] to [result] unless [type] is bitset, range, or already subsumed.
+// Return new size of [result].
+int Type::AddToUnion(Type* type, UnionType* result, int size, Zone* zone) {
+  if (type->IsBitset() || type->IsRange()) return size;
+  if (type->IsUnion()) {
+    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
+      size = AddToUnion(type->AsUnion()->Get(i), result, size, zone);
+    }
+    return size;
+  }
+  for (int i = 0; i < size; ++i) {
+    if (type->Is(result->Get(i))) return size;
+  }
+  result->Set(size++, type);
+  return size;
+}
+
+Type* Type::NormalizeUnion(Type* union_type, int size, Zone* zone) {
+  UnionType* unioned = union_type->AsUnion();
+  DCHECK(size >= 1);
+  DCHECK(unioned->Get(0)->IsBitset());
+  // If the union has just one element, return it.
+  if (size == 1) {
+    return unioned->Get(0);
+  }
+  bitset bits = unioned->Get(0)->AsBitset();
+  // If the union only consists of a range, we can get rid of the union.
+  if (size == 2 && bits == BitsetType::kNone) {
+    if (unioned->Get(1)->IsRange()) {
+      return RangeType::New(unioned->Get(1)->AsRange()->Min(),
+                            unioned->Get(1)->AsRange()->Max(), zone);
+    }
+  }
+  unioned->Shrink(size);
+  SLOW_DCHECK(unioned->Wellformed());
+  return union_type;
+}
+
+// -----------------------------------------------------------------------------
+// Iteration.
+
+int Type::NumConstants() {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsConstant()) {
+    return 1;
+  } else if (this->IsUnion()) {
+    int result = 0;
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      if (this->AsUnion()->Get(i)->IsConstant()) ++result;
+    }
+    return result;
+  } else {
+    return 0;
+  }
+}
+
+// -----------------------------------------------------------------------------
+// Printing.
+
+const char* BitsetType::Name(bitset bits) {
+  switch (bits) {
+#define RETURN_NAMED_TYPE(type, value) \
+  case k##type:                        \
+    return #type;
+    PROPER_BITSET_TYPE_LIST(RETURN_NAMED_TYPE)
+    INTERNAL_BITSET_TYPE_LIST(RETURN_NAMED_TYPE)
+#undef RETURN_NAMED_TYPE
+
+    default:
+      return NULL;
+  }
+}
+
+void BitsetType::Print(std::ostream& os,  // NOLINT
+                       bitset bits) {
+  DisallowHeapAllocation no_allocation;
+  const char* name = Name(bits);
+  if (name != NULL) {
+    os << name;
+    return;
+  }
+
+  // clang-format off
+  static const bitset named_bitsets[] = {
+#define BITSET_CONSTANT(type, value) k##type,
+    INTERNAL_BITSET_TYPE_LIST(BITSET_CONSTANT)
+    PROPER_BITSET_TYPE_LIST(BITSET_CONSTANT)
+#undef BITSET_CONSTANT
+  };
+  // clang-format on
+
+  bool is_first = true;
+  os << "(";
+  for (int i(arraysize(named_bitsets) - 1); bits != 0 && i >= 0; --i) {
+    bitset subset = named_bitsets[i];
+    if ((bits & subset) == subset) {
+      if (!is_first) os << " | ";
+      is_first = false;
+      os << Name(subset);
+      bits -= subset;
+    }
+  }
+  DCHECK(bits == 0);
+  os << ")";
+}
+
+void Type::PrintTo(std::ostream& os) {
+  DisallowHeapAllocation no_allocation;
+  if (this->IsBitset()) {
+    BitsetType::Print(os, this->AsBitset());
+  } else if (this->IsConstant()) {
+    os << "Constant(" << Brief(*this->AsConstant()->Value()) << ")";
+  } else if (this->IsRange()) {
+    std::ostream::fmtflags saved_flags = os.setf(std::ios::fixed);
+    std::streamsize saved_precision = os.precision(0);
+    os << "Range(" << this->AsRange()->Min() << ", " << this->AsRange()->Max()
+       << ")";
+    os.flags(saved_flags);
+    os.precision(saved_precision);
+  } else if (this->IsUnion()) {
+    os << "(";
+    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
+      Type* type_i = this->AsUnion()->Get(i);
+      if (i > 0) os << " | ";
+      type_i->PrintTo(os);
+    }
+    os << ")";
+  } else if (this->IsTuple()) {
+    os << "<";
+    for (int i = 0, n = this->AsTuple()->Arity(); i < n; ++i) {
+      Type* type_i = this->AsTuple()->Element(i);
+      if (i > 0) os << ", ";
+      type_i->PrintTo(os);
+    }
+    os << ">";
+  } else {
+    UNREACHABLE();
+  }
+}
+
+#ifdef DEBUG
+void Type::Print() {
+  OFStream os(stdout);
+  PrintTo(os);
+  os << std::endl;
+}
+void BitsetType::Print(bitset bits) {
+  OFStream os(stdout);
+  Print(os, bits);
+  os << std::endl;
+}
+#endif
+
+BitsetType::bitset BitsetType::SignedSmall() {
+  return i::SmiValuesAre31Bits() ? kSigned31 : kSigned32;
+}
+
+BitsetType::bitset BitsetType::UnsignedSmall() {
+  return i::SmiValuesAre31Bits() ? kUnsigned30 : kUnsigned31;
+}
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
diff --git a/src/compiler/types.h b/src/compiler/types.h
new file mode 100644
index 0000000..ef5bec3
--- /dev/null
+++ b/src/compiler/types.h
@@ -0,0 +1,607 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_TYPES_H_
+#define V8_COMPILER_TYPES_H_
+
+#include "src/conversions.h"
+#include "src/handles.h"
+#include "src/objects.h"
+#include "src/ostreams.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// SUMMARY
+//
+// A simple type system for compiler-internal use. It is based entirely on
+// union types, and all subtyping hence amounts to set inclusion. Besides the
+// obvious primitive types and some predefined unions, the type language also
+// can express class types (a.k.a. specific maps) and singleton types (i.e.,
+// concrete constants).
+//
+// The following equations and inequations hold:
+//
+//   None <= T
+//   T <= Any
+//
+//   Number = Signed32 \/ Unsigned32 \/ Double
+//   Smi <= Signed32
+//   Name = String \/ Symbol
+//   UniqueName = InternalizedString \/ Symbol
+//   InternalizedString < String
+//
+//   Receiver = Object \/ Proxy
+//   OtherUndetectable < Object
+//   DetectableReceiver = Receiver - OtherUndetectable
+//
+//   Constant(x) < T  iff instance_type(map(x)) < T
+//
+//
+// RANGE TYPES
+//
+// A range type represents a continuous integer interval by its minimum and
+// maximum value.  Either value may be an infinity, in which case that infinity
+// itself is also included in the range.   A range never contains NaN or -0.
+//
+// If a value v happens to be an integer n, then Constant(v) is considered a
+// subtype of Range(n, n) (and therefore also a subtype of any larger range).
+// In order to avoid large unions, however, it is usually a good idea to use
+// Range rather than Constant.
+//
+//
+// PREDICATES
+//
+// There are two main functions for testing types:
+//
+//   T1->Is(T2)     -- tests whether T1 is included in T2 (i.e., T1 <= T2)
+//   T1->Maybe(T2)  -- tests whether T1 and T2 overlap (i.e., T1 /\ T2 =/= 0)
+//
+// Typically, the former is to be used to select representations (e.g., via
+// T->Is(SignedSmall())), and the latter to check whether a specific case needs
+// handling (e.g., via T->Maybe(Number())).
+//
+// There is no functionality to discover whether a type is a leaf in the
+// lattice. That is intentional. It should always be possible to refine the
+// lattice (e.g., splitting up number types further) without invalidating any
+// existing assumptions or tests.
+// Consequently, do not normally use Equals for type tests, always use Is!
+//
+// The NowIs operator implements state-sensitive subtyping, as described above.
+// Any compilation decision based on such temporary properties requires runtime
+// guarding!
+//
+//
+// PROPERTIES
+//
+// Various formal properties hold for constructors, operators, and predicates
+// over types. For example, constructors are injective and subtyping is a
+// complete partial order.
+//
+// See test/cctest/test-types.cc for a comprehensive executable specification,
+// especially with respect to the properties of the more exotic 'temporal'
+// constructors and predicates (those prefixed 'Now').
+//
+//
+// IMPLEMENTATION
+//
+// Internally, all 'primitive' types, and their unions, are represented as
+// bitsets. Bit 0 is reserved for tagging. Only structured types require
+// allocation.
+
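To make the intended usage of the two predicates concrete, a small sketch over an arbitrary type t; the function name is illustrative only, and nothing here is specific to this patch.

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void PredicateSketch(Type* t) {
    // Representation selection: can every value of t be stored as a Smi?
    bool smi_rep = t->Is(Type::SignedSmall());
    // Case analysis: might t hold a number at all?
    bool maybe_number = t->Maybe(Type::Number());
    // The lattice is bounded: None is below and Any is above every type.
    CHECK(Type::None()->Is(t));
    CHECK(t->Is(Type::Any()));
    USE(smi_rep);
    USE(maybe_number);
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8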
+// -----------------------------------------------------------------------------
+// Values for bitset types
+
+// clang-format off
+
+#define INTERNAL_BITSET_TYPE_LIST(V)                                      \
+  V(OtherUnsigned31, 1u << 1)  \
+  V(OtherUnsigned32, 1u << 2)  \
+  V(OtherSigned32,   1u << 3)  \
+  V(OtherNumber,     1u << 4)  \
+
+#define PROPER_BITSET_TYPE_LIST(V) \
+  V(None,                0u)        \
+  V(Negative31,          1u << 5)   \
+  V(Null,                1u << 6)   \
+  V(Undefined,           1u << 7)   \
+  V(Boolean,             1u << 8)   \
+  V(Unsigned30,          1u << 9)   \
+  V(MinusZero,           1u << 10)  \
+  V(NaN,                 1u << 11)  \
+  V(Symbol,              1u << 12)  \
+  V(InternalizedString,  1u << 13)  \
+  V(OtherString,         1u << 14)  \
+  V(Simd,                1u << 15)  \
+  V(OtherObject,         1u << 17)  \
+  V(OtherUndetectable,   1u << 16)  \
+  V(Proxy,               1u << 18)  \
+  V(Function,            1u << 19)  \
+  V(Hole,                1u << 20)  \
+  V(OtherInternal,       1u << 21)  \
+  \
+  V(Signed31,                   kUnsigned30 | kNegative31) \
+  V(Signed32,                   kSigned31 | kOtherUnsigned31 | kOtherSigned32) \
+  V(Signed32OrMinusZero,        kSigned32 | kMinusZero) \
+  V(Signed32OrMinusZeroOrNaN,   kSigned32 | kMinusZero | kNaN) \
+  V(Negative32,                 kNegative31 | kOtherSigned32) \
+  V(Unsigned31,                 kUnsigned30 | kOtherUnsigned31) \
+  V(Unsigned32,                 kUnsigned30 | kOtherUnsigned31 | \
+                                kOtherUnsigned32) \
+  V(Unsigned32OrMinusZero,      kUnsigned32 | kMinusZero) \
+  V(Unsigned32OrMinusZeroOrNaN, kUnsigned32 | kMinusZero | kNaN) \
+  V(Integral32,                 kSigned32 | kUnsigned32) \
+  V(PlainNumber,                kIntegral32 | kOtherNumber) \
+  V(OrderedNumber,              kPlainNumber | kMinusZero) \
+  V(MinusZeroOrNaN,             kMinusZero | kNaN) \
+  V(Number,                     kOrderedNumber | kNaN) \
+  V(String,                     kInternalizedString | kOtherString) \
+  V(UniqueName,                 kSymbol | kInternalizedString) \
+  V(Name,                       kSymbol | kString) \
+  V(BooleanOrNumber,            kBoolean | kNumber) \
+  V(BooleanOrNullOrNumber,      kBooleanOrNumber | kNull) \
+  V(BooleanOrNullOrUndefined,   kBoolean | kNull | kUndefined) \
+  V(NullOrNumber,               kNull | kNumber) \
+  V(NullOrUndefined,            kNull | kUndefined) \
+  V(Undetectable,               kNullOrUndefined | kOtherUndetectable) \
+  V(NumberOrOddball,            kNumber | kNullOrUndefined | kBoolean | kHole) \
+  V(NumberOrSimdOrString,       kNumber | kSimd | kString) \
+  V(NumberOrString,             kNumber | kString) \
+  V(NumberOrUndefined,          kNumber | kUndefined) \
+  V(PlainPrimitive,             kNumberOrString | kBoolean | kNullOrUndefined) \
+  V(Primitive,                  kSymbol | kSimd | kPlainPrimitive) \
+  V(DetectableReceiver,         kFunction | kOtherObject | kProxy) \
+  V(Object,                     kFunction | kOtherObject | kOtherUndetectable) \
+  V(Receiver,                   kObject | kProxy) \
+  V(StringOrReceiver,           kString | kReceiver) \
+  V(Unique,                     kBoolean | kUniqueName | kNull | kUndefined | \
+                                kReceiver) \
+  V(Internal,                   kHole | kOtherInternal) \
+  V(NonInternal,                kPrimitive | kReceiver) \
+  V(NonNumber,                  kUnique | kString | kInternal) \
+  V(Any,                        0xfffffffeu)
+
+// clang-format on
+
+/*
+ * The following diagrams show how integers (in the mathematical sense) are
+ * divided among the different atomic numerical types.
+ *
+ *   ON    OS32     N31     U30     OU31    OU32     ON
+ * ______[_______[_______[_______[_______[_______[_______
+ *     -2^31   -2^30     0      2^30    2^31    2^32
+ *
+ * E.g., OtherUnsigned32 (OU32) covers all integers from 2^31 to 2^32-1.
+ *
+ * Some of the atomic numerical bitsets are internal only (see
+ * INTERNAL_BITSET_TYPE_LIST).  To users of the type system, they should only
+ * occur in union with certain other bitsets.  For instance, OtherNumber
+ * should only occur as part of PlainNumber.
+ */
+
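A few concrete classifications under this partition, as a sketch of how BitsetType::Lub(double) in src/compiler/types.cc assigns the atomic bitsets:

  #include <limits>

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void NumericClassificationSketch() {
    CHECK(BitsetType::Lub(0.0) == BitsetType::kUnsigned30);
    CHECK(BitsetType::Lub(-1.0) == BitsetType::kNegative31);
    CHECK(BitsetType::Lub(2147483648.0) == BitsetType::kOtherUnsigned32);  // 2^31
    CHECK(BitsetType::Lub(-0.0) == BitsetType::kMinusZero);
    CHECK(BitsetType::Lub(0.5) == BitsetType::kOtherNumber);
    CHECK(BitsetType::Lub(std::numeric_limits<double>::quiet_NaN()) ==
          BitsetType::kNaN);
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8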
+#define BITSET_TYPE_LIST(V)    \
+  INTERNAL_BITSET_TYPE_LIST(V) \
+  PROPER_BITSET_TYPE_LIST(V)
+
+class Type;
+
+// -----------------------------------------------------------------------------
+// Bitset types (internal).
+
+class BitsetType {
+ public:
+  typedef uint32_t bitset;  // Internal
+
+  enum : uint32_t {
+#define DECLARE_TYPE(type, value) k##type = (value),
+    BITSET_TYPE_LIST(DECLARE_TYPE)
+#undef DECLARE_TYPE
+        kUnusedEOL = 0
+  };
+
+  static bitset SignedSmall();
+  static bitset UnsignedSmall();
+
+  bitset Bitset() {
+    return static_cast<bitset>(reinterpret_cast<uintptr_t>(this) ^ 1u);
+  }
+
+  static bool IsInhabited(bitset bits) { return bits != kNone; }
+
+  static bool Is(bitset bits1, bitset bits2) {
+    return (bits1 | bits2) == bits2;
+  }
+
+  static double Min(bitset);
+  static double Max(bitset);
+
+  static bitset Glb(Type* type);  // greatest lower bound that's a bitset
+  static bitset Glb(double min, double max);
+  static bitset Lub(Type* type);  // least upper bound that's a bitset
+  static bitset Lub(i::Map* map);
+  static bitset Lub(i::Object* value);
+  static bitset Lub(double value);
+  static bitset Lub(double min, double max);
+  static bitset ExpandInternals(bitset bits);
+
+  static const char* Name(bitset);
+  static void Print(std::ostream& os, bitset);  // NOLINT
+#ifdef DEBUG
+  static void Print(bitset);
+#endif
+
+  static bitset NumberBits(bitset bits);
+
+  static bool IsBitset(Type* type) {
+    return reinterpret_cast<uintptr_t>(type) & 1;
+  }
+
+  static Type* NewForTesting(bitset bits) { return New(bits); }
+
+ private:
+  friend class Type;
+
+  static Type* New(bitset bits) {
+    return reinterpret_cast<Type*>(static_cast<uintptr_t>(bits | 1u));
+  }
+
+  struct Boundary {
+    bitset internal;
+    bitset external;
+    double min;
+  };
+  static const Boundary BoundariesArray[];
+  static inline const Boundary* Boundaries();
+  static inline size_t BoundariesSize();
+};
+
+// -----------------------------------------------------------------------------
+// Superclass for non-bitset types (internal).
+class TypeBase {
+ protected:
+  friend class Type;
+
+  enum Kind { kConstant, kTuple, kUnion, kRange };
+
+  Kind kind() const { return kind_; }
+  explicit TypeBase(Kind kind) : kind_(kind) {}
+
+  static bool IsKind(Type* type, Kind kind) {
+    if (BitsetType::IsBitset(type)) return false;
+    TypeBase* base = reinterpret_cast<TypeBase*>(type);
+    return base->kind() == kind;
+  }
+
+  // The hacky conversion to/from Type*.
+  static Type* AsType(TypeBase* type) { return reinterpret_cast<Type*>(type); }
+  static TypeBase* FromType(Type* type) {
+    return reinterpret_cast<TypeBase*>(type);
+  }
+
+ private:
+  Kind kind_;
+};
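The representation trick at work here, as a hedged sketch: bitset types are never allocated, the bits travel in the Type* word itself with the low bit set, while structured types are ordinary zone objects whose aligned pointers keep the low bit clear (Zone* assumed from the caller).

  #include <cstdint>

  #include "src/base/logging.h"
  #include "src/compiler/types.h"

  namespace v8 {
  namespace internal {
  namespace compiler {

  void RepresentationSketch(Zone* zone) {
    Type* number = Type::Number();
    // Bitset types are tagged words, not heap or zone objects.
    CHECK(reinterpret_cast<uintptr_t>(number) & 1);
    CHECK(number->IsBitsetForTesting());

    Type* range = Type::Range(0, 10, zone);
    // Range, constant, tuple and union types are zone-allocated TypeBase
    // objects; their pointers are aligned, so the low bit is clear.
    CHECK((reinterpret_cast<uintptr_t>(range) & 1) == 0);
    CHECK(!range->IsBitsetForTesting());
  }

  }  // namespace compiler
  }  // namespace internal
  }  // namespace v8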
+
+// -----------------------------------------------------------------------------
+// Constant types.
+
+class ConstantType : public TypeBase {
+ public:
+  i::Handle<i::Object> Value() { return object_; }
+
+ private:
+  friend class Type;
+  friend class BitsetType;
+
+  static Type* New(i::Handle<i::Object> value, Zone* zone) {
+    BitsetType::bitset bitset = BitsetType::Lub(*value);
+    return AsType(new (zone->New(sizeof(ConstantType)))
+                      ConstantType(bitset, value));
+  }
+
+  static ConstantType* cast(Type* type) {
+    DCHECK(IsKind(type, kConstant));
+    return static_cast<ConstantType*>(FromType(type));
+  }
+
+  ConstantType(BitsetType::bitset bitset, i::Handle<i::Object> object)
+      : TypeBase(kConstant), bitset_(bitset), object_(object) {}
+
+  BitsetType::bitset Lub() { return bitset_; }
+
+  BitsetType::bitset bitset_;
+  Handle<i::Object> object_;
+};
+// TODO(neis): Also cache value if numerical.
+
+// -----------------------------------------------------------------------------
+// Range types.
+
+class RangeType : public TypeBase {
+ public:
+  struct Limits {
+    double min;
+    double max;
+    Limits(double min, double max) : min(min), max(max) {}
+    explicit Limits(RangeType* range) : min(range->Min()), max(range->Max()) {}
+    bool IsEmpty();
+    static Limits Empty() { return Limits(1, 0); }
+    static Limits Intersect(Limits lhs, Limits rhs);
+    static Limits Union(Limits lhs, Limits rhs);
+  };
+
+  double Min() { return limits_.min; }
+  double Max() { return limits_.max; }
+
+ private:
+  friend class Type;
+  friend class BitsetType;
+  friend class UnionType;
+
+  static Type* New(double min, double max, Zone* zone) {
+    return New(Limits(min, max), zone);
+  }
+
+  static bool IsInteger(double x) {
+    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
+  }
+
+  static Type* New(Limits lim, Zone* zone) {
+    DCHECK(IsInteger(lim.min) && IsInteger(lim.max));
+    DCHECK(lim.min <= lim.max);
+    BitsetType::bitset bits = BitsetType::Lub(lim.min, lim.max);
+
+    return AsType(new (zone->New(sizeof(RangeType))) RangeType(bits, lim));
+  }
+
+  static RangeType* cast(Type* type) {
+    DCHECK(IsKind(type, kRange));
+    return static_cast<RangeType*>(FromType(type));
+  }
+
+  RangeType(BitsetType::bitset bitset, Limits limits)
+      : TypeBase(kRange), bitset_(bitset), limits_(limits) {}
+
+  BitsetType::bitset Lub() { return bitset_; }
+
+  BitsetType::bitset bitset_;
+  Limits limits_;
+};
+
+// -----------------------------------------------------------------------------
+// Superclass for types with variable number of type fields.
+class StructuralType : public TypeBase {
+ public:
+  int LengthForTesting() { return Length(); }
+
+ protected:
+  friend class Type;
+
+  int Length() { return length_; }
+
+  Type* Get(int i) {
+    DCHECK(0 <= i && i < this->Length());
+    return elements_[i];
+  }
+
+  void Set(int i, Type* type) {
+    DCHECK(0 <= i && i < this->Length());
+    elements_[i] = type;
+  }
+
+  void Shrink(int length) {
+    DCHECK(2 <= length && length <= this->Length());
+    length_ = length;
+  }
+
+  StructuralType(Kind kind, int length, i::Zone* zone)
+      : TypeBase(kind), length_(length) {
+    elements_ = reinterpret_cast<Type**>(zone->New(sizeof(Type*) * length));
+  }
+
+ private:
+  int length_;
+  Type** elements_;
+};
+
+// -----------------------------------------------------------------------------
+// Tuple types.
+
+class TupleType : public StructuralType {
+ public:
+  int Arity() { return this->Length(); }
+  Type* Element(int i) { return this->Get(i); }
+
+  void InitElement(int i, Type* type) { this->Set(i, type); }
+
+ private:
+  friend class Type;
+
+  TupleType(int length, Zone* zone) : StructuralType(kTuple, length, zone) {}
+
+  static Type* New(int length, Zone* zone) {
+    return AsType(new (zone->New(sizeof(TupleType))) TupleType(length, zone));
+  }
+
+  static TupleType* cast(Type* type) {
+    DCHECK(IsKind(type, kTuple));
+    return static_cast<TupleType*>(FromType(type));
+  }
+};
+
+// -----------------------------------------------------------------------------
+// Union types (internal).
+// A union is a structured type with the following invariants:
+// - its length is at least 2
+// - at most one field is a bitset, and it must go into index 0
+// - no field is a union
+// - no field is a subtype of any other field
+class UnionType : public StructuralType {
+ private:
+  friend Type;
+  friend BitsetType;
+
+  UnionType(int length, Zone* zone) : StructuralType(kUnion, length, zone) {}
+
+  static Type* New(int length, Zone* zone) {
+    return AsType(new (zone->New(sizeof(UnionType))) UnionType(length, zone));
+  }
+
+  static UnionType* cast(Type* type) {
+    DCHECK(IsKind(type, kUnion));
+    return static_cast<UnionType*>(FromType(type));
+  }
+
+  bool Wellformed();
+};
+
+class Type {
+ public:
+  typedef BitsetType::bitset bitset;  // Internal
+
+// Constructors.
+#define DEFINE_TYPE_CONSTRUCTOR(type, value) \
+  static Type* type() { return BitsetType::New(BitsetType::k##type); }
+  PROPER_BITSET_TYPE_LIST(DEFINE_TYPE_CONSTRUCTOR)
+#undef DEFINE_TYPE_CONSTRUCTOR
+
+  static Type* SignedSmall() {
+    return BitsetType::New(BitsetType::SignedSmall());
+  }
+  static Type* UnsignedSmall() {
+    return BitsetType::New(BitsetType::UnsignedSmall());
+  }
+
+  static Type* Constant(i::Handle<i::Object> value, Zone* zone) {
+    return ConstantType::New(value, zone);
+  }
+  static Type* Range(double min, double max, Zone* zone) {
+    return RangeType::New(min, max, zone);
+  }
+  static Type* Tuple(Type* first, Type* second, Type* third, Zone* zone) {
+    Type* tuple = TupleType::New(3, zone);
+    tuple->AsTuple()->InitElement(0, first);
+    tuple->AsTuple()->InitElement(1, second);
+    tuple->AsTuple()->InitElement(2, third);
+    return tuple;
+  }
+
+  static Type* Union(Type* type1, Type* type2, Zone* zone);
+  static Type* Intersect(Type* type1, Type* type2, Zone* zone);
+
+  static Type* Of(double value, Zone* zone) {
+    return BitsetType::New(BitsetType::ExpandInternals(BitsetType::Lub(value)));
+  }
+  static Type* Of(i::Object* value, Zone* zone) {
+    return BitsetType::New(BitsetType::ExpandInternals(BitsetType::Lub(value)));
+  }
+  static Type* Of(i::Handle<i::Object> value, Zone* zone) {
+    return Of(*value, zone);
+  }
+
+  static Type* For(i::Map* map) {
+    return BitsetType::New(BitsetType::ExpandInternals(BitsetType::Lub(map)));
+  }
+  static Type* For(i::Handle<i::Map> map) { return For(*map); }
+
+  // Predicates.
+  bool IsInhabited() { return BitsetType::IsInhabited(this->BitsetLub()); }
+
+  bool Is(Type* that) { return this == that || this->SlowIs(that); }
+  bool Maybe(Type* that);
+  bool Equals(Type* that) { return this->Is(that) && that->Is(this); }
+
+  // Inspection.
+  bool IsRange() { return IsKind(TypeBase::kRange); }
+  bool IsConstant() { return IsKind(TypeBase::kConstant); }
+  bool IsTuple() { return IsKind(TypeBase::kTuple); }
+
+  ConstantType* AsConstant() { return ConstantType::cast(this); }
+  RangeType* AsRange() { return RangeType::cast(this); }
+  TupleType* AsTuple() { return TupleType::cast(this); }
+
+  // Minimum and maximum of a numeric type.
+  // These functions do not distinguish between -0 and +0.  If the type equals
+  // kNaN, they return NaN; otherwise kNaN is ignored.  Only call these
+  // functions on subtypes of Number.
+  double Min();
+  double Max();
+
+  // Extracts a range from the type: if the type is a range or a union
+  // containing a range, that range is returned; otherwise, NULL is returned.
+  Type* GetRange();
+
+  static bool IsInteger(i::Object* x);
+  static bool IsInteger(double x) {
+    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
+  }
+
+  int NumConstants();
+
+  // Printing.
+
+  void PrintTo(std::ostream& os);
+
+#ifdef DEBUG
+  void Print();
+#endif
+
+  // Helpers for testing.
+  bool IsBitsetForTesting() { return IsBitset(); }
+  bool IsUnionForTesting() { return IsUnion(); }
+  bitset AsBitsetForTesting() { return AsBitset(); }
+  UnionType* AsUnionForTesting() { return AsUnion(); }
+
+ private:
+  // Friends.
+  template <class>
+  friend class Iterator;
+  friend BitsetType;
+  friend UnionType;
+
+  // Internal inspection.
+  bool IsKind(TypeBase::Kind kind) { return TypeBase::IsKind(this, kind); }
+
+  bool IsNone() { return this == None(); }
+  bool IsAny() { return this == Any(); }
+  bool IsBitset() { return BitsetType::IsBitset(this); }
+  bool IsUnion() { return IsKind(TypeBase::kUnion); }
+
+  bitset AsBitset() {
+    DCHECK(this->IsBitset());
+    return reinterpret_cast<BitsetType*>(this)->Bitset();
+  }
+  UnionType* AsUnion() { return UnionType::cast(this); }
+
+  bitset BitsetGlb() { return BitsetType::Glb(this); }
+  bitset BitsetLub() { return BitsetType::Lub(this); }
+
+  bool SlowIs(Type* that);
+
+  static bool Overlap(RangeType* lhs, RangeType* rhs);
+  static bool Contains(RangeType* lhs, RangeType* rhs);
+  static bool Contains(RangeType* range, ConstantType* constant);
+  static bool Contains(RangeType* range, i::Object* val);
+
+  static int UpdateRange(Type* type, UnionType* result, int size, Zone* zone);
+
+  static RangeType::Limits IntersectRangeAndBitset(Type* range, Type* bits,
+                                                   Zone* zone);
+  static RangeType::Limits ToLimits(bitset bits, Zone* zone);
+
+  bool SimplyEquals(Type* that);
+
+  static int AddToUnion(Type* type, UnionType* result, int size, Zone* zone);
+  static int IntersectAux(Type* type, Type* other, UnionType* result, int size,
+                          RangeType::Limits* limits, Zone* zone);
+  static Type* NormalizeUnion(Type* unioned, int size, Zone* zone);
+  static Type* NormalizeRangeAndBitset(Type* range, bitset* bits, Zone* zone);
+};
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_TYPES_H_
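
For reference, a minimal standalone sketch of the IsInteger(double) predicate declared above; std::signbit stands in for the internal i::IsMinusZero helper, and the name IsIntegerLike is illustrative:

  #include <cmath>

  // Mirrors Type::IsInteger(double): integral values and infinities pass,
  // NaN fails (NaN != NaN), and -0 is explicitly excluded.
  bool IsIntegerLike(double x) {
    return std::nearbyint(x) == x && !(x == 0.0 && std::signbit(x));
  }
  // IsIntegerLike(3.0) == true, IsIntegerLike(3.5) == false,
  // IsIntegerLike(-0.0) == false, IsIntegerLike(INFINITY) == true.
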
diff --git a/src/compiler/verifier.cc b/src/compiler/verifier.cc
index eb42b39..b9faeee 100644
--- a/src/compiler/verifier.cc
+++ b/src/compiler/verifier.cc
@@ -48,7 +48,7 @@
       FATAL(str.str().c_str());
     }
   }
-  void CheckUpperIs(Node* node, Type* type) {
+  void CheckTypeIs(Node* node, Type* type) {
     if (typing == TYPED && !NodeProperties::GetType(node)->Is(type)) {
       std::ostringstream str;
       str << "TypeError: node #" << node->id() << ":" << *node->op()
@@ -59,7 +59,7 @@
       FATAL(str.str().c_str());
     }
   }
-  void CheckUpperMaybe(Node* node, Type* type) {
+  void CheckTypeMaybe(Node* node, Type* type) {
     if (typing == TYPED && !NodeProperties::GetType(node)->Maybe(type)) {
       std::ostringstream str;
       str << "TypeError: node #" << node->id() << ":" << *node->op()
@@ -181,7 +181,7 @@
       CHECK_EQ(0, input_count);
       // Type is a tuple.
       // TODO(rossberg): Multiple outputs are currently typed as Internal.
-      CheckUpperIs(node, Type::Internal());
+      CheckTypeIs(node, Type::Internal());
       break;
     case IrOpcode::kEnd:
       // End has no outputs.
@@ -230,7 +230,7 @@
       Node* input = NodeProperties::GetControlInput(node, 0);
       CHECK(!input->op()->HasProperty(Operator::kNoThrow));
       // Type can be anything.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     }
     case IrOpcode::kSwitch: {
@@ -330,21 +330,21 @@
       CHECK_LE(-1, index);
       CHECK_LT(index + 1, start->op()->ValueOutputCount());
       // Type can be anything.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     }
     case IrOpcode::kInt32Constant:  // TODO(rossberg): rename Word32Constant?
       // Constants have no inputs.
       CHECK_EQ(0, input_count);
       // Type is a 32 bit integer, signed or unsigned.
-      CheckUpperIs(node, Type::Integral32());
+      CheckTypeIs(node, Type::Integral32());
       break;
     case IrOpcode::kInt64Constant:
       // Constants have no inputs.
       CHECK_EQ(0, input_count);
       // Type is internal.
       // TODO(rossberg): Introduce proper Int64 type.
-      CheckUpperIs(node, Type::Internal());
+      CheckTypeIs(node, Type::Internal());
       break;
     case IrOpcode::kFloat32Constant:
     case IrOpcode::kFloat64Constant:
@@ -352,7 +352,7 @@
       // Constants have no inputs.
       CHECK_EQ(0, input_count);
       // Type is a number.
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kRelocatableInt32Constant:
     case IrOpcode::kRelocatableInt64Constant:
@@ -361,21 +361,19 @@
     case IrOpcode::kHeapConstant:
       // Constants have no inputs.
       CHECK_EQ(0, input_count);
-      // Type can be anything represented as a heap pointer.
-      CheckUpperIs(node, Type::TaggedPointer());
       break;
     case IrOpcode::kExternalConstant:
       // Constants have no inputs.
       CHECK_EQ(0, input_count);
       // Type is considered internal.
-      CheckUpperIs(node, Type::Internal());
+      CheckTypeIs(node, Type::Internal());
       break;
     case IrOpcode::kOsrValue:
       // OSR values have a value and a control input.
       CHECK_EQ(1, control_count);
       CHECK_EQ(1, input_count);
       // Type is merged from other values in the graph and could be any.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     case IrOpcode::kProjection: {
       // Projection has an input that produces enough values.
@@ -385,7 +383,7 @@
       // Type can be anything.
       // TODO(rossberg): Introduce tuple types for this.
       // TODO(titzer): Convince rossberg not to.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     }
     case IrOpcode::kSelect: {
@@ -495,7 +493,7 @@
     case IrOpcode::kJSLessThanOrEqual:
     case IrOpcode::kJSGreaterThanOrEqual:
       // Type is Boolean.
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
 
     case IrOpcode::kJSBitwiseOr:
@@ -505,80 +503,80 @@
     case IrOpcode::kJSShiftRight:
     case IrOpcode::kJSShiftRightLogical:
       // Type is 32 bit integral.
-      CheckUpperIs(node, Type::Integral32());
+      CheckTypeIs(node, Type::Integral32());
       break;
     case IrOpcode::kJSAdd:
       // Type is Number or String.
-      CheckUpperIs(node, Type::NumberOrString());
+      CheckTypeIs(node, Type::NumberOrString());
       break;
     case IrOpcode::kJSSubtract:
     case IrOpcode::kJSMultiply:
     case IrOpcode::kJSDivide:
     case IrOpcode::kJSModulus:
       // Type is Number.
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
 
     case IrOpcode::kJSToBoolean:
       // Type is Boolean.
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kJSToInteger:
       // Type is OrderedNumber.
-      CheckUpperIs(node, Type::OrderedNumber());
+      CheckTypeIs(node, Type::OrderedNumber());
       break;
     case IrOpcode::kJSToLength:
       // Type is OrderedNumber.
-      CheckUpperIs(node, Type::OrderedNumber());
+      CheckTypeIs(node, Type::OrderedNumber());
       break;
     case IrOpcode::kJSToName:
       // Type is Name.
-      CheckUpperIs(node, Type::Name());
+      CheckTypeIs(node, Type::Name());
       break;
     case IrOpcode::kJSToNumber:
       // Type is Number.
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kJSToString:
       // Type is String.
-      CheckUpperIs(node, Type::String());
+      CheckTypeIs(node, Type::String());
       break;
     case IrOpcode::kJSToObject:
       // Type is Receiver.
-      CheckUpperIs(node, Type::Receiver());
+      CheckTypeIs(node, Type::Receiver());
       break;
 
     case IrOpcode::kJSCreate:
       // Type is Object.
-      CheckUpperIs(node, Type::Object());
+      CheckTypeIs(node, Type::Object());
       break;
     case IrOpcode::kJSCreateArguments:
       // Type is OtherObject.
-      CheckUpperIs(node, Type::OtherObject());
+      CheckTypeIs(node, Type::OtherObject());
       break;
     case IrOpcode::kJSCreateArray:
       // Type is OtherObject.
-      CheckUpperIs(node, Type::OtherObject());
+      CheckTypeIs(node, Type::OtherObject());
       break;
     case IrOpcode::kJSCreateClosure:
       // Type is Function.
-      CheckUpperIs(node, Type::Function());
+      CheckTypeIs(node, Type::Function());
       break;
     case IrOpcode::kJSCreateIterResultObject:
       // Type is OtherObject.
-      CheckUpperIs(node, Type::OtherObject());
+      CheckTypeIs(node, Type::OtherObject());
       break;
     case IrOpcode::kJSCreateLiteralArray:
     case IrOpcode::kJSCreateLiteralObject:
     case IrOpcode::kJSCreateLiteralRegExp:
       // Type is OtherObject.
-      CheckUpperIs(node, Type::OtherObject());
+      CheckTypeIs(node, Type::OtherObject());
       break;
     case IrOpcode::kJSLoadProperty:
     case IrOpcode::kJSLoadNamed:
     case IrOpcode::kJSLoadGlobal:
       // Type can be anything.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     case IrOpcode::kJSStoreProperty:
     case IrOpcode::kJSStoreNamed:
@@ -589,17 +587,18 @@
     case IrOpcode::kJSDeleteProperty:
     case IrOpcode::kJSHasProperty:
     case IrOpcode::kJSInstanceOf:
+    case IrOpcode::kJSOrdinaryHasInstance:
       // Type is Boolean.
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kJSTypeOf:
       // Type is String.
-      CheckUpperIs(node, Type::String());
+      CheckTypeIs(node, Type::String());
       break;
 
     case IrOpcode::kJSLoadContext:
       // Type can be anything.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
     case IrOpcode::kJSStoreContext:
       // Type is empty.
@@ -612,44 +611,31 @@
     case IrOpcode::kJSCreateScriptContext: {
       // Type is Context, and operand is Internal.
       Node* context = NodeProperties::GetContextInput(node);
-      // TODO(rossberg): This should really be Is(Internal), but the typer
-      // currently can't do backwards propagation.
-      CheckUpperMaybe(context, Type::Internal());
-      if (typing == TYPED) CHECK(NodeProperties::GetType(node)->IsContext());
+      // TODO(bmeurer): This should say CheckTypeIs, but we don't have type
+      // OtherInternal on certain contexts, i.e. those from OsrValue inputs.
+      CheckTypeMaybe(context, Type::OtherInternal());
+      CheckTypeIs(node, Type::OtherInternal());
       break;
     }
 
     case IrOpcode::kJSCallConstruct:
     case IrOpcode::kJSConvertReceiver:
       // Type is Receiver.
-      CheckUpperIs(node, Type::Receiver());
+      CheckTypeIs(node, Type::Receiver());
       break;
     case IrOpcode::kJSCallFunction:
     case IrOpcode::kJSCallRuntime:
       // Type can be anything.
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
 
     case IrOpcode::kJSForInPrepare: {
       // TODO(bmeurer): What are the constraints on these?
-      CheckUpperIs(node, Type::Any());
-      break;
-    }
-    case IrOpcode::kJSForInDone: {
-      // TODO(bmeurer): OSR breaks this invariant, although the node is not user
-      // visible, so we know it is safe (fullcodegen has an unsigned smi there).
-      // CheckValueInputIs(node, 0, Type::UnsignedSmall());
+      CheckTypeIs(node, Type::Any());
       break;
     }
     case IrOpcode::kJSForInNext: {
-      CheckUpperIs(node, Type::Union(Type::Name(), Type::Undefined(), zone));
-      break;
-    }
-    case IrOpcode::kJSForInStep: {
-      // TODO(bmeurer): OSR breaks this invariant, although the node is not user
-      // visible, so we know it is safe (fullcodegen has an unsigned smi there).
-      // CheckValueInputIs(node, 0, Type::UnsignedSmall());
-      CheckUpperIs(node, Type::UnsignedSmall());
+      CheckTypeIs(node, Type::Union(Type::Name(), Type::Undefined(), zone));
       break;
     }
 
@@ -662,11 +648,11 @@
       break;
 
     case IrOpcode::kJSGeneratorRestoreContinuation:
-      CheckUpperIs(node, Type::SignedSmall());
+      CheckTypeIs(node, Type::SignedSmall());
       break;
 
     case IrOpcode::kJSGeneratorRestoreRegister:
-      CheckUpperIs(node, Type::Any());
+      CheckTypeIs(node, Type::Any());
       break;
 
     case IrOpcode::kJSStackCheck:
@@ -686,32 +672,32 @@
     case IrOpcode::kBooleanNot:
       // Boolean -> Boolean
       CheckValueInputIs(node, 0, Type::Boolean());
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kNumberEqual:
       // (Number, Number) -> Boolean
       CheckValueInputIs(node, 0, Type::Number());
       CheckValueInputIs(node, 1, Type::Number());
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kNumberLessThan:
     case IrOpcode::kNumberLessThanOrEqual:
       // (Number, Number) -> Boolean
       CheckValueInputIs(node, 0, Type::Number());
       CheckValueInputIs(node, 1, Type::Number());
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kSpeculativeNumberAdd:
     case IrOpcode::kSpeculativeNumberSubtract:
     case IrOpcode::kSpeculativeNumberMultiply:
     case IrOpcode::kSpeculativeNumberDivide:
     case IrOpcode::kSpeculativeNumberModulus:
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kSpeculativeNumberEqual:
     case IrOpcode::kSpeculativeNumberLessThan:
     case IrOpcode::kSpeculativeNumberLessThanOrEqual:
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kNumberAdd:
     case IrOpcode::kNumberSubtract:
@@ -720,13 +706,13 @@
       // (Number, Number) -> Number
       CheckValueInputIs(node, 0, Type::Number());
       CheckValueInputIs(node, 1, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kNumberModulus:
       // (Number, Number) -> Number
       CheckValueInputIs(node, 0, Type::Number());
       CheckValueInputIs(node, 1, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kNumberBitwiseOr:
     case IrOpcode::kNumberBitwiseXor:
@@ -734,43 +720,43 @@
       // (Signed32, Signed32) -> Signed32
       CheckValueInputIs(node, 0, Type::Signed32());
       CheckValueInputIs(node, 1, Type::Signed32());
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kSpeculativeNumberBitwiseOr:
     case IrOpcode::kSpeculativeNumberBitwiseXor:
     case IrOpcode::kSpeculativeNumberBitwiseAnd:
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kNumberShiftLeft:
     case IrOpcode::kNumberShiftRight:
       // (Signed32, Unsigned32) -> Signed32
       CheckValueInputIs(node, 0, Type::Signed32());
       CheckValueInputIs(node, 1, Type::Unsigned32());
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kSpeculativeNumberShiftLeft:
     case IrOpcode::kSpeculativeNumberShiftRight:
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kNumberShiftRightLogical:
       // (Unsigned32, Unsigned32) -> Unsigned32
       CheckValueInputIs(node, 0, Type::Unsigned32());
       CheckValueInputIs(node, 1, Type::Unsigned32());
-      CheckUpperIs(node, Type::Unsigned32());
+      CheckTypeIs(node, Type::Unsigned32());
       break;
     case IrOpcode::kSpeculativeNumberShiftRightLogical:
-      CheckUpperIs(node, Type::Unsigned32());
+      CheckTypeIs(node, Type::Unsigned32());
       break;
     case IrOpcode::kNumberImul:
       // (Unsigned32, Unsigned32) -> Signed32
       CheckValueInputIs(node, 0, Type::Unsigned32());
       CheckValueInputIs(node, 1, Type::Unsigned32());
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kNumberClz32:
       // Unsigned32 -> Unsigned32
       CheckValueInputIs(node, 0, Type::Unsigned32());
-      CheckUpperIs(node, Type::Unsigned32());
+      CheckTypeIs(node, Type::Unsigned32());
       break;
     case IrOpcode::kNumberAtan2:
     case IrOpcode::kNumberMax:
@@ -779,7 +765,7 @@
       // (Number, Number) -> Number
       CheckValueInputIs(node, 0, Type::Number());
       CheckValueInputIs(node, 1, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kNumberAbs:
     case IrOpcode::kNumberCeil:
@@ -810,32 +796,37 @@
     case IrOpcode::kNumberTrunc:
       // Number -> Number
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
+      break;
+    case IrOpcode::kNumberToBoolean:
+      // Number -> Boolean
+      CheckValueInputIs(node, 0, Type::Number());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kNumberToInt32:
       // Number -> Signed32
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::Signed32());
+      CheckTypeIs(node, Type::Signed32());
       break;
     case IrOpcode::kNumberToUint32:
       // Number -> Unsigned32
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::Unsigned32());
+      CheckTypeIs(node, Type::Unsigned32());
       break;
     case IrOpcode::kPlainPrimitiveToNumber:
       // PlainPrimitive -> Number
       CheckValueInputIs(node, 0, Type::PlainPrimitive());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kPlainPrimitiveToWord32:
       // PlainPrimitive -> Integral32
       CheckValueInputIs(node, 0, Type::PlainPrimitive());
-      CheckUpperIs(node, Type::Integral32());
+      CheckTypeIs(node, Type::Integral32());
       break;
     case IrOpcode::kPlainPrimitiveToFloat64:
       // PlainPrimitive -> Number
       CheckValueInputIs(node, 0, Type::PlainPrimitive());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kStringEqual:
     case IrOpcode::kStringLessThan:
@@ -843,23 +834,28 @@
       // (String, String) -> Boolean
       CheckValueInputIs(node, 0, Type::String());
       CheckValueInputIs(node, 1, Type::String());
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kStringCharCodeAt:
       // (String, Unsigned32) -> UnsignedSmall
       CheckValueInputIs(node, 0, Type::String());
       CheckValueInputIs(node, 1, Type::Unsigned32());
-      CheckUpperIs(node, Type::UnsignedSmall());
+      CheckTypeIs(node, Type::UnsignedSmall());
       break;
     case IrOpcode::kStringFromCharCode:
       // Number -> String
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::String());
+      CheckTypeIs(node, Type::String());
+      break;
+    case IrOpcode::kStringFromCodePoint:
+      // (Unsigned32) -> String
+      CheckValueInputIs(node, 0, Type::Number());
+      CheckTypeIs(node, Type::String());
       break;
     case IrOpcode::kReferenceEqual: {
       // (Unique, Any) -> Boolean  and
       // (Any, Unique) -> Boolean
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     }
     case IrOpcode::kObjectIsCallable:
@@ -868,24 +864,24 @@
     case IrOpcode::kObjectIsSmi:
     case IrOpcode::kObjectIsString:
     case IrOpcode::kObjectIsUndetectable:
+    case IrOpcode::kArrayBufferWasNeutered:
       CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::Boolean());
+      CheckTypeIs(node, Type::Boolean());
       break;
     case IrOpcode::kAllocate:
       CheckValueInputIs(node, 0, Type::PlainNumber());
-      CheckUpperIs(node, Type::TaggedPointer());
       break;
     case IrOpcode::kEnsureWritableFastElements:
       CheckValueInputIs(node, 0, Type::Any());
       CheckValueInputIs(node, 1, Type::Internal());
-      CheckUpperIs(node, Type::Internal());
+      CheckTypeIs(node, Type::Internal());
       break;
     case IrOpcode::kMaybeGrowFastElements:
       CheckValueInputIs(node, 0, Type::Any());
       CheckValueInputIs(node, 1, Type::Internal());
       CheckValueInputIs(node, 2, Type::Unsigned31());
       CheckValueInputIs(node, 3, Type::Unsigned31());
-      CheckUpperIs(node, Type::Internal());
+      CheckTypeIs(node, Type::Internal());
       break;
     case IrOpcode::kTransitionElementsKind:
       CheckValueInputIs(node, 0, Type::Any());
@@ -900,7 +896,7 @@
       // Type* from = Type::Intersect(Type::Signed32(), Type::Tagged());
       // Type* to = Type::Intersect(Type::Signed32(), Type::UntaggedInt32());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeTaggedToInt32: {
@@ -909,7 +905,7 @@
       // Type* from = Type::Intersect(Type::Signed32(), Type::Tagged());
       // Type* to = Type::Intersect(Type::Signed32(), Type::UntaggedInt32());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeTaggedToUint32: {
@@ -918,7 +914,7 @@
       // Type* from = Type::Intersect(Type::Unsigned32(), Type::Tagged());
       // Type* to =Type::Intersect(Type::Unsigned32(), Type::UntaggedInt32());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeTaggedToFloat64: {
@@ -927,7 +923,7 @@
       // Type* from = Type::Intersect(Type::Number(), Type::Tagged());
       // Type* to = Type::Intersect(Type::Number(), Type::UntaggedFloat64());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kTruncateTaggedToFloat64: {
@@ -937,7 +933,7 @@
       // Type::Tagged());
       // Type* to = Type::Intersect(Type::Number(), Type::UntaggedFloat64());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeInt31ToTaggedSigned: {
@@ -946,7 +942,7 @@
       // Type* from =Type::Intersect(Type::Signed31(), Type::UntaggedInt32());
       // Type* to = Type::Intersect(Type::Signed31(), Type::Tagged());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeInt32ToTagged: {
@@ -955,7 +951,7 @@
       // Type* from =Type::Intersect(Type::Signed32(), Type::UntaggedInt32());
       // Type* to = Type::Intersect(Type::Signed32(), Type::Tagged());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeUint32ToTagged: {
@@ -964,7 +960,7 @@
       // Type* from=Type::Intersect(Type::Unsigned32(),Type::UntaggedInt32());
       // Type* to = Type::Intersect(Type::Unsigned32(), Type::Tagged());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeFloat64ToTagged: {
@@ -973,7 +969,7 @@
       // Type* from =Type::Intersect(Type::Number(), Type::UntaggedFloat64());
       // Type* to = Type::Intersect(Type::Number(), Type::Tagged());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeTaggedToBit: {
@@ -982,7 +978,7 @@
       // Type* from = Type::Intersect(Type::Boolean(), Type::TaggedPtr());
       // Type* to = Type::Intersect(Type::Boolean(), Type::UntaggedInt1());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kChangeBitToTagged: {
@@ -991,7 +987,7 @@
       // Type* from = Type::Intersect(Type::Boolean(), Type::UntaggedInt1());
       // Type* to = Type::Intersect(Type::Boolean(), Type::TaggedPtr());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
     case IrOpcode::kTruncateTaggedToWord32: {
@@ -1000,21 +996,23 @@
       // Type* from = Type::Intersect(Type::Number(), Type::Tagged());
       // Type* to = Type::Intersect(Type::Number(), Type::UntaggedInt32());
       // CheckValueInputIs(node, 0, from));
-      // CheckUpperIs(node, to));
+      // CheckTypeIs(node, to));
       break;
     }
-    case IrOpcode::kImpossibleToWord32:
-    case IrOpcode::kImpossibleToWord64:
-    case IrOpcode::kImpossibleToFloat32:
-    case IrOpcode::kImpossibleToFloat64:
-    case IrOpcode::kImpossibleToTagged:
-    case IrOpcode::kImpossibleToBit:
+    case IrOpcode::kTruncateTaggedToBit:
       break;
 
     case IrOpcode::kCheckBounds:
       CheckValueInputIs(node, 0, Type::Any());
       CheckValueInputIs(node, 1, Type::Unsigned31());
-      CheckUpperIs(node, Type::Unsigned31());
+      CheckTypeIs(node, Type::Unsigned31());
+      break;
+    case IrOpcode::kCheckHeapObject:
+      CheckValueInputIs(node, 0, Type::Any());
+      break;
+    case IrOpcode::kCheckIf:
+      CheckValueInputIs(node, 0, Type::Boolean());
+      CheckNotTyped(node);
       break;
     case IrOpcode::kCheckMaps:
       // (Any, Internal, ..., Internal) -> Any
@@ -1026,23 +1024,14 @@
       break;
     case IrOpcode::kCheckNumber:
       CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
+      break;
+    case IrOpcode::kCheckSmi:
+      CheckValueInputIs(node, 0, Type::Any());
       break;
     case IrOpcode::kCheckString:
       CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::String());
-      break;
-    case IrOpcode::kCheckIf:
-      CheckValueInputIs(node, 0, Type::Boolean());
-      CheckNotTyped(node);
-      break;
-    case IrOpcode::kCheckTaggedSigned:
-      CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::TaggedSigned());
-      break;
-    case IrOpcode::kCheckTaggedPointer:
-      CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::TaggedPointer());
+      CheckTypeIs(node, Type::String());
       break;
 
     case IrOpcode::kCheckedInt32Add:
@@ -1052,32 +1041,35 @@
     case IrOpcode::kCheckedUint32Div:
     case IrOpcode::kCheckedUint32Mod:
     case IrOpcode::kCheckedInt32Mul:
+    case IrOpcode::kCheckedInt32ToTaggedSigned:
     case IrOpcode::kCheckedUint32ToInt32:
+    case IrOpcode::kCheckedUint32ToTaggedSigned:
     case IrOpcode::kCheckedFloat64ToInt32:
     case IrOpcode::kCheckedTaggedSignedToInt32:
     case IrOpcode::kCheckedTaggedToInt32:
     case IrOpcode::kCheckedTaggedToFloat64:
+    case IrOpcode::kCheckedTaggedToTaggedSigned:
     case IrOpcode::kCheckedTruncateTaggedToWord32:
       break;
 
     case IrOpcode::kCheckFloat64Hole:
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kCheckTaggedHole:
       CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::NonInternal());
+      CheckTypeIs(node, Type::NonInternal());
       break;
     case IrOpcode::kConvertTaggedHoleToUndefined:
       CheckValueInputIs(node, 0, Type::Any());
-      CheckUpperIs(node, Type::NonInternal());
+      CheckTypeIs(node, Type::NonInternal());
       break;
 
     case IrOpcode::kLoadField:
       // Object -> fieldtype
       // TODO(rossberg): activate once machine ops are typed.
       // CheckValueInputIs(node, 0, Type::Object());
-      // CheckUpperIs(node, FieldAccessOf(node->op()).type));
+      // CheckTypeIs(node, FieldAccessOf(node->op()).type));
       break;
     case IrOpcode::kLoadBuffer:
       break;
@@ -1085,7 +1077,7 @@
       // Object -> elementtype
       // TODO(rossberg): activate once machine ops are typed.
       // CheckValueInputIs(node, 0, Type::Object());
-      // CheckUpperIs(node, ElementAccessOf(node->op()).type));
+      // CheckTypeIs(node, ElementAccessOf(node->op()).type));
       break;
     case IrOpcode::kLoadTypedElement:
       break;
@@ -1110,15 +1102,16 @@
       break;
     case IrOpcode::kNumberSilenceNaN:
       CheckValueInputIs(node, 0, Type::Number());
-      CheckUpperIs(node, Type::Number());
+      CheckTypeIs(node, Type::Number());
       break;
     case IrOpcode::kTypeGuard:
-      CheckUpperIs(node, TypeGuardTypeOf(node->op()));
+      CheckTypeIs(node, TypeGuardTypeOf(node->op()));
       break;
 
     // Machine operators
     // -----------------------
     case IrOpcode::kLoad:
+    case IrOpcode::kProtectedLoad:
     case IrOpcode::kStore:
     case IrOpcode::kStackSlot:
     case IrOpcode::kWord32And:
@@ -1245,7 +1238,9 @@
     case IrOpcode::kBitcastFloat64ToInt64:
     case IrOpcode::kBitcastInt32ToFloat32:
     case IrOpcode::kBitcastInt64ToFloat64:
+    case IrOpcode::kBitcastTaggedToWord:
     case IrOpcode::kBitcastWordToTagged:
+    case IrOpcode::kBitcastWordToTaggedSigned:
     case IrOpcode::kChangeInt32ToInt64:
     case IrOpcode::kChangeUint32ToUint64:
     case IrOpcode::kChangeInt32ToFloat64:
diff --git a/src/compiler/wasm-compiler.cc b/src/compiler/wasm-compiler.cc
index e92a434..b003e99 100644
--- a/src/compiler/wasm-compiler.cc
+++ b/src/compiler/wasm-compiler.cc
@@ -189,26 +189,29 @@
 
   Node* GetTrapValue(wasm::FunctionSig* sig) {
     if (sig->return_count() > 0) {
-      switch (sig->GetReturn()) {
-        case wasm::kAstI32:
-          return jsgraph()->Int32Constant(0xdeadbeef);
-        case wasm::kAstI64:
-          return jsgraph()->Int64Constant(0xdeadbeefdeadbeef);
-        case wasm::kAstF32:
-          return jsgraph()->Float32Constant(bit_cast<float>(0xdeadbeef));
-        case wasm::kAstF64:
-          return jsgraph()->Float64Constant(
-              bit_cast<double>(0xdeadbeefdeadbeef));
-          break;
-        default:
-          UNREACHABLE();
-          return nullptr;
-      }
+      return GetTrapValue(sig->GetReturn());
     } else {
       return jsgraph()->Int32Constant(0xdeadbeef);
     }
   }
 
+  Node* GetTrapValue(wasm::LocalType type) {
+    switch (type) {
+      case wasm::kAstI32:
+        return jsgraph()->Int32Constant(0xdeadbeef);
+      case wasm::kAstI64:
+        return jsgraph()->Int64Constant(0xdeadbeefdeadbeef);
+      case wasm::kAstF32:
+        return jsgraph()->Float32Constant(bit_cast<float>(0xdeadbeef));
+      case wasm::kAstF64:
+        return jsgraph()->Float64Constant(bit_cast<double>(0xdeadbeefdeadbeef));
+        break;
+      default:
+        UNREACHABLE();
+        return nullptr;
+    }
+  }
+
  private:
   WasmGraphBuilder* builder_;
   JSGraph* jsgraph_;
@@ -334,6 +337,19 @@
          NodeProperties::GetControlInput(phi) == merge;
 }
 
+bool WasmGraphBuilder::ThrowsException(Node* node, Node** if_success,
+                                       Node** if_exception) {
+  if (node->op()->HasProperty(compiler::Operator::kNoThrow)) {
+    return false;
+  }
+
+  *if_success = graph()->NewNode(jsgraph()->common()->IfSuccess(), node);
+  *if_exception =
+      graph()->NewNode(jsgraph()->common()->IfException(), node, node);
+
+  return true;
+}
+
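A hedged usage sketch of the new ThrowsException helper (the caller code is illustrative, not part of this change); it only materializes IfSuccess/IfException projections for nodes that can actually throw:

  Node* if_success = nullptr;
  Node* if_exception = nullptr;
  if (builder->ThrowsException(call, &if_success, &if_exception)) {
    // Wire if_exception into the enclosing try/catch region and continue
    // building straight-line code on if_success.
  }
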
 void WasmGraphBuilder::AppendToMerge(Node* merge, Node* from) {
   DCHECK(IrOpcode::IsMergeOpcode(merge->opcode()));
   merge->AppendInput(jsgraph()->zone(), from);
@@ -932,8 +948,6 @@
       return BuildI64UConvertF32(input, position);
     case wasm::kExprI64UConvertF64:
       return BuildI64UConvertF64(input, position);
-    case wasm::kExprGrowMemory:
-      return BuildGrowMemory(input);
     case wasm::kExprI32AsmjsLoadMem8S:
       return BuildAsmjsLoadMem(MachineType::Int8(), input);
     case wasm::kExprI32AsmjsLoadMem8U:
@@ -995,16 +1009,11 @@
   DCHECK_NOT_NULL(*control_);
   DCHECK_NOT_NULL(*effect_);
 
-  if (count == 0) {
-    // Handle a return of void.
-    vals[0] = jsgraph()->Int32Constant(0);
-    count = 1;
-  }
-
   Node** buf = Realloc(vals, count, count + 2);
   buf[count] = *effect_;
   buf[count + 1] = *control_;
-  Node* ret = graph()->NewNode(jsgraph()->common()->Return(), count + 2, vals);
+  Node* ret =
+      graph()->NewNode(jsgraph()->common()->Return(count), count + 2, vals);
 
   MergeControlToEnd(jsgraph(), ret);
   return ret;
@@ -1667,14 +1676,21 @@
   return load;
 }
 
-Node* WasmGraphBuilder::BuildGrowMemory(Node* input) {
+Node* WasmGraphBuilder::GrowMemory(Node* input) {
+  Diamond check_input_range(
+      graph(), jsgraph()->common(),
+      graph()->NewNode(
+          jsgraph()->machine()->Uint32LessThanOrEqual(), input,
+          jsgraph()->Uint32Constant(wasm::WasmModule::kMaxMemPages)),
+      BranchHint::kTrue);
+
+  check_input_range.Chain(*control_);
+
   Runtime::FunctionId function_id = Runtime::kWasmGrowMemory;
   const Runtime::Function* function = Runtime::FunctionForId(function_id);
   CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
       jsgraph()->zone(), function_id, function->nargs, Operator::kNoThrow,
       CallDescriptor::kNoFlags);
-  Node** control_ptr = control_;
-  Node** effect_ptr = effect_;
   wasm::ModuleEnv* module = module_;
   input = BuildChangeUint32ToSmi(input);
   Node* inputs[] = {
@@ -1683,13 +1699,86 @@
           ExternalReference(function_id, jsgraph()->isolate())),  // ref
       jsgraph()->Int32Constant(function->nargs),                  // arity
       jsgraph()->HeapConstant(module->instance->context),         // context
-      *effect_ptr,
-      *control_ptr};
-  Node* node = graph()->NewNode(jsgraph()->common()->Call(desc),
+      *effect_,
+      check_input_range.if_true};
+  Node* call = graph()->NewNode(jsgraph()->common()->Call(desc),
                                 static_cast<int>(arraysize(inputs)), inputs);
-  *effect_ptr = node;
-  node = BuildChangeSmiToInt32(node);
-  return node;
+
+  Node* result = BuildChangeSmiToInt32(call);
+
+  result = check_input_range.Phi(MachineRepresentation::kWord32, result,
+                                 jsgraph()->Int32Constant(-1));
+  *effect_ = graph()->NewNode(jsgraph()->common()->EffectPhi(2), call, *effect_,
+                              check_input_range.merge);
+  *control_ = check_input_range.merge;
+  return result;
+}
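
In ordinary C++ terms, the Diamond above guards the runtime call behind a page-count range check; roughly (a sketch, with CallWasmGrowMemoryRuntime as a hypothetical stand-in for the guarded runtime call):

  int32_t GrowMemorySketch(uint32_t delta_pages) {
    if (delta_pages <= wasm::WasmModule::kMaxMemPages) {
      // True branch: call Runtime::kWasmGrowMemory and convert the Smi result.
      return CallWasmGrowMemoryRuntime(delta_pages);
    }
    // False branch of the diamond: fail without entering the runtime.
    return -1;
  }
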
+
+Node* WasmGraphBuilder::Throw(Node* input) {
+  MachineOperatorBuilder* machine = jsgraph()->machine();
+
+  // Pass the thrown value as two SMIs:
+  //
+  // upper = static_cast<uint32_t>(input) >> 16;
+  // lower = input & 0xFFFF;
+  //
+  // This is needed because we can't safely call BuildChangeInt32ToTagged from
+  // this method.
+  //
+  // TODO(wasm): figure out how to properly pass this to the runtime function.
+  Node* upper = BuildChangeInt32ToSmi(
+      graph()->NewNode(machine->Word32Shr(), input, Int32Constant(16)));
+  Node* lower = BuildChangeInt32ToSmi(
+      graph()->NewNode(machine->Word32And(), input, Int32Constant(0xFFFFu)));
+
+  Node* parameters[] = {lower, upper};  // thrown value
+  return BuildCallToRuntime(Runtime::kWasmThrow, jsgraph(),
+                            module_->instance->context, parameters,
+                            arraysize(parameters), effect_, *control_);
+}
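
The split described in the comment above is plain 16-bit halving of the 32-bit value; a self-contained sketch of the split and the corresponding recombination (the runtime side is not part of this diff, so the recombine is an assumption):

  #include <cstdint>

  // Split a 32-bit thrown value into two Smi-safe 16-bit halves.
  void SplitForThrow(int32_t input, uint32_t* upper, uint32_t* lower) {
    *upper = static_cast<uint32_t>(input) >> 16;
    *lower = static_cast<uint32_t>(input) & 0xFFFFu;
  }

  // Hypothetical recombination on the receiving side.
  int32_t RecombineThrownValue(uint32_t upper, uint32_t lower) {
    return static_cast<int32_t>((upper << 16) | lower);
  }
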
+
+Node* WasmGraphBuilder::Catch(Node* input, wasm::WasmCodePosition position) {
+  CommonOperatorBuilder* common = jsgraph()->common();
+
+  Node* parameters[] = {input};  // caught value
+  Node* value =
+      BuildCallToRuntime(Runtime::kWasmGetCaughtExceptionValue, jsgraph(),
+                         module_->instance->context, parameters,
+                         arraysize(parameters), effect_, *control_);
+
+  Node* is_smi;
+  Node* is_heap;
+  Branch(BuildTestNotSmi(value), &is_heap, &is_smi);
+
+  // is_smi
+  Node* smi_i32 = BuildChangeSmiToInt32(value);
+  Node* is_smi_effect = *effect_;
+
+  // is_heap
+  *control_ = is_heap;
+  Node* heap_f64 = BuildLoadHeapNumberValue(value, is_heap);
+
+  // *control_ needs to point to the current control dependency (is_heap) in
+  // case BuildI32SConvertF64 needs to insert nodes that depend on the "current"
+  // control node.
+  Node* heap_i32 = BuildI32SConvertF64(heap_f64, position);
+  // *control_ now contains the control node that should be used when merging
+  // the result for the catch clause. It may be different from is_heap because
+  // BuildI32SConvertF64 may introduce a new control node (used for trapping if
+  // heap_f64 cannot be converted to an i32).
+  is_heap = *control_;
+  Node* is_heap_effect = *effect_;
+
+  Node* merge = graph()->NewNode(common->Merge(2), is_heap, is_smi);
+  Node* effect_merge = graph()->NewNode(common->EffectPhi(2), is_heap_effect,
+                                        is_smi_effect, merge);
+
+  Node* value_i32 = graph()->NewNode(
+      common->Phi(MachineRepresentation::kWord32, 2), heap_i32, smi_i32, merge);
+
+  *control_ = merge;
+  *effect_ = effect_merge;
+  return value_i32;
 }
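
Conceptually, Catch above decodes the caught value along two paths and merges them into an i32; a sketch in ordinary C++ terms (ConvertF64ToI32OrTrap is a hypothetical stand-in for BuildI32SConvertF64):

  int32_t DecodeCaughtValueSketch(bool is_smi, int32_t smi_value,
                                  double heap_number_value) {
    if (is_smi) return smi_value;                     // BuildChangeSmiToInt32 path
    return ConvertF64ToI32OrTrap(heap_number_value);  // may trap on conversion
  }
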
 
 Node* WasmGraphBuilder::BuildI32DivS(Node* left, Node* right,
@@ -1961,6 +2050,7 @@
 }
 
 Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args,
+                                      Node*** rets,
                                       wasm::WasmCodePosition position) {
   const size_t params = sig->parameter_count();
   const size_t extra = 2;  // effect and control inputs.
@@ -1980,32 +2070,37 @@
   SetSourcePosition(call, position);
 
   *effect_ = call;
+  size_t ret_count = sig->return_count();
+  if (ret_count == 0) return call;  // No return value.
+
+  *rets = Buffer(ret_count);
+  if (ret_count == 1) {
+    // Only a single return value.
+    (*rets)[0] = call;
+  } else {
+    // Create projections for all return values.
+    for (size_t i = 0; i < ret_count; i++) {
+      (*rets)[i] = graph()->NewNode(jsgraph()->common()->Projection(i), call,
+                                    graph()->start());
+    }
+  }
   return call;
 }
 
-Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args,
+Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args, Node*** rets,
                                    wasm::WasmCodePosition position) {
   DCHECK_NULL(args[0]);
 
   // Add code object as constant.
-  args[0] = HeapConstant(module_->GetCodeOrPlaceholder(index));
+  Handle<Code> code = module_->GetFunctionCode(index);
+  DCHECK(!code.is_null());
+  args[0] = HeapConstant(code);
   wasm::FunctionSig* sig = module_->GetFunctionSignature(index);
 
-  return BuildWasmCall(sig, args, position);
+  return BuildWasmCall(sig, args, rets, position);
 }
 
-Node* WasmGraphBuilder::CallImport(uint32_t index, Node** args,
-                                   wasm::WasmCodePosition position) {
-  DCHECK_NULL(args[0]);
-
-  // Add code object as constant.
-  args[0] = HeapConstant(module_->GetImportCode(index));
-  wasm::FunctionSig* sig = module_->GetImportSignature(index);
-
-  return BuildWasmCall(sig, args, position);
-}
-
-Node* WasmGraphBuilder::CallIndirect(uint32_t index, Node** args,
+Node* WasmGraphBuilder::CallIndirect(uint32_t index, Node** args, Node*** rets,
                                      wasm::WasmCodePosition position) {
   DCHECK_NOT_NULL(args[0]);
   DCHECK(module_ && module_->instance);
@@ -2020,6 +2115,7 @@
   // Bounds check the index.
   uint32_t table_size =
       module_->IsValidTable(0) ? module_->GetTable(0)->max_size : 0;
+  wasm::FunctionSig* sig = module_->GetSignature(index);
   if (table_size > 0) {
     // Bounds check against the table size.
     Node* size = Uint32Constant(table_size);
@@ -2028,7 +2124,11 @@
   } else {
     // No function table. Generate a trap and return a constant.
     trap_->AddTrapIfFalse(wasm::kTrapFuncInvalid, Int32Constant(0), position);
-    return trap_->GetTrapValue(module_->GetSignature(index));
+    (*rets) = Buffer(sig->return_count());
+    for (size_t i = 0; i < sig->return_count(); i++) {
+      (*rets)[i] = trap_->GetTrapValue(sig->GetReturn(i));
+    }
+    return trap_->GetTrapValue(sig);
   }
   Node* table = FunctionTable(0);
 
@@ -2062,8 +2162,7 @@
       *effect_, *control_);
 
   args[0] = load_code;
-  wasm::FunctionSig* sig = module_->GetSignature(index);
-  return BuildWasmCall(sig, args, position);
+  return BuildWasmCall(sig, args, rets, position);
 }
 
 Node* WasmGraphBuilder::BuildI32Rol(Node* left, Node* right) {
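With the rets out-parameter threaded through BuildWasmCall, CallDirect and CallIndirect above, a caller reads return values roughly like this (a usage sketch; builder, func_index, args and position are illustrative):

  Node** rets = nullptr;
  builder->CallDirect(func_index, args, &rets, position);
  // One return value: rets[0] is the call node itself. Multiple return
  // values: rets[i] is a Projection of return value i. Zero return values:
  // rets is left untouched.
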
@@ -2197,11 +2296,11 @@
     case wasm::kAstI32:
       return BuildChangeInt32ToTagged(node);
     case wasm::kAstI64:
-      DCHECK(module_ && !module_->instance->context.is_null());
-      // Throw a TypeError.
+      // Throw a TypeError. The native context is good enough here because we
+      // only throw a TypeError.
       return BuildCallToRuntime(Runtime::kWasmThrowTypeError, jsgraph(),
-                                module_->instance->context, nullptr, 0, effect_,
-                                *control_);
+                                jsgraph()->isolate()->native_context(), nullptr,
+                                0, effect_, *control_);
     case wasm::kAstF32:
       node = graph()->NewNode(jsgraph()->machine()->ChangeFloat32ToFloat64(),
                               node);
@@ -2359,15 +2458,11 @@
       break;
     }
     case wasm::kAstI64:
-      // TODO(titzer): JS->i64 has no good solution right now. Using 32 bits.
-      num = graph()->NewNode(jsgraph()->machine()->TruncateFloat64ToWord32(),
-                             num);
-      if (jsgraph()->machine()->Is64()) {
-        // We cannot change an int32 to an int64 on a 32 bit platform. Instead
-        // we will split the parameter node later.
-        num = graph()->NewNode(jsgraph()->machine()->ChangeInt32ToInt64(), num);
-      }
-      break;
+      // Throw a TypeError. The native context is good enough here because we
+      // only throw a TypeError.
+      return BuildCallToRuntime(Runtime::kWasmThrowTypeError, jsgraph(),
+                                jsgraph()->isolate()->native_context(), nullptr,
+                                0, effect_, *control_);
     case wasm::kAstF32:
       num = graph()->NewNode(jsgraph()->machine()->TruncateFloat64ToFloat32(),
                              num);
@@ -2528,6 +2623,23 @@
   MergeControlToEnd(jsgraph(), ret);
 }
 
+int WasmGraphBuilder::AddParameterNodes(Node** args, int pos, int param_count,
+                                        wasm::FunctionSig* sig) {
+  // Convert WASM numbers to JS values.
+  int param_index = 0;
+  for (int i = 0; i < param_count; ++i) {
+    Node* param = graph()->NewNode(
+        jsgraph()->common()->Parameter(param_index++), graph()->start());
+    args[pos++] = ToJS(param, sig->GetParam(i));
+    if (jsgraph()->machine()->Is32() && sig->GetParam(i) == wasm::kAstI64) {
+      // On 32 bit platforms we have to skip the high word of int64
+      // parameters.
+      param_index++;
+    }
+  }
+  return pos;
+}
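
As a worked example of the index skipping in AddParameterNodes: on a 32-bit target an i64 WASM parameter arrives as two word-sized machine parameters, so param_index advances by two for it. A small sketch of the slot counting (assumes the wasm::FunctionSig accessors already used above):

  int MachineParamSlots32(const wasm::FunctionSig* sig) {
    int slots = 0;
    for (size_t i = 0; i < sig->parameter_count(); ++i) {
      // An i64 occupies a low and a high word; everything else one slot.
      slots += (sig->GetParam(i) == wasm::kAstI64) ? 2 : 1;
    }
    return slots;
  }
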
+
 void WasmGraphBuilder::BuildWasmToJSWrapper(Handle<JSReceiver> target,
                                             wasm::FunctionSig* sig) {
   DCHECK(target->IsCallable());
@@ -2548,18 +2660,14 @@
   *control_ = start;
   Node** args = Buffer(wasm_count + 7);
 
-  // The default context of the target.
-  Handle<Context> target_context = isolate->native_context();
+  Node* call;
+  bool direct_call = false;
 
-  // Optimization: check if the target is a JSFunction with the right arity so
-  // that we can call it directly.
-  bool call_direct = false;
-  int pos = 0;
   if (target->IsJSFunction()) {
     Handle<JSFunction> function = Handle<JSFunction>::cast(target);
     if (function->shared()->internal_formal_parameter_count() == wasm_count) {
-      call_direct = true;
-
+      direct_call = true;
+      int pos = 0;
       args[pos++] = jsgraph()->Constant(target);  // target callable.
       // Receiver.
       if (is_sloppy(function->shared()->language_mode()) &&
@@ -2574,13 +2682,22 @@
       desc = Linkage::GetJSCallDescriptor(
           graph()->zone(), false, wasm_count + 1, CallDescriptor::kNoFlags);
 
-      // For a direct call we have to use the context of the JSFunction.
-      target_context = handle(function->context());
+      // Convert WASM numbers to JS values.
+      pos = AddParameterNodes(args, pos, wasm_count, sig);
+
+      args[pos++] = jsgraph()->UndefinedConstant();        // new target
+      args[pos++] = jsgraph()->Int32Constant(wasm_count);  // argument count
+      args[pos++] = HeapConstant(handle(function->context()));
+      args[pos++] = *effect_;
+      args[pos++] = *control_;
+
+      call = graph()->NewNode(jsgraph()->common()->Call(desc), pos, args);
     }
   }
 
   // We cannot call the target directly, we have to use the Call builtin.
-  if (!call_direct) {
+  if (!direct_call) {
+    int pos = 0;
     Callable callable = CodeFactory::Call(isolate);
     args[pos++] = jsgraph()->HeapConstant(callable.code());
     args[pos++] = jsgraph()->Constant(target);           // target callable
@@ -2591,31 +2708,22 @@
     desc = Linkage::GetStubCallDescriptor(isolate, graph()->zone(),
                                           callable.descriptor(), wasm_count + 1,
                                           CallDescriptor::kNoFlags);
+
+    // Convert WASM numbers to JS values.
+    pos = AddParameterNodes(args, pos, wasm_count, sig);
+
+    // The native_context is sufficient here, because all kinds of callables
+    // that depend on the context provide their own context. The context here
+    // is only needed to throw a TypeError if the target is a constructor, or
+    // if the target is a native function or a callable JSObject, which can
+    // only be constructed by the runtime.
+    args[pos++] = HeapConstant(isolate->native_context());
+    args[pos++] = *effect_;
+    args[pos++] = *control_;
+
+    call = graph()->NewNode(jsgraph()->common()->Call(desc), pos, args);
   }
 
-  // Convert WASM numbers to JS values.
-  int param_index = 0;
-  for (int i = 0; i < wasm_count; ++i) {
-    Node* param =
-        graph()->NewNode(jsgraph()->common()->Parameter(param_index++), start);
-    args[pos++] = ToJS(param, sig->GetParam(i));
-    if (jsgraph()->machine()->Is32() && sig->GetParam(i) == wasm::kAstI64) {
-      // On 32 bit platforms we have to skip the high word of int64 parameters.
-      param_index++;
-    }
-  }
-
-  if (call_direct) {
-    args[pos++] = jsgraph()->UndefinedConstant();  // new target
-    args[pos++] = jsgraph()->Int32Constant(wasm_count);  // argument count
-  }
-
-  args[pos++] = HeapConstant(target_context);
-  args[pos++] = *effect_;
-  args[pos++] = *control_;
-
-  Node* call = graph()->NewNode(jsgraph()->common()->Call(desc), pos, args);
-
   // Convert the return value back.
   Node* ret;
   Node* val =
@@ -2650,6 +2758,30 @@
   }
 }
 
+Node* WasmGraphBuilder::CurrentMemoryPages() {
+  Runtime::FunctionId function_id = Runtime::kWasmMemorySize;
+  const Runtime::Function* function = Runtime::FunctionForId(function_id);
+  CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
+      jsgraph()->zone(), function_id, function->nargs, Operator::kNoThrow,
+      CallDescriptor::kNoFlags);
+  wasm::ModuleEnv* module = module_;
+  Node* inputs[] = {
+      jsgraph()->CEntryStubConstant(function->result_size),  // C entry
+      jsgraph()->ExternalConstant(
+          ExternalReference(function_id, jsgraph()->isolate())),  // ref
+      jsgraph()->Int32Constant(function->nargs),                  // arity
+      jsgraph()->HeapConstant(module->instance->context),         // context
+      *effect_,
+      *control_};
+  Node* call = graph()->NewNode(jsgraph()->common()->Call(desc),
+                                static_cast<int>(arraysize(inputs)), inputs);
+
+  Node* result = BuildChangeSmiToInt32(call);
+
+  *effect_ = call;
+  return result;
+}
+
 Node* WasmGraphBuilder::MemSize(uint32_t offset) {
   DCHECK(module_ && module_->instance);
   uint32_t size = static_cast<uint32_t>(module_->instance->mem_size);
@@ -2715,19 +2847,34 @@
 
   // Check against the effective size.
   size_t effective_size;
-  if (offset >= size || (static_cast<uint64_t>(offset) + memsize) > size) {
+  if (size == 0) {
     effective_size = 0;
+  } else if (offset >= size ||
+             (static_cast<uint64_t>(offset) + memsize) > size) {
+    // Two checks are needed when the offset is statically out of bounds:
+    // one that the offset itself is in bounds, and a second for offset +
+    // index, so that the code can be patched correctly on relocation.
+    effective_size = size - memsize + 1;
+    Node* cond = graph()->NewNode(jsgraph()->machine()->Uint32LessThan(),
+                                  jsgraph()->IntPtrConstant(offset),
+                                  jsgraph()->RelocatableInt32Constant(
+                                      static_cast<uint32_t>(effective_size),
+                                      RelocInfo::WASM_MEMORY_SIZE_REFERENCE));
+    trap_->AddTrapIfFalse(wasm::kTrapMemOutOfBounds, cond, position);
+    DCHECK(offset >= effective_size);
+    effective_size = offset - effective_size;
   } else {
     effective_size = size - offset - memsize + 1;
-  }
-  CHECK(effective_size <= kMaxUInt32);
+    CHECK(effective_size <= kMaxUInt32);
 
-  Uint32Matcher m(index);
-  if (m.HasValue()) {
-    uint32_t value = m.Value();
-    if (value < effective_size) {
-      // The bounds check will always succeed.
-      return;
+    Uint32Matcher m(index);
+    if (m.HasValue()) {
+      uint32_t value = m.Value();
+      if (value < effective_size) {
+        // The bounds check will always succeed.
+        return;
+      }
     }
   }
 
@@ -2746,15 +2893,26 @@
   Node* load;
 
   // WASM semantics throw on OOB. Introduce explicit bounds check.
-  BoundsCheckMem(memtype, index, offset, position);
+  if (!FLAG_wasm_trap_handler) {
+    BoundsCheckMem(memtype, index, offset, position);
+  }
   bool aligned = static_cast<int>(alignment) >=
                  ElementSizeLog2Of(memtype.representation());
 
   if (aligned ||
       jsgraph()->machine()->UnalignedLoadSupported(memtype, alignment)) {
-    load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
-                            MemBuffer(offset), index, *effect_, *control_);
+    if (FLAG_wasm_trap_handler) {
+      Node* context = HeapConstant(module_->instance->context);
+      Node* position_node = jsgraph()->Int32Constant(position);
+      load = graph()->NewNode(jsgraph()->machine()->ProtectedLoad(memtype),
+                              MemBuffer(offset), index, context, position_node,
+                              *effect_, *control_);
+    } else {
+      load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
+                              MemBuffer(offset), index, *effect_, *control_);
+    }
   } else {
+    DCHECK(!FLAG_wasm_trap_handler);
     load = graph()->NewNode(jsgraph()->machine()->UnalignedLoad(memtype),
                             MemBuffer(offset), index, *effect_, *control_);
   }
@@ -2866,15 +3024,31 @@
     source_position_table_->SetSourcePosition(node, pos);
 }
 
+Node* WasmGraphBuilder::DefaultS128Value() {
+  // TODO(gdeepti): Introduce Simd128Constant to common-operator.h and use it
+  // instead of creating a SIMD value here.
+  return graph()->NewNode(jsgraph()->machine()->CreateInt32x4(),
+                          Int32Constant(0), Int32Constant(0), Int32Constant(0),
+                          Int32Constant(0));
+}
+
 Node* WasmGraphBuilder::SimdOp(wasm::WasmOpcode opcode,
                                const NodeVector& inputs) {
   switch (opcode) {
-    case wasm::kExprI32x4ExtractLane:
-      return graph()->NewNode(jsgraph()->machine()->Int32x4ExtractLane(),
-                              inputs[0], inputs[1]);
     case wasm::kExprI32x4Splat:
-      return graph()->NewNode(jsgraph()->machine()->Int32x4ExtractLane(),
-                              inputs[0], inputs[0], inputs[0], inputs[0]);
+      return graph()->NewNode(jsgraph()->machine()->CreateInt32x4(), inputs[0],
+                              inputs[0], inputs[0], inputs[0]);
+    default:
+      return graph()->NewNode(UnsupportedOpcode(opcode), nullptr);
+  }
+}
+
+Node* WasmGraphBuilder::SimdExtractLane(wasm::WasmOpcode opcode, uint8_t lane,
+                                        Node* input) {
+  switch (opcode) {
+    case wasm::kExprI32x4ExtractLane:
+      return graph()->NewNode(jsgraph()->machine()->Int32x4ExtractLane(), input,
+                              Int32Constant(lane));
     default:
       return graph()->NewNode(UnsupportedOpcode(opcode), nullptr);
   }
diff --git a/src/compiler/wasm-compiler.h b/src/compiler/wasm-compiler.h
index 487ddcb..c980a87 100644
--- a/src/compiler/wasm-compiler.h
+++ b/src/compiler/wasm-compiler.h
@@ -9,10 +9,11 @@
 
 // Clients of this interface shouldn't depend on lots of compiler internals.
 // Do not include anything from src/compiler here!
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/wasm/wasm-opcodes.h"
 #include "src/wasm/wasm-result.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -132,8 +133,12 @@
               wasm::WasmCodePosition position = wasm::kNoCodePosition);
   Node* Unop(wasm::WasmOpcode opcode, Node* input,
              wasm::WasmCodePosition position = wasm::kNoCodePosition);
+  Node* GrowMemory(Node* input);
+  Node* Throw(Node* input);
+  Node* Catch(Node* input, wasm::WasmCodePosition position);
   unsigned InputCount(Node* node);
   bool IsPhiWithMerge(Node* phi, Node* merge);
+  bool ThrowsException(Node* node, Node** if_success, Node** if_exception);
   void AppendToMerge(Node* merge, Node* from);
   void AppendToPhi(Node* phi, Node* from);
 
@@ -150,12 +155,11 @@
   Node* ReturnVoid();
   Node* Unreachable(wasm::WasmCodePosition position);
 
-  Node* CallDirect(uint32_t index, Node** args,
+  Node* CallDirect(uint32_t index, Node** args, Node*** rets,
                    wasm::WasmCodePosition position);
-  Node* CallImport(uint32_t index, Node** args,
-                   wasm::WasmCodePosition position);
-  Node* CallIndirect(uint32_t index, Node** args,
+  Node* CallIndirect(uint32_t index, Node** args, Node*** rets,
                      wasm::WasmCodePosition position);
+
   void BuildJSToWasmWrapper(Handle<Code> wasm_code, wasm::FunctionSig* sig);
   void BuildWasmToJSWrapper(Handle<JSReceiver> target, wasm::FunctionSig* sig);
 
@@ -167,7 +171,7 @@
   //-----------------------------------------------------------------------
   // Operations that concern the linear memory.
   //-----------------------------------------------------------------------
-  Node* MemSize(uint32_t offset);
+  Node* CurrentMemoryPages();
   Node* GetGlobal(uint32_t index);
   Node* SetGlobal(uint32_t index, Node* val);
   Node* LoadMem(wasm::LocalType type, MachineType memtype, Node* index,
@@ -194,7 +198,10 @@
 
   void SetSourcePosition(Node* node, wasm::WasmCodePosition position);
 
+  Node* DefaultS128Value();
+
   Node* SimdOp(wasm::WasmOpcode opcode, const NodeVector& inputs);
+  Node* SimdExtractLane(wasm::WasmOpcode opcode, uint8_t lane, Node* input);
 
  private:
   static const int kDefaultBufferSize = 16;
@@ -223,6 +230,7 @@
   Graph* graph();
 
   Node* String(const char* string);
+  Node* MemSize(uint32_t offset);
   Node* MemBuffer(uint32_t offset);
   void BoundsCheckMem(MachineType memtype, Node* index, uint32_t offset,
                       wasm::WasmCodePosition position);
@@ -234,7 +242,7 @@
   Node* MaskShiftCount64(Node* node);
 
   Node* BuildCCall(MachineSignature* sig, Node** args);
-  Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args,
+  Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node*** rets,
                       wasm::WasmCodePosition position);
 
   Node* BuildF32CopySign(Node* left, Node* right);
@@ -301,6 +309,7 @@
 
   Node* BuildJavaScriptToNumber(Node* node, Node* context, Node* effect,
                                 Node* control);
+
   Node* BuildChangeInt32ToTagged(Node* value);
   Node* BuildChangeFloat64ToTagged(Node* value);
   Node* BuildChangeTaggedToFloat64(Node* value);
@@ -315,7 +324,6 @@
   Node* BuildAllocateHeapNumberWithValue(Node* value, Node* control);
   Node* BuildLoadHeapNumberValue(Node* value, Node* control);
   Node* BuildHeapNumberValueIndexConstant();
-  Node* BuildGrowMemory(Node* input);
 
   // Asm.js specific functionality.
   Node* BuildI32AsmjsSConvertF32(Node* input);
@@ -334,6 +342,9 @@
     if (buf != buffer) memcpy(buf, buffer, old_count * sizeof(Node*));
     return buf;
   }
+
+  int AddParameterNodes(Node** args, int pos, int param_count,
+                        wasm::FunctionSig* sig);
 };
 }  // namespace compiler
 }  // namespace internal
diff --git a/src/compiler/wasm-linkage.cc b/src/compiler/wasm-linkage.cc
index c50f643..574db1c 100644
--- a/src/compiler/wasm-linkage.cc
+++ b/src/compiler/wasm-linkage.cc
@@ -11,7 +11,7 @@
 
 #include "src/compiler/linkage.h"
 
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -131,7 +131,7 @@
 // == s390x ==================================================================
 // ===========================================================================
 #define GP_PARAM_REGISTERS r2, r3, r4, r5, r6
-#define GP_RETURN_REGISTERS r2
+#define GP_RETURN_REGISTERS r2, r3
 #define FP_PARAM_REGISTERS d0, d2, d4, d6
 #define FP_RETURN_REGISTERS d0, d2, d4, d6
 
diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc
index 49a097b..4d63e9a 100644
--- a/src/compiler/x64/code-generator-x64.cc
+++ b/src/compiler/x64/code-generator-x64.cc
@@ -4,11 +4,12 @@
 
 #include "src/compiler/code-generator.h"
 
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
 #include "src/compiler/osr.h"
+#include "src/wasm/wasm-module.h"
 #include "src/x64/assembler-x64.h"
 #include "src/x64/macro-assembler-x64.h"
 
@@ -132,6 +133,11 @@
         int32_t disp = InputInt32(NextOffset(offset));
         return Operand(index, scale, disp);
       }
+      case kMode_Root: {
+        Register base = kRootRegister;
+        int32_t disp = InputInt32(NextOffset(offset));
+        return Operand(base, disp);
+      }
       case kMode_None:
         UNREACHABLE();
         return Operand(no_reg, 0);
@@ -260,6 +266,40 @@
   RecordWriteMode const mode_;
 };
 
+class WasmOutOfLineTrap final : public OutOfLineCode {
+ public:
+  WasmOutOfLineTrap(CodeGenerator* gen, Address pc, bool frame_elided,
+                    Register context, int32_t position)
+      : OutOfLineCode(gen),
+        pc_(pc),
+        frame_elided_(frame_elided),
+        context_(context),
+        position_(position) {}
+
+  void Generate() final {
+    // TODO(eholk): record pc_ and the current pc in a table so that
+    // the signal handler can find it.
+    USE(pc_);
+
+    if (frame_elided_) {
+      __ EnterFrame(StackFrame::WASM);
+    }
+
+    wasm::TrapReason trap_id = wasm::kTrapMemOutOfBounds;
+    int trap_reason = wasm::WasmOpcodes::TrapReasonToMessageId(trap_id);
+    __ Push(Smi::FromInt(trap_reason));
+    __ Push(Smi::FromInt(position_));
+    __ Move(rsi, context_);
+    __ CallRuntime(Runtime::kThrowWasmError);
+  }
+
+ private:
+  Address pc_;
+  bool frame_elided_;
+  Register context_;
+  int32_t position_;
+};
+
 }  // namespace
 
 
@@ -866,9 +906,6 @@
     case kArchDebugBreak:
       __ int3();
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchNop:
     case kArchThrowTerminator:
       // don't emit code for nops.
@@ -878,8 +915,8 @@
           BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -1422,7 +1459,7 @@
       break;
     }
     case kSSEFloat64Sqrt:
-      ASSEMBLE_SSE_UNOP(sqrtsd);
+      ASSEMBLE_SSE_UNOP(Sqrtsd);
       break;
     case kSSEFloat64Round: {
       CpuFeatureScope sse_scope(masm(), SSE4_1);
@@ -1852,6 +1889,7 @@
       break;
     }
     case kX64Movl:
+    case kX64TrapMovl:
       if (instr->HasOutput()) {
         if (instr->addressing_mode() == kMode_None) {
           if (instr->InputAt(0)->IsRegister()) {
@@ -1860,7 +1898,14 @@
             __ movl(i.OutputRegister(), i.InputOperand(0));
           }
         } else {
+          Address pc = __ pc();
           __ movl(i.OutputRegister(), i.MemoryOperand());
+
+          if (arch_opcode == kX64TrapMovl) {
+            bool frame_elided = !frame_access_state()->has_frame();
+            new (zone()) WasmOutOfLineTrap(this, pc, frame_elided,
+                                           i.InputRegister(2), i.InputInt32(3));
+          }
         }
         __ AssertZeroExtended(i.OutputRegister());
       } else {
@@ -2032,6 +2077,18 @@
       __ xchgl(i.InputRegister(index), operand);
       break;
     }
+    case kX64Int32x4Create: {
+      CpuFeatureScope sse_scope(masm(), SSE4_1);
+      XMMRegister dst = i.OutputSimd128Register();
+      __ Movd(dst, i.InputRegister(0));
+      __ shufps(dst, dst, 0x0);
+      break;
+    }
+    case kX64Int32x4ExtractLane: {
+      CpuFeatureScope sse_scope(masm(), SSE4_1);
+      __ Pextrd(i.OutputRegister(), i.InputSimd128Register(0), i.InputInt8(1));
+      break;
+    }
     case kCheckedLoadInt8:
       ASSEMBLE_CHECKED_LOAD_INTEGER(movsxbl);
       break;
@@ -2252,13 +2309,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
@@ -2449,7 +2507,11 @@
             if (value == 0) {
               __ xorl(dst, dst);
             } else {
-              __ movl(dst, Immediate(value));
+              if (src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
+                __ movl(dst, Immediate(value, src.rmode()));
+              } else {
+                __ movl(dst, Immediate(value));
+              }
             }
           }
           break;
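The kX64Int32x4Create / kX64Int32x4ExtractLane cases above splat a scalar into all four lanes (Movd into lane 0, then shufps with mask 0x0) and read one lane back with Pextrd. A standalone sketch of the same semantics on a plain array, for illustration only and not part of the patch:

  #include <cstdint>

  struct Int32x4 { int32_t lane[4]; };

  // Movd dst, src; shufps dst, dst, 0x0  -- broadcast lane 0 to all lanes.
  Int32x4 Int32x4Create(int32_t x) { return Int32x4{{x, x, x, x}}; }

  // Pextrd dst, src, imm  -- read the selected lane.
  int32_t Int32x4ExtractLane(const Int32x4& v, uint8_t lane) {
    return v.lane[lane & 3];
  }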
diff --git a/src/compiler/x64/instruction-codes-x64.h b/src/compiler/x64/instruction-codes-x64.h
index 7ab1097..35acec0 100644
--- a/src/compiler/x64/instruction-codes-x64.h
+++ b/src/compiler/x64/instruction-codes-x64.h
@@ -128,6 +128,7 @@
   V(X64Movzxwq)                    \
   V(X64Movw)                       \
   V(X64Movl)                       \
+  V(X64TrapMovl)                   \
   V(X64Movsxlq)                    \
   V(X64Movq)                       \
   V(X64Movsd)                      \
@@ -145,7 +146,9 @@
   V(X64StackCheck)                 \
   V(X64Xchgb)                      \
   V(X64Xchgw)                      \
-  V(X64Xchgl)
+  V(X64Xchgl)                      \
+  V(X64Int32x4Create)              \
+  V(X64Int32x4ExtractLane)
 
 // Addressing modes represent the "shape" of inputs to an instruction.
 // Many instructions support multiple addressing modes. Addressing modes
@@ -177,7 +180,8 @@
   V(M1I)  /* [      %r2*1 + K] */      \
   V(M2I)  /* [      %r2*2 + K] */      \
   V(M4I)  /* [      %r2*4 + K] */      \
-  V(M8I)  /* [      %r2*8 + K] */
+  V(M8I)  /* [      %r2*8 + K] */      \
+  V(Root) /* [%root       + K] */
 
 }  // namespace compiler
 }  // namespace internal
diff --git a/src/compiler/x64/instruction-scheduler-x64.cc b/src/compiler/x64/instruction-scheduler-x64.cc
index fb4b749..4208d8a 100644
--- a/src/compiler/x64/instruction-scheduler-x64.cc
+++ b/src/compiler/x64/instruction-scheduler-x64.cc
@@ -36,10 +36,6 @@
     case kX64Imul32:
     case kX64ImulHigh32:
     case kX64UmulHigh32:
-    case kX64Idiv:
-    case kX64Idiv32:
-    case kX64Udiv:
-    case kX64Udiv32:
     case kX64Not:
     case kX64Not32:
     case kX64Neg:
@@ -127,10 +123,20 @@
     case kX64Lea:
     case kX64Dec32:
     case kX64Inc32:
+    case kX64Int32x4Create:
+    case kX64Int32x4ExtractLane:
       return (instr->addressing_mode() == kMode_None)
           ? kNoOpcodeFlags
           : kIsLoadOperation | kHasSideEffect;
 
+    case kX64Idiv:
+    case kX64Idiv32:
+    case kX64Udiv:
+    case kX64Udiv32:
+      return (instr->addressing_mode() == kMode_None)
+                 ? kMayNeedDeoptCheck
+                 : kMayNeedDeoptCheck | kIsLoadOperation | kHasSideEffect;
+
     case kX64Movsxbl:
     case kX64Movzxbl:
     case kX64Movsxbq:
@@ -149,6 +155,7 @@
       return kHasSideEffect;
 
     case kX64Movl:
+    case kX64TrapMovl:
       if (instr->HasOutput()) {
         DCHECK(instr->InputCount() >= 1);
         return instr->InputAt(0)->IsRegister() ? kNoOpcodeFlags
diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc
index 798d438..9a7657e 100644
--- a/src/compiler/x64/instruction-selector-x64.cc
+++ b/src/compiler/x64/instruction-selector-x64.cc
@@ -60,8 +60,7 @@
     switch (opcode) {
       case kX64Cmp:
       case kX64Test:
-        return rep == MachineRepresentation::kWord64 ||
-               rep == MachineRepresentation::kTagged;
+        return rep == MachineRepresentation::kWord64 || IsAnyTagged(rep);
       case kX64Cmp32:
       case kX64Test32:
         return rep == MachineRepresentation::kWord32;
@@ -137,6 +136,22 @@
   AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
                                                   InstructionOperand inputs[],
                                                   size_t* input_count) {
+    if (selector()->CanAddressRelativeToRootsRegister()) {
+      LoadMatcher<ExternalReferenceMatcher> m(operand);
+      if (m.index().HasValue() && m.object().HasValue()) {
+        Address const kRootsRegisterValue =
+            kRootRegisterBias +
+            reinterpret_cast<Address>(
+                selector()->isolate()->heap()->roots_array_start());
+        ptrdiff_t const delta =
+            m.index().Value() +
+            (m.object().Value().address() - kRootsRegisterValue);
+        if (is_int32(delta)) {
+          inputs[(*input_count)++] = TempImmediate(static_cast<int32_t>(delta));
+          return kMode_Root;
+        }
+      }
+    }
     BaseWithIndexAndDisplacement64Matcher m(operand, AddressOption::kAllowAll);
     DCHECK(m.matches());
     if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
@@ -155,11 +170,9 @@
   }
 };
 
+namespace {
 
-void InstructionSelector::VisitLoad(Node* node) {
-  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
-  X64OperandGenerator g(this);
-
+ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
   ArchOpcode opcode = kArchNop;
   switch (load_rep.representation()) {
     case MachineRepresentation::kFloat32:
@@ -187,9 +200,18 @@
     case MachineRepresentation::kSimd128:  // Fall through.
     case MachineRepresentation::kNone:
       UNREACHABLE();
-      return;
+      break;
   }
+  return opcode;
+}
 
+}  // namespace
+
+void InstructionSelector::VisitLoad(Node* node) {
+  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
+  X64OperandGenerator g(this);
+
+  ArchOpcode opcode = GetLoadOpcode(load_rep);
   InstructionOperand outputs[1];
   outputs[0] = g.DefineAsRegister(node);
   InstructionOperand inputs[3];
@@ -200,6 +222,24 @@
   Emit(code, 1, outputs, input_count, inputs);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
+  X64OperandGenerator g(this);
+
+  ArchOpcode opcode = GetLoadOpcode(load_rep);
+  InstructionOperand outputs[1];
+  outputs[0] = g.DefineAsRegister(node);
+  InstructionOperand inputs[4];
+  size_t input_count = 0;
+  AddressingMode mode =
+      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
+  // Add the context parameter as an input.
+  inputs[input_count++] = g.UseUniqueRegister(node->InputAt(2));
+  // Add the source position as an input.

+  inputs[input_count++] = g.UseImmediate(node->InputAt(3));
+  InstructionCode code = opcode | AddressingModeField::encode(mode);
+  Emit(code, 1, outputs, input_count, inputs);
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   X64OperandGenerator g(this);
@@ -212,7 +252,7 @@
   MachineRepresentation rep = store_rep.representation();
 
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     AddressingMode addressing_mode;
     InstructionOperand inputs[3];
     size_t input_count = 0;
@@ -701,6 +741,7 @@
           case kMode_M2I:
           case kMode_M4I:
           case kMode_M8I:
+          case kMode_Root:
             UNREACHABLE();
         }
         inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
@@ -1170,11 +1211,10 @@
   }
 }
 
+namespace {
 
-void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
-  X64OperandGenerator g(this);
-  Node* value = node->InputAt(0);
-  switch (value->opcode()) {
+bool ZeroExtendsWord32ToWord64(Node* node) {
+  switch (node->opcode()) {
     case IrOpcode::kWord32And:
     case IrOpcode::kWord32Or:
     case IrOpcode::kWord32Xor:
@@ -1195,14 +1235,35 @@
     case IrOpcode::kUint32LessThan:
     case IrOpcode::kUint32LessThanOrEqual:
     case IrOpcode::kUint32Mod:
-    case IrOpcode::kUint32MulHigh: {
+    case IrOpcode::kUint32MulHigh:
       // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
       // zero-extension is a no-op.
-      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
-      return;
+      return true;
+    case IrOpcode::kProjection: {
+      Node* const value = node->InputAt(0);
+      switch (value->opcode()) {
+        case IrOpcode::kInt32AddWithOverflow:
+        case IrOpcode::kInt32SubWithOverflow:
+        case IrOpcode::kInt32MulWithOverflow:
+          return true;
+        default:
+          return false;
+      }
     }
     default:
-      break;
+      return false;
+  }
+}
+
+}  // namespace
+
+void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
+  X64OperandGenerator g(this);
+  Node* value = node->InputAt(0);
+  if (ZeroExtendsWord32ToWord64(value)) {
+    // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
+    // zero-extension is a no-op.
+    return EmitIdentity(node);
   }
   Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
 }
@@ -1276,8 +1337,7 @@
         Int64BinopMatcher m(value);
         if (m.right().Is(32)) {
           if (TryMatchLoadWord64AndShiftRight(this, value, kX64Movl)) {
-            Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
-            return;
+            return EmitIdentity(node);
           }
           Emit(kX64Shr, g.DefineSameAsFirst(node),
                g.UseRegister(m.left().node()), g.TempImmediate(32));
@@ -2213,6 +2273,17 @@
   Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
 }
 
+void InstructionSelector::VisitCreateInt32x4(Node* node) {
+  X64OperandGenerator g(this);
+  Emit(kX64Int32x4Create, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
+}
+
+void InstructionSelector::VisitInt32x4ExtractLane(Node* node) {
+  X64OperandGenerator g(this);
+  Emit(kX64Int32x4ExtractLane, g.DefineAsRegister(node),
+       g.UseRegister(node->InputAt(0)), g.UseImmediate(node->InputAt(1)));
+}
+
 // static
 MachineOperatorBuilder::Flags
 InstructionSelector::SupportedMachineOperatorFlags() {
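The roots-register fast path added to GetEffectiveAddressMemoryOperand above rewrites a load from an ExternalReference as a [%root + K] operand (kMode_Root). A worked sketch of the displacement computation with made-up addresses; every constant below is hypothetical and chosen only to illustrate the arithmetic:

  #include <cstdint>
  #include <cstdio>

  int main() {
    const intptr_t roots_array_start = 0x2000;  // hypothetical heap address
    const intptr_t kRootRegisterBias = 0x80;    // hypothetical bias value
    const intptr_t external_ref      = 0x2400;  // hypothetical reference address
    const intptr_t index             = 8;       // constant index from the matcher
    // %root holds roots_array_start + kRootRegisterBias.
    const intptr_t roots_register_value = roots_array_start + kRootRegisterBias;
    const intptr_t delta = index + (external_ref - roots_register_value);
    // 8 + (0x2400 - 0x2080) = 0x388; it fits in an int32, so the selector can
    // emit kMode_Root with displacement 0x388 instead of materializing the
    // full 64-bit external reference.
    std::printf("delta = 0x%lx\n", static_cast<unsigned long>(delta));
    return 0;
  }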
diff --git a/src/compiler/x87/code-generator-x87.cc b/src/compiler/x87/code-generator-x87.cc
index 29e2dd7..f5e6634 100644
--- a/src/compiler/x87/code-generator-x87.cc
+++ b/src/compiler/x87/code-generator-x87.cc
@@ -4,7 +4,7 @@
 
 #include "src/compiler/code-generator.h"
 
-#include "src/ast/scopes.h"
+#include "src/compilation-info.h"
 #include "src/compiler/code-generator-impl.h"
 #include "src/compiler/gap-resolver.h"
 #include "src/compiler/node-matchers.h"
@@ -715,9 +715,6 @@
     case kArchDebugBreak:
       __ int3();
       break;
-    case kArchImpossible:
-      __ Abort(kConversionFromImpossibleValue);
-      break;
     case kArchNop:
     case kArchThrowTerminator:
       // don't emit code for nops.
@@ -746,8 +743,8 @@
 
       Deoptimizer::BailoutType bailout_type =
           Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
-      CodeGenResult result =
-          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
+      CodeGenResult result = AssembleDeoptimizerCall(
+          deopt_state_id, bailout_type, current_source_position_);
       if (result != kSuccess) return result;
       break;
     }
@@ -2241,13 +2238,14 @@
 }
 
 CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
-    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
+    int deoptimization_id, Deoptimizer::BailoutType bailout_type,
+    SourcePosition pos) {
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       isolate(), deoptimization_id, bailout_type);
   if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
   DeoptimizeReason deoptimization_reason =
       GetDeoptimizationReason(deoptimization_id);
-  __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
+  __ RecordDeoptReason(deoptimization_reason, pos.raw(), deoptimization_id);
   __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
   return kSuccess;
 }
diff --git a/src/compiler/x87/instruction-selector-x87.cc b/src/compiler/x87/instruction-selector-x87.cc
index 0fe6a4b..757eee9 100644
--- a/src/compiler/x87/instruction-selector-x87.cc
+++ b/src/compiler/x87/instruction-selector-x87.cc
@@ -211,6 +211,10 @@
   Emit(code, 1, outputs, input_count, inputs);
 }
 
+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}
 
 void InstructionSelector::VisitStore(Node* node) {
   X87OperandGenerator g(this);
@@ -223,7 +227,7 @@
   MachineRepresentation rep = store_rep.representation();
 
   if (write_barrier_kind != kNoWriteBarrier) {
-    DCHECK_EQ(MachineRepresentation::kTagged, rep);
+    DCHECK(CanBeTaggedPointer(rep));
     AddressingMode addressing_mode;
     InstructionOperand inputs[3];
     size_t input_count = 0;
diff --git a/src/compiler/zone-pool.cc b/src/compiler/zone-pool.cc
index 13fec35..7681eeb 100644
--- a/src/compiler/zone-pool.cc
+++ b/src/compiler/zone-pool.cc
@@ -64,7 +64,7 @@
   }
 }
 
-ZonePool::ZonePool(base::AccountingAllocator* allocator)
+ZonePool::ZonePool(AccountingAllocator* allocator)
     : max_allocated_bytes_(0), total_deleted_bytes_(0), allocator_(allocator) {}
 
 ZonePool::~ZonePool() {
diff --git a/src/compiler/zone-pool.h b/src/compiler/zone-pool.h
index 44a649f..7a3fe75 100644
--- a/src/compiler/zone-pool.h
+++ b/src/compiler/zone-pool.h
@@ -9,7 +9,7 @@
 #include <set>
 #include <vector>
 
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -61,7 +61,7 @@
     DISALLOW_COPY_AND_ASSIGN(StatsScope);
   };
 
-  explicit ZonePool(base::AccountingAllocator* allocator);
+  explicit ZonePool(AccountingAllocator* allocator);
   ~ZonePool();
 
   size_t GetMaxAllocatedBytes();
@@ -82,7 +82,7 @@
   Stats stats_;
   size_t max_allocated_bytes_;
   size_t total_deleted_bytes_;
-  base::AccountingAllocator* allocator_;
+  AccountingAllocator* allocator_;
 
   DISALLOW_COPY_AND_ASSIGN(ZonePool);
 };
diff --git a/src/contexts.cc b/src/contexts.cc
index b3cf255..4fb3c83 100644
--- a/src/contexts.cc
+++ b/src/contexts.cc
@@ -57,15 +57,16 @@
 
 
 bool Context::is_declaration_context() {
-  if (IsFunctionContext() || IsNativeContext() || IsScriptContext()) {
+  if (IsFunctionContext() || IsNativeContext() || IsScriptContext() ||
+      IsModuleContext()) {
     return true;
   }
   if (!IsBlockContext()) return false;
   Object* ext = extension();
   // If we have the special extension, we immediately know it must be a
   // declaration scope. That's just a small performance shortcut.
-  return ext->IsSloppyBlockWithEvalContextExtension()
-      || ScopeInfo::cast(ext)->is_declaration_scope();
+  return ext->IsContextExtension() ||
+         ScopeInfo::cast(ext)->is_declaration_scope();
 }
 
 
@@ -93,36 +94,47 @@
   HeapObject* object = extension();
   if (object->IsTheHole(GetIsolate())) return nullptr;
   if (IsBlockContext()) {
-    if (!object->IsSloppyBlockWithEvalContextExtension()) return nullptr;
-    object = SloppyBlockWithEvalContextExtension::cast(object)->extension();
+    if (!object->IsContextExtension()) return nullptr;
+    object = JSObject::cast(ContextExtension::cast(object)->extension());
   }
   DCHECK(object->IsJSContextExtensionObject() ||
          (IsNativeContext() && object->IsJSGlobalObject()));
   return JSObject::cast(object);
 }
 
-
 JSReceiver* Context::extension_receiver() {
   DCHECK(IsNativeContext() || IsWithContext() ||
          IsFunctionContext() || IsBlockContext());
-  return IsWithContext() ? JSReceiver::cast(extension()) : extension_object();
+  return IsWithContext() ? JSReceiver::cast(
+                               ContextExtension::cast(extension())->extension())
+                         : extension_object();
 }
 
-
 ScopeInfo* Context::scope_info() {
-  DCHECK(IsModuleContext() || IsScriptContext() || IsBlockContext());
+  DCHECK(!IsNativeContext());
+  if (IsFunctionContext() || IsModuleContext()) {
+    return closure()->shared()->scope_info();
+  }
   HeapObject* object = extension();
-  if (object->IsSloppyBlockWithEvalContextExtension()) {
-    DCHECK(IsBlockContext());
-    object = SloppyBlockWithEvalContextExtension::cast(object)->scope_info();
+  if (object->IsContextExtension()) {
+    DCHECK(IsBlockContext() || IsCatchContext() || IsWithContext() ||
+           IsDebugEvaluateContext());
+    object = ContextExtension::cast(object)->scope_info();
   }
   return ScopeInfo::cast(object);
 }
 
+Module* Context::module() {
+  Context* current = this;
+  while (!current->IsModuleContext()) {
+    current = current->previous();
+  }
+  return Module::cast(current->extension());
+}
 
 String* Context::catch_name() {
   DCHECK(IsCatchContext());
-  return String::cast(extension());
+  return String::cast(ContextExtension::cast(extension())->extension());
 }
 
 
@@ -178,13 +190,14 @@
 
 static PropertyAttributes GetAttributesForMode(VariableMode mode) {
   DCHECK(IsDeclaredVariableMode(mode));
-  return IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
+  return mode == CONST ? READ_ONLY : NONE;
 }
 
 Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
                                int* index, PropertyAttributes* attributes,
                                InitializationFlag* init_flag,
                                VariableMode* variable_mode) {
+  DCHECK(!IsModuleContext());
   Isolate* isolate = GetIsolate();
   Handle<Context> context(this, isolate);
 
@@ -248,8 +261,14 @@
           object->IsJSContextExtensionObject()) {
         maybe = JSReceiver::GetOwnPropertyAttributes(object, name);
       } else if (context->IsWithContext()) {
-        // A with context will never bind "this".
-        if (name->Equals(*isolate->factory()->this_string())) {
+        // A with context will never bind "this", but debug-eval may look into
+        // a with context when resolving "this". Other synthetic variables such
+        // as new.target may be resolved as DYNAMIC_LOCAL due to bug v8:5405;
+        // skipping them here serves as a workaround until a more thorough
+        // fix can be applied.
+        // TODO(v8:5405): Replace this check with a DCHECK when resolution
+        // of synthetic variables does not go through this code path.
+        if (ScopeInfo::VariableIsSynthetic(*name)) {
           maybe = Just(ABSENT);
         } else {
           LookupIterator it(object, name, object);
@@ -307,10 +326,11 @@
       }
 
       // Check the slot corresponding to the intermediate context holding
-      // only the function name variable.
-      if (follow_context_chain && context->IsFunctionContext()) {
-        VariableMode mode;
-        int function_index = scope_info->FunctionContextSlotIndex(*name, &mode);
+      // only the function name variable. It's conceptually (and spec-wise)
+      // in an outer scope of the function's declaration scope.
+      if (follow_context_chain && (flags & STOP_AT_DECLARATION_SCOPE) == 0 &&
+          context->IsFunctionContext()) {
+        int function_index = scope_info->FunctionContextSlotIndex(*name);
         if (function_index >= 0) {
           if (FLAG_trace_contexts) {
             PrintF("=> found intermediate function in context slot %d\n",
@@ -318,9 +338,8 @@
           }
           *index = function_index;
           *attributes = READ_ONLY;
-          DCHECK(mode == CONST_LEGACY || mode == CONST);
           *init_flag = kCreatedInitialized;
-          *variable_mode = mode;
+          *variable_mode = CONST;
           return context;
         }
       }
@@ -339,18 +358,21 @@
       }
     } else if (context->IsDebugEvaluateContext()) {
       // Check materialized locals.
-      Object* obj = context->get(EXTENSION_INDEX);
-      if (obj->IsJSReceiver()) {
-        Handle<JSReceiver> extension(JSReceiver::cast(obj));
-        LookupIterator it(extension, name, extension);
-        Maybe<bool> found = JSReceiver::HasProperty(&it);
-        if (found.FromMaybe(false)) {
-          *attributes = NONE;
-          return extension;
+      Object* ext = context->get(EXTENSION_INDEX);
+      if (ext->IsContextExtension()) {
+        Object* obj = ContextExtension::cast(ext)->extension();
+        if (obj->IsJSReceiver()) {
+          Handle<JSReceiver> extension(JSReceiver::cast(obj));
+          LookupIterator it(extension, name, extension);
+          Maybe<bool> found = JSReceiver::HasProperty(&it);
+          if (found.FromMaybe(false)) {
+            *attributes = NONE;
+            return extension;
+          }
         }
       }
       // Check the original context, but do not follow its context chain.
-      obj = context->get(WRAPPED_CONTEXT_INDEX);
+      Object* obj = context->get(WRAPPED_CONTEXT_INDEX);
       if (obj->IsContext()) {
         Handle<Object> result =
             Context::cast(obj)->Lookup(name, DONT_FOLLOW_CHAINS, index,
@@ -387,25 +409,6 @@
 }
 
 
-void Context::InitializeGlobalSlots() {
-  DCHECK(IsScriptContext());
-  DisallowHeapAllocation no_gc;
-
-  ScopeInfo* scope_info = this->scope_info();
-
-  int context_globals = scope_info->ContextGlobalCount();
-  if (context_globals > 0) {
-    PropertyCell* empty_cell = GetHeap()->empty_property_cell();
-
-    int context_locals = scope_info->ContextLocalCount();
-    int index = Context::MIN_CONTEXT_SLOTS + context_locals;
-    for (int i = 0; i < context_globals; i++) {
-      set(index++, empty_cell);
-    }
-  }
-}
-
-
 void Context::AddOptimizedFunction(JSFunction* function) {
   DCHECK(IsNativeContext());
   Isolate* isolate = GetIsolate();
@@ -544,6 +547,17 @@
 
 #undef COMPARE_NAME
 
+#define COMPARE_NAME(index, type, name) \
+  if (strncmp(string, #name, length) == 0) return index;
+
+int Context::IntrinsicIndexForName(const unsigned char* unsigned_string,
+                                   int length) {
+  const char* string = reinterpret_cast<const char*>(unsigned_string);
+  NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(COMPARE_NAME);
+  return kNotFound;
+}
+
+#undef COMPARE_NAME
 
 #ifdef DEBUG
 
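Several of the accessors above (extension_object, extension_receiver, scope_info, catch_name) now unwrap a ContextExtension, which pairs a ScopeInfo with the actual payload. A hypothetical helper capturing that shared pattern, sketched against the API used above and not part of the patch:

  Object* UnwrapExtensionPayload(Context* context) {
    Object* ext = context->extension();
    if (ext->IsContextExtension()) {
      // Catch, with, debug-evaluate and some block contexts wrap the payload.
      return ContextExtension::cast(ext)->extension();
    }
    // Other contexts still store the payload (e.g. a ScopeInfo) directly.
    return ext;
  }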
diff --git a/src/contexts.h b/src/contexts.h
index d73135f..b927d05 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -36,6 +36,7 @@
 
 #define NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(V)                           \
   V(IS_ARRAYLIKE, JSFunction, is_arraylike)                             \
+  V(GENERATOR_NEXT_INTERNAL, JSFunction, generator_next_internal)       \
   V(GET_TEMPLATE_CALL_SITE_INDEX, JSFunction, get_template_call_site)   \
   V(MAKE_ERROR_INDEX, JSFunction, make_error)                           \
   V(MAKE_RANGE_ERROR_INDEX, JSFunction, make_range_error)               \
@@ -59,44 +60,53 @@
   V(MATH_FLOOR_INDEX, JSFunction, math_floor)                           \
   V(MATH_POW_INDEX, JSFunction, math_pow)
 
-#define NATIVE_CONTEXT_IMPORTED_FIELDS(V)                                   \
-  V(ARRAY_CONCAT_INDEX, JSFunction, array_concat)                           \
-  V(ARRAY_POP_INDEX, JSFunction, array_pop)                                 \
-  V(ARRAY_PUSH_INDEX, JSFunction, array_push)                               \
-  V(ARRAY_SHIFT_INDEX, JSFunction, array_shift)                             \
-  V(ARRAY_SPLICE_INDEX, JSFunction, array_splice)                           \
-  V(ARRAY_SLICE_INDEX, JSFunction, array_slice)                             \
-  V(ARRAY_UNSHIFT_INDEX, JSFunction, array_unshift)                         \
-  V(ARRAY_VALUES_ITERATOR_INDEX, JSFunction, array_values_iterator)         \
-  V(ASYNC_FUNCTION_AWAIT_INDEX, JSFunction, async_function_await)           \
-  V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap)                   \
-  V(ERROR_FUNCTION_INDEX, JSFunction, error_function)                       \
-  V(ERROR_TO_STRING, JSFunction, error_to_string)                           \
-  V(EVAL_ERROR_FUNCTION_INDEX, JSFunction, eval_error_function)             \
-  V(GLOBAL_EVAL_FUN_INDEX, JSFunction, global_eval_fun)                     \
-  V(MAP_DELETE_METHOD_INDEX, JSFunction, map_delete)                        \
-  V(MAP_GET_METHOD_INDEX, JSFunction, map_get)                              \
-  V(MAP_HAS_METHOD_INDEX, JSFunction, map_has)                              \
-  V(MAP_SET_METHOD_INDEX, JSFunction, map_set)                              \
-  V(OBJECT_VALUE_OF, JSFunction, object_value_of)                           \
-  V(OBJECT_TO_STRING, JSFunction, object_to_string)                         \
-  V(PROMISE_CATCH_INDEX, JSFunction, promise_catch)                         \
-  V(PROMISE_CREATE_INDEX, JSFunction, promise_create)                       \
-  V(PROMISE_FUNCTION_INDEX, JSFunction, promise_function)                   \
-  V(PROMISE_HAS_USER_DEFINED_REJECT_HANDLER_INDEX, JSFunction,              \
-    promise_has_user_defined_reject_handler)                                \
-  V(PROMISE_REJECT_INDEX, JSFunction, promise_reject)                       \
-  V(PROMISE_RESOLVE_INDEX, JSFunction, promise_resolve)                     \
-  V(PROMISE_CREATE_RESOLVED_INDEX, JSFunction, promise_create_resolved)     \
-  V(PROMISE_CREATE_REJECTED_INDEX, JSFunction, promise_create_rejected)     \
-  V(PROMISE_THEN_INDEX, JSFunction, promise_then)                           \
-  V(RANGE_ERROR_FUNCTION_INDEX, JSFunction, range_error_function)           \
-  V(REFERENCE_ERROR_FUNCTION_INDEX, JSFunction, reference_error_function)   \
-  V(SET_ADD_METHOD_INDEX, JSFunction, set_add)                              \
-  V(SET_DELETE_METHOD_INDEX, JSFunction, set_delete)                        \
-  V(SET_HAS_METHOD_INDEX, JSFunction, set_has)                              \
-  V(SYNTAX_ERROR_FUNCTION_INDEX, JSFunction, syntax_error_function)         \
-  V(TYPE_ERROR_FUNCTION_INDEX, JSFunction, type_error_function)             \
+#define NATIVE_CONTEXT_IMPORTED_FIELDS(V)                                 \
+  V(ARRAY_CONCAT_INDEX, JSFunction, array_concat)                         \
+  V(ARRAY_POP_INDEX, JSFunction, array_pop)                               \
+  V(ARRAY_PUSH_INDEX, JSFunction, array_push)                             \
+  V(ARRAY_SHIFT_INDEX, JSFunction, array_shift)                           \
+  V(ARRAY_SPLICE_INDEX, JSFunction, array_splice)                         \
+  V(ARRAY_SLICE_INDEX, JSFunction, array_slice)                           \
+  V(ARRAY_UNSHIFT_INDEX, JSFunction, array_unshift)                       \
+  V(ARRAY_VALUES_ITERATOR_INDEX, JSFunction, array_values_iterator)       \
+  V(ASYNC_FUNCTION_AWAIT_CAUGHT_INDEX, JSFunction,                        \
+    async_function_await_caught)                                          \
+  V(ASYNC_FUNCTION_AWAIT_UNCAUGHT_INDEX, JSFunction,                      \
+    async_function_await_uncaught)                                        \
+  V(ASYNC_FUNCTION_PROMISE_CREATE_INDEX, JSFunction,                      \
+    async_function_promise_create)                                        \
+  V(ASYNC_FUNCTION_PROMISE_RELEASE_INDEX, JSFunction,                     \
+    async_function_promise_release)                                       \
+  V(DERIVED_GET_TRAP_INDEX, JSFunction, derived_get_trap)                 \
+  V(ERROR_FUNCTION_INDEX, JSFunction, error_function)                     \
+  V(ERROR_TO_STRING, JSFunction, error_to_string)                         \
+  V(EVAL_ERROR_FUNCTION_INDEX, JSFunction, eval_error_function)           \
+  V(GLOBAL_EVAL_FUN_INDEX, JSFunction, global_eval_fun)                   \
+  V(MAP_DELETE_METHOD_INDEX, JSFunction, map_delete)                      \
+  V(MAP_GET_METHOD_INDEX, JSFunction, map_get)                            \
+  V(MAP_HAS_METHOD_INDEX, JSFunction, map_has)                            \
+  V(MAP_SET_METHOD_INDEX, JSFunction, map_set)                            \
+  V(FUNCTION_HAS_INSTANCE_INDEX, JSFunction, function_has_instance)       \
+  V(OBJECT_VALUE_OF, JSFunction, object_value_of)                         \
+  V(OBJECT_TO_STRING, JSFunction, object_to_string)                       \
+  V(PROMISE_CATCH_INDEX, JSFunction, promise_catch)                       \
+  V(PROMISE_CREATE_INDEX, JSFunction, promise_create)                     \
+  V(PROMISE_FUNCTION_INDEX, JSFunction, promise_function)                 \
+  V(PROMISE_HAS_USER_DEFINED_REJECT_HANDLER_INDEX, JSFunction,            \
+    promise_has_user_defined_reject_handler)                              \
+  V(PROMISE_REJECT_INDEX, JSFunction, promise_reject)                     \
+  V(PROMISE_RESOLVE_INDEX, JSFunction, promise_resolve)                   \
+  V(PROMISE_THEN_INDEX, JSFunction, promise_then)                         \
+  V(RANGE_ERROR_FUNCTION_INDEX, JSFunction, range_error_function)         \
+  V(REGEXP_LAST_MATCH_INFO_INDEX, JSObject, regexp_last_match_info)       \
+  V(REJECT_PROMISE_NO_DEBUG_EVENT_INDEX, JSFunction,                      \
+    reject_promise_no_debug_event)                                        \
+  V(REFERENCE_ERROR_FUNCTION_INDEX, JSFunction, reference_error_function) \
+  V(SET_ADD_METHOD_INDEX, JSFunction, set_add)                            \
+  V(SET_DELETE_METHOD_INDEX, JSFunction, set_delete)                      \
+  V(SET_HAS_METHOD_INDEX, JSFunction, set_has)                            \
+  V(SYNTAX_ERROR_FUNCTION_INDEX, JSFunction, syntax_error_function)       \
+  V(TYPE_ERROR_FUNCTION_INDEX, JSFunction, type_error_function)           \
   V(URI_ERROR_FUNCTION_INDEX, JSFunction, uri_error_function)
 
 #define NATIVE_CONTEXT_FIELDS(V)                                               \
@@ -145,6 +155,7 @@
   V(GENERATOR_OBJECT_PROTOTYPE_MAP_INDEX, Map, generator_object_prototype_map) \
   V(INITIAL_ARRAY_PROTOTYPE_INDEX, JSObject, initial_array_prototype)          \
   V(INITIAL_GENERATOR_PROTOTYPE_INDEX, JSObject, initial_generator_prototype)  \
+  V(INITIAL_ITERATOR_PROTOTYPE_INDEX, JSObject, initial_iterator_prototype)    \
   V(INITIAL_OBJECT_PROTOTYPE_INDEX, JSObject, initial_object_prototype)        \
   V(INT16_ARRAY_FUN_INDEX, JSFunction, int16_array_fun)                        \
   V(INT16X8_FUNCTION_INDEX, JSFunction, int16x8_function)                      \
@@ -204,7 +215,11 @@
   V(WASM_FUNCTION_MAP_INDEX, Map, wasm_function_map)                           \
   V(WASM_MODULE_CONSTRUCTOR_INDEX, JSFunction, wasm_module_constructor)        \
   V(WASM_INSTANCE_CONSTRUCTOR_INDEX, JSFunction, wasm_instance_constructor)    \
+  V(WASM_TABLE_CONSTRUCTOR_INDEX, JSFunction, wasm_table_constructor)          \
+  V(WASM_MEMORY_CONSTRUCTOR_INDEX, JSFunction, wasm_memory_constructor)        \
   V(WASM_MODULE_SYM_INDEX, Symbol, wasm_module_sym)                            \
+  V(WASM_TABLE_SYM_INDEX, Symbol, wasm_table_sym)                              \
+  V(WASM_MEMORY_SYM_INDEX, Symbol, wasm_memory_sym)                            \
   V(WASM_INSTANCE_SYM_INDEX, Symbol, wasm_instance_sym)                        \
   V(SLOPPY_ASYNC_FUNCTION_MAP_INDEX, Map, sloppy_async_function_map)           \
   V(SLOPPY_GENERATOR_FUNCTION_MAP_INDEX, Map, sloppy_generator_function_map)   \
@@ -227,6 +242,7 @@
   V(UINT8_ARRAY_FUN_INDEX, JSFunction, uint8_array_fun)                        \
   V(UINT8_CLAMPED_ARRAY_FUN_INDEX, JSFunction, uint8_clamped_array_fun)        \
   V(UINT8X16_FUNCTION_INDEX, JSFunction, uint8x16_function)                    \
+  V(CURRENT_MODULE_INDEX, Module, current_module)                              \
   NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(V)                                        \
   NATIVE_CONTEXT_IMPORTED_FIELDS(V)
 
@@ -298,18 +314,29 @@
 //
 // [ previous  ]  A pointer to the previous context.
 //
-// [ extension ]  A pointer to an extension JSObject, or "the hole". Used to
-//                implement 'with' statements and dynamic declarations
-//                (through 'eval'). The object in a 'with' statement is
-//                stored in the extension slot of a 'with' context.
-//                Dynamically declared variables/functions are also added
-//                to lazily allocated extension object. Context::Lookup
-//                searches the extension object for properties.
-//                For script and block contexts, contains the respective
-//                ScopeInfo. For block contexts representing sloppy declaration
-//                block scopes, it may also be a struct being a
-//                SloppyBlockWithEvalContextExtension, pairing the ScopeInfo
-//                with an extension object.
+// [ extension ]  Additional data.
+//
+//                For script contexts, it contains the respective ScopeInfo.
+//
+//                For catch contexts, it contains a ContextExtension object
+//                consisting of the ScopeInfo and the name of the catch
+//                variable.
+//
+//                For module contexts, it contains the module object.
+//
+//                For block contexts, it contains either the respective
+//                ScopeInfo or a ContextExtension object consisting of the
+//                ScopeInfo and an "extension object" (see below).
+//
+//                For with contexts, it contains a ContextExtension object
+//                consisting of the ScopeInfo and an "extension object".
+//
+//                An "extension object" is used to dynamically extend a context
+//                with additional variables, namely in the implementation of the
+//                'with' construct and the 'eval' construct.  For instance,
+//                Context::Lookup also searches the extension object for
+//                properties.  (Storing the extension object is the original
+//                purpose of this context slot, hence the name.)
 //
 // [ native_context ]  A pointer to the native context.
 //
@@ -387,6 +414,10 @@
   ScopeInfo* scope_info();
   String* catch_name();
 
+  // Find the module context (assuming there is one) and return the associated
+  // module object.
+  Module* module();
+
   // Get the context where var declarations will be hoisted to, which
   // may be the context itself.
   Context* declaration_context();
@@ -400,7 +431,7 @@
   void set_global_proxy(JSObject* global);
 
   // Get the JSGlobalObject object.
-  JSGlobalObject* global_object();
+  V8_EXPORT_PRIVATE JSGlobalObject* global_object();
 
   // Get the script context by traversing the context chain.
   Context* script_context();
@@ -423,9 +454,6 @@
 
   inline bool HasSameSecurityTokenAs(Context* that);
 
-  // Initializes global variable bindings in given script context.
-  void InitializeGlobalSlots();
-
   // A native context holds a list of all functions with optimized code.
   void AddOptimizedFunction(JSFunction* function);
   void RemoveOptimizedFunction(JSFunction* function);
@@ -444,6 +472,7 @@
 
   static int ImportedFieldIndexForName(Handle<String> name);
   static int IntrinsicIndexForName(Handle<String> name);
+  static int IntrinsicIndexForName(const unsigned char* name, int length);
 
 #define NATIVE_CONTEXT_FIELD_ACCESSORS(index, type, name) \
   inline void set_##name(type* value);                    \
@@ -525,7 +554,8 @@
  private:
 #ifdef DEBUG
   // Bootstrapping-aware type checks.
-  static bool IsBootstrappingOrNativeContext(Isolate* isolate, Object* object);
+  V8_EXPORT_PRIVATE static bool IsBootstrappingOrNativeContext(Isolate* isolate,
+                                                               Object* object);
   static bool IsBootstrappingOrValidParentContext(Object* object, Context* kid);
 #endif
 
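Per the updated [ extension ] documentation above, a module context stores its Module object directly in the extension slot, and the new module() accessor walks the context chain to reach it. A minimal usage sketch; the wrapper function is hypothetical and assumes the context is nested inside a module context:

  Module* EnclosingModule(Context* context) {
    // Walks previous() until IsModuleContext(), then reads the extension slot.
    return context->module();
  }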
diff --git a/src/counters-inl.h b/src/counters-inl.h
index c8c06d2..303e5e3 100644
--- a/src/counters-inl.h
+++ b/src/counters-inl.h
@@ -11,10 +11,18 @@
 namespace internal {
 
 RuntimeCallTimerScope::RuntimeCallTimerScope(
+    Isolate* isolate, RuntimeCallStats::CounterId counter_id) {
+  if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+                  FLAG_runtime_call_stats)) {
+    Initialize(isolate, counter_id);
+  }
+}
+
+RuntimeCallTimerScope::RuntimeCallTimerScope(
     HeapObject* heap_object, RuntimeCallStats::CounterId counter_id) {
-  if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
-    isolate_ = heap_object->GetIsolate();
-    RuntimeCallStats::Enter(isolate_, &timer_, counter_id);
+  if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+                  FLAG_runtime_call_stats)) {
+    Initialize(heap_object->GetIsolate(), counter_id);
   }
 }
 
diff --git a/src/counters.cc b/src/counters.cc
index 8a5908c..c4e8646 100644
--- a/src/counters.cc
+++ b/src/counters.cc
@@ -282,18 +282,15 @@
 }
 
 // static
-void RuntimeCallStats::Enter(Isolate* isolate, RuntimeCallTimer* timer,
+void RuntimeCallStats::Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
                              CounterId counter_id) {
-  RuntimeCallStats* stats = isolate->counters()->runtime_call_stats();
   RuntimeCallCounter* counter = &(stats->*counter_id);
   timer->Start(counter, stats->current_timer_);
   stats->current_timer_ = timer;
 }
 
 // static
-void RuntimeCallStats::Leave(Isolate* isolate, RuntimeCallTimer* timer) {
-  RuntimeCallStats* stats = isolate->counters()->runtime_call_stats();
-
+void RuntimeCallStats::Leave(RuntimeCallStats* stats, RuntimeCallTimer* timer) {
   if (stats->current_timer_ == timer) {
     stats->current_timer_ = timer->Stop();
   } else {
@@ -307,9 +304,8 @@
 }
 
 // static
-void RuntimeCallStats::CorrectCurrentCounterId(Isolate* isolate,
+void RuntimeCallStats::CorrectCurrentCounterId(RuntimeCallStats* stats,
                                                CounterId counter_id) {
-  RuntimeCallStats* stats = isolate->counters()->runtime_call_stats();
   DCHECK_NOT_NULL(stats->current_timer_);
   RuntimeCallCounter* counter = &(stats->*counter_id);
   stats->current_timer_->counter_ = counter;
@@ -342,7 +338,9 @@
 }
 
 void RuntimeCallStats::Reset() {
-  if (!FLAG_runtime_call_stats) return;
+  if (!FLAG_runtime_call_stats &&
+      !TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())
+    return;
 #define RESET_COUNTER(name) this->name.Reset();
   FOR_EACH_MANUAL_COUNTER(RESET_COUNTER)
 #undef RESET_COUNTER
@@ -362,6 +360,41 @@
 #define RESET_COUNTER(name) this->Handler_##name.Reset();
   FOR_EACH_HANDLER_COUNTER(RESET_COUNTER)
 #undef RESET_COUNTER
+
+  in_use_ = true;
+}
+
+std::string RuntimeCallStats::Dump() {
+  buffer_.str(std::string());
+  buffer_.clear();
+  buffer_ << "{";
+#define DUMP_COUNTER(name) \
+  if (this->name.count > 0) this->name.Dump(buffer_);
+  FOR_EACH_MANUAL_COUNTER(DUMP_COUNTER)
+#undef DUMP_COUNTER
+
+#define DUMP_COUNTER(name, nargs, result_size) \
+  if (this->Runtime_##name.count > 0) this->Runtime_##name.Dump(buffer_);
+  FOR_EACH_INTRINSIC(DUMP_COUNTER)
+#undef DUMP_COUNTER
+
+#define DUMP_COUNTER(name) \
+  if (this->Builtin_##name.count > 0) this->Builtin_##name.Dump(buffer_);
+  BUILTIN_LIST_C(DUMP_COUNTER)
+#undef DUMP_COUNTER
+
+#define DUMP_COUNTER(name) \
+  if (this->API_##name.count > 0) this->API_##name.Dump(buffer_);
+  FOR_EACH_API_COUNTER(DUMP_COUNTER)
+#undef DUMP_COUNTER
+
+#define DUMP_COUNTER(name) \
+  if (this->Handler_##name.count > 0) this->Handler_##name.Dump(buffer_);
+  FOR_EACH_HANDLER_COUNTER(DUMP_COUNTER)
+#undef DUMP_COUNTER
+  buffer_ << "\"END\":[]}";
+  in_use_ = false;
+  return buffer_.str();
 }
 
 }  // namespace internal
diff --git a/src/counters.h b/src/counters.h
index 59627f1..707ae9f 100644
--- a/src/counters.h
+++ b/src/counters.h
@@ -11,8 +11,10 @@
 #include "src/base/platform/time.h"
 #include "src/builtins/builtins.h"
 #include "src/globals.h"
+#include "src/isolate.h"
 #include "src/objects.h"
 #include "src/runtime/runtime.h"
+#include "src/tracing/trace-event.h"
 
 namespace v8 {
 namespace internal {
@@ -566,12 +568,15 @@
   V(Message_GetLineNumber)                                 \
   V(Message_GetSourceLine)                                 \
   V(Message_GetStartColumn)                                \
+  V(Module_Evaluate)                                       \
+  V(Module_Instantiate)                                    \
   V(NumberObject_New)                                      \
   V(NumberObject_NumberValue)                              \
   V(Object_CallAsConstructor)                              \
   V(Object_CallAsFunction)                                 \
   V(Object_CreateDataProperty)                             \
   V(Object_DefineOwnProperty)                              \
+  V(Object_DefineProperty)                                 \
   V(Object_Delete)                                         \
   V(Object_DeleteProperty)                                 \
   V(Object_ForceSet)                                       \
@@ -657,7 +662,10 @@
   V(UnboundScript_GetName)                                 \
   V(UnboundScript_GetSourceMappingURL)                     \
   V(UnboundScript_GetSourceURL)                            \
-  V(Value_TypeOf)
+  V(Value_TypeOf)                                          \
+  V(ValueDeserializer_ReadHeader)                          \
+  V(ValueDeserializer_ReadValue)                           \
+  V(ValueSerializer_WriteValue)
 
 #define FOR_EACH_MANUAL_COUNTER(V)                  \
   V(AccessorGetterCallback)                         \
@@ -674,13 +682,18 @@
   V(DeoptimizeCode)                                 \
   V(FunctionCallback)                               \
   V(GC)                                             \
+  V(GenericNamedPropertyDefinerCallback)            \
   V(GenericNamedPropertyDeleterCallback)            \
+  V(GenericNamedPropertyDescriptorCallback)         \
   V(GenericNamedPropertyQueryCallback)              \
   V(GenericNamedPropertySetterCallback)             \
+  V(IndexedPropertyDefinerCallback)                 \
   V(IndexedPropertyDeleterCallback)                 \
+  V(IndexedPropertyDescriptorCallback)              \
   V(IndexedPropertyGetterCallback)                  \
   V(IndexedPropertyQueryCallback)                   \
   V(IndexedPropertySetterCallback)                  \
+  V(InvokeApiInterruptCallbacks)                    \
   V(InvokeFunctionCallback)                         \
   V(JS_Execution)                                   \
   V(Map_SetPrototype)                               \
@@ -765,67 +778,52 @@
 
   // Starting measuring the time for a function. This will establish the
   // connection to the parent counter for properly calculating the own times.
-  static void Enter(Isolate* isolate, RuntimeCallTimer* timer,
+  static void Enter(RuntimeCallStats* stats, RuntimeCallTimer* timer,
                     CounterId counter_id);
 
   // Leave a scope for a measured runtime function. This will properly add
   // the time delta to the current_counter and subtract the delta from its
   // parent.
-  static void Leave(Isolate* isolate, RuntimeCallTimer* timer);
+  static void Leave(RuntimeCallStats* stats, RuntimeCallTimer* timer);
 
   // Set counter id for the innermost measurement. It can be used to refine
   // event kind when a runtime entry counter is too generic.
-  static void CorrectCurrentCounterId(Isolate* isolate, CounterId counter_id);
+  static void CorrectCurrentCounterId(RuntimeCallStats* stats,
+                                      CounterId counter_id);
 
   void Reset();
-  void Print(std::ostream& os);
+  V8_NOINLINE void Print(std::ostream& os);
+  V8_NOINLINE std::string Dump();
 
-  RuntimeCallStats() { Reset(); }
+  RuntimeCallStats() {
+    Reset();
+    in_use_ = false;
+  }
+
   RuntimeCallTimer* current_timer() { return current_timer_; }
+  bool InUse() { return in_use_; }
 
  private:
+  std::stringstream buffer_;
   // Counter to track recursive time events.
   RuntimeCallTimer* current_timer_ = NULL;
+  // Used to track nested tracing scopes.
+  bool in_use_;
 };
 
-#define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \
-  do {                                                  \
-    if (FLAG_runtime_call_stats) {                      \
-      RuntimeCallStats::CorrectCurrentCounterId(        \
-          isolate, &RuntimeCallStats::counter_name);    \
-    }                                                   \
+#define TRACE_RUNTIME_CALL_STATS(isolate, counter_name)                 \
+  do {                                                                  \
+    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || \
+                    FLAG_runtime_call_stats)) {                         \
+      RuntimeCallStats::CorrectCurrentCounterId(                        \
+          isolate->counters()->runtime_call_stats(),                    \
+          &RuntimeCallStats::counter_name);                             \
+    }                                                                   \
   } while (false)
 
 #define TRACE_HANDLER_STATS(isolate, counter_name) \
   TRACE_RUNTIME_CALL_STATS(isolate, Handler_##counter_name)
 
-// A RuntimeCallTimerScopes wraps around a RuntimeCallTimer to measure the
-// the time of C++ scope.
-class RuntimeCallTimerScope {
- public:
-  inline RuntimeCallTimerScope(Isolate* isolate,
-                               RuntimeCallStats::CounterId counter_id) {
-    if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
-      isolate_ = isolate;
-      RuntimeCallStats::Enter(isolate_, &timer_, counter_id);
-    }
-  }
-  // This constructor is here just to avoid calling GetIsolate() when the
-  // stats are disabled and the isolate is not directly available.
-  inline RuntimeCallTimerScope(HeapObject* heap_object,
-                               RuntimeCallStats::CounterId counter_id);
-
-  inline ~RuntimeCallTimerScope() {
-    if (V8_UNLIKELY(FLAG_runtime_call_stats)) {
-      RuntimeCallStats::Leave(isolate_, &timer_);
-    }
-  }
-
- private:
-  Isolate* isolate_;
-  RuntimeCallTimer timer_;
-};
-
 #define HISTOGRAM_RANGE_LIST(HR)                                              \
   /* Generic range histograms */                                              \
   HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21)        \
@@ -836,6 +834,9 @@
   HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6)             \
   HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20)        \
   HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7)                      \
+  HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22)    \
+  HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22)                  \
+  HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22)                         \
   /* Asm/Wasm. */                                                             \
   HR(wasm_functions_per_module, V8.WasmFunctionsPerModule, 1, 10000, 51)
 
@@ -1238,6 +1239,36 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
 };
 
+// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
+// time of a C++ scope.
+class RuntimeCallTimerScope {
+ public:
+  inline RuntimeCallTimerScope(Isolate* isolate,
+                               RuntimeCallStats::CounterId counter_id);
+  // This constructor is here just to avoid calling GetIsolate() when the
+  // stats are disabled and the isolate is not directly available.
+  inline RuntimeCallTimerScope(HeapObject* heap_object,
+                               RuntimeCallStats::CounterId counter_id);
+
+  inline ~RuntimeCallTimerScope() {
+    if (V8_UNLIKELY(isolate_ != nullptr)) {
+      RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(),
+                              &timer_);
+    }
+  }
+
+ private:
+  V8_INLINE void Initialize(Isolate* isolate,
+                            RuntimeCallStats::CounterId counter_id) {
+    isolate_ = isolate;
+    RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(), &timer_,
+                            counter_id);
+  }
+
+  Isolate* isolate_ = nullptr;
+  RuntimeCallTimer timer_;
+};
+
 }  // namespace internal
 }  // namespace v8
 
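With the relocation above, RuntimeCallTimerScope only calls Enter/Leave when FLAG_runtime_call_stats or the RCS tracing category is enabled; otherwise isolate_ stays nullptr and the destructor is a no-op. A minimal usage sketch; the wrapped function is hypothetical, while GC is one of the FOR_EACH_MANUAL_COUNTER entries listed above:

  void TracedCollectGarbage(Isolate* isolate) {
    RuntimeCallTimerScope timer_scope(isolate, &RuntimeCallStats::GC);
    // ... work attributed to the GC counter while the scope is alive ...
  }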
diff --git a/src/crankshaft/arm/lithium-arm.cc b/src/crankshaft/arm/lithium-arm.cc
index 324dcfe..8c4b735 100644
--- a/src/crankshaft/arm/lithium-arm.cc
+++ b/src/crankshaft/arm/lithium-arm.cc
@@ -304,15 +304,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -345,15 +336,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -877,7 +859,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1019,6 +1001,9 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -1027,15 +1012,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2180,26 +2170,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result =
-      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2276,20 +2246,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), r1);
diff --git a/src/crankshaft/arm/lithium-arm.h b/src/crankshaft/arm/lithium-arm.h
index 80fbe81..abdfbdd 100644
--- a/src/crankshaft/arm/lithium-arm.h
+++ b/src/crankshaft/arm/lithium-arm.h
@@ -132,9 +132,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2005,33 +2003,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -2068,34 +2039,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 1> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/crankshaft/arm/lithium-codegen-arm.cc b/src/crankshaft/arm/lithium-codegen-arm.cc
index ee3e54b..f2cc4b4 100644
--- a/src/crankshaft/arm/lithium-codegen-arm.cc
+++ b/src/crankshaft/arm/lithium-codegen-arm.cc
@@ -152,7 +152,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in r1.
@@ -160,7 +160,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(r1);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2602,20 +2602,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ Move(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ mov(slot_register, Operand(Smi::FromInt(index)));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).is(r0));
@@ -3860,21 +3846,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Condition cc = instr->hydrogen()->allow_equality() ? hi : hs;
   if (instr->index()->IsConstantOperand()) {
@@ -4071,21 +4042,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr, NEVER_INLINE_TARGET_ADDRESS);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5063,7 +5019,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5165,7 +5121,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/arm/lithium-codegen-arm.h b/src/crankshaft/arm/lithium-codegen-arm.h
index 533f4c8..26b7fb5 100644
--- a/src/crankshaft/arm/lithium-codegen-arm.h
+++ b/src/crankshaft/arm/lithium-codegen-arm.h
@@ -311,8 +311,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
   Scope* const scope_;
diff --git a/src/crankshaft/arm64/lithium-arm64.cc b/src/crankshaft/arm64/lithium-arm64.cc
index 8067a6a..8a9ce42 100644
--- a/src/crankshaft/arm64/lithium-arm64.cc
+++ b/src/crankshaft/arm64/lithium-arm64.cc
@@ -252,15 +252,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LStoreNamedField::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   std::ostringstream os;
@@ -271,15 +262,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
   stream->Add("if string_compare(");
   left()->PrintTo(stream);
@@ -726,7 +708,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -981,6 +963,9 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -989,15 +974,30 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  if (i < descriptor.GetParameterCount()) {
+    int argc = descriptor.GetParameterCount() - i;
+    AddInstruction(new (zone()) LPreparePushArguments(argc), instr);
+    LPushArguments* push_args = new (zone()) LPushArguments(zone());
+    for (; i < descriptor.GetParameterCount(); i++) {
+      if (push_args->ShouldSplitPush()) {
+        AddInstruction(push_args, instr);
+        push_args = new (zone()) LPushArguments(zone());
+      }
+      op = UseRegisterAtStart(instr->OperandAt(
+          i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+      push_args->AddArgument(op);
+    }
+    AddInstruction(push_args, instr);
+  }
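
The arm64 version batches those stack parameters instead of pushing them one by one: arguments are collected into LPushArguments groups, and a fresh group is started whenever the current one reports ShouldSplitPush(). A rough standalone sketch of that batching; the per-batch limit of 4 is a placeholder, not the real arm64 value.

    #include <cstdio>
    #include <vector>

    // Stand-in for LPushArguments: collects arguments up to a size limit.
    struct PushBatch {
      static const int kMaxPerBatch = 4;  // assumed limit, not the real value
      std::vector<int> args;
      bool ShouldSplit() const {
        return static_cast<int>(args.size()) >= kMaxPerBatch;
      }
    };

    int main() {
      std::vector<PushBatch> batches(1);
      for (int arg = 0; arg < 10; arg++) {
        if (batches.back().ShouldSplit()) {
          batches.emplace_back();  // emit the full batch, start a new one
        }
        batches.back().args.push_back(arg);
      }
      std::printf("10 stack arguments emitted as %zu push batches\n",
                  batches.size());
      return 0;
    }
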
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(descriptor,
                                                                 ops,
@@ -2209,26 +2209,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result = new (zone())
-      LStoreKeyedGeneric(context, object, key, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
   // TODO(jbramley): It might be beneficial to allow value to be a constant in
   // some cases. x64 makes use of this with FLAG_track_fields, for example.
@@ -2258,21 +2238,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, object, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), x1);
diff --git a/src/crankshaft/arm64/lithium-arm64.h b/src/crankshaft/arm64/lithium-arm64.h
index 782da09..9891f9e 100644
--- a/src/crankshaft/arm64/lithium-arm64.h
+++ b/src/crankshaft/arm64/lithium-arm64.h
@@ -143,9 +143,7 @@
   V(StoreKeyedExternal)                      \
   V(StoreKeyedFixed)                         \
   V(StoreKeyedFixedDouble)                   \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2336,34 +2334,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreNamedField final : public LTemplateInstruction<0, 2, 2> {
  public:
   LStoreNamedField(LOperand* object, LOperand* value,
@@ -2390,33 +2360,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LMaybeGrowElements final : public LTemplateInstruction<1, 5, 0> {
  public:
   LMaybeGrowElements(LOperand* context, LOperand* object, LOperand* elements,
diff --git a/src/crankshaft/arm64/lithium-codegen-arm64.cc b/src/crankshaft/arm64/lithium-codegen-arm64.cc
index ce5813b..a4aa275 100644
--- a/src/crankshaft/arm64/lithium-codegen-arm64.cc
+++ b/src/crankshaft/arm64/lithium-codegen-arm64.cc
@@ -583,14 +583,14 @@
   Comment(";;; Prologue begin");
 
   // Allocate a local context if needed.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in x1.
     int slots = info()->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
-      __ Mov(x10, Operand(info()->scope()->GetScopeInfo(info()->isolate())));
+      __ Mov(x10, Operand(info()->scope()->scope_info()));
       __ Push(x1, x10);
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
@@ -1403,7 +1403,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, temp1, temp2, deferred->entry(), flags);
   } else {
     Register size = ToRegister32(instr->size());
@@ -1499,7 +1499,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -1973,7 +1973,16 @@
     generator.AfterCall();
   }
 
-  RecordPushedArgumentsDelta(instr->hydrogen()->argument_delta());
+  HCallWithDescriptor* hinstr = instr->hydrogen();
+  RecordPushedArgumentsDelta(hinstr->argument_delta());
+
+  // An HCallWithDescriptor instruction is translated to zero or more
+  // LPushArguments (which handle the parameters passed on the stack) followed
+  // by an LCallWithDescriptor. Each generated LPushArguments instruction
+  // records the number of arguments it pushes, so we need to offset them here.
+  // The |argument_delta()| used above "knows" only about JS parameters, while
+  // here we are dealing with the details of a particular calling convention.
+  RecordPushedArgumentsDelta(-hinstr->descriptor().GetStackParameterCount());
 }
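
A tiny arithmetic sketch of the bookkeeping the comment above describes; the numbers are invented, and the three additions stand in for what the LPushArguments instructions and the two RecordPushedArgumentsDelta() calls contribute.

    #include <cstdio>

    int main() {
      int pushed_arguments = 0;

      // The descriptor passes 2 parameters on the machine stack; the
      // LPushArguments instructions have already recorded those pushes.
      int stack_parameter_count = 2;
      pushed_arguments += stack_parameter_count;

      // argument_delta() sees only the JS-level arguments of the call.
      int argument_delta = -3;
      pushed_arguments += argument_delta;

      // The extra call above cancels the machine-level pushes out again.
      pushed_arguments += -stack_parameter_count;

      std::printf("net pushed-arguments delta: %d\n", pushed_arguments);
      return 0;
    }
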
 
 
@@ -3021,20 +3030,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ Mov(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ Mov(slot_register, Smi::FromInt(index));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).Is(x0));
@@ -4933,21 +4928,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5131,21 +5111,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ Mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoStringAdd(LStringAdd* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->left()).Is(x1));
diff --git a/src/crankshaft/arm64/lithium-codegen-arm64.h b/src/crankshaft/arm64/lithium-codegen-arm64.h
index 2fc6f96..ca04fa2 100644
--- a/src/crankshaft/arm64/lithium-codegen-arm64.h
+++ b/src/crankshaft/arm64/lithium-codegen-arm64.h
@@ -186,8 +186,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   // Emits optimized code for %_IsString(x).  Preserves input register.
   // Returns the condition on which a final split to
diff --git a/src/crankshaft/compilation-phase.h b/src/crankshaft/compilation-phase.h
index 99e24c7..8d6468d 100644
--- a/src/crankshaft/compilation-phase.h
+++ b/src/crankshaft/compilation-phase.h
@@ -6,8 +6,9 @@
 #define V8_CRANKSHAFT_COMPILATION_PHASE_H_
 
 #include "src/allocation.h"
-#include "src/compiler.h"
-#include "src/zone.h"
+#include "src/base/platform/elapsed-timer.h"
+#include "src/compilation-info.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/hydrogen-bce.cc b/src/crankshaft/hydrogen-bce.cc
index d00d8ce..7910c5b 100644
--- a/src/crankshaft/hydrogen-bce.cc
+++ b/src/crankshaft/hydrogen-bce.cc
@@ -307,24 +307,25 @@
   return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
 }
 
-
 BoundsCheckTable::BoundsCheckTable(Zone* zone)
-    : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
-                  ZoneAllocationPolicy(zone)) { }
-
+    : CustomMatcherZoneHashMap(BoundsCheckKeyMatch,
+                               ZoneHashMap::kDefaultHashMapCapacity,
+                               ZoneAllocationPolicy(zone)) {}
 
 BoundsCheckBbData** BoundsCheckTable::LookupOrInsert(BoundsCheckKey* key,
                                                      Zone* zone) {
   return reinterpret_cast<BoundsCheckBbData**>(
-      &(ZoneHashMap::LookupOrInsert(key, key->Hash(),
-                                    ZoneAllocationPolicy(zone))->value));
+      &(CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
+                                                 ZoneAllocationPolicy(zone))
+            ->value));
 }
 
 
 void BoundsCheckTable::Insert(BoundsCheckKey* key,
                               BoundsCheckBbData* data,
                               Zone* zone) {
-  ZoneHashMap::LookupOrInsert(key, key->Hash(), ZoneAllocationPolicy(zone))
+  CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
+                                           ZoneAllocationPolicy(zone))
       ->value = data;
 }
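
BoundsCheckTable now derives from CustomMatcherZoneHashMap, i.e. a hash map whose key equality is the caller-supplied BoundsCheckKeyMatch rather than pointer identity. Outside of V8 the same idea can be sketched with the standard library alone; the Key fields below mirror IndexBase()/Length() from the matcher above, everything else is invented.

    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <unordered_map>

    // Analogue of BoundsCheckKey: identified by (index base, length).
    struct Key {
      int32_t index_base;
      int32_t length;
    };

    struct KeyHash {
      size_t operator()(const Key& k) const {
        uint64_t bits =
            (uint64_t(uint32_t(k.index_base)) << 32) | uint32_t(k.length);
        return std::hash<uint64_t>()(bits);
      }
    };

    // The custom matcher: keys compare equal when base and length match,
    // mirroring BoundsCheckKeyMatch.
    struct KeyMatch {
      bool operator()(const Key& a, const Key& b) const {
        return a.index_base == b.index_base && a.length == b.length;
      }
    };

    int main() {
      std::unordered_map<Key, const char*, KeyHash, KeyMatch> table;
      table[{0, 16}] = "bounds-check data";
      std::printf("%s\n", table[{0, 16}]);  // hits the existing entry
      return 0;
    }
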
 
diff --git a/src/crankshaft/hydrogen-bce.h b/src/crankshaft/hydrogen-bce.h
index e819ffc..237fb95 100644
--- a/src/crankshaft/hydrogen-bce.h
+++ b/src/crankshaft/hydrogen-bce.h
@@ -13,7 +13,7 @@
 
 class BoundsCheckBbData;
 class BoundsCheckKey;
-class BoundsCheckTable : private ZoneHashMap {
+class BoundsCheckTable : private CustomMatcherZoneHashMap {
  public:
   explicit BoundsCheckTable(Zone* zone);
 
diff --git a/src/crankshaft/hydrogen-flow-engine.h b/src/crankshaft/hydrogen-flow-engine.h
index 3a488dd..149c99b 100644
--- a/src/crankshaft/hydrogen-flow-engine.h
+++ b/src/crankshaft/hydrogen-flow-engine.h
@@ -5,9 +5,9 @@
 #ifndef V8_CRANKSHAFT_HYDROGEN_FLOW_ENGINE_H_
 #define V8_CRANKSHAFT_HYDROGEN_FLOW_ENGINE_H_
 
-#include "src/crankshaft/hydrogen.h"
 #include "src/crankshaft/hydrogen-instructions.h"
-#include "src/zone.h"
+#include "src/crankshaft/hydrogen.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/hydrogen-gvn.h b/src/crankshaft/hydrogen-gvn.h
index 9a8d407..5f11737 100644
--- a/src/crankshaft/hydrogen-gvn.h
+++ b/src/crankshaft/hydrogen-gvn.h
@@ -7,9 +7,9 @@
 
 #include <iosfwd>
 
-#include "src/crankshaft/hydrogen.h"
 #include "src/crankshaft/hydrogen-instructions.h"
-#include "src/zone.h"
+#include "src/crankshaft/hydrogen.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/hydrogen-instructions.cc b/src/crankshaft/hydrogen-instructions.cc
index 9fed961..3a0aaa7 100644
--- a/src/crankshaft/hydrogen-instructions.cc
+++ b/src/crankshaft/hydrogen-instructions.cc
@@ -831,7 +831,6 @@
     case HValue::kStoreCodeEntry:
     case HValue::kStoreKeyed:
     case HValue::kStoreNamedField:
-    case HValue::kStoreNamedGeneric:
     case HValue::kStringCharCodeAt:
     case HValue::kStringCharFromCode:
     case HValue::kThisFunction:
@@ -881,7 +880,6 @@
     case HValue::kSimulate:
     case HValue::kStackCheck:
     case HValue::kStoreContextSlot:
-    case HValue::kStoreKeyedGeneric:
     case HValue::kStringAdd:
     case HValue::kStringCompareAndBranch:
     case HValue::kSub:
@@ -3039,14 +3037,6 @@
 }
 
 
-std::ostream& HStoreNamedGeneric::PrintDataTo(
-    std::ostream& os) const {  // NOLINT
-  Handle<String> n = Handle<String>::cast(name());
-  return os << NameOf(object()) << "." << n->ToCString().get() << " = "
-            << NameOf(value());
-}
-
-
 std::ostream& HStoreNamedField::PrintDataTo(std::ostream& os) const {  // NOLINT
   os << NameOf(object()) << access_ << " = " << NameOf(value());
   if (NeedsWriteBarrier()) os << " (write-barrier)";
@@ -3070,13 +3060,6 @@
 }
 
 
-std::ostream& HStoreKeyedGeneric::PrintDataTo(
-    std::ostream& os) const {  // NOLINT
-  return os << NameOf(object()) << "[" << NameOf(key())
-            << "] = " << NameOf(value());
-}
-
-
 std::ostream& HTransitionElementsKind::PrintDataTo(
     std::ostream& os) const {  // NOLINT
   os << NameOf(object());
@@ -3236,8 +3219,8 @@
   int32_t new_dominator_size = dominator_size_constant + current_size_max_value;
 
   // Since we clear the first word after folded memory, we cannot use the
-  // whole Page::kMaxRegularHeapObjectSize memory.
-  if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
+  // whole kMaxRegularHeapObjectSize memory.
+  if (new_dominator_size > kMaxRegularHeapObjectSize - kPointerSize) {
     if (FLAG_trace_allocation_folding) {
       PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
           id(), Mnemonic(), dominator_allocate->id(),
diff --git a/src/crankshaft/hydrogen-instructions.h b/src/crankshaft/hydrogen-instructions.h
index 41b1e1b..cfede98 100644
--- a/src/crankshaft/hydrogen-instructions.h
+++ b/src/crankshaft/hydrogen-instructions.h
@@ -9,6 +9,7 @@
 #include <iosfwd>
 
 #include "src/allocation.h"
+#include "src/ast/ast.h"
 #include "src/base/bits.h"
 #include "src/bit-vector.h"
 #include "src/code-stubs.h"
@@ -19,7 +20,7 @@
 #include "src/globals.h"
 #include "src/small-pointer-list.h"
 #include "src/utils.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -36,6 +37,7 @@
 class HValue;
 class LInstruction;
 class LChunkBuilder;
+class SmallMapList;
 
 #define HYDROGEN_ABSTRACT_INSTRUCTION_LIST(V) \
   V(ArithmeticBinaryOperation)                \
@@ -131,9 +133,7 @@
   V(StoreCodeEntry)                           \
   V(StoreContextSlot)                         \
   V(StoreKeyed)                               \
-  V(StoreKeyedGeneric)                        \
   V(StoreNamedField)                          \
-  V(StoreNamedGeneric)                        \
   V(StringAdd)                                \
   V(StringCharCodeAt)                         \
   V(StringCharFromCode)                       \
@@ -2176,7 +2176,8 @@
     } else {
       int par_index = index - 2;
       DCHECK(par_index < GetParameterCount());
-      return RepresentationFromType(descriptor_.GetParameterType(par_index));
+      return RepresentationFromMachineType(
+          descriptor_.GetParameterType(par_index));
     }
   }
 
@@ -2215,7 +2216,7 @@
                       TailCallMode syntactic_tail_call_mode,
                       TailCallMode tail_call_mode, Zone* zone)
       : descriptor_(descriptor),
-        values_(GetParameterCount() + 1, zone),
+        values_(GetParameterCount() + 1, zone),  // +1 here is for target.
         argument_count_(argument_count),
         bit_field_(
             TailCallModeField::encode(tail_call_mode) |
@@ -2237,7 +2238,7 @@
   }
 
   int GetParameterCount() const {
-    return descriptor_.GetRegisterParameterCount() + 1;
+    return descriptor_.GetParameterCount() + 1;  // +1 here is for context.
   }
 
   void InternalSetOperandAt(int index, HValue* value) final {
@@ -6326,52 +6327,6 @@
   uint32_t bit_field_;
 };
 
-class HStoreNamedGeneric final : public HTemplateInstruction<3> {
- public:
-  DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P6(HStoreNamedGeneric, HValue*,
-                                              Handle<Name>, HValue*,
-                                              LanguageMode,
-                                              Handle<TypeFeedbackVector>,
-                                              FeedbackVectorSlot);
-  HValue* object() const { return OperandAt(0); }
-  HValue* value() const { return OperandAt(1); }
-  HValue* context() const { return OperandAt(2); }
-  Handle<Name> name() const { return name_; }
-  LanguageMode language_mode() const { return language_mode_; }
-
-  std::ostream& PrintDataTo(std::ostream& os) const override;  // NOLINT
-
-  Representation RequiredInputRepresentation(int index) override {
-    return Representation::Tagged();
-  }
-
-  FeedbackVectorSlot slot() const { return slot_; }
-  Handle<TypeFeedbackVector> feedback_vector() const {
-    return feedback_vector_;
-  }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric)
-
- private:
-  HStoreNamedGeneric(HValue* context, HValue* object, Handle<Name> name,
-                     HValue* value, LanguageMode language_mode,
-                     Handle<TypeFeedbackVector> vector, FeedbackVectorSlot slot)
-      : name_(name),
-        feedback_vector_(vector),
-        slot_(slot),
-        language_mode_(language_mode) {
-    SetOperandAt(0, object);
-    SetOperandAt(1, value);
-    SetOperandAt(2, context);
-    SetAllSideEffects();
-  }
-
-  Handle<Name> name_;
-  Handle<TypeFeedbackVector> feedback_vector_;
-  FeedbackVectorSlot slot_;
-  LanguageMode language_mode_;
-};
-
 class HStoreKeyed final : public HTemplateInstruction<4>,
                           public ArrayInstructionInterface {
  public:
@@ -6554,50 +6509,6 @@
   HValue* dominator_;
 };
 
-class HStoreKeyedGeneric final : public HTemplateInstruction<4> {
- public:
-  DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P6(HStoreKeyedGeneric, HValue*,
-                                              HValue*, HValue*, LanguageMode,
-                                              Handle<TypeFeedbackVector>,
-                                              FeedbackVectorSlot);
-
-  HValue* object() const { return OperandAt(0); }
-  HValue* key() const { return OperandAt(1); }
-  HValue* value() const { return OperandAt(2); }
-  HValue* context() const { return OperandAt(3); }
-  LanguageMode language_mode() const { return language_mode_; }
-
-  Representation RequiredInputRepresentation(int index) override {
-    // tagged[tagged] = tagged
-    return Representation::Tagged();
-  }
-
-  FeedbackVectorSlot slot() const { return slot_; }
-  Handle<TypeFeedbackVector> feedback_vector() const {
-    return feedback_vector_;
-  }
-
-  std::ostream& PrintDataTo(std::ostream& os) const override;  // NOLINT
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric)
-
- private:
-  HStoreKeyedGeneric(HValue* context, HValue* object, HValue* key,
-                     HValue* value, LanguageMode language_mode,
-                     Handle<TypeFeedbackVector> vector, FeedbackVectorSlot slot)
-      : feedback_vector_(vector), slot_(slot), language_mode_(language_mode) {
-    SetOperandAt(0, object);
-    SetOperandAt(1, key);
-    SetOperandAt(2, value);
-    SetOperandAt(3, context);
-    SetAllSideEffects();
-  }
-
-  Handle<TypeFeedbackVector> feedback_vector_;
-  FeedbackVectorSlot slot_;
-  LanguageMode language_mode_;
-};
-
 class HTransitionElementsKind final : public HTemplateInstruction<2> {
  public:
   inline static HTransitionElementsKind* New(Isolate* isolate, Zone* zone,
diff --git a/src/crankshaft/hydrogen-osr.h b/src/crankshaft/hydrogen-osr.h
index 0610b42..3bd9b6e 100644
--- a/src/crankshaft/hydrogen-osr.h
+++ b/src/crankshaft/hydrogen-osr.h
@@ -6,7 +6,7 @@
 #define V8_CRANKSHAFT_HYDROGEN_OSR_H_
 
 #include "src/crankshaft/hydrogen.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/hydrogen-types.cc b/src/crankshaft/hydrogen-types.cc
index 20d50d8..684e6ad 100644
--- a/src/crankshaft/hydrogen-types.cc
+++ b/src/crankshaft/hydrogen-types.cc
@@ -12,17 +12,17 @@
 namespace internal {
 
 // static
-HType HType::FromType(Type* type) {
-  if (Type::Any()->Is(type)) return HType::Any();
+HType HType::FromType(AstType* type) {
+  if (AstType::Any()->Is(type)) return HType::Any();
   if (!type->IsInhabited()) return HType::None();
-  if (type->Is(Type::SignedSmall())) return HType::Smi();
-  if (type->Is(Type::Number())) return HType::TaggedNumber();
-  if (type->Is(Type::Null())) return HType::Null();
-  if (type->Is(Type::String())) return HType::String();
-  if (type->Is(Type::Boolean())) return HType::Boolean();
-  if (type->Is(Type::Undefined())) return HType::Undefined();
-  if (type->Is(Type::Object())) return HType::JSObject();
-  if (type->Is(Type::DetectableReceiver())) return HType::JSReceiver();
+  if (type->Is(AstType::SignedSmall())) return HType::Smi();
+  if (type->Is(AstType::Number())) return HType::TaggedNumber();
+  if (type->Is(AstType::Null())) return HType::Null();
+  if (type->Is(AstType::String())) return HType::String();
+  if (type->Is(AstType::Boolean())) return HType::Boolean();
+  if (type->Is(AstType::Undefined())) return HType::Undefined();
+  if (type->Is(AstType::Object())) return HType::JSObject();
+  if (type->Is(AstType::DetectableReceiver())) return HType::JSReceiver();
   return HType::Tagged();
 }
 
diff --git a/src/crankshaft/hydrogen-types.h b/src/crankshaft/hydrogen-types.h
index 0690ece..3e68872 100644
--- a/src/crankshaft/hydrogen-types.h
+++ b/src/crankshaft/hydrogen-types.h
@@ -8,8 +8,8 @@
 #include <climits>
 #include <iosfwd>
 
+#include "src/ast/ast-types.h"
 #include "src/base/macros.h"
-#include "src/types.h"
 
 namespace v8 {
 namespace internal {
@@ -64,7 +64,7 @@
   HTYPE_LIST(DECLARE_IS_TYPE)
   #undef DECLARE_IS_TYPE
 
-  static HType FromType(Type* type) WARN_UNUSED_RESULT;
+  static HType FromType(AstType* type) WARN_UNUSED_RESULT;
   static HType FromFieldType(Handle<FieldType> type,
                              Zone* temp_zone) WARN_UNUSED_RESULT;
   static HType FromValue(Handle<Object> value) WARN_UNUSED_RESULT;
diff --git a/src/crankshaft/hydrogen.cc b/src/crankshaft/hydrogen.cc
index a33d2a6..8d7b479 100644
--- a/src/crankshaft/hydrogen.cc
+++ b/src/crankshaft/hydrogen.cc
@@ -9,6 +9,7 @@
 
 #include "src/allocation-site-scopes.h"
 #include "src/ast/ast-numbering.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/crankshaft/hydrogen-bce.h"
@@ -42,7 +43,6 @@
 // GetRootConstructor
 #include "src/ic/ic-inl.h"
 #include "src/isolate-inl.h"
-#include "src/parsing/parser.h"
 #include "src/runtime/runtime.h"
 
 #if V8_TARGET_ARCH_IA32
@@ -75,7 +75,9 @@
 class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
  public:
   explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
-      : HOptimizedGraphBuilder(info) {}
+      : HOptimizedGraphBuilder(info, true) {
+    SetSourcePosition(info->shared_info()->start_position());
+  }
 
 #define DEF_VISIT(type)                                      \
   void Visit##type(type* node) override {                    \
@@ -178,9 +180,10 @@
   }
 
   HOptimizedGraphBuilder* graph_builder =
-      (info()->is_tracking_positions() || FLAG_trace_ic)
+      (FLAG_hydrogen_track_positions || isolate()->is_profiling() ||
+       FLAG_trace_ic)
           ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
-          : new (info()->zone()) HOptimizedGraphBuilder(info());
+          : new (info()->zone()) HOptimizedGraphBuilder(info(), false);
 
   // Type-check the function.
   AstTyper(info()->isolate(), info()->zone(), info()->closure(),
@@ -1362,7 +1365,7 @@
   DCHECK(!FLAG_minimal);
   graph_ = new (zone()) HGraph(info_, descriptor_);
   if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
-  if (!info_->IsStub() && info_->is_tracking_positions()) {
+  if (!info_->IsStub() && is_tracking_positions()) {
     TraceInlinedFunction(info_->shared_info(), SourcePosition::Unknown());
   }
   CompilationPhase phase("H_Block building", info_);
@@ -1374,7 +1377,7 @@
 
 int HGraphBuilder::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                         SourcePosition position) {
-  DCHECK(info_->is_tracking_positions());
+  DCHECK(is_tracking_positions());
 
   int inline_id = static_cast<int>(graph()->inlined_function_infos().size());
   HInlinedFunctionInfo info(shared->start_position());
@@ -1645,48 +1648,6 @@
 }
 
 
-void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
-                                                HValue* map,
-                                                ElementsKind from_kind,
-                                                ElementsKind to_kind,
-                                                bool is_jsarray) {
-  DCHECK(!IsFastHoleyElementsKind(from_kind) ||
-         IsFastHoleyElementsKind(to_kind));
-
-  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
-    Add<HTrapAllocationMemento>(object);
-  }
-
-  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
-    HInstruction* elements = AddLoadElements(object);
-
-    HInstruction* empty_fixed_array = Add<HConstant>(
-        isolate()->factory()->empty_fixed_array());
-
-    IfBuilder if_builder(this);
-
-    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);
-
-    if_builder.Then();
-
-    HInstruction* elements_length = AddLoadFixedArrayLength(elements);
-
-    HInstruction* array_length =
-        is_jsarray
-            ? Add<HLoadNamedField>(object, nullptr,
-                                   HObjectAccess::ForArrayLength(from_kind))
-            : elements_length;
-
-    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
-                              array_length, elements_length);
-
-    if_builder.End();
-  }
-
-  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
-}
-
-
 void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                        int bit_field_mask) {
   // Check that the object isn't a smi.
@@ -2129,8 +2090,7 @@
   return result;
 }
 
-
-HValue* HGraphBuilder::BuildNumberToString(HValue* object, Type* type) {
+HValue* HGraphBuilder::BuildNumberToString(HValue* object, AstType* type) {
   NoObservableSideEffectsScope scope(this);
 
   // Convert constant numbers at compile time.
@@ -2180,7 +2140,7 @@
   }
   if_objectissmi.Else();
   {
-    if (type->Is(Type::SignedSmall())) {
+    if (type->Is(AstType::SignedSmall())) {
       if_objectissmi.Deopt(DeoptimizeReason::kExpectedSmi);
     } else {
       // Check if the object is a heap number.
@@ -2236,7 +2196,7 @@
       }
       if_objectisnumber.Else();
       {
-        if (type->Is(Type::Number())) {
+        if (type->Is(AstType::Number())) {
           if_objectisnumber.Deopt(DeoptimizeReason::kExpectedHeapNumber);
         }
       }
@@ -2411,7 +2371,7 @@
   HValue* length = AddUncasted<HAdd>(left_length, right_length);
   // Check that length <= kMaxLength <=> length < MaxLength + 1.
   HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
-  if (top_info()->IsStub()) {
+  if (top_info()->IsStub() || !isolate()->IsStringLengthOverflowIntact()) {
     // This is a mitigation for crbug.com/627934; the real fix
     // will be to migrate the StringAddStub to TurboFan one day.
     IfBuilder if_invalid(this);
@@ -2423,6 +2383,7 @@
     }
     if_invalid.End();
   } else {
+    graph()->MarkDependsOnStringLengthOverflow();
     Add<HBoundsCheck>(length, max_length);
   }
   return length;
@@ -2652,7 +2613,7 @@
 
       IfBuilder if_size(this);
       if_size.If<HCompareNumericAndBranch>(
-          size, Add<HConstant>(Page::kMaxRegularHeapObjectSize), Token::LT);
+          size, Add<HConstant>(kMaxRegularHeapObjectSize), Token::LT);
       if_size.Then();
       {
         // Allocate the string object. HAllocate does not care whether we pass
@@ -3075,9 +3036,10 @@
                                                  ElementsKind new_kind,
                                                  HValue* length,
                                                  HValue* new_capacity) {
-  Add<HBoundsCheck>(new_capacity, Add<HConstant>(
-          (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
-          ElementsKindToShiftSize(new_kind)));
+  Add<HBoundsCheck>(
+      new_capacity,
+      Add<HConstant>((kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
+                     ElementsKindToShiftSize(new_kind)));
 
   HValue* new_elements =
       BuildAllocateAndInitializeArray(new_kind, new_capacity);
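
The bound checked here is simply the largest element count whose backing store still fits in a regular heap object once the fixed-array header is subtracted. A standalone sketch of that arithmetic; every constant below is a placeholder, not the real V8 value.

    #include <cstdio>

    int main() {
      const int kMaxRegularHeapObjectSize = 512 * 1024;  // assumed size cap
      const int kFixedArrayHeaderSize = 16;              // assumed header size
      const int kElementShift = 3;                       // 8-byte elements

      int max_capacity =
          (kMaxRegularHeapObjectSize - kFixedArrayHeaderSize) >> kElementShift;
      std::printf("new_capacity must stay below %d elements\n", max_capacity);
      return 0;
    }
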
@@ -3268,93 +3230,6 @@
   AddIncrementCounter(counters->inlined_copied_elements());
 }
 
-
-HValue* HGraphBuilder::BuildCloneShallowArrayCow(HValue* boilerplate,
-                                                 HValue* allocation_site,
-                                                 AllocationSiteMode mode,
-                                                 ElementsKind kind) {
-  HAllocate* array = AllocateJSArrayObject(mode);
-
-  HValue* map = AddLoadMap(boilerplate);
-  HValue* elements = AddLoadElements(boilerplate);
-  HValue* length = AddLoadArrayLength(boilerplate, kind);
-
-  BuildJSArrayHeader(array,
-                     map,
-                     elements,
-                     mode,
-                     FAST_ELEMENTS,
-                     allocation_site,
-                     length);
-  return array;
-}
-
-
-HValue* HGraphBuilder::BuildCloneShallowArrayEmpty(HValue* boilerplate,
-                                                   HValue* allocation_site,
-                                                   AllocationSiteMode mode) {
-  HAllocate* array = AllocateJSArrayObject(mode);
-
-  HValue* map = AddLoadMap(boilerplate);
-
-  BuildJSArrayHeader(array,
-                     map,
-                     NULL,  // set elements to empty fixed array
-                     mode,
-                     FAST_ELEMENTS,
-                     allocation_site,
-                     graph()->GetConstant0());
-  return array;
-}
-
-
-HValue* HGraphBuilder::BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
-                                                      HValue* allocation_site,
-                                                      AllocationSiteMode mode,
-                                                      ElementsKind kind) {
-  HValue* boilerplate_elements = AddLoadElements(boilerplate);
-  HValue* capacity = AddLoadFixedArrayLength(boilerplate_elements);
-
-  // Generate size calculation code here in order to make it dominate
-  // the JSArray allocation.
-  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
-
-  // Create empty JSArray object for now, store elimination should remove
-  // redundant initialization of elements and length fields and at the same
-  // time the object will be fully prepared for GC if it happens during
-  // elements allocation.
-  HValue* result = BuildCloneShallowArrayEmpty(
-      boilerplate, allocation_site, mode);
-
-  HAllocate* elements = BuildAllocateElements(kind, elements_size);
-
-  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(), elements);
-
-  // The allocation for the cloned array above causes register pressure on
-  // machines with low register counts. Force a reload of the boilerplate
-  // elements here to free up a register for the allocation to avoid unnecessary
-  // spillage.
-  boilerplate_elements = AddLoadElements(boilerplate);
-  boilerplate_elements->SetFlag(HValue::kCantBeReplaced);
-
-  // Copy the elements array header.
-  for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
-    HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
-    Add<HStoreNamedField>(
-        elements, access,
-        Add<HLoadNamedField>(boilerplate_elements, nullptr, access));
-  }
-
-  // And the result of the length
-  HValue* length = AddLoadArrayLength(boilerplate, kind);
-  Add<HStoreNamedField>(result, HObjectAccess::ForArrayLength(kind), length);
-
-  BuildCopyElements(boilerplate_elements, kind, elements,
-                    kind, length, NULL);
-  return result;
-}
-
-
 void HGraphBuilder::BuildCreateAllocationMemento(
     HValue* previous_object,
     HValue* previous_object_size,
@@ -3402,16 +3277,6 @@
 }
 
 
-HInstruction* HGraphBuilder::BuildGetScriptContext(int context_index) {
-  HValue* native_context = BuildGetNativeContext();
-  HValue* script_context_table = Add<HLoadNamedField>(
-      native_context, nullptr,
-      HObjectAccess::ForContextSlot(Context::SCRIPT_CONTEXT_TABLE_INDEX));
-  return Add<HLoadNamedField>(script_context_table, nullptr,
-                              HObjectAccess::ForScriptContext(context_index));
-}
-
-
 HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
   HValue* script_context = context();
   if (depth != NULL) {
@@ -3504,8 +3369,9 @@
   return Add<HLoadNamedField>(native_context, nullptr, function_access);
 }
 
-HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
-    : HGraphBuilder(info, CallInterfaceDescriptor()),
+HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
+                                               bool track_positions)
+    : HGraphBuilder(info, CallInterfaceDescriptor(), track_positions),
       function_state_(NULL),
       initial_function_state_(this, info, NORMAL_RETURN, 0,
                               TailCallMode::kAllow),
@@ -3520,9 +3386,6 @@
   // to know it's the initial state.
   function_state_ = &initial_function_state_;
   InitializeAstVisitor(info->isolate());
-  if (top_info()->is_tracking_positions()) {
-    SetSourcePosition(info->shared_info()->start_position());
-  }
 }
 
 
@@ -3622,6 +3485,7 @@
       allow_code_motion_(false),
       use_optimistic_licm_(false),
       depends_on_empty_array_proto_elements_(false),
+      depends_on_string_length_overflow_(false),
       type_change_checksum_(0),
       maximum_environment_size_(0),
       no_side_effects_scope_count_(0),
@@ -3629,8 +3493,8 @@
       inlined_function_infos_(info->zone()) {
   if (info->IsStub()) {
     // For stubs, explicitly add the context to the environment.
-    start_environment_ = new (zone_)
-        HEnvironment(zone_, descriptor.GetRegisterParameterCount() + 1);
+    start_environment_ =
+        new (zone_) HEnvironment(zone_, descriptor.GetParameterCount() + 1);
   } else {
     start_environment_ =
         new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
@@ -4088,7 +3952,7 @@
   // Push on the state stack.
   owner->set_function_state(this);
 
-  if (compilation_info_->is_tracking_positions()) {
+  if (owner->is_tracking_positions()) {
     outer_source_position_ = owner->source_position();
     owner->EnterInlinedSource(
       info->shared_info()->start_position(),
@@ -4102,7 +3966,7 @@
   delete test_context_;
   owner_->set_function_state(outer_);
 
-  if (compilation_info_->is_tracking_positions()) {
+  if (owner_->is_tracking_positions()) {
     owner_->set_source_position(outer_source_position_);
     owner_->EnterInlinedSource(
       outer_->compilation_info()->shared_info()->start_position(),
@@ -4651,9 +4515,7 @@
     environment()->Bind(scope->arguments(), arguments_object);
   }
 
-  int rest_index;
-  Variable* rest = scope->rest_parameter(&rest_index);
-  if (rest) {
+  if (scope->rest_parameter() != nullptr) {
     return Bailout(kRestParameter);
   }
 
@@ -4704,7 +4566,7 @@
         }
         AddInstruction(function);
         // Allocate a block context and store it to the stack frame.
-        HValue* scope_info = Add<HConstant>(scope->GetScopeInfo(isolate()));
+        HValue* scope_info = Add<HConstant>(scope->scope_info());
         Add<HPushArguments>(scope_info, function);
         HInstruction* inner_context = Add<HCallRuntime>(
             Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
@@ -5001,7 +4863,7 @@
   CHECK_ALIVE(VisitForValue(stmt->tag()));
   Add<HSimulate>(stmt->EntryId());
   HValue* tag_value = Top();
-  Type* tag_type = bounds_.get(stmt->tag()).lower;
+  AstType* tag_type = bounds_.get(stmt->tag()).lower;
 
   // 1. Build all the tests, with dangling true branches
   BailoutId default_id = BailoutId::None();
@@ -5018,8 +4880,8 @@
     if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
     HValue* label_value = Pop();
 
-    Type* label_type = bounds_.get(clause->label()).lower;
-    Type* combined_type = clause->compare_type();
+    AstType* label_type = bounds_.get(clause->label()).lower;
+    AstType* combined_type = clause->compare_type();
     HControlInstruction* compare = BuildCompareInstruction(
         Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
         combined_type,
@@ -5634,7 +5496,6 @@
   DCHECK(current_block()->HasPredecessor());
   Variable* variable = expr->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       if (IsLexicalVariableMode(variable->mode())) {
         // TODO(rossberg): should this be an DCHECK?
@@ -6218,7 +6079,7 @@
     PropertyAccessInfo* info) {
   if (!CanInlinePropertyAccess(map_)) return false;
 
-  // Currently only handle Type::Number as a polymorphic case.
+  // Currently only handle AstType::Number as a polymorphic case.
   // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
   // instruction.
   if (IsNumberType()) return false;
@@ -6929,9 +6790,16 @@
         HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
     Handle<TypeFeedbackVector> vector =
         handle(current_feedback_vector(), isolate());
-    HStoreNamedGeneric* instr =
-        Add<HStoreNamedGeneric>(global_object, var->name(), value,
-                                function_language_mode(), vector, slot);
+    HValue* name = Add<HConstant>(var->name());
+    HValue* vector_value = Add<HConstant>(vector);
+    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
+    Callable callable = CodeFactory::StoreICInOptimizedCode(
+        isolate(), function_language_mode());
+    HValue* stub = Add<HConstant>(callable.code());
+    HValue* values[] = {context(), global_object, name,
+                        value,     slot_value,    vector_value};
+    HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
+        stub, 0, callable.descriptor(), ArrayVector(values));
     USE(instr);
     DCHECK(instr->HasObservableSideEffects());
     Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
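
After this change a global store is no longer a dedicated HStoreNamedGeneric instruction; it becomes a generic HCallWithDescriptor to the StoreIC code object, with the operands packed into one array in the order the descriptor expects (context, receiver, name, value, slot, vector). A rough standalone sketch of that shape, using a plain function pointer in place of the code stub; none of this is V8 API.

    #include <cstdio>

    // Stand-in for the call descriptor: it only fixes operand order and count.
    const int kOperandCount = 6;  // context, receiver, name, value, slot, vector

    // Stand-in for the StoreIC code object: a callable taking packed operands.
    typedef void (*Stub)(const char* const* values, int count);

    static void StoreICStub(const char* const* values, int count) {
      std::printf("store %s.%s = %s (%d operands)\n", values[1], values[2],
                  values[3], count);
    }

    int main() {
      const char* values[kOperandCount] = {"context", "global_object", "x",
                                           "42", "slot#3", "feedback_vector"};
      Stub stub = &StoreICStub;     // cf. Add<HConstant>(callable.code())
      stub(values, kOperandCount);  // cf. the HCallWithDescriptor above
      return 0;
    }

Expressing the store as an ordinary descriptor-based call is what allows the dedicated LStoreNamedGeneric/LStoreKeyedGeneric instructions and their codegen to be deleted across all architectures in this diff.
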
@@ -6958,7 +6826,6 @@
     CHECK_ALIVE(VisitForValue(operation));
 
     switch (var->location()) {
-      case VariableLocation::GLOBAL:
       case VariableLocation::UNALLOCATED:
         HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
                                        expr->AssignmentId());
@@ -6966,9 +6833,6 @@
 
       case VariableLocation::PARAMETER:
       case VariableLocation::LOCAL:
-        if (var->mode() == CONST_LEGACY)  {
-          return Bailout(kUnsupportedConstCompoundAssignment);
-        }
         if (var->mode() == CONST) {
           return Bailout(kNonInitializerAssignmentToConst);
         }
@@ -6998,9 +6862,7 @@
             mode = HStoreContextSlot::kCheckDeoptimize;
             break;
           case CONST:
-            return Bailout(kNonInitializerAssignmentToConst);
-          case CONST_LEGACY:
-            if (is_strict(function_language_mode())) {
+            if (var->throw_on_const_assignment(function_language_mode())) {
               return Bailout(kNonInitializerAssignmentToConst);
             } else {
               return ast_context()->ReturnValue(Pop());
@@ -7072,33 +6934,17 @@
 
     if (var->mode() == CONST) {
       if (expr->op() != Token::INIT) {
-        return Bailout(kNonInitializerAssignmentToConst);
-      }
-    } else if (var->mode() == CONST_LEGACY) {
-      if (expr->op() != Token::INIT) {
-        if (is_strict(function_language_mode())) {
+        if (var->throw_on_const_assignment(function_language_mode())) {
           return Bailout(kNonInitializerAssignmentToConst);
         } else {
           CHECK_ALIVE(VisitForValue(expr->value()));
           return ast_context()->ReturnValue(Pop());
         }
       }
-
-      // TODO(adamk): Is this required? Legacy const variables are always
-      // initialized before use.
-      if (var->IsStackAllocated()) {
-        // We insert a use of the old value to detect unsupported uses of const
-        // variables (e.g. initialization inside a loop).
-        HValue* old_value = environment()->Lookup(var);
-        Add<HUseConst>(old_value);
-      }
     }
 
-    if (var->is_arguments()) return Bailout(kAssignmentToArguments);
-
     // Handle the assignment.
     switch (var->location()) {
-      case VariableLocation::GLOBAL:
       case VariableLocation::UNALLOCATED:
         CHECK_ALIVE(VisitForValue(expr->value()));
         HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
@@ -7147,10 +6993,10 @@
               mode = HStoreContextSlot::kCheckDeoptimize;
               break;
             case CONST:
-              // This case is checked statically so no need to
-              // perform checks here
-              UNREACHABLE();
-            case CONST_LEGACY:
+              // If we reached this point, the only possibility
+              // is a sloppy assignment to a function name.
+              DCHECK(function_language_mode() == SLOPPY &&
+                     !var->throw_on_const_assignment(SLOPPY));
               return ast_context()->ReturnValue(Pop());
             default:
               mode = HStoreContextSlot::kNoCheck;
@@ -7200,7 +7046,7 @@
   CHECK_ALIVE(VisitForValue(expr->exception()));
 
   HValue* value = environment()->Pop();
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
   Add<HPushArguments>(value);
   Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
   Add<HSimulate>(expr->id());
@@ -7274,20 +7120,30 @@
     Handle<TypeFeedbackVector> vector =
         handle(current_feedback_vector(), isolate());
 
+    HValue* key = Add<HConstant>(name);
+    HValue* vector_value = Add<HConstant>(vector);
+    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
+    HValue* values[] = {context(), object,     key,
+                        value,     slot_value, vector_value};
+
     if (current_feedback_vector()->GetKind(slot) ==
         FeedbackVectorSlotKind::KEYED_STORE_IC) {
       // It's possible that a keyed store of a constant string was converted
       // to a named store. Here, at the last minute, we need to make sure to
       // use a generic Keyed Store if we are using the type vector, because
       // it has to share information with full code.
-      HConstant* key = Add<HConstant>(name);
-      HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
-          object, key, value, function_language_mode(), vector, slot);
+      Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
+          isolate(), function_language_mode());
+      HValue* stub = Add<HConstant>(callable.code());
+      HCallWithDescriptor* result = New<HCallWithDescriptor>(
+          stub, 0, callable.descriptor(), ArrayVector(values));
       return result;
     }
-
-    HStoreNamedGeneric* result = New<HStoreNamedGeneric>(
-        object, name, value, function_language_mode(), vector, slot);
+    Callable callable = CodeFactory::StoreICInOptimizedCode(
+        isolate(), function_language_mode());
+    HValue* stub = Add<HConstant>(callable.code());
+    HCallWithDescriptor* result = New<HCallWithDescriptor>(
+        stub, 0, callable.descriptor(), ArrayVector(values));
     return result;
   }
 }
@@ -7303,8 +7159,16 @@
         New<HLoadKeyedGeneric>(object, key, vector, slot);
     return result;
   } else {
-    HStoreKeyedGeneric* result = New<HStoreKeyedGeneric>(
-        object, key, value, function_language_mode(), vector, slot);
+    HValue* vector_value = Add<HConstant>(vector);
+    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
+    HValue* values[] = {context(), object,     key,
+                        value,     slot_value, vector_value};
+
+    Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
+        isolate(), function_language_mode());
+    HValue* stub = Add<HConstant>(callable.code());
+    HCallWithDescriptor* result = New<HCallWithDescriptor>(
+        stub, 0, callable.descriptor(), ArrayVector(values));
     return result;
   }
 }
@@ -7843,7 +7707,7 @@
     }
 
     HValue* checked_object;
-    // Type::Number() is only supported by polymorphic load/call handling.
+    // AstType::Number() is only supported by polymorphic load/call handling.
     DCHECK(!info.IsNumberType());
     BuildCheckHeapObject(object);
     if (AreStringTypes(maps)) {
@@ -8409,14 +8273,12 @@
     return false;
   }
 
-  if (target_info.scope()->num_heap_slots() > 0) {
+  if (target_info.scope()->NeedsContext()) {
     TraceInline(target, caller, "target has context-allocated variables");
     return false;
   }
 
-  int rest_index;
-  Variable* rest = target_info.scope()->rest_parameter(&rest_index);
-  if (rest) {
+  if (target_info.scope()->rest_parameter() != nullptr) {
     TraceInline(target, caller, "target uses rest parameters");
     return false;
   }
@@ -8490,7 +8352,7 @@
       .Run();
 
   int inlining_id = 0;
-  if (top_info()->is_tracking_positions()) {
+  if (is_tracking_positions()) {
     inlining_id = TraceInlinedFunction(target_shared, source_position());
   }
 
@@ -8539,7 +8401,7 @@
       return_id, target, context, arguments_count, function,
       function_state()->inlining_kind(), function->scope()->arguments(),
       arguments_object, syntactic_tail_call_mode);
-  if (top_info()->is_tracking_positions()) {
+  if (is_tracking_positions()) {
     enter_inlined->set_inlining_id(inlining_id);
   }
   function_state()->set_entry(enter_inlined);
@@ -9375,7 +9237,7 @@
   HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
 
   HValue* op_vals[] = {context(), Add<HConstant>(function), call_data, holder,
-                       api_function_address, nullptr};
+                       api_function_address};
 
   HInstruction* call = nullptr;
   CHECK(argc <= CallApiCallbackStub::kArgMax);
@@ -9386,16 +9248,14 @@
     HConstant* code_value = Add<HConstant>(code);
     call = New<HCallWithDescriptor>(
         code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
-        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
-        syntactic_tail_call_mode);
+        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
   } else {
     CallApiCallbackStub stub(isolate(), argc, call_data_undefined, false);
     Handle<Code> code = stub.GetCode();
     HConstant* code_value = Add<HConstant>(code);
     call = New<HCallWithDescriptor>(
         code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
-        Vector<HValue*>(op_vals, arraysize(op_vals) - 1),
-        syntactic_tail_call_mode);
+        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
     Drop(1);  // Drop function.
   }
 
@@ -9461,8 +9321,6 @@
     case kFunctionApply: {
       // For .apply, only the pattern f.apply(receiver, arguments)
       // is supported.
-      if (current_info()->scope()->arguments() == NULL) return false;
-
       if (!CanBeFunctionApplyArguments(expr)) return false;
 
       BuildFunctionApply(expr);
@@ -9482,6 +9340,10 @@
   HValue* function = Pop();  // f
   Drop(1);  // apply
 
+  // Make sure the arguments object is live.
+  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
+  LookupAndMakeLive(arg_two->var());
+
   Handle<Map> function_map = expr->GetReceiverTypes()->first();
   HValue* checked_function = AddCheckMap(function, function_map);
 
@@ -9727,8 +9589,9 @@
   if (args->length() != 2) return false;
   VariableProxy* arg_two = args->at(1)->AsVariableProxy();
   if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
-  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
+  HValue* arg_two_value = environment()->Lookup(arg_two->var());
   if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
+  DCHECK_NOT_NULL(current_info()->scope()->arguments());
   return true;
 }
 
@@ -9737,7 +9600,7 @@
   DCHECK(!HasStackOverflow());
   DCHECK(current_block() != NULL);
   DCHECK(current_block()->HasPredecessor());
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
   Expression* callee = expr->expression();
   int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
   HInstruction* call = NULL;
@@ -9975,7 +9838,7 @@
   HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
 
   // Bail out for large objects.
-  HValue* max_size = Add<HConstant>(Page::kMaxRegularHeapObjectSize);
+  HValue* max_size = Add<HConstant>(kMaxRegularHeapObjectSize);
   Add<HBoundsCheck>(elements_size, max_size);
 
   // Allocate (dealing with failure appropriately).
@@ -10019,7 +9882,7 @@
   DCHECK(!HasStackOverflow());
   DCHECK(current_block() != NULL);
   DCHECK(current_block()->HasPredecessor());
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
   int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
   Factory* factory = isolate()->factory();
 
@@ -10419,6 +10282,8 @@
 
     HInstruction* length = AddUncasted<HDiv>(byte_length,
         Add<HConstant>(static_cast<int32_t>(element_size)));
+    // Callers (in typedarray.js) ensure that length <= %_MaxSmi().
+    length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
 
     Add<HStoreNamedField>(obj,
         HObjectAccess::ForJSTypedArrayLength(),
@@ -10602,7 +10467,7 @@
     return ast_context()->ReturnInstruction(instr, expr->id());
   } else if (proxy != NULL) {
     Variable* var = proxy->var();
-    if (var->IsUnallocatedOrGlobalSlot()) {
+    if (var->IsUnallocated()) {
       Bailout(kDeleteWithGlobalVariable);
     } else if (var->IsStackAllocated() || var->IsContextSlot()) {
       // Result of deleting non-global variables is false.  'this' is not really
@@ -10680,13 +10545,12 @@
   if (join != NULL) return ast_context()->ReturnValue(Pop());
 }
 
-
-static Representation RepresentationFor(Type* type) {
+static Representation RepresentationFor(AstType* type) {
   DisallowHeapAllocation no_allocation;
-  if (type->Is(Type::None())) return Representation::None();
-  if (type->Is(Type::SignedSmall())) return Representation::Smi();
-  if (type->Is(Type::Signed32())) return Representation::Integer32();
-  if (type->Is(Type::Number())) return Representation::Double();
+  if (type->Is(AstType::None())) return Representation::None();
+  if (type->Is(AstType::SignedSmall())) return Representation::Smi();
+  if (type->Is(AstType::Signed32())) return Representation::Integer32();
+  if (type->Is(AstType::Number())) return Representation::Double();
   return Representation::Tagged();
 }
 
@@ -10745,7 +10609,7 @@
   DCHECK(!HasStackOverflow());
   DCHECK(current_block() != NULL);
   DCHECK(current_block()->HasPredecessor());
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
   Expression* target = expr->expression();
   VariableProxy* proxy = target->AsVariableProxy();
   Property* prop = target->AsProperty();
@@ -10763,9 +10627,6 @@
 
   if (proxy != NULL) {
     Variable* var = proxy->var();
-    if (var->mode() == CONST_LEGACY)  {
-      return Bailout(kUnsupportedCountOperationWithConst);
-    }
     if (var->mode() == CONST) {
       return Bailout(kNonInitializerAssignmentToConst);
     }
@@ -10778,7 +10639,6 @@
     Push(after);
 
     switch (var->location()) {
-      case VariableLocation::GLOBAL:
       case VariableLocation::UNALLOCATED:
         HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
                                        expr->AssignmentId());
@@ -10939,27 +10799,24 @@
   return true;
 }
 
-
-HValue* HGraphBuilder::EnforceNumberType(HValue* number,
-                                         Type* expected) {
-  if (expected->Is(Type::SignedSmall())) {
+HValue* HGraphBuilder::EnforceNumberType(HValue* number, AstType* expected) {
+  if (expected->Is(AstType::SignedSmall())) {
     return AddUncasted<HForceRepresentation>(number, Representation::Smi());
   }
-  if (expected->Is(Type::Signed32())) {
+  if (expected->Is(AstType::Signed32())) {
     return AddUncasted<HForceRepresentation>(number,
                                              Representation::Integer32());
   }
   return number;
 }
 
-
-HValue* HGraphBuilder::TruncateToNumber(HValue* value, Type** expected) {
+HValue* HGraphBuilder::TruncateToNumber(HValue* value, AstType** expected) {
   if (value->IsConstant()) {
     HConstant* constant = HConstant::cast(value);
     Maybe<HConstant*> number =
         constant->CopyToTruncatedNumber(isolate(), zone());
     if (number.IsJust()) {
-      *expected = Type::Number();
+      *expected = AstType::Number();
       return AddInstruction(number.FromJust());
     }
   }
@@ -10969,24 +10826,24 @@
   // pushes with a NoObservableSideEffectsScope.
   NoObservableSideEffectsScope no_effects(this);
 
-  Type* expected_type = *expected;
+  AstType* expected_type = *expected;
 
   // Separate the number type from the rest.
-  Type* expected_obj =
-      Type::Intersect(expected_type, Type::NonNumber(), zone());
-  Type* expected_number =
-      Type::Intersect(expected_type, Type::Number(), zone());
+  AstType* expected_obj =
+      AstType::Intersect(expected_type, AstType::NonNumber(), zone());
+  AstType* expected_number =
+      AstType::Intersect(expected_type, AstType::Number(), zone());
 
   // We expect to get a number.
-  // (We need to check first, since Type::None->Is(Type::Any()) == true.
-  if (expected_obj->Is(Type::None())) {
-    DCHECK(!expected_number->Is(Type::None()));
+  // (We need to check first, since AstType::None->Is(AstType::Any()) == true.)
+  if (expected_obj->Is(AstType::None())) {
+    DCHECK(!expected_number->Is(AstType::None()));
     return value;
   }
 
-  if (expected_obj->Is(Type::Undefined())) {
+  if (expected_obj->Is(AstType::Undefined())) {
     // This is already done by HChange.
-    *expected = Type::Union(expected_number, Type::Number(), zone());
+    *expected = AstType::Union(expected_number, AstType::Number(), zone());
     return value;
   }
 
@@ -10999,9 +10856,9 @@
     HValue* left,
     HValue* right,
     PushBeforeSimulateBehavior push_sim_result) {
-  Type* left_type = bounds_.get(expr->left()).lower;
-  Type* right_type = bounds_.get(expr->right()).lower;
-  Type* result_type = bounds_.get(expr).lower;
+  AstType* left_type = bounds_.get(expr->left()).lower;
+  AstType* right_type = bounds_.get(expr->right()).lower;
+  AstType* result_type = bounds_.get(expr).lower;
   Maybe<int> fixed_right_arg = expr->fixed_right_arg();
   Handle<AllocationSite> allocation_site = expr->allocation_site();
 
@@ -11027,12 +10884,10 @@
   return result;
 }
 
-HValue* HGraphBuilder::BuildBinaryOperation(Token::Value op, HValue* left,
-                                            HValue* right, Type* left_type,
-                                            Type* right_type, Type* result_type,
-                                            Maybe<int> fixed_right_arg,
-                                            HAllocationMode allocation_mode,
-                                            BailoutId opt_id) {
+HValue* HGraphBuilder::BuildBinaryOperation(
+    Token::Value op, HValue* left, HValue* right, AstType* left_type,
+    AstType* right_type, AstType* result_type, Maybe<int> fixed_right_arg,
+    HAllocationMode allocation_mode, BailoutId opt_id) {
   bool maybe_string_add = false;
   if (op == Token::ADD) {
     // If we are adding constant string with something for which we don't have
@@ -11040,18 +10895,18 @@
     // generate deopt instructions.
     if (!left_type->IsInhabited() && right->IsConstant() &&
         HConstant::cast(right)->HasStringValue()) {
-      left_type = Type::String();
+      left_type = AstType::String();
     }
 
     if (!right_type->IsInhabited() && left->IsConstant() &&
         HConstant::cast(left)->HasStringValue()) {
-      right_type = Type::String();
+      right_type = AstType::String();
     }
 
-    maybe_string_add = (left_type->Maybe(Type::String()) ||
-                        left_type->Maybe(Type::Receiver()) ||
-                        right_type->Maybe(Type::String()) ||
-                        right_type->Maybe(Type::Receiver()));
+    maybe_string_add = (left_type->Maybe(AstType::String()) ||
+                        left_type->Maybe(AstType::Receiver()) ||
+                        right_type->Maybe(AstType::String()) ||
+                        right_type->Maybe(AstType::Receiver()));
   }
 
   Representation left_rep = RepresentationFor(left_type);
@@ -11061,7 +10916,7 @@
     Add<HDeoptimize>(
         DeoptimizeReason::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
         Deoptimizer::SOFT);
-    left_type = Type::Any();
+    left_type = AstType::Any();
     left_rep = RepresentationFor(left_type);
     maybe_string_add = op == Token::ADD;
   }
@@ -11070,7 +10925,7 @@
     Add<HDeoptimize>(
         DeoptimizeReason::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
         Deoptimizer::SOFT);
-    right_type = Type::Any();
+    right_type = AstType::Any();
     right_rep = RepresentationFor(right_type);
     maybe_string_add = op == Token::ADD;
   }
@@ -11082,34 +10937,34 @@
 
   // Special case for string addition here.
   if (op == Token::ADD &&
-      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
+      (left_type->Is(AstType::String()) || right_type->Is(AstType::String()))) {
     // Validate type feedback for left argument.
-    if (left_type->Is(Type::String())) {
+    if (left_type->Is(AstType::String())) {
       left = BuildCheckString(left);
     }
 
     // Validate type feedback for right argument.
-    if (right_type->Is(Type::String())) {
+    if (right_type->Is(AstType::String())) {
       right = BuildCheckString(right);
     }
 
     // Convert left argument as necessary.
-    if (left_type->Is(Type::Number())) {
-      DCHECK(right_type->Is(Type::String()));
+    if (left_type->Is(AstType::Number())) {
+      DCHECK(right_type->Is(AstType::String()));
       left = BuildNumberToString(left, left_type);
-    } else if (!left_type->Is(Type::String())) {
-      DCHECK(right_type->Is(Type::String()));
+    } else if (!left_type->Is(AstType::String())) {
+      DCHECK(right_type->Is(AstType::String()));
       return AddUncasted<HStringAdd>(
           left, right, allocation_mode.GetPretenureMode(),
           STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
     }
 
     // Convert right argument as necessary.
-    if (right_type->Is(Type::Number())) {
-      DCHECK(left_type->Is(Type::String()));
+    if (right_type->Is(AstType::Number())) {
+      DCHECK(left_type->Is(AstType::String()));
       right = BuildNumberToString(right, right_type);
-    } else if (!right_type->Is(Type::String())) {
-      DCHECK(left_type->Is(Type::String()));
+    } else if (!right_type->Is(AstType::String())) {
+      DCHECK(left_type->Is(AstType::String()));
       return AddUncasted<HStringAdd>(
           left, right, allocation_mode.GetPretenureMode(),
           STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
@@ -11267,8 +11122,8 @@
         break;
       case Token::BIT_OR: {
         HValue *operand, *shift_amount;
-        if (left_type->Is(Type::Signed32()) &&
-            right_type->Is(Type::Signed32()) &&
+        if (left_type->Is(AstType::Signed32()) &&
+            right_type->Is(AstType::Signed32()) &&
             MatchRotateRight(left, right, &operand, &shift_amount)) {
           instr = AddUncasted<HRor>(operand, shift_amount);
         } else {
@@ -11470,7 +11325,7 @@
       BuildBinaryOperation(expr, left, right,
           ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
                                     : PUSH_BEFORE_SIMULATE);
-  if (top_info()->is_tracking_positions() && result->IsBinaryOperation()) {
+  if (is_tracking_positions() && result->IsBinaryOperation()) {
     HBinaryOperation::cast(result)->SetOperandPositions(
         zone(),
         ScriptPositionToSourcePosition(expr->left()->position()),
@@ -11512,7 +11367,7 @@
   DCHECK(current_block() != NULL);
   DCHECK(current_block()->HasPredecessor());
 
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
 
   // Check for a few fast cases. The AST visiting behavior must be in sync
   // with the full codegen: We don't push both left and right values onto
@@ -11540,9 +11395,9 @@
     return ast_context()->ReturnControl(instr, expr->id());
   }
 
-  Type* left_type = bounds_.get(expr->left()).lower;
-  Type* right_type = bounds_.get(expr->right()).lower;
-  Type* combined_type = expr->combined_type();
+  AstType* left_type = bounds_.get(expr->left()).lower;
+  AstType* right_type = bounds_.get(expr->right()).lower;
+  AstType* combined_type = expr->combined_type();
 
   CHECK_ALIVE(VisitForValue(expr->left()));
   CHECK_ALIVE(VisitForValue(expr->right()));
@@ -11563,24 +11418,37 @@
         HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
       Handle<JSFunction> function =
           Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
-      // Make sure the prototype of {function} is the %FunctionPrototype%, and
-      // it already has a meaningful initial map (i.e. we constructed at least
-      // one instance using the constructor {function}).
-      // We can only use the fast case if @@hasInstance was not used so far.
-      if (function->has_initial_map() &&
-          function->map()->prototype() ==
-              function->native_context()->closure() &&
-          !function->map()->has_non_instance_prototype() &&
-          isolate()->IsHasInstanceLookupChainIntact()) {
-        Handle<Map> initial_map(function->initial_map(), isolate());
-        top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
-        top_info()->dependencies()->AssumePropertyCell(
-            isolate()->factory()->has_instance_protector());
-        HInstruction* prototype =
-            Add<HConstant>(handle(initial_map->prototype(), isolate()));
-        HHasInPrototypeChainAndBranch* result =
-            New<HHasInPrototypeChainAndBranch>(left, prototype);
-        return ast_context()->ReturnControl(result, expr->id());
+      // Make sure that the {function} already has a meaningful initial map
+      // (i.e. we constructed at least one instance using the constructor
+      // {function}).
+      if (function->has_initial_map()) {
+        // Lookup @@hasInstance on the {function}.
+        Handle<Map> function_map(function->map(), isolate());
+        PropertyAccessInfo has_instance(
+            this, LOAD, function_map,
+            isolate()->factory()->has_instance_symbol());
+        // Check if we are using the Function.prototype[@@hasInstance].
+        if (has_instance.CanAccessMonomorphic() &&
+            has_instance.IsDataConstant() &&
+            has_instance.constant().is_identical_to(
+                isolate()->function_has_instance())) {
+          // Add appropriate receiver map check and prototype chain
+          // checks to guard the @@hasInstance lookup chain.
+          AddCheckMap(right, function_map);
+          if (has_instance.has_holder()) {
+            Handle<JSObject> prototype(
+                JSObject::cast(has_instance.map()->prototype()), isolate());
+            BuildCheckPrototypeMaps(prototype, has_instance.holder());
+          }
+          // Perform the prototype chain walk.
+          Handle<Map> initial_map(function->initial_map(), isolate());
+          top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
+          HInstruction* prototype =
+              Add<HConstant>(handle(initial_map->prototype(), isolate()));
+          HHasInPrototypeChainAndBranch* result =
+              New<HHasInPrototypeChainAndBranch>(left, prototype);
+          return ast_context()->ReturnControl(result, expr->id());
+        }
       }
     }
 
@@ -11614,10 +11482,9 @@
   return ast_context()->ReturnControl(compare, expr->id());
 }
 
-
 HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
-    Token::Value op, HValue* left, HValue* right, Type* left_type,
-    Type* right_type, Type* combined_type, SourcePosition left_position,
+    Token::Value op, HValue* left, HValue* right, AstType* left_type,
+    AstType* right_type, AstType* combined_type, SourcePosition left_position,
     SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
     BailoutId bailout_id) {
   // Cases handled below depend on collected type feedback. They should
@@ -11627,14 +11494,14 @@
         DeoptimizeReason::
             kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
         Deoptimizer::SOFT);
-    combined_type = left_type = right_type = Type::Any();
+    combined_type = left_type = right_type = AstType::Any();
   }
 
   Representation left_rep = RepresentationFor(left_type);
   Representation right_rep = RepresentationFor(right_type);
   Representation combined_rep = RepresentationFor(combined_type);
 
-  if (combined_type->Is(Type::Receiver())) {
+  if (combined_type->Is(AstType::Receiver())) {
     if (Token::IsEqualityOp(op)) {
       // HCompareObjectEqAndBranch can only deal with object, so
       // exclude numbers.
@@ -11656,7 +11523,7 @@
         AddCheckMap(operand_to_check, map);
         HCompareObjectEqAndBranch* result =
             New<HCompareObjectEqAndBranch>(left, right);
-        if (top_info()->is_tracking_positions()) {
+        if (is_tracking_positions()) {
           result->set_operand_position(zone(), 0, left_position);
           result->set_operand_position(zone(), 1, right_position);
         }
@@ -11718,7 +11585,7 @@
       Bailout(kUnsupportedNonPrimitiveCompare);
       return NULL;
     }
-  } else if (combined_type->Is(Type::InternalizedString()) &&
+  } else if (combined_type->Is(AstType::InternalizedString()) &&
              Token::IsEqualityOp(op)) {
     // If we have a constant argument, it should be consistent with the type
     // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
@@ -11739,7 +11606,7 @@
     HCompareObjectEqAndBranch* result =
         New<HCompareObjectEqAndBranch>(left, right);
     return result;
-  } else if (combined_type->Is(Type::String())) {
+  } else if (combined_type->Is(AstType::String())) {
     BuildCheckHeapObject(left);
     Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
     BuildCheckHeapObject(right);
@@ -11747,7 +11614,7 @@
     HStringCompareAndBranch* result =
         New<HStringCompareAndBranch>(left, right, op);
     return result;
-  } else if (combined_type->Is(Type::Boolean())) {
+  } else if (combined_type->Is(AstType::Boolean())) {
     AddCheckMap(left, isolate()->factory()->boolean_map());
     AddCheckMap(right, isolate()->factory()->boolean_map());
     if (Token::IsEqualityOp(op)) {
@@ -11799,7 +11666,7 @@
       HCompareNumericAndBranch* result =
           New<HCompareNumericAndBranch>(left, right, op);
       result->set_observed_input_representation(left_rep, right_rep);
-      if (top_info()->is_tracking_positions()) {
+      if (is_tracking_positions()) {
         result->SetOperandPositions(zone(), left_position, right_position);
       }
       return result;
@@ -11815,7 +11682,7 @@
   DCHECK(current_block() != NULL);
   DCHECK(current_block()->HasPredecessor());
   DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
-  if (!top_info()->is_tracking_positions()) SetSourcePosition(expr->position());
+  if (!is_tracking_positions()) SetSourcePosition(expr->position());
   CHECK_ALIVE(VisitForValue(sub_expr));
   HValue* value = Pop();
   HControlInstruction* instr;
@@ -11886,7 +11753,7 @@
       Add<HAllocate>(object_size_constant, type, pretenure_flag, instance_type,
                      graph()->GetConstant0(), top_site);
 
-  // If allocation folding reaches Page::kMaxRegularHeapObjectSize the
+  // If allocation folding reaches kMaxRegularHeapObjectSize, the
   // elements array may not get folded into the object. Hence, we set the
   // elements pointer to empty fixed array and let store elimination remove
   // this store in the folding case.
@@ -12183,7 +12050,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -12223,7 +12089,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -12462,27 +12327,18 @@
 }
 
 
-// Fast support for string.charAt(n) and string[n].
-void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
-  DCHECK(call->arguments()->length() == 1);
-  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
-  HValue* char_code = Pop();
-  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
-  return ast_context()->ReturnInstruction(result, call->id());
-}
-
-
 // Fast support for SubString.
 void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
   DCHECK_EQ(3, call->arguments()->length());
   CHECK_ALIVE(VisitExpressions(call->arguments()));
-  PushArgumentsFromEnvironment(call->arguments()->length());
   Callable callable = CodeFactory::SubString(isolate());
   HValue* stub = Add<HConstant>(callable.code());
-  HValue* values[] = {context()};
-  HInstruction* result =
-      New<HCallWithDescriptor>(stub, call->arguments()->length(),
-                               callable.descriptor(), ArrayVector(values));
+  HValue* to = Pop();
+  HValue* from = Pop();
+  HValue* string = Pop();
+  HValue* values[] = {context(), string, from, to};
+  HInstruction* result = New<HCallWithDescriptor>(
+      stub, 0, callable.descriptor(), ArrayVector(values));
   result->set_type(HType::String());
   return ast_context()->ReturnInstruction(result, call->id());
 }
@@ -12504,13 +12360,16 @@
 void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
   DCHECK_EQ(4, call->arguments()->length());
   CHECK_ALIVE(VisitExpressions(call->arguments()));
-  PushArgumentsFromEnvironment(call->arguments()->length());
   Callable callable = CodeFactory::RegExpExec(isolate());
+  HValue* last_match_info = Pop();
+  HValue* index = Pop();
+  HValue* subject = Pop();
+  HValue* regexp_object = Pop();
   HValue* stub = Add<HConstant>(callable.code());
-  HValue* values[] = {context()};
-  HInstruction* result =
-      New<HCallWithDescriptor>(stub, call->arguments()->length(),
-                               callable.descriptor(), ArrayVector(values));
+  HValue* values[] = {context(), regexp_object, subject, index,
+                      last_match_info};
+  HInstruction* result = New<HCallWithDescriptor>(
+      stub, 0, callable.descriptor(), ArrayVector(values));
   return ast_context()->ReturnInstruction(result, call->id());
 }
 
@@ -12554,7 +12413,7 @@
   DCHECK_EQ(1, call->arguments()->length());
   CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
   HValue* number = Pop();
-  HValue* result = BuildNumberToString(number, Type::Any());
+  HValue* result = BuildNumberToString(number, AstType::Any());
   return ast_context()->ReturnValue(result);
 }
 
@@ -13236,8 +13095,7 @@
         PrintIndent();
         std::ostringstream os;
         os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
-        if (graph->info()->is_tracking_positions() &&
-            instruction->has_position() && instruction->position().raw() != 0) {
+        if (instruction->has_position() && instruction->position().raw() != 0) {
           const SourcePosition pos = instruction->position();
           os << " pos:";
           if (pos.inlining_id() != 0) os << pos.inlining_id() << "_";
diff --git a/src/crankshaft/hydrogen.h b/src/crankshaft/hydrogen.h
index 931dd01..d2f1637 100644
--- a/src/crankshaft/hydrogen.h
+++ b/src/crankshaft/hydrogen.h
@@ -8,13 +8,15 @@
 #include "src/accessors.h"
 #include "src/allocation.h"
 #include "src/ast/ast-type-bounds.h"
+#include "src/ast/scopes.h"
 #include "src/bailout-reason.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/crankshaft/compilation-phase.h"
 #include "src/crankshaft/hydrogen-instructions.h"
 #include "src/globals.h"
 #include "src/parsing/parse-info.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -30,12 +32,11 @@
 class LAllocator;
 class LChunk;
 class LiveRange;
-class Scope;
 
 class HCompilationJob final : public CompilationJob {
  public:
   explicit HCompilationJob(Handle<JSFunction> function)
-      : CompilationJob(&info_, "Crankshaft"),
+      : CompilationJob(function->GetIsolate(), &info_, "Crankshaft"),
         zone_(function->GetIsolate()->allocator()),
         parse_info_(&zone_, function),
         info_(&parse_info_, function),
@@ -439,6 +440,13 @@
     return depends_on_empty_array_proto_elements_;
   }
 
+  void MarkDependsOnStringLengthOverflow() {
+    if (depends_on_string_length_overflow_) return;
+    info()->dependencies()->AssumePropertyCell(
+        isolate()->factory()->string_length_protector());
+    depends_on_string_length_overflow_ = true;
+  }
+
   bool has_uint32_instructions() {
     DCHECK(uint32_instructions_ == NULL || !uint32_instructions_->is_empty());
     return uint32_instructions_ != NULL;
@@ -514,6 +522,7 @@
   bool allow_code_motion_;
   bool use_optimistic_licm_;
   bool depends_on_empty_array_proto_elements_;
+  bool depends_on_string_length_overflow_;
   int type_change_checksum_;
   int maximum_environment_size_;
   int no_side_effects_scope_count_;
@@ -1056,14 +1065,16 @@
 class HGraphBuilder {
  public:
   explicit HGraphBuilder(CompilationInfo* info,
-                         CallInterfaceDescriptor descriptor)
+                         CallInterfaceDescriptor descriptor,
+                         bool track_positions)
       : info_(info),
         descriptor_(descriptor),
         graph_(NULL),
         current_block_(NULL),
         scope_(info->scope()),
         position_(SourcePosition::Unknown()),
-        start_position_(0) {}
+        start_position_(0),
+        track_positions_(track_positions) {}
   virtual ~HGraphBuilder() {}
 
   Scope* scope() const { return scope_; }
@@ -1395,7 +1406,7 @@
                                    ElementsKind to_kind,
                                    bool is_jsarray);
 
-  HValue* BuildNumberToString(HValue* object, Type* type);
+  HValue* BuildNumberToString(HValue* object, AstType* type);
   HValue* BuildToNumber(HValue* input);
   HValue* BuildToObject(HValue* receiver);
 
@@ -1499,8 +1510,8 @@
                         HValue** shift_amount);
 
   HValue* BuildBinaryOperation(Token::Value op, HValue* left, HValue* right,
-                               Type* left_type, Type* right_type,
-                               Type* result_type, Maybe<int> fixed_right_arg,
+                               AstType* left_type, AstType* right_type,
+                               AstType* result_type, Maybe<int> fixed_right_arg,
                                HAllocationMode allocation_mode,
                                BailoutId opt_id = BailoutId::None());
 
@@ -1513,8 +1524,8 @@
 
   HValue* AddLoadJSBuiltin(int context_index);
 
-  HValue* EnforceNumberType(HValue* number, Type* expected);
-  HValue* TruncateToNumber(HValue* value, Type** expected);
+  HValue* EnforceNumberType(HValue* number, AstType* expected);
+  HValue* TruncateToNumber(HValue* value, AstType** expected);
 
   void FinishExitWithHardDeoptimization(DeoptimizeReason reason);
 
@@ -1833,20 +1844,6 @@
                          HValue* length,
                          HValue* capacity);
 
-  HValue* BuildCloneShallowArrayCow(HValue* boilerplate,
-                                    HValue* allocation_site,
-                                    AllocationSiteMode mode,
-                                    ElementsKind kind);
-
-  HValue* BuildCloneShallowArrayEmpty(HValue* boilerplate,
-                                      HValue* allocation_site,
-                                      AllocationSiteMode mode);
-
-  HValue* BuildCloneShallowArrayNonEmpty(HValue* boilerplate,
-                                         HValue* allocation_site,
-                                         AllocationSiteMode mode,
-                                         ElementsKind kind);
-
   HValue* BuildElementIndexHash(HValue* index);
 
   void BuildCreateAllocationMemento(HValue* previous_object,
@@ -1859,7 +1856,7 @@
 
   HInstruction* BuildGetNativeContext(HValue* closure);
   HInstruction* BuildGetNativeContext();
-  HInstruction* BuildGetScriptContext(int context_index);
+
   // Builds a loop version if |depth| is specified or unrolls the loop to
   // |depth_value| iterations otherwise.
   HValue* BuildGetParentContext(HValue* depth, int depth_value);
@@ -1879,7 +1876,7 @@
   }
 
   void EnterInlinedSource(int start_position, int id) {
-    if (top_info()->is_tracking_positions()) {
+    if (is_tracking_positions()) {
       start_position_ = start_position;
       position_.set_inlining_id(id);
     }
@@ -1900,6 +1897,8 @@
   SourcePosition source_position() { return position_; }
   void set_source_position(SourcePosition position) { position_ = position; }
 
+  bool is_tracking_positions() { return track_positions_; }
+
   int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                            SourcePosition position);
 
@@ -1925,6 +1924,7 @@
   Scope* scope_;
   SourcePosition position_;
   int start_position_;
+  bool track_positions_;
 };
 
 template <>
@@ -2122,7 +2122,7 @@
     BreakAndContinueScope* next_;
   };
 
-  explicit HOptimizedGraphBuilder(CompilationInfo* info);
+  explicit HOptimizedGraphBuilder(CompilationInfo* info, bool track_positions);
 
   bool BuildGraph() override;
 
@@ -2214,7 +2214,6 @@
   F(IsJSProxy)                         \
   F(Call)                              \
   F(NewObject)                         \
-  F(StringCharFromCode)                \
   F(ToInteger)                         \
   F(ToObject)                          \
   F(ToString)                          \
@@ -2305,11 +2304,9 @@
                                                 int index,
                                                 HEnvironment* env) {
     if (!FLAG_analyze_environment_liveness) return false;
-    // |this| and |arguments| are always live; zapping parameters isn't
-    // safe because function.arguments can inspect them at any time.
-    return !var->is_this() &&
-           !var->is_arguments() &&
-           env->is_local_index(index);
+    // Zapping parameters isn't safe because function.arguments can inspect them
+    // at any time.
+    return env->is_local_index(index);
   }
   void BindIfLive(Variable* var, HValue* value) {
     HEnvironment* env = environment();
@@ -2706,8 +2703,8 @@
   };
 
   HControlInstruction* BuildCompareInstruction(
-      Token::Value op, HValue* left, HValue* right, Type* left_type,
-      Type* right_type, Type* combined_type, SourcePosition left_position,
+      Token::Value op, HValue* left, HValue* right, AstType* left_type,
+      AstType* right_type, AstType* combined_type, SourcePosition left_position,
       SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
       BailoutId bailout_id);
 
diff --git a/src/crankshaft/ia32/lithium-codegen-ia32.cc b/src/crankshaft/ia32/lithium-codegen-ia32.cc
index 2512e2b..6c121dd 100644
--- a/src/crankshaft/ia32/lithium-codegen-ia32.cc
+++ b/src/crankshaft/ia32/lithium-codegen-ia32.cc
@@ -164,7 +164,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is still in edi.
@@ -172,7 +172,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(edi);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2397,20 +2397,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ mov(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ mov(slot_register, Immediate(Smi::FromInt(index)));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(esi));
   DCHECK(ToRegister(instr->result()).is(eax));
@@ -3703,21 +3689,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(esi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ mov(StoreDescriptor::NameRegister(), instr->name());
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal;
   if (instr->index()->IsConstantOperand()) {
@@ -3877,21 +3848,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(esi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
   Register object = ToRegister(instr->object());
   Register temp = ToRegister(instr->temp());
@@ -4831,7 +4787,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -4874,7 +4830,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, temp, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/ia32/lithium-codegen-ia32.h b/src/crankshaft/ia32/lithium-codegen-ia32.h
index 38a493d..8e16d9c 100644
--- a/src/crankshaft/ia32/lithium-codegen-ia32.h
+++ b/src/crankshaft/ia32/lithium-codegen-ia32.h
@@ -294,8 +294,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   void EmitReturn(LReturn* instr);
 
diff --git a/src/crankshaft/ia32/lithium-ia32.cc b/src/crankshaft/ia32/lithium-ia32.cc
index 6794224..e6077cc 100644
--- a/src/crankshaft/ia32/lithium-ia32.cc
+++ b/src/crankshaft/ia32/lithium-ia32.cc
@@ -351,15 +351,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -392,15 +383,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -910,7 +892,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1054,6 +1036,10 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
+
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
   // Target
@@ -1061,15 +1047,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), esi);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2211,26 +2202,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), esi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result = new (zone())
-      LStoreKeyedGeneric(context, object, key, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2332,20 +2303,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), esi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, object, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), esi);
   LOperand* left = UseFixed(instr->left(), edx);
diff --git a/src/crankshaft/ia32/lithium-ia32.h b/src/crankshaft/ia32/lithium-ia32.h
index e525341..816d8fd 100644
--- a/src/crankshaft/ia32/lithium-ia32.h
+++ b/src/crankshaft/ia32/lithium-ia32.h
@@ -136,9 +136,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2022,32 +2020,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* obj, LOperand* key, LOperand* val,
@@ -2078,34 +2050,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 2> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/crankshaft/lithium-allocator.h b/src/crankshaft/lithium-allocator.h
index ce0e565..d28ad7f 100644
--- a/src/crankshaft/lithium-allocator.h
+++ b/src/crankshaft/lithium-allocator.h
@@ -9,7 +9,7 @@
 #include "src/base/compiler-specific.h"
 #include "src/crankshaft/compilation-phase.h"
 #include "src/crankshaft/lithium.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/lithium-codegen.cc b/src/crankshaft/lithium-codegen.cc
index 5041de6..decc2a5 100644
--- a/src/crankshaft/lithium-codegen.cc
+++ b/src/crankshaft/lithium-codegen.cc
@@ -66,6 +66,8 @@
       source_position_table_builder_(info->zone(),
                                      info->SourcePositionRecordingMode()) {}
 
+Isolate* LCodeGenBase::isolate() const { return info_->isolate(); }
+
 bool LCodeGenBase::GenerateBody() {
   DCHECK(is_generating());
   bool emit_instructions = true;
diff --git a/src/crankshaft/lithium-codegen.h b/src/crankshaft/lithium-codegen.h
index fbf9692..c6bf447 100644
--- a/src/crankshaft/lithium-codegen.h
+++ b/src/crankshaft/lithium-codegen.h
@@ -6,13 +6,13 @@
 #define V8_CRANKSHAFT_LITHIUM_CODEGEN_H_
 
 #include "src/bailout-reason.h"
-#include "src/compiler.h"
 #include "src/deoptimizer.h"
 #include "src/source-position-table.h"
 
 namespace v8 {
 namespace internal {
 
+class CompilationInfo;
 class HGraph;
 class LChunk;
 class LEnvironment;
@@ -29,7 +29,7 @@
   // Simple accessors.
   MacroAssembler* masm() const { return masm_; }
   CompilationInfo* info() const { return info_; }
-  Isolate* isolate() const { return info_->isolate(); }
+  Isolate* isolate() const;
   Factory* factory() const { return isolate()->factory(); }
   Heap* heap() const { return isolate()->heap(); }
   Zone* zone() const { return zone_; }
diff --git a/src/crankshaft/lithium.h b/src/crankshaft/lithium.h
index a2c0283..d04bd56 100644
--- a/src/crankshaft/lithium.h
+++ b/src/crankshaft/lithium.h
@@ -12,7 +12,7 @@
 #include "src/crankshaft/compilation-phase.h"
 #include "src/crankshaft/hydrogen.h"
 #include "src/safepoint-table.h"
-#include "src/zone-allocator.h"
+#include "src/zone/zone-allocator.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/mips/lithium-codegen-mips.cc b/src/crankshaft/mips/lithium-codegen-mips.cc
index 6be0d13..b24b1c5 100644
--- a/src/crankshaft/mips/lithium-codegen-mips.cc
+++ b/src/crankshaft/mips/lithium-codegen-mips.cc
@@ -171,7 +171,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in a1.
@@ -179,7 +179,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(a1);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2499,20 +2499,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ li(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ li(slot_register, Operand(Smi::FromInt(index)));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).is(v0));
@@ -3448,7 +3434,9 @@
   // Math.sqrt(-Infinity) == NaN
   Label done;
   __ Move(temp, static_cast<double>(-V8_INFINITY));
+  // Set up Infinity.
   __ Neg_d(result, temp);
+  // result is overwritten if the branch is not taken.
   __ BranchF(&done, NULL, eq, temp, input);
 
   // Add +0 to convert -0 to +0.
@@ -3800,21 +3788,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ li(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Condition cc = instr->hydrogen()->allow_equality() ? hi : hs;
   Operand operand(0);
@@ -4025,21 +3998,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5042,7 +5000,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5145,7 +5103,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/mips/lithium-codegen-mips.h b/src/crankshaft/mips/lithium-codegen-mips.h
index d51f62c..bb09abc 100644
--- a/src/crankshaft/mips/lithium-codegen-mips.h
+++ b/src/crankshaft/mips/lithium-codegen-mips.h
@@ -340,8 +340,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
   Scope* const scope_;
diff --git a/src/crankshaft/mips/lithium-mips.cc b/src/crankshaft/mips/lithium-mips.cc
index a7880ee..5533b8f 100644
--- a/src/crankshaft/mips/lithium-mips.cc
+++ b/src/crankshaft/mips/lithium-mips.cc
@@ -311,15 +311,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -352,15 +343,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -887,7 +869,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1024,6 +1006,9 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -1032,15 +1017,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2127,26 +2117,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result =
-      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2223,20 +2193,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), a1);
diff --git a/src/crankshaft/mips/lithium-mips.h b/src/crankshaft/mips/lithium-mips.h
index 9711c9a..f49fb93 100644
--- a/src/crankshaft/mips/lithium-mips.h
+++ b/src/crankshaft/mips/lithium-mips.h
@@ -131,9 +131,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -1969,33 +1967,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -2026,34 +1997,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 1> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/crankshaft/mips64/lithium-codegen-mips64.cc b/src/crankshaft/mips64/lithium-codegen-mips64.cc
index 924f552..5f93e55 100644
--- a/src/crankshaft/mips64/lithium-codegen-mips64.cc
+++ b/src/crankshaft/mips64/lithium-codegen-mips64.cc
@@ -147,7 +147,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in a1.
@@ -155,7 +155,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(a1);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2623,20 +2623,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ li(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ li(slot_register, Operand(Smi::FromInt(index)));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).is(v0));
@@ -3655,7 +3641,9 @@
   // Math.sqrt(-Infinity) == NaN
   Label done;
   __ Move(temp, static_cast<double>(-V8_INFINITY));
+  // Set up Infinity.
   __ Neg_d(result, temp);
+  // result is overwritten if the branch is not taken.
   __ BranchF(&done, NULL, eq, temp, input);
 
   // Add +0 to convert -0 to +0.
@@ -4013,21 +4001,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ li(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Condition cc = instr->hydrogen()->allow_equality() ? hi : hs;
   Operand operand((int64_t)0);
@@ -4260,21 +4233,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5248,7 +5206,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5353,7 +5311,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/mips64/lithium-codegen-mips64.h b/src/crankshaft/mips64/lithium-codegen-mips64.h
index 41d8b2c..aaa2e6b 100644
--- a/src/crankshaft/mips64/lithium-codegen-mips64.h
+++ b/src/crankshaft/mips64/lithium-codegen-mips64.h
@@ -343,8 +343,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
   Scope* const scope_;
diff --git a/src/crankshaft/mips64/lithium-mips64.cc b/src/crankshaft/mips64/lithium-mips64.cc
index 922f12a..0855754 100644
--- a/src/crankshaft/mips64/lithium-mips64.cc
+++ b/src/crankshaft/mips64/lithium-mips64.cc
@@ -311,15 +311,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -352,15 +343,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -887,7 +869,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1024,6 +1006,9 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -1032,15 +1017,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2132,26 +2122,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result =
-      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2228,20 +2198,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), a1);
diff --git a/src/crankshaft/mips64/lithium-mips64.h b/src/crankshaft/mips64/lithium-mips64.h
index f8b5c48..7bc89af 100644
--- a/src/crankshaft/mips64/lithium-mips64.h
+++ b/src/crankshaft/mips64/lithium-mips64.h
@@ -133,9 +133,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2015,33 +2013,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -2072,34 +2043,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 1> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/crankshaft/ppc/lithium-codegen-ppc.cc b/src/crankshaft/ppc/lithium-codegen-ppc.cc
index e1203b8..95018e8 100644
--- a/src/crankshaft/ppc/lithium-codegen-ppc.cc
+++ b/src/crankshaft/ppc/lithium-codegen-ppc.cc
@@ -158,7 +158,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in r4.
@@ -166,7 +166,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(r4);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2677,20 +2677,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ Move(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ LoadSmiLiteral(slot_register, Smi::FromInt(index));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).is(r3));
@@ -4085,21 +4071,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Representation representation = instr->hydrogen()->length()->representation();
   DCHECK(representation.Equals(instr->hydrogen()->index()->representation()));
@@ -4344,21 +4315,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5324,7 +5280,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5430,7 +5386,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/ppc/lithium-codegen-ppc.h b/src/crankshaft/ppc/lithium-codegen-ppc.h
index fe212d4..a4a90a7 100644
--- a/src/crankshaft/ppc/lithium-codegen-ppc.h
+++ b/src/crankshaft/ppc/lithium-codegen-ppc.h
@@ -277,8 +277,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
   Scope* const scope_;
diff --git a/src/crankshaft/ppc/lithium-ppc.cc b/src/crankshaft/ppc/lithium-ppc.cc
index 958620c..738cf23 100644
--- a/src/crankshaft/ppc/lithium-ppc.cc
+++ b/src/crankshaft/ppc/lithium-ppc.cc
@@ -317,15 +317,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -358,15 +349,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -892,7 +874,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1030,6 +1012,9 @@
 
 LInstruction* LChunkBuilder::DoCallWithDescriptor(HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -1038,15 +1023,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result =
       new (zone()) LCallWithDescriptor(descriptor, ops, zone());
@@ -2150,26 +2140,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result =
-      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2245,19 +2215,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), r4);
diff --git a/src/crankshaft/ppc/lithium-ppc.h b/src/crankshaft/ppc/lithium-ppc.h
index f26bfc5..626f00a 100644
--- a/src/crankshaft/ppc/lithium-ppc.h
+++ b/src/crankshaft/ppc/lithium-ppc.h
@@ -134,9 +134,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -1954,33 +1952,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -2015,34 +1986,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 1> {
  public:
   LTransitionElementsKind(LOperand* object, LOperand* context,
diff --git a/src/crankshaft/s390/lithium-codegen-s390.cc b/src/crankshaft/s390/lithium-codegen-s390.cc
index ec2a85a..4511bb9 100644
--- a/src/crankshaft/s390/lithium-codegen-s390.cc
+++ b/src/crankshaft/s390/lithium-codegen-s390.cc
@@ -66,8 +66,8 @@
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator save_iterator(doubles);
   while (!save_iterator.Done()) {
-    __ std(DoubleRegister::from_code(save_iterator.Current()),
-           MemOperand(sp, count * kDoubleSize));
+    __ StoreDouble(DoubleRegister::from_code(save_iterator.Current()),
+                   MemOperand(sp, count * kDoubleSize));
     save_iterator.Advance();
     count++;
   }
@@ -81,8 +81,8 @@
   BitVector::Iterator save_iterator(doubles);
   int count = 0;
   while (!save_iterator.Done()) {
-    __ ld(DoubleRegister::from_code(save_iterator.Current()),
-          MemOperand(sp, count * kDoubleSize));
+    __ LoadDouble(DoubleRegister::from_code(save_iterator.Current()),
+                  MemOperand(sp, count * kDoubleSize));
     save_iterator.Advance();
     count++;
   }
@@ -148,7 +148,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info()->scope()->num_heap_slots() > 0) {
+  if (info()->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is in r3.
@@ -156,7 +156,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(r3);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2089,7 +2089,8 @@
       EmitBranch(instr, al);
     } else if (type.IsHeapNumber()) {
       DCHECK(!info()->IsStub());
-      __ ld(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
+      __ LoadDouble(dbl_scratch,
+                    FieldMemOperand(reg, HeapNumber::kValueOffset));
       // Test the double value. Zero and NaN are false.
       __ lzdr(kDoubleRegZero);
       __ cdbr(dbl_scratch, kDoubleRegZero);
@@ -2652,19 +2653,6 @@
   __ LoadSmiLiteral(slot_register, Smi::FromInt(index));
 }
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ Move(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ LoadSmiLiteral(slot_register, Smi::FromInt(index));
-}
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   DCHECK(ToRegister(instr->result()).is(r2));
@@ -2739,7 +2727,7 @@
   if (instr->hydrogen()->representation().IsDouble()) {
     DCHECK(access.IsInobject());
     DoubleRegister result = ToDoubleRegister(instr->result());
-    __ ld(result, FieldMemOperand(object, offset));
+    __ LoadDouble(result, FieldMemOperand(object, offset));
     return;
   }
 
@@ -2889,9 +2877,10 @@
       }
     } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
       if (!use_scratch) {
-        __ ld(result, MemOperand(external_pointer, base_offset));
+        __ LoadDouble(result, MemOperand(external_pointer, base_offset));
       } else {
-        __ ld(result, MemOperand(scratch0(), external_pointer, base_offset));
+        __ LoadDouble(result,
+                      MemOperand(scratch0(), external_pointer, base_offset));
       }
     }
   } else {
@@ -2986,9 +2975,9 @@
   }
 
   if (!use_scratch) {
-    __ ld(result, MemOperand(elements, base_offset));
+    __ LoadDouble(result, MemOperand(elements, base_offset));
   } else {
-    __ ld(result, MemOperand(scratch, elements, base_offset));
+    __ LoadDouble(result, MemOperand(scratch, elements, base_offset));
   }
 
   if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -3919,7 +3908,7 @@
     DCHECK(!hinstr->NeedsWriteBarrier());
     DoubleRegister value = ToDoubleRegister(instr->value());
     DCHECK(offset >= 0);
-    __ std(value, FieldMemOperand(object, offset));
+    __ StoreDouble(value, FieldMemOperand(object, offset));
     return;
   }
 
@@ -3944,7 +3933,7 @@
   if (FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
     DoubleRegister value = ToDoubleRegister(instr->value());
-    __ std(value, FieldMemOperand(object, offset));
+    __ StoreDouble(value, FieldMemOperand(object, offset));
     if (hinstr->NeedsWriteBarrier()) {
       record_value = ToRegister(instr->value());
     }
@@ -3984,20 +3973,6 @@
   }
 }
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ mov(StoreDescriptor::NameRegister(), Operand(instr->name()));
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Representation representation = instr->hydrogen()->length()->representation();
   DCHECK(representation.Equals(instr->hydrogen()->index()->representation()));
@@ -4187,14 +4162,15 @@
     __ CanonicalizeNaN(double_scratch, value);
     DCHECK(address_offset >= 0);
     if (use_scratch)
-      __ std(double_scratch, MemOperand(scratch, elements, address_offset));
+      __ StoreDouble(double_scratch,
+                     MemOperand(scratch, elements, address_offset));
     else
-      __ std(double_scratch, MemOperand(elements, address_offset));
+      __ StoreDouble(double_scratch, MemOperand(elements, address_offset));
   } else {
     if (use_scratch)
-      __ std(value, MemOperand(scratch, elements, address_offset));
+      __ StoreDouble(value, MemOperand(scratch, elements, address_offset));
     else
-      __ std(value, MemOperand(elements, address_offset));
+      __ StoreDouble(value, MemOperand(elements, address_offset));
   }
 }
 
@@ -4286,20 +4262,6 @@
   }
 }
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(cp));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -4789,7 +4751,8 @@
       DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber);
     }
     // load heap number
-    __ ld(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+    __ LoadDouble(result_reg,
+                  FieldMemOperand(input_reg, HeapNumber::kValueOffset));
     if (deoptimize_on_minus_zero) {
       __ TestDoubleIsMinusZero(result_reg, scratch, ip);
       DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero);
@@ -4801,7 +4764,8 @@
       __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
       DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined);
       __ LoadRoot(scratch, Heap::kNanValueRootIndex);
-      __ ld(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset));
+      __ LoadDouble(result_reg,
+                    FieldMemOperand(scratch, HeapNumber::kValueOffset));
       __ b(&done, Label::kNear);
     }
   } else {
@@ -4862,8 +4826,8 @@
     // Deoptimize if we don't have a heap number.
     DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber);
 
-    __ ld(double_scratch2,
-          FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+    __ LoadDouble(double_scratch2,
+                  FieldMemOperand(input_reg, HeapNumber::kValueOffset));
     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
       // preserve heap number pointer in scratch2 for minus zero check below
       __ LoadRR(scratch2, input_reg);
@@ -5177,7 +5141,7 @@
 
   // Heap number
   __ bind(&heap_number);
-  __ ld(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+  __ LoadDouble(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
   __ ClampDoubleToUint8(result_reg, temp_reg, double_scratch0());
   __ b(&done, Label::kNear);
 
@@ -5224,7 +5188,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5337,7 +5301,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, scratch1, scratch2, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/s390/lithium-codegen-s390.h b/src/crankshaft/s390/lithium-codegen-s390.h
index e5df255..30e9d2b 100644
--- a/src/crankshaft/s390/lithium-codegen-s390.h
+++ b/src/crankshaft/s390/lithium-codegen-s390.h
@@ -276,8 +276,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
   Scope* const scope_;
diff --git a/src/crankshaft/s390/lithium-s390.cc b/src/crankshaft/s390/lithium-s390.cc
index 3048e4c..bf9dfd5 100644
--- a/src/crankshaft/s390/lithium-s390.cc
+++ b/src/crankshaft/s390/lithium-s390.cc
@@ -287,14 +287,6 @@
   value()->PrintTo(stream);
 }
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -325,14 +317,6 @@
   }
 }
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -815,7 +799,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -937,6 +921,9 @@
 
 LInstruction* LChunkBuilder::DoCallWithDescriptor(HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -945,15 +932,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), cp);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result =
       new (zone()) LCallWithDescriptor(descriptor, ops, zone());
@@ -1968,25 +1960,6 @@
   return new (zone()) LStoreKeyed(backing_store, key, val, backing_store_owner);
 }
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result =
-      new (zone()) LStoreKeyedGeneric(context, obj, key, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2058,18 +2031,6 @@
   return new (zone()) LStoreNamedField(obj, val, temp);
 }
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), cp);
-  LOperand* obj =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, obj, val, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), cp);
   LOperand* left = UseFixed(instr->left(), r3);
diff --git a/src/crankshaft/s390/lithium-s390.h b/src/crankshaft/s390/lithium-s390.h
index 1f1e520..70670ac 100644
--- a/src/crankshaft/s390/lithium-s390.h
+++ b/src/crankshaft/s390/lithium-s390.h
@@ -132,9 +132,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -1822,32 +1820,6 @@
   }
 };
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -1881,33 +1853,6 @@
   uint32_t base_offset() const { return hydrogen()->base_offset(); }
 };
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 1> {
  public:
   LTransitionElementsKind(LOperand* object, LOperand* context,
diff --git a/src/crankshaft/typing.cc b/src/crankshaft/typing.cc
index 5961838..d2b56e2 100644
--- a/src/crankshaft/typing.cc
+++ b/src/crankshaft/typing.cc
@@ -4,11 +4,12 @@
 
 #include "src/crankshaft/typing.h"
 
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
-#include "src/frames.h"
+#include "src/ast/variables.h"
 #include "src/frames-inl.h"
+#include "src/frames.h"
 #include "src/ostreams.h"
-#include "src/parsing/parser.h"  // for CompileTimeValue; TODO(rossberg): move
 #include "src/splay-tree-inl.h"
 
 namespace v8 {
@@ -33,20 +34,20 @@
 
 
 #ifdef OBJECT_PRINT
-  static void PrintObserved(Variable* var, Object* value, Type* type) {
-    OFStream os(stdout);
-    os << "  observed " << (var->IsParameter() ? "param" : "local") << "  ";
-    var->name()->Print(os);
-    os << " : " << Brief(value) << " -> ";
-    type->PrintTo(os);
-    os << std::endl;
+static void PrintObserved(Variable* var, Object* value, AstType* type) {
+  OFStream os(stdout);
+  os << "  observed " << (var->IsParameter() ? "param" : "local") << "  ";
+  var->name()->Print(os);
+  os << " : " << Brief(value) << " -> ";
+  type->PrintTo(os);
+  os << std::endl;
   }
 #endif  // OBJECT_PRINT
 
 
 Effect AstTyper::ObservedOnStack(Object* value) {
-  Type* lower = Type::NowOf(value, zone());
-  return Effect(Bounds(lower, Type::Any()));
+  AstType* lower = AstType::NowOf(value, zone());
+  return Effect(AstBounds(lower, AstType::Any()));
 }
 
 
@@ -84,15 +85,16 @@
                     store_.LookupBounds(parameter_index(i)).lower);
     }
 
-    ZoneList<Variable*> local_vars(locals, zone());
-    ZoneList<Variable*> context_vars(scope_->ContextLocalCount(), zone());
-    ZoneList<Variable*> global_vars(scope_->ContextGlobalCount(), zone());
-    scope_->CollectStackAndContextLocals(&local_vars, &context_vars,
-                                         &global_vars);
-    for (int i = 0; i < locals; i++) {
-      PrintObserved(local_vars.at(i),
-                    frame->GetExpression(i),
-                    store_.LookupBounds(stack_local_index(i)).lower);
+    ZoneList<Variable*>* local_vars = scope_->locals();
+    int local_index = 0;
+    for (int i = 0; i < local_vars->length(); i++) {
+      Variable* var = local_vars->at(i);
+      if (var->IsStackLocal()) {
+        PrintObserved(
+            var, frame->GetExpression(local_index),
+            store_.LookupBounds(stack_local_index(local_index)).lower);
+        local_index++;
+      }
     }
   }
 #endif  // OBJECT_PRINT
@@ -205,11 +207,12 @@
     if (!clause->is_default()) {
       Expression* label = clause->label();
       // Collect type feedback.
-      Type* tag_type;
-      Type* label_type;
-      Type* combined_type;
+      AstType* tag_type;
+      AstType* label_type;
+      AstType* combined_type;
       oracle()->CompareType(clause->CompareId(),
-                            &tag_type, &label_type, &combined_type);
+                            clause->CompareOperationFeedbackSlot(), &tag_type,
+                            &label_type, &combined_type);
       NarrowLowerType(stmt->tag(), tag_type);
       NarrowLowerType(label, label_type);
       clause->set_compare_type(combined_type);
@@ -366,8 +369,8 @@
   store_.Seq(then_effects);
 
   NarrowType(expr,
-             Bounds::Either(bounds_->get(expr->then_expression()),
-                            bounds_->get(expr->else_expression()), zone()));
+             AstBounds::Either(bounds_->get(expr->then_expression()),
+                               bounds_->get(expr->else_expression()), zone()));
 }
 
 
@@ -380,14 +383,14 @@
 
 
 void AstTyper::VisitLiteral(Literal* expr) {
-  Type* type = Type::Constant(expr->value(), zone());
-  NarrowType(expr, Bounds(type));
+  AstType* type = AstType::Constant(expr->value(), zone());
+  NarrowType(expr, AstBounds(type));
 }
 
 
 void AstTyper::VisitRegExpLiteral(RegExpLiteral* expr) {
   // TODO(rossberg): Reintroduce RegExp type.
-  NarrowType(expr, Bounds(Type::Object()));
+  NarrowType(expr, AstBounds(AstType::Object()));
 }
 
 
@@ -415,7 +418,7 @@
     RECURSE(Visit(prop->value()));
   }
 
-  NarrowType(expr, Bounds(Type::Object()));
+  NarrowType(expr, AstBounds(AstType::Object()));
 }
 
 
@@ -426,7 +429,7 @@
     RECURSE(Visit(value));
   }
 
-  NarrowType(expr, Bounds(Type::Object()));
+  NarrowType(expr, AstBounds(AstType::Object()));
 }
 
 
@@ -479,7 +482,7 @@
   RECURSE(Visit(expr->exception()));
   // TODO(rossberg): is it worth having a non-termination effect?
 
-  NarrowType(expr, Bounds(Type::None()));
+  NarrowType(expr, AstBounds(AstType::None()));
 }
 
 
@@ -562,7 +565,7 @@
     RECURSE(Visit(arg));
   }
 
-  NarrowType(expr, Bounds(Type::None(), Type::Receiver()));
+  NarrowType(expr, AstBounds(AstType::None(), AstType::Receiver()));
 }
 
 
@@ -589,13 +592,13 @@
   switch (expr->op()) {
     case Token::NOT:
     case Token::DELETE:
-      NarrowType(expr, Bounds(Type::Boolean()));
+      NarrowType(expr, AstBounds(AstType::Boolean()));
       break;
     case Token::VOID:
-      NarrowType(expr, Bounds(Type::Undefined()));
+      NarrowType(expr, AstBounds(AstType::Undefined()));
       break;
     case Token::TYPEOF:
-      NarrowType(expr, Bounds(Type::InternalizedString()));
+      NarrowType(expr, AstBounds(AstType::InternalizedString()));
       break;
     default:
       UNREACHABLE();
@@ -612,12 +615,13 @@
   oracle()->CountReceiverTypes(slot, expr->GetReceiverTypes());
   expr->set_store_mode(store_mode);
   expr->set_key_type(key_type);
-  expr->set_type(oracle()->CountType(expr->CountBinOpFeedbackId()));
+  expr->set_type(oracle()->CountType(expr->CountBinOpFeedbackId(),
+                                     expr->CountBinaryOpFeedbackSlot()));
   // TODO(rossberg): merge the count type with the generic expression type.
 
   RECURSE(Visit(expr->expression()));
 
-  NarrowType(expr, Bounds(Type::SignedSmall(), Type::Number()));
+  NarrowType(expr, AstBounds(AstType::SignedSmall(), AstType::Number()));
 
   VariableProxy* proxy = expr->expression()->AsVariableProxy();
   if (proxy != NULL && proxy->var()->IsStackAllocated()) {
@@ -625,17 +629,18 @@
   }
 }
 
-
 void AstTyper::VisitBinaryOperation(BinaryOperation* expr) {
   // Collect type feedback.
-  Type* type;
-  Type* left_type;
-  Type* right_type;
+  AstType* type;
+  AstType* left_type;
+  AstType* right_type;
   Maybe<int> fixed_right_arg = Nothing<int>();
   Handle<AllocationSite> allocation_site;
   oracle()->BinaryType(expr->BinaryOperationFeedbackId(),
-      &left_type, &right_type, &type, &fixed_right_arg,
-      &allocation_site, expr->op());
+                       expr->BinaryOperationFeedbackSlot(), &left_type,
+                       &right_type, &type, &fixed_right_arg, &allocation_site,
+                       expr->op());
+
   NarrowLowerType(expr, type);
   NarrowLowerType(expr->left(), left_type);
   NarrowLowerType(expr->right(), right_type);
@@ -662,19 +667,21 @@
       left_effects.Alt(right_effects);
       store_.Seq(left_effects);
 
-      NarrowType(expr, Bounds::Either(bounds_->get(expr->left()),
-                                      bounds_->get(expr->right()), zone()));
+      NarrowType(expr, AstBounds::Either(bounds_->get(expr->left()),
+                                         bounds_->get(expr->right()), zone()));
       break;
     }
     case Token::BIT_OR:
     case Token::BIT_AND: {
       RECURSE(Visit(expr->left()));
       RECURSE(Visit(expr->right()));
-      Type* upper = Type::Union(bounds_->get(expr->left()).upper,
-                                bounds_->get(expr->right()).upper, zone());
-      if (!upper->Is(Type::Signed32())) upper = Type::Signed32();
-      Type* lower = Type::Intersect(Type::SignedSmall(), upper, zone());
-      NarrowType(expr, Bounds(lower, upper));
+      AstType* upper =
+          AstType::Union(bounds_->get(expr->left()).upper,
+                         bounds_->get(expr->right()).upper, zone());
+      if (!upper->Is(AstType::Signed32())) upper = AstType::Signed32();
+      AstType* lower =
+          AstType::Intersect(AstType::SignedSmall(), upper, zone());
+      NarrowType(expr, AstBounds(lower, upper));
       break;
     }
     case Token::BIT_XOR:
@@ -682,7 +689,7 @@
     case Token::SAR:
       RECURSE(Visit(expr->left()));
       RECURSE(Visit(expr->right()));
-      NarrowType(expr, Bounds(Type::SignedSmall(), Type::Signed32()));
+      NarrowType(expr, AstBounds(AstType::SignedSmall(), AstType::Signed32()));
       break;
     case Token::SHR:
       RECURSE(Visit(expr->left()));
@@ -690,28 +697,29 @@
       // TODO(rossberg): The upper bound would be Unsigned32, but since there
       // is no 'positive Smi' type for the lower bound, we use the smallest
       // union of Smi and Unsigned32 as upper bound instead.
-      NarrowType(expr, Bounds(Type::SignedSmall(), Type::Number()));
+      NarrowType(expr, AstBounds(AstType::SignedSmall(), AstType::Number()));
       break;
     case Token::ADD: {
       RECURSE(Visit(expr->left()));
       RECURSE(Visit(expr->right()));
-      Bounds l = bounds_->get(expr->left());
-      Bounds r = bounds_->get(expr->right());
-      Type* lower =
+      AstBounds l = bounds_->get(expr->left());
+      AstBounds r = bounds_->get(expr->right());
+      AstType* lower =
           !l.lower->IsInhabited() || !r.lower->IsInhabited()
-              ? Type::None()
-              : l.lower->Is(Type::String()) || r.lower->Is(Type::String())
-                    ? Type::String()
-                    : l.lower->Is(Type::Number()) && r.lower->Is(Type::Number())
-                          ? Type::SignedSmall()
-                          : Type::None();
-      Type* upper =
-          l.upper->Is(Type::String()) || r.upper->Is(Type::String())
-              ? Type::String()
-              : l.upper->Is(Type::Number()) && r.upper->Is(Type::Number())
-                    ? Type::Number()
-                    : Type::NumberOrString();
-      NarrowType(expr, Bounds(lower, upper));
+              ? AstType::None()
+              : l.lower->Is(AstType::String()) || r.lower->Is(AstType::String())
+                    ? AstType::String()
+                    : l.lower->Is(AstType::Number()) &&
+                              r.lower->Is(AstType::Number())
+                          ? AstType::SignedSmall()
+                          : AstType::None();
+      AstType* upper =
+          l.upper->Is(AstType::String()) || r.upper->Is(AstType::String())
+              ? AstType::String()
+              : l.upper->Is(AstType::Number()) && r.upper->Is(AstType::Number())
+                    ? AstType::Number()
+                    : AstType::NumberOrString();
+      NarrowType(expr, AstBounds(lower, upper));
       break;
     }
     case Token::SUB:
@@ -720,7 +728,7 @@
     case Token::MOD:
       RECURSE(Visit(expr->left()));
       RECURSE(Visit(expr->right()));
-      NarrowType(expr, Bounds(Type::SignedSmall(), Type::Number()));
+      NarrowType(expr, AstBounds(AstType::SignedSmall(), AstType::Number()));
       break;
     default:
       UNREACHABLE();
@@ -730,11 +738,12 @@
 
 void AstTyper::VisitCompareOperation(CompareOperation* expr) {
   // Collect type feedback.
-  Type* left_type;
-  Type* right_type;
-  Type* combined_type;
+  AstType* left_type;
+  AstType* right_type;
+  AstType* combined_type;
   oracle()->CompareType(expr->CompareOperationFeedbackId(),
-      &left_type, &right_type, &combined_type);
+                        expr->CompareOperationFeedbackSlot(), &left_type,
+                        &right_type, &combined_type);
   NarrowLowerType(expr->left(), left_type);
   NarrowLowerType(expr->right(), right_type);
   expr->set_combined_type(combined_type);
@@ -742,7 +751,7 @@
   RECURSE(Visit(expr->left()));
   RECURSE(Visit(expr->right()));
 
-  NarrowType(expr, Bounds(Type::Boolean()));
+  NarrowType(expr, AstBounds(AstType::Boolean()));
 }
 
 
@@ -767,6 +776,14 @@
   Visit(expr->expression());
 }
 
+int AstTyper::variable_index(Variable* var) {
+  // Stack locals have the range [0 .. l]
+  // Parameters have the range [-1 .. p]
+  // We map this to [-p-2 .. -1, 0 .. l]
+  return var->IsStackLocal()
+             ? stack_local_index(var->index())
+             : var->IsParameter() ? parameter_index(var->index()) : kNoVar;
+}
 
 void AstTyper::VisitDeclarations(ZoneList<Declaration*>* decls) {
   for (int i = 0; i < decls->length(); ++i) {
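
The variable_index() body moved out of the header in the hunk above folds two index spaces into one. A standalone sketch of that mapping follows; the function names and the [-p-2 .. -1] / [0 .. l] ranges mirror the comment in the patch, while the kNoVar value and the demo driver are illustrative only, not V8's.

    #include <climits>
    #include <cstdio>

    namespace {

    const int kNoVar = INT_MIN;  // sentinel for "not tracked"; value assumed for the demo

    // Same shape as the AstTyper helpers above: stack locals keep their
    // non-negative slot index, parameters (whose raw index starts at -1) are
    // shifted down into [-p-2 .. -1], so both kinds can share one flat table.
    int parameter_index(int index) { return -index - 2; }
    int stack_local_index(int index) { return index; }

    int variable_index(bool is_stack_local, bool is_parameter, int index) {
      return is_stack_local ? stack_local_index(index)
                            : is_parameter ? parameter_index(index) : kNoVar;
    }

    }  // namespace

    int main() {
      for (int i = -1; i <= 1; ++i)
        std::printf("parameter   raw %2d -> %d\n", i, variable_index(false, true, i));
      for (int i = 0; i <= 2; ++i)
        std::printf("stack local raw %2d -> %d\n", i, variable_index(true, false, i));
      std::printf("other variable -> %d\n", variable_index(false, false, 0));
      return 0;
    }
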
diff --git a/src/crankshaft/typing.h b/src/crankshaft/typing.h
index 94340c5..eb88634 100644
--- a/src/crankshaft/typing.h
+++ b/src/crankshaft/typing.h
@@ -7,16 +7,18 @@
 
 #include "src/allocation.h"
 #include "src/ast/ast-type-bounds.h"
-#include "src/ast/scopes.h"
+#include "src/ast/ast-types.h"
+#include "src/ast/ast.h"
 #include "src/ast/variables.h"
 #include "src/effects.h"
 #include "src/type-info.h"
-#include "src/types.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
 
+class DeclarationScope;
+class Isolate;
 class FunctionLiteral;
 
 class AstTyper final : public AstVisitor<AstTyper> {
@@ -49,11 +51,11 @@
   Zone* zone() const { return zone_; }
   TypeFeedbackOracle* oracle() { return &oracle_; }
 
-  void NarrowType(Expression* e, Bounds b) {
-    bounds_->set(e, Bounds::Both(bounds_->get(e), b, zone()));
+  void NarrowType(Expression* e, AstBounds b) {
+    bounds_->set(e, AstBounds::Both(bounds_->get(e), b, zone()));
   }
-  void NarrowLowerType(Expression* e, Type* t) {
-    bounds_->set(e, Bounds::NarrowLower(bounds_->get(e), t, zone()));
+  void NarrowLowerType(Expression* e, AstType* t) {
+    bounds_->set(e, AstBounds::NarrowLower(bounds_->get(e), t, zone()));
   }
 
   Effects EnterEffects() {
@@ -65,13 +67,7 @@
   int parameter_index(int index) { return -index - 2; }
   int stack_local_index(int index) { return index; }
 
-  int variable_index(Variable* var) {
-    // Stack locals have the range [0 .. l]
-    // Parameters have the range [-1 .. p]
-    // We map this to [-p-2 .. -1, 0 .. l]
-    return var->IsStackLocal() ? stack_local_index(var->index()) :
-           var->IsParameter() ? parameter_index(var->index()) : kNoVar;
-  }
+  int variable_index(Variable* var);
 
   void VisitDeclarations(ZoneList<Declaration*>* declarations);
   void VisitStatements(ZoneList<Statement*>* statements);
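
NarrowType/NarrowLowerType above maintain a (lower, upper) pair of type bounds per expression. A generic sketch of that kind of bounds narrowing over a small bitset lattice — the lattice, the Both rule, and the feedback values here are my own illustration, not a claim about AstBounds::Both's exact definition:

    #include <cstdint>
    #include <cstdio>

    // Types as bitsets of primitive tags; subtyping is bitset inclusion.
    enum : uint32_t {
      kNone = 0,
      kSignedSmall = 1 << 0,
      kOtherNumber = 1 << 1,
      kString = 1 << 2,
      kNumber = kSignedSmall | kOtherNumber,
      kAny = kNumber | kString
    };

    struct Bounds {
      uint32_t lower;  // what the value is guaranteed to be (so far)
      uint32_t upper;  // what the value can at most be
    };

    // One common way to combine two bounds on the same expression: intersect
    // the uppers (keep only what both allow), join the lowers (keep every
    // guarantee), and clamp the lower inside the upper.
    Bounds Both(Bounds a, Bounds b) {
      uint32_t upper = a.upper & b.upper;
      uint32_t lower = (a.lower | b.lower) & upper;
      return {lower, upper};
    }

    int main() {
      Bounds from_feedback{kSignedSmall, kNumber};  // e.g. an op seen only on small ints
      Bounds from_context{kNone, kAny};             // nothing known statically yet
      Bounds narrowed = Both(from_feedback, from_context);
      std::printf("lower=%#x upper=%#x\n", narrowed.lower, narrowed.upper);
      return 0;
    }
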
diff --git a/src/crankshaft/unique.h b/src/crankshaft/unique.h
index 54abfa7..4c6a097 100644
--- a/src/crankshaft/unique.h
+++ b/src/crankshaft/unique.h
@@ -11,7 +11,7 @@
 #include "src/base/functional.h"
 #include "src/handles.h"
 #include "src/utils.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/crankshaft/x64/lithium-codegen-x64.cc b/src/crankshaft/x64/lithium-codegen-x64.cc
index 66046a4..50e2aa0 100644
--- a/src/crankshaft/x64/lithium-codegen-x64.cc
+++ b/src/crankshaft/x64/lithium-codegen-x64.cc
@@ -167,7 +167,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is still in rdi.
@@ -175,7 +175,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ Push(rdi);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2539,20 +2539,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ Move(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ Move(slot_register, Smi::FromInt(index));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(rsi));
   DCHECK(ToRegister(instr->result()).is(rax));
@@ -3902,21 +3888,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(rsi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ Move(StoreDescriptor::NameRegister(), instr->hydrogen()->name());
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Representation representation = instr->hydrogen()->length()->representation();
   DCHECK(representation.Equals(instr->hydrogen()->index()->representation()));
@@ -4158,21 +4129,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(rsi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoMaybeGrowElements(LMaybeGrowElements* instr) {
   class DeferredMaybeGrowElements final : public LDeferredCode {
    public:
@@ -5110,7 +5066,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5153,7 +5109,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, temp, flags);
   } else {
     Register size = ToRegister(instr->size());
diff --git a/src/crankshaft/x64/lithium-codegen-x64.h b/src/crankshaft/x64/lithium-codegen-x64.h
index 22c39ad..22a32a1 100644
--- a/src/crankshaft/x64/lithium-codegen-x64.h
+++ b/src/crankshaft/x64/lithium-codegen-x64.h
@@ -297,8 +297,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
 #ifdef _MSC_VER
   // On windows, you may not access the stack more than one page below
diff --git a/src/crankshaft/x64/lithium-x64.cc b/src/crankshaft/x64/lithium-x64.cc
index 4245169..18fb5d4 100644
--- a/src/crankshaft/x64/lithium-x64.cc
+++ b/src/crankshaft/x64/lithium-x64.cc
@@ -348,15 +348,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -389,15 +380,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -907,7 +889,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1042,6 +1024,9 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
 
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
@@ -1050,15 +1035,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), rsi);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2223,26 +2213,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), rsi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result = new (zone())
-      LStoreKeyedGeneric(context, object, key, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2337,20 +2307,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), rsi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, object, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), rsi);
   LOperand* left = UseFixed(instr->left(), rdx);
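
The DoCallWithDescriptor hunk above now distinguishes the descriptor's register parameters from its stack parameters instead of assuming every parameter has a fixed register. A minimal, self-contained sketch of that split; the Descriptor struct and register names below are invented for illustration and are not V8's CallInterfaceDescriptor.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Illustrative stand-in for a call descriptor: how many leading parameters
    // get fixed registers; the rest go on the stack.
    struct Descriptor {
      std::vector<std::string> register_names;  // fixed registers, in order
      int parameter_count;                      // register + stack parameters
    };

    void LowerCall(const Descriptor& d) {
      int i = 0;
      // Leading parameters are pinned to their fixed registers.
      for (; i < static_cast<int>(d.register_names.size()); ++i)
        std::printf("param %d -> register %s\n", i, d.register_names[i].c_str());
      // Remaining parameters become explicit stack pushes.
      for (; i < d.parameter_count; ++i)
        std::printf("param %d -> push on stack\n", i);
    }

    int main() {
      Descriptor d{{"rdi", "rsi", "rdx"}, 5};  // 3 register params, 2 stack params
      LowerCall(d);
      return 0;
    }
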
diff --git a/src/crankshaft/x64/lithium-x64.h b/src/crankshaft/x64/lithium-x64.h
index 5c0ce04..e7eaa01 100644
--- a/src/crankshaft/x64/lithium-x64.h
+++ b/src/crankshaft/x64/lithium-x64.h
@@ -132,9 +132,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2013,33 +2011,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* object, LOperand* key, LOperand* value,
@@ -2068,34 +2039,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 2> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/crankshaft/x87/lithium-codegen-x87.cc b/src/crankshaft/x87/lithium-codegen-x87.cc
index 1a42d5b..2d597d4 100644
--- a/src/crankshaft/x87/lithium-codegen-x87.cc
+++ b/src/crankshaft/x87/lithium-codegen-x87.cc
@@ -134,7 +134,7 @@
   Comment(";;; Prologue begin");
 
   // Possibly allocate a local context.
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     Comment(";;; Allocate local context");
     bool need_write_barrier = true;
     // Argument to NewContext is the function, which is still in edi.
@@ -142,7 +142,7 @@
     Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
     if (info()->scope()->is_script_scope()) {
       __ push(edi);
-      __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+      __ Push(info()->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       deopt_mode = Safepoint::kLazyDeopt;
     } else {
@@ -2681,20 +2681,6 @@
 }
 
 
-template <class T>
-void LCodeGen::EmitVectorStoreICRegisters(T* instr) {
-  Register vector_register = ToRegister(instr->temp_vector());
-  Register slot_register = ToRegister(instr->temp_slot());
-
-  AllowDeferredHandleDereference vector_structure_check;
-  Handle<TypeFeedbackVector> vector = instr->hydrogen()->feedback_vector();
-  __ mov(vector_register, vector);
-  FeedbackVectorSlot slot = instr->hydrogen()->slot();
-  int index = vector->GetIndex(slot);
-  __ mov(slot_register, Immediate(Smi::FromInt(index)));
-}
-
-
 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
   DCHECK(ToRegister(instr->context()).is(esi));
   DCHECK(ToRegister(instr->result()).is(eax));
@@ -3703,7 +3689,9 @@
   __ PrepareCallCFunction(2, eax);
   __ fstp_d(MemOperand(esp, 0));
   X87PrepareToWrite(result);
+  __ X87SetFPUCW(0x027F);
   __ CallCFunction(ExternalReference::ieee754_cos_function(isolate()), 2);
+  __ X87SetFPUCW(0x037F);
   // Return value is in st(0) on ia32.
   X87CommitWrite(result);
 }
@@ -3717,7 +3705,9 @@
   __ PrepareCallCFunction(2, eax);
   __ fstp_d(MemOperand(esp, 0));
   X87PrepareToWrite(result);
+  __ X87SetFPUCW(0x027F);
   __ CallCFunction(ExternalReference::ieee754_sin_function(isolate()), 2);
+  __ X87SetFPUCW(0x037F);
   // Return value is in st(0) on ia32.
   X87CommitWrite(result);
 }
@@ -3976,21 +3966,6 @@
 }
 
 
-void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(esi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreNamedGeneric>(instr);
-
-  __ mov(StoreDescriptor::NameRegister(), instr->name());
-  Handle<Code> ic =
-      CodeFactory::StoreICInOptimizedCode(isolate(), instr->language_mode())
-          .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
   Condition cc = instr->hydrogen()->allow_equality() ? above : above_equal;
   if (instr->index()->IsConstantOperand()) {
@@ -4199,21 +4174,6 @@
 }
 
 
-void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
-  DCHECK(ToRegister(instr->context()).is(esi));
-  DCHECK(ToRegister(instr->object()).is(StoreDescriptor::ReceiverRegister()));
-  DCHECK(ToRegister(instr->key()).is(StoreDescriptor::NameRegister()));
-  DCHECK(ToRegister(instr->value()).is(StoreDescriptor::ValueRegister()));
-
-  EmitVectorStoreICRegisters<LStoreKeyedGeneric>(instr);
-
-  Handle<Code> ic = CodeFactory::KeyedStoreICInOptimizedCode(
-                        isolate(), instr->language_mode())
-                        .code();
-  CallCode(ic, RelocInfo::CODE_TARGET, instr);
-}
-
-
 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
   Register object = ToRegister(instr->object());
   Register temp = ToRegister(instr->temp());
@@ -5315,7 +5275,7 @@
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
@@ -5358,7 +5318,7 @@
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    CHECK(size <= Page::kMaxRegularHeapObjectSize);
+    CHECK(size <= kMaxRegularHeapObjectSize);
     __ FastAllocate(size, result, temp, flags);
   } else {
     Register size = ToRegister(instr->size());
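
The X87SetFPUCW(0x027F)/X87SetFPUCW(0x037F) pair added around the ieee754 sin/cos calls above drops the x87 precision-control field from 64-bit to 53-bit for the duration of the C call and then restores it, presumably so the double-precision routines see the rounding they were written for. A glibc/x86-only sketch of what those precision-control bits change; the header, macros, and demo are my own, not V8 code.

    #include <cstdio>
    #include <fpu_control.h>  // glibc x87 control-word macros

    int main() {
      volatile long double one = 1.0L;
      volatile long double tiny = 1e-18L;  // survives 64-bit precision, lost at 53-bit

      fpu_control_t original;
      _FPU_GETCW(original);

      _FPU_SETCW(0x027F);                               // 53-bit precision, as in the patch
      long double with_double_pc = (one + tiny) - one;  // tiny is rounded away
      _FPU_SETCW(original);                             // back to the default (64-bit)
      long double with_extended_pc = (one + tiny) - one;

      std::printf("PC=53-bit: %Lg, PC=64-bit: %Lg\n", with_double_pc, with_extended_pc);
      return 0;
    }
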
diff --git a/src/crankshaft/x87/lithium-codegen-x87.h b/src/crankshaft/x87/lithium-codegen-x87.h
index cdf02f3..850f330 100644
--- a/src/crankshaft/x87/lithium-codegen-x87.h
+++ b/src/crankshaft/x87/lithium-codegen-x87.h
@@ -323,8 +323,6 @@
 
   template <class T>
   void EmitVectorLoadICRegisters(T* instr);
-  template <class T>
-  void EmitVectorStoreICRegisters(T* instr);
 
   void EmitReturn(LReturn* instr);
 
diff --git a/src/crankshaft/x87/lithium-x87.cc b/src/crankshaft/x87/lithium-x87.cc
index f614b93..a319c0c 100644
--- a/src/crankshaft/x87/lithium-x87.cc
+++ b/src/crankshaft/x87/lithium-x87.cc
@@ -362,15 +362,6 @@
 }
 
 
-void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add(".");
-  stream->Add(String::cast(*name())->ToCString().get());
-  stream->Add(" <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LLoadKeyed::PrintDataTo(StringStream* stream) {
   elements()->PrintTo(stream);
   stream->Add("[");
@@ -403,15 +394,6 @@
 }
 
 
-void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
-  object()->PrintTo(stream);
-  stream->Add("[");
-  key()->PrintTo(stream);
-  stream->Add("] <- ");
-  value()->PrintTo(stream);
-}
-
-
 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
   object()->PrintTo(stream);
   stream->Add(" %p -> %p", *original_map(), *transitioned_map());
@@ -925,7 +907,7 @@
 
 LInstruction* LChunkBuilder::DoPrologue(HPrologue* instr) {
   LInstruction* result = new (zone()) LPrologue();
-  if (info_->scope()->num_heap_slots() > 0) {
+  if (info_->scope()->NeedsContext()) {
     result = MarkAsCall(result, instr);
   }
   return result;
@@ -1071,6 +1053,10 @@
 LInstruction* LChunkBuilder::DoCallWithDescriptor(
     HCallWithDescriptor* instr) {
   CallInterfaceDescriptor descriptor = instr->descriptor();
+  DCHECK_EQ(descriptor.GetParameterCount() +
+                LCallWithDescriptor::kImplicitRegisterParameterCount,
+            instr->OperandCount());
+
   LOperand* target = UseRegisterOrConstantAtStart(instr->target());
   ZoneList<LOperand*> ops(instr->OperandCount(), zone());
   // Target
@@ -1078,15 +1064,20 @@
   // Context
   LOperand* op = UseFixed(instr->OperandAt(1), esi);
   ops.Add(op, zone());
-  // Other register parameters
-  for (int i = LCallWithDescriptor::kImplicitRegisterParameterCount;
-       i < instr->OperandCount(); i++) {
-    op =
-        UseFixed(instr->OperandAt(i),
-                 descriptor.GetRegisterParameter(
-                     i - LCallWithDescriptor::kImplicitRegisterParameterCount));
+  // Load register parameters.
+  int i = 0;
+  for (; i < descriptor.GetRegisterParameterCount(); i++) {
+    op = UseFixed(instr->OperandAt(
+                      i + LCallWithDescriptor::kImplicitRegisterParameterCount),
+                  descriptor.GetRegisterParameter(i));
     ops.Add(op, zone());
   }
+  // Push stack parameters.
+  for (; i < descriptor.GetParameterCount(); i++) {
+    op = UseAny(instr->OperandAt(
+        i + LCallWithDescriptor::kImplicitRegisterParameterCount));
+    AddInstruction(new (zone()) LPushArgument(op), instr);
+  }
 
   LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
       descriptor, ops, zone());
@@ -2213,26 +2204,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), esi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-
-  DCHECK(instr->object()->representation().IsTagged());
-  DCHECK(instr->key()->representation().IsTagged());
-  DCHECK(instr->value()->representation().IsTagged());
-
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreKeyedGeneric* result = new (zone())
-      LStoreKeyedGeneric(context, object, key, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoTransitionElementsKind(
     HTransitionElementsKind* instr) {
   if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
@@ -2334,20 +2305,6 @@
 }
 
 
-LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
-  LOperand* context = UseFixed(instr->context(), esi);
-  LOperand* object =
-      UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
-  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
-  LOperand* slot = FixedTemp(StoreWithVectorDescriptor::SlotRegister());
-  LOperand* vector = FixedTemp(StoreWithVectorDescriptor::VectorRegister());
-
-  LStoreNamedGeneric* result =
-      new (zone()) LStoreNamedGeneric(context, object, value, slot, vector);
-  return MarkAsCall(result, instr);
-}
-
-
 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
   LOperand* context = UseFixed(instr->context(), esi);
   LOperand* left = UseFixed(instr->left(), edx);
diff --git a/src/crankshaft/x87/lithium-x87.h b/src/crankshaft/x87/lithium-x87.h
index 3ef8f75..e2b8043 100644
--- a/src/crankshaft/x87/lithium-x87.h
+++ b/src/crankshaft/x87/lithium-x87.h
@@ -135,9 +135,7 @@
   V(StoreCodeEntry)                          \
   V(StoreContextSlot)                        \
   V(StoreKeyed)                              \
-  V(StoreKeyedGeneric)                       \
   V(StoreNamedField)                         \
-  V(StoreNamedGeneric)                       \
   V(StringAdd)                               \
   V(StringCharCodeAt)                        \
   V(StringCharFromCode)                      \
@@ -2008,32 +2006,6 @@
 };
 
 
-class LStoreNamedGeneric final : public LTemplateInstruction<0, 3, 2> {
- public:
-  LStoreNamedGeneric(LOperand* context, LOperand* object, LOperand* value,
-                     LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* value() { return inputs_[2]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreNamedGeneric, "store-named-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreNamedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-  Handle<Object> name() const { return hydrogen()->name(); }
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LStoreKeyed final : public LTemplateInstruction<0, 4, 0> {
  public:
   LStoreKeyed(LOperand* obj, LOperand* key, LOperand* val,
@@ -2064,34 +2036,6 @@
 };
 
 
-class LStoreKeyedGeneric final : public LTemplateInstruction<0, 4, 2> {
- public:
-  LStoreKeyedGeneric(LOperand* context, LOperand* object, LOperand* key,
-                     LOperand* value, LOperand* slot, LOperand* vector) {
-    inputs_[0] = context;
-    inputs_[1] = object;
-    inputs_[2] = key;
-    inputs_[3] = value;
-    temps_[0] = slot;
-    temps_[1] = vector;
-  }
-
-  LOperand* context() { return inputs_[0]; }
-  LOperand* object() { return inputs_[1]; }
-  LOperand* key() { return inputs_[2]; }
-  LOperand* value() { return inputs_[3]; }
-  LOperand* temp_slot() { return temps_[0]; }
-  LOperand* temp_vector() { return temps_[1]; }
-
-  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric, "store-keyed-generic")
-  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedGeneric)
-
-  void PrintDataTo(StringStream* stream) override;
-
-  LanguageMode language_mode() { return hydrogen()->language_mode(); }
-};
-
-
 class LTransitionElementsKind final : public LTemplateInstruction<0, 2, 2> {
  public:
   LTransitionElementsKind(LOperand* object,
diff --git a/src/d8.cc b/src/d8.cc
index a8af9de..01801f8 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -2,30 +2,16 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-
-// Defined when linking against shared lib on Windows.
-#if defined(USING_V8_SHARED) && !defined(V8_SHARED)
-#define V8_SHARED
-#endif
-
 #include <errno.h>
 #include <stdlib.h>
 #include <string.h>
 #include <sys/stat.h>
 
-#ifdef V8_SHARED
-#include <assert.h>
-#endif  // V8_SHARED
-
-#ifndef V8_SHARED
 #include <algorithm>
 #include <fstream>
+#include <map>
+#include <utility>
 #include <vector>
-#endif  // !V8_SHARED
-
-#ifdef V8_SHARED
-#include "include/v8-testing.h"
-#endif  // V8_SHARED
 
 #ifdef ENABLE_VTUNE_JIT_INTERFACE
 #include "src/third_party/vtune/v8-vtune.h"
@@ -36,7 +22,6 @@
 
 #include "include/libplatform/libplatform.h"
 #include "include/libplatform/v8-tracing.h"
-#ifndef V8_SHARED
 #include "src/api.h"
 #include "src/base/cpu.h"
 #include "src/base/debug/stack_trace.h"
@@ -48,7 +33,6 @@
 #include "src/snapshot/natives.h"
 #include "src/utils.h"
 #include "src/v8.h"
-#endif  // !V8_SHARED
 
 #if !defined(_WIN32) && !defined(_WIN64)
 #include <unistd.h>  // NOLINT
@@ -72,9 +56,7 @@
 namespace {
 
 const int MB = 1024 * 1024;
-#ifndef V8_SHARED
 const int kMaxWorkers = 50;
-#endif
 
 
 class ShellArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
@@ -102,7 +84,6 @@
 };
 
 
-#ifndef V8_SHARED
 // Predictable v8::Platform implementation. All background and foreground
 // tasks are run immediately, delayed tasks are not executed at all.
 class PredictablePlatform : public Platform {
@@ -136,6 +117,7 @@
     return synthetic_time_in_sec_ += 0.00001;
   }
 
+  using Platform::AddTraceEvent;
   uint64_t AddTraceEvent(char phase, const uint8_t* categoryEnabledFlag,
                          const char* name, const char* scope, uint64_t id,
                          uint64_t bind_id, int numArgs, const char** argNames,
@@ -163,7 +145,6 @@
 
   DISALLOW_COPY_AND_ASSIGN(PredictablePlatform);
 };
-#endif  // !V8_SHARED
 
 
 v8::Platform* g_platform = NULL;
@@ -176,7 +157,6 @@
 }
 
 
-#ifndef V8_SHARED
 bool FindInObjectList(Local<Object> object, const Shell::ObjectList& list) {
   for (int i = 0; i < list.length(); ++i) {
     if (list[i]->StrictEquals(object)) {
@@ -202,7 +182,6 @@
 
   return worker;
 }
-#endif  // !V8_SHARED
 
 
 }  // namespace
@@ -370,7 +349,6 @@
 };
 
 
-#ifndef V8_SHARED
 CounterMap* Shell::counter_map_;
 base::OS::MemoryMappedFile* Shell::counters_file_ = NULL;
 CounterCollection Shell::local_counters_;
@@ -383,20 +361,17 @@
 bool Shell::allow_new_workers_ = true;
 i::List<Worker*> Shell::workers_;
 i::List<SharedArrayBuffer::Contents> Shell::externalized_shared_contents_;
-#endif  // !V8_SHARED
 
 Global<Context> Shell::evaluation_context_;
 ArrayBuffer::Allocator* Shell::array_buffer_allocator;
 ShellOptions Shell::options;
 base::OnceType Shell::quit_once_ = V8_ONCE_INIT;
 
-#ifndef V8_SHARED
 bool CounterMap::Match(void* key1, void* key2) {
   const char* name1 = reinterpret_cast<const char*>(key1);
   const char* name2 = reinterpret_cast<const char*>(key2);
   return strcmp(name1, name2) == 0;
 }
-#endif  // !V8_SHARED
 
 
 // Converts a V8 value to a C string.
@@ -460,18 +435,12 @@
 // Compile a string within the current v8 context.
 MaybeLocal<Script> Shell::CompileString(
     Isolate* isolate, Local<String> source, Local<Value> name,
-    ScriptCompiler::CompileOptions compile_options, SourceType source_type) {
+    ScriptCompiler::CompileOptions compile_options) {
   Local<Context> context(isolate->GetCurrentContext());
   ScriptOrigin origin(name);
-  // TODO(adamk): Make use of compile options for Modules.
-  if (compile_options == ScriptCompiler::kNoCompileOptions ||
-      source_type == MODULE) {
+  if (compile_options == ScriptCompiler::kNoCompileOptions) {
     ScriptCompiler::Source script_source(source, origin);
-    return source_type == SCRIPT
-               ? ScriptCompiler::Compile(context, &script_source,
-                                         compile_options)
-               : ScriptCompiler::CompileModule(context, &script_source,
-                                               compile_options);
+    return ScriptCompiler::Compile(context, &script_source, compile_options);
   }
 
   ScriptCompiler::CachedData* data =
@@ -485,7 +454,6 @@
     DCHECK(false);  // A new compile option?
   }
   if (data == NULL) compile_options = ScriptCompiler::kNoCompileOptions;
-  DCHECK_EQ(SCRIPT, source_type);
   MaybeLocal<Script> result =
       ScriptCompiler::Compile(context, &cached_source, compile_options);
   CHECK(data == NULL || !data->rejected);
@@ -496,7 +464,7 @@
 // Executes a string within the current v8 context.
 bool Shell::ExecuteString(Isolate* isolate, Local<String> source,
                           Local<Value> name, bool print_result,
-                          bool report_exceptions, SourceType source_type) {
+                          bool report_exceptions) {
   HandleScope handle_scope(isolate);
   TryCatch try_catch(isolate);
   try_catch.SetVerbose(true);
@@ -508,8 +476,8 @@
         Local<Context>::New(isolate, data->realms_[data->realm_current_]);
     Context::Scope context_scope(realm);
     Local<Script> script;
-    if (!Shell::CompileString(isolate, source, name, options.compile_options,
-                              source_type).ToLocal(&script)) {
+    if (!Shell::CompileString(isolate, source, name, options.compile_options)
+             .ToLocal(&script)) {
       // Print errors that happened during compilation.
       if (report_exceptions) ReportException(isolate, &try_catch);
       return false;
@@ -527,9 +495,7 @@
   }
   DCHECK(!try_catch.HasCaught());
   if (print_result) {
-#if !defined(V8_SHARED)
     if (options.test_shell) {
-#endif
       if (!result->IsUndefined()) {
         // If all went well and the result wasn't undefined then print
         // the returned value.
@@ -537,17 +503,160 @@
         fwrite(*str, sizeof(**str), str.length(), stdout);
         printf("\n");
       }
-#if !defined(V8_SHARED)
     } else {
       v8::String::Utf8Value str(Stringify(isolate, result));
       fwrite(*str, sizeof(**str), str.length(), stdout);
       printf("\n");
     }
-#endif
   }
   return true;
 }
 
+namespace {
+
+std::string ToSTLString(Local<String> v8_str) {
+  String::Utf8Value utf8(v8_str);
+  // Should not be able to fail since the input is a String.
+  CHECK(*utf8);
+  return *utf8;
+}
+
+bool IsAbsolutePath(const std::string& path) {
+#if defined(_WIN32) || defined(_WIN64)
+  // TODO(adamk): This is an incorrect approximation, but should
+  // work for all our test-running cases.
+  return path.find(':') != std::string::npos;
+#else
+  return path[0] == '/';
+#endif
+}
+
+std::string GetWorkingDirectory() {
+#if defined(_WIN32) || defined(_WIN64)
+  char system_buffer[MAX_PATH];
+  // TODO(adamk): Support Unicode paths.
+  DWORD len = GetCurrentDirectoryA(MAX_PATH, system_buffer);
+  CHECK(len > 0);
+  return system_buffer;
+#else
+  char curdir[PATH_MAX];
+  CHECK_NOT_NULL(getcwd(curdir, PATH_MAX));
+  return curdir;
+#endif
+}
+
+// Returns the directory part of path, without the trailing '/'.
+std::string DirName(const std::string& path) {
+  DCHECK(IsAbsolutePath(path));
+  size_t last_slash = path.find_last_of('/');
+  DCHECK(last_slash != std::string::npos);
+  return path.substr(0, last_slash);
+}
+
+std::string EnsureAbsolutePath(const std::string& path,
+                               const std::string& dir_name) {
+  return IsAbsolutePath(path) ? path : dir_name + '/' + path;
+}
+
+MaybeLocal<Module> ResolveModuleCallback(Local<Context> context,
+                                         Local<String> specifier,
+                                         Local<Module> referrer,
+                                         Local<Value> data) {
+  Isolate* isolate = context->GetIsolate();
+  auto module_map = static_cast<std::map<std::string, Global<Module>>*>(
+      External::Cast(*data)->Value());
+  Local<String> dir_name = Local<String>::Cast(referrer->GetEmbedderData());
+  std::string absolute_path =
+      EnsureAbsolutePath(ToSTLString(specifier), ToSTLString(dir_name));
+  auto it = module_map->find(absolute_path);
+  if (it != module_map->end()) {
+    return it->second.Get(isolate);
+  }
+  return MaybeLocal<Module>();
+}
+
+}  // anonymous namespace
+
+MaybeLocal<Module> Shell::FetchModuleTree(
+    Isolate* isolate, const std::string& file_name,
+    std::map<std::string, Global<Module>>* module_map) {
+  DCHECK(IsAbsolutePath(file_name));
+  TryCatch try_catch(isolate);
+  try_catch.SetVerbose(true);
+  Local<String> source_text = ReadFile(isolate, file_name.c_str());
+  if (source_text.IsEmpty()) {
+    printf("Error reading '%s'\n", file_name.c_str());
+    Shell::Exit(1);
+  }
+  ScriptOrigin origin(
+      String::NewFromUtf8(isolate, file_name.c_str(), NewStringType::kNormal)
+          .ToLocalChecked());
+  ScriptCompiler::Source source(source_text, origin);
+  Local<Module> module;
+  if (!ScriptCompiler::CompileModule(isolate, &source).ToLocal(&module)) {
+    ReportException(isolate, &try_catch);
+    return MaybeLocal<Module>();
+  }
+  module_map->insert(
+      std::make_pair(file_name, Global<Module>(isolate, module)));
+
+  std::string dir_name = DirName(file_name);
+  module->SetEmbedderData(
+      String::NewFromUtf8(isolate, dir_name.c_str(), NewStringType::kNormal)
+          .ToLocalChecked());
+
+  for (int i = 0, length = module->GetModuleRequestsLength(); i < length; ++i) {
+    Local<String> name = module->GetModuleRequest(i);
+    std::string absolute_path = EnsureAbsolutePath(ToSTLString(name), dir_name);
+    if (!module_map->count(absolute_path)) {
+      if (FetchModuleTree(isolate, absolute_path, module_map).IsEmpty()) {
+        return MaybeLocal<Module>();
+      }
+    }
+  }
+
+  return module;
+}
+
+bool Shell::ExecuteModule(Isolate* isolate, const char* file_name) {
+  HandleScope handle_scope(isolate);
+
+  std::string absolute_path =
+      EnsureAbsolutePath(file_name, GetWorkingDirectory());
+  std::replace(absolute_path.begin(), absolute_path.end(), '\\', '/');
+
+  Local<Module> root_module;
+  std::map<std::string, Global<Module>> module_map;
+  if (!FetchModuleTree(isolate, absolute_path, &module_map)
+           .ToLocal(&root_module)) {
+    return false;
+  }
+
+  TryCatch try_catch(isolate);
+  try_catch.SetVerbose(true);
+
+  MaybeLocal<Value> maybe_result;
+  {
+    PerIsolateData* data = PerIsolateData::Get(isolate);
+    Local<Context> realm = data->realms_[data->realm_current_].Get(isolate);
+    Context::Scope context_scope(realm);
+
+    if (root_module->Instantiate(realm, ResolveModuleCallback,
+                                 External::New(isolate, &module_map))) {
+      maybe_result = root_module->Evaluate(realm);
+      EmptyMessageQueues(isolate);
+    }
+  }
+  Local<Value> result;
+  if (!maybe_result.ToLocal(&result)) {
+    DCHECK(try_catch.HasCaught());
+    // Print errors that happened during execution.
+    ReportException(isolate, &try_catch);
+    return false;
+  }
+  DCHECK(!try_catch.HasCaught());
+  return true;
+}
 
 PerIsolateData::RealmScope::RealmScope(PerIsolateData* data) : data_(data) {
   data_->realm_count_ = 1;
@@ -595,7 +704,6 @@
 }
 
 
-#ifndef V8_SHARED
 // performance.now() returns a time stamp as double, measured in milliseconds.
 // When FLAG_verify_predictable mode is enabled it returns result of
 // v8::Platform::MonotonicallyIncreasingTime().
@@ -608,7 +716,6 @@
     args.GetReturnValue().Set(delta.InMillisecondsF());
   }
 }
-#endif  // !V8_SHARED
 
 
 // Realm.current() returns the index of the currently active realm.
@@ -879,7 +986,6 @@
 }
 
 
-#ifndef V8_SHARED
 void Shell::WorkerNew(const v8::FunctionCallbackInfo<v8::Value>& args) {
   Isolate* isolate = args.GetIsolate();
   HandleScope handle_scope(isolate);
@@ -1001,16 +1107,13 @@
 
   worker->Terminate();
 }
-#endif  // !V8_SHARED
 
 
 void Shell::QuitOnce(v8::FunctionCallbackInfo<v8::Value>* args) {
   int exit_code = (*args)[0]
                       ->Int32Value(args->GetIsolate()->GetCurrentContext())
                       .FromMaybe(0);
-#ifndef V8_SHARED
   CleanupWorkers();
-#endif  // !V8_SHARED
   OnExit(args->GetIsolate());
   Exit(exit_code);
 }
@@ -1031,14 +1134,12 @@
 
 void Shell::ReportException(Isolate* isolate, v8::TryCatch* try_catch) {
   HandleScope handle_scope(isolate);
-#ifndef V8_SHARED
   Local<Context> context;
   bool enter_context = !isolate->InContext();
   if (enter_context) {
     context = Local<Context>::New(isolate, evaluation_context_);
     context->Enter();
   }
-#endif  // !V8_SHARED
   v8::String::Utf8Value exception(try_catch->Exception());
   const char* exception_string = ToCString(exception);
   Local<Message> message = try_catch->Message();
@@ -1082,13 +1183,10 @@
     }
   }
   printf("\n");
-#ifndef V8_SHARED
   if (enter_context) context->Exit();
-#endif  // !V8_SHARED
 }
 
 
-#ifndef V8_SHARED
 int32_t* Counter::Bind(const char* name, bool is_histogram) {
   int i;
   for (i = 0; i < kMaxNameSize - 1 && name[i]; i++)
@@ -1217,7 +1315,6 @@
   if (result.IsEmpty()) return String::Empty(isolate);
   return result.ToLocalChecked().As<String>();
 }
-#endif  // !V8_SHARED
 
 
 Local<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) {
@@ -1308,7 +1405,6 @@
           .ToLocalChecked(),
       realm_template);
 
-#ifndef V8_SHARED
   Local<ObjectTemplate> performance_template = ObjectTemplate::New(isolate);
   performance_template->Set(
       String::NewFromUtf8(isolate, "now", NewStringType::kNormal)
@@ -1347,7 +1443,6 @@
       String::NewFromUtf8(isolate, "Worker", NewStringType::kNormal)
           .ToLocalChecked(),
       worker_fun_template);
-#endif  // !V8_SHARED
 
   Local<ObjectTemplate> os_templ = ObjectTemplate::New(isolate);
   AddOSMethods(isolate, os_templ);
@@ -1365,21 +1460,17 @@
 }
 
 void Shell::Initialize(Isolate* isolate) {
-#ifndef V8_SHARED
   // Set up counters
   if (i::StrLength(i::FLAG_map_counters) != 0)
     MapCounters(isolate, i::FLAG_map_counters);
-#endif  // !V8_SHARED
   // Disable default message reporting.
   isolate->AddMessageListener(EmptyMessageCallback);
 }
 
 
 Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) {
-#ifndef V8_SHARED
   // This needs to be a critical section since this is not thread-safe
   base::LockGuard<base::Mutex> lock_guard(context_mutex_.Pointer());
-#endif  // !V8_SHARED
   // Initialize the global objects
   Local<ObjectTemplate> global_template = CreateGlobalTemplate(isolate);
   EscapableHandleScope handle_scope(isolate);
@@ -1387,7 +1478,6 @@
   DCHECK(!context.IsEmpty());
   Context::Scope scope(context);
 
-#ifndef V8_SHARED
   i::Factory* factory = reinterpret_cast<i::Isolate*>(isolate)->factory();
   i::JSArguments js_args = i::FLAG_js_arguments;
   i::Handle<i::FixedArray> arguments_array =
@@ -1405,7 +1495,6 @@
                 .ToLocalChecked(),
             Utils::ToLocal(arguments_jsarray))
       .FromJust();
-#endif  // !V8_SHARED
   return handle_scope.Escape(context);
 }
 
@@ -1419,7 +1508,6 @@
 }
 
 
-#ifndef V8_SHARED
 struct CounterAndKey {
   Counter* counter;
   const char* key;
@@ -1444,11 +1532,8 @@
       JSON::Stringify(context, dispatch_counters).ToLocalChecked());
 }
 
-#endif  // !V8_SHARED
-
 
 void Shell::OnExit(v8::Isolate* isolate) {
-#ifndef V8_SHARED
   if (i::FLAG_dump_counters) {
     int number_of_counters = 0;
     for (CounterMap::Iterator i(counter_map_); i.More(); i.Next()) {
@@ -1484,7 +1569,6 @@
 
   delete counters_file_;
   delete counter_map_;
-#endif  // !V8_SHARED
 }
 
 
@@ -1618,10 +1702,8 @@
 
 
 SourceGroup::~SourceGroup() {
-#ifndef V8_SHARED
   delete thread_;
   thread_ = NULL;
-#endif  // !V8_SHARED
 }
 
 
@@ -1629,7 +1711,6 @@
   bool exception_was_thrown = false;
   for (int i = begin_offset_; i < end_offset_; ++i) {
     const char* arg = argv_[i];
-    Shell::SourceType source_type = Shell::SCRIPT;
     if (strcmp(arg, "-e") == 0 && i + 1 < end_offset_) {
       // Execute argument given to -e option directly.
       HandleScope handle_scope(isolate);
@@ -1648,8 +1729,13 @@
       continue;
     } else if (strcmp(arg, "--module") == 0 && i + 1 < end_offset_) {
       // Treat the next file as a module.
-      source_type = Shell::MODULE;
       arg = argv_[++i];
+      Shell::options.script_executed = true;
+      if (!Shell::ExecuteModule(isolate, arg)) {
+        exception_was_thrown = true;
+        break;
+      }
+      continue;
     } else if (arg[0] == '-') {
       // Ignore other options. They have been parsed already.
       continue;
@@ -1666,8 +1752,7 @@
       Shell::Exit(1);
     }
     Shell::options.script_executed = true;
-    if (!Shell::ExecuteString(isolate, source, file_name, false, true,
-                              source_type)) {
+    if (!Shell::ExecuteString(isolate, source, file_name, false, true)) {
       exception_was_thrown = true;
       break;
     }
@@ -1690,7 +1775,6 @@
 }
 
 
-#ifndef V8_SHARED
 base::Thread::Options SourceGroup::GetThreadOptions() {
   // On some systems (OSX 10.6) the stack size default is 0.5Mb or less
   // which is not enough to parse the big literal expressions used in tests.
@@ -2014,7 +2098,6 @@
     delete data;
   }
 }
-#endif  // !V8_SHARED
 
 
 void SetFlagsFromString(const char* flags) {
@@ -2070,30 +2153,16 @@
       // JavaScript engines.
       continue;
     } else if (strcmp(argv[i], "--isolate") == 0) {
-#ifdef V8_SHARED
-      printf("D8 with shared library does not support multi-threading\n");
-      return false;
-#endif  // V8_SHARED
       options.num_isolates++;
     } else if (strcmp(argv[i], "--dump-heap-constants") == 0) {
-#ifdef V8_SHARED
-      printf("D8 with shared library does not support constant dumping\n");
-      return false;
-#else
       options.dump_heap_constants = true;
       argv[i] = NULL;
-#endif  // V8_SHARED
     } else if (strcmp(argv[i], "--throws") == 0) {
       options.expected_to_throw = true;
       argv[i] = NULL;
     } else if (strncmp(argv[i], "--icu-data-file=", 16) == 0) {
       options.icu_data_file = argv[i] + 16;
       argv[i] = NULL;
-#ifdef V8_SHARED
-    } else if (strcmp(argv[i], "--dump-counters") == 0) {
-      printf("D8 with shared library does not include counters\n");
-      return false;
-#endif  // V8_SHARED
 #ifdef V8_USE_EXTERNAL_STARTUP_DATA
     } else if (strncmp(argv[i], "--natives_blob=", 15) == 0) {
       options.natives_blob = argv[i] + 15;
@@ -2159,11 +2228,9 @@
 
 
 int Shell::RunMain(Isolate* isolate, int argc, char* argv[], bool last_run) {
-#ifndef V8_SHARED
   for (int i = 1; i < options.num_isolates; ++i) {
     options.isolate_sources[i].StartExecuteInThread();
   }
-#endif  // !V8_SHARED
   {
     HandleScope scope(isolate);
     Local<Context> context = CreateEvaluationContext(isolate);
@@ -2178,7 +2245,6 @@
     }
   }
   CollectGarbage(isolate);
-#ifndef V8_SHARED
   for (int i = 1; i < options.num_isolates; ++i) {
     if (last_run) {
       options.isolate_sources[i].JoinThread();
@@ -2187,7 +2253,6 @@
     }
   }
   CleanupWorkers();
-#endif  // !V8_SHARED
   return 0;
 }
 
@@ -2209,17 +2274,12 @@
 
 
 void Shell::EmptyMessageQueues(Isolate* isolate) {
-#ifndef V8_SHARED
   if (!i::FLAG_verify_predictable) {
-#endif
     while (v8::platform::PumpMessageLoop(g_platform, isolate)) continue;
-#ifndef V8_SHARED
   }
-#endif
 }
 
 
-#ifndef V8_SHARED
 bool Shell::SerializeValue(Isolate* isolate, Local<Value> value,
                            const ObjectList& to_transfer,
                            ObjectList* seen_objects,
@@ -2534,14 +2594,11 @@
   printf("}\n");
 #undef ROOT_LIST_CASE
 }
-#endif  // !V8_SHARED
 
 
 int Shell::Main(int argc, char* argv[]) {
   std::ofstream trace_file;
-#ifndef V8_SHARED
   v8::base::debug::EnableInProcessStackDumping();
-#endif
 #if (defined(_WIN32) || defined(_WIN64))
   UINT new_flags =
       SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX;
@@ -2559,13 +2616,9 @@
 #endif  // defined(_WIN32) || defined(_WIN64)
   if (!SetOptions(argc, argv)) return 1;
   v8::V8::InitializeICUDefaultLocation(argv[0], options.icu_data_file);
-#ifndef V8_SHARED
   g_platform = i::FLAG_verify_predictable
                    ? new PredictablePlatform()
                    : v8::platform::CreateDefaultPlatform();
-#else
-  g_platform = v8::platform::CreateDefaultPlatform();
-#endif  // !V8_SHARED
 
   v8::V8::InitializePlatform(g_platform);
   v8::V8::Initialize();
@@ -2591,7 +2644,6 @@
 #ifdef ENABLE_VTUNE_JIT_INTERFACE
   create_params.code_event_handler = vTune::GetVtuneCodeEventHandler();
 #endif
-#ifndef V8_SHARED
   create_params.constraints.ConfigureDefaults(
       base::SysInfo::AmountOfPhysicalMemory(),
       base::SysInfo::AmountOfVirtualMemory());
@@ -2602,7 +2654,6 @@
     create_params.create_histogram_callback = CreateHistogram;
     create_params.add_histogram_sample_callback = AddHistogramSample;
   }
-#endif
   Isolate* isolate = Isolate::New(create_params);
   {
     Isolate::Scope scope(isolate);
@@ -2632,21 +2683,15 @@
       }
       tracing_controller->Initialize(trace_buffer);
       tracing_controller->StartTracing(trace_config);
-#ifndef V8_SHARED
       if (!i::FLAG_verify_predictable) {
         platform::SetTracingController(g_platform, tracing_controller);
       }
-#else
-      platform::SetTracingController(g_platform, tracing_controller);
-#endif
     }
 
-#ifndef V8_SHARED
     if (options.dump_heap_constants) {
       DumpHeapConstants(reinterpret_cast<i::Isolate*>(isolate));
       return 0;
     }
-#endif
 
     if (options.stress_opt || options.stress_deopt) {
       Testing::SetStressRunType(options.stress_opt
@@ -2662,7 +2707,6 @@
       }
       printf("======== Full Deoptimization =======\n");
       Testing::DeoptimizeAll(isolate);
-#if !defined(V8_SHARED)
     } else if (i::FLAG_stress_runs > 0) {
       options.stress_runs = i::FLAG_stress_runs;
       for (int i = 0; i < options.stress_runs && result == 0; i++) {
@@ -2671,7 +2715,6 @@
         bool last_run = i == options.stress_runs - 1;
         result = RunMain(isolate, argc, argv, last_run);
       }
-#endif
     } else {
       bool last_run = true;
       result = RunMain(isolate, argc, argv, last_run);
@@ -2683,29 +2726,23 @@
       RunShell(isolate);
     }
 
-#ifndef V8_SHARED
     if (i::FLAG_ignition && i::FLAG_trace_ignition_dispatches &&
         i::FLAG_trace_ignition_dispatches_output_file != nullptr) {
       WriteIgnitionDispatchCountersFile(isolate);
     }
-#endif
 
     // Shut down contexts and collect garbage.
     evaluation_context_.Reset();
-#ifndef V8_SHARED
     stringify_function_.Reset();
-#endif  // !V8_SHARED
     CollectGarbage(isolate);
   }
   OnExit(isolate);
-#ifndef V8_SHARED
   // Dump basic block profiling data.
   if (i::BasicBlockProfiler* profiler =
           reinterpret_cast<i::Isolate*>(isolate)->basic_block_profiler()) {
     i::OFStream os(stdout);
     os << *profiler;
   }
-#endif  // !V8_SHARED
   isolate->Dispose();
   V8::Dispose();
   V8::ShutdownPlatform();
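
The new d8 module support above resolves each import specifier against the directory of the referring module and memoizes compiled modules by absolute path. A stripped-down sketch of just that resolve-and-memoize logic, with no V8 API, POSIX paths only, and a hypothetical in-memory module graph standing in for "read the file and compile it":

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    // Hypothetical module graph: path -> import specifiers found in its source.
    const std::map<std::string, std::vector<std::string>> kSources = {
        {"/tmp/app/main.mjs", {"lib/util.mjs"}},
        {"/tmp/app/lib/util.mjs", {}},
    };

    bool IsAbsolutePath(const std::string& path) {
      return !path.empty() && path[0] == '/';  // POSIX only, like the non-Windows branch
    }

    std::string DirName(const std::string& path) {
      return path.substr(0, path.find_last_of('/'));
    }

    std::string EnsureAbsolutePath(const std::string& path, const std::string& dir) {
      return IsAbsolutePath(path) ? path : dir + '/' + path;
    }

    // Fetch a module and, recursively, everything it imports, memoized by
    // absolute path so shared dependencies are compiled once.
    void FetchModuleTree(const std::string& file_name,
                         std::map<std::string, bool>* module_map) {
      (*module_map)[file_name] = true;  // stand-in for "compiled"
      const std::string dir = DirName(file_name);
      for (const std::string& specifier : kSources.at(file_name)) {
        const std::string absolute = EnsureAbsolutePath(specifier, dir);
        if (!module_map->count(absolute)) FetchModuleTree(absolute, module_map);
      }
    }

    int main() {
      std::map<std::string, bool> module_map;
      FetchModuleTree("/tmp/app/main.mjs", &module_map);
      for (const auto& entry : module_map) std::printf("loaded %s\n", entry.first.c_str());
      return 0;
    }
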
diff --git a/src/d8.gyp b/src/d8.gyp
index cc65a5b..e0270f5 100644
--- a/src/d8.gyp
+++ b/src/d8.gyp
@@ -49,10 +49,18 @@
       'sources': [
         'd8.h',
         'd8.cc',
+        '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc',
       ],
       'conditions': [
         [ 'want_separate_host_toolset==1', {
           'toolsets': [ 'target', ],
+          'dependencies': [
+            'd8_js2c#host',
+          ],
+        }, {
+          'dependencies': [
+            'd8_js2c',
+          ],
         }],
         ['(OS=="linux" or OS=="mac" or OS=="freebsd" or OS=="netbsd" \
            or OS=="openbsd" or OS=="solaris" or OS=="android" \
@@ -63,19 +71,7 @@
           'sources': [ 'd8-windows.cc', ]
         }],
         [ 'component!="shared_library"', {
-          'sources': [
-            '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc',
-          ],
           'conditions': [
-            [ 'want_separate_host_toolset==1', {
-              'dependencies': [
-                'd8_js2c#host',
-              ],
-            }, {
-              'dependencies': [
-                'd8_js2c',
-              ],
-            }],
             [ 'v8_postmortem_support=="true"', {
               'xcode_settings': {
                 'OTHER_LDFLAGS': [
diff --git a/src/d8.h b/src/d8.h
index 0e365a5..32a7d25 100644
--- a/src/d8.h
+++ b/src/d8.h
@@ -5,15 +5,13 @@
 #ifndef V8_D8_H_
 #define V8_D8_H_
 
-#ifndef V8_SHARED
+#include <map>
+#include <string>
+
 #include "src/allocation.h"
 #include "src/base/hashmap.h"
 #include "src/base/platform/time.h"
 #include "src/list.h"
-#else
-#include "include/v8.h"
-#include "src/base/compiler-specific.h"
-#endif  // !V8_SHARED
 
 #include "src/base/once.h"
 
@@ -21,7 +19,6 @@
 namespace v8 {
 
 
-#ifndef V8_SHARED
 // A single counter in a counter collection.
 class Counter {
  public:
@@ -81,26 +78,23 @@
     const char* CurrentKey() { return static_cast<const char*>(entry_->key); }
     Counter* CurrentValue() { return static_cast<Counter*>(entry_->value); }
    private:
-    base::HashMap* map_;
-    base::HashMap::Entry* entry_;
+    base::CustomMatcherHashMap* map_;
+    base::CustomMatcherHashMap::Entry* entry_;
   };
 
  private:
   static int Hash(const char* name);
   static bool Match(void* key1, void* key2);
-  base::HashMap hash_map_;
+  base::CustomMatcherHashMap hash_map_;
 };
-#endif  // !V8_SHARED
 
 
 class SourceGroup {
  public:
   SourceGroup() :
-#ifndef V8_SHARED
       next_semaphore_(0),
       done_semaphore_(0),
       thread_(NULL),
-#endif  // !V8_SHARED
       argv_(NULL),
       begin_offset_(0),
       end_offset_(0) {}
@@ -116,7 +110,6 @@
 
   void Execute(Isolate* isolate);
 
-#ifndef V8_SHARED
   void StartExecuteInThread();
   void WaitForThread();
   void JoinThread();
@@ -141,7 +134,6 @@
   base::Semaphore next_semaphore_;
   base::Semaphore done_semaphore_;
   base::Thread* thread_;
-#endif  // !V8_SHARED
 
   void ExitShell(int exit_code);
   Local<String> ReadFile(Isolate* isolate, const char* name);
@@ -151,7 +143,6 @@
   int end_offset_;
 };
 
-#ifndef V8_SHARED
 enum SerializationTag {
   kSerializationTagUndefined,
   kSerializationTagNull,
@@ -267,7 +258,6 @@
   char* script_;
   base::Atomic32 running_;
 };
-#endif  // !V8_SHARED
 
 
 class ShellOptions {
@@ -324,23 +314,15 @@
   const char* trace_config;
 };
 
-#ifdef V8_SHARED
-class Shell {
-#else
 class Shell : public i::AllStatic {
-#endif  // V8_SHARED
-
  public:
-  enum SourceType { SCRIPT, MODULE };
-
   static MaybeLocal<Script> CompileString(
       Isolate* isolate, Local<String> source, Local<Value> name,
-      v8::ScriptCompiler::CompileOptions compile_options,
-      SourceType source_type);
+      v8::ScriptCompiler::CompileOptions compile_options);
   static bool ExecuteString(Isolate* isolate, Local<String> source,
                             Local<Value> name, bool print_result,
-                            bool report_exceptions,
-                            SourceType source_type = SCRIPT);
+                            bool report_exceptions);
+  static bool ExecuteModule(Isolate* isolate, const char* file_name);
   static const char* ToCString(const v8::String::Utf8Value& value);
   static void ReportException(Isolate* isolate, TryCatch* try_catch);
   static Local<String> ReadFile(Isolate* isolate, const char* name);
@@ -352,7 +334,6 @@
   static void CollectGarbage(Isolate* isolate);
   static void EmptyMessageQueues(Isolate* isolate);
 
-#ifndef V8_SHARED
   // TODO(binji): stupid implementation for now. Is there an easy way to hash an
   // object for use in base::HashMap? By pointer?
   typedef i::List<Local<Object>> ObjectList;
@@ -373,7 +354,6 @@
   static void MapCounters(v8::Isolate* isolate, const char* name);
 
   static void PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args);
-#endif  // !V8_SHARED
 
   static void RealmCurrent(const v8::FunctionCallbackInfo<v8::Value>& args);
   static void RealmOwner(const v8::FunctionCallbackInfo<v8::Value>& args);
@@ -451,7 +431,6 @@
  private:
   static Global<Context> evaluation_context_;
   static base::OnceType quit_once_;
-#ifndef V8_SHARED
   static Global<Function> stringify_function_;
   static CounterMap* counter_map_;
   // We statically allocate a set of local counters to be used if we
@@ -470,13 +449,15 @@
   static void WriteIgnitionDispatchCountersFile(v8::Isolate* isolate);
   static Counter* GetCounter(const char* name, bool is_histogram);
   static Local<String> Stringify(Isolate* isolate, Local<Value> value);
-#endif  // !V8_SHARED
   static void Initialize(Isolate* isolate);
   static void RunShell(Isolate* isolate);
   static bool SetOptions(int argc, char* argv[]);
   static Local<ObjectTemplate> CreateGlobalTemplate(Isolate* isolate);
   static MaybeLocal<Context> CreateRealm(
       const v8::FunctionCallbackInfo<v8::Value>& args);
+  static MaybeLocal<Module> FetchModuleTree(
+      Isolate* isolate, const std::string& file_name,
+      std::map<std::string, Global<Module>>* module_map);
 };
 
 
diff --git a/src/dateparser.h b/src/dateparser.h
index d7676cb..709c1cb 100644
--- a/src/dateparser.h
+++ b/src/dateparser.h
@@ -7,7 +7,7 @@
 
 #include "src/allocation.h"
 #include "src/char-predicates.h"
-#include "src/parsing/scanner.h"
+#include "src/unicode-cache.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/debug/arm/debug-arm.cc b/src/debug/arm/debug-arm.cc
index 29e4827..d96ec31 100644
--- a/src/debug/arm/debug-arm.cc
+++ b/src/debug/arm/debug-arm.cc
@@ -4,9 +4,11 @@
 
 #if V8_TARGET_ARCH_ARM
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/debug/arm64/debug-arm64.cc b/src/debug/arm64/debug-arm64.cc
index bf7964a..e344924 100644
--- a/src/debug/arm64/debug-arm64.cc
+++ b/src/debug/arm64/debug-arm64.cc
@@ -4,9 +4,11 @@
 
 #if V8_TARGET_ARCH_ARM64
 
+#include "src/debug/debug.h"
+
 #include "src/arm64/frames-arm64.h"
 #include "src/codegen.h"
-#include "src/debug/debug.h"
+#include "src/debug/liveedit.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/debug/debug-evaluate.cc b/src/debug/debug-evaluate.cc
index fb2df31..8970520 100644
--- a/src/debug/debug-evaluate.cc
+++ b/src/debug/debug-evaluate.cc
@@ -94,7 +94,13 @@
   if (context_extension->IsJSObject()) {
     Handle<JSObject> extension = Handle<JSObject>::cast(context_extension);
     Handle<JSFunction> closure(context->closure(), isolate);
-    context = isolate->factory()->NewWithContext(closure, context, extension);
+    context = isolate->factory()->NewWithContext(
+        closure, context,
+        ScopeInfo::CreateForWithScope(
+            isolate, context->IsNativeContext()
+                         ? Handle<ScopeInfo>::null()
+                         : Handle<ScopeInfo>(context->scope_info())),
+        extension);
   }
 
   Handle<JSFunction> eval_fun;
@@ -203,8 +209,13 @@
   }
 
   for (int i = context_chain_.length() - 1; i >= 0; i--) {
+    Handle<ScopeInfo> scope_info(ScopeInfo::CreateForWithScope(
+        isolate, evaluation_context_->IsNativeContext()
+                     ? Handle<ScopeInfo>::null()
+                     : Handle<ScopeInfo>(evaluation_context_->scope_info())));
+    scope_info->SetIsDebugEvaluateScope();
     evaluation_context_ = factory->NewDebugEvaluateContext(
-        evaluation_context_, context_chain_[i].materialized_object,
+        evaluation_context_, scope_info, context_chain_[i].materialized_object,
         context_chain_[i].wrapped_context, context_chain_[i].whitelist);
   }
 }
diff --git a/src/debug/debug-scopes.cc b/src/debug/debug-scopes.cc
index 55108bb..c7eb0f7 100644
--- a/src/debug/debug-scopes.cc
+++ b/src/debug/debug-scopes.cc
@@ -7,7 +7,6 @@
 #include <memory>
 
 #include "src/ast/scopes.h"
-#include "src/compiler.h"
 #include "src/debug/debug.h"
 #include "src/frames-inl.h"
 #include "src/globals.h"
@@ -100,7 +99,9 @@
     } else {
       DCHECK(scope_info->scope_type() == EVAL_SCOPE);
       info->set_eval();
-      info->set_context(Handle<Context>(function->context()));
+      if (!function->context()->IsNativeContext()) {
+        info->set_outer_scope_info(handle(function->context()->scope_info()));
+      }
       // Language mode may be inherited from the eval caller.
       // Retrieve it from shared function info.
       info->set_language_mode(shared_info->language_mode());
@@ -115,8 +116,7 @@
       CollectNonLocals(info.get(), scope);
     }
     if (!ignore_nested_scopes) {
-      AstNodeFactory ast_node_factory(info.get()->ast_value_factory());
-      scope->AllocateVariables(info.get(), &ast_node_factory);
+      DeclarationScope::Analyze(info.get(), AnalyzeMode::kDebugger);
       RetrieveScopeChain(scope);
     }
   } else if (!ignore_nested_scopes) {
@@ -364,7 +364,7 @@
     case ScopeIterator::ScopeTypeEval:
       return SetInnerScopeVariableValue(variable_name, new_value);
     case ScopeIterator::ScopeTypeModule:
-      // TODO(2399): should we implement it?
+      // TODO(neis): Implement.
       break;
   }
   return false;
@@ -619,6 +619,8 @@
   // Fill all context locals.
   CopyContextLocalsToScopeObject(scope_info, context, module_scope);
 
+  // TODO(neis): Also collect stack locals, imports, and exports.
+
   return module_scope;
 }
 
@@ -819,11 +821,10 @@
   if (scope->is_hidden()) {
     // We need to add this chain element in case the scope has a context
     // associated. We need to keep the scope chain and context chain in sync.
-    nested_scope_chain_.Add(ExtendedScopeInfo(scope->GetScopeInfo(isolate)));
+    nested_scope_chain_.Add(ExtendedScopeInfo(scope->scope_info()));
   } else {
-    nested_scope_chain_.Add(ExtendedScopeInfo(scope->GetScopeInfo(isolate),
-                                              scope->start_position(),
-                                              scope->end_position()));
+    nested_scope_chain_.Add(ExtendedScopeInfo(
+        scope->scope_info(), scope->start_position(), scope->end_position()));
   }
   for (Scope* inner_scope = scope->inner_scope(); inner_scope != nullptr;
        inner_scope = inner_scope->sibling()) {
diff --git a/src/debug/debug-scopes.h b/src/debug/debug-scopes.h
index 0491d73..026a1da 100644
--- a/src/debug/debug-scopes.h
+++ b/src/debug/debug-scopes.h
@@ -11,6 +11,8 @@
 namespace v8 {
 namespace internal {
 
+class ParseInfo;
+
 // Iterate over the actual scopes visible from a stack frame or from a closure.
 // The iteration proceeds from the innermost visible nested scope outwards.
 // All scopes are backed by an actual context except the local scope,
diff --git a/src/debug/debug.cc b/src/debug/debug.cc
index e046957..5323c13 100644
--- a/src/debug/debug.cc
+++ b/src/debug/debug.cc
@@ -14,6 +14,7 @@
 #include "src/compilation-cache.h"
 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
 #include "src/compiler.h"
+#include "src/debug/liveedit.h"
 #include "src/deoptimizer.h"
 #include "src/execution.h"
 #include "src/frames-inl.h"
@@ -1281,7 +1282,7 @@
 
   // Make sure we abort incremental marking.
   isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                                      "prepare for break points");
+                                      GarbageCollectionReason::kDebugger);
 
   DCHECK(shared->is_compiled());
   bool baseline_exists = shared->HasBaselineCode();
@@ -1293,7 +1294,8 @@
     HeapIterator iterator(isolate_->heap());
     HeapObject* obj;
     // Continuation from old-style generators need to be recomputed.
-    bool find_resumables = baseline_exists && shared->is_resumable();
+    bool find_resumables =
+        baseline_exists && IsResumableFunction(shared->kind());
 
     while ((obj = iterator.next())) {
       if (obj->IsJSFunction()) {
@@ -1352,7 +1354,7 @@
 
 void Debug::RecordAsyncFunction(Handle<JSGeneratorObject> generator_object) {
   if (last_step_action() <= StepOut) return;
-  if (!generator_object->function()->shared()->is_async()) return;
+  if (!IsAsyncFunction(generator_object->function()->shared()->kind())) return;
   DCHECK(!has_suspended_generator());
   thread_local_.suspended_generator_ = *generator_object;
   ClearStepping();
@@ -1576,10 +1578,9 @@
   return location.IsReturn() || location.IsTailCall();
 }
 
-
 void Debug::FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
-                                  LiveEdit::FrameDropMode mode) {
-  if (mode != LiveEdit::CURRENTLY_SET_MODE) {
+                                  LiveEditFrameDropMode mode) {
+  if (mode != LIVE_EDIT_CURRENTLY_SET_MODE) {
     thread_local_.frame_drop_mode_ = mode;
   }
   thread_local_.break_frame_id_ = new_break_frame_id;
@@ -1599,7 +1600,8 @@
 
 
 Handle<FixedArray> Debug::GetLoadedScripts() {
-  isolate_->heap()->CollectAllGarbage();
+  isolate_->heap()->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
+                                      GarbageCollectionReason::kDebugger);
   Factory* factory = isolate_->factory();
   if (!factory->script_list()->IsWeakFixedArray()) {
     return factory->empty_fixed_array();
@@ -1681,43 +1683,38 @@
   }
 }
 
-
-void Debug::OnPromiseReject(Handle<JSObject> promise, Handle<Object> value) {
+void Debug::OnPromiseReject(Handle<Object> promise, Handle<Object> value) {
   if (in_debug_scope() || ignore_events()) return;
   HandleScope scope(isolate_);
   // Check whether the promise has been marked as having triggered a message.
   Handle<Symbol> key = isolate_->factory()->promise_debug_marker_symbol();
-  if (JSReceiver::GetDataProperty(promise, key)->IsUndefined(isolate_)) {
+  if (!promise->IsJSObject() ||
+      JSReceiver::GetDataProperty(Handle<JSObject>::cast(promise), key)
+          ->IsUndefined(isolate_)) {
     OnException(value, promise);
   }
 }
 
 
-MaybeHandle<Object> Debug::PromiseHasUserDefinedRejectHandler(
-    Handle<JSObject> promise) {
-  Handle<JSFunction> fun = isolate_->promise_has_user_defined_reject_handler();
-  return Execution::Call(isolate_, fun, promise, 0, NULL);
-}
-
-
 void Debug::OnException(Handle<Object> exception, Handle<Object> promise) {
+  // We cannot generate debug events when JS execution is disallowed.
+  // TODO(5530): Reenable debug events within DisallowJSScopes once relevant
+  // code (MakeExceptionEvent and ProcessDebugEvent) has been moved to C++.
+  if (!AllowJavascriptExecution::IsAllowed(isolate_)) return;
+
   Isolate::CatchType catch_type = isolate_->PredictExceptionCatcher();
 
   // Don't notify listener of exceptions that are internal to a desugaring.
   if (catch_type == Isolate::CAUGHT_BY_DESUGARING) return;
 
-  bool uncaught = (catch_type == Isolate::NOT_CAUGHT);
+  bool uncaught = catch_type == Isolate::NOT_CAUGHT;
   if (promise->IsJSObject()) {
     Handle<JSObject> jspromise = Handle<JSObject>::cast(promise);
     // Mark the promise as already having triggered a message.
     Handle<Symbol> key = isolate_->factory()->promise_debug_marker_symbol();
     JSObject::SetProperty(jspromise, key, key, STRICT).Assert();
     // Check whether the promise reject is considered an uncaught exception.
-    Handle<Object> has_reject_handler;
-    ASSIGN_RETURN_ON_EXCEPTION_VALUE(
-        isolate_, has_reject_handler,
-        PromiseHasUserDefinedRejectHandler(jspromise), /* void */);
-    uncaught = has_reject_handler->IsFalse(isolate_);
+    uncaught = !isolate_->PromiseHasUserDefinedRejectHandler(jspromise);
   }
   // Bail out if exception breaks are not active
   if (uncaught) {
diff --git a/src/debug/debug.h b/src/debug/debug.h
index 36f973c..c4e8c17 100644
--- a/src/debug/debug.h
+++ b/src/debug/debug.h
@@ -11,11 +11,11 @@
 #include "src/base/atomicops.h"
 #include "src/base/hashmap.h"
 #include "src/base/platform/platform.h"
-#include "src/debug/liveedit.h"
 #include "src/execution.h"
 #include "src/factory.h"
 #include "src/flags.h"
 #include "src/frames.h"
+#include "src/globals.h"
 #include "src/runtime/runtime.h"
 #include "src/source-position-table.h"
 #include "src/string-stream.h"
@@ -413,7 +413,7 @@
   void OnDebugBreak(Handle<Object> break_points_hit, bool auto_continue);
 
   void OnThrow(Handle<Object> exception);
-  void OnPromiseReject(Handle<JSObject> promise, Handle<Object> value);
+  void OnPromiseReject(Handle<Object> promise, Handle<Object> value);
   void OnCompileError(Handle<Script> script);
   void OnBeforeCompile(Handle<Script> script);
   void OnAfterCompile(Handle<Script> script);
@@ -489,7 +489,7 @@
 
   // Support for LiveEdit
   void FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
-                             LiveEdit::FrameDropMode mode);
+                             LiveEditFrameDropMode mode);
 
   // Threading support.
   char* ArchiveDebug(char* to);
@@ -594,9 +594,6 @@
   // Mirror cache handling.
   void ClearMirrorCache();
 
-  MaybeHandle<Object> PromiseHasUserDefinedRejectHandler(
-      Handle<JSObject> promise);
-
   void CallEventCallback(v8::DebugEvent event,
                          Handle<Object> exec_state,
                          Handle<Object> event_data,
@@ -704,7 +701,7 @@
 
     // Stores the way how LiveEdit has patched the stack. It is used when
     // debugger returns control back to user script.
-    LiveEdit::FrameDropMode frame_drop_mode_;
+    LiveEditFrameDropMode frame_drop_mode_;
 
     // Value of accumulator in interpreter frames. In non-interpreter frames
     // this value will be the hole.
diff --git a/src/debug/ia32/debug-ia32.cc b/src/debug/ia32/debug-ia32.cc
index 8e4dee7..47ec69e 100644
--- a/src/debug/ia32/debug-ia32.cc
+++ b/src/debug/ia32/debug-ia32.cc
@@ -4,8 +4,10 @@
 
 #if V8_TARGET_ARCH_IA32
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
+
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
 #include "src/ia32/frames-ia32.h"
 
 namespace v8 {
diff --git a/src/debug/liveedit.cc b/src/debug/liveedit.cc
index b51bb1a..b451842 100644
--- a/src/debug/liveedit.cc
+++ b/src/debug/liveedit.cc
@@ -14,7 +14,6 @@
 #include "src/global-handles.h"
 #include "src/isolate-inl.h"
 #include "src/messages.h"
-#include "src/parsing/parser.h"
 #include "src/source-position-table.h"
 #include "src/v8.h"
 #include "src/v8memory.h"
@@ -655,7 +654,7 @@
 
 
 void LiveEdit::InitializeThreadLocal(Debug* debug) {
-  debug->thread_local_.frame_drop_mode_ = LiveEdit::FRAMES_UNTOUCHED;
+  debug->thread_local_.frame_drop_mode_ = LIVE_EDIT_FRAMES_UNTOUCHED;
 }
 
 
@@ -663,20 +662,20 @@
   Code* code = NULL;
   Isolate* isolate = debug->isolate_;
   switch (debug->thread_local_.frame_drop_mode_) {
-    case FRAMES_UNTOUCHED:
+    case LIVE_EDIT_FRAMES_UNTOUCHED:
       return false;
-    case FRAME_DROPPED_IN_DEBUG_SLOT_CALL:
+    case LIVE_EDIT_FRAME_DROPPED_IN_DEBUG_SLOT_CALL:
       // Debug break slot stub does not return normally, instead it manually
       // cleans the stack and jumps. We should patch the jump address.
       code = isolate->builtins()->builtin(Builtins::kFrameDropper_LiveEdit);
       break;
-    case FRAME_DROPPED_IN_DIRECT_CALL:
+    case LIVE_EDIT_FRAME_DROPPED_IN_DIRECT_CALL:
       // Nothing to do, after_break_target is not used here.
       return true;
-    case FRAME_DROPPED_IN_RETURN_CALL:
+    case LIVE_EDIT_FRAME_DROPPED_IN_RETURN_CALL:
       code = isolate->builtins()->builtin(Builtins::kFrameDropper_LiveEdit);
       break;
-    case CURRENTLY_SET_MODE:
+    case LIVE_EDIT_CURRENTLY_SET_MODE:
       UNREACHABLE();
       break;
   }
@@ -1017,6 +1016,7 @@
           handle(shared_info->GetDebugInfo()));
     }
     shared_info->set_scope_info(new_shared_info->scope_info());
+    shared_info->set_outer_scope_info(new_shared_info->outer_scope_info());
     shared_info->DisableOptimization(kLiveEdit);
     // Update the type feedback vector, if needed.
     Handle<TypeFeedbackMetadata> new_feedback_metadata(
@@ -1303,7 +1303,7 @@
 // Returns error message or NULL.
 static const char* DropFrames(Vector<StackFrame*> frames, int top_frame_index,
                               int bottom_js_frame_index,
-                              LiveEdit::FrameDropMode* mode) {
+                              LiveEditFrameDropMode* mode) {
   if (!LiveEdit::kFrameDropperSupported) {
     return "Stack manipulations are not supported in this architecture.";
   }
@@ -1321,22 +1321,22 @@
   if (pre_top_frame_code ==
       isolate->builtins()->builtin(Builtins::kSlot_DebugBreak)) {
     // OK, we can drop debug break slot.
-    *mode = LiveEdit::FRAME_DROPPED_IN_DEBUG_SLOT_CALL;
+    *mode = LIVE_EDIT_FRAME_DROPPED_IN_DEBUG_SLOT_CALL;
   } else if (pre_top_frame_code ==
              isolate->builtins()->builtin(Builtins::kFrameDropper_LiveEdit)) {
     // OK, we can drop our own code.
     pre_top_frame = frames[top_frame_index - 2];
     top_frame = frames[top_frame_index - 1];
-    *mode = LiveEdit::CURRENTLY_SET_MODE;
+    *mode = LIVE_EDIT_CURRENTLY_SET_MODE;
     frame_has_padding = false;
   } else if (pre_top_frame_code ==
              isolate->builtins()->builtin(Builtins::kReturn_DebugBreak)) {
-    *mode = LiveEdit::FRAME_DROPPED_IN_RETURN_CALL;
+    *mode = LIVE_EDIT_FRAME_DROPPED_IN_RETURN_CALL;
   } else if (pre_top_frame_code->kind() == Code::STUB &&
              CodeStub::GetMajorKey(pre_top_frame_code) == CodeStub::CEntry) {
     // Entry from our unit tests on 'debugger' statement.
     // It's fine, we support this case.
-    *mode = LiveEdit::FRAME_DROPPED_IN_DIRECT_CALL;
+    *mode = LIVE_EDIT_FRAME_DROPPED_IN_DIRECT_CALL;
     // We don't have a padding from 'debugger' statement call.
     // Here the stub is CEntry, it's not debug-only and can't be padded.
     // If anyone would complain, a proxy padded stub could be added.
@@ -1348,13 +1348,13 @@
            isolate->builtins()->builtin(Builtins::kFrameDropper_LiveEdit));
     pre_top_frame = frames[top_frame_index - 3];
     top_frame = frames[top_frame_index - 2];
-    *mode = LiveEdit::CURRENTLY_SET_MODE;
+    *mode = LIVE_EDIT_CURRENTLY_SET_MODE;
     frame_has_padding = false;
   } else if (pre_top_frame_code->kind() == Code::BYTECODE_HANDLER) {
     // Interpreted bytecode takes up two stack frames, one for the bytecode
     // handler and one for the interpreter entry trampoline. Therefore we shift
     // up by one frame.
-    *mode = LiveEdit::FRAME_DROPPED_IN_DIRECT_CALL;
+    *mode = LIVE_EDIT_FRAME_DROPPED_IN_DIRECT_CALL;
     pre_top_frame = frames[top_frame_index - 2];
     top_frame = frames[top_frame_index - 1];
   } else {
@@ -1557,7 +1557,7 @@
     if (frame->is_java_script()) {
       SharedFunctionInfo* shared =
           JavaScriptFrame::cast(frame)->function()->shared();
-      if (shared->is_resumable()) {
+      if (IsResumableFunction(shared->kind())) {
         non_droppable_frame_found = true;
         non_droppable_reason = LiveEdit::FUNCTION_BLOCKED_UNDER_GENERATOR;
         break;
@@ -1605,7 +1605,7 @@
     return target.GetNotFoundMessage();
   }
 
-  LiveEdit::FrameDropMode drop_mode = LiveEdit::FRAMES_UNTOUCHED;
+  LiveEditFrameDropMode drop_mode = LIVE_EDIT_FRAMES_UNTOUCHED;
   const char* error_message =
       DropFrames(frames, top_frame_index, bottom_js_frame_index, &drop_mode);
 
@@ -1900,25 +1900,19 @@
   Scope* current_scope = scope;
   while (current_scope != NULL) {
     HandleScope handle_scope(isolate_);
-    ZoneList<Variable*> stack_list(current_scope->StackLocalCount(), zone_);
-    ZoneList<Variable*> context_list(current_scope->ContextLocalCount(), zone_);
-    ZoneList<Variable*> globals_list(current_scope->ContextGlobalCount(),
-                                     zone_);
-    current_scope->CollectStackAndContextLocals(&stack_list, &context_list,
-                                                &globals_list);
-    context_list.Sort(&Variable::CompareIndex);
-
-    for (int i = 0; i < context_list.length(); i++) {
-      SetElementSloppy(scope_info_list, scope_info_length,
-                       context_list[i]->name());
-      scope_info_length++;
-      SetElementSloppy(
-          scope_info_list, scope_info_length,
-          Handle<Smi>(Smi::FromInt(context_list[i]->index()), isolate_));
-      scope_info_length++;
+    ZoneList<Variable*>* locals = current_scope->locals();
+    for (int i = 0; i < locals->length(); i++) {
+      Variable* var = locals->at(i);
+      if (!var->IsContextSlot()) continue;
+      int context_index = var->index() - Context::MIN_CONTEXT_SLOTS;
+      int location = scope_info_length + context_index * 2;
+      SetElementSloppy(scope_info_list, location, var->name());
+      SetElementSloppy(scope_info_list, location + 1,
+                       handle(Smi::FromInt(var->index()), isolate_));
     }
+    scope_info_length += current_scope->ContextLocalCount() * 2;
     SetElementSloppy(scope_info_list, scope_info_length,
-                     Handle<Object>(isolate_->heap()->null_value(), isolate_));
+                     isolate_->factory()->null_value());
     scope_info_length++;
 
     current_scope = current_scope->outer_scope();
diff --git a/src/debug/liveedit.h b/src/debug/liveedit.h
index 784f828..2034dcb 100644
--- a/src/debug/liveedit.h
+++ b/src/debug/liveedit.h
@@ -72,20 +72,6 @@
 
 class LiveEdit : AllStatic {
  public:
-  // Describes how exactly a frame has been dropped from stack.
-  enum FrameDropMode {
-    // No frame has been dropped.
-    FRAMES_UNTOUCHED,
-    // The top JS frame had been calling debug break slot stub. Patch the
-    // address this stub jumps to in the end.
-    FRAME_DROPPED_IN_DEBUG_SLOT_CALL,
-    // The top JS frame had been calling some C++ function. The return address
-    // gets patched automatically.
-    FRAME_DROPPED_IN_DIRECT_CALL,
-    FRAME_DROPPED_IN_RETURN_CALL,
-    CURRENTLY_SET_MODE
-  };
-
   static void InitializeThreadLocal(Debug* debug);
 
   static bool SetAfterBreakTarget(Debug* debug);
diff --git a/src/debug/mips/debug-mips.cc b/src/debug/mips/debug-mips.cc
index 49320d8..4d8b54f 100644
--- a/src/debug/mips/debug-mips.cc
+++ b/src/debug/mips/debug-mips.cc
@@ -4,9 +4,11 @@
 
 #if V8_TARGET_ARCH_MIPS
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/debug/mips64/debug-mips64.cc b/src/debug/mips64/debug-mips64.cc
index 2e967d7..2a6ce7b 100644
--- a/src/debug/mips64/debug-mips64.cc
+++ b/src/debug/mips64/debug-mips64.cc
@@ -4,9 +4,11 @@
 
 #if V8_TARGET_ARCH_MIPS64
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/debug/ppc/debug-ppc.cc b/src/debug/ppc/debug-ppc.cc
index 7facf95..e57aa3c 100644
--- a/src/debug/ppc/debug-ppc.cc
+++ b/src/debug/ppc/debug-ppc.cc
@@ -4,9 +4,11 @@
 
 #if V8_TARGET_ARCH_PPC
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/debug/s390/debug-s390.cc b/src/debug/s390/debug-s390.cc
index 9c33b95..b745d5b 100644
--- a/src/debug/s390/debug-s390.cc
+++ b/src/debug/s390/debug-s390.cc
@@ -6,9 +6,11 @@
 
 #if V8_TARGET_ARCH_S390
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
+
 namespace v8 {
 namespace internal {
 
diff --git a/src/debug/x64/debug-x64.cc b/src/debug/x64/debug-x64.cc
index 910d1ca..4f80e18 100644
--- a/src/debug/x64/debug-x64.cc
+++ b/src/debug/x64/debug-x64.cc
@@ -4,10 +4,11 @@
 
 #if V8_TARGET_ARCH_X64
 
-#include "src/assembler.h"
-#include "src/codegen.h"
 #include "src/debug/debug.h"
 
+#include "src/assembler.h"
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/debug/x87/debug-x87.cc b/src/debug/x87/debug-x87.cc
index 1cbdf45..c29eac1 100644
--- a/src/debug/x87/debug-x87.cc
+++ b/src/debug/x87/debug-x87.cc
@@ -4,8 +4,10 @@
 
 #if V8_TARGET_ARCH_X87
 
-#include "src/codegen.h"
 #include "src/debug/debug.h"
+
+#include "src/codegen.h"
+#include "src/debug/liveedit.h"
 #include "src/x87/frames-x87.h"
 
 namespace v8 {
diff --git a/src/deoptimize-reason.cc b/src/deoptimize-reason.cc
index 87c8905..b0ee780 100644
--- a/src/deoptimize-reason.cc
+++ b/src/deoptimize-reason.cc
@@ -23,7 +23,7 @@
   return static_cast<uint8_t>(reason);
 }
 
-char const* const DeoptimizeReasonToString(DeoptimizeReason reason) {
+char const* DeoptimizeReasonToString(DeoptimizeReason reason) {
   static char const* kDeoptimizeReasonStrings[] = {
 #define DEOPTIMIZE_REASON(Name, message) message,
       DEOPTIMIZE_REASON_LIST(DEOPTIMIZE_REASON)
diff --git a/src/deoptimize-reason.h b/src/deoptimize-reason.h
index 60e0a59..d28ec47 100644
--- a/src/deoptimize-reason.h
+++ b/src/deoptimize-reason.h
@@ -23,6 +23,7 @@
   V(ForcedDeoptToRuntime, "Forced deopt to runtime")                          \
   V(Hole, "hole")                                                             \
   V(InstanceMigrationFailed, "instance migration failed")                     \
+  V(InsufficientTypeFeedbackForCall, "Insufficient type feedback for call")   \
   V(InsufficientTypeFeedbackForCallWithArguments,                             \
     "Insufficient type feedback for call with arguments")                     \
   V(FastPathFailed, "Falling off the fast path")                              \
@@ -68,7 +69,6 @@
     "Unexpected cell contents in global store")                               \
   V(UnexpectedObject, "unexpected object")                                    \
   V(UnexpectedRHSOfBinaryOperation, "Unexpected RHS of binary operation")     \
-  V(UninitializedBoilerplateLiterals, "Uninitialized boilerplate literals")   \
   V(UnknownMapInPolymorphicAccess, "Unknown map in polymorphic access")       \
   V(UnknownMapInPolymorphicCall, "Unknown map in polymorphic call")           \
   V(UnknownMapInPolymorphicElementAccess,                                     \
@@ -90,7 +90,7 @@
 
 size_t hash_value(DeoptimizeReason reason);
 
-char const* const DeoptimizeReasonToString(DeoptimizeReason reason);
+char const* DeoptimizeReasonToString(DeoptimizeReason reason);
 
 }  // namespace internal
 }  // namespace v8
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index d4756ff..971de9e 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -73,13 +73,8 @@
                               Address from,
                               int fp_to_sp_delta,
                               Isolate* isolate) {
-  Deoptimizer* deoptimizer = new Deoptimizer(isolate,
-                                             function,
-                                             type,
-                                             bailout_id,
-                                             from,
-                                             fp_to_sp_delta,
-                                             NULL);
+  Deoptimizer* deoptimizer = new Deoptimizer(isolate, function, type,
+                                             bailout_id, from, fp_to_sp_delta);
   CHECK(isolate->deoptimizer_data()->current_ == NULL);
   isolate->deoptimizer_data()->current_ = deoptimizer;
   return deoptimizer;
@@ -108,23 +103,6 @@
   return result;
 }
 
-
-int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
-  if (jsframe_index == 0) return 0;
-
-  int frame_index = 0;
-  while (jsframe_index >= 0) {
-    FrameDescription* frame = output_[frame_index];
-    if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
-      jsframe_index--;
-    }
-    frame_index++;
-  }
-
-  return frame_index - 1;
-}
-
-
 DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
     JavaScriptFrame* frame,
     int jsframe_index,
@@ -366,8 +344,7 @@
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::DeoptimizeCode);
   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::DeoptimizeCode);
+  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
   if (FLAG_trace_deopt) {
     CodeTracer::Scope scope(isolate->GetCodeTracer());
     PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
@@ -388,8 +365,7 @@
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::DeoptimizeCode);
   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::DeoptimizeCode);
+  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
   if (FLAG_trace_deopt) {
     CodeTracer::Scope scope(isolate->GetCodeTracer());
     PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
@@ -422,8 +398,7 @@
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::DeoptimizeCode);
   TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::DeoptimizeCode);
+  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
   Code* code = function->code();
   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
     // Mark the code for deoptimization and unlink any functions that also
@@ -439,19 +414,9 @@
   deoptimizer->DoComputeOutputFrames();
 }
 
-
-bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
-                                  StackFrame::Type frame_type) {
-  switch (deopt_type) {
-    case EAGER:
-    case SOFT:
-    case LAZY:
-      return (frame_type == StackFrame::STUB)
-          ? FLAG_trace_stub_failures
-          : FLAG_trace_deopt;
-  }
-  FATAL("Unsupported deopt type");
-  return false;
+bool Deoptimizer::TraceEnabledFor(StackFrame::Type frame_type) {
+  return (frame_type == StackFrame::STUB) ? FLAG_trace_stub_failures
+                                          : FLAG_trace_deopt;
 }
 
 
@@ -467,7 +432,7 @@
 
 Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                          BailoutType type, unsigned bailout_id, Address from,
-                         int fp_to_sp_delta, Code* optimized_code)
+                         int fp_to_sp_delta)
     : isolate_(isolate),
       function_(function),
       bailout_id_(bailout_id),
@@ -510,7 +475,7 @@
       function->shared()->set_opt_count(opt_count);
     }
   }
-  compiled_code_ = FindOptimizedCode(function, optimized_code);
+  compiled_code_ = FindOptimizedCode(function);
 #if DEBUG
   DCHECK(compiled_code_ != NULL);
   if (type == EAGER || type == SOFT || type == LAZY) {
@@ -521,8 +486,9 @@
   StackFrame::Type frame_type = function == NULL
       ? StackFrame::STUB
       : StackFrame::JAVA_SCRIPT;
-  trace_scope_ = TraceEnabledFor(type, frame_type) ?
-      new CodeTracer::Scope(isolate->GetCodeTracer()) : NULL;
+  trace_scope_ = TraceEnabledFor(frame_type)
+                     ? new CodeTracer::Scope(isolate->GetCodeTracer())
+                     : NULL;
 #ifdef DEBUG
   CHECK(AllowHeapAllocation::IsAllowed());
   disallow_heap_allocation_ = new DisallowHeapAllocation();
@@ -539,21 +505,11 @@
   input_->SetFrameType(frame_type);
 }
 
-
-Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
-                                     Code* optimized_code) {
-  switch (bailout_type_) {
-    case Deoptimizer::SOFT:
-    case Deoptimizer::EAGER:
-    case Deoptimizer::LAZY: {
-      Code* compiled_code = FindDeoptimizingCode(from_);
-      return (compiled_code == NULL)
-          ? static_cast<Code*>(isolate_->FindCodeObject(from_))
-          : compiled_code;
-    }
-  }
-  FATAL("Could not find code for optimized function");
-  return NULL;
+Code* Deoptimizer::FindOptimizedCode(JSFunction* function) {
+  Code* compiled_code = FindDeoptimizingCode(from_);
+  return (compiled_code == NULL)
+             ? static_cast<Code*>(isolate_->FindCodeObject(from_))
+             : compiled_code;
 }
 
 
@@ -912,6 +868,10 @@
                                  output_offset);
   }
 
+  if (trace_scope_ != nullptr) {
+    PrintF(trace_scope_->file(), "    -------------------------\n");
+  }
+
   // There are no translation commands for the caller's pc and fp, the
   // context, and the function.  Synthesize their values and set them up
   // explicitly.
@@ -969,11 +929,11 @@
   // so long as we don't inline functions that need local contexts.
   output_offset -= kPointerSize;
 
-  TranslatedFrame::iterator context_pos = value_iterator;
-  int context_input_index = input_index;
   // When deoptimizing into a catch block, we need to take the context
   // from just above the top of the operand stack (we push the context
   // at the entry of the try block).
+  TranslatedFrame::iterator context_pos = value_iterator;
+  int context_input_index = input_index;
   if (goto_catch_handler) {
     for (unsigned i = 0; i < height + 1; ++i) {
       context_pos++;
@@ -991,10 +951,6 @@
   }
   value = reinterpret_cast<intptr_t>(context);
   output_frame->SetContext(value);
-  if (is_topmost) {
-    Register context_reg = JavaScriptFrame::context_register();
-    output_frame->SetRegister(context_reg.code(), value);
-  }
   WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                      "context    ");
   if (context == isolate_->heap()->arguments_marker()) {
@@ -1011,6 +967,10 @@
   value = reinterpret_cast<intptr_t>(function);
   WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
 
+  if (trace_scope_ != nullptr) {
+    PrintF(trace_scope_->file(), "    -------------------------\n");
+  }
+
   // Translate the rest of the frame.
   for (unsigned i = 0; i < height; ++i) {
     output_offset -= kPointerSize;
@@ -1060,6 +1020,15 @@
           : FullCodeGenerator::BailoutStateField::decode(pc_and_state);
   output_frame->SetState(Smi::FromInt(static_cast<int>(state)));
 
+  // Clear the context register. The context might be a de-materialized object
+  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
+  // safety we use Smi(0) instead of the potential {arguments_marker} here.
+  if (is_topmost) {
+    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+    Register context_reg = JavaScriptFrame::context_register();
+    output_frame->SetRegister(context_reg.code(), context_value);
+  }
+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
     Builtins* builtins = isolate_->builtins();
@@ -1082,11 +1051,20 @@
   SharedFunctionInfo* shared = translated_frame->raw_shared_info();
 
   TranslatedFrame::iterator value_iterator = translated_frame->begin();
+  bool is_bottommost = (0 == frame_index);
+  bool is_topmost = (output_count_ - 1 == frame_index);
   int input_index = 0;
 
   int bytecode_offset = translated_frame->node_id().ToInt();
   unsigned height = translated_frame->height();
   unsigned height_in_bytes = height * kPointerSize;
+
+  // All translations for interpreted frames contain the accumulator and hence
+  // are assumed to be in bailout state {BailoutState::TOS_REGISTER}. However
+  // such a state is only supported for the topmost frame. We need to skip
+  // pushing the accumulator for any non-topmost frame.
+  if (!is_topmost) height_in_bytes -= kPointerSize;
+
   JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
   value_iterator++;
   input_index++;
@@ -1113,8 +1091,6 @@
       FrameDescription(output_frame_size, parameter_count);
   output_frame->SetFrameType(StackFrame::INTERPRETED);
 
-  bool is_bottommost = (0 == frame_index);
-  bool is_topmost = (output_count_ - 1 == frame_index);
   CHECK(frame_index >= 0 && frame_index < output_count_);
   CHECK_NULL(output_[frame_index]);
   output_[frame_index] = output_frame;
@@ -1137,6 +1113,10 @@
                                  output_offset);
   }
 
+  if (trace_scope_ != nullptr) {
+    PrintF(trace_scope_->file(), "    -------------------------\n");
+  }
+
   // There are no translation commands for the caller's pc and fp, the
   // context, the function, new.target and the bytecode offset.  Synthesize
   // their values and set them up
@@ -1193,7 +1173,6 @@
   // For the bottommost output frame the context can be gotten from the input
   // frame. For all subsequent output frames it can be gotten from the function
   // so long as we don't inline functions that need local contexts.
-  Register context_reg = InterpretedFrame::context_register();
   output_offset -= kPointerSize;
 
   // When deoptimizing into a catch block, we need to take the context
@@ -1210,13 +1189,16 @@
   }
   // Read the context from the translations.
   Object* context = context_pos->GetRawValue();
-  // The context should not be a placeholder for a materialized object.
-  CHECK(context != isolate_->heap()->arguments_marker());
   value = reinterpret_cast<intptr_t>(context);
   output_frame->SetContext(value);
-  if (is_topmost) output_frame->SetRegister(context_reg.code(), value);
   WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                      "context    ");
+  if (context == isolate_->heap()->arguments_marker()) {
+    Address output_address =
+        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
+        output_offset;
+    values_to_materialize_.push_back({output_address, context_pos});
+  }
   value_iterator++;
   input_index++;
 
@@ -1248,6 +1230,10 @@
   WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset,
                      "bytecode offset ");
 
+  if (trace_scope_ != nullptr) {
+    PrintF(trace_scope_->file(), "    -------------------------\n");
+  }
+
   // Translate the rest of the interpreter registers in the frame.
   for (unsigned i = 0; i < height - 1; ++i) {
     output_offset -= kPointerSize;
@@ -1255,20 +1241,30 @@
                                  output_offset);
   }
 
-  // Put the accumulator on the stack. It will be popped by the
-  // InterpreterNotifyDeopt builtin (possibly after materialization).
-  output_offset -= kPointerSize;
-  if (goto_catch_handler) {
-    // If we are lazy deopting to a catch handler, we set the accumulator to
-    // the exception (which lives in the result register).
-    intptr_t accumulator_value =
-        input_->GetRegister(FullCodeGenerator::result_register().code());
-    WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
-                       frame_index, output_offset, "accumulator ");
-    value_iterator++;
+  // Translate the accumulator register (depending on frame position).
+  if (is_topmost) {
+    // For the topmost frame, put the accumulator on the stack. The bailout state
+    // for interpreted frames is always set to {BailoutState::TOS_REGISTER} and
+    // the {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
+    // after materialization).
+    output_offset -= kPointerSize;
+    if (goto_catch_handler) {
+      // If we are lazy deopting to a catch handler, we set the accumulator to
+      // the exception (which lives in the result register).
+      intptr_t accumulator_value =
+          input_->GetRegister(FullCodeGenerator::result_register().code());
+      WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
+                         frame_index, output_offset, "accumulator ");
+      value_iterator++;
+    } else {
+      WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
+                                   output_offset, "accumulator ");
+    }
   } else {
-    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
-                                 output_offset);
+    // For non-topmost frames, skip the accumulator translation. For those
+    // frames, the return value from the callee will become the accumulator.
+    value_iterator++;
+    input_index++;
   }
   CHECK_EQ(0u, output_offset);
 
@@ -1292,6 +1288,15 @@
     }
   }
 
+  // Clear the context register. The context might be a de-materialized object
+  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
+  // safety we use Smi(0) instead of the potential {arguments_marker} here.
+  if (is_topmost) {
+    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+    Register context_reg = JavaScriptFrame::context_register();
+    output_frame->SetRegister(context_reg.code(), context_value);
+  }
+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
     Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
@@ -1595,10 +1600,6 @@
   output_offset -= kPointerSize;
   value = output_[frame_index - 1]->GetContext();
   output_frame->SetFrameSlot(output_offset, value);
-  if (is_topmost) {
-    Register context_reg = JavaScriptFrame::context_register();
-    output_frame->SetRegister(context_reg.code(), value);
-  }
   DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
 
   // The allocation site.
@@ -1654,6 +1655,15 @@
     }
   }
 
+  // Clear the context register. The context might be a de-materialized object
+  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
+  // safety we use Smi(0) instead of the potential {arguments_marker} here.
+  if (is_topmost) {
+    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+    Register context_reg = JavaScriptFrame::context_register();
+    output_frame->SetRegister(context_reg.code(), context_value);
+  }
+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
     Builtins* builtins = isolate_->builtins();
@@ -1780,10 +1790,6 @@
   output_offset -= kPointerSize;
   value = output_[frame_index - 1]->GetContext();
   output_frame->SetFrameSlot(output_offset, value);
-  if (is_topmost) {
-    Register context_reg = JavaScriptFrame::context_register();
-    output_frame->SetRegister(context_reg.code(), value);
-  }
   DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
 
   // Skip receiver.
@@ -1833,6 +1839,15 @@
     }
   }
 
+  // Clear the context register. The context might be a de-materialized object
+  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
+  // safety we use Smi(0) instead of the potential {arguments_marker} here.
+  if (is_topmost) {
+    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+    Register context_reg = JavaScriptFrame::context_register();
+    output_frame->SetRegister(context_reg.code(), context_value);
+  }
+
   // Set the continuation for the topmost frame.
   if (is_topmost) {
     Builtins* builtins = isolate_->builtins();
@@ -2214,15 +2229,6 @@
   return height * kPointerSize;
 }
 
-
-Object* Deoptimizer::ComputeLiteral(int index) const {
-  DeoptimizationInputData* data =
-      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());
-  FixedArray* literals = data->LiteralArray();
-  return literals->get(index);
-}
-
-
 void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
                                                    BailoutType type,
                                                    int max_entry_id) {
@@ -2281,33 +2287,6 @@
   }
 }
 
-
-int FrameDescription::ComputeFixedSize() {
-  if (type_ == StackFrame::INTERPRETED) {
-    return InterpreterFrameConstants::kFixedFrameSize +
-           parameter_count() * kPointerSize;
-  } else {
-    return StandardFrameConstants::kFixedFrameSize +
-           parameter_count() * kPointerSize;
-  }
-}
-
-
-unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
-  if (slot_index >= 0) {
-    // Local or spill slots. Skip the fixed part of the frame
-    // including all arguments.
-    unsigned base = GetFrameSize() - ComputeFixedSize();
-    return base - ((slot_index + 1) * kPointerSize);
-  } else {
-    // Incoming parameter.
-    int arg_size = parameter_count() * kPointerSize;
-    unsigned base = GetFrameSize() - arg_size;
-    return base - ((slot_index + 1) * kPointerSize);
-  }
-}
-
-
 void TranslationBuffer::Add(int32_t value, Zone* zone) {
   // This wouldn't handle kMinInt correctly if it ever encountered it.
   DCHECK(value != kMinInt);
@@ -3746,8 +3725,8 @@
           return object;
         }
         case JS_ARRAY_TYPE: {
-          Handle<JSArray> object =
-              isolate_->factory()->NewJSArray(0, map->elements_kind());
+          Handle<JSArray> object = Handle<JSArray>::cast(
+              isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
           slot->value_ = object;
           Handle<Object> properties = MaterializeAt(frame_index, value_index);
           Handle<Object> elements = MaterializeAt(frame_index, value_index);
@@ -3758,14 +3737,15 @@
           return object;
         }
         case JS_FUNCTION_TYPE: {
+          Handle<SharedFunctionInfo> temporary_shared =
+              isolate_->factory()->NewSharedFunctionInfo(
+                  isolate_->factory()->empty_string(), MaybeHandle<Code>(),
+                  false);
           Handle<JSFunction> object =
               isolate_->factory()->NewFunctionFromSharedFunctionInfo(
-                  handle(isolate_->object_function()->shared()),
-                  handle(isolate_->context()));
+                  map, temporary_shared, isolate_->factory()->undefined_value(),
+                  NOT_TENURED);
           slot->value_ = object;
-          // We temporarily allocated a JSFunction for the {Object} function
-          // within the current context, to break cycles in the object graph.
-          // The correct function and context will be set below once available.
           Handle<Object> properties = MaterializeAt(frame_index, value_index);
           Handle<Object> elements = MaterializeAt(frame_index, value_index);
           Handle<Object> prototype = MaterializeAt(frame_index, value_index);
@@ -3786,6 +3766,36 @@
           CHECK(next_link->IsUndefined(isolate_));
           return object;
         }
+        case CONS_STRING_TYPE: {
+          Handle<ConsString> object = Handle<ConsString>::cast(
+              isolate_->factory()
+                  ->NewConsString(isolate_->factory()->undefined_string(),
+                                  isolate_->factory()->undefined_string())
+                  .ToHandleChecked());
+          slot->value_ = object;
+          Handle<Object> hash = MaterializeAt(frame_index, value_index);
+          Handle<Object> length = MaterializeAt(frame_index, value_index);
+          Handle<Object> first = MaterializeAt(frame_index, value_index);
+          Handle<Object> second = MaterializeAt(frame_index, value_index);
+          object->set_map(*map);
+          object->set_length(Smi::cast(*length)->value());
+          object->set_first(String::cast(*first));
+          object->set_second(String::cast(*second));
+          CHECK(hash->IsNumber());  // The {Name::kEmptyHashField} value.
+          return object;
+        }
+        case CONTEXT_EXTENSION_TYPE: {
+          Handle<ContextExtension> object =
+              isolate_->factory()->NewContextExtension(
+                  isolate_->factory()->NewScopeInfo(1),
+                  isolate_->factory()->undefined_value());
+          slot->value_ = object;
+          Handle<Object> scope_info = MaterializeAt(frame_index, value_index);
+          Handle<Object> extension = MaterializeAt(frame_index, value_index);
+          object->set_scope_info(ScopeInfo::cast(*scope_info));
+          object->set_extension(*extension);
+          return object;
+        }
         case FIXED_ARRAY_TYPE: {
           Handle<Object> lengthObject = MaterializeAt(frame_index, value_index);
           int32_t length = 0;
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 7822d1c..4fb7851 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -383,8 +383,7 @@
     bool needs_frame;
   };
 
-  static bool TraceEnabledFor(BailoutType deopt_type,
-                              StackFrame::Type frame_type);
+  static bool TraceEnabledFor(StackFrame::Type frame_type);
   static const char* MessageFor(BailoutType type);
 
   int output_count() const { return output_count_; }
@@ -500,8 +499,6 @@
     int count_;
   };
 
-  int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
-
   static size_t GetMaxDeoptTableSize();
 
   static void EnsureCodeForDeoptimizationEntry(Isolate* isolate,
@@ -514,14 +511,9 @@
   static const int kMinNumberOfEntries = 64;
   static const int kMaxNumberOfEntries = 16384;
 
-  Deoptimizer(Isolate* isolate,
-              JSFunction* function,
-              BailoutType type,
-              unsigned bailout_id,
-              Address from,
-              int fp_to_sp_delta,
-              Code* optimized_code);
-  Code* FindOptimizedCode(JSFunction* function, Code* optimized_code);
+  Deoptimizer(Isolate* isolate, JSFunction* function, BailoutType type,
+              unsigned bailout_id, Address from, int fp_to_sp_delta);
+  Code* FindOptimizedCode(JSFunction* function);
   void PrintFunctionName();
   void DeleteFrameDescriptions();
 
@@ -560,8 +552,6 @@
   static unsigned ComputeIncomingArgumentSize(SharedFunctionInfo* shared);
   static unsigned ComputeOutgoingArgumentSize(Code* code, unsigned bailout_id);
 
-  Object* ComputeLiteral(int index) const;
-
   static void GenerateDeoptimizationEntries(
       MacroAssembler* masm, int count, BailoutType type);
 
@@ -711,8 +701,6 @@
     return static_cast<uint32_t>(frame_size_);
   }
 
-  unsigned GetOffsetFromSlotIndex(int slot_index);
-
   intptr_t GetFrameSlot(unsigned offset) {
     return *GetFrameSlotPointer(offset);
   }
@@ -833,8 +821,6 @@
     return reinterpret_cast<intptr_t*>(
         reinterpret_cast<Address>(this) + frame_content_offset() + offset);
   }
-
-  int ComputeFixedSize();
 };
 
 
diff --git a/src/effects.h b/src/effects.h
index 0204718..f8b1bd9 100644
--- a/src/effects.h
+++ b/src/effects.h
@@ -5,7 +5,7 @@
 #ifndef V8_EFFECTS_H_
 #define V8_EFFECTS_H_
 
-#include "src/types.h"
+#include "src/ast/ast-types.h"
 
 namespace v8 {
 namespace internal {
@@ -28,31 +28,31 @@
   enum Modality { POSSIBLE, DEFINITE };
 
   Modality modality;
-  Bounds bounds;
+  AstBounds bounds;
 
   Effect() : modality(DEFINITE) {}
-  explicit Effect(Bounds b, Modality m = DEFINITE) : modality(m), bounds(b) {}
+  explicit Effect(AstBounds b, Modality m = DEFINITE)
+      : modality(m), bounds(b) {}
 
   // The unknown effect.
   static Effect Unknown(Zone* zone) {
-    return Effect(Bounds::Unbounded(), POSSIBLE);
+    return Effect(AstBounds::Unbounded(), POSSIBLE);
   }
 
   static Effect Forget(Zone* zone) {
-    return Effect(Bounds::Unbounded(), DEFINITE);
+    return Effect(AstBounds::Unbounded(), DEFINITE);
   }
 
   // Sequential composition, as in 'e1; e2'.
   static Effect Seq(Effect e1, Effect e2, Zone* zone) {
     if (e2.modality == DEFINITE) return e2;
-    return Effect(Bounds::Either(e1.bounds, e2.bounds, zone), e1.modality);
+    return Effect(AstBounds::Either(e1.bounds, e2.bounds, zone), e1.modality);
   }
 
   // Alternative composition, as in 'cond ? e1 : e2'.
   static Effect Alt(Effect e1, Effect e2, Zone* zone) {
-    return Effect(
-        Bounds::Either(e1.bounds, e2.bounds, zone),
-        e1.modality == POSSIBLE ? POSSIBLE : e2.modality);
+    return Effect(AstBounds::Either(e1.bounds, e2.bounds, zone),
+                  e1.modality == POSSIBLE ? POSSIBLE : e2.modality);
   }
 };
 
@@ -84,10 +84,10 @@
         ? locator.value() : Effect::Unknown(Base::zone());
   }
 
-  Bounds LookupBounds(Var var) {
+  AstBounds LookupBounds(Var var) {
     Effect effect = Lookup(var);
-    return effect.modality == Effect::DEFINITE
-        ? effect.bounds : Bounds::Unbounded();
+    return effect.modality == Effect::DEFINITE ? effect.bounds
+                                               : AstBounds::Unbounded();
   }
 
   // Sequential composition.
diff --git a/src/elements.cc b/src/elements.cc
index 56d8001..fb73d6c 100644
--- a/src/elements.cc
+++ b/src/elements.cc
@@ -911,6 +911,30 @@
     Subclass::GrowCapacityAndConvertImpl(object, capacity);
   }
 
+  bool GrowCapacity(Handle<JSObject> object, uint32_t index) final {
+    // This function is intended to be called from optimized code. We don't
+    // want to trigger lazy deopts there, so refuse to handle cases that would.
+    if (object->map()->is_prototype_map() ||
+        object->WouldConvertToSlowElements(index)) {
+      return false;
+    }
+    Handle<FixedArrayBase> old_elements(object->elements());
+    uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1);
+    DCHECK(static_cast<uint32_t>(old_elements->length()) < new_capacity);
+    Handle<FixedArrayBase> elements =
+        ConvertElementsWithCapacity(object, old_elements, kind(), new_capacity);
+
+    DCHECK_EQ(object->GetElementsKind(), kind());
+    // Transition through the allocation site as well if present.
+    if (JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kCheckOnly>(
+            object, kind())) {
+      return false;
+    }
+
+    object->set_elements(*elements);
+    return true;
+  }
+
   void Delete(Handle<JSObject> obj, uint32_t entry) final {
     Subclass::DeleteImpl(obj, entry);
   }
@@ -1165,13 +1189,13 @@
   static uint32_t GetEntryForIndexImpl(JSObject* holder,
                                        FixedArrayBase* backing_store,
                                        uint32_t index, PropertyFilter filter) {
+    uint32_t length = Subclass::GetMaxIndex(holder, backing_store);
     if (IsHoleyElementsKind(kind())) {
-      return index < Subclass::GetCapacityImpl(holder, backing_store) &&
+      return index < length &&
                      !BackingStore::cast(backing_store)->is_the_hole(index)
                  ? index
                  : kMaxUInt32;
     } else {
-      uint32_t length = Subclass::GetMaxIndex(holder, backing_store);
       return index < length ? index : kMaxUInt32;
     }
   }
@@ -2922,8 +2946,7 @@
     FixedArray* parameter_map = FixedArray::cast(parameters);
     uint32_t length = parameter_map->length() - 2;
     if (entry < length) {
-      return !GetParameterMapArg(parameter_map, entry)
-                  ->IsTheHole(parameter_map->GetIsolate());
+      return HasParameterMapArg(parameter_map, entry);
     }
 
     FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
@@ -2951,8 +2974,7 @@
                                        FixedArrayBase* parameters,
                                        uint32_t index, PropertyFilter filter) {
     FixedArray* parameter_map = FixedArray::cast(parameters);
-    Object* probe = GetParameterMapArg(parameter_map, index);
-    if (!probe->IsTheHole(holder->GetIsolate())) return index;
+    if (HasParameterMapArg(parameter_map, index)) return index;
 
     FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
     uint32_t entry = ArgumentsAccessor::GetEntryForIndexImpl(holder, arguments,
@@ -2971,11 +2993,11 @@
     return ArgumentsAccessor::GetDetailsImpl(arguments, entry - length);
   }
 
-  static Object* GetParameterMapArg(FixedArray* parameter_map, uint32_t index) {
+  static bool HasParameterMapArg(FixedArray* parameter_map, uint32_t index) {
     uint32_t length = parameter_map->length() - 2;
-    return index < length
-               ? parameter_map->get(index + 2)
-               : Object::cast(parameter_map->GetHeap()->the_hole_value());
+    if (index >= length) return false;
+    return !parameter_map->get(index + 2)->IsTheHole(
+        parameter_map->GetIsolate());
   }
 
   static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
@@ -3012,7 +3034,7 @@
       Handle<FixedArrayBase> backing_store, GetKeysConversion convert,
       PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
       uint32_t insertion_index = 0) {
-    FixedArray* parameter_map = FixedArray::cast(*backing_store);
+    Handle<FixedArray> parameter_map(FixedArray::cast(*backing_store), isolate);
     uint32_t length = parameter_map->length() - 2;
 
     for (uint32_t i = 0; i < length; ++i) {
@@ -3038,18 +3060,19 @@
                                        uint32_t start_from, uint32_t length) {
     DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
     Handle<Map> original_map = handle(object->map(), isolate);
-    FixedArray* parameter_map = FixedArray::cast(object->elements());
+    Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()),
+                                     isolate);
     bool search_for_hole = value->IsUndefined(isolate);
 
     for (uint32_t k = start_from; k < length; ++k) {
       uint32_t entry =
-          GetEntryForIndexImpl(*object, parameter_map, k, ALL_PROPERTIES);
+          GetEntryForIndexImpl(*object, *parameter_map, k, ALL_PROPERTIES);
       if (entry == kMaxUInt32) {
         if (search_for_hole) return Just(true);
         continue;
       }
 
-      Handle<Object> element_k = GetImpl(parameter_map, entry);
+      Handle<Object> element_k = GetImpl(*parameter_map, entry);
 
       if (element_k->IsAccessorPair()) {
         LookupIterator it(isolate, object, k, LookupIterator::OWN);
@@ -3078,16 +3101,17 @@
                                          uint32_t start_from, uint32_t length) {
     DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
     Handle<Map> original_map = handle(object->map(), isolate);
-    FixedArray* parameter_map = FixedArray::cast(object->elements());
+    Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()),
+                                     isolate);
 
     for (uint32_t k = start_from; k < length; ++k) {
       uint32_t entry =
-          GetEntryForIndexImpl(*object, parameter_map, k, ALL_PROPERTIES);
+          GetEntryForIndexImpl(*object, *parameter_map, k, ALL_PROPERTIES);
       if (entry == kMaxUInt32) {
         continue;
       }
 
-      Handle<Object> element_k = GetImpl(parameter_map, entry);
+      Handle<Object> element_k = GetImpl(*parameter_map, entry);
 
       if (element_k->IsAccessorPair()) {
         LookupIterator it(isolate, object, k, LookupIterator::OWN);
diff --git a/src/elements.h b/src/elements.h
index 1ffd4d9..76e1aa6 100644
--- a/src/elements.h
+++ b/src/elements.h
@@ -114,6 +114,9 @@
                                       Handle<Map> map) = 0;
   virtual void GrowCapacityAndConvert(Handle<JSObject> object,
                                       uint32_t capacity) = 0;
+  // Unlike GrowCapacityAndConvert, this does not attempt to convert the
+  // backing store; it simply returns false in that case.
+  virtual bool GrowCapacity(Handle<JSObject> object, uint32_t index) = 0;
 
   static void InitializeOncePerProcess();
   static void TearDown();
diff --git a/src/execution.cc b/src/execution.cc
index c42d164..59421c7 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -136,8 +136,6 @@
       PrintDeserializedCodeInfo(Handle<JSFunction>::cast(target));
     }
     RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::JS_Execution);
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-        isolate, &tracing::TraceEventStatsTable::JS_Execution);
     value = CALL_GENERATED_CODE(isolate, stub_entry, orig_func, func, recv,
                                 argc, argv);
   }
@@ -436,31 +434,6 @@
 // --- C a l l s   t o   n a t i v e s ---
 
 
-Handle<String> Execution::GetStackTraceLine(Handle<Object> recv,
-                                            Handle<JSFunction> fun,
-                                            Handle<Object> pos,
-                                            Handle<Object> is_global) {
-  Isolate* isolate = fun->GetIsolate();
-  Handle<Object> strict_mode = isolate->factory()->ToBoolean(false);
-
-  MaybeHandle<Object> maybe_callsite =
-      CallSiteUtils::Construct(isolate, recv, fun, pos, strict_mode);
-  if (maybe_callsite.is_null()) {
-    isolate->clear_pending_exception();
-    return isolate->factory()->empty_string();
-  }
-
-  MaybeHandle<String> maybe_to_string =
-      CallSiteUtils::ToString(isolate, maybe_callsite.ToHandleChecked());
-  if (maybe_to_string.is_null()) {
-    isolate->clear_pending_exception();
-    return isolate->factory()->empty_string();
-  }
-
-  return maybe_to_string.ToHandleChecked();
-}
-
-
 void StackGuard::HandleGCInterrupt() {
   if (CheckAndClearInterrupt(GC_REQUEST)) {
     isolate_->heap()->HandleGCRequest();
diff --git a/src/execution.h b/src/execution.h
index 52c7628..6f4bb33 100644
--- a/src/execution.h
+++ b/src/execution.h
@@ -21,11 +21,9 @@
   // When the function called is not in strict mode, receiver is
   // converted to an object.
   //
-  MUST_USE_RESULT static MaybeHandle<Object> Call(Isolate* isolate,
-                                                  Handle<Object> callable,
-                                                  Handle<Object> receiver,
-                                                  int argc,
-                                                  Handle<Object> argv[]);
+  V8_EXPORT_PRIVATE MUST_USE_RESULT static MaybeHandle<Object> Call(
+      Isolate* isolate, Handle<Object> callable, Handle<Object> receiver,
+      int argc, Handle<Object> argv[]);
 
   // Construct object from function, the caller supplies an array of
   // arguments.
@@ -48,11 +46,6 @@
                                      Handle<Object> receiver, int argc,
                                      Handle<Object> argv[],
                                      MaybeHandle<Object>* exception_out = NULL);
-
-  static Handle<String> GetStackTraceLine(Handle<Object> recv,
-                                          Handle<JSFunction> fun,
-                                          Handle<Object> pos,
-                                          Handle<Object> is_global);
 };
 
 
diff --git a/src/extensions/statistics-extension.cc b/src/extensions/statistics-extension.cc
index 5aafb7a..da53336 100644
--- a/src/extensions/statistics-extension.cc
+++ b/src/extensions/statistics-extension.cc
@@ -67,7 +67,8 @@
         args[0]
             ->BooleanValue(args.GetIsolate()->GetCurrentContext())
             .FromMaybe(false)) {
-      heap->CollectAllGarbage(Heap::kNoGCFlags, "counters extension");
+      heap->CollectAllGarbage(Heap::kNoGCFlags,
+                              GarbageCollectionReason::kCountersExtension);
     }
   }
 
@@ -116,19 +117,24 @@
   };
 
   const StatisticNumber numbers[] = {
-      {heap->memory_allocator()->Size(), "total_committed_bytes"},
+      {static_cast<intptr_t>(heap->memory_allocator()->Size()),
+       "total_committed_bytes"},
       {heap->new_space()->Size(), "new_space_live_bytes"},
       {heap->new_space()->Available(), "new_space_available_bytes"},
-      {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
+      {static_cast<intptr_t>(heap->new_space()->CommittedMemory()),
+       "new_space_commited_bytes"},
       {heap->old_space()->Size(), "old_space_live_bytes"},
       {heap->old_space()->Available(), "old_space_available_bytes"},
-      {heap->old_space()->CommittedMemory(), "old_space_commited_bytes"},
+      {static_cast<intptr_t>(heap->old_space()->CommittedMemory()),
+       "old_space_commited_bytes"},
       {heap->code_space()->Size(), "code_space_live_bytes"},
       {heap->code_space()->Available(), "code_space_available_bytes"},
-      {heap->code_space()->CommittedMemory(), "code_space_commited_bytes"},
+      {static_cast<intptr_t>(heap->code_space()->CommittedMemory()),
+       "code_space_commited_bytes"},
       {heap->lo_space()->Size(), "lo_space_live_bytes"},
       {heap->lo_space()->Available(), "lo_space_available_bytes"},
-      {heap->lo_space()->CommittedMemory(), "lo_space_commited_bytes"},
+      {static_cast<intptr_t>(heap->lo_space()->CommittedMemory()),
+       "lo_space_commited_bytes"},
   };
 
   for (size_t i = 0; i < arraysize(numbers); i++) {
diff --git a/src/external-reference-table.cc b/src/external-reference-table.cc
index 5833eef..f908be1 100644
--- a/src/external-reference-table.cc
+++ b/src/external-reference-table.cc
@@ -215,10 +215,6 @@
       "double_constants.minus_one_half");
   Add(ExternalReference::stress_deopt_count(isolate).address(),
       "Isolate::stress_deopt_count_address()");
-  Add(ExternalReference::virtual_handler_register(isolate).address(),
-      "Isolate::virtual_handler_register()");
-  Add(ExternalReference::virtual_slot_register(isolate).address(),
-      "Isolate::virtual_slot_register()");
   Add(ExternalReference::runtime_function_table_address(isolate).address(),
       "Runtime::runtime_function_table_address()");
   Add(ExternalReference::is_tail_call_elimination_enabled_address(isolate)
diff --git a/src/factory.cc b/src/factory.cc
index bedcb9b..163e864 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -37,13 +37,15 @@
     RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                                 \
     /* Two GCs before panicking.  In newspace will almost always succeed. */  \
     for (int __i__ = 0; __i__ < 2; __i__++) {                                 \
-      (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),          \
-                                        "allocation failure");                \
+      (ISOLATE)->heap()->CollectGarbage(                                      \
+          __allocation__.RetrySpace(),                                        \
+          GarbageCollectionReason::kAllocationFailure);                       \
       __allocation__ = FUNCTION_CALL;                                         \
       RETURN_OBJECT_UNLESS_RETRY(ISOLATE, TYPE)                               \
     }                                                                         \
     (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
-    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
+    (ISOLATE)->heap()->CollectAllAvailableGarbage(                            \
+        GarbageCollectionReason::kLastResort);                                \
     {                                                                         \
       AlwaysAllocateScope __scope__(ISOLATE);                                 \
       __allocation__ = FUNCTION_CALL;                                         \
@@ -54,7 +56,6 @@
     return Handle<TYPE>();                                                    \
   } while (false)
 
-
 template<typename T>
 Handle<T> Factory::New(Handle<Map> map, AllocationSpace space) {
   CALL_HEAP_FUNCTION(
@@ -91,7 +92,6 @@
   return result;
 }
 
-
 Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
   Handle<PrototypeInfo> result =
       Handle<PrototypeInfo>::cast(NewStruct(PROTOTYPE_INFO_TYPE));
@@ -102,14 +102,10 @@
   return result;
 }
 
-
-Handle<SloppyBlockWithEvalContextExtension>
-Factory::NewSloppyBlockWithEvalContextExtension(
-    Handle<ScopeInfo> scope_info, Handle<JSObject> extension) {
-  DCHECK(scope_info->is_declaration_scope());
-  Handle<SloppyBlockWithEvalContextExtension> result =
-      Handle<SloppyBlockWithEvalContextExtension>::cast(
-          NewStruct(SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION_TYPE));
+Handle<ContextExtension> Factory::NewContextExtension(
+    Handle<ScopeInfo> scope_info, Handle<Object> extension) {
+  Handle<ContextExtension> result =
+      Handle<ContextExtension>::cast(NewStruct(CONTEXT_EXTENSION_TYPE));
   result->set_scope_info(*scope_info);
   result->set_extension(*extension);
   return result;
@@ -178,6 +174,14 @@
   return array;
 }
 
+Handle<FrameArray> Factory::NewFrameArray(int number_of_frames,
+                                          PretenureFlag pretenure) {
+  DCHECK_LE(0, number_of_frames);
+  Handle<FixedArray> result =
+      NewFixedArrayWithHoles(FrameArray::LengthFor(number_of_frames));
+  result->set(FrameArray::kFrameCountIndex, Smi::FromInt(0));
+  return Handle<FrameArray>::cast(result);
+}
 
 Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
   return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kMinCapacity);
@@ -595,6 +599,19 @@
   return result;
 }
 
+Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
+  DCHECK_GE(lead, 0xD800);
+  DCHECK_LE(lead, 0xDBFF);
+  DCHECK_GE(trail, 0xDC00);
+  DCHECK_LE(trail, 0xDFFF);
+
+  Handle<SeqTwoByteString> str =
+      isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
+  uc16* dest = str->GetChars();
+  dest[0] = lead;
+  dest[1] = trail;
+  return str;
+}
 
 Handle<String> Factory::NewProperSubString(Handle<String> str,
                                            int begin,
@@ -729,6 +746,17 @@
   return external_string;
 }
 
+Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
+  Handle<Map> map(isolate()->native_context()->string_iterator_map(),
+                  isolate());
+  Handle<String> flat_string = String::Flatten(string);
+  Handle<JSStringIterator> iterator =
+      Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));
+  iterator->set_string(*flat_string);
+  iterator->set_index(0);
+
+  return iterator;
+}
 
 Handle<Symbol> Factory::NewSymbol() {
   CALL_HEAP_FUNCTION(
@@ -784,15 +812,19 @@
   return context_table;
 }
 
-
-Handle<Context> Factory::NewModuleContext(Handle<ScopeInfo> scope_info) {
+Handle<Context> Factory::NewModuleContext(Handle<Module> module,
+                                          Handle<JSFunction> function,
+                                          Handle<ScopeInfo> scope_info) {
   DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
   Handle<FixedArray> array =
       NewFixedArray(scope_info->ContextLength(), TENURED);
   array->set_map_no_write_barrier(*module_context_map());
-  // Instance link will be set later.
   Handle<Context> context = Handle<Context>::cast(array);
-  context->set_extension(*the_hole_value());
+  context->set_closure(*function);
+  context->set_previous(function->context());
+  context->set_extension(*module);
+  context->set_native_context(function->native_context());
+  DCHECK(context->IsModuleContext());
   return context;
 }
 
@@ -811,35 +843,41 @@
   return context;
 }
 
-
 Handle<Context> Factory::NewCatchContext(Handle<JSFunction> function,
                                          Handle<Context> previous,
+                                         Handle<ScopeInfo> scope_info,
                                          Handle<String> name,
                                          Handle<Object> thrown_object) {
   STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
+  Handle<ContextExtension> extension = NewContextExtension(scope_info, name);
   Handle<FixedArray> array = NewFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
   array->set_map_no_write_barrier(*catch_context_map());
   Handle<Context> context = Handle<Context>::cast(array);
   context->set_closure(*function);
   context->set_previous(*previous);
-  context->set_extension(*name);
+  context->set_extension(*extension);
   context->set_native_context(previous->native_context());
   context->set(Context::THROWN_OBJECT_INDEX, *thrown_object);
   return context;
 }
 
 Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
+                                                 Handle<ScopeInfo> scope_info,
                                                  Handle<JSReceiver> extension,
                                                  Handle<Context> wrapped,
                                                  Handle<StringSet> whitelist) {
   STATIC_ASSERT(Context::WHITE_LIST_INDEX == Context::MIN_CONTEXT_SLOTS + 1);
+  DCHECK(scope_info->IsDebugEvaluateScope());
+  Handle<ContextExtension> context_extension = NewContextExtension(
+      scope_info, extension.is_null() ? Handle<Object>::cast(undefined_value())
+                                      : Handle<Object>::cast(extension));
   Handle<FixedArray> array = NewFixedArray(Context::MIN_CONTEXT_SLOTS + 2);
   array->set_map_no_write_barrier(*debug_evaluate_context_map());
   Handle<Context> c = Handle<Context>::cast(array);
   c->set_closure(wrapped.is_null() ? previous->closure() : wrapped->closure());
   c->set_previous(*previous);
   c->set_native_context(previous->native_context());
-  if (!extension.is_null()) c->set(Context::EXTENSION_INDEX, *extension);
+  c->set_extension(*context_extension);
   if (!wrapped.is_null()) c->set(Context::WRAPPED_CONTEXT_INDEX, *wrapped);
   if (!whitelist.is_null()) c->set(Context::WHITE_LIST_INDEX, *whitelist);
   return c;
@@ -847,13 +885,16 @@
 
 Handle<Context> Factory::NewWithContext(Handle<JSFunction> function,
                                         Handle<Context> previous,
+                                        Handle<ScopeInfo> scope_info,
                                         Handle<JSReceiver> extension) {
+  Handle<ContextExtension> context_extension =
+      NewContextExtension(scope_info, extension);
   Handle<FixedArray> array = NewFixedArray(Context::MIN_CONTEXT_SLOTS);
   array->set_map_no_write_barrier(*with_context_map());
   Handle<Context> context = Handle<Context>::cast(array);
   context->set_closure(*function);
   context->set_previous(*previous);
-  context->set_extension(*extension);
+  context->set_extension(*context_extension);
   context->set_native_context(previous->native_context());
   return context;
 }
@@ -881,6 +922,20 @@
       Struct);
 }
 
+Handle<PromiseContainer> Factory::NewPromiseContainer(
+    Handle<JSReceiver> thenable, Handle<JSReceiver> then,
+    Handle<JSFunction> resolve, Handle<JSFunction> reject,
+    Handle<Object> before_debug_event, Handle<Object> after_debug_event) {
+  Handle<PromiseContainer> result =
+      Handle<PromiseContainer>::cast(NewStruct(PROMISE_CONTAINER_TYPE));
+  result->set_thenable(*thenable);
+  result->set_then(*then);
+  result->set_resolve(*resolve);
+  result->set_reject(*reject);
+  result->set_before_debug_event(*before_debug_event);
+  result->set_after_debug_event(*after_debug_event);
+  return result;
+}
 
 Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
     int aliased_context_slot) {
@@ -1196,6 +1251,13 @@
   return maybe_error.ToHandleChecked();
 }
 
+Handle<Object> Factory::NewInvalidStringLengthError() {
+  // Invalidate the "string length" protector.
+  if (isolate()->IsStringLengthOverflowIntact()) {
+    isolate()->InvalidateStringLengthOverflowProtector();
+  }
+  return NewRangeError(MessageTemplate::kInvalidStringLength);
+}
 
 #define DEFINE_ERROR(NAME, name)                                              \
   Handle<Object> Factory::New##NAME(MessageTemplate::Template template_index, \
@@ -1296,7 +1358,7 @@
   // TODO(littledan): Why do we have this is_generator test when
   // NewFunctionPrototype already handles finding an appropriately
   // shared prototype?
-  if (!function->shared()->is_resumable()) {
+  if (!IsResumableFunction(function->shared()->kind())) {
     if (prototype->IsTheHole(isolate())) {
       prototype = NewFunctionPrototype(function);
     }
@@ -1322,12 +1384,11 @@
   // can be from a different context.
   Handle<Context> native_context(function->context()->native_context());
   Handle<Map> new_map;
-  if (function->shared()->is_resumable()) {
+  if (IsResumableFunction(function->shared()->kind())) {
     // Generator and async function prototypes can share maps since they
     // don't have "constructor" properties.
     new_map = handle(native_context->generator_object_prototype_map());
   } else {
-    CHECK(!function->shared()->is_async());
     // Each function prototype gets a fresh map to avoid unwanted sharing of
     // maps between prototypes of different constructors.
     Handle<JSFunction> object_function(native_context->object_function());
@@ -1338,7 +1399,7 @@
   DCHECK(!new_map->is_prototype_map());
   Handle<JSObject> prototype = NewJSObjectFromMap(new_map);
 
-  if (!function->shared()->is_resumable()) {
+  if (!IsResumableFunction(function->shared()->kind())) {
     JSObject::AddProperty(prototype, constructor_string(), function, DONT_ENUM);
   }
 
@@ -1385,6 +1446,17 @@
   return scope_info;
 }
 
+Handle<ModuleInfoEntry> Factory::NewModuleInfoEntry() {
+  Handle<FixedArray> array = NewFixedArray(ModuleInfoEntry::kLength, TENURED);
+  array->set_map_no_write_barrier(*module_info_entry_map());
+  return Handle<ModuleInfoEntry>::cast(array);
+}
+
+Handle<ModuleInfo> Factory::NewModuleInfo() {
+  Handle<FixedArray> array = NewFixedArray(ModuleInfo::kLength, TENURED);
+  array->set_map_no_write_barrier(*module_info_map());
+  return Handle<ModuleInfo>::cast(array);
+}
 
 Handle<JSObject> Factory::NewExternal(void* value) {
   Handle<Foreign> foreign = NewForeign(static_cast<Address>(value));
@@ -1666,7 +1738,7 @@
 
 Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
     Handle<JSFunction> function) {
-  DCHECK(function->shared()->is_resumable());
+  DCHECK(IsResumableFunction(function->shared()->kind()));
   JSFunction::EnsureHasInitialMap(function);
   Handle<Map> map(function->initial_map());
   DCHECK_EQ(JS_GENERATOR_OBJECT_TYPE, map->instance_type());
@@ -1676,6 +1748,29 @@
       JSGeneratorObject);
 }
 
+Handle<Module> Factory::NewModule(Handle<SharedFunctionInfo> code) {
+  Handle<ModuleInfo> module_info(code->scope_info()->ModuleDescriptorInfo(),
+                                 isolate());
+  Handle<ObjectHashTable> exports =
+      ObjectHashTable::New(isolate(), module_info->regular_exports()->length());
+  int requested_modules_length = module_info->module_requests()->length();
+  Handle<FixedArray> requested_modules =
+      requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
+                                   : empty_fixed_array();
+
+  // To make it easy to hash Modules, we set a new symbol as the name of the
+  // SharedFunctionInfo representing this Module.
+  Handle<Symbol> name_symbol = NewSymbol();
+  code->set_name(*name_symbol);
+
+  Handle<Module> module = Handle<Module>::cast(NewStruct(MODULE_TYPE));
+  module->set_code(*code);
+  module->set_exports(*exports);
+  module->set_requested_modules(*requested_modules);
+  module->set_flags(0);
+  module->set_embedder_data(isolate()->heap()->undefined_value());
+  return module;
+}
 
 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(SharedFlag shared,
                                                 PretenureFlag pretenure) {
@@ -1698,6 +1793,15 @@
       JSDataView);
 }
 
+Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
+                                                      bool done) {
+  Handle<Map> map(isolate()->native_context()->iterator_result_map());
+  Handle<JSIteratorResult> js_iter_result =
+      Handle<JSIteratorResult>::cast(NewJSObjectFromMap(map));
+  js_iter_result->set_value(*value);
+  js_iter_result->set_done(*ToBoolean(done));
+  return js_iter_result;
+}
 
 Handle<JSMap> Factory::NewJSMap() {
   Handle<Map> map(isolate()->native_context()->js_map_map());
@@ -2066,6 +2170,7 @@
   Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(
       name, code, IsConstructable(kind, scope_info->language_mode()));
   shared->set_scope_info(*scope_info);
+  shared->set_outer_scope_info(*the_hole_value());
   shared->set_kind(kind);
   shared->set_num_literals(number_of_literals);
   if (IsGeneratorFunction(kind)) {
@@ -2112,6 +2217,7 @@
   share->set_code(*code);
   share->set_optimized_code_map(*cleared_optimized_code_map());
   share->set_scope_info(ScopeInfo::Empty(isolate()));
+  share->set_outer_scope_info(*the_hole_value());
   Handle<Code> construct_stub =
       is_constructor ? isolate()->builtins()->JSConstructStubGeneric()
                      : isolate()->builtins()->ConstructedNonConstructable();
diff --git a/src/factory.h b/src/factory.h
index 4908d5f..82c2317 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -28,9 +28,8 @@
                              byte kind);
 
   // Allocates a fixed array initialized with undefined values.
-  Handle<FixedArray> NewFixedArray(
-      int size,
-      PretenureFlag pretenure = NOT_TENURED);
+  V8_EXPORT_PRIVATE Handle<FixedArray> NewFixedArray(
+      int size, PretenureFlag pretenure = NOT_TENURED);
 
   // Allocate a new fixed array with non-existing entries (the hole).
   Handle<FixedArray> NewFixedArrayWithHoles(
@@ -52,19 +51,27 @@
       int size,
       PretenureFlag pretenure = NOT_TENURED);
 
+  Handle<FrameArray> NewFrameArray(int number_of_frames,
+                                   PretenureFlag pretenure = NOT_TENURED);
+
   Handle<OrderedHashSet> NewOrderedHashSet();
   Handle<OrderedHashMap> NewOrderedHashMap();
 
   // Create a new boxed value.
   Handle<Box> NewBox(Handle<Object> value);
 
+  // Create a new PromiseContainer struct.
+  Handle<PromiseContainer> NewPromiseContainer(
+      Handle<JSReceiver> thenable, Handle<JSReceiver> then,
+      Handle<JSFunction> resolve, Handle<JSFunction> reject,
+      Handle<Object> before_debug_event, Handle<Object> after_debug_event);
+
   // Create a new PrototypeInfo struct.
   Handle<PrototypeInfo> NewPrototypeInfo();
 
-  // Create a new SloppyBlockWithEvalContextExtension struct.
-  Handle<SloppyBlockWithEvalContextExtension>
-  NewSloppyBlockWithEvalContextExtension(Handle<ScopeInfo> scope_info,
-                                         Handle<JSObject> extension);
+  // Create a new ContextExtension struct.
+  Handle<ContextExtension> NewContextExtension(Handle<ScopeInfo> scope_info,
+                                               Handle<Object> extension);
 
   // Create a pre-tenured empty AccessorPair.
   Handle<AccessorPair> NewAccessorPair();
@@ -74,7 +81,8 @@
 
   // Finds the internalized copy for string in the string table.
   // If not found, a new string is added to the table and returned.
-  Handle<String> InternalizeUtf8String(Vector<const char> str);
+  V8_EXPORT_PRIVATE Handle<String> InternalizeUtf8String(
+      Vector<const char> str);
   Handle<String> InternalizeUtf8String(const char* str) {
     return InternalizeUtf8String(CStrVector(str));
   }
@@ -119,9 +127,8 @@
   //     will be converted to Latin1, otherwise it will be left as two-byte.
   //
   // One-byte strings are pretenured when used as keys in the SourceCodeCache.
-  MUST_USE_RESULT MaybeHandle<String> NewStringFromOneByte(
-      Vector<const uint8_t> str,
-      PretenureFlag pretenure = NOT_TENURED);
+  V8_EXPORT_PRIVATE MUST_USE_RESULT MaybeHandle<String> NewStringFromOneByte(
+      Vector<const uint8_t> str, PretenureFlag pretenure = NOT_TENURED);
 
   template <size_t N>
   inline Handle<String> NewStringFromStaticChars(
@@ -163,17 +170,17 @@
 
   // UTF8 strings are pretenured when used for regexp literal patterns and
   // flags in the parser.
-  MUST_USE_RESULT MaybeHandle<String> NewStringFromUtf8(
-      Vector<const char> str,
-      PretenureFlag pretenure = NOT_TENURED);
+  MUST_USE_RESULT V8_EXPORT_PRIVATE MaybeHandle<String> NewStringFromUtf8(
+      Vector<const char> str, PretenureFlag pretenure = NOT_TENURED);
 
-  MUST_USE_RESULT MaybeHandle<String> NewStringFromTwoByte(
-      Vector<const uc16> str,
-      PretenureFlag pretenure = NOT_TENURED);
+  V8_EXPORT_PRIVATE MUST_USE_RESULT MaybeHandle<String> NewStringFromTwoByte(
+      Vector<const uc16> str, PretenureFlag pretenure = NOT_TENURED);
 
   MUST_USE_RESULT MaybeHandle<String> NewStringFromTwoByte(
       const ZoneVector<uc16>* str, PretenureFlag pretenure = NOT_TENURED);
 
+  Handle<JSStringIterator> NewJSStringIterator(Handle<String> string);
+
   // Allocates an internalized string in old space based on the character
   // stream.
   Handle<String> NewInternalizedStringFromUtf8(Vector<const char> str,
@@ -215,6 +222,10 @@
   MUST_USE_RESULT MaybeHandle<String> NewConsString(Handle<String> left,
                                                     Handle<String> right);
 
+  // Create or look up a single-character string made up of a UTF-16 surrogate
+  // pair.
+  Handle<String> NewSurrogatePairString(uint16_t lead, uint16_t trail);
+
   // Create a new string object which holds a proper substring of a string.
   Handle<String> NewProperSubString(Handle<String> str,
                                     int begin,
@@ -255,7 +266,9 @@
   Handle<ScriptContextTable> NewScriptContextTable();
 
   // Create a module context.
-  Handle<Context> NewModuleContext(Handle<ScopeInfo> scope_info);
+  Handle<Context> NewModuleContext(Handle<Module> module,
+                                   Handle<JSFunction> function,
+                                   Handle<ScopeInfo> scope_info);
 
   // Create a function context.
   Handle<Context> NewFunctionContext(int length, Handle<JSFunction> function);
@@ -263,15 +276,18 @@
   // Create a catch context.
   Handle<Context> NewCatchContext(Handle<JSFunction> function,
                                   Handle<Context> previous,
+                                  Handle<ScopeInfo> scope_info,
                                   Handle<String> name,
                                   Handle<Object> thrown_object);
 
   // Create a 'with' context.
   Handle<Context> NewWithContext(Handle<JSFunction> function,
                                  Handle<Context> previous,
+                                 Handle<ScopeInfo> scope_info,
                                  Handle<JSReceiver> extension);
 
   Handle<Context> NewDebugEvaluateContext(Handle<Context> previous,
+                                          Handle<ScopeInfo> scope_info,
                                           Handle<JSReceiver> extension,
                                           Handle<Context> wrapped,
                                           Handle<StringSet> whitelist);
@@ -290,7 +306,7 @@
 
   Handle<AccessorInfo> NewAccessorInfo();
 
-  Handle<Script> NewScript(Handle<String> source);
+  V8_EXPORT_PRIVATE Handle<Script> NewScript(Handle<String> source);
 
   // Foreign objects are pretenured when allocated by the bootstrapper.
   Handle<Foreign> NewForeign(Address addr,
@@ -434,7 +450,7 @@
 
   // Create a JSArray with a specified length and elements initialized
   // according to the specified mode.
-  Handle<JSArray> NewJSArray(
+  V8_EXPORT_PRIVATE Handle<JSArray> NewJSArray(
       ElementsKind elements_kind, int length, int capacity,
       ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
       PretenureFlag pretenure = NOT_TENURED);
@@ -450,11 +466,11 @@
   }
 
   // Create a JSArray with the given elements.
-  Handle<JSArray> NewJSArrayWithElements(Handle<FixedArrayBase> elements,
-                                         ElementsKind elements_kind, int length,
-                                         PretenureFlag pretenure = NOT_TENURED);
+  V8_EXPORT_PRIVATE Handle<JSArray> NewJSArrayWithElements(
+      Handle<FixedArrayBase> elements, ElementsKind elements_kind, int length,
+      PretenureFlag pretenure = NOT_TENURED);
 
-  Handle<JSArray> NewJSArrayWithElements(
+  V8_EXPORT_PRIVATE Handle<JSArray> NewJSArrayWithElements(
       Handle<FixedArrayBase> elements,
       ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
       PretenureFlag pretenure = NOT_TENURED) {
@@ -470,6 +486,8 @@
 
   Handle<JSGeneratorObject> NewJSGeneratorObject(Handle<JSFunction> function);
 
+  Handle<Module> NewModule(Handle<SharedFunctionInfo> code);
+
   Handle<JSArrayBuffer> NewJSArrayBuffer(
       SharedFlag shared = SharedFlag::kNotShared,
       PretenureFlag pretenure = NOT_TENURED);
@@ -495,6 +513,8 @@
   Handle<JSDataView> NewJSDataView(Handle<JSArrayBuffer> buffer,
                                    size_t byte_offset, size_t byte_length);
 
+  Handle<JSIteratorResult> NewJSIteratorResult(Handle<Object> value, bool done);
+
   Handle<JSMap> NewJSMap();
   Handle<JSSet> NewJSSet();
 
@@ -554,6 +574,9 @@
   // Create a serialized scope info.
   Handle<ScopeInfo> NewScopeInfo(int length);
 
+  Handle<ModuleInfoEntry> NewModuleInfoEntry();
+  Handle<ModuleInfo> NewModuleInfo();
+
   // Create an External object for V8's external API.
   Handle<JSObject> NewExternal(void* value);
 
@@ -576,9 +599,7 @@
   Handle<Object> NewError(Handle<JSFunction> constructor,
                           Handle<String> message);
 
-  Handle<Object> NewInvalidStringLengthError() {
-    return NewRangeError(MessageTemplate::kInvalidStringLength);
-  }
+  Handle<Object> NewInvalidStringLengthError();
 
   Handle<Object> NewURIError() {
     return NewError(isolate()->uri_error_function(),
diff --git a/src/fast-accessor-assembler.cc b/src/fast-accessor-assembler.cc
index ebaab9a..a9cde70 100644
--- a/src/fast-accessor-assembler.cc
+++ b/src/fast-accessor-assembler.cc
@@ -179,27 +179,35 @@
                              ExternalReference::DIRECT_API_CALL, isolate());
 
   // Create & call API callback via stub.
-  CallApiCallbackStub stub(isolate(), 1, true, true);
-  DCHECK_EQ(5, stub.GetCallInterfaceDescriptor().GetParameterCount());
-  DCHECK_EQ(1, stub.GetCallInterfaceDescriptor().GetStackParameterCount());
+  const int kJSParameterCount = 1;
+  CallApiCallbackStub stub(isolate(), kJSParameterCount, true, true);
+  CallInterfaceDescriptor descriptor = stub.GetCallInterfaceDescriptor();
+  DCHECK_EQ(4, descriptor.GetParameterCount());
+  DCHECK_EQ(0, descriptor.GetStackParameterCount());
   // TODO(vogelheim): There is currently no clean way to retrieve the context
   //     parameter for a stub and the implementation details are hidden in
   //     compiler/*. The context_paramter is computed as:
   //       Linkage::GetJSCallContextParamIndex(descriptor->JSParameterCount())
-  const int context_parameter = 3;
-  Node* call = assembler_->CallStub(
-      stub.GetCallInterfaceDescriptor(),
-      assembler_->HeapConstant(stub.GetCode()),
-      assembler_->Parameter(context_parameter),
+  const int kContextParameter = 3;
+  Node* context = assembler_->Parameter(kContextParameter);
+  Node* target = assembler_->HeapConstant(stub.GetCode());
 
-      // Stub/register parameters:
-      assembler_->UndefinedConstant(), /* callee (there's no JSFunction) */
-      assembler_->UndefinedConstant(), /* call_data (undefined) */
-      assembler_->Parameter(0), /* receiver (same as holder in this case) */
-      assembler_->ExternalConstant(callback), /* API callback function */
+  int param_count = descriptor.GetParameterCount();
+  Node** args = zone()->NewArray<Node*>(param_count + 1 + kJSParameterCount);
+  // Stub/register parameters:
+  args[0] = assembler_->UndefinedConstant();  // callee (there's no JSFunction)
+  args[1] = assembler_->UndefinedConstant();  // call_data (undefined)
+  args[2] = assembler_->Parameter(0);  // receiver (same as holder in this case)
+  args[3] = assembler_->ExternalConstant(callback);  // API callback function
 
-      // JS arguments, on stack:
-      FromId(arg));
+  // JS arguments, on stack:
+  args[4] = FromId(arg);
+
+  // Context.
+  args[5] = context;
+
+  Node* call =
+      assembler_->CallStubN(descriptor, kJSParameterCount, target, args);
 
   return FromRaw(call);
 }
diff --git a/src/field-type.cc b/src/field-type.cc
index 2e4cbfb..b3b24e2 100644
--- a/src/field-type.cc
+++ b/src/field-type.cc
@@ -4,9 +4,9 @@
 
 #include "src/field-type.h"
 
+#include "src/ast/ast-types.h"
 #include "src/handles-inl.h"
 #include "src/ostreams.h"
-#include "src/types.h"
 
 namespace v8 {
 namespace internal {
@@ -71,11 +71,11 @@
 
 bool FieldType::NowIs(Handle<FieldType> other) { return NowIs(*other); }
 
-Type* FieldType::Convert(Zone* zone) {
-  if (IsAny()) return Type::NonInternal();
-  if (IsNone()) return Type::None();
+AstType* FieldType::Convert(Zone* zone) {
+  if (IsAny()) return AstType::NonInternal();
+  if (IsNone()) return AstType::None();
   DCHECK(IsClass());
-  return Type::Class(AsClass(), zone);
+  return AstType::Class(AsClass(), zone);
 }
 
 void FieldType::PrintTo(std::ostream& os) {
diff --git a/src/field-type.h b/src/field-type.h
index eb7ffca..11e1069 100644
--- a/src/field-type.h
+++ b/src/field-type.h
@@ -5,6 +5,7 @@
 #ifndef V8_FIELD_TYPE_H_
 #define V8_FIELD_TYPE_H_
 
+#include "src/ast/ast-types.h"
 #include "src/handles.h"
 #include "src/objects.h"
 #include "src/ostreams.h"
@@ -38,7 +39,7 @@
   bool NowStable();
   bool NowIs(FieldType* other);
   bool NowIs(Handle<FieldType> other);
-  Type* Convert(Zone* zone);
+  AstType* Convert(Zone* zone);
 
   void PrintTo(std::ostream& os);
 };
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index e5ddbad..779a589 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -23,14 +23,21 @@
 // this will just be an extern declaration, but for a readonly flag we let the
 // compiler make better optimizations by giving it the value.
 #if defined(FLAG_MODE_DECLARE)
-#define FLAG_FULL(ftype, ctype, nam, def, cmt) extern ctype FLAG_##nam;
+#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
+  V8_EXPORT_PRIVATE extern ctype FLAG_##nam;
 #define FLAG_READONLY(ftype, ctype, nam, def, cmt) \
   static ctype const FLAG_##nam = def;
 
 // We want to supply the actual storage and value for the flag variable in the
 // .cc file.  We only do this for writable flags.
 #elif defined(FLAG_MODE_DEFINE)
-#define FLAG_FULL(ftype, ctype, nam, def, cmt) ctype FLAG_##nam = def;
+#ifdef USING_V8_SHARED
+#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
+  V8_EXPORT_PRIVATE extern ctype FLAG_##nam;
+#else
+#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
+  V8_EXPORT_PRIVATE ctype FLAG_##nam = def;
+#endif
 
 // We need to define all of our default values so that the Flag structure can
 // access them by pointer.  These are just used internally inside of one .cc,
@@ -119,31 +126,27 @@
 #else
 #define DEBUG_BOOL false
 #endif
-#if (defined CAN_USE_VFP3_INSTRUCTIONS) || !(defined ARM_TEST_NO_FEATURE_PROBE)
-#define ENABLE_VFP3_DEFAULT true
+
+// Supported ARM configurations are:
+//  "armv6":       ARMv6 + VFPv2
+//  "armv7":       ARMv7 + VFPv3-D32 + NEON
+//  "armv7+sudiv": ARMv7 + VFPv4-D32 + NEON + SUDIV
+//  "armv8":       ARMv8 (including all of the above)
+#if !defined(ARM_TEST_NO_FEATURE_PROBE) ||                            \
+    (defined(CAN_USE_ARMV8_INSTRUCTIONS) &&                           \
+     defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_SUDIV) && \
+     defined(CAN_USE_NEON) && defined(CAN_USE_VFP3_INSTRUCTIONS))
+#define ARM_ARCH_DEFAULT "armv8"
+#elif defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_SUDIV) && \
+    defined(CAN_USE_NEON) && defined(CAN_USE_VFP3_INSTRUCTIONS)
+#define ARM_ARCH_DEFAULT "armv7+sudiv"
+#elif defined(CAN_USE_ARMV7_INSTRUCTIONS) && defined(CAN_USE_NEON) && \
+    defined(CAN_USE_VFP3_INSTRUCTIONS)
+#define ARM_ARCH_DEFAULT "armv7"
 #else
-#define ENABLE_VFP3_DEFAULT false
+#define ARM_ARCH_DEFAULT "armv6"
 #endif
-#if (defined CAN_USE_ARMV7_INSTRUCTIONS) || !(defined ARM_TEST_NO_FEATURE_PROBE)
-#define ENABLE_ARMV7_DEFAULT true
-#else
-#define ENABLE_ARMV7_DEFAULT false
-#endif
-#if (defined CAN_USE_ARMV8_INSTRUCTIONS) || !(defined ARM_TEST_NO_FEATURE_PROBE)
-#define ENABLE_ARMV8_DEFAULT true
-#else
-#define ENABLE_ARMV8_DEFAULT false
-#endif
-#if (defined CAN_USE_VFP32DREGS) || !(defined ARM_TEST_NO_FEATURE_PROBE)
-#define ENABLE_32DREGS_DEFAULT true
-#else
-#define ENABLE_32DREGS_DEFAULT false
-#endif
-#if (defined CAN_USE_NEON) || !(defined ARM_TEST_NO_FEATURE_PROBE)
-# define ENABLE_NEON_DEFAULT true
-#else
-# define ENABLE_NEON_DEFAULT false
-#endif
+
 #ifdef V8_OS_WIN
 # define ENABLE_LOG_COLOUR false
 #else
@@ -184,9 +187,6 @@
 DEFINE_BOOL(harmony_shipping, true, "enable all shipped harmony features")
 DEFINE_IMPLICATION(es_staging, harmony)
 
-DEFINE_BOOL(intl_extra, false, "additional V8 Intl functions")
-// Removing extra Intl functions is shipped
-DEFINE_NEG_VALUE_IMPLICATION(harmony_shipping, intl_extra, true)
 
 // Activate on ClusterFuzz.
 DEFINE_IMPLICATION(es_staging, harmony_regexp_lookbehind)
@@ -198,7 +198,6 @@
   V(harmony_function_sent, "harmony function.sent")                     \
   V(harmony_sharedarraybuffer, "harmony sharedarraybuffer")             \
   V(harmony_simd, "harmony simd")                                       \
-  V(harmony_explicit_tailcalls, "harmony explicit tail calls")          \
   V(harmony_do_expressions, "harmony do-expressions")                   \
   V(harmony_restrictive_generators,                                     \
     "harmony restrictions on generator declarations")                   \
@@ -206,18 +205,19 @@
   V(harmony_regexp_property, "harmony unicode regexp property classes") \
   V(harmony_for_in, "harmony for-in syntax")                            \
   V(harmony_trailing_commas,                                            \
-    "harmony trailing commas in function parameter lists")
+    "harmony trailing commas in function parameter lists")              \
+  V(harmony_class_fields, "harmony public fields in class literals")
 
 // Features that are complete (but still behind --harmony/es-staging flag).
 #define HARMONY_STAGED_BASE(V)                                               \
   V(harmony_regexp_lookbehind, "harmony regexp lookbehind")                  \
   V(harmony_tailcalls, "harmony tail calls")                                 \
-  V(harmony_async_await, "harmony async-await")                              \
   V(harmony_string_padding, "harmony String-padding methods")
 
 #ifdef V8_I18N_SUPPORT
-#define HARMONY_STAGED(V) \
-  HARMONY_STAGED_BASE(V)  \
+#define HARMONY_STAGED(V)                                          \
+  HARMONY_STAGED_BASE(V)                                           \
+  V(datetime_format_to_parts, "Intl.DateTimeFormat.formatToParts") \
   V(icu_case_mapping, "case mapping with ICU rather than Unibrow")
 #else
 #define HARMONY_STAGED(V) HARMONY_STAGED_BASE(V)
@@ -225,6 +225,7 @@
 
 // Features that are shipping (turned on by default, but internal flag remains).
 #define HARMONY_SHIPPING(V)                                                  \
+  V(harmony_async_await, "harmony async-await")                              \
   V(harmony_restrictive_declarations,                                        \
     "harmony limitations on sloppy mode function declarations")              \
   V(harmony_object_values_entries, "harmony Object.values / Object.entries") \
@@ -335,7 +336,7 @@
 DEFINE_INT(max_inlining_levels, 5, "maximum number of inlining levels")
 DEFINE_INT(max_inlined_source_size, 600,
            "maximum source size in bytes considered for a single inlining")
-DEFINE_INT(max_inlined_nodes, 196,
+DEFINE_INT(max_inlined_nodes, 200,
            "maximum number of AST nodes considered for a single inlining")
 DEFINE_INT(max_inlined_nodes_cumulative, 400,
            "maximum cumulative number of AST nodes considered for inlining")
@@ -405,6 +406,8 @@
 DEFINE_BOOL(inline_construct, true, "inline constructor calls")
 DEFINE_BOOL(inline_arguments, true, "inline functions with arguments object")
 DEFINE_BOOL(inline_accessors, true, "inline JavaScript accessors")
+DEFINE_BOOL(inline_into_try, false, "inline into try blocks")
+DEFINE_IMPLICATION(turbo, inline_into_try)
 DEFINE_INT(escape_analysis_iterations, 2,
            "maximum number of escape analysis fix-point iterations")
 
@@ -450,6 +453,8 @@
 DEFINE_BOOL(turbo_asm_deoptimization, false,
             "enable deoptimization in TurboFan for asm.js code")
 DEFINE_BOOL(turbo_verify, DEBUG_BOOL, "verify TurboFan graphs at each phase")
+DEFINE_BOOL(turbo_verify_machine_graph, false,
+            "verify TurboFan machine graph before instruction selection")
 DEFINE_BOOL(turbo_stats, false, "print TurboFan statistics")
 DEFINE_BOOL(turbo_stats_nvp, false,
             "print TurboFan statistics in machine-readable format")
@@ -487,9 +492,8 @@
             "enable instruction scheduling in TurboFan")
 DEFINE_BOOL(turbo_stress_instruction_scheduling, false,
             "randomly schedule instructions to stress dependency tracking")
-DEFINE_BOOL(turbo_store_elimination, false,
+DEFINE_BOOL(turbo_store_elimination, true,
             "enable store-store elimination in TurboFan")
-DEFINE_IMPLICATION(turbo, turbo_store_elimination)
 
 // Flags to help platform porters
 DEFINE_BOOL(minimal, false,
@@ -529,6 +533,12 @@
             "enable prototype simd opcodes for wasm")
 DEFINE_BOOL(wasm_eh_prototype, false,
             "enable prototype exception handling opcodes for wasm")
+DEFINE_BOOL(wasm_mv_prototype, false,
+            "enable prototype multi-value support for wasm")
+
+DEFINE_BOOL(wasm_trap_handler, false,
+            "use signal handlers to catch out of bounds memory access in wasm"
+            " (currently Linux x86_64 only)")
 
 // Profiler flags.
 DEFINE_INT(frame_count, 1, "number of stack frames inspected by the profiler")
@@ -548,6 +558,7 @@
 DEFINE_BOOL(debug_code, false, "generate extra code (assertions) for debugging")
 DEFINE_BOOL(code_comments, false, "emit comments in code disassembly")
 DEFINE_BOOL(enable_sse3, true, "enable use of SSE3 instructions if available")
+DEFINE_BOOL(enable_ssse3, true, "enable use of SSSE3 instructions if available")
 DEFINE_BOOL(enable_sse4_1, true,
             "enable use of SSE4.1 instructions if available")
 DEFINE_BOOL(enable_sahf, true,
@@ -559,35 +570,29 @@
 DEFINE_BOOL(enable_lzcnt, true, "enable use of LZCNT instruction if available")
 DEFINE_BOOL(enable_popcnt, true,
             "enable use of POPCNT instruction if available")
-DEFINE_BOOL(enable_vfp3, ENABLE_VFP3_DEFAULT,
-            "enable use of VFP3 instructions if available")
-DEFINE_BOOL(enable_armv7, ENABLE_ARMV7_DEFAULT,
-            "enable use of ARMv7 instructions if available (ARM only)")
-DEFINE_BOOL(enable_armv8, ENABLE_ARMV8_DEFAULT,
-            "enable use of ARMv8 instructions if available (ARM 32-bit only)")
-DEFINE_BOOL(enable_neon, ENABLE_NEON_DEFAULT,
-            "enable use of NEON instructions if available (ARM only)")
-DEFINE_BOOL(enable_sudiv, true,
-            "enable use of SDIV and UDIV instructions if available (ARM only)")
-DEFINE_BOOL(enable_movw_movt, false,
-            "enable loading 32-bit constant by means of movw/movt "
-            "instruction pairs (ARM only)")
-DEFINE_BOOL(enable_32dregs, ENABLE_32DREGS_DEFAULT,
-            "enable use of d16-d31 registers on ARM - this requires VFP3")
+DEFINE_STRING(arm_arch, ARM_ARCH_DEFAULT,
+              "generate instructions for the selected ARM architecture if "
+              "available: armv6, armv7, armv7+sudiv or armv8")
 DEFINE_BOOL(enable_vldr_imm, false,
             "enable use of constant pools for double immediate (ARM only)")
 DEFINE_BOOL(force_long_branches, false,
             "force all emitted branches to be in long mode (MIPS/PPC only)")
 DEFINE_STRING(mcpu, "auto", "enable optimization for specific cpu")
 
+// Deprecated ARM flags (replaced by arm_arch).
+DEFINE_MAYBE_BOOL(enable_armv7, "deprecated (use --arm_arch instead)")
+DEFINE_MAYBE_BOOL(enable_vfp3, "deprecated (use --arm_arch instead)")
+DEFINE_MAYBE_BOOL(enable_32dregs, "deprecated (use --arm_arch instead)")
+DEFINE_MAYBE_BOOL(enable_neon, "deprecated (use --arm_arch instead)")
+DEFINE_MAYBE_BOOL(enable_sudiv, "deprecated (use --arm_arch instead)")
+DEFINE_MAYBE_BOOL(enable_armv8, "deprecated (use --arm_arch instead)")
+
 // regexp-macro-assembler-*.cc
 DEFINE_BOOL(enable_regexp_unaligned_accesses, true,
             "enable unaligned accesses for the regexp engine")
 
-DEFINE_IMPLICATION(enable_armv8, enable_vfp3)
-DEFINE_IMPLICATION(enable_armv8, enable_neon)
-DEFINE_IMPLICATION(enable_armv8, enable_32dregs)
-DEFINE_IMPLICATION(enable_armv8, enable_sudiv)
+// api.cc
+DEFINE_BOOL(script_streaming, true, "enable parsing on background")
 
 // bootstrapper.cc
 DEFINE_STRING(expose_natives_as, NULL, "expose natives in global object")
@@ -711,8 +716,6 @@
             "print one trace line following each idle notification")
 DEFINE_BOOL(trace_idle_notification_verbose, false,
             "prints the heap state used by the idle notification")
-DEFINE_BOOL(print_cumulative_gc_stat, false,
-            "print cumulative GC statistics in name=value format on exit")
 DEFINE_BOOL(print_max_heap_committed, false,
             "print statistics of the maximum memory committed for the heap "
             "in name=value format on exit")
@@ -736,7 +739,7 @@
             "track un-executed functions to age code and flush only "
             "old code (required for code flushing)")
 DEFINE_BOOL(incremental_marking, true, "use incremental marking")
-DEFINE_BOOL(incremental_marking_wrappers, true,
+DEFINE_BOOL(incremental_marking_wrappers, false,
             "use incremental marking for marking wrappers")
 DEFINE_INT(min_progress_during_incremental_marking_finalization, 32,
            "keep finalizing incremental marking as long as we discover at "
@@ -800,6 +803,7 @@
 DEFINE_BOOL(use_ic, true, "use inline caching")
 DEFINE_BOOL(trace_ic, false, "trace inline cache state transitions")
 DEFINE_BOOL(tf_load_ic_stub, true, "use TF LoadIC stub")
+DEFINE_BOOL(tf_store_ic_stub, true, "use TF StoreIC stub")
 
 // macro-assembler-ia32.cc
 DEFINE_BOOL(native_code_counters, false,
@@ -835,6 +839,7 @@
 // parser.cc
 DEFINE_BOOL(allow_natives_syntax, false, "allow natives syntax")
 DEFINE_BOOL(trace_parse, false, "trace parsing and preparsing")
+DEFINE_BOOL(lazy_inner_functions, false, "enable lazy parsing inner functions")
 
 // simulator-arm.cc, simulator-arm64.cc and simulator-mips.cc
 DEFINE_BOOL(trace_sim, false, "Trace simulator execution")
@@ -868,6 +873,10 @@
             "print stack trace when an illegal exception is thrown")
 DEFINE_BOOL(abort_on_uncaught_exception, false,
             "abort program (dump core) when an uncaught exception is thrown")
+DEFINE_BOOL(abort_on_stack_overflow, false,
+            "Abort program when stack overflow (as opposed to throwing "
+            "RangeError). This is useful for fuzzing where the spec behaviour "
+            "would introduce nondeterminism.")
 DEFINE_BOOL(randomize_hashes, true,
             "randomize hashes to avoid predictable hash collisions "
             "(with snapshots this option cannot override the baked-in seed)")
@@ -925,11 +934,6 @@
             "Test mode only flag. It allows an unit test to select evacuation "
             "candidates pages (requires --stress_compaction).")
 
-// api.cc
-DEFINE_INT(external_allocation_limit_incremental_time, 1,
-           "Time spent in incremental marking steps (in ms) once the external "
-           "allocation limit is reached")
-
 DEFINE_BOOL(disable_old_api_accessors, false,
             "Disable old-style API accessors whose setters trigger through the "
             "prototype chain")
@@ -1036,6 +1040,10 @@
 // Debugger
 DEFINE_BOOL(print_break_location, false, "print source location on debug break")
 
+// wasm instance management
+DEFINE_BOOL(trace_wasm_instances, false,
+            "trace creation and collection of wasm instances")
+
 //
 // Logging and profiling flags
 //
@@ -1126,6 +1134,7 @@
 // codegen-ia32.cc / codegen-arm.cc
 DEFINE_BOOL(print_code, false, "print generated code")
 DEFINE_BOOL(print_opt_code, false, "print optimized code")
+DEFINE_STRING(print_opt_code_filter, "*", "filter for printing optimized code")
 DEFINE_BOOL(print_unopt_code, false,
             "print unoptimized code before "
             "printing optimized code based on it")
@@ -1188,8 +1197,6 @@
             "enable in-object double fields unboxing (64-bit only)")
 DEFINE_IMPLICATION(unbox_double_fields, track_double_fields)
 
-DEFINE_BOOL(global_var_shortcuts, false, "use ic-less global loads and stores")
-
 
 // Cleanup...
 #undef FLAG_FULL
diff --git a/src/frames-inl.h b/src/frames-inl.h
index 77784b8..61d0dcd 100644
--- a/src/frames-inl.h
+++ b/src/frames-inl.h
@@ -63,6 +63,8 @@
 
 
 inline Code* StackFrame::LookupCode() const {
+  // TODO(jgruber): This should really check that pc is within the returned
+  // code's instruction range [instruction_start(), instruction_end()[.
   return GetContainingCode(isolate(), pc());
 }
 
diff --git a/src/frames.cc b/src/frames.cc
index f0fa58d..c67fdc2 100644
--- a/src/frames.cc
+++ b/src/frames.cc
@@ -1461,9 +1461,9 @@
 }
 
 Object* WasmFrame::wasm_obj() const {
-  FixedArray* deopt_data = LookupCode()->deoptimization_data();
-  DCHECK(deopt_data->length() == 2);
-  return deopt_data->get(0);
+  Object* ret = wasm::GetOwningWasmInstance(LookupCode());
+  if (ret == nullptr) ret = *(isolate()->factory()->undefined_value());
+  return ret;
 }
 
 uint32_t WasmFrame::function_index() const {
@@ -1478,6 +1478,15 @@
   return wasm::WasmDebugInfo::GetFunctionScript(debug_info, function_index());
 }
 
+int WasmFrame::LookupExceptionHandlerInTable(int* stack_slots) {
+  DCHECK_NOT_NULL(stack_slots);
+  Code* code = LookupCode();
+  HandlerTable* table = HandlerTable::cast(code->handler_table());
+  int pc_offset = static_cast<int>(pc() - code->entry());
+  *stack_slots = code->stack_slots();
+  return table->LookupReturn(pc_offset);
+}
+
 namespace {
 
 
diff --git a/src/frames.h b/src/frames.h
index 1277023..373f4de 100644
--- a/src/frames.h
+++ b/src/frames.h
@@ -525,6 +525,8 @@
 
   Isolate* isolate() const { return isolate_; }
 
+  void operator=(const StackFrame& original) = delete;
+
  protected:
   inline explicit StackFrame(StackFrameIteratorBase* iterator);
   virtual ~StackFrame() { }
@@ -563,9 +565,6 @@
   friend class StackFrameIteratorBase;
   friend class StackHandlerIterator;
   friend class SafeStackFrameIterator;
-
- private:
-  void operator=(const StackFrame& original);
 };
 
 
@@ -1057,6 +1056,10 @@
   void Print(StringStream* accumulator, PrintMode mode,
              int index) const override;
 
+  // Look up the exception handler for the current {pc}; returns -1 if none is
+  // found. Also returns the stack slot count of the entire frame.
+  int LookupExceptionHandlerInTable(int* data);
+
   // Determine the code for the frame.
   Code* unchecked_code() const override;
 
diff --git a/src/full-codegen/arm/full-codegen-arm.cc b/src/full-codegen/arm/full-codegen-arm.cc
index 7887d32..e8eeb8e 100644
--- a/src/full-codegen/arm/full-codegen-arm.cc
+++ b/src/full-codegen/arm/full-codegen-arm.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_ARM
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/arm/code-stubs-arm.h"
 #include "src/arm/macro-assembler-arm.h"
@@ -126,6 +128,20 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ ldr(r2, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
+    __ ldr(r2, FieldMemOperand(r2, LiteralsArray::kFeedbackVectorOffset));
+    __ ldr(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+    __ add(r9, r9, Operand(Smi::FromInt(1)));
+    __ str(r9, FieldMemOperand(r2, TypeFeedbackVector::kInvocationCountIndex *
+                                           kPointerSize +
+                                       TypeFeedbackVector::kHeaderSize));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -167,14 +183,14 @@
   bool function_in_register_r1 = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     // Argument to NewContext is the function, which is still in r1.
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
       __ push(r1);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -259,9 +275,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register_r1) {
       __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -770,7 +785,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -821,7 +835,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1138,6 +1151,7 @@
   // Generate code for the going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ pop(r0);
   __ add(r0, r0, Operand(Smi::FromInt(1)));
   __ push(r0);
@@ -1160,12 +1174,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ ldr(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1174,12 +1185,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), r0);
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ ldr(StoreDescriptor::ValueRegister(),
          MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1219,7 +1227,7 @@
   Register temp = r4;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1268,20 +1276,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ mov(LoadGlobalDescriptor::SlotRegister(),
-         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1292,7 +1286,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1415,10 +1408,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(r0));
-            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1590,6 +1581,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1599,8 +1591,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1616,31 +1607,7 @@
 
     __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
     __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(r0);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(r0);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1992,7 +1959,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = r1;
@@ -2019,26 +1986,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -2075,10 +2039,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), r0);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2125,10 +2086,7 @@
       __ Move(StoreDescriptor::NameRegister(), r0);
       PopOperands(StoreDescriptor::ValueRegister(),
                   StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2153,10 +2111,8 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2173,10 +2129,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2192,7 +2148,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2213,13 +2170,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2230,11 +2180,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r0);
@@ -2276,10 +2223,7 @@
               StoreDescriptor::NameRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(r0));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r0);
@@ -2839,24 +2783,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(r0, r1);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(r1);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -3048,7 +2974,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(r2);
           __ mov(r1, Operand(var->name()));
           __ Push(r2, r1);
@@ -3333,11 +3259,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3375,10 +3298,7 @@
     case KEYED_PROPERTY: {
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::NameRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
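
The prologue hunk above (mirrored for arm64 and ia32 below) bumps a per-function invocation count in the type feedback vector: closure -> literals array -> feedback vector -> the kInvocationCountIndex slot, incremented directly as a Smi so the value never has to be untagged. A rough model of why adding Smi::FromInt(1) to the raw tagged word is enough, assuming a 32-bit Smi scheme with a one-bit tag (types invented for illustration, not V8's object layout):

  #include <cstdint>
  #include <cstdio>

  // Smi-tagged integers keep the payload in the upper bits, so adding two
  // tagged values adds the payloads without any untagging round trip.
  using Tagged = int32_t;
  constexpr int kSmiTagSize = 1;

  Tagged SmiFromInt(int value) { return static_cast<Tagged>(value) << kSmiTagSize; }
  int SmiToInt(Tagged smi) { return smi >> kSmiTagSize; }

  int main() {
    Tagged invocation_count = SmiFromInt(41);
    // What the generated prologue does: load the slot, add Smi(1), store back.
    invocation_count += SmiFromInt(1);
    std::printf("count = %d\n", SmiToInt(invocation_count));  // count = 42
  }
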
diff --git a/src/full-codegen/arm64/full-codegen-arm64.cc b/src/full-codegen/arm64/full-codegen-arm64.cc
index a4f32da..1854f10 100644
--- a/src/full-codegen/arm64/full-codegen-arm64.cc
+++ b/src/full-codegen/arm64/full-codegen-arm64.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_ARM64
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/arm64/code-stubs-arm64.h"
 #include "src/arm64/frames-arm64.h"
@@ -130,6 +132,20 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ Ldr(x11, FieldMemOperand(x1, JSFunction::kLiteralsOffset));
+    __ Ldr(x11, FieldMemOperand(x11, LiteralsArray::kFeedbackVectorOffset));
+    __ Ldr(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
+                                             kPointerSize +
+                                         TypeFeedbackVector::kHeaderSize));
+    __ Add(x10, x10, Operand(Smi::FromInt(1)));
+    __ Str(x10, FieldMemOperand(x11, TypeFeedbackVector::kInvocationCountIndex *
+                                             kPointerSize +
+                                         TypeFeedbackVector::kHeaderSize));
+  }
+
   // Reserve space on the stack for locals.
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
@@ -170,13 +186,13 @@
 
   bool function_in_register_x1 = true;
 
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     // Argument to NewContext is the function, which is still in x1.
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
-      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
+      __ Mov(x10, Operand(info->scope()->scope_info()));
       __ Push(x1, x10);
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
@@ -261,9 +277,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register_x1) {
       __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -765,7 +780,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -816,7 +830,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1126,6 +1139,7 @@
   // Generate code for going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ Bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   // TODO(all): We could use a callee saved register to avoid popping.
   __ Pop(x0);
   __ Add(x0, x0, Smi::FromInt(1));
@@ -1149,11 +1163,8 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Peek(StoreDescriptor::ReceiverRegister(), 0);
-  __ Mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1162,11 +1173,8 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), x0);
-  __ Mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1206,7 +1214,7 @@
   Register temp = x11;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1254,20 +1262,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ Mov(LoadGlobalDescriptor::SlotRegister(),
-         SmiFromSlot(proxy->VariableFeedbackSlot()));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1278,7 +1272,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1401,10 +1394,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(x0));
-            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ Peek(StoreDescriptor::ReceiverRegister(), 0);
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1572,6 +1563,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1581,8 +1573,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1598,31 +1589,7 @@
 
     __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
     __ Peek(StoreDescriptor::ReceiverRegister(), 0);
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(x0);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(x0);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1892,7 +1859,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = x1;
@@ -1919,26 +1886,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -1967,10 +1931,7 @@
       // this copy.
       __ Mov(StoreDescriptor::ReceiverRegister(), x0);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ Mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2017,10 +1978,7 @@
       __ Mov(StoreDescriptor::NameRegister(), x0);
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::ValueRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2046,10 +2004,8 @@
   ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2065,10 +2021,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ Bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2083,7 +2039,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2103,13 +2060,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2121,11 +2071,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ Mov(StoreDescriptor::NameRegister(),
-         Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(x0);
@@ -2170,10 +2117,7 @@
               StoreDescriptor::ReceiverRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(x0));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(x0);
@@ -2746,28 +2690,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  Register code = x0;
-  Register result = x1;
-
-  StringCharFromCodeGenerator generator(code, result);
-  generator.GenerateFast(masm_);
-  __ B(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ Bind(&done);
-  context()->Plug(result);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -2971,7 +2893,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(x12);
           __ Mov(x11, Operand(var->name()));
           __ Push(x12, x11);
@@ -3254,11 +3176,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ Mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3296,10 +3215,7 @@
     case KEYED_PROPERTY: {
       PopOperand(StoreDescriptor::NameRegister());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
diff --git a/src/full-codegen/full-codegen.cc b/src/full-codegen/full-codegen.cc
index d83a23b..25d7f92 100644
--- a/src/full-codegen/full-codegen.cc
+++ b/src/full-codegen/full-codegen.cc
@@ -10,6 +10,7 @@
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/debug/debug.h"
 #include "src/debug/liveedit.h"
@@ -25,15 +26,69 @@
 
 #define __ ACCESS_MASM(masm())
 
+class FullCodegenCompilationJob final : public CompilationJob {
+ public:
+  explicit FullCodegenCompilationJob(CompilationInfo* info)
+      : CompilationJob(info->isolate(), info, "Full-Codegen") {}
+
+  bool can_execute_on_background_thread() const override { return false; }
+
+  CompilationJob::Status PrepareJobImpl() final { return SUCCEEDED; }
+
+  CompilationJob::Status ExecuteJobImpl() final {
+    DCHECK(ThreadId::Current().Equals(isolate()->thread_id()));
+    return FullCodeGenerator::MakeCode(info(), stack_limit()) ? SUCCEEDED
+                                                              : FAILED;
+  }
+
+  CompilationJob::Status FinalizeJobImpl() final { return SUCCEEDED; }
+};
+
+FullCodeGenerator::FullCodeGenerator(MacroAssembler* masm,
+                                     CompilationInfo* info,
+                                     uintptr_t stack_limit)
+    : masm_(masm),
+      info_(info),
+      isolate_(info->isolate()),
+      zone_(info->zone()),
+      scope_(info->scope()),
+      nesting_stack_(NULL),
+      loop_depth_(0),
+      operand_stack_depth_(0),
+      globals_(NULL),
+      context_(NULL),
+      bailout_entries_(info->HasDeoptimizationSupport()
+                           ? info->literal()->ast_node_count()
+                           : 0,
+                       info->zone()),
+      back_edges_(2, info->zone()),
+      handler_table_(info->zone()),
+      source_position_table_builder_(info->zone(),
+                                     info->SourcePositionRecordingMode()),
+      ic_total_count_(0) {
+  DCHECK(!info->IsStub());
+  Initialize(stack_limit);
+}
+
+// static
+CompilationJob* FullCodeGenerator::NewCompilationJob(CompilationInfo* info) {
+  return new FullCodegenCompilationJob(info);
+}
+
+// static
 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
+  return MakeCode(info, info->isolate()->stack_guard()->real_climit());
+}
+
+// static
+bool FullCodeGenerator::MakeCode(CompilationInfo* info, uintptr_t stack_limit) {
   Isolate* isolate = info->isolate();
 
   DCHECK(!FLAG_minimal);
   RuntimeCallTimerScope runtimeTimer(isolate,
                                      &RuntimeCallStats::CompileFullCode);
   TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::CompileFullCode);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileFullCode");
 
   Handle<Script> script = info->script();
   if (!script->IsUndefined(isolate) &&
@@ -47,7 +102,7 @@
                       CodeObjectRequired::kYes);
   if (info->will_serialize()) masm.enable_serializer();
 
-  FullCodeGenerator cgen(&masm, info);
+  FullCodeGenerator cgen(&masm, info, stack_limit);
   cgen.Generate();
   if (cgen.HasStackOverflow()) {
     DCHECK(!isolate->has_pending_exception());
@@ -157,9 +212,8 @@
          expr->values()->length() > JSArray::kInitialMaxFastElementArray;
 }
 
-
-void FullCodeGenerator::Initialize() {
-  InitializeAstVisitor(info_->isolate());
+void FullCodeGenerator::Initialize(uintptr_t stack_limit) {
+  InitializeAstVisitor(stack_limit);
   masm_->set_emit_debug_code(FLAG_debug_code);
   masm_->set_predictable_code_size(true);
 }
@@ -169,23 +223,52 @@
   PrepareForBailoutForId(node->id(), state);
 }
 
-void FullCodeGenerator::CallLoadIC(TypeFeedbackId id) {
+void FullCodeGenerator::CallLoadIC(FeedbackVectorSlot slot, Handle<Object> name,
+                                   TypeFeedbackId id) {
+  DCHECK(name->IsName());
+  __ Move(LoadDescriptor::NameRegister(), name);
+
+  EmitLoadSlot(LoadDescriptor::SlotRegister(), slot);
+
   Handle<Code> ic = CodeFactory::LoadIC(isolate()).code();
   CallIC(ic, id);
   if (FLAG_tf_load_ic_stub) RestoreContext();
 }
 
-void FullCodeGenerator::CallLoadGlobalIC(TypeofMode typeof_mode,
-                                         TypeFeedbackId id) {
-  Handle<Code> ic = CodeFactory::LoadGlobalIC(isolate(), typeof_mode).code();
-  CallIC(ic, id);
-}
+void FullCodeGenerator::CallStoreIC(FeedbackVectorSlot slot,
+                                    Handle<Object> name, TypeFeedbackId id) {
+  DCHECK(name->IsName());
+  __ Move(StoreDescriptor::NameRegister(), name);
 
-void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
+  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
+                StoreDescriptor::kStackArgumentsCount == 2);
+  if (StoreDescriptor::kPassLastArgsOnStack) {
+    __ Push(StoreDescriptor::ValueRegister());
+    EmitPushSlot(slot);
+  } else {
+    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
+  }
+
   Handle<Code> ic = CodeFactory::StoreIC(isolate(), language_mode()).code();
   CallIC(ic, id);
+  RestoreContext();
 }
 
+void FullCodeGenerator::CallKeyedStoreIC(FeedbackVectorSlot slot) {
+  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
+                StoreDescriptor::kStackArgumentsCount == 2);
+  if (StoreDescriptor::kPassLastArgsOnStack) {
+    __ Push(StoreDescriptor::ValueRegister());
+    EmitPushSlot(slot);
+  } else {
+    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
+  }
+
+  Handle<Code> ic =
+      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
+  CallIC(ic);
+  RestoreContext();
+}
 
 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
   // We record the offset of the function return so we can rebuild the frame
@@ -411,6 +494,18 @@
   EmitVariableLoad(expr);
 }
 
+void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
+                                               TypeofMode typeof_mode) {
+#ifdef DEBUG
+  Variable* var = proxy->var();
+  DCHECK(var->IsUnallocated() ||
+         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
+#endif
+  EmitLoadSlot(LoadGlobalDescriptor::SlotRegister(),
+               proxy->VariableFeedbackSlot());
+  Handle<Code> ic = CodeFactory::LoadGlobalIC(isolate(), typeof_mode).code();
+  CallIC(ic);
+}
 
 void FullCodeGenerator::VisitSloppyBlockFunctionStatement(
     SloppyBlockFunctionStatement* declaration) {
@@ -473,6 +568,7 @@
   VisitForStackValue(args->at(1));
   VisitForStackValue(args->at(2));
   __ CallStub(&stub);
+  RestoreContext();
   OperandStackDepthDecrement(3);
   context()->Plug(result_register());
 }
@@ -816,8 +912,8 @@
   DCHECK(!context()->IsEffect());
   DCHECK(!context()->IsTest());
 
-  if (proxy != NULL && (proxy->var()->IsUnallocatedOrGlobalSlot() ||
-                        proxy->var()->IsLookupSlot())) {
+  if (proxy != NULL &&
+      (proxy->var()->IsUnallocated() || proxy->var()->IsLookupSlot())) {
     EmitVariableLoad(proxy, INSIDE_TYPEOF);
     PrepareForBailout(proxy, BailoutState::TOS_REGISTER);
   } else {
@@ -896,6 +992,7 @@
   // accumulator on the stack.
   ClearAccumulator();
   while (!current->IsContinueTarget(target)) {
+    if (HasStackOverflow()) return;
     if (current->IsTryFinally()) {
       Comment cmnt(masm(), "[ Deferred continue through finally");
       current->Exit(&context_length);
@@ -936,6 +1033,7 @@
   // accumulator on the stack.
   ClearAccumulator();
   while (!current->IsBreakTarget(target)) {
+    if (HasStackOverflow()) return;
     if (current->IsTryFinally()) {
       Comment cmnt(masm(), "[ Deferred break through finally");
       current->Exit(&context_length);
@@ -971,6 +1069,7 @@
   NestedStatement* current = nesting_stack_;
   int context_length = 0;
   while (current != NULL) {
+    if (HasStackOverflow()) return;
     if (current->IsTryFinally()) {
       Comment cmnt(masm(), "[ Deferred return through finally");
       current->Exit(&context_length);
@@ -1008,10 +1107,7 @@
   DCHECK(!key->value()->IsSmi());
   DCHECK(!prop->IsSuperAccess());
 
-  __ Move(LoadDescriptor::NameRegister(), key->value());
-  __ Move(LoadDescriptor::SlotRegister(),
-          SmiFromSlot(prop->PropertyFeedbackSlot()));
-  CallLoadIC();
+  CallLoadIC(prop->PropertyFeedbackSlot(), key->value());
 }
 
 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
@@ -1027,11 +1123,12 @@
 
 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
   SetExpressionPosition(prop);
+
+  EmitLoadSlot(LoadDescriptor::SlotRegister(), prop->PropertyFeedbackSlot());
+
   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
-  __ Move(LoadDescriptor::SlotRegister(),
-          SmiFromSlot(prop->PropertyFeedbackSlot()));
   CallIC(ic);
-  if (FLAG_tf_load_ic_stub) RestoreContext();
+  RestoreContext();
 }
 
 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
@@ -1040,7 +1137,7 @@
   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
 }
 
-void FullCodeGenerator::EmitPropertyKey(ObjectLiteralProperty* property,
+void FullCodeGenerator::EmitPropertyKey(LiteralProperty* property,
                                         BailoutId bailout_id) {
   VisitForStackValue(property->key());
   CallRuntimeWithOperands(Runtime::kToName);
@@ -1048,9 +1145,14 @@
   PushOperand(result_register());
 }
 
-void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
+void FullCodeGenerator::EmitLoadSlot(Register destination,
+                                     FeedbackVectorSlot slot) {
   DCHECK(!slot.IsInvalid());
-  __ Move(StoreDescriptor::SlotRegister(), SmiFromSlot(slot));
+  __ Move(destination, SmiFromSlot(slot));
+}
+
+void FullCodeGenerator::EmitPushSlot(FeedbackVectorSlot slot) {
+  __ Push(SmiFromSlot(slot));
 }
 
 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
@@ -1073,6 +1175,7 @@
   RestoreContext();
   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
   PushOperand(result_register());
+  PushOperand(stmt->scope()->scope_info());
   PushFunctionArgumentForContextAllocation();
   CallRuntimeWithOperands(Runtime::kPushWithContext);
   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
@@ -1274,6 +1377,7 @@
   { Comment cmnt(masm_, "[ Extend catch context");
     PushOperand(stmt->variable()->name());
     PushOperand(result_register());
+    PushOperand(stmt->scope()->scope_info());
     PushFunctionArgumentForContextAllocation();
     CallRuntimeWithOperands(Runtime::kPushCatchContext);
     StoreToFrameField(StandardFrameConstants::kContextOffset,
@@ -1466,9 +1570,7 @@
 
   // Load the "prototype" from the constructor.
   __ Move(LoadDescriptor::ReceiverRegister(), result_register());
-  __ LoadRoot(LoadDescriptor::NameRegister(), Heap::kprototype_stringRootIndex);
-  __ Move(LoadDescriptor::SlotRegister(), SmiFromSlot(lit->PrototypeSlot()));
-  CallLoadIC();
+  CallLoadIC(lit->PrototypeSlot(), isolate()->factory()->prototype_string());
   PrepareForBailoutForId(lit->PrototypeId(), BailoutState::TOS_REGISTER);
   PushOperand(result_register());
 
@@ -1847,7 +1949,7 @@
     {
       if (needs_block_context_) {
         Comment cmnt(masm(), "[ Extend block context");
-        codegen_->PushOperand(scope->GetScopeInfo(codegen->isolate()));
+        codegen_->PushOperand(scope->scope_info());
         codegen_->PushFunctionArgumentForContextAllocation();
         codegen_->CallRuntimeWithOperands(Runtime::kPushBlockContext);
 
@@ -1939,6 +2041,17 @@
          var->initializer_position() >= proxy->position();
 }
 
+Handle<Script> FullCodeGenerator::script() { return info_->script(); }
+
+LanguageMode FullCodeGenerator::language_mode() {
+  return scope()->language_mode();
+}
+
+bool FullCodeGenerator::has_simple_parameters() {
+  return info_->has_simple_parameters();
+}
+
+FunctionLiteral* FullCodeGenerator::literal() const { return info_->literal(); }
 
 #undef __
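
Centralizing the IC call sequences here means each full-codegen call site now passes only the feedback slot (plus the property name for named loads and stores); CallStoreIC and CallKeyedStoreIC then decide whether the slot travels in the descriptor's slot register or, when the descriptor passes its last arguments on the stack, is pushed after the value. A toy model of that branch, using invented types rather than V8's StoreDescriptor or MacroAssembler:

  #include <cstdio>
  #include <vector>

  // Invented stand-ins; the real descriptor flag is platform-dependent in V8.
  constexpr bool kPassLastArgsOnStack = true;

  struct FrameModel {
    int slot_register = -1;  // dedicated register for the feedback slot
    std::vector<int> stack;  // outgoing stack arguments
  };

  void EmitStoreCall(FrameModel* frame, int value, int feedback_slot) {
    if (kPassLastArgsOnStack) {
      frame->stack.push_back(value);          // __ Push(ValueRegister())
      frame->stack.push_back(feedback_slot);  // EmitPushSlot(slot)
    } else {
      frame->slot_register = feedback_slot;   // EmitLoadSlot(SlotRegister(), slot)
    }
    // ...the StoreIC/KeyedStoreIC code object would be called at this point,
    // followed by RestoreContext().
  }

  int main() {
    FrameModel frame;
    EmitStoreCall(&frame, /*value=*/7, /*feedback_slot=*/3);
    std::printf("stack args: %zu\n", frame.stack.size());
  }
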
 
diff --git a/src/full-codegen/full-codegen.h b/src/full-codegen/full-codegen.h
index 71f065b..2a4eb9d 100644
--- a/src/full-codegen/full-codegen.h
+++ b/src/full-codegen/full-codegen.h
@@ -13,7 +13,6 @@
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
-#include "src/compiler.h"
 #include "src/deoptimizer.h"
 #include "src/globals.h"
 #include "src/objects.h"
@@ -22,39 +21,24 @@
 namespace internal {
 
 // Forward declarations.
+class CompilationInfo;
+class CompilationJob;
 class JumpPatchSite;
+class Scope;
 
 // -----------------------------------------------------------------------------
 // Full code generator.
 
 class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
  public:
-  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
-      : masm_(masm),
-        info_(info),
-        isolate_(info->isolate()),
-        zone_(info->zone()),
-        scope_(info->scope()),
-        nesting_stack_(NULL),
-        loop_depth_(0),
-        operand_stack_depth_(0),
-        globals_(NULL),
-        context_(NULL),
-        bailout_entries_(info->HasDeoptimizationSupport()
-                             ? info->literal()->ast_node_count()
-                             : 0,
-                         info->zone()),
-        back_edges_(2, info->zone()),
-        handler_table_(info->zone()),
-        source_position_table_builder_(info->zone(),
-                                       info->SourcePositionRecordingMode()),
-        ic_total_count_(0) {
-    DCHECK(!info->IsStub());
-    Initialize();
-  }
+  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info,
+                    uintptr_t stack_limit);
 
-  void Initialize();
+  void Initialize(uintptr_t stack_limit);
 
+  static CompilationJob* NewCompilationJob(CompilationInfo* info);
+
+  static bool MakeCode(CompilationInfo* info, uintptr_t stack_limit);
   static bool MakeCode(CompilationInfo* info);
 
   // Encode bailout state and pc-offset as a BitField<type, start, size>.
@@ -493,7 +477,6 @@
   F(IsJSProxy)                          \
   F(Call)                               \
   F(NewObject)                          \
-  F(StringCharFromCode)                 \
   F(IsJSReceiver)                       \
   F(HasCachedArrayIndex)                \
   F(GetCachedArrayIndex)                \
@@ -572,7 +555,7 @@
   void EmitClassDefineProperties(ClassLiteral* lit);
 
   // Pushes the property key as a Name on the stack.
-  void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id);
+  void EmitPropertyKey(LiteralProperty* property, BailoutId bailout_id);
 
   // Apply the compound assignment operator. Expects the left operand on top
   // of the stack and the right one in the accumulator.
@@ -629,16 +612,19 @@
   void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
                                     FeedbackVectorSlot slot);
 
-  void EmitLoadStoreICSlot(FeedbackVectorSlot slot);
+  // Platform-specific code for loading a slot to a register.
+  void EmitLoadSlot(Register destination, FeedbackVectorSlot slot);
+  // Platform-specific code for pushing a slot to the stack.
+  void EmitPushSlot(FeedbackVectorSlot slot);
 
   void CallIC(Handle<Code> code,
               TypeFeedbackId id = TypeFeedbackId::None());
 
-  void CallLoadIC(TypeFeedbackId id = TypeFeedbackId::None());
-  // Inside typeof reference errors are never thrown.
-  void CallLoadGlobalIC(TypeofMode typeof_mode,
-                        TypeFeedbackId id = TypeFeedbackId::None());
-  void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
+  void CallLoadIC(FeedbackVectorSlot slot, Handle<Object> name,
+                  TypeFeedbackId id = TypeFeedbackId::None());
+  void CallStoreIC(FeedbackVectorSlot slot, Handle<Object> name,
+                   TypeFeedbackId id = TypeFeedbackId::None());
+  void CallKeyedStoreIC(FeedbackVectorSlot slot);
 
   void SetFunctionPosition(FunctionLiteral* fun);
   void SetReturnPosition(FunctionLiteral* fun);
@@ -695,10 +681,10 @@
 
   Isolate* isolate() const { return isolate_; }
   Zone* zone() const { return zone_; }
-  Handle<Script> script() { return info_->script(); }
-  LanguageMode language_mode() { return scope()->language_mode(); }
-  bool has_simple_parameters() { return info_->has_simple_parameters(); }
-  FunctionLiteral* literal() const { return info_->literal(); }
+  Handle<Script> script();
+  LanguageMode language_mode();
+  bool has_simple_parameters();
+  FunctionLiteral* literal() const;
   Scope* scope() { return scope_; }
 
   static Register context_register();
diff --git a/src/full-codegen/ia32/full-codegen-ia32.cc b/src/full-codegen/ia32/full-codegen-ia32.cc
index 3571948..e5f66cd 100644
--- a/src/full-codegen/ia32/full-codegen-ia32.cc
+++ b/src/full-codegen/ia32/full-codegen-ia32.cc
@@ -4,15 +4,17 @@
 
 #if V8_TARGET_ARCH_IA32
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ia32/frames-ia32.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 namespace v8 {
 namespace internal {
@@ -115,6 +117,17 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
+    __ mov(ecx, FieldOperand(ecx, LiteralsArray::kFeedbackVectorOffset));
+    __ add(FieldOperand(
+               ecx, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize),
+           Immediate(Smi::FromInt(1)));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -159,14 +172,14 @@
   bool function_in_register = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     // Argument to NewContext is the function, which is still in edi.
     if (info->scope()->is_script_scope()) {
       __ push(edi);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -254,9 +267,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register) {
       __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
@@ -717,7 +729,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -767,7 +778,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1066,6 +1076,7 @@
   // Generate code for going to the next element by incrementing the
   // index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
 
   EmitBackEdgeBookkeeping(stmt, &loop);
@@ -1086,11 +1097,8 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-  __ mov(StoreDescriptor::NameRegister(),
-         Immediate(isolate()->factory()->home_object_symbol()));
   __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1099,11 +1107,8 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ mov(StoreDescriptor::ReceiverRegister(), eax);
-  __ mov(StoreDescriptor::NameRegister(),
-         Immediate(isolate()->factory()->home_object_symbol()));
   __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1141,7 +1146,7 @@
   Register temp = ebx;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
@@ -1189,20 +1194,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ mov(LoadGlobalDescriptor::SlotRegister(),
-         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   SetExpressionPosition(proxy);
@@ -1212,7 +1203,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1339,10 +1329,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(eax));
-            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
             __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
             if (NeedsHomeObject(value)) {
               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
@@ -1506,6 +1494,7 @@
     __ mov(ecx, Immediate(constant_elements));
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1515,8 +1504,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1533,31 +1521,7 @@
     __ mov(StoreDescriptor::NameRegister(),
            Immediate(Smi::FromInt(array_index)));
     __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(eax);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(eax);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
-
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
   }
@@ -1902,7 +1866,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     if (property->is_static()) {
@@ -1927,25 +1891,25 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
+
+      case ClassLiteral::Property::FIELD:
+        UNREACHABLE();
+        break;
     }
   }
 }
@@ -1980,10 +1944,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), eax);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ mov(StoreDescriptor::NameRegister(),
-             prop->key()->AsLiteral()->value());
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2030,10 +1991,7 @@
       __ Move(StoreDescriptor::NameRegister(), eax);
       PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
       PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2056,13 +2014,11 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ mov(StoreDescriptor::NameRegister(), var->name());
     __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
     __ mov(StoreDescriptor::ReceiverRegister(),
            ContextOperand(StoreDescriptor::ReceiverRegister(),
                           Context::EXTENSION_INDEX));
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2078,10 +2034,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2096,7 +2052,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(Immediate(var->name()));
@@ -2117,13 +2074,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2136,10 +2086,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(eax);
 }
@@ -2182,10 +2130,7 @@
   PopOperand(StoreDescriptor::NameRegister());  // Key.
   PopOperand(StoreDescriptor::ReceiverRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(eax));
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(eax);
 }
@@ -2723,25 +2668,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(eax, ebx);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(ebx);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -2936,7 +2862,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ mov(eax, NativeContextOperand());
           __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
           __ push(Immediate(var->name()));
@@ -3230,11 +3156,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ mov(StoreDescriptor::NameRegister(),
-             prop->key()->AsLiteral()->value());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3272,10 +3195,7 @@
     case KEYED_PROPERTY: {
       PopOperand(StoreDescriptor::NameRegister());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         // Result is on the stack
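
The ia32 hunks above repeatedly collapse the open-coded store-IC sequence (move the name into StoreDescriptor::NameRegister(), EmitLoadStoreICSlot(), then CallStoreIC(), or build a CodeFactory::KeyedStoreIC handle and CallIC()) into the new CallStoreIC(slot, name) and CallKeyedStoreIC(slot) helpers. A minimal sketch of what such helpers plausibly do, inferred only from the call sites they replace; the real declarations live in src/full-codegen/full-codegen.h, which is outside this hunk, and may differ (for instance, the name load may stay per-architecture):

    // Sketch only: shape inferred from the sequences removed above.
    void FullCodeGenerator::CallStoreIC(FeedbackVectorSlot slot,
                                        Handle<Object> name) {
      __ Move(StoreDescriptor::NameRegister(), name);  // was spelled out at each call site
      EmitLoadStoreICSlot(slot);
      Handle<Code> code = CodeFactory::StoreIC(isolate(), language_mode()).code();
      CallIC(code);
    }

    void FullCodeGenerator::CallKeyedStoreIC(FeedbackVectorSlot slot) {
      EmitLoadStoreICSlot(slot);
      Handle<Code> code =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(code);
    }
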
diff --git a/src/full-codegen/mips/full-codegen-mips.cc b/src/full-codegen/mips/full-codegen-mips.cc
index 67598d0..7f97686 100644
--- a/src/full-codegen/mips/full-codegen-mips.cc
+++ b/src/full-codegen/mips/full-codegen-mips.cc
@@ -12,14 +12,16 @@
 // places where we have to move a previous result in v0 to a0 for the
 // next call: mov(a0, v0). This is not needed on the other architectures.
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/mips/code-stubs-mips.h"
 #include "src/mips/macro-assembler-mips.h"
@@ -135,6 +137,20 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
+    __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
+    __ lw(t0, FieldMemOperand(
+                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize));
+    __ Addu(t0, t0, Operand(Smi::FromInt(1)));
+    __ sw(t0, FieldMemOperand(
+                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -177,14 +193,14 @@
   bool function_in_register_a1 = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     Comment cmnt(masm_, "[ Allocate context");
     // Argument to NewContext is the function, which is still in a1.
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
       __ push(a1);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -269,9 +285,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register_a1) {
       __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -765,7 +780,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -816,7 +830,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1133,6 +1146,7 @@
   // Generate code for the going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ pop(a0);
   __ Addu(a0, a0, Operand(Smi::FromInt(1)));
   __ push(a0);
@@ -1155,12 +1169,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(isolate()->factory()->home_object_symbol()));
   __ lw(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1169,12 +1180,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), v0);
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(isolate()->factory()->home_object_symbol()));
   __ lw(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1214,7 +1222,7 @@
   Register temp = t0;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1264,20 +1272,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ li(LoadGlobalDescriptor::SlotRegister(),
-        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1288,7 +1282,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1412,10 +1405,8 @@
             VisitForAccumulatorValue(value);
             __ mov(StoreDescriptor::ValueRegister(), result_register());
             DCHECK(StoreDescriptor::ValueRegister().is(a0));
-            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1585,6 +1576,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1594,8 +1586,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1613,31 +1604,7 @@
     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
     __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
     __ mov(StoreDescriptor::ValueRegister(), result_register());
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(v0);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(v0);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1995,7 +1962,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = a1;
@@ -2022,26 +1989,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -2079,10 +2043,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ li(StoreDescriptor::NameRegister(),
-            Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2129,10 +2090,7 @@
       __ mov(StoreDescriptor::NameRegister(), result_register());
       PopOperands(StoreDescriptor::ValueRegister(),
                   StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2158,10 +2116,8 @@
   if (var->IsUnallocated()) {
     // Global var, const, or let.
     __ mov(StoreDescriptor::ValueRegister(), result_register());
-    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2178,10 +2134,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2197,7 +2153,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2218,13 +2175,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2236,11 +2186,8 @@
   DCHECK(prop->key()->IsLiteral());
 
   __ mov(StoreDescriptor::ValueRegister(), result_register());
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(v0);
@@ -2288,10 +2235,7 @@
               StoreDescriptor::NameRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(a0));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(v0);
@@ -2844,25 +2788,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(v0, a1);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(a1);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -3056,7 +2981,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(a2);
           __ li(a1, Operand(var->name()));
           __ Push(a2, a1);
@@ -3339,11 +3264,8 @@
       break;
     case NAMED_PROPERTY: {
       __ mov(StoreDescriptor::ValueRegister(), result_register());
-      __ li(StoreDescriptor::NameRegister(),
-            Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3382,10 +3304,7 @@
       __ mov(StoreDescriptor::ValueRegister(), result_register());
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::NameRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
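
In every architecture touched here, EmitVariableAssignment's lexical-store path stops throwing unconditionally for CONST and instead asks the variable itself via var->throw_on_const_assignment(language_mode()), while the old CONST_LEGACY tail ("silently ignore store in sloppy mode") disappears. A sketch of the predicate this implies, assuming it lives on Variable in src/ast/variables.h; the exact kind check is an assumption, only the strict/sloppy behaviour is visible in the hunks:

    // Sketch: sloppy-mode assignments to the legacy function-name binding are
    // the only const stores that do not throw; the real definition may differ.
    bool Variable::throw_on_const_assignment(LanguageMode language_mode) const {
      return kind() != SLOPPY_FUNCTION_NAME_VARIABLE || is_strict(language_mode);
    }
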
diff --git a/src/full-codegen/mips64/full-codegen-mips64.cc b/src/full-codegen/mips64/full-codegen-mips64.cc
index c149f13..660adb1 100644
--- a/src/full-codegen/mips64/full-codegen-mips64.cc
+++ b/src/full-codegen/mips64/full-codegen-mips64.cc
@@ -12,14 +12,16 @@
 // places where we have to move a previous result in v0 to a0 for the
 // next call: mov(a0, v0). This is not needed on the other architectures.
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/mips64/code-stubs-mips64.h"
 #include "src/mips64/macro-assembler-mips64.h"
@@ -134,6 +136,20 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ ld(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
+    __ ld(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
+    __ ld(a4, FieldMemOperand(
+                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize));
+    __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
+    __ sd(a4, FieldMemOperand(
+                  a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -176,14 +192,14 @@
   bool function_in_register_a1 = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     Comment cmnt(masm_, "[ Allocate context");
     // Argument to NewContext is the function, which is still in a1.
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
       __ push(a1);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -267,9 +283,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register_a1) {
       __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -764,7 +779,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -815,7 +829,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1134,6 +1147,7 @@
   // Generate code for the going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ pop(a0);
   __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
   __ push(a0);
@@ -1156,12 +1170,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(isolate()->factory()->home_object_symbol()));
   __ ld(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1170,12 +1181,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), v0);
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(isolate()->factory()->home_object_symbol()));
   __ ld(StoreDescriptor::ValueRegister(),
         MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1215,7 +1223,7 @@
   Register temp = a4;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1265,20 +1273,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ li(LoadGlobalDescriptor::SlotRegister(),
-        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1289,7 +1283,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1413,10 +1406,8 @@
             VisitForAccumulatorValue(value);
             __ mov(StoreDescriptor::ValueRegister(), result_register());
             DCHECK(StoreDescriptor::ValueRegister().is(a0));
-            __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1586,6 +1577,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1595,8 +1587,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1614,31 +1605,7 @@
     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
     __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
     __ mov(StoreDescriptor::ValueRegister(), result_register());
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(v0);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(v0);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1995,7 +1962,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = a1;
@@ -2022,26 +1989,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -2079,10 +2043,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ li(StoreDescriptor::NameRegister(),
-            Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2129,10 +2090,7 @@
       __ Move(StoreDescriptor::NameRegister(), result_register());
       PopOperands(StoreDescriptor::ValueRegister(),
                   StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2158,10 +2116,8 @@
   if (var->IsUnallocated()) {
     // Global var, const, or let.
     __ mov(StoreDescriptor::ValueRegister(), result_register());
-    __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2178,10 +2134,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2197,7 +2153,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       __ Push(var->name());
       __ Push(v0);
@@ -2217,13 +2174,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2235,11 +2185,8 @@
   DCHECK(prop->key()->IsLiteral());
 
   __ mov(StoreDescriptor::ValueRegister(), result_register());
-  __ li(StoreDescriptor::NameRegister(),
-        Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(v0);
@@ -2287,10 +2234,7 @@
               StoreDescriptor::NameRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(a0));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(v0);
@@ -2843,25 +2787,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(v0, a1);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(a1);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -3055,7 +2980,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(a2);
           __ li(a1, Operand(var->name()));
           __ Push(a2, a1);
@@ -3339,11 +3264,8 @@
       break;
     case NAMED_PROPERTY: {
       __ mov(StoreDescriptor::ValueRegister(), result_register());
-      __ li(StoreDescriptor::NameRegister(),
-            Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3382,10 +3304,7 @@
       __ mov(StoreDescriptor::ValueRegister(), result_register());
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::NameRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
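
The prologue hunks also swap two Scope queries: the context check `info->scope()->num_heap_slots() > 0` becomes `info->scope()->NeedsContext()`, and `rest_parameter(&rest_index)` becomes an index-free `rest_parameter()` that returns nullptr when the function has no rest parameter. Since the call sites change equivalently on every architecture, the new predicate presumably reduces to the old condition; a sketch under that assumption (the real accessor sits in src/ast/scopes.h):

    // Sketch: equivalent to the `num_heap_slots() > 0` tests it replaces.
    bool Scope::NeedsContext() const { return num_heap_slots() > 0; }
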
diff --git a/src/full-codegen/ppc/full-codegen-ppc.cc b/src/full-codegen/ppc/full-codegen-ppc.cc
index 6813069..de9a8f4 100644
--- a/src/full-codegen/ppc/full-codegen-ppc.cc
+++ b/src/full-codegen/ppc/full-codegen-ppc.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_PPC
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/ppc/code-stubs-ppc.h"
 #include "src/ppc/macro-assembler-ppc.h"
@@ -131,6 +133,22 @@
   info->set_prologue_offset(prologue_offset);
   __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
+    __ LoadP(r7, FieldMemOperand(r7, LiteralsArray::kFeedbackVectorOffset));
+    __ LoadP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
+                                             kPointerSize +
+                                         TypeFeedbackVector::kHeaderSize));
+    __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
+    __ StoreP(r8,
+              FieldMemOperand(
+                  r7, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize),
+              r0);
+  }
+
   {
     Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
@@ -173,14 +191,14 @@
   bool function_in_register_r4 = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     // Argument to NewContext is the function, which is still in r4.
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
       __ push(r4);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -265,9 +283,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register_r4) {
       __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -732,7 +749,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -783,7 +799,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1102,6 +1117,7 @@
   // Generate code for the going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ pop(r3);
   __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
   __ push(r3);
@@ -1124,12 +1140,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ LoadP(StoreDescriptor::ValueRegister(),
            MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1138,12 +1151,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), r3);
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ LoadP(StoreDescriptor::ValueRegister(),
            MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1183,7 +1193,7 @@
   Register temp = r7;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1232,20 +1242,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ mov(LoadGlobalDescriptor::SlotRegister(),
-         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1256,7 +1252,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1379,10 +1374,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(r3));
-            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1552,6 +1545,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1561,8 +1555,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
     // If the subexpression is a literal or a simple materialized literal it
@@ -1578,31 +1571,7 @@
     __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
                       Smi::FromInt(array_index));
     __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(r3);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(r3);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1998,7 +1967,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = r4;
@@ -2025,26 +1994,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -2081,10 +2047,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), r3);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2131,10 +2094,7 @@
       __ Move(StoreDescriptor::NameRegister(), r3);
       PopOperands(StoreDescriptor::ValueRegister(),
                   StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2159,10 +2119,8 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2179,10 +2137,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2198,7 +2156,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2219,12 +2178,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2235,11 +2188,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r3);
@@ -2281,10 +2231,7 @@
               StoreDescriptor::NameRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(r3));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r3);
@@ -2838,24 +2785,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(r3, r4);
-  generator.GenerateFast(masm_);
-  __ b(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(r4);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -3048,7 +2977,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(r5);
           __ mov(r4, Operand(var->name()));
           __ Push(r5, r4);
@@ -3328,11 +3257,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3370,10 +3296,7 @@
     case KEYED_PROPERTY: {
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::NameRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
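
Each architecture's Generate() now bumps a per-function invocation counter right after the prologue: load the literals array from the JSFunction, load its feedback vector, and add Smi::FromInt(1) to the slot at TypeFeedbackVector::kInvocationCountIndex (no write barrier is needed since the stored value stays a Smi). The same bookkeeping expressed as plain C++ over assumed accessor names, with only the slot index taken from the hunks above:

    // Sketch only: accessor names (literals(), feedback_vector(), get/set)
    // are assumptions; the generated assembly manipulates the raw fields.
    void BumpInvocationCount(JSFunction* function) {
      TypeFeedbackVector* vector = function->literals()->feedback_vector();
      int count =
          Smi::cast(vector->get(TypeFeedbackVector::kInvocationCountIndex))->value();
      vector->set(TypeFeedbackVector::kInvocationCountIndex,
                  Smi::FromInt(count + 1));
    }
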
diff --git a/src/full-codegen/s390/full-codegen-s390.cc b/src/full-codegen/s390/full-codegen-s390.cc
index bd1509b..dfe6527 100644
--- a/src/full-codegen/s390/full-codegen-s390.cc
+++ b/src/full-codegen/s390/full-codegen-s390.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_S390
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 #include "src/s390/code-stubs-s390.h"
 #include "src/s390/macro-assembler-s390.h"
@@ -131,6 +133,21 @@
   info->set_prologue_offset(prologue_offset);
   __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ LoadP(r6, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
+    __ LoadP(r6, FieldMemOperand(r6, LiteralsArray::kFeedbackVectorOffset));
+    __ LoadP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
+                                             kPointerSize +
+                                         TypeFeedbackVector::kHeaderSize));
+    __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
+    __ StoreP(r1,
+              FieldMemOperand(
+                  r6, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                          TypeFeedbackVector::kHeaderSize));
+  }
+
   {
     Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
@@ -178,14 +195,14 @@
   bool function_in_register_r3 = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     // Argument to NewContext is the function, which is still in r3.
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (info->scope()->is_script_scope()) {
       __ push(r3);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -270,9 +287,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
 
     if (!function_in_register_r3) {
@@ -708,7 +724,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -758,7 +773,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1072,6 +1086,7 @@
   // Generate code for the going to the next element by incrementing
   // the index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ pop(r2);
   __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
   __ push(r2);
@@ -1093,12 +1108,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ LoadP(StoreDescriptor::ValueRegister(),
            MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
@@ -1106,12 +1118,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ Move(StoreDescriptor::ReceiverRegister(), r2);
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(isolate()->factory()->home_object_symbol()));
   __ LoadP(StoreDescriptor::ValueRegister(),
            MemOperand(sp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
@@ -1149,7 +1158,7 @@
   Register temp = r6;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
@@ -1197,18 +1206,6 @@
   }
 }
 
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ mov(LoadGlobalDescriptor::SlotRegister(),
-         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1219,7 +1216,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1340,10 +1336,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(r2));
-            __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
             __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1512,6 +1506,7 @@
   } else {
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1521,8 +1516,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
     // If the subexpression is a literal or a simple materialized literal it
@@ -1538,31 +1532,7 @@
     __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
                       Smi::FromInt(array_index));
     __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(r2);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(r2);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1956,7 +1926,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     Register scratch = r3;
@@ -1983,26 +1953,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -2037,10 +2004,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), r2);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2087,10 +2051,7 @@
       __ Move(StoreDescriptor::NameRegister(), r2);
       PopOperands(StoreDescriptor::ValueRegister(),
                   StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2113,10 +2074,8 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     // Non-initializing assignment to let variable needs a write barrier.
@@ -2134,10 +2093,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2152,8 +2111,8 @@
     __ CallRuntime(Runtime::kThrowReferenceError);
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
-
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2174,12 +2133,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2189,11 +2142,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ mov(StoreDescriptor::NameRegister(),
-         Operand(prop->key()->AsLiteral()->value()));
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r2);
@@ -2232,10 +2182,7 @@
               StoreDescriptor::NameRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(r2));
 
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(r2);
@@ -2770,23 +2717,6 @@
   context()->Plug(r2);
 }
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(r2, r3);
-  generator.GenerateFast(masm_);
-  __ b(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(r3);
-}
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -2969,7 +2899,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ LoadGlobalObject(r4);
           __ mov(r3, Operand(var->name()));
           __ Push(r4, r3);
@@ -3248,11 +3178,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ mov(StoreDescriptor::NameRegister(),
-             Operand(prop->key()->AsLiteral()->value()));
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3290,10 +3217,7 @@
     case KEYED_PROPERTY: {
       PopOperands(StoreDescriptor::ReceiverRegister(),
                   StoreDescriptor::NameRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
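The prologue hunk added to each architecture bumps the function's invocation count in its type feedback vector before locals are allocated. Expressed as ordinary C++ rather than emitted machine code, the operation amounts to the following sketch; the accessor names are assumptions, only the index and Smi arithmetic come from the hunks themselves:

// Conceptual equivalent of the "[ Increment invocation count" block.
void BumpInvocationCount(JSFunction* function) {
  // function -> literals array -> feedback vector (the two load steps).
  TypeFeedbackVector* vector =
      function->literals()->feedback_vector();  // assumed accessors
  // The counter lives at kInvocationCountIndex and is kept as a Smi.
  Smi* count =
      Smi::cast(vector->get(TypeFeedbackVector::kInvocationCountIndex));
  vector->set(TypeFeedbackVector::kInvocationCountIndex,
              Smi::FromInt(count->value() + 1));
}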
diff --git a/src/full-codegen/x64/full-codegen-x64.cc b/src/full-codegen/x64/full-codegen-x64.cc
index ce94a99..525319f 100644
--- a/src/full-codegen/x64/full-codegen-x64.cc
+++ b/src/full-codegen/x64/full-codegen-x64.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_X64
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 
 namespace v8 {
 namespace internal {
@@ -115,6 +117,18 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ movp(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
+    __ movp(rcx, FieldOperand(rcx, LiteralsArray::kFeedbackVectorOffset));
+    __ SmiAddConstant(
+        FieldOperand(rcx,
+                     TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                         TypeFeedbackVector::kHeaderSize),
+        Smi::FromInt(1));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -158,14 +172,14 @@
   bool function_in_register = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     // Argument to NewContext is the function, which is still in rdi.
     if (info->scope()->is_script_scope()) {
       __ Push(rdi);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -249,9 +263,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register) {
       __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
@@ -730,7 +743,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -780,7 +792,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1092,6 +1103,7 @@
   // Generate code for going to the next element by incrementing the
   // index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
 
   EmitBackEdgeBookkeeping(stmt, &loop);
@@ -1112,12 +1124,9 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
-  __ Move(StoreDescriptor::NameRegister(),
-          isolate()->factory()->home_object_symbol());
   __ movp(StoreDescriptor::ValueRegister(),
           Operand(rsp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1126,12 +1135,9 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ movp(StoreDescriptor::ReceiverRegister(), rax);
-  __ Move(StoreDescriptor::NameRegister(),
-          isolate()->factory()->home_object_symbol());
   __ movp(StoreDescriptor::ValueRegister(),
           Operand(rsp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1169,7 +1175,7 @@
   Register temp = rbx;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
@@ -1217,20 +1223,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ Move(LoadGlobalDescriptor::SlotRegister(),
-          SmiFromSlot(proxy->VariableFeedbackSlot()));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   // Record position before possible IC call.
@@ -1241,7 +1233,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1367,10 +1358,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(rax));
-            __ Move(StoreDescriptor::NameRegister(), key->value());
             __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
 
             if (NeedsHomeObject(value)) {
@@ -1533,6 +1522,7 @@
     __ Move(rcx, constant_elements);
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1542,8 +1532,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1559,31 +1548,7 @@
 
     __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
     __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(rax);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(rax);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
 
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
@@ -1893,7 +1858,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     if (property->is_static()) {
@@ -1918,26 +1883,23 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
 
+      case ClassLiteral::Property::FIELD:
       default:
         UNREACHABLE();
     }
@@ -1974,10 +1936,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), rax);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ Move(StoreDescriptor::NameRegister(),
-              prop->key()->AsLiteral()->value());
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2024,10 +1983,7 @@
       __ Move(StoreDescriptor::NameRegister(), rax);
       PopOperand(StoreDescriptor::ReceiverRegister());
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2050,10 +2006,8 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ Move(StoreDescriptor::NameRegister(), var->name());
     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2069,10 +2023,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
 
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
@@ -2088,7 +2042,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(var->name());
@@ -2109,13 +2064,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2126,10 +2074,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(rax);
@@ -2170,10 +2116,7 @@
   PopOperand(StoreDescriptor::NameRegister());  // Key.
   PopOperand(StoreDescriptor::ReceiverRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(rax));
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
 
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(rax);
@@ -2716,25 +2659,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(rax, rbx);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(rbx);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -2929,7 +2853,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ movp(rax, NativeContextOperand());
           __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
           __ Push(var->name());
@@ -3221,11 +3145,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ Move(StoreDescriptor::NameRegister(),
-              prop->key()->AsLiteral()->value());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3263,10 +3184,7 @@
     case KEYED_PROPERTY: {
       PopOperand(StoreDescriptor::NameRegister());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
diff --git a/src/full-codegen/x87/full-codegen-x87.cc b/src/full-codegen/x87/full-codegen-x87.cc
index 28c8960..47be8b0 100644
--- a/src/full-codegen/x87/full-codegen-x87.cc
+++ b/src/full-codegen/x87/full-codegen-x87.cc
@@ -4,14 +4,16 @@
 
 #if V8_TARGET_ARCH_X87
 
+#include "src/full-codegen/full-codegen.h"
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
+#include "src/compilation-info.h"
+#include "src/compiler.h"
 #include "src/debug/debug.h"
-#include "src/full-codegen/full-codegen.h"
 #include "src/ic/ic.h"
-#include "src/parsing/parser.h"
 #include "src/x87/frames-x87.h"
 
 namespace v8 {
@@ -115,6 +117,17 @@
   info->set_prologue_offset(masm_->pc_offset());
   __ Prologue(info->GeneratePreagedPrologue());
 
+  // Increment invocation count for the function.
+  {
+    Comment cmnt(masm_, "[ Increment invocation count");
+    __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
+    __ mov(ecx, FieldOperand(ecx, LiteralsArray::kFeedbackVectorOffset));
+    __ add(FieldOperand(
+               ecx, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+                        TypeFeedbackVector::kHeaderSize),
+           Immediate(Smi::FromInt(1)));
+  }
+
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = info->scope()->num_stack_slots();
     // Generators allocate locals, if any, in context slots.
@@ -159,14 +172,14 @@
   bool function_in_register = true;
 
   // Possibly allocate a local context.
-  if (info->scope()->num_heap_slots() > 0) {
+  if (info->scope()->NeedsContext()) {
     Comment cmnt(masm_, "[ Allocate context");
     bool need_write_barrier = true;
     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     // Argument to NewContext is the function, which is still in edi.
     if (info->scope()->is_script_scope()) {
       __ push(edi);
-      __ Push(info->scope()->GetScopeInfo(info->isolate()));
+      __ Push(info->scope()->scope_info());
       __ CallRuntime(Runtime::kNewScriptContext);
       PrepareForBailoutForId(BailoutId::ScriptContext(),
                              BailoutState::TOS_REGISTER);
@@ -251,9 +264,8 @@
   }
 
   // Possibly allocate RestParameters
-  int rest_index;
-  Variable* rest_param = info->scope()->rest_parameter(&rest_index);
-  if (rest_param) {
+  Variable* rest_param = info->scope()->rest_parameter();
+  if (rest_param != nullptr) {
     Comment cmnt(masm_, "[ Allocate rest parameter array");
     if (!function_in_register) {
       __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
@@ -714,7 +726,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
@@ -763,7 +774,6 @@
   VariableProxy* proxy = declaration->proxy();
   Variable* variable = proxy->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
       DCHECK(!slot.IsInvalid());
@@ -1058,6 +1068,7 @@
   // Generate code for going to the next element by incrementing the
   // index (smi) stored on top of the stack.
   __ bind(loop_statement.continue_label());
+  PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
   __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
 
   EmitBackEdgeBookkeeping(stmt, &loop);
@@ -1078,11 +1089,8 @@
                                           FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-  __ mov(StoreDescriptor::NameRegister(),
-         Immediate(isolate()->factory()->home_object_symbol()));
   __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1091,11 +1099,8 @@
                                                      FeedbackVectorSlot slot) {
   DCHECK(NeedsHomeObject(initializer));
   __ mov(StoreDescriptor::ReceiverRegister(), eax);
-  __ mov(StoreDescriptor::NameRegister(),
-         Immediate(isolate()->factory()->home_object_symbol()));
   __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
-  EmitLoadStoreICSlot(slot);
-  CallStoreIC();
+  CallStoreIC(slot, isolate()->factory()->home_object_symbol());
 }
 
 
@@ -1133,7 +1138,7 @@
   Register temp = ebx;
 
   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
-    if (s->num_heap_slots() > 0) {
+    if (s->NeedsContext()) {
       if (s->calls_sloppy_eval()) {
         // Check that extension is "the hole".
         __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
@@ -1181,20 +1186,6 @@
   }
 }
 
-
-void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
-                                               TypeofMode typeof_mode) {
-#ifdef DEBUG
-  Variable* var = proxy->var();
-  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
-         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
-#endif
-  __ mov(LoadGlobalDescriptor::SlotRegister(),
-         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
-  CallLoadGlobalIC(typeof_mode);
-}
-
-
 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                          TypeofMode typeof_mode) {
   SetExpressionPosition(proxy);
@@ -1204,7 +1195,6 @@
   // Three cases: global variables, lookup variables, and all other types of
   // variables.
   switch (var->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       Comment cmnt(masm_, "[ Global variable");
       EmitGlobalVariableLoad(proxy, typeof_mode);
@@ -1331,10 +1321,8 @@
           if (property->emit_store()) {
             VisitForAccumulatorValue(value);
             DCHECK(StoreDescriptor::ValueRegister().is(eax));
-            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
             __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-            EmitLoadStoreICSlot(property->GetSlot(0));
-            CallStoreIC();
+            CallStoreIC(property->GetSlot(0), key->value());
             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
             if (NeedsHomeObject(value)) {
               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
@@ -1498,6 +1486,7 @@
     __ mov(ecx, Immediate(constant_elements));
     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
     __ CallStub(&stub);
+    RestoreContext();
   }
   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
 
@@ -1507,8 +1496,7 @@
 
   // Emit code to evaluate all the non-constant subexpressions and to store
   // them into the newly cloned array.
-  int array_index = 0;
-  for (; array_index < length; array_index++) {
+  for (int array_index = 0; array_index < length; array_index++) {
     Expression* subexpr = subexprs->at(array_index);
     DCHECK(!subexpr->IsSpread());
 
@@ -1525,31 +1513,7 @@
     __ mov(StoreDescriptor::NameRegister(),
            Immediate(Smi::FromInt(array_index)));
     __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
-    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
-    Handle<Code> ic =
-        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-    CallIC(ic);
-    PrepareForBailoutForId(expr->GetIdForElement(array_index),
-                           BailoutState::NO_REGISTERS);
-  }
-
-  // In case the array literal contains spread expressions it has two parts. The
-  // first part is  the "static" array which has a literal index is  handled
-  // above. The second part is the part after the first spread expression
-  // (inclusive) and these elements gets appended to the array. Note that the
-  // number elements an iterable produces is unknown ahead of time.
-  if (array_index < length && result_saved) {
-    PopOperand(eax);
-    result_saved = false;
-  }
-  for (; array_index < length; array_index++) {
-    Expression* subexpr = subexprs->at(array_index);
-
-    PushOperand(eax);
-    DCHECK(!subexpr->IsSpread());
-    VisitForStackValue(subexpr);
-    CallRuntimeWithOperands(Runtime::kAppendElement);
-
+    CallKeyedStoreIC(expr->LiteralFeedbackSlot());
     PrepareForBailoutForId(expr->GetIdForElement(array_index),
                            BailoutState::NO_REGISTERS);
   }
@@ -1894,7 +1858,7 @@
 
 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
   for (int i = 0; i < lit->properties()->length(); i++) {
-    ObjectLiteral::Property* property = lit->properties()->at(i);
+    ClassLiteral::Property* property = lit->properties()->at(i);
     Expression* value = property->value();
 
     if (property->is_static()) {
@@ -1919,25 +1883,25 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        UNREACHABLE();
-      case ObjectLiteral::Property::COMPUTED:
+      case ClassLiteral::Property::METHOD:
         PushOperand(Smi::FromInt(DONT_ENUM));
         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
         break;
 
-      case ObjectLiteral::Property::GETTER:
+      case ClassLiteral::Property::GETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
         break;
 
-      case ObjectLiteral::Property::SETTER:
+      case ClassLiteral::Property::SETTER:
         PushOperand(Smi::FromInt(DONT_ENUM));
         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
         break;
+
+      case ClassLiteral::Property::FIELD:
+        UNREACHABLE();
+        break;
     }
   }
 }
@@ -1972,10 +1936,7 @@
       VisitForAccumulatorValue(prop->obj());
       __ Move(StoreDescriptor::ReceiverRegister(), eax);
       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
-      __ mov(StoreDescriptor::NameRegister(),
-             prop->key()->AsLiteral()->value());
-      EmitLoadStoreICSlot(slot);
-      CallStoreIC();
+      CallStoreIC(slot, prop->key()->AsLiteral()->value());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
@@ -2022,10 +1983,7 @@
       __ Move(StoreDescriptor::NameRegister(), eax);
       PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
       PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
-      EmitLoadStoreICSlot(slot);
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      CallIC(ic);
+      CallKeyedStoreIC(slot);
       break;
     }
   }
@@ -2048,13 +2006,11 @@
                                                FeedbackVectorSlot slot) {
   if (var->IsUnallocated()) {
     // Global var, const, or let.
-    __ mov(StoreDescriptor::NameRegister(), var->name());
     __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
     __ mov(StoreDescriptor::ReceiverRegister(),
            ContextOperand(StoreDescriptor::ReceiverRegister(),
                           Context::EXTENSION_INDEX));
-    EmitLoadStoreICSlot(slot);
-    CallStoreIC();
+    CallStoreIC(slot, var->name());
 
   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
     DCHECK(!var->IsLookupSlot());
@@ -2070,10 +2026,10 @@
       __ CallRuntime(Runtime::kThrowReferenceError);
       __ bind(&assign);
     }
-    if (var->mode() == CONST) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    } else {
+    if (var->mode() != CONST) {
       EmitStoreToStackLocalOrContextSlot(var, location);
+    } else if (var->throw_on_const_assignment(language_mode())) {
+      __ CallRuntime(Runtime::kThrowConstAssignError);
     }
   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
     // Initializing assignment to const {this} needs a write barrier.
@@ -2088,7 +2044,8 @@
     __ bind(&uninitialized_this);
     EmitStoreToStackLocalOrContextSlot(var, location);
 
-  } else if (!var->is_const_mode() || op == Token::INIT) {
+  } else {
+    DCHECK(var->mode() != CONST || op == Token::INIT);
     if (var->IsLookupSlot()) {
       // Assignment to var.
       __ Push(Immediate(var->name()));
@@ -2109,13 +2066,6 @@
       }
       EmitStoreToStackLocalOrContextSlot(var, location);
     }
-
-  } else {
-    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
-    if (is_strict(language_mode())) {
-      __ CallRuntime(Runtime::kThrowConstAssignError);
-    }
-    // Silently ignore store in sloppy mode.
   }
 }
 
@@ -2128,10 +2078,8 @@
   DCHECK(prop != NULL);
   DCHECK(prop->key()->IsLiteral());
 
-  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
   PopOperand(StoreDescriptor::ReceiverRegister());
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallStoreIC();
+  CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(eax);
 }
@@ -2174,10 +2122,7 @@
   PopOperand(StoreDescriptor::NameRegister());  // Key.
   PopOperand(StoreDescriptor::ReceiverRegister());
   DCHECK(StoreDescriptor::ValueRegister().is(eax));
-  Handle<Code> ic =
-      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-  EmitLoadStoreICSlot(expr->AssignmentSlot());
-  CallIC(ic);
+  CallKeyedStoreIC(expr->AssignmentSlot());
   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
   context()->Plug(eax);
 }
@@ -2715,25 +2660,6 @@
 }
 
 
-void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
-  DCHECK(args->length() == 1);
-
-  VisitForAccumulatorValue(args->at(0));
-
-  Label done;
-  StringCharFromCodeGenerator generator(eax, ebx);
-  generator.GenerateFast(masm_);
-  __ jmp(&done);
-
-  NopRuntimeCallHelper call_helper;
-  generator.GenerateSlow(masm_, call_helper);
-
-  __ bind(&done);
-  context()->Plug(ebx);
-}
-
-
 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
   ZoneList<Expression*>* args = expr->arguments();
   DCHECK(args->length() == 2);
@@ -2928,7 +2854,7 @@
         // "delete this" is allowed.
         bool is_this = var->is_this();
         DCHECK(is_sloppy(language_mode()) || is_this);
-        if (var->IsUnallocatedOrGlobalSlot()) {
+        if (var->IsUnallocated()) {
           __ mov(eax, NativeContextOperand());
           __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
           __ push(Immediate(var->name()));
@@ -3222,11 +3148,8 @@
       }
       break;
     case NAMED_PROPERTY: {
-      __ mov(StoreDescriptor::NameRegister(),
-             prop->key()->AsLiteral()->value());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallStoreIC();
+      CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         if (!context()->IsEffect()) {
@@ -3264,10 +3187,7 @@
     case KEYED_PROPERTY: {
       PopOperand(StoreDescriptor::NameRegister());
       PopOperand(StoreDescriptor::ReceiverRegister());
-      Handle<Code> ic =
-          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
-      EmitLoadStoreICSlot(expr->CountSlot());
-      CallIC(ic);
+      CallKeyedStoreIC(expr->CountSlot());
       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
       if (expr->is_postfix()) {
         // Result is on the stack
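One behavioural change shared by all four architecture files: the legacy-const path that threw in strict mode and silently ignored the store in sloppy mode is gone, and the generated code now consults var->throw_on_const_assignment(language_mode()) instead. A hypothetical illustration of the predicate being delegated to (the real one lives on Variable and is not part of these hunks), using the SLOPPY_FUNCTION_NAME_VARIABLE kind introduced in globals.h further down:

// Illustration only -- an assumption about the intent, not the V8 source.
bool ThrowOnConstAssignment(LanguageMode mode, VariableKind kind) {
  // Assumption: the sloppy-mode binding of a named function expression is the
  // one CONST variable whose re-assignment is still silently ignored; every
  // other CONST assignment now throws regardless of language mode.
  return is_strict(mode) || kind != SLOPPY_FUNCTION_NAME_VARIABLE;
}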
diff --git a/src/gdb-jit.cc b/src/gdb-jit.cc
index a3af184..4e73981 100644
--- a/src/gdb-jit.cc
+++ b/src/gdb-jit.cc
@@ -9,7 +9,6 @@
 #include "src/base/bits.h"
 #include "src/base/platform/platform.h"
 #include "src/bootstrapper.h"
-#include "src/compiler.h"
 #include "src/frames-inl.h"
 #include "src/frames.h"
 #include "src/global-handles.h"
@@ -2017,7 +2016,7 @@
 static base::HashMap* GetLineMap() {
   static base::HashMap* line_map = NULL;
   if (line_map == NULL) {
-    line_map = new base::HashMap(&base::HashMap::PointersMatch);
+    line_map = new base::HashMap();
   }
   return line_map;
 }
diff --git a/src/globals.h b/src/globals.h
index 0d02f77..03c5b1d 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -14,6 +14,32 @@
 #include "src/base/logging.h"
 #include "src/base/macros.h"
 
+#ifdef V8_OS_WIN
+
+// Setup for Windows shared library export.
+#ifdef BUILDING_V8_SHARED
+#define V8_EXPORT_PRIVATE __declspec(dllexport)
+#elif USING_V8_SHARED
+#define V8_EXPORT_PRIVATE __declspec(dllimport)
+#else
+#define V8_EXPORT_PRIVATE
+#endif  // BUILDING_V8_SHARED
+
+#else  // V8_OS_WIN
+
+// Setup for Linux shared library export.
+#if V8_HAS_ATTRIBUTE_VISIBILITY
+#ifdef BUILDING_V8_SHARED
+#define V8_EXPORT_PRIVATE __attribute__((visibility("default")))
+#else
+#define V8_EXPORT_PRIVATE
+#endif
+#else
+#define V8_EXPORT_PRIVATE
+#endif
+
+#endif  // V8_OS_WIN
+
 // Unfortunately, the INFINITY macro cannot be used with the '-pedantic'
 // warning flag and certain versions of GCC due to a bug:
 // http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931
@@ -161,10 +187,6 @@
 #if V8_OS_WIN
 const size_t kMinimumCodeRangeSize = 4 * MB;
 const size_t kReservedCodeRangePages = 1;
-// On PPC Linux PageSize is 4MB
-#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
-const size_t kMinimumCodeRangeSize = 12 * MB;
-const size_t kReservedCodeRangePages = 0;
 #else
 const size_t kMinimumCodeRangeSize = 3 * MB;
 const size_t kReservedCodeRangePages = 0;
@@ -193,9 +215,17 @@
 const size_t kReservedCodeRangePages = 0;
 #endif
 
-// The external allocation limit should be below 256 MB on all architectures
-// to avoid that resource-constrained embedders run low on memory.
-const int kExternalAllocationLimit = 192 * 1024 * 1024;
+// Trigger an incremental GC once the external memory reaches this limit.
+const int kExternalAllocationSoftLimit = 64 * MB;
+
+// Maximum object size that gets allocated into regular pages. Objects larger
+// than that size are allocated in large object space and are never moved in
+// memory. This also applies to new space allocation, since objects are never
+// migrated from new space to large object space. Takes double alignment into
+// account.
+//
+// Current value: Page::kAllocatableMemory (on 32-bit arch) - 512 (slack).
+const int kMaxRegularHeapObjectSize = 507136;
 
 STATIC_ASSERT(kPointerSize == (1 << kPointerSizeLog2));
 
@@ -722,6 +752,7 @@
 enum CpuFeature {
   // x86
   SSE4_1,
+  SSSE3,
   SSE3,
   SAHF,
   AVX,
@@ -732,13 +763,10 @@
   POPCNT,
   ATOM,
   // ARM
-  VFP3,
-  ARMv7,
-  ARMv8,
-  SUDIV,
-  MOVW_MOVT_IMMEDIATE_LOADS,
-  VFP32DREGS,
-  NEON,
+  // - Standard configurations. The baseline is ARMv6+VFPv2.
+  ARMv7,        // ARMv7-A + VFPv3-D32 + NEON
+  ARMv7_SUDIV,  // ARMv7-A + VFPv4-D32 + NEON + SUDIV
+  ARMv8,        // ARMv8-A (+ all of the above)
   // MIPS, MIPS64
   FPU,
   FP64FPU,
@@ -755,10 +783,14 @@
   DISTINCT_OPS,
   GENERAL_INSTR_EXT,
   FLOATING_POINT_EXT,
-  // PPC/S390
-  UNALIGNED_ACCESSES,
 
-  NUMBER_OF_CPU_FEATURES
+  NUMBER_OF_CPU_FEATURES,
+
+  // ARM feature aliases (based on the standard configurations above).
+  VFPv3 = ARMv7,
+  NEON = ARMv7,
+  VFP32DREGS = ARMv7,
+  SUDIV = ARMv7_SUDIV
 };
 
 // Defines hints about receiver values based on structural knowledge.
@@ -840,8 +872,7 @@
   DO_SMI_CHECK
 };
 
-
-enum ScopeType {
+enum ScopeType : uint8_t {
   EVAL_SCOPE,      // The top-level scope for an eval source.
   FUNCTION_SCOPE,  // The top-level scope for a function.
   MODULE_SCOPE,    // The scope introduced by a module literal
@@ -878,12 +909,10 @@
 
 
 // The order of this enum has to be kept in sync with the predicates below.
-enum VariableMode {
+enum VariableMode : uint8_t {
   // User declared variables:
   VAR,  // declared via 'var', and 'function' declarations
 
-  CONST_LEGACY,  // declared via legacy 'const' declarations
-
   LET,  // declared via 'let' declarations (first lexical)
 
   CONST,  // declared via 'const' declarations (last lexical)
@@ -899,10 +928,44 @@
                    // variable is global unless it has been shadowed
                    // by an eval-introduced variable
 
-  DYNAMIC_LOCAL  // requires dynamic lookup, but we know that the
-                 // variable is local and where it is unless it
-                 // has been shadowed by an eval-introduced
-                 // variable
+  DYNAMIC_LOCAL,  // requires dynamic lookup, but we know that the
+                  // variable is local and where it is unless it
+                  // has been shadowed by an eval-introduced
+                  // variable
+
+  kLastVariableMode = DYNAMIC_LOCAL
+};
+
+// Printing support
+#ifdef DEBUG
+inline const char* VariableMode2String(VariableMode mode) {
+  switch (mode) {
+    case VAR:
+      return "VAR";
+    case LET:
+      return "LET";
+    case CONST:
+      return "CONST";
+    case DYNAMIC:
+      return "DYNAMIC";
+    case DYNAMIC_GLOBAL:
+      return "DYNAMIC_GLOBAL";
+    case DYNAMIC_LOCAL:
+      return "DYNAMIC_LOCAL";
+    case TEMPORARY:
+      return "TEMPORARY";
+  }
+  UNREACHABLE();
+  return NULL;
+}
+#endif
+
+enum VariableKind : uint8_t {
+  NORMAL_VARIABLE,
+  FUNCTION_VARIABLE,
+  THIS_VARIABLE,
+  SLOPPY_FUNCTION_NAME_VARIABLE,
+  kLastKind = SLOPPY_FUNCTION_NAME_VARIABLE
 };
 
 inline bool IsDynamicVariableMode(VariableMode mode) {
@@ -911,7 +974,8 @@
 
 
 inline bool IsDeclaredVariableMode(VariableMode mode) {
-  return mode >= VAR && mode <= CONST;
+  STATIC_ASSERT(VAR == 0);  // Implies that mode >= VAR.
+  return mode <= CONST;
 }
 
 
@@ -919,12 +983,7 @@
   return mode >= LET && mode <= CONST;
 }
 
-
-inline bool IsImmutableVariableMode(VariableMode mode) {
-  return mode == CONST || mode == CONST_LEGACY;
-}
-
-enum class VariableLocation {
+enum VariableLocation : uint8_t {
   // Before and during variable allocation, a variable whose location is
   // not yet determined.  After allocation, a variable looked up as a
   // property on the global object (and possibly absent).  name() is the
@@ -945,19 +1004,15 @@
   // corresponding scope.
   CONTEXT,
 
-  // An indexed slot in a script context that contains a respective global
-  // property cell.  name() is the variable name, index() is the variable
-  // index in the context object on the heap, starting at 0.  scope() is the
-  // corresponding script scope.
-  GLOBAL,
-
   // A named slot in a heap context.  name() is the variable name in the
   // context object on the heap, with lookup starting at the current
   // context.  index() is invalid.
   LOOKUP,
 
   // A named slot in a module's export table.
-  MODULE
+  MODULE,
+
+  kLastVariableLocation = MODULE
 };
 
 // ES6 Draft Rev3 10.2 specifies declarative environment records with mutable
@@ -991,14 +1046,9 @@
 // The following enum specifies a flag that indicates if the binding needs a
 // distinct initialization step (kNeedsInitialization) or if the binding is
 // immediately initialized upon creation (kCreatedInitialized).
-enum InitializationFlag {
-  kNeedsInitialization,
-  kCreatedInitialized
-};
+enum InitializationFlag : uint8_t { kNeedsInitialization, kCreatedInitialized };
 
-
-enum MaybeAssignedFlag { kNotAssigned, kMaybeAssigned };
-
+enum MaybeAssignedFlag : uint8_t { kNotAssigned, kMaybeAssigned };
 
 // Serialized in PreparseData, so numeric values should not be changed.
 enum ParseErrorType { kSyntaxError = 0, kReferenceError = 1 };
@@ -1024,6 +1074,7 @@
   kGetterFunction = 1 << 6,
   kSetterFunction = 1 << 7,
   kAsyncFunction = 1 << 8,
+  kModule = 1 << 9,
   kAccessorFunction = kGetterFunction | kSetterFunction,
   kDefaultBaseConstructor = kDefaultConstructor | kBaseConstructor,
   kDefaultSubclassConstructor = kDefaultConstructor | kSubclassConstructor,
@@ -1037,6 +1088,7 @@
   return kind == FunctionKind::kNormalFunction ||
          kind == FunctionKind::kArrowFunction ||
          kind == FunctionKind::kGeneratorFunction ||
+         kind == FunctionKind::kModule ||
          kind == FunctionKind::kConciseMethod ||
          kind == FunctionKind::kConciseGeneratorMethod ||
          kind == FunctionKind::kGetterFunction ||
@@ -1063,13 +1115,18 @@
   return kind & FunctionKind::kGeneratorFunction;
 }
 
+inline bool IsModule(FunctionKind kind) {
+  DCHECK(IsValidFunctionKind(kind));
+  return kind & FunctionKind::kModule;
+}
+
 inline bool IsAsyncFunction(FunctionKind kind) {
   DCHECK(IsValidFunctionKind(kind));
   return kind & FunctionKind::kAsyncFunction;
 }
 
 inline bool IsResumableFunction(FunctionKind kind) {
-  return IsGeneratorFunction(kind) || IsAsyncFunction(kind);
+  return IsGeneratorFunction(kind) || IsAsyncFunction(kind) || IsModule(kind);
 }
 
 inline bool IsConciseMethod(FunctionKind kind) {
@@ -1152,11 +1209,59 @@
 // at different points by performing an 'OR' operation. Type feedback moves
 // to a more generic type when we combine feedback.
 // kSignedSmall -> kNumber  -> kAny
+//                 kString  -> kAny
 class BinaryOperationFeedback {
  public:
+  enum {
+    kNone = 0x0,
+    kSignedSmall = 0x1,
+    kNumber = 0x3,
+    kString = 0x4,
+    kAny = 0xF
+  };
+};
+
+// TODO(epertoso): consider unifying this with BinaryOperationFeedback.
+class CompareOperationFeedback {
+ public:
   enum { kNone = 0x00, kSignedSmall = 0x01, kNumber = 0x3, kAny = 0x7 };
 };
 
+// Describes how exactly a frame has been dropped from the stack.
+enum LiveEditFrameDropMode {
+  // No frame has been dropped.
+  LIVE_EDIT_FRAMES_UNTOUCHED,
+  // The top JS frame had been calling debug break slot stub. Patch the
+  // address this stub jumps to in the end.
+  LIVE_EDIT_FRAME_DROPPED_IN_DEBUG_SLOT_CALL,
+  // The top JS frame had been calling some C++ function. The return address
+  // gets patched automatically.
+  LIVE_EDIT_FRAME_DROPPED_IN_DIRECT_CALL,
+  LIVE_EDIT_FRAME_DROPPED_IN_RETURN_CALL,
+  LIVE_EDIT_CURRENTLY_SET_MODE
+};
+
+enum class UnicodeEncoding : uint8_t {
+  // Different unicode encodings in a |word32|:
+  UTF16,  // hi 16bits -> trailing surrogate or 0, low 16bits -> lead surrogate
+  UTF32,  // full UTF32 code unit / Unicode codepoint
+};
+
+inline size_t hash_value(UnicodeEncoding encoding) {
+  return static_cast<uint8_t>(encoding);
+}
+
+inline std::ostream& operator<<(std::ostream& os, UnicodeEncoding encoding) {
+  switch (encoding) {
+    case UnicodeEncoding::UTF16:
+      return os << "UTF16";
+    case UnicodeEncoding::UTF32:
+      return os << "UTF32";
+  }
+  UNREACHABLE();
+  return os;
+}
+
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/handles.h b/src/handles.h
index a7cd0e2..3587d85 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -10,7 +10,7 @@
 #include "src/base/macros.h"
 #include "src/checks.h"
 #include "src/globals.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -63,10 +63,12 @@
 
   enum DereferenceCheckMode { INCLUDE_DEFERRED_CHECK, NO_DEFERRED_CHECK };
 #ifdef DEBUG
-  bool IsDereferenceAllowed(DereferenceCheckMode mode) const;
+  bool V8_EXPORT_PRIVATE IsDereferenceAllowed(DereferenceCheckMode mode) const;
 #else
   V8_INLINE
-  bool IsDereferenceAllowed(DereferenceCheckMode mode) const { return true; }
+  bool V8_EXPORT_PRIVATE IsDereferenceAllowed(DereferenceCheckMode mode) const {
+    return true;
+  }
 #endif  // DEBUG
 
   Object** location_;
@@ -206,6 +208,10 @@
     USE(a);
   }
 
+  template <typename S>
+  V8_INLINE MaybeHandle(S* object, Isolate* isolate)
+      : MaybeHandle(handle(object, isolate)) {}
+
   V8_INLINE void Assert() const { DCHECK_NOT_NULL(location_); }
   V8_INLINE void Check() const { CHECK_NOT_NULL(location_); }
 
@@ -262,7 +268,7 @@
   inline ~HandleScope();
 
   // Counts the number of allocated handles.
-  static int NumberOfHandles(Isolate* isolate);
+  V8_EXPORT_PRIVATE static int NumberOfHandles(Isolate* isolate);
 
   // Create a new handle or lookup a canonical handle.
   V8_INLINE static Object** GetHandle(Isolate* isolate, Object* value);
@@ -271,7 +277,7 @@
   V8_INLINE static Object** CreateHandle(Isolate* isolate, Object* value);
 
   // Deallocates any extensions used by the current scope.
-  static void DeleteExtensions(Isolate* isolate);
+  V8_EXPORT_PRIVATE static void DeleteExtensions(Isolate* isolate);
 
   static Address current_next_address(Isolate* isolate);
   static Address current_limit_address(Isolate* isolate);
@@ -293,8 +299,6 @@
 
  private:
   // Prevent heap allocation or illegal handle scopes.
-  HandleScope(const HandleScope&);
-  void operator=(const HandleScope&);
   void* operator new(size_t size);
   void operator delete(void* size_t);
 
@@ -308,11 +312,11 @@
                                 Object** prev_limit);
 
   // Extend the handle scope making room for more handles.
-  static Object** Extend(Isolate* isolate);
+  V8_EXPORT_PRIVATE static Object** Extend(Isolate* isolate);
 
 #ifdef ENABLE_HANDLE_ZAPPING
   // Zaps the handles in the half-open interval [start, end).
-  static void ZapRange(Object** start, Object** end);
+  V8_EXPORT_PRIVATE static void ZapRange(Object** start, Object** end);
 #endif
 
   friend class v8::HandleScope;
@@ -320,6 +324,8 @@
   friend class DeferredHandleScope;
   friend class HandleScopeImplementer;
   friend class Isolate;
+
+  DISALLOW_COPY_AND_ASSIGN(HandleScope);
 };
 
 
@@ -340,7 +346,7 @@
   ~CanonicalHandleScope();
 
  private:
-  Object** Lookup(Object* object);
+  V8_EXPORT_PRIVATE Object** Lookup(Object* object);
 
   Isolate* isolate_;
   Zone zone_;
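The handles.h change above swaps the hand-declared private copy constructor and assignment operator for DISALLOW_COPY_AND_ASSIGN. A sketch of that idiom; the macro body below is an assumed shape (V8's own definition lives in src/base/macros.h), shown here only to illustrate the effect:

    // Assumed shape of the macro; with C++11 it can simply delete both members.
    #define DISALLOW_COPY_AND_ASSIGN(TypeName) \
      TypeName(const TypeName&) = delete;      \
      TypeName& operator=(const TypeName&) = delete

    class ScopeLike {
     public:
      ScopeLike() = default;

     private:
      DISALLOW_COPY_AND_ASSIGN(ScopeLike);
    };

    int main() {
      ScopeLike a;
      // ScopeLike b = a;     // would not compile: copy constructor is deleted
      // ScopeLike c; c = a;  // would not compile: copy assignment is deleted
      (void)a;
      return 0;
    }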
diff --git a/src/heap-symbols.h b/src/heap-symbols.h
index d83f63f..c7b3370 100644
--- a/src/heap-symbols.h
+++ b/src/heap-symbols.h
@@ -8,11 +8,11 @@
 #define INTERNALIZED_STRING_LIST(V)                                \
   V(anonymous_string, "anonymous")                                 \
   V(apply_string, "apply")                                         \
-  V(assign_string, "assign")                                       \
   V(arguments_string, "arguments")                                 \
   V(Arguments_string, "Arguments")                                 \
-  V(Array_string, "Array")                                         \
   V(arguments_to_string, "[object Arguments]")                     \
+  V(Array_string, "Array")                                         \
+  V(assign_string, "assign")                                       \
   V(array_to_string, "[object Array]")                             \
   V(boolean_to_string, "[object Boolean]")                         \
   V(date_to_string, "[object Date]")                               \
@@ -48,6 +48,8 @@
   V(construct_string, "construct")                                 \
   V(create_string, "create")                                       \
   V(Date_string, "Date")                                           \
+  V(dayperiod_string, "dayperiod")                                 \
+  V(day_string, "day")                                             \
   V(default_string, "default")                                     \
   V(defineProperty_string, "defineProperty")                       \
   V(deleteProperty_string, "deleteProperty")                       \
@@ -57,10 +59,12 @@
   V(dot_string, ".")                                               \
   V(entries_string, "entries")                                     \
   V(enumerable_string, "enumerable")                               \
+  V(era_string, "era")                                             \
   V(Error_string, "Error")                                         \
   V(eval_string, "eval")                                           \
   V(EvalError_string, "EvalError")                                 \
   V(false_string, "false")                                         \
+  V(flags_string, "flags")                                         \
   V(float32x4_string, "float32x4")                                 \
   V(Float32x4_string, "Float32x4")                                 \
   V(for_api_string, "for_api")                                     \
@@ -74,6 +78,8 @@
   V(get_string, "get")                                             \
   V(global_string, "global")                                       \
   V(has_string, "has")                                             \
+  V(hour_string, "hour")                                           \
+  V(ignoreCase_string, "ignoreCase")                               \
   V(illegal_access_string, "illegal access")                       \
   V(illegal_argument_string, "illegal argument")                   \
   V(index_string, "index")                                         \
@@ -92,10 +98,14 @@
   V(last_index_string, "lastIndex")                                \
   V(length_string, "length")                                       \
   V(line_string, "line")                                           \
+  V(literal_string, "literal")                                     \
   V(Map_string, "Map")                                             \
   V(message_string, "message")                                     \
   V(minus_infinity_string, "-Infinity")                            \
   V(minus_zero_string, "-0")                                       \
+  V(minute_string, "minute")                                       \
+  V(month_string, "month")                                         \
+  V(multiline_string, "multiline")                                 \
   V(name_string, "name")                                           \
   V(nan_string, "NaN")                                             \
   V(next_string, "next")                                           \
@@ -120,6 +130,7 @@
   V(ReferenceError_string, "ReferenceError")                       \
   V(RegExp_string, "RegExp")                                       \
   V(script_string, "script")                                       \
+  V(second_string, "second")                                       \
   V(setPrototypeOf_string, "setPrototypeOf")                       \
   V(set_string, "set")                                             \
   V(Set_string, "Set")                                             \
@@ -128,6 +139,7 @@
   V(sourceText_string, "sourceText")                               \
   V(source_url_string, "source_url")                               \
   V(stack_string, "stack")                                         \
+  V(stackTraceLimit_string, "stackTraceLimit")                     \
   V(strict_compare_ic_string, "===")                               \
   V(string_string, "string")                                       \
   V(String_string, "String")                                       \
@@ -137,10 +149,12 @@
   V(this_string, "this")                                           \
   V(throw_string, "throw")                                         \
   V(timed_out, "timed-out")                                        \
+  V(timeZoneName_string, "timeZoneName")                           \
   V(toJSON_string, "toJSON")                                       \
   V(toString_string, "toString")                                   \
   V(true_string, "true")                                           \
   V(TypeError_string, "TypeError")                                 \
+  V(type_string, "type")                                           \
   V(uint16x8_string, "uint16x8")                                   \
   V(Uint16x8_string, "Uint16x8")                                   \
   V(uint32x4_string, "uint32x4")                                   \
@@ -155,19 +169,16 @@
   V(value_string, "value")                                         \
   V(WeakMap_string, "WeakMap")                                     \
   V(WeakSet_string, "WeakSet")                                     \
-  V(writable_string, "writable")
+  V(weekday_string, "weekday")                                     \
+  V(writable_string, "writable")                                   \
+  V(year_string, "year")
 
 #define PRIVATE_SYMBOL_LIST(V)              \
   V(array_iteration_kind_symbol)            \
   V(array_iterator_next_symbol)             \
   V(array_iterator_object_symbol)           \
-  V(call_site_constructor_symbol)           \
-  V(call_site_function_symbol)              \
-  V(call_site_position_symbol)              \
-  V(call_site_receiver_symbol)              \
-  V(call_site_strict_symbol)                \
-  V(call_site_wasm_obj_symbol)              \
-  V(call_site_wasm_func_index_symbol)       \
+  V(call_site_frame_array_symbol)           \
+  V(call_site_frame_index_symbol)           \
   V(class_end_position_symbol)              \
   V(class_start_position_symbol)            \
   V(detailed_stack_trace_symbol)            \
@@ -189,10 +200,13 @@
   V(normal_ic_symbol)                       \
   V(not_mapped_symbol)                      \
   V(premonomorphic_symbol)                  \
-  V(promise_combined_deferred_symbol)       \
+  V(promise_async_stack_id_symbol)          \
   V(promise_debug_marker_symbol)            \
   V(promise_deferred_reactions_symbol)      \
+  V(promise_forwarding_handler_symbol)      \
   V(promise_fulfill_reactions_symbol)       \
+  V(promise_handled_by_symbol)              \
+  V(promise_handled_hint_symbol)            \
   V(promise_has_handler_symbol)             \
   V(promise_raw_symbol)                     \
   V(promise_reject_reactions_symbol)        \
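The heap-symbols.h lists above are X-macros: each entry is written once and expanded with a different definition of V at every use site, which is why the edits only add, remove, or alphabetize V(...) lines. A minimal sketch of the technique outside V8:

    #include <cstdio>

    #define STRING_LIST(V)     \
      V(day_string, "day")     \
      V(month_string, "month") \
      V(year_string, "year")

    // Expansion 1: declare one enum entry per string.
    #define DECLARE_ENUM(name, contents) k_##name,
    enum StringId { STRING_LIST(DECLARE_ENUM) kStringIdCount };
    #undef DECLARE_ENUM

    // Expansion 2: build a table of the literal contents.
    #define DECLARE_LITERAL(name, contents) contents,
    static const char* kContents[] = {STRING_LIST(DECLARE_LITERAL)};
    #undef DECLARE_LITERAL

    int main() {
      for (int i = 0; i < kStringIdCount; i++) std::printf("%s\n", kContents[i]);
      return 0;
    }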
diff --git a/src/heap/gc-tracer.cc b/src/heap/gc-tracer.cc
index 695a259..8049ce4 100644
--- a/src/heap/gc-tracer.cc
+++ b/src/heap/gc-tracer.cc
@@ -23,11 +23,16 @@
 
 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
     : tracer_(tracer), scope_(scope) {
+  // All accesses to incremental_marking_scope assume that incremental marking
+  // scopes come first.
+  STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0);
   start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
-    RuntimeCallStats::Enter(tracer_->heap_->isolate(), &timer_,
-                            &RuntimeCallStats::GC);
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
+    RuntimeCallStats::Enter(
+        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_,
+        &RuntimeCallStats::GC);
   }
 }
 
@@ -35,8 +40,10 @@
   tracer_->AddScopeSample(
       scope_, tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_);
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
-    RuntimeCallStats::Leave(tracer_->heap_->isolate(), &timer_);
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
+    RuntimeCallStats::Leave(
+        tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_);
   }
 }
 
@@ -53,7 +60,7 @@
   return "(unknown)";
 }
 
-GCTracer::Event::Event(Type type, const char* gc_reason,
+GCTracer::Event::Event(Type type, GarbageCollectionReason gc_reason,
                        const char* collector_reason)
     : type(type),
       gc_reason(gc_reason),
@@ -69,10 +76,8 @@
       end_holes_size(0),
       new_space_object_size(0),
       survived_new_space_object_size(0),
-      cumulative_incremental_marking_bytes(0),
       incremental_marking_bytes(0),
-      cumulative_pure_incremental_marking_duration(0.0),
-      pure_incremental_marking_duration(0.0) {
+      incremental_marking_duration(0.0) {
   for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
     scopes[i] = 0;
   }
@@ -106,14 +111,11 @@
 
 GCTracer::GCTracer(Heap* heap)
     : heap_(heap),
-      current_(Event::START, nullptr, nullptr),
+      current_(Event::START, GarbageCollectionReason::kUnknown, nullptr),
       previous_(current_),
-      previous_incremental_mark_compactor_event_(current_),
-      cumulative_incremental_marking_bytes_(0),
-      cumulative_incremental_marking_duration_(0.0),
-      cumulative_pure_incremental_marking_duration_(0.0),
-      cumulative_marking_duration_(0.0),
-      cumulative_sweeping_duration_(0.0),
+      incremental_marking_bytes_(0),
+      incremental_marking_duration_(0.0),
+      recorded_incremental_marking_speed_(0.0),
       allocation_time_ms_(0.0),
       new_space_allocation_counter_bytes_(0),
       old_generation_allocation_counter_bytes_(0),
@@ -126,19 +128,10 @@
 }
 
 void GCTracer::ResetForTesting() {
-  current_ = Event(Event::START, NULL, NULL);
+  current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr);
   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
-  previous_ = previous_incremental_mark_compactor_event_ = current_;
-  cumulative_incremental_marking_bytes_ = 0.0;
-  cumulative_incremental_marking_duration_ = 0.0;
-  cumulative_pure_incremental_marking_duration_ = 0.0;
-  cumulative_marking_duration_ = 0.0;
-  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
-    incremental_marking_scopes_[i].cumulative_duration = 0.0;
-    incremental_marking_scopes_[i].steps = 0;
-    incremental_marking_scopes_[i].longest_step = 0.0;
-  }
-  cumulative_sweeping_duration_ = 0.0;
+  previous_ = current_;
+  ResetIncrementalMarkingCounters();
   allocation_time_ms_ = 0.0;
   new_space_allocation_counter_bytes_ = 0.0;
   old_generation_allocation_counter_bytes_ = 0.0;
@@ -158,7 +151,8 @@
   start_counter_ = 0;
 }
 
-void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
+void GCTracer::Start(GarbageCollector collector,
+                     GarbageCollectionReason gc_reason,
                      const char* collector_reason) {
   start_counter_++;
   if (start_counter_ != 1) return;
@@ -167,8 +161,6 @@
   double start_time = heap_->MonotonicallyIncreasingTimeInMs();
   SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                    heap_->OldGenerationAllocationCounter());
-  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
-    previous_incremental_mark_compactor_event_ = current_;
 
   if (collector == SCAVENGER) {
     current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
@@ -189,10 +181,8 @@
   current_.new_space_object_size =
       heap_->new_space()->top() - heap_->new_space()->bottom();
 
-  current_.cumulative_incremental_marking_bytes =
-      cumulative_incremental_marking_bytes_;
-  current_.cumulative_pure_incremental_marking_duration =
-      cumulative_pure_incremental_marking_duration_;
+  current_.incremental_marking_bytes = 0;
+  current_.incremental_marking_duration = 0;
 
   for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
     current_.scopes[i] = 0;
@@ -200,37 +190,40 @@
 
   int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
   int used_memory = static_cast<int>(current_.start_object_size / KB);
-  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
-      start_time, committed_memory);
-  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
-      start_time, used_memory);
+
+  Counters* counters = heap_->isolate()->counters();
+
+  if (collector == SCAVENGER) {
+    counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason));
+  } else {
+    counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason));
+  }
+  counters->aggregated_memory_heap_committed()->AddSample(start_time,
+                                                          committed_memory);
+  counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory);
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
-    RuntimeCallStats::Enter(heap_->isolate(), &timer_, &RuntimeCallStats::GC);
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
+    RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(),
+                            &timer_, &RuntimeCallStats::GC);
   }
 }
 
-void GCTracer::MergeBaseline(const Event& baseline) {
-  current_.incremental_marking_bytes =
-      current_.cumulative_incremental_marking_bytes -
-      baseline.cumulative_incremental_marking_bytes;
-  current_.pure_incremental_marking_duration =
-      current_.cumulative_pure_incremental_marking_duration -
-      baseline.cumulative_pure_incremental_marking_duration;
-  for (int i = Scope::FIRST_INCREMENTAL_SCOPE;
-       i <= Scope::LAST_INCREMENTAL_SCOPE; i++) {
-    current_.scopes[i] =
-        current_.incremental_marking_scopes[i].cumulative_duration -
-        baseline.incremental_marking_scopes[i].cumulative_duration;
+void GCTracer::ResetIncrementalMarkingCounters() {
+  incremental_marking_bytes_ = 0;
+  incremental_marking_duration_ = 0;
+  for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
+    incremental_marking_scopes_[i].ResetCurrentCycle();
   }
 }
 
 void GCTracer::Stop(GarbageCollector collector) {
   start_counter_--;
   if (start_counter_ != 0) {
-    PrintIsolate(heap_->isolate(), "[Finished reentrant %s during %s.]\n",
-                 collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
-                 current_.TypeName(false));
+    heap_->isolate()->PrintWithTimestamp(
+        "[Finished reentrant %s during %s.]\n",
+        collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
+        current_.TypeName(false));
     return;
   }
 
@@ -240,11 +233,6 @@
           (current_.type == Event::MARK_COMPACTOR ||
            current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));
 
-  for (int i = Scope::FIRST_INCREMENTAL_SCOPE;
-       i <= Scope::LAST_INCREMENTAL_SCOPE; i++) {
-    current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
-  }
-
   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
   current_.end_object_size = heap_->SizeOfObjects();
   current_.end_memory_size = heap_->memory_allocator()->Size();
@@ -263,36 +251,33 @@
   double duration = current_.end_time - current_.start_time;
 
   if (current_.type == Event::SCAVENGER) {
-    MergeBaseline(previous_);
     recorded_scavenges_total_.Push(
         MakeBytesAndDuration(current_.new_space_object_size, duration));
     recorded_scavenges_survived_.Push(MakeBytesAndDuration(
         current_.survived_new_space_object_size, duration));
   } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
-    MergeBaseline(previous_incremental_mark_compactor_event_);
-    recorded_incremental_marking_steps_.Push(
-        MakeBytesAndDuration(current_.incremental_marking_bytes,
-                             current_.pure_incremental_marking_duration));
+    current_.incremental_marking_bytes = incremental_marking_bytes_;
+    current_.incremental_marking_duration = incremental_marking_duration_;
+    for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
+      current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i];
+      current_.scopes[i] = incremental_marking_scopes_[i].duration;
+    }
+    RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes,
+                                  current_.incremental_marking_duration);
     recorded_incremental_mark_compacts_.Push(
         MakeBytesAndDuration(current_.start_object_size, duration));
+    ResetIncrementalMarkingCounters();
     combined_mark_compact_speed_cache_ = 0.0;
-    for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
-      incremental_marking_scopes_[i].ResetCurrentCycle();
-    }
   } else {
-    DCHECK(current_.incremental_marking_bytes == 0);
-    DCHECK(current_.pure_incremental_marking_duration == 0);
+    DCHECK_EQ(0, current_.incremental_marking_bytes);
+    DCHECK_EQ(0, current_.incremental_marking_duration);
     recorded_mark_compacts_.Push(
         MakeBytesAndDuration(current_.start_object_size, duration));
+    ResetIncrementalMarkingCounters();
     combined_mark_compact_speed_cache_ = 0.0;
-    for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) {
-      incremental_marking_scopes_[i].ResetCurrentCycle();
-    }
   }
 
-  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);
-  heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
-                                      current_.scopes[Scope::MC_MARK]);
+  heap_->UpdateTotalGCTime(duration);
 
   if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
     return;
@@ -308,8 +293,10 @@
   }
 
   // TODO(cbruni): remove once we fully moved to a trace-based system.
-  if (FLAG_runtime_call_stats) {
-    RuntimeCallStats::Leave(heap_->isolate(), &timer_);
+  if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() ||
+      FLAG_runtime_call_stats) {
+    RuntimeCallStats::Leave(heap_->isolate()->counters()->runtime_call_stats(),
+                            &timer_);
   }
 }
 
@@ -375,11 +362,9 @@
 
 
 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
-  cumulative_incremental_marking_bytes_ += bytes;
-  cumulative_incremental_marking_duration_ += duration;
-  cumulative_marking_duration_ += duration;
   if (bytes > 0) {
-    cumulative_pure_incremental_marking_duration_ += duration;
+    incremental_marking_bytes_ += bytes;
+    incremental_marking_duration_ += duration;
   }
 }
 
@@ -402,29 +387,20 @@
   heap_->AddToRingBuffer(buffer.start());
 }
 
-
 void GCTracer::Print() const {
   double duration = current_.end_time - current_.start_time;
   const size_t kIncrementalStatsSize = 128;
   char incremental_buffer[kIncrementalStatsSize] = {0};
 
-  if (current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps > 0) {
-    if (current_.type == Event::SCAVENGER) {
-      base::OS::SNPrintF(
-          incremental_buffer, kIncrementalStatsSize,
-          " (+ %.1f ms in %d steps since last GC)",
-          current_.scopes[Scope::MC_INCREMENTAL],
-          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps);
-    } else {
-      base::OS::SNPrintF(
-          incremental_buffer, kIncrementalStatsSize,
-          " (+ %.1f ms in %d steps since start of marking, "
-          "biggest step %.1f ms)",
-          current_.scopes[Scope::MC_INCREMENTAL],
-          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
-          current_.incremental_marking_scopes[Scope::MC_INCREMENTAL]
-              .longest_step);
-    }
+  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
+    base::OS::SNPrintF(
+        incremental_buffer, kIncrementalStatsSize,
+        " (+ %.1f ms in %d steps since start of marking, "
+        "biggest step %.1f ms, walltime since start of marking %.f ms)",
+        current_.scopes[Scope::MC_INCREMENTAL],
+        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
+        current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].longest_step,
+        current_.end_time - incremental_marking_start_time_);
   }
 
   // Avoid PrintF as Output also appends the string to the tracing ring buffer
@@ -442,7 +418,7 @@
       static_cast<double>(current_.end_object_size) / MB,
       static_cast<double>(current_.end_memory_size) / MB, duration,
       TotalExternalTime(), incremental_buffer,
-      current_.gc_reason != nullptr ? current_.gc_reason : "",
+      Heap::GarbageCollectionReasonToString(current_.gc_reason),
       current_.collector_reason != nullptr ? current_.collector_reason : "");
 }
 
@@ -453,11 +429,16 @@
   intptr_t allocated_since_last_gc =
       current_.start_object_size - previous_.end_object_size;
 
+  double incremental_walltime_duration = 0;
+
+  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
+    incremental_walltime_duration =
+        current_.end_time - incremental_marking_start_time_;
+  }
+
   switch (current_.type) {
     case Event::SCAVENGER:
-      PrintIsolate(
-          heap_->isolate(),
-          "%8.0f ms: "
+      heap_->isolate()->PrintWithTimestamp(
           "pause=%.1f "
           "mutator=%.1f "
           "gc=%s "
@@ -498,9 +479,8 @@
           "semi_space_copy_rate=%.1f%% "
           "new_space_allocation_throughput=%.1f "
           "context_disposal_rate=%.1f\n",
-          heap_->isolate()->time_millis_since_init(), duration,
-          spent_in_mutator, current_.TypeName(true), current_.reduce_memory,
-          current_.scopes[Scope::SCAVENGER_SCAVENGE],
+          duration, spent_in_mutator, current_.TypeName(true),
+          current_.reduce_memory, current_.scopes[Scope::SCAVENGER_SCAVENGE],
           current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
           current_.scopes[Scope::SCAVENGER_WEAK],
           current_.scopes[Scope::SCAVENGER_ROOTS],
@@ -527,9 +507,7 @@
       break;
     case Event::MARK_COMPACTOR:
     case Event::INCREMENTAL_MARK_COMPACTOR:
-      PrintIsolate(
-          heap_->isolate(),
-          "%8.0f ms: "
+      heap_->isolate()->PrintWithTimestamp(
           "pause=%.1f "
           "mutator=%.1f "
           "gc=%s "
@@ -580,6 +558,7 @@
           "incremental.finalize.external.prologue=%.1f "
           "incremental.finalize.external.epilogue=%.1f "
           "incremental.finalize.object_grouping=%.1f "
+          "incremental.sweeping=%.1f "
           "incremental.wrapper_prologue=%.1f "
           "incremental.wrapper_tracing=%.1f "
           "incremental_wrapper_tracing_longest_step=%.1f "
@@ -588,6 +567,7 @@
           "incremental_longest_step=%.1f "
           "incremental_steps_count=%d "
           "incremental_marking_throughput=%.f "
+          "incremental_walltime_duration=%.f "
           "total_size_before=%" V8PRIdPTR
           " "
           "total_size_after=%" V8PRIdPTR
@@ -612,9 +592,8 @@
           "new_space_allocation_throughput=%.1f "
           "context_disposal_rate=%.1f "
           "compaction_speed=%.f\n",
-          heap_->isolate()->time_millis_since_init(), duration,
-          spent_in_mutator, current_.TypeName(true), current_.reduce_memory,
-          current_.scopes[Scope::MC_CLEAR],
+          duration, spent_in_mutator, current_.TypeName(true),
+          current_.reduce_memory, current_.scopes[Scope::MC_CLEAR],
           current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
           current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
           current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
@@ -659,6 +638,7 @@
           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE],
           current_.scopes[Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE],
           current_.scopes[Scope::MC_INCREMENTAL_FINALIZE_OBJECT_GROUPING],
+          current_.scopes[Scope::MC_INCREMENTAL_SWEEPING],
           current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE],
           current_.scopes[Scope::MC_INCREMENTAL_WRAPPER_TRACING],
           current_
@@ -674,9 +654,10 @@
               .longest_step,
           current_.incremental_marking_scopes[Scope::MC_INCREMENTAL].steps,
           IncrementalMarkingSpeedInBytesPerMillisecond(),
-          current_.start_object_size, current_.end_object_size,
-          current_.start_holes_size, current_.end_holes_size,
-          allocated_since_last_gc, heap_->promoted_objects_size(),
+          incremental_walltime_duration, current_.start_object_size,
+          current_.end_object_size, current_.start_holes_size,
+          current_.end_holes_size, allocated_since_last_gc,
+          heap_->promoted_objects_size(),
           heap_->semi_space_copied_object_size(),
           heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
           heap_->nodes_promoted_, heap_->promotion_ratio_,
@@ -716,15 +697,26 @@
   return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
 }
 
-double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
-  if (cumulative_incremental_marking_duration_ == 0.0) return 0;
-  // We haven't completed an entire round of incremental marking, yet.
-  // Use data from GCTracer instead of data from event buffers.
-  if (recorded_incremental_marking_steps_.Count() == 0) {
-    return cumulative_incremental_marking_bytes_ /
-           cumulative_pure_incremental_marking_duration_;
+void GCTracer::RecordIncrementalMarkingSpeed(intptr_t bytes, double duration) {
+  if (duration == 0 || bytes == 0) return;
+  double current_speed = bytes / duration;
+  if (recorded_incremental_marking_speed_ == 0) {
+    recorded_incremental_marking_speed_ = current_speed;
+  } else {
+    recorded_incremental_marking_speed_ =
+        (recorded_incremental_marking_speed_ + current_speed) / 2;
   }
-  return AverageSpeed(recorded_incremental_marking_steps_);
+}
+
+double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
+  const int kConservativeSpeedInBytesPerMillisecond = 128 * KB;
+  if (recorded_incremental_marking_speed_ != 0) {
+    return recorded_incremental_marking_speed_;
+  }
+  if (incremental_marking_duration_ != 0.0) {
+    return incremental_marking_bytes_ / incremental_marking_duration_;
+  }
+  return kConservativeSpeedInBytesPerMillisecond;
 }
 
 double GCTracer::ScavengeSpeedInBytesPerMillisecond(
@@ -821,5 +813,10 @@
 }
 
 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }
+
+void GCTracer::NotifyIncrementalMarkingStart() {
+  incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs();
+}
+
 }  // namespace internal
 }  // namespace v8
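The rewritten speed tracking above replaces cumulative counters with a running estimate: RecordIncrementalMarkingSpeed averages each finished incremental cycle into the previous value, and IncrementalMarkingSpeedInBytesPerMillisecond falls back to a conservative 128 KB/ms until something has been recorded. A standalone sketch of that estimator (not V8 code):

    #include <cstdio>

    class MarkingSpeedEstimator {
     public:
      void RecordCycle(long bytes, double duration_ms) {
        if (bytes == 0 || duration_ms == 0) return;
        double current = bytes / duration_ms;
        // First sample is taken as-is; later samples are averaged in.
        recorded_ = (recorded_ == 0) ? current : (recorded_ + current) / 2;
      }
      double BytesPerMs() const {
        const double kConservative = 128 * 1024;  // 128 KB/ms fallback
        return recorded_ != 0 ? recorded_ : kConservative;
      }

     private:
      double recorded_ = 0;
    };

    int main() {
      MarkingSpeedEstimator e;
      std::printf("%.0f\n", e.BytesPerMs());  // 131072 (conservative fallback)
      e.RecordCycle(1000000, 10);             // 100000 bytes/ms
      e.RecordCycle(2000000, 10);             // averaged -> 150000 bytes/ms
      std::printf("%.0f\n", e.BytesPerMs());
      return 0;
    }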
diff --git a/src/heap/gc-tracer.h b/src/heap/gc-tracer.h
index a11823e..e8c72c1 100644
--- a/src/heap/gc-tracer.h
+++ b/src/heap/gc-tracer.h
@@ -63,6 +63,7 @@
 #define INCREMENTAL_SCOPES(F)                                      \
   /* MC_INCREMENTAL is the top-level incremental marking scope. */ \
   F(MC_INCREMENTAL)                                                \
+  F(MC_INCREMENTAL_SWEEPING)                                       \
   F(MC_INCREMENTAL_WRAPPER_PROLOGUE)                               \
   F(MC_INCREMENTAL_WRAPPER_TRACING)                                \
   F(MC_INCREMENTAL_FINALIZE)                                       \
@@ -134,23 +135,23 @@
 class GCTracer {
  public:
   struct IncrementalMarkingInfos {
-    IncrementalMarkingInfos()
-        : cumulative_duration(0), longest_step(0), steps(0) {}
+    IncrementalMarkingInfos() : duration(0), longest_step(0), steps(0) {}
 
     void Update(double duration) {
       steps++;
-      cumulative_duration += duration;
+      this->duration += duration;
       if (duration > longest_step) {
         longest_step = duration;
       }
     }
 
     void ResetCurrentCycle() {
+      duration = 0;
       longest_step = 0;
       steps = 0;
     }
 
-    double cumulative_duration;
+    double duration;
     double longest_step;
     int steps;
   };
@@ -192,7 +193,8 @@
       START = 3
     };
 
-    Event(Type type, const char* gc_reason, const char* collector_reason);
+    Event(Type type, GarbageCollectionReason gc_reason,
+          const char* collector_reason);
 
     // Returns a string describing the event type.
     const char* TypeName(bool short_name) const;
@@ -200,7 +202,7 @@
     // Type of event
     Type type;
 
-    const char* gc_reason;
+    GarbageCollectionReason gc_reason;
     const char* collector_reason;
 
     // Timestamp set in the constructor.
@@ -219,10 +221,10 @@
     intptr_t end_object_size;
 
     // Size of memory allocated from OS set in constructor.
-    intptr_t start_memory_size;
+    size_t start_memory_size;
 
     // Size of memory allocated from OS set in destructor.
-    intptr_t end_memory_size;
+    size_t end_memory_size;
 
     // Total amount of space either wasted or contained in one of free lists
     // before the current GC.
@@ -241,21 +243,11 @@
     // Bytes marked since creation of tracer (value at start of event).
     intptr_t cumulative_incremental_marking_bytes;
 
-    // Bytes marked since
-    // - last event for SCAVENGER events
-    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
-    // events
+    // Bytes marked incrementally for INCREMENTAL_MARK_COMPACTOR
     intptr_t incremental_marking_bytes;
 
-    // Cumulative pure duration of incremental marking steps since creation of
-    // tracer. (value at start of event)
-    double cumulative_pure_incremental_marking_duration;
-
-    // Duration of pure incremental marking steps since
-    // - last event for SCAVENGER events
-    // - last INCREMENTAL_MARK_COMPACTOR event for INCREMENTAL_MARK_COMPACTOR
-    // events
-    double pure_incremental_marking_duration;
+    // Duration of incremental marking steps for INCREMENTAL_MARK_COMPACTOR.
+    double incremental_marking_duration;
 
     // Amounts of time spent in different scopes during GC.
     double scopes[Scope::NUMBER_OF_SCOPES];
@@ -270,7 +262,7 @@
   explicit GCTracer(Heap* heap);
 
   // Start collecting data.
-  void Start(GarbageCollector collector, const char* gc_reason,
+  void Start(GarbageCollector collector, GarbageCollectionReason gc_reason,
              const char* collector_reason);
 
   // Stop collecting data and print results.
@@ -292,26 +284,6 @@
   // Log an incremental marking step.
   void AddIncrementalMarkingStep(double duration, intptr_t bytes);
 
-  // Log time spent in marking.
-  void AddMarkingTime(double duration) {
-    cumulative_marking_duration_ += duration;
-  }
-
-  // Time spent in marking.
-  double cumulative_marking_duration() const {
-    return cumulative_marking_duration_;
-  }
-
-  // Log time spent in sweeping on main thread.
-  void AddSweepingTime(double duration) {
-    cumulative_sweeping_duration_ += duration;
-  }
-
-  // Time spent in sweeping on main thread.
-  double cumulative_sweeping_duration() const {
-    return cumulative_sweeping_duration_;
-  }
-
   // Compute the average incremental marking speed in bytes/millisecond.
   // Returns 0 if no events have been recorded.
   double IncrementalMarkingSpeedInBytesPerMillisecond() const;
@@ -381,11 +353,14 @@
   // Discard all recorded survival events.
   void ResetSurvivalEvents();
 
+  void NotifyIncrementalMarkingStart();
+
   V8_INLINE void AddScopeSample(Scope::ScopeId scope, double duration) {
     DCHECK(scope < Scope::NUMBER_OF_SCOPES);
     if (scope >= Scope::FIRST_INCREMENTAL_SCOPE &&
         scope <= Scope::LAST_INCREMENTAL_SCOPE) {
-      incremental_marking_scopes_[scope].Update(duration);
+      incremental_marking_scopes_[scope - Scope::FIRST_INCREMENTAL_SCOPE]
+          .Update(duration);
     } else {
       current_.scopes[scope] += duration;
     }
@@ -400,6 +375,7 @@
   FRIEND_TEST(GCTracerTest, RegularScope);
   FRIEND_TEST(GCTracerTest, IncrementalMarkingDetails);
   FRIEND_TEST(GCTracerTest, IncrementalScope);
+  FRIEND_TEST(GCTracerTest, IncrementalMarkingSpeed);
 
   // Returns the average speed of the events in the buffer.
   // If the buffer is empty, the result is 0.
@@ -408,9 +384,9 @@
   static double AverageSpeed(const RingBuffer<BytesAndDuration>& buffer,
                              const BytesAndDuration& initial, double time_ms);
 
-  void MergeBaseline(const Event& baseline);
-
   void ResetForTesting();
+  void ResetIncrementalMarkingCounters();
+  void RecordIncrementalMarkingSpeed(intptr_t bytes, double duration);
 
   // Print one detailed trace line in name=value format.
   // TODO(ernstm): Move to Heap.
@@ -444,37 +420,23 @@
   // Previous tracer event.
   Event previous_;
 
-  // Previous INCREMENTAL_MARK_COMPACTOR event.
-  Event previous_incremental_mark_compactor_event_;
+  // Size of incremental marking steps (in bytes) accumulated since the end of
+  // the last mark compact GC.
+  intptr_t incremental_marking_bytes_;
 
-  // Cumulative size of incremental marking steps (in bytes) since creation of
-  // tracer.
-  intptr_t cumulative_incremental_marking_bytes_;
+  // Duration of incremental marking steps since the end of the last mark-
+  // compact event.
+  double incremental_marking_duration_;
 
-  // Cumulative duration of incremental marking steps since creation of tracer.
-  double cumulative_incremental_marking_duration_;
+  double incremental_marking_start_time_;
 
-  // Cumulative duration of pure incremental marking steps since creation of
-  // tracer.
-  double cumulative_pure_incremental_marking_duration_;
-
-  // Total marking time.
-  // This timer is precise when run with --print-cumulative-gc-stat
-  double cumulative_marking_duration_;
+  double recorded_incremental_marking_speed_;
 
   // Incremental scopes carry more information than just the duration. The infos
   // here are merged back upon starting/stopping the GC tracer.
   IncrementalMarkingInfos
       incremental_marking_scopes_[Scope::NUMBER_OF_INCREMENTAL_SCOPES];
 
-  // Total sweeping time on the main thread.
-  // This timer is precise when run with --print-cumulative-gc-stat
-  // TODO(hpayer): Account for sweeping time on sweeper threads. Add a
-  // different field for that.
-  // TODO(hpayer): This timer right now just holds the sweeping time
-  // of the initial atomic sweeping pause. Make sure that it accumulates
-  // all sweeping operations performed on the main thread.
-  double cumulative_sweeping_duration_;
 
   // Timestamp and allocation counter at the last sampled allocation event.
   double allocation_time_ms_;
@@ -494,12 +456,11 @@
   // Separate timer used for --runtime_call_stats
   RuntimeCallTimer timer_;
 
-  RingBuffer<BytesAndDuration> recorded_incremental_marking_steps_;
   RingBuffer<BytesAndDuration> recorded_scavenges_total_;
   RingBuffer<BytesAndDuration> recorded_scavenges_survived_;
   RingBuffer<BytesAndDuration> recorded_compactions_;
-  RingBuffer<BytesAndDuration> recorded_mark_compacts_;
   RingBuffer<BytesAndDuration> recorded_incremental_mark_compacts_;
+  RingBuffer<BytesAndDuration> recorded_mark_compacts_;
   RingBuffer<BytesAndDuration> recorded_new_generation_allocations_;
   RingBuffer<BytesAndDuration> recorded_old_generation_allocations_;
   RingBuffer<double> recorded_context_disposal_times_;
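AddScopeSample in the header above now subtracts Scope::FIRST_INCREMENTAL_SCOPE before indexing, because incremental_marking_scopes_ only has NUMBER_OF_INCREMENTAL_SCOPES slots rather than one per scope. A reduced sketch of why the rebased index is needed; the enum here is trimmed to three scopes and is not V8's real list:

    #include <cassert>

    enum ScopeId {
      MC_INCREMENTAL,           // incremental scopes come first (see the
      MC_INCREMENTAL_SWEEPING,  // STATIC_ASSERT added in gc-tracer.cc above)
      MC_CLEAR,                 // first non-incremental scope
      NUMBER_OF_SCOPES,
      FIRST_INCREMENTAL_SCOPE = MC_INCREMENTAL,
      LAST_INCREMENTAL_SCOPE = MC_INCREMENTAL_SWEEPING,
      NUMBER_OF_INCREMENTAL_SCOPES =
          LAST_INCREMENTAL_SCOPE - FIRST_INCREMENTAL_SCOPE + 1
    };

    int main() {
      double incremental[NUMBER_OF_INCREMENTAL_SCOPES] = {0};
      double all[NUMBER_OF_SCOPES] = {0};
      int scope = MC_INCREMENTAL_SWEEPING;
      if (scope >= FIRST_INCREMENTAL_SCOPE && scope <= LAST_INCREMENTAL_SCOPE) {
        // The smaller array is indexed relative to the first incremental scope.
        incremental[scope - FIRST_INCREMENTAL_SCOPE] += 1.0;
      } else {
        all[scope] += 1.0;
      }
      assert(incremental[1] == 1.0);
      return 0;
    }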
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index 21f465f..23e1712 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -8,7 +8,7 @@
 #include <cmath>
 
 #include "src/base/platform/platform.h"
-#include "src/counters.h"
+#include "src/counters-inl.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking-inl.h"
 #include "src/heap/mark-compact.h"
@@ -25,6 +25,16 @@
 namespace v8 {
 namespace internal {
 
+AllocationSpace AllocationResult::RetrySpace() {
+  DCHECK(IsRetry());
+  return static_cast<AllocationSpace>(Smi::cast(object_)->value());
+}
+
+HeapObject* AllocationResult::ToObjectChecked() {
+  CHECK(!IsRetry());
+  return HeapObject::cast(object_);
+}
+
 void PromotionQueue::insert(HeapObject* target, int32_t size,
                             bool was_marked_black) {
   if (emergency_stack_ != NULL) {
@@ -50,6 +60,62 @@
 #endif
 }
 
+void PromotionQueue::remove(HeapObject** target, int32_t* size,
+                            bool* was_marked_black) {
+  DCHECK(!is_empty());
+  if (front_ == rear_) {
+    Entry e = emergency_stack_->RemoveLast();
+    *target = e.obj_;
+    *size = e.size_;
+    *was_marked_black = e.was_marked_black_;
+    return;
+  }
+
+  struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
+  *target = entry->obj_;
+  *size = entry->size_;
+  *was_marked_black = entry->was_marked_black_;
+
+  // Assert no underflow.
+  SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
+                              reinterpret_cast<Address>(front_));
+}
+
+Page* PromotionQueue::GetHeadPage() {
+  return Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_));
+}
+
+void PromotionQueue::SetNewLimit(Address limit) {
+  // If we are already using an emergency stack, we can ignore it.
+  if (emergency_stack_) return;
+
+  // If the limit is not on the same page, we can ignore it.
+  if (Page::FromAllocationAreaAddress(limit) != GetHeadPage()) return;
+
+  limit_ = reinterpret_cast<struct Entry*>(limit);
+
+  if (limit_ <= rear_) {
+    return;
+  }
+
+  RelocateQueueHead();
+}
+
+bool PromotionQueue::IsBelowPromotionQueue(Address to_space_top) {
+  // If an emergency stack is used, the to-space address cannot interfere
+  // with the promotion queue.
+  if (emergency_stack_) return true;
+
+  // If the given to-space top pointer and the head of the promotion queue
+  // are not on the same page, then the to-space objects are below the
+  // promotion queue.
+  if (GetHeadPage() != Page::FromAddress(to_space_top)) {
+    return true;
+  }
+  // If the to-space top pointer is less than or equal to the promotion
+  // queue head, then the to-space objects are below the promotion queue.
+  return reinterpret_cast<struct Entry*>(to_space_top) <= rear_;
+}
 
 #define ROOT_ACCESSOR(type, name, camel_name) \
   type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
@@ -89,6 +155,37 @@
 ROOT_LIST(ROOT_ACCESSOR)
 #undef ROOT_ACCESSOR
 
+PagedSpace* Heap::paged_space(int idx) {
+  DCHECK_NE(idx, LO_SPACE);
+  DCHECK_NE(idx, NEW_SPACE);
+  return static_cast<PagedSpace*>(space_[idx]);
+}
+
+Space* Heap::space(int idx) { return space_[idx]; }
+
+Address* Heap::NewSpaceAllocationTopAddress() {
+  return new_space_->allocation_top_address();
+}
+
+Address* Heap::NewSpaceAllocationLimitAddress() {
+  return new_space_->allocation_limit_address();
+}
+
+Address* Heap::OldSpaceAllocationTopAddress() {
+  return old_space_->allocation_top_address();
+}
+
+Address* Heap::OldSpaceAllocationLimitAddress() {
+  return old_space_->allocation_limit_address();
+}
+
+void Heap::UpdateNewSpaceAllocationCounter() {
+  new_space_allocation_counter_ = NewSpaceAllocationCounter();
+}
+
+size_t Heap::NewSpaceAllocationCounter() {
+  return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
+}
 
 template <>
 bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
@@ -209,14 +306,14 @@
   isolate_->counters()->objs_since_last_young()->Increment();
 #endif
 
-  bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
+  bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
   HeapObject* object = nullptr;
   AllocationResult allocation;
   if (NEW_SPACE == space) {
     if (large_object) {
       space = LO_SPACE;
     } else {
-      allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
+      allocation = new_space_->AllocateRaw(size_in_bytes, alignment);
       if (allocation.To(&object)) {
         OnAllocationEvent(object, size_in_bytes);
       }
@@ -248,8 +345,6 @@
   }
   if (allocation.To(&object)) {
     OnAllocationEvent(object, size_in_bytes);
-  } else {
-    old_gen_exhausted_ = true;
   }
 
   return allocation;
@@ -355,9 +450,17 @@
   }
 }
 
+Address Heap::NewSpaceTop() { return new_space_->top(); }
+
+bool Heap::DeoptMaybeTenuredAllocationSites() {
+  return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
+}
 
 bool Heap::InNewSpace(Object* object) {
-  bool result = new_space_.Contains(object);
+  // Inlined check from NewSpace::Contains.
+  bool result =
+      object->IsHeapObject() &&
+      Page::FromAddress(HeapObject::cast(object)->address())->InNewSpace();
   DCHECK(!result ||                 // Either not in new space
          gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
          InToSpace(object));        // ... or in to-space (where we allocate).
@@ -365,35 +468,32 @@
 }
 
 bool Heap::InFromSpace(Object* object) {
-  return new_space_.FromSpaceContains(object);
+  return object->IsHeapObject() &&
+         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+             ->IsFlagSet(Page::IN_FROM_SPACE);
 }
 
 
 bool Heap::InToSpace(Object* object) {
-  return new_space_.ToSpaceContains(object);
+  return object->IsHeapObject() &&
+         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+             ->IsFlagSet(Page::IN_TO_SPACE);
 }
 
 bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }
 
 bool Heap::InNewSpaceSlow(Address address) {
-  return new_space_.ContainsSlow(address);
+  return new_space_->ContainsSlow(address);
 }
 
 bool Heap::InOldSpaceSlow(Address address) {
   return old_space_->ContainsSlow(address);
 }
 
-bool Heap::OldGenerationAllocationLimitReached() {
-  if (!incremental_marking()->IsStopped() && !ShouldOptimizeForMemoryUsage()) {
-    return false;
-  }
-  return OldGenerationSpaceAvailable() < 0;
-}
-
 template <PromotionMode promotion_mode>
 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
   Page* page = Page::FromAddress(old_address);
-  Address age_mark = new_space_.age_mark();
+  Address age_mark = new_space_->age_mark();
 
   if (promotion_mode == PROMOTE_MARKED) {
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(old_address);
@@ -587,8 +687,8 @@
       site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
 }
 
-
-bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
+bool Heap::CollectGarbage(AllocationSpace space,
+                          GarbageCollectionReason gc_reason,
                           const v8::GCCallbackFlags callbackFlags) {
   const char* collector_reason = NULL;
   GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
@@ -659,35 +759,6 @@
 #endif
 }
 
-// static
-int DescriptorLookupCache::Hash(Object* source, Name* name) {
-  DCHECK(name->IsUniqueName());
-  // Uses only lower 32 bits if pointers are larger.
-  uint32_t source_hash =
-      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
-      kPointerSizeLog2;
-  uint32_t name_hash = name->hash_field();
-  return (source_hash ^ name_hash) % kLength;
-}
-
-int DescriptorLookupCache::Lookup(Map* source, Name* name) {
-  int index = Hash(source, name);
-  Key& key = keys_[index];
-  if ((key.source == source) && (key.name == name)) return results_[index];
-  return kAbsent;
-}
-
-
-void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
-  DCHECK(result != kAbsent);
-  int index = Hash(source, name);
-  Key& key = keys_[index];
-  key.source = source;
-  key.name = name;
-  results_[index] = result;
-}
-
-
 void Heap::ClearInstanceofCache() {
   set_instanceof_cache_function(Smi::FromInt(0));
 }
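The inlined Heap::InFromSpace / InToSpace above answer the space query from a per-page flag instead of asking the NewSpace object; the page header is recovered by masking the object address down to the page boundary. A sketch of that address arithmetic (the page size below is an assumption for illustration, not V8's exact constant):

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kPageSize = uintptr_t{1} << 19;  // assumed 512 KB pages
    constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;

    // MemoryChunk::FromAddress-style lookup: clear the low bits of any interior
    // address to land on the start of the owning page, where flags such as
    // IN_FROM_SPACE / IN_TO_SPACE would be stored.
    inline uintptr_t PageFromAddress(uintptr_t addr) {
      return addr & ~kPageAlignmentMask;
    }

    int main() {
      uintptr_t page_start = 5 * kPageSize;
      uintptr_t object_address = page_start + 0x340;
      assert(PageFromAddress(object_address) == page_start);
      assert(PageFromAddress(page_start) == page_start);
      return 0;
    }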
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 7eb5af3..54b8589 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -71,14 +71,14 @@
 
 Heap::Heap()
     : external_memory_(0),
-      external_memory_limit_(kExternalAllocationLimit),
+      external_memory_limit_(kExternalAllocationSoftLimit),
       external_memory_at_last_mark_compact_(0),
       isolate_(nullptr),
       code_range_size_(0),
       // semispace_size_ should be a power of 2 and old_generation_size_ should
       // be a multiple of Page::kPageSize.
       max_semi_space_size_(8 * (kPointerSize / 4) * MB),
-      initial_semispace_size_(Page::kPageSize),
+      initial_semispace_size_(MB),
       max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
       initial_old_generation_size_(max_old_generation_size_ /
                                    kInitalOldGenerationLimitFactor),
@@ -96,7 +96,7 @@
       contexts_disposed_(0),
       number_of_disposed_maps_(0),
       global_ic_age_(0),
-      new_space_(this),
+      new_space_(nullptr),
       old_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
@@ -112,11 +112,9 @@
       allocation_timeout_(0),
 #endif  // DEBUG
       old_generation_allocation_limit_(initial_old_generation_size_),
-      old_gen_exhausted_(false),
       inline_allocation_disabled_(false),
       total_regexp_code_generated_(0),
       tracer_(nullptr),
-      high_survival_rate_period_length_(0),
       promoted_objects_size_(0),
       promotion_ratio_(0),
       semi_space_copied_object_size_(0),
@@ -126,12 +124,6 @@
       nodes_copied_in_new_space_(0),
       nodes_promoted_(0),
       maximum_size_scavenges_(0),
-      max_gc_pause_(0.0),
-      total_gc_time_ms_(0.0),
-      max_alive_after_gc_(0),
-      min_in_mutator_(kMaxInt),
-      marking_time_(0.0),
-      sweeping_time_(0.0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
       scavenge_collector_(nullptr),
@@ -148,7 +140,7 @@
       full_codegen_bytes_generated_(0),
       crankshaft_codegen_bytes_generated_(0),
       new_space_allocation_counter_(0),
-      old_generation_allocation_counter_(0),
+      old_generation_allocation_counter_at_last_gc_(0),
       old_generation_size_at_last_gc_(0),
       gcs_since_last_deopt_(0),
       global_pretenuring_feedback_(nullptr),
@@ -163,6 +155,8 @@
       deserialization_complete_(false),
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
+      embedder_heap_tracer_(nullptr),
+      embedder_reference_reporter_(new TracePossibleWrapperReporter(this)),
       force_oom_(false) {
 // Allow build-time customization of the max semispace size. Building
 // V8 with snapshots and a non-default max semispace size is much
@@ -189,7 +183,7 @@
 intptr_t Heap::Capacity() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.Capacity() + OldGenerationCapacity();
+  return new_space_->Capacity() + OldGenerationCapacity();
 }
 
 intptr_t Heap::OldGenerationCapacity() {
@@ -199,44 +193,41 @@
          map_space_->Capacity() + lo_space_->SizeOfObjects();
 }
 
-
-intptr_t Heap::CommittedOldGenerationMemory() {
+size_t Heap::CommittedOldGenerationMemory() {
   if (!HasBeenSetUp()) return 0;
 
   return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
          map_space_->CommittedMemory() + lo_space_->Size();
 }
 
-
-intptr_t Heap::CommittedMemory() {
+size_t Heap::CommittedMemory() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.CommittedMemory() + CommittedOldGenerationMemory();
+  return new_space_->CommittedMemory() + CommittedOldGenerationMemory();
 }
 
 
 size_t Heap::CommittedPhysicalMemory() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.CommittedPhysicalMemory() +
+  return new_space_->CommittedPhysicalMemory() +
          old_space_->CommittedPhysicalMemory() +
          code_space_->CommittedPhysicalMemory() +
          map_space_->CommittedPhysicalMemory() +
          lo_space_->CommittedPhysicalMemory();
 }
 
-
-intptr_t Heap::CommittedMemoryExecutable() {
+size_t Heap::CommittedMemoryExecutable() {
   if (!HasBeenSetUp()) return 0;
 
-  return memory_allocator()->SizeExecutable();
+  return static_cast<size_t>(memory_allocator()->SizeExecutable());
 }
 
 
 void Heap::UpdateMaximumCommitted() {
   if (!HasBeenSetUp()) return;
 
-  intptr_t current_committed_memory = CommittedMemory();
+  const size_t current_committed_memory = CommittedMemory();
   if (current_committed_memory > maximum_committed_) {
     maximum_committed_ = current_committed_memory;
   }
@@ -275,22 +266,6 @@
     return MARK_COMPACTOR;
   }
 
-  // Is enough data promoted to justify a global GC?
-  if (OldGenerationAllocationLimitReached()) {
-    isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
-    *reason = "promotion limit reached";
-    return MARK_COMPACTOR;
-  }
-
-  // Have allocation in OLD and LO failed?
-  if (old_gen_exhausted_) {
-    isolate_->counters()
-        ->gc_compactor_caused_by_oldspace_exhaustion()
-        ->Increment();
-    *reason = "old generations exhausted";
-    return MARK_COMPACTOR;
-  }
-
   // Is there enough space left in OLD to guarantee that a scavenge can
   // succeed?
   //
@@ -300,7 +275,8 @@
   // and does not count available bytes already in the old space or code
   // space.  Undercounting is safe---we may get an unrequested full GC when
   // a scavenge would have succeeded.
-  if (memory_allocator()->MaxAvailable() <= new_space_.Size()) {
+  if (static_cast<intptr_t>(memory_allocator()->MaxAvailable()) <=
+      new_space_->Size()) {
     isolate_->counters()
         ->gc_compactor_caused_by_oldspace_exhaustion()
         ->Increment();
@@ -321,18 +297,18 @@
 // compiled --log-gc is set.  The following logic is used to avoid
 // double logging.
 #ifdef DEBUG
-  if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
+  if (FLAG_heap_stats || FLAG_log_gc) new_space_->CollectStatistics();
   if (FLAG_heap_stats) {
     ReportHeapStatistics("Before GC");
   } else if (FLAG_log_gc) {
-    new_space_.ReportStatistics();
+    new_space_->ReportStatistics();
   }
-  if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
+  if (FLAG_heap_stats || FLAG_log_gc) new_space_->ClearHistograms();
 #else
   if (FLAG_log_gc) {
-    new_space_.CollectStatistics();
-    new_space_.ReportStatistics();
-    new_space_.ClearHistograms();
+    new_space_->CollectStatistics();
+    new_space_->ReportStatistics();
+    new_space_->ClearHistograms();
   }
 #endif  // DEBUG
 }
@@ -340,50 +316,51 @@
 
 void Heap::PrintShortHeapStatistics() {
   if (!FLAG_trace_gc_verbose) return;
-  PrintIsolate(isolate_, "Memory allocator,   used: %6" V8PRIdPTR
-                         " KB, available: %6" V8PRIdPTR " KB\n",
+  PrintIsolate(isolate_,
+               "Memory allocator,   used: %6zu KB,"
+               " available: %6zu KB\n",
                memory_allocator()->Size() / KB,
                memory_allocator()->Available() / KB);
   PrintIsolate(isolate_, "New space,          used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
-               new_space_.Size() / KB, new_space_.Available() / KB,
-               new_space_.CommittedMemory() / KB);
+                         ", committed: %6zu KB\n",
+               new_space_->Size() / KB, new_space_->Available() / KB,
+               new_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_, "Old space,          used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
+                         ", committed: %6zu KB\n",
                old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
                old_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_, "Code space,         used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
+                         ", committed: %6zu KB\n",
                code_space_->SizeOfObjects() / KB, code_space_->Available() / KB,
                code_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_, "Map space,          used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
+                         ", committed: %6zu KB\n",
                map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
                map_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_, "Large object space, used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
+                         ", committed: %6zu KB\n",
                lo_space_->SizeOfObjects() / KB, lo_space_->Available() / KB,
                lo_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_, "All spaces,         used: %6" V8PRIdPTR
                          " KB"
                          ", available: %6" V8PRIdPTR
                          " KB"
-                         ", committed: %6" V8PRIdPTR " KB\n",
+                         ", committed: %6zu KB\n",
                this->SizeOfObjects() / KB, this->Available() / KB,
                this->CommittedMemory() / KB);
   PrintIsolate(isolate_, "External memory reported: %6" V8PRIdPTR " KB\n",
@@ -399,13 +376,13 @@
 // NewSpace statistics are logged exactly once when --log-gc is turned on.
 #if defined(DEBUG)
   if (FLAG_heap_stats) {
-    new_space_.CollectStatistics();
+    new_space_->CollectStatistics();
     ReportHeapStatistics("After GC");
   } else if (FLAG_log_gc) {
-    new_space_.ReportStatistics();
+    new_space_->ReportStatistics();
   }
 #else
-  if (FLAG_log_gc) new_space_.ReportStatistics();
+  if (FLAG_log_gc) new_space_->ReportStatistics();
 #endif  // DEBUG
   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
        ++i) {
@@ -423,6 +400,7 @@
   deferred_counters_[feature]++;
 }
 
+bool Heap::UncommitFromSpace() { return new_space_->UncommitFromSpace(); }
 
 void Heap::GarbageCollectionPrologue() {
   {
@@ -454,7 +432,7 @@
   ReportStatisticsBeforeGC();
 #endif  // DEBUG
 
-  if (new_space_.IsAtMaximumCapacity()) {
+  if (new_space_->IsAtMaximumCapacity()) {
     maximum_size_scavenges_++;
   } else {
     maximum_size_scavenges_ = 0;
@@ -534,8 +512,8 @@
 class Heap::PretenuringScope {
  public:
   explicit PretenuringScope(Heap* heap) : heap_(heap) {
-    heap_->global_pretenuring_feedback_ = new base::HashMap(
-        base::HashMap::PointersMatch, kInitialFeedbackCapacity);
+    heap_->global_pretenuring_feedback_ =
+        new base::HashMap(kInitialFeedbackCapacity);
   }
 
   ~PretenuringScope() {
@@ -789,14 +767,16 @@
   } else if (incremental_marking()->request_type() ==
              IncrementalMarking::COMPLETE_MARKING) {
     incremental_marking()->reset_request_type();
-    CollectAllGarbage(current_gc_flags_, "GC interrupt",
+    CollectAllGarbage(current_gc_flags_,
+                      GarbageCollectionReason::kFinalizeMarkingViaStackGuard,
                       current_gc_callback_flags_);
   } else if (incremental_marking()->request_type() ==
                  IncrementalMarking::FINALIZATION &&
              incremental_marking()->IsMarking() &&
              !incremental_marking()->finalize_marking_completed()) {
     incremental_marking()->reset_request_type();
-    FinalizeIncrementalMarking("GC interrupt: finalize incremental marking");
+    FinalizeIncrementalMarking(
+        GarbageCollectionReason::kFinalizeMarkingViaStackGuard);
   }
 }
 
@@ -805,10 +785,11 @@
   scavenge_job_->ScheduleIdleTaskIfNeeded(this, bytes_allocated);
 }
 
-
-void Heap::FinalizeIncrementalMarking(const char* gc_reason) {
+void Heap::FinalizeIncrementalMarking(GarbageCollectionReason gc_reason) {
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] (%s).\n", gc_reason);
+    isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] (%s).\n",
+        Heap::GarbageCollectionReasonToString(gc_reason));
   }
 
   HistogramTimerScope incremental_marking_scope(
@@ -856,7 +837,7 @@
   }
 }
 
-void Heap::CollectAllGarbage(int flags, const char* gc_reason,
+void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
                              const v8::GCCallbackFlags gc_callback_flags) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
@@ -866,8 +847,7 @@
   set_current_gc_flags(kNoGCFlags);
 }
 
-
-void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
+void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
@@ -897,36 +877,46 @@
     }
   }
   set_current_gc_flags(kNoGCFlags);
-  new_space_.Shrink();
+  new_space_->Shrink();
   UncommitFromSpace();
 }
 
-
-void Heap::ReportExternalMemoryPressure(const char* gc_reason) {
+void Heap::ReportExternalMemoryPressure() {
+  if (external_memory_ >
+      (external_memory_at_last_mark_compact_ + external_memory_hard_limit())) {
+    CollectAllGarbage(
+        kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask,
+        GarbageCollectionReason::kExternalMemoryPressure,
+        static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage |
+                                     kGCCallbackFlagCollectAllExternalMemory));
+    return;
+  }
   if (incremental_marking()->IsStopped()) {
     if (incremental_marking()->CanBeActivated()) {
       StartIncrementalMarking(
-          i::Heap::kNoGCFlags,
+          i::Heap::kNoGCFlags, GarbageCollectionReason::kExternalMemoryPressure,
           static_cast<GCCallbackFlags>(
               kGCCallbackFlagSynchronousPhantomCallbackProcessing |
-              kGCCallbackFlagCollectAllExternalMemory),
-          gc_reason);
+              kGCCallbackFlagCollectAllExternalMemory));
     } else {
-      CollectAllGarbage(i::Heap::kNoGCFlags, gc_reason,
+      CollectAllGarbage(i::Heap::kNoGCFlags,
+                        GarbageCollectionReason::kExternalMemoryPressure,
                         kGCCallbackFlagSynchronousPhantomCallbackProcessing);
     }
   } else {
     // Incremental marking is turned on and has already been started.
-
-    // TODO(mlippautz): Compute the time slice for incremental marking based on
-    // memory pressure.
-    double deadline = MonotonicallyIncreasingTimeInMs() +
-                      FLAG_external_allocation_limit_incremental_time;
+    const double pressure =
+        static_cast<double>(external_memory_ -
+                            external_memory_at_last_mark_compact_ -
+                            kExternalAllocationSoftLimit) /
+        external_memory_hard_limit();
+    DCHECK_GE(1, pressure);
+    const double kMaxStepSizeOnExternalLimit = 25;
+    const double deadline = MonotonicallyIncreasingTimeInMs() +
+                            pressure * kMaxStepSizeOnExternalLimit;
     incremental_marking()->AdvanceIncrementalMarking(
-        deadline,
-        IncrementalMarking::StepActions(IncrementalMarking::GC_VIA_STACK_GUARD,
-                                        IncrementalMarking::FORCE_MARKING,
-                                        IncrementalMarking::FORCE_COMPLETION));
+        deadline, IncrementalMarking::GC_VIA_STACK_GUARD,
+        IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
   }
 }
 
@@ -936,7 +926,7 @@
   // evacuation of a non-full new space (or if we are on the last page) there
   // may be uninitialized memory behind top. We fill the remainder of the page
   // with a filler.
-  Address to_top = new_space_.top();
+  Address to_top = new_space_->top();
   Page* page = Page::FromAddress(to_top - kPointerSize);
   if (page->Contains(to_top)) {
     int remaining_in_page = static_cast<int>(page->area_end() - to_top);
@@ -944,8 +934,8 @@
   }
 }
 
-
-bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
+bool Heap::CollectGarbage(GarbageCollector collector,
+                          GarbageCollectionReason gc_reason,
                           const char* collector_reason,
                           const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.
@@ -964,19 +954,21 @@
 
   if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Scavenge during marking.\n");
+      isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Scavenge during marking.\n");
     }
   }
 
   if (collector == MARK_COMPACTOR && !ShouldFinalizeIncrementalMarking() &&
       !ShouldAbortIncrementalMarking() && !incremental_marking()->IsStopped() &&
       !incremental_marking()->should_hurry() && FLAG_incremental_marking &&
-      OldGenerationAllocationLimitReached()) {
+      OldGenerationSpaceAvailable() <= 0) {
     if (!incremental_marking()->IsComplete() &&
-        !mark_compact_collector()->marking_deque_.IsEmpty() &&
+        !mark_compact_collector()->marking_deque()->IsEmpty() &&
         !FLAG_gc_global) {
       if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
+        isolate()->PrintWithTimestamp(
+            "[IncrementalMarking] Delaying MarkSweep.\n");
       }
       collector = SCAVENGER;
       collector_reason = "incremental marking delaying mark-sweep";
@@ -1041,9 +1033,11 @@
 
   // Start incremental marking for the next cycle. The heap snapshot
   // generator needs incremental marking to stay off after it aborted.
-  if (!ShouldAbortIncrementalMarking() && incremental_marking()->IsStopped() &&
-      incremental_marking()->ShouldActivateEvenWithoutIdleNotification()) {
-    StartIncrementalMarking(kNoGCFlags, kNoGCCallbackFlags, "GC epilogue");
+  // We do this only for the scavenger to avoid a loop where mark-compact
+  // causes another mark-compact.
+  if (collector == SCAVENGER && !ShouldAbortIncrementalMarking()) {
+    StartIncrementalMarkingIfAllocationLimitIsReached(kNoGCFlags,
+                                                      kNoGCCallbackFlags);
   }
 
   return next_gc_likely_to_collect_more;
@@ -1069,21 +1063,33 @@
   return ++contexts_disposed_;
 }
 
-
 void Heap::StartIncrementalMarking(int gc_flags,
-                                   const GCCallbackFlags gc_callback_flags,
-                                   const char* reason) {
+                                   GarbageCollectionReason gc_reason,
+                                   GCCallbackFlags gc_callback_flags) {
   DCHECK(incremental_marking()->IsStopped());
   set_current_gc_flags(gc_flags);
   current_gc_callback_flags_ = gc_callback_flags;
-  incremental_marking()->Start(reason);
+  incremental_marking()->Start(gc_reason);
 }
 
+void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
+    int gc_flags, const GCCallbackFlags gc_callback_flags) {
+  if (incremental_marking()->IsStopped()) {
+    IncrementalMarkingLimit reached_limit = IncrementalMarkingLimitReached();
+    if (reached_limit == IncrementalMarkingLimit::kSoftLimit) {
+      incremental_marking()->incremental_marking_job()->ScheduleTask(this);
+    } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
+      StartIncrementalMarking(gc_flags,
+                              GarbageCollectionReason::kAllocationLimit,
+                              gc_callback_flags);
+    }
+  }
+}
 
-void Heap::StartIdleIncrementalMarking() {
+void Heap::StartIdleIncrementalMarking(GarbageCollectionReason gc_reason) {
   gc_idle_time_handler_->ResetNoProgressCounter();
-  StartIncrementalMarking(kReduceMemoryFootprintMask, kNoGCCallbackFlags,
-                          "idle");
+  StartIncrementalMarking(kReduceMemoryFootprintMask, gc_reason,
+                          kNoGCCallbackFlags);
 }
 
 
@@ -1192,17 +1198,15 @@
       }
       if (perform_gc) {
         if (space == NEW_SPACE) {
-          CollectGarbage(NEW_SPACE, "failed to reserve space in the new space");
+          CollectGarbage(NEW_SPACE, GarbageCollectionReason::kDeserializer);
         } else {
           if (counter > 1) {
             CollectAllGarbage(
                 kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
-                "failed to reserve space in paged or large "
-                "object space, trying to reduce memory footprint");
+                GarbageCollectionReason::kDeserializer);
           } else {
-            CollectAllGarbage(
-                kAbortIncrementalMarkingMask,
-                "failed to reserve space in paged or large object space");
+            CollectAllGarbage(kAbortIncrementalMarkingMask,
+                              GarbageCollectionReason::kDeserializer);
           }
         }
         gc_performed = true;
@@ -1216,7 +1220,7 @@
 
 
 void Heap::EnsureFromSpaceIsCommitted() {
-  if (new_space_.CommitFromSpaceIfNeeded()) return;
+  if (new_space_->CommitFromSpaceIfNeeded()) return;
 
   // Committing memory to from space failed.
   // Memory is exhausted and we will die.
@@ -1264,11 +1268,6 @@
 
   double survival_rate = promotion_ratio_ + semi_space_copied_rate_;
   tracer()->AddSurvivalRatio(survival_rate);
-  if (survival_rate > kYoungSurvivalRateHighThreshold) {
-    high_survival_rate_period_length_++;
-  } else {
-    high_survival_rate_period_length_ = 0;
-  }
 }
 
 bool Heap::PerformGarbageCollection(
@@ -1303,14 +1302,7 @@
 
   EnsureFromSpaceIsCommitted();
 
-  int start_new_space_size = Heap::new_space()->SizeAsInt();
-
-  if (IsHighSurvivalRate()) {
-    // We speed up the incremental marker if it is running so that it
-    // does not fall behind the rate of promotion, which would cause a
-    // constantly growing old space.
-    incremental_marking()->NotifyOfHighPromotionRate();
-  }
+  int start_new_space_size = static_cast<int>(Heap::new_space()->Size());
 
   {
     Heap::PretenuringScope pretenuring_scope(this);
@@ -1319,11 +1311,10 @@
       UpdateOldGenerationAllocationCounter();
       // Perform mark-sweep with optional compaction.
       MarkCompact();
-      old_gen_exhausted_ = false;
       old_generation_size_configured_ = true;
       // This should be updated before PostGarbageCollectionProcessing, which
       // can cause another GC. Take into account the objects promoted during GC.
-      old_generation_allocation_counter_ +=
+      old_generation_allocation_counter_at_last_gc_ +=
           static_cast<size_t>(promoted_objects_size_);
       old_generation_size_at_last_gc_ = PromotedSpaceSizeOfObjects();
     } else {
@@ -1360,7 +1351,7 @@
   if (collector == MARK_COMPACTOR) {
     // Register the amount of external allocated memory.
     external_memory_at_last_mark_compact_ = external_memory_;
-    external_memory_limit_ = external_memory_ + kExternalAllocationLimit;
+    external_memory_limit_ = external_memory_ + kExternalAllocationSoftLimit;
     SetOldGenerationAllocationLimit(old_gen_size, gc_speed, mutator_speed);
   } else if (HasLowYoungGenerationAllocationRate() &&
              old_generation_size_configured_) {
@@ -1491,18 +1482,18 @@
 
 void Heap::CheckNewSpaceExpansionCriteria() {
   if (FLAG_experimental_new_space_growth_heuristic) {
-    if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() &&
-        survived_last_scavenge_ * 100 / new_space_.TotalCapacity() >= 10) {
+    if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
+        survived_last_scavenge_ * 100 / new_space_->TotalCapacity() >= 10) {
       // Grow the size of new space if there is room to grow, and more than 10%
       // have survived the last scavenge.
-      new_space_.Grow();
+      new_space_->Grow();
       survived_since_last_expansion_ = 0;
     }
-  } else if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() &&
-             survived_since_last_expansion_ > new_space_.TotalCapacity()) {
+  } else if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() &&
+             survived_since_last_expansion_ > new_space_->TotalCapacity()) {
     // Grow the size of new space if there is room to grow, and enough data
     // has survived scavenge since the last expansion.
-    new_space_.Grow();
+    new_space_->Grow();
     survived_since_last_expansion_ = 0;
   }
 }
@@ -1541,6 +1532,11 @@
   emergency_stack_ = NULL;
 }
 
+void PromotionQueue::Destroy() {
+  DCHECK(is_empty());
+  delete emergency_stack_;
+  emergency_stack_ = NULL;
+}
 
 void PromotionQueue::RelocateQueueHead() {
   DCHECK(emergency_stack_ == NULL);
@@ -1615,13 +1611,13 @@
     // Register found wrappers with embedder so it can add them to its marking
     // deque and correctly manage the case when v8 scavenger collects the
     // wrappers by either keeping wrappables alive, or cleaning marking deque.
-    mark_compact_collector()->RegisterWrappersWithEmbedderHeapTracer();
+    RegisterWrappersWithEmbedderHeapTracer();
   }
 
   // Flip the semispaces.  After flipping, to space is empty, from space has
   // live objects.
-  new_space_.Flip();
-  new_space_.ResetAllocationInfo();
+  new_space_->Flip();
+  new_space_->ResetAllocationInfo();
 
   // We need to sweep newly copied objects which can be either in the
   // to space or promoted to the old generation.  For to-space
@@ -1640,7 +1636,7 @@
   // for the addresses of promoted objects: every object promoted
   // frees up its size in bytes from the top of the new space, and
   // objects are at least one pointer in size.
-  Address new_space_front = new_space_.ToSpaceStart();
+  Address new_space_front = new_space_->ToSpaceStart();
   promotion_queue_.Initialize();
 
   PromotionMode promotion_mode = CurrentPromotionMode();
@@ -1737,16 +1733,17 @@
   ScavengeWeakObjectRetainer weak_object_retainer(this);
   ProcessYoungWeakReferences(&weak_object_retainer);
 
-  DCHECK(new_space_front == new_space_.top());
+  DCHECK(new_space_front == new_space_->top());
 
   // Set age mark.
-  new_space_.set_age_mark(new_space_.top());
+  new_space_->set_age_mark(new_space_->top());
 
   ArrayBufferTracker::FreeDeadInNewSpace(this);
 
   // Update how much has survived scavenge.
-  IncrementYoungSurvivorsCounter(static_cast<int>(
-      (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
+  IncrementYoungSurvivorsCounter(
+      static_cast<int>((PromotedSpaceSizeOfObjects() - survived_watermark) +
+                       new_space_->Size()));
 
   LOG(isolate_, ResourceEvent("scavenge", "end"));
 
@@ -1910,11 +1907,11 @@
                          Address new_space_front,
                          PromotionMode promotion_mode) {
   do {
-    SemiSpace::AssertValidRange(new_space_front, new_space_.top());
+    SemiSpace::AssertValidRange(new_space_front, new_space_->top());
     // The addresses new_space_front and new_space_.top() define a
     // queue of unprocessed copied objects.  Process them until the
     // queue is empty.
-    while (new_space_front != new_space_.top()) {
+    while (new_space_front != new_space_->top()) {
       if (!Page::IsAlignedToPageSize(new_space_front)) {
         HeapObject* object = HeapObject::FromAddress(new_space_front);
         if (promotion_mode == PROMOTE_MARKED) {
@@ -1953,7 +1950,7 @@
 
     // Take another spin if there are now unswept objects in new space
     // (there are currently no more unswept promoted objects).
-  } while (new_space_front != new_space_.top());
+  } while (new_space_front != new_space_->top());
 
   return new_space_front;
 }
@@ -2283,6 +2280,8 @@
     DCHECK_NE(fixed_array_map(), fixed_cow_array_map());
 
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info)
+    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info_entry)
+    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
     ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
                            Context::NUMBER_FUNCTION_INDEX)
     ALLOCATE_MAP(MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize,
@@ -2391,6 +2390,12 @@
   }
 
   {
+    AllocationResult allocation = AllocateEmptyScopeInfo();
+    if (!allocation.To(&obj)) return false;
+  }
+
+  set_empty_scope_info(ScopeInfo::cast(obj));
+  {
     AllocationResult allocation = Allocate(boolean_map(), OLD_SPACE);
     if (!allocation.To(&obj)) return false;
   }
@@ -2432,7 +2437,7 @@
   // Statically ensure that it is safe to allocate heap numbers in paged
   // spaces.
   int size = HeapNumber::kSize;
-  STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
 
   AllocationSpace space = SelectSpace(pretenure);
 
@@ -2452,7 +2457,7 @@
   AllocationResult Heap::Allocate##Type(lane_type lanes[lane_count],      \
                                         PretenureFlag pretenure) {        \
     int size = Type::kSize;                                               \
-    STATIC_ASSERT(Type::kSize <= Page::kMaxRegularHeapObjectSize);        \
+    STATIC_ASSERT(Type::kSize <= kMaxRegularHeapObjectSize);              \
                                                                           \
     AllocationSpace space = SelectSpace(pretenure);                       \
                                                                           \
@@ -2476,7 +2481,7 @@
 
 AllocationResult Heap::AllocateCell(Object* value) {
   int size = Cell::kSize;
-  STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
 
   HeapObject* result = nullptr;
   {
@@ -2488,10 +2493,9 @@
   return result;
 }
 
-
 AllocationResult Heap::AllocatePropertyCell() {
   int size = PropertyCell::kSize;
-  STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
 
   HeapObject* result = nullptr;
   AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
@@ -2509,7 +2513,7 @@
 
 AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   int size = WeakCell::kSize;
-  STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(WeakCell::kSize <= kMaxRegularHeapObjectSize);
   HeapObject* result = nullptr;
   {
     AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
@@ -2729,12 +2733,6 @@
 #undef SYMBOL_INIT
   }
 
-  // Allocate the dictionary of intrinsic function names.
-  Handle<NameDictionary> intrinsic_names =
-      NameDictionary::New(isolate(), Runtime::kNumFunctions, TENURED);
-  Runtime::InitializeIntrinsicFunctionNames(isolate(), intrinsic_names);
-  set_intrinsic_function_names(*intrinsic_names);
-
   Handle<NameDictionary> empty_properties_dictionary =
       NameDictionary::New(isolate(), 0, TENURED);
   empty_properties_dictionary->SetRequiresCopyOnCapacityChange();
@@ -2777,18 +2775,18 @@
 
   {
     StaticFeedbackVectorSpec spec;
-    FeedbackVectorSlot load_ic_slot = spec.AddLoadICSlot();
-    FeedbackVectorSlot keyed_load_ic_slot = spec.AddKeyedLoadICSlot();
-    FeedbackVectorSlot store_ic_slot = spec.AddStoreICSlot();
-    FeedbackVectorSlot keyed_store_ic_slot = spec.AddKeyedStoreICSlot();
+    FeedbackVectorSlot slot = spec.AddLoadICSlot();
+    DCHECK_EQ(slot, FeedbackVectorSlot(TypeFeedbackVector::kDummyLoadICSlot));
 
-    DCHECK_EQ(load_ic_slot,
-              FeedbackVectorSlot(TypeFeedbackVector::kDummyLoadICSlot));
-    DCHECK_EQ(keyed_load_ic_slot,
+    slot = spec.AddKeyedLoadICSlot();
+    DCHECK_EQ(slot,
               FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
-    DCHECK_EQ(store_ic_slot,
-              FeedbackVectorSlot(TypeFeedbackVector::kDummyStoreICSlot));
-    DCHECK_EQ(keyed_store_ic_slot,
+
+    slot = spec.AddStoreICSlot();
+    DCHECK_EQ(slot, FeedbackVectorSlot(TypeFeedbackVector::kDummyStoreICSlot));
+
+    slot = spec.AddKeyedStoreICSlot();
+    DCHECK_EQ(slot,
               FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
 
     Handle<TypeFeedbackMetadata> dummy_metadata =
@@ -2796,19 +2794,36 @@
     Handle<TypeFeedbackVector> dummy_vector =
         TypeFeedbackVector::New(isolate(), dummy_metadata);
 
-    Object* megamorphic = *TypeFeedbackVector::MegamorphicSentinel(isolate());
-    dummy_vector->Set(load_ic_slot, megamorphic, SKIP_WRITE_BARRIER);
-    dummy_vector->Set(keyed_load_ic_slot, megamorphic, SKIP_WRITE_BARRIER);
-    dummy_vector->Set(store_ic_slot, megamorphic, SKIP_WRITE_BARRIER);
-    dummy_vector->Set(keyed_store_ic_slot, megamorphic, SKIP_WRITE_BARRIER);
-
     set_dummy_vector(*dummy_vector);
+
+    // Now initialize dummy vector's entries.
+    LoadICNexus(isolate()).ConfigureMegamorphic();
+    StoreICNexus(isolate()).ConfigureMegamorphic();
+    KeyedLoadICNexus(isolate()).ConfigureMegamorphicKeyed(PROPERTY);
+    KeyedStoreICNexus(isolate()).ConfigureMegamorphicKeyed(PROPERTY);
   }
 
   {
+    // Create a canonical empty TypeFeedbackVector, which is shared by all
+    // functions that don't need actual type feedback slots. Note, however,
+    // that all these functions will share the same invocation count; that
+    // shouldn't matter, since the invocation count is only used to
+    // relativize the absolute call counts, and call counts exist only when
+    // there are actual feedback slots.
+    Handle<FixedArray> empty_type_feedback_vector = factory->NewFixedArray(
+        TypeFeedbackVector::kReservedIndexCount, TENURED);
+    empty_type_feedback_vector->set(TypeFeedbackVector::kMetadataIndex,
+                                    empty_fixed_array());
+    empty_type_feedback_vector->set(TypeFeedbackVector::kInvocationCountIndex,
+                                    Smi::FromInt(0));
+    set_empty_type_feedback_vector(*empty_type_feedback_vector);
+
+    // We use a canonical empty LiteralsArray for all functions that neither
+    // have literals nor need a TypeFeedbackVector (besides the invocation
+    // count special slot).
     Handle<FixedArray> empty_literals_array =
         factory->NewFixedArray(1, TENURED);
-    empty_literals_array->set(0, *factory->empty_fixed_array());
+    empty_literals_array->set(0, *empty_type_feedback_vector);
     set_empty_literals_array(*empty_literals_array);
   }
 
@@ -2882,6 +2897,10 @@
       handle(Smi::FromInt(Isolate::kArrayProtectorValid), isolate()));
   set_species_protector(*species_cell);
 
+  cell = factory->NewPropertyCell();
+  cell->set_value(Smi::FromInt(Isolate::kArrayProtectorValid));
+  set_string_length_protector(*cell);
+
   set_serialized_templates(empty_fixed_array());
 
   set_weak_stack_trace_list(Smi::FromInt(0));
@@ -3009,7 +3028,7 @@
 AllocationResult Heap::AllocateForeign(Address address,
                                        PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate foreigns in paged spaces.
-  STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
   AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   Foreign* result = nullptr;
   AllocationResult allocation = Allocate(foreign_map(), space);
@@ -3776,6 +3795,18 @@
   return result;
 }
 
+AllocationResult Heap::AllocateEmptyScopeInfo() {
+  int size = FixedArray::SizeFor(0);
+  HeapObject* result = nullptr;
+  {
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
+    if (!allocation.To(&result)) return allocation;
+  }
+  // Initialize the object.
+  result->set_map_no_write_barrier(scope_info_map());
+  FixedArray::cast(result)->set_length(0);
+  return result;
+}
 
 AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) {
   if (!InNewSpace(src)) {
@@ -3908,7 +3939,14 @@
   int size = FixedArray::SizeFor(length);
   AllocationSpace space = SelectSpace(pretenure);
 
-  return AllocateRaw(size, space);
+  AllocationResult result = AllocateRaw(size, space);
+  if (!result.IsRetry() && size > kMaxRegularHeapObjectSize &&
+      FLAG_use_marking_progress_bar) {
+    MemoryChunk* chunk =
+        MemoryChunk::FromAddress(result.ToObjectChecked()->address());
+    chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
+  }
+  return result;
 }
 
 
@@ -3988,7 +4026,7 @@
 
 AllocationResult Heap::AllocateSymbol() {
   // Statically ensure that it is safe to allocate symbols in paged spaces.
-  STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize);
+  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);
 
   HeapObject* result = nullptr;
   AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_SPACE);
@@ -4049,7 +4087,8 @@
 void Heap::MakeHeapIterable() {
   DCHECK(AllowHeapAllocation::IsAllowed());
   if (!IsHeapIterable()) {
-    CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
+    CollectAllGarbage(kMakeHeapIterableMask,
+                      GarbageCollectionReason::kMakeHeapIterable);
   }
   if (mark_compact_collector()->sweeping_in_progress()) {
     mark_compact_collector()->EnsureSweepingCompleted();
@@ -4081,10 +4120,10 @@
       tracer()->ScavengeSpeedInBytesPerMillisecond(kForSurvivedObjects);
   double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
   if (FLAG_trace_mutator_utilization) {
-    PrintIsolate(isolate(),
-                 "Young generation mutator utilization = %.3f ("
-                 "mutator_speed=%.f, gc_speed=%.f)\n",
-                 result, mutator_speed, gc_speed);
+    isolate()->PrintWithTimestamp(
+        "Young generation mutator utilization = %.3f ("
+        "mutator_speed=%.f, gc_speed=%.f)\n",
+        result, mutator_speed, gc_speed);
   }
   return result;
 }
@@ -4097,10 +4136,10 @@
       tracer()->CombinedMarkCompactSpeedInBytesPerMillisecond());
   double result = ComputeMutatorUtilization(mutator_speed, gc_speed);
   if (FLAG_trace_mutator_utilization) {
-    PrintIsolate(isolate(),
-                 "Old generation mutator utilization = %.3f ("
-                 "mutator_speed=%.f, gc_speed=%.f)\n",
-                 result, mutator_speed, gc_speed);
+    isolate()->PrintWithTimestamp(
+        "Old generation mutator utilization = %.3f ("
+        "mutator_speed=%.f, gc_speed=%.f)\n",
+        result, mutator_speed, gc_speed);
   }
   return result;
 }
@@ -4170,44 +4209,49 @@
   if (ShouldReduceMemory() ||
       ((allocation_throughput != 0) &&
        (allocation_throughput < kLowAllocationThroughput))) {
-    new_space_.Shrink();
+    new_space_->Shrink();
     UncommitFromSpace();
   }
 }
 
+bool Heap::MarkingDequesAreEmpty() {
+  return mark_compact_collector()->marking_deque()->IsEmpty() &&
+         (!UsingEmbedderHeapTracer() ||
+          (wrappers_to_trace() == 0 &&
+           embedder_heap_tracer()->NumberOfWrappersToTrace() == 0));
+}
 
-void Heap::FinalizeIncrementalMarkingIfComplete(const char* comment) {
+void Heap::FinalizeIncrementalMarkingIfComplete(
+    GarbageCollectionReason gc_reason) {
   if (incremental_marking()->IsMarking() &&
       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
        (!incremental_marking()->finalize_marking_completed() &&
-        mark_compact_collector()->marking_deque()->IsEmpty()))) {
-    FinalizeIncrementalMarking(comment);
+        MarkingDequesAreEmpty()))) {
+    FinalizeIncrementalMarking(gc_reason);
   } else if (incremental_marking()->IsComplete() ||
              (mark_compact_collector()->marking_deque()->IsEmpty())) {
-    CollectAllGarbage(current_gc_flags_, comment);
+    CollectAllGarbage(current_gc_flags_, gc_reason);
   }
 }
 
-
-bool Heap::TryFinalizeIdleIncrementalMarking(double idle_time_in_ms) {
+bool Heap::TryFinalizeIdleIncrementalMarking(
+    double idle_time_in_ms, GarbageCollectionReason gc_reason) {
   size_t size_of_objects = static_cast<size_t>(SizeOfObjects());
   double final_incremental_mark_compact_speed_in_bytes_per_ms =
       tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
   if (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
       (!incremental_marking()->finalize_marking_completed() &&
-       mark_compact_collector()->marking_deque()->IsEmpty() &&
+       MarkingDequesAreEmpty() &&
        gc_idle_time_handler_->ShouldDoOverApproximateWeakClosure(
            idle_time_in_ms))) {
-    FinalizeIncrementalMarking(
-        "Idle notification: finalize incremental marking");
+    FinalizeIncrementalMarking(gc_reason);
     return true;
   } else if (incremental_marking()->IsComplete() ||
-             (mark_compact_collector()->marking_deque()->IsEmpty() &&
+             (MarkingDequesAreEmpty() &&
               gc_idle_time_handler_->ShouldDoFinalIncrementalMarkCompact(
                   idle_time_in_ms, size_of_objects,
                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
-    CollectAllGarbage(current_gc_flags_,
-                      "idle notification: finalize incremental marking");
+    CollectAllGarbage(current_gc_flags_, gc_reason);
     return true;
   }
   return false;
@@ -4267,22 +4311,23 @@
       result = true;
       break;
     case DO_INCREMENTAL_STEP: {
-      if (incremental_marking()->incremental_marking_job()->IdleTaskPending()) {
-        result = true;
-      } else {
-        incremental_marking()
-            ->incremental_marking_job()
-            ->NotifyIdleTaskProgress();
-        result = IncrementalMarkingJob::IdleTask::Step(this, deadline_in_ms) ==
-                 IncrementalMarkingJob::IdleTask::kDone;
+      const double remaining_idle_time_in_ms =
+          incremental_marking()->AdvanceIncrementalMarking(
+              deadline_in_ms, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+              IncrementalMarking::FORCE_COMPLETION, StepOrigin::kTask);
+      if (remaining_idle_time_in_ms > 0.0) {
+        TryFinalizeIdleIncrementalMarking(
+            remaining_idle_time_in_ms,
+            GarbageCollectionReason::kFinalizeMarkingViaTask);
       }
+      result = incremental_marking()->IsStopped();
       break;
     }
     case DO_FULL_GC: {
       DCHECK(contexts_disposed_ > 0);
       HistogramTimerScope scope(isolate_->counters()->gc_context());
       TRACE_EVENT0("v8", "V8.GCContext");
-      CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
+      CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kContextDisposal);
       break;
     }
     case DO_NOTHING:
@@ -4328,8 +4373,7 @@
 
   if ((FLAG_trace_idle_notification && action.type > DO_NOTHING) ||
       FLAG_trace_idle_notification_verbose) {
-    PrintIsolate(isolate_, "%8.0f ms: ", isolate()->time_millis_since_init());
-    PrintF(
+    isolate_->PrintWithTimestamp(
         "Idle notification: requested idle time %.2f ms, used idle time %.2f "
         "ms, deadline usage %.2f ms [",
         idle_time_in_ms, idle_time_in_ms - deadline_difference,
@@ -4416,10 +4460,11 @@
     }
   }
   if (memory_pressure_level_.Value() == MemoryPressureLevel::kCritical) {
-    CollectGarbageOnMemoryPressure("memory pressure");
+    CollectGarbageOnMemoryPressure();
   } else if (memory_pressure_level_.Value() == MemoryPressureLevel::kModerate) {
     if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
-      StartIdleIncrementalMarking();
+      StartIncrementalMarking(kReduceMemoryFootprintMask,
+                              GarbageCollectionReason::kMemoryPressure);
     }
   }
   MemoryReducer::Event event;
@@ -4428,7 +4473,7 @@
   memory_reducer_->NotifyPossibleGarbage(event);
 }
 
-void Heap::CollectGarbageOnMemoryPressure(const char* source) {
+void Heap::CollectGarbageOnMemoryPressure() {
   const int kGarbageThresholdInBytes = 8 * MB;
   const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
   // This constant is the maximum response time in RAIL performance model.
@@ -4436,7 +4481,8 @@
 
   double start = MonotonicallyIncreasingTimeInMs();
   CollectAllGarbage(kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
-                    source, kGCCallbackFlagCollectAllAvailableGarbage);
+                    GarbageCollectionReason::kMemoryPressure,
+                    kGCCallbackFlagCollectAllAvailableGarbage);
   double end = MonotonicallyIncreasingTimeInMs();
 
   // Estimate how much memory we can free.
@@ -4451,11 +4497,13 @@
     // Otherwise, start incremental marking.
     if (end - start < kMaxMemoryPressurePauseMs / 2) {
       CollectAllGarbage(
-          kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask, source,
+          kReduceMemoryFootprintMask | kAbortIncrementalMarkingMask,
+          GarbageCollectionReason::kMemoryPressure,
           kGCCallbackFlagCollectAllAvailableGarbage);
     } else {
       if (FLAG_incremental_marking && incremental_marking()->IsStopped()) {
-        StartIdleIncrementalMarking();
+        StartIncrementalMarking(kReduceMemoryFootprintMask,
+                                GarbageCollectionReason::kMemoryPressure);
       }
     }
   }
@@ -4527,7 +4575,7 @@
   PrintF("Heap statistics : ");
   memory_allocator()->ReportStatistics();
   PrintF("To space : ");
-  new_space_.ReportStatistics();
+  new_space_->ReportStatistics();
   PrintF("Old space : ");
   old_space_->ReportStatistics();
   PrintF("Code space : ");
@@ -4541,12 +4589,64 @@
 
 #endif  // DEBUG
 
+const char* Heap::GarbageCollectionReasonToString(
+    GarbageCollectionReason gc_reason) {
+  switch (gc_reason) {
+    case GarbageCollectionReason::kAllocationFailure:
+      return "allocation failure";
+    case GarbageCollectionReason::kAllocationLimit:
+      return "allocation limit";
+    case GarbageCollectionReason::kContextDisposal:
+      return "context disposal";
+    case GarbageCollectionReason::kCountersExtension:
+      return "counters extension";
+    case GarbageCollectionReason::kDebugger:
+      return "debugger";
+    case GarbageCollectionReason::kDeserializer:
+      return "deserialize";
+    case GarbageCollectionReason::kExternalMemoryPressure:
+      return "external memory pressure";
+    case GarbageCollectionReason::kFinalizeMarkingViaStackGuard:
+      return "finalize incremental marking via stack guard";
+    case GarbageCollectionReason::kFinalizeMarkingViaTask:
+      return "finalize incremental marking via task";
+    case GarbageCollectionReason::kFullHashtable:
+      return "full hash-table";
+    case GarbageCollectionReason::kHeapProfiler:
+      return "heap profiler";
+    case GarbageCollectionReason::kIdleTask:
+      return "idle task";
+    case GarbageCollectionReason::kLastResort:
+      return "last resort";
+    case GarbageCollectionReason::kLowMemoryNotification:
+      return "low memory notification";
+    case GarbageCollectionReason::kMakeHeapIterable:
+      return "make heap iterable";
+    case GarbageCollectionReason::kMemoryPressure:
+      return "memory pressure";
+    case GarbageCollectionReason::kMemoryReducer:
+      return "memory reducer";
+    case GarbageCollectionReason::kRuntime:
+      return "runtime";
+    case GarbageCollectionReason::kSamplingProfiler:
+      return "sampling profiler";
+    case GarbageCollectionReason::kSnapshotCreator:
+      return "snapshot creator";
+    case GarbageCollectionReason::kTesting:
+      return "testing";
+    case GarbageCollectionReason::kUnknown:
+      return "unknown";
+  }
+  UNREACHABLE();
+  return "";
+}
+
 bool Heap::Contains(HeapObject* value) {
   if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) {
     return false;
   }
   return HasBeenSetUp() &&
-         (new_space_.ToSpaceContains(value) || old_space_->Contains(value) ||
+         (new_space_->ToSpaceContains(value) || old_space_->Contains(value) ||
           code_space_->Contains(value) || map_space_->Contains(value) ||
           lo_space_->Contains(value));
 }
@@ -4556,7 +4656,7 @@
     return false;
   }
   return HasBeenSetUp() &&
-         (new_space_.ToSpaceContainsSlow(addr) ||
+         (new_space_->ToSpaceContainsSlow(addr) ||
           old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) ||
           map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr));
 }
@@ -4569,7 +4669,7 @@
 
   switch (space) {
     case NEW_SPACE:
-      return new_space_.ToSpaceContains(value);
+      return new_space_->ToSpaceContains(value);
     case OLD_SPACE:
       return old_space_->Contains(value);
     case CODE_SPACE:
@@ -4591,7 +4691,7 @@
 
   switch (space) {
     case NEW_SPACE:
-      return new_space_.ToSpaceContainsSlow(addr);
+      return new_space_->ToSpaceContainsSlow(addr);
     case OLD_SPACE:
       return old_space_->ContainsSlow(addr);
     case CODE_SPACE:
@@ -4654,7 +4754,7 @@
   VerifySmisVisitor smis_visitor;
   IterateSmiRoots(&smis_visitor);
 
-  new_space_.Verify();
+  new_space_->Verify();
 
   old_space_->Verify(&visitor);
   map_space_->Verify(&visitor);
@@ -4673,9 +4773,9 @@
 
 
 void Heap::ZapFromSpace() {
-  if (!new_space_.IsFromSpaceCommitted()) return;
-  for (Page* page : NewSpacePageRange(new_space_.FromSpaceStart(),
-                                      new_space_.FromSpaceEnd())) {
+  if (!new_space_->IsFromSpaceCommitted()) return;
+  for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(),
+                                      new_space_->FromSpaceEnd())) {
     for (Address cursor = page->area_start(), limit = page->area_end();
          cursor < limit; cursor += kPointerSize) {
       Memory::Address_at(cursor) = kFromSpaceZapValue;
@@ -4967,7 +5067,7 @@
 
   if (FLAG_stress_compaction) {
     // This will cause more frequent GCs when stressing.
-    max_semi_space_size_ = Page::kPageSize;
+    max_semi_space_size_ = MB;
   }
 
   // The new space size must be a power of two to support single-bit testing
@@ -5018,7 +5118,7 @@
   old_generation_allocation_limit_ = initial_old_generation_size_;
 
   // We rely on being able to allocate new arrays in paged spaces.
-  DCHECK(Page::kMaxRegularHeapObjectSize >=
+  DCHECK(kMaxRegularHeapObjectSize >=
          (JSArray::kSize +
           FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) +
           AllocationMemento::kSize));
@@ -5060,8 +5160,8 @@
 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->start_marker = HeapStats::kStartMarker;
   *stats->end_marker = HeapStats::kEndMarker;
-  *stats->new_space_size = new_space_.SizeAsInt();
-  *stats->new_space_capacity = new_space_.Capacity();
+  *stats->new_space_size = new_space_->Size();
+  *stats->new_space_capacity = new_space_->Capacity();
   *stats->old_space_size = old_space_->SizeOfObjects();
   *stats->old_space_capacity = old_space_->Capacity();
   *stats->code_space_size = code_space_->SizeOfObjects();
@@ -5183,11 +5283,19 @@
   CHECK(old_gen_size > 0);
   intptr_t limit = static_cast<intptr_t>(old_gen_size * factor);
   limit = Max(limit, old_gen_size + MinimumAllocationLimitGrowingStep());
-  limit += new_space_.Capacity();
+  limit += new_space_->Capacity();
   intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
   return Min(limit, halfway_to_the_max);
 }
 
+intptr_t Heap::MinimumAllocationLimitGrowingStep() {
+  const double kRegularAllocationLimitGrowingStep = 8;
+  const double kLowMemoryAllocationLimitGrowingStep = 2;
+  intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB);
+  return limit * (ShouldOptimizeForMemoryUsage()
+                      ? kLowMemoryAllocationLimitGrowingStep
+                      : kRegularAllocationLimitGrowingStep);
+}
 
 void Heap::SetOldGenerationAllocationLimit(intptr_t old_gen_size,
                                            double gc_speed,
@@ -5195,11 +5303,11 @@
   double factor = HeapGrowingFactor(gc_speed, mutator_speed);
 
   if (FLAG_trace_gc_verbose) {
-    PrintIsolate(isolate_,
-                 "Heap growing factor %.1f based on mu=%.3f, speed_ratio=%.f "
-                 "(gc=%.f, mutator=%.f)\n",
-                 factor, kTargetMutatorUtilization, gc_speed / mutator_speed,
-                 gc_speed, mutator_speed);
+    isolate_->PrintWithTimestamp(
+        "Heap growing factor %.1f based on mu=%.3f, speed_ratio=%.f "
+        "(gc=%.f, mutator=%.f)\n",
+        factor, kTargetMutatorUtilization, gc_speed / mutator_speed, gc_speed,
+        mutator_speed);
   }
 
   if (IsMemoryConstrainedDevice()) {
@@ -5223,14 +5331,13 @@
       CalculateOldGenerationAllocationLimit(factor, old_gen_size);
 
   if (FLAG_trace_gc_verbose) {
-    PrintIsolate(isolate_, "Grow: old size: %" V8PRIdPTR
-                           " KB, new limit: %" V8PRIdPTR " KB (%.1f)\n",
-                 old_gen_size / KB, old_generation_allocation_limit_ / KB,
-                 factor);
+    isolate_->PrintWithTimestamp("Grow: old size: %" V8PRIdPTR
+                                 " KB, new limit: %" V8PRIdPTR " KB (%.1f)\n",
+                                 old_gen_size / KB,
+                                 old_generation_allocation_limit_ / KB, factor);
   }
 }
 
-
 void Heap::DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
                                               double gc_speed,
                                               double mutator_speed) {
@@ -5238,17 +5345,64 @@
   intptr_t limit = CalculateOldGenerationAllocationLimit(factor, old_gen_size);
   if (limit < old_generation_allocation_limit_) {
     if (FLAG_trace_gc_verbose) {
-      PrintIsolate(isolate_,
-                   "Dampen: old size: %" V8PRIdPTR " KB, old limit: %" V8PRIdPTR
-                   " KB, "
-                   "new limit: %" V8PRIdPTR " KB (%.1f)\n",
-                   old_gen_size / KB, old_generation_allocation_limit_ / KB,
-                   limit / KB, factor);
+      isolate_->PrintWithTimestamp(
+          "Dampen: old size: %" V8PRIdPTR " KB, old limit: %" V8PRIdPTR
+          " KB, "
+          "new limit: %" V8PRIdPTR " KB (%.1f)\n",
+          old_gen_size / KB, old_generation_allocation_limit_ / KB, limit / KB,
+          factor);
     }
     old_generation_allocation_limit_ = limit;
   }
 }
 
+// This predicate is called when an old generation space cannot allocate from
+// the free list and is about to add a new page. Returning false will cause a
+// major GC. It happens when the old generation allocation limit is reached and
+// - either we need to optimize for memory usage,
+// - or incremental marking is not in progress and we cannot start it.
+bool Heap::ShouldExpandOldGenerationOnAllocationFailure() {
+  if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
+  // We reached the old generation allocation limit.
+
+  if (ShouldOptimizeForMemoryUsage()) return false;
+
+  if (incremental_marking()->IsStopped() &&
+      IncrementalMarkingLimitReached() == IncrementalMarkingLimit::kNoLimit) {
+    // We cannot start incremental marking.
+    return false;
+  }
+  return true;
+}
+
+// This function returns either kNoLimit, kSoftLimit, or kHardLimit.
+// kNoLimit means that either incremental marking is disabled or it is too
+// early to start incremental marking.
+// kSoftLimit means that incremental marking should be started soon.
+// kHardLimit means that incremental marking should be started immediately.
+Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
+  if (!incremental_marking()->CanBeActivated() ||
+      PromotedSpaceSizeOfObjects() < IncrementalMarking::kActivationThreshold) {
+    // Incremental marking is disabled or it is too early to start.
+    return IncrementalMarkingLimit::kNoLimit;
+  }
+  if ((FLAG_stress_compaction && (gc_count_ & 1) != 0) ||
+      HighMemoryPressure()) {
+    // If there is high memory pressure or stress testing is enabled, then
+    // start marking immediately.
+    return IncrementalMarkingLimit::kHardLimit;
+  }
+  intptr_t old_generation_space_available = OldGenerationSpaceAvailable();
+  if (old_generation_space_available > new_space_->Capacity()) {
+    return IncrementalMarkingLimit::kNoLimit;
+  }
+  // We are close to the allocation limit.
+  // Choose between the hard and the soft limits.
+  if (old_generation_space_available <= 0 || ShouldOptimizeForMemoryUsage()) {
+    return IncrementalMarkingLimit::kHardLimit;
+  }
+  return IncrementalMarkingLimit::kSoftLimit;
+}
 
 void Heap::EnableInlineAllocation() {
   if (!inline_allocation_disabled_) return;
@@ -5316,33 +5470,30 @@
   // Initialize incremental marking.
   incremental_marking_ = new IncrementalMarking(this);
 
-  // Set up new space.
-  if (!new_space_.SetUp(initial_semispace_size_, max_semi_space_size_)) {
+  for (int i = 0; i <= LAST_SPACE; i++) {
+    space_[i] = nullptr;
+  }
+
+  space_[NEW_SPACE] = new_space_ = new NewSpace(this);
+  if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) {
     return false;
   }
   new_space_top_after_last_gc_ = new_space()->top();
 
-  // Initialize old space.
-  old_space_ = new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE);
-  if (old_space_ == NULL) return false;
+  space_[OLD_SPACE] = old_space_ =
+      new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE);
   if (!old_space_->SetUp()) return false;
 
-  // Initialize the code space, set its maximum capacity to the old
-  // generation size. It needs executable memory.
-  code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE);
-  if (code_space_ == NULL) return false;
+  space_[CODE_SPACE] = code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE);
   if (!code_space_->SetUp()) return false;
 
-  // Initialize map space.
-  map_space_ = new MapSpace(this, MAP_SPACE);
-  if (map_space_ == NULL) return false;
+  space_[MAP_SPACE] = map_space_ = new MapSpace(this, MAP_SPACE);
   if (!map_space_->SetUp()) return false;
 
   // The large object code space may contain code or data.  We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
-  lo_space_ = new LargeObjectSpace(this, LO_SPACE);
-  if (lo_space_ == NULL) return false;
+  space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this, LO_SPACE);
   if (!lo_space_->SetUp()) return false;
 
   // Set up the seed that is used to randomize the string hash function.
@@ -5362,20 +5513,14 @@
   }
 
   tracer_ = new GCTracer(this);
-
   scavenge_collector_ = new Scavenger(this);
-
   mark_compact_collector_ = new MarkCompactCollector(this);
-
   gc_idle_time_handler_ = new GCIdleTimeHandler();
-
   memory_reducer_ = new MemoryReducer(this);
-
   if (FLAG_track_gc_object_stats) {
     live_object_stats_ = new ObjectStats(this);
     dead_object_stats_ = new ObjectStats(this);
   }
-
   scavenge_job_ = new ScavengeJob();
 
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
@@ -5435,28 +5580,52 @@
 
 
 void Heap::NotifyDeserializationComplete() {
-  deserialization_complete_ = true;
-#ifdef DEBUG
-  // All pages right after bootstrapping must be marked as never-evacuate.
+  DCHECK_EQ(0, gc_count());
   PagedSpaces spaces(this);
   for (PagedSpace* s = spaces.next(); s != NULL; s = spaces.next()) {
+    if (isolate()->snapshot_available()) s->ShrinkImmortalImmovablePages();
+#ifdef DEBUG
+    // All pages right after bootstrapping must be marked as never-evacuate.
     for (Page* p : *s) {
       CHECK(p->NeverEvacuate());
     }
-  }
 #endif  // DEBUG
+  }
+
+  deserialization_complete_ = true;
 }
 
 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
-  mark_compact_collector()->SetEmbedderHeapTracer(tracer);
+  DCHECK_NOT_NULL(tracer);
+  CHECK_NULL(embedder_heap_tracer_);
+  embedder_heap_tracer_ = tracer;
 }
 
-bool Heap::UsingEmbedderHeapTracer() {
-  return mark_compact_collector()->UsingEmbedderHeapTracer();
+void Heap::RegisterWrappersWithEmbedderHeapTracer() {
+  DCHECK(UsingEmbedderHeapTracer());
+  if (wrappers_to_trace_.empty()) {
+    return;
+  }
+  embedder_heap_tracer()->RegisterV8References(wrappers_to_trace_);
+  wrappers_to_trace_.clear();
 }
 
 void Heap::TracePossibleWrapper(JSObject* js_object) {
-  mark_compact_collector()->TracePossibleWrapper(js_object);
+  DCHECK(js_object->WasConstructedFromApiFunction());
+  if (js_object->GetInternalFieldCount() >= 2 &&
+      js_object->GetInternalField(0) &&
+      js_object->GetInternalField(0) != undefined_value() &&
+      js_object->GetInternalField(1) != undefined_value()) {
+    DCHECK(reinterpret_cast<intptr_t>(js_object->GetInternalField(0)) % 2 == 0);
+    wrappers_to_trace_.push_back(std::pair<void*, void*>(
+        reinterpret_cast<void*>(js_object->GetInternalField(0)),
+        reinterpret_cast<void*>(js_object->GetInternalField(1))));
+  }
+}
+
+bool Heap::RequiresImmediateWrapperProcessing() {
+  const size_t kTooManyWrappers = 16000;
+  return wrappers_to_trace_.size() > kTooManyWrappers;
 }
 
 void Heap::RegisterExternallyReferencedObject(Object** object) {
@@ -5480,33 +5649,18 @@
 
   UpdateMaximumCommitted();
 
-  if (FLAG_print_cumulative_gc_stat) {
-    PrintF("\n");
-    PrintF("gc_count=%d ", gc_count_);
-    PrintF("mark_sweep_count=%d ", ms_count_);
-    PrintF("max_gc_pause=%.1f ", get_max_gc_pause());
-    PrintF("total_gc_time=%.1f ", total_gc_time_ms_);
-    PrintF("min_in_mutator=%.1f ", get_min_in_mutator());
-    PrintF("max_alive_after_gc=%" V8PRIdPTR " ", get_max_alive_after_gc());
-    PrintF("total_marking_time=%.1f ", tracer()->cumulative_marking_duration());
-    PrintF("total_sweeping_time=%.1f ",
-           tracer()->cumulative_sweeping_duration());
-    PrintF("\n\n");
-  }
-
   if (FLAG_print_max_heap_committed) {
     PrintF("\n");
-    PrintF("maximum_committed_by_heap=%" V8PRIdPTR " ",
-           MaximumCommittedMemory());
-    PrintF("maximum_committed_by_new_space=%" V8PRIdPTR " ",
-           new_space_.MaximumCommittedMemory());
-    PrintF("maximum_committed_by_old_space=%" V8PRIdPTR " ",
+    PrintF("maximum_committed_by_heap=%" PRIuS " ", MaximumCommittedMemory());
+    PrintF("maximum_committed_by_new_space=%" PRIuS " ",
+           new_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_space=%" PRIuS " ",
            old_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_code_space=%" V8PRIdPTR " ",
+    PrintF("maximum_committed_by_code_space=%" PRIuS " ",
            code_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_map_space=%" V8PRIdPTR " ",
+    PrintF("maximum_committed_by_map_space=%" PRIuS " ",
            map_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_lo_space=%" V8PRIdPTR " ",
+    PrintF("maximum_committed_by_lo_space=%" PRIuS " ",
            lo_space_->MaximumCommittedMemory());
     PrintF("\n\n");
   }
@@ -5560,7 +5714,9 @@
   delete tracer_;
   tracer_ = nullptr;
 
-  new_space_.TearDown();
+  new_space_->TearDown();
+  delete new_space_;
+  new_space_ = nullptr;
 
   if (old_space_ != NULL) {
     delete old_space_;
@@ -5599,6 +5755,9 @@
 
   delete memory_allocator_;
   memory_allocator_ = nullptr;
+
+  delete embedder_reference_reporter_;
+  embedder_reference_reporter_ = nullptr;
 }
 
 
@@ -5879,14 +6038,10 @@
   }
 }
 
-
 SpaceIterator::SpaceIterator(Heap* heap)
-    : heap_(heap), current_space_(FIRST_SPACE), iterator_(NULL) {}
-
+    : heap_(heap), current_space_(FIRST_SPACE - 1) {}
 
 SpaceIterator::~SpaceIterator() {
-  // Delete active iterator if any.
-  delete iterator_;
 }
 
 
@@ -5895,48 +6050,9 @@
   return current_space_ != LAST_SPACE;
 }
 
-
-ObjectIterator* SpaceIterator::next() {
-  if (iterator_ != NULL) {
-    delete iterator_;
-    iterator_ = NULL;
-    // Move to the next space
-    current_space_++;
-    if (current_space_ > LAST_SPACE) {
-      return NULL;
-    }
-  }
-
-  // Return iterator for the new current space.
-  return CreateIterator();
-}
-
-
-// Create an iterator for the space to iterate.
-ObjectIterator* SpaceIterator::CreateIterator() {
-  DCHECK(iterator_ == NULL);
-
-  switch (current_space_) {
-    case NEW_SPACE:
-      iterator_ = new SemiSpaceIterator(heap_->new_space());
-      break;
-    case OLD_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->old_space());
-      break;
-    case CODE_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->code_space());
-      break;
-    case MAP_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->map_space());
-      break;
-    case LO_SPACE:
-      iterator_ = new LargeObjectIterator(heap_->lo_space());
-      break;
-  }
-
-  // Return the newly allocated iterator;
-  DCHECK(iterator_ != NULL);
-  return iterator_;
+Space* SpaceIterator::next() {
+  DCHECK(has_next());
+  return heap_->space(++current_space_);
 }
 
 
@@ -6021,7 +6137,7 @@
     default:
       break;
   }
-  object_iterator_ = space_iterator_->next();
+  object_iterator_ = space_iterator_->next()->GetObjectIterator();
 }
 
 
@@ -6034,8 +6150,6 @@
     DCHECK(object_iterator_ == nullptr);
   }
 #endif
-  // Make sure the last iterator is deallocated.
-  delete object_iterator_;
   delete space_iterator_;
   delete filter_;
 }
@@ -6052,22 +6166,22 @@
 
 HeapObject* HeapIterator::NextObject() {
   // No iterator means we are done.
-  if (object_iterator_ == nullptr) return nullptr;
+  if (object_iterator_.get() == nullptr) return nullptr;
 
-  if (HeapObject* obj = object_iterator_->Next()) {
+  if (HeapObject* obj = object_iterator_.get()->Next()) {
     // If the current iterator has more objects we are fine.
     return obj;
   } else {
     // Go through the spaces looking for one that has objects.
     while (space_iterator_->has_next()) {
-      object_iterator_ = space_iterator_->next();
-      if (HeapObject* obj = object_iterator_->Next()) {
+      object_iterator_ = space_iterator_->next()->GetObjectIterator();
+      if (HeapObject* obj = object_iterator_.get()->Next()) {
         return obj;
       }
     }
   }
   // Done with the last space.
-  object_iterator_ = nullptr;
+  object_iterator_.reset(nullptr);
   return nullptr;
 }
 
@@ -6260,95 +6374,10 @@
 }
 #endif
 
-
-void Heap::UpdateCumulativeGCStatistics(double duration,
-                                        double spent_in_mutator,
-                                        double marking_time) {
-  if (FLAG_print_cumulative_gc_stat) {
-    total_gc_time_ms_ += duration;
-    max_gc_pause_ = Max(max_gc_pause_, duration);
-    max_alive_after_gc_ = Max(max_alive_after_gc_, SizeOfObjects());
-    min_in_mutator_ = Min(min_in_mutator_, spent_in_mutator);
-  } else if (FLAG_trace_gc_verbose) {
+void Heap::UpdateTotalGCTime(double duration) {
+  if (FLAG_trace_gc_verbose) {
     total_gc_time_ms_ += duration;
   }
-
-  marking_time_ += marking_time;
-}
-
-
-int KeyedLookupCache::Hash(Handle<Map> map, Handle<Name> name) {
-  DisallowHeapAllocation no_gc;
-  // Uses only lower 32 bits if pointers are larger.
-  uintptr_t addr_hash =
-      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*map)) >> kMapHashShift;
-  return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
-}
-
-
-int KeyedLookupCache::Lookup(Handle<Map> map, Handle<Name> name) {
-  DisallowHeapAllocation no_gc;
-  int index = (Hash(map, name) & kHashMask);
-  for (int i = 0; i < kEntriesPerBucket; i++) {
-    Key& key = keys_[index + i];
-    if ((key.map == *map) && key.name->Equals(*name)) {
-      return field_offsets_[index + i];
-    }
-  }
-  return kNotFound;
-}
-
-
-void KeyedLookupCache::Update(Handle<Map> map, Handle<Name> name,
-                              int field_offset) {
-  DisallowHeapAllocation no_gc;
-  if (!name->IsUniqueName()) {
-    if (!StringTable::InternalizeStringIfExists(
-             name->GetIsolate(), Handle<String>::cast(name)).ToHandle(&name)) {
-      return;
-    }
-  }
-  // This cache is cleared only between mark compact passes, so we expect the
-  // cache to only contain old space names.
-  DCHECK(!map->GetIsolate()->heap()->InNewSpace(*name));
-
-  int index = (Hash(map, name) & kHashMask);
-  // After a GC there will be free slots, so we use them in order (this may
-  // help to get the most frequently used one in position 0).
-  for (int i = 0; i < kEntriesPerBucket; i++) {
-    Key& key = keys_[index];
-    Object* free_entry_indicator = NULL;
-    if (key.map == free_entry_indicator) {
-      key.map = *map;
-      key.name = *name;
-      field_offsets_[index + i] = field_offset;
-      return;
-    }
-  }
-  // No free entry found in this bucket, so we move them all down one and
-  // put the new entry at position zero.
-  for (int i = kEntriesPerBucket - 1; i > 0; i--) {
-    Key& key = keys_[index + i];
-    Key& key2 = keys_[index + i - 1];
-    key = key2;
-    field_offsets_[index + i] = field_offsets_[index + i - 1];
-  }
-
-  // Write the new first entry.
-  Key& key = keys_[index];
-  key.map = *map;
-  key.name = *name;
-  field_offsets_[index] = field_offset;
-}
-
-
-void KeyedLookupCache::Clear() {
-  for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
-}
-
-
-void DescriptorLookupCache::Clear() {
-  for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
 }
 
 void Heap::ExternalStringTable::CleanUp() {
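
An aside on the SpaceIterator rework above (not part of this patch): next() no
longer hands out a heap-allocated ObjectIterator that the caller must delete;
it returns the Space itself, and HeapIterator asks the space for its own object
iterator, holding it in a std::unique_ptr. A minimal sketch of the resulting
iteration pattern, with WalkHeap and Visit as hypothetical names, assuming
Space::GetObjectIterator() returns std::unique_ptr<ObjectIterator> as the
HeapIterator hunk suggests:

// Sketch only: visit every object in every space with the reworked API.
void Visit(HeapObject* object);  // hypothetical per-object callback

void WalkHeap(Heap* heap) {
  SpaceIterator spaces(heap);
  while (spaces.has_next()) {
    // next() now returns the Space; the space knows which iterator it needs.
    std::unique_ptr<ObjectIterator> it = spaces.next()->GetObjectIterator();
    for (HeapObject* obj = it->Next(); obj != nullptr; obj = it->Next()) {
      Visit(obj);
    }
  }  // each per-space iterator is released automatically by the unique_ptr
}
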
diff --git a/src/heap/heap.h b/src/heap/heap.h
index b9b058c..cce467f 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -16,9 +16,8 @@
 #include "src/base/atomic-utils.h"
 #include "src/globals.h"
 #include "src/heap-symbols.h"
-// TODO(mstarzinger): One more include to kill!
-#include "src/heap/spaces.h"
 #include "src/list.h"
+#include "src/objects.h"
 
 namespace v8 {
 namespace internal {
@@ -49,6 +48,8 @@
   V(Map, one_byte_string_map, OneByteStringMap)                                \
   V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap)       \
   V(Map, scope_info_map, ScopeInfoMap)                                         \
+  V(Map, module_info_entry_map, ModuleInfoEntryMap)                            \
+  V(Map, module_info_map, ModuleInfoMap)                                       \
   V(Map, shared_function_info_map, SharedFunctionInfoMap)                      \
   V(Map, code_map, CodeMap)                                                    \
   V(Map, function_context_map, FunctionContextMap)                             \
@@ -59,7 +60,9 @@
   V(Map, heap_number_map, HeapNumberMap)                                       \
   V(Map, transition_array_map, TransitionArrayMap)                             \
   V(FixedArray, empty_literals_array, EmptyLiteralsArray)                      \
+  V(FixedArray, empty_type_feedback_vector, EmptyTypeFeedbackVector)           \
   V(FixedArray, empty_fixed_array, EmptyFixedArray)                            \
+  V(ScopeInfo, empty_scope_info, EmptyScopeInfo)                               \
   V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap)           \
   V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray)             \
   /* Entries beyond the first 32                                            */ \
@@ -164,6 +167,7 @@
   V(Cell, is_concat_spreadable_protector, IsConcatSpreadableProtector)         \
   V(PropertyCell, has_instance_protector, HasInstanceProtector)                \
   V(Cell, species_protector, SpeciesProtector)                                 \
+  V(PropertyCell, string_length_protector, StringLengthProtector)              \
   /* Special numbers */                                                        \
   V(HeapNumber, nan_value, NanValue)                                           \
   V(HeapNumber, hole_nan_value, HoleNanValue)                                  \
@@ -185,7 +189,6 @@
   V(FixedArray, experimental_extra_natives_source_cache,                       \
     ExperimentalExtraNativesSourceCache)                                       \
   /* Lists and dictionaries */                                                 \
-  V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames)          \
   V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary)    \
   V(Object, symbol_registry, SymbolRegistry)                                   \
   V(Object, script_list, ScriptList)                                           \
@@ -275,6 +278,8 @@
   V(FixedArrayMap)                      \
   V(CodeMap)                            \
   V(ScopeInfoMap)                       \
+  V(ModuleInfoEntryMap)                 \
+  V(ModuleInfoMap)                      \
   V(FixedCOWArrayMap)                   \
   V(FixedDoubleArrayMap)                \
   V(WeakCellMap)                        \
@@ -322,100 +327,87 @@
 class HeapStats;
 class HistogramTimer;
 class Isolate;
+class MemoryAllocator;
 class MemoryReducer;
+class ObjectIterator;
 class ObjectStats;
+class Page;
+class PagedSpace;
 class Scavenger;
 class ScavengeJob;
+class Space;
 class StoreBuffer;
+class TracePossibleWrapperReporter;
 class WeakObjectRetainer;
 
-enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION };
-
 typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
 
-// A queue of objects promoted during scavenge. Each object is accompanied
-// by it's size to avoid dereferencing a map pointer for scanning.
-// The last page in to-space is used for the promotion queue. On conflict
-// during scavenge, the promotion queue is allocated externally and all
-// entries are copied to the external queue.
+enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION };
+
+enum ArrayStorageAllocationMode {
+  DONT_INITIALIZE_ARRAY_ELEMENTS,
+  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
+};
+
+enum class ClearRecordedSlots { kYes, kNo };
+
+enum class ClearBlackArea { kYes, kNo };
+
+enum class GarbageCollectionReason {
+  kUnknown = 0,
+  kAllocationFailure = 1,
+  kAllocationLimit = 2,
+  kContextDisposal = 3,
+  kCountersExtension = 4,
+  kDebugger = 5,
+  kDeserializer = 6,
+  kExternalMemoryPressure = 7,
+  kFinalizeMarkingViaStackGuard = 8,
+  kFinalizeMarkingViaTask = 9,
+  kFullHashtable = 10,
+  kHeapProfiler = 11,
+  kIdleTask = 12,
+  kLastResort = 13,
+  kLowMemoryNotification = 14,
+  kMakeHeapIterable = 15,
+  kMemoryPressure = 16,
+  kMemoryReducer = 17,
+  kRuntime = 18,
+  kSamplingProfiler = 19,
+  kSnapshotCreator = 20,
+  kTesting = 21
+  // If you add new items here, then update the incremental_marking_reason,
+  // mark_compact_reason, and scavenge_reason counters in counters.h.
+  // Also update src/tools/metrics/histograms/histograms.xml in chromium.
+};
+
+// A queue of objects promoted during scavenge. Each object is accompanied by
+// its size to avoid dereferencing a map pointer for scanning. The last page in
+// to-space is used for the promotion queue. On conflict during scavenge, the
+// promotion queue is allocated externally and all entries are copied to the
+// external queue.
 class PromotionQueue {
  public:
   explicit PromotionQueue(Heap* heap)
-      : front_(NULL),
-        rear_(NULL),
-        limit_(NULL),
-        emergency_stack_(0),
+      : front_(nullptr),
+        rear_(nullptr),
+        limit_(nullptr),
+        emergency_stack_(nullptr),
         heap_(heap) {}
 
   void Initialize();
+  void Destroy();
 
-  void Destroy() {
-    DCHECK(is_empty());
-    delete emergency_stack_;
-    emergency_stack_ = NULL;
-  }
+  inline void SetNewLimit(Address limit);
+  inline bool IsBelowPromotionQueue(Address to_space_top);
 
-  Page* GetHeadPage() {
-    return Page::FromAllocationAreaAddress(reinterpret_cast<Address>(rear_));
-  }
-
-  void SetNewLimit(Address limit) {
-    // If we are already using an emergency stack, we can ignore it.
-    if (emergency_stack_) return;
-
-    // If the limit is not on the same page, we can ignore it.
-    if (Page::FromAllocationAreaAddress(limit) != GetHeadPage()) return;
-
-    limit_ = reinterpret_cast<struct Entry*>(limit);
-
-    if (limit_ <= rear_) {
-      return;
-    }
-
-    RelocateQueueHead();
-  }
-
-  bool IsBelowPromotionQueue(Address to_space_top) {
-    // If an emergency stack is used, the to-space address cannot interfere
-    // with the promotion queue.
-    if (emergency_stack_) return true;
-
-    // If the given to-space top pointer and the head of the promotion queue
-    // are not on the same page, then the to-space objects are below the
-    // promotion queue.
-    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
-      return true;
-    }
-    // If the to space top pointer is smaller or equal than the promotion
-    // queue head, then the to-space objects are below the promotion queue.
-    return reinterpret_cast<struct Entry*>(to_space_top) <= rear_;
-  }
+  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
+  inline void remove(HeapObject** target, int32_t* size,
+                     bool* was_marked_black);
 
   bool is_empty() {
     return (front_ == rear_) &&
-           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
-  }
-
-  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
-
-  void remove(HeapObject** target, int32_t* size, bool* was_marked_black) {
-    DCHECK(!is_empty());
-    if (front_ == rear_) {
-      Entry e = emergency_stack_->RemoveLast();
-      *target = e.obj_;
-      *size = e.size_;
-      *was_marked_black = e.was_marked_black_;
-      return;
-    }
-
-    struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
-    *target = entry->obj_;
-    *size = entry->size_;
-    *was_marked_black = entry->was_marked_black_;
-
-    // Assert no underflow.
-    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
-                                reinterpret_cast<Address>(front_));
+           (emergency_stack_ == nullptr || emergency_stack_->length() == 0);
   }
 
  private:
@@ -428,6 +420,8 @@
     bool was_marked_black_ : 1;
   };
 
+  inline Page* GetHeadPage();
+
   void RelocateQueueHead();
 
   // The front of the queue is higher in the memory page chain than the rear.
@@ -436,21 +430,94 @@
   struct Entry* limit_;
 
   List<Entry>* emergency_stack_;
-
   Heap* heap_;
 
   DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
 };
 
+class AllocationResult {
+ public:
+  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
+    return AllocationResult(space);
+  }
 
-enum ArrayStorageAllocationMode {
-  DONT_INITIALIZE_ARRAY_ELEMENTS,
-  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
+  // Implicit constructor from Object*.
+  AllocationResult(Object* object)  // NOLINT
+      : object_(object) {
+    // AllocationResults can't return Smis, which are used to represent
+    // failure and the space to retry in.
+    CHECK(!object->IsSmi());
+  }
+
+  AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
+
+  inline bool IsRetry() { return object_->IsSmi(); }
+  inline HeapObject* ToObjectChecked();
+  inline AllocationSpace RetrySpace();
+
+  template <typename T>
+  bool To(T** obj) {
+    if (IsRetry()) return false;
+    *obj = T::cast(object_);
+    return true;
+  }
+
+ private:
+  explicit AllocationResult(AllocationSpace space)
+      : object_(Smi::FromInt(static_cast<int>(space))) {}
+
+  Object* object_;
 };
 
-enum class ClearRecordedSlots { kYes, kNo };
+STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
 
-enum class ClearBlackArea { kYes, kNo };
+#ifdef DEBUG
+struct CommentStatistic {
+  const char* comment;
+  int size;
+  int count;
+  void Clear() {
+    comment = NULL;
+    size = 0;
+    count = 0;
+  }
+  // Must be small, since an iteration is used for lookup.
+  static const int kMaxComments = 64;
+};
+#endif
+
+class NumberAndSizeInfo BASE_EMBEDDED {
+ public:
+  NumberAndSizeInfo() : number_(0), bytes_(0) {}
+
+  int number() const { return number_; }
+  void increment_number(int num) { number_ += num; }
+
+  int bytes() const { return bytes_; }
+  void increment_bytes(int size) { bytes_ += size; }
+
+  void clear() {
+    number_ = 0;
+    bytes_ = 0;
+  }
+
+ private:
+  int number_;
+  int bytes_;
+};
+
+// HistogramInfo class for recording a single "bar" of a histogram.  This
+// class is used for collecting statistics to print to the log file.
+class HistogramInfo : public NumberAndSizeInfo {
+ public:
+  HistogramInfo() : NumberAndSizeInfo(), name_(nullptr) {}
+
+  const char* name() { return name_; }
+  void set_name(const char* name) { name_ = name; }
+
+ private:
+  const char* name_;
+};
 
 class Heap {
  public:
@@ -637,30 +704,10 @@
   // should not happen during deserialization.
   void NotifyDeserializationComplete();
 
-  intptr_t old_generation_allocation_limit() const {
-    return old_generation_allocation_limit_;
-  }
-
-  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
-
-  Address* NewSpaceAllocationTopAddress() {
-    return new_space_.allocation_top_address();
-  }
-  Address* NewSpaceAllocationLimitAddress() {
-    return new_space_.allocation_limit_address();
-  }
-
-  Address* OldSpaceAllocationTopAddress() {
-    return old_space_->allocation_top_address();
-  }
-  Address* OldSpaceAllocationLimitAddress() {
-    return old_space_->allocation_limit_address();
-  }
-
-  bool CanExpandOldGeneration(int size) {
-    if (force_oom_) return false;
-    return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
-  }
+  inline Address* NewSpaceAllocationTopAddress();
+  inline Address* NewSpaceAllocationLimitAddress();
+  inline Address* OldSpaceAllocationTopAddress();
+  inline Address* OldSpaceAllocationLimitAddress();
 
   // Clear the Instanceof cache (used when a prototype changes).
   inline void ClearInstanceofCache();
@@ -763,14 +810,6 @@
   // Returns false if not able to reserve.
   bool ReserveSpace(Reservation* reservations, List<Address>* maps);
 
-  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
-
-  bool UsingEmbedderHeapTracer();
-
-  void TracePossibleWrapper(JSObject* js_object);
-
-  void RegisterExternallyReferencedObject(Object** object);
-
   //
   // Support for the API.
   //
@@ -792,18 +831,6 @@
   // Check new space expansion criteria and expand semispaces if they are met.
   void CheckNewSpaceExpansionCriteria();
 
-  inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
-    if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
-
-    intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
-
-    if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
-
-    if (HighMemoryPressure()) return true;
-
-    return false;
-  }
-
   void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
 
   // An object should be promoted if the object has survived a
@@ -817,8 +844,6 @@
 
   void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
 
-  inline bool OldGenerationAllocationLimitReached();
-
   // Completely clear the Instanceof cache (to stop it keeping objects alive
   // around a GC).
   inline void CompletelyClearInstanceofCache();
@@ -847,6 +872,8 @@
     global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
   }
 
+  int64_t external_memory_hard_limit() { return MaxOldGenerationSize() / 2; }
+
   int64_t external_memory() { return external_memory_; }
   void update_external_memory(int64_t delta) { external_memory_ += delta; }
 
@@ -861,9 +888,7 @@
 
   void DeoptMarkedAllocationSites();
 
-  bool DeoptMaybeTenuredAllocationSites() {
-    return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
-  }
+  inline bool DeoptMaybeTenuredAllocationSites();
 
   void AddWeakNewSpaceObjectToCodeDependency(Handle<HeapObject> obj,
                                              Handle<WeakCell> code);
@@ -937,39 +962,16 @@
   // Getters for spaces. =======================================================
   // ===========================================================================
 
-  Address NewSpaceTop() { return new_space_.top(); }
+  inline Address NewSpaceTop();
 
-  NewSpace* new_space() { return &new_space_; }
+  NewSpace* new_space() { return new_space_; }
   OldSpace* old_space() { return old_space_; }
   OldSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
   LargeObjectSpace* lo_space() { return lo_space_; }
 
-  PagedSpace* paged_space(int idx) {
-    switch (idx) {
-      case OLD_SPACE:
-        return old_space();
-      case MAP_SPACE:
-        return map_space();
-      case CODE_SPACE:
-        return code_space();
-      case NEW_SPACE:
-      case LO_SPACE:
-        UNREACHABLE();
-    }
-    return NULL;
-  }
-
-  Space* space(int idx) {
-    switch (idx) {
-      case NEW_SPACE:
-        return new_space();
-      case LO_SPACE:
-        return lo_space();
-      default:
-        return paged_space(idx);
-    }
-  }
+  inline PagedSpace* paged_space(int idx);
+  inline Space* space(int idx);
 
   // Returns name of the space.
   const char* GetSpaceName(int idx);
@@ -1090,22 +1092,22 @@
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   inline bool CollectGarbage(
-      AllocationSpace space, const char* gc_reason = NULL,
+      AllocationSpace space, GarbageCollectionReason gc_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
   // Performs a full garbage collection.  If (flags & kMakeHeapIterableMask) is
   // non-zero, then the slower precise sweeper is used, which leaves the heap
   // in a state where we can iterate over the heap visiting all objects.
   void CollectAllGarbage(
-      int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
+      int flags, GarbageCollectionReason gc_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
   // Last hope GC, should try to squeeze as much as possible.
-  void CollectAllAvailableGarbage(const char* gc_reason = NULL);
+  void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason);
 
   // Reports an external memory pressure event; either performs a major GC or
   // completes incremental marking in order to free external resources.
-  void ReportExternalMemoryPressure(const char* gc_reason = NULL);
+  void ReportExternalMemoryPressure();
 
   // Invoked when GC was requested via the stack guard.
   void HandleGCRequest();
@@ -1156,24 +1158,54 @@
 
   // Start incremental marking and ensure that idle time handler can perform
   // incremental steps.
-  void StartIdleIncrementalMarking();
+  void StartIdleIncrementalMarking(GarbageCollectionReason gc_reason);
 
   // Starts incremental marking assuming incremental marking is currently
   // stopped.
-  void StartIncrementalMarking(int gc_flags = kNoGCFlags,
-                               const GCCallbackFlags gc_callback_flags =
-                                   GCCallbackFlags::kNoGCCallbackFlags,
-                               const char* reason = nullptr);
+  void StartIncrementalMarking(
+      int gc_flags, GarbageCollectionReason gc_reason,
+      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
 
-  void FinalizeIncrementalMarkingIfComplete(const char* comment);
+  void StartIncrementalMarkingIfAllocationLimitIsReached(
+      int gc_flags,
+      GCCallbackFlags gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags);
 
-  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);
+  void FinalizeIncrementalMarkingIfComplete(GarbageCollectionReason gc_reason);
+
+  bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms,
+                                         GarbageCollectionReason gc_reason);
 
   void RegisterReservationsForBlackAllocation(Reservation* reservations);
 
   IncrementalMarking* incremental_marking() { return incremental_marking_; }
 
   // ===========================================================================
+  // Embedder heap tracer support. =============================================
+  // ===========================================================================
+
+  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
+
+  bool UsingEmbedderHeapTracer() { return embedder_heap_tracer() != nullptr; }
+
+  void TracePossibleWrapper(JSObject* js_object);
+
+  void RegisterExternallyReferencedObject(Object** object);
+
+  void RegisterWrappersWithEmbedderHeapTracer();
+
+  // In order to avoid running out of memory, we force tracing wrappers if
+  // there are too many of them.
+  bool RequiresImmediateWrapperProcessing();
+
+  EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
+
+  EmbedderReachableReferenceReporter* embedder_reachable_reference_reporter() {
+    return embedder_reference_reporter_;
+  }
+
+  size_t wrappers_to_trace() { return wrappers_to_trace_.size(); }
+
+  // ===========================================================================
   // External string table API. ================================================
   // ===========================================================================
 
@@ -1258,19 +1290,19 @@
   intptr_t OldGenerationCapacity();
 
   // Returns the amount of memory currently committed for the heap.
-  intptr_t CommittedMemory();
+  size_t CommittedMemory();
 
   // Returns the amount of memory currently committed for the old space.
-  intptr_t CommittedOldGenerationMemory();
+  size_t CommittedOldGenerationMemory();
 
   // Returns the amount of executable memory currently committed for the heap.
-  intptr_t CommittedMemoryExecutable();
+  size_t CommittedMemoryExecutable();
 
   // Returns the amount of physical memory currently committed for the heap.
   size_t CommittedPhysicalMemory();
 
   // Returns the maximum amount of memory ever committed for the heap.
-  intptr_t MaximumCommittedMemory() { return maximum_committed_; }
+  size_t MaximumCommittedMemory() { return maximum_committed_; }
 
   // Updates the maximum committed memory for the heap. Should be called
   // whenever a space grows.
@@ -1326,13 +1358,9 @@
     return static_cast<intptr_t>(total);
   }
 
-  void UpdateNewSpaceAllocationCounter() {
-    new_space_allocation_counter_ = NewSpaceAllocationCounter();
-  }
+  inline void UpdateNewSpaceAllocationCounter();
 
-  size_t NewSpaceAllocationCounter() {
-    return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
-  }
+  inline size_t NewSpaceAllocationCounter();
 
   // This should be used only for testing.
   void set_new_space_allocation_counter(size_t new_value) {
@@ -1340,16 +1368,18 @@
   }
 
   void UpdateOldGenerationAllocationCounter() {
-    old_generation_allocation_counter_ = OldGenerationAllocationCounter();
+    old_generation_allocation_counter_at_last_gc_ =
+        OldGenerationAllocationCounter();
   }
 
   size_t OldGenerationAllocationCounter() {
-    return old_generation_allocation_counter_ + PromotedSinceLastGC();
+    return old_generation_allocation_counter_at_last_gc_ +
+           PromotedSinceLastGC();
   }
 
   // This should be used only for testing.
-  void set_old_generation_allocation_counter(size_t new_value) {
-    old_generation_allocation_counter_ = new_value;
+  void set_old_generation_allocation_counter_at_last_gc(size_t new_value) {
+    old_generation_allocation_counter_at_last_gc_ = new_value;
   }
 
   size_t PromotedSinceLastGC() {
@@ -1456,6 +1486,9 @@
   void ReportCodeStatistics(const char* title);
 #endif
 
+  static const char* GarbageCollectionReasonToString(
+      GarbageCollectionReason gc_reason);
+
  private:
   class PretenuringScope;
 
@@ -1588,6 +1621,10 @@
     return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
   }
 
+  // Checks whether both the internal marking deque and the embedder-provided
+  // one are empty. Avoid in the fast path as it may call through the API.
+  bool MarkingDequesAreEmpty();
+
   void PreprocessStackTraces();
 
   // Checks whether a global GC is necessary
@@ -1607,7 +1644,7 @@
   // Returns whether there is a chance that another major GC could
   // collect more garbage.
   bool CollectGarbage(
-      GarbageCollector collector, const char* gc_reason,
+      GarbageCollector collector, GarbageCollectionReason gc_reason,
       const char* collector_reason,
       const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
 
@@ -1646,7 +1683,7 @@
   void EnsureFromSpaceIsCommitted();
 
   // Uncommit unused semi space.
-  bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
+  bool UncommitFromSpace();
 
   // Fill in bogus values in from space
   void ZapFromSpace();
@@ -1669,10 +1706,6 @@
   // Flush the number to string cache.
   void FlushNumberStringCache();
 
-  // TODO(hpayer): Allocation site pretenuring may make this method obsolete.
-  // Re-visit incremental marking heuristics.
-  bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
-
   void ConfigureInitialOldGenerationSize();
 
   bool HasLowYoungGenerationAllocationRate();
@@ -1682,10 +1715,6 @@
 
   void ReduceNewSpaceSize();
 
-  bool TryFinalizeIdleIncrementalMarking(
-      double idle_time_in_ms, size_t size_of_objects,
-      size_t mark_compact_speed_in_bytes_per_ms);
-
   GCIdleTimeHeapState ComputeHeapState();
 
   bool PerformIdleTimeAction(GCIdleTimeAction action,
@@ -1705,13 +1734,13 @@
 
   void CompactRetainedMaps(ArrayList* retained_maps);
 
-  void CollectGarbageOnMemoryPressure(const char* source);
+  void CollectGarbageOnMemoryPressure();
 
   // Attempt to over-approximate the weak closure by marking object groups and
   // implicit references from global handles, but don't atomically complete
   // marking. If we continue to mark incrementally, we might have marked
   // objects that die later.
-  void FinalizeIncrementalMarking(const char* gc_reason);
+  void FinalizeIncrementalMarking(GarbageCollectionReason gc_reason);
 
   // Returns the timer used for a given GC type.
   // - GCScavenger: young generation GC
@@ -1772,18 +1801,7 @@
     return old_generation_allocation_limit_ - PromotedTotalSize();
   }
 
-  // Returns maximum GC pause.
-  double get_max_gc_pause() { return max_gc_pause_; }
-
-  // Returns maximum size of objects alive after GC.
-  intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
-
-  // Returns minimal interval between two subsequent collections.
-  double get_min_in_mutator() { return min_in_mutator_; }
-
-  // Update GC statistics that are tracked on the Heap.
-  void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
-                                    double marking_time);
+  void UpdateTotalGCTime(double duration);
 
   bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
 
@@ -1807,15 +1825,24 @@
   void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
                                        double mutator_speed);
 
-  intptr_t MinimumAllocationLimitGrowingStep() {
-    const double kRegularAllocationLimitGrowingStep = 8;
-    const double kLowMemoryAllocationLimitGrowingStep = 2;
-    intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB);
-    return limit * (ShouldOptimizeForMemoryUsage()
-                        ? kLowMemoryAllocationLimitGrowingStep
-                        : kRegularAllocationLimitGrowingStep);
+  intptr_t MinimumAllocationLimitGrowingStep();
+
+  intptr_t old_generation_allocation_limit() const {
+    return old_generation_allocation_limit_;
   }
 
+  bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
+
+  bool CanExpandOldGeneration(int size) {
+    if (force_oom_) return false;
+    return (OldGenerationCapacity() + size) < MaxOldGenerationSize();
+  }
+
+  bool ShouldExpandOldGenerationOnAllocationFailure();
+
+  enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
+  IncrementalMarkingLimit IncrementalMarkingLimitReached();
+
   // ===========================================================================
   // Idle notification. ========================================================
   // ===========================================================================
@@ -2011,6 +2038,9 @@
   // Allocate empty fixed array.
   MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
 
+  // Allocate empty scope info.
+  MUST_USE_RESULT AllocationResult AllocateEmptyScopeInfo();
+
   // Allocate empty fixed typed array of given type.
   MUST_USE_RESULT AllocationResult
       AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
@@ -2068,7 +2098,7 @@
   intptr_t initial_old_generation_size_;
   bool old_generation_size_configured_;
   intptr_t max_executable_size_;
-  intptr_t maximum_committed_;
+  size_t maximum_committed_;
 
   // For keeping track of how much data has survived
   // scavenge since last new space expansion.
@@ -2095,11 +2125,13 @@
 
   int global_ic_age_;
 
-  NewSpace new_space_;
+  NewSpace* new_space_;
   OldSpace* old_space_;
   OldSpace* code_space_;
   MapSpace* map_space_;
   LargeObjectSpace* lo_space_;
+  // Map from the space id to the space.
+  Space* space_[LAST_SPACE + 1];
   HeapState gc_state_;
   int gc_post_processing_depth_;
   Address new_space_top_after_last_gc_;
@@ -2136,10 +2168,6 @@
   // generation and on every allocation in large object space.
   intptr_t old_generation_allocation_limit_;
 
-  // Indicates that an allocation has failed in the old generation since the
-  // last GC.
-  bool old_gen_exhausted_;
-
   // Indicates that inline bump-pointer allocation has been globally disabled
   // for all spaces. This is used to disable allocations in generated code.
   bool inline_allocation_disabled_;
@@ -2168,7 +2196,6 @@
 
   GCTracer* tracer_;
 
-  int high_survival_rate_period_length_;
   intptr_t promoted_objects_size_;
   double promotion_ratio_;
   double promotion_rate_;
@@ -2185,24 +2212,9 @@
   // of the allocation site.
   unsigned int maximum_size_scavenges_;
 
-  // Maximum GC pause.
-  double max_gc_pause_;
-
   // Total time spent in GC.
   double total_gc_time_ms_;
 
-  // Maximum size of objects alive after GC.
-  intptr_t max_alive_after_gc_;
-
-  // Minimal interval between two subsequent collections.
-  double min_in_mutator_;
-
-  // Cumulative GC time spent in marking.
-  double marking_time_;
-
-  // Cumulative GC time spent in sweeping.
-  double sweeping_time_;
-
   // Last time an idle notification happened.
   double last_idle_notification_time_;
 
@@ -2242,7 +2254,7 @@
   // This counter is increased before each GC and never reset. To
   // account for the bytes allocated since the last GC, use the
   // OldGenerationAllocationCounter() function.
-  size_t old_generation_allocation_counter_;
+  size_t old_generation_allocation_counter_at_last_gc_;
 
   // The size of objects in old generation after the last MarkCompact GC.
   size_t old_generation_size_at_last_gc_;
@@ -2293,6 +2305,10 @@
   // The depth of HeapIterator nestings.
   int heap_iterator_depth_;
 
+  EmbedderHeapTracer* embedder_heap_tracer_;
+  EmbedderReachableReferenceReporter* embedder_reference_reporter_;
+  std::vector<std::pair<void*, void*>> wrappers_to_trace_;
+
   // Used for testing purposes.
   bool force_oom_;
 
@@ -2303,12 +2319,15 @@
   friend class HeapIterator;
   friend class IdleScavengeObserver;
   friend class IncrementalMarking;
+  friend class IncrementalMarkingJob;
   friend class IteratePromotedObjectsVisitor;
+  friend class LargeObjectSpace;
   friend class MarkCompactCollector;
   friend class MarkCompactMarkingVisitor;
   friend class NewSpace;
   friend class ObjectStatsCollector;
   friend class Page;
+  friend class PagedSpace;
   friend class Scavenger;
   friend class StoreBuffer;
   friend class TestMemoryAllocatorScope;
@@ -2402,7 +2421,7 @@
 
 // Space iterator for iterating over all old spaces of the heap: Old space
 // and code space.  Returns each space in turn, and null when it is done.
-class OldSpaces BASE_EMBEDDED {
+class V8_EXPORT_PRIVATE OldSpaces BASE_EMBEDDED {
  public:
   explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
   OldSpace* next();
@@ -2427,23 +2446,17 @@
 };
 
 
-// Space iterator for iterating over all spaces of the heap.
-// For each space an object iterator is provided. The deallocation of the
-// returned object iterators is handled by the space iterator.
 class SpaceIterator : public Malloced {
  public:
   explicit SpaceIterator(Heap* heap);
   virtual ~SpaceIterator();
 
   bool has_next();
-  ObjectIterator* next();
+  Space* next();
 
  private:
-  ObjectIterator* CreateIterator();
-
   Heap* heap_;
   int current_space_;         // from enum AllocationSpace.
-  ObjectIterator* iterator_;  // object iterator for the current space.
 };
 
 
@@ -2489,113 +2502,9 @@
   // Space iterator for iterating all the spaces.
   SpaceIterator* space_iterator_;
   // Object iterator for the space currently being iterated.
-  ObjectIterator* object_iterator_;
+  std::unique_ptr<ObjectIterator> object_iterator_;
 };
 
-
-// Cache for mapping (map, property name) into field offset.
-// Cleared at startup and prior to mark sweep collection.
-class KeyedLookupCache {
- public:
-  // Lookup field offset for (map, name). If absent, -1 is returned.
-  int Lookup(Handle<Map> map, Handle<Name> name);
-
-  // Update an element in the cache.
-  void Update(Handle<Map> map, Handle<Name> name, int field_offset);
-
-  // Clear the cache.
-  void Clear();
-
-  static const int kLength = 256;
-  static const int kCapacityMask = kLength - 1;
-  static const int kMapHashShift = 5;
-  static const int kHashMask = -4;  // Zero the last two bits.
-  static const int kEntriesPerBucket = 4;
-  static const int kEntryLength = 2;
-  static const int kMapIndex = 0;
-  static const int kKeyIndex = 1;
-  static const int kNotFound = -1;
-
-  // kEntriesPerBucket should be a power of 2.
-  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
-  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
-
- private:
-  KeyedLookupCache() {
-    for (int i = 0; i < kLength; ++i) {
-      keys_[i].map = NULL;
-      keys_[i].name = NULL;
-      field_offsets_[i] = kNotFound;
-    }
-  }
-
-  static inline int Hash(Handle<Map> map, Handle<Name> name);
-
-  // Get the address of the keys and field_offsets arrays.  Used in
-  // generated code to perform cache lookups.
-  Address keys_address() { return reinterpret_cast<Address>(&keys_); }
-
-  Address field_offsets_address() {
-    return reinterpret_cast<Address>(&field_offsets_);
-  }
-
-  struct Key {
-    Map* map;
-    Name* name;
-  };
-
-  Key keys_[kLength];
-  int field_offsets_[kLength];
-
-  friend class ExternalReference;
-  friend class Isolate;
-  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
-};
-
-
-// Cache for mapping (map, property name) into descriptor index.
-// The cache contains both positive and negative results.
-// Descriptor index equals kNotFound means the property is absent.
-// Cleared at startup and prior to any gc.
-class DescriptorLookupCache {
- public:
-  // Lookup descriptor index for (map, name).
-  // If absent, kAbsent is returned.
-  inline int Lookup(Map* source, Name* name);
-
-  // Update an element in the cache.
-  inline void Update(Map* source, Name* name, int result);
-
-  // Clear the cache.
-  void Clear();
-
-  static const int kAbsent = -2;
-
- private:
-  DescriptorLookupCache() {
-    for (int i = 0; i < kLength; ++i) {
-      keys_[i].source = NULL;
-      keys_[i].name = NULL;
-      results_[i] = kAbsent;
-    }
-  }
-
-  static inline int Hash(Object* source, Name* name);
-
-  static const int kLength = 64;
-  struct Key {
-    Map* source;
-    Name* name;
-  };
-
-  Key keys_[kLength];
-  int results_[kLength];
-
-  friend class Isolate;
-  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
-};
-
-
 // Abstract base class for checking whether a weak object should be retained.
 class WeakObjectRetainer {
  public:
@@ -2720,6 +2629,18 @@
   DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
 };
 
+class TracePossibleWrapperReporter : public EmbedderReachableReferenceReporter {
+ public:
+  explicit TracePossibleWrapperReporter(Heap* heap) : heap_(heap) {}
+  void ReportExternalReference(Value* object) override {
+    heap_->RegisterExternallyReferencedObject(
+        reinterpret_cast<Object**>(object));
+  }
+
+ private:
+  Heap* heap_;
+};
+
 }  // namespace internal
 }  // namespace v8
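
An illustrative note on the AllocationResult class now defined in heap.h above
(not part of this patch): it wraps either a successful allocation, holding a
non-Smi Object*, or a retry request whose Smi payload names the space to retry
in. A hedged usage sketch; AllocateSomething and AllocateFixedArrayOrNull are
hypothetical stand-ins for a MUST_USE_RESULT allocator and its caller:

// Sketch only: typical consumption of an AllocationResult.
AllocationResult AllocateSomething(Heap* heap);  // hypothetical allocator

FixedArray* AllocateFixedArrayOrNull(Heap* heap) {
  FixedArray* result = nullptr;
  AllocationResult allocation = AllocateSomething(heap);
  // To() casts and returns true on success; it returns false when the result
  // is a retry Smi rather than a real object.
  if (!allocation.To(&result)) {
    // RetrySpace() decodes which space should be collected before retrying.
    AllocationSpace space = allocation.RetrySpace();
    (void)space;  // a real caller would GC in |space| and then try again
    return nullptr;
  }
  return result;
}
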
 
diff --git a/src/heap/incremental-marking-inl.h b/src/heap/incremental-marking-inl.h
index fa22da6..ee594b2 100644
--- a/src/heap/incremental-marking-inl.h
+++ b/src/heap/incremental-marking-inl.h
@@ -6,6 +6,7 @@
 #define V8_HEAP_INCREMENTAL_MARKING_INL_H_
 
 #include "src/heap/incremental-marking.h"
+#include "src/isolate.h"
 
 namespace v8 {
 namespace internal {
@@ -33,6 +34,15 @@
   }
 }
 
+void IncrementalMarking::RestartIfNotMarking() {
+  if (state_ == COMPLETE) {
+    state_ = MARKING;
+    if (FLAG_trace_incremental_marking) {
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Restarting (new grey objects)\n");
+    }
+  }
+}
 
 }  // namespace internal
 }  // namespace v8
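
A short aside on RestartIfNotMarking() above (not part of this patch): it lets
incremental marking drop back from COMPLETE to MARKING when grey objects are
discovered late, for example when the embedder reports additional wrappers. A
minimal sketch of the intended call pattern; MarkGreyLate is a hypothetical
helper standing in for whatever path greys the object:

// Sketch only: reopen marking after late-discovered work.
void MarkGreyLate(Heap* heap, HeapObject* object);  // hypothetical grey-er

void OnLateGreyObject(Heap* heap, HeapObject* object) {
  MarkGreyLate(heap, object);
  // If marking had already reached COMPLETE, fall back to MARKING so the new
  // work is drained before finalization; otherwise this is a no-op.
  heap->incremental_marking()->RestartIfNotMarking();
}
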
diff --git a/src/heap/incremental-marking-job.cc b/src/heap/incremental-marking-job.cc
index fe14dd0..393b9cc 100644
--- a/src/heap/incremental-marking-job.cc
+++ b/src/heap/incremental-marking-job.cc
@@ -14,131 +14,49 @@
 namespace v8 {
 namespace internal {
 
-const double IncrementalMarkingJob::kLongDelayInSeconds = 5;
-const double IncrementalMarkingJob::kShortDelayInSeconds = 0.5;
-
 void IncrementalMarkingJob::Start(Heap* heap) {
   DCHECK(!heap->incremental_marking()->IsStopped());
-  // We don't need to reset the flags because tasks from the previous job
-  // can still be pending. We just want to ensure that tasks are posted
-  // if they are not pending.
-  // If delayed task is pending and made_progress_since_last_delayed_task_ is
-  // true, then the delayed task will clear that flag when it is rescheduled.
-  ScheduleIdleTask(heap);
-  ScheduleDelayedTask(heap);
+  ScheduleTask(heap);
 }
 
+void IncrementalMarkingJob::NotifyTask() { task_pending_ = false; }
 
-void IncrementalMarkingJob::NotifyIdleTask() { idle_task_pending_ = false; }
-
-
-void IncrementalMarkingJob::NotifyDelayedTask() {
-  delayed_task_pending_ = false;
-}
-
-
-void IncrementalMarkingJob::NotifyIdleTaskProgress() {
-  made_progress_since_last_delayed_task_ = true;
-}
-
-
-void IncrementalMarkingJob::ScheduleIdleTask(Heap* heap) {
-  if (!idle_task_pending_) {
+void IncrementalMarkingJob::ScheduleTask(Heap* heap) {
+  if (!task_pending_) {
     v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
-    if (V8::GetCurrentPlatform()->IdleTasksEnabled(isolate)) {
-      idle_task_pending_ = true;
-      auto task = new IdleTask(heap->isolate(), this);
-      V8::GetCurrentPlatform()->CallIdleOnForegroundThread(isolate, task);
-    }
+    task_pending_ = true;
+    auto task = new Task(heap->isolate(), this);
+    V8::GetCurrentPlatform()->CallOnForegroundThread(isolate, task);
   }
 }
 
-
-void IncrementalMarkingJob::ScheduleDelayedTask(Heap* heap) {
-  if (!delayed_task_pending_ && FLAG_memory_reducer) {
-    v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(heap->isolate());
-    delayed_task_pending_ = true;
-    made_progress_since_last_delayed_task_ = false;
-    auto task = new DelayedTask(heap->isolate(), this);
-    double delay =
-        heap->HighMemoryPressure() ? kShortDelayInSeconds : kLongDelayInSeconds;
-    V8::GetCurrentPlatform()->CallDelayedOnForegroundThread(isolate, task,
-                                                            delay);
-  }
-}
-
-
-IncrementalMarkingJob::IdleTask::Progress IncrementalMarkingJob::IdleTask::Step(
-    Heap* heap, double deadline_in_ms) {
-  IncrementalMarking* incremental_marking = heap->incremental_marking();
-  if (incremental_marking->IsStopped()) {
-    return kDone;
-  }
-  if (incremental_marking->IsSweeping()) {
-    incremental_marking->FinalizeSweeping();
-    // TODO(hpayer): We can continue here if enough idle time is left.
-    return kMoreWork;
-  }
-  const double remaining_idle_time_in_ms =
-      incremental_marking->AdvanceIncrementalMarking(
-          deadline_in_ms, IncrementalMarking::IdleStepActions());
-  if (remaining_idle_time_in_ms > 0.0) {
-    heap->TryFinalizeIdleIncrementalMarking(remaining_idle_time_in_ms);
-  }
-  return incremental_marking->IsStopped() ? kDone : kMoreWork;
-}
-
-
-void IncrementalMarkingJob::IdleTask::RunInternal(double deadline_in_seconds) {
-  double deadline_in_ms =
-      deadline_in_seconds *
-      static_cast<double>(base::Time::kMillisecondsPerSecond);
-  Heap* heap = isolate()->heap();
-  double start_ms = heap->MonotonicallyIncreasingTimeInMs();
-  job_->NotifyIdleTask();
-  job_->NotifyIdleTaskProgress();
-  if (Step(heap, deadline_in_ms) == kMoreWork) {
-    job_->ScheduleIdleTask(heap);
-  }
-  if (FLAG_trace_idle_notification) {
-    double current_time_ms = heap->MonotonicallyIncreasingTimeInMs();
-    double idle_time_in_ms = deadline_in_ms - start_ms;
-    double deadline_difference = deadline_in_ms - current_time_ms;
-    PrintIsolate(isolate(), "%8.0f ms: ", isolate()->time_millis_since_init());
-    PrintF(
-        "Idle task: requested idle time %.2f ms, used idle time %.2f "
-        "ms, deadline usage %.2f ms\n",
-        idle_time_in_ms, idle_time_in_ms - deadline_difference,
-        deadline_difference);
-  }
-}
-
-
-void IncrementalMarkingJob::DelayedTask::Step(Heap* heap) {
-  const int kIncrementalMarkingDelayMs = 50;
+void IncrementalMarkingJob::Task::Step(Heap* heap) {
+  const int kIncrementalMarkingDelayMs = 1;
   double deadline =
       heap->MonotonicallyIncreasingTimeInMs() + kIncrementalMarkingDelayMs;
   heap->incremental_marking()->AdvanceIncrementalMarking(
-      deadline, i::IncrementalMarking::StepActions(
-                    i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
-                    i::IncrementalMarking::FORCE_MARKING,
-                    i::IncrementalMarking::FORCE_COMPLETION));
+      deadline, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+      i::IncrementalMarking::FORCE_COMPLETION, i::StepOrigin::kTask);
   heap->FinalizeIncrementalMarkingIfComplete(
-      "Incremental marking task: finalize incremental marking");
+      GarbageCollectionReason::kFinalizeMarkingViaTask);
 }
 
-
-void IncrementalMarkingJob::DelayedTask::RunInternal() {
+void IncrementalMarkingJob::Task::RunInternal() {
   Heap* heap = isolate()->heap();
-  job_->NotifyDelayedTask();
+  job_->NotifyTask();
   IncrementalMarking* incremental_marking = heap->incremental_marking();
-  if (!incremental_marking->IsStopped()) {
-    if (job_->ShouldForceMarkingStep()) {
-      Step(heap);
+  if (incremental_marking->IsStopped()) {
+    if (heap->IncrementalMarkingLimitReached() !=
+        Heap::IncrementalMarkingLimit::kNoLimit) {
+      heap->StartIncrementalMarking(Heap::kNoGCFlags,
+                                    GarbageCollectionReason::kIdleTask,
+                                    kNoGCCallbackFlags);
     }
-    // The Step() above could have finished incremental marking.
+  }
+  if (!incremental_marking->IsStopped()) {
+    Step(heap);
     if (!incremental_marking->IsStopped()) {
-      job_->ScheduleDelayedTask(heap);
+      job_->ScheduleTask(heap);
     }
   }
 }
diff --git a/src/heap/incremental-marking-job.h b/src/heap/incremental-marking-job.h
index 9c78182..ccc60c5 100644
--- a/src/heap/incremental-marking-job.h
+++ b/src/heap/incremental-marking-job.h
@@ -14,31 +14,13 @@
 class Isolate;
 
 // The incremental marking job uses platform tasks to perform incremental
-// marking steps. The job posts an idle and a delayed task with a large delay.
-// The delayed task performs steps only if the idle task is not making progress.
-// We expect this to be a rare event since incremental marking should finish
-// quickly with the help of the mutator and the idle task.
-// The delayed task guarantees that we eventually finish incremental marking
-// even if the mutator becomes idle and the platform stops running idle tasks,
-// which can happen for background tabs in Chrome.
+// marking steps. The job posts a foreground task that makes a small (~1ms)
+// step and posts another task until the marking is completed.
 class IncrementalMarkingJob {
  public:
-  class IdleTask : public CancelableIdleTask {
+  class Task : public CancelableTask {
    public:
-    explicit IdleTask(Isolate* isolate, IncrementalMarkingJob* job)
-        : CancelableIdleTask(isolate), job_(job) {}
-    enum Progress { kDone, kMoreWork };
-    static Progress Step(Heap* heap, double deadline_in_ms);
-    // CancelableIdleTask overrides.
-    void RunInternal(double deadline_in_seconds) override;
-
-   private:
-    IncrementalMarkingJob* job_;
-  };
-
-  class DelayedTask : public CancelableTask {
-   public:
-    explicit DelayedTask(Isolate* isolate, IncrementalMarkingJob* job)
+    explicit Task(Isolate* isolate, IncrementalMarkingJob* job)
         : CancelableTask(isolate), job_(job) {}
     static void Step(Heap* heap);
     // CancelableTask overrides.
@@ -48,33 +30,18 @@
     IncrementalMarkingJob* job_;
   };
 
-  // Delay of the delayed task.
-  static const double kLongDelayInSeconds;
-  static const double kShortDelayInSeconds;
+  IncrementalMarkingJob() : task_pending_(false) {}
 
-  IncrementalMarkingJob()
-      : idle_task_pending_(false),
-        delayed_task_pending_(false),
-        made_progress_since_last_delayed_task_(false) {}
-
-  bool ShouldForceMarkingStep() {
-    return !made_progress_since_last_delayed_task_;
-  }
-
-  bool IdleTaskPending() { return idle_task_pending_; }
+  bool TaskPending() { return task_pending_; }
 
   void Start(Heap* heap);
 
-  void NotifyIdleTask();
-  void NotifyDelayedTask();
-  void NotifyIdleTaskProgress();
-  void ScheduleIdleTask(Heap* heap);
-  void ScheduleDelayedTask(Heap* heap);
+  void NotifyTask();
+
+  void ScheduleTask(Heap* heap);
 
  private:
-  bool idle_task_pending_;
-  bool delayed_task_pending_;
-  bool made_progress_since_last_delayed_task_;
+  bool task_pending_;
 };
 }  // namespace internal
 }  // namespace v8
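
To summarize the protocol the trimmed-down header above implies (sketch, not
part of this patch): ScheduleTask() posts a foreground task only while
task_pending_ is clear, the task clears the flag again via NotifyTask() at the
start of RunInternal(), performs a roughly 1 ms marking step, and re-posts
itself while marking is still running. DoBoundedMarkingStep and
MarkingStillRunning below are hypothetical helpers:

// Sketch only: the self-rescheduling shape of IncrementalMarkingJob::Task.
void DoBoundedMarkingStep(Heap* heap);  // hypothetical ~1ms marking slice
bool MarkingStillRunning(Heap* heap);   // hypothetical completion check

class SketchTask : public CancelableTask {
 public:
  SketchTask(Isolate* isolate, IncrementalMarkingJob* job)
      : CancelableTask(isolate), job_(job) {}

  void RunInternal() override {
    Heap* heap = isolate()->heap();
    job_->NotifyTask();          // allow ScheduleTask() to post again
    DoBoundedMarkingStep(heap);
    if (MarkingStillRunning(heap)) {
      job_->ScheduleTask(heap);  // re-post until marking completes
    }
  }

 private:
  IncrementalMarkingJob* job_;
};
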
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index b9e7c61..579228c 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -19,33 +19,22 @@
 namespace v8 {
 namespace internal {
 
-IncrementalMarking::StepActions IncrementalMarking::IdleStepActions() {
-  return StepActions(IncrementalMarking::NO_GC_VIA_STACK_GUARD,
-                     IncrementalMarking::FORCE_MARKING,
-                     IncrementalMarking::DO_NOT_FORCE_COMPLETION);
-}
-
 IncrementalMarking::IncrementalMarking(Heap* heap)
     : heap_(heap),
-      observer_(*this, kAllocatedThreshold),
       state_(STOPPED),
-      is_compacting_(false),
-      steps_count_(0),
-      old_generation_space_available_at_start_of_incremental_(0),
-      old_generation_space_used_at_start_of_incremental_(0),
-      bytes_rescanned_(0),
-      should_hurry_(false),
-      marking_speed_(0),
-      bytes_scanned_(0),
-      allocated_(0),
-      write_barriers_invoked_since_last_step_(0),
-      idle_marking_delay_counter_(0),
+      initial_old_generation_size_(0),
+      bytes_marked_ahead_of_schedule_(0),
       unscanned_bytes_of_large_object_(0),
+      idle_marking_delay_counter_(0),
+      incremental_marking_finalization_rounds_(0),
+      is_compacting_(false),
+      should_hurry_(false),
       was_activated_(false),
       black_allocation_(false),
       finalize_marking_completed_(false),
-      incremental_marking_finalization_rounds_(0),
-      request_type_(NONE) {}
+      request_type_(NONE),
+      new_generation_observer_(*this, kAllocatedThreshold),
+      old_generation_observer_(*this, kAllocatedThreshold) {}
 
 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
@@ -76,19 +65,7 @@
 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj, Object** slot,
                                              Isolate* isolate) {
   DCHECK(obj->IsHeapObject());
-  IncrementalMarking* marking = isolate->heap()->incremental_marking();
-
-  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-  int counter = chunk->write_barrier_counter();
-  if (counter < (MemoryChunk::kWriteBarrierCounterGranularity / 2)) {
-    marking->write_barriers_invoked_since_last_step_ +=
-        MemoryChunk::kWriteBarrierCounterGranularity -
-        chunk->write_barrier_counter();
-    chunk->set_write_barrier_counter(
-        MemoryChunk::kWriteBarrierCounterGranularity);
-  }
-
-  marking->RecordWrite(obj, slot, *slot);
+  isolate->heap()->incremental_marking()->RecordWrite(obj, slot, *slot);
 }
 
 // static
@@ -202,20 +179,15 @@
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
     table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
-    table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
   static const int kProgressBarScanningChunk = 32 * 1024;
 
   static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
     MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
-    // TODO(mstarzinger): Move setting of the flag to the allocation site of
-    // the array. The visitor should just check the flag.
-    if (FLAG_use_marking_progress_bar &&
-        chunk->owner()->identity() == LO_SPACE) {
-      chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
-    }
     if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+      DCHECK(!FLAG_use_marking_progress_bar ||
+             chunk->owner()->identity() == LO_SPACE);
       Heap* heap = map->GetHeap();
       // When using a progress bar for large fixed arrays, scan only a chunk of
       // the array and try to push it onto the marking deque again until it is
@@ -423,22 +395,6 @@
 }
 
 
-bool IncrementalMarking::ShouldActivateEvenWithoutIdleNotification() {
-#ifndef DEBUG
-  static const intptr_t kActivationThreshold = 8 * MB;
-#else
-  // TODO(gc) consider setting this to some low level so that some
-  // debug tests run with incremental marking and some without.
-  static const intptr_t kActivationThreshold = 0;
-#endif
-  // Don't switch on for very small heaps.
-  return CanBeActivated() &&
-         heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold &&
-         heap_->HeapIsFullEnoughToStartIncrementalMarking(
-             heap_->old_generation_allocation_limit());
-}
-
-
 bool IncrementalMarking::WasActivated() { return was_activated_; }
 
 
@@ -467,21 +423,6 @@
 }
 
 
-void IncrementalMarking::NotifyOfHighPromotionRate() {
-  if (IsMarking()) {
-    if (marking_speed_ < kFastMarking) {
-      if (FLAG_trace_gc) {
-        PrintIsolate(heap()->isolate(),
-                     "Increasing marking speed to %d "
-                     "due to high promotion rate\n",
-                     static_cast<int>(kFastMarking));
-      }
-      marking_speed_ = kFastMarking;
-    }
-  }
-}
-
-
 static void PatchIncrementalMarkingRecordWriteStubs(
     Heap* heap, RecordWriteStub::Mode mode) {
   UnseededNumberDictionary* stubs = heap->code_stubs();
@@ -503,34 +444,60 @@
   }
 }
 
-
-void IncrementalMarking::Start(const char* reason) {
+void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Start (%s)\n",
-           (reason == nullptr) ? "unknown reason" : reason);
+    int old_generation_size_mb =
+        static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
+    int old_generation_limit_mb =
+        static_cast<int>(heap()->old_generation_allocation_limit() / MB);
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Start (%s): old generation %dMB, limit %dMB, "
+        "slack %dMB\n",
+        Heap::GarbageCollectionReasonToString(gc_reason),
+        old_generation_size_mb, old_generation_limit_mb,
+        Max(0, old_generation_limit_mb - old_generation_size_mb));
   }
   DCHECK(FLAG_incremental_marking);
   DCHECK(state_ == STOPPED);
   DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
   DCHECK(!heap_->isolate()->serializer_enabled());
 
-  HistogramTimerScope incremental_marking_scope(
-      heap_->isolate()->counters()->gc_incremental_marking_start());
-  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
-  ResetStepCounters();
+  Counters* counters = heap_->isolate()->counters();
 
+  counters->incremental_marking_reason()->AddSample(
+      static_cast<int>(gc_reason));
+  HistogramTimerScope incremental_marking_scope(
+      counters->gc_incremental_marking_start());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarkingStart");
+  heap_->tracer()->NotifyIncrementalMarkingStart();
+
+  start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
+  initial_old_generation_size_ = heap_->PromotedSpaceSizeOfObjects();
+  old_generation_allocation_counter_ = heap_->OldGenerationAllocationCounter();
+  bytes_allocated_ = 0;
+  bytes_marked_ahead_of_schedule_ = 0;
+  should_hurry_ = false;
   was_activated_ = true;
 
   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
     StartMarking();
   } else {
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Start sweeping.\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Start sweeping.\n");
     }
     state_ = SWEEPING;
   }
 
-  heap_->new_space()->AddAllocationObserver(&observer_);
+  SpaceIterator it(heap_);
+  while (it.has_next()) {
+    Space* space = it.next();
+    if (space == heap_->new_space()) {
+      space->AddAllocationObserver(&new_generation_observer_);
+    } else {
+      space->AddAllocationObserver(&old_generation_observer_);
+    }
+  }
 
   incremental_marking_job()->Start(heap_);
 }
@@ -542,12 +509,14 @@
     // but we cannot enable black allocation while deserializing. Hence, we
     // have to delay the start of incremental marking in that case.
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Start delayed - serializer\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Start delayed - serializer\n");
     }
     return;
   }
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Start marking\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Start marking\n");
   }
 
   is_compacting_ = !FLAG_never_compact &&
@@ -559,7 +528,8 @@
   if (heap_->UsingEmbedderHeapTracer()) {
     TRACE_GC(heap()->tracer(),
              GCTracer::Scope::MC_INCREMENTAL_WRAPPER_PROLOGUE);
-    heap_->mark_compact_collector()->embedder_heap_tracer()->TracePrologue();
+    heap_->embedder_heap_tracer()->TracePrologue(
+        heap_->embedder_reachable_reference_reporter());
   }
 
   RecordWriteStub::Mode mode = is_compacting_
@@ -589,7 +559,7 @@
 
   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Running\n");
+    heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Running\n");
   }
 }
 
@@ -601,7 +571,8 @@
   heap()->map_space()->MarkAllocationInfoBlack();
   heap()->code_space()->MarkAllocationInfoBlack();
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Black allocation started\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Black allocation started\n");
   }
 }
 
@@ -609,11 +580,22 @@
   if (black_allocation_) {
     black_allocation_ = false;
     if (FLAG_trace_incremental_marking) {
-      PrintF("[IncrementalMarking] Black allocation finished\n");
+      heap()->isolate()->PrintWithTimestamp(
+          "[IncrementalMarking] Black allocation finished\n");
     }
   }
 }
 
+void IncrementalMarking::AbortBlackAllocation() {
+  for (Page* page : *heap()->old_space()) {
+    page->ReleaseBlackAreaEndMarkerMap();
+  }
+  if (FLAG_trace_incremental_marking) {
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Black allocation aborted\n");
+  }
+}
+
 void IncrementalMarking::MarkRoots() {
   DCHECK(!finalize_marking_completed_);
   DCHECK(IsMarking());
@@ -742,7 +724,6 @@
   }
 }
 
-
 void IncrementalMarking::FinalizeIncrementally() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_FINALIZE_BODY);
   DCHECK(!finalize_marking_completed_);
@@ -775,11 +756,12 @@
       abs(old_marking_deque_top -
           heap_->mark_compact_collector()->marking_deque()->top());
 
+  marking_progress += static_cast<int>(heap_->wrappers_to_trace());
+
   double end = heap_->MonotonicallyIncreasingTimeInMs();
   double delta = end - start;
-  heap_->tracer()->AddMarkingTime(delta);
   if (FLAG_trace_incremental_marking) {
-    PrintF(
+    heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] Finalize incrementally round %d, "
         "spent %d ms, marking progress %d.\n",
        static_cast<int>(delta), incremental_marking_finalization_rounds_,
        marking_progress);
@@ -926,23 +908,23 @@
   // because should_hurry_ will force a full GC.
   if (!heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
     double start = 0.0;
-    if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
+    if (FLAG_trace_incremental_marking) {
       start = heap_->MonotonicallyIncreasingTimeInMs();
       if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Hurry\n");
+        heap()->isolate()->PrintWithTimestamp("[IncrementalMarking] Hurry\n");
       }
     }
     // TODO(gc) hurry can mark objects it encounters black as mutator
     // was stopped.
     ProcessMarkingDeque(0, FORCE_COMPLETION);
     state_ = COMPLETE;
-    if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
+    if (FLAG_trace_incremental_marking) {
       double end = heap_->MonotonicallyIncreasingTimeInMs();
       double delta = end - start;
-      heap_->tracer()->AddMarkingTime(delta);
       if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
-               static_cast<int>(delta));
+        heap()->isolate()->PrintWithTimestamp(
+            "[IncrementalMarking] Complete (hurry), spent %d ms.\n",
+            static_cast<int>(delta));
       }
     }
   }
@@ -968,12 +950,28 @@
 void IncrementalMarking::Stop() {
   if (IsStopped()) return;
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Stopping.\n");
+    int old_generation_size_mb =
+        static_cast<int>(heap()->PromotedSpaceSizeOfObjects() / MB);
+    int old_generation_limit_mb =
+        static_cast<int>(heap()->old_generation_allocation_limit() / MB);
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Stopping: old generation %dMB, limit %dMB, "
+        "overshoot %dMB\n",
+        old_generation_size_mb, old_generation_limit_mb,
+        Max(0, old_generation_size_mb - old_generation_limit_mb));
   }
 
-  heap_->new_space()->RemoveAllocationObserver(&observer_);
+  SpaceIterator it(heap_);
+  while (it.has_next()) {
+    Space* space = it.next();
+    if (space == heap_->new_space()) {
+      space->RemoveAllocationObserver(&new_generation_observer_);
+    } else {
+      space->RemoveAllocationObserver(&old_generation_observer_);
+    }
+  }
+
   IncrementalMarking::set_should_hurry(false);
-  ResetStepCounters();
   if (IsMarking()) {
     PatchIncrementalMarkingRecordWriteStubs(heap_,
                                             RecordWriteStub::STORE_BUFFER_ONLY);
@@ -995,7 +993,7 @@
 void IncrementalMarking::FinalizeMarking(CompletionAction action) {
   DCHECK(!finalize_marking_completed_);
   if (FLAG_trace_incremental_marking) {
-    PrintF(
+    heap()->isolate()->PrintWithTimestamp(
         "[IncrementalMarking] requesting finalization of incremental "
         "marking.\n");
   }
@@ -1015,7 +1013,8 @@
   // the should-hurry flag to indicate that there can't be much work left to do.
   set_should_hurry(true);
   if (FLAG_trace_incremental_marking) {
-    PrintF("[IncrementalMarking] Complete (normal).\n");
+    heap()->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Complete (normal).\n");
   }
   request_type_ = COMPLETE_MARKING;
   if (action == GC_VIA_STACK_GUARD) {
@@ -1031,246 +1030,163 @@
 }
 
 double IncrementalMarking::AdvanceIncrementalMarking(
-    double deadline_in_ms, IncrementalMarking::StepActions step_actions) {
+    double deadline_in_ms, CompletionAction completion_action,
+    ForceCompletionAction force_completion, StepOrigin step_origin) {
   DCHECK(!IsStopped());
 
-  intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
-      GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs,
-      heap()
-          ->tracer()
-          ->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
   double remaining_time_in_ms = 0.0;
-  intptr_t bytes_processed = 0;
+  intptr_t step_size_in_bytes = GCIdleTimeHandler::EstimateMarkingStepSize(
+      kStepSizeInMs,
+      heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
 
   do {
-    bytes_processed =
-        Step(step_size_in_bytes, step_actions.completion_action,
-             step_actions.force_marking, step_actions.force_completion);
+    Step(step_size_in_bytes, completion_action, force_completion, step_origin);
     remaining_time_in_ms =
         deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
-  } while (bytes_processed > 0 &&
-           remaining_time_in_ms >=
-               2.0 * GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs &&
-           !IsComplete() &&
+  } while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
            !heap()->mark_compact_collector()->marking_deque()->IsEmpty());
   return remaining_time_in_ms;
 }
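
The loop above takes roughly kStepSizeInMs-sized steps while at least one more step fits before the deadline, then hands the leftover time back to the caller. A self-contained sketch of that control flow; SimulatedStep() is a stand-in for Step(), nothing here is V8 API:

#include <chrono>
#include <cstdio>
#include <thread>

static double NowMs() {
  using namespace std::chrono;
  return duration<double, std::milli>(
             steady_clock::now().time_since_epoch()).count();
}

static void SimulatedStep() {
  // Pretend a step costs about one millisecond of marking work.
  std::this_thread::sleep_for(std::chrono::milliseconds(1));
}

int main() {
  const double kStepSizeInMs = 1.0;
  const double deadline_ms = NowMs() + 8.0;  // e.g. the rest of an idle period
  int steps = 0;
  double remaining_ms = 0.0;
  do {
    SimulatedStep();
    ++steps;
    remaining_ms = deadline_ms - NowMs();
  } while (remaining_ms >= kStepSizeInMs);
  std::printf("%d steps taken, %.2f ms handed back\n", steps, remaining_ms);
  return 0;
}
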
 
 
-void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
-  if (IsStopped() && ShouldActivateEvenWithoutIdleNotification()) {
-    heap()->StartIncrementalMarking(Heap::kNoGCFlags, kNoGCCallbackFlags,
-                                    "old space step");
-  } else {
-    Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD);
-  }
-}
-
-
-void IncrementalMarking::SpeedUp() {
-  bool speed_up = false;
-
-  if ((steps_count_ % kMarkingSpeedAccellerationInterval) == 0) {
-    if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(), "Speed up marking after %d steps\n",
-                   static_cast<int>(kMarkingSpeedAccellerationInterval));
-    }
-    speed_up = true;
-  }
-
-  bool space_left_is_very_small =
-      (old_generation_space_available_at_start_of_incremental_ < 10 * MB);
-
-  bool only_1_nth_of_space_that_was_available_still_left =
-      (SpaceLeftInOldSpace() * (marking_speed_ + 1) <
-       old_generation_space_available_at_start_of_incremental_);
-
-  if (space_left_is_very_small ||
-      only_1_nth_of_space_that_was_available_still_left) {
-    if (FLAG_trace_incremental_marking)
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because of low space left\n");
-    speed_up = true;
-  }
-
-  bool size_of_old_space_multiplied_by_n_during_marking =
-      (heap_->PromotedTotalSize() >
-       (marking_speed_ + 1) *
-           old_generation_space_used_at_start_of_incremental_);
-  if (size_of_old_space_multiplied_by_n_during_marking) {
-    speed_up = true;
-    if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because of heap size increase\n");
-    }
-  }
-
-  int64_t promoted_during_marking =
-      heap_->PromotedTotalSize() -
-      old_generation_space_used_at_start_of_incremental_;
-  intptr_t delay = marking_speed_ * MB;
-  intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();
-
-  // We try to scan at at least twice the speed that we are allocating.
-  if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
-    if (FLAG_trace_incremental_marking) {
-      PrintIsolate(heap()->isolate(),
-                   "Speed up marking because marker was not keeping up\n");
-    }
-    speed_up = true;
-  }
-
-  if (speed_up) {
-    if (state_ != MARKING) {
-      if (FLAG_trace_incremental_marking) {
-        PrintIsolate(heap()->isolate(),
-                     "Postponing speeding up marking until marking starts\n");
-      }
-    } else {
-      marking_speed_ += kMarkingSpeedAccelleration;
-      marking_speed_ = static_cast<int>(
-          Min(kMaxMarkingSpeed, static_cast<intptr_t>(marking_speed_ * 1.3)));
-      if (FLAG_trace_incremental_marking) {
-        PrintIsolate(heap()->isolate(), "Marking speed increased to %d\n",
-                     marking_speed_);
-      }
-    }
-  }
-}
-
 void IncrementalMarking::FinalizeSweeping() {
   DCHECK(state_ == SWEEPING);
   if (heap_->mark_compact_collector()->sweeping_in_progress() &&
-      (heap_->mark_compact_collector()->sweeper().IsSweepingCompleted() ||
-       !FLAG_concurrent_sweeping)) {
+      (!FLAG_concurrent_sweeping ||
+       heap_->mark_compact_collector()->sweeper().IsSweepingCompleted())) {
     heap_->mark_compact_collector()->EnsureSweepingCompleted();
   }
   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
-    bytes_scanned_ = 0;
     StartMarking();
   }
 }
 
-intptr_t IncrementalMarking::Step(intptr_t allocated_bytes,
-                                  CompletionAction action,
-                                  ForceMarkingAction marking,
-                                  ForceCompletionAction completion) {
-  DCHECK(allocated_bytes >= 0);
+size_t IncrementalMarking::StepSizeToKeepUpWithAllocations() {
+  // Update bytes_allocated_ based on the allocation counter.
+  size_t current_counter = heap_->OldGenerationAllocationCounter();
+  bytes_allocated_ += current_counter - old_generation_allocation_counter_;
+  old_generation_allocation_counter_ = current_counter;
+  return bytes_allocated_;
+}
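
StepSizeToKeepUpWithAllocations() charges the marker for exactly the bytes allocated since the previous poll by diffing a monotonically increasing counter. A small standalone illustration of that bookkeeping, with the allocation counter simulated by a local variable rather than a V8 call:

#include <cstddef>
#include <cstdio>

int main() {
  size_t allocation_counter = 10 * 1024 * 1024;  // monotonically increasing
  size_t last_seen_counter = allocation_counter;
  size_t bytes_allocated = 0;

  // Only the delta since the last poll is added to the debt, so each
  // allocated byte is charged once.
  auto poll = [&]() {
    bytes_allocated += allocation_counter - last_seen_counter;
    last_seen_counter = allocation_counter;
    return bytes_allocated;
  };

  allocation_counter += 300 * 1024;  // mutator allocates 300 KB
  std::printf("debt after poll 1: %zu KB\n", poll() / 1024);
  allocation_counter += 100 * 1024;  // and another 100 KB
  std::printf("debt after poll 2: %zu KB\n", poll() / 1024);
  return 0;
}
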
 
+size_t IncrementalMarking::StepSizeToMakeProgress() {
+  // We increase step size gradually based on the time passed in order to
+  // leave marking work to standalone tasks. The ramp up duration and the
+  // target step count are chosen based on benchmarks.
+  const int kRampUpIntervalMs = 300;
+  const size_t kTargetStepCount = 128;
+  size_t step_size = Max(initial_old_generation_size_ / kTargetStepCount,
+                         IncrementalMarking::kAllocatedThreshold);
+  double time_passed_ms =
+      heap_->MonotonicallyIncreasingTimeInMs() - start_time_ms_;
+  double factor = Min(time_passed_ms / kRampUpIntervalMs, 1.0);
+  return static_cast<size_t>(factor * step_size);
+}
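
To make the ramp-up concrete, here is a minimal standalone version of the schedule computed above: the full step is the larger of 1/128th of the initial old generation and 64 KB, scaled linearly over the first 300 ms of marking. Constants are copied from the diff; kKB and kMB are local stand-ins for V8's KB and MB:

#include <algorithm>
#include <cstddef>
#include <cstdio>

constexpr size_t kKB = 1024;
constexpr size_t kMB = 1024 * kKB;
constexpr size_t kAllocatedThreshold = 64 * kKB;
constexpr int kRampUpIntervalMs = 300;
constexpr size_t kTargetStepCount = 128;

size_t StepSizeToMakeProgress(size_t initial_old_generation_size,
                              double time_passed_ms) {
  size_t step_size = std::max(initial_old_generation_size / kTargetStepCount,
                              kAllocatedThreshold);
  double factor = std::min(time_passed_ms / kRampUpIntervalMs, 1.0);
  return static_cast<size_t>(factor * step_size);
}

int main() {
  // With a 64 MB old generation the full step is 512 KB, reached after the
  // 300 ms ramp-up; before that the step grows linearly with elapsed time.
  for (double t : {0.0, 75.0, 150.0, 300.0, 600.0}) {
    std::printf("t=%5.0f ms -> step %zu KB\n", t,
                StepSizeToMakeProgress(64 * kMB, t) / kKB);
  }
  return 0;
}
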
+
+void IncrementalMarking::AdvanceIncrementalMarkingOnAllocation() {
   if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
       (state_ != SWEEPING && state_ != MARKING)) {
-    return 0;
+    return;
   }
 
-  allocated_ += allocated_bytes;
+  size_t bytes_to_process =
+      StepSizeToKeepUpWithAllocations() + StepSizeToMakeProgress();
 
-  if (marking == DO_NOT_FORCE_MARKING && allocated_ < kAllocatedThreshold &&
-      write_barriers_invoked_since_last_step_ <
-          kWriteBarriersInvokedThreshold) {
-    return 0;
-  }
+  if (bytes_to_process >= IncrementalMarking::kAllocatedThreshold) {
+    // The first step after Scavenge will see many allocated bytes.
+    // Cap the step size to distribute the marking work more uniformly.
+    size_t max_step_size = GCIdleTimeHandler::EstimateMarkingStepSize(
+        kMaxStepSizeInMs,
+        heap()->tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
+    bytes_to_process = Min(bytes_to_process, max_step_size);
 
-  // If an idle notification happened recently, we delay marking steps.
-  if (marking == DO_NOT_FORCE_MARKING &&
-      heap_->RecentIdleNotificationHappened()) {
-    return 0;
-  }
-
-  intptr_t bytes_processed = 0;
-  {
-    HistogramTimerScope incremental_marking_scope(
-        heap_->isolate()->counters()->gc_incremental_marking());
-    TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
-    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
-    double start = heap_->MonotonicallyIncreasingTimeInMs();
-
-    // The marking speed is driven either by the allocation rate or by the rate
-    // at which we are having to check the color of objects in the write
-    // barrier.
-    // It is possible for a tight non-allocating loop to run a lot of write
-    // barriers before we get here and check them (marking can only take place
-    // on
-    // allocation), so to reduce the lumpiness we don't use the write barriers
-    // invoked since last step directly to determine the amount of work to do.
-    intptr_t bytes_to_process =
-        marking_speed_ *
-        Max(allocated_, write_barriers_invoked_since_last_step_);
-    allocated_ = 0;
-    write_barriers_invoked_since_last_step_ = 0;
-
-    bytes_scanned_ += bytes_to_process;
-
-    // TODO(hpayer): Do not account for sweeping finalization while marking.
-    if (state_ == SWEEPING) {
-      FinalizeSweeping();
+    size_t bytes_processed = 0;
+    if (bytes_marked_ahead_of_schedule_ >= bytes_to_process) {
+      // Steps performed in tasks have put us ahead of schedule.
+      // We skip processing of the marking deque here and thus
+      // shift marking time from inside V8 to standalone tasks.
+      bytes_marked_ahead_of_schedule_ -= bytes_to_process;
+      bytes_processed = bytes_to_process;
+    } else {
+      bytes_processed = Step(bytes_to_process, GC_VIA_STACK_GUARD,
+                             FORCE_COMPLETION, StepOrigin::kV8);
     }
+    bytes_allocated_ -= Min(bytes_allocated_, bytes_processed);
+  }
+}
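
The interesting part of the allocation-driven path is the credit mechanism: bytes already marked by background tasks (bytes_marked_ahead_of_schedule_) can be spent instead of doing the work again. A simplified standalone model of that budgeting; it folds the two step-size terms into one argument and ignores the speed-based cap, and all names are local rather than V8 API:

#include <algorithm>
#include <cstddef>
#include <cstdio>

struct ToyBudget {
  size_t bytes_allocated = 0;
  size_t bytes_marked_ahead_of_schedule = 0;

  // Allocation builds up debt, early marking builds up credit, and a step
  // first spends the credit before doing real work.
  size_t OnAllocation(size_t newly_allocated_bytes, size_t max_step_size) {
    bytes_allocated += newly_allocated_bytes;
    size_t bytes_to_process = std::min(bytes_allocated, max_step_size);
    size_t bytes_processed;
    if (bytes_marked_ahead_of_schedule >= bytes_to_process) {
      bytes_marked_ahead_of_schedule -= bytes_to_process;  // spend the credit
      bytes_processed = bytes_to_process;
    } else {
      bytes_processed = bytes_to_process;  // the real code calls Step() here
    }
    bytes_allocated -= std::min(bytes_allocated, bytes_processed);
    return bytes_processed;
  }
};

int main() {
  ToyBudget budget;
  budget.bytes_marked_ahead_of_schedule = 256 * 1024;  // marked by a task
  size_t processed = budget.OnAllocation(128 * 1024, 512 * 1024);
  std::printf("step processed %zu KB, credit left %zu KB\n", processed / 1024,
              budget.bytes_marked_ahead_of_schedule / 1024);
  return 0;
}
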
 
-    if (state_ == MARKING) {
+size_t IncrementalMarking::Step(size_t bytes_to_process,
+                                CompletionAction action,
+                                ForceCompletionAction completion,
+                                StepOrigin step_origin) {
+  HistogramTimerScope incremental_marking_scope(
+      heap_->isolate()->counters()->gc_incremental_marking());
+  TRACE_EVENT0("v8", "V8.GCIncrementalMarking");
+  TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL);
+  double start = heap_->MonotonicallyIncreasingTimeInMs();
+
+  if (state_ == SWEEPING) {
+    TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_INCREMENTAL_SWEEPING);
+    FinalizeSweeping();
+  }
+
+  size_t bytes_processed = 0;
+  if (state_ == MARKING) {
+    const bool incremental_wrapper_tracing =
+        FLAG_incremental_marking_wrappers && heap_->UsingEmbedderHeapTracer();
+    const bool process_wrappers =
+        incremental_wrapper_tracing &&
+        (heap_->RequiresImmediateWrapperProcessing() ||
+         heap_->mark_compact_collector()->marking_deque()->IsEmpty());
+    bool wrapper_work_left = incremental_wrapper_tracing;
+    if (!process_wrappers) {
       bytes_processed = ProcessMarkingDeque(bytes_to_process);
-      if (FLAG_incremental_marking_wrappers &&
-          heap_->UsingEmbedderHeapTracer()) {
-        TRACE_GC(heap()->tracer(),
-                 GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
-        // This currently marks through all registered wrappers and does not
-        // respect bytes_to_process.
-        // TODO(hpayer): Integrate incremental marking of wrappers into
-        // bytes_to_process logic.
-        heap_->mark_compact_collector()
-            ->RegisterWrappersWithEmbedderHeapTracer();
-        heap_->mark_compact_collector()->embedder_heap_tracer()->AdvanceTracing(
-            0,
-            EmbedderHeapTracer::AdvanceTracingActions(
-                EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
+      if (step_origin == StepOrigin::kTask) {
+        bytes_marked_ahead_of_schedule_ += bytes_processed;
       }
-      if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
-        if (completion == FORCE_COMPLETION ||
-            IsIdleMarkingDelayCounterLimitReached()) {
-          if (!finalize_marking_completed_) {
-            FinalizeMarking(action);
-          } else {
-            MarkingComplete(action);
-          }
-        } else {
-          IncrementIdleMarkingDelayCounter();
-        }
-      }
+    } else {
+      const double wrapper_deadline =
+          heap_->MonotonicallyIncreasingTimeInMs() + kStepSizeInMs;
+      TRACE_GC(heap()->tracer(),
+               GCTracer::Scope::MC_INCREMENTAL_WRAPPER_TRACING);
+      heap_->RegisterWrappersWithEmbedderHeapTracer();
+      wrapper_work_left = heap_->embedder_heap_tracer()->AdvanceTracing(
+          wrapper_deadline, EmbedderHeapTracer::AdvanceTracingActions(
+                                EmbedderHeapTracer::ForceCompletionAction::
+                                    DO_NOT_FORCE_COMPLETION));
     }
 
-    steps_count_++;
+    if (heap_->mark_compact_collector()->marking_deque()->IsEmpty() &&
+        !wrapper_work_left) {
+      if (completion == FORCE_COMPLETION ||
+          IsIdleMarkingDelayCounterLimitReached()) {
+        if (!finalize_marking_completed_) {
+          FinalizeMarking(action);
+        } else {
+          MarkingComplete(action);
+        }
+      } else {
+        IncrementIdleMarkingDelayCounter();
+      }
+    }
+  }
 
-    // Speed up marking if we are marking too slow or if we are almost done
-    // with marking.
-    SpeedUp();
-
-    double end = heap_->MonotonicallyIncreasingTimeInMs();
-    double duration = (end - start);
-    // Note that we report zero bytes here when sweeping was in progress or
-    // when we just started incremental marking. In these cases we did not
-    // process the marking deque.
-    heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
+  double end = heap_->MonotonicallyIncreasingTimeInMs();
+  double duration = (end - start);
+  // Note that we report zero bytes here when sweeping was in progress or
+  // when we just started incremental marking. In these cases we did not
+  // process the marking deque.
+  heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
+  if (FLAG_trace_incremental_marking) {
+    heap_->isolate()->PrintWithTimestamp(
+        "[IncrementalMarking] Step %s %zu bytes (%zu) in %.1f\n",
+        step_origin == StepOrigin::kV8 ? "in v8" : "in task", bytes_processed,
+        bytes_to_process, duration);
   }
   return bytes_processed;
 }
 
 
-void IncrementalMarking::ResetStepCounters() {
-  steps_count_ = 0;
-  old_generation_space_available_at_start_of_incremental_ =
-      SpaceLeftInOldSpace();
-  old_generation_space_used_at_start_of_incremental_ =
-      heap_->PromotedTotalSize();
-  bytes_rescanned_ = 0;
-  marking_speed_ = kInitialMarkingSpeed;
-  bytes_scanned_ = 0;
-  write_barriers_invoked_since_last_step_ = 0;
-}
-
-
-int64_t IncrementalMarking::SpaceLeftInOldSpace() {
-  return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
-}
-
-
 bool IncrementalMarking::IsIdleMarkingDelayCounterLimitReached() {
   return idle_marking_delay_counter_ > kMaxIdleMarkingDelayCounter;
 }
@@ -1284,5 +1200,6 @@
 void IncrementalMarking::ClearIdleMarkingDelayCounter() {
   idle_marking_delay_counter_ = 0;
 }
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/heap/incremental-marking.h b/src/heap/incremental-marking.h
index 877f05e..c2290c4 100644
--- a/src/heap/incremental-marking.h
+++ b/src/heap/incremental-marking.h
@@ -20,33 +20,18 @@
 class MarkBit;
 class PagedSpace;
 
+enum class StepOrigin { kV8, kTask };
+
 class IncrementalMarking {
  public:
   enum State { STOPPED, SWEEPING, MARKING, COMPLETE };
 
   enum CompletionAction { GC_VIA_STACK_GUARD, NO_GC_VIA_STACK_GUARD };
 
-  enum ForceMarkingAction { FORCE_MARKING, DO_NOT_FORCE_MARKING };
-
   enum ForceCompletionAction { FORCE_COMPLETION, DO_NOT_FORCE_COMPLETION };
 
   enum GCRequestType { NONE, COMPLETE_MARKING, FINALIZATION };
 
-  struct StepActions {
-    StepActions(CompletionAction complete_action_,
-                ForceMarkingAction force_marking_,
-                ForceCompletionAction force_completion_)
-        : completion_action(complete_action_),
-          force_marking(force_marking_),
-          force_completion(force_completion_) {}
-
-    CompletionAction completion_action;
-    ForceMarkingAction force_marking;
-    ForceCompletionAction force_completion;
-  };
-
-  static StepActions IdleStepActions();
-
   explicit IncrementalMarking(Heap* heap);
 
   static void Initialize();
@@ -87,11 +72,9 @@
 
   bool CanBeActivated();
 
-  bool ShouldActivateEvenWithoutIdleNotification();
-
   bool WasActivated();
 
-  void Start(const char* reason = nullptr);
+  void Start(GarbageCollectionReason gc_reason);
 
   void FinalizeIncrementally();
 
@@ -113,7 +96,9 @@
   // returns the remaining time that cannot be used for incremental marking
   // anymore because a single step would exceed the deadline.
   double AdvanceIncrementalMarking(double deadline_in_ms,
-                                   StepActions step_actions);
+                                   CompletionAction completion_action,
+                                   ForceCompletionAction force_completion,
+                                   StepOrigin step_origin);
 
   // It's hard to know how much work the incremental marker should do to make
   // progress in the face of the mutator creating new work for it.  We start
@@ -121,39 +106,27 @@
   // incremental marker until it completes.
   // Do some marking every time this much memory has been allocated or that many
   // heavy (color-checking) write barriers have been invoked.
-  static const intptr_t kAllocatedThreshold = 65536;
-  static const intptr_t kWriteBarriersInvokedThreshold = 32768;
-  // Start off by marking this many times more memory than has been allocated.
-  static const intptr_t kInitialMarkingSpeed = 1;
-  // But if we are promoting a lot of data we need to mark faster to keep up
-  // with the data that is entering the old space through promotion.
-  static const intptr_t kFastMarking = 3;
-  // After this many steps we increase the marking/allocating factor.
-  static const intptr_t kMarkingSpeedAccellerationInterval = 1024;
-  // This is how much we increase the marking/allocating factor by.
-  static const intptr_t kMarkingSpeedAccelleration = 2;
-  static const intptr_t kMaxMarkingSpeed = 1000;
+  static const size_t kAllocatedThreshold = 64 * KB;
+
+  static const int kStepSizeInMs = 1;
+  static const int kMaxStepSizeInMs = 5;
 
   // This is the upper bound for how many times we allow finalization of
   // incremental marking to be postponed.
-  static const size_t kMaxIdleMarkingDelayCounter = 3;
+  static const int kMaxIdleMarkingDelayCounter = 3;
+
+#ifndef DEBUG
+  static const intptr_t kActivationThreshold = 8 * MB;
+#else
+  static const intptr_t kActivationThreshold = 0;
+#endif
 
   void FinalizeSweeping();
 
-  void OldSpaceStep(intptr_t allocated);
+  size_t Step(size_t bytes_to_process, CompletionAction action,
+              ForceCompletionAction completion, StepOrigin step_origin);
 
-  intptr_t Step(intptr_t allocated, CompletionAction action,
-                ForceMarkingAction marking = DO_NOT_FORCE_MARKING,
-                ForceCompletionAction completion = FORCE_COMPLETION);
-
-  inline void RestartIfNotMarking() {
-    if (state_ == COMPLETE) {
-      state_ = MARKING;
-      if (FLAG_trace_incremental_marking) {
-        PrintF("[IncrementalMarking] Restarting (new grey objects)\n");
-      }
-    }
-  }
+  inline void RestartIfNotMarking();
 
   static void RecordWriteFromCode(HeapObject* obj, Object** slot,
                                   Isolate* isolate);
@@ -173,8 +146,8 @@
   INLINE(void RecordWriteOfCodeEntry(JSFunction* host, Object** slot,
                                      Code* value));
 
-
-  void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
+  V8_EXPORT_PRIVATE void RecordWriteSlow(HeapObject* obj, Object** slot,
+                                         Object* value);
   void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* value);
   void RecordWriteOfCodeEntrySlow(JSFunction* host, Object** slot, Code* value);
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
@@ -194,8 +167,6 @@
 
   void ActivateGeneratedStub(Code* stub);
 
-  void NotifyOfHighPromotionRate();
-
   void NotifyIncompleteScanOfObject(int unscanned_bytes) {
     unscanned_bytes_of_large_object_ = unscanned_bytes;
   }
@@ -244,6 +215,8 @@
 
   void StartBlackAllocationForTesting() { StartBlackAllocation(); }
 
+  void AbortBlackAllocation();
+
  private:
   class Observer : public AllocationObserver {
    public:
@@ -252,8 +225,7 @@
           incremental_marking_(incremental_marking) {}
 
     void Step(int bytes_allocated, Address, size_t) override {
-      incremental_marking_.Step(bytes_allocated,
-                                IncrementalMarking::GC_VIA_STACK_GUARD);
+      incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
     }
 
    private:
@@ -262,10 +234,6 @@
 
   int64_t SpaceLeftInOldSpace();
 
-  void SpeedUp();
-
-  void ResetStepCounters();
-
   void StartMarking();
 
   void StartBlackAllocation();
@@ -301,37 +269,36 @@
 
   void IncrementIdleMarkingDelayCounter();
 
+  void AdvanceIncrementalMarkingOnAllocation();
+
+  size_t StepSizeToKeepUpWithAllocations();
+  size_t StepSizeToMakeProgress();
+
   Heap* heap_;
 
-  Observer observer_;
-
   State state_;
-  bool is_compacting_;
 
-  int steps_count_;
-  int64_t old_generation_space_available_at_start_of_incremental_;
-  int64_t old_generation_space_used_at_start_of_incremental_;
-  int64_t bytes_rescanned_;
-  bool should_hurry_;
-  int marking_speed_;
-  intptr_t bytes_scanned_;
-  intptr_t allocated_;
-  intptr_t write_barriers_invoked_since_last_step_;
-  size_t idle_marking_delay_counter_;
+  double start_time_ms_;
+  size_t initial_old_generation_size_;
+  size_t old_generation_allocation_counter_;
+  size_t bytes_allocated_;
+  size_t bytes_marked_ahead_of_schedule_;
+  size_t unscanned_bytes_of_large_object_;
 
-  int unscanned_bytes_of_large_object_;
-
-  bool was_activated_;
-
-  bool black_allocation_;
-
-  bool finalize_marking_completed_;
-
+  int idle_marking_delay_counter_;
   int incremental_marking_finalization_rounds_;
 
+  bool is_compacting_;
+  bool should_hurry_;
+  bool was_activated_;
+  bool black_allocation_;
+  bool finalize_marking_completed_;
+
   GCRequestType request_type_;
 
   IncrementalMarkingJob incremental_marking_job_;
+  Observer new_generation_observer_;
+  Observer old_generation_observer_;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
 };
diff --git a/src/heap/mark-compact-inl.h b/src/heap/mark-compact-inl.h
index 7ead421..fe71fb1 100644
--- a/src/heap/mark-compact-inl.h
+++ b/src/heap/mark-compact-inl.h
@@ -14,7 +14,7 @@
 
 void MarkCompactCollector::PushBlack(HeapObject* obj) {
   DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
-  if (marking_deque_.Push(obj)) {
+  if (marking_deque()->Push(obj)) {
     MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
   } else {
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
@@ -25,7 +25,7 @@
 
 void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
   DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
-  if (!marking_deque_.Unshift(obj)) {
+  if (!marking_deque()->Unshift(obj)) {
     MemoryChunk::IncrementLiveBytesFromGC(obj, -obj->Size());
     MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
     Marking::BlackToGrey(mark_bit);
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index ae7b467..7e5ef96 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -61,7 +61,6 @@
       marking_deque_memory_(NULL),
       marking_deque_memory_committed_(0),
       code_flusher_(nullptr),
-      embedder_heap_tracer_(nullptr),
       sweeper_(heap) {
 }
 
@@ -567,6 +566,7 @@
 }
 
 bool MarkCompactCollector::Sweeper::IsSweepingCompleted() {
+  DCHECK(FLAG_concurrent_sweeping);
   while (pending_sweeper_tasks_semaphore_.WaitFor(
       base::TimeDelta::FromSeconds(0))) {
     num_sweeping_tasks_.Increment(-1);
@@ -600,7 +600,7 @@
   // For memory reducing and optimize for memory mode we directly define both
   // constants.
   const int kTargetFragmentationPercentForReduceMemory = 20;
-  const int kMaxEvacuatedBytesForReduceMemory = 12 * Page::kPageSize;
+  const int kMaxEvacuatedBytesForReduceMemory = 12 * MB;
   const int kTargetFragmentationPercentForOptimizeMemory = 20;
   const int kMaxEvacuatedBytesForOptimizeMemory = 6 * MB;
 
@@ -608,10 +608,10 @@
   // defaults to start and switch to a trace-based (using compaction speed)
   // approach as soon as we have enough samples.
   const int kTargetFragmentationPercent = 70;
-  const int kMaxEvacuatedBytes = 4 * Page::kPageSize;
+  const int kMaxEvacuatedBytes = 4 * MB;
   // Time to take for a single area (=payload of page). Used as soon as there
   // exist enough compaction speed samples.
-  const int kTargetMsPerArea = 1;
+  const float kTargetMsPerArea = .5;
 
   if (heap()->ShouldReduceMemory()) {
     *target_fragmentation_percent = kTargetFragmentationPercentForReduceMemory;
@@ -801,13 +801,14 @@
   // Clear marking bits if incremental marking is aborted.
   if (was_marked_incrementally_ && heap_->ShouldAbortIncrementalMarking()) {
     heap()->incremental_marking()->Stop();
+    heap()->incremental_marking()->AbortBlackAllocation();
     ClearMarkbits();
     AbortWeakCollections();
     AbortWeakCells();
     AbortTransitionArrays();
     AbortCompaction();
     if (heap_->UsingEmbedderHeapTracer()) {
-      heap_->mark_compact_collector()->embedder_heap_tracer()->AbortTracing();
+      heap_->embedder_heap_tracer()->AbortTracing();
     }
     was_marked_incrementally_ = false;
   }
@@ -815,12 +816,13 @@
   if (!was_marked_incrementally_) {
     if (heap_->UsingEmbedderHeapTracer()) {
       TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_PROLOGUE);
-      heap_->mark_compact_collector()->embedder_heap_tracer()->TracePrologue();
+      heap_->embedder_heap_tracer()->TracePrologue(
+          heap_->embedder_reachable_reference_reporter());
     }
   }
 
-  if (UsingEmbedderHeapTracer()) {
-    embedder_heap_tracer()->EnterFinalPause();
+  if (heap_->UsingEmbedderHeapTracer()) {
+    heap_->embedder_heap_tracer()->EnterFinalPause();
   }
 
   // Don't start compaction if we are in the middle of incremental
@@ -1244,7 +1246,7 @@
     Heap* heap = map->GetHeap();
     MarkCompactCollector* collector = heap->mark_compact_collector();
     if (!collector->is_code_flushing_enabled()) {
-      VisitJSRegExp(map, object);
+      JSObjectVisitor::Visit(map, object);
       return;
     }
     JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
@@ -1252,7 +1254,7 @@
     UpdateRegExpCodeAgeAndFlush(heap, re, true);
     UpdateRegExpCodeAgeAndFlush(heap, re, false);
     // Visit the fields of the RegExp, including the updated FixedArray.
-    VisitJSRegExp(map, object);
+    JSObjectVisitor::Visit(map, object);
   }
 };
 
@@ -1975,7 +1977,7 @@
   MarkStringTable(visitor);
 
   // There may be overflowed objects in the heap.  Visit them now.
-  while (marking_deque_.overflowed()) {
+  while (marking_deque()->overflowed()) {
     RefillMarkingDeque();
     EmptyMarkingDeque();
   }
@@ -2018,8 +2020,8 @@
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingDeque() {
-  while (!marking_deque_.IsEmpty()) {
-    HeapObject* object = marking_deque_.Pop();
+  while (!marking_deque()->IsEmpty()) {
+    HeapObject* object = marking_deque()->Pop();
 
     DCHECK(!object->IsFiller());
     DCHECK(object->IsHeapObject());
@@ -2042,25 +2044,25 @@
 // is cleared.
 void MarkCompactCollector::RefillMarkingDeque() {
   isolate()->CountUsage(v8::Isolate::UseCounterFeature::kMarkDequeOverflow);
-  DCHECK(marking_deque_.overflowed());
+  DCHECK(marking_deque()->overflowed());
 
   DiscoverGreyObjectsInNewSpace();
-  if (marking_deque_.IsFull()) return;
+  if (marking_deque()->IsFull()) return;
 
   DiscoverGreyObjectsInSpace(heap()->old_space());
-  if (marking_deque_.IsFull()) return;
+  if (marking_deque()->IsFull()) return;
 
   DiscoverGreyObjectsInSpace(heap()->code_space());
-  if (marking_deque_.IsFull()) return;
+  if (marking_deque()->IsFull()) return;
 
   DiscoverGreyObjectsInSpace(heap()->map_space());
-  if (marking_deque_.IsFull()) return;
+  if (marking_deque()->IsFull()) return;
 
   LargeObjectIterator lo_it(heap()->lo_space());
   DiscoverGreyObjectsWithIterator(&lo_it);
-  if (marking_deque_.IsFull()) return;
+  if (marking_deque()->IsFull()) return;
 
-  marking_deque_.ClearOverflowed();
+  marking_deque()->ClearOverflowed();
 }
 
 
@@ -2070,7 +2072,7 @@
 // objects in the heap.
 void MarkCompactCollector::ProcessMarkingDeque() {
   EmptyMarkingDeque();
-  while (marking_deque_.overflowed()) {
+  while (marking_deque()->overflowed()) {
     RefillMarkingDeque();
     EmptyMarkingDeque();
   }
@@ -2080,13 +2082,13 @@
 // stack including references only considered in the atomic marking pause.
 void MarkCompactCollector::ProcessEphemeralMarking(
     ObjectVisitor* visitor, bool only_process_harmony_weak_collections) {
-  DCHECK(marking_deque_.IsEmpty() && !marking_deque_.overflowed());
+  DCHECK(marking_deque()->IsEmpty() && !marking_deque()->overflowed());
   bool work_to_do = true;
   while (work_to_do) {
-    if (UsingEmbedderHeapTracer()) {
+    if (heap_->UsingEmbedderHeapTracer()) {
       TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_TRACING);
-      RegisterWrappersWithEmbedderHeapTracer();
-      embedder_heap_tracer()->AdvanceTracing(
+      heap_->RegisterWrappersWithEmbedderHeapTracer();
+      heap_->embedder_heap_tracer()->AdvanceTracing(
           0, EmbedderHeapTracer::AdvanceTracingActions(
                  EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
     }
@@ -2097,7 +2099,7 @@
       MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject);
     }
     ProcessWeakCollections();
-    work_to_do = !marking_deque_.IsEmpty();
+    work_to_do = !marking_deque()->IsEmpty();
     ProcessMarkingDeque();
   }
 }
@@ -2121,7 +2123,7 @@
 
 
 void MarkCompactCollector::EnsureMarkingDequeIsReserved() {
-  DCHECK(!marking_deque_.in_use());
+  DCHECK(!marking_deque()->in_use());
   if (marking_deque_memory_ == NULL) {
     marking_deque_memory_ = new base::VirtualMemory(kMaxMarkingDequeSize);
     marking_deque_memory_committed_ = 0;
@@ -2135,7 +2137,7 @@
 void MarkCompactCollector::EnsureMarkingDequeIsCommitted(size_t max_size) {
   // If the marking deque is too small, we try to allocate a bigger one.
   // If that fails, make do with a smaller one.
-  CHECK(!marking_deque_.in_use());
+  CHECK(!marking_deque()->in_use());
   for (size_t size = max_size; size >= kMinMarkingDequeSize; size >>= 1) {
     base::VirtualMemory* memory = marking_deque_memory_;
     size_t currently_committed = marking_deque_memory_committed_;
@@ -2167,12 +2169,12 @@
 
 
 void MarkCompactCollector::InitializeMarkingDeque() {
-  DCHECK(!marking_deque_.in_use());
+  DCHECK(!marking_deque()->in_use());
   DCHECK(marking_deque_memory_committed_ > 0);
   Address addr = static_cast<Address>(marking_deque_memory_->address());
   size_t size = marking_deque_memory_committed_;
   if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize;
-  marking_deque_.Initialize(addr, addr + size);
+  marking_deque()->Initialize(addr, addr + size);
 }
 
 
@@ -2200,34 +2202,6 @@
   in_use_ = false;
 }
 
-void MarkCompactCollector::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
-  DCHECK_NOT_NULL(tracer);
-  CHECK_NULL(embedder_heap_tracer_);
-  embedder_heap_tracer_ = tracer;
-}
-
-void MarkCompactCollector::RegisterWrappersWithEmbedderHeapTracer() {
-  DCHECK(UsingEmbedderHeapTracer());
-  if (wrappers_to_trace_.empty()) {
-    return;
-  }
-  embedder_heap_tracer()->RegisterV8References(wrappers_to_trace_);
-  wrappers_to_trace_.clear();
-}
-
-void MarkCompactCollector::TracePossibleWrapper(JSObject* js_object) {
-  DCHECK(js_object->WasConstructedFromApiFunction());
-  if (js_object->GetInternalFieldCount() >= 2 &&
-      js_object->GetInternalField(0) &&
-      js_object->GetInternalField(0) != heap_->undefined_value() &&
-      js_object->GetInternalField(1) != heap_->undefined_value()) {
-    DCHECK(reinterpret_cast<intptr_t>(js_object->GetInternalField(0)) % 2 == 0);
-    wrappers_to_trace_.push_back(std::pair<void*, void*>(
-        reinterpret_cast<void*>(js_object->GetInternalField(0)),
-        reinterpret_cast<void*>(js_object->GetInternalField(1))));
-  }
-}
-
 class MarkCompactCollector::ObjectStatsVisitor
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
@@ -2259,8 +2233,9 @@
   SpaceIterator space_it(heap());
   HeapObject* obj = nullptr;
   while (space_it.has_next()) {
-    ObjectIterator* it = space_it.next();
-    while ((obj = it->Next()) != nullptr) {
+    std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
+    ObjectIterator* obj_it = it.get();
+    while ((obj = obj_it->Next()) != nullptr) {
       visitor->Visit(obj);
     }
   }
@@ -2271,6 +2246,13 @@
     ObjectStatsVisitor visitor(heap(), heap()->live_object_stats_,
                                heap()->dead_object_stats_);
     VisitAllObjects(&visitor);
+    std::stringstream live, dead;
+    heap()->live_object_stats_->Dump(live);
+    heap()->dead_object_stats_->Dump(dead);
+    TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("v8.gc_stats"),
+                         "V8.GC_Objects_Stats", TRACE_EVENT_SCOPE_THREAD,
+                         "live", TRACE_STR_COPY(live.str().c_str()), "dead",
+                         TRACE_STR_COPY(dead.str().c_str()));
     if (FLAG_trace_gc_object_stats) {
       heap()->live_object_stats_->PrintJSON("live");
       heap()->dead_object_stats_->PrintJSON("dead");
@@ -2282,10 +2264,6 @@
 
 void MarkCompactCollector::MarkLiveObjects() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK);
-  double start_time = 0.0;
-  if (FLAG_print_cumulative_gc_stat) {
-    start_time = heap_->MonotonicallyIncreasingTimeInMs();
-  }
   // The recursive GC marker detects when it is nearing stack overflow,
   // and switches to a different marking system.  JS interrupts interfere
   // with the C stack limit check.
@@ -2299,8 +2277,8 @@
     } else {
       // Abort any pending incremental activities e.g. incremental sweeping.
       incremental_marking->Stop();
-      if (marking_deque_.in_use()) {
-        marking_deque_.Uninitialize(true);
+      if (marking_deque()->in_use()) {
+        marking_deque()->Uninitialize(true);
       }
     }
   }
@@ -2369,17 +2347,12 @@
     {
       TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WEAK_CLOSURE_HARMONY);
       ProcessEphemeralMarking(&root_visitor, true);
-      if (UsingEmbedderHeapTracer()) {
+      if (heap_->UsingEmbedderHeapTracer()) {
         TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_MARK_WRAPPER_EPILOGUE);
-        embedder_heap_tracer()->TraceEpilogue();
+        heap()->embedder_heap_tracer()->TraceEpilogue();
       }
     }
   }
-
-  if (FLAG_print_cumulative_gc_stat) {
-    heap_->tracer()->AddMarkingTime(heap_->MonotonicallyIncreasingTimeInMs() -
-                                    start_time);
-  }
 }
 
 
@@ -3079,8 +3052,7 @@
   explicit Evacuator(MarkCompactCollector* collector)
       : collector_(collector),
         compaction_spaces_(collector->heap()),
-        local_pretenuring_feedback_(base::HashMap::PointersMatch,
-                                    kInitialLocalPretenuringFeedbackCapacity),
+        local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity),
         new_space_visitor_(collector->heap(), &compaction_spaces_,
                            &local_pretenuring_feedback_),
         new_space_page_visitor(collector->heap()),
@@ -3221,7 +3193,7 @@
   // The number of parallel compaction tasks is limited by:
   // - #evacuation pages
   // - (#cores - 1)
-  const double kTargetCompactionTimeInMs = 1;
+  const double kTargetCompactionTimeInMs = .5;
   const int kNumSweepingTasks = 3;
 
   double compaction_speed =
@@ -3299,10 +3271,11 @@
     job.AddPage(page, &abandoned_pages);
   }
 
+  const bool reduce_memory = heap()->ShouldReduceMemory();
   const Address age_mark = heap()->new_space()->age_mark();
   for (Page* page : newspace_evacuation_candidates_) {
     live_bytes += page->LiveBytes();
-    if (!page->NeverEvacuate() &&
+    if (!reduce_memory && !page->NeverEvacuate() &&
         (page->LiveBytes() > Evacuator::PageEvacuationThreshold()) &&
         !page->Contains(age_mark)) {
       if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
@@ -3858,6 +3831,15 @@
     } else {
       max_freed = RawSweep(page, REBUILD_FREE_LIST, free_space_mode);
     }
+
+    // After finishing sweeping of a page we clean up its remembered set.
+    if (page->typed_old_to_new_slots()) {
+      page->typed_old_to_new_slots()->FreeToBeFreedChunks();
+    }
+    if (page->old_to_new_slots()) {
+      page->old_to_new_slots()->FreeToBeFreedBuckets();
+    }
+
     {
       base::LockGuard<base::Mutex> guard(&mutex_);
       swept_list_[identity].Add(page);
@@ -3964,11 +3946,6 @@
 
 void MarkCompactCollector::SweepSpaces() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
-  double start_time = 0.0;
-  if (FLAG_print_cumulative_gc_stat) {
-    start_time = heap_->MonotonicallyIncreasingTimeInMs();
-  }
-
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
@@ -3994,11 +3971,6 @@
 
   // Deallocate unmarked large objects.
   heap_->lo_space()->FreeUnmarkedObjects();
-
-  if (FLAG_print_cumulative_gc_stat) {
-    heap_->tracer()->AddSweepingTime(heap_->MonotonicallyIncreasingTimeInMs() -
-                                     start_time);
-  }
 }
 
 Isolate* MarkCompactCollector::isolate() const { return heap_->isolate(); }
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index b2c637b..2cbb369 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -467,7 +467,7 @@
   static const size_t kMinMarkingDequeSize = 256 * KB;
 
   void EnsureMarkingDequeIsCommittedAndInitialize(size_t max_size) {
-    if (!marking_deque_.in_use()) {
+    if (!marking_deque()->in_use()) {
       EnsureMarkingDequeIsCommitted(max_size);
       InitializeMarkingDeque();
     }
@@ -490,16 +490,6 @@
 
   Sweeper& sweeper() { return sweeper_; }
 
-  void RegisterWrappersWithEmbedderHeapTracer();
-
-  void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
-
-  EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
-
-  bool UsingEmbedderHeapTracer() { return embedder_heap_tracer(); }
-
-  void TracePossibleWrapper(JSObject* js_object);
-
  private:
   class EvacuateNewSpacePageVisitor;
   class EvacuateNewSpaceVisitor;
@@ -739,12 +729,9 @@
   base::VirtualMemory* marking_deque_memory_;
   size_t marking_deque_memory_committed_;
   MarkingDeque marking_deque_;
-  std::vector<std::pair<void*, void*>> wrappers_to_trace_;
 
   CodeFlusher* code_flusher_;
 
-  EmbedderHeapTracer* embedder_heap_tracer_;
-
   List<Page*> evacuation_candidates_;
   List<Page*> newspace_evacuation_candidates_;
 
@@ -768,8 +755,7 @@
   MarkCompactCollector* collector_;
 };
 
-
-const char* AllocationSpaceName(AllocationSpace space);
+V8_EXPORT_PRIVATE const char* AllocationSpaceName(AllocationSpace space);
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/heap/memory-reducer.cc b/src/heap/memory-reducer.cc
index 699e10e..ba9010e 100644
--- a/src/heap/memory-reducer.cc
+++ b/src/heap/memory-reducer.cc
@@ -73,7 +73,8 @@
       PrintIsolate(heap()->isolate(), "Memory reducer: started GC #%d\n",
                    state_.started_gcs);
     }
-    heap()->StartIdleIncrementalMarking();
+    heap()->StartIdleIncrementalMarking(
+        GarbageCollectionReason::kMemoryReducer);
   } else if (state_.action == kWait) {
     if (!heap()->incremental_marking()->IsStopped() &&
         heap()->ShouldOptimizeForMemoryUsage()) {
@@ -84,12 +85,10 @@
       double deadline = heap()->MonotonicallyIncreasingTimeInMs() +
                         kIncrementalMarkingDelayMs;
       heap()->incremental_marking()->AdvanceIncrementalMarking(
-          deadline, i::IncrementalMarking::StepActions(
-                        i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
-                        i::IncrementalMarking::FORCE_MARKING,
-                        i::IncrementalMarking::FORCE_COMPLETION));
+          deadline, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
+          IncrementalMarking::FORCE_COMPLETION, StepOrigin::kTask);
       heap()->FinalizeIncrementalMarkingIfComplete(
-          "Memory reducer: finalize incremental marking");
+          GarbageCollectionReason::kFinalizeMarkingViaTask);
     }
     // Re-schedule the timer.
     ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
diff --git a/src/heap/object-stats.cc b/src/heap/object-stats.cc
index 3f43212..6e4b50e 100644
--- a/src/heap/object-stats.cc
+++ b/src/heap/object-stats.cc
@@ -42,6 +42,16 @@
   PrintF(" ]");
 }
 
+V8_NOINLINE static void DumpJSONArray(std::stringstream& stream, size_t* array,
+                                      const int len) {
+  stream << "[";
+  for (int i = 0; i < len; i++) {
+    stream << array[i];
+    if (i != (len - 1)) stream << ",";
+  }
+  stream << "]";
+}
+
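
DumpJSONArray is the building block for the per-type histograms that Dump() emits below; a self-contained copy (V8_NOINLINE dropped) showing the output format it produces:

#include <cstddef>
#include <cstdio>
#include <sstream>

// Local copy of the helper added above, for illustration only.
static void DumpJSONArray(std::stringstream& stream, size_t* array,
                          const int len) {
  stream << "[";
  for (int i = 0; i < len; i++) {
    stream << array[i];
    if (i != (len - 1)) stream << ",";
  }
  stream << "]";
}

int main() {
  size_t histogram[] = {0, 3, 12, 7};
  std::stringstream out;
  DumpJSONArray(out, histogram, 4);
  std::printf("%s\n", out.str().c_str());  // prints [0,3,12,7]
  return 0;
}
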
 void ObjectStats::PrintJSON(const char* key) {
   double time = isolate()->time_millis_since_init();
   int gc_count = heap()->gc_count();
@@ -102,6 +112,60 @@
 #undef FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER
 #undef CODE_AGE_WRAPPER
 #undef PRINT_INSTANCE_TYPE_DATA
+#undef PRINT_KEY_AND_ID
+}
+
+void ObjectStats::Dump(std::stringstream& stream) {
+  double time = isolate()->time_millis_since_init();
+  int gc_count = heap()->gc_count();
+
+  stream << "{";
+  stream << "\"isolate\":\"" << reinterpret_cast<void*>(isolate()) << "\",";
+  stream << "\"id\":" << gc_count << ",";
+  stream << "\"time\":" << time << ",";
+  stream << "\"bucket_sizes\":[";
+  for (int i = 0; i < kNumberOfBuckets; i++) {
+    stream << (1 << (kFirstBucketShift + i));
+    if (i != (kNumberOfBuckets - 1)) stream << ",";
+  }
+  stream << "],";
+  stream << "\"type_data\":{";
+
+#define PRINT_INSTANCE_TYPE_DATA(name, index)                                \
+  stream << "\"" << name << "\":{";                                          \
+  stream << "\"type\":" << static_cast<int>(index) << ",";                   \
+  stream << "\"overall\":" << object_sizes_[index] << ",";                   \
+  stream << "\"count\":" << object_counts_[index] << ",";                    \
+  stream << "\"over_allocated\":" << over_allocated_[index] << ",";          \
+  stream << "\"histogram\":";                                                \
+  DumpJSONArray(stream, size_histogram_[index], kNumberOfBuckets);           \
+  stream << ",\"over_allocated_histogram\":";                                \
+  DumpJSONArray(stream, over_allocated_histogram_[index], kNumberOfBuckets); \
+  stream << "},";
+
+#define INSTANCE_TYPE_WRAPPER(name) PRINT_INSTANCE_TYPE_DATA(#name, name)
+#define CODE_KIND_WRAPPER(name)            \
+  PRINT_INSTANCE_TYPE_DATA("*CODE_" #name, \
+                           FIRST_CODE_KIND_SUB_TYPE + Code::name)
+#define FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER(name) \
+  PRINT_INSTANCE_TYPE_DATA("*FIXED_ARRAY_" #name,   \
+                           FIRST_FIXED_ARRAY_SUB_TYPE + name)
+#define CODE_AGE_WRAPPER(name) \
+  PRINT_INSTANCE_TYPE_DATA(    \
+      "*CODE_AGE_" #name,      \
+      FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge)
+
+  INSTANCE_TYPE_LIST(INSTANCE_TYPE_WRAPPER);
+  CODE_KIND_LIST(CODE_KIND_WRAPPER);
+  FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER);
+  CODE_AGE_LIST_COMPLETE(CODE_AGE_WRAPPER);
+  stream << "\"END\":{}}}";
+
+#undef INSTANCE_TYPE_WRAPPER
+#undef CODE_KIND_WRAPPER
+#undef FIXED_ARRAY_SUB_INSTANCE_TYPE_WRAPPER
+#undef CODE_AGE_WRAPPER
+#undef PRINT_INSTANCE_TYPE_DATA
 }
 
 void ObjectStats::CheckpointObjectStats() {
@@ -246,8 +310,6 @@
                         OBJECT_TO_CODE_SUB_TYPE);
   RecordHashTableHelper(nullptr, heap_->code_stubs(),
                         CODE_STUBS_TABLE_SUB_TYPE);
-  RecordHashTableHelper(nullptr, heap_->intrinsic_function_names(),
-                        INTRINSIC_FUNCTION_NAMES_SUB_TYPE);
   RecordHashTableHelper(nullptr, heap_->empty_properties_dictionary(),
                         EMPTY_PROPERTIES_DICTIONARY_SUB_TYPE);
   CompilationCache* compilation_cache = heap_->isolate()->compilation_cache();
@@ -447,9 +509,11 @@
   if (code->kind() == Code::Kind::OPTIMIZED_FUNCTION) {
     DeoptimizationInputData* input_data =
         DeoptimizationInputData::cast(code->deoptimization_data());
-    RecordFixedArrayHelper(code->deoptimization_data(),
-                           input_data->LiteralArray(),
-                           OPTIMIZED_CODE_LITERALS_SUB_TYPE, 0);
+    if (input_data->length() > 0) {
+      RecordFixedArrayHelper(code->deoptimization_data(),
+                             input_data->LiteralArray(),
+                             OPTIMIZED_CODE_LITERALS_SUB_TYPE, 0);
+    }
   }
   RecordFixedArrayHelper(code, code->handler_table(), HANDLER_TABLE_SUB_TYPE,
                          0);
diff --git a/src/heap/object-stats.h b/src/heap/object-stats.h
index 4780696..add5a12 100644
--- a/src/heap/object-stats.h
+++ b/src/heap/object-stats.h
@@ -35,6 +35,7 @@
 
   void CheckpointObjectStats();
   void PrintJSON(const char* key);
+  void Dump(std::stringstream& stream);
 
   void RecordObjectStats(InstanceType type, size_t size) {
     DCHECK(type <= LAST_TYPE);
diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h
index 148975f..252b2fe 100644
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -147,11 +147,17 @@
 
   table_.Register(kVisitNativeContext, &VisitNativeContext);
 
-  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
+  table_.Register(
+      kVisitAllocationSite,
+      &FixedBodyVisitor<StaticVisitor, AllocationSite::MarkingBodyDescriptor,
+                        void>::Visit);
 
   table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
 
-  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);
+  table_.Register(
+      kVisitBytecodeArray,
+      &FixedBodyVisitor<StaticVisitor, BytecodeArray::MarkingBodyDescriptor,
+                        void>::Visit);
 
   table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
 
@@ -178,13 +184,15 @@
       &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                            void>::Visit);
 
-  // Registration for kVisitJSRegExp is done by StaticVisitor.
+  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);
 
   table_.Register(
       kVisitCell,
       &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);
 
-  table_.Register(kVisitPropertyCell, &VisitPropertyCell);
+  table_.Register(kVisitPropertyCell,
+                  &FixedBodyVisitor<StaticVisitor, PropertyCell::BodyDescriptor,
+                                    void>::Visit);
 
   table_.Register(kVisitWeakCell, &VisitWeakCell);
 
@@ -319,19 +327,6 @@
   }
 }
 
-
-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
-    Map* map, HeapObject* object) {
-  Heap* heap = map->GetHeap();
-
-  StaticVisitor::VisitPointers(
-      heap, object,
-      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
-      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
-}
-
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                         HeapObject* object) {
@@ -384,19 +379,6 @@
   }
 }
 
-
-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
-    Map* map, HeapObject* object) {
-  Heap* heap = map->GetHeap();
-
-  StaticVisitor::VisitPointers(
-      heap, object,
-      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
-      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
-}
-
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
     Map* map, HeapObject* object) {
@@ -467,11 +449,11 @@
       // optimized code.
       collector->code_flusher()->AddCandidate(shared);
       // Treat the reference to the code object weakly.
-      VisitSharedFunctionInfoWeakCode(heap, object);
+      VisitSharedFunctionInfoWeakCode(map, object);
       return;
     }
   }
-  VisitSharedFunctionInfoStrongCode(heap, object);
+  VisitSharedFunctionInfoStrongCode(map, object);
 }
 
 
@@ -504,23 +486,6 @@
   VisitJSFunctionStrongCode(map, object);
 }
 
-
-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
-                                                        HeapObject* object) {
-  JSObjectVisitor::Visit(map, object);
-}
-
-template <typename StaticVisitor>
-void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
-    Map* map, HeapObject* object) {
-  StaticVisitor::VisitPointers(
-      map->GetHeap(), object,
-      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
-      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
-}
-
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                           Map* map) {
@@ -623,7 +588,7 @@
   // We do not (yet?) flush code for generator functions, or async functions,
   // because we don't know if there are still live activations
   // (generator objects) on the heap.
-  if (shared_info->is_resumable()) {
+  if (IsResumableFunction(shared_info->kind())) {
     return false;
   }
 
@@ -656,39 +621,23 @@
   return true;
 }
 
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
-    Heap* heap, HeapObject* object) {
-  Object** start_slot = HeapObject::RawField(
-      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
-  Object** end_slot = HeapObject::RawField(
-      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
-  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
+    Map* map, HeapObject* object) {
+  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
+                   void>::Visit(map, object);
 }
 
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
-    Heap* heap, HeapObject* object) {
-  Object** name_slot =
-      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
-  StaticVisitor::VisitPointer(heap, object, name_slot);
-
+    Map* map, HeapObject* object) {
   // Skip visiting kCodeOffset as it is treated weakly here.
-  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
-                SharedFunctionInfo::kCodeOffset);
-  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
-                SharedFunctionInfo::kOptimizedCodeMapOffset);
-
-  Object** start_slot =
-      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
-  Object** end_slot = HeapObject::RawField(
-      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
-  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
+  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset <
+                SharedFunctionInfo::BodyDescriptorWeakCode::kStartOffset);
+  FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptorWeakCode,
+                   void>::Visit(map, object);
 }
 
-
 template <typename StaticVisitor>
 void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
     Map* map, HeapObject* object) {
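Editor's note: the strong/weak SharedFunctionInfo visitors above now take the Map* and delegate to FixedBodyVisitor over a body descriptor instead of computing raw field ranges by hand. As a rough sketch (assuming the descriptor still exposes kStartOffset/kEndOffset, as the removed code did), the delegated visit amounts to:

// Minimal sketch, not the V8 implementation: visit every tagged pointer
// field in [kStartOffset, kEndOffset) of the object.
template <typename StaticVisitor, typename BodyDescriptor>
static void VisitFixedBody(Map* map, HeapObject* object) {
  Object** start = HeapObject::RawField(object, BodyDescriptor::kStartOffset);
  Object** end = HeapObject::RawField(object, BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(map->GetHeap(), object, start, end);
}

With BodyDescriptorWeakCode the visited range simply starts past kCodeOffset, which is exactly what the STATIC_ASSERT above checks.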
diff --git a/src/heap/objects-visiting.cc b/src/heap/objects-visiting.cc
index 83e2e1c..9393fcc 100644
--- a/src/heap/objects-visiting.cc
+++ b/src/heap/objects-visiting.cc
@@ -107,7 +107,6 @@
     case JS_ARGUMENTS_TYPE:
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
     case JS_GENERATOR_OBJECT_TYPE:
-    case JS_MODULE_TYPE:
     case JS_VALUE_TYPE:
     case JS_DATE_TYPE:
     case JS_ARRAY_TYPE:
@@ -120,6 +119,7 @@
     case JS_MAP_TYPE:
     case JS_SET_ITERATOR_TYPE:
     case JS_MAP_ITERATOR_TYPE:
+    case JS_STRING_ITERATOR_TYPE:
     case JS_PROMISE_TYPE:
     case JS_BOUND_FUNCTION_TYPE:
       return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
diff --git a/src/heap/objects-visiting.h b/src/heap/objects-visiting.h
index 303db0e..633c277 100644
--- a/src/heap/objects-visiting.h
+++ b/src/heap/objects-visiting.h
@@ -132,7 +132,7 @@
            (base == kVisitJSObject) || (base == kVisitJSApiObject));
     DCHECK(IsAligned(object_size, kPointerSize));
     DCHECK(Heap::kMinObjectSizeInWords * kPointerSize <= object_size);
-    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+    DCHECK(object_size <= kMaxRegularHeapObjectSize);
     DCHECK(!has_unboxed_fields || (base == kVisitJSObject) ||
            (base == kVisitJSApiObject));
 
@@ -354,7 +354,6 @@
     table_.GetVisitor(map)(map, obj);
   }
 
-  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
   INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
   INLINE(static void VisitTransitionArray(Map* map, HeapObject* object));
   INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
@@ -374,12 +373,9 @@
   INLINE(static void VisitMap(Map* map, HeapObject* object));
   INLINE(static void VisitCode(Map* map, HeapObject* object));
   INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
-  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
   INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
   INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
-  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
   INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
-  INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
 
   // Mark pointers in a Map treating some elements of the descriptor array weak.
   static void MarkMapContents(Heap* heap, Map* map);
@@ -390,8 +386,8 @@
 
   // Helpers used by code flushing support that visit pointer fields and treat
   // references to code objects either strongly or weakly.
-  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
-  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
+  static void VisitSharedFunctionInfoStrongCode(Map* map, HeapObject* object);
+  static void VisitSharedFunctionInfoWeakCode(Map* map, HeapObject* object);
   static void VisitJSFunctionStrongCode(Map* map, HeapObject* object);
   static void VisitJSFunctionWeakCode(Map* map, HeapObject* object);
 
diff --git a/src/heap/remembered-set.cc b/src/heap/remembered-set.cc
index 6575d55..c5dab90 100644
--- a/src/heap/remembered-set.cc
+++ b/src/heap/remembered-set.cc
@@ -20,10 +20,12 @@
   for (MemoryChunk* chunk : *heap->old_space()) {
     SlotSet* slots = GetSlotSet(chunk);
     if (slots != nullptr) {
-      slots->Iterate([heap, chunk](Address addr) {
-        Object** slot = reinterpret_cast<Object**>(addr);
-        return IsValidSlot(heap, chunk, slot) ? KEEP_SLOT : REMOVE_SLOT;
-      });
+      slots->Iterate(
+          [heap, chunk](Address addr) {
+            Object** slot = reinterpret_cast<Object**>(addr);
+            return IsValidSlot(heap, chunk, slot) ? KEEP_SLOT : REMOVE_SLOT;
+          },
+          SlotSet::PREFREE_EMPTY_BUCKETS);
     }
   }
   for (MemoryChunk* chunk : *heap->code_space()) {
@@ -36,20 +38,24 @@
             } else {
               return REMOVE_SLOT;
             }
-          });
+          },
+          TypedSlotSet::PREFREE_EMPTY_CHUNKS);
     }
   }
   for (MemoryChunk* chunk : *heap->map_space()) {
     SlotSet* slots = GetSlotSet(chunk);
     if (slots != nullptr) {
-      slots->Iterate([heap, chunk](Address addr) {
-        Object** slot = reinterpret_cast<Object**>(addr);
-        // TODO(mlippautz): In map space all allocations would ideally be map
-        // aligned. After establishing this invariant IsValidSlot could just
-        // refer to the containing object using alignment and check the mark
-        // bits.
-        return IsValidSlot(heap, chunk, slot) ? KEEP_SLOT : REMOVE_SLOT;
-      });
+      slots->Iterate(
+          [heap, chunk](Address addr) {
+            Object** slot = reinterpret_cast<Object**>(addr);
+            // TODO(mlippautz): In map space all allocations would ideally be
+            // map aligned. After establishing this invariant IsValidSlot
+            // could just refer to the containing object using alignment and
+            // check the mark bits.
+            return IsValidSlot(heap, chunk, slot) ? KEEP_SLOT : REMOVE_SLOT;
+          },
+          SlotSet::PREFREE_EMPTY_BUCKETS);
     }
   }
 }
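Editor's note: for contrast with the PREFREE_EMPTY_BUCKETS calls above, a hedged sketch of the other iteration mode; the validity predicate here is made up for illustration:

// KEEP_EMPTY_BUCKETS leaves empty buckets in place, so no deferred freeing
// (and no later FreeToBeFreedBuckets() call) is required.
int live_slots = slots->Iterate(
    [](Address addr) {
      // Hypothetical predicate: keep the slot only if it still holds a
      // non-null pointer.
      return *reinterpret_cast<Object**>(addr) != nullptr ? KEEP_SLOT
                                                          : REMOVE_SLOT;
    },
    SlotSet::KEEP_EMPTY_BUCKETS);
USE(live_slots);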
diff --git a/src/heap/remembered-set.h b/src/heap/remembered-set.h
index 8022d52..74791b9 100644
--- a/src/heap/remembered-set.h
+++ b/src/heap/remembered-set.h
@@ -116,10 +116,13 @@
       size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize;
       int new_count = 0;
       for (size_t page = 0; page < pages; page++) {
-        new_count += slots[page].Iterate(callback);
+        new_count +=
+            slots[page].Iterate(callback, SlotSet::PREFREE_EMPTY_BUCKETS);
       }
-      if (new_count == 0) {
-        ReleaseSlotSet(chunk);
+      // Only old-to-old slot sets are released eagerly. Old-to-new slot sets
+      // are released by the sweeper threads.
+      if (direction == OLD_TO_OLD && new_count == 0) {
+        chunk->ReleaseOldToOldSlots();
       }
     }
   }
@@ -149,10 +152,13 @@
   static void RemoveRangeTyped(MemoryChunk* page, Address start, Address end) {
     TypedSlotSet* slots = GetTypedSlotSet(page);
     if (slots != nullptr) {
-      slots->Iterate([start, end](SlotType slot_type, Address host_addr,
-                                  Address slot_addr) {
-        return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT;
-      });
+      slots->Iterate(
+          [start, end](SlotType slot_type, Address host_addr,
+                       Address slot_addr) {
+            return start <= slot_addr && slot_addr < end ? REMOVE_SLOT
+                                                         : KEEP_SLOT;
+          },
+          TypedSlotSet::PREFREE_EMPTY_CHUNKS);
     }
   }
 
@@ -173,7 +179,7 @@
   static void IterateTyped(MemoryChunk* chunk, Callback callback) {
     TypedSlotSet* slots = GetTypedSlotSet(chunk);
     if (slots != nullptr) {
-      int new_count = slots->Iterate(callback);
+      int new_count = slots->Iterate(callback, TypedSlotSet::KEEP_EMPTY_CHUNKS);
       if (new_count == 0) {
         ReleaseTypedSlotSet(chunk);
       }
@@ -216,19 +222,9 @@
     }
   }
 
-  static void ReleaseSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      chunk->ReleaseOldToOldSlots();
-    } else {
-      chunk->ReleaseOldToNewSlots();
-    }
-  }
-
   static void ReleaseTypedSlotSet(MemoryChunk* chunk) {
     if (direction == OLD_TO_OLD) {
       chunk->ReleaseTypedOldToOldSlots();
-    } else {
-      chunk->ReleaseTypedOldToNewSlots();
     }
   }
 
@@ -363,7 +359,7 @@
       case OBJECT_SLOT: {
         return callback(reinterpret_cast<Object**>(addr));
       }
-      case NUMBER_OF_SLOT_TYPES:
+      case CLEARED_SLOT:
         break;
     }
     UNREACHABLE();
@@ -382,7 +378,7 @@
     return DEBUG_TARGET_SLOT;
   }
   UNREACHABLE();
-  return NUMBER_OF_SLOT_TYPES;
+  return CLEARED_SLOT;
 }
 
 }  // namespace internal
diff --git a/src/heap/scavenge-job.cc b/src/heap/scavenge-job.cc
index d89c945..66d4307 100644
--- a/src/heap/scavenge-job.cc
+++ b/src/heap/scavenge-job.cc
@@ -34,7 +34,7 @@
                                  new_space_capacity)) {
     if (EnoughIdleTimeForScavenge(
             idle_time_in_ms, scavenge_speed_in_bytes_per_ms, new_space_size)) {
-      heap->CollectGarbage(NEW_SPACE, "idle task: scavenge");
+      heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kIdleTask);
     } else {
       // Immediately request another idle task that can get larger idle time.
       job_->RescheduleIdleTask(heap);
diff --git a/src/heap/slot-set.h b/src/heap/slot-set.h
index 651af88..017667b 100644
--- a/src/heap/slot-set.h
+++ b/src/heap/slot-set.h
@@ -5,7 +5,10 @@
 #ifndef V8_SLOT_SET_H
 #define V8_SLOT_SET_H
 
+#include <stack>
+
 #include "src/allocation.h"
+#include "src/base/atomic-utils.h"
 #include "src/base/bits.h"
 #include "src/utils.h"
 
@@ -22,9 +25,11 @@
 // Each bucket is a bitmap with a bit corresponding to a single slot offset.
 class SlotSet : public Malloced {
  public:
+  enum IterationMode { PREFREE_EMPTY_BUCKETS, KEEP_EMPTY_BUCKETS };
+
   SlotSet() {
     for (int i = 0; i < kBuckets; i++) {
-      bucket[i] = nullptr;
+      bucket[i].SetValue(nullptr);
     }
   }
 
@@ -32,30 +37,38 @@
     for (int i = 0; i < kBuckets; i++) {
       ReleaseBucket(i);
     }
+    FreeToBeFreedBuckets();
   }
 
   void SetPageStart(Address page_start) { page_start_ = page_start; }
 
   // The slot offset specifies a slot at address page_start_ + slot_offset.
+  // This method should only be called on the main thread because concurrent
+  // allocation of the bucket is not thread-safe.
   void Insert(int slot_offset) {
     int bucket_index, cell_index, bit_index;
     SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index);
-    if (bucket[bucket_index] == nullptr) {
-      bucket[bucket_index] = AllocateBucket();
+    base::AtomicValue<uint32_t>* current_bucket = bucket[bucket_index].Value();
+    if (current_bucket == nullptr) {
+      current_bucket = AllocateBucket();
+      bucket[bucket_index].SetValue(current_bucket);
     }
-    bucket[bucket_index][cell_index] |= 1u << bit_index;
+    if (!(current_bucket[cell_index].Value() & (1u << bit_index))) {
+      current_bucket[cell_index].SetBit(bit_index);
+    }
   }
 
   // The slot offset specifies a slot at address page_start_ + slot_offset.
   void Remove(int slot_offset) {
     int bucket_index, cell_index, bit_index;
     SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index);
-    if (bucket[bucket_index] != nullptr) {
-      uint32_t cell = bucket[bucket_index][cell_index];
+    base::AtomicValue<uint32_t>* current_bucket = bucket[bucket_index].Value();
+    if (current_bucket != nullptr) {
+      uint32_t cell = current_bucket[cell_index].Value();
       if (cell) {
         uint32_t bit_mask = 1u << bit_index;
         if (cell & bit_mask) {
-          bucket[bucket_index][cell_index] ^= bit_mask;
+          current_bucket[cell_index].ClearBit(bit_index);
         }
       }
     }
@@ -73,17 +86,17 @@
     uint32_t start_mask = (1u << start_bit) - 1;
     uint32_t end_mask = ~((1u << end_bit) - 1);
     if (start_bucket == end_bucket && start_cell == end_cell) {
-      MaskCell(start_bucket, start_cell, start_mask | end_mask);
+      ClearCell(start_bucket, start_cell, ~(start_mask | end_mask));
       return;
     }
     int current_bucket = start_bucket;
     int current_cell = start_cell;
-    MaskCell(current_bucket, current_cell, start_mask);
+    ClearCell(current_bucket, current_cell, ~start_mask);
     current_cell++;
     if (current_bucket < end_bucket) {
-      if (bucket[current_bucket] != nullptr) {
+      if (bucket[current_bucket].Value() != nullptr) {
         while (current_cell < kCellsPerBucket) {
-          bucket[current_bucket][current_cell] = 0;
+          bucket[current_bucket].Value()[current_cell].SetValue(0);
           current_cell++;
         }
       }
@@ -100,24 +113,25 @@
     }
     // All buckets between start_bucket and end_bucket are cleared.
     DCHECK(current_bucket == end_bucket && current_cell <= end_cell);
-    if (current_bucket == kBuckets || bucket[current_bucket] == nullptr) {
+    if (current_bucket == kBuckets ||
+        bucket[current_bucket].Value() == nullptr) {
       return;
     }
     while (current_cell < end_cell) {
-      bucket[current_bucket][current_cell] = 0;
+      bucket[current_bucket].Value()[current_cell].SetValue(0);
       current_cell++;
     }
     // All cells between start_cell and end_cell are cleared.
     DCHECK(current_bucket == end_bucket && current_cell == end_cell);
-    MaskCell(end_bucket, end_cell, end_mask);
+    ClearCell(end_bucket, end_cell, ~end_mask);
   }
 
   // The slot offset specifies a slot at address page_start_ + slot_offset.
   bool Lookup(int slot_offset) {
     int bucket_index, cell_index, bit_index;
     SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index);
-    if (bucket[bucket_index] != nullptr) {
-      uint32_t cell = bucket[bucket_index][cell_index];
+    if (bucket[bucket_index].Value() != nullptr) {
+      uint32_t cell = bucket[bucket_index].Value()[cell_index].Value();
       return (cell & (1u << bit_index)) != 0;
     }
     return false;
@@ -126,6 +140,7 @@
   // Iterate over all slots in the set and for each slot invoke the callback.
   // If the callback returns REMOVE_SLOT then the slot is removed from the set.
   // Returns the new number of slots.
+  // This method should only be called on the main thread.
   //
   // Sample usage:
   // Iterate([](Address slot_address) {
@@ -133,16 +148,17 @@
   //    else return REMOVE_SLOT;
   // });
   template <typename Callback>
-  int Iterate(Callback callback) {
+  int Iterate(Callback callback, IterationMode mode) {
     int new_count = 0;
     for (int bucket_index = 0; bucket_index < kBuckets; bucket_index++) {
-      if (bucket[bucket_index] != nullptr) {
+      if (bucket[bucket_index].Value() != nullptr) {
         int in_bucket_count = 0;
-        uint32_t* current_bucket = bucket[bucket_index];
+        base::AtomicValue<uint32_t>* current_bucket =
+            bucket[bucket_index].Value();
         int cell_offset = bucket_index * kBitsPerBucket;
         for (int i = 0; i < kCellsPerBucket; i++, cell_offset += kBitsPerCell) {
-          if (current_bucket[i]) {
-            uint32_t cell = current_bucket[i];
+          if (current_bucket[i].Value()) {
+            uint32_t cell = current_bucket[i].Value();
             uint32_t old_cell = cell;
             uint32_t new_cell = cell;
             while (cell) {
@@ -157,12 +173,24 @@
               cell ^= bit_mask;
             }
             if (old_cell != new_cell) {
-              current_bucket[i] = new_cell;
+              while (!current_bucket[i].TrySetValue(old_cell, new_cell)) {
+                // If TrySetValue fails, the cell must have changed
+                // concurrently. Read the current value of the cell, AND it
+                // with the computed value, and retry. This is safe because
+                // this method is only called on the main thread and the
+                // filtering threads only remove slots.
+                old_cell = current_bucket[i].Value();
+                new_cell &= old_cell;
+              }
             }
           }
         }
-        if (in_bucket_count == 0) {
-          ReleaseBucket(bucket_index);
+        if (mode == PREFREE_EMPTY_BUCKETS && in_bucket_count == 0) {
+          base::LockGuard<base::Mutex> guard(&to_be_freed_buckets_mutex_);
+          base::AtomicValue<uint32_t>* bucket_ptr =
+              bucket[bucket_index].Value();
+          to_be_freed_buckets_.push(bucket_ptr);
+          bucket[bucket_index].SetValue(nullptr);
         }
         new_count += in_bucket_count;
       }
@@ -170,6 +198,15 @@
     return new_count;
   }
 
+  void FreeToBeFreedBuckets() {
+    base::LockGuard<base::Mutex> guard(&to_be_freed_buckets_mutex_);
+    while (!to_be_freed_buckets_.empty()) {
+      base::AtomicValue<uint32_t>* top = to_be_freed_buckets_.top();
+      to_be_freed_buckets_.pop();
+      DeleteArray<base::AtomicValue<uint32_t>>(top);
+    }
+  }
+
  private:
   static const int kMaxSlots = (1 << kPageSizeBits) / kPointerSize;
   static const int kCellsPerBucket = 32;
@@ -180,24 +217,26 @@
   static const int kBitsPerBucketLog2 = kCellsPerBucketLog2 + kBitsPerCellLog2;
   static const int kBuckets = kMaxSlots / kCellsPerBucket / kBitsPerCell;
 
-  uint32_t* AllocateBucket() {
-    uint32_t* result = NewArray<uint32_t>(kCellsPerBucket);
+  base::AtomicValue<uint32_t>* AllocateBucket() {
+    base::AtomicValue<uint32_t>* result =
+        NewArray<base::AtomicValue<uint32_t>>(kCellsPerBucket);
     for (int i = 0; i < kCellsPerBucket; i++) {
-      result[i] = 0;
+      result[i].SetValue(0);
     }
     return result;
   }
 
   void ReleaseBucket(int bucket_index) {
-    DeleteArray<uint32_t>(bucket[bucket_index]);
-    bucket[bucket_index] = nullptr;
+    DeleteArray<base::AtomicValue<uint32_t>>(bucket[bucket_index].Value());
+    bucket[bucket_index].SetValue(nullptr);
   }
 
-  void MaskCell(int bucket_index, int cell_index, uint32_t mask) {
+  void ClearCell(int bucket_index, int cell_index, uint32_t mask) {
     if (bucket_index < kBuckets) {
-      uint32_t* cells = bucket[bucket_index];
-      if (cells != nullptr && cells[cell_index] != 0) {
-        cells[cell_index] &= mask;
+      base::AtomicValue<uint32_t>* cells = bucket[bucket_index].Value();
+      if (cells != nullptr) {
+        uint32_t cell = cells[cell_index].Value();
+        if (cell) cells[cell_index].SetBits(0, mask);
       }
     } else {
       // GCC bug 59124: Emits wrong warnings
@@ -217,8 +256,10 @@
     *bit_index = slot & (kBitsPerCell - 1);
   }
 
-  uint32_t* bucket[kBuckets];
+  base::AtomicValue<base::AtomicValue<uint32_t>*> bucket[kBuckets];
   Address page_start_;
+  base::Mutex to_be_freed_buckets_mutex_;
+  std::stack<base::AtomicValue<uint32_t>*> to_be_freed_buckets_;
 };
 
 enum SlotType {
@@ -228,7 +269,7 @@
   CODE_TARGET_SLOT,
   CODE_ENTRY_SLOT,
   DEBUG_TARGET_SLOT,
-  NUMBER_OF_SLOT_TYPES
+  CLEARED_SLOT
 };
 
 // Data structure for maintaining a multiset of typed slots in a page.
@@ -240,51 +281,85 @@
 // typed slots contain V8 internal pointers that are not directly exposed to JS.
 class TypedSlotSet {
  public:
-  struct TypedSlot {
-    TypedSlot() : type_and_offset_(0), host_offset_(0) {}
+  enum IterationMode { PREFREE_EMPTY_CHUNKS, KEEP_EMPTY_CHUNKS };
 
-    TypedSlot(SlotType type, uint32_t host_offset, uint32_t offset)
-        : type_and_offset_(TypeField::encode(type) |
-                           OffsetField::encode(offset)),
-          host_offset_(host_offset) {}
+  typedef std::pair<SlotType, uint32_t> TypeAndOffset;
+
+  struct TypedSlot {
+    TypedSlot() {
+      type_and_offset_.SetValue(0);
+      host_offset_.SetValue(0);
+    }
+
+    TypedSlot(SlotType type, uint32_t host_offset, uint32_t offset) {
+      type_and_offset_.SetValue(TypeField::encode(type) |
+                                OffsetField::encode(offset));
+      host_offset_.SetValue(host_offset);
+    }
 
     bool operator==(const TypedSlot other) {
-      return type_and_offset_ == other.type_and_offset_ &&
-             host_offset_ == other.host_offset_;
+      return type_and_offset_.Value() == other.type_and_offset_.Value() &&
+             host_offset_.Value() == other.host_offset_.Value();
     }
 
     bool operator!=(const TypedSlot other) { return !(*this == other); }
 
-    SlotType type() { return TypeField::decode(type_and_offset_); }
+    SlotType type() { return TypeField::decode(type_and_offset_.Value()); }
 
-    uint32_t offset() { return OffsetField::decode(type_and_offset_); }
+    uint32_t offset() { return OffsetField::decode(type_and_offset_.Value()); }
 
-    uint32_t host_offset() { return host_offset_; }
+    TypeAndOffset GetTypeAndOffset() {
+      uint32_t type_and_offset = type_and_offset_.Value();
+      return std::make_pair(TypeField::decode(type_and_offset),
+                            OffsetField::decode(type_and_offset));
+    }
 
-    uint32_t type_and_offset_;
-    uint32_t host_offset_;
+    uint32_t host_offset() { return host_offset_.Value(); }
+
+    void Set(TypedSlot slot) {
+      type_and_offset_.SetValue(slot.type_and_offset_.Value());
+      host_offset_.SetValue(slot.host_offset_.Value());
+    }
+
+    void Clear() {
+      type_and_offset_.SetValue(TypeField::encode(CLEARED_SLOT) |
+                                OffsetField::encode(0));
+      host_offset_.SetValue(0);
+    }
+
+    base::AtomicValue<uint32_t> type_and_offset_;
+    base::AtomicValue<uint32_t> host_offset_;
   };
   static const int kMaxOffset = 1 << 29;
 
   explicit TypedSlotSet(Address page_start) : page_start_(page_start) {
-    chunk_ = new Chunk(nullptr, kInitialBufferSize);
+    chunk_.SetValue(new Chunk(nullptr, kInitialBufferSize));
   }
 
   ~TypedSlotSet() {
-    Chunk* chunk = chunk_;
+    Chunk* chunk = chunk_.Value();
     while (chunk != nullptr) {
-      Chunk* next = chunk->next;
+      Chunk* next = chunk->next.Value();
       delete chunk;
       chunk = next;
     }
+    FreeToBeFreedChunks();
   }
 
   // The slot offset specifies a slot at address page_start_ + offset.
+  // This method can only be called on the main thread.
   void Insert(SlotType type, uint32_t host_offset, uint32_t offset) {
     TypedSlot slot(type, host_offset, offset);
-    if (!chunk_->AddSlot(slot)) {
-      chunk_ = new Chunk(chunk_, NextCapacity(chunk_->capacity));
-      bool added = chunk_->AddSlot(slot);
+    Chunk* top_chunk = chunk_.Value();
+    if (!top_chunk) {
+      top_chunk = new Chunk(nullptr, kInitialBufferSize);
+      chunk_.SetValue(top_chunk);
+    }
+    if (!top_chunk->AddSlot(slot)) {
+      Chunk* new_top_chunk =
+          new Chunk(top_chunk, NextCapacity(top_chunk->capacity.Value()));
+      bool added = new_top_chunk->AddSlot(slot);
+      chunk_.SetValue(new_top_chunk);
       DCHECK(added);
       USE(added);
     }
@@ -300,32 +375,60 @@
   //    else return REMOVE_SLOT;
   // });
   template <typename Callback>
-  int Iterate(Callback callback) {
-    STATIC_ASSERT(NUMBER_OF_SLOT_TYPES < 8);
-    const TypedSlot kRemovedSlot(NUMBER_OF_SLOT_TYPES, 0, 0);
-    Chunk* chunk = chunk_;
+  int Iterate(Callback callback, IterationMode mode) {
+    STATIC_ASSERT(CLEARED_SLOT < 8);
+    Chunk* chunk = chunk_.Value();
+    Chunk* previous = nullptr;
     int new_count = 0;
     while (chunk != nullptr) {
-      TypedSlot* buffer = chunk->buffer;
-      int count = chunk->count;
+      TypedSlot* buffer = chunk->buffer.Value();
+      int count = chunk->count.Value();
+      bool empty = true;
       for (int i = 0; i < count; i++) {
-        TypedSlot slot = buffer[i];
-        if (slot != kRemovedSlot) {
-          SlotType type = slot.type();
-          Address addr = page_start_ + slot.offset();
-          Address host_addr = page_start_ + slot.host_offset();
+        // Order is important here. We have to read out the slot type last to
+        // observe the concurrent removal case consistently.
+        Address host_addr = page_start_ + buffer[i].host_offset();
+        TypeAndOffset type_and_offset = buffer[i].GetTypeAndOffset();
+        SlotType type = type_and_offset.first;
+        if (type != CLEARED_SLOT) {
+          Address addr = page_start_ + type_and_offset.second;
           if (callback(type, host_addr, addr) == KEEP_SLOT) {
             new_count++;
+            empty = false;
           } else {
-            buffer[i] = kRemovedSlot;
+            buffer[i].Clear();
           }
         }
       }
-      chunk = chunk->next;
+
+      Chunk* next = chunk->next.Value();
+      if (mode == PREFREE_EMPTY_CHUNKS && empty) {
+        // We remove the chunk from the list but keep its next pointer intact
+        // so that concurrent iteration can continue past it.
+        if (previous) {
+          previous->next.SetValue(next);
+        } else {
+          chunk_.SetValue(next);
+        }
+        base::LockGuard<base::Mutex> guard(&to_be_freed_chunks_mutex_);
+        to_be_freed_chunks_.push(chunk);
+      } else {
+        previous = chunk;
+      }
+      chunk = next;
     }
     return new_count;
   }
 
+  void FreeToBeFreedChunks() {
+    base::LockGuard<base::Mutex> guard(&to_be_freed_chunks_mutex_);
+    while (!to_be_freed_chunks_.empty()) {
+      Chunk* top = to_be_freed_chunks_.top();
+      to_be_freed_chunks_.pop();
+      delete top;
+    }
+  }
+
  private:
   static const int kInitialBufferSize = 100;
   static const int kMaxBufferSize = 16 * KB;
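Editor's note: analogously for the typed set, a minimal usage sketch using only members shown in this patch; page_start and the offsets are placeholders, and the keep/drop policy is arbitrary:

TypedSlotSet* typed = new TypedSlotSet(page_start);
typed->Insert(CODE_TARGET_SLOT, /* host_offset */ 0, /* offset */ 64);

int kept = typed->Iterate(
    [](SlotType type, Address host_addr, Address slot_addr) {
      // Keep code targets, drop everything else (arbitrary demo policy).
      return type == CODE_TARGET_SLOT ? KEEP_SLOT : REMOVE_SLOT;
    },
    TypedSlotSet::KEEP_EMPTY_CHUNKS);
USE(kept);

// With PREFREE_EMPTY_CHUNKS, emptied chunks are unlinked and queued instead;
// FreeToBeFreedChunks() must then run later, outside concurrent iteration.
delete typed;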
@@ -338,24 +441,34 @@
   class TypeField : public BitField<SlotType, 29, 3> {};
 
   struct Chunk : Malloced {
-    explicit Chunk(Chunk* next_chunk, int capacity)
-        : next(next_chunk), count(0), capacity(capacity) {
-      buffer = NewArray<TypedSlot>(capacity);
+    explicit Chunk(Chunk* next_chunk, int chunk_capacity) {
+      count.SetValue(0);
+      capacity.SetValue(chunk_capacity);
+      buffer.SetValue(NewArray<TypedSlot>(chunk_capacity));
+      next.SetValue(next_chunk);
     }
     bool AddSlot(TypedSlot slot) {
-      if (count == capacity) return false;
-      buffer[count++] = slot;
+      int current_count = count.Value();
+      if (current_count == capacity.Value()) return false;
+      TypedSlot* current_buffer = buffer.Value();
+      // Order is important here. We have to write the slot before increasing
+      // the counter to guarantee that concurrent threads observe a consistent
+      // state.
+      current_buffer[current_count].Set(slot);
+      count.SetValue(current_count + 1);
       return true;
     }
-    ~Chunk() { DeleteArray(buffer); }
-    Chunk* next;
-    int count;
-    int capacity;
-    TypedSlot* buffer;
+    ~Chunk() { DeleteArray(buffer.Value()); }
+    base::AtomicValue<Chunk*> next;
+    base::AtomicValue<int> count;
+    base::AtomicValue<int> capacity;
+    base::AtomicValue<TypedSlot*> buffer;
   };
 
   Address page_start_;
-  Chunk* chunk_;
+  base::AtomicValue<Chunk*> chunk_;
+  base::Mutex to_be_freed_chunks_mutex_;
+  std::stack<Chunk*> to_be_freed_chunks_;
 };
 
 }  // namespace internal
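Editor's note: putting the pieces of the reworked SlotSet together, a minimal usage sketch restricted to the members visible in this patch; page_start and the slot offset are made-up values:

SlotSet* slots = new SlotSet();
slots->SetPageStart(page_start);           // page_start: base address of the page
slots->Insert(3 * kPointerSize);           // record a slot at page_start + 3 words
DCHECK(slots->Lookup(3 * kPointerSize));

// Main-thread iteration; with PREFREE_EMPTY_BUCKETS, emptied buckets are
// queued for deferred freeing rather than released in place.
int remaining = slots->Iterate(
    [](Address slot_address) {
      return KEEP_SLOT;                    // or REMOVE_SLOT to drop the entry
    },
    SlotSet::PREFREE_EMPTY_BUCKETS);
USE(remaining);

// Once no concurrent iteration can observe the detached buckets anymore,
// release them for real.
slots->FreeToBeFreedBuckets();
delete slots;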
diff --git a/src/heap/spaces-inl.h b/src/heap/spaces-inl.h
index 0fd69da..314d22f 100644
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -165,14 +165,6 @@
 bool NewSpace::ToSpaceContains(Object* o) { return to_space_.Contains(o); }
 bool NewSpace::FromSpaceContains(Object* o) { return from_space_.Contains(o); }
 
-// --------------------------------------------------------------------------
-// AllocationResult
-
-AllocationSpace AllocationResult::RetrySpace() {
-  DCHECK(IsRetry());
-  return static_cast<AllocationSpace>(Smi::cast(object_)->value());
-}
-
 Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
                        SemiSpace* owner) {
   DCHECK_EQ(executable, Executability::NOT_EXECUTABLE);
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 95d5687..c2043ed 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -398,7 +398,7 @@
   // We cannot free memory chunks in new space while the sweeper is running
   // since a sweeper thread might be stuck right before trying to lock the
   // corresponding page.
-  return !chunk->InNewSpace() || (mc == nullptr) ||
+  return !chunk->InNewSpace() || (mc == nullptr) || !FLAG_concurrent_sweeping ||
          mc->sweeper().IsSweepingCompleted();
 }
 
@@ -446,7 +446,7 @@
   base::VirtualMemory reservation(size, alignment);
 
   if (!reservation.IsReserved()) return NULL;
-  size_.Increment(static_cast<intptr_t>(reservation.size()));
+  size_.Increment(reservation.size());
   Address base =
       RoundUp(static_cast<Address>(reservation.address()), alignment);
   controller->TakeControl(&reservation);
@@ -505,12 +505,12 @@
   chunk->size_ = size;
   chunk->area_start_ = area_start;
   chunk->area_end_ = area_end;
-  chunk->flags_ = 0;
+  chunk->flags_ = Flags(NO_FLAGS);
   chunk->set_owner(owner);
   chunk->InitializeReservedMemory();
-  chunk->old_to_new_slots_ = nullptr;
+  chunk->old_to_new_slots_.SetValue(nullptr);
   chunk->old_to_old_slots_ = nullptr;
-  chunk->typed_old_to_new_slots_ = nullptr;
+  chunk->typed_old_to_new_slots_.SetValue(nullptr);
   chunk->typed_old_to_old_slots_ = nullptr;
   chunk->skip_list_ = nullptr;
   chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
@@ -528,7 +528,6 @@
   chunk->black_area_end_marker_map_ = nullptr;
 
   DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
-  DCHECK(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
 
   if (executable == EXECUTABLE) {
     chunk->SetFlag(IS_EXECUTABLE);
@@ -617,6 +616,21 @@
   set_next_chunk(NULL);
 }
 
+void MemoryAllocator::ShrinkChunk(MemoryChunk* chunk, size_t bytes_to_shrink) {
+  DCHECK_GE(bytes_to_shrink, static_cast<size_t>(base::OS::CommitPageSize()));
+  DCHECK_EQ(0, bytes_to_shrink % base::OS::CommitPageSize());
+  Address free_start = chunk->area_end_ - bytes_to_shrink;
+  // Don't adjust the size of the page. The area is just uncommitted but not
+  // released.
+  chunk->area_end_ -= bytes_to_shrink;
+  UncommitBlock(free_start, bytes_to_shrink);
+  if (chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
+    if (chunk->reservation_.IsReserved())
+      chunk->reservation_.Guard(chunk->area_end_);
+    else
+      base::OS::Guard(chunk->area_end_, base::OS::CommitPageSize());
+  }
+}
 
 MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t reserve_area_size,
                                             intptr_t commit_area_size,
@@ -667,8 +681,7 @@
                  CodePageGuardSize();
 
     // Check executable memory limit.
-    if ((size_executable_.Value() + static_cast<intptr_t>(chunk_size)) >
-        capacity_executable_) {
+    if ((size_executable_.Value() + chunk_size) > capacity_executable_) {
       LOG(isolate_, StringEvent("MemoryAllocator::AllocateRawMemory",
                                 "V8 Executable Allocation capacity exceeded"));
       return NULL;
@@ -691,16 +704,16 @@
       DCHECK(
           IsAligned(reinterpret_cast<intptr_t>(base), MemoryChunk::kAlignment));
       if (base == NULL) return NULL;
-      size_.Increment(static_cast<intptr_t>(chunk_size));
+      size_.Increment(chunk_size);
       // Update executable memory size.
-      size_executable_.Increment(static_cast<intptr_t>(chunk_size));
+      size_executable_.Increment(chunk_size);
     } else {
       base = AllocateAlignedMemory(chunk_size, commit_size,
                                    MemoryChunk::kAlignment, executable,
                                    &reservation);
       if (base == NULL) return NULL;
       // Update executable memory size.
-      size_executable_.Increment(static_cast<intptr_t>(reservation.size()));
+      size_executable_.Increment(reservation.size());
     }
 
     if (Heap::ShouldZapGarbage()) {
@@ -745,9 +758,9 @@
     last_chunk_.TakeControl(&reservation);
     UncommitBlock(reinterpret_cast<Address>(last_chunk_.address()),
                   last_chunk_.size());
-    size_.Increment(-static_cast<intptr_t>(chunk_size));
+    size_.Decrement(chunk_size);
     if (executable == EXECUTABLE) {
-      size_executable_.Increment(-static_cast<intptr_t>(chunk_size));
+      size_executable_.Decrement(chunk_size);
     }
     CHECK(last_chunk_.IsReserved());
     return AllocateChunk(reserve_area_size, commit_area_size, executable,
@@ -764,6 +777,53 @@
   available_in_free_list_ = 0;
 }
 
+size_t Page::ShrinkToHighWaterMark() {
+  // Shrink the page to its high water mark. The water mark points either at
+  // a filler or at area_end.
+  HeapObject* filler = HeapObject::FromAddress(HighWaterMark());
+  if (filler->address() == area_end()) return 0;
+  CHECK(filler->IsFiller());
+  if (!filler->IsFreeSpace()) return 0;
+
+#ifdef DEBUG
+  // Check that the filler is indeed the last filler on the page.
+  HeapObjectIterator it(this);
+  HeapObject* filler2 = nullptr;
+  for (HeapObject* obj = it.Next(); obj != nullptr; obj = it.Next()) {
+    filler2 = HeapObject::FromAddress(obj->address() + obj->Size());
+  }
+  if (filler2 == nullptr || filler2->address() == area_end()) return 0;
+  DCHECK(filler2->IsFiller());
+  // The deserializer might leave behind fillers. In this case we need to
+  // iterate even further.
+  while ((filler2->address() + filler2->Size()) != area_end()) {
+    filler2 = HeapObject::FromAddress(filler2->address() + filler2->Size());
+    DCHECK(filler2->IsFiller());
+  }
+  DCHECK_EQ(filler->address(), filler2->address());
+#endif  // DEBUG
+
+  size_t unused = RoundDown(
+      static_cast<size_t>(area_end() - filler->address() - FreeSpace::kSize),
+      base::OS::CommitPageSize());
+  if (unused > 0) {
+    if (FLAG_trace_gc_verbose) {
+      PrintIsolate(heap()->isolate(), "Shrinking page %p: end %p -> %p\n",
+                   reinterpret_cast<void*>(this),
+                   reinterpret_cast<void*>(area_end()),
+                   reinterpret_cast<void*>(area_end() - unused));
+    }
+    heap()->CreateFillerObjectAt(
+        filler->address(),
+        static_cast<int>(area_end() - filler->address() - unused),
+        ClearRecordedSlots::kNo);
+    heap()->memory_allocator()->ShrinkChunk(this, unused);
+    CHECK(filler->IsFiller());
+    CHECK_EQ(filler->address() + filler->Size(), area_end());
+  }
+  return unused;
+}
+
 void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
                                         Address start_free) {
   // We do not allow partial shrink for code.
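Editor's note: a small worked example of the unused computation in ShrinkToHighWaterMark, with made-up numbers (4 KB commit pages and a 16-byte stand-in for FreeSpace::kSize):

const size_t commit_page_size = 4096;
const size_t free_space_header = 16;       // stand-in for FreeSpace::kSize
const uintptr_t area_end = 0x40000;        // hypothetical page area end
const uintptr_t filler = 0x31010;          // hypothetical last filler address
size_t unused =
    ((area_end - filler - free_space_header) / commit_page_size) *
    commit_page_size;                      // == 0xE000, i.e. 14 whole pages
// A filler spanning [filler, area_end - unused) is kept so heap iteration
// still sees a contiguous, parseable page before the tail is uncommitted.

Only whole committed pages past the high water mark are given back; the remainder stays covered by the filler.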
@@ -776,8 +836,8 @@
 
   size_t to_free_size = size - (start_free - chunk->address());
 
-  DCHECK(size_.Value() >= static_cast<intptr_t>(to_free_size));
-  size_.Increment(-static_cast<intptr_t>(to_free_size));
+  DCHECK(size_.Value() >= to_free_size);
+  size_.Decrement(to_free_size);
   isolate_->counters()->memory_allocated()->Decrement(
       static_cast<int>(to_free_size));
   chunk->set_size(size - to_free_size);
@@ -792,20 +852,15 @@
   isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk),
                                          chunk->IsEvacuationCandidate());
 
-  intptr_t size;
   base::VirtualMemory* reservation = chunk->reserved_memory();
-  if (reservation->IsReserved()) {
-    size = static_cast<intptr_t>(reservation->size());
-  } else {
-    size = static_cast<intptr_t>(chunk->size());
-  }
-  DCHECK(size_.Value() >= size);
-  size_.Increment(-size);
+  const size_t size =
+      reservation->IsReserved() ? reservation->size() : chunk->size();
+  DCHECK_GE(size_.Value(), static_cast<size_t>(size));
+  size_.Decrement(size);
   isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
-
   if (chunk->executable() == EXECUTABLE) {
-    DCHECK(size_executable_.Value() >= size);
-    size_executable_.Increment(-size);
+    DCHECK_GE(size_executable_.Value(), size);
+    size_executable_.Decrement(size);
   }
 
   chunk->SetFlag(MemoryChunk::PRE_FREED);
@@ -938,10 +993,9 @@
 
 #ifdef DEBUG
 void MemoryAllocator::ReportStatistics() {
-  intptr_t size = Size();
+  size_t size = Size();
   float pct = static_cast<float>(capacity_ - size) / capacity_;
-  PrintF("  capacity: %" V8PRIdPTR ", used: %" V8PRIdPTR
-         ", available: %%%d\n\n",
+  PrintF("  capacity: %zu, used: %zu, available: %%%d\n\n",
          capacity_, size, static_cast<int>(pct * 100));
 }
 #endif
@@ -1014,9 +1068,9 @@
     delete mutex_;
     mutex_ = nullptr;
   }
-  if (old_to_new_slots_ != nullptr) ReleaseOldToNewSlots();
+  if (old_to_new_slots_.Value() != nullptr) ReleaseOldToNewSlots();
   if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
-  if (typed_old_to_new_slots_ != nullptr) ReleaseTypedOldToNewSlots();
+  if (typed_old_to_new_slots_.Value() != nullptr) ReleaseTypedOldToNewSlots();
   if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
   if (local_tracker_ != nullptr) ReleaseLocalTracker();
 }
@@ -1032,13 +1086,14 @@
 }
 
 void MemoryChunk::AllocateOldToNewSlots() {
-  DCHECK(nullptr == old_to_new_slots_);
-  old_to_new_slots_ = AllocateSlotSet(size_, address());
+  DCHECK(nullptr == old_to_new_slots_.Value());
+  old_to_new_slots_.SetValue(AllocateSlotSet(size_, address()));
 }
 
 void MemoryChunk::ReleaseOldToNewSlots() {
-  delete[] old_to_new_slots_;
-  old_to_new_slots_ = nullptr;
+  SlotSet* old_to_new_slots = old_to_new_slots_.Value();
+  delete[] old_to_new_slots;
+  old_to_new_slots_.SetValue(nullptr);
 }
 
 void MemoryChunk::AllocateOldToOldSlots() {
@@ -1052,13 +1107,14 @@
 }
 
 void MemoryChunk::AllocateTypedOldToNewSlots() {
-  DCHECK(nullptr == typed_old_to_new_slots_);
-  typed_old_to_new_slots_ = new TypedSlotSet(address());
+  DCHECK(nullptr == typed_old_to_new_slots_.Value());
+  typed_old_to_new_slots_.SetValue(new TypedSlotSet(address()));
 }
 
 void MemoryChunk::ReleaseTypedOldToNewSlots() {
-  delete typed_old_to_new_slots_;
-  typed_old_to_new_slots_ = nullptr;
+  TypedSlotSet* typed_old_to_new_slots = typed_old_to_new_slots_.Value();
+  delete typed_old_to_new_slots;
+  typed_old_to_new_slots_.SetValue(nullptr);
 }
 
 void MemoryChunk::AllocateTypedOldToOldSlots() {
@@ -1235,18 +1291,29 @@
   return Smi::FromInt(0);
 }
 
-bool PagedSpace::Expand() {
-  int size = AreaSize();
-  if (snapshotable() && !HasPages()) {
-    size = Snapshot::SizeOfFirstPage(heap()->isolate(), identity());
+void PagedSpace::ShrinkImmortalImmovablePages() {
+  DCHECK(!heap()->deserialization_complete());
+  MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
+  EmptyAllocationInfo();
+  ResetFreeList();
+
+  for (Page* page : *this) {
+    DCHECK(page->IsFlagSet(Page::NEVER_EVACUATE));
+    size_t unused = page->ShrinkToHighWaterMark();
+    accounting_stats_.DecreaseCapacity(static_cast<intptr_t>(unused));
+    AccountUncommitted(unused);
   }
+}
+
+bool PagedSpace::Expand() {
+  const int size = AreaSize();
 
   if (!heap()->CanExpandOldGeneration(size)) return false;
 
   Page* p = heap()->memory_allocator()->AllocatePage(size, this, executable());
   if (p == nullptr) return false;
 
-  AccountCommitted(static_cast<intptr_t>(p->size()));
+  AccountCommitted(p->size());
 
   // Pages created during bootstrapping may contain immortal immovable objects.
   if (!heap()->deserialization_complete()) p->MarkNeverEvacuate();
@@ -1336,7 +1403,6 @@
 
 void PagedSpace::ReleasePage(Page* page) {
   DCHECK_EQ(page->LiveBytes(), 0);
-  DCHECK_EQ(AreaSize(), page->area_size());
   DCHECK_EQ(page->owner(), this);
 
   free_list_.EvictFreeListItems(page);
@@ -1354,11 +1420,13 @@
     page->Unlink();
   }
 
-  AccountUncommitted(static_cast<intptr_t>(page->size()));
+  AccountUncommitted(page->size());
+  accounting_stats_.ShrinkSpace(page->area_size());
   heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>(page);
+}
 
-  DCHECK(Capacity() > 0);
-  accounting_stats_.ShrinkSpace(AreaSize());
+std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator() {
+  return std::unique_ptr<ObjectIterator>(new HeapObjectIterator(this));
 }
 
 #ifdef DEBUG
@@ -1481,7 +1549,7 @@
 
 
 void NewSpace::Shrink() {
-  int new_capacity = Max(InitialTotalCapacity(), 2 * SizeAsInt());
+  int new_capacity = Max(InitialTotalCapacity(), 2 * static_cast<int>(Size()));
   int rounded_new_capacity = RoundUp(new_capacity, Page::kPageSize);
   if (rounded_new_capacity < TotalCapacity() &&
       to_space_.ShrinkTo(rounded_new_capacity)) {
@@ -1747,6 +1815,10 @@
   }
 }
 
+std::unique_ptr<ObjectIterator> NewSpace::GetObjectIterator() {
+  return std::unique_ptr<ObjectIterator>(new SemiSpaceIterator(this));
+}
+
 #ifdef VERIFY_HEAP
 // We do not use the SemiSpaceIterator because verification doesn't assume
 // that it works (it depends on the invariants we are checking).
@@ -1903,7 +1975,7 @@
     new_page->SetFlags(last_page->GetFlags(), Page::kCopyOnFlipFlagsMask);
     last_page = new_page;
   }
-  AccountCommitted(static_cast<intptr_t>(delta));
+  AccountCommitted(delta);
   current_capacity_ = new_capacity;
   return true;
 }
@@ -1940,7 +2012,7 @@
           last_page);
       delta_pages--;
     }
-    AccountUncommitted(static_cast<intptr_t>(delta));
+    AccountUncommitted(delta);
     heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
   }
   current_capacity_ = new_capacity;
@@ -2010,7 +2082,6 @@
   from->FixPagesFlags(0, 0);
 }
 
-
 void SemiSpace::set_age_mark(Address mark) {
   DCHECK_EQ(Page::FromAllocationAreaAddress(mark)->owner(), this);
   age_mark_ = mark;
@@ -2020,6 +2091,11 @@
   }
 }
 
+std::unique_ptr<ObjectIterator> SemiSpace::GetObjectIterator() {
+  // Use the NewSpace::NewObjectIterator to iterate the ToSpace.
+  UNREACHABLE();
+  return std::unique_ptr<ObjectIterator>();
+}
 
 #ifdef DEBUG
 void SemiSpace::Print() {}
@@ -2490,14 +2566,13 @@
   // Don't free list allocate if there is linear space available.
   DCHECK(owner_->limit() - owner_->top() < size_in_bytes);
 
-  int old_linear_size = static_cast<int>(owner_->limit() - owner_->top());
   // Mark the old linear allocation area with a free space map so it can be
   // skipped when scanning the heap.  This also puts it back in the free list
   // if it is big enough.
   owner_->EmptyAllocationInfo();
 
-  owner_->heap()->incremental_marking()->OldSpaceStep(size_in_bytes -
-                                                      old_linear_size);
+  owner_->heap()->StartIncrementalMarkingIfAllocationLimitIsReached(
+      Heap::kNoGCFlags, kNoGCCallbackFlags);
 
   int new_node_size = 0;
   FreeSpace* new_node = FindNodeFor(size_in_bytes, &new_node_size);
@@ -2778,19 +2853,7 @@
     }
   }
 
-  // Free list allocation failed and there is no next page.  Fail if we have
-  // hit the old generation size limit that should cause a garbage
-  // collection.
-  if (!heap()->always_allocate() &&
-      heap()->OldGenerationAllocationLimitReached()) {
-    // If sweeper threads are active, wait for them at that point and steal
-    // elements form their free-lists.
-    HeapObject* object = SweepAndRetryAllocation(size_in_bytes);
-    return object;
-  }
-
-  // Try to expand the space and allocate in the new next page.
-  if (Expand()) {
+  if (heap()->ShouldExpandOldGenerationOnAllocationFailure() && Expand()) {
     DCHECK((CountTotalPages() > 1) ||
            (size_in_bytes <= free_list_.Available()));
     return free_list_.Allocate(size_in_bytes);
@@ -2874,7 +2937,7 @@
       size_(0),
       page_count_(0),
       objects_size_(0),
-      chunk_map_(base::HashMap::PointersMatch, 1024) {}
+      chunk_map_(1024) {}
 
 LargeObjectSpace::~LargeObjectSpace() {}
 
@@ -2914,7 +2977,7 @@
   DCHECK(page->area_size() >= object_size);
 
   size_ += static_cast<int>(page->size());
-  AccountCommitted(static_cast<intptr_t>(page->size()));
+  AccountCommitted(page->size());
   objects_size_ += object_size;
   page_count_++;
   page->set_next_page(first_page_);
@@ -2933,7 +2996,8 @@
     reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
   }
 
-  heap()->incremental_marking()->OldSpaceStep(object_size);
+  heap()->StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags,
+                                                            kNoGCCallbackFlags);
   AllocationStep(object->address(), object_size);
 
   if (heap()->incremental_marking()->black_allocation()) {
@@ -3050,7 +3114,7 @@
 
       // Free the chunk.
       size_ -= static_cast<int>(page->size());
-      AccountUncommitted(static_cast<intptr_t>(page->size()));
+      AccountUncommitted(page->size());
       objects_size_ -= object->Size();
       page_count_--;
 
@@ -3072,6 +3136,9 @@
   return owned;
 }
 
+std::unique_ptr<ObjectIterator> LargeObjectSpace::GetObjectIterator() {
+  return std::unique_ptr<ObjectIterator>(new LargeObjectIterator(this));
+}
 
 #ifdef VERIFY_HEAP
 // We do not assume that the large object iterator works, because it depends
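Editor's note: the new GetObjectIterator overrides give every space a uniform iteration entry point. A hedged usage sketch, assuming ObjectIterator exposes the same Next() protocol that the HeapObjectIterator debug loop above relies on:

std::unique_ptr<ObjectIterator> iterator = space->GetObjectIterator();
for (HeapObject* obj = iterator->Next(); obj != nullptr;
     obj = iterator->Next()) {
  // Process obj; large object space hands out a LargeObjectIterator, paged
  // spaces a HeapObjectIterator, and new space a SemiSpaceIterator.
}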
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index de5ea1b..732ba7e 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -16,6 +16,8 @@
 #include "src/base/hashmap.h"
 #include "src/base/platform/mutex.h"
 #include "src/flags.h"
+#include "src/globals.h"
+#include "src/heap/heap.h"
 #include "src/heap/marking.h"
 #include "src/list.h"
 #include "src/objects.h"
@@ -57,7 +59,7 @@
 // area.
 //
 // There is a separate large object space for objects larger than
-// Page::kMaxRegularHeapObjectSize, so that they do not have to move during
+// kMaxRegularHeapObjectSize, so that they do not have to move during
 // collection. The large object space is paged. Pages in large object space
 // may be larger than the page size.
 //
@@ -105,7 +107,7 @@
   DCHECK((OffsetFrom(address) & kObjectAlignmentMask) == 0)
 
 #define DCHECK_OBJECT_SIZE(size) \
-  DCHECK((0 < size) && (size <= Page::kMaxRegularHeapObjectSize))
+  DCHECK((0 < size) && (size <= kMaxRegularHeapObjectSize))
 
 #define DCHECK_CODEOBJECT_SIZE(size, code_space) \
   DCHECK((0 < size) && (size <= code_space->AreaSize()))
@@ -227,62 +229,75 @@
 // any heap object.
 class MemoryChunk {
  public:
-  enum MemoryChunkFlags {
-    IS_EXECUTABLE,
-    POINTERS_TO_HERE_ARE_INTERESTING,
-    POINTERS_FROM_HERE_ARE_INTERESTING,
-    IN_FROM_SPACE,  // Mutually exclusive with IN_TO_SPACE.
-    IN_TO_SPACE,    // All pages in new space has one of these two set.
-    NEW_SPACE_BELOW_AGE_MARK,
-    EVACUATION_CANDIDATE,
-    NEVER_EVACUATE,  // May contain immortal immutables.
+  enum Flag {
+    NO_FLAGS = 0u,
+    IS_EXECUTABLE = 1u << 0,
+    POINTERS_TO_HERE_ARE_INTERESTING = 1u << 1,
+    POINTERS_FROM_HERE_ARE_INTERESTING = 1u << 2,
+    // A page in new space has one of the next two flags set.
+    IN_FROM_SPACE = 1u << 3,
+    IN_TO_SPACE = 1u << 4,
+    NEW_SPACE_BELOW_AGE_MARK = 1u << 5,
+    EVACUATION_CANDIDATE = 1u << 6,
+    NEVER_EVACUATE = 1u << 7,
 
     // Large objects can have a progress bar in their page header. These object
     // are scanned in increments and will be kept black while being scanned.
     // Even if the mutator writes to them they will be kept black and a white
     // to grey transition is performed in the value.
-    HAS_PROGRESS_BAR,
+    HAS_PROGRESS_BAR = 1u << 8,
 
     // |PAGE_NEW_OLD_PROMOTION|: A page tagged with this flag has been promoted
     // from new to old space during evacuation.
-    PAGE_NEW_OLD_PROMOTION,
+    PAGE_NEW_OLD_PROMOTION = 1u << 9,
 
     // |PAGE_NEW_NEW_PROMOTION|: A page tagged with this flag has been moved
     // within the new space during evacuation.
-    PAGE_NEW_NEW_PROMOTION,
+    PAGE_NEW_NEW_PROMOTION = 1u << 10,
 
     // This flag is intended to be used for testing. Works only when both
     // FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
     // are set. It forces the page to become an evacuation candidate at next
     // candidates selection cycle.
-    FORCE_EVACUATION_CANDIDATE_FOR_TESTING,
+    FORCE_EVACUATION_CANDIDATE_FOR_TESTING = 1u << 11,
 
     // This flag is intended to be used for testing.
-    NEVER_ALLOCATE_ON_PAGE,
+    NEVER_ALLOCATE_ON_PAGE = 1u << 12,
 
     // The memory chunk is already logically freed, however the actual freeing
     // still has to be performed.
-    PRE_FREED,
+    PRE_FREED = 1u << 13,
 
     // |POOLED|: When actually freeing this chunk, only uncommit and do not
     // give up the reservation as we still reuse the chunk at some point.
-    POOLED,
+    POOLED = 1u << 14,
 
     // |COMPACTION_WAS_ABORTED|: Indicates that the compaction in this page
     //   has been aborted and needs special handling by the sweeper.
-    COMPACTION_WAS_ABORTED,
+    COMPACTION_WAS_ABORTED = 1u << 15,
 
     // |COMPACTION_WAS_ABORTED_FOR_TESTING|: During stress testing evacuation
     // on pages is sometimes aborted. The flag is used to avoid repeatedly
     // triggering on the same page.
-    COMPACTION_WAS_ABORTED_FOR_TESTING,
+    COMPACTION_WAS_ABORTED_FOR_TESTING = 1u << 16,
 
     // |ANCHOR|: Flag is set if page is an anchor.
-    ANCHOR,
-
-    // Last flag, keep at bottom.
-    NUM_MEMORY_CHUNK_FLAGS
+    ANCHOR = 1u << 17,
   };
+  typedef base::Flags<Flag, uintptr_t> Flags;
+
+  static const int kPointersToHereAreInterestingMask =
+      POINTERS_TO_HERE_ARE_INTERESTING;
+
+  static const int kPointersFromHereAreInterestingMask =
+      POINTERS_FROM_HERE_ARE_INTERESTING;
+
+  static const int kEvacuationCandidateMask = EVACUATION_CANDIDATE;
+
+  static const int kIsInNewSpaceMask = IN_FROM_SPACE | IN_TO_SPACE;
+
+  static const int kSkipEvacuationSlotsRecordingMask =
+      kEvacuationCandidateMask | kIsInNewSpaceMask;
 
   // |kSweepingDone|: The page state when sweeping is complete or sweeping must
   //   not be performed on that page. Sweeper threads that are done with their
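Editor's note: because the flags are now bit values rather than bit indices, the masks above are plain ORs of enum values. A small static check of the composition (the numeric literals are just the expected results, not new constants):

constexpr uintptr_t kInFromSpace = uintptr_t{1} << 3;          // IN_FROM_SPACE
constexpr uintptr_t kInToSpace = uintptr_t{1} << 4;            // IN_TO_SPACE
constexpr uintptr_t kEvacuationCandidate = uintptr_t{1} << 6;  // EVACUATION_CANDIDATE

static_assert((kInFromSpace | kInToSpace) == 0x18,
              "kIsInNewSpaceMask combines the two new-space bits");
static_assert((kEvacuationCandidate | kInFromSpace | kInToSpace) == 0x58,
              "kSkipEvacuationSlotsRecordingMask adds the evacuation bit");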
@@ -300,17 +315,6 @@
   // whether we have hit the limit and should do some more marking.
   static const int kWriteBarrierCounterGranularity = 500;
 
-  static const int kPointersToHereAreInterestingMask =
-      1 << POINTERS_TO_HERE_ARE_INTERESTING;
-
-  static const int kPointersFromHereAreInterestingMask =
-      1 << POINTERS_FROM_HERE_ARE_INTERESTING;
-
-  static const int kEvacuationCandidateMask = 1 << EVACUATION_CANDIDATE;
-
-  static const int kSkipEvacuationSlotsRecordingMask =
-      (1 << EVACUATION_CANDIDATE) | (1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE);
-
   static const intptr_t kAlignment =
       (static_cast<uintptr_t>(1) << kPageSizeBits);
 
@@ -320,25 +324,21 @@
 
   static const intptr_t kFlagsOffset = kSizeOffset + kPointerSize;
 
-  static const intptr_t kLiveBytesOffset =
+  static const size_t kWriteBarrierCounterOffset =
       kSizeOffset + kPointerSize  // size_t size
-      + kIntptrSize               // intptr_t flags_
+      + kIntptrSize               // Flags flags_
       + kPointerSize              // Address area_start_
       + kPointerSize              // Address area_end_
       + 2 * kPointerSize          // base::VirtualMemory reservation_
       + kPointerSize              // Address owner_
       + kPointerSize              // Heap* heap_
-      + kIntSize;                 // int progress_bar_
-
-  static const size_t kOldToNewSlotsOffset =
-      kLiveBytesOffset + kIntSize;  // int live_byte_count_
-
-  static const size_t kWriteBarrierCounterOffset =
-      kOldToNewSlotsOffset + kPointerSize  // SlotSet* old_to_new_slots_;
-      + kPointerSize                       // SlotSet* old_to_old_slots_;
-      + kPointerSize   // TypedSlotSet* typed_old_to_new_slots_;
-      + kPointerSize   // TypedSlotSet* typed_old_to_old_slots_;
-      + kPointerSize;  // SkipList* skip_list_;
+      + kIntSize                  // int progress_bar_
+      + kIntSize                  // int live_bytes_count_
+      + kPointerSize              // SlotSet* old_to_new_slots_;
+      + kPointerSize              // SlotSet* old_to_old_slots_;
+      + kPointerSize              // TypedSlotSet* typed_old_to_new_slots_;
+      + kPointerSize              // TypedSlotSet* typed_old_to_old_slots_;
+      + kPointerSize;             // SkipList* skip_list_;
 
   static const size_t kMinHeaderSize =
       kWriteBarrierCounterOffset +
@@ -351,7 +351,7 @@
       + kPointerSize      // AtomicValue prev_chunk_
       // FreeListCategory categories_[kNumberOfCategories]
       + FreeListCategory::kSize * kNumberOfCategories +
-      kPointerSize  // LocalArrayBufferTracker* local_tracker_;
+      kPointerSize  // LocalArrayBufferTracker* local_tracker_
       // std::unordered_set<Address>* black_area_end_marker_map_
       + kPointerSize;
 
@@ -453,17 +453,17 @@
 
   inline void set_skip_list(SkipList* skip_list) { skip_list_ = skip_list; }
 
-  inline SlotSet* old_to_new_slots() { return old_to_new_slots_; }
+  inline SlotSet* old_to_new_slots() { return old_to_new_slots_.Value(); }
   inline SlotSet* old_to_old_slots() { return old_to_old_slots_; }
   inline TypedSlotSet* typed_old_to_new_slots() {
-    return typed_old_to_new_slots_;
+    return typed_old_to_new_slots_.Value();
   }
   inline TypedSlotSet* typed_old_to_old_slots() {
     return typed_old_to_old_slots_;
   }
   inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
 
-  void AllocateOldToNewSlots();
+  V8_EXPORT_PRIVATE void AllocateOldToNewSlots();
   void ReleaseOldToNewSlots();
   void AllocateOldToOldSlots();
   void ReleaseOldToOldSlots();
@@ -498,7 +498,6 @@
   void ResetProgressBar() {
     if (IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
       set_progress_bar(0);
-      ClearFlag(MemoryChunk::HAS_PROGRESS_BAR);
     }
   }
 
@@ -518,22 +517,18 @@
 
   void PrintMarkbits() { markbits()->Print(); }
 
-  void SetFlag(int flag) { flags_ |= static_cast<uintptr_t>(1) << flag; }
-
-  void ClearFlag(int flag) { flags_ &= ~(static_cast<uintptr_t>(1) << flag); }
-
-  bool IsFlagSet(int flag) {
-    return (flags_ & (static_cast<uintptr_t>(1) << flag)) != 0;
-  }
+  void SetFlag(Flag flag) { flags_ |= flag; }
+  void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); }
+  bool IsFlagSet(Flag flag) { return flags_ & flag; }
 
   // Set or clear multiple flags at a time. The flags in the mask are set to
   // the value in "flags", the rest retain the current value in |flags_|.
-  void SetFlags(intptr_t flags, intptr_t mask) {
-    flags_ = (flags_ & ~mask) | (flags & mask);
+  void SetFlags(uintptr_t flags, uintptr_t mask) {
+    flags_ = (flags_ & ~Flags(mask)) | (Flags(flags) & Flags(mask));
   }
 
   // Return all current flags.
-  intptr_t GetFlags() { return flags_; }
+  uintptr_t GetFlags() { return flags_; }
 
   bool NeverEvacuate() { return IsFlagSet(NEVER_EVACUATE); }
 
@@ -557,9 +552,7 @@
     return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
   }
 
-  bool InNewSpace() {
-    return (flags_ & ((1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE))) != 0;
-  }
+  bool InNewSpace() { return (flags_ & kIsInNewSpaceMask) != 0; }
 
   bool InToSpace() { return IsFlagSet(IN_TO_SPACE); }
 
@@ -634,7 +627,7 @@
   base::VirtualMemory* reserved_memory() { return &reservation_; }
 
   size_t size_;
-  intptr_t flags_;
+  Flags flags_;
 
   // Start and end of allocatable memory on this chunk.
   Address area_start_;
@@ -660,9 +653,9 @@
   // A single slot set for small pages (of size kPageSize) or an array of slot
   // set for large pages. In the latter case the number of entries in the array
   // is ceil(size() / kPageSize).
-  SlotSet* old_to_new_slots_;
+  base::AtomicValue<SlotSet*> old_to_new_slots_;
   SlotSet* old_to_old_slots_;
-  TypedSlotSet* typed_old_to_new_slots_;
+  base::AtomicValue<TypedSlotSet*> typed_old_to_new_slots_;
   TypedSlotSet* typed_old_to_old_slots_;
 
   SkipList* skip_list_;
@@ -700,6 +693,11 @@
   friend class MemoryChunkValidator;
 };
 
+DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags)
+
+static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
+              "kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory");
+
 // -----------------------------------------------------------------------------
 // A page is a memory chunk of a size 1MB. Large object pages may be larger.
 //
@@ -712,17 +710,8 @@
 
   // Page flags copied from from-space to to-space when flipping semispaces.
   static const intptr_t kCopyOnFlipFlagsMask =
-      (1 << MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING) |
-      (1 << MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
-
-  // Maximum object size that gets allocated into regular pages. Objects larger
-  // than that size are allocated in large object space and are never moved in
-  // memory. This also applies to new space allocation, since objects are never
-  // migrated from new space to large object space. Takes double alignment into
-  // account.
-  // TODO(hpayer): This limit should be way smaller but we currently have
-  // short living objects >256K.
-  static const int kMaxRegularHeapObjectSize = 600 * KB;
+      static_cast<intptr_t>(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING) |
+      static_cast<intptr_t>(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
 
   static inline Page* ConvertNewToOld(Page* old_page, PagedSpace* new_owner);
 
@@ -823,6 +812,8 @@
     available_in_free_list_.Increment(available);
   }
 
+  size_t ShrinkToHighWaterMark();
+
 #ifdef DEBUG
   void Print();
 #endif  // DEBUG
@@ -918,9 +909,9 @@
 
   // Return the total amount committed memory for this space, i.e., allocatable
   // memory and page headers.
-  virtual intptr_t CommittedMemory() { return committed_; }
+  virtual size_t CommittedMemory() { return committed_; }
 
-  virtual intptr_t MaximumCommittedMemory() { return max_committed_; }
+  virtual size_t MaximumCommittedMemory() { return max_committed_; }
 
   // Returns allocated size.
   virtual intptr_t Size() = 0;
@@ -943,18 +934,19 @@
     }
   }
 
-  void AccountCommitted(intptr_t bytes) {
-    DCHECK_GE(bytes, 0);
+  virtual std::unique_ptr<ObjectIterator> GetObjectIterator() = 0;
+
+  void AccountCommitted(size_t bytes) {
+    DCHECK_GE(committed_ + bytes, committed_);
     committed_ += bytes;
     if (committed_ > max_committed_) {
       max_committed_ = committed_;
     }
   }
 
-  void AccountUncommitted(intptr_t bytes) {
-    DCHECK_GE(bytes, 0);
+  void AccountUncommitted(size_t bytes) {
+    DCHECK_GE(committed_, committed_ - bytes);
     committed_ -= bytes;
-    DCHECK_GE(committed_, 0);
   }
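
The size_t accounting above trades the old DCHECK_GE(bytes, 0) for wraparound checks: with unsigned arithmetic, committed_ + bytes compares less than committed_ only when the addition overflowed, and committed_ - bytes compares greater than committed_ only when bytes exceeded committed_. A small self-contained sketch of that idiom (names invented for illustration, not V8's):

#include <cassert>
#include <cstddef>
#include <limits>

// Mirrors the shape of AccountCommitted/AccountUncommitted above:
// each check fires exactly when the unsigned arithmetic would wrap.
bool AddWouldOverflow(std::size_t committed, std::size_t bytes) {
  return committed + bytes < committed;  // wraps only on overflow
}

bool SubWouldUnderflow(std::size_t committed, std::size_t bytes) {
  return committed - bytes > committed;  // wraps only when bytes > committed
}

int main() {
  assert(!AddWouldOverflow(100, 28));
  assert(AddWouldOverflow(std::numeric_limits<std::size_t>::max(), 1));
  assert(!SubWouldUnderflow(128, 28));
  assert(SubWouldUnderflow(28, 128));
  return 0;
}
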
 
 #ifdef DEBUG
@@ -971,8 +963,8 @@
   Executability executable_;
 
   // Keeps track of committed memory in a space.
-  intptr_t committed_;
-  intptr_t max_committed_;
+  size_t committed_;
+  size_t max_committed_;
 
   DISALLOW_COPY_AND_ASSIGN(Space);
 };
@@ -981,10 +973,6 @@
 class MemoryChunkValidator {
   // Computed offsets should match the compiler generated ones.
   STATIC_ASSERT(MemoryChunk::kSizeOffset == offsetof(MemoryChunk, size_));
-  STATIC_ASSERT(MemoryChunk::kLiveBytesOffset ==
-                offsetof(MemoryChunk, live_byte_count_));
-  STATIC_ASSERT(MemoryChunk::kOldToNewSlotsOffset ==
-                offsetof(MemoryChunk, old_to_new_slots_));
   STATIC_ASSERT(MemoryChunk::kWriteBarrierCounterOffset ==
                 offsetof(MemoryChunk, write_barrier_counter_));
 
@@ -1242,12 +1230,31 @@
     kRegular,
     kPooled,
   };
+
   enum FreeMode {
     kFull,
     kPreFreeAndQueue,
     kPooledAndQueue,
   };
 
+  static int CodePageGuardStartOffset();
+
+  static int CodePageGuardSize();
+
+  static int CodePageAreaStartOffset();
+
+  static int CodePageAreaEndOffset();
+
+  static int CodePageAreaSize() {
+    return CodePageAreaEndOffset() - CodePageAreaStartOffset();
+  }
+
+  static int PageAreaSize(AllocationSpace space) {
+    DCHECK_NE(LO_SPACE, space);
+    return (space == CODE_SPACE) ? CodePageAreaSize()
+                                 : Page::kAllocatableMemory;
+  }
+
   explicit MemoryAllocator(Isolate* isolate);
 
   // Initializes its internal bookkeeping structures.
@@ -1273,26 +1280,26 @@
   bool CanFreeMemoryChunk(MemoryChunk* chunk);
 
   // Returns allocated spaces in bytes.
-  intptr_t Size() { return size_.Value(); }
+  size_t Size() { return size_.Value(); }
 
   // Returns allocated executable spaces in bytes.
-  intptr_t SizeExecutable() { return size_executable_.Value(); }
+  size_t SizeExecutable() { return size_executable_.Value(); }
 
   // Returns the maximum available bytes of heaps.
-  intptr_t Available() {
-    intptr_t size = Size();
+  size_t Available() {
+    const size_t size = Size();
     return capacity_ < size ? 0 : capacity_ - size;
   }
 
   // Returns the maximum available executable bytes of heaps.
-  intptr_t AvailableExecutable() {
-    intptr_t executable_size = SizeExecutable();
+  size_t AvailableExecutable() {
+    const size_t executable_size = SizeExecutable();
     if (capacity_executable_ < executable_size) return 0;
     return capacity_executable_ - executable_size;
   }
 
   // Returns maximum available bytes that the old space can have.
-  intptr_t MaxAvailable() {
+  size_t MaxAvailable() {
     return (Available() / Page::kPageSize) * Page::kAllocatableMemory;
   }
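
Available() and MaxAvailable() above rely on the same unsigned discipline: the explicit capacity check makes the subtraction saturate at zero instead of wrapping, and MaxAvailable() then counts only whole pages before converting to allocatable bytes. A hedged standalone sketch with made-up page constants:

#include <cassert>
#include <cstddef>

// Illustrative constants only; the real values come from Page.
constexpr std::size_t kPageSize = 512 * 1024;
constexpr std::size_t kAllocatableMemory = kPageSize - 8 * 1024;

// Same saturating pattern as Available(): never underflows.
std::size_t Available(std::size_t capacity, std::size_t size) {
  return capacity < size ? 0 : capacity - size;
}

// Same shape as MaxAvailable(): whole pages only, then allocatable bytes.
std::size_t MaxAvailable(std::size_t capacity, std::size_t size) {
  return (Available(capacity, size) / kPageSize) * kAllocatableMemory;
}

int main() {
  assert(Available(10 * kPageSize, 12 * kPageSize) == 0);  // over-committed
  // 10 pages of capacity, a bit more than 3 pages in use: 6 whole pages left.
  assert(MaxAvailable(10 * kPageSize, 3 * kPageSize + 1) ==
         6 * kAllocatableMemory);
  return 0;
}
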
 
@@ -1303,11 +1310,6 @@
            address >= highest_ever_allocated_.Value();
   }
 
-#ifdef DEBUG
-  // Reports statistic info of the space.
-  void ReportStatistics();
-#endif
-
   // Returns a MemoryChunk in which the memory region from commit_area_size to
   // reserve_area_size of the chunk area is reserved but not committed; it
   // could be committed later by calling MemoryChunk::CommitArea.
@@ -1315,6 +1317,8 @@
                              intptr_t commit_area_size,
                              Executability executable, Space* space);
 
+  void ShrinkChunk(MemoryChunk* chunk, size_t bytes_to_shrink);
+
   Address ReserveAlignedMemory(size_t requested, size_t alignment,
                                base::VirtualMemory* controller);
   Address AllocateAlignedMemory(size_t reserve_size, size_t commit_size,
@@ -1343,24 +1347,6 @@
   // filling it up with a recognizable non-NULL bit pattern.
   void ZapBlock(Address start, size_t size);
 
-  static int CodePageGuardStartOffset();
-
-  static int CodePageGuardSize();
-
-  static int CodePageAreaStartOffset();
-
-  static int CodePageAreaEndOffset();
-
-  static int CodePageAreaSize() {
-    return CodePageAreaEndOffset() - CodePageAreaStartOffset();
-  }
-
-  static int PageAreaSize(AllocationSpace space) {
-    DCHECK_NE(LO_SPACE, space);
-    return (space == CODE_SPACE) ? CodePageAreaSize()
-                                 : Page::kAllocatableMemory;
-  }
-
   MUST_USE_RESULT bool CommitExecutableMemory(base::VirtualMemory* vm,
                                               Address start, size_t commit_size,
                                               size_t reserved_size);
@@ -1368,6 +1354,11 @@
   CodeRange* code_range() { return code_range_; }
   Unmapper* unmapper() { return &unmapper_; }
 
+#ifdef DEBUG
+  // Reports statistic info of the space.
+  void ReportStatistics();
+#endif
+
  private:
   // PreFree logically frees the object, i.e., it takes care of the size
   // bookkeeping and calls the allocation callback.
@@ -1381,28 +1372,6 @@
   template <typename SpaceType>
   MemoryChunk* AllocatePagePooled(SpaceType* owner);
 
-  Isolate* isolate_;
-
-  CodeRange* code_range_;
-
-  // Maximum space size in bytes.
-  intptr_t capacity_;
-  // Maximum subset of capacity_ that can be executable
-  intptr_t capacity_executable_;
-
-  // Allocated space size in bytes.
-  base::AtomicNumber<intptr_t> size_;
-  // Allocated executable space size in bytes.
-  base::AtomicNumber<intptr_t> size_executable_;
-
-  // We keep the lowest and highest addresses allocated as a quick way
-  // of determining that pointers are outside the heap. The estimate is
-  // conservative, i.e. not all addrsses in 'allocated' space are allocated
-  // to our heap. The range is [lowest, highest[, inclusive on the low end
-  // and exclusive on the high end.
-  base::AtomicValue<void*> lowest_ever_allocated_;
-  base::AtomicValue<void*> highest_ever_allocated_;
-
   // Initializes pages in a chunk. Returns the first page address.
   // This function and GetChunkId() are provided for the mark-compact
   // collector to rebuild page headers in the from space, which is
@@ -1423,6 +1392,27 @@
     } while ((high > ptr) && !highest_ever_allocated_.TrySetValue(ptr, high));
   }
 
+  Isolate* isolate_;
+  CodeRange* code_range_;
+
+  // Maximum space size in bytes.
+  size_t capacity_;
+  // Maximum subset of capacity_ that can be executable
+  size_t capacity_executable_;
+
+  // Allocated space size in bytes.
+  base::AtomicNumber<size_t> size_;
+  // Allocated executable space size in bytes.
+  base::AtomicNumber<size_t> size_executable_;
+
+  // We keep the lowest and highest addresses allocated as a quick way
+  // of determining that pointers are outside the heap. The estimate is
+  // conservative, i.e. not all addresses in 'allocated' space are allocated
+  // to our heap. The range is [lowest, highest[, inclusive on the low end
+  // and exclusive on the high end.
+  base::AtomicValue<void*> lowest_ever_allocated_;
+  base::AtomicValue<void*> highest_ever_allocated_;
+
   base::VirtualMemory last_chunk_;
   Unmapper unmapper_;
 
@@ -1440,7 +1430,7 @@
 //       method which is used to avoid using virtual functions
 //       iterating a specific space.
 
-class ObjectIterator : public Malloced {
+class V8_EXPORT_PRIVATE ObjectIterator : public Malloced {
  public:
   virtual ~ObjectIterator() {}
   virtual HeapObject* Next() = 0;
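
ObjectIterator, now exported and handed out via the new Space::GetObjectIterator() factory added throughout this header, is a pull-style iterator: Next() returns the next heap object, or a null pointer once the space is exhausted. A minimal sketch of that calling convention with stand-in types (none of these names are V8's):

#include <cassert>
#include <cstddef>
#include <memory>
#include <vector>

struct HeapObjectStub { int size_in_bytes; };

// Same contract as ObjectIterator::Next(): nullptr means "done".
class ObjectIteratorStub {
 public:
  explicit ObjectIteratorStub(std::vector<HeapObjectStub>* objects)
      : objects_(objects) {}
  HeapObjectStub* Next() {
    if (index_ >= objects_->size()) return nullptr;
    return &(*objects_)[index_++];
  }

 private:
  std::vector<HeapObjectStub>* objects_;
  std::size_t index_ = 0;
};

int main() {
  std::vector<HeapObjectStub> space = {{16}, {32}, {64}};
  // In the patched header a space would hand this out via GetObjectIterator().
  auto it = std::make_unique<ObjectIteratorStub>(&space);
  int total = 0;
  for (HeapObjectStub* obj = it->Next(); obj != nullptr; obj = it->Next()) {
    total += obj->size_in_bytes;
  }
  assert(total == 112);
  return 0;
}
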
@@ -1491,7 +1481,7 @@
 // If objects are allocated in the page during iteration the iterator may
 // or may not iterate over those objects.  The caller must create a new
 // iterator in order to be sure to visit these new objects.
-class HeapObjectIterator : public ObjectIterator {
+class V8_EXPORT_PRIVATE HeapObjectIterator : public ObjectIterator {
  public:
   // Creates a new object iterator in a given space.
   explicit HeapObjectIterator(PagedSpace* space);
@@ -1880,50 +1870,6 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(FreeList);
 };
 
-
-class AllocationResult {
- public:
-  // Implicit constructor from Object*.
-  AllocationResult(Object* object)  // NOLINT
-      : object_(object) {
-    // AllocationResults can't return Smis, which are used to represent
-    // failure and the space to retry in.
-    CHECK(!object->IsSmi());
-  }
-
-  AllocationResult() : object_(Smi::FromInt(NEW_SPACE)) {}
-
-  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
-    return AllocationResult(space);
-  }
-
-  inline bool IsRetry() { return object_->IsSmi(); }
-
-  template <typename T>
-  bool To(T** obj) {
-    if (IsRetry()) return false;
-    *obj = T::cast(object_);
-    return true;
-  }
-
-  Object* ToObjectChecked() {
-    CHECK(!IsRetry());
-    return object_;
-  }
-
-  inline AllocationSpace RetrySpace();
-
- private:
-  explicit AllocationResult(AllocationSpace space)
-      : object_(Smi::FromInt(static_cast<int>(space))) {}
-
-  Object* object_;
-};
-
-
-STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
-
-
 // LocalAllocationBuffer represents a linear allocation area that is created
 // from a given {AllocationResult} and can be used to allocate memory without
 // synchronization.
@@ -2196,6 +2142,12 @@
   iterator begin() { return iterator(anchor_.next_page()); }
   iterator end() { return iterator(&anchor_); }
 
+  // Shrink immortal immovable pages of the space to be exactly the size needed
+  // using the high water mark.
+  void ShrinkImmortalImmovablePages();
+
+  std::unique_ptr<ObjectIterator> GetObjectIterator() override;
+
  protected:
   // PagedSpaces that should be included in snapshots have different, i.e.,
   // smaller, initial pages.
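
ShrinkImmortalImmovablePages() and Page::ShrinkToHighWaterMark(), added above, release the unused tail of a page beyond its allocation high water mark, at commit-page granularity. A hedged sketch of the round-down arithmetic involved, with an invented commit page size:

#include <cassert>
#include <cstddef>

// Illustrative commit granularity; the real value is the OS page size.
constexpr std::size_t kCommitPageSize = 4096;

std::size_t RoundDown(std::size_t value, std::size_t granularity) {
  return value - value % granularity;
}

// Roughly the shape of a high-water-mark shrink: only the part of the unused
// tail that covers whole commit pages can actually be released.
std::size_t ShrinkableBytes(std::size_t area_size, std::size_t high_water_mark) {
  assert(high_water_mark <= area_size);
  return RoundDown(area_size - high_water_mark, kCommitPageSize);
}

int main() {
  // 600 KB page area, but only ~70 KB ever allocated on this page.
  std::size_t area_size = 600 * 1024;
  std::size_t high_water_mark = 70 * 1024 + 100;
  std::size_t shrink = ShrinkableBytes(area_size, high_water_mark);
  assert(shrink % kCommitPageSize == 0);
  assert(area_size - shrink >= high_water_mark);  // live objects stay mapped
  return 0;
}
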
@@ -2255,41 +2207,6 @@
   friend class HeapTester;
 };
 
-
-class NumberAndSizeInfo BASE_EMBEDDED {
- public:
-  NumberAndSizeInfo() : number_(0), bytes_(0) {}
-
-  int number() const { return number_; }
-  void increment_number(int num) { number_ += num; }
-
-  int bytes() const { return bytes_; }
-  void increment_bytes(int size) { bytes_ += size; }
-
-  void clear() {
-    number_ = 0;
-    bytes_ = 0;
-  }
-
- private:
-  int number_;
-  int bytes_;
-};
-
-
-// HistogramInfo class for recording a single "bar" of a histogram.  This
-// class is used for collecting statistics to print to the log file.
-class HistogramInfo : public NumberAndSizeInfo {
- public:
-  HistogramInfo() : NumberAndSizeInfo() {}
-
-  const char* name() { return name_; }
-  void set_name(const char* name) { name_ = name; }
-
- private:
-  const char* name_;
-};
-
 enum SemiSpaceId { kFromSpace = 0, kToSpace = 1 };
 
 // -----------------------------------------------------------------------------
@@ -2411,6 +2328,11 @@
     return 0;
   }
 
+  iterator begin() { return iterator(anchor_.next_page()); }
+  iterator end() { return iterator(anchor()); }
+
+  std::unique_ptr<ObjectIterator> GetObjectIterator() override;
+
 #ifdef DEBUG
   void Print() override;
   // Validate a range of addresses in a SemiSpace.
@@ -2426,9 +2348,6 @@
   virtual void Verify();
 #endif
 
-  iterator begin() { return iterator(anchor_.next_page()); }
-  iterator end() { return iterator(anchor()); }
-
  private:
   void RewindPages(Page* start, int num_pages);
 
@@ -2534,10 +2453,7 @@
            static_cast<int>(top() - to_space_.page_low());
   }
 
-  // The same, but returning an int.  We have to have the one that returns
-  // intptr_t because it is inherited, but if we know we are dealing with the
-  // new space, which can't get as big as the other spaces then this is useful:
-  int SizeAsInt() { return static_cast<int>(Size()); }
+  intptr_t SizeOfObjects() override { return Size(); }
 
   // Return the allocatable capacity of a semispace.
   intptr_t Capacity() {
@@ -2555,11 +2471,11 @@
 
   // Committed memory for NewSpace is the committed memory of both semi-spaces
   // combined.
-  intptr_t CommittedMemory() override {
+  size_t CommittedMemory() override {
     return from_space_.CommittedMemory() + to_space_.CommittedMemory();
   }
 
-  intptr_t MaximumCommittedMemory() override {
+  size_t MaximumCommittedMemory() override {
     return from_space_.MaximumCommittedMemory() +
            to_space_.MaximumCommittedMemory();
   }
@@ -2760,6 +2676,8 @@
   iterator begin() { return to_space_.begin(); }
   iterator end() { return to_space_.end(); }
 
+  std::unique_ptr<ObjectIterator> GetObjectIterator() override;
+
  private:
   // Update allocation info to match the current to-space page.
   void UpdateAllocationInfo();
@@ -2895,7 +2813,7 @@
 
 
 // -----------------------------------------------------------------------------
-// Large objects ( > Page::kMaxRegularHeapObjectSize ) are allocated and
+// Large objects ( > kMaxRegularHeapObjectSize ) are allocated and
 // managed by the large object space. A large object is allocated from OS
 // heap with extra padding bytes (Page::kPageSize + Page::kObjectStartOffset).
 // A large object always starts at Page::kObjectStartOffset into a page.
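
The threshold named in this comment is the same kMaxRegularHeapObjectSize that the patch moves out of Page (see the static_assert against MemoryChunk::kAllocatableMemory earlier in this header and the updated ia32 stub checks below). A toy illustration of the resulting space choice, with an invented limit:

#include <cassert>
#include <cstddef>

enum AllocationSpaceStub { NEW_SPACE, OLD_SPACE, LO_SPACE };

// Illustrative limit only; the real constant is defined elsewhere and checked
// against MemoryChunk::kAllocatableMemory by the static_assert above.
constexpr std::size_t kMaxRegularHeapObjectSize = 512 * 1024;

AllocationSpaceStub ChooseSpace(std::size_t object_size,
                                AllocationSpaceStub preferred) {
  // Objects above the limit never live on regular pages and never move.
  return object_size > kMaxRegularHeapObjectSize ? LO_SPACE : preferred;
}

int main() {
  assert(ChooseSpace(128, NEW_SPACE) == NEW_SPACE);
  assert(ChooseSpace(1024 * 1024, NEW_SPACE) == LO_SPACE);
  return 0;
}
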
@@ -2973,6 +2891,8 @@
   iterator begin() { return iterator(first_page_); }
   iterator end() { return iterator(nullptr); }
 
+  std::unique_ptr<ObjectIterator> GetObjectIterator() override;
+
 #ifdef VERIFY_HEAP
   virtual void Verify();
 #endif
@@ -3030,20 +2950,6 @@
   LargePageIterator lo_iterator_;
 };
 
-#ifdef DEBUG
-struct CommentStatistic {
-  const char* comment;
-  int size;
-  int count;
-  void Clear() {
-    comment = NULL;
-    size = 0;
-    count = 0;
-  }
-  // Must be small, since an iteration is used for lookup.
-  static const int kMaxComments = 64;
-};
-#endif
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/i18n.cc b/src/i18n.cc
index 3418ae7..58b8a8d 100644
--- a/src/i18n.cc
+++ b/src/i18n.cc
@@ -5,6 +5,8 @@
 
 #include "src/i18n.h"
 
+#include <memory>
+
 #include "src/api.h"
 #include "src/factory.h"
 #include "src/isolate.h"
@@ -115,13 +117,11 @@
   icu::SimpleDateFormat* date_format = NULL;
   icu::UnicodeString skeleton;
   if (ExtractStringSetting(isolate, options, "skeleton", &skeleton)) {
-    icu::DateTimePatternGenerator* generator =
-        icu::DateTimePatternGenerator::createInstance(icu_locale, status);
+    std::unique_ptr<icu::DateTimePatternGenerator> generator(
+        icu::DateTimePatternGenerator::createInstance(icu_locale, status));
     icu::UnicodeString pattern;
-    if (U_SUCCESS(status)) {
+    if (U_SUCCESS(status))
       pattern = generator->getBestPattern(skeleton, status);
-      delete generator;
-    }
 
     date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
     if (U_SUCCESS(status)) {
@@ -132,7 +132,7 @@
   if (U_FAILURE(status)) {
     delete calendar;
     delete date_format;
-    date_format = NULL;
+    date_format = nullptr;
   }
 
   return date_format;
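
The std::unique_ptr change above removes the manual delete of the factory-created pattern generator, so every exit path releases it automatically. The same RAII pattern in isolation, with generic stand-in names rather than the ICU API:

#include <cassert>
#include <memory>
#include <string>

struct Generator {
  std::string GetBestPattern(const std::string& skeleton) {
    return "<" + skeleton + ">";
  }
};

// Stand-in for a C-style factory that returns a raw owning pointer.
Generator* CreateGenerator(bool succeed) {
  return succeed ? new Generator() : nullptr;
}

std::string BestPatternFor(const std::string& skeleton) {
  // Ownership is taken immediately; no delete is needed on any path out.
  std::unique_ptr<Generator> generator(CreateGenerator(true));
  std::string pattern;
  if (generator) pattern = generator->GetBestPattern(skeleton);
  return pattern;  // generator destroyed here automatically
}

int main() {
  assert(BestPatternFor("yMMMd") == "<yMMMd>");
  return 0;
}
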
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 6f2fb97..edab277 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -1301,7 +1301,6 @@
   // edi : the function to call
   Isolate* isolate = masm->isolate();
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_increment_count, done_initialize_count;
 
   // Load the cache state into ecx.
   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
@@ -1314,7 +1313,7 @@
   // type-feedback-vector.h).
   Label check_allocation_site;
   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
-  __ j(equal, &done_increment_count, Label::kFar);
+  __ j(equal, &done, Label::kFar);
   __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
   __ j(equal, &done, Label::kFar);
   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
@@ -1337,7 +1336,7 @@
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   __ cmp(edi, ecx);
   __ j(not_equal, &megamorphic);
-  __ jmp(&done_increment_count, Label::kFar);
+  __ jmp(&done, Label::kFar);
 
   __ bind(&miss);
 
@@ -1366,26 +1365,17 @@
   // slot.
   CreateAllocationSiteStub create_stub(isolate);
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ jmp(&done_initialize_count);
+  __ jmp(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(isolate);
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
-  __ bind(&done_initialize_count);
 
-  // Initialize the call counter.
-  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-  __ jmp(&done);
-
-  __ bind(&done_increment_count);
-  // Increment the call count for monomorphic function calls.
+  __ bind(&done);
+  // Increment the call count for all function calls.
   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Immediate(Smi::FromInt(1)));
-
-  __ bind(&done);
 }
 
 
@@ -1431,6 +1421,12 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size,
+                      FixedArray::kHeaderSize + kPointerSize),
+         Immediate(Smi::FromInt(1)));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // edi - function
@@ -1446,9 +1442,7 @@
                            FixedArray::kHeaderSize));
 
   // Increment the call count for monomorphic function calls.
-  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
+  IncrementCallCount(masm, ebx, edx);
 
   __ mov(ebx, ecx);
   __ mov(edx, edi);
@@ -1464,7 +1458,7 @@
   // edx - slot id
   // ebx - vector
   Isolate* isolate = masm->isolate();
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1493,12 +1487,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(edi, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, ebx, edx);
+
   __ Set(eax, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1539,6 +1532,12 @@
       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, ebx, edx);
+
+  __ bind(&call_count_incremented);
+
   __ Set(eax, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -1564,11 +1563,6 @@
   __ cmp(ecx, NativeContextOperand());
   __ j(not_equal, &miss);
 
-  // Initialize the call counter.
-  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // ebx - vector
   // edx - slot
@@ -1576,11 +1570,15 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(isolate);
+    __ push(ebx);
+    __ push(edx);
     __ push(edi);
     __ push(esi);
     __ CallStub(&create_stub);
     __ pop(esi);
     __ pop(edi);
+    __ pop(edx);
+    __ pop(ebx);
   }
 
   __ jmp(&call_function);
@@ -1590,7 +1588,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ jmp(&call);
+  __ jmp(&call_count_incremented);
 
   // Unreachable
   __ int3();
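
The label changes above make every resolved call path bump the feedback call count at a single point, the new IncrementCallCount helper, instead of separately initializing the counter to 1, while the miss path jumps to call_count_incremented and skips the extra increment. The helper adds the tagged constant Smi::FromInt(1) directly, which works because ia32 Smis carry a zero tag bit in their low bit; a small sketch of that tagging arithmetic (simplified, not V8's Smi class):

#include <cassert>
#include <cstdint>

// Simplified ia32-style Smi tagging: value << 1, low tag bit zero.
constexpr int kSmiShift = 1;  // kSmiTagSize + kSmiShiftSize on ia32

intptr_t SmiFromInt(int value) {
  return static_cast<intptr_t>(value) << kSmiShift;
}
int SmiToInt(intptr_t smi) { return static_cast<int>(smi >> kSmiShift); }

int main() {
  // Hypothetical feedback slot pair: the call count is stored as a Smi one
  // pointer after the feedback entry, as in the FieldOperand(...) above.
  intptr_t call_count = SmiFromInt(41);
  // Adding the tagged constant 1 increments the untagged count because the
  // tag bit is zero, which is what `add ..., Immediate(Smi::FromInt(1))`
  // emitted by IncrementCallCount relies on.
  call_count += SmiFromInt(1);
  assert(SmiToInt(call_count) == 42);
  return 0;
}
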
@@ -2068,297 +2066,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  esp[0]: return address
-  //  esp[4]: to
-  //  esp[8]: from
-  //  esp[12]: string
-
-  // Make sure first argument is a string.
-  __ mov(eax, Operand(esp, 3 * kPointerSize));
-  STATIC_ASSERT(kSmiTag == 0);
-  __ JumpIfSmi(eax, &runtime);
-  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
-  __ j(NegateCondition(is_string), &runtime);
-
-  // eax: string
-  // ebx: instance type
-
-  // Calculate length of sub string using the smi values.
-  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
-  __ JumpIfNotSmi(ecx, &runtime);
-  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
-  __ JumpIfNotSmi(edx, &runtime);
-  __ sub(ecx, edx);
-  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
-  Label not_original_string;
-  // Shorter than original string's length: an actual substring.
-  __ j(below, &not_original_string, Label::kNear);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ j(above, &runtime);
-  // Return original string.
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-  __ bind(&not_original_string);
-
-  Label single_char;
-  __ cmp(ecx, Immediate(Smi::FromInt(1)));
-  __ j(equal, &single_char);
-
-  // eax: string
-  // ebx: instance type
-  // ecx: sub string length (smi)
-  // edx: from index (smi)
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into edi.
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ test(ebx, Immediate(kIsIndirectStringMask));
-  __ j(zero, &seq_or_external_string, Label::kNear);
-
-  Factory* factory = isolate()->factory();
-  __ test(ebx, Immediate(kSlicedNotConsMask));
-  __ j(not_zero, &sliced_string, Label::kNear);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  // Flat cons strings have an empty second part.
-  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
-         factory->empty_string());
-  __ j(not_equal, &runtime);
-  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
-  // Update instance type.
-  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
-  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and adjust start index by offset.
-  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
-  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
-  // Update instance type.
-  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
-  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mov(edi, eax);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // edi: underlying subject string
-    // ebx: instance type of underlying subject string
-    // edx: adjusted start index (smi)
-    // ecx: length (smi)
-    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
-    // Short slice.  Copy instead of slicing.
-    __ j(less, &copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ test(ebx, Immediate(kStringEncodingMask));
-    __ j(zero, &two_byte_slice, Label::kNear);
-    __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
-    __ jmp(&set_slice_header, Label::kNear);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
-    __ bind(&set_slice_header);
-    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
-    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
-           Immediate(String::kEmptyHashField));
-    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
-    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
-    __ IncrementCounter(counters->sub_string_native(), 1);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&copy_routine);
-  }
-
-  // edi: underlying subject string
-  // ebx: instance type of underlying subject string
-  // edx: adjusted start index (smi)
-  // ecx: length (smi)
-  // The subject string can only be external or sequential string of either
-  // encoding at this point.
-  Label two_byte_sequential, runtime_drop_two, sequential_string;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ test_b(ebx, Immediate(kExternalStringTag));
-  __ j(zero, &sequential_string);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ test_b(ebx, Immediate(kShortExternalStringMask));
-  __ j(not_zero, &runtime);
-  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
-  // Move the pointer so that offset-wise, it looks like a sequential string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&sequential_string);
-  // Stash away (adjusted) index and (underlying) string.
-  __ push(edx);
-  __ push(edi);
-  __ SmiUntag(ecx);
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ test_b(ebx, Immediate(kStringEncodingMask));
-  __ j(zero, &two_byte_sequential);
-
-  // Sequential one byte string.  Allocate the result.
-  __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
-
-  // eax: result string
-  // ecx: result string length
-  // Locate first character of result.
-  __ mov(edi, eax);
-  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-  // Load string argument and locate character of sub string start.
-  __ pop(edx);
-  __ pop(ebx);
-  __ SmiUntag(ebx);
-  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
-
-  // eax: result string
-  // ecx: result length
-  // edi: first character of result
-  // edx: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&two_byte_sequential);
-  // Sequential two-byte string.  Allocate the result.
-  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
-
-  // eax: result string
-  // ecx: result string length
-  // Locate first character of result.
-  __ mov(edi, eax);
-  __ add(edi,
-         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-  // Load string argument and locate character of sub string start.
-  __ pop(edx);
-  __ pop(ebx);
-  // As from is a smi it is 2 times the value which matches the size of a two
-  // byte character.
-  STATIC_ASSERT(kSmiTag == 0);
-  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
-  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
-
-  // eax: result string
-  // ecx: result length
-  // edi: first character of result
-  // edx: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  // Drop pushed values on the stack before tail call.
-  __ bind(&runtime_drop_two);
-  __ Drop(2);
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // eax: string
-  // ebx: instance type
-  // ecx: sub string length (smi)
-  // edx: from index (smi)
-  StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
-                                  &runtime, RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ ret(3 * kPointerSize);
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in eax.
-  Label is_number;
-  __ JumpIfSmi(eax, &is_number, Label::kNear);
-
-  Label not_string;
-  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
-  // eax: receiver
-  // edi: receiver map
-  __ j(above_equal, &not_string, Label::kNear);
-  __ Ret();
-  __ bind(&not_string);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(edi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ pop(ecx);   // Pop return address.
-  __ push(eax);  // Push argument.
-  __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in eax.
-  Label is_number;
-  __ JumpIfSmi(eax, &is_number, Label::kNear);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
-  // eax: receiver
-  // edi: receiver map
-  __ j(above, &not_name, Label::kNear);
-  __ Ret();
-  __ bind(&not_name);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(edi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ pop(ecx);   // Pop return address.
-  __ push(eax);  // Push argument.
-  __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
                                                    Register right,
@@ -3228,17 +2935,6 @@
     Mode mode) {
   Label object_is_black, need_incremental, need_incremental_pop_object;
 
-  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
-  __ and_(regs_.scratch0(), regs_.object());
-  __ mov(regs_.scratch1(),
-         Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset));
-  __ sub(regs_.scratch1(), Immediate(1));
-  __ mov(Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset),
-         regs_.scratch1());
-  __ j(negative, &need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(),
@@ -3580,11 +3276,10 @@
   Label load_smi_map, compare_map;
   Label start_polymorphic;
   Label pop_and_miss;
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   __ push(receiver);
-  __ push(vector);
+  // Value, vector and slot are passed on the stack, so no need to save/restore
+  // them.
 
   Register receiver_map = receiver;
   Register cached_map = vector;
@@ -3605,12 +3300,9 @@
   Register handler = feedback;
   DCHECK(handler.is(StoreWithVectorDescriptor::ValueRegister()));
   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
-  __ pop(vector);
   __ pop(receiver);
   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), handler);
-  __ pop(handler);  // Pop "value".
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(handler);
 
   // Polymorphic, we have to loop from 2 to N
   __ bind(&start_polymorphic);
@@ -3634,11 +3326,8 @@
                                FixedArray::kHeaderSize + kPointerSize));
   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
-  __ mov(Operand::StaticVariable(virtual_register), handler);
-  __ pop(handler);  // Pop "value".
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(handler);
 
   __ bind(&prepare_next);
   __ add(counter, Immediate(Smi::FromInt(2)));
@@ -3648,7 +3337,6 @@
   // We exhausted our array of map handler pairs.
   __ bind(&pop_and_miss);
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ jmp(miss);
 
@@ -3664,8 +3352,6 @@
                                        Label* miss) {
   // The store ic value is on the stack.
   DCHECK(weak_cell.is(StoreWithVectorDescriptor::ValueRegister()));
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   // feedback initially contains the feedback array
   Label compare_smi_map;
@@ -3681,11 +3367,8 @@
   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
-  // Put the store ic value back in it's register.
-  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
-  __ pop(weak_cell);  // Pop "value".
   // jump to the handler.
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(weak_cell);
 
   // In microbenchmarks, it made sense to unroll this code so that the call to
   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
@@ -3695,10 +3378,8 @@
   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
-  __ pop(weak_cell);  // Pop "value".
   // jump to the handler.
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(weak_cell);
 }
 
 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
@@ -3709,7 +3390,26 @@
   Register slot = StoreWithVectorDescriptor::SlotRegister();          // edi
   Label miss;
 
-  __ push(value);
+  if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
+    // Current stack layout:
+    // - esp[8]    -- value
+    // - esp[4]    -- slot
+    // - esp[0]    -- return address
+    STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
+    STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+    if (in_frame) {
+      __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
+      // If the vector is not on the stack, then insert the vector beneath
+      // return address in order to prepare for calling handler with
+      // StoreWithVector calling convention.
+      __ push(Operand(esp, 0));
+      __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
+      __ RecordComment("]");
+    } else {
+      __ mov(vector, Operand(esp, 1 * kPointerSize));
+    }
+    __ mov(slot, Operand(esp, 2 * kPointerSize));
+  }
 
   Register scratch = value;
   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
@@ -3733,19 +3433,9 @@
   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   __ j(not_equal, &miss);
 
-  __ pop(value);
-  __ push(slot);
-  __ push(vector);
   masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, slot,
                                                      no_reg);
-  __ pop(vector);
-  __ pop(slot);
-  Label no_pop_miss;
-  __ jmp(&no_pop_miss);
-
   __ bind(&miss);
-  __ pop(value);
-  __ bind(&no_pop_miss);
   StoreIC::GenerateMiss(masm);
 }
 
@@ -3767,17 +3457,13 @@
   Label load_smi_map, compare_map;
   Label transition_call;
   Label pop_and_miss;
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
-  ExternalReference virtual_slot =
-      ExternalReference::virtual_slot_register(masm->isolate());
 
   __ push(receiver);
-  __ push(vector);
+  // Value, vector and slot are passed on the stack, so no need to save/restore
+  // them.
 
   Register receiver_map = receiver;
   Register cached_map = vector;
-  Register value = StoreDescriptor::ValueRegister();
 
   // Receiver might not be a heap object.
   __ JumpIfSmi(receiver, &load_smi_map);
@@ -3788,15 +3474,18 @@
   __ push(key);
   // Current stack layout:
   // - esp[0]    -- key
-  // - esp[4]    -- vector
-  // - esp[8]    -- receiver
-  // - esp[12]   -- value
-  // - esp[16]   -- return address
+  // - esp[4]    -- receiver
+  // - esp[8]    -- return address
+  // - esp[12]   -- vector
+  // - esp[16]   -- slot
+  // - esp[20]   -- value
   //
-  // Required stack layout for handler call:
+  // Required stack layout for handler call (see StoreWithVectorDescriptor):
   // - esp[0]    -- return address
-  // - receiver, key, value, vector, slot in registers.
-  // - handler in virtual register.
+  // - esp[4]    -- vector
+  // - esp[8]    -- slot
+  // - esp[12]   -- value
+  // - receiver, key, handler in registers.
   Register counter = key;
   __ mov(counter, Immediate(Smi::FromInt(0)));
   __ bind(&next_loop);
@@ -3811,43 +3500,57 @@
   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), feedback);
-  __ pop(value);
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(feedback);
 
   __ bind(&transition_call);
   // Current stack layout:
   // - esp[0]    -- key
-  // - esp[4]    -- vector
-  // - esp[8]    -- receiver
-  // - esp[12]   -- value
-  // - esp[16]   -- return address
+  // - esp[4]    -- receiver
+  // - esp[8]    -- return address
+  // - esp[12]   -- vector
+  // - esp[16]   -- slot
+  // - esp[20]   -- value
   //
-  // Required stack layout for handler call:
+  // Required stack layout for handler call (see StoreTransitionDescriptor):
   // - esp[0]    -- return address
-  // - receiver, key, value, map, vector in registers.
-  // - handler and slot in virtual registers.
-  __ mov(Operand::StaticVariable(virtual_slot), slot);
+  // - esp[4]    -- vector
+  // - esp[8]    -- slot
+  // - esp[12]   -- value
+  // - receiver, key, map, handler in registers.
   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), feedback);
 
   __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   // The weak cell may have been cleared.
   __ JumpIfSmi(cached_map, &pop_and_miss);
-  DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
-  __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
+  DCHECK(!cached_map.is(StoreTransitionDescriptor::MapRegister()));
+  __ mov(StoreTransitionDescriptor::MapRegister(), cached_map);
 
-  // Pop key into place.
+  // Call store transition handler using StoreTransitionDescriptor calling
+  // convention.
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
-  __ pop(value);
-  __ jmp(Operand::StaticVariable(virtual_register));
+  // Ensure that the transition handler we are going to call has the same
+  // number of stack arguments which means that we don't have to adapt them
+  // before the call.
+  STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+  STATIC_ASSERT(StoreTransitionDescriptor::kStackArgumentsCount == 3);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kValue ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kValue);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kSlot ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kSlot);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kVector ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kVector);
+  __ jmp(feedback);
 
   __ bind(&prepare_next);
   __ add(counter, Immediate(Smi::FromInt(3)));
@@ -3857,7 +3560,6 @@
   // We exhausted our array of map handler pairs.
   __ bind(&pop_and_miss);
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ jmp(miss);
 
@@ -3874,7 +3576,26 @@
   Register slot = StoreWithVectorDescriptor::SlotRegister();          // edi
   Label miss;
 
-  __ push(value);
+  if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
+    // Current stack layout:
+    // - esp[8]    -- value
+    // - esp[4]    -- slot
+    // - esp[0]    -- return address
+    STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
+    STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+    if (in_frame) {
+      __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
+      // If the vector is not on the stack, then insert the vector beneath
+      // return address in order to prepare for calling handler with
+      // StoreWithVector calling convention.
+      __ push(Operand(esp, 0));
+      __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
+      __ RecordComment("]");
+    } else {
+      __ mov(vector, Operand(esp, 1 * kPointerSize));
+    }
+    __ mov(slot, Operand(esp, 2 * kPointerSize));
+  }
 
   Register scratch = value;
   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
@@ -3899,8 +3620,6 @@
   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   __ j(not_equal, &try_poly_name);
 
-  __ pop(value);
-
   Handle<Code> megamorphic_stub =
       KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
@@ -3917,7 +3636,6 @@
                              &miss);
 
   __ bind(&miss);
-  __ pop(value);
   KeyedStoreIC::GenerateMiss(masm);
 }
 
@@ -4564,7 +4282,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+    __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
     __ j(greater, &too_big_for_new_space);
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
@@ -4953,7 +4671,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+  __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
   __ j(greater, &too_big_for_new_space);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
diff --git a/src/ia32/interface-descriptors-ia32.cc b/src/ia32/interface-descriptors-ia32.cc
index f1972b9..220484c 100644
--- a/src/ia32/interface-descriptors-ia32.cc
+++ b/src/ia32/interface-descriptors-ia32.cc
@@ -39,19 +39,11 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return ebx; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() {
-  return no_reg;
-}
+const Register StoreTransitionDescriptor::SlotRegister() { return no_reg; }
 
+const Register StoreTransitionDescriptor::VectorRegister() { return ebx; }
 
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return ebx; }
-
-
-const Register VectorStoreTransitionDescriptor::MapRegister() { return edi; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return ebx; }
-
+const Register StoreTransitionDescriptor::MapRegister() { return edi; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }
@@ -365,7 +357,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       edi,  // callee
@@ -400,7 +392,19 @@
       eax,  // argument count (not including receiver)
       edx,  // new target
       edi,  // constructor
-      ebx,  // address of first argument
+      ebx,  // allocation site feedback
+      ecx,  // address of first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      eax,  // argument count (not including receiver)
+      edx,  // target to the call. It is checked to be Array function.
+      ebx,  // allocation site feedback
+      ecx,  // address of first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 83c7ce8..2bd8760 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -173,9 +173,8 @@
 void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                 Label* condition_met,
                                 Label::Distance distance) {
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc,
+                condition_met, distance);
 }
 
 
@@ -1545,7 +1544,7 @@
                               Label* gc_required,
                               AllocationFlags flags) {
   DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 08cc7ce..2220ca7 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -798,6 +798,24 @@
   // may be bigger than 2^16 - 1.  Requires a scratch register.
   void Ret(int bytes_dropped, Register scratch);
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack
+  // into the given register according to the CallInterfaceDescriptor
+  // definition. |sp_to_ra_offset_in_words| specifies the number of words
+  // pushed below the caller's sp (on ia32 it is at least the return address).
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 1) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    DCHECK_LT(parameter_index, Descriptor::kParameterCount);
+    DCHECK_LE(Descriptor::kParameterCount - Descriptor::kStackArgumentsCount,
+              parameter_index);
+    int offset = (Descriptor::kParameterCount - parameter_index - 1 +
+                  sp_to_ra_offset_in_words) *
+                 kPointerSize;
+    mov(reg, Operand(esp, offset));
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the esp register.
   void Drop(int element_count);
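
LoadParameterFromStack above turns a descriptor parameter index into an esp-relative offset. A worked example of the formula, assuming a StoreWithVectorDescriptor-like order (receiver, name, value, slot, vector) with the last three passed on the stack and one word between esp and the arguments, matching the stack-layout comments in code-stubs-ia32.cc:

#include <cassert>

constexpr int kPointerSize = 4;  // ia32
constexpr int kParameterCount = 5;
enum ParameterIndices { kReceiver, kName, kValue, kSlot, kVector };

// Same formula as LoadParameterFromStack's offset computation.
constexpr int StackOffset(int parameter_index,
                          int sp_to_ra_offset_in_words = 1) {
  return (kParameterCount - parameter_index - 1 + sp_to_ra_offset_in_words) *
         kPointerSize;
}

int main() {
  // Matches the "required stack layout" comments above:
  //   esp[0] return address, esp[4] vector, esp[8] slot, esp[12] value.
  assert(StackOffset(kVector) == 4);
  assert(StackOffset(kSlot) == 8);
  assert(StackOffset(kValue) == 12);
  return 0;
}
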
diff --git a/src/ic/arm/handler-compiler-arm.cc b/src/ic/arm/handler-compiler-arm.cc
index 4ed765e..691fe3d 100644
--- a/src/ic/arm/handler-compiler-arm.cc
+++ b/src/ic/arm/handler-compiler-arm.cc
@@ -111,15 +111,21 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ push(vector);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
   __ push(slot);
+  __ push(vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ pop(slot);
   __ pop(vector);
+  __ pop(slot);
 }
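
The reordered PushVectorAndSlot above pushes the slot first and the vector last, and the STATIC_ASSERTs pin down that kSlot < kVector: on a downward-growing stack the later push lands nearest the stack pointer, so pushing in ascending parameter-index order appears to leave the arguments in the top-of-stack order the store handlers now expect. A tiny model of that ordering:

#include <cassert>
#include <string>
#include <vector>

int main() {
  // back() models the top of a downward-growing stack (smallest esp offset).
  std::vector<std::string> stack;
  // PushVectorAndSlot after this patch: slot first, then vector.
  stack.push_back("slot");
  stack.push_back("vector");

  // With kSlot < kVector, the higher-indexed argument ends up nearest esp.
  assert(stack.back() == "vector");
  assert(stack[stack.size() - 2] == "slot");
  return 0;
}
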
 
 
@@ -129,6 +135,13 @@
   __ add(sp, sp, Operand(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -330,24 +343,6 @@
   __ TailCallStub(&stub);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -366,12 +361,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -629,6 +618,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
diff --git a/src/ic/arm/ic-arm.cc b/src/ic/arm/ic-arm.cc
index fee6ebf..10ec578 100644
--- a/src/ic/arm/ic-arm.cc
+++ b/src/ic/arm/ic-arm.cc
@@ -441,10 +441,11 @@
 
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 
@@ -454,6 +455,13 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 static void KeyedStoreGenerateMegamorphicHelper(
     MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
diff --git a/src/ic/arm64/handler-compiler-arm64.cc b/src/ic/arm64/handler-compiler-arm64.cc
index 277b4e7..3f97fdd 100644
--- a/src/ic/arm64/handler-compiler-arm64.cc
+++ b/src/ic/arm64/handler-compiler-arm64.cc
@@ -20,15 +20,21 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
   __ Push(slot);
+  __ Push(vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(slot);
   __ Pop(vector);
+  __ Pop(slot);
 }
 
 
@@ -38,6 +44,13 @@
   __ Drop(2);
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -323,25 +336,6 @@
   __ Ret();
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  ASM_LOCATION("ElementHandlerCompiler::GenerateStoreSlow");
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -398,12 +392,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -664,6 +652,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
diff --git a/src/ic/arm64/ic-arm64.cc b/src/ic/arm64/ic-arm64.cc
index 9d66eb2..fa9d7c1 100644
--- a/src/ic/arm64/ic-arm64.cc
+++ b/src/ic/arm64/ic-arm64.cc
@@ -445,10 +445,11 @@
 
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 
@@ -458,6 +459,14 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  ASM_LOCATION("KeyedStoreIC::GenerateSlow");
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 static void KeyedStoreGenerateMegamorphicHelper(
     MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
diff --git a/src/ic/handler-compiler.cc b/src/ic/handler-compiler.cc
index b6b81de..3b2e115 100644
--- a/src/ic/handler-compiler.cc
+++ b/src/ic/handler-compiler.cc
@@ -129,13 +129,13 @@
 
 Register PropertyHandlerCompiler::Frontend(Handle<Name> name) {
   Label miss;
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     PushVectorAndSlot();
   }
   Register reg = FrontendHeader(receiver(), name, &miss, RETURN_HOLDER);
   FrontendFooter(name, &miss);
   // The footer consumes the vector and slot from the stack if miss occurs.
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     DiscardVectorAndSlot();
   }
   return reg;
@@ -209,12 +209,12 @@
 Handle<Code> NamedLoadHandlerCompiler::CompileLoadNonexistent(
     Handle<Name> name) {
   Label miss;
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     DCHECK(kind() == Code::LOAD_IC);
     PushVectorAndSlot();
   }
   NonexistentFrontendHeader(name, &miss, scratch2(), scratch3());
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     DiscardVectorAndSlot();
   }
   GenerateLoadConstant(isolate()->factory()->undefined_value());
@@ -247,7 +247,7 @@
 
 
 void NamedLoadHandlerCompiler::InterceptorVectorSlotPush(Register holder_reg) {
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     if (holder_reg.is(receiver())) {
       PushVectorAndSlot();
     } else {
@@ -260,7 +260,7 @@
 
 void NamedLoadHandlerCompiler::InterceptorVectorSlotPop(Register holder_reg,
                                                         PopMode mode) {
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     if (mode == DISCARD) {
       DiscardVectorAndSlot();
     } else {
@@ -438,7 +438,31 @@
     Handle<Map> transition, Handle<Name> name) {
   Label miss;
 
-  PushVectorAndSlot();
+  // Ensure that the StoreTransitionStub we are going to call has the same
+  // number of stack arguments. This means that we don't have to adapt them
+  // if we decide to call the transition or miss stub.
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount ==
+                StoreTransitionDescriptor::kStackArgumentsCount);
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount == 0 ||
+                Descriptor::kStackArgumentsCount == 3);
+  STATIC_ASSERT(Descriptor::kParameterCount - Descriptor::kValue ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kValue);
+  STATIC_ASSERT(Descriptor::kParameterCount - Descriptor::kSlot ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kSlot);
+  STATIC_ASSERT(Descriptor::kParameterCount - Descriptor::kVector ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kVector);
+
+  if (Descriptor::kPassLastArgsOnStack) {
+    __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
+  }
+
+  bool need_save_restore = IC::ShouldPushPopSlotAndVector(kind());
+  if (need_save_restore) {
+    PushVectorAndSlot();
+  }
 
   // Check that we are allowed to write this.
   bool is_nonexistent = holder()->map() == transition->GetBackPointer();
@@ -470,23 +494,17 @@
   DCHECK(!transition->is_access_check_needed());
 
   // Call to respective StoreTransitionStub.
-  bool virtual_args = StoreTransitionHelper::HasVirtualSlotArg();
-  Register map_reg = StoreTransitionHelper::MapRegister();
+  Register map_reg = StoreTransitionDescriptor::MapRegister();
 
   if (details.type() == DATA_CONSTANT) {
     DCHECK(descriptors->GetValue(descriptor)->IsJSFunction());
-    Register tmp =
-        virtual_args ? StoreWithVectorDescriptor::VectorRegister() : map_reg;
-    GenerateRestoreMap(transition, tmp, scratch2(), &miss);
-    GenerateConstantCheck(tmp, descriptor, value(), scratch2(), &miss);
-    if (virtual_args) {
-      // This will move the map from tmp into map_reg.
-      RearrangeVectorAndSlot(tmp, map_reg);
-    } else {
+    GenerateRestoreMap(transition, map_reg, scratch1(), &miss);
+    GenerateConstantCheck(map_reg, descriptor, value(), scratch1(), &miss);
+    if (need_save_restore) {
       PopVectorAndSlot();
     }
     GenerateRestoreName(name);
-    StoreTransitionStub stub(isolate());
+    StoreMapStub stub(isolate());
     GenerateTailCall(masm(), stub.GetCode());
 
   } else {
@@ -498,24 +516,29 @@
         Map::cast(transition->GetBackPointer())->unused_property_fields() == 0
             ? StoreTransitionStub::ExtendStorageAndStoreMapAndValue
             : StoreTransitionStub::StoreMapAndValue;
-
-    Register tmp =
-        virtual_args ? StoreWithVectorDescriptor::VectorRegister() : map_reg;
-    GenerateRestoreMap(transition, tmp, scratch2(), &miss);
-    if (virtual_args) {
-      RearrangeVectorAndSlot(tmp, map_reg);
-    } else {
+    GenerateRestoreMap(transition, map_reg, scratch1(), &miss);
+    if (need_save_restore) {
       PopVectorAndSlot();
     }
-    GenerateRestoreName(name);
-    StoreTransitionStub stub(isolate(),
-                             FieldIndex::ForDescriptor(*transition, descriptor),
-                             representation, store_mode);
+    // We need to pass name on the stack.
+    PopReturnAddress(this->name());
+    __ Push(name);
+    PushReturnAddress(this->name());
+
+    FieldIndex index = FieldIndex::ForDescriptor(*transition, descriptor);
+    __ Move(StoreNamedTransitionDescriptor::FieldOffsetRegister(),
+            Smi::FromInt(index.index() << kPointerSizeLog2));
+
+    StoreTransitionStub stub(isolate(), index.is_inobject(), representation,
+                             store_mode);
     GenerateTailCall(masm(), stub.GetCode());
   }
 
-  GenerateRestoreName(&miss, name);
-  PopVectorAndSlot();
+  __ bind(&miss);
+  if (need_save_restore) {
+    PopVectorAndSlot();
+  }
+  GenerateRestoreName(name);
   TailCallBuiltin(masm(), MissBuiltin(kind()));
 
   return GetCode(kind(), name);
@@ -534,7 +557,10 @@
   FieldType* field_type = *it->GetFieldType();
   bool need_save_restore = false;
   if (RequiresFieldTypeChecks(field_type)) {
-    need_save_restore = IC::ICUseVector(kind());
+    need_save_restore = IC::ShouldPushPopSlotAndVector(kind());
+    if (Descriptor::kPassLastArgsOnStack) {
+      __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
+    }
     if (need_save_restore) PushVectorAndSlot();
     GenerateFieldTypeChecks(field_type, value(), &miss);
     if (need_save_restore) PopVectorAndSlot();
@@ -568,6 +594,9 @@
     GenerateTailCall(masm(), slow_stub);
   }
   Register holder = Frontend(name);
+  if (Descriptor::kPassLastArgsOnStack) {
+    __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
+  }
   GenerateApiAccessorCall(masm(), call_optimization, handle(object->map()),
                           receiver(), scratch2(), true, value(), holder,
                           accessor_index);
@@ -601,13 +630,21 @@
     TRACE_HANDLER_STATS(isolate, KeyedLoadIC_KeyedLoadSloppyArgumentsStub);
     return KeyedLoadSloppyArgumentsStub(isolate).GetCode();
   }
+  bool is_js_array = instance_type == JS_ARRAY_TYPE;
   if (elements_kind == DICTIONARY_ELEMENTS) {
+    if (FLAG_tf_load_ic_stub) {
+      int config = KeyedLoadElementsKind::encode(elements_kind) |
+                   KeyedLoadConvertHole::encode(false) |
+                   KeyedLoadIsJsArray::encode(is_js_array) |
+                   LoadHandlerTypeBit::encode(kLoadICHandlerForElements);
+      return handle(Smi::FromInt(config), isolate);
+    }
     TRACE_HANDLER_STATS(isolate, KeyedLoadIC_LoadDictionaryElementStub);
     return LoadDictionaryElementStub(isolate).GetCode();
   }
   DCHECK(IsFastElementsKind(elements_kind) ||
          IsFixedTypedArrayElementsKind(elements_kind));
-  bool is_js_array = instance_type == JS_ARRAY_TYPE;
+  // TODO(jkummerow): Use IsHoleyElementsKind(elements_kind).
   bool convert_hole_to_undefined =
       is_js_array && elements_kind == FAST_HOLEY_ELEMENTS &&
       *receiver_map == isolate->get_initial_js_array_map(elements_kind);
diff --git a/src/ic/handler-compiler.h b/src/ic/handler-compiler.h
index 525889b..63ca050 100644
--- a/src/ic/handler-compiler.h
+++ b/src/ic/handler-compiler.h
@@ -53,6 +53,9 @@
 
   void DiscardVectorAndSlot();
 
+  void PushReturnAddress(Register tmp);
+  void PopReturnAddress(Register tmp);
+
   // TODO(verwaest): Make non-static.
   static void GenerateApiAccessorCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
@@ -212,13 +215,24 @@
 
 class NamedStoreHandlerCompiler : public PropertyHandlerCompiler {
  public:
+  // All store handlers use StoreWithVectorDescriptor calling convention.
+  typedef StoreWithVectorDescriptor Descriptor;
+
   explicit NamedStoreHandlerCompiler(Isolate* isolate, Handle<Map> map,
                                      Handle<JSObject> holder)
       : PropertyHandlerCompiler(isolate, Code::STORE_IC, map, holder,
-                                kCacheOnReceiver) {}
+                                kCacheOnReceiver) {
+#ifdef DEBUG
+    if (Descriptor::kPassLastArgsOnStack) {
+      ZapStackArgumentsRegisterAliases();
+    }
+#endif
+  }
 
   virtual ~NamedStoreHandlerCompiler() {}
 
+  void ZapStackArgumentsRegisterAliases();
+
   Handle<Code> CompileStoreTransition(Handle<Map> transition,
                                       Handle<Name> name);
   Handle<Code> CompileStoreField(LookupIterator* it);
@@ -249,10 +263,6 @@
   virtual void FrontendFooter(Handle<Name> name, Label* miss);
   void GenerateRestoreName(Label* label, Handle<Name> name);
 
-  // Pop the vector and slot into appropriate registers, moving the map in
-  // the process. (This is an accomodation for register pressure on ia32).
-  void RearrangeVectorAndSlot(Register current_map, Register destination_map);
-
  private:
   void GenerateRestoreName(Handle<Name> name);
   void GenerateRestoreMap(Handle<Map> transition, Register map_reg,
@@ -283,8 +293,6 @@
                                             Isolate* isolate);
   void CompileElementHandlers(MapHandleList* receiver_maps,
                               List<Handle<Object>>* handlers);
-
-  static void GenerateStoreSlow(MacroAssembler* masm);
 };
 }  // namespace internal
 }  // namespace v8
diff --git a/src/ic/ia32/handler-compiler-ia32.cc b/src/ic/ia32/handler-compiler-ia32.cc
index b332f11..06c58b8 100644
--- a/src/ic/ia32/handler-compiler-ia32.cc
+++ b/src/ic/ia32/handler-compiler-ia32.cc
@@ -59,15 +59,21 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ push(vector);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
   __ push(slot);
+  __ push(vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ pop(slot);
   __ pop(vector);
+  __ pop(slot);
 }
 
 
@@ -77,6 +83,15 @@
   __ add(esp, Immediate(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ push(tmp);
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ pop(tmp);
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -150,12 +165,16 @@
   DCHECK(!accessor_holder.is(scratch));
   // Copy return value.
   __ pop(scratch);
-  // receiver
-  __ push(receiver);
-  // Write the arguments to stack frame.
+
   if (is_store) {
-    DCHECK(!receiver.is(store_parameter));
-    DCHECK(!scratch.is(store_parameter));
+    // Discard stack arguments.
+    __ add(esp, Immediate(StoreWithVectorDescriptor::kStackArgumentsCount *
+                          kPointerSize));
+  }
+  // Write the receiver and arguments to stack frame.
+  __ push(receiver);
+  if (is_store) {
+    DCHECK(!AreAliased(receiver, scratch, store_parameter));
     __ push(store_parameter);
   }
   __ push(scratch);
@@ -252,8 +271,13 @@
     MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
     int accessor_index, int expected_arguments, Register scratch) {
   // ----------- S t a t e -------------
-  //  -- esp[0] : return address
+  //  -- esp[12] : value
+  //  -- esp[8]  : slot
+  //  -- esp[4]  : vector
+  //  -- esp[0]  : return address
   // -----------------------------------
+  __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
+
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
 
@@ -290,7 +314,14 @@
     // Restore context register.
     __ pop(esi);
   }
-  __ ret(0);
+  if (accessor_index >= 0) {
+    __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
+  } else {
+    // If we generate a global code snippet for deoptimization only, don't try
+    // to drop stack arguments for the StoreIC because they are not part of
+    // the expression stack and the deoptimizer does not reconstruct them.
+    __ ret(0);
+  }
 }
 
 
@@ -316,32 +347,6 @@
   __ CallRuntime(id);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
-
-  __ xchg(receiver, Operand(esp, 0));
-  __ push(name);
-  __ push(value);
-  __ push(slot);
-  __ push(vector);
-  __ push(receiver);  // which contains the return address.
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  // Return address is on the stack.
-  StoreIC_PushArgs(masm);
-
-  // Do tail-call to runtime routine.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -360,19 +365,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(destination_map.is(StoreTransitionHelper::MapRegister()));
-  DCHECK(current_map.is(StoreTransitionHelper::VectorRegister()));
-  ExternalReference virtual_slot =
-      ExternalReference::virtual_slot_register(isolate());
-  __ mov(destination_map, current_map);
-  __ pop(current_map);
-  __ mov(Operand::StaticVariable(virtual_slot), current_map);
-  __ pop(current_map);  // put vector in place.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -532,7 +524,7 @@
     Label success;
     __ jmp(&success);
     __ bind(miss);
-    if (IC::ICUseVector(kind())) {
+    if (IC::ShouldPushPopSlotAndVector(kind())) {
       DCHECK(kind() == Code::LOAD_IC);
       PopVectorAndSlot();
     }
@@ -547,7 +539,7 @@
     Label success;
     __ jmp(&success);
     GenerateRestoreName(miss, name);
-    if (IC::ICUseVector(kind())) PopVectorAndSlot();
+    DCHECK(!IC::ShouldPushPopSlotAndVector(kind()));
     TailCallBuiltin(masm(), MissBuiltin(kind()));
     __ bind(&success);
   }
@@ -641,13 +633,26 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  // Zap register aliases of the arguments passed on the stack to ensure they
+  // are properly loaded by the handler (debug-only).
+  STATIC_ASSERT(Descriptor::kPassLastArgsOnStack);
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount == 3);
+  __ mov(Descriptor::ValueRegister(), Immediate(kDebugZapValue));
+  __ mov(Descriptor::SlotRegister(), Immediate(kDebugZapValue));
+  __ mov(Descriptor::VectorRegister(), Immediate(kDebugZapValue));
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
     LanguageMode language_mode) {
   Register holder_reg = Frontend(name);
+  __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
 
   __ pop(scratch1());  // remove the return address
+  // Discard stack arguments.
+  __ add(esp, Immediate(StoreWithVectorDescriptor::kStackArgumentsCount *
+                        kPointerSize));
   __ push(receiver());
   __ push(holder_reg);
   // If the callback cannot leak, then push the callback directly,
@@ -679,7 +684,7 @@
 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
     Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
   Label miss;
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     PushVectorAndSlot();
   }
   FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);
@@ -701,7 +706,7 @@
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->ic_named_load_global_stub(), 1);
   // The code above already loads the result into the return register.
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     DiscardVectorAndSlot();
   }
   __ ret(0);
diff --git a/src/ic/ia32/ic-compiler-ia32.cc b/src/ic/ia32/ic-compiler-ia32.cc
index d93b67b..a52f046 100644
--- a/src/ic/ia32/ic-compiler-ia32.cc
+++ b/src/ic/ia32/ic-compiler-ia32.cc
@@ -15,14 +15,21 @@
 
 void PropertyICCompiler::GenerateRuntimeSetProperty(
     MacroAssembler* masm, LanguageMode language_mode) {
-  // Return address is on the stack.
-  DCHECK(!ebx.is(StoreDescriptor::ReceiverRegister()) &&
-         !ebx.is(StoreDescriptor::NameRegister()) &&
-         !ebx.is(StoreDescriptor::ValueRegister()));
+  typedef StoreWithVectorDescriptor Descriptor;
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount == 3);
+  // ----------- S t a t e -------------
+  //  -- esp[12] : value
+  //  -- esp[8]  : slot
+  //  -- esp[4]  : vector
+  //  -- esp[0]  : return address
+  // -----------------------------------
+  __ LoadParameterFromStack<Descriptor>(Descriptor::ValueRegister(),
+                                        Descriptor::kValue);
+
+  __ mov(Operand(esp, 12), Descriptor::ReceiverRegister());
+  __ mov(Operand(esp, 8), Descriptor::NameRegister());
+  __ mov(Operand(esp, 4), Descriptor::ValueRegister());
   __ pop(ebx);
-  __ push(StoreDescriptor::ReceiverRegister());
-  __ push(StoreDescriptor::NameRegister());
-  __ push(StoreDescriptor::ValueRegister());
   __ push(Immediate(Smi::FromInt(language_mode)));
   __ push(ebx);  // return address
 
diff --git a/src/ic/ia32/ic-ia32.cc b/src/ic/ia32/ic-ia32.cc
index 0550d92..b7496d4 100644
--- a/src/ic/ia32/ic-ia32.cc
+++ b/src/ic/ia32/ic-ia32.cc
@@ -409,7 +409,7 @@
   }
   // It's irrelevant whether array is smi-only or not when writing a smi.
   __ mov(FixedArrayElementOperand(ebx, key), value);
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&non_smi_value);
   // Escape to elements kind transition case.
@@ -428,7 +428,7 @@
   __ mov(edx, value);  // Preserve the value which is returned.
   __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                       OMIT_SMI_CHECK);
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(fast_double);
   if (check_map == kCheckMap) {
@@ -457,7 +457,7 @@
     __ add(FieldOperand(receiver, JSArray::kLengthOffset),
            Immediate(Smi::FromInt(1)));
   }
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&transition_smi_elements);
   __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
@@ -504,12 +504,13 @@
 
 void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                        LanguageMode language_mode) {
+  typedef StoreWithVectorDescriptor Descriptor;
   // Return address is on the stack.
   Label slow, fast_object, fast_object_grow;
   Label fast_double, fast_double_grow;
   Label array, extra, check_if_double_array, maybe_name_key, miss;
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register key = StoreDescriptor::NameRegister();
+  Register receiver = Descriptor::ReceiverRegister();
+  Register key = Descriptor::NameRegister();
   DCHECK(receiver.is(edx));
   DCHECK(key.is(ecx));
 
@@ -522,6 +523,10 @@
   __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsAccessCheckNeeded));
   __ j(not_zero, &slow);
+
+  __ LoadParameterFromStack<Descriptor>(Descriptor::ValueRegister(),
+                                        Descriptor::kValue);
+
   // Check that the key is a smi.
   __ JumpIfNotSmi(key, &maybe_name_key);
   __ CmpInstanceType(edi, JS_ARRAY_TYPE);
@@ -551,22 +556,9 @@
   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   __ JumpIfNotUniqueNameInstanceType(ebx, &slow);
 
-
-  // The handlers in the stub cache expect a vector and slot. Since we won't
-  // change the IC from any downstream misses, a dummy vector can be used.
-  Handle<TypeFeedbackVector> dummy_vector =
-      TypeFeedbackVector::DummyVector(masm->isolate());
-  int slot = dummy_vector->GetIndex(
-      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
-  __ push(Immediate(Smi::FromInt(slot)));
-  __ push(Immediate(dummy_vector));
-
   masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, edi,
                                                      no_reg);
 
-  __ pop(StoreWithVectorDescriptor::VectorRegister());
-  __ pop(StoreWithVectorDescriptor::SlotRegister());
-
   // Cache miss.
   __ jmp(&miss);
 
@@ -705,18 +697,21 @@
 }
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
+  Register receiver = StoreWithVectorDescriptor::ReceiverRegister();
+  Register name = StoreWithVectorDescriptor::NameRegister();
 
-  __ xchg(receiver, Operand(esp, 0));
+  STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+  // Current stack layout:
+  // - esp[12]   -- value
+  // - esp[8]    -- slot
+  // - esp[4]    -- vector
+  // - esp[0]    -- return address
+
+  Register return_address = StoreWithVectorDescriptor::SlotRegister();
+  __ pop(return_address);
+  __ push(receiver);
   __ push(name);
-  __ push(value);
-  __ push(slot);
-  __ push(vector);
-  __ push(receiver);  // Contains the return address.
+  __ push(return_address);
 }
 
 
@@ -730,32 +725,33 @@
 
 
 void StoreIC::GenerateNormal(MacroAssembler* masm) {
+  typedef StoreWithVectorDescriptor Descriptor;
   Label restore_miss;
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
+  Register receiver = Descriptor::ReceiverRegister();
+  Register name = Descriptor::NameRegister();
+  Register value = Descriptor::ValueRegister();
+  // Since the slot and vector values are passed on the stack, we can use the
+  // respective registers as scratch registers.
+  Register scratch1 = Descriptor::VectorRegister();
+  Register scratch2 = Descriptor::SlotRegister();
 
-  // A lot of registers are needed for storing to slow case
-  // objects. Push and restore receiver but rely on
-  // GenerateDictionaryStore preserving the value and name.
+  __ LoadParameterFromStack<Descriptor>(value, Descriptor::kValue);
+
+  // A lot of registers are needed for storing to slow case objects.
+  // Push and restore receiver but rely on GenerateDictionaryStore preserving
+  // the value and name.
   __ push(receiver);
-  __ push(vector);
-  __ push(slot);
 
-  Register dictionary = ebx;
+  Register dictionary = receiver;
   __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
   GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
-                          receiver, edi);
-  __ Drop(3);
+                          scratch1, scratch2);
+  __ Drop(1);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->ic_store_normal_hit(), 1);
-  __ ret(0);
+  __ ret(Descriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&restore_miss);
-  __ pop(slot);
-  __ pop(vector);
   __ pop(receiver);
   __ IncrementCounter(counters->ic_store_normal_miss(), 1);
   GenerateMiss(masm);
@@ -770,6 +766,13 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  // Return address is on the stack.
+  StoreIC_PushArgs(masm);
+
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 #undef __
 
diff --git a/src/ic/ia32/stub-cache-ia32.cc b/src/ic/ia32/stub-cache-ia32.cc
index 939e7fc..82700d3 100644
--- a/src/ic/ia32/stub-cache-ia32.cc
+++ b/src/ic/ia32/stub-cache-ia32.cc
@@ -22,8 +22,6 @@
   ExternalReference key_offset(stub_cache->key_reference(table));
   ExternalReference value_offset(stub_cache->value_reference(table));
   ExternalReference map_offset(stub_cache->map_reference(table));
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   Label miss;
   Code::Kind ic_kind = stub_cache->ic_kind();
@@ -55,19 +53,15 @@
     }
 #endif
 
-    // The vector and slot were pushed onto the stack before starting the
-    // probe, and need to be dropped before calling the handler.
     if (is_vector_store) {
-      // The overlap here is rather embarrassing. One does what one must.
-      Register vector = StoreWithVectorDescriptor::VectorRegister();
+      // The value, vector and slot were passed to the IC on the stack and
+      // they are still there. So we can just jump to the handler.
       DCHECK(extra.is(StoreWithVectorDescriptor::SlotRegister()));
       __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ pop(vector);
-      __ mov(Operand::StaticVariable(virtual_register), extra);
-      __ pop(extra);  // Pop "slot".
-      // Jump to the first instruction in the code stub.
-      __ jmp(Operand::StaticVariable(virtual_register));
+      __ jmp(extra);
     } else {
+      // The vector and slot were pushed onto the stack before starting the
+      // probe, and need to be dropped before calling the handler.
       __ pop(LoadWithVectorDescriptor::VectorRegister());
       __ pop(LoadDescriptor::SlotRegister());
       __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -110,19 +104,10 @@
 
     // Jump to the first instruction in the code stub.
     if (is_vector_store) {
-      // The vector and slot were pushed onto the stack before starting the
-      // probe, and need to be dropped before calling the handler.
-      Register vector = StoreWithVectorDescriptor::VectorRegister();
       DCHECK(offset.is(StoreWithVectorDescriptor::SlotRegister()));
-      __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ mov(Operand::StaticVariable(virtual_register), offset);
-      __ pop(vector);
-      __ pop(offset);  // Pop "slot".
-      __ jmp(Operand::StaticVariable(virtual_register));
-    } else {
-      __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ jmp(offset);
     }
+    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
+    __ jmp(offset);
 
     // Pop at miss.
     __ bind(&miss);
diff --git a/src/ic/ic-inl.h b/src/ic/ic-inl.h
index f77c40a..4fc8ada 100644
--- a/src/ic/ic-inl.h
+++ b/src/ic/ic-inl.h
@@ -7,7 +7,6 @@
 
 #include "src/ic/ic.h"
 
-#include "src/compiler.h"
 #include "src/debug/debug.h"
 #include "src/macro-assembler.h"
 #include "src/prototype.h"
diff --git a/src/ic/ic-state.cc b/src/ic/ic-state.cc
index d157c92..ea1f16c 100644
--- a/src/ic/ic-state.cc
+++ b/src/ic/ic-state.cc
@@ -189,15 +189,14 @@
 #undef GENERATE
 }
 
-
-Type* BinaryOpICState::GetResultType() const {
+AstType* BinaryOpICState::GetResultType() const {
   Kind result_kind = result_kind_;
   if (HasSideEffects()) {
     result_kind = NONE;
   } else if (result_kind == GENERIC && op_ == Token::ADD) {
-    return Type::NumberOrString();
+    return AstType::NumberOrString();
   } else if (result_kind == NUMBER && op_ == Token::SHR) {
-    return Type::Unsigned32();
+    return AstType::Unsigned32();
   }
   DCHECK_NE(GENERIC, result_kind);
   return KindToType(result_kind);
@@ -318,20 +317,20 @@
 
 
 // static
-Type* BinaryOpICState::KindToType(Kind kind) {
+AstType* BinaryOpICState::KindToType(Kind kind) {
   switch (kind) {
     case NONE:
-      return Type::None();
+      return AstType::None();
     case SMI:
-      return Type::SignedSmall();
+      return AstType::SignedSmall();
     case INT32:
-      return Type::Signed32();
+      return AstType::Signed32();
     case NUMBER:
-      return Type::Number();
+      return AstType::Number();
     case STRING:
-      return Type::String();
+      return AstType::String();
     case GENERIC:
-      return Type::Any();
+      return AstType::Any();
   }
   UNREACHABLE();
   return NULL;
@@ -365,29 +364,28 @@
   return NULL;
 }
 
-
-Type* CompareICState::StateToType(Zone* zone, State state, Handle<Map> map) {
+AstType* CompareICState::StateToType(Zone* zone, State state, Handle<Map> map) {
   switch (state) {
     case UNINITIALIZED:
-      return Type::None();
+      return AstType::None();
     case BOOLEAN:
-      return Type::Boolean();
+      return AstType::Boolean();
     case SMI:
-      return Type::SignedSmall();
+      return AstType::SignedSmall();
     case NUMBER:
-      return Type::Number();
+      return AstType::Number();
     case STRING:
-      return Type::String();
+      return AstType::String();
     case INTERNALIZED_STRING:
-      return Type::InternalizedString();
+      return AstType::InternalizedString();
     case UNIQUE_NAME:
-      return Type::UniqueName();
+      return AstType::UniqueName();
     case RECEIVER:
-      return Type::Receiver();
+      return AstType::Receiver();
     case KNOWN_RECEIVER:
-      return map.is_null() ? Type::Receiver() : Type::Class(map, zone);
+      return map.is_null() ? AstType::Receiver() : AstType::Class(map, zone);
     case GENERIC:
-      return Type::Any();
+      return AstType::Any();
   }
   UNREACHABLE();
   return NULL;
diff --git a/src/ic/ic-state.h b/src/ic/ic-state.h
index 6888a7a..38be57a 100644
--- a/src/ic/ic-state.h
+++ b/src/ic/ic-state.h
@@ -6,6 +6,7 @@
 #define V8_IC_STATE_H_
 
 #include "src/macro-assembler.h"
+#include "src/parsing/token.h"
 
 namespace v8 {
 namespace internal {
@@ -120,9 +121,9 @@
   Token::Value op() const { return op_; }
   Maybe<int> fixed_right_arg() const { return fixed_right_arg_; }
 
-  Type* GetLeftType() const { return KindToType(left_kind_); }
-  Type* GetRightType() const { return KindToType(right_kind_); }
-  Type* GetResultType() const;
+  AstType* GetLeftType() const { return KindToType(left_kind_); }
+  AstType* GetRightType() const { return KindToType(right_kind_); }
+  AstType* GetResultType() const;
 
   void Update(Handle<Object> left, Handle<Object> right, Handle<Object> result);
 
@@ -140,7 +141,7 @@
   Kind UpdateKind(Handle<Object> object, Kind kind) const;
 
   static const char* KindToString(Kind kind);
-  static Type* KindToType(Kind kind);
+  static AstType* KindToType(Kind kind);
   static bool KindMaybeSmi(Kind kind) {
     return (kind >= SMI && kind <= NUMBER) || kind == GENERIC;
   }
@@ -202,8 +203,8 @@
     GENERIC
   };
 
-  static Type* StateToType(Zone* zone, State state,
-                           Handle<Map> map = Handle<Map>());
+  static AstType* StateToType(Zone* zone, State state,
+                              Handle<Map> map = Handle<Map>());
 
   static State NewInputState(State old_state, Handle<Object> value);
 
diff --git a/src/ic/ic.cc b/src/ic/ic.cc
index b72791a..0e751bd 100644
--- a/src/ic/ic.cc
+++ b/src/ic/ic.cc
@@ -183,6 +183,19 @@
   extra_ic_state_ = target->extra_ic_state();
 }
 
+// The ICs that don't pass slot and vector through the stack have to
+// save/restore them in the dispatcher.
+bool IC::ShouldPushPopSlotAndVector(Code::Kind kind) {
+  if (kind == Code::LOAD_IC || kind == Code::LOAD_GLOBAL_IC ||
+      kind == Code::KEYED_LOAD_IC || kind == Code::CALL_IC) {
+    return true;
+  }
+  if (kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC) {
+    return !StoreWithVectorDescriptor::kPassLastArgsOnStack;
+  }
+  return false;
+}
+
 InlineCacheState IC::StateFromCode(Code* code) {
   Isolate* isolate = code->GetIsolate();
   switch (code->kind()) {
@@ -231,13 +244,6 @@
   return code;
 }
 
-
-bool IC::AddressIsOptimizedCode() const {
-  Code* host =
-      isolate()->inner_pointer_to_code_cache()->GetCacheEntry(address())->code;
-  return host->kind() == Code::OPTIMIZED_FUNCTION;
-}
-
 static void LookupForRead(LookupIterator* it) {
   for (; it->IsFound(); it->Next()) {
     switch (it->state()) {
@@ -270,7 +276,7 @@
   }
 }
 
-bool IC::ShouldRecomputeHandler(Handle<Object> receiver, Handle<String> name) {
+bool IC::ShouldRecomputeHandler(Handle<String> name) {
   if (!RecomputeHandlerForName(name)) return false;
 
   DCHECK(UseVector());
@@ -320,7 +326,7 @@
   // Remove the target from the code cache if it became invalid
   // because of changes in the prototype chain to avoid hitting it
   // again.
-  if (ShouldRecomputeHandler(receiver, Handle<String>::cast(name))) {
+  if (ShouldRecomputeHandler(Handle<String>::cast(name))) {
     MarkRecomputeHandler(name);
   }
 }
@@ -728,7 +734,6 @@
 
   number_of_valid_maps++;
   if (number_of_valid_maps > 1 && is_keyed()) return false;
-  Handle<Code> ic;
   if (number_of_valid_maps == 1) {
     ConfigureVectorState(name, receiver_map(), code);
   } else {
@@ -1413,17 +1418,18 @@
                                Object);
   } else if (FLAG_use_ic && !object->IsAccessCheckNeeded() &&
              !object->IsJSValue()) {
-    if (object->IsJSObject() || (object->IsString() && key->IsNumber())) {
-      Handle<HeapObject> receiver = Handle<HeapObject>::cast(object);
-      if (object->IsString() || key->IsSmi()) UpdateLoadElement(receiver);
+    if ((object->IsJSObject() && key->IsSmi()) ||
+        (object->IsString() && key->IsNumber())) {
+      UpdateLoadElement(Handle<HeapObject>::cast(object));
+      TRACE_IC("LoadIC", key);
     }
   }
 
   if (!is_vector_set()) {
     ConfigureVectorState(MEGAMORPHIC, key);
     TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");
+    TRACE_IC("LoadIC", key);
   }
-  TRACE_IC("LoadIC", key);
 
   if (!load_handle.is_null()) return load_handle;
 
@@ -2237,7 +2243,8 @@
 RUNTIME_FUNCTION(Runtime_CallIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
-  DCHECK(args.length() == 3);
+  DCHECK_EQ(3, args.length());
+  // Runtime functions don't follow the IC's calling convention.
   Handle<Object> function = args.at<Object>(0);
   Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(1);
   Handle<Smi> slot = args.at<Smi>(2);
@@ -2253,9 +2260,9 @@
 RUNTIME_FUNCTION(Runtime_LoadIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
-  Handle<Object> receiver = args.at<Object>(0);
-
   DCHECK_EQ(4, args.length());
+  // Runtime functions don't follow the IC's calling convention.
+  Handle<Object> receiver = args.at<Object>(0);
   Handle<Smi> slot = args.at<Smi>(2);
   Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(3);
   FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
@@ -2294,6 +2301,7 @@
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
   DCHECK_EQ(2, args.length());
+  // Runtime functions don't follow the IC's calling convention.
   Handle<JSGlobalObject> global = isolate->global_object();
   Handle<Smi> slot = args.at<Smi>(0);
   Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(1);
@@ -2364,10 +2372,10 @@
 RUNTIME_FUNCTION(Runtime_KeyedLoadIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
+  DCHECK_EQ(4, args.length());
+  // Runtime functions don't follow the IC's calling convention.
   Handle<Object> receiver = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
-
-  DCHECK(args.length() == 4);
   Handle<Smi> slot = args.at<Smi>(2);
   Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(3);
   FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
@@ -2381,8 +2389,8 @@
 RUNTIME_FUNCTION(Runtime_KeyedLoadIC_MissFromStubFailure) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
-  DCHECK_EQ(4, args.length());
   typedef LoadWithVectorDescriptor Descriptor;
+  DCHECK_EQ(Descriptor::kParameterCount, args.length());
   Handle<Object> receiver = args.at<Object>(Descriptor::kReceiver);
   Handle<Object> key = args.at<Object>(Descriptor::kName);
   Handle<Smi> slot = args.at<Smi>(Descriptor::kSlot);
@@ -2400,13 +2408,13 @@
 RUNTIME_FUNCTION(Runtime_StoreIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
-  Handle<Object> receiver = args.at<Object>(0);
-  Handle<Name> key = args.at<Name>(1);
-  Handle<Object> value = args.at<Object>(2);
-
-  DCHECK(args.length() == 5 || args.length() == 6);
-  Handle<Smi> slot = args.at<Smi>(3);
-  Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(4);
+  DCHECK_EQ(5, args.length());
+  // Runtime functions don't follow the IC's calling convention.
+  Handle<Object> value = args.at<Object>(0);
+  Handle<Smi> slot = args.at<Smi>(1);
+  Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(2);
+  Handle<Object> receiver = args.at<Object>(3);
+  Handle<Name> key = args.at<Name>(4);
   FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
   if (vector->GetKind(vector_slot) == FeedbackVectorSlotKind::STORE_IC) {
     StoreICNexus nexus(vector, vector_slot);
@@ -2424,88 +2432,17 @@
 }
 
 
-RUNTIME_FUNCTION(Runtime_StoreIC_MissFromStubFailure) {
-  TimerEventScope<TimerEventIcMiss> timer(isolate);
-  HandleScope scope(isolate);
-  DCHECK_EQ(5, args.length());
-  typedef StoreWithVectorDescriptor Descriptor;
-  Handle<Object> receiver = args.at<Object>(Descriptor::kReceiver);
-  Handle<Name> key = args.at<Name>(Descriptor::kName);
-  Handle<Object> value = args.at<Object>(Descriptor::kValue);
-  Handle<Smi> slot = args.at<Smi>(Descriptor::kSlot);
-  Handle<TypeFeedbackVector> vector =
-      args.at<TypeFeedbackVector>(Descriptor::kVector);
-
-  FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
-  if (vector->GetKind(vector_slot) == FeedbackVectorSlotKind::STORE_IC) {
-    StoreICNexus nexus(vector, vector_slot);
-    StoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Store(receiver, key, value));
-  } else {
-    DCHECK_EQ(FeedbackVectorSlotKind::KEYED_STORE_IC,
-              vector->GetKind(vector_slot));
-    KeyedStoreICNexus nexus(vector, vector_slot);
-    KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Store(receiver, key, value));
-  }
-}
-
-RUNTIME_FUNCTION(Runtime_TransitionStoreIC_MissFromStubFailure) {
-  TimerEventScope<TimerEventIcMiss> timer(isolate);
-  HandleScope scope(isolate);
-  Handle<Object> receiver = args.at<Object>(0);
-  Handle<Name> key = args.at<Name>(1);
-  Handle<Object> value = args.at<Object>(2);
-
-  int length = args.length();
-  DCHECK(length == 5 || length == 6);
-  // TODO(ishell): use VectorStoreTransitionDescriptor indices here and update
-  // this comment:
-  //
-  // We might have slot and vector, for a normal miss (slot(3), vector(4)).
-  // Or, map and vector for a transitioning store miss (map(3), vector(4)).
-  // In this case, we need to recover the slot from a virtual register.
-  // If length == 6, then a map is included (map(3), slot(4), vector(5)).
-  Handle<Smi> slot;
-  Handle<TypeFeedbackVector> vector;
-  if (length == 5) {
-    vector = args.at<TypeFeedbackVector>(4);
-    slot = handle(
-        *reinterpret_cast<Smi**>(isolate->virtual_slot_register_address()),
-        isolate);
-  } else {
-    vector = args.at<TypeFeedbackVector>(5);
-    slot = args.at<Smi>(4);
-  }
-
-  FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
-  if (vector->GetKind(vector_slot) == FeedbackVectorSlotKind::STORE_IC) {
-    StoreICNexus nexus(vector, vector_slot);
-    StoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Store(receiver, key, value));
-  } else {
-    DCHECK_EQ(FeedbackVectorSlotKind::KEYED_STORE_IC,
-              vector->GetKind(vector_slot));
-    KeyedStoreICNexus nexus(vector, vector_slot);
-    KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Store(receiver, key, value));
-  }
-}
-
 // Used from ic-<arch>.cc.
 RUNTIME_FUNCTION(Runtime_KeyedStoreIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
   DCHECK_EQ(5, args.length());
-  Handle<Object> receiver = args.at<Object>(0);
-  Handle<Object> key = args.at<Object>(1);
-  Handle<Object> value = args.at<Object>(2);
-  Handle<Smi> slot = args.at<Smi>(3);
-  Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(4);
+  // Runtime functions don't follow the IC's calling convention.
+  Handle<Object> value = args.at<Object>(0);
+  Handle<Smi> slot = args.at<Smi>(1);
+  Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(2);
+  Handle<Object> receiver = args.at<Object>(3);
+  Handle<Object> key = args.at<Object>(4);
   FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
   KeyedStoreICNexus nexus(vector, vector_slot);
   KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate, &nexus);
@@ -2514,31 +2451,14 @@
 }
 
 
-RUNTIME_FUNCTION(Runtime_KeyedStoreIC_MissFromStubFailure) {
-  TimerEventScope<TimerEventIcMiss> timer(isolate);
-  HandleScope scope(isolate);
-  DCHECK_EQ(5, args.length());
-  typedef StoreWithVectorDescriptor Descriptor;
-  Handle<Object> receiver = args.at<Object>(Descriptor::kReceiver);
-  Handle<Object> key = args.at<Object>(Descriptor::kName);
-  Handle<Object> value = args.at<Object>(Descriptor::kValue);
-  Handle<Smi> slot = args.at<Smi>(Descriptor::kSlot);
-  Handle<TypeFeedbackVector> vector =
-      args.at<TypeFeedbackVector>(Descriptor::kVector);
-  FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
-  KeyedStoreICNexus nexus(vector, vector_slot);
-  KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-  ic.UpdateState(receiver, key);
-  RETURN_RESULT_OR_FAILURE(isolate, ic.Store(receiver, key, value));
-}
-
-
 RUNTIME_FUNCTION(Runtime_KeyedStoreIC_Slow) {
   HandleScope scope(isolate);
   DCHECK_EQ(5, args.length());
-  Handle<Object> object = args.at<Object>(0);
-  Handle<Object> key = args.at<Object>(1);
-  Handle<Object> value = args.at<Object>(2);
+  // Runtime functions don't follow the IC's calling convention.
+  Handle<Object> value = args.at<Object>(0);
+  // slot and vector parameters are not used.
+  Handle<Object> object = args.at<Object>(3);
+  Handle<Object> key = args.at<Object>(4);
   LanguageMode language_mode;
   KeyedStoreICNexus nexus(isolate);
   KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate, &nexus);
@@ -2552,16 +2472,14 @@
 RUNTIME_FUNCTION(Runtime_ElementsTransitionAndStoreIC_Miss) {
   TimerEventScope<TimerEventIcMiss> timer(isolate);
   HandleScope scope(isolate);
-  // Length == 5 or 6, depending on whether the vector slot
-  // is passed in a virtual register or not.
-  DCHECK(args.length() == 5 || args.length() == 6);
+  // Runtime functions don't follow the IC's calling convention.
   Handle<Object> object = args.at<Object>(0);
   Handle<Object> key = args.at<Object>(1);
   Handle<Object> value = args.at<Object>(2);
   Handle<Map> map = args.at<Map>(3);
   LanguageMode language_mode;
   KeyedStoreICNexus nexus(isolate);
-  KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
+  KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate, &nexus);
   language_mode = ic.language_mode();
   if (object->IsJSObject()) {
     JSObject::TransitionElementsKind(Handle<JSObject>::cast(object),
@@ -3000,35 +2918,5 @@
 
   return *result;
 }
-
-
-RUNTIME_FUNCTION(Runtime_LoadIC_MissFromStubFailure) {
-  TimerEventScope<TimerEventIcMiss> timer(isolate);
-  HandleScope scope(isolate);
-  DCHECK_EQ(4, args.length());
-  typedef LoadWithVectorDescriptor Descriptor;
-  Handle<Object> receiver = args.at<Object>(Descriptor::kReceiver);
-  Handle<Name> key = args.at<Name>(Descriptor::kName);
-  Handle<Smi> slot = args.at<Smi>(Descriptor::kSlot);
-  Handle<TypeFeedbackVector> vector =
-      args.at<TypeFeedbackVector>(Descriptor::kVector);
-  FeedbackVectorSlot vector_slot = vector->ToSlot(slot->value());
-  // A monomorphic or polymorphic KeyedLoadIC with a string key can call the
-  // LoadIC miss handler if the handler misses. Since the vector Nexus is
-  // set up outside the IC, handle that here.
-  if (vector->GetKind(vector_slot) == FeedbackVectorSlotKind::LOAD_IC) {
-    LoadICNexus nexus(vector, vector_slot);
-    LoadIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Load(receiver, key));
-  } else {
-    DCHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC,
-              vector->GetKind(vector_slot));
-    KeyedLoadICNexus nexus(vector, vector_slot);
-    KeyedLoadIC ic(IC::EXTRA_CALL_FRAME, isolate, &nexus);
-    ic.UpdateState(receiver, key);
-    RETURN_RESULT_OR_FAILURE(isolate, ic.Load(receiver, key));
-  }
-}
 }  // namespace internal
 }  // namespace v8
diff --git a/src/ic/ic.h b/src/ic/ic.h
index 35f3844..bf395f1 100644
--- a/src/ic/ic.h
+++ b/src/ic/ic.h
@@ -75,6 +75,10 @@
            kind == Code::STORE_IC || kind == Code::KEYED_STORE_IC;
   }
 
+  // The ICs that don't pass slot and vector through the stack have to
+  // save/restore them in the dispatcher.
+  static bool ShouldPushPopSlotAndVector(Code::Kind kind);
+
   static InlineCacheState StateFromCode(Code* code);
 
  protected:
@@ -87,7 +91,6 @@
   // Get the code object of the caller.
   Code* GetCode() const;
 
-  bool AddressIsOptimizedCode() const;
   inline bool AddressIsDeoptimizedCode() const;
   inline static bool AddressIsDeoptimizedCode(Isolate* isolate,
                                               Address address);
@@ -168,7 +171,7 @@
            kind_ == Code::KEYED_STORE_IC);
     return kind_;
   }
-  bool ShouldRecomputeHandler(Handle<Object> receiver, Handle<String> name);
+  bool ShouldRecomputeHandler(Handle<String> name);
 
   ExtraICState extra_ic_state() const { return extra_ic_state_; }
 
diff --git a/src/ic/mips/handler-compiler-mips.cc b/src/ic/mips/handler-compiler-mips.cc
index f4e0f0b..df7a0df 100644
--- a/src/ic/mips/handler-compiler-mips.cc
+++ b/src/ic/mips/handler-compiler-mips.cc
@@ -107,13 +107,19 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector, slot);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
+  __ Push(slot, vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(vector, slot);
+  __ Pop(slot, vector);
 }
 
 
@@ -123,6 +129,13 @@
   __ Addu(sp, sp, Operand(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in ra register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in ra register.
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -317,24 +330,6 @@
   __ TailCallStub(&stub);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -353,12 +348,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -615,6 +604,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
@@ -675,7 +667,7 @@
     DiscardVectorAndSlot();
   }
   __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, result);
+  __ Move(v0, result);  // Ensure the stub returns correct value.
 
   FrontendFooter(name, &miss);
 
diff --git a/src/ic/mips/ic-mips.cc b/src/ic/mips/ic-mips.cc
index 3a28b13..ce9e3d9 100644
--- a/src/ic/mips/ic-mips.cc
+++ b/src/ic/mips/ic-mips.cc
@@ -494,7 +494,8 @@
   __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
   __ Lsa(address, address, key, kPointerSizeLog2 - kSmiTagSize);
   __ sw(value, MemOperand(address));
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&non_smi_value);
   // Escape to elements kind transition case.
@@ -514,7 +515,8 @@
   __ mov(scratch, value);  // Preserve the value which is returned.
   __ RecordWrite(elements, address, scratch, kRAHasNotBeenSaved,
                  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(fast_double);
   if (check_map == kCheckMap) {
@@ -543,7 +545,8 @@
     __ Addu(scratch, key, Operand(Smi::FromInt(1)));
     __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   }
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&transition_smi_elements);
   // Transition the array appropriately depending on the value type.
@@ -710,10 +713,11 @@
 
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 
@@ -723,6 +727,14 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
+
 void StoreIC::GenerateMiss(MacroAssembler* masm) {
   StoreIC_PushArgs(masm);
 
@@ -748,7 +760,8 @@
   GenerateDictionaryStore(masm, &miss, dictionary, name, value, t2, t5);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->ic_store_normal_hit(), 1, t2, t5);
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&miss);
   __ IncrementCounter(counters->ic_store_normal_miss(), 1, t2, t5);
diff --git a/src/ic/mips64/handler-compiler-mips64.cc b/src/ic/mips64/handler-compiler-mips64.cc
index 53b097f..2190f6d 100644
--- a/src/ic/mips64/handler-compiler-mips64.cc
+++ b/src/ic/mips64/handler-compiler-mips64.cc
@@ -107,13 +107,19 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector, slot);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
+  __ Push(slot, vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(vector, slot);
+  __ Pop(slot, vector);
 }
 
 
@@ -123,6 +129,13 @@
   __ Daddu(sp, sp, Operand(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in ra register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in ra register.
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -317,24 +330,6 @@
   __ TailCallStub(&stub);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -353,12 +348,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -615,6 +604,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
@@ -675,7 +667,7 @@
     DiscardVectorAndSlot();
   }
   __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, result);
+  __ Move(v0, result);  // Ensure the stub returns correct value.
 
   FrontendFooter(name, &miss);
 
diff --git a/src/ic/mips64/ic-mips64.cc b/src/ic/mips64/ic-mips64.cc
index b551bc7..c2f3cb6 100644
--- a/src/ic/mips64/ic-mips64.cc
+++ b/src/ic/mips64/ic-mips64.cc
@@ -496,7 +496,8 @@
   __ SmiScale(scratch, key, kPointerSizeLog2);
   __ Daddu(address, address, scratch);
   __ sd(value, MemOperand(address));
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&non_smi_value);
   // Escape to elements kind transition case.
@@ -518,7 +519,8 @@
   __ mov(scratch, value);  // Preserve the value which is returned.
   __ RecordWrite(elements, address, scratch, kRAHasNotBeenSaved,
                  kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(fast_double);
   if (check_map == kCheckMap) {
@@ -549,7 +551,8 @@
     __ Daddu(scratch, key, Operand(Smi::FromInt(1)));
     __ sd(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
   }
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&transition_smi_elements);
   // Transition the array appropriately depending on the value type.
@@ -714,10 +717,11 @@
 
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 
@@ -727,6 +731,14 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
+
 void StoreIC::GenerateMiss(MacroAssembler* masm) {
   StoreIC_PushArgs(masm);
 
@@ -750,7 +762,8 @@
   GenerateDictionaryStore(masm, &miss, dictionary, name, value, a6, a7);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->ic_store_normal_hit(), 1, a6, a7);
-  __ Ret();
+  __ Ret(USE_DELAY_SLOT);
+  __ Move(v0, value);  // Ensure the stub returns correct value.
 
   __ bind(&miss);
   __ IncrementCounter(counters->ic_store_normal_miss(), 1, a6, a7);
diff --git a/src/ic/ppc/handler-compiler-ppc.cc b/src/ic/ppc/handler-compiler-ppc.cc
index 22c0608..aafdc77 100644
--- a/src/ic/ppc/handler-compiler-ppc.cc
+++ b/src/ic/ppc/handler-compiler-ppc.cc
@@ -108,13 +108,19 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector, slot);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
+  __ Push(slot, vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(vector, slot);
+  __ Pop(slot, vector);
 }
 
 
@@ -124,6 +130,13 @@
   __ addi(sp, sp, Operand(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -325,24 +338,6 @@
   __ TailCallStub(&stub);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -361,12 +356,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -624,6 +613,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
diff --git a/src/ic/ppc/ic-ppc.cc b/src/ic/ppc/ic-ppc.cc
index fd2962d..6dd7881 100644
--- a/src/ic/ppc/ic-ppc.cc
+++ b/src/ic/ppc/ic-ppc.cc
@@ -451,10 +451,11 @@
 
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 
@@ -464,6 +465,13 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 static void KeyedStoreGenerateMegamorphicHelper(
     MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
diff --git a/src/ic/s390/handler-compiler-s390.cc b/src/ic/s390/handler-compiler-s390.cc
index b399c5a..504bace 100644
--- a/src/ic/s390/handler-compiler-s390.cc
+++ b/src/ic/s390/handler-compiler-s390.cc
@@ -105,12 +105,18 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector, slot);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
+  __ Push(slot, vector);
 }
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(vector, slot);
+  __ Pop(slot, vector);
 }
 
 void PropertyHandlerCompiler::DiscardVectorAndSlot() {
@@ -119,6 +125,14 @@
   __ la(sp, MemOperand(sp, 2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  // No-op. Return address is in lr register.
+}
+
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
     Handle<Name> name, Register scratch0, Register scratch1) {
@@ -310,21 +324,6 @@
   __ TailCallStub(&stub);
 }
 
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
-          StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
-}
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  StoreIC_PushArgs(masm);
-
-  // The slow case calls into the runtime to complete the store without causing
-  // an IC miss that would otherwise cause a transition to the generic stub.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -340,11 +339,6 @@
   __ mov(this->name(), Operand(name));
 }
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -593,6 +587,10 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
+
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
     LanguageMode language_mode) {
diff --git a/src/ic/s390/ic-s390.cc b/src/ic/s390/ic-s390.cc
index 6bb484a..08eb3e4 100644
--- a/src/ic/s390/ic-s390.cc
+++ b/src/ic/s390/ic-s390.cc
@@ -437,10 +437,11 @@
 }
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
-          StoreDescriptor::ValueRegister(),
+  __ Push(StoreWithVectorDescriptor::ValueRegister(),
           StoreWithVectorDescriptor::SlotRegister(),
-          StoreWithVectorDescriptor::VectorRegister());
+          StoreWithVectorDescriptor::VectorRegister(),
+          StoreWithVectorDescriptor::ReceiverRegister(),
+          StoreWithVectorDescriptor::NameRegister());
 }
 
 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
@@ -449,6 +450,14 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  StoreIC_PushArgs(masm);
+
+  // The slow case calls into the runtime to complete the store without causing
+  // an IC miss that would otherwise cause a transition to the generic stub.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
+
 static void KeyedStoreGenerateMegamorphicHelper(
     MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
     KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length,
diff --git a/src/ic/stub-cache.cc b/src/ic/stub-cache.cc
index 31d7e2e..fe1adaa 100644
--- a/src/ic/stub-cache.cc
+++ b/src/ic/stub-cache.cc
@@ -4,6 +4,7 @@
 
 #include "src/ic/stub-cache.h"
 
+#include "src/ast/ast.h"
 #include "src/base/bits.h"
 #include "src/type-info.h"
 
diff --git a/src/ic/stub-cache.h b/src/ic/stub-cache.h
index a053555..ebcff44 100644
--- a/src/ic/stub-cache.h
+++ b/src/ic/stub-cache.h
@@ -10,6 +10,7 @@
 namespace v8 {
 namespace internal {
 
+class SmallMapList;
 
 // The stub cache is used for megamorphic property accesses.
 // It maps (map, name, type) to property access handlers. The cache does not
diff --git a/src/ic/x64/handler-compiler-x64.cc b/src/ic/x64/handler-compiler-x64.cc
index ba4daed..f386fc5 100644
--- a/src/ic/x64/handler-compiler-x64.cc
+++ b/src/ic/x64/handler-compiler-x64.cc
@@ -20,15 +20,21 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Push(vector);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
   __ Push(slot);
+  __ Push(vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ Pop(slot);
   __ Pop(vector);
+  __ Pop(slot);
 }
 
 
@@ -38,6 +44,15 @@
   __ addp(rsp, Immediate(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ Push(tmp);
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ Pop(tmp);
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -321,34 +336,6 @@
   __ ret(0);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
-
-  __ PopReturnAddressTo(r11);
-  __ Push(receiver);
-  __ Push(name);
-  __ Push(value);
-  __ Push(slot);
-  __ Push(vector);
-  __ PushReturnAddressFrom(r11);
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  // Return address is on the stack.
-  StoreIC_PushArgs(masm);
-
-  // Do tail-call to runtime routine.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM((masm()))
 
@@ -367,12 +354,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(false);  // Not implemented.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -638,6 +619,9 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  STATIC_ASSERT(!StoreWithVectorDescriptor::kPassLastArgsOnStack);
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
diff --git a/src/ic/x64/ic-x64.cc b/src/ic/x64/ic-x64.cc
index 21a1148..d0445a2 100644
--- a/src/ic/x64/ic-x64.cc
+++ b/src/ic/x64/ic-x64.cc
@@ -706,21 +706,20 @@
 }
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register temp = r11;
-  DCHECK(!temp.is(receiver) && !temp.is(name) && !temp.is(value));
-
-  __ PopReturnAddressTo(temp);
-  __ Push(receiver);
-  __ Push(name);
-  __ Push(value);
+  Register receiver = StoreWithVectorDescriptor::ReceiverRegister();
+  Register name = StoreWithVectorDescriptor::NameRegister();
+  Register value = StoreWithVectorDescriptor::ValueRegister();
   Register slot = StoreWithVectorDescriptor::SlotRegister();
   Register vector = StoreWithVectorDescriptor::VectorRegister();
-  DCHECK(!temp.is(slot) && !temp.is(vector));
+  Register temp = r11;
+  DCHECK(!AreAliased(receiver, name, value, slot, vector, temp));
+
+  __ PopReturnAddressTo(temp);
+  __ Push(value);
   __ Push(slot);
   __ Push(vector);
+  __ Push(receiver);
+  __ Push(name);
   __ PushReturnAddressFrom(temp);
 }
 
@@ -764,6 +763,13 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  // Return address is on the stack.
+  StoreIC_PushArgs(masm);
+
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 #undef __
 
diff --git a/src/ic/x87/handler-compiler-x87.cc b/src/ic/x87/handler-compiler-x87.cc
index 4bf0af2..5eca3dc 100644
--- a/src/ic/x87/handler-compiler-x87.cc
+++ b/src/ic/x87/handler-compiler-x87.cc
@@ -59,15 +59,21 @@
 void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                 Register slot) {
   MacroAssembler* masm = this->masm();
-  __ push(vector);
+  STATIC_ASSERT(LoadWithVectorDescriptor::kSlot <
+                LoadWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kSlot <
+                StoreWithVectorDescriptor::kVector);
+  STATIC_ASSERT(StoreTransitionDescriptor::kSlot <
+                StoreTransitionDescriptor::kVector);
   __ push(slot);
+  __ push(vector);
 }
 
 
 void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
   MacroAssembler* masm = this->masm();
-  __ pop(slot);
   __ pop(vector);
+  __ pop(slot);
 }
 
 
@@ -77,6 +83,15 @@
   __ add(esp, Immediate(2 * kPointerSize));
 }
 
+void PropertyHandlerCompiler::PushReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ push(tmp);
+}
+
+void PropertyHandlerCompiler::PopReturnAddress(Register tmp) {
+  MacroAssembler* masm = this->masm();
+  __ pop(tmp);
+}
 
 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
     MacroAssembler* masm, Label* miss_label, Register receiver,
@@ -150,12 +165,16 @@
   DCHECK(!accessor_holder.is(scratch));
   // Copy return value.
   __ pop(scratch);
-  // receiver
-  __ push(receiver);
-  // Write the arguments to stack frame.
+
   if (is_store) {
-    DCHECK(!receiver.is(store_parameter));
-    DCHECK(!scratch.is(store_parameter));
+    // Discard stack arguments.
+    __ add(esp, Immediate(StoreWithVectorDescriptor::kStackArgumentsCount *
+                          kPointerSize));
+  }
+  // Write the receiver and arguments to stack frame.
+  __ push(receiver);
+  if (is_store) {
+    DCHECK(!AreAliased(receiver, scratch, store_parameter));
     __ push(store_parameter);
   }
   __ push(scratch);
@@ -252,8 +271,13 @@
     MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
     int accessor_index, int expected_arguments, Register scratch) {
   // ----------- S t a t e -------------
-  //  -- esp[0] : return address
+  //  -- esp[12] : value
+  //  -- esp[8]  : slot
+  //  -- esp[4]  : vector
+  //  -- esp[0]  : return address
   // -----------------------------------
+  __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
+
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
 
@@ -290,7 +314,14 @@
     // Restore context register.
     __ pop(esi);
   }
-  __ ret(0);
+  if (accessor_index >= 0) {
+    __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
+  } else {
+    // If we generate a global code snippet for deoptimization only, don't try
+    // to drop stack arguments for the StoreIC because they are not part of the
+    // expression stack and the deoptimizer does not reconstruct them.
+    __ ret(0);
+  }
 }
 
 
@@ -316,32 +347,6 @@
   __ CallRuntime(id);
 }
 
-
-static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
-
-  __ xchg(receiver, Operand(esp, 0));
-  __ push(name);
-  __ push(value);
-  __ push(slot);
-  __ push(vector);
-  __ push(receiver);  // which contains the return address.
-}
-
-
-void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
-  // Return address is on the stack.
-  StoreIC_PushArgs(masm);
-
-  // Do tail-call to runtime routine.
-  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
-}
-
-
 #undef __
 #define __ ACCESS_MASM(masm())
 
@@ -360,19 +365,6 @@
 }
 
 
-void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
-    Register current_map, Register destination_map) {
-  DCHECK(destination_map.is(StoreTransitionHelper::MapRegister()));
-  DCHECK(current_map.is(StoreTransitionHelper::VectorRegister()));
-  ExternalReference virtual_slot =
-      ExternalReference::virtual_slot_register(isolate());
-  __ mov(destination_map, current_map);
-  __ pop(current_map);
-  __ mov(Operand::StaticVariable(virtual_slot), current_map);
-  __ pop(current_map);  // put vector in place.
-}
-
-
 void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                    Register map_reg,
                                                    Register scratch,
@@ -532,7 +524,7 @@
     Label success;
     __ jmp(&success);
     __ bind(miss);
-    if (IC::ICUseVector(kind())) {
+    if (IC::ShouldPushPopSlotAndVector(kind())) {
       DCHECK(kind() == Code::LOAD_IC);
       PopVectorAndSlot();
     }
@@ -547,7 +539,7 @@
     Label success;
     __ jmp(&success);
     GenerateRestoreName(miss, name);
-    if (IC::ICUseVector(kind())) PopVectorAndSlot();
+    DCHECK(!IC::ShouldPushPopSlotAndVector(kind()));
     TailCallBuiltin(masm(), MissBuiltin(kind()));
     __ bind(&success);
   }
@@ -641,13 +633,26 @@
   __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
 }
 
+void NamedStoreHandlerCompiler::ZapStackArgumentsRegisterAliases() {
+  // Zap register aliases of the arguments passed on the stack to ensure they
+  // are properly loaded by the handler (debug-only).
+  STATIC_ASSERT(Descriptor::kPassLastArgsOnStack);
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount == 3);
+  __ mov(Descriptor::ValueRegister(), Immediate(kDebugZapValue));
+  __ mov(Descriptor::SlotRegister(), Immediate(kDebugZapValue));
+  __ mov(Descriptor::VectorRegister(), Immediate(kDebugZapValue));
+}
 
 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
     Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
     LanguageMode language_mode) {
   Register holder_reg = Frontend(name);
+  __ LoadParameterFromStack<Descriptor>(value(), Descriptor::kValue);
 
   __ pop(scratch1());  // remove the return address
+  // Discard stack arguments.
+  __ add(esp, Immediate(StoreWithVectorDescriptor::kStackArgumentsCount *
+                        kPointerSize));
   __ push(receiver());
   __ push(holder_reg);
   // If the callback cannot leak, then push the callback directly,
@@ -679,7 +684,7 @@
 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
     Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
   Label miss;
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     PushVectorAndSlot();
   }
   FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);
@@ -701,7 +706,7 @@
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->ic_named_load_global_stub(), 1);
   // The code above already loads the result into the return register.
-  if (IC::ICUseVector(kind())) {
+  if (IC::ShouldPushPopSlotAndVector(kind())) {
     DiscardVectorAndSlot();
   }
   __ ret(0);
diff --git a/src/ic/x87/ic-compiler-x87.cc b/src/ic/x87/ic-compiler-x87.cc
index 9edf63b..11a8cdc 100644
--- a/src/ic/x87/ic-compiler-x87.cc
+++ b/src/ic/x87/ic-compiler-x87.cc
@@ -15,14 +15,21 @@
 
 void PropertyICCompiler::GenerateRuntimeSetProperty(
     MacroAssembler* masm, LanguageMode language_mode) {
-  // Return address is on the stack.
-  DCHECK(!ebx.is(StoreDescriptor::ReceiverRegister()) &&
-         !ebx.is(StoreDescriptor::NameRegister()) &&
-         !ebx.is(StoreDescriptor::ValueRegister()));
+  typedef StoreWithVectorDescriptor Descriptor;
+  STATIC_ASSERT(Descriptor::kStackArgumentsCount == 3);
+  // ----------- S t a t e -------------
+  //  -- esp[12] : value
+  //  -- esp[8]  : slot
+  //  -- esp[4]  : vector
+  //  -- esp[0]  : return address
+  // -----------------------------------
+  __ LoadParameterFromStack<Descriptor>(Descriptor::ValueRegister(),
+                                        Descriptor::kValue);
+
+  __ mov(Operand(esp, 12), Descriptor::ReceiverRegister());
+  __ mov(Operand(esp, 8), Descriptor::NameRegister());
+  __ mov(Operand(esp, 4), Descriptor::ValueRegister());
   __ pop(ebx);
-  __ push(StoreDescriptor::ReceiverRegister());
-  __ push(StoreDescriptor::NameRegister());
-  __ push(StoreDescriptor::ValueRegister());
   __ push(Immediate(Smi::FromInt(language_mode)));
   __ push(ebx);  // return address
 
diff --git a/src/ic/x87/ic-x87.cc b/src/ic/x87/ic-x87.cc
index 76933f0..baf435e 100644
--- a/src/ic/x87/ic-x87.cc
+++ b/src/ic/x87/ic-x87.cc
@@ -409,7 +409,7 @@
   }
   // It's irrelevant whether array is smi-only or not when writing a smi.
   __ mov(FixedArrayElementOperand(ebx, key), value);
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&non_smi_value);
   // Escape to elements kind transition case.
@@ -428,7 +428,7 @@
   __ mov(edx, value);  // Preserve the value which is returned.
   __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                       OMIT_SMI_CHECK);
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(fast_double);
   if (check_map == kCheckMap) {
@@ -457,7 +457,7 @@
     __ add(FieldOperand(receiver, JSArray::kLengthOffset),
            Immediate(Smi::FromInt(1)));
   }
-  __ ret(0);
+  __ ret(StoreWithVectorDescriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&transition_smi_elements);
   __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
@@ -504,12 +504,13 @@
 
 void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                        LanguageMode language_mode) {
+  typedef StoreWithVectorDescriptor Descriptor;
   // Return address is on the stack.
   Label slow, fast_object, fast_object_grow;
   Label fast_double, fast_double_grow;
   Label array, extra, check_if_double_array, maybe_name_key, miss;
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register key = StoreDescriptor::NameRegister();
+  Register receiver = Descriptor::ReceiverRegister();
+  Register key = Descriptor::NameRegister();
   DCHECK(receiver.is(edx));
   DCHECK(key.is(ecx));
 
@@ -522,6 +523,10 @@
   __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsAccessCheckNeeded));
   __ j(not_zero, &slow);
+
+  __ LoadParameterFromStack<Descriptor>(Descriptor::ValueRegister(),
+                                        Descriptor::kValue);
+
   // Check that the key is a smi.
   __ JumpIfNotSmi(key, &maybe_name_key);
   __ CmpInstanceType(edi, JS_ARRAY_TYPE);
@@ -551,22 +556,9 @@
   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
   __ JumpIfNotUniqueNameInstanceType(ebx, &slow);
 
-
-  // The handlers in the stub cache expect a vector and slot. Since we won't
-  // change the IC from any downstream misses, a dummy vector can be used.
-  Handle<TypeFeedbackVector> dummy_vector =
-      TypeFeedbackVector::DummyVector(masm->isolate());
-  int slot = dummy_vector->GetIndex(
-      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
-  __ push(Immediate(Smi::FromInt(slot)));
-  __ push(Immediate(dummy_vector));
-
   masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, edi,
                                                      no_reg);
 
-  __ pop(StoreWithVectorDescriptor::VectorRegister());
-  __ pop(StoreWithVectorDescriptor::SlotRegister());
-
   // Cache miss.
   __ jmp(&miss);
 
@@ -705,18 +697,21 @@
 }
 
 static void StoreIC_PushArgs(MacroAssembler* masm) {
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
+  Register receiver = StoreWithVectorDescriptor::ReceiverRegister();
+  Register name = StoreWithVectorDescriptor::NameRegister();
 
-  __ xchg(receiver, Operand(esp, 0));
+  STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+  // Current stack layout:
+  // - esp[12]   -- value
+  // - esp[8]    -- slot
+  // - esp[4]    -- vector
+  // - esp[0]    -- return address
+
+  Register return_address = StoreWithVectorDescriptor::SlotRegister();
+  __ pop(return_address);
+  __ push(receiver);
   __ push(name);
-  __ push(value);
-  __ push(slot);
-  __ push(vector);
-  __ push(receiver);  // Contains the return address.
+  __ push(return_address);
 }
 
 
@@ -730,32 +725,33 @@
 
 
 void StoreIC::GenerateNormal(MacroAssembler* masm) {
+  typedef StoreWithVectorDescriptor Descriptor;
   Label restore_miss;
-  Register receiver = StoreDescriptor::ReceiverRegister();
-  Register name = StoreDescriptor::NameRegister();
-  Register value = StoreDescriptor::ValueRegister();
-  Register vector = StoreWithVectorDescriptor::VectorRegister();
-  Register slot = StoreWithVectorDescriptor::SlotRegister();
+  Register receiver = Descriptor::ReceiverRegister();
+  Register name = Descriptor::NameRegister();
+  Register value = Descriptor::ValueRegister();
+  // Since the slot and vector values are passed on the stack, we can use the
+  // respective registers as scratch registers.
+  Register scratch1 = Descriptor::VectorRegister();
+  Register scratch2 = Descriptor::SlotRegister();
 
-  // A lot of registers are needed for storing to slow case
-  // objects. Push and restore receiver but rely on
-  // GenerateDictionaryStore preserving the value and name.
+  __ LoadParameterFromStack<Descriptor>(value, Descriptor::kValue);
+
+  // A lot of registers are needed for storing to slow case objects.
+  // Push and restore receiver but rely on GenerateDictionaryStore preserving
+  // the value and name.
   __ push(receiver);
-  __ push(vector);
-  __ push(slot);
 
-  Register dictionary = ebx;
+  Register dictionary = receiver;
   __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
   GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
-                          receiver, edi);
-  __ Drop(3);
+                          scratch1, scratch2);
+  __ Drop(1);
   Counters* counters = masm->isolate()->counters();
   __ IncrementCounter(counters->ic_store_normal_hit(), 1);
-  __ ret(0);
+  __ ret(Descriptor::kStackArgumentsCount * kPointerSize);
 
   __ bind(&restore_miss);
-  __ pop(slot);
-  __ pop(vector);
   __ pop(receiver);
   __ IncrementCounter(counters->ic_store_normal_miss(), 1);
   GenerateMiss(masm);
@@ -770,6 +766,13 @@
   __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
 }
 
+void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
+  // Return address is on the stack.
+  StoreIC_PushArgs(masm);
+
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
+}
 
 #undef __
 
diff --git a/src/ic/x87/stub-cache-x87.cc b/src/ic/x87/stub-cache-x87.cc
index e0656f7..68fa615 100644
--- a/src/ic/x87/stub-cache-x87.cc
+++ b/src/ic/x87/stub-cache-x87.cc
@@ -22,8 +22,6 @@
   ExternalReference key_offset(stub_cache->key_reference(table));
   ExternalReference value_offset(stub_cache->value_reference(table));
   ExternalReference map_offset(stub_cache->map_reference(table));
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   Label miss;
   Code::Kind ic_kind = stub_cache->ic_kind();
@@ -55,19 +53,15 @@
     }
 #endif
 
-    // The vector and slot were pushed onto the stack before starting the
-    // probe, and need to be dropped before calling the handler.
     if (is_vector_store) {
-      // The overlap here is rather embarrassing. One does what one must.
-      Register vector = StoreWithVectorDescriptor::VectorRegister();
+      // The value, vector and slot were passed to the IC on the stack and
+      // they are still there. So we can just jump to the handler.
       DCHECK(extra.is(StoreWithVectorDescriptor::SlotRegister()));
       __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ pop(vector);
-      __ mov(Operand::StaticVariable(virtual_register), extra);
-      __ pop(extra);  // Pop "slot".
-      // Jump to the first instruction in the code stub.
-      __ jmp(Operand::StaticVariable(virtual_register));
+      __ jmp(extra);
     } else {
+      // The vector and slot were pushed onto the stack before starting the
+      // probe, and need to be dropped before calling the handler.
       __ pop(LoadWithVectorDescriptor::VectorRegister());
       __ pop(LoadDescriptor::SlotRegister());
       __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -110,19 +104,10 @@
 
     // Jump to the first instruction in the code stub.
     if (is_vector_store) {
-      // The vector and slot were pushed onto the stack before starting the
-      // probe, and need to be dropped before calling the handler.
-      Register vector = StoreWithVectorDescriptor::VectorRegister();
       DCHECK(offset.is(StoreWithVectorDescriptor::SlotRegister()));
-      __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ mov(Operand::StaticVariable(virtual_register), offset);
-      __ pop(vector);
-      __ pop(offset);  // Pop "slot".
-      __ jmp(Operand::StaticVariable(virtual_register));
-    } else {
-      __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ jmp(offset);
     }
+    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
+    __ jmp(offset);
 
     // Pop at miss.
     __ bind(&miss);
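
On ia32/x87 the changes above pass the store IC's value, slot, and vector on the stack instead of in registers, which is why the fast paths now exit with ret(Descriptor::kStackArgumentsCount * kPointerSize) rather than ret(0), and why the stub cache can jump straight to the handler without popping anything. The following is a minimal standalone C++ sketch of the assumed layout, not part of the patch; the constant names mirror the comments in the hunks above, and the offsets follow from ia32's 4-byte pointers and the asserted argument count of 3.

// Sketch only: stack layout the x87 store IC handlers assume on entry.
constexpr int kPointerSize = 4;          // ia32 pointer size in bytes
constexpr int kStackArgumentsCount = 3;  // value, slot, vector
constexpr int kReturnAddressOffset = 0 * kPointerSize;  // esp[0]
constexpr int kVectorOffset = 1 * kPointerSize;         // esp[4]
constexpr int kSlotOffset = 2 * kPointerSize;           // esp[8]
constexpr int kValueOffset = 3 * kPointerSize;          // esp[12]
// "ret 12" pops the return address and then adds 12 to esp, dropping the
// value, slot, and vector arguments in a single instruction.
static_assert(kStackArgumentsCount * kPointerSize == 12,
              "matches the ret immediate used by the handlers above");
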
diff --git a/src/identity-map.cc b/src/identity-map.cc
index 97b70ae..58dbf6b 100644
--- a/src/identity-map.cc
+++ b/src/identity-map.cc
@@ -6,7 +6,7 @@
 
 #include "src/base/functional.h"
 #include "src/heap/heap-inl.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/inspector/BUILD.gn b/src/inspector/BUILD.gn
index 56b96e1..15c090f 100644
--- a/src/inspector/BUILD.gn
+++ b/src/inspector/BUILD.gn
@@ -2,57 +2,98 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-protocol_path = "//third_party/WebKit/Source/platform/inspector_protocol"
-protocol_sources = [
-  "$target_gen_dir/Console.cpp",
-  "$target_gen_dir/Console.h",
-  "$target_gen_dir/Debugger.cpp",
-  "$target_gen_dir/Debugger.h",
-  "$target_gen_dir/HeapProfiler.cpp",
-  "$target_gen_dir/HeapProfiler.h",
-  "$target_gen_dir/Profiler.cpp",
-  "$target_gen_dir/Profiler.h",
-  "$target_gen_dir/public/Debugger.h",
-  "$target_gen_dir/public/Runtime.h",
-  "$target_gen_dir/Runtime.cpp",
-  "$target_gen_dir/Runtime.h",
+import("../../gni/v8.gni")
+
+_inspector_protocol = "//third_party/WebKit/Source/platform/inspector_protocol"
+import("$_inspector_protocol/inspector_protocol.gni")
+
+_protocol_generated = [
+  "protocol/Forward.h",
+  "protocol/Protocol.cpp",
+  "protocol/Protocol.h",
+  "protocol/Console.cpp",
+  "protocol/Console.h",
+  "protocol/Debugger.cpp",
+  "protocol/Debugger.h",
+  "protocol/HeapProfiler.cpp",
+  "protocol/HeapProfiler.h",
+  "protocol/Profiler.cpp",
+  "protocol/Profiler.h",
+  "protocol/Runtime.cpp",
+  "protocol/Runtime.h",
+  "protocol/Schema.cpp",
+  "protocol/Schema.h",
+  "../../include/inspector/Debugger.h",
+  "../../include/inspector/Runtime.h",
+  "../../include/inspector/Schema.h",
 ]
 
-action("inspector_protocol_sources") {
+action("protocol_compatibility") {
   visibility = [ ":*" ]  # Only targets in this file can depend on this.
-  script = "$protocol_path/CodeGenerator.py"
-  sources = [
-    "$protocol_path/CodeGenerator.py",
-    "$protocol_path/Exported_h.template",
-    "$protocol_path/Imported_h.template",
-    "$protocol_path/TypeBuilder_cpp.template",
-    "$protocol_path/TypeBuilder_h.template",
-  ]
+  script = "$_inspector_protocol/CheckProtocolCompatibility.py"
   inputs = [
     "js_protocol.json",
   ]
-  outputs = protocol_sources
+  _stamp = "$target_gen_dir/js_protocol.stamp"
+  outputs = [
+    _stamp,
+  ]
   args = [
-    "--protocol",
+    "--stamp",
+    rebase_path(_stamp, root_build_dir),
     rebase_path("js_protocol.json", root_build_dir),
-    "--string_type",
-    "String16",
-    "--export_macro",
-    "PLATFORM_EXPORT",
-    "--output_dir",
-    rebase_path(target_gen_dir, root_build_dir),
-    "--output_package",
-    "inspector",
-    "--exported_dir",
-    rebase_path("$target_gen_dir/public", root_build_dir),
-    "--exported_package",
-    "inspector/public",
   ]
 }
 
-config("inspector_protocol_config") {
-  include_dirs = [ "$protocol_path/../.." ]
-  defines = [ "V8_INSPECTOR_USE_STL" ]
+inspector_protocol_generate("protocol_generated_sources") {
+  visibility = [ ":*" ]  # Only targets in this file can depend on this.
+  deps = [
+    ":protocol_compatibility",
+  ]
+
+  out_dir = target_gen_dir
+  config_file = "inspector_protocol_config.json"
+  inputs = [
+    "js_protocol.json",
+    "inspector_protocol_config.json",
+  ]
+  outputs = _protocol_generated
+}
+
+action("inspector_injected_script") {
+  visibility = [ ":*" ]  # Only targets in this file can depend on this.
+  script = "build/xxd.py"
+  inputs = [
+    "injected-script-source.js",
+  ]
+  outputs = [
+    "$target_gen_dir/injected-script-source.h",
+  ]
+  args = [
+    "InjectedScriptSource_js",
+    rebase_path("injected-script-source.js", root_build_dir),
+    rebase_path("$target_gen_dir/injected-script-source.h", root_build_dir),
+  ]
+}
+
+action("inspector_debugger_script") {
+  visibility = [ ":*" ]  # Only targets in this file can depend on this.
+  script = "build/xxd.py"
+  inputs = [
+    "debugger-script.js",
+  ]
+  outputs = [
+    "$target_gen_dir/debugger-script.h",
+  ]
+  args = [
+    "DebuggerScript_js",
+    rebase_path("debugger-script.js", root_build_dir),
+    rebase_path("$target_gen_dir/debugger-script.h", root_build_dir),
+  ]
+}
+
+config("inspector_config") {
+  visibility = [ ":*" ]  # Only targets in this file can depend on this.
   cflags = []
   if (is_win) {
     cflags += [
@@ -64,38 +105,85 @@
       "/wd4996",  # Deprecated function call.
     ]
   }
+  if (is_component_build) {
+    defines = [ "BUILDING_V8_SHARED" ]
+  }
 }
 
-source_set("inspector_protocol") {
+v8_source_set("inspector") {
   deps = [
-    ":inspector_protocol_sources",
+    ":inspector_debugger_script",
+    ":inspector_injected_script",
+    ":protocol_generated_sources",
   ]
-  configs += [ ":inspector_protocol_config" ]
-  include_dirs = [ "$target_gen_dir/.." ]
-  sources = protocol_sources + [
-              "$protocol_path/Allocator.h",
-              "$protocol_path/Array.h",
-              "$protocol_path/BackendCallback.h",
-              "$protocol_path/CodeGenerator.py",
-              "$protocol_path/Collections.h",
-              "$protocol_path/DispatcherBase.cpp",
-              "$protocol_path/DispatcherBase.h",
-              "$protocol_path/ErrorSupport.cpp",
-              "$protocol_path/ErrorSupport.h",
-              "$protocol_path/FrontendChannel.h",
-              "$protocol_path/Maybe.h",
-              "$protocol_path/Object.cpp",
-              "$protocol_path/Object.h",
-              "$protocol_path/Parser.cpp",
-              "$protocol_path/Parser.h",
-              "$protocol_path/Platform.h",
-              "$protocol_path/PlatformSTL.h",
-              "$protocol_path/String16.cpp",
-              "$protocol_path/String16.h",
-              "$protocol_path/String16STL.cpp",
-              "$protocol_path/String16STL.h",
-              "$protocol_path/ValueConversions.h",
-              "$protocol_path/Values.cpp",
-              "$protocol_path/Values.h",
-            ]
+  configs = [ ":inspector_config" ]
+  include_dirs = [
+    "../..",
+    "../../include",
+    "$target_gen_dir/../..",
+    "$target_gen_dir/../../include",
+  ]
+  sources = rebase_path(_protocol_generated, ".", target_gen_dir)
+  sources += [
+    "../../include/v8-inspector-protocol.h",
+    "../../include/v8-inspector.h",
+  ]
+  sources += get_target_outputs(":inspector_injected_script")
+  sources += get_target_outputs(":inspector_debugger_script")
+  sources += [
+    "injected-script-native.cc",
+    "injected-script-native.h",
+    "injected-script.cc",
+    "injected-script.h",
+    "inspected-context.cc",
+    "inspected-context.h",
+    "java-script-call-frame.cc",
+    "java-script-call-frame.h",
+    "protocol-platform.h",
+    "remote-object-id.cc",
+    "remote-object-id.h",
+    "script-breakpoint.h",
+    "search-util.cc",
+    "search-util.h",
+    "string-16.cc",
+    "string-16.h",
+    "string-util.cc",
+    "string-util.h",
+    "v8-console-agent-impl.cc",
+    "v8-console-agent-impl.h",
+    "v8-console-message.cc",
+    "v8-console-message.h",
+    "v8-console.cc",
+    "v8-console.h",
+    "v8-debugger-agent-impl.cc",
+    "v8-debugger-agent-impl.h",
+    "v8-debugger-script.cc",
+    "v8-debugger-script.h",
+    "v8-debugger.cc",
+    "v8-debugger.h",
+    "v8-function-call.cc",
+    "v8-function-call.h",
+    "v8-heap-profiler-agent-impl.cc",
+    "v8-heap-profiler-agent-impl.h",
+    "v8-injected-script-host.cc",
+    "v8-injected-script-host.h",
+    "v8-inspector-impl.cc",
+    "v8-inspector-impl.h",
+    "v8-inspector-session-impl.cc",
+    "v8-inspector-session-impl.h",
+    "v8-internal-value-type.cc",
+    "v8-internal-value-type.h",
+    "v8-profiler-agent-impl.cc",
+    "v8-profiler-agent-impl.h",
+    "v8-regex.cc",
+    "v8-regex.h",
+    "v8-runtime-agent-impl.cc",
+    "v8-runtime-agent-impl.h",
+    "v8-schema-agent-impl.cc",
+    "v8-schema-agent-impl.h",
+    "v8-stack-trace-impl.cc",
+    "v8-stack-trace-impl.h",
+    "v8-value-copier.cc",
+    "v8-value-copier.h",
+  ]
 }
diff --git a/src/inspector/DEPS b/src/inspector/DEPS
new file mode 100644
index 0000000..4486204
--- /dev/null
+++ b/src/inspector/DEPS
@@ -0,0 +1,8 @@
+include_rules = [
+  "-src",
+  "+src/inspector",
+  "+src/base/atomicops.h",
+  "+src/base/macros.h",
+  "+src/base/logging.h",
+  "+src/base/platform/platform.h",
+]
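
The DEPS file above uses checkdeps-style include_rules: a leading '-' bans a path prefix and a leading '+' re-allows a subtree or a single header, so inspector sources may include other src/inspector files and the four listed src/base headers, but nothing else under src. A hedged illustration, using file names that appear elsewhere in this patch:

// Illustration only, assuming the usual checkdeps prefix semantics:
#include "src/inspector/string-16.h"  // allowed by "+src/inspector"
#include "src/base/macros.h"          // allowed by "+src/base/macros.h"
// #include "src/heap/heap-inl.h"     // would be rejected by "-src"
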
diff --git a/src/inspector/OWNERS b/src/inspector/OWNERS
new file mode 100644
index 0000000..2c4bd8d
--- /dev/null
+++ b/src/inspector/OWNERS
@@ -0,0 +1,15 @@
+set noparent
+
+alph@chromium.org
+caseq@chromium.org
+dgozman@chromium.org
+jochen@chromium.org
+kozyatinskiy@chromium.org
+pfeldman@chromium.org
+yangguo@chromium.org
+
+# Changes to remote debugging protocol require devtools review to
+# ensure backwards compatibility and commitment to maintain.
+per-file js_protocol.json=set noparent
+per-file js_protocol.json=dgozman@chromium.org
+per-file js_protocol.json=pfeldman@chromium.org
diff --git a/src/inspector/PRESUBMIT.py b/src/inspector/PRESUBMIT.py
new file mode 100644
index 0000000..491564b
--- /dev/null
+++ b/src/inspector/PRESUBMIT.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""v8_inspect presubmit script
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into gcl.
+"""
+
+compile_note = "Be sure to run your patch by the compile-scripts.py script prior to committing!"
+
+
+def _CompileScripts(input_api, output_api):
+  local_paths = [f.LocalPath() for f in input_api.AffectedFiles()]
+
+  compilation_related_files = [
+    "js_protocol.json"
+    "compile-scripts.js",
+    "injected-script-source.js",
+    "debugger_script_externs.js",
+    "injected_script_externs.js",
+    "check_injected_script_source.js",
+    "debugger-script.js"
+  ]
+
+  for file in compilation_related_files:
+    if (any(file in path for path in local_paths)):
+      script_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
+        "build", "compile-scripts.py")
+      proc = input_api.subprocess.Popen(
+        [input_api.python_executable, script_path],
+        stdout=input_api.subprocess.PIPE,
+        stderr=input_api.subprocess.STDOUT)
+      out, _ = proc.communicate()
+      if "ERROR" in out or "WARNING" in out or proc.returncode:
+        return [output_api.PresubmitError(out)]
+      if "NOTE" in out:
+        return [output_api.PresubmitPromptWarning(out + compile_note)]
+      return []
+  return []
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CompileScripts(input_api, output_api))
+  return results
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CompileScripts(input_api, output_api))
+  return results
diff --git a/src/inspector/build/check_injected_script_source.py b/src/inspector/build/check_injected_script_source.py
new file mode 100644
index 0000000..0f2509c
--- /dev/null
+++ b/src/inspector/build/check_injected_script_source.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# Copied from blink:
+# WebKit/Source/devtools/scripts/check_injected_script_source.py
+#
+
+import re
+import sys
+import os
+
+
+def validate_injected_script(fileName):
+    f = open(fileName, "r")
+    lines = f.readlines()
+    f.close()
+
+    proto_functions = "|".join([
+        # Array.prototype.*
+        "concat", "every", "filter", "forEach", "indexOf", "join", "lastIndexOf", "map", "pop",
+        "push", "reduce", "reduceRight", "reverse", "shift", "slice", "some", "sort", "splice", "toLocaleString", "toString", "unshift",
+        # Function.prototype.*
+        "apply", "bind", "call", "isGenerator", "toSource",
+        # Object.prototype.*
+        "toString",
+    ])
+
+    global_functions = "|".join([
+        "eval", "uneval", "isFinite", "isNaN", "parseFloat", "parseInt", "decodeURI", "decodeURIComponent",
+        "encodeURI", "encodeURIComponent", "escape", "unescape", "Map", "Set"
+    ])
+
+    # Black list:
+    # - instanceof, since e.g. "obj instanceof Error" may throw if Error is overridden and is not a function
+    # - Object.prototype.toString()
+    # - Array.prototype.*
+    # - Function.prototype.*
+    # - Math.*
+    # - Global functions
+    black_list_call_regex = re.compile(r"\sinstanceof\s+\w*|\bMath\.\w+\(|(?<!InjectedScriptHost)\.(" + proto_functions + r")\(|[^\.]\b(" + global_functions + r")\(")
+
+    errors_found = False
+    for i, line in enumerate(lines):
+        if line.find("suppressBlacklist") != -1:
+            continue
+        for match in re.finditer(black_list_call_regex, line):
+            errors_found = True
+            print "ERROR: Black listed expression in %s at line %02d column %02d: %s" % (os.path.basename(fileName), i + 1, match.start(), match.group(0))
+
+    if not errors_found:
+        print "OK"
+
+
+def main(argv):
+    if len(argv) < 2:
+        print('ERROR: Usage: %s path/to/injected-script-source.js' % argv[0])
+        return 1
+
+    validate_injected_script(argv[1])
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv))
diff --git a/src/inspector/build/closure-compiler.tar.gz b/src/inspector/build/closure-compiler.tar.gz
new file mode 100644
index 0000000..92087f3
--- /dev/null
+++ b/src/inspector/build/closure-compiler.tar.gz
Binary files differ
diff --git a/src/inspector/build/closure-compiler.tar.gz.sha1 b/src/inspector/build/closure-compiler.tar.gz.sha1
new file mode 100644
index 0000000..5366f51
--- /dev/null
+++ b/src/inspector/build/closure-compiler.tar.gz.sha1
@@ -0,0 +1 @@
+69937d3c239ca63e4c9045718886ddd096ffc054
\ No newline at end of file
diff --git a/src/inspector/build/closure-compiler/COPYING b/src/inspector/build/closure-compiler/COPYING
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/src/inspector/build/closure-compiler/COPYING
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/src/inspector/build/closure-compiler/README.md b/src/inspector/build/closure-compiler/README.md
new file mode 100644
index 0000000..080319e
--- /dev/null
+++ b/src/inspector/build/closure-compiler/README.md
@@ -0,0 +1,519 @@
+# [Google Closure Compiler](https://developers.google.com/closure/compiler/)
+
+[![Build Status](https://travis-ci.org/google/closure-compiler.svg?branch=master)](https://travis-ci.org/google/closure-compiler)
+
+The [Closure Compiler](https://developers.google.com/closure/compiler/) is a tool for making JavaScript download and run faster. It is a true compiler for JavaScript. Instead of compiling from a source language to machine code, it compiles from JavaScript to better JavaScript. It parses your JavaScript, analyzes it, removes dead code and rewrites and minimizes what's left. It also checks syntax, variable references, and types, and warns about common JavaScript pitfalls.
+
+## Getting Started
+ * [Download the latest version](http://dl.google.com/closure-compiler/compiler-latest.zip) ([Release details here](https://github.com/google/closure-compiler/wiki/Releases))
+ * [Download a specific version](https://github.com/google/closure-compiler/wiki/Binary-Downloads). Also available via:
+   - [Maven](https://github.com/google/closure-compiler/wiki/Maven)
+   - [NPM](https://www.npmjs.com/package/google-closure-compiler)
+ * See the [Google Developers Site](https://developers.google.com/closure/compiler/docs/gettingstarted_app) for documentation including instructions for running the compiler from the command line.
+
+## Options for Getting Help
+1. Post in the [Closure Compiler Discuss Group](https://groups.google.com/forum/#!forum/closure-compiler-discuss)
+2. Ask a question on [Stack Overflow](http://stackoverflow.com/questions/tagged/google-closure-compiler)
+3. Consult the [FAQ](https://github.com/google/closure-compiler/wiki/FAQ)
+
+## Building it Yourself
+
+Note: The Closure Compiler requires [Java 7 or higher](http://www.java.com/).
+
+### Using [Maven](http://maven.apache.org/)
+
+1. Download [Maven](http://maven.apache.org/download.cgi).
+
+2. Add sonatype snapshots repository to `~/.m2/settings.xml`:
+   ```
+   <profile>
+     <id>allow-snapshots</id>
+        <activation><activeByDefault>true</activeByDefault></activation>
+     <repositories>
+       <repository>
+         <id>snapshots-repo</id>
+         <url>https://oss.sonatype.org/content/repositories/snapshots</url>
+         <releases><enabled>false</enabled></releases>
+         <snapshots><enabled>true</enabled></snapshots>
+       </repository>
+     </repositories>
+   </profile>
+   ```
+
+3. Run `mvn -DskipTests` (omit the `-DskipTests` if you want to run all the
+unit tests too).
+
+    This will produce a jar file called `target/closure-compiler-1.0-SNAPSHOT.jar`.
+
+### Using [Eclipse](http://www.eclipse.org/)
+
+1. Download and open the [Eclipse IDE](http://www.eclipse.org/).
+2. Navigate to `File > New > Project ...` and create a Java Project. Give
+   the project a name.
+3. Select `Create project from existing source` and choose the root of the
+   checked-out source tree as the existing directory.
+4. Navigate to the `build.xml` file. You will see all the build rules in
+   the Outline pane. Run the `jar` rule to build the compiler in
+   `build/compiler.jar`.
+
+## Running
+
+On the command line, at the root of this project, type
+
+```
+java -jar target/closure-compiler-1.0-SNAPSHOT.jar
+```
+
+This starts the compiler in interactive mode. Type
+
+```javascript
+var x = 17 + 25;
+```
+
+then hit "Enter", then hit "Ctrl-Z" (on Windows) or "Ctrl-D" (on Mac or Linux)
+and "Enter" again. The Compiler will respond:
+
+```javascript
+var x=42;
+```
+
+The Closure Compiler has many options for reading input from a file, writing
+output to a file, checking your code, and running optimizations. To learn more,
+type
+
+```
+java -jar compiler.jar --help
+```
+
+More detailed information about running the Closure Compiler is available in the
+[documentation](http://code.google.com/closure/compiler/docs/gettingstarted_app.html).
+
+## Compiling Multiple Scripts
+
+If you have multiple scripts, you should compile them all together with one
+compile command.
+
+```bash
+java -jar compiler.jar --js_output_file=out.js in1.js in2.js in3.js ...
+```
+
+You can also use minimatch-style globs.
+
+```bash
+# Recursively include all js files in subdirs
+java -jar compiler.jar --js_output_file=out.js 'src/**.js'
+
+# Recursively include all js files in subdirs, excluding test files.
+# Use single-quotes, so that bash doesn't try to expand the '!'
+java -jar compiler.jar --js_output_file=out.js 'src/**.js' '!**_test.js'
+```
+
+The Closure Compiler will concatenate the files in the order they're passed at
+the command line.
+
+If you're using globs or many files, you may start to run into
+problems with managing dependencies between scripts. In this case, you should
+use the [Closure Library](https://developers.google.com/closure/library/). It
+contains functions for enforcing dependencies between scripts, and Closure Compiler
+will re-order the inputs automatically.
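+
+As an illustrative sketch (the `myapp.*` names here are invented, and the
+Closure Library itself is assumed to be among the compiler inputs), a pair of
+sources using the library's `goog.provide`/`goog.require` primitives might look
+like this; passing both files to the compiler lets it order them correctly
+regardless of the order given on the command line:
+
+```javascript
+// greeter.js -- declares the namespace this file provides.
+goog.provide('myapp.greeter');
+
+myapp.greeter.greet = function(name) {
+  return 'Hello, ' + name + '!';
+};
+
+// main.js -- declares its dependency; the compiler uses this declaration to
+// place greeter.js ahead of main.js in the concatenated output.
+goog.require('myapp.greeter');
+
+console.log(myapp.greeter.greet('world'));
+```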
+
+## How to Contribute
+### Reporting a bug
+1. First make sure that it is really a bug and not simply the way that Closure Compiler works (especially true for ADVANCED_OPTIMIZATIONS).
+ * Check the [official documentation](https://developers.google.com/closure/compiler/)
+ * Consult the [FAQ](https://github.com/google/closure-compiler/wiki/FAQ)
+ * Search on [Stack Overflow](http://stackoverflow.com/questions/tagged/google-closure-compiler) and in the [Closure Compiler Discuss Group](https://groups.google.com/forum/#!forum/closure-compiler-discuss)
+2. If you still think you have found a bug, make sure someone hasn't already reported it. See the list of [known issues](https://github.com/google/closure-compiler/issues).
+3. If it hasn't been reported yet, post a new issue. Make sure to add enough detail so that the bug can be recreated. The smaller the reproduction code, the better.
+
+### Suggesting a Feature
+1. Consult the [FAQ](https://github.com/google/closure-compiler/wiki/FAQ) to make sure that the behaviour you would like isn't specifically excluded (such as string inlining).
+2. Make sure someone hasn't requested the same thing. See the list of [known issues](https://github.com/google/closure-compiler/issues).
+3. Read up on [what type of feature requests are accepted](https://github.com/google/closure-compiler/wiki/FAQ#how-do-i-submit-a-feature-request-for-a-new-type-of-optimization).
+4. Submit your request as an issue.
+
+### Submitting patches
+1. All contributors must sign a contributor license agreement (CLA).
+   A CLA basically says that you own the rights to any code you contribute,
+   and that you give us permission to use that code in Closure Compiler.
+   You maintain the copyright on that code.
+   If you own all the rights to your code, you can fill out an
+   [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html).
+   If your employer has any rights to your code, then they also need to fill out
+   a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html).
+   If you don't know if your employer has any rights to your code, you should
+   ask before signing anything.
+   By default, anyone with an @google.com email address already has a CLA
+   signed for them.
+2. To make sure your changes are of the type that will be accepted, ask about your patch on the [Closure Compiler Discuss Group](https://groups.google.com/forum/#!forum/closure-compiler-discuss)
+3. Fork the repository.
+4. Make your changes.
+5. Submit a pull request for your changes. A project developer will review your work and then merge your request into the project.
+
+## Closure Compiler License
+
+Copyright 2009 The Closure Compiler Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+## Dependency Licenses
+
+### Rhino
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td>
+      <code>src/com/google/javascript/rhino</code>, <code>test/com/google/javascript/rhino</code>
+    </td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>http://www.mozilla.org/rhino</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>1.5R3, with heavy modifications</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Netscape Public License and MPL / GPL dual license</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>A partial copy of Mozilla Rhino. Mozilla Rhino is an
+implementation of JavaScript for the JVM.  The JavaScript
+parse tree data structures were extracted and modified
+significantly for use by Google's JavaScript compiler.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>The packages have been renamespaced. All code not
+relevant to the parse tree has been removed. A JsDoc parser and static typing
+system have been added.</td>
+  </tr>
+</table>
+
+### Args4j
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/args4j.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://args4j.dev.java.net/</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>2.0.26</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>MIT</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>args4j is a small Java class library that makes it easy to parse command line
+options/arguments in your CUI application.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### Guava Libraries
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/guava.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://github.com/google/guava</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>20.0</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Apache License 2.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Google's core Java libraries.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### JSR 305
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/jsr305.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>http://code.google.com/p/jsr-305/</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>svn revision 47</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>BSD License</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Annotations for software defect detection.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### JUnit
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/junit.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>http://sourceforge.net/projects/junit/</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>4.11</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Common Public License 1.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>A framework for writing and running automated tests in Java.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### Protocol Buffers
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/protobuf-java.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://github.com/google/protobuf</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>2.5.0</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>New BSD License</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Supporting libraries for protocol buffers,
+an encoding of structured data.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### Truth
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/truth.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://github.com/google/truth</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>0.24</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Apache License 2.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Assertion/Proposition framework for Java unit tests</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### Ant
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td>
+      <code>lib/ant.jar</code>, <code>lib/ant-launcher.jar</code>
+    </td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>http://ant.apache.org/bindownload.cgi</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>1.8.1</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Apache License 2.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Ant is a Java based build tool. In theory it is kind of like "make"
+without make's wrinkles and with the full portability of pure java code.</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### GSON
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>lib/gson.jar</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://github.com/google/gson</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>2.2.4</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Apache License 2.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>A Java library to convert JSON to Java objects and vice-versa</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>None</td>
+  </tr>
+</table>
+
+### Node.js Closure Compiler Externs
+
+<table>
+  <tr>
+    <td>Code Path</td>
+    <td><code>contrib/nodejs</code></td>
+  </tr>
+
+  <tr>
+    <td>URL</td>
+    <td>https://github.com/dcodeIO/node.js-closure-compiler-externs</td>
+  </tr>
+
+  <tr>
+    <td>Version</td>
+    <td>e891b4fbcf5f466cc4307b0fa842a7d8163a073a</td>
+  </tr>
+
+  <tr>
+    <td>License</td>
+    <td>Apache License 2.0</td>
+  </tr>
+
+  <tr>
+    <td>Description</td>
+    <td>Type contracts for NodeJS APIs</td>
+  </tr>
+
+  <tr>
+    <td>Local Modifications</td>
+    <td>Substantial changes to make them compatible with NpmCommandLineRunner.</td>
+  </tr>
+</table>
diff --git a/src/inspector/build/closure-compiler/closure-compiler.jar b/src/inspector/build/closure-compiler/closure-compiler.jar
new file mode 100644
index 0000000..0d42389
--- /dev/null
+++ b/src/inspector/build/closure-compiler/closure-compiler.jar
Binary files differ
diff --git a/src/inspector/build/compile-scripts.py b/src/inspector/build/compile-scripts.py
new file mode 100755
index 0000000..abe167a
--- /dev/null
+++ b/src/inspector/build/compile-scripts.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import os.path as path
+import generate_protocol_externs
+import re
+import subprocess
+import sys
+
+if len(sys.argv) == 2 and sys.argv[1] == '--help':
+  print("Usage: %s" % path.basename(sys.argv[0]))
+  sys.exit(0)
+
+java_required_major = 1
+java_required_minor = 7
+
+v8_inspector_path = path.dirname(path.dirname(path.abspath(__file__)))
+
+protocol_externs_file = path.join(v8_inspector_path, 'protocol_externs.js')
+injected_script_source_name = path.join(v8_inspector_path,
+  'injected-script-source.js')
+injected_script_externs_file = path.join(v8_inspector_path,
+  'injected_script_externs.js')
+debugger_script_source_name = path.join(v8_inspector_path,
+  'debugger-script.js')
+debugger_script_externs_file = path.join(v8_inspector_path,
+  'debugger_script_externs.js')
+
+generate_protocol_externs.generate_protocol_externs(protocol_externs_file,
+  path.join(v8_inspector_path, 'js_protocol.json'))
+
+error_warning_regex = re.compile(r'WARNING|ERROR')
+
+closure_compiler_jar = path.join(v8_inspector_path, 'build',
+  'closure-compiler', 'closure-compiler.jar')
+
+common_closure_args = [
+  '--checks_only',
+  '--warning_level', 'VERBOSE'
+]
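+# --checks_only (above) makes the compiler run its checks and report
+# warnings/errors without emitting compiled output; this script only inspects
+# the diagnostics it prints.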
+
+# Error reporting and checking.
+errors_found = False
+
+def popen(arguments):
+  return subprocess.Popen(arguments, stdout=subprocess.PIPE,
+    stderr=subprocess.STDOUT)
+
+def error_excepthook(exctype, value, traceback):
+  print 'ERROR:'
+  sys.__excepthook__(exctype, value, traceback)
+sys.excepthook = error_excepthook
+
+def has_errors(output):
+  return re.search(error_warning_regex, output) != None
+
+# Find java. Based on
+# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python.
+def which(program):
+  def is_exe(fpath):
+    return path.isfile(fpath) and os.access(fpath, os.X_OK)
+
+  fpath, fname = path.split(program)
+  if fpath:
+    if is_exe(program):
+      return program
+  else:
+    for part in os.environ['PATH'].split(os.pathsep):
+      part = part.strip('"')
+      exe_file = path.join(part, program)
+      if is_exe(exe_file):
+        return exe_file
+  return None
+
+def find_java():
+  exec_command = None
+  has_server_jvm = True
+  java_path = which('java')
+  if not java_path:
+    java_path = which('java.exe')
+
+  if not java_path:
+    print 'NOTE: No Java executable found in $PATH.'
+    sys.exit(0)
+
+  is_ok = False
+  java_version_out, _ = popen([java_path, '-version']).communicate()
+  java_build_regex = re.compile(r'^\w+ version "(\d+)\.(\d+)')
+  # pylint: disable=E1103
+  match = re.search(java_build_regex, java_version_out)
+  if match:
+    major = int(match.group(1))
+    minor = int(match.group(2))
+    is_ok = major >= java_required_major and minor >= java_required_minor
+  if is_ok:
+    exec_command = [java_path, '-Xms1024m', '-server',
+      '-XX:+TieredCompilation']
+    check_server_proc = popen(exec_command + ['-version'])
+    check_server_proc.communicate()
+    if check_server_proc.returncode != 0:
+      # Not all Java installs have server JVMs.
+      # list.remove() mutates the list in place and returns None.
+      exec_command.remove('-server')
+      has_server_jvm = False
+
+  if not is_ok:
+    print 'NOTE: Java executable version %d.%d or above not found in $PATH.' % (java_required_major, java_required_minor)
+    sys.exit(0)
+  print 'Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)')
+  return exec_command
+
+java_exec = find_java()
+
+spawned_compiler_command = java_exec + [
+  '-jar',
+  closure_compiler_jar
+] + common_closure_args
+
+print 'Compiling injected-script-source.js...'
+
+command = spawned_compiler_command + [
+  '--externs', injected_script_externs_file,
+  '--externs', protocol_externs_file,
+  '--js', injected_script_source_name
+]
+
+injected_script_compile_proc = popen(command)
+
+print 'Compiling debugger-script.js...'
+
+command = spawned_compiler_command + [
+  '--externs', debugger_script_externs_file,
+  '--js', debugger_script_source_name,
+  '--new_type_inf'
+]
+
+debugger_script_compile_proc = popen(command)
+
+print 'Validating injected-script-source.js...'
+injectedscript_check_script_path = path.join(v8_inspector_path, 'build',
+  'check_injected_script_source.py')
+validate_injected_script_proc = popen([sys.executable,
+  injectedscript_check_script_path, injected_script_source_name])
+
+print
+
+(injected_script_compile_out, _) = injected_script_compile_proc.communicate()
+print 'injected-script-source.js compilation output:%s' % os.linesep
+print injected_script_compile_out
+errors_found |= has_errors(injected_script_compile_out)
+
+(debugger_script_compiler_out, _) = debugger_script_compile_proc.communicate()
+print 'debugger-script.js compilation output:%s' % os.linesep
+print debugger_script_compiler_out
+errors_found |= has_errors(debugger_script_compiler_out)
+
+(validate_injected_script_out, _) = validate_injected_script_proc.communicate()
+print 'Validate injected-script-source.js output:%s' % os.linesep
+print validate_injected_script_out if validate_injected_script_out else '<empty>'
+errors_found |= has_errors(validate_injected_script_out)
+
+os.remove(protocol_externs_file)
+
+if errors_found:
+  print 'ERRORS DETECTED'
+  sys.exit(1)
diff --git a/src/inspector/build/generate_protocol_externs.py b/src/inspector/build/generate_protocol_externs.py
new file mode 100755
index 0000000..c2ba2c5
--- /dev/null
+++ b/src/inspector/build/generate_protocol_externs.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import re
+import json
+
+type_traits = {
+    "any": "*",
+    "string": "string",
+    "integer": "number",
+    "number": "number",
+    "boolean": "boolean",
+    "array": "!Array.<*>",
+    "object": "!Object",
+}
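+# For example, a protocol parameter declared with "type": "integer" is written
+# into the generated externs with the Closure type annotation "number".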
+
+promisified_domains = {
+    "Accessibility",
+    "Animation",
+    "CSS",
+    "Emulation",
+    "Profiler"
+}
+
+ref_types = {}
+
+def full_qualified_type_id(domain_name, type_id):
+    if type_id.find(".") == -1:
+        return "%s.%s" % (domain_name, type_id)
+    return type_id
+
+
+def fix_camel_case(name):
+    prefix = ""
+    if name[0] == "-":
+        prefix = "Negative"
+        name = name[1:]
+    refined = re.sub(r'-(\w)', lambda pat: pat.group(1).upper(), name)
+    refined = to_title_case(refined)
+    return prefix + re.sub(r'(?i)HTML|XML|WML|API', lambda pat: pat.group(0).upper(), refined)
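+# Illustrative examples: fix_camel_case("text-align") yields "TextAlign" and
+# fix_camel_case("-webkit-filter") yields "NegativeWebkitFilter".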
+
+
+def to_title_case(name):
+    return name[:1].upper() + name[1:]
+
+
+def generate_enum(name, json):
+    enum_members = []
+    for member in json["enum"]:
+        enum_members.append("    %s: \"%s\"" % (fix_camel_case(member), member))
+    return "\n/** @enum {string} */\n%s = {\n%s\n};\n" % (name, (",\n".join(enum_members)))
+
+
+def param_type(domain_name, param):
+    if "type" in param:
+        if param["type"] == "array":
+            items = param["items"]
+            return "!Array.<%s>" % param_type(domain_name, items)
+        else:
+            return type_traits[param["type"]]
+    if "$ref" in param:
+        type_id = full_qualified_type_id(domain_name, param["$ref"])
+        if type_id in ref_types:
+            return ref_types[type_id]
+        else:
+            print "Type not found: " + type_id
+            return "!! Type not found: " + type_id
+
+
+def load_schema(file, domains):
+    input_file = open(file, "r")
+    json_string = input_file.read()
+    parsed_json = json.loads(json_string)
+    domains.extend(parsed_json["domains"])
+
+
+def generate_protocol_externs(output_path, file1):
+    domains = []
+    load_schema(file1, domains)
+    output_file = open(output_path, "w")
+
+    output_file.write(
+"""
+var InspectorBackend = {}
+
+var Protocol = {};
+/** @typedef {string}*/
+Protocol.Error;
+""")
+
+    for domain in domains:
+        domain_name = domain["domain"]
+        if "types" in domain:
+            for type in domain["types"]:
+                type_id = full_qualified_type_id(domain_name, type["id"])
+                ref_types[type_id] = "%sAgent.%s" % (domain_name, type["id"])
+
+    for domain in domains:
+        domain_name = domain["domain"]
+        promisified = domain_name in promisified_domains
+
+        output_file.write("\n\n/**\n * @constructor\n*/\n")
+        output_file.write("Protocol.%sAgent = function(){};\n" % domain_name)
+
+        if "commands" in domain:
+            for command in domain["commands"]:
+                output_file.write("\n/**\n")
+                params = []
+                has_return_value = "returns" in command
+                explicit_parameters = promisified and has_return_value
+                if ("parameters" in command):
+                    for in_param in command["parameters"]:
+                        # For a promisified domain with a return value, all parameters are treated as required.
+                        if (not explicit_parameters and "optional" in in_param):
+                            params.append("opt_%s" % in_param["name"])
+                            output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, in_param), in_param["name"]))
+                        else:
+                            params.append(in_param["name"])
+                            output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, in_param), in_param["name"]))
+                returns = []
+                returns.append("?Protocol.Error")
+                if ("error" in command):
+                    returns.append("%s=" % param_type(domain_name, command["error"]))
+                if (has_return_value):
+                    for out_param in command["returns"]:
+                        if ("optional" in out_param):
+                            returns.append("%s=" % param_type(domain_name, out_param))
+                        else:
+                            returns.append("%s" % param_type(domain_name, out_param))
+                callback_return_type = "void="
+                if explicit_parameters:
+                    callback_return_type = "T"
+                elif promisified:
+                    callback_return_type = "T="
+                output_file.write(" * @param {function(%s):%s} opt_callback\n" % (", ".join(returns), callback_return_type))
+                if (promisified):
+                    output_file.write(" * @return {!Promise.<T>}\n")
+                    output_file.write(" * @template T\n")
+                params.append("opt_callback")
+
+                output_file.write(" */\n")
+                output_file.write("Protocol.%sAgent.prototype.%s = function(%s) {}\n" % (domain_name, command["name"], ", ".join(params)))
+                output_file.write("/** @param {function(%s):void=} opt_callback */\n" % ", ".join(returns))
+                output_file.write("Protocol.%sAgent.prototype.invoke_%s = function(obj, opt_callback) {}\n" % (domain_name, command["name"]))
+
+        output_file.write("\n\n\nvar %sAgent = function(){};\n" % domain_name)
+
+        if "types" in domain:
+            for type in domain["types"]:
+                if type["type"] == "object":
+                    typedef_args = []
+                    if "properties" in type:
+                        for property in type["properties"]:
+                            suffix = ""
+                            if ("optional" in property):
+                                suffix = "|undefined"
+                            if "enum" in property:
+                                enum_name = "%sAgent.%s%s" % (domain_name, type["id"], to_title_case(property["name"]))
+                                output_file.write(generate_enum(enum_name, property))
+                                typedef_args.append("%s:(%s%s)" % (property["name"], enum_name, suffix))
+                            else:
+                                typedef_args.append("%s:(%s%s)" % (property["name"], param_type(domain_name, property), suffix))
+                    if (typedef_args):
+                        output_file.write("\n/** @typedef {!{%s}} */\n%sAgent.%s;\n" % (", ".join(typedef_args), domain_name, type["id"]))
+                    else:
+                        output_file.write("\n/** @typedef {!Object} */\n%sAgent.%s;\n" % (domain_name, type["id"]))
+                elif type["type"] == "string" and "enum" in type:
+                    output_file.write(generate_enum("%sAgent.%s" % (domain_name, type["id"]), type))
+                elif type["type"] == "array":
+                    output_file.write("\n/** @typedef {!Array.<!%s>} */\n%sAgent.%s;\n" % (param_type(domain_name, type["items"]), domain_name, type["id"]))
+                else:
+                    output_file.write("\n/** @typedef {%s} */\n%sAgent.%s;\n" % (type_traits[type["type"]], domain_name, type["id"]))
+
+        output_file.write("/** @interface */\n")
+        output_file.write("%sAgent.Dispatcher = function() {};\n" % domain_name)
+        if "events" in domain:
+            for event in domain["events"]:
+                params = []
+                if ("parameters" in event):
+                    output_file.write("/**\n")
+                    for param in event["parameters"]:
+                        if ("optional" in param):
+                            params.append("opt_%s" % param["name"])
+                            output_file.write(" * @param {%s=} opt_%s\n" % (param_type(domain_name, param), param["name"]))
+                        else:
+                            params.append(param["name"])
+                            output_file.write(" * @param {%s} %s\n" % (param_type(domain_name, param), param["name"]))
+                    output_file.write(" */\n")
+                output_file.write("%sAgent.Dispatcher.prototype.%s = function(%s) {};\n" % (domain_name, event["name"], ", ".join(params)))
+
+    output_file.write("\n/** @constructor\n * @param {!Object.<string, !Object>} agentsMap\n */\n")
+    output_file.write("Protocol.Agents = function(agentsMap){this._agentsMap;};\n")
+    output_file.write("/**\n * @param {string} domain\n * @param {!Object} dispatcher\n */\n")
+    output_file.write("Protocol.Agents.prototype.registerDispatcher = function(domain, dispatcher){};\n")
+    for domain in domains:
+        domain_name = domain["domain"]
+        uppercase_length = 0
+        while uppercase_length < len(domain_name) and domain_name[uppercase_length].isupper():
+            uppercase_length += 1
+
+        output_file.write("/** @return {!Protocol.%sAgent}*/\n" % domain_name)
+        output_file.write("Protocol.Agents.prototype.%s = function(){};\n" % (domain_name[:uppercase_length].lower() + domain_name[uppercase_length:] + "Agent"))
+
+        output_file.write("/**\n * @param {!%sAgent.Dispatcher} dispatcher\n */\n" % domain_name)
+        output_file.write("Protocol.Agents.prototype.register%sDispatcher = function(dispatcher) {}\n" % domain_name)
+
+
+    output_file.close()
+
+if __name__ == "__main__":
+    import sys
+    import os.path
+    program_name = os.path.basename(__file__)
+    if len(sys.argv) < 4 or sys.argv[1] != "-o":
+        sys.stderr.write("Usage: %s -o OUTPUT_FILE INPUT_FILE\n" % program_name)
+        exit(1)
+    output_path = sys.argv[2]
+    input_path = sys.argv[3]
+    generate_protocol_externs(output_path, input_path)
diff --git a/src/inspector/build/rjsmin.py b/src/inspector/build/rjsmin.py
new file mode 100755
index 0000000..8357a6d
--- /dev/null
+++ b/src/inspector/build/rjsmin.py
@@ -0,0 +1,295 @@
+#!/usr/bin/env python
+#
+# Copyright 2011 - 2013
+# Andr\xe9 Malo or his licensors, as applicable
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+r"""
+=====================
+ Javascript Minifier
+=====================
+
+rJSmin is a javascript minifier written in python.
+
+The minifier is based on the semantics of `jsmin.c by Douglas Crockford`_\.
+
+The module is a re-implementation aiming for speed, so it can be used at
+runtime (rather than during a preprocessing step). Usually it produces the
+same results as the original ``jsmin.c``. It differs in the following ways:
+
+- there is no error detection: unterminated string, regex and comment
+  literals are treated as regular javascript code and minified as such.
+- Control characters inside string and regex literals are left untouched; they
+  are not converted to spaces (nor to \n)
+- Newline characters are not allowed inside string and regex literals, except
+  for line continuations in string literals (ECMA-5).
+- "return /regex/" is recognized correctly.
+- "+ +" and "- -" sequences are not collapsed to '++' or '--'
+- Newlines before ! operators are removed more sensibly
+- rJSmin does not handle streams, but only complete strings. (However, the
+  module provides a "streamy" interface).
+
+Since most parts of the logic are handled by the regex engine it's way
+faster than the original python port of ``jsmin.c`` by Baruch Even. The speed
+factor varies between about 6 and 55 depending on input and python version
+(it gets faster the more compressed the input already is). Compared to the
+speed-refactored python port by Dave St.Germain the performance gain is less
+dramatic but still between 1.2 and 7. See the docs/BENCHMARKS file for
+details.
+
+rjsmin.c is a reimplementation of rjsmin.py in C and speeds it up even more.
+
+Both python 2 and python 3 are supported.
+
+.. _jsmin.c by Douglas Crockford:
+   http://www.crockford.com/javascript/jsmin.c
+"""
+__author__ = "Andr\xe9 Malo"
+__author__ = getattr(__author__, 'decode', lambda x: __author__)('latin-1')
+__docformat__ = "restructuredtext en"
+__license__ = "Apache License, Version 2.0"
+__version__ = '1.0.7'
+__all__ = ['jsmin']
+
+import re as _re
+
+
+def _make_jsmin(python_only=False):
+    """
+    Generate JS minifier based on `jsmin.c by Douglas Crockford`_
+
+    .. _jsmin.c by Douglas Crockford:
+       http://www.crockford.com/javascript/jsmin.c
+
+    :Parameters:
+      `python_only` : ``bool``
+        Use only the python variant. If true, the c extension is not even
+        tried to be loaded.
+
+    :Return: Minifier
+    :Rtype: ``callable``
+    """
+    # pylint: disable = R0912, R0914, W0612
+    if not python_only:
+        try:
+            import _rjsmin
+        except ImportError:
+            pass
+        else:
+            return _rjsmin.jsmin
+    try:
+        xrange
+    except NameError:
+        xrange = range  # pylint: disable = W0622
+
+    space_chars = r'[\000-\011\013\014\016-\040]'
+
+    line_comment = r'(?://[^\r\n]*)'
+    space_comment = r'(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)'
+    string1 = \
+        r'(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^\047\\\r\n]*)*\047)'
+    string2 = r'(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|\r)[^"\\\r\n]*)*")'
+    strings = r'(?:%s|%s)' % (string1, string2)
+
+    charclass = r'(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\])'
+    nospecial = r'[^/\\\[\r\n]'
+    regex = r'(?:/(?![\r\n/*])%s*(?:(?:\\[^\r\n]|%s)%s*)*/)' % (
+        nospecial, charclass, nospecial)
+    space = r'(?:%s|%s)' % (space_chars, space_comment)
+    newline = r'(?:%s?[\r\n])' % line_comment
+
+    def fix_charclass(result):
+        """ Fixup string of chars to fit into a regex char class """
+        pos = result.find('-')
+        if pos >= 0:
+            result = r'%s%s-' % (result[:pos], result[pos + 1:])
+
+        def sequentize(string):
+            """
+            Notate consecutive characters as sequence
+
+            (1-4 instead of 1234)
+            """
+            first, last, result = None, None, []
+            for char in map(ord, string):
+                if last is None:
+                    first = last = char
+                elif last + 1 == char:
+                    last = char
+                else:
+                    result.append((first, last))
+                    first = last = char
+            if last is not None:
+                result.append((first, last))
+            return ''.join(['%s%s%s' % (
+                chr(first),
+                last > first + 1 and '-' or '',
+                last != first and chr(last) or '') for first, last in result])
+
+        return _re.sub(r'([\000-\040\047])',  # for better portability
+            lambda m: '\\%03o' % ord(m.group(1)), (sequentize(result)
+                .replace('\\', '\\\\')
+                .replace('[', '\\[')
+                .replace(']', '\\]')))
+
+    def id_literal_(what):
+        """ Make id_literal like char class """
+        match = _re.compile(what).match
+        result = ''.join([chr(c) for c in xrange(127) if not match(chr(c))])
+        return '[^%s]' % fix_charclass(result)
+
+    def not_id_literal_(keep):
+        """ Make negated id_literal like char class """
+        match = _re.compile(id_literal_(keep)).match
+        result = ''.join([chr(c) for c in xrange(127) if not match(chr(c))])
+        return r'[%s]' % fix_charclass(result)
+
+    not_id_literal = not_id_literal_(r'[a-zA-Z0-9_$]')
+    preregex1 = r'[(,=:\[!&|?{};\r\n]'
+    preregex2 = r'%(not_id_literal)sreturn' % locals()
+
+    id_literal = id_literal_(r'[a-zA-Z0-9_$]')
+    id_literal_open = id_literal_(r'[a-zA-Z0-9_${\[(!+-]')
+    id_literal_close = id_literal_(r'[a-zA-Z0-9_$}\])"\047+-]')
+
+    dull = r'[^\047"/\000-\040]'
+
+    space_sub = _re.compile((
+        r'(%(dull)s+)'
+        r'|(%(strings)s%(dull)s*)'
+        r'|(?<=%(preregex1)s)'
+            r'%(space)s*(?:%(newline)s%(space)s*)*'
+            r'(%(regex)s%(dull)s*)'
+        r'|(?<=%(preregex2)s)'
+            r'%(space)s*(?:%(newline)s%(space)s)*'
+            r'(%(regex)s%(dull)s*)'
+        r'|(?<=%(id_literal_close)s)'
+            r'%(space)s*(?:(%(newline)s)%(space)s*)+'
+            r'(?=%(id_literal_open)s)'
+        r'|(?<=%(id_literal)s)(%(space)s)+(?=%(id_literal)s)'
+        r'|(?<=\+)(%(space)s)+(?=\+)'
+        r'|(?<=-)(%(space)s)+(?=-)'
+        r'|%(space)s+'
+        r'|(?:%(newline)s%(space)s*)+') % locals()).sub
+    #print space_sub.__self__.pattern
+
+    def space_subber(match):
+        """ Substitution callback """
+        # pylint: disable = C0321, R0911
+        groups = match.groups()
+        if groups[0]:
+            return groups[0]
+        elif groups[1]:
+            return groups[1]
+        elif groups[2]:
+            return groups[2]
+        elif groups[3]:
+            return groups[3]
+        elif groups[4]:
+            return '\n'
+        elif groups[5] or groups[6] or groups[7]:
+            return ' '
+        else:
+            return ''
+
+    def jsmin(script):  # pylint: disable = W0621
+        r"""
+        Minify javascript based on `jsmin.c by Douglas Crockford`_\.
+
+        Instead of parsing the stream char by char, it uses a regular
+        expression approach which minifies the whole script with one big
+        substitution regex.
+
+        .. _jsmin.c by Douglas Crockford:
+           http://www.crockford.com/javascript/jsmin.c
+
+        :Parameters:
+          `script` : ``str``
+            Script to minify
+
+        :Return: Minified script
+        :Rtype: ``str``
+        """
+        return space_sub(space_subber, '\n%s\n' % script).strip()
+
+    return jsmin
+
+jsmin = _make_jsmin()
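+
+# Illustrative example: jsmin('var a = 1;  // trailing comment') returns
+# 'var a=1;' -- comments and insignificant whitespace are removed.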
+
+
+def jsmin_for_posers(script):
+    r"""
+    Minify javascript based on `jsmin.c by Douglas Crockford`_\.
+
+    Instead of parsing the stream char by char, it uses a regular
+    expression approach which minifies the whole script with one big
+    substitution regex.
+
+    .. _jsmin.c by Douglas Crockford:
+       http://www.crockford.com/javascript/jsmin.c
+
+    :Warning: This function is the digest of a _make_jsmin() call. It just
+              utilizes the resulting regex. It's just for fun here and may
+              vanish any time. Use the `jsmin` function instead.
+
+    :Parameters:
+      `script` : ``str``
+        Script to minify
+
+    :Return: Minified script
+    :Rtype: ``str``
+    """
+    def subber(match):
+        """ Substitution callback """
+        groups = match.groups()
+        return (
+            groups[0] or
+            groups[1] or
+            groups[2] or
+            groups[3] or
+            (groups[4] and '\n') or
+            (groups[5] and ' ') or
+            (groups[6] and ' ') or
+            (groups[7] and ' ') or
+            '')
+
+    return _re.sub(
+        r'([^\047"/\000-\040]+)|((?:(?:\047[^\047\\\r\n]*(?:\\(?:[^\r\n]|\r?'
+        r'\n|\r)[^\047\\\r\n]*)*\047)|(?:"[^"\\\r\n]*(?:\\(?:[^\r\n]|\r?\n|'
+        r'\r)[^"\\\r\n]*)*"))[^\047"/\000-\040]*)|(?<=[(,=:\[!&|?{};\r\n])(?'
+        r':[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*'
+        r'(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*'
+        r'[^*]*\*+(?:[^/*][^*]*\*+)*/))*)*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?:('
+        r'?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/\\\['
+        r'\r\n]*)*/)[^\047"/\000-\040]*)|(?<=[\000-#%-,./:-@\[-^`{-~-]return'
+        r')(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/'
+        r'))*(?:(?:(?://[^\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:'
+        r'/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))*((?:/(?![\r\n/*])[^/\\\[\r\n]*(?'
+        r':(?:\\[^\r\n]|(?:\[[^\\\]\r\n]*(?:\\[^\r\n][^\\\]\r\n]*)*\]))[^/'
+        r'\\\[\r\n]*)*/)[^\047"/\000-\040]*)|(?<=[^\000-!#%&(*,./:-@\[\\^`{|'
+        r'~])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)'
+        r'*/))*(?:((?:(?://[^\r\n]*)?[\r\n]))(?:[\000-\011\013\014\016-\040]'
+        r'|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))*)+(?=[^\000-\040"#%-\047)*,./'
+        r':-@\\-^`|-~])|(?<=[^\000-#%-,./:-@\[-^`{-~-])((?:[\000-\011\013\01'
+        r'4\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=[^\000-#%-,./:'
+        r'-@\[-^`{-~-])|(?<=\+)((?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*'
+        r'\*+(?:[^/*][^*]*\*+)*/)))+(?=\+)|(?<=-)((?:[\000-\011\013\014\016-'
+        r'\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)))+(?=-)|(?:[\000-\011\013'
+        r'\014\016-\040]|(?:/\*[^*]*\*+(?:[^/*][^*]*\*+)*/))+|(?:(?:(?://[^'
+        r'\r\n]*)?[\r\n])(?:[\000-\011\013\014\016-\040]|(?:/\*[^*]*\*+(?:[^'
+        r'/*][^*]*\*+)*/))*)+', subber, '\n%s\n' % script).strip()
+
+
+if __name__ == '__main__':
+    import sys as _sys
+    _sys.stdout.write(jsmin(_sys.stdin.read()))
diff --git a/src/inspector/build/xxd.py b/src/inspector/build/xxd.py
new file mode 100644
index 0000000..5a63a7c
--- /dev/null
+++ b/src/inspector/build/xxd.py
@@ -0,0 +1,28 @@
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Represent a file as a C++ constant string.
+
+Usage:
+python xxd.py VAR SOURCE DEST
+"""
+
+
+import sys
+import rjsmin
+
+
+def main():
+    variable_name, input_filename, output_filename = sys.argv[1:]
+    with open(input_filename) as input_file:
+        input_text = input_file.read()
+    input_text = rjsmin.jsmin(input_text)
+    hex_values = ['0x{0:02x}'.format(ord(char)) for char in input_text]
+    const_declaration = 'const char %s[] = {\n%s\n};\n' % (
+        variable_name, ', '.join(hex_values))
+    with open(output_filename, 'w') as output_file:
+        output_file.write(const_declaration)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/inspector/debugger-script.js b/src/inspector/debugger-script.js
new file mode 100644
index 0000000..98910d6
--- /dev/null
+++ b/src/inspector/debugger-script.js
@@ -0,0 +1,712 @@
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+"use strict";
+
+(function () {
+
+var DebuggerScript = {};
+
+/** @enum */
+const PauseOnExceptionsState = {
+    DontPauseOnExceptions: 0,
+    PauseOnAllExceptions: 1,
+    PauseOnUncaughtExceptions: 2
+};
+DebuggerScript.PauseOnExceptionsState = PauseOnExceptionsState;
+
+DebuggerScript._pauseOnExceptionsState = DebuggerScript.PauseOnExceptionsState.DontPauseOnExceptions;
+Debug.clearBreakOnException();
+Debug.clearBreakOnUncaughtException();
+
+/**
+ * @param {?CompileEvent} eventData
+ */
+DebuggerScript.getAfterCompileScript = function(eventData)
+{
+    var script = eventData.script().value();
+    if (!script.is_debugger_script)
+        return DebuggerScript._formatScript(eventData.script().value());
+    return null;
+}
+
+/** @type {!Map<!ScopeType, string>} */
+DebuggerScript._scopeTypeNames = new Map();
+DebuggerScript._scopeTypeNames.set(ScopeType.Global, "global");
+DebuggerScript._scopeTypeNames.set(ScopeType.Local, "local");
+DebuggerScript._scopeTypeNames.set(ScopeType.With, "with");
+DebuggerScript._scopeTypeNames.set(ScopeType.Closure, "closure");
+DebuggerScript._scopeTypeNames.set(ScopeType.Catch, "catch");
+DebuggerScript._scopeTypeNames.set(ScopeType.Block, "block");
+DebuggerScript._scopeTypeNames.set(ScopeType.Script, "script");
+
+/**
+ * @param {function()} fun
+ * @return {?Array<!Scope>}
+ */
+DebuggerScript.getFunctionScopes = function(fun)
+{
+    var mirror = MakeMirror(fun);
+    if (!mirror.isFunction())
+        return null;
+    var functionMirror = /** @type {!FunctionMirror} */(mirror);
+    var count = functionMirror.scopeCount();
+    if (count == 0)
+        return null;
+    var result = [];
+    for (var i = 0; i < count; i++) {
+        var scopeDetails = functionMirror.scope(i).details();
+        var scopeObject = DebuggerScript._buildScopeObject(scopeDetails.type(), scopeDetails.object());
+        if (!scopeObject)
+            continue;
+        result.push({
+            type: /** @type {string} */(DebuggerScript._scopeTypeNames.get(scopeDetails.type())),
+            object: scopeObject,
+            name: scopeDetails.name() || ""
+        });
+    }
+    return result;
+}
+
+/**
+ * @param {Object} object
+ * @return {?RawLocation}
+ */
+DebuggerScript.getGeneratorObjectLocation = function(object)
+{
+    var mirror = MakeMirror(object, true /* transient */);
+    if (!mirror.isGenerator())
+        return null;
+    var generatorMirror = /** @type {!GeneratorMirror} */(mirror);
+    var funcMirror = generatorMirror.func();
+    if (!funcMirror.resolved())
+        return null;
+    var location = generatorMirror.sourceLocation() || funcMirror.sourceLocation();
+    var script = funcMirror.script();
+    if (script && location) {
+        return {
+            scriptId: "" + script.id(),
+            lineNumber: location.line,
+            columnNumber: location.column
+        };
+    }
+    return null;
+}
+
+/**
+ * @param {Object} object
+ * @return {!Array<!{value: *}>|undefined}
+ */
+DebuggerScript.getCollectionEntries = function(object)
+{
+    var mirror = MakeMirror(object, true /* transient */);
+    if (mirror.isMap())
+        return /** @type {!MapMirror} */(mirror).entries();
+    if (mirror.isSet() || mirror.isIterator()) {
+        var result = [];
+        var values = mirror.isSet() ? /** @type {!SetMirror} */(mirror).values() : /** @type {!IteratorMirror} */(mirror).preview();
+        for (var i = 0; i < values.length; ++i)
+            result.push({ value: values[i] });
+        return result;
+    }
+}
+
+/**
+ * @param {string|undefined} contextData
+ * @return {number}
+ */
+DebuggerScript._executionContextId = function(contextData)
+{
+    if (!contextData)
+        return 0;
+    var match = contextData.match(/^[^,]*,([^,]*),.*$/);
+    if (!match)
+        return 0;
+    return parseInt(match[1], 10) || 0;
+}
+
+/**
+ * @param {string|undefined} contextData
+ * @return {string}
+ */
+DebuggerScript._executionContextAuxData = function(contextData)
+{
+    if (!contextData)
+        return "";
+    var match = contextData.match(/^[^,]*,[^,]*,(.*)$/);
+    return match ? match[1] : "";
+}
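+
+// Illustrative example: given context_data of 1,7,{"isDefault":true},
+// _executionContextId returns 7 and _executionContextAuxData returns
+// {"isDefault":true}.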
+
+/**
+ * @param {string} contextGroupId
+ * @return {!Array<!FormattedScript>}
+ */
+DebuggerScript.getScripts = function(contextGroupId)
+{
+    var result = [];
+    var scripts = Debug.scripts();
+    var contextDataPrefix = null;
+    if (contextGroupId)
+        contextDataPrefix = contextGroupId + ",";
+    for (var i = 0; i < scripts.length; ++i) {
+        var script = scripts[i];
+        if (contextDataPrefix) {
+            if (!script.context_data)
+                continue;
+            // Context data is a string in the following format:
+            // <contextGroupId>,<contextId>,<auxData>
+            if (script.context_data.indexOf(contextDataPrefix) !== 0)
+                continue;
+        }
+        if (script.is_debugger_script)
+            continue;
+        result.push(DebuggerScript._formatScript(script));
+    }
+    return result;
+}
+
+/**
+ * @param {!Script} script
+ * @return {!FormattedScript}
+ */
+DebuggerScript._formatScript = function(script)
+{
+    var lineEnds = script.line_ends;
+    var lineCount = lineEnds.length;
+    var endLine = script.line_offset + lineCount - 1;
+    var endColumn;
+    // V8 will not count last line if script source ends with \n.
+    if (script.source[script.source.length - 1] === '\n') {
+        endLine += 1;
+        endColumn = 0;
+    } else {
+        if (lineCount === 1)
+            endColumn = script.source.length + script.column_offset;
+        else
+            endColumn = script.source.length - (lineEnds[lineCount - 2] + 1);
+    }
+    return {
+        id: script.id,
+        name: script.nameOrSourceURL(),
+        sourceURL: script.source_url,
+        sourceMappingURL: script.source_mapping_url,
+        source: script.source,
+        startLine: script.line_offset,
+        startColumn: script.column_offset,
+        endLine: endLine,
+        endColumn: endColumn,
+        executionContextId: DebuggerScript._executionContextId(script.context_data),
+        // Note that we cannot derive aux data from context id because of compilation cache.
+        executionContextAuxData: DebuggerScript._executionContextAuxData(script.context_data)
+    };
+}
+
+/**
+ * @param {!ExecutionState} execState
+ * @param {!BreakpointInfo} info
+ * @return {string|undefined}
+ */
+DebuggerScript.setBreakpoint = function(execState, info)
+{
+    var breakId = Debug.setScriptBreakPointById(info.sourceID, info.lineNumber, info.columnNumber, info.condition, undefined, Debug.BreakPositionAlignment.Statement);
+    var locations = Debug.findBreakPointActualLocations(breakId);
+    if (!locations.length)
+        return undefined;
+    info.lineNumber = locations[0].line;
+    info.columnNumber = locations[0].column;
+    return breakId.toString();
+}
+
+/**
+ * @param {!ExecutionState} execState
+ * @param {!{breakpointId: number}} info
+ */
+DebuggerScript.removeBreakpoint = function(execState, info)
+{
+    Debug.findBreakPoint(info.breakpointId, true);
+}
+
+/**
+ * @return {number}
+ */
+DebuggerScript.pauseOnExceptionsState = function()
+{
+    return DebuggerScript._pauseOnExceptionsState;
+}
+
+/**
+ * @param {number} newState
+ */
+DebuggerScript.setPauseOnExceptionsState = function(newState)
+{
+    DebuggerScript._pauseOnExceptionsState = newState;
+
+    if (DebuggerScript.PauseOnExceptionsState.PauseOnAllExceptions === newState)
+        Debug.setBreakOnException();
+    else
+        Debug.clearBreakOnException();
+
+    if (DebuggerScript.PauseOnExceptionsState.PauseOnUncaughtExceptions === newState)
+        Debug.setBreakOnUncaughtException();
+    else
+        Debug.clearBreakOnUncaughtException();
+}
+
+/**
+ * @param {!ExecutionState} execState
+ * @param {number} limit
+ * @return {!Array<!JavaScriptCallFrame>}
+ */
+DebuggerScript.currentCallFrames = function(execState, limit)
+{
+    var frames = [];
+    for (var i = 0; i < execState.frameCount() && (!limit || i < limit); ++i)
+        frames.push(DebuggerScript._frameMirrorToJSCallFrame(execState.frame(i)));
+    return frames;
+}
+
+/**
+ * @param {!ExecutionState} execState
+ */
+DebuggerScript.stepIntoStatement = function(execState)
+{
+    execState.prepareStep(Debug.StepAction.StepIn);
+}
+
+/**
+ * @param {!ExecutionState} execState
+ */
+DebuggerScript.stepFrameStatement = function(execState)
+{
+    execState.prepareStep(Debug.StepAction.StepFrame);
+}
+
+/**
+ * @param {!ExecutionState} execState
+ */
+DebuggerScript.stepOverStatement = function(execState)
+{
+    execState.prepareStep(Debug.StepAction.StepNext);
+}
+
+/**
+ * @param {!ExecutionState} execState
+ */
+DebuggerScript.stepOutOfFunction = function(execState)
+{
+    execState.prepareStep(Debug.StepAction.StepOut);
+}
+
+DebuggerScript.clearStepping = function()
+{
+    Debug.clearStepping();
+}
+
+// Returns an array of the form:
+//      [ 0, <v8_result_report> ] in case of success
+//   or [ 1, <general_error_message>, <compiler_message>, <line_number>, <column_number> ] in case of a compile error; line and column numbers are 1-based.
+// Throws an exception with a message otherwise.
+/**
+ * @param {number} scriptId
+ * @param {string} newSource
+ * @param {boolean} preview
+ * @return {!Array<*>}
+ */
+DebuggerScript.liveEditScriptSource = function(scriptId, newSource, preview)
+{
+    var scripts = Debug.scripts();
+    var scriptToEdit = null;
+    for (var i = 0; i < scripts.length; i++) {
+        if (scripts[i].id == scriptId) {
+            scriptToEdit = scripts[i];
+            break;
+        }
+    }
+    if (!scriptToEdit)
+        throw("Script not found");
+
+    var changeLog = [];
+    try {
+        var result = Debug.LiveEdit.SetScriptSource(scriptToEdit, newSource, preview, changeLog);
+        return [0, result.stack_modified];
+    } catch (e) {
+        if (e instanceof Debug.LiveEdit.Failure && "details" in e) {
+            var details = /** @type {!LiveEditErrorDetails} */(e.details);
+            if (details.type === "liveedit_compile_error") {
+                var startPosition = details.position.start;
+                return [1, String(e), String(details.syntaxErrorMessage), Number(startPosition.line), Number(startPosition.column)];
+            }
+        }
+        throw e;
+    }
+}
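A hedged usage sketch of the return contract documented above; scriptId and newSource are placeholders:

    var result = DebuggerScript.liveEditScriptSource(scriptId, newSource, /* preview */ true);
    if (result[0] === 0) {
        var stackModified = result[1];              // success: whether the stack was modified
    } else {
        var message = result[1];                    // general error message
        var compilerMessage = result[2];            // compiler diagnostic
        var line = result[3], column = result[4];   // 1-based error position
    }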
+
+/**
+ * @param {!ExecutionState} execState
+ */
+DebuggerScript.clearBreakpoints = function(execState)
+{
+    Debug.clearAllBreakPoints();
+}
+
+/**
+ * @param {!ExecutionState} execState
+ * @param {!{enabled: boolean}} info
+ */
+DebuggerScript.setBreakpointsActivated = function(execState, info)
+{
+    Debug.debuggerFlags().breakPointsActive.setValue(info.enabled);
+}
+
+/**
+ * @param {!BreakEvent} eventData
+ */
+DebuggerScript.getBreakpointNumbers = function(eventData)
+{
+    var breakpoints = eventData.breakPointsHit();
+    var numbers = [];
+    if (!breakpoints)
+        return numbers;
+
+    for (var i = 0; i < breakpoints.length; i++) {
+        var breakpoint = breakpoints[i];
+        var scriptBreakPoint = breakpoint.script_break_point();
+        numbers.push(scriptBreakPoint ? scriptBreakPoint.number() : breakpoint.number());
+    }
+    return numbers;
+}
+
+// NOTE: This function is performance critical, as it can be run on every
+// statement that generates an async event (like addEventListener) to support
+// asynchronous call stacks. Thus, when possible, initialize the data lazily.
+/**
+ * @param {!FrameMirror} frameMirror
+ * @return {!JavaScriptCallFrame}
+ */
+DebuggerScript._frameMirrorToJSCallFrame = function(frameMirror)
+{
+    // Stuff that cannot be initialized lazily (i.e., only valid while paused with a valid break_id).
+    // The frameMirror and scopeMirror can be accessed only while paused on the debugger.
+    var frameDetails = frameMirror.details();
+
+    var funcObject = frameDetails.func();
+    var sourcePosition = frameDetails.sourcePosition();
+    var thisObject = frameDetails.receiver();
+
+    var isAtReturn = !!frameDetails.isAtReturn();
+    var returnValue = isAtReturn ? frameDetails.returnValue() : undefined;
+
+    var scopeMirrors = frameMirror.allScopes(false);
+    /** @type {!Array<number>} */
+    var scopeTypes = new Array(scopeMirrors.length);
+    /** @type {?Array<!Object>} */
+    var scopeObjects = new Array(scopeMirrors.length);
+    /** @type {!Array<string|undefined>} */
+    var scopeNames = new Array(scopeMirrors.length);
+    /** @type {?Array<number>} */
+    var scopeStartPositions = new Array(scopeMirrors.length);
+    /** @type {?Array<number>} */
+    var scopeEndPositions = new Array(scopeMirrors.length);
+    /** @type {?Array<function()|null>} */
+    var scopeFunctions = new Array(scopeMirrors.length);
+    for (var i = 0; i < scopeMirrors.length; ++i) {
+        var scopeDetails = scopeMirrors[i].details();
+        scopeTypes[i] = scopeDetails.type();
+        scopeObjects[i] = scopeDetails.object();
+        scopeNames[i] = scopeDetails.name();
+        scopeStartPositions[i] = scopeDetails.startPosition ? scopeDetails.startPosition() : 0;
+        scopeEndPositions[i] = scopeDetails.endPosition ? scopeDetails.endPosition() : 0;
+        scopeFunctions[i] = scopeDetails.func ? scopeDetails.func() : null;
+    }
+
+    // Calculated lazily.
+    var scopeChain;
+    var funcMirror;
+    var location;
+    /** @type {!Array<?RawLocation>} */
+    var scopeStartLocations;
+    /** @type {!Array<?RawLocation>} */
+    var scopeEndLocations;
+    var details;
+
+    /**
+     * @param {!ScriptMirror|undefined} script
+     * @param {number} pos
+     * @return {?RawLocation}
+     */
+    function createLocation(script, pos)
+    {
+        if (!script)
+            return null;
+
+        var location = script.locationFromPosition(pos, true);
+        return {
+            "lineNumber": location.line,
+            "columnNumber": location.column,
+            "scriptId": String(script.id())
+        }
+    }
+
+    /**
+     * @return {!Array<!Object>}
+     */
+    function ensureScopeChain()
+    {
+        if (!scopeChain) {
+            scopeChain = [];
+            scopeStartLocations = [];
+            scopeEndLocations = [];
+            for (var i = 0, j = 0; i < scopeObjects.length; ++i) {
+                var scopeObject = DebuggerScript._buildScopeObject(scopeTypes[i], scopeObjects[i]);
+                if (scopeObject) {
+                    scopeTypes[j] = scopeTypes[i];
+                    scopeNames[j] = scopeNames[i];
+                    scopeChain[j] = scopeObject;
+
+                    var funcMirror = scopeFunctions ? MakeMirror(scopeFunctions[i]) : null;
+                    if (!funcMirror || !funcMirror.isFunction())
+                        funcMirror = new UnresolvedFunctionMirror(funcObject);
+
+                    var script = /** @type {!FunctionMirror} */(funcMirror).script();
+                    scopeStartLocations[j] = createLocation(script, scopeStartPositions[i]);
+                    scopeEndLocations[j] = createLocation(script, scopeEndPositions[i]);
+                    ++j;
+                }
+            }
+            scopeTypes.length = scopeChain.length;
+            scopeNames.length = scopeChain.length;
+            scopeObjects = null; // Free for GC.
+            scopeFunctions = null;
+            scopeStartPositions = null;
+            scopeEndPositions = null;
+        }
+        return scopeChain;
+    }
+
+    /**
+     * @return {!JavaScriptCallFrameDetails}
+     */
+    function lazyDetails()
+    {
+        if (!details) {
+            var scopeObjects = ensureScopeChain();
+            var script = ensureFuncMirror().script();
+            /** @type {!Array<Scope>} */
+            var scopes = [];
+            for (var i = 0; i < scopeObjects.length; ++i) {
+                var scope = {
+                    "type": /** @type {string} */(DebuggerScript._scopeTypeNames.get(scopeTypes[i])),
+                    "object": scopeObjects[i],
+                };
+                if (scopeNames[i])
+                    scope.name = scopeNames[i];
+                if (scopeStartLocations[i])
+                    scope.startLocation = /** @type {!RawLocation} */(scopeStartLocations[i]);
+                if (scopeEndLocations[i])
+                    scope.endLocation = /** @type {!RawLocation} */(scopeEndLocations[i]);
+                scopes.push(scope);
+            }
+            details = {
+                "functionName": ensureFuncMirror().debugName(),
+                "location": {
+                    "lineNumber": line(),
+                    "columnNumber": column(),
+                    "scriptId": String(script.id())
+                },
+                "this": thisObject,
+                "scopeChain": scopes
+            };
+            var functionLocation = ensureFuncMirror().sourceLocation();
+            if (functionLocation) {
+                details.functionLocation = {
+                    "lineNumber": functionLocation.line,
+                    "columnNumber": functionLocation.column,
+                    "scriptId": String(script.id())
+                };
+            }
+            if (isAtReturn)
+                details.returnValue = returnValue;
+        }
+        return details;
+    }
+
+    /**
+     * @return {!FunctionMirror}
+     */
+    function ensureFuncMirror()
+    {
+        if (!funcMirror) {
+            funcMirror = MakeMirror(funcObject);
+            if (!funcMirror.isFunction())
+                funcMirror = new UnresolvedFunctionMirror(funcObject);
+        }
+        return /** @type {!FunctionMirror} */(funcMirror);
+    }
+
+    /**
+     * @return {!{line: number, column: number}}
+     */
+    function ensureLocation()
+    {
+        if (!location) {
+            var script = ensureFuncMirror().script();
+            if (script)
+                location = script.locationFromPosition(sourcePosition, true);
+            if (!location)
+                location = { line: 0, column: 0 };
+        }
+        return location;
+    }
+
+    /**
+     * @return {number}
+     */
+    function line()
+    {
+        return ensureLocation().line;
+    }
+
+    /**
+     * @return {number}
+     */
+    function column()
+    {
+        return ensureLocation().column;
+    }
+
+    /**
+     * @return {number}
+     */
+    function contextId()
+    {
+        var mirror = ensureFuncMirror();
+        // Older V8 versions do not have a context() function on these objects.
+        if (!mirror.context)
+            return DebuggerScript._executionContextId(mirror.script().value().context_data);
+        var context = mirror.context();
+        if (context)
+            return DebuggerScript._executionContextId(context.data());
+        return 0;
+    }
+
+    /**
+     * @return {number|undefined}
+     */
+    function sourceID()
+    {
+        var script = ensureFuncMirror().script();
+        return script && script.id();
+    }
+
+    /**
+     * @param {string} expression
+     * @return {*}
+     */
+    function evaluate(expression)
+    {
+        return frameMirror.evaluate(expression, false).value();
+    }
+
+    /** @return {undefined} */
+    function restart()
+    {
+        return frameMirror.restart();
+    }
+
+    /**
+     * @param {number} scopeNumber
+     * @param {string} variableName
+     * @param {*} newValue
+     */
+    function setVariableValue(scopeNumber, variableName, newValue)
+    {
+        var scopeMirror = frameMirror.scope(scopeNumber);
+        if (!scopeMirror)
+            throw new Error("Incorrect scope index");
+        scopeMirror.setVariableValue(variableName, newValue);
+    }
+
+    return {
+        "sourceID": sourceID,
+        "line": line,
+        "column": column,
+        "contextId": contextId,
+        "thisObject": thisObject,
+        "evaluate": evaluate,
+        "restart": restart,
+        "setVariableValue": setVariableValue,
+        "isAtReturn": isAtReturn,
+        "details": lazyDetails
+    };
+}
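A sketch of how a caller might consume the wrapper returned above; execState is assumed to come from a break event, and the expensive pieces (location, scope chain, details) are only computed on first access:

    var frame = DebuggerScript.currentCallFrames(execState, 1)[0];
    frame.sourceID();               // script id via the lazily created FunctionMirror
    frame.line();                   // triggers ensureLocation()
    frame.evaluate("this");         // evaluates in the paused frame
    var details = frame.details();  // builds the scope chain and locations on first call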
+
+/**
+ * @param {number} scopeType
+ * @param {!Object} scopeObject
+ * @return {!Object|undefined}
+ */
+DebuggerScript._buildScopeObject = function(scopeType, scopeObject)
+{
+    var result;
+    switch (scopeType) {
+    case ScopeType.Local:
+    case ScopeType.Closure:
+    case ScopeType.Catch:
+    case ScopeType.Block:
+    case ScopeType.Script:
+        // For transient objects we create a "persistent" copy that contains
+        // the same properties.
+        // Reset scope object prototype to null so that the proto properties
+        // don't appear in the local scope section.
+        var properties = /** @type {!ObjectMirror} */(MakeMirror(scopeObject, true /* transient */)).properties();
+        // The Script scope is almost always empty, so just filter out that noise.
+        // Also drop empty Block scopes, should we get any.
+        if (!properties.length && (scopeType === ScopeType.Script || scopeType === ScopeType.Block))
+            break;
+        result = { __proto__: null };
+        for (var j = 0; j < properties.length; j++) {
+            var name = properties[j].name();
+            if (name.length === 0 || name.charAt(0) === ".")
+                continue; // Skip internal variables like ".arguments" and variables with an empty name
+            result[name] = properties[j].value_;
+        }
+        break;
+    case ScopeType.Global:
+    case ScopeType.With:
+        result = scopeObject;
+        break;
+    }
+    return result;
+}
+
+// We never resolve a Mirror by its handle, so to avoid memory leaks caused by Mirrors in the cache we disable it.
+ToggleMirrorCache(false);
+
+return DebuggerScript;
+})();
diff --git a/src/inspector/debugger_script_externs.js b/src/inspector/debugger_script_externs.js
new file mode 100644
index 0000000..c7df61f
--- /dev/null
+++ b/src/inspector/debugger_script_externs.js
@@ -0,0 +1,522 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/** @typedef {{
+        type: string,
+        object: !Object,
+        name: (string|undefined),
+        startLocation: (!RawLocation|undefined),
+        endLocation: (!RawLocation|undefined)
+    }} */
+var Scope;
+
+/** @typedef {{
+        scriptId: string,
+        lineNumber: number,
+        columnNumber: number
+    }} */
+var RawLocation;
+
+/** @typedef {{
+        id: number,
+        name: string,
+        sourceURL: (string|undefined),
+        sourceMappingURL: (string|undefined),
+        source: string,
+        startLine: number,
+        endLine: number,
+        startColumn: number,
+        endColumn: number,
+        executionContextId: number,
+        executionContextAuxData: string
+    }} */
+var FormattedScript;
+
+/** @typedef {{
+        functionName: string,
+        location: !RawLocation,
+        this: !Object,
+        scopeChain: !Array<!Scope>,
+        functionLocation: (RawLocation|undefined),
+        returnValue: (*|undefined)
+    }} */
+var JavaScriptCallFrameDetails;
+
+/** @typedef {{
+        sourceID: function():(number|undefined),
+        line: function():number,
+        column: function():number,
+        thisObject: !Object,
+        evaluate: function(string):*,
+        restart: function():undefined,
+        setVariableValue: function(number, string, *):undefined,
+        isAtReturn: boolean,
+        details: function():!JavaScriptCallFrameDetails
+    }} */
+var JavaScriptCallFrame;
+
+/**
+ * @const
+ */
+var Debug = {};
+
+Debug.setBreakOnException = function() {}
+
+Debug.clearBreakOnException = function() {}
+
+Debug.setBreakOnUncaughtException = function() {}
+
+/**
+ * @return {undefined}
+ */
+Debug.clearBreakOnUncaughtException = function() {}
+
+Debug.clearStepping = function() {}
+
+Debug.clearAllBreakPoints = function() {}
+
+/** @return {!Array<!Script>} */
+Debug.scripts = function() {}
+
+/**
+ * @param {number} scriptId
+ * @param {number=} line
+ * @param {number=} column
+ * @param {string=} condition
+ * @param {string=} groupId
+ * @param {Debug.BreakPositionAlignment=} positionAlignment
+ */
+Debug.setScriptBreakPointById = function(scriptId, line, column, condition, groupId, positionAlignment) {}
+
+/**
+ * @param {number} breakId
+ * @return {!Array<!SourceLocation>}
+ */
+Debug.findBreakPointActualLocations = function(breakId) {}
+
+/**
+ * @param {number} breakId
+ * @param {boolean} remove
+ * @return {!BreakPoint|undefined}
+ */
+Debug.findBreakPoint = function(breakId, remove) {}
+
+/** @return {!DebuggerFlags} */
+Debug.debuggerFlags = function() {}
+
+
+/** @enum */
+const BreakPositionAlignment = {
+    Statement: 0,
+    BreakPosition: 1
+};
+Debug.BreakPositionAlignment = BreakPositionAlignment;
+
+/** @enum */
+Debug.StepAction = { StepOut: 0,
+                     StepNext: 1,
+                     StepIn: 2,
+                     StepFrame: 3 };
+
+/** @enum */
+const ScriptCompilationType = { Host: 0,
+                                Eval: 1,
+                                JSON: 2 };
+Debug.ScriptCompilationType = ScriptCompilationType;
+
+
+/** @interface */
+function DebuggerFlag() {}
+
+/** @param {boolean} value */
+DebuggerFlag.prototype.setValue = function(value) {}
+
+
+/** @typedef {{
+ *    breakPointsActive: !DebuggerFlag
+ *  }}
+ */
+var DebuggerFlags;
+
+/** @const */
+var LiveEdit = {}
+
+/**
+ * @param {!Script} script
+ * @param {string} newSource
+ * @param {boolean} previewOnly
+ * @return {!{stack_modified: (boolean|undefined)}}
+ */
+LiveEdit.SetScriptSource = function(script, newSource, previewOnly, change_log) {}
+
+/** @constructor */
+function Failure() {}
+LiveEdit.Failure = Failure;
+
+Debug.LiveEdit = LiveEdit;
+
+/** @typedef {{
+ *    type: string,
+ *    syntaxErrorMessage: string,
+ *    position: !{start: !{line: number, column: number}},
+ *  }}
+ */
+var LiveEditErrorDetails;
+
+/** @typedef {{
+ *    breakpointId: number,
+ *    sourceID: number,
+ *    lineNumber: (number|undefined),
+ *    columnNumber: (number|undefined),
+ *    condition: (string|undefined),
+ *    interstatementLocation: (boolean|undefined),
+ *    }}
+ */
+var BreakpointInfo;
+
+
+/** @interface */
+function BreakPoint() {}
+
+/** @return {!BreakPoint|undefined} */
+BreakPoint.prototype.script_break_point = function() {}
+
+/** @return {number} */
+BreakPoint.prototype.number = function() {}
+
+
+/** @interface */
+function CompileEvent() {}
+
+/** @return {!ScriptMirror} */
+CompileEvent.prototype.script = function() {}
+
+
+/** @interface */
+function BreakEvent() {}
+
+/** @return {!Array<!BreakPoint>|undefined} */
+BreakEvent.prototype.breakPointsHit = function() {}
+
+
+/** @interface */
+function ExecutionState() {}
+
+/** @param {!Debug.StepAction} action */
+ExecutionState.prototype.prepareStep = function(action) {}
+
+/**
+ * @param {string} source
+ * @param {boolean} disableBreak
+ * @param {*=} additionalContext
+ */
+ExecutionState.prototype.evaluateGlobal = function(source, disableBreak, additionalContext) {}
+
+/** @return {number} */
+ExecutionState.prototype.frameCount = function() {}
+
+/**
+ * @param {number} index
+ * @return {!FrameMirror}
+ */
+ExecutionState.prototype.frame = function(index) {}
+
+/** @param {number} index */
+ExecutionState.prototype.setSelectedFrame = function(index) {}
+
+/** @return {number} */
+ExecutionState.prototype.selectedFrame = function() {}
+
+
+/** @enum */
+var ScopeType = { Global: 0,
+                  Local: 1,
+                  With: 2,
+                  Closure: 3,
+                  Catch: 4,
+                  Block: 5,
+                  Script: 6 };
+
+
+/** @typedef {{
+ *    script: number,
+ *    position: number,
+ *    line: number,
+ *    column: number,
+ *    start: number,
+ *    end: number,
+ *    }}
+ */
+var SourceLocation;
+
+/** @typedef {{
+ *    id: number,
+ *    context_data: (string|undefined),
+ *    source_url: (string|undefined),
+ *    source_mapping_url: (string|undefined),
+ *    is_debugger_script: boolean,
+ *    source: string,
+ *    line_ends: !Array<number>,
+ *    line_offset: number,
+ *    column_offset: number,
+ *    nameOrSourceURL: function():string,
+ *    compilationType: function():!ScriptCompilationType,
+ *    }}
+ */
+var Script;
+
+/** @interface */
+function ScopeDetails() {}
+
+/** @return {!Object} */
+ScopeDetails.prototype.object = function() {}
+
+/** @return {string|undefined} */
+ScopeDetails.prototype.name = function() {}
+
+/** @return {number} */
+ScopeDetails.prototype.type = function() {}
+
+
+/** @interface */
+function FrameDetails() {}
+
+/** @return {!Object} */
+FrameDetails.prototype.receiver = function() {}
+
+/** @return {function()} */
+FrameDetails.prototype.func = function() {}
+
+/** @return {boolean} */
+FrameDetails.prototype.isAtReturn = function() {}
+
+/** @return {number} */
+FrameDetails.prototype.sourcePosition = function() {}
+
+/** @return {*} */
+FrameDetails.prototype.returnValue = function() {}
+
+/** @return {number} */
+FrameDetails.prototype.scopeCount = function() {}
+
+
+/** @param {boolean} value */
+function ToggleMirrorCache(value) {}
+
+/**
+ * @param {*} value
+ * @param {boolean=} transient
+ * @return {!Mirror}
+ */
+function MakeMirror(value, transient) {}
+
+
+/** @interface */
+function Mirror() {}
+
+/** @return {boolean} */
+Mirror.prototype.isFunction = function() {}
+
+/** @return {boolean} */
+Mirror.prototype.isGenerator = function() {}
+
+/** @return {boolean} */
+Mirror.prototype.isMap = function() {}
+
+/** @return {boolean} */
+Mirror.prototype.isSet = function() {}
+
+/** @return {boolean} */
+Mirror.prototype.isIterator = function() {}
+
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function ObjectMirror() {}
+
+/** @return {!Array<!PropertyMirror>} */
+ObjectMirror.prototype.properties = function() {}
+
+
+/**
+ * @interface
+ * @extends {ObjectMirror}
+ */
+function FunctionMirror() {}
+
+/** @return {number} */
+FunctionMirror.prototype.scopeCount = function() {}
+
+/**
+ * @param {number} index
+ * @return {!ScopeMirror|undefined}
+ */
+FunctionMirror.prototype.scope = function(index) {}
+
+/** @return {boolean} */
+FunctionMirror.prototype.resolved = function() {}
+
+/** @return {function()} */
+FunctionMirror.prototype.value = function() {}
+
+/** @return {string} */
+FunctionMirror.prototype.debugName = function() {}
+
+/** @return {!ScriptMirror|undefined} */
+FunctionMirror.prototype.script = function() {}
+
+/** @return {!SourceLocation|undefined} */
+FunctionMirror.prototype.sourceLocation = function() {}
+
+/** @return {!ContextMirror|undefined} */
+FunctionMirror.prototype.context = function() {}
+
+/**
+ * @constructor
+ * @param {*} value
+ */
+function UnresolvedFunctionMirror(value) {}
+
+
+/**
+ * @interface
+ * @extends {ObjectMirror}
+ */
+function MapMirror() {}
+
+/**
+ * @param {number=} limit
+ * @return {!Array<!{key: *, value: *}>}
+ */
+MapMirror.prototype.entries = function(limit) {}
+
+
+/**
+ * @interface
+ * @extends {ObjectMirror}
+ */
+function SetMirror() {}
+
+/**
+ * @param {number=} limit
+ * @return {!Array<*>}
+ */
+SetMirror.prototype.values = function(limit) {}
+
+
+/**
+ * @interface
+ * @extends {ObjectMirror}
+ */
+function IteratorMirror() {}
+
+/**
+ * @param {number=} limit
+ * @return {!Array<*>}
+ */
+IteratorMirror.prototype.preview = function(limit) {}
+
+
+/**
+ * @interface
+ * @extends {ObjectMirror}
+ */
+function GeneratorMirror() {}
+
+/** @return {string} */
+GeneratorMirror.prototype.status = function() {}
+
+/** @return {!SourceLocation|undefined} */
+GeneratorMirror.prototype.sourceLocation = function() {}
+
+/** @return {!FunctionMirror} */
+GeneratorMirror.prototype.func = function() {}
+
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function PropertyMirror() {}
+
+/** @return {!Mirror} */
+PropertyMirror.prototype.value = function() {}
+
+/** @return {string} */
+PropertyMirror.prototype.name = function() {}
+
+/** @type {*} */
+PropertyMirror.prototype.value_;
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function FrameMirror() {}
+
+/**
+ * @param {boolean=} ignoreNestedScopes
+ * @return {!Array<!ScopeMirror>}
+ */
+FrameMirror.prototype.allScopes = function(ignoreNestedScopes) {}
+
+/** @return {!FrameDetails} */
+FrameMirror.prototype.details = function() {}
+
+/**
+ * @param {string} source
+ * @param {boolean} disableBreak
+ */
+FrameMirror.prototype.evaluate = function(source, disableBreak) {}
+
+FrameMirror.prototype.restart = function() {}
+
+/** @param {number} index */
+FrameMirror.prototype.scope = function(index) {}
+
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function ScriptMirror() {}
+
+/** @return {!Script} */
+ScriptMirror.prototype.value = function() {}
+
+/** @return {number} */
+ScriptMirror.prototype.id = function() {}
+
+/**
+ * @param {number} position
+ * @param {boolean=} includeResourceOffset
+ */
+ScriptMirror.prototype.locationFromPosition = function(position, includeResourceOffset) {}
+
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function ScopeMirror() {}
+
+/** @return {!ScopeDetails} */
+ScopeMirror.prototype.details = function() {}
+
+/**
+ * @param {string} name
+ * @param {*} newValue
+ */
+ScopeMirror.prototype.setVariableValue = function(name, newValue) {}
+
+/**
+ * @interface
+ * @extends {Mirror}
+ */
+function ContextMirror() {}
+
+/** @return {string|undefined} */
+ContextMirror.prototype.data = function() {}
diff --git a/src/inspector/injected-script-native.cc b/src/inspector/injected-script-native.cc
new file mode 100644
index 0000000..fcf2ead
--- /dev/null
+++ b/src/inspector/injected-script-native.cc
@@ -0,0 +1,89 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/injected-script-native.h"
+
+namespace v8_inspector {
+
+InjectedScriptNative::InjectedScriptNative(v8::Isolate* isolate)
+    : m_lastBoundObjectId(1), m_isolate(isolate) {}
+
+static const char privateKeyName[] = "v8-inspector#injectedScript";
+
+InjectedScriptNative::~InjectedScriptNative() {}
+
+void InjectedScriptNative::setOnInjectedScriptHost(
+    v8::Local<v8::Object> injectedScriptHost) {
+  v8::HandleScope handleScope(m_isolate);
+  v8::Local<v8::External> external = v8::External::New(m_isolate, this);
+  v8::Local<v8::Private> privateKey = v8::Private::ForApi(
+      m_isolate, v8::String::NewFromUtf8(m_isolate, privateKeyName,
+                                         v8::NewStringType::kInternalized)
+                     .ToLocalChecked());
+  injectedScriptHost->SetPrivate(m_isolate->GetCurrentContext(), privateKey,
+                                 external);
+}
+
+InjectedScriptNative* InjectedScriptNative::fromInjectedScriptHost(
+    v8::Isolate* isolate, v8::Local<v8::Object> injectedScriptObject) {
+  v8::HandleScope handleScope(isolate);
+  v8::Local<v8::Context> context = isolate->GetCurrentContext();
+  v8::Local<v8::Private> privateKey = v8::Private::ForApi(
+      isolate, v8::String::NewFromUtf8(isolate, privateKeyName,
+                                       v8::NewStringType::kInternalized)
+                   .ToLocalChecked());
+  v8::Local<v8::Value> value =
+      injectedScriptObject->GetPrivate(context, privateKey).ToLocalChecked();
+  DCHECK(value->IsExternal());
+  v8::Local<v8::External> external = value.As<v8::External>();
+  return static_cast<InjectedScriptNative*>(external->Value());
+}
+
+int InjectedScriptNative::bind(v8::Local<v8::Value> value,
+                               const String16& groupName) {
+  if (m_lastBoundObjectId <= 0) m_lastBoundObjectId = 1;
+  int id = m_lastBoundObjectId++;
+  m_idToWrappedObject[id] =
+      wrapUnique(new v8::Global<v8::Value>(m_isolate, value));
+  addObjectToGroup(id, groupName);
+  return id;
+}
+
+void InjectedScriptNative::unbind(int id) {
+  m_idToWrappedObject.erase(id);
+  m_idToObjectGroupName.erase(id);
+}
+
+v8::Local<v8::Value> InjectedScriptNative::objectForId(int id) {
+  auto iter = m_idToWrappedObject.find(id);
+  return iter != m_idToWrappedObject.end() ? iter->second->Get(m_isolate)
+                                           : v8::Local<v8::Value>();
+}
+
+void InjectedScriptNative::addObjectToGroup(int objectId,
+                                            const String16& groupName) {
+  if (groupName.isEmpty()) return;
+  if (objectId <= 0) return;
+  m_idToObjectGroupName[objectId] = groupName;
+  m_nameToObjectGroup[groupName].push_back(
+      objectId);  // Creates an empty vector if key is not there
+}
+
+void InjectedScriptNative::releaseObjectGroup(const String16& groupName) {
+  if (groupName.isEmpty()) return;
+  NameToObjectGroup::iterator groupIt = m_nameToObjectGroup.find(groupName);
+  if (groupIt == m_nameToObjectGroup.end()) return;
+  for (int id : groupIt->second) unbind(id);
+  m_nameToObjectGroup.erase(groupIt);
+}
+
+String16 InjectedScriptNative::groupName(int objectId) const {
+  if (objectId <= 0) return String16();
+  IdToObjectGroupName::const_iterator iterator =
+      m_idToObjectGroupName.find(objectId);
+  return iterator != m_idToObjectGroupName.end() ? iterator->second
+                                                 : String16();
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/injected-script-native.h b/src/inspector/injected-script-native.h
new file mode 100644
index 0000000..3bdf247
--- /dev/null
+++ b/src/inspector/injected-script-native.h
@@ -0,0 +1,47 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_INJECTEDSCRIPTNATIVE_H_
+#define V8_INSPECTOR_INJECTEDSCRIPTNATIVE_H_
+
+#include <vector>
+
+#include "src/inspector/protocol/Protocol.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class InjectedScriptNative final {
+ public:
+  explicit InjectedScriptNative(v8::Isolate*);
+  ~InjectedScriptNative();
+
+  void setOnInjectedScriptHost(v8::Local<v8::Object>);
+  static InjectedScriptNative* fromInjectedScriptHost(v8::Isolate* isolate,
+                                                      v8::Local<v8::Object>);
+
+  int bind(v8::Local<v8::Value>, const String16& groupName);
+  void unbind(int id);
+  v8::Local<v8::Value> objectForId(int id);
+
+  void releaseObjectGroup(const String16& groupName);
+  String16 groupName(int objectId) const;
+
+ private:
+  void addObjectToGroup(int objectId, const String16& groupName);
+
+  int m_lastBoundObjectId;
+  v8::Isolate* m_isolate;
+  protocol::HashMap<int, std::unique_ptr<v8::Global<v8::Value>>>
+      m_idToWrappedObject;
+  typedef protocol::HashMap<int, String16> IdToObjectGroupName;
+  IdToObjectGroupName m_idToObjectGroupName;
+  typedef protocol::HashMap<String16, std::vector<int>> NameToObjectGroup;
+  NameToObjectGroup m_nameToObjectGroup;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_INJECTEDSCRIPTNATIVE_H_
diff --git a/src/inspector/injected-script-source.js b/src/inspector/injected-script-source.js
new file mode 100644
index 0000000..39c6c9c
--- /dev/null
+++ b/src/inspector/injected-script-source.js
@@ -0,0 +1,1076 @@
+/*
+ * Copyright (C) 2007 Apple Inc.  All rights reserved.
+ * Copyright (C) 2013 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1.  Redistributions of source code must retain the above copyright
+ *     notice, this list of conditions and the following disclaimer.
+ * 2.  Redistributions in binary form must reproduce the above copyright
+ *     notice, this list of conditions and the following disclaimer in the
+ *     documentation and/or other materials provided with the distribution.
+ * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ *     its contributors may be used to endorse or promote products derived
+ *     from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+"use strict";
+
+/**
+ * @param {!InjectedScriptHostClass} InjectedScriptHost
+ * @param {!Window|!WorkerGlobalScope} inspectedGlobalObject
+ * @param {number} injectedScriptId
+ * @suppress {uselessCode}
+ */
+(function (InjectedScriptHost, inspectedGlobalObject, injectedScriptId) {
+
+/**
+ * Protect against Object being overwritten by user code.
+ * @suppress {duplicate}
+ */
+var Object = /** @type {function(new:Object, *=)} */ ({}.constructor);
+
+/**
+ * @param {!Array.<T>} array
+ * @param {...} var_args
+ * @template T
+ */
+function push(array, var_args)
+{
+    for (var i = 1; i < arguments.length; ++i)
+        array[array.length] = arguments[i];
+}
+
+/**
+ * @param {*} obj
+ * @return {string}
+ * @suppress {uselessCode}
+ */
+function toString(obj)
+{
+    // We don't use String(obj) because String could be overridden.
+    // Also the ("" + obj) expression may throw.
+    try {
+        return "" + obj;
+    } catch (e) {
+        var name = InjectedScriptHost.internalConstructorName(obj) || InjectedScriptHost.subtype(obj) || (typeof obj);
+        return "#<" + name + ">";
+    }
+}
+
+/**
+ * @param {*} obj
+ * @return {string}
+ */
+function toStringDescription(obj)
+{
+    if (typeof obj === "number" && obj === 0 && 1 / obj < 0)
+        return "-0"; // Negative zero.
+    return toString(obj);
+}
+
+/**
+ * @param {T} obj
+ * @return {T}
+ * @template T
+ */
+function nullifyObjectProto(obj)
+{
+    if (obj && typeof obj === "object")
+        obj.__proto__ = null;
+    return obj;
+}
+
+/**
+ * @param {number|string} obj
+ * @return {boolean}
+ */
+function isUInt32(obj)
+{
+    if (typeof obj === "number")
+        return obj >>> 0 === obj && (obj > 0 || 1 / obj > 0);
+    return "" + (obj >>> 0) === obj;
+}
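A few illustrative evaluations of the check above; the 1/obj term separates +0 from -0, and the string branch only accepts the canonical unsigned 32-bit form:

    isUInt32(0);      // true  (1/0 is Infinity)
    isUInt32(-0);     // false (1/-0 is -Infinity)
    isUInt32("42");   // true  ("" + (42 >>> 0) === "42")
    isUInt32("042");  // false (canonical form is "42")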
+
+/**
+ * Firebug's array detection.
+ * @param {*} obj
+ * @return {boolean}
+ */
+function isArrayLike(obj)
+{
+    if (typeof obj !== "object")
+        return false;
+    try {
+        if (typeof obj.splice === "function") {
+            if (!InjectedScriptHost.objectHasOwnProperty(/** @type {!Object} */ (obj), "length"))
+                return false;
+            var len = obj.length;
+            return typeof len === "number" && isUInt32(len);
+        }
+    } catch (e) {
+    }
+    return false;
+}
+
+/**
+ * @param {number} a
+ * @param {number} b
+ * @return {number}
+ */
+function max(a, b)
+{
+    return a > b ? a : b;
+}
+
+/**
+ * FIXME: Remove once ES6 is supported natively by JS compiler.
+ * @param {*} obj
+ * @return {boolean}
+ */
+function isSymbol(obj)
+{
+    var type = typeof obj;
+    return (type === "symbol");
+}
+
+/**
+ * DOM attributes which have an observable side effect on the getter, in the form of
+ *   {interfaceName1: {attributeName1: true,
+ *                     attributeName2: true,
+ *                     ...},
+ *    interfaceName2: {...},
+ *    ...}
+ * @type {!Object<string, !Object<string, boolean>>}
+ * @const
+ */
+var domAttributesWithObservableSideEffectOnGet = nullifyObjectProto({});
+domAttributesWithObservableSideEffectOnGet["Request"] = nullifyObjectProto({});
+domAttributesWithObservableSideEffectOnGet["Request"]["body"] = true;
+domAttributesWithObservableSideEffectOnGet["Response"] = nullifyObjectProto({});
+domAttributesWithObservableSideEffectOnGet["Response"]["body"] = true;
+
+/**
+ * @param {!Object} object
+ * @param {string} attribute
+ * @return {boolean}
+ */
+function doesAttributeHaveObservableSideEffectOnGet(object, attribute)
+{
+    for (var interfaceName in domAttributesWithObservableSideEffectOnGet) {
+        var interfaceFunction = inspectedGlobalObject[interfaceName];
+        // The instanceof call looks safe after the typeof check.
+        var isInstance = typeof interfaceFunction === "function" && /* suppressBlacklist */ object instanceof interfaceFunction;
+        if (isInstance)
+            return attribute in domAttributesWithObservableSideEffectOnGet[interfaceName];
+    }
+    return false;
+}
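With the table above, a hedged example of what the check reports for a fetch Response instance (response is a placeholder):

    doesAttributeHaveObservableSideEffectOnGet(response, "body");    // true: "body" is listed under Response
    doesAttributeHaveObservableSideEffectOnGet(response, "status");  // false: not in the table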
+
+/**
+ * @constructor
+ */
+var InjectedScript = function()
+{
+}
+
+/**
+ * @type {!Object.<string, boolean>}
+ * @const
+ */
+InjectedScript.primitiveTypes = {
+    "undefined": true,
+    "boolean": true,
+    "number": true,
+    "string": true,
+    __proto__: null
+}
+
+/**
+ * @type {!Object<string, string>}
+ * @const
+ */
+InjectedScript.closureTypes = { __proto__: null };
+InjectedScript.closureTypes["local"] = "Local";
+InjectedScript.closureTypes["closure"] = "Closure";
+InjectedScript.closureTypes["catch"] = "Catch";
+InjectedScript.closureTypes["block"] = "Block";
+InjectedScript.closureTypes["script"] = "Script";
+InjectedScript.closureTypes["with"] = "With Block";
+InjectedScript.closureTypes["global"] = "Global";
+
+InjectedScript.prototype = {
+    /**
+     * @param {*} object
+     * @return {boolean}
+     */
+    isPrimitiveValue: function(object)
+    {
+        // FIXME(33716): typeof document.all is always 'undefined'.
+        return InjectedScript.primitiveTypes[typeof object] && !this._isHTMLAllCollection(object);
+    },
+
+    /**
+     * @param {*} object
+     * @return {boolean}
+     */
+    _shouldPassByValue: function(object)
+    {
+        return typeof object === "object" && InjectedScriptHost.subtype(object) === "internal#location";
+    },
+
+    /**
+     * @param {*} object
+     * @param {string} groupName
+     * @param {boolean} forceValueType
+     * @param {boolean} generatePreview
+     * @return {!RuntimeAgent.RemoteObject}
+     */
+    wrapObject: function(object, groupName, forceValueType, generatePreview)
+    {
+        return this._wrapObject(object, groupName, forceValueType, generatePreview);
+    },
+
+    /**
+     * @param {!Array<!Object>} array
+     * @param {string} property
+     * @param {string} groupName
+     * @param {boolean} forceValueType
+     * @param {boolean} generatePreview
+     */
+    wrapPropertyInArray: function(array, property, groupName, forceValueType, generatePreview)
+    {
+        for (var i = 0; i < array.length; ++i) {
+            if (typeof array[i] === "object" && property in array[i])
+                array[i][property] = this.wrapObject(array[i][property], groupName, forceValueType, generatePreview);
+        }
+    },
+
+    /**
+     * @param {!Array<*>} array
+     * @param {string} groupName
+     * @param {boolean} forceValueType
+     * @param {boolean} generatePreview
+     */
+    wrapObjectsInArray: function(array, groupName, forceValueType, generatePreview)
+    {
+        for (var i = 0; i < array.length; ++i)
+            array[i] = this.wrapObject(array[i], groupName, forceValueType, generatePreview);
+    },
+
+    /**
+     * @param {!Object} table
+     * @param {!Array.<string>|string|boolean} columns
+     * @return {!RuntimeAgent.RemoteObject}
+     */
+    wrapTable: function(table, columns)
+    {
+        var columnNames = null;
+        if (typeof columns === "string")
+            columns = [columns];
+        if (InjectedScriptHost.subtype(columns) === "array") {
+            columnNames = [];
+            for (var i = 0; i < columns.length; ++i)
+                columnNames[i] = toString(columns[i]);
+        }
+        return this._wrapObject(table, "console", false, true, columnNames, true);
+    },
+
+    /**
+     * This method cannot throw.
+     * @param {*} object
+     * @param {string=} objectGroupName
+     * @param {boolean=} forceValueType
+     * @param {boolean=} generatePreview
+     * @param {?Array.<string>=} columnNames
+     * @param {boolean=} isTable
+     * @param {boolean=} doNotBind
+     * @param {*=} customObjectConfig
+     * @return {!RuntimeAgent.RemoteObject}
+     * @suppress {checkTypes}
+     */
+    _wrapObject: function(object, objectGroupName, forceValueType, generatePreview, columnNames, isTable, doNotBind, customObjectConfig)
+    {
+        try {
+            return new InjectedScript.RemoteObject(object, objectGroupName, doNotBind, forceValueType, generatePreview, columnNames, isTable, undefined, customObjectConfig);
+        } catch (e) {
+            try {
+                var description = injectedScript._describe(e);
+            } catch (ex) {
+                var description = "<failed to convert exception to string>";
+            }
+            return new InjectedScript.RemoteObject(description);
+        }
+    },
+
+    /**
+     * @param {!Object|symbol} object
+     * @param {string=} objectGroupName
+     * @return {string}
+     */
+    _bind: function(object, objectGroupName)
+    {
+        var id = InjectedScriptHost.bind(object, objectGroupName || "");
+        return "{\"injectedScriptId\":" + injectedScriptId + ",\"id\":" + id + "}";
+    },
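An illustration of the serialized id produced above; the concrete numbers depend on injectedScriptId and the bind counter, so the values shown are hypothetical:

    injectedScript._bind({ answer: 42 }, "console");
    // -> '{"injectedScriptId":3,"id":1}'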
+
+    /**
+     * @param {!Object} object
+     * @param {string} objectGroupName
+     * @param {boolean} ownProperties
+     * @param {boolean} accessorPropertiesOnly
+     * @param {boolean} generatePreview
+     * @return {!Array<!RuntimeAgent.PropertyDescriptor>|boolean}
+     */
+    getProperties: function(object, objectGroupName, ownProperties, accessorPropertiesOnly, generatePreview)
+    {
+        var subtype = this._subtype(object);
+        if (subtype === "internal#scope") {
+            // Internally, a scope contains an object with the scope variables plus additional
+            // information such as the type; we use the additional information for previews and
+            // report the variables themselves as the scope properties.
+            object = object.object;
+        }
+
+        var descriptors = [];
+        var iter = this._propertyDescriptors(object, ownProperties, accessorPropertiesOnly, undefined);
+        // Go over properties, wrap object values.
+        for (var descriptor of iter) {
+            if (subtype === "internal#scopeList" && descriptor.name === "length")
+                continue;
+            if ("get" in descriptor)
+                descriptor.get = this._wrapObject(descriptor.get, objectGroupName);
+            if ("set" in descriptor)
+                descriptor.set = this._wrapObject(descriptor.set, objectGroupName);
+            if ("value" in descriptor)
+                descriptor.value = this._wrapObject(descriptor.value, objectGroupName, false, generatePreview);
+            if (!("configurable" in descriptor))
+                descriptor.configurable = false;
+            if (!("enumerable" in descriptor))
+                descriptor.enumerable = false;
+            if ("symbol" in descriptor)
+                descriptor.symbol = this._wrapObject(descriptor.symbol, objectGroupName);
+            push(descriptors, descriptor);
+        }
+        return descriptors;
+    },
+
+    /**
+     * @param {!Object} object
+     * @return {?Object}
+     */
+    _objectPrototype: function(object)
+    {
+        if (InjectedScriptHost.subtype(object) === "proxy")
+            return null;
+        try {
+            return Object.getPrototypeOf(object);
+        } catch (e) {
+            return null;
+        }
+    },
+
+    /**
+     * @param {!Object} object
+     * @param {boolean=} ownProperties
+     * @param {boolean=} accessorPropertiesOnly
+     * @param {?Array.<string>=} propertyNamesOnly
+     */
+    _propertyDescriptors: function*(object, ownProperties, accessorPropertiesOnly, propertyNamesOnly)
+    {
+        var propertyProcessed = { __proto__: null };
+
+        /**
+         * @param {?Object} o
+         * @param {!Iterable<string|symbol|number>|!Array<string|number|symbol>} properties
+         */
+        function* process(o, properties)
+        {
+            for (var property of properties) {
+                var name;
+                if (isSymbol(property))
+                    name = /** @type {string} */ (injectedScript._describe(property));
+                else
+                    name = typeof property === "number" ? ("" + property) : /** @type {string} */(property);
+
+                if (propertyProcessed[property])
+                    continue;
+
+                try {
+                    propertyProcessed[property] = true;
+                    var descriptor = nullifyObjectProto(Object.getOwnPropertyDescriptor(o, property));
+                    if (descriptor) {
+                        if (accessorPropertiesOnly && !("get" in descriptor || "set" in descriptor))
+                            continue;
+                        if ("get" in descriptor && "set" in descriptor && name != "__proto__" && InjectedScriptHost.formatAccessorsAsProperties(object, descriptor.get) && !doesAttributeHaveObservableSideEffectOnGet(object, name)) {
+                            descriptor.value = object[property];
+                            descriptor.isOwn = true;
+                            delete descriptor.get;
+                            delete descriptor.set;
+                        }
+                    } else {
+                        // Not all bindings provide proper descriptors. Fall back to the writable, configurable property.
+                        if (accessorPropertiesOnly)
+                            continue;
+                        try {
+                            descriptor = { name: name, value: o[property], writable: false, configurable: false, enumerable: false, __proto__: null };
+                            if (o === object)
+                                descriptor.isOwn = true;
+                            yield descriptor;
+                        } catch (e) {
+                            // Silent catch.
+                        }
+                        continue;
+                    }
+                } catch (e) {
+                    if (accessorPropertiesOnly)
+                        continue;
+                    var descriptor = { __proto__: null };
+                    descriptor.value = e;
+                    descriptor.wasThrown = true;
+                }
+
+                descriptor.name = name;
+                if (o === object)
+                    descriptor.isOwn = true;
+                if (isSymbol(property))
+                    descriptor.symbol = property;
+                yield descriptor;
+            }
+        }
+
+        if (propertyNamesOnly) {
+            for (var i = 0; i < propertyNamesOnly.length; ++i) {
+                var name = propertyNamesOnly[i];
+                for (var o = object; this._isDefined(o); o = this._objectPrototype(o)) {
+                    if (InjectedScriptHost.objectHasOwnProperty(o, name)) {
+                        for (var descriptor of process(o, [name]))
+                            yield descriptor;
+                        break;
+                    }
+                    if (ownProperties)
+                        break;
+                }
+            }
+            return;
+        }
+
+        /**
+         * @param {number} length
+         */
+        function* arrayIndexNames(length)
+        {
+            for (var i = 0; i < length; ++i)
+                yield "" + i;
+        }
+
+        var skipGetOwnPropertyNames;
+        try {
+            skipGetOwnPropertyNames = InjectedScriptHost.subtype(object) === "typedarray" && object.length > 500000;
+        } catch (e) {
+        }
+
+        for (var o = object; this._isDefined(o); o = this._objectPrototype(o)) {
+            if (InjectedScriptHost.subtype(o) === "proxy")
+                continue;
+            if (skipGetOwnPropertyNames && o === object) {
+                // Avoid OOM crashes from getting all own property names of a large TypedArray.
+                for (var descriptor of process(o, arrayIndexNames(o.length)))
+                    yield descriptor;
+            } else {
+                // First call Object.keys() to enforce ordering of the property descriptors.
+                for (var descriptor of process(o, Object.keys(/** @type {!Object} */ (o))))
+                    yield descriptor;
+                for (var descriptor of process(o, Object.getOwnPropertyNames(/** @type {!Object} */ (o))))
+                    yield descriptor;
+            }
+            if (Object.getOwnPropertySymbols) {
+                for (var descriptor of process(o, Object.getOwnPropertySymbols(/** @type {!Object} */ (o))))
+                    yield descriptor;
+            }
+            if (ownProperties) {
+                var proto = this._objectPrototype(o);
+                if (proto && !accessorPropertiesOnly)
+                    yield { name: "__proto__", value: proto, writable: true, configurable: true, enumerable: false, isOwn: true, __proto__: null };
+                break;
+            }
+        }
+    },
+
+    /**
+     * @param {string|undefined} objectGroupName
+     * @param {*} jsonMLObject
+     * @throws {string} error message
+     */
+    _substituteObjectTagsInCustomPreview: function(objectGroupName, jsonMLObject)
+    {
+        var maxCustomPreviewRecursionDepth = 20;
+        this._customPreviewRecursionDepth = (this._customPreviewRecursionDepth || 0) + 1
+        try {
+            if (this._customPreviewRecursionDepth >= maxCustomPreviewRecursionDepth)
+                throw new Error("Too deep hierarchy of inlined custom previews");
+
+            if (!isArrayLike(jsonMLObject))
+                return;
+
+            if (jsonMLObject[0] === "object") {
+                var attributes = jsonMLObject[1];
+                var originObject = attributes["object"];
+                var config = attributes["config"];
+                if (typeof originObject === "undefined")
+                    throw new Error("Illegal format: obligatory attribute \"object\" isn't specified");
+
+                jsonMLObject[1] = this._wrapObject(originObject, objectGroupName, false, false, null, false, false, config);
+                return;
+            }
+
+            for (var i = 0; i < jsonMLObject.length; ++i)
+                this._substituteObjectTagsInCustomPreview(objectGroupName, jsonMLObject[i]);
+        } finally {
+            this._customPreviewRecursionDepth--;
+        }
+    },
+
+    /**
+     * @param {*} object
+     * @return {boolean}
+     */
+    _isDefined: function(object)
+    {
+        return !!object || this._isHTMLAllCollection(object);
+    },
+
+    /**
+     * @param {*} object
+     * @return {boolean}
+     */
+    _isHTMLAllCollection: function(object)
+    {
+        // document.all is reported as undefined, but we still want to process it.
+        return (typeof object === "undefined") && !!InjectedScriptHost.subtype(object);
+    },
+
+    /**
+     * @param {*} obj
+     * @return {?string}
+     */
+    _subtype: function(obj)
+    {
+        if (obj === null)
+            return "null";
+
+        if (this.isPrimitiveValue(obj))
+            return null;
+
+        var subtype = InjectedScriptHost.subtype(obj);
+        if (subtype)
+            return subtype;
+
+        if (isArrayLike(obj))
+            return "array";
+
+        // If the owning frame has navigated somewhere else, window properties will be undefined.
+        return null;
+    },
+
+    /**
+     * @param {*} obj
+     * @return {?string}
+     */
+    _describe: function(obj)
+    {
+        if (this.isPrimitiveValue(obj))
+            return null;
+
+        var subtype = this._subtype(obj);
+
+        if (subtype === "regexp")
+            return toString(obj);
+
+        if (subtype === "date")
+            return toString(obj);
+
+        if (subtype === "node") {
+            var description = "";
+            if (obj.nodeName)
+                description = obj.nodeName.toLowerCase();
+            else if (obj.constructor)
+                description = obj.constructor.name.toLowerCase();
+
+            switch (obj.nodeType) {
+            case 1 /* Node.ELEMENT_NODE */:
+                description += obj.id ? "#" + obj.id : "";
+                var className = obj.className;
+                description += (className && typeof className === "string") ? "." + className.trim().replace(/\s+/g, ".") : "";
+                break;
+            case 10 /* Node.DOCUMENT_TYPE_NODE */:
+                description = "<!DOCTYPE " + description + ">";
+                break;
+            }
+            return description;
+        }
+
+        if (subtype === "proxy")
+            return "Proxy";
+
+        var className = InjectedScriptHost.internalConstructorName(obj);
+        if (subtype === "array" || subtype === "typedarray") {
+            if (typeof obj.length === "number")
+                className += "[" + obj.length + "]";
+            return className;
+        }
+
+        if (typeof obj === "function")
+            return toString(obj);
+
+        if (isSymbol(obj)) {
+            try {
+                // This isn't safe because Symbol.prototype.toString can be overridden.
+                return /* suppressBlacklist */ obj.toString() || "Symbol";
+            } catch (e) {
+                return "Symbol";
+            }
+        }
+
+        if (InjectedScriptHost.subtype(obj) === "error") {
+            try {
+                var stack = obj.stack;
+                var message = obj.message && obj.message.length ? ": " + obj.message : "";
+                var firstCallFrame = /^\s+at\s/m.exec(stack);
+                var stackMessageEnd = firstCallFrame ? firstCallFrame.index : -1;
+                if (stackMessageEnd !== -1) {
+                    var stackTrace = stack.substr(stackMessageEnd);
+                    return className + message + "\n" + stackTrace;
+                }
+                return className + message;
+            } catch(e) {
+            }
+        }
+
+        if (subtype === "internal#entry") {
+            if ("key" in obj)
+                return "{" + this._describeIncludingPrimitives(obj.key) + " => " + this._describeIncludingPrimitives(obj.value) + "}";
+            return this._describeIncludingPrimitives(obj.value);
+        }
+
+        if (subtype === "internal#scopeList")
+            return "Scopes[" + obj.length + "]";
+
+        if (subtype === "internal#scope")
+            return (InjectedScript.closureTypes[obj.type] || "Unknown") + (obj.name ? " (" + obj.name + ")" : "");
+
+        return className;
+    },
+
+    /**
+     * @param {*} value
+     * @return {string}
+     */
+    _describeIncludingPrimitives: function(value)
+    {
+        if (typeof value === "string")
+            return "\"" + value.replace(/\n/g, "\u21B5") + "\"";
+        if (value === null)
+            return "" + value;
+        return this.isPrimitiveValue(value) ? toStringDescription(value) : (this._describe(value) || "");
+    },
+
+    /**
+     * @param {boolean} enabled
+     */
+    setCustomObjectFormatterEnabled: function(enabled)
+    {
+        this._customObjectFormatterEnabled = enabled;
+    }
+}
+
+/**
+ * @type {!InjectedScript}
+ * @const
+ */
+var injectedScript = new InjectedScript();
+
+/**
+ * @constructor
+ * @param {*} object
+ * @param {string=} objectGroupName
+ * @param {boolean=} doNotBind
+ * @param {boolean=} forceValueType
+ * @param {boolean=} generatePreview
+ * @param {?Array.<string>=} columnNames
+ * @param {boolean=} isTable
+ * @param {boolean=} skipEntriesPreview
+ * @param {*=} customObjectConfig
+ */
+InjectedScript.RemoteObject = function(object, objectGroupName, doNotBind, forceValueType, generatePreview, columnNames, isTable, skipEntriesPreview, customObjectConfig)
+{
+    this.type = typeof object;
+    if (this.type === "undefined" && injectedScript._isHTMLAllCollection(object))
+        this.type = "object";
+
+    if (injectedScript.isPrimitiveValue(object) || object === null || forceValueType) {
+        // We don't send undefined values over JSON.
+        if (this.type !== "undefined")
+            this.value = object;
+
+        // A null value is reported as an object with the 'null' subtype.
+        if (object === null)
+            this.subtype = "null";
+
+        // Provide user-friendly number values.
+        if (this.type === "number") {
+            this.description = toStringDescription(object);
+            switch (this.description) {
+            case "NaN":
+            case "Infinity":
+            case "-Infinity":
+            case "-0":
+                delete this.value;
+                this.unserializableValue = this.description;
+                break;
+            }
+        }
+
+        return;
+    }
+
+    if (injectedScript._shouldPassByValue(object)) {
+        this.value = object;
+        this.subtype = injectedScript._subtype(object);
+        this.description = injectedScript._describeIncludingPrimitives(object);
+        return;
+    }
+
+    object = /** @type {!Object} */ (object);
+
+    if (!doNotBind)
+        this.objectId = injectedScript._bind(object, objectGroupName);
+    var subtype = injectedScript._subtype(object);
+    if (subtype)
+        this.subtype = subtype;
+    var className = InjectedScriptHost.internalConstructorName(object);
+    if (className)
+        this.className = className;
+    this.description = injectedScript._describe(object);
+
+    if (generatePreview && this.type === "object") {
+        if (this.subtype === "proxy")
+            this.preview = this._generatePreview(InjectedScriptHost.proxyTargetValue(object), undefined, columnNames, isTable, skipEntriesPreview);
+        else if (this.subtype !== "node")
+            this.preview = this._generatePreview(object, undefined, columnNames, isTable, skipEntriesPreview);
+    }
+
+    if (injectedScript._customObjectFormatterEnabled) {
+        var customPreview = this._customPreview(object, objectGroupName, customObjectConfig);
+        if (customPreview)
+            this.customPreview = customPreview;
+    }
+}
+
+InjectedScript.RemoteObject.prototype = {
+
+    /**
+     * @param {*} object
+     * @param {string=} objectGroupName
+     * @param {*=} customObjectConfig
+     * @return {?RuntimeAgent.CustomPreview}
+     */
+    _customPreview: function(object, objectGroupName, customObjectConfig)
+    {
+        /**
+         * @param {!Error} error
+         */
+        function logError(error)
+        {
+            // We use user code to generate custom output for the object, so we can use user code to report errors too.
+            Promise.resolve().then(/* suppressBlacklist */ inspectedGlobalObject.console.error.bind(inspectedGlobalObject.console, "Custom Formatter Failed: " + error.message));
+        }
+
+        /**
+         * @param {*} object
+         * @param {*=} customObjectConfig
+         * @return {*}
+         */
+        function wrap(object, customObjectConfig)
+        {
+            return injectedScript._wrapObject(object, objectGroupName, false, false, null, false, false, customObjectConfig);
+        }
+
+        try {
+            var formatters = inspectedGlobalObject["devtoolsFormatters"];
+            if (!formatters || !isArrayLike(formatters))
+                return null;
+
+            for (var i = 0; i < formatters.length; ++i) {
+                try {
+                    var formatted = formatters[i].header(object, customObjectConfig);
+                    if (!formatted)
+                        continue;
+
+                    var hasBody = formatters[i].hasBody(object, customObjectConfig);
+                    injectedScript._substituteObjectTagsInCustomPreview(objectGroupName, formatted);
+                    var formatterObjectId = injectedScript._bind(formatters[i], objectGroupName);
+                    var bindRemoteObjectFunctionId = injectedScript._bind(wrap, objectGroupName);
+                    var result = {header: JSON.stringify(formatted), hasBody: !!hasBody, formatterObjectId: formatterObjectId, bindRemoteObjectFunctionId: bindRemoteObjectFunctionId};
+                    if (customObjectConfig)
+                        result["configObjectId"] = injectedScript._bind(customObjectConfig, objectGroupName);
+                    return result;
+                } catch (e) {
+                    logError(e);
+                }
+            }
+        } catch (e) {
+            logError(e);
+        }
+        return null;
+    },
+
+    /**
+     * @return {!RuntimeAgent.ObjectPreview} preview
+     */
+    _createEmptyPreview: function()
+    {
+        var preview = {
+            type: /** @type {!RuntimeAgent.ObjectPreviewType.<string>} */ (this.type),
+            description: this.description || toStringDescription(this.value),
+            overflow: false,
+            properties: [],
+            __proto__: null
+        };
+        if (this.subtype)
+            preview.subtype = /** @type {!RuntimeAgent.ObjectPreviewSubtype.<string>} */ (this.subtype);
+        return preview;
+    },
+
+    /**
+     * @param {!Object} object
+     * @param {?Array.<string>=} firstLevelKeys
+     * @param {?Array.<string>=} secondLevelKeys
+     * @param {boolean=} isTable
+     * @param {boolean=} skipEntriesPreview
+     * @return {!RuntimeAgent.ObjectPreview} preview
+     */
+    _generatePreview: function(object, firstLevelKeys, secondLevelKeys, isTable, skipEntriesPreview)
+    {
+        var preview = this._createEmptyPreview();
+        var firstLevelKeysCount = firstLevelKeys ? firstLevelKeys.length : 0;
+
+        var propertiesThreshold = {
+            properties: isTable ? 1000 : max(5, firstLevelKeysCount),
+            indexes: isTable ? 1000 : max(100, firstLevelKeysCount),
+            __proto__: null
+        };
+
+        try {
+            var descriptors = injectedScript._propertyDescriptors(object, undefined, undefined, firstLevelKeys);
+
+            this._appendPropertyDescriptors(preview, descriptors, propertiesThreshold, secondLevelKeys, isTable);
+            if (propertiesThreshold.indexes < 0 || propertiesThreshold.properties < 0)
+                return preview;
+
+            // Add internal properties to preview.
+            var rawInternalProperties = InjectedScriptHost.getInternalProperties(object) || [];
+            var internalProperties = [];
+            var entries = null;
+            for (var i = 0; i < rawInternalProperties.length; i += 2) {
+                if (rawInternalProperties[i] === "[[Entries]]") {
+                    entries = /** @type {!Array<*>} */(rawInternalProperties[i + 1]);
+                    continue;
+                }
+                push(internalProperties, {
+                    name: rawInternalProperties[i],
+                    value: rawInternalProperties[i + 1],
+                    isOwn: true,
+                    enumerable: true,
+                    __proto__: null
+                });
+            }
+            this._appendPropertyDescriptors(preview, internalProperties, propertiesThreshold, secondLevelKeys, isTable);
+
+            if (this.subtype === "map" || this.subtype === "set" || this.subtype === "iterator")
+                this._appendEntriesPreview(entries, preview, skipEntriesPreview);
+
+        } catch (e) {}
+
+        return preview;
+    },
+
+    /**
+     * @param {!RuntimeAgent.ObjectPreview} preview
+     * @param {!Array.<*>|!Iterable.<*>} descriptors
+     * @param {!Object} propertiesThreshold
+     * @param {?Array.<string>=} secondLevelKeys
+     * @param {boolean=} isTable
+     */
+    _appendPropertyDescriptors: function(preview, descriptors, propertiesThreshold, secondLevelKeys, isTable)
+    {
+        for (var descriptor of descriptors) {
+            if (propertiesThreshold.indexes < 0 || propertiesThreshold.properties < 0)
+                break;
+            if (!descriptor || descriptor.wasThrown)
+                continue;
+
+            var name = descriptor.name;
+
+            // Ignore __proto__ property.
+            if (name === "__proto__")
+                continue;
+
+            // Ignore length property of array.
+            if ((this.subtype === "array" || this.subtype === "typedarray") && name === "length")
+                continue;
+
+            // Ignore size property of map, set.
+            if ((this.subtype === "map" || this.subtype === "set") && name === "size")
+                continue;
+
+            // Never preview prototype properties.
+            if (!descriptor.isOwn)
+                continue;
+
+            // Ignore computed properties.
+            if (!("value" in descriptor))
+                continue;
+
+            var value = descriptor.value;
+            var type = typeof value;
+
+            // Never render functions in object preview.
+            if (type === "function" && (this.subtype !== "array" || !isUInt32(name)))
+                continue;
+
+            // Special-case HTMLAll.
+            if (type === "undefined" && injectedScript._isHTMLAllCollection(value))
+                type = "object";
+
+            // Render own properties.
+            if (value === null) {
+                this._appendPropertyPreview(preview, { name: name, type: "object", subtype: "null", value: "null", __proto__: null }, propertiesThreshold);
+                continue;
+            }
+
+            var maxLength = 100;
+            if (InjectedScript.primitiveTypes[type]) {
+                if (type === "string" && value.length > maxLength)
+                    value = this._abbreviateString(value, maxLength, true);
+                this._appendPropertyPreview(preview, { name: name, type: type, value: toStringDescription(value), __proto__: null }, propertiesThreshold);
+                continue;
+            }
+
+            var property = { name: name, type: type, __proto__: null };
+            var subtype = injectedScript._subtype(value);
+            if (subtype)
+                property.subtype = subtype;
+
+            if (secondLevelKeys === null || secondLevelKeys) {
+                var subPreview = this._generatePreview(value, secondLevelKeys || undefined, undefined, isTable);
+                property.valuePreview = subPreview;
+                if (subPreview.overflow)
+                    preview.overflow = true;
+            } else {
+                var description = "";
+                if (type !== "function")
+                    description = this._abbreviateString(/** @type {string} */ (injectedScript._describe(value)), maxLength, subtype === "regexp");
+                property.value = description;
+            }
+            this._appendPropertyPreview(preview, property, propertiesThreshold);
+        }
+    },
+
+    /**
+     * @param {!RuntimeAgent.ObjectPreview} preview
+     * @param {!Object} property
+     * @param {!Object} propertiesThreshold
+     */
+    _appendPropertyPreview: function(preview, property, propertiesThreshold)
+    {
+        if (toString(property.name >>> 0) === property.name)
+            propertiesThreshold.indexes--;
+        else
+            propertiesThreshold.properties--;
+        if (propertiesThreshold.indexes < 0 || propertiesThreshold.properties < 0) {
+            preview.overflow = true;
+        } else {
+            push(preview.properties, property);
+        }
+    },
+
+    /**
+     * @param {?Array<*>} entries
+     * @param {!RuntimeAgent.ObjectPreview} preview
+     * @param {boolean=} skipEntriesPreview
+     */
+    _appendEntriesPreview: function(entries, preview, skipEntriesPreview)
+    {
+        if (!entries)
+            return;
+        if (skipEntriesPreview) {
+            if (entries.length)
+                preview.overflow = true;
+            return;
+        }
+        preview.entries = [];
+        var entriesThreshold = 5;
+        for (var i = 0; i < entries.length; ++i) {
+            if (preview.entries.length >= entriesThreshold) {
+                preview.overflow = true;
+                break;
+            }
+            var entry = nullifyObjectProto(entries[i]);
+            var previewEntry = {
+                value: generateValuePreview(entry.value),
+                __proto__: null
+            };
+            if ("key" in entry)
+                previewEntry.key = generateValuePreview(entry.key);
+            push(preview.entries, previewEntry);
+        }
+
+        /**
+         * @param {*} value
+         * @return {!RuntimeAgent.ObjectPreview}
+         */
+        function generateValuePreview(value)
+        {
+            var remoteObject = new InjectedScript.RemoteObject(value, undefined, true, undefined, true, undefined, undefined, true);
+            var valuePreview = remoteObject.preview || remoteObject._createEmptyPreview();
+            return valuePreview;
+        }
+    },
+
+    /**
+     * @param {string} string
+     * @param {number} maxLength
+     * @param {boolean=} middle
+     * @return {string}
+     */
+    _abbreviateString: function(string, maxLength, middle)
+    {
+        if (string.length <= maxLength)
+            return string;
+        if (middle) {
+            var leftHalf = maxLength >> 1;
+            var rightHalf = maxLength - leftHalf - 1;
+            return string.substr(0, leftHalf) + "\u2026" + string.substr(string.length - rightHalf, rightHalf);
+        }
+        return string.substr(0, maxLength) + "\u2026";
+    },
+
+    __proto__: null
+}
+
+return injectedScript;
+})
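
For orientation, injected-script-source.js is never shipped as a script file: the inspector_injected_script target in inspector.gyp (further below) runs build/xxd.py over it to generate a header that embeds the script as a plain char array, and injected-script.cc rebuilds the source from that array as a (pointer, size) pair. A minimal sketch of that idea follows, with made-up bytes rather than the real generator output; only the symbol name InjectedScriptSource_js matches the gyp action below.

    // Illustration only: the generated header has one array element per byte
    // of injected-script-source.js and no trailing NUL, which is why consumers
    // pair the array with sizeof() instead of treating it as a C string.
    #include <string>

    const char InjectedScriptSource_js[] = {'(', 'f', 'u', 'n', 'c', 't', 'i',
                                            'o', 'n', ' ', /* ...rest... */};

    int main() {
      // Mirrors how InjectedScript::create() reconstructs the script source.
      std::string source(InjectedScriptSource_js,
                         sizeof(InjectedScriptSource_js));
      return source.size() == sizeof(InjectedScriptSource_js) ? 0 : 1;
    }
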
diff --git a/src/inspector/injected-script.cc b/src/inspector/injected-script.cc
new file mode 100644
index 0000000..a100dea
--- /dev/null
+++ b/src/inspector/injected-script.cc
@@ -0,0 +1,581 @@
+/*
+ * Copyright (C) 2012 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "src/inspector/injected-script.h"
+
+#include "src/inspector/injected-script-native.h"
+#include "src/inspector/injected-script-source.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/remote-object-id.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console.h"
+#include "src/inspector/v8-function-call.h"
+#include "src/inspector/v8-injected-script-host.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+#include "src/inspector/v8-value-copier.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+using protocol::Array;
+using protocol::Runtime::PropertyDescriptor;
+using protocol::Runtime::InternalPropertyDescriptor;
+using protocol::Runtime::RemoteObject;
+using protocol::Maybe;
+
+static bool hasInternalError(ErrorString* errorString, bool hasError) {
+  if (hasError) *errorString = "Internal error";
+  return hasError;
+}
+
+std::unique_ptr<InjectedScript> InjectedScript::create(
+    InspectedContext* inspectedContext) {
+  v8::Isolate* isolate = inspectedContext->isolate();
+  v8::HandleScope handles(isolate);
+  v8::Local<v8::Context> context = inspectedContext->context();
+  v8::Context::Scope scope(context);
+
+  std::unique_ptr<InjectedScriptNative> injectedScriptNative(
+      new InjectedScriptNative(isolate));
+  v8::Local<v8::Object> scriptHostWrapper =
+      V8InjectedScriptHost::create(context, inspectedContext->inspector());
+  injectedScriptNative->setOnInjectedScriptHost(scriptHostWrapper);
+
+  // Inject JavaScript into the context. The compiled script is supposed to
+  // evaluate to a single anonymous function (it is anonymous to avoid
+  // cluttering the global object with the inspector's machinery). That
+  // function is called a few lines below with the InjectedScriptHost wrapper,
+  // an explicit reference to the inspected global object and the injected
+  // script id, and is expected to create and configure the InjectedScript
+  // instance that the inspector will use.
+  String16 injectedScriptSource(
+      reinterpret_cast<const char*>(InjectedScriptSource_js),
+      sizeof(InjectedScriptSource_js));
+  v8::Local<v8::Value> value;
+  if (!inspectedContext->inspector()
+           ->compileAndRunInternalScript(
+               context, toV8String(isolate, injectedScriptSource))
+           .ToLocal(&value))
+    return nullptr;
+  DCHECK(value->IsFunction());
+  v8::Local<v8::Function> function = v8::Local<v8::Function>::Cast(value);
+  v8::Local<v8::Object> windowGlobal = context->Global();
+  v8::Local<v8::Value> info[] = {
+      scriptHostWrapper, windowGlobal,
+      v8::Number::New(isolate, inspectedContext->contextId())};
+  v8::MicrotasksScope microtasksScope(isolate,
+                                      v8::MicrotasksScope::kDoNotRunMicrotasks);
+
+  int contextGroupId = inspectedContext->contextGroupId();
+  int contextId = inspectedContext->contextId();
+  V8InspectorImpl* inspector = inspectedContext->inspector();
+  v8::Local<v8::Value> injectedScriptValue;
+  if (!function->Call(context, windowGlobal, arraysize(info), info)
+           .ToLocal(&injectedScriptValue))
+    return nullptr;
+  if (inspector->getContext(contextGroupId, contextId) != inspectedContext)
+    return nullptr;
+  if (!injectedScriptValue->IsObject()) return nullptr;
+  return wrapUnique(new InjectedScript(inspectedContext,
+                                       injectedScriptValue.As<v8::Object>(),
+                                       std::move(injectedScriptNative)));
+}
+
+InjectedScript::InjectedScript(
+    InspectedContext* context, v8::Local<v8::Object> object,
+    std::unique_ptr<InjectedScriptNative> injectedScriptNative)
+    : m_context(context),
+      m_value(context->isolate(), object),
+      m_native(std::move(injectedScriptNative)) {}
+
+InjectedScript::~InjectedScript() {}
+
+void InjectedScript::getProperties(
+    ErrorString* errorString, v8::Local<v8::Object> object,
+    const String16& groupName, bool ownProperties, bool accessorPropertiesOnly,
+    bool generatePreview,
+    std::unique_ptr<Array<PropertyDescriptor>>* properties,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails) {
+  v8::HandleScope handles(m_context->isolate());
+  v8::Local<v8::Context> context = m_context->context();
+  V8FunctionCall function(m_context->inspector(), m_context->context(),
+                          v8Value(), "getProperties");
+  function.appendArgument(object);
+  function.appendArgument(groupName);
+  function.appendArgument(ownProperties);
+  function.appendArgument(accessorPropertiesOnly);
+  function.appendArgument(generatePreview);
+
+  v8::TryCatch tryCatch(m_context->isolate());
+  v8::Local<v8::Value> resultValue = function.callWithoutExceptionHandling();
+  if (tryCatch.HasCaught()) {
+    *exceptionDetails = createExceptionDetails(errorString, tryCatch, groupName,
+                                               generatePreview);
+    // FIXME: make properties optional
+    *properties = Array<PropertyDescriptor>::create();
+    return;
+  }
+  if (hasInternalError(errorString, resultValue.IsEmpty())) return;
+  std::unique_ptr<protocol::Value> protocolValue =
+      toProtocolValue(errorString, context, resultValue);
+  if (!protocolValue) return;
+  protocol::ErrorSupport errors(errorString);
+  std::unique_ptr<Array<PropertyDescriptor>> result =
+      Array<PropertyDescriptor>::parse(protocolValue.get(), &errors);
+  if (!hasInternalError(errorString, errors.hasErrors()))
+    *properties = std::move(result);
+}
+
+void InjectedScript::releaseObject(const String16& objectId) {
+  std::unique_ptr<protocol::Value> parsedObjectId =
+      protocol::parseJSON(objectId);
+  if (!parsedObjectId) return;
+  protocol::DictionaryValue* object =
+      protocol::DictionaryValue::cast(parsedObjectId.get());
+  if (!object) return;
+  int boundId = 0;
+  if (!object->getInteger("id", &boundId)) return;
+  m_native->unbind(boundId);
+}
+
+std::unique_ptr<protocol::Runtime::RemoteObject> InjectedScript::wrapObject(
+    ErrorString* errorString, v8::Local<v8::Value> value,
+    const String16& groupName, bool forceValueType,
+    bool generatePreview) const {
+  v8::HandleScope handles(m_context->isolate());
+  v8::Local<v8::Value> wrappedObject;
+  v8::Local<v8::Context> context = m_context->context();
+  if (!wrapValue(errorString, value, groupName, forceValueType, generatePreview)
+           .ToLocal(&wrappedObject))
+    return nullptr;
+  protocol::ErrorSupport errors;
+  std::unique_ptr<protocol::Value> protocolValue =
+      toProtocolValue(errorString, context, wrappedObject);
+  if (!protocolValue) return nullptr;
+  std::unique_ptr<protocol::Runtime::RemoteObject> remoteObject =
+      protocol::Runtime::RemoteObject::parse(protocolValue.get(), &errors);
+  if (!remoteObject) *errorString = errors.errors();
+  return remoteObject;
+}
+
+bool InjectedScript::wrapObjectProperty(ErrorString* errorString,
+                                        v8::Local<v8::Object> object,
+                                        v8::Local<v8::Name> key,
+                                        const String16& groupName,
+                                        bool forceValueType,
+                                        bool generatePreview) const {
+  v8::Local<v8::Value> property;
+  v8::Local<v8::Context> context = m_context->context();
+  if (hasInternalError(errorString,
+                       !object->Get(context, key).ToLocal(&property)))
+    return false;
+  v8::Local<v8::Value> wrappedProperty;
+  if (!wrapValue(errorString, property, groupName, forceValueType,
+                 generatePreview)
+           .ToLocal(&wrappedProperty))
+    return false;
+  v8::Maybe<bool> success =
+      createDataProperty(context, object, key, wrappedProperty);
+  if (hasInternalError(errorString, success.IsNothing() || !success.FromJust()))
+    return false;
+  return true;
+}
+
+bool InjectedScript::wrapPropertyInArray(ErrorString* errorString,
+                                         v8::Local<v8::Array> array,
+                                         v8::Local<v8::String> property,
+                                         const String16& groupName,
+                                         bool forceValueType,
+                                         bool generatePreview) const {
+  V8FunctionCall function(m_context->inspector(), m_context->context(),
+                          v8Value(), "wrapPropertyInArray");
+  function.appendArgument(array);
+  function.appendArgument(property);
+  function.appendArgument(groupName);
+  function.appendArgument(forceValueType);
+  function.appendArgument(generatePreview);
+  bool hadException = false;
+  function.call(hadException);
+  return !hasInternalError(errorString, hadException);
+}
+
+bool InjectedScript::wrapObjectsInArray(ErrorString* errorString,
+                                        v8::Local<v8::Array> array,
+                                        const String16& groupName,
+                                        bool forceValueType,
+                                        bool generatePreview) const {
+  V8FunctionCall function(m_context->inspector(), m_context->context(),
+                          v8Value(), "wrapObjectsInArray");
+  function.appendArgument(array);
+  function.appendArgument(groupName);
+  function.appendArgument(forceValueType);
+  function.appendArgument(generatePreview);
+  bool hadException = false;
+  function.call(hadException);
+  return !hasInternalError(errorString, hadException);
+}
+
+v8::MaybeLocal<v8::Value> InjectedScript::wrapValue(
+    ErrorString* errorString, v8::Local<v8::Value> value,
+    const String16& groupName, bool forceValueType,
+    bool generatePreview) const {
+  V8FunctionCall function(m_context->inspector(), m_context->context(),
+                          v8Value(), "wrapObject");
+  function.appendArgument(value);
+  function.appendArgument(groupName);
+  function.appendArgument(forceValueType);
+  function.appendArgument(generatePreview);
+  bool hadException = false;
+  v8::Local<v8::Value> r = function.call(hadException);
+  if (hasInternalError(errorString, hadException || r.IsEmpty()))
+    return v8::MaybeLocal<v8::Value>();
+  return r;
+}
+
+std::unique_ptr<protocol::Runtime::RemoteObject> InjectedScript::wrapTable(
+    v8::Local<v8::Value> table, v8::Local<v8::Value> columns) const {
+  v8::HandleScope handles(m_context->isolate());
+  v8::Local<v8::Context> context = m_context->context();
+  V8FunctionCall function(m_context->inspector(), context, v8Value(),
+                          "wrapTable");
+  function.appendArgument(table);
+  if (columns.IsEmpty())
+    function.appendArgument(false);
+  else
+    function.appendArgument(columns);
+  bool hadException = false;
+  v8::Local<v8::Value> r = function.call(hadException);
+  if (hadException || r.IsEmpty()) return nullptr;
+  protocol::ErrorString errorString;
+  std::unique_ptr<protocol::Value> protocolValue =
+      toProtocolValue(&errorString, context, r);
+  if (!protocolValue) return nullptr;
+  protocol::ErrorSupport errors;
+  return protocol::Runtime::RemoteObject::parse(protocolValue.get(), &errors);
+}
+
+bool InjectedScript::findObject(ErrorString* errorString,
+                                const RemoteObjectId& objectId,
+                                v8::Local<v8::Value>* outObject) const {
+  *outObject = m_native->objectForId(objectId.id());
+  if (outObject->IsEmpty())
+    *errorString = "Could not find object with given id";
+  return !outObject->IsEmpty();
+}
+
+String16 InjectedScript::objectGroupName(const RemoteObjectId& objectId) const {
+  return m_native->groupName(objectId.id());
+}
+
+void InjectedScript::releaseObjectGroup(const String16& objectGroup) {
+  m_native->releaseObjectGroup(objectGroup);
+  if (objectGroup == "console") m_lastEvaluationResult.Reset();
+}
+
+void InjectedScript::setCustomObjectFormatterEnabled(bool enabled) {
+  v8::HandleScope handles(m_context->isolate());
+  V8FunctionCall function(m_context->inspector(), m_context->context(),
+                          v8Value(), "setCustomObjectFormatterEnabled");
+  function.appendArgument(enabled);
+  bool hadException = false;
+  function.call(hadException);
+  DCHECK(!hadException);
+}
+
+v8::Local<v8::Value> InjectedScript::v8Value() const {
+  return m_value.Get(m_context->isolate());
+}
+
+v8::Local<v8::Value> InjectedScript::lastEvaluationResult() const {
+  if (m_lastEvaluationResult.IsEmpty())
+    return v8::Undefined(m_context->isolate());
+  return m_lastEvaluationResult.Get(m_context->isolate());
+}
+
+v8::MaybeLocal<v8::Value> InjectedScript::resolveCallArgument(
+    ErrorString* errorString, protocol::Runtime::CallArgument* callArgument) {
+  if (callArgument->hasObjectId()) {
+    std::unique_ptr<RemoteObjectId> remoteObjectId =
+        RemoteObjectId::parse(errorString, callArgument->getObjectId(""));
+    if (!remoteObjectId) return v8::MaybeLocal<v8::Value>();
+    if (remoteObjectId->contextId() != m_context->contextId()) {
+      *errorString =
+          "Argument should belong to the same JavaScript world as target "
+          "object";
+      return v8::MaybeLocal<v8::Value>();
+    }
+    v8::Local<v8::Value> object;
+    if (!findObject(errorString, *remoteObjectId, &object))
+      return v8::MaybeLocal<v8::Value>();
+    return object;
+  }
+  if (callArgument->hasValue() || callArgument->hasUnserializableValue()) {
+    String16 value =
+        callArgument->hasValue()
+            ? callArgument->getValue(nullptr)->toJSONString()
+            : "Number(\"" + callArgument->getUnserializableValue("") + "\")";
+    v8::Local<v8::Value> object;
+    if (!m_context->inspector()
+             ->compileAndRunInternalScript(
+                 m_context->context(), toV8String(m_context->isolate(), value))
+             .ToLocal(&object)) {
+      *errorString = "Couldn't parse value object in call argument";
+      return v8::MaybeLocal<v8::Value>();
+    }
+    return object;
+  }
+  return v8::Undefined(m_context->isolate());
+}
+
+std::unique_ptr<protocol::Runtime::ExceptionDetails>
+InjectedScript::createExceptionDetails(ErrorString* errorString,
+                                       const v8::TryCatch& tryCatch,
+                                       const String16& objectGroup,
+                                       bool generatePreview) {
+  if (!tryCatch.HasCaught()) return nullptr;
+  v8::Local<v8::Message> message = tryCatch.Message();
+  v8::Local<v8::Value> exception = tryCatch.Exception();
+  String16 messageText =
+      message.IsEmpty() ? String16() : toProtocolString(message->Get());
+  std::unique_ptr<protocol::Runtime::ExceptionDetails> exceptionDetails =
+      protocol::Runtime::ExceptionDetails::create()
+          .setExceptionId(m_context->inspector()->nextExceptionId())
+          .setText(exception.IsEmpty() ? messageText : String16("Uncaught"))
+          .setLineNumber(
+              message.IsEmpty()
+                  ? 0
+                  : message->GetLineNumber(m_context->context()).FromMaybe(1) -
+                        1)
+          .setColumnNumber(
+              message.IsEmpty()
+                  ? 0
+                  : message->GetStartColumn(m_context->context()).FromMaybe(0))
+          .build();
+  if (!message.IsEmpty()) {
+    exceptionDetails->setScriptId(String16::fromInteger(
+        static_cast<int>(message->GetScriptOrigin().ScriptID()->Value())));
+    v8::Local<v8::StackTrace> stackTrace = message->GetStackTrace();
+    if (!stackTrace.IsEmpty() && stackTrace->GetFrameCount() > 0)
+      exceptionDetails->setStackTrace(m_context->inspector()
+                                          ->debugger()
+                                          ->createStackTrace(stackTrace)
+                                          ->buildInspectorObjectImpl());
+  }
+  if (!exception.IsEmpty()) {
+    std::unique_ptr<protocol::Runtime::RemoteObject> wrapped = wrapObject(
+        errorString, exception, objectGroup, false /* forceValueType */,
+        generatePreview && !exception->IsNativeError());
+    if (!wrapped) return nullptr;
+    exceptionDetails->setException(std::move(wrapped));
+  }
+  return exceptionDetails;
+}
+
+void InjectedScript::wrapEvaluateResult(
+    ErrorString* errorString, v8::MaybeLocal<v8::Value> maybeResultValue,
+    const v8::TryCatch& tryCatch, const String16& objectGroup,
+    bool returnByValue, bool generatePreview,
+    std::unique_ptr<protocol::Runtime::RemoteObject>* result,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails) {
+  v8::Local<v8::Value> resultValue;
+  if (!tryCatch.HasCaught()) {
+    if (hasInternalError(errorString, !maybeResultValue.ToLocal(&resultValue)))
+      return;
+    std::unique_ptr<RemoteObject> remoteObject = wrapObject(
+        errorString, resultValue, objectGroup, returnByValue, generatePreview);
+    if (!remoteObject) return;
+    if (objectGroup == "console")
+      m_lastEvaluationResult.Reset(m_context->isolate(), resultValue);
+    *result = std::move(remoteObject);
+  } else {
+    v8::Local<v8::Value> exception = tryCatch.Exception();
+    std::unique_ptr<RemoteObject> remoteObject =
+        wrapObject(errorString, exception, objectGroup, false,
+                   generatePreview && !exception->IsNativeError());
+    if (!remoteObject) return;
+    // We send the exception in |result| for compatibility reasons, even though
+    // it is also accessible through exceptionDetails.exception.
+    *result = std::move(remoteObject);
+    *exceptionDetails = createExceptionDetails(errorString, tryCatch,
+                                               objectGroup, generatePreview);
+  }
+}
+
+v8::Local<v8::Object> InjectedScript::commandLineAPI() {
+  if (m_commandLineAPI.IsEmpty())
+    m_commandLineAPI.Reset(m_context->isolate(),
+                           V8Console::createCommandLineAPI(m_context));
+  return m_commandLineAPI.Get(m_context->isolate());
+}
+
+InjectedScript::Scope::Scope(ErrorString* errorString,
+                             V8InspectorImpl* inspector, int contextGroupId)
+    : m_errorString(errorString),
+      m_inspector(inspector),
+      m_contextGroupId(contextGroupId),
+      m_injectedScript(nullptr),
+      m_handleScope(inspector->isolate()),
+      m_tryCatch(inspector->isolate()),
+      m_ignoreExceptionsAndMuteConsole(false),
+      m_previousPauseOnExceptionsState(V8Debugger::DontPauseOnExceptions),
+      m_userGesture(false) {}
+
+bool InjectedScript::Scope::initialize() {
+  cleanup();
+  // TODO(dgozman): what if we reattach to the same context group during
+  // evaluate? Introduce a session id?
+  V8InspectorSessionImpl* session =
+      m_inspector->sessionForContextGroup(m_contextGroupId);
+  if (!session) {
+    *m_errorString = "Internal error";
+    return false;
+  }
+  findInjectedScript(session);
+  if (!m_injectedScript) return false;
+  m_context = m_injectedScript->context()->context();
+  m_context->Enter();
+  return true;
+}
+
+bool InjectedScript::Scope::installCommandLineAPI() {
+  DCHECK(m_injectedScript && !m_context.IsEmpty() &&
+         !m_commandLineAPIScope.get());
+  m_commandLineAPIScope.reset(new V8Console::CommandLineAPIScope(
+      m_context, m_injectedScript->commandLineAPI(), m_context->Global()));
+  return true;
+}
+
+void InjectedScript::Scope::ignoreExceptionsAndMuteConsole() {
+  DCHECK(!m_ignoreExceptionsAndMuteConsole);
+  m_ignoreExceptionsAndMuteConsole = true;
+  m_inspector->client()->muteMetrics(m_contextGroupId);
+  m_inspector->muteExceptions(m_contextGroupId);
+  m_previousPauseOnExceptionsState =
+      setPauseOnExceptionsState(V8Debugger::DontPauseOnExceptions);
+}
+
+V8Debugger::PauseOnExceptionsState
+InjectedScript::Scope::setPauseOnExceptionsState(
+    V8Debugger::PauseOnExceptionsState newState) {
+  if (!m_inspector->debugger()->enabled()) return newState;
+  V8Debugger::PauseOnExceptionsState presentState =
+      m_inspector->debugger()->getPauseOnExceptionsState();
+  if (presentState != newState)
+    m_inspector->debugger()->setPauseOnExceptionsState(newState);
+  return presentState;
+}
+
+void InjectedScript::Scope::pretendUserGesture() {
+  DCHECK(!m_userGesture);
+  m_userGesture = true;
+  m_inspector->client()->beginUserGesture();
+}
+
+void InjectedScript::Scope::cleanup() {
+  m_commandLineAPIScope.reset();
+  if (!m_context.IsEmpty()) {
+    m_context->Exit();
+    m_context.Clear();
+  }
+}
+
+InjectedScript::Scope::~Scope() {
+  if (m_ignoreExceptionsAndMuteConsole) {
+    setPauseOnExceptionsState(m_previousPauseOnExceptionsState);
+    m_inspector->client()->unmuteMetrics(m_contextGroupId);
+    m_inspector->unmuteExceptions(m_contextGroupId);
+  }
+  if (m_userGesture) m_inspector->client()->endUserGesture();
+  cleanup();
+}
+
+InjectedScript::ContextScope::ContextScope(ErrorString* errorString,
+                                           V8InspectorImpl* inspector,
+                                           int contextGroupId,
+                                           int executionContextId)
+    : InjectedScript::Scope(errorString, inspector, contextGroupId),
+      m_executionContextId(executionContextId) {}
+
+InjectedScript::ContextScope::~ContextScope() {}
+
+void InjectedScript::ContextScope::findInjectedScript(
+    V8InspectorSessionImpl* session) {
+  m_injectedScript =
+      session->findInjectedScript(m_errorString, m_executionContextId);
+}
+
+InjectedScript::ObjectScope::ObjectScope(ErrorString* errorString,
+                                         V8InspectorImpl* inspector,
+                                         int contextGroupId,
+                                         const String16& remoteObjectId)
+    : InjectedScript::Scope(errorString, inspector, contextGroupId),
+      m_remoteObjectId(remoteObjectId) {}
+
+InjectedScript::ObjectScope::~ObjectScope() {}
+
+void InjectedScript::ObjectScope::findInjectedScript(
+    V8InspectorSessionImpl* session) {
+  std::unique_ptr<RemoteObjectId> remoteId =
+      RemoteObjectId::parse(m_errorString, m_remoteObjectId);
+  if (!remoteId) return;
+  InjectedScript* injectedScript =
+      session->findInjectedScript(m_errorString, remoteId.get());
+  if (!injectedScript) return;
+  m_objectGroupName = injectedScript->objectGroupName(*remoteId);
+  if (!injectedScript->findObject(m_errorString, *remoteId, &m_object)) return;
+  m_injectedScript = injectedScript;
+}
+
+InjectedScript::CallFrameScope::CallFrameScope(ErrorString* errorString,
+                                               V8InspectorImpl* inspector,
+                                               int contextGroupId,
+                                               const String16& remoteObjectId)
+    : InjectedScript::Scope(errorString, inspector, contextGroupId),
+      m_remoteCallFrameId(remoteObjectId) {}
+
+InjectedScript::CallFrameScope::~CallFrameScope() {}
+
+void InjectedScript::CallFrameScope::findInjectedScript(
+    V8InspectorSessionImpl* session) {
+  std::unique_ptr<RemoteCallFrameId> remoteId =
+      RemoteCallFrameId::parse(m_errorString, m_remoteCallFrameId);
+  if (!remoteId) return;
+  m_frameOrdinal = static_cast<size_t>(remoteId->frameOrdinal());
+  m_injectedScript = session->findInjectedScript(m_errorString, remoteId.get());
+}
+
+}  // namespace v8_inspector
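
InjectedScript::create() above is not called directly by embedders; it runs lazily once a debugging session needs to describe objects in a context that was registered through the public v8-inspector API. A rough sketch of that registration, assuming the public header is available as <v8-inspector.h>, with a hypothetical context group id of 1 and illustrative names (exact signatures in this revision may differ slightly):

    #include <v8-inspector.h>
    #include <v8.h>

    #include <cstring>
    #include <memory>

    // V8InspectorClient's virtual methods have default implementations, so an
    // empty subclass should be enough for a sketch.
    class SketchInspectorClient : public v8_inspector::V8InspectorClient {};

    std::unique_ptr<v8_inspector::V8Inspector> attachInspector(
        v8::Isolate* isolate, v8::Local<v8::Context> context,
        v8_inspector::V8InspectorClient* client) {
      std::unique_ptr<v8_inspector::V8Inspector> inspector =
          v8_inspector::V8Inspector::create(isolate, client);

      const char* name = "sketch-context";  // hypothetical name
      v8_inspector::StringView humanReadableName(
          reinterpret_cast<const uint8_t*>(name), std::strlen(name));

      // contextCreated() is what ultimately constructs an InspectedContext;
      // InjectedScript::create() then runs the first time a session needs it.
      inspector->contextCreated(v8_inspector::V8ContextInfo(
          context, /*contextGroupId=*/1, humanReadableName));
      return inspector;
    }
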
diff --git a/src/inspector/injected-script.h b/src/inspector/injected-script.h
new file mode 100644
index 0000000..9b324c9
--- /dev/null
+++ b/src/inspector/injected-script.h
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2012 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_INJECTEDSCRIPT_H_
+#define V8_INSPECTOR_INJECTEDSCRIPT_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/injected-script-native.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+#include "src/inspector/v8-console.h"
+#include "src/inspector/v8-debugger.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class RemoteObjectId;
+class V8FunctionCall;
+class V8InspectorImpl;
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+using protocol::Maybe;
+
+class InjectedScript final {
+ public:
+  static std::unique_ptr<InjectedScript> create(InspectedContext*);
+  ~InjectedScript();
+
+  InspectedContext* context() const { return m_context; }
+
+  void getProperties(
+      ErrorString*, v8::Local<v8::Object>, const String16& groupName,
+      bool ownProperties, bool accessorPropertiesOnly, bool generatePreview,
+      std::unique_ptr<protocol::Array<protocol::Runtime::PropertyDescriptor>>*
+          result,
+      Maybe<protocol::Runtime::ExceptionDetails>*);
+  void releaseObject(const String16& objectId);
+
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapObject(
+      ErrorString*, v8::Local<v8::Value>, const String16& groupName,
+      bool forceValueType = false, bool generatePreview = false) const;
+  bool wrapObjectProperty(ErrorString*, v8::Local<v8::Object>,
+                          v8::Local<v8::Name> key, const String16& groupName,
+                          bool forceValueType = false,
+                          bool generatePreview = false) const;
+  bool wrapPropertyInArray(ErrorString*, v8::Local<v8::Array>,
+                           v8::Local<v8::String> property,
+                           const String16& groupName,
+                           bool forceValueType = false,
+                           bool generatePreview = false) const;
+  bool wrapObjectsInArray(ErrorString*, v8::Local<v8::Array>,
+                          const String16& groupName,
+                          bool forceValueType = false,
+                          bool generatePreview = false) const;
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapTable(
+      v8::Local<v8::Value> table, v8::Local<v8::Value> columns) const;
+
+  bool findObject(ErrorString*, const RemoteObjectId&,
+                  v8::Local<v8::Value>*) const;
+  String16 objectGroupName(const RemoteObjectId&) const;
+  void releaseObjectGroup(const String16&);
+  void setCustomObjectFormatterEnabled(bool);
+  v8::MaybeLocal<v8::Value> resolveCallArgument(
+      ErrorString*, protocol::Runtime::CallArgument*);
+
+  std::unique_ptr<protocol::Runtime::ExceptionDetails> createExceptionDetails(
+      ErrorString*, const v8::TryCatch&, const String16& groupName,
+      bool generatePreview);
+  void wrapEvaluateResult(
+      ErrorString*, v8::MaybeLocal<v8::Value> maybeResultValue,
+      const v8::TryCatch&, const String16& objectGroup, bool returnByValue,
+      bool generatePreview,
+      std::unique_ptr<protocol::Runtime::RemoteObject>* result,
+      Maybe<protocol::Runtime::ExceptionDetails>*);
+  v8::Local<v8::Value> lastEvaluationResult() const;
+
+  class Scope {
+   public:
+    bool initialize();
+    bool installCommandLineAPI();
+    void ignoreExceptionsAndMuteConsole();
+    void pretendUserGesture();
+    v8::Local<v8::Context> context() const { return m_context; }
+    InjectedScript* injectedScript() const { return m_injectedScript; }
+    const v8::TryCatch& tryCatch() const { return m_tryCatch; }
+
+   protected:
+    Scope(ErrorString*, V8InspectorImpl*, int contextGroupId);
+    virtual ~Scope();
+    virtual void findInjectedScript(V8InspectorSessionImpl*) = 0;
+
+    ErrorString* m_errorString;
+    V8InspectorImpl* m_inspector;
+    int m_contextGroupId;
+    InjectedScript* m_injectedScript;
+
+   private:
+    void cleanup();
+    V8Debugger::PauseOnExceptionsState setPauseOnExceptionsState(
+        V8Debugger::PauseOnExceptionsState);
+
+    v8::HandleScope m_handleScope;
+    v8::TryCatch m_tryCatch;
+    v8::Local<v8::Context> m_context;
+    std::unique_ptr<V8Console::CommandLineAPIScope> m_commandLineAPIScope;
+    bool m_ignoreExceptionsAndMuteConsole;
+    V8Debugger::PauseOnExceptionsState m_previousPauseOnExceptionsState;
+    bool m_userGesture;
+  };
+
+  class ContextScope : public Scope {
+   public:
+    ContextScope(ErrorString*, V8InspectorImpl*, int contextGroupId,
+                 int executionContextId);
+    ~ContextScope();
+
+   private:
+    void findInjectedScript(V8InspectorSessionImpl*) override;
+    int m_executionContextId;
+
+    DISALLOW_COPY_AND_ASSIGN(ContextScope);
+  };
+
+  class ObjectScope : public Scope {
+   public:
+    ObjectScope(ErrorString*, V8InspectorImpl*, int contextGroupId,
+                const String16& remoteObjectId);
+    ~ObjectScope();
+    const String16& objectGroupName() const { return m_objectGroupName; }
+    v8::Local<v8::Value> object() const { return m_object; }
+
+   private:
+    void findInjectedScript(V8InspectorSessionImpl*) override;
+    String16 m_remoteObjectId;
+    String16 m_objectGroupName;
+    v8::Local<v8::Value> m_object;
+
+    DISALLOW_COPY_AND_ASSIGN(ObjectScope);
+  };
+
+  class CallFrameScope : public Scope {
+   public:
+    CallFrameScope(ErrorString*, V8InspectorImpl*, int contextGroupId,
+                   const String16& remoteCallFrameId);
+    ~CallFrameScope();
+    size_t frameOrdinal() const { return m_frameOrdinal; }
+
+   private:
+    void findInjectedScript(V8InspectorSessionImpl*) override;
+    String16 m_remoteCallFrameId;
+    size_t m_frameOrdinal;
+
+    DISALLOW_COPY_AND_ASSIGN(CallFrameScope);
+  };
+
+ private:
+  InjectedScript(InspectedContext*, v8::Local<v8::Object>,
+                 std::unique_ptr<InjectedScriptNative>);
+  v8::Local<v8::Value> v8Value() const;
+  v8::MaybeLocal<v8::Value> wrapValue(ErrorString*, v8::Local<v8::Value>,
+                                      const String16& groupName,
+                                      bool forceValueType,
+                                      bool generatePreview) const;
+  v8::Local<v8::Object> commandLineAPI();
+
+  InspectedContext* m_context;
+  v8::Global<v8::Value> m_value;
+  v8::Global<v8::Value> m_lastEvaluationResult;
+  std::unique_ptr<InjectedScriptNative> m_native;
+  v8::Global<v8::Object> m_commandLineAPI;
+
+  DISALLOW_COPY_AND_ASSIGN(InjectedScript);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_INJECTEDSCRIPT_H_
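
The Scope/ContextScope/ObjectScope/CallFrameScope classes declared above are RAII helpers for the protocol agents: they enter the inspected context, set up a v8::TryCatch, and resolve the right InjectedScript. A minimal sketch of the intended usage, using only members declared in this header; the function and variable names are illustrative, and it assumes String16 converts from a C string literal as it does elsewhere in this code.

    #include "src/inspector/injected-script.h"

    #include <memory>

    // Sketch of the agent-side pattern, not actual agent code.
    void sketchWrapValue(v8_inspector::V8InspectorImpl* inspector,
                         int contextGroupId, int executionContextId,
                         v8::Local<v8::Value> value) {
      v8_inspector::ErrorString errorString;
      // Enters the context for |executionContextId| and installs a TryCatch.
      v8_inspector::InjectedScript::ContextScope scope(
          &errorString, inspector, contextGroupId, executionContextId);
      if (!scope.initialize()) return;  // errorString describes the failure

      scope.installCommandLineAPI();  // optional: expose console helpers

      // Wrap a V8 value into a protocol RemoteObject while inside the scope.
      std::unique_ptr<v8_inspector::protocol::Runtime::RemoteObject> wrapped =
          scope.injectedScript()->wrapObject(&errorString, value, "console");
      if (!wrapped) {
        // errorString explains why wrapping failed.
      }
    }
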
diff --git a/src/inspector/injected_script_externs.js b/src/inspector/injected_script_externs.js
new file mode 100644
index 0000000..b6339c6
--- /dev/null
+++ b/src/inspector/injected_script_externs.js
@@ -0,0 +1,66 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/** @interface */
+function InjectedScriptHostClass()
+{
+}
+
+/**
+ * @param {*} obj
+ * @return {string}
+ */
+InjectedScriptHostClass.prototype.internalConstructorName = function(obj) {}
+
+/**
+ * @param {*} obj
+ * @param {function()|undefined} func
+ * @return {boolean}
+ */
+InjectedScriptHostClass.prototype.formatAccessorsAsProperties = function(obj, func) {}
+
+/**
+ * @param {*} obj
+ * @return {string}
+ */
+InjectedScriptHostClass.prototype.subtype = function(obj) {}
+
+/**
+ * @param {*} obj
+ * @return {boolean}
+ */
+InjectedScriptHostClass.prototype.isTypedArray = function(obj) {}
+
+/**
+ * @param {*} obj
+ * @return {!Array.<*>}
+ */
+InjectedScriptHostClass.prototype.getInternalProperties = function(obj) {}
+
+/**
+ * @param {!Object} object
+ * @param {string} propertyName
+ * @return {boolean}
+ */
+InjectedScriptHostClass.prototype.objectHasOwnProperty = function(object, propertyName) {}
+
+/**
+ * @param {*} value
+ * @param {string} groupName
+ * @return {number}
+ */
+InjectedScriptHostClass.prototype.bind = function(value, groupName) {}
+
+/**
+ * @param {!Object} object
+ * @return {!Object}
+ */
+InjectedScriptHostClass.prototype.proxyTargetValue = function(object) {}
+
+/** @type {!InjectedScriptHostClass} */
+var InjectedScriptHost;
+/** @type {!Window} */
+var inspectedGlobalObject;
+/** @type {number} */
+var injectedScriptId;
diff --git a/src/inspector/inspected-context.cc b/src/inspector/inspected-context.cc
new file mode 100644
index 0000000..9100f64
--- /dev/null
+++ b/src/inspector/inspected-context.cc
@@ -0,0 +1,88 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/inspected-context.h"
+
+#include "src/inspector/injected-script.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-value-copier.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+void InspectedContext::weakCallback(
+    const v8::WeakCallbackInfo<InspectedContext>& data) {
+  InspectedContext* context = data.GetParameter();
+  if (!context->m_context.IsEmpty()) {
+    context->m_context.Reset();
+    data.SetSecondPassCallback(&InspectedContext::weakCallback);
+  } else {
+    context->m_inspector->discardInspectedContext(context->m_contextGroupId,
+                                                  context->m_contextId);
+  }
+}
+
+void InspectedContext::consoleWeakCallback(
+    const v8::WeakCallbackInfo<InspectedContext>& data) {
+  data.GetParameter()->m_console.Reset();
+}
+
+InspectedContext::InspectedContext(V8InspectorImpl* inspector,
+                                   const V8ContextInfo& info, int contextId)
+    : m_inspector(inspector),
+      m_context(info.context->GetIsolate(), info.context),
+      m_contextId(contextId),
+      m_contextGroupId(info.contextGroupId),
+      m_origin(toString16(info.origin)),
+      m_humanReadableName(toString16(info.humanReadableName)),
+      m_auxData(toString16(info.auxData)),
+      m_reported(false) {
+  m_context.SetWeak(this, &InspectedContext::weakCallback,
+                    v8::WeakCallbackType::kParameter);
+
+  v8::Isolate* isolate = m_inspector->isolate();
+  v8::Local<v8::Object> global = info.context->Global();
+  v8::Local<v8::Object> console =
+      V8Console::createConsole(this, info.hasMemoryOnConsole);
+  if (!global
+           ->Set(info.context, toV8StringInternalized(isolate, "console"),
+                 console)
+           .FromMaybe(false))
+    return;
+  m_console.Reset(isolate, console);
+  m_console.SetWeak(this, &InspectedContext::consoleWeakCallback,
+                    v8::WeakCallbackType::kParameter);
+}
+
+InspectedContext::~InspectedContext() {
+  if (!m_context.IsEmpty() && !m_console.IsEmpty()) {
+    v8::HandleScope scope(isolate());
+    V8Console::clearInspectedContextIfNeeded(context(),
+                                             m_console.Get(isolate()));
+  }
+}
+
+v8::Local<v8::Context> InspectedContext::context() const {
+  return m_context.Get(isolate());
+}
+
+v8::Isolate* InspectedContext::isolate() const {
+  return m_inspector->isolate();
+}
+
+bool InspectedContext::createInjectedScript() {
+  DCHECK(!m_injectedScript);
+  std::unique_ptr<InjectedScript> injectedScript = InjectedScript::create(this);
+  // InjectedScript::create can destroy |this|.
+  if (!injectedScript) return false;
+  m_injectedScript = std::move(injectedScript);
+  return true;
+}
+
+void InspectedContext::discardInjectedScript() { m_injectedScript.reset(); }
+
+}  // namespace v8_inspector
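
InspectedContext::weakCallback() above relies on V8's two-pass weak-callback protocol: the first pass may only reset the handle and can schedule a second pass via SetSecondPassCallback(), and the heavier cleanup (here, discarding the context from the inspector) is deferred to that second pass. Below is a self-contained sketch of the same pattern with illustrative names, split into two named callbacks where the code above reuses a single function for both passes.

    #include <v8.h>

    // Holds a weak v8::Global and deletes itself once the object is collected.
    struct WeakHolder {
      v8::Global<v8::Object> handle;

      static void FirstPass(const v8::WeakCallbackInfo<WeakHolder>& data) {
        data.GetParameter()->handle.Reset();      // pass 1: only drop the handle
        data.SetSecondPassCallback(&SecondPass);  // defer the real cleanup
      }

      static void SecondPass(const v8::WeakCallbackInfo<WeakHolder>& data) {
        delete data.GetParameter();  // pass 2: tear down embedder state
      }
    };

    void trackWeakly(v8::Isolate* isolate, v8::Local<v8::Object> object) {
      WeakHolder* holder = new WeakHolder();
      holder->handle.Reset(isolate, object);
      holder->handle.SetWeak(holder, &WeakHolder::FirstPass,
                             v8::WeakCallbackType::kParameter);
    }
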
diff --git a/src/inspector/inspected-context.h b/src/inspector/inspected-context.h
new file mode 100644
index 0000000..d8e72cc
--- /dev/null
+++ b/src/inspector/inspected-context.h
@@ -0,0 +1,64 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_INSPECTEDCONTEXT_H_
+#define V8_INSPECTOR_INSPECTEDCONTEXT_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/string-16.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class InjectedScript;
+class InjectedScriptHost;
+class V8ContextInfo;
+class V8InspectorImpl;
+
+class InspectedContext {
+ public:
+  ~InspectedContext();
+
+  v8::Local<v8::Context> context() const;
+  int contextId() const { return m_contextId; }
+  int contextGroupId() const { return m_contextGroupId; }
+  String16 origin() const { return m_origin; }
+  String16 humanReadableName() const { return m_humanReadableName; }
+  String16 auxData() const { return m_auxData; }
+
+  bool isReported() const { return m_reported; }
+  void setReported(bool reported) { m_reported = reported; }
+
+  v8::Isolate* isolate() const;
+  V8InspectorImpl* inspector() const { return m_inspector; }
+
+  InjectedScript* getInjectedScript() { return m_injectedScript.get(); }
+  bool createInjectedScript();
+  void discardInjectedScript();
+
+ private:
+  friend class V8InspectorImpl;
+  InspectedContext(V8InspectorImpl*, const V8ContextInfo&, int contextId);
+  static void weakCallback(const v8::WeakCallbackInfo<InspectedContext>&);
+  static void consoleWeakCallback(
+      const v8::WeakCallbackInfo<InspectedContext>&);
+
+  V8InspectorImpl* m_inspector;
+  v8::Global<v8::Context> m_context;
+  int m_contextId;
+  int m_contextGroupId;
+  const String16 m_origin;
+  const String16 m_humanReadableName;
+  const String16 m_auxData;
+  bool m_reported;
+  std::unique_ptr<InjectedScript> m_injectedScript;
+  v8::Global<v8::Object> m_console;
+
+  DISALLOW_COPY_AND_ASSIGN(InspectedContext);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_INSPECTEDCONTEXT_H_
diff --git a/src/inspector/inspector.gyp b/src/inspector/inspector.gyp
index 5fc49b1..2d5c7a5 100644
--- a/src/inspector/inspector.gyp
+++ b/src/inspector/inspector.gyp
@@ -2,112 +2,108 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-{ 'variables': {
-    'protocol_path': '../../third_party/WebKit/Source/platform/inspector_protocol',
-    'protocol_sources': [
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Console.cpp',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Console.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Debugger.cpp',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Debugger.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/HeapProfiler.cpp',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/HeapProfiler.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Profiler.cpp',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Profiler.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/public/Debugger.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/public/Runtime.h',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Runtime.cpp',
-      '<(SHARED_INTERMEDIATE_DIR)/inspector/Runtime.h',
-    ]
+{
+  'variables': {
+    'protocol_path': '<(PRODUCT_DIR)/../../third_party/WebKit/Source/platform/inspector_protocol',
   },
+  'includes': [
+    'inspector.gypi',
+    '<(PRODUCT_DIR)/../../../third_party/WebKit/Source/platform/inspector_protocol/inspector_protocol.gypi',
+  ],
   'targets': [
-    { 'target_name': 'inspector_protocol_sources',
+    { 'target_name': 'inspector_injected_script',
       'type': 'none',
-      'variables': {
-        'jinja_module_files': [
-          # jinja2/__init__.py contains version string, so sufficient for package
-          '../third_party/jinja2/__init__.py',
-          '../third_party/markupsafe/__init__.py',  # jinja2 dep
-        ]
-      },
       'actions': [
         {
-          'action_name': 'generate_inspector_protocol_sources',
+          'action_name': 'convert_js_to_cpp_char_array',
           'inputs': [
-            # Source generator script.
-            '<(protocol_path)/CodeGenerator.py',
-            # Source code templates.
-            '<(protocol_path)/Exported_h.template',
-            '<(protocol_path)/Imported_h.template',
-            '<(protocol_path)/TypeBuilder_h.template',
-            '<(protocol_path)/TypeBuilder_cpp.template',
-            # Protocol definition.
+            'build/xxd.py',
+            '<(inspector_injected_script_source)',
+          ],
+          'outputs': [
+            '<(inspector_generated_injected_script)',
+          ],
+          'action': [
+            'python',
+            'build/xxd.py',
+            'InjectedScriptSource_js',
+            'injected-script-source.js',
+            '<@(_outputs)'
+          ],
+        },
+      ],
+      # Since this target generates header files, it needs to be a hard dependency.
+      'hard_dependency': 1,
+    },
+    { 'target_name': 'inspector_debugger_script',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'convert_js_to_cpp_char_array',
+          'inputs': [
+            'build/xxd.py',
+            '<(inspector_debugger_script_source)',
+          ],
+          'outputs': [
+            '<(inspector_generated_debugger_script)',
+          ],
+          'action': [
+            'python',
+            'build/xxd.py',
+            'DebuggerScript_js',
+            'debugger-script.js',
+            '<@(_outputs)'
+          ],
+        },
+      ],
+      # Since this target generates header files, it needs to be a hard dependency.
+      'hard_dependency': 1,
+    },
+    { 'target_name': 'protocol_compatibility',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'protocol_compatibility',
+          'inputs': [
             'js_protocol.json',
           ],
           'outputs': [
-            '<@(protocol_sources)',
+            '<@(SHARED_INTERMEDIATE_DIR)/src/js_protocol.stamp',
+          ],
+          'action': [
+            'python',
+            '<(protocol_path)/CheckProtocolCompatibility.py',
+            '--stamp', '<@(_outputs)',
+            'js_protocol.json',
+          ],
+          'message': 'Generating inspector protocol sources from protocol json definition',
+        },
+      ]
+    },
+    { 'target_name': 'protocol_generated_sources',
+      'type': 'none',
+      'dependencies': [ 'protocol_compatibility' ],
+      'actions': [
+        {
+          'action_name': 'protocol_generated_sources',
+          'inputs': [
+            'js_protocol.json',
+            'inspector_protocol_config.json',
+            '<@(inspector_protocol_files)',
+          ],
+          'outputs': [
+            '<@(inspector_generated_sources)',
           ],
           'action': [
             'python',
             '<(protocol_path)/CodeGenerator.py',
-            '--protocol', 'js_protocol.json',
-            '--string_type', 'String16',
-            '--export_macro', 'PLATFORM_EXPORT',
-            '--output_dir', '<(SHARED_INTERMEDIATE_DIR)/inspector',
-            '--output_package', 'inspector',
-            '--exported_dir', '<(SHARED_INTERMEDIATE_DIR)/inspector/public',
-            '--exported_package', 'inspector/public',
+            '--jinja_dir', '<(PRODUCT_DIR)/../../third_party',
+            '--output_base', '<(SHARED_INTERMEDIATE_DIR)/src/inspector',
+            '--config', 'inspector_protocol_config.json',
           ],
-          'message': 'Generating Inspector protocol backend sources from json definitions',
+          'message': 'Generating inspector protocol sources from protocol json',
         },
       ]
     },
-    { 'target_name': 'inspector_protocol',
-      'type': 'static_library',
-      'dependencies': [
-        'inspector_protocol_sources',
-      ],
-      'include_dirs+': [
-        '<(protocol_path)/../..',
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'defines': [
-        'V8_INSPECTOR_USE_STL',
-      ],
-      'msvs_disabled_warnings': [
-        4267,  # Truncation from size_t to int.
-        4305,  # Truncation from 'type1' to 'type2'.
-        4324,  # Struct padded due to declspec(align).
-        4714,  # Function marked forceinline not inlined.
-        4800,  # Value forced to bool.
-        4996,  # Deprecated function call.
-      ],
-      'sources': [
-        '<@(protocol_sources)',
-        '<(protocol_path)/Allocator.h',
-        '<(protocol_path)/Array.h',
-        '<(protocol_path)/BackendCallback.h',
-        '<(protocol_path)/CodeGenerator.py',
-        '<(protocol_path)/Collections.h',
-        '<(protocol_path)/DispatcherBase.cpp',
-        '<(protocol_path)/DispatcherBase.h',
-        '<(protocol_path)/ErrorSupport.cpp',
-        '<(protocol_path)/ErrorSupport.h',
-        '<(protocol_path)/FrontendChannel.h',
-        '<(protocol_path)/Maybe.h',
-        '<(protocol_path)/Object.cpp',
-        '<(protocol_path)/Object.h',
-        '<(protocol_path)/Parser.cpp',
-        '<(protocol_path)/Parser.h',
-        '<(protocol_path)/Platform.h',
-        '<(protocol_path)/PlatformSTL.h',
-        '<(protocol_path)/String16.cpp',
-        '<(protocol_path)/String16.h',
-        '<(protocol_path)/String16STL.cpp',
-        '<(protocol_path)/String16STL.h',
-        '<(protocol_path)/ValueConversions.h',
-        '<(protocol_path)/Values.cpp',
-        '<(protocol_path)/Values.h',
-      ]
-    },
   ],
 }
diff --git a/src/inspector/inspector.gypi b/src/inspector/inspector.gypi
new file mode 100644
index 0000000..863c038
--- /dev/null
+++ b/src/inspector/inspector.gypi
@@ -0,0 +1,95 @@
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'inspector_generated_sources': [
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Forward.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Protocol.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Protocol.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Console.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Console.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Debugger.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Debugger.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/HeapProfiler.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/HeapProfiler.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Profiler.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Profiler.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Runtime.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Runtime.h',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Schema.cpp',
+      '<(SHARED_INTERMEDIATE_DIR)/src/inspector/protocol/Schema.h',
+      '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Debugger.h',
+      '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Runtime.h',
+      '<(SHARED_INTERMEDIATE_DIR)/include/inspector/Schema.h',
+    ],
+
+    'inspector_injected_script_source': 'injected-script-source.js',
+    'inspector_generated_injected_script': '<(SHARED_INTERMEDIATE_DIR)/src/inspector/injected-script-source.h',
+    'inspector_debugger_script_source': 'debugger-script.js',
+    'inspector_generated_debugger_script': '<(SHARED_INTERMEDIATE_DIR)/src/inspector/debugger-script.h',
+
+    'inspector_all_sources': [
+      '<@(inspector_generated_sources)',
+      '<(inspector_generated_injected_script)',
+      '<(inspector_generated_debugger_script)',
+      '../../include/v8-inspector.h',
+      '../../include/v8-inspector-protocol.h',
+      'inspector/injected-script.cc',
+      'inspector/injected-script.h',
+      'inspector/injected-script-native.cc',
+      'inspector/injected-script-native.h',
+      'inspector/inspected-context.cc',
+      'inspector/inspected-context.h',
+      'inspector/java-script-call-frame.cc',
+      'inspector/java-script-call-frame.h',
+      'inspector/protocol-platform.h',
+      'inspector/remote-object-id.cc',
+      'inspector/remote-object-id.h',
+      'inspector/script-breakpoint.h',
+      'inspector/search-util.cc',
+      'inspector/search-util.h',
+      'inspector/string-16.cc',
+      'inspector/string-16.h',
+      'inspector/string-util.cc',
+      'inspector/string-util.h',
+      'inspector/v8-console.cc',
+      'inspector/v8-console.h',
+      'inspector/v8-console-agent-impl.cc',
+      'inspector/v8-console-agent-impl.h',
+      'inspector/v8-console-message.cc',
+      'inspector/v8-console-message.h',
+      'inspector/v8-debugger.cc',
+      'inspector/v8-debugger.h',
+      'inspector/v8-debugger-agent-impl.cc',
+      'inspector/v8-debugger-agent-impl.h',
+      'inspector/v8-debugger-script.cc',
+      'inspector/v8-debugger-script.h',
+      'inspector/v8-function-call.cc',
+      'inspector/v8-function-call.h',
+      'inspector/v8-heap-profiler-agent-impl.cc',
+      'inspector/v8-heap-profiler-agent-impl.h',
+      'inspector/v8-injected-script-host.cc',
+      'inspector/v8-injected-script-host.h',
+      'inspector/v8-inspector-impl.cc',
+      'inspector/v8-inspector-impl.h',
+      'inspector/v8-inspector-session-impl.cc',
+      'inspector/v8-inspector-session-impl.h',
+      'inspector/v8-internal-value-type.cc',
+      'inspector/v8-internal-value-type.h',
+      'inspector/v8-profiler-agent-impl.cc',
+      'inspector/v8-profiler-agent-impl.h',
+      'inspector/v8-regex.cc',
+      'inspector/v8-regex.h',
+      'inspector/v8-runtime-agent-impl.cc',
+      'inspector/v8-runtime-agent-impl.h',
+      'inspector/v8-schema-agent-impl.cc',
+      'inspector/v8-schema-agent-impl.h',
+      'inspector/v8-stack-trace-impl.cc',
+      'inspector/v8-stack-trace-impl.h',
+      'inspector/v8-value-copier.cc',
+      'inspector/v8-value-copier.h',
+    ]
+  }
+}
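
For orientation (not part of this patch): the convert_js_to_cpp_char_array actions hand build/xxd.py a symbol name and a .js file, and the generated header listed above (e.g. <(SHARED_INTERMEDIATE_DIR)/src/inspector/injected-script-source.h) embeds the script as a character array under that symbol. The shape below is an approximation only; the real file is fully generated and its exact layout may differ.

// Approximate shape of the generated injected-script-source.h (illustrative).
const char InjectedScriptSource_js[] = {
    // Placeholder bytes; the real array holds every byte of
    // injected-script-source.js.
    0x2f, 0x2a, 0x20, 0x2e, 0x2e, 0x2e, 0x20, 0x2a, 0x2f, 0x00,
};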
diff --git a/src/inspector/inspector_protocol_config.json b/src/inspector/inspector_protocol_config.json
new file mode 100644
index 0000000..cb9e669
--- /dev/null
+++ b/src/inspector/inspector_protocol_config.json
@@ -0,0 +1,25 @@
+{
+    "protocol": {
+        "path": "js_protocol.json",
+        "package": "src/inspector/protocol",
+        "output": "protocol",
+        "namespace": ["v8_inspector", "protocol"]
+    },
+
+    "exported": {
+        "package": "include/inspector",
+        "output": "../../include/inspector",
+        "string_header": "v8-inspector.h",
+        "string_in": "StringView",
+        "string_out": "std::unique_ptr<StringBuffer>",
+        "to_string_out": "StringBufferImpl::adopt(%s)",
+        "export_macro": "V8_EXPORT"
+    },
+
+    "lib": {
+        "package": "src/inspector/protocol",
+        "output": "protocol",
+        "string_header": "src/inspector/string-util.h",
+        "platform_header": "src/inspector/protocol-platform.h"
+    }
+}
diff --git a/src/inspector/java-script-call-frame.cc b/src/inspector/java-script-call-frame.cc
new file mode 100644
index 0000000..b70af21
--- /dev/null
+++ b/src/inspector/java-script-call-frame.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2010, Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "src/inspector/java-script-call-frame.h"
+
+#include "src/inspector/string-util.h"
+
+#include "include/v8-debug.h"
+
+namespace v8_inspector {
+
+JavaScriptCallFrame::JavaScriptCallFrame(v8::Local<v8::Context> debuggerContext,
+                                         v8::Local<v8::Object> callFrame)
+    : m_isolate(debuggerContext->GetIsolate()),
+      m_debuggerContext(m_isolate, debuggerContext),
+      m_callFrame(m_isolate, callFrame) {}
+
+JavaScriptCallFrame::~JavaScriptCallFrame() {}
+
+int JavaScriptCallFrame::callV8FunctionReturnInt(const char* name) const {
+  v8::HandleScope handleScope(m_isolate);
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Function> func = v8::Local<v8::Function>::Cast(
+      callFrame->Get(context, toV8StringInternalized(m_isolate, name))
+          .ToLocalChecked());
+  v8::Local<v8::Value> result;
+  if (!func->Call(context, callFrame, 0, nullptr).ToLocal(&result) ||
+      !result->IsInt32())
+    return 0;
+  return result.As<v8::Int32>()->Value();
+}
+
+int JavaScriptCallFrame::sourceID() const {
+  return callV8FunctionReturnInt("sourceID");
+}
+
+int JavaScriptCallFrame::line() const {
+  return callV8FunctionReturnInt("line");
+}
+
+int JavaScriptCallFrame::column() const {
+  return callV8FunctionReturnInt("column");
+}
+
+int JavaScriptCallFrame::contextId() const {
+  return callV8FunctionReturnInt("contextId");
+}
+
+bool JavaScriptCallFrame::isAtReturn() const {
+  v8::HandleScope handleScope(m_isolate);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Value> result;
+  if (!callFrame->Get(context, toV8StringInternalized(m_isolate, "isAtReturn"))
+           .ToLocal(&result) ||
+      !result->IsBoolean())
+    return false;
+  return result.As<v8::Boolean>()->BooleanValue(context).FromMaybe(false);
+}
+
+v8::Local<v8::Object> JavaScriptCallFrame::details() const {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Function> func = v8::Local<v8::Function>::Cast(
+      callFrame->Get(context, toV8StringInternalized(m_isolate, "details"))
+          .ToLocalChecked());
+  return v8::Local<v8::Object>::Cast(
+      func->Call(context, callFrame, 0, nullptr).ToLocalChecked());
+}
+
+v8::MaybeLocal<v8::Value> JavaScriptCallFrame::evaluate(
+    v8::Local<v8::Value> expression) {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kRunMicrotasks);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Function> evalFunction = v8::Local<v8::Function>::Cast(
+      callFrame->Get(context, toV8StringInternalized(m_isolate, "evaluate"))
+          .ToLocalChecked());
+  return evalFunction->Call(context, callFrame, 1, &expression);
+}
+
+v8::MaybeLocal<v8::Value> JavaScriptCallFrame::restart() {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Function> restartFunction = v8::Local<v8::Function>::Cast(
+      callFrame->Get(context, toV8StringInternalized(m_isolate, "restart"))
+          .ToLocalChecked());
+  v8::Debug::SetLiveEditEnabled(m_isolate, true);
+  v8::MaybeLocal<v8::Value> result = restartFunction->Call(
+      m_debuggerContext.Get(m_isolate), callFrame, 0, nullptr);
+  v8::Debug::SetLiveEditEnabled(m_isolate, false);
+  return result;
+}
+
+v8::MaybeLocal<v8::Value> JavaScriptCallFrame::setVariableValue(
+    int scopeNumber, v8::Local<v8::Value> variableName,
+    v8::Local<v8::Value> newValue) {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Context> context =
+      v8::Local<v8::Context>::New(m_isolate, m_debuggerContext);
+  v8::Local<v8::Object> callFrame =
+      v8::Local<v8::Object>::New(m_isolate, m_callFrame);
+  v8::Local<v8::Function> setVariableValueFunction =
+      v8::Local<v8::Function>::Cast(
+          callFrame
+              ->Get(context,
+                    toV8StringInternalized(m_isolate, "setVariableValue"))
+              .ToLocalChecked());
+  v8::Local<v8::Value> argv[] = {
+      v8::Local<v8::Value>(v8::Integer::New(m_isolate, scopeNumber)),
+      variableName, newValue};
+  return setVariableValueFunction->Call(context, callFrame, arraysize(argv),
+                                        argv);
+}
+
+}  // namespace v8_inspector
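
The methods above all follow one pattern, which callV8FunctionReturnInt condenses for the int-returning case: look up a named member on the wrapped call-frame object, invoke it with the frame as receiver, and coerce the result. Restated as a standalone sketch against the public V8 API (CallIntMethod is a hypothetical name; an entered isolate with an active HandleScope is assumed, and this helper is not part of the patch):

#include "include/v8.h"

namespace {

// Sketch of the call pattern used above: fetch object[name], call it with
// |object| as the receiver, and return the result as an int (0 on any miss).
int CallIntMethod(v8::Local<v8::Context> context, v8::Local<v8::Object> object,
                  const char* name) {
  v8::Isolate* isolate = context->GetIsolate();
  v8::Local<v8::String> key =
      v8::String::NewFromUtf8(isolate, name, v8::NewStringType::kInternalized)
          .ToLocalChecked();
  v8::Local<v8::Value> member;
  if (!object->Get(context, key).ToLocal(&member) || !member->IsFunction())
    return 0;
  v8::Local<v8::Value> result;
  if (!member.As<v8::Function>()
           ->Call(context, object, 0, nullptr)
           .ToLocal(&result) ||
      !result->IsInt32())
    return 0;
  return result.As<v8::Int32>()->Value();
}

}  // namespace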
diff --git a/src/inspector/java-script-call-frame.h b/src/inspector/java-script-call-frame.h
new file mode 100644
index 0000000..5a4ce19
--- /dev/null
+++ b/src/inspector/java-script-call-frame.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2010, Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_JAVASCRIPTCALLFRAME_H_
+#define V8_INSPECTOR_JAVASCRIPTCALLFRAME_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol-platform.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class JavaScriptCallFrame {
+ public:
+  static std::unique_ptr<JavaScriptCallFrame> create(
+      v8::Local<v8::Context> debuggerContext, v8::Local<v8::Object> callFrame) {
+    return wrapUnique(new JavaScriptCallFrame(debuggerContext, callFrame));
+  }
+  ~JavaScriptCallFrame();
+
+  int sourceID() const;
+  int line() const;
+  int column() const;
+  int contextId() const;
+
+  bool isAtReturn() const;
+  v8::Local<v8::Object> details() const;
+
+  v8::MaybeLocal<v8::Value> evaluate(v8::Local<v8::Value> expression);
+  v8::MaybeLocal<v8::Value> restart();
+  v8::MaybeLocal<v8::Value> setVariableValue(int scopeNumber,
+                                             v8::Local<v8::Value> variableName,
+                                             v8::Local<v8::Value> newValue);
+
+ private:
+  JavaScriptCallFrame(v8::Local<v8::Context> debuggerContext,
+                      v8::Local<v8::Object> callFrame);
+
+  int callV8FunctionReturnInt(const char* name) const;
+
+  v8::Isolate* m_isolate;
+  v8::Global<v8::Context> m_debuggerContext;
+  v8::Global<v8::Object> m_callFrame;
+
+  DISALLOW_COPY_AND_ASSIGN(JavaScriptCallFrame);
+};
+
+using JavaScriptCallFrames = std::vector<std::unique_ptr<JavaScriptCallFrame>>;
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_JAVASCRIPTCALLFRAME_H_
diff --git a/src/inspector/js_protocol-1.2.json b/src/inspector/js_protocol-1.2.json
new file mode 100644
index 0000000..aff6806
--- /dev/null
+++ b/src/inspector/js_protocol-1.2.json
@@ -0,0 +1,997 @@
+{
+    "version": { "major": "1", "minor": "2" },
+    "domains": [
+    {
+        "domain": "Schema",
+        "description": "Provides information about the protocol schema.",
+        "types": [
+            {
+                "id": "Domain",
+                "type": "object",
+                "description": "Description of the protocol domain.",
+                "exported": true,
+                "properties": [
+                    { "name": "name", "type": "string", "description": "Domain name." },
+                    { "name": "version", "type": "string", "description": "Domain version." }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "getDomains",
+                "description": "Returns supported domains.",
+                "handlers": ["browser", "renderer"],
+                "returns": [
+                    { "name": "domains", "type": "array", "items": { "$ref": "Domain" }, "description": "List of supported domains." }
+                ]
+            }
+        ]
+    },
+    {
+        "domain": "Runtime",
+        "description": "Runtime domain exposes JavaScript runtime by means of remote evaluation and mirror objects. Evaluation results are returned as mirror object that expose object type, string representation and unique identifier that can be used for further object reference. Original objects are maintained in memory unless they are either explicitly released or are released along with the other objects in their object group.",
+        "types": [
+            {
+                "id": "ScriptId",
+                "type": "string",
+                "description": "Unique script identifier."
+            },
+            {
+                "id": "RemoteObjectId",
+                "type": "string",
+                "description": "Unique object identifier."
+            },
+            {
+                "id": "UnserializableValue",
+                "type": "string",
+                "enum": ["Infinity", "NaN", "-Infinity", "-0"],
+                "description": "Primitive value which cannot be JSON-stringified."
+            },
+            {
+                "id": "RemoteObject",
+                "type": "object",
+                "description": "Mirror object referencing original JavaScript object.",
+                "exported": true,
+                "properties": [
+                    { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol"], "description": "Object type." },
+                    { "name": "subtype", "type": "string", "optional": true, "enum": ["array", "null", "node", "regexp", "date", "map", "set", "iterator", "generator", "error", "proxy", "promise", "typedarray"], "description": "Object subtype hint. Specified for <code>object</code> type values only." },
+                    { "name": "className", "type": "string", "optional": true, "description": "Object class (constructor) name. Specified for <code>object</code> type values only." },
+                    { "name": "value", "type": "any", "optional": true, "description": "Remote object value in case of primitive values or JSON values (if it was requested)." },
+                    { "name": "unserializableValue", "$ref": "UnserializableValue", "optional": true, "description": "Primitive value which can not be JSON-stringified does not have <code>value</code>, but gets this property." },
+                    { "name": "description", "type": "string", "optional": true, "description": "String representation of the object." },
+                    { "name": "objectId", "$ref": "RemoteObjectId", "optional": true, "description": "Unique object identifier (for non-primitive values)." },
+                    { "name": "preview", "$ref": "ObjectPreview", "optional": true, "description": "Preview containing abbreviated property values. Specified for <code>object</code> type values only.", "experimental": true },
+                    { "name": "customPreview", "$ref": "CustomPreview", "optional": true, "experimental": true}
+                ]
+            },
+            {
+                "id": "CustomPreview",
+                "type": "object",
+                "experimental": true,
+                "properties": [
+                    { "name": "header", "type": "string"},
+                    { "name": "hasBody", "type": "boolean"},
+                    { "name": "formatterObjectId", "$ref": "RemoteObjectId"},
+                    { "name": "bindRemoteObjectFunctionId", "$ref": "RemoteObjectId" },
+                    { "name": "configObjectId", "$ref": "RemoteObjectId", "optional": true }
+                ]
+            },
+            {
+                "id": "ObjectPreview",
+                "type": "object",
+                "experimental": true,
+                "description": "Object containing abbreviated remote object value.",
+                "properties": [
+                    { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol"], "description": "Object type." },
+                    { "name": "subtype", "type": "string", "optional": true, "enum": ["array", "null", "node", "regexp", "date", "map", "set", "iterator", "generator", "error"], "description": "Object subtype hint. Specified for <code>object</code> type values only." },
+                    { "name": "description", "type": "string", "optional": true, "description": "String representation of the object." },
+                    { "name": "overflow", "type": "boolean", "description": "True iff some of the properties or entries of the original object did not fit." },
+                    { "name": "properties", "type": "array", "items": { "$ref": "PropertyPreview" }, "description": "List of the properties." },
+                    { "name": "entries", "type": "array", "items": { "$ref": "EntryPreview" }, "optional": true, "description": "List of the entries. Specified for <code>map</code> and <code>set</code> subtype values only." }
+                ]
+            },
+            {
+                "id": "PropertyPreview",
+                "type": "object",
+                "experimental": true,
+                "properties": [
+                    { "name": "name", "type": "string", "description": "Property name." },
+                    { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol", "accessor"], "description": "Object type. Accessor means that the property itself is an accessor property." },
+                    { "name": "value", "type": "string", "optional": true, "description": "User-friendly property value string." },
+                    { "name": "valuePreview", "$ref": "ObjectPreview", "optional": true, "description": "Nested value preview." },
+                    { "name": "subtype", "type": "string", "optional": true, "enum": ["array", "null", "node", "regexp", "date", "map", "set", "iterator", "generator", "error"], "description": "Object subtype hint. Specified for <code>object</code> type values only." }
+                ]
+            },
+            {
+                "id": "EntryPreview",
+                "type": "object",
+                "experimental": true,
+                "properties": [
+                    { "name": "key", "$ref": "ObjectPreview", "optional": true, "description": "Preview of the key. Specified for map-like collection entries." },
+                    { "name": "value", "$ref": "ObjectPreview", "description": "Preview of the value." }
+                ]
+            },
+            {
+                "id": "PropertyDescriptor",
+                "type": "object",
+                "description": "Object property descriptor.",
+                "properties": [
+                    { "name": "name", "type": "string", "description": "Property name or symbol description." },
+                    { "name": "value", "$ref": "RemoteObject", "optional": true, "description": "The value associated with the property." },
+                    { "name": "writable", "type": "boolean", "optional": true, "description": "True if the value associated with the property may be changed (data descriptors only)." },
+                    { "name": "get", "$ref": "RemoteObject", "optional": true, "description": "A function which serves as a getter for the property, or <code>undefined</code> if there is no getter (accessor descriptors only)." },
+                    { "name": "set", "$ref": "RemoteObject", "optional": true, "description": "A function which serves as a setter for the property, or <code>undefined</code> if there is no setter (accessor descriptors only)." },
+                    { "name": "configurable", "type": "boolean", "description": "True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object." },
+                    { "name": "enumerable", "type": "boolean", "description": "True if this property shows up during enumeration of the properties on the corresponding object." },
+                    { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the result was thrown during the evaluation." },
+                    { "name": "isOwn", "optional": true, "type": "boolean", "description": "True if the property is owned for the object." },
+                    { "name": "symbol", "$ref": "RemoteObject", "optional": true, "description": "Property symbol object, if the property is of the <code>symbol</code> type." }
+                ]
+            },
+            {
+                "id": "InternalPropertyDescriptor",
+                "type": "object",
+                "description": "Object internal property descriptor. This property isn't normally visible in JavaScript code.",
+                "properties": [
+                    { "name": "name", "type": "string", "description": "Conventional property name." },
+                    { "name": "value", "$ref": "RemoteObject", "optional": true, "description": "The value associated with the property." }
+                ]
+            },
+            {
+                "id": "CallArgument",
+                "type": "object",
+                "description": "Represents function call argument. Either remote object id <code>objectId</code>, primitive <code>value</code>, unserializable primitive value or neither of (for undefined) them should be specified.",
+                "properties": [
+                    { "name": "value", "type": "any", "optional": true, "description": "Primitive value." },
+                    { "name": "unserializableValue", "$ref": "UnserializableValue", "optional": true, "description": "Primitive value which can not be JSON-stringified." },
+                    { "name": "objectId", "$ref": "RemoteObjectId", "optional": true, "description": "Remote object handle." }
+                ]
+            },
+            {
+                "id": "ExecutionContextId",
+                "type": "integer",
+                "description": "Id of an execution context."
+            },
+            {
+                "id": "ExecutionContextDescription",
+                "type": "object",
+                "description": "Description of an isolated world.",
+                "properties": [
+                    { "name": "id", "$ref": "ExecutionContextId", "description": "Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed." },
+                    { "name": "origin", "type": "string", "description": "Execution context origin." },
+                    { "name": "name", "type": "string", "description": "Human readable name describing given context." },
+                    { "name": "auxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." }
+                ]
+            },
+            {
+                "id": "ExceptionDetails",
+                "type": "object",
+                "description": "Detailed information about exception (or error) that was thrown during script compilation or execution.",
+                "properties": [
+                    { "name": "exceptionId", "type": "integer", "description": "Exception id." },
+                    { "name": "text", "type": "string", "description": "Exception text, which should be used together with exception object when available." },
+                    { "name": "lineNumber", "type": "integer", "description": "Line number of the exception location (0-based)." },
+                    { "name": "columnNumber", "type": "integer", "description": "Column number of the exception location (0-based)." },
+                    { "name": "scriptId", "$ref": "ScriptId", "optional": true, "description": "Script ID of the exception location." },
+                    { "name": "url", "type": "string", "optional": true, "description": "URL of the exception location, to be used when the script was not reported." },
+                    { "name": "stackTrace", "$ref": "StackTrace", "optional": true, "description": "JavaScript stack trace if available." },
+                    { "name": "exception", "$ref": "RemoteObject", "optional": true, "description": "Exception object if available." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Identifier of the context where exception happened." }
+                ]
+            },
+            {
+                "id": "Timestamp",
+                "type": "number",
+                "description": "Number of milliseconds since epoch."
+            },
+            {
+                "id": "CallFrame",
+                "type": "object",
+                "description": "Stack entry for runtime errors and assertions.",
+                "properties": [
+                    { "name": "functionName", "type": "string", "description": "JavaScript function name." },
+                    { "name": "scriptId", "$ref": "ScriptId", "description": "JavaScript script id." },
+                    { "name": "url", "type": "string", "description": "JavaScript script name or url." },
+                    { "name": "lineNumber", "type": "integer", "description": "JavaScript script line number (0-based)." },
+                    { "name": "columnNumber", "type": "integer", "description": "JavaScript script column number (0-based)." }
+                ]
+            },
+            {
+                "id": "StackTrace",
+                "type": "object",
+                "description": "Call frames for assertions or error messages.",
+                "exported": true,
+                "properties": [
+                    { "name": "description", "type": "string", "optional": true, "description": "String label of this stack trace. For async traces this may be a name of the function that initiated the async call." },
+                    { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "JavaScript function call frames." },
+                    { "name": "parent", "$ref": "StackTrace", "optional": true, "description": "Asynchronous JavaScript stack trace that preceded this stack, if available." }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "evaluate",
+                "async": true,
+                "parameters": [
+                    { "name": "expression", "type": "string", "description": "Expression to evaluate." },
+                    { "name": "objectGroup", "type": "string", "optional": true, "description": "Symbolic group name that can be used to release multiple objects." },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "contextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object that should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "userGesture", "type": "boolean", "optional": true, "experimental": true, "description": "Whether execution should be treated as initiated by user in the UI." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "RemoteObject", "description": "Evaluation result." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Evaluates expression on global object."
+            },
+            {
+                "name": "awaitPromise",
+                "async": true,
+                "parameters": [
+                    { "name": "promiseObjectId", "$ref": "RemoteObjectId", "description": "Identifier of the promise." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object that should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "description": "Whether preview should be generated for the result." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "RemoteObject", "description": "Promise result. Will contain rejected value if promise was rejected." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details if stack trace is available."}
+                ],
+                "description": "Add handler to promise with given promise object id."
+            },
+            {
+                "name": "callFunctionOn",
+                "async": true,
+                "parameters": [
+                    { "name": "objectId", "$ref": "RemoteObjectId", "description": "Identifier of the object to call function on." },
+                    { "name": "functionDeclaration", "type": "string", "description": "Declaration of the function to call." },
+                    { "name": "arguments", "type": "array", "items": { "$ref": "CallArgument", "description": "Call argument." }, "optional": true, "description": "Call arguments. All call arguments must belong to the same JavaScript world as the target object." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object which should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "userGesture", "type": "boolean", "optional": true, "experimental": true, "description": "Whether execution should be treated as initiated by user in the UI." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "RemoteObject", "description": "Call result." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Calls function with given declaration on the given object. Object group of the result is inherited from the target object."
+            },
+            {
+                "name": "getProperties",
+                "parameters": [
+                    { "name": "objectId", "$ref": "RemoteObjectId", "description": "Identifier of the object to return properties for." },
+                    { "name": "ownProperties", "optional": true, "type": "boolean", "description": "If true, returns properties belonging only to the element itself, not to its prototype chain." },
+                    { "name": "accessorPropertiesOnly", "optional": true, "type": "boolean", "description": "If true, returns accessor properties (with getter/setter) only; internal properties are not returned either.", "experimental": true },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the results." }
+                ],
+                "returns": [
+                    { "name": "result", "type": "array", "items": { "$ref": "PropertyDescriptor" }, "description": "Object properties." },
+                    { "name": "internalProperties", "optional": true, "type": "array", "items": { "$ref": "InternalPropertyDescriptor" }, "description": "Internal object properties (only of the element itself)." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Returns properties of a given object. Object group of the result is inherited from the target object."
+            },
+            {
+                "name": "releaseObject",
+                "parameters": [
+                    { "name": "objectId", "$ref": "RemoteObjectId", "description": "Identifier of the object to release." }
+                ],
+                "description": "Releases remote object with given id."
+            },
+            {
+                "name": "releaseObjectGroup",
+                "parameters": [
+                    { "name": "objectGroup", "type": "string", "description": "Symbolic object group name." }
+                ],
+                "description": "Releases all remote objects that belong to a given group."
+            },
+            {
+                "name": "runIfWaitingForDebugger",
+                "description": "Tells inspected instance to run if it was waiting for debugger to attach."
+            },
+            {
+                "name": "enable",
+                "description": "Enables reporting of execution contexts creation by means of <code>executionContextCreated</code> event. When the reporting gets enabled the event will be sent immediately for each existing execution context."
+            },
+            {
+                "name": "disable",
+                "description": "Disables reporting of execution contexts creation."
+            },
+            {
+                "name": "discardConsoleEntries",
+                "description": "Discards collected exceptions and console API calls."
+            },
+            {
+                "name": "setCustomObjectFormatterEnabled",
+                "parameters": [
+                    {
+                        "name": "enabled",
+                        "type": "boolean"
+                    }
+                ],
+                "experimental": true
+            },
+            {
+                "name": "compileScript",
+                "parameters": [
+                    { "name": "expression", "type": "string", "description": "Expression to compile." },
+                    { "name": "sourceURL", "type": "string", "description": "Source url to be set for the script." },
+                    { "name": "persistScript", "type": "boolean", "description": "Specifies whether the compiled script should be persisted." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page." }
+                ],
+                "returns": [
+                    { "name": "scriptId", "$ref": "ScriptId", "optional": true, "description": "Id of the script." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Compiles expression."
+            },
+            {
+                "name": "runScript",
+                "async": true,
+                "parameters": [
+                    { "name": "scriptId", "$ref": "ScriptId", "description": "Id of the script to run." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page." },
+                    { "name": "objectGroup", "type": "string", "optional": true, "description": "Symbolic group name that can be used to release multiple objects." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object which should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "RemoteObject", "description": "Run result." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Runs script with given id in a given context."
+            }
+        ],
+        "events": [
+            {
+                "name": "executionContextCreated",
+                "parameters": [
+                    { "name": "context", "$ref": "ExecutionContextDescription", "description": "A newly created execution context." }
+                ],
+                "description": "Issued when new execution context is created."
+            },
+            {
+                "name": "executionContextDestroyed",
+                "parameters": [
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "description": "Id of the destroyed context" }
+                ],
+                "description": "Issued when execution context is destroyed."
+            },
+            {
+                "name": "executionContextsCleared",
+                "description": "Issued when all executionContexts were cleared in browser"
+            },
+            {
+                "name": "exceptionThrown",
+                "description": "Issued when exception was thrown and unhandled.",
+                "parameters": [
+                    { "name": "timestamp", "$ref": "Timestamp", "description": "Timestamp of the exception." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails" }
+                ]
+            },
+            {
+                "name": "exceptionRevoked",
+                "description": "Issued when unhandled exception was revoked.",
+                "parameters": [
+                    { "name": "reason", "type": "string", "description": "Reason describing why exception was revoked." },
+                    { "name": "exceptionId", "type": "integer", "description": "The id of revoked exception, as reported in <code>exceptionUnhandled</code>." }
+                ]
+            },
+            {
+                "name": "consoleAPICalled",
+                "description": "Issued when console API was called.",
+                "parameters": [
+                    { "name": "type", "type": "string", "enum": ["log", "debug", "info", "error", "warning", "dir", "dirxml", "table", "trace", "clear", "startGroup", "startGroupCollapsed", "endGroup", "assert", "profile", "profileEnd"], "description": "Type of the call." },
+                    { "name": "args", "type": "array", "items": { "$ref": "RemoteObject" }, "description": "Call arguments." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "description": "Identifier of the context where the call was made." },
+                    { "name": "timestamp", "$ref": "Timestamp", "description": "Call timestamp." },
+                    { "name": "stackTrace", "$ref": "StackTrace", "optional": true, "description": "Stack trace captured when the call was made." }
+                ]
+            },
+            {
+                "name": "inspectRequested",
+                "description": "Issued when object should be inspected (for example, as a result of inspect() command line API call).",
+                "parameters": [
+                    { "name": "object", "$ref": "RemoteObject" },
+                    { "name": "hints", "type": "object" }
+                ]
+            }
+        ]
+    },
+    {
+        "domain": "Debugger",
+        "description": "Debugger domain exposes JavaScript debugging capabilities. It allows setting and removing breakpoints, stepping through execution, exploring stack traces, etc.",
+        "dependencies": ["Runtime"],
+        "types": [
+            {
+                "id": "BreakpointId",
+                "type": "string",
+                "description": "Breakpoint identifier."
+            },
+            {
+                "id": "CallFrameId",
+                "type": "string",
+                "description": "Call frame identifier."
+            },
+            {
+                "id": "Location",
+                "type": "object",
+                "properties": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Script identifier as reported in the <code>Debugger.scriptParsed</code>." },
+                    { "name": "lineNumber", "type": "integer", "description": "Line number in the script (0-based)." },
+                    { "name": "columnNumber", "type": "integer", "optional": true, "description": "Column number in the script (0-based)." }
+                ],
+                "description": "Location in the source code."
+            },
+            {
+                "id": "ScriptPosition",
+                "experimental": true,
+                "type": "object",
+                "properties": [
+                    { "name": "lineNumber", "type": "integer" },
+                    { "name": "columnNumber", "type": "integer" }
+                ],
+                "description": "Location in the source code."
+            },
+            {
+                "id": "CallFrame",
+                "type": "object",
+                "properties": [
+                    { "name": "callFrameId", "$ref": "CallFrameId", "description": "Call frame identifier. This identifier is only valid while the virtual machine is paused." },
+                    { "name": "functionName", "type": "string", "description": "Name of the JavaScript function called on this call frame." },
+                    { "name": "functionLocation", "$ref": "Location", "optional": true, "experimental": true, "description": "Location in the source code." },
+                    { "name": "location", "$ref": "Location", "description": "Location in the source code." },
+                    { "name": "scopeChain", "type": "array", "items": { "$ref": "Scope" }, "description": "Scope chain for this call frame." },
+                    { "name": "this", "$ref": "Runtime.RemoteObject", "description": "<code>this</code> object for this call frame." },
+                    { "name": "returnValue", "$ref": "Runtime.RemoteObject", "optional": true, "description": "The value being returned, if the function is at return point." }
+                ],
+                "description": "JavaScript call frame. Array of call frames form the call stack."
+            },
+            {
+                "id": "Scope",
+                "type": "object",
+                "properties": [
+                    { "name": "type", "type": "string", "enum": ["global", "local", "with", "closure", "catch", "block", "script"], "description": "Scope type." },
+                    { "name": "object", "$ref": "Runtime.RemoteObject", "description": "Object representing the scope. For <code>global</code> and <code>with</code> scopes it represents the actual object; for the rest of the scopes, it is an artificial transient object enumerating scope variables as its properties." },
+                    { "name": "name", "type": "string", "optional": true },
+                    { "name": "startLocation", "$ref": "Location", "optional": true, "description": "Location in the source code where scope starts." },
+                    { "name": "endLocation", "$ref": "Location", "optional": true, "description": "Location in the source code where scope ends." }
+                ],
+                "description": "Scope description."
+            },
+            {
+                "id": "SearchMatch",
+                "type": "object",
+                "description": "Search match for resource.",
+                "exported": true,
+                "properties": [
+                    { "name": "lineNumber", "type": "number", "description": "Line number in resource content." },
+                    { "name": "lineContent", "type": "string", "description": "Line with match content." }
+                ],
+                "experimental": true
+            }
+        ],
+        "commands": [
+            {
+                "name": "enable",
+                "description": "Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received."
+            },
+            {
+                "name": "disable",
+                "description": "Disables debugger for given page."
+            },
+            {
+                "name": "setBreakpointsActive",
+                "parameters": [
+                    { "name": "active", "type": "boolean", "description": "New value for breakpoints active state." }
+                ],
+                "description": "Activates / deactivates all breakpoints on the page."
+            },
+            {
+                "name": "setSkipAllPauses",
+                "parameters": [
+                    { "name": "skip", "type": "boolean", "description": "New value for skip pauses state." }
+                ],
+                "description": "Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc)."
+            },
+            {
+                "name": "setBreakpointByUrl",
+                "parameters": [
+                    { "name": "lineNumber", "type": "integer", "description": "Line number to set breakpoint at." },
+                    { "name": "url", "type": "string", "optional": true, "description": "URL of the resources to set breakpoint on." },
+                    { "name": "urlRegex", "type": "string", "optional": true, "description": "Regex pattern for the URLs of the resources to set breakpoints on. Either <code>url</code> or <code>urlRegex</code> must be specified." },
+                    { "name": "columnNumber", "type": "integer", "optional": true, "description": "Offset in the line to set breakpoint at." },
+                    { "name": "condition", "type": "string", "optional": true, "description": "Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true." }
+                ],
+                "returns": [
+                    { "name": "breakpointId", "$ref": "BreakpointId", "description": "Id of the created breakpoint for further reference." },
+                    { "name": "locations", "type": "array", "items": { "$ref": "Location" }, "description": "List of the locations this breakpoint resolved into upon addition." }
+                ],
+                "description": "Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in <code>locations</code> property. Further matching script parsing will result in subsequent <code>breakpointResolved</code> events issued. This logical breakpoint will survive page reloads."
+            },
+            {
+                "name": "setBreakpoint",
+                "parameters": [
+                    { "name": "location", "$ref": "Location", "description": "Location to set breakpoint in." },
+                    { "name": "condition", "type": "string", "optional": true, "description": "Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true." }
+                ],
+                "returns": [
+                    { "name": "breakpointId", "$ref": "BreakpointId", "description": "Id of the created breakpoint for further reference." },
+                    { "name": "actualLocation", "$ref": "Location", "description": "Location this breakpoint resolved into." }
+                ],
+                "description": "Sets JavaScript breakpoint at a given location."
+            },
+            {
+                "name": "removeBreakpoint",
+                "parameters": [
+                    { "name": "breakpointId", "$ref": "BreakpointId" }
+                ],
+                "description": "Removes JavaScript breakpoint."
+            },
+            {
+                "name": "continueToLocation",
+                "parameters": [
+                    { "name": "location", "$ref": "Location", "description": "Location to continue to." }
+                ],
+                "description": "Continues execution until specific location is reached."
+            },
+            {
+                "name": "stepOver",
+                "description": "Steps over the statement."
+            },
+            {
+                "name": "stepInto",
+                "description": "Steps into the function call."
+            },
+            {
+                "name": "stepOut",
+                "description": "Steps out of the function call."
+            },
+            {
+                "name": "pause",
+                "description": "Stops on the next JavaScript statement."
+            },
+            {
+                "name": "resume",
+                "description": "Resumes JavaScript execution."
+            },
+            {
+                "name": "searchInContent",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script to search in." },
+                    { "name": "query", "type": "string", "description": "String to search for."  },
+                    { "name": "caseSensitive", "type": "boolean", "optional": true, "description": "If true, search is case sensitive." },
+                    { "name": "isRegex", "type": "boolean", "optional": true, "description": "If true, treats string parameter as regex." }
+                ],
+                "returns": [
+                    { "name": "result", "type": "array", "items": { "$ref": "SearchMatch" }, "description": "List of search matches." }
+                ],
+                "experimental": true,
+                "description": "Searches for given string in script content."
+            },
+            {
+                "name": "setScriptSource",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script to edit." },
+                    { "name": "scriptSource", "type": "string", "description": "New content of the script." },
+                    { "name": "dryRun", "type": "boolean", "optional": true, "description": "If true, the change will not actually be applied. Dry run may be used to get the result description without actually modifying the code." }
+                ],
+                "returns": [
+                    { "name": "callFrames", "type": "array", "optional": true, "items": { "$ref": "CallFrame" }, "description": "New stack trace in case editing has happened while VM was stopped." },
+                    { "name": "stackChanged", "type": "boolean", "optional": true, "description": "Whether current call stack was modified after applying the changes." },
+                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." },
+                    { "name": "exceptionDetails", "optional": true, "$ref": "Runtime.ExceptionDetails", "description": "Exception details if any." }
+                ],
+                "description": "Edits JavaScript source live."
+            },
+            {
+                "name": "restartFrame",
+                "parameters": [
+                    { "name": "callFrameId", "$ref": "CallFrameId", "description": "Call frame identifier to evaluate on." }
+                ],
+                "returns": [
+                    { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "New stack trace." },
+                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." }
+                ],
+                "description": "Restarts particular call frame from the beginning."
+            },
+            {
+                "name": "getScriptSource",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script to get source for." }
+                ],
+                "returns": [
+                    { "name": "scriptSource", "type": "string", "description": "Script source." }
+                ],
+                "description": "Returns source for the script with given id."
+            },
+            {
+                "name": "setPauseOnExceptions",
+                "parameters": [
+                    { "name": "state", "type": "string", "enum": ["none", "uncaught", "all"], "description": "Pause on exceptions mode." }
+                ],
+                "description": "Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is <code>none</code>."
+            },
+            {
+                "name": "evaluateOnCallFrame",
+                "parameters": [
+                    { "name": "callFrameId", "$ref": "CallFrameId", "description": "Call frame identifier to evaluate on." },
+                    { "name": "expression", "type": "string", "description": "Expression to evaluate." },
+                    { "name": "objectGroup", "type": "string", "optional": true, "description": "String object group name to put result into (allows rapid releasing resulting object handles using <code>releaseObjectGroup</code>)." },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Specifies whether command line API should be available to the evaluated expression, defaults to false." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object that should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "Runtime.RemoteObject", "description": "Object wrapper for the evaluation result." },
+                    { "name": "exceptionDetails", "$ref": "Runtime.ExceptionDetails", "optional": true, "description": "Exception details."}
+                ],
+                "description": "Evaluates expression on a given call frame."
+            },
+            {
+                "name": "setVariableValue",
+                "parameters": [
+                    { "name": "scopeNumber", "type": "integer", "description": "0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually." },
+                    { "name": "variableName", "type": "string", "description": "Variable name." },
+                    { "name": "newValue", "$ref": "Runtime.CallArgument", "description": "New variable value." },
+                    { "name": "callFrameId", "$ref": "CallFrameId", "description": "Id of callframe that holds variable." }
+                ],
+                "description": "Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually."
+            },
+            {
+                "name": "setAsyncCallStackDepth",
+                "parameters": [
+                    { "name": "maxDepth", "type": "integer", "description": "Maximum depth of async call stacks. Setting to <code>0</code> will effectively disable collecting async call stacks (default)." }
+                ],
+                "description": "Enables or disables async call stacks tracking."
+            },
+            {
+                "name": "setBlackboxPatterns",
+                "parameters": [
+                    { "name": "patterns", "type": "array", "items": { "type": "string" }, "description": "Array of regexps that will be used to check script url for blackbox state." }
+                ],
+                "experimental": true,
+                "description": "Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful."
+            },
+            {
+                "name": "setBlackboxedRanges",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script." },
+                    { "name": "positions", "type": "array", "items": { "$ref": "ScriptPosition" } }
+                ],
+                "experimental": true,
+                "description": "Makes backend skip steps in the script in blackboxed ranges. VM will try to leave blackboxed scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted."
+            }
+        ],
+        "events": [
+            {
+                "name": "scriptParsed",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Identifier of the script parsed." },
+                    { "name": "url", "type": "string", "description": "URL or name of the script parsed (if any)." },
+                    { "name": "startLine", "type": "integer", "description": "Line offset of the script within the resource with given URL (for script tags)." },
+                    { "name": "startColumn", "type": "integer", "description": "Column offset of the script within the resource with given URL." },
+                    { "name": "endLine", "type": "integer", "description": "Last line of the script." },
+                    { "name": "endColumn", "type": "integer", "description": "Length of the last line of the script." },
+                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context." },
+                    { "name": "hash", "type": "string", "description": "Content hash of the script."},
+                    { "name": "executionContextAuxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." },
+                    { "name": "isLiveEdit", "type": "boolean", "optional": true, "description": "True, if this script is generated as a result of the live edit operation.", "experimental": true },
+                    { "name": "sourceMapURL", "type": "string", "optional": true, "description": "URL of source map associated with script (if any)." },
+                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "experimental": true }
+                ],
+                "description": "Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger."
+            },
+            {
+                "name": "scriptFailedToParse",
+                "parameters": [
+                    { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Identifier of the script parsed." },
+                    { "name": "url", "type": "string", "description": "URL or name of the script parsed (if any)." },
+                    { "name": "startLine", "type": "integer", "description": "Line offset of the script within the resource with given URL (for script tags)." },
+                    { "name": "startColumn", "type": "integer", "description": "Column offset of the script within the resource with given URL." },
+                    { "name": "endLine", "type": "integer", "description": "Last line of the script." },
+                    { "name": "endColumn", "type": "integer", "description": "Length of the last line of the script." },
+                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context." },
+                    { "name": "hash", "type": "string", "description": "Content hash of the script."},
+                    { "name": "executionContextAuxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." },
+                    { "name": "sourceMapURL", "type": "string", "optional": true, "description": "URL of source map associated with script (if any)." },
+                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "experimental": true }
+                ],
+                "description": "Fired when virtual machine fails to parse the script."
+            },
+            {
+                "name": "breakpointResolved",
+                "parameters": [
+                    { "name": "breakpointId", "$ref": "BreakpointId", "description": "Breakpoint unique identifier." },
+                    { "name": "location", "$ref": "Location", "description": "Actual breakpoint location." }
+                ],
+                "description": "Fired when breakpoint is resolved to an actual script and location."
+            },
+            {
+                "name": "paused",
+                "parameters": [
+                    { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "Call stack the virtual machine stopped on." },
+                    { "name": "reason", "type": "string", "enum": [ "XHR", "DOM", "EventListener", "exception", "assert", "debugCommand", "promiseRejection", "other" ], "description": "Pause reason.", "exported": true },
+                    { "name": "data", "type": "object", "optional": true, "description": "Object containing break-specific auxiliary properties." },
+                    { "name": "hitBreakpoints", "type": "array", "optional": true, "items": { "type": "string" }, "description": "Hit breakpoint IDs." },
+                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." }
+                ],
+                "description": "Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria."
+            },
+            {
+                "name": "resumed",
+                "description": "Fired when the virtual machine resumed execution."
+            }
+        ]
+    },
+    {
+        "domain": "Console",
+        "description": "This domain is deprecated - use Runtime or Log instead.",
+        "dependencies": ["Runtime"],
+        "deprecated": true,
+        "types": [
+            {
+                "id": "ConsoleMessage",
+                "type": "object",
+                "description": "Console message.",
+                "properties": [
+                    { "name": "source", "type": "string", "enum": ["xml", "javascript", "network", "console-api", "storage", "appcache", "rendering", "security", "other", "deprecation", "worker"], "description": "Message source." },
+                    { "name": "level", "type": "string", "enum": ["log", "warning", "error", "debug", "info"], "description": "Message severity." },
+                    { "name": "text", "type": "string", "description": "Message text." },
+                    { "name": "url", "type": "string", "optional": true, "description": "URL of the message origin." },
+                    { "name": "line", "type": "integer", "optional": true, "description": "Line number in the resource that generated this message (1-based)." },
+                    { "name": "column", "type": "integer", "optional": true, "description": "Column number in the resource that generated this message (1-based)." }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "enable",
+                "description": "Enables console domain, sends the messages collected so far to the client by means of the <code>messageAdded</code> notification."
+            },
+            {
+                "name": "disable",
+                "description": "Disables console domain, prevents further console messages from being reported to the client."
+            },
+            {
+                "name": "clearMessages",
+                "description": "Does nothing."
+            }
+        ],
+        "events": [
+            {
+                "name": "messageAdded",
+                "parameters": [
+                    { "name": "message", "$ref": "ConsoleMessage", "description": "Console message that has been added." }
+                ],
+                "description": "Issued when new console message is added."
+            }
+        ]
+    },
+    {
+        "domain": "Profiler",
+        "dependencies": ["Runtime", "Debugger"],
+        "types": [
+            {
+                "id": "ProfileNode",
+                "type": "object",
+                "description": "Profile node. Holds callsite information, execution statistics and child nodes.",
+                "properties": [
+                    { "name": "id", "type": "integer", "description": "Unique id of the node." },
+                    { "name": "callFrame", "$ref": "Runtime.CallFrame", "description": "Function location." },
+                    { "name": "hitCount", "type": "integer", "optional": true, "experimental": true, "description": "Number of samples where this node was on top of the call stack." },
+                    { "name": "children", "type": "array", "items": { "type": "integer" }, "optional": true, "description": "Child node ids." },
+                    { "name": "deoptReason", "type": "string", "optional": true, "description": "The reason the function was not optimized. The function may be deoptimized or marked as don't optimize."},
+                    { "name": "positionTicks", "type": "array", "items": { "$ref": "PositionTickInfo" }, "optional": true, "experimental": true, "description": "An array of source position ticks." }
+                ]
+            },
+            {
+                "id": "Profile",
+                "type": "object",
+                "description": "Profile.",
+                "properties": [
+                    { "name": "nodes", "type": "array", "items": { "$ref": "ProfileNode" }, "description": "The list of profile nodes. First item is the root node." },
+                    { "name": "startTime", "type": "number", "description": "Profiling start timestamp in microseconds." },
+                    { "name": "endTime", "type": "number", "description": "Profiling end timestamp in microseconds." },
+                    { "name": "samples", "optional": true, "type": "array", "items": { "type": "integer" }, "description": "Ids of the top node for each sample." },
+                    { "name": "timeDeltas", "optional": true, "type": "array", "items": { "type": "integer" }, "description": "Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime." }
+                ]
+            },
+            {
+                "id": "PositionTickInfo",
+                "type": "object",
+                "experimental": true,
+                "description": "Specifies a number of samples attributed to a certain source position.",
+                "properties": [
+                    { "name": "line", "type": "integer", "description": "Source line number (1-based)." },
+                    { "name": "ticks", "type": "integer", "description": "Number of samples attributed to the source line." }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "enable"
+            },
+            {
+                "name": "disable"
+            },
+            {
+                "name": "setSamplingInterval",
+                "parameters": [
+                    { "name": "interval", "type": "integer", "description": "New sampling interval in microseconds." }
+                ],
+                "description": "Changes CPU profiler sampling interval. Must be called before CPU profile recording is started."
+            },
+            {
+                "name": "start"
+            },
+            {
+                "name": "stop",
+                "returns": [
+                    { "name": "profile", "$ref": "Profile", "description": "Recorded profile." }
+                ]
+            }
+        ],
+        "events": [
+            {
+                "name": "consoleProfileStarted",
+                "parameters": [
+                    { "name": "id", "type": "string" },
+                    { "name": "location", "$ref": "Debugger.Location", "description": "Location of console.profile()." },
+                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as an argument to console.profile()." }
+                ],
+                "description": "Sent when new profile recording is started using console.profile() call."
+            },
+            {
+                "name": "consoleProfileFinished",
+                "parameters": [
+                    { "name": "id", "type": "string" },
+                    { "name": "location", "$ref": "Debugger.Location", "description": "Location of console.profileEnd()." },
+                    { "name": "profile", "$ref": "Profile" },
+                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as an argument to console.profile()." }
+                ]
+            }
+        ]
+    },
+    {
+        "domain": "HeapProfiler",
+        "dependencies": ["Runtime"],
+        "experimental": true,
+        "types": [
+            {
+                "id": "HeapSnapshotObjectId",
+                "type": "string",
+                "description": "Heap snapshot object id."
+            },
+            {
+                "id": "SamplingHeapProfileNode",
+                "type": "object",
+                "description": "Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes.",
+                "properties": [
+                    { "name": "callFrame", "$ref": "Runtime.CallFrame", "description": "Function location." },
+                    { "name": "selfSize", "type": "number", "description": "Allocations size in bytes for the node excluding children." },
+                    { "name": "children", "type": "array", "items": { "$ref": "SamplingHeapProfileNode" }, "description": "Child nodes." }
+                ]
+            },
+            {
+                "id": "SamplingHeapProfile",
+                "type": "object",
+                "description": "Profile.",
+                "properties": [
+                    { "name": "head", "$ref": "SamplingHeapProfileNode" }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "enable"
+            },
+            {
+                "name": "disable"
+            },
+            {
+                "name": "startTrackingHeapObjects",
+                "parameters": [
+                    { "name": "trackAllocations", "type": "boolean", "optional": true }
+                ]
+            },
+            {
+                "name": "stopTrackingHeapObjects",
+                "parameters": [
+                    { "name": "reportProgress", "type": "boolean", "optional": true, "description": "If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped." }
+                ]
+            },
+            {
+                "name": "takeHeapSnapshot",
+                "parameters": [
+                    { "name": "reportProgress", "type": "boolean", "optional": true, "description": "If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken." }
+                ]
+            },
+            {
+                "name": "collectGarbage"
+            },
+            {
+                "name": "getObjectByHeapObjectId",
+                "parameters": [
+                    { "name": "objectId", "$ref": "HeapSnapshotObjectId" },
+                    { "name": "objectGroup", "type": "string", "optional": true, "description": "Symbolic group name that can be used to release multiple objects." }
+                ],
+                "returns": [
+                    { "name": "result", "$ref": "Runtime.RemoteObject", "description": "Evaluation result." }
+                ]
+            },
+            {
+                "name": "addInspectedHeapObject",
+                "parameters": [
+                    { "name": "heapObjectId", "$ref": "HeapSnapshotObjectId", "description": "Heap snapshot object id to be accessible by means of $x command line API." }
+                ],
+                "description": "Enables console to refer to the node with given id via $x (see Command Line API for more details on $x functions)."
+            },
+            {
+                "name": "getHeapObjectId",
+                "parameters": [
+                    { "name": "objectId", "$ref": "Runtime.RemoteObjectId", "description": "Identifier of the object to get heap object id for." }
+                ],
+                "returns": [
+                    { "name": "heapSnapshotObjectId", "$ref": "HeapSnapshotObjectId", "description": "Id of the heap snapshot object corresponding to the passed remote object id." }
+                ]
+            },
+            {
+                "name": "startSampling",
+                "parameters": [
+                    { "name": "samplingInterval", "type": "number", "optional": true, "description": "Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes." }
+                ]
+            },
+            {
+                "name": "stopSampling",
+                "returns": [
+                    { "name": "profile", "$ref": "SamplingHeapProfile", "description": "Recorded sampling heap profile." }
+                ]
+            }
+        ],
+        "events": [
+            {
+                "name": "addHeapSnapshotChunk",
+                "parameters": [
+                    { "name": "chunk", "type": "string" }
+                ]
+            },
+            {
+                "name": "resetProfiles"
+            },
+            {
+                "name": "reportHeapSnapshotProgress",
+                "parameters": [
+                    { "name": "done", "type": "integer" },
+                    { "name": "total", "type": "integer" },
+                    { "name": "finished", "type": "boolean", "optional": true }
+                ]
+            },
+            {
+                "name": "lastSeenObjectId",
+                "description": "If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If there were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event.",
+                "parameters": [
+                    { "name": "lastSeenObjectId", "type": "integer" },
+                    { "name": "timestamp", "type": "number" }
+                ]
+            },
+            {
+                "name": "heapStatsUpdate",
+                "description": "If heap objects tracking has been started then backend may send an update for one or more fragments.",
+                "parameters": [
+                    { "name": "statsUpdate", "type": "array", "items": { "type": "integer" }, "description": "An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment."}
+                ]
+            }
+        ]
+    }]
+}
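
Note (reviewer annotation, not part of the patch): the JSON added above is the inspector protocol schema; clients drive it over a WebSocket transport by sending messages with an integer id, a domain-qualified method, and params that mirror the command definitions (for example Debugger.enable and Debugger.setBreakpointByUrl). The sketch below is illustrative only; the endpoint URL and the use of the "ws" npm package are assumptions for illustration and are not implied by this change.

    // Illustrative sketch only: drives the Debugger domain defined in the schema
    // above over an inspector WebSocket endpoint. Assumes the "ws" npm package and
    // an endpoint URL supplied by the embedder (both assumptions, not part of this
    // patch).
    import WebSocket from "ws";

    const endpoint = process.argv[2]; // e.g. a ws://127.0.0.1:.../<id> URL printed by the embedder (assumed)
    const ws = new WebSocket(endpoint);

    let nextId = 1;
    function send(method: string, params?: object): void {
      // Each command is a JSON message with a client-chosen id, a "Domain.method"
      // name, and the parameters described by the schema above.
      ws.send(JSON.stringify({ id: nextId++, method, params }));
    }

    ws.on("open", () => {
      send("Debugger.enable");
      // Mirrors Debugger.setBreakpointByUrl from the schema: lineNumber is 0-based,
      // url is optional; the example values are placeholders.
      send("Debugger.setBreakpointByUrl", { lineNumber: 10, url: "file:///app.js" });
    });

    ws.on("message", (data) => {
      // Responses echo the request id; events such as Debugger.scriptParsed or
      // Debugger.paused arrive with "method"/"params" and no id.
      console.log(JSON.parse(data.toString()));
    });
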
diff --git a/src/inspector/js_protocol.json b/src/inspector/js_protocol.json
index 314cb5f..aff6806 100644
--- a/src/inspector/js_protocol.json
+++ b/src/inspector/js_protocol.json
@@ -1,6 +1,33 @@
 {
-    "version": { "major": "1", "minor": "1" },
-    "domains": [{
+    "version": { "major": "1", "minor": "2" },
+    "domains": [
+    {
+        "domain": "Schema",
+        "description": "Provides information about the protocol schema.",
+        "types": [
+            {
+                "id": "Domain",
+                "type": "object",
+                "description": "Description of the protocol domain.",
+                "exported": true,
+                "properties": [
+                    { "name": "name", "type": "string", "description": "Domain name." },
+                    { "name": "version", "type": "string", "description": "Domain version." }
+                ]
+            }
+        ],
+        "commands": [
+            {
+                "name": "getDomains",
+                "description": "Returns supported domains.",
+                "handlers": ["browser", "renderer"],
+                "returns": [
+                    { "name": "domains", "type": "array", "items": { "$ref": "Domain" }, "description": "List of supported domains." }
+                ]
+            }
+        ]
+    },
+    {
         "domain": "Runtime",
         "description": "Runtime domain exposes JavaScript runtime by means of remote evaluation and mirror objects. Evaluation results are returned as mirror object that expose object type, string representation and unique identifier that can be used for further object reference. Original objects are maintained in memory unless they are either explicitly released or are released along with the other objects in their object group.",
         "types": [
@@ -15,25 +42,32 @@
                 "description": "Unique object identifier."
             },
             {
+                "id": "UnserializableValue",
+                "type": "string",
+                "enum": ["Infinity", "NaN", "-Infinity", "-0"],
+                "description": "Primitive value which cannot be JSON-stringified."
+            },
+            {
                 "id": "RemoteObject",
                 "type": "object",
                 "description": "Mirror object referencing original JavaScript object.",
                 "exported": true,
                 "properties": [
                     { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol"], "description": "Object type." },
-                    { "name": "subtype", "type": "string", "optional": true, "enum": ["array", "null", "node", "regexp", "date", "map", "set", "iterator", "generator", "error"], "description": "Object subtype hint. Specified for <code>object</code> type values only." },
+                    { "name": "subtype", "type": "string", "optional": true, "enum": ["array", "null", "node", "regexp", "date", "map", "set", "iterator", "generator", "error", "proxy", "promise", "typedarray"], "description": "Object subtype hint. Specified for <code>object</code> type values only." },
                     { "name": "className", "type": "string", "optional": true, "description": "Object class (constructor) name. Specified for <code>object</code> type values only." },
-                    { "name": "value", "type": "any", "optional": true, "description": "Remote object value in case of primitive values or JSON values (if it was requested), or description string if the value can not be JSON-stringified (like NaN, Infinity, -Infinity, -0)." },
+                    { "name": "value", "type": "any", "optional": true, "description": "Remote object value in case of primitive values or JSON values (if it was requested)." },
+                    { "name": "unserializableValue", "$ref": "UnserializableValue", "optional": true, "description": "Primitive value which cannot be JSON-stringified does not have <code>value</code>, but gets this property." },
                     { "name": "description", "type": "string", "optional": true, "description": "String representation of the object." },
                     { "name": "objectId", "$ref": "RemoteObjectId", "optional": true, "description": "Unique object identifier (for non-primitive values)." },
-                    { "name": "preview", "$ref": "ObjectPreview", "optional": true, "description": "Preview containing abbreviated property values. Specified for <code>object</code> type values only.", "hidden": true },
-                    { "name": "customPreview", "$ref": "CustomPreview", "optional": true, "hidden": true}
+                    { "name": "preview", "$ref": "ObjectPreview", "optional": true, "description": "Preview containing abbreviated property values. Specified for <code>object</code> type values only.", "experimental": true },
+                    { "name": "customPreview", "$ref": "CustomPreview", "optional": true, "experimental": true}
                 ]
             },
             {
                 "id": "CustomPreview",
                 "type": "object",
-                "hidden": true,
+                "experimental": true,
                 "properties": [
                     { "name": "header", "type": "string"},
                     { "name": "hasBody", "type": "boolean"},
@@ -45,7 +79,7 @@
             {
                 "id": "ObjectPreview",
                 "type": "object",
-                "hidden": true,
+                "experimental": true,
                 "description": "Object containing abbreviated remote object value.",
                 "properties": [
                     { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol"], "description": "Object type." },
@@ -59,7 +93,7 @@
             {
                 "id": "PropertyPreview",
                 "type": "object",
-                "hidden": true,
+                "experimental": true,
                 "properties": [
                     { "name": "name", "type": "string", "description": "Property name." },
                     { "name": "type", "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol", "accessor"], "description": "Object type. Accessor means that the property itself is an accessor property." },
@@ -71,7 +105,7 @@
             {
                 "id": "EntryPreview",
                 "type": "object",
-                "hidden": true,
+                "experimental": true,
                 "properties": [
                     { "name": "key", "$ref": "ObjectPreview", "optional": true, "description": "Preview of the key. Specified for map-like collection entries." },
                     { "name": "value", "$ref": "ObjectPreview", "description": "Preview of the value." }
@@ -90,8 +124,8 @@
                     { "name": "configurable", "type": "boolean", "description": "True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object." },
                     { "name": "enumerable", "type": "boolean", "description": "True if this property shows up during enumeration of the properties on the corresponding object." },
                     { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the result was thrown during the evaluation." },
-                    { "name": "isOwn", "optional": true, "type": "boolean", "description": "True if the property is owned for the object.", "hidden": true },
-                    { "name": "symbol", "$ref": "RemoteObject", "optional": true, "description": "Property symbol object, if the property is of the <code>symbol</code> type.", "hidden": true }
+                    { "name": "isOwn", "optional": true, "type": "boolean", "description": "True if the property is owned for the object." },
+                    { "name": "symbol", "$ref": "RemoteObject", "optional": true, "description": "Property symbol object, if the property is of the <code>symbol</code> type." }
                 ]
             },
             {
@@ -101,17 +135,16 @@
                 "properties": [
                     { "name": "name", "type": "string", "description": "Conventional property name." },
                     { "name": "value", "$ref": "RemoteObject", "optional": true, "description": "The value associated with the property." }
-                ],
-                "hidden": true
+                ]
             },
             {
                 "id": "CallArgument",
                 "type": "object",
-                "description": "Represents function call argument. Either remote object id <code>objectId</code> or primitive <code>value</code> or neither of (for undefined) them should be specified.",
+                "description": "Represents function call argument. Either remote object id <code>objectId</code>, primitive <code>value</code>, unserializable primitive value, or neither of them (for undefined) should be specified.",
                 "properties": [
-                    { "name": "value", "type": "any", "optional": true, "description": "Primitive value, or description string if the value can not be JSON-stringified (like NaN, Infinity, -Infinity, -0)." },
-                    { "name": "objectId", "$ref": "RemoteObjectId", "optional": true, "description": "Remote object handle." },
-                    { "name": "type", "optional": true, "hidden": true, "type": "string", "enum": ["object", "function", "undefined", "string", "number", "boolean", "symbol"], "description": "Object type." }
+                    { "name": "value", "type": "any", "optional": true, "description": "Primitive value." },
+                    { "name": "unserializableValue", "$ref": "UnserializableValue", "optional": true, "description": "Primitive value which cannot be JSON-stringified." },
+                    { "name": "objectId", "$ref": "RemoteObjectId", "optional": true, "description": "Remote object handle." }
                 ]
             },
             {
@@ -125,31 +158,31 @@
                 "description": "Description of an isolated world.",
                 "properties": [
                     { "name": "id", "$ref": "ExecutionContextId", "description": "Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed." },
-                    { "name": "isDefault", "type": "boolean", "description": "Whether context is the default page context (as opposite to e.g. context of content script).", "hidden": true },
-                    { "name": "origin", "type": "string", "description": "Execution context origin.", "hidden": true},
-                    { "name": "name", "type": "string", "description": "Human readable name describing given context.", "hidden": true},
-                    { "name": "frameId", "type": "string", "description": "Id of the owning frame. May be an empty string if the context is not associated with a frame." }
+                    { "name": "origin", "type": "string", "description": "Execution context origin." },
+                    { "name": "name", "type": "string", "description": "Human readable name describing given context." },
+                    { "name": "auxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." }
                 ]
             },
             {
                 "id": "ExceptionDetails",
                 "type": "object",
-                "hidden": true,
                 "description": "Detailed information about exception (or error) that was thrown during script compilation or execution.",
                 "properties": [
-                    { "name": "text", "type": "string", "description": "Exception text." },
-                    { "name": "scriptId", "$ref": "ScriptId", "description": "Script ID of the exception location." },
+                    { "name": "exceptionId", "type": "integer", "description": "Exception id." },
+                    { "name": "text", "type": "string", "description": "Exception text, which should be used together with exception object when available." },
                     { "name": "lineNumber", "type": "integer", "description": "Line number of the exception location (0-based)." },
                     { "name": "columnNumber", "type": "integer", "description": "Column number of the exception location (0-based)." },
+                    { "name": "scriptId", "$ref": "ScriptId", "optional": true, "description": "Script ID of the exception location." },
                     { "name": "url", "type": "string", "optional": true, "description": "URL of the exception location, to be used when the script was not reported." },
-                    { "name": "stackTrace", "$ref": "StackTrace", "optional": true, "description": "JavaScript stack trace if available." }
+                    { "name": "stackTrace", "$ref": "StackTrace", "optional": true, "description": "JavaScript stack trace if available." },
+                    { "name": "exception", "$ref": "RemoteObject", "optional": true, "description": "Exception object if available." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Identifier of the context where exception happened." }
                 ]
             },
             {
                 "id": "Timestamp",
                 "type": "number",
-                "description": "Number of milliseconds since epoch.",
-                "hidden": true
+                "description": "Number of milliseconds since epoch."
             },
             {
                 "id": "CallFrame",
@@ -171,7 +204,7 @@
                 "properties": [
                     { "name": "description", "type": "string", "optional": true, "description": "String label of this stack trace. For async traces this may be a name of the function that initiated the async call." },
                     { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "JavaScript function name." },
-                    { "name": "parent", "$ref": "StackTrace", "optional": true, "hidden": true, "description": "Asynchronous JavaScript stack trace that preceded this stack, if available." }
+                    { "name": "parent", "$ref": "StackTrace", "optional": true, "description": "Asynchronous JavaScript stack trace that preceded this stack, if available." }
                 ]
             }
         ],
@@ -182,24 +215,22 @@
                 "parameters": [
                     { "name": "expression", "type": "string", "description": "Expression to evaluate." },
                     { "name": "objectGroup", "type": "string", "optional": true, "description": "Symbolic group name that can be used to release multiple objects." },
-                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation.", "hidden": true },
-                    { "name": "doNotPauseOnExceptionsAndMuteConsole", "type": "boolean", "optional": true, "description": "Specifies whether evaluation should stop on exceptions and mute console. Overrides setPauseOnException state.", "hidden": true },
-                    { "name": "contextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which isolated context to perform evaluation. Each content script lives in an isolated context and this parameter may be used to specify one of those contexts. If the parameter is omitted or 0 the evaluation will be performed in the context of the inspected page." },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "contextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page." },
                     { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object that should be sent by value." },
-                    { "name": "generatePreview", "type": "boolean", "optional": true, "hidden": true, "description": "Whether preview should be generated for the result." },
-                    { "name": "userGesture", "type": "boolean", "optional": true, "hidden": true, "description": "Whether execution should be treated as initiated by user in the UI." },
-                    { "name": "awaitPromise", "type": "boolean", "optional":true, "hidden": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "userGesture", "type": "boolean", "optional": true, "experimental": true, "description": "Whether execution should be treated as initiated by user in the UI." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
                 ],
                 "returns": [
                     { "name": "result", "$ref": "RemoteObject", "description": "Evaluation result." },
-                    { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the result was thrown during the evaluation." },
-                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "hidden": true, "description": "Exception details."}
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
                 ],
                 "description": "Evaluates expression on global object."
             },
             {
                 "name": "awaitPromise",
-                "hidden": true,
                 "async": true,
                 "parameters": [
                     { "name": "promiseObjectId", "$ref": "RemoteObjectId", "description": "Identifier of the promise." },
@@ -208,25 +239,26 @@
                 ],
                 "returns": [
                     { "name": "result", "$ref": "RemoteObject", "description": "Promise result. Will contain rejected value if promise was rejected." },
-                    { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the promise was rejected." },
                     { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details if stack strace is available."}
                 ],
                 "description": "Add handler to promise with given promise object id."
             },
             {
                 "name": "callFunctionOn",
+                "async": true,
                 "parameters": [
                     { "name": "objectId", "$ref": "RemoteObjectId", "description": "Identifier of the object to call function on." },
                     { "name": "functionDeclaration", "type": "string", "description": "Declaration of the function to call." },
                     { "name": "arguments", "type": "array", "items": { "$ref": "CallArgument", "description": "Call argument." }, "optional": true, "description": "Call arguments. All call arguments must belong to the same JavaScript world as the target object." },
-                    { "name": "doNotPauseOnExceptionsAndMuteConsole", "type": "boolean", "optional": true, "description": "Specifies whether function call should stop on exceptions and mute console. Overrides setPauseOnException state.", "hidden": true },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
                     { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object which should be sent by value." },
-                    { "name": "generatePreview", "type": "boolean", "optional": true, "hidden": true, "description": "Whether preview should be generated for the result." },
-                    { "name": "userGesture", "type": "boolean", "optional": true, "hidden": true, "description": "Whether execution should be treated as initiated by user in the UI." }
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "userGesture", "type": "boolean", "optional": true, "experimental": true, "description": "Whether execution should be treated as initiated by user in the UI." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
                 ],
                 "returns": [
                     { "name": "result", "$ref": "RemoteObject", "description": "Call result." },
-                    { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the result was thrown during the evaluation." }
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
                 ],
                 "description": "Calls function with given declaration on the given object. Object group of the result is inherited from the target object."
             },
@@ -235,13 +267,13 @@
                 "parameters": [
                     { "name": "objectId", "$ref": "RemoteObjectId", "description": "Identifier of the object to return properties for." },
                     { "name": "ownProperties", "optional": true, "type": "boolean", "description": "If true, returns properties belonging only to the element itself, not to its prototype chain." },
-                    { "name": "accessorPropertiesOnly", "optional": true, "type": "boolean", "description": "If true, returns accessor properties (with getter/setter) only; internal properties are not returned either.", "hidden": true },
-                    { "name": "generatePreview", "type": "boolean", "optional": true, "hidden": true, "description": "Whether preview should be generated for the results." }
+                    { "name": "accessorPropertiesOnly", "optional": true, "type": "boolean", "description": "If true, returns accessor properties (with getter/setter) only; internal properties are not returned either.", "experimental": true },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the results." }
                 ],
                 "returns": [
                     { "name": "result", "type": "array", "items": { "$ref": "PropertyDescriptor" }, "description": "Object properties." },
-                    { "name": "internalProperties", "optional": true, "type": "array", "items": { "$ref": "InternalPropertyDescriptor" }, "description": "Internal object properties (only of the element itself).", "hidden": true },
-                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "hidden": true, "description": "Exception details."}
+                    { "name": "internalProperties", "optional": true, "type": "array", "items": { "$ref": "InternalPropertyDescriptor" }, "description": "Internal object properties (only of the element itself)." },
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails", "optional": true, "description": "Exception details."}
                 ],
                 "description": "Returns properties of a given object. Object group of the result is inherited from the target object."
             },
@@ -260,9 +292,8 @@
                 "description": "Releases all remote objects that belong to a given group."
             },
             {
-                "name": "run",
-                "hidden": true,
-                "description": "Tells inspected instance(worker or page) that it can run in case it was started paused."
+                "name": "runIfWaitingForDebugger",
+                "description": "Tells inspected instance to run if it was waiting for debugger to attach."
             },
             {
                 "name": "enable",
@@ -270,12 +301,10 @@
             },
             {
                 "name": "disable",
-                "hidden": true,
                 "description": "Disables reporting of execution contexts creation."
             },
             {
                 "name": "discardConsoleEntries",
-                "hidden": true,
                 "description": "Discards collected exceptions and console API calls."
             },
             {
@@ -286,16 +315,15 @@
                         "type": "boolean"
                     }
                 ],
-                "hidden": true
+                "experimental": true
             },
             {
                 "name": "compileScript",
-                "hidden": true,
                 "parameters": [
                     { "name": "expression", "type": "string", "description": "Expression to compile." },
                     { "name": "sourceURL", "type": "string", "description": "Source url to be set for the script." },
                     { "name": "persistScript", "type": "boolean", "description": "Specifies whether the compiled script should be persisted." },
-                    { "name": "executionContextId", "$ref": "ExecutionContextId", "description": "Specifies in which isolated context to perform script run. Each content script lives in an isolated context and this parameter is used to specify one of those contexts." }
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page." }
                 ],
                 "returns": [
                     { "name": "scriptId", "$ref": "ScriptId", "optional": true, "description": "Id of the script." },
@@ -305,13 +333,16 @@
             },
             {
                 "name": "runScript",
-                "hidden": true,
+                "async": true,
                 "parameters": [
                     { "name": "scriptId", "$ref": "ScriptId", "description": "Id of the script to run." },
-                    { "name": "executionContextId", "$ref": "ExecutionContextId", "description": "Specifies in which isolated context to perform script run. Each content script lives in an isolated context and this parameter is used to specify one of those contexts." },
+                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page." },
                     { "name": "objectGroup", "type": "string", "optional": true, "description": "Symbolic group name that can be used to release multiple objects." },
-                    { "name": "doNotPauseOnExceptionsAndMuteConsole", "type": "boolean", "optional": true, "description": "Specifies whether script run should stop on exceptions and mute console. Overrides setPauseOnException state." },
-                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation." }
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Determines whether Command Line API should be available during the evaluation." },
+                    { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object which should be sent by value." },
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "description": "Whether preview should be generated for the result." },
+                    { "name": "awaitPromise", "type": "boolean", "optional": true, "description": "Whether execution should wait for promise to be resolved. If the result of evaluation is not a Promise, it's considered to be an error." }
                 ],
                 "returns": [
                     { "name": "result", "$ref": "RemoteObject", "description": "Run result." },
@@ -343,22 +374,17 @@
                 "name": "exceptionThrown",
                 "description": "Issued when exception was thrown and unhandled.",
                 "parameters": [
-                    { "name": "exceptionId", "type": "integer", "description": "Exception id." },
                     { "name": "timestamp", "$ref": "Timestamp", "description": "Timestamp of the exception." },
-                    { "name": "details", "$ref": "ExceptionDetails" },
-                    { "name": "exception", "$ref": "RemoteObject", "optional": true, "description": "Exception object." },
-                    { "name": "executionContextId", "$ref": "ExecutionContextId", "optional": true, "description": "Identifier of the context where exception happened." }
-                ],
-                "hidden": true
+                    { "name": "exceptionDetails", "$ref": "ExceptionDetails" }
+                ]
             },
             {
                 "name": "exceptionRevoked",
                 "description": "Issued when unhandled exception was revoked.",
                 "parameters": [
-                    { "name": "message", "type": "string", "description": "Message describing why exception was revoked." },
+                    { "name": "reason", "type": "string", "description": "Reason describing why exception was revoked." },
                     { "name": "exceptionId", "type": "integer", "description": "The id of revoked exception, as reported in <code>exceptionUnhandled</code>." }
-                ],
-                "hidden": true
+                ]
             },
             {
                 "name": "consoleAPICalled",
@@ -369,16 +395,15 @@
                     { "name": "executionContextId", "$ref": "ExecutionContextId", "description": "Identifier of the context where the call was made." },
                     { "name": "timestamp", "$ref": "Timestamp", "description": "Call timestamp." },
                     { "name": "stackTrace", "$ref": "StackTrace", "optional": true, "description": "Stack trace captured when the call was made." }
-                ],
-                "hidden": true
+                ]
             },
             {
                 "name": "inspectRequested",
+                "description": "Issued when object should be inspected (for example, as a result of inspect() command line API call).",
                 "parameters": [
                     { "name": "object", "$ref": "RemoteObject" },
                     { "name": "hints", "type": "object" }
-                ],
-                "hidden": true
+                ]
             }
         ]
     },
@@ -409,7 +434,7 @@
             },
             {
                 "id": "ScriptPosition",
-                "hidden": true,
+                "experimental": true,
                 "type": "object",
                 "properties": [
                     { "name": "lineNumber", "type": "integer" },
@@ -423,11 +448,11 @@
                 "properties": [
                     { "name": "callFrameId", "$ref": "CallFrameId", "description": "Call frame identifier. This identifier is only valid while the virtual machine is paused." },
                     { "name": "functionName", "type": "string", "description": "Name of the JavaScript function called on this call frame." },
-                    { "name": "functionLocation", "$ref": "Location", "optional": true, "hidden": true, "description": "Location in the source code." },
+                    { "name": "functionLocation", "$ref": "Location", "optional": true, "experimental": true, "description": "Location in the source code." },
                     { "name": "location", "$ref": "Location", "description": "Location in the source code." },
                     { "name": "scopeChain", "type": "array", "items": { "$ref": "Scope" }, "description": "Scope chain for this call frame." },
                     { "name": "this", "$ref": "Runtime.RemoteObject", "description": "<code>this</code> object for this call frame." },
-                    { "name": "returnValue", "$ref": "Runtime.RemoteObject", "optional": true, "hidden": true, "description": "The value being returned, if the function is at return point." }
+                    { "name": "returnValue", "$ref": "Runtime.RemoteObject", "optional": true, "description": "The value being returned, if the function is at return point." }
                 ],
                 "description": "JavaScript call frame. Array of call frames form the call stack."
             },
@@ -437,9 +462,9 @@
                 "properties": [
                     { "name": "type", "type": "string", "enum": ["global", "local", "with", "closure", "catch", "block", "script"], "description": "Scope type." },
                     { "name": "object", "$ref": "Runtime.RemoteObject", "description": "Object representing the scope. For <code>global</code> and <code>with</code> scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties." },
-                    { "name": "name", "type": "string", "optional": true, "hidden": true },
-                    { "name": "startLocation", "$ref": "Location", "optional": true, "hidden": true, "description": "Location in the source code where scope starts" },
-                    { "name": "endLocation", "$ref": "Location", "optional": true, "hidden": true, "description": "Location in the source code where scope ends" }
+                    { "name": "name", "type": "string", "optional": true },
+                    { "name": "startLocation", "$ref": "Location", "optional": true, "description": "Location in the source code where scope starts" },
+                    { "name": "endLocation", "$ref": "Location", "optional": true, "description": "Location in the source code where scope ends" }
                 ],
                 "description": "Scope description."
             },
@@ -452,7 +477,7 @@
                     { "name": "lineNumber", "type": "number", "description": "Line number in resource content." },
                     { "name": "lineContent", "type": "string", "description": "Line with match content." }
                 ],
-                "hidden": true
+                "experimental": true
             }
         ],
         "commands": [
@@ -473,9 +498,8 @@
             },
             {
                 "name": "setSkipAllPauses",
-                "hidden": true,
                 "parameters": [
-                    { "name": "skipped", "type": "boolean", "description": "New value for skip pauses state." }
+                    { "name": "skip", "type": "boolean", "description": "New value for skip pauses state." }
                 ],
                 "description": "Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc)."
             },
@@ -516,8 +540,7 @@
             {
                 "name": "continueToLocation",
                 "parameters": [
-                    { "name": "location", "$ref": "Location", "description": "Location to continue to." },
-                    { "name": "interstatementLocation", "type": "boolean", "optional": true, "hidden": true, "description": "Allows breakpoints at the intemediate positions inside statements." }
+                    { "name": "location", "$ref": "Location", "description": "Location to continue to." }
                 ],
                 "description": "Continues execution until specific location is reached."
             },
@@ -552,27 +575,21 @@
                 "returns": [
                     { "name": "result", "type": "array", "items": { "$ref": "SearchMatch" }, "description": "List of search matches." }
                 ],
+                "experimental": true,
                 "description": "Searches for given string in script content."
             },
             {
-                "name": "canSetScriptSource",
-                "returns": [
-                    { "name": "result", "type": "boolean", "description": "True if <code>setScriptSource</code> is supported." }
-                ],
-                "description": "Always returns true."
-            },
-            {
                 "name": "setScriptSource",
                 "parameters": [
                     { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script to edit." },
                     { "name": "scriptSource", "type": "string", "description": "New content of the script." },
-                    { "name": "preview", "type": "boolean", "optional": true, "description": " If true the change will not actually be applied. Preview mode may be used to get result description without actually modifying the code.", "hidden": true }
+                    { "name": "dryRun", "type": "boolean", "optional": true, "description": "If true the change will not actually be applied. Dry run may be used to get the result description without actually modifying the code." }
                 ],
                 "returns": [
                     { "name": "callFrames", "type": "array", "optional": true, "items": { "$ref": "CallFrame" }, "description": "New stack trace in case editing has happened while VM was stopped." },
-                    { "name": "stackChanged", "type": "boolean", "optional": true, "description": "Whether current call stack  was modified after applying the changes.", "hidden": true },
-                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any.", "hidden": true },
-                    { "name": "compileError", "optional": true, "$ref": "Runtime.ExceptionDetails", "description": "Error data if any." }
+                    { "name": "stackChanged", "type": "boolean", "optional": true, "description": "Whether current call stack was modified after applying the changes." },
+                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." },
+                    { "name": "exceptionDetails", "optional": true, "$ref": "Runtime.ExceptionDetails", "description": "Exception details if any." }
                 ],
                 "description": "Edits JavaScript source live."
             },
@@ -585,7 +602,6 @@
                     { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "New stack trace." },
                     { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." }
                 ],
-                "hidden": true,
                 "description": "Restarts particular call frame from the beginning."
             },
             {
@@ -611,15 +627,14 @@
                     { "name": "callFrameId", "$ref": "CallFrameId", "description": "Call frame identifier to evaluate on." },
                     { "name": "expression", "type": "string", "description": "Expression to evaluate." },
                     { "name": "objectGroup", "type": "string", "optional": true, "description": "String object group name to put result into (allows rapid releasing resulting object handles using <code>releaseObjectGroup</code>)." },
-                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Specifies whether command line API should be available to the evaluated expression, defaults to false.", "hidden": true },
-                    { "name": "doNotPauseOnExceptionsAndMuteConsole", "type": "boolean", "optional": true, "description": "Specifies whether evaluation should stop on exceptions and mute console. Overrides setPauseOnException state.", "hidden": true },
+                    { "name": "includeCommandLineAPI", "type": "boolean", "optional": true, "description": "Specifies whether command line API should be available to the evaluated expression, defaults to false." },
+                    { "name": "silent", "type": "boolean", "optional": true, "description": "In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides <code>setPauseOnException</code> state." },
                     { "name": "returnByValue", "type": "boolean", "optional": true, "description": "Whether the result is expected to be a JSON object that should be sent by value." },
-                    { "name": "generatePreview", "type": "boolean", "optional": true, "hidden": true, "description": "Whether preview should be generated for the result." }
+                    { "name": "generatePreview", "type": "boolean", "optional": true, "experimental": true, "description": "Whether preview should be generated for the result." }
                 ],
                 "returns": [
                     { "name": "result", "$ref": "Runtime.RemoteObject", "description": "Object wrapper for the evaluation result." },
-                    { "name": "wasThrown", "type": "boolean", "optional": true, "description": "True if the result was thrown during the evaluation." },
-                    { "name": "exceptionDetails", "$ref": "Runtime.ExceptionDetails", "optional": true, "hidden": true, "description": "Exception details."}
+                    { "name": "exceptionDetails", "$ref": "Runtime.ExceptionDetails", "optional": true, "description": "Exception details."}
                 ],
                 "description": "Evaluates expression on a given call frame."
             },
@@ -631,24 +646,13 @@
                     { "name": "newValue", "$ref": "Runtime.CallArgument", "description": "New variable value." },
                     { "name": "callFrameId", "$ref": "CallFrameId", "description": "Id of callframe that holds variable." }
                 ],
-                "hidden": true,
                 "description": "Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually."
             },
             {
-                "name": "getBacktrace",
-                "returns": [
-                    { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "Call stack the virtual machine stopped on." },
-                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." }
-                ],
-                "hidden": true,
-                "description": "Returns call stack including variables changed since VM was paused. VM must be paused."
-            },
-            {
                 "name": "setAsyncCallStackDepth",
                 "parameters": [
                     { "name": "maxDepth", "type": "integer", "description": "Maximum depth of async call stacks. Setting to <code>0</code> will effectively disable collecting async call stacks (default)." }
                 ],
-                "hidden": true,
                 "description": "Enables or disables async call stacks tracking."
             },
             {
@@ -656,7 +660,7 @@
                 "parameters": [
                     { "name": "patterns", "type": "array", "items": { "type": "string" }, "description": "Array of regexps that will be used to check script url for blackbox state." }
                 ],
-                "hidden": true,
+                "experimental": true,
                 "description": "Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful."
             },
             {
@@ -665,7 +669,7 @@
                     { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "Id of the script." },
                     { "name": "positions", "type": "array", "items": { "$ref": "ScriptPosition" } }
                 ],
-                "hidden": true,
+                "experimental": true,
                 "description": "Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted."
             }
         ],
@@ -679,14 +683,12 @@
                     { "name": "startColumn", "type": "integer", "description": "Column offset of the script within the resource with given URL." },
                     { "name": "endLine", "type": "integer", "description": "Last line of the script." },
                     { "name": "endColumn", "type": "integer", "description": "Length of the last line of the script." },
-                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context.", "hidden": true },
-                    { "name": "hash", "type": "string", "hidden": true, "description": "Content hash of the script."},
-                    { "name": "isContentScript", "type": "boolean", "optional": true, "description": "Determines whether this script is a user extension script." },
-                    { "name": "isInternalScript", "type": "boolean", "optional": true, "description": "Determines whether this script is an internal script.", "hidden": true },
-                    { "name": "isLiveEdit", "type": "boolean", "optional": true, "description": "True, if this script is generated as a result of the live edit operation.", "hidden": true },
+                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context." },
+                    { "name": "hash", "type": "string", "description": "Content hash of the script."},
+                    { "name": "executionContextAuxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." },
+                    { "name": "isLiveEdit", "type": "boolean", "optional": true, "description": "True, if this script is generated as a result of the live edit operation.", "experimental": true },
                     { "name": "sourceMapURL", "type": "string", "optional": true, "description": "URL of source map associated with script (if any)." },
-                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "hidden": true },
-                    { "name": "deprecatedCommentWasUsed", "type": "boolean", "optional": true, "hidden": true, "description": "True, if '//@ sourceURL' or '//@ sourceMappingURL' was used."}
+                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "experimental": true }
                 ],
                 "description": "Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger."
             },
@@ -699,13 +701,11 @@
                     { "name": "startColumn", "type": "integer", "description": "Column offset of the script within the resource with given URL." },
                     { "name": "endLine", "type": "integer", "description": "Last line of the script." },
                     { "name": "endColumn", "type": "integer", "description": "Length of the last line of the script." },
-                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context.", "hidden": true },
-                    { "name": "hash", "type": "string", "hidden": true, "description": "Content hash of the script."},
-                    { "name": "isContentScript", "type": "boolean", "optional": true, "description": "Determines whether this script is a user extension script." },
-                    { "name": "isInternalScript", "type": "boolean", "optional": true, "description": "Determines whether this script is an internal script.", "hidden": true },
+                    { "name": "executionContextId", "$ref": "Runtime.ExecutionContextId", "description": "Specifies script creation context." },
+                    { "name": "hash", "type": "string", "description": "Content hash of the script."},
+                    { "name": "executionContextAuxData", "type": "object", "optional": true, "description": "Embedder-specific auxiliary data." },
                     { "name": "sourceMapURL", "type": "string", "optional": true, "description": "URL of source map associated with script (if any)." },
-                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "hidden": true },
-                    { "name": "deprecatedCommentWasUsed", "type": "boolean", "optional": true, "hidden": true, "description": "True, if '//@ sourceURL' or '//@ sourceMappingURL' was used."}
+                    { "name": "hasSourceURL", "type": "boolean", "optional": true, "description": "True, if this script has sourceURL.", "experimental": true }
                 ],
                 "description": "Fired when virtual machine fails to parse the script."
             },
@@ -723,8 +723,8 @@
                     { "name": "callFrames", "type": "array", "items": { "$ref": "CallFrame" }, "description": "Call stack the virtual machine stopped on." },
                     { "name": "reason", "type": "string", "enum": [ "XHR", "DOM", "EventListener", "exception", "assert", "debugCommand", "promiseRejection", "other" ], "description": "Pause reason.", "exported": true },
                     { "name": "data", "type": "object", "optional": true, "description": "Object containing break-specific auxiliary properties." },
-                    { "name": "hitBreakpoints", "type": "array", "optional": true, "items": { "type": "string" }, "description": "Hit breakpoints IDs", "hidden": true },
-                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any.", "hidden": true }
+                    { "name": "hitBreakpoints", "type": "array", "optional": true, "items": { "type": "string" }, "description": "Hit breakpoint IDs." },
+                    { "name": "asyncStackTrace", "$ref": "Runtime.StackTrace", "optional": true, "description": "Async stack trace, if any." }
                 ],
                 "description": "Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria."
             },
@@ -775,56 +775,42 @@
                     { "name": "message", "$ref": "ConsoleMessage", "description": "Console message that has been added." }
                 ],
                 "description": "Issued when new console message is added."
-            },
-            {
-                "name": "messageRepeatCountUpdated",
-                "parameters": [
-                    { "name": "count", "type": "integer", "description": "New repeat count value." },
-                    { "name": "timestamp", "$ref": "Runtime.Timestamp", "description": "Timestamp of most recent message in batch.", "hidden": true }
-                ],
-                "description": "Not issued.",
-                "deprecated": true
-            },
-            {
-                "name": "messagesCleared",
-                "description": "Not issued.",
-                "deprecated": true
             }
         ]
     },
     {
         "domain": "Profiler",
         "dependencies": ["Runtime", "Debugger"],
-        "hidden": true,
         "types": [
             {
-                "id": "CPUProfileNode",
+                "id": "ProfileNode",
                 "type": "object",
-                "description": "CPU Profile node. Holds callsite information, execution statistics and child nodes.",
+                "description": "Profile node. Holds callsite information, execution statistics and child nodes.",
                 "properties": [
-                    { "name": "callFrame", "$ref": "Runtime.CallFrame", "description": "Function location." },
-                    { "name": "hitCount", "type": "integer", "description": "Number of samples where this node was on top of the call stack." },
-                    { "name": "children", "type": "array", "items": { "$ref": "CPUProfileNode" }, "description": "Child nodes." },
-                    { "name": "deoptReason", "type": "string", "description": "The reason of being not optimized. The function may be deoptimized or marked as don't optimize."},
                     { "name": "id", "type": "integer", "description": "Unique id of the node." },
-                    { "name": "positionTicks", "type": "array", "items": { "$ref": "PositionTickInfo" }, "description": "An array of source position ticks." }
+                    { "name": "callFrame", "$ref": "Runtime.CallFrame", "description": "Function location." },
+                    { "name": "hitCount", "type": "integer", "optional": true, "experimental": true, "description": "Number of samples where this node was on top of the call stack." },
+                    { "name": "children", "type": "array", "items": { "type": "integer" }, "optional": true, "description": "Child node ids." },
+                    { "name": "deoptReason", "type": "string", "optional": true, "description": "The reason why the function is not optimized. The function may be deoptimized or marked as don't optimize." },
+                    { "name": "positionTicks", "type": "array", "items": { "$ref": "PositionTickInfo" }, "optional": true, "experimental": true, "description": "An array of source position ticks." }
                 ]
             },
             {
-                "id": "CPUProfile",
+                "id": "Profile",
                 "type": "object",
                 "description": "Profile.",
                 "properties": [
-                    { "name": "head", "$ref": "CPUProfileNode" },
-                    { "name": "startTime", "type": "number", "description": "Profiling start time in seconds." },
-                    { "name": "endTime", "type": "number", "description": "Profiling end time in seconds." },
+                    { "name": "nodes", "type": "array", "items": { "$ref": "ProfileNode" }, "description": "The list of profile nodes. First item is the root node." },
+                    { "name": "startTime", "type": "number", "description": "Profiling start timestamp in microseconds." },
+                    { "name": "endTime", "type": "number", "description": "Profiling end timestamp in microseconds." },
                     { "name": "samples", "optional": true, "type": "array", "items": { "type": "integer" }, "description": "Ids of samples top nodes." },
-                    { "name": "timestamps", "optional": true, "type": "array", "items": { "type": "number" }, "description": "Timestamps of the samples in microseconds." }
+                    { "name": "timeDeltas", "optional": true, "type": "array", "items": { "type": "integer" }, "description": "Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime." }
                 ]
             },
             {
                 "id": "PositionTickInfo",
                 "type": "object",
+                "experimental": true,
                 "description": "Specifies a number of samples attributed to a certain source position.",
                 "properties": [
                     { "name": "line", "type": "integer", "description": "Source line number (1-based)." },
@@ -852,7 +838,7 @@
             {
                 "name": "stop",
                 "returns": [
-                    { "name": "profile", "$ref": "CPUProfile", "description": "Recorded profile." }
+                    { "name": "profile", "$ref": "Profile", "description": "Recorded profile." }
                 ]
             }
         ],
@@ -862,7 +848,7 @@
                 "parameters": [
                     { "name": "id", "type": "string" },
                     { "name": "location", "$ref": "Debugger.Location", "description": "Location of console.profile()." },
-                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as argument to console.profile()." }
+                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as an argument to console.profile()." }
                 ],
                 "description": "Sent when new profile recodring is started using console.profile() call."
             },
@@ -871,8 +857,8 @@
                 "parameters": [
                     { "name": "id", "type": "string" },
                     { "name": "location", "$ref": "Debugger.Location", "description": "Location of console.profileEnd()." },
-                    { "name": "profile", "$ref": "CPUProfile" },
-                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as argunet to console.profile()." }
+                    { "name": "profile", "$ref": "Profile" },
+                    { "name": "title", "type": "string", "optional": true, "description": "Profile title passed as an argument to console.profile()." }
                 ]
             }
         ]
@@ -880,7 +866,7 @@
     {
         "domain": "HeapProfiler",
         "dependencies": ["Runtime"],
-        "hidden": true,
+        "experimental": true,
         "types": [
             {
                 "id": "HeapSnapshotObjectId",
diff --git a/src/inspector/protocol-platform.h b/src/inspector/protocol-platform.h
new file mode 100644
index 0000000..c772393
--- /dev/null
+++ b/src/inspector/protocol-platform.h
@@ -0,0 +1,21 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_PROTOCOLPLATFORM_H_
+#define V8_INSPECTOR_PROTOCOLPLATFORM_H_
+
+#include <memory>
+
+#include "src/base/logging.h"
+
+namespace v8_inspector {
+
+template <typename T>
+std::unique_ptr<T> wrapUnique(T* ptr) {
+  return std::unique_ptr<T>(ptr);
+}
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_PROTOCOLPLATFORM_H_
diff --git a/src/inspector/remote-object-id.cc b/src/inspector/remote-object-id.cc
new file mode 100644
index 0000000..d83020c
--- /dev/null
+++ b/src/inspector/remote-object-id.cc
@@ -0,0 +1,76 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/remote-object-id.h"
+
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/string-util.h"
+
+namespace v8_inspector {
+
+RemoteObjectIdBase::RemoteObjectIdBase() : m_injectedScriptId(0) {}
+
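+// Parses |objectId| as JSON, extracts its "injectedScriptId" field into
+// m_injectedScriptId and returns the parsed dictionary so that subclasses can
+// read their own fields, or nullptr on malformed input.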
+std::unique_ptr<protocol::DictionaryValue>
+RemoteObjectIdBase::parseInjectedScriptId(const String16& objectId) {
+  std::unique_ptr<protocol::Value> parsedValue = protocol::parseJSON(objectId);
+  if (!parsedValue || parsedValue->type() != protocol::Value::TypeObject)
+    return nullptr;
+
+  std::unique_ptr<protocol::DictionaryValue> parsedObjectId(
+      protocol::DictionaryValue::cast(parsedValue.release()));
+  bool success =
+      parsedObjectId->getInteger("injectedScriptId", &m_injectedScriptId);
+  if (success) return parsedObjectId;
+  return nullptr;
+}
+
+RemoteObjectId::RemoteObjectId() : RemoteObjectIdBase(), m_id(0) {}
+
+std::unique_ptr<RemoteObjectId> RemoteObjectId::parse(
+    ErrorString* errorString, const String16& objectId) {
+  std::unique_ptr<RemoteObjectId> result(new RemoteObjectId());
+  std::unique_ptr<protocol::DictionaryValue> parsedObjectId =
+      result->parseInjectedScriptId(objectId);
+  if (!parsedObjectId) {
+    *errorString = "Invalid remote object id";
+    return nullptr;
+  }
+
+  bool success = parsedObjectId->getInteger("id", &result->m_id);
+  if (!success) {
+    *errorString = "Invalid remote object id";
+    return nullptr;
+  }
+  return result;
+}
+
+RemoteCallFrameId::RemoteCallFrameId()
+    : RemoteObjectIdBase(), m_frameOrdinal(0) {}
+
+std::unique_ptr<RemoteCallFrameId> RemoteCallFrameId::parse(
+    ErrorString* errorString, const String16& objectId) {
+  std::unique_ptr<RemoteCallFrameId> result(new RemoteCallFrameId());
+  std::unique_ptr<protocol::DictionaryValue> parsedObjectId =
+      result->parseInjectedScriptId(objectId);
+  if (!parsedObjectId) {
+    *errorString = "Invalid call frame id";
+    return nullptr;
+  }
+
+  bool success = parsedObjectId->getInteger("ordinal", &result->m_frameOrdinal);
+  if (!success) {
+    *errorString = "Invalid call frame id";
+    return nullptr;
+  }
+
+  return result;
+}
+
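+// Serializes a call frame id in the JSON shape expected by parse() above:
+// {"ordinal":<frameOrdinal>,"injectedScriptId":<injectedScriptId>}.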
+String16 RemoteCallFrameId::serialize(int injectedScriptId, int frameOrdinal) {
+  return "{\"ordinal\":" + String16::fromInteger(frameOrdinal) +
+         ",\"injectedScriptId\":" + String16::fromInteger(injectedScriptId) +
+         "}";
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/remote-object-id.h b/src/inspector/remote-object-id.h
new file mode 100644
index 0000000..a32f568
--- /dev/null
+++ b/src/inspector/remote-object-id.h
@@ -0,0 +1,58 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_REMOTEOBJECTID_H_
+#define V8_INSPECTOR_REMOTEOBJECTID_H_
+
+#include "src/inspector/protocol/Forward.h"
+
+namespace v8_inspector {
+
+using protocol::ErrorString;
+
+class RemoteObjectIdBase {
+ public:
+  int contextId() const { return m_injectedScriptId; }
+
+ protected:
+  RemoteObjectIdBase();
+  ~RemoteObjectIdBase() {}
+
+  std::unique_ptr<protocol::DictionaryValue> parseInjectedScriptId(
+      const String16&);
+
+  int m_injectedScriptId;
+};
+
+class RemoteObjectId final : public RemoteObjectIdBase {
+ public:
+  static std::unique_ptr<RemoteObjectId> parse(ErrorString*, const String16&);
+  ~RemoteObjectId() {}
+  int id() const { return m_id; }
+
+ private:
+  RemoteObjectId();
+
+  int m_id;
+};
+
+class RemoteCallFrameId final : public RemoteObjectIdBase {
+ public:
+  static std::unique_ptr<RemoteCallFrameId> parse(ErrorString*,
+                                                  const String16&);
+  ~RemoteCallFrameId() {}
+
+  int frameOrdinal() const { return m_frameOrdinal; }
+
+  static String16 serialize(int injectedScriptId, int frameOrdinal);
+
+ private:
+  RemoteCallFrameId();
+
+  int m_frameOrdinal;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_REMOTEOBJECTID_H_
diff --git a/src/inspector/script-breakpoint.h b/src/inspector/script-breakpoint.h
new file mode 100644
index 0000000..025233d
--- /dev/null
+++ b/src/inspector/script-breakpoint.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2009 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1.  Redistributions of source code must retain the above copyright
+ *     notice, this list of conditions and the following disclaimer.
+ * 2.  Redistributions in binary form must reproduce the above copyright
+ *     notice, this list of conditions and the following disclaimer in the
+ *     documentation and/or other materials provided with the distribution.
+ * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ *     its contributors may be used to endorse or promote products derived
+ *     from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_SCRIPTBREAKPOINT_H_
+#define V8_INSPECTOR_SCRIPTBREAKPOINT_H_
+
+#include "src/inspector/string-16.h"
+
+namespace v8_inspector {
+
+struct ScriptBreakpoint {
+  ScriptBreakpoint() : ScriptBreakpoint(0, 0, String16()) {}
+
+  ScriptBreakpoint(int lineNumber, int columnNumber, const String16& condition)
+      : lineNumber(lineNumber),
+        columnNumber(columnNumber),
+        condition(condition) {}
+
+  int lineNumber;
+  int columnNumber;
+  String16 condition;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_SCRIPTBREAKPOINT_H_
diff --git a/src/inspector/search-util.cc b/src/inspector/search-util.cc
new file mode 100644
index 0000000..a6fba06
--- /dev/null
+++ b/src/inspector/search-util.cc
@@ -0,0 +1,164 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/search-util.h"
+
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-regex.h"
+
+namespace v8_inspector {
+
+namespace {
+
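+// Scans |content| backwards for a "//# <name>=<value>" (or "//@", and the
+// "/* ... */" forms when |multiline| is true) magic comment and returns the
+// trimmed value, or an empty string if none is found.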
+String16 findMagicComment(const String16& content, const String16& name,
+                          bool multiline) {
+  DCHECK(name.find("=") == String16::kNotFound);
+  size_t length = content.length();
+  size_t nameLength = name.length();
+
+  size_t pos = length;
+  size_t equalSignPos = 0;
+  size_t closingCommentPos = 0;
+  while (true) {
+    pos = content.reverseFind(name, pos);
+    if (pos == String16::kNotFound) return String16();
+
+    // Check for a /\/[\/*][@#][ \t]/ regexp (length of 4) before the found name.
+    if (pos < 4) return String16();
+    pos -= 4;
+    if (content[pos] != '/') continue;
+    if ((content[pos + 1] != '/' || multiline) &&
+        (content[pos + 1] != '*' || !multiline))
+      continue;
+    if (content[pos + 2] != '#' && content[pos + 2] != '@') continue;
+    if (content[pos + 3] != ' ' && content[pos + 3] != '\t') continue;
+    equalSignPos = pos + 4 + nameLength;
+    if (equalSignPos < length && content[equalSignPos] != '=') continue;
+    if (multiline) {
+      closingCommentPos = content.find("*/", equalSignPos + 1);
+      if (closingCommentPos == String16::kNotFound) return String16();
+    }
+
+    break;
+  }
+
+  DCHECK(equalSignPos);
+  DCHECK(!multiline || closingCommentPos);
+  size_t urlPos = equalSignPos + 1;
+  String16 match = multiline
+                       ? content.substring(urlPos, closingCommentPos - urlPos)
+                       : content.substring(urlPos);
+
+  size_t newLine = match.find("\n");
+  if (newLine != String16::kNotFound) match = match.substring(0, newLine);
+  match = match.stripWhiteSpace();
+
+  for (size_t i = 0; i < match.length(); ++i) {
+    UChar c = match[i];
+    if (c == '"' || c == '\'' || c == ' ' || c == '\t') return "";
+  }
+
+  return match;
+}
+
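+// Escapes regex metacharacters in |text| so that a plain-text query can be
+// used as a literal pattern.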
+String16 createSearchRegexSource(const String16& text) {
+  String16Builder result;
+
+  for (size_t i = 0; i < text.length(); i++) {
+    UChar c = text[i];
+    if (c == '[' || c == ']' || c == '(' || c == ')' || c == '{' || c == '}' ||
+        c == '+' || c == '-' || c == '*' || c == '.' || c == ',' || c == '?' ||
+        c == '\\' || c == '^' || c == '$' || c == '|') {
+      result.append('\\');
+    }
+    result.append(c);
+  }
+
+  return result.toString();
+}
+
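+// Returns the offset of every '\n' in |text|, with the text length appended as
+// the final entry so the last (possibly unterminated) line is covered.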
+std::unique_ptr<std::vector<size_t>> lineEndings(const String16& text) {
+  std::unique_ptr<std::vector<size_t>> result(new std::vector<size_t>());
+
+  const String16 lineEndString = "\n";
+  size_t start = 0;
+  while (start < text.length()) {
+    size_t lineEnd = text.find(lineEndString, start);
+    if (lineEnd == String16::kNotFound) break;
+
+    result->push_back(lineEnd);
+    start = lineEnd + 1;
+  }
+  result->push_back(text.length());
+
+  return result;
+}
+
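+// Splits |text| into lines (stripping a trailing '\r'), matches |regex|
+// against each line and returns (line number, line content) pairs for the
+// lines that match.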
+std::vector<std::pair<int, String16>> scriptRegexpMatchesByLines(
+    const V8Regex& regex, const String16& text) {
+  std::vector<std::pair<int, String16>> result;
+  if (text.isEmpty()) return result;
+
+  std::unique_ptr<std::vector<size_t>> endings(lineEndings(text));
+  size_t size = endings->size();
+  size_t start = 0;
+  for (size_t lineNumber = 0; lineNumber < size; ++lineNumber) {
+    size_t lineEnd = endings->at(lineNumber);
+    String16 line = text.substring(start, lineEnd - start);
+    if (line.length() && line[line.length() - 1] == '\r')
+      line = line.substring(0, line.length() - 1);
+
+    int matchLength;
+    if (regex.match(line, 0, &matchLength) != -1)
+      result.push_back(std::pair<int, String16>(lineNumber, line));
+
+    start = lineEnd + 1;
+  }
+  return result;
+}
+
+std::unique_ptr<protocol::Debugger::SearchMatch> buildObjectForSearchMatch(
+    int lineNumber, const String16& lineContent) {
+  return protocol::Debugger::SearchMatch::create()
+      .setLineNumber(lineNumber)
+      .setLineContent(lineContent)
+      .build();
+}
+
+std::unique_ptr<V8Regex> createSearchRegex(V8InspectorImpl* inspector,
+                                           const String16& query,
+                                           bool caseSensitive, bool isRegex) {
+  String16 regexSource = isRegex ? query : createSearchRegexSource(query);
+  return wrapUnique(new V8Regex(inspector, regexSource, caseSensitive));
+}
+
+}  // namespace
+
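+// Builds a search pattern from |query| (treated as a literal string unless
+// |isRegex| is true) and returns a protocol::Debugger::SearchMatch for every
+// matching line of |text|.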
+std::vector<std::unique_ptr<protocol::Debugger::SearchMatch>>
+searchInTextByLinesImpl(V8InspectorSession* session, const String16& text,
+                        const String16& query, const bool caseSensitive,
+                        const bool isRegex) {
+  std::unique_ptr<V8Regex> regex = createSearchRegex(
+      static_cast<V8InspectorSessionImpl*>(session)->inspector(), query,
+      caseSensitive, isRegex);
+  std::vector<std::pair<int, String16>> matches =
+      scriptRegexpMatchesByLines(*regex.get(), text);
+
+  std::vector<std::unique_ptr<protocol::Debugger::SearchMatch>> result;
+  for (const auto& match : matches)
+    result.push_back(buildObjectForSearchMatch(match.first, match.second));
+  return result;
+}
+
+String16 findSourceURL(const String16& content, bool multiline) {
+  return findMagicComment(content, "sourceURL", multiline);
+}
+
+String16 findSourceMapURL(const String16& content, bool multiline) {
+  return findMagicComment(content, "sourceMappingURL", multiline);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/search-util.h b/src/inspector/search-util.h
new file mode 100644
index 0000000..8f5753b
--- /dev/null
+++ b/src/inspector/search-util.h
@@ -0,0 +1,24 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_SEARCHUTIL_H_
+#define V8_INSPECTOR_SEARCHUTIL_H_
+
+#include "src/inspector/protocol/Debugger.h"
+#include "src/inspector/string-util.h"
+
+namespace v8_inspector {
+
+class V8InspectorSession;
+
+String16 findSourceURL(const String16& content, bool multiline);
+String16 findSourceMapURL(const String16& content, bool multiline);
+std::vector<std::unique_ptr<protocol::Debugger::SearchMatch>>
+searchInTextByLinesImpl(V8InspectorSession*, const String16& text,
+                        const String16& query, bool caseSensitive,
+                        bool isRegex);
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_SEARCHUTIL_H_
diff --git a/src/inspector/string-16.cc b/src/inspector/string-16.cc
new file mode 100644
index 0000000..f608460
--- /dev/null
+++ b/src/inspector/string-16.cc
@@ -0,0 +1,518 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/string-16.h"
+
+#include <algorithm>
+#include <cctype>
+#include <cstdlib>
+#include <cstring>
+#include <iomanip>
+#include <limits>
+#include <locale>
+#include <sstream>
+#include <string>
+
+#include "src/base/platform/platform.h"
+#include "src/inspector/protocol-platform.h"
+
+namespace v8_inspector {
+
+namespace {
+
+bool isASCII(UChar c) { return !(c & ~0x7F); }
+
+bool isSpaceOrNewLine(UChar c) {
+  return isASCII(c) && c <= ' ' && (c == ' ' || (c <= 0xD && c >= 0x9));
+}
+
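+// Parses an ASCII decimal integer from |characters|. If |ok| is provided it is
+// set to false for non-ASCII input, trailing garbage or values outside the int
+// range.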
+int charactersToInteger(const UChar* characters, size_t length,
+                        bool* ok = nullptr) {
+  std::vector<char> buffer;
+  buffer.reserve(length + 1);
+  for (size_t i = 0; i < length; ++i) {
+    if (!isASCII(characters[i])) {
+      if (ok) *ok = false;
+      return 0;
+    }
+    buffer.push_back(static_cast<char>(characters[i]));
+  }
+  buffer.push_back('\0');
+
+  char* endptr;
+  int64_t result =
+      static_cast<int64_t>(std::strtol(buffer.data(), &endptr, 10));
+  if (ok) {
+    *ok = !(*endptr) && result <= std::numeric_limits<int>::max() &&
+          result >= std::numeric_limits<int>::min();
+  }
+  return static_cast<int>(result);
+}
+
+const UChar replacementCharacter = 0xFFFD;
+using UChar32 = uint32_t;
+
+inline int inlineUTF8SequenceLengthNonASCII(char b0) {
+  if ((b0 & 0xC0) != 0xC0) return 0;
+  if ((b0 & 0xE0) == 0xC0) return 2;
+  if ((b0 & 0xF0) == 0xE0) return 3;
+  if ((b0 & 0xF8) == 0xF0) return 4;
+  return 0;
+}
+
+inline int inlineUTF8SequenceLength(char b0) {
+  return isASCII(b0) ? 1 : inlineUTF8SequenceLengthNonASCII(b0);
+}
+
+// Once the bits are split out into bytes of UTF-8, this is a mask OR-ed
+// into the first byte, depending on how many bytes follow.  There are
+// as many entries in this table as there are UTF-8 sequence types.
+// (I.e., one byte sequence, two byte... etc.). Remember that sequences
+// for *legal* UTF-8 will be 4 or fewer bytes total.
+static const unsigned char firstByteMark[7] = {0x00, 0x00, 0xC0, 0xE0,
+                                               0xF0, 0xF8, 0xFC};
+
+typedef enum {
+  conversionOK,     // conversion successful
+  sourceExhausted,  // partial character in source, but hit end
+  targetExhausted,  // insufficient room in target for conversion
+  sourceIllegal     // source sequence is illegal/malformed
+} ConversionResult;
+
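+// Converts UTF-16 to UTF-8, combining surrogate pairs into single code points.
+// In strict mode lone surrogates are rejected; the conversion also stops early
+// when the target buffer is full (targetExhausted) or the input ends in the
+// middle of a surrogate pair (sourceExhausted).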
+ConversionResult convertUTF16ToUTF8(const UChar** sourceStart,
+                                    const UChar* sourceEnd, char** targetStart,
+                                    char* targetEnd, bool strict) {
+  ConversionResult result = conversionOK;
+  const UChar* source = *sourceStart;
+  char* target = *targetStart;
+  while (source < sourceEnd) {
+    UChar32 ch;
+    uint32_t bytesToWrite = 0;
+    const UChar32 byteMask = 0xBF;
+    const UChar32 byteMark = 0x80;
+    const UChar* oldSource =
+        source;  // In case we have to back up because of target overflow.
+    ch = static_cast<uint16_t>(*source++);
+    // If we have a surrogate pair, convert to UChar32 first.
+    if (ch >= 0xD800 && ch <= 0xDBFF) {
+      // If the 16 bits following the high surrogate are in the source buffer...
+      if (source < sourceEnd) {
+        UChar32 ch2 = static_cast<uint16_t>(*source);
+        // If it's a low surrogate, convert to UChar32.
+        if (ch2 >= 0xDC00 && ch2 <= 0xDFFF) {
+          ch = ((ch - 0xD800) << 10) + (ch2 - 0xDC00) + 0x0010000;
+          ++source;
+        } else if (strict) {  // it's an unpaired high surrogate
+          --source;           // return to the illegal value itself
+          result = sourceIllegal;
+          break;
+        }
+      } else {     // We don't have the 16 bits following the high surrogate.
+        --source;  // return to the high surrogate
+        result = sourceExhausted;
+        break;
+      }
+    } else if (strict) {
+      // UTF-16 surrogate values are illegal in UTF-32
+      if (ch >= 0xDC00 && ch <= 0xDFFF) {
+        --source;  // return to the illegal value itself
+        result = sourceIllegal;
+        break;
+      }
+    }
+    // Figure out how many bytes the result will require
+    if (ch < (UChar32)0x80) {
+      bytesToWrite = 1;
+    } else if (ch < (UChar32)0x800) {
+      bytesToWrite = 2;
+    } else if (ch < (UChar32)0x10000) {
+      bytesToWrite = 3;
+    } else if (ch < (UChar32)0x110000) {
+      bytesToWrite = 4;
+    } else {
+      bytesToWrite = 3;
+      ch = replacementCharacter;
+    }
+
+    target += bytesToWrite;
+    if (target > targetEnd) {
+      source = oldSource;  // Back up source pointer!
+      target -= bytesToWrite;
+      result = targetExhausted;
+      break;
+    }
+    switch (bytesToWrite) {  // note: everything falls through.
+      case 4:
+        *--target = static_cast<char>((ch | byteMark) & byteMask);
+        ch >>= 6;
+      case 3:
+        *--target = static_cast<char>((ch | byteMark) & byteMask);
+        ch >>= 6;
+      case 2:
+        *--target = static_cast<char>((ch | byteMark) & byteMask);
+        ch >>= 6;
+      case 1:
+        *--target = static_cast<char>(ch | firstByteMark[bytesToWrite]);
+    }
+    target += bytesToWrite;
+  }
+  *sourceStart = source;
+  *targetStart = target;
+  return result;
+}
+
+/**
+ * Is this code point a BMP code point (U+0000..U+ffff)?
+ * @param c 32-bit code point
+ * @return TRUE or FALSE
+ * @stable ICU 2.8
+ */
+#define U_IS_BMP(c) ((uint32_t)(c) <= 0xffff)
+
+/**
+ * Is this code point a supplementary code point (U+10000..U+10ffff)?
+ * @param c 32-bit code point
+ * @return TRUE or FALSE
+ * @stable ICU 2.8
+ */
+#define U_IS_SUPPLEMENTARY(c) ((uint32_t)((c)-0x10000) <= 0xfffff)
+
+/**
+ * Is this code point a surrogate (U+d800..U+dfff)?
+ * @param c 32-bit code point
+ * @return TRUE or FALSE
+ * @stable ICU 2.4
+ */
+#define U_IS_SURROGATE(c) (((c)&0xfffff800) == 0xd800)
+
+/**
+ * Get the lead surrogate (0xd800..0xdbff) for a
+ * supplementary code point (0x10000..0x10ffff).
+ * @param supplementary 32-bit code point (U+10000..U+10ffff)
+ * @return lead surrogate (U+d800..U+dbff) for supplementary
+ * @stable ICU 2.4
+ */
+#define U16_LEAD(supplementary) (UChar)(((supplementary) >> 10) + 0xd7c0)
+
+/**
+ * Get the trail surrogate (0xdc00..0xdfff) for a
+ * supplementary code point (0x10000..0x10ffff).
+ * @param supplementary 32-bit code point (U+10000..U+10ffff)
+ * @return trail surrogate (U+dc00..U+dfff) for supplementary
+ * @stable ICU 2.4
+ */
+#define U16_TRAIL(supplementary) (UChar)(((supplementary)&0x3ff) | 0xdc00)
+
+// This must be called with the length pre-determined by the first byte.
+// If presented with a length > 4, this returns false.  The Unicode
+// definition of UTF-8 goes up to 4-byte sequences.
+static bool isLegalUTF8(const unsigned char* source, int length) {
+  unsigned char a;
+  const unsigned char* srcptr = source + length;
+  switch (length) {
+    default:
+      return false;
+    // Everything else falls through when "true"...
+    case 4:
+      if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false;
+    case 3:
+      if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return false;
+    case 2:
+      if ((a = (*--srcptr)) > 0xBF) return false;
+
+      // no fall-through in this inner switch
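+      // Constrain the first continuation byte to reject overlong encodings
+      // (0xE0, 0xF0), surrogate code points (0xED) and values above U+10FFFF
+      // (0xF4).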
+      switch (*source) {
+        case 0xE0:
+          if (a < 0xA0) return false;
+          break;
+        case 0xED:
+          if (a > 0x9F) return false;
+          break;
+        case 0xF0:
+          if (a < 0x90) return false;
+          break;
+        case 0xF4:
+          if (a > 0x8F) return false;
+          break;
+        default:
+          if (a < 0x80) return false;
+      }
+
+    case 1:
+      if (*source >= 0x80 && *source < 0xC2) return false;
+  }
+  if (*source > 0xF4) return false;
+  return true;
+}
+
+// Magic values subtracted from a buffer value during UTF8 conversion.
+// This table contains as many values as there might be trailing bytes
+// in a UTF-8 sequence.
+static const UChar32 offsetsFromUTF8[6] = {0x00000000UL,
+                                           0x00003080UL,
+                                           0x000E2080UL,
+                                           0x03C82080UL,
+                                           static_cast<UChar32>(0xFA082080UL),
+                                           static_cast<UChar32>(0x82082080UL)};
+
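+// Decodes one UTF-8 sequence of 'length' bytes (expected to have been
+// validated with isLegalUTF8) and advances 'sequence' past it.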
+static inline UChar32 readUTF8Sequence(const char*& sequence, size_t length) {
+  UChar32 character = 0;
+
+  // The cases all fall through.
+  switch (length) {
+    case 6:
+      character += static_cast<unsigned char>(*sequence++);
+      character <<= 6;
+    case 5:
+      character += static_cast<unsigned char>(*sequence++);
+      character <<= 6;
+    case 4:
+      character += static_cast<unsigned char>(*sequence++);
+      character <<= 6;
+    case 3:
+      character += static_cast<unsigned char>(*sequence++);
+      character <<= 6;
+    case 2:
+      character += static_cast<unsigned char>(*sequence++);
+      character <<= 6;
+    case 1:
+      character += static_cast<unsigned char>(*sequence++);
+  }
+
+  return character - offsetsFromUTF8[length - 1];
+}
+
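+// Converts as much of [*sourceStart, sourceEnd) from UTF-8 to UTF-16 as fits
+// into [*targetStart, targetEnd), advancing both pointers past the converted
+// prefix. When 'sourceAllASCII' is non-null it reports whether the converted
+// input consisted solely of ASCII characters.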
+ConversionResult convertUTF8ToUTF16(const char** sourceStart,
+                                    const char* sourceEnd, UChar** targetStart,
+                                    UChar* targetEnd, bool* sourceAllASCII,
+                                    bool strict) {
+  ConversionResult result = conversionOK;
+  const char* source = *sourceStart;
+  UChar* target = *targetStart;
+  UChar orAllData = 0;
+  while (source < sourceEnd) {
+    int utf8SequenceLength = inlineUTF8SequenceLength(*source);
+    if (sourceEnd - source < utf8SequenceLength) {
+      result = sourceExhausted;
+      break;
+    }
+    // Do this check whether lenient or strict
+    if (!isLegalUTF8(reinterpret_cast<const unsigned char*>(source),
+                     utf8SequenceLength)) {
+      result = sourceIllegal;
+      break;
+    }
+
+    UChar32 character = readUTF8Sequence(source, utf8SequenceLength);
+
+    if (target >= targetEnd) {
+      source -= utf8SequenceLength;  // Back up source pointer!
+      result = targetExhausted;
+      break;
+    }
+
+    if (U_IS_BMP(character)) {
+      // Lone surrogate code points decoded from UTF-8 are not valid UTF-16.
+      if (U_IS_SURROGATE(character)) {
+        if (strict) {
+          source -= utf8SequenceLength;  // return to the illegal value itself
+          result = sourceIllegal;
+          break;
+        }
+        *target++ = replacementCharacter;
+        orAllData |= replacementCharacter;
+      } else {
+        *target++ = static_cast<UChar>(character);  // normal case
+        orAllData |= character;
+      }
+    } else if (U_IS_SUPPLEMENTARY(character)) {
+      // Supplementary code point (0x10000-0x10FFFF): emit a surrogate pair.
+      if (target + 1 >= targetEnd) {
+        source -= utf8SequenceLength;  // Back up source pointer!
+        result = targetExhausted;
+        break;
+      }
+      *target++ = U16_LEAD(character);
+      *target++ = U16_TRAIL(character);
+      orAllData = 0xffff;
+    } else {
+      if (strict) {
+        source -= utf8SequenceLength;  // return to the start
+        result = sourceIllegal;
+        break;  // Bail out; shouldn't continue
+      } else {
+        *target++ = replacementCharacter;
+        orAllData |= replacementCharacter;
+      }
+    }
+  }
+  *sourceStart = source;
+  *targetStart = target;
+
+  if (sourceAllASCII) *sourceAllASCII = !(orAllData & ~0x7f);
+
+  return result;
+}
+
+// Helper to write a three-byte UTF-8 code point to the buffer; the caller
+// must ensure that room is available.
+static inline void putUTF8Triple(char*& buffer, UChar ch) {
+  *buffer++ = static_cast<char>(((ch >> 12) & 0x0F) | 0xE0);
+  *buffer++ = static_cast<char>(((ch >> 6) & 0x3F) | 0x80);
+  *buffer++ = static_cast<char>((ch & 0x3F) | 0x80);
+}
+
+}  // namespace
+
+// static
+String16 String16::fromInteger(int number) {
+  const size_t kBufferSize = 50;
+  char buffer[kBufferSize];
+  v8::base::OS::SNPrintF(buffer, kBufferSize, "%d", number);
+  return String16(buffer);
+}
+
+// static
+String16 String16::fromInteger(size_t number) {
+  const size_t kBufferSize = 50;
+  char buffer[kBufferSize];
+  v8::base::OS::SNPrintF(buffer, kBufferSize, "%zu", number);
+  return String16(buffer);
+}
+
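+// Formats 'number' in fixed-point notation in the "C" locale with
+// std::numeric_limits<double>::digits10 digits after the decimal point.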
+// static
+String16 String16::fromDouble(double number) {
+  std::ostringstream s;
+  s.imbue(std::locale("C"));
+  s << std::fixed << std::setprecision(std::numeric_limits<double>::digits10)
+    << number;
+  return String16(s.str().c_str());
+}
+
+// static
+String16 String16::fromDouble(double number, int precision) {
+  std::ostringstream s;
+  s.imbue(std::locale("C"));
+  s << std::fixed << std::setprecision(precision) << number;
+  return String16(s.str().c_str());
+}
+
+int String16::toInteger(bool* ok) const {
+  return charactersToInteger(characters16(), length(), ok);
+}
+
+String16 String16::stripWhiteSpace() const {
+  if (!length()) return String16();
+
+  size_t start = 0;
+  size_t end = length() - 1;
+
+  // skip white space from start
+  while (start <= end && isSpaceOrNewLine(characters16()[start])) ++start;
+
+  // only white space
+  if (start > end) return String16();
+
+  // skip white space from end
+  while (end && isSpaceOrNewLine(characters16()[end])) --end;
+
+  if (!start && end == length() - 1) return *this;
+  return String16(characters16() + start, end + 1 - start);
+}
+
+String16Builder::String16Builder() {}
+
+void String16Builder::append(const String16& s) {
+  m_buffer.insert(m_buffer.end(), s.characters16(),
+                  s.characters16() + s.length());
+}
+
+void String16Builder::append(UChar c) { m_buffer.push_back(c); }
+
+void String16Builder::append(char c) {
+  UChar u = c;
+  m_buffer.push_back(u);
+}
+
+void String16Builder::append(const UChar* characters, size_t length) {
+  m_buffer.insert(m_buffer.end(), characters, characters + length);
+}
+
+void String16Builder::append(const char* characters, size_t length) {
+  m_buffer.insert(m_buffer.end(), characters, characters + length);
+}
+
+String16 String16Builder::toString() {
+  return String16(m_buffer.data(), m_buffer.size());
+}
+
+void String16Builder::reserveCapacity(size_t capacity) {
+  m_buffer.reserve(capacity);
+}
+
+String16 String16::fromUTF8(const char* stringStart, size_t length) {
+  if (!stringStart || !length) return String16();
+
+  std::vector<UChar> buffer(length);
+  UChar* bufferStart = buffer.data();
+
+  UChar* bufferCurrent = bufferStart;
+  const char* stringCurrent = stringStart;
+  if (convertUTF8ToUTF16(&stringCurrent, stringStart + length, &bufferCurrent,
+                         bufferCurrent + buffer.size(), nullptr,
+                         true) != conversionOK)
+    return String16();
+
+  size_t utf16Length = bufferCurrent - bufferStart;
+  return String16(bufferStart, utf16Length);
+}
+
+std::string String16::utf8() const {
+  size_t length = this->length();
+
+  if (!length) return std::string("");
+
+  // Allocate a buffer big enough to hold all the characters
+  // (an individual UTF-16 UChar can only expand to 3 UTF-8 bytes).
+  // Optimization ideas, if we find this function is hot:
+  //  * We could speculatively allocate a buffer to contain 'length'
+  //    characters, and resize if necessary (i.e. if the input contains
+  //    non-ASCII characters). (Alternatively, scan the input first for
+  //    ASCII characters, so we know this will be sufficient.)
+  //  * We could allocate a buffer with an appropriate size to have a
+  //    good chance of being able to write the string into it without
+  //    reallocating (say, 1.5 x length).
+  if (length > std::numeric_limits<unsigned>::max() / 3) return std::string();
+  std::vector<char> bufferVector(length * 3);
+  char* buffer = bufferVector.data();
+  const UChar* characters = m_impl.data();
+
+  ConversionResult result =
+      convertUTF16ToUTF8(&characters, characters + length, &buffer,
+                         buffer + bufferVector.size(), false);
+  DCHECK(
+      result !=
+      targetExhausted);  // (length * 3) should be sufficient for any conversion
+
+  // Only produced from strict conversion.
+  DCHECK(result != sourceIllegal);
+
+  // Check for an unconverted high surrogate.
+  if (result == sourceExhausted) {
+    // This should be one unpaired high surrogate. Treat it the same
+    // way as an unpaired high surrogate would have been handled in
+    // the middle of a string with non-strict conversion - which is
+    // to say, simply encode it to UTF-8.
+    DCHECK((characters + 1) == (m_impl.data() + length));
+    DCHECK((*characters >= 0xD800) && (*characters <= 0xDBFF));
+    // There should be room left, since one UChar hasn't been
+    // converted.
+    DCHECK((buffer + 3) <= (bufferVector.data() + bufferVector.size()));
+    putUTF8Triple(buffer, *characters);
+  }
+
+  return std::string(bufferVector.data(), buffer - bufferVector.data());
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/string-16.h b/src/inspector/string-16.h
new file mode 100644
index 0000000..6dc7759
--- /dev/null
+++ b/src/inspector/string-16.h
@@ -0,0 +1,133 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_STRING16_H_
+#define V8_INSPECTOR_STRING16_H_
+
+#include <stdint.h>
+#include <cctype>
+#include <climits>
+#include <cstring>
+#include <string>
+#include <vector>
+
+namespace v8_inspector {
+
+using UChar = uint16_t;
+
+class String16 {
+ public:
+  static const size_t kNotFound = static_cast<size_t>(-1);
+
+  String16() {}
+  String16(const String16& other) : m_impl(other.m_impl) {}
+  String16(const UChar* characters, size_t size) : m_impl(characters, size) {}
+  String16(const UChar* characters)  // NOLINT(runtime/explicit)
+      : m_impl(characters) {}
+  String16(const char* characters)  // NOLINT(runtime/explicit)
+      : String16(characters, std::strlen(characters)) {}
+  String16(const char* characters, size_t size) {
+    m_impl.resize(size);
+    for (size_t i = 0; i < size; ++i) m_impl[i] = characters[i];
+  }
+
+  static String16 fromInteger(int);
+  static String16 fromInteger(size_t);
+  static String16 fromDouble(double);
+  static String16 fromDouble(double, int precision);
+
+  int toInteger(bool* ok = nullptr) const;
+  String16 stripWhiteSpace() const;
+  const UChar* characters16() const { return m_impl.c_str(); }
+  size_t length() const { return m_impl.length(); }
+  bool isEmpty() const { return !m_impl.length(); }
+  UChar operator[](size_t index) const { return m_impl[index]; }
+  String16 substring(size_t pos, size_t len = UINT_MAX) const {
+    return String16(m_impl.substr(pos, len));
+  }
+  size_t find(const String16& str, size_t start = 0) const {
+    return m_impl.find(str.m_impl, start);
+  }
+  size_t reverseFind(const String16& str, size_t start = UINT_MAX) const {
+    return m_impl.rfind(str.m_impl, start);
+  }
+  void swap(String16& other) { m_impl.swap(other.m_impl); }
+
+  // Convenience methods.
+  std::string utf8() const;
+  static String16 fromUTF8(const char* stringStart, size_t length);
+
+  const std::basic_string<UChar>& impl() const { return m_impl; }
+  explicit String16(const std::basic_string<UChar>& impl) : m_impl(impl) {}
+
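+  // Lazily computes and caches a 31-based polynomial hash over the UTF-16
+  // code units; later calls return the cached value.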
+  std::size_t hash() const {
+    if (!has_hash) {
+      size_t hash = 0;
+      for (size_t i = 0; i < length(); ++i) hash = 31 * hash + m_impl[i];
+      hash_code = hash;
+      has_hash = true;
+    }
+    return hash_code;
+  }
+
+ private:
+  std::basic_string<UChar> m_impl;
+  mutable bool has_hash = false;
+  mutable std::size_t hash_code = 0;
+};
+
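+// Example (illustrative) of the UTF-8 round trip provided by String16:
+//   String16 s = String16::fromUTF8("hello", 5);
+//   std::string roundTripped = s.utf8();  // == "hello"
+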
+inline bool operator==(const String16& a, const String16& b) {
+  return a.impl() == b.impl();
+}
+inline bool operator<(const String16& a, const String16& b) {
+  return a.impl() < b.impl();
+}
+inline bool operator!=(const String16& a, const String16& b) {
+  return a.impl() != b.impl();
+}
+inline bool operator==(const String16& a, const char* b) {
+  return a.impl() == String16(b).impl();
+}
+inline String16 operator+(const String16& a, const char* b) {
+  return String16(a.impl() + String16(b).impl());
+}
+inline String16 operator+(const char* a, const String16& b) {
+  return String16(String16(a).impl() + b.impl());
+}
+inline String16 operator+(const String16& a, const String16& b) {
+  return String16(a.impl() + b.impl());
+}
+
+class String16Builder {
+ public:
+  String16Builder();
+  void append(const String16&);
+  void append(UChar);
+  void append(char);
+  void append(const UChar*, size_t);
+  void append(const char*, size_t);
+  String16 toString();
+  void reserveCapacity(size_t);
+
+ private:
+  std::vector<UChar> m_buffer;
+};
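+
+// Example (illustrative) use of String16Builder:
+//   String16Builder builder;
+//   builder.append("total: ");
+//   builder.append(String16::fromInteger(42));
+//   String16 line = builder.toString();  // "total: 42"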
+
+}  // namespace v8_inspector
+
+#if !defined(__APPLE__) || defined(_LIBCPP_VERSION)
+
+namespace std {
+template <>
+struct hash<v8_inspector::String16> {
+  std::size_t operator()(const v8_inspector::String16& string) const {
+    return string.hash();
+  }
+};
+
+}  // namespace std
+
+#endif  // !defined(__APPLE__) || defined(_LIBCPP_VERSION)
+
+#endif  // V8_INSPECTOR_STRING16_H_
diff --git a/src/inspector/string-util.cc b/src/inspector/string-util.cc
new file mode 100644
index 0000000..e6b83a5
--- /dev/null
+++ b/src/inspector/string-util.cc
@@ -0,0 +1,218 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/string-util.h"
+
+#include "src/inspector/protocol/Protocol.h"
+
+namespace v8_inspector {
+
+v8::Local<v8::String> toV8String(v8::Isolate* isolate, const String16& string) {
+  if (string.isEmpty()) return v8::String::Empty(isolate);
+  DCHECK(string.length() < v8::String::kMaxLength);
+  return v8::String::NewFromTwoByte(
+             isolate, reinterpret_cast<const uint16_t*>(string.characters16()),
+             v8::NewStringType::kNormal, static_cast<int>(string.length()))
+      .ToLocalChecked();
+}
+
+v8::Local<v8::String> toV8StringInternalized(v8::Isolate* isolate,
+                                             const String16& string) {
+  if (string.isEmpty()) return v8::String::Empty(isolate);
+  DCHECK(string.length() < v8::String::kMaxLength);
+  return v8::String::NewFromTwoByte(
+             isolate, reinterpret_cast<const uint16_t*>(string.characters16()),
+             v8::NewStringType::kInternalized,
+             static_cast<int>(string.length()))
+      .ToLocalChecked();
+}
+
+v8::Local<v8::String> toV8StringInternalized(v8::Isolate* isolate,
+                                             const char* str) {
+  return v8::String::NewFromUtf8(isolate, str, v8::NewStringType::kInternalized)
+      .ToLocalChecked();
+}
+
+v8::Local<v8::String> toV8String(v8::Isolate* isolate,
+                                 const StringView& string) {
+  if (!string.length()) return v8::String::Empty(isolate);
+  DCHECK(string.length() < v8::String::kMaxLength);
+  if (string.is8Bit())
+    return v8::String::NewFromOneByte(
+               isolate, reinterpret_cast<const uint8_t*>(string.characters8()),
+               v8::NewStringType::kNormal, static_cast<int>(string.length()))
+        .ToLocalChecked();
+  return v8::String::NewFromTwoByte(
+             isolate, reinterpret_cast<const uint16_t*>(string.characters16()),
+             v8::NewStringType::kNormal, static_cast<int>(string.length()))
+      .ToLocalChecked();
+}
+
+String16 toProtocolString(v8::Local<v8::String> value) {
+  if (value.IsEmpty() || value->IsNull() || value->IsUndefined())
+    return String16();
+  std::unique_ptr<UChar[]> buffer(new UChar[value->Length()]);
+  value->Write(reinterpret_cast<uint16_t*>(buffer.get()), 0, value->Length());
+  return String16(buffer.get(), value->Length());
+}
+
+String16 toProtocolStringWithTypeCheck(v8::Local<v8::Value> value) {
+  if (value.IsEmpty() || !value->IsString()) return String16();
+  return toProtocolString(value.As<v8::String>());
+}
+
+String16 toString16(const StringView& string) {
+  if (!string.length()) return String16();
+  if (string.is8Bit())
+    return String16(reinterpret_cast<const char*>(string.characters8()),
+                    string.length());
+  return String16(reinterpret_cast<const UChar*>(string.characters16()),
+                  string.length());
+}
+
+StringView toStringView(const String16& string) {
+  if (string.isEmpty()) return StringView();
+  return StringView(reinterpret_cast<const uint16_t*>(string.characters16()),
+                    string.length());
+}
+
+bool stringViewStartsWith(const StringView& string, const char* prefix) {
+  if (!string.length()) return !(*prefix);
+  if (string.is8Bit()) {
+    for (size_t i = 0, j = 0; prefix[j] && i < string.length(); ++i, ++j) {
+      if (string.characters8()[i] != prefix[j]) return false;
+    }
+  } else {
+    for (size_t i = 0, j = 0; prefix[j] && i < string.length(); ++i, ++j) {
+      if (string.characters16()[i] != prefix[j]) return false;
+    }
+  }
+  return true;
+}
+
+namespace protocol {
+
+std::unique_ptr<protocol::Value> parseJSON(const StringView& string) {
+  if (!string.length()) return nullptr;
+  if (string.is8Bit()) {
+    return protocol::parseJSON(string.characters8(),
+                               static_cast<int>(string.length()));
+  }
+  return protocol::parseJSON(string.characters16(),
+                             static_cast<int>(string.length()));
+}
+
+std::unique_ptr<protocol::Value> parseJSON(const String16& string) {
+  if (!string.length()) return nullptr;
+  return protocol::parseJSON(string.characters16(),
+                             static_cast<int>(string.length()));
+}
+
+}  // namespace protocol
+
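+// Recursively converts a v8::Value into a protocol::Value, reporting failures
+// through 'errorString' and bounding the recursion depth with 'maxDepth'.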
+std::unique_ptr<protocol::Value> toProtocolValue(protocol::String* errorString,
+                                                 v8::Local<v8::Context> context,
+                                                 v8::Local<v8::Value> value,
+                                                 int maxDepth) {
+  if (value.IsEmpty()) {
+    UNREACHABLE();
+    return nullptr;
+  }
+
+  if (!maxDepth) {
+    *errorString = "Object reference chain is too long";
+    return nullptr;
+  }
+  maxDepth--;
+
+  if (value->IsNull() || value->IsUndefined()) return protocol::Value::null();
+  if (value->IsBoolean())
+    return protocol::FundamentalValue::create(value.As<v8::Boolean>()->Value());
+  if (value->IsNumber()) {
+    double doubleValue = value.As<v8::Number>()->Value();
+    int intValue = static_cast<int>(doubleValue);
+    if (intValue == doubleValue)
+      return protocol::FundamentalValue::create(intValue);
+    return protocol::FundamentalValue::create(doubleValue);
+  }
+  if (value->IsString())
+    return protocol::StringValue::create(
+        toProtocolString(value.As<v8::String>()));
+  if (value->IsArray()) {
+    v8::Local<v8::Array> array = value.As<v8::Array>();
+    std::unique_ptr<protocol::ListValue> inspectorArray =
+        protocol::ListValue::create();
+    uint32_t length = array->Length();
+    for (uint32_t i = 0; i < length; i++) {
+      v8::Local<v8::Value> value;
+      if (!array->Get(context, i).ToLocal(&value)) {
+        *errorString = "Internal error";
+        return nullptr;
+      }
+      std::unique_ptr<protocol::Value> element =
+          toProtocolValue(errorString, context, value, maxDepth);
+      if (!element) return nullptr;
+      inspectorArray->pushValue(std::move(element));
+    }
+    return std::move(inspectorArray);
+  }
+  if (value->IsObject()) {
+    std::unique_ptr<protocol::DictionaryValue> jsonObject =
+        protocol::DictionaryValue::create();
+    v8::Local<v8::Object> object = v8::Local<v8::Object>::Cast(value);
+    v8::Local<v8::Array> propertyNames;
+    if (!object->GetPropertyNames(context).ToLocal(&propertyNames)) {
+      *errorString = "Internal error";
+      return nullptr;
+    }
+    uint32_t length = propertyNames->Length();
+    for (uint32_t i = 0; i < length; i++) {
+      v8::Local<v8::Value> name;
+      if (!propertyNames->Get(context, i).ToLocal(&name)) {
+        *errorString = "Internal error";
+        return nullptr;
+      }
+      // FIXME(yurys): v8::Object should support GetOwnPropertyNames
+      if (name->IsString()) {
+        v8::Maybe<bool> hasRealNamedProperty = object->HasRealNamedProperty(
+            context, v8::Local<v8::String>::Cast(name));
+        if (!hasRealNamedProperty.IsJust() || !hasRealNamedProperty.FromJust())
+          continue;
+      }
+      v8::Local<v8::String> propertyName;
+      if (!name->ToString(context).ToLocal(&propertyName)) continue;
+      v8::Local<v8::Value> property;
+      if (!object->Get(context, name).ToLocal(&property)) {
+        *errorString = "Internal error";
+        return nullptr;
+      }
+      std::unique_ptr<protocol::Value> propertyValue =
+          toProtocolValue(errorString, context, property, maxDepth);
+      if (!propertyValue) return nullptr;
+      jsonObject->setValue(toProtocolString(propertyName),
+                           std::move(propertyValue));
+    }
+    return std::move(jsonObject);
+  }
+  *errorString = "Object couldn't be returned by value";
+  return nullptr;
+}
+
+// static
+std::unique_ptr<StringBuffer> StringBuffer::create(const StringView& string) {
+  String16 owner = toString16(string);
+  return StringBufferImpl::adopt(owner);
+}
+
+// static
+std::unique_ptr<StringBufferImpl> StringBufferImpl::adopt(String16& string) {
+  return wrapUnique(new StringBufferImpl(string));
+}
+
+StringBufferImpl::StringBufferImpl(String16& string) {
+  m_owner.swap(string);
+  m_string = toStringView(m_owner);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/string-util.h b/src/inspector/string-util.h
new file mode 100644
index 0000000..30137b8
--- /dev/null
+++ b/src/inspector/string-util.h
@@ -0,0 +1,75 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_STRINGUTIL_H_
+#define V8_INSPECTOR_STRINGUTIL_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/string-16.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+namespace protocol {
+
+class Value;
+
+using String = v8_inspector::String16;
+using StringBuilder = v8_inspector::String16Builder;
+
+class StringUtil {
+ public:
+  static String substring(const String& s, size_t pos, size_t len) {
+    return s.substring(pos, len);
+  }
+  static String fromInteger(int number) { return String::fromInteger(number); }
+  static String fromInteger(size_t number) {
+    return String::fromInteger(number);
+  }
+  static String fromDouble(double number) { return String::fromDouble(number); }
+  static const size_t kNotFound = String::kNotFound;
+  static void builderReserve(StringBuilder& builder, size_t capacity) {
+    builder.reserveCapacity(capacity);
+  }
+};
+
+std::unique_ptr<protocol::Value> parseJSON(const StringView& json);
+std::unique_ptr<protocol::Value> parseJSON(const String16& json);
+
+}  // namespace protocol
+
+std::unique_ptr<protocol::Value> toProtocolValue(protocol::String* errorString,
+                                                 v8::Local<v8::Context>,
+                                                 v8::Local<v8::Value>,
+                                                 int maxDepth = 1000);
+
+v8::Local<v8::String> toV8String(v8::Isolate*, const String16&);
+v8::Local<v8::String> toV8StringInternalized(v8::Isolate*, const String16&);
+v8::Local<v8::String> toV8StringInternalized(v8::Isolate*, const char*);
+v8::Local<v8::String> toV8String(v8::Isolate*, const StringView&);
+// TODO(dgozman): rename to toString16.
+String16 toProtocolString(v8::Local<v8::String>);
+String16 toProtocolStringWithTypeCheck(v8::Local<v8::Value>);
+String16 toString16(const StringView&);
+StringView toStringView(const String16&);
+bool stringViewStartsWith(const StringView&, const char*);
+
+class StringBufferImpl : public StringBuffer {
+ public:
+  // Destroys string's content.
+  static std::unique_ptr<StringBufferImpl> adopt(String16&);
+  const StringView& string() override { return m_string; }
+
+ private:
+  explicit StringBufferImpl(String16&);
+  String16 m_owner;
+  StringView m_string;
+
+  DISALLOW_COPY_AND_ASSIGN(StringBufferImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_STRINGUTIL_H_
diff --git a/src/inspector/v8-console-agent-impl.cc b/src/inspector/v8-console-agent-impl.cc
new file mode 100644
index 0000000..8eb883c
--- /dev/null
+++ b/src/inspector/v8-console-agent-impl.cc
@@ -0,0 +1,79 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-console-agent-impl.h"
+
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/v8-console-message.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+namespace v8_inspector {
+
+namespace ConsoleAgentState {
+static const char consoleEnabled[] = "consoleEnabled";
+}
+
+V8ConsoleAgentImpl::V8ConsoleAgentImpl(
+    V8InspectorSessionImpl* session, protocol::FrontendChannel* frontendChannel,
+    protocol::DictionaryValue* state)
+    : m_session(session),
+      m_state(state),
+      m_frontend(frontendChannel),
+      m_enabled(false) {}
+
+V8ConsoleAgentImpl::~V8ConsoleAgentImpl() {}
+
+void V8ConsoleAgentImpl::enable(ErrorString* errorString) {
+  if (m_enabled) return;
+  m_state->setBoolean(ConsoleAgentState::consoleEnabled, true);
+  m_enabled = true;
+  m_session->inspector()->enableStackCapturingIfNeeded();
+  reportAllMessages();
+}
+
+void V8ConsoleAgentImpl::disable(ErrorString* errorString) {
+  if (!m_enabled) return;
+  m_session->inspector()->disableStackCapturingIfNeeded();
+  m_state->setBoolean(ConsoleAgentState::consoleEnabled, false);
+  m_enabled = false;
+}
+
+void V8ConsoleAgentImpl::clearMessages(ErrorString* errorString) {}
+
+void V8ConsoleAgentImpl::restore() {
+  if (!m_state->booleanProperty(ConsoleAgentState::consoleEnabled, false))
+    return;
+  ErrorString ignored;
+  enable(&ignored);
+}
+
+void V8ConsoleAgentImpl::messageAdded(V8ConsoleMessage* message) {
+  if (m_enabled) reportMessage(message, true);
+}
+
+bool V8ConsoleAgentImpl::enabled() { return m_enabled; }
+
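+// Replays every buffered console-origin message to the frontend, stopping
+// early if the console message storage is destroyed while reporting.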
+void V8ConsoleAgentImpl::reportAllMessages() {
+  V8ConsoleMessageStorage* storage =
+      m_session->inspector()->ensureConsoleMessageStorage(
+          m_session->contextGroupId());
+  for (const auto& message : storage->messages()) {
+    if (message->origin() == V8MessageOrigin::kConsole) {
+      if (!reportMessage(message.get(), false)) return;
+    }
+  }
+}
+
+bool V8ConsoleAgentImpl::reportMessage(V8ConsoleMessage* message,
+                                       bool generatePreview) {
+  DCHECK(message->origin() == V8MessageOrigin::kConsole);
+  message->reportToFrontend(&m_frontend);
+  m_frontend.flush();
+  return m_session->inspector()->hasConsoleMessageStorage(
+      m_session->contextGroupId());
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-console-agent-impl.h b/src/inspector/v8-console-agent-impl.h
new file mode 100644
index 0000000..f3d598b
--- /dev/null
+++ b/src/inspector/v8-console-agent-impl.h
@@ -0,0 +1,48 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8CONSOLEAGENTIMPL_H_
+#define V8_INSPECTOR_V8CONSOLEAGENTIMPL_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Console.h"
+#include "src/inspector/protocol/Forward.h"
+
+namespace v8_inspector {
+
+class V8ConsoleMessage;
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+
+class V8ConsoleAgentImpl : public protocol::Console::Backend {
+ public:
+  V8ConsoleAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                     protocol::DictionaryValue* state);
+  ~V8ConsoleAgentImpl() override;
+
+  void enable(ErrorString*) override;
+  void disable(ErrorString*) override;
+  void clearMessages(ErrorString*) override;
+
+  void restore();
+  void messageAdded(V8ConsoleMessage*);
+  void reset();
+  bool enabled();
+
+ private:
+  void reportAllMessages();
+  bool reportMessage(V8ConsoleMessage*, bool generatePreview);
+
+  V8InspectorSessionImpl* m_session;
+  protocol::DictionaryValue* m_state;
+  protocol::Console::Frontend m_frontend;
+  bool m_enabled;
+
+  DISALLOW_COPY_AND_ASSIGN(V8ConsoleAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8CONSOLEAGENTIMPL_H_
diff --git a/src/inspector/v8-console-message.cc b/src/inspector/v8-console-message.cc
new file mode 100644
index 0000000..63f1d49
--- /dev/null
+++ b/src/inspector/v8-console-message.cc
@@ -0,0 +1,485 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-console-message.h"
+
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console-agent-impl.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-runtime-agent-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+namespace {
+
+String16 consoleAPITypeValue(ConsoleAPIType type) {
+  switch (type) {
+    case ConsoleAPIType::kLog:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Log;
+    case ConsoleAPIType::kDebug:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Debug;
+    case ConsoleAPIType::kInfo:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Info;
+    case ConsoleAPIType::kError:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Error;
+    case ConsoleAPIType::kWarning:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Warning;
+    case ConsoleAPIType::kClear:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Clear;
+    case ConsoleAPIType::kDir:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Dir;
+    case ConsoleAPIType::kDirXML:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Dirxml;
+    case ConsoleAPIType::kTable:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Table;
+    case ConsoleAPIType::kTrace:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Trace;
+    case ConsoleAPIType::kStartGroup:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::StartGroup;
+    case ConsoleAPIType::kStartGroupCollapsed:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::StartGroupCollapsed;
+    case ConsoleAPIType::kEndGroup:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::EndGroup;
+    case ConsoleAPIType::kAssert:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Assert;
+    case ConsoleAPIType::kTimeEnd:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Debug;
+    case ConsoleAPIType::kCount:
+      return protocol::Runtime::ConsoleAPICalled::TypeEnum::Debug;
+  }
+  return protocol::Runtime::ConsoleAPICalled::TypeEnum::Log;
+}
+
+const unsigned maxConsoleMessageCount = 1000;
+const unsigned maxArrayItemsLimit = 10000;
+const unsigned maxStackDepthLimit = 32;
+
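+// Builds a best-effort string representation of a v8::Value for console
+// messages, flattening nested arrays up to maxArrayItemsLimit elements and
+// maxStackDepthLimit levels deep.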
+class V8ValueStringBuilder {
+ public:
+  static String16 toString(v8::Local<v8::Value> value,
+                           v8::Local<v8::Context> context) {
+    V8ValueStringBuilder builder(context);
+    if (!builder.append(value)) return String16();
+    return builder.toString();
+  }
+
+ private:
+  enum {
+    IgnoreNull = 1 << 0,
+    IgnoreUndefined = 1 << 1,
+  };
+
+  explicit V8ValueStringBuilder(v8::Local<v8::Context> context)
+      : m_arrayLimit(maxArrayItemsLimit),
+        m_isolate(context->GetIsolate()),
+        m_tryCatch(context->GetIsolate()),
+        m_context(context) {}
+
+  bool append(v8::Local<v8::Value> value, unsigned ignoreOptions = 0) {
+    if (value.IsEmpty()) return true;
+    if ((ignoreOptions & IgnoreNull) && value->IsNull()) return true;
+    if ((ignoreOptions & IgnoreUndefined) && value->IsUndefined()) return true;
+    if (value->IsString()) return append(v8::Local<v8::String>::Cast(value));
+    if (value->IsStringObject())
+      return append(v8::Local<v8::StringObject>::Cast(value)->ValueOf());
+    if (value->IsSymbol()) return append(v8::Local<v8::Symbol>::Cast(value));
+    if (value->IsSymbolObject())
+      return append(v8::Local<v8::SymbolObject>::Cast(value)->ValueOf());
+    if (value->IsNumberObject()) {
+      m_builder.append(String16::fromDouble(
+          v8::Local<v8::NumberObject>::Cast(value)->ValueOf(), 6));
+      return true;
+    }
+    if (value->IsBooleanObject()) {
+      m_builder.append(v8::Local<v8::BooleanObject>::Cast(value)->ValueOf()
+                           ? "true"
+                           : "false");
+      return true;
+    }
+    if (value->IsArray()) return append(v8::Local<v8::Array>::Cast(value));
+    if (value->IsProxy()) {
+      m_builder.append("[object Proxy]");
+      return true;
+    }
+    if (value->IsObject() && !value->IsDate() && !value->IsFunction() &&
+        !value->IsNativeError() && !value->IsRegExp()) {
+      v8::Local<v8::Object> object = v8::Local<v8::Object>::Cast(value);
+      v8::Local<v8::String> stringValue;
+      if (object->ObjectProtoToString(m_isolate->GetCurrentContext())
+              .ToLocal(&stringValue))
+        return append(stringValue);
+    }
+    v8::Local<v8::String> stringValue;
+    if (!value->ToString(m_isolate->GetCurrentContext()).ToLocal(&stringValue))
+      return false;
+    return append(stringValue);
+  }
+
+  bool append(v8::Local<v8::Array> array) {
+    for (const auto& it : m_visitedArrays) {
+      if (it == array) return true;
+    }
+    uint32_t length = array->Length();
+    if (length > m_arrayLimit) return false;
+    if (m_visitedArrays.size() > maxStackDepthLimit) return false;
+
+    bool result = true;
+    m_arrayLimit -= length;
+    m_visitedArrays.push_back(array);
+    for (uint32_t i = 0; i < length; ++i) {
+      if (i) m_builder.append(',');
+      v8::Local<v8::Value> value;
+      if (!array->Get(m_context, i).ToLocal(&value)) continue;
+      if (!append(value, IgnoreNull | IgnoreUndefined)) {
+        result = false;
+        break;
+      }
+    }
+    m_visitedArrays.pop_back();
+    return result;
+  }
+
+  bool append(v8::Local<v8::Symbol> symbol) {
+    m_builder.append("Symbol(");
+    bool result = append(symbol->Name(), IgnoreUndefined);
+    m_builder.append(')');
+    return result;
+  }
+
+  bool append(v8::Local<v8::String> string) {
+    if (m_tryCatch.HasCaught()) return false;
+    if (!string.IsEmpty()) m_builder.append(toProtocolString(string));
+    return true;
+  }
+
+  String16 toString() {
+    if (m_tryCatch.HasCaught()) return String16();
+    return m_builder.toString();
+  }
+
+  uint32_t m_arrayLimit;
+  v8::Isolate* m_isolate;
+  String16Builder m_builder;
+  std::vector<v8::Local<v8::Array>> m_visitedArrays;
+  v8::TryCatch m_tryCatch;
+  v8::Local<v8::Context> m_context;
+};
+
+}  // namespace
+
+V8ConsoleMessage::V8ConsoleMessage(V8MessageOrigin origin, double timestamp,
+                                   const String16& message)
+    : m_origin(origin),
+      m_timestamp(timestamp),
+      m_message(message),
+      m_lineNumber(0),
+      m_columnNumber(0),
+      m_scriptId(0),
+      m_contextId(0),
+      m_type(ConsoleAPIType::kLog),
+      m_exceptionId(0),
+      m_revokedExceptionId(0) {}
+
+V8ConsoleMessage::~V8ConsoleMessage() {}
+
+void V8ConsoleMessage::setLocation(const String16& url, unsigned lineNumber,
+                                   unsigned columnNumber,
+                                   std::unique_ptr<V8StackTraceImpl> stackTrace,
+                                   int scriptId) {
+  m_url = url;
+  m_lineNumber = lineNumber;
+  m_columnNumber = columnNumber;
+  m_stackTrace = std::move(stackTrace);
+  m_scriptId = scriptId;
+}
+
+void V8ConsoleMessage::reportToFrontend(
+    protocol::Console::Frontend* frontend) const {
+  DCHECK(m_origin == V8MessageOrigin::kConsole);
+  String16 level = protocol::Console::ConsoleMessage::LevelEnum::Log;
+  if (m_type == ConsoleAPIType::kDebug || m_type == ConsoleAPIType::kCount ||
+      m_type == ConsoleAPIType::kTimeEnd)
+    level = protocol::Console::ConsoleMessage::LevelEnum::Debug;
+  else if (m_type == ConsoleAPIType::kError ||
+           m_type == ConsoleAPIType::kAssert)
+    level = protocol::Console::ConsoleMessage::LevelEnum::Error;
+  else if (m_type == ConsoleAPIType::kWarning)
+    level = protocol::Console::ConsoleMessage::LevelEnum::Warning;
+  else if (m_type == ConsoleAPIType::kInfo)
+    level = protocol::Console::ConsoleMessage::LevelEnum::Info;
+  std::unique_ptr<protocol::Console::ConsoleMessage> result =
+      protocol::Console::ConsoleMessage::create()
+          .setSource(protocol::Console::ConsoleMessage::SourceEnum::ConsoleApi)
+          .setLevel(level)
+          .setText(m_message)
+          .build();
+  result->setLine(static_cast<int>(m_lineNumber));
+  result->setColumn(static_cast<int>(m_columnNumber));
+  result->setUrl(m_url);
+  frontend->messageAdded(std::move(result));
+}
+
+std::unique_ptr<protocol::Array<protocol::Runtime::RemoteObject>>
+V8ConsoleMessage::wrapArguments(V8InspectorSessionImpl* session,
+                                bool generatePreview) const {
+  V8InspectorImpl* inspector = session->inspector();
+  int contextGroupId = session->contextGroupId();
+  int contextId = m_contextId;
+  if (!m_arguments.size() || !contextId) return nullptr;
+  InspectedContext* inspectedContext =
+      inspector->getContext(contextGroupId, contextId);
+  if (!inspectedContext) return nullptr;
+
+  v8::Isolate* isolate = inspectedContext->isolate();
+  v8::HandleScope handles(isolate);
+  v8::Local<v8::Context> context = inspectedContext->context();
+
+  std::unique_ptr<protocol::Array<protocol::Runtime::RemoteObject>> args =
+      protocol::Array<protocol::Runtime::RemoteObject>::create();
+  if (m_type == ConsoleAPIType::kTable && generatePreview) {
+    v8::Local<v8::Value> table = m_arguments[0]->Get(isolate);
+    v8::Local<v8::Value> columns = m_arguments.size() > 1
+                                       ? m_arguments[1]->Get(isolate)
+                                       : v8::Local<v8::Value>();
+    std::unique_ptr<protocol::Runtime::RemoteObject> wrapped =
+        session->wrapTable(context, table, columns);
+    inspectedContext = inspector->getContext(contextGroupId, contextId);
+    if (!inspectedContext) return nullptr;
+    if (wrapped)
+      args->addItem(std::move(wrapped));
+    else
+      args = nullptr;
+  } else {
+    for (size_t i = 0; i < m_arguments.size(); ++i) {
+      std::unique_ptr<protocol::Runtime::RemoteObject> wrapped =
+          session->wrapObject(context, m_arguments[i]->Get(isolate), "console",
+                              generatePreview);
+      inspectedContext = inspector->getContext(contextGroupId, contextId);
+      if (!inspectedContext) return nullptr;
+      if (!wrapped) {
+        args = nullptr;
+        break;
+      }
+      args->addItem(std::move(wrapped));
+    }
+  }
+  return args;
+}
+
+void V8ConsoleMessage::reportToFrontend(protocol::Runtime::Frontend* frontend,
+                                        V8InspectorSessionImpl* session,
+                                        bool generatePreview) const {
+  int contextGroupId = session->contextGroupId();
+  V8InspectorImpl* inspector = session->inspector();
+
+  if (m_origin == V8MessageOrigin::kException) {
+    std::unique_ptr<protocol::Runtime::RemoteObject> exception =
+        wrapException(session, generatePreview);
+    if (!inspector->hasConsoleMessageStorage(contextGroupId)) return;
+    std::unique_ptr<protocol::Runtime::ExceptionDetails> exceptionDetails =
+        protocol::Runtime::ExceptionDetails::create()
+            .setExceptionId(m_exceptionId)
+            .setText(exception ? m_message : m_detailedMessage)
+            .setLineNumber(m_lineNumber ? m_lineNumber - 1 : 0)
+            .setColumnNumber(m_columnNumber ? m_columnNumber - 1 : 0)
+            .build();
+    if (m_scriptId)
+      exceptionDetails->setScriptId(String16::fromInteger(m_scriptId));
+    if (!m_url.isEmpty()) exceptionDetails->setUrl(m_url);
+    if (m_stackTrace)
+      exceptionDetails->setStackTrace(m_stackTrace->buildInspectorObjectImpl());
+    if (m_contextId) exceptionDetails->setExecutionContextId(m_contextId);
+    if (exception) exceptionDetails->setException(std::move(exception));
+    frontend->exceptionThrown(m_timestamp, std::move(exceptionDetails));
+    return;
+  }
+  if (m_origin == V8MessageOrigin::kRevokedException) {
+    frontend->exceptionRevoked(m_message, m_revokedExceptionId);
+    return;
+  }
+  if (m_origin == V8MessageOrigin::kConsole) {
+    std::unique_ptr<protocol::Array<protocol::Runtime::RemoteObject>>
+        arguments = wrapArguments(session, generatePreview);
+    if (!inspector->hasConsoleMessageStorage(contextGroupId)) return;
+    if (!arguments) {
+      arguments = protocol::Array<protocol::Runtime::RemoteObject>::create();
+      if (!m_message.isEmpty()) {
+        std::unique_ptr<protocol::Runtime::RemoteObject> messageArg =
+            protocol::Runtime::RemoteObject::create()
+                .setType(protocol::Runtime::RemoteObject::TypeEnum::String)
+                .build();
+        messageArg->setValue(protocol::StringValue::create(m_message));
+        arguments->addItem(std::move(messageArg));
+      }
+    }
+    frontend->consoleAPICalled(
+        consoleAPITypeValue(m_type), std::move(arguments), m_contextId,
+        m_timestamp,
+        m_stackTrace ? m_stackTrace->buildInspectorObjectImpl() : nullptr);
+    return;
+  }
+  UNREACHABLE();
+}
+
+std::unique_ptr<protocol::Runtime::RemoteObject>
+V8ConsoleMessage::wrapException(V8InspectorSessionImpl* session,
+                                bool generatePreview) const {
+  if (!m_arguments.size() || !m_contextId) return nullptr;
+  DCHECK_EQ(1u, m_arguments.size());
+  InspectedContext* inspectedContext =
+      session->inspector()->getContext(session->contextGroupId(), m_contextId);
+  if (!inspectedContext) return nullptr;
+
+  v8::Isolate* isolate = inspectedContext->isolate();
+  v8::HandleScope handles(isolate);
+  // TODO(dgozman): should we use different object group?
+  return session->wrapObject(inspectedContext->context(),
+                             m_arguments[0]->Get(isolate), "console",
+                             generatePreview);
+}
+
+V8MessageOrigin V8ConsoleMessage::origin() const { return m_origin; }
+
+ConsoleAPIType V8ConsoleMessage::type() const { return m_type; }
+
+// static
+std::unique_ptr<V8ConsoleMessage> V8ConsoleMessage::createForConsoleAPI(
+    double timestamp, ConsoleAPIType type,
+    const std::vector<v8::Local<v8::Value>>& arguments,
+    std::unique_ptr<V8StackTraceImpl> stackTrace,
+    InspectedContext* inspectedContext) {
+  v8::Isolate* isolate = inspectedContext->isolate();
+  int contextId = inspectedContext->contextId();
+  int contextGroupId = inspectedContext->contextGroupId();
+  V8InspectorImpl* inspector = inspectedContext->inspector();
+  v8::Local<v8::Context> context = inspectedContext->context();
+
+  std::unique_ptr<V8ConsoleMessage> message = wrapUnique(
+      new V8ConsoleMessage(V8MessageOrigin::kConsole, timestamp, String16()));
+  if (stackTrace && !stackTrace->isEmpty()) {
+    message->m_url = toString16(stackTrace->topSourceURL());
+    message->m_lineNumber = stackTrace->topLineNumber();
+    message->m_columnNumber = stackTrace->topColumnNumber();
+  }
+  message->m_stackTrace = std::move(stackTrace);
+  message->m_type = type;
+  message->m_contextId = contextId;
+  for (size_t i = 0; i < arguments.size(); ++i)
+    message->m_arguments.push_back(
+        wrapUnique(new v8::Global<v8::Value>(isolate, arguments.at(i))));
+  if (arguments.size())
+    message->m_message = V8ValueStringBuilder::toString(arguments[0], context);
+
+  V8ConsoleAPIType clientType = V8ConsoleAPIType::kLog;
+  if (type == ConsoleAPIType::kDebug || type == ConsoleAPIType::kCount ||
+      type == ConsoleAPIType::kTimeEnd)
+    clientType = V8ConsoleAPIType::kDebug;
+  else if (type == ConsoleAPIType::kError || type == ConsoleAPIType::kAssert)
+    clientType = V8ConsoleAPIType::kError;
+  else if (type == ConsoleAPIType::kWarning)
+    clientType = V8ConsoleAPIType::kWarning;
+  else if (type == ConsoleAPIType::kInfo)
+    clientType = V8ConsoleAPIType::kInfo;
+  else if (type == ConsoleAPIType::kClear)
+    clientType = V8ConsoleAPIType::kClear;
+  inspector->client()->consoleAPIMessage(
+      contextGroupId, clientType, toStringView(message->m_message),
+      toStringView(message->m_url), message->m_lineNumber,
+      message->m_columnNumber, message->m_stackTrace.get());
+
+  return message;
+}
+
+// static
+std::unique_ptr<V8ConsoleMessage> V8ConsoleMessage::createForException(
+    double timestamp, const String16& detailedMessage, const String16& url,
+    unsigned lineNumber, unsigned columnNumber,
+    std::unique_ptr<V8StackTraceImpl> stackTrace, int scriptId,
+    v8::Isolate* isolate, const String16& message, int contextId,
+    v8::Local<v8::Value> exception, unsigned exceptionId) {
+  std::unique_ptr<V8ConsoleMessage> consoleMessage = wrapUnique(
+      new V8ConsoleMessage(V8MessageOrigin::kException, timestamp, message));
+  consoleMessage->setLocation(url, lineNumber, columnNumber,
+                              std::move(stackTrace), scriptId);
+  consoleMessage->m_exceptionId = exceptionId;
+  consoleMessage->m_detailedMessage = detailedMessage;
+  if (contextId && !exception.IsEmpty()) {
+    consoleMessage->m_contextId = contextId;
+    consoleMessage->m_arguments.push_back(
+        wrapUnique(new v8::Global<v8::Value>(isolate, exception)));
+  }
+  return consoleMessage;
+}
+
+// static
+std::unique_ptr<V8ConsoleMessage> V8ConsoleMessage::createForRevokedException(
+    double timestamp, const String16& messageText,
+    unsigned revokedExceptionId) {
+  std::unique_ptr<V8ConsoleMessage> message = wrapUnique(new V8ConsoleMessage(
+      V8MessageOrigin::kRevokedException, timestamp, messageText));
+  message->m_revokedExceptionId = revokedExceptionId;
+  return message;
+}
+
+void V8ConsoleMessage::contextDestroyed(int contextId) {
+  if (contextId != m_contextId) return;
+  m_contextId = 0;
+  if (m_message.isEmpty()) m_message = "<message collected>";
+  Arguments empty;
+  m_arguments.swap(empty);
+}
+
+// ------------------------ V8ConsoleMessageStorage ----------------------------
+
+V8ConsoleMessageStorage::V8ConsoleMessageStorage(V8InspectorImpl* inspector,
+                                                 int contextGroupId)
+    : m_inspector(inspector),
+      m_contextGroupId(contextGroupId),
+      m_expiredCount(0) {}
+
+V8ConsoleMessageStorage::~V8ConsoleMessageStorage() { clear(); }
+
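+// Notifies the active session (if any) and appends the message, evicting the
+// oldest entry once maxConsoleMessageCount messages are buffered.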
+void V8ConsoleMessageStorage::addMessage(
+    std::unique_ptr<V8ConsoleMessage> message) {
+  int contextGroupId = m_contextGroupId;
+  V8InspectorImpl* inspector = m_inspector;
+  if (message->type() == ConsoleAPIType::kClear) clear();
+
+  V8InspectorSessionImpl* session =
+      inspector->sessionForContextGroup(contextGroupId);
+  if (session) {
+    if (message->origin() == V8MessageOrigin::kConsole)
+      session->consoleAgent()->messageAdded(message.get());
+    session->runtimeAgent()->messageAdded(message.get());
+  }
+  if (!inspector->hasConsoleMessageStorage(contextGroupId)) return;
+
+  DCHECK(m_messages.size() <= maxConsoleMessageCount);
+  if (m_messages.size() == maxConsoleMessageCount) {
+    ++m_expiredCount;
+    m_messages.pop_front();
+  }
+  m_messages.push_back(std::move(message));
+}
+
+void V8ConsoleMessageStorage::clear() {
+  m_messages.clear();
+  m_expiredCount = 0;
+  if (V8InspectorSessionImpl* session =
+          m_inspector->sessionForContextGroup(m_contextGroupId))
+    session->releaseObjectGroup("console");
+}
+
+void V8ConsoleMessageStorage::contextDestroyed(int contextId) {
+  for (size_t i = 0; i < m_messages.size(); ++i)
+    m_messages[i]->contextDestroyed(contextId);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-console-message.h b/src/inspector/v8-console-message.h
new file mode 100644
index 0000000..a6e9eaf
--- /dev/null
+++ b/src/inspector/v8-console-message.h
@@ -0,0 +1,120 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8CONSOLEMESSAGE_H_
+#define V8_INSPECTOR_V8CONSOLEMESSAGE_H_
+
+#include <deque>
+#include "include/v8.h"
+#include "src/inspector/protocol/Console.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+
+namespace v8_inspector {
+
+class InspectedContext;
+class V8InspectorImpl;
+class V8InspectorSessionImpl;
+class V8StackTraceImpl;
+
+enum class V8MessageOrigin { kConsole, kException, kRevokedException };
+
+enum class ConsoleAPIType {
+  kLog,
+  kDebug,
+  kInfo,
+  kError,
+  kWarning,
+  kDir,
+  kDirXML,
+  kTable,
+  kTrace,
+  kStartGroup,
+  kStartGroupCollapsed,
+  kEndGroup,
+  kClear,
+  kAssert,
+  kTimeEnd,
+  kCount
+};
+
+class V8ConsoleMessage {
+ public:
+  ~V8ConsoleMessage();
+
+  static std::unique_ptr<V8ConsoleMessage> createForConsoleAPI(
+      double timestamp, ConsoleAPIType,
+      const std::vector<v8::Local<v8::Value>>& arguments,
+      std::unique_ptr<V8StackTraceImpl>, InspectedContext*);
+
+  static std::unique_ptr<V8ConsoleMessage> createForException(
+      double timestamp, const String16& detailedMessage, const String16& url,
+      unsigned lineNumber, unsigned columnNumber,
+      std::unique_ptr<V8StackTraceImpl>, int scriptId, v8::Isolate*,
+      const String16& message, int contextId, v8::Local<v8::Value> exception,
+      unsigned exceptionId);
+
+  static std::unique_ptr<V8ConsoleMessage> createForRevokedException(
+      double timestamp, const String16& message, unsigned revokedExceptionId);
+
+  V8MessageOrigin origin() const;
+  void reportToFrontend(protocol::Console::Frontend*) const;
+  void reportToFrontend(protocol::Runtime::Frontend*, V8InspectorSessionImpl*,
+                        bool generatePreview) const;
+  ConsoleAPIType type() const;
+  void contextDestroyed(int contextId);
+
+ private:
+  V8ConsoleMessage(V8MessageOrigin, double timestamp, const String16& message);
+
+  using Arguments = std::vector<std::unique_ptr<v8::Global<v8::Value>>>;
+  std::unique_ptr<protocol::Array<protocol::Runtime::RemoteObject>>
+  wrapArguments(V8InspectorSessionImpl*, bool generatePreview) const;
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapException(
+      V8InspectorSessionImpl*, bool generatePreview) const;
+  void setLocation(const String16& url, unsigned lineNumber,
+                   unsigned columnNumber, std::unique_ptr<V8StackTraceImpl>,
+                   int scriptId);
+
+  V8MessageOrigin m_origin;
+  double m_timestamp;
+  String16 m_message;
+  String16 m_url;
+  unsigned m_lineNumber;
+  unsigned m_columnNumber;
+  std::unique_ptr<V8StackTraceImpl> m_stackTrace;
+  int m_scriptId;
+  int m_contextId;
+  ConsoleAPIType m_type;
+  unsigned m_exceptionId;
+  unsigned m_revokedExceptionId;
+  Arguments m_arguments;
+  String16 m_detailedMessage;
+};
+
+class V8ConsoleMessageStorage {
+ public:
+  V8ConsoleMessageStorage(V8InspectorImpl*, int contextGroupId);
+  ~V8ConsoleMessageStorage();
+
+  int contextGroupId() { return m_contextGroupId; }
+  int expiredCount() { return m_expiredCount; }
+  const std::deque<std::unique_ptr<V8ConsoleMessage>>& messages() const {
+    return m_messages;
+  }
+
+  void addMessage(std::unique_ptr<V8ConsoleMessage>);
+  void contextDestroyed(int contextId);
+  void clear();
+
+ private:
+  V8InspectorImpl* m_inspector;
+  int m_contextGroupId;
+  int m_expiredCount;
+  std::deque<std::unique_ptr<V8ConsoleMessage>> m_messages;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8CONSOLEMESSAGE_H_
diff --git a/src/inspector/v8-console.cc b/src/inspector/v8-console.cc
new file mode 100644
index 0000000..ddd4bf6
--- /dev/null
+++ b/src/inspector/v8-console.cc
@@ -0,0 +1,922 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-console.h"
+
+#include "src/base/macros.h"
+#include "src/inspector/injected-script.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console-message.h"
+#include "src/inspector/v8-debugger-agent-impl.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-profiler-agent-impl.h"
+#include "src/inspector/v8-runtime-agent-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+#include "src/inspector/v8-value-copier.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+namespace {
+
+v8::Local<v8::Private> inspectedContextPrivateKey(v8::Isolate* isolate) {
+  return v8::Private::ForApi(
+      isolate, toV8StringInternalized(isolate, "V8Console#InspectedContext"));
+}
+
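+// Per-call helper that lazily resolves the console object passed as callback
+// data, its InspectedContext and the owning inspector client, and funnels
+// console API calls into V8ConsoleMessage instances.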
+class ConsoleHelper {
+ public:
+  explicit ConsoleHelper(const v8::FunctionCallbackInfo<v8::Value>& info)
+      : m_info(info),
+        m_isolate(info.GetIsolate()),
+        m_context(info.GetIsolate()->GetCurrentContext()),
+        m_inspectedContext(nullptr),
+        m_inspectorClient(nullptr) {}
+
+  v8::Local<v8::Object> ensureConsole() {
+    if (m_console.IsEmpty()) {
+      DCHECK(!m_info.Data().IsEmpty());
+      DCHECK(!m_info.Data()->IsUndefined());
+      m_console = m_info.Data().As<v8::Object>();
+    }
+    return m_console;
+  }
+
+  InspectedContext* ensureInspectedContext() {
+    if (m_inspectedContext) return m_inspectedContext;
+    v8::Local<v8::Object> console = ensureConsole();
+
+    v8::Local<v8::Private> key = inspectedContextPrivateKey(m_isolate);
+    v8::Local<v8::Value> inspectedContextValue;
+    if (!console->GetPrivate(m_context, key).ToLocal(&inspectedContextValue))
+      return nullptr;
+    DCHECK(inspectedContextValue->IsExternal());
+    m_inspectedContext = static_cast<InspectedContext*>(
+        inspectedContextValue.As<v8::External>()->Value());
+    return m_inspectedContext;
+  }
+
+  V8InspectorClient* ensureDebuggerClient() {
+    if (m_inspectorClient) return m_inspectorClient;
+    InspectedContext* inspectedContext = ensureInspectedContext();
+    if (!inspectedContext) return nullptr;
+    m_inspectorClient = inspectedContext->inspector()->client();
+    return m_inspectorClient;
+  }
+
+  void reportCall(ConsoleAPIType type) {
+    if (!m_info.Length()) return;
+    std::vector<v8::Local<v8::Value>> arguments;
+    for (int i = 0; i < m_info.Length(); ++i) arguments.push_back(m_info[i]);
+    reportCall(type, arguments);
+  }
+
+  void reportCallWithDefaultArgument(ConsoleAPIType type,
+                                     const String16& message) {
+    std::vector<v8::Local<v8::Value>> arguments;
+    for (int i = 0; i < m_info.Length(); ++i) arguments.push_back(m_info[i]);
+    if (!m_info.Length()) arguments.push_back(toV8String(m_isolate, message));
+    reportCall(type, arguments);
+  }
+
+  void reportCallWithArgument(ConsoleAPIType type, const String16& message) {
+    std::vector<v8::Local<v8::Value>> arguments(1,
+                                                toV8String(m_isolate, message));
+    reportCall(type, arguments);
+  }
+
+  void reportCall(ConsoleAPIType type,
+                  const std::vector<v8::Local<v8::Value>>& arguments) {
+    InspectedContext* inspectedContext = ensureInspectedContext();
+    if (!inspectedContext) return;
+    int contextGroupId = inspectedContext->contextGroupId();
+    V8InspectorImpl* inspector = inspectedContext->inspector();
+    std::unique_ptr<V8ConsoleMessage> message =
+        V8ConsoleMessage::createForConsoleAPI(
+            inspector->client()->currentTimeMS(), type, arguments,
+            inspector->debugger()->captureStackTrace(false), inspectedContext);
+    inspector->ensureConsoleMessageStorage(contextGroupId)
+        ->addMessage(std::move(message));
+  }
+
+  void reportDeprecatedCall(const char* id, const String16& message) {
+    if (checkAndSetPrivateFlagOnConsole(id, false)) return;
+    std::vector<v8::Local<v8::Value>> arguments(1,
+                                                toV8String(m_isolate, message));
+    reportCall(ConsoleAPIType::kWarning, arguments);
+  }
+
+  bool firstArgToBoolean(bool defaultValue) {
+    if (m_info.Length() < 1) return defaultValue;
+    if (m_info[0]->IsBoolean()) return m_info[0].As<v8::Boolean>()->Value();
+    return m_info[0]->BooleanValue(m_context).FromMaybe(defaultValue);
+  }
+
+  String16 firstArgToString(const String16& defaultValue) {
+    if (m_info.Length() < 1) return defaultValue;
+    v8::Local<v8::String> titleValue;
+    if (m_info[0]->IsObject()) {
+      if (!m_info[0].As<v8::Object>()->ObjectProtoToString(m_context).ToLocal(
+              &titleValue))
+        return defaultValue;
+    } else {
+      if (!m_info[0]->ToString(m_context).ToLocal(&titleValue))
+        return defaultValue;
+    }
+    return toProtocolString(titleValue);
+  }
+
+  v8::MaybeLocal<v8::Object> firstArgAsObject() {
+    if (m_info.Length() < 1 || !m_info[0]->IsObject())
+      return v8::MaybeLocal<v8::Object>();
+    return m_info[0].As<v8::Object>();
+  }
+
+  v8::MaybeLocal<v8::Function> firstArgAsFunction() {
+    if (m_info.Length() < 1 || !m_info[0]->IsFunction())
+      return v8::MaybeLocal<v8::Function>();
+    v8::Local<v8::Function> func = m_info[0].As<v8::Function>();
+    while (func->GetBoundFunction()->IsFunction())
+      func = func->GetBoundFunction().As<v8::Function>();
+    return func;
+  }
+
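+  // Lazily creates a v8::Map kept in a private property on the console
+  // object; console.count and console.time/timeEnd use it to store
+  // per-console counters and start times.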
+  v8::MaybeLocal<v8::Map> privateMap(const char* name) {
+    v8::Local<v8::Object> console = ensureConsole();
+    v8::Local<v8::Private> privateKey =
+        v8::Private::ForApi(m_isolate, toV8StringInternalized(m_isolate, name));
+    v8::Local<v8::Value> mapValue;
+    if (!console->GetPrivate(m_context, privateKey).ToLocal(&mapValue))
+      return v8::MaybeLocal<v8::Map>();
+    if (mapValue->IsUndefined()) {
+      v8::Local<v8::Map> map = v8::Map::New(m_isolate);
+      if (!console->SetPrivate(m_context, privateKey, map).FromMaybe(false))
+        return v8::MaybeLocal<v8::Map>();
+      return map;
+    }
+    return mapValue->IsMap() ? mapValue.As<v8::Map>()
+                             : v8::MaybeLocal<v8::Map>();
+  }
+
+  int32_t getIntFromMap(v8::Local<v8::Map> map, const String16& key,
+                        int32_t defaultValue) {
+    v8::Local<v8::String> v8Key = toV8String(m_isolate, key);
+    if (!map->Has(m_context, v8Key).FromMaybe(false)) return defaultValue;
+    v8::Local<v8::Value> intValue;
+    if (!map->Get(m_context, v8Key).ToLocal(&intValue)) return defaultValue;
+    return static_cast<int32_t>(intValue.As<v8::Integer>()->Value());
+  }
+
+  void setIntOnMap(v8::Local<v8::Map> map, const String16& key, int32_t value) {
+    v8::Local<v8::String> v8Key = toV8String(m_isolate, key);
+    if (!map->Set(m_context, v8Key, v8::Integer::New(m_isolate, value))
+             .ToLocal(&map))
+      return;
+  }
+
+  double getDoubleFromMap(v8::Local<v8::Map> map, const String16& key,
+                          double defaultValue) {
+    v8::Local<v8::String> v8Key = toV8String(m_isolate, key);
+    if (!map->Has(m_context, v8Key).FromMaybe(false)) return defaultValue;
+    v8::Local<v8::Value> intValue;
+    if (!map->Get(m_context, v8Key).ToLocal(&intValue)) return defaultValue;
+    return intValue.As<v8::Number>()->Value();
+  }
+
+  void setDoubleOnMap(v8::Local<v8::Map> map, const String16& key,
+                      double value) {
+    v8::Local<v8::String> v8Key = toV8String(m_isolate, key);
+    if (!map->Set(m_context, v8Key, v8::Number::New(m_isolate, value))
+             .ToLocal(&map))
+      return;
+  }
+
+  V8ProfilerAgentImpl* profilerAgent() {
+    if (V8InspectorSessionImpl* session = currentSession()) {
+      if (session && session->profilerAgent()->enabled())
+        return session->profilerAgent();
+    }
+    return nullptr;
+  }
+
+  V8DebuggerAgentImpl* debuggerAgent() {
+    if (V8InspectorSessionImpl* session = currentSession()) {
+      if (session && session->debuggerAgent()->enabled())
+        return session->debuggerAgent();
+    }
+    return nullptr;
+  }
+
+  V8InspectorSessionImpl* currentSession() {
+    InspectedContext* inspectedContext = ensureInspectedContext();
+    if (!inspectedContext) return nullptr;
+    return inspectedContext->inspector()->sessionForContextGroup(
+        inspectedContext->contextGroupId());
+  }
+
+ private:
+  const v8::FunctionCallbackInfo<v8::Value>& m_info;
+  v8::Isolate* m_isolate;
+  v8::Local<v8::Context> m_context;
+  v8::Local<v8::Object> m_console;
+  InspectedContext* m_inspectedContext;
+  V8InspectorClient* m_inspectorClient;
+
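+  // Returns true if the named private flag is already set on the console
+  // object; otherwise sets it and returns false (or |defaultValue| if the
+  // private property cannot be read or written). reportDeprecatedCall uses
+  // this to emit each deprecation warning only once per console object.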
+  bool checkAndSetPrivateFlagOnConsole(const char* name, bool defaultValue) {
+    v8::Local<v8::Object> console = ensureConsole();
+    v8::Local<v8::Private> key =
+        v8::Private::ForApi(m_isolate, toV8StringInternalized(m_isolate, name));
+    v8::Local<v8::Value> flagValue;
+    if (!console->GetPrivate(m_context, key).ToLocal(&flagValue))
+      return defaultValue;
+    DCHECK(flagValue->IsUndefined() || flagValue->IsBoolean());
+    if (flagValue->IsBoolean()) {
+      DCHECK(flagValue.As<v8::Boolean>()->Value());
+      return true;
+    }
+    if (!console->SetPrivate(m_context, key, v8::True(m_isolate))
+             .FromMaybe(false))
+      return defaultValue;
+    return false;
+  }
+
+  DISALLOW_COPY_AND_ASSIGN(ConsoleHelper);
+};
+
+void returnDataCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  info.GetReturnValue().Set(info.Data());
+}
+
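+// Installs |callback| on |console| as a non-constructable function property
+// named |name|. When |description| is given, the function also receives a
+// custom toString returning that description, which is how the Command Line
+// API helpers describe themselves.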
+void createBoundFunctionProperty(v8::Local<v8::Context> context,
+                                 v8::Local<v8::Object> console,
+                                 const char* name,
+                                 v8::FunctionCallback callback,
+                                 const char* description = nullptr) {
+  v8::Local<v8::String> funcName =
+      toV8StringInternalized(context->GetIsolate(), name);
+  v8::Local<v8::Function> func;
+  if (!v8::Function::New(context, callback, console, 0,
+                         v8::ConstructorBehavior::kThrow)
+           .ToLocal(&func))
+    return;
+  func->SetName(funcName);
+  if (description) {
+    v8::Local<v8::String> returnValue =
+        toV8String(context->GetIsolate(), description);
+    v8::Local<v8::Function> toStringFunction;
+    if (v8::Function::New(context, returnDataCallback, returnValue, 0,
+                          v8::ConstructorBehavior::kThrow)
+            .ToLocal(&toStringFunction))
+      createDataProperty(context, func, toV8StringInternalized(
+                                            context->GetIsolate(), "toString"),
+                         toStringFunction);
+  }
+  createDataProperty(context, console, funcName, func);
+}
+
+}  // namespace
+
+void V8Console::debugCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kDebug);
+}
+
+void V8Console::errorCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kError);
+}
+
+void V8Console::infoCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kInfo);
+}
+
+void V8Console::logCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kLog);
+}
+
+void V8Console::warnCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kWarning);
+}
+
+void V8Console::dirCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kDir);
+}
+
+void V8Console::dirxmlCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kDirXML);
+}
+
+void V8Console::tableCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCall(ConsoleAPIType::kTable);
+}
+
+void V8Console::traceCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCallWithDefaultArgument(ConsoleAPIType::kTrace,
+                                                    String16("console.trace"));
+}
+
+void V8Console::groupCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCallWithDefaultArgument(ConsoleAPIType::kStartGroup,
+                                                    String16("console.group"));
+}
+
+void V8Console::groupCollapsedCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCallWithDefaultArgument(
+      ConsoleAPIType::kStartGroupCollapsed, String16("console.groupCollapsed"));
+}
+
+void V8Console::groupEndCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCallWithDefaultArgument(
+      ConsoleAPIType::kEndGroup, String16("console.groupEnd"));
+}
+
+void V8Console::clearCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportCallWithDefaultArgument(ConsoleAPIType::kClear,
+                                                    String16("console.clear"));
+}
+
+void V8Console::countCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+
+  String16 title = helper.firstArgToString(String16());
+  String16 identifier;
+  if (title.isEmpty()) {
+    std::unique_ptr<V8StackTraceImpl> stackTrace =
+        V8StackTraceImpl::capture(nullptr, 0, 1);
+    if (stackTrace && !stackTrace->isEmpty()) {
+      identifier = toString16(stackTrace->topSourceURL()) + ":" +
+                   String16::fromInteger(stackTrace->topLineNumber());
+    }
+  } else {
+    identifier = title + "@";
+  }
+
+  v8::Local<v8::Map> countMap;
+  if (!helper.privateMap("V8Console#countMap").ToLocal(&countMap)) return;
+  int32_t count = helper.getIntFromMap(countMap, identifier, 0) + 1;
+  helper.setIntOnMap(countMap, identifier, count);
+  helper.reportCallWithArgument(ConsoleAPIType::kCount,
+                                title + ": " + String16::fromInteger(count));
+}
+
+void V8Console::assertCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  if (helper.firstArgToBoolean(false)) return;
+
+  std::vector<v8::Local<v8::Value>> arguments;
+  for (int i = 1; i < info.Length(); ++i) arguments.push_back(info[i]);
+  if (info.Length() < 2)
+    arguments.push_back(
+        toV8String(info.GetIsolate(), String16("console.assert")));
+  helper.reportCall(ConsoleAPIType::kAssert, arguments);
+
+  if (V8DebuggerAgentImpl* debuggerAgent = helper.debuggerAgent())
+    debuggerAgent->breakProgramOnException(
+        protocol::Debugger::Paused::ReasonEnum::Assert, nullptr);
+}
+
+void V8Console::markTimelineCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportDeprecatedCall("V8Console#markTimelineDeprecated",
+                                           "'console.markTimeline' is "
+                                           "deprecated. Please use "
+                                           "'console.timeStamp' instead.");
+  timeStampCallback(info);
+}
+
+void V8Console::profileCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  if (V8ProfilerAgentImpl* profilerAgent = helper.profilerAgent())
+    profilerAgent->consoleProfile(helper.firstArgToString(String16()));
+}
+
+void V8Console::profileEndCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  if (V8ProfilerAgentImpl* profilerAgent = helper.profilerAgent())
+    profilerAgent->consoleProfileEnd(helper.firstArgToString(String16()));
+}
+
+static void timeFunction(const v8::FunctionCallbackInfo<v8::Value>& info,
+                         bool timelinePrefix) {
+  ConsoleHelper helper(info);
+  if (V8InspectorClient* client = helper.ensureDebuggerClient()) {
+    String16 protocolTitle = helper.firstArgToString("default");
+    if (timelinePrefix) protocolTitle = "Timeline '" + protocolTitle + "'";
+    client->consoleTime(toStringView(protocolTitle));
+
+    v8::Local<v8::Map> timeMap;
+    if (!helper.privateMap("V8Console#timeMap").ToLocal(&timeMap)) return;
+    helper.setDoubleOnMap(timeMap, protocolTitle, client->currentTimeMS());
+  }
+}
+
+static void timeEndFunction(const v8::FunctionCallbackInfo<v8::Value>& info,
+                            bool timelinePrefix) {
+  ConsoleHelper helper(info);
+  if (V8InspectorClient* client = helper.ensureDebuggerClient()) {
+    String16 protocolTitle = helper.firstArgToString("default");
+    if (timelinePrefix) protocolTitle = "Timeline '" + protocolTitle + "'";
+    client->consoleTimeEnd(toStringView(protocolTitle));
+
+    v8::Local<v8::Map> timeMap;
+    if (!helper.privateMap("V8Console#timeMap").ToLocal(&timeMap)) return;
+    double elapsed = client->currentTimeMS() -
+                     helper.getDoubleFromMap(timeMap, protocolTitle, 0.0);
+    String16 message =
+        protocolTitle + ": " + String16::fromDouble(elapsed, 3) + "ms";
+    helper.reportCallWithArgument(ConsoleAPIType::kTimeEnd, message);
+  }
+}
+
+void V8Console::timelineCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportDeprecatedCall(
+      "V8Console#timeline",
+      "'console.timeline' is deprecated. Please use 'console.time' instead.");
+  timeFunction(info, true);
+}
+
+void V8Console::timelineEndCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper(info).reportDeprecatedCall("V8Console#timelineEnd",
+                                           "'console.timelineEnd' is "
+                                           "deprecated. Please use "
+                                           "'console.timeEnd' instead.");
+  timeEndFunction(info, true);
+}
+
+void V8Console::timeCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  timeFunction(info, false);
+}
+
+void V8Console::timeEndCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  timeEndFunction(info, false);
+}
+
+void V8Console::timeStampCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  if (V8InspectorClient* client = helper.ensureDebuggerClient()) {
+    String16 title = helper.firstArgToString(String16());
+    client->consoleTimeStamp(toStringView(title));
+  }
+}
+
+void V8Console::memoryGetterCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (V8InspectorClient* client = ConsoleHelper(info).ensureDebuggerClient()) {
+    v8::Local<v8::Value> memoryValue;
+    if (!client
+             ->memoryInfo(info.GetIsolate(),
+                          info.GetIsolate()->GetCurrentContext())
+             .ToLocal(&memoryValue))
+      return;
+    info.GetReturnValue().Set(memoryValue);
+  }
+}
+
+void V8Console::memorySetterCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  // We can't make the attribute readonly as it breaks existing code that relies
+  // on being able to assign to console.memory in strict mode. Instead, the
+  // setter just ignores the passed value.  http://crbug.com/468611
+}
+
+void V8Console::keysCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  v8::Isolate* isolate = info.GetIsolate();
+  info.GetReturnValue().Set(v8::Array::New(isolate));
+
+  ConsoleHelper helper(info);
+  v8::Local<v8::Object> obj;
+  if (!helper.firstArgAsObject().ToLocal(&obj)) return;
+  v8::Local<v8::Array> names;
+  if (!obj->GetOwnPropertyNames(isolate->GetCurrentContext()).ToLocal(&names))
+    return;
+  info.GetReturnValue().Set(names);
+}
+
+void V8Console::valuesCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  v8::Isolate* isolate = info.GetIsolate();
+  info.GetReturnValue().Set(v8::Array::New(isolate));
+
+  ConsoleHelper helper(info);
+  v8::Local<v8::Object> obj;
+  if (!helper.firstArgAsObject().ToLocal(&obj)) return;
+  v8::Local<v8::Array> names;
+  v8::Local<v8::Context> context = isolate->GetCurrentContext();
+  if (!obj->GetOwnPropertyNames(context).ToLocal(&names)) return;
+  v8::Local<v8::Array> values = v8::Array::New(isolate, names->Length());
+  for (uint32_t i = 0; i < names->Length(); ++i) {
+    v8::Local<v8::Value> key;
+    if (!names->Get(context, i).ToLocal(&key)) continue;
+    v8::Local<v8::Value> value;
+    if (!obj->Get(context, key).ToLocal(&value)) continue;
+    createDataProperty(context, values, i, value);
+  }
+  info.GetReturnValue().Set(values);
+}
+
+static void setFunctionBreakpoint(ConsoleHelper& helper,
+                                  v8::Local<v8::Function> function,
+                                  V8DebuggerAgentImpl::BreakpointSource source,
+                                  const String16& condition, bool enable) {
+  V8DebuggerAgentImpl* debuggerAgent = helper.debuggerAgent();
+  if (!debuggerAgent) return;
+  String16 scriptId = String16::fromInteger(function->ScriptId());
+  int lineNumber = function->GetScriptLineNumber();
+  int columnNumber = function->GetScriptColumnNumber();
+  if (lineNumber == v8::Function::kLineOffsetNotFound ||
+      columnNumber == v8::Function::kLineOffsetNotFound)
+    return;
+  if (enable)
+    debuggerAgent->setBreakpointAt(scriptId, lineNumber, columnNumber, source,
+                                   condition);
+  else
+    debuggerAgent->removeBreakpointAt(scriptId, lineNumber, columnNumber,
+                                      source);
+}
+
+void V8Console::debugFunctionCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  v8::Local<v8::Function> function;
+  if (!helper.firstArgAsFunction().ToLocal(&function)) return;
+  setFunctionBreakpoint(helper, function,
+                        V8DebuggerAgentImpl::DebugCommandBreakpointSource,
+                        String16(), true);
+}
+
+void V8Console::undebugFunctionCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  v8::Local<v8::Function> function;
+  if (!helper.firstArgAsFunction().ToLocal(&function)) return;
+  setFunctionBreakpoint(helper, function,
+                        V8DebuggerAgentImpl::DebugCommandBreakpointSource,
+                        String16(), false);
+}
+
+void V8Console::monitorFunctionCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  v8::Local<v8::Function> function;
+  if (!helper.firstArgAsFunction().ToLocal(&function)) return;
+  v8::Local<v8::Value> name = function->GetName();
+  if (!name->IsString() || !v8::Local<v8::String>::Cast(name)->Length())
+    name = function->GetInferredName();
+  String16 functionName = toProtocolStringWithTypeCheck(name);
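+  // Build a breakpoint condition that logs the call (and any arguments) and
+  // then evaluates to a falsy value, so the monitor breakpoint never pauses.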
+  String16Builder builder;
+  builder.append("console.log(\"function ");
+  if (functionName.isEmpty())
+    builder.append("(anonymous function)");
+  else
+    builder.append(functionName);
+  builder.append(
+      " called\" + (arguments.length > 0 ? \" with arguments: \" + "
+      "Array.prototype.join.call(arguments, \", \") : \"\")) && false");
+  setFunctionBreakpoint(helper, function,
+                        V8DebuggerAgentImpl::MonitorCommandBreakpointSource,
+                        builder.toString(), true);
+}
+
+void V8Console::unmonitorFunctionCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  v8::Local<v8::Function> function;
+  if (!helper.firstArgAsFunction().ToLocal(&function)) return;
+  setFunctionBreakpoint(helper, function,
+                        V8DebuggerAgentImpl::MonitorCommandBreakpointSource,
+                        String16(), false);
+}
+
+void V8Console::lastEvaluationResultCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  ConsoleHelper helper(info);
+  InspectedContext* context = helper.ensureInspectedContext();
+  if (!context) return;
+  if (InjectedScript* injectedScript = context->getInjectedScript())
+    info.GetReturnValue().Set(injectedScript->lastEvaluationResult());
+}
+
+static void inspectImpl(const v8::FunctionCallbackInfo<v8::Value>& info,
+                        bool copyToClipboard) {
+  if (info.Length() < 1) return;
+  if (!copyToClipboard) info.GetReturnValue().Set(info[0]);
+
+  ConsoleHelper helper(info);
+  InspectedContext* context = helper.ensureInspectedContext();
+  if (!context) return;
+  InjectedScript* injectedScript = context->getInjectedScript();
+  if (!injectedScript) return;
+  ErrorString errorString;
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrappedObject =
+      injectedScript->wrapObject(&errorString, info[0], "",
+                                 false /** forceValueType */,
+                                 false /** generatePreview */);
+  if (!wrappedObject || !errorString.isEmpty()) return;
+
+  std::unique_ptr<protocol::DictionaryValue> hints =
+      protocol::DictionaryValue::create();
+  if (copyToClipboard) hints->setBoolean("copyToClipboard", true);
+  if (V8InspectorSessionImpl* session = helper.currentSession())
+    session->runtimeAgent()->inspect(std::move(wrappedObject),
+                                     std::move(hints));
+}
+
+void V8Console::inspectCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  inspectImpl(info, false);
+}
+
+void V8Console::copyCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+  inspectImpl(info, true);
+}
+
+void V8Console::inspectedObject(const v8::FunctionCallbackInfo<v8::Value>& info,
+                                unsigned num) {
+  DCHECK(num < V8InspectorSessionImpl::kInspectedObjectBufferSize);
+  ConsoleHelper helper(info);
+  if (V8InspectorSessionImpl* session = helper.currentSession()) {
+    V8InspectorSession::Inspectable* object = session->inspectedObject(num);
+    v8::Isolate* isolate = info.GetIsolate();
+    if (object)
+      info.GetReturnValue().Set(object->get(isolate->GetCurrentContext()));
+    else
+      info.GetReturnValue().Set(v8::Undefined(isolate));
+  }
+}
+
+v8::Local<v8::Object> V8Console::createConsole(
+    InspectedContext* inspectedContext, bool hasMemoryAttribute) {
+  v8::Local<v8::Context> context = inspectedContext->context();
+  v8::Context::Scope contextScope(context);
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::MicrotasksScope microtasksScope(isolate,
+                                      v8::MicrotasksScope::kDoNotRunMicrotasks);
+
+  v8::Local<v8::Object> console = v8::Object::New(isolate);
+  bool success =
+      console->SetPrototype(context, v8::Object::New(isolate)).FromMaybe(false);
+  DCHECK(success);
+  USE(success);
+
+  createBoundFunctionProperty(context, console, "debug",
+                              V8Console::debugCallback);
+  createBoundFunctionProperty(context, console, "error",
+                              V8Console::errorCallback);
+  createBoundFunctionProperty(context, console, "info",
+                              V8Console::infoCallback);
+  createBoundFunctionProperty(context, console, "log", V8Console::logCallback);
+  createBoundFunctionProperty(context, console, "warn",
+                              V8Console::warnCallback);
+  createBoundFunctionProperty(context, console, "dir", V8Console::dirCallback);
+  createBoundFunctionProperty(context, console, "dirxml",
+                              V8Console::dirxmlCallback);
+  createBoundFunctionProperty(context, console, "table",
+                              V8Console::tableCallback);
+  createBoundFunctionProperty(context, console, "trace",
+                              V8Console::traceCallback);
+  createBoundFunctionProperty(context, console, "group",
+                              V8Console::groupCallback);
+  createBoundFunctionProperty(context, console, "groupCollapsed",
+                              V8Console::groupCollapsedCallback);
+  createBoundFunctionProperty(context, console, "groupEnd",
+                              V8Console::groupEndCallback);
+  createBoundFunctionProperty(context, console, "clear",
+                              V8Console::clearCallback);
+  createBoundFunctionProperty(context, console, "count",
+                              V8Console::countCallback);
+  createBoundFunctionProperty(context, console, "assert",
+                              V8Console::assertCallback);
+  createBoundFunctionProperty(context, console, "markTimeline",
+                              V8Console::markTimelineCallback);
+  createBoundFunctionProperty(context, console, "profile",
+                              V8Console::profileCallback);
+  createBoundFunctionProperty(context, console, "profileEnd",
+                              V8Console::profileEndCallback);
+  createBoundFunctionProperty(context, console, "timeline",
+                              V8Console::timelineCallback);
+  createBoundFunctionProperty(context, console, "timelineEnd",
+                              V8Console::timelineEndCallback);
+  createBoundFunctionProperty(context, console, "time",
+                              V8Console::timeCallback);
+  createBoundFunctionProperty(context, console, "timeEnd",
+                              V8Console::timeEndCallback);
+  createBoundFunctionProperty(context, console, "timeStamp",
+                              V8Console::timeStampCallback);
+
+  if (hasMemoryAttribute)
+    console->SetAccessorProperty(
+        toV8StringInternalized(isolate, "memory"),
+        v8::Function::New(context, V8Console::memoryGetterCallback, console, 0,
+                          v8::ConstructorBehavior::kThrow)
+            .ToLocalChecked(),
+        v8::Function::New(context, V8Console::memorySetterCallback,
+                          v8::Local<v8::Value>(), 0,
+                          v8::ConstructorBehavior::kThrow)
+            .ToLocalChecked(),
+        static_cast<v8::PropertyAttribute>(v8::None), v8::DEFAULT);
+
+  console->SetPrivate(context, inspectedContextPrivateKey(isolate),
+                      v8::External::New(isolate, inspectedContext));
+  return console;
+}
+
+void V8Console::clearInspectedContextIfNeeded(v8::Local<v8::Context> context,
+                                              v8::Local<v8::Object> console) {
+  v8::Isolate* isolate = context->GetIsolate();
+  console->SetPrivate(context, inspectedContextPrivateKey(isolate),
+                      v8::External::New(isolate, nullptr));
+}
+
+v8::Local<v8::Object> V8Console::createCommandLineAPI(
+    InspectedContext* inspectedContext) {
+  v8::Local<v8::Context> context = inspectedContext->context();
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::MicrotasksScope microtasksScope(isolate,
+                                      v8::MicrotasksScope::kDoNotRunMicrotasks);
+
+  v8::Local<v8::Object> commandLineAPI = v8::Object::New(isolate);
+  bool success =
+      commandLineAPI->SetPrototype(context, v8::Null(isolate)).FromMaybe(false);
+  DCHECK(success);
+  USE(success);
+
+  createBoundFunctionProperty(context, commandLineAPI, "dir",
+                              V8Console::dirCallback,
+                              "function dir(value) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "dirxml",
+                              V8Console::dirxmlCallback,
+                              "function dirxml(value) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "profile",
+                              V8Console::profileCallback,
+                              "function profile(title) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "profileEnd", V8Console::profileEndCallback,
+      "function profileEnd(title) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "clear",
+                              V8Console::clearCallback,
+                              "function clear() { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "table", V8Console::tableCallback,
+      "function table(data, [columns]) { [Command Line API] }");
+
+  createBoundFunctionProperty(context, commandLineAPI, "keys",
+                              V8Console::keysCallback,
+                              "function keys(object) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "values",
+                              V8Console::valuesCallback,
+                              "function values(object) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "debug", V8Console::debugFunctionCallback,
+      "function debug(function) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "undebug", V8Console::undebugFunctionCallback,
+      "function undebug(function) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "monitor", V8Console::monitorFunctionCallback,
+      "function monitor(function) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "unmonitor",
+      V8Console::unmonitorFunctionCallback,
+      "function unmonitor(function) { [Command Line API] }");
+  createBoundFunctionProperty(
+      context, commandLineAPI, "inspect", V8Console::inspectCallback,
+      "function inspect(object) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "copy",
+                              V8Console::copyCallback,
+                              "function copy(value) { [Command Line API] }");
+  createBoundFunctionProperty(context, commandLineAPI, "$_",
+                              V8Console::lastEvaluationResultCallback);
+  createBoundFunctionProperty(context, commandLineAPI, "$0",
+                              V8Console::inspectedObject0);
+  createBoundFunctionProperty(context, commandLineAPI, "$1",
+                              V8Console::inspectedObject1);
+  createBoundFunctionProperty(context, commandLineAPI, "$2",
+                              V8Console::inspectedObject2);
+  createBoundFunctionProperty(context, commandLineAPI, "$3",
+                              V8Console::inspectedObject3);
+  createBoundFunctionProperty(context, commandLineAPI, "$4",
+                              V8Console::inspectedObject4);
+
+  inspectedContext->inspector()->client()->installAdditionalCommandLineAPI(
+      context, commandLineAPI);
+
+  commandLineAPI->SetPrivate(context, inspectedContextPrivateKey(isolate),
+                             v8::External::New(isolate, inspectedContext));
+  return commandLineAPI;
+}
+
+static bool isCommandLineAPIGetter(const String16& name) {
+  if (name.length() != 2) return false;
+  // $0 ... $4, $_
+  return name[0] == '$' &&
+         ((name[1] >= '0' && name[1] <= '4') || name[1] == '_');
+}
+
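+// Getter installed on the global object for each Command Line API name. It
+// proxies reads to the commandLineAPI object; the $0-$4 and $_ entries are
+// functions that get invoked here so the property read yields their result.
+// During scope cleanup the getter instead deletes its own property.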
+void V8Console::CommandLineAPIScope::accessorGetterCallback(
+    v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
+  CommandLineAPIScope* scope = static_cast<CommandLineAPIScope*>(
+      info.Data().As<v8::External>()->Value());
+  DCHECK(scope);
+
+  v8::Local<v8::Context> context = info.GetIsolate()->GetCurrentContext();
+  if (scope->m_cleanup) {
+    bool removed = info.Holder()->Delete(context, name).FromMaybe(false);
+    DCHECK(removed);
+    USE(removed);
+    return;
+  }
+  v8::Local<v8::Object> commandLineAPI = scope->m_commandLineAPI;
+
+  v8::Local<v8::Value> value;
+  if (!commandLineAPI->Get(context, name).ToLocal(&value)) return;
+  if (isCommandLineAPIGetter(toProtocolStringWithTypeCheck(name))) {
+    DCHECK(value->IsFunction());
+    v8::MicrotasksScope microtasks(info.GetIsolate(),
+                                   v8::MicrotasksScope::kDoNotRunMicrotasks);
+    if (value.As<v8::Function>()
+            ->Call(context, commandLineAPI, 0, nullptr)
+            .ToLocal(&value))
+      info.GetReturnValue().Set(value);
+  } else {
+    info.GetReturnValue().Set(value);
+  }
+}
+
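+// Assigning to a Command Line API name replaces the accessor with a plain
+// data property holding the assigned value and drops the name from the set
+// of installed methods so the destructor leaves it alone.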
+void V8Console::CommandLineAPIScope::accessorSetterCallback(
+    v8::Local<v8::Name> name, v8::Local<v8::Value> value,
+    const v8::PropertyCallbackInfo<void>& info) {
+  CommandLineAPIScope* scope = static_cast<CommandLineAPIScope*>(
+      info.Data().As<v8::External>()->Value());
+  v8::Local<v8::Context> context = info.GetIsolate()->GetCurrentContext();
+  if (!info.Holder()->Delete(context, name).FromMaybe(false)) return;
+  if (!info.Holder()->CreateDataProperty(context, name, value).FromMaybe(false))
+    return;
+  bool removed =
+      scope->m_installedMethods->Delete(context, name).FromMaybe(false);
+  DCHECK(removed);
+  USE(removed);
+}
+
+V8Console::CommandLineAPIScope::CommandLineAPIScope(
+    v8::Local<v8::Context> context, v8::Local<v8::Object> commandLineAPI,
+    v8::Local<v8::Object> global)
+    : m_context(context),
+      m_commandLineAPI(commandLineAPI),
+      m_global(global),
+      m_installedMethods(v8::Set::New(context->GetIsolate())),
+      m_cleanup(false) {
+  v8::Local<v8::Array> names;
+  if (!m_commandLineAPI->GetOwnPropertyNames(context).ToLocal(&names)) return;
+  v8::Local<v8::External> externalThis =
+      v8::External::New(context->GetIsolate(), this);
+  for (uint32_t i = 0; i < names->Length(); ++i) {
+    v8::Local<v8::Value> name;
+    if (!names->Get(context, i).ToLocal(&name) || !name->IsName()) continue;
+    if (m_global->Has(context, name).FromMaybe(true)) continue;
+    if (!m_installedMethods->Add(context, name).ToLocal(&m_installedMethods))
+      continue;
+    if (!m_global
+             ->SetAccessor(context, v8::Local<v8::Name>::Cast(name),
+                           CommandLineAPIScope::accessorGetterCallback,
+                           CommandLineAPIScope::accessorSetterCallback,
+                           externalThis, v8::DEFAULT, v8::DontEnum)
+             .FromMaybe(false)) {
+      bool removed = m_installedMethods->Delete(context, name).FromMaybe(false);
+      DCHECK(removed);
+      USE(removed);
+      continue;
+    }
+  }
+}
+
+V8Console::CommandLineAPIScope::~CommandLineAPIScope() {
+  m_cleanup = true;
+  v8::Local<v8::Array> names = m_installedMethods->AsArray();
+  for (uint32_t i = 0; i < names->Length(); ++i) {
+    v8::Local<v8::Value> name;
+    if (!names->Get(m_context, i).ToLocal(&name) || !name->IsName()) continue;
+    if (name->IsString()) {
+      v8::Local<v8::Value> descriptor;
+      bool success = m_global
+                         ->GetOwnPropertyDescriptor(
+                             m_context, v8::Local<v8::String>::Cast(name))
+                         .ToLocal(&descriptor);
+      DCHECK(success);
+      USE(success);
+    }
+  }
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-console.h b/src/inspector/v8-console.h
new file mode 100644
index 0000000..c643d49
--- /dev/null
+++ b/src/inspector/v8-console.h
@@ -0,0 +1,119 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8CONSOLE_H_
+#define V8_INSPECTOR_V8CONSOLE_H_
+
+#include "src/base/macros.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class InspectedContext;
+
+// Console API
+// https://console.spec.whatwg.org/#console-interface
+class V8Console {
+ public:
+  static v8::Local<v8::Object> createConsole(InspectedContext*,
+                                             bool hasMemoryAttribute);
+  static void clearInspectedContextIfNeeded(v8::Local<v8::Context>,
+                                            v8::Local<v8::Object> console);
+  static v8::Local<v8::Object> createCommandLineAPI(InspectedContext*);
+
+  class CommandLineAPIScope {
+   public:
+    CommandLineAPIScope(v8::Local<v8::Context>,
+                        v8::Local<v8::Object> commandLineAPI,
+                        v8::Local<v8::Object> global);
+    ~CommandLineAPIScope();
+
+   private:
+    static void accessorGetterCallback(
+        v8::Local<v8::Name>, const v8::PropertyCallbackInfo<v8::Value>&);
+    static void accessorSetterCallback(v8::Local<v8::Name>,
+                                       v8::Local<v8::Value>,
+                                       const v8::PropertyCallbackInfo<void>&);
+
+    v8::Local<v8::Context> m_context;
+    v8::Local<v8::Object> m_commandLineAPI;
+    v8::Local<v8::Object> m_global;
+    v8::Local<v8::Set> m_installedMethods;
+    bool m_cleanup;
+
+    DISALLOW_COPY_AND_ASSIGN(CommandLineAPIScope);
+  };
+
+ private:
+  static void debugCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void errorCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void infoCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void logCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void warnCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void dirCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void dirxmlCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void tableCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void traceCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void groupCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void groupCollapsedCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void groupEndCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void clearCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void countCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void assertCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void markTimelineCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void profileCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void profileEndCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void timelineCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void timelineEndCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void timeCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void timeEndCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void timeStampCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  // TODO(foolip): There is no spec for the Memory Info API, see blink-dev:
+  // https://groups.google.com/a/chromium.org/d/msg/blink-dev/g5YRCGpC9vs/b4OJz71NmPwJ
+  static void memoryGetterCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void memorySetterCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+
+  // CommandLineAPI
+  static void keysCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void valuesCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void debugFunctionCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void undebugFunctionCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void monitorFunctionCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void unmonitorFunctionCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void lastEvaluationResultCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void inspectCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void copyCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void inspectedObject(const v8::FunctionCallbackInfo<v8::Value>&,
+                              unsigned num);
+  static void inspectedObject0(
+      const v8::FunctionCallbackInfo<v8::Value>& info) {
+    inspectedObject(info, 0);
+  }
+  static void inspectedObject1(
+      const v8::FunctionCallbackInfo<v8::Value>& info) {
+    inspectedObject(info, 1);
+  }
+  static void inspectedObject2(
+      const v8::FunctionCallbackInfo<v8::Value>& info) {
+    inspectedObject(info, 2);
+  }
+  static void inspectedObject3(
+      const v8::FunctionCallbackInfo<v8::Value>& info) {
+    inspectedObject(info, 3);
+  }
+  static void inspectedObject4(
+      const v8::FunctionCallbackInfo<v8::Value>& info) {
+    inspectedObject(info, 4);
+  }
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8CONSOLE_H_
diff --git a/src/inspector/v8-debugger-agent-impl.cc b/src/inspector/v8-debugger-agent-impl.cc
new file mode 100644
index 0000000..80e2611
--- /dev/null
+++ b/src/inspector/v8-debugger-agent-impl.cc
@@ -0,0 +1,1255 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-debugger-agent-impl.h"
+
+#include <algorithm>
+
+#include "src/inspector/injected-script.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/java-script-call-frame.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/remote-object-id.h"
+#include "src/inspector/script-breakpoint.h"
+#include "src/inspector/search-util.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger-script.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-regex.h"
+#include "src/inspector/v8-runtime-agent-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+using protocol::Array;
+using protocol::Maybe;
+using protocol::Debugger::BreakpointId;
+using protocol::Debugger::CallFrame;
+using protocol::Runtime::ExceptionDetails;
+using protocol::Runtime::ScriptId;
+using protocol::Runtime::StackTrace;
+using protocol::Runtime::RemoteObject;
+
+namespace DebuggerAgentState {
+static const char javaScriptBreakpoints[] = "javaScriptBreakpoints";
+static const char pauseOnExceptionsState[] = "pauseOnExceptionsState";
+static const char asyncCallStackDepth[] = "asyncCallStackDepth";
+static const char blackboxPattern[] = "blackboxPattern";
+static const char debuggerEnabled[] = "debuggerEnabled";
+
+// Breakpoint properties.
+static const char url[] = "url";
+static const char isRegex[] = "isRegex";
+static const char lineNumber[] = "lineNumber";
+static const char columnNumber[] = "columnNumber";
+static const char condition[] = "condition";
+static const char skipAllPauses[] = "skipAllPauses";
+
+}  // namespace DebuggerAgentState
+
+static const int maxSkipStepFrameCount = 128;
+static const char backtraceObjectGroup[] = "backtrace";
+
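+// Breakpoint ids have the form "<scriptId>:<line>:<column>", plus a ":debug"
+// or ":monitor" suffix for breakpoints installed through the Command Line
+// API debug() and monitor() helpers.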
+static String16 breakpointIdSuffix(
+    V8DebuggerAgentImpl::BreakpointSource source) {
+  switch (source) {
+    case V8DebuggerAgentImpl::UserBreakpointSource:
+      break;
+    case V8DebuggerAgentImpl::DebugCommandBreakpointSource:
+      return ":debug";
+    case V8DebuggerAgentImpl::MonitorCommandBreakpointSource:
+      return ":monitor";
+  }
+  return String16();
+}
+
+static String16 generateBreakpointId(
+    const String16& scriptId, int lineNumber, int columnNumber,
+    V8DebuggerAgentImpl::BreakpointSource source) {
+  return scriptId + ":" + String16::fromInteger(lineNumber) + ":" +
+         String16::fromInteger(columnNumber) + breakpointIdSuffix(source);
+}
+
+static bool positionComparator(const std::pair<int, int>& a,
+                               const std::pair<int, int>& b) {
+  if (a.first != b.first) return a.first < b.first;
+  return a.second < b.second;
+}
+
+static bool hasInternalError(ErrorString* errorString, bool hasError) {
+  if (hasError) *errorString = "Internal error";
+  return hasError;
+}
+
+static std::unique_ptr<protocol::Debugger::Location> buildProtocolLocation(
+    const String16& scriptId, int lineNumber, int columnNumber) {
+  return protocol::Debugger::Location::create()
+      .setScriptId(scriptId)
+      .setLineNumber(lineNumber)
+      .setColumnNumber(columnNumber)
+      .build();
+}
+
+V8DebuggerAgentImpl::V8DebuggerAgentImpl(
+    V8InspectorSessionImpl* session, protocol::FrontendChannel* frontendChannel,
+    protocol::DictionaryValue* state)
+    : m_inspector(session->inspector()),
+      m_debugger(m_inspector->debugger()),
+      m_session(session),
+      m_enabled(false),
+      m_state(state),
+      m_frontend(frontendChannel),
+      m_isolate(m_inspector->isolate()),
+      m_breakReason(protocol::Debugger::Paused::ReasonEnum::Other),
+      m_scheduledDebuggerStep(NoStep),
+      m_skipNextDebuggerStepOut(false),
+      m_javaScriptPauseScheduled(false),
+      m_steppingFromFramework(false),
+      m_pausingOnNativeEvent(false),
+      m_skippedStepFrameCount(0),
+      m_recursionLevelForStepOut(0),
+      m_recursionLevelForStepFrame(0),
+      m_skipAllPauses(false) {
+  clearBreakDetails();
+}
+
+V8DebuggerAgentImpl::~V8DebuggerAgentImpl() {}
+
+bool V8DebuggerAgentImpl::checkEnabled(ErrorString* errorString) {
+  if (enabled()) return true;
+  *errorString = "Debugger agent is not enabled";
+  return false;
+}
+
+void V8DebuggerAgentImpl::enable() {
+  // Enabling the debugger below may result in already-parsed scripts being
+  // reported to the agent, so the agent must be marked enabled first.
+  m_enabled = true;
+  m_state->setBoolean(DebuggerAgentState::debuggerEnabled, true);
+  m_debugger->enable();
+
+  std::vector<std::unique_ptr<V8DebuggerScript>> compiledScripts;
+  m_debugger->getCompiledScripts(m_session->contextGroupId(), compiledScripts);
+  for (size_t i = 0; i < compiledScripts.size(); i++)
+    didParseSource(std::move(compiledScripts[i]), true);
+
+  // FIXME(WK44513): breakpoints activated flag should be synchronized between
+  // all front-ends
+  m_debugger->setBreakpointsActivated(true);
+}
+
+bool V8DebuggerAgentImpl::enabled() { return m_enabled; }
+
+void V8DebuggerAgentImpl::enable(ErrorString* errorString) {
+  if (enabled()) return;
+
+  if (!m_inspector->client()->canExecuteScripts(m_session->contextGroupId())) {
+    *errorString = "Script execution is prohibited";
+    return;
+  }
+
+  enable();
+}
+
+void V8DebuggerAgentImpl::disable(ErrorString*) {
+  if (!enabled()) return;
+
+  m_state->setObject(DebuggerAgentState::javaScriptBreakpoints,
+                     protocol::DictionaryValue::create());
+  m_state->setInteger(DebuggerAgentState::pauseOnExceptionsState,
+                      V8Debugger::DontPauseOnExceptions);
+  m_state->setInteger(DebuggerAgentState::asyncCallStackDepth, 0);
+
+  if (!m_pausedContext.IsEmpty()) m_debugger->continueProgram();
+  m_debugger->disable();
+  m_pausedContext.Reset();
+  JavaScriptCallFrames emptyCallFrames;
+  m_pausedCallFrames.swap(emptyCallFrames);
+  m_scripts.clear();
+  m_blackboxedPositions.clear();
+  m_breakpointIdToDebuggerBreakpointIds.clear();
+  m_debugger->setAsyncCallStackDepth(this, 0);
+  m_continueToLocationBreakpointId = String16();
+  clearBreakDetails();
+  m_scheduledDebuggerStep = NoStep;
+  m_skipNextDebuggerStepOut = false;
+  m_javaScriptPauseScheduled = false;
+  m_steppingFromFramework = false;
+  m_pausingOnNativeEvent = false;
+  m_skippedStepFrameCount = 0;
+  m_recursionLevelForStepFrame = 0;
+  m_skipAllPauses = false;
+  m_blackboxPattern = nullptr;
+  m_state->remove(DebuggerAgentState::blackboxPattern);
+  m_enabled = false;
+  m_state->setBoolean(DebuggerAgentState::debuggerEnabled, false);
+}
+
+void V8DebuggerAgentImpl::restore() {
+  DCHECK(!m_enabled);
+  if (!m_state->booleanProperty(DebuggerAgentState::debuggerEnabled, false))
+    return;
+  if (!m_inspector->client()->canExecuteScripts(m_session->contextGroupId()))
+    return;
+
+  enable();
+  ErrorString error;
+
+  int pauseState = V8Debugger::DontPauseOnExceptions;
+  m_state->getInteger(DebuggerAgentState::pauseOnExceptionsState, &pauseState);
+  setPauseOnExceptionsImpl(&error, pauseState);
+  DCHECK(error.isEmpty());
+
+  m_skipAllPauses =
+      m_state->booleanProperty(DebuggerAgentState::skipAllPauses, false);
+
+  int asyncCallStackDepth = 0;
+  m_state->getInteger(DebuggerAgentState::asyncCallStackDepth,
+                      &asyncCallStackDepth);
+  m_debugger->setAsyncCallStackDepth(this, asyncCallStackDepth);
+
+  String16 blackboxPattern;
+  if (m_state->getString(DebuggerAgentState::blackboxPattern,
+                         &blackboxPattern)) {
+    if (!setBlackboxPattern(&error, blackboxPattern)) UNREACHABLE();
+  }
+}
+
+void V8DebuggerAgentImpl::setBreakpointsActive(ErrorString* errorString,
+                                               bool active) {
+  if (!checkEnabled(errorString)) return;
+  m_debugger->setBreakpointsActivated(active);
+}
+
+void V8DebuggerAgentImpl::setSkipAllPauses(ErrorString*, bool skip) {
+  m_skipAllPauses = skip;
+  m_state->setBoolean(DebuggerAgentState::skipAllPauses, m_skipAllPauses);
+}
+
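+// setBreakpointByUrl persists each breakpoint as a "cookie" object under
+// DebuggerAgentState::javaScriptBreakpoints, keyed by breakpoint id, so it
+// can later be matched against scripts by url or url regex.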
+static std::unique_ptr<protocol::DictionaryValue>
+buildObjectForBreakpointCookie(const String16& url, int lineNumber,
+                               int columnNumber, const String16& condition,
+                               bool isRegex) {
+  std::unique_ptr<protocol::DictionaryValue> breakpointObject =
+      protocol::DictionaryValue::create();
+  breakpointObject->setString(DebuggerAgentState::url, url);
+  breakpointObject->setInteger(DebuggerAgentState::lineNumber, lineNumber);
+  breakpointObject->setInteger(DebuggerAgentState::columnNumber, columnNumber);
+  breakpointObject->setString(DebuggerAgentState::condition, condition);
+  breakpointObject->setBoolean(DebuggerAgentState::isRegex, isRegex);
+  return breakpointObject;
+}
+
+static bool matches(V8InspectorImpl* inspector, const String16& url,
+                    const String16& pattern, bool isRegex) {
+  if (isRegex) {
+    V8Regex regex(inspector, pattern, true);
+    return regex.match(url) != -1;
+  }
+  return url == pattern;
+}
+
+void V8DebuggerAgentImpl::setBreakpointByUrl(
+    ErrorString* errorString, int lineNumber,
+    const Maybe<String16>& optionalURL, const Maybe<String16>& optionalURLRegex,
+    const Maybe<int>& optionalColumnNumber,
+    const Maybe<String16>& optionalCondition, String16* outBreakpointId,
+    std::unique_ptr<protocol::Array<protocol::Debugger::Location>>* locations) {
+  *locations = Array<protocol::Debugger::Location>::create();
+  if (optionalURL.isJust() == optionalURLRegex.isJust()) {
+    *errorString = "Either url or urlRegex must be specified.";
+    return;
+  }
+
+  String16 url = optionalURL.isJust() ? optionalURL.fromJust()
+                                      : optionalURLRegex.fromJust();
+  int columnNumber = 0;
+  if (optionalColumnNumber.isJust()) {
+    columnNumber = optionalColumnNumber.fromJust();
+    if (columnNumber < 0) {
+      *errorString = "Incorrect column number";
+      return;
+    }
+  }
+  String16 condition = optionalCondition.fromMaybe("");
+  bool isRegex = optionalURLRegex.isJust();
+
+  String16 breakpointId = (isRegex ? "/" + url + "/" : url) + ":" +
+                          String16::fromInteger(lineNumber) + ":" +
+                          String16::fromInteger(columnNumber);
+  protocol::DictionaryValue* breakpointsCookie =
+      m_state->getObject(DebuggerAgentState::javaScriptBreakpoints);
+  if (!breakpointsCookie) {
+    std::unique_ptr<protocol::DictionaryValue> newValue =
+        protocol::DictionaryValue::create();
+    breakpointsCookie = newValue.get();
+    m_state->setObject(DebuggerAgentState::javaScriptBreakpoints,
+                       std::move(newValue));
+  }
+  if (breakpointsCookie->get(breakpointId)) {
+    *errorString = "Breakpoint at specified location already exists.";
+    return;
+  }
+
+  breakpointsCookie->setObject(
+      breakpointId, buildObjectForBreakpointCookie(
+                        url, lineNumber, columnNumber, condition, isRegex));
+
+  ScriptBreakpoint breakpoint(lineNumber, columnNumber, condition);
+  for (const auto& script : m_scripts) {
+    if (!matches(m_inspector, script.second->sourceURL(), url, isRegex))
+      continue;
+    std::unique_ptr<protocol::Debugger::Location> location = resolveBreakpoint(
+        breakpointId, script.first, breakpoint, UserBreakpointSource);
+    if (location) (*locations)->addItem(std::move(location));
+  }
+
+  *outBreakpointId = breakpointId;
+}
+
+static bool parseLocation(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::Debugger::Location> location, String16* scriptId,
+    int* lineNumber, int* columnNumber) {
+  *scriptId = location->getScriptId();
+  *lineNumber = location->getLineNumber();
+  *columnNumber = location->getColumnNumber(0);
+  return true;
+}
+
+void V8DebuggerAgentImpl::setBreakpoint(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::Debugger::Location> location,
+    const Maybe<String16>& optionalCondition, String16* outBreakpointId,
+    std::unique_ptr<protocol::Debugger::Location>* actualLocation) {
+  String16 scriptId;
+  int lineNumber;
+  int columnNumber;
+
+  if (!parseLocation(errorString, std::move(location), &scriptId, &lineNumber,
+                     &columnNumber))
+    return;
+
+  String16 condition = optionalCondition.fromMaybe("");
+
+  String16 breakpointId = generateBreakpointId(
+      scriptId, lineNumber, columnNumber, UserBreakpointSource);
+  if (m_breakpointIdToDebuggerBreakpointIds.find(breakpointId) !=
+      m_breakpointIdToDebuggerBreakpointIds.end()) {
+    *errorString = "Breakpoint at specified location already exists.";
+    return;
+  }
+  ScriptBreakpoint breakpoint(lineNumber, columnNumber, condition);
+  *actualLocation = resolveBreakpoint(breakpointId, scriptId, breakpoint,
+                                      UserBreakpointSource);
+  if (*actualLocation)
+    *outBreakpointId = breakpointId;
+  else
+    *errorString = "Could not resolve breakpoint";
+}
+
+void V8DebuggerAgentImpl::removeBreakpoint(ErrorString* errorString,
+                                           const String16& breakpointId) {
+  if (!checkEnabled(errorString)) return;
+  protocol::DictionaryValue* breakpointsCookie =
+      m_state->getObject(DebuggerAgentState::javaScriptBreakpoints);
+  if (breakpointsCookie) breakpointsCookie->remove(breakpointId);
+  removeBreakpoint(breakpointId);
+}
+
+void V8DebuggerAgentImpl::removeBreakpoint(const String16& breakpointId) {
+  DCHECK(enabled());
+  BreakpointIdToDebuggerBreakpointIdsMap::iterator
+      debuggerBreakpointIdsIterator =
+          m_breakpointIdToDebuggerBreakpointIds.find(breakpointId);
+  if (debuggerBreakpointIdsIterator ==
+      m_breakpointIdToDebuggerBreakpointIds.end())
+    return;
+  const std::vector<String16>& ids = debuggerBreakpointIdsIterator->second;
+  for (size_t i = 0; i < ids.size(); ++i) {
+    const String16& debuggerBreakpointId = ids[i];
+
+    m_debugger->removeBreakpoint(debuggerBreakpointId);
+    m_serverBreakpoints.erase(debuggerBreakpointId);
+  }
+  m_breakpointIdToDebuggerBreakpointIds.erase(breakpointId);
+}
+
+void V8DebuggerAgentImpl::continueToLocation(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::Debugger::Location> location) {
+  if (!checkEnabled(errorString)) return;
+  if (!m_continueToLocationBreakpointId.isEmpty()) {
+    m_debugger->removeBreakpoint(m_continueToLocationBreakpointId);
+    m_continueToLocationBreakpointId = "";
+  }
+
+  String16 scriptId;
+  int lineNumber;
+  int columnNumber;
+
+  if (!parseLocation(errorString, std::move(location), &scriptId, &lineNumber,
+                     &columnNumber))
+    return;
+
+  ScriptBreakpoint breakpoint(lineNumber, columnNumber, "");
+  m_continueToLocationBreakpointId = m_debugger->setBreakpoint(
+      scriptId, breakpoint, &lineNumber, &columnNumber);
+  resume(errorString);
+}
+
+bool V8DebuggerAgentImpl::isCurrentCallStackEmptyOrBlackboxed() {
+  DCHECK(enabled());
+  JavaScriptCallFrames callFrames = m_debugger->currentCallFrames();
+  for (size_t index = 0; index < callFrames.size(); ++index) {
+    if (!isCallFrameWithUnknownScriptOrBlackboxed(callFrames[index].get()))
+      return false;
+  }
+  return true;
+}
+
+bool V8DebuggerAgentImpl::isTopPausedCallFrameBlackboxed() {
+  DCHECK(enabled());
+  JavaScriptCallFrame* frame =
+      m_pausedCallFrames.size() ? m_pausedCallFrames[0].get() : nullptr;
+  return isCallFrameWithUnknownScriptOrBlackboxed(frame);
+}
+
+bool V8DebuggerAgentImpl::isCallFrameWithUnknownScriptOrBlackboxed(
+    JavaScriptCallFrame* frame) {
+  if (!frame) return true;
+  ScriptsMap::iterator it =
+      m_scripts.find(String16::fromInteger(frame->sourceID()));
+  if (it == m_scripts.end()) {
+    // Unknown scripts are blackboxed.
+    return true;
+  }
+  if (m_blackboxPattern) {
+    const String16& scriptSourceURL = it->second->sourceURL();
+    if (!scriptSourceURL.isEmpty() &&
+        m_blackboxPattern->match(scriptSourceURL) != -1)
+      return true;
+  }
+  auto itBlackboxedPositions =
+      m_blackboxedPositions.find(String16::fromInteger(frame->sourceID()));
+  if (itBlackboxedPositions == m_blackboxedPositions.end()) return false;
+
+  const std::vector<std::pair<int, int>>& ranges =
+      itBlackboxedPositions->second;
+  auto itRange = std::lower_bound(
+      ranges.begin(), ranges.end(),
+      std::make_pair(frame->line(), frame->column()), positionComparator);
+  // The ranges array contains the positions in the script where the blackbox
+  // state flips: [(0,0) ... ranges[0]) is not blackboxed, [ranges[0] ...
+  // ranges[1]) is blackboxed, and so on.
+  return std::distance(ranges.begin(), itRange) % 2;
+}
+
+V8DebuggerAgentImpl::SkipPauseRequest
+V8DebuggerAgentImpl::shouldSkipExceptionPause(
+    JavaScriptCallFrame* topCallFrame) {
+  if (m_steppingFromFramework) return RequestNoSkip;
+  if (isCallFrameWithUnknownScriptOrBlackboxed(topCallFrame))
+    return RequestContinue;
+  return RequestNoSkip;
+}
+
+V8DebuggerAgentImpl::SkipPauseRequest V8DebuggerAgentImpl::shouldSkipStepPause(
+    JavaScriptCallFrame* topCallFrame) {
+  if (m_steppingFromFramework) return RequestNoSkip;
+
+  if (m_skipNextDebuggerStepOut) {
+    m_skipNextDebuggerStepOut = false;
+    if (m_scheduledDebuggerStep == StepOut) return RequestStepOut;
+  }
+
+  if (!isCallFrameWithUnknownScriptOrBlackboxed(topCallFrame))
+    return RequestNoSkip;
+
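+  // The top frame is blackboxed: keep issuing StepFrame until execution
+  // leaves the blackboxed code or maxSkipStepFrameCount is reached, then
+  // fall back to StepOut.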
+  if (m_skippedStepFrameCount >= maxSkipStepFrameCount) return RequestStepOut;
+
+  if (!m_skippedStepFrameCount) m_recursionLevelForStepFrame = 1;
+
+  ++m_skippedStepFrameCount;
+  return RequestStepFrame;
+}
+
+std::unique_ptr<protocol::Debugger::Location>
+V8DebuggerAgentImpl::resolveBreakpoint(const String16& breakpointId,
+                                       const String16& scriptId,
+                                       const ScriptBreakpoint& breakpoint,
+                                       BreakpointSource source) {
+  DCHECK(enabled());
+  // FIXME: remove these checks once crbug.com/520702 is resolved.
+  CHECK(!breakpointId.isEmpty());
+  CHECK(!scriptId.isEmpty());
+  ScriptsMap::iterator scriptIterator = m_scripts.find(scriptId);
+  if (scriptIterator == m_scripts.end()) return nullptr;
+  if (breakpoint.lineNumber < scriptIterator->second->startLine() ||
+      scriptIterator->second->endLine() < breakpoint.lineNumber)
+    return nullptr;
+
+  int actualLineNumber;
+  int actualColumnNumber;
+  String16 debuggerBreakpointId = m_debugger->setBreakpoint(
+      scriptId, breakpoint, &actualLineNumber, &actualColumnNumber);
+  if (debuggerBreakpointId.isEmpty()) return nullptr;
+
+  m_serverBreakpoints[debuggerBreakpointId] =
+      std::make_pair(breakpointId, source);
+  CHECK(!breakpointId.isEmpty());
+
+  m_breakpointIdToDebuggerBreakpointIds[breakpointId].push_back(
+      debuggerBreakpointId);
+  return buildProtocolLocation(scriptId, actualLineNumber, actualColumnNumber);
+}
+
+void V8DebuggerAgentImpl::searchInContent(
+    ErrorString* error, const String16& scriptId, const String16& query,
+    const Maybe<bool>& optionalCaseSensitive,
+    const Maybe<bool>& optionalIsRegex,
+    std::unique_ptr<Array<protocol::Debugger::SearchMatch>>* results) {
+  v8::HandleScope handles(m_isolate);
+  ScriptsMap::iterator it = m_scripts.find(scriptId);
+  if (it == m_scripts.end()) {
+    *error = String16("No script for id: " + scriptId);
+    return;
+  }
+
+  std::vector<std::unique_ptr<protocol::Debugger::SearchMatch>> matches =
+      searchInTextByLinesImpl(m_session,
+                              toProtocolString(it->second->source(m_isolate)),
+                              query, optionalCaseSensitive.fromMaybe(false),
+                              optionalIsRegex.fromMaybe(false));
+  *results = protocol::Array<protocol::Debugger::SearchMatch>::create();
+  for (size_t i = 0; i < matches.size(); ++i)
+    (*results)->addItem(std::move(matches[i]));
+}
+
+void V8DebuggerAgentImpl::setScriptSource(
+    ErrorString* errorString, const String16& scriptId,
+    const String16& newContent, const Maybe<bool>& dryRun,
+    Maybe<protocol::Array<protocol::Debugger::CallFrame>>* newCallFrames,
+    Maybe<bool>* stackChanged, Maybe<StackTrace>* asyncStackTrace,
+    Maybe<protocol::Runtime::ExceptionDetails>* optOutCompileError) {
+  if (!checkEnabled(errorString)) return;
+
+  v8::HandleScope handles(m_isolate);
+  v8::Local<v8::String> newSource = toV8String(m_isolate, newContent);
+  if (!m_debugger->setScriptSource(scriptId, newSource, dryRun.fromMaybe(false),
+                                   errorString, optOutCompileError,
+                                   &m_pausedCallFrames, stackChanged))
+    return;
+
+  ScriptsMap::iterator it = m_scripts.find(scriptId);
+  if (it != m_scripts.end()) it->second->setSource(m_isolate, newSource);
+
+  std::unique_ptr<Array<CallFrame>> callFrames = currentCallFrames(errorString);
+  if (!callFrames) return;
+  *newCallFrames = std::move(callFrames);
+  *asyncStackTrace = currentAsyncStackTrace();
+}
+
+void V8DebuggerAgentImpl::restartFrame(
+    ErrorString* errorString, const String16& callFrameId,
+    std::unique_ptr<Array<CallFrame>>* newCallFrames,
+    Maybe<StackTrace>* asyncStackTrace) {
+  if (!assertPaused(errorString)) return;
+  InjectedScript::CallFrameScope scope(
+      errorString, m_inspector, m_session->contextGroupId(), callFrameId);
+  if (!scope.initialize()) return;
+  if (scope.frameOrdinal() >= m_pausedCallFrames.size()) {
+    *errorString = "Could not find call frame with given id";
+    return;
+  }
+
+  v8::Local<v8::Value> resultValue;
+  v8::Local<v8::Boolean> result;
+  if (!m_pausedCallFrames[scope.frameOrdinal()]->restart().ToLocal(
+          &resultValue) ||
+      scope.tryCatch().HasCaught() ||
+      !resultValue->ToBoolean(scope.context()).ToLocal(&result) ||
+      !result->Value()) {
+    *errorString = "Internal error";
+    return;
+  }
+  JavaScriptCallFrames frames = m_debugger->currentCallFrames();
+  m_pausedCallFrames.swap(frames);
+
+  *newCallFrames = currentCallFrames(errorString);
+  if (!*newCallFrames) return;
+  *asyncStackTrace = currentAsyncStackTrace();
+}
+
+void V8DebuggerAgentImpl::getScriptSource(ErrorString* error,
+                                          const String16& scriptId,
+                                          String16* scriptSource) {
+  if (!checkEnabled(error)) return;
+  ScriptsMap::iterator it = m_scripts.find(scriptId);
+  if (it == m_scripts.end()) {
+    *error = "No script for id: " + scriptId;
+    return;
+  }
+  v8::HandleScope handles(m_isolate);
+  *scriptSource = toProtocolString(it->second->source(m_isolate));
+}
+
+void V8DebuggerAgentImpl::schedulePauseOnNextStatement(
+    const String16& breakReason,
+    std::unique_ptr<protocol::DictionaryValue> data) {
+  if (!enabled() || m_scheduledDebuggerStep == StepInto ||
+      m_javaScriptPauseScheduled || m_debugger->isPaused() ||
+      !m_debugger->breakpointsActivated())
+    return;
+  m_breakReason = breakReason;
+  m_breakAuxData = std::move(data);
+  m_pausingOnNativeEvent = true;
+  m_skipNextDebuggerStepOut = false;
+  m_debugger->setPauseOnNextStatement(true);
+}
+
+void V8DebuggerAgentImpl::schedulePauseOnNextStatementIfSteppingInto() {
+  DCHECK(enabled());
+  if (m_scheduledDebuggerStep != StepInto || m_javaScriptPauseScheduled ||
+      m_debugger->isPaused())
+    return;
+  clearBreakDetails();
+  m_pausingOnNativeEvent = false;
+  m_skippedStepFrameCount = 0;
+  m_recursionLevelForStepFrame = 0;
+  m_debugger->setPauseOnNextStatement(true);
+}
+
+void V8DebuggerAgentImpl::cancelPauseOnNextStatement() {
+  if (m_javaScriptPauseScheduled || m_debugger->isPaused()) return;
+  clearBreakDetails();
+  m_pausingOnNativeEvent = false;
+  m_debugger->setPauseOnNextStatement(false);
+}
+
+void V8DebuggerAgentImpl::pause(ErrorString* errorString) {
+  if (!checkEnabled(errorString)) return;
+  if (m_javaScriptPauseScheduled || m_debugger->isPaused()) return;
+  clearBreakDetails();
+  m_javaScriptPauseScheduled = true;
+  m_scheduledDebuggerStep = NoStep;
+  m_skippedStepFrameCount = 0;
+  m_steppingFromFramework = false;
+  m_debugger->setPauseOnNextStatement(true);
+}
+
+void V8DebuggerAgentImpl::resume(ErrorString* errorString) {
+  if (!assertPaused(errorString)) return;
+  m_scheduledDebuggerStep = NoStep;
+  m_steppingFromFramework = false;
+  m_session->releaseObjectGroup(backtraceObjectGroup);
+  m_debugger->continueProgram();
+}
+
+void V8DebuggerAgentImpl::stepOver(ErrorString* errorString) {
+  if (!assertPaused(errorString)) return;
+  // StepOver at a function's return point should fall back to StepInto.
+  JavaScriptCallFrame* frame =
+      !m_pausedCallFrames.empty() ? m_pausedCallFrames[0].get() : nullptr;
+  if (frame && frame->isAtReturn()) {
+    stepInto(errorString);
+    return;
+  }
+  m_scheduledDebuggerStep = StepOver;
+  m_steppingFromFramework = isTopPausedCallFrameBlackboxed();
+  m_session->releaseObjectGroup(backtraceObjectGroup);
+  m_debugger->stepOverStatement();
+}
+
+void V8DebuggerAgentImpl::stepInto(ErrorString* errorString) {
+  if (!assertPaused(errorString)) return;
+  m_scheduledDebuggerStep = StepInto;
+  m_steppingFromFramework = isTopPausedCallFrameBlackboxed();
+  m_session->releaseObjectGroup(backtraceObjectGroup);
+  m_debugger->stepIntoStatement();
+}
+
+void V8DebuggerAgentImpl::stepOut(ErrorString* errorString) {
+  if (!assertPaused(errorString)) return;
+  m_scheduledDebuggerStep = StepOut;
+  m_skipNextDebuggerStepOut = false;
+  m_recursionLevelForStepOut = 1;
+  m_steppingFromFramework = isTopPausedCallFrameBlackboxed();
+  m_session->releaseObjectGroup(backtraceObjectGroup);
+  m_debugger->stepOutOfFunction();
+}
+
+void V8DebuggerAgentImpl::setPauseOnExceptions(
+    ErrorString* errorString, const String16& stringPauseState) {
+  if (!checkEnabled(errorString)) return;
+  V8Debugger::PauseOnExceptionsState pauseState;
+  if (stringPauseState == "none") {
+    pauseState = V8Debugger::DontPauseOnExceptions;
+  } else if (stringPauseState == "all") {
+    pauseState = V8Debugger::PauseOnAllExceptions;
+  } else if (stringPauseState == "uncaught") {
+    pauseState = V8Debugger::PauseOnUncaughtExceptions;
+  } else {
+    *errorString = "Unknown pause on exceptions mode: " + stringPauseState;
+    return;
+  }
+  setPauseOnExceptionsImpl(errorString, pauseState);
+}
+
+void V8DebuggerAgentImpl::setPauseOnExceptionsImpl(ErrorString* errorString,
+                                                   int pauseState) {
+  m_debugger->setPauseOnExceptionsState(
+      static_cast<V8Debugger::PauseOnExceptionsState>(pauseState));
+  if (m_debugger->getPauseOnExceptionsState() != pauseState)
+    *errorString = "Internal error. Could not change pause on exceptions state";
+  else
+    m_state->setInteger(DebuggerAgentState::pauseOnExceptionsState, pauseState);
+}
+
+void V8DebuggerAgentImpl::evaluateOnCallFrame(
+    ErrorString* errorString, const String16& callFrameId,
+    const String16& expression, const Maybe<String16>& objectGroup,
+    const Maybe<bool>& includeCommandLineAPI, const Maybe<bool>& silent,
+    const Maybe<bool>& returnByValue, const Maybe<bool>& generatePreview,
+    std::unique_ptr<RemoteObject>* result,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails) {
+  if (!assertPaused(errorString)) return;
+  InjectedScript::CallFrameScope scope(
+      errorString, m_inspector, m_session->contextGroupId(), callFrameId);
+  if (!scope.initialize()) return;
+  if (scope.frameOrdinal() >= m_pausedCallFrames.size()) {
+    *errorString = "Could not find call frame with given id";
+    return;
+  }
+
+  if (includeCommandLineAPI.fromMaybe(false) && !scope.installCommandLineAPI())
+    return;
+  if (silent.fromMaybe(false)) scope.ignoreExceptionsAndMuteConsole();
+
+  v8::MaybeLocal<v8::Value> maybeResultValue =
+      m_pausedCallFrames[scope.frameOrdinal()]->evaluate(
+          toV8String(m_isolate, expression));
+
+  // Re-initialize after running the client's code, as it could have destroyed
+  // the context or the session.
+  if (!scope.initialize()) return;
+  scope.injectedScript()->wrapEvaluateResult(
+      errorString, maybeResultValue, scope.tryCatch(),
+      objectGroup.fromMaybe(""), returnByValue.fromMaybe(false),
+      generatePreview.fromMaybe(false), result, exceptionDetails);
+}
+
+void V8DebuggerAgentImpl::setVariableValue(
+    ErrorString* errorString, int scopeNumber, const String16& variableName,
+    std::unique_ptr<protocol::Runtime::CallArgument> newValueArgument,
+    const String16& callFrameId) {
+  if (!checkEnabled(errorString)) return;
+  if (!assertPaused(errorString)) return;
+  InjectedScript::CallFrameScope scope(
+      errorString, m_inspector, m_session->contextGroupId(), callFrameId);
+  if (!scope.initialize()) return;
+
+  v8::Local<v8::Value> newValue;
+  if (!scope.injectedScript()
+           ->resolveCallArgument(errorString, newValueArgument.get())
+           .ToLocal(&newValue))
+    return;
+
+  if (scope.frameOrdinal() >= m_pausedCallFrames.size()) {
+    *errorString = "Could not find call frame with given id";
+    return;
+  }
+  v8::MaybeLocal<v8::Value> result =
+      m_pausedCallFrames[scope.frameOrdinal()]->setVariableValue(
+          scopeNumber, toV8String(m_isolate, variableName), newValue);
+  if (scope.tryCatch().HasCaught() || result.IsEmpty()) {
+    *errorString = "Internal error";
+    return;
+  }
+}
+
+void V8DebuggerAgentImpl::setAsyncCallStackDepth(ErrorString* errorString,
+                                                 int depth) {
+  if (!checkEnabled(errorString)) return;
+  m_state->setInteger(DebuggerAgentState::asyncCallStackDepth, depth);
+  m_debugger->setAsyncCallStackDepth(this, depth);
+}
+
+void V8DebuggerAgentImpl::setBlackboxPatterns(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::Array<String16>> patterns) {
+  if (!patterns->length()) {
+    m_blackboxPattern = nullptr;
+    m_state->remove(DebuggerAgentState::blackboxPattern);
+    return;
+  }
+
+  String16Builder patternBuilder;
+  patternBuilder.append('(');
+  for (size_t i = 0; i < patterns->length() - 1; ++i) {
+    patternBuilder.append(patterns->get(i));
+    patternBuilder.append("|");
+  }
+  patternBuilder.append(patterns->get(patterns->length() - 1));
+  patternBuilder.append(')');
+  String16 pattern = patternBuilder.toString();
+  if (!setBlackboxPattern(errorString, pattern)) return;
+  m_state->setString(DebuggerAgentState::blackboxPattern, pattern);
+}
+
+bool V8DebuggerAgentImpl::setBlackboxPattern(ErrorString* errorString,
+                                             const String16& pattern) {
+  std::unique_ptr<V8Regex> regex(new V8Regex(
+      m_inspector, pattern, true /** caseSensitive */, false /** multiline */));
+  if (!regex->isValid()) {
+    *errorString = "Pattern parser error: " + regex->errorMessage();
+    return false;
+  }
+  m_blackboxPattern = std::move(regex);
+  return true;
+}
+
+void V8DebuggerAgentImpl::setBlackboxedRanges(
+    ErrorString* error, const String16& scriptId,
+    std::unique_ptr<protocol::Array<protocol::Debugger::ScriptPosition>>
+        inPositions) {
+  if (m_scripts.find(scriptId) == m_scripts.end()) {
+    *error = "No script with passed id.";
+    return;
+  }
+
+  if (!inPositions->length()) {
+    m_blackboxedPositions.erase(scriptId);
+    return;
+  }
+
+  std::vector<std::pair<int, int>> positions;
+  positions.reserve(inPositions->length());
+  for (size_t i = 0; i < inPositions->length(); ++i) {
+    protocol::Debugger::ScriptPosition* position = inPositions->get(i);
+    if (position->getLineNumber() < 0) {
+      *error = "Position missing 'line' or 'line' < 0.";
+      return;
+    }
+    if (position->getColumnNumber() < 0) {
+      *error = "Position missing 'column' or 'column' < 0.";
+      return;
+    }
+    positions.push_back(
+        std::make_pair(position->getLineNumber(), position->getColumnNumber()));
+  }
+
+  for (size_t i = 1; i < positions.size(); ++i) {
+    if (positions[i - 1].first < positions[i].first) continue;
+    if (positions[i - 1].first == positions[i].first &&
+        positions[i - 1].second < positions[i].second)
+      continue;
+    *error =
+        "Input positions array is not sorted or contains duplicate values.";
+    return;
+  }
+
+  m_blackboxedPositions[scriptId] = positions;
+}
+
+void V8DebuggerAgentImpl::willExecuteScript(int scriptId) {
+  changeJavaScriptRecursionLevel(+1);
+  // Fast return.
+  if (m_scheduledDebuggerStep != StepInto) return;
+  schedulePauseOnNextStatementIfSteppingInto();
+}
+
+void V8DebuggerAgentImpl::didExecuteScript() {
+  changeJavaScriptRecursionLevel(-1);
+}
+
+void V8DebuggerAgentImpl::changeJavaScriptRecursionLevel(int step) {
+  if (m_javaScriptPauseScheduled && !m_skipAllPauses &&
+      !m_debugger->isPaused()) {
+    // Never lose the user's pause request until we have actually paused.
+    m_debugger->setPauseOnNextStatement(true);
+  }
+  if (m_scheduledDebuggerStep == StepOut) {
+    m_recursionLevelForStepOut += step;
+    if (!m_recursionLevelForStepOut) {
+      // When StepOut crosses a task boundary (i.e. js -> c++) away from where
+      // it was requested, switch to stepping into the next JS task, as if we
+      // had exited into a blackboxed framework.
+      m_scheduledDebuggerStep = StepInto;
+      m_skipNextDebuggerStepOut = false;
+    }
+  }
+  if (m_recursionLevelForStepFrame) {
+    m_recursionLevelForStepFrame += step;
+    if (!m_recursionLevelForStepFrame) {
+      // We have walked through a blackboxed framework and got back to where
+      // we started. If no stepping was scheduled, cancel stepping explicitly,
+      // since a scheduled StepFrame may be left over. Otherwise, if we were
+      // stepping in/over, the StepFrame will stop at the right location,
+      // whereas if we were stepping out, we should continue doing so after
+      // the debugger pauses from the old StepFrame.
+      m_skippedStepFrameCount = 0;
+      if (m_scheduledDebuggerStep == NoStep)
+        m_debugger->clearStepping();
+      else if (m_scheduledDebuggerStep == StepOut)
+        m_skipNextDebuggerStepOut = true;
+    }
+  }
+}
+
+std::unique_ptr<Array<CallFrame>> V8DebuggerAgentImpl::currentCallFrames(
+    ErrorString* errorString) {
+  if (m_pausedContext.IsEmpty() || !m_pausedCallFrames.size())
+    return Array<CallFrame>::create();
+  ErrorString ignored;
+  v8::HandleScope handles(m_isolate);
+  v8::Local<v8::Context> debuggerContext =
+      v8::Debug::GetDebugContext(m_isolate);
+  v8::Context::Scope contextScope(debuggerContext);
+
+  v8::Local<v8::Array> objects = v8::Array::New(m_isolate);
+
+  for (size_t frameOrdinal = 0; frameOrdinal < m_pausedCallFrames.size();
+       ++frameOrdinal) {
+    const std::unique_ptr<JavaScriptCallFrame>& currentCallFrame =
+        m_pausedCallFrames[frameOrdinal];
+
+    v8::Local<v8::Object> details = currentCallFrame->details();
+    if (hasInternalError(errorString, details.IsEmpty()))
+      return Array<CallFrame>::create();
+
+    int contextId = currentCallFrame->contextId();
+    InjectedScript* injectedScript =
+        contextId ? m_session->findInjectedScript(&ignored, contextId)
+                  : nullptr;
+
+    String16 callFrameId =
+        RemoteCallFrameId::serialize(contextId, static_cast<int>(frameOrdinal));
+    if (hasInternalError(
+            errorString,
+            !details
+                 ->Set(debuggerContext,
+                       toV8StringInternalized(m_isolate, "callFrameId"),
+                       toV8String(m_isolate, callFrameId))
+                 .FromMaybe(false)))
+      return Array<CallFrame>::create();
+
+    if (injectedScript) {
+      v8::Local<v8::Value> scopeChain;
+      if (hasInternalError(
+              errorString,
+              !details->Get(debuggerContext,
+                            toV8StringInternalized(m_isolate, "scopeChain"))
+                      .ToLocal(&scopeChain) ||
+                  !scopeChain->IsArray()))
+        return Array<CallFrame>::create();
+      v8::Local<v8::Array> scopeChainArray = scopeChain.As<v8::Array>();
+      if (!injectedScript->wrapPropertyInArray(
+              errorString, scopeChainArray,
+              toV8StringInternalized(m_isolate, "object"),
+              backtraceObjectGroup))
+        return Array<CallFrame>::create();
+      if (!injectedScript->wrapObjectProperty(
+              errorString, details, toV8StringInternalized(m_isolate, "this"),
+              backtraceObjectGroup))
+        return Array<CallFrame>::create();
+      if (details
+              ->Has(debuggerContext,
+                    toV8StringInternalized(m_isolate, "returnValue"))
+              .FromMaybe(false)) {
+        if (!injectedScript->wrapObjectProperty(
+                errorString, details,
+                toV8StringInternalized(m_isolate, "returnValue"),
+                backtraceObjectGroup))
+          return Array<CallFrame>::create();
+      }
+    } else {
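+      // No injected script is available for this frame's context: report an
+      // empty scope chain and an undefined |this|, and drop any return value.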
+      if (hasInternalError(errorString, !details
+                                             ->Set(debuggerContext,
+                                                   toV8StringInternalized(
+                                                       m_isolate, "scopeChain"),
+                                                   v8::Array::New(m_isolate, 0))
+                                             .FromMaybe(false)))
+        return Array<CallFrame>::create();
+      v8::Local<v8::Object> remoteObject = v8::Object::New(m_isolate);
+      if (hasInternalError(
+              errorString,
+              !remoteObject
+                   ->Set(debuggerContext,
+                         toV8StringInternalized(m_isolate, "type"),
+                         toV8StringInternalized(m_isolate, "undefined"))
+                   .FromMaybe(false)))
+        return Array<CallFrame>::create();
+      if (hasInternalError(errorString,
+                           !details
+                                ->Set(debuggerContext,
+                                      toV8StringInternalized(m_isolate, "this"),
+                                      remoteObject)
+                                .FromMaybe(false)))
+        return Array<CallFrame>::create();
+      if (hasInternalError(
+              errorString,
+              !details
+                   ->Delete(debuggerContext,
+                            toV8StringInternalized(m_isolate, "returnValue"))
+                   .FromMaybe(false)))
+        return Array<CallFrame>::create();
+    }
+
+    if (hasInternalError(
+            errorString,
+            !objects
+                 ->Set(debuggerContext, static_cast<int>(frameOrdinal), details)
+                 .FromMaybe(false)))
+      return Array<CallFrame>::create();
+  }
+
+  std::unique_ptr<protocol::Value> protocolValue =
+      toProtocolValue(errorString, debuggerContext, objects);
+  if (!protocolValue) return Array<CallFrame>::create();
+  protocol::ErrorSupport errorSupport;
+  std::unique_ptr<Array<CallFrame>> callFrames =
+      Array<CallFrame>::parse(protocolValue.get(), &errorSupport);
+  if (hasInternalError(errorString, !callFrames))
+    return Array<CallFrame>::create();
+  return callFrames;
+}
+
+std::unique_ptr<StackTrace> V8DebuggerAgentImpl::currentAsyncStackTrace() {
+  if (m_pausedContext.IsEmpty()) return nullptr;
+  V8StackTraceImpl* stackTrace = m_debugger->currentAsyncCallChain();
+  return stackTrace ? stackTrace->buildInspectorObjectForTail(m_debugger)
+                    : nullptr;
+}
+
+void V8DebuggerAgentImpl::didParseSource(
+    std::unique_ptr<V8DebuggerScript> script, bool success) {
+  v8::HandleScope handles(m_isolate);
+  String16 scriptSource = toProtocolString(script->source(m_isolate));
+  if (!success) script->setSourceURL(findSourceURL(scriptSource, false));
+  if (!success)
+    script->setSourceMappingURL(findSourceMapURL(scriptSource, false));
+
+  std::unique_ptr<protocol::DictionaryValue> executionContextAuxData;
+  if (!script->executionContextAuxData().isEmpty())
+    executionContextAuxData = protocol::DictionaryValue::cast(
+        protocol::parseJSON(script->executionContextAuxData()));
+  bool isLiveEdit = script->isLiveEdit();
+  bool hasSourceURL = script->hasSourceURL();
+  String16 scriptId = script->scriptId();
+  String16 scriptURL = script->sourceURL();
+
+  const Maybe<String16>& sourceMapURLParam = script->sourceMappingURL();
+  const Maybe<protocol::DictionaryValue>& executionContextAuxDataParam(
+      std::move(executionContextAuxData));
+  const bool* isLiveEditParam = isLiveEdit ? &isLiveEdit : nullptr;
+  const bool* hasSourceURLParam = hasSourceURL ? &hasSourceURL : nullptr;
+  if (success)
+    m_frontend.scriptParsed(
+        scriptId, scriptURL, script->startLine(), script->startColumn(),
+        script->endLine(), script->endColumn(), script->executionContextId(),
+        script->hash(), executionContextAuxDataParam, isLiveEditParam,
+        sourceMapURLParam, hasSourceURLParam);
+  else
+    m_frontend.scriptFailedToParse(
+        scriptId, scriptURL, script->startLine(), script->startColumn(),
+        script->endLine(), script->endColumn(), script->executionContextId(),
+        script->hash(), executionContextAuxDataParam, sourceMapURLParam,
+        hasSourceURLParam);
+
+  m_scripts[scriptId] = std::move(script);
+
+  if (scriptURL.isEmpty() || !success) return;
+
+  protocol::DictionaryValue* breakpointsCookie =
+      m_state->getObject(DebuggerAgentState::javaScriptBreakpoints);
+  if (!breakpointsCookie) return;
+
+  for (size_t i = 0; i < breakpointsCookie->size(); ++i) {
+    auto cookie = breakpointsCookie->at(i);
+    protocol::DictionaryValue* breakpointObject =
+        protocol::DictionaryValue::cast(cookie.second);
+    bool isRegex;
+    breakpointObject->getBoolean(DebuggerAgentState::isRegex, &isRegex);
+    String16 url;
+    breakpointObject->getString(DebuggerAgentState::url, &url);
+    if (!matches(m_inspector, scriptURL, url, isRegex)) continue;
+    ScriptBreakpoint breakpoint;
+    breakpointObject->getInteger(DebuggerAgentState::lineNumber,
+                                 &breakpoint.lineNumber);
+    breakpointObject->getInteger(DebuggerAgentState::columnNumber,
+                                 &breakpoint.columnNumber);
+    breakpointObject->getString(DebuggerAgentState::condition,
+                                &breakpoint.condition);
+    std::unique_ptr<protocol::Debugger::Location> location = resolveBreakpoint(
+        cookie.first, scriptId, breakpoint, UserBreakpointSource);
+    if (location)
+      m_frontend.breakpointResolved(cookie.first, std::move(location));
+  }
+}
+
+V8DebuggerAgentImpl::SkipPauseRequest V8DebuggerAgentImpl::didPause(
+    v8::Local<v8::Context> context, v8::Local<v8::Value> exception,
+    const std::vector<String16>& hitBreakpoints, bool isPromiseRejection) {
+  JavaScriptCallFrames callFrames = m_debugger->currentCallFrames(1);
+  JavaScriptCallFrame* topCallFrame =
+      !callFrames.empty() ? callFrames.begin()->get() : nullptr;
+
+  V8DebuggerAgentImpl::SkipPauseRequest result;
+  if (m_skipAllPauses)
+    result = RequestContinue;
+  else if (!hitBreakpoints.empty())
+    result = RequestNoSkip;  // Don't skip explicit breakpoints even if set in
+                             // frameworks.
+  else if (!exception.IsEmpty())
+    result = shouldSkipExceptionPause(topCallFrame);
+  else if (m_scheduledDebuggerStep != NoStep || m_javaScriptPauseScheduled ||
+           m_pausingOnNativeEvent)
+    result = shouldSkipStepPause(topCallFrame);
+  else
+    result = RequestNoSkip;
+
+  m_skipNextDebuggerStepOut = false;
+  if (result != RequestNoSkip) return result;
+  // Skip pauses inside V8 internal scripts and on syntax errors.
+  if (!topCallFrame) return RequestContinue;
+
+  DCHECK(m_pausedContext.IsEmpty());
+  JavaScriptCallFrames frames = m_debugger->currentCallFrames();
+  m_pausedCallFrames.swap(frames);
+  m_pausedContext.Reset(m_isolate, context);
+  v8::HandleScope handles(m_isolate);
+
+  if (!exception.IsEmpty()) {
+    ErrorString ignored;
+    InjectedScript* injectedScript =
+        m_session->findInjectedScript(&ignored, V8Debugger::contextId(context));
+    if (injectedScript) {
+      m_breakReason =
+          isPromiseRejection
+              ? protocol::Debugger::Paused::ReasonEnum::PromiseRejection
+              : protocol::Debugger::Paused::ReasonEnum::Exception;
+      ErrorString errorString;
+      auto obj = injectedScript->wrapObject(&errorString, exception,
+                                            backtraceObjectGroup);
+      m_breakAuxData = obj ? obj->serialize() : nullptr;
+      // m_breakAuxData might be null after this.
+    }
+  }
+
+  std::unique_ptr<Array<String16>> hitBreakpointIds = Array<String16>::create();
+
+  for (const auto& point : hitBreakpoints) {
+    DebugServerBreakpointToBreakpointIdAndSourceMap::iterator
+        breakpointIterator = m_serverBreakpoints.find(point);
+    if (breakpointIterator != m_serverBreakpoints.end()) {
+      const String16& localId = breakpointIterator->second.first;
+      hitBreakpointIds->addItem(localId);
+
+      BreakpointSource source = breakpointIterator->second.second;
+      if (m_breakReason == protocol::Debugger::Paused::ReasonEnum::Other &&
+          source == DebugCommandBreakpointSource)
+        m_breakReason = protocol::Debugger::Paused::ReasonEnum::DebugCommand;
+    }
+  }
+
+  ErrorString errorString;
+  m_frontend.paused(currentCallFrames(&errorString), m_breakReason,
+                    std::move(m_breakAuxData), std::move(hitBreakpointIds),
+                    currentAsyncStackTrace());
+  m_scheduledDebuggerStep = NoStep;
+  m_javaScriptPauseScheduled = false;
+  m_steppingFromFramework = false;
+  m_pausingOnNativeEvent = false;
+  m_skippedStepFrameCount = 0;
+  m_recursionLevelForStepFrame = 0;
+
+  if (!m_continueToLocationBreakpointId.isEmpty()) {
+    m_debugger->removeBreakpoint(m_continueToLocationBreakpointId);
+    m_continueToLocationBreakpointId = "";
+  }
+  return result;
+}
+
+void V8DebuggerAgentImpl::didContinue() {
+  m_pausedContext.Reset();
+  JavaScriptCallFrames emptyCallFrames;
+  m_pausedCallFrames.swap(emptyCallFrames);
+  clearBreakDetails();
+  m_frontend.resumed();
+}
+
+void V8DebuggerAgentImpl::breakProgram(
+    const String16& breakReason,
+    std::unique_ptr<protocol::DictionaryValue> data) {
+  if (!enabled() || m_skipAllPauses || !m_pausedContext.IsEmpty() ||
+      isCurrentCallStackEmptyOrBlackboxed() ||
+      !m_debugger->breakpointsActivated())
+    return;
+  m_breakReason = breakReason;
+  m_breakAuxData = std::move(data);
+  m_scheduledDebuggerStep = NoStep;
+  m_steppingFromFramework = false;
+  m_pausingOnNativeEvent = false;
+  m_debugger->breakProgram();
+}
+
+void V8DebuggerAgentImpl::breakProgramOnException(
+    const String16& breakReason,
+    std::unique_ptr<protocol::DictionaryValue> data) {
+  if (!enabled() ||
+      m_debugger->getPauseOnExceptionsState() ==
+          V8Debugger::DontPauseOnExceptions)
+    return;
+  breakProgram(breakReason, std::move(data));
+}
+
+bool V8DebuggerAgentImpl::assertPaused(ErrorString* errorString) {
+  if (m_pausedContext.IsEmpty()) {
+    *errorString = "Can only perform operation while paused.";
+    return false;
+  }
+  return true;
+}
+
+void V8DebuggerAgentImpl::clearBreakDetails() {
+  m_breakReason = protocol::Debugger::Paused::ReasonEnum::Other;
+  m_breakAuxData = nullptr;
+}
+
+void V8DebuggerAgentImpl::setBreakpointAt(const String16& scriptId,
+                                          int lineNumber, int columnNumber,
+                                          BreakpointSource source,
+                                          const String16& condition) {
+  String16 breakpointId =
+      generateBreakpointId(scriptId, lineNumber, columnNumber, source);
+  ScriptBreakpoint breakpoint(lineNumber, columnNumber, condition);
+  resolveBreakpoint(breakpointId, scriptId, breakpoint, source);
+}
+
+void V8DebuggerAgentImpl::removeBreakpointAt(const String16& scriptId,
+                                             int lineNumber, int columnNumber,
+                                             BreakpointSource source) {
+  removeBreakpoint(
+      generateBreakpointId(scriptId, lineNumber, columnNumber, source));
+}
+
+void V8DebuggerAgentImpl::reset() {
+  if (!enabled()) return;
+  m_scheduledDebuggerStep = NoStep;
+  m_scripts.clear();
+  m_blackboxedPositions.clear();
+  m_breakpointIdToDebuggerBreakpointIds.clear();
+}
+
+}  // namespace v8_inspector
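
The blackbox bookkeeping above (isCallFrameWithUnknownScriptOrBlackboxed and
setBlackboxedRanges) reduces to a parity check over a sorted list of flip
positions. A minimal standalone sketch of that rule, assuming
std::pair<int, int> in place of the protocol's ScriptPosition; isBlackboxedAt
is a hypothetical helper name, not V8 API:

// Sketch only: mirrors the lower_bound + parity logic in
// V8DebuggerAgentImpl::isCallFrameWithUnknownScriptOrBlackboxed.
#include <algorithm>
#include <cassert>
#include <iterator>
#include <utility>
#include <vector>

// |ranges| holds sorted (line, column) positions at which the blackboxed
// state flips; positions before ranges[0] are not blackboxed.
static bool isBlackboxedAt(const std::vector<std::pair<int, int>>& ranges,
                           int line, int column) {
  auto it = std::lower_bound(ranges.begin(), ranges.end(),
                             std::make_pair(line, column));
  // An odd number of flips before (line, column) means the position falls
  // inside a blackboxed span.
  return std::distance(ranges.begin(), it) % 2;
}

int main() {
  // Blackbox everything between (10, 0) and (20, 0).
  std::vector<std::pair<int, int>> ranges = {{10, 0}, {20, 0}};
  assert(!isBlackboxedAt(ranges, 5, 3));   // before the first flip
  assert(isBlackboxedAt(ranges, 15, 7));   // between the flips
  assert(!isBlackboxedAt(ranges, 25, 0));  // after the second flip
  return 0;
}
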
diff --git a/src/inspector/v8-debugger-agent-impl.h b/src/inspector/v8-debugger-agent-impl.h
new file mode 100644
index 0000000..62aa67b
--- /dev/null
+++ b/src/inspector/v8-debugger-agent-impl.h
@@ -0,0 +1,224 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8DEBUGGERAGENTIMPL_H_
+#define V8_INSPECTOR_V8DEBUGGERAGENTIMPL_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/java-script-call-frame.h"
+#include "src/inspector/protocol/Debugger.h"
+#include "src/inspector/protocol/Forward.h"
+
+namespace v8_inspector {
+
+struct ScriptBreakpoint;
+class JavaScriptCallFrame;
+class PromiseTracker;
+class V8Debugger;
+class V8DebuggerScript;
+class V8InspectorImpl;
+class V8InspectorSessionImpl;
+class V8Regex;
+class V8StackTraceImpl;
+
+using protocol::ErrorString;
+using protocol::Maybe;
+
+class V8DebuggerAgentImpl : public protocol::Debugger::Backend {
+ public:
+  enum SkipPauseRequest {
+    RequestNoSkip,
+    RequestContinue,
+    RequestStepInto,
+    RequestStepOut,
+    RequestStepFrame
+  };
+
+  enum BreakpointSource {
+    UserBreakpointSource,
+    DebugCommandBreakpointSource,
+    MonitorCommandBreakpointSource
+  };
+
+  V8DebuggerAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                      protocol::DictionaryValue* state);
+  ~V8DebuggerAgentImpl() override;
+  void restore();
+
+  // Part of the protocol.
+  void enable(ErrorString*) override;
+  void disable(ErrorString*) override;
+  void setBreakpointsActive(ErrorString*, bool active) override;
+  void setSkipAllPauses(ErrorString*, bool skip) override;
+  void setBreakpointByUrl(
+      ErrorString*, int lineNumber, const Maybe<String16>& optionalURL,
+      const Maybe<String16>& optionalURLRegex,
+      const Maybe<int>& optionalColumnNumber,
+      const Maybe<String16>& optionalCondition, String16*,
+      std::unique_ptr<protocol::Array<protocol::Debugger::Location>>* locations)
+      override;
+  void setBreakpoint(
+      ErrorString*, std::unique_ptr<protocol::Debugger::Location>,
+      const Maybe<String16>& optionalCondition, String16*,
+      std::unique_ptr<protocol::Debugger::Location>* actualLocation) override;
+  void removeBreakpoint(ErrorString*, const String16& breakpointId) override;
+  void continueToLocation(
+      ErrorString*, std::unique_ptr<protocol::Debugger::Location>) override;
+  void searchInContent(
+      ErrorString*, const String16& scriptId, const String16& query,
+      const Maybe<bool>& optionalCaseSensitive,
+      const Maybe<bool>& optionalIsRegex,
+      std::unique_ptr<protocol::Array<protocol::Debugger::SearchMatch>>*)
+      override;
+  void setScriptSource(
+      ErrorString*, const String16& inScriptId, const String16& inScriptSource,
+      const Maybe<bool>& dryRun,
+      Maybe<protocol::Array<protocol::Debugger::CallFrame>>* optOutCallFrames,
+      Maybe<bool>* optOutStackChanged,
+      Maybe<protocol::Runtime::StackTrace>* optOutAsyncStackTrace,
+      Maybe<protocol::Runtime::ExceptionDetails>* optOutCompileError) override;
+  void restartFrame(
+      ErrorString*, const String16& callFrameId,
+      std::unique_ptr<protocol::Array<protocol::Debugger::CallFrame>>*
+          newCallFrames,
+      Maybe<protocol::Runtime::StackTrace>* asyncStackTrace) override;
+  void getScriptSource(ErrorString*, const String16& scriptId,
+                       String16* scriptSource) override;
+  void pause(ErrorString*) override;
+  void resume(ErrorString*) override;
+  void stepOver(ErrorString*) override;
+  void stepInto(ErrorString*) override;
+  void stepOut(ErrorString*) override;
+  void setPauseOnExceptions(ErrorString*, const String16& pauseState) override;
+  void evaluateOnCallFrame(
+      ErrorString*, const String16& callFrameId, const String16& expression,
+      const Maybe<String16>& objectGroup,
+      const Maybe<bool>& includeCommandLineAPI, const Maybe<bool>& silent,
+      const Maybe<bool>& returnByValue, const Maybe<bool>& generatePreview,
+      std::unique_ptr<protocol::Runtime::RemoteObject>* result,
+      Maybe<protocol::Runtime::ExceptionDetails>*) override;
+  void setVariableValue(
+      ErrorString*, int scopeNumber, const String16& variableName,
+      std::unique_ptr<protocol::Runtime::CallArgument> newValue,
+      const String16& callFrame) override;
+  void setAsyncCallStackDepth(ErrorString*, int depth) override;
+  void setBlackboxPatterns(
+      ErrorString*,
+      std::unique_ptr<protocol::Array<String16>> patterns) override;
+  void setBlackboxedRanges(
+      ErrorString*, const String16& scriptId,
+      std::unique_ptr<protocol::Array<protocol::Debugger::ScriptPosition>>
+          positions) override;
+
+  bool enabled();
+
+  void setBreakpointAt(const String16& scriptId, int lineNumber,
+                       int columnNumber, BreakpointSource,
+                       const String16& condition = String16());
+  void removeBreakpointAt(const String16& scriptId, int lineNumber,
+                          int columnNumber, BreakpointSource);
+  void schedulePauseOnNextStatement(
+      const String16& breakReason,
+      std::unique_ptr<protocol::DictionaryValue> data);
+  void cancelPauseOnNextStatement();
+  void breakProgram(const String16& breakReason,
+                    std::unique_ptr<protocol::DictionaryValue> data);
+  void breakProgramOnException(const String16& breakReason,
+                               std::unique_ptr<protocol::DictionaryValue> data);
+
+  void reset();
+
+  // Interface for V8InspectorImpl
+  SkipPauseRequest didPause(v8::Local<v8::Context>,
+                            v8::Local<v8::Value> exception,
+                            const std::vector<String16>& hitBreakpoints,
+                            bool isPromiseRejection);
+  void didContinue();
+  void didParseSource(std::unique_ptr<V8DebuggerScript>, bool success);
+  void willExecuteScript(int scriptId);
+  void didExecuteScript();
+
+  v8::Isolate* isolate() { return m_isolate; }
+
+ private:
+  bool checkEnabled(ErrorString*);
+  void enable();
+
+  SkipPauseRequest shouldSkipExceptionPause(JavaScriptCallFrame* topCallFrame);
+  SkipPauseRequest shouldSkipStepPause(JavaScriptCallFrame* topCallFrame);
+
+  void schedulePauseOnNextStatementIfSteppingInto();
+
+  std::unique_ptr<protocol::Array<protocol::Debugger::CallFrame>>
+  currentCallFrames(ErrorString*);
+  std::unique_ptr<protocol::Runtime::StackTrace> currentAsyncStackTrace();
+
+  void changeJavaScriptRecursionLevel(int step);
+
+  void setPauseOnExceptionsImpl(ErrorString*, int);
+
+  std::unique_ptr<protocol::Debugger::Location> resolveBreakpoint(
+      const String16& breakpointId, const String16& scriptId,
+      const ScriptBreakpoint&, BreakpointSource);
+  void removeBreakpoint(const String16& breakpointId);
+  bool assertPaused(ErrorString*);
+  void clearBreakDetails();
+
+  bool isCurrentCallStackEmptyOrBlackboxed();
+  bool isTopPausedCallFrameBlackboxed();
+  bool isCallFrameWithUnknownScriptOrBlackboxed(JavaScriptCallFrame*);
+
+  void internalSetAsyncCallStackDepth(int);
+  void increaseCachedSkipStackGeneration();
+
+  bool setBlackboxPattern(ErrorString*, const String16& pattern);
+
+  using ScriptsMap =
+      protocol::HashMap<String16, std::unique_ptr<V8DebuggerScript>>;
+  using BreakpointIdToDebuggerBreakpointIdsMap =
+      protocol::HashMap<String16, std::vector<String16>>;
+  using DebugServerBreakpointToBreakpointIdAndSourceMap =
+      protocol::HashMap<String16, std::pair<String16, BreakpointSource>>;
+  using MuteBreakpoints = protocol::HashMap<String16, std::pair<String16, int>>;
+
+  enum DebuggerStep { NoStep = 0, StepInto, StepOver, StepOut };
+
+  V8InspectorImpl* m_inspector;
+  V8Debugger* m_debugger;
+  V8InspectorSessionImpl* m_session;
+  bool m_enabled;
+  protocol::DictionaryValue* m_state;
+  protocol::Debugger::Frontend m_frontend;
+  v8::Isolate* m_isolate;
+  v8::Global<v8::Context> m_pausedContext;
+  JavaScriptCallFrames m_pausedCallFrames;
+  ScriptsMap m_scripts;
+  BreakpointIdToDebuggerBreakpointIdsMap m_breakpointIdToDebuggerBreakpointIds;
+  DebugServerBreakpointToBreakpointIdAndSourceMap m_serverBreakpoints;
+  String16 m_continueToLocationBreakpointId;
+  String16 m_breakReason;
+  std::unique_ptr<protocol::DictionaryValue> m_breakAuxData;
+  DebuggerStep m_scheduledDebuggerStep;
+  bool m_skipNextDebuggerStepOut;
+  bool m_javaScriptPauseScheduled;
+  bool m_steppingFromFramework;
+  bool m_pausingOnNativeEvent;
+
+  int m_skippedStepFrameCount;
+  int m_recursionLevelForStepOut;
+  int m_recursionLevelForStepFrame;
+  bool m_skipAllPauses;
+
+  std::unique_ptr<V8Regex> m_blackboxPattern;
+  protocol::HashMap<String16, std::vector<std::pair<int, int>>>
+      m_blackboxedPositions;
+
+  DISALLOW_COPY_AND_ASSIGN(V8DebuggerAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8DEBUGGERAGENTIMPL_H_
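
The two breakpoint maps declared above (m_breakpointIdToDebuggerBreakpointIds
and m_serverBreakpoints) index the same relationship from both directions: one
protocol breakpoint can resolve to several V8-side breakpoints, and each
V8-side breakpoint remembers which protocol breakpoint and source it belongs
to. A hedged sketch of that bookkeeping with plain std containers
(BreakpointIndex and its members are illustrative names, not V8 API):

#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

// Illustrative sketch: std::string stands in for String16, and the two maps
// mirror m_breakpointIdToDebuggerBreakpointIds / m_serverBreakpoints above.
struct BreakpointIndex {
  // protocol breakpoint id -> all V8 breakpoint ids it resolved to
  std::unordered_map<std::string, std::vector<std::string>> toDebuggerIds;
  // V8 breakpoint id -> (protocol breakpoint id, breakpoint source)
  std::unordered_map<std::string, std::pair<std::string, int>> toProtocolId;

  void add(const std::string& protocolId, const std::string& debuggerId,
           int source) {
    toDebuggerIds[protocolId].push_back(debuggerId);
    toProtocolId[debuggerId] = std::make_pair(protocolId, source);
  }

  // Mirrors V8DebuggerAgentImpl::removeBreakpoint(const String16&): drop every
  // V8-side breakpoint belonging to one protocol breakpoint. (The real code
  // also tells the debugger itself to remove each V8 breakpoint.)
  void remove(const std::string& protocolId) {
    auto it = toDebuggerIds.find(protocolId);
    if (it == toDebuggerIds.end()) return;
    for (const std::string& debuggerId : it->second)
      toProtocolId.erase(debuggerId);
    toDebuggerIds.erase(it);
  }
};
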
diff --git a/src/inspector/v8-debugger-script.cc b/src/inspector/v8-debugger-script.cc
new file mode 100644
index 0000000..485188a
--- /dev/null
+++ b/src/inspector/v8-debugger-script.cc
@@ -0,0 +1,140 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-debugger-script.h"
+
+#include "src/inspector/protocol-platform.h"
+#include "src/inspector/string-util.h"
+
+namespace v8_inspector {
+
+static const char hexDigits[17] = "0123456789ABCDEF";
+
+static void appendUnsignedAsHex(uint64_t number, String16Builder* destination) {
+  for (size_t i = 0; i < 8; ++i) {
+    UChar c = hexDigits[number & 0xF];
+    destination->append(c);
+    number >>= 4;
+  }
+}
+
+// The hash algorithm for substrings is described in "Über die Komplexität der
+// Multiplikation in eingeschränkten Branchingprogrammmodellen" by Woelfe.
+// http://opendatastructures.org/versions/edition-0.1d/ods-java/node33.html#SECTION00832000000000000000
+static String16 calculateHash(const String16& str) {
+  static uint64_t prime[] = {0x3FB75161, 0xAB1F4E4F, 0x82675BC5, 0xCD924D35,
+                             0x81ABE279};
+  static uint64_t random[] = {0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476,
+                              0xC3D2E1F0};
+  static uint32_t randomOdd[] = {0xB4663807, 0xCC322BF5, 0xD4F91BBD, 0xA7BEA11D,
+                                 0x8F462907};
+
+  uint64_t hashes[] = {0, 0, 0, 0, 0};
+  uint64_t zi[] = {1, 1, 1, 1, 1};
+
+  const size_t hashesSize = arraysize(hashes);
+
+  size_t current = 0;
+  const uint32_t* data = nullptr;
+  size_t sizeInBytes = sizeof(UChar) * str.length();
+  data = reinterpret_cast<const uint32_t*>(str.characters16());
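+  // Each 32-bit chunk visited by the loop below updates one of the five
+  // interleaved polynomial hashes: hashes[k] += xi * zi[k] (mod prime[k]).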
+  for (size_t i = 0; i < sizeInBytes / 4; i += 4) {
+    uint32_t v = data[i];
+    uint64_t xi = v * randomOdd[current] & 0x7FFFFFFF;
+    hashes[current] = (hashes[current] + zi[current] * xi) % prime[current];
+    zi[current] = (zi[current] * random[current]) % prime[current];
+    current = current == hashesSize - 1 ? 0 : current + 1;
+  }
+  if (sizeInBytes % 4) {
+    uint32_t v = 0;
+    for (size_t i = sizeInBytes - sizeInBytes % 4; i < sizeInBytes; ++i) {
+      v <<= 8;
+      v |= reinterpret_cast<const uint8_t*>(data)[i];
+    }
+    uint64_t xi = v * randomOdd[current] & 0x7FFFFFFF;
+    hashes[current] = (hashes[current] + zi[current] * xi) % prime[current];
+    zi[current] = (zi[current] * random[current]) % prime[current];
+    current = current == hashesSize - 1 ? 0 : current + 1;
+  }
+
+  for (size_t i = 0; i < hashesSize; ++i)
+    hashes[i] = (hashes[i] + zi[i] * (prime[i] - 1)) % prime[i];
+
+  String16Builder hash;
+  for (size_t i = 0; i < hashesSize; ++i) appendUnsignedAsHex(hashes[i], &hash);
+  return hash.toString();
+}
+
+static v8::Local<v8::Value> GetChecked(v8::Local<v8::Context> context,
+                                       v8::Local<v8::Object> object,
+                                       const char* name) {
+  return object
+      ->Get(context, toV8StringInternalized(context->GetIsolate(), name))
+      .ToLocalChecked();
+}
+
+static int GetCheckedInt(v8::Local<v8::Context> context,
+                         v8::Local<v8::Object> object, const char* name) {
+  return static_cast<int>(GetChecked(context, object, name)
+                              ->ToInteger(context)
+                              .ToLocalChecked()
+                              ->Value());
+}
+
+V8DebuggerScript::V8DebuggerScript(v8::Local<v8::Context> context,
+                                   v8::Local<v8::Object> object,
+                                   bool isLiveEdit) {
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::Local<v8::Value> idValue = GetChecked(context, object, "id");
+  DCHECK(!idValue.IsEmpty() && idValue->IsInt32());
+  m_id = String16::fromInteger(idValue->Int32Value(context).FromJust());
+
+  m_url = toProtocolStringWithTypeCheck(GetChecked(context, object, "name"));
+  m_sourceURL =
+      toProtocolStringWithTypeCheck(GetChecked(context, object, "sourceURL"));
+  m_sourceMappingURL = toProtocolStringWithTypeCheck(
+      GetChecked(context, object, "sourceMappingURL"));
+  m_startLine = GetCheckedInt(context, object, "startLine");
+  m_startColumn = GetCheckedInt(context, object, "startColumn");
+  m_endLine = GetCheckedInt(context, object, "endLine");
+  m_endColumn = GetCheckedInt(context, object, "endColumn");
+  m_executionContextAuxData = toProtocolStringWithTypeCheck(
+      GetChecked(context, object, "executionContextAuxData"));
+  m_executionContextId = GetCheckedInt(context, object, "executionContextId");
+  m_isLiveEdit = isLiveEdit;
+
+  v8::Local<v8::Value> sourceValue;
+  if (!object->Get(context, toV8StringInternalized(isolate, "source"))
+           .ToLocal(&sourceValue) ||
+      !sourceValue->IsString())
+    return;
+  setSource(isolate, sourceValue.As<v8::String>());
+}
+
+V8DebuggerScript::~V8DebuggerScript() {}
+
+const String16& V8DebuggerScript::sourceURL() const {
+  return m_sourceURL.isEmpty() ? m_url : m_sourceURL;
+}
+
+v8::Local<v8::String> V8DebuggerScript::source(v8::Isolate* isolate) const {
+  return m_source.Get(isolate);
+}
+
+void V8DebuggerScript::setSourceURL(const String16& sourceURL) {
+  m_sourceURL = sourceURL;
+}
+
+void V8DebuggerScript::setSourceMappingURL(const String16& sourceMappingURL) {
+  m_sourceMappingURL = sourceMappingURL;
+}
+
+void V8DebuggerScript::setSource(v8::Isolate* isolate,
+                                 v8::Local<v8::String> source) {
+  m_source.Reset(isolate, source);
+  m_hash = calculateHash(toProtocolString(source));
+}
+
+}  // namespace v8_inspector
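
One subtlety in the hash formatting above: appendUnsignedAsHex emits the eight
low-order nibbles of each 64-bit hash least-significant first, so the script
hash string is five groups of eight hex digits in reversed nibble order. A
standalone sketch of the same routine, assuming std::string in place of
String16Builder (illustrative only):

#include <cstddef>
#include <cstdint>
#include <string>

// Sketch of appendUnsignedAsHex from v8-debugger-script.cc: only the low
// 32 bits of |number| are emitted, least-significant nibble first.
static void appendUnsignedAsHex(uint64_t number, std::string* destination) {
  static const char hexDigits[] = "0123456789ABCDEF";
  for (size_t i = 0; i < 8; ++i) {
    destination->push_back(hexDigits[number & 0xF]);
    number >>= 4;
  }
}

// Example: appendUnsignedAsHex(0x1A2B, &s) appends "B2A10000" to s.
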
diff --git a/src/inspector/v8-debugger-script.h b/src/inspector/v8-debugger-script.h
new file mode 100644
index 0000000..78c44b5
--- /dev/null
+++ b/src/inspector/v8-debugger-script.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1.  Redistributions of source code must retain the above copyright
+ *     notice, this list of conditions and the following disclaimer.
+ * 2.  Redistributions in binary form must reproduce the above copyright
+ *     notice, this list of conditions and the following disclaimer in the
+ *     documentation and/or other materials provided with the distribution.
+ * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ *     its contributors may be used to endorse or promote products derived
+ *     from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_V8DEBUGGERSCRIPT_H_
+#define V8_INSPECTOR_V8DEBUGGERSCRIPT_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/string-16.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class V8DebuggerScript {
+ public:
+  V8DebuggerScript(v8::Local<v8::Context>, v8::Local<v8::Object>,
+                   bool isLiveEdit);
+  ~V8DebuggerScript();
+
+  const String16& scriptId() const { return m_id; }
+  const String16& url() const { return m_url; }
+  bool hasSourceURL() const { return !m_sourceURL.isEmpty(); }
+  const String16& sourceURL() const;
+  const String16& sourceMappingURL() const { return m_sourceMappingURL; }
+  v8::Local<v8::String> source(v8::Isolate*) const;
+  const String16& hash() const { return m_hash; }
+  int startLine() const { return m_startLine; }
+  int startColumn() const { return m_startColumn; }
+  int endLine() const { return m_endLine; }
+  int endColumn() const { return m_endColumn; }
+  int executionContextId() const { return m_executionContextId; }
+  const String16& executionContextAuxData() const {
+    return m_executionContextAuxData;
+  }
+  bool isLiveEdit() const { return m_isLiveEdit; }
+
+  void setSourceURL(const String16&);
+  void setSourceMappingURL(const String16&);
+  void setSource(v8::Isolate*, v8::Local<v8::String>);
+
+ private:
+  String16 m_id;
+  String16 m_url;
+  String16 m_sourceURL;
+  String16 m_sourceMappingURL;
+  v8::Global<v8::String> m_source;
+  String16 m_hash;
+  int m_startLine;
+  int m_startColumn;
+  int m_endLine;
+  int m_endColumn;
+  int m_executionContextId;
+  String16 m_executionContextAuxData;
+  bool m_isLiveEdit;
+
+  DISALLOW_COPY_AND_ASSIGN(V8DebuggerScript);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8DEBUGGERSCRIPT_H_
diff --git a/src/inspector/v8-debugger.cc b/src/inspector/v8-debugger.cc
new file mode 100644
index 0000000..d393f81
--- /dev/null
+++ b/src/inspector/v8-debugger.cc
@@ -0,0 +1,1002 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-debugger.h"
+
+#include "src/inspector/debugger-script.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/script-breakpoint.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger-agent-impl.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-internal-value-type.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+#include "src/inspector/v8-value-copier.h"
+
+namespace v8_inspector {
+
+namespace {
+const char stepIntoV8MethodName[] = "stepIntoStatement";
+const char stepOutV8MethodName[] = "stepOutOfFunction";
+static const char v8AsyncTaskEventEnqueue[] = "enqueue";
+static const char v8AsyncTaskEventEnqueueRecurring[] = "enqueueRecurring";
+static const char v8AsyncTaskEventWillHandle[] = "willHandle";
+static const char v8AsyncTaskEventDidHandle[] = "didHandle";
+static const char v8AsyncTaskEventCancel[] = "cancel";
+
+inline v8::Local<v8::Boolean> v8Boolean(bool value, v8::Isolate* isolate) {
+  return value ? v8::True(isolate) : v8::False(isolate);
+}
+
+}  // namespace
+
+static bool inLiveEditScope = false;
+
+v8::MaybeLocal<v8::Value> V8Debugger::callDebuggerMethod(
+    const char* functionName, int argc, v8::Local<v8::Value> argv[]) {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  DCHECK(m_isolate->InContext());
+  v8::Local<v8::Context> context = m_isolate->GetCurrentContext();
+  v8::Local<v8::Object> debuggerScript = m_debuggerScript.Get(m_isolate);
+  v8::Local<v8::Function> function = v8::Local<v8::Function>::Cast(
+      debuggerScript
+          ->Get(context, toV8StringInternalized(m_isolate, functionName))
+          .ToLocalChecked());
+  return function->Call(context, debuggerScript, argc, argv);
+}
+
+V8Debugger::V8Debugger(v8::Isolate* isolate, V8InspectorImpl* inspector)
+    : m_isolate(isolate),
+      m_inspector(inspector),
+      m_lastContextId(0),
+      m_enableCount(0),
+      m_breakpointsActivated(true),
+      m_runningNestedMessageLoop(false),
+      m_ignoreScriptParsedEventsCounter(0),
+      m_maxAsyncCallStackDepth(0) {}
+
+V8Debugger::~V8Debugger() {}
+
+void V8Debugger::enable() {
+  if (m_enableCount++) return;
+  DCHECK(!enabled());
+  v8::HandleScope scope(m_isolate);
+  v8::Debug::SetDebugEventListener(m_isolate, &V8Debugger::v8DebugEventCallback,
+                                   v8::External::New(m_isolate, this));
+  m_debuggerContext.Reset(m_isolate, v8::Debug::GetDebugContext(m_isolate));
+  compileDebuggerScript();
+}
+
+void V8Debugger::disable() {
+  if (--m_enableCount) return;
+  DCHECK(enabled());
+  clearBreakpoints();
+  m_debuggerScript.Reset();
+  m_debuggerContext.Reset();
+  allAsyncTasksCanceled();
+  v8::Debug::SetDebugEventListener(m_isolate, nullptr);
+}
+
+bool V8Debugger::enabled() const { return !m_debuggerScript.IsEmpty(); }
+
+// static
+int V8Debugger::contextId(v8::Local<v8::Context> context) {
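+  // The context's embedder data at kDebugIdIndex is a comma-separated string;
+  // the first field is the context group id and the second the context id.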
+  v8::Local<v8::Value> data =
+      context->GetEmbedderData(static_cast<int>(v8::Context::kDebugIdIndex));
+  if (data.IsEmpty() || !data->IsString()) return 0;
+  String16 dataString = toProtocolString(data.As<v8::String>());
+  if (dataString.isEmpty()) return 0;
+  size_t commaPos = dataString.find(",");
+  if (commaPos == String16::kNotFound) return 0;
+  size_t commaPos2 = dataString.find(",", commaPos + 1);
+  if (commaPos2 == String16::kNotFound) return 0;
+  return dataString.substring(commaPos + 1, commaPos2 - commaPos - 1)
+      .toInteger();
+}
+
+// static
+int V8Debugger::getGroupId(v8::Local<v8::Context> context) {
+  v8::Local<v8::Value> data =
+      context->GetEmbedderData(static_cast<int>(v8::Context::kDebugIdIndex));
+  if (data.IsEmpty() || !data->IsString()) return 0;
+  String16 dataString = toProtocolString(data.As<v8::String>());
+  if (dataString.isEmpty()) return 0;
+  size_t commaPos = dataString.find(",");
+  if (commaPos == String16::kNotFound) return 0;
+  return dataString.substring(0, commaPos).toInteger();
+}
+
+void V8Debugger::getCompiledScripts(
+    int contextGroupId,
+    std::vector<std::unique_ptr<V8DebuggerScript>>& result) {
+  v8::HandleScope scope(m_isolate);
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Local<v8::Object> debuggerScript = m_debuggerScript.Get(m_isolate);
+  DCHECK(!debuggerScript->IsUndefined());
+  v8::Local<v8::Function> getScriptsFunction = v8::Local<v8::Function>::Cast(
+      debuggerScript
+          ->Get(context, toV8StringInternalized(m_isolate, "getScripts"))
+          .ToLocalChecked());
+  v8::Local<v8::Value> argv[] = {v8::Integer::New(m_isolate, contextGroupId)};
+  v8::Local<v8::Value> value;
+  if (!getScriptsFunction->Call(context, debuggerScript, arraysize(argv), argv)
+           .ToLocal(&value))
+    return;
+  DCHECK(value->IsArray());
+  v8::Local<v8::Array> scriptsArray = v8::Local<v8::Array>::Cast(value);
+  result.reserve(scriptsArray->Length());
+  for (unsigned i = 0; i < scriptsArray->Length(); ++i) {
+    v8::Local<v8::Object> scriptObject = v8::Local<v8::Object>::Cast(
+        scriptsArray->Get(context, v8::Integer::New(m_isolate, i))
+            .ToLocalChecked());
+    result.push_back(wrapUnique(
+        new V8DebuggerScript(context, scriptObject, inLiveEditScope)));
+  }
+}
+
+String16 V8Debugger::setBreakpoint(const String16& sourceID,
+                                   const ScriptBreakpoint& scriptBreakpoint,
+                                   int* actualLineNumber,
+                                   int* actualColumnNumber) {
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Context::Scope contextScope(context);
+
+  v8::Local<v8::Object> info = v8::Object::New(m_isolate);
+  bool success = false;
+  success = info->Set(context, toV8StringInternalized(m_isolate, "sourceID"),
+                      toV8String(m_isolate, sourceID))
+                .FromMaybe(false);
+  DCHECK(success);
+  success = info->Set(context, toV8StringInternalized(m_isolate, "lineNumber"),
+                      v8::Integer::New(m_isolate, scriptBreakpoint.lineNumber))
+                .FromMaybe(false);
+  DCHECK(success);
+  success =
+      info->Set(context, toV8StringInternalized(m_isolate, "columnNumber"),
+                v8::Integer::New(m_isolate, scriptBreakpoint.columnNumber))
+          .FromMaybe(false);
+  DCHECK(success);
+  success = info->Set(context, toV8StringInternalized(m_isolate, "condition"),
+                      toV8String(m_isolate, scriptBreakpoint.condition))
+                .FromMaybe(false);
+  DCHECK(success);
+
+  v8::Local<v8::Function> setBreakpointFunction = v8::Local<v8::Function>::Cast(
+      m_debuggerScript.Get(m_isolate)
+          ->Get(context, toV8StringInternalized(m_isolate, "setBreakpoint"))
+          .ToLocalChecked());
+  v8::Local<v8::Value> breakpointId =
+      v8::Debug::Call(debuggerContext(), setBreakpointFunction, info)
+          .ToLocalChecked();
+  if (!breakpointId->IsString()) return "";
+  *actualLineNumber =
+      info->Get(context, toV8StringInternalized(m_isolate, "lineNumber"))
+          .ToLocalChecked()
+          ->Int32Value(context)
+          .FromJust();
+  *actualColumnNumber =
+      info->Get(context, toV8StringInternalized(m_isolate, "columnNumber"))
+          .ToLocalChecked()
+          ->Int32Value(context)
+          .FromJust();
+  return toProtocolString(breakpointId.As<v8::String>());
+}
+
+void V8Debugger::removeBreakpoint(const String16& breakpointId) {
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Context::Scope contextScope(context);
+
+  v8::Local<v8::Object> info = v8::Object::New(m_isolate);
+  bool success = false;
+  success =
+      info->Set(context, toV8StringInternalized(m_isolate, "breakpointId"),
+                toV8String(m_isolate, breakpointId))
+          .FromMaybe(false);
+  DCHECK(success);
+
+  v8::Local<v8::Function> removeBreakpointFunction =
+      v8::Local<v8::Function>::Cast(
+          m_debuggerScript.Get(m_isolate)
+              ->Get(context,
+                    toV8StringInternalized(m_isolate, "removeBreakpoint"))
+              .ToLocalChecked());
+  v8::Debug::Call(debuggerContext(), removeBreakpointFunction, info)
+      .ToLocalChecked();
+}
+
+void V8Debugger::clearBreakpoints() {
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Context::Scope contextScope(context);
+
+  v8::Local<v8::Function> clearBreakpoints = v8::Local<v8::Function>::Cast(
+      m_debuggerScript.Get(m_isolate)
+          ->Get(context, toV8StringInternalized(m_isolate, "clearBreakpoints"))
+          .ToLocalChecked());
+  v8::Debug::Call(debuggerContext(), clearBreakpoints).ToLocalChecked();
+}
+
+void V8Debugger::setBreakpointsActivated(bool activated) {
+  if (!enabled()) {
+    UNREACHABLE();
+    return;
+  }
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Context::Scope contextScope(context);
+
+  v8::Local<v8::Object> info = v8::Object::New(m_isolate);
+  bool success = false;
+  success = info->Set(context, toV8StringInternalized(m_isolate, "enabled"),
+                      v8::Boolean::New(m_isolate, activated))
+                .FromMaybe(false);
+  DCHECK(success);
+  v8::Local<v8::Function> setBreakpointsActivated =
+      v8::Local<v8::Function>::Cast(
+          m_debuggerScript.Get(m_isolate)
+              ->Get(context, toV8StringInternalized(m_isolate,
+                                                    "setBreakpointsActivated"))
+              .ToLocalChecked());
+  v8::Debug::Call(debuggerContext(), setBreakpointsActivated, info)
+      .ToLocalChecked();
+
+  m_breakpointsActivated = activated;
+}
+
+V8Debugger::PauseOnExceptionsState V8Debugger::getPauseOnExceptionsState() {
+  DCHECK(enabled());
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Context> context = debuggerContext();
+  v8::Context::Scope contextScope(context);
+
+  v8::Local<v8::Value> argv[] = {v8::Undefined(m_isolate)};
+  v8::Local<v8::Value> result =
+      callDebuggerMethod("pauseOnExceptionsState", 0, argv).ToLocalChecked();
+  return static_cast<V8Debugger::PauseOnExceptionsState>(
+      result->Int32Value(context).FromJust());
+}
+
+void V8Debugger::setPauseOnExceptionsState(
+    PauseOnExceptionsState pauseOnExceptionsState) {
+  DCHECK(enabled());
+  v8::HandleScope scope(m_isolate);
+  v8::Context::Scope contextScope(debuggerContext());
+
+  v8::Local<v8::Value> argv[] = {
+      v8::Int32::New(m_isolate, pauseOnExceptionsState)};
+  callDebuggerMethod("setPauseOnExceptionsState", 1, argv);
+}
+
+void V8Debugger::setPauseOnNextStatement(bool pause) {
+  if (m_runningNestedMessageLoop) return;
+  if (pause)
+    v8::Debug::DebugBreak(m_isolate);
+  else
+    v8::Debug::CancelDebugBreak(m_isolate);
+}
+
+bool V8Debugger::canBreakProgram() {
+  if (!m_breakpointsActivated) return false;
+  return m_isolate->InContext();
+}
+
+void V8Debugger::breakProgram() {
+  if (isPaused()) {
+    DCHECK(!m_runningNestedMessageLoop);
+    v8::Local<v8::Value> exception;
+    v8::Local<v8::Array> hitBreakpoints;
+    handleProgramBreak(m_pausedContext, m_executionState, exception,
+                       hitBreakpoints);
+    return;
+  }
+
+  if (!canBreakProgram()) return;
+
+  v8::HandleScope scope(m_isolate);
+  v8::Local<v8::Function> breakFunction;
+  if (!v8::Function::New(m_isolate->GetCurrentContext(),
+                         &V8Debugger::breakProgramCallback,
+                         v8::External::New(m_isolate, this), 0,
+                         v8::ConstructorBehavior::kThrow)
+           .ToLocal(&breakFunction))
+    return;
+  v8::Debug::Call(debuggerContext(), breakFunction).ToLocalChecked();
+}
+
+void V8Debugger::continueProgram() {
+  if (isPaused()) m_inspector->client()->quitMessageLoopOnPause();
+  m_pausedContext.Clear();
+  m_executionState.Clear();
+}
+
+void V8Debugger::stepIntoStatement() {
+  DCHECK(isPaused());
+  DCHECK(!m_executionState.IsEmpty());
+  v8::HandleScope handleScope(m_isolate);
+  v8::Local<v8::Value> argv[] = {m_executionState};
+  callDebuggerMethod(stepIntoV8MethodName, 1, argv);
+  continueProgram();
+}
+
+void V8Debugger::stepOverStatement() {
+  DCHECK(isPaused());
+  DCHECK(!m_executionState.IsEmpty());
+  v8::HandleScope handleScope(m_isolate);
+  v8::Local<v8::Value> argv[] = {m_executionState};
+  callDebuggerMethod("stepOverStatement", 1, argv);
+  continueProgram();
+}
+
+void V8Debugger::stepOutOfFunction() {
+  DCHECK(isPaused());
+  DCHECK(!m_executionState.IsEmpty());
+  v8::HandleScope handleScope(m_isolate);
+  v8::Local<v8::Value> argv[] = {m_executionState};
+  callDebuggerMethod(stepOutV8MethodName, 1, argv);
+  continueProgram();
+}
+
+void V8Debugger::clearStepping() {
+  DCHECK(enabled());
+  v8::HandleScope scope(m_isolate);
+  v8::Context::Scope contextScope(debuggerContext());
+
+  v8::Local<v8::Value> argv[] = {v8::Undefined(m_isolate)};
+  callDebuggerMethod("clearStepping", 0, argv);
+}
+
+bool V8Debugger::setScriptSource(
+    const String16& sourceID, v8::Local<v8::String> newSource, bool dryRun,
+    ErrorString* error,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails,
+    JavaScriptCallFrames* newCallFrames, Maybe<bool>* stackChanged) {
+  class EnableLiveEditScope {
+   public:
+    explicit EnableLiveEditScope(v8::Isolate* isolate) : m_isolate(isolate) {
+      v8::Debug::SetLiveEditEnabled(m_isolate, true);
+      inLiveEditScope = true;
+    }
+    ~EnableLiveEditScope() {
+      v8::Debug::SetLiveEditEnabled(m_isolate, false);
+      inLiveEditScope = false;
+    }
+
+   private:
+    v8::Isolate* m_isolate;
+  };
+
+  DCHECK(enabled());
+  v8::HandleScope scope(m_isolate);
+
+  std::unique_ptr<v8::Context::Scope> contextScope;
+  if (!isPaused())
+    contextScope = wrapUnique(new v8::Context::Scope(debuggerContext()));
+
+  v8::Local<v8::Value> argv[] = {toV8String(m_isolate, sourceID), newSource,
+                                 v8Boolean(dryRun, m_isolate)};
+
+  v8::Local<v8::Value> v8result;
+  {
+    EnableLiveEditScope enableLiveEditScope(m_isolate);
+    v8::TryCatch tryCatch(m_isolate);
+    tryCatch.SetVerbose(false);
+    v8::MaybeLocal<v8::Value> maybeResult =
+        callDebuggerMethod("liveEditScriptSource", 3, argv);
+    if (tryCatch.HasCaught()) {
+      v8::Local<v8::Message> message = tryCatch.Message();
+      if (!message.IsEmpty())
+        *error = toProtocolStringWithTypeCheck(message->Get());
+      else
+        *error = "Unknown error.";
+      return false;
+    }
+    v8result = maybeResult.ToLocalChecked();
+  }
+  DCHECK(!v8result.IsEmpty());
+  v8::Local<v8::Context> context = m_isolate->GetCurrentContext();
+  v8::Local<v8::Object> resultTuple =
+      v8result->ToObject(context).ToLocalChecked();
+  int code = static_cast<int>(resultTuple->Get(context, 0)
+                                  .ToLocalChecked()
+                                  ->ToInteger(context)
+                                  .ToLocalChecked()
+                                  ->Value());
+  switch (code) {
+    case 0: {
+      *stackChanged = resultTuple->Get(context, 1)
+                          .ToLocalChecked()
+                          ->BooleanValue(context)
+                          .FromJust();
+      // The call stack may have changed if the edited function was on the
+      // stack.
+      if (!dryRun && isPaused()) {
+        JavaScriptCallFrames frames = currentCallFrames();
+        newCallFrames->swap(frames);
+      }
+      return true;
+    }
+    // Compile error.
+    case 1: {
+      *exceptionDetails =
+          protocol::Runtime::ExceptionDetails::create()
+              .setExceptionId(m_inspector->nextExceptionId())
+              .setText(toProtocolStringWithTypeCheck(
+                  resultTuple->Get(context, 2).ToLocalChecked()))
+              .setLineNumber(static_cast<int>(resultTuple->Get(context, 3)
+                                                  .ToLocalChecked()
+                                                  ->ToInteger(context)
+                                                  .ToLocalChecked()
+                                                  ->Value()) -
+                             1)
+              .setColumnNumber(static_cast<int>(resultTuple->Get(context, 4)
+                                                    .ToLocalChecked()
+                                                    ->ToInteger(context)
+                                                    .ToLocalChecked()
+                                                    ->Value()) -
+                               1)
+              .build();
+      return false;
+    }
+  }
+  *error = "Unknown error.";
+  return false;
+}
+
+JavaScriptCallFrames V8Debugger::currentCallFrames(int limit) {
+  if (!m_isolate->InContext()) return JavaScriptCallFrames();
+  v8::Local<v8::Value> currentCallFramesV8;
+  if (m_executionState.IsEmpty()) {
+    v8::Local<v8::Function> currentCallFramesFunction =
+        v8::Local<v8::Function>::Cast(
+            m_debuggerScript.Get(m_isolate)
+                ->Get(debuggerContext(),
+                      toV8StringInternalized(m_isolate, "currentCallFrames"))
+                .ToLocalChecked());
+    currentCallFramesV8 =
+        v8::Debug::Call(debuggerContext(), currentCallFramesFunction,
+                        v8::Integer::New(m_isolate, limit))
+            .ToLocalChecked();
+  } else {
+    v8::Local<v8::Value> argv[] = {m_executionState,
+                                   v8::Integer::New(m_isolate, limit)};
+    currentCallFramesV8 =
+        callDebuggerMethod("currentCallFrames", arraysize(argv), argv)
+            .ToLocalChecked();
+  }
+  DCHECK(!currentCallFramesV8.IsEmpty());
+  if (!currentCallFramesV8->IsArray()) return JavaScriptCallFrames();
+  v8::Local<v8::Array> callFramesArray = currentCallFramesV8.As<v8::Array>();
+  JavaScriptCallFrames callFrames;
+  for (uint32_t i = 0; i < callFramesArray->Length(); ++i) {
+    v8::Local<v8::Value> callFrameValue;
+    if (!callFramesArray->Get(debuggerContext(), i).ToLocal(&callFrameValue))
+      return JavaScriptCallFrames();
+    if (!callFrameValue->IsObject()) return JavaScriptCallFrames();
+    v8::Local<v8::Object> callFrameObject = callFrameValue.As<v8::Object>();
+    callFrames.push_back(JavaScriptCallFrame::create(
+        debuggerContext(), v8::Local<v8::Object>::Cast(callFrameObject)));
+  }
+  return callFrames;
+}
+
+static V8Debugger* toV8Debugger(v8::Local<v8::Value> data) {
+  void* p = v8::Local<v8::External>::Cast(data)->Value();
+  return static_cast<V8Debugger*>(p);
+}
+
+void V8Debugger::breakProgramCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  DCHECK_EQ(info.Length(), 2);
+  V8Debugger* thisPtr = toV8Debugger(info.Data());
+  if (!thisPtr->enabled()) return;
+  v8::Local<v8::Context> pausedContext =
+      thisPtr->m_isolate->GetCurrentContext();
+  v8::Local<v8::Value> exception;
+  v8::Local<v8::Array> hitBreakpoints;
+  thisPtr->handleProgramBreak(pausedContext,
+                              v8::Local<v8::Object>::Cast(info[0]), exception,
+                              hitBreakpoints);
+}
+
+void V8Debugger::handleProgramBreak(v8::Local<v8::Context> pausedContext,
+                                    v8::Local<v8::Object> executionState,
+                                    v8::Local<v8::Value> exception,
+                                    v8::Local<v8::Array> hitBreakpointNumbers,
+                                    bool isPromiseRejection) {
+  // Don't allow nested breaks.
+  if (m_runningNestedMessageLoop) return;
+
+  V8DebuggerAgentImpl* agent =
+      m_inspector->enabledDebuggerAgentForGroup(getGroupId(pausedContext));
+  if (!agent) return;
+
+  std::vector<String16> breakpointIds;
+  if (!hitBreakpointNumbers.IsEmpty()) {
+    breakpointIds.reserve(hitBreakpointNumbers->Length());
+    for (uint32_t i = 0; i < hitBreakpointNumbers->Length(); i++) {
+      v8::Local<v8::Value> hitBreakpointNumber =
+          hitBreakpointNumbers->Get(debuggerContext(), i).ToLocalChecked();
+      DCHECK(hitBreakpointNumber->IsInt32());
+      breakpointIds.push_back(String16::fromInteger(
+          hitBreakpointNumber->Int32Value(debuggerContext()).FromJust()));
+    }
+  }
+
+  m_pausedContext = pausedContext;
+  m_executionState = executionState;
+  V8DebuggerAgentImpl::SkipPauseRequest result = agent->didPause(
+      pausedContext, exception, breakpointIds, isPromiseRejection);
+  if (result == V8DebuggerAgentImpl::RequestNoSkip) {
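+    // Block in a nested message loop until the client resumes execution;
+    // runMessageLoopOnPause() returns once continueProgram() calls
+    // quitMessageLoopOnPause().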
+    m_runningNestedMessageLoop = true;
+    int groupId = getGroupId(pausedContext);
+    DCHECK(groupId);
+    m_inspector->client()->runMessageLoopOnPause(groupId);
+    // The agent may have been removed in the nested loop.
+    agent =
+        m_inspector->enabledDebuggerAgentForGroup(getGroupId(pausedContext));
+    if (agent) agent->didContinue();
+    m_runningNestedMessageLoop = false;
+  }
+  m_pausedContext.Clear();
+  m_executionState.Clear();
+
+  if (result == V8DebuggerAgentImpl::RequestStepFrame) {
+    v8::Local<v8::Value> argv[] = {executionState};
+    callDebuggerMethod("stepFrameStatement", 1, argv);
+  } else if (result == V8DebuggerAgentImpl::RequestStepInto) {
+    v8::Local<v8::Value> argv[] = {executionState};
+    callDebuggerMethod(stepIntoV8MethodName, 1, argv);
+  } else if (result == V8DebuggerAgentImpl::RequestStepOut) {
+    v8::Local<v8::Value> argv[] = {executionState};
+    callDebuggerMethod(stepOutV8MethodName, 1, argv);
+  }
+}
+
+void V8Debugger::v8DebugEventCallback(
+    const v8::Debug::EventDetails& eventDetails) {
+  V8Debugger* thisPtr = toV8Debugger(eventDetails.GetCallbackData());
+  thisPtr->handleV8DebugEvent(eventDetails);
+}
+
+v8::Local<v8::Value> V8Debugger::callInternalGetterFunction(
+    v8::Local<v8::Object> object, const char* functionName) {
+  v8::MicrotasksScope microtasks(m_isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::Local<v8::Value> getterValue =
+      object
+          ->Get(m_isolate->GetCurrentContext(),
+                toV8StringInternalized(m_isolate, functionName))
+          .ToLocalChecked();
+  DCHECK(!getterValue.IsEmpty() && getterValue->IsFunction());
+  return v8::Local<v8::Function>::Cast(getterValue)
+      ->Call(m_isolate->GetCurrentContext(), object, 0, 0)
+      .ToLocalChecked();
+}
+
+void V8Debugger::handleV8DebugEvent(
+    const v8::Debug::EventDetails& eventDetails) {
+  if (!enabled()) return;
+  v8::DebugEvent event = eventDetails.GetEvent();
+  if (event != v8::AsyncTaskEvent && event != v8::Break &&
+      event != v8::Exception && event != v8::AfterCompile &&
+      event != v8::BeforeCompile && event != v8::CompileError)
+    return;
+
+  v8::Local<v8::Context> eventContext = eventDetails.GetEventContext();
+  DCHECK(!eventContext.IsEmpty());
+
+  if (event == v8::AsyncTaskEvent) {
+    v8::HandleScope scope(m_isolate);
+    handleV8AsyncTaskEvent(eventContext, eventDetails.GetExecutionState(),
+                           eventDetails.GetEventData());
+    return;
+  }
+
+  V8DebuggerAgentImpl* agent =
+      m_inspector->enabledDebuggerAgentForGroup(getGroupId(eventContext));
+  if (agent) {
+    v8::HandleScope scope(m_isolate);
+    if (m_ignoreScriptParsedEventsCounter == 0 &&
+        (event == v8::AfterCompile || event == v8::CompileError)) {
+      v8::Context::Scope contextScope(debuggerContext());
+      v8::Local<v8::Value> argv[] = {eventDetails.GetEventData()};
+      v8::Local<v8::Value> value =
+          callDebuggerMethod("getAfterCompileScript", 1, argv).ToLocalChecked();
+      if (value->IsNull()) return;
+      DCHECK(value->IsObject());
+      v8::Local<v8::Object> scriptObject = v8::Local<v8::Object>::Cast(value);
+      agent->didParseSource(
+          wrapUnique(new V8DebuggerScript(debuggerContext(), scriptObject,
+                                          inLiveEditScope)),
+          event == v8::AfterCompile);
+    } else if (event == v8::Exception) {
+      v8::Local<v8::Object> eventData = eventDetails.GetEventData();
+      v8::Local<v8::Value> exception =
+          callInternalGetterFunction(eventData, "exception");
+      v8::Local<v8::Value> promise =
+          callInternalGetterFunction(eventData, "promise");
+      bool isPromiseRejection = !promise.IsEmpty() && promise->IsObject();
+      handleProgramBreak(eventContext, eventDetails.GetExecutionState(),
+                         exception, v8::Local<v8::Array>(), isPromiseRejection);
+    } else if (event == v8::Break) {
+      v8::Local<v8::Value> argv[] = {eventDetails.GetEventData()};
+      v8::Local<v8::Value> hitBreakpoints =
+          callDebuggerMethod("getBreakpointNumbers", 1, argv).ToLocalChecked();
+      DCHECK(hitBreakpoints->IsArray());
+      handleProgramBreak(eventContext, eventDetails.GetExecutionState(),
+                         v8::Local<v8::Value>(),
+                         hitBreakpoints.As<v8::Array>());
+    }
+  }
+}
+
+void V8Debugger::handleV8AsyncTaskEvent(v8::Local<v8::Context> context,
+                                        v8::Local<v8::Object> executionState,
+                                        v8::Local<v8::Object> eventData) {
+  if (!m_maxAsyncCallStackDepth) return;
+
+  String16 type = toProtocolStringWithTypeCheck(
+      callInternalGetterFunction(eventData, "type"));
+  String16 name = toProtocolStringWithTypeCheck(
+      callInternalGetterFunction(eventData, "name"));
+  int id = static_cast<int>(callInternalGetterFunction(eventData, "id")
+                                ->ToInteger(context)
+                                .ToLocalChecked()
+                                ->Value());
+  // Async task events from Promises are given misaligned pointers to prevent
+  // them from overlapping with other Blink task identifiers. There is a
+  // single namespace of such ids, managed by src/js/promise.js.
+  void* ptr = reinterpret_cast<void*>(id * 2 + 1);
+  if (type == v8AsyncTaskEventEnqueue)
+    asyncTaskScheduled(name, ptr, false);
+  else if (type == v8AsyncTaskEventEnqueueRecurring)
+    asyncTaskScheduled(name, ptr, true);
+  else if (type == v8AsyncTaskEventWillHandle)
+    asyncTaskStarted(ptr);
+  else if (type == v8AsyncTaskEventDidHandle)
+    asyncTaskFinished(ptr);
+  else if (type == v8AsyncTaskEventCancel)
+    asyncTaskCanceled(ptr);
+  else
+    UNREACHABLE();
+}
+
+V8StackTraceImpl* V8Debugger::currentAsyncCallChain() {
+  if (!m_currentStacks.size()) return nullptr;
+  return m_currentStacks.back().get();
+}
+
+void V8Debugger::compileDebuggerScript() {
+  if (!m_debuggerScript.IsEmpty()) {
+    UNREACHABLE();
+    return;
+  }
+
+  v8::HandleScope scope(m_isolate);
+  v8::Context::Scope contextScope(debuggerContext());
+
+  v8::Local<v8::String> scriptValue =
+      v8::String::NewFromUtf8(m_isolate, DebuggerScript_js,
+                              v8::NewStringType::kInternalized,
+                              sizeof(DebuggerScript_js))
+          .ToLocalChecked();
+  v8::Local<v8::Value> value;
+  if (!m_inspector->compileAndRunInternalScript(debuggerContext(), scriptValue)
+           .ToLocal(&value)) {
+    UNREACHABLE();
+    return;
+  }
+  DCHECK(value->IsObject());
+  m_debuggerScript.Reset(m_isolate, value.As<v8::Object>());
+}
+
+v8::Local<v8::Context> V8Debugger::debuggerContext() const {
+  DCHECK(!m_debuggerContext.IsEmpty());
+  return m_debuggerContext.Get(m_isolate);
+}
+
+v8::MaybeLocal<v8::Value> V8Debugger::functionScopes(
+    v8::Local<v8::Context> context, v8::Local<v8::Function> function) {
+  if (!enabled()) {
+    UNREACHABLE();
+    return v8::Local<v8::Value>::New(m_isolate, v8::Undefined(m_isolate));
+  }
+  v8::Local<v8::Value> argv[] = {function};
+  v8::Local<v8::Value> scopesValue;
+  if (!callDebuggerMethod("getFunctionScopes", 1, argv).ToLocal(&scopesValue))
+    return v8::MaybeLocal<v8::Value>();
+  v8::Local<v8::Value> copied;
+  if (!copyValueFromDebuggerContext(m_isolate, debuggerContext(), context,
+                                    scopesValue)
+           .ToLocal(&copied) ||
+      !copied->IsArray())
+    return v8::MaybeLocal<v8::Value>();
+  if (!markAsInternal(context, v8::Local<v8::Array>::Cast(copied),
+                      V8InternalValueType::kScopeList))
+    return v8::MaybeLocal<v8::Value>();
+  if (!markArrayEntriesAsInternal(context, v8::Local<v8::Array>::Cast(copied),
+                                  V8InternalValueType::kScope))
+    return v8::MaybeLocal<v8::Value>();
+  return copied;
+}
+
+v8::MaybeLocal<v8::Array> V8Debugger::internalProperties(
+    v8::Local<v8::Context> context, v8::Local<v8::Value> value) {
+  v8::Local<v8::Array> properties;
+  if (!v8::Debug::GetInternalProperties(m_isolate, value).ToLocal(&properties))
+    return v8::MaybeLocal<v8::Array>();
+  if (value->IsFunction()) {
+    v8::Local<v8::Function> function = value.As<v8::Function>();
+    v8::Local<v8::Value> location = functionLocation(context, function);
+    if (location->IsObject()) {
+      createDataProperty(
+          context, properties, properties->Length(),
+          toV8StringInternalized(m_isolate, "[[FunctionLocation]]"));
+      createDataProperty(context, properties, properties->Length(), location);
+    }
+    if (function->IsGeneratorFunction()) {
+      createDataProperty(context, properties, properties->Length(),
+                         toV8StringInternalized(m_isolate, "[[IsGenerator]]"));
+      createDataProperty(context, properties, properties->Length(),
+                         v8::True(m_isolate));
+    }
+  }
+  if (!enabled()) return properties;
+  if (value->IsMap() || value->IsWeakMap() || value->IsSet() ||
+      value->IsWeakSet() || value->IsSetIterator() || value->IsMapIterator()) {
+    v8::Local<v8::Value> entries =
+        collectionEntries(context, v8::Local<v8::Object>::Cast(value));
+    if (entries->IsArray()) {
+      createDataProperty(context, properties, properties->Length(),
+                         toV8StringInternalized(m_isolate, "[[Entries]]"));
+      createDataProperty(context, properties, properties->Length(), entries);
+    }
+  }
+  if (value->IsGeneratorObject()) {
+    v8::Local<v8::Value> location =
+        generatorObjectLocation(context, v8::Local<v8::Object>::Cast(value));
+    if (location->IsObject()) {
+      createDataProperty(
+          context, properties, properties->Length(),
+          toV8StringInternalized(m_isolate, "[[GeneratorLocation]]"));
+      createDataProperty(context, properties, properties->Length(), location);
+    }
+  }
+  if (value->IsFunction()) {
+    v8::Local<v8::Function> function = value.As<v8::Function>();
+    v8::Local<v8::Value> boundFunction = function->GetBoundFunction();
+    v8::Local<v8::Value> scopes;
+    if (boundFunction->IsUndefined() &&
+        functionScopes(context, function).ToLocal(&scopes)) {
+      createDataProperty(context, properties, properties->Length(),
+                         toV8StringInternalized(m_isolate, "[[Scopes]]"));
+      createDataProperty(context, properties, properties->Length(), scopes);
+    }
+  }
+  return properties;
+}
+
+v8::Local<v8::Value> V8Debugger::collectionEntries(
+    v8::Local<v8::Context> context, v8::Local<v8::Object> object) {
+  if (!enabled()) {
+    UNREACHABLE();
+    return v8::Undefined(m_isolate);
+  }
+  v8::Local<v8::Value> argv[] = {object};
+  v8::Local<v8::Value> entriesValue =
+      callDebuggerMethod("getCollectionEntries", 1, argv).ToLocalChecked();
+  if (!entriesValue->IsArray()) return v8::Undefined(m_isolate);
+
+  v8::Local<v8::Array> entries = entriesValue.As<v8::Array>();
+  v8::Local<v8::Array> copiedArray =
+      v8::Array::New(m_isolate, entries->Length());
+  if (!copiedArray->SetPrototype(context, v8::Null(m_isolate)).FromMaybe(false))
+    return v8::Undefined(m_isolate);
+  for (uint32_t i = 0; i < entries->Length(); ++i) {
+    v8::Local<v8::Value> item;
+    if (!entries->Get(debuggerContext(), i).ToLocal(&item))
+      return v8::Undefined(m_isolate);
+    v8::Local<v8::Value> copied;
+    if (!copyValueFromDebuggerContext(m_isolate, debuggerContext(), context,
+                                      item)
+             .ToLocal(&copied))
+      return v8::Undefined(m_isolate);
+    if (!createDataProperty(context, copiedArray, i, copied).FromMaybe(false))
+      return v8::Undefined(m_isolate);
+  }
+  if (!markArrayEntriesAsInternal(context,
+                                  v8::Local<v8::Array>::Cast(copiedArray),
+                                  V8InternalValueType::kEntry))
+    return v8::Undefined(m_isolate);
+  return copiedArray;
+}
+
+v8::Local<v8::Value> V8Debugger::generatorObjectLocation(
+    v8::Local<v8::Context> context, v8::Local<v8::Object> object) {
+  if (!enabled()) {
+    UNREACHABLE();
+    return v8::Null(m_isolate);
+  }
+  v8::Local<v8::Value> argv[] = {object};
+  v8::Local<v8::Value> location =
+      callDebuggerMethod("getGeneratorObjectLocation", 1, argv)
+          .ToLocalChecked();
+  v8::Local<v8::Value> copied;
+  if (!copyValueFromDebuggerContext(m_isolate, debuggerContext(), context,
+                                    location)
+           .ToLocal(&copied) ||
+      !copied->IsObject())
+    return v8::Null(m_isolate);
+  if (!markAsInternal(context, v8::Local<v8::Object>::Cast(copied),
+                      V8InternalValueType::kLocation))
+    return v8::Null(m_isolate);
+  return copied;
+}
+
+v8::Local<v8::Value> V8Debugger::functionLocation(
+    v8::Local<v8::Context> context, v8::Local<v8::Function> function) {
+  int scriptId = function->ScriptId();
+  if (scriptId == v8::UnboundScript::kNoScriptId) return v8::Null(m_isolate);
+  int lineNumber = function->GetScriptLineNumber();
+  int columnNumber = function->GetScriptColumnNumber();
+  if (lineNumber == v8::Function::kLineOffsetNotFound ||
+      columnNumber == v8::Function::kLineOffsetNotFound)
+    return v8::Null(m_isolate);
+  v8::Local<v8::Object> location = v8::Object::New(m_isolate);
+  if (!location->SetPrototype(context, v8::Null(m_isolate)).FromMaybe(false))
+    return v8::Null(m_isolate);
+  if (!createDataProperty(
+           context, location, toV8StringInternalized(m_isolate, "scriptId"),
+           toV8String(m_isolate, String16::fromInteger(scriptId)))
+           .FromMaybe(false))
+    return v8::Null(m_isolate);
+  if (!createDataProperty(context, location,
+                          toV8StringInternalized(m_isolate, "lineNumber"),
+                          v8::Integer::New(m_isolate, lineNumber))
+           .FromMaybe(false))
+    return v8::Null(m_isolate);
+  if (!createDataProperty(context, location,
+                          toV8StringInternalized(m_isolate, "columnNumber"),
+                          v8::Integer::New(m_isolate, columnNumber))
+           .FromMaybe(false))
+    return v8::Null(m_isolate);
+  if (!markAsInternal(context, location, V8InternalValueType::kLocation))
+    return v8::Null(m_isolate);
+  return location;
+}
+
+bool V8Debugger::isPaused() { return !m_pausedContext.IsEmpty(); }
+
+std::unique_ptr<V8StackTraceImpl> V8Debugger::createStackTrace(
+    v8::Local<v8::StackTrace> stackTrace) {
+  int contextGroupId =
+      m_isolate->InContext() ? getGroupId(m_isolate->GetCurrentContext()) : 0;
+  return V8StackTraceImpl::create(this, contextGroupId, stackTrace,
+                                  V8StackTraceImpl::maxCallStackSizeToCapture);
+}
+
+int V8Debugger::markContext(const V8ContextInfo& info) {
+  DCHECK(info.context->GetIsolate() == m_isolate);
+  int contextId = ++m_lastContextId;
+  String16 debugData = String16::fromInteger(info.contextGroupId) + "," +
+                       String16::fromInteger(contextId) + "," +
+                       toString16(info.auxData);
+  v8::Context::Scope contextScope(info.context);
+  info.context->SetEmbedderData(static_cast<int>(v8::Context::kDebugIdIndex),
+                                toV8String(m_isolate, debugData));
+  return contextId;
+}
+
+void V8Debugger::setAsyncCallStackDepth(V8DebuggerAgentImpl* agent, int depth) {
+  if (depth <= 0)
+    m_maxAsyncCallStackDepthMap.erase(agent);
+  else
+    m_maxAsyncCallStackDepthMap[agent] = depth;
+
+  int maxAsyncCallStackDepth = 0;
+  for (const auto& pair : m_maxAsyncCallStackDepthMap) {
+    if (pair.second > maxAsyncCallStackDepth)
+      maxAsyncCallStackDepth = pair.second;
+  }
+
+  if (m_maxAsyncCallStackDepth == maxAsyncCallStackDepth) return;
+  m_maxAsyncCallStackDepth = maxAsyncCallStackDepth;
+  if (!maxAsyncCallStackDepth) allAsyncTasksCanceled();
+}
+
+void V8Debugger::asyncTaskScheduled(const StringView& taskName, void* task,
+                                    bool recurring) {
+  if (!m_maxAsyncCallStackDepth) return;
+  asyncTaskScheduled(toString16(taskName), task, recurring);
+}
+
+void V8Debugger::asyncTaskScheduled(const String16& taskName, void* task,
+                                    bool recurring) {
+  if (!m_maxAsyncCallStackDepth) return;
+  v8::HandleScope scope(m_isolate);
+  int contextGroupId =
+      m_isolate->InContext() ? getGroupId(m_isolate->GetCurrentContext()) : 0;
+  std::unique_ptr<V8StackTraceImpl> chain = V8StackTraceImpl::capture(
+      this, contextGroupId, V8StackTraceImpl::maxCallStackSizeToCapture,
+      taskName);
+  if (chain) {
+    m_asyncTaskStacks[task] = std::move(chain);
+    if (recurring) m_recurringTasks.insert(task);
+  }
+}
+
+void V8Debugger::asyncTaskCanceled(void* task) {
+  if (!m_maxAsyncCallStackDepth) return;
+  m_asyncTaskStacks.erase(task);
+  m_recurringTasks.erase(task);
+}
+
+void V8Debugger::asyncTaskStarted(void* task) {
+  if (!m_maxAsyncCallStackDepth) return;
+  m_currentTasks.push_back(task);
+  AsyncTaskToStackTrace::iterator stackIt = m_asyncTaskStacks.find(task);
+  // Needs to support the following order of events:
+  // - asyncTaskScheduled
+  //   <-- attached here -->
+  // - asyncTaskStarted
+  // - asyncTaskCanceled <-- canceled before finished
+  //   <-- async stack requested here -->
+  // - asyncTaskFinished
+  std::unique_ptr<V8StackTraceImpl> stack;
+  if (stackIt != m_asyncTaskStacks.end() && stackIt->second)
+    stack = stackIt->second->cloneImpl();
+  m_currentStacks.push_back(std::move(stack));
+}
+
+void V8Debugger::asyncTaskFinished(void* task) {
+  if (!m_maxAsyncCallStackDepth) return;
+  // Instrumentation may have started halfway through a task, in which case
+  // m_currentStacks is empty.
+  if (!m_currentStacks.size()) return;
+
+  DCHECK(m_currentTasks.back() == task);
+  m_currentTasks.pop_back();
+
+  m_currentStacks.pop_back();
+  if (m_recurringTasks.find(task) == m_recurringTasks.end())
+    m_asyncTaskStacks.erase(task);
+}
+
+void V8Debugger::allAsyncTasksCanceled() {
+  m_asyncTaskStacks.clear();
+  m_recurringTasks.clear();
+  m_currentStacks.clear();
+  m_currentTasks.clear();
+}
+
+void V8Debugger::muteScriptParsedEvents() {
+  ++m_ignoreScriptParsedEventsCounter;
+}
+
+void V8Debugger::unmuteScriptParsedEvents() {
+  --m_ignoreScriptParsedEventsCounter;
+  DCHECK_GE(m_ignoreScriptParsedEventsCounter, 0);
+}
+
+std::unique_ptr<V8StackTraceImpl> V8Debugger::captureStackTrace(
+    bool fullStack) {
+  if (!m_isolate->InContext()) return nullptr;
+
+  v8::HandleScope handles(m_isolate);
+  int contextGroupId = getGroupId(m_isolate->GetCurrentContext());
+  if (!contextGroupId) return nullptr;
+
+  size_t stackSize =
+      fullStack ? V8StackTraceImpl::maxCallStackSizeToCapture : 1;
+  if (m_inspector->enabledRuntimeAgentForGroup(contextGroupId))
+    stackSize = V8StackTraceImpl::maxCallStackSizeToCapture;
+
+  return V8StackTraceImpl::capture(this, contextGroupId, stackSize);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-debugger.h b/src/inspector/v8-debugger.h
new file mode 100644
index 0000000..83c1b21
--- /dev/null
+++ b/src/inspector/v8-debugger.h
@@ -0,0 +1,160 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8DEBUGGER_H_
+#define V8_INSPECTOR_V8DEBUGGER_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/java-script-call-frame.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+#include "src/inspector/v8-debugger-script.h"
+
+#include "include/v8-debug.h"
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+struct ScriptBreakpoint;
+class V8DebuggerAgentImpl;
+class V8InspectorImpl;
+class V8StackTraceImpl;
+
+using protocol::ErrorString;
+
+class V8Debugger {
+ public:
+  V8Debugger(v8::Isolate*, V8InspectorImpl*);
+  ~V8Debugger();
+
+  static int contextId(v8::Local<v8::Context>);
+  static int getGroupId(v8::Local<v8::Context>);
+  int markContext(const V8ContextInfo&);
+
+  bool enabled() const;
+
+  String16 setBreakpoint(const String16& sourceID, const ScriptBreakpoint&,
+                         int* actualLineNumber, int* actualColumnNumber);
+  void removeBreakpoint(const String16& breakpointId);
+  void setBreakpointsActivated(bool);
+  bool breakpointsActivated() const { return m_breakpointsActivated; }
+
+  enum PauseOnExceptionsState {
+    DontPauseOnExceptions,
+    PauseOnAllExceptions,
+    PauseOnUncaughtExceptions
+  };
+  PauseOnExceptionsState getPauseOnExceptionsState();
+  void setPauseOnExceptionsState(PauseOnExceptionsState);
+  void setPauseOnNextStatement(bool);
+  bool canBreakProgram();
+  void breakProgram();
+  void continueProgram();
+  void stepIntoStatement();
+  void stepOverStatement();
+  void stepOutOfFunction();
+  void clearStepping();
+
+  bool setScriptSource(const String16& sourceID,
+                       v8::Local<v8::String> newSource, bool dryRun,
+                       ErrorString*,
+                       protocol::Maybe<protocol::Runtime::ExceptionDetails>*,
+                       JavaScriptCallFrames* newCallFrames,
+                       protocol::Maybe<bool>* stackChanged);
+  JavaScriptCallFrames currentCallFrames(int limit = 0);
+
+  // Each script inherits debug data from the v8::Context in which it was
+  // compiled. Only scripts whose debug data matches |contextGroupId| will be
+  // reported; passing 0 reports all scripts.
+  void getCompiledScripts(int contextGroupId,
+                          std::vector<std::unique_ptr<V8DebuggerScript>>&);
+  void enable();
+  void disable();
+
+  bool isPaused();
+  v8::Local<v8::Context> pausedContext() { return m_pausedContext; }
+
+  int maxAsyncCallChainDepth() { return m_maxAsyncCallStackDepth; }
+  V8StackTraceImpl* currentAsyncCallChain();
+  void setAsyncCallStackDepth(V8DebuggerAgentImpl*, int);
+  std::unique_ptr<V8StackTraceImpl> createStackTrace(v8::Local<v8::StackTrace>);
+  std::unique_ptr<V8StackTraceImpl> captureStackTrace(bool fullStack);
+
+  v8::MaybeLocal<v8::Array> internalProperties(v8::Local<v8::Context>,
+                                               v8::Local<v8::Value>);
+
+  void asyncTaskScheduled(const StringView& taskName, void* task,
+                          bool recurring);
+  void asyncTaskScheduled(const String16& taskName, void* task, bool recurring);
+  void asyncTaskCanceled(void* task);
+  void asyncTaskStarted(void* task);
+  void asyncTaskFinished(void* task);
+  void allAsyncTasksCanceled();
+
+  void muteScriptParsedEvents();
+  void unmuteScriptParsedEvents();
+
+  V8InspectorImpl* inspector() { return m_inspector; }
+
+ private:
+  void compileDebuggerScript();
+  v8::MaybeLocal<v8::Value> callDebuggerMethod(const char* functionName,
+                                               int argc,
+                                               v8::Local<v8::Value> argv[]);
+  v8::Local<v8::Context> debuggerContext() const;
+  void clearBreakpoints();
+
+  static void breakProgramCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  void handleProgramBreak(v8::Local<v8::Context> pausedContext,
+                          v8::Local<v8::Object> executionState,
+                          v8::Local<v8::Value> exception,
+                          v8::Local<v8::Array> hitBreakpoints,
+                          bool isPromiseRejection = false);
+  static void v8DebugEventCallback(const v8::Debug::EventDetails&);
+  v8::Local<v8::Value> callInternalGetterFunction(v8::Local<v8::Object>,
+                                                  const char* functionName);
+  void handleV8DebugEvent(const v8::Debug::EventDetails&);
+  void handleV8AsyncTaskEvent(v8::Local<v8::Context>,
+                              v8::Local<v8::Object> executionState,
+                              v8::Local<v8::Object> eventData);
+
+  v8::Local<v8::Value> collectionEntries(v8::Local<v8::Context>,
+                                         v8::Local<v8::Object>);
+  v8::Local<v8::Value> generatorObjectLocation(v8::Local<v8::Context>,
+                                               v8::Local<v8::Object>);
+  v8::Local<v8::Value> functionLocation(v8::Local<v8::Context>,
+                                        v8::Local<v8::Function>);
+  v8::MaybeLocal<v8::Value> functionScopes(v8::Local<v8::Context>,
+                                           v8::Local<v8::Function>);
+
+  v8::Isolate* m_isolate;
+  V8InspectorImpl* m_inspector;
+  int m_lastContextId;
+  int m_enableCount;
+  bool m_breakpointsActivated;
+  v8::Global<v8::Object> m_debuggerScript;
+  v8::Global<v8::Context> m_debuggerContext;
+  v8::Local<v8::Object> m_executionState;
+  v8::Local<v8::Context> m_pausedContext;
+  bool m_runningNestedMessageLoop;
+  int m_ignoreScriptParsedEventsCounter;
+
+  using AsyncTaskToStackTrace =
+      protocol::HashMap<void*, std::unique_ptr<V8StackTraceImpl>>;
+  AsyncTaskToStackTrace m_asyncTaskStacks;
+  protocol::HashSet<void*> m_recurringTasks;
+  int m_maxAsyncCallStackDepth;
+  std::vector<void*> m_currentTasks;
+  std::vector<std::unique_ptr<V8StackTraceImpl>> m_currentStacks;
+  protocol::HashMap<V8DebuggerAgentImpl*, int> m_maxAsyncCallStackDepthMap;
+
+  DISALLOW_COPY_AND_ASSIGN(V8Debugger);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8DEBUGGER_H_
diff --git a/src/inspector/v8-function-call.cc b/src/inspector/v8-function-call.cc
new file mode 100644
index 0000000..3880e31
--- /dev/null
+++ b/src/inspector/v8-function-call.cc
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2009 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "src/inspector/v8-function-call.h"
+
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+V8FunctionCall::V8FunctionCall(V8InspectorImpl* inspector,
+                               v8::Local<v8::Context> context,
+                               v8::Local<v8::Value> value, const String16& name)
+    : m_inspector(inspector),
+      m_context(context),
+      m_name(toV8String(context->GetIsolate(), name)),
+      m_value(value) {}
+
+void V8FunctionCall::appendArgument(v8::Local<v8::Value> value) {
+  m_arguments.push_back(value);
+}
+
+void V8FunctionCall::appendArgument(const String16& argument) {
+  m_arguments.push_back(toV8String(m_context->GetIsolate(), argument));
+}
+
+void V8FunctionCall::appendArgument(int argument) {
+  m_arguments.push_back(v8::Number::New(m_context->GetIsolate(), argument));
+}
+
+void V8FunctionCall::appendArgument(bool argument) {
+  m_arguments.push_back(argument ? v8::True(m_context->GetIsolate())
+                                 : v8::False(m_context->GetIsolate()));
+}
+
+v8::Local<v8::Value> V8FunctionCall::call(bool& hadException,
+                                          bool reportExceptions) {
+  v8::TryCatch tryCatch(m_context->GetIsolate());
+  tryCatch.SetVerbose(reportExceptions);
+
+  v8::Local<v8::Value> result = callWithoutExceptionHandling();
+  hadException = tryCatch.HasCaught();
+  return result;
+}
+
+v8::Local<v8::Value> V8FunctionCall::callWithoutExceptionHandling() {
+  v8::Local<v8::Object> thisObject = v8::Local<v8::Object>::Cast(m_value);
+  v8::Local<v8::Value> value;
+  if (!thisObject->Get(m_context, m_name).ToLocal(&value))
+    return v8::Local<v8::Value>();
+
+  DCHECK(value->IsFunction());
+
+  v8::Local<v8::Function> function = v8::Local<v8::Function>::Cast(value);
+  std::unique_ptr<v8::Local<v8::Value>[]> info(
+      new v8::Local<v8::Value>[m_arguments.size()]);
+  for (size_t i = 0; i < m_arguments.size(); ++i) {
+    info[i] = m_arguments[i];
+    DCHECK(!info[i].IsEmpty());
+  }
+
+  int contextGroupId = V8Debugger::getGroupId(m_context);
+  if (contextGroupId) {
+    m_inspector->client()->muteMetrics(contextGroupId);
+    m_inspector->muteExceptions(contextGroupId);
+  }
+  v8::MicrotasksScope microtasksScope(m_context->GetIsolate(),
+                                      v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::MaybeLocal<v8::Value> maybeResult = function->Call(
+      m_context, thisObject, static_cast<int>(m_arguments.size()), info.get());
+  if (contextGroupId) {
+    m_inspector->client()->unmuteMetrics(contextGroupId);
+    m_inspector->unmuteExceptions(contextGroupId);
+  }
+
+  v8::Local<v8::Value> result;
+  if (!maybeResult.ToLocal(&result)) return v8::Local<v8::Value>();
+  return result;
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-function-call.h b/src/inspector/v8-function-call.h
new file mode 100644
index 0000000..0337caa
--- /dev/null
+++ b/src/inspector/v8-function-call.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2009 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_V8FUNCTIONCALL_H_
+#define V8_INSPECTOR_V8FUNCTIONCALL_H_
+
+#include "src/inspector/string-16.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class V8InspectorImpl;
+
+class V8FunctionCall {
+ public:
+  V8FunctionCall(V8InspectorImpl*, v8::Local<v8::Context>, v8::Local<v8::Value>,
+                 const String16& name);
+
+  void appendArgument(v8::Local<v8::Value>);
+  void appendArgument(const String16&);
+  void appendArgument(int);
+  void appendArgument(bool);
+
+  v8::Local<v8::Value> call(bool& hadException, bool reportExceptions = true);
+  v8::Local<v8::Value> callWithoutExceptionHandling();
+
+ protected:
+  V8InspectorImpl* m_inspector;
+  v8::Local<v8::Context> m_context;
+  std::vector<v8::Local<v8::Value>> m_arguments;
+  v8::Local<v8::String> m_name;
+  v8::Local<v8::Value> m_value;
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8FUNCTIONCALL_H_
diff --git a/src/inspector/v8-heap-profiler-agent-impl.cc b/src/inspector/v8-heap-profiler-agent-impl.cc
new file mode 100644
index 0000000..84c890b
--- /dev/null
+++ b/src/inspector/v8-heap-profiler-agent-impl.cc
@@ -0,0 +1,407 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-heap-profiler-agent-impl.h"
+
+#include "src/inspector/injected-script.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+
+#include "include/v8-inspector.h"
+#include "include/v8-profiler.h"
+#include "include/v8-version.h"
+
+namespace v8_inspector {
+
+namespace {
+
+namespace HeapProfilerAgentState {
+static const char heapProfilerEnabled[] = "heapProfilerEnabled";
+static const char heapObjectsTrackingEnabled[] = "heapObjectsTrackingEnabled";
+static const char allocationTrackingEnabled[] = "allocationTrackingEnabled";
+static const char samplingHeapProfilerEnabled[] = "samplingHeapProfilerEnabled";
+static const char samplingHeapProfilerInterval[] =
+    "samplingHeapProfilerInterval";
+}
+
+class HeapSnapshotProgress final : public v8::ActivityControl {
+ public:
+  explicit HeapSnapshotProgress(protocol::HeapProfiler::Frontend* frontend)
+      : m_frontend(frontend) {}
+  ControlOption ReportProgressValue(int done, int total) override {
+    m_frontend->reportHeapSnapshotProgress(done, total,
+                                           protocol::Maybe<bool>());
+    if (done >= total) {
+      m_frontend->reportHeapSnapshotProgress(total, total, true);
+    }
+    m_frontend->flush();
+    return kContinue;
+  }
+
+ private:
+  protocol::HeapProfiler::Frontend* m_frontend;
+};
+
+class GlobalObjectNameResolver final
+    : public v8::HeapProfiler::ObjectNameResolver {
+ public:
+  explicit GlobalObjectNameResolver(V8InspectorSessionImpl* session)
+      : m_offset(0), m_strings(10000), m_session(session) {}
+
+  const char* GetName(v8::Local<v8::Object> object) override {
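+    // Copy the context origin into m_strings (non-Latin-1 characters become
+    // '?') so that the returned const char* stays valid while the snapshot
+    // is being taken.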
+    InspectedContext* context = m_session->inspector()->getContext(
+        m_session->contextGroupId(),
+        V8Debugger::contextId(object->CreationContext()));
+    if (!context) return "";
+    String16 name = context->origin();
+    size_t length = name.length();
+    if (m_offset + length + 1 >= m_strings.size()) return "";
+    for (size_t i = 0; i < length; ++i) {
+      UChar ch = name[i];
+      m_strings[m_offset + i] = ch > 0xff ? '?' : static_cast<char>(ch);
+    }
+    m_strings[m_offset + length] = '\0';
+    char* result = &*m_strings.begin() + m_offset;
+    m_offset += length + 1;
+    return result;
+  }
+
+ private:
+  size_t m_offset;
+  std::vector<char> m_strings;
+  V8InspectorSessionImpl* m_session;
+};
+
+class HeapSnapshotOutputStream final : public v8::OutputStream {
+ public:
+  explicit HeapSnapshotOutputStream(protocol::HeapProfiler::Frontend* frontend)
+      : m_frontend(frontend) {}
+  void EndOfStream() override {}
+  int GetChunkSize() override { return 102400; }
+  WriteResult WriteAsciiChunk(char* data, int size) override {
+    m_frontend->addHeapSnapshotChunk(String16(data, size));
+    m_frontend->flush();
+    return kContinue;
+  }
+
+ private:
+  protocol::HeapProfiler::Frontend* m_frontend;
+};
+
+v8::Local<v8::Object> objectByHeapObjectId(v8::Isolate* isolate, int id) {
+  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
+  v8::Local<v8::Value> value = profiler->FindObjectById(id);
+  if (value.IsEmpty() || !value->IsObject()) return v8::Local<v8::Object>();
+  return value.As<v8::Object>();
+}
+
+class InspectableHeapObject final : public V8InspectorSession::Inspectable {
+ public:
+  explicit InspectableHeapObject(int heapObjectId)
+      : m_heapObjectId(heapObjectId) {}
+  v8::Local<v8::Value> get(v8::Local<v8::Context> context) override {
+    return objectByHeapObjectId(context->GetIsolate(), m_heapObjectId);
+  }
+
+ private:
+  int m_heapObjectId;
+};
+
+class HeapStatsStream final : public v8::OutputStream {
+ public:
+  explicit HeapStatsStream(protocol::HeapProfiler::Frontend* frontend)
+      : m_frontend(frontend) {}
+
+  void EndOfStream() override {}
+
+  WriteResult WriteAsciiChunk(char* data, int size) override {
+    DCHECK(false);
+    return kAbort;
+  }
+
+  WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* updateData,
+                                  int count) override {
+    DCHECK_GT(count, 0);
+    std::unique_ptr<protocol::Array<int>> statsDiff =
+        protocol::Array<int>::create();
+    for (int i = 0; i < count; ++i) {
+      statsDiff->addItem(updateData[i].index);
+      statsDiff->addItem(updateData[i].count);
+      statsDiff->addItem(updateData[i].size);
+    }
+    m_frontend->heapStatsUpdate(std::move(statsDiff));
+    return kContinue;
+  }
+
+ private:
+  protocol::HeapProfiler::Frontend* m_frontend;
+};
+
+}  // namespace
+
+V8HeapProfilerAgentImpl::V8HeapProfilerAgentImpl(
+    V8InspectorSessionImpl* session, protocol::FrontendChannel* frontendChannel,
+    protocol::DictionaryValue* state)
+    : m_session(session),
+      m_isolate(session->inspector()->isolate()),
+      m_frontend(frontendChannel),
+      m_state(state),
+      m_hasTimer(false) {}
+
+V8HeapProfilerAgentImpl::~V8HeapProfilerAgentImpl() {}
+
+void V8HeapProfilerAgentImpl::restore() {
+  if (m_state->booleanProperty(HeapProfilerAgentState::heapProfilerEnabled,
+                               false))
+    m_frontend.resetProfiles();
+  if (m_state->booleanProperty(
+          HeapProfilerAgentState::heapObjectsTrackingEnabled, false))
+    startTrackingHeapObjectsInternal(m_state->booleanProperty(
+        HeapProfilerAgentState::allocationTrackingEnabled, false));
+  if (m_state->booleanProperty(
+          HeapProfilerAgentState::samplingHeapProfilerEnabled, false)) {
+    ErrorString error;
+    double samplingInterval = m_state->doubleProperty(
+        HeapProfilerAgentState::samplingHeapProfilerInterval, -1);
+    DCHECK_GE(samplingInterval, 0);
+    startSampling(&error, Maybe<double>(samplingInterval));
+  }
+}
+
+void V8HeapProfilerAgentImpl::collectGarbage(ErrorString*) {
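+  // LowMemoryNotification() asks V8 to free as much memory as it can.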
+  m_isolate->LowMemoryNotification();
+}
+
+void V8HeapProfilerAgentImpl::startTrackingHeapObjects(
+    ErrorString*, const protocol::Maybe<bool>& trackAllocations) {
+  m_state->setBoolean(HeapProfilerAgentState::heapObjectsTrackingEnabled, true);
+  bool allocationTrackingEnabled = trackAllocations.fromMaybe(false);
+  m_state->setBoolean(HeapProfilerAgentState::allocationTrackingEnabled,
+                      allocationTrackingEnabled);
+  startTrackingHeapObjectsInternal(allocationTrackingEnabled);
+}
+
+void V8HeapProfilerAgentImpl::stopTrackingHeapObjects(
+    ErrorString* error, const protocol::Maybe<bool>& reportProgress) {
+  requestHeapStatsUpdate();
+  takeHeapSnapshot(error, reportProgress);
+  stopTrackingHeapObjectsInternal();
+}
+
+void V8HeapProfilerAgentImpl::enable(ErrorString*) {
+  m_state->setBoolean(HeapProfilerAgentState::heapProfilerEnabled, true);
+}
+
+void V8HeapProfilerAgentImpl::disable(ErrorString* error) {
+  stopTrackingHeapObjectsInternal();
+  if (m_state->booleanProperty(
+          HeapProfilerAgentState::samplingHeapProfilerEnabled, false)) {
+    v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler();
+    if (profiler) profiler->StopSamplingHeapProfiler();
+  }
+  m_isolate->GetHeapProfiler()->ClearObjectIds();
+  m_state->setBoolean(HeapProfilerAgentState::heapProfilerEnabled, false);
+}
+
+void V8HeapProfilerAgentImpl::takeHeapSnapshot(
+    ErrorString* errorString, const protocol::Maybe<bool>& reportProgress) {
+  v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler();
+  if (!profiler) {
+    *errorString = "Cannot access v8 heap profiler";
+    return;
+  }
+  std::unique_ptr<HeapSnapshotProgress> progress;
+  if (reportProgress.fromMaybe(false))
+    progress = wrapUnique(new HeapSnapshotProgress(&m_frontend));
+
+  GlobalObjectNameResolver resolver(m_session);
+  const v8::HeapSnapshot* snapshot =
+      profiler->TakeHeapSnapshot(progress.get(), &resolver);
+  if (!snapshot) {
+    *errorString = "Failed to take heap snapshot";
+    return;
+  }
+  HeapSnapshotOutputStream stream(&m_frontend);
+  snapshot->Serialize(&stream);
+  const_cast<v8::HeapSnapshot*>(snapshot)->Delete();
+}
+
+void V8HeapProfilerAgentImpl::getObjectByHeapObjectId(
+    ErrorString* error, const String16& heapSnapshotObjectId,
+    const protocol::Maybe<String16>& objectGroup,
+    std::unique_ptr<protocol::Runtime::RemoteObject>* result) {
+  bool ok;
+  int id = heapSnapshotObjectId.toInteger(&ok);
+  if (!ok) {
+    *error = "Invalid heap snapshot object id";
+    return;
+  }
+
+  v8::HandleScope handles(m_isolate);
+  v8::Local<v8::Object> heapObject = objectByHeapObjectId(m_isolate, id);
+  if (heapObject.IsEmpty()) {
+    *error = "Object is not available";
+    return;
+  }
+
+  if (!m_session->inspector()->client()->isInspectableHeapObject(heapObject)) {
+    *error = "Object is not available";
+    return;
+  }
+
+  *result = m_session->wrapObject(heapObject->CreationContext(), heapObject,
+                                  objectGroup.fromMaybe(""), false);
+  if (!*result) *error = "Object is not available";
+}
+
+void V8HeapProfilerAgentImpl::addInspectedHeapObject(
+    ErrorString* errorString, const String16& inspectedHeapObjectId) {
+  bool ok;
+  int id = inspectedHeapObjectId.toInteger(&ok);
+  if (!ok) {
+    *errorString = "Invalid heap snapshot object id";
+    return;
+  }
+
+  v8::HandleScope handles(m_isolate);
+  v8::Local<v8::Object> heapObject = objectByHeapObjectId(m_isolate, id);
+  if (heapObject.IsEmpty()) {
+    *errorString = "Object is not available";
+    return;
+  }
+
+  if (!m_session->inspector()->client()->isInspectableHeapObject(heapObject)) {
+    *errorString = "Object is not available";
+    return;
+  }
+
+  m_session->addInspectedObject(wrapUnique(new InspectableHeapObject(id)));
+}
+
+void V8HeapProfilerAgentImpl::getHeapObjectId(ErrorString* errorString,
+                                              const String16& objectId,
+                                              String16* heapSnapshotObjectId) {
+  v8::HandleScope handles(m_isolate);
+  v8::Local<v8::Value> value;
+  v8::Local<v8::Context> context;
+  if (!m_session->unwrapObject(errorString, objectId, &value, &context,
+                               nullptr) ||
+      value->IsUndefined())
+    return;
+
+  v8::SnapshotObjectId id = m_isolate->GetHeapProfiler()->GetObjectId(value);
+  *heapSnapshotObjectId = String16::fromInteger(static_cast<size_t>(id));
+}
+
+void V8HeapProfilerAgentImpl::requestHeapStatsUpdate() {
+  HeapStatsStream stream(&m_frontend);
+  v8::SnapshotObjectId lastSeenObjectId =
+      m_isolate->GetHeapProfiler()->GetHeapStats(&stream);
+  m_frontend.lastSeenObjectId(
+      lastSeenObjectId, m_session->inspector()->client()->currentTimeMS());
+}
+
+// static
+void V8HeapProfilerAgentImpl::onTimer(void* data) {
+  reinterpret_cast<V8HeapProfilerAgentImpl*>(data)->requestHeapStatsUpdate();
+}
+
+void V8HeapProfilerAgentImpl::startTrackingHeapObjectsInternal(
+    bool trackAllocations) {
+  m_isolate->GetHeapProfiler()->StartTrackingHeapObjects(trackAllocations);
+  if (!m_hasTimer) {
+    m_hasTimer = true;
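+    // Poll heap statistics every 50 ms while object tracking is active.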
+    m_session->inspector()->client()->startRepeatingTimer(
+        0.05, &V8HeapProfilerAgentImpl::onTimer, reinterpret_cast<void*>(this));
+  }
+}
+
+void V8HeapProfilerAgentImpl::stopTrackingHeapObjectsInternal() {
+  if (m_hasTimer) {
+    m_session->inspector()->client()->cancelTimer(
+        reinterpret_cast<void*>(this));
+    m_hasTimer = false;
+  }
+  m_isolate->GetHeapProfiler()->StopTrackingHeapObjects();
+  m_state->setBoolean(HeapProfilerAgentState::heapObjectsTrackingEnabled,
+                      false);
+  m_state->setBoolean(HeapProfilerAgentState::allocationTrackingEnabled, false);
+}
+
+void V8HeapProfilerAgentImpl::startSampling(
+    ErrorString* errorString, const Maybe<double>& samplingInterval) {
+  v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler();
+  if (!profiler) {
+    *errorString = "Cannot access v8 heap profiler";
+    return;
+  }
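+  // Default to sampling roughly every 32 KiB of allocated memory.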
+  const unsigned defaultSamplingInterval = 1 << 15;
+  double samplingIntervalValue =
+      samplingInterval.fromMaybe(defaultSamplingInterval);
+  m_state->setDouble(HeapProfilerAgentState::samplingHeapProfilerInterval,
+                     samplingIntervalValue);
+  m_state->setBoolean(HeapProfilerAgentState::samplingHeapProfilerEnabled,
+                      true);
+  profiler->StartSamplingHeapProfiler(
+      static_cast<uint64_t>(samplingIntervalValue), 128,
+      v8::HeapProfiler::kSamplingForceGC);
+}
+
+namespace {
+std::unique_ptr<protocol::HeapProfiler::SamplingHeapProfileNode>
+buildSamplingHeapProfileNode(const v8::AllocationProfile::Node* node) {
+  auto children = protocol::Array<
+      protocol::HeapProfiler::SamplingHeapProfileNode>::create();
+  for (const auto* child : node->children)
+    children->addItem(buildSamplingHeapProfileNode(child));
+  size_t selfSize = 0;
+  for (const auto& allocation : node->allocations)
+    selfSize += allocation.size * allocation.count;
+  std::unique_ptr<protocol::Runtime::CallFrame> callFrame =
+      protocol::Runtime::CallFrame::create()
+          .setFunctionName(toProtocolString(node->name))
+          .setScriptId(String16::fromInteger(node->script_id))
+          .setUrl(toProtocolString(node->script_name))
+          .setLineNumber(node->line_number - 1)
+          .setColumnNumber(node->column_number - 1)
+          .build();
+  std::unique_ptr<protocol::HeapProfiler::SamplingHeapProfileNode> result =
+      protocol::HeapProfiler::SamplingHeapProfileNode::create()
+          .setCallFrame(std::move(callFrame))
+          .setSelfSize(selfSize)
+          .setChildren(std::move(children))
+          .build();
+  return result;
+}
+}  // namespace
+
+void V8HeapProfilerAgentImpl::stopSampling(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::HeapProfiler::SamplingHeapProfile>* profile) {
+  v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler();
+  if (!profiler) {
+    *errorString = "Cannot access v8 heap profiler";
+    return;
+  }
+  // The allocation profile below holds Local handles, so open a HandleScope.
+  v8::HandleScope scope(m_isolate);
+  std::unique_ptr<v8::AllocationProfile> v8Profile(
+      profiler->GetAllocationProfile());
+  profiler->StopSamplingHeapProfiler();
+  m_state->setBoolean(HeapProfilerAgentState::samplingHeapProfilerEnabled,
+                      false);
+  if (!v8Profile) {
+    *errorString = "Cannot access v8 sampled heap profile.";
+    return;
+  }
+  v8::AllocationProfile::Node* root = v8Profile->GetRootNode();
+  *profile = protocol::HeapProfiler::SamplingHeapProfile::create()
+                 .setHead(buildSamplingHeapProfileNode(root))
+                 .build();
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-heap-profiler-agent-impl.h b/src/inspector/v8-heap-profiler-agent-impl.h
new file mode 100644
index 0000000..caa9698
--- /dev/null
+++ b/src/inspector/v8-heap-profiler-agent-impl.h
@@ -0,0 +1,73 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8HEAPPROFILERAGENTIMPL_H_
+#define V8_INSPECTOR_V8HEAPPROFILERAGENTIMPL_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/HeapProfiler.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+using protocol::Maybe;
+
+class V8HeapProfilerAgentImpl : public protocol::HeapProfiler::Backend {
+ public:
+  V8HeapProfilerAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                          protocol::DictionaryValue* state);
+  ~V8HeapProfilerAgentImpl() override;
+  void restore();
+
+  void collectGarbage(ErrorString*) override;
+
+  void enable(ErrorString*) override;
+  void startTrackingHeapObjects(ErrorString*,
+                                const Maybe<bool>& trackAllocations) override;
+  void stopTrackingHeapObjects(ErrorString*,
+                               const Maybe<bool>& reportProgress) override;
+
+  void disable(ErrorString*) override;
+
+  void takeHeapSnapshot(ErrorString*,
+                        const Maybe<bool>& reportProgress) override;
+
+  void getObjectByHeapObjectId(
+      ErrorString*, const String16& heapSnapshotObjectId,
+      const Maybe<String16>& objectGroup,
+      std::unique_ptr<protocol::Runtime::RemoteObject>* result) override;
+  void addInspectedHeapObject(ErrorString*,
+                              const String16& inspectedHeapObjectId) override;
+  void getHeapObjectId(ErrorString*, const String16& objectId,
+                       String16* heapSnapshotObjectId) override;
+
+  void startSampling(ErrorString*,
+                     const Maybe<double>& samplingInterval) override;
+  void stopSampling(
+      ErrorString*,
+      std::unique_ptr<protocol::HeapProfiler::SamplingHeapProfile>*) override;
+
+ private:
+  void startTrackingHeapObjectsInternal(bool trackAllocations);
+  void stopTrackingHeapObjectsInternal();
+  void requestHeapStatsUpdate();
+  static void onTimer(void*);
+
+  V8InspectorSessionImpl* m_session;
+  v8::Isolate* m_isolate;
+  protocol::HeapProfiler::Frontend m_frontend;
+  protocol::DictionaryValue* m_state;
+  bool m_hasTimer;
+
+  DISALLOW_COPY_AND_ASSIGN(V8HeapProfilerAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8HEAPPROFILERAGENTIMPL_H_
diff --git a/src/inspector/v8-injected-script-host.cc b/src/inspector/v8-injected-script-host.cc
new file mode 100644
index 0000000..dc41ef8
--- /dev/null
+++ b/src/inspector/v8-injected-script-host.cc
@@ -0,0 +1,216 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-injected-script-host.h"
+
+#include "src/base/macros.h"
+#include "src/inspector/injected-script-native.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-internal-value-type.h"
+#include "src/inspector/v8-value-copier.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+namespace {
+
+void setFunctionProperty(v8::Local<v8::Context> context,
+                         v8::Local<v8::Object> obj, const char* name,
+                         v8::FunctionCallback callback,
+                         v8::Local<v8::External> external) {
+  v8::Local<v8::String> funcName =
+      toV8StringInternalized(context->GetIsolate(), name);
+  v8::Local<v8::Function> func;
+  if (!v8::Function::New(context, callback, external, 0,
+                         v8::ConstructorBehavior::kThrow)
+           .ToLocal(&func))
+    return;
+  func->SetName(funcName);
+  createDataProperty(context, obj, funcName, func);
+}
+
+V8InspectorImpl* unwrapInspector(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  DCHECK(!info.Data().IsEmpty());
+  DCHECK(info.Data()->IsExternal());
+  V8InspectorImpl* inspector =
+      static_cast<V8InspectorImpl*>(info.Data().As<v8::External>()->Value());
+  DCHECK(inspector);
+  return inspector;
+}
+
+}  // namespace
+
+v8::Local<v8::Object> V8InjectedScriptHost::create(
+    v8::Local<v8::Context> context, V8InspectorImpl* inspector) {
+  v8::Isolate* isolate = inspector->isolate();
+  v8::Local<v8::Object> injectedScriptHost = v8::Object::New(isolate);
+  bool success = injectedScriptHost->SetPrototype(context, v8::Null(isolate))
+                     .FromMaybe(false);
+  DCHECK(success);
+  USE(success);
+  v8::Local<v8::External> debuggerExternal =
+      v8::External::New(isolate, inspector);
+  setFunctionProperty(context, injectedScriptHost, "internalConstructorName",
+                      V8InjectedScriptHost::internalConstructorNameCallback,
+                      debuggerExternal);
+  setFunctionProperty(
+      context, injectedScriptHost, "formatAccessorsAsProperties",
+      V8InjectedScriptHost::formatAccessorsAsProperties, debuggerExternal);
+  setFunctionProperty(context, injectedScriptHost, "subtype",
+                      V8InjectedScriptHost::subtypeCallback, debuggerExternal);
+  setFunctionProperty(context, injectedScriptHost, "getInternalProperties",
+                      V8InjectedScriptHost::getInternalPropertiesCallback,
+                      debuggerExternal);
+  setFunctionProperty(context, injectedScriptHost, "objectHasOwnProperty",
+                      V8InjectedScriptHost::objectHasOwnPropertyCallback,
+                      debuggerExternal);
+  setFunctionProperty(context, injectedScriptHost, "bind",
+                      V8InjectedScriptHost::bindCallback, debuggerExternal);
+  setFunctionProperty(context, injectedScriptHost, "proxyTargetValue",
+                      V8InjectedScriptHost::proxyTargetValueCallback,
+                      debuggerExternal);
+  return injectedScriptHost;
+}
+
+void V8InjectedScriptHost::internalConstructorNameCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 1 || !info[0]->IsObject()) return;
+
+  v8::Local<v8::Object> object = info[0].As<v8::Object>();
+  info.GetReturnValue().Set(object->GetConstructorName());
+}
+
+void V8InjectedScriptHost::formatAccessorsAsProperties(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  DCHECK_EQ(info.Length(), 2);
+  info.GetReturnValue().Set(false);
+  if (!info[1]->IsFunction()) return;
+  // Bail out if the accessor is a user-defined (scripted) function.
+  if (info[1].As<v8::Function>()->ScriptId() != v8::UnboundScript::kNoScriptId)
+    return;
+  info.GetReturnValue().Set(
+      unwrapInspector(info)->client()->formatAccessorsAsProperties(info[0]));
+}
+
+void V8InjectedScriptHost::subtypeCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 1) return;
+
+  v8::Isolate* isolate = info.GetIsolate();
+  v8::Local<v8::Value> value = info[0];
+  if (value->IsObject()) {
+    v8::Local<v8::Value> internalType = v8InternalValueTypeFrom(
+        isolate->GetCurrentContext(), v8::Local<v8::Object>::Cast(value));
+    if (internalType->IsString()) {
+      info.GetReturnValue().Set(internalType);
+      return;
+    }
+  }
+  if (value->IsArray() || value->IsArgumentsObject()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "array"));
+    return;
+  }
+  if (value->IsTypedArray()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "typedarray"));
+    return;
+  }
+  if (value->IsDate()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "date"));
+    return;
+  }
+  if (value->IsRegExp()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "regexp"));
+    return;
+  }
+  if (value->IsMap() || value->IsWeakMap()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "map"));
+    return;
+  }
+  if (value->IsSet() || value->IsWeakSet()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "set"));
+    return;
+  }
+  if (value->IsMapIterator() || value->IsSetIterator()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "iterator"));
+    return;
+  }
+  if (value->IsGeneratorObject()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "generator"));
+    return;
+  }
+  if (value->IsNativeError()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "error"));
+    return;
+  }
+  if (value->IsProxy()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "proxy"));
+    return;
+  }
+  if (value->IsPromise()) {
+    info.GetReturnValue().Set(toV8StringInternalized(isolate, "promise"));
+    return;
+  }
+  std::unique_ptr<StringBuffer> subtype =
+      unwrapInspector(info)->client()->valueSubtype(value);
+  if (subtype) {
+    info.GetReturnValue().Set(toV8String(isolate, subtype->string()));
+    return;
+  }
+}
+
+void V8InjectedScriptHost::getInternalPropertiesCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 1) return;
+  v8::Local<v8::Array> properties;
+  if (unwrapInspector(info)
+          ->debugger()
+          ->internalProperties(info.GetIsolate()->GetCurrentContext(), info[0])
+          .ToLocal(&properties))
+    info.GetReturnValue().Set(properties);
+}
+
+void V8InjectedScriptHost::objectHasOwnPropertyCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 2 || !info[0]->IsObject() || !info[1]->IsString()) return;
+  bool result = info[0]
+                    .As<v8::Object>()
+                    ->HasOwnProperty(info.GetIsolate()->GetCurrentContext(),
+                                     v8::Local<v8::String>::Cast(info[1]))
+                    .FromMaybe(false);
+  info.GetReturnValue().Set(v8::Boolean::New(info.GetIsolate(), result));
+}
+
+void V8InjectedScriptHost::bindCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() < 2 || !info[1]->IsString()) return;
+  InjectedScriptNative* injectedScriptNative =
+      InjectedScriptNative::fromInjectedScriptHost(info.GetIsolate(),
+                                                   info.Holder());
+  if (!injectedScriptNative) return;
+
+  v8::Local<v8::Context> context = info.GetIsolate()->GetCurrentContext();
+  v8::Local<v8::String> v8groupName =
+      info[1]->ToString(context).ToLocalChecked();
+  String16 groupName = toProtocolStringWithTypeCheck(v8groupName);
+  int id = injectedScriptNative->bind(info[0], groupName);
+  info.GetReturnValue().Set(id);
+}
+
+void V8InjectedScriptHost::proxyTargetValueCallback(
+    const v8::FunctionCallbackInfo<v8::Value>& info) {
+  if (info.Length() != 1 || !info[0]->IsProxy()) {
+    UNREACHABLE();
+    return;
+  }
+  v8::Local<v8::Object> target = info[0].As<v8::Proxy>();
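+  // Unwrap nested proxies until the underlying target object is reached.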
+  while (target->IsProxy())
+    target = v8::Local<v8::Proxy>::Cast(target)->GetTarget();
+  info.GetReturnValue().Set(target);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-injected-script-host.h b/src/inspector/v8-injected-script-host.h
new file mode 100644
index 0000000..7d293af
--- /dev/null
+++ b/src/inspector/v8-injected-script-host.h
@@ -0,0 +1,46 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8INJECTEDSCRIPTHOST_H_
+#define V8_INSPECTOR_V8INJECTEDSCRIPTHOST_H_
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class V8InspectorImpl;
+
+// SECURITY NOTE: Although the InjectedScriptHost is intended for use solely
+// by the inspector, a reference to the InjectedScriptHost may be leaked to
+// the page being inspected. Thus, the InjectedScriptHost must never implement
+// methods that have more power over the page than the page already has itself
+// (e.g. origin restriction bypasses).
+
+class V8InjectedScriptHost {
+ public:
+  // We expect the debugger to outlive any JS context, so the
+  // V8InjectedScriptHost (owned by JS) is destroyed before the inspector.
+  static v8::Local<v8::Object> create(v8::Local<v8::Context>, V8InspectorImpl*);
+
+ private:
+  static void internalConstructorNameCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void formatAccessorsAsProperties(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void subtypeCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void getInternalPropertiesCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void objectHasOwnPropertyCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+  static void bindCallback(const v8::FunctionCallbackInfo<v8::Value>&);
+  static void proxyTargetValueCallback(
+      const v8::FunctionCallbackInfo<v8::Value>&);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8INJECTEDSCRIPTHOST_H_
diff --git a/src/inspector/v8-inspector-impl.cc b/src/inspector/v8-inspector-impl.cc
new file mode 100644
index 0000000..bd68548
--- /dev/null
+++ b/src/inspector/v8-inspector-impl.cc
@@ -0,0 +1,376 @@
+/*
+ * Copyright (c) 2010-2011 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "src/inspector/v8-inspector-impl.h"
+
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console-agent-impl.h"
+#include "src/inspector/v8-console-message.h"
+#include "src/inspector/v8-debugger-agent-impl.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-profiler-agent-impl.h"
+#include "src/inspector/v8-runtime-agent-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+namespace v8_inspector {
+
+std::unique_ptr<V8Inspector> V8Inspector::create(v8::Isolate* isolate,
+                                                 V8InspectorClient* client) {
+  return wrapUnique(new V8InspectorImpl(isolate, client));
+}
+
+V8InspectorImpl::V8InspectorImpl(v8::Isolate* isolate,
+                                 V8InspectorClient* client)
+    : m_isolate(isolate),
+      m_client(client),
+      m_debugger(new V8Debugger(isolate, this)),
+      m_capturingStackTracesCount(0),
+      m_lastExceptionId(0) {}
+
+V8InspectorImpl::~V8InspectorImpl() {}
+
+V8DebuggerAgentImpl* V8InspectorImpl::enabledDebuggerAgentForGroup(
+    int contextGroupId) {
+  V8InspectorSessionImpl* session = sessionForContextGroup(contextGroupId);
+  V8DebuggerAgentImpl* agent = session ? session->debuggerAgent() : nullptr;
+  return agent && agent->enabled() ? agent : nullptr;
+}
+
+V8RuntimeAgentImpl* V8InspectorImpl::enabledRuntimeAgentForGroup(
+    int contextGroupId) {
+  V8InspectorSessionImpl* session = sessionForContextGroup(contextGroupId);
+  V8RuntimeAgentImpl* agent = session ? session->runtimeAgent() : nullptr;
+  return agent && agent->enabled() ? agent : nullptr;
+}
+
+V8ProfilerAgentImpl* V8InspectorImpl::enabledProfilerAgentForGroup(
+    int contextGroupId) {
+  V8InspectorSessionImpl* session = sessionForContextGroup(contextGroupId);
+  V8ProfilerAgentImpl* agent = session ? session->profilerAgent() : nullptr;
+  return agent && agent->enabled() ? agent : nullptr;
+}
+
+v8::MaybeLocal<v8::Value> V8InspectorImpl::runCompiledScript(
+    v8::Local<v8::Context> context, v8::Local<v8::Script> script) {
+  v8::MicrotasksScope microtasksScope(m_isolate,
+                                      v8::MicrotasksScope::kRunMicrotasks);
+  int groupId = V8Debugger::getGroupId(context);
+  if (V8DebuggerAgentImpl* agent = enabledDebuggerAgentForGroup(groupId))
+    agent->willExecuteScript(script->GetUnboundScript()->GetId());
+  v8::MaybeLocal<v8::Value> result = script->Run(context);
+  // Get agent from the map again, since it could have detached during script
+  // execution.
+  if (V8DebuggerAgentImpl* agent = enabledDebuggerAgentForGroup(groupId))
+    agent->didExecuteScript();
+  return result;
+}
+
+v8::MaybeLocal<v8::Value> V8InspectorImpl::callFunction(
+    v8::Local<v8::Function> function, v8::Local<v8::Context> context,
+    v8::Local<v8::Value> receiver, int argc, v8::Local<v8::Value> info[]) {
+  v8::MicrotasksScope microtasksScope(m_isolate,
+                                      v8::MicrotasksScope::kRunMicrotasks);
+  int groupId = V8Debugger::getGroupId(context);
+  if (V8DebuggerAgentImpl* agent = enabledDebuggerAgentForGroup(groupId))
+    agent->willExecuteScript(function->ScriptId());
+  v8::MaybeLocal<v8::Value> result =
+      function->Call(context, receiver, argc, info);
+  // Get agent from the map again, since it could have detached during script
+  // execution.
+  if (V8DebuggerAgentImpl* agent = enabledDebuggerAgentForGroup(groupId))
+    agent->didExecuteScript();
+  return result;
+}
+
+v8::MaybeLocal<v8::Value> V8InspectorImpl::compileAndRunInternalScript(
+    v8::Local<v8::Context> context, v8::Local<v8::String> source) {
+  v8::Local<v8::Script> script =
+      compileScript(context, source, String16(), true);
+  if (script.IsEmpty()) return v8::MaybeLocal<v8::Value>();
+  v8::MicrotasksScope microtasksScope(m_isolate,
+                                      v8::MicrotasksScope::kDoNotRunMicrotasks);
+  return script->Run(context);
+}
+
+v8::Local<v8::Script> V8InspectorImpl::compileScript(
+    v8::Local<v8::Context> context, v8::Local<v8::String> code,
+    const String16& fileName, bool markAsInternal) {
+  v8::ScriptOrigin origin(
+      toV8String(m_isolate, fileName), v8::Integer::New(m_isolate, 0),
+      v8::Integer::New(m_isolate, 0),
+      v8::False(m_isolate),  // sharable
+      v8::Local<v8::Integer>(),
+      v8::Boolean::New(m_isolate, markAsInternal),  // internal
+      toV8String(m_isolate, String16()),            // sourceMap
+      v8::True(m_isolate));                         // opaqueresource
+  v8::ScriptCompiler::Source source(code, origin);
+  v8::Local<v8::Script> script;
+  if (!v8::ScriptCompiler::Compile(context, &source,
+                                   v8::ScriptCompiler::kNoCompileOptions)
+           .ToLocal(&script))
+    return v8::Local<v8::Script>();
+  return script;
+}
+
+void V8InspectorImpl::enableStackCapturingIfNeeded() {
+  if (!m_capturingStackTracesCount)
+    V8StackTraceImpl::setCaptureStackTraceForUncaughtExceptions(m_isolate,
+                                                                true);
+  ++m_capturingStackTracesCount;
+}
+
+void V8InspectorImpl::disableStackCapturingIfNeeded() {
+  if (!(--m_capturingStackTracesCount))
+    V8StackTraceImpl::setCaptureStackTraceForUncaughtExceptions(m_isolate,
+                                                                false);
+}
+
+void V8InspectorImpl::muteExceptions(int contextGroupId) {
+  m_muteExceptionsMap[contextGroupId]++;
+}
+
+void V8InspectorImpl::unmuteExceptions(int contextGroupId) {
+  m_muteExceptionsMap[contextGroupId]--;
+}
+
+V8ConsoleMessageStorage* V8InspectorImpl::ensureConsoleMessageStorage(
+    int contextGroupId) {
+  ConsoleStorageMap::iterator storageIt =
+      m_consoleStorageMap.find(contextGroupId);
+  if (storageIt == m_consoleStorageMap.end())
+    storageIt =
+        m_consoleStorageMap
+            .insert(std::make_pair(
+                contextGroupId,
+                wrapUnique(new V8ConsoleMessageStorage(this, contextGroupId))))
+            .first;
+  return storageIt->second.get();
+}
+
+bool V8InspectorImpl::hasConsoleMessageStorage(int contextGroupId) {
+  ConsoleStorageMap::iterator storageIt =
+      m_consoleStorageMap.find(contextGroupId);
+  return storageIt != m_consoleStorageMap.end();
+}
+
+std::unique_ptr<V8StackTrace> V8InspectorImpl::createStackTrace(
+    v8::Local<v8::StackTrace> stackTrace) {
+  return m_debugger->createStackTrace(stackTrace);
+}
+
+std::unique_ptr<V8InspectorSession> V8InspectorImpl::connect(
+    int contextGroupId, V8Inspector::Channel* channel,
+    const StringView& state) {
+  DCHECK(m_sessions.find(contextGroupId) == m_sessions.cend());
+  std::unique_ptr<V8InspectorSessionImpl> session =
+      V8InspectorSessionImpl::create(this, contextGroupId, channel, state);
+  m_sessions[contextGroupId] = session.get();
+  return std::move(session);
+}
+
+void V8InspectorImpl::disconnect(V8InspectorSessionImpl* session) {
+  DCHECK(m_sessions.find(session->contextGroupId()) != m_sessions.end());
+  m_sessions.erase(session->contextGroupId());
+}
+
+InspectedContext* V8InspectorImpl::getContext(int groupId,
+                                              int contextId) const {
+  if (!groupId || !contextId) return nullptr;
+
+  ContextsByGroupMap::const_iterator contextGroupIt = m_contexts.find(groupId);
+  if (contextGroupIt == m_contexts.end()) return nullptr;
+
+  ContextByIdMap::iterator contextIt = contextGroupIt->second->find(contextId);
+  if (contextIt == contextGroupIt->second->end()) return nullptr;
+
+  return contextIt->second.get();
+}
+
+void V8InspectorImpl::contextCreated(const V8ContextInfo& info) {
+  int contextId = m_debugger->markContext(info);
+
+  ContextsByGroupMap::iterator contextIt = m_contexts.find(info.contextGroupId);
+  if (contextIt == m_contexts.end())
+    contextIt = m_contexts
+                    .insert(std::make_pair(info.contextGroupId,
+                                           wrapUnique(new ContextByIdMap())))
+                    .first;
+
+  const auto& contextById = contextIt->second;
+
+  DCHECK(contextById->find(contextId) == contextById->cend());
+  InspectedContext* context = new InspectedContext(this, info, contextId);
+  (*contextById)[contextId] = wrapUnique(context);
+  SessionMap::iterator sessionIt = m_sessions.find(info.contextGroupId);
+  if (sessionIt != m_sessions.end())
+    sessionIt->second->runtimeAgent()->reportExecutionContextCreated(context);
+}
+
+void V8InspectorImpl::contextDestroyed(v8::Local<v8::Context> context) {
+  int contextId = V8Debugger::contextId(context);
+  int contextGroupId = V8Debugger::getGroupId(context);
+
+  ConsoleStorageMap::iterator storageIt =
+      m_consoleStorageMap.find(contextGroupId);
+  if (storageIt != m_consoleStorageMap.end())
+    storageIt->second->contextDestroyed(contextId);
+
+  InspectedContext* inspectedContext = getContext(contextGroupId, contextId);
+  if (!inspectedContext) return;
+
+  SessionMap::iterator iter = m_sessions.find(contextGroupId);
+  if (iter != m_sessions.end())
+    iter->second->runtimeAgent()->reportExecutionContextDestroyed(
+        inspectedContext);
+  discardInspectedContext(contextGroupId, contextId);
+}
+
+void V8InspectorImpl::resetContextGroup(int contextGroupId) {
+  m_consoleStorageMap.erase(contextGroupId);
+  m_muteExceptionsMap.erase(contextGroupId);
+  SessionMap::iterator session = m_sessions.find(contextGroupId);
+  if (session != m_sessions.end()) session->second->reset();
+  m_contexts.erase(contextGroupId);
+}
+
+void V8InspectorImpl::willExecuteScript(v8::Local<v8::Context> context,
+                                        int scriptId) {
+  if (V8DebuggerAgentImpl* agent =
+          enabledDebuggerAgentForGroup(V8Debugger::getGroupId(context)))
+    agent->willExecuteScript(scriptId);
+}
+
+void V8InspectorImpl::didExecuteScript(v8::Local<v8::Context> context) {
+  if (V8DebuggerAgentImpl* agent =
+          enabledDebuggerAgentForGroup(V8Debugger::getGroupId(context)))
+    agent->didExecuteScript();
+}
+
+void V8InspectorImpl::idleStarted() {
+  for (auto it = m_sessions.begin(); it != m_sessions.end(); ++it) {
+    if (it->second->profilerAgent()->idleStarted()) return;
+  }
+}
+
+void V8InspectorImpl::idleFinished() {
+  for (auto it = m_sessions.begin(); it != m_sessions.end(); ++it) {
+    if (it->second->profilerAgent()->idleFinished()) return;
+  }
+}
+
+unsigned V8InspectorImpl::exceptionThrown(
+    v8::Local<v8::Context> context, const StringView& message,
+    v8::Local<v8::Value> exception, const StringView& detailedMessage,
+    const StringView& url, unsigned lineNumber, unsigned columnNumber,
+    std::unique_ptr<V8StackTrace> stackTrace, int scriptId) {
+  int contextGroupId = V8Debugger::getGroupId(context);
+  if (!contextGroupId || m_muteExceptionsMap[contextGroupId]) return 0;
+  std::unique_ptr<V8StackTraceImpl> stackTraceImpl =
+      wrapUnique(static_cast<V8StackTraceImpl*>(stackTrace.release()));
+  unsigned exceptionId = nextExceptionId();
+  std::unique_ptr<V8ConsoleMessage> consoleMessage =
+      V8ConsoleMessage::createForException(
+          m_client->currentTimeMS(), toString16(detailedMessage),
+          toString16(url), lineNumber, columnNumber, std::move(stackTraceImpl),
+          scriptId, m_isolate, toString16(message),
+          V8Debugger::contextId(context), exception, exceptionId);
+  ensureConsoleMessageStorage(contextGroupId)
+      ->addMessage(std::move(consoleMessage));
+  return exceptionId;
+}
+
+void V8InspectorImpl::exceptionRevoked(v8::Local<v8::Context> context,
+                                       unsigned exceptionId,
+                                       const StringView& message) {
+  int contextGroupId = V8Debugger::getGroupId(context);
+  if (!contextGroupId) return;
+
+  std::unique_ptr<V8ConsoleMessage> consoleMessage =
+      V8ConsoleMessage::createForRevokedException(
+          m_client->currentTimeMS(), toString16(message), exceptionId);
+  ensureConsoleMessageStorage(contextGroupId)
+      ->addMessage(std::move(consoleMessage));
+}
+
+std::unique_ptr<V8StackTrace> V8InspectorImpl::captureStackTrace(
+    bool fullStack) {
+  return m_debugger->captureStackTrace(fullStack);
+}
+
+void V8InspectorImpl::asyncTaskScheduled(const StringView& taskName, void* task,
+                                         bool recurring) {
+  m_debugger->asyncTaskScheduled(taskName, task, recurring);
+}
+
+void V8InspectorImpl::asyncTaskCanceled(void* task) {
+  m_debugger->asyncTaskCanceled(task);
+}
+
+void V8InspectorImpl::asyncTaskStarted(void* task) {
+  m_debugger->asyncTaskStarted(task);
+}
+
+void V8InspectorImpl::asyncTaskFinished(void* task) {
+  m_debugger->asyncTaskFinished(task);
+}
+
+void V8InspectorImpl::allAsyncTasksCanceled() {
+  m_debugger->allAsyncTasksCanceled();
+}
+
+v8::Local<v8::Context> V8InspectorImpl::regexContext() {
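+  // Lazily create and cache a dedicated context for the inspector's internal
+  // regular expression work.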
+  if (m_regexContext.IsEmpty())
+    m_regexContext.Reset(m_isolate, v8::Context::New(m_isolate));
+  return m_regexContext.Get(m_isolate);
+}
+
+void V8InspectorImpl::discardInspectedContext(int contextGroupId,
+                                              int contextId) {
+  if (!getContext(contextGroupId, contextId)) return;
+  m_contexts[contextGroupId]->erase(contextId);
+  if (m_contexts[contextGroupId]->empty()) m_contexts.erase(contextGroupId);
+}
+
+const V8InspectorImpl::ContextByIdMap* V8InspectorImpl::contextGroup(
+    int contextGroupId) {
+  ContextsByGroupMap::iterator iter = m_contexts.find(contextGroupId);
+  return iter == m_contexts.end() ? nullptr : iter->second.get();
+}
+
+V8InspectorSessionImpl* V8InspectorImpl::sessionForContextGroup(
+    int contextGroupId) {
+  if (!contextGroupId) return nullptr;
+  SessionMap::iterator iter = m_sessions.find(contextGroupId);
+  return iter == m_sessions.end() ? nullptr : iter->second;
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-inspector-impl.h b/src/inspector/v8-inspector-impl.h
new file mode 100644
index 0000000..0ca1a6a
--- /dev/null
+++ b/src/inspector/v8-inspector-impl.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (c) 2010, Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_V8INSPECTORIMPL_H_
+#define V8_INSPECTOR_V8INSPECTORIMPL_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Protocol.h"
+
+#include "include/v8-debug.h"
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+class InspectedContext;
+class V8ConsoleMessageStorage;
+class V8Debugger;
+class V8DebuggerAgentImpl;
+class V8InspectorSessionImpl;
+class V8ProfilerAgentImpl;
+class V8RuntimeAgentImpl;
+class V8StackTraceImpl;
+
+class V8InspectorImpl : public V8Inspector {
+ public:
+  V8InspectorImpl(v8::Isolate*, V8InspectorClient*);
+  ~V8InspectorImpl() override;
+
+  v8::Isolate* isolate() const { return m_isolate; }
+  V8InspectorClient* client() { return m_client; }
+  V8Debugger* debugger() { return m_debugger.get(); }
+
+  v8::MaybeLocal<v8::Value> runCompiledScript(v8::Local<v8::Context>,
+                                              v8::Local<v8::Script>);
+  v8::MaybeLocal<v8::Value> callFunction(v8::Local<v8::Function>,
+                                         v8::Local<v8::Context>,
+                                         v8::Local<v8::Value> receiver,
+                                         int argc, v8::Local<v8::Value> info[]);
+  v8::MaybeLocal<v8::Value> compileAndRunInternalScript(v8::Local<v8::Context>,
+                                                        v8::Local<v8::String>);
+  v8::Local<v8::Script> compileScript(v8::Local<v8::Context>,
+                                      v8::Local<v8::String>,
+                                      const String16& fileName,
+                                      bool markAsInternal);
+  v8::Local<v8::Context> regexContext();
+
+  // V8Inspector implementation.
+  std::unique_ptr<V8InspectorSession> connect(int contextGroupId,
+                                              V8Inspector::Channel*,
+                                              const StringView& state) override;
+  void contextCreated(const V8ContextInfo&) override;
+  void contextDestroyed(v8::Local<v8::Context>) override;
+  void resetContextGroup(int contextGroupId) override;
+  void willExecuteScript(v8::Local<v8::Context>, int scriptId) override;
+  void didExecuteScript(v8::Local<v8::Context>) override;
+  void idleStarted() override;
+  void idleFinished() override;
+  unsigned exceptionThrown(v8::Local<v8::Context>, const StringView& message,
+                           v8::Local<v8::Value> exception,
+                           const StringView& detailedMessage,
+                           const StringView& url, unsigned lineNumber,
+                           unsigned columnNumber, std::unique_ptr<V8StackTrace>,
+                           int scriptId) override;
+  void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
+                        const StringView& message) override;
+  std::unique_ptr<V8StackTrace> createStackTrace(
+      v8::Local<v8::StackTrace>) override;
+  std::unique_ptr<V8StackTrace> captureStackTrace(bool fullStack) override;
+  void asyncTaskScheduled(const StringView& taskName, void* task,
+                          bool recurring) override;
+  void asyncTaskCanceled(void* task) override;
+  void asyncTaskStarted(void* task) override;
+  void asyncTaskFinished(void* task) override;
+  void allAsyncTasksCanceled() override;
+
+  unsigned nextExceptionId() { return ++m_lastExceptionId; }
+  void enableStackCapturingIfNeeded();
+  void disableStackCapturingIfNeeded();
+  void muteExceptions(int contextGroupId);
+  void unmuteExceptions(int contextGroupId);
+  V8ConsoleMessageStorage* ensureConsoleMessageStorage(int contextGroupId);
+  bool hasConsoleMessageStorage(int contextGroupId);
+  using ContextByIdMap =
+      protocol::HashMap<int, std::unique_ptr<InspectedContext>>;
+  void discardInspectedContext(int contextGroupId, int contextId);
+  const ContextByIdMap* contextGroup(int contextGroupId);
+  void disconnect(V8InspectorSessionImpl*);
+  V8InspectorSessionImpl* sessionForContextGroup(int contextGroupId);
+  InspectedContext* getContext(int groupId, int contextId) const;
+  V8DebuggerAgentImpl* enabledDebuggerAgentForGroup(int contextGroupId);
+  V8RuntimeAgentImpl* enabledRuntimeAgentForGroup(int contextGroupId);
+  V8ProfilerAgentImpl* enabledProfilerAgentForGroup(int contextGroupId);
+
+ private:
+  v8::Isolate* m_isolate;
+  V8InspectorClient* m_client;
+  std::unique_ptr<V8Debugger> m_debugger;
+  v8::Global<v8::Context> m_regexContext;
+  int m_capturingStackTracesCount;
+  unsigned m_lastExceptionId;
+
+  using MuteExceptionsMap = protocol::HashMap<int, int>;
+  MuteExceptionsMap m_muteExceptionsMap;
+
+  using ContextsByGroupMap =
+      protocol::HashMap<int, std::unique_ptr<ContextByIdMap>>;
+  ContextsByGroupMap m_contexts;
+
+  using SessionMap = protocol::HashMap<int, V8InspectorSessionImpl*>;
+  SessionMap m_sessions;
+
+  using ConsoleStorageMap =
+      protocol::HashMap<int, std::unique_ptr<V8ConsoleMessageStorage>>;
+  ConsoleStorageMap m_consoleStorageMap;
+
+  DISALLOW_COPY_AND_ASSIGN(V8InspectorImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8INSPECTORIMPL_H_
diff --git a/src/inspector/v8-inspector-session-impl.cc b/src/inspector/v8-inspector-session-impl.cc
new file mode 100644
index 0000000..c3d3f48
--- /dev/null
+++ b/src/inspector/v8-inspector-session-impl.cc
@@ -0,0 +1,417 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-inspector-session-impl.h"
+
+#include "src/inspector/injected-script.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/remote-object-id.h"
+#include "src/inspector/search-util.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console-agent-impl.h"
+#include "src/inspector/v8-debugger-agent-impl.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-heap-profiler-agent-impl.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-profiler-agent-impl.h"
+#include "src/inspector/v8-runtime-agent-impl.h"
+#include "src/inspector/v8-schema-agent-impl.h"
+
+namespace v8_inspector {
+
+// static
+bool V8InspectorSession::canDispatchMethod(const StringView& method) {
+  return stringViewStartsWith(method,
+                              protocol::Runtime::Metainfo::commandPrefix) ||
+         stringViewStartsWith(method,
+                              protocol::Debugger::Metainfo::commandPrefix) ||
+         stringViewStartsWith(method,
+                              protocol::Profiler::Metainfo::commandPrefix) ||
+         stringViewStartsWith(
+             method, protocol::HeapProfiler::Metainfo::commandPrefix) ||
+         stringViewStartsWith(method,
+                              protocol::Console::Metainfo::commandPrefix) ||
+         stringViewStartsWith(method,
+                              protocol::Schema::Metainfo::commandPrefix);
+}
+
+std::unique_ptr<V8InspectorSessionImpl> V8InspectorSessionImpl::create(
+    V8InspectorImpl* inspector, int contextGroupId,
+    V8Inspector::Channel* channel, const StringView& state) {
+  return wrapUnique(
+      new V8InspectorSessionImpl(inspector, contextGroupId, channel, state));
+}
+
+V8InspectorSessionImpl::V8InspectorSessionImpl(V8InspectorImpl* inspector,
+                                               int contextGroupId,
+                                               V8Inspector::Channel* channel,
+                                               const StringView& savedState)
+    : m_contextGroupId(contextGroupId),
+      m_inspector(inspector),
+      m_channel(channel),
+      m_customObjectFormatterEnabled(false),
+      m_dispatcher(this),
+      m_state(nullptr),
+      m_runtimeAgent(nullptr),
+      m_debuggerAgent(nullptr),
+      m_heapProfilerAgent(nullptr),
+      m_profilerAgent(nullptr),
+      m_consoleAgent(nullptr),
+      m_schemaAgent(nullptr) {
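+  // Restore persisted agent state from the serialized session state, if any;
+  // otherwise start with an empty dictionary.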
+  if (savedState.length()) {
+    std::unique_ptr<protocol::Value> state =
+        protocol::parseJSON(toString16(savedState));
+    if (state) m_state = protocol::DictionaryValue::cast(std::move(state));
+    if (!m_state) m_state = protocol::DictionaryValue::create();
+  } else {
+    m_state = protocol::DictionaryValue::create();
+  }
+
+  m_runtimeAgent = wrapUnique(new V8RuntimeAgentImpl(
+      this, this, agentState(protocol::Runtime::Metainfo::domainName)));
+  protocol::Runtime::Dispatcher::wire(&m_dispatcher, m_runtimeAgent.get());
+
+  m_debuggerAgent = wrapUnique(new V8DebuggerAgentImpl(
+      this, this, agentState(protocol::Debugger::Metainfo::domainName)));
+  protocol::Debugger::Dispatcher::wire(&m_dispatcher, m_debuggerAgent.get());
+
+  m_profilerAgent = wrapUnique(new V8ProfilerAgentImpl(
+      this, this, agentState(protocol::Profiler::Metainfo::domainName)));
+  protocol::Profiler::Dispatcher::wire(&m_dispatcher, m_profilerAgent.get());
+
+  m_heapProfilerAgent = wrapUnique(new V8HeapProfilerAgentImpl(
+      this, this, agentState(protocol::HeapProfiler::Metainfo::domainName)));
+  protocol::HeapProfiler::Dispatcher::wire(&m_dispatcher,
+                                           m_heapProfilerAgent.get());
+
+  m_consoleAgent = wrapUnique(new V8ConsoleAgentImpl(
+      this, this, agentState(protocol::Console::Metainfo::domainName)));
+  protocol::Console::Dispatcher::wire(&m_dispatcher, m_consoleAgent.get());
+
+  m_schemaAgent = wrapUnique(new V8SchemaAgentImpl(
+      this, this, agentState(protocol::Schema::Metainfo::domainName)));
+  protocol::Schema::Dispatcher::wire(&m_dispatcher, m_schemaAgent.get());
+
+  if (savedState.length()) {
+    m_runtimeAgent->restore();
+    m_debuggerAgent->restore();
+    m_heapProfilerAgent->restore();
+    m_profilerAgent->restore();
+    m_consoleAgent->restore();
+  }
+}
+
+V8InspectorSessionImpl::~V8InspectorSessionImpl() {
+  ErrorString errorString;
+  m_consoleAgent->disable(&errorString);
+  m_profilerAgent->disable(&errorString);
+  m_heapProfilerAgent->disable(&errorString);
+  m_debuggerAgent->disable(&errorString);
+  m_runtimeAgent->disable(&errorString);
+
+  discardInjectedScripts();
+  m_inspector->disconnect(this);
+}
+
+protocol::DictionaryValue* V8InspectorSessionImpl::agentState(
+    const String16& name) {
+  protocol::DictionaryValue* state = m_state->getObject(name);
+  if (!state) {
+    std::unique_ptr<protocol::DictionaryValue> newState =
+        protocol::DictionaryValue::create();
+    state = newState.get();
+    m_state->setObject(name, std::move(newState));
+  }
+  return state;
+}
+
+void V8InspectorSessionImpl::sendProtocolResponse(int callId,
+                                                  const String16& message) {
+  m_channel->sendProtocolResponse(callId, toStringView(message));
+}
+
+void V8InspectorSessionImpl::sendProtocolNotification(const String16& message) {
+  m_channel->sendProtocolNotification(toStringView(message));
+}
+
+void V8InspectorSessionImpl::flushProtocolNotifications() {
+  m_channel->flushProtocolNotifications();
+}
+
+void V8InspectorSessionImpl::reset() {
+  m_debuggerAgent->reset();
+  m_runtimeAgent->reset();
+  discardInjectedScripts();
+}
+
+void V8InspectorSessionImpl::discardInjectedScripts() {
+  m_inspectedObjects.clear();
+  const V8InspectorImpl::ContextByIdMap* contexts =
+      m_inspector->contextGroup(m_contextGroupId);
+  if (!contexts) return;
+
+  std::vector<int> keys;
+  keys.reserve(contexts->size());
+  for (auto& idContext : *contexts) keys.push_back(idContext.first);
+  for (auto& key : keys) {
+    contexts = m_inspector->contextGroup(m_contextGroupId);
+    if (!contexts) continue;
+    auto contextIt = contexts->find(key);
+    if (contextIt != contexts->end())
+      contextIt->second
+          ->discardInjectedScript();  // This may destroy some contexts.
+  }
+}
+
+InjectedScript* V8InspectorSessionImpl::findInjectedScript(
+    ErrorString* errorString, int contextId) {
+  if (!contextId) {
+    *errorString = "Cannot find context with specified id";
+    return nullptr;
+  }
+
+  const V8InspectorImpl::ContextByIdMap* contexts =
+      m_inspector->contextGroup(m_contextGroupId);
+  if (!contexts) {
+    *errorString = "Cannot find context with specified id";
+    return nullptr;
+  }
+
+  auto contextsIt = contexts->find(contextId);
+  if (contextsIt == contexts->end()) {
+    *errorString = "Cannot find context with specified id";
+    return nullptr;
+  }
+
+  const std::unique_ptr<InspectedContext>& context = contextsIt->second;
+  if (!context->getInjectedScript()) {
+    if (!context->createInjectedScript()) {
+      *errorString = "Cannot access specified execution context";
+      return nullptr;
+    }
+    if (m_customObjectFormatterEnabled)
+      context->getInjectedScript()->setCustomObjectFormatterEnabled(true);
+  }
+  return context->getInjectedScript();
+}
+
+InjectedScript* V8InspectorSessionImpl::findInjectedScript(
+    ErrorString* errorString, RemoteObjectIdBase* objectId) {
+  return objectId ? findInjectedScript(errorString, objectId->contextId())
+                  : nullptr;
+}
+
+void V8InspectorSessionImpl::releaseObjectGroup(const StringView& objectGroup) {
+  releaseObjectGroup(toString16(objectGroup));
+}
+
+void V8InspectorSessionImpl::releaseObjectGroup(const String16& objectGroup) {
+  const V8InspectorImpl::ContextByIdMap* contexts =
+      m_inspector->contextGroup(m_contextGroupId);
+  if (!contexts) return;
+
+  std::vector<int> keys;
+  for (auto& idContext : *contexts) keys.push_back(idContext.first);
+  for (auto& key : keys) {
+    contexts = m_inspector->contextGroup(m_contextGroupId);
+    if (!contexts) continue;
+    auto contextsIt = contexts->find(key);
+    if (contextsIt == contexts->end()) continue;
+    InjectedScript* injectedScript = contextsIt->second->getInjectedScript();
+    if (injectedScript)
+      injectedScript->releaseObjectGroup(
+          objectGroup);  // This may destroy some contexts.
+  }
+}
+
+bool V8InspectorSessionImpl::unwrapObject(
+    std::unique_ptr<StringBuffer>* error, const StringView& objectId,
+    v8::Local<v8::Value>* object, v8::Local<v8::Context>* context,
+    std::unique_ptr<StringBuffer>* objectGroup) {
+  ErrorString errorString;
+  String16 objectGroupString;
+  bool result =
+      unwrapObject(&errorString, toString16(objectId), object, context,
+                   objectGroup ? &objectGroupString : nullptr);
+  if (error) *error = StringBufferImpl::adopt(errorString);
+  if (objectGroup) *objectGroup = StringBufferImpl::adopt(objectGroupString);
+  return result;
+}
+
+bool V8InspectorSessionImpl::unwrapObject(ErrorString* errorString,
+                                          const String16& objectId,
+                                          v8::Local<v8::Value>* object,
+                                          v8::Local<v8::Context>* context,
+                                          String16* objectGroup) {
+  std::unique_ptr<RemoteObjectId> remoteId =
+      RemoteObjectId::parse(errorString, objectId);
+  if (!remoteId) return false;
+  InjectedScript* injectedScript =
+      findInjectedScript(errorString, remoteId.get());
+  if (!injectedScript) return false;
+  if (!injectedScript->findObject(errorString, *remoteId, object)) return false;
+  *context = injectedScript->context()->context();
+  if (objectGroup) *objectGroup = injectedScript->objectGroupName(*remoteId);
+  return true;
+}
+
+std::unique_ptr<protocol::Runtime::API::RemoteObject>
+V8InspectorSessionImpl::wrapObject(v8::Local<v8::Context> context,
+                                   v8::Local<v8::Value> value,
+                                   const StringView& groupName) {
+  return wrapObject(context, value, toString16(groupName), false);
+}
+
+std::unique_ptr<protocol::Runtime::RemoteObject>
+V8InspectorSessionImpl::wrapObject(v8::Local<v8::Context> context,
+                                   v8::Local<v8::Value> value,
+                                   const String16& groupName,
+                                   bool generatePreview) {
+  ErrorString errorString;
+  InjectedScript* injectedScript =
+      findInjectedScript(&errorString, V8Debugger::contextId(context));
+  if (!injectedScript) return nullptr;
+  return injectedScript->wrapObject(&errorString, value, groupName, false,
+                                    generatePreview);
+}
+
+std::unique_ptr<protocol::Runtime::RemoteObject>
+V8InspectorSessionImpl::wrapTable(v8::Local<v8::Context> context,
+                                  v8::Local<v8::Value> table,
+                                  v8::Local<v8::Value> columns) {
+  ErrorString errorString;
+  InjectedScript* injectedScript =
+      findInjectedScript(&errorString, V8Debugger::contextId(context));
+  if (!injectedScript) return nullptr;
+  return injectedScript->wrapTable(table, columns);
+}
+
+void V8InspectorSessionImpl::setCustomObjectFormatterEnabled(bool enabled) {
+  m_customObjectFormatterEnabled = enabled;
+  const V8InspectorImpl::ContextByIdMap* contexts =
+      m_inspector->contextGroup(m_contextGroupId);
+  if (!contexts) return;
+  for (auto& idContext : *contexts) {
+    InjectedScript* injectedScript = idContext.second->getInjectedScript();
+    if (injectedScript)
+      injectedScript->setCustomObjectFormatterEnabled(enabled);
+  }
+}
+
+void V8InspectorSessionImpl::reportAllContexts(V8RuntimeAgentImpl* agent) {
+  const V8InspectorImpl::ContextByIdMap* contexts =
+      m_inspector->contextGroup(m_contextGroupId);
+  if (!contexts) return;
+  for (auto& idContext : *contexts)
+    agent->reportExecutionContextCreated(idContext.second.get());
+}
+
+void V8InspectorSessionImpl::dispatchProtocolMessage(
+    const StringView& message) {
+  m_dispatcher.dispatch(protocol::parseJSON(message));
+}
+
+std::unique_ptr<StringBuffer> V8InspectorSessionImpl::stateJSON() {
+  String16 json = m_state->toJSONString();
+  return StringBufferImpl::adopt(json);
+}
+
+std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
+V8InspectorSessionImpl::supportedDomains() {
+  std::vector<std::unique_ptr<protocol::Schema::Domain>> domains =
+      supportedDomainsImpl();
+  std::vector<std::unique_ptr<protocol::Schema::API::Domain>> result;
+  for (size_t i = 0; i < domains.size(); ++i)
+    result.push_back(std::move(domains[i]));
+  return result;
+}
+
+std::vector<std::unique_ptr<protocol::Schema::Domain>>
+V8InspectorSessionImpl::supportedDomainsImpl() {
+  std::vector<std::unique_ptr<protocol::Schema::Domain>> result;
+  result.push_back(protocol::Schema::Domain::create()
+                       .setName(protocol::Runtime::Metainfo::domainName)
+                       .setVersion(protocol::Runtime::Metainfo::version)
+                       .build());
+  result.push_back(protocol::Schema::Domain::create()
+                       .setName(protocol::Debugger::Metainfo::domainName)
+                       .setVersion(protocol::Debugger::Metainfo::version)
+                       .build());
+  result.push_back(protocol::Schema::Domain::create()
+                       .setName(protocol::Profiler::Metainfo::domainName)
+                       .setVersion(protocol::Profiler::Metainfo::version)
+                       .build());
+  result.push_back(protocol::Schema::Domain::create()
+                       .setName(protocol::HeapProfiler::Metainfo::domainName)
+                       .setVersion(protocol::HeapProfiler::Metainfo::version)
+                       .build());
+  result.push_back(protocol::Schema::Domain::create()
+                       .setName(protocol::Schema::Metainfo::domainName)
+                       .setVersion(protocol::Schema::Metainfo::version)
+                       .build());
+  return result;
+}
+
+void V8InspectorSessionImpl::addInspectedObject(
+    std::unique_ptr<V8InspectorSession::Inspectable> inspectable) {
+  m_inspectedObjects.insert(m_inspectedObjects.begin(), std::move(inspectable));
+  if (m_inspectedObjects.size() > kInspectedObjectBufferSize)
+    m_inspectedObjects.resize(kInspectedObjectBufferSize);
+}
+
+V8InspectorSession::Inspectable* V8InspectorSessionImpl::inspectedObject(
+    unsigned num) {
+  if (num >= m_inspectedObjects.size()) return nullptr;
+  return m_inspectedObjects[num].get();
+}
+
+void V8InspectorSessionImpl::schedulePauseOnNextStatement(
+    const StringView& breakReason, const StringView& breakDetails) {
+  m_debuggerAgent->schedulePauseOnNextStatement(
+      toString16(breakReason),
+      protocol::DictionaryValue::cast(protocol::parseJSON(breakDetails)));
+}
+
+void V8InspectorSessionImpl::cancelPauseOnNextStatement() {
+  m_debuggerAgent->cancelPauseOnNextStatement();
+}
+
+void V8InspectorSessionImpl::breakProgram(const StringView& breakReason,
+                                          const StringView& breakDetails) {
+  m_debuggerAgent->breakProgram(
+      toString16(breakReason),
+      protocol::DictionaryValue::cast(protocol::parseJSON(breakDetails)));
+}
+
+void V8InspectorSessionImpl::setSkipAllPauses(bool skip) {
+  ErrorString errorString;
+  m_debuggerAgent->setSkipAllPauses(&errorString, skip);
+}
+
+void V8InspectorSessionImpl::resume() {
+  ErrorString errorString;
+  m_debuggerAgent->resume(&errorString);
+}
+
+void V8InspectorSessionImpl::stepOver() {
+  ErrorString errorString;
+  m_debuggerAgent->stepOver(&errorString);
+}
+
+std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>>
+V8InspectorSessionImpl::searchInTextByLines(const StringView& text,
+                                            const StringView& query,
+                                            bool caseSensitive, bool isRegex) {
+  // TODO(dgozman): search may operate on StringView and avoid copying |text|.
+  std::vector<std::unique_ptr<protocol::Debugger::SearchMatch>> matches =
+      searchInTextByLinesImpl(this, toString16(text), toString16(query),
+                              caseSensitive, isRegex);
+  std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>> result;
+  for (size_t i = 0; i < matches.size(); ++i)
+    result.push_back(std::move(matches[i]));
+  return result;
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-inspector-session-impl.h b/src/inspector/v8-inspector-session-impl.h
new file mode 100644
index 0000000..e84e8c9
--- /dev/null
+++ b/src/inspector/v8-inspector-session-impl.h
@@ -0,0 +1,126 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8INSPECTORSESSIONIMPL_H_
+#define V8_INSPECTOR_V8INSPECTORSESSIONIMPL_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+#include "src/inspector/protocol/Schema.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+class InjectedScript;
+class RemoteObjectIdBase;
+class V8ConsoleAgentImpl;
+class V8DebuggerAgentImpl;
+class V8InspectorImpl;
+class V8HeapProfilerAgentImpl;
+class V8ProfilerAgentImpl;
+class V8RuntimeAgentImpl;
+class V8SchemaAgentImpl;
+
+using protocol::ErrorString;
+
+class V8InspectorSessionImpl : public V8InspectorSession,
+                               public protocol::FrontendChannel {
+ public:
+  static std::unique_ptr<V8InspectorSessionImpl> create(
+      V8InspectorImpl*, int contextGroupId, V8Inspector::Channel*,
+      const StringView& state);
+  ~V8InspectorSessionImpl();
+
+  V8InspectorImpl* inspector() const { return m_inspector; }
+  V8ConsoleAgentImpl* consoleAgent() { return m_consoleAgent.get(); }
+  V8DebuggerAgentImpl* debuggerAgent() { return m_debuggerAgent.get(); }
+  V8SchemaAgentImpl* schemaAgent() { return m_schemaAgent.get(); }
+  V8ProfilerAgentImpl* profilerAgent() { return m_profilerAgent.get(); }
+  V8RuntimeAgentImpl* runtimeAgent() { return m_runtimeAgent.get(); }
+  int contextGroupId() const { return m_contextGroupId; }
+
+  InjectedScript* findInjectedScript(ErrorString*, int contextId);
+  InjectedScript* findInjectedScript(ErrorString*, RemoteObjectIdBase*);
+  void reset();
+  void discardInjectedScripts();
+  void reportAllContexts(V8RuntimeAgentImpl*);
+  void setCustomObjectFormatterEnabled(bool);
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapObject(
+      v8::Local<v8::Context>, v8::Local<v8::Value>, const String16& groupName,
+      bool generatePreview);
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapTable(
+      v8::Local<v8::Context>, v8::Local<v8::Value> table,
+      v8::Local<v8::Value> columns);
+  std::vector<std::unique_ptr<protocol::Schema::Domain>> supportedDomainsImpl();
+  bool unwrapObject(ErrorString*, const String16& objectId,
+                    v8::Local<v8::Value>*, v8::Local<v8::Context>*,
+                    String16* objectGroup);
+  void releaseObjectGroup(const String16& objectGroup);
+
+  // V8InspectorSession implementation.
+  void dispatchProtocolMessage(const StringView& message) override;
+  std::unique_ptr<StringBuffer> stateJSON() override;
+  std::vector<std::unique_ptr<protocol::Schema::API::Domain>> supportedDomains()
+      override;
+  void addInspectedObject(
+      std::unique_ptr<V8InspectorSession::Inspectable>) override;
+  void schedulePauseOnNextStatement(const StringView& breakReason,
+                                    const StringView& breakDetails) override;
+  void cancelPauseOnNextStatement() override;
+  void breakProgram(const StringView& breakReason,
+                    const StringView& breakDetails) override;
+  void setSkipAllPauses(bool) override;
+  void resume() override;
+  void stepOver() override;
+  std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>>
+  searchInTextByLines(const StringView& text, const StringView& query,
+                      bool caseSensitive, bool isRegex) override;
+  void releaseObjectGroup(const StringView& objectGroup) override;
+  bool unwrapObject(std::unique_ptr<StringBuffer>*, const StringView& objectId,
+                    v8::Local<v8::Value>*, v8::Local<v8::Context>*,
+                    std::unique_ptr<StringBuffer>* objectGroup) override;
+  std::unique_ptr<protocol::Runtime::API::RemoteObject> wrapObject(
+      v8::Local<v8::Context>, v8::Local<v8::Value>,
+      const StringView& groupName) override;
+
+  V8InspectorSession::Inspectable* inspectedObject(unsigned num);
+  static const unsigned kInspectedObjectBufferSize = 5;
+
+ private:
+  V8InspectorSessionImpl(V8InspectorImpl*, int contextGroupId,
+                         V8Inspector::Channel*, const StringView& state);
+  protocol::DictionaryValue* agentState(const String16& name);
+
+  // protocol::FrontendChannel implementation.
+  void sendProtocolResponse(int callId, const String16& message) override;
+  void sendProtocolNotification(const String16& message) override;
+  void flushProtocolNotifications() override;
+
+  int m_contextGroupId;
+  V8InspectorImpl* m_inspector;
+  V8Inspector::Channel* m_channel;
+  bool m_customObjectFormatterEnabled;
+
+  protocol::UberDispatcher m_dispatcher;
+  std::unique_ptr<protocol::DictionaryValue> m_state;
+
+  std::unique_ptr<V8RuntimeAgentImpl> m_runtimeAgent;
+  std::unique_ptr<V8DebuggerAgentImpl> m_debuggerAgent;
+  std::unique_ptr<V8HeapProfilerAgentImpl> m_heapProfilerAgent;
+  std::unique_ptr<V8ProfilerAgentImpl> m_profilerAgent;
+  std::unique_ptr<V8ConsoleAgentImpl> m_consoleAgent;
+  std::unique_ptr<V8SchemaAgentImpl> m_schemaAgent;
+  std::vector<std::unique_ptr<V8InspectorSession::Inspectable>>
+      m_inspectedObjects;
+
+  DISALLOW_COPY_AND_ASSIGN(V8InspectorSessionImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8INSPECTORSESSIONIMPL_H_
diff --git a/src/inspector/v8-internal-value-type.cc b/src/inspector/v8-internal-value-type.cc
new file mode 100644
index 0000000..cde8bc9
--- /dev/null
+++ b/src/inspector/v8-internal-value-type.cc
@@ -0,0 +1,77 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-internal-value-type.h"
+
+#include "src/inspector/protocol-platform.h"
+#include "src/inspector/string-util.h"
+
+namespace v8_inspector {
+
+namespace {
+
+v8::Local<v8::Private> internalSubtypePrivate(v8::Isolate* isolate) {
+  return v8::Private::ForApi(
+      isolate,
+      toV8StringInternalized(isolate, "V8InternalType#internalSubtype"));
+}
+
+v8::Local<v8::String> subtypeForInternalType(v8::Isolate* isolate,
+                                             V8InternalValueType type) {
+  switch (type) {
+    case V8InternalValueType::kEntry:
+      return toV8StringInternalized(isolate, "internal#entry");
+    case V8InternalValueType::kLocation:
+      return toV8StringInternalized(isolate, "internal#location");
+    case V8InternalValueType::kScope:
+      return toV8StringInternalized(isolate, "internal#scope");
+    case V8InternalValueType::kScopeList:
+      return toV8StringInternalized(isolate, "internal#scopeList");
+  }
+  UNREACHABLE();
+  return v8::Local<v8::String>();
+}
+
+}  // namespace
+
+bool markAsInternal(v8::Local<v8::Context> context,
+                    v8::Local<v8::Object> object, V8InternalValueType type) {
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::Local<v8::Private> privateValue = internalSubtypePrivate(isolate);
+  v8::Local<v8::String> subtype = subtypeForInternalType(isolate, type);
+  return object->SetPrivate(context, privateValue, subtype).FromMaybe(false);
+}
+
+bool markArrayEntriesAsInternal(v8::Local<v8::Context> context,
+                                v8::Local<v8::Array> array,
+                                V8InternalValueType type) {
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::Local<v8::Private> privateValue = internalSubtypePrivate(isolate);
+  v8::Local<v8::String> subtype = subtypeForInternalType(isolate, type);
+  for (uint32_t i = 0; i < array->Length(); ++i) {
+    v8::Local<v8::Value> entry;
+    if (!array->Get(context, i).ToLocal(&entry) || !entry->IsObject())
+      return false;
+    if (!entry.As<v8::Object>()
+             ->SetPrivate(context, privateValue, subtype)
+             .FromMaybe(false))
+      return false;
+  }
+  return true;
+}
+
+v8::Local<v8::Value> v8InternalValueTypeFrom(v8::Local<v8::Context> context,
+                                             v8::Local<v8::Object> object) {
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::Local<v8::Private> privateValue = internalSubtypePrivate(isolate);
+  if (!object->HasPrivate(context, privateValue).FromMaybe(false))
+    return v8::Null(isolate);
+  v8::Local<v8::Value> subtypeValue;
+  if (!object->GetPrivate(context, privateValue).ToLocal(&subtypeValue) ||
+      !subtypeValue->IsString())
+    return v8::Null(isolate);
+  return subtypeValue;
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-internal-value-type.h b/src/inspector/v8-internal-value-type.h
new file mode 100644
index 0000000..e648a0d
--- /dev/null
+++ b/src/inspector/v8-internal-value-type.h
@@ -0,0 +1,23 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8INTERNALVALUETYPE_H_
+#define V8_INSPECTOR_V8INTERNALVALUETYPE_H_
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+enum class V8InternalValueType { kEntry, kLocation, kScope, kScopeList };
+
+bool markAsInternal(v8::Local<v8::Context>, v8::Local<v8::Object>,
+                    V8InternalValueType);
+bool markArrayEntriesAsInternal(v8::Local<v8::Context>, v8::Local<v8::Array>,
+                                V8InternalValueType);
+v8::Local<v8::Value> v8InternalValueTypeFrom(v8::Local<v8::Context>,
+                                             v8::Local<v8::Object>);
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8INTERNALVALUETYPE_H_
diff --git a/src/inspector/v8-profiler-agent-impl.cc b/src/inspector/v8-profiler-agent-impl.cc
new file mode 100644
index 0000000..0511ca3
--- /dev/null
+++ b/src/inspector/v8-profiler-agent-impl.cc
@@ -0,0 +1,321 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-profiler-agent-impl.h"
+
+#include <vector>
+
+#include "src/base/atomicops.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+#include "include/v8-profiler.h"
+
+namespace v8_inspector {
+
+namespace ProfilerAgentState {
+static const char samplingInterval[] = "samplingInterval";
+static const char userInitiatedProfiling[] = "userInitiatedProfiling";
+static const char profilerEnabled[] = "profilerEnabled";
+}  // namespace ProfilerAgentState
+
+namespace {
+
+std::unique_ptr<protocol::Array<protocol::Profiler::PositionTickInfo>>
+buildInspectorObjectForPositionTicks(const v8::CpuProfileNode* node) {
+  unsigned lineCount = node->GetHitLineCount();
+  if (!lineCount) return nullptr;
+  auto array = protocol::Array<protocol::Profiler::PositionTickInfo>::create();
+  std::vector<v8::CpuProfileNode::LineTick> entries(lineCount);
+  if (node->GetLineTicks(&entries[0], lineCount)) {
+    for (unsigned i = 0; i < lineCount; i++) {
+      std::unique_ptr<protocol::Profiler::PositionTickInfo> line =
+          protocol::Profiler::PositionTickInfo::create()
+              .setLine(entries[i].line)
+              .setTicks(entries[i].hit_count)
+              .build();
+      array->addItem(std::move(line));
+    }
+  }
+  return array;
+}
+
+std::unique_ptr<protocol::Profiler::ProfileNode> buildInspectorObjectFor(
+    v8::Isolate* isolate, const v8::CpuProfileNode* node) {
+  v8::HandleScope handleScope(isolate);
+  auto callFrame =
+      protocol::Runtime::CallFrame::create()
+          .setFunctionName(toProtocolString(node->GetFunctionName()))
+          .setScriptId(String16::fromInteger(node->GetScriptId()))
+          .setUrl(toProtocolString(node->GetScriptResourceName()))
+          .setLineNumber(node->GetLineNumber() - 1)
+          .setColumnNumber(node->GetColumnNumber() - 1)
+          .build();
+  auto result = protocol::Profiler::ProfileNode::create()
+                    .setCallFrame(std::move(callFrame))
+                    .setHitCount(node->GetHitCount())
+                    .setId(node->GetNodeId())
+                    .build();
+
+  const int childrenCount = node->GetChildrenCount();
+  if (childrenCount) {
+    auto children = protocol::Array<int>::create();
+    for (int i = 0; i < childrenCount; i++)
+      children->addItem(node->GetChild(i)->GetNodeId());
+    result->setChildren(std::move(children));
+  }
+
+  const char* deoptReason = node->GetBailoutReason();
+  if (deoptReason && deoptReason[0] && strcmp(deoptReason, "no reason"))
+    result->setDeoptReason(deoptReason);
+
+  auto positionTicks = buildInspectorObjectForPositionTicks(node);
+  if (positionTicks) result->setPositionTicks(std::move(positionTicks));
+
+  return result;
+}
+
+std::unique_ptr<protocol::Array<int>> buildInspectorObjectForSamples(
+    v8::CpuProfile* v8profile) {
+  auto array = protocol::Array<int>::create();
+  int count = v8profile->GetSamplesCount();
+  for (int i = 0; i < count; i++)
+    array->addItem(v8profile->GetSample(i)->GetNodeId());
+  return array;
+}
+
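+// The protocol transmits per-sample time deltas rather than absolute
+// timestamps; the first delta is taken relative to the profile start time.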
+std::unique_ptr<protocol::Array<int>> buildInspectorObjectForTimestamps(
+    v8::CpuProfile* v8profile) {
+  auto array = protocol::Array<int>::create();
+  int count = v8profile->GetSamplesCount();
+  uint64_t lastTime = v8profile->GetStartTime();
+  for (int i = 0; i < count; i++) {
+    uint64_t ts = v8profile->GetSampleTimestamp(i);
+    array->addItem(static_cast<int>(ts - lastTime));
+    lastTime = ts;
+  }
+  return array;
+}
+
+void flattenNodesTree(v8::Isolate* isolate, const v8::CpuProfileNode* node,
+                      protocol::Array<protocol::Profiler::ProfileNode>* list) {
+  list->addItem(buildInspectorObjectFor(isolate, node));
+  const int childrenCount = node->GetChildrenCount();
+  for (int i = 0; i < childrenCount; i++)
+    flattenNodesTree(isolate, node->GetChild(i), list);
+}
+
+std::unique_ptr<protocol::Profiler::Profile> createCPUProfile(
+    v8::Isolate* isolate, v8::CpuProfile* v8profile) {
+  auto nodes = protocol::Array<protocol::Profiler::ProfileNode>::create();
+  flattenNodesTree(isolate, v8profile->GetTopDownRoot(), nodes.get());
+  return protocol::Profiler::Profile::create()
+      .setNodes(std::move(nodes))
+      .setStartTime(static_cast<double>(v8profile->GetStartTime()))
+      .setEndTime(static_cast<double>(v8profile->GetEndTime()))
+      .setSamples(buildInspectorObjectForSamples(v8profile))
+      .setTimeDeltas(buildInspectorObjectForTimestamps(v8profile))
+      .build();
+}
+
+std::unique_ptr<protocol::Debugger::Location> currentDebugLocation(
+    V8InspectorImpl* inspector) {
+  std::unique_ptr<V8StackTraceImpl> callStack =
+      inspector->debugger()->captureStackTrace(false /* fullStack */);
+  auto location = protocol::Debugger::Location::create()
+                      .setScriptId(toString16(callStack->topScriptId()))
+                      .setLineNumber(callStack->topLineNumber())
+                      .build();
+  location->setColumnNumber(callStack->topColumnNumber());
+  return location;
+}
+
+volatile int s_lastProfileId = 0;
+
+}  // namespace
+
+class V8ProfilerAgentImpl::ProfileDescriptor {
+ public:
+  ProfileDescriptor(const String16& id, const String16& title)
+      : m_id(id), m_title(title) {}
+  String16 m_id;
+  String16 m_title;
+};
+
+V8ProfilerAgentImpl::V8ProfilerAgentImpl(
+    V8InspectorSessionImpl* session, protocol::FrontendChannel* frontendChannel,
+    protocol::DictionaryValue* state)
+    : m_session(session),
+      m_isolate(m_session->inspector()->isolate()),
+      m_profiler(nullptr),
+      m_state(state),
+      m_frontend(frontendChannel),
+      m_enabled(false),
+      m_recordingCPUProfile(false) {}
+
+V8ProfilerAgentImpl::~V8ProfilerAgentImpl() {
+  if (m_profiler) m_profiler->Dispose();
+}
+
+void V8ProfilerAgentImpl::consoleProfile(const String16& title) {
+  if (!m_enabled) return;
+  String16 id = nextProfileId();
+  m_startedProfiles.push_back(ProfileDescriptor(id, title));
+  startProfiling(id);
+  m_frontend.consoleProfileStarted(
+      id, currentDebugLocation(m_session->inspector()), title);
+}
+
+void V8ProfilerAgentImpl::consoleProfileEnd(const String16& title) {
+  if (!m_enabled) return;
+  String16 id;
+  String16 resolvedTitle;
+  // Take last started profile if no title was passed.
+  if (title.isEmpty()) {
+    if (m_startedProfiles.empty()) return;
+    id = m_startedProfiles.back().m_id;
+    resolvedTitle = m_startedProfiles.back().m_title;
+    m_startedProfiles.pop_back();
+  } else {
+    for (size_t i = 0; i < m_startedProfiles.size(); i++) {
+      if (m_startedProfiles[i].m_title == title) {
+        resolvedTitle = title;
+        id = m_startedProfiles[i].m_id;
+        m_startedProfiles.erase(m_startedProfiles.begin() + i);
+        break;
+      }
+    }
+    if (id.isEmpty()) return;
+  }
+  std::unique_ptr<protocol::Profiler::Profile> profile =
+      stopProfiling(id, true);
+  if (!profile) return;
+  std::unique_ptr<protocol::Debugger::Location> location =
+      currentDebugLocation(m_session->inspector());
+  m_frontend.consoleProfileFinished(id, std::move(location), std::move(profile),
+                                    resolvedTitle);
+}
+
+void V8ProfilerAgentImpl::enable(ErrorString*) {
+  if (m_enabled) return;
+  m_enabled = true;
+  DCHECK(!m_profiler);
+  m_profiler = v8::CpuProfiler::New(m_isolate);
+  m_state->setBoolean(ProfilerAgentState::profilerEnabled, true);
+}
+
+void V8ProfilerAgentImpl::disable(ErrorString* errorString) {
+  if (!m_enabled) return;
+  for (size_t i = m_startedProfiles.size(); i > 0; --i)
+    stopProfiling(m_startedProfiles[i - 1].m_id, false);
+  m_startedProfiles.clear();
+  stop(nullptr, nullptr);
+  m_profiler->Dispose();
+  m_profiler = nullptr;
+  m_enabled = false;
+  m_state->setBoolean(ProfilerAgentState::profilerEnabled, false);
+}
+
+void V8ProfilerAgentImpl::setSamplingInterval(ErrorString* error,
+                                              int interval) {
+  if (m_recordingCPUProfile) {
+    *error = "Cannot change sampling interval when profiling.";
+    return;
+  }
+  m_state->setInteger(ProfilerAgentState::samplingInterval, interval);
+  m_profiler->SetSamplingInterval(interval);
+}
+
+void V8ProfilerAgentImpl::restore() {
+  DCHECK(!m_enabled);
+  if (!m_state->booleanProperty(ProfilerAgentState::profilerEnabled, false))
+    return;
+  m_enabled = true;
+  DCHECK(!m_profiler);
+  m_profiler = v8::CpuProfiler::New(m_isolate);
+  int interval = 0;
+  m_state->getInteger(ProfilerAgentState::samplingInterval, &interval);
+  if (interval) m_profiler->SetSamplingInterval(interval);
+  if (m_state->booleanProperty(ProfilerAgentState::userInitiatedProfiling,
+                               false)) {
+    ErrorString error;
+    start(&error);
+  }
+}
+
+void V8ProfilerAgentImpl::start(ErrorString* error) {
+  if (m_recordingCPUProfile) return;
+  if (!m_enabled) {
+    *error = "Profiler is not enabled";
+    return;
+  }
+  m_recordingCPUProfile = true;
+  m_frontendInitiatedProfileId = nextProfileId();
+  startProfiling(m_frontendInitiatedProfileId);
+  m_state->setBoolean(ProfilerAgentState::userInitiatedProfiling, true);
+}
+
+void V8ProfilerAgentImpl::stop(
+    ErrorString* errorString,
+    std::unique_ptr<protocol::Profiler::Profile>* profile) {
+  if (!m_recordingCPUProfile) {
+    if (errorString) *errorString = "No recording profiles found";
+    return;
+  }
+  m_recordingCPUProfile = false;
+  std::unique_ptr<protocol::Profiler::Profile> cpuProfile =
+      stopProfiling(m_frontendInitiatedProfileId, !!profile);
+  if (profile) {
+    *profile = std::move(cpuProfile);
+    if (!profile->get() && errorString) *errorString = "Profile is not found";
+  }
+  m_frontendInitiatedProfileId = String16();
+  m_state->setBoolean(ProfilerAgentState::userInitiatedProfiling, false);
+}
+
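+// Profile ids come from a process-wide atomic counter, so they remain unique
+// across sessions and isolates.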
+String16 V8ProfilerAgentImpl::nextProfileId() {
+  return String16::fromInteger(
+      v8::base::NoBarrier_AtomicIncrement(&s_lastProfileId, 1));
+}
+
+void V8ProfilerAgentImpl::startProfiling(const String16& title) {
+  v8::HandleScope handleScope(m_isolate);
+  m_profiler->StartProfiling(toV8String(m_isolate, title), true);
+}
+
+std::unique_ptr<protocol::Profiler::Profile> V8ProfilerAgentImpl::stopProfiling(
+    const String16& title, bool serialize) {
+  v8::HandleScope handleScope(m_isolate);
+  v8::CpuProfile* profile =
+      m_profiler->StopProfiling(toV8String(m_isolate, title));
+  if (!profile) return nullptr;
+  std::unique_ptr<protocol::Profiler::Profile> result;
+  if (serialize) result = createCPUProfile(m_isolate, profile);
+  profile->Delete();
+  return result;
+}
+
+bool V8ProfilerAgentImpl::isRecording() const {
+  return m_recordingCPUProfile || !m_startedProfiles.empty();
+}
+
+bool V8ProfilerAgentImpl::idleStarted() {
+  if (m_profiler) m_profiler->SetIdle(true);
+  return m_profiler;
+}
+
+bool V8ProfilerAgentImpl::idleFinished() {
+  if (m_profiler) m_profiler->SetIdle(false);
+  return m_profiler;
+}
+
+void V8ProfilerAgentImpl::collectSample() {
+  if (m_profiler) m_profiler->CollectSample();
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-profiler-agent-impl.h b/src/inspector/v8-profiler-agent-impl.h
new file mode 100644
index 0000000..ee89976
--- /dev/null
+++ b/src/inspector/v8-profiler-agent-impl.h
@@ -0,0 +1,74 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8PROFILERAGENTIMPL_H_
+#define V8_INSPECTOR_V8PROFILERAGENTIMPL_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Profiler.h"
+
+namespace v8 {
+class CpuProfiler;
+class Isolate;
+}  // namespace v8
+
+namespace v8_inspector {
+
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+
+class V8ProfilerAgentImpl : public protocol::Profiler::Backend {
+ public:
+  V8ProfilerAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                      protocol::DictionaryValue* state);
+  ~V8ProfilerAgentImpl() override;
+
+  bool enabled() const { return m_enabled; }
+  void restore();
+
+  void enable(ErrorString*) override;
+  void disable(ErrorString*) override;
+  void setSamplingInterval(ErrorString*, int) override;
+  void start(ErrorString*) override;
+  void stop(ErrorString*,
+            std::unique_ptr<protocol::Profiler::Profile>*) override;
+
+  void consoleProfile(const String16& title);
+  void consoleProfileEnd(const String16& title);
+
+  bool idleStarted();
+  bool idleFinished();
+
+  void collectSample();
+
+ private:
+  String16 nextProfileId();
+
+  void startProfiling(const String16& title);
+  std::unique_ptr<protocol::Profiler::Profile> stopProfiling(
+      const String16& title, bool serialize);
+
+  bool isRecording() const;
+
+  V8InspectorSessionImpl* m_session;
+  v8::Isolate* m_isolate;
+  v8::CpuProfiler* m_profiler;
+  protocol::DictionaryValue* m_state;
+  protocol::Profiler::Frontend m_frontend;
+  bool m_enabled;
+  bool m_recordingCPUProfile;
+  class ProfileDescriptor;
+  std::vector<ProfileDescriptor> m_startedProfiles;
+  String16 m_frontendInitiatedProfileId;
+
+  DISALLOW_COPY_AND_ASSIGN(V8ProfilerAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8PROFILERAGENTIMPL_H_
diff --git a/src/inspector/v8-regex.cc b/src/inspector/v8-regex.cc
new file mode 100644
index 0000000..47af70d
--- /dev/null
+++ b/src/inspector/v8-regex.cc
@@ -0,0 +1,93 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-regex.h"
+
+#include <limits.h>
+
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-inspector-impl.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+V8Regex::V8Regex(V8InspectorImpl* inspector, const String16& pattern,
+                 bool caseSensitive, bool multiline)
+    : m_inspector(inspector) {
+  v8::Isolate* isolate = m_inspector->isolate();
+  v8::HandleScope handleScope(isolate);
+  v8::Local<v8::Context> context = m_inspector->regexContext();
+  v8::Context::Scope contextScope(context);
+  v8::TryCatch tryCatch(isolate);
+
+  unsigned flags = v8::RegExp::kNone;
+  if (!caseSensitive) flags |= v8::RegExp::kIgnoreCase;
+  if (multiline) flags |= v8::RegExp::kMultiline;
+
+  v8::Local<v8::RegExp> regex;
+  if (v8::RegExp::New(context, toV8String(isolate, pattern),
+                      static_cast<v8::RegExp::Flags>(flags))
+          .ToLocal(&regex))
+    m_regex.Reset(isolate, regex);
+  else if (tryCatch.HasCaught())
+    m_errorMessage = toProtocolString(tryCatch.Message()->Get());
+  else
+    m_errorMessage = "Internal error";
+}
+
+int V8Regex::match(const String16& string, int startFrom,
+                   int* matchLength) const {
+  if (matchLength) *matchLength = 0;
+
+  if (m_regex.IsEmpty() || string.isEmpty()) return -1;
+
+  // V8 string lengths are limited to int.
+  if (string.length() > INT_MAX) return -1;
+
+  v8::Isolate* isolate = m_inspector->isolate();
+  v8::HandleScope handleScope(isolate);
+  v8::Local<v8::Context> context = m_inspector->regexContext();
+  v8::MicrotasksScope microtasks(isolate,
+                                 v8::MicrotasksScope::kDoNotRunMicrotasks);
+  v8::TryCatch tryCatch(isolate);
+
+  v8::Local<v8::RegExp> regex = m_regex.Get(isolate);
+  v8::Local<v8::Value> exec;
+  if (!regex->Get(context, toV8StringInternalized(isolate, "exec"))
+           .ToLocal(&exec))
+    return -1;
+  v8::Local<v8::Value> argv[] = {
+      toV8String(isolate, string.substring(startFrom))};
+  v8::Local<v8::Value> returnValue;
+  if (!exec.As<v8::Function>()
+           ->Call(context, regex, arraysize(argv), argv)
+           .ToLocal(&returnValue))
+    return -1;
+
+  // RegExp#exec returns null if there's no match, otherwise it returns an
+  // Array of strings with the first being the whole match string and others
+  // being subgroups. The Array also has some random properties tacked on like
+  // "index" which is the offset of the match.
+  //
+  // https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/RegExp/exec
+
+  DCHECK(!returnValue.IsEmpty());
+  if (!returnValue->IsArray()) return -1;
+
+  v8::Local<v8::Array> result = returnValue.As<v8::Array>();
+  v8::Local<v8::Value> matchOffset;
+  if (!result->Get(context, toV8StringInternalized(isolate, "index"))
+           .ToLocal(&matchOffset))
+    return -1;
+  if (matchLength) {
+    v8::Local<v8::Value> match;
+    if (!result->Get(context, 0).ToLocal(&match)) return -1;
+    *matchLength = match.As<v8::String>()->Length();
+  }
+
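+  // |exec| ran against the substring starting at |startFrom|, so translate
+  // the reported index back into an offset within the original string.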
+  return matchOffset.As<v8::Int32>()->Value() + startFrom;
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-regex.h b/src/inspector/v8-regex.h
new file mode 100644
index 0000000..b4b1f8c
--- /dev/null
+++ b/src/inspector/v8-regex.h
@@ -0,0 +1,37 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8REGEX_H_
+#define V8_INSPECTOR_V8REGEX_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/string-16.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class V8InspectorImpl;
+
+enum MultilineMode { MultilineDisabled, MultilineEnabled };
+
+class V8Regex {
+ public:
+  V8Regex(V8InspectorImpl*, const String16&, bool caseSensitive,
+          bool multiline = false);
+  int match(const String16&, int startFrom = 0, int* matchLength = 0) const;
+  bool isValid() const { return !m_regex.IsEmpty(); }
+  const String16& errorMessage() const { return m_errorMessage; }
+
+ private:
+  V8InspectorImpl* m_inspector;
+  v8::Global<v8::RegExp> m_regex;
+  String16 m_errorMessage;
+
+  DISALLOW_COPY_AND_ASSIGN(V8Regex);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8REGEX_H_
diff --git a/src/inspector/v8-runtime-agent-impl.cc b/src/inspector/v8-runtime-agent-impl.cc
new file mode 100644
index 0000000..640ec31
--- /dev/null
+++ b/src/inspector/v8-runtime-agent-impl.cc
@@ -0,0 +1,738 @@
+/*
+ * Copyright (C) 2011 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "src/inspector/v8-runtime-agent-impl.h"
+
+#include "src/inspector/injected-script.h"
+#include "src/inspector/inspected-context.h"
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/remote-object-id.h"
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-console-message.h"
+#include "src/inspector/v8-debugger-agent-impl.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+#include "src/inspector/v8-stack-trace-impl.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+namespace V8RuntimeAgentImplState {
+static const char customObjectFormatterEnabled[] =
+    "customObjectFormatterEnabled";
+static const char runtimeEnabled[] = "runtimeEnabled";
+}  // namespace V8RuntimeAgentImplState
+
+using protocol::Runtime::RemoteObject;
+
+static bool hasInternalError(ErrorString* errorString, bool hasError) {
+  if (hasError) *errorString = "Internal error";
+  return hasError;
+}
+
+namespace {
+
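+// Bridges a JavaScript promise to a protocol callback: attaches then/catch
+// handlers and reports the settled value (with ExceptionDetails on rejection)
+// to the frontend. If the promise is garbage-collected before settling, the
+// weak callback reports "Promise was collected" and deletes the handler.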
+template <typename Callback>
+class ProtocolPromiseHandler {
+ public:
+  static void add(V8InspectorImpl* inspector, v8::Local<v8::Context> context,
+                  v8::MaybeLocal<v8::Value> value,
+                  const String16& notPromiseError, int contextGroupId,
+                  int executionContextId, const String16& objectGroup,
+                  bool returnByValue, bool generatePreview,
+                  std::unique_ptr<Callback> callback) {
+    if (value.IsEmpty()) {
+      callback->sendFailure("Internal error");
+      return;
+    }
+    if (!value.ToLocalChecked()->IsPromise()) {
+      callback->sendFailure(notPromiseError);
+      return;
+    }
+    v8::MicrotasksScope microtasks_scope(inspector->isolate(),
+                                         v8::MicrotasksScope::kRunMicrotasks);
+    v8::Local<v8::Promise> promise =
+        v8::Local<v8::Promise>::Cast(value.ToLocalChecked());
+    Callback* rawCallback = callback.get();
+    ProtocolPromiseHandler<Callback>* handler = new ProtocolPromiseHandler(
+        inspector, contextGroupId, executionContextId, objectGroup,
+        returnByValue, generatePreview, std::move(callback));
+    v8::Local<v8::Value> wrapper = handler->m_wrapper.Get(inspector->isolate());
+
+    v8::Local<v8::Function> thenCallbackFunction =
+        v8::Function::New(context, thenCallback, wrapper, 0,
+                          v8::ConstructorBehavior::kThrow)
+            .ToLocalChecked();
+    if (promise->Then(context, thenCallbackFunction).IsEmpty()) {
+      rawCallback->sendFailure("Internal error");
+      return;
+    }
+    v8::Local<v8::Function> catchCallbackFunction =
+        v8::Function::New(context, catchCallback, wrapper, 0,
+                          v8::ConstructorBehavior::kThrow)
+            .ToLocalChecked();
+    if (promise->Catch(context, catchCallbackFunction).IsEmpty()) {
+      rawCallback->sendFailure("Internal error");
+      return;
+    }
+  }
+
+ private:
+  static void thenCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+    ProtocolPromiseHandler<Callback>* handler =
+        static_cast<ProtocolPromiseHandler<Callback>*>(
+            info.Data().As<v8::External>()->Value());
+    DCHECK(handler);
+    v8::Local<v8::Value> value =
+        info.Length() > 0
+            ? info[0]
+            : v8::Local<v8::Value>::Cast(v8::Undefined(info.GetIsolate()));
+    std::unique_ptr<protocol::Runtime::RemoteObject> wrappedValue(
+        handler->wrapObject(value));
+    if (!wrappedValue) return;
+    handler->m_callback->sendSuccess(
+        std::move(wrappedValue), Maybe<protocol::Runtime::ExceptionDetails>());
+  }
+
+  static void catchCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
+    ProtocolPromiseHandler<Callback>* handler =
+        static_cast<ProtocolPromiseHandler<Callback>*>(
+            info.Data().As<v8::External>()->Value());
+    DCHECK(handler);
+    v8::Local<v8::Value> value =
+        info.Length() > 0
+            ? info[0]
+            : v8::Local<v8::Value>::Cast(v8::Undefined(info.GetIsolate()));
+
+    std::unique_ptr<protocol::Runtime::RemoteObject> wrappedValue(
+        handler->wrapObject(value));
+    if (!wrappedValue) return;
+
+    std::unique_ptr<V8StackTraceImpl> stack =
+        handler->m_inspector->debugger()->captureStackTrace(true);
+    std::unique_ptr<protocol::Runtime::ExceptionDetails> exceptionDetails =
+        protocol::Runtime::ExceptionDetails::create()
+            .setExceptionId(handler->m_inspector->nextExceptionId())
+            .setText("Uncaught (in promise)")
+            .setLineNumber(stack && !stack->isEmpty() ? stack->topLineNumber()
+                                                      : 0)
+            .setColumnNumber(
+                stack && !stack->isEmpty() ? stack->topColumnNumber() : 0)
+            .setException(wrappedValue->clone())
+            .build();
+    if (stack)
+      exceptionDetails->setStackTrace(stack->buildInspectorObjectImpl());
+    if (stack && !stack->isEmpty())
+      exceptionDetails->setScriptId(toString16(stack->topScriptId()));
+    handler->m_callback->sendSuccess(std::move(wrappedValue),
+                                     std::move(exceptionDetails));
+  }
+
+  ProtocolPromiseHandler(V8InspectorImpl* inspector, int contextGroupId,
+                         int executionContextId, const String16& objectGroup,
+                         bool returnByValue, bool generatePreview,
+                         std::unique_ptr<Callback> callback)
+      : m_inspector(inspector),
+        m_contextGroupId(contextGroupId),
+        m_executionContextId(executionContextId),
+        m_objectGroup(objectGroup),
+        m_returnByValue(returnByValue),
+        m_generatePreview(generatePreview),
+        m_callback(std::move(callback)),
+        m_wrapper(inspector->isolate(),
+                  v8::External::New(inspector->isolate(), this)) {
+    m_wrapper.SetWeak(this, cleanup, v8::WeakCallbackType::kParameter);
+  }
+
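+  // Weak callback for m_wrapper. The first pass clears the handle and asks
+  // for a second pass; the second pass notifies the callback that the promise
+  // was collected and frees the handler.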
+  static void cleanup(
+      const v8::WeakCallbackInfo<ProtocolPromiseHandler<Callback>>& data) {
+    if (!data.GetParameter()->m_wrapper.IsEmpty()) {
+      data.GetParameter()->m_wrapper.Reset();
+      data.SetSecondPassCallback(cleanup);
+    } else {
+      data.GetParameter()->m_callback->sendFailure("Promise was collected");
+      delete data.GetParameter();
+    }
+  }
+
+  std::unique_ptr<protocol::Runtime::RemoteObject> wrapObject(
+      v8::Local<v8::Value> value) {
+    ErrorString errorString;
+    InjectedScript::ContextScope scope(&errorString, m_inspector,
+                                       m_contextGroupId, m_executionContextId);
+    if (!scope.initialize()) {
+      m_callback->sendFailure(errorString);
+      return nullptr;
+    }
+    std::unique_ptr<protocol::Runtime::RemoteObject> wrappedValue =
+        scope.injectedScript()->wrapObject(&errorString, value, m_objectGroup,
+                                           m_returnByValue, m_generatePreview);
+    if (!wrappedValue) {
+      m_callback->sendFailure(errorString);
+      return nullptr;
+    }
+    return wrappedValue;
+  }
+
+  V8InspectorImpl* m_inspector;
+  int m_contextGroupId;
+  int m_executionContextId;
+  String16 m_objectGroup;
+  bool m_returnByValue;
+  bool m_generatePreview;
+  std::unique_ptr<Callback> m_callback;
+  v8::Global<v8::External> m_wrapper;
+};
+
+template <typename Callback>
+bool wrapEvaluateResultAsync(InjectedScript* injectedScript,
+                             v8::MaybeLocal<v8::Value> maybeResultValue,
+                             const v8::TryCatch& tryCatch,
+                             const String16& objectGroup, bool returnByValue,
+                             bool generatePreview, Callback* callback) {
+  std::unique_ptr<RemoteObject> result;
+  Maybe<protocol::Runtime::ExceptionDetails> exceptionDetails;
+
+  ErrorString errorString;
+  injectedScript->wrapEvaluateResult(
+      &errorString, maybeResultValue, tryCatch, objectGroup, returnByValue,
+      generatePreview, &result, &exceptionDetails);
+  if (errorString.isEmpty()) {
+    callback->sendSuccess(std::move(result), exceptionDetails);
+    return true;
+  }
+  callback->sendFailure(errorString);
+  return false;
+}
+
+int ensureContext(ErrorString* errorString, V8InspectorImpl* inspector,
+                  int contextGroupId, const Maybe<int>& executionContextId) {
+  int contextId;
+  if (executionContextId.isJust()) {
+    contextId = executionContextId.fromJust();
+  } else {
+    v8::HandleScope handles(inspector->isolate());
+    v8::Local<v8::Context> defaultContext =
+        inspector->client()->ensureDefaultContextInGroup(contextGroupId);
+    if (defaultContext.IsEmpty()) {
+      *errorString = "Cannot find default execution context";
+      return 0;
+    }
+    contextId = V8Debugger::contextId(defaultContext);
+  }
+  return contextId;
+}
+
+}  // namespace
+
+V8RuntimeAgentImpl::V8RuntimeAgentImpl(
+    V8InspectorSessionImpl* session, protocol::FrontendChannel* frontendChannel,
+    protocol::DictionaryValue* state)
+    : m_session(session),
+      m_state(state),
+      m_frontend(frontendChannel),
+      m_inspector(session->inspector()),
+      m_enabled(false) {}
+
+V8RuntimeAgentImpl::~V8RuntimeAgentImpl() {}
+
+void V8RuntimeAgentImpl::evaluate(
+    const String16& expression, const Maybe<String16>& objectGroup,
+    const Maybe<bool>& includeCommandLineAPI, const Maybe<bool>& silent,
+    const Maybe<int>& executionContextId, const Maybe<bool>& returnByValue,
+    const Maybe<bool>& generatePreview, const Maybe<bool>& userGesture,
+    const Maybe<bool>& awaitPromise,
+    std::unique_ptr<EvaluateCallback> callback) {
+  ErrorString errorString;
+  int contextId =
+      ensureContext(&errorString, m_inspector, m_session->contextGroupId(),
+                    executionContextId);
+  if (!errorString.isEmpty()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  InjectedScript::ContextScope scope(&errorString, m_inspector,
+                                     m_session->contextGroupId(), contextId);
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  if (silent.fromMaybe(false)) scope.ignoreExceptionsAndMuteConsole();
+  if (userGesture.fromMaybe(false)) scope.pretendUserGesture();
+
+  if (includeCommandLineAPI.fromMaybe(false) &&
+      !scope.installCommandLineAPI()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  bool evalIsDisabled = !scope.context()->IsCodeGenerationFromStringsAllowed();
+  // Temporarily allow code generation from strings for the inspector.
+  if (evalIsDisabled) scope.context()->AllowCodeGenerationFromStrings(true);
+
+  v8::MaybeLocal<v8::Value> maybeResultValue;
+  v8::Local<v8::Script> script = m_inspector->compileScript(
+      scope.context(), toV8String(m_inspector->isolate(), expression),
+      String16(), false);
+  if (!script.IsEmpty())
+    maybeResultValue = m_inspector->runCompiledScript(scope.context(), script);
+
+  if (evalIsDisabled) scope.context()->AllowCodeGenerationFromStrings(false);
+
+  // Re-initialize after running client's code, as it could have destroyed
+  // context or session.
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  if (!awaitPromise.fromMaybe(false) || scope.tryCatch().HasCaught()) {
+    wrapEvaluateResultAsync(scope.injectedScript(), maybeResultValue,
+                            scope.tryCatch(), objectGroup.fromMaybe(""),
+                            returnByValue.fromMaybe(false),
+                            generatePreview.fromMaybe(false), callback.get());
+    return;
+  }
+  ProtocolPromiseHandler<EvaluateCallback>::add(
+      m_inspector, scope.context(), maybeResultValue,
+      "Result of the evaluation is not a promise", m_session->contextGroupId(),
+      scope.injectedScript()->context()->contextId(), objectGroup.fromMaybe(""),
+      returnByValue.fromMaybe(false), generatePreview.fromMaybe(false),
+      std::move(callback));
+}
+
+void V8RuntimeAgentImpl::awaitPromise(
+    const String16& promiseObjectId, const Maybe<bool>& returnByValue,
+    const Maybe<bool>& generatePreview,
+    std::unique_ptr<AwaitPromiseCallback> callback) {
+  ErrorString errorString;
+  InjectedScript::ObjectScope scope(
+      &errorString, m_inspector, m_session->contextGroupId(), promiseObjectId);
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+  ProtocolPromiseHandler<AwaitPromiseCallback>::add(
+      m_inspector, scope.context(), scope.object(),
+      "Could not find promise with given id", m_session->contextGroupId(),
+      scope.injectedScript()->context()->contextId(), scope.objectGroupName(),
+      returnByValue.fromMaybe(false), generatePreview.fromMaybe(false),
+      std::move(callback));
+}
+
+void V8RuntimeAgentImpl::callFunctionOn(
+    const String16& objectId, const String16& expression,
+    const Maybe<protocol::Array<protocol::Runtime::CallArgument>>&
+        optionalArguments,
+    const Maybe<bool>& silent, const Maybe<bool>& returnByValue,
+    const Maybe<bool>& generatePreview, const Maybe<bool>& userGesture,
+    const Maybe<bool>& awaitPromise,
+    std::unique_ptr<CallFunctionOnCallback> callback) {
+  ErrorString errorString;
+  InjectedScript::ObjectScope scope(&errorString, m_inspector,
+                                    m_session->contextGroupId(), objectId);
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  std::unique_ptr<v8::Local<v8::Value>[]> argv = nullptr;
+  int argc = 0;
+  if (optionalArguments.isJust()) {
+    protocol::Array<protocol::Runtime::CallArgument>* arguments =
+        optionalArguments.fromJust();
+    argc = static_cast<int>(arguments->length());
+    argv.reset(new v8::Local<v8::Value>[argc]);
+    for (int i = 0; i < argc; ++i) {
+      v8::Local<v8::Value> argumentValue;
+      if (!scope.injectedScript()
+               ->resolveCallArgument(&errorString, arguments->get(i))
+               .ToLocal(&argumentValue)) {
+        callback->sendFailure(errorString);
+        return;
+      }
+      argv[i] = argumentValue;
+    }
+  }
+
+  if (silent.fromMaybe(false)) scope.ignoreExceptionsAndMuteConsole();
+  if (userGesture.fromMaybe(false)) scope.pretendUserGesture();
+
+  v8::MaybeLocal<v8::Value> maybeFunctionValue =
+      m_inspector->compileAndRunInternalScript(
+          scope.context(),
+          toV8String(m_inspector->isolate(), "(" + expression + ")"));
+  // Re-initialize after running client's code, as it could have destroyed
+  // context or session.
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  if (scope.tryCatch().HasCaught()) {
+    wrapEvaluateResultAsync(scope.injectedScript(), maybeFunctionValue,
+                            scope.tryCatch(), scope.objectGroupName(), false,
+                            false, callback.get());
+    return;
+  }
+
+  v8::Local<v8::Value> functionValue;
+  if (!maybeFunctionValue.ToLocal(&functionValue) ||
+      !functionValue->IsFunction()) {
+    callback->sendFailure("Given expression does not evaluate to a function");
+    return;
+  }
+
+  v8::MaybeLocal<v8::Value> maybeResultValue = m_inspector->callFunction(
+      functionValue.As<v8::Function>(), scope.context(), scope.object(), argc,
+      argv.get());
+  // Re-initialize after running client's code, as it could have destroyed
+  // context or session.
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  if (!awaitPromise.fromMaybe(false) || scope.tryCatch().HasCaught()) {
+    wrapEvaluateResultAsync(scope.injectedScript(), maybeResultValue,
+                            scope.tryCatch(), scope.objectGroupName(),
+                            returnByValue.fromMaybe(false),
+                            generatePreview.fromMaybe(false), callback.get());
+    return;
+  }
+
+  ProtocolPromiseHandler<CallFunctionOnCallback>::add(
+      m_inspector, scope.context(), maybeResultValue,
+      "Result of the function call is not a promise",
+      m_session->contextGroupId(),
+      scope.injectedScript()->context()->contextId(), scope.objectGroupName(),
+      returnByValue.fromMaybe(false), generatePreview.fromMaybe(false),
+      std::move(callback));
+}
+
+void V8RuntimeAgentImpl::getProperties(
+    ErrorString* errorString, const String16& objectId,
+    const Maybe<bool>& ownProperties, const Maybe<bool>& accessorPropertiesOnly,
+    const Maybe<bool>& generatePreview,
+    std::unique_ptr<protocol::Array<protocol::Runtime::PropertyDescriptor>>*
+        result,
+    Maybe<protocol::Array<protocol::Runtime::InternalPropertyDescriptor>>*
+        internalProperties,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails) {
+  using protocol::Runtime::InternalPropertyDescriptor;
+
+  InjectedScript::ObjectScope scope(errorString, m_inspector,
+                                    m_session->contextGroupId(), objectId);
+  if (!scope.initialize()) return;
+
+  scope.ignoreExceptionsAndMuteConsole();
+  if (!scope.object()->IsObject()) {
+    *errorString = "Value with given id is not an object";
+    return;
+  }
+
+  v8::Local<v8::Object> object = scope.object().As<v8::Object>();
+  scope.injectedScript()->getProperties(
+      errorString, object, scope.objectGroupName(),
+      ownProperties.fromMaybe(false), accessorPropertiesOnly.fromMaybe(false),
+      generatePreview.fromMaybe(false), result, exceptionDetails);
+  if (!errorString->isEmpty() || exceptionDetails->isJust() ||
+      accessorPropertiesOnly.fromMaybe(false))
+    return;
+  v8::Local<v8::Array> propertiesArray;
+  if (hasInternalError(errorString, !m_inspector->debugger()
+                                         ->internalProperties(scope.context(),
+                                                              scope.object())
+                                         .ToLocal(&propertiesArray)))
+    return;
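+  // internalProperties() returns a flat array of alternating name/value
+  // entries, hence the stride of two below.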
+  std::unique_ptr<protocol::Array<InternalPropertyDescriptor>>
+      propertiesProtocolArray =
+          protocol::Array<InternalPropertyDescriptor>::create();
+  for (uint32_t i = 0; i < propertiesArray->Length(); i += 2) {
+    v8::Local<v8::Value> name;
+    if (hasInternalError(
+            errorString,
+            !propertiesArray->Get(scope.context(), i).ToLocal(&name)) ||
+        !name->IsString())
+      return;
+    v8::Local<v8::Value> value;
+    if (hasInternalError(
+            errorString,
+            !propertiesArray->Get(scope.context(), i + 1).ToLocal(&value)))
+      return;
+    std::unique_ptr<RemoteObject> wrappedValue =
+        scope.injectedScript()->wrapObject(errorString, value,
+                                           scope.objectGroupName());
+    if (!wrappedValue) return;
+    propertiesProtocolArray->addItem(
+        InternalPropertyDescriptor::create()
+            .setName(toProtocolString(name.As<v8::String>()))
+            .setValue(std::move(wrappedValue))
+            .build());
+  }
+  if (!propertiesProtocolArray->length()) return;
+  *internalProperties = std::move(propertiesProtocolArray);
+}
+
+void V8RuntimeAgentImpl::releaseObject(ErrorString* errorString,
+                                       const String16& objectId) {
+  InjectedScript::ObjectScope scope(errorString, m_inspector,
+                                    m_session->contextGroupId(), objectId);
+  if (!scope.initialize()) return;
+  scope.injectedScript()->releaseObject(objectId);
+}
+
+void V8RuntimeAgentImpl::releaseObjectGroup(ErrorString*,
+                                            const String16& objectGroup) {
+  m_session->releaseObjectGroup(objectGroup);
+}
+
+void V8RuntimeAgentImpl::runIfWaitingForDebugger(ErrorString* errorString) {
+  m_inspector->client()->runIfWaitingForDebugger(m_session->contextGroupId());
+}
+
+void V8RuntimeAgentImpl::setCustomObjectFormatterEnabled(ErrorString*,
+                                                         bool enabled) {
+  m_state->setBoolean(V8RuntimeAgentImplState::customObjectFormatterEnabled,
+                      enabled);
+  m_session->setCustomObjectFormatterEnabled(enabled);
+}
+
+void V8RuntimeAgentImpl::discardConsoleEntries(ErrorString*) {
+  V8ConsoleMessageStorage* storage =
+      m_inspector->ensureConsoleMessageStorage(m_session->contextGroupId());
+  storage->clear();
+}
+
+void V8RuntimeAgentImpl::compileScript(
+    ErrorString* errorString, const String16& expression,
+    const String16& sourceURL, bool persistScript,
+    const Maybe<int>& executionContextId, Maybe<String16>* scriptId,
+    Maybe<protocol::Runtime::ExceptionDetails>* exceptionDetails) {
+  if (!m_enabled) {
+    *errorString = "Runtime agent is not enabled";
+    return;
+  }
+  int contextId =
+      ensureContext(errorString, m_inspector, m_session->contextGroupId(),
+                    executionContextId);
+  if (!errorString->isEmpty()) return;
+  InjectedScript::ContextScope scope(errorString, m_inspector,
+                                     m_session->contextGroupId(), contextId);
+  if (!scope.initialize()) return;
+
+  if (!persistScript) m_inspector->debugger()->muteScriptParsedEvents();
+  v8::Local<v8::Script> script = m_inspector->compileScript(
+      scope.context(), toV8String(m_inspector->isolate(), expression),
+      sourceURL, false);
+  if (!persistScript) m_inspector->debugger()->unmuteScriptParsedEvents();
+  if (script.IsEmpty()) {
+    if (scope.tryCatch().HasCaught())
+      *exceptionDetails = scope.injectedScript()->createExceptionDetails(
+          errorString, scope.tryCatch(), String16(), false);
+    else
+      *errorString = "Script compilation failed";
+    return;
+  }
+
+  if (!persistScript) return;
+
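+  // Persist the compiled script, keyed by its unbound script id, so that a
+  // later Runtime.runScript call can look it up in m_compiledScripts.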
+  String16 scriptValueId =
+      String16::fromInteger(script->GetUnboundScript()->GetId());
+  std::unique_ptr<v8::Global<v8::Script>> global(
+      new v8::Global<v8::Script>(m_inspector->isolate(), script));
+  m_compiledScripts[scriptValueId] = std::move(global);
+  *scriptId = scriptValueId;
+}
+
+void V8RuntimeAgentImpl::runScript(
+    const String16& scriptId, const Maybe<int>& executionContextId,
+    const Maybe<String16>& objectGroup, const Maybe<bool>& silent,
+    const Maybe<bool>& includeCommandLineAPI, const Maybe<bool>& returnByValue,
+    const Maybe<bool>& generatePreview, const Maybe<bool>& awaitPromise,
+    std::unique_ptr<RunScriptCallback> callback) {
+  if (!m_enabled) {
+    callback->sendFailure("Runtime agent is not enabled");
+    return;
+  }
+
+  auto it = m_compiledScripts.find(scriptId);
+  if (it == m_compiledScripts.end()) {
+    callback->sendFailure("No script with given id");
+    return;
+  }
+
+  ErrorString errorString;
+  int contextId =
+      ensureContext(&errorString, m_inspector, m_session->contextGroupId(),
+                    executionContextId);
+  if (!errorString.isEmpty()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  InjectedScript::ContextScope scope(&errorString, m_inspector,
+                                     m_session->contextGroupId(), contextId);
+  if (!scope.initialize()) {
+    callback->sendFailure(errorString);
+    return;
+  }
+
+  if (silent.fromMaybe(false)) scope.ignoreExceptionsAndMuteConsole();
+
+  std::unique_ptr<v8::Global<v8::Script>> scriptWrapper = std::move(it->second);
+  m_compiledScripts.erase(it);
+  v8::Local<v8::Script> script = scriptWrapper->Get(m_inspector->isolate());
+  if (script.IsEmpty()) {
+    callback->sendFailure("Script execution failed");
+    return;
+  }
+
+  if (includeCommandLineAPI.fromMaybe(false) && !scope.installCommandLineAPI())
+    return;
+
+  v8::MaybeLocal<v8::Value> maybeResultValue =
+      m_inspector->runCompiledScript(scope.context(), script);
+
+  // Re-initialize after running client's code, as it could have destroyed
+  // context or session.
+  if (!scope.initialize()) return;
+
+  if (!awaitPromise.fromMaybe(false) || scope.tryCatch().HasCaught()) {
+    wrapEvaluateResultAsync(scope.injectedScript(), maybeResultValue,
+                            scope.tryCatch(), objectGroup.fromMaybe(""),
+                            returnByValue.fromMaybe(false),
+                            generatePreview.fromMaybe(false), callback.get());
+    return;
+  }
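+  // awaitPromise was requested and evaluation did not throw: hand the result
+  // to ProtocolPromiseHandler, which reports back through the callback once
+  // the promise settles (or fails if the value is not a promise).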
+  ProtocolPromiseHandler<RunScriptCallback>::add(
+      m_inspector, scope.context(), maybeResultValue.ToLocalChecked(),
+      "Result of the script execution is not a promise",
+      m_session->contextGroupId(),
+      scope.injectedScript()->context()->contextId(), objectGroup.fromMaybe(""),
+      returnByValue.fromMaybe(false), generatePreview.fromMaybe(false),
+      std::move(callback));
+}
+
+void V8RuntimeAgentImpl::restore() {
+  if (!m_state->booleanProperty(V8RuntimeAgentImplState::runtimeEnabled, false))
+    return;
+  m_frontend.executionContextsCleared();
+  ErrorString error;
+  enable(&error);
+  if (m_state->booleanProperty(
+          V8RuntimeAgentImplState::customObjectFormatterEnabled, false))
+    m_session->setCustomObjectFormatterEnabled(true);
+}
+
+void V8RuntimeAgentImpl::enable(ErrorString* errorString) {
+  if (m_enabled) return;
+  m_inspector->client()->beginEnsureAllContextsInGroup(
+      m_session->contextGroupId());
+  m_enabled = true;
+  m_state->setBoolean(V8RuntimeAgentImplState::runtimeEnabled, true);
+  m_inspector->enableStackCapturingIfNeeded();
+  m_session->reportAllContexts(this);
+  V8ConsoleMessageStorage* storage =
+      m_inspector->ensureConsoleMessageStorage(m_session->contextGroupId());
+  for (const auto& message : storage->messages()) {
+    if (!reportMessage(message.get(), false)) return;
+  }
+}
+
+void V8RuntimeAgentImpl::disable(ErrorString* errorString) {
+  if (!m_enabled) return;
+  m_enabled = false;
+  m_state->setBoolean(V8RuntimeAgentImplState::runtimeEnabled, false);
+  m_inspector->disableStackCapturingIfNeeded();
+  m_session->discardInjectedScripts();
+  reset();
+  m_inspector->client()->endEnsureAllContextsInGroup(
+      m_session->contextGroupId());
+}
+
+void V8RuntimeAgentImpl::reset() {
+  m_compiledScripts.clear();
+  if (m_enabled) {
+    if (const V8InspectorImpl::ContextByIdMap* contexts =
+            m_inspector->contextGroup(m_session->contextGroupId())) {
+      for (auto& idContext : *contexts) idContext.second->setReported(false);
+    }
+    m_frontend.executionContextsCleared();
+  }
+}
+
+void V8RuntimeAgentImpl::reportExecutionContextCreated(
+    InspectedContext* context) {
+  if (!m_enabled) return;
+  context->setReported(true);
+  std::unique_ptr<protocol::Runtime::ExecutionContextDescription> description =
+      protocol::Runtime::ExecutionContextDescription::create()
+          .setId(context->contextId())
+          .setName(context->humanReadableName())
+          .setOrigin(context->origin())
+          .build();
+  if (!context->auxData().isEmpty())
+    description->setAuxData(protocol::DictionaryValue::cast(
+        protocol::parseJSON(context->auxData())));
+  m_frontend.executionContextCreated(std::move(description));
+}
+
+void V8RuntimeAgentImpl::reportExecutionContextDestroyed(
+    InspectedContext* context) {
+  if (m_enabled && context->isReported()) {
+    context->setReported(false);
+    m_frontend.executionContextDestroyed(context->contextId());
+  }
+}
+
+void V8RuntimeAgentImpl::inspect(
+    std::unique_ptr<protocol::Runtime::RemoteObject> objectToInspect,
+    std::unique_ptr<protocol::DictionaryValue> hints) {
+  if (m_enabled)
+    m_frontend.inspectRequested(std::move(objectToInspect), std::move(hints));
+}
+
+void V8RuntimeAgentImpl::messageAdded(V8ConsoleMessage* message) {
+  if (m_enabled) reportMessage(message, true);
+}
+
+bool V8RuntimeAgentImpl::reportMessage(V8ConsoleMessage* message,
+                                       bool generatePreview) {
+  message->reportToFrontend(&m_frontend, m_session, generatePreview);
+  m_frontend.flush();
+  return m_inspector->hasConsoleMessageStorage(m_session->contextGroupId());
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-runtime-agent-impl.h b/src/inspector/v8-runtime-agent-impl.h
new file mode 100644
index 0000000..edeeed4
--- /dev/null
+++ b/src/inspector/v8-runtime-agent-impl.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2011 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ *     * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ *     * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef V8_INSPECTOR_V8RUNTIMEAGENTIMPL_H_
+#define V8_INSPECTOR_V8RUNTIMEAGENTIMPL_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+class InjectedScript;
+class InspectedContext;
+class RemoteObjectIdBase;
+class V8ConsoleMessage;
+class V8InspectorImpl;
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+using protocol::Maybe;
+
+class V8RuntimeAgentImpl : public protocol::Runtime::Backend {
+ public:
+  V8RuntimeAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                     protocol::DictionaryValue* state);
+  ~V8RuntimeAgentImpl() override;
+  void restore();
+
+  // Part of the protocol.
+  void enable(ErrorString*) override;
+  void disable(ErrorString*) override;
+  void evaluate(const String16& expression, const Maybe<String16>& objectGroup,
+                const Maybe<bool>& includeCommandLineAPI,
+                const Maybe<bool>& silent, const Maybe<int>& executionContextId,
+                const Maybe<bool>& returnByValue,
+                const Maybe<bool>& generatePreview,
+                const Maybe<bool>& userGesture, const Maybe<bool>& awaitPromise,
+                std::unique_ptr<EvaluateCallback>) override;
+  void awaitPromise(const String16& promiseObjectId,
+                    const Maybe<bool>& returnByValue,
+                    const Maybe<bool>& generatePreview,
+                    std::unique_ptr<AwaitPromiseCallback>) override;
+  void callFunctionOn(
+      const String16& objectId, const String16& expression,
+      const Maybe<protocol::Array<protocol::Runtime::CallArgument>>&
+          optionalArguments,
+      const Maybe<bool>& silent, const Maybe<bool>& returnByValue,
+      const Maybe<bool>& generatePreview, const Maybe<bool>& userGesture,
+      const Maybe<bool>& awaitPromise,
+      std::unique_ptr<CallFunctionOnCallback>) override;
+  void releaseObject(ErrorString*, const String16& objectId) override;
+  void getProperties(
+      ErrorString*, const String16& objectId, const Maybe<bool>& ownProperties,
+      const Maybe<bool>& accessorPropertiesOnly,
+      const Maybe<bool>& generatePreview,
+      std::unique_ptr<protocol::Array<protocol::Runtime::PropertyDescriptor>>*
+          result,
+      Maybe<protocol::Array<protocol::Runtime::InternalPropertyDescriptor>>*
+          internalProperties,
+      Maybe<protocol::Runtime::ExceptionDetails>*) override;
+  void releaseObjectGroup(ErrorString*, const String16& objectGroup) override;
+  void runIfWaitingForDebugger(ErrorString*) override;
+  void setCustomObjectFormatterEnabled(ErrorString*, bool) override;
+  void discardConsoleEntries(ErrorString*) override;
+  void compileScript(ErrorString*, const String16& expression,
+                     const String16& sourceURL, bool persistScript,
+                     const Maybe<int>& executionContextId, Maybe<String16>*,
+                     Maybe<protocol::Runtime::ExceptionDetails>*) override;
+  void runScript(const String16&, const Maybe<int>& executionContextId,
+                 const Maybe<String16>& objectGroup, const Maybe<bool>& silent,
+                 const Maybe<bool>& includeCommandLineAPI,
+                 const Maybe<bool>& returnByValue,
+                 const Maybe<bool>& generatePreview,
+                 const Maybe<bool>& awaitPromise,
+                 std::unique_ptr<RunScriptCallback>) override;
+
+  void reset();
+  void reportExecutionContextCreated(InspectedContext*);
+  void reportExecutionContextDestroyed(InspectedContext*);
+  void inspect(std::unique_ptr<protocol::Runtime::RemoteObject> objectToInspect,
+               std::unique_ptr<protocol::DictionaryValue> hints);
+  void messageAdded(V8ConsoleMessage*);
+  bool enabled() const { return m_enabled; }
+
+ private:
+  bool reportMessage(V8ConsoleMessage*, bool generatePreview);
+
+  V8InspectorSessionImpl* m_session;
+  protocol::DictionaryValue* m_state;
+  protocol::Runtime::Frontend m_frontend;
+  V8InspectorImpl* m_inspector;
+  bool m_enabled;
+  protocol::HashMap<String16, std::unique_ptr<v8::Global<v8::Script>>>
+      m_compiledScripts;
+
+  DISALLOW_COPY_AND_ASSIGN(V8RuntimeAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8RUNTIMEAGENTIMPL_H_
diff --git a/src/inspector/v8-schema-agent-impl.cc b/src/inspector/v8-schema-agent-impl.cc
new file mode 100644
index 0000000..9eed5bd
--- /dev/null
+++ b/src/inspector/v8-schema-agent-impl.cc
@@ -0,0 +1,29 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-schema-agent-impl.h"
+
+#include "src/inspector/protocol/Protocol.h"
+#include "src/inspector/v8-inspector-session-impl.h"
+
+namespace v8_inspector {
+
+V8SchemaAgentImpl::V8SchemaAgentImpl(V8InspectorSessionImpl* session,
+                                     protocol::FrontendChannel* frontendChannel,
+                                     protocol::DictionaryValue* state)
+    : m_session(session), m_frontend(frontendChannel) {}
+
+V8SchemaAgentImpl::~V8SchemaAgentImpl() {}
+
+void V8SchemaAgentImpl::getDomains(
+    ErrorString*,
+    std::unique_ptr<protocol::Array<protocol::Schema::Domain>>* result) {
+  std::vector<std::unique_ptr<protocol::Schema::Domain>> domains =
+      m_session->supportedDomainsImpl();
+  *result = protocol::Array<protocol::Schema::Domain>::create();
+  for (size_t i = 0; i < domains.size(); ++i)
+    (*result)->addItem(std::move(domains[i]));
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-schema-agent-impl.h b/src/inspector/v8-schema-agent-impl.h
new file mode 100644
index 0000000..6150201
--- /dev/null
+++ b/src/inspector/v8-schema-agent-impl.h
@@ -0,0 +1,37 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8SCHEMAAGENTIMPL_H_
+#define V8_INSPECTOR_V8SCHEMAAGENTIMPL_H_
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Schema.h"
+
+namespace v8_inspector {
+
+class V8InspectorSessionImpl;
+
+using protocol::ErrorString;
+
+class V8SchemaAgentImpl : public protocol::Schema::Backend {
+ public:
+  V8SchemaAgentImpl(V8InspectorSessionImpl*, protocol::FrontendChannel*,
+                    protocol::DictionaryValue* state);
+  ~V8SchemaAgentImpl() override;
+
+  void getDomains(
+      ErrorString*,
+      std::unique_ptr<protocol::Array<protocol::Schema::Domain>>*) override;
+
+ private:
+  V8InspectorSessionImpl* m_session;
+  protocol::Schema::Frontend m_frontend;
+
+  DISALLOW_COPY_AND_ASSIGN(V8SchemaAgentImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8SCHEMAAGENTIMPL_H_
diff --git a/src/inspector/v8-stack-trace-impl.cc b/src/inspector/v8-stack-trace-impl.cc
new file mode 100644
index 0000000..1a38c6d
--- /dev/null
+++ b/src/inspector/v8-stack-trace-impl.cc
@@ -0,0 +1,281 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-stack-trace-impl.h"
+
+#include "src/inspector/string-util.h"
+#include "src/inspector/v8-debugger.h"
+#include "src/inspector/v8-inspector-impl.h"
+#include "src/inspector/v8-profiler-agent-impl.h"
+
+#include "include/v8-debug.h"
+#include "include/v8-profiler.h"
+#include "include/v8-version.h"
+
+namespace v8_inspector {
+
+namespace {
+
+static const v8::StackTrace::StackTraceOptions stackTraceOptions =
+    static_cast<v8::StackTrace::StackTraceOptions>(
+        v8::StackTrace::kLineNumber | v8::StackTrace::kColumnOffset |
+        v8::StackTrace::kScriptId | v8::StackTrace::kScriptNameOrSourceURL |
+        v8::StackTrace::kFunctionName);
+
+V8StackTraceImpl::Frame toFrame(v8::Local<v8::StackFrame> frame) {
+  String16 scriptId = String16::fromInteger(frame->GetScriptId());
+  String16 sourceName;
+  v8::Local<v8::String> sourceNameValue(frame->GetScriptNameOrSourceURL());
+  if (!sourceNameValue.IsEmpty())
+    sourceName = toProtocolString(sourceNameValue);
+
+  String16 functionName;
+  v8::Local<v8::String> functionNameValue(frame->GetFunctionName());
+  if (!functionNameValue.IsEmpty())
+    functionName = toProtocolString(functionNameValue);
+
+  int sourceLineNumber = frame->GetLineNumber();
+  int sourceColumn = frame->GetColumn();
+  return V8StackTraceImpl::Frame(functionName, scriptId, sourceName,
+                                 sourceLineNumber, sourceColumn);
+}
+
+void toFramesVector(v8::Local<v8::StackTrace> stackTrace,
+                    std::vector<V8StackTraceImpl::Frame>& frames,
+                    size_t maxStackSize, v8::Isolate* isolate) {
+  DCHECK(isolate->InContext());
+  int frameCount = stackTrace->GetFrameCount();
+  if (frameCount > static_cast<int>(maxStackSize))
+    frameCount = static_cast<int>(maxStackSize);
+  for (int i = 0; i < frameCount; i++) {
+    v8::Local<v8::StackFrame> stackFrame = stackTrace->GetFrame(i);
+    frames.push_back(toFrame(stackFrame));
+  }
+}
+
+}  // namespace
+
+V8StackTraceImpl::Frame::Frame()
+    : m_functionName("undefined"),
+      m_scriptId(""),
+      m_scriptName("undefined"),
+      m_lineNumber(0),
+      m_columnNumber(0) {}
+
+V8StackTraceImpl::Frame::Frame(const String16& functionName,
+                               const String16& scriptId,
+                               const String16& scriptName, int lineNumber,
+                               int column)
+    : m_functionName(functionName),
+      m_scriptId(scriptId),
+      m_scriptName(scriptName),
+      m_lineNumber(lineNumber),
+      m_columnNumber(column) {
+  DCHECK(m_lineNumber != v8::Message::kNoLineNumberInfo);
+  DCHECK(m_columnNumber != v8::Message::kNoColumnInfo);
+}
+
+V8StackTraceImpl::Frame::~Frame() {}
+
+// buildInspectorObject() and SourceLocation's toTracedValue() should set the
+// same fields. If either of them is modified, the other should also be
+// modified.
+std::unique_ptr<protocol::Runtime::CallFrame>
+V8StackTraceImpl::Frame::buildInspectorObject() const {
+  return protocol::Runtime::CallFrame::create()
+      .setFunctionName(m_functionName)
+      .setScriptId(m_scriptId)
+      .setUrl(m_scriptName)
+      .setLineNumber(m_lineNumber - 1)
+      .setColumnNumber(m_columnNumber - 1)
+      .build();
+}
+
+V8StackTraceImpl::Frame V8StackTraceImpl::Frame::clone() const {
+  return Frame(m_functionName, m_scriptId, m_scriptName, m_lineNumber,
+               m_columnNumber);
+}
+
+// static
+void V8StackTraceImpl::setCaptureStackTraceForUncaughtExceptions(
+    v8::Isolate* isolate, bool capture) {
+  isolate->SetCaptureStackTraceForUncaughtExceptions(
+      capture, V8StackTraceImpl::maxCallStackSizeToCapture, stackTraceOptions);
+}
+
+// static
+std::unique_ptr<V8StackTraceImpl> V8StackTraceImpl::create(
+    V8Debugger* debugger, int contextGroupId,
+    v8::Local<v8::StackTrace> stackTrace, size_t maxStackSize,
+    const String16& description) {
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope scope(isolate);
+  std::vector<V8StackTraceImpl::Frame> frames;
+  if (!stackTrace.IsEmpty())
+    toFramesVector(stackTrace, frames, maxStackSize, isolate);
+
+  int maxAsyncCallChainDepth = 1;
+  V8StackTraceImpl* asyncCallChain = nullptr;
+  if (debugger && maxStackSize > 1) {
+    asyncCallChain = debugger->currentAsyncCallChain();
+    maxAsyncCallChainDepth = debugger->maxAsyncCallChainDepth();
+  }
+  // Do not accidentally append async call chain from another group. This
+  // should not happen if we have proper instrumentation, but let's
+  // double-check to be safe.
+  if (contextGroupId && asyncCallChain && asyncCallChain->m_contextGroupId &&
+      asyncCallChain->m_contextGroupId != contextGroupId) {
+    asyncCallChain = nullptr;
+    maxAsyncCallChainDepth = 1;
+  }
+
+  // Only the top stack in the chain may be empty, so ensure that the second
+  // stack is non-empty (it's the top of the appended chain).
+  if (asyncCallChain && asyncCallChain->isEmpty())
+    asyncCallChain = asyncCallChain->m_parent.get();
+
+  if (stackTrace.IsEmpty() && !asyncCallChain) return nullptr;
+
+  std::unique_ptr<V8StackTraceImpl> result(new V8StackTraceImpl(
+      contextGroupId, description, frames,
+      asyncCallChain ? asyncCallChain->cloneImpl() : nullptr));
+
+  // Crop to not exceed maxAsyncCallChainDepth.
+  V8StackTraceImpl* deepest = result.get();
+  while (deepest && maxAsyncCallChainDepth) {
+    deepest = deepest->m_parent.get();
+    maxAsyncCallChainDepth--;
+  }
+  if (deepest) deepest->m_parent.reset();
+
+  return result;
+}
+
+// static
+std::unique_ptr<V8StackTraceImpl> V8StackTraceImpl::capture(
+    V8Debugger* debugger, int contextGroupId, size_t maxStackSize,
+    const String16& description) {
+  v8::Isolate* isolate = v8::Isolate::GetCurrent();
+  v8::HandleScope handleScope(isolate);
+  v8::Local<v8::StackTrace> stackTrace;
+  if (isolate->InContext()) {
+    if (debugger) {
+      V8InspectorImpl* inspector = debugger->inspector();
+      V8ProfilerAgentImpl* profilerAgent =
+          inspector->enabledProfilerAgentForGroup(contextGroupId);
+      if (profilerAgent) profilerAgent->collectSample();
+    }
+    stackTrace = v8::StackTrace::CurrentStackTrace(
+        isolate, static_cast<int>(maxStackSize), stackTraceOptions);
+  }
+  return V8StackTraceImpl::create(debugger, contextGroupId, stackTrace,
+                                  maxStackSize, description);
+}
+
+std::unique_ptr<V8StackTraceImpl> V8StackTraceImpl::cloneImpl() {
+  std::vector<Frame> framesCopy(m_frames);
+  return wrapUnique(
+      new V8StackTraceImpl(m_contextGroupId, m_description, framesCopy,
+                           m_parent ? m_parent->cloneImpl() : nullptr));
+}
+
+std::unique_ptr<V8StackTrace> V8StackTraceImpl::clone() {
+  std::vector<Frame> frames;
+  for (size_t i = 0; i < m_frames.size(); i++)
+    frames.push_back(m_frames.at(i).clone());
+  return wrapUnique(
+      new V8StackTraceImpl(m_contextGroupId, m_description, frames, nullptr));
+}
+
+V8StackTraceImpl::V8StackTraceImpl(int contextGroupId,
+                                   const String16& description,
+                                   std::vector<Frame>& frames,
+                                   std::unique_ptr<V8StackTraceImpl> parent)
+    : m_contextGroupId(contextGroupId),
+      m_description(description),
+      m_parent(std::move(parent)) {
+  m_frames.swap(frames);
+}
+
+V8StackTraceImpl::~V8StackTraceImpl() {}
+
+StringView V8StackTraceImpl::topSourceURL() const {
+  DCHECK(m_frames.size());
+  return toStringView(m_frames[0].m_scriptName);
+}
+
+int V8StackTraceImpl::topLineNumber() const {
+  DCHECK(m_frames.size());
+  return m_frames[0].m_lineNumber;
+}
+
+int V8StackTraceImpl::topColumnNumber() const {
+  DCHECK(m_frames.size());
+  return m_frames[0].m_columnNumber;
+}
+
+StringView V8StackTraceImpl::topFunctionName() const {
+  DCHECK(m_frames.size());
+  return toStringView(m_frames[0].m_functionName);
+}
+
+StringView V8StackTraceImpl::topScriptId() const {
+  DCHECK(m_frames.size());
+  return toStringView(m_frames[0].m_scriptId);
+}
+
+std::unique_ptr<protocol::Runtime::StackTrace>
+V8StackTraceImpl::buildInspectorObjectImpl() const {
+  std::unique_ptr<protocol::Array<protocol::Runtime::CallFrame>> frames =
+      protocol::Array<protocol::Runtime::CallFrame>::create();
+  for (size_t i = 0; i < m_frames.size(); i++)
+    frames->addItem(m_frames.at(i).buildInspectorObject());
+
+  std::unique_ptr<protocol::Runtime::StackTrace> stackTrace =
+      protocol::Runtime::StackTrace::create()
+          .setCallFrames(std::move(frames))
+          .build();
+  if (!m_description.isEmpty()) stackTrace->setDescription(m_description);
+  if (m_parent) stackTrace->setParent(m_parent->buildInspectorObjectImpl());
+  return stackTrace;
+}
+
+std::unique_ptr<protocol::Runtime::StackTrace>
+V8StackTraceImpl::buildInspectorObjectForTail(V8Debugger* debugger) const {
+  v8::HandleScope handleScope(v8::Isolate::GetCurrent());
+  // The next call collapses a possibly empty top stack and enforces
+  // maxAsyncCallChainDepth.
+  std::unique_ptr<V8StackTraceImpl> fullChain = V8StackTraceImpl::create(
+      debugger, m_contextGroupId, v8::Local<v8::StackTrace>(),
+      V8StackTraceImpl::maxCallStackSizeToCapture);
+  if (!fullChain || !fullChain->m_parent) return nullptr;
+  return fullChain->m_parent->buildInspectorObjectImpl();
+}
+
+std::unique_ptr<protocol::Runtime::API::StackTrace>
+V8StackTraceImpl::buildInspectorObject() const {
+  return buildInspectorObjectImpl();
+}
+
+std::unique_ptr<StringBuffer> V8StackTraceImpl::toString() const {
+  String16Builder stackTrace;
+  for (size_t i = 0; i < m_frames.size(); ++i) {
+    const Frame& frame = m_frames[i];
+    stackTrace.append("\n    at " + (frame.functionName().length()
+                                         ? frame.functionName()
+                                         : "(anonymous function)"));
+    stackTrace.append(" (");
+    stackTrace.append(frame.sourceURL());
+    stackTrace.append(':');
+    stackTrace.append(String16::fromInteger(frame.lineNumber()));
+    stackTrace.append(':');
+    stackTrace.append(String16::fromInteger(frame.columnNumber()));
+    stackTrace.append(')');
+  }
+  String16 string = stackTrace.toString();
+  return StringBufferImpl::adopt(string);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-stack-trace-impl.h b/src/inspector/v8-stack-trace-impl.h
new file mode 100644
index 0000000..f0a452e
--- /dev/null
+++ b/src/inspector/v8-stack-trace-impl.h
@@ -0,0 +1,99 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8STACKTRACEIMPL_H_
+#define V8_INSPECTOR_V8STACKTRACEIMPL_H_
+
+#include <vector>
+
+#include "src/base/macros.h"
+#include "src/inspector/protocol/Forward.h"
+#include "src/inspector/protocol/Runtime.h"
+
+#include "include/v8-inspector.h"
+
+namespace v8_inspector {
+
+class TracedValue;
+class V8Debugger;
+
+// Note: an async stack trace may have an empty top stack with a non-empty
+// tail, indicating that the current native-only state had some async story.
+// On the other hand, any non-top async stack is guaranteed to be non-empty.
+class V8StackTraceImpl final : public V8StackTrace {
+ public:
+  static const size_t maxCallStackSizeToCapture = 200;
+
+  class Frame {
+   public:
+    Frame();
+    Frame(const String16& functionName, const String16& scriptId,
+          const String16& scriptName, int lineNumber, int column = 0);
+    ~Frame();
+
+    const String16& functionName() const { return m_functionName; }
+    const String16& scriptId() const { return m_scriptId; }
+    const String16& sourceURL() const { return m_scriptName; }
+    int lineNumber() const { return m_lineNumber; }
+    int columnNumber() const { return m_columnNumber; }
+    Frame clone() const;
+
+   private:
+    friend class V8StackTraceImpl;
+    std::unique_ptr<protocol::Runtime::CallFrame> buildInspectorObject() const;
+    void toTracedValue(TracedValue*) const;
+
+    String16 m_functionName;
+    String16 m_scriptId;
+    String16 m_scriptName;
+    int m_lineNumber;
+    int m_columnNumber;
+  };
+
+  static void setCaptureStackTraceForUncaughtExceptions(v8::Isolate*,
+                                                        bool capture);
+  static std::unique_ptr<V8StackTraceImpl> create(
+      V8Debugger*, int contextGroupId, v8::Local<v8::StackTrace>,
+      size_t maxStackSize, const String16& description = String16());
+  static std::unique_ptr<V8StackTraceImpl> capture(
+      V8Debugger*, int contextGroupId, size_t maxStackSize,
+      const String16& description = String16());
+
+  // This method drops the async chain. Use cloneImpl() instead.
+  std::unique_ptr<V8StackTrace> clone() override;
+  std::unique_ptr<V8StackTraceImpl> cloneImpl();
+  std::unique_ptr<protocol::Runtime::StackTrace> buildInspectorObjectForTail(
+      V8Debugger*) const;
+  std::unique_ptr<protocol::Runtime::StackTrace> buildInspectorObjectImpl()
+      const;
+  ~V8StackTraceImpl() override;
+
+  // V8StackTrace implementation.
+  bool isEmpty() const override { return m_frames.empty(); }
+  StringView topSourceURL() const override;
+  int topLineNumber() const override;
+  int topColumnNumber() const override;
+  StringView topScriptId() const override;
+  StringView topFunctionName() const override;
+  std::unique_ptr<protocol::Runtime::API::StackTrace> buildInspectorObject()
+      const override;
+  std::unique_ptr<StringBuffer> toString() const override;
+
+ private:
+  V8StackTraceImpl(int contextGroupId, const String16& description,
+                   std::vector<Frame>& frames,
+                   std::unique_ptr<V8StackTraceImpl> parent);
+
+  int m_contextGroupId;
+  String16 m_description;
+  std::vector<Frame> m_frames;
+  std::unique_ptr<V8StackTraceImpl> m_parent;
+
+  DISALLOW_COPY_AND_ASSIGN(V8StackTraceImpl);
+};
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8STACKTRACEIMPL_H_
diff --git a/src/inspector/v8-value-copier.cc b/src/inspector/v8-value-copier.cc
new file mode 100644
index 0000000..09d86b7
--- /dev/null
+++ b/src/inspector/v8-value-copier.cc
@@ -0,0 +1,110 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/inspector/v8-value-copier.h"
+
+namespace v8_inspector {
+
+namespace {
+
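+// Hard limits on recursion depth and on the total number of copied values,
+// guarding against pathologically deep or large object graphs.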
+static const int kMaxDepth = 20;
+static const int kMaxCalls = 1000;
+
+class V8ValueCopier {
+ public:
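+  // Copies |value| from m_from into m_to. Primitives and values created
+  // outside m_from are returned as-is; arrays and other objects are rebuilt
+  // in m_to with a null prototype, copying elements and own properties
+  // recursively. Exceeding kMaxDepth or kMaxCalls, or encountering a value
+  // that cannot be copied, yields an empty MaybeLocal.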
+  v8::MaybeLocal<v8::Value> copy(v8::Local<v8::Value> value, int depth) {
+    if (++m_calls > kMaxCalls || depth > kMaxDepth)
+      return v8::MaybeLocal<v8::Value>();
+
+    if (value.IsEmpty()) return v8::MaybeLocal<v8::Value>();
+    if (value->IsNull() || value->IsUndefined() || value->IsBoolean() ||
+        value->IsString() || value->IsNumber())
+      return value;
+    if (!value->IsObject()) return v8::MaybeLocal<v8::Value>();
+    v8::Local<v8::Object> object = value.As<v8::Object>();
+    if (object->CreationContext() != m_from) return value;
+
+    if (object->IsArray()) {
+      v8::Local<v8::Array> array = object.As<v8::Array>();
+      v8::Local<v8::Array> result = v8::Array::New(m_isolate, array->Length());
+      if (!result->SetPrototype(m_to, v8::Null(m_isolate)).FromMaybe(false))
+        return v8::MaybeLocal<v8::Value>();
+      for (uint32_t i = 0; i < array->Length(); ++i) {
+        v8::Local<v8::Value> item;
+        if (!array->Get(m_from, i).ToLocal(&item))
+          return v8::MaybeLocal<v8::Value>();
+        v8::Local<v8::Value> copied;
+        if (!copy(item, depth + 1).ToLocal(&copied))
+          return v8::MaybeLocal<v8::Value>();
+        if (!createDataProperty(m_to, result, i, copied).FromMaybe(false))
+          return v8::MaybeLocal<v8::Value>();
+      }
+      return result;
+    }
+
+    v8::Local<v8::Object> result = v8::Object::New(m_isolate);
+    if (!result->SetPrototype(m_to, v8::Null(m_isolate)).FromMaybe(false))
+      return v8::MaybeLocal<v8::Value>();
+    v8::Local<v8::Array> properties;
+    if (!object->GetOwnPropertyNames(m_from).ToLocal(&properties))
+      return v8::MaybeLocal<v8::Value>();
+    for (uint32_t i = 0; i < properties->Length(); ++i) {
+      v8::Local<v8::Value> name;
+      if (!properties->Get(m_from, i).ToLocal(&name) || !name->IsString())
+        return v8::MaybeLocal<v8::Value>();
+      v8::Local<v8::Value> property;
+      if (!object->Get(m_from, name).ToLocal(&property))
+        return v8::MaybeLocal<v8::Value>();
+      v8::Local<v8::Value> copied;
+      if (!copy(property, depth + 1).ToLocal(&copied))
+        return v8::MaybeLocal<v8::Value>();
+      if (!createDataProperty(m_to, result, v8::Local<v8::String>::Cast(name),
+                              copied)
+               .FromMaybe(false))
+        return v8::MaybeLocal<v8::Value>();
+    }
+    return result;
+  }
+
+  v8::Isolate* m_isolate;
+  v8::Local<v8::Context> m_from;
+  v8::Local<v8::Context> m_to;
+  int m_calls;
+};
+
+}  // namespace
+
+v8::MaybeLocal<v8::Value> copyValueFromDebuggerContext(
+    v8::Isolate* isolate, v8::Local<v8::Context> debuggerContext,
+    v8::Local<v8::Context> toContext, v8::Local<v8::Value> value) {
+  V8ValueCopier copier;
+  copier.m_isolate = isolate;
+  copier.m_from = debuggerContext;
+  copier.m_to = toContext;
+  copier.m_calls = 0;
+  return copier.copy(value, 0);
+}
+
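+// Define a property without ever entering JavaScript: any attempt to run
+// script while the property is being created throws, and the local TryCatch
+// keeps that exception from leaking to the caller.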
+v8::Maybe<bool> createDataProperty(v8::Local<v8::Context> context,
+                                   v8::Local<v8::Object> object,
+                                   v8::Local<v8::Name> key,
+                                   v8::Local<v8::Value> value) {
+  v8::TryCatch tryCatch(context->GetIsolate());
+  v8::Isolate::DisallowJavascriptExecutionScope throwJs(
+      context->GetIsolate(),
+      v8::Isolate::DisallowJavascriptExecutionScope::THROW_ON_FAILURE);
+  return object->CreateDataProperty(context, key, value);
+}
+
+v8::Maybe<bool> createDataProperty(v8::Local<v8::Context> context,
+                                   v8::Local<v8::Array> array, int index,
+                                   v8::Local<v8::Value> value) {
+  v8::TryCatch tryCatch(context->GetIsolate());
+  v8::Isolate::DisallowJavascriptExecutionScope throwJs(
+      context->GetIsolate(),
+      v8::Isolate::DisallowJavascriptExecutionScope::THROW_ON_FAILURE);
+  return array->CreateDataProperty(context, index, value);
+}
+
+}  // namespace v8_inspector
diff --git a/src/inspector/v8-value-copier.h b/src/inspector/v8-value-copier.h
new file mode 100644
index 0000000..c24a564
--- /dev/null
+++ b/src/inspector/v8-value-copier.h
@@ -0,0 +1,24 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INSPECTOR_V8VALUECOPIER_H_
+#define V8_INSPECTOR_V8VALUECOPIER_H_
+
+#include "include/v8.h"
+
+namespace v8_inspector {
+
+v8::MaybeLocal<v8::Value> copyValueFromDebuggerContext(
+    v8::Isolate*, v8::Local<v8::Context> debuggerContext,
+    v8::Local<v8::Context> toContext, v8::Local<v8::Value>);
+v8::Maybe<bool> createDataProperty(v8::Local<v8::Context>,
+                                   v8::Local<v8::Object>,
+                                   v8::Local<v8::Name> key,
+                                   v8::Local<v8::Value>);
+v8::Maybe<bool> createDataProperty(v8::Local<v8::Context>, v8::Local<v8::Array>,
+                                   int index, v8::Local<v8::Value>);
+
+}  // namespace v8_inspector
+
+#endif  // V8_INSPECTOR_V8VALUECOPIER_H_
diff --git a/src/interface-descriptors.cc b/src/interface-descriptors.cc
index a16cae7..2628b9f 100644
--- a/src/interface-descriptors.cc
+++ b/src/interface-descriptors.cc
@@ -7,41 +7,6 @@
 namespace v8 {
 namespace internal {
 
-namespace {
-// Constructors for common combined semantic and representation types.
-Type* SmiType(Zone* zone) {
-  return Type::Intersect(Type::SignedSmall(), Type::TaggedSigned(), zone);
-}
-
-
-Type* UntaggedIntegral32(Zone* zone) {
-  return Type::Intersect(Type::Signed32(), Type::UntaggedIntegral32(), zone);
-}
-
-
-Type* AnyTagged(Zone* zone) {
-  return Type::Intersect(
-      Type::Any(),
-      Type::Union(Type::TaggedPointer(), Type::TaggedSigned(), zone), zone);
-}
-
-
-Type* ExternalPointer(Zone* zone) {
-  return Type::Intersect(Type::Internal(), Type::UntaggedPointer(), zone);
-}
-}  // namespace
-
-FunctionType* CallInterfaceDescriptor::BuildDefaultFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), parameter_count, zone)
-          ->AsFunction();
-  while (parameter_count-- != 0) {
-    function->InitParameter(parameter_count, AnyTagged(zone));
-  }
-  return function;
-}
 
 void CallInterfaceDescriptorData::InitializePlatformSpecific(
     int register_parameter_count, const Register* registers,
@@ -56,6 +21,22 @@
   }
 }
 
+void CallInterfaceDescriptorData::InitializePlatformIndependent(
+    int parameter_count, int extra_parameter_count,
+    const MachineType* machine_types) {
+  // InterfaceDescriptor owns a copy of the MachineType array.
+  // We only care about parameters, not receiver and result.
+  param_count_ = parameter_count + extra_parameter_count;
+  machine_types_.reset(NewArray<MachineType>(param_count_));
+  for (int i = 0; i < param_count_; i++) {
+    if (machine_types == NULL || i >= parameter_count) {
+      machine_types_[i] = MachineType::AnyTagged();
+    } else {
+      machine_types_[i] = machine_types[i];
+    }
+  }
+}
+
 const char* CallInterfaceDescriptor::DebugName(Isolate* isolate) const {
   CallInterfaceDescriptorData* start = isolate->call_descriptor_data(0);
   size_t index = data_ - start;
@@ -79,15 +60,12 @@
   data->InitializePlatformSpecific(0, nullptr);
 }
 
-FunctionType*
-FastNewFunctionContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), 2, zone)->AsFunction();
-  function->InitParameter(0, AnyTagged(zone));
-  function->InitParameter(1, UntaggedIntegral32(zone));
-  return function;
+void FastNewFunctionContextDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 void FastNewFunctionContextDescriptor::InitializePlatformSpecific(
@@ -96,33 +74,28 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType* LoadDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kReceiver, AnyTagged(zone));
-  function->InitParameter(kName, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  return function;
+void LoadDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kReceiver, kName, kSlot
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::AnyTagged(),
+                                 MachineType::TaggedSigned()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-
 void LoadDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {ReceiverRegister(), NameRegister(), SlotRegister()};
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType* LoadGlobalDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kSlot, SmiType(zone));
-  return function;
+void LoadGlobalDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kSlot
+  MachineType machine_types[] = {MachineType::TaggedSigned()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 void LoadGlobalDescriptor::InitializePlatformSpecific(
@@ -131,16 +104,13 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType*
-LoadGlobalWithVectorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kSlot, SmiType(zone));
-  function->InitParameter(kVector, AnyTagged(zone));
-  return function;
+void LoadGlobalWithVectorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kSlot, kVector
+  MachineType machine_types[] = {MachineType::TaggedSigned(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 void LoadGlobalWithVectorDescriptor::InitializePlatformSpecific(
@@ -150,76 +120,77 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType* StoreDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kReceiver, AnyTagged(zone));
-  function->InitParameter(kName, AnyTagged(zone));
-  function->InitParameter(kValue, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  return function;
+void StoreDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kReceiver, kName, kValue, kSlot
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::AnyTagged(),
+      MachineType::AnyTagged(), MachineType::TaggedSigned()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 void StoreDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
                           SlotRegister()};
-  data->InitializePlatformSpecific(arraysize(registers), registers);
-}
 
+  int len = arraysize(registers) - kStackArgumentsCount;
+  data->InitializePlatformSpecific(len, registers);
+}
 
 void StoreTransitionDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
-  Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
-                          MapRegister()};
-
-  data->InitializePlatformSpecific(arraysize(registers), registers);
+  Register registers[] = {
+      ReceiverRegister(), NameRegister(), MapRegister(),
+      ValueRegister(),    SlotRegister(), VectorRegister(),
+  };
+  int len = arraysize(registers) - kStackArgumentsCount;
+  data->InitializePlatformSpecific(len, registers);
 }
 
-
-void VectorStoreTransitionDescriptor::InitializePlatformSpecific(
+void StoreTransitionDescriptor::InitializePlatformIndependent(
     CallInterfaceDescriptorData* data) {
-  if (SlotRegister().is(no_reg)) {
-    Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
-                            MapRegister(), VectorRegister()};
-    data->InitializePlatformSpecific(arraysize(registers), registers);
-  } else {
-    Register registers[] = {ReceiverRegister(), NameRegister(),
-                            ValueRegister(),    MapRegister(),
-                            SlotRegister(),     VectorRegister()};
-    data->InitializePlatformSpecific(arraysize(registers), registers);
-  }
+  // kReceiver, kName, kMap, kValue, kSlot, kVector
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(),    MachineType::AnyTagged(),
+      MachineType::AnyTagged(),    MachineType::AnyTagged(),
+      MachineType::TaggedSigned(), MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-StoreTransitionDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kReceiver, AnyTagged(zone));
-  function->InitParameter(kName, AnyTagged(zone));
-  function->InitParameter(kValue, AnyTagged(zone));
-  function->InitParameter(kMap, AnyTagged(zone));
-  return function;
+void StoreNamedTransitionDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kReceiver, kFieldOffset, kMap, kValue, kSlot, kVector, kName
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(),    MachineType::TaggedSigned(),
+      MachineType::AnyTagged(),    MachineType::AnyTagged(),
+      MachineType::TaggedSigned(), MachineType::AnyTagged(),
+      MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-StoreGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kSlot, UntaggedIntegral32(zone));
-  function->InitParameter(kValue, AnyTagged(zone));
-  return function;
+void StoreNamedTransitionDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      ReceiverRegister(), FieldOffsetRegister(), MapRegister(),
+      ValueRegister(),    SlotRegister(),        VectorRegister(),
+      NameRegister(),
+  };
+  int len = arraysize(registers) - kStackArgumentsCount;
+  data->InitializePlatformSpecific(len, registers);
 }
 
+void StoreGlobalViaContextDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kSlot, kValue
+  MachineType machine_types[] = {MachineType::Int32(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
+}
 
 void StoreGlobalViaContextDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
@@ -252,18 +223,14 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType*
-LoadWithVectorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kReceiver, AnyTagged(zone));
-  function->InitParameter(kName, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  function->InitParameter(kVector, AnyTagged(zone));
-  return function;
+void LoadWithVectorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kReceiver, kName, kSlot, kVector
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::AnyTagged(),
+      MachineType::TaggedSigned(), MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 
@@ -274,63 +241,33 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType*
-VectorStoreTransitionDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  bool has_slot = !VectorStoreTransitionDescriptor::SlotRegister().is(no_reg);
-  int arg_count = has_slot ? 6 : 5;
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), arg_count, zone)
-          ->AsFunction();
-  int index = 0;
-  // TODO(ishell): use ParameterIndices here
-  function->InitParameter(index++, AnyTagged(zone));  // receiver
-  function->InitParameter(index++, AnyTagged(zone));  // name
-  function->InitParameter(index++, AnyTagged(zone));  // value
-  function->InitParameter(index++, AnyTagged(zone));  // map
-  if (has_slot) {
-    function->InitParameter(index++, SmiType(zone));  // slot
-  }
-  function->InitParameter(index++, AnyTagged(zone));  // vector
-  return function;
-}
-
-FunctionType*
-StoreWithVectorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kReceiver, AnyTagged(zone));
-  function->InitParameter(kName, AnyTagged(zone));
-  function->InitParameter(kValue, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  function->InitParameter(kVector, AnyTagged(zone));
-  return function;
+void StoreWithVectorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kReceiver, kName, kValue, kSlot, kVector
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::AnyTagged(),
+      MachineType::AnyTagged(), MachineType::TaggedSigned(),
+      MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 void StoreWithVectorDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {ReceiverRegister(), NameRegister(), ValueRegister(),
                           SlotRegister(), VectorRegister()};
-  data->InitializePlatformSpecific(arraysize(registers), registers);
+  int len = arraysize(registers) - kStackArgumentsCount;
+  data->InitializePlatformSpecific(len, registers);
 }
 
-FunctionType*
-BinaryOpWithVectorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  DCHECK_EQ(parameter_count, kParameterCount);
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kLeft, AnyTagged(zone));
-  function->InitParameter(kRight, AnyTagged(zone));
-  function->InitParameter(kSlot, UntaggedIntegral32(zone));
-  function->InitParameter(kVector, AnyTagged(zone));
-  return function;
+void BinaryOpWithVectorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kLeft, kRight, kSlot, kVector
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::AnyTagged(), MachineType::Int32(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 const Register ApiGetterDescriptor::ReceiverRegister() {
@@ -349,291 +286,204 @@
   data->InitializePlatformSpecific(0, nullptr);
 }
 
-CallInterfaceDescriptor OnStackArgsDescriptorBase::ForArgs(
-    Isolate* isolate, int parameter_count) {
-  switch (parameter_count) {
-    case 1:
-      return OnStackWith1ArgsDescriptor(isolate);
-    case 2:
-      return OnStackWith2ArgsDescriptor(isolate);
-    case 3:
-      return OnStackWith3ArgsDescriptor(isolate);
-    case 4:
-      return OnStackWith4ArgsDescriptor(isolate);
-    case 5:
-      return OnStackWith5ArgsDescriptor(isolate);
-    case 6:
-      return OnStackWith6ArgsDescriptor(isolate);
-    case 7:
-      return OnStackWith7ArgsDescriptor(isolate);
-    default:
-      UNREACHABLE();
-      return VoidDescriptor(isolate);
-  }
-}
-
-FunctionType*
-OnStackArgsDescriptorBase::BuildCallInterfaceDescriptorFunctionTypeWithArg(
-    Isolate* isolate, int register_parameter_count, int parameter_count) {
-  DCHECK_EQ(0, register_parameter_count);
-  DCHECK_GT(parameter_count, 0);
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), AnyTagged(zone), parameter_count, zone)
-          ->AsFunction();
-  for (int i = 0; i < parameter_count; i++) {
-    function->InitParameter(i, AnyTagged(zone));
-  }
-  return function;
-}
-
-void OnStackArgsDescriptorBase::InitializePlatformSpecific(
-    CallInterfaceDescriptorData* data) {
-  data->InitializePlatformSpecific(0, nullptr);
-}
-
 void GrowArrayElementsDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {ObjectRegister(), KeyRegister()};
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-FunctionType*
-VarArgFunctionDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), AnyTagged(zone), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  return function;
+void VarArgFunctionDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kActualArgumentsCount
+  MachineType machine_types[] = {MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-FastCloneRegExpDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kClosure, AnyTagged(zone));
-  function->InitParameter(kLiteralIndex, SmiType(zone));
-  function->InitParameter(kPattern, AnyTagged(zone));
-  function->InitParameter(kFlags, AnyTagged(zone));
-  return function;
+void FastCloneRegExpDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kClosure, kLiteralIndex, kPattern, kFlags
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::TaggedSigned(),
+      MachineType::AnyTagged(), MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-FastCloneShallowArrayDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kClosure, AnyTagged(zone));
-  function->InitParameter(kLiteralIndex, SmiType(zone));
-  function->InitParameter(kConstantElements, AnyTagged(zone));
-  return function;
+void FastCloneShallowArrayDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kClosure, kLiteralIndex, kConstantElements
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::TaggedSigned(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-CreateAllocationSiteDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kVector, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  return function;
+void CreateAllocationSiteDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kVector, kSlot
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::TaggedSigned()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-CreateWeakCellDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kVector, AnyTagged(zone));
-  function->InitParameter(kSlot, SmiType(zone));
-  function->InitParameter(kValue, AnyTagged(zone));
-  return function;
+void CreateWeakCellDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kVector, kSlot, kValue
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::TaggedSigned(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-CallTrampolineDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  return function;
+void CallTrampolineDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kActualArgumentsCount
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType* ConstructStubDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, AnyTagged(zone));
-  function->InitParameter(kNewTarget, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  function->InitParameter(kAllocationSite, AnyTagged(zone));
-  return function;
+void ConstructStubDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kNewTarget, kActualArgumentsCount, kAllocationSite
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::AnyTagged(), MachineType::Int32(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-ConstructTrampolineDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, AnyTagged(zone));
-  function->InitParameter(kNewTarget, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  return function;
+void ConstructTrampolineDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kNewTarget, kActualArgumentsCount
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::AnyTagged(), MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-CallFunctionWithFeedbackDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kSlot, SmiType(zone));
-  return function;
+void CallFunctionWithFeedbackDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kSlot
+  MachineType machine_types[] = {MachineType::AnyTagged(),
+                                 MachineType::TaggedSigned()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType* CallFunctionWithFeedbackAndVectorDescriptor::
-    BuildCallInterfaceDescriptorFunctionType(Isolate* isolate,
-                                             int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kSlot, SmiType(zone));
-  function->InitParameter(kVector, AnyTagged(zone));
-  return function;
+void CallFunctionWithFeedbackAndVectorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kSlot, kVector
+  MachineType machine_types[] = {MachineType::TaggedPointer(),
+                                 MachineType::TaggedSigned(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-ArrayNoArgumentConstructorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kAllocationSite, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  function->InitParameter(kFunctionParameter, AnyTagged(zone));
-  return function;
+void ArrayNoArgumentConstructorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter
+  MachineType machine_types[] = {MachineType::TaggedPointer(),
+                                 MachineType::AnyTagged(), MachineType::Int32(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType* ArraySingleArgumentConstructorDescriptor::
-    BuildCallInterfaceDescriptorFunctionType(Isolate* isolate,
-                                             int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kAllocationSite, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  function->InitParameter(kFunctionParameter, AnyTagged(zone));
-  function->InitParameter(kArraySizeSmiParameter, AnyTagged(zone));
-  return function;
+void ArraySingleArgumentConstructorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kAllocationSite, kActualArgumentsCount, kFunctionParameter,
+  // kArraySizeSmiParameter
+  MachineType machine_types[] = {
+      MachineType::TaggedPointer(), MachineType::AnyTagged(),
+      MachineType::Int32(), MachineType::AnyTagged(), MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-ArrayNArgumentsConstructorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kAllocationSite, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  return function;
+void ArrayNArgumentsConstructorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kAllocationSite, kActualArgumentsCount
+  MachineType machine_types[] = {MachineType::TaggedPointer(),
+                                 MachineType::AnyTagged(),
+                                 MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-ArgumentAdaptorDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kFunction, Type::Receiver());
-  function->InitParameter(kNewTarget, AnyTagged(zone));
-  function->InitParameter(kActualArgumentsCount, UntaggedIntegral32(zone));
-  function->InitParameter(kExpectedArgumentsCount, UntaggedIntegral32(zone));
-  return function;
+void ArgumentAdaptorDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kNewTarget, kActualArgumentsCount, kExpectedArgumentsCount
+  MachineType machine_types[] = {MachineType::TaggedPointer(),
+                                 MachineType::AnyTagged(), MachineType::Int32(),
+                                 MachineType::Int32()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-CallInterfaceDescriptor ApiCallbackDescriptorBase::ForArgs(Isolate* isolate,
-                                                           int argc) {
-  switch (argc) {
-    case 0:
-      return ApiCallbackWith0ArgsDescriptor(isolate);
-    case 1:
-      return ApiCallbackWith1ArgsDescriptor(isolate);
-    case 2:
-      return ApiCallbackWith2ArgsDescriptor(isolate);
-    case 3:
-      return ApiCallbackWith3ArgsDescriptor(isolate);
-    case 4:
-      return ApiCallbackWith4ArgsDescriptor(isolate);
-    case 5:
-      return ApiCallbackWith5ArgsDescriptor(isolate);
-    case 6:
-      return ApiCallbackWith6ArgsDescriptor(isolate);
-    case 7:
-      return ApiCallbackWith7ArgsDescriptor(isolate);
-    default:
-      UNREACHABLE();
-      return VoidDescriptor(isolate);
-  }
+void ApiCallbackDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kFunction, kCallData, kHolder, kApiFunctionAddress
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::AnyTagged(),
+      MachineType::AnyTagged(), MachineType::Pointer()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-ApiCallbackDescriptorBase::BuildCallInterfaceDescriptorFunctionTypeWithArg(
-    Isolate* isolate, int parameter_count, int argc) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function = Type::Function(AnyTagged(zone), Type::Undefined(),
-                                          kParameterCount + argc, zone)
-                               ->AsFunction();
-  function->InitParameter(kFunction, AnyTagged(zone));
-  function->InitParameter(kCallData, AnyTagged(zone));
-  function->InitParameter(kHolder, AnyTagged(zone));
-  function->InitParameter(kApiFunctionAddress, ExternalPointer(zone));
-  for (int i = 0; i < argc; i++) {
-    function->InitParameter(i, AnyTagged(zone));
-  }
-  return function;
+void InterpreterDispatchDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kAccumulator, kBytecodeOffset, kBytecodeArray, kDispatchTable
+  MachineType machine_types[] = {
+      MachineType::AnyTagged(), MachineType::IntPtr(), MachineType::AnyTagged(),
+      MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
-FunctionType*
-InterpreterDispatchDescriptor::BuildCallInterfaceDescriptorFunctionType(
-    Isolate* isolate, int parameter_count) {
-  Zone* zone = isolate->interface_descriptor_zone();
-  FunctionType* function =
-      Type::Function(AnyTagged(zone), Type::Undefined(), kParameterCount, zone)
-          ->AsFunction();
-  function->InitParameter(kAccumulator, AnyTagged(zone));
-  function->InitParameter(kBytecodeOffset, UntaggedIntegral32(zone));
-  function->InitParameter(kBytecodeArray, AnyTagged(zone));
-  function->InitParameter(kDispatchTable, AnyTagged(zone));
-  return function;
+void InterpreterPushArgsAndCallDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kNumberOfArguments, kFirstArgument, kFunction
+  MachineType machine_types[] = {MachineType::Int32(), MachineType::Pointer(),
+                                 MachineType::AnyTagged()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
+}
+
+void InterpreterPushArgsAndConstructDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kNumberOfArguments, kNewTarget, kConstructor, kFeedbackElement,
+  // kFirstArgument
+  MachineType machine_types[] = {
+      MachineType::Int32(), MachineType::AnyTagged(), MachineType::AnyTagged(),
+      MachineType::AnyTagged(), MachineType::Pointer()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::
+    InitializePlatformIndependent(CallInterfaceDescriptorData* data) {
+  // kNumberOfArguments, kFunction, kFeedbackElement, kFirstArgument
+  MachineType machine_types[] = {MachineType::Int32(), MachineType::AnyTagged(),
+                                 MachineType::AnyTagged(),
+                                 MachineType::Pointer()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
+}
+
+void InterpreterCEntryDescriptor::InitializePlatformIndependent(
+    CallInterfaceDescriptorData* data) {
+  // kNumberOfArguments, kFirstArgument, kFunctionEntry
+  MachineType machine_types[] = {MachineType::Int32(), MachineType::Pointer(),
+                                 MachineType::Pointer()};
+  data->InitializePlatformIndependent(arraysize(machine_types), 0,
+                                      machine_types);
 }
 
 }  // namespace internal
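Note: every descriptor-specific override above funnels into
CallInterfaceDescriptorData::InitializePlatformIndependent, whose body sits
elsewhere in interface-descriptors.cc and is not part of this diff. A minimal
sketch of what that contract implies, based on the member declarations and the
comment in the header diff that follows and on V8's NewArray helper; treat it
as illustrative, not as the code that actually landed:

  void CallInterfaceDescriptorData::InitializePlatformIndependent(
      int parameter_count, int extra_parameter_count,
      const MachineType* machine_types) {
    // The typed parameters plus any untyped extras make up the full count.
    param_count_ = parameter_count + extra_parameter_count;
    machine_types_.reset(NewArray<MachineType>(param_count_));
    for (int i = 0; i < param_count_; i++) {
      // Entries past {machine_types} (or all of them when it is NULL) default
      // to MachineType::AnyTagged(), per the documented contract.
      if (machine_types == NULL || i >= parameter_count) {
        machine_types_[i] = MachineType::AnyTagged();
      } else {
        machine_types_[i] = machine_types[i];
      }
    }
  }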
diff --git a/src/interface-descriptors.h b/src/interface-descriptors.h
index af59bdb..09dc377 100644
--- a/src/interface-descriptors.h
+++ b/src/interface-descriptors.h
@@ -15,137 +15,130 @@
 
 class PlatformInterfaceDescriptor;
 
-#define INTERFACE_DESCRIPTOR_LIST(V)   \
-  V(Void)                              \
-  V(ContextOnly)                       \
-  V(OnStackWith1Args)                  \
-  V(OnStackWith2Args)                  \
-  V(OnStackWith3Args)                  \
-  V(OnStackWith4Args)                  \
-  V(OnStackWith5Args)                  \
-  V(OnStackWith6Args)                  \
-  V(OnStackWith7Args)                  \
-  V(Load)                              \
-  V(LoadWithVector)                    \
-  V(LoadGlobal)                        \
-  V(LoadGlobalWithVector)              \
-  V(Store)                             \
-  V(StoreWithVector)                   \
-  V(StoreTransition)                   \
-  V(VectorStoreTransition)             \
-  V(VarArgFunction)                    \
-  V(FastNewClosure)                    \
-  V(FastNewFunctionContext)            \
-  V(FastNewObject)                     \
-  V(FastNewRestParameter)              \
-  V(FastNewSloppyArguments)            \
-  V(FastNewStrictArguments)            \
-  V(TypeConversion)                    \
-  V(Typeof)                            \
-  V(FastCloneRegExp)                   \
-  V(FastCloneShallowArray)             \
-  V(FastCloneShallowObject)            \
-  V(CreateAllocationSite)              \
-  V(CreateWeakCell)                    \
-  V(CallFunction)                      \
-  V(CallFunctionWithFeedback)          \
-  V(CallFunctionWithFeedbackAndVector) \
-  V(CallConstruct)                     \
-  V(CallTrampoline)                    \
-  V(ConstructStub)                     \
-  V(ConstructTrampoline)               \
-  V(RegExpConstructResult)             \
-  V(CopyFastSmiOrObjectElements)       \
-  V(TransitionElementsKind)            \
-  V(AllocateHeapNumber)                \
-  V(AllocateFloat32x4)                 \
-  V(AllocateInt32x4)                   \
-  V(AllocateUint32x4)                  \
-  V(AllocateBool32x4)                  \
-  V(AllocateInt16x8)                   \
-  V(AllocateUint16x8)                  \
-  V(AllocateBool16x8)                  \
-  V(AllocateInt8x16)                   \
-  V(AllocateUint8x16)                  \
-  V(AllocateBool8x16)                  \
-  V(ArrayNoArgumentConstructor)        \
-  V(ArraySingleArgumentConstructor)    \
-  V(ArrayNArgumentsConstructor)        \
-  V(Compare)                           \
-  V(BinaryOp)                          \
-  V(BinaryOpWithAllocationSite)        \
-  V(BinaryOpWithVector)                \
-  V(CountOp)                           \
-  V(StringAdd)                         \
-  V(StringCompare)                     \
-  V(Keyed)                             \
-  V(Named)                             \
-  V(HasProperty)                       \
-  V(ForInFilter)                       \
-  V(GetProperty)                       \
-  V(CallHandler)                       \
-  V(ArgumentAdaptor)                   \
-  V(ApiCallbackWith0Args)              \
-  V(ApiCallbackWith1Args)              \
-  V(ApiCallbackWith2Args)              \
-  V(ApiCallbackWith3Args)              \
-  V(ApiCallbackWith4Args)              \
-  V(ApiCallbackWith5Args)              \
-  V(ApiCallbackWith6Args)              \
-  V(ApiCallbackWith7Args)              \
-  V(ApiGetter)                         \
-  V(StoreGlobalViaContext)             \
-  V(MathPowTagged)                     \
-  V(MathPowInteger)                    \
-  V(GrowArrayElements)                 \
-  V(InterpreterDispatch)               \
-  V(InterpreterPushArgsAndCall)        \
-  V(InterpreterPushArgsAndConstruct)   \
-  V(InterpreterCEntry)                 \
+#define INTERFACE_DESCRIPTOR_LIST(V)      \
+  V(Void)                                 \
+  V(ContextOnly)                          \
+  V(Load)                                 \
+  V(LoadWithVector)                       \
+  V(LoadGlobal)                           \
+  V(LoadGlobalWithVector)                 \
+  V(Store)                                \
+  V(StoreWithVector)                      \
+  V(StoreNamedTransition)                 \
+  V(StoreTransition)                      \
+  V(VarArgFunction)                       \
+  V(FastNewClosure)                       \
+  V(FastNewFunctionContext)               \
+  V(FastNewObject)                        \
+  V(FastNewRestParameter)                 \
+  V(FastNewSloppyArguments)               \
+  V(FastNewStrictArguments)               \
+  V(TypeConversion)                       \
+  V(Typeof)                               \
+  V(FastCloneRegExp)                      \
+  V(FastCloneShallowArray)                \
+  V(FastCloneShallowObject)               \
+  V(CreateAllocationSite)                 \
+  V(CreateWeakCell)                       \
+  V(CallFunction)                         \
+  V(CallFunctionWithFeedback)             \
+  V(CallFunctionWithFeedbackAndVector)    \
+  V(CallConstruct)                        \
+  V(CallTrampoline)                       \
+  V(ConstructStub)                        \
+  V(ConstructTrampoline)                  \
+  V(RegExpExec)                           \
+  V(RegExpConstructResult)                \
+  V(CopyFastSmiOrObjectElements)          \
+  V(TransitionElementsKind)               \
+  V(AllocateHeapNumber)                   \
+  V(AllocateFloat32x4)                    \
+  V(AllocateInt32x4)                      \
+  V(AllocateUint32x4)                     \
+  V(AllocateBool32x4)                     \
+  V(AllocateInt16x8)                      \
+  V(AllocateUint16x8)                     \
+  V(AllocateBool16x8)                     \
+  V(AllocateInt8x16)                      \
+  V(AllocateUint8x16)                     \
+  V(AllocateBool8x16)                     \
+  V(ArrayNoArgumentConstructor)           \
+  V(ArraySingleArgumentConstructor)       \
+  V(ArrayNArgumentsConstructor)           \
+  V(Compare)                              \
+  V(BinaryOp)                             \
+  V(BinaryOpWithAllocationSite)           \
+  V(BinaryOpWithVector)                   \
+  V(CountOp)                              \
+  V(StringAdd)                            \
+  V(StringCompare)                        \
+  V(SubString)                            \
+  V(Keyed)                                \
+  V(Named)                                \
+  V(HasProperty)                          \
+  V(ForInFilter)                          \
+  V(GetProperty)                          \
+  V(CallHandler)                          \
+  V(ArgumentAdaptor)                      \
+  V(ApiCallback)                          \
+  V(ApiGetter)                            \
+  V(StoreGlobalViaContext)                \
+  V(MathPowTagged)                        \
+  V(MathPowInteger)                       \
+  V(GrowArrayElements)                    \
+  V(InterpreterDispatch)                  \
+  V(InterpreterPushArgsAndCall)           \
+  V(InterpreterPushArgsAndConstruct)      \
+  V(InterpreterPushArgsAndConstructArray) \
+  V(InterpreterCEntry)                    \
   V(ResumeGenerator)
 
 class CallInterfaceDescriptorData {
  public:
-  CallInterfaceDescriptorData()
-      : register_param_count_(-1), function_type_(nullptr) {}
+  CallInterfaceDescriptorData() : register_param_count_(-1), param_count_(-1) {}
 
   // A copy of the passed in registers and param_representations is made
   // and owned by the CallInterfaceDescriptorData.
 
-  void InitializePlatformIndependent(FunctionType* function_type) {
-    function_type_ = function_type;
-  }
-
-  // TODO(mvstanton): Instead of taking parallel arrays register and
-  // param_representations, how about a struct that puts the representation
-  // and register side by side (eg, RegRep(r1, Representation::Tagged()).
-  // The same should go for the CodeStubDescriptor class.
   void InitializePlatformSpecific(
       int register_parameter_count, const Register* registers,
       PlatformInterfaceDescriptor* platform_descriptor = NULL);
 
-  bool IsInitialized() const { return register_param_count_ >= 0; }
+  // if machine_types is null, then an array of size
+  // (register_parameter_count + extra_parameter_count) will be created
+  // with MachineType::AnyTagged() for each member.
+  //
+  // if machine_types is not null, then it should be of the size
+  // register_parameter_count. Those members of the parameter array
+  // will be initialized from {machine_types}, and the rest initialized
+  // to MachineType::AnyTagged().
+  void InitializePlatformIndependent(int parameter_count,
+                                     int extra_parameter_count,
+                                     const MachineType* machine_types);
 
-  int param_count() const { return function_type_->Arity(); }
+  bool IsInitialized() const {
+    return register_param_count_ >= 0 && param_count_ >= 0;
+  }
+
+  int param_count() const { return param_count_; }
   int register_param_count() const { return register_param_count_; }
   Register register_param(int index) const { return register_params_[index]; }
   Register* register_params() const { return register_params_.get(); }
-  Type* param_type(int index) const { return function_type_->Parameter(index); }
+  MachineType param_type(int index) const { return machine_types_[index]; }
   PlatformInterfaceDescriptor* platform_specific_descriptor() const {
     return platform_specific_descriptor_;
   }
 
  private:
   int register_param_count_;
+  int param_count_;
 
   // The Register params are allocated dynamically by the
   // InterfaceDescriptor, and freed on destruction. This is because static
   // arrays of Registers cause creation of runtime static initializers
   // which we don't want.
   std::unique_ptr<Register[]> register_params_;
-
-  // Specifies types for parameters and return
-  FunctionType* function_type_;
+  std::unique_ptr<MachineType[]> machine_types_;
 
   PlatformInterfaceDescriptor* platform_specific_descriptor_;
 
@@ -186,7 +179,7 @@
     return data()->register_param(index);
   }
 
-  Type* GetParameterType(int index) const {
+  MachineType GetParameterType(int index) const {
     DCHECK(index < data()->param_count());
     return data()->param_type(index);
   }
@@ -200,21 +193,18 @@
 
   const char* DebugName(Isolate* isolate) const;
 
-  static FunctionType* BuildDefaultFunctionType(Isolate* isolate,
-                                                int parameter_count);
-
  protected:
   const CallInterfaceDescriptorData* data() const { return data_; }
 
-  virtual FunctionType* BuildCallInterfaceDescriptorFunctionType(
-      Isolate* isolate, int register_param_count) {
-    return BuildDefaultFunctionType(isolate, register_param_count);
-  }
-
   virtual void InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
     UNREACHABLE();
   }
 
+  virtual void InitializePlatformIndependent(
+      CallInterfaceDescriptorData* data) {
+    data->InitializePlatformIndependent(data->register_param_count(), 0, NULL);
+  }
+
   void Initialize(Isolate* isolate, CallDescriptors::Key key) {
     if (!data()->IsInitialized()) {
       // We should only initialize descriptors on the isolate's main thread.
@@ -222,9 +212,7 @@
       CallInterfaceDescriptorData* d = isolate->call_descriptor_data(key);
       DCHECK(d == data());  // d should be a modifiable pointer to data().
       InitializePlatformSpecific(d);
-      FunctionType* function_type = BuildCallInterfaceDescriptorFunctionType(
-          isolate, d->register_param_count());
-      d->InitializePlatformIndependent(function_type);
+      InitializePlatformIndependent(d);
     }
   }
 
@@ -264,23 +252,26 @@
                                                                                \
  public:
 
-#define DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(name, base) \
-  DECLARE_DESCRIPTOR(name, base)                                 \
- protected:                                                      \
-  FunctionType* BuildCallInterfaceDescriptorFunctionType(        \
-      Isolate* isolate, int register_param_count) override;      \
-                                                                 \
+#define DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(name, base)        \
+  DECLARE_DESCRIPTOR(name, base)                                        \
+ protected:                                                             \
+  void InitializePlatformIndependent(CallInterfaceDescriptorData* data) \
+      override;                                                         \
+                                                                        \
  public:
 
-#define DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(name, base, arg) \
-  DECLARE_DESCRIPTOR_WITH_BASE(name, base)                                  \
- protected:                                                                 \
-  FunctionType* BuildCallInterfaceDescriptorFunctionType(                   \
-      Isolate* isolate, int register_param_count) override {                \
-    return BuildCallInterfaceDescriptorFunctionTypeWithArg(                 \
-        isolate, register_param_count, arg);                                \
-  }                                                                         \
-                                                                            \
+#define DECLARE_DESCRIPTOR_WITH_STACK_ARGS(name, base)                  \
+  DECLARE_DESCRIPTOR_WITH_BASE(name, base)                              \
+ protected:                                                             \
+  void InitializePlatformIndependent(CallInterfaceDescriptorData* data) \
+      override {                                                        \
+    data->InitializePlatformIndependent(0, kParameterCount, NULL);      \
+  }                                                                     \
+  void InitializePlatformSpecific(CallInterfaceDescriptorData* data)    \
+      override {                                                        \
+    data->InitializePlatformSpecific(0, nullptr);                       \
+  }                                                                     \
+                                                                        \
  public:
 
 #define DEFINE_PARAMETERS(...)                          \
@@ -301,73 +292,6 @@
   DECLARE_DESCRIPTOR(ContextOnlyDescriptor, CallInterfaceDescriptor)
 };
 
-// The OnStackWith*ArgsDescriptors have a lot of boilerplate. The superclass
-// OnStackArgsDescriptorBase contains all the logic, and it is not meant to be
-// instantiated directly: it has no public constructors to ensure this is so.
-//
-// Use OnStackArgsDescriptorBase::ForArgs(isolate, parameter_count) to
-// instantiate a descriptor with the number of args.
-class OnStackArgsDescriptorBase : public CallInterfaceDescriptor {
- public:
-  static CallInterfaceDescriptor ForArgs(Isolate* isolate, int parameter_count);
-
- protected:
-  OnStackArgsDescriptorBase(Isolate* isolate, CallDescriptors::Key key)
-      : CallInterfaceDescriptor(isolate, key) {}
-  void InitializePlatformSpecific(CallInterfaceDescriptorData* data) override;
-  FunctionType* BuildCallInterfaceDescriptorFunctionTypeWithArg(
-      Isolate* isolate, int register_parameter_count, int parameter_count);
-};
-
-class OnStackWith1ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith1ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     1)
-};
-
-class OnStackWith2ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith2ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     2)
-};
-
-class OnStackWith3ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith3ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     3)
-};
-
-class OnStackWith4ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith4ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     4)
-};
-
-class OnStackWith5ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith5ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     5)
-};
-
-class OnStackWith6ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith6ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     6)
-};
-
-class OnStackWith7ArgsDescriptor : public OnStackArgsDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(OnStackWith7ArgsDescriptor,
-                                                     OnStackArgsDescriptorBase,
-                                                     7)
-};
-
 // LoadDescriptor is used by all stubs that implement Load/KeyedLoad ICs.
 class LoadDescriptor : public CallInterfaceDescriptor {
  public:
@@ -401,42 +325,47 @@
   static const Register NameRegister();
   static const Register ValueRegister();
   static const Register SlotRegister();
-};
 
+#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
+  static const bool kPassLastArgsOnStack = true;
+#else
+  static const bool kPassLastArgsOnStack = false;
+#endif
+
+  // Pass value and slot through the stack.
+  static const int kStackArgumentsCount = kPassLastArgsOnStack ? 2 : 0;
+};
 
 class StoreTransitionDescriptor : public StoreDescriptor {
  public:
-  DEFINE_PARAMETERS(kReceiver, kName, kValue, kMap)
+  DEFINE_PARAMETERS(kReceiver, kName, kMap, kValue, kSlot, kVector)
   DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(StoreTransitionDescriptor,
                                                StoreDescriptor)
 
   static const Register MapRegister();
-};
-
-
-class VectorStoreTransitionDescriptor : public StoreDescriptor {
- public:
-  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(VectorStoreTransitionDescriptor,
-                                               StoreDescriptor)
-
-  // TODO(ishell): use DEFINE_PARAMETERS macro here
-  // Extends StoreDescriptor with Map parameter.
-  enum ParameterIndices {
-    kReceiver = 0,
-    kName = 1,
-    kValue = 2,
-
-    kMap = 3,
-
-    kSlot = 4,  // not present on ia32.
-    kVirtualSlotVector = 4,
-
-    kVector = 5
-  };
-
-  static const Register MapRegister();
   static const Register SlotRegister();
   static const Register VectorRegister();
+
+  // Pass value, slot and vector through the stack.
+  static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0;
+};
+
+class StoreNamedTransitionDescriptor : public StoreTransitionDescriptor {
+ public:
+  DEFINE_PARAMETERS(kReceiver, kFieldOffset, kMap, kValue, kSlot, kVector,
+                    kName)
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(StoreNamedTransitionDescriptor,
+                                               StoreTransitionDescriptor)
+
+  // Always pass name on the stack.
+  static const bool kPassLastArgsOnStack = true;
+  static const int kStackArgumentsCount =
+      StoreTransitionDescriptor::kStackArgumentsCount + 1;
+
+  static const Register NameRegister() { return no_reg; }
+  static const Register FieldOffsetRegister() {
+    return StoreTransitionDescriptor::NameRegister();
+  }
 };
 
 class StoreWithVectorDescriptor : public StoreDescriptor {
@@ -446,6 +375,9 @@
                                                StoreDescriptor)
 
   static const Register VectorRegister();
+
+  // Pass value, slot and vector through the stack.
+  static const int kStackArgumentsCount = kPassLastArgsOnStack ? 3 : 0;
 };
 
 class LoadWithVectorDescriptor : public LoadDescriptor {
@@ -632,6 +564,12 @@
   DECLARE_DESCRIPTOR(CallConstructDescriptor, CallInterfaceDescriptor)
 };
 
+class RegExpExecDescriptor : public CallInterfaceDescriptor {
+ public:
+  DEFINE_PARAMETERS(kRegExpObject, kString, kPreviousIndex, kLastMatchInfo)
+  DECLARE_DESCRIPTOR_WITH_STACK_ARGS(RegExpExecDescriptor,
+                                     CallInterfaceDescriptor)
+};
 
 class RegExpConstructResultDescriptor : public CallInterfaceDescriptor {
  public:
@@ -751,6 +689,13 @@
   static const Register RightRegister();
 };
 
+class SubStringDescriptor : public CallInterfaceDescriptor {
+ public:
+  DEFINE_PARAMETERS(kString, kFrom, kTo)
+  DECLARE_DESCRIPTOR_WITH_STACK_ARGS(SubStringDescriptor,
+                                     CallInterfaceDescriptor)
+};
+
 // TODO(ishell): not used, remove.
 class KeyedDescriptor : public CallInterfaceDescriptor {
  public:
@@ -778,79 +723,13 @@
                                                CallInterfaceDescriptor)
 };
 
-// The ApiCallback*Descriptors have a lot of boilerplate. The superclass
-// ApiCallbackDescriptorBase contains all the logic, and the
-// ApiCallbackWith*ArgsDescriptor merely instantiate these with a
-// parameter for the number of args.
-//
-// The base class is not meant to be instantiated directly and has no
-// public constructors to ensure this is so.
-//
-// The simplest usage for all the ApiCallback*Descriptors is probably
-//   ApiCallbackDescriptorBase::ForArgs(isolate, argc)
-//
-class ApiCallbackDescriptorBase : public CallInterfaceDescriptor {
+class ApiCallbackDescriptor : public CallInterfaceDescriptor {
  public:
   DEFINE_PARAMETERS(kFunction, kCallData, kHolder, kApiFunctionAddress)
-  static CallInterfaceDescriptor ForArgs(Isolate* isolate, int argc);
-
- protected:
-  ApiCallbackDescriptorBase(Isolate* isolate, CallDescriptors::Key key)
-      : CallInterfaceDescriptor(isolate, key) {}
-  void InitializePlatformSpecific(CallInterfaceDescriptorData* data) override;
-  FunctionType* BuildCallInterfaceDescriptorFunctionTypeWithArg(
-      Isolate* isolate, int parameter_count, int argc);
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(ApiCallbackDescriptor,
+                                               CallInterfaceDescriptor)
 };
 
-class ApiCallbackWith0ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith0ArgsDescriptor, ApiCallbackDescriptorBase, 0)
-};
-
-class ApiCallbackWith1ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith1ArgsDescriptor, ApiCallbackDescriptorBase, 1)
-};
-
-class ApiCallbackWith2ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith2ArgsDescriptor, ApiCallbackDescriptorBase, 2)
-};
-
-class ApiCallbackWith3ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith3ArgsDescriptor, ApiCallbackDescriptorBase, 3)
-};
-
-class ApiCallbackWith4ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith4ArgsDescriptor, ApiCallbackDescriptorBase, 4)
-};
-
-class ApiCallbackWith5ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith5ArgsDescriptor, ApiCallbackDescriptorBase, 5)
-};
-
-class ApiCallbackWith6ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith6ArgsDescriptor, ApiCallbackDescriptorBase, 6)
-};
-
-class ApiCallbackWith7ArgsDescriptor : public ApiCallbackDescriptorBase {
- public:
-  DECLARE_DESCRIPTOR_WITH_BASE_AND_FUNCTION_TYPE_ARG(
-      ApiCallbackWith7ArgsDescriptor, ApiCallbackDescriptorBase, 7)
-};
-
-
 class ApiGetterDescriptor : public CallInterfaceDescriptor {
  public:
   DEFINE_PARAMETERS(kReceiver, kHolder, kCallback)
@@ -904,22 +783,35 @@
 
 class InterpreterPushArgsAndCallDescriptor : public CallInterfaceDescriptor {
  public:
-  DECLARE_DESCRIPTOR(InterpreterPushArgsAndCallDescriptor,
-                     CallInterfaceDescriptor)
+  DEFINE_PARAMETERS(kNumberOfArguments, kFirstArgument, kFunction)
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(
+      InterpreterPushArgsAndCallDescriptor, CallInterfaceDescriptor)
 };
 
 
 class InterpreterPushArgsAndConstructDescriptor
     : public CallInterfaceDescriptor {
  public:
-  DECLARE_DESCRIPTOR(InterpreterPushArgsAndConstructDescriptor,
-                     CallInterfaceDescriptor)
+  DEFINE_PARAMETERS(kNumberOfArguments, kNewTarget, kConstructor,
+                    kFeedbackElement, kFirstArgument)
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(
+      InterpreterPushArgsAndConstructDescriptor, CallInterfaceDescriptor)
 };
 
+class InterpreterPushArgsAndConstructArrayDescriptor
+    : public CallInterfaceDescriptor {
+ public:
+  DEFINE_PARAMETERS(kNumberOfArguments, kFunction, kFeedbackElement,
+                    kFirstArgument)
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(
+      InterpreterPushArgsAndConstructArrayDescriptor, CallInterfaceDescriptor)
+};
 
 class InterpreterCEntryDescriptor : public CallInterfaceDescriptor {
  public:
-  DECLARE_DESCRIPTOR(InterpreterCEntryDescriptor, CallInterfaceDescriptor)
+  DEFINE_PARAMETERS(kNumberOfArguments, kFirstArgument, kFunctionEntry)
+  DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(InterpreterCEntryDescriptor,
+                                               CallInterfaceDescriptor)
 };
 
 class ResumeGeneratorDescriptor final : public CallInterfaceDescriptor {
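With the header changes above, descriptor parameter types are queried as
MachineType values rather than Type*. A small usage sketch (illustrative only:
{isolate} is assumed to be in scope, and CallTrampolineDescriptor's
DEFINE_PARAMETERS list is inferred from the comment in the .cc hunk earlier,
since its class declaration is not shown in this diff):

  // Parameter 1 of CallTrampolineDescriptor was registered as Int32 above.
  CallTrampolineDescriptor descriptor(isolate);
  MachineType arg_count_type = descriptor.GetParameterType(
      CallTrampolineDescriptor::kActualArgumentsCount);
  DCHECK(arg_count_type == MachineType::Int32());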
diff --git a/src/interpreter/OWNERS b/src/interpreter/OWNERS
index d12fcf9..4e6a721 100644
--- a/src/interpreter/OWNERS
+++ b/src/interpreter/OWNERS
@@ -3,5 +3,4 @@
 bmeurer@chromium.org
 mstarzinger@chromium.org
 mythria@chromium.org
-oth@chromium.org
 rmcilroy@chromium.org
diff --git a/src/interpreter/bytecode-array-builder.cc b/src/interpreter/bytecode-array-builder.cc
index 9bef5a5..dfa3950 100644
--- a/src/interpreter/bytecode-array-builder.cc
+++ b/src/interpreter/bytecode-array-builder.cc
@@ -4,7 +4,6 @@
 
 #include "src/interpreter/bytecode-array-builder.h"
 
-#include "src/compiler.h"
 #include "src/globals.h"
 #include "src/interpreter/bytecode-array-writer.h"
 #include "src/interpreter/bytecode-dead-code-optimizer.h"
@@ -29,7 +28,7 @@
       parameter_count_(parameter_count),
       local_register_count_(locals_count),
       context_register_count_(context_count),
-      temporary_allocator_(zone, fixed_register_count()),
+      register_allocator_(fixed_register_count()),
       bytecode_array_writer_(zone, &constant_array_builder_,
                              source_position_mode),
       pipeline_(&bytecode_array_writer_) {
@@ -47,7 +46,8 @@
 
   if (FLAG_ignition_reo) {
     pipeline_ = new (zone) BytecodeRegisterOptimizer(
-        zone, &temporary_allocator_, parameter_count, pipeline_);
+        zone, &register_allocator_, fixed_register_count(), parameter_count,
+        pipeline_);
   }
 
   return_position_ =
@@ -70,10 +70,6 @@
   return Register::FromParameterIndex(parameter_index, parameter_count());
 }
 
-bool BytecodeArrayBuilder::RegisterIsParameterOrLocal(Register reg) const {
-  return reg.is_parameter() || reg.index() < locals_count();
-}
-
 Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray(Isolate* isolate) {
   DCHECK(return_seen_in_block_);
   DCHECK(!bytecode_generated_);
@@ -81,86 +77,121 @@
 
   Handle<FixedArray> handler_table =
       handler_table_builder()->ToHandlerTable(isolate);
-  return pipeline_->ToBytecodeArray(isolate, fixed_register_count(),
+  return pipeline_->ToBytecodeArray(isolate, total_register_count(),
                                     parameter_count(), handler_table);
 }
 
-namespace {
-
-static bool ExpressionPositionIsNeeded(Bytecode bytecode) {
-  // An expression position is always needed if filtering is turned
-  // off. Otherwise an expression is only needed if the bytecode has
-  // external side effects.
-  return !FLAG_ignition_filter_expression_positions ||
-         !Bytecodes::IsWithoutExternalSideEffects(bytecode);
-}
-
-}  // namespace
-
-void BytecodeArrayBuilder::AttachSourceInfo(BytecodeNode* node) {
-  if (latest_source_info_.is_valid()) {
-    // Statement positions need to be emitted immediately.  Expression
-    // positions can be pushed back until a bytecode is found that can
-    // throw. Hence we only invalidate the existing source position
-    // information if it is used.
-    if (latest_source_info_.is_statement() ||
-        ExpressionPositionIsNeeded(node->bytecode())) {
-      node->source_info().Clone(latest_source_info_);
-      latest_source_info_.set_invalid();
-    }
-  }
-}
-
 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
                                   uint32_t operand1, uint32_t operand2,
                                   uint32_t operand3) {
   DCHECK(OperandsAreValid(bytecode, 4, operand0, operand1, operand2, operand3));
-  BytecodeNode node(bytecode, operand0, operand1, operand2, operand3);
-  AttachSourceInfo(&node);
+  BytecodeNode node(bytecode, operand0, operand1, operand2, operand3,
+                    &latest_source_info_);
   pipeline()->Write(&node);
 }
 
 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
                                   uint32_t operand1, uint32_t operand2) {
   DCHECK(OperandsAreValid(bytecode, 3, operand0, operand1, operand2));
-  BytecodeNode node(bytecode, operand0, operand1, operand2);
-  AttachSourceInfo(&node);
+  BytecodeNode node(bytecode, operand0, operand1, operand2,
+                    &latest_source_info_);
   pipeline()->Write(&node);
 }
 
 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0,
                                   uint32_t operand1) {
   DCHECK(OperandsAreValid(bytecode, 2, operand0, operand1));
-  BytecodeNode node(bytecode, operand0, operand1);
-  AttachSourceInfo(&node);
+  BytecodeNode node(bytecode, operand0, operand1, &latest_source_info_);
   pipeline()->Write(&node);
 }
 
 void BytecodeArrayBuilder::Output(Bytecode bytecode, uint32_t operand0) {
   DCHECK(OperandsAreValid(bytecode, 1, operand0));
-  BytecodeNode node(bytecode, operand0);
-  AttachSourceInfo(&node);
+  BytecodeNode node(bytecode, operand0, &latest_source_info_);
   pipeline()->Write(&node);
 }
 
 void BytecodeArrayBuilder::Output(Bytecode bytecode) {
   DCHECK(OperandsAreValid(bytecode, 0));
-  BytecodeNode node(bytecode);
-  AttachSourceInfo(&node);
+  BytecodeNode node(bytecode, &latest_source_info_);
   pipeline()->Write(&node);
 }
 
+void BytecodeArrayBuilder::OutputJump(Bytecode bytecode, BytecodeLabel* label) {
+  BytecodeNode node(bytecode, 0, &latest_source_info_);
+  pipeline_->WriteJump(&node, label);
+  LeaveBasicBlock();
+}
+
+void BytecodeArrayBuilder::OutputJump(Bytecode bytecode, uint32_t operand0,
+                                      BytecodeLabel* label) {
+  BytecodeNode node(bytecode, 0, operand0, &latest_source_info_);
+  pipeline_->WriteJump(&node, label);
+  LeaveBasicBlock();
+}
+
 BytecodeArrayBuilder& BytecodeArrayBuilder::BinaryOperation(Token::Value op,
                                                             Register reg,
                                                             int feedback_slot) {
-  Output(BytecodeForBinaryOperation(op), RegisterOperand(reg),
-         UnsignedOperand(feedback_slot));
+  switch (op) {
+    case Token::Value::ADD:
+      Output(Bytecode::kAdd, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::SUB:
+      Output(Bytecode::kSub, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::MUL:
+      Output(Bytecode::kMul, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::DIV:
+      Output(Bytecode::kDiv, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::MOD:
+      Output(Bytecode::kMod, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::BIT_OR:
+      Output(Bytecode::kBitwiseOr, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::BIT_XOR:
+      Output(Bytecode::kBitwiseXor, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::BIT_AND:
+      Output(Bytecode::kBitwiseAnd, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::SHL:
+      Output(Bytecode::kShiftLeft, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::SAR:
+      Output(Bytecode::kShiftRight, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::SHR:
+      Output(Bytecode::kShiftRightLogical, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    default:
+      UNREACHABLE();
+  }
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::CountOperation(Token::Value op,
                                                            int feedback_slot) {
-  Output(BytecodeForCountOperation(op), UnsignedOperand(feedback_slot));
+  if (op == Token::Value::ADD) {
+    Output(Bytecode::kInc, UnsignedOperand(feedback_slot));
+  } else {
+    DCHECK_EQ(op, Token::Value::SUB);
+    Output(Bytecode::kDec, UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
@@ -169,15 +200,51 @@
   return *this;
 }
 
-
 BytecodeArrayBuilder& BytecodeArrayBuilder::TypeOf() {
   Output(Bytecode::kTypeOf);
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CompareOperation(Token::Value op,
-                                                             Register reg) {
-  Output(BytecodeForCompareOperation(op), RegisterOperand(reg));
+BytecodeArrayBuilder& BytecodeArrayBuilder::CompareOperation(
+    Token::Value op, Register reg, int feedback_slot) {
+  switch (op) {
+    case Token::Value::EQ:
+      Output(Bytecode::kTestEqual, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::NE:
+      Output(Bytecode::kTestNotEqual, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::EQ_STRICT:
+      Output(Bytecode::kTestEqualStrict, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::LT:
+      Output(Bytecode::kTestLessThan, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::GT:
+      Output(Bytecode::kTestGreaterThan, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::LTE:
+      Output(Bytecode::kTestLessThanOrEqual, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::GTE:
+      Output(Bytecode::kTestGreaterThanOrEqual, RegisterOperand(reg),
+             UnsignedOperand(feedback_slot));
+      break;
+    case Token::Value::INSTANCEOF:
+      Output(Bytecode::kTestInstanceOf, RegisterOperand(reg));
+      break;
+    case Token::Value::IN:
+      Output(Bytecode::kTestIn, RegisterOperand(reg));
+      break;
+    default:
+      UNREACHABLE();
+  }
   return *this;
 }
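For reference, a usage sketch of the updated builder calls (the register and
feedback-slot values are placeholders; only methods whose signatures appear in
this diff are used):

  // Compare the accumulator against {reg}; the feedback-slot operand is the
  // addition to CompareOperation in this change.
  builder.CompareOperation(Token::Value::LT, reg, feedback_slot);
  // Count operations (increment/decrement) also carry a feedback slot.
  builder.CountOperation(Token::Value::ADD, feedback_slot);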
 
@@ -250,50 +317,90 @@
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::LoadGlobal(int feedback_slot,
                                                        TypeofMode typeof_mode) {
-  // TODO(rmcilroy): Potentially store typeof information in an
-  // operand rather than having extra bytecodes.
-  Bytecode bytecode = BytecodeForLoadGlobal(typeof_mode);
-  Output(bytecode, UnsignedOperand(feedback_slot));
+  if (typeof_mode == INSIDE_TYPEOF) {
+    Output(Bytecode::kLdaGlobalInsideTypeof, feedback_slot);
+  } else {
+    DCHECK_EQ(typeof_mode, NOT_INSIDE_TYPEOF);
+    Output(Bytecode::kLdaGlobal, UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::StoreGlobal(
     const Handle<String> name, int feedback_slot, LanguageMode language_mode) {
-  Bytecode bytecode = BytecodeForStoreGlobal(language_mode);
   size_t name_index = GetConstantPoolEntry(name);
-  Output(bytecode, UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
+  if (language_mode == SLOPPY) {
+    Output(Bytecode::kStaGlobalSloppy, UnsignedOperand(name_index),
+           UnsignedOperand(feedback_slot));
+  } else {
+    DCHECK_EQ(language_mode, STRICT);
+    Output(Bytecode::kStaGlobalStrict, UnsignedOperand(name_index),
+           UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::LoadContextSlot(Register context,
-                                                            int slot_index) {
+                                                            int slot_index,
+                                                            int depth) {
   Output(Bytecode::kLdaContextSlot, RegisterOperand(context),
-         UnsignedOperand(slot_index));
+         UnsignedOperand(slot_index), UnsignedOperand(depth));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::StoreContextSlot(Register context,
-                                                             int slot_index) {
+                                                             int slot_index,
+                                                             int depth) {
   Output(Bytecode::kStaContextSlot, RegisterOperand(context),
-         UnsignedOperand(slot_index));
+         UnsignedOperand(slot_index), UnsignedOperand(depth));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupSlot(
     const Handle<String> name, TypeofMode typeof_mode) {
-  Bytecode bytecode = (typeof_mode == INSIDE_TYPEOF)
-                          ? Bytecode::kLdaLookupSlotInsideTypeof
-                          : Bytecode::kLdaLookupSlot;
   size_t name_index = GetConstantPoolEntry(name);
-  Output(bytecode, UnsignedOperand(name_index));
+  if (typeof_mode == INSIDE_TYPEOF) {
+    Output(Bytecode::kLdaLookupSlotInsideTypeof, UnsignedOperand(name_index));
+  } else {
+    DCHECK_EQ(typeof_mode, NOT_INSIDE_TYPEOF);
+    Output(Bytecode::kLdaLookupSlot, UnsignedOperand(name_index));
+  }
+  return *this;
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupContextSlot(
+    const Handle<String> name, TypeofMode typeof_mode, int slot_index,
+    int depth) {
+  Bytecode bytecode = (typeof_mode == INSIDE_TYPEOF)
+                          ? Bytecode::kLdaLookupContextSlotInsideTypeof
+                          : Bytecode::kLdaLookupContextSlot;
+  size_t name_index = GetConstantPoolEntry(name);
+  Output(bytecode, UnsignedOperand(name_index), UnsignedOperand(slot_index),
+         UnsignedOperand(depth));
+  return *this;
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::LoadLookupGlobalSlot(
+    const Handle<String> name, TypeofMode typeof_mode, int feedback_slot,
+    int depth) {
+  Bytecode bytecode = (typeof_mode == INSIDE_TYPEOF)
+                          ? Bytecode::kLdaLookupGlobalSlotInsideTypeof
+                          : Bytecode::kLdaLookupGlobalSlot;
+  size_t name_index = GetConstantPoolEntry(name);
+  Output(bytecode, UnsignedOperand(name_index), UnsignedOperand(feedback_slot),
+         UnsignedOperand(depth));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::StoreLookupSlot(
     const Handle<String> name, LanguageMode language_mode) {
-  Bytecode bytecode = BytecodeForStoreLookupSlot(language_mode);
   size_t name_index = GetConstantPoolEntry(name);
-  Output(bytecode, UnsignedOperand(name_index));
+  if (language_mode == SLOPPY) {
+    Output(Bytecode::kStaLookupSlotSloppy, UnsignedOperand(name_index));
+  } else {
+    DCHECK_EQ(language_mode, STRICT);
+    Output(Bytecode::kStaLookupSlotStrict, UnsignedOperand(name_index));
+  }
   return *this;
 }
 
@@ -315,19 +422,29 @@
 BytecodeArrayBuilder& BytecodeArrayBuilder::StoreNamedProperty(
     Register object, const Handle<Name> name, int feedback_slot,
     LanguageMode language_mode) {
-  Bytecode bytecode = BytecodeForStoreNamedProperty(language_mode);
   size_t name_index = GetConstantPoolEntry(name);
-  Output(bytecode, RegisterOperand(object), UnsignedOperand(name_index),
-         UnsignedOperand(feedback_slot));
+  if (language_mode == SLOPPY) {
+    Output(Bytecode::kStaNamedPropertySloppy, RegisterOperand(object),
+           UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
+  } else {
+    DCHECK_EQ(language_mode, STRICT);
+    Output(Bytecode::kStaNamedPropertyStrict, RegisterOperand(object),
+           UnsignedOperand(name_index), UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::StoreKeyedProperty(
     Register object, Register key, int feedback_slot,
     LanguageMode language_mode) {
-  Bytecode bytecode = BytecodeForStoreKeyedProperty(language_mode);
-  Output(bytecode, RegisterOperand(object), RegisterOperand(key),
-         UnsignedOperand(feedback_slot));
+  if (language_mode == SLOPPY) {
+    Output(Bytecode::kStaKeyedPropertySloppy, RegisterOperand(object),
+           RegisterOperand(key), UnsignedOperand(feedback_slot));
+  } else {
+    DCHECK_EQ(language_mode, STRICT);
+    Output(Bytecode::kStaKeyedPropertyStrict, RegisterOperand(object),
+           RegisterOperand(key), UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
@@ -346,10 +463,11 @@
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::CreateCatchContext(
-    Register exception, Handle<String> name) {
+    Register exception, Handle<String> name, Handle<ScopeInfo> scope_info) {
   size_t name_index = GetConstantPoolEntry(name);
+  size_t scope_info_index = GetConstantPoolEntry(scope_info);
   Output(Bytecode::kCreateCatchContext, RegisterOperand(exception),
-         UnsignedOperand(name_index));
+         UnsignedOperand(name_index), UnsignedOperand(scope_info_index));
   return *this;
 }
 
@@ -358,18 +476,29 @@
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CreateWithContext(Register object) {
-    Output(Bytecode::kCreateWithContext, RegisterOperand(object));
+BytecodeArrayBuilder& BytecodeArrayBuilder::CreateWithContext(
+    Register object, Handle<ScopeInfo> scope_info) {
+  size_t scope_info_index = GetConstantPoolEntry(scope_info);
+  Output(Bytecode::kCreateWithContext, RegisterOperand(object),
+         UnsignedOperand(scope_info_index));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::CreateArguments(
     CreateArgumentsType type) {
-  // TODO(rmcilroy): Consider passing the type as a bytecode operand rather
-  // than having two different bytecodes once we have better support for
-  // branches in the InterpreterAssembler.
-  Bytecode bytecode = BytecodeForCreateArguments(type);
-  Output(bytecode);
+  switch (type) {
+    case CreateArgumentsType::kMappedArguments:
+      Output(Bytecode::kCreateMappedArguments);
+      break;
+    case CreateArgumentsType::kUnmappedArguments:
+      Output(Bytecode::kCreateUnmappedArguments);
+      break;
+    case CreateArgumentsType::kRestParameter:
+      Output(Bytecode::kCreateRestParameter);
+      break;
+    default:
+      UNREACHABLE();
+  }
   return *this;
 }
 
@@ -411,19 +540,19 @@
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToJSObject(
+BytecodeArrayBuilder& BytecodeArrayBuilder::ConvertAccumulatorToObject(
     Register out) {
   Output(Bytecode::kToObject, RegisterOperand(out));
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToName(
+BytecodeArrayBuilder& BytecodeArrayBuilder::ConvertAccumulatorToName(
     Register out) {
   Output(Bytecode::kToName, RegisterOperand(out));
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CastAccumulatorToNumber(
+BytecodeArrayBuilder& BytecodeArrayBuilder::ConvertAccumulatorToNumber(
     Register out) {
   Output(Bytecode::kToNumber, RegisterOperand(out));
   return *this;
@@ -442,43 +571,44 @@
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::OutputJump(Bytecode jump_bytecode,
-                                                       BytecodeLabel* label) {
-  BytecodeNode node(jump_bytecode, 0);
-  AttachSourceInfo(&node);
-  pipeline_->WriteJump(&node, label);
-  LeaveBasicBlock();
-  return *this;
-}
-
 BytecodeArrayBuilder& BytecodeArrayBuilder::Jump(BytecodeLabel* label) {
-  return OutputJump(Bytecode::kJump, label);
+  OutputJump(Bytecode::kJump, label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfTrue(BytecodeLabel* label) {
   // The peephole optimizer attempts to simplify JumpIfToBooleanTrue
   // to JumpIfTrue.
-  return OutputJump(Bytecode::kJumpIfToBooleanTrue, label);
+  OutputJump(Bytecode::kJumpIfToBooleanTrue, label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfFalse(BytecodeLabel* label) {
-  // The peephole optimizer attempts to simplify JumpIfToBooleanFalse
-  // to JumpIfFalse.
-  return OutputJump(Bytecode::kJumpIfToBooleanFalse, label);
+  OutputJump(Bytecode::kJumpIfToBooleanFalse, label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfNull(BytecodeLabel* label) {
-  return OutputJump(Bytecode::kJumpIfNull, label);
+  OutputJump(Bytecode::kJumpIfNull, label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfUndefined(
     BytecodeLabel* label) {
-  return OutputJump(Bytecode::kJumpIfUndefined, label);
+  OutputJump(Bytecode::kJumpIfUndefined, label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::JumpIfNotHole(
     BytecodeLabel* label) {
-  return OutputJump(Bytecode::kJumpIfNotHole, label);
+  OutputJump(Bytecode::kJumpIfNotHole, label);
+  return *this;
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::JumpLoop(BytecodeLabel* label,
+                                                     int loop_depth) {
+  OutputJump(Bytecode::kJumpLoop, UnsignedOperand(loop_depth), label);
+  return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::StackCheck(int position) {
@@ -499,11 +629,6 @@
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::OsrPoll(int loop_depth) {
-  Output(Bytecode::kOsrPoll, UnsignedOperand(loop_depth));
-  return *this;
-}
-
 BytecodeArrayBuilder& BytecodeArrayBuilder::Throw() {
   Output(Bytecode::kThrow);
   return *this;
@@ -527,24 +652,27 @@
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::ForInPrepare(
-    Register receiver, Register cache_info_triple) {
+    Register receiver, RegisterList cache_info_triple) {
+  DCHECK_EQ(3, cache_info_triple.register_count());
   Output(Bytecode::kForInPrepare, RegisterOperand(receiver),
-         RegisterOperand(cache_info_triple));
+         RegisterOperand(cache_info_triple.first_register()));
   return *this;
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::ForInDone(Register index,
-                                                      Register cache_length) {
-  Output(Bytecode::kForInDone, RegisterOperand(index),
+BytecodeArrayBuilder& BytecodeArrayBuilder::ForInContinue(
+    Register index, Register cache_length) {
+  Output(Bytecode::kForInContinue, RegisterOperand(index),
          RegisterOperand(cache_length));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::ForInNext(
-    Register receiver, Register index, Register cache_type_array_pair,
+    Register receiver, Register index, RegisterList cache_type_array_pair,
     int feedback_slot) {
+  DCHECK_EQ(2, cache_type_array_pair.register_count());
   Output(Bytecode::kForInNext, RegisterOperand(receiver),
-         RegisterOperand(index), RegisterOperand(cache_type_array_pair),
+         RegisterOperand(index),
+         RegisterOperand(cache_type_array_pair.first_register()),
          UnsignedOperand(feedback_slot));
   return *this;
 }
@@ -591,45 +719,39 @@
   return *this;
 }
 
-void BytecodeArrayBuilder::EnsureReturn() {
-  if (!return_seen_in_block_) {
-    LoadUndefined();
-    Return();
-  }
-  DCHECK(return_seen_in_block_);
-}
-
 BytecodeArrayBuilder& BytecodeArrayBuilder::Call(Register callable,
-                                                 Register receiver_args,
-                                                 size_t receiver_args_count,
+                                                 RegisterList args,
                                                  int feedback_slot,
                                                  TailCallMode tail_call_mode) {
-  Bytecode bytecode = BytecodeForCall(tail_call_mode);
-  Output(bytecode, RegisterOperand(callable), RegisterOperand(receiver_args),
-         UnsignedOperand(receiver_args_count), UnsignedOperand(feedback_slot));
+  if (tail_call_mode == TailCallMode::kDisallow) {
+    Output(Bytecode::kCall, RegisterOperand(callable),
+           RegisterOperand(args.first_register()),
+           UnsignedOperand(args.register_count()),
+           UnsignedOperand(feedback_slot));
+  } else {
+    DCHECK(tail_call_mode == TailCallMode::kAllow);
+    Output(Bytecode::kTailCall, RegisterOperand(callable),
+           RegisterOperand(args.first_register()),
+           UnsignedOperand(args.register_count()),
+           UnsignedOperand(feedback_slot));
+  }
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::New(Register constructor,
-                                                Register first_arg,
-                                                size_t arg_count) {
-  if (!first_arg.is_valid()) {
-    DCHECK_EQ(0u, arg_count);
-    first_arg = Register(0);
-  }
+                                                RegisterList args,
+                                                int feedback_slot_id) {
   Output(Bytecode::kNew, RegisterOperand(constructor),
-         RegisterOperand(first_arg), UnsignedOperand(arg_count));
+         RegisterOperand(args.first_register()),
+         UnsignedOperand(args.register_count()),
+         UnsignedOperand(feedback_slot_id));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntime(
-    Runtime::FunctionId function_id, Register first_arg, size_t arg_count) {
+    Runtime::FunctionId function_id, RegisterList args) {
   DCHECK_EQ(1, Runtime::FunctionForId(function_id)->result_size);
   DCHECK(Bytecodes::SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
-  if (!first_arg.is_valid()) {
-    DCHECK_EQ(0u, arg_count);
-    first_arg = Register(0);
-  }
   Bytecode bytecode;
   uint32_t id;
   if (IntrinsicsHelper::IsSupported(function_id)) {
@@ -639,35 +761,56 @@
     bytecode = Bytecode::kCallRuntime;
     id = static_cast<uint32_t>(function_id);
   }
-  Output(bytecode, id, RegisterOperand(first_arg), UnsignedOperand(arg_count));
+  Output(bytecode, id, RegisterOperand(args.first_register()),
+         UnsignedOperand(args.register_count()));
+  return *this;
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntime(
+    Runtime::FunctionId function_id, Register arg) {
+  return CallRuntime(function_id, RegisterList(arg.index(), 1));
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntime(
+    Runtime::FunctionId function_id) {
+  return CallRuntime(function_id, RegisterList());
+}
+
+BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntimeForPair(
+    Runtime::FunctionId function_id, RegisterList args,
+    RegisterList return_pair) {
+  DCHECK_EQ(2, Runtime::FunctionForId(function_id)->result_size);
+  DCHECK(Bytecodes::SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
+  DCHECK_EQ(2, return_pair.register_count());
+  Output(Bytecode::kCallRuntimeForPair, static_cast<uint16_t>(function_id),
+         RegisterOperand(args.first_register()),
+         UnsignedOperand(args.register_count()),
+         RegisterOperand(return_pair.first_register()));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::CallRuntimeForPair(
-    Runtime::FunctionId function_id, Register first_arg, size_t arg_count,
-    Register first_return) {
-  DCHECK_EQ(2, Runtime::FunctionForId(function_id)->result_size);
-  DCHECK(Bytecodes::SizeForUnsignedOperand(function_id) <= OperandSize::kShort);
-  if (!first_arg.is_valid()) {
-    DCHECK_EQ(0u, arg_count);
-    first_arg = Register(0);
-  }
-  Output(Bytecode::kCallRuntimeForPair, static_cast<uint16_t>(function_id),
-         RegisterOperand(first_arg), UnsignedOperand(arg_count),
-         RegisterOperand(first_return));
-  return *this;
+    Runtime::FunctionId function_id, Register arg, RegisterList return_pair) {
+  return CallRuntimeForPair(function_id, RegisterList(arg.index(), 1),
+                            return_pair);
 }
 
-BytecodeArrayBuilder& BytecodeArrayBuilder::CallJSRuntime(
-    int context_index, Register receiver_args, size_t receiver_args_count) {
+BytecodeArrayBuilder& BytecodeArrayBuilder::CallJSRuntime(int context_index,
+                                                          RegisterList args) {
   Output(Bytecode::kCallJSRuntime, UnsignedOperand(context_index),
-         RegisterOperand(receiver_args), UnsignedOperand(receiver_args_count));
+         RegisterOperand(args.first_register()),
+         UnsignedOperand(args.register_count()));
   return *this;
 }
 
 BytecodeArrayBuilder& BytecodeArrayBuilder::Delete(Register object,
                                                    LanguageMode language_mode) {
-  Output(BytecodeForDelete(language_mode), RegisterOperand(object));
+  if (language_mode == SLOPPY) {
+    Output(Bytecode::kDeletePropertySloppy, RegisterOperand(object));
+  } else {
+    DCHECK_EQ(language_mode, STRICT);
+    Output(Bytecode::kDeletePropertyStrict, RegisterOperand(object));
+  }
   return *this;
 }
 
@@ -689,29 +832,6 @@
   latest_source_info_.MakeStatementPosition(return_position_);
 }
 
-void BytecodeArrayBuilder::SetStatementPosition(Statement* stmt) {
-  if (stmt->position() == kNoSourcePosition) return;
-  latest_source_info_.MakeStatementPosition(stmt->position());
-}
-
-void BytecodeArrayBuilder::SetExpressionPosition(Expression* expr) {
-  if (expr->position() == kNoSourcePosition) return;
-  if (!latest_source_info_.is_statement()) {
-    // Ensure the current expression position is overwritten with the
-    // latest value.
-    latest_source_info_.MakeExpressionPosition(expr->position());
-  }
-}
-
-void BytecodeArrayBuilder::SetExpressionAsStatementPosition(Expression* expr) {
-  if (expr->position() == kNoSourcePosition) return;
-  latest_source_info_.MakeStatementPosition(expr->position());
-}
-
-bool BytecodeArrayBuilder::TemporaryRegisterIsLive(Register reg) const {
-  return temporary_register_allocator()->RegisterIsLive(reg);
-}
-
 bool BytecodeArrayBuilder::RegisterIsValid(Register reg) const {
   if (!reg.is_valid()) {
     return false;
@@ -726,7 +846,7 @@
   } else if (reg.index() < fixed_register_count()) {
     return true;
   } else {
-    return TemporaryRegisterIsLive(reg);
+    return register_allocator()->RegisterIsLive(reg);
   }
 }
 
@@ -743,19 +863,6 @@
     switch (operand_types[i]) {
       case OperandType::kNone:
         return false;
-      case OperandType::kRegCount: {
-        CHECK_NE(i, 0);
-        CHECK(operand_types[i - 1] == OperandType::kMaybeReg ||
-              operand_types[i - 1] == OperandType::kReg);
-        if (i > 0 && operands[i] > 0) {
-          Register start = Register::FromOperand(operands[i - 1]);
-          Register end(start.index() + static_cast<int>(operands[i]) - 1);
-          if (!RegisterIsValid(start) || !RegisterIsValid(end) || start > end) {
-            return false;
-          }
-        }
-        break;
-      }
       case OperandType::kFlag8:
       case OperandType::kIntrinsicId:
         if (Bytecodes::SizeForUnsignedOperand(operands[i]) >
@@ -770,17 +877,28 @@
         }
         break;
       case OperandType::kIdx:
-        // TODO(oth): Consider splitting OperandType::kIdx into two
-        // operand types. One which is a constant pool index that can
-        // be checked, and the other is an unsigned value.
+        // TODO(leszeks): Possibly split this up into constant pool indices and
+        // other indices, for checking.
         break;
+      case OperandType::kUImm:
       case OperandType::kImm:
         break;
-      case OperandType::kMaybeReg:
-        if (Register::FromOperand(operands[i]) == Register(0)) {
-          break;
+      case OperandType::kRegList: {
+        CHECK_LT(i, operand_count - 1);
+        CHECK(operand_types[i + 1] == OperandType::kRegCount);
+        int reg_count = static_cast<int>(operands[i + 1]);
+        if (reg_count == 0) {
+          return Register::FromOperand(operands[i]) == Register(0);
+        } else {
+          Register start = Register::FromOperand(operands[i]);
+          Register end(start.index() + reg_count - 1);
+          if (!RegisterIsValid(start) || !RegisterIsValid(end) || start > end) {
+            return false;
+          }
         }
-      // Fall-through to kReg case.
+        i++;  // Skip past kRegCount operand.
+        break;
+      }
       case OperandType::kReg:
       case OperandType::kRegOut: {
         Register reg = Register::FromOperand(operands[i]);
@@ -808,186 +926,14 @@
         }
         break;
       }
+      case OperandType::kRegCount:
+        UNREACHABLE();  // Dealt with in kRegList above.
     }
   }
 
   return true;
 }
 
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForBinaryOperation(Token::Value op) {
-  switch (op) {
-    case Token::Value::ADD:
-      return Bytecode::kAdd;
-    case Token::Value::SUB:
-      return Bytecode::kSub;
-    case Token::Value::MUL:
-      return Bytecode::kMul;
-    case Token::Value::DIV:
-      return Bytecode::kDiv;
-    case Token::Value::MOD:
-      return Bytecode::kMod;
-    case Token::Value::BIT_OR:
-      return Bytecode::kBitwiseOr;
-    case Token::Value::BIT_XOR:
-      return Bytecode::kBitwiseXor;
-    case Token::Value::BIT_AND:
-      return Bytecode::kBitwiseAnd;
-    case Token::Value::SHL:
-      return Bytecode::kShiftLeft;
-    case Token::Value::SAR:
-      return Bytecode::kShiftRight;
-    case Token::Value::SHR:
-      return Bytecode::kShiftRightLogical;
-    default:
-      UNREACHABLE();
-      return Bytecode::kIllegal;
-  }
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForCountOperation(Token::Value op) {
-  switch (op) {
-    case Token::Value::ADD:
-      return Bytecode::kInc;
-    case Token::Value::SUB:
-      return Bytecode::kDec;
-    default:
-      UNREACHABLE();
-      return Bytecode::kIllegal;
-  }
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForCompareOperation(Token::Value op) {
-  switch (op) {
-    case Token::Value::EQ:
-      return Bytecode::kTestEqual;
-    case Token::Value::NE:
-      return Bytecode::kTestNotEqual;
-    case Token::Value::EQ_STRICT:
-      return Bytecode::kTestEqualStrict;
-    case Token::Value::LT:
-      return Bytecode::kTestLessThan;
-    case Token::Value::GT:
-      return Bytecode::kTestGreaterThan;
-    case Token::Value::LTE:
-      return Bytecode::kTestLessThanOrEqual;
-    case Token::Value::GTE:
-      return Bytecode::kTestGreaterThanOrEqual;
-    case Token::Value::INSTANCEOF:
-      return Bytecode::kTestInstanceOf;
-    case Token::Value::IN:
-      return Bytecode::kTestIn;
-    default:
-      UNREACHABLE();
-      return Bytecode::kIllegal;
-  }
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForStoreNamedProperty(
-    LanguageMode language_mode) {
-  switch (language_mode) {
-    case SLOPPY:
-      return Bytecode::kStaNamedPropertySloppy;
-    case STRICT:
-      return Bytecode::kStaNamedPropertyStrict;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForStoreKeyedProperty(
-    LanguageMode language_mode) {
-  switch (language_mode) {
-    case SLOPPY:
-      return Bytecode::kStaKeyedPropertySloppy;
-    case STRICT:
-      return Bytecode::kStaKeyedPropertyStrict;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForLoadGlobal(TypeofMode typeof_mode) {
-  return typeof_mode == INSIDE_TYPEOF ? Bytecode::kLdaGlobalInsideTypeof
-                                      : Bytecode::kLdaGlobal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForStoreGlobal(
-    LanguageMode language_mode) {
-  switch (language_mode) {
-    case SLOPPY:
-      return Bytecode::kStaGlobalSloppy;
-    case STRICT:
-      return Bytecode::kStaGlobalStrict;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForStoreLookupSlot(
-    LanguageMode language_mode) {
-  switch (language_mode) {
-    case SLOPPY:
-      return Bytecode::kStaLookupSlotSloppy;
-    case STRICT:
-      return Bytecode::kStaLookupSlotStrict;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForCreateArguments(
-    CreateArgumentsType type) {
-  switch (type) {
-    case CreateArgumentsType::kMappedArguments:
-      return Bytecode::kCreateMappedArguments;
-    case CreateArgumentsType::kUnmappedArguments:
-      return Bytecode::kCreateUnmappedArguments;
-    case CreateArgumentsType::kRestParameter:
-      return Bytecode::kCreateRestParameter;
-  }
-  UNREACHABLE();
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForDelete(LanguageMode language_mode) {
-  switch (language_mode) {
-    case SLOPPY:
-      return Bytecode::kDeletePropertySloppy;
-    case STRICT:
-      return Bytecode::kDeletePropertyStrict;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
-// static
-Bytecode BytecodeArrayBuilder::BytecodeForCall(TailCallMode tail_call_mode) {
-  switch (tail_call_mode) {
-    case TailCallMode::kDisallow:
-      return Bytecode::kCall;
-    case TailCallMode::kAllow:
-      return Bytecode::kTailCall;
-    default:
-      UNREACHABLE();
-  }
-  return Bytecode::kIllegal;
-}
-
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
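A note on the builder change above: the call-shaped operations (Call, New,
CallRuntime, CallRuntimeForPair, CallJSRuntime) now take a RegisterList naming
a contiguous run of registers instead of a (first_arg, arg_count) pair. A
minimal standalone sketch of that shape, using simplified stand-in types
rather than V8's actual Register/RegisterList classes:

// register-list-sketch.cc -- illustrative only.
#include <cstdio>

struct Register {
  int index;
};

class RegisterList {
 public:
  RegisterList() : first_(0), count_(0) {}
  RegisterList(int first, int count) : first_(first), count_(count) {}
  Register first_register() const { return Register{first_}; }
  Register last_register() const { return Register{first_ + count_ - 1}; }
  int register_count() const { return count_; }

 private:
  int first_;
  int count_;
};

// Stand-in for BytecodeArrayBuilder::Call(): the emitter only needs the first
// register and the count, which map onto the kRegList/kRegCount operand pair
// checked in OperandsAreValid().
void EmitCall(int callable_reg, const RegisterList& args, int feedback_slot) {
  std::printf("Call r%d, r%d-r%d (%d args), slot %d\n", callable_reg,
              args.first_register().index, args.last_register().index,
              args.register_count(), feedback_slot);
}

int main() {
  RegisterList args(/*first=*/4, /*count=*/3);  // receiver in r4, args in r5-r6
  EmitCall(/*callable_reg=*/3, args, /*feedback_slot=*/1);
  return 0;
}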
diff --git a/src/interpreter/bytecode-array-builder.h b/src/interpreter/bytecode-array-builder.h
index 51b6186..a9fa7a7 100644
--- a/src/interpreter/bytecode-array-builder.h
+++ b/src/interpreter/bytecode-array-builder.h
@@ -12,7 +12,7 @@
 #include "src/interpreter/bytecodes.h"
 #include "src/interpreter/constant-array-builder.h"
 #include "src/interpreter/handler-table-builder.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -61,23 +61,14 @@
   int fixed_register_count() const { return context_count() + locals_count(); }
 
   // Returns the number of fixed and temporary registers.
-  int fixed_and_temporary_register_count() const {
-    return fixed_register_count() + temporary_register_count();
-  }
-
-  int temporary_register_count() const {
-    return temporary_register_allocator()->allocation_count();
+  int total_register_count() const {
+    DCHECK_LE(fixed_register_count(),
+              register_allocator()->maximum_register_count());
+    return register_allocator()->maximum_register_count();
   }
 
   Register Parameter(int parameter_index) const;
 
-  // Return true if the register |reg| represents a parameter or a
-  // local.
-  bool RegisterIsParameterOrLocal(Register reg) const;
-
-  // Returns true if the register |reg| is a live temporary register.
-  bool TemporaryRegisterIsLive(Register reg) const;
-
   // Constant loads to accumulator.
   BytecodeArrayBuilder& LoadConstantPoolEntry(size_t entry);
   BytecodeArrayBuilder& LoadLiteral(v8::internal::Smi* value);
@@ -94,11 +85,15 @@
                                     int feedback_slot,
                                     LanguageMode language_mode);
 
-  // Load the object at |slot_index| in |context| into the accumulator.
-  BytecodeArrayBuilder& LoadContextSlot(Register context, int slot_index);
+  // Load the object at |slot_index| at |depth| in the context chain starting
+  // with |context| into the accumulator.
+  BytecodeArrayBuilder& LoadContextSlot(Register context, int slot_index,
+                                        int depth);
 
-  // Stores the object in the accumulator into |slot_index| of |context|.
-  BytecodeArrayBuilder& StoreContextSlot(Register context, int slot_index);
+  // Stores the object in the accumulator into |slot_index| at |depth| in the
+  // context chain starting with |context|.
+  BytecodeArrayBuilder& StoreContextSlot(Register context, int slot_index,
+                                         int depth);
 
   // Register-accumulator transfers.
   BytecodeArrayBuilder& LoadAccumulatorWithRegister(Register reg);
@@ -127,6 +122,20 @@
   BytecodeArrayBuilder& LoadLookupSlot(const Handle<String> name,
                                        TypeofMode typeof_mode);
 
+  // Look up the variable with |name|, which is known to be at |slot_index|
+  // at |depth| in the context chain if not shadowed by a context extension
+  // somewhere in that context chain.
+  BytecodeArrayBuilder& LoadLookupContextSlot(const Handle<String> name,
+                                              TypeofMode typeof_mode,
+                                              int slot_index, int depth);
+
+  // Look up the variable with |name|, which has its feedback in
+  // |feedback_slot| and is known to be global if not shadowed by a context
+  // extension somewhere up to |depth| in that context chain.
+  BytecodeArrayBuilder& LoadLookupGlobalSlot(const Handle<String> name,
+                                             TypeofMode typeof_mode,
+                                             int feedback_slot, int depth);
+
   // Store value in the accumulator into the variable with |name|.
   BytecodeArrayBuilder& StoreLookupSlot(const Handle<String> name,
                                         LanguageMode language_mode);
@@ -139,17 +148,19 @@
   // in the accumulator.
   BytecodeArrayBuilder& CreateBlockContext(Handle<ScopeInfo> scope_info);
 
-  // Create a new context for a catch block with |exception| and |name| and the
-  // closure in the accumulator.
+  // Create a new context for a catch block with |exception|, |name|,
+  // |scope_info|, and the closure in the accumulator.
   BytecodeArrayBuilder& CreateCatchContext(Register exception,
-                                           Handle<String> name);
+                                           Handle<String> name,
+                                           Handle<ScopeInfo> scope_info);
 
   // Create a new context with size |slots|.
   BytecodeArrayBuilder& CreateFunctionContext(int slots);
 
-  // Creates a new context for a with-statement with the |object| in a register
-  // and the closure in the accumulator.
-  BytecodeArrayBuilder& CreateWithContext(Register object);
+  // Creates a new context with the given |scope_info| for a with-statement
+  // with the |object| in a register and the closure in the accumulator.
+  BytecodeArrayBuilder& CreateWithContext(Register object,
+                                          Handle<ScopeInfo> scope_info);
 
   // Create a new arguments object in the accumulator.
   BytecodeArrayBuilder& CreateArguments(CreateArgumentsType type);
@@ -171,46 +182,42 @@
   BytecodeArrayBuilder& PopContext(Register context);
 
   // Call a JS function. The JSFunction or Callable to be called should be in
-  // |callable|, the receiver should be in |receiver_args| and all subsequent
-  // arguments should be in registers <receiver_args + 1> to
-  // <receiver_args + receiver_arg_count - 1>. Type feedback is recorded in
-  // the |feedback_slot| in the type feedback vector.
+  // |callable|. The arguments should be in |args|, with the receiver in
+  // |args[0]|. Type feedback is recorded in the |feedback_slot| in the type
+  // feedback vector.
   BytecodeArrayBuilder& Call(
-      Register callable, Register receiver_args, size_t receiver_arg_count,
-      int feedback_slot, TailCallMode tail_call_mode = TailCallMode::kDisallow);
-
-  BytecodeArrayBuilder& TailCall(Register callable, Register receiver_args,
-                                 size_t receiver_arg_count, int feedback_slot) {
-    return Call(callable, receiver_args, receiver_arg_count, feedback_slot,
-                TailCallMode::kAllow);
-  }
+      Register callable, RegisterList args, int feedback_slot,
+      TailCallMode tail_call_mode = TailCallMode::kDisallow);
 
   // Call the new operator. The accumulator holds the |new_target|.
-  // The |constructor| is in a register followed by |arg_count|
-  // consecutive arguments starting at |first_arg| for the constuctor
-  // invocation.
-  BytecodeArrayBuilder& New(Register constructor, Register first_arg,
-                            size_t arg_count);
+  // The |constructor| is in a register and arguments are in |args|.
+  BytecodeArrayBuilder& New(Register constructor, RegisterList args,
+                            int feedback_slot);
 
-  // Call the runtime function with |function_id|. The first argument should be
-  // in |first_arg| and all subsequent arguments should be in registers
-  // <first_arg + 1> to <first_arg + arg_count - 1>.
+  // Call the runtime function with |function_id| and arguments |args|.
   BytecodeArrayBuilder& CallRuntime(Runtime::FunctionId function_id,
-                                    Register first_arg, size_t arg_count);
+                                    RegisterList args);
+  // Call the runtime function with |function_id| and a single argument |arg|.
+  BytecodeArrayBuilder& CallRuntime(Runtime::FunctionId function_id,
+                                    Register arg);
+  // Call the runtime function with |function_id| and no arguments.
+  BytecodeArrayBuilder& CallRuntime(Runtime::FunctionId function_id);
 
-  // Call the runtime function with |function_id| that returns a pair of values.
-  // The first argument should be in |first_arg| and all subsequent arguments
-  // should be in registers <first_arg + 1> to <first_arg + arg_count - 1>. The
-  // return values will be returned in <first_return> and <first_return + 1>.
+  // Call the runtime function with |function_id| and arguments |args|, which
+  // returns a pair of values. The return values will be returned in
+  // |return_pair|.
   BytecodeArrayBuilder& CallRuntimeForPair(Runtime::FunctionId function_id,
-                                           Register first_arg, size_t arg_count,
-                                           Register first_return);
+                                           RegisterList args,
+                                           RegisterList return_pair);
+  // Call the runtime function with |function_id| and a single argument
+  // |arg|, which returns a pair of values. The return values will be
+  // returned in |return_pair|.
+  BytecodeArrayBuilder& CallRuntimeForPair(Runtime::FunctionId function_id,
+                                           Register arg,
+                                           RegisterList return_pair);
 
-  // Call the JS runtime function with |context_index|. The the receiver should
-  // be in |receiver_args| and all subsequent arguments should be in registers
-  // <receiver + 1> to <receiver + receiver_args_count - 1>.
-  BytecodeArrayBuilder& CallJSRuntime(int context_index, Register receiver_args,
-                                      size_t receiver_args_count);
+  // Call the JS runtime function with |context_index| and arguments |args|.
+  BytecodeArrayBuilder& CallJSRuntime(int context_index, RegisterList args);
 
   // Operators (register holds the lhs value, accumulator holds the rhs value).
   // Type feedback will be recorded in the |feedback_slot|
@@ -230,15 +237,13 @@
   BytecodeArrayBuilder& Delete(Register object, LanguageMode language_mode);
 
   // Tests.
-  BytecodeArrayBuilder& CompareOperation(Token::Value op, Register reg);
+  BytecodeArrayBuilder& CompareOperation(Token::Value op, Register reg,
+                                         int feedback_slot = kNoFeedbackSlot);
 
-  // Casts accumulator and stores result in accumulator.
-  BytecodeArrayBuilder& CastAccumulatorToBoolean();
-
-  // Casts accumulator and stores result in register |out|.
-  BytecodeArrayBuilder& CastAccumulatorToJSObject(Register out);
-  BytecodeArrayBuilder& CastAccumulatorToName(Register out);
-  BytecodeArrayBuilder& CastAccumulatorToNumber(Register out);
+  // Converts accumulator and stores result in register |out|.
+  BytecodeArrayBuilder& ConvertAccumulatorToObject(Register out);
+  BytecodeArrayBuilder& ConvertAccumulatorToName(Register out);
+  BytecodeArrayBuilder& ConvertAccumulatorToNumber(Register out);
 
   // Flow Control.
   BytecodeArrayBuilder& Bind(BytecodeLabel* label);
@@ -250,11 +255,10 @@
   BytecodeArrayBuilder& JumpIfNotHole(BytecodeLabel* label);
   BytecodeArrayBuilder& JumpIfNull(BytecodeLabel* label);
   BytecodeArrayBuilder& JumpIfUndefined(BytecodeLabel* label);
+  BytecodeArrayBuilder& JumpLoop(BytecodeLabel* label, int loop_depth);
 
   BytecodeArrayBuilder& StackCheck(int position);
 
-  BytecodeArrayBuilder& OsrPoll(int loop_depth);
-
   BytecodeArrayBuilder& Throw();
   BytecodeArrayBuilder& ReThrow();
   BytecodeArrayBuilder& Return();
@@ -264,10 +268,10 @@
 
   // Complex flow control.
   BytecodeArrayBuilder& ForInPrepare(Register receiver,
-                                     Register cache_info_triple);
-  BytecodeArrayBuilder& ForInDone(Register index, Register cache_length);
+                                     RegisterList cache_info_triple);
+  BytecodeArrayBuilder& ForInContinue(Register index, Register cache_length);
   BytecodeArrayBuilder& ForInNext(Register receiver, Register index,
-                                  Register cache_type_array_pair,
+                                  RegisterList cache_type_array_pair,
                                   int feedback_slot);
   BytecodeArrayBuilder& ForInStep(Register index);
 
@@ -292,20 +296,55 @@
 
   void InitializeReturnPosition(FunctionLiteral* literal);
 
-  void SetStatementPosition(Statement* stmt);
-  void SetExpressionPosition(Expression* expr);
-  void SetExpressionAsStatementPosition(Expression* expr);
+  void SetStatementPosition(Statement* stmt) {
+    if (stmt->position() == kNoSourcePosition) return;
+    latest_source_info_.MakeStatementPosition(stmt->position());
+  }
+
+  void SetExpressionPosition(Expression* expr) {
+    if (expr->position() == kNoSourcePosition) return;
+    if (!latest_source_info_.is_statement()) {
+      // Ensure the current expression position is overwritten with the
+      // latest value.
+      latest_source_info_.MakeExpressionPosition(expr->position());
+    }
+  }
+
+  void SetExpressionAsStatementPosition(Expression* expr) {
+    if (expr->position() == kNoSourcePosition) return;
+    latest_source_info_.MakeStatementPosition(expr->position());
+  }
+
+  bool RequiresImplicitReturn() const { return !return_seen_in_block_; }
 
   // Accessors
-  TemporaryRegisterAllocator* temporary_register_allocator() {
-    return &temporary_allocator_;
+  BytecodeRegisterAllocator* register_allocator() {
+    return &register_allocator_;
   }
-  const TemporaryRegisterAllocator* temporary_register_allocator() const {
-    return &temporary_allocator_;
+  const BytecodeRegisterAllocator* register_allocator() const {
+    return &register_allocator_;
   }
   Zone* zone() const { return zone_; }
 
-  void EnsureReturn();
+ private:
+  friend class BytecodeRegisterAllocator;
+
+  INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
+                     uint32_t operand2, uint32_t operand3));
+  INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
+                     uint32_t operand2));
+  INLINE(void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1));
+  INLINE(void Output(Bytecode bytecode, uint32_t operand0));
+  INLINE(void Output(Bytecode bytecode));
+
+  INLINE(void OutputJump(Bytecode bytecode, BytecodeLabel* label));
+  INLINE(void OutputJump(Bytecode bytecode, uint32_t operand0,
+                         BytecodeLabel* label));
+
+  bool RegisterIsValid(Register reg) const;
+  bool OperandsAreValid(Bytecode bytecode, int operand_count,
+                        uint32_t operand0 = 0, uint32_t operand1 = 0,
+                        uint32_t operand2 = 0, uint32_t operand3 = 0) const;
 
   static uint32_t RegisterOperand(Register reg) {
     return static_cast<uint32_t>(reg.ToOperand());
@@ -325,40 +364,6 @@
     return static_cast<uint32_t>(value);
   }
 
- private:
-  friend class BytecodeRegisterAllocator;
-
-  static Bytecode BytecodeForBinaryOperation(Token::Value op);
-  static Bytecode BytecodeForCountOperation(Token::Value op);
-  static Bytecode BytecodeForCompareOperation(Token::Value op);
-  static Bytecode BytecodeForStoreNamedProperty(LanguageMode language_mode);
-  static Bytecode BytecodeForStoreKeyedProperty(LanguageMode language_mode);
-  static Bytecode BytecodeForLoadGlobal(TypeofMode typeof_mode);
-  static Bytecode BytecodeForStoreGlobal(LanguageMode language_mode);
-  static Bytecode BytecodeForStoreLookupSlot(LanguageMode language_mode);
-  static Bytecode BytecodeForCreateArguments(CreateArgumentsType type);
-  static Bytecode BytecodeForDelete(LanguageMode language_mode);
-  static Bytecode BytecodeForCall(TailCallMode tail_call_mode);
-
-  void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
-              uint32_t operand2, uint32_t operand3);
-  void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
-              uint32_t operand2);
-  void Output(Bytecode bytecode, uint32_t operand0, uint32_t operand1);
-  void Output(Bytecode bytecode, uint32_t operand0);
-  void Output(Bytecode bytecode);
-
-  BytecodeArrayBuilder& OutputJump(Bytecode jump_bytecode,
-                                   BytecodeLabel* label);
-
-  bool RegisterIsValid(Register reg) const;
-  bool OperandsAreValid(Bytecode bytecode, int operand_count,
-                        uint32_t operand0 = 0, uint32_t operand1 = 0,
-                        uint32_t operand2 = 0, uint32_t operand3 = 0) const;
-
-  // Attach latest source position to |node|.
-  void AttachSourceInfo(BytecodeNode* node);
-
   // Set position for return.
   void SetReturnPosition();
 
@@ -395,11 +400,13 @@
   int local_register_count_;
   int context_register_count_;
   int return_position_;
-  TemporaryRegisterAllocator temporary_allocator_;
+  BytecodeRegisterAllocator register_allocator_;
   BytecodeArrayWriter bytecode_array_writer_;
   BytecodePipelineStage* pipeline_;
   BytecodeSourceInfo latest_source_info_;
 
+  static int const kNoFeedbackSlot = 0;
+
   DISALLOW_COPY_AND_ASSIGN(BytecodeArrayBuilder);
 };
 
diff --git a/src/interpreter/bytecode-array-iterator.cc b/src/interpreter/bytecode-array-iterator.cc
index 84c0028..e596b11 100644
--- a/src/interpreter/bytecode-array-iterator.cc
+++ b/src/interpreter/bytecode-array-iterator.cc
@@ -97,6 +97,13 @@
   return GetUnsignedOperand(operand_index, OperandType::kFlag8);
 }
 
+uint32_t BytecodeArrayIterator::GetUnsignedImmediateOperand(
+    int operand_index) const {
+  DCHECK_EQ(Bytecodes::GetOperandType(current_bytecode(), operand_index),
+            OperandType::kUImm);
+  return GetUnsignedOperand(operand_index, OperandType::kUImm);
+}
+
 int32_t BytecodeArrayIterator::GetImmediateOperand(int operand_index) const {
   DCHECK_EQ(Bytecodes::GetOperandType(current_bytecode(), operand_index),
             OperandType::kImm);
@@ -133,11 +140,11 @@
   DCHECK_LE(operand_index, Bytecodes::NumberOfOperands(current_bytecode()));
   const OperandType* operand_types =
       Bytecodes::GetOperandTypes(current_bytecode());
-  DCHECK(Bytecodes::IsRegisterOperandType(operand_types[operand_index]));
-  if (operand_types[operand_index + 1] == OperandType::kRegCount) {
+  OperandType operand_type = operand_types[operand_index];
+  DCHECK(Bytecodes::IsRegisterOperandType(operand_type));
+  if (operand_type == OperandType::kRegList) {
     return GetRegisterCountOperand(operand_index + 1);
   } else {
-    OperandType operand_type = operand_types[operand_index];
     return Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type);
   }
 }
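GetRegisterOperandRange now reads the length of a kRegList operand from the
kRegCount operand that follows it instead of inferring it from the operand
type alone. A toy sketch of that pairing, with a hand-rolled enum and arrays
standing in for the real Bytecodes tables:

#include <cassert>
#include <cstdint>

enum class OpType { kReg, kRegPair, kRegList, kRegCount };

int FixedRegisterCount(OpType type) {
  switch (type) {
    case OpType::kReg:
      return 1;
    case OpType::kRegPair:
      return 2;
    default:
      return 0;
  }
}

// For kRegList the range length is not fixed by the operand type; it is the
// value of the kRegCount operand immediately after it.
int RegisterOperandRange(const OpType* types, const uint32_t* operands, int i) {
  if (types[i] == OpType::kRegList) {
    assert(types[i + 1] == OpType::kRegCount);
    return static_cast<int>(operands[i + 1]);
  }
  return FixedRegisterCount(types[i]);
}

int main() {
  OpType types[] = {OpType::kRegList, OpType::kRegCount};
  uint32_t operands[] = {/*first register*/ 4, /*count*/ 3};
  assert(RegisterOperandRange(types, operands, 0) == 3);
  return 0;
}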
diff --git a/src/interpreter/bytecode-array-iterator.h b/src/interpreter/bytecode-array-iterator.h
index 0f7c6c7..0922625 100644
--- a/src/interpreter/bytecode-array-iterator.h
+++ b/src/interpreter/bytecode-array-iterator.h
@@ -31,6 +31,7 @@
   }
 
   uint32_t GetFlagOperand(int operand_index) const;
+  uint32_t GetUnsignedImmediateOperand(int operand_index) const;
   int32_t GetImmediateOperand(int operand_index) const;
   uint32_t GetIndexOperand(int operand_index) const;
   uint32_t GetRegisterCountOperand(int operand_index) const;
diff --git a/src/interpreter/bytecode-array-writer.cc b/src/interpreter/bytecode-array-writer.cc
index 6694a36..fb38768 100644
--- a/src/interpreter/bytecode-array-writer.cc
+++ b/src/interpreter/bytecode-array-writer.cc
@@ -21,27 +21,23 @@
     Zone* zone, ConstantArrayBuilder* constant_array_builder,
     SourcePositionTableBuilder::RecordingMode source_position_mode)
     : bytecodes_(zone),
-      max_register_count_(0),
       unbound_jumps_(0),
       source_position_table_builder_(zone, source_position_mode),
-      constant_array_builder_(constant_array_builder) {}
+      constant_array_builder_(constant_array_builder) {
+  bytecodes_.reserve(512);  // Derived via experimentation.
+}
 
 // override
 BytecodeArrayWriter::~BytecodeArrayWriter() {}
 
 // override
 Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
-    Isolate* isolate, int fixed_register_count, int parameter_count,
+    Isolate* isolate, int register_count, int parameter_count,
     Handle<FixedArray> handler_table) {
   DCHECK_EQ(0, unbound_jumps_);
 
   int bytecode_size = static_cast<int>(bytecodes()->size());
-
-  // All locals need a frame slot for the debugger, but may not be
-  // present in generated code.
-  int frame_size_for_locals = fixed_register_count * kPointerSize;
-  int frame_size_used = max_register_count() * kPointerSize;
-  int frame_size = std::max(frame_size_for_locals, frame_size_used);
+  int frame_size = register_count * kPointerSize;
   Handle<FixedArray> constant_pool =
       constant_array_builder()->ToFixedArray(isolate);
   Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
@@ -104,116 +100,48 @@
   }
 }
 
-namespace {
-
-OperandScale ScaleForScalableByteOperand(OperandSize operand_size) {
-  STATIC_ASSERT(static_cast<int>(OperandSize::kByte) ==
-                static_cast<int>(OperandScale::kSingle));
-  STATIC_ASSERT(static_cast<int>(OperandSize::kShort) ==
-                static_cast<int>(OperandScale::kDouble));
-  STATIC_ASSERT(static_cast<int>(OperandSize::kQuad) ==
-                static_cast<int>(OperandScale::kQuadruple));
-  return static_cast<OperandScale>(operand_size);
-}
-
-OperandScale OperandScaleForScalableSignedByte(uint32_t operand_value) {
-  int32_t signed_operand = static_cast<int32_t>(operand_value);
-  OperandSize bytes_required = Bytecodes::SizeForSignedOperand(signed_operand);
-  return ScaleForScalableByteOperand(bytes_required);
-}
-
-OperandScale OperandScaleForScalableUnsignedByte(uint32_t operand_value) {
-  OperandSize bytes_required = Bytecodes::SizeForUnsignedOperand(operand_value);
-  return ScaleForScalableByteOperand(bytes_required);
-}
-
-OperandScale GetOperandScale(const BytecodeNode* const node) {
-  const OperandTypeInfo* operand_type_infos =
-      Bytecodes::GetOperandTypeInfos(node->bytecode());
-  OperandScale operand_scale = OperandScale::kSingle;
-  int operand_count = node->operand_count();
-  for (int i = 0; i < operand_count; ++i) {
-    switch (operand_type_infos[i]) {
-      case OperandTypeInfo::kScalableSignedByte: {
-        uint32_t operand = node->operand(i);
-        operand_scale =
-            std::max(operand_scale, OperandScaleForScalableSignedByte(operand));
-        break;
-      }
-      case OperandTypeInfo::kScalableUnsignedByte: {
-        uint32_t operand = node->operand(i);
-        operand_scale = std::max(operand_scale,
-                                 OperandScaleForScalableUnsignedByte(operand));
-        break;
-      }
-      case OperandTypeInfo::kFixedUnsignedByte:
-      case OperandTypeInfo::kFixedUnsignedShort:
-        break;
-      case OperandTypeInfo::kNone:
-        UNREACHABLE();
-        break;
-    }
-  }
-  return operand_scale;
-}
-
-}  // namespace
-
 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
   DCHECK_NE(node->bytecode(), Bytecode::kIllegal);
 
-  uint8_t buffer[kMaxSizeOfPackedBytecode];
-  uint8_t* buffer_limit = buffer;
+  Bytecode bytecode = node->bytecode();
+  OperandScale operand_scale = node->operand_scale();
 
-  OperandScale operand_scale = GetOperandScale(node);
   if (operand_scale != OperandScale::kSingle) {
     Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
-    *buffer_limit++ = Bytecodes::ToByte(prefix);
+    bytecodes()->push_back(Bytecodes::ToByte(prefix));
   }
-
-  Bytecode bytecode = node->bytecode();
-  *buffer_limit++ = Bytecodes::ToByte(bytecode);
+  bytecodes()->push_back(Bytecodes::ToByte(bytecode));
 
   const uint32_t* const operands = node->operands();
-  const OperandType* operand_types = Bytecodes::GetOperandTypes(bytecode);
-  const int operand_count = Bytecodes::NumberOfOperands(bytecode);
+  const int operand_count = node->operand_count();
+  const OperandSize* operand_sizes =
+      Bytecodes::GetOperandSizes(bytecode, operand_scale);
   for (int i = 0; i < operand_count; ++i) {
-    OperandSize operand_size =
-        Bytecodes::SizeOfOperand(operand_types[i], operand_scale);
-    switch (operand_size) {
+    switch (operand_sizes[i]) {
       case OperandSize::kNone:
         UNREACHABLE();
         break;
       case OperandSize::kByte:
-        *buffer_limit++ = static_cast<uint8_t>(operands[i]);
+        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
         break;
       case OperandSize::kShort: {
-        WriteUnalignedUInt16(buffer_limit, operands[i]);
-        buffer_limit += 2;
+        uint16_t operand = static_cast<uint16_t>(operands[i]);
+        const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
+        bytecodes()->push_back(raw_operand[0]);
+        bytecodes()->push_back(raw_operand[1]);
         break;
       }
       case OperandSize::kQuad: {
-        WriteUnalignedUInt32(buffer_limit, operands[i]);
-        buffer_limit += 4;
+        const uint8_t* raw_operand =
+            reinterpret_cast<const uint8_t*>(&operands[i]);
+        bytecodes()->push_back(raw_operand[0]);
+        bytecodes()->push_back(raw_operand[1]);
+        bytecodes()->push_back(raw_operand[2]);
+        bytecodes()->push_back(raw_operand[3]);
         break;
       }
     }
-
-    int count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_types[i]);
-    if (count == 0) {
-      continue;
-    }
-    // NB operand_types is terminated by OperandType::kNone so
-    // operand_types[i + 1] is valid whilst i < operand_count.
-    if (operand_types[i + 1] == OperandType::kRegCount) {
-      count = static_cast<int>(operands[i]);
-    }
-    Register reg = Register::FromOperand(static_cast<int32_t>(operands[i]));
-    max_register_count_ = std::max(max_register_count_, reg.index() + count);
   }
-
-  DCHECK_LE(buffer_limit, buffer + sizeof(buffer));
-  bytecodes()->insert(bytecodes()->end(), buffer, buffer_limit);
 }
 
 // static
@@ -247,18 +175,17 @@
   DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
   size_t operand_location = jump_location + 1;
   DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
-  if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) {
-    // The jump fits within the range of an Imm operand, so cancel
+  if (Bytecodes::ScaleForSignedOperand(delta) == OperandScale::kSingle) {
+    // The jump fits within the range of an Imm8 operand, so cancel
     // the reservation and jump directly.
     constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
     bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
   } else {
-    // The jump does not fit within the range of an Imm operand, so
+    // The jump does not fit within the range of an Imm8 operand, so
     // commit reservation putting the offset into the constant pool,
     // and update the jump instruction and operand.
     size_t entry = constant_array_builder()->CommitReservedEntry(
         OperandSize::kByte, Smi::FromInt(delta));
-    DCHECK_LE(entry, kMaxUInt32);
     DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
               OperandSize::kByte);
     jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
@@ -273,14 +200,21 @@
   DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
   size_t operand_location = jump_location + 1;
   uint8_t operand_bytes[2];
-  if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) {
+  if (Bytecodes::ScaleForSignedOperand(delta) <= OperandScale::kDouble) {
+    // The jump fits within the range of an Imm16 operand, so cancel
+    // the reservation and jump directly.
     constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
     WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
   } else {
-    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
-    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
+    // The jump does not fit within the range of an Imm16 operand, so
+    // commit reservation putting the offset into the constant pool,
+    // and update the jump instruction and operand.
     size_t entry = constant_array_builder()->CommitReservedEntry(
         OperandSize::kShort, Smi::FromInt(delta));
+    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
+              OperandSize::kShort);
+    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
+    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
     WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
   }
   DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
@@ -351,13 +285,14 @@
     // Label has been bound already so this is a backwards jump.
     size_t abs_delta = current_offset - label->offset();
     int delta = -static_cast<int>(abs_delta);
-    OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta);
-    if (operand_size > OperandSize::kByte) {
+    OperandScale operand_scale = Bytecodes::ScaleForSignedOperand(delta);
+    if (operand_scale > OperandScale::kSingle) {
       // Adjust for scaling byte prefix for wide jump offset.
       DCHECK_LE(delta, 0);
       delta -= 1;
     }
-    node->set_bytecode(node->bytecode(), delta);
+    DCHECK_EQ(Bytecode::kJumpLoop, node->bytecode());
+    node->set_bytecode(node->bytecode(), delta, node->operand(1));
   } else {
     // The label has not yet been bound so this is a forward reference
     // that will be patched when the label is bound. We create a
@@ -369,6 +304,7 @@
     label->set_referrer(current_offset);
     OperandSize reserved_operand_size =
         constant_array_builder()->CreateReservedEntry();
+    DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
     switch (reserved_operand_size) {
       case OperandSize::kNone:
         UNREACHABLE();
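EmitBytecode above drops the local staging buffer: it appends the optional
Wide/ExtraWide prefix byte, the bytecode byte, and each operand's raw bytes
straight onto the bytecodes vector, using the per-scale operand sizes from
Bytecodes::GetOperandSizes. A minimal standalone sketch of that append path;
the opcode values are placeholders, and operands are copied in host byte
order, matching the raw_operand byte copies above:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

// Append one operand of |size| bytes (1, 2 or 4) in host byte order.
void AppendOperand(std::vector<uint8_t>* out, uint32_t value, int size) {
  assert(size == 1 || size == 2 || size == 4);
  uint8_t raw[4];
  std::memcpy(raw, &value, sizeof(raw));
  out->insert(out->end(), raw, raw + size);
}

int main() {
  std::vector<uint8_t> bytecodes;
  bytecodes.reserve(512);  // mirrors the experimentally chosen reservation

  const uint8_t kWidePrefix = 0x01;    // placeholder, not V8's actual encoding
  const uint8_t kSomeBytecode = 0x2a;  // placeholder opcode

  bytecodes.push_back(kWidePrefix);    // emitted when scale != kSingle
  bytecodes.push_back(kSomeBytecode);  // the bytecode itself
  AppendOperand(&bytecodes, 0x1234, /*size=*/2);  // one 16-bit operand

  assert(bytecodes.size() == 4);
  return 0;
}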
diff --git a/src/interpreter/bytecode-array-writer.h b/src/interpreter/bytecode-array-writer.h
index 17fe3d4..712fcb9 100644
--- a/src/interpreter/bytecode-array-writer.h
+++ b/src/interpreter/bytecode-array-writer.h
@@ -33,7 +33,7 @@
   void BindLabel(BytecodeLabel* label) override;
   void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
   Handle<BytecodeArray> ToBytecodeArray(
-      Isolate* isolate, int fixed_register_count, int parameter_count,
+      Isolate* isolate, int register_count, int parameter_count,
       Handle<FixedArray> handler_table) override;
 
  private:
@@ -69,10 +69,8 @@
   ConstantArrayBuilder* constant_array_builder() {
     return constant_array_builder_;
   }
-  int max_register_count() { return max_register_count_; }
 
   ZoneVector<uint8_t> bytecodes_;
-  int max_register_count_;
   int unbound_jumps_;
   SourcePositionTableBuilder source_position_table_builder_;
   ConstantArrayBuilder* constant_array_builder_;
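With max_register_count_ gone from the writer, the frame size handed to
NewBytecodeArray is simply register_count * kPointerSize, where register_count
now comes from the builder's register allocator. A quick worked example,
assuming a 64-bit target where kPointerSize is 8:

#include <cstdio>

int main() {
  const int kPointerSize = static_cast<int>(sizeof(void*));  // 8 on 64-bit
  const int register_count = 12;  // fixed + temporary registers
  std::printf("frame size = %d bytes\n", register_count * kPointerSize);
  // Prints "frame size = 96 bytes" on a 64-bit target.
  return 0;
}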
diff --git a/src/interpreter/bytecode-dead-code-optimizer.cc b/src/interpreter/bytecode-dead-code-optimizer.cc
index 5d301c7..848036c 100644
--- a/src/interpreter/bytecode-dead-code-optimizer.cc
+++ b/src/interpreter/bytecode-dead-code-optimizer.cc
@@ -14,10 +14,10 @@
 
 // override
 Handle<BytecodeArray> BytecodeDeadCodeOptimizer::ToBytecodeArray(
-    Isolate* isolate, int fixed_register_count, int parameter_count,
+    Isolate* isolate, int register_count, int parameter_count,
     Handle<FixedArray> handler_table) {
-  return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
-                                      parameter_count, handler_table);
+  return next_stage_->ToBytecodeArray(isolate, register_count, parameter_count,
+                                      handler_table);
 }
 
 // override
diff --git a/src/interpreter/bytecode-dead-code-optimizer.h b/src/interpreter/bytecode-dead-code-optimizer.h
index 8a9732c..188d610 100644
--- a/src/interpreter/bytecode-dead-code-optimizer.h
+++ b/src/interpreter/bytecode-dead-code-optimizer.h
@@ -24,7 +24,7 @@
   void BindLabel(BytecodeLabel* label) override;
   void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
   Handle<BytecodeArray> ToBytecodeArray(
-      Isolate* isolate, int fixed_register_count, int parameter_count,
+      Isolate* isolate, int register_count, int parameter_count,
       Handle<FixedArray> handler_table) override;
 
  private:
diff --git a/src/interpreter/bytecode-decoder.cc b/src/interpreter/bytecode-decoder.cc
index 74c5806..4975189 100644
--- a/src/interpreter/bytecode-decoder.cc
+++ b/src/interpreter/bytecode-decoder.cc
@@ -23,6 +23,15 @@
 }
 
 // static
+RegisterList BytecodeDecoder::DecodeRegisterListOperand(
+    const uint8_t* operand_start, uint32_t count, OperandType operand_type,
+    OperandScale operand_scale) {
+  Register first_reg =
+      DecodeRegisterOperand(operand_start, operand_type, operand_scale);
+  return RegisterList(first_reg.index(), static_cast<int>(count));
+}
+
+// static
 int32_t BytecodeDecoder::DecodeSignedOperand(const uint8_t* operand_start,
                                              OperandType operand_type,
                                              OperandScale operand_scale) {
@@ -94,7 +103,6 @@
   if (Bytecodes::IsDebugBreak(bytecode)) return os;
 
   int number_of_operands = Bytecodes::NumberOfOperands(bytecode);
-  int range = 0;
   for (int i = 0; i < number_of_operands; i++) {
     OperandType op_type = Bytecodes::GetOperandType(bytecode, i);
     int operand_offset =
@@ -102,11 +110,8 @@
     const uint8_t* operand_start =
         &bytecode_start[prefix_offset + operand_offset];
     switch (op_type) {
-      case interpreter::OperandType::kRegCount:
-        os << "#"
-           << DecodeUnsignedOperand(operand_start, op_type, operand_scale);
-        break;
       case interpreter::OperandType::kIdx:
+      case interpreter::OperandType::kUImm:
       case interpreter::OperandType::kRuntimeId:
       case interpreter::OperandType::kIntrinsicId:
         os << "["
@@ -121,7 +126,6 @@
         os << "#"
            << DecodeUnsignedOperand(operand_start, op_type, operand_scale);
         break;
-      case interpreter::OperandType::kMaybeReg:
       case interpreter::OperandType::kReg:
       case interpreter::OperandType::kRegOut: {
         Register reg =
@@ -129,19 +133,40 @@
         os << reg.ToString(parameter_count);
         break;
       }
-      case interpreter::OperandType::kRegOutTriple:
-        range += 1;
+      case interpreter::OperandType::kRegOutTriple: {
+        RegisterList reg_list =
+            DecodeRegisterListOperand(operand_start, 3, op_type, operand_scale);
+        os << reg_list.first_register().ToString(parameter_count) << "-"
+           << reg_list.last_register().ToString(parameter_count);
+        break;
+      }
       case interpreter::OperandType::kRegOutPair:
       case interpreter::OperandType::kRegPair: {
-        range += 1;
-        Register first_reg =
-            DecodeRegisterOperand(operand_start, op_type, operand_scale);
-        Register last_reg = Register(first_reg.index() + range);
-        os << first_reg.ToString(parameter_count) << "-"
-           << last_reg.ToString(parameter_count);
+        RegisterList reg_list =
+            DecodeRegisterListOperand(operand_start, 2, op_type, operand_scale);
+        os << reg_list.first_register().ToString(parameter_count) << "-"
+           << reg_list.last_register().ToString(parameter_count);
+        break;
+      }
+      case interpreter::OperandType::kRegList: {
+        DCHECK_LT(i, number_of_operands - 1);
+        DCHECK_EQ(Bytecodes::GetOperandType(bytecode, i + 1),
+                  OperandType::kRegCount);
+        int reg_count_offset =
+            Bytecodes::GetOperandOffset(bytecode, i + 1, operand_scale);
+        const uint8_t* reg_count_operand =
+            &bytecode_start[prefix_offset + reg_count_offset];
+        uint32_t count = DecodeUnsignedOperand(
+            reg_count_operand, OperandType::kRegCount, operand_scale);
+        RegisterList reg_list = DecodeRegisterListOperand(
+            operand_start, count, op_type, operand_scale);
+        os << reg_list.first_register().ToString(parameter_count) << "-"
+           << reg_list.last_register().ToString(parameter_count);
+        i++;  // Skip kRegCount.
         break;
       }
       case interpreter::OperandType::kNone:
+      case interpreter::OperandType::kRegCount:  // Dealt with in kRegList.
         UNREACHABLE();
         break;
     }
diff --git a/src/interpreter/bytecode-decoder.h b/src/interpreter/bytecode-decoder.h
index 6613179..d1749ef 100644
--- a/src/interpreter/bytecode-decoder.h
+++ b/src/interpreter/bytecode-decoder.h
@@ -21,6 +21,12 @@
                                         OperandType operand_type,
                                         OperandScale operand_scale);
 
+  // Decodes a register list operand in a byte array.
+  static RegisterList DecodeRegisterListOperand(const uint8_t* operand_start,
+                                                uint32_t count,
+                                                OperandType operand_type,
+                                                OperandScale operand_scale);
+
   // Decodes a signed operand in a byte array.
   static int32_t DecodeSignedOperand(const uint8_t* operand_start,
                                      OperandType operand_type,
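
The decoder change above pairs a kRegList operand with the following kRegCount operand and prints the registers as a contiguous range (first register plus count). A minimal standalone sketch of that decoding idea, using toy Register/RegisterList types rather than V8's real ones; names and the one-byte operand layout are illustrative only and not part of this patch:

// Standalone sketch (not V8 code): decode a contiguous register list from a
// first-register operand followed by a count operand, as kRegList/kRegCount do.
#include <cassert>
#include <cstdint>
#include <iostream>

struct Register {
  int index;
};

struct RegisterList {
  int first_index;
  int count;
  Register first_register() const { return {first_index}; }
  Register last_register() const { return {first_index + count - 1}; }
};

// Assume, for the sketch, that the operand encodes the register index in one byte.
static Register DecodeRegisterOperand(const uint8_t* operand_start) {
  return {static_cast<int>(*operand_start)};
}

static RegisterList DecodeRegisterListOperand(const uint8_t* operand_start,
                                              uint32_t count) {
  Register first = DecodeRegisterOperand(operand_start);
  return {first.index, static_cast<int>(count)};
}

int main() {
  // Operand bytes: [first register = r2][register count = 3]  ->  r2-r4.
  const uint8_t operands[] = {2, 3};
  RegisterList list = DecodeRegisterListOperand(&operands[0], operands[1]);
  assert(list.first_register().index == 2);
  assert(list.last_register().index == 4);
  std::cout << "r" << list.first_register().index << "-r"
            << list.last_register().index << "\n";
  return 0;
}
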
diff --git a/src/interpreter/bytecode-flags.cc b/src/interpreter/bytecode-flags.cc
index 9b25dbd..158af13 100644
--- a/src/interpreter/bytecode-flags.cc
+++ b/src/interpreter/bytecode-flags.cc
@@ -11,6 +11,14 @@
 namespace interpreter {
 
 // static
+uint8_t CreateArrayLiteralFlags::Encode(bool use_fast_shallow_clone,
+                                        int runtime_flags) {
+  uint8_t result = FlagsBits::encode(runtime_flags);
+  result |= FastShallowCloneBit::encode(use_fast_shallow_clone);
+  return result;
+}
+
+// static
 uint8_t CreateObjectLiteralFlags::Encode(bool fast_clone_supported,
                                          int properties_count,
                                          int runtime_flags) {
diff --git a/src/interpreter/bytecode-flags.h b/src/interpreter/bytecode-flags.h
index 1068d8a..6e87ce2 100644
--- a/src/interpreter/bytecode-flags.h
+++ b/src/interpreter/bytecode-flags.h
@@ -11,6 +11,17 @@
 namespace internal {
 namespace interpreter {
 
+class CreateArrayLiteralFlags {
+ public:
+  class FlagsBits : public BitField8<int, 0, 3> {};
+  class FastShallowCloneBit : public BitField8<bool, FlagsBits::kNext, 1> {};
+
+  static uint8_t Encode(bool use_fast_shallow_clone, int runtime_flags);
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(CreateArrayLiteralFlags);
+};
+
 class CreateObjectLiteralFlags {
  public:
   class FlagsBits : public BitField8<int, 0, 3> {};
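
CreateArrayLiteralFlags above packs a 3-bit runtime-flags value and a fast-shallow-clone bit into one byte via BitField8. A self-contained sketch of the same packing scheme with a hand-rolled bit-field helper (illustrative only; V8 uses its own BitField8 template, not this one):

// Standalone sketch: pack a 3-bit flags value and a boolean into a single byte.
#include <cassert>
#include <cstdint>

template <typename T, int kShift, int kSize>
struct BitField8 {
  static constexpr uint8_t kMask = ((1u << kSize) - 1u) << kShift;
  static constexpr int kNext = kShift + kSize;
  static uint8_t encode(T value) {
    return static_cast<uint8_t>((static_cast<uint8_t>(value) << kShift) & kMask);
  }
  static T decode(uint8_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
};

using FlagsBits = BitField8<int, 0, 3>;
using FastShallowCloneBit = BitField8<bool, FlagsBits::kNext, 1>;

// Mirrors the shape of CreateArrayLiteralFlags::Encode in the hunk above.
static uint8_t Encode(bool use_fast_shallow_clone, int runtime_flags) {
  uint8_t result = FlagsBits::encode(runtime_flags);
  result |= FastShallowCloneBit::encode(use_fast_shallow_clone);
  return result;
}

int main() {
  uint8_t packed = Encode(/*use_fast_shallow_clone=*/true, /*runtime_flags=*/5);
  assert(FlagsBits::decode(packed) == 5);
  assert(FastShallowCloneBit::decode(packed) == true);
  return 0;
}
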
diff --git a/src/interpreter/bytecode-generator.cc b/src/interpreter/bytecode-generator.cc
index 6ff43a4..db5a596 100644
--- a/src/interpreter/bytecode-generator.cc
+++ b/src/interpreter/bytecode-generator.cc
@@ -4,15 +4,16 @@
 
 #include "src/interpreter/bytecode-generator.h"
 
+#include "src/ast/compile-time-value.h"
 #include "src/ast/scopes.h"
 #include "src/code-stubs.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/interpreter/bytecode-flags.h"
 #include "src/interpreter/bytecode-label.h"
 #include "src/interpreter/bytecode-register-allocator.h"
 #include "src/interpreter/control-flow-builders.h"
 #include "src/objects.h"
-#include "src/parsing/parser.h"
 #include "src/parsing/token.h"
 
 namespace v8 {
@@ -216,10 +217,10 @@
       case CMD_CONTINUE:
         UNREACHABLE();
       case CMD_RETURN:
-        generator()->builder()->Return();
+        generator()->BuildReturn();
         return true;
       case CMD_RETHROW:
-        generator()->builder()->ReThrow();
+        generator()->BuildReThrow();
         return true;
     }
     return false;
@@ -310,7 +311,7 @@
       case CMD_RETURN:
         break;
       case CMD_RETHROW:
-        generator()->builder()->ReThrow();
+        generator()->BuildReThrow();
         return true;
     }
     return false;
@@ -373,75 +374,35 @@
  public:
   explicit RegisterAllocationScope(BytecodeGenerator* generator)
       : generator_(generator),
-        outer_(generator->register_allocator()),
-        allocator_(builder()->zone(),
-                   builder()->temporary_register_allocator()) {
-    generator_->set_register_allocator(this);
-  }
+        outer_next_register_index_(
+            generator->register_allocator()->next_register_index()) {}
 
   virtual ~RegisterAllocationScope() {
-    generator_->set_register_allocator(outer_);
+    generator_->register_allocator()->ReleaseRegisters(
+        outer_next_register_index_);
   }
 
-  Register NewRegister() {
-    RegisterAllocationScope* current_scope = generator()->register_allocator();
-    if ((current_scope == this) ||
-        (current_scope->outer() == this &&
-         !current_scope->allocator_.HasConsecutiveAllocations())) {
-      // Regular case - Allocating registers in current or outer context.
-      // VisitForRegisterValue allocates register in outer context.
-      return allocator_.NewRegister();
-    } else {
-      // If it is required to allocate a register other than current or outer
-      // scopes, allocate a new temporary register. It might be expensive to
-      // walk the full context chain and compute the list of consecutive
-      // reservations in the innerscopes.
-      UNIMPLEMENTED();
-      return Register::invalid_value();
-    }
-  }
-
-  void PrepareForConsecutiveAllocations(int count) {
-    allocator_.PrepareForConsecutiveAllocations(count);
-  }
-
-  Register NextConsecutiveRegister() {
-    return allocator_.NextConsecutiveRegister();
-  }
-
-  bool RegisterIsAllocatedInThisScope(Register reg) const {
-    return allocator_.RegisterIsAllocatedInThisScope(reg);
-  }
-
-  RegisterAllocationScope* outer() const { return outer_; }
-
  private:
-  BytecodeGenerator* generator() const { return generator_; }
-  BytecodeArrayBuilder* builder() const { return generator_->builder(); }
-
   BytecodeGenerator* generator_;
-  RegisterAllocationScope* outer_;
-  BytecodeRegisterAllocator allocator_;
+  int outer_next_register_index_;
 
   DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
 };
 
-// Scoped base class for determining where the result of an expression
-// is stored.
+// Scoped base class for determining how the result of an expression will be
+// used.
 class BytecodeGenerator::ExpressionResultScope {
  public:
   ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
       : generator_(generator),
         kind_(kind),
         outer_(generator->execution_result()),
-        allocator_(generator),
-        result_identified_(false) {
+        allocator_(generator) {
     generator_->set_execution_result(this);
   }
 
   virtual ~ExpressionResultScope() {
     generator_->set_execution_result(outer_);
-    DCHECK(result_identified() || generator_->HasStackOverflow());
   }
 
   bool IsEffect() const { return kind_ == Expression::kEffect; }
@@ -453,28 +414,11 @@
     return reinterpret_cast<TestResultScope*>(this);
   }
 
-  virtual void SetResultInAccumulator() = 0;
-  virtual void SetResultInRegister(Register reg) = 0;
-
- protected:
-  ExpressionResultScope* outer() const { return outer_; }
-  BytecodeArrayBuilder* builder() const { return generator_->builder(); }
-  BytecodeGenerator* generator() const { return generator_; }
-  const RegisterAllocationScope* allocator() const { return &allocator_; }
-
-  void set_result_identified() {
-    DCHECK(!result_identified());
-    result_identified_ = true;
-  }
-
-  bool result_identified() const { return result_identified_; }
-
  private:
   BytecodeGenerator* generator_;
   Expression::Context kind_;
   ExpressionResultScope* outer_;
   RegisterAllocationScope allocator_;
-  bool result_identified_;
 
   DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
 };
@@ -485,61 +429,15 @@
     : public ExpressionResultScope {
  public:
   explicit EffectResultScope(BytecodeGenerator* generator)
-      : ExpressionResultScope(generator, Expression::kEffect) {
-    set_result_identified();
-  }
-
-  virtual void SetResultInAccumulator() {}
-  virtual void SetResultInRegister(Register reg) {}
+      : ExpressionResultScope(generator, Expression::kEffect) {}
 };
 
 // Scoped class used when the result of the current expression to be
-// evaluated should go into the interpreter's accumulator register.
-class BytecodeGenerator::AccumulatorResultScope final
-    : public ExpressionResultScope {
+// evaluated should go into the interpreter's accumulator.
+class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
  public:
-  explicit AccumulatorResultScope(BytecodeGenerator* generator)
+  explicit ValueResultScope(BytecodeGenerator* generator)
       : ExpressionResultScope(generator, Expression::kValue) {}
-
-  virtual void SetResultInAccumulator() { set_result_identified(); }
-
-  virtual void SetResultInRegister(Register reg) {
-    builder()->LoadAccumulatorWithRegister(reg);
-    set_result_identified();
-  }
-};
-
-// Scoped class used when the result of the current expression to be
-// evaluated should go into an interpreter register.
-class BytecodeGenerator::RegisterResultScope final
-    : public ExpressionResultScope {
- public:
-  explicit RegisterResultScope(BytecodeGenerator* generator)
-      : ExpressionResultScope(generator, Expression::kValue) {}
-
-  virtual void SetResultInAccumulator() {
-    result_register_ = allocator()->outer()->NewRegister();
-    builder()->StoreAccumulatorInRegister(result_register_);
-    set_result_identified();
-  }
-
-  virtual void SetResultInRegister(Register reg) {
-    DCHECK(builder()->RegisterIsParameterOrLocal(reg) ||
-           (builder()->TemporaryRegisterIsLive(reg) &&
-            !allocator()->RegisterIsAllocatedInThisScope(reg)));
-    result_register_ = reg;
-    set_result_identified();
-  }
-
-  Register ResultRegister() {
-    if (generator()->HasStackOverflow() && !result_identified()) {
-      SetResultInAccumulator();
-    }
-    return result_register_;
-  }
-
- private:
-  Register result_register_;
 };
 
 // Scoped class used when the result of the current expression to be
@@ -554,18 +452,10 @@
         fallthrough_(fallthrough),
         result_consumed_by_test_(false) {}
 
-  virtual void SetResultInAccumulator() { set_result_identified(); }
-
-  virtual void SetResultInRegister(Register reg) {
-    builder()->LoadAccumulatorWithRegister(reg);
-    set_result_identified();
-  }
-
   // Used when code special cases for TestResultScope and consumes any
   // possible value by testing and jumping to a then/else label.
   void SetResultConsumedByTest() {
     result_consumed_by_test_ = true;
-    set_result_identified();
   }
 
   bool ResultConsumedByTest() { return result_consumed_by_test_; }
@@ -677,22 +567,17 @@
       execution_control_(nullptr),
       execution_context_(nullptr),
       execution_result_(nullptr),
-      register_allocator_(nullptr),
       generator_resume_points_(info->literal()->yield_count(), info->zone()),
       generator_state_(),
       loop_depth_(0),
       home_object_symbol_(info->isolate()->factory()->home_object_symbol()),
       prototype_string_(info->isolate()->factory()->prototype_string()) {
-  InitializeAstVisitor(info->isolate()->stack_guard()->real_climit());
 }
 
 Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(Isolate* isolate) {
-  // Create an inner HandleScope to avoid unnecessarily canonicalizing handles
-  // created as part of bytecode finalization.
-  HandleScope scope(isolate);
   AllocateDeferredConstants();
   if (HasStackOverflow()) return Handle<BytecodeArray>();
-  return scope.CloseAndEscape(builder()->ToBytecodeArray(isolate));
+  return builder()->ToBytecodeArray(isolate);
 }
 
 void BytecodeGenerator::AllocateDeferredConstants() {
@@ -726,11 +611,13 @@
   }
 }
 
-void BytecodeGenerator::GenerateBytecode() {
+void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
   DisallowHeapAllocation no_allocation;
   DisallowHandleAllocation no_handles;
   DisallowHandleDereference no_deref;
 
+  InitializeAstVisitor(stack_limit);
+
   // Initialize the incoming context.
   ContextScope incoming_context(this, scope(), false);
 
@@ -744,12 +631,11 @@
     VisitGeneratorPrologue();
   }
 
-  // Build function context only if there are context allocated variables.
   if (scope()->NeedsContext()) {
     // Push a new inner context scope for the function.
-    VisitNewLocalFunctionContext();
+    BuildNewLocalActivationContext();
     ContextScope local_function_context(this, scope(), false);
-    VisitBuildLocalActivationContext();
+    BuildLocalActivationContextInitialization();
     GenerateBytecodeBody();
   } else {
     GenerateBytecodeBody();
@@ -763,7 +649,13 @@
     if (!label.is_bound()) builder()->Bind(&label);
   }
 
-  builder()->EnsureReturn();
+  // Emit an implicit return instruction in case control flow can fall off the
+  // end of the function without an explicit return being present on all paths.
+  if (builder()->RequiresImplicitReturn()) {
+    builder()->LoadUndefined();
+    BuildReturn();
+  }
+  DCHECK(!builder()->RequiresImplicitReturn());
 }
 
 void BytecodeGenerator::GenerateBytecodeBody() {
@@ -771,8 +663,7 @@
   VisitArgumentsObject(scope()->arguments());
 
   // Build rest arguments array if it is used.
-  int rest_index;
-  Variable* rest_parameter = scope()->rest_parameter(&rest_index);
+  Variable* rest_parameter = scope()->rest_parameter();
   VisitRestArgumentsArray(rest_parameter);
 
   // Build assignment to {.this_function} variable if it is used.
@@ -781,10 +672,8 @@
   // Build assignment to {new.target} variable if it is used.
   VisitNewTargetVariable(scope()->new_target_var());
 
-  // TODO(rmcilroy): Emit tracing call if requested to do so.
-  if (FLAG_trace) {
-    UNIMPLEMENTED();
-  }
+  // Emit tracing call if requested to do so.
+  if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
 
   // Visit declarations within the function scope.
   VisitDeclarations(scope()->declarations());
@@ -829,14 +718,6 @@
 
   loop_builder->LoopHeader(&resume_points_in_loop);
 
-  // Insert an explicit {OsrPoll} right after the loop header, to trigger
-  // on-stack replacement when armed for the given loop nesting depth.
-  if (FLAG_ignition_osr) {
-    // TODO(4764): Merge this with another bytecode (e.g. {Jump} back edge).
-    int level = Min(loop_depth_, AbstractCode::kMaxLoopNestingMarker - 1);
-    builder()->OsrPoll(level);
-  }
-
   if (stmt->yield_count() > 0) {
     // If we are not resuming, fall through to loop body.
     // If we are resuming, perform state dispatch.
@@ -882,7 +763,7 @@
 void BytecodeGenerator::VisitBlock(Block* stmt) {
   // Visit declarations and statements.
   if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
-    VisitNewLocalBlockContext(stmt->scope());
+    BuildNewLocalBlockContext(stmt->scope());
     ContextScope scope(this, stmt->scope());
     VisitBlockDeclarationsAndStatements(stmt);
   } else {
@@ -903,7 +784,6 @@
 void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
   Variable* variable = decl->proxy()->var();
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       DCHECK(!variable->binding_needs_init());
       FeedbackVectorSlot slot = decl->proxy()->VariableFeedbackSlot();
@@ -926,8 +806,9 @@
       break;
     case VariableLocation::CONTEXT:
       if (variable->binding_needs_init()) {
+        DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
         builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
-                                                  variable->index());
+                                                  variable->index(), 0);
       }
       break;
     case VariableLocation::LOOKUP: {
@@ -939,18 +820,24 @@
       builder()
           ->LoadLiteral(variable->name())
           .StoreAccumulatorInRegister(name)
-          .CallRuntime(Runtime::kDeclareEvalVar, name, 1);
+          .CallRuntime(Runtime::kDeclareEvalVar, name);
       break;
     }
     case VariableLocation::MODULE:
-      UNREACHABLE();
+      if (variable->IsExport() && variable->binding_needs_init()) {
+        builder()->LoadTheHole();
+        VisitVariableAssignment(variable, Token::INIT,
+                                FeedbackVectorSlot::Invalid());
+      }
+      // Nothing to do for imports.
+      break;
   }
 }
 
 void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
   Variable* variable = decl->proxy()->var();
+  DCHECK(variable->mode() == LET || variable->mode() == VAR);
   switch (variable->location()) {
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       FeedbackVectorSlot slot = decl->proxy()->VariableFeedbackSlot();
       globals_builder()->AddFunctionDeclaration(slot, decl->fun());
@@ -959,8 +846,6 @@
     case VariableLocation::PARAMETER:
     case VariableLocation::LOCAL: {
       VisitForAccumulatorValue(decl->fun());
-      DCHECK(variable->mode() == LET || variable->mode() == VAR ||
-             variable->mode() == CONST);
       VisitVariableAssignment(variable, Token::INIT,
                               FeedbackVectorSlot::Invalid());
       break;
@@ -968,23 +853,27 @@
     case VariableLocation::CONTEXT: {
       DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
       VisitForAccumulatorValue(decl->fun());
-      builder()->StoreContextSlot(execution_context()->reg(),
-                                  variable->index());
+      builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
+                                  0);
       break;
     }
     case VariableLocation::LOOKUP: {
-      register_allocator()->PrepareForConsecutiveAllocations(2);
-      Register name = register_allocator()->NextConsecutiveRegister();
-      Register literal = register_allocator()->NextConsecutiveRegister();
-      builder()->LoadLiteral(variable->name()).StoreAccumulatorInRegister(name);
-
+      RegisterList args = register_allocator()->NewRegisterList(2);
+      builder()
+          ->LoadLiteral(variable->name())
+          .StoreAccumulatorInRegister(args[0]);
       VisitForAccumulatorValue(decl->fun());
-      builder()->StoreAccumulatorInRegister(literal).CallRuntime(
-          Runtime::kDeclareEvalFunction, name, 2);
+      builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
+          Runtime::kDeclareEvalFunction, args);
       break;
     }
     case VariableLocation::MODULE:
-      UNREACHABLE();
+      DCHECK_EQ(variable->mode(), LET);
+      DCHECK(variable->IsExport());
+      VisitForAccumulatorValue(decl->fun());
+      VisitVariableAssignment(variable, Token::INIT,
+                              FeedbackVectorSlot::Invalid());
+      break;
   }
 }
 
@@ -1002,20 +891,15 @@
       builder()->AllocateConstantPoolEntry());
   int encoded_flags = info()->GetDeclareGlobalsFlags();
 
-  register_allocator()->PrepareForConsecutiveAllocations(3);
-
-  Register pairs = register_allocator()->NextConsecutiveRegister();
-  Register flags = register_allocator()->NextConsecutiveRegister();
-  Register function = register_allocator()->NextConsecutiveRegister();
-
   // Emit code to declare globals.
+  RegisterList args = register_allocator()->NewRegisterList(3);
   builder()
       ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
-      .StoreAccumulatorInRegister(pairs)
+      .StoreAccumulatorInRegister(args[0])
       .LoadLiteral(Smi::FromInt(encoded_flags))
-      .StoreAccumulatorInRegister(flags)
-      .MoveRegister(Register::function_closure(), function)
-      .CallRuntime(Runtime::kDeclareGlobalsForInterpreter, pairs, 3);
+      .StoreAccumulatorInRegister(args[1])
+      .MoveRegister(Register::function_closure(), args[2])
+      .CallRuntime(Runtime::kDeclareGlobalsForInterpreter, args);
 
   // Push and reset globals builder.
   global_declarations_.push_back(globals_builder());
@@ -1097,7 +981,7 @@
 void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
   builder()->SetStatementPosition(stmt);
   VisitForAccumulatorValue(stmt->expression());
-  VisitNewLocalWithContext();
+  BuildNewLocalWithContext(stmt->scope());
   VisitInScope(stmt->statement(), stmt->scope());
 }
 
@@ -1126,7 +1010,9 @@
 
     // Perform label comparison as if via '===' with tag.
     VisitForAccumulatorValue(clause->label());
-    builder()->CompareOperation(Token::Value::EQ_STRICT, tag);
+    builder()->CompareOperation(
+        Token::Value::EQ_STRICT, tag,
+        feedback_index(clause->CompareOperationFeedbackSlot()));
     switch_builder.Case(i);
   }
 
@@ -1168,13 +1054,16 @@
   } else if (stmt->cond()->ToBooleanIsTrue()) {
     VisitIterationHeader(stmt, &loop_builder);
     VisitIterationBody(stmt, &loop_builder);
-    loop_builder.JumpToHeader();
+    loop_builder.JumpToHeader(loop_depth_);
   } else {
     VisitIterationHeader(stmt, &loop_builder);
     VisitIterationBody(stmt, &loop_builder);
     builder()->SetExpressionAsStatementPosition(stmt->cond());
-    VisitForTest(stmt->cond(), loop_builder.header_labels(),
-                 loop_builder.break_labels(), TestFallthrough::kElse);
+    BytecodeLabels loop_backbranch(zone());
+    VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
+                 TestFallthrough::kThen);
+    loop_backbranch.Bind(builder());
+    loop_builder.JumpToHeader(loop_depth_);
   }
   loop_builder.EndLoop();
 }
@@ -1195,7 +1084,7 @@
     loop_body.Bind(builder());
   }
   VisitIterationBody(stmt, &loop_builder);
-  loop_builder.JumpToHeader();
+  loop_builder.JumpToHeader(loop_depth_);
   loop_builder.EndLoop();
 }
 
@@ -1223,7 +1112,7 @@
     builder()->SetStatementPosition(stmt->next());
     Visit(stmt->next());
   }
-  loop_builder.JumpToHeader();
+  loop_builder.JumpToHeader(loop_depth_);
   loop_builder.EndLoop();
 }
 
@@ -1265,36 +1154,28 @@
     }
     case NAMED_SUPER_PROPERTY: {
       RegisterAllocationScope register_scope(this);
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      Register receiver = register_allocator()->NextConsecutiveRegister();
-      Register home_object = register_allocator()->NextConsecutiveRegister();
-      Register name = register_allocator()->NextConsecutiveRegister();
-      Register value = register_allocator()->NextConsecutiveRegister();
-      builder()->StoreAccumulatorInRegister(value);
+      RegisterList args = register_allocator()->NewRegisterList(4);
+      builder()->StoreAccumulatorInRegister(args[3]);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), receiver);
-      VisitForRegisterValue(super_property->home_object(), home_object);
+      VisitForRegisterValue(super_property->this_var(), args[0]);
+      VisitForRegisterValue(super_property->home_object(), args[1]);
       builder()
           ->LoadLiteral(property->key()->AsLiteral()->AsPropertyName())
-          .StoreAccumulatorInRegister(name);
-      BuildNamedSuperPropertyStore(receiver, home_object, name, value);
+          .StoreAccumulatorInRegister(args[2])
+          .CallRuntime(StoreToSuperRuntimeId(), args);
       break;
     }
     case KEYED_SUPER_PROPERTY: {
       RegisterAllocationScope register_scope(this);
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      Register receiver = register_allocator()->NextConsecutiveRegister();
-      Register home_object = register_allocator()->NextConsecutiveRegister();
-      Register key = register_allocator()->NextConsecutiveRegister();
-      Register value = register_allocator()->NextConsecutiveRegister();
-      builder()->StoreAccumulatorInRegister(value);
+      RegisterList args = register_allocator()->NewRegisterList(4);
+      builder()->StoreAccumulatorInRegister(args[3]);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), receiver);
-      VisitForRegisterValue(super_property->home_object(), home_object);
-      VisitForRegisterValue(property->key(), key);
-      BuildKeyedSuperPropertyStore(receiver, home_object, key, value);
+      VisitForRegisterValue(super_property->this_var(), args[0]);
+      VisitForRegisterValue(super_property->home_object(), args[1]);
+      VisitForRegisterValue(property->key(), args[2]);
+      builder()->CallRuntime(StoreKeyedToSuperRuntimeId(), args);
       break;
     }
   }
@@ -1316,15 +1197,12 @@
   builder()->JumpIfUndefined(&subject_undefined_label);
   builder()->JumpIfNull(&subject_null_label);
   Register receiver = register_allocator()->NewRegister();
-  builder()->CastAccumulatorToJSObject(receiver);
+  builder()->ConvertAccumulatorToObject(receiver);
 
-  register_allocator()->PrepareForConsecutiveAllocations(3);
-  Register cache_type = register_allocator()->NextConsecutiveRegister();
-  Register cache_array = register_allocator()->NextConsecutiveRegister();
-  Register cache_length = register_allocator()->NextConsecutiveRegister();
   // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
-  USE(cache_array);
-  builder()->ForInPrepare(receiver, cache_type);
+  RegisterList triple = register_allocator()->NewRegisterList(3);
+  Register cache_length = triple[2];
+  builder()->ForInPrepare(receiver, triple);
 
   // Set up loop counter
   Register index = register_allocator()->NewRegister();
@@ -1334,17 +1212,17 @@
   // The loop
   VisitIterationHeader(stmt, &loop_builder);
   builder()->SetExpressionAsStatementPosition(stmt->each());
-  builder()->ForInDone(index, cache_length);
-  loop_builder.BreakIfTrue();
-  DCHECK(Register::AreContiguous(cache_type, cache_array));
+  builder()->ForInContinue(index, cache_length);
+  loop_builder.BreakIfFalse();
   FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
-  builder()->ForInNext(receiver, index, cache_type, feedback_index(slot));
+  builder()->ForInNext(receiver, index, triple.Truncate(2),
+                       feedback_index(slot));
   loop_builder.ContinueIfUndefined();
   VisitForInAssignment(stmt->each(), stmt->EachFeedbackSlot());
   VisitIterationBody(stmt, &loop_builder);
   builder()->ForInStep(index);
   builder()->StoreAccumulatorInRegister(index);
-  loop_builder.JumpToHeader();
+  loop_builder.JumpToHeader(loop_depth_);
   loop_builder.EndLoop();
   builder()->Bind(&subject_null_label);
   builder()->Bind(&subject_undefined_label);
@@ -1364,13 +1242,12 @@
 
   VisitForEffect(stmt->assign_each());
   VisitIterationBody(stmt, &loop_builder);
-  loop_builder.JumpToHeader();
+  loop_builder.JumpToHeader(loop_depth_);
   loop_builder.EndLoop();
 }
 
 void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
   TryCatchBuilder try_control_builder(builder(), stmt->catch_prediction());
-  Register no_reg;
 
   // Preserve the context in a dedicated register, so that it can be restored
   // when the handler is entered by the stack-unwinding machinery.
@@ -1388,12 +1265,12 @@
   try_control_builder.EndTry();
 
   // Create a catch scope that binds the exception.
-  VisitNewLocalCatchContext(stmt->variable());
+  BuildNewLocalCatchContext(stmt->variable(), stmt->scope());
   builder()->StoreAccumulatorInRegister(context);
 
   // If requested, clear message object as we enter the catch block.
   if (stmt->clear_pending_message()) {
-    builder()->CallRuntime(Runtime::kInterpreterClearPendingMessage, no_reg, 0);
+    builder()->CallRuntime(Runtime::kInterpreterClearPendingMessage);
   }
 
   // Load the catch context into the accumulator.
@@ -1406,7 +1283,6 @@
 
 void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
   TryFinallyBuilder try_control_builder(builder(), stmt->catch_prediction());
-  Register no_reg;
 
   // We keep a record of all paths that enter the finally-block to be able to
   // dispatch to the correct continuation point after the statements in the
@@ -1454,7 +1330,7 @@
 
   // Clear message object as we enter the finally block.
   builder()
-      ->CallRuntime(Runtime::kInterpreterClearPendingMessage, no_reg, 0)
+      ->CallRuntime(Runtime::kInterpreterClearPendingMessage)
       .StoreAccumulatorInRegister(message);
 
   // Evaluate the finally-block.
@@ -1462,7 +1338,7 @@
   try_control_builder.EndFinally();
 
   // Pending message object is restored on exit.
-  builder()->CallRuntime(Runtime::kInterpreterSetPendingMessage, message, 1);
+  builder()->CallRuntime(Runtime::kInterpreterSetPendingMessage, message);
 
   // Dynamic dispatch after the finally-block.
   commands.ApplyDeferredCommands();
@@ -1479,16 +1355,15 @@
   size_t entry = builder()->AllocateConstantPoolEntry();
   builder()->CreateClosure(entry, flags);
   function_literals_.push_back(std::make_pair(expr, entry));
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
   VisitClassLiteralForRuntimeDefinition(expr);
 
   // Load the "prototype" from the constructor.
-  register_allocator()->PrepareForConsecutiveAllocations(2);
-  Register literal = register_allocator()->NextConsecutiveRegister();
-  Register prototype = register_allocator()->NextConsecutiveRegister();
+  RegisterList args = register_allocator()->NewRegisterList(2);
+  Register literal = args[0];
+  Register prototype = args[1];
   FeedbackVectorSlot slot = expr->PrototypeSlot();
   builder()
       ->StoreAccumulatorInRegister(literal)
@@ -1496,7 +1371,7 @@
       .StoreAccumulatorInRegister(prototype);
 
   VisitClassLiteralProperties(expr, literal, prototype);
-  builder()->CallRuntime(Runtime::kToFastProperties, literal, 1);
+  builder()->CallRuntime(Runtime::kToFastProperties, literal);
   // Assign to class variable.
   if (expr->class_variable_proxy() != nullptr) {
     Variable* var = expr->class_variable_proxy()->var();
@@ -1505,49 +1380,37 @@
                                   : FeedbackVectorSlot::Invalid();
     VisitVariableAssignment(var, Token::INIT, slot);
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitClassLiteralForRuntimeDefinition(
     ClassLiteral* expr) {
-  AccumulatorResultScope result_scope(this);
-  register_allocator()->PrepareForConsecutiveAllocations(4);
-  Register extends = register_allocator()->NextConsecutiveRegister();
-  Register constructor = register_allocator()->NextConsecutiveRegister();
-  Register start_position = register_allocator()->NextConsecutiveRegister();
-  Register end_position = register_allocator()->NextConsecutiveRegister();
-
+  RegisterAllocationScope register_scope(this);
+  RegisterList args = register_allocator()->NewRegisterList(4);
   VisitForAccumulatorValueOrTheHole(expr->extends());
-  builder()->StoreAccumulatorInRegister(extends);
-
-  VisitForAccumulatorValue(expr->constructor());
+  builder()->StoreAccumulatorInRegister(args[0]);
+  VisitForRegisterValue(expr->constructor(), args[1]);
   builder()
-      ->StoreAccumulatorInRegister(constructor)
-      .LoadLiteral(Smi::FromInt(expr->start_position()))
-      .StoreAccumulatorInRegister(start_position)
+      ->LoadLiteral(Smi::FromInt(expr->start_position()))
+      .StoreAccumulatorInRegister(args[2])
       .LoadLiteral(Smi::FromInt(expr->end_position()))
-      .StoreAccumulatorInRegister(end_position)
-      .CallRuntime(Runtime::kDefineClass, extends, 4);
-  result_scope.SetResultInAccumulator();
+      .StoreAccumulatorInRegister(args[3])
+      .CallRuntime(Runtime::kDefineClass, args);
 }
 
 void BytecodeGenerator::VisitClassLiteralProperties(ClassLiteral* expr,
                                                     Register literal,
                                                     Register prototype) {
   RegisterAllocationScope register_scope(this);
-  register_allocator()->PrepareForConsecutiveAllocations(5);
-  Register receiver = register_allocator()->NextConsecutiveRegister();
-  Register key = register_allocator()->NextConsecutiveRegister();
-  Register value = register_allocator()->NextConsecutiveRegister();
-  Register attr = register_allocator()->NextConsecutiveRegister();
-  Register set_function_name = register_allocator()->NextConsecutiveRegister();
+  RegisterList args = register_allocator()->NewRegisterList(5);
+  Register receiver = args[0], key = args[1], value = args[2], attr = args[3],
+           set_function_name = args[4];
 
   bool attr_assigned = false;
   Register old_receiver = Register::invalid_value();
 
   // Create nodes to store method values into the literal.
   for (int i = 0; i < expr->properties()->length(); i++) {
-    ObjectLiteral::Property* property = expr->properties()->at(i);
+    ClassLiteral::Property* property = expr->properties()->at(i);
 
     // Set-up receiver.
     Register new_receiver = property->is_static() ? literal : prototype;
@@ -1557,17 +1420,23 @@
     }
 
     VisitForAccumulatorValue(property->key());
-    builder()->CastAccumulatorToName(key);
-    // The static prototype property is read only. We handle the non computed
-    // property name case in the parser. Since this is the only case where we
-    // need to check for an own read only property we special case this so we do
-    // not need to do this for every property.
-    if (property->is_static() && property->is_computed_name()) {
-      VisitClassLiteralStaticPrototypeWithComputedName(key);
-    }
-    VisitForAccumulatorValue(property->value());
-    builder()->StoreAccumulatorInRegister(value);
+    builder()->ConvertAccumulatorToName(key);
 
+    if (property->is_static() && property->is_computed_name()) {
+      // The static prototype property is read only. We handle the non computed
+      // property name case in the parser. Since this is the only case where we
+      // need to check for an own read only property we special case this so we
+      // do not need to do this for every property.
+      BytecodeLabel done;
+      builder()
+          ->LoadLiteral(prototype_string())
+          .CompareOperation(Token::Value::EQ_STRICT, key)
+          .JumpIfFalse(&done)
+          .CallRuntime(Runtime::kThrowStaticPrototypeError)
+          .Bind(&done);
+    }
+
+    VisitForRegisterValue(property->value(), value);
     VisitSetHomeObject(value, receiver, property);
 
     if (!attr_assigned) {
@@ -1578,51 +1447,36 @@
     }
 
     switch (property->kind()) {
-      case ObjectLiteral::Property::CONSTANT:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
-      case ObjectLiteral::Property::PROTOTYPE:
-        // Invalid properties for ES6 classes.
-        UNREACHABLE();
-        break;
-      case ObjectLiteral::Property::COMPUTED: {
+      case ClassLiteral::Property::METHOD: {
         builder()
             ->LoadLiteral(Smi::FromInt(property->NeedsSetFunctionName()))
-            .StoreAccumulatorInRegister(set_function_name);
-        builder()->CallRuntime(Runtime::kDefineDataPropertyInLiteral, receiver,
-                               5);
+            .StoreAccumulatorInRegister(set_function_name)
+            .CallRuntime(Runtime::kDefineDataPropertyInLiteral, args);
         break;
       }
-      case ObjectLiteral::Property::GETTER: {
+      case ClassLiteral::Property::GETTER: {
         builder()->CallRuntime(Runtime::kDefineGetterPropertyUnchecked,
-                               receiver, 4);
+                               args.Truncate(4));
         break;
       }
-      case ObjectLiteral::Property::SETTER: {
+      case ClassLiteral::Property::SETTER: {
         builder()->CallRuntime(Runtime::kDefineSetterPropertyUnchecked,
-                               receiver, 4);
+                               args.Truncate(4));
+        break;
+      }
+      case ClassLiteral::Property::FIELD: {
+        UNREACHABLE();
         break;
       }
     }
   }
 }
 
-void BytecodeGenerator::VisitClassLiteralStaticPrototypeWithComputedName(
-    Register key) {
-  BytecodeLabel done;
-  builder()
-      ->LoadLiteral(prototype_string())
-      .CompareOperation(Token::Value::EQ_STRICT, key)
-      .JumpIfFalse(&done)
-      .CallRuntime(Runtime::kThrowStaticPrototypeError, Register(0), 0)
-      .Bind(&done);
-}
-
 void BytecodeGenerator::VisitNativeFunctionLiteral(
     NativeFunctionLiteral* expr) {
   size_t entry = builder()->AllocateConstantPoolEntry();
   builder()->CreateClosure(entry, NOT_TENURED);
   native_function_literals_.push_back(std::make_pair(expr, entry));
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
@@ -1652,8 +1506,6 @@
     VisitForAccumulatorValue(expr->else_expression());
     builder()->Bind(&end_label);
   }
-
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitLiteral(Literal* expr) {
@@ -1674,7 +1526,6 @@
     } else {
       builder()->LoadLiteral(raw_value->value());
     }
-    execution_result()->SetResultInAccumulator();
   }
 }
 
@@ -1682,7 +1533,6 @@
   // Materialize a regular expression literal.
   builder()->CreateRegExpLiteral(expr->pattern(), expr->literal_index(),
                                  expr->flags());
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
@@ -1693,7 +1543,7 @@
       expr->ComputeFlags());
   // Allocate in the outer scope since this register is used to return the
   // expression's results to the caller.
-  Register literal = register_allocator()->outer()->NewRegister();
+  Register literal = register_allocator()->NewRegister();
   builder()->CreateObjectLiteral(expr->constant_properties(),
                                  expr->literal_index(), flags, literal);
 
@@ -1737,23 +1587,17 @@
             VisitForEffect(property->value());
           }
         } else {
-          register_allocator()->PrepareForConsecutiveAllocations(4);
-          Register literal_argument =
-              register_allocator()->NextConsecutiveRegister();
-          Register key = register_allocator()->NextConsecutiveRegister();
-          Register value = register_allocator()->NextConsecutiveRegister();
-          Register language = register_allocator()->NextConsecutiveRegister();
+          RegisterList args = register_allocator()->NewRegisterList(4);
 
-          builder()->MoveRegister(literal, literal_argument);
-          VisitForAccumulatorValue(property->key());
-          builder()->StoreAccumulatorInRegister(key);
-          VisitForAccumulatorValue(property->value());
-          builder()->StoreAccumulatorInRegister(value);
+          builder()->MoveRegister(literal, args[0]);
+          VisitForRegisterValue(property->key(), args[1]);
+          VisitForRegisterValue(property->value(), args[2]);
           if (property->emit_store()) {
             builder()
                 ->LoadLiteral(Smi::FromInt(SLOPPY))
-                .StoreAccumulatorInRegister(language)
-                .CallRuntime(Runtime::kSetProperty, literal_argument, 4);
+                .StoreAccumulatorInRegister(args[3])
+                .CallRuntime(Runtime::kSetProperty, args);
+            Register value = args[2];
             VisitSetHomeObject(value, literal, property);
           }
         }
@@ -1761,15 +1605,10 @@
       }
       case ObjectLiteral::Property::PROTOTYPE: {
         DCHECK(property->emit_store());
-        register_allocator()->PrepareForConsecutiveAllocations(2);
-        Register literal_argument =
-            register_allocator()->NextConsecutiveRegister();
-        Register value = register_allocator()->NextConsecutiveRegister();
-
-        builder()->MoveRegister(literal, literal_argument);
-        VisitForAccumulatorValue(property->value());
-        builder()->StoreAccumulatorInRegister(value).CallRuntime(
-            Runtime::kInternalSetPrototype, literal_argument, 2);
+        RegisterList args = register_allocator()->NewRegisterList(2);
+        builder()->MoveRegister(literal, args[0]);
+        VisitForRegisterValue(property->value(), args[1]);
+        builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
         break;
       }
       case ObjectLiteral::Property::GETTER:
@@ -1790,23 +1629,15 @@
   for (AccessorTable::Iterator it = accessor_table.begin();
        it != accessor_table.end(); ++it) {
     RegisterAllocationScope inner_register_scope(this);
-    register_allocator()->PrepareForConsecutiveAllocations(5);
-    Register literal_argument = register_allocator()->NextConsecutiveRegister();
-    Register name = register_allocator()->NextConsecutiveRegister();
-    Register getter = register_allocator()->NextConsecutiveRegister();
-    Register setter = register_allocator()->NextConsecutiveRegister();
-    Register attr = register_allocator()->NextConsecutiveRegister();
-
-    builder()->MoveRegister(literal, literal_argument);
-    VisitForAccumulatorValue(it->first);
-    builder()->StoreAccumulatorInRegister(name);
-    VisitObjectLiteralAccessor(literal, it->second->getter, getter);
-    VisitObjectLiteralAccessor(literal, it->second->setter, setter);
+    RegisterList args = register_allocator()->NewRegisterList(5);
+    builder()->MoveRegister(literal, args[0]);
+    VisitForRegisterValue(it->first, args[1]);
+    VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
+    VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
     builder()
         ->LoadLiteral(Smi::FromInt(NONE))
-        .StoreAccumulatorInRegister(attr)
-        .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked,
-                     literal_argument, 5);
+        .StoreAccumulatorInRegister(args[4])
+        .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
   }
 
   // Object literals have two parts. The "static" part on the left contains no
@@ -1824,66 +1655,68 @@
 
     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
       DCHECK(property->emit_store());
-      register_allocator()->PrepareForConsecutiveAllocations(2);
-      Register literal_argument =
-          register_allocator()->NextConsecutiveRegister();
-      Register value = register_allocator()->NextConsecutiveRegister();
-
-      builder()->MoveRegister(literal, literal_argument);
-      VisitForAccumulatorValue(property->value());
-      builder()->StoreAccumulatorInRegister(value).CallRuntime(
-          Runtime::kInternalSetPrototype, literal_argument, 2);
+      RegisterList args = register_allocator()->NewRegisterList(2);
+      builder()->MoveRegister(literal, args[0]);
+      VisitForRegisterValue(property->value(), args[1]);
+      builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
       continue;
     }
 
-    register_allocator()->PrepareForConsecutiveAllocations(5);
-    Register literal_argument = register_allocator()->NextConsecutiveRegister();
-    Register key = register_allocator()->NextConsecutiveRegister();
-    Register value = register_allocator()->NextConsecutiveRegister();
-    Register attr = register_allocator()->NextConsecutiveRegister();
-    DCHECK(Register::AreContiguous(literal_argument, key, value, attr));
-    Register set_function_name =
-        register_allocator()->NextConsecutiveRegister();
-
-    builder()->MoveRegister(literal, literal_argument);
-    VisitForAccumulatorValue(property->key());
-    builder()->CastAccumulatorToName(key);
-    VisitForAccumulatorValue(property->value());
-    builder()->StoreAccumulatorInRegister(value);
-    VisitSetHomeObject(value, literal, property);
-    builder()->LoadLiteral(Smi::FromInt(NONE)).StoreAccumulatorInRegister(attr);
     switch (property->kind()) {
       case ObjectLiteral::Property::CONSTANT:
       case ObjectLiteral::Property::COMPUTED:
-      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
+      case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
+        RegisterList args = register_allocator()->NewRegisterList(5);
+        builder()->MoveRegister(literal, args[0]);
+        VisitForAccumulatorValue(property->key());
+        builder()->ConvertAccumulatorToName(args[1]);
+        VisitForRegisterValue(property->value(), args[2]);
+        VisitSetHomeObject(args[2], literal, property);
         builder()
-            ->LoadLiteral(Smi::FromInt(property->NeedsSetFunctionName()))
-            .StoreAccumulatorInRegister(set_function_name);
-        builder()->CallRuntime(Runtime::kDefineDataPropertyInLiteral,
-                               literal_argument, 5);
+            ->LoadLiteral(Smi::FromInt(NONE))
+            .StoreAccumulatorInRegister(args[3])
+            .LoadLiteral(Smi::FromInt(property->NeedsSetFunctionName()))
+            .StoreAccumulatorInRegister(args[4]);
+        builder()->CallRuntime(Runtime::kDefineDataPropertyInLiteral, args);
         break;
+      }
+      case ObjectLiteral::Property::GETTER:
+      case ObjectLiteral::Property::SETTER: {
+        RegisterList args = register_allocator()->NewRegisterList(4);
+        builder()->MoveRegister(literal, args[0]);
+        VisitForAccumulatorValue(property->key());
+        builder()->ConvertAccumulatorToName(args[1]);
+        VisitForRegisterValue(property->value(), args[2]);
+        VisitSetHomeObject(args[2], literal, property);
+        builder()
+            ->LoadLiteral(Smi::FromInt(NONE))
+            .StoreAccumulatorInRegister(args[3]);
+        Runtime::FunctionId function_id =
+            property->kind() == ObjectLiteral::Property::GETTER
+                ? Runtime::kDefineGetterPropertyUnchecked
+                : Runtime::kDefineSetterPropertyUnchecked;
+        builder()->CallRuntime(function_id, args);
+        break;
+      }
       case ObjectLiteral::Property::PROTOTYPE:
         UNREACHABLE();  // Handled specially above.
         break;
-      case ObjectLiteral::Property::GETTER:
-        builder()->CallRuntime(Runtime::kDefineGetterPropertyUnchecked,
-                               literal_argument, 4);
-        break;
-      case ObjectLiteral::Property::SETTER:
-        builder()->CallRuntime(Runtime::kDefineSetterPropertyUnchecked,
-                               literal_argument, 4);
-        break;
     }
   }
 
-  execution_result()->SetResultInRegister(literal);
+  builder()->LoadAccumulatorWithRegister(literal);
 }
 
 void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   // Deep-copy the literal boilerplate.
+  int runtime_flags = expr->ComputeFlags();
+  bool use_fast_shallow_clone =
+      (runtime_flags & ArrayLiteral::kShallowElements) != 0 &&
+      expr->values()->length() <= JSArray::kInitialMaxFastElementArray;
+  uint8_t flags =
+      CreateArrayLiteralFlags::Encode(use_fast_shallow_clone, runtime_flags);
   builder()->CreateArrayLiteral(expr->constant_elements(),
-                                expr->literal_index(),
-                                expr->ComputeFlags(true));
+                                expr->literal_index(), flags);
   Register index, literal;
 
   // Evaluate all the non-constant subexpressions and store them into the
@@ -1915,7 +1748,6 @@
     // Restore literal array into accumulator.
     builder()->LoadAccumulatorWithRegister(literal);
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
@@ -1953,7 +1785,6 @@
       BuildHoleCheckForVariableLoad(variable);
       break;
     }
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       builder()->LoadGlobal(feedback_index(slot), typeof_mode);
       break;
@@ -1964,85 +1795,82 @@
       Register context_reg;
       if (context) {
         context_reg = context->reg();
+        depth = 0;
       } else {
-        context_reg = register_allocator()->NewRegister();
-        // Walk the context chain to find the context at the given depth.
-        // TODO(rmcilroy): Perform this work in a bytecode handler once we have
-        // a generic mechanism for performing jumps in interpreter.cc.
-        // TODO(mythria): Also update bytecode graph builder with correct depth
-        // when this changes.
-        builder()
-            ->LoadAccumulatorWithRegister(execution_context()->reg())
-            .StoreAccumulatorInRegister(context_reg);
-        for (int i = 0; i < depth; ++i) {
-          builder()
-              ->LoadContextSlot(context_reg, Context::PREVIOUS_INDEX)
-              .StoreAccumulatorInRegister(context_reg);
-        }
+        context_reg = execution_context()->reg();
       }
 
-      builder()->LoadContextSlot(context_reg, variable->index());
+      builder()->LoadContextSlot(context_reg, variable->index(), depth);
       BuildHoleCheckForVariableLoad(variable);
       break;
     }
     case VariableLocation::LOOKUP: {
-      builder()->LoadLookupSlot(variable->name(), typeof_mode);
+      switch (variable->mode()) {
+        case DYNAMIC_LOCAL: {
+          Variable* local_variable = variable->local_if_not_shadowed();
+          int depth =
+              execution_context()->ContextChainDepth(local_variable->scope());
+          builder()->LoadLookupContextSlot(variable->name(), typeof_mode,
+                                           local_variable->index(), depth);
+          BuildHoleCheckForVariableLoad(variable);
+          break;
+        }
+        case DYNAMIC_GLOBAL: {
+          int depth = scope()->ContextChainLengthUntilOutermostSloppyEval();
+          builder()->LoadLookupGlobalSlot(variable->name(), typeof_mode,
+                                          feedback_index(slot), depth);
+          break;
+        }
+        default:
+          builder()->LoadLookupSlot(variable->name(), typeof_mode);
+      }
       break;
     }
-    case VariableLocation::MODULE:
-      UNREACHABLE();
+    case VariableLocation::MODULE: {
+      ModuleDescriptor* descriptor = scope()->GetModuleScope()->module();
+      if (variable->IsExport()) {
+        auto it = descriptor->regular_exports().find(variable->raw_name());
+        DCHECK(it != descriptor->regular_exports().end());
+        Register export_name = register_allocator()->NewRegister();
+        builder()
+            ->LoadLiteral(it->second->export_name->string())
+            .StoreAccumulatorInRegister(export_name)
+            .CallRuntime(Runtime::kLoadModuleExport, export_name);
+      } else {
+        auto it = descriptor->regular_imports().find(variable->raw_name());
+        DCHECK(it != descriptor->regular_imports().end());
+        RegisterList args = register_allocator()->NewRegisterList(2);
+        builder()
+            ->LoadLiteral(it->second->import_name->string())
+            .StoreAccumulatorInRegister(args[0])
+            .LoadLiteral(Smi::FromInt(it->second->module_request))
+            .StoreAccumulatorInRegister(args[1])
+            .CallRuntime(Runtime::kLoadModuleImport, args);
+      }
+      BuildHoleCheckForVariableLoad(variable);
+      break;
+    }
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitVariableLoadForAccumulatorValue(
     Variable* variable, FeedbackVectorSlot slot, TypeofMode typeof_mode) {
-  AccumulatorResultScope accumulator_result(this);
+  ValueResultScope accumulator_result(this);
   VisitVariableLoad(variable, slot, typeof_mode);
 }
 
-Register BytecodeGenerator::VisitVariableLoadForRegisterValue(
-    Variable* variable, FeedbackVectorSlot slot, TypeofMode typeof_mode) {
-  RegisterResultScope register_scope(this);
-  VisitVariableLoad(variable, slot, typeof_mode);
-  return register_scope.ResultRegister();
+void BytecodeGenerator::BuildReturn() {
+  if (FLAG_trace) {
+    RegisterAllocationScope register_scope(this);
+    Register result = register_allocator()->NewRegister();
+    // Runtime returns {result} value, preserving accumulator.
+    builder()->StoreAccumulatorInRegister(result).CallRuntime(
+        Runtime::kTraceExit, result);
+  }
+  builder()->Return();
 }
 
-void BytecodeGenerator::BuildNamedSuperPropertyLoad(Register receiver,
-                                                    Register home_object,
-                                                    Register name) {
-  DCHECK(Register::AreContiguous(receiver, home_object, name));
-  builder()->CallRuntime(Runtime::kLoadFromSuper, receiver, 3);
-}
-
-void BytecodeGenerator::BuildKeyedSuperPropertyLoad(Register receiver,
-                                                    Register home_object,
-                                                    Register key) {
-  DCHECK(Register::AreContiguous(receiver, home_object, key));
-  builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, receiver, 3);
-}
-
-void BytecodeGenerator::BuildNamedSuperPropertyStore(Register receiver,
-                                                     Register home_object,
-                                                     Register name,
-                                                     Register value) {
-  DCHECK(Register::AreContiguous(receiver, home_object, name, value));
-  Runtime::FunctionId function_id = is_strict(language_mode())
-                                        ? Runtime::kStoreToSuper_Strict
-                                        : Runtime::kStoreToSuper_Sloppy;
-  builder()->CallRuntime(function_id, receiver, 4);
-}
-
-void BytecodeGenerator::BuildKeyedSuperPropertyStore(Register receiver,
-                                                     Register home_object,
-                                                     Register key,
-                                                     Register value) {
-  DCHECK(Register::AreContiguous(receiver, home_object, key, value));
-  Runtime::FunctionId function_id = is_strict(language_mode())
-                                        ? Runtime::kStoreKeyedToSuper_Strict
-                                        : Runtime::kStoreKeyedToSuper_Sloppy;
-  builder()->CallRuntime(function_id, receiver, 4);
-}
+void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
 
 void BytecodeGenerator::BuildAbort(BailoutReason bailout_reason) {
   RegisterAllocationScope register_scope(this);
@@ -2050,14 +1878,14 @@
   builder()
       ->LoadLiteral(Smi::FromInt(static_cast<int>(bailout_reason)))
       .StoreAccumulatorInRegister(reason)
-      .CallRuntime(Runtime::kAbort, reason, 1);
+      .CallRuntime(Runtime::kAbort, reason);
 }
 
 void BytecodeGenerator::BuildThrowReferenceError(Handle<String> name) {
   RegisterAllocationScope register_scope(this);
   Register name_reg = register_allocator()->NewRegister();
   builder()->LoadLiteral(name).StoreAccumulatorInRegister(name_reg).CallRuntime(
-      Runtime::kThrowReferenceError, name_reg, 1);
+      Runtime::kThrowReferenceError, name_reg);
 }
 
 void BytecodeGenerator::BuildThrowIfHole(Handle<String> name) {
@@ -2083,7 +1911,6 @@
 
 void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
                                                             Token::Value op) {
-  DCHECK(variable->mode() != CONST_LEGACY);
   if (op != Token::INIT) {
     // Perform an initialization check for let/const declared variables.
     // E.g. let x = (x = 20); is not allowed.
@@ -2128,20 +1955,13 @@
         builder()->LoadAccumulatorWithRegister(value_temp);
       }
 
-      if ((mode == CONST || mode == CONST_LEGACY) && op != Token::INIT) {
-        if (mode == CONST || is_strict(language_mode())) {
-          builder()->CallRuntime(Runtime::kThrowConstAssignError, Register(),
-                                 0);
-        }
-        // Non-initializing assignments to legacy constants are ignored
-        // in sloppy mode. Break here to avoid storing into variable.
-        break;
+      if (mode != CONST || op == Token::INIT) {
+        builder()->StoreAccumulatorInRegister(destination);
+      } else if (variable->throw_on_const_assignment(language_mode())) {
+        builder()->CallRuntime(Runtime::kThrowConstAssignError);
       }
-
-      builder()->StoreAccumulatorInRegister(destination);
       break;
     }
-    case VariableLocation::GLOBAL:
     case VariableLocation::UNALLOCATED: {
       builder()->StoreGlobal(variable->name(), feedback_index(slot),
                              language_mode());
@@ -2154,24 +1974,9 @@
 
       if (context) {
         context_reg = context->reg();
+        depth = 0;
       } else {
-        Register value_temp = register_allocator()->NewRegister();
-        context_reg = register_allocator()->NewRegister();
-        // Walk the context chain to find the context at the given depth.
-        // TODO(rmcilroy): Perform this work in a bytecode handler once we have
-        // a generic mechanism for performing jumps in interpreter.cc.
-        // TODO(mythria): Also update bytecode graph builder with correct depth
-        // when this changes.
-        builder()
-            ->StoreAccumulatorInRegister(value_temp)
-            .LoadAccumulatorWithRegister(execution_context()->reg())
-            .StoreAccumulatorInRegister(context_reg);
-        for (int i = 0; i < depth; ++i) {
-          builder()
-              ->LoadContextSlot(context_reg, Context::PREVIOUS_INDEX)
-              .StoreAccumulatorInRegister(context_reg);
-        }
-        builder()->LoadAccumulatorWithRegister(value_temp);
+        context_reg = execution_context()->reg();
       }
 
       if (hole_check_required) {
@@ -2179,38 +1984,57 @@
         Register value_temp = register_allocator()->NewRegister();
         builder()
             ->StoreAccumulatorInRegister(value_temp)
-            .LoadContextSlot(context_reg, variable->index());
+            .LoadContextSlot(context_reg, variable->index(), depth);
 
         BuildHoleCheckForVariableAssignment(variable, op);
         builder()->LoadAccumulatorWithRegister(value_temp);
       }
 
-      if ((mode == CONST || mode == CONST_LEGACY) && op != Token::INIT) {
-        if (mode == CONST || is_strict(language_mode())) {
-          builder()->CallRuntime(Runtime::kThrowConstAssignError, Register(),
-                                 0);
-        }
-        // Non-initializing assignments to legacy constants are ignored
-        // in sloppy mode. Break here to avoid storing into variable.
-        break;
+      if (mode != CONST || op == Token::INIT) {
+        builder()->StoreContextSlot(context_reg, variable->index(), depth);
+      } else if (variable->throw_on_const_assignment(language_mode())) {
+        builder()->CallRuntime(Runtime::kThrowConstAssignError);
       }
-
-      builder()->StoreContextSlot(context_reg, variable->index());
       break;
     }
     case VariableLocation::LOOKUP: {
-      DCHECK_NE(CONST_LEGACY, variable->mode());
       builder()->StoreLookupSlot(variable->name(), language_mode());
       break;
     }
-    case VariableLocation::MODULE:
-      UNREACHABLE();
+    case VariableLocation::MODULE: {
+      DCHECK(IsDeclaredVariableMode(mode));
+
+      if (mode == CONST && op != Token::INIT) {
+        builder()->CallRuntime(Runtime::kThrowConstAssignError);
+        break;
+      }
+
+      // If we don't throw above, we know that we're dealing with an
+      // export because imports are const and we do not generate initializing
+      // assignments for them.
+      DCHECK(variable->IsExport());
+
+      ModuleDescriptor* mod = scope()->GetModuleScope()->module();
+      // There may be several export names for this local name, but it doesn't
+      // matter which one we pick, as they all map to the same cell.
+      auto it = mod->regular_exports().find(variable->raw_name());
+      DCHECK(it != mod->regular_exports().end());
+
+      RegisterList args = register_allocator()->NewRegisterList(2);
+      builder()
+          ->StoreAccumulatorInRegister(args[1])
+          .LoadLiteral(it->second->export_name->string())
+          .StoreAccumulatorInRegister(args[0])
+          .CallRuntime(Runtime::kStoreModuleExport, args);
+      break;
+    }
   }
 }
 
 void BytecodeGenerator::VisitAssignment(Assignment* expr) {
   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
-  Register object, key, home_object, value;
+  Register object, key;
+  RegisterList super_property_args;
   Handle<String> name;
 
   // Left-hand side can only be a property, a global or a variable slot.
@@ -2229,44 +2053,29 @@
     }
     case KEYED_PROPERTY: {
       object = VisitForRegisterValue(property->obj());
-      if (expr->is_compound()) {
-        // Use VisitForAccumulator and store to register so that the key is
-        // still in the accumulator for loading the old value below.
-        key = register_allocator()->NewRegister();
-        VisitForAccumulatorValue(property->key());
-        builder()->StoreAccumulatorInRegister(key);
-      } else {
-        key = VisitForRegisterValue(property->key());
-      }
+      key = VisitForRegisterValue(property->key());
       break;
     }
     case NAMED_SUPER_PROPERTY: {
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      object = register_allocator()->NextConsecutiveRegister();
-      home_object = register_allocator()->NextConsecutiveRegister();
-      key = register_allocator()->NextConsecutiveRegister();
-      value = register_allocator()->NextConsecutiveRegister();
+      super_property_args = register_allocator()->NewRegisterList(4);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), object);
-      VisitForRegisterValue(super_property->home_object(), home_object);
+      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
+      VisitForRegisterValue(super_property->home_object(),
+                            super_property_args[1]);
       builder()
           ->LoadLiteral(property->key()->AsLiteral()->AsPropertyName())
-          .StoreAccumulatorInRegister(key);
+          .StoreAccumulatorInRegister(super_property_args[2]);
       break;
     }
     case KEYED_SUPER_PROPERTY: {
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      object = register_allocator()->NextConsecutiveRegister();
-      home_object = register_allocator()->NextConsecutiveRegister();
-      key = register_allocator()->NextConsecutiveRegister();
-      value = register_allocator()->NextConsecutiveRegister();
-      builder()->StoreAccumulatorInRegister(value);
+      super_property_args = register_allocator()->NewRegisterList(4);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), object);
-      VisitForRegisterValue(super_property->home_object(), home_object);
-      VisitForRegisterValue(property->key(), key);
+      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
+      VisitForRegisterValue(super_property->home_object(),
+                            super_property_args[1]);
+      VisitForRegisterValue(property->key(), super_property_args[2]);
       break;
     }
   }
@@ -2274,17 +2083,16 @@
   // Evaluate the value and potentially handle compound assignments by loading
   // the left-hand side value and performing a binary operation.
   if (expr->is_compound()) {
-    Register old_value;
+    Register old_value = register_allocator()->NewRegister();
     switch (assign_type) {
       case VARIABLE: {
         VariableProxy* proxy = expr->target()->AsVariableProxy();
-        old_value = VisitVariableLoadForRegisterValue(
-            proxy->var(), proxy->VariableFeedbackSlot());
+        VisitVariableLoad(proxy->var(), proxy->VariableFeedbackSlot());
+        builder()->StoreAccumulatorInRegister(old_value);
         break;
       }
       case NAMED_PROPERTY: {
         FeedbackVectorSlot slot = property->PropertyFeedbackSlot();
-        old_value = register_allocator()->NewRegister();
         builder()
             ->LoadNamedProperty(object, name, feedback_index(slot))
             .StoreAccumulatorInRegister(old_value);
@@ -2294,22 +2102,23 @@
         // Key is already in accumulator at this point due to evaluating the
         // LHS above.
         FeedbackVectorSlot slot = property->PropertyFeedbackSlot();
-        old_value = register_allocator()->NewRegister();
         builder()
             ->LoadKeyedProperty(object, feedback_index(slot))
             .StoreAccumulatorInRegister(old_value);
         break;
       }
       case NAMED_SUPER_PROPERTY: {
-        old_value = register_allocator()->NewRegister();
-        BuildNamedSuperPropertyLoad(object, home_object, key);
-        builder()->StoreAccumulatorInRegister(old_value);
+        builder()
+            ->CallRuntime(Runtime::kLoadFromSuper,
+                          super_property_args.Truncate(3))
+            .StoreAccumulatorInRegister(old_value);
         break;
       }
       case KEYED_SUPER_PROPERTY: {
-        old_value = register_allocator()->NewRegister();
-        BuildKeyedSuperPropertyLoad(object, home_object, key);
-        builder()->StoreAccumulatorInRegister(old_value);
+        builder()
+            ->CallRuntime(Runtime::kLoadKeyedFromSuper,
+                          super_property_args.Truncate(3))
+            .StoreAccumulatorInRegister(old_value);
         break;
       }
     }
@@ -2342,17 +2151,18 @@
                                     language_mode());
       break;
     case NAMED_SUPER_PROPERTY: {
-      builder()->StoreAccumulatorInRegister(value);
-      BuildNamedSuperPropertyStore(object, home_object, key, value);
+      builder()
+          ->StoreAccumulatorInRegister(super_property_args[3])
+          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
       break;
     }
     case KEYED_SUPER_PROPERTY: {
-      builder()->StoreAccumulatorInRegister(value);
-      BuildKeyedSuperPropertyStore(object, home_object, key, value);
+      builder()
+          ->StoreAccumulatorInRegister(super_property_args[3])
+          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
       break;
     }
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitYield(Yield* expr) {
@@ -2382,12 +2192,12 @@
 
     Register input = register_allocator()->NewRegister();
     builder()
-        ->CallRuntime(Runtime::kInlineGeneratorGetInputOrDebugPos, generator, 1)
+        ->CallRuntime(Runtime::kInlineGeneratorGetInputOrDebugPos, generator)
         .StoreAccumulatorInRegister(input);
 
     Register resume_mode = register_allocator()->NewRegister();
     builder()
-        ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator, 1)
+        ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator)
         .StoreAccumulatorInRegister(resume_mode);
 
     // Now dispatch on resume mode.
@@ -2407,14 +2217,12 @@
 
     builder()->Bind(&resume_with_return);
     {
-      register_allocator()->PrepareForConsecutiveAllocations(2);
-      Register value = register_allocator()->NextConsecutiveRegister();
-      Register done = register_allocator()->NextConsecutiveRegister();
+      RegisterList args = register_allocator()->NewRegisterList(2);
       builder()
-          ->MoveRegister(input, value)
+          ->MoveRegister(input, args[0])
           .LoadTrue()
-          .StoreAccumulatorInRegister(done)
-          .CallRuntime(Runtime::kInlineCreateIterResultObject, value, 2);
+          .StoreAccumulatorInRegister(args[1])
+          .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
       execution_control()->ReturnAccumulator();
     }
 
@@ -2430,18 +2238,12 @@
     builder()->Bind(&resume_with_next);
     builder()->LoadAccumulatorWithRegister(input);
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitThrow(Throw* expr) {
   VisitForAccumulatorValue(expr->exception());
   builder()->SetExpressionPosition(expr);
   builder()->Throw();
-  // Throw statements are modeled as expressions instead of statements. These
-  // are converted from assignment statements in Rewriter::ReWrite pass. An
-  // assignment statement expects a value in the accumulator. This is a hack to
-  // avoid DCHECK fails assert accumulator has been set.
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* expr) {
@@ -2469,56 +2271,45 @@
       VisitKeyedSuperPropertyLoad(expr, Register::invalid_value());
       break;
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitPropertyLoadForAccumulator(Register obj,
                                                         Property* expr) {
-  AccumulatorResultScope result_scope(this);
+  ValueResultScope result_scope(this);
   VisitPropertyLoad(obj, expr);
 }
 
 void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
                                                     Register opt_receiver_out) {
   RegisterAllocationScope register_scope(this);
-  register_allocator()->PrepareForConsecutiveAllocations(3);
-
-  Register receiver, home_object, name;
-  receiver = register_allocator()->NextConsecutiveRegister();
-  home_object = register_allocator()->NextConsecutiveRegister();
-  name = register_allocator()->NextConsecutiveRegister();
   SuperPropertyReference* super_property =
       property->obj()->AsSuperPropertyReference();
-  VisitForRegisterValue(super_property->this_var(), receiver);
-  VisitForRegisterValue(super_property->home_object(), home_object);
+  RegisterList args = register_allocator()->NewRegisterList(3);
+  VisitForRegisterValue(super_property->this_var(), args[0]);
+  VisitForRegisterValue(super_property->home_object(), args[1]);
   builder()
       ->LoadLiteral(property->key()->AsLiteral()->AsPropertyName())
-      .StoreAccumulatorInRegister(name);
-  BuildNamedSuperPropertyLoad(receiver, home_object, name);
+      .StoreAccumulatorInRegister(args[2])
+      .CallRuntime(Runtime::kLoadFromSuper, args);
 
   if (opt_receiver_out.is_valid()) {
-    builder()->MoveRegister(receiver, opt_receiver_out);
+    builder()->MoveRegister(args[0], opt_receiver_out);
   }
 }
 
 void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
                                                     Register opt_receiver_out) {
   RegisterAllocationScope register_scope(this);
-  register_allocator()->PrepareForConsecutiveAllocations(3);
-
-  Register receiver, home_object, key;
-  receiver = register_allocator()->NextConsecutiveRegister();
-  home_object = register_allocator()->NextConsecutiveRegister();
-  key = register_allocator()->NextConsecutiveRegister();
   SuperPropertyReference* super_property =
       property->obj()->AsSuperPropertyReference();
-  VisitForRegisterValue(super_property->this_var(), receiver);
-  VisitForRegisterValue(super_property->home_object(), home_object);
-  VisitForRegisterValue(property->key(), key);
-  BuildKeyedSuperPropertyLoad(receiver, home_object, key);
+  RegisterList args = register_allocator()->NewRegisterList(3);
+  VisitForRegisterValue(super_property->this_var(), args[0]);
+  VisitForRegisterValue(super_property->home_object(), args[1]);
+  VisitForRegisterValue(property->key(), args[2]);
+  builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
 
   if (opt_receiver_out.is_valid()) {
-    builder()->MoveRegister(receiver, opt_receiver_out);
+    builder()->MoveRegister(args[0], opt_receiver_out);
   }
 }
 
@@ -2533,36 +2324,13 @@
   }
 }
 
-Register BytecodeGenerator::VisitArguments(ZoneList<Expression*>* args) {
-  if (args->length() == 0) {
-    return Register();
+void BytecodeGenerator::VisitArguments(ZoneList<Expression*>* args,
+                                       RegisterList arg_regs,
+                                       size_t first_argument_register) {
+  // Visit arguments.
+  for (int i = 0; i < static_cast<int>(args->length()); i++) {
+    VisitForRegisterValue(args->at(i), arg_regs[first_argument_register + i]);
   }
-
-  // Visit arguments and place in a contiguous block of temporary
-  // registers.  Return the first temporary register corresponding to
-  // the first argument.
-  //
-  // NB the caller may have already called
-  // PrepareForConsecutiveAllocations() with args->length() + N. The
-  // second call here will be a no-op provided there have been N or
-  // less calls to NextConsecutiveRegister(). Otherwise, the arguments
-  // here will be consecutive, but they will not be consecutive with
-  // earlier consecutive allocations made by the caller.
-  register_allocator()->PrepareForConsecutiveAllocations(args->length());
-
-  // Visit for first argument that goes into returned register
-  Register first_arg = register_allocator()->NextConsecutiveRegister();
-  VisitForAccumulatorValue(args->at(0));
-  builder()->StoreAccumulatorInRegister(first_arg);
-
-  // Visit remaining arguments
-  for (int i = 1; i < static_cast<int>(args->length()); i++) {
-    Register ith_arg = register_allocator()->NextConsecutiveRegister();
-    VisitForAccumulatorValue(args->at(i));
-    builder()->StoreAccumulatorInRegister(ith_arg);
-    DCHECK(ith_arg.index() - i == first_arg.index());
-  }
-  return first_arg;
 }
 
 void BytecodeGenerator::VisitCall(Call* expr) {
@@ -2573,18 +2341,15 @@
     return VisitCallSuper(expr);
   }
 
+  Register callee = register_allocator()->NewRegister();
+
+  // Add an argument register for the receiver.
+  RegisterList args =
+      register_allocator()->NewRegisterList(expr->arguments()->length() + 1);
+  Register receiver = args[0];
+
   // Prepare the callee and the receiver to the function call. This depends on
   // the semantics of the underlying call type.
-
-  // The receiver and arguments need to be allocated consecutively for
-  // Call(). We allocate the callee and receiver consecutively for calls to
-  // %LoadLookupSlotForCall. Future optimizations could avoid this there are
-  // no arguments or the receiver and arguments are already consecutive.
-  ZoneList<Expression*>* args = expr->arguments();
-  register_allocator()->PrepareForConsecutiveAllocations(args->length() + 2);
-  Register callee = register_allocator()->NextConsecutiveRegister();
-  Register receiver = register_allocator()->NextConsecutiveRegister();
-
   switch (call_type) {
     case Call::NAMED_PROPERTY_CALL:
     case Call::KEYED_PROPERTY_CALL: {
@@ -2613,12 +2378,13 @@
 
         // Call %LoadLookupSlotForCall to get the callee and receiver.
         DCHECK(Register::AreContiguous(callee, receiver));
+        RegisterList result_pair(callee.index(), 2);
         Variable* variable = callee_expr->AsVariableProxy()->var();
         builder()
             ->LoadLiteral(variable->name())
             .StoreAccumulatorInRegister(name)
-            .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name, 1,
-                                callee);
+            .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
+                                result_pair);
         break;
       }
       // Fall through.
@@ -2626,8 +2392,7 @@
     }
     case Call::OTHER_CALL: {
       builder()->LoadUndefined().StoreAccumulatorInRegister(receiver);
-      VisitForAccumulatorValue(callee_expr);
-      builder()->StoreAccumulatorInRegister(callee);
+      VisitForRegisterValue(callee_expr, callee);
       break;
     }
     case Call::NAMED_SUPER_PROPERTY_CALL: {
@@ -2647,42 +2412,34 @@
       break;
   }
 
-  // Evaluate all arguments to the function call and store in sequential
+  // Evaluate all arguments to the function call and store in sequential args
   // registers.
-  Register arg = VisitArguments(args);
-  CHECK(args->length() == 0 || arg.index() == receiver.index() + 1);
+  VisitArguments(expr->arguments(), args, 1);
 
   // Resolve callee for a potential direct eval call. This block will mutate the
   // callee value.
-  if (call_type == Call::POSSIBLY_EVAL_CALL && args->length() > 0) {
+  if (call_type == Call::POSSIBLY_EVAL_CALL &&
+      expr->arguments()->length() > 0) {
     RegisterAllocationScope inner_register_scope(this);
-    register_allocator()->PrepareForConsecutiveAllocations(6);
-    Register callee_for_eval = register_allocator()->NextConsecutiveRegister();
-    Register source = register_allocator()->NextConsecutiveRegister();
-    Register function = register_allocator()->NextConsecutiveRegister();
-    Register language = register_allocator()->NextConsecutiveRegister();
-    Register eval_scope_position =
-        register_allocator()->NextConsecutiveRegister();
-    Register eval_position = register_allocator()->NextConsecutiveRegister();
-
     // Set up arguments for ResolvePossiblyDirectEval by copying callee, source
     // strings and function closure, and loading language and
     // position.
+    RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
     builder()
-        ->MoveRegister(callee, callee_for_eval)
-        .MoveRegister(arg, source)
-        .MoveRegister(Register::function_closure(), function)
+        ->MoveRegister(callee, runtime_call_args[0])
+        .MoveRegister(args[1], runtime_call_args[1])
+        .MoveRegister(Register::function_closure(), runtime_call_args[2])
         .LoadLiteral(Smi::FromInt(language_mode()))
-        .StoreAccumulatorInRegister(language)
+        .StoreAccumulatorInRegister(runtime_call_args[3])
         .LoadLiteral(
             Smi::FromInt(execution_context()->scope()->start_position()))
-        .StoreAccumulatorInRegister(eval_scope_position)
+        .StoreAccumulatorInRegister(runtime_call_args[4])
         .LoadLiteral(Smi::FromInt(expr->position()))
-        .StoreAccumulatorInRegister(eval_position);
+        .StoreAccumulatorInRegister(runtime_call_args[5]);
 
     // Call ResolvePossiblyDirectEval and modify the callee.
     builder()
-        ->CallRuntime(Runtime::kResolvePossiblyDirectEval, callee_for_eval, 6)
+        ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
         .StoreAccumulatorInRegister(callee);
   }
 
@@ -2692,16 +2449,14 @@
   if (expr->CallFeedbackICSlot().IsInvalid()) {
     DCHECK(call_type == Call::POSSIBLY_EVAL_CALL);
     // Valid type feedback slots can only be greater than kReservedIndexCount.
-    // We use 0 to indicate an invalid slot it. Statically assert that 0 cannot
+    // We use 0 to indicate an invalid slot id. Statically assert that 0 cannot
     // be a valid slot id.
     STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
     feedback_slot_index = 0;
   } else {
     feedback_slot_index = feedback_index(expr->CallFeedbackICSlot());
   }
-  builder()->Call(callee, receiver, 1 + args->length(), feedback_slot_index,
-                  expr->tail_call_mode());
-  execution_result()->SetResultInAccumulator();
+  builder()->Call(callee, args, feedback_slot_index, expr->tail_call_mode());
 }
 
 void BytecodeGenerator::VisitCallSuper(Call* expr) {
@@ -2709,17 +2464,15 @@
   SuperCallReference* super = expr->expression()->AsSuperCallReference();
 
   // Prepare the constructor to the super call.
-  Register this_function = register_allocator()->NewRegister();
-  VisitForAccumulatorValue(super->this_function_var());
-  builder()
-      ->StoreAccumulatorInRegister(this_function)
-      .CallRuntime(Runtime::kInlineGetSuperConstructor, this_function, 1);
+  Register this_function = VisitForRegisterValue(super->this_function_var());
+  builder()->CallRuntime(Runtime::kInlineGetSuperConstructor, this_function);
 
   Register constructor = this_function;  // Re-use dead this_function register.
   builder()->StoreAccumulatorInRegister(constructor);
 
-  ZoneList<Expression*>* args = expr->arguments();
-  Register first_arg = VisitArguments(args);
+  RegisterList args =
+      register_allocator()->NewRegisterList(expr->arguments()->length());
+  VisitArguments(expr->arguments(), args);
 
   // The new target is loaded into the accumulator from the
   // {new.target} variable.
@@ -2727,51 +2480,51 @@
 
   // Call construct.
   builder()->SetExpressionPosition(expr);
-  builder()->New(constructor, first_arg, args->length());
-  execution_result()->SetResultInAccumulator();
+  // Valid type feedback slots can only be greater than kReservedIndexCount.
+  // Assert that 0 cannot be a valid slot id.
+  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
+  // Type feedback is not necessary for super constructor calls. The type
+  // information can be inferred in most cases. Slot id 0 indicates type
+  // feedback is not required.
+  builder()->New(constructor, args, 0);
 }
 
 void BytecodeGenerator::VisitCallNew(CallNew* expr) {
-  Register constructor = register_allocator()->NewRegister();
-  VisitForAccumulatorValue(expr->expression());
-  builder()->StoreAccumulatorInRegister(constructor);
-
-  ZoneList<Expression*>* args = expr->arguments();
-  Register first_arg = VisitArguments(args);
+  Register constructor = VisitForRegisterValue(expr->expression());
+  RegisterList args =
+      register_allocator()->NewRegisterList(expr->arguments()->length());
+  VisitArguments(expr->arguments(), args);
 
   builder()->SetExpressionPosition(expr);
   // The accumulator holds new target which is the same as the
   // constructor for CallNew.
   builder()
       ->LoadAccumulatorWithRegister(constructor)
-      .New(constructor, first_arg, args->length());
-  execution_result()->SetResultInAccumulator();
+      .New(constructor, args, feedback_index(expr->CallNewFeedbackSlot()));
 }
 
 void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
-  ZoneList<Expression*>* args = expr->arguments();
   if (expr->is_jsruntime()) {
     // Allocate a register for the receiver and load it with undefined.
-    register_allocator()->PrepareForConsecutiveAllocations(1 + args->length());
-    Register receiver = register_allocator()->NextConsecutiveRegister();
+    RegisterList args =
+        register_allocator()->NewRegisterList(expr->arguments()->length() + 1);
+    Register receiver = args[0];
     builder()->LoadUndefined().StoreAccumulatorInRegister(receiver);
-    Register first_arg = VisitArguments(args);
-    CHECK(args->length() == 0 || first_arg.index() == receiver.index() + 1);
-    builder()->CallJSRuntime(expr->context_index(), receiver,
-                             1 + args->length());
+    VisitArguments(expr->arguments(), args, 1);
+    builder()->CallJSRuntime(expr->context_index(), args);
   } else {
     // Evaluate all arguments to the runtime call.
-    Register first_arg = VisitArguments(args);
+    RegisterList args =
+        register_allocator()->NewRegisterList(expr->arguments()->length());
+    VisitArguments(expr->arguments(), args);
     Runtime::FunctionId function_id = expr->function()->function_id;
-    builder()->CallRuntime(function_id, first_arg, args->length());
+    builder()->CallRuntime(function_id, args);
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
   VisitForEffect(expr->expression());
   builder()->LoadUndefined();
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
@@ -2785,7 +2538,6 @@
     VisitForAccumulatorValue(expr->expression());
   }
   builder()->TypeOf();
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
@@ -2802,7 +2554,6 @@
   } else {
     VisitForAccumulatorValue(expr->expression());
     builder()->LogicalNot();
-    execution_result()->SetResultInAccumulator();
   }
 }
 
@@ -2846,16 +2597,15 @@
     Variable* variable = proxy->var();
     DCHECK(is_sloppy(language_mode()) || variable->is_this());
     switch (variable->location()) {
-      case VariableLocation::GLOBAL:
       case VariableLocation::UNALLOCATED: {
         // Global var, let, const or variables not explicitly declared.
         Register native_context = register_allocator()->NewRegister();
         Register global_object = register_allocator()->NewRegister();
         builder()
             ->LoadContextSlot(execution_context()->reg(),
-                              Context::NATIVE_CONTEXT_INDEX)
+                              Context::NATIVE_CONTEXT_INDEX, 0)
             .StoreAccumulatorInRegister(native_context)
-            .LoadContextSlot(native_context, Context::EXTENSION_INDEX)
+            .LoadContextSlot(native_context, Context::EXTENSION_INDEX, 0)
             .StoreAccumulatorInRegister(global_object)
             .LoadLiteral(variable->name())
             .Delete(global_object, language_mode());
@@ -2878,7 +2628,7 @@
         builder()
             ->LoadLiteral(variable->name())
             .StoreAccumulatorInRegister(name_reg)
-            .CallRuntime(Runtime::kDeleteLookupSlot, name_reg, 1);
+            .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
         break;
       }
       default:
@@ -2889,7 +2639,6 @@
     VisitForEffect(expr->expression());
     builder()->LoadTrue();
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
@@ -2902,7 +2651,8 @@
   bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
 
   // Evaluate LHS expression and get old value.
-  Register object, home_object, key, old_value, value;
+  Register object, key, old_value;
+  RegisterList super_property_args;
   Handle<String> name;
   switch (assign_type) {
     case VARIABLE: {
@@ -2930,44 +2680,36 @@
       break;
     }
     case NAMED_SUPER_PROPERTY: {
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      object = register_allocator()->NextConsecutiveRegister();
-      home_object = register_allocator()->NextConsecutiveRegister();
-      key = register_allocator()->NextConsecutiveRegister();
-      value = register_allocator()->NextConsecutiveRegister();
+      super_property_args = register_allocator()->NewRegisterList(4);
+      RegisterList load_super_args = super_property_args.Truncate(3);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), object);
-      VisitForRegisterValue(super_property->home_object(), home_object);
+      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
+      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
       builder()
           ->LoadLiteral(property->key()->AsLiteral()->AsPropertyName())
-          .StoreAccumulatorInRegister(key);
-      BuildNamedSuperPropertyLoad(object, home_object, key);
+          .StoreAccumulatorInRegister(load_super_args[2])
+          .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
       break;
     }
     case KEYED_SUPER_PROPERTY: {
-      register_allocator()->PrepareForConsecutiveAllocations(4);
-      object = register_allocator()->NextConsecutiveRegister();
-      home_object = register_allocator()->NextConsecutiveRegister();
-      key = register_allocator()->NextConsecutiveRegister();
-      value = register_allocator()->NextConsecutiveRegister();
-      builder()->StoreAccumulatorInRegister(value);
+      super_property_args = register_allocator()->NewRegisterList(4);
+      RegisterList load_super_args = super_property_args.Truncate(3);
       SuperPropertyReference* super_property =
           property->obj()->AsSuperPropertyReference();
-      VisitForRegisterValue(super_property->this_var(), object);
-      VisitForRegisterValue(super_property->home_object(), home_object);
-      VisitForRegisterValue(property->key(), key);
-      BuildKeyedSuperPropertyLoad(object, home_object, key);
+      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
+      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
+      VisitForRegisterValue(property->key(), load_super_args[2]);
+      builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
       break;
     }
   }
 
   // Save result for postfix expressions.
   if (is_postfix) {
-    old_value = register_allocator()->outer()->NewRegister();
-
     // Convert old value into a number before saving it.
-    builder()->CastAccumulatorToNumber(old_value);
+    old_value = register_allocator()->NewRegister();
+    builder()->ConvertAccumulatorToNumber(old_value);
   }
 
   // Perform +1/-1 operation.
@@ -2994,22 +2736,22 @@
       break;
     }
     case NAMED_SUPER_PROPERTY: {
-      builder()->StoreAccumulatorInRegister(value);
-      BuildNamedSuperPropertyStore(object, home_object, key, value);
+      builder()
+          ->StoreAccumulatorInRegister(super_property_args[3])
+          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
       break;
     }
     case KEYED_SUPER_PROPERTY: {
-      builder()->StoreAccumulatorInRegister(value);
-      BuildKeyedSuperPropertyStore(object, home_object, key, value);
+      builder()
+          ->StoreAccumulatorInRegister(super_property_args[3])
+          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
       break;
     }
   }
 
   // Restore old value for postfix expressions.
   if (is_postfix) {
-    execution_result()->SetResultInRegister(old_value);
-  } else {
-    execution_result()->SetResultInAccumulator();
+    builder()->LoadAccumulatorWithRegister(old_value);
   }
 }
 
@@ -3034,8 +2776,8 @@
   Register lhs = VisitForRegisterValue(expr->left());
   VisitForAccumulatorValue(expr->right());
   builder()->SetExpressionPosition(expr);
-  builder()->CompareOperation(expr->op(), lhs);
-  execution_result()->SetResultInAccumulator();
+  FeedbackVectorSlot slot = expr->CompareOperationFeedbackSlot();
+  builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
 }
 
 void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
@@ -3045,7 +2787,6 @@
   VisitForAccumulatorValue(expr->right());
   FeedbackVectorSlot slot = expr->BinaryOperationFeedbackSlot();
   builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
-  execution_result()->SetResultInAccumulator();
 }
 
 void BytecodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }
@@ -3055,7 +2796,7 @@
 }
 
 void BytecodeGenerator::VisitThisFunction(ThisFunction* expr) {
-  execution_result()->SetResultInRegister(Register::function_closure());
+  builder()->LoadAccumulatorWithRegister(Register::function_closure());
 }
 
 void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
@@ -3065,8 +2806,7 @@
 
 void BytecodeGenerator::VisitSuperPropertyReference(
     SuperPropertyReference* expr) {
-  builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError, Register(0), 0);
-  execution_result()->SetResultInAccumulator();
+  builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
 }
 
 void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
@@ -3106,7 +2846,6 @@
       VisitForAccumulatorValue(right);
       builder()->Bind(&end_label);
     }
-    execution_result()->SetResultInAccumulator();
   }
 }
 
@@ -3142,7 +2881,6 @@
       VisitForAccumulatorValue(right);
       builder()->Bind(&end_label);
     }
-    execution_result()->SetResultInAccumulator();
   }
 }
 
@@ -3150,35 +2888,45 @@
   Visit(expr->expression());
 }
 
-void BytecodeGenerator::VisitNewLocalFunctionContext() {
-  AccumulatorResultScope accumulator_execution_result(this);
+void BytecodeGenerator::BuildNewLocalActivationContext() {
+  ValueResultScope value_execution_result(this);
   Scope* scope = this->scope();
 
-  // Allocate a new local context.
+  // Create the appropriate context.
   if (scope->is_script_scope()) {
-    RegisterAllocationScope register_scope(this);
-    Register closure = register_allocator()->NewRegister();
-    Register scope_info = register_allocator()->NewRegister();
-    DCHECK(Register::AreContiguous(closure, scope_info));
+    RegisterList args = register_allocator()->NewRegisterList(2);
     builder()
         ->LoadAccumulatorWithRegister(Register::function_closure())
-        .StoreAccumulatorInRegister(closure)
+        .StoreAccumulatorInRegister(args[0])
         .LoadLiteral(scope->scope_info())
-        .StoreAccumulatorInRegister(scope_info)
-        .CallRuntime(Runtime::kNewScriptContext, closure, 2);
+        .StoreAccumulatorInRegister(args[1])
+        .CallRuntime(Runtime::kNewScriptContext, args);
+  } else if (scope->is_module_scope()) {
+    // We don't need to do anything for the outer script scope.
+    DCHECK(scope->outer_scope()->is_script_scope());
+
+    // A JSFunction representing a module is called with the module object as
+    // its sole argument, which we pass on to PushModuleContext.
+    RegisterList args = register_allocator()->NewRegisterList(3);
+    builder()
+        ->MoveRegister(builder()->Parameter(1), args[0])
+        .LoadAccumulatorWithRegister(Register::function_closure())
+        .StoreAccumulatorInRegister(args[1])
+        .LoadLiteral(scope->scope_info())
+        .StoreAccumulatorInRegister(args[2])
+        .CallRuntime(Runtime::kPushModuleContext, args);
   } else {
     int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
     if (slot_count <= FastNewFunctionContextStub::kMaximumSlots) {
       builder()->CreateFunctionContext(slot_count);
     } else {
       builder()->CallRuntime(Runtime::kNewFunctionContext,
-                             Register::function_closure(), 1);
+                             Register::function_closure());
     }
   }
-  execution_result()->SetResultInAccumulator();
 }
 
-void BytecodeGenerator::VisitBuildLocalActivationContext() {
+void BytecodeGenerator::BuildLocalActivationContextInitialization() {
   DeclarationScope* scope = this->scope();
 
   if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
@@ -3187,7 +2935,7 @@
     // Context variable (at bottom of the context chain).
     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
     builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
-        execution_context()->reg(), variable->index());
+        execution_context()->reg(), variable->index(), 0);
   }
 
   // Copy parameters into context if necessary.
@@ -3201,56 +2949,53 @@
     Register parameter(builder()->Parameter(i + 1));
     // Context variable (at bottom of the context chain).
     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
-    builder()->LoadAccumulatorWithRegister(parameter)
-        .StoreContextSlot(execution_context()->reg(), variable->index());
+    builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
+        execution_context()->reg(), variable->index(), 0);
   }
 }
 
-void BytecodeGenerator::VisitNewLocalBlockContext(Scope* scope) {
-  AccumulatorResultScope accumulator_execution_result(this);
+void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
+  ValueResultScope value_execution_result(this);
   DCHECK(scope->is_block_scope());
 
   VisitFunctionClosureForContext();
   builder()->CreateBlockContext(scope->scope_info());
-  execution_result()->SetResultInAccumulator();
 }
 
-void BytecodeGenerator::VisitNewLocalWithContext() {
-  AccumulatorResultScope accumulator_execution_result(this);
+void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
+  ValueResultScope value_execution_result(this);
 
   Register extension_object = register_allocator()->NewRegister();
 
-  builder()->CastAccumulatorToJSObject(extension_object);
+  builder()->ConvertAccumulatorToObject(extension_object);
   VisitFunctionClosureForContext();
-  builder()->CreateWithContext(extension_object);
-  execution_result()->SetResultInAccumulator();
+  builder()->CreateWithContext(extension_object, scope->scope_info());
 }
 
-void BytecodeGenerator::VisitNewLocalCatchContext(Variable* variable) {
-  AccumulatorResultScope accumulator_execution_result(this);
+void BytecodeGenerator::BuildNewLocalCatchContext(Variable* variable,
+                                                  Scope* scope) {
+  ValueResultScope value_execution_result(this);
   DCHECK(variable->IsContextSlot());
 
   Register exception = register_allocator()->NewRegister();
   builder()->StoreAccumulatorInRegister(exception);
   VisitFunctionClosureForContext();
-  builder()->CreateCatchContext(exception, variable->name());
-  execution_result()->SetResultInAccumulator();
+  builder()->CreateCatchContext(exception, variable->name(),
+                                scope->scope_info());
 }
 
 void BytecodeGenerator::VisitObjectLiteralAccessor(
     Register home_object, ObjectLiteralProperty* property, Register value_out) {
-  // TODO(rmcilroy): Replace value_out with VisitForRegister();
   if (property == nullptr) {
     builder()->LoadNull().StoreAccumulatorInRegister(value_out);
   } else {
-    VisitForAccumulatorValue(property->value());
-    builder()->StoreAccumulatorInRegister(value_out);
+    VisitForRegisterValue(property->value(), value_out);
     VisitSetHomeObject(value_out, home_object, property);
   }
 }
 
 void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
-                                           ObjectLiteralProperty* property,
+                                           LiteralProperty* property,
                                            int slot_number) {
   Expression* expr = property->value();
   if (FunctionLiteral::NeedsHomeObject(expr)) {
@@ -3302,38 +3047,44 @@
   // Store the new target we were called with in the given variable.
   builder()->LoadAccumulatorWithRegister(Register::new_target());
   VisitVariableAssignment(variable, Token::INIT, FeedbackVectorSlot::Invalid());
+
+  // TODO(mstarzinger): The <new.target> register is not set by the deoptimizer
+  // and we need to make sure {BytecodeRegisterOptimizer} flushes its state
+  // before a local variable containing the <new.target> is used. Using a label
+  // as below flushes the entire pipeline; we should be more specific here.
+  BytecodeLabel flush_state_label;
+  builder()->Bind(&flush_state_label);
 }
 
 void BytecodeGenerator::VisitFunctionClosureForContext() {
-  AccumulatorResultScope accumulator_execution_result(this);
+  ValueResultScope value_execution_result(this);
   DeclarationScope* closure_scope =
       execution_context()->scope()->GetClosureScope();
-  if (closure_scope->is_script_scope() ||
-      closure_scope->is_module_scope()) {
+  if (closure_scope->is_script_scope()) {
     // Contexts nested in the native context have a canonical empty function as
     // their closure, not the anonymous closure containing the global code.
     Register native_context = register_allocator()->NewRegister();
     builder()
         ->LoadContextSlot(execution_context()->reg(),
-                          Context::NATIVE_CONTEXT_INDEX)
+                          Context::NATIVE_CONTEXT_INDEX, 0)
         .StoreAccumulatorInRegister(native_context)
-        .LoadContextSlot(native_context, Context::CLOSURE_INDEX);
+        .LoadContextSlot(native_context, Context::CLOSURE_INDEX, 0);
   } else if (closure_scope->is_eval_scope()) {
     // Contexts created by a call to eval have the same closure as the
     // context calling eval, not the anonymous closure containing the eval
     // code. Fetch it from the context.
     builder()->LoadContextSlot(execution_context()->reg(),
-                               Context::CLOSURE_INDEX);
+                               Context::CLOSURE_INDEX, 0);
   } else {
-    DCHECK(closure_scope->is_function_scope());
+    DCHECK(closure_scope->is_function_scope() ||
+           closure_scope->is_module_scope());
     builder()->LoadAccumulatorWithRegister(Register::function_closure());
   }
-  execution_result()->SetResultInAccumulator();
 }
 
 // Visits the expression |expr| and places the result in the accumulator.
 void BytecodeGenerator::VisitForAccumulatorValue(Expression* expr) {
-  AccumulatorResultScope accumulator_scope(this);
+  ValueResultScope accumulator_scope(this);
   Visit(expr);
 }
 
@@ -3354,16 +3105,17 @@
 // Visits the expression |expr| and returns the register containing
 // the expression result.
 Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
-  RegisterResultScope register_scope(this);
-  Visit(expr);
-  return register_scope.ResultRegister();
+  VisitForAccumulatorValue(expr);
+  Register result = register_allocator()->NewRegister();
+  builder()->StoreAccumulatorInRegister(result);
+  return result;
 }
 
 // Visits the expression |expr| and stores the expression result in
 // |destination|.
 void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
                                               Register destination) {
-  AccumulatorResultScope register_scope(this);
+  ValueResultScope register_scope(this);
   Visit(expr);
   builder()->StoreAccumulatorInRegister(destination);
 }
@@ -3412,6 +3164,16 @@
   return TypeFeedbackVector::GetIndex(slot);
 }
 
+Runtime::FunctionId BytecodeGenerator::StoreToSuperRuntimeId() {
+  return is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
+                                    : Runtime::kStoreToSuper_Sloppy;
+}
+
+Runtime::FunctionId BytecodeGenerator::StoreKeyedToSuperRuntimeId() {
+  return is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
+                                    : Runtime::kStoreKeyedToSuper_Sloppy;
+}
+
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
diff --git a/src/interpreter/bytecode-generator.h b/src/interpreter/bytecode-generator.h
index ee72135..03067de 100644
--- a/src/interpreter/bytecode-generator.h
+++ b/src/interpreter/bytecode-generator.h
@@ -24,7 +24,7 @@
  public:
   explicit BytecodeGenerator(CompilationInfo* info);
 
-  void GenerateBytecode();
+  void GenerateBytecode(uintptr_t stack_limit);
   Handle<BytecodeArray> FinalizeBytecode(Isolate* isolate);
 
 #define DECLARE_VISIT(type) void Visit##type(type* node);
@@ -36,7 +36,6 @@
   void VisitStatements(ZoneList<Statement*>* statments);
 
  private:
-  class AccumulatorResultScope;
   class ContextScope;
   class ControlScope;
   class ControlScopeForBreakable;
@@ -47,9 +46,9 @@
   class ExpressionResultScope;
   class EffectResultScope;
   class GlobalDeclarationsBuilder;
-  class RegisterResultScope;
   class RegisterAllocationScope;
   class TestResultScope;
+  class ValueResultScope;
 
   enum class TestFallthrough { kThen, kElse, kNone };
 
@@ -73,8 +72,10 @@
   // Used by flow control routines to evaluate loop condition.
   void VisitCondition(Expression* expr);
 
-  // Helper visitors which perform common operations.
-  Register VisitArguments(ZoneList<Expression*>* arguments);
+  // Visit the argument expressions in |args| and store them in |arg_regs|,
+  // starting at register |first_argument_register| in the list.
+  void VisitArguments(ZoneList<Expression*>* args, RegisterList arg_regs,
+                      size_t first_argument_register = 0);
 
   // Visit a keyed super property load. The optional
   // |opt_receiver_out| register will have the receiver stored to it
@@ -104,15 +105,8 @@
   void VisitVariableAssignment(Variable* variable, Token::Value op,
                                FeedbackVectorSlot slot);
 
-  void BuildNamedSuperPropertyStore(Register receiver, Register home_object,
-                                    Register name, Register value);
-  void BuildKeyedSuperPropertyStore(Register receiver, Register home_object,
-                                    Register key, Register value);
-  void BuildNamedSuperPropertyLoad(Register receiver, Register home_object,
-                                   Register name);
-  void BuildKeyedSuperPropertyLoad(Register receiver, Register home_object,
-                                   Register key);
-
+  void BuildReturn();
+  void BuildReThrow();
   void BuildAbort(BailoutReason bailout_reason);
   void BuildThrowIfHole(Handle<String> name);
   void BuildThrowIfNotHole(Handle<String> name);
@@ -125,6 +119,12 @@
   void BuildIndexedJump(Register value, size_t start_index, size_t size,
                         ZoneVector<BytecodeLabel>& targets);
 
+  void BuildNewLocalActivationContext();
+  void BuildLocalActivationContextInitialization();
+  void BuildNewLocalBlockContext(Scope* scope);
+  void BuildNewLocalCatchContext(Variable* variable, Scope* scope);
+  void BuildNewLocalWithContext(Scope* scope);
+
   void VisitGeneratorPrologue();
 
   void VisitArgumentsObject(Variable* variable);
@@ -133,18 +133,12 @@
   void VisitClassLiteralForRuntimeDefinition(ClassLiteral* expr);
   void VisitClassLiteralProperties(ClassLiteral* expr, Register literal,
                                    Register prototype);
-  void VisitClassLiteralStaticPrototypeWithComputedName(Register name);
   void VisitThisFunctionVariable(Variable* variable);
   void VisitNewTargetVariable(Variable* variable);
-  void VisitNewLocalFunctionContext();
-  void VisitBuildLocalActivationContext();
   void VisitBlockDeclarationsAndStatements(Block* stmt);
-  void VisitNewLocalBlockContext(Scope* scope);
-  void VisitNewLocalCatchContext(Variable* variable);
-  void VisitNewLocalWithContext();
   void VisitFunctionClosureForContext();
   void VisitSetHomeObject(Register value, Register home_object,
-                          ObjectLiteralProperty* property, int slot_number = 0);
+                          LiteralProperty* property, int slot_number = 0);
   void VisitObjectLiteralAccessor(Register home_object,
                                   ObjectLiteralProperty* property,
                                   Register value_out);
@@ -168,13 +162,10 @@
   void VisitForTest(Expression* expr, BytecodeLabels* then_labels,
                     BytecodeLabels* else_labels, TestFallthrough fallthrough);
 
-  // Methods for tracking and remapping register.
-  void RecordStoreToRegister(Register reg);
-  Register LoadFromAliasedRegister(Register reg);
-
-  // Initialize an array of temporary registers with consecutive registers.
-  template <size_t N>
-  void InitializeWithConsecutiveRegisters(Register (&registers)[N]);
+  // Returns the runtime function id for a store to super for the function's
+  // language mode.
+  inline Runtime::FunctionId StoreToSuperRuntimeId();
+  inline Runtime::FunctionId StoreKeyedToSuperRuntimeId();
 
   inline BytecodeArrayBuilder* builder() const { return builder_; }
   inline Zone* zone() const { return zone_; }
@@ -193,12 +184,8 @@
     execution_result_ = execution_result;
   }
   ExpressionResultScope* execution_result() const { return execution_result_; }
-  inline void set_register_allocator(
-      RegisterAllocationScope* register_allocator) {
-    register_allocator_ = register_allocator;
-  }
-  RegisterAllocationScope* register_allocator() const {
-    return register_allocator_;
+  BytecodeRegisterAllocator* register_allocator() const {
+    return builder()->register_allocator();
   }
 
   GlobalDeclarationsBuilder* globals_builder() { return globals_builder_; }
@@ -222,7 +209,6 @@
   ControlScope* execution_control_;
   ContextScope* execution_context_;
   ExpressionResultScope* execution_result_;
-  RegisterAllocationScope* register_allocator_;
 
   ZoneVector<BytecodeLabel> generator_resume_points_;
   Register generator_state_;
diff --git a/src/interpreter/bytecode-label.h b/src/interpreter/bytecode-label.h
index d96cf66..b5f602d 100644
--- a/src/interpreter/bytecode-label.h
+++ b/src/interpreter/bytecode-label.h
@@ -5,7 +5,7 @@
 #ifndef V8_INTERPRETER_BYTECODE_LABEL_H_
 #define V8_INTERPRETER_BYTECODE_LABEL_H_
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/interpreter/bytecode-operands.cc b/src/interpreter/bytecode-operands.cc
new file mode 100644
index 0000000..6be81fe
--- /dev/null
+++ b/src/interpreter/bytecode-operands.cc
@@ -0,0 +1,89 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/interpreter/bytecode-operands.h"
+
+#include <iomanip>
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+namespace {
+
+const char* AccumulatorUseToString(AccumulatorUse accumulator_use) {
+  switch (accumulator_use) {
+    case AccumulatorUse::kNone:
+      return "None";
+    case AccumulatorUse::kRead:
+      return "Read";
+    case AccumulatorUse::kWrite:
+      return "Write";
+    case AccumulatorUse::kReadWrite:
+      return "ReadWrite";
+  }
+  UNREACHABLE();
+  return "";
+}
+
+const char* OperandTypeToString(OperandType operand_type) {
+  switch (operand_type) {
+#define CASE(Name, _)        \
+  case OperandType::k##Name: \
+    return #Name;
+    OPERAND_TYPE_LIST(CASE)
+#undef CASE
+  }
+  UNREACHABLE();
+  return "";
+}
+
+const char* OperandScaleToString(OperandScale operand_scale) {
+  switch (operand_scale) {
+#define CASE(Name, _)         \
+  case OperandScale::k##Name: \
+    return #Name;
+    OPERAND_SCALE_LIST(CASE)
+#undef CASE
+  }
+  UNREACHABLE();
+  return "";
+}
+
+const char* OperandSizeToString(OperandSize operand_size) {
+  switch (operand_size) {
+    case OperandSize::kNone:
+      return "None";
+    case OperandSize::kByte:
+      return "Byte";
+    case OperandSize::kShort:
+      return "Short";
+    case OperandSize::kQuad:
+      return "Quad";
+  }
+  UNREACHABLE();
+  return "";
+}
+
+}  // namespace
+
+std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use) {
+  return os << AccumulatorUseToString(use);
+}
+
+std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size) {
+  return os << OperandSizeToString(operand_size);
+}
+
+std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale) {
+  return os << OperandScaleToString(operand_scale);
+}
+
+std::ostream& operator<<(std::ostream& os, const OperandType& operand_type) {
+  return os << OperandTypeToString(operand_type);
+}
+
+}  // namespace interpreter
+}  // namespace internal
+}  // namespace v8
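
The OperandTypeToString and OperandScaleToString helpers added above use the usual list-macro ("X macro") pattern: the same OPERAND_TYPE_LIST / OPERAND_SCALE_LIST expansion that declares the enumerators is expanded a second time inside a switch, with a locally defined CASE macro stringizing each name. Below is a minimal standalone sketch of that pattern; the COLOR_LIST, Color, and ColorToString names are illustrative only (not V8 identifiers), and the single-argument list is a simplification of V8's two-argument V(Name, info) form.

#include <iostream>

// One macro invocation per list entry; V receives the entry name.
#define COLOR_LIST(V) \
  V(Red)              \
  V(Green)            \
  V(Blue)

enum class Color {
#define DECLARE(Name) k##Name,
  COLOR_LIST(DECLARE)
#undef DECLARE
};

// Expanding the same list inside the switch keeps the enum and its string
// form in sync: adding an entry to COLOR_LIST updates both automatically.
const char* ColorToString(Color color) {
  switch (color) {
#define CASE(Name)     \
  case Color::k##Name: \
    return #Name;
    COLOR_LIST(CASE)
#undef CASE
  }
  return "";  // Unreachable when the switch covers every enumerator.
}

int main() {
  std::cout << ColorToString(Color::kGreen) << "\n";  // prints "Green"
}
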
diff --git a/src/interpreter/bytecode-operands.h b/src/interpreter/bytecode-operands.h
new file mode 100644
index 0000000..b35c486
--- /dev/null
+++ b/src/interpreter/bytecode-operands.h
@@ -0,0 +1,126 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_INTERPRETER_BYTECODE_OPERANDS_H_
+#define V8_INTERPRETER_BYTECODE_OPERANDS_H_
+
+#include "src/globals.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+#define INVALID_OPERAND_TYPE_LIST(V) V(None, OperandTypeInfo::kNone)
+
+#define REGISTER_INPUT_OPERAND_TYPE_LIST(V)        \
+  V(RegList, OperandTypeInfo::kScalableSignedByte) \
+  V(Reg, OperandTypeInfo::kScalableSignedByte)     \
+  V(RegPair, OperandTypeInfo::kScalableSignedByte)
+
+#define REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)          \
+  V(RegOut, OperandTypeInfo::kScalableSignedByte)     \
+  V(RegOutPair, OperandTypeInfo::kScalableSignedByte) \
+  V(RegOutTriple, OperandTypeInfo::kScalableSignedByte)
+
+#define SCALAR_OPERAND_TYPE_LIST(V)                   \
+  V(Flag8, OperandTypeInfo::kFixedUnsignedByte)       \
+  V(IntrinsicId, OperandTypeInfo::kFixedUnsignedByte) \
+  V(Idx, OperandTypeInfo::kScalableUnsignedByte)      \
+  V(UImm, OperandTypeInfo::kScalableUnsignedByte)     \
+  V(Imm, OperandTypeInfo::kScalableSignedByte)        \
+  V(RegCount, OperandTypeInfo::kScalableUnsignedByte) \
+  V(RuntimeId, OperandTypeInfo::kFixedUnsignedShort)
+
+#define REGISTER_OPERAND_TYPE_LIST(V) \
+  REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
+  REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)
+
+#define NON_REGISTER_OPERAND_TYPE_LIST(V) \
+  INVALID_OPERAND_TYPE_LIST(V)            \
+  SCALAR_OPERAND_TYPE_LIST(V)
+
+// The list of operand types used by bytecodes.
+#define OPERAND_TYPE_LIST(V)        \
+  NON_REGISTER_OPERAND_TYPE_LIST(V) \
+  REGISTER_OPERAND_TYPE_LIST(V)
+
+// Enumeration of scaling factors applicable to scalable operands. Code
+// relies on being able to cast values to integer scaling values.
+#define OPERAND_SCALE_LIST(V) \
+  V(Single, 1)                \
+  V(Double, 2)                \
+  V(Quadruple, 4)
+
+enum class OperandScale : uint8_t {
+#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
+  OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
+#undef DECLARE_OPERAND_SCALE
+      kLast = kQuadruple
+};
+
+// Enumeration of the size classes of operand types used by
+// bytecodes. Code relies on being able to cast values to integer
+// types to get the size in bytes.
+enum class OperandSize : uint8_t {
+  kNone = 0,
+  kByte = 1,
+  kShort = 2,
+  kQuad = 4,
+  kLast = kQuad
+};
+
+// Primitive operand info used to summarize properties of operands.
+// Columns are Name, IsScalable, IsUnsigned, UnscaledSize.
+#define OPERAND_TYPE_INFO_LIST(V)                         \
+  V(None, false, false, OperandSize::kNone)               \
+  V(ScalableSignedByte, true, false, OperandSize::kByte)  \
+  V(ScalableUnsignedByte, true, true, OperandSize::kByte) \
+  V(FixedUnsignedByte, false, true, OperandSize::kByte)   \
+  V(FixedUnsignedShort, false, true, OperandSize::kShort)
+
+enum class OperandTypeInfo : uint8_t {
+#define DECLARE_OPERAND_TYPE_INFO(Name, ...) k##Name,
+  OPERAND_TYPE_INFO_LIST(DECLARE_OPERAND_TYPE_INFO)
+#undef DECLARE_OPERAND_TYPE_INFO
+};
+
+// Enumeration of operand types used by bytecodes.
+enum class OperandType : uint8_t {
+#define DECLARE_OPERAND_TYPE(Name, _) k##Name,
+  OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE)
+#undef DECLARE_OPERAND_TYPE
+#define COUNT_OPERAND_TYPES(x, _) +1
+  // The COUNT_OPERAND_TYPES macro will turn this into kLast = -1 +1 +1...
+  // which will evaluate to the same value as the last operand.
+  kLast = -1 OPERAND_TYPE_LIST(COUNT_OPERAND_TYPES)
+#undef COUNT_OPERAND_TYPES
+};
+
+enum class AccumulatorUse : uint8_t {
+  kNone = 0,
+  kRead = 1 << 0,
+  kWrite = 1 << 1,
+  kReadWrite = kRead | kWrite
+};
+
+inline AccumulatorUse operator&(AccumulatorUse lhs, AccumulatorUse rhs) {
+  int result = static_cast<int>(lhs) & static_cast<int>(rhs);
+  return static_cast<AccumulatorUse>(result);
+}
+
+inline AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
+  int result = static_cast<int>(lhs) | static_cast<int>(rhs);
+  return static_cast<AccumulatorUse>(result);
+}
+
+std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use);
+std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale);
+std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size);
+std::ostream& operator<<(std::ostream& os, const OperandType& operand_type);
+
+}  // namespace interpreter
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_INTERPRETER_BYTECODE_OPERANDS_H_
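
The OperandType enum in the new header keeps kLast in sync with the operand list through an X-macro counting idiom. A minimal standalone sketch of that idiom, using a hypothetical COLOR_LIST macro rather than the V8 lists, shows how the "-1 +1 +1 ..." expansion evaluates to the value of the last enumerator:

#include <cstdint>
#include <iostream>

// Hypothetical list macro, analogous in shape to OPERAND_TYPE_LIST(V).
#define COLOR_LIST(V) \
  V(Red)              \
  V(Green)            \
  V(Blue)

enum class Color : uint8_t {
#define DECLARE_COLOR(Name) k##Name,
  COLOR_LIST(DECLARE_COLOR)
#undef DECLARE_COLOR
#define COUNT_COLORS(Name) +1
  // Expands to kLast = -1 +1 +1 +1, i.e. 2, the same value as kBlue.
  kLast = -1 COLOR_LIST(COUNT_COLORS)
#undef COUNT_COLORS
};

int main() {
  static_assert(Color::kLast == Color::kBlue, "kLast tracks the final entry");
  std::cout << static_cast<int>(Color::kLast) << "\n";  // prints 2
  return 0;
}

The same pattern keeps OperandType::kLast correct automatically whenever an entry is added to or removed from OPERAND_TYPE_LIST.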
diff --git a/src/interpreter/bytecode-peephole-optimizer.cc b/src/interpreter/bytecode-peephole-optimizer.cc
index 11aebb6..c87d31c 100644
--- a/src/interpreter/bytecode-peephole-optimizer.cc
+++ b/src/interpreter/bytecode-peephole-optimizer.cc
@@ -13,17 +13,17 @@
 
 BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
     BytecodePipelineStage* next_stage)
-    : next_stage_(next_stage) {
+    : next_stage_(next_stage), last_(Bytecode::kIllegal) {
   InvalidateLast();
 }
 
 // override
 Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray(
-    Isolate* isolate, int fixed_register_count, int parameter_count,
+    Isolate* isolate, int register_count, int parameter_count,
     Handle<FixedArray> handler_table) {
   Flush();
-  return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
-                                      parameter_count, handler_table);
+  return next_stage_->ToBytecodeArray(isolate, register_count, parameter_count,
+                                      handler_table);
 }
 
 // override
@@ -142,7 +142,7 @@
   current->set_bytecode(new_bytecode, last->operand(0), current->operand(0),
                         current->operand(1));
   if (last->source_info().is_valid()) {
-    current->source_info().Clone(last->source_info());
+    current->source_info_ptr()->Clone(last->source_info());
   }
 }
 
@@ -153,7 +153,7 @@
   current->set_bytecode(new_bytecode, 0, current->operand(0),
                         current->operand(1));
   if (last->source_info().is_valid()) {
-    current->source_info().Clone(last->source_info());
+    current->source_info_ptr()->Clone(last->source_info());
   }
 }
 
@@ -223,7 +223,7 @@
       // |node| can not have a valid source position if the source
       // position of last() is valid (per rules in
       // CanElideLastBasedOnSourcePosition()).
-      node->source_info().Clone(last()->source_info());
+      node->source_info_ptr()->Clone(last()->source_info());
     }
     SetLast(node);
   } else {
@@ -314,7 +314,7 @@
   if (!CanElideLastBasedOnSourcePosition(node)) {
     next_stage()->Write(last());
   } else if (!node->source_info().is_valid()) {
-    node->source_info().Clone(last()->source_info());
+    node->source_info_ptr()->Clone(last()->source_info());
   }
   InvalidateLast();
 }
diff --git a/src/interpreter/bytecode-peephole-optimizer.h b/src/interpreter/bytecode-peephole-optimizer.h
index 2f4a35f..cedd742 100644
--- a/src/interpreter/bytecode-peephole-optimizer.h
+++ b/src/interpreter/bytecode-peephole-optimizer.h
@@ -28,7 +28,7 @@
   void BindLabel(BytecodeLabel* label) override;
   void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
   Handle<BytecodeArray> ToBytecodeArray(
-      Isolate* isolate, int fixed_register_count, int parameter_count,
+      Isolate* isolate, int register_count, int parameter_count,
       Handle<FixedArray> handler_table) override;
 
  private:
diff --git a/src/interpreter/bytecode-pipeline.cc b/src/interpreter/bytecode-pipeline.cc
index 66b8bdf..6e6a6b6 100644
--- a/src/interpreter/bytecode-pipeline.cc
+++ b/src/interpreter/bytecode-pipeline.cc
@@ -11,45 +11,6 @@
 namespace internal {
 namespace interpreter {
 
-BytecodeNode::BytecodeNode(Bytecode bytecode) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
-  bytecode_ = bytecode;
-}
-
-BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
-  bytecode_ = bytecode;
-  operands_[0] = operand0;
-}
-
-BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
-                           uint32_t operand1) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
-  bytecode_ = bytecode;
-  operands_[0] = operand0;
-  operands_[1] = operand1;
-}
-
-BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
-                           uint32_t operand1, uint32_t operand2) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
-  bytecode_ = bytecode;
-  operands_[0] = operand0;
-  operands_[1] = operand1;
-  operands_[2] = operand2;
-}
-
-BytecodeNode::BytecodeNode(Bytecode bytecode, uint32_t operand0,
-                           uint32_t operand1, uint32_t operand2,
-                           uint32_t operand3) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 4);
-  bytecode_ = bytecode;
-  operands_[0] = operand0;
-  operands_[1] = operand1;
-  operands_[2] = operand2;
-  operands_[3] = operand3;
-}
-
 BytecodeNode::BytecodeNode(const BytecodeNode& other) {
   memcpy(this, &other, sizeof(other));
 }
@@ -83,23 +44,6 @@
 #endif  // DEBUG
 }
 
-void BytecodeNode::Transform(Bytecode new_bytecode, uint32_t extra_operand) {
-  DCHECK_EQ(Bytecodes::NumberOfOperands(new_bytecode),
-            Bytecodes::NumberOfOperands(bytecode()) + 1);
-  DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 1 ||
-         Bytecodes::GetOperandType(new_bytecode, 0) ==
-             Bytecodes::GetOperandType(bytecode(), 0));
-  DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 2 ||
-         Bytecodes::GetOperandType(new_bytecode, 1) ==
-             Bytecodes::GetOperandType(bytecode(), 1));
-  DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 3 ||
-         Bytecodes::GetOperandType(new_bytecode, 2) ==
-             Bytecodes::GetOperandType(bytecode(), 2));
-  DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 4);
-  operands_[operand_count()] = extra_operand;
-  bytecode_ = new_bytecode;
-}
-
 bool BytecodeNode::operator==(const BytecodeNode& other) const {
   if (this == &other) {
     return true;
diff --git a/src/interpreter/bytecode-pipeline.h b/src/interpreter/bytecode-pipeline.h
index 1668bab..0b1a1f1 100644
--- a/src/interpreter/bytecode-pipeline.h
+++ b/src/interpreter/bytecode-pipeline.h
@@ -9,7 +9,7 @@
 #include "src/interpreter/bytecode-register.h"
 #include "src/interpreter/bytecodes.h"
 #include "src/objects.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -47,7 +47,7 @@
 
   // Flush the pipeline and generate a bytecode array.
   virtual Handle<BytecodeArray> ToBytecodeArray(
-      Isolate* isolate, int fixed_register_count, int parameter_count,
+      Isolate* isolate, int register_count, int parameter_count,
       Handle<FixedArray> handler_table) = 0;
 };
 
@@ -134,21 +134,69 @@
 
   PositionType position_type_;
   int source_position_;
-
-  DISALLOW_COPY_AND_ASSIGN(BytecodeSourceInfo);
 };
 
 // A container for a generated bytecode, its operands, and source information.
 // These must be allocated by a BytecodeNodeAllocator instance.
 class BytecodeNode final : ZoneObject {
  public:
-  explicit BytecodeNode(Bytecode bytecode = Bytecode::kIllegal);
-  BytecodeNode(Bytecode bytecode, uint32_t operand0);
-  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1);
-  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
-               uint32_t operand2);
-  BytecodeNode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
-               uint32_t operand2, uint32_t operand3);
+  INLINE(BytecodeNode(const Bytecode bytecode,
+                      BytecodeSourceInfo* source_info = nullptr))
+      : bytecode_(bytecode),
+        operand_count_(0),
+        operand_scale_(OperandScale::kSingle) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
+    AttachSourceInfo(source_info);
+  }
+
+  INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
+                      BytecodeSourceInfo* source_info = nullptr))
+      : bytecode_(bytecode),
+        operand_count_(1),
+        operand_scale_(OperandScale::kSingle) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
+    SetOperand(0, operand0);
+    AttachSourceInfo(source_info);
+  }
+
+  INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
+                      uint32_t operand1,
+                      BytecodeSourceInfo* source_info = nullptr))
+      : bytecode_(bytecode),
+        operand_count_(2),
+        operand_scale_(OperandScale::kSingle) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
+    SetOperand(0, operand0);
+    SetOperand(1, operand1);
+    AttachSourceInfo(source_info);
+  }
+
+  INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
+                      uint32_t operand1, uint32_t operand2,
+                      BytecodeSourceInfo* source_info = nullptr))
+      : bytecode_(bytecode),
+        operand_count_(3),
+        operand_scale_(OperandScale::kSingle) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
+    SetOperand(0, operand0);
+    SetOperand(1, operand1);
+    SetOperand(2, operand2);
+    AttachSourceInfo(source_info);
+  }
+
+  INLINE(BytecodeNode(const Bytecode bytecode, uint32_t operand0,
+                      uint32_t operand1, uint32_t operand2, uint32_t operand3,
+                      BytecodeSourceInfo* source_info = nullptr))
+      : bytecode_(bytecode),
+        operand_count_(4),
+        operand_scale_(OperandScale::kSingle) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), operand_count());
+    SetOperand(0, operand0);
+    SetOperand(1, operand1);
+    SetOperand(2, operand2);
+    SetOperand(3, operand3);
+    AttachSourceInfo(source_info);
+  }
 
   BytecodeNode(const BytecodeNode& other);
   BytecodeNode& operator=(const BytecodeNode& other);
@@ -162,25 +210,33 @@
   void set_bytecode(Bytecode bytecode) {
     DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 0);
     bytecode_ = bytecode;
+    operand_count_ = 0;
+    operand_scale_ = OperandScale::kSingle;
   }
   void set_bytecode(Bytecode bytecode, uint32_t operand0) {
     DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 1);
     bytecode_ = bytecode;
-    operands_[0] = operand0;
+    operand_count_ = 1;
+    operand_scale_ = OperandScale::kSingle;
+    SetOperand(0, operand0);
   }
   void set_bytecode(Bytecode bytecode, uint32_t operand0, uint32_t operand1) {
     DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 2);
     bytecode_ = bytecode;
-    operands_[0] = operand0;
-    operands_[1] = operand1;
+    operand_count_ = 2;
+    operand_scale_ = OperandScale::kSingle;
+    SetOperand(0, operand0);
+    SetOperand(1, operand1);
   }
   void set_bytecode(Bytecode bytecode, uint32_t operand0, uint32_t operand1,
                     uint32_t operand2) {
     DCHECK_EQ(Bytecodes::NumberOfOperands(bytecode), 3);
     bytecode_ = bytecode;
-    operands_[0] = operand0;
-    operands_[1] = operand1;
-    operands_[2] = operand2;
+    operand_count_ = 3;
+    operand_scale_ = OperandScale::kSingle;
+    SetOperand(0, operand0);
+    SetOperand(1, operand1);
+    SetOperand(2, operand2);
   }
 
   // Clone |other|.
@@ -191,7 +247,36 @@
 
   // Transform to a node representing |new_bytecode| which has one
   // operand more than the current bytecode.
-  void Transform(Bytecode new_bytecode, uint32_t extra_operand);
+  void Transform(Bytecode new_bytecode, uint32_t extra_operand) {
+    DCHECK_EQ(Bytecodes::NumberOfOperands(new_bytecode),
+              Bytecodes::NumberOfOperands(bytecode()) + 1);
+    DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 1 ||
+           Bytecodes::GetOperandType(new_bytecode, 0) ==
+               Bytecodes::GetOperandType(bytecode(), 0));
+    DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 2 ||
+           Bytecodes::GetOperandType(new_bytecode, 1) ==
+               Bytecodes::GetOperandType(bytecode(), 1));
+    DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 3 ||
+           Bytecodes::GetOperandType(new_bytecode, 2) ==
+               Bytecodes::GetOperandType(bytecode(), 2));
+    DCHECK(Bytecodes::NumberOfOperands(bytecode()) < 4);
+
+    bytecode_ = new_bytecode;
+    operand_count_++;
+    SetOperand(operand_count() - 1, extra_operand);
+  }
+
+  // Updates the operand at |operand_index| to |operand|.
+  void UpdateOperand(int operand_index, uint32_t operand) {
+    DCHECK_LE(operand_index, Bytecodes::NumberOfOperands(bytecode()));
+    operands_[operand_index] = operand;
+    if ((Bytecodes::OperandIsScalableSignedByte(bytecode(), operand_index) &&
+         Bytecodes::ScaleForSignedOperand(operand) != operand_scale_) ||
+        (Bytecodes::OperandIsScalableUnsignedByte(bytecode(), operand_index) &&
+         Bytecodes::ScaleForUnsignedOperand(operand) != operand_scale_)) {
+      UpdateScale();
+    }
+  }
 
   Bytecode bytecode() const { return bytecode_; }
 
@@ -199,22 +284,60 @@
     DCHECK_LT(i, operand_count());
     return operands_[i];
   }
-  uint32_t* operands() { return operands_; }
   const uint32_t* operands() const { return operands_; }
 
-  int operand_count() const { return Bytecodes::NumberOfOperands(bytecode_); }
+  int operand_count() const { return operand_count_; }
+  OperandScale operand_scale() const { return operand_scale_; }
 
   const BytecodeSourceInfo& source_info() const { return source_info_; }
-  BytecodeSourceInfo& source_info() { return source_info_; }
+  BytecodeSourceInfo* source_info_ptr() { return &source_info_; }
 
   bool operator==(const BytecodeNode& other) const;
   bool operator!=(const BytecodeNode& other) const { return !(*this == other); }
 
  private:
-  static const int kInvalidPosition = kMinInt;
+  INLINE(void AttachSourceInfo(BytecodeSourceInfo* source_info)) {
+    if (source_info && source_info->is_valid()) {
+      // Statement positions need to be emitted immediately.  Expression
+      // positions can be pushed back until a bytecode is found that can
+      // throw (if expression position filtering is turned on). We only
+      // invalidate the existing source position information if it is used.
+      if (source_info->is_statement() ||
+          !FLAG_ignition_filter_expression_positions ||
+          !Bytecodes::IsWithoutExternalSideEffects(bytecode())) {
+        source_info_.Clone(*source_info);
+        source_info->set_invalid();
+      }
+    }
+  }
+
+  INLINE(void UpdateScaleForOperand(int operand_index, uint32_t operand)) {
+    if (Bytecodes::OperandIsScalableSignedByte(bytecode(), operand_index)) {
+      operand_scale_ =
+          std::max(operand_scale_, Bytecodes::ScaleForSignedOperand(operand));
+    } else if (Bytecodes::OperandIsScalableUnsignedByte(bytecode(),
+                                                        operand_index)) {
+      operand_scale_ =
+          std::max(operand_scale_, Bytecodes::ScaleForUnsignedOperand(operand));
+    }
+  }
+
+  INLINE(void SetOperand(int operand_index, uint32_t operand)) {
+    operands_[operand_index] = operand;
+    UpdateScaleForOperand(operand_index, operand);
+  }
+
+  void UpdateScale() {
+    operand_scale_ = OperandScale::kSingle;
+    for (int i = 0; i < operand_count(); i++) {
+      UpdateScaleForOperand(i, operands_[i]);
+    }
+  }
 
   Bytecode bytecode_;
   uint32_t operands_[Bytecodes::kMaxOperands];
+  int operand_count_;
+  OperandScale operand_scale_;
   BytecodeSourceInfo source_info_;
 };
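
The constructors and SetOperand above keep operand_scale_ widened to the largest scale any operand needs, via Bytecodes::ScaleForSignedOperand and Bytecodes::ScaleForUnsignedOperand, whose definitions are not part of this diff. A rough standalone sketch of the widening rule those helpers are assumed to implement, based only on the OperandScale values declared in bytecode-operands.h and not on the actual V8 code:

#include <algorithm>
#include <cstdint>
#include <limits>

enum class OperandScale : uint8_t { kSingle = 1, kDouble = 2, kQuadruple = 4 };

// Assumed rule: an unsigned operand fits in 1 byte at single scale, 2 bytes
// at double scale, and otherwise needs quadruple scale.
OperandScale ScaleForUnsignedOperand(uint32_t value) {
  if (value <= std::numeric_limits<uint8_t>::max()) return OperandScale::kSingle;
  if (value <= std::numeric_limits<uint16_t>::max()) return OperandScale::kDouble;
  return OperandScale::kQuadruple;
}

// Assumed rule: a signed operand is stored as int8/int16/int32 at the three
// scales; the raw uint32_t is reinterpreted as signed first.
OperandScale ScaleForSignedOperand(uint32_t raw) {
  int32_t value = static_cast<int32_t>(raw);
  if (value >= std::numeric_limits<int8_t>::min() &&
      value <= std::numeric_limits<int8_t>::max()) {
    return OperandScale::kSingle;
  }
  if (value >= std::numeric_limits<int16_t>::min() &&
      value <= std::numeric_limits<int16_t>::max()) {
    return OperandScale::kDouble;
  }
  return OperandScale::kQuadruple;
}

// Mirrors UpdateScale(): a node's scale is the widest scale required by any
// of its operands, starting from kSingle.
OperandScale ScaleForAllOperands(const uint32_t* operands,
                                 const bool* is_signed, int count) {
  OperandScale scale = OperandScale::kSingle;
  for (int i = 0; i < count; ++i) {
    scale = std::max(scale, is_signed[i] ? ScaleForSignedOperand(operands[i])
                                         : ScaleForUnsignedOperand(operands[i]));
  }
  return scale;
}

int main() {
  uint32_t operands[] = {3, 70000};
  bool is_signed[] = {false, false};
  // 70000 does not fit in 16 bits, so the whole node needs quadruple scale.
  return ScaleForAllOperands(operands, is_signed, 2) == OperandScale::kQuadruple
             ? 0
             : 1;
}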
 
diff --git a/src/interpreter/bytecode-register-allocator.cc b/src/interpreter/bytecode-register-allocator.cc
deleted file mode 100644
index 10afcdc..0000000
--- a/src/interpreter/bytecode-register-allocator.cc
+++ /dev/null
@@ -1,210 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/interpreter/bytecode-register-allocator.h"
-
-#include "src/interpreter/bytecode-array-builder.h"
-
-namespace v8 {
-namespace internal {
-namespace interpreter {
-
-TemporaryRegisterAllocator::TemporaryRegisterAllocator(Zone* zone,
-                                                       int allocation_base)
-    : free_temporaries_(zone),
-      allocation_base_(allocation_base),
-      allocation_count_(0),
-      observer_(nullptr) {}
-
-Register TemporaryRegisterAllocator::first_temporary_register() const {
-  DCHECK(allocation_count() > 0);
-  return Register(allocation_base());
-}
-
-Register TemporaryRegisterAllocator::last_temporary_register() const {
-  DCHECK(allocation_count() > 0);
-  return Register(allocation_base() + allocation_count() - 1);
-}
-
-void TemporaryRegisterAllocator::set_observer(
-    TemporaryRegisterObserver* observer) {
-  DCHECK(observer_ == nullptr);
-  observer_ = observer;
-}
-
-int TemporaryRegisterAllocator::AllocateTemporaryRegister() {
-  allocation_count_ += 1;
-  return allocation_base() + allocation_count() - 1;
-}
-
-int TemporaryRegisterAllocator::BorrowTemporaryRegister() {
-  if (free_temporaries_.empty()) {
-    return AllocateTemporaryRegister();
-  } else {
-    auto pos = free_temporaries_.begin();
-    int retval = *pos;
-    free_temporaries_.erase(pos);
-    return retval;
-  }
-}
-
-int TemporaryRegisterAllocator::BorrowTemporaryRegisterNotInRange(
-    int start_index, int end_index) {
-  if (free_temporaries_.empty()) {
-    int next_allocation = allocation_base() + allocation_count();
-    while (next_allocation >= start_index && next_allocation <= end_index) {
-      free_temporaries_.insert(AllocateTemporaryRegister());
-      next_allocation += 1;
-    }
-    return AllocateTemporaryRegister();
-  }
-
-  ZoneSet<int>::iterator index = free_temporaries_.lower_bound(start_index);
-  if (index == free_temporaries_.begin()) {
-    // If start_index is the first free register, check for a register
-    // greater than end_index.
-    index = free_temporaries_.upper_bound(end_index);
-    if (index == free_temporaries_.end()) {
-      return AllocateTemporaryRegister();
-    }
-  } else {
-    // If there is a free register < start_index
-    index--;
-  }
-
-  int retval = *index;
-  free_temporaries_.erase(index);
-  return retval;
-}
-
-int TemporaryRegisterAllocator::PrepareForConsecutiveTemporaryRegisters(
-    size_t count) {
-  if (count == 0) {
-    return -1;
-  }
-
-  // TODO(oth): replace use of set<> here for free_temporaries with a
-  // more efficient structure. And/or partition into two searches -
-  // one before the translation window and one after.
-
-  // A run will require at least |count| free temporaries.
-  while (free_temporaries_.size() < count) {
-    free_temporaries_.insert(AllocateTemporaryRegister());
-  }
-
-  // Search within existing temporaries for a run.
-  auto start = free_temporaries_.begin();
-  size_t run_length = 0;
-  for (auto run_end = start; run_end != free_temporaries_.end(); run_end++) {
-    int expected = *start + static_cast<int>(run_length);
-    if (*run_end != expected) {
-      start = run_end;
-      run_length = 0;
-    }
-    if (++run_length == count) {
-      return *start;
-    }
-  }
-
-  // Continue run if possible across existing last temporary.
-  if (allocation_count_ > 0 && (start == free_temporaries_.end() ||
-                                *start + static_cast<int>(run_length) !=
-                                    last_temporary_register().index() + 1)) {
-    run_length = 0;
-  }
-
-  // Pad temporaries if extended run would cross translation boundary.
-  Register reg_first(*start);
-  Register reg_last(*start + static_cast<int>(count) - 1);
-
-  // Ensure enough registers for run.
-  while (run_length++ < count) {
-    free_temporaries_.insert(AllocateTemporaryRegister());
-  }
-
-  int run_start =
-      last_temporary_register().index() - static_cast<int>(count) + 1;
-  return run_start;
-}
-
-bool TemporaryRegisterAllocator::RegisterIsLive(Register reg) const {
-  if (allocation_count_ > 0) {
-    DCHECK(reg >= first_temporary_register() &&
-           reg <= last_temporary_register());
-    return free_temporaries_.find(reg.index()) == free_temporaries_.end();
-  } else {
-    return false;
-  }
-}
-
-void TemporaryRegisterAllocator::BorrowConsecutiveTemporaryRegister(
-    int reg_index) {
-  DCHECK(free_temporaries_.find(reg_index) != free_temporaries_.end());
-  free_temporaries_.erase(reg_index);
-}
-
-void TemporaryRegisterAllocator::ReturnTemporaryRegister(int reg_index) {
-  DCHECK(free_temporaries_.find(reg_index) == free_temporaries_.end());
-  free_temporaries_.insert(reg_index);
-  if (observer_) {
-    observer_->TemporaryRegisterFreeEvent(Register(reg_index));
-  }
-}
-
-BytecodeRegisterAllocator::BytecodeRegisterAllocator(
-    Zone* zone, TemporaryRegisterAllocator* allocator)
-    : base_allocator_(allocator),
-      allocated_(zone),
-      next_consecutive_register_(-1),
-      next_consecutive_count_(-1) {}
-
-BytecodeRegisterAllocator::~BytecodeRegisterAllocator() {
-  for (auto i = allocated_.rbegin(); i != allocated_.rend(); i++) {
-    base_allocator()->ReturnTemporaryRegister(*i);
-  }
-  allocated_.clear();
-}
-
-Register BytecodeRegisterAllocator::NewRegister() {
-  int allocated = -1;
-  if (next_consecutive_count_ <= 0) {
-    allocated = base_allocator()->BorrowTemporaryRegister();
-  } else {
-    allocated = base_allocator()->BorrowTemporaryRegisterNotInRange(
-        next_consecutive_register_,
-        next_consecutive_register_ + next_consecutive_count_ - 1);
-  }
-  allocated_.push_back(allocated);
-  return Register(allocated);
-}
-
-bool BytecodeRegisterAllocator::RegisterIsAllocatedInThisScope(
-    Register reg) const {
-  for (auto i = allocated_.begin(); i != allocated_.end(); i++) {
-    if (*i == reg.index()) return true;
-  }
-  return false;
-}
-
-void BytecodeRegisterAllocator::PrepareForConsecutiveAllocations(size_t count) {
-  if (static_cast<int>(count) > next_consecutive_count_) {
-    next_consecutive_register_ =
-        base_allocator()->PrepareForConsecutiveTemporaryRegisters(count);
-    next_consecutive_count_ = static_cast<int>(count);
-  }
-}
-
-Register BytecodeRegisterAllocator::NextConsecutiveRegister() {
-  DCHECK_GE(next_consecutive_register_, 0);
-  DCHECK_GT(next_consecutive_count_, 0);
-  base_allocator()->BorrowConsecutiveTemporaryRegister(
-      next_consecutive_register_);
-  allocated_.push_back(next_consecutive_register_);
-  next_consecutive_count_--;
-  return Register(next_consecutive_register_++);
-}
-
-}  // namespace interpreter
-}  // namespace internal
-}  // namespace v8
diff --git a/src/interpreter/bytecode-register-allocator.h b/src/interpreter/bytecode-register-allocator.h
index b8f737b..e9de466 100644
--- a/src/interpreter/bytecode-register-allocator.h
+++ b/src/interpreter/bytecode-register-allocator.h
@@ -5,106 +5,76 @@
 #ifndef V8_INTERPRETER_BYTECODE_REGISTER_ALLOCATOR_H_
 #define V8_INTERPRETER_BYTECODE_REGISTER_ALLOCATOR_H_
 
+#include "src/interpreter/bytecode-register.h"
 #include "src/interpreter/bytecodes.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
 namespace interpreter {
 
-class BytecodeArrayBuilder;
-class Register;
-class TemporaryRegisterObserver;
-
-class TemporaryRegisterAllocator final {
- public:
-  TemporaryRegisterAllocator(Zone* zone, int start_index);
-
-  // Borrow a temporary register.
-  int BorrowTemporaryRegister();
-
-  // Borrow a temporary register from the register range outside of
-  // |start_index| to |end_index|.
-  int BorrowTemporaryRegisterNotInRange(int start_index, int end_index);
-
-  // Return a temporary register when no longer used.
-  void ReturnTemporaryRegister(int reg_index);
-
-  // Ensure a run of consecutive registers is available. Each register in
-  // the range should be borrowed with BorrowConsecutiveTemporaryRegister().
-  // Returns the start index of the run.
-  int PrepareForConsecutiveTemporaryRegisters(size_t count);
-
-  // Borrow a register from a range prepared with
-  // PrepareForConsecutiveTemporaryRegisters().
-  void BorrowConsecutiveTemporaryRegister(int reg_index);
-
-  // Returns true if |reg| is a temporary register and is currently
-  // borrowed.
-  bool RegisterIsLive(Register reg) const;
-
-  // Returns the first register in the range of temporary registers.
-  Register first_temporary_register() const;
-
-  // Returns the last register in the range of temporary registers.
-  Register last_temporary_register() const;
-
-  // Returns the start index of temporary register allocations.
-  int allocation_base() const { return allocation_base_; }
-
-  // Returns the number of temporary register allocations made.
-  int allocation_count() const { return allocation_count_; }
-
-  // Sets an observer for temporary register events.
-  void set_observer(TemporaryRegisterObserver* observer);
-
- private:
-  // Allocate a temporary register.
-  int AllocateTemporaryRegister();
-
-  ZoneSet<int> free_temporaries_;
-  int allocation_base_;
-  int allocation_count_;
-  TemporaryRegisterObserver* observer_;
-
-  DISALLOW_COPY_AND_ASSIGN(TemporaryRegisterAllocator);
-};
-
-class TemporaryRegisterObserver {
- public:
-  virtual ~TemporaryRegisterObserver() {}
-  virtual void TemporaryRegisterFreeEvent(Register reg) = 0;
-};
-
-// A class that allows the instantiator to allocate temporary registers that are
-// cleaned up when scope is closed.
+// A class that allows the allocation of contiguous temporary registers.
 class BytecodeRegisterAllocator final {
  public:
-  explicit BytecodeRegisterAllocator(Zone* zone,
-                                     TemporaryRegisterAllocator* allocator);
-  ~BytecodeRegisterAllocator();
-  Register NewRegister();
+  // Enables observation of register allocation and free events.
+  class Observer {
+   public:
+    virtual ~Observer() {}
+    virtual void RegisterAllocateEvent(Register reg) = 0;
+    virtual void RegisterListAllocateEvent(RegisterList reg_list) = 0;
+    virtual void RegisterListFreeEvent(RegisterList reg_list) = 0;
+  };
 
-  // Ensure |count| consecutive allocations are available.
-  void PrepareForConsecutiveAllocations(size_t count);
+  explicit BytecodeRegisterAllocator(int start_index)
+      : next_register_index_(start_index),
+        max_register_count_(start_index),
+        observer_(nullptr) {}
+  ~BytecodeRegisterAllocator() {}
 
-  // Get the next consecutive allocation after calling
-  // PrepareForConsecutiveAllocations.
-  Register NextConsecutiveRegister();
+  // Returns a new register.
+  Register NewRegister() {
+    Register reg(next_register_index_++);
+    max_register_count_ = std::max(next_register_index_, max_register_count_);
+    if (observer_) {
+      observer_->RegisterAllocateEvent(reg);
+    }
+    return reg;
+  }
 
-  // Returns true if |reg| is allocated in this allocator.
-  bool RegisterIsAllocatedInThisScope(Register reg) const;
+  // Returns a consecutive list of |count| new registers.
+  RegisterList NewRegisterList(int count) {
+    RegisterList reg_list(next_register_index_, count);
+    next_register_index_ += count;
+    max_register_count_ = std::max(next_register_index_, max_register_count_);
+    if (observer_) {
+      observer_->RegisterListAllocateEvent(reg_list);
+    }
+    return reg_list;
+  }
 
-  // Returns true if unused consecutive allocations remain.
-  bool HasConsecutiveAllocations() const { return next_consecutive_count_ > 0; }
+  // Release all registers above |register_index|.
+  void ReleaseRegisters(int register_index) {
+    if (observer_) {
+      observer_->RegisterListFreeEvent(
+          RegisterList(register_index, next_register_index_ - register_index));
+    }
+    next_register_index_ = register_index;
+  }
+
+  // Returns true if the register |reg| is a live register.
+  bool RegisterIsLive(Register reg) const {
+    return reg.index() < next_register_index_;
+  }
+
+  void set_observer(Observer* observer) { observer_ = observer; }
+
+  int next_register_index() const { return next_register_index_; }
+  int maximum_register_count() const { return max_register_count_; }
 
  private:
-  TemporaryRegisterAllocator* base_allocator() const { return base_allocator_; }
-
-  TemporaryRegisterAllocator* base_allocator_;
-  ZoneVector<int> allocated_;
-  int next_consecutive_register_;
-  int next_consecutive_count_;
+  int next_register_index_;
+  int max_register_count_;
+  Observer* observer_;
 
   DISALLOW_COPY_AND_ASSIGN(BytecodeRegisterAllocator);
 };
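
The replacement allocator drops the borrow/return free set in favour of a stack discipline: registers are handed out with increasing indices and released in bulk by rolling next_register_index_ back to a checkpoint, while max_register_count_ records the frame's high-water mark. A minimal sketch of that usage pattern (SimpleAllocator is an illustration only, not the V8 class, and omits Register, RegisterList and the Observer):

#include <algorithm>
#include <cstdio>

// Illustration only: mirrors the stack-like scheme of BytecodeRegisterAllocator.
class SimpleAllocator {
 public:
  explicit SimpleAllocator(int start_index)
      : next_register_index_(start_index), max_register_count_(start_index) {}

  // Hands out the next free register index and tracks the high-water mark.
  int NewRegister() {
    int reg = next_register_index_++;
    max_register_count_ = std::max(next_register_index_, max_register_count_);
    return reg;
  }

  // Releases every register at or above |register_index| in one step.
  void ReleaseRegisters(int register_index) {
    next_register_index_ = register_index;
  }

  int maximum_register_count() const { return max_register_count_; }
  int next_register_index() const { return next_register_index_; }

 private:
  int next_register_index_;
  int max_register_count_;
};

int main() {
  SimpleAllocator allocator(/*start_index=*/3);  // r0-r2 are fixed registers.
  int checkpoint = allocator.next_register_index();
  int a = allocator.NewRegister();  // r3
  int b = allocator.NewRegister();  // r4
  std::printf("temporaries r%d and r%d\n", a, b);
  allocator.ReleaseRegisters(checkpoint);  // frees r3 and r4 together.
  std::printf("frame size: %d registers\n", allocator.maximum_register_count());
  return 0;
}

The high-water mark is presumably what the bytecode generator feeds into the register_count argument of ToBytecodeArray once the function has been fully emitted.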
diff --git a/src/interpreter/bytecode-register-optimizer.cc b/src/interpreter/bytecode-register-optimizer.cc
index d28f215..acbe0ba 100644
--- a/src/interpreter/bytecode-register-optimizer.cc
+++ b/src/interpreter/bytecode-register-optimizer.cc
@@ -15,10 +15,12 @@
 // register is materialized in the bytecode stream.
 class BytecodeRegisterOptimizer::RegisterInfo final : public ZoneObject {
  public:
-  RegisterInfo(Register reg, uint32_t equivalence_id, bool materialized)
+  RegisterInfo(Register reg, uint32_t equivalence_id, bool materialized,
+               bool allocated)
       : register_(reg),
         equivalence_id_(equivalence_id),
         materialized_(materialized),
+        allocated_(allocated),
         next_(this),
         prev_(this) {}
 
@@ -48,12 +50,17 @@
   // exists.
   RegisterInfo* GetEquivalentToMaterialize();
 
+  // Marks all temporary registers of the equivalence set as unmaterialized.
+  void MarkTemporariesAsUnmaterialized(Register temporary_base);
+
   // Get an equivalent register. Returns this if none exists.
   RegisterInfo* GetEquivalent();
 
   Register register_value() const { return register_; }
   bool materialized() const { return materialized_; }
   void set_materialized(bool materialized) { materialized_ = materialized; }
+  bool allocated() const { return allocated_; }
+  void set_allocated(bool allocated) { allocated_ = allocated; }
   void set_equivalence_id(uint32_t equivalence_id) {
     equivalence_id_ = equivalence_id;
   }
@@ -63,6 +70,7 @@
   Register register_;
   uint32_t equivalence_id_;
   bool materialized_;
+  bool allocated_;
 
   // Equivalence set pointers.
   RegisterInfo* next_;
@@ -155,8 +163,9 @@
     if (visitor->materialized()) {
       return nullptr;
     }
-    if (best_info == nullptr ||
-        visitor->register_value() < best_info->register_value()) {
+    if (visitor->allocated() &&
+        (best_info == nullptr ||
+         visitor->register_value() < best_info->register_value())) {
       best_info = visitor;
     }
     visitor = visitor->next_;
@@ -164,16 +173,31 @@
   return best_info;
 }
 
+void BytecodeRegisterOptimizer::RegisterInfo::MarkTemporariesAsUnmaterialized(
+    Register temporary_base) {
+  DCHECK(this->register_value() < temporary_base);
+  DCHECK(this->materialized());
+  RegisterInfo* visitor = this->next_;
+  while (visitor != this) {
+    if (visitor->register_value() >= temporary_base) {
+      visitor->set_materialized(false);
+    }
+    visitor = visitor->next_;
+  }
+}
+
 BytecodeRegisterOptimizer::RegisterInfo*
 BytecodeRegisterOptimizer::RegisterInfo::GetEquivalent() {
   return next_;
 }
 
 BytecodeRegisterOptimizer::BytecodeRegisterOptimizer(
-    Zone* zone, TemporaryRegisterAllocator* register_allocator,
-    int parameter_count, BytecodePipelineStage* next_stage)
+    Zone* zone, BytecodeRegisterAllocator* register_allocator,
+    int fixed_registers_count, int parameter_count,
+    BytecodePipelineStage* next_stage)
     : accumulator_(Register::virtual_accumulator()),
-      temporary_base_(register_allocator->allocation_base()),
+      temporary_base_(fixed_registers_count),
+      max_register_index_(fixed_registers_count - 1),
       register_info_table_(zone),
       equivalence_id_(0),
       next_stage_(next_stage),
@@ -198,7 +222,7 @@
                               static_cast<size_t>(temporary_base_.index()));
   for (size_t i = 0; i < register_info_table_.size(); ++i) {
     register_info_table_[i] = new (zone) RegisterInfo(
-        RegisterFromRegisterInfoTableIndex(i), NextEquivalenceId(), true);
+        RegisterFromRegisterInfoTableIndex(i), NextEquivalenceId(), true, true);
     DCHECK_EQ(register_info_table_[i]->register_value().index(),
               RegisterFromRegisterInfoTableIndex(i).index());
   }
@@ -208,15 +232,17 @@
 
 // override
 Handle<BytecodeArray> BytecodeRegisterOptimizer::ToBytecodeArray(
-    Isolate* isolate, int fixed_register_count, int parameter_count,
+    Isolate* isolate, int register_count, int parameter_count,
     Handle<FixedArray> handler_table) {
   FlushState();
-  return next_stage_->ToBytecodeArray(isolate, fixed_register_count,
+  return next_stage_->ToBytecodeArray(isolate, max_register_index_ + 1,
                                       parameter_count, handler_table);
 }
 
 // override
 void BytecodeRegisterOptimizer::Write(BytecodeNode* node) {
+  // Jumps are handled by WriteJump.
+  DCHECK(!Bytecodes::IsJump(node->bytecode()));
   //
   // Transfers with observable registers as the destination will be
   // immediately materialized so the source position information will
@@ -245,18 +271,16 @@
       break;
   }
 
-  if (Bytecodes::IsJump(node->bytecode()) ||
-      node->bytecode() == Bytecode::kDebugger ||
+  if (node->bytecode() == Bytecode::kDebugger ||
       node->bytecode() == Bytecode::kSuspendGenerator) {
     // All state must be flushed before emitting
-    // - a jump (due to how bytecode offsets for jumps are evaluated),
     // - a call to the debugger (as it can manipulate locals and parameters),
     // - a generator suspend (as this involves saving all registers).
     FlushState();
   }
 
   PrepareOperands(node);
-  WriteToNextStage(node);
+  next_stage_->Write(node);
 }
 
 // override
@@ -295,7 +319,7 @@
       // own equivalence set.
       RegisterInfo* equivalent;
       while ((equivalent = reg_info->GetEquivalent()) != reg_info) {
-        if (!equivalent->materialized()) {
+        if (equivalent->allocated() && !equivalent->materialized()) {
           OutputRegisterTransfer(reg_info, equivalent);
         }
         equivalent->MoveToNewEquivalenceSet(NextEquivalenceId(), true);
@@ -306,38 +330,29 @@
   flush_required_ = false;
 }
 
-void BytecodeRegisterOptimizer::WriteToNextStage(BytecodeNode* node) const {
-  next_stage_->Write(node);
-}
-
-void BytecodeRegisterOptimizer::WriteToNextStage(
-    BytecodeNode* node, const BytecodeSourceInfo& source_info) const {
-  if (source_info.is_valid()) {
-    node->source_info().Clone(source_info);
-  }
-  next_stage_->Write(node);
-}
-
 void BytecodeRegisterOptimizer::OutputRegisterTransfer(
     RegisterInfo* input_info, RegisterInfo* output_info,
-    const BytecodeSourceInfo& source_info) {
+    BytecodeSourceInfo* source_info) {
   Register input = input_info->register_value();
   Register output = output_info->register_value();
   DCHECK_NE(input.index(), output.index());
 
   if (input == accumulator_) {
     uint32_t operand = static_cast<uint32_t>(output.ToOperand());
-    BytecodeNode node(Bytecode::kStar, operand);
-    WriteToNextStage(&node, source_info);
+    BytecodeNode node(Bytecode::kStar, operand, source_info);
+    next_stage_->Write(&node);
   } else if (output == accumulator_) {
     uint32_t operand = static_cast<uint32_t>(input.ToOperand());
-    BytecodeNode node(Bytecode::kLdar, operand);
-    WriteToNextStage(&node, source_info);
+    BytecodeNode node(Bytecode::kLdar, operand, source_info);
+    next_stage_->Write(&node);
   } else {
     uint32_t operand0 = static_cast<uint32_t>(input.ToOperand());
     uint32_t operand1 = static_cast<uint32_t>(output.ToOperand());
-    BytecodeNode node(Bytecode::kMov, operand0, operand1);
-    WriteToNextStage(&node, source_info);
+    BytecodeNode node(Bytecode::kMov, operand0, operand1, source_info);
+    next_stage_->Write(&node);
+  }
+  if (output != accumulator_) {
+    max_register_index_ = std::max(max_register_index_, output.index());
   }
   output_info->set_materialized(true);
 }
@@ -389,7 +404,7 @@
 
 void BytecodeRegisterOptimizer::RegisterTransfer(
     RegisterInfo* input_info, RegisterInfo* output_info,
-    const BytecodeSourceInfo& source_info) {
+    BytecodeSourceInfo* source_info) {
   // Materialize an alternate in the equivalence set that
   // |output_info| is leaving.
   if (output_info->materialized()) {
@@ -408,42 +423,48 @@
     output_info->set_materialized(false);
     RegisterInfo* materialized_info = input_info->GetMaterializedEquivalent();
     OutputRegisterTransfer(materialized_info, output_info, source_info);
-  } else if (source_info.is_valid()) {
+  } else if (source_info->is_valid()) {
     // Emit a placeholder nop to maintain source position info.
     EmitNopForSourceInfo(source_info);
   }
+
+  bool input_is_observable = RegisterIsObservable(input_info->register_value());
+  if (input_is_observable) {
+    // If input is observable by the debugger, mark all other temporary
+    // registers as unmaterialized so that this register is used in preference.
+    input_info->MarkTemporariesAsUnmaterialized(temporary_base_);
+  }
 }
 
 void BytecodeRegisterOptimizer::EmitNopForSourceInfo(
-    const BytecodeSourceInfo& source_info) const {
-  DCHECK(source_info.is_valid());
-  BytecodeNode nop(Bytecode::kNop);
-  nop.source_info().Clone(source_info);
-  WriteToNextStage(&nop);
+    BytecodeSourceInfo* source_info) const {
+  DCHECK(source_info->is_valid());
+  BytecodeNode nop(Bytecode::kNop, source_info);
+  next_stage_->Write(&nop);
 }
 
-void BytecodeRegisterOptimizer::DoLdar(const BytecodeNode* const node) {
+void BytecodeRegisterOptimizer::DoLdar(BytecodeNode* node) {
   Register input = GetRegisterInputOperand(
       0, node->bytecode(), node->operands(), node->operand_count());
   RegisterInfo* input_info = GetRegisterInfo(input);
-  RegisterTransfer(input_info, accumulator_info_, node->source_info());
+  RegisterTransfer(input_info, accumulator_info_, node->source_info_ptr());
 }
 
-void BytecodeRegisterOptimizer::DoMov(const BytecodeNode* const node) {
+void BytecodeRegisterOptimizer::DoMov(BytecodeNode* node) {
   Register input = GetRegisterInputOperand(
       0, node->bytecode(), node->operands(), node->operand_count());
   RegisterInfo* input_info = GetRegisterInfo(input);
   Register output = GetRegisterOutputOperand(
       1, node->bytecode(), node->operands(), node->operand_count());
-  RegisterInfo* output_info = GetOrCreateRegisterInfo(output);
-  RegisterTransfer(input_info, output_info, node->source_info());
+  RegisterInfo* output_info = GetRegisterInfo(output);
+  RegisterTransfer(input_info, output_info, node->source_info_ptr());
 }
 
-void BytecodeRegisterOptimizer::DoStar(const BytecodeNode* const node) {
+void BytecodeRegisterOptimizer::DoStar(BytecodeNode* node) {
   Register output = GetRegisterOutputOperand(
       0, node->bytecode(), node->operands(), node->operand_count());
-  RegisterInfo* output_info = GetOrCreateRegisterInfo(output);
-  RegisterTransfer(accumulator_info_, output_info, node->source_info());
+  RegisterInfo* output_info = GetRegisterInfo(output);
+  RegisterTransfer(accumulator_info_, output_info, node->source_info_ptr());
 }
 
 void BytecodeRegisterOptimizer::PrepareRegisterOutputOperand(
@@ -451,6 +472,8 @@
   if (reg_info->materialized()) {
     CreateMaterializedEquivalent(reg_info);
   }
+  max_register_index_ =
+      std::max(max_register_index_, reg_info->register_value().index());
   reg_info->MoveToNewEquivalenceSet(NextEquivalenceId(), true);
 }
 
@@ -458,7 +481,7 @@
     Register start, int count) {
   for (int i = 0; i < count; ++i) {
     Register reg(start.index() + i);
-    RegisterInfo* reg_info = GetOrCreateRegisterInfo(reg);
+    RegisterInfo* reg_info = GetRegisterInfo(reg);
     PrepareRegisterOutputOperand(reg_info);
   }
 }
@@ -468,7 +491,7 @@
   // For a temporary register, RegInfo state may need to be created. For
   // locals and parameters, the RegInfo state is created in the
   // BytecodeRegisterOptimizer constructor.
-  RegisterInfo* reg_info = GetOrCreateRegisterInfo(reg);
+  RegisterInfo* reg_info = GetRegisterInfo(reg);
   if (reg_info->materialized()) {
     return reg;
   } else {
@@ -481,8 +504,8 @@
 void BytecodeRegisterOptimizer::PrepareRegisterInputOperand(
     BytecodeNode* const node, Register reg, int operand_index) {
   Register equivalent = GetEquivalentRegisterForInputOperand(reg);
-  node->operands()[operand_index] =
-      static_cast<uint32_t>(equivalent.ToOperand());
+  node->UpdateOperand(operand_index,
+                      static_cast<uint32_t>(equivalent.ToOperand()));
 }
 
 void BytecodeRegisterOptimizer::PrepareRegisterRangeInputOperand(Register start,
@@ -510,9 +533,9 @@
       Bytecodes::GetOperandTypes(node->bytecode());
   for (int i = 0; i < operand_count; ++i) {
     int count;
-    // operand_types is terminated by OperandType::kNone so this does not
-    // go out of bounds.
-    if (operand_types[i + 1] == OperandType::kRegCount) {
+    if (operand_types[i] == OperandType::kRegList) {
+      DCHECK_LT(i, operand_count - 1);
+      DCHECK(operand_types[i + 1] == OperandType::kRegCount);
       count = static_cast<int>(operands[i + 1]);
     } else {
       count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_types[i]);
@@ -577,8 +600,8 @@
 BytecodeRegisterOptimizer::RegisterInfo*
 BytecodeRegisterOptimizer::GetRegisterInfo(Register reg) {
   size_t index = GetRegisterInfoTableIndex(reg);
-  return (index < register_info_table_.size()) ? register_info_table_[index]
-                                               : nullptr;
+  DCHECK_LT(index, register_info_table_.size());
+  return register_info_table_[index];
 }
 
 BytecodeRegisterOptimizer::RegisterInfo*
@@ -599,26 +622,37 @@
 void BytecodeRegisterOptimizer::GrowRegisterMap(Register reg) {
   DCHECK(RegisterIsTemporary(reg));
   size_t index = GetRegisterInfoTableIndex(reg);
-  DCHECK_GE(index, register_info_table_.size());
-  size_t new_size = index + 1;
-  size_t old_size = register_info_table_.size();
-  register_info_table_.resize(new_size);
-  for (size_t i = old_size; i < new_size; ++i) {
-    register_info_table_[i] = new (zone()) RegisterInfo(
-        RegisterFromRegisterInfoTableIndex(i), NextEquivalenceId(), false);
+  if (index >= register_info_table_.size()) {
+    size_t new_size = index + 1;
+    size_t old_size = register_info_table_.size();
+    register_info_table_.resize(new_size);
+    for (size_t i = old_size; i < new_size; ++i) {
+      register_info_table_[i] =
+          new (zone()) RegisterInfo(RegisterFromRegisterInfoTableIndex(i),
+                                    NextEquivalenceId(), false, false);
+    }
   }
 }
 
-void BytecodeRegisterOptimizer::TemporaryRegisterFreeEvent(Register reg) {
-  RegisterInfo* info = GetRegisterInfo(reg);
-  if (info != nullptr) {
-    // If register is materialized and part of equivalence set, make
-    // sure another member of the set holds the value before the
-    // temporary register is removed.
-    if (info->materialized()) {
-      CreateMaterializedEquivalent(info);
+void BytecodeRegisterOptimizer::RegisterAllocateEvent(Register reg) {
+  GetOrCreateRegisterInfo(reg)->set_allocated(true);
+}
+
+void BytecodeRegisterOptimizer::RegisterListAllocateEvent(
+    RegisterList reg_list) {
+  if (reg_list.register_count() != 0) {
+    int first_index = reg_list.first_register().index();
+    GrowRegisterMap(Register(first_index + reg_list.register_count() - 1));
+    for (int i = 0; i < reg_list.register_count(); i++) {
+      GetRegisterInfo(Register(first_index + i))->set_allocated(true);
     }
-    info->MoveToNewEquivalenceSet(kInvalidEquivalenceId, false);
+  }
+}
+
+void BytecodeRegisterOptimizer::RegisterListFreeEvent(RegisterList reg_list) {
+  int first_index = reg_list.first_register().index();
+  for (int i = 0; i < reg_list.register_count(); i++) {
+    GetRegisterInfo(Register(first_index + i))->set_allocated(false);
   }
 }
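
Conceptually, the optimizer works on equivalence sets: registers (and the accumulator) known to hold the same value are linked together, and a transfer whose destination is already in the source's set can be elided. The real RegisterInfo keeps a circular linked list per set and tracks materialization and allocation; the toy sketch below (not V8 code) reduces the idea to a single equivalence id per register just to show why redundant moves disappear:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // equivalence_id[r] identifies the value currently held by register r.
  std::vector<uint32_t> equivalence_id = {0, 1, 2, 3};
  uint32_t next_id = 4;

  auto mov = [&](int src, int dst) {
    if (equivalence_id[dst] == equivalence_id[src]) {
      std::printf("mov r%d -> r%d elided\n", src, dst);
      return;
    }
    std::printf("emit Mov r%d, r%d\n", src, dst);
    equivalence_id[dst] = equivalence_id[src];  // dst now aliases src.
  };

  auto clobber = [&](int reg) {
    // A write moves the register into a fresh single-member equivalence set,
    // analogous to MoveToNewEquivalenceSet(NextEquivalenceId(), ...).
    equivalence_id[reg] = next_id++;
  };

  mov(0, 1);   // emit Mov r0, r1
  mov(0, 1);   // elided: r1 is already equivalent to r0
  clobber(1);  // r1 redefined elsewhere
  mov(0, 1);   // emit Mov r0, r1 again
  return 0;
}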
 
diff --git a/src/interpreter/bytecode-register-optimizer.h b/src/interpreter/bytecode-register-optimizer.h
index fb087b5..eda22e5 100644
--- a/src/interpreter/bytecode-register-optimizer.h
+++ b/src/interpreter/bytecode-register-optimizer.h
@@ -15,13 +15,14 @@
 // registers. The bytecode generator uses temporary registers
 // liberally for correctness and convenience and this stage removes
 // transfers that are not required and preserves correctness.
-class BytecodeRegisterOptimizer final : public BytecodePipelineStage,
-                                        public TemporaryRegisterObserver,
-                                        public ZoneObject {
+class BytecodeRegisterOptimizer final
+    : public BytecodePipelineStage,
+      public BytecodeRegisterAllocator::Observer,
+      public ZoneObject {
  public:
   BytecodeRegisterOptimizer(Zone* zone,
-                            TemporaryRegisterAllocator* register_allocator,
-                            int parameter_count,
+                            BytecodeRegisterAllocator* register_allocator,
+                            int fixed_registers_count, int parameter_count,
                             BytecodePipelineStage* next_stage);
   virtual ~BytecodeRegisterOptimizer() {}
 
@@ -31,7 +32,7 @@
   void BindLabel(BytecodeLabel* label) override;
   void BindLabel(const BytecodeLabel& target, BytecodeLabel* label) override;
   Handle<BytecodeArray> ToBytecodeArray(
-      Isolate* isolate, int fixed_register_count, int parameter_count,
+      Isolate* isolate, int register_count, int parameter_count,
       Handle<FixedArray> handler_table) override;
 
  private:
@@ -39,34 +40,32 @@
 
   class RegisterInfo;
 
-  // TemporaryRegisterObserver interface.
-  void TemporaryRegisterFreeEvent(Register reg) override;
+  // BytecodeRegisterAllocator::Observer interface.
+  void RegisterAllocateEvent(Register reg) override;
+  void RegisterListAllocateEvent(RegisterList reg_list) override;
+  void RegisterListFreeEvent(RegisterList reg) override;
 
   // Helpers for BytecodePipelineStage interface.
   void FlushState();
-  void WriteToNextStage(BytecodeNode* node) const;
-  void WriteToNextStage(BytecodeNode* node,
-                        const BytecodeSourceInfo& output_info) const;
 
   // Update internal state for register transfer from |input| to
   // |output| using |source_info| as source position information if
   // any bytecodes are emitted due to transfer.
   void RegisterTransfer(RegisterInfo* input, RegisterInfo* output,
-                        const BytecodeSourceInfo& source_info);
+                        BytecodeSourceInfo* source_info);
 
   // Emit a register transfer bytecode from |input| to |output|.
-  void OutputRegisterTransfer(
-      RegisterInfo* input, RegisterInfo* output,
-      const BytecodeSourceInfo& source_info = BytecodeSourceInfo());
+  void OutputRegisterTransfer(RegisterInfo* input, RegisterInfo* output,
+                              BytecodeSourceInfo* source_info = nullptr);
 
   // Emits a Nop to preserve source position information in the
   // bytecode pipeline.
-  void EmitNopForSourceInfo(const BytecodeSourceInfo& source_info) const;
+  void EmitNopForSourceInfo(BytecodeSourceInfo* source_info) const;
 
   // Handlers for bytecode nodes for register to register transfers.
-  void DoLdar(const BytecodeNode* const node);
-  void DoMov(const BytecodeNode* const node);
-  void DoStar(const BytecodeNode* const node);
+  void DoLdar(BytecodeNode* node);
+  void DoMov(BytecodeNode* node);
+  void DoStar(BytecodeNode* node);
 
   // Operand processing methods for bytecodes other than those
   // performing register to register transfers.
@@ -133,6 +132,7 @@
   const Register accumulator_;
   RegisterInfo* accumulator_info_;
   const Register temporary_base_;
+  int max_register_index_;
 
   // Direct mapping to register info.
   ZoneVector<RegisterInfo*> register_info_table_;
diff --git a/src/interpreter/bytecode-register.cc b/src/interpreter/bytecode-register.cc
index 31e3b90..1ce512b 100644
--- a/src/interpreter/bytecode-register.cc
+++ b/src/interpreter/bytecode-register.cc
@@ -121,7 +121,7 @@
   return true;
 }
 
-std::string Register::ToString(int parameter_count) {
+std::string Register::ToString(int parameter_count) const {
   if (is_current_context()) {
     return std::string("<context>");
   } else if (is_function_closure()) {
diff --git a/src/interpreter/bytecode-register.h b/src/interpreter/bytecode-register.h
index b698da6..d698d40 100644
--- a/src/interpreter/bytecode-register.h
+++ b/src/interpreter/bytecode-register.h
@@ -66,7 +66,7 @@
                             Register reg4 = Register(),
                             Register reg5 = Register());
 
-  std::string ToString(int parameter_count);
+  std::string ToString(int parameter_count) const;
 
   bool operator==(const Register& other) const {
     return index() == other.index();
@@ -98,6 +98,40 @@
   int index_;
 };
 
+class RegisterList {
+ public:
+  RegisterList() : first_reg_index_(Register().index()), register_count_(0) {}
+  RegisterList(int first_reg_index, int register_count)
+      : first_reg_index_(first_reg_index), register_count_(register_count) {}
+
+  // Returns a new RegisterList which is a truncated version of this list, with
+  // |new_count| registers.
+  const RegisterList Truncate(int new_count) {
+    DCHECK_GE(new_count, 0);
+    DCHECK_LT(new_count, register_count_);
+    return RegisterList(first_reg_index_, new_count);
+  }
+
+  const Register operator[](size_t i) const {
+    DCHECK_LT(static_cast<int>(i), register_count_);
+    return Register(first_reg_index_ + static_cast<int>(i));
+  }
+
+  const Register first_register() const {
+    return (register_count() == 0) ? Register(0) : (*this)[0];
+  }
+
+  const Register last_register() const {
+    return (register_count() == 0) ? Register(0) : (*this)[register_count_ - 1];
+  }
+
+  int register_count() const { return register_count_; }
+
+ private:
+  int first_reg_index_;
+  int register_count_;
+};
+
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
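
RegisterList is just a (first index, count) view over a contiguous run of registers, which is what NewRegisterList in the rewritten allocator hands out. A small standalone sketch of how positions map to register indices and how Truncate yields a shorter view (MiniRegisterList is an illustration, not the V8 class):

#include <cassert>
#include <cstdio>

class MiniRegisterList {
 public:
  MiniRegisterList(int first_reg_index, int register_count)
      : first_reg_index_(first_reg_index), register_count_(register_count) {}

  // Position i in the list is register first_reg_index_ + i.
  int operator[](int i) const {
    assert(i >= 0 && i < register_count_);
    return first_reg_index_ + i;
  }

  // A truncated view keeps the same first register but exposes fewer slots.
  MiniRegisterList Truncate(int new_count) const {
    assert(new_count >= 0 && new_count < register_count_);
    return MiniRegisterList(first_reg_index_, new_count);
  }

  int register_count() const { return register_count_; }

 private:
  int first_reg_index_;
  int register_count_;
};

int main() {
  MiniRegisterList args(/*first_reg_index=*/5, /*register_count=*/3);  // r5-r7
  std::printf("first in r%d, last in r%d\n", args[0],
              args[args.register_count() - 1]);
  MiniRegisterList fewer = args.Truncate(2);  // r5-r6
  std::printf("truncated list ends at r%d\n",
              fewer[fewer.register_count() - 1]);
  return 0;
}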
diff --git a/src/interpreter/bytecode-traits.h b/src/interpreter/bytecode-traits.h
index 672a687..f71598c 100644
--- a/src/interpreter/bytecode-traits.h
+++ b/src/interpreter/bytecode-traits.h
@@ -5,7 +5,7 @@
 #ifndef V8_INTERPRETER_BYTECODE_TRAITS_H_
 #define V8_INTERPRETER_BYTECODE_TRAITS_H_
 
-#include "src/interpreter/bytecodes.h"
+#include "src/interpreter/bytecode-operands.h"
 
 namespace v8 {
 namespace internal {
@@ -65,208 +65,88 @@
   static const OperandSize kOperandSize = static_cast<OperandSize>(kSize);
 };
 
-template <OperandType>
-struct RegisterOperandTraits {
-  static const int kIsRegisterOperand = 0;
+template <int... values>
+struct SumHelper;
+template <int value>
+struct SumHelper<value> {
+  static const int kValue = value;
+};
+template <int value, int... values>
+struct SumHelper<value, values...> {
+  static const int kValue = value + SumHelper<values...>::kValue;
 };
 
-#define DECLARE_REGISTER_OPERAND(Name, _)              \
-  template <>                                          \
-  struct RegisterOperandTraits<OperandType::k##Name> { \
-    static const int kIsRegisterOperand = 1;           \
-  };
-REGISTER_OPERAND_TYPE_LIST(DECLARE_REGISTER_OPERAND)
-#undef DECLARE_REGISTER_OPERAND
-
-template <AccumulatorUse, OperandType...>
-struct BytecodeTraits {};
-
-template <AccumulatorUse accumulator_use, OperandType operand_0,
-          OperandType operand_1, OperandType operand_2, OperandType operand_3>
-struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2,
-                      operand_3> {
-  static const OperandType* GetOperandTypes() {
-    static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
-                                                operand_3, OperandType::kNone};
-    return operand_types;
-  }
-
-  static const OperandTypeInfo* GetOperandTypeInfos() {
-    static const OperandTypeInfo operand_type_infos[] = {
-        OperandTraits<operand_0>::kOperandTypeInfo,
-        OperandTraits<operand_1>::kOperandTypeInfo,
-        OperandTraits<operand_2>::kOperandTypeInfo,
-        OperandTraits<operand_3>::kOperandTypeInfo, OperandTypeInfo::kNone};
-    return operand_type_infos;
-  }
-
-  template <OperandType ot>
-  static inline bool HasAnyOperandsOfType() {
-    return operand_0 == ot || operand_1 == ot || operand_2 == ot ||
-           operand_3 == ot;
-  }
-
-  static inline bool IsScalable() {
-    return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_1>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_2>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_3>::TypeInfoTraits::kIsScalable);
-  }
-
+template <AccumulatorUse accumulator_use, OperandType... operands>
+struct BytecodeTraits {
+  static const OperandType kOperandTypes[];
+  static const OperandTypeInfo kOperandTypeInfos[];
+  static const OperandSize kSingleScaleOperandSizes[];
+  static const OperandSize kDoubleScaleOperandSizes[];
+  static const OperandSize kQuadrupleScaleOperandSizes[];
+  static const int kSingleScaleSize = SumHelper<
+      1, OperandScaler<operands, OperandScale::kSingle>::kSize...>::kValue;
+  static const int kDoubleScaleSize = SumHelper<
+      1, OperandScaler<operands, OperandScale::kDouble>::kSize...>::kValue;
+  static const int kQuadrupleScaleSize = SumHelper<
+      1, OperandScaler<operands, OperandScale::kQuadruple>::kSize...>::kValue;
   static const AccumulatorUse kAccumulatorUse = accumulator_use;
-  static const int kOperandCount = 4;
-  static const int kRegisterOperandCount =
-      RegisterOperandTraits<operand_0>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_1>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_2>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_3>::kIsRegisterOperand;
+  static const int kOperandCount = sizeof...(operands);
 };
 
-template <AccumulatorUse accumulator_use, OperandType operand_0,
-          OperandType operand_1, OperandType operand_2>
-struct BytecodeTraits<accumulator_use, operand_0, operand_1, operand_2> {
-  static const OperandType* GetOperandTypes() {
-    static const OperandType operand_types[] = {operand_0, operand_1, operand_2,
-                                                OperandType::kNone};
-    return operand_types;
-  }
-
-  static const OperandTypeInfo* GetOperandTypeInfos() {
-    static const OperandTypeInfo operand_type_infos[] = {
-        OperandTraits<operand_0>::kOperandTypeInfo,
-        OperandTraits<operand_1>::kOperandTypeInfo,
-        OperandTraits<operand_2>::kOperandTypeInfo, OperandTypeInfo::kNone};
-    return operand_type_infos;
-  }
-
-  template <OperandType ot>
-  static inline bool HasAnyOperandsOfType() {
-    return operand_0 == ot || operand_1 == ot || operand_2 == ot;
-  }
-
-  static inline bool IsScalable() {
-    return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_1>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_2>::TypeInfoTraits::kIsScalable);
-  }
-
-  static const AccumulatorUse kAccumulatorUse = accumulator_use;
-  static const int kOperandCount = 3;
-  static const int kRegisterOperandCount =
-      RegisterOperandTraits<operand_0>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_1>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_2>::kIsRegisterOperand;
-};
-
-template <AccumulatorUse accumulator_use, OperandType operand_0,
-          OperandType operand_1>
-struct BytecodeTraits<accumulator_use, operand_0, operand_1> {
-  static const OperandType* GetOperandTypes() {
-    static const OperandType operand_types[] = {operand_0, operand_1,
-                                                OperandType::kNone};
-    return operand_types;
-  }
-
-  static const OperandTypeInfo* GetOperandTypeInfos() {
-    static const OperandTypeInfo operand_type_infos[] = {
-        OperandTraits<operand_0>::kOperandTypeInfo,
-        OperandTraits<operand_1>::kOperandTypeInfo, OperandTypeInfo::kNone};
-    return operand_type_infos;
-  }
-
-  template <OperandType ot>
-  static inline bool HasAnyOperandsOfType() {
-    return operand_0 == ot || operand_1 == ot;
-  }
-
-  static inline bool IsScalable() {
-    return (OperandTraits<operand_0>::TypeInfoTraits::kIsScalable |
-            OperandTraits<operand_1>::TypeInfoTraits::kIsScalable);
-  }
-
-  static const AccumulatorUse kAccumulatorUse = accumulator_use;
-  static const int kOperandCount = 2;
-  static const int kRegisterOperandCount =
-      RegisterOperandTraits<operand_0>::kIsRegisterOperand +
-      RegisterOperandTraits<operand_1>::kIsRegisterOperand;
-};
-
-template <AccumulatorUse accumulator_use, OperandType operand_0>
-struct BytecodeTraits<accumulator_use, operand_0> {
-  static const OperandType* GetOperandTypes() {
-    static const OperandType operand_types[] = {operand_0, OperandType::kNone};
-    return operand_types;
-  }
-
-  static const OperandTypeInfo* GetOperandTypeInfos() {
-    static const OperandTypeInfo operand_type_infos[] = {
-        OperandTraits<operand_0>::kOperandTypeInfo, OperandTypeInfo::kNone};
-    return operand_type_infos;
-  }
-
-  template <OperandType ot>
-  static inline bool HasAnyOperandsOfType() {
-    return operand_0 == ot;
-  }
-
-  static inline bool IsScalable() {
-    return OperandTraits<operand_0>::TypeInfoTraits::kIsScalable;
-  }
-
-  static const AccumulatorUse kAccumulatorUse = accumulator_use;
-  static const int kOperandCount = 1;
-  static const int kRegisterOperandCount =
-      RegisterOperandTraits<operand_0>::kIsRegisterOperand;
-};
+template <AccumulatorUse accumulator_use, OperandType... operands>
+STATIC_CONST_MEMBER_DEFINITION const OperandType
+    BytecodeTraits<accumulator_use, operands...>::kOperandTypes[] = {
+        operands...};
+template <AccumulatorUse accumulator_use, OperandType... operands>
+STATIC_CONST_MEMBER_DEFINITION const OperandTypeInfo
+    BytecodeTraits<accumulator_use, operands...>::kOperandTypeInfos[] = {
+        OperandTraits<operands>::kOperandTypeInfo...};
+template <AccumulatorUse accumulator_use, OperandType... operands>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize
+    BytecodeTraits<accumulator_use, operands...>::kSingleScaleOperandSizes[] = {
+        OperandScaler<operands, OperandScale::kSingle>::kOperandSize...};
+template <AccumulatorUse accumulator_use, OperandType... operands>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize
+    BytecodeTraits<accumulator_use, operands...>::kDoubleScaleOperandSizes[] = {
+        OperandScaler<operands, OperandScale::kDouble>::kOperandSize...};
+template <AccumulatorUse accumulator_use, OperandType... operands>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize BytecodeTraits<
+    accumulator_use, operands...>::kQuadrupleScaleOperandSizes[] = {
+    OperandScaler<operands, OperandScale::kQuadruple>::kOperandSize...};
 
 template <AccumulatorUse accumulator_use>
 struct BytecodeTraits<accumulator_use> {
-  static const OperandType* GetOperandTypes() {
-    static const OperandType operand_types[] = {OperandType::kNone};
-    return operand_types;
-  }
-
-  static const OperandTypeInfo* GetOperandTypeInfos() {
-    static const OperandTypeInfo operand_type_infos[] = {
-        OperandTypeInfo::kNone};
-    return operand_type_infos;
-  }
-
-  template <OperandType ot>
-  static inline bool HasAnyOperandsOfType() {
-    return false;
-  }
-
-  static inline bool IsScalable() { return false; }
-
+  static const OperandType kOperandTypes[];
+  static const OperandTypeInfo kOperandTypeInfos[];
+  static const OperandSize kSingleScaleOperandSizes[];
+  static const OperandSize kDoubleScaleOperandSizes[];
+  static const OperandSize kQuadrupleScaleOperandSizes[];
+  static const int kSingleScaleSize = 1;
+  static const int kDoubleScaleSize = 1;
+  static const int kQuadrupleScaleSize = 1;
   static const AccumulatorUse kAccumulatorUse = accumulator_use;
   static const int kOperandCount = 0;
-  static const int kRegisterOperandCount = 0;
 };
 
-static OperandSize ScaledOperandSize(OperandType operand_type,
-                                     OperandScale operand_scale) {
-  STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
-                OperandScale::kLast == OperandScale::kQuadruple);
-  int index = static_cast<int>(operand_scale) >> 1;
-  switch (operand_type) {
-#define CASE(Name, TypeInfo)                                    \
-  case OperandType::k##Name: {                                  \
-    static const OperandSize kOperandSizes[] = {                \
-        OperandScaler<OperandType::k##Name,                     \
-                      OperandScale::kSingle>::kOperandSize,     \
-        OperandScaler<OperandType::k##Name,                     \
-                      OperandScale::kDouble>::kOperandSize,     \
-        OperandScaler<OperandType::k##Name,                     \
-                      OperandScale::kQuadruple>::kOperandSize}; \
-    return kOperandSizes[index];                                \
-  }
-    OPERAND_TYPE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return OperandSize::kNone;
-}
+template <AccumulatorUse accumulator_use>
+STATIC_CONST_MEMBER_DEFINITION const OperandType
+    BytecodeTraits<accumulator_use>::kOperandTypes[] = {OperandType::kNone};
+template <AccumulatorUse accumulator_use>
+STATIC_CONST_MEMBER_DEFINITION const OperandTypeInfo
+    BytecodeTraits<accumulator_use>::kOperandTypeInfos[] = {
+        OperandTypeInfo::kNone};
+template <AccumulatorUse accumulator_use>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize
+    BytecodeTraits<accumulator_use>::kSingleScaleOperandSizes[] = {
+        OperandSize::kNone};
+template <AccumulatorUse accumulator_use>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize
+    BytecodeTraits<accumulator_use>::kDoubleScaleOperandSizes[] = {
+        OperandSize::kNone};
+template <AccumulatorUse accumulator_use>
+STATIC_CONST_MEMBER_DEFINITION const OperandSize
+    BytecodeTraits<accumulator_use>::kQuadrupleScaleOperandSizes[] = {
+        OperandSize::kNone};
 
 }  // namespace interpreter
 }  // namespace internal
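
For context, the variadic BytecodeTraits introduced above folds the per-operand byte sizes into a single compile-time constant (the leading 1 in the SumHelper call accounts for the opcode byte itself). Below is a self-contained sketch of that recursive-sum pattern using illustrative names only; it is not the real SumHelper/OperandScaler machinery, just the same technique in miniature.

#include <iostream>

// Illustrative stand-in for the recursive compile-time sum used by
// BytecodeTraits::kSingleScaleSize and friends (names are hypothetical).
template <int... kValues>
struct SumSketch;

template <int kValue>
struct SumSketch<kValue> {
  static const int kResult = kValue;
};

template <int kValue, int... kRest>
struct SumSketch<kValue, kRest...> {
  static const int kResult = kValue + SumSketch<kRest...>::kResult;
};

int main() {
  // One opcode byte plus three single-byte operands -> 4 bytes total.
  std::cout << SumSketch<1, 1, 1, 1>::kResult << std::endl;  // prints 4
  return 0;
}
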
diff --git a/src/interpreter/bytecodes.cc b/src/interpreter/bytecodes.cc
index 09bcd22..c58f468 100644
--- a/src/interpreter/bytecodes.cc
+++ b/src/interpreter/bytecodes.cc
@@ -7,14 +7,55 @@
 #include <iomanip>
 
 #include "src/base/bits.h"
-#include "src/globals.h"
 #include "src/interpreter/bytecode-traits.h"
 
 namespace v8 {
 namespace internal {
 namespace interpreter {
 
-STATIC_CONST_MEMBER_DEFINITION const int Bytecodes::kMaxOperands;
+// clang-format off
+const OperandType* const Bytecodes::kOperandTypes[] = {
+#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandTypes,
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+
+const OperandTypeInfo* const Bytecodes::kOperandTypeInfos[] = {
+#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandTypeInfos,
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+
+const int Bytecodes::kOperandCount[] = {
+#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kOperandCount,
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+
+const AccumulatorUse Bytecodes::kAccumulatorUse[] = {
+#define ENTRY(Name, ...) BytecodeTraits<__VA_ARGS__>::kAccumulatorUse,
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+
+const int Bytecodes::kBytecodeSizes[][3] = {
+#define ENTRY(Name, ...)                            \
+  { BytecodeTraits<__VA_ARGS__>::kSingleScaleSize,  \
+    BytecodeTraits<__VA_ARGS__>::kDoubleScaleSize,  \
+    BytecodeTraits<__VA_ARGS__>::kQuadrupleScaleSize },
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+
+const OperandSize* const Bytecodes::kOperandSizes[][3] = {
+#define ENTRY(Name, ...)                                    \
+  { BytecodeTraits<__VA_ARGS__>::kSingleScaleOperandSizes,  \
+    BytecodeTraits<__VA_ARGS__>::kDoubleScaleOperandSizes,  \
+    BytecodeTraits<__VA_ARGS__>::kQuadrupleScaleOperandSizes },
+  BYTECODE_LIST(ENTRY)
+#undef ENTRY
+};
+// clang-format on
 
 // static
 const char* Bytecodes::ToString(Bytecode bytecode) {
@@ -44,77 +85,6 @@
 }
 
 // static
-const char* Bytecodes::AccumulatorUseToString(AccumulatorUse accumulator_use) {
-  switch (accumulator_use) {
-    case AccumulatorUse::kNone:
-      return "None";
-    case AccumulatorUse::kRead:
-      return "Read";
-    case AccumulatorUse::kWrite:
-      return "Write";
-    case AccumulatorUse::kReadWrite:
-      return "ReadWrite";
-  }
-  UNREACHABLE();
-  return "";
-}
-
-// static
-const char* Bytecodes::OperandTypeToString(OperandType operand_type) {
-  switch (operand_type) {
-#define CASE(Name, _)        \
-  case OperandType::k##Name: \
-    return #Name;
-    OPERAND_TYPE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return "";
-}
-
-// static
-const char* Bytecodes::OperandScaleToString(OperandScale operand_scale) {
-  switch (operand_scale) {
-#define CASE(Name, _)         \
-  case OperandScale::k##Name: \
-    return #Name;
-    OPERAND_SCALE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return "";
-}
-
-// static
-const char* Bytecodes::OperandSizeToString(OperandSize operand_size) {
-  switch (operand_size) {
-    case OperandSize::kNone:
-      return "None";
-    case OperandSize::kByte:
-      return "Byte";
-    case OperandSize::kShort:
-      return "Short";
-    case OperandSize::kQuad:
-      return "Quad";
-  }
-  UNREACHABLE();
-  return "";
-}
-
-// static
-uint8_t Bytecodes::ToByte(Bytecode bytecode) {
-  DCHECK_LE(bytecode, Bytecode::kLast);
-  return static_cast<uint8_t>(bytecode);
-}
-
-// static
-Bytecode Bytecodes::FromByte(uint8_t value) {
-  Bytecode bytecode = static_cast<Bytecode>(value);
-  DCHECK(bytecode <= Bytecode::kLast);
-  return bytecode;
-}
-
-// static
 Bytecode Bytecodes::GetDebugBreak(Bytecode bytecode) {
   DCHECK(!IsDebugBreak(bytecode));
   if (bytecode == Bytecode::kWide) {
@@ -124,7 +94,7 @@
     return Bytecode::kDebugBreakExtraWide;
   }
   int bytecode_size = Size(bytecode, OperandScale::kSingle);
-#define RETURN_IF_DEBUG_BREAK_SIZE_MATCHES(Name, ...)                    \
+#define RETURN_IF_DEBUG_BREAK_SIZE_MATCHES(Name)                         \
   if (bytecode_size == Size(Bytecode::k##Name, OperandScale::kSingle)) { \
     return Bytecode::k##Name;                                            \
   }
@@ -135,224 +105,6 @@
 }
 
 // static
-int Bytecodes::Size(Bytecode bytecode, OperandScale operand_scale) {
-  int size = 1;
-  for (int i = 0; i < NumberOfOperands(bytecode); i++) {
-    OperandSize operand_size = GetOperandSize(bytecode, i, operand_scale);
-    int delta = static_cast<int>(operand_size);
-    DCHECK(base::bits::IsPowerOfTwo32(static_cast<uint32_t>(delta)));
-    size += delta;
-  }
-  return size;
-}
-
-// static
-size_t Bytecodes::ReturnCount(Bytecode bytecode) {
-  return bytecode == Bytecode::kReturn ? 1 : 0;
-}
-
-// static
-int Bytecodes::NumberOfOperands(Bytecode bytecode) {
-  DCHECK(bytecode <= Bytecode::kLast);
-  switch (bytecode) {
-#define CASE(Name, ...)   \
-  case Bytecode::k##Name: \
-    return BytecodeTraits<__VA_ARGS__>::kOperandCount;
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return 0;
-}
-
-// static
-int Bytecodes::NumberOfRegisterOperands(Bytecode bytecode) {
-  DCHECK(bytecode <= Bytecode::kLast);
-  switch (bytecode) {
-#define CASE(Name, ...)                              \
-  case Bytecode::k##Name:                            \
-    typedef BytecodeTraits<__VA_ARGS__> Name##Trait; \
-    return Name##Trait::kRegisterOperandCount;
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return false;
-}
-
-// static
-Bytecode Bytecodes::OperandScaleToPrefixBytecode(OperandScale operand_scale) {
-  switch (operand_scale) {
-    case OperandScale::kQuadruple:
-      return Bytecode::kExtraWide;
-    case OperandScale::kDouble:
-      return Bytecode::kWide;
-    default:
-      UNREACHABLE();
-      return Bytecode::kIllegal;
-  }
-}
-
-// static
-bool Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale operand_scale) {
-  return operand_scale != OperandScale::kSingle;
-}
-
-// static
-OperandScale Bytecodes::PrefixBytecodeToOperandScale(Bytecode bytecode) {
-  switch (bytecode) {
-    case Bytecode::kExtraWide:
-    case Bytecode::kDebugBreakExtraWide:
-      return OperandScale::kQuadruple;
-    case Bytecode::kWide:
-    case Bytecode::kDebugBreakWide:
-      return OperandScale::kDouble;
-    default:
-      UNREACHABLE();
-      return OperandScale::kSingle;
-  }
-}
-
-// static
-AccumulatorUse Bytecodes::GetAccumulatorUse(Bytecode bytecode) {
-  DCHECK(bytecode <= Bytecode::kLast);
-  switch (bytecode) {
-#define CASE(Name, ...)   \
-  case Bytecode::k##Name: \
-    return BytecodeTraits<__VA_ARGS__>::kAccumulatorUse;
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return AccumulatorUse::kNone;
-}
-
-// static
-bool Bytecodes::ReadsAccumulator(Bytecode bytecode) {
-  return (GetAccumulatorUse(bytecode) & AccumulatorUse::kRead) ==
-         AccumulatorUse::kRead;
-}
-
-// static
-bool Bytecodes::WritesAccumulator(Bytecode bytecode) {
-  return (GetAccumulatorUse(bytecode) & AccumulatorUse::kWrite) ==
-         AccumulatorUse::kWrite;
-}
-
-// static
-bool Bytecodes::WritesBooleanToAccumulator(Bytecode bytecode) {
-  switch (bytecode) {
-    case Bytecode::kLdaTrue:
-    case Bytecode::kLdaFalse:
-    case Bytecode::kToBooleanLogicalNot:
-    case Bytecode::kLogicalNot:
-    case Bytecode::kTestEqual:
-    case Bytecode::kTestNotEqual:
-    case Bytecode::kTestEqualStrict:
-    case Bytecode::kTestLessThan:
-    case Bytecode::kTestLessThanOrEqual:
-    case Bytecode::kTestGreaterThan:
-    case Bytecode::kTestGreaterThanOrEqual:
-    case Bytecode::kTestInstanceOf:
-    case Bytecode::kTestIn:
-    case Bytecode::kForInDone:
-      return true;
-    default:
-      return false;
-  }
-}
-
-// static
-bool Bytecodes::IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
-  switch (bytecode) {
-    case Bytecode::kLdaZero:
-    case Bytecode::kLdaSmi:
-    case Bytecode::kLdaUndefined:
-    case Bytecode::kLdaNull:
-    case Bytecode::kLdaTheHole:
-    case Bytecode::kLdaTrue:
-    case Bytecode::kLdaFalse:
-    case Bytecode::kLdaConstant:
-    case Bytecode::kLdar:
-      return true;
-    default:
-      return false;
-  }
-}
-
-// static
-bool Bytecodes::IsJumpWithoutEffects(Bytecode bytecode) {
-  return IsJump(bytecode) && !IsJumpIfToBoolean(bytecode);
-}
-
-// static
-bool Bytecodes::IsRegisterLoadWithoutEffects(Bytecode bytecode) {
-  switch (bytecode) {
-    case Bytecode::kMov:
-    case Bytecode::kPopContext:
-    case Bytecode::kPushContext:
-    case Bytecode::kStar:
-    case Bytecode::kLdrUndefined:
-      return true;
-    default:
-      return false;
-  }
-}
-
-// static
-bool Bytecodes::IsWithoutExternalSideEffects(Bytecode bytecode) {
-  // These bytecodes only manipulate interpreter frame state and will
-  // never throw.
-  return (IsAccumulatorLoadWithoutEffects(bytecode) ||
-          IsRegisterLoadWithoutEffects(bytecode) ||
-          bytecode == Bytecode::kNop || IsJumpWithoutEffects(bytecode));
-}
-
-// static
-OperandType Bytecodes::GetOperandType(Bytecode bytecode, int i) {
-  DCHECK_LE(bytecode, Bytecode::kLast);
-  DCHECK_LT(i, NumberOfOperands(bytecode));
-  DCHECK_GE(i, 0);
-  return GetOperandTypes(bytecode)[i];
-}
-
-// static
-const OperandType* Bytecodes::GetOperandTypes(Bytecode bytecode) {
-  DCHECK(bytecode <= Bytecode::kLast);
-  switch (bytecode) {
-#define CASE(Name, ...)   \
-  case Bytecode::k##Name: \
-    return BytecodeTraits<__VA_ARGS__>::GetOperandTypes();
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return nullptr;
-}
-
-// static
-const OperandTypeInfo* Bytecodes::GetOperandTypeInfos(Bytecode bytecode) {
-  DCHECK(bytecode <= Bytecode::kLast);
-  switch (bytecode) {
-#define CASE(Name, ...)   \
-  case Bytecode::k##Name: \
-    return BytecodeTraits<__VA_ARGS__>::GetOperandTypeInfos();
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return nullptr;
-}
-
-// static
-OperandSize Bytecodes::GetOperandSize(Bytecode bytecode, int i,
-                                      OperandScale operand_scale) {
-  DCHECK_LT(i, NumberOfOperands(bytecode));
-  OperandType operand_type = GetOperandType(bytecode, i);
-  return SizeOfOperand(operand_type, operand_scale);
-}
-
-// static
 int Bytecodes::GetOperandOffset(Bytecode bytecode, int i,
                                 OperandScale operand_scale) {
   DCHECK_LT(i, Bytecodes::NumberOfOperands(bytecode));
@@ -367,67 +119,6 @@
 }
 
 // static
-OperandSize Bytecodes::SizeOfOperand(OperandType operand_type,
-                                     OperandScale operand_scale) {
-  return static_cast<OperandSize>(
-      ScaledOperandSize(operand_type, operand_scale));
-}
-
-// static
-bool Bytecodes::IsConditionalJumpImmediate(Bytecode bytecode) {
-  return bytecode == Bytecode::kJumpIfTrue ||
-         bytecode == Bytecode::kJumpIfFalse ||
-         bytecode == Bytecode::kJumpIfToBooleanTrue ||
-         bytecode == Bytecode::kJumpIfToBooleanFalse ||
-         bytecode == Bytecode::kJumpIfNotHole ||
-         bytecode == Bytecode::kJumpIfNull ||
-         bytecode == Bytecode::kJumpIfUndefined;
-}
-
-// static
-bool Bytecodes::IsConditionalJumpConstant(Bytecode bytecode) {
-  return bytecode == Bytecode::kJumpIfTrueConstant ||
-         bytecode == Bytecode::kJumpIfFalseConstant ||
-         bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
-         bytecode == Bytecode::kJumpIfToBooleanFalseConstant ||
-         bytecode == Bytecode::kJumpIfNotHoleConstant ||
-         bytecode == Bytecode::kJumpIfNullConstant ||
-         bytecode == Bytecode::kJumpIfUndefinedConstant;
-}
-
-// static
-bool Bytecodes::IsConditionalJump(Bytecode bytecode) {
-  return IsConditionalJumpImmediate(bytecode) ||
-         IsConditionalJumpConstant(bytecode);
-}
-
-
-// static
-bool Bytecodes::IsJumpImmediate(Bytecode bytecode) {
-  return bytecode == Bytecode::kJump || IsConditionalJumpImmediate(bytecode);
-}
-
-
-// static
-bool Bytecodes::IsJumpConstant(Bytecode bytecode) {
-  return bytecode == Bytecode::kJumpConstant ||
-         IsConditionalJumpConstant(bytecode);
-}
-
-// static
-bool Bytecodes::IsJump(Bytecode bytecode) {
-  return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
-}
-
-// static
-bool Bytecodes::IsJumpIfToBoolean(Bytecode bytecode) {
-  return bytecode == Bytecode::kJumpIfToBooleanTrue ||
-         bytecode == Bytecode::kJumpIfToBooleanFalse ||
-         bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
-         bytecode == Bytecode::kJumpIfToBooleanFalseConstant;
-}
-
-// static
 Bytecode Bytecodes::GetJumpWithoutToBoolean(Bytecode bytecode) {
   switch (bytecode) {
     case Bytecode::kJumpIfToBooleanTrue:
@@ -446,19 +137,6 @@
 }
 
 // static
-bool Bytecodes::IsCallOrNew(Bytecode bytecode) {
-  return bytecode == Bytecode::kCall || bytecode == Bytecode::kTailCall ||
-         bytecode == Bytecode::kNew;
-}
-
-// static
-bool Bytecodes::IsCallRuntime(Bytecode bytecode) {
-  return bytecode == Bytecode::kCallRuntime ||
-         bytecode == Bytecode::kCallRuntimeForPair ||
-         bytecode == Bytecode::kInvokeIntrinsic;
-}
-
-// static
 bool Bytecodes::IsDebugBreak(Bytecode bytecode) {
   switch (bytecode) {
 #define CASE(Name, ...) case Bytecode::k##Name:
@@ -472,53 +150,6 @@
 }
 
 // static
-bool Bytecodes::IsLdarOrStar(Bytecode bytecode) {
-  return bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar;
-}
-
-// static
-bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
-  switch (bytecode) {
-#define CASE(Name, ...)                              \
-  case Bytecode::k##Name:                            \
-    typedef BytecodeTraits<__VA_ARGS__> Name##Trait; \
-    return Name##Trait::IsScalable();
-    BYTECODE_LIST(CASE)
-#undef CASE
-  }
-  UNREACHABLE();
-  return false;
-}
-
-// static
-bool Bytecodes::IsPrefixScalingBytecode(Bytecode bytecode) {
-  switch (bytecode) {
-    case Bytecode::kExtraWide:
-    case Bytecode::kDebugBreakExtraWide:
-    case Bytecode::kWide:
-    case Bytecode::kDebugBreakWide:
-      return true;
-    default:
-      return false;
-  }
-}
-
-// static
-bool Bytecodes::PutsNameInAccumulator(Bytecode bytecode) {
-  return bytecode == Bytecode::kTypeOf;
-}
-
-// static
-bool Bytecodes::IsJumpOrReturn(Bytecode bytecode) {
-  return bytecode == Bytecode::kReturn || IsJump(bytecode);
-}
-
-// static
-bool Bytecodes::IsMaybeRegisterOperandType(OperandType operand_type) {
-  return operand_type == OperandType::kMaybeReg;
-}
-
-// static
 bool Bytecodes::IsRegisterOperandType(OperandType operand_type) {
   switch (operand_type) {
 #define CASE(Name, _)        \
@@ -599,21 +230,11 @@
 }
 
 // static
-int Bytecodes::GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
-  switch (operand_type) {
-    case OperandType::kMaybeReg:
-    case OperandType::kReg:
-    case OperandType::kRegOut:
-      return 1;
-    case OperandType::kRegPair:
-    case OperandType::kRegOutPair:
-      return 2;
-    case OperandType::kRegOutTriple:
-      return 3;
-    default:
-      return 0;
+bool Bytecodes::IsBytecodeWithScalableOperands(Bytecode bytecode) {
+  for (int i = 0; i < NumberOfOperands(bytecode); i++) {
+    if (OperandIsScalable(bytecode, i)) return true;
   }
-  return 0;
+  return false;
 }
 
 // static
@@ -630,25 +251,28 @@
 }
 
 // static
-OperandSize Bytecodes::SizeForSignedOperand(int value) {
-  if (value >= kMinInt8 && value <= kMaxInt8) {
-    return OperandSize::kByte;
-  } else if (value >= kMinInt16 && value <= kMaxInt16) {
-    return OperandSize::kShort;
-  } else {
-    return OperandSize::kQuad;
-  }
-}
-
-// static
-OperandSize Bytecodes::SizeForUnsignedOperand(uint32_t value) {
-  if (value <= kMaxUInt8) {
-    return OperandSize::kByte;
-  } else if (value <= kMaxUInt16) {
-    return OperandSize::kShort;
-  } else {
-    return OperandSize::kQuad;
-  }
+OperandSize Bytecodes::SizeOfOperand(OperandType operand_type,
+                                     OperandScale operand_scale) {
+  DCHECK_LE(operand_type, OperandType::kLast);
+  DCHECK_GE(operand_scale, OperandScale::kSingle);
+  DCHECK_LE(operand_scale, OperandScale::kLast);
+  STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
+                OperandScale::kLast == OperandScale::kQuadruple);
+  int scale_index = static_cast<int>(operand_scale) >> 1;
+  // clang-format off
+  static const OperandSize kOperandSizes[][3] = {
+#define ENTRY(Name, ...)                                \
+  { OperandScaler<OperandType::k##Name,                 \
+                 OperandScale::kSingle>::kOperandSize,  \
+    OperandScaler<OperandType::k##Name,                 \
+                 OperandScale::kDouble>::kOperandSize,  \
+    OperandScaler<OperandType::k##Name,                 \
+                 OperandScale::kQuadruple>::kOperandSize },
+    OPERAND_TYPE_LIST(ENTRY)
+#undef ENTRY
+  };
+  // clang-format on
+  return kOperandSizes[static_cast<size_t>(operand_type)][scale_index];
 }
 
 // static
@@ -662,22 +286,6 @@
   return os << Bytecodes::ToString(bytecode);
 }
 
-std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use) {
-  return os << Bytecodes::AccumulatorUseToString(use);
-}
-
-std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size) {
-  return os << Bytecodes::OperandSizeToString(operand_size);
-}
-
-std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale) {
-  return os << Bytecodes::OperandScaleToString(operand_scale);
-}
-
-std::ostream& operator<<(std::ostream& os, const OperandType& operand_type) {
-  return os << Bytecodes::OperandTypeToString(operand_type);
-}
-
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
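
The bytecodes.cc changes above replace the old per-call switch statements with flat tables indexed by bytecode and operand scale; the column index comes from shifting the scale value (1, 2, 4) right by one bit, yielding 0, 1, 2. A minimal sketch of that indexing scheme follows, with made-up table contents rather than the actual V8 sizes.

#include <cassert>
#include <cstdint>

enum class Scale : uint8_t { kSingle = 1, kDouble = 2, kQuadruple = 4 };

// Hypothetical per-bytecode sizes at each of the three scales:
// row 0 has no operands, row 1 has one scalable (1/2/4 byte) operand.
static const int kSizeTable[][3] = {
  {1, 1, 1},
  {2, 3, 5},
};

int SizeOf(int bytecode_row, Scale scale) {
  // 1 >> 1 == 0, 2 >> 1 == 1, 4 >> 1 == 2.
  int scale_index = static_cast<int>(scale) >> 1;
  return kSizeTable[bytecode_row][scale_index];
}

int main() {
  assert(SizeOf(0, Scale::kDouble) == 1);
  assert(SizeOf(1, Scale::kQuadruple) == 5);
  return 0;
}
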
diff --git a/src/interpreter/bytecodes.h b/src/interpreter/bytecodes.h
index 036ae72..6232966 100644
--- a/src/interpreter/bytecodes.h
+++ b/src/interpreter/bytecodes.h
@@ -9,6 +9,9 @@
 #include <iosfwd>
 #include <string>
 
+#include "src/globals.h"
+#include "src/interpreter/bytecode-operands.h"
+
 // This interface and its implementation are independent of the
 // libv8_base library as they are used by the interpreter and the
 // standalone mkpeephole table generator program.
@@ -17,64 +20,8 @@
 namespace internal {
 namespace interpreter {
 
-#define INVALID_OPERAND_TYPE_LIST(V) V(None, OperandTypeInfo::kNone)
-
-#define REGISTER_INPUT_OPERAND_TYPE_LIST(V)         \
-  V(MaybeReg, OperandTypeInfo::kScalableSignedByte) \
-  V(Reg, OperandTypeInfo::kScalableSignedByte)      \
-  V(RegPair, OperandTypeInfo::kScalableSignedByte)
-
-#define REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)          \
-  V(RegOut, OperandTypeInfo::kScalableSignedByte)     \
-  V(RegOutPair, OperandTypeInfo::kScalableSignedByte) \
-  V(RegOutTriple, OperandTypeInfo::kScalableSignedByte)
-
-#define SCALAR_OPERAND_TYPE_LIST(V)                   \
-  V(Flag8, OperandTypeInfo::kFixedUnsignedByte)       \
-  V(IntrinsicId, OperandTypeInfo::kFixedUnsignedByte) \
-  V(Idx, OperandTypeInfo::kScalableUnsignedByte)      \
-  V(Imm, OperandTypeInfo::kScalableSignedByte)        \
-  V(RegCount, OperandTypeInfo::kScalableUnsignedByte) \
-  V(RuntimeId, OperandTypeInfo::kFixedUnsignedShort)
-
-#define REGISTER_OPERAND_TYPE_LIST(V) \
-  REGISTER_INPUT_OPERAND_TYPE_LIST(V) \
-  REGISTER_OUTPUT_OPERAND_TYPE_LIST(V)
-
-#define NON_REGISTER_OPERAND_TYPE_LIST(V) \
-  INVALID_OPERAND_TYPE_LIST(V)            \
-  SCALAR_OPERAND_TYPE_LIST(V)
-
-// The list of operand types used by bytecodes.
-#define OPERAND_TYPE_LIST(V)        \
-  NON_REGISTER_OPERAND_TYPE_LIST(V) \
-  REGISTER_OPERAND_TYPE_LIST(V)
-
-// Define one debug break bytecode for each possible size of unscaled
-// bytecodes. Format is V(<bytecode>, <accumulator_use>, <operands>).
-#define DEBUG_BREAK_PLAIN_BYTECODE_LIST(V)                                    \
-  V(DebugBreak0, AccumulatorUse::kRead)                                       \
-  V(DebugBreak1, AccumulatorUse::kRead, OperandType::kReg)                    \
-  V(DebugBreak2, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg) \
-  V(DebugBreak3, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
-    OperandType::kReg)                                                        \
-  V(DebugBreak4, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg, \
-    OperandType::kReg, OperandType::kReg)                                     \
-  V(DebugBreak5, AccumulatorUse::kRead, OperandType::kRuntimeId,              \
-    OperandType::kReg, OperandType::kReg)                                     \
-  V(DebugBreak6, AccumulatorUse::kRead, OperandType::kRuntimeId,              \
-    OperandType::kReg, OperandType::kReg, OperandType::kReg)
-
-// Define one debug break for each widening prefix.
-#define DEBUG_BREAK_PREFIX_BYTECODE_LIST(V) \
-  V(DebugBreakWide, AccumulatorUse::kRead)  \
-  V(DebugBreakExtraWide, AccumulatorUse::kRead)
-
-#define DEBUG_BREAK_BYTECODE_LIST(V) \
-  DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
-  DEBUG_BREAK_PREFIX_BYTECODE_LIST(V)
-
 // The list of bytecodes which are interpreted by the interpreter.
+// Format is V(<bytecode>, <accumulator_use>, <operands>).
 #define BYTECODE_LIST(V)                                                       \
   /* Extended width operands */                                                \
   V(Wide, AccumulatorUse::kNone)                                               \
@@ -106,15 +53,23 @@
   V(PushContext, AccumulatorUse::kRead, OperandType::kRegOut)                  \
   V(PopContext, AccumulatorUse::kNone, OperandType::kReg)                      \
   V(LdaContextSlot, AccumulatorUse::kWrite, OperandType::kReg,                 \
-    OperandType::kIdx)                                                         \
+    OperandType::kIdx, OperandType::kUImm)                                     \
   V(LdrContextSlot, AccumulatorUse::kNone, OperandType::kReg,                  \
-    OperandType::kIdx, OperandType::kRegOut)                                   \
+    OperandType::kIdx, OperandType::kUImm, OperandType::kRegOut)               \
   V(StaContextSlot, AccumulatorUse::kRead, OperandType::kReg,                  \
-    OperandType::kIdx)                                                         \
+    OperandType::kIdx, OperandType::kUImm)                                     \
                                                                                \
   /* Load-Store lookup slots */                                                \
   V(LdaLookupSlot, AccumulatorUse::kWrite, OperandType::kIdx)                  \
+  V(LdaLookupContextSlot, AccumulatorUse::kWrite, OperandType::kIdx,           \
+    OperandType::kIdx, OperandType::kUImm)                                     \
+  V(LdaLookupGlobalSlot, AccumulatorUse::kWrite, OperandType::kIdx,            \
+    OperandType::kIdx, OperandType::kUImm)                                     \
   V(LdaLookupSlotInsideTypeof, AccumulatorUse::kWrite, OperandType::kIdx)      \
+  V(LdaLookupContextSlotInsideTypeof, AccumulatorUse::kWrite,                  \
+    OperandType::kIdx, OperandType::kIdx, OperandType::kUImm)                  \
+  V(LdaLookupGlobalSlotInsideTypeof, AccumulatorUse::kWrite,                   \
+    OperandType::kIdx, OperandType::kIdx, OperandType::kUImm)                  \
   V(StaLookupSlotSloppy, AccumulatorUse::kReadWrite, OperandType::kIdx)        \
   V(StaLookupSlotStrict, AccumulatorUse::kReadWrite, OperandType::kIdx)        \
                                                                                \
@@ -188,33 +143,40 @@
   V(DeletePropertySloppy, AccumulatorUse::kReadWrite, OperandType::kReg)       \
                                                                                \
   /* Call operations */                                                        \
-  V(Call, AccumulatorUse::kWrite, OperandType::kReg, OperandType::kReg,        \
+  V(Call, AccumulatorUse::kWrite, OperandType::kReg, OperandType::kRegList,    \
     OperandType::kRegCount, OperandType::kIdx)                                 \
-  V(TailCall, AccumulatorUse::kWrite, OperandType::kReg, OperandType::kReg,    \
-    OperandType::kRegCount, OperandType::kIdx)                                 \
+  V(TailCall, AccumulatorUse::kWrite, OperandType::kReg,                       \
+    OperandType::kRegList, OperandType::kRegCount, OperandType::kIdx)          \
   V(CallRuntime, AccumulatorUse::kWrite, OperandType::kRuntimeId,              \
-    OperandType::kMaybeReg, OperandType::kRegCount)                            \
+    OperandType::kRegList, OperandType::kRegCount)                             \
   V(CallRuntimeForPair, AccumulatorUse::kNone, OperandType::kRuntimeId,        \
-    OperandType::kMaybeReg, OperandType::kRegCount, OperandType::kRegOutPair)  \
+    OperandType::kRegList, OperandType::kRegCount, OperandType::kRegOutPair)   \
   V(CallJSRuntime, AccumulatorUse::kWrite, OperandType::kIdx,                  \
-    OperandType::kReg, OperandType::kRegCount)                                 \
+    OperandType::kRegList, OperandType::kRegCount)                             \
                                                                                \
   /* Intrinsics */                                                             \
   V(InvokeIntrinsic, AccumulatorUse::kWrite, OperandType::kIntrinsicId,        \
-    OperandType::kMaybeReg, OperandType::kRegCount)                            \
+    OperandType::kRegList, OperandType::kRegCount)                             \
                                                                                \
   /* New operator */                                                           \
-  V(New, AccumulatorUse::kReadWrite, OperandType::kReg,                        \
-    OperandType::kMaybeReg, OperandType::kRegCount)                            \
+  V(New, AccumulatorUse::kReadWrite, OperandType::kReg, OperandType::kRegList, \
+    OperandType::kRegCount, OperandType::kIdx)                                 \
                                                                                \
   /* Test Operators */                                                         \
-  V(TestEqual, AccumulatorUse::kReadWrite, OperandType::kReg)                  \
-  V(TestNotEqual, AccumulatorUse::kReadWrite, OperandType::kReg)               \
-  V(TestEqualStrict, AccumulatorUse::kReadWrite, OperandType::kReg)            \
-  V(TestLessThan, AccumulatorUse::kReadWrite, OperandType::kReg)               \
-  V(TestGreaterThan, AccumulatorUse::kReadWrite, OperandType::kReg)            \
-  V(TestLessThanOrEqual, AccumulatorUse::kReadWrite, OperandType::kReg)        \
-  V(TestGreaterThanOrEqual, AccumulatorUse::kReadWrite, OperandType::kReg)     \
+  V(TestEqual, AccumulatorUse::kReadWrite, OperandType::kReg,                  \
+    OperandType::kIdx)                                                         \
+  V(TestNotEqual, AccumulatorUse::kReadWrite, OperandType::kReg,               \
+    OperandType::kIdx)                                                         \
+  V(TestEqualStrict, AccumulatorUse::kReadWrite, OperandType::kReg,            \
+    OperandType::kIdx)                                                         \
+  V(TestLessThan, AccumulatorUse::kReadWrite, OperandType::kReg,               \
+    OperandType::kIdx)                                                         \
+  V(TestGreaterThan, AccumulatorUse::kReadWrite, OperandType::kReg,            \
+    OperandType::kIdx)                                                         \
+  V(TestLessThanOrEqual, AccumulatorUse::kReadWrite, OperandType::kReg,        \
+    OperandType::kIdx)                                                         \
+  V(TestGreaterThanOrEqual, AccumulatorUse::kReadWrite, OperandType::kReg,     \
+    OperandType::kIdx)                                                         \
   V(TestInstanceOf, AccumulatorUse::kReadWrite, OperandType::kReg)             \
   V(TestIn, AccumulatorUse::kReadWrite, OperandType::kReg)                     \
                                                                                \
@@ -238,10 +200,10 @@
   /* Context allocation */                                                     \
   V(CreateBlockContext, AccumulatorUse::kReadWrite, OperandType::kIdx)         \
   V(CreateCatchContext, AccumulatorUse::kReadWrite, OperandType::kReg,         \
+    OperandType::kIdx, OperandType::kIdx)                                      \
+  V(CreateFunctionContext, AccumulatorUse::kWrite, OperandType::kUImm)         \
+  V(CreateWithContext, AccumulatorUse::kReadWrite, OperandType::kReg,          \
     OperandType::kIdx)                                                         \
-  /* TODO(klaasb) rename Idx or add unsigned Imm OperandType? */               \
-  V(CreateFunctionContext, AccumulatorUse::kWrite, OperandType::kIdx)          \
-  V(CreateWithContext, AccumulatorUse::kReadWrite, OperandType::kReg)          \
                                                                                \
   /* Arguments allocation */                                                   \
   V(CreateMappedArguments, AccumulatorUse::kWrite)                             \
@@ -265,11 +227,13 @@
   V(JumpIfUndefinedConstant, AccumulatorUse::kRead, OperandType::kIdx)         \
   V(JumpIfNotHole, AccumulatorUse::kRead, OperandType::kImm)                   \
   V(JumpIfNotHoleConstant, AccumulatorUse::kRead, OperandType::kIdx)           \
+  V(JumpLoop, AccumulatorUse::kNone, OperandType::kImm, OperandType::kImm)     \
                                                                                \
   /* Complex flow control For..in */                                           \
   V(ForInPrepare, AccumulatorUse::kNone, OperandType::kReg,                    \
     OperandType::kRegOutTriple)                                                \
-  V(ForInDone, AccumulatorUse::kWrite, OperandType::kReg, OperandType::kReg)   \
+  V(ForInContinue, AccumulatorUse::kWrite, OperandType::kReg,                  \
+    OperandType::kReg)                                                         \
   V(ForInNext, AccumulatorUse::kWrite, OperandType::kReg, OperandType::kReg,   \
     OperandType::kRegPair, OperandType::kIdx)                                  \
   V(ForInStep, AccumulatorUse::kWrite, OperandType::kReg)                      \
@@ -277,9 +241,6 @@
   /* Perform a stack guard check */                                            \
   V(StackCheck, AccumulatorUse::kNone)                                         \
                                                                                \
-  /* Perform a check to trigger on-stack replacement */                        \
-  V(OsrPoll, AccumulatorUse::kNone, OperandType::kImm)                         \
-                                                                               \
   /* Non-local flow control */                                                 \
   V(Throw, AccumulatorUse::kRead)                                              \
   V(ReThrow, AccumulatorUse::kRead)                                            \
@@ -291,7 +252,22 @@
                                                                                \
   /* Debugger */                                                               \
   V(Debugger, AccumulatorUse::kNone)                                           \
-  DEBUG_BREAK_BYTECODE_LIST(V)                                                 \
+                                                                               \
+  /* Debug Breakpoints - one for each possible size of unscaled bytecodes */   \
+  /* and one for each operand widening prefix bytecode                    */   \
+  V(DebugBreak0, AccumulatorUse::kRead)                                        \
+  V(DebugBreak1, AccumulatorUse::kRead, OperandType::kReg)                     \
+  V(DebugBreak2, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg)  \
+  V(DebugBreak3, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg,  \
+    OperandType::kReg)                                                         \
+  V(DebugBreak4, AccumulatorUse::kRead, OperandType::kReg, OperandType::kReg,  \
+    OperandType::kReg, OperandType::kReg)                                      \
+  V(DebugBreak5, AccumulatorUse::kRead, OperandType::kRuntimeId,               \
+    OperandType::kReg, OperandType::kReg)                                      \
+  V(DebugBreak6, AccumulatorUse::kRead, OperandType::kRuntimeId,               \
+    OperandType::kReg, OperandType::kReg, OperandType::kReg)                   \
+  V(DebugBreakWide, AccumulatorUse::kRead)                                     \
+  V(DebugBreakExtraWide, AccumulatorUse::kRead)                                \
                                                                                \
   /* Illegal bytecode (terminates execution) */                                \
   V(Illegal, AccumulatorUse::kNone)                                            \
@@ -300,74 +276,23 @@
   /* eliminated bytecodes). */                                                 \
   V(Nop, AccumulatorUse::kNone)
 
-enum class AccumulatorUse : uint8_t {
-  kNone = 0,
-  kRead = 1 << 0,
-  kWrite = 1 << 1,
-  kReadWrite = kRead | kWrite
-};
+// List of debug break bytecodes.
+#define DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
+  V(DebugBreak0)                           \
+  V(DebugBreak1)                           \
+  V(DebugBreak2)                           \
+  V(DebugBreak3)                           \
+  V(DebugBreak4)                           \
+  V(DebugBreak5)                           \
+  V(DebugBreak6)
 
-inline AccumulatorUse operator&(AccumulatorUse lhs, AccumulatorUse rhs) {
-  int result = static_cast<int>(lhs) & static_cast<int>(rhs);
-  return static_cast<AccumulatorUse>(result);
-}
+#define DEBUG_BREAK_PREFIX_BYTECODE_LIST(V) \
+  V(DebugBreakWide)                         \
+  V(DebugBreakExtraWide)
 
-inline AccumulatorUse operator|(AccumulatorUse lhs, AccumulatorUse rhs) {
-  int result = static_cast<int>(lhs) | static_cast<int>(rhs);
-  return static_cast<AccumulatorUse>(result);
-}
-
-// Enumeration of scaling factors applicable to scalable operands. Code
-// relies on being able to cast values to integer scaling values.
-#define OPERAND_SCALE_LIST(V) \
-  V(Single, 1)                \
-  V(Double, 2)                \
-  V(Quadruple, 4)
-
-enum class OperandScale : uint8_t {
-#define DECLARE_OPERAND_SCALE(Name, Scale) k##Name = Scale,
-  OPERAND_SCALE_LIST(DECLARE_OPERAND_SCALE)
-#undef DECLARE_OPERAND_SCALE
-      kLast = kQuadruple
-};
-
-// Enumeration of the size classes of operand types used by
-// bytecodes. Code relies on being able to cast values to integer
-// types to get the size in bytes.
-enum class OperandSize : uint8_t {
-  kNone = 0,
-  kByte = 1,
-  kShort = 2,
-  kQuad = 4,
-  kLast = kQuad
-};
-
-// Primitive operand info used that summarize properties of operands.
-// Columns are Name, IsScalable, IsUnsigned, UnscaledSize.
-#define OPERAND_TYPE_INFO_LIST(V)                         \
-  V(None, false, false, OperandSize::kNone)               \
-  V(ScalableSignedByte, true, false, OperandSize::kByte)  \
-  V(ScalableUnsignedByte, true, true, OperandSize::kByte) \
-  V(FixedUnsignedByte, false, true, OperandSize::kByte)   \
-  V(FixedUnsignedShort, false, true, OperandSize::kShort)
-
-enum class OperandTypeInfo : uint8_t {
-#define DECLARE_OPERAND_TYPE_INFO(Name, ...) k##Name,
-  OPERAND_TYPE_INFO_LIST(DECLARE_OPERAND_TYPE_INFO)
-#undef DECLARE_OPERAND_TYPE_INFO
-};
-
-// Enumeration of operand types used by bytecodes.
-enum class OperandType : uint8_t {
-#define DECLARE_OPERAND_TYPE(Name, _) k##Name,
-  OPERAND_TYPE_LIST(DECLARE_OPERAND_TYPE)
-#undef DECLARE_OPERAND_TYPE
-#define COUNT_OPERAND_TYPES(x, _) +1
-  // The COUNT_OPERAND macro will turn this into kLast = -1 +1 +1... which will
-  // evaluate to the same value as the last operand.
-  kLast = -1 OPERAND_TYPE_LIST(COUNT_OPERAND_TYPES)
-#undef COUNT_OPERAND_TYPES
-};
+#define DEBUG_BREAK_BYTECODE_LIST(V) \
+  DEBUG_BREAK_PLAIN_BYTECODE_LIST(V) \
+  DEBUG_BREAK_PREFIX_BYTECODE_LIST(V)
 
 // Enumeration of interpreter bytecodes.
 enum class Bytecode : uint8_t {
@@ -381,6 +306,14 @@
 #undef COUNT_BYTECODE
 };
 
+// TODO(rmcilroy): Remove once we switch to MSVC 2015 which supports constexpr.
+// See crbug.com/603131.
+#if V8_CC_MSVC
+#define CONSTEXPR const
+#else
+#define CONSTEXPR constexpr
+#endif
+
 class Bytecodes final {
  public:
   //  The maximum number of operands a bytecode may have.
@@ -392,157 +325,315 @@
   // Returns string representation of |bytecode|.
   static std::string ToString(Bytecode bytecode, OperandScale operand_scale);
 
-  // Returns string representation of |accumulator_use|.
-  static const char* AccumulatorUseToString(AccumulatorUse accumulator_use);
-
-  // Returns string representation of |operand_type|.
-  static const char* OperandTypeToString(OperandType operand_type);
-
-  // Returns string representation of |operand_scale|.
-  static const char* OperandScaleToString(OperandScale operand_scale);
-
-  // Returns string representation of |operand_size|.
-  static const char* OperandSizeToString(OperandSize operand_size);
-
   // Returns byte value of bytecode.
-  static uint8_t ToByte(Bytecode bytecode);
+  static uint8_t ToByte(Bytecode bytecode) {
+    DCHECK_LE(bytecode, Bytecode::kLast);
+    return static_cast<uint8_t>(bytecode);
+  }
 
   // Returns bytecode for |value|.
-  static Bytecode FromByte(uint8_t value);
-
-  // Returns the number of operands expected by |bytecode|.
-  static int NumberOfOperands(Bytecode bytecode);
-
-  // Returns the number of register operands expected by |bytecode|.
-  static int NumberOfRegisterOperands(Bytecode bytecode);
+  static Bytecode FromByte(uint8_t value) {
+    Bytecode bytecode = static_cast<Bytecode>(value);
+    DCHECK(bytecode <= Bytecode::kLast);
+    return bytecode;
+  }
 
   // Returns the prefix bytecode representing an operand scale to be
   // applied to a bytecode.
-  static Bytecode OperandScaleToPrefixBytecode(OperandScale operand_scale);
+  static Bytecode OperandScaleToPrefixBytecode(OperandScale operand_scale) {
+    switch (operand_scale) {
+      case OperandScale::kQuadruple:
+        return Bytecode::kExtraWide;
+      case OperandScale::kDouble:
+        return Bytecode::kWide;
+      default:
+        UNREACHABLE();
+        return Bytecode::kIllegal;
+    }
+  }
 
   // Returns true if the operand scale requires a prefix bytecode.
-  static bool OperandScaleRequiresPrefixBytecode(OperandScale operand_scale);
+  static bool OperandScaleRequiresPrefixBytecode(OperandScale operand_scale) {
+    return operand_scale != OperandScale::kSingle;
+  }
 
   // Returns the scaling applied to scalable operands if bytecode is
   // a scaling prefix.
-  static OperandScale PrefixBytecodeToOperandScale(Bytecode bytecode);
+  static OperandScale PrefixBytecodeToOperandScale(Bytecode bytecode) {
+    switch (bytecode) {
+      case Bytecode::kExtraWide:
+      case Bytecode::kDebugBreakExtraWide:
+        return OperandScale::kQuadruple;
+      case Bytecode::kWide:
+      case Bytecode::kDebugBreakWide:
+        return OperandScale::kDouble;
+      default:
+        UNREACHABLE();
+        return OperandScale::kSingle;
+    }
+  }
 
   // Returns how accumulator is used by |bytecode|.
-  static AccumulatorUse GetAccumulatorUse(Bytecode bytecode);
+  static AccumulatorUse GetAccumulatorUse(Bytecode bytecode) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    return kAccumulatorUse[static_cast<size_t>(bytecode)];
+  }
 
   // Returns true if |bytecode| reads the accumulator.
-  static bool ReadsAccumulator(Bytecode bytecode);
+  static bool ReadsAccumulator(Bytecode bytecode) {
+    return (GetAccumulatorUse(bytecode) & AccumulatorUse::kRead) ==
+           AccumulatorUse::kRead;
+  }
 
   // Returns true if |bytecode| writes the accumulator.
-  static bool WritesAccumulator(Bytecode bytecode);
+  static bool WritesAccumulator(Bytecode bytecode) {
+    return (GetAccumulatorUse(bytecode) & AccumulatorUse::kWrite) ==
+           AccumulatorUse::kWrite;
+  }
 
   // Return true if |bytecode| writes the accumulator with a boolean value.
-  static bool WritesBooleanToAccumulator(Bytecode bytecode);
+  static bool WritesBooleanToAccumulator(Bytecode bytecode) {
+    switch (bytecode) {
+      case Bytecode::kLdaTrue:
+      case Bytecode::kLdaFalse:
+      case Bytecode::kToBooleanLogicalNot:
+      case Bytecode::kLogicalNot:
+      case Bytecode::kTestEqual:
+      case Bytecode::kTestNotEqual:
+      case Bytecode::kTestEqualStrict:
+      case Bytecode::kTestLessThan:
+      case Bytecode::kTestLessThanOrEqual:
+      case Bytecode::kTestGreaterThan:
+      case Bytecode::kTestGreaterThanOrEqual:
+      case Bytecode::kTestInstanceOf:
+      case Bytecode::kTestIn:
+      case Bytecode::kForInContinue:
+        return true;
+      default:
+        return false;
+    }
+  }
 
   // Return true if |bytecode| is an accumulator load without effects,
   // e.g. LdaConstant, LdaTrue, Ldar.
-  static bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode);
+  static CONSTEXPR bool IsAccumulatorLoadWithoutEffects(Bytecode bytecode) {
+    return bytecode == Bytecode::kLdar || bytecode == Bytecode::kLdaZero ||
+           bytecode == Bytecode::kLdaSmi || bytecode == Bytecode::kLdaNull ||
+           bytecode == Bytecode::kLdaTrue || bytecode == Bytecode::kLdaFalse ||
+           bytecode == Bytecode::kLdaUndefined ||
+           bytecode == Bytecode::kLdaTheHole ||
+           bytecode == Bytecode::kLdaConstant;
+  }
+
+  // Return true if |bytecode| is a register load without effects,
+  // e.g. Mov, Star, LdrUndefined.
+  static CONSTEXPR bool IsRegisterLoadWithoutEffects(Bytecode bytecode) {
+    return bytecode == Bytecode::kMov || bytecode == Bytecode::kPopContext ||
+           bytecode == Bytecode::kPushContext || bytecode == Bytecode::kStar ||
+           bytecode == Bytecode::kLdrUndefined;
+  }
+
+  // Returns true if the bytecode is a conditional jump taking
+  // an immediate byte operand (OperandType::kImm).
+  static CONSTEXPR bool IsConditionalJumpImmediate(Bytecode bytecode) {
+    return bytecode == Bytecode::kJumpIfTrue ||
+           bytecode == Bytecode::kJumpIfFalse ||
+           bytecode == Bytecode::kJumpIfToBooleanTrue ||
+           bytecode == Bytecode::kJumpIfToBooleanFalse ||
+           bytecode == Bytecode::kJumpIfNotHole ||
+           bytecode == Bytecode::kJumpIfNull ||
+           bytecode == Bytecode::kJumpIfUndefined;
+  }
+
+  // Returns true if the bytecode is a conditional jump taking
+  // a constant pool entry (OperandType::kIdx).
+  static CONSTEXPR bool IsConditionalJumpConstant(Bytecode bytecode) {
+    return bytecode == Bytecode::kJumpIfTrueConstant ||
+           bytecode == Bytecode::kJumpIfFalseConstant ||
+           bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
+           bytecode == Bytecode::kJumpIfToBooleanFalseConstant ||
+           bytecode == Bytecode::kJumpIfNotHoleConstant ||
+           bytecode == Bytecode::kJumpIfNullConstant ||
+           bytecode == Bytecode::kJumpIfUndefinedConstant;
+  }
+
+  // Returns true if the bytecode is a conditional jump taking
+  // any kind of operand.
+  static CONSTEXPR bool IsConditionalJump(Bytecode bytecode) {
+    return IsConditionalJumpImmediate(bytecode) ||
+           IsConditionalJumpConstant(bytecode);
+  }
+
+  // Returns true if the bytecode is a jump or a conditional jump taking
+  // an immediate byte operand (OperandType::kImm).
+  static CONSTEXPR bool IsJumpImmediate(Bytecode bytecode) {
+    return bytecode == Bytecode::kJump || bytecode == Bytecode::kJumpLoop ||
+           IsConditionalJumpImmediate(bytecode);
+  }
+
+  // Returns true if the bytecode is a jump or conditional jump taking a
+  // constant pool entry (OperandType::kIdx).
+  static CONSTEXPR bool IsJumpConstant(Bytecode bytecode) {
+    return bytecode == Bytecode::kJumpConstant ||
+           IsConditionalJumpConstant(bytecode);
+  }
+
+  // Returns true if the bytecode is a jump that internally coerces the
+  // accumulator to a boolean.
+  static CONSTEXPR bool IsJumpIfToBoolean(Bytecode bytecode) {
+    return bytecode == Bytecode::kJumpIfToBooleanTrue ||
+           bytecode == Bytecode::kJumpIfToBooleanFalse ||
+           bytecode == Bytecode::kJumpIfToBooleanTrueConstant ||
+           bytecode == Bytecode::kJumpIfToBooleanFalseConstant;
+  }
+
+  // Returns true if the bytecode is a jump or conditional jump taking
+  // any kind of operand.
+  static CONSTEXPR bool IsJump(Bytecode bytecode) {
+    return IsJumpImmediate(bytecode) || IsJumpConstant(bytecode);
+  }
+
+  // Returns true if the bytecode is a conditional jump, a jump, or a return.
+  static CONSTEXPR bool IsJumpOrReturn(Bytecode bytecode) {
+    return bytecode == Bytecode::kReturn || IsJump(bytecode);
+  }
 
   // Return true if |bytecode| is a jump without effects,
   // e.g. any jump excluding those that include type coercion like
   // JumpIfToBooleanTrue.
-  static bool IsJumpWithoutEffects(Bytecode bytecode);
+  static CONSTEXPR bool IsJumpWithoutEffects(Bytecode bytecode) {
+    return IsJump(bytecode) && !IsJumpIfToBoolean(bytecode);
+  }
 
-  // Return true if |bytecode| is a register load without effects,
-  // e.g. Mov, Star, LdrUndefined.
-  static bool IsRegisterLoadWithoutEffects(Bytecode bytecode);
+  // Returns true if |bytecode| has no effects. These bytecodes only manipulate
+  // interpreter frame state and will never throw.
+  static CONSTEXPR bool IsWithoutExternalSideEffects(Bytecode bytecode) {
+    return (IsAccumulatorLoadWithoutEffects(bytecode) ||
+            IsRegisterLoadWithoutEffects(bytecode) ||
+            bytecode == Bytecode::kNop || IsJumpWithoutEffects(bytecode));
+  }
 
-  // Returns true if |bytecode| has no effects.
-  static bool IsWithoutExternalSideEffects(Bytecode bytecode);
+  // Returns true if the bytecode is Ldar or Star.
+  static CONSTEXPR bool IsLdarOrStar(Bytecode bytecode) {
+    return bytecode == Bytecode::kLdar || bytecode == Bytecode::kStar;
+  }
+
+  // Returns true if |bytecode| puts a name in the accumulator.
+  static CONSTEXPR bool PutsNameInAccumulator(Bytecode bytecode) {
+    return bytecode == Bytecode::kTypeOf;
+  }
+
+  // Returns true if the bytecode is a call or a constructor call.
+  static CONSTEXPR bool IsCallOrNew(Bytecode bytecode) {
+    return bytecode == Bytecode::kCall || bytecode == Bytecode::kTailCall ||
+           bytecode == Bytecode::kNew;
+  }
+
+  // Returns true if the bytecode is a call to the runtime.
+  static CONSTEXPR bool IsCallRuntime(Bytecode bytecode) {
+    return bytecode == Bytecode::kCallRuntime ||
+           bytecode == Bytecode::kCallRuntimeForPair ||
+           bytecode == Bytecode::kInvokeIntrinsic;
+  }
+
+  // Returns true if the bytecode is a scaling prefix bytecode.
+  static CONSTEXPR bool IsPrefixScalingBytecode(Bytecode bytecode) {
+    return bytecode == Bytecode::kExtraWide || bytecode == Bytecode::kWide ||
+           bytecode == Bytecode::kDebugBreakExtraWide ||
+           bytecode == Bytecode::kDebugBreakWide;
+  }
+
+  // Returns the number of values which |bytecode| returns.
+  static CONSTEXPR size_t ReturnCount(Bytecode bytecode) {
+    return bytecode == Bytecode::kReturn ? 1 : 0;
+  }
+
+  // Returns the number of operands expected by |bytecode|.
+  static int NumberOfOperands(Bytecode bytecode) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    return kOperandCount[static_cast<size_t>(bytecode)];
+  }
 
   // Returns the i-th operand of |bytecode|.
-  static OperandType GetOperandType(Bytecode bytecode, int i);
+  static OperandType GetOperandType(Bytecode bytecode, int i) {
+    DCHECK_LE(bytecode, Bytecode::kLast);
+    DCHECK_LT(i, NumberOfOperands(bytecode));
+    DCHECK_GE(i, 0);
+    return GetOperandTypes(bytecode)[i];
+  }
 
   // Returns a pointer to an array of operand types terminated in
   // OperandType::kNone.
-  static const OperandType* GetOperandTypes(Bytecode bytecode);
+  static const OperandType* GetOperandTypes(Bytecode bytecode) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    return kOperandTypes[static_cast<size_t>(bytecode)];
+  }
 
-  // Returns a pointer to an array of operand type info terminated in
-  // OperandTypeInfo::kNone.
-  static const OperandTypeInfo* GetOperandTypeInfos(Bytecode bytecode);
+  static bool OperandIsScalableSignedByte(Bytecode bytecode,
+                                          int operand_index) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    return kOperandTypeInfos[static_cast<size_t>(bytecode)][operand_index] ==
+           OperandTypeInfo::kScalableSignedByte;
+  }
+
+  static bool OperandIsScalableUnsignedByte(Bytecode bytecode,
+                                            int operand_index) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    return kOperandTypeInfos[static_cast<size_t>(bytecode)][operand_index] ==
+           OperandTypeInfo::kScalableUnsignedByte;
+  }
+
+  static bool OperandIsScalable(Bytecode bytecode, int operand_index) {
+    return OperandIsScalableSignedByte(bytecode, operand_index) ||
+           OperandIsScalableUnsignedByte(bytecode, operand_index);
+  }
+
+  // Returns true if the bytecode has wider operand forms.
+  static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
 
   // Returns the size of the i-th operand of |bytecode|.
   static OperandSize GetOperandSize(Bytecode bytecode, int i,
-                                    OperandScale operand_scale);
+                                    OperandScale operand_scale) {
+    CHECK_LT(i, NumberOfOperands(bytecode));
+    return GetOperandSizes(bytecode, operand_scale)[i];
+  }
+
+  // Returns the operand sizes of |bytecode| with scale |operand_scale|.
+  static const OperandSize* GetOperandSizes(Bytecode bytecode,
+                                            OperandScale operand_scale) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    DCHECK_GE(operand_scale, OperandScale::kSingle);
+    DCHECK_LE(operand_scale, OperandScale::kLast);
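+    // OperandScale values (kSingle, kDouble, kQuadruple) are 1, 2 and 4, so a
+    // right shift by one maps them onto table indices 0, 1 and 2.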
+    STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
+                  OperandScale::kLast == OperandScale::kQuadruple);
+    int scale_index = static_cast<int>(operand_scale) >> 1;
+    return kOperandSizes[static_cast<size_t>(bytecode)][scale_index];
+  }
 
   // Returns the offset of the i-th operand of |bytecode| relative to the start
   // of the bytecode.
   static int GetOperandOffset(Bytecode bytecode, int i,
                               OperandScale operand_scale);
 
-  // Returns a debug break bytecode to replace |bytecode|.
-  static Bytecode GetDebugBreak(Bytecode bytecode);
-
   // Returns the size of the bytecode including its operands for the
   // given |operand_scale|.
-  static int Size(Bytecode bytecode, OperandScale operand_scale);
+  static int Size(Bytecode bytecode, OperandScale operand_scale) {
+    DCHECK(bytecode <= Bytecode::kLast);
+    STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
+                  OperandScale::kLast == OperandScale::kQuadruple);
+    int scale_index = static_cast<int>(operand_scale) >> 1;
+    return kBytecodeSizes[static_cast<size_t>(bytecode)][scale_index];
+  }
 
-  // Returns the size of |operand|.
-  static OperandSize SizeOfOperand(OperandType operand, OperandScale scale);
-
-  // Returns the number of values which |bytecode| returns.
-  static size_t ReturnCount(Bytecode bytecode);
-
-  // Returns true if the bytecode is a conditional jump taking
-  // an immediate byte operand (OperandType::kImm).
-  static bool IsConditionalJumpImmediate(Bytecode bytecode);
-
-  // Returns true if the bytecode is a conditional jump taking
-  // a constant pool entry (OperandType::kIdx).
-  static bool IsConditionalJumpConstant(Bytecode bytecode);
-
-  // Returns true if the bytecode is a conditional jump taking
-  // any kind of operand.
-  static bool IsConditionalJump(Bytecode bytecode);
-
-  // Returns true if the bytecode is a jump or a conditional jump taking
-  // an immediate byte operand (OperandType::kImm).
-  static bool IsJumpImmediate(Bytecode bytecode);
-
-  // Returns true if the bytecode is a jump or conditional jump taking a
-  // constant pool entry (OperandType::kIdx).
-  static bool IsJumpConstant(Bytecode bytecode);
-
-  // Returns true if the bytecode is a jump or conditional jump taking
-  // any kind of operand.
-  static bool IsJump(Bytecode bytecode);
-
-  // Returns true if the bytecode is a jump that internally coerces the
-  // accumulator to a boolean.
-  static bool IsJumpIfToBoolean(Bytecode bytecode);
+  // Returns a debug break bytecode to replace |bytecode|.
+  static Bytecode GetDebugBreak(Bytecode bytecode);
 
   // Returns the equivalent jump bytecode without the accumulator coercion.
   static Bytecode GetJumpWithoutToBoolean(Bytecode bytecode);
 
-  // Returns true if the bytecode is a conditional jump, a jump, or a return.
-  static bool IsJumpOrReturn(Bytecode bytecode);
-
-  // Returns true if the bytecode is a call or a constructor call.
-  static bool IsCallOrNew(Bytecode bytecode);
-
-  // Returns true if the bytecode is a call to the runtime.
-  static bool IsCallRuntime(Bytecode bytecode);
-
   // Returns true if the bytecode is a debug break.
   static bool IsDebugBreak(Bytecode bytecode);
 
-  // Returns true if the bytecode is Ldar or Star.
-  static bool IsLdarOrStar(Bytecode bytecode);
-
-  // Returns true if the bytecode has wider operand forms.
-  static bool IsBytecodeWithScalableOperands(Bytecode bytecode);
-
-  // Returns true if the bytecode is a scaling prefix bytecode.
-  static bool IsPrefixScalingBytecode(Bytecode bytecode);
-
-  // Returns true if |bytecode| puts a name in the accumulator.
-  static bool PutsNameInAccumulator(Bytecode bytecode);
-
   // Returns true if |operand_type| is any type of register operand.
   static bool IsRegisterOperandType(OperandType operand_type);
 
@@ -557,12 +648,30 @@
   static bool IsStarLookahead(Bytecode bytecode, OperandScale operand_scale);
 
   // Returns the number of registers represented by a register operand. For
-  // instance, a RegPair represents two registers.
-  static int GetNumberOfRegistersRepresentedBy(OperandType operand_type);
+  // instance, a RegPair represents two registers. Should not be called for
+  // kRegList, which has a variable number of registers based on the following
+  // kRegCount operand.
+  static int GetNumberOfRegistersRepresentedBy(OperandType operand_type) {
+    switch (operand_type) {
+      case OperandType::kReg:
+      case OperandType::kRegOut:
+        return 1;
+      case OperandType::kRegPair:
+      case OperandType::kRegOutPair:
+        return 2;
+      case OperandType::kRegOutTriple:
+        return 3;
+      case OperandType::kRegList:
+        UNREACHABLE();
+        return 0;
+      default:
+        return 0;
+    }
+    return 0;
+  }
 
-  // Returns true if |operand_type| is a maybe register operand
-  // (kMaybeReg).
-  static bool IsMaybeRegisterOperandType(OperandType operand_type);
+  // Returns the size of |operand| for the given |scale|.
+  static OperandSize SizeOfOperand(OperandType operand, OperandScale scale);
 
   // Returns true if |operand_type| is a runtime-id operand (kRuntimeId).
   static bool IsRuntimeIdOperandType(OperandType operand_type);
@@ -576,18 +685,55 @@
   // OperandScale values.
   static bool BytecodeHasHandler(Bytecode bytecode, OperandScale operand_scale);
 
-  // Return the operand size required to hold a signed operand.
-  static OperandSize SizeForSignedOperand(int value);
+  // Return the operand scale required to hold a signed operand with |value|.
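+  // For example, -100 fits in an int8 and needs only OperandScale::kSingle,
+  // while 1000 exceeds kMaxInt8 and needs OperandScale::kDouble.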
+  static OperandScale ScaleForSignedOperand(int32_t value) {
+    if (value >= kMinInt8 && value <= kMaxInt8) {
+      return OperandScale::kSingle;
+    } else if (value >= kMinInt16 && value <= kMaxInt16) {
+      return OperandScale::kDouble;
+    } else {
+      return OperandScale::kQuadruple;
+    }
+  }
 
-  // Return the operand size required to hold an unsigned operand.
-  static OperandSize SizeForUnsignedOperand(uint32_t value);
+  // Return the operand scale required to hold an unsigned operand with |value|.
+  static OperandScale ScaleForUnsignedOperand(uint32_t value) {
+    if (value <= kMaxUInt8) {
+      return OperandScale::kSingle;
+    } else if (value <= kMaxUInt16) {
+      return OperandScale::kDouble;
+    } else {
+      return OperandScale::kQuadruple;
+    }
+  }
+
+  // Return the operand size required to hold an unsigned operand with |value|.
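+  // For example, 255 still fits in a byte, 256 needs OperandSize::kShort, and
+  // anything above kMaxUInt16 needs OperandSize::kQuad.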
+  static OperandSize SizeForUnsignedOperand(uint32_t value) {
+    if (value <= kMaxUInt8) {
+      return OperandSize::kByte;
+    } else if (value <= kMaxUInt16) {
+      return OperandSize::kShort;
+    } else {
+      return OperandSize::kQuad;
+    }
+  }
+
+ private:
+  static const OperandType* const kOperandTypes[];
+  static const OperandTypeInfo* const kOperandTypeInfos[];
+  static const int kOperandCount[];
+  static const int kNumberOfRegisterOperands[];
+  static const AccumulatorUse kAccumulatorUse[];
+  static const bool kIsScalable[];
+  static const int kBytecodeSizes[][3];
+  static const OperandSize* const kOperandSizes[][3];
 };
 
+// TODO(rmcilroy): Remove once we switch to MSVC 2015 which supports constexpr.
+// See crbug.com/603131.
+#undef CONSTEXPR
+
 std::ostream& operator<<(std::ostream& os, const Bytecode& bytecode);
-std::ostream& operator<<(std::ostream& os, const AccumulatorUse& use);
-std::ostream& operator<<(std::ostream& os, const OperandScale& operand_scale);
-std::ostream& operator<<(std::ostream& os, const OperandSize& operand_size);
-std::ostream& operator<<(std::ostream& os, const OperandType& operand_type);
 
 }  // namespace interpreter
 }  // namespace internal
diff --git a/src/interpreter/constant-array-builder.cc b/src/interpreter/constant-array-builder.cc
index ff3823f..d2b7995 100644
--- a/src/interpreter/constant-array-builder.cc
+++ b/src/interpreter/constant-array-builder.cc
@@ -4,6 +4,7 @@
 
 #include "src/interpreter/constant-array-builder.h"
 
+#include <functional>
 #include <set>
 
 #include "src/isolate.h"
@@ -72,9 +73,11 @@
 
 ConstantArrayBuilder::ConstantArrayBuilder(Zone* zone,
                                            Handle<Object> the_hole_value)
-    : constants_map_(zone),
+    : constants_map_(16, base::KeyEqualityMatcher<Address>(),
+                     ZoneAllocationPolicy(zone)),
       smi_map_(zone),
       smi_pairs_(zone),
+      zone_(zone),
       the_hole_value_(the_hole_value) {
   idx_slice_[0] =
       new (zone) ConstantArraySlice(zone, 0, k8BitCapacity, OperandSize::kByte);
@@ -153,16 +156,11 @@
 }
 
 size_t ConstantArrayBuilder::Insert(Handle<Object> object) {
-  auto entry = constants_map_.find(object.address());
-  return (entry == constants_map_.end()) ? AllocateEntry(object)
-                                         : entry->second;
-}
-
-ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateEntry(
-    Handle<Object> object) {
-  index_t index = AllocateIndex(object);
-  constants_map_[object.address()] = index;
-  return index;
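+  // Look the constant up by address; if it has not been seen before, the
+  // lambda allocates a fresh constant pool index for it via AllocateIndex.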
+  return constants_map_
+      .LookupOrInsert(object.address(), ObjectHash(object.address()),
+                      [&]() { return AllocateIndex(object); },
+                      ZoneAllocationPolicy(zone_))
+      ->value;
 }
 
 ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateIndex(
diff --git a/src/interpreter/constant-array-builder.h b/src/interpreter/constant-array-builder.h
index 2018f25..78d36f5 100644
--- a/src/interpreter/constant-array-builder.h
+++ b/src/interpreter/constant-array-builder.h
@@ -7,7 +7,7 @@
 
 #include "src/identity-map.h"
 #include "src/interpreter/bytecodes.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -70,7 +70,6 @@
  private:
   typedef uint32_t index_t;
 
-  index_t AllocateEntry(Handle<Object> object);
   index_t AllocateIndex(Handle<Object> object);
   index_t AllocateReservedEntry(Smi* value);
 
@@ -108,9 +107,12 @@
   Handle<Object> the_hole_value() const { return the_hole_value_; }
 
   ConstantArraySlice* idx_slice_[3];
-  ZoneMap<Address, index_t> constants_map_;
+  base::TemplateHashMapImpl<Address, index_t, base::KeyEqualityMatcher<Address>,
+                            ZoneAllocationPolicy>
+      constants_map_;
   ZoneMap<Smi*, index_t> smi_map_;
   ZoneVector<std::pair<Smi*, index_t>> smi_pairs_;
+  Zone* zone_;
   Handle<Object> the_hole_value_;
 };
 
diff --git a/src/interpreter/control-flow-builders.cc b/src/interpreter/control-flow-builders.cc
index 56cd481..0e71b96 100644
--- a/src/interpreter/control-flow-builders.cc
+++ b/src/interpreter/control-flow-builders.cc
@@ -60,18 +60,14 @@
   }
 }
 
-void LoopBuilder::JumpToHeader() {
+void LoopBuilder::JumpToHeader(int loop_depth) {
+  // Pass the proper loop nesting level to the backwards branch, to trigger
+  // on-stack replacement when armed for the given loop nesting depth.
+  int level = Min(loop_depth, AbstractCode::kMaxLoopNestingMarker - 1);
   // Loop must have closed form, i.e. all loop elements are within the loop,
   // the loop header precedes the body and next elements in the loop.
   DCHECK(loop_header_.is_bound());
-  builder()->Jump(&loop_header_);
-}
-
-void LoopBuilder::JumpToHeaderIfTrue() {
-  // Loop must have closed form, i.e. all loop elements are within the loop,
-  // the loop header precedes the body and next elements in the loop.
-  DCHECK(loop_header_.is_bound());
-  builder()->JumpIfTrue(&loop_header_);
+  builder()->JumpLoop(&loop_header_, level);
 }
 
 void LoopBuilder::EndLoop() {
diff --git a/src/interpreter/control-flow-builders.h b/src/interpreter/control-flow-builders.h
index 5cd9b5b..3174db5 100644
--- a/src/interpreter/control-flow-builders.h
+++ b/src/interpreter/control-flow-builders.h
@@ -8,7 +8,7 @@
 #include "src/interpreter/bytecode-array-builder.h"
 
 #include "src/interpreter/bytecode-label.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -86,8 +86,7 @@
   ~LoopBuilder();
 
   void LoopHeader(ZoneVector<BytecodeLabel>* additional_labels);
-  void JumpToHeader();
-  void JumpToHeaderIfTrue();
+  void JumpToHeader(int loop_depth);
   void BindContinueTarget();
   void EndLoop();
 
@@ -99,9 +98,6 @@
   void ContinueIfUndefined() { EmitJumpIfUndefined(&continue_labels_); }
   void ContinueIfNull() { EmitJumpIfNull(&continue_labels_); }
 
-  BytecodeLabels* header_labels() { return &header_labels_; }
-  BytecodeLabels* continue_labels() { return &continue_labels_; }
-
  private:
   BytecodeLabel loop_header_;
 
diff --git a/src/interpreter/handler-table-builder.h b/src/interpreter/handler-table-builder.h
index 26c45f4..25147ca 100644
--- a/src/interpreter/handler-table-builder.h
+++ b/src/interpreter/handler-table-builder.h
@@ -8,7 +8,7 @@
 #include "src/handles.h"
 #include "src/interpreter/bytecode-register.h"
 #include "src/interpreter/bytecodes.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/interpreter/interpreter-assembler.cc b/src/interpreter/interpreter-assembler.cc
index 227fd39..5767ffa 100644
--- a/src/interpreter/interpreter-assembler.cc
+++ b/src/interpreter/interpreter-assembler.cc
@@ -14,7 +14,7 @@
 #include "src/interpreter/interpreter.h"
 #include "src/machine-type.h"
 #include "src/macro-assembler.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -84,6 +84,71 @@
   StoreRegister(value, Register::current_context());
 }
 
+Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
+  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
+  cur_context.Bind(context);
+
+  Variable cur_depth(this, MachineRepresentation::kWord32);
+  cur_depth.Bind(depth);
+
+  Label context_found(this);
+
+  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
+  Label context_search(this, 2, context_search_loop_variables);
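+  // Both the remaining depth and the current context are loop variables, so
+  // their values are merged at |context_search| on every iteration.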
+
+  // Fast path if the depth is 0.
+  BranchIfWord32Equal(depth, Int32Constant(0), &context_found, &context_search);
+
+  // Loop until the depth is 0.
+  Bind(&context_search);
+  {
+    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
+    cur_context.Bind(
+        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));
+
+    BranchIfWord32Equal(cur_depth.value(), Int32Constant(0), &context_found,
+                        &context_search);
+  }
+
+  Bind(&context_found);
+  return cur_context.value();
+}
+
+void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
+                                                              Node* depth,
+                                                              Label* target) {
+  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
+  cur_context.Bind(context);
+
+  Variable cur_depth(this, MachineRepresentation::kWord32);
+  cur_depth.Bind(depth);
+
+  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
+  Label context_search(this, 2, context_search_loop_variables);
+
+  // Loop until the depth is 0.
+  Goto(&context_search);
+  Bind(&context_search);
+  {
+    // TODO(leszeks): We only need to do this check if the context had a sloppy
+    // eval, we could pass in a context chain bitmask to figure out which
+    // contexts actually need to be checked.
+
+    Node* extension_slot =
+        LoadContextSlot(cur_context.value(), Context::EXTENSION_INDEX);
+
+    // Jump to the target if the extension slot is not a hole.
+    GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);
+
+    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
+    cur_context.Bind(
+        LoadContextSlot(cur_context.value(), Context::PREVIOUS_INDEX));
+
+    GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
+           &context_search);
+  }
+}
+
 Node* InterpreterAssembler::BytecodeOffset() {
   return bytecode_offset_.value();
 }
@@ -341,6 +406,14 @@
   return BytecodeUnsignedOperand(operand_index, operand_size);
 }
 
+Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
+  DCHECK_EQ(OperandType::kUImm,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
   DCHECK_EQ(OperandType::kImm,
             Bytecodes::GetOperandType(bytecode_, operand_index));
@@ -460,6 +533,18 @@
   }
 }
 
+Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector,
+                                               Node* slot_id) {
+  Comment("increment call count");
+  Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
+  Node* call_count =
+      LoadFixedArrayElement(type_feedback_vector, call_count_slot);
+  Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1)));
+  // Count is Smi, so we don't need a write barrier.
+  return StoreFixedArrayElement(type_feedback_vector, call_count_slot,
+                                new_count, SKIP_WRITE_BARRIER);
+}
+
 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
                                                Node* first_arg, Node* arg_count,
                                                Node* slot_id,
@@ -481,15 +566,16 @@
                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
 
   Variable return_value(this, MachineRepresentation::kTagged);
-  Label handle_monomorphic(this), extra_checks(this), end(this), call(this);
+  Label handle_monomorphic(this), extra_checks(this), end(this), call(this),
+      call_function(this), call_without_feedback(this);
 
   // Slot id of 0 is used to indicate no typefeedback is available. Call using
   // call builtin.
   STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
   Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
-  GotoIf(is_feedback_unavailable, &call);
+  GotoIf(is_feedback_unavailable, &call_without_feedback);
 
-  // The checks. First, does rdi match the recorded monomorphic target?
+  // The checks. First, does |function| match the recorded monomorphic target?
   Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
   Node* feedback_value = LoadWeakCellValue(feedback_element);
   Node* is_monomorphic = WordEqual(function, feedback_value);
@@ -503,13 +589,7 @@
     GotoIf(is_smi, &extra_checks);
 
     // Increment the call count.
-    Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
-    Node* call_count =
-        LoadFixedArrayElement(type_feedback_vector, call_count_slot);
-    Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1)));
-    // Count is Smi, so we don't need a write barrier.
-    StoreFixedArrayElement(type_feedback_vector, call_count_slot, new_count,
-                           SKIP_WRITE_BARRIER);
+    IncrementCallCount(type_feedback_vector, slot_id);
 
     // Call using call function builtin.
     Callable callable = CodeFactory::InterpreterPushArgsAndCall(
@@ -523,12 +603,42 @@
 
   Bind(&extra_checks);
   {
-    Label check_initialized(this, Label::kDeferred), mark_megamorphic(this);
+    Label check_initialized(this, Label::kDeferred), mark_megamorphic(this),
+        check_allocation_site(this),
+        create_allocation_site(this, Label::kDeferred);
     // Check if it is a megamorphic target
     Node* is_megamorphic = WordEqual(
         feedback_element,
         HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
-    BranchIf(is_megamorphic, &call, &check_initialized);
+    BranchIf(is_megamorphic, &call, &check_allocation_site);
+
+    Bind(&check_allocation_site);
+    {
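+      // An AllocationSite in the feedback slot indicates that this site has
+      // been calling the Array() function.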
+      Node* is_allocation_site =
+          WordEqual(LoadMap(feedback_element),
+                    LoadRoot(Heap::kAllocationSiteMapRootIndex));
+      GotoUnless(is_allocation_site, &check_initialized);
+
+      // If it is not the Array() function, mark megamorphic.
+      Node* context_slot =
+          LoadFixedArrayElement(LoadNativeContext(context),
+                                Int32Constant(Context::ARRAY_FUNCTION_INDEX));
+      Node* is_array_function = WordEqual(context_slot, function);
+      GotoUnless(is_array_function, &mark_megamorphic);
+
+      // It is a monomorphic Array function. Increment the call count.
+      IncrementCallCount(type_feedback_vector, slot_id);
+
+      // Call the InterpreterPushArgsAndConstructArray stub.
+      Callable callable_call =
+          CodeFactory::InterpreterPushArgsAndConstructArray(isolate());
+      Node* code_target_call = HeapConstant(callable_call.code());
+      Node* ret_value =
+          CallStub(callable_call.descriptor(), code_target_call, context,
+                   arg_count, function, feedback_element, first_arg);
+      return_value.Bind(ret_value);
+      Goto(&end);
+    }
 
     Bind(&check_initialized);
     {
@@ -548,12 +658,12 @@
           WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
       GotoUnless(is_js_function, &mark_megamorphic);
 
-      // Check that it is not the Array() function.
+      // Check if it is the Array() function.
       Node* context_slot =
           LoadFixedArrayElement(LoadNativeContext(context),
                                 Int32Constant(Context::ARRAY_FUNCTION_INDEX));
       Node* is_array_function = WordEqual(context_slot, function);
-      GotoIf(is_array_function, &mark_megamorphic);
+      GotoIf(is_array_function, &create_allocation_site);
 
       // Check if the function belongs to the same native context
       Node* native_context = LoadNativeContext(
@@ -562,23 +672,22 @@
           WordEqual(native_context, LoadNativeContext(context));
       GotoUnless(is_same_native_context, &mark_megamorphic);
 
-      // Initialize it to a monomorphic target.
-      Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
-      // Count is Smi, so we don't need a write barrier.
-      StoreFixedArrayElement(type_feedback_vector, call_count_slot,
-                             SmiTag(Int32Constant(1)), SKIP_WRITE_BARRIER);
-
       CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
                                      function);
 
       // Call using call function builtin.
-      Callable callable = CodeFactory::InterpreterPushArgsAndCall(
-          isolate(), tail_call_mode, CallableType::kJSFunction);
-      Node* code_target = HeapConstant(callable.code());
-      Node* ret_value = CallStub(callable.descriptor(), code_target, context,
-                                 arg_count, first_arg, function);
-      return_value.Bind(ret_value);
-      Goto(&end);
+      Goto(&call_function);
+    }
+
+    Bind(&create_allocation_site);
+    {
+      CreateAllocationSiteInFeedbackVector(type_feedback_vector,
+                                           SmiTag(slot_id));
+
+      // Call using CallFunction builtin. CallICs have a PREMONOMORPHIC state.
+      // They start collecting feedback only when a call is executed the second
+      // time. So, do not pass any feedback here.
+      Goto(&call_function);
     }
 
     Bind(&mark_megamorphic);
@@ -595,8 +704,37 @@
     }
   }
 
+  Bind(&call_function);
+  {
+    // Increment the call count.
+    IncrementCallCount(type_feedback_vector, slot_id);
+
+    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
+        isolate(), tail_call_mode, CallableType::kJSFunction);
+    Node* code_target_call = HeapConstant(callable_call.code());
+    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
+                               context, arg_count, first_arg, function);
+    return_value.Bind(ret_value);
+    Goto(&end);
+  }
+
   Bind(&call);
   {
+    // Increment the call count.
+    IncrementCallCount(type_feedback_vector, slot_id);
+
+    // Call using call builtin.
+    Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
+        isolate(), tail_call_mode, CallableType::kAny);
+    Node* code_target_call = HeapConstant(callable_call.code());
+    Node* ret_value = CallStub(callable_call.descriptor(), code_target_call,
+                               context, arg_count, first_arg, function);
+    return_value.Bind(ret_value);
+    Goto(&end);
+  }
+
+  Bind(&call_without_feedback);
+  {
     // Call using call builtin.
     Callable callable_call = CodeFactory::InterpreterPushArgsAndCall(
         isolate(), tail_call_mode, CallableType::kAny);
@@ -623,11 +761,169 @@
 
 Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                           Node* new_target, Node* first_arg,
-                                          Node* arg_count) {
-  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
-  Node* code_target = HeapConstant(callable.code());
-  return CallStub(callable.descriptor(), code_target, context, arg_count,
-                  new_target, constructor, first_arg);
+                                          Node* arg_count, Node* slot_id,
+                                          Node* type_feedback_vector) {
+  Label call_construct(this), js_function(this), end(this);
+  Variable return_value(this, MachineRepresentation::kTagged);
+  Variable allocation_feedback(this, MachineRepresentation::kTagged);
+  allocation_feedback.Bind(UndefinedConstant());
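+  // |allocation_feedback| is handed to the construct stub below; it stays
+  // undefined unless an AllocationSite is found for an Array() construct.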
+
+  // Slot id of 0 is used to indicate no type feedback is available.
+  STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
+  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
+  GotoIf(is_feedback_unavailable, &call_construct);
+
+  // Check that the constructor is not a smi.
+  Node* is_smi = WordIsSmi(constructor);
+  GotoIf(is_smi, &call_construct);
+
+  // Check that constructor is a JSFunction.
+  Node* instance_type = LoadInstanceType(constructor);
+  Node* is_js_function =
+      WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
+  BranchIf(is_js_function, &js_function, &call_construct);
+
+  Bind(&js_function);
+  {
+    // Cache the called function in a feedback vector slot. Cache states
+    // are uninitialized, monomorphic (indicated by a JSFunction), and
+    // megamorphic.
+    // TODO(mythria/v8:5210): Check if it is better to mark extra_checks as a
+    // deferred block so that call_construct_function will be scheduled.
+    Label extra_checks(this), call_construct_function(this);
+
+    Node* feedback_element =
+        LoadFixedArrayElement(type_feedback_vector, slot_id);
+    Node* feedback_value = LoadWeakCellValue(feedback_element);
+    Node* is_monomorphic = WordEqual(constructor, feedback_value);
+    BranchIf(is_monomorphic, &call_construct_function, &extra_checks);
+
+    Bind(&extra_checks);
+    {
+      Label mark_megamorphic(this), initialize(this),
+          check_allocation_site(this), check_initialized(this),
+          set_alloc_feedback_and_call(this);
+      {
+        // Check if it is a megamorphic target
+        Comment("check if megamorphic");
+        Node* is_megamorphic = WordEqual(
+            feedback_element,
+            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
+        GotoIf(is_megamorphic, &call_construct_function);
+
+        Comment("check if weak cell");
+        Node* is_weak_cell = WordEqual(LoadMap(feedback_element),
+                                       LoadRoot(Heap::kWeakCellMapRootIndex));
+        GotoUnless(is_weak_cell, &check_allocation_site);
+        // If the weak cell is cleared, we have a new chance to become
+        // monomorphic.
+        Comment("check if weak cell is cleared");
+        Node* is_smi = WordIsSmi(feedback_value);
+        BranchIf(is_smi, &initialize, &mark_megamorphic);
+      }
+
+      Bind(&check_allocation_site);
+      {
+        Comment("check if it is an allocation site");
+        Node* is_allocation_site =
+            WordEqual(LoadObjectField(feedback_element, 0),
+                      LoadRoot(Heap::kAllocationSiteMapRootIndex));
+        GotoUnless(is_allocation_site, &check_initialized);
+
+        // Make sure the constructor is the Array() function.
+        Node* context_slot =
+            LoadFixedArrayElement(LoadNativeContext(context),
+                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
+        Node* is_array_function = WordEqual(context_slot, constructor);
+        BranchIf(is_array_function, &set_alloc_feedback_and_call,
+                 &mark_megamorphic);
+      }
+
+      Bind(&set_alloc_feedback_and_call);
+      {
+        allocation_feedback.Bind(feedback_element);
+        Goto(&call_construct_function);
+      }
+
+      Bind(&check_initialized);
+      {
+        // Check if it is uninitialized.
+        Comment("check if uninitialized");
+        Node* is_uninitialized = WordEqual(
+            feedback_element, LoadRoot(Heap::kuninitialized_symbolRootIndex));
+        BranchIf(is_uninitialized, &initialize, &mark_megamorphic);
+      }
+
+      Bind(&initialize);
+      {
+        Label create_weak_cell(this), create_allocation_site(this);
+        Comment("initialize the feedback element");
+        // Check that it is the Array() function.
+        Node* context_slot =
+            LoadFixedArrayElement(LoadNativeContext(context),
+                                  Int32Constant(Context::ARRAY_FUNCTION_INDEX));
+        Node* is_array_function = WordEqual(context_slot, constructor);
+        BranchIf(is_array_function, &create_allocation_site, &create_weak_cell);
+
+        Bind(&create_allocation_site);
+        {
+          Node* site = CreateAllocationSiteInFeedbackVector(
+              type_feedback_vector, SmiTag(slot_id));
+          allocation_feedback.Bind(site);
+          Goto(&call_construct_function);
+        }
+
+        Bind(&create_weak_cell);
+        {
+          CreateWeakCellInFeedbackVector(type_feedback_vector, SmiTag(slot_id),
+                                         constructor);
+          Goto(&call_construct_function);
+        }
+      }
+
+      Bind(&mark_megamorphic);
+      {
+        // MegamorphicSentinel is an immortal immovable object so
+        // write-barrier is not needed.
+        Comment("transition to megamorphic");
+        DCHECK(
+            Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
+        StoreFixedArrayElement(
+            type_feedback_vector, slot_id,
+            HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
+            SKIP_WRITE_BARRIER);
+        Goto(&call_construct_function);
+      }
+    }
+
+    Bind(&call_construct_function);
+    {
+      Comment("call using callConstructFunction");
+      IncrementCallCount(type_feedback_vector, slot_id);
+      Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct(
+          isolate(), CallableType::kJSFunction);
+      return_value.Bind(CallStub(callable_function.descriptor(),
+                                 HeapConstant(callable_function.code()),
+                                 context, arg_count, new_target, constructor,
+                                 allocation_feedback.value(), first_arg));
+      Goto(&end);
+    }
+  }
+
+  Bind(&call_construct);
+  {
+    Comment("call using callConstruct builtin");
+    Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(
+        isolate(), CallableType::kAny);
+    Node* code_target = HeapConstant(callable.code());
+    return_value.Bind(CallStub(callable.descriptor(), code_target, context,
+                               arg_count, new_target, constructor,
+                               UndefinedConstant(), first_arg));
+    Goto(&end);
+  }
+
+  Bind(&end);
+  return return_value.value();
 }
 
 Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
@@ -651,6 +947,9 @@
 }
 
 void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
+  // TODO(rmcilroy): It might be worthwhile to only update the budget for
+  // backwards branches. Those are distinguishable by the {JumpLoop} bytecode.
+
   Label ok(this), interrupt_check(this, Label::kDeferred), end(this);
   Node* budget_offset =
       IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);
diff --git a/src/interpreter/interpreter-assembler.h b/src/interpreter/interpreter-assembler.h
index b3fa42f..9dda20a 100644
--- a/src/interpreter/interpreter-assembler.h
+++ b/src/interpreter/interpreter-assembler.h
@@ -32,6 +32,9 @@
   // Returns the index immediate for bytecode operand |operand_index| in the
   // current bytecode.
   compiler::Node* BytecodeOperandIdx(int operand_index);
+  // Returns the UImm8 immediate for bytecode operand |operand_index| in the
+  // current bytecode.
+  compiler::Node* BytecodeOperandUImm(int operand_index);
   // Returns the Imm8 immediate for bytecode operand |operand_index| in the
   // current bytecode.
   compiler::Node* BytecodeOperandImm(int operand_index);
@@ -53,6 +56,15 @@
   compiler::Node* GetContext();
   void SetContext(compiler::Node* value);
 
+  // Context at |depth| in the context chain starting at |context|.
+  compiler::Node* GetContextAtDepth(compiler::Node* context,
+                                    compiler::Node* depth);
+
+  // Goto the given |target| if the context chain starting at |context| has any
+  // extensions up to the given |depth|.
+  void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
+                                          compiler::Node* depth, Label* target);
+
   // Number of registers.
   compiler::Node* RegisterCount();
 
@@ -92,6 +104,11 @@
   // Load the TypeFeedbackVector for the current function.
   compiler::Node* LoadTypeFeedbackVector();
 
+  // Increment the call count for a CALL_IC or construct call.
+  // The call count is located at feedback_vector[slot_id + 1].
+  compiler::Node* IncrementCallCount(compiler::Node* type_feedback_vector,
+                                     compiler::Node* slot_id);
+
   // Call JSFunction or Callable |function| with |arg_count|
   // arguments (not including receiver) and the first argument
   // located at |first_arg|. Type feedback is collected in the
@@ -120,7 +137,9 @@
                                 compiler::Node* context,
                                 compiler::Node* new_target,
                                 compiler::Node* first_arg,
-                                compiler::Node* arg_count);
+                                compiler::Node* arg_count,
+                                compiler::Node* slot_id,
+                                compiler::Node* type_feedback_vector);
 
   // Call runtime function with |arg_count| arguments and the first argument
   // located at |first_arg|.
diff --git a/src/interpreter/interpreter.cc b/src/interpreter/interpreter.cc
index 68f0342..4100302 100644
--- a/src/interpreter/interpreter.cc
+++ b/src/interpreter/interpreter.cc
@@ -9,6 +9,7 @@
 
 #include "src/ast/prettyprinter.h"
 #include "src/code-factory.h"
+#include "src/compilation-info.h"
 #include "src/compiler.h"
 #include "src/factory.h"
 #include "src/interpreter/bytecode-flags.h"
@@ -17,7 +18,7 @@
 #include "src/interpreter/interpreter-assembler.h"
 #include "src/interpreter/interpreter-intrinsics.h"
 #include "src/log.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -150,14 +151,39 @@
 }
 
 InterpreterCompilationJob::InterpreterCompilationJob(CompilationInfo* info)
-    : CompilationJob(info, "Ignition"), generator_(info) {}
+    : CompilationJob(info->isolate(), info, "Ignition"), generator_(info) {}
 
 InterpreterCompilationJob::Status InterpreterCompilationJob::PrepareJobImpl() {
+  if (FLAG_print_bytecode || FLAG_print_ast) {
+    OFStream os(stdout);
+    std::unique_ptr<char[]> name = info()->GetDebugName();
+    os << "[generating bytecode for function: " << info()->GetDebugName().get()
+       << "]" << std::endl
+       << std::flush;
+  }
+
+#ifdef DEBUG
+  if (info()->parse_info() && FLAG_print_ast) {
+    OFStream os(stdout);
+    os << "--- AST ---" << std::endl
+       << AstPrinter(info()->isolate()).PrintProgram(info()->literal())
+       << std::endl
+       << std::flush;
+  }
+#endif  // DEBUG
+
   return SUCCEEDED;
 }
 
 InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
-  generator()->GenerateBytecode();
+  // TODO(5203): These timers aren't thread-safe; move to using the CompilerJob
+  // timers.
+  RuntimeCallTimerScope runtimeTimer(info()->isolate(),
+                                     &RuntimeCallStats::CompileIgnition);
+  TimerEventScope<TimerEventCompileIgnition> timer(info()->isolate());
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
+
+  generator()->GenerateBytecode(stack_limit());
 
   if (generator()->HasStackOverflow()) {
     return FAILED;
@@ -182,34 +208,8 @@
   return SUCCEEDED;
 }
 
-bool Interpreter::MakeBytecode(CompilationInfo* info) {
-  RuntimeCallTimerScope runtimeTimer(info->isolate(),
-                                     &RuntimeCallStats::CompileIgnition);
-  TimerEventScope<TimerEventCompileIgnition> timer(info->isolate());
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      info->isolate(), &tracing::TraceEventStatsTable::CompileIgnition);
-
-  if (FLAG_print_bytecode || FLAG_print_ast) {
-    OFStream os(stdout);
-    std::unique_ptr<char[]> name = info->GetDebugName();
-    os << "[generating bytecode for function: " << info->GetDebugName().get()
-       << "]" << std::endl
-       << std::flush;
-  }
-
-#ifdef DEBUG
-  if (info->parse_info() && FLAG_print_ast) {
-    OFStream os(stdout);
-    os << "--- AST ---" << std::endl
-       << AstPrinter(info->isolate()).PrintProgram(info->literal()) << std::endl
-       << std::flush;
-  }
-#endif  // DEBUG
-
-  InterpreterCompilationJob job(info);
-  if (job.PrepareJob() != CompilationJob::SUCCEEDED) return false;
-  if (job.ExecuteJob() != CompilationJob::SUCCEEDED) return false;
-  return job.FinalizeJob() == CompilationJob::SUCCEEDED;
+CompilationJob* Interpreter::NewCompilationJob(CompilationInfo* info) {
+  return new InterpreterCompilationJob(info);
 }
 
 bool Interpreter::IsDispatchTableInitialized() {
@@ -421,16 +421,14 @@
   __ Dispatch();
 }
 
-Node* Interpreter::BuildLoadGlobal(Callable ic,
+Node* Interpreter::BuildLoadGlobal(Callable ic, Node* context,
+                                   Node* feedback_slot,
                                    InterpreterAssembler* assembler) {
   typedef LoadGlobalWithVectorDescriptor Descriptor;
-  // Get the global object.
-  Node* context = __ GetContext();
 
   // Load the global via the LoadGlobalIC.
   Node* code_target = __ HeapConstant(ic.code());
-  Node* raw_slot = __ BytecodeOperandIdx(0);
-  Node* smi_slot = __ SmiTag(raw_slot);
+  Node* smi_slot = __ SmiTag(feedback_slot);
   Node* type_feedback_vector = __ LoadTypeFeedbackVector();
   return __ CallStub(ic.descriptor(), code_target, context,
                      Arg(Descriptor::kSlot, smi_slot),
@@ -444,7 +442,11 @@
 void Interpreter::DoLdaGlobal(InterpreterAssembler* assembler) {
   Callable ic =
       CodeFactory::LoadGlobalICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF);
-  Node* result = BuildLoadGlobal(ic, assembler);
+
+  Node* context = __ GetContext();
+
+  Node* raw_slot = __ BytecodeOperandIdx(0);
+  Node* result = BuildLoadGlobal(ic, context, raw_slot, assembler);
   __ SetAccumulator(result);
   __ Dispatch();
 }
@@ -456,7 +458,11 @@
 void Interpreter::DoLdrGlobal(InterpreterAssembler* assembler) {
   Callable ic =
       CodeFactory::LoadGlobalICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF);
-  Node* result = BuildLoadGlobal(ic, assembler);
+
+  Node* context = __ GetContext();
+
+  Node* raw_slot = __ BytecodeOperandIdx(0);
+  Node* result = BuildLoadGlobal(ic, context, raw_slot, assembler);
   Node* destination = __ BytecodeOperandReg(1);
   __ StoreRegister(result, destination);
   __ Dispatch();
@@ -469,7 +475,11 @@
 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) {
   Callable ic =
       CodeFactory::LoadGlobalICInOptimizedCode(isolate_, INSIDE_TYPEOF);
-  Node* result = BuildLoadGlobal(ic, assembler);
+
+  Node* context = __ GetContext();
+
+  Node* raw_slot = __ BytecodeOperandIdx(0);
+  Node* result = BuildLoadGlobal(ic, context, raw_slot, assembler);
   __ SetAccumulator(result);
   __ Dispatch();
 }
@@ -520,44 +530,51 @@
   Node* reg_index = __ BytecodeOperandReg(0);
   Node* context = __ LoadRegister(reg_index);
   Node* slot_index = __ BytecodeOperandIdx(1);
-  return __ LoadContextSlot(context, slot_index);
+  Node* depth = __ BytecodeOperandUImm(2);
+  Node* slot_context = __ GetContextAtDepth(context, depth);
+  return __ LoadContextSlot(slot_context, slot_index);
 }
 
-// LdaContextSlot <context> <slot_index>
+// LdaContextSlot <context> <slot_index> <depth>
 //
-// Load the object in |slot_index| of |context| into the accumulator.
+// Load the object in |slot_index| of the context at |depth| in the context
+// chain starting at |context| into the accumulator.
 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) {
   Node* result = BuildLoadContextSlot(assembler);
   __ SetAccumulator(result);
   __ Dispatch();
 }
 
-// LdrContextSlot <context> <slot_index> <reg>
+// LdrContextSlot <context> <slot_index> <depth> <reg>
 //
-// Load the object in <slot_index> of <context> into register <reg>.
+// Load the object in |slot_index| of the context at |depth| in the context
+// chain of |context| into register |reg|.
 void Interpreter::DoLdrContextSlot(InterpreterAssembler* assembler) {
   Node* result = BuildLoadContextSlot(assembler);
-  Node* destination = __ BytecodeOperandReg(2);
+  Node* destination = __ BytecodeOperandReg(3);
   __ StoreRegister(result, destination);
   __ Dispatch();
 }
 
-// StaContextSlot <context> <slot_index>
+// StaContextSlot <context> <slot_index> <depth>
 //
-// Stores the object in the accumulator into |slot_index| of |context|.
+// Stores the object in the accumulator into |slot_index| of the context at
+// |depth| in the context chain starting at |context|.
 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) {
   Node* value = __ GetAccumulator();
   Node* reg_index = __ BytecodeOperandReg(0);
   Node* context = __ LoadRegister(reg_index);
   Node* slot_index = __ BytecodeOperandIdx(1);
-  __ StoreContextSlot(context, slot_index, value);
+  Node* depth = __ BytecodeOperandUImm(2);
+  Node* slot_context = __ GetContextAtDepth(context, depth);
+  __ StoreContextSlot(slot_context, slot_index, value);
   __ Dispatch();
 }
 
 void Interpreter::DoLdaLookupSlot(Runtime::FunctionId function_id,
                                   InterpreterAssembler* assembler) {
-  Node* index = __ BytecodeOperandIdx(0);
-  Node* name = __ LoadConstantPoolEntry(index);
+  Node* name_index = __ BytecodeOperandIdx(0);
+  Node* name = __ LoadConstantPoolEntry(name_index);
   Node* context = __ GetContext();
   Node* result = __ CallRuntime(function_id, context, name);
   __ SetAccumulator(result);
@@ -580,6 +597,103 @@
   DoLdaLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
 }
 
+void Interpreter::DoLdaLookupContextSlot(Runtime::FunctionId function_id,
+                                         InterpreterAssembler* assembler) {
+  Node* context = __ GetContext();
+  Node* name_index = __ BytecodeOperandIdx(0);
+  Node* slot_index = __ BytecodeOperandIdx(1);
+  Node* depth = __ BytecodeOperandUImm(2);
+
+  Label slowpath(assembler, Label::kDeferred);
+
+  // Check for context extensions to allow the fast path.
+  __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath);
+
+  // Fast path does a normal load context.
+  {
+    Node* slot_context = __ GetContextAtDepth(context, depth);
+    Node* result = __ LoadContextSlot(slot_context, slot_index);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
+
+  // Slow path when we have to call out to the runtime.
+  __ Bind(&slowpath);
+  {
+    Node* name = __ LoadConstantPoolEntry(name_index);
+    Node* result = __ CallRuntime(function_id, context, name);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
+}
+
+// LdaLookupContextSlot <name_index> <slot_index> <depth>
+//
+// Lookup the object with the name in constant pool entry |name_index|
+// dynamically, falling back to the runtime if a context extension is found.
+void Interpreter::DoLdaLookupContextSlot(InterpreterAssembler* assembler) {
+  DoLdaLookupContextSlot(Runtime::kLoadLookupSlot, assembler);
+}
+
+// LdaLookupContextSlotInsideTypeof <name_index> <slot_index> <depth>
+//
+// Lookup the object with the name in constant pool entry |name_index|
+// dynamically without causing a ReferenceError.
+void Interpreter::DoLdaLookupContextSlotInsideTypeof(
+    InterpreterAssembler* assembler) {
+  DoLdaLookupContextSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
+}
+
+void Interpreter::DoLdaLookupGlobalSlot(Runtime::FunctionId function_id,
+                                        InterpreterAssembler* assembler) {
+  Node* context = __ GetContext();
+  Node* name_index = __ BytecodeOperandIdx(0);
+  Node* feedback_slot = __ BytecodeOperandIdx(1);
+  Node* depth = __ BytecodeOperandUImm(2);
+
+  Label slowpath(assembler, Label::kDeferred);
+
+  // Check for context extensions to allow the fast path.
+  __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath);
+
+  // Fast path does a normal load global.
+  {
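+    // Pick the IC variant that matches |function_id|: typeof lookups use the
+    // INSIDE_TYPEOF flavour of LoadGlobalIC.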
+    Callable ic = CodeFactory::LoadGlobalICInOptimizedCode(
+        isolate_, function_id == Runtime::kLoadLookupSlotInsideTypeof
+                      ? INSIDE_TYPEOF
+                      : NOT_INSIDE_TYPEOF);
+    Node* result = BuildLoadGlobal(ic, context, feedback_slot, assembler);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
+
+  // Slow path when we have to call out to the runtime.
+  __ Bind(&slowpath);
+  {
+    Node* name = __ LoadConstantPoolEntry(name_index);
+    Node* result = __ CallRuntime(function_id, context, name);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
+}
+
+// LdaLookupGlobalSlot <name_index> <feedback_slot> <depth>
+//
+// Lookup the object with the name in constant pool entry |name_index|
+// dynamically.
+void Interpreter::DoLdaLookupGlobalSlot(InterpreterAssembler* assembler) {
+  DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlot, assembler);
+}
+
+// LdaLookupGlobalSlotInsideTypeof <name_index> <feedback_slot> <depth>
+//
+// Lookup the object with the name in constant pool entry |name_index|
+// dynamically without causing a ReferenceError.
+void Interpreter::DoLdaLookupGlobalSlotInsideTypeof(
+    InterpreterAssembler* assembler) {
+  DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
+}
+
 void Interpreter::DoStaLookupSlot(LanguageMode language_mode,
                                   InterpreterAssembler* assembler) {
   Node* value = __ GetAccumulator();
@@ -816,6 +930,80 @@
   __ Dispatch();
 }
 
+template <class Generator>
+void Interpreter::DoCompareOpWithFeedback(InterpreterAssembler* assembler) {
+  Node* reg_index = __ BytecodeOperandReg(0);
+  Node* lhs = __ LoadRegister(reg_index);
+  Node* rhs = __ GetAccumulator();
+  Node* context = __ GetContext();
+  Node* slot_index = __ BytecodeOperandIdx(1);
+  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
+
+  // TODO(interpreter): the only reason this check is here is because we
+  // sometimes emit comparisons that shouldn't collect feedback (e.g.
+  // try-finally blocks and generators), and we could get rid of this by
+  // introducing Smi equality tests.
+  Label skip_feedback_update(assembler);
+  __ GotoIf(__ WordEqual(slot_index, __ IntPtrConstant(0)),
+            &skip_feedback_update);
+
+  Variable var_type_feedback(assembler, MachineRepresentation::kWord32);
+  Label lhs_is_smi(assembler), lhs_is_not_smi(assembler),
+      gather_rhs_type(assembler), do_compare(assembler);
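+  // The feedback for the two operands is combined with Word32Or before
+  // UpdateFeedback records it for |slot_index|.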
+  __ Branch(__ WordIsSmi(lhs), &lhs_is_smi, &lhs_is_not_smi);
+
+  __ Bind(&lhs_is_smi);
+  var_type_feedback.Bind(
+      __ Int32Constant(CompareOperationFeedback::kSignedSmall));
+  __ Goto(&gather_rhs_type);
+
+  __ Bind(&lhs_is_not_smi);
+  {
+    Label lhs_is_number(assembler), lhs_is_not_number(assembler);
+    Node* lhs_map = __ LoadMap(lhs);
+    __ Branch(__ WordEqual(lhs_map, __ HeapNumberMapConstant()), &lhs_is_number,
+              &lhs_is_not_number);
+
+    __ Bind(&lhs_is_number);
+    var_type_feedback.Bind(__ Int32Constant(CompareOperationFeedback::kNumber));
+    __ Goto(&gather_rhs_type);
+
+    __ Bind(&lhs_is_not_number);
+    var_type_feedback.Bind(__ Int32Constant(CompareOperationFeedback::kAny));
+    __ Goto(&do_compare);
+  }
+
+  __ Bind(&gather_rhs_type);
+  {
+    Label rhs_is_smi(assembler);
+    __ GotoIf(__ WordIsSmi(rhs), &rhs_is_smi);
+
+    Node* rhs_map = __ LoadMap(rhs);
+    Node* rhs_type =
+        __ Select(__ WordEqual(rhs_map, __ HeapNumberMapConstant()),
+                  __ Int32Constant(CompareOperationFeedback::kNumber),
+                  __ Int32Constant(CompareOperationFeedback::kAny));
+    var_type_feedback.Bind(__ Word32Or(var_type_feedback.value(), rhs_type));
+    __ Goto(&do_compare);
+
+    __ Bind(&rhs_is_smi);
+    var_type_feedback.Bind(
+        __ Word32Or(var_type_feedback.value(),
+                    __ Int32Constant(CompareOperationFeedback::kSignedSmall)));
+    __ Goto(&do_compare);
+  }
+
+  __ Bind(&do_compare);
+  __ UpdateFeedback(var_type_feedback.value(), type_feedback_vector,
+                    slot_index);
+  __ Goto(&skip_feedback_update);
+
+  __ Bind(&skip_feedback_update);
+  Node* result = Generator::Generate(assembler, lhs, rhs, context);
+  __ SetAccumulator(result);
+  __ Dispatch();
+}
+
 // Add <src>
 //
 // Add register <src> to accumulator.
@@ -1227,25 +1415,29 @@
 
 // ToName
 //
-// Cast the object referenced by the accumulator to a name.
+// Convert the object referenced by the accumulator to a name.
 void Interpreter::DoToName(InterpreterAssembler* assembler) {
-  Node* result = BuildUnaryOp(CodeFactory::ToName(isolate_), assembler);
+  Node* object = __ GetAccumulator();
+  Node* context = __ GetContext();
+  Node* result = __ ToName(context, object);
   __ StoreRegister(result, __ BytecodeOperandReg(0));
   __ Dispatch();
 }
 
 // ToNumber
 //
-// Cast the object referenced by the accumulator to a number.
+// Convert the object referenced by the accumulator to a number.
 void Interpreter::DoToNumber(InterpreterAssembler* assembler) {
-  Node* result = BuildUnaryOp(CodeFactory::ToNumber(isolate_), assembler);
+  Node* object = __ GetAccumulator();
+  Node* context = __ GetContext();
+  Node* result = __ ToNumber(context, object);
   __ StoreRegister(result, __ BytecodeOperandReg(0));
   __ Dispatch();
 }
 
 // ToObject
 //
-// Cast the object referenced by the accumulator to a JSObject.
+// Convert the object referenced by the accumulator to a JSReceiver.
 void Interpreter::DoToObject(InterpreterAssembler* assembler) {
   Node* result = BuildUnaryOp(CodeFactory::ToObject(isolate_), assembler);
   __ StoreRegister(result, __ BytecodeOperandReg(0));
@@ -1395,7 +1587,12 @@
   DoJSCall(assembler, TailCallMode::kAllow);
 }
 
-void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) {
+// CallRuntime <function_id> <first_arg> <arg_count>
+//
+// Call the runtime function |function_id| with the first argument in
+// register |first_arg| and |arg_count| arguments in subsequent
+// registers.
+void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) {
   Node* function_id = __ BytecodeOperandRuntimeId(0);
   Node* first_arg_reg = __ BytecodeOperandReg(1);
   Node* first_arg = __ RegisterLocation(first_arg_reg);
@@ -1406,15 +1603,6 @@
   __ Dispatch();
 }
 
-// CallRuntime <function_id> <first_arg> <arg_count>
-//
-// Call the runtime function |function_id| with the first argument in
-// register |first_arg| and |arg_count| arguments in subsequent
-// registers.
-void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) {
-  DoCallRuntimeCommon(assembler);
-}
-
 // InvokeIntrinsic <function_id> <first_arg> <arg_count>
 //
 // Implements the semantic equivalent of calling the runtime function
@@ -1432,7 +1620,13 @@
   __ Dispatch();
 }
 
-void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) {
+// CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
+//
+// Call the runtime function |function_id| which returns a pair, with the
+// first argument in register |first_arg| and |arg_count| arguments in
+// subsequent registers. Returns the result in <first_return> and
+// <first_return + 1>.
+void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) {
   // Call the runtime function.
   Node* function_id = __ BytecodeOperandRuntimeId(0);
   Node* first_arg_reg = __ BytecodeOperandReg(1);
@@ -1452,17 +1646,11 @@
   __ Dispatch();
 }
 
-// CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
+// CallJSRuntime <context_index> <receiver> <arg_count>
 //
-// Call the runtime function |function_id| which returns a pair, with the
-// first argument in register |first_arg| and |arg_count| arguments in
-// subsequent registers. Returns the result in <first_return> and
-// <first_return + 1>
-void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) {
-  DoCallRuntimeForPairCommon(assembler);
-}
-
-void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) {
+// Call the JS runtime function at |context_index| with the receiver in
+// register |receiver| and |arg_count| arguments in subsequent registers.
+void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
   Node* context_index = __ BytecodeOperandIdx(0);
   Node* receiver_reg = __ BytecodeOperandReg(1);
   Node* first_arg = __ RegisterLocation(receiver_reg);
@@ -1483,29 +1671,6 @@
   __ Dispatch();
 }
 
-// CallJSRuntime <context_index> <receiver> <arg_count>
-//
-// Call the JS runtime function that has the |context_index| with the receiver
-// in register |receiver| and |arg_count| arguments in subsequent registers.
-void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
-  DoCallJSRuntimeCommon(assembler);
-}
-
-void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) {
-  Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
-  Node* new_target = __ GetAccumulator();
-  Node* constructor_reg = __ BytecodeOperandReg(0);
-  Node* constructor = __ LoadRegister(constructor_reg);
-  Node* first_arg_reg = __ BytecodeOperandReg(1);
-  Node* first_arg = __ RegisterLocation(first_arg_reg);
-  Node* args_count = __ BytecodeOperandCount(2);
-  Node* context = __ GetContext();
-  Node* result =
-      __ CallConstruct(constructor, context, new_target, first_arg, args_count);
-  __ SetAccumulator(result);
-  __ Dispatch();
-}
-
 // New <constructor> <first_arg> <arg_count>
 //
 // Call operator new with |constructor| and the first argument in
@@ -1513,42 +1678,55 @@
 // registers. The new.target is in the accumulator.
 //
 void Interpreter::DoNew(InterpreterAssembler* assembler) {
-  DoCallConstruct(assembler);
+  Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
+  Node* new_target = __ GetAccumulator();
+  Node* constructor_reg = __ BytecodeOperandReg(0);
+  Node* constructor = __ LoadRegister(constructor_reg);
+  Node* first_arg_reg = __ BytecodeOperandReg(1);
+  Node* first_arg = __ RegisterLocation(first_arg_reg);
+  Node* args_count = __ BytecodeOperandCount(2);
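+  // Operand 3 is the type feedback slot; CallConstruct uses it to record the
+  // constructor target or an AllocationSite for Array() constructs.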
+  Node* slot_id = __ BytecodeOperandIdx(3);
+  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
+  Node* context = __ GetContext();
+  Node* result = __ CallConstruct(constructor, context, new_target, first_arg,
+                                  args_count, slot_id, type_feedback_vector);
+  __ SetAccumulator(result);
+  __ Dispatch();
 }
 
 // TestEqual <src>
 //
 // Test if the value in the <src> register equals the accumulator.
 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) {
-  DoBinaryOp<EqualStub>(assembler);
+  DoCompareOpWithFeedback<EqualStub>(assembler);
 }
 
 // TestNotEqual <src>
 //
 // Test if the value in the <src> register is not equal to the accumulator.
 void Interpreter::DoTestNotEqual(InterpreterAssembler* assembler) {
-  DoBinaryOp<NotEqualStub>(assembler);
+  DoCompareOpWithFeedback<NotEqualStub>(assembler);
 }
 
 // TestEqualStrict <src>
 //
 // Test if the value in the <src> register is strictly equal to the accumulator.
 void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) {
-  DoBinaryOp<StrictEqualStub>(assembler);
+  DoCompareOpWithFeedback<StrictEqualStub>(assembler);
 }
 
 // TestLessThan <src>
 //
 // Test if the value in the <src> register is less than the accumulator.
 void Interpreter::DoTestLessThan(InterpreterAssembler* assembler) {
-  DoBinaryOp<LessThanStub>(assembler);
+  DoCompareOpWithFeedback<LessThanStub>(assembler);
 }
 
 // TestGreaterThan <src>
 //
 // Test if the value in the <src> register is greater than the accumulator.
 void Interpreter::DoTestGreaterThan(InterpreterAssembler* assembler) {
-  DoBinaryOp<GreaterThanStub>(assembler);
+  DoCompareOpWithFeedback<GreaterThanStub>(assembler);
 }
 
 // TestLessThanOrEqual <src>
@@ -1556,7 +1734,7 @@
 // Test if the value in the <src> register is less than or equal to the
 // accumulator.
 void Interpreter::DoTestLessThanOrEqual(InterpreterAssembler* assembler) {
-  DoBinaryOp<LessThanOrEqualStub>(assembler);
+  DoCompareOpWithFeedback<LessThanOrEqualStub>(assembler);
 }
 
 // TestGreaterThanOrEqual <src>
@@ -1564,7 +1742,7 @@
 // Test if the value in the <src> register is greater than or equal to the
 // accumulator.
 void Interpreter::DoTestGreaterThanOrEqual(InterpreterAssembler* assembler) {
-  DoBinaryOp<GreaterThanOrEqualStub>(assembler);
+  DoCompareOpWithFeedback<GreaterThanOrEqualStub>(assembler);
 }
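
The Test* handlers above now route through DoCompareOpWithFeedback, i.e. each comparison ORs the operand kinds it observed into its feedback slot so the optimizing compiler can later specialize the comparison. A minimal standalone sketch of that lattice-style recording; the enum values and the Record signature are illustrative stand-ins, not V8's own:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative feedback lattice: each comparison site ORs in what it has seen,
// so a monomorphic site keeps a single bit and never loses information.
enum CompareFeedback : uint8_t {
  kNone        = 0,
  kSignedSmall = 1 << 0,  // both operands were small integers
  kNumber      = 1 << 1,  // at least one operand was a heap number
  kAny         = 1 << 2,  // strings, objects, anything else
};

struct FeedbackSlot {
  uint8_t bits = kNone;
  void Record(bool lhs_is_smi, bool rhs_is_smi, bool both_numbers) {
    if (lhs_is_smi && rhs_is_smi) {
      bits |= kSignedSmall;
    } else if (both_numbers) {
      bits |= kNumber;
    } else {
      bits |= kAny;
    }
  }
};

int main() {
  FeedbackSlot slot;
  slot.Record(true, true, true);   // e.g. 1 < 2
  slot.Record(false, true, true);  // e.g. 1.5 < 2
  std::printf("feedback bits: 0x%x\n", slot.bits);  // kSignedSmall | kNumber
}
```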
 
 // TestIn <src>
@@ -1783,6 +1961,35 @@
   __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
 }
 
+// JumpLoop <imm> <loop_depth>
+//
+// Jump by number of bytes represented by the immediate operand |imm|. Also
+// performs a loop nesting check and potentially triggers OSR in case the
+// current OSR level matches (or exceeds) the specified |loop_depth|.
+void Interpreter::DoJumpLoop(InterpreterAssembler* assembler) {
+  Node* relative_jump = __ BytecodeOperandImm(0);
+  Node* loop_depth = __ BytecodeOperandImm(1);
+  Node* osr_level = __ LoadOSRNestingLevel();
+
+  // Check if OSR points at the given {loop_depth} are armed by comparing it to
+  // the current {osr_level} loaded from the header of the BytecodeArray.
+  Label ok(assembler), osr_armed(assembler, Label::kDeferred);
+  Node* condition = __ Int32GreaterThanOrEqual(loop_depth, osr_level);
+  __ Branch(condition, &ok, &osr_armed);
+
+  __ Bind(&ok);
+  __ Jump(relative_jump);
+
+  __ Bind(&osr_armed);
+  {
+    Callable callable = CodeFactory::InterpreterOnStackReplacement(isolate_);
+    Node* target = __ HeapConstant(callable.code());
+    Node* context = __ GetContext();
+    __ CallStub(callable.descriptor(), target, context);
+    __ Jump(relative_jump);
+  }
+}
+
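
DoJumpLoop folds the old OsrPoll check into the back-edge jump itself: the jump is always taken, but when the loop's depth falls below the OSR nesting level recorded in the BytecodeArray header, the on-stack-replacement stub runs first. A self-contained sketch of that control flow, using stand-in types rather than the InterpreterAssembler:

```cpp
#include <cstdio>

// Stand-ins for the bytecode array header and the OSR entry stub; the real
// handler reads the level via LoadOSRNestingLevel and calls the
// InterpreterOnStackReplacement builtin.
struct BytecodeArrayHeader { int osr_nesting_level; };

void EnterOnStackReplacement() { std::puts("OSR armed: entering optimized code"); }

// The back-edge jump is always taken; the OSR stub only runs first when this
// loop's depth is below the recorded OSR level.
int JumpLoop(int current_offset, int relative_jump, int loop_depth,
             const BytecodeArrayHeader& header) {
  if (loop_depth < header.osr_nesting_level) {
    EnterOnStackReplacement();  // the deferred osr_armed path
  }
  return current_offset + relative_jump;  // dispatch continues at the target
}

int main() {
  BytecodeArrayHeader header{2};
  int next = JumpLoop(40, -12, /*loop_depth=*/1, header);  // 1 < 2: armed
  std::printf("next bytecode offset: %d\n", next);
}
```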
 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags>
 //
 // Creates a regular expression literal for literal index <literal_idx> with
@@ -1804,21 +2011,47 @@
 
 // CreateArrayLiteral <element_idx> <literal_idx> <flags>
 //
-// Creates an array literal for literal index <literal_idx> with flags <flags>
-// and constant elements in <element_idx>.
+// Creates an array literal for literal index <literal_idx> with
+// CreateArrayLiteral flags <flags> and constant elements in <element_idx>.
 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
-  Node* index = __ BytecodeOperandIdx(0);
-  Node* constant_elements = __ LoadConstantPoolEntry(index);
   Node* literal_index_raw = __ BytecodeOperandIdx(1);
   Node* literal_index = __ SmiTag(literal_index_raw);
-  Node* flags_raw = __ BytecodeOperandFlag(2);
-  Node* flags = __ SmiTag(flags_raw);
   Node* closure = __ LoadRegister(Register::function_closure());
   Node* context = __ GetContext();
-  Node* result = __ CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
-                                literal_index, constant_elements, flags);
-  __ SetAccumulator(result);
-  __ Dispatch();
+  Node* bytecode_flags = __ BytecodeOperandFlag(2);
+
+  Label fast_shallow_clone(assembler),
+      call_runtime(assembler, Label::kDeferred);
+  Node* use_fast_shallow_clone = __ Word32And(
+      bytecode_flags,
+      __ Int32Constant(CreateArrayLiteralFlags::FastShallowCloneBit::kMask));
+  __ BranchIf(use_fast_shallow_clone, &fast_shallow_clone, &call_runtime);
+
+  __ Bind(&fast_shallow_clone);
+  {
+    DCHECK(FLAG_allocation_site_pretenuring);
+    Node* result = FastCloneShallowArrayStub::Generate(
+        assembler, closure, literal_index, context, &call_runtime,
+        TRACK_ALLOCATION_SITE);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
+
+  __ Bind(&call_runtime);
+  {
+    STATIC_ASSERT(CreateArrayLiteralFlags::FlagsBits::kShift == 0);
+    Node* flags_raw = __ Word32And(
+        bytecode_flags,
+        __ Int32Constant(CreateArrayLiteralFlags::FlagsBits::kMask));
+    Node* flags = __ SmiTag(flags_raw);
+    Node* index = __ BytecodeOperandIdx(0);
+    Node* constant_elements = __ LoadConstantPoolEntry(index);
+    Node* result =
+        __ CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
+                       literal_index, constant_elements, flags);
+    __ SetAccumulator(result);
+    __ Dispatch();
+  }
 }
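
The handler decodes two pieces of information from a single flag operand: FastShallowCloneBit selects the stub fast path, and FlagsBits (statically asserted to start at shift 0) carries the raw flags passed to the runtime call. A generic mask/shift sketch of that packing; the concrete layout chosen here is an assumption for illustration only:

```cpp
#include <cstdint>
#include <cstdio>

// Generic mask/shift bit field, in the spirit of V8's BitField templates
// (names and layout here are illustrative).
template <typename T, int kShift, int kSize>
struct BitField {
  static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
};

// Assumed layout: the low bits carry the runtime flags, one higher bit selects
// the fast shallow-clone path.
using FlagsBits = BitField<uint8_t, 0, 4>;
using FastShallowCloneBit = BitField<bool, 4, 1>;

int main() {
  uint32_t operand =
      FlagsBits::encode(0b0101) | FastShallowCloneBit::encode(true);
  static_assert(FlagsBits::kMask == 0x0F, "flags occupy the low bits");
  if (operand & FastShallowCloneBit::kMask) {
    std::puts("fast shallow clone path");
  }
  std::printf("runtime flags: %u\n", FlagsBits::decode(operand));
}
```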
 
 // CreateObjectLiteral <element_idx> <literal_idx> <flags>
@@ -1915,19 +2148,22 @@
   __ Dispatch();
 }
 
-// CreateCatchContext <exception> <index>
+// CreateCatchContext <exception> <name_idx> <scope_info_idx>
 //
 // Creates a new context for a catch block with the |exception| in a register,
-// the variable name at |index| and the closure in the accumulator.
+// the variable name at |name_idx|, the ScopeInfo at |scope_info_idx|, and the
+// closure in the accumulator.
 void Interpreter::DoCreateCatchContext(InterpreterAssembler* assembler) {
   Node* exception_reg = __ BytecodeOperandReg(0);
   Node* exception = __ LoadRegister(exception_reg);
-  Node* index = __ BytecodeOperandIdx(1);
-  Node* name = __ LoadConstantPoolEntry(index);
+  Node* name_idx = __ BytecodeOperandIdx(1);
+  Node* name = __ LoadConstantPoolEntry(name_idx);
+  Node* scope_info_idx = __ BytecodeOperandIdx(2);
+  Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx);
   Node* closure = __ GetAccumulator();
   Node* context = __ GetContext();
   __ SetAccumulator(__ CallRuntime(Runtime::kPushCatchContext, context, name,
-                                   exception, closure));
+                                   exception, scope_info, closure));
   __ Dispatch();
 }
 
@@ -1936,24 +2172,27 @@
 // Creates a new context with number of |slots| for the function closure.
 void Interpreter::DoCreateFunctionContext(InterpreterAssembler* assembler) {
   Node* closure = __ LoadRegister(Register::function_closure());
-  Node* slots = __ BytecodeOperandIdx(0);
+  Node* slots = __ BytecodeOperandUImm(0);
   Node* context = __ GetContext();
   __ SetAccumulator(
       FastNewFunctionContextStub::Generate(assembler, closure, slots, context));
   __ Dispatch();
 }
 
-// CreateWithContext <register>
+// CreateWithContext <register> <scope_info_idx>
 //
-// Creates a new context for a with-statement with the object in |register| and
-// the closure in the accumulator.
+// Creates a new context with the ScopeInfo at |scope_info_idx| for a
+// with-statement with the object in |register| and the closure in the
+// accumulator.
 void Interpreter::DoCreateWithContext(InterpreterAssembler* assembler) {
   Node* reg_index = __ BytecodeOperandReg(0);
   Node* object = __ LoadRegister(reg_index);
+  Node* scope_info_idx = __ BytecodeOperandIdx(1);
+  Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx);
   Node* closure = __ GetAccumulator();
   Node* context = __ GetContext();
-  __ SetAccumulator(
-      __ CallRuntime(Runtime::kPushWithContext, context, object, closure));
+  __ SetAccumulator(__ CallRuntime(Runtime::kPushWithContext, context, object,
+                                   scope_info, closure));
   __ Dispatch();
 }
 
@@ -2047,32 +2286,6 @@
   }
 }
 
-// OsrPoll <loop_depth>
-//
-// Performs a loop nesting check and potentially triggers OSR.
-void Interpreter::DoOsrPoll(InterpreterAssembler* assembler) {
-  Node* loop_depth = __ BytecodeOperandImm(0);
-  Node* osr_level = __ LoadOSRNestingLevel();
-
-  // Check if OSR points at the given {loop_depth} are armed by comparing it to
-  // the current {osr_level} loaded from the header of the BytecodeArray.
-  Label ok(assembler), osr_armed(assembler, Label::kDeferred);
-  Node* condition = __ Int32GreaterThanOrEqual(loop_depth, osr_level);
-  __ Branch(condition, &ok, &osr_armed);
-
-  __ Bind(&ok);
-  __ Dispatch();
-
-  __ Bind(&osr_armed);
-  {
-    Callable callable = CodeFactory::InterpreterOnStackReplacement(isolate_);
-    Node* target = __ HeapConstant(callable.code());
-    Node* context = __ GetContext();
-    __ CallStub(callable.descriptor(), target, context);
-    __ Dispatch();
-  }
-}
-
 // Throw
 //
 // Throws the exception in the accumulator.
@@ -2158,9 +2371,8 @@
   if (FLAG_debug_code) {
     Label already_receiver(assembler), abort(assembler);
     Node* instance_type = __ LoadInstanceType(receiver);
-    Node* first_receiver_type = __ Int32Constant(FIRST_JS_RECEIVER_TYPE);
-    __ BranchIfInt32GreaterThanOrEqual(instance_type, first_receiver_type,
-                                       &already_receiver, &abort);
+    __ Branch(__ IsJSReceiverInstanceType(instance_type), &already_receiver,
+              &abort);
     __ Bind(&abort);
     {
       __ Abort(kExpectedJSReceiver);
@@ -2260,10 +2472,10 @@
   }
 }
 
-// ForInDone <index> <cache_length>
+// ForInContinue <index> <cache_length>
 //
-// Returns true if the end of the enumerable properties has been reached.
-void Interpreter::DoForInDone(InterpreterAssembler* assembler) {
+// Returns false if the end of the enumerable properties has been reached.
+void Interpreter::DoForInContinue(InterpreterAssembler* assembler) {
   Node* index_reg = __ BytecodeOperandReg(0);
   Node* index = __ LoadRegister(index_reg);
   Node* cache_length_reg = __ BytecodeOperandReg(1);
@@ -2274,12 +2486,12 @@
   __ BranchIfWordEqual(index, cache_length, &if_true, &if_false);
   __ Bind(&if_true);
   {
-    __ SetAccumulator(__ BooleanConstant(true));
+    __ SetAccumulator(__ BooleanConstant(false));
     __ Goto(&end);
   }
   __ Bind(&if_false);
   {
-    __ SetAccumulator(__ BooleanConstant(false));
+    __ SetAccumulator(__ BooleanConstant(true));
     __ Goto(&end);
   }
   __ Bind(&end);
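
ForInDone reported true once the index reached the cache length, so loops jumped out when the accumulator was true; ForInContinue returns the negation, so the loop back edge is taken while it is true. The inversion in plain terms:

```cpp
#include <cstdio>

// ForInDone answered "have we run off the end of the enum cache?"; the new
// ForInContinue answers the opposite question, so the loop condition reads
// directly as "keep going".
bool ForInDone(int index, int cache_length) { return index == cache_length; }
bool ForInContinue(int index, int cache_length) { return index != cache_length; }

int main() {
  const int cache_length = 3;
  for (int index = 0; ForInContinue(index, cache_length); ++index) {
    std::printf("visiting property slot %d\n", index);
  }
}
```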
diff --git a/src/interpreter/interpreter.h b/src/interpreter/interpreter.h
index bbd0102..b646bf8 100644
--- a/src/interpreter/interpreter.h
+++ b/src/interpreter/interpreter.h
@@ -22,6 +22,7 @@
 class Isolate;
 class Callable;
 class CompilationInfo;
+class CompilationJob;
 
 namespace compiler {
 class Node;
@@ -42,8 +43,8 @@
   // Returns the interrupt budget which should be used for the profiler counter.
   static int InterruptBudget();
 
-  // Generate bytecode for |info|.
-  static bool MakeBytecode(CompilationInfo* info);
+  // Creates a compilation job which will generate bytecode for |info|.
+  static CompilationJob* NewCompilationJob(CompilationInfo* info);
 
   // Return bytecode handler for |bytecode|.
   Code* GetBytecodeHandler(Bytecode bytecode, OperandScale operand_scale);
@@ -55,7 +56,7 @@
   void TraceCodegen(Handle<Code> code);
   const char* LookupNameOfBytecodeHandler(Code* code);
 
-  Local<v8::Object> GetDispatchCountersObject();
+  V8_EXPORT_PRIVATE Local<v8::Object> GetDispatchCountersObject();
 
   Address dispatch_table_address() {
     return reinterpret_cast<Address>(&dispatch_table_[0]);
@@ -83,6 +84,11 @@
   template <class Generator>
   void DoBinaryOpWithFeedback(InterpreterAssembler* assembler);
 
+  // Generates code to perform the comparison via |Generator| while gathering
+  // type feedback.
+  template <class Generator>
+  void DoCompareOpWithFeedback(InterpreterAssembler* assembler);
+
   // Generates code to perform the bitwise binary operation corresponding to
   // |bitwise_op| while gathering type feedback.
   void DoBitwiseBinaryOp(Token::Value bitwise_op,
@@ -118,18 +124,6 @@
   // Generates code to perform a JS call that collects type feedback.
   void DoJSCall(InterpreterAssembler* assembler, TailCallMode tail_call_mode);
 
-  // Generates code to perform a runtime call.
-  void DoCallRuntimeCommon(InterpreterAssembler* assembler);
-
-  // Generates code to perform a runtime call returning a pair.
-  void DoCallRuntimeForPairCommon(InterpreterAssembler* assembler);
-
-  // Generates code to perform a JS runtime call.
-  void DoCallJSRuntimeCommon(InterpreterAssembler* assembler);
-
-  // Generates code to perform a constructor call.
-  void DoCallConstruct(InterpreterAssembler* assembler);
-
   // Generates code to perform delete via function_id.
   void DoDelete(Runtime::FunctionId function_id,
                 InterpreterAssembler* assembler);
@@ -138,18 +132,28 @@
   void DoLdaLookupSlot(Runtime::FunctionId function_id,
                        InterpreterAssembler* assembler);
 
-  // Generates code to perform a lookup slot store depending on |language_mode|.
+  // Generates code to perform a lookup slot load via |function_id| that can
+  // fast path to a context slot load.
+  void DoLdaLookupContextSlot(Runtime::FunctionId function_id,
+                              InterpreterAssembler* assembler);
+
+  // Generates code to perform a lookup slot load via |function_id| that can
+  // fast path to a global load.
+  void DoLdaLookupGlobalSlot(Runtime::FunctionId function_id,
+                             InterpreterAssembler* assembler);
+
+  // Generates code to perform a lookup slot store depending on
+  // |language_mode|.
   void DoStaLookupSlot(LanguageMode language_mode,
                        InterpreterAssembler* assembler);
 
-  // Generates a node with the undefined constant.
-  compiler::Node* BuildLoadUndefined(InterpreterAssembler* assembler);
-
   // Generates code to load a context slot.
   compiler::Node* BuildLoadContextSlot(InterpreterAssembler* assembler);
 
   // Generates code to load a global.
-  compiler::Node* BuildLoadGlobal(Callable ic, InterpreterAssembler* assembler);
+  compiler::Node* BuildLoadGlobal(Callable ic, compiler::Node* context,
+                                  compiler::Node* feedback_slot,
+                                  InterpreterAssembler* assembler);
 
   // Generates code to load a named property.
   compiler::Node* BuildLoadNamedProperty(Callable ic,
diff --git a/src/interpreter/mkpeephole.cc b/src/interpreter/mkpeephole.cc
index 8e9d5fe..270fe83 100644
--- a/src/interpreter/mkpeephole.cc
+++ b/src/interpreter/mkpeephole.cc
@@ -146,6 +146,9 @@
             Bytecode::kIllegal};
   }
 
+  // TODO(rmcilroy): Add elide for consecutive mov to and from the same
+  // register.
+
   // Remove ToBoolean coercion from conditional jumps where possible.
   if (Bytecodes::WritesBooleanToAccumulator(last)) {
     if (Bytecodes::IsJumpIfToBoolean(current)) {
diff --git a/src/isolate-inl.h b/src/isolate-inl.h
index 5c71d91..34c98bb 100644
--- a/src/isolate-inl.h
+++ b/src/isolate-inl.h
@@ -76,6 +76,11 @@
   return exception != heap()->termination_exception();
 }
 
+bool Isolate::is_catchable_by_wasm(Object* exception) {
+  return is_catchable_by_javascript(exception) &&
+         (exception->IsNumber() || exception->IsSmi());
+}
+
 void Isolate::FireBeforeCallEnteredCallback() {
   for (int i = 0; i < before_call_entered_callbacks_.length(); i++) {
     before_call_entered_callbacks_.at(i)(reinterpret_cast<v8::Isolate*>(this));
@@ -100,20 +105,6 @@
   isolate_->set_pending_exception(*pending_exception_);
 }
 
-SaveContext::SaveContext(Isolate* isolate)
-    : isolate_(isolate), prev_(isolate->save_context()) {
-  if (isolate->context() != NULL) {
-    context_ = Handle<Context>(isolate->context());
-  }
-  isolate->set_save_context(this);
-  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
-}
-
-SaveContext::~SaveContext() {
-  isolate_->set_context(context_.is_null() ? NULL : *context_);
-  isolate_->set_save_context(prev_);
-}
-
 #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)     \
   Handle<type> Isolate::name() {                             \
     return Handle<type>(raw_native_context()->name(), this); \
@@ -147,6 +138,11 @@
   return has_instance_cell->value() == Smi::FromInt(kArrayProtectorValid);
 }
 
+bool Isolate::IsStringLengthOverflowIntact() {
+  PropertyCell* has_instance_cell = heap()->string_length_protector();
+  return has_instance_cell->value() == Smi::FromInt(kArrayProtectorValid);
+}
+
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/isolate.cc b/src/isolate.cc
index e14db60..63c927b 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -10,7 +10,6 @@
 #include <sstream>
 
 #include "src/ast/context-slot-cache.h"
-#include "src/base/accounting-allocator.h"
 #include "src/base/hashmap.h"
 #include "src/base/platform/platform.h"
 #include "src/base/sys-info.h"
@@ -28,6 +27,7 @@
 #include "src/external-reference-table.h"
 #include "src/frames-inl.h"
 #include "src/ic/stub-cache.h"
+#include "src/interface-descriptors.h"
 #include "src/interpreter/interpreter.h"
 #include "src/isolate-inl.h"
 #include "src/libsampler/sampler.h"
@@ -43,6 +43,7 @@
 #include "src/version.h"
 #include "src/vm-state-inl.h"
 #include "src/wasm/wasm-module.h"
+#include "src/zone/accounting-allocator.h"
 
 namespace v8 {
 namespace internal {
@@ -315,21 +316,7 @@
   base::OS::Abort();
 }
 
-static Handle<FixedArray> MaybeGrow(Isolate* isolate,
-                                    Handle<FixedArray> elements,
-                                    int cur_position, int new_size) {
-  if (new_size > elements->length()) {
-    int new_capacity = JSObject::NewElementsCapacity(elements->length());
-    Handle<FixedArray> new_elements =
-        isolate->factory()->NewFixedArrayWithHoles(new_capacity);
-    for (int i = 0; i < cur_position; i++) {
-      new_elements->set(i, elements->get(i));
-    }
-    elements = new_elements;
-  }
-  DCHECK(new_size <= elements->length());
-  return elements;
-}
+namespace {
 
 class StackTraceHelper {
  public:
@@ -351,21 +338,17 @@
         break;
     }
     encountered_strict_function_ = false;
-    sloppy_frames_ = 0;
   }
 
+  // Poison stack frames below the first strict mode frame.
   // The stack trace API should not expose receivers and function
   // objects on frames deeper than the top-most one with a strict mode
-  // function. The number of sloppy frames is stored as first element in
-  // the result array.
-  void CountSloppyFrames(JSFunction* fun) {
+  // function.
+  bool IsStrictFrame(JSFunction* fun) {
     if (!encountered_strict_function_) {
-      if (is_strict(fun->shared()->language_mode())) {
-        encountered_strict_function_ = true;
-      } else {
-        sloppy_frames_++;
-      }
+      encountered_strict_function_ = is_strict(fun->shared()->language_mode());
     }
+    return encountered_strict_function_;
   }
 
   // Determines whether the given stack frame should be displayed in a stack
@@ -375,8 +358,6 @@
            IsInSameSecurityContext(fun);
   }
 
-  int sloppy_frames() const { return sloppy_frames_; }
-
  private:
   // This mechanism excludes a number of uninteresting frames from the stack
   // trace. This can be the first frame (which will be a builtin-exit frame
   // trace. This can be the first frame (which will be a builtin-exit frame
@@ -422,12 +403,9 @@
   const Handle<Object> caller_;
   bool skip_next_frame_;
 
-  int sloppy_frames_;
   bool encountered_strict_function_;
 };
 
-namespace {
-
 // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g. the
 // receiver in RegExp constructor frames).
 Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
@@ -435,35 +413,36 @@
              ? Handle<Object>::cast(isolate->factory()->undefined_value())
              : in;
 }
+
+bool GetStackTraceLimit(Isolate* isolate, int* result) {
+  Handle<JSObject> error = isolate->error_function();
+
+  Handle<String> key = isolate->factory()->stackTraceLimit_string();
+  Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
+  if (!stack_trace_limit->IsNumber()) return false;
+
+  // Ensure that limit is not negative.
+  *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);
+  return true;
 }
 
+}  // namespace
+
 Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                 FrameSkipMode mode,
                                                 Handle<Object> caller) {
   DisallowJavascriptExecution no_js(this);
 
-  // Get stack trace limit.
-  Handle<JSObject> error = error_function();
-  Handle<String> stackTraceLimit =
-      factory()->InternalizeUtf8String("stackTraceLimit");
-  DCHECK(!stackTraceLimit.is_null());
-  Handle<Object> stack_trace_limit =
-      JSReceiver::GetDataProperty(error, stackTraceLimit);
-  if (!stack_trace_limit->IsNumber()) return factory()->undefined_value();
-  int limit = FastD2IChecked(stack_trace_limit->Number());
-  limit = Max(limit, 0);  // Ensure that limit is not negative.
+  int limit;
+  if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();
 
-  int initial_size = Min(limit, 10);
-  Handle<FixedArray> elements =
-      factory()->NewFixedArrayWithHoles(initial_size * 4 + 1);
+  const int initial_size = Min(limit, 10);
+  Handle<FrameArray> elements = factory()->NewFrameArray(initial_size);
 
   StackTraceHelper helper(this, mode, caller);
 
-  // First element is reserved to store the number of sloppy frames.
-  int cursor = 1;
-  int frames_seen = 0;
-  for (StackFrameIterator iter(this); !iter.done() && frames_seen < limit;
-       iter.Advance()) {
+  for (StackFrameIterator iter(this);
+       !iter.done() && elements->FrameCount() < limit; iter.Advance()) {
     StackFrame* frame = iter.frame();
 
     switch (frame->type()) {
@@ -481,26 +460,27 @@
 
           // Filter out internal frames that we do not want to show.
           if (!helper.IsVisibleInStackTrace(*fun)) continue;
-          helper.CountSloppyFrames(*fun);
 
           Handle<Object> recv = frames[i].receiver();
           Handle<AbstractCode> abstract_code = frames[i].abstract_code();
+          const int offset = frames[i].code_offset();
+
+          bool force_constructor = false;
           if (frame->type() == StackFrame::BUILTIN) {
             // Help CallSite::IsConstructor correctly detect hand-written
             // construct stubs.
-            Code* code = Code::cast(*abstract_code);
-            if (code->is_construct_stub()) {
-              recv = handle(heap()->call_site_constructor_symbol(), this);
+            if (Code::cast(*abstract_code)->is_construct_stub()) {
+              force_constructor = true;
             }
           }
-          Handle<Smi> offset(Smi::FromInt(frames[i].code_offset()), this);
 
-          elements = MaybeGrow(this, elements, cursor, cursor + 4);
-          elements->set(cursor++, *TheHoleToUndefined(this, recv));
-          elements->set(cursor++, *fun);
-          elements->set(cursor++, *abstract_code);
-          elements->set(cursor++, *offset);
-          frames_seen++;
+          int flags = 0;
+          if (helper.IsStrictFrame(*fun)) flags |= FrameArray::kIsStrict;
+          if (force_constructor) flags |= FrameArray::kForceConstructor;
+
+          elements = FrameArray::AppendJSFrame(
+              elements, TheHoleToUndefined(this, recv), fun, abstract_code,
+              offset, flags);
         }
       } break;
 
@@ -510,54 +490,49 @@
 
         // Filter out internal frames that we do not want to show.
         if (!helper.IsVisibleInStackTrace(*fun)) continue;
-        helper.CountSloppyFrames(*fun);
 
-        Handle<Code> code = handle(exit_frame->LookupCode(), this);
-        int offset =
+        Handle<Object> recv(exit_frame->receiver(), this);
+        Handle<Code> code(exit_frame->LookupCode(), this);
+        const int offset =
             static_cast<int>(exit_frame->pc() - code->instruction_start());
 
-        // In order to help CallSite::IsConstructor detect builtin constructors,
-        // we reuse the receiver field to pass along a special symbol.
-        Handle<Object> recv;
-        if (exit_frame->IsConstructor()) {
-          recv = factory()->call_site_constructor_symbol();
-        } else {
-          recv = handle(exit_frame->receiver(), this);
-        }
+        int flags = 0;
+        if (helper.IsStrictFrame(*fun)) flags |= FrameArray::kIsStrict;
+        if (exit_frame->IsConstructor()) flags |= FrameArray::kForceConstructor;
 
-        elements = MaybeGrow(this, elements, cursor, cursor + 4);
-        elements->set(cursor++, *recv);
-        elements->set(cursor++, *fun);
-        elements->set(cursor++, *code);
-        elements->set(cursor++, Smi::FromInt(offset));
-        frames_seen++;
+        elements = FrameArray::AppendJSFrame(elements, recv, fun,
+                                             Handle<AbstractCode>::cast(code),
+                                             offset, flags);
       } break;
 
       case StackFrame::WASM: {
         WasmFrame* wasm_frame = WasmFrame::cast(frame);
+        Handle<Object> wasm_object(wasm_frame->wasm_obj(), this);
+        const int wasm_function_index = wasm_frame->function_index();
         Code* code = wasm_frame->unchecked_code();
-        Handle<AbstractCode> abstract_code =
-            Handle<AbstractCode>(AbstractCode::cast(code), this);
-        int offset =
+        Handle<AbstractCode> abstract_code(AbstractCode::cast(code), this);
+        const int offset =
             static_cast<int>(wasm_frame->pc() - code->instruction_start());
-        elements = MaybeGrow(this, elements, cursor, cursor + 4);
-        elements->set(cursor++, wasm_frame->wasm_obj());
-        elements->set(cursor++, Smi::FromInt(wasm_frame->function_index()));
-        elements->set(cursor++, *abstract_code);
-        elements->set(cursor++, Smi::FromInt(offset));
-        frames_seen++;
+
+        // TODO(wasm): The wasm object returned by the WasmFrame should always
+        //             be a wasm object.
+        DCHECK(wasm::IsWasmObject(*wasm_object) ||
+               wasm_object->IsUndefined(this));
+
+        elements = FrameArray::AppendWasmFrame(
+            elements, wasm_object, wasm_function_index, abstract_code, offset,
+            FrameArray::kIsWasmFrame);
       } break;
 
       default:
         break;
     }
   }
-  elements->set(0, Smi::FromInt(helper.sloppy_frames()));
-  elements->Shrink(cursor);
-  Handle<JSArray> result = factory()->NewJSArrayWithElements(elements);
-  result->set_length(Smi::FromInt(cursor));
+
+  elements->ShrinkToFit();
+
   // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
-  return result;
+  return factory()->NewJSArrayWithElements(elements);
 }
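
CaptureSimpleStackTrace no longer packs four raw slots per frame into a FixedArray with the sloppy-frame count in slot 0; it appends typed records to a FrameArray and encodes strictness and constructor-ness as per-frame flags. A standalone container sketch that mirrors that shape (field names and methods are stand-ins, not V8's FrameArray API):

```cpp
#include <cstdio>
#include <string>
#include <vector>

// One record per captured frame, replacing the old 4-slots-per-frame layout.
struct FrameRecord {
  std::string function;
  int code_offset;
  int flags;
};

class FrameArray {
 public:
  static constexpr int kIsStrict = 1 << 0;
  static constexpr int kForceConstructor = 1 << 1;
  static constexpr int kIsWasmFrame = 1 << 2;

  void AppendJSFrame(std::string function, int offset, int flags) {
    frames_.push_back({std::move(function), offset, flags});
  }
  int FrameCount() const { return static_cast<int>(frames_.size()); }
  const FrameRecord& at(int i) const { return frames_[i]; }

 private:
  std::vector<FrameRecord> frames_;
};

int main() {
  FrameArray elements;
  elements.AppendJSFrame("outerSloppy", 12, 0);
  elements.AppendJSFrame("innerStrict", 34, FrameArray::kIsStrict);
  for (int i = 0; i < elements.FrameCount(); i++) {
    const FrameRecord& f = elements.at(i);
    std::printf("%s @%d%s\n", f.function.c_str(), f.code_offset,
                (f.flags & FrameArray::kIsStrict) ? " [strict]" : "");
  }
}
```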
 
 MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
@@ -764,19 +739,6 @@
   Handle<String> constructor_key_;
 };
 
-
-int PositionFromStackTrace(Handle<FixedArray> elements, int index) {
-  DisallowHeapAllocation no_gc;
-  Object* maybe_code = elements->get(index + 2);
-  if (maybe_code->IsSmi()) {
-    return Smi::cast(maybe_code)->value();
-  } else {
-    AbstractCode* abstract_code = AbstractCode::cast(maybe_code);
-    int code_offset = Smi::cast(elements->get(index + 3))->value();
-    return abstract_code->SourcePosition(code_offset);
-  }
-}
-
 Handle<JSArray> Isolate::CaptureCurrentStackTrace(
     int frame_limit, StackTrace::StackTraceOptions options) {
   DisallowJavascriptExecution no_js(this);
@@ -963,6 +925,10 @@
 
 
 Object* Isolate::StackOverflow() {
+  if (FLAG_abort_on_stack_overflow) {
+    FATAL("Aborting on stack overflow");
+  }
+
   DisallowJavascriptExecution no_js(this);
   HandleScope scope(this);
 
@@ -979,7 +945,8 @@
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && FLAG_stress_compaction) {
-    heap()->CollectAllGarbage(Heap::kNoGCFlags, "trigger compaction");
+    heap()->CollectAllGarbage(Heap::kNoGCFlags,
+                              GarbageCollectionReason::kTesting);
   }
 #endif  // VERIFY_HEAP
 
@@ -1017,6 +984,8 @@
 
 
 void Isolate::InvokeApiInterruptCallbacks() {
+  RuntimeCallTimerScope runtimeTimer(
+      this, &RuntimeCallStats::InvokeApiInterruptCallbacks);
   // Note: callback below should be called outside of execution access lock.
   while (true) {
     InterruptEntry entry;
@@ -1180,8 +1149,8 @@
   Address handler_sp = nullptr;
   Address handler_fp = nullptr;
 
-  // Special handling of termination exceptions, uncatchable by JavaScript code,
-  // we unwind the handlers until the top ENTRY handler is found.
+  // Termination exceptions are uncatchable by JavaScript and Wasm code, so we
+  // unwind the handlers until the top ENTRY handler is found.
   bool catchable_by_js = is_catchable_by_javascript(exception);
 
   // Compute handler and stack unwinding information by performing a full walk
@@ -1203,6 +1172,28 @@
       break;
     }
 
+    if (FLAG_wasm_eh_prototype) {
+      if (frame->is_wasm() && is_catchable_by_wasm(exception)) {
+        int stack_slots = 0;  // Will contain stack slot count of frame.
+        WasmFrame* wasm_frame = static_cast<WasmFrame*>(frame);
+        offset = wasm_frame->LookupExceptionHandlerInTable(&stack_slots);
+        if (offset >= 0) {
+          // Compute the stack pointer from the frame pointer. This ensures that
+          // argument slots on the stack are dropped as returning would.
+          Address return_sp = frame->fp() +
+                              StandardFrameConstants::kFixedFrameSizeAboveFp -
+                              stack_slots * kPointerSize;
+
+          // Gather information from the frame.
+          code = frame->LookupCode();
+
+          handler_sp = return_sp;
+          handler_fp = frame->fp();
+          break;
+        }
+      }
+    }
+
     // For optimized frames we perform a lookup in the handler table.
     if (frame->is_optimized() && catchable_by_js) {
       OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
@@ -1349,6 +1340,8 @@
       JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
       HandlerTable::CatchPrediction prediction = PredictException(js_frame);
       if (prediction == HandlerTable::DESUGARING) return CAUGHT_BY_DESUGARING;
+      if (prediction == HandlerTable::ASYNC_AWAIT) return CAUGHT_BY_ASYNC_AWAIT;
+      if (prediction == HandlerTable::PROMISE) return CAUGHT_BY_PROMISE;
       if (prediction != HandlerTable::UNCAUGHT) return CAUGHT_BY_JAVASCRIPT;
     }
 
@@ -1425,36 +1418,20 @@
 
 
 void Isolate::PrintCurrentStackTrace(FILE* out) {
-  StackTraceFrameIterator it(this);
-  while (!it.done()) {
+  for (StackTraceFrameIterator it(this); !it.done(); it.Advance()) {
+    if (!it.is_javascript()) continue;
+
     HandleScope scope(this);
-    // Find code position if recorded in relocation info.
-    StandardFrame* frame = it.frame();
-    AbstractCode* abstract_code;
-    int code_offset;
-    if (frame->is_interpreted()) {
-      InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
-      abstract_code = AbstractCode::cast(iframe->GetBytecodeArray());
-      code_offset = iframe->GetBytecodeOffset();
-    } else {
-      DCHECK(frame->is_java_script() || frame->is_wasm());
-      Code* code = frame->LookupCode();
-      abstract_code = AbstractCode::cast(code);
-      code_offset = static_cast<int>(frame->pc() - code->instruction_start());
-    }
-    int pos = abstract_code->SourcePosition(code_offset);
-    JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
-    Handle<Object> pos_obj(Smi::FromInt(pos), this);
-    // Fetch function and receiver.
-    Handle<JSFunction> fun(js_frame->function(), this);
-    Handle<Object> recv(js_frame->receiver(), this);
-    // Advance to the next JavaScript frame and determine if the
-    // current frame is the top-level frame.
-    it.Advance();
-    Handle<Object> is_top_level = factory()->ToBoolean(it.done());
-    // Generate and print stack trace line.
-    Handle<String> line =
-        Execution::GetStackTraceLine(recv, fun, pos_obj, is_top_level);
+    JavaScriptFrame* frame = it.javascript_frame();
+
+    Handle<Object> receiver(frame->receiver(), this);
+    Handle<JSFunction> function(frame->function(), this);
+    Handle<AbstractCode> code(AbstractCode::cast(frame->LookupCode()), this);
+    const int offset =
+        static_cast<int>(frame->pc() - code->instruction_start());
+
+    JSStackFrame site(this, receiver, function, code, offset);
+    Handle<String> line = site.ToString().ToHandleChecked();
     if (line->length() > 0) {
       line->PrintOn(out);
       PrintF(out, "\n");
@@ -1522,22 +1499,25 @@
   if (!property->IsJSArray()) return false;
   Handle<JSArray> simple_stack_trace = Handle<JSArray>::cast(property);
 
-  Handle<FixedArray> elements(FixedArray::cast(simple_stack_trace->elements()));
-  int elements_limit = Smi::cast(simple_stack_trace->length())->value();
+  Handle<FrameArray> elements(FrameArray::cast(simple_stack_trace->elements()));
 
-  for (int i = 1; i < elements_limit; i += 4) {
-    Handle<Object> fun_obj = handle(elements->get(i + 1), this);
-    if (fun_obj->IsSmi()) {
+  const int frame_count = elements->FrameCount();
+  for (int i = 0; i < frame_count; i++) {
+    if (elements->IsWasmFrame(i)) {
       // TODO(clemensh): handle wasm frames
       return false;
     }
-    Handle<JSFunction> fun = Handle<JSFunction>::cast(fun_obj);
+
+    Handle<JSFunction> fun = handle(elements->Function(i), this);
     if (!fun->shared()->IsSubjectToDebugging()) continue;
 
     Object* script = fun->shared()->script();
     if (script->IsScript() &&
         !(Script::cast(script)->source()->IsUndefined(this))) {
-      int pos = PositionFromStackTrace(elements, i);
+      AbstractCode* abstract_code = elements->Code(i);
+      const int code_offset = elements->Offset(i)->value();
+      const int pos = abstract_code->SourcePosition(code_offset);
+
       Handle<Script> casted_script(Script::cast(script));
       *target = MessageLocation(casted_script, pos, pos + 1);
       return true;
@@ -1752,6 +1732,22 @@
   global_handles()->Destroy(global_promise.location());
 }
 
+bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<Object> promise) {
+  Handle<JSFunction> fun = promise_has_user_defined_reject_handler();
+  Handle<Object> has_reject_handler;
+  // If we are, e.g., overflowing the stack, don't try to call out to JS
+  if (!AllowJavascriptExecution::IsAllowed(this)) return false;
+  // Call the registered function to check for a handler
+  if (Execution::TryCall(this, fun, promise, 0, NULL)
+          .ToHandle(&has_reject_handler)) {
+    return has_reject_handler->IsTrue(this);
+  }
+  // If an exception is thrown in the course of execution of this built-in
+  // function, it indicates either a bug, or a synthetic uncatchable
+  // exception in the shutdown path. In either case, it's OK to predict either
+  // way in DevTools.
+  return false;
+}
 
 Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
   Handle<Object> undefined = factory()->undefined_value();
@@ -1762,18 +1758,49 @@
   if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
     return undefined;
   }
+  Handle<Object> retval = undefined;
+  PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
   for (JavaScriptFrameIterator it(this); !it.done(); it.Advance()) {
     switch (PredictException(it.frame())) {
       case HandlerTable::UNCAUGHT:
-        break;
+        continue;
       case HandlerTable::CAUGHT:
       case HandlerTable::DESUGARING:
-        return undefined;
+        if (retval->IsJSObject()) {
+          // Caught the result of an inner async/await invocation.
+          // Mark the inner promise as caught in the "synchronous case" so
+          // that Debug::OnException will see. In the synchronous case,
+          // namely in the code in an async function before the first
+          // await, the function which has this exception event has not yet
+          // returned, so the generated Promise has not yet been marked
+          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
+          Handle<Symbol> key = factory()->promise_handled_hint_symbol();
+          JSObject::SetProperty(Handle<JSObject>::cast(retval), key,
+                                factory()->true_value(), STRICT)
+              .Assert();
+        }
+        return retval;
       case HandlerTable::PROMISE:
-        return tltop->promise_on_stack_->promise();
+        return promise_on_stack
+                   ? Handle<Object>::cast(promise_on_stack->promise())
+                   : undefined;
+      case HandlerTable::ASYNC_AWAIT: {
+        // If in the initial portion of async/await, continue the loop to pop up
+        // successive async/await stack frames until an asynchronous one with
+        // dependents is found, or a non-async stack frame is encountered, in
+        // order to handle the synchronous async/await catch prediction case:
+        // assume that async function calls are awaited.
+        if (!promise_on_stack) return retval;
+        retval = promise_on_stack->promise();
+        if (PromiseHasUserDefinedRejectHandler(retval)) {
+          return retval;
+        }
+        promise_on_stack = promise_on_stack->prev();
+        continue;
+      }
     }
   }
-  return undefined;
+  return retval;
 }
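
For the ASYNC_AWAIT prediction the loop now walks the thread-local PromiseOnStack chain, popping entries until it finds a promise with a user-defined reject handler or the chain runs out. A simplified sketch of that walk alone, leaving out the interleaved per-frame prediction:

```cpp
#include <cstdio>

// Stand-in for the per-thread promise stack consulted during catch
// prediction; the real code walks JS frames and these entries in lockstep.
struct PromiseOnStack {
  const char* promise;
  bool has_user_reject_handler;
  PromiseOnStack* prev;
};

// Keep popping async/await entries until a promise with a user-defined reject
// handler is found (assume the call is awaited and caught there), otherwise
// report that nothing will catch.
const char* PredictAwaitedCatcher(PromiseOnStack* top) {
  for (PromiseOnStack* p = top; p != nullptr; p = p->prev) {
    if (p->has_user_reject_handler) return p->promise;
  }
  return "<uncaught>";
}

int main() {
  PromiseOnStack outer{"outerPromise", true, nullptr};
  PromiseOnStack inner{"innerPromise", false, &outer};
  std::printf("caught by: %s\n", PredictAwaitedCatcher(&inner));
}
```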
 
 
@@ -1904,13 +1931,13 @@
 #define TRACE_ISOLATE(tag)
 #endif
 
-class VerboseAccountingAllocator : public base::AccountingAllocator {
+class VerboseAccountingAllocator : public AccountingAllocator {
  public:
   VerboseAccountingAllocator(Heap* heap, size_t sample_bytes)
       : heap_(heap), last_memory_usage_(0), sample_bytes_(sample_bytes) {}
 
-  void* Allocate(size_t size) override {
-    void* memory = base::AccountingAllocator::Allocate(size);
+  v8::internal::Segment* AllocateSegment(size_t size) override {
+    v8::internal::Segment* memory = AccountingAllocator::AllocateSegment(size);
     if (memory) {
       size_t current = GetCurrentMemoryUsage();
       if (last_memory_usage_.Value() + sample_bytes_ < current) {
@@ -1921,8 +1948,8 @@
     return memory;
   }
 
-  void Free(void* memory, size_t bytes) override {
-    base::AccountingAllocator::Free(memory, bytes);
+  void FreeSegment(v8::internal::Segment* memory) override {
+    AccountingAllocator::FreeSegment(memory);
     size_t current = GetCurrentMemoryUsage();
     if (current + sample_bytes_ < last_memory_usage_.Value()) {
       PrintJSON(current);
@@ -1977,9 +2004,8 @@
       unicode_cache_(NULL),
       allocator_(FLAG_trace_gc_object_stats
                      ? new VerboseAccountingAllocator(&heap_, 256 * KB)
-                     : new base::AccountingAllocator()),
+                     : new AccountingAllocator()),
       runtime_zone_(new Zone(allocator_)),
-      interface_descriptor_zone_(new Zone(allocator_)),
       inner_pointer_to_code_cache_(NULL),
       global_handles_(NULL),
       eternal_handles_(NULL),
@@ -2004,8 +2030,6 @@
       deferred_handles_head_(NULL),
       optimizing_compile_dispatcher_(NULL),
       stress_deopt_count_(0),
-      virtual_handler_register_(NULL),
-      virtual_slot_register_(NULL),
       next_optimization_id_(0),
       js_calls_from_api_counter_(0),
 #if TRACE_MAPS
@@ -2258,9 +2282,6 @@
   delete runtime_zone_;
   runtime_zone_ = nullptr;
 
-  delete interface_descriptor_zone_;
-  interface_descriptor_zone_ = nullptr;
-
   delete allocator_;
   allocator_ = nullptr;
 
@@ -2399,6 +2420,12 @@
     return false;
   }
 
+// Initialize the interface descriptors ahead of time.
+#define INTERFACE_DESCRIPTOR(V) \
+  { V##Descriptor(this); }
+  INTERFACE_DESCRIPTOR_LIST(INTERFACE_DESCRIPTOR)
+#undef INTERFACE_DESCRIPTOR
+
   deoptimizer_data_ = new DeoptimizerData(heap()->memory_allocator());
 
   const bool create_heap_objects = (des == NULL);
@@ -2436,13 +2463,19 @@
   runtime_profiler_ = new RuntimeProfiler(this);
 
   // If we are deserializing, read the state into the now-empty heap.
-  if (!create_heap_objects) {
-    des->Deserialize(this);
-  }
-  load_stub_cache_->Initialize();
-  store_stub_cache_->Initialize();
-  if (FLAG_ignition || serializer_enabled()) {
-    interpreter_->Initialize();
+  {
+    AlwaysAllocateScope always_allocate(this);
+
+    if (!create_heap_objects) {
+      des->Deserialize(this);
+    }
+    load_stub_cache_->Initialize();
+    store_stub_cache_->Initialize();
+    if (FLAG_ignition || serializer_enabled()) {
+      interpreter_->Initialize();
+    }
+
+    heap_.NotifyDeserializationComplete();
   }
 
   // Finish initialization of ThreadLocal after deserialization is done.
@@ -2473,8 +2506,6 @@
 
   time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
 
-  heap_.NotifyDeserializationComplete();
-
   if (!create_heap_objects) {
     // Now that the heap is consistent, it's OK to generate the code for the
     // deopt entry table that might have been referred to by optimized code in
@@ -2620,7 +2651,8 @@
   turbo_statistics_ = nullptr;
   delete hstatistics_;
   hstatistics_ = nullptr;
-  if (FLAG_runtime_call_stats) {
+  if (FLAG_runtime_call_stats &&
+      !TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED()) {
     OFStream os(stdout);
     counters()->runtime_call_stats()->Print(os);
     counters()->runtime_call_stats()->Reset();
@@ -2823,6 +2855,15 @@
   DCHECK(!IsArraySpeciesLookupChainIntact());
 }
 
+void Isolate::InvalidateStringLengthOverflowProtector() {
+  DCHECK(factory()->string_length_protector()->value()->IsSmi());
+  DCHECK(IsStringLengthOverflowIntact());
+  PropertyCell::SetValueWithInvalidation(
+      factory()->string_length_protector(),
+      handle(Smi::FromInt(kArrayProtectorInvalid), this));
+  DCHECK(!IsStringLengthOverflowIntact());
+}
+
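
The string-length-overflow protector follows the usual protector pattern: a PropertyCell holds a small "valid" sentinel, fast paths check it, and invalidation flips it exactly once. A self-contained sketch of that pattern with illustrative constants:

```cpp
#include <cassert>
#include <cstdio>

// Illustrative sentinel values; the real code stores Smis in a PropertyCell.
constexpr int kProtectorValid = 1;
constexpr int kProtectorInvalid = 0;

struct PropertyCell { int value; };

struct StringLengthProtector {
  PropertyCell cell{kProtectorValid};
  bool IsIntact() const { return cell.value == kProtectorValid; }
  void Invalidate() {
    assert(IsIntact());  // invalidation is one-way
    cell.value = kProtectorInvalid;
  }
};

int main() {
  StringLengthProtector protector;
  std::printf("intact before: %d\n", protector.IsIntact());
  protector.Invalidate();  // e.g. a concatenation overflowed String::kMaxLength
  std::printf("intact after: %d\n", protector.IsIntact());
}
```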
 bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
   DisallowHeapAllocation no_gc;
   return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
@@ -2964,9 +3005,44 @@
       v8::Utils::StackTraceToLocal(stack_trace)));
 }
 
+void Isolate::PromiseResolveThenableJob(Handle<PromiseContainer> container,
+                                        MaybeHandle<Object>* result,
+                                        MaybeHandle<Object>* maybe_exception) {
+  if (debug()->is_active()) {
+    Handle<Object> before_debug_event(container->before_debug_event(), this);
+    if (before_debug_event->IsJSObject()) {
+      debug()->OnAsyncTaskEvent(Handle<JSObject>::cast(before_debug_event));
+    }
+  }
+
+  Handle<JSReceiver> thenable(container->thenable(), this);
+  Handle<JSFunction> resolve(container->resolve(), this);
+  Handle<JSFunction> reject(container->reject(), this);
+  Handle<JSReceiver> then(container->then(), this);
+  Handle<Object> argv[] = {resolve, reject};
+  *result = Execution::TryCall(this, then, thenable, arraysize(argv), argv,
+                               maybe_exception);
+
+  Handle<Object> reason;
+  if (maybe_exception->ToHandle(&reason)) {
+    DCHECK(result->is_null());
+    Handle<Object> reason_arg[] = {reason};
+    *result =
+        Execution::TryCall(this, reject, factory()->undefined_value(),
+                           arraysize(reason_arg), reason_arg, maybe_exception);
+  }
+
+  if (debug()->is_active()) {
+    Handle<Object> after_debug_event(container->after_debug_event(), this);
+    if (after_debug_event->IsJSObject()) {
+      debug()->OnAsyncTaskEvent(Handle<JSObject>::cast(after_debug_event));
+    }
+  }
+}
 
 void Isolate::EnqueueMicrotask(Handle<Object> microtask) {
-  DCHECK(microtask->IsJSFunction() || microtask->IsCallHandlerInfo());
+  DCHECK(microtask->IsJSFunction() || microtask->IsCallHandlerInfo() ||
+         microtask->IsPromiseContainer());
   Handle<FixedArray> queue(heap()->microtask_queue(), this);
   int num_tasks = pending_microtask_count();
   DCHECK(num_tasks <= queue->length());
@@ -2995,6 +3071,8 @@
 
 
 void Isolate::RunMicrotasksInternal() {
+  if (!pending_microtask_count()) return;
+  TRACE_EVENT0("v8.execute", "RunMicrotasks");
   while (pending_microtask_count() > 0) {
     HandleScope scope(this);
     int num_tasks = pending_microtask_count();
@@ -3006,18 +3084,41 @@
     Isolate* isolate = this;
     FOR_WITH_HANDLE_SCOPE(isolate, int, i = 0, i, i < num_tasks, i++, {
       Handle<Object> microtask(queue->get(i), this);
-      if (microtask->IsJSFunction()) {
-        Handle<JSFunction> microtask_function =
-            Handle<JSFunction>::cast(microtask);
+
+      if (microtask->IsCallHandlerInfo()) {
+        Handle<CallHandlerInfo> callback_info =
+            Handle<CallHandlerInfo>::cast(microtask);
+        v8::MicrotaskCallback callback =
+            v8::ToCData<v8::MicrotaskCallback>(callback_info->callback());
+        void* data = v8::ToCData<void*>(callback_info->data());
+        callback(data);
+      } else {
         SaveContext save(this);
-        set_context(microtask_function->context()->native_context());
+        Context* context = microtask->IsJSFunction()
+                               ? Handle<JSFunction>::cast(microtask)->context()
+                               : Handle<PromiseContainer>::cast(microtask)
+                                     ->resolve()
+                                     ->context();
+        set_context(context->native_context());
         handle_scope_implementer_->EnterMicrotaskContext(
-            handle(microtask_function->context(), this));
+            Handle<Context>(context, this));
+
+        MaybeHandle<Object> result;
         MaybeHandle<Object> maybe_exception;
-        MaybeHandle<Object> result = Execution::TryCall(
-            this, microtask_function, factory()->undefined_value(), 0, NULL,
-            &maybe_exception);
+
+        if (microtask->IsJSFunction()) {
+          Handle<JSFunction> microtask_function =
+              Handle<JSFunction>::cast(microtask);
+          result = Execution::TryCall(this, microtask_function,
+                                      factory()->undefined_value(), 0, NULL,
+                                      &maybe_exception);
+        } else {
+          PromiseResolveThenableJob(Handle<PromiseContainer>::cast(microtask),
+                                    &result, &maybe_exception);
+        }
+
         handle_scope_implementer_->LeaveMicrotaskContext();
+
         // If execution is terminating, just bail out.
         if (result.is_null() && maybe_exception.is_null()) {
           // Clear out any remaining callbacks in the queue.
@@ -3025,13 +3126,6 @@
           set_pending_microtask_count(0);
           return;
         }
-      } else {
-        Handle<CallHandlerInfo> callback_info =
-            Handle<CallHandlerInfo>::cast(microtask);
-        v8::MicrotaskCallback callback =
-            v8::ToCData<v8::MicrotaskCallback>(callback_info->callback());
-        void* data = v8::ToCData<void*>(callback_info->data());
-        callback(data);
       }
     });
   }
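
RunMicrotasksInternal now distinguishes three kinds of queue entries: CallHandlerInfo API callbacks (which need no JS context), plain JS functions, and PromiseContainer entries that run a PromiseResolveThenableJob in the resolve closure's context. A shape-only sketch of that dispatch using stand-in types:

```cpp
#include <cstdio>
#include <functional>
#include <variant>
#include <vector>

// Stand-ins for the three microtask kinds; only the dispatch shape is modeled.
struct ApiCallback { void (*fn)(void*); void* data; };
struct JsFunction { std::function<void()> call; };
struct PromiseContainer { std::function<void()> resolve_thenable_job; };
using Microtask = std::variant<ApiCallback, JsFunction, PromiseContainer>;

void RunMicrotasks(std::vector<Microtask>& queue) {
  for (Microtask& task : queue) {
    if (auto* cb = std::get_if<ApiCallback>(&task)) {
      cb->fn(cb->data);  // plain callback: no context switch needed
    } else if (auto* fn = std::get_if<JsFunction>(&task)) {
      fn->call();        // would run inside the function's native context
    } else {
      std::get<PromiseContainer>(task).resolve_thenable_job();
    }
  }
  queue.clear();
}

int main() {
  std::vector<Microtask> queue;
  queue.push_back(ApiCallback{[](void*) { std::puts("api callback"); }, nullptr});
  queue.push_back(JsFunction{[] { std::puts("js function"); }});
  queue.push_back(PromiseContainer{[] { std::puts("thenable job"); }});
  RunMicrotasks(queue);
}
```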
@@ -3179,6 +3273,15 @@
   is_isolate_in_background_ = false;
 }
 
+void Isolate::PrintWithTimestamp(const char* format, ...) {
+  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
+                  static_cast<void*>(this), time_millis_since_init());
+  va_list arguments;
+  va_start(arguments, format);
+  base::OS::VPrint(format, arguments);
+  va_end(arguments);
+}
+
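
PrintWithTimestamp is the standard varargs-forwarding pattern: emit a fixed prefix, then hand the caller's format string and va_list to the v-variant of the printer. A portable sketch with a simplified prefix (just a Unix timestamp instead of pid, isolate pointer, and milliseconds since init):

```cpp
#include <cstdarg>
#include <cstdio>
#include <ctime>

// Prefix first, then forward the variable arguments to vfprintf.
void PrintWithTimestamp(const char* format, ...) {
  std::fprintf(stderr, "[%lld] ", static_cast<long long>(std::time(nullptr)));
  va_list arguments;
  va_start(arguments, format);
  std::vfprintf(stderr, format, arguments);
  va_end(arguments);
}

int main() {
  PrintWithTimestamp("heap grew to %d MB\n", 128);
}
```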
 bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
   StackGuard* stack_guard = isolate_->stack_guard();
 #ifdef USE_SIMULATOR
@@ -3190,6 +3293,21 @@
   return GetCurrentStackPosition() - gap < stack_guard->real_climit();
 }
 
+SaveContext::SaveContext(Isolate* isolate)
+    : isolate_(isolate), prev_(isolate->save_context()) {
+  if (isolate->context() != NULL) {
+    context_ = Handle<Context>(isolate->context());
+  }
+  isolate->set_save_context(this);
+
+  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
+}
+
+SaveContext::~SaveContext() {
+  isolate_->set_context(context_.is_null() ? NULL : *context_);
+  isolate_->set_save_context(prev_);
+}
+
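
SaveContext's constructor and destructor move out of isolate-inl.h into the .cc file, but the RAII shape is unchanged: snapshot the current context and the previous SaveContext on entry, restore both on exit. A stand-alone sketch of that save/restore discipline with a minimal Isolate stand-in:

```cpp
#include <cstdio>

// Minimal stand-in: the real Isolate stores a Context* and the head of a
// linked list of SaveContext scopes.
struct Isolate {
  const char* context = nullptr;
  struct SaveScope* save_context = nullptr;
};

struct SaveScope {
  explicit SaveScope(Isolate* isolate)
      : isolate_(isolate),
        prev_(isolate->save_context),
        saved_context_(isolate->context) {
    isolate->save_context = this;  // link into the scope chain
  }
  ~SaveScope() {
    isolate_->context = saved_context_;  // restore on scope exit
    isolate_->save_context = prev_;
  }

 private:
  Isolate* const isolate_;
  SaveScope* const prev_;
  const char* const saved_context_;
};

int main() {
  Isolate isolate;
  isolate.context = "native_context_A";
  {
    SaveScope save(&isolate);
    isolate.context = "microtask_context_B";
    std::printf("inside scope: %s\n", isolate.context);
  }
  std::printf("after scope:  %s\n", isolate.context);
}
```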
 #ifdef DEBUG
 AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
     : isolate_(isolate), context_(isolate->context(), isolate) {}
diff --git a/src/isolate.h b/src/isolate.h
index eb1841d..8d0d3b4 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -23,13 +23,11 @@
 #include "src/messages.h"
 #include "src/regexp/regexp-stack.h"
 #include "src/runtime/runtime.h"
-#include "src/tracing/trace-event.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 
 namespace base {
-class AccountingAllocator;
 class RandomNumberGenerator;
 }
 
@@ -52,6 +50,7 @@
 class CpuFeatures;
 class CpuProfiler;
 class DeoptimizerData;
+class DescriptorLookupCache;
 class Deserializer;
 class EmptyStatement;
 class ExternalCallbackScope;
@@ -63,6 +62,7 @@
 class HTracer;
 class InlineRuntimeFunctionsTable;
 class InnerPointerToCodeCache;
+class KeyedLookupCache;
 class Logger;
 class MaterializedObjectStore;
 class OptimizingCompileDispatcher;
@@ -94,14 +94,6 @@
 class Interpreter;
 }
 
-// Static indirection table for handles to constants.  If a frame
-// element represents a constant, the data contains an index into
-// this table of handles to the actual constants.
-// Static indirection table for handles to constants.  If a Result
-// represents a constant, the data contains an index into this table
-// of handles to the actual constants.
-typedef ZoneList<Handle<Object> > ZoneObjectList;
-
 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)    \
   do {                                                    \
     Isolate* __isolate__ = (isolate);                     \
@@ -369,9 +361,9 @@
 
 #if USE_SIMULATOR
 
-#define ISOLATE_INIT_SIMULATOR_LIST(V)       \
-  V(bool, simulator_initialized, false)      \
-  V(base::HashMap*, simulator_i_cache, NULL) \
+#define ISOLATE_INIT_SIMULATOR_LIST(V)                    \
+  V(bool, simulator_initialized, false)                   \
+  V(base::CustomMatcherHashMap*, simulator_i_cache, NULL) \
   V(Redirection*, simulator_redirection, NULL)
 #else
 
@@ -629,6 +621,7 @@
   bool IsExternalHandlerOnTop(Object* exception);
 
   inline bool is_catchable_by_javascript(Object* exception);
+  inline bool is_catchable_by_wasm(Object* exception);
 
   // JS execution stack (see frames.h).
   static Address c_entry_fp(ThreadLocalTop* thread) {
@@ -672,8 +665,14 @@
   // Push and pop a promise and the current try-catch handler.
   void PushPromise(Handle<JSObject> promise);
   void PopPromise();
+
+  // Return the relevant Promise that a throw/rejection pertains to, based
+  // on the contents of the Promise stack
   Handle<Object> GetPromiseOnStackOnThrow();
 
+  // Heuristically guess whether a Promise is handled by a user catch handler
+  bool PromiseHasUserDefinedRejectHandler(Handle<Object> promise);
+
   class ExceptionScope {
    public:
     // Scope currently can only be used for regular exceptions,
@@ -750,7 +749,9 @@
     NOT_CAUGHT,
     CAUGHT_BY_JAVASCRIPT,
     CAUGHT_BY_EXTERNAL,
-    CAUGHT_BY_DESUGARING
+    CAUGHT_BY_DESUGARING,
+    CAUGHT_BY_PROMISE,
+    CAUGHT_BY_ASYNC_AWAIT
   };
   CatchType PredictExceptionCatcher();
 
@@ -843,9 +844,6 @@
     DCHECK(counters_ != NULL);
     return counters_;
   }
-  tracing::TraceEventStatsTable* trace_event_stats_table() {
-    return &trace_event_stats_table_;
-  }
   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
   CompilationCache* compilation_cache() { return compilation_cache_; }
   Logger* logger() {
@@ -889,7 +887,6 @@
     return handle_scope_implementer_;
   }
   Zone* runtime_zone() { return runtime_zone_; }
-  Zone* interface_descriptor_zone() { return interface_descriptor_zone_; }
 
   UnicodeCache* unicode_cache() {
     return unicode_cache_;
@@ -1005,6 +1002,7 @@
   inline bool IsHasInstanceLookupChainIntact();
   bool IsIsConcatSpreadableLookupChainIntact();
   bool IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver);
+  inline bool IsStringLengthOverflowIntact();
 
   // On intent to set an element in object, make sure that appropriate
   // notifications occur if the set is on the elements of the array or
@@ -1023,6 +1021,7 @@
   void InvalidateArraySpeciesProtector();
   void InvalidateHasInstanceProtector();
   void InvalidateIsConcatSpreadableProtector();
+  void InvalidateStringLengthOverflowProtector();
 
   // Returns true if array is the initial array prototype in any native context.
   bool IsAnyInitialArrayPrototype(Handle<JSArray> array);
@@ -1064,12 +1063,6 @@
 
   void* stress_deopt_count_address() { return &stress_deopt_count_; }
 
-  void* virtual_handler_register_address() {
-    return &virtual_handler_register_;
-  }
-
-  void* virtual_slot_register_address() { return &virtual_slot_register_; }
-
   base::RandomNumberGenerator* random_number_generator();
 
   // Given an address occupied by a live code object, return that object.
@@ -1108,6 +1101,9 @@
   void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
                            v8::PromiseRejectEvent event);
 
+  void PromiseResolveThenableJob(Handle<PromiseContainer> container,
+                                 MaybeHandle<Object>* result,
+                                 MaybeHandle<Object>* maybe_exception);
   void EnqueueMicrotask(Handle<Object> microtask);
   void RunMicrotasks();
   bool IsRunningMicrotasks() const { return is_running_microtasks_; }
@@ -1153,7 +1149,7 @@
 
   interpreter::Interpreter* interpreter() const { return interpreter_; }
 
-  base::AccountingAllocator* allocator() { return allocator_; }
+  AccountingAllocator* allocator() { return allocator_; }
 
   bool IsInAnyContext(Object* object, uint32_t index);
 
@@ -1165,6 +1161,12 @@
 
   bool IsIsolateInBackground() { return is_isolate_in_background_; }
 
+  PRINTF_FORMAT(2, 3) void PrintWithTimestamp(const char* format, ...);
+
+#ifdef USE_SIMULATOR
+  base::Mutex* simulator_i_cache_mutex() { return &simulator_i_cache_mutex_; }
+#endif
+
  protected:
   explicit Isolate(bool enable_serializer);
   bool IsArrayOrObjectPrototype(Object* object);
@@ -1303,7 +1305,6 @@
   RuntimeProfiler* runtime_profiler_;
   CompilationCache* compilation_cache_;
   Counters* counters_;
-  tracing::TraceEventStatsTable trace_event_stats_table_;
   base::RecursiveMutex break_access_;
   Logger* logger_;
   StackGuard stack_guard_;
@@ -1324,9 +1325,8 @@
   HandleScopeData handle_scope_data_;
   HandleScopeImplementer* handle_scope_implementer_;
   UnicodeCache* unicode_cache_;
-  base::AccountingAllocator* allocator_;
+  AccountingAllocator* allocator_;
   Zone* runtime_zone_;
-  Zone* interface_descriptor_zone_;
   InnerPointerToCodeCache* inner_pointer_to_code_cache_;
   GlobalHandles* global_handles_;
   EternalHandles* eternal_handles_;
@@ -1407,9 +1407,6 @@
   // Counts deopt points if deopt_every_n_times is enabled.
   unsigned int stress_deopt_count_;
 
-  Address virtual_handler_register_;
-  Address virtual_slot_register_;
-
   int next_optimization_id_;
 
   // Counts javascript calls from the API. Wraps around on overflow.
@@ -1443,6 +1440,10 @@
   v8::Isolate::AbortOnUncaughtExceptionCallback
       abort_on_uncaught_exception_callback_;
 
+#ifdef USE_SIMULATOR
+  base::Mutex simulator_i_cache_mutex_;
+#endif
+
   friend class ExecutionAccess;
   friend class HandleScopeImplementer;
   friend class OptimizingCompileDispatcher;
@@ -1485,8 +1486,8 @@
 // versions of GCC. See V8 issue 122 for details.
 class SaveContext BASE_EMBEDDED {
  public:
-  explicit inline SaveContext(Isolate* isolate);
-  inline ~SaveContext();
+  explicit SaveContext(Isolate* isolate);
+  ~SaveContext();
 
   Handle<Context> context() { return context_; }
   SaveContext* prev() { return prev_; }
@@ -1496,8 +1497,6 @@
     return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
   }
 
-  Isolate* isolate() { return isolate_; }
-
  private:
   Isolate* const isolate_;
   Handle<Context> context_;
diff --git a/src/js/async-await.js b/src/js/async-await.js
new file mode 100644
index 0000000..b733f3d
--- /dev/null
+++ b/src/js/async-await.js
@@ -0,0 +1,180 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+(function(global, utils, extrasUtils) {
+
+"use strict";
+
+%CheckIsBootstrapping();
+
+// -------------------------------------------------------------------
+// Imports
+
+var AsyncFunctionNext;
+var AsyncFunctionThrow;
+var GlobalPromise;
+var IsPromise;
+var NewPromiseCapability;
+var PerformPromiseThen;
+var PromiseCreate;
+var PromiseNextMicrotaskID;
+var RejectPromise;
+var ResolvePromise;
+
+utils.Import(function(from) {
+  AsyncFunctionNext = from.AsyncFunctionNext;
+  AsyncFunctionThrow = from.AsyncFunctionThrow;
+  GlobalPromise = from.GlobalPromise;
+  IsPromise = from.IsPromise;
+  NewPromiseCapability = from.NewPromiseCapability;
+  PerformPromiseThen = from.PerformPromiseThen;
+  PromiseCreate = from.PromiseCreate;
+  PromiseNextMicrotaskID = from.PromiseNextMicrotaskID;
+  RejectPromise = from.RejectPromise;
+  ResolvePromise = from.ResolvePromise;
+});
+
+var promiseAsyncStackIDSymbol =
+    utils.ImportNow("promise_async_stack_id_symbol");
+var promiseHandledBySymbol =
+    utils.ImportNow("promise_handled_by_symbol");
+var promiseForwardingHandlerSymbol =
+    utils.ImportNow("promise_forwarding_handler_symbol");
+var promiseHandledHintSymbol =
+    utils.ImportNow("promise_handled_hint_symbol");
+var promiseHasHandlerSymbol =
+    utils.ImportNow("promise_has_handler_symbol");
+
+// -------------------------------------------------------------------
+
+function PromiseCastResolved(value) {
+  if (IsPromise(value)) {
+    return value;
+  } else {
+    var promise = PromiseCreate();
+    ResolvePromise(promise, value);
+    return promise;
+  }
+}
+
+// ES#abstract-ops-async-function-await
+// AsyncFunctionAwait ( value )
+// Shared logic for the core of await. The parser desugars
+//   await awaited
+// into
+//   yield AsyncFunctionAwait{Caught,Uncaught}(.generator, awaited, .promise)
+// The 'awaited' parameter is the value; the generator stands in
+// for the asyncContext, and .promise is the larger promise under
+// construction by the enclosing async function.
+function AsyncFunctionAwait(generator, awaited, outerPromise) {
+  // Promise.resolve(awaited).then(
+  //     value => AsyncFunctionNext(value),
+  //     error => AsyncFunctionThrow(error)
+  // );
+  var promise = PromiseCastResolved(awaited);
+
+  var onFulfilled = sentValue => {
+    %_Call(AsyncFunctionNext, generator, sentValue);
+    // The resulting Promise is a throwaway, so it doesn't matter what it
+    // resolves to. What is important is that we don't end up keeping the
+    // whole chain of intermediate Promises alive by returning the value
+    // of AsyncFunctionNext, as that would create a memory leak.
+    return;
+  };
+  var onRejected = sentError => {
+    %_Call(AsyncFunctionThrow, generator, sentError);
+    // Similarly, returning the resulting Promise here would keep a long
+    // resolution chain alive just to find out which exception to throw,
+    // creating the same kind of memory leak; it does not matter what
+    // sort of rejection this intermediate Promise becomes.
+    return;
+  };
+
+  // Just forwarding the exception, so no debugEvent for throwawayCapability
+  var throwawayCapability = NewPromiseCapability(GlobalPromise, false);
+
+  // The Promise will be thrown away and not handled, but it shouldn't trigger
+  // unhandled reject events as its work is done
+  SET_PRIVATE(throwawayCapability.promise, promiseHasHandlerSymbol, true);
+
+  if (DEBUG_IS_ACTIVE) {
+    if (IsPromise(awaited)) {
+      // Mark the reject handler callback to be a forwarding edge, rather
+      // than a meaningful catch handler
+      SET_PRIVATE(onRejected, promiseForwardingHandlerSymbol, true);
+    }
+
+    // Mark the dependency to outerPromise in case the throwaway Promise is
+    // found on the Promise stack
+    SET_PRIVATE(throwawayCapability.promise, promiseHandledBySymbol,
+                outerPromise);
+  }
+
+  PerformPromiseThen(promise, onFulfilled, onRejected, throwawayCapability);
+}
+
+// Called by the parser from the desugaring of 'await' when catch
+// prediction indicates no locally surrounding catch block
+function AsyncFunctionAwaitUncaught(generator, awaited, outerPromise) {
+  AsyncFunctionAwait(generator, awaited, outerPromise);
+}
+
+// Called by the parser from the desugaring of 'await' when catch
+// prediction indicates that there is a locally surrounding catch block
+function AsyncFunctionAwaitCaught(generator, awaited, outerPromise) {
+  if (DEBUG_IS_ACTIVE && IsPromise(awaited)) {
+    SET_PRIVATE(awaited, promiseHandledHintSymbol, true);
+  }
+  AsyncFunctionAwait(generator, awaited, outerPromise);
+}
+
+// How the parser rejects promises from async/await desugaring
+function RejectPromiseNoDebugEvent(promise, reason) {
+  return RejectPromise(promise, reason, false);
+}
+
+function AsyncFunctionPromiseCreate() {
+  var promise = PromiseCreate();
+  if (DEBUG_IS_ACTIVE) {
+    // Push the Promise under construction in an async function on
+    // the catch prediction stack to handle exceptions thrown before
+    // the first await.
+    %DebugPushPromise(promise);
+    // Assign ID and create a recurring task to save stack for future
+    // resumptions from await.
+    var id = PromiseNextMicrotaskID();
+    SET_PRIVATE(promise, promiseAsyncStackIDSymbol, id);
+    %DebugAsyncTaskEvent({
+      type: "enqueueRecurring",
+      id: id,
+      name: "async function",
+    });
+  }
+  return promise;
+}
+
+function AsyncFunctionPromiseRelease(promise) {
+  if (DEBUG_IS_ACTIVE) {
+    // Cancel the recurring task that was saving stacks for await resumptions.
+    var id = GET_PRIVATE(promise, promiseAsyncStackIDSymbol);
+    %DebugAsyncTaskEvent({
+      type: "cancel",
+      id: id,
+      name: "async function",
+    });
+    // Pop the Promise under construction in an async function
+    // from the catch prediction stack.
+    %DebugPopPromise();
+  }
+}
+
+%InstallToContext([
+  "async_function_await_caught", AsyncFunctionAwaitCaught,
+  "async_function_await_uncaught", AsyncFunctionAwaitUncaught,
+  "reject_promise_no_debug_event", RejectPromiseNoDebugEvent,
+  "async_function_promise_create", AsyncFunctionPromiseCreate,
+  "async_function_promise_release", AsyncFunctionPromiseRelease,
+]);
+
+})
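
(For orientation: the control flow that AsyncFunctionAwait sets up above can be approximated with only standard Promise and generator APIs. The sketch below is illustrative and not part of the patch; the generator protocol stands in for the V8-internal AsyncFunctionNext/AsyncFunctionThrow resumptions and the throwaway capability.)

// Minimal sketch of the await plumbing, assuming a generator-based runner.
function sketchAwait(generator, awaited) {
  const promise = Promise.resolve(awaited);  // plays the role of PromiseCastResolved
  promise.then(
    value => { generator.next(value); },     // AsyncFunctionNext resumption
    error => { generator.throw(error); }     // AsyncFunctionThrow resumption
  );
  // The .then() result is deliberately dropped, mirroring the throwaway
  // capability: returning it would keep intermediate Promises alive.
}

Driving a suspended generator with this helper after each yield reproduces the desugared behaviour, minus the debugger bookkeeping.
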
diff --git a/src/js/collection.js b/src/js/collection.js
index 83763af..6fe880d 100644
--- a/src/js/collection.js
+++ b/src/js/collection.js
@@ -16,7 +16,6 @@
 var hashCodeSymbol = utils.ImportNow("hash_code_symbol");
 var MathRandom;
 var MapIterator;
-var NumberIsNaN;
 var SetIterator;
 var speciesSymbol = utils.ImportNow("species_symbol");
 var toStringTagSymbol = utils.ImportNow("to_string_tag_symbol");
@@ -24,7 +23,6 @@
 utils.Import(function(from) {
   MathRandom = from.MathRandom;
   MapIterator = from.MapIterator;
-  NumberIsNaN = from.NumberIsNaN;
   SetIterator = from.SetIterator;
 });
 
@@ -42,9 +40,9 @@
   if (entry === NOT_FOUND) return entry;
   var candidate = ORDERED_HASH_SET_KEY_AT(table, entry, numBuckets);
   if (key === candidate) return entry;
-  var keyIsNaN = NumberIsNaN(key);
+  var keyIsNaN = NUMBER_IS_NAN(key);
   while (true) {
-    if (keyIsNaN && NumberIsNaN(candidate)) {
+    if (keyIsNaN && NUMBER_IS_NAN(candidate)) {
       return entry;
     }
     entry = ORDERED_HASH_SET_CHAIN_AT(table, entry, numBuckets);
@@ -62,9 +60,9 @@
   if (entry === NOT_FOUND) return entry;
   var candidate = ORDERED_HASH_MAP_KEY_AT(table, entry, numBuckets);
   if (key === candidate) return entry;
-  var keyIsNaN = NumberIsNaN(key);
+  var keyIsNaN = NUMBER_IS_NAN(key);
   while (true) {
-    if (keyIsNaN && NumberIsNaN(candidate)) {
+    if (keyIsNaN && NUMBER_IS_NAN(candidate)) {
       return entry;
     }
     entry = ORDERED_HASH_MAP_CHAIN_AT(table, entry, numBuckets);
diff --git a/src/js/datetime-format-to-parts.js b/src/js/datetime-format-to-parts.js
new file mode 100644
index 0000000..3194f50
--- /dev/null
+++ b/src/js/datetime-format-to-parts.js
@@ -0,0 +1,16 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+(function(global, utils) {
+"use strict";
+
+%CheckIsBootstrapping();
+
+var GlobalIntl = global.Intl;
+var FormatDateToParts = utils.ImportNow("FormatDateToParts");
+
+utils.InstallFunctions(GlobalIntl.DateTimeFormat.prototype,  DONT_ENUM, [
+    'formatToParts', FormatDateToParts
+]);
+})
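
(The snippet above only installs the method during bootstrapping; the observable API is the standard formatToParts, illustrated below under an assumed 'en-US' environment, so the exact part values may differ by ICU build.)

// formatToParts returns typed tokens instead of one formatted string.
const dtf = new Intl.DateTimeFormat('en-US', { year: 'numeric', month: 'long' });
const parts = dtf.formatToParts(new Date(2016, 8, 1));
// e.g. [ { type: 'month',   value: 'September' },
//        { type: 'literal', value: ' ' },
//        { type: 'year',    value: '2016' } ]
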
diff --git a/src/js/harmony-async-await.js b/src/js/harmony-async-await.js
deleted file mode 100644
index 3a48d0c..0000000
--- a/src/js/harmony-async-await.js
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-(function(global, utils, extrasUtils) {
-
-"use strict";
-
-%CheckIsBootstrapping();
-
-// -------------------------------------------------------------------
-// Imports
-
-var AsyncFunctionNext;
-var AsyncFunctionThrow;
-var GlobalPromise;
-var NewPromiseCapability;
-var PerformPromiseThen;
-var PromiseCastResolved;
-
-utils.Import(function(from) {
-  AsyncFunctionNext = from.AsyncFunctionNext;
-  AsyncFunctionThrow = from.AsyncFunctionThrow;
-  GlobalPromise = from.GlobalPromise;
-  NewPromiseCapability = from.NewPromiseCapability;
-  PromiseCastResolved = from.PromiseCastResolved;
-  PerformPromiseThen = from.PerformPromiseThen;
-});
-
-// -------------------------------------------------------------------
-
-function AsyncFunctionAwait(generator, value) {
-  // Promise.resolve(value).then(
-  //     value => AsyncFunctionNext(value),
-  //     error => AsyncFunctionThrow(error)
-  // );
-  var promise = PromiseCastResolved(value);
-
-  var onFulfilled =
-      (sentValue) => %_Call(AsyncFunctionNext, generator, sentValue);
-  var onRejected =
-      (sentError) => %_Call(AsyncFunctionThrow, generator, sentError);
-
-  var throwawayCapability = NewPromiseCapability(GlobalPromise);
-  return PerformPromiseThen(promise, onFulfilled, onRejected,
-                            throwawayCapability);
-}
-
-%InstallToContext([ "async_function_await", AsyncFunctionAwait ]);
-
-})
diff --git a/src/js/i18n.js b/src/js/i18n.js
index 6046a6f..a397849 100644
--- a/src/js/i18n.js
+++ b/src/js/i18n.js
@@ -19,7 +19,6 @@
 
 var ArrayJoin;
 var ArrayPush;
-var FLAG_intl_extra;
 var GlobalDate = global.Date;
 var GlobalNumber = global.Number;
 var GlobalRegExp = global.RegExp;
@@ -29,31 +28,21 @@
 var InternalArray = utils.InternalArray;
 var InternalRegExpMatch;
 var InternalRegExpReplace
-var IsNaN;
 var ObjectHasOwnProperty = utils.ImportNow("ObjectHasOwnProperty");
 var OverrideFunction = utils.OverrideFunction;
 var patternSymbol = utils.ImportNow("intl_pattern_symbol");
 var resolvedSymbol = utils.ImportNow("intl_resolved_symbol");
 var SetFunctionName = utils.SetFunctionName;
 var StringIndexOf;
-var StringLastIndexOf;
-var StringSubstr;
-var StringSubstring;
+var StringSubstr = GlobalString.prototype.substr;
+var StringSubstring = GlobalString.prototype.substring;
 
 utils.Import(function(from) {
   ArrayJoin = from.ArrayJoin;
   ArrayPush = from.ArrayPush;
-  IsNaN = from.IsNaN;
   InternalRegExpMatch = from.InternalRegExpMatch;
   InternalRegExpReplace = from.InternalRegExpReplace;
   StringIndexOf = from.StringIndexOf;
-  StringLastIndexOf = from.StringLastIndexOf;
-  StringSubstr = from.StringSubstr;
-  StringSubstring = from.StringSubstring;
-});
-
-utils.ImportFromExperimental(function(from) {
-  FLAG_intl_extra = from.FLAG_intl_extra;
 });
 
 // Utilities for definitions
@@ -318,7 +307,7 @@
         break;
       }
       // Truncate locale if possible, if not break.
-      var pos = %_Call(StringLastIndexOf, locale, '-');
+      var pos = %StringLastIndexOf(locale, '-');
       if (pos === -1) {
         break;
       }
@@ -441,7 +430,7 @@
         return {'locale': locale, 'extension': extension, 'position': i};
       }
       // Truncate locale if possible.
-      var pos = %_Call(StringLastIndexOf, locale, '-');
+      var pos = %StringLastIndexOf(locale, '-');
       if (pos === -1) {
         break;
       }
@@ -1038,9 +1027,6 @@
   // Writable, configurable and enumerable are set to false by default.
   %MarkAsInitializedIntlObjectOfType(collator, 'collator', internalCollator);
   collator[resolvedSymbol] = resolved;
-  if (FLAG_intl_extra) {
-    %object_define_property(collator, 'resolved', resolvedAccessor);
-  }
 
   return collator;
 }
@@ -1282,10 +1268,6 @@
 
   %MarkAsInitializedIntlObjectOfType(numberFormat, 'numberformat', formatter);
   numberFormat[resolvedSymbol] = resolved;
-  if (FLAG_intl_extra) {
-    %object_define_property(resolved, 'pattern', patternAccessor);
-    %object_define_property(numberFormat, 'resolved', resolvedAccessor);
-  }
 
   return numberFormat;
 }
@@ -1388,14 +1370,6 @@
 }
 
 
-/**
- * Returns a Number that represents string value that was passed in.
- */
-function IntlParseNumber(formatter, value) {
-  return %InternalNumberParse(%GetImplFromInitializedIntlObject(formatter),
-                              TO_STRING(value));
-}
-
 AddBoundMethod(Intl.NumberFormat, 'format', formatNumber, 1, 'numberformat');
 
 /**
@@ -1676,10 +1650,6 @@
 
   %MarkAsInitializedIntlObjectOfType(dateFormat, 'dateformat', formatter);
   dateFormat[resolvedSymbol] = resolved;
-  if (FLAG_intl_extra) {
-    %object_define_property(resolved, 'pattern', patternAccessor);
-    %object_define_property(dateFormat, 'resolved', resolvedAccessor);
-  }
 
   return dateFormat;
 }
@@ -1797,18 +1767,29 @@
                              new GlobalDate(dateMs));
 }
 
+function FormatDateToParts(dateValue) {
+  if (!IS_UNDEFINED(new.target)) {
+    throw %make_type_error(kOrdinaryFunctionCalledAsConstructor);
+  }
+  CHECK_OBJECT_COERCIBLE(this, "Intl.DateTimeFormat.prototype.formatToParts");
+  if (!IS_OBJECT(this)) {
+    throw %make_type_error(kCalledOnNonObject, this);
+  }
+  var dateMs;
+  if (IS_UNDEFINED(dateValue)) {
+    dateMs = %DateCurrentTime();
+  } else {
+    dateMs = TO_NUMBER(dateValue);
+  }
 
-/**
- * Returns a Date object representing the result of calling ToString(value)
- * according to the effective locale and the formatting options of this
- * DateTimeFormat.
- * Returns undefined if date string cannot be parsed.
- */
-function IntlParseDate(formatter, value) {
-  return %InternalDateParse(%GetImplFromInitializedIntlObject(formatter),
-                            TO_STRING(value));
+  if (!NUMBER_IS_FINITE(dateMs)) throw %make_range_error(kDateRange);
+
+  return %InternalDateFormatToParts(
+      %GetImplFromInitializedIntlObject(this), new GlobalDate(dateMs));
 }
 
+%FunctionSetLength(FormatDateToParts, 0);
+
 
 // 0 because date is optional argument.
 AddBoundMethod(Intl.DateTimeFormat, 'format', formatDate, 0, 'dateformat');
@@ -1889,9 +1870,6 @@
   %MarkAsInitializedIntlObjectOfType(iterator, 'breakiterator',
                                      internalIterator);
   iterator[resolvedSymbol] = resolved;
-  if (FLAG_intl_extra) {
-    %object_define_property(iterator, 'resolved', resolvedAccessor);
-  }
 
   return iterator;
 }
@@ -2227,7 +2205,8 @@
     throw %make_type_error(kMethodInvokedOnWrongType, "Date");
   }
 
-  if (IsNaN(date)) return 'Invalid Date';
+  var dateValue = TO_NUMBER(date);
+  if (NUMBER_IS_NAN(dateValue)) return 'Invalid Date';
 
   var internalOptions = toDateTimeOptions(options, required, defaults);
 
@@ -2291,10 +2270,10 @@
   }
 );
 
+%FunctionRemovePrototype(FormatDateToParts);
+
 utils.Export(function(to) {
-  to.AddBoundMethod = AddBoundMethod;
-  to.IntlParseDate = IntlParseDate;
-  to.IntlParseNumber = IntlParseNumber;
+  to.FormatDateToParts = FormatDateToParts;
 });
 
 })
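
(The %StringLastIndexOf hunks above only change how lastIndexOf is invoked; the surrounding lookup loops keep truncating the candidate locale at its last '-' until a supported tag remains. A standalone sketch of that fallback, with isSupported as a hypothetical availability check:)

// BCP 47 fallback: strip trailing subtags until a supported locale remains.
function bestAvailableLocale(locale, isSupported) {
  while (locale.length > 0) {
    if (isSupported(locale)) return locale;
    const pos = locale.lastIndexOf('-');
    if (pos === -1) break;                 // nothing left to truncate
    locale = locale.substring(0, pos);
  }
  return undefined;
}
// bestAvailableLocale('de-CH-1996', l => l === 'de') === 'de'
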
diff --git a/src/js/intl-extra.js b/src/js/intl-extra.js
deleted file mode 100644
index a4d2256..0000000
--- a/src/js/intl-extra.js
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-(function(global, utils) {
-
-"use strict";
-
-%CheckIsBootstrapping();
-
-var GlobalIntl = global.Intl;
-
-var AddBoundMethod = utils.ImportNow("AddBoundMethod");
-var IntlParseDate = utils.ImportNow("IntlParseDate");
-var IntlParseNumber = utils.ImportNow("IntlParseNumber");
-
-AddBoundMethod(GlobalIntl.DateTimeFormat, 'v8Parse', IntlParseDate, 1,
-               'dateformat');
-AddBoundMethod(GlobalIntl.NumberFormat, 'v8Parse', IntlParseNumber, 1,
-               'numberformat');
-
-})
diff --git a/src/js/iterator-prototype.js b/src/js/iterator-prototype.js
deleted file mode 100644
index 6f25019..0000000
--- a/src/js/iterator-prototype.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-(function(global, utils) {
-  "use strict";
-  %CheckIsBootstrapping();
-
-  var GlobalObject = global.Object;
-  var IteratorPrototype = utils.ImportNow("IteratorPrototype");
-  var iteratorSymbol = utils.ImportNow("iterator_symbol");
-
-  // 25.1.2.1 %IteratorPrototype% [ @@iterator ] ( )
-  function IteratorPrototypeIterator() {
-    return this;
-  }
-
-  utils.SetFunctionName(IteratorPrototypeIterator, iteratorSymbol);
-  %AddNamedProperty(IteratorPrototype, iteratorSymbol,
-      IteratorPrototypeIterator, DONT_ENUM);
-})
diff --git a/src/js/prologue.js b/src/js/prologue.js
index bb81879..8a07a4c 100644
--- a/src/js/prologue.js
+++ b/src/js/prologue.js
@@ -120,7 +120,7 @@
   SetFunctionName(setter, name, "set");
   %FunctionRemovePrototype(getter);
   %FunctionRemovePrototype(setter);
-  %DefineAccessorPropertyUnchecked(object, name, getter, setter, DONT_ENUM);
+  %DefineAccessorPropertyUnchecked(object, name, getter, setter, attributes);
   %SetNativeFlag(getter);
   %SetNativeFlag(setter);
 }
@@ -181,32 +181,15 @@
 
   // Whitelist of exports from normal natives to experimental natives and debug.
   var expose_list = [
-    "AddBoundMethod",
     "ArrayToString",
-    "AsyncFunctionNext",
-    "AsyncFunctionThrow",
+    "FormatDateToParts",
     "GetIterator",
     "GetMethod",
-    "GlobalPromise",
-    "IntlParseDate",
-    "IntlParseNumber",
-    "IsNaN",
     "MapEntries",
     "MapIterator",
     "MapIteratorNext",
     "MaxSimple",
     "MinSimple",
-    "NewPromiseCapability",
-    "NumberIsInteger",
-    "PerformPromiseThen",
-    "PromiseCastResolved",
-    "PromiseThen",
-    "RegExpSubclassExecJS",
-    "RegExpSubclassMatch",
-    "RegExpSubclassReplace",
-    "RegExpSubclassSearch",
-    "RegExpSubclassSplit",
-    "RegExpSubclassTest",
     "SetIterator",
     "SetIteratorNext",
     "SetValues",
@@ -218,11 +201,11 @@
     // From runtime:
     "is_concat_spreadable_symbol",
     "iterator_symbol",
-    "promise_result_symbol",
-    "promise_state_symbol",
     "object_freeze",
     "object_is_frozen",
     "object_is_sealed",
+    "promise_result_symbol",
+    "promise_state_symbol",
     "reflect_apply",
     "reflect_construct",
     "regexp_flags_symbol",
diff --git a/src/js/promise.js b/src/js/promise.js
index b50fc80..793d60f 100644
--- a/src/js/promise.js
+++ b/src/js/promise.js
@@ -12,8 +12,12 @@
 // Imports
 
 var InternalArray = utils.InternalArray;
-var promiseCombinedDeferredSymbol =
-    utils.ImportNow("promise_combined_deferred_symbol");
+var promiseAsyncStackIDSymbol =
+    utils.ImportNow("promise_async_stack_id_symbol");
+var promiseHandledBySymbol =
+    utils.ImportNow("promise_handled_by_symbol");
+var promiseForwardingHandlerSymbol =
+    utils.ImportNow("promise_forwarding_handler_symbol");
 var promiseHasHandlerSymbol =
     utils.ImportNow("promise_has_handler_symbol");
 var promiseRejectReactionsSymbol =
@@ -22,14 +26,18 @@
     utils.ImportNow("promise_fulfill_reactions_symbol");
 var promiseDeferredReactionsSymbol =
     utils.ImportNow("promise_deferred_reactions_symbol");
+var promiseHandledHintSymbol =
+    utils.ImportNow("promise_handled_hint_symbol");
 var promiseRawSymbol = utils.ImportNow("promise_raw_symbol");
 var promiseStateSymbol = utils.ImportNow("promise_state_symbol");
 var promiseResultSymbol = utils.ImportNow("promise_result_symbol");
 var SpeciesConstructor;
 var speciesSymbol = utils.ImportNow("species_symbol");
 var toStringTagSymbol = utils.ImportNow("to_string_tag_symbol");
+var ObjectHasOwnProperty;
 
 utils.Import(function(from) {
+  ObjectHasOwnProperty = from.ObjectHasOwnProperty;
   SpeciesConstructor = from.SpeciesConstructor;
 });
 
@@ -42,9 +50,13 @@
 
 var lastMicrotaskId = 0;
 
+function PromiseNextMicrotaskID() {
+  return ++lastMicrotaskId;
+}
+
 // ES#sec-createresolvingfunctions
 // CreateResolvingFunctions ( promise )
-function CreateResolvingFunctions(promise) {
+function CreateResolvingFunctions(promise, debugEvent) {
   var alreadyResolved = false;
 
   // ES#sec-promise-resolve-functions
@@ -60,7 +72,7 @@
   var reject = reason => {
     if (alreadyResolved === true) return;
     alreadyResolved = true;
-    RejectPromise(promise, reason);
+    RejectPromise(promise, reason, debugEvent);
   };
 
   return {
@@ -83,7 +95,8 @@
   }
 
   var promise = PromiseInit(%_NewObject(GlobalPromise, new.target));
-  var callbacks = CreateResolvingFunctions(promise);
+  // A call to the reject function corresponds to a new exception, so debugEvent = true
+  var callbacks = CreateResolvingFunctions(promise, true);
   var debug_is_active = DEBUG_IS_ACTIVE;
   try {
     if (debug_is_active) %DebugPushPromise(promise);
@@ -182,9 +195,24 @@
     }
   });
   if (instrumenting) {
-    id = ++lastMicrotaskId;
-    name = status === kFulfilled ? "Promise.resolve" : "Promise.reject";
-    %DebugAsyncTaskEvent({ type: "enqueue", id: id, name: name });
+    // In an async function, reuse the existing stack related to the outer
+    // Promise. Otherwise, e.g. in a direct call to then, save a new stack.
+    // Promises with multiple reactions with one or more of them being async
+    // functions will not get a good stack trace, as async functions require
+    // different stacks from direct Promise use, but we save and restore a
+    // stack once for all reactions. TODO(littledan): Improve this case.
+    if (!IS_UNDEFINED(deferreds) &&
+        HAS_PRIVATE(deferreds.promise, promiseHandledBySymbol) &&
+        HAS_PRIVATE(GET_PRIVATE(deferreds.promise, promiseHandledBySymbol),
+                    promiseAsyncStackIDSymbol)) {
+      id = GET_PRIVATE(GET_PRIVATE(deferreds.promise, promiseHandledBySymbol),
+                       promiseAsyncStackIDSymbol);
+      name = "async function";
+    } else {
+      id = PromiseNextMicrotaskID();
+      name = status === kFulfilled ? "Promise.resolve" : "Promise.reject";
+      %DebugAsyncTaskEvent({ type: "enqueue", id: id, name: name });
+    }
   }
 }
 
@@ -209,16 +237,16 @@
 
     SET_PRIVATE(promise, promiseFulfillReactionsSymbol, resolveCallbacks);
     SET_PRIVATE(promise, promiseRejectReactionsSymbol, rejectCallbacks);
+    SET_PRIVATE(promise, promiseDeferredReactionsSymbol, UNDEFINED);
   } else {
     maybeResolveCallbacks.push(onResolve, deferred);
     GET_PRIVATE(promise, promiseRejectReactionsSymbol).push(onReject, deferred);
   }
 }
 
-function PromiseIdResolveHandler(x) { return x }
-function PromiseIdRejectHandler(r) { throw r }
-
-function PromiseNopResolver() {}
+function PromiseIdResolveHandler(x) { return x; }
+function PromiseIdRejectHandler(r) { %_ReThrow(r); }
+SET_PRIVATE(PromiseIdRejectHandler, promiseForwardingHandlerSymbol, true);
 
 // -------------------------------------------------------------------
 // Define exported functions.
@@ -231,21 +259,23 @@
 }
 
 function PromiseCreate() {
-  return new GlobalPromise(PromiseNopResolver)
+  return PromiseInit(new GlobalPromise(promiseRawSymbol));
 }
 
 // ES#sec-promise-resolve-functions
 // Promise Resolve Functions, steps 6-13
 function ResolvePromise(promise, resolution) {
   if (resolution === promise) {
-    return RejectPromise(promise, %make_type_error(kPromiseCyclic, resolution));
+    return RejectPromise(promise,
+                         %make_type_error(kPromiseCyclic, resolution),
+                         true);
   }
   if (IS_RECEIVER(resolution)) {
     // 25.4.1.3.2 steps 8-12
     try {
       var then = resolution.then;
     } catch (e) {
-      return RejectPromise(promise, e);
+      return RejectPromise(promise, e, true);
     }
 
     // Resolution is a native promise and if it's already resolved or
@@ -268,63 +298,80 @@
           // Revoke previously triggered reject event.
           %PromiseRevokeReject(resolution);
         }
-        RejectPromise(promise, thenableValue);
+        // Don't cause a debug event as this case is forwarding a rejection
+        RejectPromise(promise, thenableValue, false);
         SET_PRIVATE(resolution, promiseHasHandlerSymbol, true);
         return;
       }
     }
 
     if (IS_CALLABLE(then)) {
-      // PromiseResolveThenableJob
-      var id;
-      var name = "PromiseResolveThenableJob";
+      var callbacks = CreateResolvingFunctions(promise, false);
+      var id, before_debug_event, after_debug_event;
       var instrumenting = DEBUG_IS_ACTIVE;
-      %EnqueueMicrotask(function() {
-        if (instrumenting) {
-          %DebugAsyncTaskEvent({ type: "willHandle", id: id, name: name });
-        }
-        var callbacks = CreateResolvingFunctions(promise);
-        try {
-          %_Call(then, resolution, callbacks.resolve, callbacks.reject);
-        } catch (e) {
-          %_Call(callbacks.reject, UNDEFINED, e);
-        }
-        if (instrumenting) {
-          %DebugAsyncTaskEvent({ type: "didHandle", id: id, name: name });
-        }
-      });
       if (instrumenting) {
-        id = ++lastMicrotaskId;
-        %DebugAsyncTaskEvent({ type: "enqueue", id: id, name: name });
+        if (IsPromise(resolution)) {
+          // Mark the dependency of the new promise on the resolution
+          SET_PRIVATE(resolution, promiseHandledBySymbol, promise);
+        }
+        id = PromiseNextMicrotaskID();
+        before_debug_event = {
+          type: "willHandle",
+          id: id,
+          name: "PromiseResolveThenableJob"
+        };
+        after_debug_event = {
+          type: "didHandle",
+          id: id,
+          name: "PromiseResolveThenableJob"
+        };
+        %DebugAsyncTaskEvent({
+          type: "enqueue",
+          id: id,
+          name: "PromiseResolveThenableJob"
+        });
       }
+      %EnqueuePromiseResolveThenableJob(
+          resolution, then, callbacks.resolve, callbacks.reject,
+          before_debug_event, after_debug_event);
       return;
     }
   }
-  FulfillPromise(promise, kFulfilled, resolution, promiseFulfillReactionsSymbol);
+  FulfillPromise(promise, kFulfilled, resolution,
+                 promiseFulfillReactionsSymbol);
 }
 
 // ES#sec-rejectpromise
 // RejectPromise ( promise, reason )
-function RejectPromise(promise, reason) {
+function RejectPromise(promise, reason, debugEvent) {
   // Check promise status to confirm that this reject has an effect.
   // Call runtime for callbacks to the debugger or for unhandled reject.
+  // The debugEvent parameter sets whether a debug ExceptionEvent should
+  // be triggered. It should be set to false when forwarding a rejection
+  // rather than creating a new one.
   if (GET_PRIVATE(promise, promiseStateSymbol) === kPending) {
-    var debug_is_active = DEBUG_IS_ACTIVE;
-    if (debug_is_active ||
+    // This check is redundant with checks in the runtime, but it may help
+    // avoid unnecessary runtime calls.
+    if ((debugEvent && DEBUG_IS_ACTIVE) ||
         !HAS_DEFINED_PRIVATE(promise, promiseHasHandlerSymbol)) {
-      %PromiseRejectEvent(promise, reason, debug_is_active);
+      %PromiseRejectEvent(promise, reason, debugEvent);
     }
   }
   FulfillPromise(promise, kRejected, reason, promiseRejectReactionsSymbol)
 }
 
+// Export to bindings
+function DoRejectPromise(promise, reason) {
+  return RejectPromise(promise, reason, true);
+}
+
 // ES#sec-newpromisecapability
 // NewPromiseCapability ( C )
-function NewPromiseCapability(C) {
+function NewPromiseCapability(C, debugEvent) {
   if (C === GlobalPromise) {
     // Optimized case, avoid extra closure.
-    var promise = PromiseInit(new GlobalPromise(promiseRawSymbol));
-    var callbacks = CreateResolvingFunctions(promise);
+    var promise = PromiseCreate();
+    var callbacks = CreateResolvingFunctions(promise, debugEvent);
     return {
       promise: promise,
       resolve: callbacks.resolve,
@@ -355,39 +402,17 @@
   if (this === GlobalPromise) {
     // Optimized case, avoid extra closure.
     var promise = PromiseCreateAndSet(kRejected, r);
-    // The debug event for this would always be an uncaught promise reject,
-    // which is usually simply noise. Do not trigger that debug event.
-    %PromiseRejectEvent(promise, r, false);
+    // Trigger debug events if the debugger is on, as Promise.reject is
+    // equivalent to throwing an exception directly.
+    %PromiseRejectEventFromStack(promise, r);
     return promise;
   } else {
-    var promiseCapability = NewPromiseCapability(this);
+    var promiseCapability = NewPromiseCapability(this, true);
     %_Call(promiseCapability.reject, UNDEFINED, r);
     return promiseCapability.promise;
   }
 }
 
-// Shortcut Promise.reject and Promise.resolve() implementations, used by
-// Async Functions implementation.
-function PromiseCreateRejected(r) {
-  return %_Call(PromiseReject, GlobalPromise, r);
-}
-
-function PromiseCreateResolved(value) {
-  var promise = PromiseInit(new GlobalPromise(promiseRawSymbol));
-  var resolveResult = ResolvePromise(promise, value);
-  return promise;
-}
-
-function PromiseCastResolved(value) {
-  if (IsPromise(value)) {
-    return value;
-  } else {
-    var promise = PromiseInit(new GlobalPromise(promiseRawSymbol));
-    var resolveResult = ResolvePromise(promise, value);
-    return promise;
-  }
-}
-
 function PerformPromiseThen(promise, onResolve, onReject, resultCapability) {
   if (!IS_CALLABLE(onResolve)) onResolve = PromiseIdResolveHandler;
   if (!IS_CALLABLE(onReject)) onReject = PromiseIdRejectHandler;
@@ -427,7 +452,9 @@
   }
 
   var constructor = SpeciesConstructor(this, GlobalPromise);
-  var resultCapability = NewPromiseCapability(constructor);
+  // Pass false for debugEvent so .then chaining does not trigger
+  // redundant ExceptionEvents.
+  var resultCapability = NewPromiseCapability(constructor, false);
   return PerformPromiseThen(this, onResolve, onReject, resultCapability);
 }
 
@@ -449,12 +476,13 @@
 
   // Avoid creating resolving functions.
   if (this === GlobalPromise) {
-    var promise = PromiseInit(new GlobalPromise(promiseRawSymbol));
+    var promise = PromiseCreate();
     var resolveResult = ResolvePromise(promise, x);
     return promise;
   }
 
-  var promiseCapability = NewPromiseCapability(this);
+  // debugEvent has little effect here, as the capability is resolved rather than rejected
+  var promiseCapability = NewPromiseCapability(this, true);
   var resolveResult = %_Call(promiseCapability.resolve, UNDEFINED, x);
   return promiseCapability.promise;
 }
@@ -466,10 +494,19 @@
     throw %make_type_error(kCalledOnNonObject, "Promise.all");
   }
 
-  var deferred = NewPromiseCapability(this);
+  // Pass a false debugEvent so that forwarding the rejection through
+  // Promise.all does not trigger redundant ExceptionEvents.
+  var deferred = NewPromiseCapability(this, false);
   var resolutions = new InternalArray();
   var count;
 
+  // For catch prediction, don't treat the .then calls as handling it;
+  // instead, recurse outwards.
+  var instrumenting = DEBUG_IS_ACTIVE;
+  if (instrumenting) {
+    SET_PRIVATE(deferred.reject, promiseForwardingHandlerSymbol, true);
+  }
+
   function CreateResolveElementFunction(index, values, promiseCapability) {
     var alreadyCalled = false;
     return (x) => {
@@ -490,10 +527,14 @@
     for (var value of iterable) {
       var nextPromise = this.resolve(value);
       ++count;
-      nextPromise.then(
+      var throwawayPromise = nextPromise.then(
           CreateResolveElementFunction(i, resolutions, deferred),
           deferred.reject);
-      SET_PRIVATE(deferred.reject, promiseCombinedDeferredSymbol, deferred);
+      // For catch prediction, mark that rejections here are semantically
+      // handled by the combined Promise.
+      if (instrumenting && IsPromise(throwawayPromise)) {
+        SET_PRIVATE(throwawayPromise, promiseHandledBySymbol, deferred.promise);
+      }
       ++i;
     }
 
@@ -517,11 +558,26 @@
     throw %make_type_error(kCalledOnNonObject, PromiseRace);
   }
 
-  var deferred = NewPromiseCapability(this);
+  // Pass a false debugEvent so that forwarding the rejection through
+  // Promise.race does not trigger redundant ExceptionEvents.
+  var deferred = NewPromiseCapability(this, false);
+
+  // For catch prediction, don't treat the .then calls as handling it;
+  // instead, recurse outwards.
+  var instrumenting = DEBUG_IS_ACTIVE;
+  if (instrumenting) {
+    SET_PRIVATE(deferred.reject, promiseForwardingHandlerSymbol, true);
+  }
+
   try {
     for (var value of iterable) {
-      this.resolve(value).then(deferred.resolve, deferred.reject);
-      SET_PRIVATE(deferred.reject, promiseCombinedDeferredSymbol, deferred);
+      var throwawayPromise = this.resolve(value).then(deferred.resolve,
+                                                      deferred.reject);
+      // For catch prediction, mark that rejections here are semantically
+      // handled by the combined Promise.
+      if (instrumenting && IsPromise(throwawayPromise)) {
+        SET_PRIVATE(throwawayPromise, promiseHandledBySymbol, deferred.promise);
+      }
     }
   } catch (e) {
     deferred.reject(e)
@@ -533,29 +589,48 @@
 // Utility for debugger
 
 function PromiseHasUserDefinedRejectHandlerCheck(handler, deferred) {
-  if (handler !== PromiseIdRejectHandler) {
-    var combinedDeferred = GET_PRIVATE(handler, promiseCombinedDeferredSymbol);
-    if (IS_UNDEFINED(combinedDeferred)) return true;
-    if (PromiseHasUserDefinedRejectHandlerRecursive(combinedDeferred.promise)) {
-      return true;
-    }
-  } else if (PromiseHasUserDefinedRejectHandlerRecursive(deferred.promise)) {
-    return true;
+  // Recurse to the forwarding Promise, if any. This may be due to
+  //  - await reaction forwarding to the throwaway Promise, which has
+  //    a dependency edge to the outer Promise.
+  //  - PromiseIdResolveHandler forwarding to the output of .then
+  //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
+  //    has a dependency edge to the generated outer Promise.
+  if (GET_PRIVATE(handler, promiseForwardingHandlerSymbol)) {
+    return PromiseHasUserDefinedRejectHandlerRecursive(deferred.promise);
   }
-  return false;
+
+  // Otherwise, this is a real reject handler for the Promise
+  return true;
 }
 
 function PromiseHasUserDefinedRejectHandlerRecursive(promise) {
+  // If this promise was marked as being handled by a catch block
+  // in an async function, then it has a user-defined reject handler.
+  if (GET_PRIVATE(promise, promiseHandledHintSymbol)) return true;
+
+  // If this Promise is subsumed by another Promise (a Promise resolved
+  // with another Promise, or an intermediate, hidden, throwaway Promise
+  // within async/await), then recurse on the outer Promise.
+  // In this case, the dependency is one possible way that the Promise
+  // could be resolved, so it does not subsume the other following cases.
+  var outerPromise = GET_PRIVATE(promise, promiseHandledBySymbol);
+  if (outerPromise &&
+      PromiseHasUserDefinedRejectHandlerRecursive(outerPromise)) {
+    return true;
+  }
+
   var queue = GET_PRIVATE(promise, promiseRejectReactionsSymbol);
   var deferreds = GET_PRIVATE(promise, promiseDeferredReactionsSymbol);
+
   if (IS_UNDEFINED(queue)) return false;
+
   if (!IS_ARRAY(queue)) {
     return PromiseHasUserDefinedRejectHandlerCheck(queue, deferreds);
-  } else {
-    for (var i = 0; i < queue.length; i += 2) {
-      if (PromiseHasUserDefinedRejectHandlerCheck(queue[i], queue[i + 1])) {
-        return true;
-      }
+  }
+
+  for (var i = 0; i < queue.length; i += 2) {
+    if (PromiseHasUserDefinedRejectHandlerCheck(queue[i], queue[i + 1])) {
+      return true;
     }
   }
   return false;
@@ -564,6 +639,8 @@
 // Return whether the promise will be handled by a user-defined reject
 // handler somewhere down the promise chain. For this, we do a depth-first
 // search for a reject handler that's not the default PromiseIdRejectHandler.
+// This function also traverses dependencies of one Promise on another,
+// set up through async/await and Promises resolved with Promises.
 function PromiseHasUserDefinedRejectHandler() {
   return PromiseHasUserDefinedRejectHandlerRecursive(this);
 };
@@ -598,11 +675,9 @@
   "promise_catch", PromiseCatch,
   "promise_create", PromiseCreate,
   "promise_has_user_defined_reject_handler", PromiseHasUserDefinedRejectHandler,
-  "promise_reject", RejectPromise,
+  "promise_reject", DoRejectPromise,
   "promise_resolve", ResolvePromise,
-  "promise_then", PromiseThen,
-  "promise_create_rejected", PromiseCreateRejected,
-  "promise_create_resolved", PromiseCreateResolved
+  "promise_then", PromiseThen
 ]);
 
 // This allows extras to create promises quickly without building extra
@@ -611,16 +686,20 @@
 utils.InstallFunctions(extrasUtils, 0, [
   "createPromise", PromiseCreate,
   "resolvePromise", ResolvePromise,
-  "rejectPromise", RejectPromise
+  "rejectPromise", DoRejectPromise
 ]);
 
 utils.Export(function(to) {
-  to.PromiseCastResolved = PromiseCastResolved;
+  to.IsPromise = IsPromise;
+  to.PromiseCreate = PromiseCreate;
   to.PromiseThen = PromiseThen;
+  to.PromiseNextMicrotaskID = PromiseNextMicrotaskID;
 
   to.GlobalPromise = GlobalPromise;
   to.NewPromiseCapability = NewPromiseCapability;
   to.PerformPromiseThen = PerformPromiseThen;
+  to.ResolvePromise = ResolvePromise;
+  to.RejectPromise = RejectPromise;
 });
 
 })
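
(Stripped of the debugger hooks and private symbols, the CreateResolvingFunctions change above reduces to the usual already-resolved latch with debugEvent threaded into the reject path. A plain-JS sketch, with resolveImpl/rejectImpl standing in for ResolvePromise/RejectPromise:)

// One-shot resolve/reject pair: whichever runs first wins, later calls
// are ignored; debugEvent is only forwarded to the reject path.
function createResolvingFunctions(promise, debugEvent, resolveImpl, rejectImpl) {
  let alreadyResolved = false;
  return {
    resolve(value) {
      if (alreadyResolved) return;
      alreadyResolved = true;
      resolveImpl(promise, value);
    },
    reject(reason) {
      if (alreadyResolved) return;
      alreadyResolved = true;
      rejectImpl(promise, reason, debugEvent);
    }
  };
}
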
diff --git a/src/js/regexp.js b/src/js/regexp.js
index dbe4837..49da45b 100644
--- a/src/js/regexp.js
+++ b/src/js/regexp.js
@@ -4,20 +4,22 @@
 
 (function(global, utils) {
 
+'use strict';
+
 %CheckIsBootstrapping();
 
 // -------------------------------------------------------------------
 // Imports
 
-var ExpandReplacement;
 var GlobalArray = global.Array;
 var GlobalObject = global.Object;
 var GlobalRegExp = global.RegExp;
-var GlobalRegExpPrototype;
+var GlobalRegExpPrototype = GlobalRegExp.prototype;
 var InternalArray = utils.InternalArray;
 var InternalPackedArray = utils.InternalPackedArray;
 var MaxSimple;
 var MinSimple;
+var RegExpExecJS = GlobalRegExp.prototype.exec;
 var matchSymbol = utils.ImportNow("match_symbol");
 var replaceSymbol = utils.ImportNow("replace_symbol");
 var searchSymbol = utils.ImportNow("search_symbol");
@@ -26,7 +28,6 @@
 var SpeciesConstructor;
 
 utils.Import(function(from) {
-  ExpandReplacement = from.ExpandReplacement;
   MaxSimple = from.MaxSimple;
   MinSimple = from.MinSimple;
   SpeciesConstructor = from.SpeciesConstructor;
@@ -80,37 +81,6 @@
 }
 
 
-// ES#sec-regexp-pattern-flags
-// RegExp ( pattern, flags )
-function RegExpConstructor(pattern, flags) {
-  var newtarget = new.target;
-  var pattern_is_regexp = IsRegExp(pattern);
-
-  if (IS_UNDEFINED(newtarget)) {
-    newtarget = GlobalRegExp;
-
-    // ES6 section 21.2.3.1 step 3.b
-    if (pattern_is_regexp && IS_UNDEFINED(flags) &&
-        pattern.constructor === newtarget) {
-      return pattern;
-    }
-  }
-
-  if (IS_REGEXP(pattern)) {
-    if (IS_UNDEFINED(flags)) flags = PatternFlags(pattern);
-    pattern = REGEXP_SOURCE(pattern);
-
-  } else if (pattern_is_regexp) {
-    var input_pattern = pattern;
-    pattern = pattern.source;
-    if (IS_UNDEFINED(flags)) flags = input_pattern.flags;
-  }
-
-  var object = %_NewObject(GlobalRegExp, newtarget);
-  return RegExpInitialize(object, pattern, flags);
-}
-
-
 // ES#sec-regexp.prototype.compile RegExp.prototype.compile (pattern, flags)
 function RegExpCompileJS(pattern, flags) {
   if (!IS_REGEXP(this)) {
@@ -163,105 +133,6 @@
 endmacro
 
 
-function RegExpExecNoTests(regexp, string, start) {
-  // Must be called with RegExp, string and positive integer as arguments.
-  var matchInfo = %_RegExpExec(regexp, string, start, RegExpLastMatchInfo);
-  if (matchInfo !== null) {
-    // ES6 21.2.5.2.2 step 18.
-    if (REGEXP_STICKY(regexp)) regexp.lastIndex = matchInfo[CAPTURE1];
-    RETURN_NEW_RESULT_FROM_MATCH_INFO(matchInfo, string);
-  }
-  regexp.lastIndex = 0;
-  return null;
-}
-
-
-// ES#sec-regexp.prototype.exec
-// RegExp.prototype.exec ( string )
-function RegExpSubclassExecJS(string) {
-  if (!IS_REGEXP(this)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'RegExp.prototype.exec', this);
-  }
-
-  string = TO_STRING(string);
-  var lastIndex = this.lastIndex;
-
-  // Conversion is required by the ES2015 specification (RegExpBuiltinExec
-  // algorithm, step 4) even if the value is discarded for non-global RegExps.
-  var i = TO_LENGTH(lastIndex);
-
-  var global = TO_BOOLEAN(REGEXP_GLOBAL(this));
-  var sticky = TO_BOOLEAN(REGEXP_STICKY(this));
-  var updateLastIndex = global || sticky;
-  if (updateLastIndex) {
-    if (i > string.length) {
-      this.lastIndex = 0;
-      return null;
-    }
-  } else {
-    i = 0;
-  }
-
-  // matchIndices is either null or the RegExpLastMatchInfo array.
-  // TODO(littledan): Whether a RegExp is sticky is compiled into the RegExp
-  // itself, but ES2015 allows monkey-patching this property to differ from
-  // the internal flags. If it differs, recompile a different RegExp?
-  var matchIndices = %_RegExpExec(this, string, i, RegExpLastMatchInfo);
-
-  if (IS_NULL(matchIndices)) {
-    this.lastIndex = 0;
-    return null;
-  }
-
-  // Successful match.
-  if (updateLastIndex) {
-    this.lastIndex = RegExpLastMatchInfo[CAPTURE1];
-  }
-  RETURN_NEW_RESULT_FROM_MATCH_INFO(matchIndices, string);
-}
-%FunctionRemovePrototype(RegExpSubclassExecJS);
-
-
-// Legacy implementation of RegExp.prototype.exec
-function RegExpExecJS(string) {
-  if (!IS_REGEXP(this)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'RegExp.prototype.exec', this);
-  }
-
-  string = TO_STRING(string);
-  var lastIndex = this.lastIndex;
-
-  // Conversion is required by the ES2015 specification (RegExpBuiltinExec
-  // algorithm, step 4) even if the value is discarded for non-global RegExps.
-  var i = TO_LENGTH(lastIndex);
-
-  var updateLastIndex = REGEXP_GLOBAL(this) || REGEXP_STICKY(this);
-  if (updateLastIndex) {
-    if (i < 0 || i > string.length) {
-      this.lastIndex = 0;
-      return null;
-    }
-  } else {
-    i = 0;
-  }
-
-  // matchIndices is either null or the RegExpLastMatchInfo array.
-  var matchIndices = %_RegExpExec(this, string, i, RegExpLastMatchInfo);
-
-  if (IS_NULL(matchIndices)) {
-    this.lastIndex = 0;
-    return null;
-  }
-
-  // Successful match.
-  if (updateLastIndex) {
-    this.lastIndex = RegExpLastMatchInfo[CAPTURE1];
-  }
-  RETURN_NEW_RESULT_FROM_MATCH_INFO(matchIndices, string);
-}
-
 
 // ES#sec-regexpexec Runtime Semantics: RegExpExec ( R, S )
 // Also takes an optional exec method in case our caller
@@ -282,65 +153,6 @@
 %SetForceInlineFlag(RegExpSubclassExec);
 
 
-// One-element cache for the simplified test regexp.
-var regexp_key;
-var regexp_val;
-
-// Legacy implementation of RegExp.prototype.test
-// Section 15.10.6.3 doesn't actually make sense, but the intention seems to be
-// that test is defined in terms of String.prototype.exec. However, it probably
-// means the original value of String.prototype.exec, which is what everybody
-// else implements.
-function RegExpTest(string) {
-  if (!IS_REGEXP(this)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'RegExp.prototype.test', this);
-  }
-  string = TO_STRING(string);
-
-  var lastIndex = this.lastIndex;
-
-  // Conversion is required by the ES2015 specification (RegExpBuiltinExec
-  // algorithm, step 4) even if the value is discarded for non-global RegExps.
-  var i = TO_LENGTH(lastIndex);
-
-  if (REGEXP_GLOBAL(this) || REGEXP_STICKY(this)) {
-    if (i < 0 || i > string.length) {
-      this.lastIndex = 0;
-      return false;
-    }
-    // matchIndices is either null or the RegExpLastMatchInfo array.
-    var matchIndices = %_RegExpExec(this, string, i, RegExpLastMatchInfo);
-    if (IS_NULL(matchIndices)) {
-      this.lastIndex = 0;
-      return false;
-    }
-    this.lastIndex = RegExpLastMatchInfo[CAPTURE1];
-    return true;
-  } else {
-    // Non-global, non-sticky regexp.
-    // Remove irrelevant preceeding '.*' in a test regexp.  The expression
-    // checks whether this.source starts with '.*' and that the third char is
-    // not a '?'.  But see https://code.google.com/p/v8/issues/detail?id=3560
-    var regexp = this;
-    var source = REGEXP_SOURCE(regexp);
-    if (source.length >= 3 &&
-        %_StringCharCodeAt(source, 0) == 46 &&  // '.'
-        %_StringCharCodeAt(source, 1) == 42 &&  // '*'
-        %_StringCharCodeAt(source, 2) != 63) {  // '?'
-      regexp = TrimRegExp(regexp);
-    }
-    // matchIndices is either null or the RegExpLastMatchInfo array.
-    var matchIndices = %_RegExpExec(regexp, string, 0, RegExpLastMatchInfo);
-    if (IS_NULL(matchIndices)) {
-      this.lastIndex = 0;
-      return false;
-    }
-    return true;
-  }
-}
-
-
 // ES#sec-regexp.prototype.test RegExp.prototype.test ( S )
 function RegExpSubclassTest(string) {
   if (!IS_RECEIVER(this)) {
@@ -353,18 +165,6 @@
 }
 %FunctionRemovePrototype(RegExpSubclassTest);
 
-function TrimRegExp(regexp) {
-  if (regexp_key !== regexp) {
-    regexp_key = regexp;
-    regexp_val =
-      new GlobalRegExp(
-          %_SubString(REGEXP_SOURCE(regexp), 2, REGEXP_SOURCE(regexp).length),
-          (REGEXP_IGNORE_CASE(regexp) ? REGEXP_MULTILINE(regexp) ? "im" : "i"
-                                      : REGEXP_MULTILINE(regexp) ? "m" : ""));
-  }
-  return regexp_val;
-}
-
 
 function RegExpToString() {
   if (!IS_RECEIVER(this)) {
@@ -383,14 +183,13 @@
   var first = %_StringCharCodeAt(subject, index);
   if (first < 0xD800 || first > 0xDBFF) return false;
   var second = %_StringCharCodeAt(subject, index + 1);
-  return second >= 0xDC00 || second <= 0xDFFF;
+  return second >= 0xDC00 && second <= 0xDFFF;
 }
 
 
-// Legacy implementation of RegExp.prototype[Symbol.split] which
+// Fast path implementation of RegExp.prototype[Symbol.split] which
 // doesn't properly call the underlying exec, @@species methods
 function RegExpSplit(string, limit) {
-  // TODO(yangguo): allow non-regexp receivers.
   if (!IS_REGEXP(this)) {
     throw %make_type_error(kIncompatibleMethodReceiver,
                         "RegExp.prototype.@@split", this);
@@ -473,15 +272,11 @@
   var constructor = SpeciesConstructor(this, GlobalRegExp);
   var flags = TO_STRING(this.flags);
 
-  // TODO(adamk): this fast path is wrong with respect to this.global
-  // and this.sticky, but hopefully the spec will remove those gets
-  // and thus make the assumption of 'exec' having no side-effects
-  // more correct. Also, we doesn't ensure that 'exec' is actually
-  // a data property on RegExp.prototype.
-  var exec;
+  // TODO(adamk): this fast path is wrong as we don't ensure that 'exec'
+  // is actually a data property on RegExp.prototype.
   if (IS_REGEXP(this) && constructor === GlobalRegExp) {
-    exec = this.exec;
-    if (exec === RegExpSubclassExecJS) {
+    var exec = this.exec;
+    if (exec === RegExpExecJS) {
       return %_Call(RegExpSplit, this, string, limit);
     }
   }
@@ -505,9 +300,7 @@
   var stringIndex = prevStringIndex;
   while (stringIndex < size) {
     splitter.lastIndex = stringIndex;
-    result = RegExpSubclassExec(splitter, string, exec);
-    // Ensure exec will be read again on the next loop through.
-    exec = UNDEFINED;
+    result = RegExpSubclassExec(splitter, string);
     if (IS_NULL(result)) {
       stringIndex += AdvanceStringIndex(string, stringIndex, unicode);
     } else {
@@ -697,6 +490,31 @@
   return result + %_SubString(subject, endOfMatch, subject.length);
 }
 
+// Wraps access to matchInfo's captures into a format understood by
+// GetSubstitution.
+function MatchInfoCaptureWrapper(matches, subject) {
+  this.length = NUMBER_OF_CAPTURES(matches) >> 1;
+  this.match = matches;
+  this.subject = subject;
+}
+
+MatchInfoCaptureWrapper.prototype.at = function(ix) {
+  const match = this.match;
+  const start = match[CAPTURE(ix << 1)];
+  if (start < 0) return UNDEFINED;
+  return %_SubString(this.subject, start, match[CAPTURE((ix << 1) + 1)]);
+};
+%SetForceInlineFlag(MatchInfoCaptureWrapper.prototype.at);
+
+function ArrayCaptureWrapper(array) {
+  this.length = array.length;
+  this.array = array;
+}
+
+ArrayCaptureWrapper.prototype.at = function(ix) {
+  return this.array[ix];
+};
+%SetForceInlineFlag(ArrayCaptureWrapper.prototype.at);
 
 function RegExpReplace(string, replace) {
   if (!IS_REGEXP(this)) {
@@ -720,9 +538,17 @@
         return %_SubString(subject, 0, match[CAPTURE0]) +
                %_SubString(subject, match[CAPTURE1], subject.length)
       }
-      return ExpandReplacement(replace, subject, RegExpLastMatchInfo,
-                                 %_SubString(subject, 0, match[CAPTURE0])) +
-             %_SubString(subject, match[CAPTURE1], subject.length);
+      const captures = new MatchInfoCaptureWrapper(match, subject);
+      const start = match[CAPTURE0];
+      const end = match[CAPTURE1];
+
+      const prefix = %_SubString(subject, 0, start);
+      const matched = %_SubString(subject, start, end);
+      const suffix = %_SubString(subject, end, subject.length);
+
+      return prefix +
+             GetSubstitution(matched, subject, start, captures, replace) +
+             suffix;
     }
 
     // Global regexp search, string replace.
@@ -744,8 +570,6 @@
 // GetSubstitution(matched, str, position, captures, replacement)
 // Expand the $-expressions in the string and return a new string with
 // the result.
-// TODO(littledan): Call this function from String.prototype.replace instead
-// of the very similar ExpandReplacement in src/js/string.js
 function GetSubstitution(matched, string, position, captures, replacement) {
   var matchLength = matched.length;
   var stringLength = string.length;
@@ -794,7 +618,7 @@
           }
         }
         if (scaledIndex != 0 && scaledIndex < capturesLength) {
-          var capture = captures[scaledIndex];
+          var capture = captures.at(scaledIndex);
           if (!IS_UNDEFINED(capture)) result += capture;
           pos += advance;
         } else {
@@ -869,16 +693,12 @@
     this.lastIndex = 0;
   }
 
-  // TODO(adamk): this fast path is wrong with respect to this.global
-  // and this.sticky, but hopefully the spec will remove those gets
-  // and thus make the assumption of 'exec' having no side-effects
-  // more correct. Also, we doesn't ensure that 'exec' is actually
-  // a data property on RegExp.prototype, nor does the fast path
-  // correctly handle lastIndex setting.
+  // TODO(adamk): this fast path is wrong as we don't ensure that 'exec'
+  // is actually a data property on RegExp.prototype.
   var exec;
   if (IS_REGEXP(this)) {
     exec = this.exec;
-    if (exec === RegExpSubclassExecJS) {
+    if (exec === RegExpExecJS) {
       return %_Call(RegExpReplace, this, string, replace);
     }
   }
@@ -922,7 +742,8 @@
       replacement = %reflect_apply(replace, UNDEFINED, parameters, 0,
                                    parameters.length);
     } else {
-      replacement = GetSubstitution(matched, string, position, captures,
+      const capturesWrapper = new ArrayCaptureWrapper(captures);
+      replacement = GetSubstitution(matched, string, position, capturesWrapper,
                                     replace);
     }
     if (position >= nextSourcePosition) {
@@ -946,9 +767,10 @@
   }
   string = TO_STRING(string);
   var previousLastIndex = this.lastIndex;
-  this.lastIndex = 0;
+  if (previousLastIndex != 0) this.lastIndex = 0;
   var result = RegExpSubclassExec(this, string);
-  this.lastIndex = previousLastIndex;
+  var currentLastIndex = this.lastIndex;
+  if (currentLastIndex != previousLastIndex) this.lastIndex = previousLastIndex;
   if (IS_NULL(result)) return -1;
   return result.index;
 }
@@ -1035,7 +857,6 @@
 // ES6 21.2.5.4.
 function RegExpGetGlobal() {
   if (!IS_REGEXP(this)) {
-    // TODO(littledan): Remove this RegExp compat workaround
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeOldFlagGetter);
       return UNDEFINED;
@@ -1050,7 +871,6 @@
 // ES6 21.2.5.5.
 function RegExpGetIgnoreCase() {
   if (!IS_REGEXP(this)) {
-    // TODO(littledan): Remove this RegExp compat workaround
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeOldFlagGetter);
       return UNDEFINED;
@@ -1064,7 +884,6 @@
 // ES6 21.2.5.7.
 function RegExpGetMultiline() {
   if (!IS_REGEXP(this)) {
-    // TODO(littledan): Remove this RegExp compat workaround
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeOldFlagGetter);
       return UNDEFINED;
@@ -1078,7 +897,6 @@
 // ES6 21.2.5.10.
 function RegExpGetSource() {
   if (!IS_REGEXP(this)) {
-    // TODO(littledan): Remove this RegExp compat workaround
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeSourceGetter);
       return "(?:)";
@@ -1092,8 +910,6 @@
 // ES6 21.2.5.12.
 function RegExpGetSticky() {
   if (!IS_REGEXP(this)) {
-    // Compat fix: RegExp.prototype.sticky == undefined; UseCounter tracks it
-    // TODO(littledan): Remove this workaround or standardize it
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeStickyGetter);
       return UNDEFINED;
@@ -1108,7 +924,6 @@
 // ES6 21.2.5.15.
 function RegExpGetUnicode() {
   if (!IS_REGEXP(this)) {
-    // TODO(littledan): Remove this RegExp compat workaround
     if (this === GlobalRegExpPrototype) {
       %IncrementUseCounter(kRegExpPrototypeUnicodeGetter);
       return UNDEFINED;
@@ -1127,17 +942,9 @@
 
 // -------------------------------------------------------------------
 
-%FunctionSetInstanceClassName(GlobalRegExp, 'RegExp');
-GlobalRegExpPrototype = new GlobalObject();
-%FunctionSetPrototype(GlobalRegExp, GlobalRegExpPrototype);
-%AddNamedProperty(
-    GlobalRegExp.prototype, 'constructor', GlobalRegExp, DONT_ENUM);
-%SetCode(GlobalRegExp, RegExpConstructor);
-
 utils.InstallGetter(GlobalRegExp, speciesSymbol, RegExpSpecies);
 
 utils.InstallFunctions(GlobalRegExp.prototype, DONT_ENUM, [
-  "exec", RegExpSubclassExecJS,
   "test", RegExpSubclassTest,
   "toString", RegExpToString,
   "compile", RegExpCompileJS,
@@ -1166,11 +973,20 @@
   LAST_INPUT(RegExpLastMatchInfo) = TO_STRING(string);
 };
 
+// TODO(jgruber): All of these getters and setters were intended to be installed
+// with various attributes (e.g. DONT_ENUM | DONT_DELETE), but
+// InstallGetterSetter had a bug which ignored the passed attributes and
+// simply installed as DONT_ENUM instead. We might want to change back
+// to the intended attributes at some point.
+// On the other hand, installing attributes as DONT_ENUM matches the draft
+// specification at
+// https://github.com/claudepache/es-regexp-legacy-static-properties
+
 %OptimizeObjectForAddingMultipleProperties(GlobalRegExp, 22);
 utils.InstallGetterSetter(GlobalRegExp, 'input', RegExpGetInput, RegExpSetInput,
-                          DONT_DELETE);
+                          DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, '$_', RegExpGetInput, RegExpSetInput,
-                          DONT_ENUM | DONT_DELETE);
+                          DONT_ENUM);
 
 
 var NoOpSetter = function(ignored) {};
@@ -1178,28 +994,30 @@
 
 // Static properties set by a successful match.
 utils.InstallGetterSetter(GlobalRegExp, 'lastMatch', RegExpGetLastMatch,
-                          NoOpSetter, DONT_DELETE);
+                          NoOpSetter, DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, '$&', RegExpGetLastMatch, NoOpSetter,
-                          DONT_ENUM | DONT_DELETE);
+                          DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, 'lastParen', RegExpGetLastParen,
-                          NoOpSetter, DONT_DELETE);
+                          NoOpSetter, DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, '$+', RegExpGetLastParen, NoOpSetter,
-                          DONT_ENUM | DONT_DELETE);
+                          DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, 'leftContext', RegExpGetLeftContext,
-                          NoOpSetter, DONT_DELETE);
+                          NoOpSetter, DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, '$`', RegExpGetLeftContext, NoOpSetter,
-                          DONT_ENUM | DONT_DELETE);
+                          DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, 'rightContext', RegExpGetRightContext,
-                          NoOpSetter, DONT_DELETE);
+                          NoOpSetter, DONT_ENUM);
 utils.InstallGetterSetter(GlobalRegExp, "$'", RegExpGetRightContext, NoOpSetter,
-                          DONT_ENUM | DONT_DELETE);
+                          DONT_ENUM);
 
 for (var i = 1; i < 10; ++i) {
   utils.InstallGetterSetter(GlobalRegExp, '$' + i, RegExpMakeCaptureGetter(i),
-                            NoOpSetter, DONT_DELETE);
+                            NoOpSetter, DONT_ENUM);
 }
 %ToFastProperties(GlobalRegExp);
 
+%InstallToContext(["regexp_last_match_info", RegExpLastMatchInfo]);
+
 // -------------------------------------------------------------------
 // Internal
 
@@ -1228,13 +1046,13 @@
 // Exports
 
 utils.Export(function(to) {
+  to.GetSubstitution = GetSubstitution;
   to.InternalRegExpMatch = InternalRegExpMatch;
   to.InternalRegExpReplace = InternalRegExpReplace;
   to.IsRegExp = IsRegExp;
   to.RegExpExec = DoRegExpExec;
   to.RegExpInitialize = RegExpInitialize;
   to.RegExpLastMatchInfo = RegExpLastMatchInfo;
-  to.RegExpTest = RegExpTest;
 });
 
 })
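
Note on the hunks above: the effect of installing the legacy RegExp statics with DONT_ENUM (and a NoOpSetter) is observable from script. A minimal sketch of the expected behavior, illustrative only and not part of the patch:

  /b/.exec("abc");
  RegExp.lastMatch;                                   // "b"
  RegExp["$&"];                                       // "b" (alias of lastMatch)
  RegExp.leftContext + "|" + RegExp.rightContext;     // "a|c"
  RegExp.lastMatch = "ignored";                       // NoOpSetter: assignment does nothing
  RegExp.lastMatch;                                   // still "b"
  Object.getOwnPropertyDescriptor(RegExp, "lastMatch").enumerable;  // false (DONT_ENUM)
  Object.keys(RegExp);                                // [] -- none of the statics enumerate

Because only DONT_ENUM is applied, the accessors stay configurable, which is what the linked legacy-static-properties draft expects.
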
diff --git a/src/js/string-iterator.js b/src/js/string-iterator.js
deleted file mode 100644
index 2319e5a..0000000
--- a/src/js/string-iterator.js
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-(function(global, utils) {
-
-"use strict";
-
-%CheckIsBootstrapping();
-
-// -------------------------------------------------------------------
-// Imports
-
-var GlobalString = global.String;
-var IteratorPrototype = utils.ImportNow("IteratorPrototype");
-var iteratorSymbol = utils.ImportNow("iterator_symbol");
-var stringIteratorIteratedStringSymbol =
-    utils.ImportNow("string_iterator_iterated_string_symbol");
-var stringIteratorNextIndexSymbol =
-    utils.ImportNow("string_iterator_next_index_symbol");
-var toStringTagSymbol = utils.ImportNow("to_string_tag_symbol");
-
-// -------------------------------------------------------------------
-
-function StringIterator() {}
-
-
-// 21.1.5.1 CreateStringIterator Abstract Operation
-function CreateStringIterator(string) {
-  CHECK_OBJECT_COERCIBLE(string, 'String.prototype[Symbol.iterator]');
-  var s = TO_STRING(string);
-  var iterator = new StringIterator;
-  SET_PRIVATE(iterator, stringIteratorIteratedStringSymbol, s);
-  SET_PRIVATE(iterator, stringIteratorNextIndexSymbol, 0);
-  return iterator;
-}
-
-
-// ES6 section 21.1.5.2.1 %StringIteratorPrototype%.next ( )
-function StringIteratorNext() {
-  var iterator = this;
-  var value = UNDEFINED;
-  var done = true;
-
-  if (!IS_RECEIVER(iterator) ||
-      !HAS_DEFINED_PRIVATE(iterator, stringIteratorNextIndexSymbol)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'String Iterator.prototype.next');
-  }
-
-  var s = GET_PRIVATE(iterator, stringIteratorIteratedStringSymbol);
-  if (!IS_UNDEFINED(s)) {
-    var position = GET_PRIVATE(iterator, stringIteratorNextIndexSymbol);
-    var length = TO_UINT32(s.length);
-    if (position >= length) {
-      SET_PRIVATE(iterator, stringIteratorIteratedStringSymbol, UNDEFINED);
-    } else {
-      var first = %_StringCharCodeAt(s, position);
-      value = %_StringCharFromCode(first);
-      done = false;
-      position++;
-
-      if (first >= 0xD800 && first <= 0xDBFF && position < length) {
-        var second = %_StringCharCodeAt(s, position);
-        if (second >= 0xDC00 && second <= 0xDFFF) {
-          value += %_StringCharFromCode(second);
-          position++;
-        }
-      }
-
-      SET_PRIVATE(iterator, stringIteratorNextIndexSymbol, position);
-    }
-  }
-  return %_CreateIterResultObject(value, done);
-}
-
-
-// 21.1.3.27 String.prototype [ @@iterator ]( )
-function StringPrototypeIterator() {
-  return CreateStringIterator(this);
-}
-
-//-------------------------------------------------------------------
-
-%FunctionSetPrototype(StringIterator, {__proto__: IteratorPrototype});
-%FunctionSetInstanceClassName(StringIterator, 'String Iterator');
-
-utils.InstallFunctions(StringIterator.prototype, DONT_ENUM, [
-  'next', StringIteratorNext
-]);
-%AddNamedProperty(StringIterator.prototype, toStringTagSymbol,
-                  "String Iterator", READ_ONLY | DONT_ENUM);
-
-utils.SetFunctionName(StringPrototypeIterator, iteratorSymbol);
-%AddNamedProperty(GlobalString.prototype, iteratorSymbol,
-                  StringPrototypeIterator, DONT_ENUM);
-
-})
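
The deleted string-iterator.js implemented String.prototype[Symbol.iterator] in JS, including the surrogate-pair handling visible above; the observable behavior (presumably now provided by a non-JS implementation) is unchanged. A quick sketch, illustrative only:

  var s = "a\uD83D\uDE00b";   // "a" + U+1F600 (a surrogate pair) + "b"
  s.length;                   // 4 -- UTF-16 code units
  [...s];                     // ["a", "\uD83D\uDE00", "b"] -- the pair is kept together
  [...s].length;              // 3 -- code points
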
diff --git a/src/js/string.js b/src/js/string.js
index 38caab7..7c552a9 100644
--- a/src/js/string.js
+++ b/src/js/string.js
@@ -10,6 +10,7 @@
 // Imports
 
 var ArrayJoin;
+var GetSubstitution;
 var GlobalRegExp = global.RegExp;
 var GlobalString = global.String;
 var IsRegExp;
@@ -23,6 +24,7 @@
 
 utils.Import(function(from) {
   ArrayJoin = from.ArrayJoin;
+  GetSubstitution = from.GetSubstitution;
   IsRegExp = from.IsRegExp;
   MaxSimple = from.MaxSimple;
   MinSimple = from.MinSimple;
@@ -59,45 +61,6 @@
 %FunctionSetLength(StringIndexOf, 1);
 
 
-// ECMA-262 section 15.5.4.8
-function StringLastIndexOf(pat, pos) {  // length == 1
-  CHECK_OBJECT_COERCIBLE(this, "String.prototype.lastIndexOf");
-
-  var sub = TO_STRING(this);
-  var subLength = sub.length;
-  var pat = TO_STRING(pat);
-  var patLength = pat.length;
-  var index = subLength - patLength;
-  var position = TO_NUMBER(pos);
-  if (!NUMBER_IS_NAN(position)) {
-    position = TO_INTEGER(position);
-    if (position < 0) {
-      position = 0;
-    }
-    if (position + patLength < subLength) {
-      index = position;
-    }
-  }
-  if (index < 0) {
-    return -1;
-  }
-  return %StringLastIndexOf(sub, pat, index);
-}
-
-%FunctionSetLength(StringLastIndexOf, 1);
-
-
-// ECMA-262 section 15.5.4.9
-//
-// This function is implementation specific.  For now, we do not
-// do anything locale specific.
-function StringLocaleCompareJS(other) {
-  CHECK_OBJECT_COERCIBLE(this, "String.prototype.localeCompare");
-
-  return %StringLocaleCompare(TO_STRING(this), TO_STRING(other));
-}
-
-
 // ES6 21.1.3.11.
 function StringMatchJS(pattern) {
   CHECK_OBJECT_COERCIBLE(this, "String.prototype.match");
@@ -118,38 +81,6 @@
 }
 
 
-// ECMA-262 v6, section 21.1.3.12
-//
-// For now we do nothing, as proper normalization requires big tables.
-// If Intl is enabled, then i18n.js will override it and provide the the
-// proper functionality.
-function StringNormalize(formArg) {  // length == 0
-  CHECK_OBJECT_COERCIBLE(this, "String.prototype.normalize");
-  var s = TO_STRING(this);
-
-  var form = IS_UNDEFINED(formArg) ? 'NFC' : TO_STRING(formArg);
-
-  var NORMALIZATION_FORMS = ['NFC', 'NFD', 'NFKC', 'NFKD'];
-  var normalizationForm = %ArrayIndexOf(NORMALIZATION_FORMS, form, 0);
-  if (normalizationForm === -1) {
-    throw %make_range_error(kNormalizationForm,
-                         %_Call(ArrayJoin, NORMALIZATION_FORMS, ', '));
-  }
-
-  return s;
-}
-
-%FunctionSetLength(StringNormalize, 0);
-
-
-// This has the same size as the RegExpLastMatchInfo array, and can be used
-// for functions that expect that structure to be returned.  It is used when
-// the needle is a string rather than a regexp.  In this case we can't update
-// lastMatchArray without erroneously affecting the properties on the global
-// RegExp object.
-var reusableMatchInfo = [2, "", "", -1, -1];
-
-
 // ES6, section 21.1.3.14
 function StringReplace(search, replace) {
   CHECK_OBJECT_COERCIBLE(this, "String.prototype.replace");
@@ -201,101 +132,18 @@
   if (IS_CALLABLE(replace)) {
     result += replace(search, start, subject);
   } else {
-    reusableMatchInfo[CAPTURE0] = start;
-    reusableMatchInfo[CAPTURE1] = end;
-    result = ExpandReplacement(TO_STRING(replace),
-                               subject,
-                               reusableMatchInfo,
-                               result);
+    // In this case, we don't have any capture groups and can get away with
+    // faking the captures object by simply setting its length to 1.
+    const captures = { length: 1 };
+    const matched = %_SubString(subject, start, end);
+    result += GetSubstitution(matched, subject, start, captures,
+                              TO_STRING(replace));
   }
 
   return result + %_SubString(subject, end, subject.length);
 }
 
 
-// Expand the $-expressions in the string and return a new string with
-// the result.
-function ExpandReplacement(string, subject, matchInfo, result) {
-  var length = string.length;
-  var next = %StringIndexOf(string, '$', 0);
-  if (next < 0) {
-    if (length > 0) result += string;
-    return result;
-  }
-
-  if (next > 0) result += %_SubString(string, 0, next);
-
-  while (true) {
-    var expansion = '$';
-    var position = next + 1;
-    if (position < length) {
-      var peek = %_StringCharCodeAt(string, position);
-      if (peek == 36) {         // $$
-        ++position;
-        result += '$';
-      } else if (peek == 38) {  // $& - match
-        ++position;
-        result +=
-          %_SubString(subject, matchInfo[CAPTURE0], matchInfo[CAPTURE1]);
-      } else if (peek == 96) {  // $` - prefix
-        ++position;
-        result += %_SubString(subject, 0, matchInfo[CAPTURE0]);
-      } else if (peek == 39) {  // $' - suffix
-        ++position;
-        result += %_SubString(subject, matchInfo[CAPTURE1], subject.length);
-      } else if (peek >= 48 && peek <= 57) {
-        // Valid indices are $1 .. $9, $01 .. $09 and $10 .. $99
-        var scaled_index = (peek - 48) << 1;
-        var advance = 1;
-        var number_of_captures = NUMBER_OF_CAPTURES(matchInfo);
-        if (position + 1 < string.length) {
-          var next = %_StringCharCodeAt(string, position + 1);
-          if (next >= 48 && next <= 57) {
-            var new_scaled_index = scaled_index * 10 + ((next - 48) << 1);
-            if (new_scaled_index < number_of_captures) {
-              scaled_index = new_scaled_index;
-              advance = 2;
-            }
-          }
-        }
-        if (scaled_index != 0 && scaled_index < number_of_captures) {
-          var start = matchInfo[CAPTURE(scaled_index)];
-          if (start >= 0) {
-            result +=
-              %_SubString(subject, start, matchInfo[CAPTURE(scaled_index + 1)]);
-          }
-          position += advance;
-        } else {
-          result += '$';
-        }
-      } else {
-        result += '$';
-      }
-    } else {
-      result += '$';
-    }
-
-    // Go the the next $ in the string.
-    next = %StringIndexOf(string, '$', position);
-
-    // Return if there are no more $ characters in the string. If we
-    // haven't reached the end, we need to append the suffix.
-    if (next < 0) {
-      if (position < length) {
-        result += %_SubString(string, position, length);
-      }
-      return result;
-    }
-
-    // Append substring between the previous and the next $ character.
-    if (next > position) {
-      result += %_SubString(string, position, next);
-    }
-  }
-  return result;
-}
-
-
 // ES6 21.1.3.15.
 function StringSearch(pattern) {
   CHECK_OBJECT_COERCIBLE(this, "String.prototype.search");
@@ -390,55 +238,6 @@
 }
 
 
-// ECMA-262 section 15.5.4.15
-function StringSubstring(start, end) {
-  CHECK_OBJECT_COERCIBLE(this, "String.prototype.subString");
-
-  var s = TO_STRING(this);
-  var s_len = s.length;
-
-  var start_i = TO_INTEGER(start);
-  if (start_i < 0) {
-    start_i = 0;
-  } else if (start_i > s_len) {
-    start_i = s_len;
-  }
-
-  var end_i = s_len;
-  if (!IS_UNDEFINED(end)) {
-    end_i = TO_INTEGER(end);
-    if (end_i > s_len) {
-      end_i = s_len;
-    } else {
-      if (end_i < 0) end_i = 0;
-      if (start_i > end_i) {
-        var tmp = end_i;
-        end_i = start_i;
-        start_i = tmp;
-      }
-    }
-  }
-
-  return %_SubString(s, start_i, end_i);
-}
-
-
-// ecma262/#sec-string.prototype.substr
-function StringSubstr(start, length) {
-  CHECK_OBJECT_COERCIBLE(this, "String.prototype.substr");
-  var s = TO_STRING(this);
-  var size = s.length;
-  start = TO_INTEGER(start);
-  length = IS_UNDEFINED(length) ? size : TO_INTEGER(length);
-
-  if (start < 0) start = MaxSimple(size + start, 0);
-  length = MinSimple(MaxSimple(length, 0), size - start);
-
-  if (length <= 0) return '';
-  return %_SubString(s, start, start + length);
-}
-
-
 // ECMA-262, 15.5.4.16
 function StringToLowerCaseJS() {
   CHECK_OBJECT_COERCIBLE(this, "String.prototype.toLowerCase");
@@ -737,17 +536,12 @@
   "endsWith", StringEndsWith,
   "includes", StringIncludes,
   "indexOf", StringIndexOf,
-  "lastIndexOf", StringLastIndexOf,
-  "localeCompare", StringLocaleCompareJS,
   "match", StringMatchJS,
-  "normalize", StringNormalize,
   "repeat", StringRepeat,
   "replace", StringReplace,
   "search", StringSearch,
   "slice", StringSlice,
   "split", StringSplitJS,
-  "substring", StringSubstring,
-  "substr", StringSubstr,
   "startsWith", StringStartsWith,
   "toLowerCase", StringToLowerCaseJS,
   "toLocaleLowerCase", StringToLocaleLowerCase,
@@ -773,15 +567,11 @@
 // Exports
 
 utils.Export(function(to) {
-  to.ExpandReplacement = ExpandReplacement;
   to.StringIndexOf = StringIndexOf;
-  to.StringLastIndexOf = StringLastIndexOf;
   to.StringMatch = StringMatchJS;
   to.StringReplace = StringReplace;
   to.StringSlice = StringSlice;
   to.StringSplit = StringSplitJS;
-  to.StringSubstr = StringSubstr;
-  to.StringSubstring = StringSubstring;
 });
 
 })
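
StringReplace now delegates $-pattern expansion for string replacements to the shared GetSubstitution imported above, instead of the removed ExpandReplacement; for a plain string search there are no capture groups, hence the faked captures object with length 1. The substitution patterns themselves are the standard ones, illustrative only:

  "abc".replace("b", "[$&]");      // "a[b]c"    -- $& is the matched substring
  "abc".replace("b", "[$`|$']");   // "a[a|c]c"  -- $` / $' are the prefix and suffix
  "abc".replace("b", "$$");        // "a$c"      -- $$ is a literal dollar sign
  "2016-11-01".replace(/(\d+)-(\d+)-(\d+)/, "$3/$2/$1");  // "01/11/2016" -- $1..$9 are captures
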
diff --git a/src/js/typedarray.js b/src/js/typedarray.js
index b97a9c8..edb3b06 100644
--- a/src/js/typedarray.js
+++ b/src/js/typedarray.js
@@ -19,7 +19,6 @@
 var GlobalArray = global.Array;
 var GlobalArrayBuffer = global.ArrayBuffer;
 var GlobalArrayBufferPrototype = GlobalArrayBuffer.prototype;
-var GlobalDataView = global.DataView;
 var GlobalObject = global.Object;
 var InnerArrayCopyWithin;
 var InnerArrayEvery;
@@ -35,7 +34,6 @@
 var InnerArraySort;
 var InnerArrayToLocaleString;
 var InternalArray = utils.InternalArray;
-var IsNaN;
 var MaxSimple;
 var MinSimple;
 var PackedArrayReverse;
@@ -84,7 +82,6 @@
   InnerArraySome = from.InnerArraySome;
   InnerArraySort = from.InnerArraySort;
   InnerArrayToLocaleString = from.InnerArrayToLocaleString;
-  IsNaN = from.IsNaN;
   MaxSimple = from.MaxSimple;
   MinSimple = from.MinSimple;
   PackedArrayReverse = from.PackedArrayReverse;
@@ -545,9 +542,9 @@
     return -1;
   } else if (x > y) {
     return 1;
-  } else if (IsNaN(x) && IsNaN(y)) {
-    return IsNaN(y) ? 0 : 1;
-  } else if (IsNaN(x)) {
+  } else if (NUMBER_IS_NAN(x) && NUMBER_IS_NAN(y)) {
+    return NUMBER_IS_NAN(y) ? 0 : 1;
+  } else if (NUMBER_IS_NAN(x)) {
     return 1;
   }
   return 0;
@@ -915,68 +912,4 @@
 
 TYPED_ARRAYS(SETUP_TYPED_ARRAY)
 
-// --------------------------- DataView -----------------------------
-
-macro DATA_VIEW_TYPES(FUNCTION)
-  FUNCTION(Int8)
-  FUNCTION(Uint8)
-  FUNCTION(Int16)
-  FUNCTION(Uint16)
-  FUNCTION(Int32)
-  FUNCTION(Uint32)
-  FUNCTION(Float32)
-  FUNCTION(Float64)
-endmacro
-
-
-macro DATA_VIEW_GETTER_SETTER(TYPENAME)
-function DataViewGetTYPENAMEJS(offset, little_endian) {
-  if (!IS_DATAVIEW(this)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'DataView.getTYPENAME', this);
-  }
-  offset = IS_UNDEFINED(offset) ? 0 : ToIndex(offset, kInvalidDataViewAccessorOffset);
-  return %DataViewGetTYPENAME(this, offset, !!little_endian);
-}
-%FunctionSetLength(DataViewGetTYPENAMEJS, 1);
-
-function DataViewSetTYPENAMEJS(offset, value, little_endian) {
-  if (!IS_DATAVIEW(this)) {
-    throw %make_type_error(kIncompatibleMethodReceiver,
-                        'DataView.setTYPENAME', this);
-  }
-  offset = IS_UNDEFINED(offset) ? 0 : ToIndex(offset, kInvalidDataViewAccessorOffset);
-  %DataViewSetTYPENAME(this, offset, TO_NUMBER(value), !!little_endian);
-}
-%FunctionSetLength(DataViewSetTYPENAMEJS, 2);
-endmacro
-
-DATA_VIEW_TYPES(DATA_VIEW_GETTER_SETTER)
-
-utils.InstallFunctions(GlobalDataView.prototype, DONT_ENUM, [
-  "getInt8", DataViewGetInt8JS,
-  "setInt8", DataViewSetInt8JS,
-
-  "getUint8", DataViewGetUint8JS,
-  "setUint8", DataViewSetUint8JS,
-
-  "getInt16", DataViewGetInt16JS,
-  "setInt16", DataViewSetInt16JS,
-
-  "getUint16", DataViewGetUint16JS,
-  "setUint16", DataViewSetUint16JS,
-
-  "getInt32", DataViewGetInt32JS,
-  "setInt32", DataViewSetInt32JS,
-
-  "getUint32", DataViewGetUint32JS,
-  "setUint32", DataViewSetUint32JS,
-
-  "getFloat32", DataViewGetFloat32JS,
-  "setFloat32", DataViewSetFloat32JS,
-
-  "getFloat64", DataViewGetFloat64JS,
-  "setFloat64", DataViewSetFloat64JS
-]);
-
 })
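
The comparator change above only swaps the NaN test (IsNaN -> NUMBER_IS_NAN); the observable ordering is unchanged: NaN values sort to the end of a typed array. Illustrative only:

  Float64Array.of(3, NaN, 1, NaN, 2).sort();
  // Float64Array [1, 2, 3, NaN, NaN]
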
diff --git a/src/js/v8natives.js b/src/js/v8natives.js
index 0c0a792..93636a0 100644
--- a/src/js/v8natives.js
+++ b/src/js/v8natives.js
@@ -18,20 +18,6 @@
 // ----------------------------------------------------------------------------
 
 
-// ES6 18.2.3 isNaN(number)
-function GlobalIsNaN(number) {
-  number = TO_NUMBER(number);
-  return NUMBER_IS_NAN(number);
-}
-
-
-// ES6 18.2.2 isFinite(number)
-function GlobalIsFinite(number) {
-  number = TO_NUMBER(number);
-  return NUMBER_IS_FINITE(number);
-}
-
-
 // ES6 18.2.5 parseInt(string, radix)
 function GlobalParseInt(string, radix) {
   if (IS_UNDEFINED(radix) || radix === 10 || radix === 0) {
@@ -91,8 +77,6 @@
 
 // Set up non-enumerable function on the global object.
 utils.InstallFunctions(global, DONT_ENUM, [
-  "isNaN", GlobalIsNaN,
-  "isFinite", GlobalIsFinite,
   "parseInt", GlobalParseInt,
   "parseFloat", GlobalParseFloat,
 ]);
@@ -207,38 +191,6 @@
 // ----------------------------------------------------------------------------
 // Number
 
-// Harmony isFinite.
-function NumberIsFinite(number) {
-  return IS_NUMBER(number) && NUMBER_IS_FINITE(number);
-}
-
-
-// Harmony isInteger
-function NumberIsInteger(number) {
-  return NumberIsFinite(number) && TO_INTEGER(number) == number;
-}
-
-
-// Harmony isNaN.
-function NumberIsNaN(number) {
-  return IS_NUMBER(number) && NUMBER_IS_NAN(number);
-}
-
-
-// Harmony isSafeInteger
-function NumberIsSafeInteger(number) {
-  if (NumberIsFinite(number)) {
-    var integral = TO_INTEGER(number);
-    if (integral == number) {
-      return -kMaxSafeInteger <= integral && integral <= kMaxSafeInteger;
-    }
-  }
-  return false;
-}
-
-
-// ----------------------------------------------------------------------------
-
 utils.InstallConstants(GlobalNumber, [
   // ECMA-262 section 15.7.3.1.
   "MAX_VALUE", 1.7976931348623157e+308,
@@ -260,15 +212,10 @@
 
 // Harmony Number constructor additions
 utils.InstallFunctions(GlobalNumber, DONT_ENUM, [
-  "isFinite", NumberIsFinite,
-  "isInteger", NumberIsInteger,
-  "isNaN", NumberIsNaN,
-  "isSafeInteger", NumberIsSafeInteger,
   "parseInt", GlobalParseInt,
   "parseFloat", GlobalParseFloat
 ]);
 
-%SetForceInlineFlag(NumberIsNaN);
 
 
 // ----------------------------------------------------------------------------
@@ -295,9 +242,6 @@
 utils.Export(function(to) {
   to.GetIterator = GetIterator;
   to.GetMethod = GetMethod;
-  to.IsNaN = GlobalIsNaN;
-  to.NumberIsNaN = NumberIsNaN;
-  to.NumberIsInteger = NumberIsInteger;
   to.ObjectHasOwnProperty = GlobalObject.prototype.hasOwnProperty;
 });
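
The global isNaN/isFinite and the Number.isFinite/isInteger/isNaN/isSafeInteger helpers are removed from the JS natives here (presumably reimplemented elsewhere in the engine); their language-level semantics do not change. As a reminder of the difference between the coercing globals and the non-coercing Number variants, illustrative only:

  isNaN("foo");                              // true  -- the global coerces via ToNumber first
  Number.isNaN("foo");                       // false -- only true for an actual NaN number
  isFinite("42");                            // true
  Number.isFinite("42");                     // false -- no string-to-number coercion
  Number.isSafeInteger(Math.pow(2, 53));     // false
  Number.isSafeInteger(Math.pow(2, 53) - 1); // true
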
 
diff --git a/src/json-parser.cc b/src/json-parser.cc
index bf2fd0d..576100a 100644
--- a/src/json-parser.cc
+++ b/src/json-parser.cc
@@ -11,10 +11,10 @@
 #include "src/field-type.h"
 #include "src/messages.h"
 #include "src/objects-inl.h"
-#include "src/parsing/scanner.h"
 #include "src/parsing/token.h"
 #include "src/property-descriptor.h"
 #include "src/transitions.h"
+#include "src/unicode-cache.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/libplatform/default-platform.cc b/src/libplatform/default-platform.cc
index 2f81248..f64143e 100644
--- a/src/libplatform/default-platform.cc
+++ b/src/libplatform/default-platform.cc
@@ -39,9 +39,14 @@
 const int DefaultPlatform::kMaxThreadPoolSize = 8;
 
 DefaultPlatform::DefaultPlatform()
-    : initialized_(false), thread_pool_size_(0), tracing_controller_(NULL) {}
+    : initialized_(false), thread_pool_size_(0) {}
 
 DefaultPlatform::~DefaultPlatform() {
+  if (tracing_controller_) {
+    tracing_controller_->StopTracing();
+    tracing_controller_.reset();
+  }
+
   base::LockGuard<base::Mutex> guard(&lock_);
   queue_.Terminate();
   if (initialized_) {
@@ -63,11 +68,6 @@
       i->second.pop();
     }
   }
-
-  if (tracing_controller_) {
-    tracing_controller_->StopTracing();
-    delete tracing_controller_;
-  }
 }
 
 
@@ -178,16 +178,17 @@
          static_cast<double>(base::Time::kMicrosecondsPerSecond);
 }
 
-
 uint64_t DefaultPlatform::AddTraceEvent(
     char phase, const uint8_t* category_enabled_flag, const char* name,
     const char* scope, uint64_t id, uint64_t bind_id, int num_args,
     const char** arg_names, const uint8_t* arg_types,
-    const uint64_t* arg_values, unsigned int flags) {
+    const uint64_t* arg_values,
+    std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+    unsigned int flags) {
   if (tracing_controller_) {
     return tracing_controller_->AddTraceEvent(
         phase, category_enabled_flag, name, scope, id, bind_id, num_args,
-        arg_names, arg_types, arg_values, flags);
+        arg_names, arg_types, arg_values, arg_convertables, flags);
   }
 
   return 0;
@@ -218,12 +219,22 @@
 
 void DefaultPlatform::SetTracingController(
     tracing::TracingController* tracing_controller) {
-  tracing_controller_ = tracing_controller;
+  tracing_controller_.reset(tracing_controller);
 }
 
 size_t DefaultPlatform::NumberOfAvailableBackgroundThreads() {
   return static_cast<size_t>(thread_pool_size_);
 }
 
+void DefaultPlatform::AddTraceStateObserver(TraceStateObserver* observer) {
+  if (!tracing_controller_) return;
+  tracing_controller_->AddTraceStateObserver(observer);
+}
+
+void DefaultPlatform::RemoveTraceStateObserver(TraceStateObserver* observer) {
+  if (!tracing_controller_) return;
+  tracing_controller_->RemoveTraceStateObserver(observer);
+}
+
 }  // namespace platform
 }  // namespace v8
diff --git a/src/libplatform/default-platform.h b/src/libplatform/default-platform.h
index 0fd7e5a..e36234f 100644
--- a/src/libplatform/default-platform.h
+++ b/src/libplatform/default-platform.h
@@ -7,6 +7,7 @@
 
 #include <functional>
 #include <map>
+#include <memory>
 #include <queue>
 #include <vector>
 
@@ -51,16 +52,21 @@
   const uint8_t* GetCategoryGroupEnabled(const char* name) override;
   const char* GetCategoryGroupName(
       const uint8_t* category_enabled_flag) override;
-  uint64_t AddTraceEvent(char phase, const uint8_t* category_enabled_flag,
-                         const char* name, const char* scope, uint64_t id,
-                         uint64_t bind_id, int32_t num_args,
-                         const char** arg_names, const uint8_t* arg_types,
-                         const uint64_t* arg_values,
-                         unsigned int flags) override;
+  using Platform::AddTraceEvent;
+  uint64_t AddTraceEvent(
+      char phase, const uint8_t* category_enabled_flag, const char* name,
+      const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
+      const char** arg_names, const uint8_t* arg_types,
+      const uint64_t* arg_values,
+      std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+      unsigned int flags) override;
   void UpdateTraceEventDuration(const uint8_t* category_enabled_flag,
                                 const char* name, uint64_t handle) override;
   void SetTracingController(tracing::TracingController* tracing_controller);
 
+  void AddTraceStateObserver(TraceStateObserver* observer) override;
+  void RemoveTraceStateObserver(TraceStateObserver* observer) override;
+
  private:
   static const int kMaxThreadPoolSize;
 
@@ -79,7 +85,7 @@
            std::priority_queue<DelayedEntry, std::vector<DelayedEntry>,
                                std::greater<DelayedEntry> > >
       main_thread_delayed_queue_;
-  tracing::TracingController* tracing_controller_;
+  std::unique_ptr<tracing::TracingController> tracing_controller_;
 
   DISALLOW_COPY_AND_ASSIGN(DefaultPlatform);
 };
diff --git a/src/libplatform/tracing/trace-object.cc b/src/libplatform/tracing/trace-object.cc
index 55be892..bb4bf71 100644
--- a/src/libplatform/tracing/trace-object.cc
+++ b/src/libplatform/tracing/trace-object.cc
@@ -5,6 +5,7 @@
 #include "include/libplatform/v8-tracing.h"
 
 #include "base/trace_event/common/trace_event_common.h"
+#include "include/v8-platform.h"
 #include "src/base/platform/platform.h"
 #include "src/base/platform/time.h"
 
@@ -30,11 +31,13 @@
   }
 }
 
-void TraceObject::Initialize(char phase, const uint8_t* category_enabled_flag,
-                             const char* name, const char* scope, uint64_t id,
-                             uint64_t bind_id, int num_args,
-                             const char** arg_names, const uint8_t* arg_types,
-                             const uint64_t* arg_values, unsigned int flags) {
+void TraceObject::Initialize(
+    char phase, const uint8_t* category_enabled_flag, const char* name,
+    const char* scope, uint64_t id, uint64_t bind_id, int num_args,
+    const char** arg_names, const uint8_t* arg_types,
+    const uint64_t* arg_values,
+    std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+    unsigned int flags) {
   pid_ = base::OS::GetCurrentProcessId();
   tid_ = base::OS::GetCurrentThreadId();
   phase_ = phase;
@@ -55,6 +58,8 @@
     arg_names_[i] = arg_names[i];
     arg_values_[i].as_uint = arg_values[i];
     arg_types_[i] = arg_types[i];
+    if (arg_types[i] == TRACE_VALUE_TYPE_CONVERTABLE)
+      arg_convertables_[i] = std::move(arg_convertables[i]);
   }
 
   bool copy = !!(flags & TRACE_EVENT_FLAG_COPY);
@@ -107,8 +112,10 @@
     char phase, const uint8_t* category_enabled_flag, const char* name,
     const char* scope, uint64_t id, uint64_t bind_id, int num_args,
     const char** arg_names, const uint8_t* arg_types,
-    const uint64_t* arg_values, unsigned int flags, int pid, int tid,
-    int64_t ts, int64_t tts, uint64_t duration, uint64_t cpu_duration) {
+    const uint64_t* arg_values,
+    std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+    unsigned int flags, int pid, int tid, int64_t ts, int64_t tts,
+    uint64_t duration, uint64_t cpu_duration) {
   pid_ = pid;
   tid_ = tid;
   phase_ = phase;
diff --git a/src/libplatform/tracing/trace-writer.cc b/src/libplatform/tracing/trace-writer.cc
index ec95527..7445087 100644
--- a/src/libplatform/tracing/trace-writer.cc
+++ b/src/libplatform/tracing/trace-writer.cc
@@ -7,6 +7,7 @@
 #include <cmath>
 
 #include "base/trace_event/common/trace_event_common.h"
+#include "include/v8-platform.h"
 #include "src/base/platform/platform.h"
 
 namespace v8 {
@@ -112,6 +113,12 @@
   }
 }
 
+void JSONTraceWriter::AppendArgValue(ConvertableToTraceFormat* value) {
+  std::string arg_stringified;
+  value->AppendAsTraceFormat(&arg_stringified);
+  stream_ << arg_stringified;
+}
+
 JSONTraceWriter::JSONTraceWriter(std::ostream& stream) : stream_(stream) {
   stream_ << "{\"traceEvents\":[";
 }
@@ -143,10 +150,16 @@
   const char** arg_names = trace_event->arg_names();
   const uint8_t* arg_types = trace_event->arg_types();
   TraceObject::ArgValue* arg_values = trace_event->arg_values();
+  std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables =
+      trace_event->arg_convertables();
   for (int i = 0; i < trace_event->num_args(); ++i) {
     if (i > 0) stream_ << ",";
     stream_ << "\"" << arg_names[i] << "\":";
-    AppendArgValue(arg_types[i], arg_values[i]);
+    if (arg_types[i] == TRACE_VALUE_TYPE_CONVERTABLE) {
+      AppendArgValue(arg_convertables[i].get());
+    } else {
+      AppendArgValue(arg_types[i], arg_values[i]);
+    }
   }
   stream_ << "}}";
   // TODO(fmeawad): Add support for Flow Events.
diff --git a/src/libplatform/tracing/trace-writer.h b/src/libplatform/tracing/trace-writer.h
index 963fc6a..43d7cb6 100644
--- a/src/libplatform/tracing/trace-writer.h
+++ b/src/libplatform/tracing/trace-writer.h
@@ -20,6 +20,7 @@
 
  private:
   void AppendArgValue(uint8_t type, TraceObject::ArgValue value);
+  void AppendArgValue(v8::ConvertableToTraceFormat*);
 
   std::ostream& stream_;
   bool append_comma_ = false;
diff --git a/src/libplatform/tracing/tracing-controller.cc b/src/libplatform/tracing/tracing-controller.cc
index e9a2172..c1a4057 100644
--- a/src/libplatform/tracing/tracing-controller.cc
+++ b/src/libplatform/tracing/tracing-controller.cc
@@ -38,21 +38,28 @@
 // Skip default categories.
 v8::base::AtomicWord g_category_index = g_num_builtin_categories;
 
+TracingController::TracingController() {}
+
+TracingController::~TracingController() {}
+
 void TracingController::Initialize(TraceBuffer* trace_buffer) {
   trace_buffer_.reset(trace_buffer);
+  mutex_.reset(new base::Mutex());
 }
 
 uint64_t TracingController::AddTraceEvent(
     char phase, const uint8_t* category_enabled_flag, const char* name,
     const char* scope, uint64_t id, uint64_t bind_id, int num_args,
     const char** arg_names, const uint8_t* arg_types,
-    const uint64_t* arg_values, unsigned int flags) {
+    const uint64_t* arg_values,
+    std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
+    unsigned int flags) {
   uint64_t handle;
   TraceObject* trace_object = trace_buffer_->AddTraceEvent(&handle);
   if (trace_object) {
     trace_object->Initialize(phase, category_enabled_flag, name, scope, id,
                              bind_id, num_args, arg_names, arg_types,
-                             arg_values, flags);
+                             arg_values, arg_convertables, flags);
   }
   return handle;
 }
@@ -91,13 +98,29 @@
 
 void TracingController::StartTracing(TraceConfig* trace_config) {
   trace_config_.reset(trace_config);
-  mode_ = RECORDING_MODE;
-  UpdateCategoryGroupEnabledFlags();
+  std::unordered_set<Platform::TraceStateObserver*> observers_copy;
+  {
+    base::LockGuard<base::Mutex> lock(mutex_.get());
+    mode_ = RECORDING_MODE;
+    UpdateCategoryGroupEnabledFlags();
+    observers_copy = observers_;
+  }
+  for (auto o : observers_copy) {
+    o->OnTraceEnabled();
+  }
 }
 
 void TracingController::StopTracing() {
   mode_ = DISABLED;
   UpdateCategoryGroupEnabledFlags();
+  std::unordered_set<Platform::TraceStateObserver*> observers_copy;
+  {
+    base::LockGuard<base::Mutex> lock(mutex_.get());
+    observers_copy = observers_;
+  }
+  for (auto o : observers_copy) {
+    o->OnTraceDisabled();
+  }
   trace_buffer_->Flush();
 }
 
@@ -172,6 +195,24 @@
   return category_group_enabled;
 }
 
+void TracingController::AddTraceStateObserver(
+    Platform::TraceStateObserver* observer) {
+  {
+    base::LockGuard<base::Mutex> lock(mutex_.get());
+    observers_.insert(observer);
+    if (mode_ != RECORDING_MODE) return;
+  }
+  // Fire the observer if recording is already in progress.
+  observer->OnTraceEnabled();
+}
+
+void TracingController::RemoveTraceStateObserver(
+    Platform::TraceStateObserver* observer) {
+  base::LockGuard<base::Mutex> lock(mutex_.get());
+  DCHECK(observers_.find(observer) != observers_.end());
+  observers_.erase(observer);
+}
+
 }  // namespace tracing
 }  // namespace platform
 }  // namespace v8
diff --git a/src/libsampler/sampler.cc b/src/libsampler/sampler.cc
index 71c667f..0b40972 100644
--- a/src/libsampler/sampler.cc
+++ b/src/libsampler/sampler.cc
@@ -217,7 +217,7 @@
 
 class SamplerManager {
  public:
-  SamplerManager() : sampler_map_(base::HashMap::PointersMatch) {}
+  SamplerManager() : sampler_map_() {}
 
   void AddSampler(Sampler* sampler) {
     AtomicGuard atomic_guard(&samplers_access_counter_);
diff --git a/src/lookup-cache-inl.h b/src/lookup-cache-inl.h
new file mode 100644
index 0000000..1998a9d
--- /dev/null
+++ b/src/lookup-cache-inl.h
@@ -0,0 +1,40 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/lookup-cache.h"
+
+#include "src/objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+// static
+int DescriptorLookupCache::Hash(Object* source, Name* name) {
+  DCHECK(name->IsUniqueName());
+  // Uses only lower 32 bits if pointers are larger.
+  uint32_t source_hash =
+      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
+      kPointerSizeLog2;
+  uint32_t name_hash = name->hash_field();
+  return (source_hash ^ name_hash) % kLength;
+}
+
+int DescriptorLookupCache::Lookup(Map* source, Name* name) {
+  int index = Hash(source, name);
+  Key& key = keys_[index];
+  if ((key.source == source) && (key.name == name)) return results_[index];
+  return kAbsent;
+}
+
+void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
+  DCHECK(result != kAbsent);
+  int index = Hash(source, name);
+  Key& key = keys_[index];
+  key.source = source;
+  key.name = name;
+  results_[index] = result;
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/lookup-cache.cc b/src/lookup-cache.cc
new file mode 100644
index 0000000..18729d6
--- /dev/null
+++ b/src/lookup-cache.cc
@@ -0,0 +1,84 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/lookup-cache.h"
+
+#include "src/objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+void DescriptorLookupCache::Clear() {
+  for (int index = 0; index < kLength; index++) keys_[index].source = NULL;
+}
+
+int KeyedLookupCache::Hash(Handle<Map> map, Handle<Name> name) {
+  DisallowHeapAllocation no_gc;
+  // Uses only lower 32 bits if pointers are larger.
+  uintptr_t addr_hash =
+      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*map)) >> kMapHashShift;
+  return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
+}
+
+int KeyedLookupCache::Lookup(Handle<Map> map, Handle<Name> name) {
+  DisallowHeapAllocation no_gc;
+  int index = (Hash(map, name) & kHashMask);
+  for (int i = 0; i < kEntriesPerBucket; i++) {
+    Key& key = keys_[index + i];
+    if ((key.map == *map) && key.name->Equals(*name)) {
+      return field_offsets_[index + i];
+    }
+  }
+  return kNotFound;
+}
+
+void KeyedLookupCache::Update(Handle<Map> map, Handle<Name> name,
+                              int field_offset) {
+  DisallowHeapAllocation no_gc;
+  if (!name->IsUniqueName()) {
+    if (!StringTable::InternalizeStringIfExists(name->GetIsolate(),
+                                                Handle<String>::cast(name))
+             .ToHandle(&name)) {
+      return;
+    }
+  }
+  // This cache is cleared only between mark compact passes, so we expect the
+  // cache to only contain old space names.
+  DCHECK(!map->GetIsolate()->heap()->InNewSpace(*name));
+
+  int index = (Hash(map, name) & kHashMask);
+  // After a GC there will be free slots, so we use them in order (this may
+  // help to get the most frequently used one in position 0).
+  for (int i = 0; i < kEntriesPerBucket; i++) {
+    Key& key = keys_[index];
+    Object* free_entry_indicator = NULL;
+    if (key.map == free_entry_indicator) {
+      key.map = *map;
+      key.name = *name;
+      field_offsets_[index + i] = field_offset;
+      return;
+    }
+  }
+  // No free entry found in this bucket, so we move them all down one and
+  // put the new entry at position zero.
+  for (int i = kEntriesPerBucket - 1; i > 0; i--) {
+    Key& key = keys_[index + i];
+    Key& key2 = keys_[index + i - 1];
+    key = key2;
+    field_offsets_[index + i] = field_offsets_[index + i - 1];
+  }
+
+  // Write the new first entry.
+  Key& key = keys_[index];
+  key.map = *map;
+  key.name = *name;
+  field_offsets_[index] = field_offset;
+}
+
+void KeyedLookupCache::Clear() {
+  for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/lookup-cache.h b/src/lookup-cache.h
new file mode 100644
index 0000000..6da5e5b
--- /dev/null
+++ b/src/lookup-cache.h
@@ -0,0 +1,117 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_LOOKUP_CACHE_H_
+#define V8_LOOKUP_CACHE_H_
+
+#include "src/objects.h"
+
+namespace v8 {
+namespace internal {
+
+// Cache for mapping (map, property name) into descriptor index.
+// The cache contains both positive and negative results.
+// Descriptor index equals kNotFound means the property is absent.
+// Cleared at startup and prior to any gc.
+class DescriptorLookupCache {
+ public:
+  // Lookup descriptor index for (map, name).
+  // If absent, kAbsent is returned.
+  inline int Lookup(Map* source, Name* name);
+
+  // Update an element in the cache.
+  inline void Update(Map* source, Name* name, int result);
+
+  // Clear the cache.
+  void Clear();
+
+  static const int kAbsent = -2;
+
+ private:
+  DescriptorLookupCache() {
+    for (int i = 0; i < kLength; ++i) {
+      keys_[i].source = NULL;
+      keys_[i].name = NULL;
+      results_[i] = kAbsent;
+    }
+  }
+
+  static inline int Hash(Object* source, Name* name);
+
+  static const int kLength = 64;
+  struct Key {
+    Map* source;
+    Name* name;
+  };
+
+  Key keys_[kLength];
+  int results_[kLength];
+
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
+};
+
+// Cache for mapping (map, property name) into field offset.
+// Cleared at startup and prior to mark sweep collection.
+class KeyedLookupCache {
+ public:
+  // Lookup field offset for (map, name). If absent, -1 is returned.
+  int Lookup(Handle<Map> map, Handle<Name> name);
+
+  // Update an element in the cache.
+  void Update(Handle<Map> map, Handle<Name> name, int field_offset);
+
+  // Clear the cache.
+  void Clear();
+
+  static const int kLength = 256;
+  static const int kCapacityMask = kLength - 1;
+  static const int kMapHashShift = 5;
+  static const int kHashMask = -4;  // Zero the last two bits.
+  static const int kEntriesPerBucket = 4;
+  static const int kEntryLength = 2;
+  static const int kMapIndex = 0;
+  static const int kKeyIndex = 1;
+  static const int kNotFound = -1;
+
+  // kEntriesPerBucket should be a power of 2.
+  STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
+  STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
+
+ private:
+  KeyedLookupCache() {
+    for (int i = 0; i < kLength; ++i) {
+      keys_[i].map = NULL;
+      keys_[i].name = NULL;
+      field_offsets_[i] = kNotFound;
+    }
+  }
+
+  static inline int Hash(Handle<Map> map, Handle<Name> name);
+
+  // Get the address of the keys and field_offsets arrays.  Used in
+  // generated code to perform cache lookups.
+  Address keys_address() { return reinterpret_cast<Address>(&keys_); }
+
+  Address field_offsets_address() {
+    return reinterpret_cast<Address>(&field_offsets_);
+  }
+
+  struct Key {
+    Map* map;
+    Name* name;
+  };
+
+  Key keys_[kLength];
+  int field_offsets_[kLength];
+
+  friend class ExternalReference;
+  friend class Isolate;
+  DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_LOOKUP_CACHE_H_
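
One detail worth spelling out in the new KeyedLookupCache: kHashMask is -4, so (hash & kHashMask) rounds the hash down to a multiple of kEntriesPerBucket (4), i.e. to the first slot of a bucket, because -4 in two's complement has every bit set except the low two; the STATIC_ASSERT(kEntriesPerBucket == -kHashMask) pins that relationship. A small arithmetic sketch in plain JS, illustrative only:

  (-4 >>> 0).toString(2);   // "11111111111111111111111111111100"
  5 & -4;                   // 4 -- hashes 4..7 land in the bucket starting at slot 4
  7 & -4;                   // 4
  9 & -4;                   // 8 -- next bucket starts at slot 8
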
diff --git a/src/lookup.cc b/src/lookup.cc
index 54015d4..b6c0b92 100644
--- a/src/lookup.cc
+++ b/src/lookup.cc
@@ -13,7 +13,6 @@
 namespace v8 {
 namespace internal {
 
-
 // static
 LookupIterator LookupIterator::PropertyOrElement(Isolate* isolate,
                                                  Handle<Object> receiver,
@@ -308,6 +307,11 @@
     PropertyAttributes attributes, Object::StoreFromKeyed store_mode) {
   DCHECK(receiver.is_identical_to(GetStoreTarget()));
   if (state_ == TRANSITION) return;
+
+  if (!IsElement() && name()->IsPrivate()) {
+    attributes = static_cast<PropertyAttributes>(attributes | DONT_ENUM);
+  }
+
   DCHECK(state_ != LookupIterator::ACCESSOR ||
          (GetAccessors()->IsAccessorInfo() &&
           AccessorInfo::cast(*GetAccessors())->is_special_data_property()));
@@ -416,11 +420,6 @@
         isolate_, is_prototype_map
                       ? &RuntimeCallStats::PrototypeObject_DeleteProperty
                       : &RuntimeCallStats::Object_DeleteProperty);
-    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-        isolate_,
-        (is_prototype_map
-             ? &tracing::TraceEventStatsTable::PrototypeObject_DeleteProperty
-             : &tracing::TraceEventStatsTable::Object_DeleteProperty));
 
     PropertyNormalizationMode mode =
         is_prototype_map ? KEEP_INOBJECT_PROPERTIES : CLEAR_INOBJECT_PROPERTIES;
@@ -447,6 +446,9 @@
   // handled via a trap. Adding properties to primitive values is not
   // observable.
   Handle<JSObject> receiver = GetStoreTarget();
+  if (!IsElement() && name()->IsPrivate()) {
+    attributes = static_cast<PropertyAttributes>(attributes | DONT_ENUM);
+  }
 
   if (!IsElement() && !receiver->map()->is_dictionary_map()) {
     Handle<Map> old_map(receiver->map(), isolate_);
diff --git a/src/lookup.h b/src/lookup.h
index ffc7904..687c677 100644
--- a/src/lookup.h
+++ b/src/lookup.h
@@ -43,30 +43,26 @@
 
   LookupIterator(Handle<Object> receiver, Handle<Name> name,
                  Configuration configuration = DEFAULT)
-      : configuration_(ComputeConfiguration(configuration, name)),
-        interceptor_state_(InterceptorState::kUninitialized),
-        property_details_(PropertyDetails::Empty()),
-        isolate_(name->GetIsolate()),
-        name_(isolate_->factory()->InternalizeName(name)),
-        receiver_(receiver),
-        initial_holder_(GetRoot(isolate_, receiver)),
-        // kMaxUInt32 isn't a valid index.
-        index_(kMaxUInt32),
-        number_(DescriptorArray::kNotFound) {
-#ifdef DEBUG
-    uint32_t index;  // Assert that the name is not an array index.
-    DCHECK(!name->AsArrayIndex(&index));
-#endif  // DEBUG
-    Start<false>();
-  }
+      : LookupIterator(name->GetIsolate(), receiver, name, configuration) {}
+
+  LookupIterator(Isolate* isolate, Handle<Object> receiver, Handle<Name> name,
+                 Configuration configuration = DEFAULT)
+      : LookupIterator(isolate, receiver, name, GetRoot(isolate, receiver),
+                       configuration) {}
 
   LookupIterator(Handle<Object> receiver, Handle<Name> name,
                  Handle<JSReceiver> holder,
                  Configuration configuration = DEFAULT)
+      : LookupIterator(name->GetIsolate(), receiver, name, holder,
+                       configuration) {}
+
+  LookupIterator(Isolate* isolate, Handle<Object> receiver, Handle<Name> name,
+                 Handle<JSReceiver> holder,
+                 Configuration configuration = DEFAULT)
       : configuration_(ComputeConfiguration(configuration, name)),
         interceptor_state_(InterceptorState::kUninitialized),
         property_details_(PropertyDetails::Empty()),
-        isolate_(name->GetIsolate()),
+        isolate_(isolate),
         name_(isolate_->factory()->InternalizeName(name)),
         receiver_(receiver),
         initial_holder_(holder),
@@ -82,18 +78,8 @@
 
   LookupIterator(Isolate* isolate, Handle<Object> receiver, uint32_t index,
                  Configuration configuration = DEFAULT)
-      : configuration_(configuration),
-        interceptor_state_(InterceptorState::kUninitialized),
-        property_details_(PropertyDetails::Empty()),
-        isolate_(isolate),
-        receiver_(receiver),
-        initial_holder_(GetRoot(isolate, receiver, index)),
-        index_(index),
-        number_(DescriptorArray::kNotFound) {
-    // kMaxUInt32 isn't a valid index.
-    DCHECK_NE(kMaxUInt32, index_);
-    Start<true>();
-  }
+      : LookupIterator(isolate, receiver, index,
+                       GetRoot(isolate, receiver, index), configuration) {}
 
   LookupIterator(Isolate* isolate, Handle<Object> receiver, uint32_t index,
                  Handle<JSReceiver> holder,
@@ -289,7 +275,7 @@
   MUST_USE_RESULT inline JSReceiver* NextHolder(Map* map);
 
   template <bool is_element>
-  void Start();
+  V8_EXPORT_PRIVATE void Start();
   template <bool is_element>
   void NextInternal(Map* map, JSReceiver* holder);
   template <bool is_element>
diff --git a/src/machine-type.h b/src/machine-type.h
index bcc85b3..e9605d7 100644
--- a/src/machine-type.h
+++ b/src/machine-type.h
@@ -10,7 +10,7 @@
 #include "src/base/bits.h"
 #include "src/globals.h"
 #include "src/signature.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -22,12 +22,14 @@
   kWord16,
   kWord32,
   kWord64,
-  kFloat32,
-  kFloat64,  // must follow kFloat32
-  kSimd128,  // must follow kFloat64
   kTaggedSigned,
   kTaggedPointer,
-  kTagged
+  kTagged,
+  // FP representations must be last, and in order of increasing size.
+  kFloat32,
+  kFloat64,
+  kSimd128,
+  kFirstFPRepresentation = kFloat32
 };
 
 const char* MachineReprToString(MachineRepresentation);
@@ -62,6 +64,8 @@
   MachineRepresentation representation() const { return representation_; }
   MachineSemantic semantic() const { return semantic_; }
 
+  bool IsNone() { return representation() == MachineRepresentation::kNone; }
+
   bool IsSigned() {
     return semantic() == MachineSemantic::kInt32 ||
            semantic() == MachineSemantic::kInt64;
@@ -119,6 +123,14 @@
     return MachineType(MachineRepresentation::kWord64,
                        MachineSemantic::kUint64);
   }
+  static MachineType TaggedPointer() {
+    return MachineType(MachineRepresentation::kTaggedPointer,
+                       MachineSemantic::kAny);
+  }
+  static MachineType TaggedSigned() {
+    return MachineType(MachineRepresentation::kTaggedSigned,
+                       MachineSemantic::kInt32);
+  }
   static MachineType AnyTagged() {
     return MachineType(MachineRepresentation::kTagged, MachineSemantic::kAny);
   }
@@ -161,7 +173,7 @@
     return MachineType(MachineRepresentation::kBit, MachineSemantic::kNone);
   }
 
-  static MachineType TypeForRepresentation(MachineRepresentation& rep,
+  static MachineType TypeForRepresentation(const MachineRepresentation& rep,
                                            bool isSigned = true) {
     switch (rep) {
       case MachineRepresentation::kNone:
@@ -184,6 +196,10 @@
         return MachineType::Simd128();
       case MachineRepresentation::kTagged:
         return MachineType::AnyTagged();
+      case MachineRepresentation::kTaggedSigned:
+        return MachineType::TaggedSigned();
+      case MachineRepresentation::kTaggedPointer:
+        return MachineType::TaggedPointer();
       default:
         UNREACHABLE();
         return MachineType::None();
@@ -204,14 +220,22 @@
          static_cast<size_t>(type.semantic()) * 16;
 }
 
-std::ostream& operator<<(std::ostream& os, MachineRepresentation rep);
+V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
+                                           MachineRepresentation rep);
 std::ostream& operator<<(std::ostream& os, MachineSemantic type);
 std::ostream& operator<<(std::ostream& os, MachineType type);
 
 inline bool IsFloatingPoint(MachineRepresentation rep) {
-  return rep == MachineRepresentation::kFloat32 ||
-         rep == MachineRepresentation::kFloat64 ||
-         rep == MachineRepresentation::kSimd128;
+  return rep >= MachineRepresentation::kFirstFPRepresentation;
+}
+
+inline bool CanBeTaggedPointer(MachineRepresentation rep) {
+  return rep == MachineRepresentation::kTagged ||
+         rep == MachineRepresentation::kTaggedPointer;
+}
+
+inline bool IsAnyTagged(MachineRepresentation rep) {
+  return CanBeTaggedPointer(rep) || rep == MachineRepresentation::kTaggedSigned;
 }
 
 // Gets the log2 of the element size in bytes of the machine type.
diff --git a/src/messages.cc b/src/messages.cc
index 5d03318..cc6349d 100644
--- a/src/messages.cc
+++ b/src/messages.cc
@@ -164,71 +164,63 @@
   return GetMessage(isolate, data)->ToCString(DISALLOW_NULLS);
 }
 
+void JSStackFrame::FromFrameArray(Isolate* isolate, Handle<FrameArray> array,
+                                  int frame_ix) {
+  DCHECK(!array->IsWasmFrame(frame_ix));
+  isolate_ = isolate;
+  receiver_ = handle(array->Receiver(frame_ix), isolate);
+  function_ = handle(array->Function(frame_ix), isolate);
+  code_ = handle(array->Code(frame_ix), isolate);
+  offset_ = array->Offset(frame_ix)->value();
 
-CallSite::CallSite(Isolate* isolate, Handle<JSObject> call_site_obj)
-    : isolate_(isolate) {
-  Handle<Object> maybe_function = JSObject::GetDataProperty(
-      call_site_obj, isolate->factory()->call_site_function_symbol());
-  if (maybe_function->IsJSFunction()) {
-    // javascript
-    fun_ = Handle<JSFunction>::cast(maybe_function);
-    receiver_ = JSObject::GetDataProperty(
-        call_site_obj, isolate->factory()->call_site_receiver_symbol());
-  } else {
-    Handle<Object> maybe_wasm_func_index = JSObject::GetDataProperty(
-        call_site_obj, isolate->factory()->call_site_wasm_func_index_symbol());
-    if (!maybe_wasm_func_index->IsSmi()) {
-      // invalid: neither javascript nor wasm
-      return;
-    }
-    // wasm
-    wasm_obj_ = Handle<JSObject>::cast(JSObject::GetDataProperty(
-        call_site_obj, isolate->factory()->call_site_wasm_obj_symbol()));
-    wasm_func_index_ = Smi::cast(*maybe_wasm_func_index)->value();
-    DCHECK(static_cast<int>(wasm_func_index_) >= 0);
-  }
-
-  CHECK(JSObject::GetDataProperty(
-            call_site_obj, isolate->factory()->call_site_position_symbol())
-            ->ToInt32(&pos_));
+  const int flags = array->Flags(frame_ix)->value();
+  force_constructor_ = (flags & FrameArray::kForceConstructor) != 0;
+  is_strict_ = (flags & FrameArray::kIsStrict) != 0;
 }
 
+JSStackFrame::JSStackFrame(Isolate* isolate, Handle<Object> receiver,
+                           Handle<JSFunction> function,
+                           Handle<AbstractCode> code, int offset)
+    : isolate_(isolate),
+      receiver_(receiver),
+      function_(function),
+      code_(code),
+      offset_(offset),
+      force_constructor_(false),
+      is_strict_(false) {}
 
-Handle<Object> CallSite::GetFileName() {
-  if (!IsJavaScript()) return isolate_->factory()->null_value();
-  Object* script = fun_->shared()->script();
-  if (!script->IsScript()) return isolate_->factory()->null_value();
-  return Handle<Object>(Script::cast(script)->name(), isolate_);
+JSStackFrame::JSStackFrame() {}
+
+Handle<Object> JSStackFrame::GetFunction() const {
+  return Handle<Object>::cast(function_);
 }
 
+Handle<Object> JSStackFrame::GetFileName() {
+  if (!HasScript()) return isolate_->factory()->null_value();
+  return handle(GetScript()->name(), isolate_);
+}
 
-Handle<Object> CallSite::GetFunctionName() {
-  if (IsWasm()) {
-    return wasm::GetWasmFunctionNameOrNull(isolate_, wasm_obj_,
-                                           wasm_func_index_);
-  }
-  Handle<String> result = JSFunction::GetName(fun_);
+Handle<Object> JSStackFrame::GetFunctionName() {
+  Handle<String> result = JSFunction::GetName(function_);
   if (result->length() != 0) return result;
 
-  Handle<Object> script(fun_->shared()->script(), isolate_);
-  if (script->IsScript() &&
-      Handle<Script>::cast(script)->compilation_type() ==
-          Script::COMPILATION_TYPE_EVAL) {
+  if (HasScript() &&
+      GetScript()->compilation_type() == Script::COMPILATION_TYPE_EVAL) {
     return isolate_->factory()->eval_string();
   }
   return isolate_->factory()->null_value();
 }
 
-Handle<Object> CallSite::GetScriptNameOrSourceUrl() {
-  if (!IsJavaScript()) return isolate_->factory()->null_value();
-  Object* script_obj = fun_->shared()->script();
-  if (!script_obj->IsScript()) return isolate_->factory()->null_value();
-  Handle<Script> script(Script::cast(script_obj), isolate_);
+Handle<Object> JSStackFrame::GetScriptNameOrSourceUrl() {
+  if (!HasScript()) return isolate_->factory()->null_value();
+  Handle<Script> script = GetScript();
   Object* source_url = script->source_url();
-  if (source_url->IsString()) return Handle<Object>(source_url, isolate_);
-  return Handle<Object>(script->name(), isolate_);
+  return (source_url->IsString()) ? handle(source_url, isolate_)
+                                  : handle(script->name(), isolate_);
 }
 
+namespace {
+
 bool CheckMethodName(Isolate* isolate, Handle<JSObject> obj, Handle<Name> name,
                      Handle<JSFunction> fun,
                      LookupIterator::Configuration config) {
@@ -246,12 +238,13 @@
   return false;
 }
 
+}  // namespace
 
-Handle<Object> CallSite::GetMethodName() {
-  if (!IsJavaScript() || receiver_->IsNull(isolate_) ||
-      receiver_->IsUndefined(isolate_)) {
+Handle<Object> JSStackFrame::GetMethodName() {
+  if (receiver_->IsNull(isolate_) || receiver_->IsUndefined(isolate_)) {
     return isolate_->factory()->null_value();
   }
+
   Handle<JSReceiver> receiver =
       Object::ToObject(isolate_, receiver_).ToHandleChecked();
   if (!receiver->IsJSObject()) {
@@ -259,7 +252,7 @@
   }
 
   Handle<JSObject> obj = Handle<JSObject>::cast(receiver);
-  Handle<Object> function_name(fun_->shared()->name(), isolate_);
+  Handle<Object> function_name(function_->shared()->name(), isolate_);
   if (function_name->IsString()) {
     Handle<String> name = Handle<String>::cast(function_name);
     // ES2015 gives getters and setters name prefixes which must
@@ -268,7 +261,7 @@
         name->IsUtf8EqualTo(CStrVector("set "), true)) {
       name = isolate_->factory()->NewProperSubString(name, 4, name->length());
     }
-    if (CheckMethodName(isolate_, obj, name, fun_,
+    if (CheckMethodName(isolate_, obj, name, function_,
                         LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR)) {
       return name;
     }
@@ -288,7 +281,7 @@
       HandleScope inner_scope(isolate_);
       if (!keys->get(i)->IsName()) continue;
       Handle<Name> name_key(Name::cast(keys->get(i)), isolate_);
-      if (!CheckMethodName(isolate_, current_obj, name_key, fun_,
+      if (!CheckMethodName(isolate_, current_obj, name_key, function_,
                            LookupIterator::OWN_SKIP_INTERCEPTOR))
         continue;
       // Return null in case of duplicates to avoid confusion.
@@ -301,20 +294,6 @@
   return isolate_->factory()->null_value();
 }
 
-Handle<Object> CallSite::GetTypeName() {
-  // TODO(jgruber): Check for strict/constructor here as in
-  // CallSitePrototypeGetThis.
-
-  if (receiver_->IsNull(isolate_) || receiver_->IsUndefined(isolate_))
-    return isolate_->factory()->null_value();
-
-  if (receiver_->IsJSProxy()) return isolate_->factory()->Proxy_string();
-
-  Handle<JSReceiver> receiver_object =
-      Object::ToObject(isolate_, receiver_).ToHandleChecked();
-  return JSReceiver::GetConstructorName(receiver_object);
-}
-
 namespace {
 
 Object* EvalFromFunctionName(Isolate* isolate, Handle<Script> script) {
@@ -414,126 +393,344 @@
 
 }  // namespace
 
-Handle<Object> CallSite::GetEvalOrigin() {
-  if (IsWasm()) return isolate_->factory()->undefined_value();
-  DCHECK(IsJavaScript());
+Handle<Object> JSStackFrame::GetTypeName() {
+  // TODO(jgruber): Check for strict/constructor here as in
+  // CallSitePrototypeGetThis.
 
-  Handle<Object> script = handle(fun_->shared()->script(), isolate_);
-  if (!script->IsScript()) return isolate_->factory()->undefined_value();
+  if (receiver_->IsNull(isolate_) || receiver_->IsUndefined(isolate_))
+    return isolate_->factory()->null_value();
 
-  return FormatEvalOrigin(isolate_, Handle<Script>::cast(script))
-      .ToHandleChecked();
+  if (receiver_->IsJSProxy()) return isolate_->factory()->Proxy_string();
+
+  Handle<JSReceiver> receiver_object =
+      Object::ToObject(isolate_, receiver_).ToHandleChecked();
+  return JSReceiver::GetConstructorName(receiver_object);
 }
 
-int CallSite::GetLineNumber() {
-  if (pos_ >= 0 && IsJavaScript()) {
-    Handle<Object> script_obj(fun_->shared()->script(), isolate_);
-    if (script_obj->IsScript()) {
-      Handle<Script> script = Handle<Script>::cast(script_obj);
-      return Script::GetLineNumber(script, pos_) + 1;
-    }
+Handle<Object> JSStackFrame::GetEvalOrigin() {
+  if (!HasScript()) return isolate_->factory()->undefined_value();
+  return FormatEvalOrigin(isolate_, GetScript()).ToHandleChecked();
+}
+
+int JSStackFrame::GetLineNumber() {
+  DCHECK_LE(0, GetPosition());
+  if (HasScript()) return Script::GetLineNumber(GetScript(), GetPosition()) + 1;
+  return -1;
+}
+
+int JSStackFrame::GetColumnNumber() {
+  DCHECK_LE(0, GetPosition());
+  if (HasScript()) {
+    return Script::GetColumnNumber(GetScript(), GetPosition()) + 1;
   }
   return -1;
 }
 
-
-int CallSite::GetColumnNumber() {
-  if (pos_ >= 0 && IsJavaScript()) {
-    Handle<Object> script_obj(fun_->shared()->script(), isolate_);
-    if (script_obj->IsScript()) {
-      Handle<Script> script = Handle<Script>::cast(script_obj);
-      return Script::GetColumnNumber(script, pos_) + 1;
-    }
-  }
-  return -1;
+bool JSStackFrame::IsNative() {
+  return HasScript() && GetScript()->type() == Script::TYPE_NATIVE;
 }
 
-
-bool CallSite::IsNative() {
-  if (!IsJavaScript()) return false;
-  Handle<Object> script(fun_->shared()->script(), isolate_);
-  return script->IsScript() &&
-         Handle<Script>::cast(script)->type() == Script::TYPE_NATIVE;
-}
-
-
-bool CallSite::IsToplevel() {
-  if (IsWasm()) return false;
+bool JSStackFrame::IsToplevel() {
   return receiver_->IsJSGlobalProxy() || receiver_->IsNull(isolate_) ||
          receiver_->IsUndefined(isolate_);
 }
 
-
-bool CallSite::IsEval() {
-  if (!IsJavaScript()) return false;
-  Handle<Object> script(fun_->shared()->script(), isolate_);
-  return script->IsScript() &&
-         Handle<Script>::cast(script)->compilation_type() ==
-             Script::COMPILATION_TYPE_EVAL;
+bool JSStackFrame::IsEval() {
+  return HasScript() &&
+         GetScript()->compilation_type() == Script::COMPILATION_TYPE_EVAL;
 }
 
-
-bool CallSite::IsConstructor() {
-  // Builtin exit frames mark constructors by passing a special symbol as the
-  // receiver.
-  Object* ctor_symbol = isolate_->heap()->call_site_constructor_symbol();
-  if (*receiver_ == ctor_symbol) return true;
-  if (!IsJavaScript() || !receiver_->IsJSObject()) return false;
+bool JSStackFrame::IsConstructor() {
+  if (force_constructor_) return true;
+  if (!receiver_->IsJSObject()) return false;
   Handle<Object> constructor =
       JSReceiver::GetDataProperty(Handle<JSObject>::cast(receiver_),
                                   isolate_->factory()->constructor_string());
-  return constructor.is_identical_to(fun_);
+  return constructor.is_identical_to(function_);
 }
 
 namespace {
 
-// Convert the raw frames as written by Isolate::CaptureSimpleStackTrace into
-// a vector of JS CallSite objects.
-MaybeHandle<FixedArray> GetStackFrames(Isolate* isolate,
-                                       Handle<Object> raw_stack) {
-  DCHECK(raw_stack->IsJSArray());
-  Handle<JSArray> raw_stack_array = Handle<JSArray>::cast(raw_stack);
+bool IsNonEmptyString(Handle<Object> object) {
+  return (object->IsString() && String::cast(*object)->length() > 0);
+}
 
-  DCHECK(raw_stack_array->elements()->IsFixedArray());
-  Handle<FixedArray> raw_stack_elements =
-      handle(FixedArray::cast(raw_stack_array->elements()), isolate);
-
-  const int raw_stack_len = raw_stack_elements->length();
-  DCHECK(raw_stack_len % 4 == 1);  // Multiples of 4 plus sloppy frames count.
-  const int frame_count = (raw_stack_len - 1) / 4;
-
-  Handle<Object> sloppy_frames_obj =
-      FixedArray::get(*raw_stack_elements, 0, isolate);
-  int sloppy_frames = Handle<Smi>::cast(sloppy_frames_obj)->value();
-
-  int dst_ix = 0;
-  Handle<FixedArray> frames = isolate->factory()->NewFixedArray(frame_count);
-  for (int i = 1; i < raw_stack_len; i += 4) {
-    Handle<Object> recv = FixedArray::get(*raw_stack_elements, i, isolate);
-    Handle<Object> fun = FixedArray::get(*raw_stack_elements, i + 1, isolate);
-    Handle<AbstractCode> code = Handle<AbstractCode>::cast(
-        FixedArray::get(*raw_stack_elements, i + 2, isolate));
-    Handle<Smi> pc =
-        Handle<Smi>::cast(FixedArray::get(*raw_stack_elements, i + 3, isolate));
-
-    Handle<Object> pos =
-        (fun->IsSmi() && pc->value() < 0)
-            ? handle(Smi::FromInt(-1 - pc->value()), isolate)
-            : handle(Smi::FromInt(code->SourcePosition(pc->value())), isolate);
-
-    sloppy_frames--;
-    Handle<Object> strict = isolate->factory()->ToBoolean(sloppy_frames < 0);
-
-    Handle<Object> callsite;
-    ASSIGN_RETURN_ON_EXCEPTION(
-        isolate, callsite,
-        CallSiteUtils::Construct(isolate, recv, fun, pos, strict), FixedArray);
-
-    frames->set(dst_ix++, *callsite);
+void AppendFileLocation(Isolate* isolate, JSStackFrame* call_site,
+                        IncrementalStringBuilder* builder) {
+  if (call_site->IsNative()) {
+    builder->AppendCString("native");
+    return;
   }
 
-  DCHECK_EQ(frame_count, dst_ix);
-  return frames;
+  Handle<Object> file_name = call_site->GetScriptNameOrSourceUrl();
+  if (!file_name->IsString() && call_site->IsEval()) {
+    Handle<Object> eval_origin = call_site->GetEvalOrigin();
+    DCHECK(eval_origin->IsString());
+    builder->AppendString(Handle<String>::cast(eval_origin));
+    builder->AppendCString(", ");  // Expecting source position to follow.
+  }
+
+  if (IsNonEmptyString(file_name)) {
+    builder->AppendString(Handle<String>::cast(file_name));
+  } else {
+    // Source code does not originate from a file and is not native, but we
+    // can still get the source position inside the source string, e.g. in
+    // an eval string.
+    builder->AppendCString("<anonymous>");
+  }
+
+  int line_number = call_site->GetLineNumber();
+  if (line_number != -1) {
+    builder->AppendCharacter(':');
+    Handle<String> line_string = isolate->factory()->NumberToString(
+        handle(Smi::FromInt(line_number), isolate), isolate);
+    builder->AppendString(line_string);
+
+    int column_number = call_site->GetColumnNumber();
+    if (column_number != -1) {
+      builder->AppendCharacter(':');
+      Handle<String> column_string = isolate->factory()->NumberToString(
+          handle(Smi::FromInt(column_number), isolate), isolate);
+      builder->AppendString(column_string);
+    }
+  }
+}
+
+int StringIndexOf(Isolate* isolate, Handle<String> subject,
+                  Handle<String> pattern) {
+  if (pattern->length() > subject->length()) return -1;
+  return String::IndexOf(isolate, subject, pattern, 0);
+}
+
+// Returns true iff
+// 1. the subject ends with '.' + pattern, or
+// 2. subject == pattern.
+bool StringEndsWithMethodName(Isolate* isolate, Handle<String> subject,
+                              Handle<String> pattern) {
+  if (String::Equals(subject, pattern)) return true;
+
+  FlatStringReader subject_reader(isolate, String::Flatten(subject));
+  FlatStringReader pattern_reader(isolate, String::Flatten(pattern));
+
+  int pattern_index = pattern_reader.length() - 1;
+  int subject_index = subject_reader.length() - 1;
+  for (int i = 0; i <= pattern_reader.length(); i++) {  // Iterate over len + 1.
+    if (subject_index < 0) {
+      return false;
+    }
+
+    const uc32 subject_char = subject_reader.Get(subject_index);
+    if (i == pattern_reader.length()) {
+      if (subject_char != '.') return false;
+    } else if (subject_char != pattern_reader.Get(pattern_index)) {
+      return false;
+    }
+
+    pattern_index--;
+    subject_index--;
+  }
+
+  return true;
+}
+
+void AppendMethodCall(Isolate* isolate, JSStackFrame* call_site,
+                      IncrementalStringBuilder* builder) {
+  Handle<Object> type_name = call_site->GetTypeName();
+  Handle<Object> method_name = call_site->GetMethodName();
+  Handle<Object> function_name = call_site->GetFunctionName();
+
+  if (IsNonEmptyString(function_name)) {
+    Handle<String> function_string = Handle<String>::cast(function_name);
+    if (IsNonEmptyString(type_name)) {
+      Handle<String> type_string = Handle<String>::cast(type_name);
+      bool starts_with_type_name =
+          (StringIndexOf(isolate, function_string, type_string) == 0);
+      if (!starts_with_type_name) {
+        builder->AppendString(type_string);
+        builder->AppendCharacter('.');
+      }
+    }
+    builder->AppendString(function_string);
+
+    if (IsNonEmptyString(method_name)) {
+      Handle<String> method_string = Handle<String>::cast(method_name);
+      if (!StringEndsWithMethodName(isolate, function_string, method_string)) {
+        builder->AppendCString(" [as ");
+        builder->AppendString(method_string);
+        builder->AppendCharacter(']');
+      }
+    }
+  } else {
+    builder->AppendString(Handle<String>::cast(type_name));
+    builder->AppendCharacter('.');
+    if (IsNonEmptyString(method_name)) {
+      builder->AppendString(Handle<String>::cast(method_name));
+    } else {
+      builder->AppendCString("<anonymous>");
+    }
+  }
+}
+
+}  // namespace
+
+MaybeHandle<String> JSStackFrame::ToString() {
+  IncrementalStringBuilder builder(isolate_);
+
+  Handle<Object> function_name = GetFunctionName();
+
+  const bool is_toplevel = IsToplevel();
+  const bool is_constructor = IsConstructor();
+  const bool is_method_call = !(is_toplevel || is_constructor);
+
+  if (is_method_call) {
+    AppendMethodCall(isolate_, this, &builder);
+  } else if (is_constructor) {
+    builder.AppendCString("new ");
+    if (IsNonEmptyString(function_name)) {
+      builder.AppendString(Handle<String>::cast(function_name));
+    } else {
+      builder.AppendCString("<anonymous>");
+    }
+  } else if (IsNonEmptyString(function_name)) {
+    builder.AppendString(Handle<String>::cast(function_name));
+  } else {
+    AppendFileLocation(isolate_, this, &builder);
+    RETURN_RESULT(isolate_, builder.Finish(), String);
+  }
+
+  builder.AppendCString(" (");
+  AppendFileLocation(isolate_, this, &builder);
+  builder.AppendCString(")");
+
+  RETURN_RESULT(isolate_, builder.Finish(), String);
+}
+
+int JSStackFrame::GetPosition() const { return code_->SourcePosition(offset_); }
+
+bool JSStackFrame::HasScript() const {
+  return function_->shared()->script()->IsScript();
+}
+
+Handle<Script> JSStackFrame::GetScript() const {
+  return handle(Script::cast(function_->shared()->script()), isolate_);
+}
+
+void WasmStackFrame::FromFrameArray(Isolate* isolate, Handle<FrameArray> array,
+                                    int frame_ix) {
+  DCHECK(array->IsWasmFrame(frame_ix));
+  isolate_ = isolate;
+  wasm_obj_ = handle(array->WasmObject(frame_ix), isolate);
+  wasm_func_index_ = array->WasmFunctionIndex(frame_ix)->value();
+  code_ = handle(array->Code(frame_ix), isolate);
+  offset_ = array->Offset(frame_ix)->value();
+}
+
+Handle<Object> WasmStackFrame::GetFunction() const {
+  Handle<Object> obj(Smi::FromInt(wasm_func_index_), isolate_);
+  return obj;
+}
+
+Handle<Object> WasmStackFrame::GetFunctionName() {
+  return wasm::GetWasmFunctionNameOrNull(isolate_, wasm_obj_, wasm_func_index_);
+}
+
+MaybeHandle<String> WasmStackFrame::ToString() {
+  IncrementalStringBuilder builder(isolate_);
+
+  Handle<Object> name = GetFunctionName();
+  if (name->IsNull(isolate_)) {
+    builder.AppendCString("<WASM UNNAMED>");
+  } else {
+    DCHECK(name->IsString());
+    builder.AppendString(Handle<String>::cast(name));
+  }
+
+  builder.AppendCString(" (<WASM>[");
+
+  Handle<Smi> ix(Smi::FromInt(wasm_func_index_), isolate_);
+  builder.AppendString(isolate_->factory()->NumberToString(ix));
+
+  builder.AppendCString("]+");
+
+  Handle<Object> pos(Smi::FromInt(GetPosition()), isolate_);
+  builder.AppendString(isolate_->factory()->NumberToString(pos));
+  builder.AppendCString(")");
+
+  return builder.Finish();
+}
+
+int WasmStackFrame::GetPosition() const {
+  return (offset_ < 0) ? (-1 - offset_) : code_->SourcePosition(offset_);
+}
+
+Handle<Object> WasmStackFrame::Null() const {
+  return isolate_->factory()->null_value();
+}
+
+FrameArrayIterator::FrameArrayIterator(Isolate* isolate,
+                                       Handle<FrameArray> array, int frame_ix)
+    : isolate_(isolate), array_(array), next_frame_ix_(frame_ix) {}
+
+bool FrameArrayIterator::HasNext() const {
+  return (next_frame_ix_ < array_->FrameCount());
+}
+
+void FrameArrayIterator::Next() { next_frame_ix_++; }
+
+StackFrameBase* FrameArrayIterator::Frame() {
+  DCHECK(HasNext());
+  const int flags = array_->Flags(next_frame_ix_)->value();
+  const bool is_js_frame = (flags & FrameArray::kIsWasmFrame) == 0;
+  if (is_js_frame) {
+    js_frame_.FromFrameArray(isolate_, array_, next_frame_ix_);
+    return &js_frame_;
+  } else {
+    wasm_frame_.FromFrameArray(isolate_, array_, next_frame_ix_);
+    return &wasm_frame_;
+  }
+}
+
+namespace {
+
+MaybeHandle<Object> ConstructCallSite(Isolate* isolate,
+                                      Handle<FrameArray> frame_array,
+                                      int frame_index) {
+  Handle<JSFunction> target =
+      handle(isolate->native_context()->callsite_function(), isolate);
+
+  Handle<JSObject> obj;
+  ASSIGN_RETURN_ON_EXCEPTION(isolate, obj, JSObject::New(target, target),
+                             Object);
+
+  Handle<Symbol> key = isolate->factory()->call_site_frame_array_symbol();
+  RETURN_ON_EXCEPTION(isolate, JSObject::SetOwnPropertyIgnoreAttributes(
+                                   obj, key, frame_array, DONT_ENUM),
+                      Object);
+
+  key = isolate->factory()->call_site_frame_index_symbol();
+  Handle<Object> value(Smi::FromInt(frame_index), isolate);
+  RETURN_ON_EXCEPTION(isolate, JSObject::SetOwnPropertyIgnoreAttributes(
+                                   obj, key, value, DONT_ENUM),
+                      Object);
+
+  return obj;
+}
+
+// Convert the raw frames as written by Isolate::CaptureSimpleStackTrace into
+// a JSArray of JSCallSite objects.
+MaybeHandle<JSArray> GetStackFrames(Isolate* isolate,
+                                    Handle<FrameArray> elems) {
+  const int frame_count = elems->FrameCount();
+
+  Handle<FixedArray> frames = isolate->factory()->NewFixedArray(frame_count);
+  for (int i = 0; i < frame_count; i++) {
+    Handle<Object> site;
+    ASSIGN_RETURN_ON_EXCEPTION(isolate, site,
+                               ConstructCallSite(isolate, elems, i), JSArray);
+    frames->set(i, *site);
+  }
+
+  return isolate->factory()->NewJSArrayWithElements(frames);
 }
 
 MaybeHandle<Object> AppendErrorString(Isolate* isolate, Handle<Object> error,
@@ -590,11 +787,11 @@
 MaybeHandle<Object> ErrorUtils::FormatStackTrace(Isolate* isolate,
                                                  Handle<JSObject> error,
                                                  Handle<Object> raw_stack) {
-  // Create JS CallSite objects from the raw stack frame array.
+  DCHECK(raw_stack->IsJSArray());
+  Handle<JSArray> raw_stack_array = Handle<JSArray>::cast(raw_stack);
 
-  Handle<FixedArray> frames;
-  ASSIGN_RETURN_ON_EXCEPTION(isolate, frames,
-                             GetStackFrames(isolate, raw_stack), Object);
+  DCHECK(raw_stack_array->elements()->IsFixedArray());
+  Handle<FrameArray> elems(FrameArray::cast(raw_stack_array->elements()));
 
   // If there's a user-specified "prepareStackFrames" function, call it on the
   // frames and use its result.
@@ -609,12 +806,16 @@
   const bool in_recursion = isolate->formatting_stack_trace();
   if (prepare_stack_trace->IsJSFunction() && !in_recursion) {
     PrepareStackTraceScope scope(isolate);
-    Handle<JSArray> array = isolate->factory()->NewJSArrayWithElements(frames);
+
+    Handle<JSArray> sites;
+    ASSIGN_RETURN_ON_EXCEPTION(isolate, sites, GetStackFrames(isolate, elems),
+                               Object);
 
     const int argc = 2;
     ScopedVector<Handle<Object>> argv(argc);
+
     argv[0] = error;
-    argv[1] = array;
+    argv[1] = sites;
 
     Handle<Object> result;
     ASSIGN_RETURN_ON_EXCEPTION(
@@ -625,17 +826,18 @@
     return result;
   }
 
+  // Otherwise, run our internal formatting logic.
+
   IncrementalStringBuilder builder(isolate);
 
   RETURN_ON_EXCEPTION(isolate, AppendErrorString(isolate, error, &builder),
                       Object);
 
-  for (int i = 0; i < frames->length(); i++) {
+  for (FrameArrayIterator it(isolate, elems); it.HasNext(); it.Next()) {
     builder.AppendCString("\n    at ");
 
-    Handle<Object> frame = FixedArray::get(*frames, i, isolate);
-    MaybeHandle<String> maybe_frame_string =
-        CallSiteUtils::ToString(isolate, frame);
+    StackFrameBase* frame = it.Frame();
+    MaybeHandle<String> maybe_frame_string = frame->ToString();
     if (maybe_frame_string.is_null()) {
       // CallSite.toString threw. Try to return a string representation of the
       // thrown exception instead.
@@ -902,290 +1104,5 @@
                                no_caller, false);
 }
 
-#define SET_CALLSITE_PROPERTY(target, key, value)                        \
-  RETURN_ON_EXCEPTION(                                                   \
-      isolate, JSObject::SetOwnPropertyIgnoreAttributes(                 \
-                   target, isolate->factory()->key(), value, DONT_ENUM), \
-      Object)
-
-MaybeHandle<Object> CallSiteUtils::Construct(Isolate* isolate,
-                                             Handle<Object> receiver,
-                                             Handle<Object> fun,
-                                             Handle<Object> pos,
-                                             Handle<Object> strict_mode) {
-  // Create the JS object.
-
-  Handle<JSFunction> target =
-      handle(isolate->native_context()->callsite_function(), isolate);
-
-  Handle<JSObject> obj;
-  ASSIGN_RETURN_ON_EXCEPTION(isolate, obj, JSObject::New(target, target),
-                             Object);
-
-  // For wasm frames, receiver is the wasm object and fun is the function index
-  // instead of an actual function.
-  const bool is_wasm_object =
-      receiver->IsJSObject() && wasm::IsWasmObject(JSObject::cast(*receiver));
-  if (!fun->IsJSFunction() && !is_wasm_object) {
-    THROW_NEW_ERROR(isolate,
-                    NewTypeError(MessageTemplate::kCallSiteExpectsFunction,
-                                 Object::TypeOf(isolate, receiver),
-                                 Object::TypeOf(isolate, fun)),
-                    Object);
-  }
-
-  if (is_wasm_object) {
-    DCHECK(fun->IsSmi());
-    DCHECK(wasm::GetNumberOfFunctions(JSObject::cast(*receiver)) >
-           Smi::cast(*fun)->value());
-
-    SET_CALLSITE_PROPERTY(obj, call_site_wasm_obj_symbol, receiver);
-    SET_CALLSITE_PROPERTY(obj, call_site_wasm_func_index_symbol, fun);
-  } else {
-    DCHECK(fun->IsJSFunction());
-    SET_CALLSITE_PROPERTY(obj, call_site_receiver_symbol, receiver);
-    SET_CALLSITE_PROPERTY(obj, call_site_function_symbol, fun);
-  }
-
-  DCHECK(pos->IsSmi());
-  SET_CALLSITE_PROPERTY(obj, call_site_position_symbol, pos);
-  SET_CALLSITE_PROPERTY(
-      obj, call_site_strict_symbol,
-      isolate->factory()->ToBoolean(strict_mode->BooleanValue()));
-
-  return obj;
-}
-
-#undef SET_CALLSITE_PROPERTY
-
-namespace {
-
-bool IsNonEmptyString(Handle<Object> object) {
-  return (object->IsString() && String::cast(*object)->length() > 0);
-}
-
-MaybeHandle<JSObject> AppendWasmToString(Isolate* isolate,
-                                         Handle<JSObject> recv,
-                                         CallSite* call_site,
-                                         IncrementalStringBuilder* builder) {
-  Handle<Object> name = call_site->GetFunctionName();
-  if (name->IsNull(isolate)) {
-    builder->AppendCString("<WASM UNNAMED>");
-  } else {
-    DCHECK(name->IsString());
-    builder->AppendString(Handle<String>::cast(name));
-  }
-
-  builder->AppendCString(" (<WASM>[");
-
-  Handle<String> ix = isolate->factory()->NumberToString(
-      handle(Smi::FromInt(call_site->wasm_func_index()), isolate));
-  builder->AppendString(ix);
-
-  builder->AppendCString("]+");
-
-  Handle<Object> pos;
-  ASSIGN_RETURN_ON_EXCEPTION(
-      isolate, pos, JSObject::GetProperty(
-                        recv, isolate->factory()->call_site_position_symbol()),
-      JSObject);
-  DCHECK(pos->IsNumber());
-  builder->AppendString(isolate->factory()->NumberToString(pos));
-  builder->AppendCString(")");
-
-  return recv;
-}
-
-MaybeHandle<JSObject> AppendFileLocation(Isolate* isolate,
-                                         Handle<JSObject> recv,
-                                         CallSite* call_site,
-                                         IncrementalStringBuilder* builder) {
-  if (call_site->IsNative()) {
-    builder->AppendCString("native");
-    return recv;
-  }
-
-  Handle<Object> file_name = call_site->GetScriptNameOrSourceUrl();
-  if (!file_name->IsString() && call_site->IsEval()) {
-    Handle<Object> eval_origin = call_site->GetEvalOrigin();
-    DCHECK(eval_origin->IsString());
-    builder->AppendString(Handle<String>::cast(eval_origin));
-    builder->AppendCString(", ");  // Expecting source position to follow.
-  }
-
-  if (IsNonEmptyString(file_name)) {
-    builder->AppendString(Handle<String>::cast(file_name));
-  } else {
-    // Source code does not originate from a file and is not native, but we
-    // can still get the source position inside the source string, e.g. in
-    // an eval string.
-    builder->AppendCString("<anonymous>");
-  }
-
-  int line_number = call_site->GetLineNumber();
-  if (line_number != -1) {
-    builder->AppendCharacter(':');
-    Handle<String> line_string = isolate->factory()->NumberToString(
-        handle(Smi::FromInt(line_number), isolate), isolate);
-    builder->AppendString(line_string);
-
-    int column_number = call_site->GetColumnNumber();
-    if (column_number != -1) {
-      builder->AppendCharacter(':');
-      Handle<String> column_string = isolate->factory()->NumberToString(
-          handle(Smi::FromInt(column_number), isolate), isolate);
-      builder->AppendString(column_string);
-    }
-  }
-
-  return recv;
-}
-
-int StringIndexOf(Isolate* isolate, Handle<String> subject,
-                  Handle<String> pattern) {
-  if (pattern->length() > subject->length()) return -1;
-  return String::IndexOf(isolate, subject, pattern, 0);
-}
-
-// Returns true iff
-// 1. the subject ends with '.' + pattern, or
-// 2. subject == pattern.
-bool StringEndsWithMethodName(Isolate* isolate, Handle<String> subject,
-                              Handle<String> pattern) {
-  if (String::Equals(subject, pattern)) return true;
-
-  FlatStringReader subject_reader(isolate, String::Flatten(subject));
-  FlatStringReader pattern_reader(isolate, String::Flatten(pattern));
-
-  int pattern_index = pattern_reader.length() - 1;
-  int subject_index = subject_reader.length() - 1;
-  for (int i = 0; i <= pattern_reader.length(); i++) {  // Iterate over len + 1.
-    if (subject_index < 0) {
-      return false;
-    }
-
-    const uc32 subject_char = subject_reader.Get(subject_index);
-    if (i == pattern_reader.length()) {
-      if (subject_char != '.') return false;
-    } else if (subject_char != pattern_reader.Get(pattern_index)) {
-      return false;
-    }
-
-    pattern_index--;
-    subject_index--;
-  }
-
-  return true;
-}
-
-MaybeHandle<JSObject> AppendMethodCall(Isolate* isolate, Handle<JSObject> recv,
-                                       CallSite* call_site,
-                                       IncrementalStringBuilder* builder) {
-  Handle<Object> type_name = call_site->GetTypeName();
-  Handle<Object> method_name = call_site->GetMethodName();
-  Handle<Object> function_name = call_site->GetFunctionName();
-
-  if (IsNonEmptyString(function_name)) {
-    Handle<String> function_string = Handle<String>::cast(function_name);
-    if (IsNonEmptyString(type_name)) {
-      Handle<String> type_string = Handle<String>::cast(type_name);
-      bool starts_with_type_name =
-          (StringIndexOf(isolate, function_string, type_string) == 0);
-      if (!starts_with_type_name) {
-        builder->AppendString(type_string);
-        builder->AppendCharacter('.');
-      }
-    }
-    builder->AppendString(function_string);
-
-    if (IsNonEmptyString(method_name)) {
-      Handle<String> method_string = Handle<String>::cast(method_name);
-      if (!StringEndsWithMethodName(isolate, function_string, method_string)) {
-        builder->AppendCString(" [as ");
-        builder->AppendString(method_string);
-        builder->AppendCharacter(']');
-      }
-    }
-  } else {
-    builder->AppendString(Handle<String>::cast(type_name));
-    builder->AppendCharacter('.');
-    if (IsNonEmptyString(method_name)) {
-      builder->AppendString(Handle<String>::cast(method_name));
-    } else {
-      builder->AppendCString("<anonymous>");
-    }
-  }
-
-  return recv;
-}
-
-}  // namespace
-
-MaybeHandle<String> CallSiteUtils::ToString(Isolate* isolate,
-                                            Handle<Object> receiver) {
-  if (!receiver->IsJSObject()) {
-    THROW_NEW_ERROR(
-        isolate,
-        NewTypeError(MessageTemplate::kIncompatibleMethodReceiver,
-                     isolate->factory()->NewStringFromAsciiChecked("toString"),
-                     receiver),
-        String);
-  }
-  Handle<JSObject> recv = Handle<JSObject>::cast(receiver);
-
-  if (!JSReceiver::HasOwnProperty(
-           recv, isolate->factory()->call_site_position_symbol())
-           .FromMaybe(false)) {
-    THROW_NEW_ERROR(
-        isolate,
-        NewTypeError(MessageTemplate::kCallSiteMethod,
-                     isolate->factory()->NewStringFromAsciiChecked("toString")),
-        String);
-  }
-
-  IncrementalStringBuilder builder(isolate);
-
-  CallSite call_site(isolate, recv);
-  if (call_site.IsWasm()) {
-    RETURN_ON_EXCEPTION(isolate,
-                        AppendWasmToString(isolate, recv, &call_site, &builder),
-                        String);
-    RETURN_RESULT(isolate, builder.Finish(), String);
-  }
-
-  DCHECK(!call_site.IsWasm());
-  Handle<Object> function_name = call_site.GetFunctionName();
-
-  const bool is_toplevel = call_site.IsToplevel();
-  const bool is_constructor = call_site.IsConstructor();
-  const bool is_method_call = !(is_toplevel || is_constructor);
-
-  if (is_method_call) {
-    RETURN_ON_EXCEPTION(
-        isolate, AppendMethodCall(isolate, recv, &call_site, &builder), String);
-  } else if (is_constructor) {
-    builder.AppendCString("new ");
-    if (IsNonEmptyString(function_name)) {
-      builder.AppendString(Handle<String>::cast(function_name));
-    } else {
-      builder.AppendCString("<anonymous>");
-    }
-  } else if (IsNonEmptyString(function_name)) {
-    builder.AppendString(Handle<String>::cast(function_name));
-  } else {
-    RETURN_ON_EXCEPTION(isolate,
-                        AppendFileLocation(isolate, recv, &call_site, &builder),
-                        String);
-    RETURN_RESULT(isolate, builder.Finish(), String);
-  }
-
-  builder.AppendCString(" (");
-  RETURN_ON_EXCEPTION(
-      isolate, AppendFileLocation(isolate, recv, &call_site, &builder), String);
-  builder.AppendCString(")");
-
-  RETURN_RESULT(isolate, builder.Finish(), String);
-}
-
 }  // namespace internal
 }  // namespace v8
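For context, the src/messages.cc changes above replace the eager construction of JS CallSite objects with a lazy walk over the FrameArray: FrameArrayIterator hands out StackFrameBase pointers and each frame formats itself. A minimal sketch of that consumption pattern follows; it is not code from this patch, and FormatFrames is a hypothetical helper name.

// Hypothetical helper mirroring the loop in ErrorUtils::FormatStackTrace:
// iterate the FrameArray and let each frame (JS or wasm) format itself
// through the virtual ToString().
MaybeHandle<String> FormatFrames(Isolate* isolate, Handle<FrameArray> elems) {
  IncrementalStringBuilder builder(isolate);
  for (FrameArrayIterator it(isolate, elems); it.HasNext(); it.Next()) {
    builder.AppendCString("\n    at ");
    Handle<String> frame_string;
    if (!it.Frame()->ToString().ToHandle(&frame_string)) {
      return MaybeHandle<String>();  // Propagate a pending exception.
    }
    builder.AppendString(frame_string);
  }
  return builder.Finish();
}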
diff --git a/src/messages.h b/src/messages.h
index cf49ac9..e7bbcc3 100644
--- a/src/messages.h
+++ b/src/messages.h
@@ -19,6 +19,8 @@
 namespace internal {
 
 // Forward declarations.
+class AbstractCode;
+class FrameArray;
 class JSMessageObject;
 class LookupIterator;
 class SourceInfo;
@@ -42,38 +44,142 @@
   Handle<JSFunction> function_;
 };
 
-
-class CallSite {
+class StackFrameBase {
  public:
-  CallSite(Isolate* isolate, Handle<JSObject> call_site_obj);
+  virtual ~StackFrameBase() {}
 
-  Handle<Object> GetFileName();
-  Handle<Object> GetFunctionName();
-  Handle<Object> GetScriptNameOrSourceUrl();
-  Handle<Object> GetMethodName();
-  Handle<Object> GetTypeName();
-  Handle<Object> GetEvalOrigin();
+  virtual Handle<Object> GetReceiver() const = 0;
+  virtual Handle<Object> GetFunction() const = 0;
+
+  virtual Handle<Object> GetFileName() = 0;
+  virtual Handle<Object> GetFunctionName() = 0;
+  virtual Handle<Object> GetScriptNameOrSourceUrl() = 0;
+  virtual Handle<Object> GetMethodName() = 0;
+  virtual Handle<Object> GetTypeName() = 0;
+  virtual Handle<Object> GetEvalOrigin() = 0;
+
+  virtual int GetPosition() const = 0;
   // Return 1-based line number, including line offset.
-  int GetLineNumber();
+  virtual int GetLineNumber() = 0;
   // Return 1-based column number, including column offset if first line.
-  int GetColumnNumber();
-  bool IsNative();
-  bool IsToplevel();
-  bool IsEval();
-  bool IsConstructor();
+  virtual int GetColumnNumber() = 0;
 
-  bool IsJavaScript() { return !fun_.is_null(); }
-  bool IsWasm() { return !wasm_obj_.is_null(); }
+  virtual bool IsNative() = 0;
+  virtual bool IsToplevel() = 0;
+  virtual bool IsEval() = 0;
+  virtual bool IsConstructor() = 0;
+  virtual bool IsStrict() const = 0;
 
-  int wasm_func_index() const { return wasm_func_index_; }
+  virtual MaybeHandle<String> ToString() = 0;
+};
+
+class JSStackFrame : public StackFrameBase {
+ public:
+  JSStackFrame(Isolate* isolate, Handle<Object> receiver,
+               Handle<JSFunction> function, Handle<AbstractCode> code,
+               int offset);
+  virtual ~JSStackFrame() {}
+
+  Handle<Object> GetReceiver() const override { return receiver_; }
+  Handle<Object> GetFunction() const override;
+
+  Handle<Object> GetFileName() override;
+  Handle<Object> GetFunctionName() override;
+  Handle<Object> GetScriptNameOrSourceUrl() override;
+  Handle<Object> GetMethodName() override;
+  Handle<Object> GetTypeName() override;
+  Handle<Object> GetEvalOrigin() override;
+
+  int GetPosition() const override;
+  int GetLineNumber() override;
+  int GetColumnNumber() override;
+
+  bool IsNative() override;
+  bool IsToplevel() override;
+  bool IsEval() override;
+  bool IsConstructor() override;
+  bool IsStrict() const override { return is_strict_; }
+
+  MaybeHandle<String> ToString() override;
+
+ private:
+  JSStackFrame();
+  void FromFrameArray(Isolate* isolate, Handle<FrameArray> array, int frame_ix);
+
+  bool HasScript() const;
+  Handle<Script> GetScript() const;
+
+  Isolate* isolate_;
+
+  Handle<Object> receiver_;
+  Handle<JSFunction> function_;
+  Handle<AbstractCode> code_;
+  int offset_;
+
+  bool force_constructor_;
+  bool is_strict_;
+
+  friend class FrameArrayIterator;
+};
+
+class WasmStackFrame : public StackFrameBase {
+ public:
+  virtual ~WasmStackFrame() {}
+
+  Handle<Object> GetReceiver() const override { return wasm_obj_; }
+  Handle<Object> GetFunction() const override;
+
+  Handle<Object> GetFileName() override { return Null(); }
+  Handle<Object> GetFunctionName() override;
+  Handle<Object> GetScriptNameOrSourceUrl() override { return Null(); }
+  Handle<Object> GetMethodName() override { return Null(); }
+  Handle<Object> GetTypeName() override { return Null(); }
+  Handle<Object> GetEvalOrigin() override { return Null(); }
+
+  int GetPosition() const override;
+  int GetLineNumber() override { return wasm_func_index_; }
+  int GetColumnNumber() override { return -1; }
+
+  bool IsNative() override { return false; }
+  bool IsToplevel() override { return false; }
+  bool IsEval() override { return false; }
+  bool IsConstructor() override { return false; }
+  bool IsStrict() const override { return false; }
+
+  MaybeHandle<String> ToString() override;
+
+ private:
+  void FromFrameArray(Isolate* isolate, Handle<FrameArray> array, int frame_ix);
+  Handle<Object> Null() const;
+
+  Isolate* isolate_;
+
+  Handle<Object> wasm_obj_;
+  uint32_t wasm_func_index_;
+  Handle<AbstractCode> code_;
+  int offset_;
+
+  friend class FrameArrayIterator;
+};
+
+class FrameArrayIterator {
+ public:
+  FrameArrayIterator(Isolate* isolate, Handle<FrameArray> array,
+                     int frame_ix = 0);
+
+  StackFrameBase* Frame();
+
+  bool HasNext() const;
+  void Next();
 
  private:
   Isolate* isolate_;
-  Handle<Object> receiver_;
-  Handle<JSFunction> fun_;
-  int32_t pos_ = -1;
-  Handle<JSObject> wasm_obj_;
-  uint32_t wasm_func_index_ = static_cast<uint32_t>(-1);
+
+  Handle<FrameArray> array_;
+  int next_frame_ix_;
+
+  WasmStackFrame wasm_frame_;
+  JSStackFrame js_frame_;
 };
 
 // Determines how stack trace collection skips frames.
@@ -107,16 +213,6 @@
                                               Handle<Object> stack_trace);
 };
 
-class CallSiteUtils : public AllStatic {
- public:
-  static MaybeHandle<Object> Construct(Isolate* isolate,
-                                       Handle<Object> receiver,
-                                       Handle<Object> fun, Handle<Object> pos,
-                                       Handle<Object> strict_mode);
-
-  static MaybeHandle<String> ToString(Isolate* isolate, Handle<Object> recv);
-};
-
 #define MESSAGE_TEMPLATES(T)                                                   \
   /* Error */                                                                  \
   T(None, "")                                                                  \
@@ -158,6 +254,7 @@
   T(ConstructorNotFunction, "Constructor % requires 'new'")                    \
   T(ConstructorNotReceiver, "The .constructor property is not an object")      \
   T(CurrencyCode, "Currency code is required with currency style.")            \
+  T(CyclicModuleDependency, "Detected cycle while resolving name '%'")         \
   T(DataViewNotArrayBuffer,                                                    \
     "First argument to DataView constructor must be an ArrayBuffer")           \
   T(DateType, "this is not a Date object.")                                    \
@@ -402,6 +499,7 @@
   T(UnsupportedTimeZone, "Unsupported time zone specified %")                  \
   T(ValueOutOfRange, "Value % out of range for % options property %")          \
   /* SyntaxError */                                                            \
+  T(AmbiguousExport, "Multiple star exports provide name '%'")                 \
   T(BadGetterArity, "Getter must not have any formal parameters.")             \
   T(BadSetterArity, "Setter must have exactly one formal parameter.")          \
   T(ConstructorIsAccessor, "Class constructor may not be an accessor")         \
@@ -454,8 +552,6 @@
   T(NoCatchOrFinally, "Missing catch or finally after try")                    \
   T(NotIsvar, "builtin %%IS_VAR: not a variable")                              \
   T(ParamAfterRest, "Rest parameter must be last formal parameter")            \
-  T(InvalidRestParameter,                                                      \
-    "Rest parameter must be an identifier or destructuring pattern")           \
   T(PushPastSafeLength,                                                        \
     "Pushing % elements on an array-like of length % "                         \
     "is disallowed, as the total surpasses 2**53-1")                           \
@@ -497,19 +593,10 @@
   T(UnexpectedEOS, "Unexpected end of input")                                  \
   T(UnexpectedFunctionSent,                                                    \
     "function.sent expression is not allowed outside a generator")             \
-  T(UnexpectedInsideTailCall, "Unexpected expression inside tail call")        \
   T(UnexpectedReserved, "Unexpected reserved word")                            \
   T(UnexpectedStrictReserved, "Unexpected strict mode reserved word")          \
   T(UnexpectedSuper, "'super' keyword unexpected here")                        \
-  T(UnexpectedSloppyTailCall,                                                  \
-    "Tail call expressions are not allowed in non-strict mode")                \
   T(UnexpectedNewTarget, "new.target expression is not allowed here")          \
-  T(UnexpectedTailCall, "Tail call expression is not allowed here")            \
-  T(UnexpectedTailCallInCatchBlock,                                            \
-    "Tail call expression in catch block when finally block is also present")  \
-  T(UnexpectedTailCallInForInOf, "Tail call expression in for-in/of body")     \
-  T(UnexpectedTailCallInTryBlock, "Tail call expression in try block")         \
-  T(UnexpectedTailCallOfEval, "Tail call of a direct eval is not allowed")     \
   T(UnexpectedTemplateString, "Unexpected template string")                    \
   T(UnexpectedToken, "Unexpected token %")                                     \
   T(UnexpectedTokenIdentifier, "Unexpected identifier")                        \
@@ -517,6 +604,7 @@
   T(UnexpectedTokenString, "Unexpected string")                                \
   T(UnexpectedTokenRegExp, "Unexpected regular expression")                    \
   T(UnknownLabel, "Undefined label '%'")                                       \
+  T(UnresolvableExport, "Module does not provide an export named '%'")         \
   T(UnterminatedArgList, "missing ) after argument list")                      \
   T(UnterminatedRegExp, "Invalid regular expression: missing /")               \
   T(UnterminatedTemplate, "Unterminated template literal")                     \
@@ -540,7 +628,18 @@
   T(WasmTrapFuncInvalid, "invalid function")                                   \
   T(WasmTrapFuncSigMismatch, "function signature mismatch")                    \
   T(WasmTrapInvalidIndex, "invalid index into function table")                 \
-  T(WasmTrapTypeError, "invalid type")
+  T(WasmTrapTypeError, "invalid type")                                         \
+  /* DataCloneError messages */                                                \
+  T(DataCloneError, "% could not be cloned.")                                  \
+  T(DataCloneErrorNeuteredArrayBuffer,                                         \
+    "An ArrayBuffer is neutered and could not be cloned.")                     \
+  T(DataCloneErrorSharedArrayBufferNotTransferred,                             \
+    "A SharedArrayBuffer could not be cloned. SharedArrayBuffer must be "      \
+    "transferred.")                                                            \
+  T(DataCloneDeserializationError, "Unable to deserialize cloned data.")       \
+  T(DataCloneDeserializationVersionError,                                      \
+    "Unable to deserialize cloned data due to invalid or unsupported "         \
+    "version.")
 
 class MessageTemplate {
  public:
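The header changes above split the old CallSite class into a StackFrameBase interface with JSStackFrame and WasmStackFrame implementations, so callers no longer branch on IsJavaScript()/IsWasm(). A hypothetical helper (an assumption for illustration, not part of the patch) shows the uniform access pattern:

// Picks a display name for either frame kind through the virtual interface.
Handle<Object> FrameDisplayName(Isolate* isolate, StackFrameBase* frame) {
  Handle<Object> name = frame->GetFunctionName();
  if (!name->IsNull(isolate)) return name;
  // Wasm frames return null for most accessors; for JS frames this falls
  // back to the receiver's constructor name.
  return frame->GetTypeName();
}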
diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc
index 20a8a11..f5b235d 100644
--- a/src/mips/assembler-mips.cc
+++ b/src/mips/assembler-mips.cc
@@ -2453,6 +2453,11 @@
   GenInstrRegister(COP1, D, ft, fs, fd, MUL_D);
 }
 
+void Assembler::madd_s(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r2));
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_S);
+}
 
 void Assembler::madd_d(FPURegister fd, FPURegister fr, FPURegister fs,
     FPURegister ft) {
@@ -2460,6 +2465,37 @@
   GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_D);
 }
 
+void Assembler::msub_s(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r2));
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MSUB_S);
+}
+
+void Assembler::msub_d(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r2));
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MSUB_D);
+}
+
+void Assembler::maddf_s(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r6));
+  GenInstrRegister(COP1, S, ft, fs, fd, MADDF_S);
+}
+
+void Assembler::maddf_d(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r6));
+  GenInstrRegister(COP1, D, ft, fs, fd, MADDF_D);
+}
+
+void Assembler::msubf_s(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r6));
+  GenInstrRegister(COP1, S, ft, fs, fd, MSUBF_S);
+}
+
+void Assembler::msubf_d(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(IsMipsArchVariant(kMips32r6));
+  GenInstrRegister(COP1, D, ft, fs, fd, MSUBF_D);
+}
 
 void Assembler::div_s(FPURegister fd, FPURegister fs, FPURegister ft) {
   GenInstrRegister(COP1, S, ft, fs, fd, DIV_S);
@@ -2492,13 +2528,11 @@
 
 
 void Assembler::neg_s(FPURegister fd, FPURegister fs) {
-  DCHECK(!IsMipsArchVariant(kMips32r6));
   GenInstrRegister(COP1, S, f0, fs, fd, NEG_S);
 }
 
 
 void Assembler::neg_d(FPURegister fd, FPURegister fs) {
-  DCHECK(!IsMipsArchVariant(kMips32r6));
   GenInstrRegister(COP1, D, f0, fs, fd, NEG_D);
 }
 
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index 0e41671..e58abd8 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -878,7 +878,14 @@
   void sub_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void mul_s(FPURegister fd, FPURegister fs, FPURegister ft);
   void mul_d(FPURegister fd, FPURegister fs, FPURegister ft);
+  void madd_s(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
   void madd_d(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void msub_s(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void msub_d(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void maddf_s(FPURegister fd, FPURegister fs, FPURegister ft);
+  void maddf_d(FPURegister fd, FPURegister fs, FPURegister ft);
+  void msubf_s(FPURegister fd, FPURegister fs, FPURegister ft);
+  void msubf_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void div_s(FPURegister fd, FPURegister fs, FPURegister ft);
   void div_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void abs_s(FPURegister fd, FPURegister fs);
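For reference, the operand order of the declarations above follows the hardware forms: the r2 COP1X instructions take an explicit addend/subtrahend register fr, while the r6 variants read and modify fd in place. An assumed, test-style usage snippet (register choices are arbitrary and not taken from the patch):

void EmitFusedSingleExamples(Assembler* assm) {
  if (IsMipsArchVariant(kMips32r2)) {
    assm->madd_s(f2, f0, f4, f6);  // f2 = f4 * f6 + f0
    assm->msub_s(f2, f0, f4, f6);  // f2 = f4 * f6 - f0
  } else if (IsMipsArchVariant(kMips32r6)) {
    assm->maddf_s(f2, f4, f6);     // f2 = f2 + f4 * f6
    assm->msubf_s(f2, f4, f6);     // f2 = f2 - f4 * f6
  }
}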
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 844958e..43e6735 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -1782,7 +1782,6 @@
   // a2 : feedback vector
   // a3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1801,7 +1800,7 @@
   Register feedback_map = t1;
   Register weak_value = t4;
   __ lw(weak_value, FieldMemOperand(t2, WeakCell::kValueOffset));
-  __ Branch(&done_increment_count, eq, a1, Operand(weak_value));
+  __ Branch(&done, eq, a1, Operand(weak_value));
   __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
   __ Branch(&done, eq, t2, Operand(at));
   __ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
@@ -1823,7 +1822,7 @@
   // Make sure the function is the Array() function
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2);
   __ Branch(&megamorphic, ne, a1, Operand(t2));
-  __ jmp(&done_increment_count);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -1850,28 +1849,19 @@
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ Branch(&done_initialize_count);
+  __ Branch(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ li(t0, Operand(Smi::FromInt(1)));
-  __ Branch(USE_DELAY_SLOT, &done);
-  __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  __ bind(&done);
 
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
   __ lw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
   __ Addu(t0, t0, Operand(Smi::FromInt(1)));
   __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
-  __ bind(&done);
 }
 
 
@@ -1917,6 +1907,14 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize);
+  __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  __ Addu(slot, slot, Operand(Smi::FromInt(1)));
+  __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // a1 - function
@@ -1929,10 +1927,7 @@
   __ li(a0, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-  __ Addu(a3, a3, Operand(Smi::FromInt(1)));
-  __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+  IncrementCallCount(masm, a2, a3);
 
   __ mov(a2, t0);
   __ mov(a3, a1);
@@ -1945,7 +1940,7 @@
   // a1 - function
   // a3 - slot id (Smi)
   // a2 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1974,13 +1969,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(a1, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-  __ Addu(a3, a3, Operand(Smi::FromInt(1)));
-  __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, a2, a3);
+
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
@@ -2021,6 +2014,10 @@
   __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
 
   __ bind(&call);
+  IncrementCallCount(masm, a2, a3);
+
+  __ bind(&call_count_incremented);
+
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
           USE_DELAY_SLOT);
@@ -2046,11 +2043,6 @@
   __ lw(t1, NativeContextMemOperand());
   __ Branch(&miss, ne, t0, Operand(t1));
 
-  // Initialize the call counter.
-  __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
-  __ li(t0, Operand(Smi::FromInt(1)));
-  __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // a2 - vector
   // a3 - slot
@@ -2058,9 +2050,11 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(a2, a3);
     __ Push(cp, a1);
     __ CallStub(&create_stub);
     __ Pop(cp, a1);
+    __ Pop(a2, a3);
   }
 
   __ Branch(&call_function);
@@ -2070,7 +2064,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ Branch(&call);
+  __ Branch(&call_count_incremented);
 }
 
 
@@ -2275,293 +2269,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-  // Stack frame on entry.
-  //  ra: return address
-  //  sp[0]: to
-  //  sp[4]: from
-  //  sp[8]: string
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length.
-  // If any of these assumptions fail, we call the runtime system.
-
-  const int kToOffset = 0 * kPointerSize;
-  const int kFromOffset = 1 * kPointerSize;
-  const int kStringOffset = 2 * kPointerSize;
-
-  __ lw(a2, MemOperand(sp, kToOffset));
-  __ lw(a3, MemOperand(sp, kFromOffset));
-  STATIC_ASSERT(kFromOffset == kToOffset + 4);
-  STATIC_ASSERT(kSmiTag == 0);
-  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
-
-  // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
-  // safe in this case.
-  __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
-  __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
-  // Both a2 and a3 are untagged integers.
-
-  __ Branch(&runtime, lt, a3, Operand(zero_reg));  // From < 0.
-
-  __ Branch(&runtime, gt, a3, Operand(a2));  // Fail if from > to.
-  __ Subu(a2, a2, a3);
-
-  // Make sure first argument is a string.
-  __ lw(v0, MemOperand(sp, kStringOffset));
-  __ JumpIfSmi(v0, &runtime);
-  __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ And(t0, a1, Operand(kIsNotStringMask));
-
-  __ Branch(&runtime, ne, t0, Operand(zero_reg));
-
-  Label single_char;
-  __ Branch(&single_char, eq, a2, Operand(1));
-
-  // Short-cut for the case of trivial substring.
-  Label return_v0;
-  // v0: original string
-  // a2: result string length
-  __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
-  __ sra(t0, t0, 1);
-  // Return original string.
-  __ Branch(&return_v0, eq, a2, Operand(t0));
-  // Longer than original string's length or negative: unsafe arguments.
-  __ Branch(&runtime, hi, a2, Operand(t0));
-  // Shorter than original string's length: an actual substring.
-
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into t1.
-  // v0: original string
-  // a1: instance type
-  // a2: length
-  // a3: from index (untagged)
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ And(t0, a1, Operand(kIsIndirectStringMask));
-  __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
-  // t0 is used as a scratch register and can be overwritten in either case.
-  __ And(t0, a1, Operand(kSlicedNotConsMask));
-  __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
-  // Cons string.  Check whether it is flat, then fetch first part.
-  __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
-  __ LoadRoot(t0, Heap::kempty_stringRootIndex);
-  __ Branch(&runtime, ne, t1, Operand(t0));
-  __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
-  // Update instance type.
-  __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
-  __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
-  __ sra(t0, t0, 1);  // Add offset to index.
-  __ Addu(a3, a3, t0);
-  // Update instance type.
-  __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mov(t1, v0);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // t1: underlying subject string
-    // a1: instance type of underlying subject string
-    // a2: length
-    // a3: adjusted start index (untagged)
-    // Short slice.  Copy instead of slicing.
-    __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ And(t0, a1, Operand(kStringEncodingMask));
-    __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
-    __ AllocateOneByteSlicedString(v0, a2, t2, t3, &runtime);
-    __ jmp(&set_slice_header);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
-    __ bind(&set_slice_header);
-    __ sll(a3, a3, 1);
-    __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
-    __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
-    __ jmp(&return_v0);
-
-    __ bind(&copy_routine);
-  }
-
-  // t1: underlying subject string
-  // a1: instance type of underlying subject string
-  // a2: length
-  // a3: adjusted start index (untagged)
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ And(t0, a1, Operand(kExternalStringTag));
-  __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ And(t0, a1, Operand(kShortExternalStringTag));
-  __ Branch(&runtime, ne, t0, Operand(zero_reg));
-  __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
-  // t1 already points to the first character of underlying string.
-  __ jmp(&allocate_result);
-
-  __ bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ Addu(t1, t1, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&allocate_result);
-  // Sequential acii string.  Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ And(t0, a1, Operand(kStringEncodingMask));
-  __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
-
-  // Allocate and copy the resulting ASCII string.
-  __ AllocateOneByteString(v0, a2, t0, t2, t3, &runtime);
-
-  // Locate first character of substring to copy.
-  __ Addu(t1, t1, a3);
-
-  // Locate first character of result.
-  __ Addu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  // v0: result string
-  // a1: first character of result string
-  // a2: result string length
-  // t1: first character of substring to copy
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, a1, t1, a2, a3, String::ONE_BYTE_ENCODING);
-  __ jmp(&return_v0);
-
-  // Allocate and copy the resulting two-byte string.
-  __ bind(&two_byte_sequential);
-  __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
-
-  // Locate first character of substring to copy.
-  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
-  __ Lsa(t1, t1, a3, 1);
-  // Locate first character of result.
-  __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  // v0: result string.
-  // a1: first character of result.
-  // a2: result length.
-  // t1: first character of substring to copy.
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING);
-
-  __ bind(&return_v0);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
-  __ DropAndRet(3);
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // v0: original string
-  // a1: instance type
-  // a2: length
-  // a3: from index (untagged)
-  __ SmiTag(a3, a3);
-  StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ DropAndRet(3);
-  generator.SkipSlow(masm, &runtime);
-}
-
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes on argument in a0.
-  Label is_number;
-  __ JumpIfSmi(a0, &is_number);
-
-  Label not_string;
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_string);
-
-  Label not_heap_number;
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
-  __ bind(&not_oddball);
-
-  __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in a0.
-  Label is_number;
-  __ JumpIfSmi(a0, &is_number);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_name);
-
-  Label not_heap_number;
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
-  __ bind(&not_oddball);
-
-  __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
@@ -3915,7 +3622,7 @@
   __ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
 
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ mov(feedback, too_far);
 
   __ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4624,7 +4331,7 @@
     Label too_big_for_new_space;
     __ bind(&allocate);
     __ Branch(&too_big_for_new_space, gt, t0,
-              Operand(Page::kMaxRegularHeapObjectSize));
+              Operand(kMaxRegularHeapObjectSize));
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
       __ SmiTag(t0);
@@ -4968,8 +4675,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ Branch(&too_big_for_new_space, gt, t0,
-            Operand(Page::kMaxRegularHeapObjectSize));
+  __ Branch(&too_big_for_new_space, gt, t0, Operand(kMaxRegularHeapObjectSize));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ SmiTag(t0);
diff --git a/src/mips/constants-mips.cc b/src/mips/constants-mips.cc
index 3afb881..ad97e41 100644
--- a/src/mips/constants-mips.cc
+++ b/src/mips/constants-mips.cc
@@ -123,116 +123,6 @@
 }
 
 
-// -----------------------------------------------------------------------------
-// Instructions.
-
-bool Instruction::IsForbiddenAfterBranchInstr(Instr instr) {
-  Opcode opcode = static_cast<Opcode>(instr & kOpcodeMask);
-  switch (opcode) {
-    case J:
-    case JAL:
-    case BEQ:
-    case BNE:
-    case BLEZ:  // POP06 bgeuc/bleuc, blezalc, bgezalc
-    case BGTZ:  // POP07 bltuc/bgtuc, bgtzalc, bltzalc
-    case BEQL:
-    case BNEL:
-    case BLEZL:  // POP26 bgezc, blezc, bgec/blec
-    case BGTZL:  // POP27 bgtzc, bltzc, bltc/bgtc
-    case BC:
-    case BALC:
-    case POP10:  // beqzalc, bovc, beqc
-    case POP30:  // bnezalc, bnvc, bnec
-    case POP66:  // beqzc, jic
-    case POP76:  // bnezc, jialc
-      return true;
-    case REGIMM:
-      switch (instr & kRtFieldMask) {
-        case BLTZ:
-        case BGEZ:
-        case BLTZAL:
-        case BGEZAL:
-          return true;
-        default:
-          return false;
-      }
-      break;
-    case SPECIAL:
-      switch (instr & kFunctionFieldMask) {
-        case JR:
-        case JALR:
-          return true;
-        default:
-          return false;
-      }
-      break;
-    case COP1:
-      switch (instr & kRsFieldMask) {
-        case BC1:
-        case BC1EQZ:
-        case BC1NEZ:
-          return true;
-          break;
-        default:
-          return false;
-      }
-      break;
-    default:
-      return false;
-  }
-}
-
-
-bool Instruction::IsLinkingInstruction() const {
-  switch (OpcodeFieldRaw()) {
-    case JAL:
-      return true;
-    case POP76:
-      if (RsFieldRawNoAssert() == JIALC)
-        return true;  // JIALC
-      else
-        return false;  // BNEZC
-    case REGIMM:
-      switch (RtFieldRaw()) {
-        case BGEZAL:
-        case BLTZAL:
-          return true;
-      default:
-        return false;
-      }
-    case SPECIAL:
-      switch (FunctionFieldRaw()) {
-        case JALR:
-          return true;
-        default:
-          return false;
-      }
-    default:
-      return false;
-  }
-}
-
-
-bool Instruction::IsTrap() const {
-  if (OpcodeFieldRaw() != SPECIAL) {
-    return false;
-  } else {
-    switch (FunctionFieldRaw()) {
-      case BREAK:
-      case TGE:
-      case TGEU:
-      case TLT:
-      case TLTU:
-      case TEQ:
-      case TNE:
-        return true;
-      default:
-        return false;
-    }
-  }
-}
-
-
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/mips/constants-mips.h b/src/mips/constants-mips.h
index 8301c5e..200939d 100644
--- a/src/mips/constants-mips.h
+++ b/src/mips/constants-mips.h
@@ -525,6 +525,8 @@
   FLOOR_W_S = ((1U << 3) + 7),
   RECIP_S = ((2U << 3) + 5),
   RSQRT_S = ((2U << 3) + 6),
+  MADDF_S = ((3U << 3) + 0),
+  MSUBF_S = ((3U << 3) + 1),
   CLASS_S = ((3U << 3) + 3),
   CVT_D_S = ((4U << 3) + 1),
   CVT_W_S = ((4U << 3) + 4),
@@ -550,6 +552,8 @@
   FLOOR_W_D = ((1U << 3) + 7),
   RECIP_D = ((2U << 3) + 5),
   RSQRT_D = ((2U << 3) + 6),
+  MADDF_D = ((3U << 3) + 0),
+  MSUBF_D = ((3U << 3) + 1),
   CLASS_D = ((3U << 3) + 3),
   MIN = ((3U << 3) + 4),
   MINA = ((3U << 3) + 5),
@@ -616,8 +620,12 @@
   MOVF = ((2U << 3) + 1),      // Function field for MOVT.fmt and MOVF.fmt
   SELNEZ_C = ((2U << 3) + 7),  // COP1 on FPR registers.
   // COP1 Encoding of Function Field When rs=PS.
+
   // COP1X Encoding of Function Field.
+  MADD_S = ((4U << 3) + 0),
   MADD_D = ((4U << 3) + 1),
+  MSUB_S = ((5U << 3) + 0),
+  MSUB_D = ((5U << 3) + 1),
 
   // PCREL Encoding of rt Field.
   ADDIUPC = ((0U << 2) + 0),
@@ -858,8 +866,7 @@
   return 1ULL << (static_cast<uint32_t>(opcode) >> kOpcodeShift);
 }
 
-
-class Instruction {
+class InstructionBase {
  public:
   enum {
     kInstrSize = 4,
@@ -869,6 +876,9 @@
     kPCReadOffset = 0
   };
 
+  // Instruction type.
+  enum Type { kRegisterType, kImmediateType, kJumpType, kUnsupported = -1 };
+
   // Get the raw instruction bits.
   inline Instr InstructionBits() const {
     return *reinterpret_cast<const Instr*>(this);
@@ -889,16 +899,6 @@
     return (InstructionBits() >> lo) & ((2U << (hi - lo)) - 1);
   }
 
-  // Instruction type.
-  enum Type {
-    kRegisterType,
-    kImmediateType,
-    kJumpType,
-    kUnsupported = -1
-  };
-
-  enum TypeChecks { NORMAL, EXTRA };
-
 
   static constexpr uint64_t kOpcodeImmediateTypeMask =
       OpcodeToBitNumber(REGIMM) | OpcodeToBitNumber(BEQ) |
@@ -943,82 +943,14 @@
       FunctionFieldToBitNumber(MOVCI) | FunctionFieldToBitNumber(SELEQZ_S) |
       FunctionFieldToBitNumber(SELNEZ_S) | FunctionFieldToBitNumber(SYNC);
 
-  // Get the encoding type of the instruction.
-  inline Type InstructionType(TypeChecks checks = NORMAL) const;
-
   // Accessors for the different named fields used in the MIPS encoding.
   inline Opcode OpcodeValue() const {
     return static_cast<Opcode>(
         Bits(kOpcodeShift + kOpcodeBits - 1, kOpcodeShift));
   }
 
-  inline int RsValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kRsShift + kRsBits - 1, kRsShift);
-  }
-
-  inline int RtValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kRtShift + kRtBits - 1, kRtShift);
-  }
-
-  inline int RdValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kRdShift + kRdBits - 1, kRdShift);
-  }
-
-  inline int SaValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kSaShift + kSaBits - 1, kSaShift);
-  }
-
-  inline int LsaSaValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kSaShift + kLsaSaBits - 1, kSaShift);
-  }
-
-  inline int FunctionValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kFunctionShift + kFunctionBits - 1, kFunctionShift);
-  }
-
-  inline int FdValue() const {
-    return Bits(kFdShift + kFdBits - 1, kFdShift);
-  }
-
-  inline int FsValue() const {
-    return Bits(kFsShift + kFsBits - 1, kFsShift);
-  }
-
-  inline int FtValue() const {
-    return Bits(kFtShift + kFtBits - 1, kFtShift);
-  }
-
-  inline int FrValue() const {
-    return Bits(kFrShift + kFrBits -1, kFrShift);
-  }
-
-  inline int Bp2Value() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kBp2Shift + kBp2Bits - 1, kBp2Shift);
-  }
-
-  // Float Compare condition code instruction bits.
-  inline int FCccValue() const {
-    return Bits(kFCccShift + kFCccBits - 1, kFCccShift);
-  }
-
-  // Float Branch condition code instruction bits.
-  inline int FBccValue() const {
-    return Bits(kFBccShift + kFBccBits - 1, kFBccShift);
-  }
-
-  // Float Branch true/false instruction bit.
-  inline int FBtrueValue() const {
-    return Bits(kFBtrueShift + kFBtrueBits - 1, kFBtrueShift);
+  inline int FunctionFieldRaw() const {
+    return InstructionBits() & kFunctionFieldMask;
   }
 
   // Return the fields at their original place in the instruction encoding.
@@ -1026,39 +958,125 @@
     return static_cast<Opcode>(InstructionBits() & kOpcodeMask);
   }
 
-  inline int RsFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return InstructionBits() & kRsFieldMask;
-  }
-
-  // Same as above function, but safe to call within InstructionType().
+  // Safe to call within InstructionType().
   inline int RsFieldRawNoAssert() const {
     return InstructionBits() & kRsFieldMask;
   }
 
+  inline int SaFieldRaw() const { return InstructionBits() & kSaFieldMask; }
+
+  // Get the encoding type of the instruction.
+  inline Type InstructionType() const;
+
+ protected:
+  InstructionBase() {}
+};
+
+template <class T>
+class InstructionGetters : public T {
+ public:
+  inline int RsValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return InstructionBase::Bits(kRsShift + kRsBits - 1, kRsShift);
+  }
+
+  inline int RtValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kRtShift + kRtBits - 1, kRtShift);
+  }
+
+  inline int RdValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kRdShift + kRdBits - 1, kRdShift);
+  }
+
+  inline int SaValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kSaShift + kSaBits - 1, kSaShift);
+  }
+
+  inline int LsaSaValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kSaShift + kLsaSaBits - 1, kSaShift);
+  }
+
+  inline int FunctionValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kFunctionShift + kFunctionBits - 1, kFunctionShift);
+  }
+
+  inline int FdValue() const {
+    return this->Bits(kFdShift + kFdBits - 1, kFdShift);
+  }
+
+  inline int FsValue() const {
+    return this->Bits(kFsShift + kFsBits - 1, kFsShift);
+  }
+
+  inline int FtValue() const {
+    return this->Bits(kFtShift + kFtBits - 1, kFtShift);
+  }
+
+  inline int FrValue() const {
+    return this->Bits(kFrShift + kFrBits - 1, kFrShift);
+  }
+
+  inline int Bp2Value() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kBp2Shift + kBp2Bits - 1, kBp2Shift);
+  }
+
+  // Float Compare condition code instruction bits.
+  inline int FCccValue() const {
+    return this->Bits(kFCccShift + kFCccBits - 1, kFCccShift);
+  }
+
+  // Float Branch condition code instruction bits.
+  inline int FBccValue() const {
+    return this->Bits(kFBccShift + kFBccBits - 1, kFBccShift);
+  }
+
+  // Float Branch true/false instruction bit.
+  inline int FBtrueValue() const {
+    return this->Bits(kFBtrueShift + kFBtrueBits - 1, kFBtrueShift);
+  }
+
+  // Return the fields at their original place in the instruction encoding.
+  inline Opcode OpcodeFieldRaw() const {
+    return static_cast<Opcode>(this->InstructionBits() & kOpcodeMask);
+  }
+
+  inline int RsFieldRaw() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->InstructionBits() & kRsFieldMask;
+  }
+
   inline int RtFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return InstructionBits() & kRtFieldMask;
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->InstructionBits() & kRtFieldMask;
   }
 
   inline int RdFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return InstructionBits() & kRdFieldMask;
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->InstructionBits() & kRdFieldMask;
   }
 
   inline int SaFieldRaw() const {
-    return InstructionBits() & kSaFieldMask;
+    return this->InstructionBits() & kSaFieldMask;
   }
 
   inline int FunctionFieldRaw() const {
-    return InstructionBits() & kFunctionFieldMask;
+    return this->InstructionBits() & kFunctionFieldMask;
   }
 
   // Get the secondary field according to the opcode.
   inline int SecondaryValue() const {
-    Opcode op = OpcodeFieldRaw();
+    Opcode op = this->OpcodeFieldRaw();
     switch (op) {
       case SPECIAL:
       case SPECIAL2:
@@ -1073,34 +1091,34 @@
   }
 
   inline int32_t ImmValue(int bits) const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(bits - 1, 0);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(bits - 1, 0);
   }
 
   inline int32_t Imm16Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm16Shift + kImm16Bits - 1, kImm16Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm16Shift + kImm16Bits - 1, kImm16Shift);
   }
 
   inline int32_t Imm18Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm18Shift + kImm18Bits - 1, kImm18Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm18Shift + kImm18Bits - 1, kImm18Shift);
   }
 
   inline int32_t Imm19Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm19Shift + kImm19Bits - 1, kImm19Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm19Shift + kImm19Bits - 1, kImm19Shift);
   }
 
   inline int32_t Imm21Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm21Shift + kImm21Bits - 1, kImm21Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm21Shift + kImm21Bits - 1, kImm21Shift);
   }
 
   inline int32_t Imm26Value() const {
-    DCHECK((InstructionType() == kJumpType) ||
-           (InstructionType() == kImmediateType));
-    return Bits(kImm26Shift + kImm26Bits - 1, kImm26Shift);
+    DCHECK((this->InstructionType() == InstructionBase::kJumpType) ||
+           (this->InstructionType() == InstructionBase::kImmediateType));
+    return this->Bits(kImm26Shift + kImm26Bits - 1, kImm26Shift);
   }
 
   static bool IsForbiddenAfterBranchInstr(Instr instr);
@@ -1108,7 +1126,7 @@
   // Say if the instruction should not be used in a branch delay slot or
   // immediately after a compact branch.
   inline bool IsForbiddenAfterBranch() const {
-    return IsForbiddenAfterBranchInstr(InstructionBits());
+    return IsForbiddenAfterBranchInstr(this->InstructionBits());
   }
 
   inline bool IsForbiddenInBranchDelay() const {
@@ -1119,7 +1137,10 @@
   bool IsLinkingInstruction() const;
   // Say if the instruction is a break or a trap.
   bool IsTrap() const;
+};
 
+class Instruction : public InstructionGetters<InstructionBase> {
+ public:
   // Instructions are read out of a code stream. The only way to get a
   // reference to an instruction is to convert a pointer. There is no way
   // to allocate or create instances of class Instruction.
@@ -1148,26 +1169,14 @@
 
 const int kBranchReturnOffset = 2 * Instruction::kInstrSize;
 
-
-Instruction::Type Instruction::InstructionType(TypeChecks checks) const {
-  if (checks == EXTRA) {
-    if (OpcodeToBitNumber(OpcodeFieldRaw()) & kOpcodeImmediateTypeMask) {
-      return kImmediateType;
-    }
-  }
+InstructionBase::Type InstructionBase::InstructionType() const {
   switch (OpcodeFieldRaw()) {
     case SPECIAL:
-      if (checks == EXTRA) {
-        if (FunctionFieldToBitNumber(FunctionFieldRaw()) &
-            kFunctionFieldRegisterTypeMask) {
-          return kRegisterType;
-        } else {
-          return kUnsupported;
-        }
-      } else {
+      if (FunctionFieldToBitNumber(FunctionFieldRaw()) &
+          kFunctionFieldRegisterTypeMask) {
         return kRegisterType;
       }
-      break;
+      return kUnsupported;
     case SPECIAL2:
       switch (FunctionFieldRaw()) {
         case MUL:
@@ -1222,16 +1231,124 @@
       return kJumpType;
 
     default:
-      if (checks == NORMAL) {
         return kImmediateType;
-      } else {
-        return kUnsupported;
-      }
   }
 }
 
 #undef OpcodeToBitNumber
 #undef FunctionFieldToBitNumber
+
+// -----------------------------------------------------------------------------
+// Instructions.
+
+template <class P>
+bool InstructionGetters<P>::IsLinkingInstruction() const {
+  uint32_t op = this->OpcodeFieldRaw();
+  switch (op) {
+    case JAL:
+      return true;
+    case POP76:
+      if (this->RsFieldRawNoAssert() == JIALC)
+        return true;  // JIALC
+      else
+        return false;  // BNEZC
+    case REGIMM:
+      switch (this->RtFieldRaw()) {
+        case BGEZAL:
+        case BLTZAL:
+          return true;
+        default:
+          return false;
+      }
+    case SPECIAL:
+      switch (this->FunctionFieldRaw()) {
+        case JALR:
+          return true;
+        default:
+          return false;
+      }
+    default:
+      return false;
+  }
+}
+
+template <class P>
+bool InstructionGetters<P>::IsTrap() const {
+  if (this->OpcodeFieldRaw() != SPECIAL) {
+    return false;
+  } else {
+    switch (this->FunctionFieldRaw()) {
+      case BREAK:
+      case TGE:
+      case TGEU:
+      case TLT:
+      case TLTU:
+      case TEQ:
+      case TNE:
+        return true;
+      default:
+        return false;
+    }
+  }
+}
+
+// static
+template <class T>
+bool InstructionGetters<T>::IsForbiddenAfterBranchInstr(Instr instr) {
+  Opcode opcode = static_cast<Opcode>(instr & kOpcodeMask);
+  switch (opcode) {
+    case J:
+    case JAL:
+    case BEQ:
+    case BNE:
+    case BLEZ:  // POP06 bgeuc/bleuc, blezalc, bgezalc
+    case BGTZ:  // POP07 bltuc/bgtuc, bgtzalc, bltzalc
+    case BEQL:
+    case BNEL:
+    case BLEZL:  // POP26 bgezc, blezc, bgec/blec
+    case BGTZL:  // POP27 bgtzc, bltzc, bltc/bgtc
+    case BC:
+    case BALC:
+    case POP10:  // beqzalc, bovc, beqc
+    case POP30:  // bnezalc, bnvc, bnec
+    case POP66:  // beqzc, jic
+    case POP76:  // bnezc, jialc
+      return true;
+    case REGIMM:
+      switch (instr & kRtFieldMask) {
+        case BLTZ:
+        case BGEZ:
+        case BLTZAL:
+        case BGEZAL:
+          return true;
+        default:
+          return false;
+      }
+      break;
+    case SPECIAL:
+      switch (instr & kFunctionFieldMask) {
+        case JR:
+        case JALR:
+          return true;
+        default:
+          return false;
+      }
+      break;
+    case COP1:
+      switch (instr & kRsFieldMask) {
+        case BC1:
+        case BC1EQZ:
+        case BC1NEZ:
+          return true;
+          break;
+        default:
+          return false;
+      }
+      break;
+    default:
+      return false;
+  }
+}
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/mips/disasm-mips.cc b/src/mips/disasm-mips.cc
index bd07874..f541e91 100644
--- a/src/mips/disasm-mips.cc
+++ b/src/mips/disasm-mips.cc
@@ -918,6 +918,12 @@
       case CVT_D_S:
         Format(instr, "cvt.d.'t 'fd, 'fs");
         break;
+      case MADDF_S:
+        Format(instr, "maddf.s  'fd, 'fs, 'ft");
+        break;
+      case MSUBF_S:
+        Format(instr, "msubf.s  'fd, 'fs, 'ft");
+        break;
       default:
         Format(instr, "unknown.cop1.'t");
         break;
@@ -928,7 +934,17 @@
 
 void Decoder::DecodeTypeRegisterDRsType(Instruction* instr) {
   if (!DecodeTypeRegisterRsType(instr)) {
-    Format(instr, "unknown.cop1.'t");
+    switch (instr->FunctionFieldRaw()) {
+      case MADDF_D:
+        Format(instr, "maddf.d  'fd, 'fs, 'ft");
+        break;
+      case MSUBF_D:
+        Format(instr, "msubf.d  'fd, 'fs, 'ft");
+        break;
+      default:
+        Format(instr, "unknown.cop1.'t");
+        break;
+    }
   }
 }
 
@@ -1360,9 +1376,18 @@
       break;
     case COP1X:
       switch (instr->FunctionFieldRaw()) {
+        case MADD_S:
+          Format(instr, "madd.s  'fd, 'fr, 'fs, 'ft");
+          break;
         case MADD_D:
           Format(instr, "madd.d  'fd, 'fr, 'fs, 'ft");
           break;
+        case MSUB_S:
+          Format(instr, "msub.s  'fd, 'fr, 'fs, 'ft");
+          break;
+        case MSUB_D:
+          Format(instr, "msub.d  'fd, 'fr, 'fs, 'ft");
+          break;
         default:
           UNREACHABLE();
       }
@@ -1687,7 +1712,7 @@
   out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
                                    "%08x       ",
                                    instr->InstructionBits());
-  switch (instr->InstructionType(Instruction::EXTRA)) {
+  switch (instr->InstructionType()) {
     case Instruction::kRegisterType: {
       DecodeTypeRegister(instr);
       break;
diff --git a/src/mips/interface-descriptors-mips.cc b/src/mips/interface-descriptors-mips.cc
index bafe0b6..aed4142 100644
--- a/src/mips/interface-descriptors-mips.cc
+++ b/src/mips/interface-descriptors-mips.cc
@@ -40,13 +40,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return a3; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return t0; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return a3; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return t1; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return a3; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return t0; }
+const Register StoreTransitionDescriptor::VectorRegister() { return a3; }
+const Register StoreTransitionDescriptor::MapRegister() { return t1; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
@@ -357,7 +353,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       a0,  // callee
@@ -392,7 +388,19 @@
       a0,  // argument count (not including receiver)
       a3,  // new target
       a1,  // constructor to call
-      a2   // address of the first argument
+      a2,  // allocation site feedback if available, undefined otherwise.
+      t4   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      a0,  // argument count (not including receiver)
+      a1,  // the target to call verified to be Array function
+      a2,  // allocation site feedback
+      a3,  // address of first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index dba1fae..d61717d 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -184,9 +184,7 @@
                                 Condition cc,
                                 Label* branch) {
   DCHECK(cc == eq || cc == ne);
-  const int mask =
-      1 << MemoryChunk::IN_FROM_SPACE | 1 << MemoryChunk::IN_TO_SPACE;
-  CheckPageFlag(object, scratch, mask, cc, branch);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc, branch);
 }
 
 
@@ -1126,8 +1124,13 @@
   if (rt.is_reg()) {
     sltu(rd, rs, rt.rm());
   } else {
-    if (is_uint16(rt.imm32_) && !MustUseReg(rt.rmode_)) {
+    const uint32_t int16_min = std::numeric_limits<int16_t>::min();
+    if (is_uint15(rt.imm32_) && !MustUseReg(rt.rmode_)) {
+      // Imm range is: [0, 32767].
       sltiu(rd, rs, rt.imm32_);
+    } else if (is_uint15(rt.imm32_ - int16_min) && !MustUseReg(rt.rmode_)) {
+      // Imm range is: [max_unsigned-32767,max_unsigned].
+      sltiu(rd, rs, static_cast<uint16_t>(rt.imm32_));
     } else {
       // li handles the relocation.
       DCHECK(!rs.is(at));
@@ -1915,9 +1918,12 @@
 }
 
 void MacroAssembler::Neg_s(FPURegister fd, FPURegister fs) {
-  Register scratch1 = t8;
-  Register scratch2 = t9;
-  if (IsMipsArchVariant(kMips32r2)) {
+  if (IsMipsArchVariant(kMips32r6)) {
+    // r6 neg_s changes the sign for NaN-like operands as well.
+    neg_s(fd, fs);
+  } else {
+    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
+           IsMipsArchVariant(kLoongson));
     Label is_nan, done;
     Register scratch1 = t8;
     Register scratch2 = t9;
@@ -1926,7 +1932,6 @@
     // For NaN input, neg_s will return the same NaN value,
     // while the sign has to be changed separately.
     neg_s(fd, fs);  // In delay slot.
-
     bind(&is_nan);
     mfc1(scratch1, fs);
     And(scratch2, scratch1, Operand(~kBinary32SignMask));
@@ -1935,27 +1940,24 @@
     Or(scratch2, scratch2, scratch1);
     mtc1(scratch2, fd);
     bind(&done);
-  } else {
-    mfc1(scratch1, fs);
-    And(scratch2, scratch1, Operand(~kBinary32SignMask));
-    And(scratch1, scratch1, Operand(kBinary32SignMask));
-    Xor(scratch1, scratch1, Operand(kBinary32SignMask));
-    Or(scratch2, scratch2, scratch1);
-    mtc1(scratch2, fd);
   }
 }
 
 void MacroAssembler::Neg_d(FPURegister fd, FPURegister fs) {
-  Register scratch1 = t8;
-  Register scratch2 = t9;
-  if (IsMipsArchVariant(kMips32r2)) {
+  if (IsMipsArchVariant(kMips32r6)) {
+    // r6 neg_d changes the sign for NaN-like operands as well.
+    neg_d(fd, fs);
+  } else {
+    DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r1) ||
+           IsMipsArchVariant(kLoongson));
     Label is_nan, done;
+    Register scratch1 = t8;
+    Register scratch2 = t9;
     BranchF64(nullptr, &is_nan, eq, fs, fs);
     Branch(USE_DELAY_SLOT, &done);
     // For NaN input, neg_d will return the same NaN value,
     // while the sign has to be changed separately.
     neg_d(fd, fs);  // In delay slot.
-
     bind(&is_nan);
     Mfhc1(scratch1, fs);
     And(scratch2, scratch1, Operand(~HeapNumber::kSignMask));
@@ -1964,14 +1966,6 @@
     Or(scratch2, scratch2, scratch1);
     Mthc1(scratch2, fd);
     bind(&done);
-  } else {
-    Move_d(fd, fs);
-    Mfhc1(scratch1, fs);
-    And(scratch2, scratch1, Operand(~HeapNumber::kSignMask));
-    And(scratch1, scratch1, Operand(HeapNumber::kSignMask));
-    Xor(scratch1, scratch1, Operand(HeapNumber::kSignMask));
-    Or(scratch2, scratch2, scratch1);
-    Mthc1(scratch2, fd);
   }
 }
 
@@ -2170,7 +2164,7 @@
     // Check for unordered (NaN) cases.
     if (nan) {
       bool long_branch =
-          nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
+          nan->is_bound() ? !is_near(nan) : is_trampoline_emitted();
       if (!IsMipsArchVariant(kMips32r6)) {
         if (long_branch) {
           Label skip;
@@ -2209,7 +2203,7 @@
 
     if (target) {
       bool long_branch =
-          target->is_bound() ? is_near(target) : is_trampoline_emitted();
+          target->is_bound() ? !is_near(target) : is_trampoline_emitted();
       if (long_branch) {
         Label skip;
         Condition neg_cond = NegateFpuCondition(cond);
@@ -4220,7 +4214,7 @@
                               Register scratch2,
                               Label* gc_required,
                               AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
@@ -4402,7 +4396,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));
 
   // Make object size into bytes.
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index aa5b0f9..4024e52 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -215,6 +215,18 @@
                            Func GetLabelFunction);
 #undef COND_ARGS
 
+  // Emit code that loads |parameter_index|'th parameter from the stack to
+  // the register according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count,
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index 59dc300..bd42399 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -808,8 +808,8 @@
   last_debugger_input_ = input;
 }
 
-void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
-                            size_t size) {
+void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
+                            void* start_addr, size_t size) {
   intptr_t start = reinterpret_cast<intptr_t>(start_addr);
   int intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -829,8 +829,10 @@
   }
 }
 
-CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
-  base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
+CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                   void* page) {
+  base::CustomMatcherHashMap::Entry* entry =
+      i_cache->LookupOrInsert(page, ICacheHash(page));
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
     entry->value = new_page;
@@ -840,7 +842,8 @@
 
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
+void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
+                             intptr_t start, int size) {
   DCHECK(size <= CachePage::kPageSize);
   DCHECK(AllOnOnePage(start, size - 1));
   DCHECK((start & CachePage::kLineMask) == 0);
@@ -852,7 +855,8 @@
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
-void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
+void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
+                            Instruction* instr) {
   intptr_t address = reinterpret_cast<intptr_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@@ -885,7 +889,7 @@
 Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
   i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
-    i_cache_ = new base::HashMap(&ICacheMatch);
+    i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
     isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize(isolate);
@@ -997,11 +1001,12 @@
 
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
   if (i_cache != nullptr) {
-    for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
-         entry = i_cache->Next(entry)) {
+    for (base::CustomMatcherHashMap::Entry* entry = i_cache->Start();
+         entry != nullptr; entry = i_cache->Next(entry)) {
       delete static_cast<CachePage*>(entry->value);
     }
     delete i_cache;
@@ -1929,16 +1934,16 @@
 
 // Software interrupt instructions are used by the simulator to call into the
 // C-based V8 runtime. They are also used for debugging with the simulator.
-void Simulator::SoftwareInterrupt(Instruction* instr) {
+void Simulator::SoftwareInterrupt() {
   // There are several instructions that could get us here,
   // the break_ instruction, or several variants of traps. All
   // Are "SPECIAL" class opcode, and are distinuished by function.
-  int32_t func = instr->FunctionFieldRaw();
-  uint32_t code = (func == BREAK) ? instr->Bits(25, 6) : -1;
+  int32_t func = instr_.FunctionFieldRaw();
+  uint32_t code = (func == BREAK) ? instr_.Bits(25, 6) : -1;
 
   // We first check if we met a call_rt_redirected.
-  if (instr->InstructionBits() == rtCallRedirInstr) {
-    Redirection* redirection = Redirection::FromSwiInstruction(instr);
+  if (instr_.InstructionBits() == rtCallRedirInstr) {
+    Redirection* redirection = Redirection::FromSwiInstruction(instr_.instr());
     int32_t arg0 = get_register(a0);
     int32_t arg1 = get_register(a1);
     int32_t arg2 = get_register(a2);
@@ -2173,7 +2178,7 @@
       PrintWatchpoint(code);
     } else {
       IncreaseStopCounter(code);
-      HandleStop(code, instr);
+      HandleStop(code, instr_.instr());
     }
   } else {
     // All remaining break_ codes, and all traps are handled here.
@@ -2366,6 +2371,49 @@
   return result;
 }
 
+enum class KeepSign : bool { no = false, yes };
+
+template <typename T, typename std::enable_if<std::is_floating_point<T>::value,
+                                              int>::type = 0>
+T FPUCanonalizeNaNArg(T result, T arg, KeepSign keepSign = KeepSign::no) {
+  DCHECK(std::isnan(arg));
+  T qNaN = std::numeric_limits<T>::quiet_NaN();
+  if (keepSign == KeepSign::yes) {
+    return std::copysign(qNaN, result);
+  }
+  return qNaN;
+}
+
+template <typename T>
+T FPUCanonalizeNaNArgs(T result, KeepSign keepSign, T first) {
+  if (std::isnan(first)) {
+    return FPUCanonalizeNaNArg(result, first, keepSign);
+  }
+  return result;
+}
+
+template <typename T, typename... Args>
+T FPUCanonalizeNaNArgs(T result, KeepSign keepSign, T first, Args... args) {
+  if (std::isnan(first)) {
+    return FPUCanonalizeNaNArg(result, first, keepSign);
+  }
+  return FPUCanonalizeNaNArgs(result, keepSign, args...);
+}
+
+template <typename Func, typename T, typename... Args>
+T FPUCanonalizeOperation(Func f, T first, Args... args) {
+  return FPUCanonalizeOperation(f, KeepSign::no, first, args...);
+}
+
+template <typename Func, typename T, typename... Args>
+T FPUCanonalizeOperation(Func f, KeepSign keepSign, T first, Args... args) {
+  T result = f(first, args...);
+  if (std::isnan(result)) {
+    result = FPUCanonalizeNaNArgs(result, keepSign, first, args...);
+  }
+  return result;
+}
+
 // Handle execution based on instruction types.
 
 void Simulator::DecodeTypeRegisterDRsType() {
@@ -2373,15 +2421,14 @@
   uint32_t cc, fcsr_cc;
   int64_t i64;
   fs = get_fpu_register_double(fs_reg());
-  ft = (get_instr()->FunctionFieldRaw() != MOVF)
-           ? get_fpu_register_double(ft_reg())
-           : 0.0;
+  ft = (instr_.FunctionFieldRaw() != MOVF) ? get_fpu_register_double(ft_reg())
+                                           : 0.0;
   fd = get_fpu_register_double(fd_reg());
   int64_t ft_int = bit_cast<int64_t>(ft);
   int64_t fd_int = bit_cast<int64_t>(fd);
-  cc = get_instr()->FCccValue();
+  cc = instr_.FCccValue();
   fcsr_cc = get_fcsr_condition_bit(cc);
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case RINT: {
       DCHECK(IsMipsArchVariant(kMips32r6));
       double result, temp, temp_result;
@@ -2440,7 +2487,7 @@
     }
     case MOVN_C: {
       DCHECK(IsMipsArchVariant(kMips32r2));
-      int32_t rt_reg = get_instr()->RtValue();
+      int32_t rt_reg = instr_.RtValue();
       int32_t rt = get_register(rt_reg);
       if (rt != 0) {
         set_fpu_register_double(fd_reg(), fs);
@@ -2451,7 +2498,7 @@
       // Same function field for MOVT.D and MOVF.D
       uint32_t ft_cc = (ft_reg() >> 2) & 0x7;
       ft_cc = get_fcsr_condition_bit(ft_cc);
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         // MOVT.D
         if (test_fcsr_bit(ft_cc)) set_fpu_register_double(fd_reg(), fs);
       } else {
@@ -2477,43 +2524,65 @@
       set_fpu_register_double(fd_reg(), FPUMaxA(ft, fs));
       break;
     case ADD_D:
-      set_fpu_register_double(fd_reg(), fs + ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs + rhs; }, fs, ft));
       break;
     case SUB_D:
-      set_fpu_register_double(fd_reg(), fs - ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs - rhs; }, fs, ft));
+      break;
+    case MADDF_D:
+      DCHECK(IsMipsArchVariant(kMips32r6));
+      set_fpu_register_double(fd_reg(), fd + (fs * ft));
+      break;
+    case MSUBF_D:
+      DCHECK(IsMipsArchVariant(kMips32r6));
+      set_fpu_register_double(fd_reg(), fd - (fs * ft));
       break;
     case MUL_D:
-      set_fpu_register_double(fd_reg(), fs * ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs * rhs; }, fs, ft));
       break;
     case DIV_D:
-      set_fpu_register_double(fd_reg(), fs / ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs / rhs; }, fs, ft));
       break;
     case ABS_D:
-      set_fpu_register_double(fd_reg(), fabs(fs));
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return FPAbs(fs); }, fs));
       break;
     case MOV_D:
       set_fpu_register_double(fd_reg(), fs);
       break;
     case NEG_D:
-      set_fpu_register_double(fd_reg(), -fs);
+      set_fpu_register_double(
+          fd_reg(), FPUCanonalizeOperation([](double src) { return -src; },
+                                           KeepSign::yes, fs));
       break;
     case SQRT_D:
-      lazily_initialize_fast_sqrt(isolate_);
-      set_fpu_register_double(fd_reg(), fast_sqrt(fs, isolate_));
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return std::sqrt(fs); }, fs));
       break;
-    case RSQRT_D: {
-      DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
-      lazily_initialize_fast_sqrt(isolate_);
-      double result = 1.0 / fast_sqrt(fs, isolate_);
-      set_fpu_register_double(fd_reg(), result);
+    case RSQRT_D:
+      set_fpu_register_double(
+          fd_reg(), FPUCanonalizeOperation(
+                        [](double fs) { return 1.0 / std::sqrt(fs); }, fs));
       break;
-    }
-    case RECIP_D: {
-      DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
-      double result = 1.0 / fs;
-      set_fpu_register_double(fd_reg(), result);
+    case RECIP_D:
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return 1.0 / fs; }, fs));
       break;
-    }
     case C_UN_D:
       set_fcsr_bit(fcsr_cc, std::isnan(fs) || std::isnan(ft));
       break;
@@ -2744,7 +2813,7 @@
   float fs = get_fpu_register_float(fs_reg());
   float ft = get_fpu_register_float(ft_reg());
   int32_t alu_out = 0x12345678;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case CVT_S_W:  // Convert word to float (single).
       alu_out = get_fpu_register_signed_word(fs_reg());
       set_fpu_register_float(fd_reg(), static_cast<float>(alu_out));
@@ -2840,9 +2909,9 @@
   int32_t ft_int = bit_cast<int32_t>(ft);
   int32_t fd_int = bit_cast<int32_t>(fd);
   uint32_t cc, fcsr_cc;
-  cc = get_instr()->FCccValue();
+  cc = instr_.FCccValue();
   fcsr_cc = get_fcsr_condition_bit(cc);
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case RINT: {
       DCHECK(IsMipsArchVariant(kMips32r6));
       float result, temp_result;
@@ -2882,43 +2951,65 @@
       break;
     }
     case ADD_S:
-      set_fpu_register_float(fd_reg(), fs + ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs + rhs; },
+                                 fs, ft));
       break;
     case SUB_S:
-      set_fpu_register_float(fd_reg(), fs - ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs - rhs; },
+                                 fs, ft));
+      break;
+    case MADDF_S:
+      DCHECK(IsMipsArchVariant(kMips32r6));
+      set_fpu_register_float(fd_reg(), fd + (fs * ft));
+      break;
+    case MSUBF_S:
+      DCHECK(IsMipsArchVariant(kMips32r6));
+      set_fpu_register_float(fd_reg(), fd - (fs * ft));
       break;
     case MUL_S:
-      set_fpu_register_float(fd_reg(), fs * ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs * rhs; },
+                                 fs, ft));
       break;
     case DIV_S:
-      set_fpu_register_float(fd_reg(), fs / ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs / rhs; },
+                                 fs, ft));
       break;
     case ABS_S:
-      set_fpu_register_float(fd_reg(), fabs(fs));
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float fs) { return FPAbs(fs); }, fs));
       break;
     case MOV_S:
       set_fpu_register_float(fd_reg(), fs);
       break;
     case NEG_S:
-      set_fpu_register_float(fd_reg(), -fs);
+      set_fpu_register_float(
+          fd_reg(), FPUCanonalizeOperation([](float src) { return -src; },
+                                           KeepSign::yes, fs));
       break;
     case SQRT_S:
-      lazily_initialize_fast_sqrt(isolate_);
-      set_fpu_register_float(fd_reg(), fast_sqrt(fs, isolate_));
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float src) { return std::sqrt(src); }, fs));
       break;
-    case RSQRT_S: {
-      DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
-      lazily_initialize_fast_sqrt(isolate_);
-      float result = 1.0 / fast_sqrt(fs, isolate_);
-      set_fpu_register_float(fd_reg(), result);
+    case RSQRT_S:
+      set_fpu_register_float(
+          fd_reg(), FPUCanonalizeOperation(
+                        [](float src) { return 1.0 / std::sqrt(src); }, fs));
       break;
-    }
-    case RECIP_S: {
-      DCHECK(IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6));
-      float result = 1.0 / fs;
-      set_fpu_register_float(fd_reg(), result);
+    case RECIP_S:
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float src) { return 1.0 / src; }, fs));
       break;
-    }
     case C_F_D:
       set_fcsr_bit(fcsr_cc, false);
       break;
@@ -3047,7 +3138,7 @@
       uint32_t ft_cc = (ft_reg() >> 2) & 0x7;
       ft_cc = get_fcsr_condition_bit(ft_cc);
 
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         // MOVT.D
         if (test_fcsr_bit(ft_cc)) set_fpu_register_float(fd_reg(), fs);
       } else {
@@ -3209,7 +3300,7 @@
 void Simulator::DecodeTypeRegisterLRsType() {
   double fs = get_fpu_register_double(fs_reg());
   double ft = get_fpu_register_double(ft_reg());
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case CVT_D_L:  // Mips32r2 instruction.
       // Watch the signs here, we want 2 32-bit vals
       // to make a sign-64.
@@ -3311,7 +3402,7 @@
 
 
 void Simulator::DecodeTypeRegisterCOP1() {
-  switch (get_instr()->RsFieldRaw()) {
+  switch (instr_.RsFieldRaw()) {
     case CFC1:
       // At the moment only FCSR is supported.
       DCHECK(fs_reg() == kFCSRRegister);
@@ -3374,14 +3465,43 @@
 
 
 void Simulator::DecodeTypeRegisterCOP1X() {
-  switch (get_instr()->FunctionFieldRaw()) {
-    case MADD_D:
+  switch (instr_.FunctionFieldRaw()) {
+    case MADD_S: {
+      DCHECK(IsMipsArchVariant(kMips32r2));
+      float fr, ft, fs;
+      fr = get_fpu_register_float(fr_reg());
+      fs = get_fpu_register_float(fs_reg());
+      ft = get_fpu_register_float(ft_reg());
+      set_fpu_register_float(fd_reg(), fs * ft + fr);
+      break;
+    }
+    case MSUB_S: {
+      DCHECK(IsMipsArchVariant(kMips32r2));
+      float fr, ft, fs;
+      fr = get_fpu_register_float(fr_reg());
+      fs = get_fpu_register_float(fs_reg());
+      ft = get_fpu_register_float(ft_reg());
+      set_fpu_register_float(fd_reg(), fs * ft - fr);
+      break;
+    }
+    case MADD_D: {
+      DCHECK(IsMipsArchVariant(kMips32r2));
       double fr, ft, fs;
       fr = get_fpu_register_double(fr_reg());
       fs = get_fpu_register_double(fs_reg());
       ft = get_fpu_register_double(ft_reg());
       set_fpu_register_double(fd_reg(), fs * ft + fr);
       break;
+    }
+    case MSUB_D: {
+      DCHECK(IsMipsArchVariant(kMips32r2));
+      double fr, ft, fs;
+      fr = get_fpu_register_double(fr_reg());
+      fs = get_fpu_register_double(fs_reg());
+      ft = get_fpu_register_double(ft_reg());
+      set_fpu_register_double(fd_reg(), fs * ft - fr);
+      break;
+    }
     default:
       UNREACHABLE();
   }
@@ -3394,7 +3514,7 @@
   uint64_t u64hilo = 0;
   bool do_interrupt = false;
 
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case SELEQZ_S:
       DCHECK(IsMipsArchVariant(kMips32r6));
       set_register(rd_reg(), rt() == 0 ? rs() : 0);
@@ -3534,7 +3654,7 @@
       break;
     case DIV:
       if (IsMipsArchVariant(kMips32r6)) {
-        switch (get_instr()->SaValue()) {
+        switch (sa()) {
           case DIV_OP:
             if (rs() == INT_MIN && rt() == -1) {
               set_register(rd_reg(), INT_MIN);
@@ -3569,7 +3689,7 @@
       break;
     case DIVU:
       if (IsMipsArchVariant(kMips32r6)) {
-        switch (get_instr()->SaValue()) {
+        switch (sa()) {
           case DIV_OP:
             if (rt_u() != 0) {
               set_register(rd_reg(), rs_u() / rt_u());
@@ -3676,9 +3796,9 @@
       }
       break;
     case MOVCI: {
-      uint32_t cc = get_instr()->FBccValue();
+      uint32_t cc = instr_.FBccValue();
       uint32_t fcsr_cc = get_fcsr_condition_bit(cc);
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         if (test_fcsr_bit(fcsr_cc)) set_register(rd_reg(), rs());
       } else {
         if (!test_fcsr_bit(fcsr_cc)) set_register(rd_reg(), rs());
@@ -3695,14 +3815,14 @@
       UNREACHABLE();
   }
   if (do_interrupt) {
-    SoftwareInterrupt(get_instr());
+    SoftwareInterrupt();
   }
 }
 
 
 void Simulator::DecodeTypeRegisterSPECIAL2() {
   int32_t alu_out;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case MUL:
       // Only the lower 32 bits are kept.
       alu_out = rs_u() * rt_u();
@@ -3725,7 +3845,7 @@
 
 void Simulator::DecodeTypeRegisterSPECIAL3() {
   int32_t alu_out;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case INS: {  // Mips32r2 instruction.
       // Interpret rd field as 5-bit msb of insert.
       uint16_t msb = rd_reg();
@@ -3750,7 +3870,7 @@
       break;
     }
     case BSHFL: {
-      int sa = get_instr()->SaFieldRaw() >> kSaShift;
+      int sa = instr_.SaFieldRaw() >> kSaShift;
       switch (sa) {
         case BITSWAP: {
           uint32_t input = static_cast<uint32_t>(rt());
@@ -3822,7 +3942,7 @@
           break;
         }
         default: {
-          const uint8_t bp = get_instr()->Bp2Value();
+          const uint8_t bp = instr_.Bp2Value();
           sa >>= kBp2Bits;
           switch (sa) {
             case ALIGN: {
@@ -3850,16 +3970,9 @@
   }
 }
 
-
-void Simulator::DecodeTypeRegister(Instruction* instr) {
-  const Opcode op = instr->OpcodeFieldRaw();
-
-  // Set up the variables if needed before executing the instruction.
-  //  ConfigureTypeRegister(instr);
-  set_instr(instr);
-
+void Simulator::DecodeTypeRegister() {
   // ---------- Execution.
-  switch (op) {
+  switch (instr_.OpcodeFieldRaw()) {
     case COP1:
       DecodeTypeRegisterCOP1();
       break;
@@ -3882,17 +3995,17 @@
 
 
 // Type 2: instructions using a 16-, 21- or 26-bit immediate. (e.g. beq, beqc).
-void Simulator::DecodeTypeImmediate(Instruction* instr) {
+void Simulator::DecodeTypeImmediate() {
   // Instruction fields.
-  Opcode op = instr->OpcodeFieldRaw();
-  int32_t rs_reg = instr->RsValue();
-  int32_t rs = get_register(instr->RsValue());
+  Opcode op = instr_.OpcodeFieldRaw();
+  int32_t rs_reg = instr_.RsValue();
+  int32_t rs = get_register(instr_.RsValue());
   uint32_t rs_u = static_cast<uint32_t>(rs);
-  int32_t rt_reg = instr->RtValue();  // Destination register.
+  int32_t rt_reg = instr_.RtValue();  // Destination register.
   int32_t rt = get_register(rt_reg);
-  int16_t imm16 = instr->Imm16Value();
+  int16_t imm16 = instr_.Imm16Value();
 
-  int32_t ft_reg = instr->FtValue();  // Destination register.
+  int32_t ft_reg = instr_.FtValue();  // Destination register.
 
   // Zero extended immediate.
   uint32_t oe_imm16 = 0xffff & imm16;
@@ -3912,38 +4025,36 @@
   int32_t addr = 0x0;
 
   // Branch instructions common part.
-  auto BranchAndLinkHelper = [this, instr, &next_pc,
-                              &execute_branch_delay_instruction](
-      bool do_branch) {
-    execute_branch_delay_instruction = true;
-    int32_t current_pc = get_pc();
-    if (do_branch) {
-      int16_t imm16 = instr->Imm16Value();
-      next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
-      set_register(31, current_pc + 2 * Instruction::kInstrSize);
-    } else {
-      next_pc = current_pc + 2 * Instruction::kInstrSize;
-    }
-  };
+  auto BranchAndLinkHelper =
+      [this, &next_pc, &execute_branch_delay_instruction](bool do_branch) {
+        execute_branch_delay_instruction = true;
+        int32_t current_pc = get_pc();
+        if (do_branch) {
+          int16_t imm16 = this->instr_.Imm16Value();
+          next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
+          set_register(31, current_pc + 2 * Instruction::kInstrSize);
+        } else {
+          next_pc = current_pc + 2 * Instruction::kInstrSize;
+        }
+      };
 
-  auto BranchHelper = [this, instr, &next_pc,
+  auto BranchHelper = [this, &next_pc,
                        &execute_branch_delay_instruction](bool do_branch) {
     execute_branch_delay_instruction = true;
     int32_t current_pc = get_pc();
     if (do_branch) {
-      int16_t imm16 = instr->Imm16Value();
+      int16_t imm16 = this->instr_.Imm16Value();
       next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
     } else {
       next_pc = current_pc + 2 * Instruction::kInstrSize;
     }
   };
 
-  auto BranchAndLinkCompactHelper = [this, instr, &next_pc](bool do_branch,
-                                                            int bits) {
+  auto BranchAndLinkCompactHelper = [this, &next_pc](bool do_branch, int bits) {
     int32_t current_pc = get_pc();
     CheckForbiddenSlot(current_pc);
     if (do_branch) {
-      int32_t imm = instr->ImmValue(bits);
+      int32_t imm = this->instr_.ImmValue(bits);
       imm <<= 32 - bits;
       imm >>= 32 - bits;
       next_pc = current_pc + (imm << 2) + Instruction::kInstrSize;
@@ -3951,28 +4062,27 @@
     }
   };
 
-  auto BranchCompactHelper = [&next_pc, this, instr](bool do_branch, int bits) {
+  auto BranchCompactHelper = [this, &next_pc](bool do_branch, int bits) {
     int32_t current_pc = get_pc();
     CheckForbiddenSlot(current_pc);
     if (do_branch) {
-      int32_t imm = instr->ImmValue(bits);
+      int32_t imm = this->instr_.ImmValue(bits);
       imm <<= 32 - bits;
       imm >>= 32 - bits;
       next_pc = get_pc() + (imm << 2) + Instruction::kInstrSize;
     }
   };
 
-
   switch (op) {
     // ------------- COP1. Coprocessor instructions.
     case COP1:
-      switch (instr->RsFieldRaw()) {
+      switch (instr_.RsFieldRaw()) {
         case BC1: {  // Branch on coprocessor condition.
           // Floating point.
-          uint32_t cc = instr->FBccValue();
+          uint32_t cc = instr_.FBccValue();
           uint32_t fcsr_cc = get_fcsr_condition_bit(cc);
           uint32_t cc_value = test_fcsr_bit(fcsr_cc);
-          bool do_branch = (instr->FBtrueValue()) ? cc_value : !cc_value;
+          bool do_branch = (instr_.FBtrueValue()) ? cc_value : !cc_value;
           BranchHelper(do_branch);
           break;
         }
@@ -3988,7 +4098,7 @@
       break;
     // ------------- REGIMM class.
     case REGIMM:
-      switch (instr->RtFieldRaw()) {
+      switch (instr_.RtFieldRaw()) {
         case BLTZ:
           BranchHelper(rs < 0);
           break;
@@ -4196,7 +4306,7 @@
       set_register(rt_reg, ReadB(rs + se_imm16));
       break;
     case LH:
-      set_register(rt_reg, ReadH(rs + se_imm16, instr));
+      set_register(rt_reg, ReadH(rs + se_imm16, instr_.instr()));
       break;
     case LWL: {
       // al_offset is offset of the effective address within an aligned word.
@@ -4204,20 +4314,20 @@
       uint8_t byte_shift = kPointerAlignmentMask - al_offset;
       uint32_t mask = (1 << byte_shift * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      alu_out = ReadW(addr, instr);
+      alu_out = ReadW(addr, instr_.instr());
       alu_out <<= byte_shift * 8;
       alu_out |= rt & mask;
       set_register(rt_reg, alu_out);
       break;
     }
     case LW:
-      set_register(rt_reg, ReadW(rs + se_imm16, instr));
+      set_register(rt_reg, ReadW(rs + se_imm16, instr_.instr()));
       break;
     case LBU:
       set_register(rt_reg, ReadBU(rs + se_imm16));
       break;
     case LHU:
-      set_register(rt_reg, ReadHU(rs + se_imm16, instr));
+      set_register(rt_reg, ReadHU(rs + se_imm16, instr_.instr()));
       break;
     case LWR: {
       // al_offset is offset of the effective address within an aligned word.
@@ -4225,7 +4335,7 @@
       uint8_t byte_shift = kPointerAlignmentMask - al_offset;
       uint32_t mask = al_offset ? (~0 << (byte_shift + 1) * 8) : 0;
       addr = rs + se_imm16 - al_offset;
-      alu_out = ReadW(addr, instr);
+      alu_out = ReadW(addr, instr_.instr());
       alu_out = static_cast<uint32_t> (alu_out) >> al_offset * 8;
       alu_out |= rt & mask;
       set_register(rt_reg, alu_out);
@@ -4235,7 +4345,7 @@
       WriteB(rs + se_imm16, static_cast<int8_t>(rt));
       break;
     case SH:
-      WriteH(rs + se_imm16, static_cast<uint16_t>(rt), instr);
+      WriteH(rs + se_imm16, static_cast<uint16_t>(rt), instr_.instr());
       break;
     case SWL: {
       uint8_t al_offset = (rs + se_imm16) & kPointerAlignmentMask;
@@ -4243,40 +4353,40 @@
       uint32_t mask = byte_shift ? (~0 << (al_offset + 1) * 8) : 0;
       addr = rs + se_imm16 - al_offset;
       // Value to be written in memory.
-      uint32_t mem_value = ReadW(addr, instr) & mask;
+      uint32_t mem_value = ReadW(addr, instr_.instr()) & mask;
       mem_value |= static_cast<uint32_t>(rt) >> byte_shift * 8;
-      WriteW(addr, mem_value, instr);
+      WriteW(addr, mem_value, instr_.instr());
       break;
     }
     case SW:
-      WriteW(rs + se_imm16, rt, instr);
+      WriteW(rs + se_imm16, rt, instr_.instr());
       break;
     case SWR: {
       uint8_t al_offset = (rs + se_imm16) & kPointerAlignmentMask;
       uint32_t mask = (1 << al_offset * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      uint32_t mem_value = ReadW(addr, instr);
+      uint32_t mem_value = ReadW(addr, instr_.instr());
       mem_value = (rt << al_offset * 8) | (mem_value & mask);
-      WriteW(addr, mem_value, instr);
+      WriteW(addr, mem_value, instr_.instr());
       break;
     }
     case LWC1:
       set_fpu_register_hi_word(ft_reg, 0);
-      set_fpu_register_word(ft_reg, ReadW(rs + se_imm16, instr));
+      set_fpu_register_word(ft_reg, ReadW(rs + se_imm16, instr_.instr()));
       break;
     case LDC1:
-      set_fpu_register_double(ft_reg, ReadD(rs + se_imm16, instr));
+      set_fpu_register_double(ft_reg, ReadD(rs + se_imm16, instr_.instr()));
       break;
     case SWC1:
-      WriteW(rs + se_imm16, get_fpu_register_word(ft_reg), instr);
+      WriteW(rs + se_imm16, get_fpu_register_word(ft_reg), instr_.instr());
       break;
     case SDC1:
-      WriteD(rs + se_imm16, get_fpu_register_double(ft_reg), instr);
+      WriteD(rs + se_imm16, get_fpu_register_double(ft_reg), instr_.instr());
       break;
     // ------------- PC-Relative instructions.
     case PCREL: {
       // rt field: checking 5-bits.
-      int32_t imm21 = instr->Imm21Value();
+      int32_t imm21 = instr_.Imm21Value();
       int32_t current_pc = get_pc();
       uint8_t rt = (imm21 >> kImm16Bits);
       switch (rt) {
@@ -4288,7 +4398,7 @@
           alu_out = current_pc + (se_imm16 << 16);
           break;
         default: {
-          int32_t imm19 = instr->Imm19Value();
+          int32_t imm19 = instr_.Imm19Value();
           // rt field: checking the most significant 2-bits.
           rt = (imm21 >> kImm19Bits);
           switch (rt) {
@@ -4336,13 +4446,15 @@
 
 
 // Type 3: instructions using a 26-bit immediate (e.g. j, jal).
-void Simulator::DecodeTypeJump(Instruction* instr) {
+void Simulator::DecodeTypeJump() {
+  SimInstruction simInstr = instr_;
   // Get current pc.
   int32_t current_pc = get_pc();
   // Get unchanged bits of pc.
   int32_t pc_high_bits = current_pc & 0xf0000000;
   // Next pc.
-  int32_t next_pc = pc_high_bits | (instr->Imm26Value() << 2);
+
+  int32_t next_pc = pc_high_bits | (simInstr.Imm26Value() << 2);
 
   // Execute branch delay slot.
   // We don't check for end_sim_pc. First it should not be met as the current pc
@@ -4353,7 +4465,7 @@
 
   // Update pc and ra if necessary.
   // Do this after the branch delay execution.
-  if (instr->IsLinkingInstruction()) {
+  if (simInstr.IsLinkingInstruction()) {
     set_register(31, current_pc + 2 * Instruction::kInstrSize);
   }
   set_pc(next_pc);
@@ -4375,15 +4487,16 @@
     dasm.InstructionDecode(buffer, reinterpret_cast<byte*>(instr));
   }
 
-  switch (instr->InstructionType(Instruction::TypeChecks::EXTRA)) {
+  instr_ = instr;
+  switch (instr_.InstructionType()) {
     case Instruction::kRegisterType:
-      DecodeTypeRegister(instr);
+      DecodeTypeRegister();
       break;
     case Instruction::kImmediateType:
-      DecodeTypeImmediate(instr);
+      DecodeTypeImmediate();
       break;
     case Instruction::kJumpType:
-      DecodeTypeJump(instr);
+      DecodeTypeJump();
       break;
     default:
       UNSUPPORTED();
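
A note on the compact-branch helpers converted above: both lambdas sign-extend
an n-bit immediate with the same shift pair before scaling it to a byte offset.
A minimal standalone sketch of that idiom, assuming two's-complement arithmetic
right shift on signed values (which the simulator code itself relies on);
SignExtendImm and NextCompactBranchPc are illustrative names, not V8 functions.

#include <cstdint>

// Extend the low `bits` bits of `raw` to a signed 32-bit value.
int32_t SignExtendImm(int32_t raw, int bits) {
  int32_t imm = raw & ((1 << bits) - 1);  // keep only the encoded field
  imm <<= 32 - bits;                      // move the field's sign bit to bit 31
  imm >>= 32 - bits;                      // arithmetic shift replicates it down
  return imm;
}

// Target of a taken compact branch: pc + (offset << 2) + one instruction slot.
int32_t NextCompactBranchPc(int32_t current_pc, int32_t raw, int bits) {
  const int kInstrSize = 4;  // MIPS instructions are 4 bytes
  return current_pc + (SignExtendImm(raw, bits) << 2) + kInstrSize;
}
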
diff --git a/src/mips/simulator-mips.h b/src/mips/simulator-mips.h
index 5c77756..3795eec 100644
--- a/src/mips/simulator-mips.h
+++ b/src/mips/simulator-mips.h
@@ -113,6 +113,39 @@
   char validity_map_[kValidityMapSize];  // One byte per line.
 };
 
+class SimInstructionBase : public InstructionBase {
+ public:
+  Type InstructionType() const { return type_; }
+  inline Instruction* instr() const { return instr_; }
+  inline int32_t operand() const { return operand_; }
+
+ protected:
+  SimInstructionBase() : operand_(-1), instr_(nullptr), type_(kUnsupported) {}
+  explicit SimInstructionBase(Instruction* instr) {}
+
+  int32_t operand_;
+  Instruction* instr_;
+  Type type_;
+
+ private:
+  DISALLOW_ASSIGN(SimInstructionBase);
+};
+
+class SimInstruction : public InstructionGetters<SimInstructionBase> {
+ public:
+  SimInstruction() {}
+
+  explicit SimInstruction(Instruction* instr) { *this = instr; }
+
+  SimInstruction& operator=(Instruction* instr) {
+    operand_ = *reinterpret_cast<const int32_t*>(instr);
+    instr_ = instr;
+    type_ = InstructionBase::InstructionType();
+    DCHECK(reinterpret_cast<void*>(&operand_) == this);
+    return *this;
+  }
+};
+
 class Simulator {
  public:
   friend class MipsDebugger;
@@ -216,7 +249,7 @@
   // Call on program start.
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   // V8 generally calls into generated JS code with 5 parameters and into
   // generated RegExp code with 7 parameters. This is a convenience function,
@@ -236,7 +269,8 @@
   char* last_debugger_input() { return last_debugger_input_; }
 
   // ICache checking.
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_ra, end_sim_pc).
@@ -299,8 +333,10 @@
   inline int32_t SetDoubleHIW(double* addr);
   inline int32_t SetDoubleLOW(double* addr);
 
+  SimInstruction instr_;
+
   // Executing is handled based on the instruction type.
-  void DecodeTypeRegister(Instruction* instr);
+  void DecodeTypeRegister();
 
   // Functions called from DecodeTypeRegister.
   void DecodeTypeRegisterCOP1();
@@ -322,39 +358,34 @@
 
   void DecodeTypeRegisterLRsType();
 
-  Instruction* currentInstr_;
-
-  inline Instruction* get_instr() const { return currentInstr_; }
-  inline void set_instr(Instruction* instr) { currentInstr_ = instr; }
-
-  inline int32_t rs_reg() const { return currentInstr_->RsValue(); }
+  inline int32_t rs_reg() const { return instr_.RsValue(); }
   inline int32_t rs() const { return get_register(rs_reg()); }
   inline uint32_t rs_u() const {
     return static_cast<uint32_t>(get_register(rs_reg()));
   }
-  inline int32_t rt_reg() const { return currentInstr_->RtValue(); }
+  inline int32_t rt_reg() const { return instr_.RtValue(); }
   inline int32_t rt() const { return get_register(rt_reg()); }
   inline uint32_t rt_u() const {
     return static_cast<uint32_t>(get_register(rt_reg()));
   }
-  inline int32_t rd_reg() const { return currentInstr_->RdValue(); }
-  inline int32_t fr_reg() const { return currentInstr_->FrValue(); }
-  inline int32_t fs_reg() const { return currentInstr_->FsValue(); }
-  inline int32_t ft_reg() const { return currentInstr_->FtValue(); }
-  inline int32_t fd_reg() const { return currentInstr_->FdValue(); }
-  inline int32_t sa() const { return currentInstr_->SaValue(); }
-  inline int32_t lsa_sa() const { return currentInstr_->LsaSaValue(); }
+  inline int32_t rd_reg() const { return instr_.RdValue(); }
+  inline int32_t fr_reg() const { return instr_.FrValue(); }
+  inline int32_t fs_reg() const { return instr_.FsValue(); }
+  inline int32_t ft_reg() const { return instr_.FtValue(); }
+  inline int32_t fd_reg() const { return instr_.FdValue(); }
+  inline int32_t sa() const { return instr_.SaValue(); }
+  inline int32_t lsa_sa() const { return instr_.LsaSaValue(); }
 
   inline void SetResult(int32_t rd_reg, int32_t alu_out) {
     set_register(rd_reg, alu_out);
     TraceRegWr(alu_out);
   }
 
-  void DecodeTypeImmediate(Instruction* instr);
-  void DecodeTypeJump(Instruction* instr);
+  void DecodeTypeImmediate();
+  void DecodeTypeJump();
 
   // Used for breakpoints and traps.
-  void SoftwareInterrupt(Instruction* instr);
+  void SoftwareInterrupt();
 
   // Compact branch guard.
   void CheckForbiddenSlot(int32_t current_pc) {
@@ -400,9 +431,12 @@
   }
 
   // ICache.
-  static void CheckICache(base::HashMap* i_cache, Instruction* instr);
-  static void FlushOnePage(base::HashMap* i_cache, intptr_t start, int size);
-  static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
+  static void CheckICache(base::CustomMatcherHashMap* i_cache,
+                          Instruction* instr);
+  static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
+                           int size);
+  static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                 void* page);
 
   enum Exception {
     none,
@@ -448,7 +482,7 @@
   char* last_debugger_input_;
 
   // Icache simulation.
-  base::HashMap* i_cache_;
+  base::CustomMatcherHashMap* i_cache_;
 
   v8::internal::Isolate* isolate_;
 
diff --git a/src/mips64/assembler-mips64.cc b/src/mips64/assembler-mips64.cc
index 21a2434..b35b166 100644
--- a/src/mips64/assembler-mips64.cc
+++ b/src/mips64/assembler-mips64.cc
@@ -2780,12 +2780,49 @@
   GenInstrRegister(COP1, D, ft, fs, fd, MUL_D);
 }
 
+void Assembler::madd_s(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r2);
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_S);
+}
 
 void Assembler::madd_d(FPURegister fd, FPURegister fr, FPURegister fs,
     FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r2);
   GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_D);
 }
 
+void Assembler::msub_s(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r2);
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MSUB_S);
+}
+
+void Assembler::msub_d(FPURegister fd, FPURegister fr, FPURegister fs,
+                       FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r2);
+  GenInstrRegister(COP1X, fr, ft, fs, fd, MSUB_D);
+}
+
+void Assembler::maddf_s(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r6);
+  GenInstrRegister(COP1, S, ft, fs, fd, MADDF_S);
+}
+
+void Assembler::maddf_d(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r6);
+  GenInstrRegister(COP1, D, ft, fs, fd, MADDF_D);
+}
+
+void Assembler::msubf_s(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r6);
+  GenInstrRegister(COP1, S, ft, fs, fd, MSUBF_S);
+}
+
+void Assembler::msubf_d(FPURegister fd, FPURegister fs, FPURegister ft) {
+  DCHECK(kArchVariant == kMips64r6);
+  GenInstrRegister(COP1, D, ft, fs, fd, MSUBF_D);
+}
 
 void Assembler::div_s(FPURegister fd, FPURegister fs, FPURegister ft) {
   GenInstrRegister(COP1, S, ft, fs, fd, DIV_D);
@@ -2818,13 +2855,11 @@
 
 
 void Assembler::neg_s(FPURegister fd, FPURegister fs) {
-  DCHECK(kArchVariant == kMips64r2);
   GenInstrRegister(COP1, S, f0, fs, fd, NEG_D);
 }
 
 
 void Assembler::neg_d(FPURegister fd, FPURegister fs) {
-  DCHECK(kArchVariant == kMips64r2);
   GenInstrRegister(COP1, D, f0, fs, fd, NEG_D);
 }
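
The new multiply-add/subtract emitters split along ISA revision: the r2-only
COP1X forms take a fourth register, while the r6 forms accumulate into fd. A
short sketch of the intended arithmetic, hedged against the MIPS manuals rather
than this patch and ignoring fused-rounding differences; the helper names are
illustrative.

// r2 COP1X forms, e.g. madd.d fd, fr, fs, ft.
double MaddR2(double fr, double fs, double ft) { return fs * ft + fr; }
double MsubR2(double fr, double fs, double ft) { return fs * ft - fr; }

// r6 forms, e.g. maddf.d fd, fs, ft, which read and write fd.
double MaddfR6(double fd, double fs, double ft) { return fd + fs * ft; }
double MsubfR6(double fd, double fs, double ft) { return fd - fs * ft; }

// Corresponding emitter calls (operand order as declared in the header):
//   __ madd_d(f0, f2, f4, f6);   // f0 = f4 * f6 + f2   (r2 only)
//   __ msubf_s(f0, f4, f6);      // f0 = f0 - f4 * f6   (r6 only)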
 
diff --git a/src/mips64/assembler-mips64.h b/src/mips64/assembler-mips64.h
index e269acf..dc3198c 100644
--- a/src/mips64/assembler-mips64.h
+++ b/src/mips64/assembler-mips64.h
@@ -939,7 +939,14 @@
   void sub_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void mul_s(FPURegister fd, FPURegister fs, FPURegister ft);
   void mul_d(FPURegister fd, FPURegister fs, FPURegister ft);
+  void madd_s(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
   void madd_d(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void msub_s(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void msub_d(FPURegister fd, FPURegister fr, FPURegister fs, FPURegister ft);
+  void maddf_s(FPURegister fd, FPURegister fs, FPURegister ft);
+  void maddf_d(FPURegister fd, FPURegister fs, FPURegister ft);
+  void msubf_s(FPURegister fd, FPURegister fs, FPURegister ft);
+  void msubf_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void div_s(FPURegister fd, FPURegister fs, FPURegister ft);
   void div_d(FPURegister fd, FPURegister fs, FPURegister ft);
   void abs_s(FPURegister fd, FPURegister fs);
diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc
index 4d9f120..e089b54 100644
--- a/src/mips64/code-stubs-mips64.cc
+++ b/src/mips64/code-stubs-mips64.cc
@@ -1783,7 +1783,6 @@
   // a2 : feedback vector
   // a3 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1803,7 +1802,7 @@
   Register feedback_map = a6;
   Register weak_value = t0;
   __ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset));
-  __ Branch(&done_increment_count, eq, a1, Operand(weak_value));
+  __ Branch(&done, eq, a1, Operand(weak_value));
   __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
   __ Branch(&done, eq, a5, Operand(at));
   __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset));
@@ -1825,7 +1824,7 @@
   // Make sure the function is the Array() function
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
   __ Branch(&megamorphic, ne, a1, Operand(a5));
-  __ jmp(&done_increment_count);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -1853,32 +1852,21 @@
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ Branch(&done_initialize_count);
+  __ Branch(&done);
 
   __ bind(&not_array_function);
 
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
+  __ bind(&done);
 
-  __ SmiScale(a4, a3, kPointerSizeLog2);
-  __ Daddu(a4, a2, Operand(a4));
-  __ li(a5, Operand(Smi::FromInt(1)));
-  __ Branch(USE_DELAY_SLOT, &done);
-  __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + kPointerSize));
-
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ SmiScale(a4, a3, kPointerSizeLog2);
   __ Daddu(a5, a2, Operand(a4));
   __ ld(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
   __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
   __ sd(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize));
-
-  __ bind(&done);
 }
 
 
@@ -1965,6 +1953,15 @@
   __ bind(&exit_);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ dsrl(t0, slot, 32 - kPointerSizeLog2);
+  __ Daddu(slot, feedback_vector, Operand(t0));
+  __ ld(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize));
+  __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
+  __ sd(t0, FieldMemOperand(slot, FixedArray::kHeaderSize + kPointerSize));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // a1 - function
@@ -1977,11 +1974,7 @@
   __ li(a0, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  __ dsrl(t0, a3, 32 - kPointerSizeLog2);
-  __ Daddu(a3, a2, Operand(t0));
-  __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
-  __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
-  __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
+  IncrementCallCount(masm, a2, a3);
 
   __ mov(a2, a4);
   __ mov(a3, a1);
@@ -1994,7 +1987,7 @@
   // a1 - function
   // a3 - slot id (Smi)
   // a2 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -2024,14 +2017,10 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(a1, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ dsrl(t0, a3, 32 - kPointerSizeLog2);
-  __ Daddu(a3, a2, Operand(t0));
-  __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
-  __ Daddu(t0, t0, Operand(Smi::FromInt(1)));
-  __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
-
   __ bind(&call_function);
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, a2, a3);
+
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
@@ -2073,6 +2062,10 @@
   __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize));
 
   __ bind(&call);
+  IncrementCallCount(masm, a2, a3);
+
+  __ bind(&call_count_incremented);
+
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
           USE_DELAY_SLOT);
@@ -2098,12 +2091,6 @@
   __ ld(t1, NativeContextMemOperand());
   __ Branch(&miss, ne, t0, Operand(t1));
 
-  // Initialize the call counter.
-  __ dsrl(at, a3, 32 - kPointerSizeLog2);
-  __ Daddu(at, a2, Operand(at));
-  __ li(t0, Operand(Smi::FromInt(1)));
-  __ sd(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // a2 - vector
   // a3 - slot
@@ -2111,9 +2098,11 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(a2, a3);
     __ Push(cp, a1);
     __ CallStub(&create_stub);
     __ Pop(cp, a1);
+    __ Pop(a2, a3);
   }
 
   __ Branch(&call_function);
@@ -2123,7 +2112,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ Branch(&call);
+  __ Branch(&call_count_incremented);
 }
 
 
@@ -2283,293 +2272,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-  // Stack frame on entry.
-  //  ra: return address
-  //  sp[0]: to
-  //  sp[4]: from
-  //  sp[8]: string
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length.
-  // If any of these assumptions fail, we call the runtime system.
-
-  const int kToOffset = 0 * kPointerSize;
-  const int kFromOffset = 1 * kPointerSize;
-  const int kStringOffset = 2 * kPointerSize;
-
-  __ ld(a2, MemOperand(sp, kToOffset));
-  __ ld(a3, MemOperand(sp, kFromOffset));
-
-  STATIC_ASSERT(kSmiTag == 0);
-
-  // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
-  // safe in this case.
-  __ JumpIfNotSmi(a2, &runtime);
-  __ JumpIfNotSmi(a3, &runtime);
-  // Both a2 and a3 are untagged integers.
-
-  __ SmiUntag(a2, a2);
-  __ SmiUntag(a3, a3);
-  __ Branch(&runtime, lt, a3, Operand(zero_reg));  // From < 0.
-
-  __ Branch(&runtime, gt, a3, Operand(a2));  // Fail if from > to.
-  __ Dsubu(a2, a2, a3);
-
-  // Make sure first argument is a string.
-  __ ld(v0, MemOperand(sp, kStringOffset));
-  __ JumpIfSmi(v0, &runtime);
-  __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ And(a4, a1, Operand(kIsNotStringMask));
-
-  __ Branch(&runtime, ne, a4, Operand(zero_reg));
-
-  Label single_char;
-  __ Branch(&single_char, eq, a2, Operand(1));
-
-  // Short-cut for the case of trivial substring.
-  Label return_v0;
-  // v0: original string
-  // a2: result string length
-  __ ld(a4, FieldMemOperand(v0, String::kLengthOffset));
-  __ SmiUntag(a4);
-  // Return original string.
-  __ Branch(&return_v0, eq, a2, Operand(a4));
-  // Longer than original string's length or negative: unsafe arguments.
-  __ Branch(&runtime, hi, a2, Operand(a4));
-  // Shorter than original string's length: an actual substring.
-
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into a5.
-  // v0: original string
-  // a1: instance type
-  // a2: length
-  // a3: from index (untagged)
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ And(a4, a1, Operand(kIsIndirectStringMask));
-  __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, a4, Operand(zero_reg));
-  // a4 is used as a scratch register and can be overwritten in either case.
-  __ And(a4, a1, Operand(kSlicedNotConsMask));
-  __ Branch(&sliced_string, ne, a4, Operand(zero_reg));
-  // Cons string.  Check whether it is flat, then fetch first part.
-  __ ld(a5, FieldMemOperand(v0, ConsString::kSecondOffset));
-  __ LoadRoot(a4, Heap::kempty_stringRootIndex);
-  __ Branch(&runtime, ne, a5, Operand(a4));
-  __ ld(a5, FieldMemOperand(v0, ConsString::kFirstOffset));
-  // Update instance type.
-  __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ ld(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
-  __ ld(a4, FieldMemOperand(v0, SlicedString::kOffsetOffset));
-  __ SmiUntag(a4);  // Add offset to index.
-  __ Daddu(a3, a3, a4);
-  // Update instance type.
-  __ ld(a1, FieldMemOperand(a5, HeapObject::kMapOffset));
-  __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mov(a5, v0);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // a5: underlying subject string
-    // a1: instance type of underlying subject string
-    // a2: length
-    // a3: adjusted start index (untagged)
-    // Short slice.  Copy instead of slicing.
-    __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ And(a4, a1, Operand(kStringEncodingMask));
-    __ Branch(&two_byte_slice, eq, a4, Operand(zero_reg));
-    __ AllocateOneByteSlicedString(v0, a2, a6, a7, &runtime);
-    __ jmp(&set_slice_header);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(v0, a2, a6, a7, &runtime);
-    __ bind(&set_slice_header);
-    __ SmiTag(a3);
-    __ sd(a5, FieldMemOperand(v0, SlicedString::kParentOffset));
-    __ sd(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
-    __ jmp(&return_v0);
-
-    __ bind(&copy_routine);
-  }
-
-  // a5: underlying subject string
-  // a1: instance type of underlying subject string
-  // a2: length
-  // a3: adjusted start index (untagged)
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ And(a4, a1, Operand(kExternalStringTag));
-  __ Branch(&sequential_string, eq, a4, Operand(zero_reg));
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ And(a4, a1, Operand(kShortExternalStringTag));
-  __ Branch(&runtime, ne, a4, Operand(zero_reg));
-  __ ld(a5, FieldMemOperand(a5, ExternalString::kResourceDataOffset));
-  // a5 already points to the first character of underlying string.
-  __ jmp(&allocate_result);
-
-  __ bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ Daddu(a5, a5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&allocate_result);
-  // Sequential ASCII string.  Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ And(a4, a1, Operand(kStringEncodingMask));
-  __ Branch(&two_byte_sequential, eq, a4, Operand(zero_reg));
-
-  // Allocate and copy the resulting one_byte string.
-  __ AllocateOneByteString(v0, a2, a4, a6, a7, &runtime);
-
-  // Locate first character of substring to copy.
-  __ Daddu(a5, a5, a3);
-
-  // Locate first character of result.
-  __ Daddu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  // v0: result string
-  // a1: first character of result string
-  // a2: result string length
-  // a5: first character of substring to copy
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, a1, a5, a2, a3, String::ONE_BYTE_ENCODING);
-  __ jmp(&return_v0);
-
-  // Allocate and copy the resulting two-byte string.
-  __ bind(&two_byte_sequential);
-  __ AllocateTwoByteString(v0, a2, a4, a6, a7, &runtime);
-
-  // Locate first character of substring to copy.
-  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
-  __ Dlsa(a5, a5, a3, 1);
-  // Locate first character of result.
-  __ Daddu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  // v0: result string.
-  // a1: first character of result.
-  // a2: result length.
-  // a5: first character of substring to copy.
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(
-      masm, a1, a5, a2, a3, String::TWO_BYTE_ENCODING);
-
-  __ bind(&return_v0);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, a3, a4);
-  __ DropAndRet(3);
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // v0: original string
-  // a1: instance type
-  // a2: length
-  // a3: from index (untagged)
-  __ SmiTag(a3);
-  StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ DropAndRet(3);
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in a0.
-  Label is_number;
-  __ JumpIfSmi(a0, &is_number);
-
-  Label not_string;
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_string);
-
-  Label not_heap_number;
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
-  __ bind(&not_oddball);
-
-  __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in a0.
-  Label is_number;
-  __ JumpIfSmi(a0, &is_number);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ GetObjectType(a0, a1, a1);
-  // a0: receiver
-  // a1: receiver instance type
-  __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ mov(v0, a0);
-  __ bind(&not_name);
-
-  Label not_heap_number;
-  __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
-  __ Ret(USE_DELAY_SLOT);
-  __ ld(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
-  __ bind(&not_oddball);
-
-  __ push(a0);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(
     MacroAssembler* masm, Register left, Register right, Register scratch1,
     Register scratch2, Register scratch3) {
@@ -3927,7 +3629,7 @@
 
   __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ Move(feedback, too_far);
   __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ Jump(t9);
@@ -4638,7 +4340,7 @@
     Label too_big_for_new_space;
     __ bind(&allocate);
     __ Branch(&too_big_for_new_space, gt, a5,
-              Operand(Page::kMaxRegularHeapObjectSize));
+              Operand(kMaxRegularHeapObjectSize));
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
       __ SmiTag(a0);
@@ -4993,8 +4695,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ Branch(&too_big_for_new_space, gt, a5,
-            Operand(Page::kMaxRegularHeapObjectSize));
+  __ Branch(&too_big_for_new_space, gt, a5, Operand(kMaxRegularHeapObjectSize));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ SmiTag(a0);
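
The CallIC changes above route every call-count update through the new
IncrementCallCount helper, whose pointer math is easy to miss: the slot index
arrives as a Smi, so one shift both untags it and scales it to a byte offset
into the feedback vector. A minimal sketch under the assumption of the standard
64-bit Smi encoding (payload in the upper 32 bits) and kPointerSizeLog2 == 3;
the helper names are illustrative.

#include <cstdint>

constexpr int kPointerSizeLog2 = 3;

// Equivalent of: dsrl t0, slot, 32 - kPointerSizeLog2.
// (value << 32) becomes value * 8, i.e. an element offset in bytes.
uint64_t SlotToByteOffset(uint64_t smi_slot) {
  return smi_slot >> (32 - kPointerSizeLog2);
}

// Bumping a Smi-encoded counter: adding Smi::FromInt(1) is adding 1 << 32.
uint64_t IncrementSmiCount(uint64_t smi_count) {
  return smi_count + (uint64_t{1} << 32);
}
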
diff --git a/src/mips64/constants-mips64.cc b/src/mips64/constants-mips64.cc
index c0e98eb..11ae242 100644
--- a/src/mips64/constants-mips64.cc
+++ b/src/mips64/constants-mips64.cc
@@ -121,118 +121,6 @@
   // No Cregister with the requested name found.
   return kInvalidFPURegister;
 }
-
-
-// -----------------------------------------------------------------------------
-// Instructions.
-
-bool Instruction::IsForbiddenAfterBranchInstr(Instr instr) {
-  Opcode opcode = static_cast<Opcode>(instr & kOpcodeMask);
-  switch (opcode) {
-    case J:
-    case JAL:
-    case BEQ:
-    case BNE:
-    case BLEZ:  // POP06 bgeuc/bleuc, blezalc, bgezalc
-    case BGTZ:  // POP07 bltuc/bgtuc, bgtzalc, bltzalc
-    case BEQL:
-    case BNEL:
-    case BLEZL:  // POP26 bgezc, blezc, bgec/blec
-    case BGTZL:  // POP27 bgtzc, bltzc, bltc/bgtc
-    case BC:
-    case BALC:
-    case POP10:  // beqzalc, bovc, beqc
-    case POP30:  // bnezalc, bnvc, bnec
-    case POP66:  // beqzc, jic
-    case POP76:  // bnezc, jialc
-      return true;
-    case REGIMM:
-      switch (instr & kRtFieldMask) {
-        case BLTZ:
-        case BGEZ:
-        case BLTZAL:
-        case BGEZAL:
-          return true;
-        default:
-          return false;
-      }
-      break;
-    case SPECIAL:
-      switch (instr & kFunctionFieldMask) {
-        case JR:
-        case JALR:
-          return true;
-        default:
-          return false;
-      }
-      break;
-    case COP1:
-      switch (instr & kRsFieldMask) {
-        case BC1:
-        case BC1EQZ:
-        case BC1NEZ:
-          return true;
-          break;
-        default:
-          return false;
-      }
-      break;
-    default:
-      return false;
-  }
-}
-
-
-bool Instruction::IsLinkingInstruction() const {
-  switch (OpcodeFieldRaw()) {
-    case JAL:
-      return true;
-    case POP76:
-      if (RsFieldRawNoAssert() == JIALC)
-        return true;  // JIALC
-      else
-        return false;  // BNEZC
-    case REGIMM:
-      switch (RtFieldRaw()) {
-        case BGEZAL:
-        case BLTZAL:
-          return true;
-      default:
-        return false;
-      }
-    case SPECIAL:
-      switch (FunctionFieldRaw()) {
-        case JALR:
-          return true;
-        default:
-          return false;
-      }
-    default:
-      return false;
-  }
-}
-
-
-bool Instruction::IsTrap() const {
-  if (OpcodeFieldRaw() != SPECIAL) {
-    return false;
-  } else {
-    switch (FunctionFieldRaw()) {
-      case BREAK:
-      case TGE:
-      case TGEU:
-      case TLT:
-      case TLTU:
-      case TEQ:
-      case TNE:
-        return true;
-      default:
-        return false;
-    }
-  }
-}
-
-
 }  // namespace internal
 }  // namespace v8
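
The three predicates removed here reappear near the end of constants-mips64.h
as InstructionGetters templates, and the type classification they depend on
packs opcodes into a 64-bit set so membership is one shift and one AND. A
hedged sketch of that bitset test; the constants mirror the MIPS encoding
(opcode in the top 6 bits) and the names are illustrative.

#include <cstdint>

constexpr int kOpcodeShift = 26;
constexpr uint32_t kOpcodeMask = 0xFC000000;

// Same idea as OpcodeToBitNumber: one bit per 6-bit opcode value.
constexpr uint64_t OpcodeBit(uint32_t opcode_field) {
  return 1ULL << (opcode_field >> kOpcodeShift);
}

// Example set holding J (0x08000000) and JAL (0x0C000000).
constexpr uint64_t kExampleMask =
    OpcodeBit(0x08000000) | OpcodeBit(0x0C000000);

bool OpcodeInSet(uint32_t instr_bits, uint64_t mask) {
  return (OpcodeBit(instr_bits & kOpcodeMask) & mask) != 0;
}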
 
diff --git a/src/mips64/constants-mips64.h b/src/mips64/constants-mips64.h
index d2b1e92..f96ea23 100644
--- a/src/mips64/constants-mips64.h
+++ b/src/mips64/constants-mips64.h
@@ -555,6 +555,8 @@
   FLOOR_W_S = ((1U << 3) + 7),
   RECIP_S = ((2U << 3) + 5),
   RSQRT_S = ((2U << 3) + 6),
+  MADDF_S = ((3U << 3) + 0),
+  MSUBF_S = ((3U << 3) + 1),
   CLASS_S = ((3U << 3) + 3),
   CVT_D_S = ((4U << 3) + 1),
   CVT_W_S = ((4U << 3) + 4),
@@ -579,6 +581,8 @@
   FLOOR_W_D = ((1U << 3) + 7),
   RECIP_D = ((2U << 3) + 5),
   RSQRT_D = ((2U << 3) + 6),
+  MADDF_D = ((3U << 3) + 0),
+  MSUBF_D = ((3U << 3) + 1),
   CLASS_D = ((3U << 3) + 3),
   MIN = ((3U << 3) + 4),
   MINA = ((3U << 3) + 5),
@@ -646,8 +650,12 @@
   SELNEZ_C = ((2U << 3) + 7),  // COP1 on FPR registers.
 
   // COP1 Encoding of Function Field When rs=PS.
+
   // COP1X Encoding of Function Field.
+  MADD_S = ((4U << 3) + 0),
   MADD_D = ((4U << 3) + 1),
+  MSUB_S = ((5U << 3) + 0),
+  MSUB_D = ((5U << 3) + 1),
 
   // PCREL Encoding of rt Field.
   ADDIUPC = ((0U << 2) + 0),
@@ -891,8 +899,7 @@
   return 1ULL << (static_cast<uint32_t>(opcode) >> kOpcodeShift);
 }
 
-
-class Instruction {
+class InstructionBase {
  public:
   enum {
     kInstrSize = 4,
@@ -902,6 +909,9 @@
     kPCReadOffset = 0
   };
 
+  // Instruction type.
+  enum Type { kRegisterType, kImmediateType, kJumpType, kUnsupported = -1 };
+
   // Get the raw instruction bits.
   inline Instr InstructionBits() const {
     return *reinterpret_cast<const Instr*>(this);
@@ -922,16 +932,6 @@
     return (InstructionBits() >> lo) & ((2U << (hi - lo)) - 1);
   }
 
-  // Instruction type.
-  enum Type {
-    kRegisterType,
-    kImmediateType,
-    kJumpType,
-    kUnsupported = -1
-  };
-
-  enum TypeChecks { NORMAL, EXTRA };
-
   static constexpr uint64_t kOpcodeImmediateTypeMask =
       OpcodeToBitNumber(REGIMM) | OpcodeToBitNumber(BEQ) |
       OpcodeToBitNumber(BNE) | OpcodeToBitNumber(BLEZ) |
@@ -988,9 +988,6 @@
       FunctionFieldToBitNumber(MOVCI) | FunctionFieldToBitNumber(SELEQZ_S) |
       FunctionFieldToBitNumber(SELNEZ_S) | FunctionFieldToBitNumber(SYNC);
 
-  // Get the encoding type of the instruction.
-  inline Type InstructionType(TypeChecks checks = NORMAL) const;
-
 
   // Accessors for the different named fields used in the MIPS encoding.
   inline Opcode OpcodeValue() const {
@@ -998,78 +995,8 @@
         Bits(kOpcodeShift + kOpcodeBits - 1, kOpcodeShift));
   }
 
-  inline int RsValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kRsShift + kRsBits - 1, kRsShift);
-  }
-
-  inline int RtValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kRtShift + kRtBits - 1, kRtShift);
-  }
-
-  inline int RdValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kRdShift + kRdBits - 1, kRdShift);
-  }
-
-  inline int SaValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kSaShift + kSaBits - 1, kSaShift);
-  }
-
-  inline int LsaSaValue() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kSaShift + kLsaSaBits - 1, kSaShift);
-  }
-
-  inline int FunctionValue() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return Bits(kFunctionShift + kFunctionBits - 1, kFunctionShift);
-  }
-
-  inline int FdValue() const {
-    return Bits(kFdShift + kFdBits - 1, kFdShift);
-  }
-
-  inline int FsValue() const {
-    return Bits(kFsShift + kFsBits - 1, kFsShift);
-  }
-
-  inline int FtValue() const {
-    return Bits(kFtShift + kFtBits - 1, kFtShift);
-  }
-
-  inline int FrValue() const {
-    return Bits(kFrShift + kFrBits -1, kFrShift);
-  }
-
-  inline int Bp2Value() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kBp2Shift + kBp2Bits - 1, kBp2Shift);
-  }
-
-  inline int Bp3Value() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return Bits(kBp3Shift + kBp3Bits - 1, kBp3Shift);
-  }
-
-  // Float Compare condition code instruction bits.
-  inline int FCccValue() const {
-    return Bits(kFCccShift + kFCccBits - 1, kFCccShift);
-  }
-
-  // Float Branch condition code instruction bits.
-  inline int FBccValue() const {
-    return Bits(kFBccShift + kFBccBits - 1, kFBccShift);
-  }
-
-  // Float Branch true/false instruction bit.
-  inline int FBtrueValue() const {
-    return Bits(kFBtrueShift + kFBtrueBits - 1, kFBtrueShift);
+  inline int FunctionFieldRaw() const {
+    return InstructionBits() & kFunctionFieldMask;
   }
 
   // Return the fields at their original place in the instruction encoding.
@@ -1077,39 +1004,135 @@
     return static_cast<Opcode>(InstructionBits() & kOpcodeMask);
   }
 
-  inline int RsFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return InstructionBits() & kRsFieldMask;
-  }
-
-  // Same as above function, but safe to call within InstructionType().
+  // Safe to call within InstructionType().
   inline int RsFieldRawNoAssert() const {
     return InstructionBits() & kRsFieldMask;
   }
 
+  inline int SaFieldRaw() const { return InstructionBits() & kSaFieldMask; }
+
+  // Get the encoding type of the instruction.
+  inline Type InstructionType() const;
+
+ protected:
+  InstructionBase() {}
+};
+
+template <class T>
+class InstructionGetters : public T {
+ public:
+  inline int RsValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kRsShift + kRsBits - 1, kRsShift);
+  }
+
+  inline int RtValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kRtShift + kRtBits - 1, kRtShift);
+  }
+
+  inline int RdValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kRdShift + kRdBits - 1, kRdShift);
+  }
+
+  inline int SaValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kSaShift + kSaBits - 1, kSaShift);
+  }
+
+  inline int LsaSaValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kSaShift + kLsaSaBits - 1, kSaShift);
+  }
+
+  inline int FunctionValue() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kFunctionShift + kFunctionBits - 1, kFunctionShift);
+  }
+
+  inline int FdValue() const {
+    return this->Bits(kFdShift + kFdBits - 1, kFdShift);
+  }
+
+  inline int FsValue() const {
+    return this->Bits(kFsShift + kFsBits - 1, kFsShift);
+  }
+
+  inline int FtValue() const {
+    return this->Bits(kFtShift + kFtBits - 1, kFtShift);
+  }
+
+  inline int FrValue() const {
+    return this->Bits(kFrShift + kFrBits - 1, kFrShift);
+  }
+
+  inline int Bp2Value() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kBp2Shift + kBp2Bits - 1, kBp2Shift);
+  }
+
+  inline int Bp3Value() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->Bits(kBp3Shift + kBp3Bits - 1, kBp3Shift);
+  }
+
+  // Float Compare condition code instruction bits.
+  inline int FCccValue() const {
+    return this->Bits(kFCccShift + kFCccBits - 1, kFCccShift);
+  }
+
+  // Float Branch condition code instruction bits.
+  inline int FBccValue() const {
+    return this->Bits(kFBccShift + kFBccBits - 1, kFBccShift);
+  }
+
+  // Float Branch true/false instruction bit.
+  inline int FBtrueValue() const {
+    return this->Bits(kFBtrueShift + kFBtrueBits - 1, kFBtrueShift);
+  }
+
+  // Return the fields at their original place in the instruction encoding.
+  inline Opcode OpcodeFieldRaw() const {
+    return static_cast<Opcode>(this->InstructionBits() & kOpcodeMask);
+  }
+
+  inline int RsFieldRaw() const {
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->InstructionBits() & kRsFieldMask;
+  }
+
+  // Same as above function, but safe to call within InstructionType().
+  inline int RsFieldRawNoAssert() const {
+    return this->InstructionBits() & kRsFieldMask;
+  }
+
   inline int RtFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType ||
-           InstructionType() == kImmediateType);
-    return InstructionBits() & kRtFieldMask;
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType ||
+           this->InstructionType() == InstructionBase::kImmediateType);
+    return this->InstructionBits() & kRtFieldMask;
   }
 
   inline int RdFieldRaw() const {
-    DCHECK(InstructionType() == kRegisterType);
-    return InstructionBits() & kRdFieldMask;
+    DCHECK(this->InstructionType() == InstructionBase::kRegisterType);
+    return this->InstructionBits() & kRdFieldMask;
   }
 
   inline int SaFieldRaw() const {
-    return InstructionBits() & kSaFieldMask;
+    return this->InstructionBits() & kSaFieldMask;
   }
 
   inline int FunctionFieldRaw() const {
-    return InstructionBits() & kFunctionFieldMask;
+    return this->InstructionBits() & kFunctionFieldMask;
   }
 
   // Get the secondary field according to the opcode.
   inline int SecondaryValue() const {
-    Opcode op = OpcodeFieldRaw();
+    Opcode op = this->OpcodeFieldRaw();
     switch (op) {
       case SPECIAL:
       case SPECIAL2:
@@ -1124,34 +1147,34 @@
   }
 
   inline int32_t ImmValue(int bits) const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(bits - 1, 0);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(bits - 1, 0);
   }
 
   inline int32_t Imm16Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm16Shift + kImm16Bits - 1, kImm16Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm16Shift + kImm16Bits - 1, kImm16Shift);
   }
 
   inline int32_t Imm18Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm18Shift + kImm18Bits - 1, kImm18Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm18Shift + kImm18Bits - 1, kImm18Shift);
   }
 
   inline int32_t Imm19Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm19Shift + kImm19Bits - 1, kImm19Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm19Shift + kImm19Bits - 1, kImm19Shift);
   }
 
   inline int32_t Imm21Value() const {
-    DCHECK(InstructionType() == kImmediateType);
-    return Bits(kImm21Shift + kImm21Bits - 1, kImm21Shift);
+    DCHECK(this->InstructionType() == InstructionBase::kImmediateType);
+    return this->Bits(kImm21Shift + kImm21Bits - 1, kImm21Shift);
   }
 
   inline int32_t Imm26Value() const {
-    DCHECK((InstructionType() == kJumpType) ||
-           (InstructionType() == kImmediateType));
-    return Bits(kImm26Shift + kImm26Bits - 1, kImm26Shift);
+    DCHECK((this->InstructionType() == InstructionBase::kJumpType) ||
+           (this->InstructionType() == InstructionBase::kImmediateType));
+    return this->Bits(kImm26Shift + kImm26Bits - 1, kImm26Shift);
   }
 
   static bool IsForbiddenAfterBranchInstr(Instr instr);
@@ -1159,14 +1182,21 @@
   // Say if the instruction should not be used in a branch delay slot or
   // immediately after a compact branch.
   inline bool IsForbiddenAfterBranch() const {
-    return IsForbiddenAfterBranchInstr(InstructionBits());
+    return IsForbiddenAfterBranchInstr(this->InstructionBits());
+  }
+
+  inline bool IsForbiddenInBranchDelay() const {
+    return IsForbiddenAfterBranch();
   }
 
   // Say if the instruction 'links'. e.g. jal, bal.
   bool IsLinkingInstruction() const;
   // Say if the instruction is a break or a trap.
   bool IsTrap() const;
+};
 
+class Instruction : public InstructionGetters<InstructionBase> {
+ public:
+  // Instructions are read out of a code stream. The only way to get a
   // reference to an instruction is to convert a pointer. There is no way
   // to allocate or create instances of class Instruction.
@@ -1194,26 +1224,14 @@
 const int kInvalidStackOffset = -1;
 const int kBranchReturnOffset = 2 * Instruction::kInstrSize;
 
-
-Instruction::Type Instruction::InstructionType(TypeChecks checks) const {
-  if (checks == EXTRA) {
-    if (OpcodeToBitNumber(OpcodeFieldRaw()) & kOpcodeImmediateTypeMask) {
-      return kImmediateType;
-    }
-  }
+InstructionBase::Type InstructionBase::InstructionType() const {
   switch (OpcodeFieldRaw()) {
     case SPECIAL:
-      if (checks == EXTRA) {
-        if (FunctionFieldToBitNumber(FunctionFieldRaw()) &
-            kFunctionFieldRegisterTypeMask) {
-          return kRegisterType;
-        } else {
-          return kUnsupported;
-        }
-      } else {
+      if (FunctionFieldToBitNumber(FunctionFieldRaw()) &
+          kFunctionFieldRegisterTypeMask) {
         return kRegisterType;
       }
-      break;
+      return kUnsupported;
     case SPECIAL2:
       switch (FunctionFieldRaw()) {
         case MUL:
@@ -1290,17 +1308,123 @@
       return kJumpType;
 
     default:
-      if (checks == NORMAL) {
-        return kImmediateType;
-      } else {
-        return kUnsupported;
-      }
+      return kImmediateType;
   }
   return kUnsupported;
 }
-
 #undef OpcodeToBitNumber
 #undef FunctionFieldToBitNumber
+
+// -----------------------------------------------------------------------------
+// Instructions.
+
+template <class P>
+bool InstructionGetters<P>::IsLinkingInstruction() const {
+  switch (OpcodeFieldRaw()) {
+    case JAL:
+      return true;
+    case POP76:
+      if (RsFieldRawNoAssert() == JIALC)
+        return true;  // JIALC
+      else
+        return false;  // BNEZC
+    case REGIMM:
+      switch (RtFieldRaw()) {
+        case BGEZAL:
+        case BLTZAL:
+          return true;
+        default:
+          return false;
+      }
+    case SPECIAL:
+      switch (FunctionFieldRaw()) {
+        case JALR:
+          return true;
+        default:
+          return false;
+      }
+    default:
+      return false;
+  }
+}
+
+template <class P>
+bool InstructionGetters<P>::IsTrap() const {
+  if (OpcodeFieldRaw() != SPECIAL) {
+    return false;
+  } else {
+    switch (FunctionFieldRaw()) {
+      case BREAK:
+      case TGE:
+      case TGEU:
+      case TLT:
+      case TLTU:
+      case TEQ:
+      case TNE:
+        return true;
+      default:
+        return false;
+    }
+  }
+}
+
+// static
+template <class T>
+bool InstructionGetters<T>::IsForbiddenAfterBranchInstr(Instr instr) {
+  Opcode opcode = static_cast<Opcode>(instr & kOpcodeMask);
+  switch (opcode) {
+    case J:
+    case JAL:
+    case BEQ:
+    case BNE:
+    case BLEZ:  // POP06 bgeuc/bleuc, blezalc, bgezalc
+    case BGTZ:  // POP07 bltuc/bgtuc, bgtzalc, bltzalc
+    case BEQL:
+    case BNEL:
+    case BLEZL:  // POP26 bgezc, blezc, bgec/blec
+    case BGTZL:  // POP27 bgtzc, bltzc, bltc/bgtc
+    case BC:
+    case BALC:
+    case POP10:  // beqzalc, bovc, beqc
+    case POP30:  // bnezalc, bnvc, bnec
+    case POP66:  // beqzc, jic
+    case POP76:  // bnezc, jialc
+      return true;
+    case REGIMM:
+      switch (instr & kRtFieldMask) {
+        case BLTZ:
+        case BGEZ:
+        case BLTZAL:
+        case BGEZAL:
+          return true;
+        default:
+          return false;
+      }
+      break;
+    case SPECIAL:
+      switch (instr & kFunctionFieldMask) {
+        case JR:
+        case JALR:
+          return true;
+        default:
+          return false;
+      }
+      break;
+    case COP1:
+      switch (instr & kRsFieldMask) {
+        case BC1:
+        case BC1EQZ:
+        case BC1NEZ:
+          return true;
+          break;
+        default:
+          return false;
+      }
+      break;
+    default:
+      return false;
+  }
+}
 }  // namespace internal
 }  // namespace v8
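
The larger restructuring in this header is the split of Instruction into
InstructionBase plus the templated InstructionGetters<T> mixin, so the field
accessors are written once and reused both by Instruction (which overlays the
instruction word in the code stream) and by the simulator's cached copy. A
compact sketch of that layering, with every name below hypothetical rather
than the V8 classes.

#include <cstdint>

struct InPlaceBase {  // overlaid on the code stream, as Instruction::At() does
  uint32_t Bits(int hi, int lo) const {
    uint32_t word = *reinterpret_cast<const uint32_t*>(this);
    return (word >> lo) & ((2u << (hi - lo)) - 1);
  }
};

struct CachedBase {  // reads the word from a stored copy
  uint32_t word_ = 0;
  uint32_t Bits(int hi, int lo) const {
    return (word_ >> lo) & ((2u << (hi - lo)) - 1);
  }
};

template <class Base>
struct Getters : Base {  // shared accessors, written once against Base::Bits
  int RsValue() const { return this->Bits(25, 21); }
  int RtValue() const { return this->Bits(20, 16); }
};

using InstrView = Getters<InPlaceBase>;  // analogue of Instruction
using SimView = Getters<CachedBase>;     // analogue of SimInstruction's base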
 
diff --git a/src/mips64/disasm-mips64.cc b/src/mips64/disasm-mips64.cc
index 5485f3e..d73f22a 100644
--- a/src/mips64/disasm-mips64.cc
+++ b/src/mips64/disasm-mips64.cc
@@ -959,6 +959,12 @@
       case CVT_D_S:
         Format(instr, "cvt.d.'t 'fd, 'fs");
         break;
+      case MADDF_S:
+        Format(instr, "maddf.s  'fd, 'fs, 'ft");
+        break;
+      case MSUBF_S:
+        Format(instr, "msubf.s  'fd, 'fs, 'ft");
+        break;
       default:
         Format(instr, "unknown.cop1.'t");
         break;
@@ -969,7 +975,17 @@
 
 void Decoder::DecodeTypeRegisterDRsType(Instruction* instr) {
   if (!DecodeTypeRegisterRsType(instr)) {
-    Format(instr, "unknown.cop1.'t");
+    switch (instr->FunctionFieldRaw()) {
+      case MADDF_D:
+        Format(instr, "maddf.d  'fd, 'fs, 'ft");
+        break;
+      case MSUBF_D:
+        Format(instr, "msubf.d  'fd, 'fs, 'ft");
+        break;
+      default:
+        Format(instr, "unknown.cop1.'t");
+        break;
+    }
   }
 }
 
@@ -1115,9 +1131,18 @@
 
 void Decoder::DecodeTypeRegisterCOP1X(Instruction* instr) {
   switch (instr->FunctionFieldRaw()) {
+    case MADD_S:
+      Format(instr, "madd.s  'fd, 'fr, 'fs, 'ft");
+      break;
     case MADD_D:
       Format(instr, "madd.d  'fd, 'fr, 'fs, 'ft");
       break;
+    case MSUB_S:
+      Format(instr, "msub.s  'fd, 'fr, 'fs, 'ft");
+      break;
+    case MSUB_D:
+      Format(instr, "msub.d  'fd, 'fr, 'fs, 'ft");
+      break;
     default:
       UNREACHABLE();
   }
@@ -1483,6 +1508,10 @@
       }
       break;
     }
+    case DINS: {
+      Format(instr, "dins    'rt, 'rs, 'sa, 'ss2");
+      break;
+    }
     case DBSHFL: {
       int sa = instr->SaFieldRaw() >> kSaShift;
       switch (sa) {
@@ -1917,7 +1946,7 @@
   out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
                               "%08x       ",
                               instr->InstructionBits());
-  switch (instr->InstructionType(Instruction::TypeChecks::EXTRA)) {
+  switch (instr->InstructionType()) {
     case Instruction::kRegisterType: {
       return DecodeTypeRegister(instr);
     }
diff --git a/src/mips64/interface-descriptors-mips64.cc b/src/mips64/interface-descriptors-mips64.cc
index 77c71aa..e5b9c2e 100644
--- a/src/mips64/interface-descriptors-mips64.cc
+++ b/src/mips64/interface-descriptors-mips64.cc
@@ -40,13 +40,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return a3; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return a4; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return a3; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return a5; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return a3; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return a4; }
+const Register StoreTransitionDescriptor::VectorRegister() { return a3; }
+const Register StoreTransitionDescriptor::MapRegister() { return a5; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
@@ -356,7 +352,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       a0,  // callee
@@ -391,7 +387,19 @@
       a0,  // argument count (not including receiver)
       a3,  // new target
       a1,  // constructor to call
-      a2   // address of the first argument
+      a2,  // allocation site feedback if available, undefined otherwise.
+      a4   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      a0,  // argument count (not including receiver)
+      a1,  // the target to call verified to be Array function
+      a2,  // allocation site feedback
+      a3,  // address of first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/mips64/macro-assembler-mips64.cc b/src/mips64/macro-assembler-mips64.cc
index aa0de26..dd12f9b 100644
--- a/src/mips64/macro-assembler-mips64.cc
+++ b/src/mips64/macro-assembler-mips64.cc
@@ -200,9 +200,7 @@
                                 Condition cc,
                                 Label* branch) {
   DCHECK(cc == eq || cc == ne);
-  const int mask =
-      1 << MemoryChunk::IN_FROM_SPACE | 1 << MemoryChunk::IN_TO_SPACE;
-  CheckPageFlag(object, scratch, mask, cc, branch);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc, branch);
 }
 
 
@@ -1260,8 +1258,13 @@
   if (rt.is_reg()) {
     sltu(rd, rs, rt.rm());
   } else {
-    if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
+    const uint64_t int16_min = std::numeric_limits<int16_t>::min();
+    if (is_uint15(rt.imm64_) && !MustUseReg(rt.rmode_)) {
+      // Imm range is: [0, 32767].
       sltiu(rd, rs, static_cast<int32_t>(rt.imm64_));
+    } else if (is_uint15(rt.imm64_ - int16_min) && !MustUseReg(rt.rmode_)) {
+      // Imm range is: [max_unsigned-32767, max_unsigned].
+      sltiu(rd, rs, static_cast<uint16_t>(rt.imm64_));
     } else {
       // li handles the relocation.
       DCHECK(!rs.is(at));
@@ -1960,10 +1963,14 @@
 }
 
 void MacroAssembler::Neg_s(FPURegister fd, FPURegister fs) {
-  Register scratch1 = t8;
-  Register scratch2 = t9;
-  if (kArchVariant == kMips64r2) {
+  if (kArchVariant == kMips64r6) {
+    // r6 neg_s changes the sign for NaN-like operands as well.
+    neg_s(fd, fs);
+  } else {
+    DCHECK(kArchVariant == kMips64r2);
     Label is_nan, done;
+    Register scratch1 = t8;
+    Register scratch2 = t9;
     BranchF32(nullptr, &is_nan, eq, fs, fs);
     Branch(USE_DELAY_SLOT, &done);
     // For NaN input, neg_s will return the same NaN value,
@@ -1977,21 +1984,18 @@
     Or(scratch2, scratch2, scratch1);
     mtc1(scratch2, fd);
     bind(&done);
-  } else {
-    mfc1(scratch1, fs);
-    And(scratch2, scratch1, Operand(~kBinary32SignMask));
-    And(scratch1, scratch1, Operand(kBinary32SignMask));
-    Xor(scratch1, scratch1, Operand(kBinary32SignMask));
-    Or(scratch2, scratch2, scratch1);
-    mtc1(scratch2, fd);
   }
 }
 
 void MacroAssembler::Neg_d(FPURegister fd, FPURegister fs) {
-  Register scratch1 = t8;
-  Register scratch2 = t9;
-  if (kArchVariant == kMips64r2) {
+  if (kArchVariant == kMips64r6) {
+    // r6 neg_d changes the sign for NaN-like operands as well.
+    neg_d(fd, fs);
+  } else {
+    DCHECK(kArchVariant == kMips64r2);
     Label is_nan, done;
+    Register scratch1 = t8;
+    Register scratch2 = t9;
     BranchF64(nullptr, &is_nan, eq, fs, fs);
     Branch(USE_DELAY_SLOT, &done);
     // For NaN input, neg_d will return the same NaN value,
@@ -2005,13 +2009,6 @@
     Or(scratch2, scratch2, scratch1);
     dmtc1(scratch2, fd);
     bind(&done);
-  } else {
-    dmfc1(scratch1, fs);
-    And(scratch2, scratch1, Operand(~Double::kSignMask));
-    And(scratch1, scratch1, Operand(Double::kSignMask));
-    Xor(scratch1, scratch1, Operand(Double::kSignMask));
-    Or(scratch2, scratch2, scratch1);
-    dmtc1(scratch2, fd);
   }
 }
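On r6 the hardware neg_s/neg_d instructions flip the sign bit even for NaN-like operands, so they can be used directly; on r2 they return a NaN operand unchanged, which is why the r2 paths above move the bits into general registers and toggle the sign by hand in the NaN case. A self-contained sketch of that bit manipulation for the double case (the constant is the IEEE-754 sign bit; this is an illustration, not the macro-assembler code):

#include <cstdint>
#include <cstring>

// Hedged sketch: negate a double by flipping only the sign bit, which also
// works for NaN inputs where an arithmetic negate may not change the sign.
double NegateBitwise(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof(bits));
  bits ^= 0x8000000000000000ULL;  // toggle the IEEE-754 sign bit
  double result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}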
 
@@ -2387,7 +2384,8 @@
   DCHECK(nan || target);
   // Check for unordered (NaN) cases.
   if (nan) {
-    bool long_branch = nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
+    bool long_branch =
+        nan->is_bound() ? !is_near(nan) : is_trampoline_emitted();
     if (kArchVariant != kMips64r6) {
       if (long_branch) {
         Label skip;
@@ -2427,7 +2425,7 @@
 
   if (target) {
     bool long_branch =
-        target->is_bound() ? is_near(target) : is_trampoline_emitted();
+        target->is_bound() ? !is_near(target) : is_trampoline_emitted();
     if (long_branch) {
       Label skip;
       Condition neg_cond = NegateFpuCondition(cond);
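Both hunks above fix an inverted branch-length predicate: when the nan or target label is already bound, the long-branch sequence is needed precisely when the label is not within short-branch range, while for an unbound label the long form is chosen once the trampoline pool has been emitted. The corrected decision, as a hedged stand-alone sketch (the boolean parameters stand in for the assembler queries used above):

// Hedged sketch of the corrected long-branch decision.
bool NeedsLongBranch(bool label_is_bound, bool label_is_near,
                     bool trampoline_emitted) {
  // Bound label: long form only when the target is out of short-branch range.
  // Unbound label: assume the long form once a trampoline has been emitted.
  return label_is_bound ? !label_is_near : trampoline_emitted;
}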
@@ -4379,7 +4377,7 @@
                               Register scratch2,
                               Label* gc_required,
                               AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
@@ -4543,7 +4541,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK(!AreAliased(result, scratch1, scratch2, at));
 
   // Make object size into bytes.
diff --git a/src/mips64/macro-assembler-mips64.h b/src/mips64/macro-assembler-mips64.h
index c96525c..4f67d70 100644
--- a/src/mips64/macro-assembler-mips64.h
+++ b/src/mips64/macro-assembler-mips64.h
@@ -243,6 +243,18 @@
                            Func GetLabelFunction);
 #undef COND_ARGS
 
+  // Emit code that loads |parameter_index|'th parameter from the stack to
+  // the register according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count,
diff --git a/src/mips64/simulator-mips64.cc b/src/mips64/simulator-mips64.cc
index 780c90c..02387d0 100644
--- a/src/mips64/simulator-mips64.cc
+++ b/src/mips64/simulator-mips64.cc
@@ -96,7 +96,7 @@
   void RedoBreakpoints();
 };
 
-#define UNSUPPORTED() printf("Sim: Unsupported instruction.\n");
+inline void UNSUPPORTED() { printf("Sim: Unsupported instruction.\n"); }
 
 void MipsDebugger::Stop(Instruction* instr) {
   // Get the stop code.
@@ -741,8 +741,8 @@
   last_debugger_input_ = input;
 }
 
-void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
-                            size_t size) {
+void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
+                            void* start_addr, size_t size) {
   int64_t start = reinterpret_cast<int64_t>(start_addr);
   int64_t intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -762,7 +762,8 @@
   }
 }
 
-CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
+CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                   void* page) {
   base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
@@ -773,8 +774,8 @@
 
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start,
-                             size_t size) {
+void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
+                             intptr_t start, size_t size) {
   DCHECK(size <= CachePage::kPageSize);
   DCHECK(AllOnOnePage(start, size - 1));
   DCHECK((start & CachePage::kLineMask) == 0);
@@ -786,7 +787,8 @@
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
-void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
+void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
+                            Instruction* instr) {
   int64_t address = reinterpret_cast<int64_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@@ -819,7 +821,7 @@
 Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
   i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
-    i_cache_ = new base::HashMap(&ICacheMatch);
+    i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
     isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize(isolate);
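The simulator's instruction cache maps page-aligned code addresses to CachePage entries and compares keys with the caller-supplied ICacheMatch function; after this merge that requires the explicitly named base::CustomMatcherHashMap rather than plain base::HashMap, which is the rename threaded through the rest of this file. A rough illustration of the same page-keyed lookup using standard containers (std::unordered_map and the 4 KiB page size are stand-ins, not V8's types):

#include <cstdint>
#include <unordered_map>

// Hedged sketch: an i-cache keyed by page-aligned addresses, mirroring the
// role of GetCachePage above. V8 uses its own hash map with a custom matcher.
struct FakeCachePage {};  // a validity bitmap and cached bytes would live here

using FakeICache = std::unordered_map<uintptr_t, FakeCachePage>;

FakeCachePage& LookupOrInsertPage(FakeICache& i_cache, const void* addr) {
  constexpr uintptr_t kPageMask = 0xfff;  // illustrative 4 KiB pages
  uintptr_t page = reinterpret_cast<uintptr_t>(addr) & ~kPageMask;
  return i_cache[page];  // default-constructs an empty page on first use
}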
@@ -933,7 +935,8 @@
 
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
   if (i_cache != nullptr) {
     for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
@@ -1935,15 +1938,15 @@
 
 // Software interrupt instructions are used by the simulator to call into the
 // C-based V8 runtime. They are also used for debugging with the simulator.
-void Simulator::SoftwareInterrupt(Instruction* instr) {
+void Simulator::SoftwareInterrupt() {
   // There are several instructions that could get us here,
   // the break_ instruction, or several variants of traps. All are
   // "SPECIAL" class opcodes, and are distinguished by function.
-  int32_t func = instr->FunctionFieldRaw();
-  uint32_t code = (func == BREAK) ? instr->Bits(25, 6) : -1;
+  int32_t func = instr_.FunctionFieldRaw();
+  uint32_t code = (func == BREAK) ? instr_.Bits(25, 6) : -1;
   // We first check if we met a call_rt_redirected.
-  if (instr->InstructionBits() == rtCallRedirInstr) {
-    Redirection* redirection = Redirection::FromSwiInstruction(instr);
+  if (instr_.InstructionBits() == rtCallRedirInstr) {
+    Redirection* redirection = Redirection::FromSwiInstruction(instr_.instr());
     int64_t arg0 = get_register(a0);
     int64_t arg1 = get_register(a1);
     int64_t arg2 = get_register(a2);
@@ -2169,7 +2172,7 @@
       PrintWatchpoint(code);
     } else {
       IncreaseStopCounter(code);
-      HandleStop(code, instr);
+      HandleStop(code, instr_.instr());
     }
   } else {
     // All remaining break_ codes, and all traps are handled here.
@@ -2364,6 +2367,49 @@
   return result;
 }
 
+enum class KeepSign : bool { no = false, yes };
+
+template <typename T, typename std::enable_if<std::is_floating_point<T>::value,
+                                              int>::type = 0>
+T FPUCanonalizeNaNArg(T result, T arg, KeepSign keepSign = KeepSign::no) {
+  DCHECK(std::isnan(arg));
+  T qNaN = std::numeric_limits<T>::quiet_NaN();
+  if (keepSign == KeepSign::yes) {
+    return std::copysign(qNaN, result);
+  }
+  return qNaN;
+}
+
+template <typename T>
+T FPUCanonalizeNaNArgs(T result, KeepSign keepSign, T first) {
+  if (std::isnan(first)) {
+    return FPUCanonalizeNaNArg(result, first, keepSign);
+  }
+  return result;
+}
+
+template <typename T, typename... Args>
+T FPUCanonalizeNaNArgs(T result, KeepSign keepSign, T first, Args... args) {
+  if (std::isnan(first)) {
+    return FPUCanonalizeNaNArg(result, first, keepSign);
+  }
+  return FPUCanonalizeNaNArgs(result, keepSign, args...);
+}
+
+template <typename Func, typename T, typename... Args>
+T FPUCanonalizeOperation(Func f, T first, Args... args) {
+  return FPUCanonalizeOperation(f, KeepSign::no, first, args...);
+}
+
+template <typename Func, typename T, typename... Args>
+T FPUCanonalizeOperation(Func f, KeepSign keepSign, T first, Args... args) {
+  T result = f(first, args...);
+  if (std::isnan(result)) {
+    result = FPUCanonalizeNaNArgs(result, keepSign, first, args...);
+  }
+  return result;
+}
+
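The template helpers added above implement NaN canonicalization for the simulated FPU: run the arithmetic, and if the result is NaN because one of the operands was NaN, substitute the quiet NaN, optionally copysign'd from the raw result (KeepSign::yes, used for NEG_S/NEG_D so the sign flip on a NaN operand survives). A tiny self-contained illustration of the rule, outside V8's types:

#include <cmath>
#include <cstdio>
#include <limits>

// Hedged illustration: whatever NaN payload the host FPU produces, the value
// written back to the simulated register is the canonical quiet NaN.
int main() {
  double fs = std::numeric_limits<double>::quiet_NaN();  // NaN operand
  double raw = fs + 1.0;  // NaN result with a host-dependent payload
  double canonical =
      std::isnan(raw) ? std::numeric_limits<double>::quiet_NaN() : raw;
  std::printf("result is NaN: %d\n", std::isnan(canonical) ? 1 : 0);
  return 0;
}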
 // Handle execution based on instruction types.
 
 void Simulator::DecodeTypeRegisterSRsType() {
@@ -2374,9 +2420,9 @@
   int32_t ft_int = bit_cast<int32_t>(ft);
   int32_t fd_int = bit_cast<int32_t>(fd);
   uint32_t cc, fcsr_cc;
-  cc = get_instr()->FCccValue();
+  cc = instr_.FCccValue();
   fcsr_cc = get_fcsr_condition_bit(cc);
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case RINT: {
       DCHECK(kArchVariant == kMips64r6);
       float result, temp_result;
@@ -2416,41 +2462,65 @@
       break;
     }
     case ADD_S:
-      set_fpu_register_float(fd_reg(), fs + ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs + rhs; },
+                                 fs, ft));
       break;
     case SUB_S:
-      set_fpu_register_float(fd_reg(), fs - ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs - rhs; },
+                                 fs, ft));
+      break;
+    case MADDF_S:
+      DCHECK(kArchVariant == kMips64r6);
+      set_fpu_register_float(fd_reg(), fd + (fs * ft));
+      break;
+    case MSUBF_S:
+      DCHECK(kArchVariant == kMips64r6);
+      set_fpu_register_float(fd_reg(), fd - (fs * ft));
       break;
     case MUL_S:
-      set_fpu_register_float(fd_reg(), fs * ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs * rhs; },
+                                 fs, ft));
       break;
     case DIV_S:
-      set_fpu_register_float(fd_reg(), fs / ft);
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float lhs, float rhs) { return lhs / rhs; },
+                                 fs, ft));
       break;
     case ABS_S:
-      set_fpu_register_float(fd_reg(), fabs(fs));
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float fs) { return FPAbs(fs); }, fs));
       break;
     case MOV_S:
       set_fpu_register_float(fd_reg(), fs);
       break;
     case NEG_S:
-      set_fpu_register_float(fd_reg(), -fs);
+      set_fpu_register_float(
+          fd_reg(), FPUCanonalizeOperation([](float src) { return -src; },
+                                           KeepSign::yes, fs));
       break;
     case SQRT_S:
-      lazily_initialize_fast_sqrt(isolate_);
-      set_fpu_register_float(fd_reg(), fast_sqrt(fs, isolate_));
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float src) { return std::sqrt(src); }, fs));
       break;
-    case RSQRT_S: {
-      lazily_initialize_fast_sqrt(isolate_);
-      float result = 1.0 / fast_sqrt(fs, isolate_);
-      set_fpu_register_float(fd_reg(), result);
+    case RSQRT_S:
+      set_fpu_register_float(
+          fd_reg(), FPUCanonalizeOperation(
+                        [](float src) { return 1.0 / std::sqrt(src); }, fs));
       break;
-    }
-    case RECIP_S: {
-      float result = 1.0 / fs;
-      set_fpu_register_float(fd_reg(), result);
+    case RECIP_S:
+      set_fpu_register_float(
+          fd_reg(),
+          FPUCanonalizeOperation([](float src) { return 1.0 / src; }, fs));
       break;
-    }
     case C_F_D:
       set_fcsr_bit(fcsr_cc, false);
       break;
@@ -2696,7 +2766,7 @@
       uint32_t ft_cc = (ft_reg() >> 2) & 0x7;
       ft_cc = get_fcsr_condition_bit(ft_cc);
 
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         // MOVT.D
         if (test_fcsr_bit(ft_cc)) set_fpu_register_float(fd_reg(), fs);
       } else {
@@ -2717,15 +2787,14 @@
   double ft, fs, fd;
   uint32_t cc, fcsr_cc;
   fs = get_fpu_register_double(fs_reg());
-  ft = (get_instr()->FunctionFieldRaw() != MOVF)
-           ? get_fpu_register_double(ft_reg())
-           : 0.0;
+  ft = (instr_.FunctionFieldRaw() != MOVF) ? get_fpu_register_double(ft_reg())
+                                           : 0.0;
   fd = get_fpu_register_double(fd_reg());
-  cc = get_instr()->FCccValue();
+  cc = instr_.FCccValue();
   fcsr_cc = get_fcsr_condition_bit(cc);
   int64_t ft_int = bit_cast<int64_t>(ft);
   int64_t fd_int = bit_cast<int64_t>(fd);
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case RINT: {
       DCHECK(kArchVariant == kMips64r6);
       double result, temp, temp_result;
@@ -2793,7 +2862,7 @@
       // Same function field for MOVT.D and MOVF.D
       uint32_t ft_cc = (ft_reg() >> 2) & 0x7;
       ft_cc = get_fcsr_condition_bit(ft_cc);
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         // MOVT.D
         if (test_fcsr_bit(ft_cc)) set_fpu_register_double(fd_reg(), fs);
       } else {
@@ -2819,41 +2888,65 @@
       set_fpu_register_double(fd_reg(), FPUMax(ft, fs));
       break;
     case ADD_D:
-      set_fpu_register_double(fd_reg(), fs + ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs + rhs; }, fs, ft));
       break;
     case SUB_D:
-      set_fpu_register_double(fd_reg(), fs - ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs - rhs; }, fs, ft));
+      break;
+    case MADDF_D:
+      DCHECK(kArchVariant == kMips64r6);
+      set_fpu_register_double(fd_reg(), fd + (fs * ft));
+      break;
+    case MSUBF_D:
+      DCHECK(kArchVariant == kMips64r6);
+      set_fpu_register_double(fd_reg(), fd - (fs * ft));
       break;
     case MUL_D:
-      set_fpu_register_double(fd_reg(), fs * ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs * rhs; }, fs, ft));
       break;
     case DIV_D:
-      set_fpu_register_double(fd_reg(), fs / ft);
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation(
+              [](double lhs, double rhs) { return lhs / rhs; }, fs, ft));
       break;
     case ABS_D:
-      set_fpu_register_double(fd_reg(), fabs(fs));
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return FPAbs(fs); }, fs));
       break;
     case MOV_D:
       set_fpu_register_double(fd_reg(), fs);
       break;
     case NEG_D:
-      set_fpu_register_double(fd_reg(), -fs);
+      set_fpu_register_double(
+          fd_reg(), FPUCanonalizeOperation([](double src) { return -src; },
+                                           KeepSign::yes, fs));
       break;
     case SQRT_D:
-      lazily_initialize_fast_sqrt(isolate_);
-      set_fpu_register_double(fd_reg(), fast_sqrt(fs, isolate_));
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return std::sqrt(fs); }, fs));
       break;
-    case RSQRT_D: {
-      lazily_initialize_fast_sqrt(isolate_);
-      double result = 1.0 / fast_sqrt(fs, isolate_);
-      set_fpu_register_double(fd_reg(), result);
+    case RSQRT_D:
+      set_fpu_register_double(
+          fd_reg(), FPUCanonalizeOperation(
+                        [](double fs) { return 1.0 / std::sqrt(fs); }, fs));
       break;
-    }
-    case RECIP_D: {
-      double result = 1.0 / fs;
-      set_fpu_register_double(fd_reg(), result);
+    case RECIP_D:
+      set_fpu_register_double(
+          fd_reg(),
+          FPUCanonalizeOperation([](double fs) { return 1.0 / fs; }, fs));
       break;
-    }
     case C_UN_D:
       set_fcsr_bit(fcsr_cc, std::isnan(fs) || std::isnan(ft));
       break;
@@ -3060,7 +3153,7 @@
   float fs = get_fpu_register_float(fs_reg());
   float ft = get_fpu_register_float(ft_reg());
   int64_t alu_out = 0x12345678;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case CVT_S_W:  // Convert word to float (single).
       alu_out = get_fpu_register_signed_word(fs_reg());
       set_fpu_register_float(fd_reg(), static_cast<float>(alu_out));
@@ -3152,7 +3245,7 @@
   double fs = get_fpu_register_double(fs_reg());
   double ft = get_fpu_register_double(ft_reg());
   int64_t i64;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case CVT_D_L:  // Mips32r2 instruction.
       i64 = get_fpu_register(fs_reg());
       set_fpu_register_double(fd_reg(), static_cast<double>(i64));
@@ -3241,7 +3334,7 @@
 
 
 void Simulator::DecodeTypeRegisterCOP1() {
-  switch (get_instr()->RsFieldRaw()) {
+  switch (instr_.RsFieldRaw()) {
     case BC1:  // Branch on coprocessor condition.
     case BC1EQZ:
     case BC1NEZ:
@@ -3304,14 +3397,43 @@
 
 
 void Simulator::DecodeTypeRegisterCOP1X() {
-  switch (get_instr()->FunctionFieldRaw()) {
-    case MADD_D:
+  switch (instr_.FunctionFieldRaw()) {
+    case MADD_S: {
+      DCHECK(kArchVariant == kMips64r2);
+      float fr, ft, fs;
+      fr = get_fpu_register_float(fr_reg());
+      fs = get_fpu_register_float(fs_reg());
+      ft = get_fpu_register_float(ft_reg());
+      set_fpu_register_float(fd_reg(), fs * ft + fr);
+      break;
+    }
+    case MSUB_S: {
+      DCHECK(kArchVariant == kMips64r2);
+      float fr, ft, fs;
+      fr = get_fpu_register_float(fr_reg());
+      fs = get_fpu_register_float(fs_reg());
+      ft = get_fpu_register_float(ft_reg());
+      set_fpu_register_float(fd_reg(), fs * ft - fr);
+      break;
+    }
+    case MADD_D: {
+      DCHECK(kArchVariant == kMips64r2);
       double fr, ft, fs;
       fr = get_fpu_register_double(fr_reg());
       fs = get_fpu_register_double(fs_reg());
       ft = get_fpu_register_double(ft_reg());
       set_fpu_register_double(fd_reg(), fs * ft + fr);
       break;
+    }
+    case MSUB_D: {
+      DCHECK(kArchVariant == kMips64r2);
+      double fr, ft, fs;
+      fr = get_fpu_register_double(fr_reg());
+      fs = get_fpu_register_double(fs_reg());
+      ft = get_fpu_register_double(ft_reg());
+      set_fpu_register_double(fd_reg(), fs * ft - fr);
+      break;
+    }
     default:
       UNREACHABLE();
   }
@@ -3324,7 +3446,7 @@
   int64_t alu_out;
   bool do_interrupt = false;
 
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case SELEQZ_S:
       DCHECK(kArchVariant == kMips64r6);
       set_register(rd_reg(), rt() == 0 ? rs() : 0);
@@ -3570,7 +3692,7 @@
     case DIV:
     case DDIV: {
       const int64_t int_min_value =
-          get_instr()->FunctionFieldRaw() == DIV ? INT_MIN : LONG_MIN;
+          instr_.FunctionFieldRaw() == DIV ? INT_MIN : LONG_MIN;
       switch (kArchVariant) {
         case kMips64r2:
           // Divide by zero and overflow was not checked in the
@@ -3616,7 +3738,7 @@
         case kMips64r6: {
           uint32_t rt_u_32 = static_cast<uint32_t>(rt_u());
           uint32_t rs_u_32 = static_cast<uint32_t>(rs_u());
-          switch (get_instr()->SaValue()) {
+          switch (sa()) {
             case DIV_OP:
               if (rt_u_32 != 0) {
                 set_register(rd_reg(), rs_u_32 / rt_u_32);
@@ -3645,7 +3767,7 @@
     case DDIVU:
       switch (kArchVariant) {
         case kMips64r6: {
-          switch (get_instr()->SaValue()) {
+          switch (instr_.SaValue()) {
             case DIV_OP:
               if (rt_u() != 0) {
                 set_register(rd_reg(), rs_u() / rt_u());
@@ -3767,9 +3889,9 @@
       }
       break;
     case MOVCI: {
-      uint32_t cc = get_instr()->FBccValue();
+      uint32_t cc = instr_.FBccValue();
       uint32_t fcsr_cc = get_fcsr_condition_bit(cc);
-      if (get_instr()->Bit(16)) {  // Read Tf bit.
+      if (instr_.Bit(16)) {  // Read Tf bit.
         if (test_fcsr_bit(fcsr_cc)) set_register(rd_reg(), rs());
       } else {
         if (!test_fcsr_bit(fcsr_cc)) set_register(rd_reg(), rs());
@@ -3785,14 +3907,14 @@
       UNREACHABLE();
   }
   if (do_interrupt) {
-    SoftwareInterrupt(get_instr());
+    SoftwareInterrupt();
   }
 }
 
 
 void Simulator::DecodeTypeRegisterSPECIAL2() {
   int64_t alu_out;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case MUL:
       alu_out = static_cast<int32_t>(rs_u()) * static_cast<int32_t>(rt_u());
       SetResult(rd_reg(), alu_out);
@@ -3821,7 +3943,7 @@
 
 void Simulator::DecodeTypeRegisterSPECIAL3() {
   int64_t alu_out;
-  switch (get_instr()->FunctionFieldRaw()) {
+  switch (instr_.FunctionFieldRaw()) {
     case INS: {  // Mips64r2 instruction.
       // Interpret rd field as 5-bit msb of insert.
       uint16_t msb = rd_reg();
@@ -3890,7 +4012,7 @@
       break;
     }
     case BSHFL: {
-      int32_t sa = get_instr()->SaFieldRaw() >> kSaShift;
+      int32_t sa = instr_.SaFieldRaw() >> kSaShift;
       switch (sa) {
         case BITSWAP: {
           uint32_t input = static_cast<uint32_t>(rt());
@@ -3968,7 +4090,7 @@
           break;
         }
         default: {
-          const uint8_t bp2 = get_instr()->Bp2Value();
+          const uint8_t bp2 = instr_.Bp2Value();
           sa >>= kBp2Bits;
           switch (sa) {
             case ALIGN: {
@@ -3993,7 +4115,7 @@
       break;
     }
     case DBSHFL: {
-      int32_t sa = get_instr()->SaFieldRaw() >> kSaShift;
+      int32_t sa = instr_.SaFieldRaw() >> kSaShift;
       switch (sa) {
         case DBITSWAP: {
           switch (sa) {
@@ -4067,7 +4189,7 @@
           break;
         }
         default: {
-          const uint8_t bp3 = get_instr()->Bp3Value();
+          const uint8_t bp3 = instr_.Bp3Value();
           sa >>= kBp3Bits;
           switch (sa) {
             case DALIGN: {
@@ -4096,12 +4218,9 @@
   }
 }
 
-
-void Simulator::DecodeTypeRegister(Instruction* instr) {
-  set_instr(instr);
-
+void Simulator::DecodeTypeRegister() {
   // ---------- Execution.
-  switch (instr->OpcodeFieldRaw()) {
+  switch (instr_.OpcodeFieldRaw()) {
     case COP1:
       DecodeTypeRegisterCOP1();
       break;
@@ -4127,18 +4246,18 @@
 
 
 // Type 2: instructions using a 16-, 21- or 26-bit immediate (e.g. beq, beqc).
-void Simulator::DecodeTypeImmediate(Instruction* instr) {
+void Simulator::DecodeTypeImmediate() {
   // Instruction fields.
-  Opcode op = instr->OpcodeFieldRaw();
-  int32_t rs_reg = instr->RsValue();
-  int64_t rs = get_register(instr->RsValue());
+  Opcode op = instr_.OpcodeFieldRaw();
+  int32_t rs_reg = instr_.RsValue();
+  int64_t rs = get_register(instr_.RsValue());
   uint64_t rs_u = static_cast<uint64_t>(rs);
-  int32_t rt_reg = instr->RtValue();  // Destination register.
+  int32_t rt_reg = instr_.RtValue();  // Destination register.
   int64_t rt = get_register(rt_reg);
-  int16_t imm16 = instr->Imm16Value();
-  int32_t imm18 = instr->Imm18Value();
+  int16_t imm16 = instr_.Imm16Value();
+  int32_t imm18 = instr_.Imm18Value();
 
-  int32_t ft_reg = instr->FtValue();  // Destination register.
+  int32_t ft_reg = instr_.FtValue();  // Destination register.
 
   // Zero extended immediate.
   uint64_t oe_imm16 = 0xffff & imm16;
@@ -4163,38 +4282,36 @@
   const int kInt64AlignmentMask = sizeof(uint64_t) - 1;
 
   // Branch instructions common part.
-  auto BranchAndLinkHelper = [this, instr, &next_pc,
-                              &execute_branch_delay_instruction](
-      bool do_branch) {
-    execute_branch_delay_instruction = true;
-    int64_t current_pc = get_pc();
-    if (do_branch) {
-      int16_t imm16 = instr->Imm16Value();
-      next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
-      set_register(31, current_pc + 2 * Instruction::kInstrSize);
-    } else {
-      next_pc = current_pc + 2 * Instruction::kInstrSize;
-    }
-  };
+  auto BranchAndLinkHelper =
+      [this, &next_pc, &execute_branch_delay_instruction](bool do_branch) {
+        execute_branch_delay_instruction = true;
+        int64_t current_pc = get_pc();
+        if (do_branch) {
+          int16_t imm16 = instr_.Imm16Value();
+          next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
+          set_register(31, current_pc + 2 * Instruction::kInstrSize);
+        } else {
+          next_pc = current_pc + 2 * Instruction::kInstrSize;
+        }
+      };
 
-  auto BranchHelper = [this, instr, &next_pc,
+  auto BranchHelper = [this, &next_pc,
                        &execute_branch_delay_instruction](bool do_branch) {
     execute_branch_delay_instruction = true;
     int64_t current_pc = get_pc();
     if (do_branch) {
-      int16_t imm16 = instr->Imm16Value();
+      int16_t imm16 = instr_.Imm16Value();
       next_pc = current_pc + (imm16 << 2) + Instruction::kInstrSize;
     } else {
       next_pc = current_pc + 2 * Instruction::kInstrSize;
     }
   };
 
-  auto BranchAndLinkCompactHelper = [this, instr, &next_pc](bool do_branch,
-                                                            int bits) {
+  auto BranchAndLinkCompactHelper = [this, &next_pc](bool do_branch, int bits) {
     int64_t current_pc = get_pc();
     CheckForbiddenSlot(current_pc);
     if (do_branch) {
-      int32_t imm = instr->ImmValue(bits);
+      int32_t imm = instr_.ImmValue(bits);
       imm <<= 32 - bits;
       imm >>= 32 - bits;
       next_pc = current_pc + (imm << 2) + Instruction::kInstrSize;
@@ -4202,11 +4319,11 @@
     }
   };
 
-  auto BranchCompactHelper = [&next_pc, this, instr](bool do_branch, int bits) {
+  auto BranchCompactHelper = [this, &next_pc](bool do_branch, int bits) {
     int64_t current_pc = get_pc();
     CheckForbiddenSlot(current_pc);
     if (do_branch) {
-      int32_t imm = instr->ImmValue(bits);
+      int32_t imm = instr_.ImmValue(bits);
       imm <<= 32 - bits;
       imm >>= 32 - bits;
       next_pc = get_pc() + (imm << 2) + Instruction::kInstrSize;
@@ -4216,12 +4333,12 @@
   switch (op) {
     // ------------- COP1. Coprocessor instructions.
     case COP1:
-      switch (instr->RsFieldRaw()) {
+      switch (instr_.RsFieldRaw()) {
         case BC1: {  // Branch on coprocessor condition.
-          uint32_t cc = instr->FBccValue();
+          uint32_t cc = instr_.FBccValue();
           uint32_t fcsr_cc = get_fcsr_condition_bit(cc);
           uint32_t cc_value = test_fcsr_bit(fcsr_cc);
-          bool do_branch = (instr->FBtrueValue()) ? cc_value : !cc_value;
+          bool do_branch = (instr_.FBtrueValue()) ? cc_value : !cc_value;
           BranchHelper(do_branch);
           break;
         }
@@ -4237,7 +4354,7 @@
       break;
     // ------------- REGIMM class.
     case REGIMM:
-      switch (instr->RtFieldRaw()) {
+      switch (instr_.RtFieldRaw()) {
         case BLTZ:
           BranchHelper(rs < 0);
           break;
@@ -4455,7 +4572,7 @@
       set_register(rt_reg, ReadB(rs + se_imm16));
       break;
     case LH:
-      set_register(rt_reg, ReadH(rs + se_imm16, instr));
+      set_register(rt_reg, ReadH(rs + se_imm16, instr_.instr()));
       break;
     case LWL: {
       // al_offset is offset of the effective address within an aligned word.
@@ -4463,26 +4580,26 @@
       uint8_t byte_shift = kInt32AlignmentMask - al_offset;
       uint32_t mask = (1 << byte_shift * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      int32_t val = ReadW(addr, instr);
+      int32_t val = ReadW(addr, instr_.instr());
       val <<= byte_shift * 8;
       val |= rt & mask;
       set_register(rt_reg, static_cast<int64_t>(val));
       break;
     }
     case LW:
-      set_register(rt_reg, ReadW(rs + se_imm16, instr));
+      set_register(rt_reg, ReadW(rs + se_imm16, instr_.instr()));
       break;
     case LWU:
-      set_register(rt_reg, ReadWU(rs + se_imm16, instr));
+      set_register(rt_reg, ReadWU(rs + se_imm16, instr_.instr()));
       break;
     case LD:
-      set_register(rt_reg, Read2W(rs + se_imm16, instr));
+      set_register(rt_reg, Read2W(rs + se_imm16, instr_.instr()));
       break;
     case LBU:
       set_register(rt_reg, ReadBU(rs + se_imm16));
       break;
     case LHU:
-      set_register(rt_reg, ReadHU(rs + se_imm16, instr));
+      set_register(rt_reg, ReadHU(rs + se_imm16, instr_.instr()));
       break;
     case LWR: {
       // al_offset is offset of the effective address within an aligned word.
@@ -4490,7 +4607,7 @@
       uint8_t byte_shift = kInt32AlignmentMask - al_offset;
       uint32_t mask = al_offset ? (~0 << (byte_shift + 1) * 8) : 0;
       addr = rs + se_imm16 - al_offset;
-      alu_out = ReadW(addr, instr);
+      alu_out = ReadW(addr, instr_.instr());
       alu_out = static_cast<uint32_t> (alu_out) >> al_offset * 8;
       alu_out |= rt & mask;
       set_register(rt_reg, alu_out);
@@ -4502,7 +4619,7 @@
       uint8_t byte_shift = kInt64AlignmentMask - al_offset;
       uint64_t mask = (1UL << byte_shift * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      alu_out = Read2W(addr, instr);
+      alu_out = Read2W(addr, instr_.instr());
       alu_out <<= byte_shift * 8;
       alu_out |= rt & mask;
       set_register(rt_reg, alu_out);
@@ -4514,7 +4631,7 @@
       uint8_t byte_shift = kInt64AlignmentMask - al_offset;
       uint64_t mask = al_offset ? (~0UL << (byte_shift + 1) * 8) : 0UL;
       addr = rs + se_imm16 - al_offset;
-      alu_out = Read2W(addr, instr);
+      alu_out = Read2W(addr, instr_.instr());
       alu_out = alu_out >> al_offset * 8;
       alu_out |= rt & mask;
       set_register(rt_reg, alu_out);
@@ -4524,31 +4641,31 @@
       WriteB(rs + se_imm16, static_cast<int8_t>(rt));
       break;
     case SH:
-      WriteH(rs + se_imm16, static_cast<uint16_t>(rt), instr);
+      WriteH(rs + se_imm16, static_cast<uint16_t>(rt), instr_.instr());
       break;
     case SWL: {
       uint8_t al_offset = (rs + se_imm16) & kInt32AlignmentMask;
       uint8_t byte_shift = kInt32AlignmentMask - al_offset;
       uint32_t mask = byte_shift ? (~0 << (al_offset + 1) * 8) : 0;
       addr = rs + se_imm16 - al_offset;
-      uint64_t mem_value = ReadW(addr, instr) & mask;
+      uint64_t mem_value = ReadW(addr, instr_.instr()) & mask;
       mem_value |= static_cast<uint32_t>(rt) >> byte_shift * 8;
-      WriteW(addr, static_cast<int32_t>(mem_value), instr);
+      WriteW(addr, static_cast<int32_t>(mem_value), instr_.instr());
       break;
     }
     case SW:
-      WriteW(rs + se_imm16, static_cast<int32_t>(rt), instr);
+      WriteW(rs + se_imm16, static_cast<int32_t>(rt), instr_.instr());
       break;
     case SD:
-      Write2W(rs + se_imm16, rt, instr);
+      Write2W(rs + se_imm16, rt, instr_.instr());
       break;
     case SWR: {
       uint8_t al_offset = (rs + se_imm16) & kInt32AlignmentMask;
       uint32_t mask = (1 << al_offset * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      uint64_t mem_value = ReadW(addr, instr);
+      uint64_t mem_value = ReadW(addr, instr_.instr());
       mem_value = (rt << al_offset * 8) | (mem_value & mask);
-      WriteW(addr, static_cast<int32_t>(mem_value), instr);
+      WriteW(addr, static_cast<int32_t>(mem_value), instr_.instr());
       break;
     }
     case SDL: {
@@ -4556,39 +4673,39 @@
       uint8_t byte_shift = kInt64AlignmentMask - al_offset;
       uint64_t mask = byte_shift ? (~0UL << (al_offset + 1) * 8) : 0;
       addr = rs + se_imm16 - al_offset;
-      uint64_t mem_value = Read2W(addr, instr) & mask;
+      uint64_t mem_value = Read2W(addr, instr_.instr()) & mask;
       mem_value |= rt >> byte_shift * 8;
-      Write2W(addr, mem_value, instr);
+      Write2W(addr, mem_value, instr_.instr());
       break;
     }
     case SDR: {
       uint8_t al_offset = (rs + se_imm16) & kInt64AlignmentMask;
       uint64_t mask = (1UL << al_offset * 8) - 1;
       addr = rs + se_imm16 - al_offset;
-      uint64_t mem_value = Read2W(addr, instr);
+      uint64_t mem_value = Read2W(addr, instr_.instr());
       mem_value = (rt << al_offset * 8) | (mem_value & mask);
-      Write2W(addr, mem_value, instr);
+      Write2W(addr, mem_value, instr_.instr());
       break;
     }
     case LWC1:
       set_fpu_register(ft_reg, kFPUInvalidResult);  // Trash upper 32 bits.
-      set_fpu_register_word(ft_reg, ReadW(rs + se_imm16, instr));
+      set_fpu_register_word(ft_reg, ReadW(rs + se_imm16, instr_.instr()));
       break;
     case LDC1:
-      set_fpu_register_double(ft_reg, ReadD(rs + se_imm16, instr));
+      set_fpu_register_double(ft_reg, ReadD(rs + se_imm16, instr_.instr()));
       break;
     case SWC1: {
       int32_t alu_out_32 = static_cast<int32_t>(get_fpu_register(ft_reg));
-      WriteW(rs + se_imm16, alu_out_32, instr);
+      WriteW(rs + se_imm16, alu_out_32, instr_.instr());
       break;
     }
     case SDC1:
-      WriteD(rs + se_imm16, get_fpu_register_double(ft_reg), instr);
+      WriteD(rs + se_imm16, get_fpu_register_double(ft_reg), instr_.instr());
       break;
     // ------------- PC-Relative instructions.
     case PCREL: {
       // rt field: checking 5-bits.
-      int32_t imm21 = instr->Imm21Value();
+      int32_t imm21 = instr_.Imm21Value();
       int64_t current_pc = get_pc();
       uint8_t rt = (imm21 >> kImm16Bits);
       switch (rt) {
@@ -4600,14 +4717,14 @@
           alu_out = current_pc + (se_imm16 << 16);
           break;
         default: {
-          int32_t imm19 = instr->Imm19Value();
+          int32_t imm19 = instr_.Imm19Value();
           // rt field: checking the most significant 3-bits.
           rt = (imm21 >> kImm18Bits);
           switch (rt) {
             case LDPC:
               addr =
                   (current_pc & static_cast<int64_t>(~0x7)) + (se_imm18 << 3);
-              alu_out = Read2W(addr, instr);
+              alu_out = Read2W(addr, instr_.instr());
               break;
             default: {
               // rt field: checking the most significant 2-bits.
@@ -4671,13 +4788,14 @@
 
 
 // Type 3: instructions using a 26-bit immediate (e.g. j, jal).
-void Simulator::DecodeTypeJump(Instruction* instr) {
+void Simulator::DecodeTypeJump() {
+  SimInstruction simInstr = instr_;
   // Get current pc.
   int64_t current_pc = get_pc();
   // Get unchanged bits of pc.
   int64_t pc_high_bits = current_pc & 0xfffffffff0000000;
   // Next pc.
-  int64_t next_pc = pc_high_bits | (instr->Imm26Value() << 2);
+  int64_t next_pc = pc_high_bits | (simInstr.Imm26Value() << 2);
 
   // Execute branch delay slot.
   // We don't check for end_sim_pc. First it should not be met as the current pc
@@ -4688,7 +4806,7 @@
 
   // Update pc and ra if necessary.
   // Do this after the branch delay execution.
-  if (instr->IsLinkingInstruction()) {
+  if (simInstr.IsLinkingInstruction()) {
     set_register(31, current_pc + 2 * Instruction::kInstrSize);
   }
   set_pc(next_pc);
@@ -4713,15 +4831,16 @@
     dasm.InstructionDecode(buffer, reinterpret_cast<byte*>(instr));
   }
 
-  switch (instr->InstructionType(Instruction::TypeChecks::EXTRA)) {
+  instr_ = instr;
+  switch (instr_.InstructionType()) {
     case Instruction::kRegisterType:
-      DecodeTypeRegister(instr);
+      DecodeTypeRegister();
       break;
     case Instruction::kImmediateType:
-      DecodeTypeImmediate(instr);
+      DecodeTypeImmediate();
       break;
     case Instruction::kJumpType:
-      DecodeTypeJump(instr);
+      DecodeTypeJump();
       break;
     default:
       UNSUPPORTED();
diff --git a/src/mips64/simulator-mips64.h b/src/mips64/simulator-mips64.h
index cd606e2..df98465 100644
--- a/src/mips64/simulator-mips64.h
+++ b/src/mips64/simulator-mips64.h
@@ -122,6 +122,39 @@
   char validity_map_[kValidityMapSize];  // One byte per line.
 };
 
+class SimInstructionBase : public InstructionBase {
+ public:
+  Type InstructionType() const { return type_; }
+  inline Instruction* instr() const { return instr_; }
+  inline int32_t operand() const { return operand_; }
+
+ protected:
+  SimInstructionBase() : operand_(-1), instr_(nullptr), type_(kUnsupported) {}
+  explicit SimInstructionBase(Instruction* instr) {}
+
+  int32_t operand_;
+  Instruction* instr_;
+  Type type_;
+
+ private:
+  DISALLOW_ASSIGN(SimInstructionBase);
+};
+
+class SimInstruction : public InstructionGetters<SimInstructionBase> {
+ public:
+  SimInstruction() {}
+
+  explicit SimInstruction(Instruction* instr) { *this = instr; }
+
+  SimInstruction& operator=(Instruction* instr) {
+    operand_ = *reinterpret_cast<const int32_t*>(instr);
+    instr_ = instr;
+    type_ = InstructionBase::InstructionType();
+    DCHECK(reinterpret_cast<void*>(&operand_) == this);
+    return *this;
+  }
+};
+
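SimInstruction copies the raw 32-bit instruction word into operand_ on assignment and caches the decoded type, so the InstructionGetters mixin reads fields from the copy instead of dereferencing the original code address on every rs_reg()/FunctionFieldRaw() call; the DCHECK asserts that operand_ is the object's first field, which is what lets the base class's getters treat the object's own storage as the instruction bits. A stripped-down sketch of the same decode-from-a-cached-word idea (field positions follow the MIPS R-type encoding; the class is illustrative, not V8's):

#include <cstdint>

// Hedged sketch: cache the instruction word once, then decode fields from it.
class CachedInstr {
 public:
  explicit CachedInstr(uint32_t word) : word_(word) {}
  int RsValue() const { return (word_ >> 21) & 0x1f; }  // bits 25..21
  int RtValue() const { return (word_ >> 16) & 0x1f; }  // bits 20..16
  int RdValue() const { return (word_ >> 11) & 0x1f; }  // bits 15..11
 private:
  uint32_t word_;
};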
 class Simulator {
  public:
   friend class MipsDebugger;
@@ -226,7 +259,7 @@
   // Call on program start.
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   // V8 generally calls into generated JS code with 5 parameters and into
   // generated RegExp code with 7 parameters. This is a convenience function,
@@ -246,7 +279,8 @@
   char* last_debugger_input() { return last_debugger_input_; }
 
   // ICache checking.
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_ra, end_sim_pc).
@@ -314,6 +348,8 @@
   inline int32_t SetDoubleHIW(double* addr);
   inline int32_t SetDoubleLOW(double* addr);
 
+  SimInstruction instr_;
+
   // functions called from DecodeTypeRegister.
   void DecodeTypeRegisterCOP1();
 
@@ -335,40 +371,36 @@
   void DecodeTypeRegisterLRsType();
 
   // Executing is handled based on the instruction type.
-  void DecodeTypeRegister(Instruction* instr);
+  void DecodeTypeRegister();
 
-  Instruction* currentInstr_;
-  inline Instruction* get_instr() const { return currentInstr_; }
-  inline void set_instr(Instruction* instr) { currentInstr_ = instr; }
-
-  inline int32_t rs_reg() const { return currentInstr_->RsValue(); }
+  inline int32_t rs_reg() const { return instr_.RsValue(); }
   inline int64_t rs() const { return get_register(rs_reg()); }
   inline uint64_t rs_u() const {
     return static_cast<uint64_t>(get_register(rs_reg()));
   }
-  inline int32_t rt_reg() const { return currentInstr_->RtValue(); }
+  inline int32_t rt_reg() const { return instr_.RtValue(); }
   inline int64_t rt() const { return get_register(rt_reg()); }
   inline uint64_t rt_u() const {
     return static_cast<uint64_t>(get_register(rt_reg()));
   }
-  inline int32_t rd_reg() const { return currentInstr_->RdValue(); }
-  inline int32_t fr_reg() const { return currentInstr_->FrValue(); }
-  inline int32_t fs_reg() const { return currentInstr_->FsValue(); }
-  inline int32_t ft_reg() const { return currentInstr_->FtValue(); }
-  inline int32_t fd_reg() const { return currentInstr_->FdValue(); }
-  inline int32_t sa() const { return currentInstr_->SaValue(); }
-  inline int32_t lsa_sa() const { return currentInstr_->LsaSaValue(); }
+  inline int32_t rd_reg() const { return instr_.RdValue(); }
+  inline int32_t fr_reg() const { return instr_.FrValue(); }
+  inline int32_t fs_reg() const { return instr_.FsValue(); }
+  inline int32_t ft_reg() const { return instr_.FtValue(); }
+  inline int32_t fd_reg() const { return instr_.FdValue(); }
+  inline int32_t sa() const { return instr_.SaValue(); }
+  inline int32_t lsa_sa() const { return instr_.LsaSaValue(); }
 
   inline void SetResult(const int32_t rd_reg, const int64_t alu_out) {
     set_register(rd_reg, alu_out);
     TraceRegWr(alu_out);
   }
 
-  void DecodeTypeImmediate(Instruction* instr);
-  void DecodeTypeJump(Instruction* instr);
+  void DecodeTypeImmediate();
+  void DecodeTypeJump();
 
   // Used for breakpoints and traps.
-  void SoftwareInterrupt(Instruction* instr);
+  void SoftwareInterrupt();
 
   // Compact branch guard.
   void CheckForbiddenSlot(int64_t current_pc) {
@@ -414,9 +446,12 @@
   }
 
   // ICache.
-  static void CheckICache(base::HashMap* i_cache, Instruction* instr);
-  static void FlushOnePage(base::HashMap* i_cache, intptr_t start, size_t size);
-  static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
+  static void CheckICache(base::CustomMatcherHashMap* i_cache,
+                          Instruction* instr);
+  static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
+                           size_t size);
+  static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                 void* page);
 
   enum Exception {
     none,
@@ -461,7 +496,7 @@
   char* last_debugger_input_;
 
   // Icache simulation.
-  base::HashMap* i_cache_;
+  base::CustomMatcherHashMap* i_cache_;
 
   v8::internal::Isolate* isolate_;
 
diff --git a/src/objects-body-descriptors-inl.h b/src/objects-body-descriptors-inl.h
index ccee37b..0252b64 100644
--- a/src/objects-body-descriptors-inl.h
+++ b/src/objects-body-descriptors-inl.h
@@ -465,7 +465,6 @@
     case JS_PROMISE_TYPE:
     case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
     case JS_GENERATOR_OBJECT_TYPE:
-    case JS_MODULE_TYPE:
     case JS_VALUE_TYPE:
     case JS_DATE_TYPE:
     case JS_ARRAY_TYPE:
@@ -475,6 +474,7 @@
     case JS_MAP_TYPE:
     case JS_SET_ITERATOR_TYPE:
     case JS_MAP_ITERATOR_TYPE:
+    case JS_STRING_ITERATOR_TYPE:
     case JS_REGEXP_TYPE:
     case JS_GLOBAL_PROXY_TYPE:
     case JS_GLOBAL_OBJECT_TYPE:
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 7d426a0..3c43f23 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -152,6 +152,9 @@
     case JS_MAP_ITERATOR_TYPE:
       JSMapIterator::cast(this)->JSMapIteratorVerify();
       break;
+    case JS_STRING_ITERATOR_TYPE:
+      JSStringIterator::cast(this)->JSStringIteratorVerify();
+      break;
     case JS_WEAK_MAP_TYPE:
       JSWeakMap::cast(this)->JSWeakMapVerify();
       break;
@@ -562,6 +565,7 @@
   VerifyObjectField(kOptimizedCodeMapOffset);
   VerifyObjectField(kFeedbackMetadataOffset);
   VerifyObjectField(kScopeInfoOffset);
+  VerifyObjectField(kOuterScopeInfoOffset);
   VerifyObjectField(kInstanceClassNameOffset);
   CHECK(function_data()->IsUndefined(GetIsolate()) || IsApiFunction() ||
         HasBytecodeArray() || HasAsmWasmData());
@@ -778,6 +782,14 @@
   CHECK(table()->IsHashTable() || table()->IsUndefined(GetIsolate()));
 }
 
+void JSStringIterator::JSStringIteratorVerify() {
+  CHECK(IsJSStringIterator());
+  JSObjectVerify();
+  CHECK(string()->IsString());
+
+  CHECK_GE(index(), 0);
+  CHECK_LE(index(), String::kMaxLength);
+}
 
 void JSWeakSet::JSWeakSetVerify() {
   CHECK(IsJSWeakSet());
@@ -831,7 +843,6 @@
   }
 }
 
-
 void JSProxy::JSProxyVerify() {
   CHECK(IsJSProxy());
   VerifyPointer(target());
@@ -877,9 +888,7 @@
   CHECK(IsJSTypedArray());
   JSArrayBufferViewVerify();
   VerifyPointer(raw_length());
-  CHECK(raw_length()->IsSmi() || raw_length()->IsHeapNumber() ||
-        raw_length()->IsUndefined(GetIsolate()));
-
+  CHECK(raw_length()->IsSmi() || raw_length()->IsUndefined(GetIsolate()));
   VerifyPointer(elements());
 }
 
@@ -900,6 +909,27 @@
   value()->ObjectVerify();
 }
 
+void PromiseContainer::PromiseContainerVerify() {
+  CHECK(IsPromiseContainer());
+  thenable()->ObjectVerify();
+  then()->ObjectVerify();
+  resolve()->ObjectVerify();
+  reject()->ObjectVerify();
+  before_debug_event()->ObjectVerify();
+  after_debug_event()->ObjectVerify();
+}
+
+void Module::ModuleVerify() {
+  CHECK(IsModule());
+  CHECK(code()->IsSharedFunctionInfo() || code()->IsJSFunction());
+  code()->ObjectVerify();
+  exports()->ObjectVerify();
+  requested_modules()->ObjectVerify();
+  VerifySmiField(kFlagsOffset);
+  embedder_data()->ObjectVerify();
+  CHECK(shared()->name()->IsSymbol());
+  // TODO(neis): Check more.
+}
 
 void PrototypeInfo::PrototypeInfoVerify() {
   CHECK(IsPrototypeInfo());
@@ -911,10 +941,8 @@
   CHECK(validity_cell()->IsCell() || validity_cell()->IsSmi());
 }
 
-
-void SloppyBlockWithEvalContextExtension::
-    SloppyBlockWithEvalContextExtensionVerify() {
-  CHECK(IsSloppyBlockWithEvalContextExtension());
+void ContextExtension::ContextExtensionVerify() {
+  CHECK(IsContextExtension());
   VerifyObjectField(kScopeInfoOffset);
   VerifyObjectField(kExtensionOffset);
 }
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 3d82bf8..af12615 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -27,6 +27,7 @@
 #include "src/isolate.h"
 #include "src/keys.h"
 #include "src/layout-descriptor-inl.h"
+#include "src/lookup-cache-inl.h"
 #include "src/lookup.h"
 #include "src/objects.h"
 #include "src/property.h"
@@ -700,6 +701,7 @@
 TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
 TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
 TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
+TYPE_CHECKER(JSStringIterator, JS_STRING_ITERATOR_TYPE)
 
 bool HeapObject::IsJSWeakCollection() const {
   return IsJSWeakMap() || IsJSWeakSet();
@@ -709,6 +711,8 @@
 
 bool HeapObject::IsDescriptorArray() const { return IsFixedArray(); }
 
+bool HeapObject::IsFrameArray() const { return IsFixedArray(); }
+
 bool HeapObject::IsArrayList() const { return IsFixedArray(); }
 
 bool Object::IsLayoutDescriptor() const {
@@ -790,6 +794,13 @@
   return map() == GetHeap()->scope_info_map();
 }
 
+bool HeapObject::IsModuleInfoEntry() const {
+  return map() == GetHeap()->module_info_entry_map();
+}
+
+bool HeapObject::IsModuleInfo() const {
+  return map() == GetHeap()->module_info_map();
+}
 
 TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
 TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)
@@ -2103,6 +2114,8 @@
       return JSArgumentsObject::kHeaderSize;
     case JS_ERROR_TYPE:
       return JSObject::kHeaderSize;
+    case JS_STRING_ITERATOR_TYPE:
+      return JSStringIterator::kSize;
     default:
       UNREACHABLE();
       return 0;
@@ -2610,6 +2623,29 @@
   return HeapObject::RawField(this, OffsetOfElementAt(index));
 }
 
+#define DEFINE_FRAME_ARRAY_ACCESSORS(name, type)                              \
+  type* FrameArray::name(int frame_ix) const {                                \
+    Object* obj =                                                             \
+        get(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset);    \
+    return type::cast(obj);                                                   \
+  }                                                                           \
+                                                                              \
+  void FrameArray::Set##name(int frame_ix, type* value) {                     \
+    set(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset, value); \
+  }
+FRAME_ARRAY_FIELD_LIST(DEFINE_FRAME_ARRAY_ACCESSORS)
+#undef DEFINE_FRAME_ARRAY_ACCESSORS
+
+bool FrameArray::IsWasmFrame(int frame_ix) const {
+  const int flags = Flags(frame_ix)->value();
+  return (flags & kIsWasmFrame) != 0;
+}
+
+int FrameArray::FrameCount() const {
+  const int frame_count = Smi::cast(get(kFrameCountIndex))->value();
+  DCHECK_LE(0, frame_count);
+  return frame_count;
+}
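FrameArray flattens captured stack-trace frames into a single FixedArray: a count slot (kFrameCountIndex) plus kElementsPerFrame consecutive slots per frame starting at kFirstIndex, which is exactly the index arithmetic the macro-generated accessors above perform. A worked example of that indexing under assumed constants (the values are illustrative, not V8's):

// Hedged sketch of the flat per-frame slot layout used by the accessors above.
constexpr int kFirstIndex = 1;        // slot 0 is assumed to hold the count
constexpr int kElementsPerFrame = 4;  // e.g. receiver, function, code, offset
constexpr int kOffsetOffset = 3;

constexpr int OffsetSlot(int frame_ix) {
  return kFirstIndex + frame_ix * kElementsPerFrame + kOffsetOffset;
}
static_assert(OffsetSlot(0) == 4, "frame 0's offset lives in slot 4");
static_assert(OffsetSlot(2) == 12, "frame 2's offset lives in slot 12");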
 
 bool DescriptorArray::IsEmpty() {
   DCHECK(length() >= kFirstIndex ||
@@ -3223,6 +3259,7 @@
 CAST_ACCESSOR(FixedTypedArrayBase)
 CAST_ACCESSOR(Float32x4)
 CAST_ACCESSOR(Foreign)
+CAST_ACCESSOR(FrameArray)
 CAST_ACCESSOR(GlobalDictionary)
 CAST_ACCESSOR(HandlerTable)
 CAST_ACCESSOR(HeapObject)
@@ -3248,6 +3285,7 @@
 CAST_ACCESSOR(JSRegExp)
 CAST_ACCESSOR(JSSet)
 CAST_ACCESSOR(JSSetIterator)
+CAST_ACCESSOR(JSStringIterator)
 CAST_ACCESSOR(JSTypedArray)
 CAST_ACCESSOR(JSValue)
 CAST_ACCESSOR(JSWeakCollection)
@@ -3255,6 +3293,8 @@
 CAST_ACCESSOR(JSWeakSet)
 CAST_ACCESSOR(LayoutDescriptor)
 CAST_ACCESSOR(Map)
+CAST_ACCESSOR(ModuleInfoEntry)
+CAST_ACCESSOR(ModuleInfo)
 CAST_ACCESSOR(Name)
 CAST_ACCESSOR(NameDictionary)
 CAST_ACCESSOR(NormalizedMapCache)
@@ -5614,6 +5654,13 @@
 
 ACCESSORS(Box, value, Object, kValueOffset)
 
+ACCESSORS(PromiseContainer, thenable, JSReceiver, kThenableOffset)
+ACCESSORS(PromiseContainer, then, JSReceiver, kThenOffset)
+ACCESSORS(PromiseContainer, resolve, JSFunction, kResolveOffset)
+ACCESSORS(PromiseContainer, reject, JSFunction, kRejectOffset)
+ACCESSORS(PromiseContainer, before_debug_event, Object, kBeforeDebugEventOffset)
+ACCESSORS(PromiseContainer, after_debug_event, Object, kAfterDebugEventOffset)
+
 Map* PrototypeInfo::ObjectCreateMap() {
   return Map::cast(WeakCell::cast(object_create_map())->value());
 }
@@ -5662,10 +5709,26 @@
 SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
 BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)
 
-ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
-          kScopeInfoOffset)
-ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
-          kExtensionOffset)
+ACCESSORS(ContextExtension, scope_info, ScopeInfo, kScopeInfoOffset)
+ACCESSORS(ContextExtension, extension, Object, kExtensionOffset)
+
+ACCESSORS(Module, code, Object, kCodeOffset)
+ACCESSORS(Module, exports, ObjectHashTable, kExportsOffset)
+ACCESSORS(Module, requested_modules, FixedArray, kRequestedModulesOffset)
+SMI_ACCESSORS(Module, flags, kFlagsOffset)
+BOOL_ACCESSORS(Module, flags, evaluated, kEvaluatedBit)
+ACCESSORS(Module, embedder_data, Object, kEmbedderDataOffset)
+
+SharedFunctionInfo* Module::shared() const {
+  return code()->IsSharedFunctionInfo() ? SharedFunctionInfo::cast(code())
+                                        : JSFunction::cast(code())->shared();
+}
+
+ModuleInfo* Module::info() const {
+  return shared()->scope_info()->ModuleDescriptorInfo();
+}
+
+uint32_t Module::Hash() const { return Symbol::cast(shared()->name())->Hash(); }
 
 ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
 ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
@@ -5679,8 +5742,10 @@
 ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
 ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
 ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
+ACCESSORS(InterceptorInfo, descriptor, Object, kDescriptorOffset)
 ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
 ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
+ACCESSORS(InterceptorInfo, definer, Object, kDefinerOffset)
 ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
 SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
 BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
@@ -6031,8 +6096,7 @@
   set_compiler_hints(hints);
 }
 
-
-FunctionKind SharedFunctionInfo::kind() {
+FunctionKind SharedFunctionInfo::kind() const {
   return FunctionKindBits::decode(compiler_hints());
 }
 
@@ -6057,23 +6121,12 @@
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
                kDontCrankshaft)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_async, kIsAsyncFunction)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
-               kIsConciseMethod)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_getter_function,
-               kIsGetterFunction)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_setter_function,
-               kIsSetterFunction)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
-               kIsDefaultConstructor)
 BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_asm_wasm_broken,
                kIsAsmWasmBroken)
-
-inline bool SharedFunctionInfo::is_resumable() const {
-  return is_generator() || is_async();
-}
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, requires_class_field_init,
+               kRequiresClassFieldInit)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_class_field_initializer,
+               kIsClassFieldInitializer)
 
 bool Script::HasValidSource() {
   Object* src = this->source();
@@ -6155,6 +6208,9 @@
                             mode);
 }
 
+ACCESSORS(SharedFunctionInfo, outer_scope_info, HeapObject,
+          kOuterScopeInfoOffset)
+
 bool SharedFunctionInfo::is_compiled() const {
   Builtins* builtins = GetIsolate()->builtins();
   DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
@@ -7890,6 +7946,44 @@
 FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
 #undef SCOPE_INFO_FIELD_ACCESSORS
 
+Object* ModuleInfoEntry::export_name() const { return get(kExportNameIndex); }
+
+Object* ModuleInfoEntry::local_name() const { return get(kLocalNameIndex); }
+
+Object* ModuleInfoEntry::import_name() const { return get(kImportNameIndex); }
+
+Object* ModuleInfoEntry::module_request() const {
+  return get(kModuleRequestIndex);
+}
+
+FixedArray* ModuleInfo::module_requests() const {
+  return FixedArray::cast(get(kModuleRequestsIndex));
+}
+
+FixedArray* ModuleInfo::special_exports() const {
+  return FixedArray::cast(get(kSpecialExportsIndex));
+}
+
+FixedArray* ModuleInfo::regular_exports() const {
+  return FixedArray::cast(get(kRegularExportsIndex));
+}
+
+FixedArray* ModuleInfo::regular_imports() const {
+  return FixedArray::cast(get(kRegularImportsIndex));
+}
+
+FixedArray* ModuleInfo::namespace_imports() const {
+  return FixedArray::cast(get(kNamespaceImportsIndex));
+}
+
+#ifdef DEBUG
+bool ModuleInfo::Equals(ModuleInfo* other) const {
+  return regular_exports() == other->regular_exports() &&
+         regular_imports() == other->regular_imports() &&
+         special_exports() == other->special_exports() &&
+         namespace_imports() == other->namespace_imports();
+}
+#endif
 
 void Map::ClearCodeCache(Heap* heap) {
   // No write barrier is needed since empty_fixed_array is not in new space.
@@ -8176,6 +8270,12 @@
                                                     FAST_ELEMENTS, 2);
 }
 
+ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
+ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
+
+ACCESSORS(JSStringIterator, string, String, kStringOffset)
+SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)
+
 #undef TYPE_CHECKER
 #undef CAST_ACCESSOR
 #undef INT_ACCESSORS
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 6f1f746..9054371 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -28,7 +28,8 @@
 
 void Object::Print(std::ostream& os) {  // NOLINT
   if (IsSmi()) {
-    Smi::cast(this)->SmiPrint(os);
+    os << "Smi: " << std::hex << "0x" << Smi::cast(this)->value();
+    os << std::dec << " (" << Smi::cast(this)->value() << ")\n";
   } else {
     HeapObject::cast(this)->HeapObjectPrint(os);
   }
@@ -52,6 +53,7 @@
   HandleScope scope(GetIsolate());
   if (instance_type < FIRST_NONSTRING_TYPE) {
     String::cast(this)->StringPrint(os);
+    os << "\n";
     return;
   }
 
@@ -318,18 +320,37 @@
   }
 }
 
+namespace {
+
+template <class T>
+double GetScalarElement(T* array, int index) {
+  return array->get_scalar(index);
+}
+
+double GetScalarElement(FixedDoubleArray* array, int index) {
+  if (array->is_the_hole(index)) return bit_cast<double>(kHoleNanInt64);
+  return array->get_scalar(index);
+}
+
+bool is_the_hole(double maybe_hole) {
+  return bit_cast<uint64_t>(maybe_hole) == kHoleNanInt64;
+}
+
+}  // namespace
+
 template <class T, bool print_the_hole>
 static void DoPrintElements(std::ostream& os, Object* object) {  // NOLINT
   T* array = T::cast(object);
   if (array->length() == 0) return;
   int previous_index = 0;
-  double previous_value = array->get_scalar(0);
+  double previous_value = GetScalarElement(array, 0);
   double value = 0.0;
   int i;
   for (i = 1; i <= array->length(); i++) {
-    if (i < array->length()) value = array->get_scalar(i);
+    if (i < array->length()) value = GetScalarElement(array, i);
     bool values_are_nan = std::isnan(previous_value) && std::isnan(value);
-    if ((previous_value == value || values_are_nan) && i != array->length()) {
+    if (i != array->length() && (previous_value == value || values_are_nan) &&
+        is_the_hole(previous_value) == is_the_hole(value)) {
       continue;
     }
     os << "\n";
@@ -339,8 +360,7 @@
       ss << '-' << (i - 1);
     }
     os << std::setw(12) << ss.str() << ": ";
-    if (print_the_hole &&
-        FixedDoubleArray::cast(object)->is_the_hole(previous_index)) {
+    if (print_the_hole && is_the_hole(previous_value)) {
       os << "<the_hole>";
     } else {
       os << previous_value;
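
The printer hunks above collapse runs of equal element values and now treat the hole, stored as the dedicated NaN bit pattern kHoleNanInt64, as distinct from a genuine NaN so the two never merge into one run. A self-contained sketch of that run-collapsing idea, using an invented stand-in bit pattern rather than V8's real constant:

#include <cmath>
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Stand-in for kHoleNanInt64: some NaN bit pattern reserved to mean "hole".
const uint64_t kToyHoleBits = 0xfff8000000000001ull;

bool IsToyHole(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);
  return bits == kToyHoleBits;
}

void PrintRuns(const std::vector<double>& elements) {
  for (size_t i = 0; i < elements.size();) {
    size_t j = i + 1;
    // Grow the run while values match. NaN != NaN, so NaN-ness is compared
    // explicitly, and a hole never joins a run of ordinary NaNs.
    while (j < elements.size() &&
           IsToyHole(elements[j]) == IsToyHole(elements[i]) &&
           (elements[j] == elements[i] ||
            (std::isnan(elements[j]) && std::isnan(elements[i])))) {
      ++j;
    }
    std::cout << i;
    if (j - i > 1) std::cout << "-" << (j - 1);
    if (IsToyHole(elements[i])) {
      std::cout << ": <the_hole>\n";
    } else {
      std::cout << ": " << elements[i] << "\n";
    }
    i = j;
  }
}

int main() {
  double hole;
  std::memcpy(&hole, &kToyHoleBits, sizeof hole);
  PrintRuns({1.0, 1.0, hole, hole, std::nan(""), 2.5});
}
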
@@ -390,22 +410,12 @@
       break;
     }
 
-#define PRINT_ELEMENTS(Kind, Type)                \
-  case Kind: {                                    \
-    DoPrintElements<Type, false>(os, elements()); \
-    break;                                        \
+#define PRINT_ELEMENTS(Type, type, TYPE, elementType, size)     \
+  case TYPE##_ELEMENTS: {                                       \
+    DoPrintElements<Fixed##Type##Array, false>(os, elements()); \
+    break;                                                      \
   }
-
-      PRINT_ELEMENTS(UINT8_ELEMENTS, FixedUint8Array)
-      PRINT_ELEMENTS(UINT8_CLAMPED_ELEMENTS, FixedUint8ClampedArray)
-      PRINT_ELEMENTS(INT8_ELEMENTS, FixedInt8Array)
-      PRINT_ELEMENTS(UINT16_ELEMENTS, FixedUint16Array)
-      PRINT_ELEMENTS(INT16_ELEMENTS, FixedInt16Array)
-      PRINT_ELEMENTS(UINT32_ELEMENTS, FixedUint32Array)
-      PRINT_ELEMENTS(INT32_ELEMENTS, FixedInt32Array)
-      PRINT_ELEMENTS(FLOAT32_ELEMENTS, FixedFloat32Array)
-      PRINT_ELEMENTS(FLOAT64_ELEMENTS, FixedFloat64Array)
-
+      TYPED_ARRAYS(PRINT_ELEMENTS)
 #undef PRINT_ELEMENTS
 
     case DICTIONARY_ELEMENTS:
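
The rewritten PRINT_ELEMENTS case in the hunk above leans on the TYPED_ARRAYS X-macro: a list macro that invokes a caller-supplied macro once per typed-array kind, so the switch arms are generated instead of spelled out. A minimal, V8-free illustration of the pattern (the parameter list is shortened to three entries here; the real list macro passes five):

#include <iostream>

// List macro: calls V once per kind.
#define TOY_TYPED_ARRAYS(V) \
  V(Uint8, uint8, UINT8)    \
  V(Int32, int32, INT32)    \
  V(Float64, float64, FLOAT64)

enum ToyElementsKind {
#define DEFINE_KIND(Type, type, TYPE) TYPE##_ELEMENTS,
  TOY_TYPED_ARRAYS(DEFINE_KIND)
#undef DEFINE_KIND
};

const char* KindName(ToyElementsKind kind) {
  switch (kind) {
#define KIND_NAME(Type, type, TYPE) \
  case TYPE##_ELEMENTS:             \
    return "Fixed" #Type "Array";
    TOY_TYPED_ARRAYS(KIND_NAME)
#undef KIND_NAME
  }
  return "unknown";
}

int main() { std::cout << KindName(INT32_ELEMENTS) << "\n"; }  // FixedInt32Array
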
@@ -732,6 +742,16 @@
         os << Code::ICState2String(nexus.StateFromFeedback());
         break;
       }
+      case FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC: {
+        BinaryOpICNexus nexus(this, slot);
+        os << Code::ICState2String(nexus.StateFromFeedback());
+        break;
+      }
+      case FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC: {
+        CompareICNexus nexus(this, slot);
+        os << Code::ICState2String(nexus.StateFromFeedback());
+        break;
+      }
       case FeedbackVectorSlotKind::GENERAL:
         break;
       case FeedbackVectorSlotKind::INVALID:
@@ -911,7 +931,7 @@
   JSObjectPrintHeader(os, this, "JSArrayBuffer");
   os << "\n - backing_store = " << backing_store();
   os << "\n - byte_length = " << Brief(byte_length());
-  if (was_neutered()) os << " - neutered\n";
+  if (was_neutered()) os << "\n - neutered";
   JSObjectPrintBody(os, this, !was_neutered());
 }
 
@@ -922,7 +942,7 @@
   os << "\n - byte_offset = " << Brief(byte_offset());
   os << "\n - byte_length = " << Brief(byte_length());
   os << "\n - length = " << Brief(length());
-  if (WasNeutered()) os << " - neutered\n";
+  if (WasNeutered()) os << "\n - neutered";
   JSObjectPrintBody(os, this, !WasNeutered());
 }
 
@@ -932,7 +952,7 @@
   os << "\n - buffer =" << Brief(buffer());
   os << "\n - byte_offset = " << Brief(byte_offset());
   os << "\n - byte_length = " << Brief(byte_length());
-  if (WasNeutered()) os << " - neutered\n";
+  if (WasNeutered()) os << "\n - neutered";
   JSObjectPrintBody(os, this, !WasNeutered());
 }
 
@@ -954,9 +974,9 @@
   os << "\n - name = " << Brief(shared()->name());
   os << "\n - formal_parameter_count = "
      << shared()->internal_formal_parameter_count();
-  if (shared()->is_generator()) {
+  if (IsGeneratorFunction(shared()->kind())) {
     os << "\n   - generator";
-  } else if (shared()->is_async()) {
+  } else if (IsAsyncFunction(shared()->kind())) {
     os << "\n   - async";
   }
   os << "\n - context = " << Brief(context());
@@ -1127,6 +1147,26 @@
   os << "\n";
 }
 
+void PromiseContainer::PromiseContainerPrint(std::ostream& os) {  // NOLINT
+  HeapObject::PrintHeader(os, "PromiseContainer");
+  os << "\n - thenable: " << Brief(thenable());
+  os << "\n - then: " << Brief(then());
+  os << "\n - resolve: " << Brief(resolve());
+  os << "\n - reject: " << Brief(reject());
+  os << "\n - before debug event: " << Brief(before_debug_event());
+  os << "\n - after debug event: " << Brief(after_debug_event());
+  os << "\n";
+}
+
+void Module::ModulePrint(std::ostream& os) {  // NOLINT
+  HeapObject::PrintHeader(os, "Module");
+  os << "\n - code: " << Brief(code());
+  os << "\n - exports: " << Brief(exports());
+  os << "\n - requested_modules: " << Brief(requested_modules());
+  os << "\n - evaluated: " << evaluated();
+  os << "\n - embedder_data: " << Brief(embedder_data());
+  os << "\n";
+}
 
 void PrototypeInfo::PrototypeInfoPrint(std::ostream& os) {  // NOLINT
   HeapObject::PrintHeader(os, "PrototypeInfo");
@@ -1136,10 +1176,8 @@
   os << "\n";
 }
 
-
-void SloppyBlockWithEvalContextExtension::
-    SloppyBlockWithEvalContextExtensionPrint(std::ostream& os) {  // NOLINT
-  HeapObject::PrintHeader(os, "SloppyBlockWithEvalContextExtension");
+void ContextExtension::ContextExtensionPrint(std::ostream& os) {  // NOLINT
+  HeapObject::PrintHeader(os, "ContextExtension");
   os << "\n - scope_info: " << Brief(scope_info());
   os << "\n - extension: " << Brief(extension());
   os << "\n";
diff --git a/src/objects.cc b/src/objects.cc
index 00721c2..44271db 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -8,6 +8,8 @@
 #include <iomanip>
 #include <memory>
 #include <sstream>
+#include <unordered_map>
+#include <unordered_set>
 
 #include "src/objects-inl.h"
 
@@ -60,7 +62,7 @@
 #include "src/string-stream.h"
 #include "src/utils.h"
 #include "src/wasm/wasm-module.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 #ifdef ENABLE_DISASSEMBLER
 #include "src/disasm.h"
@@ -995,12 +997,12 @@
       case LookupIterator::ACCESSOR:
         return GetPropertyWithAccessor(it);
       case LookupIterator::INTEGER_INDEXED_EXOTIC:
-        return ReadAbsentProperty(it);
+        return it->isolate()->factory()->undefined_value();
       case LookupIterator::DATA:
         return it->GetDataValue();
     }
   }
-  return ReadAbsentProperty(it);
+  return it->isolate()->factory()->undefined_value();
 }
 
 
@@ -1349,7 +1351,7 @@
                                    Object::DONT_THROW);
     Handle<Object> result = args.Call(call_fun, name);
     RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
-    if (result.is_null()) return ReadAbsentProperty(isolate, receiver, name);
+    if (result.is_null()) return isolate->factory()->undefined_value();
     // Rebox handle before return.
     return handle(*result, isolate);
   }
@@ -1366,7 +1368,7 @@
         receiver, Handle<JSReceiver>::cast(getter));
   }
   // Getter is not a function.
-  return ReadAbsentProperty(isolate, receiver, it->GetName());
+  return isolate->factory()->undefined_value();
 }
 
 // static
@@ -1677,6 +1679,71 @@
   return Just(result);
 }
 
+Maybe<bool> DefinePropertyWithInterceptorInternal(
+    LookupIterator* it, Handle<InterceptorInfo> interceptor,
+    Object::ShouldThrow should_throw, PropertyDescriptor& desc) {
+  Isolate* isolate = it->isolate();
+  // Make sure that the top context does not change when doing callbacks or
+  // interceptor calls.
+  AssertNoContextChange ncc(isolate);
+
+  if (interceptor->definer()->IsUndefined(isolate)) return Just(false);
+
+  Handle<JSObject> holder = it->GetHolder<JSObject>();
+  bool result;
+  Handle<Object> receiver = it->GetReceiver();
+  if (!receiver->IsJSReceiver()) {
+    ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, receiver,
+                                     Object::ConvertReceiver(isolate, receiver),
+                                     Nothing<bool>());
+  }
+  PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
+                                 *holder, should_throw);
+
+  std::unique_ptr<v8::PropertyDescriptor> descriptor(
+      new v8::PropertyDescriptor());
+  if (PropertyDescriptor::IsAccessorDescriptor(&desc)) {
+    descriptor.reset(new v8::PropertyDescriptor(
+        v8::Utils::ToLocal(desc.get()), v8::Utils::ToLocal(desc.set())));
+  } else if (PropertyDescriptor::IsDataDescriptor(&desc)) {
+    if (desc.has_writable()) {
+      descriptor.reset(new v8::PropertyDescriptor(
+          v8::Utils::ToLocal(desc.value()), desc.writable()));
+    } else {
+      descriptor.reset(
+          new v8::PropertyDescriptor(v8::Utils::ToLocal(desc.value())));
+    }
+  }
+  if (desc.has_enumerable()) {
+    descriptor->set_enumerable(desc.enumerable());
+  }
+  if (desc.has_configurable()) {
+    descriptor->set_configurable(desc.configurable());
+  }
+
+  if (it->IsElement()) {
+    uint32_t index = it->index();
+    v8::IndexedPropertyDefinerCallback definer =
+        v8::ToCData<v8::IndexedPropertyDefinerCallback>(interceptor->definer());
+    result = !args.Call(definer, index, *descriptor).is_null();
+  } else {
+    Handle<Name> name = it->name();
+    DCHECK(!name->IsPrivate());
+
+    if (name->IsSymbol() && !interceptor->can_intercept_symbols()) {
+      return Just(false);
+    }
+
+    v8::GenericNamedPropertyDefinerCallback definer =
+        v8::ToCData<v8::GenericNamedPropertyDefinerCallback>(
+            interceptor->definer());
+    result = !args.Call(definer, name, *descriptor).is_null();
+  }
+
+  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(it->isolate(), Nothing<bool>());
+  return Just(result);
+}
+
 }  // namespace
 
 MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
@@ -2415,10 +2482,6 @@
       accumulator->Add("<JS Generator>");
       break;
     }
-    case JS_MODULE_TYPE: {
-      accumulator->Add("<JS Module>");
-      break;
-    }
     // All other JSObjects are rather similar to each other (JSObject,
     // JSGlobalProxy, JSGlobalObject, JSUndetectable, JSValue).
     default: {
@@ -3449,9 +3512,16 @@
   // Ensure that in-object space of slow-mode object does not contain random
   // garbage.
   int inobject_properties = new_map->GetInObjectProperties();
-  for (int i = 0; i < inobject_properties; i++) {
-    FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
-    object->RawFastPropertyAtPut(index, Smi::FromInt(0));
+  if (inobject_properties) {
+    Heap* heap = isolate->heap();
+    heap->ClearRecordedSlotRange(
+        object->address() + map->GetInObjectPropertyOffset(0),
+        object->address() + new_instance_size);
+
+    for (int i = 0; i < inobject_properties; i++) {
+      FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
+      object->RawFastPropertyAtPut(index, Smi::FromInt(0));
+    }
   }
 
   isolate->counters()->props_to_dictionary()->Increment();
@@ -4576,13 +4646,6 @@
           if (result.IsNothing() || result.FromJust()) return result;
           // Interceptor modified the store target but failed to set the
           // property.
-          // TODO(jochen): Remove after we've identified the faulty interceptor.
-          if (!store_target_map.is_null() &&
-              *store_target_map != it->GetStoreTarget()->map()) {
-            it->isolate()->PushStackTraceAndDie(
-                0xabababaa, v8::ToCData<void*>(it->GetInterceptor()->setter()),
-                nullptr, 0xabababab);
-          }
           Utils::ApiCheck(store_target_map.is_null() ||
                               *store_target_map == it->GetStoreTarget()->map(),
                           it->IsElement() ? "v8::IndexedPropertySetterCallback"
@@ -4761,17 +4824,6 @@
   return AddDataProperty(&own_lookup, value, NONE, should_throw, store_mode);
 }
 
-MaybeHandle<Object> Object::ReadAbsentProperty(LookupIterator* it) {
-  return it->isolate()->factory()->undefined_value();
-}
-
-MaybeHandle<Object> Object::ReadAbsentProperty(Isolate* isolate,
-                                               Handle<Object> receiver,
-                                               Handle<Object> name) {
-  return isolate->factory()->undefined_value();
-}
-
-
 Maybe<bool> Object::CannotCreateProperty(Isolate* isolate,
                                          Handle<Object> receiver,
                                          Handle<Object> name,
@@ -6542,6 +6594,34 @@
     it.Next();
   }
 
+  // Handle interceptor
+  if (it.state() == LookupIterator::INTERCEPTOR) {
+    Handle<Map> store_target_map;
+    if (it.GetReceiver()->IsJSObject()) {
+      store_target_map = handle(it.GetStoreTarget()->map(), it.isolate());
+    }
+    if (it.HolderIsReceiverOrHiddenPrototype()) {
+      Maybe<bool> result = DefinePropertyWithInterceptorInternal(
+          &it, it.GetInterceptor(), should_throw, *desc);
+      if (result.IsNothing() || result.FromJust()) {
+        return result;
+      }
+      // Interceptor modified the store target but failed to set the
+      // property.
+      if (!store_target_map.is_null() &&
+          *store_target_map != it.GetStoreTarget()->map()) {
+        it.isolate()->PushStackTraceAndDie(
+            0xabababaa, v8::ToCData<void*>(it.GetInterceptor()->definer()),
+            nullptr, 0xabababab);
+      }
+      Utils::ApiCheck(store_target_map.is_null() ||
+                          *store_target_map == it.GetStoreTarget()->map(),
+                      it.IsElement() ? "v8::IndexedPropertyDefinerCallback"
+                                     : "v8::NamedPropertyDefinerCallback",
+                      "Interceptor silently changed store target.");
+    }
+  }
+
   return OrdinaryDefineOwnProperty(&it, desc, should_throw);
 }
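
Both the definer path added in the hunk above and the descriptor path added further below follow the same shape: the embedder's interceptor gets the first chance, and the ordinary object machinery runs only if the interceptor declines. A toy, V8-free sketch of that control flow (every name in it is invented):

#include <functional>
#include <iostream>
#include <map>
#include <string>

struct ToyObject {
  std::map<std::string, int> properties;
  // Returns true when it has fully handled the definition itself.
  std::function<bool(const std::string&, int)> definer_hook;
};

bool DefineOwnProperty(ToyObject* obj, const std::string& name, int value) {
  // First chance goes to the hook, mirroring the INTERCEPTOR state above.
  if (obj->definer_hook && obj->definer_hook(name, value)) return true;
  obj->properties[name] = value;  // ordinary define
  return true;
}

int main() {
  ToyObject obj;
  obj.definer_hook = [](const std::string& name, int) {
    return name == "intercepted";  // swallow just this one property
  };
  DefineOwnProperty(&obj, "intercepted", 1);
  DefineOwnProperty(&obj, "normal", 2);
  std::cout << obj.properties.count("intercepted") << " "
            << obj.properties.count("normal") << "\n";  // prints "0 1"
}
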
 
@@ -7261,6 +7341,57 @@
   return GetOwnPropertyDescriptor(&it, desc);
 }
 
+namespace {
+
+Maybe<bool> GetPropertyDescriptorWithInterceptor(LookupIterator* it,
+                                                 PropertyDescriptor* desc) {
+  if (it->state() == LookupIterator::INTERCEPTOR) {
+    Isolate* isolate = it->isolate();
+    Handle<InterceptorInfo> interceptor = it->GetInterceptor();
+    if (!interceptor->descriptor()->IsUndefined(isolate)) {
+      Handle<Object> result;
+      Handle<JSObject> holder = it->GetHolder<JSObject>();
+
+      Handle<Object> receiver = it->GetReceiver();
+      if (!receiver->IsJSReceiver()) {
+        ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+            isolate, receiver, Object::ConvertReceiver(isolate, receiver),
+            Nothing<bool>());
+      }
+
+      PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
+                                     *holder, Object::DONT_THROW);
+      if (it->IsElement()) {
+        uint32_t index = it->index();
+        v8::IndexedPropertyDescriptorCallback descriptorCallback =
+            v8::ToCData<v8::IndexedPropertyDescriptorCallback>(
+                interceptor->descriptor());
+
+        result = args.Call(descriptorCallback, index);
+      } else {
+        Handle<Name> name = it->name();
+        DCHECK(!name->IsPrivate());
+        v8::GenericNamedPropertyDescriptorCallback descriptorCallback =
+            v8::ToCData<v8::GenericNamedPropertyDescriptorCallback>(
+                interceptor->descriptor());
+        result = args.Call(descriptorCallback, name);
+      }
+      if (!result.is_null()) {
+        // Request successfully intercepted, try to set the property
+        // descriptor.
+        Utils::ApiCheck(
+            PropertyDescriptor::ToPropertyDescriptor(isolate, result, desc),
+            it->IsElement() ? "v8::IndexedPropertyDescriptorCallback"
+                            : "v8::NamedPropertyDescriptorCallback",
+            "Invalid property descriptor.");
+
+        return Just(true);
+      }
+    }
+  }
+  return Just(false);
+}
+}  // namespace
 
 // ES6 9.1.5.1
 // Returns true on success, false if the property didn't exist, nothing if
@@ -7275,6 +7406,13 @@
                                              it->GetName(), desc);
   }
 
+  Maybe<bool> intercepted = GetPropertyDescriptorWithInterceptor(it, desc);
+  MAYBE_RETURN(intercepted, Nothing<bool>());
+  if (intercepted.FromJust()) {
+    return Just(true);
+  }
+
+  // Request was not intercepted, continue as normal.
   // 1. (Assert)
   // 2. If O does not have an own property with key P, return undefined.
   Maybe<PropertyAttributes> maybe = JSObject::GetPropertyAttributes(it);
@@ -9367,12 +9505,6 @@
       *map, map->is_prototype_map()
                 ? &RuntimeCallStats::PrototypeMap_TransitionToDataProperty
                 : &RuntimeCallStats::Map_TransitionToDataProperty);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      map->GetIsolate(),
-      (map->is_prototype_map()
-           ? &tracing::TraceEventStatsTable::
-                 PrototypeMap_TransitionToDataProperty
-           : &tracing::TraceEventStatsTable::Map_TransitionToDataProperty))
 
   DCHECK(name->IsUniqueName());
   DCHECK(!map->is_dictionary_map());
@@ -9459,12 +9591,6 @@
       map->is_prototype_map()
           ? &RuntimeCallStats::PrototypeMap_TransitionToAccessorProperty
           : &RuntimeCallStats::Map_TransitionToAccessorProperty);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate,
-      (map->is_prototype_map()
-           ? &tracing::TraceEventStatsTable::
-                 PrototypeMap_TransitionToAccessorProperty
-           : &tracing::TraceEventStatsTable::Map_TransitionToAccessorProperty));
 
   // At least one of the accessors needs to be a new value.
   DCHECK(!getter->IsNull(isolate) || !setter->IsNull(isolate));
@@ -10177,22 +10303,76 @@
   return kFirstIndex + Length() == capacity;
 }
 
+namespace {
 
-Handle<ArrayList> ArrayList::EnsureSpace(Handle<ArrayList> array, int length) {
+Handle<FixedArray> EnsureSpaceInFixedArray(Handle<FixedArray> array,
+                                           int length) {
   int capacity = array->length();
-  bool empty = (capacity == 0);
-  if (capacity < kFirstIndex + length) {
+  if (capacity < length) {
     Isolate* isolate = array->GetIsolate();
-    int new_capacity = kFirstIndex + length;
+    int new_capacity = length;
     new_capacity = new_capacity + Max(new_capacity / 2, 2);
     int grow_by = new_capacity - capacity;
     array = Handle<ArrayList>::cast(
         isolate->factory()->CopyFixedArrayAndGrow(array, grow_by));
-    if (empty) array->SetLength(0);
   }
   return array;
 }
 
+}  // namespace
+
+Handle<ArrayList> ArrayList::EnsureSpace(Handle<ArrayList> array, int length) {
+  const bool empty = (array->length() == 0);
+  auto ret = Handle<ArrayList>::cast(
+      EnsureSpaceInFixedArray(array, kFirstIndex + length));
+  if (empty) ret->SetLength(0);
+  return ret;
+}
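
The shared EnsureSpaceInFixedArray helper grows only when the array is too small, and then to the requested length plus half again (at least two extra slots), which is what keeps ArrayList's amortized O(1) insertion claim true. The arithmetic in isolation:

#include <algorithm>
#include <iostream>

// Same growth rule as EnsureSpaceInFixedArray: only grow when too small, and
// then to length + max(length / 2, 2).
int GrownCapacity(int capacity, int length) {
  if (capacity >= length) return capacity;
  return length + std::max(length / 2, 2);
}

int main() {
  std::cout << GrownCapacity(0, 1) << "\n";   // 3
  std::cout << GrownCapacity(3, 4) << "\n";   // 6
  std::cout << GrownCapacity(6, 7) << "\n";   // 10
  std::cout << GrownCapacity(10, 8) << "\n";  // 10 (no growth needed)
}
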
+
+// static
+Handle<FrameArray> FrameArray::AppendJSFrame(Handle<FrameArray> in,
+                                             Handle<Object> receiver,
+                                             Handle<JSFunction> function,
+                                             Handle<AbstractCode> code,
+                                             int offset, int flags) {
+  const int frame_count = in->FrameCount();
+  const int new_length = LengthFor(frame_count + 1);
+  Handle<FrameArray> array = EnsureSpace(in, new_length);
+  array->SetReceiver(frame_count, *receiver);
+  array->SetFunction(frame_count, *function);
+  array->SetCode(frame_count, *code);
+  array->SetOffset(frame_count, Smi::FromInt(offset));
+  array->SetFlags(frame_count, Smi::FromInt(flags));
+  array->set(kFrameCountIndex, Smi::FromInt(frame_count + 1));
+  return array;
+}
+
+// static
+Handle<FrameArray> FrameArray::AppendWasmFrame(Handle<FrameArray> in,
+                                               Handle<Object> wasm_object,
+                                               int wasm_function_index,
+                                               Handle<AbstractCode> code,
+                                               int offset, int flags) {
+  const int frame_count = in->FrameCount();
+  const int new_length = LengthFor(frame_count + 1);
+  Handle<FrameArray> array = EnsureSpace(in, new_length);
+  array->SetWasmObject(frame_count, *wasm_object);
+  array->SetWasmFunctionIndex(frame_count, Smi::FromInt(wasm_function_index));
+  array->SetCode(frame_count, *code);
+  array->SetOffset(frame_count, Smi::FromInt(offset));
+  array->SetFlags(frame_count, Smi::FromInt(flags));
+  array->set(kFrameCountIndex, Smi::FromInt(frame_count + 1));
+  return array;
+}
+
+void FrameArray::ShrinkToFit() { Shrink(LengthFor(FrameCount())); }
+
+// static
+Handle<FrameArray> FrameArray::EnsureSpace(Handle<FrameArray> array,
+                                           int length) {
+  return Handle<FrameArray>::cast(EnsureSpaceInFixedArray(array, length));
+}
+
 Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
                                                   int number_of_descriptors,
                                                   int slack,
@@ -10919,7 +11099,7 @@
       if ((type & kStringRepresentationMask) != kConsStringTag) {
         AdjustMaximumDepth();
         int length = string->length();
-        DCHECK(length != 0);
+        if (length == 0) break;  // Skip empty left-hand sides of ConsStrings.
         consumed_ += length;
         return string;
       }
@@ -11461,6 +11641,118 @@
   return SearchString(isolate, seq_sub.ToUC16Vector(), pat_vector, start_index);
 }
 
+namespace {  // for String.prototype.lastIndexOf
+namespace {  // for String.prototype.lastIndexOf
+
+template <typename schar, typename pchar>
+int StringMatchBackwards(Vector<const schar> subject,
+                         Vector<const pchar> pattern, int idx) {
+  int pattern_length = pattern.length();
+  DCHECK(pattern_length >= 1);
+  DCHECK(idx + pattern_length <= subject.length());
+
+  if (sizeof(schar) == 1 && sizeof(pchar) > 1) {
+    for (int i = 0; i < pattern_length; i++) {
+      uc16 c = pattern[i];
+      if (c > String::kMaxOneByteCharCode) {
+        return -1;
+      }
+    }
+  }
+
+  pchar pattern_first_char = pattern[0];
+  for (int i = idx; i >= 0; i--) {
+    if (subject[i] != pattern_first_char) continue;
+    int j = 1;
+    while (j < pattern_length) {
+      if (pattern[j] != subject[i + j]) {
+        break;
+      }
+      j++;
+    }
+    if (j == pattern_length) {
+      return i;
+    }
+  }
+  return -1;
+}
+
+}  // namespace
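
StringMatchBackwards above scans right to left from the requested start position and reports the last occurrence of the pattern that begins at or before it; the one-byte/two-byte template parameters only change the character type, and the early bailout covers a two-byte pattern that cannot occur in a one-byte subject. A standalone demo of the same scan over std::string (the clamping and the empty-pattern case are handled by String::LastIndexOf in the code below):

#include <algorithm>
#include <cassert>
#include <string>

// Largest index <= from_index at which a non-empty pattern starts in subject,
// or -1. Mirrors the right-to-left scan of StringMatchBackwards.
int MatchBackwards(const std::string& subject, const std::string& pattern,
                   int from_index) {
  if (pattern.empty() || pattern.size() > subject.size()) return -1;
  int start = std::min(from_index,
                       static_cast<int>(subject.size() - pattern.size()));
  for (int i = start; i >= 0; i--) {
    if (subject.compare(i, pattern.size(), pattern) == 0) return i;
  }
  return -1;
}

int main() {
  assert(MatchBackwards("abab", "ab", 3) == 2);
  assert(MatchBackwards("abab", "ab", 1) == 0);  // match at 2 starts too late
  assert(MatchBackwards("abab", "ba", 3) == 1);
  assert(MatchBackwards("abc", "abcd", 2) == -1);
}
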
+
+Object* String::LastIndexOf(Isolate* isolate, Handle<Object> receiver,
+                            Handle<Object> search, Handle<Object> position) {
+  if (receiver->IsNull(isolate) || receiver->IsUndefined(isolate)) {
+    THROW_NEW_ERROR_RETURN_FAILURE(
+        isolate, NewTypeError(MessageTemplate::kCalledOnNullOrUndefined,
+                              isolate->factory()->NewStringFromAsciiChecked(
+                                  "String.prototype.lastIndexOf")));
+  }
+  Handle<String> receiver_string;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, receiver_string,
+                                     Object::ToString(isolate, receiver));
+
+  Handle<String> search_string;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, search_string,
+                                     Object::ToString(isolate, search));
+
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, position,
+                                     Object::ToNumber(position));
+
+  uint32_t start_index;
+
+  if (position->IsNaN()) {
+    start_index = receiver_string->length();
+  } else {
+    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, position,
+                                       Object::ToInteger(isolate, position));
+
+    double position_number = std::max(position->Number(), 0.0);
+    position_number = std::min(position_number,
+                               static_cast<double>(receiver_string->length()));
+    start_index = static_cast<uint32_t>(position_number);
+  }
+
+  uint32_t pattern_length = search_string->length();
+  uint32_t receiver_length = receiver_string->length();
+
+  if (start_index + pattern_length > receiver_length) {
+    start_index = receiver_length - pattern_length;
+  }
+
+  if (pattern_length == 0) {
+    return Smi::FromInt(start_index);
+  }
+
+  receiver_string = String::Flatten(receiver_string);
+  search_string = String::Flatten(search_string);
+
+  int last_index = -1;
+  DisallowHeapAllocation no_gc;  // ensure vectors stay valid
+
+  String::FlatContent receiver_content = receiver_string->GetFlatContent();
+  String::FlatContent search_content = search_string->GetFlatContent();
+
+  if (search_content.IsOneByte()) {
+    Vector<const uint8_t> pat_vector = search_content.ToOneByteVector();
+    if (receiver_content.IsOneByte()) {
+      last_index = StringMatchBackwards(receiver_content.ToOneByteVector(),
+                                        pat_vector, start_index);
+    } else {
+      last_index = StringMatchBackwards(receiver_content.ToUC16Vector(),
+                                        pat_vector, start_index);
+    }
+  } else {
+    Vector<const uc16> pat_vector = search_content.ToUC16Vector();
+    if (receiver_content.IsOneByte()) {
+      last_index = StringMatchBackwards(receiver_content.ToOneByteVector(),
+                                        pat_vector, start_index);
+    } else {
+      last_index = StringMatchBackwards(receiver_content.ToUC16Vector(),
+                                        pat_vector, start_index);
+    }
+  }
+  return Smi::FromInt(last_index);
+}
+
 bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
   int slen = length();
   // Can't check exact length equality, but we can check bounds.
@@ -12361,8 +12653,6 @@
 void Map::SetPrototype(Handle<Map> map, Handle<Object> prototype,
                        PrototypeOptimizationMode proto_mode) {
   RuntimeCallTimerScope stats_scope(*map, &RuntimeCallStats::Map_SetPrototype);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      map->GetIsolate(), &tracing::TraceEventStatsTable::Map_SetPrototype);
 
   bool is_hidden = false;
   if (prototype->IsJSObject()) {
@@ -12562,7 +12852,6 @@
     case JS_MAP_ITERATOR_TYPE:
     case JS_MAP_TYPE:
     case JS_MESSAGE_OBJECT_TYPE:
-    case JS_MODULE_TYPE:
     case JS_OBJECT_TYPE:
     case JS_ERROR_TYPE:
     case JS_ARGUMENTS_TYPE:
@@ -12620,7 +12909,8 @@
 
 
 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
-  DCHECK(function->IsConstructor() || function->shared()->is_resumable());
+  DCHECK(function->IsConstructor() ||
+         IsResumableFunction(function->shared()->kind()));
   if (function->has_initial_map()) return;
   Isolate* isolate = function->GetIsolate();
 
@@ -12631,7 +12921,7 @@
   // First create a new map with the size and number of in-object properties
   // suggested by the function.
   InstanceType instance_type;
-  if (function->shared()->is_resumable()) {
+  if (IsResumableFunction(function->shared()->kind())) {
     instance_type = JS_GENERATOR_OBJECT_TYPE;
   } else {
     instance_type = JS_OBJECT_TYPE;
@@ -12862,17 +13152,18 @@
   }
 
   IncrementalStringBuilder builder(isolate);
-  if (!shared_info->is_arrow()) {
-    if (shared_info->is_concise_method()) {
-      if (shared_info->is_generator()) {
+  FunctionKind kind = shared_info->kind();
+  if (!IsArrowFunction(kind)) {
+    if (IsConciseMethod(kind)) {
+      if (IsGeneratorFunction(kind)) {
         builder.AppendCharacter('*');
-      } else if (shared_info->is_async()) {
+      } else if (IsAsyncFunction(kind)) {
         builder.AppendCString("async ");
       }
     } else {
-      if (shared_info->is_generator()) {
+      if (IsGeneratorFunction(kind)) {
         builder.AppendCString("function* ");
-      } else if (shared_info->is_async()) {
+      } else if (IsAsyncFunction(kind)) {
         builder.AppendCString("async function ");
       } else {
         builder.AppendCString("function ");
@@ -13455,9 +13746,9 @@
 
 void SharedFunctionInfo::InitFromFunctionLiteral(
     Handle<SharedFunctionInfo> shared_info, FunctionLiteral* lit) {
-  // When adding fields here, make sure Scope::AnalyzePartially is updated
-  // accordingly.
-  shared_info->set_length(lit->scope()->default_function_length());
+  // When adding fields here, make sure DeclarationScope::AnalyzePartially is
+  // updated accordingly.
+  shared_info->set_length(lit->scope()->arity());
   shared_info->set_internal_formal_parameter_count(lit->parameter_count());
   shared_info->set_function_token_position(lit->function_token_position());
   shared_info->set_start_position(lit->start_position());
@@ -13481,6 +13772,9 @@
   }
   shared_info->set_needs_home_object(lit->scope()->NeedsHomeObject());
   shared_info->set_asm_function(lit->scope()->asm_function());
+  shared_info->set_requires_class_field_init(lit->requires_class_field_init());
+  shared_info->set_is_class_field_initializer(
+      lit->is_class_field_initializer());
   SetExpectedNofPropertiesFromEstimate(shared_info, lit);
 }
 
@@ -15433,10 +15727,11 @@
   return false;
 }
 
-
-void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
+template <AllocationSiteUpdateMode update_or_check>
+bool AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                               ElementsKind to_kind) {
   Isolate* isolate = site->GetIsolate();
+  bool result = false;
 
   if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
     Handle<JSArray> transition_info =
@@ -15452,6 +15747,9 @@
       uint32_t length = 0;
       CHECK(transition_info->length()->ToArrayLength(&length));
       if (length <= kMaximumArrayBytesToPretransition) {
+        if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) {
+          return true;
+        }
         if (FLAG_trace_track_allocation_sites) {
           bool is_nested = site->IsNestedSite();
           PrintF(
@@ -15464,6 +15762,7 @@
         JSObject::TransitionElementsKind(transition_info, to_kind);
         site->dependent_code()->DeoptimizeDependentCodeGroup(
             isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
+        result = true;
       }
     }
   } else {
@@ -15473,6 +15772,7 @@
       to_kind = GetHoleyElementsKind(to_kind);
     }
     if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
+      if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) return true;
       if (FLAG_trace_track_allocation_sites) {
         PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
                reinterpret_cast<void*>(*site),
@@ -15482,8 +15782,10 @@
       site->SetElementsKind(to_kind);
       site->dependent_code()->DeoptimizeDependentCodeGroup(
           isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
+      result = true;
     }
   }
+  return result;
 }
 
 
@@ -15499,13 +15801,13 @@
   return NULL;
 }
 
-
-void JSObject::UpdateAllocationSite(Handle<JSObject> object,
+template <AllocationSiteUpdateMode update_or_check>
+bool JSObject::UpdateAllocationSite(Handle<JSObject> object,
                                     ElementsKind to_kind) {
-  if (!object->IsJSArray()) return;
+  if (!object->IsJSArray()) return false;
 
   Heap* heap = object->GetHeap();
-  if (!heap->InNewSpace(*object)) return;
+  if (!heap->InNewSpace(*object)) return false;
 
   Handle<AllocationSite> site;
   {
@@ -15513,14 +15815,21 @@
 
     AllocationMemento* memento =
         heap->FindAllocationMemento<Heap::kForRuntime>(*object);
-    if (memento == NULL) return;
+    if (memento == NULL) return false;
 
     // Walk through to the Allocation Site
     site = handle(memento->GetAllocationSite());
   }
-  AllocationSite::DigestTransitionFeedback(site, to_kind);
+  return AllocationSite::DigestTransitionFeedback<update_or_check>(site,
+                                                                   to_kind);
 }
 
+template bool
+JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kCheckOnly>(
+    Handle<JSObject> object, ElementsKind to_kind);
+
+template bool JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kUpdate>(
+    Handle<JSObject> object, ElementsKind to_kind);
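
UpdateAllocationSite and DigestTransitionFeedback now take a compile-time mode, so a caller can ask whether a transition would change anything without performing it; the two explicit instantiations above keep both modes usable from other translation units. A compact sketch of the same check-only/update pattern:

#include <iostream>

enum class UpdateMode { kUpdate, kCheckOnly };

struct Site {
  int elements_kind = 0;
};

// Returns whether a transition to to_kind is (or would be) recorded. In
// kCheckOnly mode the site is left untouched.
template <UpdateMode mode>
bool DigestTransition(Site* site, int to_kind) {
  if (to_kind <= site->elements_kind) return false;  // nothing to generalize
  if (mode == UpdateMode::kCheckOnly) return true;
  site->elements_kind = to_kind;
  return true;
}

int main() {
  Site site;
  std::cout << DigestTransition<UpdateMode::kCheckOnly>(&site, 2)      // 1
            << " " << site.elements_kind                               // still 0
            << " " << DigestTransition<UpdateMode::kUpdate>(&site, 2)  // 1
            << " " << site.elements_kind << "\n";                      // now 2
}
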
 
 void JSObject::TransitionElementsKind(Handle<JSObject> object,
                                       ElementsKind to_kind) {
@@ -15729,7 +16038,7 @@
 }
 
 int FixedArrayBase::GetMaxLengthForNewSpaceAllocation(ElementsKind kind) {
-  return ((Page::kMaxRegularHeapObjectSize - FixedArrayBase::kHeaderSize) >>
+  return ((kMaxRegularHeapObjectSize - FixedArrayBase::kHeaderSize) >>
           ElementsKindToShiftSize(kind));
 }
 
@@ -17984,7 +18293,8 @@
     if (capacity > ObjectHashTable::kMaxCapacity) {
       for (size_t i = 0; i < 2; ++i) {
         isolate->heap()->CollectAllGarbage(
-            Heap::kFinalizeIncrementalMarkingMask, "full object hash table");
+            Heap::kFinalizeIncrementalMarkingMask,
+            GarbageCollectionReason::kFullHashtable);
       }
       table->Rehash(isolate->factory()->undefined_value());
     }
@@ -19281,5 +19591,359 @@
   return false;
 }
 
+namespace {
+
+template <typename T>
+struct HandleValueHash {
+  V8_INLINE size_t operator()(Handle<T> handle) const { return handle->Hash(); }
+};
+
+struct ModuleHandleEqual {
+  V8_INLINE bool operator()(Handle<Module> lhs, Handle<Module> rhs) const {
+    return *lhs == *rhs;
+  }
+};
+
+struct StringHandleEqual {
+  V8_INLINE bool operator()(Handle<String> lhs, Handle<String> rhs) const {
+    return lhs->Equals(*rhs);
+  }
+};
+
+class UnorderedStringSet
+    : public std::unordered_set<Handle<String>, HandleValueHash<String>,
+                                StringHandleEqual,
+                                zone_allocator<Handle<String>>> {
+ public:
+  explicit UnorderedStringSet(Zone* zone)
+      : std::unordered_set<Handle<String>, HandleValueHash<String>,
+                           StringHandleEqual, zone_allocator<Handle<String>>>(
+            2 /* bucket count */, HandleValueHash<String>(),
+            StringHandleEqual(), zone_allocator<Handle<String>>(zone)) {}
+};
+
+}  // anonymous namespace
+
+class Module::ResolveSet
+    : public std::unordered_map<
+          Handle<Module>, UnorderedStringSet*, HandleValueHash<Module>,
+          ModuleHandleEqual, zone_allocator<std::pair<const Handle<Module>,
+                                                      UnorderedStringSet*>>> {
+ public:
+  explicit ResolveSet(Zone* zone)
+      : std::unordered_map<Handle<Module>, UnorderedStringSet*,
+                           HandleValueHash<Module>, ModuleHandleEqual,
+                           zone_allocator<std::pair<const Handle<Module>,
+                                                    UnorderedStringSet*>>>(
+            2 /* bucket count */, HandleValueHash<Module>(),
+            ModuleHandleEqual(),
+            zone_allocator<
+                std::pair<const Handle<Module>, UnorderedStringSet*>>(zone)),
+        zone_(zone) {}
+
+  Zone* zone() const { return zone_; }
+
+ private:
+  Zone* zone_;
+};
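
ResolveSet and UnorderedStringSet are ordinary std::unordered_map/std::unordered_set instances whose nodes are carved out of a Zone, so all resolution bookkeeping vanishes in one go when the Zone is torn down. A rough, V8-free sketch of hooking a standard container up to an arena in the same spirit (the arena here is deliberately simplistic and is not V8's Zone):

#include <cstddef>
#include <iostream>
#include <memory>
#include <string>
#include <unordered_set>
#include <vector>

// A trivially simple arena: allocations live until the arena is destroyed.
struct Arena {
  void* Allocate(size_t bytes) {
    blocks_.emplace_back(new char[bytes]);
    return blocks_.back().get();
  }
  std::vector<std::unique_ptr<char[]>> blocks_;
};

template <typename T>
struct ArenaAllocator {
  using value_type = T;
  explicit ArenaAllocator(Arena* arena) : arena_(arena) {}
  template <typename U>
  ArenaAllocator(const ArenaAllocator<U>& other) : arena_(other.arena_) {}
  T* allocate(size_t n) {
    return static_cast<T*>(arena_->Allocate(n * sizeof(T)));
  }
  void deallocate(T*, size_t) {}  // memory is reclaimed with the arena
  Arena* arena_;
};

template <typename T, typename U>
bool operator==(const ArenaAllocator<T>& a, const ArenaAllocator<U>& b) {
  return a.arena_ == b.arena_;
}
template <typename T, typename U>
bool operator!=(const ArenaAllocator<T>& a, const ArenaAllocator<U>& b) {
  return !(a == b);
}

using ArenaStringSet =
    std::unordered_set<std::string, std::hash<std::string>,
                       std::equal_to<std::string>, ArenaAllocator<std::string>>;

int main() {
  Arena arena;
  ArenaStringSet names(/* bucket count */ 2, std::hash<std::string>(),
                       std::equal_to<std::string>(),
                       ArenaAllocator<std::string>(&arena));
  names.insert("default");
  names.insert("foo");
  std::cout << names.count("foo") << "\n";  // 1; nodes are freed with |arena|
}
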
+
+void Module::CreateIndirectExport(Handle<Module> module, Handle<String> name,
+                                  Handle<ModuleInfoEntry> entry) {
+  Isolate* isolate = module->GetIsolate();
+  Handle<ObjectHashTable> exports(module->exports(), isolate);
+  DCHECK(exports->Lookup(name)->IsTheHole(isolate));
+  exports = ObjectHashTable::Put(exports, name, entry);
+  module->set_exports(*exports);
+}
+
+void Module::CreateExport(Handle<Module> module, Handle<FixedArray> names) {
+  DCHECK_LT(0, names->length());
+  Isolate* isolate = module->GetIsolate();
+  Handle<Cell> cell =
+      isolate->factory()->NewCell(isolate->factory()->undefined_value());
+  Handle<ObjectHashTable> exports(module->exports(), isolate);
+  for (int i = 0, n = names->length(); i < n; ++i) {
+    Handle<String> name(String::cast(names->get(i)), isolate);
+    DCHECK(exports->Lookup(name)->IsTheHole(isolate));
+    exports = ObjectHashTable::Put(exports, name, cell);
+  }
+  module->set_exports(*exports);
+}
+
+void Module::StoreExport(Handle<Module> module, Handle<String> name,
+                         Handle<Object> value) {
+  Handle<Cell> cell(Cell::cast(module->exports()->Lookup(name)));
+  cell->set_value(*value);
+}
+
+Handle<Object> Module::LoadExport(Handle<Module> module, Handle<String> name) {
+  Isolate* isolate = module->GetIsolate();
+  Handle<Object> object(module->exports()->Lookup(name), isolate);
+
+  // TODO(neis): Namespace imports are not yet implemented.  Trying to use this
+  // feature may crash here.
+  if (!object->IsCell()) UNIMPLEMENTED();
+
+  return handle(Handle<Cell>::cast(object)->value(), isolate);
+}
+
+Handle<Object> Module::LoadImport(Handle<Module> module, Handle<String> name,
+                                  int module_request) {
+  Isolate* isolate = module->GetIsolate();
+  Handle<Module> requested_module(
+      Module::cast(module->requested_modules()->get(module_request)), isolate);
+  return Module::LoadExport(requested_module, name);
+}
+
+MaybeHandle<Cell> Module::ResolveImport(Handle<Module> module,
+                                        Handle<String> name, int module_request,
+                                        bool must_resolve,
+                                        Module::ResolveSet* resolve_set) {
+  Isolate* isolate = module->GetIsolate();
+  Handle<Module> requested_module(
+      Module::cast(module->requested_modules()->get(module_request)), isolate);
+  return Module::ResolveExport(requested_module, name, must_resolve,
+                               resolve_set);
+}
+
+MaybeHandle<Cell> Module::ResolveExport(Handle<Module> module,
+                                        Handle<String> name, bool must_resolve,
+                                        Module::ResolveSet* resolve_set) {
+  Isolate* isolate = module->GetIsolate();
+  Handle<Object> object(module->exports()->Lookup(name), isolate);
+  if (object->IsCell()) {
+    // Already resolved (e.g. because it's a local export).
+    return Handle<Cell>::cast(object);
+  }
+
+  // Check for cycle before recursing.
+  {
+    // Attempt insertion with a null string set.
+    auto result = resolve_set->insert({module, nullptr});
+    UnorderedStringSet*& name_set = result.first->second;
+    if (result.second) {
+      // |module| wasn't in the map previously, so allocate a new name set.
+      Zone* zone = resolve_set->zone();
+      name_set =
+          new (zone->New(sizeof(UnorderedStringSet))) UnorderedStringSet(zone);
+    } else if (name_set->count(name)) {
+      // Cycle detected.
+      if (must_resolve) {
+        THROW_NEW_ERROR(
+            isolate,
+            NewSyntaxError(MessageTemplate::kCyclicModuleDependency, name),
+            Cell);
+      }
+      return MaybeHandle<Cell>();
+    }
+    name_set->insert(name);
+  }
+
+  if (object->IsModuleInfoEntry()) {
+    // Not yet resolved indirect export.
+    Handle<ModuleInfoEntry> entry = Handle<ModuleInfoEntry>::cast(object);
+    int module_request = Smi::cast(entry->module_request())->value();
+    Handle<String> import_name(String::cast(entry->import_name()), isolate);
+
+    Handle<Cell> cell;
+    if (!ResolveImport(module, import_name, module_request, true, resolve_set)
+             .ToHandle(&cell)) {
+      DCHECK(isolate->has_pending_exception());
+      return MaybeHandle<Cell>();
+    }
+
+    // The export table may have changed but the entry in question should be
+    // unchanged.
+    Handle<ObjectHashTable> exports(module->exports(), isolate);
+    DCHECK(exports->Lookup(name)->IsModuleInfoEntry());
+
+    exports = ObjectHashTable::Put(exports, name, cell);
+    module->set_exports(*exports);
+    return cell;
+  }
+
+  DCHECK(object->IsTheHole(isolate));
+  return Module::ResolveExportUsingStarExports(module, name, must_resolve,
+                                               resolve_set);
+}
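
ResolveExport guards against chains of indirect exports that loop back on themselves: the ResolveSet records, per module, which names are currently being resolved, and encountering the same (module, name) pair again is reported as a cyclic dependency. The sketch below reproduces that bookkeeping on a toy module graph (module and export names are invented):

#include <cassert>
#include <map>
#include <set>
#include <string>
#include <utility>

// An indirect export: (module, name) re-exports (target_module, target_name).
// A missing entry means the name is a local export (i.e. already resolved).
using Key = std::pair<std::string, std::string>;
std::map<Key, Key> indirect_exports;

enum class Result { kResolved, kCycle };

Result Resolve(const Key& key,
               std::map<std::string, std::set<std::string>>* resolving) {
  // Same check as ResolveSet: is this (module, name) already being resolved?
  if (!(*resolving)[key.first].insert(key.second).second) return Result::kCycle;
  auto it = indirect_exports.find(key);
  if (it == indirect_exports.end()) return Result::kResolved;  // local export
  return Resolve(it->second, resolving);
}

int main() {
  // a exports x from b, b exports x from c, c defines x locally: resolvable.
  indirect_exports[{"a", "x"}] = {"b", "x"};
  indirect_exports[{"b", "x"}] = {"c", "x"};
  std::map<std::string, std::set<std::string>> resolving;
  assert(Resolve({"a", "x"}, &resolving) == Result::kResolved);

  // d and e re-export y from each other: a cycle.
  indirect_exports[{"d", "y"}] = {"e", "y"};
  indirect_exports[{"e", "y"}] = {"d", "y"};
  resolving.clear();
  assert(Resolve({"d", "y"}, &resolving) == Result::kCycle);
}
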
+
+MaybeHandle<Cell> Module::ResolveExportUsingStarExports(
+    Handle<Module> module, Handle<String> name, bool must_resolve,
+    Module::ResolveSet* resolve_set) {
+  Isolate* isolate = module->GetIsolate();
+  if (!name->Equals(isolate->heap()->default_string())) {
+    // Go through all star exports looking for the given name.  If multiple star
+    // exports provide the name, make sure they all map it to the same cell.
+    Handle<Cell> unique_cell;
+    Handle<FixedArray> special_exports(module->info()->special_exports(),
+                                       isolate);
+    for (int i = 0, n = special_exports->length(); i < n; ++i) {
+      i::Handle<i::ModuleInfoEntry> entry(
+          i::ModuleInfoEntry::cast(special_exports->get(i)), isolate);
+      if (!entry->export_name()->IsUndefined(isolate)) {
+        continue;  // Indirect export.
+      }
+      int module_request = Smi::cast(entry->module_request())->value();
+
+      Handle<Cell> cell;
+      if (ResolveImport(module, name, module_request, false, resolve_set)
+              .ToHandle(&cell)) {
+        if (unique_cell.is_null()) unique_cell = cell;
+        if (*unique_cell != *cell) {
+          THROW_NEW_ERROR(
+              isolate, NewSyntaxError(MessageTemplate::kAmbiguousExport, name),
+              Cell);
+        }
+      } else if (isolate->has_pending_exception()) {
+        return MaybeHandle<Cell>();
+      }
+    }
+
+    if (!unique_cell.is_null()) {
+      // Found a unique star export for this name.
+      Handle<ObjectHashTable> exports(module->exports(), isolate);
+      DCHECK(exports->Lookup(name)->IsTheHole(isolate));
+      exports = ObjectHashTable::Put(exports, name, unique_cell);
+      module->set_exports(*exports);
+      return unique_cell;
+    }
+  }
+
+  // Unresolvable.
+  if (must_resolve) {
+    THROW_NEW_ERROR(isolate,
+                    NewSyntaxError(MessageTemplate::kUnresolvableExport, name),
+                    Cell);
+  }
+  return MaybeHandle<Cell>();
+}
+
+bool Module::Instantiate(Handle<Module> module, v8::Local<v8::Context> context,
+                         v8::Module::ResolveCallback callback,
+                         v8::Local<v8::Value> callback_data) {
+  // Already instantiated.
+  if (module->code()->IsJSFunction()) return true;
+
+  Isolate* isolate = module->GetIsolate();
+  Handle<SharedFunctionInfo> shared(SharedFunctionInfo::cast(module->code()),
+                                    isolate);
+  Handle<JSFunction> function =
+      isolate->factory()->NewFunctionFromSharedFunctionInfo(
+          shared,
+          handle(Utils::OpenHandle(*context)->native_context(), isolate));
+  module->set_code(*function);
+
+  Handle<ModuleInfo> module_info(shared->scope_info()->ModuleDescriptorInfo(),
+                                 isolate);
+
+  // Set up local exports.
+  Handle<FixedArray> regular_exports(module_info->regular_exports(), isolate);
+  for (int i = 0, n = regular_exports->length(); i < n; i += 2) {
+    Handle<FixedArray> export_names(
+        FixedArray::cast(regular_exports->get(i + 1)), isolate);
+    CreateExport(module, export_names);
+  }
+
+  // Partially set up indirect exports.
+  // For each indirect export, we create the appropriate slot in the export
+  // table and store its ModuleInfoEntry there.  When we later find the correct
+  // Cell in the module that actually provides the value, we replace the
+  // ModuleInfoEntry by that Cell (see ResolveExport).
+  Handle<FixedArray> special_exports(module_info->special_exports(), isolate);
+  for (int i = 0, n = special_exports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> entry(
+        ModuleInfoEntry::cast(special_exports->get(i)), isolate);
+    Handle<Object> export_name(entry->export_name(), isolate);
+    if (export_name->IsUndefined(isolate)) continue;  // Star export.
+    CreateIndirectExport(module, Handle<String>::cast(export_name), entry);
+  }
+
+  Handle<FixedArray> module_requests(module_info->module_requests(), isolate);
+  for (int i = 0, length = module_requests->length(); i < length; ++i) {
+    Handle<String> specifier(String::cast(module_requests->get(i)), isolate);
+    v8::Local<v8::Module> api_requested_module;
+    // TODO(adamk): Revisit these failure cases once d8 knows how to
+    // persist a module_map across multiple top-level module loads, as
+    // the current module is left in a "half-instantiated" state.
+    if (!callback(context, v8::Utils::ToLocal(specifier),
+                  v8::Utils::ToLocal(module), callback_data)
+             .ToLocal(&api_requested_module)) {
+      // TODO(adamk): Give this a better error message. But this is a
+      // misuse of the API anyway.
+      isolate->ThrowIllegalOperation();
+      return false;
+    }
+    Handle<Module> requested_module = Utils::OpenHandle(*api_requested_module);
+    module->requested_modules()->set(i, *requested_module);
+    if (!Instantiate(requested_module, context, callback, callback_data)) {
+      return false;
+    }
+  }
+
+  Zone zone(isolate->allocator());
+
+  // Resolve imports.
+  Handle<FixedArray> regular_imports(module_info->regular_imports(), isolate);
+  for (int i = 0, n = regular_imports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> entry(
+        ModuleInfoEntry::cast(regular_imports->get(i)), isolate);
+    Handle<String> name(String::cast(entry->import_name()), isolate);
+    int module_request = Smi::cast(entry->module_request())->value();
+    ResolveSet resolve_set(&zone);
+    if (ResolveImport(module, name, module_request, true, &resolve_set)
+            .is_null()) {
+      return false;
+    }
+  }
+
+  // Resolve indirect exports.
+  for (int i = 0, n = special_exports->length(); i < n; ++i) {
+    Handle<ModuleInfoEntry> entry(
+        ModuleInfoEntry::cast(special_exports->get(i)), isolate);
+    Handle<Object> name(entry->export_name(), isolate);
+    if (name->IsUndefined(isolate)) continue;  // Star export.
+    ResolveSet resolve_set(&zone);
+    if (ResolveExport(module, Handle<String>::cast(name), true, &resolve_set)
+            .is_null()) {
+      return false;
+    }
+  }
+
+  return true;
+}
+
+MaybeHandle<Object> Module::Evaluate(Handle<Module> module) {
+  DCHECK(module->code()->IsJSFunction());  // Instantiated.
+
+  Isolate* isolate = module->GetIsolate();
+
+  // Each module can only be evaluated once.
+  if (module->evaluated()) return isolate->factory()->undefined_value();
+  module->set_evaluated(true);
+
+  // Initialization.
+  Handle<JSFunction> function(JSFunction::cast(module->code()), isolate);
+  DCHECK_EQ(MODULE_SCOPE, function->shared()->scope_info()->scope_type());
+  Handle<Object> receiver = isolate->factory()->undefined_value();
+  Handle<Object> argv[] = {module};
+  Handle<Object> generator;
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, generator,
+      Execution::Call(isolate, function, receiver, arraysize(argv), argv),
+      Object);
+
+  // Recursion.
+  Handle<FixedArray> requested_modules(module->requested_modules(), isolate);
+  for (int i = 0, length = requested_modules->length(); i < length; ++i) {
+    Handle<Module> import(Module::cast(requested_modules->get(i)), isolate);
+    RETURN_ON_EXCEPTION(isolate, Evaluate(import), Object);
+  }
+
+  // Evaluation of module body.
+  Handle<JSFunction> resume(
+      isolate->native_context()->generator_next_internal(), isolate);
+  return Execution::Call(isolate, resume, generator, 0, nullptr);
+}
+
 }  // namespace internal
 }  // namespace v8
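
Module::Instantiate and Module::Evaluate above walk the requested-module graph depth-first: Instantiate marks the module by installing its JSFunction before recursing and resolves imports only after every requested module is itself instantiated, and Evaluate sets the evaluated flag before recursing so each body runs at most once and only after its dependencies. A compressed sketch of that ordering, with all error handling and the generator machinery left out:

#include <iostream>
#include <string>
#include <utility>
#include <vector>

struct ToyModule {
  explicit ToyModule(std::string n) : name(std::move(n)) {}
  std::string name;
  std::vector<ToyModule*> requested;  // filled in by a resolver elsewhere
  bool instantiated = false;
  bool evaluated = false;
};

bool Instantiate(ToyModule* module) {
  if (module->instantiated) return true;  // also terminates cycles
  module->instantiated = true;
  for (ToyModule* dep : module->requested) {
    if (!Instantiate(dep)) return false;
  }
  return true;  // imports/exports would be resolved here
}

void Evaluate(ToyModule* module) {
  if (module->evaluated) return;  // each body runs at most once
  module->evaluated = true;
  for (ToyModule* dep : module->requested) Evaluate(dep);
  std::cout << "running body of " << module->name << "\n";
}

int main() {
  ToyModule a("a"), b("b"), c("c");
  a.requested = {&b, &c};
  b.requested = {&c};
  Instantiate(&a);
  Evaluate(&a);  // prints c, then b, then a
}
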
diff --git a/src/objects.h b/src/objects.h
index b7c6703..fcc1f94 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -22,7 +22,7 @@
 #include "src/property-details.h"
 #include "src/unicode-decoder.h"
 #include "src/unicode.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 #if V8_TARGET_ARCH_ARM
 #include "src/arm/constants-arm.h"  // NOLINT
@@ -57,6 +57,7 @@
 //         - JSCollection
 //           - JSSet
 //           - JSMap
+//         - JSStringIterator
 //         - JSSetIterator
 //         - JSMapIterator
 //         - JSWeakCollection
@@ -76,6 +77,7 @@
 //       - BytecodeArray
 //       - FixedArray
 //         - DescriptorArray
+//         - FrameArray
 //         - LiteralsArray
 //         - HashTable
 //           - Dictionary
@@ -93,6 +95,8 @@
 //         - TemplateList
 //         - TransitionArray
 //         - ScopeInfo
+//         - ModuleInfoEntry
+//         - ModuleInfo
 //         - ScriptContextTable
 //         - WeakFixedArray
 //       - FixedDoubleArray
@@ -150,6 +154,7 @@
 //       - BreakPointInfo
 //       - CodeCache
 //       - PrototypeInfo
+//       - Module
 //     - WeakCell
 //
 // Formats of Object*:
@@ -392,8 +397,10 @@
   V(TYPE_FEEDBACK_INFO_TYPE)                                    \
   V(ALIASED_ARGUMENTS_ENTRY_TYPE)                               \
   V(BOX_TYPE)                                                   \
+  V(PROMISE_CONTAINER_TYPE)                                     \
   V(PROTOTYPE_INFO_TYPE)                                        \
-  V(SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION_TYPE)              \
+  V(CONTEXT_EXTENSION_TYPE)                                     \
+  V(MODULE_TYPE)                                                \
                                                                 \
   V(FIXED_ARRAY_TYPE)                                           \
   V(FIXED_DOUBLE_ARRAY_TYPE)                                    \
@@ -409,7 +416,6 @@
   V(JS_ARGUMENTS_TYPE)                                          \
   V(JS_CONTEXT_EXTENSION_OBJECT_TYPE)                           \
   V(JS_GENERATOR_OBJECT_TYPE)                                   \
-  V(JS_MODULE_TYPE)                                             \
   V(JS_GLOBAL_OBJECT_TYPE)                                      \
   V(JS_GLOBAL_PROXY_TYPE)                                       \
   V(JS_API_OBJECT_TYPE)                                         \
@@ -428,6 +434,7 @@
   V(JS_PROMISE_TYPE)                                            \
   V(JS_REGEXP_TYPE)                                             \
   V(JS_ERROR_TYPE)                                              \
+  V(JS_STRING_ITERATOR_TYPE)                                    \
                                                                 \
   V(JS_BOUND_FUNCTION_TYPE)                                     \
   V(JS_FUNCTION_TYPE)                                           \
@@ -496,6 +503,7 @@
 // manually.
 #define STRUCT_LIST(V)                                                       \
   V(BOX, Box, box)                                                           \
+  V(PROMISE_CONTAINER, PromiseContainer, promise_container)                  \
   V(ACCESSOR_INFO, AccessorInfo, accessor_info)                              \
   V(ACCESSOR_PAIR, AccessorPair, accessor_pair)                              \
   V(ACCESS_CHECK_INFO, AccessCheckInfo, access_check_info)                   \
@@ -511,9 +519,8 @@
   V(DEBUG_INFO, DebugInfo, debug_info)                                       \
   V(BREAK_POINT_INFO, BreakPointInfo, break_point_info)                      \
   V(PROTOTYPE_INFO, PrototypeInfo, prototype_info)                           \
-  V(SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION,                                \
-    SloppyBlockWithEvalContextExtension,                                     \
-    sloppy_block_with_eval_context_extension)
+  V(MODULE, Module, module)                                                  \
+  V(CONTEXT_EXTENSION, ContextExtension, context_extension)
 
 // We use the full 8 bits of the instance_type field to encode heap object
 // instance types.  The high-order bit (bit 7) is set if the object is not a
@@ -678,6 +685,7 @@
   TYPE_FEEDBACK_INFO_TYPE,
   ALIASED_ARGUMENTS_ENTRY_TYPE,
   BOX_TYPE,
+  PROMISE_CONTAINER_TYPE,
   DEBUG_INFO_TYPE,
   BREAK_POINT_INFO_TYPE,
   FIXED_ARRAY_TYPE,
@@ -687,7 +695,8 @@
   TRANSITION_ARRAY_TYPE,
   PROPERTY_CELL_TYPE,
   PROTOTYPE_INFO_TYPE,
-  SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION_TYPE,
+  CONTEXT_EXTENSION_TYPE,
+  MODULE_TYPE,
 
   // All the following types are subtypes of JSReceiver, which corresponds to
   // objects in the JS sense. The first and the last type in this range are
@@ -708,7 +717,6 @@
   JS_ARGUMENTS_TYPE,
   JS_CONTEXT_EXTENSION_OBJECT_TYPE,
   JS_GENERATOR_OBJECT_TYPE,
-  JS_MODULE_TYPE,
   JS_ARRAY_TYPE,
   JS_ARRAY_BUFFER_TYPE,
   JS_TYPED_ARRAY_TYPE,
@@ -722,6 +730,7 @@
   JS_PROMISE_TYPE,
   JS_REGEXP_TYPE,
   JS_ERROR_TYPE,
+  JS_STRING_ITERATOR_TYPE,
   JS_BOUND_FUNCTION_TYPE,
   JS_FUNCTION_TYPE,  // LAST_JS_OBJECT_TYPE, LAST_JS_RECEIVER_TYPE
 
@@ -789,7 +798,6 @@
   V(FAST_PROPERTIES_SUB_TYPE)                    \
   V(FAST_TEMPLATE_INSTANTIATIONS_CACHE_SUB_TYPE) \
   V(HANDLER_TABLE_SUB_TYPE)                      \
-  V(INTRINSIC_FUNCTION_NAMES_SUB_TYPE)           \
   V(JS_COLLECTION_SUB_TYPE)                      \
   V(JS_WEAK_COLLECTION_SUB_TYPE)                 \
   V(LITERALS_ARRAY_SUB_TYPE)                     \
@@ -862,7 +870,7 @@
   INLINE(static type* cast(Object* object));            \
   INLINE(static const type* cast(const Object* object));
 
-
+class AbstractCode;
 class AccessorPair;
 class AllocationSite;
 class AllocationSiteCreationContext;
@@ -878,6 +886,9 @@
 class LiteralsArray;
 class LookupIterator;
 class FieldType;
+class ModuleDescriptor;
+class ModuleInfoEntry;
+class ModuleInfo;
 class ObjectHashTable;
 class ObjectVisitor;
 class PropertyCell;
@@ -961,6 +972,7 @@
   V(JSGeneratorObject)           \
   V(Map)                         \
   V(DescriptorArray)             \
+  V(FrameArray)                  \
   V(TransitionArray)             \
   V(LiteralsArray)               \
   V(TypeFeedbackMetadata)        \
@@ -977,6 +989,8 @@
   V(ScriptContextTable)          \
   V(NativeContext)               \
   V(ScopeInfo)                   \
+  V(ModuleInfoEntry)             \
+  V(ModuleInfo)                  \
   V(JSBoundFunction)             \
   V(JSFunction)                  \
   V(Code)                        \
@@ -998,6 +1012,7 @@
   V(JSProxy)                     \
   V(JSError)                     \
   V(JSPromise)                   \
+  V(JSStringIterator)            \
   V(JSSet)                       \
   V(JSMap)                       \
   V(JSSetIterator)               \
@@ -1287,7 +1302,8 @@
   MUST_USE_RESULT static MaybeHandle<Object> InstanceOf(
       Isolate* isolate, Handle<Object> object, Handle<Object> callable);
 
-  MUST_USE_RESULT static MaybeHandle<Object> GetProperty(LookupIterator* it);
+  V8_EXPORT_PRIVATE MUST_USE_RESULT static MaybeHandle<Object> GetProperty(
+      LookupIterator* it);
 
   // ES6 [[Set]] (when passed DONT_THROW)
   // Invariants for this and related functions (unless stated otherwise):
@@ -1313,10 +1329,6 @@
       LookupIterator* it, Handle<Object> value, LanguageMode language_mode,
       StoreFromKeyed store_mode);
 
-  MUST_USE_RESULT static MaybeHandle<Object> ReadAbsentProperty(
-      LookupIterator* it);
-  MUST_USE_RESULT static MaybeHandle<Object> ReadAbsentProperty(
-      Isolate* isolate, Handle<Object> receiver, Handle<Object> name);
   MUST_USE_RESULT static Maybe<bool> CannotCreateProperty(
       Isolate* isolate, Handle<Object> receiver, Handle<Object> name,
       Handle<Object> value, ShouldThrow should_throw);
@@ -1848,6 +1860,8 @@
       static_cast<int>(v8::KeyCollectionMode::kIncludePrototypes)
 };
 
+enum class AllocationSiteUpdateMode { kUpdate, kCheckOnly };
+
 // JSReceiver includes types on which properties can be defined, i.e.,
 // JSObject and JSProxy.
 class JSReceiver: public HeapObject {
@@ -1952,7 +1966,7 @@
       PropertyDescriptor* desc, PropertyDescriptor* current,
       ShouldThrow should_throw, Handle<Name> property_name = Handle<Name>());
 
-  MUST_USE_RESULT static Maybe<bool> GetOwnPropertyDescriptor(
+  V8_EXPORT_PRIVATE MUST_USE_RESULT static Maybe<bool> GetOwnPropertyDescriptor(
       Isolate* isolate, Handle<JSReceiver> object, Handle<Object> key,
       PropertyDescriptor* desc);
   MUST_USE_RESULT static Maybe<bool> GetOwnPropertyDescriptor(
@@ -2060,7 +2074,7 @@
   // [elements]: The elements (properties with names that are integers).
   //
   // Elements can be in two general modes: fast and slow. Each mode
-  // corrensponds to a set of object representations of elements that
+  // corresponds to a set of object representations of elements that
   // have something in common.
   //
   // In the fast mode elements is a FixedArray and so each element can
@@ -2298,7 +2312,9 @@
   }
 
   // These methods do not perform access checks!
-  static void UpdateAllocationSite(Handle<JSObject> object,
+  template <AllocationSiteUpdateMode update_or_check =
+                AllocationSiteUpdateMode::kUpdate>
+  static bool UpdateAllocationSite(Handle<JSObject> object,
                                    ElementsKind to_kind);
 
   // Lookup interceptors are used for handling properties controlled by host
@@ -2604,6 +2620,10 @@
 // as specified by ES6 section 25.1.1.3 The IteratorResult Interface
 class JSIteratorResult: public JSObject {
  public:
+  DECL_ACCESSORS(value, Object)
+
+  DECL_ACCESSORS(done, Object)
+
   // Offsets of object fields.
   static const int kValueOffset = JSObject::kHeaderSize;
   static const int kDoneOffset = kValueOffset + kPointerSize;
@@ -2895,7 +2915,6 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(WeakFixedArray);
 };
 
-
 // Generic array grows dynamically with O(1) amortized insertion.
 class ArrayList : public FixedArray {
  public:
@@ -2925,6 +2944,82 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(ArrayList);
 };
 
+#define FRAME_ARRAY_FIELD_LIST(V) \
+  V(WasmObject, Object)           \
+  V(WasmFunctionIndex, Smi)       \
+  V(Receiver, Object)             \
+  V(Function, JSFunction)         \
+  V(Code, AbstractCode)           \
+  V(Offset, Smi)                  \
+  V(Flags, Smi)
+
+// Container object for data collected during simple stack trace captures.
+class FrameArray : public FixedArray {
+ public:
+#define DECLARE_FRAME_ARRAY_ACCESSORS(name, type) \
+  inline type* name(int frame_ix) const;          \
+  inline void Set##name(int frame_ix, type* value);
+  FRAME_ARRAY_FIELD_LIST(DECLARE_FRAME_ARRAY_ACCESSORS)
+#undef DECLARE_FRAME_ARRAY_ACCESSORS
+
+  inline bool IsWasmFrame(int frame_ix) const;
+  inline int FrameCount() const;
+
+  void ShrinkToFit();
+
+  // Flags.
+  static const int kIsWasmFrame = 1 << 0;
+  static const int kIsStrict = 1 << 1;
+  static const int kForceConstructor = 1 << 2;
+
+  static Handle<FrameArray> AppendJSFrame(Handle<FrameArray> in,
+                                          Handle<Object> receiver,
+                                          Handle<JSFunction> function,
+                                          Handle<AbstractCode> code, int offset,
+                                          int flags);
+  static Handle<FrameArray> AppendWasmFrame(Handle<FrameArray> in,
+                                            Handle<Object> wasm_object,
+                                            int wasm_function_index,
+                                            Handle<AbstractCode> code,
+                                            int offset, int flags);
+
+  DECLARE_CAST(FrameArray)
+
+ private:
+  // The underlying fixed array embodies a captured stack trace. Frame i
+  // occupies indices
+  //
+  // kFirstIndex + 1 + [i * kElementsPerFrame, (i + 1) * kElementsPerFrame[,
+  //
+  // with internal offsets as below:
+
+  static const int kWasmObjectOffset = 0;
+  static const int kWasmFunctionIndexOffset = 1;
+
+  static const int kReceiverOffset = 0;
+  static const int kFunctionOffset = 1;
+
+  static const int kCodeOffset = 2;
+  static const int kOffsetOffset = 3;
+
+  static const int kFlagsOffset = 4;
+
+  static const int kElementsPerFrame = 5;
+
+  // Array layout indices.
+
+  static const int kFrameCountIndex = 0;
+  static const int kFirstIndex = 1;
+
+  static int LengthFor(int frame_count) {
+    return kFirstIndex + frame_count * kElementsPerFrame;
+  }
+
+  static Handle<FrameArray> EnsureSpace(Handle<FrameArray> array, int length);
+
+  friend class Factory;
+  DISALLOW_IMPLICIT_CONSTRUCTORS(FrameArray);
+};
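[Editor's note] The layout comment above flattens each captured frame into kElementsPerFrame consecutive slots after a leading frame count. A standalone sketch of the index arithmetic, mirroring LengthFor() (constants copied from the private section; purely illustrative):

    namespace frame_array_sketch {
    constexpr int kFirstIndex = 1;        // slot 0 holds the frame count
    constexpr int kElementsPerFrame = 5;  // receiver/function/code/offset/flags
    constexpr int kOffsetOffset = 3;

    // Backing-store slot holding the "Offset" field of frame `frame_ix`.
    constexpr int OffsetSlot(int frame_ix) {
      return kFirstIndex + frame_ix * kElementsPerFrame + kOffsetOffset;
    }
    static_assert(OffsetSlot(0) == 4, "frame 0 occupies slots 1..5");
    static_assert(OffsetSlot(2) == 14, "frame 2 occupies slots 11..15");
    }  // namespace frame_array_sketch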
 
 // DescriptorArrays are fixed arrays used to hold instance descriptors.
 // The format of the these objects is:
@@ -3410,7 +3505,8 @@
  public:
   // Find string in the string table. If it is not there yet, it is
   // added. The return value is the string found.
-  static Handle<String> LookupString(Isolate* isolate, Handle<String> key);
+  V8_EXPORT_PRIVATE static Handle<String> LookupString(Isolate* isolate,
+                                                       Handle<String> key);
   static Handle<String> LookupKey(Isolate* isolate, HashTableKey* key);
   static String* LookupKeyIfExists(Isolate* isolate, HashTableKey* key);
 
@@ -4236,6 +4332,8 @@
   // Return the function_name if present.
   String* FunctionName();
 
+  ModuleInfo* ModuleDescriptorInfo();
+
   // Return the name of the given parameter.
   String* ParameterName(int var);
 
@@ -4279,15 +4377,11 @@
                               VariableMode* mode, InitializationFlag* init_flag,
                               MaybeAssignedFlag* maybe_assigned_flag);
 
-  // Similar to ContextSlotIndex() but this method searches only among
-  // global slots of the serialized scope info. Returns the context slot index
-  // for a given slot name if the slot is present; otherwise returns a
-  // value < 0. The name must be an internalized string. If the slot is present
-  // and mode != NULL, sets *mode to the corresponding mode for that variable.
-  static int ContextGlobalSlotIndex(Handle<ScopeInfo> scope_info,
-                                    Handle<String> name, VariableMode* mode,
-                                    InitializationFlag* init_flag,
-                                    MaybeAssignedFlag* maybe_assigned_flag);
+  // Lookup metadata of a MODULE-allocated variable.  Return a negative value if
+  // there is no module variable with the given name.
+  int ModuleIndex(Handle<String> name, VariableMode* mode,
+                  InitializationFlag* init_flag,
+                  MaybeAssignedFlag* maybe_assigned_flag);
 
   // Lookup the name of a certain context slot by its index.
   String* ContextSlotName(int slot_index);
@@ -4301,7 +4395,7 @@
   // slot index if the function name is present and context-allocated (named
   // function expressions, only), otherwise returns a value < 0. The name
   // must be an internalized string.
-  int FunctionContextSlotIndex(String* name, VariableMode* mode);
+  int FunctionContextSlotIndex(String* name);
 
   // Lookup support for serialized scope info.  Returns the receiver context
   // slot index if scope has a "this" binding, and the binding is
@@ -4310,7 +4404,27 @@
 
   FunctionKind function_kind();
 
-  static Handle<ScopeInfo> Create(Isolate* isolate, Zone* zone, Scope* scope);
+  // Returns true if this ScopeInfo is linked to an outer ScopeInfo.
+  bool HasOuterScopeInfo();
+
+  // Returns true if this ScopeInfo was created for a debug-evaluate scope.
+  bool IsDebugEvaluateScope();
+
+  // Can be used to mark a ScopeInfo that looks like a with-scope as actually
+  // being a debug-evaluate scope.
+  void SetIsDebugEvaluateScope();
+
+  // Return the outer ScopeInfo if present.
+  ScopeInfo* OuterScopeInfo();
+
+#ifdef DEBUG
+  bool Equals(ScopeInfo* other) const;
+#endif
+
+  static Handle<ScopeInfo> Create(Isolate* isolate, Zone* zone, Scope* scope,
+                                  MaybeHandle<ScopeInfo> outer_scope);
+  static Handle<ScopeInfo> CreateForWithScope(
+      Isolate* isolate, MaybeHandle<ScopeInfo> outer_scope);
   static Handle<ScopeInfo> CreateGlobalThisBinding(Isolate* isolate);
 
   // Serializes empty scope info.
@@ -4322,18 +4436,16 @@
 
   // The layout of the static part of a ScopeInfo is as follows. Each entry is
   // numeric and occupies one array slot.
-  // 1. A set of properties of the scope
-  // 2. The number of parameters. This only applies to function scopes. For
-  //    non-function scopes this is 0.
-  // 3. The number of non-parameter variables allocated on the stack.
-  // 4. The number of non-parameter and parameter variables allocated in the
-  //    context.
+// 1. A set of properties of the scope.
+// 2. The number of parameters. For non-function scopes this is 0.
+// 3. The number of non-parameter variables allocated on the stack.
+// 4. The number of non-parameter and parameter variables allocated in the
+//    context.
 #define FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(V) \
   V(Flags)                                   \
   V(ParameterCount)                          \
   V(StackLocalCount)                         \
-  V(ContextLocalCount)                       \
-  V(ContextGlobalCount)
+  V(ContextLocalCount)
 
 #define FIELD_ACCESSORS(name)       \
   inline void Set##name(int value); \
@@ -4350,7 +4462,7 @@
 
  private:
   // The layout of the variable part of a ScopeInfo is as follows:
-  // 1. ParameterEntries:
+  // 1. ParameterNames:
   //    This part stores the names of the parameters for function scopes. One
   //    slot is used per parameter, so in total this part occupies
   //    ParameterCount() slots in the array. For other scopes than function
@@ -4358,40 +4470,48 @@
   // 2. StackLocalFirstSlot:
   //    Index of a first stack slot for stack local. Stack locals belonging to
   //    this scope are located on a stack at slots starting from this index.
-  // 3. StackLocalEntries:
+  // 3. StackLocalNames:
   //    Contains the names of local variables that are allocated on the stack,
-  //    in increasing order of the stack slot index. First local variable has
-  //    a stack slot index defined in StackLocalFirstSlot (point 2 above).
+  //    in increasing order of the stack slot index. First local variable has a
+  //    stack slot index defined in StackLocalFirstSlot (point 2 above).
   //    One slot is used per stack local, so in total this part occupies
   //    StackLocalCount() slots in the array.
-  // 4. ContextLocalNameEntries:
+  // 4. ContextLocalNames:
   //    Contains the names of local variables and parameters that are allocated
   //    in the context. They are stored in increasing order of the context slot
   //    index starting with Context::MIN_CONTEXT_SLOTS. One slot is used per
   //    context local, so in total this part occupies ContextLocalCount() slots
   //    in the array.
-  // 5. ContextLocalInfoEntries:
+  // 5. ContextLocalInfos:
   //    Contains the variable modes and initialization flags corresponding to
-  //    the context locals in ContextLocalNameEntries. One slot is used per
+  //    the context locals in ContextLocalNames. One slot is used per
   //    context local, so in total this part occupies ContextLocalCount()
   //    slots in the array.
-  // 6. RecieverEntryIndex:
+  // 6. ReceiverInfo:
   //    If the scope binds a "this" value, one slot is reserved to hold the
   //    context or stack slot index for the variable.
-  // 7. FunctionNameEntryIndex:
+  // 7. FunctionNameInfo:
   //    If the scope belongs to a named function expression this part contains
   //    information about the function variable. It always occupies two array
   //    slots:  a. The name of the function variable.
   //            b. The context or stack slot index for the variable.
-  int ParameterEntriesIndex();
+  // 8. OuterScopeInfoIndex:
+  //    The outer scope's ScopeInfo or the hole if there's none.
+  // 9. ModuleInfo, ModuleVariableCount, and ModuleVariables:
+  //    For a module scope, this part contains the ModuleInfo, the number of
+  //    MODULE-allocated variables, and the metadata of those variables.  For
+  //    non-module scopes it is empty.
+  int ParameterNamesIndex();
   int StackLocalFirstSlotIndex();
-  int StackLocalEntriesIndex();
-  int ContextLocalNameEntriesIndex();
-  int ContextGlobalNameEntriesIndex();
-  int ContextLocalInfoEntriesIndex();
-  int ContextGlobalInfoEntriesIndex();
-  int ReceiverEntryIndex();
-  int FunctionNameEntryIndex();
+  int StackLocalNamesIndex();
+  int ContextLocalNamesIndex();
+  int ContextLocalInfosIndex();
+  int ReceiverInfoIndex();
+  int FunctionNameInfoIndex();
+  int OuterScopeInfoIndex();
+  int ModuleInfoIndex();
+  int ModuleVariableCountIndex();
+  int ModuleVariablesIndex();
 
   int Lookup(Handle<String> name, int start, int end, VariableMode* mode,
              VariableLocation* location, InitializationFlag* init_flag,
@@ -4416,26 +4536,77 @@
       : public BitField<bool, ReceiverVariableField::kNext, 1> {};
   class FunctionVariableField
       : public BitField<VariableAllocationInfo, HasNewTargetField::kNext, 2> {};
-  class FunctionVariableMode
-      : public BitField<VariableMode, FunctionVariableField::kNext, 3> {};
-  class AsmModuleField : public BitField<bool, FunctionVariableMode::kNext, 1> {
-  };
+  class AsmModuleField
+      : public BitField<bool, FunctionVariableField::kNext, 1> {};
   class AsmFunctionField : public BitField<bool, AsmModuleField::kNext, 1> {};
   class HasSimpleParametersField
       : public BitField<bool, AsmFunctionField::kNext, 1> {};
   class FunctionKindField
-      : public BitField<FunctionKind, HasSimpleParametersField::kNext, 9> {};
+      : public BitField<FunctionKind, HasSimpleParametersField::kNext, 10> {};
+  class HasOuterScopeInfoField
+      : public BitField<bool, FunctionKindField::kNext, 1> {};
+  class IsDebugEvaluateScopeField
+      : public BitField<bool, HasOuterScopeInfoField::kNext, 1> {};
 
-  // BitFields representing the encoded information for context locals in the
-  // ContextLocalInfoEntries part.
-  class ContextLocalMode:      public BitField<VariableMode,         0, 3> {};
-  class ContextLocalInitFlag:  public BitField<InitializationFlag,   3, 1> {};
-  class ContextLocalMaybeAssignedFlag
-      : public BitField<MaybeAssignedFlag, 4, 1> {};
+  // Properties of variables.
+  class VariableModeField : public BitField<VariableMode, 0, 3> {};
+  class InitFlagField : public BitField<InitializationFlag, 3, 1> {};
+  class MaybeAssignedFlagField : public BitField<MaybeAssignedFlag, 4, 1> {};
 
   friend class ScopeIterator;
 };
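[Editor's note] The VariableModeField / InitFlagField / MaybeAssignedFlagField bit fields above pack one variable's metadata into a single small integer; this is what the ContextLocalInfos slots (and, for modules, the ModuleVariables metadata) hold. A hedged sketch of the packing, assuming v8::internal's BitField encode/decode helpers and the usual enum values (VAR, kCreatedInitialized, kNotAssigned); the fields are private, so this only illustrates ScopeInfo's own implementation:

    uint32_t properties = VariableModeField::encode(VAR) |
                          InitFlagField::encode(kCreatedInitialized) |
                          MaybeAssignedFlagField::encode(kNotAssigned);
    // ...later, when reading such a slot back:
    VariableMode mode = VariableModeField::decode(properties);
    InitializationFlag init_flag = InitFlagField::decode(properties);
    MaybeAssignedFlag maybe_assigned = MaybeAssignedFlagField::decode(properties);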
 
+class ModuleInfoEntry : public FixedArray {
+ public:
+  DECLARE_CAST(ModuleInfoEntry)
+  static Handle<ModuleInfoEntry> New(Isolate* isolate,
+                                     Handle<Object> export_name,
+                                     Handle<Object> local_name,
+                                     Handle<Object> import_name,
+                                     Handle<Object> module_request);
+  inline Object* export_name() const;
+  inline Object* local_name() const;
+  inline Object* import_name() const;
+  inline Object* module_request() const;
+
+ private:
+  friend class Factory;
+  enum {
+    kExportNameIndex,
+    kLocalNameIndex,
+    kImportNameIndex,
+    kModuleRequestIndex,
+    kLength
+  };
+};
+
+// ModuleInfo is to ModuleDescriptor what ScopeInfo is to Scope.
+class ModuleInfo : public FixedArray {
+ public:
+  DECLARE_CAST(ModuleInfo)
+  static Handle<ModuleInfo> New(Isolate* isolate, Zone* zone,
+                                ModuleDescriptor* descr);
+  inline FixedArray* module_requests() const;
+  inline FixedArray* special_exports() const;
+  inline FixedArray* regular_exports() const;
+  inline FixedArray* namespace_imports() const;
+  inline FixedArray* regular_imports() const;
+
+#ifdef DEBUG
+  inline bool Equals(ModuleInfo* other) const;
+#endif
+
+ private:
+  friend class Factory;
+  enum {
+    kModuleRequestsIndex,
+    kSpecialExportsIndex,
+    kRegularExportsIndex,
+    kNamespaceImportsIndex,
+    kRegularImportsIndex,
+    kLength
+  };
+};
 
 // The cache for maps used by normalized (dictionary mode) objects.
 // Such maps do not have property descriptors, so a typical program
@@ -4487,6 +4658,9 @@
                  // catching are part of a desugaring and should therefore not
                  // be visible to the user (we won't notify the debugger of such
                  // exceptions).
+    ASYNC_AWAIT,  // The exception will be caught and cause a promise rejection
+                  // in the desugaring of an async function, so special
+                  // async/await handling in the debugger can take place.
   };
 
   // Getters for handler table based on ranges.
@@ -4539,8 +4713,8 @@
   static const int kReturnEntrySize = 2;
 
   // Encoding of the {handler} field.
-  class HandlerPredictionField : public BitField<CatchPrediction, 0, 2> {};
-  class HandlerOffsetField : public BitField<int, 2, 30> {};
+  class HandlerPredictionField : public BitField<CatchPrediction, 0, 3> {};
+  class HandlerOffsetField : public BitField<int, 3, 29> {};
 };
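[Editor's note] Adding ASYNC_AWAIT pushes CatchPrediction past what 2 bits can hold, so the prediction field grows to 3 bits and the handler offset gives up one. A hedged sketch of what that means for the packed {handler} word (`handler_field` is an illustrative local; BitField's kSize/decode helpers are assumed):

    // Prediction and offset still fill exactly one 32-bit payload.
    STATIC_ASSERT(HandlerPredictionField::kSize + HandlerOffsetField::kSize ==
                  32);
    CatchPrediction prediction = HandlerPredictionField::decode(handler_field);
    int offset = HandlerOffsetField::decode(handler_field);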
 
 // ByteArray represents fixed sized byte arrays.  Used for the relocation info
@@ -4684,6 +4858,13 @@
   // Maximal length of a single BytecodeArray.
   static const int kMaxLength = kMaxSize - kHeaderSize;
 
+  static const int kPointerFieldsBeginOffset = kConstantPoolOffset;
+  static const int kPointerFieldsEndOffset = kFrameSizeOffset;
+
+  typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
+                              kPointerFieldsEndOffset, kHeaderSize>
+      MarkingBodyDescriptor;
+
   class BodyDescriptor;
 
  private:
@@ -4721,6 +4902,7 @@
   // Size is smi tagged when it is stored.
   static const int kSizeOffset = HeapObject::kHeaderSize;
   static const int kNextOffset = POINTER_SIZE_ALIGN(kSizeOffset + kPointerSize);
+  static const int kSize = kNextOffset + kPointerSize;
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(FreeSpace);
@@ -6226,6 +6408,7 @@
   inline bool IsJSFunctionMap();
   inline bool IsStringMap();
   inline bool IsJSProxyMap();
+  inline bool IsModuleMap();
   inline bool IsJSGlobalProxyMap();
   inline bool IsJSGlobalObjectMap();
   inline bool IsJSTypedArrayMap();
@@ -6482,6 +6665,34 @@
   DECLARE_CAST(Struct)
 };
 
+// A container struct to hold state required for
+// PromiseResolveThenableJob. {before, after}_debug_event could
+// potentially be undefined if the debugger is turned off.
+class PromiseContainer : public Struct {
+ public:
+  DECL_ACCESSORS(thenable, JSReceiver)
+  DECL_ACCESSORS(then, JSReceiver)
+  DECL_ACCESSORS(resolve, JSFunction)
+  DECL_ACCESSORS(reject, JSFunction)
+  DECL_ACCESSORS(before_debug_event, Object)
+  DECL_ACCESSORS(after_debug_event, Object)
+
+  static const int kThenableOffset = Struct::kHeaderSize;
+  static const int kThenOffset = kThenableOffset + kPointerSize;
+  static const int kResolveOffset = kThenOffset + kPointerSize;
+  static const int kRejectOffset = kResolveOffset + kPointerSize;
+  static const int kBeforeDebugEventOffset = kRejectOffset + kPointerSize;
+  static const int kAfterDebugEventOffset =
+      kBeforeDebugEventOffset + kPointerSize;
+  static const int kSize = kAfterDebugEventOffset + kPointerSize;
+
+  DECLARE_CAST(PromiseContainer)
+  DECLARE_PRINTER(PromiseContainer)
+  DECLARE_VERIFIER(PromiseContainer)
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(PromiseContainer);
+};
 
 // A simple one-element struct, useful where smis need to be boxed.
 class Box : public Struct {
@@ -6559,28 +6770,29 @@
 
 
 // Pair used to store both a ScopeInfo and an extension object in the extension
-// slot of a block context. Needed in the rare case where a declaration block
-// scope (a "varblock" as used to desugar parameter destructuring) also contains
-// a sloppy direct eval. (In no other case both are needed at the same time.)
-class SloppyBlockWithEvalContextExtension : public Struct {
+// slot of a block, catch, or with context. Needed in the rare case where a
+// declaration block scope (a "varblock" as used to desugar parameter
+// destructuring) also contains a sloppy direct eval, or for with and catch
+// scopes. (In no other case both are needed at the same time.)
+class ContextExtension : public Struct {
  public:
   // [scope_info]: Scope info.
   DECL_ACCESSORS(scope_info, ScopeInfo)
   // [extension]: Extension object.
-  DECL_ACCESSORS(extension, JSObject)
+  DECL_ACCESSORS(extension, Object)
 
-  DECLARE_CAST(SloppyBlockWithEvalContextExtension)
+  DECLARE_CAST(ContextExtension)
 
   // Dispatched behavior.
-  DECLARE_PRINTER(SloppyBlockWithEvalContextExtension)
-  DECLARE_VERIFIER(SloppyBlockWithEvalContextExtension)
+  DECLARE_PRINTER(ContextExtension)
+  DECLARE_VERIFIER(ContextExtension)
 
   static const int kScopeInfoOffset = HeapObject::kHeaderSize;
   static const int kExtensionOffset = kScopeInfoOffset + kPointerSize;
   static const int kSize = kExtensionOffset + kPointerSize;
 
  private:
-  DISALLOW_IMPLICIT_CONSTRUCTORS(SloppyBlockWithEvalContextExtension);
+  DISALLOW_IMPLICIT_CONSTRUCTORS(ContextExtension);
 };
 
 
@@ -6803,6 +7015,15 @@
   V(Array.prototype, push, ArrayPush)                       \
   V(Array.prototype, pop, ArrayPop)                         \
   V(Array.prototype, shift, ArrayShift)                     \
+  V(Date.prototype, getDate, DateGetDate)                   \
+  V(Date.prototype, getDay, DateGetDay)                     \
+  V(Date.prototype, getFullYear, DateGetFullYear)           \
+  V(Date.prototype, getHours, DateGetHours)                 \
+  V(Date.prototype, getMilliseconds, DateGetMilliseconds)   \
+  V(Date.prototype, getMinutes, DateGetMinutes)             \
+  V(Date.prototype, getMonth, DateGetMonth)                 \
+  V(Date.prototype, getSeconds, DateGetSeconds)             \
+  V(Date.prototype, getTime, DateGetTime)                   \
   V(Function.prototype, apply, FunctionApply)               \
   V(Function.prototype, call, FunctionCall)                 \
   V(Object.prototype, hasOwnProperty, ObjectHasOwnProperty) \
@@ -6847,6 +7068,10 @@
   V(Math, clz32, MathClz32)                                 \
   V(Math, fround, MathFround)                               \
   V(Math, trunc, MathTrunc)                                 \
+  V(Number, isFinite, NumberIsFinite)                       \
+  V(Number, isInteger, NumberIsInteger)                     \
+  V(Number, isNaN, NumberIsNaN)                             \
+  V(Number, isSafeInteger, NumberIsSafeInteger)             \
   V(Number, parseInt, NumberParseInt)                       \
   V(Number.prototype, toString, NumberToString)
 
@@ -6869,16 +7094,20 @@
   kDataViewBuffer,
   kDataViewByteLength,
   kDataViewByteOffset,
+  kFunctionHasInstance,
   kGlobalDecodeURI,
   kGlobalDecodeURIComponent,
   kGlobalEncodeURI,
   kGlobalEncodeURIComponent,
   kGlobalEscape,
   kGlobalUnescape,
+  kGlobalIsFinite,
+  kGlobalIsNaN,
   kTypedArrayByteLength,
   kTypedArrayByteOffset,
   kTypedArrayLength,
   kSharedArrayBufferByteLength,
+  kStringIteratorNext,
 };
 
 
@@ -6984,6 +7213,10 @@
   // [scope_info]: Scope info.
   DECL_ACCESSORS(scope_info, ScopeInfo)
 
+  // The outer scope info for the purpose of parsing this function, or the hole
+  // value if it isn't yet known.
+  DECL_ACCESSORS(outer_scope_info, HeapObject)
+
   // [construct stub]: Code stub for constructing instances of this function.
   DECL_ACCESSORS(construct_stub, Code)
 
@@ -7191,30 +7424,11 @@
   // Indicates that code for this function cannot be flushed.
   DECL_BOOLEAN_ACCESSORS(dont_flush)
 
-  // Indicates that this function is a generator.
-  DECL_BOOLEAN_ACCESSORS(is_generator)
-
-  // Indicates that this function is an async function.
-  DECL_BOOLEAN_ACCESSORS(is_async)
-
-  // Indicates that this function can be suspended, either via YieldExpressions
-  // or AwaitExpressions.
-  inline bool is_resumable() const;
-
-  // Indicates that this function is an arrow function.
-  DECL_BOOLEAN_ACCESSORS(is_arrow)
-
-  // Indicates that this function is a concise method.
-  DECL_BOOLEAN_ACCESSORS(is_concise_method)
-
-  // Indicates that this function is a getter.
-  DECL_BOOLEAN_ACCESSORS(is_getter_function)
-
-  // Indicates that this function is a setter.
-  DECL_BOOLEAN_ACCESSORS(is_setter_function)
-
-  // Indicates that this function is a default constructor.
-  DECL_BOOLEAN_ACCESSORS(is_default_constructor)
+  // Indicates that this is a constructor for a base class with instance fields.
+  DECL_BOOLEAN_ACCESSORS(requires_class_field_init)
+  // Indicates that this is a synthesized function to set up class instance
+  // fields.
+  DECL_BOOLEAN_ACCESSORS(is_class_field_initializer)
 
   // Indicates that this function is an asm function.
   DECL_BOOLEAN_ACCESSORS(asm_function)
@@ -7231,7 +7445,7 @@
   // Indicates that asm->wasm conversion failed and should not be re-attempted.
   DECL_BOOLEAN_ACCESSORS(is_asm_wasm_broken)
 
-  inline FunctionKind kind();
+  inline FunctionKind kind() const;
   inline void set_kind(FunctionKind kind);
 
   // Indicates whether or not the code in the shared function support
@@ -7331,11 +7545,12 @@
 
   // Layout description.
   // Pointer fields.
-  static const int kNameOffset = HeapObject::kHeaderSize;
-  static const int kCodeOffset = kNameOffset + kPointerSize;
-  static const int kOptimizedCodeMapOffset = kCodeOffset + kPointerSize;
+  static const int kCodeOffset = HeapObject::kHeaderSize;
+  static const int kNameOffset = kCodeOffset + kPointerSize;
+  static const int kOptimizedCodeMapOffset = kNameOffset + kPointerSize;
   static const int kScopeInfoOffset = kOptimizedCodeMapOffset + kPointerSize;
-  static const int kConstructStubOffset = kScopeInfoOffset + kPointerSize;
+  static const int kOuterScopeInfoOffset = kScopeInfoOffset + kPointerSize;
+  static const int kConstructStubOffset = kOuterScopeInfoOffset + kPointerSize;
   static const int kInstanceClassNameOffset =
       kConstructStubOffset + kPointerSize;
   static const int kFunctionDataOffset =
@@ -7457,9 +7672,12 @@
 
   static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
 
+  typedef FixedBodyDescriptor<kCodeOffset,
+                              kLastPointerFieldOffset + kPointerSize, kSize>
+      BodyDescriptor;
   typedef FixedBodyDescriptor<kNameOffset,
-                              kLastPointerFieldOffset + kPointerSize,
-                              kSize> BodyDescriptor;
+                              kLastPointerFieldOffset + kPointerSize, kSize>
+      BodyDescriptorWeakCode;
 
   // Bit positions in start_position_and_type.
   // The source code start position is in the 30 most significant bits of
@@ -7491,38 +7709,19 @@
     kDontFlush,
     // byte 2
     kFunctionKind,
-    kIsArrow = kFunctionKind,
-    kIsGenerator,
-    kIsConciseMethod,
-    kIsDefaultConstructor,
-    kIsSubclassConstructor,
-    kIsBaseConstructor,
-    kIsGetterFunction,
-    kIsSetterFunction,
+    // rest of byte 2 and first two bits of byte 3 are used by FunctionKind
     // byte 3
-    kIsAsyncFunction,
-    kDeserialized,
+    kDeserialized = kFunctionKind + 10,
     kIsDeclaration,
     kIsAsmWasmBroken,
+    kRequiresClassFieldInit,
+    kIsClassFieldInitializer,
     kCompilerHintsCount,  // Pseudo entry
   };
   // kFunctionKind has to be byte-aligned
   STATIC_ASSERT((kFunctionKind % kBitsPerByte) == 0);
-// Make sure that FunctionKind and byte 2 are in sync:
-#define ASSERT_FUNCTION_KIND_ORDER(functionKind, compilerFunctionKind) \
-  STATIC_ASSERT(FunctionKind::functionKind ==                          \
-                1 << (compilerFunctionKind - kFunctionKind))
-  ASSERT_FUNCTION_KIND_ORDER(kArrowFunction, kIsArrow);
-  ASSERT_FUNCTION_KIND_ORDER(kGeneratorFunction, kIsGenerator);
-  ASSERT_FUNCTION_KIND_ORDER(kConciseMethod, kIsConciseMethod);
-  ASSERT_FUNCTION_KIND_ORDER(kDefaultConstructor, kIsDefaultConstructor);
-  ASSERT_FUNCTION_KIND_ORDER(kSubclassConstructor, kIsSubclassConstructor);
-  ASSERT_FUNCTION_KIND_ORDER(kBaseConstructor, kIsBaseConstructor);
-  ASSERT_FUNCTION_KIND_ORDER(kGetterFunction, kIsGetterFunction);
-  ASSERT_FUNCTION_KIND_ORDER(kSetterFunction, kIsSetterFunction);
-#undef ASSERT_FUNCTION_KIND_ORDER
 
-  class FunctionKindBits : public BitField<FunctionKind, kIsArrow, 9> {};
+  class FunctionKindBits : public BitField<FunctionKind, kFunctionKind, 10> {};
 
   class DeoptCountBits : public BitField<int, 0, 4> {};
   class OptReenableTriesBits : public BitField<int, 4, 18> {};
@@ -7554,21 +7753,10 @@
   static const int kHasDuplicateParametersBit =
       kHasDuplicateParameters + kCompilerHintsSmiTagSize;
 
-  static const int kIsArrowBit = kIsArrow + kCompilerHintsSmiTagSize;
-  static const int kIsGeneratorBit = kIsGenerator + kCompilerHintsSmiTagSize;
-  static const int kIsConciseMethodBit =
-      kIsConciseMethod + kCompilerHintsSmiTagSize;
-  static const int kIsAsyncFunctionBit =
-      kIsAsyncFunction + kCompilerHintsSmiTagSize;
-
-  static const int kAccessorFunctionBits =
-      FunctionKind::kAccessorFunction
-      << (kFunctionKind + kCompilerHintsSmiTagSize);
-  static const int kClassConstructorBits =
-      FunctionKind::kClassConstructor
-      << (kFunctionKind + kCompilerHintsSmiTagSize);
-  static const int kFunctionKindMaskBits = FunctionKindBits::kMask
-                                           << kCompilerHintsSmiTagSize;
+  static const int kFunctionKindShift =
+      kFunctionKind + kCompilerHintsSmiTagSize;
+  static const int kAllFunctionKindBitsMask = FunctionKindBits::kMask
+                                              << kCompilerHintsSmiTagSize;
 
   // Constants for optimizing codegen for strict mode function and
   // native tests.
@@ -7687,6 +7875,100 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSGeneratorObject);
 };
 
+// A Module object is a mapping from export names to cells.
+// This is still very much in flux.
+class Module : public Struct {
+ public:
+  DECLARE_CAST(Module)
+  DECLARE_VERIFIER(Module)
+  DECLARE_PRINTER(Module)
+
+  // The code representing this Module, either a
+  // SharedFunctionInfo or a JSFunction depending
+  // on whether it's been instantiated.
+  DECL_ACCESSORS(code, Object)
+
+  DECL_ACCESSORS(exports, ObjectHashTable)
+
+  // [[RequestedModules]]: Modules imported or re-exported by this module.
+  // Corresponds 1-to-1 to the module specifier strings in
+  // ModuleInfo::module_requests.
+  DECL_ACCESSORS(requested_modules, FixedArray)
+
+  // [[Evaluated]]: Whether this module has been evaluated. Modules
+  // are only evaluated a single time.
+  DECL_BOOLEAN_ACCESSORS(evaluated)
+
+  // Storage for [[Evaluated]]
+  DECL_INT_ACCESSORS(flags)
+
+  // Embedder-specified data
+  DECL_ACCESSORS(embedder_data, Object)
+
+  // Get the SharedFunctionInfo associated with the code.
+  inline SharedFunctionInfo* shared() const;
+
+  // Get the ModuleInfo associated with the code.
+  inline ModuleInfo* info() const;
+
+  // Compute a hash for this object.
+  inline uint32_t Hash() const;
+
+  // Implementation of spec operation ModuleDeclarationInstantiation.
+  // Returns false if an exception occurred during instantiation, true
+  // otherwise.
+  static MUST_USE_RESULT bool Instantiate(Handle<Module> module,
+                                          v8::Local<v8::Context> context,
+                                          v8::Module::ResolveCallback callback,
+                                          v8::Local<v8::Value> callback_data);
+
+  // Implementation of spec operation ModuleEvaluation.
+  static MUST_USE_RESULT MaybeHandle<Object> Evaluate(Handle<Module> module);
+
+  static Handle<Object> LoadExport(Handle<Module> module, Handle<String> name);
+  static void StoreExport(Handle<Module> module, Handle<String> name,
+                          Handle<Object> value);
+
+  static Handle<Object> LoadImport(Handle<Module> module, Handle<String> name,
+                                   int module_request);
+
+  static const int kCodeOffset = HeapObject::kHeaderSize;
+  static const int kExportsOffset = kCodeOffset + kPointerSize;
+  static const int kRequestedModulesOffset = kExportsOffset + kPointerSize;
+  static const int kFlagsOffset = kRequestedModulesOffset + kPointerSize;
+  static const int kEmbedderDataOffset = kFlagsOffset + kPointerSize;
+  static const int kSize = kEmbedderDataOffset + kPointerSize;
+
+ private:
+  enum { kEvaluatedBit };
+
+  static void CreateExport(Handle<Module> module, Handle<FixedArray> names);
+  static void CreateIndirectExport(Handle<Module> module, Handle<String> name,
+                                   Handle<ModuleInfoEntry> entry);
+
+  // The [must_resolve] argument indicates whether or not an exception should be
+  // thrown in case the module does not provide an export named [name]
+  // (including when a cycle is detected).  An exception is always thrown in the
+  // case of conflicting star exports.
+  //
+  // If [must_resolve] is true, a null result indicates an exception. If
+  // [must_resolve] is false, a null result may or may not indicate an
+  // exception (so check manually!).
+  class ResolveSet;
+  static MUST_USE_RESULT MaybeHandle<Cell> ResolveExport(
+      Handle<Module> module, Handle<String> name, bool must_resolve,
+      ResolveSet* resolve_set);
+  static MUST_USE_RESULT MaybeHandle<Cell> ResolveImport(
+      Handle<Module> module, Handle<String> name, int module_request,
+      bool must_resolve, ResolveSet* resolve_set);
+
+  // Helper for ResolveExport.
+  static MUST_USE_RESULT MaybeHandle<Cell> ResolveExportUsingStarExports(
+      Handle<Module> module, Handle<String> name, bool must_resolve,
+      ResolveSet* resolve_set);
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Module);
+};
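[Editor's note] The [must_resolve] comment above defines a three-way contract on ResolveExport's MaybeHandle result. A hedged sketch of how a caller inside this class has to interpret it (locals such as `resolve_set` and `isolate` are illustrative, not lifted from the patch):

    Handle<Cell> cell;
    bool found =
        Module::ResolveExport(module, name, /* must_resolve */ false,
                              &resolve_set)
            .ToHandle(&cell);
    if (!found) {
      // With must_resolve == false a null result is ambiguous, so check for a
      // pending exception before concluding the export simply wasn't found.
      if (isolate->has_pending_exception()) return MaybeHandle<Cell>();
      // Otherwise fall back, e.g. to the star exports.
    }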
 
 // JSBoundFunction describes a bound function exotic object.
 class JSBoundFunction : public JSObject {
@@ -8219,7 +8501,8 @@
   DECL_ACCESSORS(flags, Object)
   DECL_ACCESSORS(source, Object)
 
-  static MaybeHandle<JSRegExp> New(Handle<String> source, Flags flags);
+  V8_EXPORT_PRIVATE static MaybeHandle<JSRegExp> New(Handle<String> source,
+                                                     Flags flags);
   static Handle<JSRegExp> Copy(Handle<JSRegExp> regexp);
 
   static MaybeHandle<JSRegExp> Initialize(Handle<JSRegExp> regexp,
@@ -8585,7 +8868,9 @@
 
   inline bool SitePointsToLiteral();
 
-  static void DigestTransitionFeedback(Handle<AllocationSite> site,
+  template <AllocationSiteUpdateMode update_or_check =
+                AllocationSiteUpdateMode::kUpdate>
+  static bool DigestTransitionFeedback(Handle<AllocationSite> site,
                                        ElementsKind to_kind);
 
   DECLARE_PRINTER(AllocationSite)
@@ -8612,6 +8897,10 @@
   static const int kPointerFieldsBeginOffset = kTransitionInfoOffset;
   static const int kPointerFieldsEndOffset = kWeakNextOffset;
 
+  typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
+                              kPointerFieldsEndOffset, kSize>
+      MarkingBodyDescriptor;
+
   // For other visitors, use the fixed body descriptor below.
   typedef FixedBodyDescriptor<HeapObject::kHeaderSize, kSize, kSize>
       BodyDescriptor;
@@ -8674,8 +8963,7 @@
 enum AllowNullsFlag {ALLOW_NULLS, DISALLOW_NULLS};
 enum RobustnessFlag {ROBUST_STRING_TRAVERSAL, FAST_STRING_TRAVERSAL};
 
-
-class StringHasher {
+class V8_EXPORT_PRIVATE StringHasher {
  public:
   explicit inline StringHasher(int length, uint32_t seed);
 
@@ -9123,6 +9411,9 @@
   static int IndexOf(Isolate* isolate, Handle<String> sub, Handle<String> pat,
                      int start_index);
 
+  static Object* LastIndexOf(Isolate* isolate, Handle<Object> receiver,
+                             Handle<Object> search, Handle<Object> position);
+
   // String equality operations.
   inline bool Equals(String* other);
   inline static bool Equals(Handle<String> one, Handle<String> two);
@@ -9295,7 +9586,7 @@
   static bool SlowEquals(Handle<String> one, Handle<String> two);
 
   // Slow case of AsArrayIndex.
-  bool SlowAsArrayIndex(uint32_t* index);
+  V8_EXPORT_PRIVATE bool SlowAsArrayIndex(uint32_t* index);
 
   // Compute and set the hash code.
   uint32_t ComputeAndSetHash();
@@ -9860,9 +10151,6 @@
   static const int kDependentCodeOffset = kValueOffset + kPointerSize;
   static const int kSize = kDependentCodeOffset + kPointerSize;
 
-  static const int kPointerFieldsBeginOffset = kValueOffset;
-  static const int kPointerFieldsEndOffset = kSize;
-
   typedef FixedBodyDescriptor<kValueOffset,
                               kSize,
                               kSize> BodyDescriptor;
@@ -10055,6 +10343,28 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(JSMap);
 };
 
+class JSStringIterator : public JSObject {
+ public:
+  // Dispatched behavior.
+  DECLARE_PRINTER(JSStringIterator)
+  DECLARE_VERIFIER(JSStringIterator)
+
+  DECLARE_CAST(JSStringIterator)
+
+  // [string]: The [[IteratedString]] internal field.
+  DECL_ACCESSORS(string, String)
+
+  // [index]: The [[StringIteratorNextIndex]] internal field.
+  inline int index() const;
+  inline void set_index(int value);
+
+  static const int kStringOffset = JSObject::kHeaderSize;
+  static const int kNextIndexOffset = kStringOffset + kPointerSize;
+  static const int kSize = kNextIndexOffset + kPointerSize;
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(JSStringIterator);
+};
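[Editor's note] JSStringIterator only declares its two internal fields; the next() builtin behind kStringIteratorNext conceptually reads string() and index(), consumes one code point, and stores the new index with set_index(). A standalone sketch of that advance over a UTF-16 buffer (not the real builtin; purely illustrative):

    #include <cstdint>

    // Returns the [[StringIteratorNextIndex]] after consuming one code point,
    // treating a valid surrogate pair as a single code point.
    int NextIndexAfterCodePoint(const uint16_t* chars, int length, int index) {
      uint16_t lead = chars[index];
      if (lead >= 0xD800 && lead <= 0xDBFF && index + 1 < length) {
        uint16_t trail = chars[index + 1];
        if (trail >= 0xDC00 && trail <= 0xDFFF) return index + 2;
      }
      return index + 1;
    }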
 
 // OrderedHashTableIterator is an iterator that iterates over the keys and
 // values of an OrderedHashTable.
@@ -10480,12 +10790,9 @@
   static const int kLengthOffset = JSObject::kHeaderSize;
   static const int kSize = kLengthOffset + kPointerSize;
 
-  // 600 * KB is the Page::kMaxRegularHeapObjectSize defined in spaces.h which
-  // we do not want to include in objects.h
-  // Note that Page::kMaxRegularHeapObjectSize has to be in sync with
-  // kInitialMaxFastElementArray which is checked in a DCHECK in heap.cc.
   static const int kInitialMaxFastElementArray =
-      (600 * KB - FixedArray::kHeaderSize - kSize - AllocationMemento::kSize) /
+      (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize - kSize -
+       AllocationMemento::kSize) /
       kPointerSize;
 
  private:
@@ -10684,8 +10991,10 @@
   DECL_ACCESSORS(getter, Object)
   DECL_ACCESSORS(setter, Object)
   DECL_ACCESSORS(query, Object)
+  DECL_ACCESSORS(descriptor, Object)
   DECL_ACCESSORS(deleter, Object)
   DECL_ACCESSORS(enumerator, Object)
+  DECL_ACCESSORS(definer, Object)
   DECL_ACCESSORS(data, Object)
   DECL_BOOLEAN_ACCESSORS(can_intercept_symbols)
   DECL_BOOLEAN_ACCESSORS(all_can_read)
@@ -10703,9 +11012,11 @@
   static const int kGetterOffset = HeapObject::kHeaderSize;
   static const int kSetterOffset = kGetterOffset + kPointerSize;
   static const int kQueryOffset = kSetterOffset + kPointerSize;
-  static const int kDeleterOffset = kQueryOffset + kPointerSize;
+  static const int kDescriptorOffset = kQueryOffset + kPointerSize;
+  static const int kDeleterOffset = kDescriptorOffset + kPointerSize;
   static const int kEnumeratorOffset = kDeleterOffset + kPointerSize;
-  static const int kDataOffset = kEnumeratorOffset + kPointerSize;
+  static const int kDefinerOffset = kEnumeratorOffset + kPointerSize;
+  static const int kDataOffset = kDefinerOffset + kPointerSize;
   static const int kFlagsOffset = kDataOffset + kPointerSize;
   static const int kSize = kFlagsOffset + kPointerSize;
 
diff --git a/src/ostreams.h b/src/ostreams.h
index 977b5c6..dea7514 100644
--- a/src/ostreams.h
+++ b/src/ostreams.h
@@ -13,6 +13,7 @@
 
 #include "include/v8config.h"
 #include "src/base/macros.h"
+#include "src/globals.h"
 
 namespace v8 {
 namespace internal {
@@ -33,7 +34,7 @@
 
 
 // An output stream writing to a file.
-class OFStream : public std::ostream {
+class V8_EXPORT_PRIVATE OFStream : public std::ostream {
  public:
   explicit OFStream(FILE* f);
   virtual ~OFStream();
diff --git a/src/parsing/duplicate-finder.cc b/src/parsing/duplicate-finder.cc
new file mode 100644
index 0000000..6b57153
--- /dev/null
+++ b/src/parsing/duplicate-finder.cc
@@ -0,0 +1,145 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/parsing/duplicate-finder.h"
+
+#include "src/conversions.h"
+#include "src/unicode-cache.h"
+
+namespace v8 {
+namespace internal {
+
+int DuplicateFinder::AddOneByteSymbol(Vector<const uint8_t> key, int value) {
+  return AddSymbol(key, true, value);
+}
+
+int DuplicateFinder::AddTwoByteSymbol(Vector<const uint16_t> key, int value) {
+  return AddSymbol(Vector<const uint8_t>::cast(key), false, value);
+}
+
+int DuplicateFinder::AddSymbol(Vector<const uint8_t> key, bool is_one_byte,
+                               int value) {
+  uint32_t hash = Hash(key, is_one_byte);
+  byte* encoding = BackupKey(key, is_one_byte);
+  base::HashMap::Entry* entry = map_.LookupOrInsert(encoding, hash);
+  int old_value = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
+  entry->value =
+      reinterpret_cast<void*>(static_cast<intptr_t>(value | old_value));
+  return old_value;
+}
+
+int DuplicateFinder::AddNumber(Vector<const uint8_t> key, int value) {
+  DCHECK(key.length() > 0);
+  // Quick check for already being in canonical form.
+  if (IsNumberCanonical(key)) {
+    return AddOneByteSymbol(key, value);
+  }
+
+  int flags = ALLOW_HEX | ALLOW_OCTAL | ALLOW_IMPLICIT_OCTAL | ALLOW_BINARY;
+  double double_value = StringToDouble(unicode_constants_, key, flags, 0.0);
+  int length;
+  const char* string;
+  if (!std::isfinite(double_value)) {
+    string = "Infinity";
+    length = 8;  // strlen("Infinity");
+  } else {
+    string = DoubleToCString(double_value,
+                             Vector<char>(number_buffer_, kBufferSize));
+    length = StrLength(string);
+  }
+  return AddSymbol(
+      Vector<const byte>(reinterpret_cast<const byte*>(string), length), true,
+      value);
+}
+
+bool DuplicateFinder::IsNumberCanonical(Vector<const uint8_t> number) {
+  // Test for a safe approximation of number literals that are already
+  // in canonical form: max 15 digits, no leading zeroes, except an
+  // integer part that is a single zero, and no trailing zeros below
+  // the decimal point.
+  int pos = 0;
+  int length = number.length();
+  if (number.length() > 15) return false;
+  if (number[pos] == '0') {
+    pos++;
+  } else {
+    while (pos < length &&
+           static_cast<unsigned>(number[pos] - '0') <= ('9' - '0'))
+      pos++;
+  }
+  if (length == pos) return true;
+  if (number[pos] != '.') return false;
+  pos++;
+  bool invalid_last_digit = true;
+  while (pos < length) {
+    uint8_t digit = number[pos] - '0';
+    if (digit > '9' - '0') return false;
+    invalid_last_digit = (digit == 0);
+    pos++;
+  }
+  return !invalid_last_digit;
+}
+
+uint32_t DuplicateFinder::Hash(Vector<const uint8_t> key, bool is_one_byte) {
+  // Primitive hash function, almost identical to the one used
+  // for strings (except that it's seeded by the length and representation).
+  int length = key.length();
+  uint32_t hash = (length << 1) | (is_one_byte ? 1 : 0);
+  for (int i = 0; i < length; i++) {
+    uint32_t c = key[i];
+    hash = (hash + c) * 1025;
+    hash ^= (hash >> 6);
+  }
+  return hash;
+}
+
+bool DuplicateFinder::Match(void* first, void* second) {
+  // Decode lengths.
+  // Length + representation is encoded as base 128, most significant heptet
+  // first, with a 8th bit being non-zero while there are more heptets.
+  // The value encodes the number of bytes following, and whether the original
+  // was Latin1.
+  byte* s1 = reinterpret_cast<byte*>(first);
+  byte* s2 = reinterpret_cast<byte*>(second);
+  uint32_t length_one_byte_field = 0;
+  byte c1;
+  do {
+    c1 = *s1;
+    if (c1 != *s2) return false;
+    length_one_byte_field = (length_one_byte_field << 7) | (c1 & 0x7f);
+    s1++;
+    s2++;
+  } while ((c1 & 0x80) != 0);
+  int length = static_cast<int>(length_one_byte_field >> 1);
+  return memcmp(s1, s2, length) == 0;
+}
+
+byte* DuplicateFinder::BackupKey(Vector<const uint8_t> bytes,
+                                 bool is_one_byte) {
+  uint32_t one_byte_length = (bytes.length() << 1) | (is_one_byte ? 1 : 0);
+  backing_store_.StartSequence();
+  // Emit one_byte_length as base-128 encoded number, with the 7th bit set
+  // on the byte of every heptet except the last, least significant, one.
+  if (one_byte_length >= (1 << 7)) {
+    if (one_byte_length >= (1 << 14)) {
+      if (one_byte_length >= (1 << 21)) {
+        if (one_byte_length >= (1 << 28)) {
+          backing_store_.Add(
+              static_cast<uint8_t>((one_byte_length >> 28) | 0x80));
+        }
+        backing_store_.Add(
+            static_cast<uint8_t>((one_byte_length >> 21) | 0x80u));
+      }
+      backing_store_.Add(static_cast<uint8_t>((one_byte_length >> 14) | 0x80u));
+    }
+    backing_store_.Add(static_cast<uint8_t>((one_byte_length >> 7) | 0x80u));
+  }
+  backing_store_.Add(static_cast<uint8_t>(one_byte_length & 0x7f));
+
+  backing_store_.AddBlock(bytes);
+  return backing_store_.EndSequence().start();
+}
+
+}  // namespace internal
+}  // namespace v8
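[Editor's note] BackupKey and Match above round-trip a (length, is_one_byte) pair through a variable-length encoding: 7 payload bits per byte, most significant heptet first, with the high bit acting as a continuation flag. A standalone sketch of the same scheme, without the backing store (illustrative, not the V8 code):

    #include <cstdint>
    #include <vector>

    std::vector<uint8_t> EncodeHeader(uint32_t length, bool is_one_byte) {
      uint32_t v = (length << 1) | (is_one_byte ? 1 : 0);
      std::vector<uint8_t> out;
      // Skip leading zero heptets, then emit the rest most-significant first
      // with the continuation bit (0x80) set.
      for (int shift = 28; shift > 0; shift -= 7) {
        if (v >= (1u << shift))
          out.push_back(static_cast<uint8_t>((v >> shift) | 0x80));
      }
      out.push_back(static_cast<uint8_t>(v & 0x7f));
      return out;
    }

    uint32_t DecodeHeader(const uint8_t* p, bool* is_one_byte) {
      uint32_t v = 0;
      uint8_t c;
      do {
        c = *p++;
        v = (v << 7) | (c & 0x7f);
      } while (c & 0x80);
      *is_one_byte = (v & 1) != 0;
      return v >> 1;  // the original length
    }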
diff --git a/src/parsing/duplicate-finder.h b/src/parsing/duplicate-finder.h
new file mode 100644
index 0000000..a3858e7
--- /dev/null
+++ b/src/parsing/duplicate-finder.h
@@ -0,0 +1,64 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_PARSING_DUPLICATE_FINDER_H_
+#define V8_PARSING_DUPLICATE_FINDER_H_
+
+#include "src/base/hashmap.h"
+#include "src/collector.h"
+
+namespace v8 {
+namespace internal {
+
+class UnicodeCache;
+
+// DuplicateFinder discovers duplicate symbols.
+class DuplicateFinder {
+ public:
+  explicit DuplicateFinder(UnicodeCache* constants)
+      : unicode_constants_(constants), backing_store_(16), map_(&Match) {}
+
+  int AddOneByteSymbol(Vector<const uint8_t> key, int value);
+  int AddTwoByteSymbol(Vector<const uint16_t> key, int value);
+  // Add a number literal by converting it (if necessary)
+  // to the string that ToString(ToNumber(literal)) would generate,
+  // and then adding that string with AddOneByteSymbol.
+  // This string is the actual value used as key in an object literal,
+  // and the one that must be different from the other keys.
+  int AddNumber(Vector<const uint8_t> key, int value);
+
+ private:
+  int AddSymbol(Vector<const uint8_t> key, bool is_one_byte, int value);
+  // Backs up the key and its length in the backing store.
+  // The backup is stored with a base-128 encoding of the
+  // length (plus a bit saying whether the string is one byte),
+  // followed by the bytes of the key.
+  uint8_t* BackupKey(Vector<const uint8_t> key, bool is_one_byte);
+
+  // Compare two encoded keys (both pointing into the backing store)
+  // for having the same base-128 encoded lengths and representation,
+  // and then having the same 'length' bytes following.
+  static bool Match(void* first, void* second);
+  // Creates a hash from a sequence of bytes.
+  static uint32_t Hash(Vector<const uint8_t> key, bool is_one_byte);
+  // Checks whether a string containing a JS number is in its canonical
+  // form.
+  static bool IsNumberCanonical(Vector<const uint8_t> key);
+
+  // Size of buffer. Sufficient for using it to call DoubleToCString
+  // from conversions.h.
+  static const int kBufferSize = 100;
+
+  UnicodeCache* unicode_constants_;
+  // Backing store used to store strings used as hashmap keys.
+  SequenceCollector<unsigned char> backing_store_;
+  base::CustomMatcherHashMap map_;
+  // Buffer used for string->number->canonical string conversions.
+  char number_buffer_[kBufferSize];
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_PARSING_DUPLICATE_FINDER_H_
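[Editor's note] A hedged usage sketch: the value returned by the Add* methods is whatever was previously stored for that key (0 if the key is new), and AddNumber canonicalizes numeric literals first, so differently spelled but equal numbers collide. `unicode_cache` is an assumed UnicodeCache*, and the flag values are illustrative:

    DuplicateFinder finder(unicode_cache);
    static const uint8_t kDec[] = {'1', '6'};
    int old = finder.AddOneByteSymbol(Vector<const uint8_t>(kDec, 2), 1);
    // old == 0: the key "16" was not seen before.
    static const uint8_t kHex[] = {'0', 'x', '1', '0'};
    old = finder.AddNumber(Vector<const uint8_t>(kHex, 4), 2);
    // old == 1: "0x10" canonicalizes to "16", which is already present.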
diff --git a/src/parsing/expression-classifier.h b/src/parsing/expression-classifier.h
index 9190e18..6a1fbac 100644
--- a/src/parsing/expression-classifier.h
+++ b/src/parsing/expression-classifier.h
@@ -7,11 +7,12 @@
 
 #include "src/messages.h"
 #include "src/parsing/scanner.h"
-#include "src/parsing/token.h"
 
 namespace v8 {
 namespace internal {
 
+class DuplicateFinder;
+
 #define ERROR_CODES(T)                       \
   T(ExpressionProduction, 0)                 \
   T(FormalParameterInitializerProduction, 1) \
@@ -21,11 +22,32 @@
   T(StrictModeFormalParametersProduction, 5) \
   T(ArrowFormalParametersProduction, 6)      \
   T(LetPatternProduction, 7)                 \
-  T(ObjectLiteralProduction, 8)              \
-  T(TailCallExpressionProduction, 9)         \
-  T(AsyncArrowFormalParametersProduction, 10)
+  T(TailCallExpressionProduction, 8)         \
+  T(AsyncArrowFormalParametersProduction, 9)
 
-template <typename Traits>
+// Expression classifiers serve two purposes:
+//
+// 1) They keep track of error messages that are pending (and other
+//    related information), waiting for the parser to decide whether
+//    the parsed expression is a pattern or not.
+// 2) They keep track of expressions that may need to be rewritten, if
+//    the parser decides that they are not patterns.  (A different
+//    mechanism implements the rewriting of patterns.)
+//
+// Expression classifiers are used by the parser in a stack fashion.
+// Each new classifier is pushed on top of the stack.  This happens
+// automatically by the class's constructor.  While on top of the
+// stack, the classifier records pending error messages and tracks the
+// pending non-patterns of the expression that is being parsed.
+//
+// At the end of its life, a classifier is either "accumulated" to the
+// one that is below it on the stack, or is "discarded".  The former
+// is achieved by calling the method Accumulate.  The latter is
+// achieved automatically by the destructor, but it can happen earlier
+// by calling the method Discard.  Both actions result in removing the
+// classifier from the parser's stack.
+
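[Editor's note] A hedged sketch of that stack discipline as seen from a ParserBase method (the surrounding parser plumbing is assumed, not shown in this patch):

    // The constructor links the new classifier to the parser and makes it the
    // top of the stack.
    ExpressionClassifier<Types> inner(this);
    // ... parse a sub-expression, recording pending errors into `inner` ...
    // If the sub-expression turned out to be a plain expression, fold the
    // collected state into the enclosing classifier; otherwise simply let the
    // destructor Discard() it and pop it off the stack.
    inner.previous()->Accumulate(&inner,
                                 ExpressionClassifier<Types>::AllProductions);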
+template <typename Types>
 class ExpressionClassifier {
  public:
   enum ErrorKind : unsigned {
@@ -55,51 +77,41 @@
     const char* arg;
   };
 
+  // clang-format off
   enum TargetProduction : unsigned {
 #define DEFINE_PRODUCTION(NAME, CODE) NAME = 1 << CODE,
     ERROR_CODES(DEFINE_PRODUCTION)
 #undef DEFINE_PRODUCTION
 
-        ExpressionProductions =
-            (ExpressionProduction | FormalParameterInitializerProduction |
-             TailCallExpressionProduction),
-    PatternProductions = (BindingPatternProduction |
-                          AssignmentPatternProduction | LetPatternProduction),
-    FormalParametersProductions = (DistinctFormalParametersProduction |
-                                   StrictModeFormalParametersProduction),
-    AllProductions =
-        (ExpressionProductions | PatternProductions |
-         FormalParametersProductions | ArrowFormalParametersProduction |
-         ObjectLiteralProduction | AsyncArrowFormalParametersProduction)
+#define DEFINE_ALL_PRODUCTIONS(NAME, CODE) NAME |
+    AllProductions = ERROR_CODES(DEFINE_ALL_PRODUCTIONS) /* | */ 0
+#undef DEFINE_ALL_PRODUCTIONS
   };
+  // clang-format on
 
   enum FunctionProperties : unsigned {
     NonSimpleParameter = 1 << 0
   };
 
-  explicit ExpressionClassifier(const Traits* t)
-      : zone_(t->zone()),
-        non_patterns_to_rewrite_(t->GetNonPatternList()),
-        reported_errors_(t->GetReportedErrorList()),
-        duplicate_finder_(nullptr),
-        invalid_productions_(0),
-        function_properties_(0) {
-    reported_errors_begin_ = reported_errors_end_ = reported_errors_->length();
-    non_pattern_begin_ = non_patterns_to_rewrite_->length();
-  }
-
-  ExpressionClassifier(const Traits* t, DuplicateFinder* duplicate_finder)
-      : zone_(t->zone()),
-        non_patterns_to_rewrite_(t->GetNonPatternList()),
-        reported_errors_(t->GetReportedErrorList()),
+  explicit ExpressionClassifier(typename Types::Base* base,
+                                DuplicateFinder* duplicate_finder = nullptr)
+      : base_(base),
+        previous_(base->classifier_),
+        zone_(base->impl()->zone()),
+        non_patterns_to_rewrite_(base->impl()->GetNonPatternList()),
+        reported_errors_(base->impl()->GetReportedErrorList()),
         duplicate_finder_(duplicate_finder),
         invalid_productions_(0),
         function_properties_(0) {
+    base->classifier_ = this;
     reported_errors_begin_ = reported_errors_end_ = reported_errors_->length();
     non_pattern_begin_ = non_patterns_to_rewrite_->length();
   }
 
-  ~ExpressionClassifier() { Discard(); }
+  V8_INLINE ~ExpressionClassifier() {
+    Discard();
+    if (base_->classifier_ == this) base_->classifier_ = previous_;
+  }
 
   V8_INLINE bool is_valid(unsigned productions) const {
     return (invalid_productions_ & productions) == 0;
@@ -179,14 +191,6 @@
     return reported_error(kLetPatternProduction);
   }
 
-  V8_INLINE bool has_object_literal_error() const {
-    return !is_valid(ObjectLiteralProduction);
-  }
-
-  V8_INLINE const Error& object_literal_error() const {
-    return reported_error(kObjectLiteralProduction);
-  }
-
   V8_INLINE bool has_tail_call_expression() const {
     return !is_valid(TailCallExpressionProduction);
   }
@@ -295,14 +299,6 @@
     Add(Error(loc, message, kLetPatternProduction, arg));
   }
 
-  void RecordObjectLiteralError(const Scanner::Location& loc,
-                                MessageTemplate::Template message,
-                                const char* arg = nullptr) {
-    if (has_object_literal_error()) return;
-    invalid_productions_ |= ObjectLiteralProduction;
-    Add(Error(loc, message, kObjectLiteralProduction, arg));
-  }
-
   void RecordTailCallExpressionError(const Scanner::Location& loc,
                                      MessageTemplate::Template message,
                                      const char* arg = nullptr) {
@@ -316,7 +312,14 @@
     DCHECK_EQ(inner->reported_errors_, reported_errors_);
     DCHECK_EQ(inner->reported_errors_begin_, reported_errors_end_);
     DCHECK_EQ(inner->reported_errors_end_, reported_errors_->length());
-    if (merge_non_patterns) MergeNonPatterns(inner);
+    DCHECK_EQ(inner->non_patterns_to_rewrite_, non_patterns_to_rewrite_);
+    DCHECK_LE(non_pattern_begin_, inner->non_pattern_begin_);
+    DCHECK_LE(inner->non_pattern_begin_, non_patterns_to_rewrite_->length());
+    // Merge non-patterns from the inner classifier, or discard them.
+    if (merge_non_patterns)
+      inner->non_pattern_begin_ = non_patterns_to_rewrite_->length();
+    else
+      non_patterns_to_rewrite_->Rewind(inner->non_pattern_begin_);
     // Propagate errors from inner, but don't overwrite already recorded
     // errors.
     unsigned non_arrow_inner_invalid_productions =
@@ -393,10 +396,7 @@
     non_patterns_to_rewrite_->Rewind(non_pattern_begin_);
   }
 
-  V8_INLINE void MergeNonPatterns(ExpressionClassifier* inner) {
-    DCHECK_LE(non_pattern_begin_, inner->non_pattern_begin_);
-    inner->non_pattern_begin_ = inner->non_patterns_to_rewrite_->length();
-  }
+  ExpressionClassifier* previous() const { return previous_; }
 
  private:
   V8_INLINE const Error& reported_error(ErrorKind kind) const {
@@ -410,6 +410,9 @@
     // We should only be looking for an error when we know that one has
     // been reported.  But we're not...  So this is to make sure we have
     // the same behaviour.
+    UNREACHABLE();
+
+    // Make MSVC happy by returning an error from this inaccessible path.
     static Error none;
     return none;
   }
@@ -434,8 +437,10 @@
     reported_errors_end_++;
   }
 
+  typename Types::Base* base_;
+  ExpressionClassifier* previous_;
   Zone* zone_;
-  ZoneList<typename Traits::Type::Expression>* non_patterns_to_rewrite_;
+  ZoneList<typename Types::Expression>* non_patterns_to_rewrite_;
   ZoneList<Error>* reported_errors_;
   DuplicateFinder* duplicate_finder_;
   // The uint16_t for non_pattern_begin_ will not be enough in the case,
@@ -456,6 +461,8 @@
   // stack overflow while parsing.
   uint16_t reported_errors_begin_;
   uint16_t reported_errors_end_;
+
+  DISALLOW_COPY_AND_ASSIGN(ExpressionClassifier);
 };
 
 
diff --git a/src/parsing/func-name-inferrer.cc b/src/parsing/func-name-inferrer.cc
index 0821be0..a86e1c2 100644
--- a/src/parsing/func-name-inferrer.cc
+++ b/src/parsing/func-name-inferrer.cc
@@ -45,9 +45,11 @@
 }
 
 void FuncNameInferrer::RemoveAsyncKeywordFromEnd() {
-  DCHECK(names_stack_.length() > 0);
-  DCHECK(names_stack_.last().name->IsOneByteEqualTo("async"));
-  names_stack_.RemoveLast();
+  if (IsOpen()) {
+    DCHECK(names_stack_.length() > 0);
+    DCHECK(names_stack_.last().name->IsOneByteEqualTo("async"));
+    names_stack_.RemoveLast();
+  }
 }
 
 const AstString* FuncNameInferrer::MakeNameFromStack() {
diff --git a/src/parsing/func-name-inferrer.h b/src/parsing/func-name-inferrer.h
index cffd8a8..cc9204b 100644
--- a/src/parsing/func-name-inferrer.h
+++ b/src/parsing/func-name-inferrer.h
@@ -6,7 +6,7 @@
 #define V8_PARSING_FUNC_NAME_INFERRER_H_
 
 #include "src/handles.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/parsing/parameter-initializer-rewriter.cc b/src/parsing/parameter-initializer-rewriter.cc
index b12a80f..73224a2 100644
--- a/src/parsing/parameter-initializer-rewriter.cc
+++ b/src/parsing/parameter-initializer-rewriter.cc
@@ -47,9 +47,9 @@
   }
   // No need to visit the constructor since it will have the class
   // scope on its scope chain.
-  ZoneList<ObjectLiteralProperty*>* props = class_literal->properties();
+  ZoneList<ClassLiteralProperty*>* props = class_literal->properties();
   for (int i = 0; i < props->length(); ++i) {
-    ObjectLiteralProperty* prop = props->at(i);
+    ClassLiteralProperty* prop = props->at(i);
     if (!prop->key()->IsLiteral()) {
       Visit(prop->key());
     }
diff --git a/src/parsing/parameter-initializer-rewriter.h b/src/parsing/parameter-initializer-rewriter.h
index a0ff7d2..5e409b4 100644
--- a/src/parsing/parameter-initializer-rewriter.h
+++ b/src/parsing/parameter-initializer-rewriter.h
@@ -5,7 +5,7 @@
 #ifndef V8_PARSING_PARAMETER_EXPRESSION_REWRITER_H_
 #define V8_PARSING_PARAMETER_EXPRESSION_REWRITER_H_
 
-#include "src/types.h"
+#include "src/ast/ast-types.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/parsing/parse-info.cc b/src/parsing/parse-info.cc
index dfec061..5b9b5e4 100644
--- a/src/parsing/parse-info.cc
+++ b/src/parsing/parse-info.cc
@@ -33,7 +33,9 @@
 
 ParseInfo::ParseInfo(Zone* zone, Handle<JSFunction> function)
     : ParseInfo(zone, Handle<SharedFunctionInfo>(function->shared())) {
-  set_context(Handle<Context>(function->context()));
+  if (!function->context()->IsNativeContext()) {
+    set_outer_scope_info(handle(function->context()->scope_info()));
+  }
 }
 
 ParseInfo::ParseInfo(Zone* zone, Handle<SharedFunctionInfo> shared)
@@ -86,17 +88,13 @@
   return (compiler_hints_ & (1 << SharedFunctionInfo::kIsDeclaration)) != 0;
 }
 
-bool ParseInfo::is_arrow() const {
-  return (compiler_hints_ & (1 << SharedFunctionInfo::kIsArrow)) != 0;
+bool ParseInfo::requires_class_field_init() const {
+  return (compiler_hints_ &
+          (1 << SharedFunctionInfo::kRequiresClassFieldInit)) != 0;
 }
-
-bool ParseInfo::is_async() const {
-  return (compiler_hints_ & (1 << SharedFunctionInfo::kIsAsyncFunction)) != 0;
-}
-
-bool ParseInfo::is_default_constructor() const {
-  return (compiler_hints_ & (1 << SharedFunctionInfo::kIsDefaultConstructor)) !=
-         0;
+bool ParseInfo::is_class_field_initializer() const {
+  return (compiler_hints_ &
+          (1 << SharedFunctionInfo::kIsClassFieldInitializer)) != 0;
 }
 
 FunctionKind ParseInfo::function_kind() const {
diff --git a/src/parsing/parse-info.h b/src/parsing/parse-info.h
index 6176135..4aedae4 100644
--- a/src/parsing/parse-info.h
+++ b/src/parsing/parse-info.h
@@ -148,9 +148,8 @@
 
   // Getters for individual compiler hints.
   bool is_declaration() const;
-  bool is_arrow() const;
-  bool is_async() const;
-  bool is_default_constructor() const;
+  bool requires_class_field_init() const;
+  bool is_class_field_initializer() const;
   FunctionKind function_kind() const;
 
   //--------------------------------------------------------------------------
@@ -159,11 +158,15 @@
   Isolate* isolate() const { return isolate_; }
   Handle<SharedFunctionInfo> shared_info() const { return shared_; }
   Handle<Script> script() const { return script_; }
-  Handle<Context> context() const { return context_; }
+  MaybeHandle<ScopeInfo> maybe_outer_scope_info() const {
+    return maybe_outer_scope_info_;
+  }
   void clear_script() { script_ = Handle<Script>::null(); }
   void set_isolate(Isolate* isolate) { isolate_ = isolate; }
   void set_shared_info(Handle<SharedFunctionInfo> shared) { shared_ = shared; }
-  void set_context(Handle<Context> context) { context_ = context; }
+  void set_outer_scope_info(Handle<ScopeInfo> outer_scope_info) {
+    maybe_outer_scope_info_ = outer_scope_info;
+  }
   void set_script(Handle<Script> script) { script_ = script; }
   //--------------------------------------------------------------------------
 
@@ -178,7 +181,10 @@
   void ReopenHandlesInNewHandleScope() {
     shared_ = Handle<SharedFunctionInfo>(*shared_);
     script_ = Handle<Script>(*script_);
-    context_ = Handle<Context>(*context_);
+    Handle<ScopeInfo> outer_scope_info;
+    if (maybe_outer_scope_info_.ToHandle(&outer_scope_info)) {
+      maybe_outer_scope_info_ = Handle<ScopeInfo>(*outer_scope_info);
+    }
   }
 
 #ifdef DEBUG
@@ -224,7 +230,7 @@
   Isolate* isolate_;
   Handle<SharedFunctionInfo> shared_;
   Handle<Script> script_;
-  Handle<Context> context_;
+  MaybeHandle<ScopeInfo> maybe_outer_scope_info_;
 
   //----------- Inputs+Outputs of parsing and scope analysis -----------------
   ScriptData** cached_data_;  // used if available, populated if requested.
diff --git a/src/parsing/parser-base.h b/src/parsing/parser-base.h
index b8703d0..1ebbee4 100644
--- a/src/parsing/parser-base.h
+++ b/src/parsing/parser-base.h
@@ -5,6 +5,7 @@
 #ifndef V8_PARSING_PARSER_BASE_H
 #define V8_PARSING_PARSER_BASE_H
 
+#include "src/ast/ast.h"
 #include "src/ast/scopes.h"
 #include "src/bailout-reason.h"
 #include "src/base/hashmap.h"
@@ -56,59 +57,6 @@
   return static_cast<T>(bitfield) & static_cast<T>(mask);
 }
 
-enum class MethodKind {
-  kNormal = 0,
-  kStatic = 1 << 0,
-  kGenerator = 1 << 1,
-  kStaticGenerator = kStatic | kGenerator,
-  kAsync = 1 << 2,
-  kStaticAsync = kStatic | kAsync,
-
-  /* Any non-ordinary method kinds */
-  kSpecialMask = kGenerator | kAsync
-};
-
-inline bool IsValidMethodKind(MethodKind kind) {
-  return kind == MethodKind::kNormal || kind == MethodKind::kStatic ||
-         kind == MethodKind::kGenerator ||
-         kind == MethodKind::kStaticGenerator || kind == MethodKind::kAsync ||
-         kind == MethodKind::kStaticAsync;
-}
-
-static inline MethodKind operator|(MethodKind lhs, MethodKind rhs) {
-  typedef unsigned char T;
-  return static_cast<MethodKind>(static_cast<T>(lhs) | static_cast<T>(rhs));
-}
-
-static inline MethodKind& operator|=(MethodKind& lhs, const MethodKind& rhs) {
-  lhs = lhs | rhs;
-  DCHECK(IsValidMethodKind(lhs));
-  return lhs;
-}
-
-static inline bool operator&(MethodKind bitfield, MethodKind mask) {
-  typedef unsigned char T;
-  return static_cast<T>(bitfield) & static_cast<T>(mask);
-}
-
-inline bool IsNormalMethod(MethodKind kind) {
-  return kind == MethodKind::kNormal;
-}
-
-inline bool IsSpecialMethod(MethodKind kind) {
-  return kind & MethodKind::kSpecialMask;
-}
-
-inline bool IsStaticMethod(MethodKind kind) {
-  return kind & MethodKind::kStatic;
-}
-
-inline bool IsGeneratorMethod(MethodKind kind) {
-  return kind & MethodKind::kGenerator;
-}
-
-inline bool IsAsyncMethod(MethodKind kind) { return kind & MethodKind::kAsync; }
-
 struct FormalParametersBase {
   explicit FormalParametersBase(DeclarationScope* scope) : scope(scope) {}
   DeclarationScope* scope;
@@ -126,8 +74,8 @@
 // thus it must never be used where only a single statement
 // is correct (e.g. an if statement branch w/o braces)!
 
-#define CHECK_OK_CUSTOM(x) ok); \
-  if (!*ok) return this->x();   \
+#define CHECK_OK_CUSTOM(x, ...) ok);       \
+  if (!*ok) return impl()->x(__VA_ARGS__); \
   ((void)0
 #define DUMMY )  // to make indentation work
 #undef DUMMY
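
The CHECK_OK_CUSTOM idiom above is easier to follow as a standalone sketch: the macro is written as the final argument of a call that takes a trailing bool* ok parameter, supplies ok, closes that call's parenthesis itself, and returns early with a caller-chosen value when parsing failed. The sketch below uses made-up helpers (Expect, ParsePair, FailureValue) and drops the impl()-> indirection of the real macro.

// Sketch only: a simplified stand-in for the CHECK_OK_CUSTOM idiom.
#include <cstdio>

static int FailureValue(int code) { return code; }

#define CHECK_OK_CUSTOM(x, ...) ok);  \
  if (!*ok) return x(__VA_ARGS__);    \
  ((void)0
#define DUMMY )  // to make indentation work
#undef DUMMY

static void Expect(char want, char got, bool* ok) { *ok = (want == got); }

// The first statement below expands to:
//   Expect('(', first, ok); if (!*ok) return FailureValue(-1); ((void)0);
static int ParsePair(char first, char second, bool* ok) {
  Expect('(', first, CHECK_OK_CUSTOM(FailureValue, -1));
  Expect(')', second, CHECK_OK_CUSTOM(FailureValue, -1));
  return 42;
}

int main() {
  bool ok = true;
  std::printf("%d\n", ParsePair('(', 'x', &ok));  // prints -1; ok is now false
  return 0;
}
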
@@ -140,93 +88,86 @@
 // following the Curiously Recurring Template Pattern (CRTP).
 // The structure of the parser objects is roughly the following:
 //
-//   // Common denominator, needed to avoid cyclic dependency.
-//   // Instances of this template will end up with very minimal
-//   // definitions, ideally containing just typedefs.
+//   // A structure template containing type definitions, needed to
+//   // avoid a cyclic dependency.
 //   template <typename Impl>
-//   class ParserBaseTraits;
-
+//   struct ParserTypes;
+//
 //   // The parser base object, which should just implement pure
 //   // parser behavior.  The Impl parameter is the actual derived
 //   // class (according to CRTP), which implements impure parser
 //   // behavior.
 //   template <typename Impl>
-//   class ParserBase : public ParserBaseTraits<Impl> { ... };
+//   class ParserBase { ... };
 //
 //   // And then, for each parser variant (e.g., parser, preparser, etc):
 //   class Parser;
 //
 //   template <>
-//   class ParserBaseTraits<Parser> { ... };
+//   class ParserTypes<Parser> { ... };
 //
 //   class Parser : public ParserBase<Parser> { ... };
 //
-// TODO(nikolaos): Currently the traits objects contain many things
-// that will be moved to the implementation objects or to the parser
-// base.  The following comments will have to change, when this happens.
+// The parser base object implements pure parsing, according to the
+// language grammar.  Different parser implementations may exhibit
+// different parser-driven behavior that is not considered pure
+// parsing, e.g., early error detection and reporting, AST generation, etc.
 
-// The traits class template encapsulates the differences between
-// parser/pre-parser implementations.  In particular:
-
-// - Return types: For example, Parser functions return Expression* and
-// PreParser functions return PreParserExpression.
-
-// - Creating parse tree nodes: Parser generates an AST during the recursive
-// descent. PreParser doesn't create a tree. Instead, it passes around minimal
-// data objects (PreParserExpression, PreParserIdentifier etc.) which contain
-// just enough data for the upper layer functions. PreParserFactory is
-// responsible for creating these dummy objects. It provides a similar kind of
-// interface as AstNodeFactory, so ParserBase doesn't need to care which one is
-// used.
-
-// - Miscellaneous other tasks interleaved with the recursive descent. For
-// example, Parser keeps track of which function literals should be marked as
-// pretenured, and PreParser doesn't care.
-
-// The traits are expected to contain the following typedefs:
+// The ParserTypes structure encapsulates the differences in the
+// types used in parsing methods.  E.g., Parser methods use Expression*
+// and PreParser methods use PreParserExpression.  For any given parser
+// implementation class Impl, ParserTypes<Impl> must contain these typedefs:
+//
 // template <>
-// class ParserBaseTraits<Impl> {
-//   // In particular...
-//   struct Type {
-//     typedef GeneratorVariable;
-//     typedef AstProperties;
-//     typedef ExpressionClassifier;
-//     // Return types for traversing functions.
-//     typedef Identifier;
-//     typedef Expression;
-//     typedef YieldExpression;
-//     typedef FunctionLiteral;
-//     typedef ClassLiteral;
-//     typedef Literal;
-//     typedef ObjectLiteralProperty;
-//     typedef ExpressionList;
-//     typedef PropertyList;
-//     typedef FormalParameter;
-//     typedef FormalParameters;
-//     typedef StatementList;
-//     // For constructing objects returned by the traversing functions.
-//     typedef Factory;
-//   };
-//   // ...
+// struct ParserTypes<Impl> {
+//   // Synonyms for ParserBase<Impl> and Impl, respectively.
+//   typedef Base;
+//   typedef Impl;
+//   // TODO(nikolaos): this one will probably go away, as it is
+//   // not related to pure parsing.
+//   typedef Variable;
+//   // Return types for traversing functions.
+//   typedef Identifier;
+//   typedef Expression;
+//   typedef FunctionLiteral;
+//   typedef ObjectLiteralProperty;
+//   typedef ClassLiteralProperty;
+//   typedef ExpressionList;
+//   typedef ObjectPropertyList;
+//   typedef ClassPropertyList;
+//   typedef FormalParameters;
+//   typedef Statement;
+//   typedef StatementList;
+//   typedef Block;
+//   typedef BreakableStatement;
+//   typedef IterationStatement;
+//   // For constructing objects returned by the traversing functions.
+//   typedef Factory;
+//   // For other implementation-specific tasks.
+//   typedef Target;
+//   typedef TargetScope;
 // };
 
 template <typename Impl>
-class ParserBaseTraits;
+struct ParserTypes;
 
 template <typename Impl>
-class ParserBase : public ParserBaseTraits<Impl> {
+class ParserBase {
  public:
-  // Shorten type names defined by Traits.
-  typedef ParserBaseTraits<Impl> Traits;
-  typedef typename Traits::Type::Expression ExpressionT;
-  typedef typename Traits::Type::Identifier IdentifierT;
-  typedef typename Traits::Type::FormalParameter FormalParameterT;
-  typedef typename Traits::Type::FormalParameters FormalParametersT;
-  typedef typename Traits::Type::FunctionLiteral FunctionLiteralT;
-  typedef typename Traits::Type::Literal LiteralT;
-  typedef typename Traits::Type::ObjectLiteralProperty ObjectLiteralPropertyT;
-  typedef typename Traits::Type::StatementList StatementListT;
-  typedef typename Traits::Type::ExpressionClassifier ExpressionClassifier;
+  // Shorten type names defined by ParserTypes<Impl>.
+  typedef ParserTypes<Impl> Types;
+  typedef typename Types::Identifier IdentifierT;
+  typedef typename Types::Expression ExpressionT;
+  typedef typename Types::FunctionLiteral FunctionLiteralT;
+  typedef typename Types::ObjectLiteralProperty ObjectLiteralPropertyT;
+  typedef typename Types::ClassLiteralProperty ClassLiteralPropertyT;
+  typedef typename Types::ExpressionList ExpressionListT;
+  typedef typename Types::FormalParameters FormalParametersT;
+  typedef typename Types::Statement StatementT;
+  typedef typename Types::StatementList StatementListT;
+  typedef typename Types::Block BlockT;
+  typedef typename v8::internal::ExpressionClassifier<Types>
+      ExpressionClassifier;
 
   // All implementation-specific methods must be called through this.
   Impl* impl() { return static_cast<Impl*>(this); }
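
The ParserTypes / ParserBase / Impl arrangement described in the comment block above is the Curiously Recurring Template Pattern: the base knows the concrete parser type at compile time and reaches it through impl(), so no virtual dispatch is needed. A minimal sketch under invented names (MiniTypes, MiniParserBase, MiniParser), not the real V8 classes:

#include <iostream>
#include <string>

// Sketch only: type definitions for each concrete parser, specialized below.
template <typename Impl>
struct MiniTypes;

// The base implements the "pure" logic and reaches the derived class
// (known at compile time) through impl().
template <typename Impl>
class MiniParserBase {
 public:
  typedef typename MiniTypes<Impl>::Expression ExpressionT;

  Impl* impl() { return static_cast<Impl*>(this); }

  ExpressionT ParseLiteral(const std::string& token) {
    // Pure parsing decision here; node construction is delegated to Impl.
    return impl()->MakeLiteral(token);
  }
};

class MiniParser;  // forward declaration breaks the cyclic dependency

template <>
struct MiniTypes<MiniParser> {
  typedef std::string Expression;  // a full parser would use Expression*
};

class MiniParser : public MiniParserBase<MiniParser> {
 public:
  std::string MakeLiteral(const std::string& token) {
    return "Literal(" + token + ")";
  }
};

int main() {
  MiniParser parser;
  std::cout << parser.ParseLiteral("42") << "\n";  // prints Literal(42)
}
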
@@ -246,6 +187,7 @@
         parsing_module_(false),
         stack_limit_(stack_limit),
         zone_(zone),
+        classifier_(nullptr),
         scanner_(scanner),
         stack_overflow_(false),
         allow_lazy_(false),
@@ -257,7 +199,8 @@
         allow_harmony_function_sent_(false),
         allow_harmony_async_await_(false),
         allow_harmony_restrictive_generators_(false),
-        allow_harmony_trailing_commas_(false) {}
+        allow_harmony_trailing_commas_(false),
+        allow_harmony_class_fields_(false) {}
 
 #define ALLOW_ACCESSORS(name)                           \
   bool allow_##name() const { return allow_##name##_; } \
@@ -273,6 +216,7 @@
   ALLOW_ACCESSORS(harmony_async_await);
   ALLOW_ACCESSORS(harmony_restrictive_generators);
   ALLOW_ACCESSORS(harmony_trailing_commas);
+  ALLOW_ACCESSORS(harmony_class_fields);
 
 #undef ALLOW_ACCESSORS
 
@@ -280,7 +224,12 @@
 
   void set_stack_limit(uintptr_t stack_limit) { stack_limit_ = stack_limit; }
 
+  Zone* zone() const { return zone_; }
+
  protected:
+  friend class v8::internal::ExpressionClassifier<ParserTypes<Impl>>;
+
+  // clang-format off
   enum AllowRestrictedIdentifiers {
     kAllowRestrictedIdentifiers,
     kDontAllowRestrictedIdentifiers
@@ -291,14 +240,26 @@
     PARSE_EAGERLY
   };
 
+  enum LazyParsingResult {
+    kLazyParsingComplete,
+    kLazyParsingAborted
+  };
+
   enum VariableDeclarationContext {
     kStatementListItem,
     kStatement,
     kForStatement
   };
 
+  enum class FunctionBodyType {
+    kNormal,
+    kSingleExpression
+  };
+  // clang-format on
+
   class Checkpoint;
-  class ObjectLiteralCheckerBase;
+  class ClassLiteralChecker;
+  class ObjectLiteralChecker;
 
   // ---------------------------------------------------------------------------
   // ScopeState and its subclasses implement the parser's scope stack.
@@ -333,8 +294,8 @@
     // allocation.
     // TODO(verwaest): Move to LazyBlockState class that only allocates the
     // scope when needed.
-    explicit BlockState(ScopeState** scope_stack)
-        : ScopeState(scope_stack, NewScope(*scope_stack)) {}
+    explicit BlockState(Zone* zone, ScopeState** scope_stack)
+        : ScopeState(scope_stack, NewScope(zone, *scope_stack)) {}
 
     void SetNonlinear() { this->scope()->SetNonlinear(); }
     void set_start_position(int pos) { this->scope()->set_start_position(pos); }
@@ -348,9 +309,8 @@
     }
 
    private:
-    Scope* NewScope(ScopeState* outer_state) {
+    Scope* NewScope(Zone* zone, ScopeState* outer_state) {
       Scope* parent = outer_state->scope();
-      Zone* zone = outer_state->zone();
       return new (zone) Scope(zone, parent, BLOCK_SCOPE);
     }
   };
@@ -384,14 +344,6 @@
       expressions_.Add(expr, zone_);
     }
 
-    void AddExplicitTailCall(ExpressionT expr, const Scanner::Location& loc) {
-      if (!has_explicit_tail_calls()) {
-        loc_ = loc;
-        has_explicit_tail_calls_ = true;
-      }
-      expressions_.Add(expr, zone_);
-    }
-
     void Append(const TailCallExpressionList& other) {
       if (!has_explicit_tail_calls()) {
         loc_ = other.loc_;
@@ -425,9 +377,13 @@
   class FunctionState final : public ScopeState {
    public:
     FunctionState(FunctionState** function_state_stack,
-                  ScopeState** scope_stack, Scope* scope, FunctionKind kind);
+                  ScopeState** scope_stack, DeclarationScope* scope);
     ~FunctionState();
 
+    DeclarationScope* scope() const {
+      return ScopeState::scope()->AsDeclarationScope();
+    }
+
     int NextMaterializedLiteralIndex() {
       return next_materialized_literal_index_++;
     }
@@ -442,24 +398,27 @@
     void AddProperty() { expected_property_count_++; }
     int expected_property_count() { return expected_property_count_; }
 
-    bool is_generator() const { return IsGeneratorFunction(kind_); }
-    bool is_async_function() const { return IsAsyncFunction(kind_); }
-    bool is_resumable() const { return is_generator() || is_async_function(); }
-
-    FunctionKind kind() const { return kind_; }
+    FunctionKind kind() const { return scope()->function_kind(); }
     FunctionState* outer() const { return outer_function_state_; }
 
-    void set_generator_object_variable(
-        typename Traits::Type::GeneratorVariable* variable) {
+    void set_generator_object_variable(typename Types::Variable* variable) {
       DCHECK(variable != NULL);
-      DCHECK(is_resumable());
+      DCHECK(IsResumableFunction(kind()));
       generator_object_variable_ = variable;
     }
-    typename Traits::Type::GeneratorVariable* generator_object_variable()
-        const {
+    typename Types::Variable* generator_object_variable() const {
       return generator_object_variable_;
     }
 
+    void set_promise_variable(typename Types::Variable* variable) {
+      DCHECK(variable != NULL);
+      DCHECK(IsAsyncFunction(kind()));
+      promise_variable_ = variable;
+    }
+    typename Types::Variable* promise_variable() const {
+      return promise_variable_;
+    }
+
     const ZoneList<DestructuringAssignment>&
         destructuring_assignments_to_rewrite() const {
       return destructuring_assignments_to_rewrite_;
@@ -474,14 +433,6 @@
         tail_call_expressions_.AddImplicitTailCall(expression);
       }
     }
-    void AddExplicitTailCallExpression(ExpressionT expression,
-                                       const Scanner::Location& loc) {
-      DCHECK(expression->IsCall());
-      if (return_expr_context() ==
-          ReturnExprContext::kInsideValidReturnStatement) {
-        tail_call_expressions_.AddExplicitTailCall(expression, loc);
-      }
-    }
 
     ZoneList<typename ExpressionClassifier::Error>* GetReportedErrorList() {
       return &reported_errors_;
@@ -530,11 +481,13 @@
     // Properties count estimation.
     int expected_property_count_;
 
-    FunctionKind kind_;
     // For generators, this variable may hold the generator object. This variable
     // is used by yield expressions and return statements. It is not necessary
     // for generator functions to have this variable set.
     Variable* generator_object_variable_;
+    // For async functions, this variable holds a temporary for the Promise
+    // being created as output of the async function.
+    Variable* promise_variable_;
 
     FunctionState** function_state_stack_;
     FunctionState* outer_function_state_;
@@ -644,8 +597,97 @@
     Mode old_mode_;
   };
 
+  struct DeclarationDescriptor {
+    enum Kind { NORMAL, PARAMETER };
+    Scope* scope;
+    Scope* hoist_scope;
+    VariableMode mode;
+    int declaration_pos;
+    int initialization_pos;
+    Kind declaration_kind;
+  };
+
+  struct DeclarationParsingResult {
+    struct Declaration {
+      Declaration(ExpressionT pattern, int initializer_position,
+                  ExpressionT initializer)
+          : pattern(pattern),
+            initializer_position(initializer_position),
+            initializer(initializer) {}
+
+      ExpressionT pattern;
+      int initializer_position;
+      ExpressionT initializer;
+    };
+
+    DeclarationParsingResult()
+        : declarations(4),
+          first_initializer_loc(Scanner::Location::invalid()),
+          bindings_loc(Scanner::Location::invalid()) {}
+
+    DeclarationDescriptor descriptor;
+    List<Declaration> declarations;
+    Scanner::Location first_initializer_loc;
+    Scanner::Location bindings_loc;
+  };
+
+  struct CatchInfo {
+   public:
+    explicit CatchInfo(ParserBase* parser)
+        : name(parser->impl()->EmptyIdentifier()),
+          variable(nullptr),
+          pattern(parser->impl()->EmptyExpression()),
+          scope(nullptr),
+          init_block(parser->impl()->NullBlock()),
+          inner_block(parser->impl()->NullBlock()),
+          for_promise_reject(false),
+          bound_names(1, parser->zone()),
+          tail_call_expressions(parser->zone()) {}
+    IdentifierT name;
+    Variable* variable;
+    ExpressionT pattern;
+    Scope* scope;
+    BlockT init_block;
+    BlockT inner_block;
+    bool for_promise_reject;
+    ZoneList<const AstRawString*> bound_names;
+    TailCallExpressionList tail_call_expressions;
+  };
+
+  struct ForInfo {
+   public:
+    explicit ForInfo(ParserBase* parser)
+        : bound_names(1, parser->zone()),
+          mode(ForEachStatement::ENUMERATE),
+          each_loc(),
+          parsing_result() {}
+    ZoneList<const AstRawString*> bound_names;
+    ForEachStatement::VisitMode mode;
+    Scanner::Location each_loc;
+    DeclarationParsingResult parsing_result;
+  };
+
+  struct ClassInfo {
+   public:
+    explicit ClassInfo(ParserBase* parser)
+        : proxy(nullptr),
+          extends(parser->impl()->EmptyExpression()),
+          properties(parser->impl()->NewClassPropertyList(4)),
+          instance_field_initializers(parser->impl()->NewExpressionList(0)),
+          constructor(parser->impl()->EmptyFunctionLiteral()),
+          has_seen_constructor(false),
+          static_initializer_var(nullptr) {}
+    VariableProxy* proxy;
+    ExpressionT extends;
+    typename Types::ClassPropertyList properties;
+    ExpressionListT instance_field_initializers;
+    FunctionLiteralT constructor;
+    bool has_seen_constructor;
+    Variable* static_initializer_var;
+  };
+
   DeclarationScope* NewScriptScope() const {
-    return new (zone()) DeclarationScope(zone());
+    return new (zone()) DeclarationScope(zone(), ast_value_factory());
   }
 
   DeclarationScope* NewVarblockScope() const {
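
To make the DeclarationParsingResult aggregate introduced above concrete, the sketch below (simplified stand-in types, hypothetical positions) shows roughly what ParseVariableDeclarations would record for the source let x = f(), y; (one Declaration per binding, with first_initializer_loc and bindings_loc kept for later error reporting, e.g. rejecting an initializer in a for-in/of head).

#include <string>
#include <vector>

// Sketch only: simplified stand-ins for ExpressionT and Scanner::Location.
struct Location {
  int beg_pos;
  int end_pos;
};

struct Declaration {
  std::string pattern;       // "x" or "y" (or a destructuring pattern)
  int initializer_position;  // source position of the initializer, or -1
  std::string initializer;   // "f()", or empty when there is none
};

struct DeclarationParsingResult {
  std::vector<Declaration> declarations;
  Location first_initializer_loc;  // location of the first "= ..." seen
  Location bindings_loc;           // span covering all bindings
};

// Roughly what gets recorded for:  let x = f(), y;
DeclarationParsingResult ExampleResult() {
  DeclarationParsingResult result;
  result.declarations.push_back({"x", 8, "f()"});
  result.declarations.push_back({"y", -1, ""});
  result.first_initializer_loc = {8, 11};  // the "f()" initializer
  result.bindings_loc = {4, 14};           // from "x" through "y"
  return result;
}
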
@@ -653,7 +695,7 @@
   }
 
   ModuleScope* NewModuleScope(DeclarationScope* parent) const {
-    return new (zone()) ModuleScope(zone(), parent, ast_value_factory());
+    return new (zone()) ModuleScope(parent, ast_value_factory());
   }
 
   DeclarationScope* NewEvalScope(Scope* parent) const {
@@ -683,12 +725,18 @@
         new (zone()) DeclarationScope(zone(), scope(), FUNCTION_SCOPE, kind);
     // TODO(verwaest): Move into the DeclarationScope constructor.
     if (!IsArrowFunction(kind)) {
-      result->DeclareThis(ast_value_factory());
       result->DeclareDefaultFunctionVariables(ast_value_factory());
     }
     return result;
   }
 
+  V8_INLINE DeclarationScope* GetDeclarationScope() const {
+    return scope()->GetDeclarationScope();
+  }
+  V8_INLINE DeclarationScope* GetClosureScope() const {
+    return scope()->GetClosureScope();
+  }
+
   Scanner* scanner() const { return scanner_; }
   AstValueFactory* ast_value_factory() const { return ast_value_factory_; }
   int position() const { return scanner_->location().beg_pos; }
@@ -696,7 +744,6 @@
   bool stack_overflow() const { return stack_overflow_; }
   void set_stack_overflow() { stack_overflow_ = true; }
   Mode mode() const { return mode_; }
-  Zone* zone() const { return zone_; }
 
   INLINE(Token::Value peek()) {
     if (stack_overflow_) return Token::ILLEGAL;
@@ -761,8 +808,12 @@
     Expect(Token::SEMICOLON, ok);
   }
 
-  // A dummy function, just useful as an argument to CHECK_OK_CUSTOM.
+  // Dummy functions, just useful as arguments to CHECK_OK_CUSTOM.
   static void Void() {}
+  template <typename T>
+  static T Return(T result) {
+    return result;
+  }
 
   bool is_any_identifier(Token::Value token) {
     return token == Token::IDENTIFIER || token == Token::ENUM ||
@@ -796,7 +847,7 @@
     }
   }
 
-  bool CheckInOrOf(ForEachStatement::VisitMode* visit_mode, bool* ok) {
+  bool CheckInOrOf(ForEachStatement::VisitMode* visit_mode) {
     if (Check(Token::IN)) {
       *visit_mode = ForEachStatement::ENUMERATE;
       return true;
@@ -818,21 +869,19 @@
     Scanner::Location octal = scanner()->octal_position();
     if (octal.IsValid() && beg_pos <= octal.beg_pos &&
         octal.end_pos <= end_pos) {
-      ReportMessageAt(octal, message);
+      impl()->ReportMessageAt(octal, message);
       scanner()->clear_octal_position();
       *ok = false;
     }
   }
   // for now, this check just collects statistics.
-  void CheckDecimalLiteralWithLeadingZero(int* use_counts, int beg_pos,
-                                          int end_pos) {
+  void CheckDecimalLiteralWithLeadingZero(int beg_pos, int end_pos) {
     Scanner::Location token_location =
         scanner()->decimal_with_leading_zero_position();
     if (token_location.IsValid() && beg_pos <= token_location.beg_pos &&
         token_location.end_pos <= end_pos) {
       scanner()->clear_decimal_with_leading_zero_position();
-      if (use_counts != nullptr)
-        ++use_counts[v8::Isolate::kDecimalWithLeadingZeroInStrictMode];
+      impl()->CountUsage(v8::Isolate::kDecimalWithLeadingZeroInStrictMode);
     }
   }
 
@@ -846,9 +895,7 @@
                       ok);
   }
 
-  void CheckDestructuringElement(ExpressionT element,
-                                 ExpressionClassifier* classifier, int beg_pos,
-                                 int end_pos);
+  void CheckDestructuringElement(ExpressionT element, int beg_pos, int end_pos);
 
   // Checking the name of a function literal. This has to be done after parsing
   // the function, since the function can declare itself strict.
@@ -859,14 +906,14 @@
     // The function name needs to be checked in strict mode.
     if (is_sloppy(language_mode)) return;
 
-    if (this->IsEvalOrArguments(function_name)) {
-      Traits::ReportMessageAt(function_name_loc,
+    if (impl()->IsEvalOrArguments(function_name)) {
+      impl()->ReportMessageAt(function_name_loc,
                               MessageTemplate::kStrictEvalArguments);
       *ok = false;
       return;
     }
     if (function_name_validity == kFunctionNameIsStrictReserved) {
-      Traits::ReportMessageAt(function_name_loc,
+      impl()->ReportMessageAt(function_name_loc,
                               MessageTemplate::kUnexpectedStrictReserved);
       *ok = false;
       return;
@@ -880,50 +927,45 @@
     return Token::Precedence(token);
   }
 
-  typename Traits::Type::Factory* factory() { return &ast_node_factory_; }
+  typename Types::Factory* factory() { return &ast_node_factory_; }
 
   DeclarationScope* GetReceiverScope() const {
     return scope()->GetReceiverScope();
   }
   LanguageMode language_mode() { return scope()->language_mode(); }
-  bool is_generator() const { return function_state_->is_generator(); }
-  bool is_async_function() const {
-    return function_state_->is_async_function();
+  void RaiseLanguageMode(LanguageMode mode) {
+    LanguageMode old = scope()->language_mode();
+    impl()->SetLanguageMode(scope(), old > mode ? old : mode);
   }
-  bool is_resumable() const { return function_state_->is_resumable(); }
+  bool is_generator() const {
+    return IsGeneratorFunction(function_state_->kind());
+  }
+  bool is_async_function() const {
+    return IsAsyncFunction(function_state_->kind());
+  }
+  bool is_resumable() const {
+    return IsResumableFunction(function_state_->kind());
+  }
 
   // Report syntax errors.
-  void ReportMessage(MessageTemplate::Template message, const char* arg = NULL,
+  void ReportMessage(MessageTemplate::Template message) {
+    Scanner::Location source_location = scanner()->location();
+    impl()->ReportMessageAt(source_location, message,
+                            static_cast<const char*>(nullptr), kSyntaxError);
+  }
+
+  template <typename T>
+  void ReportMessage(MessageTemplate::Template message, T arg,
                      ParseErrorType error_type = kSyntaxError) {
     Scanner::Location source_location = scanner()->location();
-    Traits::ReportMessageAt(source_location, message, arg, error_type);
-  }
-
-  void ReportMessage(MessageTemplate::Template message, const AstRawString* arg,
-                     ParseErrorType error_type = kSyntaxError) {
-    Scanner::Location source_location = scanner()->location();
-    Traits::ReportMessageAt(source_location, message, arg, error_type);
-  }
-
-  void ReportMessageAt(Scanner::Location location,
-                       MessageTemplate::Template message,
-                       const char* arg = NULL,
-                       ParseErrorType error_type = kSyntaxError) {
-    Traits::ReportMessageAt(location, message, arg, error_type);
-  }
-
-  void ReportMessageAt(Scanner::Location location,
-                       MessageTemplate::Template message,
-                       const AstRawString* arg,
-                       ParseErrorType error_type = kSyntaxError) {
-    Traits::ReportMessageAt(location, message, arg, error_type);
+    impl()->ReportMessageAt(source_location, message, arg, error_type);
   }
 
   void ReportMessageAt(Scanner::Location location,
                        MessageTemplate::Template message,
                        ParseErrorType error_type) {
-    ReportMessageAt(location, message, static_cast<const char*>(nullptr),
-                    error_type);
+    impl()->ReportMessageAt(location, message,
+                            static_cast<const char*>(nullptr), error_type);
   }
 
   void GetUnexpectedTokenMessage(
@@ -938,59 +980,47 @@
 
   void ReportClassifierError(
       const typename ExpressionClassifier::Error& error) {
-    Traits::ReportMessageAt(error.location, error.message, error.arg,
+    impl()->ReportMessageAt(error.location, error.message, error.arg,
                             error.type);
   }
 
-  void ValidateExpression(const ExpressionClassifier* classifier, bool* ok) {
-    if (!classifier->is_valid_expression() ||
-        classifier->has_object_literal_error()) {
-      const Scanner::Location& a = classifier->expression_error().location;
-      const Scanner::Location& b =
-          classifier->object_literal_error().location;
-      if (a.beg_pos < 0 || (b.beg_pos >= 0 && a.beg_pos > b.beg_pos)) {
-        ReportClassifierError(classifier->object_literal_error());
-      } else {
-        ReportClassifierError(classifier->expression_error());
-      }
+  void ValidateExpression(bool* ok) {
+    if (!classifier()->is_valid_expression()) {
+      ReportClassifierError(classifier()->expression_error());
       *ok = false;
     }
   }
 
-  void ValidateFormalParameterInitializer(
-      const ExpressionClassifier* classifier, bool* ok) {
-    if (!classifier->is_valid_formal_parameter_initializer()) {
-      ReportClassifierError(classifier->formal_parameter_initializer_error());
+  void ValidateFormalParameterInitializer(bool* ok) {
+    if (!classifier()->is_valid_formal_parameter_initializer()) {
+      ReportClassifierError(classifier()->formal_parameter_initializer_error());
       *ok = false;
     }
   }
 
-  void ValidateBindingPattern(const ExpressionClassifier* classifier,
-                              bool* ok) {
-    if (!classifier->is_valid_binding_pattern()) {
-      ReportClassifierError(classifier->binding_pattern_error());
+  void ValidateBindingPattern(bool* ok) {
+    if (!classifier()->is_valid_binding_pattern()) {
+      ReportClassifierError(classifier()->binding_pattern_error());
       *ok = false;
     }
   }
 
-  void ValidateAssignmentPattern(const ExpressionClassifier* classifier,
-                                 bool* ok) {
-    if (!classifier->is_valid_assignment_pattern()) {
-      ReportClassifierError(classifier->assignment_pattern_error());
+  void ValidateAssignmentPattern(bool* ok) {
+    if (!classifier()->is_valid_assignment_pattern()) {
+      ReportClassifierError(classifier()->assignment_pattern_error());
       *ok = false;
     }
   }
 
-  void ValidateFormalParameters(const ExpressionClassifier* classifier,
-                                LanguageMode language_mode,
+  void ValidateFormalParameters(LanguageMode language_mode,
                                 bool allow_duplicates, bool* ok) {
     if (!allow_duplicates &&
-        !classifier->is_valid_formal_parameter_list_without_duplicates()) {
-      ReportClassifierError(classifier->duplicate_formal_parameter_error());
+        !classifier()->is_valid_formal_parameter_list_without_duplicates()) {
+      ReportClassifierError(classifier()->duplicate_formal_parameter_error());
       *ok = false;
     } else if (is_strict(language_mode) &&
-               !classifier->is_valid_strict_mode_formal_parameters()) {
-      ReportClassifierError(classifier->strict_mode_formal_parameter_error());
+               !classifier()->is_valid_strict_mode_formal_parameters()) {
+      ReportClassifierError(classifier()->strict_mode_formal_parameter_error());
       *ok = false;
     }
   }
@@ -999,78 +1029,73 @@
     return is_any_identifier(token) || token == Token::LPAREN;
   }
 
-  void ValidateArrowFormalParameters(const ExpressionClassifier* classifier,
-                                     ExpressionT expr,
+  void ValidateArrowFormalParameters(ExpressionT expr,
                                      bool parenthesized_formals, bool is_async,
                                      bool* ok) {
-    if (classifier->is_valid_binding_pattern()) {
+    if (classifier()->is_valid_binding_pattern()) {
       // A simple arrow formal parameter: IDENTIFIER => BODY.
-      if (!this->IsIdentifier(expr)) {
-        Traits::ReportMessageAt(scanner()->location(),
+      if (!impl()->IsIdentifier(expr)) {
+        impl()->ReportMessageAt(scanner()->location(),
                                 MessageTemplate::kUnexpectedToken,
                                 Token::String(scanner()->current_token()));
         *ok = false;
       }
-    } else if (!classifier->is_valid_arrow_formal_parameters()) {
+    } else if (!classifier()->is_valid_arrow_formal_parameters()) {
       // If after parsing the expr, we see an error but the expression is
       // neither a valid binding pattern nor a valid parenthesized formal
       // parameter list, show the "arrow formal parameters" error if the formals
       // started with a parenthesis, and the binding pattern error otherwise.
       const typename ExpressionClassifier::Error& error =
-          parenthesized_formals ? classifier->arrow_formal_parameters_error()
-                                : classifier->binding_pattern_error();
+          parenthesized_formals ? classifier()->arrow_formal_parameters_error()
+                                : classifier()->binding_pattern_error();
       ReportClassifierError(error);
       *ok = false;
     }
-    if (is_async && !classifier->is_valid_async_arrow_formal_parameters()) {
+    if (is_async && !classifier()->is_valid_async_arrow_formal_parameters()) {
       const typename ExpressionClassifier::Error& error =
-          classifier->async_arrow_formal_parameters_error();
+          classifier()->async_arrow_formal_parameters_error();
       ReportClassifierError(error);
       *ok = false;
     }
   }
 
-  void ValidateLetPattern(const ExpressionClassifier* classifier, bool* ok) {
-    if (!classifier->is_valid_let_pattern()) {
-      ReportClassifierError(classifier->let_pattern_error());
+  void ValidateLetPattern(bool* ok) {
+    if (!classifier()->is_valid_let_pattern()) {
+      ReportClassifierError(classifier()->let_pattern_error());
       *ok = false;
     }
   }
 
-  void CheckNoTailCallExpressions(const ExpressionClassifier* classifier,
-                                  bool* ok) {
-    if (FLAG_harmony_explicit_tailcalls &&
-        classifier->has_tail_call_expression()) {
-      ReportClassifierError(classifier->tail_call_expression_error());
-      *ok = false;
-    }
-  }
-
-  void ExpressionUnexpectedToken(ExpressionClassifier* classifier) {
+  void ExpressionUnexpectedToken() {
     MessageTemplate::Template message = MessageTemplate::kUnexpectedToken;
     const char* arg;
     Scanner::Location location = scanner()->peek_location();
     GetUnexpectedTokenMessage(peek(), &message, &location, &arg);
-    classifier->RecordExpressionError(location, message, arg);
+    classifier()->RecordExpressionError(location, message, arg);
   }
 
-  void BindingPatternUnexpectedToken(ExpressionClassifier* classifier) {
+  void BindingPatternUnexpectedToken() {
     MessageTemplate::Template message = MessageTemplate::kUnexpectedToken;
     const char* arg;
     Scanner::Location location = scanner()->peek_location();
     GetUnexpectedTokenMessage(peek(), &message, &location, &arg);
-    classifier->RecordBindingPatternError(location, message, arg);
+    classifier()->RecordBindingPatternError(location, message, arg);
   }
 
-  void ArrowFormalParametersUnexpectedToken(ExpressionClassifier* classifier) {
+  void ArrowFormalParametersUnexpectedToken() {
     MessageTemplate::Template message = MessageTemplate::kUnexpectedToken;
     const char* arg;
     Scanner::Location location = scanner()->peek_location();
     GetUnexpectedTokenMessage(peek(), &message, &location, &arg);
-    classifier->RecordArrowFormalParametersError(location, message, arg);
+    classifier()->RecordArrowFormalParametersError(location, message, arg);
   }
 
-  // Recursive descent functions:
+  // Recursive descent functions.
+  // All ParseXXX functions take as the last argument an *ok parameter
+  // which is set to false if parsing failed; it is unchanged otherwise.
+  // By making the 'exception handling' explicit, we are forced to check
+  // for failure at the call sites. The family of CHECK_OK* macros can
+  // be useful for this.
 
   // Parses an identifier that is valid for the current scope, in particular it
   // fails on strict mode future reserved keywords in a strict scope. If
@@ -1078,8 +1103,7 @@
   // "arguments" as identifier even in strict mode (this is needed in cases like
   // "var foo = eval;").
   IdentifierT ParseIdentifier(AllowRestrictedIdentifiers, bool* ok);
-  IdentifierT ParseAndClassifyIdentifier(ExpressionClassifier* classifier,
-                                         bool* ok);
+  IdentifierT ParseAndClassifyIdentifier(bool* ok);
   // Parses an identifier or a strict mode future reserved word, and indicate
   // whether it is strict mode future reserved. Allows passing in function_kind
   // for the case of parsing the identifier in a function expression, where the
@@ -1098,76 +1122,173 @@
 
   ExpressionT ParseRegExpLiteral(bool* ok);
 
-  ExpressionT ParsePrimaryExpression(ExpressionClassifier* classifier,
-                                     bool* is_async, bool* ok);
-  ExpressionT ParsePrimaryExpression(ExpressionClassifier* classifier,
-                                     bool* ok) {
+  ExpressionT ParsePrimaryExpression(bool* is_async, bool* ok);
+  ExpressionT ParsePrimaryExpression(bool* ok) {
     bool is_async;
-    return ParsePrimaryExpression(classifier, &is_async, ok);
-  }
-  ExpressionT ParseExpression(bool accept_IN, bool* ok);
-  ExpressionT ParseExpression(bool accept_IN, ExpressionClassifier* classifier,
-                              bool* ok);
-  ExpressionT ParseArrayLiteral(ExpressionClassifier* classifier, bool* ok);
-  ExpressionT ParsePropertyName(IdentifierT* name, bool* is_get, bool* is_set,
-                                bool* is_computed_name,
-                                ExpressionClassifier* classifier, bool* ok);
-  ExpressionT ParseObjectLiteral(ExpressionClassifier* classifier, bool* ok);
-  ObjectLiteralPropertyT ParsePropertyDefinition(
-      ObjectLiteralCheckerBase* checker, bool in_class, bool has_extends,
-      MethodKind kind, bool* is_computed_name, bool* has_seen_constructor,
-      ExpressionClassifier* classifier, IdentifierT* name, bool* ok);
-  typename Traits::Type::ExpressionList ParseArguments(
-      Scanner::Location* first_spread_pos, bool maybe_arrow,
-      ExpressionClassifier* classifier, bool* ok);
-  typename Traits::Type::ExpressionList ParseArguments(
-      Scanner::Location* first_spread_pos, ExpressionClassifier* classifier,
-      bool* ok) {
-    return ParseArguments(first_spread_pos, false, classifier, ok);
+    return ParsePrimaryExpression(&is_async, ok);
   }
 
-  ExpressionT ParseAssignmentExpression(bool accept_IN,
-                                        ExpressionClassifier* classifier,
-                                        bool* ok);
-  ExpressionT ParseYieldExpression(bool accept_IN,
-                                   ExpressionClassifier* classifier, bool* ok);
-  ExpressionT ParseTailCallExpression(ExpressionClassifier* classifier,
-                                      bool* ok);
-  ExpressionT ParseConditionalExpression(bool accept_IN,
-                                         ExpressionClassifier* classifier,
-                                         bool* ok);
-  ExpressionT ParseBinaryExpression(int prec, bool accept_IN,
-                                    ExpressionClassifier* classifier, bool* ok);
-  ExpressionT ParseUnaryExpression(ExpressionClassifier* classifier, bool* ok);
-  ExpressionT ParsePostfixExpression(ExpressionClassifier* classifier,
-                                     bool* ok);
-  ExpressionT ParseLeftHandSideExpression(ExpressionClassifier* classifier,
-                                          bool* ok);
-  ExpressionT ParseMemberWithNewPrefixesExpression(
-      ExpressionClassifier* classifier, bool* is_async, bool* ok);
-  ExpressionT ParseMemberExpression(ExpressionClassifier* classifier,
-                                    bool* is_async, bool* ok);
-  ExpressionT ParseMemberExpressionContinuation(
-      ExpressionT expression, bool* is_async, ExpressionClassifier* classifier,
-      bool* ok);
+  // This method wraps the parsing of the expression inside a new expression
+  // classifier and calls RewriteNonPattern if parsing is successful.
+  // It should be used whenever we're parsing an expression that will be
+  // used as a non-pattern (i.e., in most cases).
+  V8_INLINE ExpressionT ParseExpression(bool accept_IN, bool* ok);
+
+  // This method does not wrap the parsing of the expression inside a
+  // new expression classifier; it uses the top-level classifier instead.
+  // It should be used whenever we're parsing something with the "cover"
+  // grammar that recognizes both patterns and non-patterns (which roughly
+  // corresponds to what's inside the parentheses generated by the symbol
+  // "CoverParenthesizedExpressionAndArrowParameterList" in the ES 2017
+  // specification).
+  ExpressionT ParseExpressionCoverGrammar(bool accept_IN, bool* ok);
+
+  ExpressionT ParseArrayLiteral(bool* ok);
+
+  enum class PropertyKind {
+    kAccessorProperty,
+    kValueProperty,
+    kShorthandProperty,
+    kMethodProperty,
+    kClassField,
+    kNotSet
+  };
+
+  bool SetPropertyKindFromToken(Token::Value token, PropertyKind* kind);
+  ExpressionT ParsePropertyName(IdentifierT* name, PropertyKind* kind,
+                                bool* is_generator, bool* is_get, bool* is_set,
+                                bool* is_async, bool* is_computed_name,
+                                bool* ok);
+  ExpressionT ParseObjectLiteral(bool* ok);
+  ClassLiteralPropertyT ParseClassPropertyDefinition(
+      ClassLiteralChecker* checker, bool has_extends, bool* is_computed_name,
+      bool* has_seen_constructor, bool* ok);
+  FunctionLiteralT ParseClassFieldForInitializer(bool has_initializer,
+                                                 bool* ok);
+  ObjectLiteralPropertyT ParseObjectPropertyDefinition(
+      ObjectLiteralChecker* checker, bool* is_computed_name, bool* ok);
+  ExpressionListT ParseArguments(Scanner::Location* first_spread_pos,
+                                 bool maybe_arrow, bool* ok);
+  ExpressionListT ParseArguments(Scanner::Location* first_spread_pos,
+                                 bool* ok) {
+    return ParseArguments(first_spread_pos, false, ok);
+  }
+
+  ExpressionT ParseAssignmentExpression(bool accept_IN, bool* ok);
+  ExpressionT ParseYieldExpression(bool accept_IN, bool* ok);
+  ExpressionT ParseConditionalExpression(bool accept_IN, bool* ok);
+  ExpressionT ParseBinaryExpression(int prec, bool accept_IN, bool* ok);
+  ExpressionT ParseUnaryExpression(bool* ok);
+  ExpressionT ParsePostfixExpression(bool* ok);
+  ExpressionT ParseLeftHandSideExpression(bool* ok);
+  ExpressionT ParseMemberWithNewPrefixesExpression(bool* is_async, bool* ok);
+  ExpressionT ParseMemberExpression(bool* is_async, bool* ok);
+  ExpressionT ParseMemberExpressionContinuation(ExpressionT expression,
+                                                bool* is_async, bool* ok);
   ExpressionT ParseArrowFunctionLiteral(bool accept_IN,
                                         const FormalParametersT& parameters,
-                                        bool is_async,
-                                        const ExpressionClassifier& classifier,
                                         bool* ok);
-  ExpressionT ParseTemplateLiteral(ExpressionT tag, int start,
-                                   ExpressionClassifier* classifier, bool* ok);
+  void ParseAsyncFunctionBody(Scope* scope, StatementListT body,
+                              FunctionKind kind, FunctionBodyType type,
+                              bool accept_IN, int pos, bool* ok);
+  ExpressionT ParseAsyncFunctionLiteral(bool* ok);
+  ExpressionT ParseClassLiteral(IdentifierT name,
+                                Scanner::Location class_name_location,
+                                bool name_is_strict_reserved,
+                                int class_token_pos, bool* ok);
+  ExpressionT ParseTemplateLiteral(ExpressionT tag, int start, bool* ok);
   ExpressionT ParseSuperExpression(bool is_new, bool* ok);
   ExpressionT ParseNewTargetExpression(bool* ok);
 
-  void ParseFormalParameter(FormalParametersT* parameters,
-                            ExpressionClassifier* classifier, bool* ok);
-  void ParseFormalParameterList(FormalParametersT* parameters,
-                                ExpressionClassifier* classifier, bool* ok);
+  void ParseFormalParameter(FormalParametersT* parameters, bool* ok);
+  void ParseFormalParameterList(FormalParametersT* parameters, bool* ok);
   void CheckArityRestrictions(int param_count, FunctionKind function_type,
                               bool has_rest, int formals_start_pos,
                               int formals_end_pos, bool* ok);
 
+  BlockT ParseVariableDeclarations(VariableDeclarationContext var_context,
+                                   DeclarationParsingResult* parsing_result,
+                                   ZoneList<const AstRawString*>* names,
+                                   bool* ok);
+  StatementT ParseAsyncFunctionDeclaration(ZoneList<const AstRawString*>* names,
+                                           bool default_export, bool* ok);
+  StatementT ParseFunctionDeclaration(bool* ok);
+  StatementT ParseHoistableDeclaration(ZoneList<const AstRawString*>* names,
+                                       bool default_export, bool* ok);
+  StatementT ParseHoistableDeclaration(int pos, ParseFunctionFlags flags,
+                                       ZoneList<const AstRawString*>* names,
+                                       bool default_export, bool* ok);
+  StatementT ParseClassDeclaration(ZoneList<const AstRawString*>* names,
+                                   bool default_export, bool* ok);
+  StatementT ParseNativeDeclaration(bool* ok);
+
+  // Under some circumstances, we allow preparsing to abort if the preparsed
+  // function is "long and trivial", and fully parse instead. Our current
+  // definition of "long and trivial" is:
+  // - over kLazyParseTrialLimit statements
+  // - all starting with an identifier (i.e., no if, for, while, etc.)
+  static const int kLazyParseTrialLimit = 200;
+
+  // TODO(nikolaos, marja): The first argument should not really be passed
+  // by value. The method is expected to add the parsed statements to the
+  // list. This works because in the case of the parser, StatementListT is
+  // a pointer whereas the preparser does not really modify the body.
+  V8_INLINE void ParseStatementList(StatementListT body, int end_token,
+                                    bool* ok) {
+    LazyParsingResult result = ParseStatementList(body, end_token, false, ok);
+    USE(result);
+    DCHECK_EQ(result, kLazyParsingComplete);
+  }
+  LazyParsingResult ParseStatementList(StatementListT body, int end_token,
+                                       bool may_abort, bool* ok);
+  StatementT ParseStatementListItem(bool* ok);
+  StatementT ParseStatement(ZoneList<const AstRawString*>* labels,
+                            AllowLabelledFunctionStatement allow_function,
+                            bool* ok);
+  StatementT ParseStatementAsUnlabelled(ZoneList<const AstRawString*>* labels,
+                                        bool* ok);
+  BlockT ParseBlock(ZoneList<const AstRawString*>* labels, bool* ok);
+
+  // Parse a SubStatement in strict mode, or with an extra block scope in
+  // sloppy mode to handle
+  // ES#sec-functiondeclarations-in-ifstatement-statement-clauses
+  // The legacy parameter indicates whether function declarations are
+  // banned by the ES2015 specification in this location, and they are being
+  // permitted here to match previous V8 behavior.
+  StatementT ParseScopedStatement(ZoneList<const AstRawString*>* labels,
+                                  bool legacy, bool* ok);
+
+  StatementT ParseVariableStatement(VariableDeclarationContext var_context,
+                                    ZoneList<const AstRawString*>* names,
+                                    bool* ok);
+
+  // Magical syntax support.
+  ExpressionT ParseV8Intrinsic(bool* ok);
+
+  ExpressionT ParseDoExpression(bool* ok);
+
+  StatementT ParseDebuggerStatement(bool* ok);
+
+  StatementT ParseExpressionOrLabelledStatement(
+      ZoneList<const AstRawString*>* labels,
+      AllowLabelledFunctionStatement allow_function, bool* ok);
+  StatementT ParseIfStatement(ZoneList<const AstRawString*>* labels, bool* ok);
+  StatementT ParseContinueStatement(bool* ok);
+  StatementT ParseBreakStatement(ZoneList<const AstRawString*>* labels,
+                                 bool* ok);
+  StatementT ParseReturnStatement(bool* ok);
+  StatementT ParseWithStatement(ZoneList<const AstRawString*>* labels,
+                                bool* ok);
+  StatementT ParseDoWhileStatement(ZoneList<const AstRawString*>* labels,
+                                   bool* ok);
+  StatementT ParseWhileStatement(ZoneList<const AstRawString*>* labels,
+                                 bool* ok);
+  StatementT ParseThrowStatement(bool* ok);
+  StatementT ParseSwitchStatement(ZoneList<const AstRawString*>* labels,
+                                  bool* ok);
+  StatementT ParseTryStatement(bool* ok);
+  StatementT ParseForStatement(ZoneList<const AstRawString*>* labels, bool* ok);
+
   bool IsNextLetKeyword();
   bool IsTrivialExpression();
 
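
The "long and trivial" abort rule documented above (more than kLazyParseTrialLimit statements, all starting with an identifier) reduces to a small loop. A self-contained sketch, assuming the statement stream has already been reduced to each statement's first token; names such as PreparseStatements and kTrialLimit are illustrative only:

#include <string>
#include <vector>

// Sketch only: the abort heuristic, reduced to its essentials. The caller
// passes may_abort; kTrialLimit stands in for kLazyParseTrialLimit.
enum class LazyParsingResult { kComplete, kAborted };

constexpr int kTrialLimit = 200;

LazyParsingResult PreparseStatements(
    const std::vector<std::string>& first_tokens, bool may_abort) {
  int count = 0;
  for (const std::string& token : first_tokens) {
    // "Trivial": the statement starts with an identifier rather than a
    // keyword such as if/for/while/return.
    bool starts_with_identifier =
        token != "if" && token != "for" && token != "while" &&
        token != "return" && token != "switch" && token != "try";
    if (!starts_with_identifier) may_abort = false;  // stop considering abort
    ++count;
    if (may_abort && count > kTrialLimit) {
      return LazyParsingResult::kAborted;  // caller should fully parse instead
    }
  }
  return LazyParsingResult::kComplete;
}
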
@@ -1184,9 +1305,9 @@
   bool IsValidReferenceExpression(ExpressionT expression);
 
   bool IsAssignableIdentifier(ExpressionT expression) {
-    if (!Traits::IsIdentifier(expression)) return false;
+    if (!impl()->IsIdentifier(expression)) return false;
     if (is_strict(language_mode()) &&
-        Traits::IsEvalOrArguments(Traits::AsIdentifier(expression))) {
+        impl()->IsEvalOrArguments(impl()->AsIdentifier(expression))) {
       return false;
     }
     return true;
@@ -1201,8 +1322,8 @@
   // forwards the information to scope.
   Call::PossiblyEval CheckPossibleEvalCall(ExpressionT expression,
                                            Scope* scope) {
-    if (Traits::IsIdentifier(expression) &&
-        Traits::IsEval(Traits::AsIdentifier(expression))) {
+    if (impl()->IsIdentifier(expression) &&
+        impl()->IsEval(impl()->AsIdentifier(expression))) {
       scope->RecordEvalCall();
       if (is_sloppy(scope->language_mode())) {
         // For sloppy scopes we also have to record the call at function level,
@@ -1214,56 +1335,33 @@
     return Call::NOT_EVAL;
   }
 
-  // Used to validate property names in object literals and class literals
-  enum PropertyKind {
-    kAccessorProperty,
-    kValueProperty,
-    kMethodProperty
-  };
-
-  class ObjectLiteralCheckerBase {
-   public:
-    explicit ObjectLiteralCheckerBase(ParserBase* parser) : parser_(parser) {}
-
-    virtual void CheckProperty(Token::Value property, PropertyKind type,
-                               MethodKind method_type,
-                               ExpressionClassifier* classifier, bool* ok) = 0;
-
-    virtual ~ObjectLiteralCheckerBase() {}
-
-   protected:
-    ParserBase* parser() const { return parser_; }
-    Scanner* scanner() const { return parser_->scanner(); }
-
-   private:
-    ParserBase* parser_;
-  };
-
   // Validation per ES6 object literals.
-  class ObjectLiteralChecker : public ObjectLiteralCheckerBase {
+  class ObjectLiteralChecker {
    public:
     explicit ObjectLiteralChecker(ParserBase* parser)
-        : ObjectLiteralCheckerBase(parser), has_seen_proto_(false) {}
+        : parser_(parser), has_seen_proto_(false) {}
 
-    void CheckProperty(Token::Value property, PropertyKind type,
-                       MethodKind method_type, ExpressionClassifier* classifier,
-                       bool* ok) override;
+    void CheckDuplicateProto(Token::Value property);
 
    private:
     bool IsProto() { return this->scanner()->LiteralMatches("__proto__", 9); }
 
+    ParserBase* parser() const { return parser_; }
+    Scanner* scanner() const { return parser_->scanner(); }
+
+    ParserBase* parser_;
     bool has_seen_proto_;
   };
 
   // Validation per ES6 class literals.
-  class ClassLiteralChecker : public ObjectLiteralCheckerBase {
+  class ClassLiteralChecker {
    public:
     explicit ClassLiteralChecker(ParserBase* parser)
-        : ObjectLiteralCheckerBase(parser), has_seen_constructor_(false) {}
+        : parser_(parser), has_seen_constructor_(false) {}
 
-    void CheckProperty(Token::Value property, PropertyKind type,
-                       MethodKind method_type, ExpressionClassifier* classifier,
-                       bool* ok) override;
+    void CheckClassMethodName(Token::Value property, PropertyKind type,
+                              bool is_generator, bool is_async, bool is_static,
+                              bool* ok);
 
    private:
     bool IsConstructor() {
@@ -1273,6 +1371,10 @@
       return this->scanner()->LiteralMatches("prototype", 9);
     }
 
+    ParserBase* parser() const { return parser_; }
+    Scanner* scanner() const { return parser_->scanner(); }
+
+    ParserBase* parser_;
     bool has_seen_constructor_;
   };
 
@@ -1281,19 +1383,63 @@
   }
   Scope* scope() const { return scope_state_->scope(); }
 
+  // Stack of expression classifiers.
+  // The top of the stack is always pointed to by classifier().
+  V8_INLINE ExpressionClassifier* classifier() const {
+    DCHECK_NOT_NULL(classifier_);
+    return classifier_;
+  }
+
+  // Accumulates the classifier that is on top of the stack (inner) to
+  // the one that is right below (outer) and pops the inner.
+  V8_INLINE void Accumulate(unsigned productions,
+                            bool merge_non_patterns = true) {
+    DCHECK_NOT_NULL(classifier_);
+    ExpressionClassifier* previous = classifier_->previous();
+    DCHECK_NOT_NULL(previous);
+    previous->Accumulate(classifier_, productions, merge_non_patterns);
+    classifier_ = previous;
+  }
+
+  // Pops and discards the classifier that is on top of the stack
+  // without accumulating.
+  V8_INLINE void Discard() {
+    DCHECK_NOT_NULL(classifier_);
+    classifier_->Discard();
+    classifier_ = classifier_->previous();
+  }
+
+  // Accumulate errors that can be arbitrarily deep in an expression.
+  // These correspond to the ECMAScript spec's 'Contains' operation
+  // on productions. This includes:
+  //
+  // - YieldExpression is disallowed in arrow parameters in a generator.
+  // - AwaitExpression is disallowed in arrow parameters in an async function.
+  // - AwaitExpression is disallowed in async arrow parameters.
+  //
+  V8_INLINE void AccumulateFormalParameterContainmentErrors() {
+    Accumulate(ExpressionClassifier::FormalParameterInitializerProduction |
+               ExpressionClassifier::AsyncArrowFormalParametersProduction);
+  }
+
+  // Parser base's protected field members.
+
   ScopeState* scope_state_;        // Scope stack.
   FunctionState* function_state_;  // Function state stack.
   v8::Extension* extension_;
   FuncNameInferrer* fni_;
   AstValueFactory* ast_value_factory_;  // Not owned.
-  typename Traits::Type::Factory ast_node_factory_;
+  typename Types::Factory ast_node_factory_;
   ParserRecorder* log_;
   Mode mode_;
   bool parsing_module_;
   uintptr_t stack_limit_;
 
+  // Parser base's private field members.
+
  private:
   Zone* zone_;
+  ExpressionClassifier* classifier_;
 
   Scanner* scanner_;
   bool stack_overflow_;
@@ -1308,6 +1454,7 @@
   bool allow_harmony_async_await_;
   bool allow_harmony_restrictive_generators_;
   bool allow_harmony_trailing_commas_;
+  bool allow_harmony_class_fields_;
 
   friend class DiscardableZoneScope;
 };
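
The classifier accessors added to ParserBase above (classifier(), Accumulate(),
Discard()) replace the ExpressionClassifier* argument that was previously
threaded through every Parse* method: each ExpressionClassifier is constructed
with the parser and linked into a stack, and callers later either merge its
recorded errors into the enclosing classifier or drop them. A minimal
standalone sketch of that push/accumulate/discard discipline follows; the names
are illustrative only, and the real helpers live on ParserBase and operate on
per-production error bitsets rather than strings.

#include <cassert>
#include <string>
#include <vector>

struct SketchParser;

// Stand-in for ExpressionClassifier: collects speculative errors for the
// construct currently being parsed.
struct SketchClassifier {
  explicit SketchClassifier(SketchParser* parser);
  void Record(const std::string& error) { errors.push_back(error); }
  void AccumulateToPrevious();  // Merge into the enclosing classifier and pop.
  void Discard();               // Drop pending errors and pop.

  SketchParser* parser;
  SketchClassifier* previous;
  std::vector<std::string> errors;
};

// Stand-in for ParserBase: owns the top-of-stack pointer (classifier_).
struct SketchParser {
  SketchClassifier* classifier = nullptr;
};

SketchClassifier::SketchClassifier(SketchParser* parser)
    : parser(parser), previous(parser->classifier) {
  parser->classifier = this;  // Push on construction.
}

void SketchClassifier::AccumulateToPrevious() {
  assert(previous != nullptr);
  previous->errors.insert(previous->errors.end(), errors.begin(),
                          errors.end());
  parser->classifier = previous;  // Pop after merging.
}

void SketchClassifier::Discard() { parser->classifier = previous; }
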
@@ -1315,12 +1462,12 @@
 template <typename Impl>
 ParserBase<Impl>::FunctionState::FunctionState(
     FunctionState** function_state_stack, ScopeState** scope_stack,
-    Scope* scope, FunctionKind kind)
+    DeclarationScope* scope)
     : ScopeState(scope_stack, scope),
       next_materialized_literal_index_(0),
       expected_property_count_(0),
-      kind_(kind),
-      generator_object_variable_(NULL),
+      generator_object_variable_(nullptr),
+      promise_variable_(nullptr),
       function_state_stack_(function_state_stack),
       outer_function_state_(*function_state_stack),
       destructuring_assignments_to_rewrite_(16, scope->zone()),
@@ -1413,19 +1560,18 @@
     MessageTemplate::Template message) {
   const char* arg;
   GetUnexpectedTokenMessage(token, &message, &source_location, &arg);
-  Traits::ReportMessageAt(source_location, message, arg);
+  impl()->ReportMessageAt(source_location, message, arg);
 }
 
 template <typename Impl>
 typename ParserBase<Impl>::IdentifierT ParserBase<Impl>::ParseIdentifier(
     AllowRestrictedIdentifiers allow_restricted_identifiers, bool* ok) {
   ExpressionClassifier classifier(this);
-  auto result =
-      ParseAndClassifyIdentifier(&classifier, CHECK_OK_CUSTOM(EmptyIdentifier));
+  auto result = ParseAndClassifyIdentifier(CHECK_OK_CUSTOM(EmptyIdentifier));
 
   if (allow_restricted_identifiers == kDontAllowRestrictedIdentifiers) {
-    ValidateAssignmentPattern(&classifier, CHECK_OK_CUSTOM(EmptyIdentifier));
-    ValidateBindingPattern(&classifier, CHECK_OK_CUSTOM(EmptyIdentifier));
+    ValidateAssignmentPattern(CHECK_OK_CUSTOM(EmptyIdentifier));
+    ValidateBindingPattern(CHECK_OK_CUSTOM(EmptyIdentifier));
   }
 
   return result;
@@ -1433,33 +1579,32 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::IdentifierT
-ParserBase<Impl>::ParseAndClassifyIdentifier(ExpressionClassifier* classifier,
-                                             bool* ok) {
+ParserBase<Impl>::ParseAndClassifyIdentifier(bool* ok) {
   Token::Value next = Next();
   if (next == Token::IDENTIFIER || next == Token::ASYNC ||
       (next == Token::AWAIT && !parsing_module_ && !is_async_function())) {
-    IdentifierT name = this->GetSymbol(scanner());
+    IdentifierT name = impl()->GetSymbol();
     // When this function is used to read a formal parameter, we don't always
     // know whether the function is going to be strict or sloppy.  Indeed for
     // arrow functions we don't always know that the identifier we are reading
     // is actually a formal parameter.  Therefore besides the errors that we
     // must detect because we know we're in strict mode, we also record any
     // error that we might make in the future once we know the language mode.
-    if (this->IsEvalOrArguments(name)) {
-      classifier->RecordStrictModeFormalParameterError(
+    if (impl()->IsEvalOrArguments(name)) {
+      classifier()->RecordStrictModeFormalParameterError(
           scanner()->location(), MessageTemplate::kStrictEvalArguments);
       if (is_strict(language_mode())) {
-        classifier->RecordBindingPatternError(
+        classifier()->RecordBindingPatternError(
             scanner()->location(), MessageTemplate::kStrictEvalArguments);
       }
     } else if (next == Token::AWAIT) {
-      classifier->RecordAsyncArrowFormalParametersError(
+      classifier()->RecordAsyncArrowFormalParametersError(
           scanner()->location(), MessageTemplate::kAwaitBindingIdentifier);
     }
 
-    if (classifier->duplicate_finder() != nullptr &&
-        scanner()->FindSymbol(classifier->duplicate_finder(), 1) != 0) {
-      classifier->RecordDuplicateFormalParameterError(scanner()->location());
+    if (classifier()->duplicate_finder() != nullptr &&
+        scanner()->FindSymbol(classifier()->duplicate_finder(), 1) != 0) {
+      classifier()->RecordDuplicateFormalParameterError(scanner()->location());
     }
     return name;
   } else if (is_sloppy(language_mode()) &&
@@ -1467,25 +1612,25 @@
               next == Token::ESCAPED_STRICT_RESERVED_WORD ||
               next == Token::LET || next == Token::STATIC ||
               (next == Token::YIELD && !is_generator()))) {
-    classifier->RecordStrictModeFormalParameterError(
+    classifier()->RecordStrictModeFormalParameterError(
         scanner()->location(), MessageTemplate::kUnexpectedStrictReserved);
     if (next == Token::ESCAPED_STRICT_RESERVED_WORD &&
         is_strict(language_mode())) {
       ReportUnexpectedToken(next);
       *ok = false;
-      return Traits::EmptyIdentifier();
+      return impl()->EmptyIdentifier();
     }
     if (next == Token::LET ||
         (next == Token::ESCAPED_STRICT_RESERVED_WORD &&
          scanner()->is_literal_contextual_keyword(CStrVector("let")))) {
-      classifier->RecordLetPatternError(scanner()->location(),
-                                        MessageTemplate::kLetInLexicalBinding);
+      classifier()->RecordLetPatternError(
+          scanner()->location(), MessageTemplate::kLetInLexicalBinding);
     }
-    return this->GetSymbol(scanner());
+    return impl()->GetSymbol();
   } else {
-    this->ReportUnexpectedToken(next);
+    ReportUnexpectedToken(next);
     *ok = false;
-    return Traits::EmptyIdentifier();
+    return impl()->EmptyIdentifier();
   }
 }
 
@@ -1505,10 +1650,10 @@
   } else {
     ReportUnexpectedToken(next);
     *ok = false;
-    return Traits::EmptyIdentifier();
+    return impl()->EmptyIdentifier();
   }
 
-  return this->GetSymbol(scanner());
+  return impl()->GetSymbol();
 }
 
 template <typename Impl>
@@ -1521,12 +1666,12 @@
       next != Token::FUTURE_STRICT_RESERVED_WORD &&
       next != Token::ESCAPED_KEYWORD &&
       next != Token::ESCAPED_STRICT_RESERVED_WORD && !Token::IsKeyword(next)) {
-    this->ReportUnexpectedToken(next);
+    ReportUnexpectedToken(next);
     *ok = false;
-    return Traits::EmptyIdentifier();
+    return impl()->EmptyIdentifier();
   }
 
-  return this->GetSymbol(scanner());
+  return impl()->GetSymbol();
 }
 
 template <typename Impl>
@@ -1537,18 +1682,18 @@
     Next();
     ReportMessage(MessageTemplate::kUnterminatedRegExp);
     *ok = false;
-    return Traits::EmptyExpression();
+    return impl()->EmptyExpression();
   }
 
   int literal_index = function_state_->NextMaterializedLiteralIndex();
 
-  IdentifierT js_pattern = this->GetNextSymbol(scanner());
+  IdentifierT js_pattern = impl()->GetNextSymbol();
   Maybe<RegExp::Flags> flags = scanner()->ScanRegExpFlags();
   if (flags.IsNothing()) {
     Next();
     ReportMessage(MessageTemplate::kMalformedRegExpFlags);
     *ok = false;
-    return Traits::EmptyExpression();
+    return impl()->EmptyExpression();
   }
   int js_flags = flags.FromJust();
   Next();
@@ -1557,7 +1702,7 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParsePrimaryExpression(
-    ExpressionClassifier* classifier, bool* is_async, bool* ok) {
+    bool* is_async, bool* ok) {
   // PrimaryExpression ::
   //   'this'
   //   'null'
@@ -1573,14 +1718,14 @@
   //   '(' Expression ')'
   //   TemplateLiteral
   //   do Block
-  //   AsyncFunctionExpression
+  //   AsyncFunctionLiteral
 
   int beg_pos = peek_position();
   switch (peek()) {
     case Token::THIS: {
-      BindingPatternUnexpectedToken(classifier);
+      BindingPatternUnexpectedToken();
       Consume(Token::THIS);
-      return this->ThisExpression(beg_pos);
+      return impl()->ThisExpression(beg_pos);
     }
 
     case Token::NULL_LITERAL:
@@ -1588,15 +1733,15 @@
     case Token::FALSE_LITERAL:
     case Token::SMI:
     case Token::NUMBER:
-      BindingPatternUnexpectedToken(classifier);
-      return this->ExpressionFromLiteral(Next(), beg_pos, scanner(), factory());
+      BindingPatternUnexpectedToken();
+      return impl()->ExpressionFromLiteral(Next(), beg_pos);
 
     case Token::ASYNC:
       if (allow_harmony_async_await() &&
           !scanner()->HasAnyLineTerminatorAfterNext() &&
           PeekAhead() == Token::FUNCTION) {
         Consume(Token::ASYNC);
-        return impl()->ParseAsyncFunctionExpression(CHECK_OK);
+        return ParseAsyncFunctionLiteral(CHECK_OK);
       }
       // CoverCallExpressionAndAsyncArrowHead
       *is_async = true;
@@ -1609,28 +1754,28 @@
     case Token::ESCAPED_STRICT_RESERVED_WORD:
     case Token::FUTURE_STRICT_RESERVED_WORD: {
       // Using eval or arguments in this context is OK even in strict mode.
-      IdentifierT name = ParseAndClassifyIdentifier(classifier, CHECK_OK);
-      return this->ExpressionFromIdentifier(name, beg_pos,
-                                            scanner()->location().end_pos);
+      IdentifierT name = ParseAndClassifyIdentifier(CHECK_OK);
+      return impl()->ExpressionFromIdentifier(name, beg_pos,
+                                              scanner()->location().end_pos);
     }
 
     case Token::STRING: {
-      BindingPatternUnexpectedToken(classifier);
+      BindingPatternUnexpectedToken();
       Consume(Token::STRING);
-      return this->ExpressionFromString(beg_pos, scanner(), factory());
+      return impl()->ExpressionFromString(beg_pos);
     }
 
     case Token::ASSIGN_DIV:
     case Token::DIV:
-      classifier->RecordBindingPatternError(
+      classifier()->RecordBindingPatternError(
           scanner()->peek_location(), MessageTemplate::kUnexpectedTokenRegExp);
-      return this->ParseRegExpLiteral(ok);
+      return ParseRegExpLiteral(ok);
 
     case Token::LBRACK:
-      return this->ParseArrayLiteral(classifier, ok);
+      return ParseArrayLiteral(ok);
 
     case Token::LBRACE:
-      return this->ParseObjectLiteral(classifier, ok);
+      return ParseObjectLiteral(ok);
 
     case Token::LPAREN: {
       // Arrow function formal parameters are either a single identifier or a
@@ -1638,61 +1783,34 @@
       // Parentheses are not valid on the LHS of a BindingPattern, so we use the
       // is_valid_binding_pattern() check to detect multiple levels of
       // parenthesization.
-      bool pattern_error = !classifier->is_valid_binding_pattern();
-      classifier->RecordPatternError(scanner()->peek_location(),
-                                     MessageTemplate::kUnexpectedToken,
-                                     Token::String(Token::LPAREN));
-      if (pattern_error) ArrowFormalParametersUnexpectedToken(classifier);
+      bool pattern_error = !classifier()->is_valid_binding_pattern();
+      classifier()->RecordPatternError(scanner()->peek_location(),
+                                       MessageTemplate::kUnexpectedToken,
+                                       Token::String(Token::LPAREN));
+      if (pattern_error) ArrowFormalParametersUnexpectedToken();
       Consume(Token::LPAREN);
       if (Check(Token::RPAREN)) {
         // ()=>x.  The continuation that looks for the => is in
         // ParseAssignmentExpression.
-        classifier->RecordExpressionError(scanner()->location(),
-                                          MessageTemplate::kUnexpectedToken,
-                                          Token::String(Token::RPAREN));
+        classifier()->RecordExpressionError(scanner()->location(),
+                                            MessageTemplate::kUnexpectedToken,
+                                            Token::String(Token::RPAREN));
         return factory()->NewEmptyParentheses(beg_pos);
-      } else if (Check(Token::ELLIPSIS)) {
-        // (...x)=>x.  The continuation that looks for the => is in
-        // ParseAssignmentExpression.
-        int ellipsis_pos = position();
-        int expr_pos = peek_position();
-        classifier->RecordExpressionError(scanner()->location(),
-                                          MessageTemplate::kUnexpectedToken,
-                                          Token::String(Token::ELLIPSIS));
-        classifier->RecordNonSimpleParameter();
-        ExpressionClassifier binding_classifier(this);
-        ExpressionT expr = this->ParseAssignmentExpression(
-            true, &binding_classifier, CHECK_OK);
-        classifier->Accumulate(&binding_classifier,
-                               ExpressionClassifier::AllProductions);
-        if (!this->IsIdentifier(expr) && !IsValidPattern(expr)) {
-          classifier->RecordArrowFormalParametersError(
-              Scanner::Location(ellipsis_pos, scanner()->location().end_pos),
-              MessageTemplate::kInvalidRestParameter);
-        }
-        if (peek() == Token::COMMA) {
-          ReportMessageAt(scanner()->peek_location(),
-                          MessageTemplate::kParamAfterRest);
-          *ok = false;
-          return this->EmptyExpression();
-        }
-        Expect(Token::RPAREN, CHECK_OK);
-        return factory()->NewSpread(expr, ellipsis_pos, expr_pos);
       }
       // Heuristically try to detect immediately called functions before
       // seeing the call parentheses.
       function_state_->set_next_function_is_parenthesized(peek() ==
                                                           Token::FUNCTION);
-      ExpressionT expr = this->ParseExpression(true, classifier, CHECK_OK);
+      ExpressionT expr = ParseExpressionCoverGrammar(true, CHECK_OK);
       Expect(Token::RPAREN, CHECK_OK);
       return expr;
     }
 
     case Token::CLASS: {
-      BindingPatternUnexpectedToken(classifier);
+      BindingPatternUnexpectedToken();
       Consume(Token::CLASS);
-      int class_token_position = position();
-      IdentifierT name = this->EmptyIdentifier();
+      int class_token_pos = position();
+      IdentifierT name = impl()->EmptyIdentifier();
       bool is_strict_reserved_name = false;
       Scanner::Location class_name_location = Scanner::Location::invalid();
       if (peek_any_identifier()) {
@@ -1700,28 +1818,26 @@
                                                    CHECK_OK);
         class_name_location = scanner()->location();
       }
-      return impl()->ParseClassLiteral(classifier, name, class_name_location,
-                                       is_strict_reserved_name,
-                                       class_token_position, ok);
+      return ParseClassLiteral(name, class_name_location,
+                               is_strict_reserved_name, class_token_pos, ok);
     }
 
     case Token::TEMPLATE_SPAN:
     case Token::TEMPLATE_TAIL:
-      BindingPatternUnexpectedToken(classifier);
-      return this->ParseTemplateLiteral(Traits::NoTemplateTag(), beg_pos,
-                                        classifier, ok);
+      BindingPatternUnexpectedToken();
+      return ParseTemplateLiteral(impl()->NoTemplateTag(), beg_pos, ok);
 
     case Token::MOD:
       if (allow_natives() || extension_ != NULL) {
-        BindingPatternUnexpectedToken(classifier);
-        return impl()->ParseV8Intrinsic(ok);
+        BindingPatternUnexpectedToken();
+        return ParseV8Intrinsic(ok);
       }
       break;
 
     case Token::DO:
       if (allow_harmony_do_expressions()) {
-        BindingPatternUnexpectedToken(classifier);
-        return impl()->ParseDoExpression(ok);
+        BindingPatternUnexpectedToken();
+        return ParseDoExpression(ok);
       }
       break;
 
@@ -1731,78 +1847,71 @@
 
   ReportUnexpectedToken(Next());
   *ok = false;
-  return this->EmptyExpression();
+  return impl()->EmptyExpression();
 }
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseExpression(
     bool accept_IN, bool* ok) {
   ExpressionClassifier classifier(this);
-  ExpressionT result = ParseExpression(accept_IN, &classifier, CHECK_OK);
-  impl()->RewriteNonPattern(&classifier, CHECK_OK);
+  ExpressionT result = ParseExpressionCoverGrammar(accept_IN, CHECK_OK);
+  impl()->RewriteNonPattern(CHECK_OK);
   return result;
 }
 
 template <typename Impl>
-typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseExpression(
-    bool accept_IN, ExpressionClassifier* classifier, bool* ok) {
+typename ParserBase<Impl>::ExpressionT
+ParserBase<Impl>::ParseExpressionCoverGrammar(bool accept_IN, bool* ok) {
   // Expression ::
   //   AssignmentExpression
   //   Expression ',' AssignmentExpression
 
-  ExpressionT result;
-  {
+  ExpressionT result = impl()->EmptyExpression();
+  while (true) {
+    int comma_pos = position();
     ExpressionClassifier binding_classifier(this);
-    result = this->ParseAssignmentExpression(accept_IN, &binding_classifier,
-                                             CHECK_OK);
-    classifier->Accumulate(&binding_classifier,
-                           ExpressionClassifier::AllProductions);
-  }
-  bool is_simple_parameter_list = this->IsIdentifier(result);
-  bool seen_rest = false;
-  while (peek() == Token::COMMA) {
-    CheckNoTailCallExpressions(classifier, CHECK_OK);
-    if (seen_rest) {
-      // At this point the production can't possibly be valid, but we don't know
-      // which error to signal.
-      classifier->RecordArrowFormalParametersError(
-          scanner()->peek_location(), MessageTemplate::kParamAfterRest);
+    ExpressionT right;
+    if (Check(Token::ELLIPSIS)) {
+      // 'x, y, ...z' in CoverParenthesizedExpressionAndArrowParameterList
+      // occurs only as the formal parameters of '(x, y, ...z) => foo', and is
+      // not itself a valid expression.
+      classifier()->RecordExpressionError(scanner()->location(),
+                                          MessageTemplate::kUnexpectedToken,
+                                          Token::String(Token::ELLIPSIS));
+      int ellipsis_pos = position();
+      int pattern_pos = peek_position();
+      ExpressionT pattern = ParsePrimaryExpression(CHECK_OK);
+      ValidateBindingPattern(CHECK_OK);
+      right = factory()->NewSpread(pattern, ellipsis_pos, pattern_pos);
+    } else {
+      right = ParseAssignmentExpression(accept_IN, CHECK_OK);
     }
-    Consume(Token::COMMA);
-    bool is_rest = false;
+    // No need to accumulate binding pattern-related errors, since
+    // an Expression can't be a binding pattern anyway.
+    impl()->Accumulate(ExpressionClassifier::AllProductions &
+                       ~(ExpressionClassifier::BindingPatternProduction |
+                         ExpressionClassifier::LetPatternProduction));
+    if (!impl()->IsIdentifier(right)) classifier()->RecordNonSimpleParameter();
+    if (impl()->IsEmptyExpression(result)) {
+      // First time through the loop.
+      result = right;
+    } else {
+      result =
+          factory()->NewBinaryOperation(Token::COMMA, result, right, comma_pos);
+    }
+
+    if (!Check(Token::COMMA)) break;
+
+    if (right->IsSpread()) {
+      classifier()->RecordArrowFormalParametersError(
+          scanner()->location(), MessageTemplate::kParamAfterRest);
+    }
+
     if (allow_harmony_trailing_commas() && peek() == Token::RPAREN &&
         PeekAhead() == Token::ARROW) {
       // a trailing comma is allowed at the end of an arrow parameter list
       break;
-    } else if (peek() == Token::ELLIPSIS) {
-      // 'x, y, ...z' in CoverParenthesizedExpressionAndArrowParameterList only
-      // as the formal parameters of'(x, y, ...z) => foo', and is not itself a
-      // valid expression or binding pattern.
-      ExpressionUnexpectedToken(classifier);
-      BindingPatternUnexpectedToken(classifier);
-      Consume(Token::ELLIPSIS);
-      seen_rest = is_rest = true;
     }
-    int pos = position(), expr_pos = peek_position();
-    ExpressionClassifier binding_classifier(this);
-    ExpressionT right = this->ParseAssignmentExpression(
-        accept_IN, &binding_classifier, CHECK_OK);
-    classifier->Accumulate(&binding_classifier,
-                           ExpressionClassifier::AllProductions);
-    if (is_rest) {
-      if (!this->IsIdentifier(right) && !IsValidPattern(right)) {
-        classifier->RecordArrowFormalParametersError(
-            Scanner::Location(pos, scanner()->location().end_pos),
-            MessageTemplate::kInvalidRestParameter);
-      }
-      right = factory()->NewSpread(right, pos, expr_pos);
-    }
-    is_simple_parameter_list =
-        is_simple_parameter_list && this->IsIdentifier(right);
-    result = factory()->NewBinaryOperation(Token::COMMA, result, right, pos);
-  }
-  if (!is_simple_parameter_list || seen_rest) {
-    classifier->RecordNonSimpleParameter();
   }
 
   return result;
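
The rewritten ParseExpressionCoverGrammar above builds the comma expression in
a single left-folding loop: each parsed sub-expression (an AssignmentExpression,
or a rest element that is only legal if the whole construct turns out to be an
arrow parameter list) is folded into the running result with a COMMA binary
operation, while formal-parameter errors are recorded in the classifier rather
than reported, so the decision can wait until '=>' is or is not seen. A
simplified sketch of the left fold over a toy node type (not V8's AST factory)
follows.

#include <memory>
#include <string>
#include <vector>

// Toy expression node; the parser instead calls
// factory()->NewBinaryOperation(Token::COMMA, result, right, comma_pos).
struct Expr {
  std::string op;    // "comma" for fold nodes, empty for leaves.
  std::string leaf;  // Leaf payload, e.g. an identifier name.
  std::unique_ptr<Expr> lhs, rhs;
};

std::unique_ptr<Expr> Leaf(std::string name) {
  auto e = std::make_unique<Expr>();
  e->leaf = std::move(name);
  return e;
}

// Fold already-parsed sub-expressions left to right into ((a, b), c),
// mirroring the result/right handling in the loop above.
std::unique_ptr<Expr> FoldComma(std::vector<std::unique_ptr<Expr>> parts) {
  std::unique_ptr<Expr> result;
  for (auto& right : parts) {
    if (!result) {
      result = std::move(right);  // First time through the loop.
    } else {
      auto node = std::make_unique<Expr>();
      node->op = "comma";
      node->lhs = std::move(result);
      node->rhs = std::move(right);
      result = std::move(node);
    }
  }
  return result;
}
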
@@ -1810,26 +1919,23 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseArrayLiteral(
-    ExpressionClassifier* classifier, bool* ok) {
+    bool* ok) {
   // ArrayLiteral ::
   //   '[' Expression? (',' Expression?)* ']'
 
   int pos = peek_position();
-  typename Traits::Type::ExpressionList values =
-      this->NewExpressionList(4, zone_);
+  ExpressionListT values = impl()->NewExpressionList(4);
   int first_spread_index = -1;
   Expect(Token::LBRACK, CHECK_OK);
   while (peek() != Token::RBRACK) {
     ExpressionT elem;
     if (peek() == Token::COMMA) {
-      elem = this->GetLiteralTheHole(peek_position(), factory());
+      elem = impl()->GetLiteralTheHole(peek_position());
     } else if (peek() == Token::ELLIPSIS) {
       int start_pos = peek_position();
       Consume(Token::ELLIPSIS);
       int expr_pos = peek_position();
-      ExpressionT argument =
-          this->ParseAssignmentExpression(true, classifier, CHECK_OK);
-      CheckNoTailCallExpressions(classifier, CHECK_OK);
+      ExpressionT argument = ParseAssignmentExpression(true, CHECK_OK);
       elem = factory()->NewSpread(argument, start_pos, expr_pos);
 
       if (first_spread_index < 0) {
@@ -1837,25 +1943,23 @@
       }
 
       if (argument->IsAssignment()) {
-        classifier->RecordPatternError(
+        classifier()->RecordPatternError(
             Scanner::Location(start_pos, scanner()->location().end_pos),
             MessageTemplate::kInvalidDestructuringTarget);
       } else {
-        CheckDestructuringElement(argument, classifier, start_pos,
+        CheckDestructuringElement(argument, start_pos,
                                   scanner()->location().end_pos);
       }
 
       if (peek() == Token::COMMA) {
-        classifier->RecordPatternError(
+        classifier()->RecordPatternError(
             Scanner::Location(start_pos, scanner()->location().end_pos),
             MessageTemplate::kElementAfterRest);
       }
     } else {
       int beg_pos = peek_position();
-      elem = this->ParseAssignmentExpression(true, classifier, CHECK_OK);
-      CheckNoTailCallExpressions(classifier, CHECK_OK);
-      CheckDestructuringElement(elem, classifier, beg_pos,
-                                scanner()->location().end_pos);
+      elem = ParseAssignmentExpression(true, CHECK_OK);
+      CheckDestructuringElement(elem, beg_pos, scanner()->location().end_pos);
     }
     values->Add(elem, zone_);
     if (peek() != Token::RBRACK) {
@@ -1878,19 +1982,87 @@
       // to change.  Also, this error message will never appear while pre-
       // parsing (this is OK, as it is an implementation limitation).
       ReportMessage(MessageTemplate::kTooManySpreads);
-      return this->EmptyExpression();
+      return impl()->EmptyExpression();
     }
   }
   return result;
 }
 
 template <class Impl>
+bool ParserBase<Impl>::SetPropertyKindFromToken(Token::Value token,
+                                                PropertyKind* kind) {
+  // This returns true, setting the property kind, iff the given token is one
+  // which must occur after a property name, indicating that the previous token
+  // was in fact a name and not a modifier (like the "get" in "get x").
+  switch (token) {
+    case Token::COLON:
+      *kind = PropertyKind::kValueProperty;
+      return true;
+    case Token::COMMA:
+    case Token::RBRACE:
+    case Token::ASSIGN:
+      *kind = PropertyKind::kShorthandProperty;
+      return true;
+    case Token::LPAREN:
+      *kind = PropertyKind::kMethodProperty;
+      return true;
+    case Token::MUL:
+    case Token::SEMICOLON:
+      *kind = PropertyKind::kClassField;
+      return true;
+    default:
+      break;
+  }
+  return false;
+}
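
SetPropertyKindFromToken captures the one-token lookahead that separates
property names from modifiers: after reading a name-like token, the token that
follows decides whether it really was a name (':' for a value, '(' for a
method, ',', '}' or '=' for shorthand, '*' or ';' for a class field) or whether
it was a prefix such as "get", "set", "async" or "static" that is itself
followed by the real name. A compact standalone illustration of the same idea,
using a hypothetical reduced token enum rather than V8's Token:

#include <iostream>

enum class Tok {
  kColon, kComma, kRBrace, kAssign, kLParen, kMul, kSemicolon, kIdentifier
};

enum class PropKind { kNotSet, kValue, kShorthand, kMethod, kClassField };

// Returns true iff `after_name` can only occur after a property name, in which
// case the previously read token was the name and not a modifier.
bool KindFromFollowingToken(Tok after_name, PropKind* kind) {
  switch (after_name) {
    case Tok::kColon:      *kind = PropKind::kValue;      return true;
    case Tok::kComma:
    case Tok::kRBrace:
    case Tok::kAssign:     *kind = PropKind::kShorthand;  return true;
    case Tok::kLParen:     *kind = PropKind::kMethod;     return true;
    case Tok::kMul:
    case Tok::kSemicolon:  *kind = PropKind::kClassField; return true;
    default:               return false;
  }
}

int main() {
  PropKind kind = PropKind::kNotSet;
  // "get(...)": '(' follows, so "get" is a method named get.
  std::cout << KindFromFollowingToken(Tok::kLParen, &kind) << "\n";      // 1
  // "get x(...)": an identifier follows, so "get" was an accessor prefix.
  std::cout << KindFromFollowingToken(Tok::kIdentifier, &kind) << "\n";  // 0
}
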
+
+template <class Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParsePropertyName(
-    IdentifierT* name, bool* is_get, bool* is_set, bool* is_computed_name,
-    ExpressionClassifier* classifier, bool* ok) {
+    IdentifierT* name, PropertyKind* kind, bool* is_generator, bool* is_get,
+    bool* is_set, bool* is_async, bool* is_computed_name, bool* ok) {
+  DCHECK(*kind == PropertyKind::kNotSet);
+  DCHECK(!*is_generator);
+  DCHECK(!*is_get);
+  DCHECK(!*is_set);
+  DCHECK(!*is_async);
+  DCHECK(!*is_computed_name);
+
+  *is_generator = Check(Token::MUL);
+  if (*is_generator) {
+    *kind = PropertyKind::kMethodProperty;
+  }
+
   Token::Value token = peek();
   int pos = peek_position();
 
+  if (allow_harmony_async_await() && !*is_generator && token == Token::ASYNC &&
+      !scanner()->HasAnyLineTerminatorAfterNext()) {
+    Consume(Token::ASYNC);
+    token = peek();
+    if (SetPropertyKindFromToken(token, kind)) {
+      *name = impl()->GetSymbol();  // TODO(bakkot) specialize on 'async'
+      impl()->PushLiteralName(*name);
+      return factory()->NewStringLiteral(*name, pos);
+    }
+    *kind = PropertyKind::kMethodProperty;
+    *is_async = true;
+    pos = peek_position();
+  }
+
+  if (token == Token::IDENTIFIER && !*is_generator && !*is_async) {
+    // This is checking for 'get' and 'set' in particular.
+    Consume(Token::IDENTIFIER);
+    token = peek();
+    if (SetPropertyKindFromToken(token, kind) ||
+        !scanner()->IsGetOrSet(is_get, is_set)) {
+      *name = impl()->GetSymbol();
+      impl()->PushLiteralName(*name);
+      return factory()->NewStringLiteral(*name, pos);
+    }
+    *kind = PropertyKind::kAccessorProperty;
+    pos = peek_position();
+  }
+
   // For non computed property names we normalize the name a bit:
   //
   //   "12" -> 12
@@ -1900,274 +2072,417 @@
   //
   // This is important because we use the property name as a key in a hash
   // table when we compute constant properties.
+  ExpressionT expression = impl()->EmptyExpression();
   switch (token) {
     case Token::STRING:
       Consume(Token::STRING);
-      *name = this->GetSymbol(scanner());
+      *name = impl()->GetSymbol();
       break;
 
     case Token::SMI:
       Consume(Token::SMI);
-      *name = this->GetNumberAsSymbol(scanner());
+      *name = impl()->GetNumberAsSymbol();
       break;
 
     case Token::NUMBER:
       Consume(Token::NUMBER);
-      *name = this->GetNumberAsSymbol(scanner());
+      *name = impl()->GetNumberAsSymbol();
       break;
 
     case Token::LBRACK: {
+      *name = impl()->EmptyIdentifier();
       *is_computed_name = true;
       Consume(Token::LBRACK);
       ExpressionClassifier computed_name_classifier(this);
-      ExpressionT expression =
-          ParseAssignmentExpression(true, &computed_name_classifier, CHECK_OK);
-      impl()->RewriteNonPattern(&computed_name_classifier, CHECK_OK);
-      classifier->Accumulate(&computed_name_classifier,
-                             ExpressionClassifier::ExpressionProductions);
+      expression = ParseAssignmentExpression(true, CHECK_OK);
+      impl()->RewriteNonPattern(CHECK_OK);
+      impl()->AccumulateFormalParameterContainmentErrors();
       Expect(Token::RBRACK, CHECK_OK);
-      return expression;
+      break;
     }
 
     default:
       *name = ParseIdentifierName(CHECK_OK);
-      scanner()->IsGetOrSet(is_get, is_set);
       break;
   }
 
+  if (*kind == PropertyKind::kNotSet) {
+    SetPropertyKindFromToken(peek(), kind);
+  }
+
+  if (*is_computed_name) {
+    return expression;
+  }
+
+  impl()->PushLiteralName(*name);
+
   uint32_t index;
-  return this->IsArrayIndex(*name, &index)
+  return impl()->IsArrayIndex(*name, &index)
              ? factory()->NewNumberLiteral(index, pos)
              : factory()->NewStringLiteral(*name, pos);
 }
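
The normalization at the end of ParsePropertyName ("12" -> 12, "12.3" stays a
string) hinges on whether the name is a valid array index, i.e. a canonical
decimal spelling of a uint32_t value below 2^32 - 1. The real parser answers
this through the AST string and scanner machinery; a self-contained
approximation of the check, for illustration only, could look like this:

#include <cstdint>
#include <string>

// Returns true and sets *index if `name` is the canonical decimal spelling of
// an array index: no leading zeros (except "0" itself), no overflow, and the
// reserved value 2^32 - 1 excluded, per the ECMAScript definition.
bool IsArrayIndexSketch(const std::string& name, uint32_t* index) {
  if (name.empty() || name.size() > 10) return false;
  if (name.size() > 1 && name[0] == '0') return false;  // "012" is not an index.
  uint64_t value = 0;
  for (char c : name) {
    if (c < '0' || c > '9') return false;
    value = value * 10 + static_cast<uint64_t>(c - '0');
  }
  if (value > 0xFFFFFFFEull) return false;
  *index = static_cast<uint32_t>(value);
  return true;
}
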
 
 template <typename Impl>
-typename ParserBase<Impl>::ObjectLiteralPropertyT
-ParserBase<Impl>::ParsePropertyDefinition(
-    ObjectLiteralCheckerBase* checker, bool in_class, bool has_extends,
-    MethodKind method_kind, bool* is_computed_name, bool* has_seen_constructor,
-    ExpressionClassifier* classifier, IdentifierT* name, bool* ok) {
-  DCHECK(!in_class || IsStaticMethod(method_kind) ||
-         has_seen_constructor != nullptr);
+typename ParserBase<Impl>::ClassLiteralPropertyT
+ParserBase<Impl>::ParseClassPropertyDefinition(ClassLiteralChecker* checker,
+                                               bool has_extends,
+                                               bool* is_computed_name,
+                                               bool* has_seen_constructor,
+                                               bool* ok) {
+  DCHECK(has_seen_constructor != nullptr);
   bool is_get = false;
   bool is_set = false;
-  bool is_generator = Check(Token::MUL);
+  bool is_generator = false;
   bool is_async = false;
-  const bool is_static = IsStaticMethod(method_kind);
+  bool is_static = false;
+  PropertyKind kind = PropertyKind::kNotSet;
 
   Token::Value name_token = peek();
 
-  if (is_generator) {
-    method_kind |= MethodKind::kGenerator;
-  } else if (allow_harmony_async_await() && name_token == Token::ASYNC &&
-             !scanner()->HasAnyLineTerminatorAfterNext() &&
-             PeekAhead() != Token::LPAREN && PeekAhead()) {
-    is_async = true;
+  IdentifierT name = impl()->EmptyIdentifier();
+  ExpressionT name_expression;
+  if (name_token == Token::STATIC) {
+    Consume(Token::STATIC);
+    if (peek() == Token::LPAREN) {
+      kind = PropertyKind::kMethodProperty;
+      name = impl()->GetSymbol();  // TODO(bakkot) specialize on 'static'
+      name_expression = factory()->NewStringLiteral(name, position());
+    } else if (peek() == Token::ASSIGN || peek() == Token::SEMICOLON ||
+               peek() == Token::RBRACE) {
+      name = impl()->GetSymbol();  // TODO(bakkot) specialize on 'static'
+      name_expression = factory()->NewStringLiteral(name, position());
+    } else {
+      is_static = true;
+      name_expression = ParsePropertyName(
+          &name, &kind, &is_generator, &is_get, &is_set, &is_async,
+          is_computed_name, CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+    }
+  } else {
+    name_expression = ParsePropertyName(
+        &name, &kind, &is_generator, &is_get, &is_set, &is_async,
+        is_computed_name, CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
   }
 
+  switch (kind) {
+    case PropertyKind::kClassField:
+    case PropertyKind::kNotSet:  // This case is a name followed by a name or
+                                 // other property. Here we have to assume
+                                 // that's an uninitialized field followed by a
+                                 // linebreak followed by a property, with ASI
+                                 // adding the semicolon. If not, there will be
+                                 // a syntax error after parsing the first name
+                                 // as an uninitialized field.
+    case PropertyKind::kShorthandProperty:
+    case PropertyKind::kValueProperty:
+      if (allow_harmony_class_fields()) {
+        bool has_initializer = Check(Token::ASSIGN);
+        ExpressionT function_literal = ParseClassFieldForInitializer(
+            has_initializer, CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+        ExpectSemicolon(CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+        return factory()->NewClassLiteralProperty(
+            name_expression, function_literal, ClassLiteralProperty::FIELD,
+            is_static, *is_computed_name);
+      } else {
+        ReportUnexpectedToken(Next());
+        *ok = false;
+        return impl()->EmptyClassLiteralProperty();
+      }
+
+    case PropertyKind::kMethodProperty: {
+      DCHECK(!is_get && !is_set);
+
+      // MethodDefinition
+      //    PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
+      //    '*' PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
+
+      if (!*is_computed_name) {
+        checker->CheckClassMethodName(
+            name_token, PropertyKind::kMethodProperty, is_generator, is_async,
+            is_static, CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+      }
+
+      FunctionKind kind = is_generator
+                              ? FunctionKind::kConciseGeneratorMethod
+                              : is_async ? FunctionKind::kAsyncConciseMethod
+                                         : FunctionKind::kConciseMethod;
+
+      if (!is_static && impl()->IsConstructor(name)) {
+        *has_seen_constructor = true;
+        kind = has_extends ? FunctionKind::kSubclassConstructor
+                           : FunctionKind::kBaseConstructor;
+      }
+
+      ExpressionT value = impl()->ParseFunctionLiteral(
+          name, scanner()->location(), kSkipFunctionNameCheck, kind,
+          kNoSourcePosition, FunctionLiteral::kAccessorOrMethod,
+          language_mode(), CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+
+      return factory()->NewClassLiteralProperty(name_expression, value,
+                                                ClassLiteralProperty::METHOD,
+                                                is_static, *is_computed_name);
+    }
+
+    case PropertyKind::kAccessorProperty: {
+      DCHECK((is_get || is_set) && !is_generator && !is_async);
+
+      if (!*is_computed_name) {
+        checker->CheckClassMethodName(
+            name_token, PropertyKind::kAccessorProperty, false, false,
+            is_static, CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+        // Make sure the name expression is a string since we need a Name for
+        // Runtime_DefineAccessorPropertyUnchecked and since we can determine
+        // this statically we can skip the extra runtime check.
+        name_expression =
+            factory()->NewStringLiteral(name, name_expression->position());
+      }
+
+      FunctionKind kind = is_get ? FunctionKind::kGetterFunction
+                                 : FunctionKind::kSetterFunction;
+
+      FunctionLiteralT value = impl()->ParseFunctionLiteral(
+          name, scanner()->location(), kSkipFunctionNameCheck, kind,
+          kNoSourcePosition, FunctionLiteral::kAccessorOrMethod,
+          language_mode(), CHECK_OK_CUSTOM(EmptyClassLiteralProperty));
+
+      if (!*is_computed_name) {
+        impl()->AddAccessorPrefixToFunctionName(is_get, value, name);
+      }
+
+      return factory()->NewClassLiteralProperty(
+          name_expression, value,
+          is_get ? ClassLiteralProperty::GETTER : ClassLiteralProperty::SETTER,
+          is_static, *is_computed_name);
+    }
+  }
+  UNREACHABLE();
+  return impl()->EmptyClassLiteralProperty();
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::FunctionLiteralT
+ParserBase<Impl>::ParseClassFieldForInitializer(bool has_initializer,
+                                                bool* ok) {
+  // Makes a concise method which evaluates and returns the initialized value
+  // (or undefined if absent).
+  FunctionKind kind = FunctionKind::kConciseMethod;
+  DeclarationScope* initializer_scope = NewFunctionScope(kind);
+  initializer_scope->set_start_position(scanner()->location().end_pos);
+  FunctionState initializer_state(&function_state_, &scope_state_,
+                                  initializer_scope);
+  DCHECK(scope() == initializer_scope);
+  scope()->SetLanguageMode(STRICT);
+  ExpressionClassifier expression_classifier(this);
+  ExpressionT value;
+  if (has_initializer) {
+    value = this->ParseAssignmentExpression(
+        true, CHECK_OK_CUSTOM(EmptyFunctionLiteral));
+    impl()->RewriteNonPattern(CHECK_OK_CUSTOM(EmptyFunctionLiteral));
+  } else {
+    value = factory()->NewUndefinedLiteral(kNoSourcePosition);
+  }
+  initializer_scope->set_end_position(scanner()->location().end_pos);
+  typename Types::StatementList body = impl()->NewStatementList(1);
+  body->Add(factory()->NewReturnStatement(value, kNoSourcePosition), zone());
+  FunctionLiteralT function_literal = factory()->NewFunctionLiteral(
+      impl()->EmptyIdentifierString(), initializer_scope, body,
+      initializer_state.materialized_literal_count(),
+      initializer_state.expected_property_count(), 0,
+      FunctionLiteral::kNoDuplicateParameters,
+      FunctionLiteral::kAnonymousExpression,
+      FunctionLiteral::kShouldLazyCompile, initializer_scope->start_position());
+  function_literal->set_is_class_field_initializer(true);
+  return function_literal;
+}
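
ParseClassFieldForInitializer wraps every class-field initializer in a
synthetic strict-mode concise method whose body is a single return of the
initializer value (or of undefined when the field has no initializer), and
tags it with set_is_class_field_initializer(true) so later phases can
recognize it. The shape of that desugaring, sketched over a toy AST rather
than V8's factory and scope machinery:

#include <memory>
#include <vector>

// Toy stand-ins for Expression, ReturnStatement and FunctionLiteral.
struct ToyExpr { bool is_undefined = false; };
struct ToyReturn { std::shared_ptr<ToyExpr> value; };
struct ToyFunction {
  std::vector<ToyReturn> body;
  bool is_class_field_initializer = false;
};

// Build the synthetic initializer function: its body just returns the parsed
// initializer, or undefined when the declaration carries no '= expr' part.
ToyFunction MakeFieldInitializer(std::shared_ptr<ToyExpr> initializer) {
  ToyFunction fn;
  if (!initializer) {
    initializer = std::make_shared<ToyExpr>();
    initializer->is_undefined = true;
  }
  fn.body.push_back(ToyReturn{initializer});
  fn.is_class_field_initializer = true;
  return fn;
}
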
+
+template <typename Impl>
+typename ParserBase<Impl>::ObjectLiteralPropertyT
+ParserBase<Impl>::ParseObjectPropertyDefinition(ObjectLiteralChecker* checker,
+                                                bool* is_computed_name,
+                                                bool* ok) {
+  bool is_get = false;
+  bool is_set = false;
+  bool is_generator = false;
+  bool is_async = false;
+  PropertyKind kind = PropertyKind::kNotSet;
+
+  IdentifierT name = impl()->EmptyIdentifier();
+  Token::Value name_token = peek();
   int next_beg_pos = scanner()->peek_location().beg_pos;
   int next_end_pos = scanner()->peek_location().end_pos;
-  ExpressionT name_expression =
-      ParsePropertyName(name, &is_get, &is_set, is_computed_name, classifier,
-                        CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
 
-  if (fni_ != nullptr && !*is_computed_name) {
-    this->PushLiteralName(fni_, *name);
-  }
+  ExpressionT name_expression = ParsePropertyName(
+      &name, &kind, &is_generator, &is_get, &is_set, &is_async,
+      is_computed_name, CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
 
-  if (!in_class && !is_generator) {
-    DCHECK(!IsStaticMethod(method_kind));
-    if (peek() == Token::COLON) {
-      // PropertyDefinition
-      //    PropertyName ':' AssignmentExpression
+  switch (kind) {
+    case PropertyKind::kValueProperty: {
+      DCHECK(!is_get && !is_set && !is_generator && !is_async);
+
       if (!*is_computed_name) {
-        checker->CheckProperty(name_token, kValueProperty, MethodKind::kNormal,
-                               classifier,
-                               CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+        checker->CheckDuplicateProto(name_token);
       }
       Consume(Token::COLON);
       int beg_pos = peek_position();
-      ExpressionT value = this->ParseAssignmentExpression(
-          true, classifier, CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
-      CheckDestructuringElement(value, classifier, beg_pos,
-                                scanner()->location().end_pos);
+      ExpressionT value = ParseAssignmentExpression(
+          true, CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+      CheckDestructuringElement(value, beg_pos, scanner()->location().end_pos);
 
-      return factory()->NewObjectLiteralProperty(name_expression, value,
-                                                 is_static, *is_computed_name);
+      ObjectLiteralPropertyT result = factory()->NewObjectLiteralProperty(
+          name_expression, value, *is_computed_name);
+
+      if (!*is_computed_name) {
+        impl()->SetFunctionNameFromPropertyName(result, name);
+      }
+
+      return result;
     }
 
-    if (Token::IsIdentifier(name_token, language_mode(), this->is_generator(),
-                            parsing_module_ || is_async_function()) &&
-        (peek() == Token::COMMA || peek() == Token::RBRACE ||
-         peek() == Token::ASSIGN)) {
+    case PropertyKind::kShorthandProperty: {
       // PropertyDefinition
       //    IdentifierReference
       //    CoverInitializedName
       //
       // CoverInitializedName
       //    IdentifierReference Initializer?
-      if (classifier->duplicate_finder() != nullptr &&
-          scanner()->FindSymbol(classifier->duplicate_finder(), 1) != 0) {
-        classifier->RecordDuplicateFormalParameterError(scanner()->location());
+      DCHECK(!is_get && !is_set && !is_generator && !is_async);
+
+      if (!Token::IsIdentifier(name_token, language_mode(),
+                               this->is_generator(),
+                               parsing_module_ || is_async_function())) {
+        ReportUnexpectedToken(Next());
+        *ok = false;
+        return impl()->EmptyObjectLiteralProperty();
       }
 
-      if (this->IsEvalOrArguments(*name) && is_strict(language_mode())) {
-        classifier->RecordBindingPatternError(
+      DCHECK(!*is_computed_name);
+
+      if (classifier()->duplicate_finder() != nullptr &&
+          scanner()->FindSymbol(classifier()->duplicate_finder(), 1) != 0) {
+        classifier()->RecordDuplicateFormalParameterError(
+            scanner()->location());
+      }
+
+      if (impl()->IsEvalOrArguments(name) && is_strict(language_mode())) {
+        classifier()->RecordBindingPatternError(
             scanner()->location(), MessageTemplate::kStrictEvalArguments);
       }
 
       if (name_token == Token::LET) {
-        classifier->RecordLetPatternError(
+        classifier()->RecordLetPatternError(
             scanner()->location(), MessageTemplate::kLetInLexicalBinding);
       }
       if (name_token == Token::AWAIT) {
         DCHECK(!is_async_function());
-        classifier->RecordAsyncArrowFormalParametersError(
+        classifier()->RecordAsyncArrowFormalParametersError(
             Scanner::Location(next_beg_pos, next_end_pos),
             MessageTemplate::kAwaitBindingIdentifier);
       }
       ExpressionT lhs =
-          this->ExpressionFromIdentifier(*name, next_beg_pos, next_end_pos);
-      CheckDestructuringElement(lhs, classifier, next_beg_pos, next_end_pos);
+          impl()->ExpressionFromIdentifier(name, next_beg_pos, next_end_pos);
+      CheckDestructuringElement(lhs, next_beg_pos, next_end_pos);
 
       ExpressionT value;
       if (peek() == Token::ASSIGN) {
         Consume(Token::ASSIGN);
         ExpressionClassifier rhs_classifier(this);
-        ExpressionT rhs = this->ParseAssignmentExpression(
-            true, &rhs_classifier, CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
-        impl()->RewriteNonPattern(&rhs_classifier,
-                                  CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
-        classifier->Accumulate(&rhs_classifier,
-                               ExpressionClassifier::ExpressionProductions);
+        ExpressionT rhs = ParseAssignmentExpression(
+            true, CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+        impl()->RewriteNonPattern(CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+        impl()->AccumulateFormalParameterContainmentErrors();
         value = factory()->NewAssignment(Token::ASSIGN, lhs, rhs,
                                          kNoSourcePosition);
-        classifier->RecordObjectLiteralError(
+        classifier()->RecordExpressionError(
             Scanner::Location(next_beg_pos, scanner()->location().end_pos),
             MessageTemplate::kInvalidCoverInitializedName);
 
-        Traits::SetFunctionNameFromIdentifierRef(rhs, lhs);
+        impl()->SetFunctionNameFromIdentifierRef(rhs, lhs);
       } else {
         value = lhs;
       }
 
       return factory()->NewObjectLiteralProperty(
-          name_expression, value, ObjectLiteralProperty::COMPUTED, is_static,
-          false);
-    }
-  }
-
-  // Method definitions are never valid in patterns.
-  classifier->RecordPatternError(
-      Scanner::Location(next_beg_pos, scanner()->location().end_pos),
-      MessageTemplate::kInvalidDestructuringTarget);
-
-  if (is_async && !IsSpecialMethod(method_kind)) {
-    DCHECK(!is_get);
-    DCHECK(!is_set);
-    bool dont_care;
-    name_expression = ParsePropertyName(
-        name, &dont_care, &dont_care, is_computed_name, classifier,
-        CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
-    method_kind |= MethodKind::kAsync;
-  }
-
-  if (is_generator || peek() == Token::LPAREN) {
-    // MethodDefinition
-    //    PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
-    //    '*' PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
-    if (!*is_computed_name) {
-      checker->CheckProperty(name_token, kMethodProperty, method_kind,
-                             classifier,
-                             CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+          name_expression, value, ObjectLiteralProperty::COMPUTED, false);
     }
 
-    FunctionKind kind = is_generator
-                            ? FunctionKind::kConciseGeneratorMethod
-                            : is_async ? FunctionKind::kAsyncConciseMethod
-                                       : FunctionKind::kConciseMethod;
+    case PropertyKind::kMethodProperty: {
+      DCHECK(!is_get && !is_set);
 
-    if (in_class && !IsStaticMethod(method_kind) &&
-        this->IsConstructor(*name)) {
-      *has_seen_constructor = true;
-      kind = has_extends ? FunctionKind::kSubclassConstructor
-                         : FunctionKind::kBaseConstructor;
+      // MethodDefinition
+      //    PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
+      //    '*' PropertyName '(' StrictFormalParameters ')' '{' FunctionBody '}'
+
+      classifier()->RecordPatternError(
+          Scanner::Location(next_beg_pos, scanner()->location().end_pos),
+          MessageTemplate::kInvalidDestructuringTarget);
+
+      FunctionKind kind = is_generator
+                              ? FunctionKind::kConciseGeneratorMethod
+                              : is_async ? FunctionKind::kAsyncConciseMethod
+                                         : FunctionKind::kConciseMethod;
+
+      ExpressionT value = impl()->ParseFunctionLiteral(
+          name, scanner()->location(), kSkipFunctionNameCheck, kind,
+          kNoSourcePosition, FunctionLiteral::kAccessorOrMethod,
+          language_mode(), CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+
+      return factory()->NewObjectLiteralProperty(
+          name_expression, value, ObjectLiteralProperty::COMPUTED,
+          *is_computed_name);
     }
 
-    ExpressionT value = impl()->ParseFunctionLiteral(
-        *name, scanner()->location(), kSkipFunctionNameCheck, kind,
-        kNoSourcePosition, FunctionLiteral::kAccessorOrMethod, language_mode(),
-        CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+    case PropertyKind::kAccessorProperty: {
+      DCHECK((is_get || is_set) && !(is_set && is_get) && !is_generator &&
+             !is_async);
 
-    return factory()->NewObjectLiteralProperty(name_expression, value,
-                                               ObjectLiteralProperty::COMPUTED,
-                                               is_static, *is_computed_name);
-  }
+      classifier()->RecordPatternError(
+          Scanner::Location(next_beg_pos, scanner()->location().end_pos),
+          MessageTemplate::kInvalidDestructuringTarget);
 
-  if (in_class && name_token == Token::STATIC && IsNormalMethod(method_kind)) {
-    // ClassElement (static)
-    //    'static' MethodDefinition
-    *name = this->EmptyIdentifier();
-    ObjectLiteralPropertyT property = ParsePropertyDefinition(
-        checker, true, has_extends, MethodKind::kStatic, is_computed_name,
-        nullptr, classifier, name, ok);
-    impl()->RewriteNonPattern(classifier, ok);
-    return property;
-  }
+      if (!*is_computed_name) {
+        // Make sure the name expression is a string since we need a Name for
+        // Runtime_DefineAccessorPropertyUnchecked and since we can determine
+        // this statically we can skip the extra runtime check.
+        name_expression =
+            factory()->NewStringLiteral(name, name_expression->position());
+      }
 
-  if (is_get || is_set) {
-    // MethodDefinition (Accessors)
-    //    get PropertyName '(' ')' '{' FunctionBody '}'
-    //    set PropertyName '(' PropertySetParameterList ')' '{' FunctionBody '}'
-    *name = this->EmptyIdentifier();
-    bool dont_care = false;
-    name_token = peek();
+      FunctionKind kind = is_get ? FunctionKind::kGetterFunction
+                                 : FunctionKind::kSetterFunction;
 
-    name_expression = ParsePropertyName(
-        name, &dont_care, &dont_care, is_computed_name, classifier,
-        CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+      FunctionLiteralT value = impl()->ParseFunctionLiteral(
+          name, scanner()->location(), kSkipFunctionNameCheck, kind,
+          kNoSourcePosition, FunctionLiteral::kAccessorOrMethod,
+          language_mode(), CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
 
-    if (!*is_computed_name) {
-      checker->CheckProperty(name_token, kAccessorProperty, method_kind,
-                             classifier,
-                             CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
+      if (!*is_computed_name) {
+        impl()->AddAccessorPrefixToFunctionName(is_get, value, name);
+      }
+
+      return factory()->NewObjectLiteralProperty(
+          name_expression, value, is_get ? ObjectLiteralProperty::GETTER
+                                         : ObjectLiteralProperty::SETTER,
+          *is_computed_name);
     }
 
-    typename Traits::Type::FunctionLiteral value = impl()->ParseFunctionLiteral(
-        *name, scanner()->location(), kSkipFunctionNameCheck,
-        is_get ? FunctionKind::kGetterFunction : FunctionKind::kSetterFunction,
-        kNoSourcePosition, FunctionLiteral::kAccessorOrMethod, language_mode(),
-        CHECK_OK_CUSTOM(EmptyObjectLiteralProperty));
-
-    // Make sure the name expression is a string since we need a Name for
-    // Runtime_DefineAccessorPropertyUnchecked and since we can determine this
-    // statically we can skip the extra runtime check.
-    if (!*is_computed_name) {
-      name_expression =
-          factory()->NewStringLiteral(*name, name_expression->position());
-    }
-
-    return factory()->NewObjectLiteralProperty(
-        name_expression, value,
-        is_get ? ObjectLiteralProperty::GETTER : ObjectLiteralProperty::SETTER,
-        is_static, *is_computed_name);
+    case PropertyKind::kClassField:
+    case PropertyKind::kNotSet:
+      ReportUnexpectedToken(Next());
+      *ok = false;
+      return impl()->EmptyObjectLiteralProperty();
   }
-
-  Token::Value next = Next();
-  ReportUnexpectedToken(next);
-  *ok = false;
-  return this->EmptyObjectLiteralProperty();
+  UNREACHABLE();
+  return impl()->EmptyObjectLiteralProperty();
 }
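
The kShorthandProperty branch above also covers CoverInitializedName: for
"{ x = 1 }" the parser builds the assignment unconditionally but records
MessageTemplate::kInvalidCoverInitializedName as an expression error in the
classifier, so the property is accepted if the literal later turns out to be a
destructuring pattern and rejected if it is used as a plain expression. A
small sketch of that record-now-decide-later pattern, with hypothetical types
standing in for the classifier and AST:

#include <string>
#include <utility>
#include <vector>

// Pending errors for the construct being parsed; reported only if the
// construct is ultimately used as a plain expression.
struct PendingErrors {
  std::vector<std::string> as_expression;
};

struct SketchProperty {
  std::string name;
  std::string initializer;  // Non-empty for "{ x = 1 }".
};

// Build the property either way, but remember that the initializer form is
// only valid when the enclosing braces are a destructuring pattern.
SketchProperty HandleShorthandWithInitializer(std::string name,
                                              std::string init,
                                              PendingErrors* errors) {
  errors->as_expression.push_back("Invalid shorthand property initializer");
  return SketchProperty{std::move(name), std::move(init)};
}

// Once the context is known:
//   - used as a destructuring target: drop errors->as_expression,
//   - used as an expression: report everything recorded in it.
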
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseObjectLiteral(
-    ExpressionClassifier* classifier, bool* ok) {
+    bool* ok) {
   // ObjectLiteral ::
   // '{' (PropertyDefinition (',' PropertyDefinition)* ','? )? '}'
 
   int pos = peek_position();
-  typename Traits::Type::PropertyList properties =
-      this->NewPropertyList(4, zone_);
+  typename Types::ObjectPropertyList properties =
+      impl()->NewObjectPropertyList(4);
   int number_of_boilerplate_properties = 0;
   bool has_computed_names = false;
   ObjectLiteralChecker checker(this);
@@ -2177,20 +2492,16 @@
   while (peek() != Token::RBRACE) {
     FuncNameInferrer::State fni_state(fni_);
 
-    const bool in_class = false;
-    const bool has_extends = false;
     bool is_computed_name = false;
-    IdentifierT name = this->EmptyIdentifier();
-    ObjectLiteralPropertyT property = this->ParsePropertyDefinition(
-        &checker, in_class, has_extends, MethodKind::kNormal, &is_computed_name,
-        NULL, classifier, &name, CHECK_OK);
+    ObjectLiteralPropertyT property =
+        ParseObjectPropertyDefinition(&checker, &is_computed_name, CHECK_OK);
 
     if (is_computed_name) {
       has_computed_names = true;
     }
 
     // Count CONSTANT or COMPUTED properties to maintain the enumeration order.
-    if (!has_computed_names && this->IsBoilerplateProperty(property)) {
+    if (!has_computed_names && impl()->IsBoilerplateProperty(property)) {
       number_of_boilerplate_properties++;
     }
     properties->Add(property, zone());
@@ -2201,8 +2512,6 @@
     }
 
     if (fni_ != nullptr) fni_->Infer();
-
-    Traits::SetFunctionNameFromPropertyName(property, name);
   }
   Expect(Token::RBRACE, CHECK_OK);
 
@@ -2216,16 +2525,13 @@
 }
 
 template <typename Impl>
-typename ParserBase<Impl>::Traits::Type::ExpressionList
-ParserBase<Impl>::ParseArguments(Scanner::Location* first_spread_arg_loc,
-                                 bool maybe_arrow,
-                                 ExpressionClassifier* classifier, bool* ok) {
+typename ParserBase<Impl>::ExpressionListT ParserBase<Impl>::ParseArguments(
+    Scanner::Location* first_spread_arg_loc, bool maybe_arrow, bool* ok) {
   // Arguments ::
   //   '(' (AssignmentExpression)*[','] ')'
 
   Scanner::Location spread_arg = Scanner::Location::invalid();
-  typename Traits::Type::ExpressionList result =
-      this->NewExpressionList(4, zone_);
+  ExpressionListT result = impl()->NewExpressionList(4);
   Expect(Token::LPAREN, CHECK_OK_CUSTOM(NullExpressionList));
   bool done = (peek() == Token::RPAREN);
   bool was_unspread = false;
@@ -2235,12 +2541,10 @@
     bool is_spread = Check(Token::ELLIPSIS);
     int expr_pos = peek_position();
 
-    ExpressionT argument = this->ParseAssignmentExpression(
-        true, classifier, CHECK_OK_CUSTOM(NullExpressionList));
-    CheckNoTailCallExpressions(classifier, CHECK_OK_CUSTOM(NullExpressionList));
+    ExpressionT argument =
+        ParseAssignmentExpression(true, CHECK_OK_CUSTOM(NullExpressionList));
     if (!maybe_arrow) {
-      impl()->RewriteNonPattern(classifier,
-                                CHECK_OK_CUSTOM(NullExpressionList));
+      impl()->RewriteNonPattern(CHECK_OK_CUSTOM(NullExpressionList));
     }
     if (is_spread) {
       if (!spread_arg.IsValid()) {
@@ -2263,7 +2567,7 @@
     if (result->length() > Code::kMaxArguments) {
       ReportMessage(MessageTemplate::kTooManyArguments);
       *ok = false;
-      return this->NullExpressionList();
+      return impl()->NullExpressionList();
     }
     done = (peek() != Token::COMMA);
     if (!done) {
@@ -2276,22 +2580,21 @@
   }
   Scanner::Location location = scanner_->location();
   if (Token::RPAREN != Next()) {
-    ReportMessageAt(location, MessageTemplate::kUnterminatedArgList);
+    impl()->ReportMessageAt(location, MessageTemplate::kUnterminatedArgList);
     *ok = false;
-    return this->NullExpressionList();
+    return impl()->NullExpressionList();
   }
   *first_spread_arg_loc = spread_arg;
 
   if (!maybe_arrow || peek() != Token::ARROW) {
     if (maybe_arrow) {
-      impl()->RewriteNonPattern(classifier,
-                                CHECK_OK_CUSTOM(NullExpressionList));
+      impl()->RewriteNonPattern(CHECK_OK_CUSTOM(NullExpressionList));
     }
     if (spread_arg.IsValid()) {
       // Unspread parameter sequences are translated into array literals in the
       // parser. Ensure that the number of materialized literals matches between
       // the parser and preparser
-      Traits::MaterializeUnspreadArgumentsLiterals(unspread_sequences_count);
+      impl()->MaterializeUnspreadArgumentsLiterals(unspread_sequences_count);
     }
   }
 
@@ -2301,9 +2604,7 @@
 // Precedence = 2
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
-ParserBase<Impl>::ParseAssignmentExpression(bool accept_IN,
-                                            ExpressionClassifier* classifier,
-                                            bool* ok) {
+ParserBase<Impl>::ParseAssignmentExpression(bool accept_IN, bool* ok) {
   // AssignmentExpression ::
   //   ConditionalExpression
   //   ArrowFunction
@@ -2312,13 +2613,13 @@
   int lhs_beg_pos = peek_position();
 
   if (peek() == Token::YIELD && is_generator()) {
-    return this->ParseYieldExpression(accept_IN, classifier, ok);
+    return ParseYieldExpression(accept_IN, ok);
   }
 
   FuncNameInferrer::State fni_state(fni_);
   Checkpoint checkpoint(this);
-  ExpressionClassifier arrow_formals_classifier(this,
-                                                classifier->duplicate_finder());
+  ExpressionClassifier arrow_formals_classifier(
+      this, classifier()->duplicate_finder());
 
   Scope::Snapshot scope_snapshot(scope());
 
@@ -2328,26 +2629,23 @@
 
   bool parenthesized_formals = peek() == Token::LPAREN;
   if (!is_async && !parenthesized_formals) {
-    ArrowFormalParametersUnexpectedToken(&arrow_formals_classifier);
+    ArrowFormalParametersUnexpectedToken();
   }
 
   // Parse a simple, faster sub-grammar (primary expression) if it's evident
   // that we have only a trivial expression to parse.
   ExpressionT expression;
   if (IsTrivialExpression()) {
-    expression = this->ParsePrimaryExpression(&arrow_formals_classifier,
-                                              &is_async, CHECK_OK);
+    expression = ParsePrimaryExpression(&is_async, CHECK_OK);
   } else {
-    expression = this->ParseConditionalExpression(
-        accept_IN, &arrow_formals_classifier, CHECK_OK);
+    expression = ParseConditionalExpression(accept_IN, CHECK_OK);
   }
 
-  if (is_async && this->IsIdentifier(expression) && peek_any_identifier() &&
+  if (is_async && impl()->IsIdentifier(expression) && peek_any_identifier() &&
       PeekAhead() == Token::ARROW) {
     // async Identifier => AsyncConciseBody
-    IdentifierT name =
-        ParseAndClassifyIdentifier(&arrow_formals_classifier, CHECK_OK);
-    expression = this->ExpressionFromIdentifier(
+    IdentifierT name = ParseAndClassifyIdentifier(CHECK_OK);
+    expression = impl()->ExpressionFromIdentifier(
         name, position(), scanner()->location().end_pos, InferName::kNo);
     if (fni_) {
       // Remove `async` keyword from inferred name stack.
@@ -2357,26 +2655,29 @@
 
   if (peek() == Token::ARROW) {
     Scanner::Location arrow_loc = scanner()->peek_location();
-    ValidateArrowFormalParameters(&arrow_formals_classifier, expression,
-                                  parenthesized_formals, is_async, CHECK_OK);
+    ValidateArrowFormalParameters(expression, parenthesized_formals, is_async,
+                                  CHECK_OK);
     // This reads strangely, but is correct: it checks whether any
     // sub-expression of the parameter list failed to be a valid formal
     // parameter initializer. Since YieldExpressions are banned anywhere
     // in an arrow parameter list, this is correct.
     // TODO(adamk): Rename "FormalParameterInitializerError" to refer to
     // "YieldExpression", which is its only use.
-    ValidateFormalParameterInitializer(&arrow_formals_classifier, ok);
+    ValidateFormalParameterInitializer(ok);
 
     Scanner::Location loc(lhs_beg_pos, scanner()->location().end_pos);
     DeclarationScope* scope =
-        this->NewFunctionScope(is_async ? FunctionKind::kAsyncArrowFunction
-                                        : FunctionKind::kArrowFunction);
+        NewFunctionScope(is_async ? FunctionKind::kAsyncArrowFunction
+                                  : FunctionKind::kArrowFunction);
     // Because the arrow's parameters were parsed in the outer scope, any
     // usage flags that might have been triggered there need to be copied
     // to the arrow scope.
     this->scope()->PropagateUsageFlagsToScope(scope);
+
+    scope_snapshot.Reparent(scope);
+
     FormalParametersT parameters(scope);
-    if (!arrow_formals_classifier.is_simple_parameter_list()) {
+    if (!classifier()->is_simple_parameter_list()) {
       scope->SetHasNonSimpleParameters();
       parameters.is_simple = false;
     }
@@ -2385,18 +2686,16 @@
 
     scope->set_start_position(lhs_beg_pos);
     Scanner::Location duplicate_loc = Scanner::Location::invalid();
-    this->ParseArrowFunctionFormalParameterList(
-        &parameters, expression, loc, &duplicate_loc, scope_snapshot, CHECK_OK);
+    impl()->DeclareArrowFunctionFormalParameters(&parameters, expression, loc,
+                                                 &duplicate_loc, CHECK_OK);
     if (duplicate_loc.IsValid()) {
-      arrow_formals_classifier.RecordDuplicateFormalParameterError(
-          duplicate_loc);
+      classifier()->RecordDuplicateFormalParameterError(duplicate_loc);
     }
-    expression = this->ParseArrowFunctionLiteral(
-        accept_IN, parameters, is_async, arrow_formals_classifier, CHECK_OK);
-    arrow_formals_classifier.Discard();
-    classifier->RecordPatternError(arrow_loc,
-                                   MessageTemplate::kUnexpectedToken,
-                                   Token::String(Token::ARROW));
+    expression = ParseArrowFunctionLiteral(accept_IN, parameters, CHECK_OK);
+    impl()->Discard();
+    classifier()->RecordPatternError(arrow_loc,
+                                     MessageTemplate::kUnexpectedToken,
+                                     Token::String(Token::ARROW));
 
     if (fni_ != nullptr) fni_->Infer();
 
@@ -2407,87 +2706,70 @@
   // form part of one.  Propagate speculative formal parameter error locations
   // (including those for binding patterns, since formal parameters can
   // themselves contain binding patterns).
-  // Do not merge pending non-pattern expressions yet!
-  unsigned productions =
-      ExpressionClassifier::FormalParametersProductions |
-      ExpressionClassifier::AsyncArrowFormalParametersProduction |
-      ExpressionClassifier::FormalParameterInitializerProduction;
+  unsigned productions = ExpressionClassifier::AllProductions &
+                         ~ExpressionClassifier::ArrowFormalParametersProduction;
 
   // Parenthesized identifiers and property references are allowed as part
-  // of a larger binding pattern, even though parenthesized patterns
+  // of a larger assignment pattern, even though parenthesized patterns
   // themselves are not allowed, e.g., "[(x)] = []". Only accumulate
   // assignment pattern errors if the parsed expression is more complex.
-  if (this->IsValidReferenceExpression(expression)) {
-    productions |= ExpressionClassifier::PatternProductions &
-                   ~ExpressionClassifier::AssignmentPatternProduction;
-  } else {
-    productions |= ExpressionClassifier::PatternProductions;
+  if (IsValidReferenceExpression(expression)) {
+    productions &= ~ExpressionClassifier::AssignmentPatternProduction;
   }
 
   const bool is_destructuring_assignment =
       IsValidPattern(expression) && peek() == Token::ASSIGN;
-  if (!is_destructuring_assignment) {
-    // This may be an expression or a pattern, so we must continue to
-    // accumulate expression-related errors.
-    productions |= ExpressionClassifier::ExpressionProduction |
-                   ExpressionClassifier::TailCallExpressionProduction |
-                   ExpressionClassifier::ObjectLiteralProduction;
+  if (is_destructuring_assignment) {
+    // This is definitely not an expression so don't accumulate
+    // expression-related errors.
+    productions &= ~(ExpressionClassifier::ExpressionProduction |
+                     ExpressionClassifier::TailCallExpressionProduction);
   }
 
-  classifier->Accumulate(&arrow_formals_classifier, productions, false);
-
   if (!Token::IsAssignmentOp(peek())) {
     // Parsed conditional expression only (no assignment).
-    // Now pending non-pattern expressions must be merged.
-    classifier->MergeNonPatterns(&arrow_formals_classifier);
+    // Pending non-pattern expressions must be merged.
+    impl()->Accumulate(productions);
     return expression;
+  } else {
+    // Pending non-pattern expressions must be discarded.
+    impl()->Accumulate(productions, false);
   }
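The `productions` mask computed above controls which speculative error categories from the cover grammar are accumulated into the enclosing classifier: arrow-formal-parameter errors are always excluded, assignment-pattern errors are dropped when the expression is a plain reference such as `(x)`, and expression errors are dropped once the input is known to be a destructuring assignment. A standalone sketch of that bitmask bookkeeping follows; the flag names and values are made up, not V8's ExpressionClassifier.

// Standalone sketch of the error-"productions" bitmask logic above.
// Flag names/values are illustrative, not V8's ExpressionClassifier.
#include <cstdio>

enum Production : unsigned {
  kExpressionProduction = 1u << 0,
  kAssignmentPatternProduction = 1u << 1,
  kArrowFormalParametersProduction = 1u << 2,
  kAllProductions = (1u << 3) - 1
};

unsigned ProductionsToAccumulate(bool is_valid_reference,
                                 bool is_destructuring_assignment) {
  // Arrow-formal-parameter errors are handled separately, so start from
  // everything else and then drop categories that can no longer apply.
  unsigned productions = kAllProductions & ~kArrowFormalParametersProduction;
  if (is_valid_reference) productions &= ~kAssignmentPatternProduction;
  if (is_destructuring_assignment) productions &= ~kExpressionProduction;
  return productions;
}

int main() {
  // "(x)" is a valid reference: keep expression errors, drop pattern errors.
  std::printf("0x%x\n", ProductionsToAccumulate(true, false));  // 0x1
  // "[a, b] = c" is definitely a pattern: drop expression errors.
  std::printf("0x%x\n", ProductionsToAccumulate(false, true));  // 0x2
}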
 
-  // Now pending non-pattern expressions must be discarded.
-  arrow_formals_classifier.Discard();
-
-  CheckNoTailCallExpressions(classifier, CHECK_OK);
-
   if (is_destructuring_assignment) {
-    ValidateAssignmentPattern(classifier, CHECK_OK);
+    ValidateAssignmentPattern(CHECK_OK);
   } else {
-    expression = this->CheckAndRewriteReferenceExpression(
+    expression = CheckAndRewriteReferenceExpression(
         expression, lhs_beg_pos, scanner()->location().end_pos,
         MessageTemplate::kInvalidLhsInAssignment, CHECK_OK);
   }
 
-  expression = this->MarkExpressionAsAssigned(expression);
+  expression = impl()->MarkExpressionAsAssigned(expression);
 
   Token::Value op = Next();  // Get assignment operator.
   if (op != Token::ASSIGN) {
-    classifier->RecordPatternError(scanner()->location(),
-                                   MessageTemplate::kUnexpectedToken,
-                                   Token::String(op));
+    classifier()->RecordPatternError(scanner()->location(),
+                                     MessageTemplate::kUnexpectedToken,
+                                     Token::String(op));
   }
   int pos = position();
 
   ExpressionClassifier rhs_classifier(this);
 
-  ExpressionT right =
-      this->ParseAssignmentExpression(accept_IN, &rhs_classifier, CHECK_OK);
-  CheckNoTailCallExpressions(&rhs_classifier, CHECK_OK);
-  impl()->RewriteNonPattern(&rhs_classifier, CHECK_OK);
-  classifier->Accumulate(
-      &rhs_classifier,
-      ExpressionClassifier::ExpressionProductions |
-          ExpressionClassifier::ObjectLiteralProduction |
-          ExpressionClassifier::AsyncArrowFormalParametersProduction);
+  ExpressionT right = ParseAssignmentExpression(accept_IN, CHECK_OK);
+  impl()->RewriteNonPattern(CHECK_OK);
+  impl()->AccumulateFormalParameterContainmentErrors();
 
   // TODO(1231235): We try to estimate the set of properties set by
   // constructors. We define a new property whenever there is an
   // assignment to a property of 'this'. We should probably only add
   // properties if we haven't seen them before. Otherwise we'll
   // probably overestimate the number of properties.
-  if (op == Token::ASSIGN && this->IsThisProperty(expression)) {
+  if (op == Token::ASSIGN && impl()->IsThisProperty(expression)) {
     function_state_->AddProperty();
   }
 
-  this->CheckAssigningFunctionLiteralToProperty(expression, right);
+  impl()->CheckAssigningFunctionLiteralToProperty(expression, right);
 
   if (fni_ != NULL) {
     // Check if the right hand side is a call to avoid inferring a
@@ -2502,7 +2784,7 @@
   }
 
   if (op == Token::ASSIGN) {
-    Traits::SetFunctionNameFromIdentifierRef(right, expression);
+    impl()->SetFunctionNameFromIdentifierRef(right, expression);
   }
 
   if (op == Token::ASSIGN_EXP) {
@@ -2522,19 +2804,19 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseYieldExpression(
-    bool accept_IN, ExpressionClassifier* classifier, bool* ok) {
+    bool accept_IN, bool* ok) {
   // YieldExpression ::
   //   'yield' ([no line terminator] '*'? AssignmentExpression)?
   int pos = peek_position();
-  classifier->RecordPatternError(scanner()->peek_location(),
-                                 MessageTemplate::kInvalidDestructuringTarget);
-  classifier->RecordFormalParameterInitializerError(
+  classifier()->RecordPatternError(
+      scanner()->peek_location(), MessageTemplate::kInvalidDestructuringTarget);
+  classifier()->RecordFormalParameterInitializerError(
       scanner()->peek_location(), MessageTemplate::kYieldInParameter);
   Expect(Token::YIELD, CHECK_OK);
   ExpressionT generator_object =
       factory()->NewVariableProxy(function_state_->generator_object_variable());
   // The following initialization is necessary.
-  ExpressionT expression = Traits::EmptyExpression();
+  ExpressionT expression = impl()->EmptyExpression();
   bool delegating = false;  // yield*
   if (!scanner()->HasAnyLineTerminatorBeforeNext()) {
     if (Check(Token::MUL)) delegating = true;
@@ -2553,8 +2835,8 @@
         if (!delegating) break;
         // Delegating yields require an RHS; fall through.
       default:
-        expression = ParseAssignmentExpression(accept_IN, classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
+        expression = ParseAssignmentExpression(accept_IN, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
         break;
     }
   }
@@ -2563,87 +2845,18 @@
     return impl()->RewriteYieldStar(generator_object, expression, pos);
   }
 
-  expression = Traits::BuildIteratorResult(expression, false);
+  expression = impl()->BuildIteratorResult(expression, false);
   // Hackily disambiguate o from o.next and o [Symbol.iterator]().
   // TODO(verwaest): Come up with a better solution.
-  typename Traits::Type::YieldExpression yield = factory()->NewYield(
-      generator_object, expression, pos, Yield::kOnExceptionThrow);
+  ExpressionT yield = factory()->NewYield(generator_object, expression, pos,
+                                          Yield::kOnExceptionThrow);
   return yield;
 }
 
-template <typename Impl>
-typename ParserBase<Impl>::ExpressionT
-ParserBase<Impl>::ParseTailCallExpression(ExpressionClassifier* classifier,
-                                          bool* ok) {
-  // TailCallExpression::
-  //   'continue' MemberExpression  Arguments
-  //   'continue' CallExpression  Arguments
-  //   'continue' MemberExpression  TemplateLiteral
-  //   'continue' CallExpression  TemplateLiteral
-  Expect(Token::CONTINUE, CHECK_OK);
-  int pos = position();
-  int sub_expression_pos = peek_position();
-  ExpressionT expression =
-      this->ParseLeftHandSideExpression(classifier, CHECK_OK);
-  CheckNoTailCallExpressions(classifier, CHECK_OK);
-
-  Scanner::Location loc(pos, scanner()->location().end_pos);
-  if (!expression->IsCall()) {
-    Scanner::Location sub_loc(sub_expression_pos, loc.end_pos);
-    ReportMessageAt(sub_loc, MessageTemplate::kUnexpectedInsideTailCall);
-    *ok = false;
-    return Traits::EmptyExpression();
-  }
-  if (Traits::IsDirectEvalCall(expression)) {
-    Scanner::Location sub_loc(sub_expression_pos, loc.end_pos);
-    ReportMessageAt(sub_loc, MessageTemplate::kUnexpectedTailCallOfEval);
-    *ok = false;
-    return Traits::EmptyExpression();
-  }
-  if (!is_strict(language_mode())) {
-    ReportMessageAt(loc, MessageTemplate::kUnexpectedSloppyTailCall);
-    *ok = false;
-    return Traits::EmptyExpression();
-  }
-  if (is_resumable()) {
-    Scanner::Location sub_loc(sub_expression_pos, loc.end_pos);
-    ReportMessageAt(sub_loc, MessageTemplate::kUnexpectedTailCall);
-    *ok = false;
-    return Traits::EmptyExpression();
-  }
-  ReturnExprContext return_expr_context =
-      function_state_->return_expr_context();
-  if (return_expr_context != ReturnExprContext::kInsideValidReturnStatement) {
-    MessageTemplate::Template msg = MessageTemplate::kNone;
-    switch (return_expr_context) {
-      case ReturnExprContext::kInsideValidReturnStatement:
-        UNREACHABLE();
-        return Traits::EmptyExpression();
-      case ReturnExprContext::kInsideValidBlock:
-        msg = MessageTemplate::kUnexpectedTailCall;
-        break;
-      case ReturnExprContext::kInsideTryBlock:
-        msg = MessageTemplate::kUnexpectedTailCallInTryBlock;
-        break;
-      case ReturnExprContext::kInsideForInOfBody:
-        msg = MessageTemplate::kUnexpectedTailCallInForInOf;
-        break;
-    }
-    ReportMessageAt(loc, msg);
-    *ok = false;
-    return Traits::EmptyExpression();
-  }
-  classifier->RecordTailCallExpressionError(
-      loc, MessageTemplate::kUnexpectedTailCall);
-  function_state_->AddExplicitTailCallExpression(expression, loc);
-  return expression;
-}
-
 // Precedence = 3
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
 ParserBase<Impl>::ParseConditionalExpression(bool accept_IN,
-                                             ExpressionClassifier* classifier,
                                              bool* ok) {
   // ConditionalExpression ::
   //   LogicalOrExpression
@@ -2651,23 +2864,20 @@
 
   int pos = peek_position();
   // We start using the binary expression parser for prec >= 4 only!
-  ExpressionT expression =
-      this->ParseBinaryExpression(4, accept_IN, classifier, CHECK_OK);
+  ExpressionT expression = ParseBinaryExpression(4, accept_IN, CHECK_OK);
   if (peek() != Token::CONDITIONAL) return expression;
-  CheckNoTailCallExpressions(classifier, CHECK_OK);
-  impl()->RewriteNonPattern(classifier, CHECK_OK);
-  BindingPatternUnexpectedToken(classifier);
-  ArrowFormalParametersUnexpectedToken(classifier);
+  impl()->RewriteNonPattern(CHECK_OK);
+  BindingPatternUnexpectedToken();
+  ArrowFormalParametersUnexpectedToken();
   Consume(Token::CONDITIONAL);
   // In parsing the first assignment expression in conditional
   // expressions we always accept the 'in' keyword; see ECMA-262,
   // section 11.12, page 58.
-  ExpressionT left = ParseAssignmentExpression(true, classifier, CHECK_OK);
-  impl()->RewriteNonPattern(classifier, CHECK_OK);
+  ExpressionT left = ParseAssignmentExpression(true, CHECK_OK);
+  impl()->RewriteNonPattern(CHECK_OK);
   Expect(Token::COLON, CHECK_OK);
-  ExpressionT right =
-      ParseAssignmentExpression(accept_IN, classifier, CHECK_OK);
-  impl()->RewriteNonPattern(classifier, CHECK_OK);
+  ExpressionT right = ParseAssignmentExpression(accept_IN, CHECK_OK);
+  impl()->RewriteNonPattern(CHECK_OK);
   return factory()->NewConditional(expression, left, right, pos);
 }
 
@@ -2675,30 +2885,24 @@
 // Precedence >= 4
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseBinaryExpression(
-    int prec, bool accept_IN, ExpressionClassifier* classifier, bool* ok) {
+    int prec, bool accept_IN, bool* ok) {
   DCHECK(prec >= 4);
-  ExpressionT x = this->ParseUnaryExpression(classifier, CHECK_OK);
+  ExpressionT x = ParseUnaryExpression(CHECK_OK);
   for (int prec1 = Precedence(peek(), accept_IN); prec1 >= prec; prec1--) {
     // prec1 >= 4
     while (Precedence(peek(), accept_IN) == prec1) {
-      CheckNoTailCallExpressions(classifier, CHECK_OK);
-      impl()->RewriteNonPattern(classifier, CHECK_OK);
-      BindingPatternUnexpectedToken(classifier);
-      ArrowFormalParametersUnexpectedToken(classifier);
+      impl()->RewriteNonPattern(CHECK_OK);
+      BindingPatternUnexpectedToken();
+      ArrowFormalParametersUnexpectedToken();
       Token::Value op = Next();
       int pos = position();
 
       const bool is_right_associative = op == Token::EXP;
       const int next_prec = is_right_associative ? prec1 : prec1 + 1;
-      ExpressionT y =
-          ParseBinaryExpression(next_prec, accept_IN, classifier, CHECK_OK);
-      if (op != Token::OR && op != Token::AND) {
-        CheckNoTailCallExpressions(classifier, CHECK_OK);
-      }
-      impl()->RewriteNonPattern(classifier, CHECK_OK);
+      ExpressionT y = ParseBinaryExpression(next_prec, accept_IN, CHECK_OK);
+      impl()->RewriteNonPattern(CHECK_OK);
 
-      if (this->ShortcutNumericLiteralBinaryExpression(&x, y, op, pos,
-                                                       factory())) {
+      if (impl()->ShortcutNumericLiteralBinaryExpression(&x, y, op, pos)) {
         continue;
       }
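ParseBinaryExpression above is a classic precedence-climbing loop: operators at precedence 4 and up are folded here, and the one right-associative operator (`**`) recurses at the same precedence level while left-associative operators recurse at `prec1 + 1`. A minimal self-contained evaluator with the same control flow, over a toy token stream rather than V8's scanner or AST:

// Minimal precedence-climbing evaluator mirroring the loop structure above.
// Toy tokens and precedence table; not V8 code.
#include <cstdio>
#include <string>
#include <vector>

struct Tok { std::string op; double num; };  // empty op => number token
static std::vector<Tok> tokens = {           // 2 + 3 * 2 ** 2
    {"", 2}, {"+", 0}, {"", 3}, {"*", 0}, {"", 2}, {"**", 0}, {"", 2}};
static size_t cursor = 0;

static std::string Peek() {
  return cursor < tokens.size() ? tokens[cursor].op : "";
}

static int Precedence(const std::string& op) {
  if (op == "+") return 4;
  if (op == "*") return 5;
  if (op == "**") return 6;
  return 0;  // not a binary operator
}

static double ParseUnary() { return tokens[cursor++].num; }

static double ParseBinary(int prec) {
  double x = ParseUnary();
  for (int prec1 = Precedence(Peek()); prec1 >= prec; prec1--) {
    while (Precedence(Peek()) == prec1) {
      std::string op = tokens[cursor++].op;
      // Right-associative ** stays at the same precedence; everything else
      // climbs to prec1 + 1, exactly as in the loop above.
      double y = ParseBinary(op == "**" ? prec1 : prec1 + 1);
      if (op == "+") {
        x += y;
      } else if (op == "*") {
        x *= y;
      } else {  // "**", integer exponent for simplicity
        double r = 1;
        for (int i = 0; i < static_cast<int>(y); i++) r *= x;
        x = r;
      }
    }
  }
  return x;
}

int main() {
  std::printf("%g\n", ParseBinary(4));  // 2 + 3 * 2 ** 2 == 2 + 12 == 14
}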
 
@@ -2731,7 +2935,7 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseUnaryExpression(
-    ExpressionClassifier* classifier, bool* ok) {
+    bool* ok) {
   // UnaryExpression ::
   //   PostfixExpression
   //   'delete' UnaryExpression
@@ -2747,44 +2951,42 @@
 
   Token::Value op = peek();
   if (Token::IsUnaryOp(op)) {
-    BindingPatternUnexpectedToken(classifier);
-    ArrowFormalParametersUnexpectedToken(classifier);
+    BindingPatternUnexpectedToken();
+    ArrowFormalParametersUnexpectedToken();
 
     op = Next();
     int pos = position();
-    ExpressionT expression = ParseUnaryExpression(classifier, CHECK_OK);
-    CheckNoTailCallExpressions(classifier, CHECK_OK);
-    impl()->RewriteNonPattern(classifier, CHECK_OK);
+    ExpressionT expression = ParseUnaryExpression(CHECK_OK);
+    impl()->RewriteNonPattern(CHECK_OK);
 
     if (op == Token::DELETE && is_strict(language_mode())) {
-      if (this->IsIdentifier(expression)) {
+      if (impl()->IsIdentifier(expression)) {
         // "delete identifier" is a syntax error in strict mode.
         ReportMessage(MessageTemplate::kStrictDelete);
         *ok = false;
-        return this->EmptyExpression();
+        return impl()->EmptyExpression();
       }
     }
 
     if (peek() == Token::EXP) {
       ReportUnexpectedToken(Next());
       *ok = false;
-      return this->EmptyExpression();
+      return impl()->EmptyExpression();
     }
 
-    // Allow Traits do rewrite the expression.
-    return this->BuildUnaryExpression(expression, op, pos, factory());
+    // Allow the parser's implementation to rewrite the expression.
+    return impl()->BuildUnaryExpression(expression, op, pos);
   } else if (Token::IsCountOp(op)) {
-    BindingPatternUnexpectedToken(classifier);
-    ArrowFormalParametersUnexpectedToken(classifier);
+    BindingPatternUnexpectedToken();
+    ArrowFormalParametersUnexpectedToken();
     op = Next();
     int beg_pos = peek_position();
-    ExpressionT expression = this->ParseUnaryExpression(classifier, CHECK_OK);
-    CheckNoTailCallExpressions(classifier, CHECK_OK);
-    expression = this->CheckAndRewriteReferenceExpression(
+    ExpressionT expression = ParseUnaryExpression(CHECK_OK);
+    expression = CheckAndRewriteReferenceExpression(
         expression, beg_pos, scanner()->location().end_pos,
         MessageTemplate::kInvalidLhsInPrefixOp, CHECK_OK);
-    this->MarkExpressionAsAssigned(expression);
-    impl()->RewriteNonPattern(classifier, CHECK_OK);
+    expression = impl()->MarkExpressionAsAssigned(expression);
+    impl()->RewriteNonPattern(CHECK_OK);
 
     return factory()->NewCountOperation(op,
                                         true /* prefix */,
@@ -2792,41 +2994,39 @@
                                         position());
 
   } else if (is_async_function() && peek() == Token::AWAIT) {
-    classifier->RecordFormalParameterInitializerError(
+    classifier()->RecordFormalParameterInitializerError(
         scanner()->peek_location(),
         MessageTemplate::kAwaitExpressionFormalParameter);
 
     int await_pos = peek_position();
     Consume(Token::AWAIT);
 
-    ExpressionT value = ParseUnaryExpression(classifier, CHECK_OK);
+    ExpressionT value = ParseUnaryExpression(CHECK_OK);
 
     return impl()->RewriteAwaitExpression(value, await_pos);
   } else {
-    return this->ParsePostfixExpression(classifier, ok);
+    return ParsePostfixExpression(ok);
   }
 }
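ParseUnaryExpression above enforces two early errors: `delete` of a bare identifier is rejected in strict mode, and a unary expression may not appear unparenthesized as the left operand of `**`. A small standalone check restating the same rules, with toy inputs instead of V8's Token machinery:

// Standalone restatement of the two early errors above: strict-mode
// `delete identifier`, and a unary operand directly followed by `**`.
// Parameter names and messages are illustrative only.
#include <cstdio>

const char* UnaryEarlyError(bool is_delete, bool operand_is_identifier,
                            bool strict_mode, bool next_token_is_exp) {
  if (is_delete && strict_mode && operand_is_identifier)
    return "SyntaxError: delete of an unqualified identifier in strict mode";
  if (next_token_is_exp)
    return "SyntaxError: unary operand of ** must be parenthesized";
  return nullptr;
}

int main() {
  struct Case { const char* src; bool del, ident, strict, exp; } cases[] = {
      {"'use strict'; delete x;", true, true, true, false},    // error
      {"-x ** 2", false, false, false, true},                  // error
      {"'use strict'; delete o.p;", true, false, true, false}  // ok
  };
  for (const auto& c : cases) {
    const char* err = UnaryEarlyError(c.del, c.ident, c.strict, c.exp);
    std::printf("%-28s -> %s\n", c.src, err ? err : "ok");
  }
}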
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParsePostfixExpression(
-    ExpressionClassifier* classifier, bool* ok) {
+    bool* ok) {
   // PostfixExpression ::
   //   LeftHandSideExpression ('++' | '--')?
 
   int lhs_beg_pos = peek_position();
-  ExpressionT expression =
-      this->ParseLeftHandSideExpression(classifier, CHECK_OK);
+  ExpressionT expression = ParseLeftHandSideExpression(CHECK_OK);
   if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
       Token::IsCountOp(peek())) {
-    CheckNoTailCallExpressions(classifier, CHECK_OK);
-    BindingPatternUnexpectedToken(classifier);
-    ArrowFormalParametersUnexpectedToken(classifier);
+    BindingPatternUnexpectedToken();
+    ArrowFormalParametersUnexpectedToken();
 
-    expression = this->CheckAndRewriteReferenceExpression(
+    expression = CheckAndRewriteReferenceExpression(
         expression, lhs_beg_pos, scanner()->location().end_pos,
         MessageTemplate::kInvalidLhsInPostfixOp, CHECK_OK);
-    expression = this->MarkExpressionAsAssigned(expression);
-    impl()->RewriteNonPattern(classifier, CHECK_OK);
+    expression = impl()->MarkExpressionAsAssigned(expression);
+    impl()->RewriteNonPattern(CHECK_OK);
 
     Token::Value next = Next();
     expression =
@@ -2840,40 +3040,33 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
-ParserBase<Impl>::ParseLeftHandSideExpression(ExpressionClassifier* classifier,
-                                              bool* ok) {
+ParserBase<Impl>::ParseLeftHandSideExpression(bool* ok) {
   // LeftHandSideExpression ::
   //   (NewExpression | MemberExpression) ...
 
-  if (FLAG_harmony_explicit_tailcalls && peek() == Token::CONTINUE) {
-    return this->ParseTailCallExpression(classifier, ok);
-  }
-
   bool is_async = false;
-  ExpressionT result = this->ParseMemberWithNewPrefixesExpression(
-      classifier, &is_async, CHECK_OK);
+  ExpressionT result =
+      ParseMemberWithNewPrefixesExpression(&is_async, CHECK_OK);
 
   while (true) {
     switch (peek()) {
       case Token::LBRACK: {
-        CheckNoTailCallExpressions(classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
         Consume(Token::LBRACK);
         int pos = position();
-        ExpressionT index = ParseExpression(true, classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
+        ExpressionT index = ParseExpressionCoverGrammar(true, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
         result = factory()->NewProperty(result, index, pos);
         Expect(Token::RBRACK, CHECK_OK);
         break;
       }
 
       case Token::LPAREN: {
-        CheckNoTailCallExpressions(classifier, CHECK_OK);
         int pos;
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
         if (scanner()->current_token() == Token::IDENTIFIER ||
             scanner()->current_token() == Token::SUPER ||
             scanner()->current_token() == Token::ASYNC) {
@@ -2895,36 +3088,36 @@
           }
         }
         Scanner::Location spread_pos;
-        typename Traits::Type::ExpressionList args;
-        if (V8_UNLIKELY(is_async && this->IsIdentifier(result))) {
+        ExpressionListT args;
+        if (V8_UNLIKELY(is_async && impl()->IsIdentifier(result))) {
           ExpressionClassifier async_classifier(this);
-          args = ParseArguments(&spread_pos, true, &async_classifier, CHECK_OK);
+          args = ParseArguments(&spread_pos, true, CHECK_OK);
           if (peek() == Token::ARROW) {
             if (fni_) {
               fni_->RemoveAsyncKeywordFromEnd();
             }
-            ValidateBindingPattern(&async_classifier, CHECK_OK);
-            if (!async_classifier.is_valid_async_arrow_formal_parameters()) {
+            ValidateBindingPattern(CHECK_OK);
+            ValidateFormalParameterInitializer(CHECK_OK);
+            if (!classifier()->is_valid_async_arrow_formal_parameters()) {
               ReportClassifierError(
-                  async_classifier.async_arrow_formal_parameters_error());
+                  classifier()->async_arrow_formal_parameters_error());
               *ok = false;
-              return this->EmptyExpression();
+              return impl()->EmptyExpression();
             }
             if (args->length()) {
               // async ( Arguments ) => ...
-              return Traits::ExpressionListToExpression(args);
+              return impl()->ExpressionListToExpression(args);
             }
             // async () => ...
             return factory()->NewEmptyParentheses(pos);
           } else {
-            classifier->Accumulate(&async_classifier,
-                                   ExpressionClassifier::AllProductions);
+            impl()->AccumulateFormalParameterContainmentErrors();
           }
         } else {
-          args = ParseArguments(&spread_pos, false, classifier, CHECK_OK);
+          args = ParseArguments(&spread_pos, false, CHECK_OK);
         }
 
-        ArrowFormalParametersUnexpectedToken(classifier);
+        ArrowFormalParametersUnexpectedToken();
 
         // Keep track of eval() calls since they disable all local variable
         // optimizations.
@@ -2947,7 +3140,8 @@
         // Explicit calls to the super constructor using super() perform an
         // implicit binding assignment to the 'this' variable.
         if (is_super_call) {
-          ExpressionT this_expr = this->ThisExpression(pos);
+          result = impl()->RewriteSuperCall(result);
+          ExpressionT this_expr = impl()->ThisExpression(pos);
           result =
               factory()->NewAssignment(Token::INIT, this_expr, result, pos);
         }
@@ -2957,26 +3151,24 @@
       }
 
       case Token::PERIOD: {
-        CheckNoTailCallExpressions(classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
         Consume(Token::PERIOD);
         int pos = position();
         IdentifierT name = ParseIdentifierName(CHECK_OK);
         result = factory()->NewProperty(
             result, factory()->NewStringLiteral(name, pos), pos);
-        if (fni_ != NULL) this->PushLiteralName(fni_, name);
+        impl()->PushLiteralName(name);
         break;
       }
 
       case Token::TEMPLATE_SPAN:
       case Token::TEMPLATE_TAIL: {
-        CheckNoTailCallExpressions(classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
-        result = ParseTemplateLiteral(result, position(), classifier, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
+        result = ParseTemplateLiteral(result, position(), CHECK_OK);
         break;
       }
 
@@ -2988,8 +3180,8 @@
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
-ParserBase<Impl>::ParseMemberWithNewPrefixesExpression(
-    ExpressionClassifier* classifier, bool* is_async, bool* ok) {
+ParserBase<Impl>::ParseMemberWithNewPrefixesExpression(bool* is_async,
+                                                       bool* ok) {
   // NewExpression ::
   //   ('new')+ MemberExpression
   //
@@ -3011,8 +3203,8 @@
   // new new foo().bar().baz means (new (new foo()).bar()).baz
 
   if (peek() == Token::NEW) {
-    BindingPatternUnexpectedToken(classifier);
-    ArrowFormalParametersUnexpectedToken(classifier);
+    BindingPatternUnexpectedToken();
+    ArrowFormalParametersUnexpectedToken();
     Consume(Token::NEW);
     int new_pos = position();
     ExpressionT result;
@@ -3022,15 +3214,13 @@
     } else if (peek() == Token::PERIOD) {
       return ParseNewTargetExpression(CHECK_OK);
     } else {
-      result = this->ParseMemberWithNewPrefixesExpression(classifier, is_async,
-                                                          CHECK_OK);
+      result = ParseMemberWithNewPrefixesExpression(is_async, CHECK_OK);
     }
-    impl()->RewriteNonPattern(classifier, CHECK_OK);
+    impl()->RewriteNonPattern(CHECK_OK);
     if (peek() == Token::LPAREN) {
       // NewExpression with arguments.
       Scanner::Location spread_pos;
-      typename Traits::Type::ExpressionList args =
-          this->ParseArguments(&spread_pos, classifier, CHECK_OK);
+      ExpressionListT args = ParseArguments(&spread_pos, CHECK_OK);
 
       if (spread_pos.IsValid()) {
         args = impl()->PrepareSpreadArguments(args);
@@ -3039,21 +3229,19 @@
         result = factory()->NewCallNew(result, args, new_pos);
       }
       // The expression can still continue with . or [ after the arguments.
-      result = this->ParseMemberExpressionContinuation(result, is_async,
-                                                       classifier, CHECK_OK);
+      result = ParseMemberExpressionContinuation(result, is_async, CHECK_OK);
       return result;
     }
     // NewExpression without arguments.
-    return factory()->NewCallNew(result, this->NewExpressionList(0, zone_),
-                                 new_pos);
+    return factory()->NewCallNew(result, impl()->NewExpressionList(0), new_pos);
   }
   // No 'new' or 'super' keyword.
-  return this->ParseMemberExpression(classifier, is_async, ok);
+  return ParseMemberExpression(is_async, ok);
 }
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseMemberExpression(
-    ExpressionClassifier* classifier, bool* is_async, bool* ok) {
+    bool* is_async, bool* ok) {
   // MemberExpression ::
   //   (PrimaryExpression | FunctionLiteral | ClassLiteral)
   //     ('[' Expression ']' | '.' Identifier | Arguments | TemplateLiteral)*
@@ -3065,8 +3253,8 @@
   // Parse the initial primary or function expression.
   ExpressionT result;
   if (peek() == Token::FUNCTION) {
-    BindingPatternUnexpectedToken(classifier);
-    ArrowFormalParametersUnexpectedToken(classifier);
+    BindingPatternUnexpectedToken();
+    ArrowFormalParametersUnexpectedToken();
 
     Consume(Token::FUNCTION);
     int function_token_position = position();
@@ -3078,19 +3266,19 @@
 
       if (!is_generator()) {
         // TODO(neis): allow escaping into closures?
-        ReportMessageAt(scanner()->location(),
-                        MessageTemplate::kUnexpectedFunctionSent);
+        impl()->ReportMessageAt(scanner()->location(),
+                                MessageTemplate::kUnexpectedFunctionSent);
         *ok = false;
-        return this->EmptyExpression();
+        return impl()->EmptyExpression();
       }
 
-      return this->FunctionSentExpression(factory(), pos);
+      return impl()->FunctionSentExpression(pos);
     }
 
     FunctionKind function_kind = Check(Token::MUL)
                                      ? FunctionKind::kGeneratorFunction
                                      : FunctionKind::kNormalFunction;
-    IdentifierT name = this->EmptyIdentifier();
+    IdentifierT name = impl()->EmptyIdentifier();
     bool is_strict_reserved_name = false;
     Scanner::Location function_name_location = Scanner::Location::invalid();
     FunctionLiteral::FunctionType function_type =
@@ -3111,11 +3299,10 @@
     const bool is_new = false;
     result = ParseSuperExpression(is_new, CHECK_OK);
   } else {
-    result = ParsePrimaryExpression(classifier, is_async, CHECK_OK);
+    result = ParsePrimaryExpression(is_async, CHECK_OK);
   }
 
-  result =
-      ParseMemberExpressionContinuation(result, is_async, classifier, CHECK_OK);
+  result = ParseMemberExpressionContinuation(result, is_async, CHECK_OK);
   return result;
 }
 
@@ -3131,20 +3318,21 @@
       IsClassConstructor(kind)) {
     if (peek() == Token::PERIOD || peek() == Token::LBRACK) {
       scope->RecordSuperPropertyUsage();
-      return this->NewSuperPropertyReference(factory(), pos);
+      return impl()->NewSuperPropertyReference(pos);
     }
     // new super() is never allowed.
     // super() is only allowed in a derived constructor.
     if (!is_new && peek() == Token::LPAREN && IsSubclassConstructor(kind)) {
       // TODO(rossberg): This might not be the correct FunctionState for the
       // method here.
-      return this->NewSuperCallReference(factory(), pos);
+      return impl()->NewSuperCallReference(pos);
     }
   }
 
-  ReportMessageAt(scanner()->location(), MessageTemplate::kUnexpectedSuper);
+  impl()->ReportMessageAt(scanner()->location(),
+                          MessageTemplate::kUnexpectedSuper);
   *ok = false;
-  return this->EmptyExpression();
+  return impl()->EmptyExpression();
 }
 
 template <typename Impl>
@@ -3154,7 +3342,7 @@
   Consume(Token::PERIOD);
   ExpectContextualKeyword(property_name, CHECK_OK_CUSTOM(Void));
   if (scanner()->literal_contains_escapes()) {
-    Traits::ReportMessageAt(
+    impl()->ReportMessageAt(
         Scanner::Location(pos, scanner()->location().end_pos),
         MessageTemplate::kInvalidEscapedMetaProperty, full_name);
     *ok = false;
@@ -3168,63 +3356,58 @@
   ExpectMetaProperty(CStrVector("target"), "new.target", pos, CHECK_OK);
 
   if (!GetReceiverScope()->is_function_scope()) {
-    ReportMessageAt(scanner()->location(),
-                    MessageTemplate::kUnexpectedNewTarget);
+    impl()->ReportMessageAt(scanner()->location(),
+                            MessageTemplate::kUnexpectedNewTarget);
     *ok = false;
-    return this->EmptyExpression();
+    return impl()->EmptyExpression();
   }
 
-  return this->NewTargetExpression(pos);
+  return impl()->NewTargetExpression(pos);
 }
 
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
-ParserBase<Impl>::ParseMemberExpressionContinuation(
-    ExpressionT expression, bool* is_async, ExpressionClassifier* classifier,
-    bool* ok) {
+ParserBase<Impl>::ParseMemberExpressionContinuation(ExpressionT expression,
+                                                    bool* is_async, bool* ok) {
   // Parses this part of MemberExpression:
   // ('[' Expression ']' | '.' Identifier | TemplateLiteral)*
   while (true) {
     switch (peek()) {
       case Token::LBRACK: {
         *is_async = false;
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
 
         Consume(Token::LBRACK);
         int pos = position();
-        ExpressionT index = this->ParseExpression(true, classifier, CHECK_OK);
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
+        ExpressionT index = ParseExpressionCoverGrammar(true, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
         expression = factory()->NewProperty(expression, index, pos);
-        if (fni_ != NULL) {
-          this->PushPropertyName(fni_, index);
-        }
+        impl()->PushPropertyName(index);
         Expect(Token::RBRACK, CHECK_OK);
         break;
       }
       case Token::PERIOD: {
         *is_async = false;
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
 
         Consume(Token::PERIOD);
         int pos = position();
         IdentifierT name = ParseIdentifierName(CHECK_OK);
         expression = factory()->NewProperty(
             expression, factory()->NewStringLiteral(name, pos), pos);
-        if (fni_ != NULL) {
-          this->PushLiteralName(fni_, name);
-        }
+        impl()->PushLiteralName(name);
         break;
       }
       case Token::TEMPLATE_SPAN:
       case Token::TEMPLATE_TAIL: {
         *is_async = false;
-        impl()->RewriteNonPattern(classifier, CHECK_OK);
-        BindingPatternUnexpectedToken(classifier);
-        ArrowFormalParametersUnexpectedToken(classifier);
+        impl()->RewriteNonPattern(CHECK_OK);
+        BindingPatternUnexpectedToken();
+        ArrowFormalParametersUnexpectedToken();
         int pos;
         if (scanner()->current_token() == Token::IDENTIFIER) {
           pos = position();
@@ -3236,62 +3419,58 @@
             expression->AsFunctionLiteral()->set_should_eager_compile();
           }
         }
-        expression =
-            ParseTemplateLiteral(expression, pos, classifier, CHECK_OK);
+        expression = ParseTemplateLiteral(expression, pos, CHECK_OK);
         break;
       }
       case Token::ILLEGAL: {
         ReportUnexpectedTokenAt(scanner()->peek_location(), Token::ILLEGAL);
         *ok = false;
-        return this->EmptyExpression();
+        return impl()->EmptyExpression();
       }
       default:
         return expression;
     }
   }
   DCHECK(false);
-  return this->EmptyExpression();
+  return impl()->EmptyExpression();
 }
 
 template <typename Impl>
 void ParserBase<Impl>::ParseFormalParameter(FormalParametersT* parameters,
-                                            ExpressionClassifier* classifier,
                                             bool* ok) {
   // FormalParameter[Yield,GeneratorParameter] :
   //   BindingElement[?Yield, ?GeneratorParameter]
   bool is_rest = parameters->has_rest;
 
-  ExpressionT pattern =
-      ParsePrimaryExpression(classifier, CHECK_OK_CUSTOM(Void));
-  ValidateBindingPattern(classifier, CHECK_OK_CUSTOM(Void));
+  ExpressionT pattern = ParsePrimaryExpression(CHECK_OK_CUSTOM(Void));
+  ValidateBindingPattern(CHECK_OK_CUSTOM(Void));
 
-  if (!Traits::IsIdentifier(pattern)) {
+  if (!impl()->IsIdentifier(pattern)) {
     parameters->is_simple = false;
-    ValidateFormalParameterInitializer(classifier, CHECK_OK_CUSTOM(Void));
-    classifier->RecordNonSimpleParameter();
+    ValidateFormalParameterInitializer(CHECK_OK_CUSTOM(Void));
+    classifier()->RecordNonSimpleParameter();
   }
 
-  ExpressionT initializer = Traits::EmptyExpression();
+  ExpressionT initializer = impl()->EmptyExpression();
   if (!is_rest && Check(Token::ASSIGN)) {
     ExpressionClassifier init_classifier(this);
-    initializer = ParseAssignmentExpression(true, &init_classifier,
-                                            CHECK_OK_CUSTOM(Void));
-    impl()->RewriteNonPattern(&init_classifier, CHECK_OK_CUSTOM(Void));
-    ValidateFormalParameterInitializer(&init_classifier, CHECK_OK_CUSTOM(Void));
+    initializer = ParseAssignmentExpression(true, CHECK_OK_CUSTOM(Void));
+    impl()->RewriteNonPattern(CHECK_OK_CUSTOM(Void));
+    ValidateFormalParameterInitializer(CHECK_OK_CUSTOM(Void));
     parameters->is_simple = false;
-    init_classifier.Discard();
-    classifier->RecordNonSimpleParameter();
+    impl()->Discard();
+    classifier()->RecordNonSimpleParameter();
 
-    Traits::SetFunctionNameFromIdentifierRef(initializer, pattern);
+    impl()->SetFunctionNameFromIdentifierRef(initializer, pattern);
   }
 
-  Traits::AddFormalParameter(parameters, pattern, initializer,
+  impl()->AddFormalParameter(parameters, pattern, initializer,
                              scanner()->location().end_pos, is_rest);
 }
 
 template <typename Impl>
-void ParserBase<Impl>::ParseFormalParameterList(
-    FormalParametersT* parameters, ExpressionClassifier* classifier, bool* ok) {
+void ParserBase<Impl>::ParseFormalParameterList(FormalParametersT* parameters,
+                                                bool* ok) {
   // FormalParameters[Yield] :
   //   [empty]
   //   FunctionRestParameter[?Yield]
@@ -3313,14 +3492,14 @@
         return;
       }
       parameters->has_rest = Check(Token::ELLIPSIS);
-      ParseFormalParameter(parameters, classifier, CHECK_OK_CUSTOM(Void));
+      ParseFormalParameter(parameters, CHECK_OK_CUSTOM(Void));
 
       if (parameters->has_rest) {
         parameters->is_simple = false;
-        classifier->RecordNonSimpleParameter();
+        classifier()->RecordNonSimpleParameter();
         if (peek() == Token::COMMA) {
-          ReportMessageAt(scanner()->peek_location(),
-                          MessageTemplate::kParamAfterRest);
+          impl()->ReportMessageAt(scanner()->peek_location(),
+                                  MessageTemplate::kParamAfterRest);
           *ok = false;
           return;
         }
@@ -3336,11 +3515,321 @@
 
   for (int i = 0; i < parameters->Arity(); ++i) {
     auto parameter = parameters->at(i);
-    Traits::DeclareFormalParameter(parameters->scope, parameter, classifier);
+    impl()->DeclareFormalParameter(parameters->scope, parameter);
   }
 }
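ParseFormalParameter(List) above tracks two properties while scanning the parameter list: whether the list is still "simple" (no patterns, defaults, or rest) and whether a rest parameter is followed by anything else, which is a syntax error. A compact standalone checker with the same bookkeeping, using a toy Param struct rather than the FormalParametersT type used here:

// Sketch of the bookkeeping in ParseFormalParameter(List): a rest parameter
// must be last, and any pattern/default/rest makes the list non-simple.
// Toy Param struct, not V8's FormalParametersT.
#include <cstdio>
#include <vector>

struct Param { bool is_rest, is_pattern, has_default; };

bool CheckParams(const std::vector<Param>& params, bool* is_simple) {
  *is_simple = true;
  for (size_t i = 0; i < params.size(); ++i) {
    if (params[i].is_rest && i + 1 != params.size()) {
      std::puts("SyntaxError: rest parameter must be last");
      return false;
    }
    if (params[i].is_rest || params[i].is_pattern || params[i].has_default)
      *is_simple = false;
  }
  return true;
}

int main() {
  bool is_simple;
  // (a, b = 1, ...rest): valid but non-simple.
  std::vector<Param> good = {{false, false, false},
                             {false, false, true},
                             {true, false, false}};
  if (CheckParams(good, &is_simple))
    std::printf("valid, is_simple=%d\n", is_simple);  // valid, is_simple=0

  // (...rest, a): rejected.
  std::vector<Param> bad = {{true, false, false}, {false, false, false}};
  CheckParams(bad, &is_simple);
}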
 
 template <typename Impl>
+typename ParserBase<Impl>::BlockT ParserBase<Impl>::ParseVariableDeclarations(
+    VariableDeclarationContext var_context,
+    DeclarationParsingResult* parsing_result,
+    ZoneList<const AstRawString*>* names, bool* ok) {
+  // VariableDeclarations ::
+  //   ('var' | 'const' | 'let') (Identifier ('=' AssignmentExpression)?)+[',']
+  //
+  // ES6:
+  // FIXME(marja, nikolaos): Add an up-to-date comment about ES6 variable
+  // declaration syntax.
+
+  DCHECK_NOT_NULL(parsing_result);
+  parsing_result->descriptor.declaration_kind = DeclarationDescriptor::NORMAL;
+  parsing_result->descriptor.declaration_pos = peek_position();
+  parsing_result->descriptor.initialization_pos = peek_position();
+
+  BlockT init_block = impl()->NullBlock();
+  if (var_context != kForStatement) {
+    init_block = factory()->NewBlock(
+        nullptr, 1, true, parsing_result->descriptor.declaration_pos);
+  }
+
+  switch (peek()) {
+    case Token::VAR:
+      parsing_result->descriptor.mode = VAR;
+      Consume(Token::VAR);
+      break;
+    case Token::CONST:
+      Consume(Token::CONST);
+      DCHECK(var_context != kStatement);
+      parsing_result->descriptor.mode = CONST;
+      break;
+    case Token::LET:
+      Consume(Token::LET);
+      DCHECK(var_context != kStatement);
+      parsing_result->descriptor.mode = LET;
+      break;
+    default:
+      UNREACHABLE();  // by current callers
+      break;
+  }
+
+  parsing_result->descriptor.scope = scope();
+  parsing_result->descriptor.hoist_scope = nullptr;
+
+  // The scope of a var/const declared variable anywhere inside a function
+  // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). The scope
+  // of a let declared variable is the scope of the immediately enclosing
+  // block.
+  int bindings_start = peek_position();
+  do {
+    // Parse binding pattern.
+    FuncNameInferrer::State fni_state(fni_);
+
+    ExpressionT pattern = impl()->EmptyExpression();
+    int decl_pos = peek_position();
+    {
+      ExpressionClassifier pattern_classifier(this);
+      pattern = ParsePrimaryExpression(CHECK_OK_CUSTOM(NullBlock));
+
+      ValidateBindingPattern(CHECK_OK_CUSTOM(NullBlock));
+      if (IsLexicalVariableMode(parsing_result->descriptor.mode)) {
+        ValidateLetPattern(CHECK_OK_CUSTOM(NullBlock));
+      }
+    }
+
+    Scanner::Location variable_loc = scanner()->location();
+    bool single_name = impl()->IsIdentifier(pattern);
+
+    if (single_name) {
+      impl()->PushVariableName(impl()->AsIdentifier(pattern));
+    }
+
+    ExpressionT value = impl()->EmptyExpression();
+    int initializer_position = kNoSourcePosition;
+    if (Check(Token::ASSIGN)) {
+      ExpressionClassifier classifier(this);
+      value = ParseAssignmentExpression(var_context != kForStatement,
+                                        CHECK_OK_CUSTOM(NullBlock));
+      impl()->RewriteNonPattern(CHECK_OK_CUSTOM(NullBlock));
+      variable_loc.end_pos = scanner()->location().end_pos;
+
+      if (!parsing_result->first_initializer_loc.IsValid()) {
+        parsing_result->first_initializer_loc = variable_loc;
+      }
+
+      // Don't infer if it is an "a = function(){...}();"-like expression.
+      if (single_name && fni_ != nullptr) {
+        if (!value->IsCall() && !value->IsCallNew()) {
+          fni_->Infer();
+        } else {
+          fni_->RemoveLastFunction();
+        }
+      }
+
+      impl()->SetFunctionNameFromIdentifierRef(value, pattern);
+
+      // End position of the initializer is after the assignment expression.
+      initializer_position = scanner()->location().end_pos;
+    } else {
+      if (var_context != kForStatement || !PeekInOrOf()) {
+        // ES6 'const' and binding patterns require initializers.
+        if (parsing_result->descriptor.mode == CONST ||
+            !impl()->IsIdentifier(pattern)) {
+          impl()->ReportMessageAt(
+              Scanner::Location(decl_pos, scanner()->location().end_pos),
+              MessageTemplate::kDeclarationMissingInitializer,
+              !impl()->IsIdentifier(pattern) ? "destructuring" : "const");
+          *ok = false;
+          return impl()->NullBlock();
+        }
+        // 'let x' initializes 'x' to undefined.
+        if (parsing_result->descriptor.mode == LET) {
+          value = impl()->GetLiteralUndefined(position());
+        }
+      }
+
+      // End position of the initializer is after the variable.
+      initializer_position = position();
+    }
+
+    typename DeclarationParsingResult::Declaration decl(
+        pattern, initializer_position, value);
+    if (var_context == kForStatement) {
+      // Save the declaration for further handling in ParseForStatement.
+      parsing_result->declarations.Add(decl);
+    } else {
+      // Immediately declare the variable otherwise. This avoids O(N^2)
+      // behavior (where N is the number of variables in a single
+      // declaration) in the PatternRewriter having to do with removing
+      // and adding VariableProxies to the Scope (see bug 4699).
+      impl()->DeclareAndInitializeVariables(init_block,
+                                            &parsing_result->descriptor, &decl,
+                                            names, CHECK_OK_CUSTOM(NullBlock));
+    }
+  } while (Check(Token::COMMA));
+
+  parsing_result->bindings_loc =
+      Scanner::Location(bindings_start, scanner()->location().end_pos);
+
+  DCHECK(*ok);
+  return init_block;
+}
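The initializer handling in ParseVariableDeclarations above encodes the ES6 rules: outside a for-in/of head, `const` declarations and destructuring patterns must be initialized, while a bare `let x` is initialized to undefined. A small standalone restatement of that rule follows; the mode enum and names are simplified, not V8's DeclarationDescriptor.

// Sketch of the initializer rule enforced in ParseVariableDeclarations above:
// outside a for-in/of head, `const` declarations and destructuring patterns
// need an initializer, while `let x;` defaults to undefined.
// Simplified enum and names, not V8's DeclarationDescriptor.
#include <cstdio>

enum class Mode { kVar, kLet, kConst };

// Returns nullptr if the declaration is fine, or an error string otherwise.
const char* CheckInitializer(Mode mode, bool is_pattern, bool has_initializer,
                             bool in_for_in_of_head) {
  if (has_initializer || in_for_in_of_head) return nullptr;
  if (is_pattern) return "destructuring declaration missing initializer";
  if (mode == Mode::kConst) return "const declaration missing initializer";
  return nullptr;  // `var x;` / `let x;` are fine (value is undefined).
}

int main() {
  struct Case { const char* src; Mode mode; bool pattern, init, for_head; };
  const Case cases[] = {
      {"let x;", Mode::kLet, false, false, false},
      {"const x;", Mode::kConst, false, false, false},
      {"var [a, b];", Mode::kVar, true, false, false},
      {"for (const x of xs) {}", Mode::kConst, false, false, true},
  };
  for (const Case& c : cases) {
    const char* err = CheckInitializer(c.mode, c.pattern, c.init, c.for_head);
    std::printf("%-24s -> %s\n", c.src, err ? err : "ok");
  }
}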
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseFunctionDeclaration(bool* ok) {
+  Consume(Token::FUNCTION);
+  int pos = position();
+  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
+  if (Check(Token::MUL)) {
+    flags |= ParseFunctionFlags::kIsGenerator;
+    if (allow_harmony_restrictive_declarations()) {
+      impl()->ReportMessageAt(scanner()->location(),
+                              MessageTemplate::kGeneratorInLegacyContext);
+      *ok = false;
+      return impl()->NullStatement();
+    }
+  }
+  return ParseHoistableDeclaration(pos, flags, nullptr, false, ok);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseHoistableDeclaration(
+    ZoneList<const AstRawString*>* names, bool default_export, bool* ok) {
+  Expect(Token::FUNCTION, CHECK_OK_CUSTOM(NullStatement));
+  int pos = position();
+  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
+  if (Check(Token::MUL)) {
+    flags |= ParseFunctionFlags::kIsGenerator;
+  }
+  return ParseHoistableDeclaration(pos, flags, names, default_export, ok);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseHoistableDeclaration(
+    int pos, ParseFunctionFlags flags, ZoneList<const AstRawString*>* names,
+    bool default_export, bool* ok) {
+  // FunctionDeclaration ::
+  //   'function' Identifier '(' FormalParameters ')' '{' FunctionBody '}'
+  //   'function' '(' FormalParameters ')' '{' FunctionBody '}'
+  // GeneratorDeclaration ::
+  //   'function' '*' Identifier '(' FormalParameters ')' '{' FunctionBody '}'
+  //   'function' '*' '(' FormalParameters ')' '{' FunctionBody '}'
+  //
+  // The anonymous forms are allowed iff [default_export] is true.
+  //
+  // 'function' and '*' (if present) have been consumed by the caller.
+
+  const bool is_generator = flags & ParseFunctionFlags::kIsGenerator;
+  const bool is_async = flags & ParseFunctionFlags::kIsAsync;
+  DCHECK(!is_generator || !is_async);
+
+  IdentifierT name;
+  FunctionNameValidity name_validity;
+  IdentifierT variable_name;
+  if (default_export && peek() == Token::LPAREN) {
+    impl()->GetDefaultStrings(&name, &variable_name);
+    name_validity = kSkipFunctionNameCheck;
+  } else {
+    bool is_strict_reserved;
+    name = ParseIdentifierOrStrictReservedWord(&is_strict_reserved,
+                                               CHECK_OK_CUSTOM(NullStatement));
+    name_validity = is_strict_reserved ? kFunctionNameIsStrictReserved
+                                       : kFunctionNameValidityUnknown;
+    variable_name = name;
+  }
+
+  FuncNameInferrer::State fni_state(fni_);
+  impl()->PushEnclosingName(name);
+  FunctionLiteralT function = impl()->ParseFunctionLiteral(
+      name, scanner()->location(), name_validity,
+      is_generator ? FunctionKind::kGeneratorFunction
+                   : is_async ? FunctionKind::kAsyncFunction
+                              : FunctionKind::kNormalFunction,
+      pos, FunctionLiteral::kDeclaration, language_mode(),
+      CHECK_OK_CUSTOM(NullStatement));
+
+  return impl()->DeclareFunction(variable_name, function, pos, is_generator,
+                                 is_async, names, ok);
+}
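ParseHoistableDeclaration above combines ParseFunctionFlags bits and then selects a FunctionKind; the DCHECK documents that a declaration cannot currently be both a generator and async. A tiny standalone sketch of that flag-to-kind mapping, with simplified enums rather than V8's real types:

// Sketch of how the ParseFunctionFlags bits above select a FunctionKind.
// Simplified enums; not V8's actual definitions.
#include <cassert>
#include <cstdio>

enum ParseFunctionFlags : int {
  kIsNormal = 0,
  kIsGenerator = 1 << 0,
  kIsAsync = 1 << 1,
};

enum class FunctionKind { kNormalFunction, kGeneratorFunction, kAsyncFunction };

FunctionKind KindFromFlags(int flags) {
  const bool is_generator = (flags & kIsGenerator) != 0;
  const bool is_async = (flags & kIsAsync) != 0;
  // As in the DCHECK above: a function cannot be both async and a generator.
  assert(!is_generator || !is_async);
  return is_generator ? FunctionKind::kGeneratorFunction
                      : is_async ? FunctionKind::kAsyncFunction
                                 : FunctionKind::kNormalFunction;
}

int main() {
  std::printf("%d\n", static_cast<int>(KindFromFlags(kIsNormal)));     // 0
  std::printf("%d\n", static_cast<int>(KindFromFlags(kIsGenerator)));  // 1
  std::printf("%d\n", static_cast<int>(KindFromFlags(kIsAsync)));      // 2
}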
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseClassDeclaration(
+    ZoneList<const AstRawString*>* names, bool default_export, bool* ok) {
+  // ClassDeclaration ::
+  //   'class' Identifier ('extends' LeftHandExpression)? '{' ClassBody '}'
+  //   'class' ('extends' LeftHandExpression)? '{' ClassBody '}'
+  //
+  // The anonymous form is allowed iff [default_export] is true.
+  //
+  // 'class' is expected to be consumed by the caller.
+  //
+  // A ClassDeclaration
+  //
+  //   class C { ... }
+  //
+  // has the same semantics as:
+  //
+  //   let C = class C { ... };
+  //
+  // so rewrite it as such.
+
+  int class_token_pos = position();
+  IdentifierT name = impl()->EmptyIdentifier();
+  bool is_strict_reserved = false;
+  IdentifierT variable_name = impl()->EmptyIdentifier();
+  if (default_export && (peek() == Token::EXTENDS || peek() == Token::LBRACE)) {
+    impl()->GetDefaultStrings(&name, &variable_name);
+  } else {
+    name = ParseIdentifierOrStrictReservedWord(&is_strict_reserved,
+                                               CHECK_OK_CUSTOM(NullStatement));
+    variable_name = name;
+  }
+
+  ExpressionClassifier no_classifier(this);
+  ExpressionT value =
+      ParseClassLiteral(name, scanner()->location(), is_strict_reserved,
+                        class_token_pos, CHECK_OK_CUSTOM(NullStatement));
+  int end_pos = position();
+  return impl()->DeclareClass(variable_name, value, names, class_token_pos,
+                              end_pos, ok);
+}
+
+// Language extension which is only enabled for source files loaded
+// through the API's extension mechanism.  A native function
+// declaration is resolved by looking up the function through a
+// callback provided by the extension.
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseNativeDeclaration(
+    bool* ok) {
+  int pos = peek_position();
+  Expect(Token::FUNCTION, CHECK_OK_CUSTOM(NullStatement));
+  // Allow "eval" or "arguments" for backward compatibility.
+  IdentifierT name = ParseIdentifier(kAllowRestrictedIdentifiers,
+                                     CHECK_OK_CUSTOM(NullStatement));
+  Expect(Token::LPAREN, CHECK_OK_CUSTOM(NullStatement));
+  if (peek() != Token::RPAREN) {
+    do {
+      ParseIdentifier(kAllowRestrictedIdentifiers,
+                      CHECK_OK_CUSTOM(NullStatement));
+    } while (Check(Token::COMMA));
+  }
+  Expect(Token::RPAREN, CHECK_OK_CUSTOM(NullStatement));
+  Expect(Token::SEMICOLON, CHECK_OK_CUSTOM(NullStatement));
+  return impl()->DeclareNative(name, pos, ok);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseAsyncFunctionDeclaration(
+    ZoneList<const AstRawString*>* names, bool default_export, bool* ok) {
+  // AsyncFunctionDeclaration ::
+  //   async [no LineTerminator here] function BindingIdentifier[Await]
+  //       ( FormalParameters[Await] ) { AsyncFunctionBody }
+  DCHECK_EQ(scanner()->current_token(), Token::ASYNC);
+  int pos = position();
+  if (scanner()->HasAnyLineTerminatorBeforeNext()) {
+    *ok = false;
+    impl()->ReportUnexpectedToken(scanner()->current_token());
+    return impl()->NullStatement();
+  }
+  Expect(Token::FUNCTION, CHECK_OK_CUSTOM(NullStatement));
+  ParseFunctionFlags flags = ParseFunctionFlags::kIsAsync;
+  return ParseHoistableDeclaration(pos, flags, names, default_export, ok);
+}
+
+template <typename Impl>
 void ParserBase<Impl>::CheckArityRestrictions(int param_count,
                                               FunctionKind function_kind,
                                               bool has_rest,
@@ -3348,19 +3837,22 @@
                                               int formals_end_pos, bool* ok) {
   if (IsGetterFunction(function_kind)) {
     if (param_count != 0) {
-      ReportMessageAt(Scanner::Location(formals_start_pos, formals_end_pos),
-                      MessageTemplate::kBadGetterArity);
+      impl()->ReportMessageAt(
+          Scanner::Location(formals_start_pos, formals_end_pos),
+          MessageTemplate::kBadGetterArity);
       *ok = false;
     }
   } else if (IsSetterFunction(function_kind)) {
     if (param_count != 1) {
-      ReportMessageAt(Scanner::Location(formals_start_pos, formals_end_pos),
-                      MessageTemplate::kBadSetterArity);
+      impl()->ReportMessageAt(
+          Scanner::Location(formals_start_pos, formals_end_pos),
+          MessageTemplate::kBadSetterArity);
       *ok = false;
     }
     if (has_rest) {
-      ReportMessageAt(Scanner::Location(formals_start_pos, formals_end_pos),
-                      MessageTemplate::kBadSetterRestParameter);
+      impl()->ReportMessageAt(
+          Scanner::Location(formals_start_pos, formals_end_pos),
+          MessageTemplate::kBadSetterRestParameter);
       *ok = false;
     }
   }
@@ -3412,31 +3904,33 @@
 template <typename Impl>
 typename ParserBase<Impl>::ExpressionT
 ParserBase<Impl>::ParseArrowFunctionLiteral(
-    bool accept_IN, const FormalParametersT& formal_parameters, bool is_async,
-    const ExpressionClassifier& formals_classifier, bool* ok) {
+    bool accept_IN, const FormalParametersT& formal_parameters, bool* ok) {
   if (peek() == Token::ARROW && scanner_->HasAnyLineTerminatorBeforeNext()) {
     // ASI inserts `;` after arrow parameters if a line terminator is found.
     // `=> ...` is never a valid expression, so report as syntax error.
     // If next token is not `=>`, it's a syntax error anyways.
     ReportUnexpectedTokenAt(scanner_->peek_location(), Token::ARROW);
     *ok = false;
-    return this->EmptyExpression();
+    return impl()->EmptyExpression();
   }
 
-  typename Traits::Type::StatementList body;
+  StatementListT body = impl()->NullStatementList();
   int num_parameters = formal_parameters.scope->num_parameters();
   int materialized_literal_count = -1;
   int expected_property_count = -1;
 
-  FunctionKind arrow_kind = is_async ? kAsyncArrowFunction : kArrowFunction;
+  FunctionKind kind = formal_parameters.scope->function_kind();
+  FunctionLiteral::EagerCompileHint eager_compile_hint =
+      FunctionLiteral::kShouldLazyCompile;
+  bool should_be_used_once_hint = false;
   {
     FunctionState function_state(&function_state_, &scope_state_,
-                                 formal_parameters.scope, arrow_kind);
+                                 formal_parameters.scope);
 
     function_state.SkipMaterializedLiterals(
         formal_parameters.materialized_literals_count);
 
-    this->ReindexLiterals(formal_parameters);
+    impl()->ReindexLiterals(formal_parameters);
 
     Expect(Token::ARROW, CHECK_OK);
 
@@ -3444,20 +3938,42 @@
       // Multiple statement body
       Consume(Token::LBRACE);
       DCHECK_EQ(scope(), formal_parameters.scope);
-      bool is_lazily_parsed = (mode() == PARSE_LAZILY &&
-                               formal_parameters.scope->AllowsLazyParsing());
+      bool is_lazily_parsed =
+          (mode() == PARSE_LAZILY &&
+           formal_parameters.scope
+               ->AllowsLazyParsingWithoutUnresolvedVariables());
+      // TODO(marja): consider lazy-parsing inner arrow functions too. is_this
+      // handling in Scope::ResolveVariable needs to change.
       if (is_lazily_parsed) {
-        body = this->NewStatementList(0, zone());
-        impl()->SkipLazyFunctionBody(&materialized_literal_count,
-                                     &expected_property_count, CHECK_OK);
+        Scanner::BookmarkScope bookmark(scanner());
+        bookmark.Set();
+        LazyParsingResult result = impl()->SkipLazyFunctionBody(
+            &materialized_literal_count, &expected_property_count, false, true,
+            CHECK_OK);
+        formal_parameters.scope->ResetAfterPreparsing(
+            ast_value_factory_, result == kLazyParsingAborted);
+
         if (formal_parameters.materialized_literals_count > 0) {
           materialized_literal_count +=
               formal_parameters.materialized_literals_count;
         }
-      } else {
+
+        if (result == kLazyParsingAborted) {
+          bookmark.Apply();
+          // Trigger eager (re-)parsing, just below this block.
+          is_lazily_parsed = false;
+
+          // This is probably an initialization function. Inform the compiler it
+          // should also eager-compile this function, and that we expect it to
+          // be used once.
+          eager_compile_hint = FunctionLiteral::kShouldEagerCompile;
+          should_be_used_once_hint = true;
+        }
+      }
+      if (!is_lazily_parsed) {
         body = impl()->ParseEagerFunctionBody(
-            this->EmptyIdentifier(), kNoSourcePosition, formal_parameters,
-            arrow_kind, FunctionLiteral::kAnonymousExpression, CHECK_OK);
+            impl()->EmptyIdentifier(), kNoSourcePosition, formal_parameters,
+            kind, FunctionLiteral::kAnonymousExpression, CHECK_OK);
         materialized_literal_count =
             function_state.materialized_literal_count();
         expected_property_count = function_state.expected_property_count();
@@ -3469,18 +3985,18 @@
              function_state_->return_expr_context());
       ReturnExprScope allow_tail_calls(
           function_state_, ReturnExprContext::kInsideValidReturnStatement);
-      body = this->NewStatementList(1, zone());
-      this->AddParameterInitializationBlock(formal_parameters, body, is_async,
-                                            CHECK_OK);
+      body = impl()->NewStatementList(1);
+      impl()->AddParameterInitializationBlock(
+          formal_parameters, body, kind == kAsyncArrowFunction, CHECK_OK);
       ExpressionClassifier classifier(this);
-      if (is_async) {
-        impl()->ParseAsyncArrowSingleExpressionBody(body, accept_IN,
-                                                    &classifier, pos, CHECK_OK);
-        impl()->RewriteNonPattern(&classifier, CHECK_OK);
+      if (kind == kAsyncArrowFunction) {
+        ParseAsyncFunctionBody(scope(), body, kAsyncArrowFunction,
+                               FunctionBodyType::kSingleExpression, accept_IN,
+                               pos, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
       } else {
-        ExpressionT expression =
-            ParseAssignmentExpression(accept_IN, &classifier, CHECK_OK);
-        impl()->RewriteNonPattern(&classifier, CHECK_OK);
+        ExpressionT expression = ParseAssignmentExpression(accept_IN, CHECK_OK);
+        impl()->RewriteNonPattern(CHECK_OK);
         body->Add(factory()->NewReturnStatement(expression, pos), zone());
         if (allow_tailcalls() && !is_sloppy(language_mode())) {
           // ES6 14.6.1 Static Semantics: IsInTailPosition
@@ -3499,8 +4015,8 @@
     // that duplicates are not allowed.  Of course, the arrow function may
     // itself be strict as well.
     const bool allow_duplicate_parameters = false;
-    this->ValidateFormalParameters(&formals_classifier, language_mode(),
-                                   allow_duplicate_parameters, CHECK_OK);
+    ValidateFormalParameters(language_mode(), allow_duplicate_parameters,
+                             CHECK_OK);
 
     // Validate strict mode.
     if (is_strict(language_mode())) {
@@ -3513,24 +4029,141 @@
   }
 
   FunctionLiteralT function_literal = factory()->NewFunctionLiteral(
-      this->EmptyIdentifierString(), formal_parameters.scope, body,
+      impl()->EmptyIdentifierString(), formal_parameters.scope, body,
       materialized_literal_count, expected_property_count, num_parameters,
       FunctionLiteral::kNoDuplicateParameters,
-      FunctionLiteral::kAnonymousExpression,
-      FunctionLiteral::kShouldLazyCompile, arrow_kind,
+      FunctionLiteral::kAnonymousExpression, eager_compile_hint,
       formal_parameters.scope->start_position());
 
   function_literal->set_function_token_position(
       formal_parameters.scope->start_position());
+  if (should_be_used_once_hint) {
+    function_literal->set_should_be_used_once_hint();
+  }
 
-  if (fni_ != NULL) this->InferFunctionName(fni_, function_literal);
+  impl()->AddFunctionForNameInference(function_literal);
 
   return function_literal;
 }
 
 template <typename Impl>
+typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseClassLiteral(
+    IdentifierT name, Scanner::Location class_name_location,
+    bool name_is_strict_reserved, int class_token_pos, bool* ok) {
+  // All parts of a ClassDeclaration and ClassExpression are strict code.
+  if (name_is_strict_reserved) {
+    impl()->ReportMessageAt(class_name_location,
+                            MessageTemplate::kUnexpectedStrictReserved);
+    *ok = false;
+    return impl()->EmptyExpression();
+  }
+  if (impl()->IsEvalOrArguments(name)) {
+    impl()->ReportMessageAt(class_name_location,
+                            MessageTemplate::kStrictEvalArguments);
+    *ok = false;
+    return impl()->EmptyExpression();
+  }
+
+  BlockState block_state(zone(), &scope_state_);
+  RaiseLanguageMode(STRICT);
+
+  ClassInfo class_info(this);
+  impl()->DeclareClassVariable(name, block_state.scope(), &class_info,
+                               class_token_pos, CHECK_OK);
+
+  if (Check(Token::EXTENDS)) {
+    block_state.set_start_position(scanner()->location().end_pos);
+    ExpressionClassifier extends_classifier(this);
+    class_info.extends = ParseLeftHandSideExpression(CHECK_OK);
+    impl()->RewriteNonPattern(CHECK_OK);
+    impl()->AccumulateFormalParameterContainmentErrors();
+  } else {
+    block_state.set_start_position(scanner()->location().end_pos);
+  }
+
+  ClassLiteralChecker checker(this);
+
+  Expect(Token::LBRACE, CHECK_OK);
+
+  const bool has_extends = !impl()->IsEmptyExpression(class_info.extends);
+  while (peek() != Token::RBRACE) {
+    if (Check(Token::SEMICOLON)) continue;
+    FuncNameInferrer::State fni_state(fni_);
+    bool is_computed_name = false;  // Classes do not care about computed
+                                    // property names here.
+    ExpressionClassifier property_classifier(this);
+    ClassLiteralPropertyT property = ParseClassPropertyDefinition(
+        &checker, has_extends, &is_computed_name,
+        &class_info.has_seen_constructor, CHECK_OK);
+    impl()->RewriteNonPattern(CHECK_OK);
+    impl()->AccumulateFormalParameterContainmentErrors();
+
+    impl()->DeclareClassProperty(name, property, &class_info, CHECK_OK);
+    impl()->InferFunctionName();
+  }
+
+  Expect(Token::RBRACE, CHECK_OK);
+  return impl()->RewriteClassLiteral(name, &class_info, class_token_pos, ok);
+}
+
+template <typename Impl>
+void ParserBase<Impl>::ParseAsyncFunctionBody(Scope* scope, StatementListT body,
+                                              FunctionKind kind,
+                                              FunctionBodyType body_type,
+                                              bool accept_IN, int pos,
+                                              bool* ok) {
+  scope->ForceContextAllocation();
+
+  impl()->PrepareAsyncFunctionBody(body, kind, pos);
+
+  BlockT block = factory()->NewBlock(nullptr, 8, true, kNoSourcePosition);
+
+  ExpressionT return_value = impl()->EmptyExpression();
+  if (body_type == FunctionBodyType::kNormal) {
+    ParseStatementList(block->statements(), Token::RBRACE,
+                       CHECK_OK_CUSTOM(Void));
+    return_value = factory()->NewUndefinedLiteral(kNoSourcePosition);
+  } else {
+    return_value = ParseAssignmentExpression(accept_IN, CHECK_OK_CUSTOM(Void));
+    impl()->RewriteNonPattern(CHECK_OK_CUSTOM(Void));
+  }
+
+  impl()->RewriteAsyncFunctionBody(body, block, return_value,
+                                   CHECK_OK_CUSTOM(Void));
+  scope->set_end_position(scanner()->location().end_pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::ExpressionT
+ParserBase<Impl>::ParseAsyncFunctionLiteral(bool* ok) {
+  // AsyncFunctionLiteral ::
+  //   async [no LineTerminator here] function ( FormalParameters[Await] )
+  //       { AsyncFunctionBody }
+  //
+  //   async [no LineTerminator here] function BindingIdentifier[Await]
+  //       ( FormalParameters[Await] ) { AsyncFunctionBody }
+  DCHECK_EQ(scanner()->current_token(), Token::ASYNC);
+  int pos = position();
+  Expect(Token::FUNCTION, CHECK_OK);
+  bool is_strict_reserved = false;
+  IdentifierT name = impl()->EmptyIdentifier();
+  FunctionLiteral::FunctionType type = FunctionLiteral::kAnonymousExpression;
+
+  if (peek_any_identifier()) {
+    type = FunctionLiteral::kNamedExpression;
+    name = ParseIdentifierOrStrictReservedWord(FunctionKind::kAsyncFunction,
+                                               &is_strict_reserved, CHECK_OK);
+  }
+  return impl()->ParseFunctionLiteral(
+      name, scanner()->location(),
+      is_strict_reserved ? kFunctionNameIsStrictReserved
+                         : kFunctionNameValidityUnknown,
+      FunctionKind::kAsyncFunction, pos, type, language_mode(), CHECK_OK);
+}
+
+template <typename Impl>
 typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseTemplateLiteral(
-    ExpressionT tag, int start, ExpressionClassifier* classifier, bool* ok) {
+    ExpressionT tag, int start, bool* ok) {
   // A TemplateLiteral is made up of 0 or more TEMPLATE_SPAN tokens (literal
   // text followed by a substitution expression), finalized by a single
   // TEMPLATE_TAIL.
@@ -3569,29 +4202,28 @@
     CheckTemplateOctalLiteral(pos, peek_position(), CHECK_OK);
     next = peek();
     if (next == Token::EOS) {
-      ReportMessageAt(Scanner::Location(start, peek_position()),
-                      MessageTemplate::kUnterminatedTemplate);
+      impl()->ReportMessageAt(Scanner::Location(start, peek_position()),
+                              MessageTemplate::kUnterminatedTemplate);
       *ok = false;
-      return Traits::EmptyExpression();
+      return impl()->EmptyExpression();
     } else if (next == Token::ILLEGAL) {
-      Traits::ReportMessageAt(
+      impl()->ReportMessageAt(
           Scanner::Location(position() + 1, peek_position()),
           MessageTemplate::kUnexpectedToken, "ILLEGAL", kSyntaxError);
       *ok = false;
-      return Traits::EmptyExpression();
+      return impl()->EmptyExpression();
     }
 
     int expr_pos = peek_position();
-    ExpressionT expression = this->ParseExpression(true, classifier, CHECK_OK);
-    CheckNoTailCallExpressions(classifier, CHECK_OK);
-    impl()->RewriteNonPattern(classifier, CHECK_OK);
+    ExpressionT expression = ParseExpressionCoverGrammar(true, CHECK_OK);
+    impl()->RewriteNonPattern(CHECK_OK);
     impl()->AddTemplateExpression(&ts, expression);
 
     if (peek() != Token::RBRACE) {
-      ReportMessageAt(Scanner::Location(expr_pos, peek_position()),
-                      MessageTemplate::kUnterminatedTemplateExpr);
+      impl()->ReportMessageAt(Scanner::Location(expr_pos, peek_position()),
+                              MessageTemplate::kUnterminatedTemplateExpr);
       *ok = false;
-      return Traits::EmptyExpression();
+      return impl()->EmptyExpression();
     }
 
     // If we didn't die parsing that expression, our next token should be a
@@ -3601,16 +4233,16 @@
     pos = position();
 
     if (next == Token::EOS) {
-      ReportMessageAt(Scanner::Location(start, pos),
-                      MessageTemplate::kUnterminatedTemplate);
+      impl()->ReportMessageAt(Scanner::Location(start, pos),
+                              MessageTemplate::kUnterminatedTemplate);
       *ok = false;
-      return Traits::EmptyExpression();
+      return impl()->EmptyExpression();
     } else if (next == Token::ILLEGAL) {
-      Traits::ReportMessageAt(
+      impl()->ReportMessageAt(
           Scanner::Location(position() + 1, peek_position()),
           MessageTemplate::kUnexpectedToken, "ILLEGAL", kSyntaxError);
       *ok = false;
-      return Traits::EmptyExpression();
+      return impl()->EmptyExpression();
     }
 
     impl()->AddTemplateSpan(&ts, next == Token::TEMPLATE_TAIL);
@@ -3627,8 +4259,8 @@
 ParserBase<Impl>::CheckAndRewriteReferenceExpression(
     ExpressionT expression, int beg_pos, int end_pos,
     MessageTemplate::Template message, bool* ok) {
-  return this->CheckAndRewriteReferenceExpression(expression, beg_pos, end_pos,
-                                                  message, kReferenceError, ok);
+  return CheckAndRewriteReferenceExpression(expression, beg_pos, end_pos,
+                                            message, kReferenceError, ok);
 }
 
 template <typename Impl>
@@ -3636,12 +4268,12 @@
 ParserBase<Impl>::CheckAndRewriteReferenceExpression(
     ExpressionT expression, int beg_pos, int end_pos,
     MessageTemplate::Template message, ParseErrorType type, bool* ok) {
-  if (this->IsIdentifier(expression) && is_strict(language_mode()) &&
-      this->IsEvalOrArguments(this->AsIdentifier(expression))) {
+  if (impl()->IsIdentifier(expression) && is_strict(language_mode()) &&
+      impl()->IsEvalOrArguments(impl()->AsIdentifier(expression))) {
     ReportMessageAt(Scanner::Location(beg_pos, end_pos),
                     MessageTemplate::kStrictEvalArguments, kSyntaxError);
     *ok = false;
-    return this->EmptyExpression();
+    return impl()->EmptyExpression();
   }
   if (expression->IsValidReferenceExpression()) {
     return expression;
@@ -3649,47 +4281,1140 @@
   if (expression->IsCall()) {
     // If it is a call, make it a runtime error for legacy web compatibility.
     // Rewrite `expr' to `expr[throw ReferenceError]'.
-    ExpressionT error = this->NewThrowReferenceError(message, beg_pos);
+    ExpressionT error = impl()->NewThrowReferenceError(message, beg_pos);
     return factory()->NewProperty(expression, error, beg_pos);
   }
   ReportMessageAt(Scanner::Location(beg_pos, end_pos), message, type);
   *ok = false;
-  return this->EmptyExpression();
+  return impl()->EmptyExpression();
 }
 
 template <typename Impl>
 bool ParserBase<Impl>::IsValidReferenceExpression(ExpressionT expression) {
-  return this->IsAssignableIdentifier(expression) || expression->IsProperty();
+  return IsAssignableIdentifier(expression) || expression->IsProperty();
 }
 
 template <typename Impl>
-void ParserBase<Impl>::CheckDestructuringElement(
-    ExpressionT expression, ExpressionClassifier* classifier, int begin,
-    int end) {
+void ParserBase<Impl>::CheckDestructuringElement(ExpressionT expression,
+                                                 int begin, int end) {
   if (!IsValidPattern(expression) && !expression->IsAssignment() &&
       !IsValidReferenceExpression(expression)) {
-    classifier->RecordAssignmentPatternError(
+    classifier()->RecordAssignmentPatternError(
         Scanner::Location(begin, end),
         MessageTemplate::kInvalidDestructuringTarget);
   }
 }
 
+template <typename Impl>
+typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseV8Intrinsic(
+    bool* ok) {
+  // CallRuntime ::
+  //   '%' Identifier Arguments
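+  //
+  // e.g.:  %DebugPrint(obj)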
+
+  int pos = peek_position();
+  Expect(Token::MOD, CHECK_OK);
+  // Allow "eval" or "arguments" for backward compatibility.
+  IdentifierT name = ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
+  Scanner::Location spread_pos;
+  ExpressionClassifier classifier(this);
+  ExpressionListT args = ParseArguments(&spread_pos, CHECK_OK);
+
+  DCHECK(!spread_pos.IsValid());
+
+  return impl()->NewV8Intrinsic(name, args, pos, ok);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseDoExpression(
+    bool* ok) {
+  // AssignmentExpression ::
+  //     do '{' StatementList '}'
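+  //
+  // Illustrative example:  let x = do { let t = f(); t * t };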
+
+  int pos = peek_position();
+  Expect(Token::DO, CHECK_OK);
+  BlockT block = ParseBlock(nullptr, CHECK_OK);
+  return impl()->RewriteDoExpression(block, pos, ok);
+}
+
+// Redefinition of CHECK_OK for parsing statements.
+#undef CHECK_OK
+#define CHECK_OK CHECK_OK_CUSTOM(NullStatement)
+
+template <typename Impl>
+typename ParserBase<Impl>::LazyParsingResult
+ParserBase<Impl>::ParseStatementList(StatementListT body, int end_token,
+                                     bool may_abort, bool* ok) {
+  // StatementList ::
+  //   (StatementListItem)* <end_token>
+
+  // Allocate a target stack to use for this set of source
+  // elements. This way, all scripts and functions get their own
+  // target stack, thus avoiding illegal breaks and continues across
+  // functions.
+  typename Types::TargetScope target_scope(this);
+  int count_statements = 0;
+
+  DCHECK(!impl()->IsNullStatementList(body));
+  bool directive_prologue = true;  // Parsing directive prologue.
+
+  while (peek() != end_token) {
+    if (directive_prologue && peek() != Token::STRING) {
+      directive_prologue = false;
+    }
+
+    bool starts_with_identifier = peek() == Token::IDENTIFIER;
+    Scanner::Location token_loc = scanner()->peek_location();
+    StatementT stat =
+        ParseStatementListItem(CHECK_OK_CUSTOM(Return, kLazyParsingComplete));
+
+    if (impl()->IsNullStatement(stat) || impl()->IsEmptyStatement(stat)) {
+      directive_prologue = false;  // End of directive prologue.
+      continue;
+    }
+
+    if (directive_prologue) {
+      // The length of the token is used to distinguish between string literals
+      // that compare equal to directives but contain either escape sequences
+      // (e.g., "use \x73trict") or line continuations (e.g., "use \(newline)
+      // strict").
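+      // The token location includes the quotes, so an unescaped directive is
+      // exactly 12 characters long ("use strict" is 10 characters plus two
+      // quotes), which equals sizeof("use strict") + 1 since sizeof counts
+      // the trailing NUL.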
+      if (impl()->IsUseStrictDirective(stat) &&
+          token_loc.end_pos - token_loc.beg_pos == sizeof("use strict") + 1) {
+        // Directive "use strict" (ES5 14.1).
+        RaiseLanguageMode(STRICT);
+        if (!scope()->HasSimpleParameters()) {
+          // TC39 deemed "use strict" directives to be an error when occurring
+          // in the body of a function with non-simple parameter list, on
+          // 29/7/2015. https://goo.gl/ueA7Ln
+          impl()->ReportMessageAt(
+              token_loc, MessageTemplate::kIllegalLanguageModeDirective,
+              "use strict");
+          *ok = false;
+          return kLazyParsingComplete;
+        }
+        // Because declarations in strict eval code don't leak into the scope
+        // of the eval call, it is likely that functions declared in strict
+        // eval code will be used within the eval code, so lazy parsing is
+        // probably not a win.
+        if (scope()->is_eval_scope()) mode_ = PARSE_EAGERLY;
+      } else if (impl()->IsUseAsmDirective(stat) &&
+                 token_loc.end_pos - token_loc.beg_pos ==
+                     sizeof("use asm") + 1) {
+        // Directive "use asm".
+        impl()->SetAsmModule();
+      } else if (impl()->IsStringLiteral(stat)) {
+        // Possibly an unknown directive.
+        // Should not change mode, but will increment usage counters
+        // as appropriate. Ditto usages below.
+        RaiseLanguageMode(SLOPPY);
+      } else {
+        // End of the directive prologue.
+        directive_prologue = false;
+        RaiseLanguageMode(SLOPPY);
+      }
+    } else {
+      RaiseLanguageMode(SLOPPY);
+    }
+
+    // If we're allowed to abort, we will do so when we see a "long and
+    // trivial" function. Our current definition of "long and trivial" is:
+    // - over kLazyParseTrialLimit statements
+    // - all starting with an identifier (i.e., no if, for, while, etc.)
+    if (may_abort) {
+      if (!starts_with_identifier) {
+        may_abort = false;
+      } else if (++count_statements > kLazyParseTrialLimit) {
+        return kLazyParsingAborted;
+      }
+    }
+
+    body->Add(stat, zone());
+  }
+  return kLazyParsingComplete;
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseStatementListItem(
+    bool* ok) {
+  // ECMA 262 6th Edition
+  // StatementListItem[Yield, Return] :
+  //   Statement[?Yield, ?Return]
+  //   Declaration[?Yield]
+  //
+  // Declaration[Yield] :
+  //   HoistableDeclaration[?Yield]
+  //   ClassDeclaration[?Yield]
+  //   LexicalDeclaration[In, ?Yield]
+  //
+  // HoistableDeclaration[Yield, Default] :
+  //   FunctionDeclaration[?Yield, ?Default]
+  //   GeneratorDeclaration[?Yield, ?Default]
+  //
+  // LexicalDeclaration[In, Yield] :
+  //   LetOrConst BindingList[?In, ?Yield] ;
+
+  switch (peek()) {
+    case Token::FUNCTION:
+      return ParseHoistableDeclaration(nullptr, false, ok);
+    case Token::CLASS:
+      Consume(Token::CLASS);
+      return ParseClassDeclaration(nullptr, false, ok);
+    case Token::VAR:
+    case Token::CONST:
+      return ParseVariableStatement(kStatementListItem, nullptr, ok);
+    case Token::LET:
+      if (IsNextLetKeyword()) {
+        return ParseVariableStatement(kStatementListItem, nullptr, ok);
+      }
+      break;
+    case Token::ASYNC:
+      if (allow_harmony_async_await() && PeekAhead() == Token::FUNCTION &&
+          !scanner()->HasAnyLineTerminatorAfterNext()) {
+        Consume(Token::ASYNC);
+        return ParseAsyncFunctionDeclaration(nullptr, false, ok);
+      }
+    /* falls through */
+    default:
+      break;
+  }
+  return ParseStatement(nullptr, kAllowLabelledFunctionStatement, ok);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseStatement(
+    ZoneList<const AstRawString*>* labels,
+    AllowLabelledFunctionStatement allow_function, bool* ok) {
+  // Statement ::
+  //   Block
+  //   VariableStatement
+  //   EmptyStatement
+  //   ExpressionStatement
+  //   IfStatement
+  //   IterationStatement
+  //   ContinueStatement
+  //   BreakStatement
+  //   ReturnStatement
+  //   WithStatement
+  //   LabelledStatement
+  //   SwitchStatement
+  //   ThrowStatement
+  //   TryStatement
+  //   DebuggerStatement
+
+  // Note: Since labels can only be used by 'break' and 'continue'
+  // statements, which themselves are only valid within blocks,
+  // iterations or 'switch' statements (i.e., BreakableStatements),
+  // labels can simply be ignored in all other cases, except for
+  // trivial labeled break statements 'label: break label', which are
+  // parsed into empty statements.
+  switch (peek()) {
+    case Token::LBRACE:
+      return ParseBlock(labels, ok);
+    case Token::SEMICOLON:
+      Next();
+      return factory()->NewEmptyStatement(kNoSourcePosition);
+    case Token::IF:
+      return ParseIfStatement(labels, ok);
+    case Token::DO:
+      return ParseDoWhileStatement(labels, ok);
+    case Token::WHILE:
+      return ParseWhileStatement(labels, ok);
+    case Token::FOR:
+      return ParseForStatement(labels, ok);
+    case Token::CONTINUE:
+    case Token::BREAK:
+    case Token::RETURN:
+    case Token::THROW:
+    case Token::TRY: {
+      // These statements must have their labels preserved in an enclosing
+      // block, as the corresponding AST nodes do not currently store their
+      // labels.
+      // TODO(nikolaos, marja): Consider adding the labels to the AST nodes.
+      if (labels == nullptr) {
+        return ParseStatementAsUnlabelled(labels, ok);
+      } else {
+        BlockT result =
+            factory()->NewBlock(labels, 1, false, kNoSourcePosition);
+        typename Types::Target target(this, result);
+        StatementT statement = ParseStatementAsUnlabelled(labels, CHECK_OK);
+        result->statements()->Add(statement, zone());
+        return result;
+      }
+    }
+    case Token::WITH:
+      return ParseWithStatement(labels, ok);
+    case Token::SWITCH:
+      return ParseSwitchStatement(labels, ok);
+    case Token::FUNCTION:
+      // FunctionDeclaration only allowed as a StatementListItem, not in
+      // an arbitrary Statement position. Exceptions such as
+      // ES#sec-functiondeclarations-in-ifstatement-statement-clauses
+      // are handled by calling ParseScopedStatement rather than
+      // ParseStatement directly.
+      impl()->ReportMessageAt(scanner()->peek_location(),
+                              is_strict(language_mode())
+                                  ? MessageTemplate::kStrictFunction
+                                  : MessageTemplate::kSloppyFunction);
+      *ok = false;
+      return impl()->NullStatement();
+    case Token::DEBUGGER:
+      return ParseDebuggerStatement(ok);
+    case Token::VAR:
+      return ParseVariableStatement(kStatement, nullptr, ok);
+    default:
+      return ParseExpressionOrLabelledStatement(labels, allow_function, ok);
+  }
+}
+
+// This method parses a subset of statements (break, continue, return, throw,
+// try) which are to be grouped because they all require their labels to be
+// preserved in an enclosing block.
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseStatementAsUnlabelled(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  switch (peek()) {
+    case Token::CONTINUE:
+      return ParseContinueStatement(ok);
+    case Token::BREAK:
+      return ParseBreakStatement(labels, ok);
+    case Token::RETURN:
+      return ParseReturnStatement(ok);
+    case Token::THROW:
+      return ParseThrowStatement(ok);
+    case Token::TRY:
+      return ParseTryStatement(ok);
+    default:
+      UNREACHABLE();
+      return impl()->NullStatement();
+  }
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::BlockT ParserBase<Impl>::ParseBlock(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // Block ::
+  //   '{' StatementList '}'
+
+  // Construct block expecting 16 statements.
+  BlockT body = factory()->NewBlock(labels, 16, false, kNoSourcePosition);
+
+  // Parse the statements and collect escaping labels.
+  Expect(Token::LBRACE, CHECK_OK_CUSTOM(NullBlock));
+  {
+    BlockState block_state(zone(), &scope_state_);
+    block_state.set_start_position(scanner()->location().beg_pos);
+    typename Types::Target target(this, body);
+
+    while (peek() != Token::RBRACE) {
+      StatementT stat = ParseStatementListItem(CHECK_OK_CUSTOM(NullBlock));
+      if (!impl()->IsNullStatement(stat) && !impl()->IsEmptyStatement(stat)) {
+        body->statements()->Add(stat, zone());
+      }
+    }
+
+    Expect(Token::RBRACE, CHECK_OK_CUSTOM(NullBlock));
+    block_state.set_end_position(scanner()->location().end_pos);
+    body->set_scope(block_state.FinalizedBlockScope());
+  }
+  return body;
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseScopedStatement(
+    ZoneList<const AstRawString*>* labels, bool legacy, bool* ok) {
+  if (is_strict(language_mode()) || peek() != Token::FUNCTION ||
+      (legacy && allow_harmony_restrictive_declarations())) {
+    return ParseStatement(labels, kDisallowLabelledFunctionStatement, ok);
+  } else {
+    if (legacy) {
+      impl()->CountUsage(v8::Isolate::kLegacyFunctionDeclaration);
+    }
+    // Make a block around the statement in case a lexical binding
+    // is introduced by a FunctionDeclaration.
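+    // e.g. in sloppy mode:  if (c) function f() {}
+    // is treated as:        if (c) { function f() {} }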
+    BlockState block_state(zone(), &scope_state_);
+    block_state.set_start_position(scanner()->location().beg_pos);
+    BlockT block = factory()->NewBlock(NULL, 1, false, kNoSourcePosition);
+    StatementT body = ParseFunctionDeclaration(CHECK_OK);
+    block->statements()->Add(body, zone());
+    block_state.set_end_position(scanner()->location().end_pos);
+    block->set_scope(block_state.FinalizedBlockScope());
+    return block;
+  }
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseVariableStatement(
+    VariableDeclarationContext var_context,
+    ZoneList<const AstRawString*>* names, bool* ok) {
+  // VariableStatement ::
+  //   VariableDeclarations ';'
+
+  // The scope of a var declared variable anywhere inside a function
+  // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
+  // transform a source-level var declaration into a (Function) Scope
+  // declaration, and rewrite the source-level initialization into an assignment
+  // statement. We use a block to collect multiple assignments.
+  //
+  // We mark the block as initializer block because we don't want the
+  // rewriter to add a '.result' assignment to such a block (to get compliant
+  // behavior for code such as print(eval('var x = 7')), and for cosmetic
+  // reasons when pretty-printing. Also, unless an assignment (initialization)
+  // is inside an initializer block, it is ignored.
+
+  DeclarationParsingResult parsing_result;
+  StatementT result =
+      ParseVariableDeclarations(var_context, &parsing_result, names, CHECK_OK);
+  ExpectSemicolon(CHECK_OK);
+  return result;
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseDebuggerStatement(
+    bool* ok) {
+  // In ECMA-262 'debugger' is defined as a reserved keyword. In some browser
+  // contexts this is used as a statement which invokes the debugger as if a
+  // break point is present.
+  // DebuggerStatement ::
+  //   'debugger' ';'
+
+  int pos = peek_position();
+  Expect(Token::DEBUGGER, CHECK_OK);
+  ExpectSemicolon(CHECK_OK);
+  return factory()->NewDebuggerStatement(pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT
+ParserBase<Impl>::ParseExpressionOrLabelledStatement(
+    ZoneList<const AstRawString*>* labels,
+    AllowLabelledFunctionStatement allow_function, bool* ok) {
+  // ExpressionStatement | LabelledStatement ::
+  //   Expression ';'
+  //   Identifier ':' Statement
+  //
+  // ExpressionStatement[Yield] :
+  //   [lookahead ∉ {{, function, class, let [}] Expression[In, ?Yield] ;
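+  //
+  // LabelledStatement example:  outer: for (;;) { break outer; }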
+
+  int pos = peek_position();
+
+  switch (peek()) {
+    case Token::FUNCTION:
+    case Token::LBRACE:
+      UNREACHABLE();  // Always handled by the callers.
+    case Token::CLASS:
+      ReportUnexpectedToken(Next());
+      *ok = false;
+      return impl()->NullStatement();
+    default:
+      break;
+  }
+
+  bool starts_with_identifier = peek_any_identifier();
+  ExpressionT expr = ParseExpression(true, CHECK_OK);
+  if (peek() == Token::COLON && starts_with_identifier &&
+      impl()->IsIdentifier(expr)) {
+    // The whole expression was a single identifier, and not, e.g.,
+    // something starting with an identifier or a parenthesized identifier.
+    labels = impl()->DeclareLabel(labels, impl()->AsIdentifierExpression(expr),
+                                  CHECK_OK);
+    Consume(Token::COLON);
+    // ES#sec-labelled-function-declarations Labelled Function Declarations
+    if (peek() == Token::FUNCTION && is_sloppy(language_mode())) {
+      if (allow_function == kAllowLabelledFunctionStatement) {
+        return ParseFunctionDeclaration(ok);
+      } else {
+        return ParseScopedStatement(labels, true, ok);
+      }
+    }
+    return ParseStatement(labels, kDisallowLabelledFunctionStatement, ok);
+  }
+
+  // If we have an extension, we allow a native function declaration.
+  // A native function declaration starts with "native function" with
+  // no line-terminator between the two words.
+  if (extension_ != nullptr && peek() == Token::FUNCTION &&
+      !scanner()->HasAnyLineTerminatorBeforeNext() && impl()->IsNative(expr) &&
+      !scanner()->literal_contains_escapes()) {
+    return ParseNativeDeclaration(ok);
+  }
+
+  // Parsed expression statement, followed by semicolon.
+  ExpectSemicolon(CHECK_OK);
+  return factory()->NewExpressionStatement(expr, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseIfStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // IfStatement ::
+  //   'if' '(' Expression ')' Statement ('else' Statement)?
+
+  int pos = peek_position();
+  Expect(Token::IF, CHECK_OK);
+  Expect(Token::LPAREN, CHECK_OK);
+  ExpressionT condition = ParseExpression(true, CHECK_OK);
+  Expect(Token::RPAREN, CHECK_OK);
+  StatementT then_statement = ParseScopedStatement(labels, false, CHECK_OK);
+  StatementT else_statement = impl()->NullStatement();
+  if (Check(Token::ELSE)) {
+    else_statement = ParseScopedStatement(labels, false, CHECK_OK);
+  } else {
+    else_statement = factory()->NewEmptyStatement(kNoSourcePosition);
+  }
+  return factory()->NewIfStatement(condition, then_statement, else_statement,
+                                   pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseContinueStatement(
+    bool* ok) {
+  // ContinueStatement ::
+  //   'continue' Identifier? ';'
+
+  int pos = peek_position();
+  Expect(Token::CONTINUE, CHECK_OK);
+  IdentifierT label = impl()->EmptyIdentifier();
+  Token::Value tok = peek();
+  if (!scanner()->HasAnyLineTerminatorBeforeNext() && tok != Token::SEMICOLON &&
+      tok != Token::RBRACE && tok != Token::EOS) {
+    // ECMA allows "eval" or "arguments" as labels even in strict mode.
+    label = ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
+  }
+  typename Types::IterationStatement target =
+      impl()->LookupContinueTarget(label, CHECK_OK);
+  if (impl()->IsNullStatement(target)) {
+    // Illegal continue statement.
+    MessageTemplate::Template message = MessageTemplate::kIllegalContinue;
+    if (!impl()->IsEmptyIdentifier(label)) {
+      message = MessageTemplate::kUnknownLabel;
+    }
+    ReportMessage(message, label);
+    *ok = false;
+    return impl()->NullStatement();
+  }
+  ExpectSemicolon(CHECK_OK);
+  return factory()->NewContinueStatement(target, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseBreakStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // BreakStatement ::
+  //   'break' Identifier? ';'
+
+  int pos = peek_position();
+  Expect(Token::BREAK, CHECK_OK);
+  IdentifierT label = impl()->EmptyIdentifier();
+  Token::Value tok = peek();
+  if (!scanner()->HasAnyLineTerminatorBeforeNext() && tok != Token::SEMICOLON &&
+      tok != Token::RBRACE && tok != Token::EOS) {
+    // ECMA allows "eval" or "arguments" as labels even in strict mode.
+    label = ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
+  }
+  // Parse labeled break statements that target themselves into
+  // empty statements, e.g. 'l1: l2: l3: break l2;'
+  if (!impl()->IsEmptyIdentifier(label) &&
+      impl()->ContainsLabel(labels, label)) {
+    ExpectSemicolon(CHECK_OK);
+    return factory()->NewEmptyStatement(pos);
+  }
+  typename Types::BreakableStatement target =
+      impl()->LookupBreakTarget(label, CHECK_OK);
+  if (impl()->IsNullStatement(target)) {
+    // Illegal break statement.
+    MessageTemplate::Template message = MessageTemplate::kIllegalBreak;
+    if (!impl()->IsEmptyIdentifier(label)) {
+      message = MessageTemplate::kUnknownLabel;
+    }
+    ReportMessage(message, label);
+    *ok = false;
+    return impl()->NullStatement();
+  }
+  ExpectSemicolon(CHECK_OK);
+  return factory()->NewBreakStatement(target, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseReturnStatement(
+    bool* ok) {
+  // ReturnStatement ::
+  //   'return' [no line terminator] Expression? ';'
+
+  // Consume the return token. It is necessary to do that before
+  // reporting any errors on it, because of the way errors are
+  // reported (underlining).
+  Expect(Token::RETURN, CHECK_OK);
+  Scanner::Location loc = scanner()->location();
+
+  switch (GetDeclarationScope()->scope_type()) {
+    case SCRIPT_SCOPE:
+    case EVAL_SCOPE:
+    case MODULE_SCOPE:
+      impl()->ReportMessageAt(loc, MessageTemplate::kIllegalReturn);
+      *ok = false;
+      return impl()->NullStatement();
+    default:
+      break;
+  }
+
+  Token::Value tok = peek();
+  ExpressionT return_value = impl()->EmptyExpression();
+  if (scanner()->HasAnyLineTerminatorBeforeNext() || tok == Token::SEMICOLON ||
+      tok == Token::RBRACE || tok == Token::EOS) {
+    if (IsSubclassConstructor(function_state_->kind())) {
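+      // In a derived-class constructor a bare 'return;' evaluates to 'this'.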
+      return_value = impl()->ThisExpression(loc.beg_pos);
+    } else {
+      return_value = impl()->GetLiteralUndefined(position());
+    }
+  } else {
+    if (IsSubclassConstructor(function_state_->kind())) {
+      // Because of the return code rewriting that happens in case of a subclass
+      // constructor we don't want to accept tail calls, therefore we don't set
+      // ReturnExprScope to kInsideValidReturnStatement here.
+      return_value = ParseExpression(true, CHECK_OK);
+    } else {
+      ReturnExprScope maybe_allow_tail_calls(
+          function_state_, ReturnExprContext::kInsideValidReturnStatement);
+      return_value = ParseExpression(true, CHECK_OK);
+
+      if (allow_tailcalls() && !is_sloppy(language_mode()) && !is_resumable()) {
+        // ES6 14.6.1 Static Semantics: IsInTailPosition
+        function_state_->AddImplicitTailCallExpression(return_value);
+      }
+    }
+  }
+  ExpectSemicolon(CHECK_OK);
+  return_value = impl()->RewriteReturn(return_value, loc.beg_pos);
+  return factory()->NewReturnStatement(return_value, loc.beg_pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseWithStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // WithStatement ::
+  //   'with' '(' Expression ')' Statement
+
+  Expect(Token::WITH, CHECK_OK);
+  int pos = position();
+
+  if (is_strict(language_mode())) {
+    ReportMessage(MessageTemplate::kStrictWith);
+    *ok = false;
+    return impl()->NullStatement();
+  }
+
+  Expect(Token::LPAREN, CHECK_OK);
+  ExpressionT expr = ParseExpression(true, CHECK_OK);
+  Expect(Token::RPAREN, CHECK_OK);
+
+  Scope* with_scope = NewScope(WITH_SCOPE);
+  StatementT body = impl()->NullStatement();
+  {
+    BlockState block_state(&scope_state_, with_scope);
+    with_scope->set_start_position(scanner()->peek_location().beg_pos);
+    body = ParseScopedStatement(labels, true, CHECK_OK);
+    with_scope->set_end_position(scanner()->location().end_pos);
+  }
+  return factory()->NewWithStatement(with_scope, expr, body, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseDoWhileStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // DoStatement ::
+  //   'do' Statement 'while' '(' Expression ')' ';'
+
+  auto loop = factory()->NewDoWhileStatement(labels, peek_position());
+  typename Types::Target target(this, loop);
+
+  Expect(Token::DO, CHECK_OK);
+  StatementT body = ParseScopedStatement(nullptr, true, CHECK_OK);
+  Expect(Token::WHILE, CHECK_OK);
+  Expect(Token::LPAREN, CHECK_OK);
+
+  ExpressionT cond = ParseExpression(true, CHECK_OK);
+  Expect(Token::RPAREN, CHECK_OK);
+
+  // Allow do-statements to be terminated with or without
+  // semicolons. This allows code such as 'do;while(0)return' to
+  // parse, which would not be the case if we had used the
+  // ExpectSemicolon() functionality here.
+  Check(Token::SEMICOLON);
+
+  loop->Initialize(cond, body);
+  return loop;
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseWhileStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // WhileStatement ::
+  //   'while' '(' Expression ')' Statement
+
+  auto loop = factory()->NewWhileStatement(labels, peek_position());
+  typename Types::Target target(this, loop);
+
+  Expect(Token::WHILE, CHECK_OK);
+  Expect(Token::LPAREN, CHECK_OK);
+  ExpressionT cond = ParseExpression(true, CHECK_OK);
+  Expect(Token::RPAREN, CHECK_OK);
+  StatementT body = ParseScopedStatement(nullptr, true, CHECK_OK);
+
+  loop->Initialize(cond, body);
+  return loop;
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseThrowStatement(
+    bool* ok) {
+  // ThrowStatement ::
+  //   'throw' Expression ';'
+
+  Expect(Token::THROW, CHECK_OK);
+  int pos = position();
+  if (scanner()->HasAnyLineTerminatorBeforeNext()) {
+    ReportMessage(MessageTemplate::kNewlineAfterThrow);
+    *ok = false;
+    return impl()->NullStatement();
+  }
+  ExpressionT exception = ParseExpression(true, CHECK_OK);
+  ExpectSemicolon(CHECK_OK);
+
+  return impl()->NewThrowStatement(exception, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseSwitchStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  // SwitchStatement ::
+  //   'switch' '(' Expression ')' '{' CaseClause* '}'
+  // CaseClause ::
+  //   'case' Expression ':' StatementList
+  //   'default' ':' StatementList
+
+  int switch_pos = peek_position();
+
+  Expect(Token::SWITCH, CHECK_OK);
+  Expect(Token::LPAREN, CHECK_OK);
+  ExpressionT tag = ParseExpression(true, CHECK_OK);
+  Expect(Token::RPAREN, CHECK_OK);
+
+  auto switch_statement = factory()->NewSwitchStatement(labels, switch_pos);
+
+  {
+    BlockState cases_block_state(zone(), &scope_state_);
+    cases_block_state.set_start_position(scanner()->location().beg_pos);
+    cases_block_state.SetNonlinear();
+    typename Types::Target target(this, switch_statement);
+
+    bool default_seen = false;
+    auto cases = impl()->NewCaseClauseList(4);
+    Expect(Token::LBRACE, CHECK_OK);
+    while (peek() != Token::RBRACE) {
+      // An empty label indicates the default case.
+      ExpressionT label = impl()->EmptyExpression();
+      if (Check(Token::CASE)) {
+        label = ParseExpression(true, CHECK_OK);
+      } else {
+        Expect(Token::DEFAULT, CHECK_OK);
+        if (default_seen) {
+          ReportMessage(MessageTemplate::kMultipleDefaultsInSwitch);
+          *ok = false;
+          return impl()->NullStatement();
+        }
+        default_seen = true;
+      }
+      Expect(Token::COLON, CHECK_OK);
+      int clause_pos = position();
+      StatementListT statements = impl()->NewStatementList(5);
+      while (peek() != Token::CASE && peek() != Token::DEFAULT &&
+             peek() != Token::RBRACE) {
+        StatementT stat = ParseStatementListItem(CHECK_OK);
+        statements->Add(stat, zone());
+      }
+      auto clause = factory()->NewCaseClause(label, statements, clause_pos);
+      cases->Add(clause, zone());
+    }
+    Expect(Token::RBRACE, CHECK_OK);
+
+    cases_block_state.set_end_position(scanner()->location().end_pos);
+    return impl()->RewriteSwitchStatement(
+        tag, switch_statement, cases, cases_block_state.FinalizedBlockScope());
+  }
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseTryStatement(
+    bool* ok) {
+  // TryStatement ::
+  //   'try' Block Catch
+  //   'try' Block Finally
+  //   'try' Block Catch Finally
+  //
+  // Catch ::
+  //   'catch' '(' Identifier ')' Block
+  //
+  // Finally ::
+  //   'finally' Block
+
+  Expect(Token::TRY, CHECK_OK);
+  int pos = position();
+
+  BlockT try_block = impl()->NullBlock();
+  {
+    ReturnExprScope no_tail_calls(function_state_,
+                                  ReturnExprContext::kInsideTryBlock);
+    try_block = ParseBlock(nullptr, CHECK_OK);
+  }
+
+  CatchInfo catch_info(this);
+  catch_info.for_promise_reject = allow_natives() && Check(Token::MOD);
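+  // Natives syntax only: a '%' between the try block and 'catch' sets
+  // for_promise_reject, marking the catch clause as a promise-rejection
+  // handler.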
+
+  if (peek() != Token::CATCH && peek() != Token::FINALLY) {
+    ReportMessage(MessageTemplate::kNoCatchOrFinally);
+    *ok = false;
+    return impl()->NullStatement();
+  }
+
+  BlockT catch_block = impl()->NullBlock();
+  if (Check(Token::CATCH)) {
+    Expect(Token::LPAREN, CHECK_OK);
+    catch_info.scope = NewScope(CATCH_SCOPE);
+    catch_info.scope->set_start_position(scanner()->location().beg_pos);
+
+    {
+      CollectExpressionsInTailPositionToListScope
+          collect_tail_call_expressions_scope(
+              function_state_, &catch_info.tail_call_expressions);
+      BlockState catch_block_state(&scope_state_, catch_info.scope);
+
+      catch_block = factory()->NewBlock(nullptr, 16, false, kNoSourcePosition);
+
+      // Create a block scope to hold any lexical declarations created
+      // as part of destructuring the catch parameter.
+      {
+        BlockState catch_variable_block_state(zone(), &scope_state_);
+        catch_variable_block_state.set_start_position(
+            scanner()->location().beg_pos);
+        typename Types::Target target(this, catch_block);
+
+        // This does not simply call ParsePrimaryExpression to prevent
+        // ExpressionFromIdentifier from being called in the first
+        // branch, which would introduce an unresolved symbol and mess
+        // with arrow function names.
+        if (peek_any_identifier()) {
+          catch_info.name =
+              ParseIdentifier(kDontAllowRestrictedIdentifiers, CHECK_OK);
+        } else {
+          ExpressionClassifier pattern_classifier(this);
+          catch_info.pattern = ParsePrimaryExpression(CHECK_OK);
+          ValidateBindingPattern(CHECK_OK);
+        }
+
+        Expect(Token::RPAREN, CHECK_OK);
+        impl()->RewriteCatchPattern(&catch_info, CHECK_OK);
+        if (!impl()->IsNullStatement(catch_info.init_block)) {
+          catch_block->statements()->Add(catch_info.init_block, zone());
+        }
+
+        catch_info.inner_block = ParseBlock(nullptr, CHECK_OK);
+        catch_block->statements()->Add(catch_info.inner_block, zone());
+        impl()->ValidateCatchBlock(catch_info, CHECK_OK);
+        catch_variable_block_state.set_end_position(
+            scanner()->location().end_pos);
+        catch_block->set_scope(
+            catch_variable_block_state.FinalizedBlockScope());
+      }
+    }
+
+    catch_info.scope->set_end_position(scanner()->location().end_pos);
+  }
+
+  BlockT finally_block = impl()->NullBlock();
+  DCHECK(peek() == Token::FINALLY || !impl()->IsNullStatement(catch_block));
+  if (Check(Token::FINALLY)) {
+    finally_block = ParseBlock(nullptr, CHECK_OK);
+  }
+
+  return impl()->RewriteTryStatement(try_block, catch_block, finally_block,
+                                     catch_info, pos);
+}
+
+template <typename Impl>
+typename ParserBase<Impl>::StatementT ParserBase<Impl>::ParseForStatement(
+    ZoneList<const AstRawString*>* labels, bool* ok) {
+  int stmt_pos = peek_position();
+  ForInfo for_info(this);
+  bool bound_names_are_lexical = false;
+
+  // Create an in-between scope for let-bound iteration variables.
+  BlockState for_state(zone(), &scope_state_);
+  Expect(Token::FOR, CHECK_OK);
+  Expect(Token::LPAREN, CHECK_OK);
+  for_state.set_start_position(scanner()->location().beg_pos);
+  for_state.set_is_hidden();
+
+  StatementT init = impl()->NullStatement();
+  if (peek() != Token::SEMICOLON) {
+    // An initializer is present.
+    if (peek() == Token::VAR || peek() == Token::CONST ||
+        (peek() == Token::LET && IsNextLetKeyword())) {
+      // The initializer contains declarations.
+      ParseVariableDeclarations(kForStatement, &for_info.parsing_result,
+                                nullptr, CHECK_OK);
+      bound_names_are_lexical =
+          IsLexicalVariableMode(for_info.parsing_result.descriptor.mode);
+      for_info.each_loc = scanner()->location();
+
+      if (CheckInOrOf(&for_info.mode)) {
+        // Just one declaration followed by in/of.
+        if (for_info.parsing_result.declarations.length() != 1) {
+          impl()->ReportMessageAt(
+              for_info.parsing_result.bindings_loc,
+              MessageTemplate::kForInOfLoopMultiBindings,
+              ForEachStatement::VisitModeString(for_info.mode));
+          *ok = false;
+          return impl()->NullStatement();
+        }
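+        // An initializer on the declared variable, e.g.
+        //   for (var x = 0 in obj) {}
+        // is only tolerated in the legacy sloppy-mode var/for-in case with a
+        // simple identifier binding, and becomes an error once
+        // allow_harmony_for_in() is enabled.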
+        if (for_info.parsing_result.first_initializer_loc.IsValid() &&
+            (is_strict(language_mode()) ||
+             for_info.mode == ForEachStatement::ITERATE ||
+             bound_names_are_lexical ||
+             !impl()->IsIdentifier(
+                 for_info.parsing_result.declarations[0].pattern) ||
+             allow_harmony_for_in())) {
+          // Only increment the use count if we would have let this through
+          // without the flag.
+          if (allow_harmony_for_in()) {
+            impl()->CountUsage(v8::Isolate::kForInInitializer);
+          }
+          impl()->ReportMessageAt(
+              for_info.parsing_result.first_initializer_loc,
+              MessageTemplate::kForInOfLoopInitializer,
+              ForEachStatement::VisitModeString(for_info.mode));
+          *ok = false;
+          return impl()->NullStatement();
+        }
+
+        BlockT init_block = impl()->RewriteForVarInLegacy(for_info);
+
+        auto loop =
+            factory()->NewForEachStatement(for_info.mode, labels, stmt_pos);
+        typename Types::Target target(this, loop);
+
+        int each_keyword_pos = scanner()->location().beg_pos;
+
+        ExpressionT enumerable = impl()->EmptyExpression();
+        if (for_info.mode == ForEachStatement::ITERATE) {
+          ExpressionClassifier classifier(this);
+          enumerable = ParseAssignmentExpression(true, CHECK_OK);
+          impl()->RewriteNonPattern(CHECK_OK);
+        } else {
+          enumerable = ParseExpression(true, CHECK_OK);
+        }
+
+        Expect(Token::RPAREN, CHECK_OK);
+
+        StatementT final_loop = impl()->NullStatement();
+        {
+          ReturnExprScope no_tail_calls(function_state_,
+                                        ReturnExprContext::kInsideForInOfBody);
+          BlockState block_state(zone(), &scope_state_);
+          block_state.set_start_position(scanner()->location().beg_pos);
+
+          StatementT body = ParseScopedStatement(nullptr, true, CHECK_OK);
+
+          BlockT body_block = impl()->NullBlock();
+          ExpressionT each_variable = impl()->EmptyExpression();
+          impl()->DesugarBindingInForEachStatement(&for_info, &body_block,
+                                                   &each_variable, CHECK_OK);
+          body_block->statements()->Add(body, zone());
+          final_loop = impl()->InitializeForEachStatement(
+              loop, each_variable, enumerable, body_block, each_keyword_pos);
+
+          block_state.set_end_position(scanner()->location().end_pos);
+          body_block->set_scope(block_state.FinalizedBlockScope());
+        }
+
+        init_block =
+            impl()->CreateForEachStatementTDZ(init_block, for_info, ok);
+
+        for_state.set_end_position(scanner()->location().end_pos);
+        Scope* for_scope = for_state.FinalizedBlockScope();
+        // Parsed for-in loop w/ variable declarations.
+        if (!impl()->IsNullStatement(init_block)) {
+          init_block->statements()->Add(final_loop, zone());
+          init_block->set_scope(for_scope);
+          return init_block;
+        } else {
+          DCHECK_NULL(for_scope);
+          return final_loop;
+        }
+      } else {
+        // One or more declaration not followed by in/of.
+        init = impl()->BuildInitializationBlock(
+            &for_info.parsing_result,
+            bound_names_are_lexical ? &for_info.bound_names : nullptr,
+            CHECK_OK);
+      }
+    } else {
+      // The initializer does not contain declarations.
+      int lhs_beg_pos = peek_position();
+      ExpressionClassifier classifier(this);
+      ExpressionT expression = ParseExpressionCoverGrammar(false, CHECK_OK);
+      int lhs_end_pos = scanner()->location().end_pos;
+
+      bool is_for_each = CheckInOrOf(&for_info.mode);
+      bool is_destructuring = is_for_each && (expression->IsArrayLiteral() ||
+                                              expression->IsObjectLiteral());
+
+      if (is_destructuring) {
+        ValidateAssignmentPattern(CHECK_OK);
+      } else {
+        impl()->RewriteNonPattern(CHECK_OK);
+      }
+
+      if (is_for_each) {
+        // Initializer is reference followed by in/of.
+        if (!is_destructuring) {
+          expression = impl()->CheckAndRewriteReferenceExpression(
+              expression, lhs_beg_pos, lhs_end_pos,
+              MessageTemplate::kInvalidLhsInFor, kSyntaxError, CHECK_OK);
+        }
+
+        auto loop =
+            factory()->NewForEachStatement(for_info.mode, labels, stmt_pos);
+        typename Types::Target target(this, loop);
+
+        int each_keyword_pos = scanner()->location().beg_pos;
+
+        ExpressionT enumerable = impl()->EmptyExpression();
+        if (for_info.mode == ForEachStatement::ITERATE) {
+          ExpressionClassifier classifier(this);
+          enumerable = ParseAssignmentExpression(true, CHECK_OK);
+          impl()->RewriteNonPattern(CHECK_OK);
+        } else {
+          enumerable = ParseExpression(true, CHECK_OK);
+        }
+
+        Expect(Token::RPAREN, CHECK_OK);
+
+        {
+          ReturnExprScope no_tail_calls(function_state_,
+                                        ReturnExprContext::kInsideForInOfBody);
+          BlockState block_state(zone(), &scope_state_);
+          block_state.set_start_position(scanner()->location().beg_pos);
+
+          // For legacy compat reasons, give for loops similar treatment to
+          // if statements in allowing a function declaration for a body.
+          StatementT body = ParseScopedStatement(nullptr, true, CHECK_OK);
+          block_state.set_end_position(scanner()->location().end_pos);
+          StatementT final_loop = impl()->InitializeForEachStatement(
+              loop, expression, enumerable, body, each_keyword_pos);
+
+          Scope* for_scope = for_state.FinalizedBlockScope();
+          DCHECK_NULL(for_scope);
+          USE(for_scope);
+          Scope* block_scope = block_state.FinalizedBlockScope();
+          DCHECK_NULL(block_scope);
+          USE(block_scope);
+          return final_loop;
+        }
+      } else {
+        // Initializer is just an expression.
+        init = factory()->NewExpressionStatement(expression, lhs_beg_pos);
+      }
+    }
+  }
+
+  // Standard 'for' loop, we have parsed the initializer at this point.
+  auto loop = factory()->NewForStatement(labels, stmt_pos);
+  typename Types::Target target(this, loop);
+
+  Expect(Token::SEMICOLON, CHECK_OK);
+
+  ExpressionT cond = impl()->EmptyExpression();
+  StatementT next = impl()->NullStatement();
+  StatementT body = impl()->NullStatement();
+
+  // If there are let bindings, then the condition and the next statement of
+  // the for loop must be parsed in a new scope.
+  Scope* inner_scope = scope();
+  // TODO(verwaest): Allocate this through a ScopeState as well.
+  if (bound_names_are_lexical && for_info.bound_names.length() > 0) {
+    inner_scope = NewScopeWithParent(inner_scope, BLOCK_SCOPE);
+    inner_scope->set_start_position(scanner()->location().beg_pos);
+  }
+  {
+    BlockState block_state(&scope_state_, inner_scope);
+
+    if (peek() != Token::SEMICOLON) {
+      cond = ParseExpression(true, CHECK_OK);
+    }
+    Expect(Token::SEMICOLON, CHECK_OK);
+
+    if (peek() != Token::RPAREN) {
+      ExpressionT exp = ParseExpression(true, CHECK_OK);
+      next = factory()->NewExpressionStatement(exp, exp->position());
+    }
+    Expect(Token::RPAREN, CHECK_OK);
+
+    body = ParseScopedStatement(nullptr, true, CHECK_OK);
+  }
+
+  if (bound_names_are_lexical && for_info.bound_names.length() > 0) {
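+    // Lexically bound names declared in the for head get a fresh binding
+    // per iteration, which is what the desugaring below implements.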
+    auto result = impl()->DesugarLexicalBindingsInForStatement(
+        loop, init, cond, next, body, inner_scope, for_info, CHECK_OK);
+    for_state.set_end_position(scanner()->location().end_pos);
+    return result;
+  } else {
+    for_state.set_end_position(scanner()->location().end_pos);
+    Scope* for_scope = for_state.FinalizedBlockScope();
+    if (for_scope != nullptr) {
+      // Rewrite a for statement of the form
+      //   for (const x = i; c; n) b
+      //
+      // into
+      //
+      //   {
+      //     const x = i;
+      //     for (; c; n) b
+      //   }
+      //
+      // or, desugar
+      //   for (; c; n) b
+      // into
+      //   {
+      //     for (; c; n) b
+      //   }
+      // just in case b introduces a lexical binding some other way, e.g., if b
+      // is a FunctionDeclaration.
+      BlockT block = factory()->NewBlock(nullptr, 2, false, kNoSourcePosition);
+      if (!impl()->IsNullStatement(init)) {
+        block->statements()->Add(init, zone());
+      }
+      block->statements()->Add(loop, zone());
+      block->set_scope(for_scope);
+      loop->Initialize(init, cond, next, body);
+      return block;
+    } else {
+      loop->Initialize(init, cond, next, body);
+      return loop;
+    }
+  }
+}
 
 #undef CHECK_OK
 #undef CHECK_OK_CUSTOM
 
 template <typename Impl>
-void ParserBase<Impl>::ObjectLiteralChecker::CheckProperty(
-    Token::Value property, PropertyKind type, MethodKind method_type,
-    ExpressionClassifier* classifier, bool* ok) {
-  DCHECK(!IsStaticMethod(method_type));
-  DCHECK(!IsSpecialMethod(method_type) || type == kMethodProperty);
-
+void ParserBase<Impl>::ObjectLiteralChecker::CheckDuplicateProto(
+    Token::Value property) {
   if (property == Token::SMI || property == Token::NUMBER) return;
 
-  if (type == kValueProperty && IsProto()) {
+  if (IsProto()) {
     if (has_seen_proto_) {
-      classifier->RecordObjectLiteralError(
+      this->parser()->classifier()->RecordExpressionError(
           this->scanner()->location(), MessageTemplate::kDuplicateProto);
       return;
     }
@@ -3698,23 +5423,22 @@
 }
 
 template <typename Impl>
-void ParserBase<Impl>::ClassLiteralChecker::CheckProperty(
-    Token::Value property, PropertyKind type, MethodKind method_type,
-    ExpressionClassifier* classifier, bool* ok) {
-  DCHECK(type == kMethodProperty || type == kAccessorProperty);
+void ParserBase<Impl>::ClassLiteralChecker::CheckClassMethodName(
+    Token::Value property, PropertyKind type, bool is_generator, bool is_async,
+    bool is_static, bool* ok) {
+  DCHECK(type == PropertyKind::kMethodProperty ||
+         type == PropertyKind::kAccessorProperty);
 
   if (property == Token::SMI || property == Token::NUMBER) return;
 
-  if (IsStaticMethod(method_type)) {
+  if (is_static) {
     if (IsPrototype()) {
       this->parser()->ReportMessage(MessageTemplate::kStaticPrototype);
       *ok = false;
       return;
     }
   } else if (IsConstructor()) {
-    const bool is_generator = IsGeneratorMethod(method_type);
-    const bool is_async = IsAsyncMethod(method_type);
-    if (is_generator || is_async || type == kAccessorProperty) {
+    if (is_generator || is_async || type == PropertyKind::kAccessorProperty) {
       MessageTemplate::Template msg =
           is_generator ? MessageTemplate::kConstructorIsGenerator
                        : is_async ? MessageTemplate::kConstructorIsAsync
diff --git a/src/parsing/parser.cc b/src/parsing/parser.cc
index cfc2de8..7b88695 100644
--- a/src/parsing/parser.cc
+++ b/src/parsing/parser.cc
@@ -15,6 +15,7 @@
 #include "src/base/platform/platform.h"
 #include "src/char-predicates-inl.h"
 #include "src/messages.h"
+#include "src/parsing/duplicate-finder.h"
 #include "src/parsing/parameter-initializer-rewriter.h"
 #include "src/parsing/parse-info.h"
 #include "src/parsing/rewriter.h"
@@ -121,12 +122,20 @@
     if (use_temp_zone) {
       parser_->fni_ = &fni_;
       parser_->zone_ = temp_zone;
+      if (parser_->reusable_preparser_ != nullptr) {
+        parser_->reusable_preparser_->zone_ = temp_zone;
+      }
     }
   }
-  ~DiscardableZoneScope() {
+  void Reset() {
     parser_->fni_ = prev_fni_;
     parser_->zone_ = prev_zone_;
+    if (parser_->reusable_preparser_ != nullptr) {
+      parser_->reusable_preparser_->zone_ = prev_zone_;
+    }
+    ast_node_factory_scope_.Reset();
   }
+  ~DiscardableZoneScope() { Reset(); }
 
  private:
   AstNodeFactory::BodyScope ast_node_factory_scope_;
@@ -149,9 +158,64 @@
   }
 }
 
+Expression* Parser::CallClassFieldInitializer(Scope* scope,
+                                              Expression* this_expr) {
+  // This produces the expression
+  // `.class_field_initializer(this_expr)`, where '.class_field_initializer'
+  // is the name of a synthetic variable.
+  // 'this_expr' will be 'this' in a base constructor and the result of
+  // calling 'super' in a derived one.
+  const AstRawString* init_fn_name =
+      ast_value_factory()->dot_class_field_init_string();
+  VariableProxy* init_fn_proxy = scope->NewUnresolved(factory(), init_fn_name);
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(2, zone());
+  args->Add(init_fn_proxy, zone());
+  args->Add(this_expr, zone());
+  return factory()->NewCallRuntime(Runtime::kInlineCall, args,
+                                   kNoSourcePosition);
+}
+
+Expression* Parser::RewriteSuperCall(Expression* super_call) {
+  // TODO(bakkot) find a way to avoid this for classes without fields.
+  if (!allow_harmony_class_fields()) {
+    return super_call;
+  }
+  // This turns a super call `super()` into a do expression of the form
+  // do {
+  //   tmp x = super();
+  //   if (.class-field-init)
+  //     .class-field-init(x)
+  //   x; // This isn't actually present; our do-expression representation
+  //      // allows specifying that the expression returns x directly.
+  // }
+  Variable* var_tmp =
+      scope()->NewTemporary(ast_value_factory()->empty_string());
+  Block* block = factory()->NewBlock(nullptr, 1, false, kNoSourcePosition);
+  Assignment* assignment = factory()->NewAssignment(
+      Token::ASSIGN, factory()->NewVariableProxy(var_tmp), super_call,
+      kNoSourcePosition);
+  block->statements()->Add(
+      factory()->NewExpressionStatement(assignment, kNoSourcePosition), zone());
+  const AstRawString* init_fn_name =
+      ast_value_factory()->dot_class_field_init_string();
+  VariableProxy* init_fn_proxy =
+      scope()->NewUnresolved(factory(), init_fn_name);
+  Expression* condition = init_fn_proxy;
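+  // Call the field initializer only if one has been set up for this class.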
+  Statement* initialize = factory()->NewExpressionStatement(
+      CallClassFieldInitializer(scope(), factory()->NewVariableProxy(var_tmp)),
+      kNoSourcePosition);
+  IfStatement* if_statement = factory()->NewIfStatement(
+      condition, initialize, factory()->NewEmptyStatement(kNoSourcePosition),
+      kNoSourcePosition);
+  block->statements()->Add(if_statement, zone());
+  return factory()->NewDoExpression(block, var_tmp, kNoSourcePosition);
+}
+
 FunctionLiteral* Parser::DefaultConstructor(const AstRawString* name,
-                                            bool call_super, int pos,
-                                            int end_pos,
+                                            bool call_super,
+                                            bool requires_class_field_init,
+                                            int pos, int end_pos,
                                             LanguageMode language_mode) {
   int materialized_literal_count = -1;
   int expected_property_count = -1;
@@ -170,7 +234,7 @@
 
   {
     FunctionState function_state(&function_state_, &scope_state_,
-                                 function_scope, kind);
+                                 function_scope);
 
     body = new (zone()) ZoneList<Statement*>(call_super ? 2 : 1, zone());
     if (call_super) {
@@ -204,8 +268,11 @@
       VariableProxy* new_target_proxy =
           NewUnresolved(ast_value_factory()->new_target_string(), pos);
       args->Add(new_target_proxy, zone());
-      CallRuntime* call = factory()->NewCallRuntime(
+      Expression* call = factory()->NewCallRuntime(
           Context::REFLECT_CONSTRUCT_INDEX, args, pos);
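+      // In a derived-class default constructor, run the class field
+      // initializer on the object produced by the super call.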
+      if (requires_class_field_init) {
+        call = CallClassFieldInitializer(scope(), call);
+      }
       body->Add(factory()->NewReturnStatement(call, pos), zone());
     }
 
@@ -218,7 +285,9 @@
       expected_property_count, parameter_count,
       FunctionLiteral::kNoDuplicateParameters,
       FunctionLiteral::kAnonymousExpression,
-      FunctionLiteral::kShouldLazyCompile, kind, pos);
+      FunctionLiteral::kShouldLazyCompile, pos);
+
+  function_literal->set_requires_class_field_init(requires_class_field_init);
 
   return function_literal;
 }
@@ -230,41 +299,39 @@
 // 'continue' statement targets). Upon construction, a new target is
 // added; it is removed upon destruction.
 
-class Target BASE_EMBEDDED {
+class ParserTarget BASE_EMBEDDED {
  public:
-  Target(Target** variable, BreakableStatement* statement)
-      : variable_(variable), statement_(statement), previous_(*variable) {
-    *variable = this;
+  ParserTarget(ParserBase<Parser>* parser, BreakableStatement* statement)
+      : variable_(&parser->impl()->target_stack_),
+        statement_(statement),
+        previous_(parser->impl()->target_stack_) {
+    parser->impl()->target_stack_ = this;
   }
 
-  ~Target() {
-    *variable_ = previous_;
-  }
+  ~ParserTarget() { *variable_ = previous_; }
 
-  Target* previous() { return previous_; }
+  ParserTarget* previous() { return previous_; }
   BreakableStatement* statement() { return statement_; }
 
  private:
-  Target** variable_;
+  ParserTarget** variable_;
   BreakableStatement* statement_;
-  Target* previous_;
+  ParserTarget* previous_;
 };
 
-
-class TargetScope BASE_EMBEDDED {
+class ParserTargetScope BASE_EMBEDDED {
  public:
-  explicit TargetScope(Target** variable)
-      : variable_(variable), previous_(*variable) {
-    *variable = NULL;
+  explicit ParserTargetScope(ParserBase<Parser>* parser)
+      : variable_(&parser->impl()->target_stack_),
+        previous_(parser->impl()->target_stack_) {
+    parser->impl()->target_stack_ = nullptr;
   }
 
-  ~TargetScope() {
-    *variable_ = previous_;
-  }
+  ~ParserTargetScope() { *variable_ = previous_; }
 
  private:
-  Target** variable_;
-  Target* previous_;
+  ParserTarget** variable_;
+  ParserTarget* previous_;
 };
 
 
@@ -276,17 +343,14 @@
 // thus it must never be used where only a single statement
 // is correct (e.g. an if statement branch w/o braces)!
 
-#define CHECK_OK  ok);      \
-  if (!*ok) return nullptr; \
+#define CHECK_OK_VALUE(x) ok); \
+  if (!*ok) return x;          \
   ((void)0
 #define DUMMY )  // to make indentation work
 #undef DUMMY
 
-#define CHECK_OK_VOID  ok); \
-  if (!*ok) return;         \
-  ((void)0
-#define DUMMY )  // to make indentation work
-#undef DUMMY
+#define CHECK_OK CHECK_OK_VALUE(nullptr)
+#define CHECK_OK_VOID CHECK_OK_VALUE(this->Void())
 
 #define CHECK_FAILED /**/); \
   if (failed_) return nullptr;  \
@@ -297,76 +361,9 @@
 // ----------------------------------------------------------------------------
 // Implementation of Parser
 
-bool ParserBaseTraits<Parser>::IsEval(const AstRawString* identifier) const {
-  return identifier == delegate()->ast_value_factory()->eval_string();
-}
-
-bool ParserBaseTraits<Parser>::IsArguments(
-    const AstRawString* identifier) const {
-  return identifier == delegate()->ast_value_factory()->arguments_string();
-}
-
-bool ParserBaseTraits<Parser>::IsEvalOrArguments(
-    const AstRawString* identifier) const {
-  return IsEval(identifier) || IsArguments(identifier);
-}
-
-bool ParserBaseTraits<Parser>::IsUndefined(
-    const AstRawString* identifier) const {
-  return identifier == delegate()->ast_value_factory()->undefined_string();
-}
-
-bool ParserBaseTraits<Parser>::IsPrototype(
-    const AstRawString* identifier) const {
-  return identifier == delegate()->ast_value_factory()->prototype_string();
-}
-
-bool ParserBaseTraits<Parser>::IsConstructor(
-    const AstRawString* identifier) const {
-  return identifier == delegate()->ast_value_factory()->constructor_string();
-}
-
-bool ParserBaseTraits<Parser>::IsThisProperty(Expression* expression) {
-  DCHECK(expression != NULL);
-  Property* property = expression->AsProperty();
-  return property != NULL && property->obj()->IsVariableProxy() &&
-         property->obj()->AsVariableProxy()->is_this();
-}
-
-bool ParserBaseTraits<Parser>::IsIdentifier(Expression* expression) {
-  VariableProxy* operand = expression->AsVariableProxy();
-  return operand != NULL && !operand->is_this();
-}
-
-void ParserBaseTraits<Parser>::PushPropertyName(FuncNameInferrer* fni,
-                                                Expression* expression) {
-  if (expression->IsPropertyName()) {
-    fni->PushLiteralName(expression->AsLiteral()->AsRawPropertyName());
-  } else {
-    fni->PushLiteralName(
-        delegate()->ast_value_factory()->anonymous_function_string());
-  }
-}
-
-void ParserBaseTraits<Parser>::CheckAssigningFunctionLiteralToProperty(
-    Expression* left, Expression* right) {
-  DCHECK(left != NULL);
-  if (left->IsProperty() && right->IsFunctionLiteral()) {
-    right->AsFunctionLiteral()->set_pretenure();
-  }
-}
-
-Expression* ParserBaseTraits<Parser>::MarkExpressionAsAssigned(
-    Expression* expression) {
-  VariableProxy* proxy =
-      expression != NULL ? expression->AsVariableProxy() : NULL;
-  if (proxy != NULL) proxy->set_is_assigned();
-  return expression;
-}
-
-bool ParserBaseTraits<Parser>::ShortcutNumericLiteralBinaryExpression(
-    Expression** x, Expression* y, Token::Value op, int pos,
-    AstNodeFactory* factory) {
+bool Parser::ShortcutNumericLiteralBinaryExpression(Expression** x,
+                                                    Expression* y,
+                                                    Token::Value op, int pos) {
   if ((*x)->AsLiteral() && (*x)->AsLiteral()->raw_value()->IsNumber() &&
       y->AsLiteral() && y->AsLiteral()->raw_value()->IsNumber()) {
     double x_val = (*x)->AsLiteral()->raw_value()->AsNumber();
@@ -376,53 +373,53 @@
     bool has_dot = x_has_dot || y_has_dot;
     switch (op) {
       case Token::ADD:
-        *x = factory->NewNumberLiteral(x_val + y_val, pos, has_dot);
+        *x = factory()->NewNumberLiteral(x_val + y_val, pos, has_dot);
         return true;
       case Token::SUB:
-        *x = factory->NewNumberLiteral(x_val - y_val, pos, has_dot);
+        *x = factory()->NewNumberLiteral(x_val - y_val, pos, has_dot);
         return true;
       case Token::MUL:
-        *x = factory->NewNumberLiteral(x_val * y_val, pos, has_dot);
+        *x = factory()->NewNumberLiteral(x_val * y_val, pos, has_dot);
         return true;
       case Token::DIV:
-        *x = factory->NewNumberLiteral(x_val / y_val, pos, has_dot);
+        *x = factory()->NewNumberLiteral(x_val / y_val, pos, has_dot);
         return true;
       case Token::BIT_OR: {
         int value = DoubleToInt32(x_val) | DoubleToInt32(y_val);
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::BIT_AND: {
         int value = DoubleToInt32(x_val) & DoubleToInt32(y_val);
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::BIT_XOR: {
         int value = DoubleToInt32(x_val) ^ DoubleToInt32(y_val);
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::SHL: {
         int value = DoubleToInt32(x_val) << (DoubleToInt32(y_val) & 0x1f);
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::SHR: {
         uint32_t shift = DoubleToInt32(y_val) & 0x1f;
         uint32_t value = DoubleToUint32(x_val) >> shift;
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::SAR: {
         uint32_t shift = DoubleToInt32(y_val) & 0x1f;
         int value = ArithmeticShiftRight(DoubleToInt32(x_val), shift);
-        *x = factory->NewNumberLiteral(value, pos, has_dot);
+        *x = factory()->NewNumberLiteral(value, pos, has_dot);
         return true;
       }
       case Token::EXP: {
         double value = Pow(x_val, y_val);
         int int_value = static_cast<int>(value);
-        *x = factory->NewNumberLiteral(
+        *x = factory()->NewNumberLiteral(
             int_value == value && value != -0.0 ? int_value : value, pos,
             has_dot);
         return true;
@@ -434,15 +431,15 @@
   return false;
 }
 
-Expression* ParserBaseTraits<Parser>::BuildUnaryExpression(
-    Expression* expression, Token::Value op, int pos, AstNodeFactory* factory) {
+Expression* Parser::BuildUnaryExpression(Expression* expression,
+                                         Token::Value op, int pos) {
   DCHECK(expression != NULL);
   if (expression->IsLiteral()) {
     const AstValue* literal = expression->AsLiteral()->raw_value();
     if (op == Token::NOT) {
       // Convert the literal to a boolean condition and negate it.
       bool condition = literal->BooleanValue();
-      return factory->NewBooleanLiteral(!condition, pos);
+      return factory()->NewBooleanLiteral(!condition, pos);
     } else if (literal->IsNumber()) {
       // Compute some expressions involving only number literals.
       double value = literal->AsNumber();
@@ -451,9 +448,10 @@
         case Token::ADD:
           return expression;
         case Token::SUB:
-          return factory->NewNumberLiteral(-value, pos, has_dot);
+          return factory()->NewNumberLiteral(-value, pos, has_dot);
         case Token::BIT_NOT:
-          return factory->NewNumberLiteral(~DoubleToInt32(value), pos, has_dot);
+          return factory()->NewNumberLiteral(~DoubleToInt32(value), pos,
+                                             has_dot);
         default:
           break;
       }
@@ -461,53 +459,33 @@
   }
   // Desugar '+foo' => 'foo*1'
   if (op == Token::ADD) {
-    return factory->NewBinaryOperation(
-        Token::MUL, expression, factory->NewNumberLiteral(1, pos, true), pos);
+    return factory()->NewBinaryOperation(
+        Token::MUL, expression, factory()->NewNumberLiteral(1, pos, true), pos);
   }
   // The same idea for '-foo' => 'foo*(-1)'.
   if (op == Token::SUB) {
-    return factory->NewBinaryOperation(
-        Token::MUL, expression, factory->NewNumberLiteral(-1, pos), pos);
+    return factory()->NewBinaryOperation(
+        Token::MUL, expression, factory()->NewNumberLiteral(-1, pos), pos);
   }
   // ...and one more time for '~foo' => 'foo^(~0)'.
   if (op == Token::BIT_NOT) {
-    return factory->NewBinaryOperation(
-        Token::BIT_XOR, expression, factory->NewNumberLiteral(~0, pos), pos);
+    return factory()->NewBinaryOperation(
+        Token::BIT_XOR, expression, factory()->NewNumberLiteral(~0, pos), pos);
   }
-  return factory->NewUnaryOperation(op, expression, pos);
+  return factory()->NewUnaryOperation(op, expression, pos);
 }
 
-Expression* ParserBaseTraits<Parser>::BuildIteratorResult(Expression* value,
-                                                          bool done) {
+Expression* Parser::BuildIteratorResult(Expression* value, bool done) {
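+  // Produces an iterator result, i.e. %_CreateIterResultObject(value, done).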
   int pos = kNoSourcePosition;
-  AstNodeFactory* factory = delegate()->factory();
-  Zone* zone = delegate()->zone();
 
-  if (value == nullptr) value = factory->NewUndefinedLiteral(pos);
+  if (value == nullptr) value = factory()->NewUndefinedLiteral(pos);
 
-  auto args = new (zone) ZoneList<Expression*>(2, zone);
-  args->Add(value, zone);
-  args->Add(factory->NewBooleanLiteral(done, pos), zone);
+  auto args = new (zone()) ZoneList<Expression*>(2, zone());
+  args->Add(value, zone());
+  args->Add(factory()->NewBooleanLiteral(done, pos), zone());
 
-  return factory->NewCallRuntime(Runtime::kInlineCreateIterResultObject, args,
-                                 pos);
-}
-
-Expression* ParserBaseTraits<Parser>::NewThrowReferenceError(
-    MessageTemplate::Template message, int pos) {
-  return delegate()->NewThrowError(
-      Runtime::kNewReferenceError, message,
-      delegate()->ast_value_factory()->empty_string(), pos);
-}
-
-Expression* ParserBaseTraits<Parser>::NewThrowSyntaxError(
-    MessageTemplate::Template message, const AstRawString* arg, int pos) {
-  return delegate()->NewThrowError(Runtime::kNewSyntaxError, message, arg, pos);
-}
-
-Expression* ParserBaseTraits<Parser>::NewThrowTypeError(
-    MessageTemplate::Template message, const AstRawString* arg, int pos) {
-  return delegate()->NewThrowError(Runtime::kNewTypeError, message, arg, pos);
+  return factory()->NewCallRuntime(Runtime::kInlineCreateIterResultObject, args,
+                                   pos);
 }
 
 Expression* Parser::NewThrowError(Runtime::FunctionId id,
@@ -520,124 +498,62 @@
   return factory()->NewThrow(call_constructor, pos);
 }
 
-void ParserBaseTraits<Parser>::ReportMessageAt(
-    Scanner::Location source_location, MessageTemplate::Template message,
-    const char* arg, ParseErrorType error_type) {
-  if (delegate()->stack_overflow()) {
-    // Suppress the error message (syntax error or such) in the presence of a
-    // stack overflow. The isolate allows only one pending exception at at time
-    // and we want to report the stack overflow later.
-    return;
-  }
-  delegate()->pending_error_handler_.ReportMessageAt(source_location.beg_pos,
-                                                     source_location.end_pos,
-                                                     message, arg, error_type);
-}
-
-void ParserBaseTraits<Parser>::ReportMessageAt(
-    Scanner::Location source_location, MessageTemplate::Template message,
-    const AstRawString* arg, ParseErrorType error_type) {
-  if (delegate()->stack_overflow()) {
-    // Suppress the error message (syntax error or such) in the presence of a
-    // stack overflow. The isolate allows only one pending exception at at time
-    // and we want to report the stack overflow later.
-    return;
-  }
-  delegate()->pending_error_handler_.ReportMessageAt(source_location.beg_pos,
-                                                     source_location.end_pos,
-                                                     message, arg, error_type);
-}
-
-const AstRawString* ParserBaseTraits<Parser>::GetSymbol(
-    Scanner* scanner) const {
-  const AstRawString* result =
-      delegate()->scanner()->CurrentSymbol(delegate()->ast_value_factory());
-  DCHECK(result != NULL);
-  return result;
-}
-
-const AstRawString* ParserBaseTraits<Parser>::GetNumberAsSymbol(
-    Scanner* scanner) const {
-  double double_value = delegate()->scanner()->DoubleValue();
-  char array[100];
-  const char* string = DoubleToCString(double_value, ArrayVector(array));
-  return delegate()->ast_value_factory()->GetOneByteString(string);
-}
-
-const AstRawString* ParserBaseTraits<Parser>::GetNextSymbol(
-    Scanner* scanner) const {
-  return delegate()->scanner()->NextSymbol(delegate()->ast_value_factory());
-}
-
-Expression* ParserBaseTraits<Parser>::ThisExpression(int pos) {
-  return delegate()->NewUnresolved(
-      delegate()->ast_value_factory()->this_string(), pos, pos + 4,
-      Variable::THIS);
-}
-
-Expression* ParserBaseTraits<Parser>::NewSuperPropertyReference(
-    AstNodeFactory* factory, int pos) {
+Expression* Parser::NewSuperPropertyReference(int pos) {
   // this_function[home_object_symbol]
-  VariableProxy* this_function_proxy = delegate()->NewUnresolved(
-      delegate()->ast_value_factory()->this_function_string(), pos);
+  VariableProxy* this_function_proxy =
+      NewUnresolved(ast_value_factory()->this_function_string(), pos);
   Expression* home_object_symbol_literal =
-      factory->NewSymbolLiteral("home_object_symbol", kNoSourcePosition);
-  Expression* home_object = factory->NewProperty(
+      factory()->NewSymbolLiteral("home_object_symbol", kNoSourcePosition);
+  Expression* home_object = factory()->NewProperty(
       this_function_proxy, home_object_symbol_literal, pos);
-  return factory->NewSuperPropertyReference(
+  return factory()->NewSuperPropertyReference(
       ThisExpression(pos)->AsVariableProxy(), home_object, pos);
 }
 
-Expression* ParserBaseTraits<Parser>::NewSuperCallReference(
-    AstNodeFactory* factory, int pos) {
-  VariableProxy* new_target_proxy = delegate()->NewUnresolved(
-      delegate()->ast_value_factory()->new_target_string(), pos);
-  VariableProxy* this_function_proxy = delegate()->NewUnresolved(
-      delegate()->ast_value_factory()->this_function_string(), pos);
-  return factory->NewSuperCallReference(ThisExpression(pos)->AsVariableProxy(),
-                                        new_target_proxy, this_function_proxy,
-                                        pos);
+Expression* Parser::NewSuperCallReference(int pos) {
+  VariableProxy* new_target_proxy =
+      NewUnresolved(ast_value_factory()->new_target_string(), pos);
+  VariableProxy* this_function_proxy =
+      NewUnresolved(ast_value_factory()->this_function_string(), pos);
+  return factory()->NewSuperCallReference(
+      ThisExpression(pos)->AsVariableProxy(), new_target_proxy,
+      this_function_proxy, pos);
 }
 
-Expression* ParserBaseTraits<Parser>::NewTargetExpression(int pos) {
+Expression* Parser::NewTargetExpression(int pos) {
   static const int kNewTargetStringLength = 10;
-  auto proxy = delegate()->NewUnresolved(
-      delegate()->ast_value_factory()->new_target_string(), pos,
-      pos + kNewTargetStringLength);
+  auto proxy = NewUnresolved(ast_value_factory()->new_target_string(), pos,
+                             pos + kNewTargetStringLength);
   proxy->set_is_new_target();
   return proxy;
 }
 
-Expression* ParserBaseTraits<Parser>::FunctionSentExpression(
-    AstNodeFactory* factory, int pos) const {
+Expression* Parser::FunctionSentExpression(int pos) {
   // We desugar function.sent into %_GeneratorGetInputOrDebugPos(generator).
-  Zone* zone = delegate()->zone();
-  ZoneList<Expression*>* args = new (zone) ZoneList<Expression*>(1, zone);
-  VariableProxy* generator = factory->NewVariableProxy(
-      delegate()->function_state_->generator_object_variable());
-  args->Add(generator, zone);
-  return factory->NewCallRuntime(Runtime::kInlineGeneratorGetInputOrDebugPos,
-                                 args, pos);
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(1, zone());
+  VariableProxy* generator =
+      factory()->NewVariableProxy(function_state_->generator_object_variable());
+  args->Add(generator, zone());
+  return factory()->NewCallRuntime(Runtime::kInlineGeneratorGetInputOrDebugPos,
+                                   args, pos);
 }
 
-Literal* ParserBaseTraits<Parser>::ExpressionFromLiteral(
-    Token::Value token, int pos, Scanner* scanner,
-    AstNodeFactory* factory) const {
+Literal* Parser::ExpressionFromLiteral(Token::Value token, int pos) {
   switch (token) {
     case Token::NULL_LITERAL:
-      return factory->NewNullLiteral(pos);
+      return factory()->NewNullLiteral(pos);
     case Token::TRUE_LITERAL:
-      return factory->NewBooleanLiteral(true, pos);
+      return factory()->NewBooleanLiteral(true, pos);
     case Token::FALSE_LITERAL:
-      return factory->NewBooleanLiteral(false, pos);
+      return factory()->NewBooleanLiteral(false, pos);
     case Token::SMI: {
-      int value = scanner->smi_value();
-      return factory->NewSmiLiteral(value, pos);
+      int value = scanner()->smi_value();
+      return factory()->NewSmiLiteral(value, pos);
     }
     case Token::NUMBER: {
-      bool has_dot = scanner->ContainsDot();
-      double value = scanner->DoubleValue();
-      return factory->NewNumberLiteral(value, pos, has_dot);
+      bool has_dot = scanner()->ContainsDot();
+      double value = scanner()->DoubleValue();
+      return factory()->NewNumberLiteral(value, pos, has_dot);
     }
     default:
       DCHECK(false);
@@ -645,43 +561,74 @@
   return NULL;
 }
 
-Expression* ParserBaseTraits<Parser>::ExpressionFromIdentifier(
-    const AstRawString* name, int start_position, int end_position,
-    InferName infer) {
-  if (infer == InferName::kYes && delegate()->fni_ != NULL) {
-    delegate()->fni_->PushVariableName(name);
-  }
-  return delegate()->NewUnresolved(name, start_position, end_position);
-}
-
-Expression* ParserBaseTraits<Parser>::ExpressionFromString(
-    int pos, Scanner* scanner, AstNodeFactory* factory) const {
-  const AstRawString* symbol = GetSymbol(scanner);
-  if (delegate()->fni_ != NULL) delegate()->fni_->PushLiteralName(symbol);
-  return factory->NewStringLiteral(symbol, pos);
-}
-
-Expression* ParserBaseTraits<Parser>::GetIterator(Expression* iterable,
-                                                  AstNodeFactory* factory,
-                                                  int pos) {
+Expression* Parser::GetIterator(Expression* iterable, int pos) {
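+  // Builds the expression iterable[Symbol.iterator]().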
   Expression* iterator_symbol_literal =
-      factory->NewSymbolLiteral("iterator_symbol", kNoSourcePosition);
+      factory()->NewSymbolLiteral("iterator_symbol", kNoSourcePosition);
   Expression* prop =
-      factory->NewProperty(iterable, iterator_symbol_literal, pos);
-  Zone* zone = delegate()->zone();
-  ZoneList<Expression*>* args = new (zone) ZoneList<Expression*>(0, zone);
-  return factory->NewCall(prop, args, pos);
-}
-
-Literal* ParserBaseTraits<Parser>::GetLiteralTheHole(
-    int position, AstNodeFactory* factory) const {
-  return factory->NewTheHoleLiteral(kNoSourcePosition);
+      factory()->NewProperty(iterable, iterator_symbol_literal, pos);
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(0, zone());
+  return factory()->NewCall(prop, args, pos);
 }
 
 void Parser::MarkTailPosition(Expression* expression) {
   expression->MarkTail();
 }
 
+Expression* Parser::NewV8Intrinsic(const AstRawString* name,
+                                   ZoneList<Expression*>* args, int pos,
+                                   bool* ok) {
+  if (extension_ != nullptr) {
+    // The extension structures are only accessible while parsing the
+    // very first time, not when reparsing because of lazy compilation.
+    GetClosureScope()->ForceEagerCompilation();
+  }
+
+  DCHECK(name->is_one_byte());
+  const Runtime::Function* function =
+      Runtime::FunctionForName(name->raw_data(), name->length());
+
+  if (function != nullptr) {
+    // Check for possible name clash.
+    DCHECK_EQ(Context::kNotFound,
+              Context::IntrinsicIndexForName(name->raw_data(), name->length()));
+    // Check for built-in IS_VAR macro.
+    if (function->function_id == Runtime::kIS_VAR) {
+      DCHECK_EQ(Runtime::RUNTIME, function->intrinsic_type);
+      // %IS_VAR(x) evaluates to x if x is a variable and leads to a parse
+      // error otherwise.  Could be implemented as an inline function
+      // %_IS_VAR(x) to eliminate this special case.
+      if (args->length() == 1 && args->at(0)->AsVariableProxy() != nullptr) {
+        return args->at(0);
+      } else {
+        ReportMessage(MessageTemplate::kNotIsvar);
+        *ok = false;
+        return nullptr;
+      }
+    }
+
+    // Check that the expected number of arguments are being passed.
+    if (function->nargs != -1 && function->nargs != args->length()) {
+      ReportMessage(MessageTemplate::kRuntimeWrongNumArgs);
+      *ok = false;
+      return nullptr;
+    }
+
+    return factory()->NewCallRuntime(function, args, pos);
+  }
+
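+  // Not a runtime function; fall back to the intrinsics installed on the
+  // native context.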
+  int context_index =
+      Context::IntrinsicIndexForName(name->raw_data(), name->length());
+
+  // Check that the function is defined.
+  if (context_index == Context::kNotFound) {
+    ReportMessage(MessageTemplate::kNotDefined, name);
+    *ok = false;
+    return nullptr;
+  }
+
+  return factory()->NewCallRuntime(context_index, args, pos);
+}
+
 Parser::Parser(ParseInfo* info)
     : ParserBase<Parser>(info->zone(), &scanner_, info->stack_limit(),
                          info->extension(), info->ast_value_factory(), NULL),
@@ -699,7 +646,8 @@
   // ParseInfo during background parsing.
   DCHECK(!info->script().is_null() || info->source_stream() != nullptr ||
          info->character_stream() != nullptr);
-  set_allow_lazy(info->allow_lazy_parsing());
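+  // allow_lazy() now folds in the FLAG_lazy, native-script and extension
+  // checks, so callers only need to test it directly.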
+  set_allow_lazy(FLAG_lazy && info->allow_lazy_parsing() &&
+                 !info->is_native() && info->extension() == nullptr);
   set_allow_natives(FLAG_allow_natives_syntax || info->is_native());
   set_allow_tailcalls(FLAG_harmony_tailcalls && !info->is_native() &&
                       info->isolate()->is_tail_call_elimination_enabled());
@@ -711,6 +659,7 @@
   set_allow_harmony_async_await(FLAG_harmony_async_await);
   set_allow_harmony_restrictive_generators(FLAG_harmony_restrictive_generators);
   set_allow_harmony_trailing_commas(FLAG_harmony_trailing_commas);
+  set_allow_harmony_class_fields(FLAG_harmony_class_fields);
   for (int feature = 0; feature < v8::Isolate::kUseCounterFeatureCount;
        ++feature) {
     use_counts_[feature] = 0;
@@ -725,29 +674,19 @@
 }
 
 void Parser::DeserializeScopeChain(
-    ParseInfo* info, Handle<Context> context,
-    Scope::DeserializationMode deserialization_mode) {
+    ParseInfo* info, MaybeHandle<ScopeInfo> maybe_outer_scope_info) {
   DCHECK(ThreadId::Current().Equals(info->isolate()->thread_id()));
   // TODO(wingo): Add an outer SCRIPT_SCOPE corresponding to the native
   // context, which will have the "this" binding for script scopes.
   DeclarationScope* script_scope = NewScriptScope();
   info->set_script_scope(script_scope);
   Scope* scope = script_scope;
-  if (!context.is_null() && !context->IsNativeContext()) {
-    scope = Scope::DeserializeScopeChain(info->isolate(), zone(), *context,
-                                         script_scope, ast_value_factory(),
-                                         deserialization_mode);
-    if (info->context().is_null()) {
-      DCHECK(deserialization_mode ==
-             Scope::DeserializationMode::kDeserializeOffHeap);
-    } else {
-      // The Scope is backed up by ScopeInfo (which is in the V8 heap); this
-      // means the Parser cannot operate independent of the V8 heap. Tell the
-      // string table to internalize strings and values right after they're
-      // created. This kind of parsing can only be done in the main thread.
-      DCHECK(parsing_on_main_thread_);
-      ast_value_factory()->Internalize(info->isolate());
-    }
+  Handle<ScopeInfo> outer_scope_info;
+  if (maybe_outer_scope_info.ToHandle(&outer_scope_info)) {
+    scope = Scope::DeserializeScopeChain(
+        info->isolate(), zone(), *outer_scope_info, script_scope,
+        ast_value_factory(), Scope::DeserializationMode::kScopesOnly);
+    DCHECK(!info->is_module() || scope->is_module_scope());
   }
   original_scope_ = scope;
 }
@@ -762,8 +701,7 @@
 
   HistogramTimerScope timer_scope(isolate->counters()->parse(), true);
   RuntimeCallTimerScope runtime_timer(isolate, &RuntimeCallStats::Parse);
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::Parse);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.Parse");
   Handle<String> source(String::cast(info->script()->source()));
   isolate->counters()->total_parse_size()->Increment(source->length());
   base::ElapsedTimer timer;
@@ -781,24 +719,13 @@
     cached_parse_data_->Initialize();
   }
 
-  DeserializeScopeChain(info, info->context(),
-                        Scope::DeserializationMode::kKeepScopeInfo);
+  DeserializeScopeChain(info, info->maybe_outer_scope_info());
 
   source = String::Flatten(source);
   FunctionLiteral* result;
 
   {
-    std::unique_ptr<Utf16CharacterStream> stream;
-    if (source->IsExternalTwoByteString()) {
-      stream.reset(new ExternalTwoByteStringUtf16CharacterStream(
-          Handle<ExternalTwoByteString>::cast(source), 0, source->length()));
-    } else if (source->IsExternalOneByteString()) {
-      stream.reset(new ExternalOneByteStringUtf16CharacterStream(
-          Handle<ExternalOneByteString>::cast(source), 0, source->length()));
-    } else {
-      stream.reset(
-          new GenericStringUtf16CharacterStream(source, 0, source->length()));
-    }
+    std::unique_ptr<Utf16CharacterStream> stream(ScannerStream::For(source));
     scanner_.Initialize(stream.get());
     result = DoParseProgram(info);
   }
@@ -835,27 +762,25 @@
   DCHECK_NULL(scope_state_);
   DCHECK_NULL(target_stack_);
 
-  Mode parsing_mode = FLAG_lazy && allow_lazy() ? PARSE_LAZILY : PARSE_EAGERLY;
-  if (allow_natives() || extension_ != NULL) parsing_mode = PARSE_EAGERLY;
+  Mode parsing_mode = allow_lazy() ? PARSE_LAZILY : PARSE_EAGERLY;
 
   FunctionLiteral* result = NULL;
   {
     Scope* outer = original_scope_;
-    // If there's a chance that there's a reference to global 'this', predeclare
-    // it as a dynamic global on the script scope.
-    if (outer->GetReceiverScope()->is_script_scope()) {
-      info->script_scope()->DeclareDynamicGlobal(
-          ast_value_factory()->this_string(), Variable::THIS);
-    }
-    DCHECK(outer);
+    DCHECK_NOT_NULL(outer);
+    parsing_module_ = info->is_module();
     if (info->is_eval()) {
       if (!outer->is_script_scope() || is_strict(info->language_mode())) {
         parsing_mode = PARSE_EAGERLY;
       }
       outer = NewEvalScope(outer);
-    } else if (info->is_module()) {
+    } else if (parsing_module_) {
       DCHECK_EQ(outer, info->script_scope());
       outer = NewModuleScope(info->script_scope());
+      // Never do lazy parsing in modules.  If we want to support this in the
+      // future, we must force context-allocation for all variables that are
+      // declared at the module level but not MODULE-allocated.
+      parsing_mode = PARSE_EAGERLY;
     }
 
     DeclarationScope* scope = outer->AsDeclarationScope();
@@ -864,14 +789,29 @@
 
     // Enter 'scope' with the given parsing mode.
     ParsingModeScope parsing_mode_scope(this, parsing_mode);
-    FunctionState function_state(&function_state_, &scope_state_, scope,
-                                 kNormalFunction);
+    FunctionState function_state(&function_state_, &scope_state_, scope);
 
     ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16, zone());
     bool ok = true;
     int beg_pos = scanner()->location().beg_pos;
-    parsing_module_ = info->is_module();
     if (parsing_module_) {
+      // Declare the special module parameter.
+      auto name = ast_value_factory()->empty_string();
+      bool is_duplicate;
+      bool is_rest = false;
+      bool is_optional = false;
+      auto var = scope->DeclareParameter(name, VAR, is_optional, is_rest,
+                                         &is_duplicate, ast_value_factory());
+      DCHECK(!is_duplicate);
+      var->AllocateTo(VariableLocation::PARAMETER, 0);
+
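+      // Modules are set up like generator bodies: allocate the generator
+      // variables and begin the body with an initial yield.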
+      PrepareGeneratorVariables(&function_state);
+      Expression* initial_yield =
+          BuildInitialYield(kNoSourcePosition, kGeneratorFunction);
+      body->Add(
+          factory()->NewExpressionStatement(initial_yield, kNoSourcePosition),
+          zone());
+
       ParseModuleItemList(body, &ok);
       ok = ok &&
            module()->Validate(this->scope()->AsModuleScope(),
@@ -889,7 +829,7 @@
 
     if (ok && is_strict(language_mode())) {
       CheckStrictOctalLiteral(beg_pos, scanner()->location().end_pos, &ok);
-      CheckDecimalLiteralWithLeadingZero(use_counts_, beg_pos,
+      CheckDecimalLiteralWithLeadingZero(beg_pos,
                                          scanner()->location().end_pos);
     }
     if (ok && is_sloppy(language_mode())) {
@@ -897,7 +837,7 @@
       // pre-existing bindings should be made writable, enumerable and
       // nonconfigurable if possible, whereas this code will leave attributes
       // unchanged if the property already exists.
-      InsertSloppyBlockFunctionVarBindings(scope, nullptr, &ok);
+      InsertSloppyBlockFunctionVarBindings(scope);
     }
     if (ok) {
       CheckConflictingVarDeclarations(scope, &ok);
@@ -915,9 +855,10 @@
 
     if (ok) {
       RewriteDestructuringAssignments();
+      int parameter_count = parsing_module_ ? 1 : 0;
       result = factory()->NewScriptOrEvalFunctionLiteral(
           scope, body, function_state.materialized_literal_count(),
-          function_state.expected_property_count());
+          function_state.expected_property_count(), parameter_count);
     }
   }
 
@@ -934,8 +875,7 @@
   DCHECK(parsing_on_main_thread_);
   RuntimeCallTimerScope runtime_timer(isolate, &RuntimeCallStats::ParseLazy);
   HistogramTimerScope timer_scope(isolate->counters()->parse_lazy());
-  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
-      isolate, &tracing::TraceEventStatsTable::ParseLazy);
+  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.ParseLazy");
   Handle<String> source(String::cast(info->script()->source()));
   isolate->counters()->total_parse_size()->Increment(source->length());
   base::ElapsedTimer timer;
@@ -943,26 +883,14 @@
     timer.Start();
   }
   Handle<SharedFunctionInfo> shared_info = info->shared_info();
-  DeserializeScopeChain(info, info->context(),
-                        Scope::DeserializationMode::kKeepScopeInfo);
+  DeserializeScopeChain(info, info->maybe_outer_scope_info());
 
   // Initialize parser state.
   source = String::Flatten(source);
   FunctionLiteral* result;
   {
-    std::unique_ptr<Utf16CharacterStream> stream;
-    if (source->IsExternalTwoByteString()) {
-      stream.reset(new ExternalTwoByteStringUtf16CharacterStream(
-          Handle<ExternalTwoByteString>::cast(source),
-          shared_info->start_position(), shared_info->end_position()));
-    } else if (source->IsExternalOneByteString()) {
-      stream.reset(new ExternalOneByteStringUtf16CharacterStream(
-          Handle<ExternalOneByteString>::cast(source),
-          shared_info->start_position(), shared_info->end_position()));
-    } else {
-      stream.reset(new GenericStringUtf16CharacterStream(
-          source, shared_info->start_position(), shared_info->end_position()));
-    }
+    std::unique_ptr<Utf16CharacterStream> stream(ScannerStream::For(
+        source, shared_info->start_position(), shared_info->end_position()));
     Handle<String> name(String::cast(shared_info->name()));
     result =
         DoParseLazy(info, ast_value_factory()->GetString(name), stream.get());
@@ -974,6 +902,8 @@
 
   if (FLAG_trace_parse && result != NULL) {
     double ms = timer.Elapsed().InMillisecondsF();
+    // We need to make sure that the debug-name is available.
+    ast_value_factory()->Internalize(isolate);
     std::unique_ptr<char[]> name_chars = result->debug_name()->ToCString();
     PrintF("[parsing function: %s - took %0.3f ms]\n", name_chars.get(), ms);
   }
@@ -1010,24 +940,20 @@
 
   {
     // Parse the function literal.
-    Scope* scope = original_scope_;
-    DCHECK(scope);
-    // If there's a chance that there's a reference to global 'this', predeclare
-    // it as a dynamic global on the script scope.
-    if (info->is_arrow() && scope->GetReceiverScope()->is_script_scope()) {
-      info->script_scope()->DeclareDynamicGlobal(
-          ast_value_factory()->this_string(), Variable::THIS);
-    }
-    FunctionState function_state(&function_state_, &scope_state_, scope,
-                                 info->function_kind());
-    DCHECK(is_sloppy(scope->language_mode()) ||
+    Scope* outer = original_scope_;
+    DeclarationScope* outer_function = outer->GetClosureScope();
+    DCHECK(outer);
+    FunctionState function_state(&function_state_, &scope_state_,
+                                 outer_function);
+    BlockState block_state(&scope_state_, outer);
+    DCHECK(is_sloppy(outer->language_mode()) ||
            is_strict(info->language_mode()));
     FunctionLiteral::FunctionType function_type = ComputeFunctionType(info);
+    FunctionKind kind = info->function_kind();
     bool ok = true;
 
-    if (info->is_arrow()) {
-      bool is_async = allow_harmony_async_await() && info->is_async();
-      if (is_async) {
+    if (IsArrowFunction(kind)) {
+      if (allow_harmony_async_await() && IsAsyncFunction(kind)) {
         DCHECK(!scanner()->HasAnyLineTerminatorAfterNext());
         if (!Check(Token::ASYNC)) {
           CHECK(stack_overflow());
@@ -1040,7 +966,7 @@
       }
 
       // TODO(adamk): We should construct this scope from the ScopeInfo.
-      DeclarationScope* scope = NewFunctionScope(FunctionKind::kArrowFunction);
+      DeclarationScope* scope = NewFunctionScope(kind);
 
       // These two bits only need to be explicitly set because we're
       // not passing the ScopeInfo to the Scope constructor.
@@ -1062,15 +988,12 @@
         BlockState block_state(&scope_state_, scope);
         if (Check(Token::LPAREN)) {
           // '(' StrictFormalParameters ')'
-          ParseFormalParameterList(&formals, &formals_classifier, &ok);
+          ParseFormalParameterList(&formals, &ok);
           if (ok) ok = Check(Token::RPAREN);
         } else {
           // BindingIdentifier
-          ParseFormalParameter(&formals, &formals_classifier, &ok);
-          if (ok) {
-            DeclareFormalParameter(formals.scope, formals.at(0),
-                                   &formals_classifier);
-          }
+          ParseFormalParameter(&formals, &ok);
+          if (ok) DeclareFormalParameter(formals.scope, formals.at(0));
         }
       }
 
@@ -1078,8 +1001,7 @@
         checkpoint.Restore(&formals.materialized_literals_count);
         // Pass `accept_IN=true` to ParseArrowFunctionLiteral --- This should
         // not be observable, or else the preparser would have failed.
-        Expression* expression = ParseArrowFunctionLiteral(
-            true, formals, is_async, formals_classifier, &ok);
+        Expression* expression = ParseArrowFunctionLiteral(true, formals, &ok);
         if (ok) {
           // Scanning must end at the same position that was recorded
           // previously. If not, parsing has been interrupted due to a stack
@@ -1097,16 +1019,31 @@
           }
         }
       }
-    } else if (info->is_default_constructor()) {
-      DCHECK_EQ(this->scope(), scope);
+    } else if (IsDefaultConstructor(kind)) {
+      DCHECK_EQ(scope(), outer);
+      bool is_subclass_constructor = IsSubclassConstructor(kind);
       result = DefaultConstructor(
-          raw_name, IsSubclassConstructor(info->function_kind()),
+          raw_name, is_subclass_constructor, info->requires_class_field_init(),
           info->start_position(), info->end_position(), info->language_mode());
+      if (!is_subclass_constructor && info->requires_class_field_init()) {
+        result = InsertClassFieldInitializer(result);
+      }
+    } else if (info->is_class_field_initializer()) {
+      Handle<SharedFunctionInfo> shared_info = info->shared_info();
+      DCHECK(!shared_info.is_null());
+      if (shared_info->length() == 0) {
+        result = ParseClassFieldForInitializer(
+            info->start_position() != info->end_position(), &ok);
+      } else {
+        result = SynthesizeClassFieldInitializer(shared_info->length());
+      }
     } else {
-      result = ParseFunctionLiteral(raw_name, Scanner::Location::invalid(),
-                                    kSkipFunctionNameCheck,
-                                    info->function_kind(), kNoSourcePosition,
-                                    function_type, info->language_mode(), &ok);
+      result = ParseFunctionLiteral(
+          raw_name, Scanner::Location::invalid(), kSkipFunctionNameCheck, kind,
+          kNoSourcePosition, function_type, info->language_mode(), &ok);
+      if (info->requires_class_field_init()) {
+        result = InsertClassFieldInitializer(result);
+      }
     }
     // Make sure the results agree.
     DCHECK(ok == (result != nullptr));
@@ -1117,131 +1054,6 @@
   return result;
 }
 
-
-void Parser::ParseStatementList(ZoneList<Statement*>* body, int end_token,
-                                bool* ok) {
-  // StatementList ::
-  //   (StatementListItem)* <end_token>
-
-  // Allocate a target stack to use for this set of source
-  // elements. This way, all scripts and functions get their own
-  // target stack thus avoiding illegal breaks and continues across
-  // functions.
-  TargetScope scope(&this->target_stack_);
-
-  DCHECK(body != NULL);
-  bool directive_prologue = true;     // Parsing directive prologue.
-
-  while (peek() != end_token) {
-    if (directive_prologue && peek() != Token::STRING) {
-      directive_prologue = false;
-    }
-
-    Scanner::Location token_loc = scanner()->peek_location();
-    Statement* stat = ParseStatementListItem(CHECK_OK_VOID);
-    if (stat == NULL || stat->IsEmpty()) {
-      directive_prologue = false;   // End of directive prologue.
-      continue;
-    }
-
-    if (directive_prologue) {
-      // A shot at a directive.
-      ExpressionStatement* e_stat;
-      Literal* literal;
-      // Still processing directive prologue?
-      if ((e_stat = stat->AsExpressionStatement()) != NULL &&
-          (literal = e_stat->expression()->AsLiteral()) != NULL &&
-          literal->raw_value()->IsString()) {
-        // Check "use strict" directive (ES5 14.1), "use asm" directive.
-        bool use_strict_found =
-            literal->raw_value()->AsString() ==
-                ast_value_factory()->use_strict_string() &&
-            token_loc.end_pos - token_loc.beg_pos ==
-                ast_value_factory()->use_strict_string()->length() + 2;
-        if (use_strict_found) {
-          if (is_sloppy(language_mode())) {
-            RaiseLanguageMode(STRICT);
-          }
-
-          if (!this->scope()->HasSimpleParameters()) {
-            // TC39 deemed "use strict" directives to be an error when occurring
-            // in the body of a function with non-simple parameter list, on
-            // 29/7/2015. https://goo.gl/ueA7Ln
-            const AstRawString* string = literal->raw_value()->AsString();
-            ReportMessageAt(token_loc,
-                            MessageTemplate::kIllegalLanguageModeDirective,
-                            string);
-            *ok = false;
-            return;
-          }
-          // Because declarations in strict eval code don't leak into the scope
-          // of the eval call, it is likely that functions declared in strict
-          // eval code will be used within the eval code, so lazy parsing is
-          // probably not a win.
-          if (this->scope()->is_eval_scope()) mode_ = PARSE_EAGERLY;
-        } else if (literal->raw_value()->AsString() ==
-                       ast_value_factory()->use_asm_string() &&
-                   token_loc.end_pos - token_loc.beg_pos ==
-                       ast_value_factory()->use_asm_string()->length() + 2) {
-          // Store the usage count; The actual use counter on the isolate is
-          // incremented after parsing is done.
-          ++use_counts_[v8::Isolate::kUseAsm];
-          DCHECK(this->scope()->is_declaration_scope());
-          this->scope()->AsDeclarationScope()->set_asm_module();
-        } else {
-          // Should not change mode, but will increment UseCounter
-          // if appropriate. Ditto usages below.
-          RaiseLanguageMode(SLOPPY);
-        }
-      } else {
-        // End of the directive prologue.
-        directive_prologue = false;
-        RaiseLanguageMode(SLOPPY);
-      }
-    } else {
-      RaiseLanguageMode(SLOPPY);
-    }
-
-    body->Add(stat, zone());
-  }
-}
-
-
-Statement* Parser::ParseStatementListItem(bool* ok) {
-  // (Ecma 262 6th Edition, 13.1):
-  // StatementListItem:
-  //    Statement
-  //    Declaration
-  const Token::Value peeked = peek();
-  switch (peeked) {
-    case Token::FUNCTION:
-      return ParseHoistableDeclaration(NULL, false, ok);
-    case Token::CLASS:
-      Consume(Token::CLASS);
-      return ParseClassDeclaration(NULL, false, ok);
-    case Token::CONST:
-      return ParseVariableStatement(kStatementListItem, NULL, ok);
-    case Token::VAR:
-      return ParseVariableStatement(kStatementListItem, NULL, ok);
-    case Token::LET:
-      if (IsNextLetKeyword()) {
-        return ParseVariableStatement(kStatementListItem, NULL, ok);
-      }
-      break;
-    case Token::ASYNC:
-      if (allow_harmony_async_await() && PeekAhead() == Token::FUNCTION &&
-          !scanner()->HasAnyLineTerminatorAfterNext()) {
-        Consume(Token::ASYNC);
-        return ParseAsyncFunctionDeclaration(NULL, false, ok);
-      }
-    /* falls through */
-    default:
-      break;
-  }
-  return ParseStatement(NULL, kAllowLabelledFunctionStatement, ok);
-}
-
-
 Statement* Parser::ParseModuleItem(bool* ok) {
   // ecma262/#prod-ModuleItem
   // ModuleItem :
@@ -1285,7 +1097,7 @@
   //    StringLiteral
 
   Expect(Token::STRING, CHECK_OK);
-  return GetSymbol(scanner());
+  return GetSymbol();
 }
 
 
@@ -1413,7 +1225,7 @@
   if (tok == Token::STRING) {
     const AstRawString* module_specifier = ParseModuleSpecifier(CHECK_OK_VOID);
     ExpectSemicolon(CHECK_OK_VOID);
-    module()->AddEmptyImport(module_specifier, scanner()->location(), zone());
+    module()->AddEmptyImport(module_specifier);
     return;
   }
 
@@ -1481,7 +1293,7 @@
 
   if (named_imports != nullptr) {
     if (named_imports->length() == 0) {
-      module()->AddEmptyImport(module_specifier, scanner()->location(), zone());
+      module()->AddEmptyImport(module_specifier);
     } else {
       for (int i = 0; i < named_imports->length(); ++i) {
         const NamedImport* import = named_imports->at(i);
@@ -1526,9 +1338,8 @@
     default: {
       int pos = position();
       ExpressionClassifier classifier(this);
-      Expression* value =
-          ParseAssignmentExpression(true, &classifier, CHECK_OK);
-      RewriteNonPattern(&classifier, CHECK_OK);
+      Expression* value = ParseAssignmentExpression(true, CHECK_OK);
+      RewriteNonPattern(CHECK_OK);
       SetFunctionName(value, ast_value_factory()->default_string());
 
       const AstRawString* local_name =
@@ -1621,8 +1432,7 @@
                               export_locations[i], zone());
         }
       } else if (length == 0) {
-        module()->AddEmptyImport(module_specifier, scanner()->location(),
-                                 zone());
+        module()->AddEmptyImport(module_specifier);
       } else {
         for (int i = 0; i < length; ++i) {
           module()->AddExport(original_names[i], export_names[i],
@@ -1673,141 +1483,8 @@
   return result;
 }
 
-Statement* Parser::ParseStatement(ZoneList<const AstRawString*>* labels,
-                                  AllowLabelledFunctionStatement allow_function,
-                                  bool* ok) {
-  // Statement ::
-  //   EmptyStatement
-  //   ...
-
-  if (peek() == Token::SEMICOLON) {
-    Next();
-    return factory()->NewEmptyStatement(kNoSourcePosition);
-  }
-  return ParseSubStatement(labels, allow_function, ok);
-}
-
-Statement* Parser::ParseSubStatement(
-    ZoneList<const AstRawString*>* labels,
-    AllowLabelledFunctionStatement allow_function, bool* ok) {
-  // Statement ::
-  //   Block
-  //   VariableStatement
-  //   EmptyStatement
-  //   ExpressionStatement
-  //   IfStatement
-  //   IterationStatement
-  //   ContinueStatement
-  //   BreakStatement
-  //   ReturnStatement
-  //   WithStatement
-  //   LabelledStatement
-  //   SwitchStatement
-  //   ThrowStatement
-  //   TryStatement
-  //   DebuggerStatement
-
-  // Note: Since labels can only be used by 'break' and 'continue'
-  // statements, which themselves are only valid within blocks,
-  // iterations or 'switch' statements (i.e., BreakableStatements),
-  // labels can be simply ignored in all other cases; except for
-  // trivial labeled break statements 'label: break label' which is
-  // parsed into an empty statement.
-  switch (peek()) {
-    case Token::LBRACE:
-      return ParseBlock(labels, ok);
-
-    case Token::SEMICOLON:
-      Next();
-      return factory()->NewEmptyStatement(kNoSourcePosition);
-
-    case Token::IF:
-      return ParseIfStatement(labels, ok);
-
-    case Token::DO:
-      return ParseDoWhileStatement(labels, ok);
-
-    case Token::WHILE:
-      return ParseWhileStatement(labels, ok);
-
-    case Token::FOR:
-      return ParseForStatement(labels, ok);
-
-    case Token::CONTINUE:
-    case Token::BREAK:
-    case Token::RETURN:
-    case Token::THROW:
-    case Token::TRY: {
-      // These statements must have their labels preserved in an enclosing
-      // block
-      if (labels == NULL) {
-        return ParseStatementAsUnlabelled(labels, ok);
-      } else {
-        Block* result =
-            factory()->NewBlock(labels, 1, false, kNoSourcePosition);
-        Target target(&this->target_stack_, result);
-        Statement* statement = ParseStatementAsUnlabelled(labels, CHECK_OK);
-        if (result) result->statements()->Add(statement, zone());
-        return result;
-      }
-    }
-
-    case Token::WITH:
-      return ParseWithStatement(labels, ok);
-
-    case Token::SWITCH:
-      return ParseSwitchStatement(labels, ok);
-
-    case Token::FUNCTION:
-      // FunctionDeclaration only allowed as a StatementListItem, not in
-      // an arbitrary Statement position. Exceptions such as
-      // ES#sec-functiondeclarations-in-ifstatement-statement-clauses
-      // are handled by calling ParseScopedStatement rather than
-      // ParseSubStatement directly.
-      ReportMessageAt(scanner()->peek_location(),
-                      is_strict(language_mode())
-                          ? MessageTemplate::kStrictFunction
-                          : MessageTemplate::kSloppyFunction);
-      *ok = false;
-      return nullptr;
-
-    case Token::DEBUGGER:
-      return ParseDebuggerStatement(ok);
-
-    case Token::VAR:
-      return ParseVariableStatement(kStatement, NULL, ok);
-
-    default:
-      return ParseExpressionOrLabelledStatement(labels, allow_function, ok);
-  }
-}
-
-Statement* Parser::ParseStatementAsUnlabelled(
-    ZoneList<const AstRawString*>* labels, bool* ok) {
-  switch (peek()) {
-    case Token::CONTINUE:
-      return ParseContinueStatement(ok);
-
-    case Token::BREAK:
-      return ParseBreakStatement(labels, ok);
-
-    case Token::RETURN:
-      return ParseReturnStatement(ok);
-
-    case Token::THROW:
-      return ParseThrowStatement(ok);
-
-    case Token::TRY:
-      return ParseTryStatement(ok);
-
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
-
 VariableProxy* Parser::NewUnresolved(const AstRawString* name, int begin_pos,
-                                     int end_pos, Variable::Kind kind) {
+                                     int end_pos, VariableKind kind) {
   return scope()->NewUnresolved(factory(), name, begin_pos, end_pos, kind);
 }
 
@@ -1816,25 +1493,19 @@
                                 scanner()->location().end_pos);
 }
 
-InitializationFlag Parser::DefaultInitializationFlag(VariableMode mode) {
-  DCHECK(IsDeclaredVariableMode(mode));
-  return mode == VAR ? kCreatedInitialized : kNeedsInitialization;
-}
-
 Declaration* Parser::DeclareVariable(const AstRawString* name,
                                      VariableMode mode, int pos, bool* ok) {
-  return DeclareVariable(name, mode, DefaultInitializationFlag(mode), pos, ok);
+  return DeclareVariable(name, mode, Variable::DefaultInitializationFlag(mode),
+                         pos, ok);
 }
 
 Declaration* Parser::DeclareVariable(const AstRawString* name,
                                      VariableMode mode, InitializationFlag init,
                                      int pos, bool* ok) {
   DCHECK_NOT_NULL(name);
-  Scope* scope =
-      IsLexicalVariableMode(mode) ? this->scope() : GetDeclarationScope();
-  VariableProxy* proxy =
-      scope->NewUnresolved(factory(), name, scanner()->location().beg_pos,
-                           scanner()->location().end_pos);
+  VariableProxy* proxy = factory()->NewVariableProxy(
+      name, NORMAL_VARIABLE, scanner()->location().beg_pos,
+      scanner()->location().end_pos);
   Declaration* declaration =
       factory()->NewVariableDeclaration(proxy, this->scope(), pos);
   Declare(declaration, DeclarationDescriptor::NORMAL, mode, init, CHECK_OK);
@@ -1845,132 +1516,99 @@
                           DeclarationDescriptor::Kind declaration_kind,
                           VariableMode mode, InitializationFlag init, bool* ok,
                           Scope* scope) {
-  DCHECK(IsDeclaredVariableMode(mode) && mode != CONST_LEGACY);
-
-  VariableProxy* proxy = declaration->proxy();
-  DCHECK(proxy->raw_name() != NULL);
-  const AstRawString* name = proxy->raw_name();
-
-  if (scope == nullptr) scope = this->scope();
-  if (mode == VAR) scope = scope->GetDeclarationScope();
-  DCHECK(!scope->is_catch_scope());
-  DCHECK(!scope->is_with_scope());
-  DCHECK(scope->is_declaration_scope() ||
-         (IsLexicalVariableMode(mode) && scope->is_block_scope()));
-
-  bool is_function_declaration = declaration->IsFunctionDeclaration();
-
-  Variable* var = NULL;
-  if (scope->is_eval_scope() && is_sloppy(scope->language_mode()) &&
-      mode == VAR) {
-    // In a var binding in a sloppy direct eval, pollute the enclosing scope
-    // with this new binding by doing the following:
-    // The proxy is bound to a lookup variable to force a dynamic declaration
-    // using the DeclareEvalVar or DeclareEvalFunction runtime functions.
-    Variable::Kind kind = Variable::NORMAL;
-    // TODO(sigurds) figure out if kNotAssigned is OK here
-    var = new (zone()) Variable(scope, name, mode, kind, init, kNotAssigned);
-    var->AllocateTo(VariableLocation::LOOKUP, -1);
-  } else {
-    // Declare the variable in the declaration scope.
-    var = scope->LookupLocal(name);
-    if (var == NULL) {
-      // Declare the name.
-      Variable::Kind kind = Variable::NORMAL;
-      if (is_function_declaration) {
-        kind = Variable::FUNCTION;
-      }
-      var = scope->DeclareLocal(name, mode, init, kind, kNotAssigned);
-    } else if (IsLexicalVariableMode(mode) ||
-               IsLexicalVariableMode(var->mode())) {
-      // Allow duplicate function decls for web compat, see bug 4693.
-      bool duplicate_allowed = false;
-      if (is_sloppy(scope->language_mode()) && is_function_declaration &&
-          var->is_function()) {
-        DCHECK(IsLexicalVariableMode(mode) &&
-               IsLexicalVariableMode(var->mode()));
-        // If the duplication is allowed, then the var will show up
-        // in the SloppyBlockFunctionMap and the new FunctionKind
-        // will be a permitted duplicate.
-        FunctionKind function_kind =
-            declaration->AsFunctionDeclaration()->fun()->kind();
-        duplicate_allowed =
-            scope->GetDeclarationScope()->sloppy_block_function_map()->Lookup(
-                const_cast<AstRawString*>(name), name->hash()) != nullptr &&
-            !IsAsyncFunction(function_kind) &&
-            !(allow_harmony_restrictive_generators() &&
-              IsGeneratorFunction(function_kind));
-      }
-      if (duplicate_allowed) {
-        ++use_counts_[v8::Isolate::kSloppyModeBlockScopedFunctionRedefinition];
-      } else {
-        // The name was declared in this scope before; check for conflicting
-        // re-declarations. We have a conflict if either of the declarations
-        // is not a var (in script scope, we also have to ignore legacy const
-        // for compatibility). There is similar code in runtime.cc in the
-        // Declare functions. The function CheckConflictingVarDeclarations
-        // checks for var and let bindings from different scopes whereas this
-        // is a check for conflicting declarations within the same scope. This
-        // check also covers the special case
-        //
-        // function () { let x; { var x; } }
-        //
-        // because the var declaration is hoisted to the function scope where
-        // 'x' is already bound.
-        DCHECK(IsDeclaredVariableMode(var->mode()));
-        // In harmony we treat re-declarations as early errors. See
-        // ES5 16 for a definition of early errors.
-        if (declaration_kind == DeclarationDescriptor::NORMAL) {
-          ReportMessage(MessageTemplate::kVarRedeclaration, name);
-        } else {
-          ReportMessage(MessageTemplate::kParamDupe);
-        }
-        *ok = false;
-        return nullptr;
-      }
-    } else if (mode == VAR) {
-      var->set_maybe_assigned();
-    }
+  if (scope == nullptr) {
+    scope = this->scope();
   }
-  DCHECK_NOT_NULL(var);
-
-  // We add a declaration node for every declaration. The compiler
-  // will only generate code if necessary. In particular, declarations
-  // for inner local variables that do not represent functions won't
-  // result in any generated code.
-  //
-  // This will lead to multiple declaration nodes for the
-  // same variable if it is declared several times. This is not a
-  // semantic issue, but it may be a performance issue since it may
-  // lead to repeated DeclareEvalVar or DeclareEvalFunction calls.
-  scope->AddDeclaration(declaration);
-  proxy->BindTo(var);
-  return var;
+  bool sloppy_mode_block_scope_function_redefinition = false;
+  Variable* variable = scope->DeclareVariable(
+      declaration, mode, init, allow_harmony_restrictive_generators(),
+      &sloppy_mode_block_scope_function_redefinition, ok);
+  if (!*ok) {
+    if (declaration_kind == DeclarationDescriptor::NORMAL) {
+      ReportMessage(MessageTemplate::kVarRedeclaration,
+                    declaration->proxy()->raw_name());
+    } else {
+      ReportMessage(MessageTemplate::kParamDupe);
+    }
+    return nullptr;
+  }
+  if (sloppy_mode_block_scope_function_redefinition) {
+    ++use_counts_[v8::Isolate::kSloppyModeBlockScopedFunctionRedefinition];
+  }
+  return variable;
 }
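
For reference, a minimal JavaScript sketch (not taken from the diff) of the conflict that Declare() now surfaces through Scope::DeclareVariable and reports as kVarRedeclaration: a var hoisted into a scope where the same name is already lexically bound.

  try {
    // The inner 'var x' hoists to the function scope, where 'x' is
    // already bound by 'let', so the program is rejected at parse time.
    eval("function f() { let x; { var x; } }");
  } catch (e) {
    console.log(e.name);  // "SyntaxError"
  }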
 
-
-// Language extension which is only enabled for source files loaded
-// through the API's extension mechanism.  A native function
-// declaration is resolved by looking up the function through a
-// callback provided by the extension.
-Statement* Parser::ParseNativeDeclaration(bool* ok) {
-  int pos = peek_position();
-  Expect(Token::FUNCTION, CHECK_OK);
-  // Allow "eval" or "arguments" for backward compatibility.
-  const AstRawString* name =
-      ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  bool done = (peek() == Token::RPAREN);
-  while (!done) {
-    ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-    done = (peek() == Token::RPAREN);
-    if (!done) {
-      Expect(Token::COMMA, CHECK_OK);
-    }
+Block* Parser::BuildInitializationBlock(
+    DeclarationParsingResult* parsing_result,
+    ZoneList<const AstRawString*>* names, bool* ok) {
+  Block* result = factory()->NewBlock(
+      NULL, 1, true, parsing_result->descriptor.declaration_pos);
+  for (auto declaration : parsing_result->declarations) {
+    PatternRewriter::DeclareAndInitializeVariables(
+        this, result, &(parsing_result->descriptor), &declaration, names,
+        CHECK_OK);
   }
-  Expect(Token::RPAREN, CHECK_OK);
-  Expect(Token::SEMICOLON, CHECK_OK);
+  return result;
+}
 
+void Parser::DeclareAndInitializeVariables(
+    Block* block, const DeclarationDescriptor* declaration_descriptor,
+    const DeclarationParsingResult::Declaration* declaration,
+    ZoneList<const AstRawString*>* names, bool* ok) {
+  DCHECK_NOT_NULL(block);
+  PatternRewriter::DeclareAndInitializeVariables(
+      this, block, declaration_descriptor, declaration, names, ok);
+}
+
+Statement* Parser::DeclareFunction(const AstRawString* variable_name,
+                                   FunctionLiteral* function, int pos,
+                                   bool is_generator, bool is_async,
+                                   ZoneList<const AstRawString*>* names,
+                                   bool* ok) {
+  // In ES6, a function behaves as a lexical binding, except in
+  // a script scope, or the initial scope of eval or another function.
+  VariableMode mode =
+      (!scope()->is_declaration_scope() || scope()->is_module_scope()) ? LET
+                                                                       : VAR;
+  VariableProxy* proxy =
+      factory()->NewVariableProxy(variable_name, NORMAL_VARIABLE);
+  Declaration* declaration =
+      factory()->NewFunctionDeclaration(proxy, function, scope(), pos);
+  Declare(declaration, DeclarationDescriptor::NORMAL, mode, kCreatedInitialized,
+          CHECK_OK);
+  if (names) names->Add(variable_name, zone());
+  // Async functions don't undergo sloppy mode block scoped hoisting, and don't
+  // allow duplicates in a block. Both are represented by the
+  // sloppy_block_function_map. Don't add them to the map for async functions.
+  // Generators are also supposed to be prohibited; currently this is done
+  // behind a flag, and UseCounter violations are recorded to assess web
+  // compatibility.
+  if (is_sloppy(language_mode()) && !scope()->is_declaration_scope() &&
+      !is_async && !(allow_harmony_restrictive_generators() && is_generator)) {
+    SloppyBlockFunctionStatement* delegate =
+        factory()->NewSloppyBlockFunctionStatement(scope());
+    DeclarationScope* target_scope = GetDeclarationScope();
+    target_scope->DeclareSloppyBlockFunction(variable_name, delegate);
+    return delegate;
+  }
+  return factory()->NewEmptyStatement(kNoSourcePosition);
+}
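
A minimal JavaScript sketch of the sloppy-mode block-scoped function hoisting that the SloppyBlockFunctionStatement delegate above models, and of the async case that is deliberately kept out of the map; it assumes a non-strict script context (illustrative only).

  {
    function g() { return 1; }
  }
  console.log(typeof g);  // "function": g is hoisted out of the block
                          // to the enclosing declaration scope.

  {
    async function h() {}
  }
  console.log(typeof h);  // "undefined": async declarations stay block-scoped.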
+
+Statement* Parser::DeclareClass(const AstRawString* variable_name,
+                                Expression* value,
+                                ZoneList<const AstRawString*>* names,
+                                int class_token_pos, int end_pos, bool* ok) {
+  Declaration* decl =
+      DeclareVariable(variable_name, LET, class_token_pos, CHECK_OK);
+  decl->proxy()->var()->set_initializer_position(end_pos);
+  Assignment* assignment = factory()->NewAssignment(Token::INIT, decl->proxy(),
+                                                    value, class_token_pos);
+  Statement* assignment_statement =
+      factory()->NewExpressionStatement(assignment, kNoSourcePosition);
+  if (names) names->Add(variable_name, zone());
+  return assignment_statement;
+}
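
DeclareClass binds the class name with LET and an INIT assignment; a minimal sketch of the resulting "let C = class C { ... }" semantics (illustrative only):

  try {
    C;  // temporal dead zone: the lexical binding exists but is uninitialized
  } catch (e) {
    console.log(e.name);  // "ReferenceError"
  }
  class C {}
  console.log(typeof C);  // "function"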
+
+Statement* Parser::DeclareNative(const AstRawString* name, int pos, bool* ok) {
   // Make sure that the function containing the native declaration
   // isn't lazily compiled. The extension structures are only
   // accessible while parsing the first time not when reparsing
@@ -1989,734 +1627,102 @@
       pos);
 }
 
-Statement* Parser::ParseHoistableDeclaration(
-    ZoneList<const AstRawString*>* names, bool default_export, bool* ok) {
-  Expect(Token::FUNCTION, CHECK_OK);
-  int pos = position();
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
-  if (Check(Token::MUL)) {
-    flags |= ParseFunctionFlags::kIsGenerator;
-  }
-  return ParseHoistableDeclaration(pos, flags, names, default_export, ok);
-}
-
-Statement* Parser::ParseAsyncFunctionDeclaration(
-    ZoneList<const AstRawString*>* names, bool default_export, bool* ok) {
-  DCHECK_EQ(scanner()->current_token(), Token::ASYNC);
-  int pos = position();
-  if (scanner()->HasAnyLineTerminatorBeforeNext()) {
+ZoneList<const AstRawString*>* Parser::DeclareLabel(
+    ZoneList<const AstRawString*>* labels, VariableProxy* var, bool* ok) {
+  const AstRawString* label = var->raw_name();
+  // TODO(1240780): We don't check for redeclaration of labels
+  // during preparsing since keeping track of the set of active
+  // labels requires nontrivial changes to the way scopes are
+  // structured.  However, these are probably changes we want to
+  // make later anyway so we should go back and fix this then.
+  if (ContainsLabel(labels, label) || TargetStackContainsLabel(label)) {
+    ReportMessage(MessageTemplate::kLabelRedeclaration, label);
     *ok = false;
-    ReportUnexpectedToken(scanner()->current_token());
     return nullptr;
   }
-  Expect(Token::FUNCTION, CHECK_OK);
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsAsync;
-  return ParseHoistableDeclaration(pos, flags, names, default_export, ok);
+  if (labels == nullptr) {
+    labels = new (zone()) ZoneList<const AstRawString*>(1, zone());
+  }
+  labels->Add(label, zone());
+  // Remove the "ghost" variable that turned out to be a label
+  // from the top scope. This way, we don't try to resolve it
+  // during the scope processing.
+  scope()->RemoveUnresolved(var);
+  return labels;
 }
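
DeclareLabel rejects a label that is already active; a minimal JavaScript sketch of that check, and of the trivially labelled break that the parser turns into an empty statement (illustrative only):

  try {
    eval("foo: foo: ;");  // the same label twice on one nested statement
  } catch (e) {
    console.log(e.name);  // "SyntaxError" (label redeclaration)
  }
  l1: l2: l3: break l2;   // legal; effectively an empty statement
  console.log("reached");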
 
-Statement* Parser::ParseHoistableDeclaration(
-    int pos, ParseFunctionFlags flags, ZoneList<const AstRawString*>* names,
-    bool default_export, bool* ok) {
-  // FunctionDeclaration ::
-  //   'function' Identifier '(' FormalParameters ')' '{' FunctionBody '}'
-  //   'function' '(' FormalParameters ')' '{' FunctionBody '}'
-  // GeneratorDeclaration ::
-  //   'function' '*' Identifier '(' FormalParameters ')' '{' FunctionBody '}'
-  //   'function' '*' '(' FormalParameters ')' '{' FunctionBody '}'
-  //
-  // The anonymous forms are allowed iff [default_export] is true.
-  //
-  // 'function' and '*' (if present) have been consumed by the caller.
-
-  const bool is_generator = flags & ParseFunctionFlags::kIsGenerator;
-  const bool is_async = flags & ParseFunctionFlags::kIsAsync;
-  DCHECK(!is_generator || !is_async);
-
-  const AstRawString* name;
-  FunctionNameValidity name_validity;
-  const AstRawString* variable_name;
-  if (default_export && peek() == Token::LPAREN) {
-    name = ast_value_factory()->default_string();
-    name_validity = kSkipFunctionNameCheck;
-    variable_name = ast_value_factory()->star_default_star_string();
-  } else {
-    bool is_strict_reserved;
-    name = ParseIdentifierOrStrictReservedWord(&is_strict_reserved, CHECK_OK);
-    name_validity = is_strict_reserved ? kFunctionNameIsStrictReserved
-                                       : kFunctionNameValidityUnknown;
-    variable_name = name;
-  }
-
-  FuncNameInferrer::State fni_state(fni_);
-  if (fni_ != NULL) fni_->PushEnclosingName(name);
-  FunctionLiteral* fun = ParseFunctionLiteral(
-      name, scanner()->location(), name_validity,
-      is_generator ? FunctionKind::kGeneratorFunction
-                   : is_async ? FunctionKind::kAsyncFunction
-                              : FunctionKind::kNormalFunction,
-      pos, FunctionLiteral::kDeclaration, language_mode(), CHECK_OK);
-
-  // In ES6, a function behaves as a lexical binding, except in
-  // a script scope, or the initial scope of eval or another function.
-  VariableMode mode =
-      (!scope()->is_declaration_scope() || scope()->is_module_scope()) ? LET
-                                                                       : VAR;
-  VariableProxy* proxy = NewUnresolved(variable_name);
-  Declaration* declaration =
-      factory()->NewFunctionDeclaration(proxy, fun, scope(), pos);
-  Declare(declaration, DeclarationDescriptor::NORMAL, mode, kCreatedInitialized,
-          CHECK_OK);
-  if (names) names->Add(variable_name, zone());
-  EmptyStatement* empty = factory()->NewEmptyStatement(kNoSourcePosition);
-  // Async functions don't undergo sloppy mode block scoped hoisting, and don't
-  // allow duplicates in a block. Both are represented by the
-  // sloppy_block_function_map. Don't add them to the map for async functions.
-  // Generators are also supposed to be prohibited; currently doing this behind
-  // a flag and UseCounting violations to assess web compatibility.
-  if (is_sloppy(language_mode()) && !scope()->is_declaration_scope() &&
-      !is_async && !(allow_harmony_restrictive_generators() && is_generator)) {
-    SloppyBlockFunctionStatement* delegate =
-        factory()->NewSloppyBlockFunctionStatement(empty, scope());
-    DeclarationScope* target_scope = GetDeclarationScope();
-    target_scope->DeclareSloppyBlockFunction(variable_name, delegate);
-    return delegate;
-  }
-  return empty;
-}
-
-Statement* Parser::ParseClassDeclaration(ZoneList<const AstRawString*>* names,
-                                         bool default_export, bool* ok) {
-  // ClassDeclaration ::
-  //   'class' Identifier ('extends' LeftHandExpression)? '{' ClassBody '}'
-  //   'class' ('extends' LeftHandExpression)? '{' ClassBody '}'
-  //
-  // The anonymous form is allowed iff [default_export] is true.
-  //
-  // 'class' is expected to be consumed by the caller.
-  //
-  // A ClassDeclaration
-  //
-  //   class C { ... }
-  //
-  // has the same semantics as:
-  //
-  //   let C = class C { ... };
-  //
-  // so rewrite it as such.
-
-  int pos = position();
-
-  const AstRawString* name;
-  bool is_strict_reserved;
-  const AstRawString* variable_name;
-  if (default_export && (peek() == Token::EXTENDS || peek() == Token::LBRACE)) {
-    name = ast_value_factory()->default_string();
-    is_strict_reserved = false;
-    variable_name = ast_value_factory()->star_default_star_string();
-  } else {
-    name = ParseIdentifierOrStrictReservedWord(&is_strict_reserved, CHECK_OK);
-    variable_name = name;
-  }
-
-  Expression* value = ParseClassLiteral(nullptr, name, scanner()->location(),
-                                        is_strict_reserved, pos, CHECK_OK);
-
-  Declaration* decl = DeclareVariable(variable_name, LET, pos, CHECK_OK);
-  decl->proxy()->var()->set_initializer_position(position());
-  Assignment* assignment =
-      factory()->NewAssignment(Token::INIT, decl->proxy(), value, pos);
-  Statement* assignment_statement =
-      factory()->NewExpressionStatement(assignment, kNoSourcePosition);
-  if (names) names->Add(variable_name, zone());
-  return assignment_statement;
-}
-
-Block* Parser::ParseBlock(ZoneList<const AstRawString*>* labels, bool* ok) {
-  // The harmony mode uses block elements instead of statements.
-  //
-  // Block ::
-  //   '{' StatementList '}'
-
-  // Construct block expecting 16 statements.
-  Block* body = factory()->NewBlock(labels, 16, false, kNoSourcePosition);
-
-  // Parse the statements and collect escaping labels.
-  Expect(Token::LBRACE, CHECK_OK);
-  {
-    BlockState block_state(&scope_state_);
-    block_state.set_start_position(scanner()->location().beg_pos);
-    Target target(&this->target_stack_, body);
-
-    while (peek() != Token::RBRACE) {
-      Statement* stat = ParseStatementListItem(CHECK_OK);
-      if (stat && !stat->IsEmpty()) {
-        body->statements()->Add(stat, zone());
-      }
-    }
-
-    Expect(Token::RBRACE, CHECK_OK);
-    block_state.set_end_position(scanner()->location().end_pos);
-    body->set_scope(block_state.FinalizedBlockScope());
-  }
-  return body;
-}
-
-
-Block* Parser::DeclarationParsingResult::BuildInitializationBlock(
-    ZoneList<const AstRawString*>* names, bool* ok) {
-  Block* result = descriptor.parser->factory()->NewBlock(
-      NULL, 1, true, descriptor.declaration_pos);
-  for (auto declaration : declarations) {
-    PatternRewriter::DeclareAndInitializeVariables(
-        result, &descriptor, &declaration, names, CHECK_OK);
-  }
-  return result;
-}
-
-
-Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context,
-                                      ZoneList<const AstRawString*>* names,
-                                      bool* ok) {
-  // VariableStatement ::
-  //   VariableDeclarations ';'
-
-  // The scope of a var declared variable anywhere inside a function
-  // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
-  // transform a source-level var declaration into a (Function) Scope
-  // declaration, and rewrite the source-level initialization into an assignment
-  // statement. We use a block to collect multiple assignments.
-  //
-  // We mark the block as initializer block because we don't want the
-  // rewriter to add a '.result' assignment to such a block (to get compliant
-  // behavior for code such as print(eval('var x = 7')), and for cosmetic
-  // reasons when pretty-printing. Also, unless an assignment (initialization)
-  // is inside an initializer block, it is ignored.
-
-  DeclarationParsingResult parsing_result;
-  Block* result =
-      ParseVariableDeclarations(var_context, &parsing_result, names, CHECK_OK);
-  ExpectSemicolon(CHECK_OK);
-  return result;
-}
-
-Block* Parser::ParseVariableDeclarations(
-    VariableDeclarationContext var_context,
-    DeclarationParsingResult* parsing_result,
-    ZoneList<const AstRawString*>* names, bool* ok) {
-  // VariableDeclarations ::
-  //   ('var' | 'const' | 'let') (Identifier ('=' AssignmentExpression)?)+[',']
-  //
-  // The ES6 Draft Rev3 specifies the following grammar for const declarations
-  //
-  // ConstDeclaration ::
-  //   const ConstBinding (',' ConstBinding)* ';'
-  // ConstBinding ::
-  //   Identifier '=' AssignmentExpression
-  //
-  // TODO(ES6):
-  // ConstBinding ::
-  //   BindingPattern '=' AssignmentExpression
-
-  parsing_result->descriptor.parser = this;
-  parsing_result->descriptor.declaration_kind = DeclarationDescriptor::NORMAL;
-  parsing_result->descriptor.declaration_pos = peek_position();
-  parsing_result->descriptor.initialization_pos = peek_position();
-  parsing_result->descriptor.mode = VAR;
-
-  Block* init_block = nullptr;
-  if (var_context != kForStatement) {
-    init_block = factory()->NewBlock(
-        NULL, 1, true, parsing_result->descriptor.declaration_pos);
-  }
-
-  if (peek() == Token::VAR) {
-    Consume(Token::VAR);
-  } else if (peek() == Token::CONST) {
-    Consume(Token::CONST);
-    DCHECK(var_context != kStatement);
-    parsing_result->descriptor.mode = CONST;
-  } else if (peek() == Token::LET) {
-    Consume(Token::LET);
-    DCHECK(var_context != kStatement);
-    parsing_result->descriptor.mode = LET;
-  } else {
-    UNREACHABLE();  // by current callers
-  }
-
-  parsing_result->descriptor.scope = scope();
-  parsing_result->descriptor.hoist_scope = nullptr;
-
-
-  bool first_declaration = true;
-  int bindings_start = peek_position();
-  do {
-    FuncNameInferrer::State fni_state(fni_);
-
-    // Parse name.
-    if (!first_declaration) Consume(Token::COMMA);
-
-    Expression* pattern;
-    int decl_pos = peek_position();
-    {
-      ExpressionClassifier pattern_classifier(this);
-      pattern = ParsePrimaryExpression(&pattern_classifier, CHECK_OK);
-      ValidateBindingPattern(&pattern_classifier, CHECK_OK);
-      if (IsLexicalVariableMode(parsing_result->descriptor.mode)) {
-        ValidateLetPattern(&pattern_classifier, CHECK_OK);
-      }
-    }
-
-    Scanner::Location variable_loc = scanner()->location();
-    const AstRawString* single_name =
-        pattern->IsVariableProxy() ? pattern->AsVariableProxy()->raw_name()
-                                   : nullptr;
-    if (single_name != nullptr) {
-      if (fni_ != NULL) fni_->PushVariableName(single_name);
-    }
-
-    Expression* value = NULL;
-    int initializer_position = kNoSourcePosition;
-    if (Check(Token::ASSIGN)) {
-      ExpressionClassifier classifier(this);
-      value = ParseAssignmentExpression(var_context != kForStatement,
-                                        &classifier, CHECK_OK);
-      RewriteNonPattern(&classifier, CHECK_OK);
-      variable_loc.end_pos = scanner()->location().end_pos;
-
-      if (!parsing_result->first_initializer_loc.IsValid()) {
-        parsing_result->first_initializer_loc = variable_loc;
-      }
-
-      // Don't infer if it is "a = function(){...}();"-like expression.
-      if (single_name) {
-        if (fni_ != NULL && value->AsCall() == NULL &&
-            value->AsCallNew() == NULL) {
-          fni_->Infer();
-        } else {
-          fni_->RemoveLastFunction();
-        }
-      }
-
-      ParserBaseTraits<Parser>::SetFunctionNameFromIdentifierRef(value,
-                                                                 pattern);
-
-      // End position of the initializer is after the assignment expression.
-      initializer_position = scanner()->location().end_pos;
-    } else {
-      // Initializers may be either required or implied unless this is a
-      // for-in/of iteration variable.
-      if (var_context != kForStatement || !PeekInOrOf()) {
-        // ES6 'const' and binding patterns require initializers.
-        if (parsing_result->descriptor.mode == CONST ||
-            !pattern->IsVariableProxy()) {
-          ReportMessageAt(
-              Scanner::Location(decl_pos, scanner()->location().end_pos),
-              MessageTemplate::kDeclarationMissingInitializer,
-              !pattern->IsVariableProxy() ? "destructuring" : "const");
-          *ok = false;
-          return nullptr;
-        }
-
-        // 'let x' initializes 'x' to undefined.
-        if (parsing_result->descriptor.mode == LET) {
-          value = GetLiteralUndefined(position());
-        }
-      }
-
-      // End position of the initializer is after the variable.
-      initializer_position = position();
-    }
-
-    DeclarationParsingResult::Declaration decl(pattern, initializer_position,
-                                               value);
-    if (var_context == kForStatement) {
-      // Save the declaration for further handling in ParseForStatement.
-      parsing_result->declarations.Add(decl);
-    } else {
-      // Immediately declare the variable otherwise. This avoids O(N^2)
-      // behavior (where N is the number of variables in a single
-      // declaration) in the PatternRewriter having to do with removing
-      // and adding VariableProxies to the Scope (see bug 4699).
-      DCHECK_NOT_NULL(init_block);
-      PatternRewriter::DeclareAndInitializeVariables(
-          init_block, &parsing_result->descriptor, &decl, names, CHECK_OK);
-    }
-    first_declaration = false;
-  } while (peek() == Token::COMMA);
-
-  parsing_result->bindings_loc =
-      Scanner::Location(bindings_start, scanner()->location().end_pos);
-
-  DCHECK(*ok);
-  return init_block;
-}
-
-
-static bool ContainsLabel(ZoneList<const AstRawString*>* labels,
-                          const AstRawString* label) {
-  DCHECK(label != NULL);
-  if (labels != NULL) {
-    for (int i = labels->length(); i-- > 0; ) {
-      if (labels->at(i) == label) {
-        return true;
-      }
+bool Parser::ContainsLabel(ZoneList<const AstRawString*>* labels,
+                           const AstRawString* label) {
+  DCHECK_NOT_NULL(label);
+  if (labels != nullptr) {
+    for (int i = labels->length(); i-- > 0;) {
+      if (labels->at(i) == label) return true;
     }
   }
   return false;
 }
 
-Statement* Parser::ParseFunctionDeclaration(bool* ok) {
-  Consume(Token::FUNCTION);
-  int pos = position();
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
-  if (Check(Token::MUL)) {
-    flags |= ParseFunctionFlags::kIsGenerator;
-    if (allow_harmony_restrictive_declarations()) {
-      ReportMessageAt(scanner()->location(),
-                      MessageTemplate::kGeneratorInLegacyContext);
-      *ok = false;
-      return nullptr;
-    }
+Expression* Parser::RewriteReturn(Expression* return_value, int pos) {
+  if (IsSubclassConstructor(function_state_->kind())) {
+    // For subclass constructors we need to return 'this' in case of
+    // undefined, and a Smi (transformed into an exception in the
+    // ConstructStub) for a non-object.
+    //
+    //   return expr;
+    //
+    // Is rewritten as:
+    //
+    //   return (temp = expr) === undefined ? this :
+    //       %_IsJSReceiver(temp) ? temp : 1;
+
+    // temp = expr
+    Variable* temp = NewTemporary(ast_value_factory()->empty_string());
+    Assignment* assign = factory()->NewAssignment(
+        Token::ASSIGN, factory()->NewVariableProxy(temp), return_value, pos);
+
+    // %_IsJSReceiver(temp)
+    ZoneList<Expression*>* is_spec_object_args =
+        new (zone()) ZoneList<Expression*>(1, zone());
+    is_spec_object_args->Add(factory()->NewVariableProxy(temp), zone());
+    Expression* is_spec_object_call = factory()->NewCallRuntime(
+        Runtime::kInlineIsJSReceiver, is_spec_object_args, pos);
+
+    // %_IsJSReceiver(temp) ? temp : 1;
+    Expression* is_object_conditional = factory()->NewConditional(
+        is_spec_object_call, factory()->NewVariableProxy(temp),
+        factory()->NewSmiLiteral(1, pos), pos);
+
+    // temp === undefined
+    Expression* is_undefined = factory()->NewCompareOperation(
+        Token::EQ_STRICT, assign,
+        factory()->NewUndefinedLiteral(kNoSourcePosition), pos);
+
+    // is_undefined ? this : is_object_conditional
+    return_value = factory()->NewConditional(is_undefined, ThisExpression(pos),
+                                             is_object_conditional, pos);
   }
-
-  return ParseHoistableDeclaration(pos, flags, nullptr, false, CHECK_OK);
-}
-
-Statement* Parser::ParseExpressionOrLabelledStatement(
-    ZoneList<const AstRawString*>* labels,
-    AllowLabelledFunctionStatement allow_function, bool* ok) {
-  // ExpressionStatement | LabelledStatement ::
-  //   Expression ';'
-  //   Identifier ':' Statement
-  //
-  // ExpressionStatement[Yield] :
-  //   [lookahead ∉ {{, function, class, let [}] Expression[In, ?Yield] ;
-
-  int pos = peek_position();
-
-  switch (peek()) {
-    case Token::FUNCTION:
-    case Token::LBRACE:
-      UNREACHABLE();  // Always handled by the callers.
-    case Token::CLASS:
-      ReportUnexpectedToken(Next());
-      *ok = false;
-      return nullptr;
-    default:
-      break;
-  }
-
-  bool starts_with_idenfifier = peek_any_identifier();
-  Expression* expr = ParseExpression(true, CHECK_OK);
-  if (peek() == Token::COLON && starts_with_idenfifier && expr != NULL &&
-      expr->AsVariableProxy() != NULL &&
-      !expr->AsVariableProxy()->is_this()) {
-    // Expression is a single identifier, and not, e.g., a parenthesized
-    // identifier.
-    VariableProxy* var = expr->AsVariableProxy();
-    const AstRawString* label = var->raw_name();
-    // TODO(1240780): We don't check for redeclaration of labels
-    // during preparsing since keeping track of the set of active
-    // labels requires nontrivial changes to the way scopes are
-    // structured.  However, these are probably changes we want to
-    // make later anyway so we should go back and fix this then.
-    if (ContainsLabel(labels, label) || TargetStackContainsLabel(label)) {
-      ReportMessage(MessageTemplate::kLabelRedeclaration, label);
-      *ok = false;
-      return NULL;
-    }
-    if (labels == NULL) {
-      labels = new(zone()) ZoneList<const AstRawString*>(4, zone());
-    }
-    labels->Add(label, zone());
-    // Remove the "ghost" variable that turned out to be a label
-    // from the top scope. This way, we don't try to resolve it
-    // during the scope processing.
-    scope()->RemoveUnresolved(var);
-    Expect(Token::COLON, CHECK_OK);
-    // ES#sec-labelled-function-declarations Labelled Function Declarations
-    if (peek() == Token::FUNCTION && is_sloppy(language_mode())) {
-      if (allow_function == kAllowLabelledFunctionStatement) {
-        return ParseFunctionDeclaration(ok);
-      } else {
-        return ParseScopedStatement(labels, true, ok);
-      }
-    }
-    return ParseStatement(labels, kDisallowLabelledFunctionStatement, ok);
-  }
-
-  // If we have an extension, we allow a native function declaration.
-  // A native function declaration starts with "native function" with
-  // no line-terminator between the two words.
-  if (extension_ != NULL && peek() == Token::FUNCTION &&
-      !scanner()->HasAnyLineTerminatorBeforeNext() && expr != NULL &&
-      expr->AsVariableProxy() != NULL &&
-      expr->AsVariableProxy()->raw_name() ==
-          ast_value_factory()->native_string() &&
-      !scanner()->literal_contains_escapes()) {
-    return ParseNativeDeclaration(ok);
-  }
-
-  // Parsed expression statement, followed by semicolon.
-  ExpectSemicolon(CHECK_OK);
-  return factory()->NewExpressionStatement(expr, pos);
-}
-
-
-IfStatement* Parser::ParseIfStatement(ZoneList<const AstRawString*>* labels,
-                                      bool* ok) {
-  // IfStatement ::
-  //   'if' '(' Expression ')' Statement ('else' Statement)?
-
-  int pos = peek_position();
-  Expect(Token::IF, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  Expression* condition = ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-  Statement* then_statement = ParseScopedStatement(labels, false, CHECK_OK);
-  Statement* else_statement = NULL;
-  if (peek() == Token::ELSE) {
-    Next();
-    else_statement = ParseScopedStatement(labels, false, CHECK_OK);
-  } else {
-    else_statement = factory()->NewEmptyStatement(kNoSourcePosition);
-  }
-  return factory()->NewIfStatement(
-      condition, then_statement, else_statement, pos);
-}
-
-
-Statement* Parser::ParseContinueStatement(bool* ok) {
-  // ContinueStatement ::
-  //   'continue' Identifier? ';'
-
-  int pos = peek_position();
-  Expect(Token::CONTINUE, CHECK_OK);
-  const AstRawString* label = NULL;
-  Token::Value tok = peek();
-  if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
-      tok != Token::SEMICOLON && tok != Token::RBRACE && tok != Token::EOS) {
-    // ECMA allows "eval" or "arguments" as labels even in strict mode.
-    label = ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  }
-  IterationStatement* target = LookupContinueTarget(label, CHECK_OK);
-  if (target == NULL) {
-    // Illegal continue statement.
-    MessageTemplate::Template message = MessageTemplate::kIllegalContinue;
-    if (label != NULL) {
-      message = MessageTemplate::kUnknownLabel;
-    }
-    ReportMessage(message, label);
-    *ok = false;
-    return NULL;
-  }
-  ExpectSemicolon(CHECK_OK);
-  return factory()->NewContinueStatement(target, pos);
-}
-
-
-Statement* Parser::ParseBreakStatement(ZoneList<const AstRawString*>* labels,
-                                       bool* ok) {
-  // BreakStatement ::
-  //   'break' Identifier? ';'
-
-  int pos = peek_position();
-  Expect(Token::BREAK, CHECK_OK);
-  const AstRawString* label = NULL;
-  Token::Value tok = peek();
-  if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
-      tok != Token::SEMICOLON && tok != Token::RBRACE && tok != Token::EOS) {
-    // ECMA allows "eval" or "arguments" as labels even in strict mode.
-    label = ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  }
-  // Parse labeled break statements that target themselves into
-  // empty statements, e.g. 'l1: l2: l3: break l2;'
-  if (label != NULL && ContainsLabel(labels, label)) {
-    ExpectSemicolon(CHECK_OK);
-    return factory()->NewEmptyStatement(pos);
-  }
-  BreakableStatement* target = NULL;
-  target = LookupBreakTarget(label, CHECK_OK);
-  if (target == NULL) {
-    // Illegal break statement.
-    MessageTemplate::Template message = MessageTemplate::kIllegalBreak;
-    if (label != NULL) {
-      message = MessageTemplate::kUnknownLabel;
-    }
-    ReportMessage(message, label);
-    *ok = false;
-    return NULL;
-  }
-  ExpectSemicolon(CHECK_OK);
-  return factory()->NewBreakStatement(target, pos);
-}
-
-
-Statement* Parser::ParseReturnStatement(bool* ok) {
-  // ReturnStatement ::
-  //   'return' Expression? ';'
-
-  // Consume the return token. It is necessary to do that before
-  // reporting any errors on it, because of the way errors are
-  // reported (underlining).
-  Expect(Token::RETURN, CHECK_OK);
-  Scanner::Location loc = scanner()->location();
-
-  Token::Value tok = peek();
-  Statement* result;
-  Expression* return_value;
-  if (scanner()->HasAnyLineTerminatorBeforeNext() ||
-      tok == Token::SEMICOLON ||
-      tok == Token::RBRACE ||
-      tok == Token::EOS) {
-    if (IsSubclassConstructor(function_state_->kind())) {
-      return_value = ThisExpression(loc.beg_pos);
-    } else {
-      return_value = GetLiteralUndefined(position());
-    }
-  } else {
-    int pos = peek_position();
-
-    if (IsSubclassConstructor(function_state_->kind())) {
-      // Because of the return code rewriting that happens in case of a subclass
-      // constructor we don't want to accept tail calls, therefore we don't set
-      // ReturnExprScope to kInsideValidReturnStatement here.
-      return_value = ParseExpression(true, CHECK_OK);
-
-      // For subclass constructors we need to return this in case of undefined
-      // return a Smi (transformed into an exception in the ConstructStub)
-      // for a non object.
-      //
-      //   return expr;
-      //
-      // Is rewritten as:
-      //
-      //   return (temp = expr) === undefined ? this :
-      //       %_IsJSReceiver(temp) ? temp : 1;
-
-      // temp = expr
-      Variable* temp = NewTemporary(ast_value_factory()->empty_string());
-      Assignment* assign = factory()->NewAssignment(
-          Token::ASSIGN, factory()->NewVariableProxy(temp), return_value, pos);
-
-      // %_IsJSReceiver(temp)
-      ZoneList<Expression*>* is_spec_object_args =
-          new (zone()) ZoneList<Expression*>(1, zone());
-      is_spec_object_args->Add(factory()->NewVariableProxy(temp), zone());
-      Expression* is_spec_object_call = factory()->NewCallRuntime(
-          Runtime::kInlineIsJSReceiver, is_spec_object_args, pos);
-
-      // %_IsJSReceiver(temp) ? temp : 1;
-      Expression* is_object_conditional = factory()->NewConditional(
-          is_spec_object_call, factory()->NewVariableProxy(temp),
-          factory()->NewSmiLiteral(1, pos), pos);
-
-      // temp === undefined
-      Expression* is_undefined = factory()->NewCompareOperation(
-          Token::EQ_STRICT, assign,
-          factory()->NewUndefinedLiteral(kNoSourcePosition), pos);
-
-      // is_undefined ? this : is_object_conditional
-      return_value = factory()->NewConditional(
-          is_undefined, ThisExpression(pos), is_object_conditional, pos);
-    } else {
-      ReturnExprScope maybe_allow_tail_calls(
-          function_state_, ReturnExprContext::kInsideValidReturnStatement);
-      return_value = ParseExpression(true, CHECK_OK);
-
-      if (allow_tailcalls() && !is_sloppy(language_mode()) && !is_resumable()) {
-        // ES6 14.6.1 Static Semantics: IsInTailPosition
-        function_state_->AddImplicitTailCallExpression(return_value);
-      }
-    }
-  }
-  ExpectSemicolon(CHECK_OK);
-
   if (is_generator()) {
     return_value = BuildIteratorResult(return_value, true);
   } else if (is_async_function()) {
-    return_value = BuildPromiseResolve(return_value, return_value->position());
+    return_value = BuildResolvePromise(return_value, return_value->position());
   }
+  return return_value;
+}
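
A minimal JavaScript sketch of the observable effect of the rewriting above for derived-class constructors (illustrative; the Smi marker becomes a TypeError in the ConstructStub):

  class Base {}
  class Derived extends Base {
    constructor(value) {
      super();
      return value;
    }
  }
  console.log(new Derived(undefined) instanceof Derived);  // true: 'this' is used
  console.log(new Derived({ x: 1 }).x);                    // 1: the object wins
  try {
    new Derived(42);                                       // non-object, non-undefined
  } catch (e) {
    console.log(e.name);                                   // "TypeError"
  }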
 
-  result = factory()->NewReturnStatement(return_value, loc.beg_pos);
-
-  DeclarationScope* decl_scope = GetDeclarationScope();
-  if (decl_scope->is_script_scope() || decl_scope->is_eval_scope()) {
-    ReportMessageAt(loc, MessageTemplate::kIllegalReturn);
+Expression* Parser::RewriteDoExpression(Block* body, int pos, bool* ok) {
+  Variable* result = NewTemporary(ast_value_factory()->dot_result_string());
+  DoExpression* expr = factory()->NewDoExpression(body, result, pos);
+  if (!Rewriter::Rewrite(this, GetClosureScope(), expr, ast_value_factory())) {
     *ok = false;
-    return NULL;
+    return nullptr;
   }
-  return result;
+  return expr;
 }
 
-
-Statement* Parser::ParseWithStatement(ZoneList<const AstRawString*>* labels,
-                                      bool* ok) {
-  // WithStatement ::
-  //   'with' '(' Expression ')' Statement
-
-  Expect(Token::WITH, CHECK_OK);
-  int pos = position();
-
-  if (is_strict(language_mode())) {
-    ReportMessage(MessageTemplate::kStrictWith);
-    *ok = false;
-    return NULL;
-  }
-
-  Expect(Token::LPAREN, CHECK_OK);
-  Expression* expr = ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-
-  Scope* with_scope = NewScope(WITH_SCOPE);
-  Statement* body;
-  {
-    BlockState block_state(&scope_state_, with_scope);
-    with_scope->set_start_position(scanner()->peek_location().beg_pos);
-    body = ParseScopedStatement(labels, true, CHECK_OK);
-    with_scope->set_end_position(scanner()->location().end_pos);
-  }
-  return factory()->NewWithStatement(with_scope, expr, body, pos);
-}
-
-
-CaseClause* Parser::ParseCaseClause(bool* default_seen_ptr, bool* ok) {
-  // CaseClause ::
-  //   'case' Expression ':' StatementList
-  //   'default' ':' StatementList
-
-  Expression* label = NULL;  // NULL expression indicates default case
-  if (peek() == Token::CASE) {
-    Expect(Token::CASE, CHECK_OK);
-    label = ParseExpression(true, CHECK_OK);
-  } else {
-    Expect(Token::DEFAULT, CHECK_OK);
-    if (*default_seen_ptr) {
-      ReportMessage(MessageTemplate::kMultipleDefaultsInSwitch);
-      *ok = false;
-      return NULL;
-    }
-    *default_seen_ptr = true;
-  }
-  Expect(Token::COLON, CHECK_OK);
-  int pos = position();
-  ZoneList<Statement*>* statements =
-      new(zone()) ZoneList<Statement*>(5, zone());
-  Statement* stat = NULL;
-  while (peek() != Token::CASE &&
-         peek() != Token::DEFAULT &&
-         peek() != Token::RBRACE) {
-    stat = ParseStatementListItem(CHECK_OK);
-    statements->Add(stat, zone());
-  }
-  return factory()->NewCaseClause(label, statements, pos);
-}
-
-
-Statement* Parser::ParseSwitchStatement(ZoneList<const AstRawString*>* labels,
-                                        bool* ok) {
-  // SwitchStatement ::
-  //   'switch' '(' Expression ')' '{' CaseClause* '}'
+Statement* Parser::RewriteSwitchStatement(Expression* tag,
+                                          SwitchStatement* switch_statement,
+                                          ZoneList<CaseClause*>* cases,
+                                          Scope* scope) {
   // In order to get the CaseClauses to execute in their own lexical scope,
   // but without requiring downstream code to have special scope handling
   // code for switch statements, desugar into blocks as follows:
@@ -2728,12 +1734,6 @@
   // }
 
   Block* switch_block = factory()->NewBlock(NULL, 2, false, kNoSourcePosition);
-  int switch_pos = peek_position();
-
-  Expect(Token::SWITCH, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  Expression* tag = ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
 
   Variable* tag_variable =
       NewTemporary(ast_value_factory()->dot_switch_tag_string());
@@ -2752,301 +1752,112 @@
           factory()->NewUndefinedLiteral(kNoSourcePosition), kNoSourcePosition),
       zone());
 
+  Expression* tag_read = factory()->NewVariableProxy(tag_variable);
+  switch_statement->Initialize(tag_read, cases);
   Block* cases_block = factory()->NewBlock(NULL, 1, false, kNoSourcePosition);
-
-  SwitchStatement* switch_statement =
-      factory()->NewSwitchStatement(labels, switch_pos);
-
-  {
-    BlockState cases_block_state(&scope_state_);
-    cases_block_state.set_start_position(scanner()->location().beg_pos);
-    cases_block_state.SetNonlinear();
-    Target target(&this->target_stack_, switch_statement);
-
-    Expression* tag_read = factory()->NewVariableProxy(tag_variable);
-
-    bool default_seen = false;
-    ZoneList<CaseClause*>* cases =
-        new (zone()) ZoneList<CaseClause*>(4, zone());
-    Expect(Token::LBRACE, CHECK_OK);
-    while (peek() != Token::RBRACE) {
-      CaseClause* clause = ParseCaseClause(&default_seen, CHECK_OK);
-      cases->Add(clause, zone());
-    }
-    switch_statement->Initialize(tag_read, cases);
-    cases_block->statements()->Add(switch_statement, zone());
-    Expect(Token::RBRACE, CHECK_OK);
-
-    cases_block_state.set_end_position(scanner()->location().end_pos);
-    cases_block->set_scope(cases_block_state.FinalizedBlockScope());
-  }
-
+  cases_block->statements()->Add(switch_statement, zone());
+  cases_block->set_scope(scope);
   switch_block->statements()->Add(cases_block, zone());
-
   return switch_block;
 }
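
A minimal JavaScript sketch of the single lexical scope that the cases_block above gives the CaseClauses (illustrative only):

  try {
    eval("switch (1) { case 1: let x = 1; break; case 2: let x = 2; }");
  } catch (e) {
    console.log(e.name);  // "SyntaxError": both clauses share one block scope
  }
  switch (1) {
    case 1:
      let y = "one";      // fine: a single declaration in the shared scope
      console.log(y);     // "one"
      break;
  }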
 
-
-Statement* Parser::ParseThrowStatement(bool* ok) {
-  // ThrowStatement ::
-  //   'throw' Expression ';'
-
-  Expect(Token::THROW, CHECK_OK);
-  int pos = position();
-  if (scanner()->HasAnyLineTerminatorBeforeNext()) {
-    ReportMessage(MessageTemplate::kNewlineAfterThrow);
-    *ok = false;
-    return NULL;
+void Parser::RewriteCatchPattern(CatchInfo* catch_info, bool* ok) {
+  if (catch_info->name == nullptr) {
+    DCHECK_NOT_NULL(catch_info->pattern);
+    catch_info->name = ast_value_factory()->dot_catch_string();
   }
-  Expression* exception = ParseExpression(true, CHECK_OK);
-  ExpectSemicolon(CHECK_OK);
+  catch_info->variable = catch_info->scope->DeclareLocal(
+      catch_info->name, VAR, kCreatedInitialized, NORMAL_VARIABLE);
+  if (catch_info->pattern != nullptr) {
+    DeclarationDescriptor descriptor;
+    descriptor.declaration_kind = DeclarationDescriptor::NORMAL;
+    descriptor.scope = scope();
+    descriptor.hoist_scope = nullptr;
+    descriptor.mode = LET;
+    descriptor.declaration_pos = catch_info->pattern->position();
+    descriptor.initialization_pos = catch_info->pattern->position();
 
-  return factory()->NewExpressionStatement(
-      factory()->NewThrow(exception, pos), pos);
+    // Initializer position for variables declared by the pattern.
+    const int initializer_position = position();
+
+    DeclarationParsingResult::Declaration decl(
+        catch_info->pattern, initializer_position,
+        factory()->NewVariableProxy(catch_info->variable));
+
+    catch_info->init_block =
+        factory()->NewBlock(nullptr, 8, true, kNoSourcePosition);
+    PatternRewriter::DeclareAndInitializeVariables(
+        this, catch_info->init_block, &descriptor, &decl,
+        &catch_info->bound_names, ok);
+  } else {
+    catch_info->bound_names.Add(catch_info->name, zone());
+  }
 }
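
A minimal JavaScript sketch of the destructuring catch parameter that RewriteCatchPattern lowers into the '.catch' variable plus a pattern-initialization block (illustrative only):

  try {
    throw { code: 42, message: "boom" };
  } catch ({ code, message }) {
    console.log(code, message);  // 42 "boom"
  }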
 
-
-TryStatement* Parser::ParseTryStatement(bool* ok) {
-  // TryStatement ::
-  //   'try' Block Catch
-  //   'try' Block Finally
-  //   'try' Block Catch Finally
-  //
-  // Catch ::
-  //   'catch' '(' Identifier ')' Block
-  //
-  // Finally ::
-  //   'finally' Block
-
-  Expect(Token::TRY, CHECK_OK);
-  int pos = position();
-
-  Block* try_block;
-  {
-    ReturnExprScope no_tail_calls(function_state_,
-                                  ReturnExprContext::kInsideTryBlock);
-    try_block = ParseBlock(NULL, CHECK_OK);
-  }
-
-  Token::Value tok = peek();
-
-  bool catch_for_promise_reject = false;
-  if (allow_natives() && tok == Token::MOD) {
-    Consume(Token::MOD);
-    catch_for_promise_reject = true;
-    tok = peek();
-  }
-
-  if (tok != Token::CATCH && tok != Token::FINALLY) {
-    ReportMessage(MessageTemplate::kNoCatchOrFinally);
-    *ok = false;
-    return NULL;
-  }
-
-  Scope* catch_scope = NULL;
-  Variable* catch_variable = NULL;
-  Block* catch_block = NULL;
-  TailCallExpressionList tail_call_expressions_in_catch_block(zone());
-  if (tok == Token::CATCH) {
-    Consume(Token::CATCH);
-
-    Expect(Token::LPAREN, CHECK_OK);
-    catch_scope = NewScope(CATCH_SCOPE);
-    catch_scope->set_start_position(scanner()->location().beg_pos);
-
-    {
-      CollectExpressionsInTailPositionToListScope
-          collect_tail_call_expressions_scope(
-              function_state_, &tail_call_expressions_in_catch_block);
-      BlockState block_state(&scope_state_, catch_scope);
-
-      catch_block = factory()->NewBlock(nullptr, 16, false, kNoSourcePosition);
-
-      // Create a block scope to hold any lexical declarations created
-      // as part of destructuring the catch parameter.
-      {
-        BlockState block_state(&scope_state_);
-        block_state.set_start_position(scanner()->location().beg_pos);
-        Target target(&this->target_stack_, catch_block);
-
-        const AstRawString* name = ast_value_factory()->dot_catch_string();
-        Expression* pattern = nullptr;
-        if (peek_any_identifier()) {
-          name = ParseIdentifier(kDontAllowRestrictedIdentifiers, CHECK_OK);
-        } else {
-          ExpressionClassifier pattern_classifier(this);
-          pattern = ParsePrimaryExpression(&pattern_classifier, CHECK_OK);
-          ValidateBindingPattern(&pattern_classifier, CHECK_OK);
-        }
-        catch_variable = catch_scope->DeclareLocal(
-            name, VAR, kCreatedInitialized, Variable::NORMAL);
-
-        Expect(Token::RPAREN, CHECK_OK);
-
-        ZoneList<const AstRawString*> bound_names(1, zone());
-        if (pattern != nullptr) {
-          DeclarationDescriptor descriptor;
-          descriptor.declaration_kind = DeclarationDescriptor::NORMAL;
-          descriptor.parser = this;
-          descriptor.scope = scope();
-          descriptor.hoist_scope = nullptr;
-          descriptor.mode = LET;
-          descriptor.declaration_pos = pattern->position();
-          descriptor.initialization_pos = pattern->position();
-
-          // Initializer position for variables declared by the pattern.
-          const int initializer_position = position();
-
-          DeclarationParsingResult::Declaration decl(
-              pattern, initializer_position,
-              factory()->NewVariableProxy(catch_variable));
-
-          Block* init_block =
-              factory()->NewBlock(nullptr, 8, true, kNoSourcePosition);
-          PatternRewriter::DeclareAndInitializeVariables(
-              init_block, &descriptor, &decl, &bound_names, CHECK_OK);
-          catch_block->statements()->Add(init_block, zone());
-        } else {
-          bound_names.Add(name, zone());
-        }
-
-        Block* inner_block = ParseBlock(nullptr, CHECK_OK);
-        catch_block->statements()->Add(inner_block, zone());
-
-        // Check for `catch(e) { let e; }` and similar errors.
-        Scope* inner_block_scope = inner_block->scope();
-        if (inner_block_scope != nullptr) {
-          Declaration* decl =
-              inner_block_scope->CheckLexDeclarationsConflictingWith(
-                  bound_names);
-          if (decl != nullptr) {
-            const AstRawString* name = decl->proxy()->raw_name();
-            int position = decl->proxy()->position();
-            Scanner::Location location =
-                position == kNoSourcePosition
-                    ? Scanner::Location::invalid()
-                    : Scanner::Location(position, position + 1);
-            ReportMessageAt(location, MessageTemplate::kVarRedeclaration, name);
-            *ok = false;
-            return nullptr;
-          }
-        }
-        block_state.set_end_position(scanner()->location().end_pos);
-        catch_block->set_scope(block_state.FinalizedBlockScope());
-      }
+void Parser::ValidateCatchBlock(const CatchInfo& catch_info, bool* ok) {
+  // Check for `catch(e) { let e; }` and similar errors.
+  Scope* inner_block_scope = catch_info.inner_block->scope();
+  if (inner_block_scope != nullptr) {
+    Declaration* decl = inner_block_scope->CheckLexDeclarationsConflictingWith(
+        catch_info.bound_names);
+    if (decl != nullptr) {
+      const AstRawString* name = decl->proxy()->raw_name();
+      int position = decl->proxy()->position();
+      Scanner::Location location =
+          position == kNoSourcePosition
+              ? Scanner::Location::invalid()
+              : Scanner::Location(position, position + 1);
+      ReportMessageAt(location, MessageTemplate::kVarRedeclaration, name);
+      *ok = false;
     }
-
-    catch_scope->set_end_position(scanner()->location().end_pos);
-    tok = peek();
   }
+}
 
-  Block* finally_block = NULL;
-  DCHECK(tok == Token::FINALLY || catch_block != NULL);
-  if (tok == Token::FINALLY) {
-    Consume(Token::FINALLY);
-    finally_block = ParseBlock(NULL, CHECK_OK);
-  }
-
+Statement* Parser::RewriteTryStatement(Block* try_block, Block* catch_block,
+                                       Block* finally_block,
+                                       const CatchInfo& catch_info, int pos) {
   // Simplify the AST nodes by converting:
   //   'try B0 catch B1 finally B2'
   // to:
   //   'try { try B0 catch B1 } finally B2'
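+  //
+  // For example, 'try { a() } catch (e) { b(e) } finally { c() }' is treated
+  // as 'try { try { a() } catch (e) { b(e) } } finally { c() }'.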
 
-  if (catch_block != NULL && finally_block != NULL) {
+  if (catch_block != nullptr && finally_block != nullptr) {
     // If we have both, create an inner try/catch.
-    DCHECK(catch_scope != NULL && catch_variable != NULL);
+    DCHECK_NOT_NULL(catch_info.scope);
+    DCHECK_NOT_NULL(catch_info.variable);
     TryCatchStatement* statement;
-    if (catch_for_promise_reject) {
+    if (catch_info.for_promise_reject) {
       statement = factory()->NewTryCatchStatementForPromiseReject(
-          try_block, catch_scope, catch_variable, catch_block,
+          try_block, catch_info.scope, catch_info.variable, catch_block,
           kNoSourcePosition);
     } else {
-      statement = factory()->NewTryCatchStatement(try_block, catch_scope,
-                                                  catch_variable, catch_block,
-                                                  kNoSourcePosition);
+      statement = factory()->NewTryCatchStatement(
+          try_block, catch_info.scope, catch_info.variable, catch_block,
+          kNoSourcePosition);
     }
 
-    try_block = factory()->NewBlock(NULL, 1, false, kNoSourcePosition);
+    try_block = factory()->NewBlock(nullptr, 1, false, kNoSourcePosition);
     try_block->statements()->Add(statement, zone());
-    catch_block = NULL;  // Clear to indicate it's been handled.
+    catch_block = nullptr;  // Clear to indicate it's been handled.
   }
 
-  TryStatement* result = NULL;
-  if (catch_block != NULL) {
+  if (catch_block != nullptr) {
     // For a try-catch construct append return expressions from the catch block
     // to the list of return expressions.
     function_state_->tail_call_expressions().Append(
-        tail_call_expressions_in_catch_block);
+        catch_info.tail_call_expressions);
 
-    DCHECK(finally_block == NULL);
-    DCHECK(catch_scope != NULL && catch_variable != NULL);
-    result = factory()->NewTryCatchStatement(try_block, catch_scope,
-                                             catch_variable, catch_block, pos);
+    DCHECK_NULL(finally_block);
+    DCHECK_NOT_NULL(catch_info.scope);
+    DCHECK_NOT_NULL(catch_info.variable);
+    return factory()->NewTryCatchStatement(
+        try_block, catch_info.scope, catch_info.variable, catch_block, pos);
   } else {
-    if (FLAG_harmony_explicit_tailcalls &&
-        tail_call_expressions_in_catch_block.has_explicit_tail_calls()) {
-      // TODO(ishell): update chapter number.
-      // ES8 XX.YY.ZZ
-      ReportMessageAt(tail_call_expressions_in_catch_block.location(),
-                      MessageTemplate::kUnexpectedTailCallInCatchBlock);
-      *ok = false;
-      return NULL;
-    }
-    DCHECK(finally_block != NULL);
-    result = factory()->NewTryFinallyStatement(try_block, finally_block, pos);
+    DCHECK_NOT_NULL(finally_block);
+    return factory()->NewTryFinallyStatement(try_block, finally_block, pos);
   }
-
-  return result;
 }
 
-
-DoWhileStatement* Parser::ParseDoWhileStatement(
-    ZoneList<const AstRawString*>* labels, bool* ok) {
-  // DoStatement ::
-  //   'do' Statement 'while' '(' Expression ')' ';'
-
-  DoWhileStatement* loop =
-      factory()->NewDoWhileStatement(labels, peek_position());
-  Target target(&this->target_stack_, loop);
-
-  Expect(Token::DO, CHECK_OK);
-  Statement* body = ParseScopedStatement(NULL, true, CHECK_OK);
-  Expect(Token::WHILE, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-
-  Expression* cond = ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-
-  // Allow do-statements to be terminated with and without
-  // semi-colons. This allows code such as 'do;while(0)return' to
-  // parse, which would not be the case if we had used the
-  // ExpectSemicolon() functionality here.
-  if (peek() == Token::SEMICOLON) Consume(Token::SEMICOLON);
-
-  if (loop != NULL) loop->Initialize(cond, body);
-  return loop;
-}
-
-
-WhileStatement* Parser::ParseWhileStatement(
-    ZoneList<const AstRawString*>* labels, bool* ok) {
-  // WhileStatement ::
-  //   'while' '(' Expression ')' Statement
-
-  WhileStatement* loop = factory()->NewWhileStatement(labels, peek_position());
-  Target target(&this->target_stack_, loop);
-
-  Expect(Token::WHILE, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  Expression* cond = ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-  Statement* body = ParseScopedStatement(NULL, true, CHECK_OK);
-
-  if (loop != NULL) loop->Initialize(cond, body);
-  return loop;
-}
-
-
 // !%_IsJSReceiver(result = iterator.next()) &&
 //     %ThrowIteratorResultNotAnObject(result)
 Expression* Parser::BuildIteratorNextResult(Expression* iterator,
@@ -3115,6 +1926,138 @@
   return stmt;
 }
 
+// Special case for the legacy for-in form
+//
+//    for (var x = initializer in enumerable) body
+//
+// An initialization block of the form
+//
+//    {
+//      x = initializer;
+//    }
+//
+// is returned in this case.  It has reserved space for two statements,
+// so that (later on during parsing), the equivalent of
+//
+//   for (x in enumerable) body
+//
+// is added as a second statement to it.
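+//
+// A concrete sketch (illustrative only; sloppy mode):
+//
+//   for (var x = 3 in {a: 1}) body   =>   { x = 3; for (x in {a: 1}) body }
+//
+// Strict mode, lexical bindings, and destructuring patterns never reach this
+// rewrite; an initializer in those cases is reported as an error instead.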
+Block* Parser::RewriteForVarInLegacy(const ForInfo& for_info) {
+  const DeclarationParsingResult::Declaration& decl =
+      for_info.parsing_result.declarations[0];
+  if (!IsLexicalVariableMode(for_info.parsing_result.descriptor.mode) &&
+      decl.pattern->IsVariableProxy() && decl.initializer != nullptr) {
+    DCHECK(!allow_harmony_for_in());
+    ++use_counts_[v8::Isolate::kForInInitializer];
+    const AstRawString* name = decl.pattern->AsVariableProxy()->raw_name();
+    VariableProxy* single_var = NewUnresolved(name);
+    Block* init_block = factory()->NewBlock(
+        nullptr, 2, true, for_info.parsing_result.descriptor.declaration_pos);
+    init_block->statements()->Add(
+        factory()->NewExpressionStatement(
+            factory()->NewAssignment(Token::ASSIGN, single_var,
+                                     decl.initializer, kNoSourcePosition),
+            kNoSourcePosition),
+        zone());
+    return init_block;
+  }
+  return nullptr;
+}
+
+// Rewrite a for-in/of statement of the form
+//
+//   for (let/const/var x in/of e) b
+//
+// into
+//
+//   {
+//     <let x' be a temporary variable>
+//     for (x' in/of e) {
+//       let/const/var x;
+//       x = x';
+//       b;
+//     }
+//     let x;  // for TDZ
+//   }
+void Parser::DesugarBindingInForEachStatement(ForInfo* for_info,
+                                              Block** body_block,
+                                              Expression** each_variable,
+                                              bool* ok) {
+  DeclarationParsingResult::Declaration& decl =
+      for_info->parsing_result.declarations[0];
+  Variable* temp = NewTemporary(ast_value_factory()->dot_for_string());
+  auto each_initialization_block =
+      factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
+  {
+    auto descriptor = for_info->parsing_result.descriptor;
+    descriptor.declaration_pos = kNoSourcePosition;
+    descriptor.initialization_pos = kNoSourcePosition;
+    decl.initializer = factory()->NewVariableProxy(temp);
+
+    bool is_for_var_of =
+        for_info->mode == ForEachStatement::ITERATE &&
+        for_info->parsing_result.descriptor.mode == VariableMode::VAR;
+
+    PatternRewriter::DeclareAndInitializeVariables(
+        this, each_initialization_block, &descriptor, &decl,
+        (IsLexicalVariableMode(for_info->parsing_result.descriptor.mode) ||
+         is_for_var_of)
+            ? &for_info->bound_names
+            : nullptr,
+        CHECK_OK_VOID);
+
+    // Annex B.3.5 prohibits the form
+    // `try {} catch(e) { for (var e of {}); }`
+    // So if we are parsing a statement like `for (var ... of ...)`
+    // we need to walk up the scope chain and look for catch scopes
+    // which have a simple binding, then compare their binding against
+    // all of the names declared in the init of the for-of we're
+    // parsing.
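+    //
+    // For illustration (assuming these Annex B.3.5 rules):
+    //
+    //   try {} catch (e) { for (var e of []) {} }  // SyntaxError
+    //   try {} catch (e) { for (var e in {}) {} }  // allowed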
+    if (is_for_var_of) {
+      Scope* catch_scope = scope();
+      while (catch_scope != nullptr && !catch_scope->is_declaration_scope()) {
+        if (catch_scope->is_catch_scope()) {
+          auto name = catch_scope->catch_variable_name();
+          // If the catch variable is a simple binding and its name is also
+          // bound in the for-of head, this is a redeclaration error.
+          if (name != ast_value_factory()->dot_catch_string() &&
+              for_info->bound_names.Contains(name)) {
+            ReportMessageAt(for_info->parsing_result.bindings_loc,
+                            MessageTemplate::kVarRedeclaration, name);
+            *ok = false;
+            return;
+          }
+        }
+        catch_scope = catch_scope->outer_scope();
+      }
+    }
+  }
+
+  *body_block = factory()->NewBlock(nullptr, 3, false, kNoSourcePosition);
+  (*body_block)->statements()->Add(each_initialization_block, zone());
+  *each_variable = factory()->NewVariableProxy(temp, for_info->each_loc.beg_pos,
+                                               for_info->each_loc.end_pos);
+}
+
+// Create a TDZ for any lexically-bound names in for in/of statements.
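+//
+// The trailing 'let x;  // for TDZ' in the desugaring above is what makes the
+// iterable/enumerable expression observe the binding in its temporal dead
+// zone, e.g. (a sketch, assuming standard TDZ semantics):
+//
+//   for (let x of x) {}  // ReferenceError: 'x' used before initialization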
+Block* Parser::CreateForEachStatementTDZ(Block* init_block,
+                                         const ForInfo& for_info, bool* ok) {
+  if (IsLexicalVariableMode(for_info.parsing_result.descriptor.mode)) {
+    DCHECK_NULL(init_block);
+
+    init_block = factory()->NewBlock(nullptr, 1, false, kNoSourcePosition);
+
+    for (int i = 0; i < for_info.bound_names.length(); ++i) {
+      // TODO(adamk): This needs to be some sort of special
+      // INTERNAL variable that's invisible to the debugger
+      // but visible to everything else.
+      Declaration* tdz_decl = DeclareVariable(for_info.bound_names[i], LET,
+                                              kNoSourcePosition, CHECK_OK);
+      tdz_decl->proxy()->var()->set_initializer_position(position());
+    }
+  }
+  return init_block;
+}
+
 Statement* Parser::InitializeForOfStatement(ForOfStatement* for_of,
                                             Expression* each,
                                             Expression* iterable,
@@ -3138,8 +2081,7 @@
   {
     assign_iterator = factory()->NewAssignment(
         Token::ASSIGN, factory()->NewVariableProxy(iterator),
-        GetIterator(iterable, factory(), iterable->position()),
-        iterable->position());
+        GetIterator(iterable, iterable->position()), iterable->position());
   }
 
   // !%_IsJSReceiver(result = iterator.next()) &&
@@ -3240,9 +2182,8 @@
 }
 
 Statement* Parser::DesugarLexicalBindingsInForStatement(
-    Scope* inner_scope, VariableMode mode, ZoneList<const AstRawString*>* names,
     ForStatement* loop, Statement* init, Expression* cond, Statement* next,
-    Statement* body, bool* ok) {
+    Statement* body, Scope* inner_scope, const ForInfo& for_info, bool* ok) {
   // ES6 13.7.4.8 specifies that on each loop iteration the let variables are
   // copied into a new environment.  Moreover, the "next" statement must be
   // evaluated not in the environment of the just completed iteration but in
@@ -3280,11 +2221,11 @@
   //    }
   //  }
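+  //
+  // Observable behavior this preserves (an illustrative JS sketch, assuming
+  // standard ES6 semantics):
+  //
+  //   let fns = [];
+  //   for (let i = 0; i < 3; i++) fns.push(() => i);
+  //   fns.map(f => f());   // [0, 1, 2]: each closure captures its own 'i'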
 
-  DCHECK(names->length() > 0);
-  ZoneList<Variable*> temps(names->length(), zone());
+  DCHECK(for_info.bound_names.length() > 0);
+  ZoneList<Variable*> temps(for_info.bound_names.length(), zone());
 
-  Block* outer_block =
-      factory()->NewBlock(NULL, names->length() + 4, false, kNoSourcePosition);
+  Block* outer_block = factory()->NewBlock(
+      nullptr, for_info.bound_names.length() + 4, false, kNoSourcePosition);
 
   // Add statement: let/const x = i.
   outer_block->statements()->Add(init, zone());
@@ -3293,8 +2234,8 @@
 
   // For each lexical variable x:
   //   make statement: temp_x = x.
-  for (int i = 0; i < names->length(); i++) {
-    VariableProxy* proxy = NewUnresolved(names->at(i));
+  for (int i = 0; i < for_info.bound_names.length(); i++) {
+    VariableProxy* proxy = NewUnresolved(for_info.bound_names[i]);
     Variable* temp = NewTemporary(temp_name);
     VariableProxy* temp_proxy = factory()->NewVariableProxy(temp);
     Assignment* assignment = factory()->NewAssignment(Token::ASSIGN, temp_proxy,
@@ -3338,14 +2279,15 @@
   {
     BlockState block_state(&scope_state_, inner_scope);
 
-    Block* ignore_completion_block =
-        factory()->NewBlock(NULL, names->length() + 3, true, kNoSourcePosition);
-    ZoneList<Variable*> inner_vars(names->length(), zone());
+    Block* ignore_completion_block = factory()->NewBlock(
+        nullptr, for_info.bound_names.length() + 3, true, kNoSourcePosition);
+    ZoneList<Variable*> inner_vars(for_info.bound_names.length(), zone());
     // For each let variable x:
     //    make statement: let/const x = temp_x.
-    for (int i = 0; i < names->length(); i++) {
-      Declaration* decl =
-          DeclareVariable(names->at(i), mode, kNoSourcePosition, CHECK_OK);
+    for (int i = 0; i < for_info.bound_names.length(); i++) {
+      Declaration* decl = DeclareVariable(
+          for_info.bound_names[i], for_info.parsing_result.descriptor.mode,
+          kNoSourcePosition, CHECK_OK);
       inner_vars.Add(decl->proxy()->var(), zone());
       VariableProxy* temp_proxy = factory()->NewVariableProxy(temps.at(i));
       Assignment* assignment = factory()->NewAssignment(
@@ -3429,7 +2371,7 @@
 
       // Make the comma-separated list of temp_x = x assignments.
       int inner_var_proxy_pos = scanner()->location().beg_pos;
-      for (int i = 0; i < names->length(); i++) {
+      for (int i = 0; i < for_info.bound_names.length(); i++) {
         VariableProxy* temp_proxy = factory()->NewVariableProxy(temps.at(i));
         VariableProxy* proxy =
             factory()->NewVariableProxy(inner_vars.at(i), inner_var_proxy_pos);
@@ -3479,433 +2421,7 @@
   return outer_block;
 }
 
-Statement* Parser::ParseScopedStatement(ZoneList<const AstRawString*>* labels,
-                                        bool legacy, bool* ok) {
-  if (is_strict(language_mode()) || peek() != Token::FUNCTION ||
-      (legacy && allow_harmony_restrictive_declarations())) {
-    return ParseSubStatement(labels, kDisallowLabelledFunctionStatement, ok);
-  } else {
-    if (legacy) {
-      ++use_counts_[v8::Isolate::kLegacyFunctionDeclaration];
-    }
-    // Make a block around the statement for a lexical binding
-    // is introduced by a FunctionDeclaration.
-    BlockState block_state(&scope_state_);
-    block_state.set_start_position(scanner()->location().beg_pos);
-    Block* block = factory()->NewBlock(NULL, 1, false, kNoSourcePosition);
-    Statement* body = ParseFunctionDeclaration(CHECK_OK);
-    block->statements()->Add(body, zone());
-    block_state.set_end_position(scanner()->location().end_pos);
-    block->set_scope(block_state.FinalizedBlockScope());
-    return block;
-  }
-}
-
-Statement* Parser::ParseForStatement(ZoneList<const AstRawString*>* labels,
-                                     bool* ok) {
-  int stmt_pos = peek_position();
-  Statement* init = NULL;
-  ZoneList<const AstRawString*> bound_names(1, zone());
-  bool bound_names_are_lexical = false;
-
-  // Create an in-between scope for let-bound iteration variables.
-  BlockState for_state(&scope_state_);
-  Expect(Token::FOR, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  for_state.set_start_position(scanner()->location().beg_pos);
-  for_state.set_is_hidden();
-  DeclarationParsingResult parsing_result;
-  if (peek() != Token::SEMICOLON) {
-    if (peek() == Token::VAR || peek() == Token::CONST ||
-        (peek() == Token::LET && IsNextLetKeyword())) {
-      ParseVariableDeclarations(kForStatement, &parsing_result, nullptr,
-                                CHECK_OK);
-
-      ForEachStatement::VisitMode mode = ForEachStatement::ENUMERATE;
-      int each_beg_pos = scanner()->location().beg_pos;
-      int each_end_pos = scanner()->location().end_pos;
-
-      if (CheckInOrOf(&mode, ok)) {
-        if (!*ok) return nullptr;
-        if (parsing_result.declarations.length() != 1) {
-          ReportMessageAt(parsing_result.bindings_loc,
-                          MessageTemplate::kForInOfLoopMultiBindings,
-                          ForEachStatement::VisitModeString(mode));
-          *ok = false;
-          return nullptr;
-        }
-        DeclarationParsingResult::Declaration& decl =
-            parsing_result.declarations[0];
-        if (parsing_result.first_initializer_loc.IsValid() &&
-            (is_strict(language_mode()) || mode == ForEachStatement::ITERATE ||
-             IsLexicalVariableMode(parsing_result.descriptor.mode) ||
-             !decl.pattern->IsVariableProxy() || allow_harmony_for_in())) {
-          // Only increment the use count if we would have let this through
-          // without the flag.
-          if (allow_harmony_for_in()) {
-            ++use_counts_[v8::Isolate::kForInInitializer];
-          }
-          ReportMessageAt(parsing_result.first_initializer_loc,
-                          MessageTemplate::kForInOfLoopInitializer,
-                          ForEachStatement::VisitModeString(mode));
-          *ok = false;
-          return nullptr;
-        }
-
-        Block* init_block = nullptr;
-        bound_names_are_lexical =
-            IsLexicalVariableMode(parsing_result.descriptor.mode);
-
-        // special case for legacy for (var ... = ... in ...)
-        if (!bound_names_are_lexical && decl.pattern->IsVariableProxy() &&
-            decl.initializer != nullptr) {
-          DCHECK(!allow_harmony_for_in());
-          ++use_counts_[v8::Isolate::kForInInitializer];
-          const AstRawString* name =
-              decl.pattern->AsVariableProxy()->raw_name();
-          VariableProxy* single_var = NewUnresolved(name);
-          init_block = factory()->NewBlock(
-              nullptr, 2, true, parsing_result.descriptor.declaration_pos);
-          init_block->statements()->Add(
-              factory()->NewExpressionStatement(
-                  factory()->NewAssignment(Token::ASSIGN, single_var,
-                                           decl.initializer, kNoSourcePosition),
-                  kNoSourcePosition),
-              zone());
-        }
-
-        // Rewrite a for-in/of statement of the form
-        //
-        //   for (let/const/var x in/of e) b
-        //
-        // into
-        //
-        //   {
-        //     <let x' be a temporary variable>
-        //     for (x' in/of e) {
-        //       let/const/var x;
-        //       x = x';
-        //       b;
-        //     }
-        //     let x;  // for TDZ
-        //   }
-
-        Variable* temp = NewTemporary(ast_value_factory()->dot_for_string());
-        ForEachStatement* loop =
-            factory()->NewForEachStatement(mode, labels, stmt_pos);
-        Target target(&this->target_stack_, loop);
-
-        int each_keyword_position = scanner()->location().beg_pos;
-
-        Expression* enumerable;
-        if (mode == ForEachStatement::ITERATE) {
-          ExpressionClassifier classifier(this);
-          enumerable = ParseAssignmentExpression(true, &classifier, CHECK_OK);
-          RewriteNonPattern(&classifier, CHECK_OK);
-        } else {
-          enumerable = ParseExpression(true, CHECK_OK);
-        }
-
-        Expect(Token::RPAREN, CHECK_OK);
-
-
-        Block* body_block =
-            factory()->NewBlock(NULL, 3, false, kNoSourcePosition);
-
-        Statement* final_loop;
-        {
-          ReturnExprScope no_tail_calls(function_state_,
-                                        ReturnExprContext::kInsideForInOfBody);
-          BlockState block_state(&scope_state_);
-          block_state.set_start_position(scanner()->location().beg_pos);
-
-          Statement* body = ParseScopedStatement(NULL, true, CHECK_OK);
-
-          auto each_initialization_block =
-              factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
-          {
-            auto descriptor = parsing_result.descriptor;
-            descriptor.declaration_pos = kNoSourcePosition;
-            descriptor.initialization_pos = kNoSourcePosition;
-            decl.initializer = factory()->NewVariableProxy(temp);
-
-            bool is_for_var_of =
-                mode == ForEachStatement::ITERATE &&
-                parsing_result.descriptor.mode == VariableMode::VAR;
-
-            PatternRewriter::DeclareAndInitializeVariables(
-                each_initialization_block, &descriptor, &decl,
-                bound_names_are_lexical || is_for_var_of ? &bound_names
-                                                         : nullptr,
-                CHECK_OK);
-
-            // Annex B.3.5 prohibits the form
-            // `try {} catch(e) { for (var e of {}); }`
-            // So if we are parsing a statement like `for (var ... of ...)`
-            // we need to walk up the scope chain and look for catch scopes
-            // which have a simple binding, then compare their binding against
-            // all of the names declared in the init of the for-of we're
-            // parsing.
-            if (is_for_var_of) {
-              Scope* catch_scope = scope();
-              while (catch_scope != nullptr &&
-                     !catch_scope->is_declaration_scope()) {
-                if (catch_scope->is_catch_scope()) {
-                  auto name = catch_scope->catch_variable_name();
-                  if (name !=
-                      ast_value_factory()
-                          ->dot_catch_string()) {  // i.e. is a simple binding
-                    if (bound_names.Contains(name)) {
-                      ReportMessageAt(parsing_result.bindings_loc,
-                                      MessageTemplate::kVarRedeclaration, name);
-                      *ok = false;
-                      return nullptr;
-                    }
-                  }
-                }
-                catch_scope = catch_scope->outer_scope();
-              }
-            }
-          }
-
-          body_block->statements()->Add(each_initialization_block, zone());
-          body_block->statements()->Add(body, zone());
-          VariableProxy* temp_proxy =
-              factory()->NewVariableProxy(temp, each_beg_pos, each_end_pos);
-          final_loop = InitializeForEachStatement(
-              loop, temp_proxy, enumerable, body_block, each_keyword_position);
-          block_state.set_end_position(scanner()->location().end_pos);
-          body_block->set_scope(block_state.FinalizedBlockScope());
-        }
-
-        // Create a TDZ for any lexically-bound names.
-        if (bound_names_are_lexical) {
-          DCHECK_NULL(init_block);
-
-          init_block =
-              factory()->NewBlock(nullptr, 1, false, kNoSourcePosition);
-
-          for (int i = 0; i < bound_names.length(); ++i) {
-            // TODO(adamk): This needs to be some sort of special
-            // INTERNAL variable that's invisible to the debugger
-            // but visible to everything else.
-            Declaration* tdz_decl = DeclareVariable(
-                bound_names[i], LET, kNoSourcePosition, CHECK_OK);
-            tdz_decl->proxy()->var()->set_initializer_position(position());
-          }
-        }
-
-        for_state.set_end_position(scanner()->location().end_pos);
-        Scope* for_scope = for_state.FinalizedBlockScope();
-        // Parsed for-in loop w/ variable declarations.
-        if (init_block != nullptr) {
-          init_block->statements()->Add(final_loop, zone());
-          init_block->set_scope(for_scope);
-          return init_block;
-        } else {
-          DCHECK_NULL(for_scope);
-          return final_loop;
-        }
-      } else {
-        bound_names_are_lexical =
-            IsLexicalVariableMode(parsing_result.descriptor.mode);
-        init = parsing_result.BuildInitializationBlock(
-            bound_names_are_lexical ? &bound_names : nullptr, CHECK_OK);
-      }
-    } else {
-      int lhs_beg_pos = peek_position();
-      ExpressionClassifier classifier(this);
-      Expression* expression = ParseExpression(false, &classifier, CHECK_OK);
-      int lhs_end_pos = scanner()->location().end_pos;
-      ForEachStatement::VisitMode mode = ForEachStatement::ENUMERATE;
-
-      bool is_for_each = CheckInOrOf(&mode, CHECK_OK);
-      bool is_destructuring = is_for_each && (expression->IsArrayLiteral() ||
-                                              expression->IsObjectLiteral());
-
-      if (is_destructuring) {
-        ValidateAssignmentPattern(&classifier, CHECK_OK);
-      } else {
-        RewriteNonPattern(&classifier, CHECK_OK);
-      }
-
-      if (is_for_each) {
-        if (!is_destructuring) {
-          expression = this->CheckAndRewriteReferenceExpression(
-              expression, lhs_beg_pos, lhs_end_pos,
-              MessageTemplate::kInvalidLhsInFor, kSyntaxError, CHECK_OK);
-        }
-
-        ForEachStatement* loop =
-            factory()->NewForEachStatement(mode, labels, stmt_pos);
-        Target target(&this->target_stack_, loop);
-
-        int each_keyword_position = scanner()->location().beg_pos;
-
-        Expression* enumerable;
-        if (mode == ForEachStatement::ITERATE) {
-          ExpressionClassifier classifier(this);
-          enumerable = ParseAssignmentExpression(true, &classifier, CHECK_OK);
-          RewriteNonPattern(&classifier, CHECK_OK);
-        } else {
-          enumerable = ParseExpression(true, CHECK_OK);
-        }
-
-        Expect(Token::RPAREN, CHECK_OK);
-
-        // For legacy compat reasons, give for loops similar treatment to
-        // if statements in allowing a function declaration for a body
-        Statement* body = ParseScopedStatement(NULL, true, CHECK_OK);
-        Statement* final_loop = InitializeForEachStatement(
-            loop, expression, enumerable, body, each_keyword_position);
-
-        DCHECK_NULL(for_state.FinalizedBlockScope());
-        return final_loop;
-
-      } else {
-        init = factory()->NewExpressionStatement(expression, lhs_beg_pos);
-      }
-    }
-  }
-
-  // Standard 'for' loop
-  ForStatement* loop = factory()->NewForStatement(labels, stmt_pos);
-  Target target(&this->target_stack_, loop);
-
-  // Parsed initializer at this point.
-  Expect(Token::SEMICOLON, CHECK_OK);
-
-  Expression* cond = NULL;
-  Statement* next = NULL;
-  Statement* body = NULL;
-
-  // If there are let bindings, then condition and the next statement of the
-  // for loop must be parsed in a new scope.
-  Scope* inner_scope = scope();
-  // TODO(verwaest): Allocate this through a ScopeState as well.
-  if (bound_names_are_lexical && bound_names.length() > 0) {
-    inner_scope = NewScopeWithParent(inner_scope, BLOCK_SCOPE);
-    inner_scope->set_start_position(scanner()->location().beg_pos);
-  }
-  {
-    BlockState block_state(&scope_state_, inner_scope);
-
-    if (peek() != Token::SEMICOLON) {
-      cond = ParseExpression(true, CHECK_OK);
-    }
-    Expect(Token::SEMICOLON, CHECK_OK);
-
-    if (peek() != Token::RPAREN) {
-      Expression* exp = ParseExpression(true, CHECK_OK);
-      next = factory()->NewExpressionStatement(exp, exp->position());
-    }
-    Expect(Token::RPAREN, CHECK_OK);
-
-    body = ParseScopedStatement(NULL, true, CHECK_OK);
-  }
-
-  Statement* result = NULL;
-  if (bound_names_are_lexical && bound_names.length() > 0) {
-    result = DesugarLexicalBindingsInForStatement(
-        inner_scope, parsing_result.descriptor.mode, &bound_names, loop, init,
-        cond, next, body, CHECK_OK);
-    for_state.set_end_position(scanner()->location().end_pos);
-  } else {
-    for_state.set_end_position(scanner()->location().end_pos);
-    Scope* for_scope = for_state.FinalizedBlockScope();
-    if (for_scope) {
-      // Rewrite a for statement of the form
-      //   for (const x = i; c; n) b
-      //
-      // into
-      //
-      //   {
-      //     const x = i;
-      //     for (; c; n) b
-      //   }
-      //
-      // or, desugar
-      //   for (; c; n) b
-      // into
-      //   {
-      //     for (; c; n) b
-      //   }
-      // just in case b introduces a lexical binding some other way, e.g., if b
-      // is a FunctionDeclaration.
-      Block* block = factory()->NewBlock(NULL, 2, false, kNoSourcePosition);
-      if (init != nullptr) {
-        block->statements()->Add(init, zone());
-      }
-      block->statements()->Add(loop, zone());
-      block->set_scope(for_scope);
-      loop->Initialize(NULL, cond, next, body);
-      result = block;
-    } else {
-      loop->Initialize(init, cond, next, body);
-      result = loop;
-    }
-  }
-  return result;
-}
-
-
-DebuggerStatement* Parser::ParseDebuggerStatement(bool* ok) {
-  // In ECMA-262 'debugger' is defined as a reserved keyword. In some browser
-  // contexts this is used as a statement which invokes the debugger as i a
-  // break point is present.
-  // DebuggerStatement ::
-  //   'debugger' ';'
-
-  int pos = peek_position();
-  Expect(Token::DEBUGGER, CHECK_OK);
-  ExpectSemicolon(CHECK_OK);
-  return factory()->NewDebuggerStatement(pos);
-}
-
-
-bool CompileTimeValue::IsCompileTimeValue(Expression* expression) {
-  if (expression->IsLiteral()) return true;
-  MaterializedLiteral* lit = expression->AsMaterializedLiteral();
-  return lit != NULL && lit->is_simple();
-}
-
-
-Handle<FixedArray> CompileTimeValue::GetValue(Isolate* isolate,
-                                              Expression* expression) {
-  Factory* factory = isolate->factory();
-  DCHECK(IsCompileTimeValue(expression));
-  Handle<FixedArray> result = factory->NewFixedArray(2, TENURED);
-  ObjectLiteral* object_literal = expression->AsObjectLiteral();
-  if (object_literal != NULL) {
-    DCHECK(object_literal->is_simple());
-    if (object_literal->fast_elements()) {
-      result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_FAST_ELEMENTS));
-    } else {
-      result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_SLOW_ELEMENTS));
-    }
-    result->set(kElementsSlot, *object_literal->constant_properties());
-  } else {
-    ArrayLiteral* array_literal = expression->AsArrayLiteral();
-    DCHECK(array_literal != NULL && array_literal->is_simple());
-    result->set(kLiteralTypeSlot, Smi::FromInt(ARRAY_LITERAL));
-    result->set(kElementsSlot, *array_literal->constant_elements());
-  }
-  return result;
-}
-
-
-CompileTimeValue::LiteralType CompileTimeValue::GetLiteralType(
-    Handle<FixedArray> value) {
-  Smi* literal_type = Smi::cast(value->get(kLiteralTypeSlot));
-  return static_cast<LiteralType>(literal_type->value());
-}
-
-
-Handle<FixedArray> CompileTimeValue::GetElements(Handle<FixedArray> value) {
-  return Handle<FixedArray>(FixedArray::cast(value->get(kElementsSlot)));
-}
-
-void Parser::ParseArrowFunctionFormalParameters(
+void Parser::AddArrowFunctionFormalParameters(
     ParserFormalParameters* parameters, Expression* expr, int end_pos,
     bool* ok) {
   // ArrowFunctionFormals ::
@@ -3929,8 +2445,8 @@
     Expression* left = binop->left();
     Expression* right = binop->right();
     int comma_pos = binop->position();
-    ParseArrowFunctionFormalParameters(parameters, left, comma_pos,
-                                       CHECK_OK_VOID);
+    AddArrowFunctionFormalParameters(parameters, left, comma_pos,
+                                     CHECK_OK_VOID);
     // LHS of comma expression should be unparenthesized.
     expr = right;
   }
@@ -3958,80 +2474,14 @@
   AddFormalParameter(parameters, expr, initializer, end_pos, is_rest);
 }
 
-void Parser::DesugarAsyncFunctionBody(const AstRawString* function_name,
-                                      Scope* scope, ZoneList<Statement*>* body,
-                                      ExpressionClassifier* classifier,
-                                      FunctionKind kind,
-                                      FunctionBodyType body_type,
-                                      bool accept_IN, int pos, bool* ok) {
-  // function async_function() {
-  //   try {
-  //     .generator_object = %CreateGeneratorObject();
-  //     ... function body ...
-  //   } catch (e) {
-  //     return Promise.reject(e);
-  //   }
-  // }
-  scope->ForceContextAllocation();
-  Variable* temp =
-      NewTemporary(ast_value_factory()->dot_generator_object_string());
-  function_state_->set_generator_object_variable(temp);
-
-  Expression* init_generator_variable = factory()->NewAssignment(
-      Token::INIT, factory()->NewVariableProxy(temp),
-      BuildCreateJSGeneratorObject(pos, kind), kNoSourcePosition);
-  body->Add(factory()->NewExpressionStatement(init_generator_variable,
-                                              kNoSourcePosition),
-            zone());
-
-  Block* try_block = factory()->NewBlock(NULL, 8, true, kNoSourcePosition);
-
-  ZoneList<Statement*>* inner_body = try_block->statements();
-
-  Expression* return_value = nullptr;
-  if (body_type == FunctionBodyType::kNormal) {
-    ParseStatementList(inner_body, Token::RBRACE, CHECK_OK_VOID);
-    return_value = factory()->NewUndefinedLiteral(kNoSourcePosition);
-  } else {
-    return_value =
-        ParseAssignmentExpression(accept_IN, classifier, CHECK_OK_VOID);
-    RewriteNonPattern(classifier, CHECK_OK_VOID);
-  }
-
-  return_value = BuildPromiseResolve(return_value, return_value->position());
-  inner_body->Add(
-      factory()->NewReturnStatement(return_value, return_value->position()),
-      zone());
-  body->Add(BuildRejectPromiseOnException(try_block), zone());
-  scope->set_end_position(scanner()->location().end_pos);
-}
-
-DoExpression* Parser::ParseDoExpression(bool* ok) {
-  // AssignmentExpression ::
-  //     do '{' StatementList '}'
-  int pos = peek_position();
-
-  Expect(Token::DO, CHECK_OK);
-  Variable* result = NewTemporary(ast_value_factory()->dot_result_string());
-  Block* block = ParseBlock(nullptr, CHECK_OK);
-  DoExpression* expr = factory()->NewDoExpression(block, result, pos);
-  if (!Rewriter::Rewrite(this, GetClosureScope(), expr, ast_value_factory())) {
-    *ok = false;
-    return nullptr;
-  }
-  return expr;
-}
-
-void ParserBaseTraits<Parser>::ParseArrowFunctionFormalParameterList(
+void Parser::DeclareArrowFunctionFormalParameters(
     ParserFormalParameters* parameters, Expression* expr,
     const Scanner::Location& params_loc, Scanner::Location* duplicate_loc,
-    const Scope::Snapshot& scope_snapshot, bool* ok) {
+    bool* ok) {
   if (expr->IsEmptyParentheses()) return;
 
-  delegate()->ParseArrowFunctionFormalParameters(
-      parameters, expr, params_loc.end_pos, CHECK_OK_VOID);
-
-  scope_snapshot.Reparent(parameters->scope);
+  AddArrowFunctionFormalParameters(parameters, expr, params_loc.end_pos,
+                                   CHECK_OK_VOID);
 
   if (parameters->Arity() > Code::kMaxArguments) {
     ReportMessageAt(params_loc, MessageTemplate::kMalformedArrowFunParamList);
@@ -4039,23 +2489,25 @@
     return;
   }
 
-  Type::ExpressionClassifier classifier(delegate());
+  ExpressionClassifier classifier(this);
   if (!parameters->is_simple) {
-    classifier.RecordNonSimpleParameter();
+    this->classifier()->RecordNonSimpleParameter();
   }
   for (int i = 0; i < parameters->Arity(); ++i) {
     auto parameter = parameters->at(i);
-    DeclareFormalParameter(parameters->scope, parameter, &classifier);
-    if (!duplicate_loc->IsValid()) {
-      *duplicate_loc = classifier.duplicate_formal_parameter_error().location;
+    DeclareFormalParameter(parameters->scope, parameter);
+    if (!this->classifier()
+             ->is_valid_formal_parameter_list_without_duplicates() &&
+        !duplicate_loc->IsValid()) {
+      *duplicate_loc =
+          this->classifier()->duplicate_formal_parameter_error().location;
     }
   }
   DCHECK_EQ(parameters->is_simple, parameters->scope->has_simple_parameters());
 }
 
-void ParserBaseTraits<Parser>::ReindexLiterals(
-    const ParserFormalParameters& parameters) {
-  if (delegate()->function_state_->materialized_literal_count() > 0) {
+void Parser::ReindexLiterals(const ParserFormalParameters& parameters) {
+  if (function_state_->materialized_literal_count() > 0) {
     AstLiteralReindexer reindexer;
 
     for (const auto p : parameters.params) {
@@ -4063,11 +2515,24 @@
       if (p.initializer != nullptr) reindexer.Reindex(p.initializer);
     }
 
-    DCHECK(reindexer.count() <=
-           delegate()->function_state_->materialized_literal_count());
+    DCHECK(reindexer.count() <= function_state_->materialized_literal_count());
   }
 }
 
+void Parser::PrepareGeneratorVariables(FunctionState* function_state) {
+  // For generators, allocating variables in contexts is currently a win
+  // because it minimizes the work needed to suspend and resume an
+  // activation.  The machine code produced for generators (by full-codegen)
+  // relies on this forced context allocation, but not in an essential way.
+  scope()->ForceContextAllocation();
+
+  // Calling a generator returns a generator object.  That object is stored
+  // in a temporary variable, a definition that is used by "yield"
+  // expressions.
+  Variable* temp =
+      NewTemporary(ast_value_factory()->dot_generator_object_string());
+  function_state->set_generator_object_variable(temp);
+}
 
 FunctionLiteral* Parser::ParseFunctionLiteral(
     const AstRawString* function_name, Scanner::Location function_name_location,
@@ -4119,7 +2584,9 @@
   // These are all things we can know at this point, without looking at the
   // function itself.
 
-  // In addition, we need to distinguish between these cases:
+  // We distinguish between lazily parsing top-level functions and lazily
+  // parsing inner functions, because the latter needs to do more work. In
+  // particular, we need to track unresolved variables to distinguish between
+  // these cases:
   // (function foo() {
   //   bar = function() { return 1; }
   //  })();
@@ -4131,17 +2598,18 @@
 
   // Now foo will be parsed eagerly and compiled eagerly (optimization: assume
   // parenthesis before the function means that it will be called
-  // immediately). The inner function *must* be parsed eagerly to resolve the
-  // possible reference to the variable in foo's scope. However, it's possible
-  // that it will be compiled lazily.
+  // immediately). bar can be parsed lazily, but we need to parse it in a mode
+  // that tracks unresolved variables.
+  DCHECK_IMPLIES(mode() == PARSE_LAZILY, FLAG_lazy);
+  DCHECK_IMPLIES(mode() == PARSE_LAZILY, allow_lazy());
+  DCHECK_IMPLIES(mode() == PARSE_LAZILY, extension_ == nullptr);
 
-  // To make this additional case work, both Parser and PreParser implement a
-  // logic where only top-level functions will be parsed lazily.
-  bool is_lazily_parsed = mode() == PARSE_LAZILY &&
-                          this->scope()->AllowsLazyParsing() &&
-                          !function_state_->next_function_is_parenthesized();
+  bool is_lazy_top_level_function =
+      mode() == PARSE_LAZILY &&
+      eager_compile_hint == FunctionLiteral::kShouldLazyCompile &&
+      scope()->AllowsLazyParsingWithoutUnresolvedVariables();
 
-  // Determine whether the function body can be discarded after parsing.
+  // Determine whether we can still lazy parse the inner function.
   // The preconditions are:
   // - Lazy compilation has to be enabled.
   // - Neither V8 natives nor native function declarations can be allowed,
@@ -4156,18 +2624,20 @@
   // - The function literal shouldn't be hinted to eagerly compile.
   // - For asm.js functions the body needs to be available when module
   //   validation is active, because we examine the entire module at once.
+
+  // Inner functions will be parsed using a temporary Zone. After parsing, we
+  // will migrate unresolved variables into a Scope in the main Zone.
+  // TODO(marja): Refactor parsing modes: simplify this.
   bool use_temp_zone =
-      !is_lazily_parsed && FLAG_lazy && !allow_natives() &&
-      extension_ == NULL && allow_lazy() &&
-      function_type == FunctionLiteral::kDeclaration &&
+      allow_lazy() && function_type == FunctionLiteral::kDeclaration &&
       eager_compile_hint != FunctionLiteral::kShouldEagerCompile &&
       !(FLAG_validate_asm && scope()->IsAsmModule());
+  bool is_lazy_inner_function =
+      use_temp_zone && FLAG_lazy_inner_functions && !is_lazy_top_level_function;
 
-  DeclarationScope* main_scope = nullptr;
-  if (use_temp_zone) {
-    // This Scope lives in the main Zone; we'll migrate data into it later.
-    main_scope = NewFunctionScope(kind);
-  }
+  // This Scope lives in the main zone. We'll migrate data into that zone later.
+  DeclarationScope* scope = NewFunctionScope(kind);
+  SetLanguageMode(scope, language_mode);
 
   ZoneList<Statement*>* body = nullptr;
   int arity = -1;
@@ -4177,6 +2647,32 @@
   bool should_be_used_once_hint = false;
   bool has_duplicate_parameters;
 
+  FunctionState function_state(&function_state_, &scope_state_, scope);
+#ifdef DEBUG
+  scope->SetScopeName(function_name);
+#endif
+
+  ExpressionClassifier formals_classifier(this, &duplicate_finder);
+
+  if (is_generator) PrepareGeneratorVariables(&function_state);
+
+  Expect(Token::LPAREN, CHECK_OK);
+  int start_position = scanner()->location().beg_pos;
+  this->scope()->set_start_position(start_position);
+  ParserFormalParameters formals(scope);
+  ParseFormalParameterList(&formals, CHECK_OK);
+  arity = formals.Arity();
+  Expect(Token::RPAREN, CHECK_OK);
+  int formals_end_position = scanner()->location().end_pos;
+
+  CheckArityRestrictions(arity, kind, formals.has_rest, start_position,
+                         formals_end_position, CHECK_OK);
+  Expect(Token::LBRACE, CHECK_OK);
+  // Don't include the rest parameter into the function's formal parameter
+  // count (esp. the SharedFunctionInfo::internal_formal_parameter_count,
+  // which says whether we need to create an arguments adaptor frame).
+  if (formals.has_rest) arity--;
+
   {
     // Temporary zones can nest. When we migrate free variables (see below), we
     // need to recreate them in the previous Zone.
@@ -4187,94 +2683,58 @@
     // new temporary zone if the preconditions are satisfied, and ensures that
     // the previous zone is always restored after parsing the body. To be able
     // to do scope analysis correctly after full parsing, we migrate needed
-    // information from scope into main_scope when the function has been parsed.
+    // information when the function is parsed.
     Zone temp_zone(zone()->allocator());
     DiscardableZoneScope zone_scope(this, &temp_zone, use_temp_zone);
-
-    DeclarationScope* scope = NewFunctionScope(kind);
-    SetLanguageMode(scope, language_mode);
-    if (!use_temp_zone) {
-      main_scope = scope;
-    } else {
-      DCHECK(main_scope->zone() != scope->zone());
-    }
-
-    FunctionState function_state(&function_state_, &scope_state_, scope, kind);
 #ifdef DEBUG
-    scope->SetScopeName(function_name);
+    if (use_temp_zone) scope->set_needs_migration();
 #endif
-    ExpressionClassifier formals_classifier(this, &duplicate_finder);
 
-    if (is_generator) {
-      // For generators, allocating variables in contexts is currently a win
-      // because it minimizes the work needed to suspend and resume an
-      // activation.  The machine code produced for generators (by full-codegen)
-      // relies on this forced context allocation, but not in an essential way.
-      this->scope()->ForceContextAllocation();
-
-      // Calling a generator returns a generator object.  That object is stored
-      // in a temporary variable, a definition that is used by "yield"
-      // expressions. This also marks the FunctionState as a generator.
-      Variable* temp =
-          NewTemporary(ast_value_factory()->dot_generator_object_string());
-      function_state.set_generator_object_variable(temp);
-    }
-
-    Expect(Token::LPAREN, CHECK_OK);
-    int start_position = scanner()->location().beg_pos;
-    this->scope()->set_start_position(start_position);
-    ParserFormalParameters formals(scope);
-    ParseFormalParameterList(&formals, &formals_classifier, CHECK_OK);
-    arity = formals.Arity();
-    Expect(Token::RPAREN, CHECK_OK);
-    int formals_end_position = scanner()->location().end_pos;
-
-    CheckArityRestrictions(arity, kind, formals.has_rest, start_position,
-                           formals_end_position, CHECK_OK);
-    Expect(Token::LBRACE, CHECK_OK);
-    // Don't include the rest parameter into the function's formal parameter
-    // count (esp. the SharedFunctionInfo::internal_formal_parameter_count,
-    // which says whether we need to create an arguments adaptor frame).
-    if (formals.has_rest) arity--;
-
-    // Eager or lazy parse?
-    // If is_lazily_parsed, we'll parse lazy. If we can set a bookmark, we'll
-    // pass it to SkipLazyFunctionBody, which may use it to abort lazy
-    // parsing if it suspect that wasn't a good idea. If so, or if we didn't
-    // try to lazy parse in the first place, we'll have to parse eagerly.
-    Scanner::BookmarkScope bookmark(scanner());
-    if (is_lazily_parsed) {
-      Scanner::BookmarkScope* maybe_bookmark =
-          bookmark.Set() ? &bookmark : nullptr;
-      SkipLazyFunctionBody(&materialized_literal_count,
-                           &expected_property_count, /*CHECK_OK*/ ok,
-                           maybe_bookmark);
+    // Eager or lazy parse? If is_lazy_top_level_function, we'll parse
+    // lazily. We'll call SkipLazyFunctionBody, which may decide to abort lazy
+    // parsing if it suspects that wasn't a good idea. If so (in which case the
+    // parser is expected to have backtracked), or if we didn't try to lazy
+    // parse in the first place, we'll have to parse eagerly.
+    if (is_lazy_top_level_function || is_lazy_inner_function) {
+      Scanner::BookmarkScope bookmark(scanner());
+      bookmark.Set();
+      LazyParsingResult result = SkipLazyFunctionBody(
+          &materialized_literal_count, &expected_property_count,
+          is_lazy_inner_function, is_lazy_top_level_function, CHECK_OK);
 
       materialized_literal_count += formals.materialized_literals_count +
                                     function_state.materialized_literal_count();
 
-      if (bookmark.HasBeenReset()) {
+      if (result == kLazyParsingAborted) {
+        DCHECK(is_lazy_top_level_function);
+        bookmark.Apply();
         // Trigger eager (re-)parsing, just below this block.
-        is_lazily_parsed = false;
+        is_lazy_top_level_function = false;
 
         // This is probably an initialization function. Inform the compiler it
         // should also eager-compile this function, and that we expect it to be
         // used once.
         eager_compile_hint = FunctionLiteral::kShouldEagerCompile;
         should_be_used_once_hint = true;
+        scope->ResetAfterPreparsing(ast_value_factory(), true);
+        zone_scope.Reset();
+        use_temp_zone = false;
       }
     }
-    if (!is_lazily_parsed) {
+
+    if (!is_lazy_top_level_function && !is_lazy_inner_function) {
       body = ParseEagerFunctionBody(function_name, pos, formals, kind,
                                     function_type, CHECK_OK);
 
       materialized_literal_count = function_state.materialized_literal_count();
       expected_property_count = function_state.expected_property_count();
-      if (use_temp_zone) {
-        // If the preconditions are correct the function body should never be
-        // accessed, but do this anyway for better behaviour if they're wrong.
-        body = nullptr;
-      }
+    }
+
+    if (use_temp_zone || is_lazy_top_level_function) {
+      // If the preconditions are correct the function body should never be
+      // accessed, but do this anyway for better behaviour if they're wrong.
+      body = nullptr;
+      scope->AnalyzePartially(&previous_zone_ast_node_factory);
     }
 
     // Parsing the body may change the language mode in our scope.
@@ -4286,13 +2746,13 @@
                       function_name_location, CHECK_OK);
     const bool allow_duplicate_parameters =
         is_sloppy(language_mode) && formals.is_simple && !IsConciseMethod(kind);
-    ValidateFormalParameters(&formals_classifier, language_mode,
-                             allow_duplicate_parameters, CHECK_OK);
+    ValidateFormalParameters(language_mode, allow_duplicate_parameters,
+                             CHECK_OK);
 
     if (is_strict(language_mode)) {
       CheckStrictOctalLiteral(scope->start_position(), scope->end_position(),
                               CHECK_OK);
-      CheckDecimalLiteralWithLeadingZero(use_counts_, scope->start_position(),
+      CheckDecimalLiteralWithLeadingZero(scope->start_position(),
                                          scope->end_position());
     }
     CheckConflictingVarDeclarations(scope, CHECK_OK);
@@ -4302,12 +2762,7 @@
       RewriteDestructuringAssignments();
     }
     has_duplicate_parameters =
-      !formals_classifier.is_valid_formal_parameter_list_without_duplicates();
-
-    if (use_temp_zone) {
-      DCHECK(main_scope != scope);
-      scope->AnalyzePartially(main_scope, &previous_zone_ast_node_factory);
-    }
+        !classifier()->is_valid_formal_parameter_list_without_duplicates();
   }  // DiscardableZoneScope goes out of scope.
 
   FunctionLiteral::ParameterFlag duplicate_parameters =
@@ -4316,53 +2771,31 @@
 
   // Note that the FunctionLiteral needs to be created in the main Zone again.
   FunctionLiteral* function_literal = factory()->NewFunctionLiteral(
-      function_name, main_scope, body, materialized_literal_count,
+      function_name, scope, body, materialized_literal_count,
       expected_property_count, arity, duplicate_parameters, function_type,
-      eager_compile_hint, kind, pos);
+      eager_compile_hint, pos);
   function_literal->set_function_token_position(function_token_pos);
   if (should_be_used_once_hint)
     function_literal->set_should_be_used_once_hint();
 
-  if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);
+  if (should_infer_name) {
+    DCHECK_NOT_NULL(fni_);
+    fni_->AddFunction(function_literal);
+  }
   return function_literal;
 }
 
-Expression* Parser::ParseAsyncFunctionExpression(bool* ok) {
-  // AsyncFunctionDeclaration ::
-  //   async [no LineTerminator here] function ( FormalParameters[Await] )
-  //       { AsyncFunctionBody }
-  //
-  //   async [no LineTerminator here] function BindingIdentifier[Await]
-  //       ( FormalParameters[Await] ) { AsyncFunctionBody }
-  DCHECK_EQ(scanner()->current_token(), Token::ASYNC);
-  int pos = position();
-  Expect(Token::FUNCTION, CHECK_OK);
-  bool is_strict_reserved = false;
-  const AstRawString* name = nullptr;
-  FunctionLiteral::FunctionType type = FunctionLiteral::kAnonymousExpression;
-
-  if (peek_any_identifier()) {
-    type = FunctionLiteral::kNamedExpression;
-    name = ParseIdentifierOrStrictReservedWord(FunctionKind::kAsyncFunction,
-                                               &is_strict_reserved, CHECK_OK);
-  }
-  return ParseFunctionLiteral(name, scanner()->location(),
-                              is_strict_reserved ? kFunctionNameIsStrictReserved
-                                                 : kFunctionNameValidityUnknown,
-                              FunctionKind::kAsyncFunction, pos, type,
-                              language_mode(), CHECK_OK);
-}
-
-void Parser::SkipLazyFunctionBody(int* materialized_literal_count,
-                                  int* expected_property_count, bool* ok,
-                                  Scanner::BookmarkScope* bookmark) {
-  DCHECK_IMPLIES(bookmark, bookmark->HasBeenSet());
+Parser::LazyParsingResult Parser::SkipLazyFunctionBody(
+    int* materialized_literal_count, int* expected_property_count,
+    bool is_inner_function, bool may_abort, bool* ok) {
   if (produce_cached_parse_data()) CHECK(log_);
 
   int function_block_pos = position();
-  DeclarationScope* scope = this->scope()->AsDeclarationScope();
+  DeclarationScope* scope = function_state_->scope();
   DCHECK(scope->is_function_scope());
-  if (consume_cached_parse_data() && !cached_parse_data_->rejected()) {
+  // Inner functions are not part of the cached data.
+  if (!is_inner_function && consume_cached_parse_data() &&
+      !cached_parse_data_->rejected()) {
     // If we have cached data, we use it to skip parsing the function body. The
     // data contains the information we need to construct the lazy function.
     FunctionEntry entry =
@@ -4374,14 +2807,14 @@
       scanner()->SeekForward(entry.end_pos() - 1);
 
       scope->set_end_position(entry.end_pos());
-      Expect(Token::RBRACE, CHECK_OK_VOID);
+      Expect(Token::RBRACE, CHECK_OK_VALUE(kLazyParsingComplete));
       total_preparse_skipped_ += scope->end_position() - function_block_pos;
       *materialized_literal_count = entry.literal_count();
       *expected_property_count = entry.property_count();
       SetLanguageMode(scope, entry.language_mode());
       if (entry.uses_super_property()) scope->RecordSuperPropertyUsage();
       if (entry.calls_eval()) scope->RecordEvalCall();
-      return;
+      return kLazyParsingComplete;
     }
     cached_parse_data_->Reject();
   }
@@ -4389,32 +2822,32 @@
   // AST. This gathers the data needed to build a lazy function.
   SingletonLogger logger;
   PreParser::PreParseResult result =
-      ParseLazyFunctionBodyWithPreParser(&logger, bookmark);
-  if (bookmark && bookmark->HasBeenReset()) {
-    return;  // Return immediately if pre-parser devided to abort parsing.
-  }
+      ParseLazyFunctionBodyWithPreParser(&logger, is_inner_function, may_abort);
+
+  // Return immediately if pre-parser decided to abort parsing.
+  if (result == PreParser::kPreParseAbort) return kLazyParsingAborted;
   if (result == PreParser::kPreParseStackOverflow) {
     // Propagate stack overflow.
     set_stack_overflow();
     *ok = false;
-    return;
+    return kLazyParsingComplete;
   }
   if (logger.has_error()) {
     ReportMessageAt(Scanner::Location(logger.start(), logger.end()),
                     logger.message(), logger.argument_opt(),
                     logger.error_type());
     *ok = false;
-    return;
+    return kLazyParsingComplete;
   }
   scope->set_end_position(logger.end());
-  Expect(Token::RBRACE, CHECK_OK_VOID);
+  Expect(Token::RBRACE, CHECK_OK_VALUE(kLazyParsingComplete));
   total_preparse_skipped_ += scope->end_position() - function_block_pos;
   *materialized_literal_count = logger.literals();
   *expected_property_count = logger.properties();
   SetLanguageMode(scope, logger.language_mode());
   if (logger.uses_super_property()) scope->RecordSuperPropertyUsage();
   if (logger.calls_eval()) scope->RecordEvalCall();
-  if (produce_cached_parse_data()) {
+  if (!is_inner_function && produce_cached_parse_data()) {
     DCHECK(log_);
     // Position right after terminal '}'.
     int body_end = scanner()->location().end_pos;
@@ -4422,6 +2855,7 @@
                       *expected_property_count, language_mode(),
                       scope->uses_super_property(), scope->calls_eval());
   }
+  return kLazyParsingComplete;
 }
 
 
@@ -4438,9 +2872,9 @@
           Token::EQ_STRICT, factory()->NewVariableProxy(var),
           factory()->NewNullLiteral(kNoSourcePosition), kNoSourcePosition),
       kNoSourcePosition);
-  Expression* throw_type_error = this->NewThrowTypeError(
-      MessageTemplate::kNonCoercible, ast_value_factory()->empty_string(),
-      kNoSourcePosition);
+  Expression* throw_type_error =
+      NewThrowTypeError(MessageTemplate::kNonCoercible,
+                        ast_value_factory()->empty_string(), kNoSourcePosition);
   IfStatement* if_statement = factory()->NewIfStatement(
       condition,
       factory()->NewExpressionStatement(throw_type_error, kNoSourcePosition),
@@ -4495,7 +2929,6 @@
     if (parameter.is_rest && parameter.pattern->IsVariableProxy()) break;
     DeclarationDescriptor descriptor;
     descriptor.declaration_kind = DeclarationDescriptor::PARAMETER;
-    descriptor.parser = this;
     descriptor.scope = scope();
     descriptor.hoist_scope = nullptr;
     descriptor.mode = LET;
@@ -4544,8 +2977,8 @@
     BlockState block_state(&scope_state_, param_scope);
     DeclarationParsingResult::Declaration decl(
         parameter.pattern, parameter.initializer_end_position, initial_value);
-    PatternRewriter::DeclareAndInitializeVariables(param_block, &descriptor,
-                                                   &decl, nullptr, CHECK_OK);
+    PatternRewriter::DeclareAndInitializeVariables(
+        this, param_block, &descriptor, &decl, nullptr, CHECK_OK);
 
     if (param_block != init_block) {
       param_scope = block_state.FinalizedBlockScope();
@@ -4558,28 +2991,74 @@
   return init_block;
 }
 
-Block* Parser::BuildRejectPromiseOnException(Block* block) {
-  // try { <block> } catch (error) { return Promise.reject(error); }
-  Block* try_block = block;
+Block* Parser::BuildRejectPromiseOnException(Block* inner_block, bool* ok) {
+  // .promise = %AsyncFunctionPromiseCreate();
+  // try {
+  //   <inner_block>
+  // } catch (.catch) {
+  //   %RejectPromise(.promise, .catch);
+  //   return .promise;
+  // } finally {
+  //   %AsyncFunctionPromiseRelease(.promise);
+  // }
+  Block* result = factory()->NewBlock(nullptr, 2, true, kNoSourcePosition);
+
+  // .promise = %AsyncFunctionPromiseCreate();
+  Statement* set_promise;
+  {
+    Expression* create_promise = factory()->NewCallRuntime(
+        Context::ASYNC_FUNCTION_PROMISE_CREATE_INDEX,
+        new (zone()) ZoneList<Expression*>(0, zone()), kNoSourcePosition);
+    Assignment* assign_promise = factory()->NewAssignment(
+        Token::INIT, factory()->NewVariableProxy(PromiseVariable()),
+        create_promise, kNoSourcePosition);
+    set_promise =
+        factory()->NewExpressionStatement(assign_promise, kNoSourcePosition);
+  }
+  result->statements()->Add(set_promise, zone());
+
+  // catch (.catch) { return %RejectPromise(.promise, .catch), .promise }
   Scope* catch_scope = NewScope(CATCH_SCOPE);
   catch_scope->set_is_hidden();
   Variable* catch_variable =
       catch_scope->DeclareLocal(ast_value_factory()->dot_catch_string(), VAR,
-                                kCreatedInitialized, Variable::NORMAL);
+                                kCreatedInitialized, NORMAL_VARIABLE);
   Block* catch_block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
 
-  Expression* promise_reject = BuildPromiseReject(
+  Expression* promise_reject = BuildRejectPromise(
       factory()->NewVariableProxy(catch_variable), kNoSourcePosition);
-
   ReturnStatement* return_promise_reject =
       factory()->NewReturnStatement(promise_reject, kNoSourcePosition);
   catch_block->statements()->Add(return_promise_reject, zone());
-  TryStatement* try_catch_statement = factory()->NewTryCatchStatement(
-      try_block, catch_scope, catch_variable, catch_block, kNoSourcePosition);
 
-  block = factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
-  block->statements()->Add(try_catch_statement, zone());
-  return block;
+  TryStatement* try_catch_statement =
+      factory()->NewTryCatchStatementForAsyncAwait(inner_block, catch_scope,
+                                                   catch_variable, catch_block,
+                                                   kNoSourcePosition);
+
+  // There is no TryCatchFinally node, so wrap it in an outer try/finally
+  Block* outer_try_block =
+      factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
+  outer_try_block->statements()->Add(try_catch_statement, zone());
+
+  // finally { %AsyncFunctionPromiseRelease(.promise) }
+  Block* finally_block =
+      factory()->NewBlock(nullptr, 1, true, kNoSourcePosition);
+  {
+    ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(1, zone());
+    args->Add(factory()->NewVariableProxy(PromiseVariable()), zone());
+    Expression* call_promise_release = factory()->NewCallRuntime(
+        Context::ASYNC_FUNCTION_PROMISE_RELEASE_INDEX, args, kNoSourcePosition);
+    Statement* promise_release = factory()->NewExpressionStatement(
+        call_promise_release, kNoSourcePosition);
+    finally_block->statements()->Add(promise_release, zone());
+  }
+
+  Statement* try_finally_statement = factory()->NewTryFinallyStatement(
+      outer_try_block, finally_block, kNoSourcePosition);
+
+  result->statements()->Add(try_finally_statement, zone());
+  return result;
 }
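
For illustration only (not part of this patch), here is a minimal standalone C++ sketch of the control-flow shape the rewritten helper now emits: create the promise, run the body inside try/catch, reject and return on exception, and release in a finally-like step. The Promise type and runtime stand-ins below are invented for the sketch; they are not V8 APIs.

  #include <functional>
  #include <iostream>
  #include <stdexcept>
  #include <string>

  // Invented stand-ins for %AsyncFunctionPromiseCreate / %RejectPromise /
  // %AsyncFunctionPromiseRelease; they exist only for this sketch.
  struct Promise {
    bool rejected = false;
    std::string reason;
  };

  void RejectPromise(Promise* p, const std::string& reason) {
    p->rejected = true;
    p->reason = reason;
  }

  void AsyncFunctionPromiseRelease(Promise*) {
    // Bookkeeping hook in the real runtime; a no-op here.
  }

  // Mirrors the generated shape:
  //   .promise = %AsyncFunctionPromiseCreate();
  //   try { <inner_block> }
  //   catch (.catch) { %RejectPromise(.promise, .catch); return .promise; }
  //   finally { %AsyncFunctionPromiseRelease(.promise); }
  // C++ has no `finally`, so the release call is written on both exits.
  Promise RunAsyncBody(const std::function<void()>& body) {
    Promise promise;                          // .promise = ...
    try {
      body();                                 // <inner_block>
    } catch (const std::exception& e) {
      RejectPromise(&promise, e.what());      // catch clause
      AsyncFunctionPromiseRelease(&promise);  // finally still runs on this path
      return promise;                         // return .promise
    }
    AsyncFunctionPromiseRelease(&promise);    // finally clause, normal path
    return promise;
  }

  int main() {
    Promise ok = RunAsyncBody([] {});
    Promise bad = RunAsyncBody([] { throw std::runtime_error("boom"); });
    std::cout << ok.rejected << " " << bad.reason << "\n";  // 0 boom
  }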
 
 Expression* Parser::BuildCreateJSGeneratorObject(int pos, FunctionKind kind) {
@@ -4593,26 +3072,68 @@
                                    pos);
 }
 
-Expression* Parser::BuildPromiseResolve(Expression* value, int pos) {
-  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(1, zone());
+Expression* Parser::BuildResolvePromise(Expression* value, int pos) {
+  // %ResolvePromise(.promise, value), .promise
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(2, zone());
+  args->Add(factory()->NewVariableProxy(PromiseVariable()), zone());
   args->Add(value, zone());
-  return factory()->NewCallRuntime(Context::PROMISE_CREATE_RESOLVED_INDEX, args,
-                                   pos);
+  Expression* call_runtime =
+      factory()->NewCallRuntime(Context::PROMISE_RESOLVE_INDEX, args, pos);
+  return factory()->NewBinaryOperation(
+      Token::COMMA, call_runtime,
+      factory()->NewVariableProxy(PromiseVariable()), pos);
 }
 
-Expression* Parser::BuildPromiseReject(Expression* value, int pos) {
-  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(1, zone());
+Expression* Parser::BuildRejectPromise(Expression* value, int pos) {
+  // %RejectPromiseNoDebugEvent(.promise, value, true), .promise
+  // The NoDebugEvent variant disables the additional debug event for the
+  // rejection since a debug event already happened for the exception that got
+  // us here.
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(2, zone());
+  args->Add(factory()->NewVariableProxy(PromiseVariable()), zone());
   args->Add(value, zone());
-  return factory()->NewCallRuntime(Context::PROMISE_CREATE_REJECTED_INDEX, args,
-                                   pos);
+  Expression* call_runtime = factory()->NewCallRuntime(
+      Context::REJECT_PROMISE_NO_DEBUG_EVENT_INDEX, args, pos);
+  return factory()->NewBinaryOperation(
+      Token::COMMA, call_runtime,
+      factory()->NewVariableProxy(PromiseVariable()), pos);
+}
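
As a side note on the `<call>, .promise` shape used by both helpers above: the runtime call is evaluated purely for its side effect, and the whole expression then evaluates to `.promise`. A tiny standalone C++ analogue using the comma operator, with names invented for the sketch:

  #include <iostream>
  #include <string>

  struct Promise { std::string state = "pending"; };

  // Stand-in for %ResolvePromise; side effect only, for the sketch.
  void ResolvePromise(Promise* p, const std::string& value) {
    p->state = "fulfilled: " + value;
  }

  int main() {
    Promise promise;
    // Side effect first; the parenthesized expression then evaluates to
    // &promise, matching "%ResolvePromise(.promise, value), .promise".
    Promise* result = (ResolvePromise(&promise, "42"), &promise);
    std::cout << result->state << "\n";  // fulfilled: 42
  }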
+
+Variable* Parser::PromiseVariable() {
+  // Depending on the compilation path, several different code paths may be
+  // the first to access the Promise temporary. Whichever comes first should
+  // create it and stash it in the FunctionState.
+  Variable* promise = function_state_->promise_variable();
+  if (function_state_->promise_variable() == nullptr) {
+    promise = scope()->NewTemporary(ast_value_factory()->empty_string());
+    function_state_->set_promise_variable(promise);
+  }
+  return promise;
+}
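
The lazy creation above is a plain create-on-first-use pattern. A standalone sketch of that pattern, with an invented FunctionState that holds a string where the parser holds a Variable:

  #include <cassert>
  #include <memory>
  #include <string>

  // Invented for the sketch: the first caller creates the temporary and
  // caches it, so every later code path sees the same object.
  struct FunctionState {
    std::string* PromiseVariable() {
      if (promise_variable_ == nullptr) {
        promise_variable_ = std::make_unique<std::string>(".promise");
      }
      return promise_variable_.get();
    }

   private:
    std::unique_ptr<std::string> promise_variable_;
  };

  int main() {
    FunctionState state;
    assert(state.PromiseVariable() == state.PromiseVariable());  // cached
    return 0;
  }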
+
+Expression* Parser::BuildInitialYield(int pos, FunctionKind kind) {
+  Expression* allocation = BuildCreateJSGeneratorObject(pos, kind);
+  VariableProxy* init_proxy =
+      factory()->NewVariableProxy(function_state_->generator_object_variable());
+  Assignment* assignment = factory()->NewAssignment(
+      Token::INIT, init_proxy, allocation, kNoSourcePosition);
+  VariableProxy* get_proxy =
+      factory()->NewVariableProxy(function_state_->generator_object_variable());
+  // The position of the yield is important for reporting the exception
+  // caused by calling the .throw method on a generator suspended at the
+  // initial yield (i.e. right after generator instantiation).
+  return factory()->NewYield(get_proxy, assignment, scope()->start_position(),
+                             Yield::kOnExceptionThrow);
 }
 
 ZoneList<Statement*>* Parser::ParseEagerFunctionBody(
     const AstRawString* function_name, int pos,
     const ParserFormalParameters& parameters, FunctionKind kind,
     FunctionLiteral::FunctionType function_type, bool* ok) {
-  // Everything inside an eagerly parsed function will be parsed eagerly
-  // (see comment above).
+  // Everything inside an eagerly parsed function will be parsed eagerly (see
+  // comment above). Lazy inner functions are handled separately and they won't
+  // require the mode to be PARSE_LAZILY (see ParseFunctionLiteral).
+  // TODO(marja): Refactor parsing modes: remove this.
   ParsingModeScope parsing_mode(this, PARSE_EAGERLY);
   ZoneList<Statement*>* result = new(zone()) ZoneList<Statement*>(8, zone());
 
@@ -4657,26 +3178,10 @@
 
       Block* try_block =
           factory()->NewBlock(nullptr, 3, false, kNoSourcePosition);
-
-      {
-        Expression* allocation = BuildCreateJSGeneratorObject(pos, kind);
-        VariableProxy* init_proxy = factory()->NewVariableProxy(
-            function_state_->generator_object_variable());
-        Assignment* assignment = factory()->NewAssignment(
-            Token::INIT, init_proxy, allocation, kNoSourcePosition);
-        VariableProxy* get_proxy = factory()->NewVariableProxy(
-            function_state_->generator_object_variable());
-        // The position of the yield is important for reporting the exception
-        // caused by calling the .throw method on a generator suspended at the
-        // initial yield (i.e. right after generator instantiation).
-        Yield* yield = factory()->NewYield(get_proxy, assignment,
-                                           scope()->start_position(),
-                                           Yield::kOnExceptionThrow);
-        try_block->statements()->Add(
-            factory()->NewExpressionStatement(yield, kNoSourcePosition),
-            zone());
-      }
-
+      Expression* initial_yield = BuildInitialYield(pos, kind);
+      try_block->statements()->Add(
+          factory()->NewExpressionStatement(initial_yield, kNoSourcePosition),
+          zone());
       ParseStatementList(try_block->statements(), Token::RBRACE, CHECK_OK);
 
       Statement* final_return = factory()->NewReturnStatement(
@@ -4700,16 +3205,15 @@
                 zone());
     } else if (IsAsyncFunction(kind)) {
       const bool accept_IN = true;
-      DesugarAsyncFunctionBody(function_name, inner_scope, body, nullptr, kind,
-                               FunctionBodyType::kNormal, accept_IN, pos,
-                               CHECK_OK);
+      ParseAsyncFunctionBody(inner_scope, body, kind, FunctionBodyType::kNormal,
+                             accept_IN, pos, CHECK_OK);
     } else {
       ParseStatementList(body, Token::RBRACE, CHECK_OK);
     }
 
     if (IsSubclassConstructor(kind)) {
-      body->Add(factory()->NewReturnStatement(
-                    this->ThisExpression(kNoSourcePosition), kNoSourcePosition),
+      body->Add(factory()->NewReturnStatement(ThisExpression(kNoSourcePosition),
+                                              kNoSourcePosition),
                 zone());
     }
   }
@@ -4726,12 +3230,12 @@
     Block* init_block = BuildParameterInitializationBlock(parameters, CHECK_OK);
 
     if (is_sloppy(inner_scope->language_mode())) {
-      InsertSloppyBlockFunctionVarBindings(inner_scope, function_scope,
-                                           CHECK_OK);
+      InsertSloppyBlockFunctionVarBindings(inner_scope);
     }
 
+    // TODO(littledan): Merge the two rejection blocks into one
     if (IsAsyncFunction(kind)) {
-      init_block = BuildRejectPromiseOnException(init_block);
+      init_block = BuildRejectPromiseOnException(init_block, CHECK_OK);
     }
 
     DCHECK_NOT_NULL(init_block);
@@ -4748,31 +3252,42 @@
   } else {
     DCHECK_EQ(inner_scope, function_scope);
     if (is_sloppy(function_scope->language_mode())) {
-      InsertSloppyBlockFunctionVarBindings(function_scope, nullptr, CHECK_OK);
+      InsertSloppyBlockFunctionVarBindings(function_scope);
     }
   }
 
+  if (!IsArrowFunction(kind)) {
+    // Declare arguments after parsing the function since lexical 'arguments'
+    // masks the arguments object. Declare arguments before declaring the
+    // function var since the arguments object masks 'function arguments'.
+    function_scope->DeclareArguments(ast_value_factory());
+  }
+
   if (function_type == FunctionLiteral::kNamedExpression) {
-    // Now that we know the language mode, we can create the const assignment
-    // in the previously reserved spot.
-    DCHECK_EQ(function_scope, scope());
-    Variable* fvar = function_scope->DeclareFunctionVar(function_name);
-    VariableProxy* fproxy = factory()->NewVariableProxy(fvar);
-    result->Set(kFunctionNameAssignmentIndex,
-                factory()->NewExpressionStatement(
-                    factory()->NewAssignment(Token::INIT, fproxy,
-                                             factory()->NewThisFunction(pos),
-                                             kNoSourcePosition),
-                    kNoSourcePosition));
+    Statement* statement;
+    if (function_scope->LookupLocal(function_name) == nullptr) {
+      // Now that we know the language mode, we can create the const assignment
+      // in the previously reserved spot.
+      DCHECK_EQ(function_scope, scope());
+      Variable* fvar = function_scope->DeclareFunctionVar(function_name);
+      VariableProxy* fproxy = factory()->NewVariableProxy(fvar);
+      statement = factory()->NewExpressionStatement(
+          factory()->NewAssignment(Token::INIT, fproxy,
+                                   factory()->NewThisFunction(pos),
+                                   kNoSourcePosition),
+          kNoSourcePosition);
+    } else {
+      statement = factory()->NewEmptyStatement(kNoSourcePosition);
+    }
+    result->Set(kFunctionNameAssignmentIndex, statement);
   }
 
   MarkCollectedTailCallExpressions();
   return result;
 }
 
-
 PreParser::PreParseResult Parser::ParseLazyFunctionBodyWithPreParser(
-    SingletonLogger* logger, Scanner::BookmarkScope* bookmark) {
+    SingletonLogger* logger, bool is_inner_function, bool may_abort) {
   // This function may be called on a background thread too; record only the
   // main thread preparse times.
   if (pre_parse_timer_ != NULL) {
@@ -4794,209 +3309,337 @@
     SET_ALLOW(harmony_restrictive_declarations);
     SET_ALLOW(harmony_async_await);
     SET_ALLOW(harmony_trailing_commas);
+    SET_ALLOW(harmony_class_fields);
 #undef SET_ALLOW
   }
+  // Aborting inner function preparsing would leave scopes in an inconsistent
+  // state; we don't parse inner functions in the abortable mode anyway.
+  DCHECK(!is_inner_function || !may_abort);
+
+  DeclarationScope* function_scope = function_state_->scope();
   PreParser::PreParseResult result = reusable_preparser_->PreParseLazyFunction(
-      language_mode(), function_state_->kind(),
-      scope()->AsDeclarationScope()->has_simple_parameters(), parsing_module_,
-      logger, bookmark, use_counts_);
+      function_scope, parsing_module_, logger, is_inner_function, may_abort,
+      use_counts_);
   if (pre_parse_timer_ != NULL) {
     pre_parse_timer_->Stop();
   }
   return result;
 }
 
-Expression* Parser::ParseClassLiteral(ExpressionClassifier* classifier,
-                                      const AstRawString* name,
-                                      Scanner::Location class_name_location,
-                                      bool name_is_strict_reserved, int pos,
-                                      bool* ok) {
-  // All parts of a ClassDeclaration and ClassExpression are strict code.
-  if (name_is_strict_reserved) {
-    ReportMessageAt(class_name_location,
-                    MessageTemplate::kUnexpectedStrictReserved);
-    *ok = false;
-    return nullptr;
-  }
-  if (IsEvalOrArguments(name)) {
-    ReportMessageAt(class_name_location, MessageTemplate::kStrictEvalArguments);
-    *ok = false;
-    return nullptr;
-  }
+Expression* Parser::InstallHomeObject(Expression* function_literal,
+                                      Expression* home_object) {
+  Block* do_block = factory()->NewBlock(nullptr, 1, false, kNoSourcePosition);
+  Variable* result_var =
+      scope()->NewTemporary(ast_value_factory()->empty_string());
+  DoExpression* do_expr =
+      factory()->NewDoExpression(do_block, result_var, kNoSourcePosition);
+  Assignment* init = factory()->NewAssignment(
+      Token::ASSIGN, factory()->NewVariableProxy(result_var), function_literal,
+      kNoSourcePosition);
+  do_block->statements()->Add(
+      factory()->NewExpressionStatement(init, kNoSourcePosition), zone());
+  Property* home_object_property = factory()->NewProperty(
+      factory()->NewVariableProxy(result_var),
+      factory()->NewSymbolLiteral("home_object_symbol", kNoSourcePosition),
+      kNoSourcePosition);
+  Assignment* assignment = factory()->NewAssignment(
+      Token::ASSIGN, home_object_property, home_object, kNoSourcePosition);
+  do_block->statements()->Add(
+      factory()->NewExpressionStatement(assignment, kNoSourcePosition), zone());
+  return do_expr;
+}
 
-  BlockState block_state(&scope_state_);
+const AstRawString* ClassFieldVariableName(bool is_name,
+                                           AstValueFactory* ast_value_factory,
+                                           int index) {
+  std::string name =
+      ".class-field-" + std::to_string(index) + (is_name ? "-name" : "-func");
+  return ast_value_factory->GetOneByteString(name.c_str());
+}
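
A standalone sketch of the naming scheme this helper implements, using std::string in place of the AstValueFactory interning:

  #include <iostream>
  #include <string>

  // Field i gets a ".class-field-i-name" temporary for its (computed) name
  // and a ".class-field-i-func" temporary for its initializer closure.
  std::string ClassFieldVariableName(bool is_name, int index) {
    return ".class-field-" + std::to_string(index) +
           (is_name ? "-name" : "-func");
  }

  int main() {
    for (int i = 0; i < 2; ++i) {
      std::cout << ClassFieldVariableName(true, i) << " / "
                << ClassFieldVariableName(false, i) << "\n";
    }
    // .class-field-0-name / .class-field-0-func
    // .class-field-1-name / .class-field-1-func
  }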
+
+FunctionLiteral* Parser::SynthesizeClassFieldInitializer(int count) {
+  DCHECK(count > 0);
+  // Makes a function which reads the names and initializers for each class
+  // field out of deterministically named local variables and sets each property
+  // to the result of evaluating its corresponding initializer in turn.
+
+  // This produces a function which looks like
+  // function () {
+  //   this[.class-field-0-name] = .class-field-0-func();
+  //   this[.class-field-1-name] = .class-field-1-func();
+  //   [...]
+  //   this[.class-field-n-name] = .class-field-n-func();
+  //   return this;
+  // }
+  // except that it performs defineProperty, so that instead of '=' it has
+  // %DefineDataPropertyInLiteral(this, .class-field-0-name,
+  // .class-field-0-func(),
+  //   DONT_ENUM, false)
+
   RaiseLanguageMode(STRICT);
+  FunctionKind kind = FunctionKind::kConciseMethod;
+  DeclarationScope* initializer_scope = NewFunctionScope(kind);
+  SetLanguageMode(initializer_scope, language_mode());
+  initializer_scope->set_start_position(scanner()->location().end_pos);
+  initializer_scope->set_end_position(scanner()->location().end_pos);
+  FunctionState initializer_state(&function_state_, &scope_state_,
+                                  initializer_scope);
+  ZoneList<Statement*>* body = new (zone()) ZoneList<Statement*>(count, zone());
+  for (int i = 0; i < count; ++i) {
+    const AstRawString* name =
+        ClassFieldVariableName(true, ast_value_factory(), i);
+    VariableProxy* name_proxy = scope()->NewUnresolved(factory(), name);
+    const AstRawString* function_name =
+        ClassFieldVariableName(false, ast_value_factory(), i);
+    VariableProxy* function_proxy =
+        scope()->NewUnresolved(factory(), function_name);
+    ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(2, zone());
+    args->Add(function_proxy, zone());
+    args->Add(ThisExpression(kNoSourcePosition), zone());
+    Expression* call = factory()->NewCallRuntime(Runtime::kInlineCall, args,
+                                                 kNoSourcePosition);
+    ZoneList<Expression*>* define_property_args =
+        new (zone()) ZoneList<Expression*>(5, zone());
+    define_property_args->Add(ThisExpression(kNoSourcePosition), zone());
+    define_property_args->Add(name_proxy, zone());
+    define_property_args->Add(call, zone());
+    define_property_args->Add(
+        factory()->NewNumberLiteral(DONT_ENUM, kNoSourcePosition), zone());
+    define_property_args->Add(
+        factory()->NewNumberLiteral(
+            false,  // TODO(bakkot) function name inference a la class { x =
+                    // function(){}; static y = function(){}; }
+            kNoSourcePosition),
+        zone());
+    body->Add(factory()->NewExpressionStatement(
+                  factory()->NewCallRuntime(
+                      Runtime::kDefineDataProperty,
+                      define_property_args,  // TODO(bakkot) verify that this is
+                      // the same as object_define_property
+                      kNoSourcePosition),
+                  kNoSourcePosition),
+              zone());
+  }
+  body->Add(factory()->NewReturnStatement(ThisExpression(kNoSourcePosition),
+                                          kNoSourcePosition),
+            zone());
+  FunctionLiteral* function_literal = factory()->NewFunctionLiteral(
+      ast_value_factory()->empty_string(), initializer_scope, body,
+      initializer_state.materialized_literal_count(),
+      initializer_state.expected_property_count(), 0,
+      FunctionLiteral::kNoDuplicateParameters,
+      FunctionLiteral::kAnonymousExpression,
+      FunctionLiteral::kShouldLazyCompile, initializer_scope->start_position());
+  function_literal->set_is_class_field_initializer(true);
+  function_literal->scope()->set_arity(count);
+  return function_literal;
+}
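
To make the synthesized shape concrete, a small standalone C++ sketch that prints, for a given field count, the statements described in the comment above (the real AST uses %DefineDataPropertyInLiteral rather than a plain assignment):

  #include <iostream>

  // Prints the conceptual body of the synthesized initializer for `count`
  // class fields; illustration only, mirroring the comment above.
  void PrintSynthesizedInitializer(int count) {
    std::cout << "function () {\n";
    for (int i = 0; i < count; ++i) {
      std::cout << "  this[.class-field-" << i << "-name] = "
                << ".class-field-" << i << "-func();\n";
    }
    std::cout << "  return this;\n}\n";
  }

  int main() { PrintSynthesizedInitializer(2); }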
+
+FunctionLiteral* Parser::InsertClassFieldInitializer(
+    FunctionLiteral* constructor) {
+  Statement* call_initializer = factory()->NewExpressionStatement(
+      CallClassFieldInitializer(
+          constructor->scope(),
+          constructor->scope()->NewUnresolved(
+              factory(), ast_value_factory()->this_string(), kNoSourcePosition,
+              kNoSourcePosition + 4, THIS_VARIABLE)),
+      kNoSourcePosition);
+  constructor->body()->InsertAt(0, call_initializer, zone());
+  return constructor;
+}
+
+// If a class name is specified, this method declares the class variable
+// and sets class_info->proxy to point to that name.
+void Parser::DeclareClassVariable(const AstRawString* name, Scope* block_scope,
+                                  ClassInfo* class_info, int class_token_pos,
+                                  bool* ok) {
 #ifdef DEBUG
   scope()->SetScopeName(name);
 #endif
 
-  VariableProxy* proxy = nullptr;
   if (name != nullptr) {
-    proxy = NewUnresolved(name);
-    // TODO(verwaest): declare via block_state.
-    Declaration* declaration =
-        factory()->NewVariableDeclaration(proxy, block_state.scope(), pos);
+    class_info->proxy = factory()->NewVariableProxy(name, NORMAL_VARIABLE);
+    Declaration* declaration = factory()->NewVariableDeclaration(
+        class_info->proxy, block_scope, class_token_pos);
     Declare(declaration, DeclarationDescriptor::NORMAL, CONST,
-            DefaultInitializationFlag(CONST), CHECK_OK);
+            Variable::DefaultInitializationFlag(CONST), ok);
+  }
+}
+
+// This method declares a property of the given class.  It updates the
+// following fields of class_info, as appropriate:
+//   - constructor
+//   - static_initializer_var
+//   - instance_field_initializers
+//   - properties
+void Parser::DeclareClassProperty(const AstRawString* class_name,
+                                  ClassLiteralProperty* property,
+                                  ClassInfo* class_info, bool* ok) {
+  if (class_info->has_seen_constructor && class_info->constructor == nullptr) {
+    class_info->constructor = GetPropertyValue(property)->AsFunctionLiteral();
+    DCHECK_NOT_NULL(class_info->constructor);
+    class_info->constructor->set_raw_name(
+        class_name != nullptr ? class_name
+                              : ast_value_factory()->empty_string());
+    return;
   }
 
-  Expression* extends = nullptr;
-  if (Check(Token::EXTENDS)) {
-    block_state.set_start_position(scanner()->location().end_pos);
-    ExpressionClassifier extends_classifier(this);
-    extends = ParseLeftHandSideExpression(&extends_classifier, CHECK_OK);
-    CheckNoTailCallExpressions(&extends_classifier, CHECK_OK);
-    RewriteNonPattern(&extends_classifier, CHECK_OK);
-    if (classifier != nullptr) {
-      classifier->Accumulate(&extends_classifier,
-                             ExpressionClassifier::ExpressionProductions);
-    }
-  } else {
-    block_state.set_start_position(scanner()->location().end_pos);
-  }
-
-
-  ClassLiteralChecker checker(this);
-  ZoneList<ObjectLiteral::Property*>* properties = NewPropertyList(4, zone());
-  FunctionLiteral* constructor = nullptr;
-  bool has_seen_constructor = false;
-
-  Expect(Token::LBRACE, CHECK_OK);
-
-  const bool has_extends = extends != nullptr;
-  while (peek() != Token::RBRACE) {
-    if (Check(Token::SEMICOLON)) continue;
-    FuncNameInferrer::State fni_state(fni_);
-    const bool in_class = true;
-    bool is_computed_name = false;  // Classes do not care about computed
-                                    // property names here.
-    ExpressionClassifier property_classifier(this);
-    const AstRawString* property_name = nullptr;
-    ObjectLiteral::Property* property = ParsePropertyDefinition(
-        &checker, in_class, has_extends, MethodKind::kNormal, &is_computed_name,
-        &has_seen_constructor, &property_classifier, &property_name, CHECK_OK);
-    RewriteNonPattern(&property_classifier, CHECK_OK);
-    if (classifier != nullptr) {
-      classifier->Accumulate(&property_classifier,
-                             ExpressionClassifier::ExpressionProductions);
-    }
-
-    if (has_seen_constructor && constructor == nullptr) {
-      constructor = GetPropertyValue(property)->AsFunctionLiteral();
-      DCHECK_NOT_NULL(constructor);
-      constructor->set_raw_name(
-          name != nullptr ? name : ast_value_factory()->empty_string());
+  if (property->kind() == ClassLiteralProperty::FIELD) {
+    DCHECK(allow_harmony_class_fields());
+    if (property->is_static()) {
+      if (class_info->static_initializer_var == nullptr) {
+        class_info->static_initializer_var =
+            NewTemporary(ast_value_factory()->empty_string());
+      }
+      // TODO(bakkot) only do this conditionally
+      Expression* function = InstallHomeObject(
+          property->value(),
+          factory()->NewVariableProxy(class_info->static_initializer_var));
+      ZoneList<Expression*>* args =
+          new (zone()) ZoneList<Expression*>(2, zone());
+      args->Add(function, zone());
+      args->Add(factory()->NewVariableProxy(class_info->static_initializer_var),
+                zone());
+      Expression* call = factory()->NewCallRuntime(Runtime::kInlineCall, args,
+                                                   kNoSourcePosition);
+      property->set_value(call);
     } else {
-      properties->Add(property, zone());
-    }
-
-    if (fni_ != nullptr) fni_->Infer();
-
-    if (property_name != ast_value_factory()->constructor_string()) {
-      SetFunctionNameFromPropertyName(property, property_name);
+      // if (is_computed_name) { // TODO(bakkot) figure out why this is
+      // necessary for non-computed names in full-codegen
+      ZoneList<Expression*>* to_name_args =
+          new (zone()) ZoneList<Expression*>(1, zone());
+      to_name_args->Add(property->key(), zone());
+      property->set_key(factory()->NewCallRuntime(
+          Runtime::kToName, to_name_args, kNoSourcePosition));
+      //}
+      const AstRawString* name = ClassFieldVariableName(
+          true, ast_value_factory(),
+          class_info->instance_field_initializers->length());
+      VariableProxy* name_proxy =
+          factory()->NewVariableProxy(name, NORMAL_VARIABLE);
+      Declaration* name_declaration = factory()->NewVariableDeclaration(
+          name_proxy, scope(), kNoSourcePosition);
+      Variable* name_var =
+          Declare(name_declaration, DeclarationDescriptor::NORMAL, CONST,
+                  kNeedsInitialization, ok, scope());
+      DCHECK(*ok);
+      if (!*ok) return;
+      class_info->instance_field_initializers->Add(property->value(), zone());
+      property->set_value(factory()->NewVariableProxy(name_var));
     }
   }
+  class_info->properties->Add(property, zone());
+}
 
-  Expect(Token::RBRACE, CHECK_OK);
+// This method rewrites a class literal into a do-expression.
+// It uses the following fields of class_info:
+//   - constructor (if missing, it updates it with a default constructor)
+//   - proxy
+//   - extends
+//   - static_initializer_var
+//   - instance_field_initializers
+//   - properties
+Expression* Parser::RewriteClassLiteral(const AstRawString* name,
+                                        ClassInfo* class_info, int pos,
+                                        bool* ok) {
   int end_pos = scanner()->location().end_pos;
-
-  if (constructor == nullptr) {
-    constructor = DefaultConstructor(name, has_extends, pos, end_pos,
-                                     block_state.language_mode());
-  }
-
-  // Note that we do not finalize this block scope because it is
-  // used as a sentinel value indicating an anonymous class.
-  block_state.set_end_position(end_pos);
-
-  if (name != nullptr) {
-    DCHECK_NOT_NULL(proxy);
-    proxy->var()->set_initializer_position(end_pos);
-  }
-
   Block* do_block = factory()->NewBlock(nullptr, 1, false, pos);
   Variable* result_var = NewTemporary(ast_value_factory()->empty_string());
-  do_block->set_scope(block_state.FinalizedBlockScope());
   DoExpression* do_expr = factory()->NewDoExpression(do_block, result_var, pos);
 
+  bool has_extends = class_info->extends != nullptr;
+  bool has_instance_fields =
+      class_info->instance_field_initializers->length() > 0;
+  DCHECK(!has_instance_fields || allow_harmony_class_fields());
+  bool has_default_constructor = class_info->constructor == nullptr;
+  if (has_default_constructor) {
+    class_info->constructor =
+        DefaultConstructor(name, has_extends, has_instance_fields, pos, end_pos,
+                           scope()->language_mode());
+  }
+
+  if (has_instance_fields && !has_extends) {
+    class_info->constructor =
+        InsertClassFieldInitializer(class_info->constructor);
+    class_info->constructor->set_requires_class_field_init(true);
+  }  // The derived case is handled by rewriting super calls.
+
+  scope()->set_end_position(end_pos);
+
+  if (name != nullptr) {
+    DCHECK_NOT_NULL(class_info->proxy);
+    class_info->proxy->var()->set_initializer_position(end_pos);
+  }
+
   ClassLiteral* class_literal = factory()->NewClassLiteral(
-      proxy, extends, constructor, properties, pos, end_pos);
+      class_info->proxy, class_info->extends, class_info->constructor,
+      class_info->properties, pos, end_pos);
+
+  if (class_info->static_initializer_var != nullptr) {
+    class_literal->set_static_initializer_proxy(
+        factory()->NewVariableProxy(class_info->static_initializer_var));
+  }
 
   do_block->statements()->Add(
-      factory()->NewExpressionStatement(class_literal, pos), zone());
-  do_expr->set_represented_function(constructor);
-  Rewriter::Rewrite(this, GetClosureScope(), do_expr, ast_value_factory());
+      factory()->NewExpressionStatement(
+          factory()->NewAssignment(Token::ASSIGN,
+                                   factory()->NewVariableProxy(result_var),
+                                   class_literal, kNoSourcePosition),
+          pos),
+      zone());
+  if (allow_harmony_class_fields() &&
+      (has_instance_fields || (has_extends && !has_default_constructor))) {
+    // Default constructors for derived classes without fields will not try to
+    // read this variable, so there's no need to create it.
+    const AstRawString* init_fn_name =
+        ast_value_factory()->dot_class_field_init_string();
+    Variable* init_fn_var = scope()->DeclareLocal(
+        init_fn_name, CONST, kCreatedInitialized, NORMAL_VARIABLE);
+    Expression* initializer =
+        has_instance_fields
+            ? static_cast<Expression*>(SynthesizeClassFieldInitializer(
+                  class_info->instance_field_initializers->length()))
+            : factory()->NewBooleanLiteral(false, kNoSourcePosition);
+    Assignment* assignment = factory()->NewAssignment(
+        Token::INIT, factory()->NewVariableProxy(init_fn_var), initializer,
+        kNoSourcePosition);
+    do_block->statements()->Add(
+        factory()->NewExpressionStatement(assignment, kNoSourcePosition),
+        zone());
+  }
+  for (int i = 0; i < class_info->instance_field_initializers->length(); ++i) {
+    const AstRawString* function_name =
+        ClassFieldVariableName(false, ast_value_factory(), i);
+    VariableProxy* function_proxy =
+        factory()->NewVariableProxy(function_name, NORMAL_VARIABLE);
+    Declaration* function_declaration = factory()->NewVariableDeclaration(
+        function_proxy, scope(), kNoSourcePosition);
+    Variable* function_var =
+        Declare(function_declaration, DeclarationDescriptor::NORMAL, CONST,
+                kNeedsInitialization, ok, scope());
+    if (!*ok) return nullptr;
+    Property* prototype_property = factory()->NewProperty(
+        factory()->NewVariableProxy(result_var),
+        factory()->NewStringLiteral(ast_value_factory()->prototype_string(),
+                                    kNoSourcePosition),
+        kNoSourcePosition);
+    Expression* function_value = InstallHomeObject(
+        class_info->instance_field_initializers->at(i),
+        prototype_property);  // TODO(bakkot) ideally this would be conditional,
+                              // especially in trivial cases
+    Assignment* function_assignment = factory()->NewAssignment(
+        Token::INIT, factory()->NewVariableProxy(function_var), function_value,
+        kNoSourcePosition);
+    do_block->statements()->Add(factory()->NewExpressionStatement(
+                                    function_assignment, kNoSourcePosition),
+                                zone());
+  }
+  do_block->set_scope(scope()->FinalizeBlockScope());
+  do_expr->set_represented_function(class_info->constructor);
 
   return do_expr;
 }
 
-
-Expression* Parser::ParseV8Intrinsic(bool* ok) {
-  // CallRuntime ::
-  //   '%' Identifier Arguments
-
-  int pos = peek_position();
-  Expect(Token::MOD, CHECK_OK);
-  // Allow "eval" or "arguments" for backward compatibility.
-  const AstRawString* name = ParseIdentifier(kAllowRestrictedIdentifiers,
-                                             CHECK_OK);
-  Scanner::Location spread_pos;
-  ExpressionClassifier classifier(this);
-  ZoneList<Expression*>* args =
-      ParseArguments(&spread_pos, &classifier, CHECK_OK);
-
-  DCHECK(!spread_pos.IsValid());
-
-  if (extension_ != NULL) {
-    // The extension structures are only accessible while parsing the
-    // very first time not when reparsing because of lazy compilation.
-    GetClosureScope()->ForceEagerCompilation();
-  }
-
-  const Runtime::Function* function = Runtime::FunctionForName(name->string());
-
-  if (function != NULL) {
-    // Check for possible name clash.
-    DCHECK_EQ(Context::kNotFound,
-              Context::IntrinsicIndexForName(name->string()));
-    // Check for built-in IS_VAR macro.
-    if (function->function_id == Runtime::kIS_VAR) {
-      DCHECK_EQ(Runtime::RUNTIME, function->intrinsic_type);
-      // %IS_VAR(x) evaluates to x if x is a variable,
-      // leads to a parse error otherwise.  Could be implemented as an
-      // inline function %_IS_VAR(x) to eliminate this special case.
-      if (args->length() == 1 && args->at(0)->AsVariableProxy() != NULL) {
-        return args->at(0);
-      } else {
-        ReportMessage(MessageTemplate::kNotIsvar);
-        *ok = false;
-        return NULL;
-      }
-    }
-
-    // Check that the expected number of arguments are being passed.
-    if (function->nargs != -1 && function->nargs != args->length()) {
-      ReportMessage(MessageTemplate::kRuntimeWrongNumArgs);
-      *ok = false;
-      return NULL;
-    }
-
-    return factory()->NewCallRuntime(function, args, pos);
-  }
-
-  int context_index = Context::IntrinsicIndexForName(name->string());
-
-  // Check that the function is defined.
-  if (context_index == Context::kNotFound) {
-    ReportMessage(MessageTemplate::kNotDefined, name);
-    *ok = false;
-    return NULL;
-  }
-
-  return factory()->NewCallRuntime(context_index, args, pos);
-}
-
-
 Literal* Parser::GetLiteralUndefined(int position) {
   return factory()->NewUndefinedLiteral(position);
 }
@@ -5045,100 +3688,22 @@
   }
 }
 
-void Parser::InsertSloppyBlockFunctionVarBindings(DeclarationScope* scope,
-                                                  Scope* complex_params_scope,
-                                                  bool* ok) {
-  // For each variable which is used as a function declaration in a sloppy
-  // block,
-  SloppyBlockFunctionMap* map = scope->sloppy_block_function_map();
-  for (ZoneHashMap::Entry* p = map->Start(); p != nullptr; p = map->Next(p)) {
-    AstRawString* name = static_cast<AstRawString*>(p->key);
-
-    // If the variable wouldn't conflict with a lexical declaration
-    // or parameter,
-
-    // Check if there's a conflict with a parameter.
-    // This depends on the fact that functions always have a scope solely to
-    // hold complex parameters, and the names local to that scope are
-    // precisely the names of the parameters. IsDeclaredParameter(name) does
-    // not hold for names declared by complex parameters, nor are those
-    // bindings necessarily declared lexically, so we have to check for them
-    // explicitly. On the other hand, if there are not complex parameters,
-    // it is sufficient to just check IsDeclaredParameter.
-    if (complex_params_scope != nullptr) {
-      if (complex_params_scope->LookupLocal(name) != nullptr) {
-        continue;
-      }
-    } else {
-      if (scope->IsDeclaredParameter(name)) {
-        continue;
-      }
-    }
-
-    bool var_created = false;
-
-    // Write in assignments to var for each block-scoped function declaration
-    auto delegates = static_cast<SloppyBlockFunctionStatement*>(p->value);
-
-    DeclarationScope* decl_scope = scope;
-    while (decl_scope->is_eval_scope()) {
-      decl_scope = decl_scope->outer_scope()->GetDeclarationScope();
-    }
-    Scope* outer_scope = decl_scope->outer_scope();
-
-    for (SloppyBlockFunctionStatement* delegate = delegates;
-         delegate != nullptr; delegate = delegate->next()) {
-      // Check if there's a conflict with a lexical declaration
-      Scope* query_scope = delegate->scope()->outer_scope();
-      Variable* var = nullptr;
-      bool should_hoist = true;
-
-      // Note that we perform this loop for each delegate named 'name',
-      // which may duplicate work if those delegates share scopes.
-      // It is not sufficient to just do a Lookup on query_scope: for
-      // example, that does not prevent hoisting of the function in
-      // `{ let e; try {} catch (e) { function e(){} } }`
-      do {
-        var = query_scope->LookupLocal(name);
-        if (var != nullptr && IsLexicalVariableMode(var->mode())) {
-          should_hoist = false;
-          break;
-        }
-        query_scope = query_scope->outer_scope();
-      } while (query_scope != outer_scope);
-
-      if (!should_hoist) continue;
-
-      // Declare a var-style binding for the function in the outer scope
-      if (!var_created) {
-        var_created = true;
-        VariableProxy* proxy = scope->NewUnresolved(factory(), name);
-        Declaration* declaration =
-            factory()->NewVariableDeclaration(proxy, scope, kNoSourcePosition);
-        Declare(declaration, DeclarationDescriptor::NORMAL, VAR,
-                DefaultInitializationFlag(VAR), ok, scope);
-        DCHECK(ok);  // Based on the preceding check, this should not fail
-        if (!ok) return;
-      }
-
-      // Read from the local lexical scope and write to the function scope
-      VariableProxy* to = scope->NewUnresolved(factory(), name);
-      VariableProxy* from = delegate->scope()->NewUnresolved(factory(), name);
-      Expression* assignment =
-          factory()->NewAssignment(Token::ASSIGN, to, from, kNoSourcePosition);
-      Statement* statement =
-          factory()->NewExpressionStatement(assignment, kNoSourcePosition);
-      delegate->set_statement(statement);
-    }
+void Parser::InsertSloppyBlockFunctionVarBindings(DeclarationScope* scope) {
+  // For the outermost eval scope, we cannot hoist during parsing: let
+  // declarations in the surrounding scope may prevent hoisting, but the
+  // information is inaccessible during parsing. In this case, we hoist later in
+  // DeclarationScope::Analyze.
+  if (scope->is_eval_scope() && scope->outer_scope() == original_scope_) {
+    return;
   }
+  scope->HoistSloppyBlockFunctions(factory());
 }
 
-
 // ----------------------------------------------------------------------------
 // Parser support
 
 bool Parser::TargetStackContainsLabel(const AstRawString* label) {
-  for (Target* t = target_stack_; t != NULL; t = t->previous()) {
+  for (ParserTarget* t = target_stack_; t != NULL; t = t->previous()) {
     if (ContainsLabel(t->statement()->labels(), label)) return true;
   }
   return false;
@@ -5148,7 +3713,7 @@
 BreakableStatement* Parser::LookupBreakTarget(const AstRawString* label,
                                               bool* ok) {
   bool anonymous = label == NULL;
-  for (Target* t = target_stack_; t != NULL; t = t->previous()) {
+  for (ParserTarget* t = target_stack_; t != NULL; t = t->previous()) {
     BreakableStatement* stat = t->statement();
     if ((anonymous && stat->is_target_for_anonymous()) ||
         (!anonymous && ContainsLabel(stat->labels(), label))) {
@@ -5162,7 +3727,7 @@
 IterationStatement* Parser::LookupContinueTarget(const AstRawString* label,
                                                  bool* ok) {
   bool anonymous = label == NULL;
-  for (Target* t = target_stack_; t != NULL; t = t->previous()) {
+  for (ParserTarget* t = target_stack_; t != NULL; t = t->previous()) {
     IterationStatement* stat = t->statement()->AsIterationStatement();
     if (stat == NULL) continue;
 
@@ -5188,7 +3753,7 @@
 
 
 void Parser::Internalize(Isolate* isolate, Handle<Script> script, bool error) {
-  // Internalize strings.
+  // Internalize strings and values.
   ast_value_factory()->Internalize(isolate);
 
   // Error processing.
@@ -5240,12 +3805,6 @@
   DCHECK(parsing_on_main_thread_);
   Isolate* isolate = info->isolate();
   pre_parse_timer_ = isolate->counters()->pre_parse();
-  if (FLAG_trace_parse || allow_natives() || extension_ != NULL) {
-    // If intrinsics are allowed, the Parser cannot operate independent of the
-    // V8 heap because of Runtime. Tell the string table to internalize strings
-    // and values right after they're created.
-    ast_value_factory()->Internalize(isolate);
-  }
 
   if (info->is_lazy()) {
     DCHECK(!info->is_eval());
@@ -5261,7 +3820,6 @@
   info->set_literal(result);
 
   Internalize(isolate, info->script(), result == NULL);
-  DCHECK(ast_value_factory()->IsInternalized());
   return (result != NULL);
 }
 
@@ -5282,11 +3840,11 @@
     stream_ptr = info->character_stream();
   } else {
     DCHECK(info->character_stream() == nullptr);
-    stream.reset(new ExternalStreamingStream(info->source_stream(),
-                                             info->source_stream_encoding()));
+    stream.reset(ScannerStream::For(info->source_stream(),
+                                    info->source_stream_encoding()));
     stream_ptr = stream.get();
   }
-  DCHECK(info->context().is_null() || info->context()->IsNativeContext());
+  DCHECK(info->maybe_outer_scope_info().is_null());
 
   DCHECK(original_scope_);
 
@@ -5431,11 +3989,9 @@
   return running_hash;
 }
 
-
-ZoneList<v8::internal::Expression*>* Parser::PrepareSpreadArguments(
-    ZoneList<v8::internal::Expression*>* list) {
-  ZoneList<v8::internal::Expression*>* args =
-      new (zone()) ZoneList<v8::internal::Expression*>(1, zone());
+ZoneList<Expression*>* Parser::PrepareSpreadArguments(
+    ZoneList<Expression*>* list) {
+  ZoneList<Expression*>* args = new (zone()) ZoneList<Expression*>(1, zone());
   if (list->length() == 1) {
     // Spread-call with single spread argument produces an InternalArray
     // containing the values from the array.
@@ -5462,8 +4018,8 @@
     int n = list->length();
     while (i < n) {
       if (!list->at(i)->IsSpread()) {
-        ZoneList<v8::internal::Expression*>* unspread =
-            new (zone()) ZoneList<v8::internal::Expression*>(1, zone());
+        ZoneList<Expression*>* unspread =
+            new (zone()) ZoneList<Expression*>(1, zone());
 
         // Push array of unspread parameters
         while (i < n && !list->at(i)->IsSpread()) {
@@ -5478,15 +4034,15 @@
       }
 
       // Push eagerly spread argument
-      ZoneList<v8::internal::Expression*>* spread_list =
-          new (zone()) ZoneList<v8::internal::Expression*>(1, zone());
+      ZoneList<Expression*>* spread_list =
+          new (zone()) ZoneList<Expression*>(1, zone());
       spread_list->Add(list->at(i++)->AsSpread()->expression(), zone());
       args->Add(factory()->NewCallRuntime(Context::SPREAD_ITERABLE_INDEX,
                                           spread_list, kNoSourcePosition),
                 zone());
     }
 
-    list = new (zone()) ZoneList<v8::internal::Expression*>(1, zone());
+    list = new (zone()) ZoneList<Expression*>(1, zone());
     list->Add(factory()->NewCallRuntime(Context::SPREAD_ARGUMENTS_INDEX, args,
                                         kNoSourcePosition),
               zone());
@@ -5495,10 +4051,8 @@
   UNREACHABLE();
 }
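
A standalone sketch of the partitioning the code above performs on a spread call's argument list: maximal runs of plain arguments are grouped into one array segment, and each spread argument becomes its own iterate-this segment, before everything is handed to the spread-arguments runtime call. The Arg type and string output are invented for the sketch.

  #include <iostream>
  #include <string>
  #include <vector>

  struct Arg {
    std::string expr;
    bool is_spread;
  };

  // Partition f(a, b, ...c, d) into [[a, b], spread(c), [d]].
  std::vector<std::string> PartitionSpreadArguments(
      const std::vector<Arg>& args) {
    std::vector<std::string> segments;
    size_t i = 0;
    while (i < args.size()) {
      if (!args[i].is_spread) {
        // Collect a maximal run of unspread arguments into one array segment.
        std::string run = "[";
        while (i < args.size() && !args[i].is_spread) {
          if (run.size() > 1) run += ", ";
          run += args[i].expr;
          ++i;
        }
        segments.push_back(run + "]");
      } else {
        // Each spread argument is iterated eagerly as its own segment.
        segments.push_back("spread(" + args[i].expr + ")");
        ++i;
      }
    }
    return segments;
  }

  int main() {
    std::vector<Arg> args = {
        {"a", false}, {"b", false}, {"c", true}, {"d", false}};
    for (const std::string& s : PartitionSpreadArguments(args)) {
      std::cout << s << "\n";
    }
    // [a, b]
    // spread(c)
    // [d]
  }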
 
-
 Expression* Parser::SpreadCall(Expression* function,
-                               ZoneList<v8::internal::Expression*>* args,
-                               int pos) {
+                               ZoneList<Expression*>* args, int pos) {
   if (function->IsSuperCallReference()) {
     // Super calls
     // $super_constructor = %_GetSuperConstructor(<this-function>)
@@ -5540,10 +4094,8 @@
   }
 }
 
-
 Expression* Parser::SpreadCallNew(Expression* function,
-                                  ZoneList<v8::internal::Expression*>* args,
-                                  int pos) {
+                                  ZoneList<Expression*>* args, int pos) {
   args->InsertAt(0, function, zone());
 
   return factory()->NewCallRuntime(Context::REFLECT_CONSTRUCT_INDEX, args, pos);
@@ -5562,90 +4114,141 @@
   scope->SetLanguageMode(mode);
 }
 
-
-void Parser::RaiseLanguageMode(LanguageMode mode) {
-  LanguageMode old = scope()->language_mode();
-  SetLanguageMode(scope(), old > mode ? old : mode);
+void Parser::SetAsmModule() {
+  // Store the usage count; the actual use counter on the isolate is
+  // incremented after parsing is done.
+  ++use_counts_[v8::Isolate::kUseAsm];
+  DCHECK(scope()->is_declaration_scope());
+  scope()->AsDeclarationScope()->set_asm_module();
 }
 
 void Parser::MarkCollectedTailCallExpressions() {
   const ZoneList<Expression*>& tail_call_expressions =
       function_state_->tail_call_expressions().expressions();
   for (int i = 0; i < tail_call_expressions.length(); ++i) {
-    Expression* expression = tail_call_expressions[i];
-    // If only FLAG_harmony_explicit_tailcalls is enabled then expression
-    // must be a Call expression.
-    DCHECK(FLAG_harmony_tailcalls || !FLAG_harmony_explicit_tailcalls ||
-           expression->IsCall());
-    MarkTailPosition(expression);
+    MarkTailPosition(tail_call_expressions[i]);
   }
 }
 
-Expression* ParserBaseTraits<Parser>::ExpressionListToExpression(
-    ZoneList<Expression*>* args) {
-  AstNodeFactory* factory = delegate()->factory();
+Expression* Parser::ExpressionListToExpression(ZoneList<Expression*>* args) {
   Expression* expr = args->at(0);
   for (int i = 1; i < args->length(); ++i) {
-    expr = factory->NewBinaryOperation(Token::COMMA, expr, args->at(i),
-                                       expr->position());
+    expr = factory()->NewBinaryOperation(Token::COMMA, expr, args->at(i),
+                                         expr->position());
   }
   return expr;
 }
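
The helper above is a left fold into nested comma expressions, so the last argument provides the value of the whole expression. A toy standalone sketch over strings:

  #include <iostream>
  #include <string>
  #include <vector>

  // Folds {a, b, c} into "((a, b), c)", mirroring the comma-operation nesting
  // built by the parser; strings stand in for Expression* nodes.
  std::string ExpressionListToExpression(const std::vector<std::string>& exprs) {
    std::string result = exprs.at(0);
    for (size_t i = 1; i < exprs.size(); ++i) {
      result = "(" + result + ", " + exprs[i] + ")";
    }
    return result;
  }

  int main() {
    std::cout << ExpressionListToExpression({"a", "b", "c"}) << "\n";
    // ((a, b), c)
  }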
 
+// This method introduces the statement that initializes the generator object
+// when desugaring the body of async_function.
+void Parser::PrepareAsyncFunctionBody(ZoneList<Statement*>* body,
+                                      FunctionKind kind, int pos) {
+  // function async_function() {
+  //   .generator_object = %CreateGeneratorObject();
+  //   BuildRejectPromiseOnException({
+  //     ... block ...
+  //     return %ResolvePromise(.promise, expr), .promise;
+  //   })
+  // }
+
+  Variable* temp =
+      NewTemporary(ast_value_factory()->dot_generator_object_string());
+  function_state_->set_generator_object_variable(temp);
+
+  Expression* init_generator_variable = factory()->NewAssignment(
+      Token::INIT, factory()->NewVariableProxy(temp),
+      BuildCreateJSGeneratorObject(pos, kind), kNoSourcePosition);
+  body->Add(factory()->NewExpressionStatement(init_generator_variable,
+                                              kNoSourcePosition),
+            zone());
+}
+
+// This method completes the desugaring of the body of async_function.
+void Parser::RewriteAsyncFunctionBody(ZoneList<Statement*>* body, Block* block,
+                                      Expression* return_value, bool* ok) {
+  // function async_function() {
+  //   .generator_object = %CreateGeneratorObject();
+  //   BuildRejectPromiseOnException({
+  //     ... block ...
+  //     return %ResolvePromise(.promise, expr), .promise;
+  //   })
+  // }
+
+  return_value = BuildResolvePromise(return_value, return_value->position());
+  block->statements()->Add(
+      factory()->NewReturnStatement(return_value, return_value->position()),
+      zone());
+  block = BuildRejectPromiseOnException(block, CHECK_OK_VOID);
+  body->Add(block, zone());
+}
+
 Expression* Parser::RewriteAwaitExpression(Expression* value, int await_pos) {
-  // yield %AsyncFunctionAwait(.generator_object, <operand>)
+  // yield do {
+  //   tmp = <operand>;
+  //   %AsyncFunctionAwait(.generator_object, tmp, .promise);
+  //   .promise
+  // }
+  // The value of the expression is returned to the caller of the async
+  // function for the first yield statement; for this, .promise is the
+  // appropriate return value, being a Promise that will be fulfilled or
+  // rejected with the appropriate value by the desugaring. Subsequent yield
+  // occurrences will return to the AsyncFunctionNext call within the
+  // implementation of the intermediate throwaway Promise's then handler.
+  // This handler has nothing useful to do with the value, as the Promise is
+  // ignored. If we yielded the value of the throwaway Promise that
+  // AsyncFunctionAwait creates as an intermediate, it would create a memory
+  // leak; we must return .promise instead.
+  // The operand needs to be evaluated in a separate statement in order to get
+  // a break location, and the .promise needs to be read earlier so that it
+  // doesn't insert a false location.
+  // TODO(littledan): investigate why this ordering is needed in more detail.
   Variable* generator_object_variable =
-      delegate()->function_state_->generator_object_variable();
+      function_state_->generator_object_variable();
 
   // If generator_object_variable is null,
+  // TODO(littledan): Is this necessary?
   if (!generator_object_variable) return value;
 
-  auto factory = delegate()->factory();
   const int nopos = kNoSourcePosition;
 
-  Variable* temp_var =
-      delegate()->NewTemporary(delegate()->ast_value_factory()->empty_string());
-  VariableProxy* temp_proxy = factory->NewVariableProxy(temp_var);
-  Block* do_block = factory->NewBlock(nullptr, 2, false, nopos);
+  Block* do_block = factory()->NewBlock(nullptr, 2, false, nopos);
+
+  Variable* promise = PromiseVariable();
 
   // Wrap value evaluation to provide a break location.
-  Expression* value_assignment =
-      factory->NewAssignment(Token::ASSIGN, temp_proxy, value, nopos);
+  Variable* temp_var = NewTemporary(ast_value_factory()->empty_string());
+  Expression* value_assignment = factory()->NewAssignment(
+      Token::ASSIGN, factory()->NewVariableProxy(temp_var), value, nopos);
   do_block->statements()->Add(
-      factory->NewExpressionStatement(value_assignment, value->position()),
+      factory()->NewExpressionStatement(value_assignment, value->position()),
       zone());
 
   ZoneList<Expression*>* async_function_await_args =
-      new (zone()) ZoneList<Expression*>(2, zone());
+      new (zone()) ZoneList<Expression*>(3, zone());
   Expression* generator_object =
-      factory->NewVariableProxy(generator_object_variable);
+      factory()->NewVariableProxy(generator_object_variable);
   async_function_await_args->Add(generator_object, zone());
-  async_function_await_args->Add(temp_proxy, zone());
-  Expression* async_function_await = delegate()->factory()->NewCallRuntime(
-      Context::ASYNC_FUNCTION_AWAIT_INDEX, async_function_await_args, nopos);
-  // Wrap await to provide a break location between value evaluation and yield.
-  Expression* await_assignment = factory->NewAssignment(
-      Token::ASSIGN, temp_proxy, async_function_await, nopos);
+  async_function_await_args->Add(factory()->NewVariableProxy(temp_var), zone());
+  async_function_await_args->Add(factory()->NewVariableProxy(promise), zone());
+
+  // The parser emits calls to AsyncFunctionAwaitCaught, but the
+  // AstNumberingVisitor will rewrite this to AsyncFunctionAwaitUncaught
+  // if there is no local enclosing try/catch block.
+  Expression* async_function_await =
+      factory()->NewCallRuntime(Context::ASYNC_FUNCTION_AWAIT_CAUGHT_INDEX,
+                                async_function_await_args, nopos);
   do_block->statements()->Add(
-      factory->NewExpressionStatement(await_assignment, await_pos), zone());
-  Expression* do_expr = factory->NewDoExpression(do_block, temp_var, nopos);
+      factory()->NewExpressionStatement(async_function_await, await_pos),
+      zone());
 
-  generator_object = factory->NewVariableProxy(generator_object_variable);
-  return factory->NewYield(generator_object, do_expr, nopos,
-                           Yield::kOnExceptionRethrow);
+  // Wrap await to provide a break location between value evaluation and yield.
+  Expression* do_expr = factory()->NewDoExpression(do_block, promise, nopos);
+
+  generator_object = factory()->NewVariableProxy(generator_object_variable);
+  return factory()->NewYield(generator_object, do_expr, nopos,
+                             Yield::kOnExceptionRethrow);
 }
 
-ZoneList<Expression*>* ParserBaseTraits<Parser>::GetNonPatternList() const {
-  return delegate()->function_state_->non_patterns_to_rewrite();
-}
-
-ZoneList<typename ParserBaseTraits<Parser>::Type::ExpressionClassifier::Error>*
-ParserBaseTraits<Parser>::GetReportedErrorList() const {
-  return delegate()->function_state_->GetReportedErrorList();
-}
-
-Zone* ParserBaseTraits<Parser>::zone() const { return delegate()->zone(); }
-
 class NonPatternRewriter : public AstExpressionRewriter {
  public:
   NonPatternRewriter(uintptr_t stack_limit, Parser* parser)
@@ -5674,7 +4277,7 @@
     return false;
   }
 
-  void VisitObjectLiteralProperty(ObjectLiteralProperty* property) override {
+  void VisitLiteralProperty(LiteralProperty* property) override {
     if (property == nullptr) return;
     // Do not rewrite (computed) key expressions
     AST_REWRITE_PROPERTY(Expression, property, value);
@@ -5683,11 +4286,10 @@
   Parser* parser_;
 };
 
-
-void Parser::RewriteNonPattern(ExpressionClassifier* classifier, bool* ok) {
-  ValidateExpression(classifier, CHECK_OK_VOID);
+void Parser::RewriteNonPattern(bool* ok) {
+  ValidateExpression(CHECK_OK_VOID);
   auto non_patterns_to_rewrite = function_state_->non_patterns_to_rewrite();
-  int begin = classifier->GetNonPatternBegin();
+  int begin = classifier()->GetNonPatternBegin();
   int end = non_patterns_to_rewrite->length();
   if (begin < end) {
     NonPatternRewriter rewriter(stack_limit_, this);
@@ -5711,8 +4313,11 @@
         pair.assignment->AsRewritableExpression();
     DCHECK_NOT_NULL(to_rewrite);
     if (!to_rewrite->is_rewritten()) {
-      PatternRewriter::RewriteDestructuringAssignment(this, to_rewrite,
-                                                      pair.scope);
+      // Since this function is called at the end of parsing the program,
+      // pair.scope may already have been removed by FinalizeBlockScope in the
+      // meantime.
+      Scope* scope = pair.scope->GetUnremovedScope();
+      PatternRewriter::RewriteDestructuringAssignment(this, to_rewrite, scope);
     }
   }
 }
@@ -5733,8 +4338,8 @@
 
     Expression* result;
     DCHECK_NOT_NULL(lhs->raw_name());
-    result = this->ExpressionFromIdentifier(lhs->raw_name(), lhs->position(),
-                                            lhs->end_position());
+    result = ExpressionFromIdentifier(lhs->raw_name(), lhs->position(),
+                                      lhs->end_position());
     args->Add(left, zone());
     args->Add(right, zone());
     Expression* call =
@@ -5807,8 +4412,7 @@
       // ++($R.length)
       if (!value->IsLiteral() ||
           !value->AsLiteral()->raw_value()->IsTheHole()) {
-        ZoneList<Expression*>* append_element_args =
-            NewExpressionList(2, zone());
+        ZoneList<Expression*>* append_element_args = NewExpressionList(2);
         append_element_args->Add(factory()->NewVariableProxy(result), zone());
         append_element_args->Add(value, zone());
         do_block->statements()->Add(
@@ -5837,8 +4441,7 @@
       // %AppendElement($R, each)
       Statement* append_body;
       {
-        ZoneList<Expression*>* append_element_args =
-            NewExpressionList(2, zone());
+        ZoneList<Expression*>* append_element_args = NewExpressionList(2);
         append_element_args->Add(factory()->NewVariableProxy(result), zone());
         append_element_args->Add(factory()->NewVariableProxy(each), zone());
         append_body = factory()->NewExpressionStatement(
@@ -5865,7 +4468,7 @@
 void Parser::QueueDestructuringAssignmentForRewriting(Expression* expr) {
   DCHECK(expr->IsRewritableExpression());
   function_state_->AddDestructuringAssignment(
-      DestructuringAssignment(expr, delegate()->scope()));
+      DestructuringAssignment(expr, scope()));
 }
 
 void Parser::QueueNonPatternForRewriting(Expression* expr, bool* ok) {
@@ -5873,43 +4476,38 @@
   function_state_->AddNonPatternForRewriting(expr, ok);
 }
 
-void ParserBaseTraits<Parser>::SetFunctionNameFromPropertyName(
-    ObjectLiteralProperty* property, const AstRawString* name) {
-  Expression* value = property->value();
+void Parser::AddAccessorPrefixToFunctionName(bool is_get,
+                                             FunctionLiteral* function,
+                                             const AstRawString* name) {
+  DCHECK_NOT_NULL(name);
+  const AstRawString* prefix = is_get ? ast_value_factory()->get_space_string()
+                                      : ast_value_factory()->set_space_string();
+  function->set_raw_name(ast_value_factory()->NewConsString(prefix, name));
+}
+
+void Parser::SetFunctionNameFromPropertyName(ObjectLiteralProperty* property,
+                                             const AstRawString* name) {
+  DCHECK(property->kind() != ObjectLiteralProperty::GETTER);
+  DCHECK(property->kind() != ObjectLiteralProperty::SETTER);
 
   // Computed name setting must happen at runtime.
-  if (property->is_computed_name()) return;
-
-  // Getter and setter names are handled here because their names
-  // change in ES2015, even though they are not anonymous.
-  auto function = value->AsFunctionLiteral();
-  if (function != nullptr) {
-    bool is_getter = property->kind() == ObjectLiteralProperty::GETTER;
-    bool is_setter = property->kind() == ObjectLiteralProperty::SETTER;
-    if (is_getter || is_setter) {
-      DCHECK_NOT_NULL(name);
-      const AstRawString* prefix =
-          is_getter ? delegate()->ast_value_factory()->get_space_string()
-                    : delegate()->ast_value_factory()->set_space_string();
-      function->set_raw_name(
-          delegate()->ast_value_factory()->NewConsString(prefix, name));
-      return;
-    }
-  }
+  DCHECK(!property->is_computed_name());
 
   // Ignore "__proto__" as a name when it's being used to set the [[Prototype]]
   // of an object literal.
   if (property->kind() == ObjectLiteralProperty::PROTOTYPE) return;
 
+  Expression* value = property->value();
+
   DCHECK(!value->IsAnonymousFunctionDefinition() ||
          property->kind() == ObjectLiteralProperty::COMPUTED);
-  delegate()->SetFunctionName(value, name);
+  SetFunctionName(value, name);
 }
 
-void ParserBaseTraits<Parser>::SetFunctionNameFromIdentifierRef(
-    Expression* value, Expression* identifier) {
+void Parser::SetFunctionNameFromIdentifierRef(Expression* value,
+                                              Expression* identifier) {
   if (!identifier->IsVariableProxy()) return;
-  delegate()->SetFunctionName(value, identifier->AsVariableProxy()->raw_name());
+  SetFunctionName(value, identifier->AsVariableProxy()->raw_name());
 }
 
 void Parser::SetFunctionName(Expression* value, const AstRawString* name) {
@@ -6050,7 +4648,7 @@
   Variable* var_iterator = NewTemporary(ast_value_factory()->empty_string());
   Statement* get_iterator;
   {
-    Expression* iterator = GetIterator(iterable, factory(), nopos);
+    Expression* iterator = GetIterator(iterable, nopos);
     Expression* iterator_proxy = factory()->NewVariableProxy(var_iterator);
     Expression* assignment = factory()->NewAssignment(
         Token::ASSIGN, iterator_proxy, iterator, nopos);
@@ -6155,7 +4753,7 @@
 
     Block* then = factory()->NewBlock(nullptr, 4 + 1, false, nopos);
     BuildIteratorCloseForCompletion(
-        then->statements(), var_iterator,
+        scope(), then->statements(), var_iterator,
         factory()->NewSmiLiteral(Parser::kNormalCompletion, nopos));
     then->statements()->Add(throw_call, zone());
     check_throw = factory()->NewIfStatement(
@@ -6259,7 +4857,7 @@
   // input = function.sent;
   Statement* get_input;
   {
-    Expression* function_sent = FunctionSentExpression(factory(), nopos);
+    Expression* function_sent = FunctionSentExpression(nopos);
     Expression* input_proxy = factory()->NewVariableProxy(var_input);
     Expression* assignment = factory()->NewAssignment(
         Token::ASSIGN, input_proxy, function_sent, nopos);
@@ -6313,9 +4911,8 @@
     Scope* catch_scope = NewScope(CATCH_SCOPE);
     catch_scope->set_is_hidden();
     const AstRawString* name = ast_value_factory()->dot_catch_string();
-    Variable* catch_variable =
-        catch_scope->DeclareLocal(name, VAR, kCreatedInitialized,
-                                               Variable::NORMAL);
+    Variable* catch_variable = catch_scope->DeclareLocal(
+        name, VAR, kCreatedInitialized, NORMAL_VARIABLE);
 
     try_catch = factory()->NewTryCatchStatementForDesugaring(
         try_block, catch_scope, catch_variable, catch_block, nopos);
@@ -6524,9 +5121,9 @@
   statements->Add(validate_output, zone());
 }
 
-void Parser::FinalizeIteratorUse(Variable* completion, Expression* condition,
-                                 Variable* iter, Block* iterator_use,
-                                 Block* target) {
+void Parser::FinalizeIteratorUse(Scope* use_scope, Variable* completion,
+                                 Expression* condition, Variable* iter,
+                                 Block* iterator_use, Block* target) {
   //
   // This function adds two statements to [target], corresponding to the
   // following code:
@@ -6582,7 +5179,8 @@
   {
     Block* block = factory()->NewBlock(nullptr, 2, true, nopos);
     Expression* proxy = factory()->NewVariableProxy(completion);
-    BuildIteratorCloseForCompletion(block->statements(), iter, proxy);
+    BuildIteratorCloseForCompletion(use_scope, block->statements(), iter,
+                                    proxy);
     DCHECK(block->statements()->length() == 2);
 
     maybe_close = factory()->NewBlock(nullptr, 1, true, nopos);
@@ -6599,10 +5197,10 @@
   // }
   Statement* try_catch;
   {
-    Scope* catch_scope = NewScopeWithParent(scope(), CATCH_SCOPE);
+    Scope* catch_scope = NewScopeWithParent(use_scope, CATCH_SCOPE);
     Variable* catch_variable =
         catch_scope->DeclareLocal(ast_value_factory()->dot_catch_string(), VAR,
-                                  kCreatedInitialized, Variable::NORMAL);
+                                  kCreatedInitialized, NORMAL_VARIABLE);
     catch_scope->set_is_hidden();
 
     Statement* rethrow;
@@ -6639,7 +5237,8 @@
   target->statements()->Add(try_finally, zone());
 }
 
-void Parser::BuildIteratorCloseForCompletion(ZoneList<Statement*>* statements,
+void Parser::BuildIteratorCloseForCompletion(Scope* scope,
+                                             ZoneList<Statement*>* statements,
                                              Variable* iterator,
                                              Expression* completion) {
   //
@@ -6705,10 +5304,10 @@
 
     Block* catch_block = factory()->NewBlock(nullptr, 0, false, nopos);
 
-    Scope* catch_scope = NewScope(CATCH_SCOPE);
+    Scope* catch_scope = NewScopeWithParent(scope, CATCH_SCOPE);
     Variable* catch_variable =
         catch_scope->DeclareLocal(ast_value_factory()->dot_catch_string(), VAR,
-                                  kCreatedInitialized, Variable::NORMAL);
+                                  kCreatedInitialized, NORMAL_VARIABLE);
     catch_scope->set_is_hidden();
 
     try_call_return = factory()->NewTryCatchStatement(
@@ -6842,20 +5441,18 @@
     Block* try_block = factory()->NewBlock(nullptr, 1, false, nopos);
     try_block->statements()->Add(loop, zone());
 
-    FinalizeIteratorUse(var_completion, closing_condition, loop->iterator(),
-                        try_block, final_loop);
+    // The scope in which the parser creates this loop.
+    Scope* loop_scope = scope()->outer_scope();
+    DCHECK_EQ(loop_scope->scope_type(), BLOCK_SCOPE);
+    DCHECK_EQ(scope()->scope_type(), BLOCK_SCOPE);
+
+    FinalizeIteratorUse(loop_scope, var_completion, closing_condition,
+                        loop->iterator(), try_block, final_loop);
   }
 
   return final_loop;
 }
 
-#ifdef DEBUG
-void Parser::Print(AstNode* node) {
-  ast_value_factory()->Internalize(Isolate::Current());
-  node->Print(Isolate::Current());
-}
-#endif  // DEBUG
-
 #undef CHECK_OK
 #undef CHECK_OK_VOID
 #undef CHECK_FAILED
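
Note on the CHECK_OK / CHECK_OK_VOID / CHECK_FAILED macros #undef'd above: throughout parser.cc the ParseXXX and RewriteXXX helpers take a trailing bool* ok that is set to false on failure, and these macros let each caller bail out as soon as *ok goes false. A minimal, self-contained sketch of that convention follows; ParseDigit, ParseTwoDigits and RETURN_IF_FAILED are illustrative stand-ins, not the parser's actual helpers or macro definitions.

#include <iostream>

// Illustrative version of the bool* ok convention; plays the role that
// CHECK_OK plays at each call site in parser.cc.
#define RETURN_IF_FAILED(ok, value) \
  do {                              \
    if (!*(ok)) return (value);     \
  } while (false)

int ParseDigit(char c, bool* ok) {
  if (c < '0' || c > '9') {
    *ok = false;  // signal failure to the caller; the return value is unused
    return 0;
  }
  return c - '0';
}

int ParseTwoDigits(const char* s, bool* ok) {
  int hi = ParseDigit(s[0], ok);
  RETURN_IF_FAILED(ok, 0);  // explicit failure check at the call site
  int lo = ParseDigit(s[1], ok);
  RETURN_IF_FAILED(ok, 0);
  return hi * 10 + lo;
}

int main() {
  bool ok = true;
  std::cout << ParseTwoDigits("42", &ok) << " ok=" << ok << "\n";  // 42 ok=1
  ok = true;
  std::cout << ParseTwoDigits("4x", &ok) << " ok=" << ok << "\n";  // 0 ok=0
  return 0;
}

Making the failure check explicit at every call site is the point of the convention: parsing never continues past a subexpression that failed to parse.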
diff --git a/src/parsing/parser.h b/src/parsing/parser.h
index b069f9a..418bedf 100644
--- a/src/parsing/parser.h
+++ b/src/parsing/parser.h
@@ -21,7 +21,8 @@
 
 class ParseInfo;
 class ScriptData;
-class Target;
+class ParserTarget;
+class ParserTargetScope;
 
 class FunctionEntry BASE_EMBEDDED {
  public:
@@ -138,239 +139,33 @@
 };
 
 template <>
-class ParserBaseTraits<Parser> {
- public:
-  typedef ParserBaseTraits<Parser> ParserTraits;
+struct ParserTypes<Parser> {
+  typedef ParserBase<Parser> Base;
+  typedef Parser Impl;
 
-  struct Type {
-    typedef Variable GeneratorVariable;
+  typedef v8::internal::Variable Variable;
 
-    typedef v8::internal::AstProperties AstProperties;
+  // Return types for traversing functions.
+  typedef const AstRawString* Identifier;
+  typedef v8::internal::Expression* Expression;
+  typedef v8::internal::FunctionLiteral* FunctionLiteral;
+  typedef ObjectLiteral::Property* ObjectLiteralProperty;
+  typedef ClassLiteral::Property* ClassLiteralProperty;
+  typedef ZoneList<v8::internal::Expression*>* ExpressionList;
+  typedef ZoneList<ObjectLiteral::Property*>* ObjectPropertyList;
+  typedef ZoneList<ClassLiteral::Property*>* ClassPropertyList;
+  typedef ParserFormalParameters FormalParameters;
+  typedef v8::internal::Statement* Statement;
+  typedef ZoneList<v8::internal::Statement*>* StatementList;
+  typedef v8::internal::Block* Block;
+  typedef v8::internal::BreakableStatement* BreakableStatement;
+  typedef v8::internal::IterationStatement* IterationStatement;
 
-    typedef v8::internal::ExpressionClassifier<ParserTraits>
-        ExpressionClassifier;
+  // For constructing objects returned by the traversing functions.
+  typedef AstNodeFactory Factory;
 
-    // Return types for traversing functions.
-    typedef const AstRawString* Identifier;
-    typedef v8::internal::Expression* Expression;
-    typedef Yield* YieldExpression;
-    typedef v8::internal::FunctionLiteral* FunctionLiteral;
-    typedef v8::internal::ClassLiteral* ClassLiteral;
-    typedef v8::internal::Literal* Literal;
-    typedef ObjectLiteral::Property* ObjectLiteralProperty;
-    typedef ZoneList<v8::internal::Expression*>* ExpressionList;
-    typedef ZoneList<ObjectLiteral::Property*>* PropertyList;
-    typedef ParserFormalParameters::Parameter FormalParameter;
-    typedef ParserFormalParameters FormalParameters;
-    typedef ZoneList<v8::internal::Statement*>* StatementList;
-
-    // For constructing objects returned by the traversing functions.
-    typedef AstNodeFactory Factory;
-  };
-
-  // TODO(nikolaos): The traits methods should not need to call methods
-  // of the implementation object.
-  Parser* delegate() { return reinterpret_cast<Parser*>(this); }
-  const Parser* delegate() const {
-    return reinterpret_cast<const Parser*>(this);
-  }
-
-  // Helper functions for recursive descent.
-  bool IsEval(const AstRawString* identifier) const;
-  bool IsArguments(const AstRawString* identifier) const;
-  bool IsEvalOrArguments(const AstRawString* identifier) const;
-  bool IsUndefined(const AstRawString* identifier) const;
-  V8_INLINE bool IsFutureStrictReserved(const AstRawString* identifier) const;
-
-  // Returns true if the expression is of type "this.foo".
-  static bool IsThisProperty(Expression* expression);
-
-  static bool IsIdentifier(Expression* expression);
-
-  static const AstRawString* AsIdentifier(Expression* expression) {
-    DCHECK(IsIdentifier(expression));
-    return expression->AsVariableProxy()->raw_name();
-  }
-
-  bool IsPrototype(const AstRawString* identifier) const;
-
-  bool IsConstructor(const AstRawString* identifier) const;
-
-  bool IsDirectEvalCall(Expression* expression) const {
-    if (!expression->IsCall()) return false;
-    expression = expression->AsCall()->expression();
-    return IsIdentifier(expression) && IsEval(AsIdentifier(expression));
-  }
-
-  static bool IsBoilerplateProperty(ObjectLiteral::Property* property) {
-    return ObjectLiteral::IsBoilerplateProperty(property);
-  }
-
-  static bool IsArrayIndex(const AstRawString* string, uint32_t* index) {
-    return string->AsArrayIndex(index);
-  }
-
-  static Expression* GetPropertyValue(ObjectLiteral::Property* property) {
-    return property->value();
-  }
-
-  // Functions for encapsulating the differences between parsing and preparsing;
-  // operations interleaved with the recursive descent.
-  static void PushLiteralName(FuncNameInferrer* fni, const AstRawString* id) {
-    fni->PushLiteralName(id);
-  }
-
-  void PushPropertyName(FuncNameInferrer* fni, Expression* expression);
-
-  static void InferFunctionName(FuncNameInferrer* fni,
-                                FunctionLiteral* func_to_infer) {
-    fni->AddFunction(func_to_infer);
-  }
-
-  // If we assign a function literal to a property we pretenure the
-  // literal so it can be added as a constant function property.
-  static void CheckAssigningFunctionLiteralToProperty(Expression* left,
-                                                      Expression* right);
-
-  // Determine if the expression is a variable proxy and mark it as being used
-  // in an assignment or with a increment/decrement operator.
-  static Expression* MarkExpressionAsAssigned(Expression* expression);
-
-  // Returns true if we have a binary expression between two numeric
-  // literals. In that case, *x will be changed to an expression which is the
-  // computed value.
-  bool ShortcutNumericLiteralBinaryExpression(Expression** x, Expression* y,
-                                              Token::Value op, int pos,
-                                              AstNodeFactory* factory);
-
-  // Rewrites the following types of unary expressions:
-  // not <literal> -> true / false
-  // + <numeric literal> -> <numeric literal>
-  // - <numeric literal> -> <numeric literal with value negated>
-  // ! <literal> -> true / false
-  // The following rewriting rules enable the collection of type feedback
-  // without any special stub and the multiplication is removed later in
-  // Crankshaft's canonicalization pass.
-  // + foo -> foo * 1
-  // - foo -> foo * (-1)
-  // ~ foo -> foo ^(~0)
-  Expression* BuildUnaryExpression(Expression* expression, Token::Value op,
-                                   int pos, AstNodeFactory* factory);
-
-  Expression* BuildIteratorResult(Expression* value, bool done);
-
-  // Generate AST node that throws a ReferenceError with the given type.
-  Expression* NewThrowReferenceError(MessageTemplate::Template message,
-                                     int pos);
-
-  // Generate AST node that throws a SyntaxError with the given
-  // type. The first argument may be null (in the handle sense) in
-  // which case no arguments are passed to the constructor.
-  Expression* NewThrowSyntaxError(MessageTemplate::Template message,
-                                  const AstRawString* arg, int pos);
-
-  // Generate AST node that throws a TypeError with the given
-  // type. Both arguments must be non-null (in the handle sense).
-  Expression* NewThrowTypeError(MessageTemplate::Template message,
-                                const AstRawString* arg, int pos);
-
-  // Reporting errors.
-  void ReportMessageAt(Scanner::Location source_location,
-                       MessageTemplate::Template message,
-                       const char* arg = NULL,
-                       ParseErrorType error_type = kSyntaxError);
-  void ReportMessageAt(Scanner::Location source_location,
-                       MessageTemplate::Template message,
-                       const AstRawString* arg,
-                       ParseErrorType error_type = kSyntaxError);
-
-  // "null" return type creators.
-  static const AstRawString* EmptyIdentifier() { return nullptr; }
-  static Expression* EmptyExpression() { return nullptr; }
-  static Literal* EmptyLiteral() { return nullptr; }
-  static ObjectLiteralProperty* EmptyObjectLiteralProperty() { return nullptr; }
-  static FunctionLiteral* EmptyFunctionLiteral() { return nullptr; }
-
-  // Used in error return values.
-  static ZoneList<Expression*>* NullExpressionList() { return nullptr; }
-
-  // Non-NULL empty string.
-  V8_INLINE const AstRawString* EmptyIdentifierString() const;
-
-  // Odd-ball literal creators.
-  Literal* GetLiteralTheHole(int position, AstNodeFactory* factory) const;
-
-  // Producing data during the recursive descent.
-  const AstRawString* GetSymbol(Scanner* scanner) const;
-  const AstRawString* GetNextSymbol(Scanner* scanner) const;
-  const AstRawString* GetNumberAsSymbol(Scanner* scanner) const;
-
-  Expression* ThisExpression(int pos = kNoSourcePosition);
-  Expression* NewSuperPropertyReference(AstNodeFactory* factory, int pos);
-  Expression* NewSuperCallReference(AstNodeFactory* factory, int pos);
-  Expression* NewTargetExpression(int pos);
-  Expression* FunctionSentExpression(AstNodeFactory* factory, int pos) const;
-  Literal* ExpressionFromLiteral(Token::Value token, int pos, Scanner* scanner,
-                                 AstNodeFactory* factory) const;
-  Expression* ExpressionFromIdentifier(const AstRawString* name,
-                                       int start_position, int end_position,
-                                       InferName = InferName::kYes);
-  Expression* ExpressionFromString(int pos, Scanner* scanner,
-                                   AstNodeFactory* factory) const;
-  Expression* GetIterator(Expression* iterable, AstNodeFactory* factory,
-                          int pos);
-  ZoneList<v8::internal::Expression*>* NewExpressionList(int size,
-                                                         Zone* zone) const {
-    return new(zone) ZoneList<v8::internal::Expression*>(size, zone);
-  }
-  ZoneList<ObjectLiteral::Property*>* NewPropertyList(int size,
-                                                      Zone* zone) const {
-    return new(zone) ZoneList<ObjectLiteral::Property*>(size, zone);
-  }
-  ZoneList<v8::internal::Statement*>* NewStatementList(int size,
-                                                       Zone* zone) const {
-    return new(zone) ZoneList<v8::internal::Statement*>(size, zone);
-  }
-
-  V8_INLINE void AddParameterInitializationBlock(
-      const ParserFormalParameters& parameters,
-      ZoneList<v8::internal::Statement*>* body, bool is_async, bool* ok);
-
-  V8_INLINE void AddFormalParameter(ParserFormalParameters* parameters,
-                                    Expression* pattern,
-                                    Expression* initializer,
-                                    int initializer_end_position, bool is_rest);
-  V8_INLINE void DeclareFormalParameter(
-      DeclarationScope* scope,
-      const ParserFormalParameters::Parameter& parameter,
-      Type::ExpressionClassifier* classifier);
-  void ParseArrowFunctionFormalParameterList(
-      ParserFormalParameters* parameters, Expression* params,
-      const Scanner::Location& params_loc, Scanner::Location* duplicate_loc,
-      const Scope::Snapshot& scope_snapshot, bool* ok);
-
-  void ReindexLiterals(const ParserFormalParameters& parameters);
-
-  V8_INLINE Expression* NoTemplateTag() { return NULL; }
-  V8_INLINE static bool IsTaggedTemplate(const Expression* tag) {
-    return tag != NULL;
-  }
-
-  V8_INLINE void MaterializeUnspreadArgumentsLiterals(int count) {}
-
-  Expression* ExpressionListToExpression(ZoneList<Expression*>* args);
-
-  void SetFunctionNameFromPropertyName(ObjectLiteralProperty* property,
-                                       const AstRawString* name);
-
-  void SetFunctionNameFromIdentifierRef(Expression* value,
-                                        Expression* identifier);
-
-  V8_INLINE ZoneList<typename Type::ExpressionClassifier::Error>*
-      GetReportedErrorList() const;
-  V8_INLINE Zone* zone() const;
-
-  V8_INLINE ZoneList<Expression*>* GetNonPatternList() const;
+  typedef ParserTarget Target;
+  typedef ParserTargetScope TargetScope;
 };
 
 class Parser : public ParserBase<Parser> {
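
The ParserTypes<Parser> specialization above, together with ParserBase<Parser>, replaces the old ParserBaseTraits<Parser> object that delegated back to the parser via reinterpret_cast (the deleted delegate() accessors). The new layout is the curiously recurring template pattern: shared recursive-descent code lives in ParserBase<Impl> and reaches parser- or preparser-specific behaviour through a static_cast to Impl. A compilable sketch of that shape follows; every name other than ParserBase and ParserTypes is an illustrative stand-in.

#include <iostream>

// Sketch of the CRTP layout; SketchParser, MakeExpression and ParsePrimary
// are invented for this example.
template <typename Impl>
struct ParserTypes;  // specialized for each concrete implementation

template <typename Impl>
class ParserBase {
 public:
  typedef typename ParserTypes<Impl>::Expression ExpressionT;

  // Shared recursive-descent logic lives here; anything that differs between
  // implementations is dispatched through impl() instead of a cast delegate.
  ExpressionT ParsePrimary() { return impl()->MakeExpression(); }

 protected:
  Impl* impl() { return static_cast<Impl*>(this); }
};

class SketchParser;

template <>
struct ParserTypes<SketchParser> {
  typedef const char* Expression;  // stands in for v8::internal::Expression*
};

class SketchParser : public ParserBase<SketchParser> {
 public:
  const char* MakeExpression() { return "primary-expression"; }
};

int main() {
  SketchParser parser;
  std::cout << parser.ParsePrimary() << "\n";  // prints "primary-expression"
  return 0;
}
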
@@ -390,8 +185,16 @@
   bool Parse(ParseInfo* info);
   void ParseOnBackground(ParseInfo* info);
 
-  void DeserializeScopeChain(ParseInfo* info, Handle<Context> context,
-                             Scope::DeserializationMode deserialization_mode);
+  // Deserialize the scope chain prior to parsing in which the script is going
+  // to be executed. If the script is a top-level script, or the scope chain
+  // consists of only a native context, maybe_outer_scope_info should be an
+  // empty handle.
+  //
+  // This only deserializes the scope chain, but doesn't connect the scopes to
+  // their corresponding scope infos. Therefore, looking up variables in the
+  // deserialized scopes is not possible.
+  void DeserializeScopeChain(ParseInfo* info,
+                             MaybeHandle<ScopeInfo> maybe_outer_scope_info);
 
   // Handle errors detected during parsing, move statistics to Isolate,
   // internalize strings (move them to the heap).
@@ -400,9 +203,7 @@
 
  private:
   friend class ParserBase<Parser>;
-  // TODO(nikolaos): This should not be necessary. It will be removed
-  // when the traits object stops delegating to the implementation object.
-  friend class ParserBaseTraits<Parser>;
+  friend class v8::internal::ExpressionClassifier<ParserTypes<Parser>>;
 
   // Runtime encoding of different completion modes.
   enum CompletionKind {
@@ -411,18 +212,12 @@
     kAbruptCompletion
   };
 
-  enum class FunctionBodyType { kNormal, kSingleExpression };
-
-  DeclarationScope* GetDeclarationScope() const {
-    return scope()->GetDeclarationScope();
-  }
-  DeclarationScope* GetClosureScope() const {
-    return scope()->GetClosureScope();
-  }
   Variable* NewTemporary(const AstRawString* name) {
     return scope()->NewTemporary(name);
   }
 
+  void PrepareGeneratorVariables(FunctionState* function_state);
+
   // Limit the allowed number of local variables in a function. The hard limit
   // is that offsets computed by FullCodeGenerator::StackOperand and similar
   // functions are ints, and they should not overflow. In addition, accessing
@@ -455,12 +250,6 @@
     return compile_options_ == ScriptCompiler::kProduceParserCache;
   }
 
-  // All ParseXXX functions take as the last argument an *ok parameter
-  // which is set to false if parsing failed; it is unchanged otherwise.
-  // By making the 'exception handling' explicit, we are forced to check
-  // for failure at the call sites.
-  void ParseStatementList(ZoneList<Statement*>* body, int end_token, bool* ok);
-  Statement* ParseStatementListItem(bool* ok);
   void ParseModuleItemList(ZoneList<Statement*>* body, bool* ok);
   Statement* ParseModuleItem(bool* ok);
   const AstRawString* ParseModuleSpecifier(bool* ok);
@@ -482,75 +271,52 @@
           location(location) {}
   };
   ZoneList<const NamedImport*>* ParseNamedImports(int pos, bool* ok);
-  Statement* ParseStatement(ZoneList<const AstRawString*>* labels,
-                            AllowLabelledFunctionStatement allow_function,
-                            bool* ok);
-  Statement* ParseSubStatement(ZoneList<const AstRawString*>* labels,
-                               AllowLabelledFunctionStatement allow_function,
-                               bool* ok);
-  Statement* ParseStatementAsUnlabelled(ZoneList<const AstRawString*>* labels,
-                                   bool* ok);
-  Statement* ParseFunctionDeclaration(bool* ok);
-  Statement* ParseHoistableDeclaration(ZoneList<const AstRawString*>* names,
-                                       bool default_export, bool* ok);
-  Statement* ParseHoistableDeclaration(int pos, ParseFunctionFlags flags,
-                                       ZoneList<const AstRawString*>* names,
-                                       bool default_export, bool* ok);
-  Statement* ParseAsyncFunctionDeclaration(ZoneList<const AstRawString*>* names,
-                                           bool default_export, bool* ok);
-  Expression* ParseAsyncFunctionExpression(bool* ok);
-  Statement* ParseClassDeclaration(ZoneList<const AstRawString*>* names,
-                                   bool default_export, bool* ok);
-  Statement* ParseNativeDeclaration(bool* ok);
-  Block* ParseBlock(ZoneList<const AstRawString*>* labels, bool* ok);
-  Block* ParseVariableStatement(VariableDeclarationContext var_context,
-                                ZoneList<const AstRawString*>* names,
-                                bool* ok);
-  DoExpression* ParseDoExpression(bool* ok);
-  Expression* ParseYieldStarExpression(bool* ok);
+  Block* BuildInitializationBlock(DeclarationParsingResult* parsing_result,
+                                  ZoneList<const AstRawString*>* names,
+                                  bool* ok);
+  void DeclareAndInitializeVariables(
+      Block* block, const DeclarationDescriptor* declaration_descriptor,
+      const DeclarationParsingResult::Declaration* declaration,
+      ZoneList<const AstRawString*>* names, bool* ok);
+  ZoneList<const AstRawString*>* DeclareLabel(
+      ZoneList<const AstRawString*>* labels, VariableProxy* expr, bool* ok);
+  bool ContainsLabel(ZoneList<const AstRawString*>* labels,
+                     const AstRawString* label);
+  Expression* RewriteReturn(Expression* return_value, int pos);
+  Statement* RewriteSwitchStatement(Expression* tag,
+                                    SwitchStatement* switch_statement,
+                                    ZoneList<CaseClause*>* cases, Scope* scope);
+  void RewriteCatchPattern(CatchInfo* catch_info, bool* ok);
+  void ValidateCatchBlock(const CatchInfo& catch_info, bool* ok);
+  Statement* RewriteTryStatement(Block* try_block, Block* catch_block,
+                                 Block* finally_block,
+                                 const CatchInfo& catch_info, int pos);
 
-  struct DeclarationDescriptor {
-    enum Kind { NORMAL, PARAMETER };
-    Parser* parser;
-    Scope* scope;
-    Scope* hoist_scope;
-    VariableMode mode;
-    int declaration_pos;
-    int initialization_pos;
-    Kind declaration_kind;
-  };
-
-  struct DeclarationParsingResult {
-    struct Declaration {
-      Declaration(Expression* pattern, int initializer_position,
-                  Expression* initializer)
-          : pattern(pattern),
-            initializer_position(initializer_position),
-            initializer(initializer) {}
-
-      Expression* pattern;
-      int initializer_position;
-      Expression* initializer;
-    };
-
-    DeclarationParsingResult()
-        : declarations(4),
-          first_initializer_loc(Scanner::Location::invalid()),
-          bindings_loc(Scanner::Location::invalid()) {}
-
-    Block* BuildInitializationBlock(ZoneList<const AstRawString*>* names,
-                                    bool* ok);
-
-    DeclarationDescriptor descriptor;
-    List<Declaration> declarations;
-    Scanner::Location first_initializer_loc;
-    Scanner::Location bindings_loc;
-  };
+  Statement* DeclareFunction(const AstRawString* variable_name,
+                             FunctionLiteral* function, int pos,
+                             bool is_generator, bool is_async,
+                             ZoneList<const AstRawString*>* names, bool* ok);
+  V8_INLINE Statement* DeclareClass(const AstRawString* variable_name,
+                                    Expression* value,
+                                    ZoneList<const AstRawString*>* names,
+                                    int class_token_pos, int end_pos, bool* ok);
+  V8_INLINE void DeclareClassVariable(const AstRawString* name,
+                                      Scope* block_scope, ClassInfo* class_info,
+                                      int class_token_pos, bool* ok);
+  V8_INLINE void DeclareClassProperty(const AstRawString* class_name,
+                                      ClassLiteralProperty* property,
+                                      ClassInfo* class_info, bool* ok);
+  V8_INLINE Expression* RewriteClassLiteral(const AstRawString* name,
+                                            ClassInfo* class_info, int pos,
+                                            bool* ok);
+  V8_INLINE Statement* DeclareNative(const AstRawString* name, int pos,
+                                     bool* ok);
 
   class PatternRewriter final : public AstVisitor<PatternRewriter> {
    public:
     static void DeclareAndInitializeVariables(
-        Block* block, const DeclarationDescriptor* declaration_descriptor,
+        Parser* parser, Block* block,
+        const DeclarationDescriptor* declaration_descriptor,
         const DeclarationParsingResult::Declaration* declaration,
         ZoneList<const AstRawString*>* names, bool* ok);
 
@@ -627,47 +393,12 @@
     DEFINE_AST_VISITOR_MEMBERS_WITHOUT_STACKOVERFLOW()
   };
 
-  Block* ParseVariableDeclarations(VariableDeclarationContext var_context,
-                                   DeclarationParsingResult* parsing_result,
-                                   ZoneList<const AstRawString*>* names,
-                                   bool* ok);
-  Statement* ParseExpressionOrLabelledStatement(
-      ZoneList<const AstRawString*>* labels,
-      AllowLabelledFunctionStatement allow_function, bool* ok);
-  IfStatement* ParseIfStatement(ZoneList<const AstRawString*>* labels,
-                                bool* ok);
-  Statement* ParseContinueStatement(bool* ok);
-  Statement* ParseBreakStatement(ZoneList<const AstRawString*>* labels,
-                                 bool* ok);
-  Statement* ParseReturnStatement(bool* ok);
-  Statement* ParseWithStatement(ZoneList<const AstRawString*>* labels,
-                                bool* ok);
-  CaseClause* ParseCaseClause(bool* default_seen_ptr, bool* ok);
-  Statement* ParseSwitchStatement(ZoneList<const AstRawString*>* labels,
-                                  bool* ok);
-  DoWhileStatement* ParseDoWhileStatement(ZoneList<const AstRawString*>* labels,
-                                          bool* ok);
-  WhileStatement* ParseWhileStatement(ZoneList<const AstRawString*>* labels,
-                                      bool* ok);
-  Statement* ParseForStatement(ZoneList<const AstRawString*>* labels, bool* ok);
-  Statement* ParseThrowStatement(bool* ok);
-  Expression* MakeCatchContext(Handle<String> id, VariableProxy* value);
-  TryStatement* ParseTryStatement(bool* ok);
-  DebuggerStatement* ParseDebuggerStatement(bool* ok);
-  // Parse a SubStatement in strict mode, or with an extra block scope in
-  // sloppy mode to handle
-  // ES#sec-functiondeclarations-in-ifstatement-statement-clauses
-  // The legacy parameter indicates whether function declarations are
-  // banned by the ES2015 specification in this location, and they are being
-  // permitted here to match previous V8 behavior.
-  Statement* ParseScopedStatement(ZoneList<const AstRawString*>* labels,
-                                  bool legacy, bool* ok);
-
   // !%_IsJSReceiver(result = iterator.next()) &&
   //     %ThrowIteratorResultNotAnObject(result)
   Expression* BuildIteratorNextResult(Expression* iterator, Variable* result,
                                       int pos);
 
+  Expression* GetIterator(Expression* iterable, int pos);
 
   // Initialize the components of a for-in / for-of statement.
   Statement* InitializeForEachStatement(ForEachStatement* stmt,
@@ -677,18 +408,17 @@
                                       Expression* iterable, Statement* body,
                                       bool finalize,
                                       int next_result_pos = kNoSourcePosition);
+  Block* RewriteForVarInLegacy(const ForInfo& for_info);
+  void DesugarBindingInForEachStatement(ForInfo* for_info, Block** body_block,
+                                        Expression** each_variable, bool* ok);
+  Block* CreateForEachStatementTDZ(Block* init_block, const ForInfo& for_info,
+                                   bool* ok);
+
   Statement* DesugarLexicalBindingsInForStatement(
-      Scope* inner_scope, VariableMode mode,
-      ZoneList<const AstRawString*>* names, ForStatement* loop, Statement* init,
-      Expression* cond, Statement* next, Statement* body, bool* ok);
+      ForStatement* loop, Statement* init, Expression* cond, Statement* next,
+      Statement* body, Scope* inner_scope, const ForInfo& for_info, bool* ok);
 
-  void DesugarAsyncFunctionBody(const AstRawString* function_name, Scope* scope,
-                                ZoneList<Statement*>* body,
-                                Type::ExpressionClassifier* classifier,
-                                FunctionKind kind, FunctionBodyType type,
-                                bool accept_IN, int pos, bool* ok);
-
-  void RewriteDoExpression(Expression* expr, bool* ok);
+  Expression* RewriteDoExpression(Block* body, int pos, bool* ok);
 
   FunctionLiteral* ParseFunctionLiteral(
       const AstRawString* name, Scanner::Location function_name_location,
@@ -696,14 +426,10 @@
       int function_token_position, FunctionLiteral::FunctionType type,
       LanguageMode language_mode, bool* ok);
 
-  Expression* ParseClassLiteral(ExpressionClassifier* classifier,
-                                const AstRawString* name,
-                                Scanner::Location class_name_location,
-                                bool name_is_strict_reserved, int pos,
-                                bool* ok);
-
-  // Magical syntax support.
-  Expression* ParseV8Intrinsic(bool* ok);
+  Expression* InstallHomeObject(Expression* function_literal,
+                                Expression* home_object);
+  FunctionLiteral* SynthesizeClassFieldInitializer(int count);
+  FunctionLiteral* InsertClassFieldInitializer(FunctionLiteral* constructor);
 
   // Get odd-ball literals.
   Literal* GetLiteralUndefined(int position);
@@ -724,14 +450,11 @@
   void InsertShadowingVarBindingInitializers(Block* block);
 
   // Implement sloppy block-scoped functions, ES2015 Annex B 3.3
-  void InsertSloppyBlockFunctionVarBindings(DeclarationScope* scope,
-                                            Scope* complex_params_scope,
-                                            bool* ok);
+  void InsertSloppyBlockFunctionVarBindings(DeclarationScope* scope);
 
-  static InitializationFlag DefaultInitializationFlag(VariableMode mode);
   VariableProxy* NewUnresolved(const AstRawString* name, int begin_pos,
                                int end_pos = kNoSourcePosition,
-                               Variable::Kind kind = Variable::NORMAL);
+                               VariableKind kind = NORMAL_VARIABLE);
   VariableProxy* NewUnresolved(const AstRawString* name);
   Variable* Declare(Declaration* declaration,
                     DeclarationDescriptor::Kind declaration_kind,
@@ -750,25 +473,24 @@
 
   // Factory methods.
   FunctionLiteral* DefaultConstructor(const AstRawString* name, bool call_super,
-                                      int pos, int end_pos,
-                                      LanguageMode language_mode);
+                                      bool requires_class_field_init, int pos,
+                                      int end_pos, LanguageMode language_mode);
 
   // Skip over a lazy function, either using cached data if we have it, or
   // by parsing the function with PreParser. Consumes the ending }.
-  //
-  // If bookmark is set, the (pre-)parser may decide to abort skipping
+  // If may_abort == true, the (pre-)parser may decide to abort skipping
   // in order to force the function to be eagerly parsed, after all.
-  // In this case, it'll reset the scanner using the bookmark.
-  void SkipLazyFunctionBody(int* materialized_literal_count,
-                            int* expected_property_count, bool* ok,
-                            Scanner::BookmarkScope* bookmark = nullptr);
+  LazyParsingResult SkipLazyFunctionBody(int* materialized_literal_count,
+                                         int* expected_property_count,
+                                         bool is_inner_function, bool may_abort,
+                                         bool* ok);
 
   PreParser::PreParseResult ParseLazyFunctionBodyWithPreParser(
-      SingletonLogger* logger, Scanner::BookmarkScope* bookmark = nullptr);
+      SingletonLogger* logger, bool is_inner_function, bool may_abort);
 
   Block* BuildParameterInitializationBlock(
       const ParserFormalParameters& parameters, bool* ok);
-  Block* BuildRejectPromiseOnException(Block* block);
+  Block* BuildRejectPromiseOnException(Block* block, bool* ok);
 
   // Consumes the ending }.
   ZoneList<Statement*>* ParseEagerFunctionBody(
@@ -817,25 +539,16 @@
                                    Expression* tag);
   uint32_t ComputeTemplateLiteralHash(const TemplateLiteral* lit);
 
-  void ParseAsyncArrowSingleExpressionBody(ZoneList<Statement*>* body,
-                                           bool accept_IN,
-                                           ExpressionClassifier* classifier,
-                                           int pos, bool* ok) {
-    DesugarAsyncFunctionBody(ast_value_factory()->empty_string(), scope(), body,
-                             classifier, kAsyncArrowFunction,
-                             FunctionBodyType::kSingleExpression, accept_IN,
-                             pos, ok);
-  }
-
-  ZoneList<v8::internal::Expression*>* PrepareSpreadArguments(
-      ZoneList<v8::internal::Expression*>* list);
-  Expression* SpreadCall(Expression* function,
-                         ZoneList<v8::internal::Expression*>* args, int pos);
-  Expression* SpreadCallNew(Expression* function,
-                            ZoneList<v8::internal::Expression*>* args, int pos);
+  ZoneList<Expression*>* PrepareSpreadArguments(ZoneList<Expression*>* list);
+  Expression* SpreadCall(Expression* function, ZoneList<Expression*>* args,
+                         int pos);
+  Expression* SpreadCallNew(Expression* function, ZoneList<Expression*>* args,
+                            int pos);
+  Expression* CallClassFieldInitializer(Scope* scope, Expression* this_expr);
+  Expression* RewriteSuperCall(Expression* call_expression);
 
   void SetLanguageMode(Scope* scope, LanguageMode mode);
-  void RaiseLanguageMode(LanguageMode mode);
+  void SetAsmModule();
 
   V8_INLINE void MarkCollectedTailCallExpressions();
   V8_INLINE void MarkTailPosition(Expression* expression);
@@ -852,7 +565,7 @@
   V8_INLINE Expression* RewriteSpreads(ArrayLiteral* lit);
 
   // Rewrite expressions that are not used as patterns
-  V8_INLINE void RewriteNonPattern(ExpressionClassifier* classifier, bool* ok);
+  V8_INLINE void RewriteNonPattern(bool* ok);
 
   V8_INLINE void QueueDestructuringAssignmentForRewriting(
       Expression* assignment);
@@ -861,41 +574,513 @@
   friend class InitializerRewriter;
   void RewriteParameterInitializer(Expression* expr, Scope* scope);
 
+  Expression* BuildInitialYield(int pos, FunctionKind kind);
   Expression* BuildCreateJSGeneratorObject(int pos, FunctionKind kind);
-  Expression* BuildPromiseResolve(Expression* value, int pos);
-  Expression* BuildPromiseReject(Expression* value, int pos);
+  Expression* BuildResolvePromise(Expression* value, int pos);
+  Expression* BuildRejectPromise(Expression* value, int pos);
+  Variable* PromiseVariable();
 
   // Generic AST generator for throwing errors from compiled code.
   Expression* NewThrowError(Runtime::FunctionId function_id,
                             MessageTemplate::Template message,
                             const AstRawString* arg, int pos);
 
-  void FinalizeIteratorUse(Variable* completion, Expression* condition,
-                           Variable* iter, Block* iterator_use, Block* result);
+  void FinalizeIteratorUse(Scope* use_scope, Variable* completion,
+                           Expression* condition, Variable* iter,
+                           Block* iterator_use, Block* result);
 
   Statement* FinalizeForOfStatement(ForOfStatement* loop, Variable* completion,
                                     int pos);
   void BuildIteratorClose(ZoneList<Statement*>* statements, Variable* iterator,
                           Variable* input, Variable* output);
-  void BuildIteratorCloseForCompletion(ZoneList<Statement*>* statements,
+  void BuildIteratorCloseForCompletion(Scope* scope,
+                                       ZoneList<Statement*>* statements,
                                        Variable* iterator,
                                        Expression* completion);
   Statement* CheckCallable(Variable* var, Expression* error, int pos);
 
   V8_INLINE Expression* RewriteAwaitExpression(Expression* value, int pos);
+  V8_INLINE void PrepareAsyncFunctionBody(ZoneList<Statement*>* body,
+                                          FunctionKind kind, int pos);
+  V8_INLINE void RewriteAsyncFunctionBody(ZoneList<Statement*>* body,
+                                          Block* block,
+                                          Expression* return_value, bool* ok);
 
   Expression* RewriteYieldStar(Expression* generator, Expression* expression,
                                int pos);
 
-  void ParseArrowFunctionFormalParameters(ParserFormalParameters* parameters,
-                                          Expression* params, int end_pos,
-                                          bool* ok);
+  void AddArrowFunctionFormalParameters(ParserFormalParameters* parameters,
+                                        Expression* params, int end_pos,
+                                        bool* ok);
   void SetFunctionName(Expression* value, const AstRawString* name);
 
+  // Helper functions for recursive descent.
+  V8_INLINE bool IsEval(const AstRawString* identifier) const {
+    return identifier == ast_value_factory()->eval_string();
+  }
+
+  V8_INLINE bool IsArguments(const AstRawString* identifier) const {
+    return identifier == ast_value_factory()->arguments_string();
+  }
+
+  V8_INLINE bool IsEvalOrArguments(const AstRawString* identifier) const {
+    return IsEval(identifier) || IsArguments(identifier);
+  }
+
+  V8_INLINE bool IsUndefined(const AstRawString* identifier) const {
+    return identifier == ast_value_factory()->undefined_string();
+  }
+
+  V8_INLINE bool IsFutureStrictReserved(const AstRawString* identifier) const {
+    return scanner()->IdentifierIsFutureStrictReserved(identifier);
+  }
+
+  // Returns true if the expression is of type "this.foo".
+  V8_INLINE static bool IsThisProperty(Expression* expression) {
+    DCHECK(expression != NULL);
+    Property* property = expression->AsProperty();
+    return property != NULL && property->obj()->IsVariableProxy() &&
+           property->obj()->AsVariableProxy()->is_this();
+  }
+
+  // This returns true if the expression is an identifier (wrapped
+  // inside a variable proxy).  We exclude the case of 'this', which
+  // has been converted to a variable proxy.
+  V8_INLINE static bool IsIdentifier(Expression* expression) {
+    DCHECK_NOT_NULL(expression);
+    VariableProxy* operand = expression->AsVariableProxy();
+    return operand != nullptr && !operand->is_this();
+  }
+
+  V8_INLINE static const AstRawString* AsIdentifier(Expression* expression) {
+    DCHECK(IsIdentifier(expression));
+    return expression->AsVariableProxy()->raw_name();
+  }
+
+  V8_INLINE VariableProxy* AsIdentifierExpression(Expression* expression) {
+    return expression->AsVariableProxy();
+  }
+
+  V8_INLINE bool IsPrototype(const AstRawString* identifier) const {
+    return identifier == ast_value_factory()->prototype_string();
+  }
+
+  V8_INLINE bool IsConstructor(const AstRawString* identifier) const {
+    return identifier == ast_value_factory()->constructor_string();
+  }
+
+  V8_INLINE bool IsDirectEvalCall(Expression* expression) const {
+    if (!expression->IsCall()) return false;
+    expression = expression->AsCall()->expression();
+    return IsIdentifier(expression) && IsEval(AsIdentifier(expression));
+  }
+
+  V8_INLINE static bool IsBoilerplateProperty(
+      ObjectLiteral::Property* property) {
+    return ObjectLiteral::IsBoilerplateProperty(property);
+  }
+
+  V8_INLINE bool IsNative(Expression* expr) const {
+    DCHECK_NOT_NULL(expr);
+    return expr->IsVariableProxy() &&
+           expr->AsVariableProxy()->raw_name() ==
+               ast_value_factory()->native_string();
+  }
+
+  V8_INLINE static bool IsArrayIndex(const AstRawString* string,
+                                     uint32_t* index) {
+    return string->AsArrayIndex(index);
+  }
+
+  V8_INLINE bool IsUseStrictDirective(Statement* statement) const {
+    return IsStringLiteral(statement, ast_value_factory()->use_strict_string());
+  }
+
+  V8_INLINE bool IsUseAsmDirective(Statement* statement) const {
+    return IsStringLiteral(statement, ast_value_factory()->use_asm_string());
+  }
+
+  // Returns true if the statement is an expression statement containing
+  // a single string literal.  If a second argument is given, the literal
+  // is also compared with it and the result is true only if they are equal.
+  V8_INLINE bool IsStringLiteral(Statement* statement,
+                                 const AstRawString* arg = nullptr) const {
+    ExpressionStatement* e_stat = statement->AsExpressionStatement();
+    if (e_stat == nullptr) return false;
+    Literal* literal = e_stat->expression()->AsLiteral();
+    if (literal == nullptr || !literal->raw_value()->IsString()) return false;
+    return arg == nullptr || literal->raw_value()->AsString() == arg;
+  }
+
+  V8_INLINE static Expression* GetPropertyValue(LiteralProperty* property) {
+    return property->value();
+  }
+
+  V8_INLINE void GetDefaultStrings(
+      const AstRawString** default_string,
+      const AstRawString** star_default_star_string) {
+    *default_string = ast_value_factory()->default_string();
+    *star_default_star_string = ast_value_factory()->star_default_star_string();
+  }
+
+  // Functions for encapsulating the differences between parsing and
+  // preparsing: operations interleaved with the recursive descent.
+  V8_INLINE void PushLiteralName(const AstRawString* id) {
+    DCHECK_NOT_NULL(fni_);
+    fni_->PushLiteralName(id);
+  }
+
+  V8_INLINE void PushVariableName(const AstRawString* id) {
+    DCHECK_NOT_NULL(fni_);
+    fni_->PushVariableName(id);
+  }
+
+  V8_INLINE void PushPropertyName(Expression* expression) {
+    DCHECK_NOT_NULL(fni_);
+    if (expression->IsPropertyName()) {
+      fni_->PushLiteralName(expression->AsLiteral()->AsRawPropertyName());
+    } else {
+      fni_->PushLiteralName(ast_value_factory()->anonymous_function_string());
+    }
+  }
+
+  V8_INLINE void PushEnclosingName(const AstRawString* name) {
+    DCHECK_NOT_NULL(fni_);
+    fni_->PushEnclosingName(name);
+  }
+
+  V8_INLINE void AddFunctionForNameInference(FunctionLiteral* func_to_infer) {
+    DCHECK_NOT_NULL(fni_);
+    fni_->AddFunction(func_to_infer);
+  }
+
+  V8_INLINE void InferFunctionName() {
+    DCHECK_NOT_NULL(fni_);
+    fni_->Infer();
+  }
+
+  // If we assign a function literal to a property we pretenure the
+  // literal so it can be added as a constant function property.
+  V8_INLINE static void CheckAssigningFunctionLiteralToProperty(
+      Expression* left, Expression* right) {
+    DCHECK(left != NULL);
+    if (left->IsProperty() && right->IsFunctionLiteral()) {
+      right->AsFunctionLiteral()->set_pretenure();
+    }
+  }
+
+  // Determine if the expression is a variable proxy and mark it as being used
+  // in an assignment or with an increment/decrement operator.
+  V8_INLINE static Expression* MarkExpressionAsAssigned(
+      Expression* expression) {
+    VariableProxy* proxy =
+        expression != NULL ? expression->AsVariableProxy() : NULL;
+    if (proxy != NULL) proxy->set_is_assigned();
+    return expression;
+  }
+
+  // Returns true if we have a binary expression between two numeric
+  // literals. In that case, *x will be changed to an expression which is the
+  // computed value.
+  bool ShortcutNumericLiteralBinaryExpression(Expression** x, Expression* y,
+                                              Token::Value op, int pos);
+
+  // Rewrites the following types of unary expressions:
+  // not <literal> -> true / false
+  // + <numeric literal> -> <numeric literal>
+  // - <numeric literal> -> <numeric literal with value negated>
+  // ! <literal> -> true / false
+  // The following rewriting rules enable the collection of type feedback
+  // without any special stub and the multiplication is removed later in
+  // Crankshaft's canonicalization pass.
+  // + foo -> foo * 1
+  // - foo -> foo * (-1)
+  // ~ foo -> foo ^(~0)
+  Expression* BuildUnaryExpression(Expression* expression, Token::Value op,
+                                   int pos);
+
+  Expression* BuildIteratorResult(Expression* value, bool done);
+
+  // Generate AST node that throws a ReferenceError with the given type.
+  V8_INLINE Expression* NewThrowReferenceError(
+      MessageTemplate::Template message, int pos) {
+    return NewThrowError(Runtime::kNewReferenceError, message,
+                         ast_value_factory()->empty_string(), pos);
+  }
+
+  // Generate AST node that throws a SyntaxError with the given
+  // type. The first argument may be null (in the handle sense) in
+  // which case no arguments are passed to the constructor.
+  V8_INLINE Expression* NewThrowSyntaxError(MessageTemplate::Template message,
+                                            const AstRawString* arg, int pos) {
+    return NewThrowError(Runtime::kNewSyntaxError, message, arg, pos);
+  }
+
+  // Generate AST node that throws a TypeError with the given
+  // type. Both arguments must be non-null (in the handle sense).
+  V8_INLINE Expression* NewThrowTypeError(MessageTemplate::Template message,
+                                          const AstRawString* arg, int pos) {
+    return NewThrowError(Runtime::kNewTypeError, message, arg, pos);
+  }
+
+  // Reporting errors.
+  V8_INLINE void ReportMessageAt(Scanner::Location source_location,
+                                 MessageTemplate::Template message,
+                                 const char* arg = NULL,
+                                 ParseErrorType error_type = kSyntaxError) {
+    if (stack_overflow()) {
+      // Suppress the error message (syntax error or such) in the presence of a
+      // stack overflow. The isolate allows only one pending exception at a
+      // time and we want to report the stack overflow later.
+      return;
+    }
+    pending_error_handler_.ReportMessageAt(source_location.beg_pos,
+                                           source_location.end_pos, message,
+                                           arg, error_type);
+  }
+
+  V8_INLINE void ReportMessageAt(Scanner::Location source_location,
+                                 MessageTemplate::Template message,
+                                 const AstRawString* arg,
+                                 ParseErrorType error_type = kSyntaxError) {
+    if (stack_overflow()) {
+      // Suppress the error message (syntax error or such) in the presence of a
+      // stack overflow. The isolate allows only one pending exception at a
+      // time and we want to report the stack overflow later.
+      return;
+    }
+    pending_error_handler_.ReportMessageAt(source_location.beg_pos,
+                                           source_location.end_pos, message,
+                                           arg, error_type);
+  }
+
+  // "null" return type creators.
+  V8_INLINE static const AstRawString* EmptyIdentifier() { return nullptr; }
+  V8_INLINE static bool IsEmptyIdentifier(const AstRawString* name) {
+    return name == nullptr;
+  }
+  V8_INLINE static Expression* EmptyExpression() { return nullptr; }
+  V8_INLINE static Literal* EmptyLiteral() { return nullptr; }
+  V8_INLINE static ObjectLiteralProperty* EmptyObjectLiteralProperty() {
+    return nullptr;
+  }
+  V8_INLINE static ClassLiteralProperty* EmptyClassLiteralProperty() {
+    return nullptr;
+  }
+  V8_INLINE static FunctionLiteral* EmptyFunctionLiteral() { return nullptr; }
+  V8_INLINE static Block* NullBlock() { return nullptr; }
+
+  V8_INLINE static bool IsEmptyExpression(Expression* expr) {
+    return expr == nullptr;
+  }
+
+  // Used in error return values.
+  V8_INLINE static ZoneList<Expression*>* NullExpressionList() {
+    return nullptr;
+  }
+  V8_INLINE static bool IsNullExpressionList(ZoneList<Expression*>* exprs) {
+    return exprs == nullptr;
+  }
+  V8_INLINE static ZoneList<Statement*>* NullStatementList() { return nullptr; }
+  V8_INLINE static bool IsNullStatementList(ZoneList<Statement*>* stmts) {
+    return stmts == nullptr;
+  }
+  V8_INLINE static Statement* NullStatement() { return nullptr; }
+  V8_INLINE bool IsNullStatement(Statement* stmt) { return stmt == nullptr; }
+  V8_INLINE bool IsEmptyStatement(Statement* stmt) {
+    DCHECK_NOT_NULL(stmt);
+    return stmt->IsEmpty();
+  }
+
+  // Non-NULL empty string.
+  V8_INLINE const AstRawString* EmptyIdentifierString() const {
+    return ast_value_factory()->empty_string();
+  }
+
+  // Odd-ball literal creators.
+  V8_INLINE Literal* GetLiteralTheHole(int position) {
+    return factory()->NewTheHoleLiteral(kNoSourcePosition);
+  }
+
+  // Producing data during the recursive descent.
+  V8_INLINE const AstRawString* GetSymbol() const {
+    const AstRawString* result = scanner()->CurrentSymbol(ast_value_factory());
+    DCHECK(result != NULL);
+    return result;
+  }
+
+  V8_INLINE const AstRawString* GetNextSymbol() const {
+    return scanner()->NextSymbol(ast_value_factory());
+  }
+
+  V8_INLINE const AstRawString* GetNumberAsSymbol() const {
+    double double_value = scanner()->DoubleValue();
+    char array[100];
+    const char* string = DoubleToCString(double_value, ArrayVector(array));
+    return ast_value_factory()->GetOneByteString(string);
+  }
+
+  V8_INLINE Expression* ThisExpression(int pos = kNoSourcePosition) {
+    return NewUnresolved(ast_value_factory()->this_string(), pos, pos + 4,
+                         THIS_VARIABLE);
+  }
+
+  Expression* NewSuperPropertyReference(int pos);
+  Expression* NewSuperCallReference(int pos);
+  Expression* NewTargetExpression(int pos);
+  Expression* FunctionSentExpression(int pos);
+
+  Literal* ExpressionFromLiteral(Token::Value token, int pos);
+
+  V8_INLINE Expression* ExpressionFromIdentifier(
+      const AstRawString* name, int start_position, int end_position,
+      InferName infer = InferName::kYes) {
+    if (infer == InferName::kYes) {
+      fni_->PushVariableName(name);
+    }
+    return NewUnresolved(name, start_position, end_position);
+  }
+
+  V8_INLINE Expression* ExpressionFromString(int pos) {
+    const AstRawString* symbol = GetSymbol();
+    fni_->PushLiteralName(symbol);
+    return factory()->NewStringLiteral(symbol, pos);
+  }
+
+  V8_INLINE ZoneList<Expression*>* NewExpressionList(int size) const {
+    return new (zone()) ZoneList<Expression*>(size, zone());
+  }
+  V8_INLINE ZoneList<ObjectLiteral::Property*>* NewObjectPropertyList(
+      int size) const {
+    return new (zone()) ZoneList<ObjectLiteral::Property*>(size, zone());
+  }
+  V8_INLINE ZoneList<ClassLiteral::Property*>* NewClassPropertyList(
+      int size) const {
+    return new (zone()) ZoneList<ClassLiteral::Property*>(size, zone());
+  }
+  V8_INLINE ZoneList<Statement*>* NewStatementList(int size) const {
+    return new (zone()) ZoneList<Statement*>(size, zone());
+  }
+  V8_INLINE ZoneList<CaseClause*>* NewCaseClauseList(int size) const {
+    return new (zone()) ZoneList<CaseClause*>(size, zone());
+  }
+
+  V8_INLINE Expression* NewV8Intrinsic(const AstRawString* name,
+                                       ZoneList<Expression*>* args, int pos,
+                                       bool* ok);
+
+  V8_INLINE Statement* NewThrowStatement(Expression* exception, int pos) {
+    return factory()->NewExpressionStatement(
+        factory()->NewThrow(exception, pos), pos);
+  }
+
+  V8_INLINE void AddParameterInitializationBlock(
+      const ParserFormalParameters& parameters, ZoneList<Statement*>* body,
+      bool is_async, bool* ok) {
+    if (parameters.is_simple) return;
+    auto* init_block = BuildParameterInitializationBlock(parameters, ok);
+    if (!*ok) return;
+    if (is_async) {
+      init_block = BuildRejectPromiseOnException(init_block, ok);
+      if (!*ok) return;
+    }
+    if (init_block != nullptr) body->Add(init_block, zone());
+  }
+
+  V8_INLINE void AddFormalParameter(ParserFormalParameters* parameters,
+                                    Expression* pattern,
+                                    Expression* initializer,
+                                    int initializer_end_position,
+                                    bool is_rest) {
+    bool is_simple = pattern->IsVariableProxy() && initializer == nullptr;
+    const AstRawString* name = is_simple
+                                   ? pattern->AsVariableProxy()->raw_name()
+                                   : ast_value_factory()->empty_string();
+    parameters->params.Add(
+        ParserFormalParameters::Parameter(name, pattern, initializer,
+                                          initializer_end_position, is_rest),
+        parameters->scope->zone());
+  }
+
+  V8_INLINE void DeclareFormalParameter(
+      DeclarationScope* scope,
+      const ParserFormalParameters::Parameter& parameter) {
+    bool is_duplicate = false;
+    bool is_simple = classifier()->is_simple_parameter_list();
+    auto name = is_simple || parameter.is_rest
+                    ? parameter.name
+                    : ast_value_factory()->empty_string();
+    auto mode = is_simple || parameter.is_rest ? VAR : TEMPORARY;
+    if (!is_simple) scope->SetHasNonSimpleParameters();
+    bool is_optional = parameter.initializer != nullptr;
+    Variable* var =
+        scope->DeclareParameter(name, mode, is_optional, parameter.is_rest,
+                                &is_duplicate, ast_value_factory());
+    if (is_duplicate) {
+      classifier()->RecordDuplicateFormalParameterError(scanner()->location());
+    }
+    if (is_sloppy(scope->language_mode())) {
+      // TODO(sigurds) Mark every parameter as maybe assigned. This is a
+      // conservative approximation necessary to account for parameters
+      // that are assigned via the arguments array.
+      var->set_maybe_assigned();
+    }
+  }
+
+  void DeclareArrowFunctionFormalParameters(ParserFormalParameters* parameters,
+                                            Expression* params,
+                                            const Scanner::Location& params_loc,
+                                            Scanner::Location* duplicate_loc,
+                                            bool* ok);
+
+  void ReindexLiterals(const ParserFormalParameters& parameters);
+
+  V8_INLINE Expression* NoTemplateTag() { return NULL; }
+  V8_INLINE static bool IsTaggedTemplate(const Expression* tag) {
+    return tag != NULL;
+  }
+
+  V8_INLINE void MaterializeUnspreadArgumentsLiterals(int count) {}
+
+  Expression* ExpressionListToExpression(ZoneList<Expression*>* args);
+
+  void AddAccessorPrefixToFunctionName(bool is_get, FunctionLiteral* function,
+                                       const AstRawString* name);
+
+  void SetFunctionNameFromPropertyName(ObjectLiteralProperty* property,
+                                       const AstRawString* name);
+
+  void SetFunctionNameFromIdentifierRef(Expression* value,
+                                        Expression* identifier);
+
+  V8_INLINE ZoneList<typename ExpressionClassifier::Error>*
+  GetReportedErrorList() const {
+    return function_state_->GetReportedErrorList();
+  }
+
+  V8_INLINE ZoneList<Expression*>* GetNonPatternList() const {
+    return function_state_->non_patterns_to_rewrite();
+  }
+
+  V8_INLINE void CountUsage(v8::Isolate::UseCounterFeature feature) {
+    ++use_counts_[feature];
+  }
+
+  // Parser's private field members.
+  friend class DiscardableZoneScope;  // Uses reusable_preparser_.
+  // FIXME(marja): Make reusable_preparser_ always use its own temp Zone (call
+  // DeleteAll after each function), so this won't be needed.
+
   Scanner scanner_;
   PreParser* reusable_preparser_;
   Scope* original_scope_;  // for ES5 function declarations in sloppy eval
-  Target* target_stack_;  // for break, continue statements
+
+  friend class ParserTarget;
+  friend class ParserTargetScope;
+  ParserTarget* target_stack_;  // for break, continue statements
+
   ScriptCompiler::CompileOptions compile_options_;
   ParseData* cached_parse_data_;
 
@@ -908,107 +1093,8 @@
   HistogramTimer* pre_parse_timer_;
 
   bool parsing_on_main_thread_;
-
-#ifdef DEBUG
-  void Print(AstNode* node);
-#endif  // DEBUG
 };
 
-bool ParserBaseTraits<Parser>::IsFutureStrictReserved(
-    const AstRawString* identifier) const {
-  return delegate()->scanner()->IdentifierIsFutureStrictReserved(identifier);
-}
-
-const AstRawString* ParserBaseTraits<Parser>::EmptyIdentifierString() const {
-  return delegate()->ast_value_factory()->empty_string();
-}
-
-
-// Support for handling complex values (array and object literals) that
-// can be fully handled at compile time.
-class CompileTimeValue: public AllStatic {
- public:
-  enum LiteralType {
-    OBJECT_LITERAL_FAST_ELEMENTS,
-    OBJECT_LITERAL_SLOW_ELEMENTS,
-    ARRAY_LITERAL
-  };
-
-  static bool IsCompileTimeValue(Expression* expression);
-
-  // Get the value as a compile time value.
-  static Handle<FixedArray> GetValue(Isolate* isolate, Expression* expression);
-
-  // Get the type of a compile time value returned by GetValue().
-  static LiteralType GetLiteralType(Handle<FixedArray> value);
-
-  // Get the elements array of a compile time value returned by GetValue().
-  static Handle<FixedArray> GetElements(Handle<FixedArray> value);
-
- private:
-  static const int kLiteralTypeSlot = 0;
-  static const int kElementsSlot = 1;
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(CompileTimeValue);
-};
-
-void ParserBaseTraits<Parser>::AddFormalParameter(
-    ParserFormalParameters* parameters, Expression* pattern,
-    Expression* initializer, int initializer_end_position, bool is_rest) {
-  bool is_simple = pattern->IsVariableProxy() && initializer == nullptr;
-  const AstRawString* name =
-      is_simple ? pattern->AsVariableProxy()->raw_name()
-                : delegate()->ast_value_factory()->empty_string();
-  parameters->params.Add(
-      ParserFormalParameters::Parameter(name, pattern, initializer,
-                                        initializer_end_position, is_rest),
-      parameters->scope->zone());
-}
-
-void ParserBaseTraits<Parser>::DeclareFormalParameter(
-    DeclarationScope* scope, const ParserFormalParameters::Parameter& parameter,
-    Type::ExpressionClassifier* classifier) {
-  bool is_duplicate = false;
-  bool is_simple = classifier->is_simple_parameter_list();
-  auto name = is_simple || parameter.is_rest
-                  ? parameter.name
-                  : delegate()->ast_value_factory()->empty_string();
-  auto mode = is_simple || parameter.is_rest ? VAR : TEMPORARY;
-  if (!is_simple) scope->SetHasNonSimpleParameters();
-  bool is_optional = parameter.initializer != nullptr;
-  Variable* var =
-      scope->DeclareParameter(name, mode, is_optional, parameter.is_rest,
-                              &is_duplicate, delegate()->ast_value_factory());
-  if (is_duplicate) {
-    classifier->RecordDuplicateFormalParameterError(
-        delegate()->scanner()->location());
-  }
-  if (is_sloppy(scope->language_mode())) {
-    // TODO(sigurds) Mark every parameter as maybe assigned. This is a
-    // conservative approximation necessary to account for parameters
-    // that are assigned via the arguments array.
-    var->set_maybe_assigned();
-  }
-}
-
-void ParserBaseTraits<Parser>::AddParameterInitializationBlock(
-    const ParserFormalParameters& parameters,
-    ZoneList<v8::internal::Statement*>* body, bool is_async, bool* ok) {
-  if (!parameters.is_simple) {
-    auto* init_block =
-        delegate()->BuildParameterInitializationBlock(parameters, ok);
-    if (!*ok) return;
-
-    if (is_async) {
-      init_block = delegate()->BuildRejectPromiseOnException(init_block);
-    }
-
-    if (init_block != nullptr) {
-      body->Add(init_block, delegate()->zone());
-    }
-  }
-}
-
 }  // namespace internal
 }  // namespace v8
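The inlined AddFormalParameter/DeclareFormalParameter helpers above keep a parameter's own name only while its pattern is a bare identifier with no default, and once the list as a whole is non-simple they declare unnamed TEMPORARY slots, with the rest parameter keeping its name. A compressed sketch of that bookkeeping, with hypothetical Param/ParamList/Mode types standing in for the V8 classes, could look like:

#include <string>
#include <utility>
#include <vector>

enum class Mode { kVar, kTemporary };  // stand-ins for VAR / TEMPORARY

struct Param {
  std::string name;  // empty when the pattern is not a plain identifier
  bool has_initializer;
  bool is_rest;
};

struct ParamList {
  std::vector<Param> params;
  bool list_is_simple = true;

  // Mirrors AddFormalParameter: keep the name only for identifier patterns
  // without a default; anything else is recorded under the empty string.
  void Add(bool is_plain_identifier, const std::string& name,
           bool has_initializer, bool is_rest) {
    bool simple = is_plain_identifier && !has_initializer;
    if (!simple) list_is_simple = false;  // tracked by the classifier in V8
    params.push_back({simple ? name : std::string(), has_initializer, is_rest});
  }

  // Mirrors DeclareFormalParameter: a non-simple list declares unnamed
  // TEMPORARY slots, except for the rest parameter, which keeps its name.
  std::pair<std::string, Mode> Declare(const Param& p) const {
    bool named = list_is_simple || p.is_rest;
    return {named ? p.name : std::string(),
            named ? Mode::kVar : Mode::kTemporary};
  }
};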
 
diff --git a/src/parsing/pattern-rewriter.cc b/src/parsing/pattern-rewriter.cc
index 1831a29..7898f87 100644
--- a/src/parsing/pattern-rewriter.cc
+++ b/src/parsing/pattern-rewriter.cc
@@ -12,7 +12,8 @@
 namespace internal {
 
 void Parser::PatternRewriter::DeclareAndInitializeVariables(
-    Block* block, const DeclarationDescriptor* declaration_descriptor,
+    Parser* parser, Block* block,
+    const DeclarationDescriptor* declaration_descriptor,
     const DeclarationParsingResult::Declaration* declaration,
     ZoneList<const AstRawString*>* names, bool* ok) {
   PatternRewriter rewriter;
@@ -20,7 +21,7 @@
   DCHECK(block->ignore_completion_value());
 
   rewriter.scope_ = declaration_descriptor->scope;
-  rewriter.parser_ = declaration_descriptor->parser;
+  rewriter.parser_ = parser;
   rewriter.context_ = BINDING;
   rewriter.pattern_ = declaration->pattern;
   rewriter.initializer_position_ = declaration->initializer_position;
@@ -36,11 +37,12 @@
 
 void Parser::PatternRewriter::RewriteDestructuringAssignment(
     Parser* parser, RewritableExpression* to_rewrite, Scope* scope) {
-  PatternRewriter rewriter;
-
+  DCHECK(!scope->HasBeenRemoved());
   DCHECK(!to_rewrite->is_rewritten());
 
   bool ok = true;
+
+  PatternRewriter rewriter;
   rewriter.scope_ = scope;
   rewriter.parser_ = parser;
   rewriter.context_ = ASSIGNMENT;
@@ -139,23 +141,16 @@
   // which the variable or constant is declared. Only function variables have
   // an initial value in the declaration (because they are initialized upon
   // entering the function).
-  //
-  // If we have a legacy const declaration, in an inner scope, the proxy
-  // is always bound to the declared variable (independent of possibly
-  // surrounding 'with' statements).
-  // For let/const declarations in harmony mode, we can also immediately
-  // pre-resolve the proxy because it resides in the same scope as the
-  // declaration.
   const AstRawString* name = pattern->raw_name();
-  VariableProxy* proxy = descriptor_->scope->NewUnresolved(
-      factory(), name, parser_->scanner()->location().beg_pos,
+  VariableProxy* proxy = factory()->NewVariableProxy(
+      name, NORMAL_VARIABLE, parser_->scanner()->location().beg_pos,
       parser_->scanner()->location().end_pos);
   Declaration* declaration = factory()->NewVariableDeclaration(
       proxy, descriptor_->scope, descriptor_->declaration_pos);
-  Variable* var = parser_->Declare(declaration, descriptor_->declaration_kind,
-                                   descriptor_->mode,
-                                   DefaultInitializationFlag(descriptor_->mode),
-                                   ok_, descriptor_->hoist_scope);
+  Variable* var = parser_->Declare(
+      declaration, descriptor_->declaration_kind, descriptor_->mode,
+      Variable::DefaultInitializationFlag(descriptor_->mode), ok_,
+      descriptor_->hoist_scope);
   if (!*ok_) return;
   DCHECK_NOT_NULL(var);
   DCHECK(proxy->is_resolved());
@@ -267,12 +262,14 @@
 void Parser::PatternRewriter::VisitRewritableExpression(
     RewritableExpression* node) {
   // If this is not a destructuring assignment...
-  if (!IsAssignmentContext() || !node->expression()->IsAssignment()) {
+  if (!IsAssignmentContext()) {
     // Mark the node as rewritten to prevent redundant rewriting, and
     // perform BindingPattern rewriting
     DCHECK(!node->is_rewritten());
     node->Rewrite(node->expression());
     return Visit(node->expression());
+  } else if (!node->expression()->IsAssignment()) {
+    return Visit(node->expression());
   }
 
   if (node->is_rewritten()) return;
@@ -374,7 +371,7 @@
 
   auto temp = *temp_var = CreateTempVar(current_value_);
   auto iterator = CreateTempVar(parser_->GetIterator(
-      factory()->NewVariableProxy(temp), factory(), kNoSourcePosition));
+      factory()->NewVariableProxy(temp), kNoSourcePosition));
   auto done =
       CreateTempVar(factory()->NewBooleanLiteral(false, kNoSourcePosition));
   auto result = CreateTempVar();
@@ -601,8 +598,9 @@
 
   Expression* closing_condition = factory()->NewUnaryOperation(
       Token::NOT, factory()->NewVariableProxy(done), nopos);
-  parser_->FinalizeIteratorUse(completion, closing_condition, iterator, block_,
-                               target);
+
+  parser_->FinalizeIteratorUse(scope(), completion, closing_condition, iterator,
+                               block_, target);
   block_ = target;
 }
 
diff --git a/src/parsing/preparser.cc b/src/parsing/preparser.cc
index b1bbbf6..88470f7 100644
--- a/src/parsing/preparser.cc
+++ b/src/parsing/preparser.cc
@@ -10,6 +10,7 @@
 #include "src/conversions.h"
 #include "src/globals.h"
 #include "src/list.h"
+#include "src/parsing/duplicate-finder.h"
 #include "src/parsing/parser-base.h"
 #include "src/parsing/preparse-data-format.h"
 #include "src/parsing/preparse-data.h"
@@ -28,34 +29,18 @@
 // thus it must never be used where only a single statement
 // is correct (e.g. an if statement branch w/o braces)!
 
-#define CHECK_OK  ok);                   \
-  if (!*ok) return Statement::Default(); \
+#define CHECK_OK_VALUE(x) ok); \
+  if (!*ok) return x;          \
   ((void)0
 #define DUMMY )  // to make indentation work
 #undef DUMMY
 
-// Used in functions where the return type is not ExpressionT.
-#define CHECK_OK_CUSTOM(x) ok); \
-  if (!*ok) return this->x();   \
-  ((void)0
-#define DUMMY )  // to make indentation work
-#undef DUMMY
+#define CHECK_OK CHECK_OK_VALUE(Expression::Default())
+#define CHECK_OK_VOID CHECK_OK_VALUE(this->Void())
 
-void ParserBaseTraits<PreParser>::ReportMessageAt(
-    Scanner::Location source_location, MessageTemplate::Template message,
-    const char* arg, ParseErrorType error_type) {
-  delegate()->log_->LogMessage(source_location.beg_pos, source_location.end_pos,
-                               message, arg, error_type);
-}
+namespace {
 
-void ParserBaseTraits<PreParser>::ReportMessageAt(
-    Scanner::Location source_location, MessageTemplate::Template message,
-    const AstRawString* arg, ParseErrorType error_type) {
-  UNREACHABLE();
-}
-
-PreParserIdentifier ParserBaseTraits<PreParser>::GetSymbol(
-    Scanner* scanner) const {
+PreParserIdentifier GetSymbolHelper(Scanner* scanner) {
   switch (scanner->current_token()) {
     case Token::ENUM:
       return PreParserIdentifier::Enum();
@@ -86,49 +71,51 @@
   }
 }
 
-PreParserExpression ParserBaseTraits<PreParser>::ExpressionFromString(
-    int pos, Scanner* scanner, PreParserFactory* factory) const {
-  if (scanner->UnescapedLiteralMatches("use strict", 10)) {
-    return PreParserExpression::UseStrictStringLiteral();
+}  // unnamed namespace
+
+PreParserIdentifier PreParser::GetSymbol() const {
+  PreParserIdentifier symbol = GetSymbolHelper(scanner());
+  if (track_unresolved_variables_) {
+    const AstRawString* result = scanner()->CurrentSymbol(ast_value_factory());
+    DCHECK_NOT_NULL(result);
+    symbol.string_ = result;
   }
-  return PreParserExpression::StringLiteral();
+  return symbol;
 }
 
 PreParser::PreParseResult PreParser::PreParseLazyFunction(
-    LanguageMode language_mode, FunctionKind kind, bool has_simple_parameters,
-    bool parsing_module, ParserRecorder* log, Scanner::BookmarkScope* bookmark,
-    int* use_counts) {
+    DeclarationScope* function_scope, bool parsing_module, ParserRecorder* log,
+    bool is_inner_function, bool may_abort, int* use_counts) {
+  DCHECK_EQ(FUNCTION_SCOPE, function_scope->scope_type());
   parsing_module_ = parsing_module;
   log_ = log;
   use_counts_ = use_counts;
-  // Lazy functions always have trivial outer scopes (no with/catch scopes).
+  DCHECK(!track_unresolved_variables_);
+  track_unresolved_variables_ = is_inner_function;
+
+  // The caller passes the function_scope which is not yet inserted into the
+  // scope_state_. All scopes above the function_scope are ignored by the
+  // PreParser.
   DCHECK_NULL(scope_state_);
-  DeclarationScope* top_scope = NewScriptScope();
-  FunctionState top_state(&function_state_, &scope_state_, top_scope,
-                          kNormalFunction);
-  scope()->SetLanguageMode(language_mode);
-  DeclarationScope* function_scope = NewFunctionScope(kind);
-  if (!has_simple_parameters) function_scope->SetHasNonSimpleParameters();
-  FunctionState function_state(&function_state_, &scope_state_, function_scope,
-                               kind);
+  FunctionState function_state(&function_state_, &scope_state_, function_scope);
   DCHECK_EQ(Token::LBRACE, scanner()->current_token());
   bool ok = true;
   int start_position = peek_position();
-  ParseLazyFunctionLiteralBody(&ok, bookmark);
+  LazyParsingResult result = ParseLazyFunctionLiteralBody(may_abort, &ok);
   use_counts_ = nullptr;
-  if (bookmark && bookmark->HasBeenReset()) {
-    // Do nothing, as we've just aborted scanning this function.
+  track_unresolved_variables_ = false;
+  if (result == kLazyParsingAborted) {
+    return kPreParseAbort;
   } else if (stack_overflow()) {
     return kPreParseStackOverflow;
   } else if (!ok) {
     ReportUnexpectedToken(scanner()->current_token());
   } else {
     DCHECK_EQ(Token::RBRACE, scanner()->peek());
-    if (is_strict(scope()->language_mode())) {
+    if (is_strict(function_scope->language_mode())) {
       int end_pos = scanner()->location().end_pos;
       CheckStrictOctalLiteral(start_position, end_pos, &ok);
-      CheckDecimalLiteralWithLeadingZero(use_counts, start_position, end_pos);
-      if (!ok) return kPreParseSuccess;
+      CheckDecimalLiteralWithLeadingZero(start_position, end_pos);
     }
   }
   return kPreParseSuccess;
@@ -148,908 +135,6 @@
 // That means that contextual checks (like a label being declared where
 // it is used) are generally omitted.
 
-
-PreParser::Statement PreParser::ParseStatementListItem(bool* ok) {
-  // ECMA 262 6th Edition
-  // StatementListItem[Yield, Return] :
-  //   Statement[?Yield, ?Return]
-  //   Declaration[?Yield]
-  //
-  // Declaration[Yield] :
-  //   HoistableDeclaration[?Yield]
-  //   ClassDeclaration[?Yield]
-  //   LexicalDeclaration[In, ?Yield]
-  //
-  // HoistableDeclaration[Yield, Default] :
-  //   FunctionDeclaration[?Yield, ?Default]
-  //   GeneratorDeclaration[?Yield, ?Default]
-  //
-  // LexicalDeclaration[In, Yield] :
-  //   LetOrConst BindingList[?In, ?Yield] ;
-
-  switch (peek()) {
-    case Token::FUNCTION:
-      return ParseHoistableDeclaration(ok);
-    case Token::CLASS:
-      return ParseClassDeclaration(ok);
-    case Token::CONST:
-      return ParseVariableStatement(kStatementListItem, ok);
-    case Token::LET:
-      if (IsNextLetKeyword()) {
-        return ParseVariableStatement(kStatementListItem, ok);
-      }
-      break;
-    case Token::ASYNC:
-      if (allow_harmony_async_await() && PeekAhead() == Token::FUNCTION &&
-          !scanner()->HasAnyLineTerminatorAfterNext()) {
-        Consume(Token::ASYNC);
-        return ParseAsyncFunctionDeclaration(ok);
-      }
-    /* falls through */
-    default:
-      break;
-  }
-  return ParseStatement(kAllowLabelledFunctionStatement, ok);
-}
-
-
-void PreParser::ParseStatementList(int end_token, bool* ok,
-                                   Scanner::BookmarkScope* bookmark) {
-  // SourceElements ::
-  //   (Statement)* <end_token>
-
-  // Bookkeeping for trial parse if bookmark is set:
-  DCHECK_IMPLIES(bookmark, bookmark->HasBeenSet());
-  bool maybe_reset = bookmark != nullptr;
-  int count_statements = 0;
-
-  bool directive_prologue = true;
-  while (peek() != end_token) {
-    if (directive_prologue && peek() != Token::STRING) {
-      directive_prologue = false;
-    }
-    bool starts_with_identifier = peek() == Token::IDENTIFIER;
-    Scanner::Location token_loc = scanner()->peek_location();
-    Statement statement = ParseStatementListItem(CHECK_OK_CUSTOM(Void));
-
-    if (directive_prologue) {
-      bool use_strict_found = statement.IsUseStrictLiteral();
-
-      if (use_strict_found) {
-        scope()->SetLanguageMode(
-            static_cast<LanguageMode>(scope()->language_mode() | STRICT));
-      } else if (!statement.IsStringLiteral()) {
-        directive_prologue = false;
-      }
-
-      if (use_strict_found && !scope()->HasSimpleParameters()) {
-        // TC39 deemed "use strict" directives to be an error when occurring
-        // in the body of a function with non-simple parameter list, on
-        // 29/7/2015. https://goo.gl/ueA7Ln
-        ReportMessageAt(token_loc,
-                        MessageTemplate::kIllegalLanguageModeDirective,
-                        "use strict");
-        *ok = false;
-        return;
-      }
-    }
-
-    // If we're allowed to reset to a bookmark, we will do so when we see a long
-    // and trivial function.
-    // Our current definition of 'long and trivial' is:
-    // - over 200 statements
-    // - all starting with an identifier (i.e., no if, for, while, etc.)
-    if (maybe_reset && (!starts_with_identifier ||
-                        ++count_statements > kLazyParseTrialLimit)) {
-      if (count_statements > kLazyParseTrialLimit) {
-        bookmark->Reset();
-        return;
-      }
-      maybe_reset = false;
-    }
-  }
-}
-
-
-PreParser::Statement PreParser::ParseStatement(
-    AllowLabelledFunctionStatement allow_function, bool* ok) {
-  // Statement ::
-  //   EmptyStatement
-  //   ...
-
-  if (peek() == Token::SEMICOLON) {
-    Next();
-    return Statement::Default();
-  }
-  return ParseSubStatement(allow_function, ok);
-}
-
-PreParser::Statement PreParser::ParseScopedStatement(bool legacy, bool* ok) {
-  if (is_strict(language_mode()) || peek() != Token::FUNCTION ||
-      (legacy && allow_harmony_restrictive_declarations())) {
-    return ParseSubStatement(kDisallowLabelledFunctionStatement, ok);
-  } else {
-    BlockState block_state(&scope_state_);
-    return ParseFunctionDeclaration(ok);
-  }
-}
-
-PreParser::Statement PreParser::ParseSubStatement(
-    AllowLabelledFunctionStatement allow_function, bool* ok) {
-  // Statement ::
-  //   Block
-  //   VariableStatement
-  //   EmptyStatement
-  //   ExpressionStatement
-  //   IfStatement
-  //   IterationStatement
-  //   ContinueStatement
-  //   BreakStatement
-  //   ReturnStatement
-  //   WithStatement
-  //   LabelledStatement
-  //   SwitchStatement
-  //   ThrowStatement
-  //   TryStatement
-  //   DebuggerStatement
-
-  // Note: Since labels can only be used by 'break' and 'continue'
-  // statements, which themselves are only valid within blocks,
-  // iterations or 'switch' statements (i.e., BreakableStatements),
-  // labels can be simply ignored in all other cases; except for
-  // trivial labeled break statements 'label: break label' which is
-  // parsed into an empty statement.
-
-  // Keep the source position of the statement
-  switch (peek()) {
-    case Token::LBRACE:
-      return ParseBlock(ok);
-
-    case Token::SEMICOLON:
-      Next();
-      return Statement::Default();
-
-    case Token::IF:
-      return ParseIfStatement(ok);
-
-    case Token::DO:
-      return ParseDoWhileStatement(ok);
-
-    case Token::WHILE:
-      return ParseWhileStatement(ok);
-
-    case Token::FOR:
-      return ParseForStatement(ok);
-
-    case Token::CONTINUE:
-      return ParseContinueStatement(ok);
-
-    case Token::BREAK:
-      return ParseBreakStatement(ok);
-
-    case Token::RETURN:
-      return ParseReturnStatement(ok);
-
-    case Token::WITH:
-      return ParseWithStatement(ok);
-
-    case Token::SWITCH:
-      return ParseSwitchStatement(ok);
-
-    case Token::THROW:
-      return ParseThrowStatement(ok);
-
-    case Token::TRY:
-      return ParseTryStatement(ok);
-
-    case Token::FUNCTION:
-      // FunctionDeclaration only allowed as a StatementListItem, not in
-      // an arbitrary Statement position. Exceptions such as
-      // ES#sec-functiondeclarations-in-ifstatement-statement-clauses
-      // are handled by calling ParseScopedStatement rather than
-      // ParseSubStatement directly.
-      ReportMessageAt(scanner()->peek_location(),
-                      is_strict(language_mode())
-                          ? MessageTemplate::kStrictFunction
-                          : MessageTemplate::kSloppyFunction);
-      *ok = false;
-      return Statement::Default();
-
-    case Token::DEBUGGER:
-      return ParseDebuggerStatement(ok);
-
-    case Token::VAR:
-      return ParseVariableStatement(kStatement, ok);
-
-    default:
-      return ParseExpressionOrLabelledStatement(allow_function, ok);
-  }
-}
-
-PreParser::Statement PreParser::ParseHoistableDeclaration(
-    int pos, ParseFunctionFlags flags, bool* ok) {
-  const bool is_generator = flags & ParseFunctionFlags::kIsGenerator;
-  const bool is_async = flags & ParseFunctionFlags::kIsAsync;
-  DCHECK(!is_generator || !is_async);
-
-  bool is_strict_reserved = false;
-  Identifier name = ParseIdentifierOrStrictReservedWord(
-      &is_strict_reserved, CHECK_OK);
-
-  ParseFunctionLiteral(name, scanner()->location(),
-                       is_strict_reserved ? kFunctionNameIsStrictReserved
-                                          : kFunctionNameValidityUnknown,
-                       is_generator ? FunctionKind::kGeneratorFunction
-                                    : is_async ? FunctionKind::kAsyncFunction
-                                               : FunctionKind::kNormalFunction,
-                       pos, FunctionLiteral::kDeclaration, language_mode(),
-                       CHECK_OK);
-  return Statement::FunctionDeclaration();
-}
-
-PreParser::Statement PreParser::ParseAsyncFunctionDeclaration(bool* ok) {
-  // AsyncFunctionDeclaration ::
-  //   async [no LineTerminator here] function BindingIdentifier[Await]
-  //       ( FormalParameters[Await] ) { AsyncFunctionBody }
-  DCHECK_EQ(scanner()->current_token(), Token::ASYNC);
-  int pos = position();
-  Expect(Token::FUNCTION, CHECK_OK);
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsAsync;
-  return ParseHoistableDeclaration(pos, flags, ok);
-}
-
-PreParser::Statement PreParser::ParseHoistableDeclaration(bool* ok) {
-  // FunctionDeclaration ::
-  //   'function' Identifier '(' FormalParameterListopt ')' '{' FunctionBody '}'
-  // GeneratorDeclaration ::
-  //   'function' '*' Identifier '(' FormalParameterListopt ')'
-  //      '{' FunctionBody '}'
-
-  Expect(Token::FUNCTION, CHECK_OK);
-  int pos = position();
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
-  if (Check(Token::MUL)) {
-    flags |= ParseFunctionFlags::kIsGenerator;
-  }
-  return ParseHoistableDeclaration(pos, flags, ok);
-}
-
-
-PreParser::Statement PreParser::ParseClassDeclaration(bool* ok) {
-  Expect(Token::CLASS, CHECK_OK);
-
-  int pos = position();
-  bool is_strict_reserved = false;
-  Identifier name =
-      ParseIdentifierOrStrictReservedWord(&is_strict_reserved, CHECK_OK);
-  ParseClassLiteral(nullptr, name, scanner()->location(), is_strict_reserved,
-                    pos, CHECK_OK);
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseBlock(bool* ok) {
-  // Block ::
-  //   '{' StatementList '}'
-
-  Expect(Token::LBRACE, CHECK_OK);
-  Statement final = Statement::Default();
-  {
-    BlockState block_state(&scope_state_);
-    while (peek() != Token::RBRACE) {
-      final = ParseStatementListItem(CHECK_OK);
-    }
-  }
-  Expect(Token::RBRACE, ok);
-  return final;
-}
-
-
-PreParser::Statement PreParser::ParseVariableStatement(
-    VariableDeclarationContext var_context,
-    bool* ok) {
-  // VariableStatement ::
-  //   VariableDeclarations ';'
-
-  Statement result = ParseVariableDeclarations(
-      var_context, nullptr, nullptr, nullptr, nullptr, nullptr, CHECK_OK);
-  ExpectSemicolon(CHECK_OK);
-  return result;
-}
-
-
-// If the variable declaration declares exactly one non-const
-// variable, then *var is set to that variable. In all other cases,
-// *var is untouched; in particular, it is the caller's responsibility
-// to initialize it properly. This mechanism is also used for the parsing
-// of 'for-in' loops.
-PreParser::Statement PreParser::ParseVariableDeclarations(
-    VariableDeclarationContext var_context, int* num_decl, bool* is_lexical,
-    bool* is_binding_pattern, Scanner::Location* first_initializer_loc,
-    Scanner::Location* bindings_loc, bool* ok) {
-  // VariableDeclarations ::
-  //   ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
-  //
-  // The ES6 Draft Rev3 specifies the following grammar for const declarations
-  //
-  // ConstDeclaration ::
-  //   const ConstBinding (',' ConstBinding)* ';'
-  // ConstBinding ::
-  //   Identifier '=' AssignmentExpression
-  //
-  // TODO(ES6):
-  // ConstBinding ::
-  //   BindingPattern '=' AssignmentExpression
-  bool require_initializer = false;
-  bool lexical = false;
-  bool is_pattern = false;
-  if (peek() == Token::VAR) {
-    Consume(Token::VAR);
-  } else if (peek() == Token::CONST) {
-    // TODO(ES6): The ES6 Draft Rev4 section 12.2.2 reads:
-    //
-    // ConstDeclaration : const ConstBinding (',' ConstBinding)* ';'
-    //
-    // * It is a Syntax Error if the code that matches this production is not
-    //   contained in extended code.
-    //
-    // However disallowing const in sloppy mode will break compatibility with
-    // existing pages. Therefore we keep allowing const with the old
-    // non-harmony semantics in sloppy mode.
-    Consume(Token::CONST);
-    DCHECK(var_context != kStatement);
-    require_initializer = true;
-    lexical = true;
-  } else if (peek() == Token::LET) {
-    Consume(Token::LET);
-    DCHECK(var_context != kStatement);
-    lexical = true;
-  } else {
-    *ok = false;
-    return Statement::Default();
-  }
-
-  // The scope of a var/const declared variable anywhere inside a function
-  // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). The scope
-  // of a let declared variable is the scope of the immediately enclosing
-  // block.
-  int nvars = 0;  // the number of variables declared
-  int bindings_start = peek_position();
-  do {
-    // Parse binding pattern.
-    if (nvars > 0) Consume(Token::COMMA);
-    int decl_pos = peek_position();
-    PreParserExpression pattern = PreParserExpression::Default();
-    {
-      ExpressionClassifier pattern_classifier(this);
-      pattern = ParsePrimaryExpression(&pattern_classifier, CHECK_OK);
-
-      ValidateBindingPattern(&pattern_classifier, CHECK_OK);
-      if (lexical) {
-        ValidateLetPattern(&pattern_classifier, CHECK_OK);
-      }
-    }
-
-    is_pattern = pattern.IsObjectLiteral() || pattern.IsArrayLiteral();
-
-    Scanner::Location variable_loc = scanner()->location();
-    nvars++;
-    if (Check(Token::ASSIGN)) {
-      ExpressionClassifier classifier(this);
-      ParseAssignmentExpression(var_context != kForStatement, &classifier,
-                                CHECK_OK);
-      ValidateExpression(&classifier, CHECK_OK);
-
-      variable_loc.end_pos = scanner()->location().end_pos;
-      if (first_initializer_loc && !first_initializer_loc->IsValid()) {
-        *first_initializer_loc = variable_loc;
-      }
-    } else if ((require_initializer || is_pattern) &&
-               (var_context != kForStatement || !PeekInOrOf())) {
-      ReportMessageAt(
-          Scanner::Location(decl_pos, scanner()->location().end_pos),
-          MessageTemplate::kDeclarationMissingInitializer,
-          is_pattern ? "destructuring" : "const");
-      *ok = false;
-      return Statement::Default();
-    }
-  } while (peek() == Token::COMMA);
-
-  if (bindings_loc) {
-    *bindings_loc =
-        Scanner::Location(bindings_start, scanner()->location().end_pos);
-  }
-
-  if (num_decl != nullptr) *num_decl = nvars;
-  if (is_lexical != nullptr) *is_lexical = lexical;
-  if (is_binding_pattern != nullptr) *is_binding_pattern = is_pattern;
-  return Statement::Default();
-}
-
-PreParser::Statement PreParser::ParseFunctionDeclaration(bool* ok) {
-  Consume(Token::FUNCTION);
-  int pos = position();
-  ParseFunctionFlags flags = ParseFunctionFlags::kIsNormal;
-  if (Check(Token::MUL)) {
-    flags |= ParseFunctionFlags::kIsGenerator;
-    if (allow_harmony_restrictive_declarations()) {
-      ReportMessageAt(scanner()->location(),
-                      MessageTemplate::kGeneratorInLegacyContext);
-      *ok = false;
-      return Statement::Default();
-    }
-  }
-  return ParseHoistableDeclaration(pos, flags, ok);
-}
-
-PreParser::Statement PreParser::ParseExpressionOrLabelledStatement(
-    AllowLabelledFunctionStatement allow_function, bool* ok) {
-  // ExpressionStatement | LabelledStatement ::
-  //   Expression ';'
-  //   Identifier ':' Statement
-
-  switch (peek()) {
-    case Token::FUNCTION:
-    case Token::LBRACE:
-      UNREACHABLE();  // Always handled by the callers.
-    case Token::CLASS:
-      ReportUnexpectedToken(Next());
-      *ok = false;
-      return Statement::Default();
-
-    default:
-      break;
-  }
-
-  bool starts_with_identifier = peek_any_identifier();
-  ExpressionClassifier classifier(this);
-  Expression expr = ParseExpression(true, &classifier, CHECK_OK);
-  ValidateExpression(&classifier, CHECK_OK);
-
-  // Even if the expression starts with an identifier, it is not necessarily an
-  // identifier. For example, "foo + bar" starts with an identifier but is not
-  // an identifier.
-  if (starts_with_identifier && expr.IsIdentifier() && peek() == Token::COLON) {
-    // Expression is a single identifier, and not, e.g., a parenthesized
-    // identifier.
-    DCHECK(!expr.AsIdentifier().IsEnum());
-    DCHECK(!parsing_module_ || !expr.AsIdentifier().IsAwait());
-    DCHECK(is_sloppy(language_mode()) ||
-           !IsFutureStrictReserved(expr.AsIdentifier()));
-    Consume(Token::COLON);
-    // ES#sec-labelled-function-declarations Labelled Function Declarations
-    if (peek() == Token::FUNCTION && is_sloppy(language_mode())) {
-      if (allow_function == kAllowLabelledFunctionStatement) {
-        return ParseFunctionDeclaration(ok);
-      } else {
-        return ParseScopedStatement(true, ok);
-      }
-    }
-    Statement statement =
-        ParseStatement(kDisallowLabelledFunctionStatement, ok);
-    return statement.IsJumpStatement() ? Statement::Default() : statement;
-    // Preparsing is disabled for extensions (because the extension details
-    // aren't passed to lazily compiled functions), so we don't
-    // accept "native function" in the preparser.
-  }
-  // Parsed expression statement.
-  ExpectSemicolon(CHECK_OK);
-  return Statement::ExpressionStatement(expr);
-}
-
-
-PreParser::Statement PreParser::ParseIfStatement(bool* ok) {
-  // IfStatement ::
-  //   'if' '(' Expression ')' Statement ('else' Statement)?
-
-  Expect(Token::IF, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-  Statement stat = ParseScopedStatement(false, CHECK_OK);
-  if (peek() == Token::ELSE) {
-    Next();
-    Statement else_stat = ParseScopedStatement(false, CHECK_OK);
-    stat = (stat.IsJumpStatement() && else_stat.IsJumpStatement()) ?
-        Statement::Jump() : Statement::Default();
-  } else {
-    stat = Statement::Default();
-  }
-  return stat;
-}
-
-
-PreParser::Statement PreParser::ParseContinueStatement(bool* ok) {
-  // ContinueStatement ::
-  //   'continue' [no line terminator] Identifier? ';'
-
-  Expect(Token::CONTINUE, CHECK_OK);
-  Token::Value tok = peek();
-  if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
-      tok != Token::SEMICOLON &&
-      tok != Token::RBRACE &&
-      tok != Token::EOS) {
-    // ECMA allows "eval" or "arguments" as labels even in strict mode.
-    ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  }
-  ExpectSemicolon(CHECK_OK);
-  return Statement::Jump();
-}
-
-
-PreParser::Statement PreParser::ParseBreakStatement(bool* ok) {
-  // BreakStatement ::
-  //   'break' [no line terminator] Identifier? ';'
-
-  Expect(Token::BREAK, CHECK_OK);
-  Token::Value tok = peek();
-  if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
-      tok != Token::SEMICOLON &&
-      tok != Token::RBRACE &&
-      tok != Token::EOS) {
-    // ECMA allows "eval" or "arguments" as labels even in strict mode.
-    ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  }
-  ExpectSemicolon(CHECK_OK);
-  return Statement::Jump();
-}
-
-
-PreParser::Statement PreParser::ParseReturnStatement(bool* ok) {
-  // ReturnStatement ::
-  //   'return' [no line terminator] Expression? ';'
-
-  // Consume the return token. It is necessary to do before
-  // reporting any errors on it, because of the way errors are
-  // reported (underlining).
-  Expect(Token::RETURN, CHECK_OK);
-
-  // An ECMAScript program is considered syntactically incorrect if it
-  // contains a return statement that is not within the body of a
-  // function. See ECMA-262, section 12.9, page 67.
-  // This is not handled during preparsing.
-
-  Token::Value tok = peek();
-  if (!scanner()->HasAnyLineTerminatorBeforeNext() &&
-      tok != Token::SEMICOLON &&
-      tok != Token::RBRACE &&
-      tok != Token::EOS) {
-    // Because of the return code rewriting that happens in case of a subclass
-    // constructor we don't want to accept tail calls, therefore we don't set
-    // ReturnExprScope to kInsideValidReturnStatement here.
-    ReturnExprContext return_expr_context =
-        IsSubclassConstructor(function_state_->kind())
-            ? function_state_->return_expr_context()
-            : ReturnExprContext::kInsideValidReturnStatement;
-
-    ReturnExprScope maybe_allow_tail_calls(function_state_,
-                                           return_expr_context);
-    ParseExpression(true, CHECK_OK);
-  }
-  ExpectSemicolon(CHECK_OK);
-  return Statement::Jump();
-}
-
-
-PreParser::Statement PreParser::ParseWithStatement(bool* ok) {
-  // WithStatement ::
-  //   'with' '(' Expression ')' Statement
-  Expect(Token::WITH, CHECK_OK);
-  if (is_strict(language_mode())) {
-    ReportMessageAt(scanner()->location(), MessageTemplate::kStrictWith);
-    *ok = false;
-    return Statement::Default();
-  }
-  Expect(Token::LPAREN, CHECK_OK);
-  ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-
-  Scope* with_scope = NewScope(WITH_SCOPE);
-  BlockState block_state(&scope_state_, with_scope);
-  ParseScopedStatement(true, CHECK_OK);
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseSwitchStatement(bool* ok) {
-  // SwitchStatement ::
-  //   'switch' '(' Expression ')' '{' CaseClause* '}'
-
-  Expect(Token::SWITCH, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-
-  {
-    BlockState cases_block_state(&scope_state_);
-    Expect(Token::LBRACE, CHECK_OK);
-    Token::Value token = peek();
-    while (token != Token::RBRACE) {
-      if (token == Token::CASE) {
-        Expect(Token::CASE, CHECK_OK);
-        ParseExpression(true, CHECK_OK);
-      } else {
-        Expect(Token::DEFAULT, CHECK_OK);
-      }
-      Expect(Token::COLON, CHECK_OK);
-      token = peek();
-      Statement statement = Statement::Jump();
-      while (token != Token::CASE &&
-             token != Token::DEFAULT &&
-             token != Token::RBRACE) {
-        statement = ParseStatementListItem(CHECK_OK);
-        token = peek();
-      }
-    }
-  }
-  Expect(Token::RBRACE, ok);
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseDoWhileStatement(bool* ok) {
-  // DoStatement ::
-  //   'do' Statement 'while' '(' Expression ')' ';'
-
-  Expect(Token::DO, CHECK_OK);
-  ParseScopedStatement(true, CHECK_OK);
-  Expect(Token::WHILE, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, ok);
-  if (peek() == Token::SEMICOLON) Consume(Token::SEMICOLON);
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseWhileStatement(bool* ok) {
-  // WhileStatement ::
-  //   'while' '(' Expression ')' Statement
-
-  Expect(Token::WHILE, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  ParseExpression(true, CHECK_OK);
-  Expect(Token::RPAREN, CHECK_OK);
-  ParseScopedStatement(true, ok);
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseForStatement(bool* ok) {
-  // ForStatement ::
-  //   'for' '(' Expression? ';' Expression? ';' Expression? ')' Statement
-
-  // Create an in-between scope for let-bound iteration variables.
-  bool has_lexical = false;
-
-  BlockState block_state(&scope_state_);
-  Expect(Token::FOR, CHECK_OK);
-  Expect(Token::LPAREN, CHECK_OK);
-  if (peek() != Token::SEMICOLON) {
-    ForEachStatement::VisitMode mode;
-    if (peek() == Token::VAR || peek() == Token::CONST ||
-        (peek() == Token::LET && IsNextLetKeyword())) {
-      int decl_count;
-      bool is_lexical;
-      bool is_binding_pattern;
-      Scanner::Location first_initializer_loc = Scanner::Location::invalid();
-      Scanner::Location bindings_loc = Scanner::Location::invalid();
-      ParseVariableDeclarations(kForStatement, &decl_count, &is_lexical,
-                                &is_binding_pattern, &first_initializer_loc,
-                                &bindings_loc, CHECK_OK);
-      if (is_lexical) has_lexical = true;
-      if (CheckInOrOf(&mode, ok)) {
-        if (!*ok) return Statement::Default();
-        if (decl_count != 1) {
-          ReportMessageAt(bindings_loc,
-                          MessageTemplate::kForInOfLoopMultiBindings,
-                          ForEachStatement::VisitModeString(mode));
-          *ok = false;
-          return Statement::Default();
-        }
-        if (first_initializer_loc.IsValid() &&
-            (is_strict(language_mode()) || mode == ForEachStatement::ITERATE ||
-             is_lexical || is_binding_pattern || allow_harmony_for_in())) {
-          // Only increment the use count if we would have let this through
-          // without the flag.
-          if (use_counts_ != nullptr && allow_harmony_for_in()) {
-            ++use_counts_[v8::Isolate::kForInInitializer];
-          }
-          ReportMessageAt(first_initializer_loc,
-                          MessageTemplate::kForInOfLoopInitializer,
-                          ForEachStatement::VisitModeString(mode));
-          *ok = false;
-          return Statement::Default();
-        }
-
-        if (mode == ForEachStatement::ITERATE) {
-          ExpressionClassifier classifier(this);
-          ParseAssignmentExpression(true, &classifier, CHECK_OK);
-          RewriteNonPattern(&classifier, CHECK_OK);
-        } else {
-          ParseExpression(true, CHECK_OK);
-        }
-
-        Expect(Token::RPAREN, CHECK_OK);
-        {
-          ReturnExprScope no_tail_calls(function_state_,
-                                        ReturnExprContext::kInsideForInOfBody);
-          ParseScopedStatement(true, CHECK_OK);
-        }
-        return Statement::Default();
-      }
-    } else {
-      int lhs_beg_pos = peek_position();
-      ExpressionClassifier classifier(this);
-      Expression lhs = ParseExpression(false, &classifier, CHECK_OK);
-      int lhs_end_pos = scanner()->location().end_pos;
-      bool is_for_each = CheckInOrOf(&mode, CHECK_OK);
-      bool is_destructuring = is_for_each &&
-                              (lhs->IsArrayLiteral() || lhs->IsObjectLiteral());
-
-      if (is_destructuring) {
-        ValidateAssignmentPattern(&classifier, CHECK_OK);
-      } else {
-        ValidateExpression(&classifier, CHECK_OK);
-      }
-
-      if (is_for_each) {
-        if (!is_destructuring) {
-          lhs = CheckAndRewriteReferenceExpression(
-              lhs, lhs_beg_pos, lhs_end_pos, MessageTemplate::kInvalidLhsInFor,
-              kSyntaxError, CHECK_OK);
-        }
-
-        if (mode == ForEachStatement::ITERATE) {
-          ExpressionClassifier classifier(this);
-          ParseAssignmentExpression(true, &classifier, CHECK_OK);
-          RewriteNonPattern(&classifier, CHECK_OK);
-        } else {
-          ParseExpression(true, CHECK_OK);
-        }
-
-        Expect(Token::RPAREN, CHECK_OK);
-        {
-          BlockState block_state(&scope_state_);
-          ParseScopedStatement(true, CHECK_OK);
-        }
-        return Statement::Default();
-      }
-    }
-  }
-
-  // Parsed initializer at this point.
-  Expect(Token::SEMICOLON, CHECK_OK);
-
-  // If there are let bindings, then condition and the next statement of the
-  // for loop must be parsed in a new scope.
-  Scope* inner_scope = scope();
-  // TODO(verwaest): Allocate this through a ScopeState as well.
-  if (has_lexical) inner_scope = NewScopeWithParent(inner_scope, BLOCK_SCOPE);
-
-  {
-    BlockState block_state(&scope_state_, inner_scope);
-
-    if (peek() != Token::SEMICOLON) {
-      ParseExpression(true, CHECK_OK);
-    }
-    Expect(Token::SEMICOLON, CHECK_OK);
-
-    if (peek() != Token::RPAREN) {
-      ParseExpression(true, CHECK_OK);
-    }
-    Expect(Token::RPAREN, CHECK_OK);
-
-    ParseScopedStatement(true, ok);
-  }
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseThrowStatement(bool* ok) {
-  // ThrowStatement ::
-  //   'throw' [no line terminator] Expression ';'
-
-  Expect(Token::THROW, CHECK_OK);
-  if (scanner()->HasAnyLineTerminatorBeforeNext()) {
-    ReportMessageAt(scanner()->location(), MessageTemplate::kNewlineAfterThrow);
-    *ok = false;
-    return Statement::Default();
-  }
-  ParseExpression(true, CHECK_OK);
-  ExpectSemicolon(ok);
-  return Statement::Jump();
-}
-
-
-PreParser::Statement PreParser::ParseTryStatement(bool* ok) {
-  // TryStatement ::
-  //   'try' Block Catch
-  //   'try' Block Finally
-  //   'try' Block Catch Finally
-  //
-  // Catch ::
-  //   'catch' '(' Identifier ')' Block
-  //
-  // Finally ::
-  //   'finally' Block
-
-  Expect(Token::TRY, CHECK_OK);
-
-  {
-    ReturnExprScope no_tail_calls(function_state_,
-                                  ReturnExprContext::kInsideTryBlock);
-    ParseBlock(CHECK_OK);
-  }
-
-  Token::Value tok = peek();
-  if (tok != Token::CATCH && tok != Token::FINALLY) {
-    ReportMessageAt(scanner()->location(), MessageTemplate::kNoCatchOrFinally);
-    *ok = false;
-    return Statement::Default();
-  }
-  TailCallExpressionList tail_call_expressions_in_catch_block(zone());
-  bool catch_block_exists = false;
-  if (tok == Token::CATCH) {
-    Consume(Token::CATCH);
-    Expect(Token::LPAREN, CHECK_OK);
-    Scope* catch_scope = NewScope(CATCH_SCOPE);
-    ExpressionClassifier pattern_classifier(this);
-    ParsePrimaryExpression(&pattern_classifier, CHECK_OK);
-    ValidateBindingPattern(&pattern_classifier, CHECK_OK);
-    Expect(Token::RPAREN, CHECK_OK);
-    {
-      CollectExpressionsInTailPositionToListScope
-          collect_tail_call_expressions_scope(
-              function_state_, &tail_call_expressions_in_catch_block);
-      BlockState block_state(&scope_state_, catch_scope);
-      {
-        BlockState block_state(&scope_state_);
-        ParseBlock(CHECK_OK);
-      }
-    }
-    catch_block_exists = true;
-    tok = peek();
-  }
-  if (tok == Token::FINALLY) {
-    Consume(Token::FINALLY);
-    ParseBlock(CHECK_OK);
-    if (FLAG_harmony_explicit_tailcalls && catch_block_exists &&
-        tail_call_expressions_in_catch_block.has_explicit_tail_calls()) {
-      // TODO(ishell): update chapter number.
-      // ES8 XX.YY.ZZ
-      ReportMessageAt(tail_call_expressions_in_catch_block.location(),
-                      MessageTemplate::kUnexpectedTailCallInCatchBlock);
-      *ok = false;
-      return Statement::Default();
-    }
-  }
-  return Statement::Default();
-}
-
-
-PreParser::Statement PreParser::ParseDebuggerStatement(bool* ok) {
-  // In ECMA-262 'debugger' is defined as a reserved keyword. In some browser
-  // contexts this is used as a statement which invokes the debugger as if a
-  // break point is present.
-  // DebuggerStatement ::
-  //   'debugger' ';'
-
-  Expect(Token::DEBUGGER, CHECK_OK);
-  ExpectSemicolon(ok);
-  return Statement::Default();
-}
-
-
-// Redefinition of CHECK_OK for parsing expressions.
-#undef CHECK_OK
-#define CHECK_OK  ok);                     \
-  if (!*ok) return Expression::Default();  \
-  ((void)0
-#define DUMMY )  // to make indentation work
-#undef DUMMY
-
-
 PreParser::Expression PreParser::ParseFunctionLiteral(
     Identifier function_name, Scanner::Location function_name_location,
     FunctionNameValidity function_name_validity, FunctionKind kind,
@@ -1059,11 +144,11 @@
   //   '(' FormalParameterList? ')' '{' FunctionBody '}'
 
   // Parse function body.
+  PreParserStatementList body;
   bool outer_is_script_scope = scope()->is_script_scope();
   DeclarationScope* function_scope = NewFunctionScope(kind);
   function_scope->SetLanguageMode(language_mode);
-  FunctionState function_state(&function_state_, &scope_state_, function_scope,
-                               kind);
+  FunctionState function_state(&function_state_, &scope_state_, function_scope);
   DuplicateFinder duplicate_finder(scanner()->unicode_cache());
   ExpressionClassifier formals_classifier(this, &duplicate_finder);
 
@@ -1071,7 +156,7 @@
   int start_position = scanner()->location().beg_pos;
   function_scope->set_start_position(start_position);
   PreParserFormalParameters formals(function_scope);
-  ParseFormalParameterList(&formals, &formals_classifier, CHECK_OK);
+  ParseFormalParameterList(&formals, CHECK_OK);
   Expect(Token::RPAREN, CHECK_OK);
   int formals_end_position = scanner()->location().end_pos;
 
@@ -1085,9 +170,9 @@
 
   Expect(Token::LBRACE, CHECK_OK);
   if (is_lazily_parsed) {
-    ParseLazyFunctionLiteralBody(CHECK_OK);
+    ParseLazyFunctionLiteralBody(false, CHECK_OK);
   } else {
-    ParseStatementList(Token::RBRACE, CHECK_OK);
+    ParseStatementList(body, Token::RBRACE, CHECK_OK);
   }
   Expect(Token::RBRACE, CHECK_OK);
 
@@ -1100,52 +185,24 @@
                     function_name_location, CHECK_OK);
   const bool allow_duplicate_parameters =
       is_sloppy(language_mode) && formals.is_simple && !IsConciseMethod(kind);
-  ValidateFormalParameters(&formals_classifier, language_mode,
-                           allow_duplicate_parameters, CHECK_OK);
+  ValidateFormalParameters(language_mode, allow_duplicate_parameters, CHECK_OK);
 
   if (is_strict(language_mode)) {
     int end_position = scanner()->location().end_pos;
     CheckStrictOctalLiteral(start_position, end_position, CHECK_OK);
-    CheckDecimalLiteralWithLeadingZero(use_counts_, start_position,
-                                       end_position);
+    CheckDecimalLiteralWithLeadingZero(start_position, end_position);
   }
 
   return Expression::Default();
 }
 
-PreParser::Expression PreParser::ParseAsyncFunctionExpression(bool* ok) {
-  // AsyncFunctionDeclaration ::
-  //   async [no LineTerminator here] function ( FormalParameters[Await] )
-  //       { AsyncFunctionBody }
-  //
-  //   async [no LineTerminator here] function BindingIdentifier[Await]
-  //       ( FormalParameters[Await] ) { AsyncFunctionBody }
-  int pos = position();
-  Expect(Token::FUNCTION, CHECK_OK);
-  bool is_strict_reserved = false;
-  Identifier name;
-  FunctionLiteral::FunctionType type = FunctionLiteral::kAnonymousExpression;
-
-  if (peek_any_identifier()) {
-    type = FunctionLiteral::kNamedExpression;
-    name = ParseIdentifierOrStrictReservedWord(FunctionKind::kAsyncFunction,
-                                               &is_strict_reserved, CHECK_OK);
-  }
-
-  ParseFunctionLiteral(name, scanner()->location(),
-                       is_strict_reserved ? kFunctionNameIsStrictReserved
-                                          : kFunctionNameValidityUnknown,
-                       FunctionKind::kAsyncFunction, pos, type, language_mode(),
-                       CHECK_OK);
-  return Expression::Default();
-}
-
-void PreParser::ParseLazyFunctionLiteralBody(bool* ok,
-                                             Scanner::BookmarkScope* bookmark) {
+PreParser::LazyParsingResult PreParser::ParseLazyFunctionLiteralBody(
+    bool may_abort, bool* ok) {
   int body_start = position();
-  ParseStatementList(Token::RBRACE, ok, bookmark);
-  if (!*ok) return;
-  if (bookmark && bookmark->HasBeenReset()) return;
+  PreParserStatementList body;
+  LazyParsingResult result = ParseStatementList(
+      body, Token::RBRACE, may_abort, CHECK_OK_VALUE(kLazyParsingComplete));
+  if (result == kLazyParsingAborted) return result;
 
   // Position right after terminal '}'.
   DCHECK_EQ(Token::RBRACE, scanner()->peek());
@@ -1156,113 +213,45 @@
                     function_state_->materialized_literal_count(),
                     function_state_->expected_property_count(), language_mode(),
                     scope->uses_super_property(), scope->calls_eval());
+  return kLazyParsingComplete;
 }
 
-PreParserExpression PreParser::ParseClassLiteral(
-    ExpressionClassifier* classifier, PreParserIdentifier name,
-    Scanner::Location class_name_location, bool name_is_strict_reserved,
-    int pos, bool* ok) {
-  // All parts of a ClassDeclaration and ClassExpression are strict code.
-  if (name_is_strict_reserved) {
-    ReportMessageAt(class_name_location,
-                    MessageTemplate::kUnexpectedStrictReserved);
-    *ok = false;
-    return EmptyExpression();
+PreParserExpression PreParser::ExpressionFromIdentifier(
+    PreParserIdentifier name, int start_position, int end_position,
+    InferName infer) {
+  if (track_unresolved_variables_) {
+    AstNodeFactory factory(ast_value_factory());
+    // Setting the Zone is necessary because zone_ might be the temp Zone, and
+    // AstValueFactory doesn't know about it.
+    factory.set_zone(zone());
+    DCHECK_NOT_NULL(name.string_);
+    scope()->NewUnresolved(&factory, name.string_, start_position, end_position,
+                           NORMAL_VARIABLE);
   }
-  if (IsEvalOrArguments(name)) {
-    ReportMessageAt(class_name_location, MessageTemplate::kStrictEvalArguments);
-    *ok = false;
-    return EmptyExpression();
-  }
+  return PreParserExpression::FromIdentifier(name);
+}
 
-  LanguageMode class_language_mode = language_mode();
-  BlockState block_state(&scope_state_);
-  scope()->SetLanguageMode(
-      static_cast<LanguageMode>(class_language_mode | STRICT));
-  // TODO(marja): Make PreParser use scope names too.
-  // this->scope()->SetScopeName(name);
+void PreParser::DeclareAndInitializeVariables(
+    PreParserStatement block,
+    const DeclarationDescriptor* declaration_descriptor,
+    const DeclarationParsingResult::Declaration* declaration,
+    ZoneList<const AstRawString*>* names, bool* ok) {
+  if (declaration->pattern.string_) {
+    /* Mimic what Parser does when declaring variables (see
+       Parser::PatternRewriter::VisitVariableProxy).
 
-  bool has_extends = Check(Token::EXTENDS);
-  if (has_extends) {
-    ExpressionClassifier extends_classifier(this);
-    ParseLeftHandSideExpression(&extends_classifier, CHECK_OK);
-    CheckNoTailCallExpressions(&extends_classifier, CHECK_OK);
-    ValidateExpression(&extends_classifier, CHECK_OK);
-    if (classifier != nullptr) {
-      classifier->Accumulate(&extends_classifier,
-                             ExpressionClassifier::ExpressionProductions);
+       var + no initializer -> RemoveUnresolved
+       let + no initializer -> RemoveUnresolved
+       var + initializer -> RemoveUnresolved followed by NewUnresolved
+       let + initializer -> RemoveUnresolved
+    */
+
+    if (declaration->initializer.IsEmpty() ||
+        declaration_descriptor->mode == VariableMode::LET) {
+      declaration_descriptor->scope->RemoveUnresolved(
+          declaration->pattern.string_);
     }
   }
-
-  ClassLiteralChecker checker(this);
-  bool has_seen_constructor = false;
-
-  Expect(Token::LBRACE, CHECK_OK);
-  while (peek() != Token::RBRACE) {
-    if (Check(Token::SEMICOLON)) continue;
-    const bool in_class = true;
-    bool is_computed_name = false;  // Classes do not care about computed
-                                    // property names here.
-    Identifier name;
-    ExpressionClassifier property_classifier(this);
-    ParsePropertyDefinition(
-        &checker, in_class, has_extends, MethodKind::kNormal, &is_computed_name,
-        &has_seen_constructor, &property_classifier, &name, CHECK_OK);
-    ValidateExpression(&property_classifier, CHECK_OK);
-    if (classifier != nullptr) {
-      classifier->Accumulate(&property_classifier,
-                             ExpressionClassifier::ExpressionProductions);
-    }
-  }
-
-  Expect(Token::RBRACE, CHECK_OK);
-
-  return Expression::Default();
-}
-
-
-PreParser::Expression PreParser::ParseV8Intrinsic(bool* ok) {
-  // CallRuntime ::
-  //   '%' Identifier Arguments
-  Expect(Token::MOD, CHECK_OK);
-  if (!allow_natives()) {
-    *ok = false;
-    return Expression::Default();
-  }
-  // Allow "eval" or "arguments" for backward compatibility.
-  ParseIdentifier(kAllowRestrictedIdentifiers, CHECK_OK);
-  Scanner::Location spread_pos;
-  ExpressionClassifier classifier(this);
-  ParseArguments(&spread_pos, &classifier, ok);
-  ValidateExpression(&classifier, CHECK_OK);
-
-  DCHECK(!spread_pos.IsValid());
-
-  return Expression::Default();
-}
-
-
-PreParserExpression PreParser::ParseDoExpression(bool* ok) {
-  // AssignmentExpression ::
-  //     do '{' StatementList '}'
-  Expect(Token::DO, CHECK_OK);
-  Expect(Token::LBRACE, CHECK_OK);
-  while (peek() != Token::RBRACE) {
-    ParseStatementListItem(CHECK_OK);
-  }
-  Expect(Token::RBRACE, CHECK_OK);
-  return PreParserExpression::Default();
-}
-
-void PreParser::ParseAsyncArrowSingleExpressionBody(
-    PreParserStatementList body, bool accept_IN,
-    ExpressionClassifier* classifier, int pos, bool* ok) {
-  scope()->ForceContextAllocation();
-
-  PreParserExpression return_value =
-      ParseAssignmentExpression(accept_IN, classifier, CHECK_OK_CUSTOM(Void));
-
-  body->Add(PreParserStatement::ExpressionStatement(return_value), zone());
 }
 
 #undef CHECK_OK
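
// The RemoveUnresolved / NewUnresolved calls in DeclareAndInitializeVariables
// above boil down to per-scope bookkeeping: an identifier parsed as an
// expression is recorded as an unresolved reference, and a later declaration
// that binds the same name prunes that record. A toy, self-contained sketch
// of that idea follows; ToyScope and its methods are hypothetical and are not
// V8's Scope API (which additionally distinguishes var/let and re-adds
// proxies for var initializers).

#include <iostream>
#include <string>
#include <unordered_set>

class ToyScope {
 public:
  // An identifier was used as an expression; remember it as unresolved.
  void NewUnresolved(const std::string& name) { unresolved_.insert(name); }

  // A declaration binds the name in this scope; the reference is resolved.
  void RemoveUnresolved(const std::string& name) { unresolved_.erase(name); }

  bool HasUnresolved(const std::string& name) const {
    return unresolved_.count(name) > 0;
  }

 private:
  std::unordered_set<std::string> unresolved_;
};

int main() {
  ToyScope scope;
  // For "var x = y;": "x" is first seen while parsing the pattern and is
  // recorded as unresolved, then the declaration removes that record; "y"
  // stays unresolved until some enclosing scope declares it.
  scope.NewUnresolved("x");
  scope.NewUnresolved("y");
  scope.RemoveUnresolved("x");
  std::cout << scope.HasUnresolved("x") << " " << scope.HasUnresolved("y")
            << "\n";  // prints "0 1"
  return 0;
}
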
diff --git a/src/parsing/preparser.h b/src/parsing/preparser.h
index 3f268ee..4b54748 100644
--- a/src/parsing/preparser.h
+++ b/src/parsing/preparser.h
@@ -6,18 +6,18 @@
 #define V8_PARSING_PREPARSER_H
 
 #include "src/ast/scopes.h"
-#include "src/bailout-reason.h"
-#include "src/base/hashmap.h"
-#include "src/messages.h"
-#include "src/parsing/expression-classifier.h"
-#include "src/parsing/func-name-inferrer.h"
 #include "src/parsing/parser-base.h"
-#include "src/parsing/scanner.h"
-#include "src/parsing/token.h"
 
 namespace v8 {
 namespace internal {
 
+// Whereas the Parser generates an AST during the recursive descent,
+// the PreParser doesn't create a tree. Instead, it passes around minimal
+// data objects (PreParserExpression, PreParserIdentifier etc.) which contain
+// just enough data for the upper-layer functions. PreParserFactory is
+// responsible for creating these dummy objects. It provides the same kind of
+// interface as AstNodeFactory, so ParserBase doesn't need to care which one is
+// used.
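
// A minimal, self-contained sketch of this pattern: one shared
// recursive-descent base whose expression and factory types come from a
// per-implementation traits struct. All names below (TypesOf, BaseParser,
// FullParser, StubParser, ...) are hypothetical illustrations of the
// ParserTypes<Impl> / PreParserFactory idea, not V8 code.

#include <memory>
#include <string>
#include <utility>

struct AstNode { std::string text; };  // a "real" tree node
struct TagExpression {};               // a PreParserExpression-like stand-in

struct FullFactory {  // builds real nodes, like AstNodeFactory
  std::unique_ptr<AstNode> NewLiteral(std::string s) {
    return std::unique_ptr<AstNode>(new AstNode{std::move(s)});
  }
};
struct StubFactory {  // builds dummies, like PreParserFactory
  TagExpression NewLiteral(std::string) { return TagExpression{}; }
};

template <typename Impl>
struct TypesOf;  // plays the role of ParserTypes<Impl>

template <typename Impl>
class BaseParser {  // plays the role of ParserBase<Impl>
 public:
  // Shared logic only mentions the traits types, so the same code works
  // whether Impl builds a tree or cheap markers.
  typename TypesOf<Impl>::Expression ParseLiteral(std::string s) {
    return factory_.NewLiteral(std::move(s));
  }

 private:
  typename TypesOf<Impl>::Factory factory_;
};

class FullParser;
class StubParser;
template <>
struct TypesOf<FullParser> {
  using Expression = std::unique_ptr<AstNode>;
  using Factory = FullFactory;
};
template <>
struct TypesOf<StubParser> {
  using Expression = TagExpression;
  using Factory = StubFactory;
};
class FullParser : public BaseParser<FullParser> {};  // ~ Parser
class StubParser : public BaseParser<StubParser> {};  // ~ PreParser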
 
 class PreParserIdentifier {
  public:
@@ -25,6 +25,9 @@
   static PreParserIdentifier Default() {
     return PreParserIdentifier(kUnknownIdentifier);
   }
+  static PreParserIdentifier Empty() {
+    return PreParserIdentifier(kEmptyIdentifier);
+  }
   static PreParserIdentifier Eval() {
     return PreParserIdentifier(kEvalIdentifier);
   }
@@ -64,6 +67,7 @@
   static PreParserIdentifier Async() {
     return PreParserIdentifier(kAsyncIdentifier);
   }
+  bool IsEmpty() const { return type_ == kEmptyIdentifier; }
   bool IsEval() const { return type_ == kEvalIdentifier; }
   bool IsArguments() const { return type_ == kArgumentsIdentifier; }
   bool IsEvalOrArguments() const { return IsEval() || IsArguments(); }
@@ -91,6 +95,7 @@
 
  private:
   enum Type {
+    kEmptyIdentifier,
     kUnknownIdentifier,
     kFutureReservedIdentifier,
     kFutureStrictReservedIdentifier,
@@ -107,19 +112,23 @@
     kAsyncIdentifier
   };
 
-  explicit PreParserIdentifier(Type type) : type_(type) {}
+  explicit PreParserIdentifier(Type type) : type_(type), string_(nullptr) {}
   Type type_;
-
+  // Only non-nullptr when PreParser.track_unresolved_variables_ is true.
+  const AstRawString* string_;
   friend class PreParserExpression;
+  friend class PreParser;
 };
 
 
 class PreParserExpression {
  public:
-  PreParserExpression() : code_(TypeField::encode(kExpression)) {}
+  PreParserExpression() : code_(TypeField::encode(kEmpty)) {}
+
+  static PreParserExpression Empty() { return PreParserExpression(); }
 
   static PreParserExpression Default() {
-    return PreParserExpression();
+    return PreParserExpression(TypeField::encode(kExpression));
   }
 
   static PreParserExpression Spread(PreParserExpression expression) {
@@ -128,7 +137,8 @@
 
   static PreParserExpression FromIdentifier(PreParserIdentifier id) {
     return PreParserExpression(TypeField::encode(kIdentifierExpression) |
-                               IdentifierTypeField::encode(id.type_));
+                                   IdentifierTypeField::encode(id.type_),
+                               id.string_);
   }
 
   static PreParserExpression BinaryOperation(PreParserExpression left,
@@ -159,6 +169,11 @@
                                IsUseStrictField::encode(true));
   }
 
+  static PreParserExpression UseAsmStringLiteral() {
+    return PreParserExpression(TypeField::encode(kStringLiteralExpression) |
+                               IsUseAsmField::encode(true));
+  }
+
   static PreParserExpression This() {
     return PreParserExpression(TypeField::encode(kExpression) |
                                ExpressionTypeField::encode(kThisExpression));
@@ -199,6 +214,8 @@
         ExpressionTypeField::encode(kNoTemplateTagExpression));
   }
 
+  bool IsEmpty() const { return TypeField::decode(code_) == kEmpty; }
+
   bool IsIdentifier() const {
     return TypeField::decode(code_) == kIdentifierExpression;
   }
@@ -230,6 +247,11 @@
            IsUseStrictField::decode(code_);
   }
 
+  bool IsUseAsmLiteral() const {
+    return TypeField::decode(code_) == kStringLiteralExpression &&
+           IsUseAsmField::decode(code_);
+  }
+
   bool IsThis() const {
     return TypeField::decode(code_) == kExpression &&
            ExpressionTypeField::decode(code_) == kThisExpression;
@@ -275,7 +297,7 @@
            ExpressionTypeField::decode(code_) == kNoTemplateTagExpression;
   }
 
-  bool IsSpreadExpression() const {
+  bool IsSpread() const {
     return TypeField::decode(code_) == kSpreadExpression;
   }
 
@@ -292,12 +314,16 @@
   // More dummy implementations of things PreParser doesn't need to track:
   void set_index(int index) {}  // For YieldExpressions
   void set_should_eager_compile() {}
+  void set_should_be_used_once_hint() {}
 
   int position() const { return kNoSourcePosition; }
   void set_function_token_position(int position) {}
 
+  void set_is_class_field_initializer(bool is_class_field_initializer) {}
+
  private:
   enum Type {
+    kEmpty,
     kExpression,
     kIdentifierExpression,
     kStringLiteralExpression,
@@ -318,8 +344,9 @@
     kAssignment
   };
 
-  explicit PreParserExpression(uint32_t expression_code)
-      : code_(expression_code) {}
+  explicit PreParserExpression(uint32_t expression_code,
+                               const AstRawString* string = nullptr)
+      : code_(expression_code), string_(string) {}
 
   // The first three bits are for the Type.
   typedef BitField<Type, 0, 3> TypeField;
@@ -335,11 +362,16 @@
   // of the Type field, so they can share the storage.
   typedef BitField<ExpressionType, TypeField::kNext, 3> ExpressionTypeField;
   typedef BitField<bool, TypeField::kNext, 1> IsUseStrictField;
+  typedef BitField<bool, IsUseStrictField::kNext, 1> IsUseAsmField;
   typedef BitField<PreParserIdentifier::Type, TypeField::kNext, 10>
       IdentifierTypeField;
   typedef BitField<bool, TypeField::kNext, 1> HasCoverInitializedNameField;
 
   uint32_t code_;
+  // Non-nullptr if the expression is one identifier.
+  const AstRawString* string_;
+
+  friend class PreParser;
 };
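
// The BitField typedefs above pack the Type, the ExpressionType and a few
// boolean flags into the single uint32_t code_, keeping PreParserExpression a
// small value object. A stand-alone sketch of that encode/decode trick is
// shown below; ToyBitField is a hypothetical stand-in, not V8's actual
// BitField template.

#include <cstdint>

template <typename T, int kShift, int kSize>
struct ToyBitField {
  static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static constexpr int kNext = kShift + kSize;  // first bit after this field

  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static constexpr T decode(uint32_t code) {
    return static_cast<T>((code & kMask) >> kShift);
  }
};

enum ToyType { kToyExpression, kToyIdentifier, kToyStringLiteral };

using ToyTypeField = ToyBitField<ToyType, 0, 3>;
// The flag is laid out right after the Type bits, the same way
// IsUseStrictField is chained off TypeField::kNext above.
using ToyIsUseStrictField = ToyBitField<bool, ToyTypeField::kNext, 1>;

// Both fields share one word and decode independently of each other.
static_assert(ToyTypeField::decode(ToyTypeField::encode(kToyStringLiteral) |
                                   ToyIsUseStrictField::encode(true)) ==
                  kToyStringLiteral,
              "type bits survive alongside the flag bit");
static_assert(ToyIsUseStrictField::decode(
                  ToyTypeField::encode(kToyStringLiteral) |
                  ToyIsUseStrictField::encode(true)),
              "flag bit decodes back to true");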
 
 
@@ -353,13 +385,18 @@
   PreParserList* operator->() { return this; }
   void Add(T, void*) { ++length_; }
   int length() const { return length_; }
+  static PreParserList Null() { return PreParserList(-1); }
+  bool IsNull() const { return length_ == -1; }
+
  private:
+  explicit PreParserList(int n) : length_(n) {}
   int length_;
 };
 
-
 typedef PreParserList<PreParserExpression> PreParserExpressionList;
 
+class PreParserStatement;
+typedef PreParserList<PreParserStatement> PreParserStatementList;
 
 class PreParserStatement {
  public:
@@ -367,12 +404,16 @@
     return PreParserStatement(kUnknownStatement);
   }
 
-  static PreParserStatement Jump() {
-    return PreParserStatement(kJumpStatement);
+  static PreParserStatement Null() {
+    return PreParserStatement(kNullStatement);
   }
 
-  static PreParserStatement FunctionDeclaration() {
-    return PreParserStatement(kFunctionDeclaration);
+  static PreParserStatement Empty() {
+    return PreParserStatement(kEmptyStatement);
+  }
+
+  static PreParserStatement Jump() {
+    return PreParserStatement(kJumpStatement);
   }
 
   // Creates expression statement from expression.
@@ -383,6 +424,9 @@
     if (expression.IsUseStrictLiteral()) {
       return PreParserStatement(kUseStrictExpressionStatement);
     }
+    if (expression.IsUseAsmLiteral()) {
+      return PreParserStatement(kUseAsmExpressionStatement);
+    }
     if (expression.IsStringLiteral()) {
       return PreParserStatement(kStringLiteralExpressionStatement);
     }
@@ -390,28 +434,43 @@
   }
 
   bool IsStringLiteral() {
-    return code_ == kStringLiteralExpressionStatement || IsUseStrictLiteral();
+    return code_ == kStringLiteralExpressionStatement || IsUseStrictLiteral() ||
+           IsUseAsmLiteral();
   }
 
   bool IsUseStrictLiteral() {
     return code_ == kUseStrictExpressionStatement;
   }
 
-  bool IsFunctionDeclaration() {
-    return code_ == kFunctionDeclaration;
-  }
+  bool IsUseAsmLiteral() { return code_ == kUseAsmExpressionStatement; }
 
   bool IsJumpStatement() {
     return code_ == kJumpStatement;
   }
 
+  bool IsNullStatement() { return code_ == kNullStatement; }
+
+  bool IsEmptyStatement() { return code_ == kEmptyStatement; }
+
+  // Dummy implementation for making statement->somefunc() work in both Parser
+  // and PreParser.
+  PreParserStatement* operator->() { return this; }
+
+  PreParserStatementList statements() { return PreParserStatementList(); }
+  void set_scope(Scope* scope) {}
+  void Initialize(PreParserExpression cond, PreParserStatement body) {}
+  void Initialize(PreParserStatement init, PreParserExpression cond,
+                  PreParserStatement next, PreParserStatement body) {}
+
  private:
   enum Type {
+    kNullStatement,
+    kEmptyStatement,
     kUnknownStatement,
     kJumpStatement,
     kStringLiteralExpressionStatement,
     kUseStrictExpressionStatement,
-    kFunctionDeclaration
+    kUseAsmExpressionStatement,
   };
 
   explicit PreParserStatement(Type code) : code_(code) {}
@@ -419,9 +478,6 @@
 };
 
 
-typedef PreParserList<PreParserStatement> PreParserStatementList;
-
-
 class PreParserFactory {
  public:
   explicit PreParserFactory(void* unused_value_factory) {}
@@ -433,31 +489,34 @@
                                        int pos) {
     return PreParserExpression::Default();
   }
+  PreParserExpression NewUndefinedLiteral(int pos) {
+    return PreParserExpression::Default();
+  }
   PreParserExpression NewRegExpLiteral(PreParserIdentifier js_pattern,
                                        int js_flags, int literal_index,
                                        int pos) {
     return PreParserExpression::Default();
   }
   PreParserExpression NewArrayLiteral(PreParserExpressionList values,
-                                      int literal_index,
-                                      int pos) {
-    return PreParserExpression::ArrayLiteral();
-  }
-  PreParserExpression NewArrayLiteral(PreParserExpressionList values,
                                       int first_spread_index, int literal_index,
                                       int pos) {
     return PreParserExpression::ArrayLiteral();
   }
+  PreParserExpression NewClassLiteralProperty(PreParserExpression key,
+                                              PreParserExpression value,
+                                              ClassLiteralProperty::Kind kind,
+                                              bool is_static,
+                                              bool is_computed_name) {
+    return PreParserExpression::Default();
+  }
   PreParserExpression NewObjectLiteralProperty(PreParserExpression key,
                                                PreParserExpression value,
                                                ObjectLiteralProperty::Kind kind,
-                                               bool is_static,
                                                bool is_computed_name) {
     return PreParserExpression::Default();
   }
   PreParserExpression NewObjectLiteralProperty(PreParserExpression key,
                                                PreParserExpression value,
-                                               bool is_static,
                                                bool is_computed_name) {
     return PreParserExpression::Default();
   }
@@ -533,15 +592,9 @@
                                  int pos) {
     return PreParserExpression::Default();
   }
-  PreParserExpression NewCallRuntime(const AstRawString* name,
-                                     const Runtime::Function* function,
-                                     PreParserExpressionList arguments,
-                                     int pos) {
-    return PreParserExpression::Default();
-  }
   PreParserStatement NewReturnStatement(PreParserExpression expression,
                                         int pos) {
-    return PreParserStatement::Default();
+    return PreParserStatement::Jump();
   }
   PreParserExpression NewFunctionLiteral(
       PreParserIdentifier name, Scope* scope, PreParserStatementList body,
@@ -549,8 +602,7 @@
       int parameter_count,
       FunctionLiteral::ParameterFlag has_duplicate_parameters,
       FunctionLiteral::FunctionType function_type,
-      FunctionLiteral::EagerCompileHint eager_compile_hint, FunctionKind kind,
-      int position) {
+      FunctionLiteral::EagerCompileHint eager_compile_hint, int position) {
     return PreParserExpression::Default();
   }
 
@@ -563,6 +615,77 @@
     return PreParserExpression::Default();
   }
 
+  PreParserStatement NewEmptyStatement(int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewBlock(ZoneList<const AstRawString*>* labels,
+                              int capacity, bool ignore_completion_value,
+                              int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewDebuggerStatement(int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewExpressionStatement(PreParserExpression expr, int pos) {
+    return PreParserStatement::ExpressionStatement(expr);
+  }
+
+  PreParserStatement NewIfStatement(PreParserExpression condition,
+                                    PreParserStatement then_statement,
+                                    PreParserStatement else_statement,
+                                    int pos) {
+    // This must return a jump statement iff both clauses are jump statements.
+    return else_statement.IsJumpStatement() ? then_statement : else_statement;
+  }
+
+  PreParserStatement NewBreakStatement(PreParserStatement target, int pos) {
+    return PreParserStatement::Jump();
+  }
+
+  PreParserStatement NewContinueStatement(PreParserStatement target, int pos) {
+    return PreParserStatement::Jump();
+  }
+
+  PreParserStatement NewWithStatement(Scope* scope,
+                                      PreParserExpression expression,
+                                      PreParserStatement statement, int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewDoWhileStatement(ZoneList<const AstRawString*>* labels,
+                                         int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewWhileStatement(ZoneList<const AstRawString*>* labels,
+                                       int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewSwitchStatement(ZoneList<const AstRawString*>* labels,
+                                        int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewCaseClause(PreParserExpression label,
+                                   PreParserStatementList statements, int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewForStatement(ZoneList<const AstRawString*>* labels,
+                                     int pos) {
+    return PreParserStatement::Default();
+  }
+
+  PreParserStatement NewForEachStatement(ForEachStatement::VisitMode visit_mode,
+                                         ZoneList<const AstRawString*>* labels,
+                                         int pos) {
+    return PreParserStatement::Default();
+  }
+
   // Return the object itself as AstVisitor and implement the needed
   // dummy method right in this class.
   PreParserFactory* visitor() { return this; }
@@ -585,328 +708,46 @@
 
 class PreParser;
 
-template <>
-class ParserBaseTraits<PreParser> {
+class PreParserTarget {
  public:
-  typedef ParserBaseTraits<PreParser> PreParserTraits;
+  PreParserTarget(ParserBase<PreParser>* preparser,
+                  PreParserStatement statement) {}
+};
 
-  struct Type {
-    // PreParser doesn't need to store generator variables.
-    typedef void GeneratorVariable;
+class PreParserTargetScope {
+ public:
+  explicit PreParserTargetScope(ParserBase<PreParser>* preparser) {}
+};
 
-    typedef int AstProperties;
+template <>
+struct ParserTypes<PreParser> {
+  typedef ParserBase<PreParser> Base;
+  typedef PreParser Impl;
 
-    typedef v8::internal::ExpressionClassifier<PreParserTraits>
-        ExpressionClassifier;
+  // PreParser doesn't need to store generator variables.
+  typedef void Variable;
 
-    // Return types for traversing functions.
-    typedef PreParserIdentifier Identifier;
-    typedef PreParserExpression Expression;
-    typedef PreParserExpression YieldExpression;
-    typedef PreParserExpression FunctionLiteral;
-    typedef PreParserExpression ClassLiteral;
-    typedef PreParserExpression Literal;
-    typedef PreParserExpression ObjectLiteralProperty;
-    typedef PreParserExpressionList ExpressionList;
-    typedef PreParserExpressionList PropertyList;
-    typedef PreParserIdentifier FormalParameter;
-    typedef PreParserFormalParameters FormalParameters;
-    typedef PreParserStatementList StatementList;
+  // Return types for traversing functions.
+  typedef PreParserIdentifier Identifier;
+  typedef PreParserExpression Expression;
+  typedef PreParserExpression FunctionLiteral;
+  typedef PreParserExpression ObjectLiteralProperty;
+  typedef PreParserExpression ClassLiteralProperty;
+  typedef PreParserExpressionList ExpressionList;
+  typedef PreParserExpressionList ObjectPropertyList;
+  typedef PreParserExpressionList ClassPropertyList;
+  typedef PreParserFormalParameters FormalParameters;
+  typedef PreParserStatement Statement;
+  typedef PreParserStatementList StatementList;
+  typedef PreParserStatement Block;
+  typedef PreParserStatement BreakableStatement;
+  typedef PreParserStatement IterationStatement;
 
-    // For constructing objects returned by the traversing functions.
-    typedef PreParserFactory Factory;
-  };
+  // For constructing objects returned by the traversing functions.
+  typedef PreParserFactory Factory;
 
-  // TODO(nikolaos): The traits methods should not need to call methods
-  // of the implementation object.
-  PreParser* delegate() { return reinterpret_cast<PreParser*>(this); }
-  const PreParser* delegate() const {
-    return reinterpret_cast<const PreParser*>(this);
-  }
-
-  // Helper functions for recursive descent.
-  bool IsEval(PreParserIdentifier identifier) const {
-    return identifier.IsEval();
-  }
-
-  bool IsArguments(PreParserIdentifier identifier) const {
-    return identifier.IsArguments();
-  }
-
-  bool IsEvalOrArguments(PreParserIdentifier identifier) const {
-    return identifier.IsEvalOrArguments();
-  }
-
-  bool IsUndefined(PreParserIdentifier identifier) const {
-    return identifier.IsUndefined();
-  }
-
-  bool IsAwait(PreParserIdentifier identifier) const {
-    return identifier.IsAwait();
-  }
-
-  bool IsFutureStrictReserved(PreParserIdentifier identifier) const {
-    return identifier.IsFutureStrictReserved();
-  }
-
-  // Returns true if the expression is of type "this.foo".
-  static bool IsThisProperty(PreParserExpression expression) {
-    return expression.IsThisProperty();
-  }
-
-  static bool IsIdentifier(PreParserExpression expression) {
-    return expression.IsIdentifier();
-  }
-
-  static PreParserIdentifier AsIdentifier(PreParserExpression expression) {
-    return expression.AsIdentifier();
-  }
-
-  bool IsPrototype(PreParserIdentifier identifier) const {
-    return identifier.IsPrototype();
-  }
-
-  bool IsConstructor(PreParserIdentifier identifier) const {
-    return identifier.IsConstructor();
-  }
-
-  bool IsDirectEvalCall(PreParserExpression expression) const {
-    return expression.IsDirectEvalCall();
-  }
-
-  static bool IsBoilerplateProperty(PreParserExpression property) {
-    // PreParser doesn't count boilerplate properties.
-    return false;
-  }
-
-  static bool IsArrayIndex(PreParserIdentifier string, uint32_t* index) {
-    return false;
-  }
-
-  static PreParserExpression GetPropertyValue(PreParserExpression property) {
-    return PreParserExpression::Default();
-  }
-
-  // Functions for encapsulating the differences between parsing and preparsing;
-  // operations interleaved with the recursive descent.
-  static void PushLiteralName(FuncNameInferrer* fni, PreParserIdentifier id) {
-    // PreParser should not use FuncNameInferrer.
-    UNREACHABLE();
-  }
-
-  void PushPropertyName(FuncNameInferrer* fni, PreParserExpression expression) {
-    // PreParser should not use FuncNameInferrer.
-    UNREACHABLE();
-  }
-
-  static void InferFunctionName(FuncNameInferrer* fni,
-                                PreParserExpression expression) {
-    // PreParser should not use FuncNameInferrer.
-    UNREACHABLE();
-  }
-
-  static void CheckAssigningFunctionLiteralToProperty(
-      PreParserExpression left, PreParserExpression right) {}
-
-  static PreParserExpression MarkExpressionAsAssigned(
-      PreParserExpression expression) {
-    // TODO(marja): To be able to produce the same errors, the preparser needs
-    // to start tracking which expressions are variables and which are assigned.
-    return expression;
-  }
-
-  bool ShortcutNumericLiteralBinaryExpression(PreParserExpression* x,
-                                              PreParserExpression y,
-                                              Token::Value op, int pos,
-                                              PreParserFactory* factory) {
-    return false;
-  }
-
-  PreParserExpression BuildUnaryExpression(PreParserExpression expression,
-                                           Token::Value op, int pos,
-                                           PreParserFactory* factory) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression BuildIteratorResult(PreParserExpression value,
-                                          bool done) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression NewThrowReferenceError(MessageTemplate::Template message,
-                                             int pos) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression NewThrowSyntaxError(MessageTemplate::Template message,
-                                          PreParserIdentifier arg, int pos) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression NewThrowTypeError(MessageTemplate::Template message,
-                                        PreParserIdentifier arg, int pos) {
-    return PreParserExpression::Default();
-  }
-
-  // Reporting errors.
-  void ReportMessageAt(Scanner::Location source_location,
-                       MessageTemplate::Template message,
-                       const char* arg = NULL,
-                       ParseErrorType error_type = kSyntaxError);
-  void ReportMessageAt(Scanner::Location source_location,
-                       MessageTemplate::Template message,
-                       const AstRawString* arg,
-                       ParseErrorType error_type = kSyntaxError);
-
-  // A dummy function, just useful as an argument to CHECK_OK_CUSTOM.
-  static void Void() {}
-
-  // "null" return type creators.
-  static PreParserIdentifier EmptyIdentifier() {
-    return PreParserIdentifier::Default();
-  }
-  static PreParserExpression EmptyExpression() {
-    return PreParserExpression::Default();
-  }
-  static PreParserExpression EmptyLiteral() {
-    return PreParserExpression::Default();
-  }
-  static PreParserExpression EmptyObjectLiteralProperty() {
-    return PreParserExpression::Default();
-  }
-  static PreParserExpression EmptyFunctionLiteral() {
-    return PreParserExpression::Default();
-  }
-
-  static PreParserExpressionList NullExpressionList() {
-    return PreParserExpressionList();
-  }
-  PreParserIdentifier EmptyIdentifierString() const {
-    return PreParserIdentifier::Default();
-  }
-
-  // Odd-ball literal creators.
-  PreParserExpression GetLiteralTheHole(int position,
-                                        PreParserFactory* factory) const {
-    return PreParserExpression::Default();
-  }
-
-  // Producing data during the recursive descent.
-  PreParserIdentifier GetSymbol(Scanner* scanner) const;
-
-  PreParserIdentifier GetNextSymbol(Scanner* scanner) const {
-    return PreParserIdentifier::Default();
-  }
-
-  PreParserIdentifier GetNumberAsSymbol(Scanner* scanner) const {
-    return PreParserIdentifier::Default();
-  }
-
-  PreParserExpression ThisExpression(int pos = kNoSourcePosition) {
-    return PreParserExpression::This();
-  }
-
-  PreParserExpression NewSuperPropertyReference(PreParserFactory* factory,
-                                                int pos) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression NewSuperCallReference(PreParserFactory* factory,
-                                            int pos) {
-    return PreParserExpression::SuperCallReference();
-  }
-
-  PreParserExpression NewTargetExpression(int pos) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression FunctionSentExpression(PreParserFactory* factory,
-                                             int pos) const {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression ExpressionFromLiteral(Token::Value token, int pos,
-                                            Scanner* scanner,
-                                            PreParserFactory* factory) const {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpression ExpressionFromIdentifier(PreParserIdentifier name,
-                                               int start_position,
-                                               int end_position,
-                                               InferName = InferName::kYes) {
-    return PreParserExpression::FromIdentifier(name);
-  }
-
-  PreParserExpression ExpressionFromString(int pos, Scanner* scanner,
-                                           PreParserFactory* factory) const;
-
-  PreParserExpression GetIterator(PreParserExpression iterable,
-                                  PreParserFactory* factory, int pos) {
-    return PreParserExpression::Default();
-  }
-
-  PreParserExpressionList NewExpressionList(int size, Zone* zone) const {
-    return PreParserExpressionList();
-  }
-
-  PreParserExpressionList NewPropertyList(int size, Zone* zone) const {
-    return PreParserExpressionList();
-  }
-
-  PreParserStatementList NewStatementList(int size, Zone* zone) const {
-    return PreParserStatementList();
-  }
-
-  void AddParameterInitializationBlock(
-      const PreParserFormalParameters& parameters, PreParserStatementList body,
-      bool is_async, bool* ok) {}
-
-  void AddFormalParameter(PreParserFormalParameters* parameters,
-                          PreParserExpression pattern,
-                          PreParserExpression initializer,
-                          int initializer_end_position, bool is_rest) {
-    ++parameters->arity;
-  }
-
-  void DeclareFormalParameter(DeclarationScope* scope,
-                              PreParserIdentifier parameter,
-                              Type::ExpressionClassifier* classifier) {
-    if (!classifier->is_simple_parameter_list()) {
-      scope->SetHasNonSimpleParameters();
-    }
-  }
-
-  V8_INLINE void ParseArrowFunctionFormalParameterList(
-      PreParserFormalParameters* parameters, PreParserExpression params,
-      const Scanner::Location& params_loc, Scanner::Location* duplicate_loc,
-      const Scope::Snapshot& scope_snapshot, bool* ok);
-
-  void ReindexLiterals(const PreParserFormalParameters& parameters) {}
-
-  V8_INLINE PreParserExpression NoTemplateTag() {
-    return PreParserExpression::NoTemplateTag();
-  }
-  V8_INLINE static bool IsTaggedTemplate(const PreParserExpression tag) {
-    return !tag.IsNoTemplateTag();
-  }
-
-  inline void MaterializeUnspreadArgumentsLiterals(int count);
-
-  inline PreParserExpression ExpressionListToExpression(
-      PreParserExpressionList args) {
-    return PreParserExpression::Default();
-  }
-
-  void SetFunctionNameFromPropertyName(PreParserExpression property,
-                                       PreParserIdentifier name) {}
-  void SetFunctionNameFromIdentifierRef(PreParserExpression value,
-                                        PreParserExpression identifier) {}
-
-  V8_INLINE ZoneList<typename Type::ExpressionClassifier::Error>*
-      GetReportedErrorList() const;
-  V8_INLINE Zone* zone() const;
-  V8_INLINE ZoneList<PreParserExpression>* GetNonPatternList() const;
+  typedef PreParserTarget Target;
+  typedef PreParserTargetScope TargetScope;
 };
 
 
@@ -924,9 +765,7 @@
 // it is used) are generally omitted.
 class PreParser : public ParserBase<PreParser> {
   friend class ParserBase<PreParser>;
-  // TODO(nikolaos): This should not be necessary. It will be removed
-  // when the traits object stops delegating to the implementation object.
-  friend class ParserBaseTraits<PreParser>;
+  friend class v8::internal::ExpressionClassifier<ParserTypes<PreParser>>;
 
  public:
   typedef PreParserIdentifier Identifier;
@@ -935,6 +774,7 @@
 
   enum PreParseResult {
     kPreParseStackOverflow,
+    kPreParseAbort,
     kPreParseSuccess
   };
 
@@ -942,7 +782,8 @@
             ParserRecorder* log, uintptr_t stack_limit)
       : ParserBase<PreParser>(zone, scanner, stack_limit, NULL,
                               ast_value_factory, log),
-        use_counts_(nullptr) {}
+        use_counts_(nullptr),
+        track_unresolved_variables_(false) {}
 
   // Pre-parse the program from the character stream; returns true on
   // success (even if parsing failed, the pre-parse data successfully
@@ -958,19 +799,19 @@
     // the global scope.
     if (is_module) scope = NewModuleScope(scope);
 
-    FunctionState top_scope(&function_state_, &scope_state_, scope,
-                            kNormalFunction);
+    FunctionState top_scope(&function_state_, &scope_state_, scope);
     bool ok = true;
     int start_position = scanner()->peek_location().beg_pos;
     parsing_module_ = is_module;
-    ParseStatementList(Token::EOS, &ok);
+    PreParserStatementList body;
+    ParseStatementList(body, Token::EOS, &ok);
     if (stack_overflow()) return kPreParseStackOverflow;
     if (!ok) {
       ReportUnexpectedToken(scanner()->current_token());
     } else if (is_strict(this->scope()->language_mode())) {
       CheckStrictOctalLiteral(start_position, scanner()->location().end_pos,
                               &ok);
-      CheckDecimalLiteralWithLeadingZero(use_counts_, start_position,
+      CheckDecimalLiteralWithLeadingZero(start_position,
                                          scanner()->location().end_pos);
     }
     if (materialized_literals) {
@@ -987,16 +828,12 @@
   // keyword and parameters, and have consumed the initial '{'.
   // At return, unless an error occurred, the scanner is positioned before the
   // the final '}'.
-  PreParseResult PreParseLazyFunction(LanguageMode language_mode,
-                                      FunctionKind kind,
-                                      bool has_simple_parameters,
+  PreParseResult PreParseLazyFunction(DeclarationScope* function_scope,
                                       bool parsing_module, ParserRecorder* log,
-                                      Scanner::BookmarkScope* bookmark,
-                                      int* use_counts);
+                                      bool track_unresolved_variables,
+                                      bool may_abort, int* use_counts);
 
  private:
-  static const int kLazyParseTrialLimit = 200;
-
   // These types form an algebra over syntactic categories that is just
   // rich enough to let us recognize and propagate the constructs that
   // are either being counted in the preparser data, or is important
@@ -1006,72 +843,24 @@
   // which is set to false if parsing failed; it is unchanged otherwise.
   // By making the 'exception handling' explicit, we are forced to check
   // for failure at the call sites.
-  Statement ParseStatementListItem(bool* ok);
-  void ParseStatementList(int end_token, bool* ok,
-                          Scanner::BookmarkScope* bookmark = nullptr);
-  Statement ParseStatement(AllowLabelledFunctionStatement allow_function,
-                           bool* ok);
-  Statement ParseSubStatement(AllowLabelledFunctionStatement allow_function,
-                              bool* ok);
-  Statement ParseScopedStatement(bool legacy, bool* ok);
-  Statement ParseHoistableDeclaration(bool* ok);
-  Statement ParseHoistableDeclaration(int pos, ParseFunctionFlags flags,
-                                      bool* ok);
-  Statement ParseFunctionDeclaration(bool* ok);
-  Statement ParseAsyncFunctionDeclaration(bool* ok);
-  Expression ParseAsyncFunctionExpression(bool* ok);
-  Statement ParseClassDeclaration(bool* ok);
-  Statement ParseBlock(bool* ok);
-  Statement ParseVariableStatement(VariableDeclarationContext var_context,
-                                   bool* ok);
-  Statement ParseVariableDeclarations(VariableDeclarationContext var_context,
-                                      int* num_decl, bool* is_lexical,
-                                      bool* is_binding_pattern,
-                                      Scanner::Location* first_initializer_loc,
-                                      Scanner::Location* bindings_loc,
-                                      bool* ok);
-  Statement ParseExpressionOrLabelledStatement(
-      AllowLabelledFunctionStatement allow_function, bool* ok);
-  Statement ParseIfStatement(bool* ok);
-  Statement ParseContinueStatement(bool* ok);
-  Statement ParseBreakStatement(bool* ok);
-  Statement ParseReturnStatement(bool* ok);
-  Statement ParseWithStatement(bool* ok);
-  Statement ParseSwitchStatement(bool* ok);
-  Statement ParseDoWhileStatement(bool* ok);
-  Statement ParseWhileStatement(bool* ok);
-  Statement ParseForStatement(bool* ok);
-  Statement ParseThrowStatement(bool* ok);
-  Statement ParseTryStatement(bool* ok);
-  Statement ParseDebuggerStatement(bool* ok);
-  Expression ParseConditionalExpression(bool accept_IN, bool* ok);
-  Expression ParseObjectLiteral(bool* ok);
-  Expression ParseV8Intrinsic(bool* ok);
-  Expression ParseDoExpression(bool* ok);
 
   V8_INLINE PreParserStatementList ParseEagerFunctionBody(
       PreParserIdentifier function_name, int pos,
       const PreParserFormalParameters& parameters, FunctionKind kind,
       FunctionLiteral::FunctionType function_type, bool* ok);
 
-  V8_INLINE void SkipLazyFunctionBody(
-      int* materialized_literal_count, int* expected_property_count, bool* ok,
-      Scanner::BookmarkScope* bookmark = nullptr) {
+  V8_INLINE LazyParsingResult SkipLazyFunctionBody(
+      int* materialized_literal_count, int* expected_property_count,
+      bool track_unresolved_variables, bool may_abort, bool* ok) {
     UNREACHABLE();
+    return kLazyParsingComplete;
   }
   Expression ParseFunctionLiteral(
       Identifier name, Scanner::Location function_name_location,
       FunctionNameValidity function_name_validity, FunctionKind kind,
       int function_token_pos, FunctionLiteral::FunctionType function_type,
       LanguageMode language_mode, bool* ok);
-  void ParseLazyFunctionLiteralBody(bool* ok,
-                                    Scanner::BookmarkScope* bookmark = nullptr);
-
-  PreParserExpression ParseClassLiteral(ExpressionClassifier* classifier,
-                                        PreParserIdentifier name,
-                                        Scanner::Location class_name_location,
-                                        bool name_is_strict_reserved, int pos,
-                                        bool* ok);
+  LazyParsingResult ParseLazyFunctionLiteralBody(bool may_abort, bool* ok);
 
   struct TemplateLiteralState {};
 
@@ -1085,14 +874,14 @@
       TemplateLiteralState* state, int start, PreParserExpression tag);
   V8_INLINE void CheckConflictingVarDeclarations(Scope* scope, bool* ok) {}
 
+  V8_INLINE void SetLanguageMode(Scope* scope, LanguageMode mode) {
+    scope->SetLanguageMode(mode);
+  }
+  V8_INLINE void SetAsmModule() {}
+
   V8_INLINE void MarkCollectedTailCallExpressions() {}
   V8_INLINE void MarkTailPosition(PreParserExpression expression) {}
 
-  void ParseAsyncArrowSingleExpressionBody(PreParserStatementList body,
-                                           bool accept_IN,
-                                           ExpressionClassifier* classifier,
-                                           int pos, bool* ok);
-
   V8_INLINE PreParserExpressionList
   PrepareSpreadArguments(PreParserExpressionList list) {
     return list;
@@ -1105,6 +894,11 @@
                                               PreParserExpressionList args,
                                               int pos);
 
+  V8_INLINE PreParserExpression
+  RewriteSuperCall(PreParserExpression call_expression) {
+    return call_expression;
+  }
+
   V8_INLINE void RewriteDestructuringAssignments() {}
 
   V8_INLINE PreParserExpression RewriteExponentiation(PreParserExpression left,
@@ -1121,14 +915,102 @@
   RewriteAwaitExpression(PreParserExpression value, int pos) {
     return value;
   }
+  V8_INLINE void PrepareAsyncFunctionBody(PreParserStatementList body,
+                                          FunctionKind kind, int pos) {}
+  V8_INLINE void RewriteAsyncFunctionBody(PreParserStatementList body,
+                                          PreParserStatement block,
+                                          PreParserExpression return_value,
+                                          bool* ok) {}
   V8_INLINE PreParserExpression RewriteYieldStar(PreParserExpression generator,
                                                  PreParserExpression expression,
                                                  int pos) {
     return PreParserExpression::Default();
   }
-  V8_INLINE void RewriteNonPattern(Type::ExpressionClassifier* classifier,
-                                   bool* ok) {
-    ValidateExpression(classifier, ok);
+  V8_INLINE void RewriteNonPattern(bool* ok) { ValidateExpression(ok); }
+
+  void DeclareAndInitializeVariables(
+      PreParserStatement block,
+      const DeclarationDescriptor* declaration_descriptor,
+      const DeclarationParsingResult::Declaration* declaration,
+      ZoneList<const AstRawString*>* names, bool* ok);
+
+  V8_INLINE ZoneList<const AstRawString*>* DeclareLabel(
+      ZoneList<const AstRawString*>* labels, PreParserExpression expr,
+      bool* ok) {
+    DCHECK(!expr.AsIdentifier().IsEnum());
+    DCHECK(!parsing_module_ || !expr.AsIdentifier().IsAwait());
+    DCHECK(is_sloppy(language_mode()) ||
+           !IsFutureStrictReserved(expr.AsIdentifier()));
+    return labels;
+  }
+
+  // TODO(nikolaos): The preparser currently does not keep track of labels.
+  V8_INLINE bool ContainsLabel(ZoneList<const AstRawString*>* labels,
+                               PreParserIdentifier label) {
+    return false;
+  }
+
+  V8_INLINE PreParserExpression RewriteReturn(PreParserExpression return_value,
+                                              int pos) {
+    return return_value;
+  }
+  V8_INLINE PreParserStatement RewriteSwitchStatement(
+      PreParserExpression tag, PreParserStatement switch_statement,
+      PreParserStatementList cases, Scope* scope) {
+    return PreParserStatement::Default();
+  }
+  V8_INLINE void RewriteCatchPattern(CatchInfo* catch_info, bool* ok) {}
+  V8_INLINE void ValidateCatchBlock(const CatchInfo& catch_info, bool* ok) {}
+  V8_INLINE PreParserStatement RewriteTryStatement(
+      PreParserStatement try_block, PreParserStatement catch_block,
+      PreParserStatement finally_block, const CatchInfo& catch_info, int pos) {
+    return PreParserStatement::Default();
+  }
+
+  V8_INLINE PreParserExpression RewriteDoExpression(PreParserStatement body,
+                                                    int pos, bool* ok) {
+    return PreParserExpression::Default();
+  }
+
+  // TODO(nikolaos): The preparser currently does not keep track of labels
+  // and targets.
+  V8_INLINE PreParserStatement LookupBreakTarget(PreParserIdentifier label,
+                                                 bool* ok) {
+    return PreParserStatement::Default();
+  }
+  V8_INLINE PreParserStatement LookupContinueTarget(PreParserIdentifier label,
+                                                    bool* ok) {
+    return PreParserStatement::Default();
+  }
+
+  V8_INLINE PreParserStatement DeclareFunction(
+      PreParserIdentifier variable_name, PreParserExpression function, int pos,
+      bool is_generator, bool is_async, ZoneList<const AstRawString*>* names,
+      bool* ok) {
+    return Statement::Default();
+  }
+
+  V8_INLINE PreParserStatement
+  DeclareClass(PreParserIdentifier variable_name, PreParserExpression value,
+               ZoneList<const AstRawString*>* names, int class_token_pos,
+               int end_pos, bool* ok) {
+    return PreParserStatement::Default();
+  }
+  V8_INLINE void DeclareClassVariable(PreParserIdentifier name,
+                                      Scope* block_scope, ClassInfo* class_info,
+                                      int class_token_pos, bool* ok) {}
+  V8_INLINE void DeclareClassProperty(PreParserIdentifier class_name,
+                                      PreParserExpression property,
+                                      ClassInfo* class_info, bool* ok) {}
+  V8_INLINE PreParserExpression RewriteClassLiteral(PreParserIdentifier name,
+                                                    ClassInfo* class_info,
+                                                    int pos, bool* ok) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserStatement DeclareNative(PreParserIdentifier name, int pos,
+                                             bool* ok) {
+    return PreParserStatement::Default();
   }
 
   V8_INLINE void QueueDestructuringAssignmentForRewriting(
@@ -1136,15 +1018,425 @@
   V8_INLINE void QueueNonPatternForRewriting(PreParserExpression expr,
                                              bool* ok) {}
 
-  int* use_counts_;
-};
-
-void ParserBaseTraits<PreParser>::MaterializeUnspreadArgumentsLiterals(
-    int count) {
-  for (int i = 0; i < count; ++i) {
-    delegate()->function_state_->NextMaterializedLiteralIndex();
+  // Helper functions for recursive descent.
+  V8_INLINE bool IsEval(PreParserIdentifier identifier) const {
+    return identifier.IsEval();
   }
-}
+
+  V8_INLINE bool IsArguments(PreParserIdentifier identifier) const {
+    return identifier.IsArguments();
+  }
+
+  V8_INLINE bool IsEvalOrArguments(PreParserIdentifier identifier) const {
+    return identifier.IsEvalOrArguments();
+  }
+
+  V8_INLINE bool IsUndefined(PreParserIdentifier identifier) const {
+    return identifier.IsUndefined();
+  }
+
+  V8_INLINE bool IsAwait(PreParserIdentifier identifier) const {
+    return identifier.IsAwait();
+  }
+
+  V8_INLINE bool IsFutureStrictReserved(PreParserIdentifier identifier) const {
+    return identifier.IsFutureStrictReserved();
+  }
+
+  // Returns true if the expression is of type "this.foo".
+  V8_INLINE static bool IsThisProperty(PreParserExpression expression) {
+    return expression.IsThisProperty();
+  }
+
+  V8_INLINE static bool IsIdentifier(PreParserExpression expression) {
+    return expression.IsIdentifier();
+  }
+
+  V8_INLINE static PreParserIdentifier AsIdentifier(
+      PreParserExpression expression) {
+    return expression.AsIdentifier();
+  }
+
+  V8_INLINE static PreParserExpression AsIdentifierExpression(
+      PreParserExpression expression) {
+    return expression;
+  }
+
+  V8_INLINE bool IsPrototype(PreParserIdentifier identifier) const {
+    return identifier.IsPrototype();
+  }
+
+  V8_INLINE bool IsConstructor(PreParserIdentifier identifier) const {
+    return identifier.IsConstructor();
+  }
+
+  V8_INLINE bool IsDirectEvalCall(PreParserExpression expression) const {
+    return expression.IsDirectEvalCall();
+  }
+
+  V8_INLINE static bool IsBoilerplateProperty(PreParserExpression property) {
+    // PreParser doesn't count boilerplate properties.
+    return false;
+  }
+
+  V8_INLINE bool IsNative(PreParserExpression expr) const {
+    // Preparsing is disabled for extensions (because the extension
+    // details aren't passed to lazily compiled functions), so we
+    // don't accept "native function" in the preparser and there is
+    // no need to keep track of "native".
+    return false;
+  }
+
+  V8_INLINE static bool IsArrayIndex(PreParserIdentifier string,
+                                     uint32_t* index) {
+    return false;
+  }
+
+  V8_INLINE bool IsUseStrictDirective(PreParserStatement statement) const {
+    return statement.IsUseStrictLiteral();
+  }
+
+  V8_INLINE bool IsUseAsmDirective(PreParserStatement statement) const {
+    return statement.IsUseAsmLiteral();
+  }
+
+  V8_INLINE bool IsStringLiteral(PreParserStatement statement) const {
+    return statement.IsStringLiteral();
+  }
+
+  V8_INLINE static PreParserExpression GetPropertyValue(
+      PreParserExpression property) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE static void GetDefaultStrings(
+      PreParserIdentifier* default_string,
+      PreParserIdentifier* star_default_star_string) {}
+
+  // Functions for encapsulating the differences between parsing and preparsing;
+  // operations interleaved with the recursive descent.
+  V8_INLINE static void PushLiteralName(PreParserIdentifier id) {}
+  V8_INLINE static void PushVariableName(PreParserIdentifier id) {}
+  V8_INLINE void PushPropertyName(PreParserExpression expression) {}
+  V8_INLINE void PushEnclosingName(PreParserIdentifier name) {}
+  V8_INLINE static void AddFunctionForNameInference(
+      PreParserExpression expression) {}
+  V8_INLINE static void InferFunctionName() {}
+
+  V8_INLINE static void CheckAssigningFunctionLiteralToProperty(
+      PreParserExpression left, PreParserExpression right) {}
+
+  V8_INLINE static PreParserExpression MarkExpressionAsAssigned(
+      PreParserExpression expression) {
+    // TODO(marja): To be able to produce the same errors, the preparser needs
+    // to start tracking which expressions are variables and which are assigned.
+    return expression;
+  }
+
+  V8_INLINE bool ShortcutNumericLiteralBinaryExpression(PreParserExpression* x,
+                                                        PreParserExpression y,
+                                                        Token::Value op,
+                                                        int pos) {
+    return false;
+  }
+
+  V8_INLINE PreParserExpression BuildUnaryExpression(
+      PreParserExpression expression, Token::Value op, int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression BuildIteratorResult(PreParserExpression value,
+                                                    bool done) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserStatement
+  BuildInitializationBlock(DeclarationParsingResult* parsing_result,
+                           ZoneList<const AstRawString*>* names, bool* ok) {
+    return PreParserStatement::Default();
+  }
+
+  V8_INLINE PreParserStatement
+  InitializeForEachStatement(PreParserStatement stmt, PreParserExpression each,
+                             PreParserExpression subject,
+                             PreParserStatement body, int each_keyword_pos) {
+    return stmt;
+  }
+
+  V8_INLINE PreParserStatement RewriteForVarInLegacy(const ForInfo& for_info) {
+    return PreParserStatement::Null();
+  }
+  V8_INLINE void DesugarBindingInForEachStatement(
+      ForInfo* for_info, PreParserStatement* body_block,
+      PreParserExpression* each_variable, bool* ok) {}
+  V8_INLINE PreParserStatement CreateForEachStatementTDZ(
+      PreParserStatement init_block, const ForInfo& for_info, bool* ok) {
+    return init_block;
+  }
+
+  V8_INLINE StatementT DesugarLexicalBindingsInForStatement(
+      PreParserStatement loop, PreParserStatement init,
+      PreParserExpression cond, PreParserStatement next,
+      PreParserStatement body, Scope* inner_scope, const ForInfo& for_info,
+      bool* ok) {
+    return loop;
+  }
+
+  V8_INLINE PreParserExpression
+  NewThrowReferenceError(MessageTemplate::Template message, int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression NewThrowSyntaxError(
+      MessageTemplate::Template message, PreParserIdentifier arg, int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression NewThrowTypeError(
+      MessageTemplate::Template message, PreParserIdentifier arg, int pos) {
+    return PreParserExpression::Default();
+  }
+
+  // Reporting errors.
+  V8_INLINE void ReportMessageAt(Scanner::Location source_location,
+                                 MessageTemplate::Template message,
+                                 const char* arg = NULL,
+                                 ParseErrorType error_type = kSyntaxError) {
+    log_->LogMessage(source_location.beg_pos, source_location.end_pos, message,
+                     arg, error_type);
+  }
+
+  V8_INLINE void ReportMessageAt(Scanner::Location source_location,
+                                 MessageTemplate::Template message,
+                                 PreParserIdentifier arg,
+                                 ParseErrorType error_type = kSyntaxError) {
+    UNREACHABLE();
+  }
+
+  // "null" return type creators.
+  V8_INLINE static PreParserIdentifier EmptyIdentifier() {
+    return PreParserIdentifier::Empty();
+  }
+  V8_INLINE static bool IsEmptyIdentifier(PreParserIdentifier name) {
+    return name.IsEmpty();
+  }
+  V8_INLINE static PreParserExpression EmptyExpression() {
+    return PreParserExpression::Empty();
+  }
+  V8_INLINE static PreParserExpression EmptyLiteral() {
+    return PreParserExpression::Default();
+  }
+  V8_INLINE static PreParserExpression EmptyObjectLiteralProperty() {
+    return PreParserExpression::Default();
+  }
+  V8_INLINE static PreParserExpression EmptyClassLiteralProperty() {
+    return PreParserExpression::Default();
+  }
+  V8_INLINE static PreParserExpression EmptyFunctionLiteral() {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE static bool IsEmptyExpression(PreParserExpression expr) {
+    return expr.IsEmpty();
+  }
+
+  V8_INLINE static PreParserExpressionList NullExpressionList() {
+    return PreParserExpressionList::Null();
+  }
+
+  V8_INLINE static bool IsNullExpressionList(PreParserExpressionList exprs) {
+    return exprs.IsNull();
+  }
+
+  V8_INLINE static PreParserStatementList NullStatementList() {
+    return PreParserStatementList::Null();
+  }
+
+  V8_INLINE static bool IsNullStatementList(PreParserStatementList stmts) {
+    return stmts.IsNull();
+  }
+
+  V8_INLINE static PreParserStatement NullStatement() {
+    return PreParserStatement::Null();
+  }
+
+  V8_INLINE bool IsNullStatement(PreParserStatement stmt) {
+    return stmt.IsNullStatement();
+  }
+
+  V8_INLINE bool IsEmptyStatement(PreParserStatement stmt) {
+    return stmt.IsEmptyStatement();
+  }
+
+  V8_INLINE static PreParserStatement NullBlock() {
+    return PreParserStatement::Null();
+  }
+
+  V8_INLINE PreParserIdentifier EmptyIdentifierString() const {
+    return PreParserIdentifier::Default();
+  }
+
+  // Odd-ball literal creators.
+  V8_INLINE PreParserExpression GetLiteralTheHole(int position) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression GetLiteralUndefined(int position) {
+    return PreParserExpression::Default();
+  }
+
+  // Producing data during the recursive descent.
+  PreParserIdentifier GetSymbol() const;
+
+  V8_INLINE PreParserIdentifier GetNextSymbol() const {
+    return PreParserIdentifier::Default();
+  }
+
+  V8_INLINE PreParserIdentifier GetNumberAsSymbol() const {
+    return PreParserIdentifier::Default();
+  }
+
+  V8_INLINE PreParserExpression ThisExpression(int pos = kNoSourcePosition) {
+    return PreParserExpression::This();
+  }
+
+  V8_INLINE PreParserExpression NewSuperPropertyReference(int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression NewSuperCallReference(int pos) {
+    return PreParserExpression::SuperCallReference();
+  }
+
+  V8_INLINE PreParserExpression NewTargetExpression(int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression FunctionSentExpression(int pos) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserExpression ExpressionFromLiteral(Token::Value token,
+                                                      int pos) {
+    return PreParserExpression::Default();
+  }
+
+  PreParserExpression ExpressionFromIdentifier(
+      PreParserIdentifier name, int start_position, int end_position,
+      InferName infer = InferName::kYes);
+
+  V8_INLINE PreParserExpression ExpressionFromString(int pos) {
+    if (scanner()->UnescapedLiteralMatches("use strict", 10)) {
+      return PreParserExpression::UseStrictStringLiteral();
+    }
+    return PreParserExpression::StringLiteral();
+  }
+
+  V8_INLINE PreParserExpressionList NewExpressionList(int size) const {
+    return PreParserExpressionList();
+  }
+
+  V8_INLINE PreParserExpressionList NewObjectPropertyList(int size) const {
+    return PreParserExpressionList();
+  }
+
+  V8_INLINE PreParserExpressionList NewClassPropertyList(int size) const {
+    return PreParserExpressionList();
+  }
+
+  V8_INLINE PreParserStatementList NewStatementList(int size) const {
+    return PreParserStatementList();
+  }
+
+  PreParserStatementList NewCaseClauseList(int size) {
+    return PreParserStatementList();
+  }
+
+  V8_INLINE PreParserExpression
+  NewV8Intrinsic(PreParserIdentifier name, PreParserExpressionList arguments,
+                 int pos, bool* ok) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE PreParserStatement NewThrowStatement(PreParserExpression exception,
+                                                 int pos) {
+    return PreParserStatement::Jump();
+  }
+
+  V8_INLINE void AddParameterInitializationBlock(
+      const PreParserFormalParameters& parameters, PreParserStatementList body,
+      bool is_async, bool* ok) {}
+
+  V8_INLINE void AddFormalParameter(PreParserFormalParameters* parameters,
+                                    PreParserExpression pattern,
+                                    PreParserExpression initializer,
+                                    int initializer_end_position,
+                                    bool is_rest) {
+    ++parameters->arity;
+  }
+
+  V8_INLINE void DeclareFormalParameter(DeclarationScope* scope,
+                                        PreParserIdentifier parameter) {
+    if (!classifier()->is_simple_parameter_list()) {
+      scope->SetHasNonSimpleParameters();
+    }
+  }
+
+  V8_INLINE void DeclareArrowFunctionFormalParameters(
+      PreParserFormalParameters* parameters, PreParserExpression params,
+      const Scanner::Location& params_loc, Scanner::Location* duplicate_loc,
+      bool* ok) {
+    // TODO(wingo): Detect duplicated identifiers in paramlists.  Detect
+    // parameter lists that are too long.
+  }
+
+  V8_INLINE void ReindexLiterals(const PreParserFormalParameters& parameters) {}
+
+  V8_INLINE PreParserExpression NoTemplateTag() {
+    return PreParserExpression::NoTemplateTag();
+  }
+
+  V8_INLINE static bool IsTaggedTemplate(const PreParserExpression tag) {
+    return !tag.IsNoTemplateTag();
+  }
+
+  V8_INLINE void MaterializeUnspreadArgumentsLiterals(int count) {
+    for (int i = 0; i < count; ++i) {
+      function_state_->NextMaterializedLiteralIndex();
+    }
+  }
+
+  V8_INLINE PreParserExpression
+  ExpressionListToExpression(PreParserExpressionList args) {
+    return PreParserExpression::Default();
+  }
+
+  V8_INLINE void AddAccessorPrefixToFunctionName(bool is_get,
+                                                 PreParserExpression function,
+                                                 PreParserIdentifier name) {}
+  V8_INLINE void SetFunctionNameFromPropertyName(PreParserExpression property,
+                                                 PreParserIdentifier name) {}
+  V8_INLINE void SetFunctionNameFromIdentifierRef(
+      PreParserExpression value, PreParserExpression identifier) {}
+
+  V8_INLINE ZoneList<typename ExpressionClassifier::Error>*
+  GetReportedErrorList() const {
+    return function_state_->GetReportedErrorList();
+  }
+
+  V8_INLINE ZoneList<PreParserExpression>* GetNonPatternList() const {
+    return function_state_->non_patterns_to_rewrite();
+  }
+
+  V8_INLINE void CountUsage(v8::Isolate::UseCounterFeature feature) {
+    if (use_counts_ != nullptr) ++use_counts_[feature];
+  }
+
+  // Preparser's private field members.
+
+  int* use_counts_;
+  bool track_unresolved_variables_;
+};
 
 PreParserExpression PreParser::SpreadCall(PreParserExpression function,
                                           PreParserExpressionList args,
@@ -1158,46 +1450,24 @@
   return factory()->NewCallNew(function, args, pos);
 }
 
-void ParserBaseTraits<PreParser>::ParseArrowFunctionFormalParameterList(
-    PreParserFormalParameters* parameters, PreParserExpression params,
-    const Scanner::Location& params_loc, Scanner::Location* duplicate_loc,
-    const Scope::Snapshot& scope_snapshot, bool* ok) {
-  // TODO(wingo): Detect duplicated identifiers in paramlists.  Detect parameter
-  // lists that are too long.
-}
-
-ZoneList<PreParserExpression>* ParserBaseTraits<PreParser>::GetNonPatternList()
-    const {
-  return delegate()->function_state_->non_patterns_to_rewrite();
-}
-
-ZoneList<
-    typename ParserBaseTraits<PreParser>::Type::ExpressionClassifier::Error>*
-ParserBaseTraits<PreParser>::GetReportedErrorList() const {
-  return delegate()->function_state_->GetReportedErrorList();
-}
-
-Zone* ParserBaseTraits<PreParser>::zone() const {
-  return delegate()->function_state_->scope()->zone();
-}
-
 PreParserStatementList PreParser::ParseEagerFunctionBody(
     PreParserIdentifier function_name, int pos,
     const PreParserFormalParameters& parameters, FunctionKind kind,
     FunctionLiteral::FunctionType function_type, bool* ok) {
   ParsingModeScope parsing_mode(this, PARSE_EAGERLY);
+  PreParserStatementList result;
 
   Scope* inner_scope = scope();
   if (!parameters.is_simple) inner_scope = NewScope(BLOCK_SCOPE);
 
   {
     BlockState block_state(&scope_state_, inner_scope);
-    ParseStatementList(Token::RBRACE, ok);
+    ParseStatementList(result, Token::RBRACE, ok);
     if (!*ok) return PreParserStatementList();
   }
 
   Expect(Token::RBRACE, ok);
-  return PreParserStatementList();
+  return result;
 }
 
 PreParserExpression PreParser::CloseTemplateLiteral(TemplateLiteralState* state,
diff --git a/src/parsing/rewriter.cc b/src/parsing/rewriter.cc
index 51ff547..57009bd 100644
--- a/src/parsing/rewriter.cc
+++ b/src/parsing/rewriter.cc
@@ -347,10 +347,13 @@
     Variable* result = closure_scope->NewTemporary(
         info->ast_value_factory()->dot_result_string());
     // The name string must be internalized at this point.
+    info->ast_value_factory()->Internalize(info->isolate());
     DCHECK(!result->name().is_null());
     Processor processor(info->isolate(), closure_scope, result,
                         info->ast_value_factory());
     processor.Process(body);
+    // Internalize any values created during rewriting.
+    info->ast_value_factory()->Internalize(info->isolate());
     if (processor.HasStackOverflow()) return false;
 
     if (processor.result_assigned()) {
diff --git a/src/parsing/scanner-character-streams.cc b/src/parsing/scanner-character-streams.cc
index 7cdef87..3f10cfa 100644
--- a/src/parsing/scanner-character-streams.cc
+++ b/src/parsing/scanner-character-streams.cc
@@ -7,506 +7,677 @@
 #include "include/v8.h"
 #include "src/globals.h"
 #include "src/handles.h"
-#include "src/list-inl.h"  // TODO(mstarzinger): Temporary cycle breaker!
 #include "src/objects-inl.h"
+#include "src/parsing/scanner.h"
 #include "src/unicode-inl.h"
 
 namespace v8 {
 namespace internal {
 
-namespace {
-
-size_t CopyUtf8CharsToUtf16Chars(uint16_t* dest, size_t length, const byte* src,
-                                 size_t* src_pos, size_t src_length) {
-  static const unibrow::uchar kMaxUtf16Character =
-      unibrow::Utf16::kMaxNonSurrogateCharCode;
-  size_t i = 0;
-  // Because of the UTF-16 lead and trail surrogates, we stop filling the buffer
-  // one character early (in the normal case), because we need to have at least
-  // two free spaces in the buffer to be sure that the next character will fit.
-  while (i < length - 1) {
-    if (*src_pos == src_length) break;
-    unibrow::uchar c = src[*src_pos];
-    if (c <= unibrow::Utf8::kMaxOneByteChar) {
-      *src_pos = *src_pos + 1;
-    } else {
-      c = unibrow::Utf8::CalculateValue(src + *src_pos, src_length - *src_pos,
-                                        src_pos);
-    }
-    if (c > kMaxUtf16Character) {
-      dest[i++] = unibrow::Utf16::LeadSurrogate(c);
-      dest[i++] = unibrow::Utf16::TrailSurrogate(c);
-    } else {
-      dest[i++] = static_cast<uc16>(c);
-    }
-  }
-  return i;
-}
-
-size_t CopyCharsHelper(uint16_t* dest, size_t length, const uint8_t* src,
-                       size_t* src_pos, size_t src_length,
-                       ScriptCompiler::StreamedSource::Encoding encoding) {
-  // It's possible that this will be called with length 0, but don't assume that
-  // the functions this calls handle it gracefully.
-  if (length == 0) return 0;
-
-  if (encoding == ScriptCompiler::StreamedSource::UTF8) {
-    return CopyUtf8CharsToUtf16Chars(dest, length, src, src_pos, src_length);
-  }
-
-  size_t to_fill = length;
-  if (to_fill > src_length - *src_pos) to_fill = src_length - *src_pos;
-
-  if (encoding == ScriptCompiler::StreamedSource::ONE_BYTE) {
-    v8::internal::CopyChars<uint8_t, uint16_t>(dest, src + *src_pos, to_fill);
-  } else {
-    DCHECK(encoding == ScriptCompiler::StreamedSource::TWO_BYTE);
-    v8::internal::CopyChars<uint16_t, uint16_t>(
-        dest, reinterpret_cast<const uint16_t*>(src + *src_pos), to_fill);
-  }
-  *src_pos += to_fill;
-  return to_fill;
-}
-
-}  // namespace
-
-
 // ----------------------------------------------------------------------------
 // BufferedUtf16CharacterStreams
+//
+// A buffered character stream based on a random access character
+// source (ReadBlock can be called with pos() pointing to any position,
+// even positions before the current).
+class BufferedUtf16CharacterStream : public Utf16CharacterStream {
+ public:
+  BufferedUtf16CharacterStream();
+
+ protected:
+  static const size_t kBufferSize = 512;
+
+  bool ReadBlock() override;
+
+  // FillBuffer should read up to kBufferSize characters at position and store
+  // them into buffer_[0..]. It returns the number of characters stored.
+  virtual size_t FillBuffer(size_t position) = 0;
+
+  // Fixed sized buffer that this class reads from.
+  // The base class' buffer_start_ should always point to buffer_.
+  uc16 buffer_[kBufferSize];
+};
 
 BufferedUtf16CharacterStream::BufferedUtf16CharacterStream()
-    : Utf16CharacterStream(),
-      pushback_limit_(NULL) {
-  // Initialize buffer as being empty. First read will fill the buffer.
-  buffer_cursor_ = buffer_;
-  buffer_end_ = buffer_;
-}
-
-
-BufferedUtf16CharacterStream::~BufferedUtf16CharacterStream() { }
-
-void BufferedUtf16CharacterStream::PushBack(uc32 character) {
-  if (character == kEndOfInput) {
-    pos_--;
-    return;
-  }
-  if (pushback_limit_ == NULL && buffer_cursor_ > buffer_) {
-    // buffer_ is writable, buffer_cursor_ is const pointer.
-    buffer_[--buffer_cursor_ - buffer_] = static_cast<uc16>(character);
-    pos_--;
-    return;
-  }
-  SlowPushBack(static_cast<uc16>(character));
-}
-
-
-void BufferedUtf16CharacterStream::SlowPushBack(uc16 character) {
-  // In pushback mode, the end of the buffer contains pushback,
-  // and the start of the buffer (from buffer start to pushback_limit_)
-  // contains valid data that comes just after the pushback.
-  // We NULL the pushback_limit_ if pushing all the way back to the
-  // start of the buffer.
-
-  if (pushback_limit_ == NULL) {
-    // Enter pushback mode.
-    pushback_limit_ = buffer_end_;
-    buffer_end_ = buffer_ + kBufferSize;
-    buffer_cursor_ = buffer_end_;
-  }
-  // Ensure that there is room for at least one pushback.
-  DCHECK(buffer_cursor_ > buffer_);
-  DCHECK(pos_ > 0);
-  buffer_[--buffer_cursor_ - buffer_] = character;
-  if (buffer_cursor_ == buffer_) {
-    pushback_limit_ = NULL;
-  } else if (buffer_cursor_ < pushback_limit_) {
-    pushback_limit_ = buffer_cursor_;
-  }
-  pos_--;
-}
-
+    : Utf16CharacterStream(buffer_, buffer_, buffer_, 0) {}
 
 bool BufferedUtf16CharacterStream::ReadBlock() {
+  DCHECK_EQ(buffer_start_, buffer_);
+
+  size_t position = pos();
+  buffer_pos_ = position;
   buffer_cursor_ = buffer_;
-  if (pushback_limit_ != NULL) {
-    // Leave pushback mode.
-    buffer_end_ = pushback_limit_;
-    pushback_limit_ = NULL;
-    // If there were any valid characters left at the
-    // start of the buffer, use those.
-    if (buffer_cursor_ < buffer_end_) return true;
-    // Otherwise read a new block.
-  }
-  size_t length = FillBuffer(pos_);
-  buffer_end_ = buffer_ + length;
-  return length > 0;
+  buffer_end_ = buffer_ + FillBuffer(position);
+  DCHECK_EQ(pos(), position);
+  DCHECK_LE(buffer_end_, buffer_start_ + kBufferSize);
+  return buffer_cursor_ < buffer_end_;
 }
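+
+// Note: if FillBuffer(position) returns 0 (no more characters), ReadBlock
+// leaves buffer_cursor_ == buffer_end_ and returns false, i.e. end of input.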
 
-
-size_t BufferedUtf16CharacterStream::SlowSeekForward(size_t delta) {
-  // Leave pushback mode (i.e., ignore that there might be valid data
-  // in the buffer before the pushback_limit_ point).
-  pushback_limit_ = NULL;
-  return BufferSeekForward(delta);
-}
-
-
 // ----------------------------------------------------------------------------
-// GenericStringUtf16CharacterStream
+// GenericStringUtf16CharacterStream.
+//
+// A stream w/ a data source being a (flattened) Handle<String>.
 
+class GenericStringUtf16CharacterStream : public BufferedUtf16CharacterStream {
+ public:
+  GenericStringUtf16CharacterStream(Handle<String> data, size_t start_position,
+                                    size_t end_position);
+
+ protected:
+  size_t FillBuffer(size_t position) override;
+
+  Handle<String> string_;
+  size_t length_;
+};
 
 GenericStringUtf16CharacterStream::GenericStringUtf16CharacterStream(
     Handle<String> data, size_t start_position, size_t end_position)
-    : string_(data), length_(end_position), bookmark_(kNoBookmark) {
-  DCHECK(end_position >= start_position);
-  pos_ = start_position;
+    : string_(data), length_(end_position) {
+  DCHECK_GE(end_position, start_position);
+  DCHECK_GE(static_cast<size_t>(string_->length()),
+            end_position - start_position);
+  buffer_pos_ = start_position;
 }
 
-
-GenericStringUtf16CharacterStream::~GenericStringUtf16CharacterStream() { }
-
-
-bool GenericStringUtf16CharacterStream::SetBookmark() {
-  bookmark_ = pos_;
-  return true;
-}
-
-
-void GenericStringUtf16CharacterStream::ResetToBookmark() {
-  DCHECK(bookmark_ != kNoBookmark);
-  pos_ = bookmark_;
-  buffer_cursor_ = buffer_;
-  buffer_end_ = buffer_ + FillBuffer(pos_);
-}
-
-
-size_t GenericStringUtf16CharacterStream::BufferSeekForward(size_t delta) {
-  size_t old_pos = pos_;
-  pos_ = Min(pos_ + delta, length_);
-  ReadBlock();
-  return pos_ - old_pos;
-}
-
-
 size_t GenericStringUtf16CharacterStream::FillBuffer(size_t from_pos) {
   if (from_pos >= length_) return 0;
-  size_t length = kBufferSize;
-  if (from_pos + length > length_) {
-    length = length_ - from_pos;
-  }
+
+  size_t length = i::Min(kBufferSize, length_ - from_pos);
   String::WriteToFlat<uc16>(*string_, buffer_, static_cast<int>(from_pos),
                             static_cast<int>(from_pos + length));
   return length;
 }
 
-
 // ----------------------------------------------------------------------------
-// ExternalStreamingStream
+// ExternalTwoByteStringUtf16CharacterStream.
+//
+// A stream whose data source is a Handle<ExternalTwoByteString>. It avoids
+// all data copying.
 
-size_t ExternalStreamingStream::FillBuffer(size_t position) {
-  // Ignore "position" which is the position in the decoded data. Instead,
-  // ExternalStreamingStream keeps track of the position in the raw data.
-  size_t data_in_buffer = 0;
-  // Note that the UTF-8 decoder might not be able to fill the buffer
-  // completely; it will typically leave the last character empty (see
-  // Utf8ToUtf16CharacterStream::CopyChars).
-  while (data_in_buffer < kBufferSize - 1) {
-    if (current_data_ == NULL) {
-      // GetSomeData will wait until the embedder has enough data. Here's an
-      // interface between the API which uses size_t (which is the correct type
-      // here) and the internal parts which use size_t.
-      current_data_length_ = source_stream_->GetMoreData(&current_data_);
-      current_data_offset_ = 0;
-      bool data_ends = current_data_length_ == 0;
-      bookmark_data_is_from_current_data_ = false;
+class ExternalTwoByteStringUtf16CharacterStream : public Utf16CharacterStream {
+ public:
+  ExternalTwoByteStringUtf16CharacterStream(Handle<ExternalTwoByteString> data,
+                                            size_t start_position,
+                                            size_t end_position);
 
-      // A caveat: a data chunk might end with bytes from an incomplete UTF-8
-      // character (the rest of the bytes will be in the next chunk).
-      if (encoding_ == ScriptCompiler::StreamedSource::UTF8) {
-        HandleUtf8SplitCharacters(&data_in_buffer);
-        if (!data_ends && current_data_offset_ == current_data_length_) {
-          // The data stream didn't end, but we used all the data in the
-          // chunk. This will only happen when the chunk was really small. We
-          // don't handle the case where a UTF-8 character is split over several
-          // chunks; in that case V8 won't crash, but it will be a parse error.
-          FlushCurrent();
-          continue;  // Request a new chunk.
-        }
-      }
+ private:
+  bool ReadBlock() override;
 
-      // Did the data stream end?
-      if (data_ends) {
-        DCHECK(utf8_split_char_buffer_length_ == 0);
-        return data_in_buffer;
-      }
-    }
-
-    // Fill the buffer from current_data_.
-    size_t new_offset = 0;
-    size_t new_chars_in_buffer =
-        CopyCharsHelper(buffer_ + data_in_buffer, kBufferSize - data_in_buffer,
-                        current_data_ + current_data_offset_, &new_offset,
-                        current_data_length_ - current_data_offset_, encoding_);
-    data_in_buffer += new_chars_in_buffer;
-    current_data_offset_ += new_offset;
-    DCHECK(data_in_buffer <= kBufferSize);
-
-    // Did we use all the data in the data chunk?
-    if (current_data_offset_ == current_data_length_) {
-      FlushCurrent();
-    }
-  }
-  return data_in_buffer;
-}
-
-
-bool ExternalStreamingStream::SetBookmark() {
-  // Bookmarking for this stream is a bit more complex than expected, since
-  // the stream state is distributed over several places:
-  // - pos_ (inherited from Utf16CharacterStream)
-  // - buffer_cursor_ and buffer_end_ (also from Utf16CharacterStream)
-  // - buffer_ (from BufferedUtf16CharacterStream)
-  // - current_data_ (+ .._offset_ and .._length) (this class)
-  // - utf8_split_char_buffer_* (a partial utf8 symbol at the block boundary)
-  //
-  // The underlying source_stream_ instance likely could re-construct this
-  // local data for us, but with the given interfaces we have no way of
-  // accomplishing this. Thus, we'll have to save all data locally.
-  //
-  // What gets saved where:
-  // - pos_  =>  bookmark_
-  // - buffer_[buffer_cursor_ .. buffer_end_]  =>  bookmark_buffer_
-  // - current_data_[.._offset_ .. .._length_]  =>  bookmark_data_
-  // - utf8_split_char_buffer_* => bookmark_utf8_split...
-  //
-  // To make sure we don't unnecessarily copy data, we also maintain
-  // whether bookmark_data_ contains a copy of the current current_data_
-  // block. This is done with:
-  // - bookmark_data_is_from_current_data_
-  // - bookmark_data_offset_: offset into bookmark_data_
-  //
-  // Note that bookmark_data_is_from_current_data_ must be maintained
-  // whenever current_data_ is updated.
-
-  bookmark_ = pos_;
-
-  size_t buffer_length = buffer_end_ - buffer_cursor_;
-  bookmark_buffer_.Dispose();
-  bookmark_buffer_ = Vector<uint16_t>::New(static_cast<int>(buffer_length));
-  CopyCharsUnsigned(bookmark_buffer_.start(), buffer_cursor_, buffer_length);
-
-  size_t data_length = current_data_length_ - current_data_offset_;
-  size_t bookmark_data_length = static_cast<size_t>(bookmark_data_.length());
-  if (bookmark_data_is_from_current_data_ &&
-      data_length < bookmark_data_length) {
-    // Fast case: bookmark_data_ was previously copied from the current
-    //            data block, and we have enough data for this bookmark.
-    bookmark_data_offset_ = bookmark_data_length - data_length;
-  } else {
-    // Slow case: We need to copy current_data_.
-    bookmark_data_.Dispose();
-    bookmark_data_ = Vector<uint8_t>::New(static_cast<int>(data_length));
-    CopyBytes(bookmark_data_.start(), current_data_ + current_data_offset_,
-              data_length);
-    bookmark_data_is_from_current_data_ = true;
-    bookmark_data_offset_ = 0;
-  }
-
-  bookmark_utf8_split_char_buffer_length_ = utf8_split_char_buffer_length_;
-  for (size_t i = 0; i < utf8_split_char_buffer_length_; i++) {
-    bookmark_utf8_split_char_buffer_[i] = utf8_split_char_buffer_[i];
-  }
-
-  return source_stream_->SetBookmark();
-}
-
-
-void ExternalStreamingStream::ResetToBookmark() {
-  source_stream_->ResetToBookmark();
-  FlushCurrent();
-
-  pos_ = bookmark_;
-
-  // bookmark_data_* => current_data_*
-  // (current_data_ assumes ownership of its memory.)
-  current_data_offset_ = 0;
-  current_data_length_ = bookmark_data_.length() - bookmark_data_offset_;
-  uint8_t* data = new uint8_t[current_data_length_];
-  CopyCharsUnsigned(data, bookmark_data_.begin() + bookmark_data_offset_,
-                    current_data_length_);
-  delete[] current_data_;
-  current_data_ = data;
-  bookmark_data_is_from_current_data_ = true;
-
-  // bookmark_buffer_ needs to be copied to buffer_.
-  CopyCharsUnsigned(buffer_, bookmark_buffer_.begin(),
-                    bookmark_buffer_.length());
-  buffer_cursor_ = buffer_;
-  buffer_end_ = buffer_ + bookmark_buffer_.length();
-
-  // utf8 split char buffer
-  utf8_split_char_buffer_length_ = bookmark_utf8_split_char_buffer_length_;
-  for (size_t i = 0; i < bookmark_utf8_split_char_buffer_length_; i++) {
-    utf8_split_char_buffer_[i] = bookmark_utf8_split_char_buffer_[i];
-  }
-}
-
-
-void ExternalStreamingStream::FlushCurrent() {
-  delete[] current_data_;
-  current_data_ = NULL;
-  current_data_length_ = 0;
-  current_data_offset_ = 0;
-  bookmark_data_is_from_current_data_ = false;
-}
-
-
-void ExternalStreamingStream::HandleUtf8SplitCharacters(
-    size_t* data_in_buffer) {
-  // Note the following property of UTF-8 which makes this function possible:
-  // Given any byte, we can always read its local environment (in both
-  // directions) to find out the (possibly multi-byte) character it belongs
-  // to. Single byte characters are of the form 0b0XXXXXXX. The first byte of a
-  // multi-byte character is of the form 0b110XXXXX, 0b1110XXXX or
-  // 0b11110XXX. The continuation bytes are of the form 0b10XXXXXX.
-
-  // First check if we have leftover data from the last chunk.
-  unibrow::uchar c;
-  if (utf8_split_char_buffer_length_ > 0) {
-    // Move the bytes which are part of the split character (which started in
-    // the previous chunk) into utf8_split_char_buffer_. Note that the
-    // continuation bytes are of the form 0b10XXXXXX, thus c >> 6 == 2.
-    while (current_data_offset_ < current_data_length_ &&
-           utf8_split_char_buffer_length_ < 4 &&
-           (c = current_data_[current_data_offset_]) >> 6 == 2) {
-      utf8_split_char_buffer_[utf8_split_char_buffer_length_] = c;
-      ++utf8_split_char_buffer_length_;
-      ++current_data_offset_;
-    }
-
-    // Convert the data in utf8_split_char_buffer_.
-    size_t new_offset = 0;
-    size_t new_chars_in_buffer =
-        CopyCharsHelper(buffer_ + *data_in_buffer,
-                        kBufferSize - *data_in_buffer, utf8_split_char_buffer_,
-                        &new_offset, utf8_split_char_buffer_length_, encoding_);
-    *data_in_buffer += new_chars_in_buffer;
-    // Make sure we used all the data.
-    DCHECK(new_offset == utf8_split_char_buffer_length_);
-    DCHECK(*data_in_buffer <= kBufferSize);
-
-    utf8_split_char_buffer_length_ = 0;
-  }
-
-  // Move bytes which are part of an incomplete character from the end of the
-  // current chunk to utf8_split_char_buffer_. They will be converted when the
-  // next data chunk arrives. Note that all valid UTF-8 characters are at most 4
-  // bytes long, but if the data is invalid, we can have character values bigger
-  // than unibrow::Utf8::kMaxOneByteChar for more than 4 consecutive bytes.
-  while (current_data_length_ > current_data_offset_ &&
-         (c = current_data_[current_data_length_ - 1]) >
-             unibrow::Utf8::kMaxOneByteChar &&
-         utf8_split_char_buffer_length_ < 4) {
-    --current_data_length_;
-    ++utf8_split_char_buffer_length_;
-    if (c >= (3 << 6)) {
-      // 3 << 6 = 0b11000000; this is the first byte of the multi-byte
-      // character. No need to copy the previous characters into the conversion
-      // buffer (even if they're multi-byte).
-      break;
-    }
-  }
-  CHECK(utf8_split_char_buffer_length_ <= 4);
-  for (size_t i = 0; i < utf8_split_char_buffer_length_; ++i) {
-    utf8_split_char_buffer_[i] = current_data_[current_data_length_ + i];
-  }
-}
-
-
-// ----------------------------------------------------------------------------
-// ExternalTwoByteStringUtf16CharacterStream
-
-ExternalTwoByteStringUtf16CharacterStream::
-    ~ExternalTwoByteStringUtf16CharacterStream() { }
+  const uc16* raw_data_;  // Pointer to the actual array of characters.
+  size_t start_pos_;
+  size_t end_pos_;
+};
 
 ExternalTwoByteStringUtf16CharacterStream::
     ExternalTwoByteStringUtf16CharacterStream(
-        Handle<ExternalTwoByteString> data, int start_position,
-        int end_position)
-    : raw_data_(data->GetTwoByteData(start_position)), bookmark_(kNoBookmark) {
-  buffer_cursor_ = raw_data_,
-  buffer_end_ = raw_data_ + (end_position - start_position);
-  pos_ = start_position;
+        Handle<ExternalTwoByteString> data, size_t start_position,
+        size_t end_position)
+    : raw_data_(data->GetTwoByteData(static_cast<int>(start_position))),
+      start_pos_(start_position),
+      end_pos_(end_position) {
+  buffer_start_ = raw_data_;
+  buffer_cursor_ = raw_data_;
+  buffer_end_ = raw_data_ + (end_pos_ - start_pos_);
+  buffer_pos_ = start_pos_;
 }
 
-
-bool ExternalTwoByteStringUtf16CharacterStream::SetBookmark() {
-  bookmark_ = pos_;
-  return true;
-}
-
-
-void ExternalTwoByteStringUtf16CharacterStream::ResetToBookmark() {
-  DCHECK(bookmark_ != kNoBookmark);
-  pos_ = bookmark_;
-  buffer_cursor_ = raw_data_ + bookmark_;
+bool ExternalTwoByteStringUtf16CharacterStream::ReadBlock() {
+  size_t position = pos();
+  bool have_data = start_pos_ <= position && position < end_pos_;
+  if (have_data) {
+    buffer_pos_ = start_pos_;
+    buffer_cursor_ = raw_data_ + (position - start_pos_),
+    buffer_end_ = raw_data_ + (end_pos_ - start_pos_);
+  } else {
+    buffer_pos_ = position;
+    buffer_cursor_ = raw_data_;
+    buffer_end_ = raw_data_;
+  }
+  return have_data;
 }
 
 // ----------------------------------------------------------------------------
 // ExternalOneByteStringUtf16CharacterStream
+//
+// A stream whose data source is a Handle<ExternalOneByteString>.
 
-ExternalOneByteStringUtf16CharacterStream::
-    ~ExternalOneByteStringUtf16CharacterStream() {}
+class ExternalOneByteStringUtf16CharacterStream
+    : public BufferedUtf16CharacterStream {
+ public:
+  ExternalOneByteStringUtf16CharacterStream(Handle<ExternalOneByteString> data,
+                                            size_t start_position,
+                                            size_t end_position);
+
+  // For testing:
+  ExternalOneByteStringUtf16CharacterStream(const char* data, size_t length);
+
+ protected:
+  size_t FillBuffer(size_t position) override;
+
+  const uint8_t* raw_data_;  // Pointer to the actual array of characters.
+  size_t length_;
+};
 
 ExternalOneByteStringUtf16CharacterStream::
     ExternalOneByteStringUtf16CharacterStream(
-        Handle<ExternalOneByteString> data, int start_position,
-        int end_position)
-    : raw_data_(data->GetChars()),
-      length_(end_position),
-      bookmark_(kNoBookmark) {
+        Handle<ExternalOneByteString> data, size_t start_position,
+        size_t end_position)
+    : raw_data_(data->GetChars()), length_(end_position) {
   DCHECK(end_position >= start_position);
-  pos_ = start_position;
+  buffer_pos_ = start_position;
 }
 
 ExternalOneByteStringUtf16CharacterStream::
     ExternalOneByteStringUtf16CharacterStream(const char* data, size_t length)
-    : raw_data_(reinterpret_cast<const uint8_t*>(data)),
-      length_(length),
-      bookmark_(kNoBookmark) {}
-
-ExternalOneByteStringUtf16CharacterStream::
-    ExternalOneByteStringUtf16CharacterStream(const char* data)
-    : ExternalOneByteStringUtf16CharacterStream(data, strlen(data)) {}
-
-bool ExternalOneByteStringUtf16CharacterStream::SetBookmark() {
-  bookmark_ = pos_;
-  return true;
-}
-
-void ExternalOneByteStringUtf16CharacterStream::ResetToBookmark() {
-  DCHECK(bookmark_ != kNoBookmark);
-  pos_ = bookmark_;
-  buffer_cursor_ = buffer_;
-  buffer_end_ = buffer_ + FillBuffer(pos_);
-}
-
-size_t ExternalOneByteStringUtf16CharacterStream::BufferSeekForward(
-    size_t delta) {
-  size_t old_pos = pos_;
-  pos_ = Min(pos_ + delta, length_);
-  ReadBlock();
-  return pos_ - old_pos;
-}
+    : raw_data_(reinterpret_cast<const uint8_t*>(data)), length_(length) {}
 
 size_t ExternalOneByteStringUtf16CharacterStream::FillBuffer(size_t from_pos) {
   if (from_pos >= length_) return 0;
+
   size_t length = Min(kBufferSize, length_ - from_pos);
-  for (size_t i = 0; i < length; ++i) {
-    buffer_[i] = static_cast<uc16>(raw_data_[from_pos + i]);
-  }
+  i::CopyCharsUnsigned(buffer_, raw_data_ + from_pos, length);
   return length;
 }
 
+// ----------------------------------------------------------------------------
+// Utf8ExternalStreamingStream - chunked streaming of Utf-8 data.
+//
+// This implementation is fairly complex, since data arrives in chunks which
+// may 'cut' arbitrarily into utf-8 characters. Also, seeking to a given
+// character position is tricky because the byte position cannot be dericed
+// from the character position.
+
+class Utf8ExternalStreamingStream : public BufferedUtf16CharacterStream {
+ public:
+  Utf8ExternalStreamingStream(
+      ScriptCompiler::ExternalSourceStream* source_stream)
+      : current_({0, {0, 0, unibrow::Utf8::Utf8IncrementalBuffer(0)}}),
+        source_stream_(source_stream) {}
+  ~Utf8ExternalStreamingStream() override {
+    for (size_t i = 0; i < chunks_.size(); i++) delete[] chunks_[i].data;
+  }
+
+ protected:
+  size_t FillBuffer(size_t position) override;
+
+ private:
+  // A position within the data stream. It stores:
+  // - The 'physical' position (# of bytes in the stream),
+  // - the 'logical' position (# of ucs-2 characters, also within the stream),
+  // - a possibly incomplete utf-8 char at the current 'physical' position.
+  struct StreamPosition {
+    size_t bytes;
+    size_t chars;
+    unibrow::Utf8::Utf8IncrementalBuffer incomplete_char;
+  };
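+  // As a rough example: for the utf-8 input "a€b" ('€' = E2 82 AC), the
+  // position just past the euro sign is StreamPosition{bytes: 4, chars: 2}
+  // with an empty incomplete_char; a chunk boundary falling inside the euro
+  // sign would instead leave its leading bytes in incomplete_char.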
+
+  // Position contains a StreamPosition and the index of the chunk the position
+  // points into. (The chunk_no could be derived from pos, but that'd be
+  // an expensive search through all chunks.)
+  struct Position {
+    size_t chunk_no;
+    StreamPosition pos;
+  };
+
+  // A chunk in the list of chunks, containing:
+  // - The chunk data (data pointer and length), and
+  // - the position at the first byte of the chunk.
+  struct Chunk {
+    const uint8_t* data;
+    size_t length;
+    StreamPosition start;
+  };
+
+  // Within the current chunk, skip forward from current_ towards position.
+  bool SkipToPosition(size_t position);
+  // Within the current chunk, fill the buffer_ (while it has capacity).
+  void FillBufferFromCurrentChunk();
+  // Fetch a new chunk (assuming current_ is at the end of the current data).
+  bool FetchChunk();
+  // Search through the chunks and set current_ to point to the given position.
+  // (This call is potentially expensive.)
+  void SearchPosition(size_t position);
+
+  std::vector<Chunk> chunks_;
+  Position current_;
+  ScriptCompiler::ExternalSourceStream* source_stream_;
+};
+
+bool Utf8ExternalStreamingStream::SkipToPosition(size_t position) {
+  DCHECK_LE(current_.pos.chars, position);  // We can only skip forward.
+
+  // Already there? Then return immediately.
+  if (current_.pos.chars == position) return true;
+
+  const Chunk& chunk = chunks_[current_.chunk_no];
+  DCHECK(current_.pos.bytes >= chunk.start.bytes);
+
+  unibrow::Utf8::Utf8IncrementalBuffer incomplete_char =
+      chunk.start.incomplete_char;
+  size_t it = current_.pos.bytes - chunk.start.bytes;
+  size_t chars = chunk.start.chars;
+  while (it < chunk.length && chars < position) {
+    unibrow::uchar t =
+        unibrow::Utf8::ValueOfIncremental(chunk.data[it], &incomplete_char);
+    if (t != unibrow::Utf8::kIncomplete) {
+      chars++;
+      if (t > unibrow::Utf16::kMaxNonSurrogateCharCode) chars++;
+    }
+    it++;
+  }
+
+  current_.pos.bytes += it;
+  current_.pos.chars = chars;
+  current_.pos.incomplete_char = incomplete_char;
+  current_.chunk_no += (it == chunk.length);
+
+  return current_.pos.chars == position;
+}
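+
+// A worked example of the char counting above: a supplementary-plane character
+// such as U+1F600 occupies 4 utf-8 bytes but two ucs-2 units (lead + trail
+// surrogate), so SkipToPosition advances 'chars' by 2 for it, while an ASCII
+// byte advances both 'bytes' and 'chars' by 1.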
+
+void Utf8ExternalStreamingStream::FillBufferFromCurrentChunk() {
+  DCHECK_LT(current_.chunk_no, chunks_.size());
+  DCHECK_EQ(buffer_start_, buffer_cursor_);
+  DCHECK_LT(buffer_end_ + 1, buffer_start_ + kBufferSize);
+
+  const Chunk& chunk = chunks_[current_.chunk_no];
+
+  // The buffer_ is writable, but buffer_*_ members are const. So we get a
+  // non-const pointer into buffer that points to the same char as buffer_end_.
+  uint16_t* cursor = buffer_ + (buffer_end_ - buffer_start_);
+  DCHECK_EQ(cursor, buffer_end_);
+
+  // If the current chunk is the last (empty) chunk we'll have to process
+  // any left-over, partial characters.
+  if (chunk.length == 0) {
+    unibrow::uchar t =
+        unibrow::Utf8::ValueOfIncrementalFinish(&current_.pos.incomplete_char);
+    if (t != unibrow::Utf8::kBufferEmpty) {
+      DCHECK(t < unibrow::Utf16::kMaxNonSurrogateCharCode);
+      *cursor = static_cast<uc16>(t);
+      buffer_end_++;
+      current_.pos.chars++;
+    }
+    return;
+  }
+
+  static const unibrow::uchar kUtf8Bom = 0xfeff;
+
+  unibrow::Utf8::Utf8IncrementalBuffer incomplete_char =
+      current_.pos.incomplete_char;
+  size_t it;
+  for (it = current_.pos.bytes - chunk.start.bytes;
+       it < chunk.length && cursor + 1 < buffer_start_ + kBufferSize; it++) {
+    unibrow::uchar t =
+        unibrow::Utf8::ValueOfIncremental(chunk.data[it], &incomplete_char);
+    if (t == unibrow::Utf8::kIncomplete) continue;
+    if (V8_LIKELY(t < kUtf8Bom)) {
+      *(cursor++) = static_cast<uc16>(t);  // By far the most frequent case.
+    } else if (t == kUtf8Bom && current_.pos.bytes + it == 2) {
+      // BOM detected at beginning of the stream. Don't copy it.
+    } else if (t <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
+      *(cursor++) = static_cast<uc16>(t);
+    } else {
+      *(cursor++) = unibrow::Utf16::LeadSurrogate(t);
+      *(cursor++) = unibrow::Utf16::TrailSurrogate(t);
+    }
+  }
+
+  current_.pos.bytes = chunk.start.bytes + it;
+  current_.pos.chars += (cursor - buffer_end_);
+  current_.pos.incomplete_char = incomplete_char;
+  current_.chunk_no += (it == chunk.length);
+
+  buffer_end_ = cursor;
+}
+
+bool Utf8ExternalStreamingStream::FetchChunk() {
+  DCHECK_EQ(current_.chunk_no, chunks_.size());
+  DCHECK(chunks_.empty() || chunks_.back().length != 0);
+
+  const uint8_t* chunk = nullptr;
+  size_t length = source_stream_->GetMoreData(&chunk);
+  chunks_.push_back({chunk, length, current_.pos});
+  return length > 0;
+}
+
+void Utf8ExternalStreamingStream::SearchPosition(size_t position) {
+  // If current_ already points to the right position, we're done.
+  //
+  // This is expected to be the common case, since we typically call
+  // FillBuffer right after the current buffer.
+  if (current_.pos.chars == position) return;
+
+  // No chunks. Fetch at least one, so we can assume !chunks_.empty() below.
+  if (chunks_.empty()) {
+    DCHECK_EQ(current_.chunk_no, 0);
+    DCHECK_EQ(current_.pos.bytes, 0);
+    DCHECK_EQ(current_.pos.chars, 0);
+    FetchChunk();
+  }
+
+  // Search for the last chunk whose start position is less or equal to
+  // position.
+  size_t chunk_no = chunks_.size() - 1;
+  while (chunk_no > 0 && chunks_[chunk_no].start.chars > position) {
+    chunk_no--;
+  }
+
+  // Did we find the terminating (zero-length) chunk? Then we're seeking
+  // behind the end of the data, and position does not exist.
+  // Set current_ to point to the terminating chunk.
+  if (chunks_[chunk_no].length == 0) {
+    current_ = {chunk_no, chunks_[chunk_no].start};
+    return;
+  }
+
+  // Did we find the non-last chunk? Then our position must be within chunk_no.
+  if (chunk_no + 1 < chunks_.size()) {
+    // Fancy-pants optimization for ASCII chunks within a utf-8 stream.
+    // (Many web sites declare utf-8 encoding, but use only (or almost only) the
+    //  ASCII subset for their JavaScript sources. We can exploit this, by
+    //  checking whether the # bytes in a chunk are equal to the # chars, and if
+    //  so avoid the expensive SkipToPosition.)
+    bool ascii_only_chunk =
+        (chunks_[chunk_no + 1].start.bytes - chunks_[chunk_no].start.bytes) ==
+        (chunks_[chunk_no + 1].start.chars - chunks_[chunk_no].start.chars);
+    if (ascii_only_chunk) {
+      size_t skip = position - chunks_[chunk_no].start.chars;
+      current_ = {chunk_no,
+                  {chunks_[chunk_no].start.bytes + skip,
+                   chunks_[chunk_no].start.chars + skip,
+                   unibrow::Utf8::Utf8IncrementalBuffer(0)}};
+    } else {
+      current_ = {chunk_no, chunks_[chunk_no].start};
+      SkipToPosition(position);
+    }
+
+    // Since position was within the chunk, SkipToPosition should have found
+    // something.
+    DCHECK_EQ(position, current_.pos.chars);
+    return;
+  }
+
+  // What's left: We're in the last, non-terminating chunk. Our position
+  // may be in the chunk, but it may also be in 'future' chunks, which we'll
+  // have to obtain.
+  DCHECK_EQ(chunk_no, chunks_.size() - 1);
+  current_ = {chunk_no, chunks_[chunk_no].start};
+  bool have_more_data = true;
+  bool found = SkipToPosition(position);
+  while (have_more_data && !found) {
+    DCHECK_EQ(current_.chunk_no, chunks_.size());
+    have_more_data = FetchChunk();
+    found = have_more_data && SkipToPosition(position);
+  }
+
+  // We'll return with a position != the desired position only if we're out
+  // of data. In that case, we'll point to the terminating chunk.
+  DCHECK_EQ(found, current_.pos.chars == position);
+  DCHECK_EQ(have_more_data, chunks_.back().length != 0);
+  DCHECK_IMPLIES(!found, !have_more_data);
+  DCHECK_IMPLIES(!found, current_.chunk_no == chunks_.size() - 1);
+}
+
+size_t Utf8ExternalStreamingStream::FillBuffer(size_t position) {
+  buffer_cursor_ = buffer_;
+  buffer_end_ = buffer_;
+
+  SearchPosition(position);
+  bool out_of_data = current_.chunk_no != chunks_.size() &&
+                     chunks_[current_.chunk_no].length == 0;
+  if (out_of_data) return 0;
+
+  // Fill the buffer, until we have at least one char (or are out of data).
+  // (The embedder might give us 1-byte blocks within a utf-8 char, so we
+  //  can't guarantee progress with one chunk. Thus we iterate.)
+  while (!out_of_data && buffer_cursor_ == buffer_end_) {
+    // At end of current data, but there might be more? Then fetch it.
+    if (current_.chunk_no == chunks_.size()) {
+      out_of_data = !FetchChunk();
+    }
+    FillBufferFromCurrentChunk();
+  }
+
+  DCHECK_EQ(current_.pos.chars - position, buffer_end_ - buffer_cursor_);
+  return buffer_end_ - buffer_cursor_;
+}
+
+// ----------------------------------------------------------------------------
+// Chunks - helper for One- + TwoByteExternalStreamingStream
+namespace {
+
+struct Chunk {
+  const uint8_t* data;
+  size_t byte_length;
+  size_t byte_pos;
+};
+
+typedef std::vector<struct Chunk> Chunks;
+
+void DeleteChunks(Chunks& chunks) {
+  for (size_t i = 0; i < chunks.size(); i++) delete[] chunks[i].data;
+}
+
+// Return the chunk index for the chunk containing position.
+// If position is behind the end of the stream, the index of the last,
+// zero-length chunk is returned.
+size_t FindChunk(Chunks& chunks, ScriptCompiler::ExternalSourceStream* source_,
+                 size_t position) {
+  size_t end_pos =
+      chunks.empty() ? 0 : (chunks.back().byte_pos + chunks.back().byte_length);
+
+  // Get more data if needed. We usually won't enter the loop body.
+  bool out_of_data = !chunks.empty() && chunks.back().byte_length == 0;
+  while (!out_of_data && end_pos <= position + 1) {
+    const uint8_t* chunk = nullptr;
+    size_t len = source_->GetMoreData(&chunk);
+
+    chunks.push_back({chunk, len, end_pos});
+    end_pos += len;
+    out_of_data = (len == 0);
+  }
+
+  // Here, we should always have at least one chunk, and we either have the
+  // chunk we were looking for, or we're out of data. Also, out_of_data and
+  // end_pos are current (and designate whether we have exhausted the stream,
+  // and the length of data received so far, respectively).
+  DCHECK(!chunks.empty());
+  DCHECK_EQ(end_pos, chunks.back().byte_pos + chunks.back().byte_length);
+  DCHECK_EQ(out_of_data, chunks.back().byte_length == 0);
+  DCHECK(position < end_pos || out_of_data);
+
+  // Edge case: position is behind the end of stream: Return the last (length 0)
+  // chunk to indicate the end of the stream.
+  if (position >= end_pos) {
+    DCHECK(out_of_data);
+    return chunks.size() - 1;
+  }
+
+  // We almost always 'stream', meaning we want data from the last chunk, so
+  // let's look at chunks back-to-front.
+  size_t chunk_no = chunks.size() - 1;
+  while (chunks[chunk_no].byte_pos > position) {
+    DCHECK_NE(chunk_no, 0);
+    chunk_no--;
+  }
+  DCHECK_LE(chunks[chunk_no].byte_pos, position);
+  DCHECK_LT(position, chunks[chunk_no].byte_pos + chunks[chunk_no].byte_length);
+  return chunk_no;
+}
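+
+// For instance: with two 8-byte chunks at byte_pos 0 and 8 plus a terminating
+// zero-length chunk at byte_pos 16, FindChunk(chunks, source, 10) returns
+// index 1, while FindChunk(chunks, source, 16) returns the terminating
+// chunk's index, 2.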
+
+}  // anonymous namespace
+
+// ----------------------------------------------------------------------------
+// OneByteExternalStreamingStream
+//
+// A stream of latin-1 encoded, chunked data.
+
+class OneByteExternalStreamingStream : public BufferedUtf16CharacterStream {
+ public:
+  explicit OneByteExternalStreamingStream(
+      ScriptCompiler::ExternalSourceStream* source)
+      : source_(source) {}
+  ~OneByteExternalStreamingStream() override { DeleteChunks(chunks_); }
+
+ protected:
+  size_t FillBuffer(size_t position) override;
+
+ private:
+  Chunks chunks_;
+  ScriptCompiler::ExternalSourceStream* source_;
+};
+
+size_t OneByteExternalStreamingStream::FillBuffer(size_t position) {
+  const Chunk& chunk = chunks_[FindChunk(chunks_, source_, position)];
+  if (chunk.byte_length == 0) return 0;
+
+  size_t start_pos = position - chunk.byte_pos;
+  size_t len = i::Min(kBufferSize, chunk.byte_length - start_pos);
+  i::CopyCharsUnsigned(buffer_, chunk.data + start_pos, len);
+  return len;
+}
+
+// ----------------------------------------------------------------------------
+// TwoByteExternalStreamingStream
+//
+// A stream of ucs-2 data, delivered in chunks. Chunks may be 'cut' into the
+// middle of characters (or even contain only one byte), which adds a bit
+// of complexity. This stream avoids all data copying, except for characters
+// that cross chunk boundaries.
+
+class TwoByteExternalStreamingStream : public Utf16CharacterStream {
+ public:
+  explicit TwoByteExternalStreamingStream(
+      ScriptCompiler::ExternalSourceStream* source);
+  ~TwoByteExternalStreamingStream() override;
+
+ protected:
+  bool ReadBlock() override;
+
+  Chunks chunks_;
+  ScriptCompiler::ExternalSourceStream* source_;
+  uc16 one_char_buffer_;
+};
+
+TwoByteExternalStreamingStream::TwoByteExternalStreamingStream(
+    ScriptCompiler::ExternalSourceStream* source)
+    : Utf16CharacterStream(&one_char_buffer_, &one_char_buffer_,
+                           &one_char_buffer_, 0),
+      source_(source),
+      one_char_buffer_(0) {}
+
+TwoByteExternalStreamingStream::~TwoByteExternalStreamingStream() {
+  DeleteChunks(chunks_);
+}
+
+bool TwoByteExternalStreamingStream::ReadBlock() {
+  size_t position = pos();
+
+  // We'll search for the 2nd byte of our character, to make sure we
+  // have enough data for at least one character.
+  size_t chunk_no = FindChunk(chunks_, source_, 2 * position + 1);
+
+  // Out of data? Return 0.
+  if (chunks_[chunk_no].byte_length == 0) {
+    buffer_cursor_ = buffer_start_;
+    buffer_end_ = buffer_start_;
+    return false;
+  }
+
+  Chunk& current = chunks_[chunk_no];
+
+  // Annoying edge case: Chunks may not be 2-byte aligned, meaning that a
+  // character may be split between the previous and the current chunk.
+  // If we find such a lonely byte at the beginning of the chunk, we'll use
+  // one_char_buffer_ to hold the full character.
+  bool lonely_byte = (chunks_[chunk_no].byte_pos == (2 * position + 1));
+  if (lonely_byte) {
+    DCHECK_NE(chunk_no, 0);
+    Chunk& previous_chunk = chunks_[chunk_no - 1];
+#ifdef V8_TARGET_BIG_ENDIAN
+    uc16 character = current.data[0] |
+                     previous_chunk.data[previous_chunk.byte_length - 1] << 8;
+#else
+    uc16 character = previous_chunk.data[previous_chunk.byte_length - 1] |
+                     current.data[0] << 8;
+#endif
+
+    one_char_buffer_ = character;
+    buffer_pos_ = position;
+    buffer_start_ = &one_char_buffer_;
+    buffer_cursor_ = &one_char_buffer_;
+    buffer_end_ = &one_char_buffer_ + 1;
+    return true;
+  }
+
+  // Common case: character is in current chunk.
+  DCHECK_LE(current.byte_pos, 2 * position);
+  DCHECK_LT(2 * position + 1, current.byte_pos + current.byte_length);
+
+  // Determine # of full ucs-2 chars in the chunk, and whether we started on
+  // an odd byte boundary.
+  bool odd_start = (current.byte_pos % 2) == 1;
+  size_t number_chars = (current.byte_length - odd_start) / 2;
+
+  // Point the buffer_*_ members into the current chunk and set buffer_cursor_
+  // to point to position. Be careful when converting the byte positions (in
+  // Chunk) to the ucs-2 character positions (in buffer_*_ members).
+  buffer_start_ = reinterpret_cast<const uint16_t*>(current.data + odd_start);
+  buffer_end_ = buffer_start_ + number_chars;
+  buffer_pos_ = (current.byte_pos + odd_start) / 2;
+  buffer_cursor_ = buffer_start_ + (position - buffer_pos_);
+  DCHECK_EQ(position, pos());
+  return true;
+}
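+
+// For example, a chunk with byte_pos == 5 and byte_length == 9 starts in the
+// middle of a character (odd_start), so it contributes (9 - 1) / 2 == 4 full
+// ucs-2 characters at character positions 3..6 (buffer_pos_ == (5 + 1) / 2).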
+
+// ----------------------------------------------------------------------------
+// ScannerStream: Create stream instances.
+
+Utf16CharacterStream* ScannerStream::For(Handle<String> data) {
+  return ScannerStream::For(data, 0, data->length());
+}
+
+Utf16CharacterStream* ScannerStream::For(Handle<String> data, int start_pos,
+                                         int end_pos) {
+  DCHECK(start_pos >= 0);
+  DCHECK(end_pos <= data->length());
+  if (data->IsExternalOneByteString()) {
+    return new ExternalOneByteStringUtf16CharacterStream(
+        Handle<ExternalOneByteString>::cast(data), start_pos, end_pos);
+  } else if (data->IsExternalTwoByteString()) {
+    return new ExternalTwoByteStringUtf16CharacterStream(
+        Handle<ExternalTwoByteString>::cast(data), start_pos, end_pos);
+  } else {
+    // TODO(vogelheim): Maybe call data.Flatten() first?
+    return new GenericStringUtf16CharacterStream(data, start_pos, end_pos);
+  }
+}
+
+std::unique_ptr<Utf16CharacterStream> ScannerStream::ForTesting(
+    const char* data) {
+  return ScannerStream::ForTesting(data, strlen(data));
+}
+
+std::unique_ptr<Utf16CharacterStream> ScannerStream::ForTesting(
+    const char* data, size_t length) {
+  return std::unique_ptr<Utf16CharacterStream>(
+      new ExternalOneByteStringUtf16CharacterStream(data, length));
+}
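+
+// A minimal usage sketch (assuming Utf16CharacterStream::Advance() from
+// src/parsing/scanner.h): callers create a stream via the factory and pull
+// characters from it, e.g. in a test:
+//
+//   std::unique_ptr<Utf16CharacterStream> stream =
+//       ScannerStream::ForTesting("var x;");
+//   uc32 c = stream->Advance();  // 'v'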
+
+Utf16CharacterStream* ScannerStream::For(
+    ScriptCompiler::ExternalSourceStream* source_stream,
+    v8::ScriptCompiler::StreamedSource::Encoding encoding) {
+  switch (encoding) {
+    case v8::ScriptCompiler::StreamedSource::TWO_BYTE:
+      return new TwoByteExternalStreamingStream(source_stream);
+    case v8::ScriptCompiler::StreamedSource::ONE_BYTE:
+      return new OneByteExternalStreamingStream(source_stream);
+    case v8::ScriptCompiler::StreamedSource::UTF8:
+      return new Utf8ExternalStreamingStream(source_stream);
+  }
+  UNREACHABLE();
+  return nullptr;
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/parsing/scanner-character-streams.h b/src/parsing/scanner-character-streams.h
index 94d8284..ac81613 100644
--- a/src/parsing/scanner-character-streams.h
+++ b/src/parsing/scanner-character-streams.h
@@ -5,187 +5,27 @@
 #ifndef V8_PARSING_SCANNER_CHARACTER_STREAMS_H_
 #define V8_PARSING_SCANNER_CHARACTER_STREAMS_H_
 
+#include "include/v8.h"  // for v8::ScriptCompiler
 #include "src/handles.h"
-#include "src/parsing/scanner.h"
-#include "src/vector.h"
 
 namespace v8 {
 namespace internal {
 
-// Forward declarations.
-class ExternalTwoByteString;
-class ExternalOneByteString;
+class Utf16CharacterStream;
 
-// A buffered character stream based on a random access character
-// source (ReadBlock can be called with pos_ pointing to any position,
-// even positions before the current).
-class BufferedUtf16CharacterStream: public Utf16CharacterStream {
+class ScannerStream {
  public:
-  BufferedUtf16CharacterStream();
-  ~BufferedUtf16CharacterStream() override;
-
-  void PushBack(uc32 character) override;
-
- protected:
-  static const size_t kBufferSize = 512;
-  static const size_t kPushBackStepSize = 16;
-
-  size_t SlowSeekForward(size_t delta) override;
-  bool ReadBlock() override;
-  virtual void SlowPushBack(uc16 character);
-
-  virtual size_t BufferSeekForward(size_t delta) = 0;
-  virtual size_t FillBuffer(size_t position) = 0;
-
-  const uc16* pushback_limit_;
-  uc16 buffer_[kBufferSize];
-};
-
-
-// Generic string stream.
-class GenericStringUtf16CharacterStream: public BufferedUtf16CharacterStream {
- public:
-  GenericStringUtf16CharacterStream(Handle<String> data, size_t start_position,
-                                    size_t end_position);
-  ~GenericStringUtf16CharacterStream() override;
-
-  bool SetBookmark() override;
-  void ResetToBookmark() override;
-
- protected:
-  static const size_t kNoBookmark = -1;
-
-  size_t BufferSeekForward(size_t delta) override;
-  size_t FillBuffer(size_t position) override;
-
-  Handle<String> string_;
-  size_t length_;
-  size_t bookmark_;
-};
-
-
-// ExternalStreamingStream is a wrapper around an ExternalSourceStream (see
-// include/v8.h) subclass implemented by the embedder.
-class ExternalStreamingStream : public BufferedUtf16CharacterStream {
- public:
-  ExternalStreamingStream(ScriptCompiler::ExternalSourceStream* source_stream,
-                          v8::ScriptCompiler::StreamedSource::Encoding encoding)
-      : source_stream_(source_stream),
-        encoding_(encoding),
-        current_data_(NULL),
-        current_data_offset_(0),
-        current_data_length_(0),
-        utf8_split_char_buffer_length_(0),
-        bookmark_(0),
-        bookmark_data_is_from_current_data_(false),
-        bookmark_data_offset_(0),
-        bookmark_utf8_split_char_buffer_length_(0) {}
-
-  ~ExternalStreamingStream() override {
-    delete[] current_data_;
-    bookmark_buffer_.Dispose();
-    bookmark_data_.Dispose();
-  }
-
-  size_t BufferSeekForward(size_t delta) override {
-    // We never need to seek forward when streaming scripts. We only seek
-    // forward when we want to parse a function whose location we already know,
-    // and when streaming, we don't know the locations of anything we haven't
-    // seen yet.
-    UNREACHABLE();
-    return 0;
-  }
-
-  size_t FillBuffer(size_t position) override;
-
-  bool SetBookmark() override;
-  void ResetToBookmark() override;
-
- private:
-  void HandleUtf8SplitCharacters(size_t* data_in_buffer);
-  void FlushCurrent();
-
-  ScriptCompiler::ExternalSourceStream* source_stream_;
-  v8::ScriptCompiler::StreamedSource::Encoding encoding_;
-  const uint8_t* current_data_;
-  size_t current_data_offset_;
-  size_t current_data_length_;
-  // For converting UTF-8 characters which are split across two data chunks.
-  uint8_t utf8_split_char_buffer_[4];
-  size_t utf8_split_char_buffer_length_;
-
-  // Bookmark support. See comments in ExternalStreamingStream::SetBookmark
-  // for additional details.
-  size_t bookmark_;
-  Vector<uint16_t> bookmark_buffer_;
-  Vector<uint8_t> bookmark_data_;
-  bool bookmark_data_is_from_current_data_;
-  size_t bookmark_data_offset_;
-  uint8_t bookmark_utf8_split_char_buffer_[4];
-  size_t bookmark_utf8_split_char_buffer_length_;
-};
-
-
-// UTF16 buffer to read characters from an external string.
-class ExternalTwoByteStringUtf16CharacterStream: public Utf16CharacterStream {
- public:
-  ExternalTwoByteStringUtf16CharacterStream(Handle<ExternalTwoByteString> data,
-                                            int start_position,
-                                            int end_position);
-  ~ExternalTwoByteStringUtf16CharacterStream() override;
-
-  void PushBack(uc32 character) override {
-    DCHECK(buffer_cursor_ > raw_data_);
-    pos_--;
-    if (character != kEndOfInput) {
-      buffer_cursor_--;
-    }
-  }
-
-  bool SetBookmark() override;
-  void ResetToBookmark() override;
-
- private:
-  size_t SlowSeekForward(size_t delta) override {
-    // Fast case always handles seeking.
-    return 0;
-  }
-  bool ReadBlock() override {
-    // Entire string is read at start.
-    return false;
-  }
-  const uc16* raw_data_;  // Pointer to the actual array of characters.
-
-  static const size_t kNoBookmark = -1;
-
-  size_t bookmark_;
-};
-
-// UTF16 buffer to read characters from an external latin1 string.
-class ExternalOneByteStringUtf16CharacterStream
-    : public BufferedUtf16CharacterStream {
- public:
-  ExternalOneByteStringUtf16CharacterStream(Handle<ExternalOneByteString> data,
-                                            int start_position,
-                                            int end_position);
-  ~ExternalOneByteStringUtf16CharacterStream() override;
+  static Utf16CharacterStream* For(Handle<String> data);
+  static Utf16CharacterStream* For(Handle<String> data, int start_pos,
+                                   int end_pos);
+  static Utf16CharacterStream* For(
+      ScriptCompiler::ExternalSourceStream* source_stream,
+      ScriptCompiler::StreamedSource::Encoding encoding);
 
   // For testing:
-  explicit ExternalOneByteStringUtf16CharacterStream(const char* data);
-  ExternalOneByteStringUtf16CharacterStream(const char* data, size_t length);
-
-  bool SetBookmark() override;
-  void ResetToBookmark() override;
-
- private:
-  static const size_t kNoBookmark = -1;
-
-  size_t BufferSeekForward(size_t delta) override;
-  size_t FillBuffer(size_t position) override;
-
-  const uint8_t* raw_data_;  // Pointer to the actual array of characters.
-  size_t length_;
-  size_t bookmark_;
+  static std::unique_ptr<Utf16CharacterStream> ForTesting(const char* data);
+  static std::unique_ptr<Utf16CharacterStream> ForTesting(const char* data,
+                                                          size_t length);
 };
 
 }  // namespace internal
diff --git a/src/parsing/scanner.cc b/src/parsing/scanner.cc
index 06ead2e..e41b56f 100644
--- a/src/parsing/scanner.cc
+++ b/src/parsing/scanner.cc
@@ -14,7 +14,7 @@
 #include "src/char-predicates-inl.h"
 #include "src/conversions-inl.h"
 #include "src/list-inl.h"
-#include "src/parsing/parser.h"
+#include "src/parsing/duplicate-finder.h"  // For Scanner::FindSymbol
 
 namespace v8 {
 namespace internal {
@@ -26,25 +26,60 @@
   return isolate->factory()->InternalizeTwoByteString(two_byte_literal());
 }
 
+// ----------------------------------------------------------------------------
+// Scanner::BookmarkScope
 
-// Default implementation for streams that do not support bookmarks.
-bool Utf16CharacterStream::SetBookmark() { return false; }
-void Utf16CharacterStream::ResetToBookmark() { UNREACHABLE(); }
+const size_t Scanner::BookmarkScope::kBookmarkAtFirstPos =
+    std::numeric_limits<size_t>::max() - 2;
+const size_t Scanner::BookmarkScope::kNoBookmark =
+    std::numeric_limits<size_t>::max() - 1;
+const size_t Scanner::BookmarkScope::kBookmarkWasApplied =
+    std::numeric_limits<size_t>::max();
 
+void Scanner::BookmarkScope::Set() {
+  DCHECK_EQ(bookmark_, kNoBookmark);
+  DCHECK_EQ(scanner_->next_next_.token, Token::UNINITIALIZED);
+
+  // The first token is a bit special, since current_ will still be
+  // uninitialized. In this case, store kBookmarkAtFirstPos and special-case
+  // it when applying the bookmark.
+  DCHECK_IMPLIES(
+      scanner_->current_.token == Token::UNINITIALIZED,
+      scanner_->current_.location.beg_pos == scanner_->next_.location.beg_pos);
+  bookmark_ = (scanner_->current_.token == Token::UNINITIALIZED)
+                  ? kBookmarkAtFirstPos
+                  : scanner_->location().beg_pos;
+}
+
+void Scanner::BookmarkScope::Apply() {
+  DCHECK(HasBeenSet());  // Caller hasn't called Set().
+  if (bookmark_ == kBookmarkAtFirstPos) {
+    scanner_->SeekNext(0);
+  } else {
+    scanner_->SeekNext(bookmark_);
+    scanner_->Next();
+    DCHECK_EQ(scanner_->location().beg_pos, bookmark_);
+  }
+  bookmark_ = kBookmarkWasApplied;
+}
+
+bool Scanner::BookmarkScope::HasBeenSet() {
+  return bookmark_ != kNoBookmark && bookmark_ != kBookmarkWasApplied;
+}
+
+bool Scanner::BookmarkScope::HasBeenApplied() {
+  return bookmark_ == kBookmarkWasApplied;
+}
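+
+// A rough usage sketch (assuming BookmarkScope is constructed with the
+// Scanner*, as declared in scanner.h): a caller marks a position, scans
+// ahead speculatively, and rewinds if the speculation fails:
+//
+//   Scanner::BookmarkScope bookmark(scanner);
+//   bookmark.Set();
+//   ... speculative scanning via scanner->Next() ...
+//   if (!success && bookmark.HasBeenSet()) bookmark.Apply();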
 
 // ----------------------------------------------------------------------------
 // Scanner
 
 Scanner::Scanner(UnicodeCache* unicode_cache)
     : unicode_cache_(unicode_cache),
-      bookmark_c0_(kNoBookmark),
       octal_pos_(Location::invalid()),
       decimal_with_leading_zero_pos_(Location::invalid()),
       found_html_comment_(false) {
-  bookmark_current_.literal_chars = &bookmark_current_literal_;
-  bookmark_current_.raw_literal_chars = &bookmark_current_raw_literal_;
-  bookmark_next_.literal_chars = &bookmark_next_literal_;
-  bookmark_next_.raw_literal_chars = &bookmark_next_raw_literal_;
 }
 
 
@@ -305,14 +340,14 @@
   return c == 0xFFFE;
 }
 
-
 bool Scanner::SkipWhiteSpace() {
   int start_position = source_pos();
 
   while (true) {
     while (true) {
-      // The unicode cache accepts unsigned inputs.
-      if (c0_ < 0) break;
+      // Don't skip behind the end of input.
+      if (c0_ == kEndOfInput) break;
+
       // Advance as long as character is a WhiteSpace or LineTerminator.
       // Remember if the latter is the case.
       if (unicode_cache_->IsLineTerminator(c0_)) {
@@ -328,25 +363,27 @@
     // line (with only whitespace in front of it), we treat the rest
     // of the line as a comment. This is in line with the way
     // SpiderMonkey handles it.
-    if (c0_ == '-' && has_line_terminator_before_next_) {
-      Advance();
-      if (c0_ == '-') {
-        Advance();
-        if (c0_ == '>') {
-          // Treat the rest of the line as a comment.
-          SkipSingleLineComment();
-          // Continue skipping white space after the comment.
-          continue;
-        }
-        PushBack('-');  // undo Advance()
-      }
-      PushBack('-');  // undo Advance()
-    }
-    // Return whether or not we skipped any characters.
-    return source_pos() != start_position;
-  }
-}
+    if (c0_ != '-' || !has_line_terminator_before_next_) break;
 
+    Advance();
+    if (c0_ != '-') {
+      PushBack('-');  // undo Advance()
+      break;
+    }
+
+    Advance();
+    if (c0_ != '>') {
+      PushBack2('-', '-');  // undo 2x Advance();
+      break;
+    }
+
+    // Treat the rest of the line as a comment.
+    SkipSingleLineComment();
+  }
+
+  // Return whether or not we skipped any characters.
+  return source_pos() != start_position;
+}
 
 Token::Value Scanner::SkipSingleLineComment() {
   Advance();
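
For context, the restructured SkipWhiteSpace above keeps the SpiderMonkey-compatible rule that "-->" at the beginning of a line (with only whitespace or line terminators before it) turns the rest of that line into a comment. The following is a minimal standalone sketch of just that rule; IsHtmlCommentClose is a hypothetical helper over ASCII input, not the real scanner.

#include <cassert>
#include <cstddef>
#include <string>

// Returns true if the rest of the line starting at pos should be treated as a
// comment: it begins with "-->" and only whitespace/line terminators precede
// it on that line (tracked by the caller, like has_line_terminator_before_next_).
static bool IsHtmlCommentClose(const std::string& src, std::size_t pos,
                               bool at_line_start) {
  return at_line_start && src.compare(pos, 3, "-->") == 0;
}

int main() {
  std::string a = "-->(ignored)\nx = 1;";
  std::string b = "x --> 1;";  // Not at line start: scanned as x-- > 1 instead.
  assert(IsHtmlCommentClose(a, 0, true));
  assert(!IsHtmlCommentClose(b, 2, false));
  return 0;
}
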
@@ -356,7 +393,7 @@
   // separately by the lexical grammar and becomes part of the
   // stream of input elements for the syntactic grammar (see
   // ECMA-262, section 7.4).
-  while (c0_ >= 0 && !unicode_cache_->IsLineTerminator(c0_)) {
+  while (c0_ != kEndOfInput && !unicode_cache_->IsLineTerminator(c0_)) {
     Advance();
   }
 
@@ -366,7 +403,7 @@
 
 Token::Value Scanner::SkipSourceURLComment() {
   TryToParseSourceURLComment();
-  while (c0_ >= 0 && !unicode_cache_->IsLineTerminator(c0_)) {
+  while (c0_ != kEndOfInput && !unicode_cache_->IsLineTerminator(c0_)) {
     Advance();
   }
 
@@ -377,11 +414,11 @@
 void Scanner::TryToParseSourceURLComment() {
   // Magic comments are of the form: //[#@]\s<name>=\s*<value>\s*.* and this
   // function will just return if it cannot parse a magic comment.
-  if (c0_ < 0 || !unicode_cache_->IsWhiteSpace(c0_)) return;
+  if (c0_ == kEndOfInput || !unicode_cache_->IsWhiteSpace(c0_)) return;
   Advance();
   LiteralBuffer name;
-  while (c0_ >= 0 && !unicode_cache_->IsWhiteSpaceOrLineTerminator(c0_) &&
-         c0_ != '=') {
+  while (c0_ != kEndOfInput &&
+         !unicode_cache_->IsWhiteSpaceOrLineTerminator(c0_) && c0_ != '=') {
     name.AddChar(c0_);
     Advance();
   }
@@ -399,10 +436,10 @@
     return;
   Advance();
   value->Reset();
-  while (c0_ >= 0 && unicode_cache_->IsWhiteSpace(c0_)) {
+  while (c0_ != kEndOfInput && unicode_cache_->IsWhiteSpace(c0_)) {
     Advance();
   }
-  while (c0_ >= 0 && !unicode_cache_->IsLineTerminator(c0_)) {
+  while (c0_ != kEndOfInput && !unicode_cache_->IsLineTerminator(c0_)) {
     // Disallowed characters.
     if (c0_ == '"' || c0_ == '\'') {
       value->Reset();
@@ -415,7 +452,7 @@
     Advance();
   }
   // Allow whitespace at the end.
-  while (c0_ >= 0 && !unicode_cache_->IsLineTerminator(c0_)) {
+  while (c0_ != kEndOfInput && !unicode_cache_->IsLineTerminator(c0_)) {
     if (!unicode_cache_->IsWhiteSpace(c0_)) {
       value->Reset();
       break;
@@ -429,10 +466,10 @@
   DCHECK(c0_ == '*');
   Advance();
 
-  while (c0_ >= 0) {
+  while (c0_ != kEndOfInput) {
     uc32 ch = c0_;
     Advance();
-    if (c0_ >= 0 && unicode_cache_->IsLineTerminator(ch)) {
+    if (c0_ != kEndOfInput && unicode_cache_->IsLineTerminator(ch)) {
       // Following ECMA-262, section 7.4, a comment containing
       // a newline will make the comment count as a line-terminator.
       has_multiline_comment_before_next_ = true;
@@ -450,24 +487,24 @@
   return Token::ILLEGAL;
 }
 
-
 Token::Value Scanner::ScanHtmlComment() {
   // Check for <!-- comments.
   DCHECK(c0_ == '!');
   Advance();
-  if (c0_ == '-') {
-    Advance();
-    if (c0_ == '-') {
-      found_html_comment_ = true;
-      return SkipSingleLineComment();
-    }
-    PushBack('-');  // undo Advance()
+  if (c0_ != '-') {
+    PushBack('!');  // undo Advance()
+    return Token::LT;
   }
-  PushBack('!');  // undo Advance()
-  DCHECK(c0_ == '!');
-  return Token::LT;
-}
 
+  Advance();
+  if (c0_ != '-') {
+    PushBack2('-', '!');  // undo 2x Advance()
+    return Token::LT;
+  }
+
+  found_html_comment_ = true;
+  return SkipSingleLineComment();
+}
 
 void Scanner::Scan() {
   next_.literal_chars = NULL;
@@ -716,7 +753,7 @@
         break;
 
       default:
-        if (c0_ < 0) {
+        if (c0_ == kEndOfInput) {
           token = Token::EOS;
         } else if (unicode_cache_->IsIdentifierStart(c0_)) {
           token = ScanIdentifierOrKeyword();
@@ -790,7 +827,7 @@
   // Positions inside the lookahead token aren't supported.
   DCHECK(pos >= current_pos);
   if (pos != current_pos) {
-    source_->SeekForward(pos - source_->pos());
+    source_->Seek(pos);
     Advance();
     // This function is only called to seek to the location
     // of the end of a function (at the "}" token). It doesn't matter
@@ -808,7 +845,8 @@
   Advance<capture_raw>();
 
   // Skip escaped newlines.
-  if (!in_template_literal && c0_ >= 0 && unicode_cache_->IsLineTerminator(c)) {
+  if (!in_template_literal && c0_ != kEndOfInput &&
+      unicode_cache_->IsLineTerminator(c)) {
     // Allow CR+LF newlines in multiline string literals.
     if (IsCarriageReturn(c) && IsLineFeed(c0_)) Advance<capture_raw>();
     // Allow LF+CR newlines in multiline string literals.
@@ -894,7 +932,7 @@
       HandleLeadSurrogate();
       break;
     }
-    if (c0_ < 0 || c0_ == '\n' || c0_ == '\r') return Token::ILLEGAL;
+    if (c0_ == kEndOfInput || c0_ == '\n' || c0_ == '\r') return Token::ILLEGAL;
     if (c0_ == quote) {
       literal.Complete();
       Advance<false, false>();
@@ -906,12 +944,12 @@
     AddLiteralChar(c);
   }
 
-  while (c0_ != quote && c0_ >= 0
-         && !unicode_cache_->IsLineTerminator(c0_)) {
+  while (c0_ != quote && c0_ != kEndOfInput &&
+         !unicode_cache_->IsLineTerminator(c0_)) {
     uc32 c = c0_;
     Advance();
     if (c == '\\') {
-      if (c0_ < 0 || !ScanEscape<false, false>()) {
+      if (c0_ == kEndOfInput || !ScanEscape<false, false>()) {
         return Token::ILLEGAL;
       }
     } else {
@@ -957,7 +995,7 @@
       ReduceRawLiteralLength(2);
       break;
     } else if (c == '\\') {
-      if (c0_ > 0 && unicode_cache_->IsLineTerminator(c0_)) {
+      if (c0_ != kEndOfInput && unicode_cache_->IsLineTerminator(c0_)) {
         // The TV of LineContinuation :: \ LineTerminatorSequence is the empty
         // code unit sequence.
         uc32 lastChar = c0_;
@@ -1155,7 +1193,7 @@
   // section 7.8.3, page 17 (note that we read only one decimal digit
   // if the value is 0).
   if (IsDecimalDigit(c0_) ||
-      (c0_ >= 0 && unicode_cache_->IsIdentifierStart(c0_)))
+      (c0_ != kEndOfInput && unicode_cache_->IsIdentifierStart(c0_)))
     return Token::ILLEGAL;
 
   literal.Complete();
@@ -1382,7 +1420,7 @@
   }
 
   // Scan the rest of the identifier characters.
-  while (c0_ >= 0 && unicode_cache_->IsIdentifierPart(c0_)) {
+  while (c0_ != kEndOfInput && unicode_cache_->IsIdentifierPart(c0_)) {
     if (c0_ != '\\') {
       uc32 next_char = c0_;
       Advance();
@@ -1408,7 +1446,7 @@
 Token::Value Scanner::ScanIdentifierSuffix(LiteralScope* literal,
                                            bool escaped) {
   // Scan the rest of the identifier characters.
-  while (c0_ >= 0 && unicode_cache_->IsIdentifierPart(c0_)) {
+  while (c0_ != kEndOfInput && unicode_cache_->IsIdentifierPart(c0_)) {
     if (c0_ == '\\') {
       uc32 c = ScanIdentifierUnicodeEscape();
       escaped = true;
@@ -1465,10 +1503,12 @@
   }
 
   while (c0_ != '/' || in_character_class) {
-    if (c0_ < 0 || unicode_cache_->IsLineTerminator(c0_)) return false;
+    if (c0_ == kEndOfInput || unicode_cache_->IsLineTerminator(c0_))
+      return false;
     if (c0_ == '\\') {  // Escape sequence.
       AddLiteralCharAdvance();
-      if (c0_ < 0 || unicode_cache_->IsLineTerminator(c0_)) return false;
+      if (c0_ == kEndOfInput || unicode_cache_->IsLineTerminator(c0_))
+        return false;
       AddLiteralCharAdvance();
       // If the escape allows more characters, i.e., \x??, \u????, or \c?,
       // only "safe" characters are allowed (letters, digits, underscore),
@@ -1499,7 +1539,7 @@
 
   // Scan regular expression flags.
   int flags = 0;
-  while (c0_ >= 0 && unicode_cache_->IsIdentifierPart(c0_)) {
+  while (c0_ != kEndOfInput && unicode_cache_->IsIdentifierPart(c0_)) {
     RegExp::Flags flag = RegExp::kNone;
     switch (c0_) {
       case 'g':
@@ -1574,202 +1614,31 @@
 
 
 int Scanner::FindSymbol(DuplicateFinder* finder, int value) {
+  // TODO(vogelheim): Move this logic into the calling class; this can be fully
+  //                  implemented using the public interface.
   if (is_literal_one_byte()) {
     return finder->AddOneByteSymbol(literal_one_byte_string(), value);
   }
   return finder->AddTwoByteSymbol(literal_two_byte_string(), value);
 }
 
+void Scanner::SeekNext(size_t position) {
+  // Use with care: This cleanly resets most, but not all scanner state.
+  // TODO(vogelheim): Fix this, or at least DCHECK the relevant conditions.
 
-bool Scanner::SetBookmark() {
-  if (c0_ != kNoBookmark && bookmark_c0_ == kNoBookmark &&
-      next_next_.token == Token::UNINITIALIZED && source_->SetBookmark()) {
-    bookmark_c0_ = c0_;
-    CopyTokenDesc(&bookmark_current_, &current_);
-    CopyTokenDesc(&bookmark_next_, &next_);
-    return true;
-  }
-  return false;
-}
-
-
-void Scanner::ResetToBookmark() {
-  DCHECK(BookmarkHasBeenSet());  // Caller hasn't called SetBookmark.
-
-  source_->ResetToBookmark();
-  c0_ = bookmark_c0_;
-  CopyToNextTokenDesc(&bookmark_current_);
-  current_ = next_;
-  CopyToNextTokenDesc(&bookmark_next_);
-  bookmark_c0_ = kBookmarkWasApplied;
-}
-
-
-bool Scanner::BookmarkHasBeenSet() { return bookmark_c0_ >= 0; }
-
-
-bool Scanner::BookmarkHasBeenReset() {
-  return bookmark_c0_ == kBookmarkWasApplied;
-}
-
-
-void Scanner::DropBookmark() { bookmark_c0_ = kNoBookmark; }
-
-void Scanner::CopyToNextTokenDesc(TokenDesc* from) {
-  StartLiteral();
-  StartRawLiteral();
-  CopyTokenDesc(&next_, from);
-  if (next_.literal_chars->length() == 0) next_.literal_chars = nullptr;
-  if (next_.raw_literal_chars->length() == 0) next_.raw_literal_chars = nullptr;
-}
-
-void Scanner::CopyTokenDesc(TokenDesc* to, TokenDesc* from) {
-  DCHECK_NOT_NULL(to);
-  DCHECK_NOT_NULL(from);
-  to->token = from->token;
-  to->location = from->location;
-  to->literal_chars->CopyFrom(from->literal_chars);
-  to->raw_literal_chars->CopyFrom(from->raw_literal_chars);
-}
-
-
-int DuplicateFinder::AddOneByteSymbol(Vector<const uint8_t> key, int value) {
-  return AddSymbol(key, true, value);
-}
-
-
-int DuplicateFinder::AddTwoByteSymbol(Vector<const uint16_t> key, int value) {
-  return AddSymbol(Vector<const uint8_t>::cast(key), false, value);
-}
-
-
-int DuplicateFinder::AddSymbol(Vector<const uint8_t> key,
-                               bool is_one_byte,
-                               int value) {
-  uint32_t hash = Hash(key, is_one_byte);
-  byte* encoding = BackupKey(key, is_one_byte);
-  base::HashMap::Entry* entry = map_.LookupOrInsert(encoding, hash);
-  int old_value = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
-  entry->value =
-    reinterpret_cast<void*>(static_cast<intptr_t>(value | old_value));
-  return old_value;
-}
-
-
-int DuplicateFinder::AddNumber(Vector<const uint8_t> key, int value) {
-  DCHECK(key.length() > 0);
-  // Quick check for already being in canonical form.
-  if (IsNumberCanonical(key)) {
-    return AddOneByteSymbol(key, value);
-  }
-
-  int flags = ALLOW_HEX | ALLOW_OCTAL | ALLOW_IMPLICIT_OCTAL | ALLOW_BINARY;
-  double double_value = StringToDouble(
-      unicode_constants_, key, flags, 0.0);
-  int length;
-  const char* string;
-  if (!std::isfinite(double_value)) {
-    string = "Infinity";
-    length = 8;  // strlen("Infinity");
-  } else {
-    string = DoubleToCString(double_value,
-                             Vector<char>(number_buffer_, kBufferSize));
-    length = StrLength(string);
-  }
-  return AddSymbol(Vector<const byte>(reinterpret_cast<const byte*>(string),
-                                      length), true, value);
-}
-
-
-bool DuplicateFinder::IsNumberCanonical(Vector<const uint8_t> number) {
-  // Test for a safe approximation of number literals that are already
-  // in canonical form: max 15 digits, no leading zeroes, except an
-  // integer part that is a single zero, and no trailing zeros below
-  // the decimal point.
-  int pos = 0;
-  int length = number.length();
-  if (number.length() > 15) return false;
-  if (number[pos] == '0') {
-    pos++;
-  } else {
-    while (pos < length &&
-           static_cast<unsigned>(number[pos] - '0') <= ('9' - '0')) pos++;
-  }
-  if (length == pos) return true;
-  if (number[pos] != '.') return false;
-  pos++;
-  bool invalid_last_digit = true;
-  while (pos < length) {
-    uint8_t digit = number[pos] - '0';
-    if (digit > '9' - '0') return false;
-    invalid_last_digit = (digit == 0);
-    pos++;
-  }
-  return !invalid_last_digit;
-}
-
-
-uint32_t DuplicateFinder::Hash(Vector<const uint8_t> key, bool is_one_byte) {
-  // Primitive hash function, almost identical to the one used
-  // for strings (except that it's seeded by the length and representation).
-  int length = key.length();
-  uint32_t hash = (length << 1) | (is_one_byte ? 1 : 0);
-  for (int i = 0; i < length; i++) {
-    uint32_t c = key[i];
-    hash = (hash + c) * 1025;
-    hash ^= (hash >> 6);
-  }
-  return hash;
-}
-
-
-bool DuplicateFinder::Match(void* first, void* second) {
-  // Decode lengths.
-  // Length + representation is encoded as base 128, most significant heptet
-  // first, with a 8th bit being non-zero while there are more heptets.
-  // The value encodes the number of bytes following, and whether the original
-  // was Latin1.
-  byte* s1 = reinterpret_cast<byte*>(first);
-  byte* s2 = reinterpret_cast<byte*>(second);
-  uint32_t length_one_byte_field = 0;
-  byte c1;
-  do {
-    c1 = *s1;
-    if (c1 != *s2) return false;
-    length_one_byte_field = (length_one_byte_field << 7) | (c1 & 0x7f);
-    s1++;
-    s2++;
-  } while ((c1 & 0x80) != 0);
-  int length = static_cast<int>(length_one_byte_field >> 1);
-  return memcmp(s1, s2, length) == 0;
-}
-
-
-byte* DuplicateFinder::BackupKey(Vector<const uint8_t> bytes,
-                                 bool is_one_byte) {
-  uint32_t one_byte_length = (bytes.length() << 1) | (is_one_byte ? 1 : 0);
-  backing_store_.StartSequence();
-  // Emit one_byte_length as base-128 encoded number, with the 7th bit set
-  // on the byte of every heptet except the last, least significant, one.
-  if (one_byte_length >= (1 << 7)) {
-    if (one_byte_length >= (1 << 14)) {
-      if (one_byte_length >= (1 << 21)) {
-        if (one_byte_length >= (1 << 28)) {
-          backing_store_.Add(
-              static_cast<uint8_t>((one_byte_length >> 28) | 0x80));
-        }
-        backing_store_.Add(
-            static_cast<uint8_t>((one_byte_length >> 21) | 0x80u));
-      }
-      backing_store_.Add(
-          static_cast<uint8_t>((one_byte_length >> 14) | 0x80u));
-    }
-    backing_store_.Add(static_cast<uint8_t>((one_byte_length >> 7) | 0x80u));
-  }
-  backing_store_.Add(static_cast<uint8_t>(one_byte_length & 0x7f));
-
-  backing_store_.AddBlock(bytes);
-  return backing_store_.EndSequence().start();
+  // To re-scan from a given character position, we need to:
+  // 1, Reset the current_, next_ and next_next_ tokens
+  //    (next_ + next_next_ will be overwritten by Next();
+  //     current_ would otherwise remain unchanged, so overwrite it fully.)
+  current_ = {{0, 0}, nullptr, nullptr, 0, Token::UNINITIALIZED};
+  next_.token = Token::UNINITIALIZED;
+  next_next_.token = Token::UNINITIALIZED;
+  // 2, reset the source to the desired position,
+  source_->Seek(position);
+  // 3, re-scan, by scanning the look-ahead char + 1 token (next_).
+  c0_ = source_->Advance();
+  Next();
+  DCHECK_EQ(next_.location.beg_pos, position);
 }
 
 }  // namespace internal
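
For illustration, here is a minimal standalone sketch of the position-based rewind idea introduced above: a bookmark records only a source position, and applying it seeks back and re-scans, much as BookmarkScope::Apply() funnels into Scanner::SeekNext(). ToyScanner and ToyBookmark are hypothetical, simplified stand-ins, not the real V8 interfaces.

#include <cassert>
#include <cstddef>
#include <string>

class ToyScanner {
 public:
  explicit ToyScanner(const std::string& source) : source_(source) {}

  // Returns the next whitespace-delimited token, or "" at end of input.
  std::string Next() {
    while (pos_ < source_.size() && source_[pos_] == ' ') ++pos_;
    std::size_t start = pos_;
    while (pos_ < source_.size() && source_[pos_] != ' ') ++pos_;
    return source_.substr(start, pos_ - start);
  }

  std::size_t pos() const { return pos_; }
  void Seek(std::size_t pos) { pos_ = pos; }  // Stands in for SeekNext().

 private:
  std::string source_;
  std::size_t pos_ = 0;
};

class ToyBookmark {
 public:
  explicit ToyBookmark(ToyScanner* scanner) : scanner_(scanner) {}
  void Set() { bookmark_ = scanner_->pos(); }  // Remember a position only.
  void Apply() { scanner_->Seek(bookmark_); }  // Rewind; caller re-scans.

 private:
  ToyScanner* scanner_;
  std::size_t bookmark_ = 0;
};

int main() {
  ToyScanner scanner("function ( a , b )");
  ToyBookmark bookmark(&scanner);
  bookmark.Set();
  assert(scanner.Next() == "function");
  assert(scanner.Next() == "(");
  bookmark.Apply();  // Speculative parse failed: rewind and re-scan.
  assert(scanner.Next() == "function");
  return 0;
}
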
diff --git a/src/parsing/scanner.h b/src/parsing/scanner.h
index 66c6ce8..b2b1a8a 100644
--- a/src/parsing/scanner.h
+++ b/src/parsing/scanner.h
@@ -8,12 +8,9 @@
 #define V8_PARSING_SCANNER_H_
 
 #include "src/allocation.h"
-#include "src/base/hashmap.h"
 #include "src/base/logging.h"
 #include "src/char-predicates.h"
-#include "src/collector.h"
 #include "src/globals.h"
-#include "src/list.h"
 #include "src/messages.h"
 #include "src/parsing/token.h"
 #include "src/unicode-decoder.h"
@@ -25,127 +22,127 @@
 
 class AstRawString;
 class AstValueFactory;
+class DuplicateFinder;
+class ExternalOneByteString;
+class ExternalTwoByteString;
 class ParserRecorder;
 class UnicodeCache;
 
-
 // ---------------------------------------------------------------------
 // Buffered stream of UTF-16 code units, using an internal UTF-16 buffer.
 // A code unit is a 16 bit value representing either a 16 bit code point
 // or one part of a surrogate pair that make a single 21 bit code point.
-
 class Utf16CharacterStream {
  public:
-  Utf16CharacterStream() : pos_(0) { }
+  static const uc32 kEndOfInput = -1;
+
   virtual ~Utf16CharacterStream() { }
 
   // Returns and advances past the next UTF-16 code unit in the input
-  // stream. If there are no more code units, it returns a negative
-  // value.
+  // stream. If there are no more code units it returns kEndOfInput.
   inline uc32 Advance() {
-    if (buffer_cursor_ < buffer_end_ || ReadBlock()) {
-      pos_++;
+    if (V8_LIKELY(buffer_cursor_ < buffer_end_)) {
       return static_cast<uc32>(*(buffer_cursor_++));
+    } else if (ReadBlock()) {
+      return static_cast<uc32>(*(buffer_cursor_++));
+    } else {
+      // Note: currently the following increment is necessary to avoid a
+      // parser problem! The scanner treats the final kEndOfInput as
+      // a code unit with a position, and does math relative to that
+      // position.
+      buffer_cursor_++;
+      return kEndOfInput;
     }
-    // Note: currently the following increment is necessary to avoid a
-    // parser problem! The scanner treats the final kEndOfInput as
-    // a code unit with a position, and does math relative to that
-    // position.
-    pos_++;
-
-    return kEndOfInput;
   }
 
-  // Return the current position in the code unit stream.
-  // Starts at zero.
-  inline size_t pos() const { return pos_; }
-
-  // Skips forward past the next code_unit_count UTF-16 code units
-  // in the input, or until the end of input if that comes sooner.
-  // Returns the number of code units actually skipped. If less
-  // than code_unit_count,
-  inline size_t SeekForward(size_t code_unit_count) {
-    size_t buffered_chars = buffer_end_ - buffer_cursor_;
-    if (code_unit_count <= buffered_chars) {
-      buffer_cursor_ += code_unit_count;
-      pos_ += code_unit_count;
-      return code_unit_count;
+  // Go back one character in the input stream.
+  // This undoes the most recent Advance().
+  inline void Back() {
+    // The common case: if the previous character is still within
+    // buffer_start_ .. buffer_end_, it is handled locally.
+    // Otherwise, a new block is requested.
+    if (V8_LIKELY(buffer_cursor_ > buffer_start_)) {
+      buffer_cursor_--;
+    } else {
+      ReadBlockAt(pos() - 1);
     }
-    return SlowSeekForward(code_unit_count);
   }
 
-  // Pushes back the most recently read UTF-16 code unit (or negative
-  // value if at end of input), i.e., the value returned by the most recent
-  // call to Advance.
-  // Must not be used right after calling SeekForward.
-  virtual void PushBack(int32_t code_unit) = 0;
+  // Go back two characters in the input stream. (This is the same as
+  // calling Back() twice. But Back() may - in some instances - do substantial
+  // work. Back2() guarantees this work will be done only once.)
+  inline void Back2() {
+    if (V8_LIKELY(buffer_cursor_ - 2 >= buffer_start_)) {
+      buffer_cursor_ -= 2;
+    } else {
+      ReadBlockAt(pos() - 2);
+    }
+  }
 
-  virtual bool SetBookmark();
-  virtual void ResetToBookmark();
+  inline size_t pos() const {
+    return buffer_pos_ + (buffer_cursor_ - buffer_start_);
+  }
+
+  inline void Seek(size_t pos) {
+    if (V8_LIKELY(pos >= buffer_pos_ &&
+                  pos < (buffer_pos_ + (buffer_end_ - buffer_start_)))) {
+      buffer_cursor_ = buffer_start_ + (pos - buffer_pos_);
+    } else {
+      ReadBlockAt(pos);
+    }
+  }
 
  protected:
-  static const uc32 kEndOfInput = -1;
+  Utf16CharacterStream(const uint16_t* buffer_start,
+                       const uint16_t* buffer_cursor,
+                       const uint16_t* buffer_end, size_t buffer_pos)
+      : buffer_start_(buffer_start),
+        buffer_cursor_(buffer_cursor),
+        buffer_end_(buffer_end),
+        buffer_pos_(buffer_pos) {}
+  Utf16CharacterStream() : Utf16CharacterStream(nullptr, nullptr, nullptr, 0) {}
 
-  // Ensures that the buffer_cursor_ points to the code_unit at
-  // position pos_ of the input, if possible. If the position
-  // is at or after the end of the input, return false. If there
-  // are more code_units available, return true.
+  void ReadBlockAt(size_t new_pos) {
+    // The callers of this method (Back/Back2/Seek) should handle the easy
+    // case (seeking within the current buffer), and we should only get here
+    // if we actually require new data.
+    // (This is really an efficiency check, not a correctness invariant.)
+    DCHECK(new_pos < buffer_pos_ ||
+           new_pos >= buffer_pos_ + (buffer_end_ - buffer_start_));
+
+    // Change pos() to point to new_pos.
+    buffer_pos_ = new_pos;
+    buffer_cursor_ = buffer_start_;
+    bool success = ReadBlock();
+    USE(success);
+
+    // Post-conditions: 1, on success, we should be at the right position.
+    //                  2, success == we should have more characters available.
+    DCHECK_IMPLIES(success, pos() == new_pos);
+    DCHECK_EQ(success, buffer_cursor_ < buffer_end_);
+    DCHECK_EQ(success, buffer_start_ < buffer_end_);
+  }
+
+  // Read more data, and update buffer_*_ to point to it.
+  // Returns true if more data was available.
+  //
+  // ReadBlock() may modify any of the buffer_*_ members, but must make sure
+  // that the result of pos() remains unaffected.
+  //
+  // Examples:
+  // - a stream could fill a separate buffer. Then buffer_start_ and
+  //   buffer_cursor_ would point to the beginning of the buffer, and
+  //   buffer_pos_ would be the old pos().
+  // - a stream with existing buffer chunks would set buffer_start_ and
+  //   buffer_end_ to cover the full chunk, and then buffer_cursor_ would
+  //   point into the middle of the buffer, while buffer_pos_ would describe
+  //   the start of the buffer.
   virtual bool ReadBlock() = 0;
-  virtual size_t SlowSeekForward(size_t code_unit_count) = 0;
 
+  const uint16_t* buffer_start_;
   const uint16_t* buffer_cursor_;
   const uint16_t* buffer_end_;
-  size_t pos_;
-};
-
-
-// ---------------------------------------------------------------------
-// DuplicateFinder discovers duplicate symbols.
-
-class DuplicateFinder {
- public:
-  explicit DuplicateFinder(UnicodeCache* constants)
-      : unicode_constants_(constants),
-        backing_store_(16),
-        map_(&Match) { }
-
-  int AddOneByteSymbol(Vector<const uint8_t> key, int value);
-  int AddTwoByteSymbol(Vector<const uint16_t> key, int value);
-  // Add a a number literal by converting it (if necessary)
-  // to the string that ToString(ToNumber(literal)) would generate.
-  // and then adding that string with AddOneByteSymbol.
-  // This string is the actual value used as key in an object literal,
-  // and the one that must be different from the other keys.
-  int AddNumber(Vector<const uint8_t> key, int value);
-
- private:
-  int AddSymbol(Vector<const uint8_t> key, bool is_one_byte, int value);
-  // Backs up the key and its length in the backing store.
-  // The backup is stored with a base 127 encoding of the
-  // length (plus a bit saying whether the string is one byte),
-  // followed by the bytes of the key.
-  uint8_t* BackupKey(Vector<const uint8_t> key, bool is_one_byte);
-
-  // Compare two encoded keys (both pointing into the backing store)
-  // for having the same base-127 encoded lengths and representation.
-  // and then having the same 'length' bytes following.
-  static bool Match(void* first, void* second);
-  // Creates a hash from a sequence of bytes.
-  static uint32_t Hash(Vector<const uint8_t> key, bool is_one_byte);
-  // Checks whether a string containing a JS number is its canonical
-  // form.
-  static bool IsNumberCanonical(Vector<const uint8_t> key);
-
-  // Size of buffer. Sufficient for using it to call DoubleToCString in
-  // from conversions.h.
-  static const int kBufferSize = 100;
-
-  UnicodeCache* unicode_constants_;
-  // Backing store used to store strings used as hashmap keys.
-  SequenceCollector<unsigned char> backing_store_;
-  base::HashMap map_;
-  // Buffer used for string->number->canonical string conversions.
-  char number_buffer_[kBufferSize];
+  size_t buffer_pos_;
 };
 
 
@@ -157,18 +154,24 @@
   // Scoped helper for a re-settable bookmark.
   class BookmarkScope {
    public:
-    explicit BookmarkScope(Scanner* scanner) : scanner_(scanner) {
+    explicit BookmarkScope(Scanner* scanner)
+        : scanner_(scanner), bookmark_(kNoBookmark) {
       DCHECK_NOT_NULL(scanner_);
     }
-    ~BookmarkScope() { scanner_->DropBookmark(); }
+    ~BookmarkScope() {}
 
-    bool Set() { return scanner_->SetBookmark(); }
-    void Reset() { scanner_->ResetToBookmark(); }
-    bool HasBeenSet() { return scanner_->BookmarkHasBeenSet(); }
-    bool HasBeenReset() { return scanner_->BookmarkHasBeenReset(); }
+    void Set();
+    void Apply();
+    bool HasBeenSet();
+    bool HasBeenApplied();
 
    private:
+    static const size_t kNoBookmark;
+    static const size_t kBookmarkWasApplied;
+    static const size_t kBookmarkAtFirstPos;
+
     Scanner* scanner_;
+    size_t bookmark_;
 
     DISALLOW_COPY_AND_ASSIGN(BookmarkScope);
   };
@@ -190,6 +193,7 @@
 
   // -1 is outside of the range of any real source code.
   static const int kNoOctalLocation = -1;
+  static const uc32 kEndOfInput = Utf16CharacterStream::kEndOfInput;
 
   explicit Scanner(UnicodeCache* scanner_contants);
 
@@ -251,7 +255,7 @@
     return LiteralMatches(data, length, false);
   }
 
-  void IsGetOrSet(bool* is_get, bool* is_set) {
+  bool IsGetOrSet(bool* is_get, bool* is_set) {
     if (is_literal_one_byte() &&
         literal_length() == 3 &&
         !literal_contains_escapes()) {
@@ -259,7 +263,9 @@
           reinterpret_cast<const char*>(literal_one_byte_string().start());
       *is_get = strncmp(token, "get", 3) == 0;
       *is_set = !*is_get && strncmp(token, "set", 3) == 0;
+      return *is_get || *is_set;
     }
+    return false;
   }
 
   int FindSymbol(DuplicateFinder* finder, int value);
@@ -418,23 +424,6 @@
 
     Handle<String> Internalize(Isolate* isolate) const;
 
-    void CopyFrom(const LiteralBuffer* other) {
-      if (other == nullptr) {
-        Reset();
-      } else {
-        is_one_byte_ = other->is_one_byte_;
-        position_ = other->position_;
-        if (position_ < backing_store_.length()) {
-          std::copy(other->backing_store_.begin(),
-                    other->backing_store_.begin() + position_,
-                    backing_store_.begin());
-        } else {
-          backing_store_.Dispose();
-          backing_store_ = other->backing_store_.Clone();
-        }
-      }
-    }
-
    private:
     static const int kInitialCapacity = 16;
     static const int kGrowthFactory = 4;
@@ -528,15 +517,6 @@
     scanner_error_ = MessageTemplate::kNone;
   }
 
-  // Support BookmarkScope functionality.
-  bool SetBookmark();
-  void ResetToBookmark();
-  bool BookmarkHasBeenSet();
-  bool BookmarkHasBeenReset();
-  void DropBookmark();
-  void CopyToNextTokenDesc(TokenDesc* from);
-  static void CopyTokenDesc(TokenDesc* to, TokenDesc* from);
-
   void ReportScannerError(const Location& location,
                           MessageTemplate::Template error) {
     if (has_error()) return;
@@ -550,6 +530,9 @@
     scanner_error_location_ = Location(pos, pos + 1);
   }
 
+  // Seek to the next_ token at the given position.
+  void SeekNext(size_t position);
+
   // Literal buffer support
   inline void StartLiteral() {
     LiteralBuffer* free_buffer =
@@ -618,7 +601,7 @@
     if (unibrow::Utf16::IsLeadSurrogate(c0_)) {
       uc32 c1 = source_->Advance();
       if (!unibrow::Utf16::IsTrailSurrogate(c1)) {
-        source_->PushBack(c1);
+        source_->Back();
       } else {
         c0_ = unibrow::Utf16::CombineSurrogatePair(c0_, c1);
       }
@@ -627,14 +610,22 @@
 
   void PushBack(uc32 ch) {
     if (c0_ > static_cast<uc32>(unibrow::Utf16::kMaxNonSurrogateCharCode)) {
-      source_->PushBack(unibrow::Utf16::TrailSurrogate(c0_));
-      source_->PushBack(unibrow::Utf16::LeadSurrogate(c0_));
+      source_->Back2();
     } else {
-      source_->PushBack(c0_);
+      source_->Back();
     }
     c0_ = ch;
   }
 
+  // Same as PushBack(ch1); PushBack(ch2).
+  // - Potentially more efficient as it uses Back2() on the stream.
+  // - Uses char as parameters, since we're only calling it with ASCII chars in
+  //   practice. This way, we can avoid a few edge cases.
+  void PushBack2(char ch1, char ch2) {
+    source_->Back2();
+    c0_ = ch2;
+  }
+
   inline Token::Value Select(Token::Value tok) {
     Advance();
     return tok;
@@ -790,37 +781,6 @@
   TokenDesc next_;       // desc for next token (one token look-ahead)
   TokenDesc next_next_;  // desc for the token after next (after PeekAhead())
 
-  // Variables for Scanner::BookmarkScope and the *Bookmark implementation.
-  // These variables contain the scanner state when a bookmark is set.
-  //
-  // We will use bookmark_c0_ as a 'control' variable, where:
-  // - bookmark_c0_ >= 0: A bookmark has been set and this contains c0_.
-  // - bookmark_c0_ == -1: No bookmark has been set.
-  // - bookmark_c0_ == -2: The bookmark has been applied (ResetToBookmark).
-  //
-  // Which state is being bookmarked? The parser state is distributed over
-  // several variables, roughly like this:
-  //   ...    1234        +       5678 ..... [character stream]
-  //       [current_] [next_] c0_ |      [scanner state]
-  // So when the scanner is logically at the beginning of an expression
-  // like "1234 + 4567", then:
-  // - current_ contains "1234"
-  // - next_ contains "+"
-  // - c0_ contains ' ' (the space between "+" and "5678",
-  // - the source_ character stream points to the beginning of "5678".
-  // To be able to restore this state, we will keep copies of current_, next_,
-  // and c0_; we'll ask the stream to bookmark itself, and we'll copy the
-  // contents of current_'s and next_'s literal buffers to bookmark_*_literal_.
-  static const uc32 kNoBookmark = -1;
-  static const uc32 kBookmarkWasApplied = -2;
-  uc32 bookmark_c0_;
-  TokenDesc bookmark_current_;
-  TokenDesc bookmark_next_;
-  LiteralBuffer bookmark_current_literal_;
-  LiteralBuffer bookmark_current_raw_literal_;
-  LiteralBuffer bookmark_next_literal_;
-  LiteralBuffer bookmark_next_raw_literal_;
-
   // Input stream. Must be initialized to an Utf16CharacterStream.
   Utf16CharacterStream* source_;
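
For illustration, the following standalone sketch mimics the buffer-window protocol documented above: pos() is derived from the window position plus the cursor offset, refilling happens only in ReadBlock(), Advance() returns kEndOfInput past the end (and still advances the position, as the comment above notes), and Back()/Seek() refill only when they leave the current window. ToyCharacterStream is a hypothetical, byte-based analogue with an absolute cursor, not the real Utf16CharacterStream API.

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

class ToyCharacterStream {
 public:
  static constexpr int32_t kEndOfInput = -1;

  explicit ToyCharacterStream(const std::string& source) : source_(source) {}

  int32_t Advance() {
    if (cursor_ >= end_) {
      buffer_pos_ = cursor_;
      if (!ReadBlock()) {
        ++cursor_;  // Like the real stream, EOF still occupies a position.
        return kEndOfInput;
      }
    }
    return buffer_[cursor_++ - buffer_pos_];
  }

  void Back() { Seek(pos() - 1); }
  size_t pos() const { return cursor_; }

  void Seek(size_t pos) {
    cursor_ = pos;
    if (cursor_ < buffer_pos_ || cursor_ >= end_) {
      buffer_pos_ = cursor_;
      ReadBlock();
    }
  }

 private:
  static constexpr size_t kBlockSize = 4;

  // Refill the window starting at buffer_pos_; returns true if data remains.
  bool ReadBlock() {
    size_t start = std::min(buffer_pos_, source_.size());
    size_t count = std::min(source_.size() - start, kBlockSize);
    buffer_.assign(source_.begin() + start, source_.begin() + start + count);
    end_ = buffer_pos_ + count;
    return count > 0;
  }

  std::string source_;
  std::vector<char> buffer_;
  size_t buffer_pos_ = 0;  // Stream position of buffer_[0].
  size_t cursor_ = 0;      // Absolute stream position of the read cursor.
  size_t end_ = 0;         // Absolute stream position just past the buffer.
};

int main() {
  ToyCharacterStream stream("var x");
  assert(stream.Advance() == 'v');
  assert(stream.Advance() == 'a');
  stream.Back();
  assert(stream.Advance() == 'a');
  stream.Seek(4);
  assert(stream.Advance() == 'x');
  assert(stream.Advance() == ToyCharacterStream::kEndOfInput);
  return 0;
}
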
 
diff --git a/src/pending-compilation-error-handler.cc b/src/pending-compilation-error-handler.cc
index f1f9a20..3e88efc 100644
--- a/src/pending-compilation-error-handler.cc
+++ b/src/pending-compilation-error-handler.cc
@@ -4,6 +4,7 @@
 
 #include "src/pending-compilation-error-handler.h"
 
+#include "src/ast/ast-value-factory.h"
 #include "src/debug/debug.h"
 #include "src/handles.h"
 #include "src/isolate.h"
diff --git a/src/ppc/code-stubs-ppc.cc b/src/ppc/code-stubs-ppc.cc
index 6dd897b..ce423ea 100644
--- a/src/ppc/code-stubs-ppc.cc
+++ b/src/ppc/code-stubs-ppc.cc
@@ -1719,7 +1719,6 @@
   // r5 : feedback vector
   // r6 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1742,7 +1741,7 @@
   Register weak_value = r10;
   __ LoadP(weak_value, FieldMemOperand(r8, WeakCell::kValueOffset));
   __ cmp(r4, weak_value);
-  __ beq(&done_increment_count);
+  __ beq(&done);
   __ CompareRoot(r8, Heap::kmegamorphic_symbolRootIndex);
   __ beq(&done);
   __ LoadP(feedback_map, FieldMemOperand(r8, HeapObject::kMapOffset));
@@ -1765,7 +1764,7 @@
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
   __ cmp(r4, r8);
   __ bne(&megamorphic);
-  __ b(&done_increment_count);
+  __ b(&done);
 
   __ bind(&miss);
 
@@ -1795,32 +1794,22 @@
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ b(&done_initialize_count);
+  __ b(&done);
 
   __ bind(&not_array_function);
 
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ LoadSmiLiteral(r8, Smi::FromInt(1));
-  __ SmiToPtrArrayOffset(r7, r6);
-  __ add(r7, r5, r7);
-  __ StoreP(r8, FieldMemOperand(r7, count_offset), r0);
-  __ b(&done);
+  __ bind(&done);
 
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ SmiToPtrArrayOffset(r8, r6);
   __ add(r8, r5, r8);
 
   __ LoadP(r7, FieldMemOperand(r8, count_offset));
   __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
   __ StoreP(r7, FieldMemOperand(r8, count_offset), r0);
-
-  __ bind(&done);
 }
 
 
@@ -1872,6 +1861,16 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot, Register temp) {
+  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
+  __ SmiToPtrArrayOffset(temp, slot);
+  __ add(feedback_vector, feedback_vector, temp);
+  __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
+  __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
+  __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r4 - function
@@ -1885,12 +1884,7 @@
   __ mov(r3, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ SmiToPtrArrayOffset(r8, r6);
-  __ add(r5, r5, r8);
-  __ LoadP(r6, FieldMemOperand(r5, count_offset));
-  __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
-  __ StoreP(r6, FieldMemOperand(r5, count_offset), r0);
+  IncrementCallCount(masm, r5, r6, r0);
 
   __ mr(r5, r7);
   __ mr(r6, r4);
@@ -1903,7 +1897,7 @@
   // r4 - function
   // r6 - slot id (Smi)
   // r5 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1934,13 +1928,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(r4, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ LoadP(r6, FieldMemOperand(r9, count_offset));
-  __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
-  __ StoreP(r6, FieldMemOperand(r9, count_offset), r0);
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, r5, r6, r0);
+
   __ mov(r3, Operand(argc));
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1980,6 +1972,11 @@
   __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0);
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, r5, r6, r0);
+
+  __ bind(&call_count_incremented);
   __ mov(r3, Operand(argc));
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -2006,10 +2003,6 @@
   __ cmp(r7, ip);
   __ bne(&miss);
 
-  // Initialize the call counter.
-  __ LoadSmiLiteral(r8, Smi::FromInt(1));
-  __ StoreP(r8, FieldMemOperand(r9, count_offset), r0);
-
   // Store the function. Use a stub since we need a frame for allocation.
   // r5 - vector
   // r6 - slot
@@ -2017,9 +2010,13 @@
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(r5);
+    __ Push(r6);
     __ Push(cp, r4);
     __ CallStub(&create_stub);
     __ Pop(cp, r4);
+    __ Pop(r6);
+    __ Pop(r5);
   }
 
   __ b(&call_function);
@@ -2029,7 +2026,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ b(&call);
+  __ b(&call_count_incremented);
 }
 
 
@@ -2211,290 +2208,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  lr: return address
-  //  sp[0]: to
-  //  sp[4]: from
-  //  sp[8]: string
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length.
-  // If any of these assumptions fail, we call the runtime system.
-
-  const int kToOffset = 0 * kPointerSize;
-  const int kFromOffset = 1 * kPointerSize;
-  const int kStringOffset = 2 * kPointerSize;
-
-  __ LoadP(r5, MemOperand(sp, kToOffset));
-  __ LoadP(r6, MemOperand(sp, kFromOffset));
-
-  // If either to or from had the smi tag bit set, then fail to generic runtime
-  __ JumpIfNotSmi(r5, &runtime);
-  __ JumpIfNotSmi(r6, &runtime);
-  __ SmiUntag(r5);
-  __ SmiUntag(r6, SetRC);
-  // Both r5 and r6 are untagged integers.
-
-  // We want to bailout to runtime here if From is negative.
-  __ blt(&runtime, cr0);  // From < 0.
-
-  __ cmpl(r6, r5);
-  __ bgt(&runtime);  // Fail if from > to.
-  __ sub(r5, r5, r6);
-
-  // Make sure first argument is a string.
-  __ LoadP(r3, MemOperand(sp, kStringOffset));
-  __ JumpIfSmi(r3, &runtime);
-  Condition is_string = masm->IsObjectStringType(r3, r4);
-  __ b(NegateCondition(is_string), &runtime, cr0);
-
-  Label single_char;
-  __ cmpi(r5, Operand(1));
-  __ b(eq, &single_char);
-
-  // Short-cut for the case of trivial substring.
-  Label return_r3;
-  // r3: original string
-  // r5: result string length
-  __ LoadP(r7, FieldMemOperand(r3, String::kLengthOffset));
-  __ SmiUntag(r0, r7);
-  __ cmpl(r5, r0);
-  // Return original string.
-  __ beq(&return_r3);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ bgt(&runtime);
-  // Shorter than original string's length: an actual substring.
-
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into r8.
-  // r3: original string
-  // r4: instance type
-  // r5: length
-  // r6: from index (untagged)
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ andi(r0, r4, Operand(kIsIndirectStringMask));
-  __ beq(&seq_or_external_string, cr0);
-
-  __ andi(r0, r4, Operand(kSlicedNotConsMask));
-  __ bne(&sliced_string, cr0);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  __ LoadP(r8, FieldMemOperand(r3, ConsString::kSecondOffset));
-  __ CompareRoot(r8, Heap::kempty_stringRootIndex);
-  __ bne(&runtime);
-  __ LoadP(r8, FieldMemOperand(r3, ConsString::kFirstOffset));
-  // Update instance type.
-  __ LoadP(r4, FieldMemOperand(r8, HeapObject::kMapOffset));
-  __ lbz(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
-  __ b(&underlying_unpacked);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ LoadP(r8, FieldMemOperand(r3, SlicedString::kParentOffset));
-  __ LoadP(r7, FieldMemOperand(r3, SlicedString::kOffsetOffset));
-  __ SmiUntag(r4, r7);
-  __ add(r6, r6, r4);  // Add offset to index.
-  // Update instance type.
-  __ LoadP(r4, FieldMemOperand(r8, HeapObject::kMapOffset));
-  __ lbz(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
-  __ b(&underlying_unpacked);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mr(r8, r3);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // r8: underlying subject string
-    // r4: instance type of underlying subject string
-    // r5: length
-    // r6: adjusted start index (untagged)
-    __ cmpi(r5, Operand(SlicedString::kMinLength));
-    // Short slice.  Copy instead of slicing.
-    __ blt(&copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ andi(r0, r4, Operand(kStringEncodingMask));
-    __ beq(&two_byte_slice, cr0);
-    __ AllocateOneByteSlicedString(r3, r5, r9, r10, &runtime);
-    __ b(&set_slice_header);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(r3, r5, r9, r10, &runtime);
-    __ bind(&set_slice_header);
-    __ SmiTag(r6);
-    __ StoreP(r8, FieldMemOperand(r3, SlicedString::kParentOffset), r0);
-    __ StoreP(r6, FieldMemOperand(r3, SlicedString::kOffsetOffset), r0);
-    __ b(&return_r3);
-
-    __ bind(&copy_routine);
-  }
-
-  // r8: underlying subject string
-  // r4: instance type of underlying subject string
-  // r5: length
-  // r6: adjusted start index (untagged)
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ andi(r0, r4, Operand(kExternalStringTag));
-  __ beq(&sequential_string, cr0);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ andi(r0, r4, Operand(kShortExternalStringTag));
-  __ bne(&runtime, cr0);
-  __ LoadP(r8, FieldMemOperand(r8, ExternalString::kResourceDataOffset));
-  // r8 already points to the first character of underlying string.
-  __ b(&allocate_result);
-
-  __ bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ addi(r8, r8, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&allocate_result);
-  // Sequential acii string.  Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ andi(r0, r4, Operand(kStringEncodingMask));
-  __ beq(&two_byte_sequential, cr0);
-
-  // Allocate and copy the resulting one-byte string.
-  __ AllocateOneByteString(r3, r5, r7, r9, r10, &runtime);
-
-  // Locate first character of substring to copy.
-  __ add(r8, r8, r6);
-  // Locate first character of result.
-  __ addi(r4, r3, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  // r3: result string
-  // r4: first character of result string
-  // r5: result string length
-  // r8: first character of substring to copy
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(masm, r4, r8, r5, r6,
-                                       String::ONE_BYTE_ENCODING);
-  __ b(&return_r3);
-
-  // Allocate and copy the resulting two-byte string.
-  __ bind(&two_byte_sequential);
-  __ AllocateTwoByteString(r3, r5, r7, r9, r10, &runtime);
-
-  // Locate first character of substring to copy.
-  __ ShiftLeftImm(r4, r6, Operand(1));
-  __ add(r8, r8, r4);
-  // Locate first character of result.
-  __ addi(r4, r3, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  // r3: result string.
-  // r4: first character of result.
-  // r5: result length.
-  // r8: first character of substring to copy.
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(masm, r4, r8, r5, r6,
-                                       String::TWO_BYTE_ENCODING);
-
-  __ bind(&return_r3);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, r6, r7);
-  __ Drop(3);
-  __ Ret();
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // r3: original string
-  // r4: instance type
-  // r5: length
-  // r6: from index (untagged)
-  __ SmiTag(r6, r6);
-  StringCharAtGenerator generator(r3, r6, r5, r3, &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ Drop(3);
-  __ Ret();
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in r3.
-  Label is_number;
-  __ JumpIfSmi(r3, &is_number);
-
-  __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
-  // r3: receiver
-  // r4: receiver instance type
-  __ Ret(lt);
-
-  Label not_heap_number;
-  __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
-  __ bne(&not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ cmpi(r4, Operand(ODDBALL_TYPE));
-  __ bne(&not_oddball);
-  __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r3);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in r3.
-  Label is_number;
-  __ JumpIfSmi(r3, &is_number);
-
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CompareObjectType(r3, r4, r4, LAST_NAME_TYPE);
-  // r3: receiver
-  // r4: receiver instance type
-  __ Ret(le);
-
-  Label not_heap_number;
-  __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
-  __ bne(&not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ cmpi(r4, Operand(ODDBALL_TYPE));
-  __ bne(&not_oddball);
-  __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r3);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
                                                    Register right,
@@ -3407,19 +3120,6 @@
   Label need_incremental;
   Label need_incremental_pop_scratch;
 
-  DCHECK((~Page::kPageAlignmentMask & 0xffff) == 0);
-  __ lis(r0, Operand((~Page::kPageAlignmentMask >> 16)));
-  __ and_(regs_.scratch0(), regs_.object(), r0);
-  __ LoadP(
-      regs_.scratch1(),
-      MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
-  __ subi(regs_.scratch1(), regs_.scratch1(), Operand(1));
-  __ StoreP(
-      regs_.scratch1(),
-      MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
-  __ cmpi(regs_.scratch1(), Operand::Zero());  // PPC, we could do better here
-  __ blt(&need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -3854,7 +3554,7 @@
   __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
 
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ mr(feedback, too_far);
 
   __ addi(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4581,7 +4281,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ Cmpi(r10, Operand(Page::kMaxRegularHeapObjectSize), r0);
+    __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
     __ bgt(&too_big_for_new_space);
     {
       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
@@ -4972,7 +4672,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ Cmpi(r10, Operand(Page::kMaxRegularHeapObjectSize), r0);
+  __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
   __ bgt(&too_big_for_new_space);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
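
For orientation, the CallICStub changes above make every call path bump the call site's count through the shared IncrementCallCount helper, where the count is a Smi stored one element after the site's feedback slot (count_offset = FixedArray::kHeaderSize + kPointerSize). Below is a rough standalone C++ model of that bookkeeping; the Smi tagging and vector layout are simplifications for illustration, not V8's actual object model.

#include <cassert>
#include <cstdint>
#include <vector>

// Toy "Smi" tagging: the value is stored shifted left by one, low bit clear.
static int64_t ToSmi(int64_t value) { return value << 1; }
static int64_t FromSmi(int64_t smi) { return smi >> 1; }

// The call count for a call site lives one element after its feedback slot.
static void IncrementCallCount(std::vector<int64_t>& feedback_vector,
                               size_t slot) {
  int64_t count = FromSmi(feedback_vector[slot + 1]);
  feedback_vector[slot + 1] = ToSmi(count + 1);
}

int main() {
  // Two call sites: slots 0 and 2, each followed by its call count.
  std::vector<int64_t> feedback_vector = {0, ToSmi(0), 0, ToSmi(0)};
  IncrementCallCount(feedback_vector, 0);  // monomorphic call
  IncrementCallCount(feedback_vector, 0);  // megamorphic call: same bookkeeping
  IncrementCallCount(feedback_vector, 2);
  assert(FromSmi(feedback_vector[1]) == 2);
  assert(FromSmi(feedback_vector[3]) == 1);
  return 0;
}
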
diff --git a/src/ppc/interface-descriptors-ppc.cc b/src/ppc/interface-descriptors-ppc.cc
index bc188f4..3ff0fde 100644
--- a/src/ppc/interface-descriptors-ppc.cc
+++ b/src/ppc/interface-descriptors-ppc.cc
@@ -40,13 +40,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return r6; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return r7; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return r6; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return r8; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return r6; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return r7; }
+const Register StoreTransitionDescriptor::VectorRegister() { return r6; }
+const Register StoreTransitionDescriptor::MapRegister() { return r8; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r5; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r3; }
@@ -355,7 +351,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       r3,  // callee
@@ -390,7 +386,19 @@
       r3,  // argument count (not including receiver)
       r6,  // new target
       r4,  // constructor to call
-      r5   // address of the first argument
+      r5,  // allocation site feedback if available, undefined otherwise
+      r7   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      r3,  // argument count (not including receiver)
+      r4,  // target to call checked to be Array function
+      r5,  // allocation site feedback if available, undefined otherwise
+      r6   // address of the first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/ppc/macro-assembler-ppc.cc b/src/ppc/macro-assembler-ppc.cc
index 4e39d96..9b5f80e 100644
--- a/src/ppc/macro-assembler-ppc.cc
+++ b/src/ppc/macro-assembler-ppc.cc
@@ -282,9 +282,7 @@
 void MacroAssembler::InNewSpace(Register object, Register scratch,
                                 Condition cond, Label* branch) {
   DCHECK(cond == eq || cond == ne);
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cond, branch);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cond, branch);
 }
 
 
@@ -1814,7 +1812,7 @@
 void MacroAssembler::Allocate(int object_size, Register result,
                               Register scratch1, Register scratch2,
                               Label* gc_required, AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
@@ -2070,7 +2068,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK(!AreAliased(result, scratch1, scratch2, ip));
 
   // Make object size into bytes.
diff --git a/src/ppc/macro-assembler-ppc.h b/src/ppc/macro-assembler-ppc.h
index cf9d4b5..ba4d277 100644
--- a/src/ppc/macro-assembler-ppc.h
+++ b/src/ppc/macro-assembler-ppc.h
@@ -140,6 +140,18 @@
   void Ret() { blr(); }
   void Ret(Condition cond, CRegister cr = cr7) { bclr(cond, cr); }
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack to
+  // the register according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count);
diff --git a/src/ppc/simulator-ppc.cc b/src/ppc/simulator-ppc.cc
index 2816a87..84fbb39 100644
--- a/src/ppc/simulator-ppc.cc
+++ b/src/ppc/simulator-ppc.cc
@@ -658,9 +658,8 @@
   last_debugger_input_ = input;
 }
 
-
-void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
-                            size_t size) {
+void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
+                            void* start_addr, size_t size) {
   intptr_t start = reinterpret_cast<intptr_t>(start_addr);
   int intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -680,8 +679,8 @@
   }
 }
 
-
-CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
+CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                   void* page) {
   base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
@@ -692,7 +691,8 @@
 
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
+void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
+                             intptr_t start, int size) {
   DCHECK(size <= CachePage::kPageSize);
   DCHECK(AllOnOnePage(start, size - 1));
   DCHECK((start & CachePage::kLineMask) == 0);
@@ -704,7 +704,8 @@
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
-void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
+void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
+                            Instruction* instr) {
   intptr_t address = reinterpret_cast<intptr_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@@ -737,7 +738,7 @@
 Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
   i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
-    i_cache_ = new base::HashMap(&ICacheMatch);
+    i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
     isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize(isolate);
@@ -872,7 +873,8 @@
 
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
   if (i_cache != nullptr) {
     for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
diff --git a/src/ppc/simulator-ppc.h b/src/ppc/simulator-ppc.h
index d3163e8..d061545 100644
--- a/src/ppc/simulator-ppc.h
+++ b/src/ppc/simulator-ppc.h
@@ -217,7 +217,7 @@
   // Call on program start.
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   // V8 generally calls into generated JS code with 5 parameters and into
   // generated RegExp code with 7 parameters. This is a convenience function,
@@ -239,7 +239,8 @@
   char* last_debugger_input() { return last_debugger_input_; }
 
   // ICache checking.
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_lr, end_sim_pc).
@@ -329,9 +330,12 @@
   void ExecuteInstruction(Instruction* instr);
 
   // ICache.
-  static void CheckICache(base::HashMap* i_cache, Instruction* instr);
-  static void FlushOnePage(base::HashMap* i_cache, intptr_t start, int size);
-  static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
+  static void CheckICache(base::CustomMatcherHashMap* i_cache,
+                          Instruction* instr);
+  static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
+                           int size);
+  static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                 void* page);
 
   // Runtime call support.
   static void* RedirectExternalReference(
@@ -369,7 +373,7 @@
   char* last_debugger_input_;
 
   // Icache simulation
-  base::HashMap* i_cache_;
+  base::CustomMatcherHashMap* i_cache_;
 
   // Registered breakpoints.
   Instruction* break_pc_;
diff --git a/src/profiler/OWNERS b/src/profiler/OWNERS
new file mode 100644
index 0000000..87c9661
--- /dev/null
+++ b/src/profiler/OWNERS
@@ -0,0 +1 @@
+alph@chromium.org
diff --git a/src/profiler/allocation-tracker.cc b/src/profiler/allocation-tracker.cc
index d094d0e..99b0b70 100644
--- a/src/profiler/allocation-tracker.cc
+++ b/src/profiler/allocation-tracker.cc
@@ -193,7 +193,7 @@
 AllocationTracker::AllocationTracker(HeapObjectsMap* ids, StringsStorage* names)
     : ids_(ids),
       names_(names),
-      id_to_function_info_index_(base::HashMap::PointersMatch),
+      id_to_function_info_index_(),
       info_index_for_other_state_(0) {
   FunctionInfo* info = new FunctionInfo();
   info->name = "(root)";
diff --git a/src/profiler/cpu-profiler.h b/src/profiler/cpu-profiler.h
index e3df609..e9ccc57 100644
--- a/src/profiler/cpu-profiler.h
+++ b/src/profiler/cpu-profiler.h
@@ -11,7 +11,6 @@
 #include "src/base/atomic-utils.h"
 #include "src/base/atomicops.h"
 #include "src/base/platform/time.h"
-#include "src/compiler.h"
 #include "src/isolate.h"
 #include "src/libsampler/sampler.h"
 #include "src/locked-queue.h"
diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc
index 9273168..d0fa2e4 100644
--- a/src/profiler/heap-snapshot-generator.cc
+++ b/src/profiler/heap-snapshot-generator.cc
@@ -355,16 +355,8 @@
     HeapObjectsMap::kGcRootsFirstSubrootId +
     VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
 
-
-static bool AddressesMatch(void* key1, void* key2) {
-  return key1 == key2;
-}
-
-
 HeapObjectsMap::HeapObjectsMap(Heap* heap)
-    : next_id_(kFirstAvailableObjectId),
-      entries_map_(AddressesMatch),
-      heap_(heap) {
+    : next_id_(kFirstAvailableObjectId), heap_(heap) {
   // This dummy element solves a problem with entries_map_.
   // When we do lookup in HashMap we see no difference between two cases:
   // it has an entry with NULL as the value or it has created
@@ -476,7 +468,7 @@
            entries_map_.occupancy());
   }
   heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
-                          "HeapObjectsMap::UpdateHeapObjectsMap");
+                           GarbageCollectionReason::kHeapProfiler);
   HeapIterator iterator(heap_);
   for (HeapObject* obj = iterator.next();
        obj != NULL;
@@ -704,7 +696,7 @@
          GetMemoryUsedByList(entries_) + GetMemoryUsedByList(time_intervals_);
 }
 
-HeapEntriesMap::HeapEntriesMap() : entries_(base::HashMap::PointersMatch) {}
+HeapEntriesMap::HeapEntriesMap() : entries_() {}
 
 int HeapEntriesMap::Map(HeapThing thing) {
   base::HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
@@ -720,7 +712,7 @@
   cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
 }
 
-HeapObjectsSet::HeapObjectsSet() : entries_(base::HashMap::PointersMatch) {}
+HeapObjectsSet::HeapObjectsSet() : entries_() {}
 
 void HeapObjectsSet::Clear() {
   entries_.Clear();
@@ -1216,8 +1208,7 @@
     }
     if (scope_info->HasFunctionName()) {
       String* name = scope_info->FunctionName();
-      VariableMode mode;
-      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
+      int idx = scope_info->FunctionContextSlotIndex(name);
       if (idx >= 0) {
         SetContextReference(context, entry, name, context->get(idx),
                             Context::OffsetOfElementAt(idx));
@@ -1831,6 +1822,7 @@
          object != heap_->empty_byte_array() &&
          object != heap_->empty_fixed_array() &&
          object != heap_->empty_descriptor_array() &&
+         object != heap_->empty_type_feedback_vector() &&
          object != heap_->fixed_array_map() && object != heap_->cell_map() &&
          object != heap_->global_property_cell_map() &&
          object != heap_->shared_function_info_map() &&
@@ -2507,12 +2499,10 @@
   // full GC is reachable from the root when computing dominators.
   // This is not true for weakly reachable objects.
   // As a temporary solution we call GC twice.
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
-  heap_->CollectAllGarbage(
-      Heap::kMakeHeapIterableMask,
-      "HeapSnapshotGenerator::GenerateSnapshot");
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                           GarbageCollectionReason::kHeapProfiler);
+  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+                           GarbageCollectionReason::kHeapProfiler);
 
 #ifdef VERIFY_HEAP
   Heap* debug_heap = heap_;
diff --git a/src/profiler/heap-snapshot-generator.h b/src/profiler/heap-snapshot-generator.h
index b870fbe..b235ff0 100644
--- a/src/profiler/heap-snapshot-generator.h
+++ b/src/profiler/heap-snapshot-generator.h
@@ -525,8 +525,8 @@
   bool embedder_queried_;
   HeapObjectsSet in_groups_;
   // RetainedObjectInfo* -> List<HeapObject*>*
-  base::HashMap objects_by_info_;
-  base::HashMap native_groups_;
+  base::CustomMatcherHashMap objects_by_info_;
+  base::CustomMatcherHashMap native_groups_;
   HeapEntriesAllocator* synthetic_entries_allocator_;
   HeapEntriesAllocator* native_entries_allocator_;
   // Used during references extraction.
@@ -613,7 +613,7 @@
   static const int kNodeFieldsCount;
 
   HeapSnapshot* snapshot_;
-  base::HashMap strings_;
+  base::CustomMatcherHashMap strings_;
   int next_node_id_;
   int next_string_id_;
   OutputStreamWriter* writer_;
diff --git a/src/profiler/profile-generator.h b/src/profiler/profile-generator.h
index b785eaa..179d411 100644
--- a/src/profiler/profile-generator.h
+++ b/src/profiler/profile-generator.h
@@ -8,8 +8,9 @@
 #include <map>
 #include "src/allocation.h"
 #include "src/base/hashmap.h"
-#include "src/compiler.h"
+#include "src/log.h"
 #include "src/profiler/strings-storage.h"
+#include "src/source-position.h"
 
 namespace v8 {
 namespace internal {
@@ -220,10 +221,10 @@
   CodeEntry* entry_;
   unsigned self_ticks_;
   // Mapping from CodeEntry* to ProfileNode*
-  base::HashMap children_;
+  base::CustomMatcherHashMap children_;
   List<ProfileNode*> children_list_;
   unsigned id_;
-  base::HashMap line_ticks_;
+  base::CustomMatcherHashMap line_ticks_;
 
   std::vector<CpuProfileDeoptInfo> deopt_infos_;
 
@@ -260,7 +261,7 @@
   Isolate* isolate_;
 
   unsigned next_function_id_;
-  base::HashMap function_ids_;
+  base::CustomMatcherHashMap function_ids_;
 
   DISALLOW_COPY_AND_ASSIGN(ProfileTree);
 };
diff --git a/src/profiler/profiler-listener.cc b/src/profiler/profiler-listener.cc
index 7ce874e..4bceac2 100644
--- a/src/profiler/profiler-listener.cc
+++ b/src/profiler/profiler-listener.cc
@@ -319,6 +319,7 @@
 }
 
 void ProfilerListener::AddObserver(CodeEventObserver* observer) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
   if (std::find(observers_.begin(), observers_.end(), observer) !=
       observers_.end())
     return;
@@ -326,6 +327,7 @@
 }
 
 void ProfilerListener::RemoveObserver(CodeEventObserver* observer) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
   auto it = std::find(observers_.begin(), observers_.end(), observer);
   if (it == observers_.end()) return;
   observers_.erase(it);
diff --git a/src/profiler/profiler-listener.h b/src/profiler/profiler-listener.h
index 7e24cea..500b7ae 100644
--- a/src/profiler/profiler-listener.h
+++ b/src/profiler/profiler-listener.h
@@ -79,6 +79,7 @@
   void RecordDeoptInlinedFrames(CodeEntry* entry, AbstractCode* abstract_code);
   Name* InferScriptName(Name* name, SharedFunctionInfo* info);
   V8_INLINE void DispatchCodeEvent(const CodeEventsContainer& evt_rec) {
+    base::LockGuard<base::Mutex> guard(&mutex_);
     for (auto observer : observers_) {
       observer->CodeEventHandler(evt_rec);
     }
@@ -87,6 +88,7 @@
   StringsStorage function_and_resource_names_;
   std::vector<CodeEntry*> code_entries_;
   std::vector<CodeEventObserver*> observers_;
+  base::Mutex mutex_;
 
   DISALLOW_COPY_AND_ASSIGN(ProfilerListener);
 };
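
The profiler-listener hunks above add a base::Mutex member and take a LockGuard in AddObserver, RemoveObserver and DispatchCodeEvent, presumably so observer registration and code-event dispatch can race safely across threads. A minimal sketch of the same mutex-guarded observer-list pattern, written against the C++ standard library rather than V8's base classes (all names below are illustrative, not V8 API):

// Illustrative only: std::mutex/std::lock_guard stand in for V8's
// base::Mutex/base::LockGuard.
#include <algorithm>
#include <mutex>
#include <vector>

struct CodeEvent {};

class CodeEventObserver {
 public:
  virtual ~CodeEventObserver() = default;
  virtual void CodeEventHandler(const CodeEvent& evt_rec) = 0;
};

class Listener {
 public:
  void AddObserver(CodeEventObserver* observer) {
    std::lock_guard<std::mutex> guard(mutex_);
    if (std::find(observers_.begin(), observers_.end(), observer) !=
        observers_.end()) {
      return;
    }
    observers_.push_back(observer);
  }

  void RemoveObserver(CodeEventObserver* observer) {
    std::lock_guard<std::mutex> guard(mutex_);
    auto it = std::find(observers_.begin(), observers_.end(), observer);
    if (it == observers_.end()) return;
    observers_.erase(it);
  }

  void DispatchCodeEvent(const CodeEvent& evt_rec) {
    // Holding the lock during dispatch keeps iteration safe against a
    // concurrent Add/RemoveObserver, at the cost of serializing observers.
    std::lock_guard<std::mutex> guard(mutex_);
    for (CodeEventObserver* observer : observers_) {
      observer->CodeEventHandler(evt_rec);
    }
  }

 private:
  std::vector<CodeEventObserver*> observers_;
  std::mutex mutex_;  // Guards observers_.
};

class CountingObserver : public CodeEventObserver {
 public:
  void CodeEventHandler(const CodeEvent&) override { ++events_seen; }
  int events_seen = 0;
};

int main() {
  Listener listener;
  CountingObserver observer;
  listener.AddObserver(&observer);
  listener.DispatchCodeEvent(CodeEvent());
  listener.RemoveObserver(&observer);
  return observer.events_seen == 1 ? 0 : 1;
}
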
diff --git a/src/profiler/sampling-heap-profiler.cc b/src/profiler/sampling-heap-profiler.cc
index b4361ee..3b2ca63 100644
--- a/src/profiler/sampling-heap-profiler.cc
+++ b/src/profiler/sampling-heap-profiler.cc
@@ -259,8 +259,8 @@
 
 v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
   if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
-    isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
-                                        "SamplingHeapProfiler");
+    isolate_->heap()->CollectAllGarbage(
+        Heap::kNoGCFlags, GarbageCollectionReason::kSamplingProfiler);
   }
   // To resolve positions to line/column numbers, we will need to look up
   // scripts. Build a map to allow fast mapping from script id to script.
diff --git a/src/profiler/strings-storage.h b/src/profiler/strings-storage.h
index f98aa5e..f11afbd 100644
--- a/src/profiler/strings-storage.h
+++ b/src/profiler/strings-storage.h
@@ -36,10 +36,10 @@
 
   static bool StringsMatch(void* key1, void* key2);
   const char* AddOrDisposeString(char* str, int len);
-  base::HashMap::Entry* GetEntry(const char* str, int len);
+  base::CustomMatcherHashMap::Entry* GetEntry(const char* str, int len);
 
   uint32_t hash_seed_;
-  base::HashMap names_;
+  base::CustomMatcherHashMap names_;
 
   DISALLOW_COPY_AND_ASSIGN(StringsStorage);
 };
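
Several hunks in this section rename base::HashMap to base::CustomMatcherHashMap. The diff suggests the split: the plain hash map now matches keys by pointer identity (constructors such as id_to_function_info_index_(base::HashMap::PointersMatch) lose their argument, and the hand-rolled AddressesMatch helper goes away), while the custom-matcher variant keeps a user-supplied equality callback such as StringsStorage::StringsMatch. A standard-library sketch of the same idea, matching C-string keys by contents rather than by pointer (names and types here are illustrative, not V8 API):

#include <cstring>
#include <functional>
#include <iostream>
#include <string>
#include <unordered_map>

// Contents-based equality, playing the role of a custom key matcher such as
// StringsStorage::StringsMatch; the default would be pointer identity.
struct CStrEqual {
  bool operator()(const char* a, const char* b) const {
    return std::strcmp(a, b) == 0;
  }
};

struct CStrHash {
  size_t operator()(const char* s) const {
    return std::hash<std::string>()(std::string(s));
  }
};

int main() {
  std::unordered_map<const char*, int, CStrHash, CStrEqual> names;
  char key_a[] = "foo";
  char key_b[] = "foo";  // Same contents, different address.
  names[key_a] = 1;
  // Found via the custom matcher; a pointer-keyed map would report 0 here.
  std::cout << names.count(key_b) << "\n";
  return 0;
}
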
diff --git a/src/profiler/tracing-cpu-profiler.cc b/src/profiler/tracing-cpu-profiler.cc
new file mode 100644
index 0000000..b24ca2f
--- /dev/null
+++ b/src/profiler/tracing-cpu-profiler.cc
@@ -0,0 +1,25 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/profiler/tracing-cpu-profiler.h"
+
+#include "src/v8.h"
+
+namespace v8 {
+
+std::unique_ptr<TracingCpuProfiler> TracingCpuProfiler::Create(
+    v8::Isolate* isolate) {
+  return std::unique_ptr<TracingCpuProfiler>(
+      new internal::TracingCpuProfilerImpl(
+          reinterpret_cast<internal::Isolate*>(isolate)));
+}
+
+namespace internal {
+
+TracingCpuProfilerImpl::TracingCpuProfilerImpl(Isolate* isolate) {}
+
+TracingCpuProfilerImpl::~TracingCpuProfilerImpl() {}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/profiler/tracing-cpu-profiler.h b/src/profiler/tracing-cpu-profiler.h
new file mode 100644
index 0000000..80f1bdc
--- /dev/null
+++ b/src/profiler/tracing-cpu-profiler.h
@@ -0,0 +1,26 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_PROFILER_TRACING_CPU_PROFILER_H
+#define V8_PROFILER_TRACING_CPU_PROFILER_H
+
+#include "include/v8-profiler.h"
+#include "src/base/macros.h"
+
+namespace v8 {
+namespace internal {
+
+class TracingCpuProfilerImpl final : public TracingCpuProfiler {
+ public:
+  explicit TracingCpuProfilerImpl(Isolate*);
+  ~TracingCpuProfilerImpl();
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(TracingCpuProfilerImpl);
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_PROFILER_TRACING_CPU_PROFILER_H
diff --git a/src/property-details.h b/src/property-details.h
index 87df02d..d720b1c 100644
--- a/src/property-details.h
+++ b/src/property-details.h
@@ -62,7 +62,6 @@
               static_cast<PropertyFilter>(v8::PropertyFilter::SKIP_SYMBOLS));
 
 class Smi;
-class Type;
 class TypeInfo;
 
 // Type of properties.
diff --git a/src/property.h b/src/property.h
index add9e4d..ebe7d3b 100644
--- a/src/property.h
+++ b/src/property.h
@@ -36,6 +36,7 @@
 
   void Init(Handle<Name> key, Handle<Object> value, PropertyDetails details) {
     DCHECK(key->IsUniqueName());
+    DCHECK_IMPLIES(key->IsPrivate(), !details.IsEnumerable());
     key_ = key;
     value_ = value;
     details_ = details;
@@ -44,6 +45,7 @@
   Descriptor(Handle<Name> key, Handle<Object> value, PropertyDetails details)
       : key_(key), value_(value), details_(details) {
     DCHECK(key->IsUniqueName());
+    DCHECK_IMPLIES(key->IsPrivate(), !details_.IsEnumerable());
   }
 
   Descriptor(Handle<Name> key, Handle<Object> value,
@@ -53,6 +55,7 @@
         value_(value),
         details_(attributes, type, representation, field_index) {
     DCHECK(key->IsUniqueName());
+    DCHECK_IMPLIES(key->IsPrivate(), !details_.IsEnumerable());
   }
 
   friend class DescriptorArray;
diff --git a/src/regexp/jsregexp.cc b/src/regexp/jsregexp.cc
index 0fd1a76..96a778c 100644
--- a/src/regexp/jsregexp.cc
+++ b/src/regexp/jsregexp.cc
@@ -8,7 +8,6 @@
 
 #include "src/base/platform/platform.h"
 #include "src/compilation-cache.h"
-#include "src/compiler.h"
 #include "src/elements.h"
 #include "src/execution.h"
 #include "src/factory.h"
diff --git a/src/regexp/jsregexp.h b/src/regexp/jsregexp.h
index 31c427a..8118889 100644
--- a/src/regexp/jsregexp.h
+++ b/src/regexp/jsregexp.h
@@ -46,7 +46,7 @@
 
   // See ECMA-262 section 15.10.6.2.
   // This function calls the garbage collector if necessary.
-  MUST_USE_RESULT static MaybeHandle<Object> Exec(
+  V8_EXPORT_PRIVATE MUST_USE_RESULT static MaybeHandle<Object> Exec(
       Handle<JSRegExp> regexp, Handle<String> subject, int index,
       Handle<JSObject> lastMatchInfo);
 
@@ -200,7 +200,7 @@
   // is not tracked, however.  As a conservative approximation we track the
   // total regexp code compiled including code that has subsequently been freed
   // and the total executable memory at any point.
-  static const int kRegExpExecutableMemoryLimit = 16 * MB;
+  static const size_t kRegExpExecutableMemoryLimit = 16 * MB;
   static const int kRegExpCompiledLimit = 1 * MB;
   static const int kRegExpTooLargeToOptimize = 20 * KB;
 
diff --git a/src/regexp/regexp-ast.h b/src/regexp/regexp-ast.h
index 406bf84..07a8155 100644
--- a/src/regexp/regexp-ast.h
+++ b/src/regexp/regexp-ast.h
@@ -7,8 +7,8 @@
 
 #include "src/objects.h"
 #include "src/utils.h"
-#include "src/zone-containers.h"
-#include "src/zone.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/regexp/regexp-parser.h b/src/regexp/regexp-parser.h
index a0b975d..2cf937f 100644
--- a/src/regexp/regexp-parser.h
+++ b/src/regexp/regexp-parser.h
@@ -7,7 +7,7 @@
 
 #include "src/objects.h"
 #include "src/regexp/regexp-ast.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index fb05690..b1e640c 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -9,6 +9,7 @@
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
+#include "src/compiler.h"
 #include "src/execution.h"
 #include "src/frames-inl.h"
 #include "src/full-codegen/full-codegen.h"
@@ -54,6 +55,33 @@
 static const int kMaxSizeEarlyOpt =
     5 * FullCodeGenerator::kCodeSizeMultiplier;
 
+#define OPTIMIZATION_REASON_LIST(V)                            \
+  V(DoNotOptimize, "do not optimize")                          \
+  V(HotAndStable, "hot and stable")                            \
+  V(HotEnoughForBaseline, "hot enough for baseline")           \
+  V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
+  V(SmallFunction, "small function")
+
+enum class OptimizationReason : uint8_t {
+#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
+  OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
+#undef OPTIMIZATION_REASON_CONSTANTS
+};
+
+char const* OptimizationReasonToString(OptimizationReason reason) {
+  static char const* reasons[] = {
+#define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
+      OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
+#undef OPTIMIZATION_REASON_TEXTS
+  };
+  size_t const index = static_cast<size_t>(reason);
+  DCHECK_LT(index, arraysize(reasons));
+  return reasons[index];
+}
+
+std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
+  return os << OptimizationReasonToString(reason);
+}
 
 RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
     : isolate_(isolate),
@@ -79,8 +107,15 @@
 
   // Harvest vector-ics as well
   TypeFeedbackVector* vector = function->feedback_vector();
-  int with = 0, gen = 0;
-  vector->ComputeCounts(&with, &gen);
+  int with = 0, gen = 0, type_vector_ic_count = 0;
+  const bool is_interpreted =
+      function->shared()->code()->is_interpreter_trampoline_builtin();
+
+  vector->ComputeCounts(&with, &gen, &type_vector_ic_count, is_interpreted);
+  if (is_interpreted) {
+    DCHECK_EQ(*ic_total_count, 0);
+    *ic_total_count = type_vector_ic_count;
+  }
   *ic_with_type_info_count += with;
   *ic_generic_count += gen;
 
@@ -112,13 +147,17 @@
   }
 }
 
-void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
-  TraceRecompile(function, reason, "optimized");
+void RuntimeProfiler::Optimize(JSFunction* function,
+                               OptimizationReason reason) {
+  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
+  TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
   function->AttemptConcurrentOptimization();
 }
 
-void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
-  TraceRecompile(function, reason, "baseline");
+void RuntimeProfiler::Baseline(JSFunction* function,
+                               OptimizationReason reason) {
+  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
+  TraceRecompile(function, OptimizationReasonToString(reason), "baseline");
 
   // TODO(4280): Fix this to check function is compiled for the interpreter
   // once we have a standard way to check that. For now function will only
@@ -237,9 +276,9 @@
         generic_percentage <= FLAG_generic_ic_threshold) {
       // If this particular function hasn't had any ICs patched for enough
       // ticks, optimize it now.
-      Optimize(function, "hot and stable");
+      Optimize(function, OptimizationReason::kHotAndStable);
     } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      Optimize(function, "not much type info but very hot");
+      Optimize(function, OptimizationReason::kHotWithoutMuchTypeInfo);
     } else {
       shared_code->set_profiler_ticks(ticks + 1);
       if (FLAG_trace_opt_verbose) {
@@ -258,7 +297,7 @@
                 &generic_percentage);
     if (type_percentage >= FLAG_type_info_threshold &&
         generic_percentage <= FLAG_generic_ic_threshold) {
-      Optimize(function, "small function");
+      Optimize(function, OptimizationReason::kSmallFunction);
     } else {
       shared_code->set_profiler_ticks(ticks + 1);
     }
@@ -271,31 +310,16 @@
                                             JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
 
-  SharedFunctionInfo* shared = function->shared();
-  int ticks = shared->profiler_ticks();
-
-  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
-  // than kMaxToplevelSourceSize.
-
   if (FLAG_always_osr) {
     AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal baseline compile as well.
-  } else if (!frame->is_optimized() &&
-             (function->IsMarkedForBaseline() ||
-              function->IsMarkedForOptimization() ||
-              function->IsMarkedForConcurrentOptimization() ||
-              function->IsOptimized())) {
-    // Attempt OSR if we are still running interpreted code even though the
-    // the function has long been marked or even already been optimized.
-    int64_t allowance =
-        kOSRCodeSizeAllowanceBaseIgnition +
-        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(frame);
-    }
+  } else if (MaybeOSRIgnition(function, frame)) {
     return;
   }
 
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();
+
   if (shared->optimization_disabled() &&
       shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
     // Don't baseline functions which have been marked by NeverOptimizeFunction
@@ -304,7 +328,7 @@
   }
 
   if (ticks >= kProfilerTicksBeforeBaseline) {
-    Baseline(function, "hot enough for baseline");
+    Baseline(function, OptimizationReason::kHotEnoughForBaseline);
   }
 }
 
@@ -312,31 +336,16 @@
                                             JavaScriptFrame* frame) {
   if (function->IsInOptimizationQueue()) return;
 
-  SharedFunctionInfo* shared = function->shared();
-  int ticks = shared->profiler_ticks();
-
-  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
-  // than kMaxToplevelSourceSize.
-
   if (FLAG_always_osr) {
     AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
     // Fall through and do a normal optimized compile as well.
-  } else if (!frame->is_optimized() &&
-             (function->IsMarkedForBaseline() ||
-              function->IsMarkedForOptimization() ||
-              function->IsMarkedForConcurrentOptimization() ||
-              function->IsOptimized())) {
-    // Attempt OSR if we are still running interpreted code even though the
-    // the function has long been marked or even already been optimized.
-    int64_t allowance =
-        kOSRCodeSizeAllowanceBaseIgnition +
-        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
-    if (shared->bytecode_array()->Size() <= allowance) {
-      AttemptOnStackReplacement(frame);
-    }
+  } else if (MaybeOSRIgnition(function, frame)) {
     return;
   }
 
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();
+
   if (shared->optimization_disabled()) {
     if (shared->deopt_count() >= FLAG_max_opt_count) {
       // If optimization was disabled due to many deoptimizations,
@@ -348,8 +357,51 @@
     }
     return;
   }
+
   if (function->IsOptimized()) return;
 
+  OptimizationReason reason = ShouldOptimizeIgnition(function, frame);
+
+  if (reason != OptimizationReason::kDoNotOptimize) {
+    Optimize(function, reason);
+  }
+}
+
+bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function,
+                                       JavaScriptFrame* frame) {
+  if (!FLAG_ignition_osr) return false;
+
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();
+
+  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
+  // than kMaxToplevelSourceSize.
+
+  bool osr_before_baselined = function->IsMarkedForBaseline() &&
+                              ShouldOptimizeIgnition(function, frame) !=
+                                  OptimizationReason::kDoNotOptimize;
+  if (!frame->is_optimized() &&
+      (osr_before_baselined || function->IsMarkedForOptimization() ||
+       function->IsMarkedForConcurrentOptimization() ||
+       function->IsOptimized())) {
+    // Attempt OSR if we are still running interpreted code even though the
+    // function has long been marked or even already been optimized.
+    int64_t allowance =
+        kOSRCodeSizeAllowanceBaseIgnition +
+        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
+    if (shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(frame);
+    }
+    return true;
+  }
+  return false;
+}
+
+OptimizationReason RuntimeProfiler::ShouldOptimizeIgnition(
+    JSFunction* function, JavaScriptFrame* frame) {
+  SharedFunctionInfo* shared = function->shared();
+  int ticks = shared->profiler_ticks();
+
   if (ticks >= kProfilerTicksBeforeOptimization) {
     int typeinfo, generic, total, type_percentage, generic_percentage;
     GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
@@ -358,9 +410,9 @@
         generic_percentage <= FLAG_generic_ic_threshold) {
       // If this particular function hasn't had any ICs patched for enough
       // ticks, optimize it now.
-      Optimize(function, "hot and stable");
+      return OptimizationReason::kHotAndStable;
     } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      Optimize(function, "not much type info but very hot");
+      return OptimizationReason::kHotWithoutMuchTypeInfo;
     } else {
       if (FLAG_trace_opt_verbose) {
         PrintF("[not yet optimizing ");
@@ -368,10 +420,12 @@
         PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
                type_percentage);
       }
+      return OptimizationReason::kDoNotOptimize;
     }
   }
   // TODO(rmcilroy): Consider whether we should optimize small functions when
   // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).
+  return OptimizationReason::kDoNotOptimize;
 }
 
 void RuntimeProfiler::MarkCandidatesForOptimization() {
@@ -419,6 +473,5 @@
   any_ic_changed_ = false;
 }
 
-
 }  // namespace internal
 }  // namespace v8
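
The runtime-profiler change above introduces OptimizationReason via an X-macro list, so the enum constants and their trace messages expand from the same source and cannot drift apart. A self-contained sketch of that technique with a hypothetical two-entry list (not V8's real OPTIMIZATION_REASON_LIST):

#include <cassert>
#include <cstdint>
#include <iostream>

// Hypothetical two-entry list standing in for OPTIMIZATION_REASON_LIST.
#define EXAMPLE_REASON_LIST(V)        \
  V(DoNotOptimize, "do not optimize") \
  V(HotAndStable, "hot and stable")

enum class ExampleReason : std::uint8_t {
#define EXAMPLE_REASON_CONSTANTS(Constant, message) k##Constant,
  EXAMPLE_REASON_LIST(EXAMPLE_REASON_CONSTANTS)
#undef EXAMPLE_REASON_CONSTANTS
};

char const* ExampleReasonToString(ExampleReason reason) {
  // Expanded from the same list as the enum, so the strings stay in sync.
  static char const* reasons[] = {
#define EXAMPLE_REASON_TEXTS(Constant, message) message,
      EXAMPLE_REASON_LIST(EXAMPLE_REASON_TEXTS)
#undef EXAMPLE_REASON_TEXTS
  };
  size_t const index = static_cast<size_t>(reason);
  assert(index < sizeof(reasons) / sizeof(reasons[0]));
  return reasons[index];
}

int main() {
  std::cout << ExampleReasonToString(ExampleReason::kHotAndStable) << "\n";
  return 0;
}
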
diff --git a/src/runtime-profiler.h b/src/runtime-profiler.h
index 7f2c902..5c538c4 100644
--- a/src/runtime-profiler.h
+++ b/src/runtime-profiler.h
@@ -13,6 +13,7 @@
 class Isolate;
 class JavaScriptFrame;
 class JSFunction;
+enum class OptimizationReason : uint8_t;
 
 class RuntimeProfiler {
  public:
@@ -30,8 +31,13 @@
                                 int frame_count);
   void MaybeBaselineIgnition(JSFunction* function, JavaScriptFrame* frame);
   void MaybeOptimizeIgnition(JSFunction* function, JavaScriptFrame* frame);
-  void Optimize(JSFunction* function, const char* reason);
-  void Baseline(JSFunction* function, const char* reason);
+  // Potentially attempts OSR from ignition and returns whether no other
+  // optimization attempts should be made.
+  bool MaybeOSRIgnition(JSFunction* function, JavaScriptFrame* frame);
+  OptimizationReason ShouldOptimizeIgnition(JSFunction* function,
+                                            JavaScriptFrame* frame);
+  void Optimize(JSFunction* function, OptimizationReason reason);
+  void Baseline(JSFunction* function, OptimizationReason reason);
 
   Isolate* isolate_;
   bool any_ic_changed_;
diff --git a/src/runtime/runtime-array.cc b/src/runtime/runtime-array.cc
index 4b7cd39..cbde8f3 100644
--- a/src/runtime/runtime-array.cc
+++ b/src/runtime/runtime-array.cc
@@ -375,15 +375,9 @@
   uint32_t index = static_cast<uint32_t>(key);
 
   if (index >= capacity) {
-    if (object->map()->is_prototype_map() ||
-        object->WouldConvertToSlowElements(index)) {
-      // We don't want to allow operations that cause lazy deopt. Return a Smi
-      // as a signal that optimized code should eagerly deoptimize.
+    if (!object->GetElementsAccessor()->GrowCapacity(object, index)) {
       return Smi::FromInt(0);
     }
-
-    uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1);
-    object->GetElementsAccessor()->GrowCapacityAndConvert(object, new_capacity);
   }
 
   // On success, return the fixed array elements.
diff --git a/src/runtime/runtime-classes.cc b/src/runtime/runtime-classes.cc
index 5448159..323604f 100644
--- a/src/runtime/runtime-classes.cc
+++ b/src/runtime/runtime-classes.cc
@@ -95,7 +95,8 @@
       prototype_parent = isolate->factory()->null_value();
     } else if (super_class->IsConstructor()) {
       DCHECK(!super_class->IsJSFunction() ||
-             !Handle<JSFunction>::cast(super_class)->shared()->is_resumable());
+             !IsResumableFunction(
+                 Handle<JSFunction>::cast(super_class)->shared()->kind()));
       ASSIGN_RETURN_ON_EXCEPTION(
           isolate, prototype_parent,
           Runtime::GetObjectProperty(isolate, super_class,
@@ -187,52 +188,65 @@
                            end_position));
 }
 
+namespace {
 
-static MaybeHandle<Object> LoadFromSuper(Isolate* isolate,
+enum class SuperMode { kLoad, kStore };
+
+MaybeHandle<JSReceiver> GetSuperHolder(
+    Isolate* isolate, Handle<Object> receiver, Handle<JSObject> home_object,
+    SuperMode mode, MaybeHandle<Name> maybe_name, uint32_t index) {
+  if (home_object->IsAccessCheckNeeded() &&
+      !isolate->MayAccess(handle(isolate->context()), home_object)) {
+    isolate->ReportFailedAccessCheck(home_object);
+    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, JSReceiver);
+  }
+
+  PrototypeIterator iter(isolate, home_object);
+  Handle<Object> proto = PrototypeIterator::GetCurrent(iter);
+  if (!proto->IsJSReceiver()) {
+    MessageTemplate::Template message =
+        mode == SuperMode::kLoad ? MessageTemplate::kNonObjectPropertyLoad
+                                 : MessageTemplate::kNonObjectPropertyStore;
+    Handle<Name> name;
+    if (!maybe_name.ToHandle(&name)) {
+      name = isolate->factory()->Uint32ToString(index);
+    }
+    THROW_NEW_ERROR(isolate, NewTypeError(message, name, proto), JSReceiver);
+  }
+  return Handle<JSReceiver>::cast(proto);
+}
+
+MaybeHandle<Object> LoadFromSuper(Isolate* isolate, Handle<Object> receiver,
+                                  Handle<JSObject> home_object,
+                                  Handle<Name> name) {
+  Handle<JSReceiver> holder;
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, holder,
+      GetSuperHolder(isolate, receiver, home_object, SuperMode::kLoad, name, 0),
+      Object);
+  LookupIterator it(receiver, name, holder);
+  Handle<Object> result;
+  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, Object::GetProperty(&it), Object);
+  return result;
+}
+
+MaybeHandle<Object> LoadElementFromSuper(Isolate* isolate,
                                          Handle<Object> receiver,
                                          Handle<JSObject> home_object,
-                                         Handle<Name> name) {
-  if (home_object->IsAccessCheckNeeded() &&
-      !isolate->MayAccess(handle(isolate->context()), home_object)) {
-    isolate->ReportFailedAccessCheck(home_object);
-    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
-  }
-
-  PrototypeIterator iter(isolate, home_object);
-  Handle<Object> proto = PrototypeIterator::GetCurrent(iter);
-  if (!proto->IsJSReceiver()) {
-    return Object::ReadAbsentProperty(isolate, proto, name);
-  }
-
-  LookupIterator it(receiver, name, Handle<JSReceiver>::cast(proto));
+                                         uint32_t index) {
+  Handle<JSReceiver> holder;
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, holder,
+      GetSuperHolder(isolate, receiver, home_object, SuperMode::kLoad,
+                     MaybeHandle<Name>(), index),
+      Object);
+  LookupIterator it(isolate, receiver, index, holder);
   Handle<Object> result;
   ASSIGN_RETURN_ON_EXCEPTION(isolate, result, Object::GetProperty(&it), Object);
   return result;
 }
 
-static MaybeHandle<Object> LoadElementFromSuper(Isolate* isolate,
-                                                Handle<Object> receiver,
-                                                Handle<JSObject> home_object,
-                                                uint32_t index) {
-  if (home_object->IsAccessCheckNeeded() &&
-      !isolate->MayAccess(handle(isolate->context()), home_object)) {
-    isolate->ReportFailedAccessCheck(home_object);
-    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
-  }
-
-  PrototypeIterator iter(isolate, home_object);
-  Handle<Object> proto = PrototypeIterator::GetCurrent(iter);
-  if (!proto->IsJSReceiver()) {
-    Handle<Object> name = isolate->factory()->NewNumberFromUint(index);
-    return Object::ReadAbsentProperty(isolate, proto, name);
-  }
-
-  LookupIterator it(isolate, receiver, index, Handle<JSReceiver>::cast(proto));
-  Handle<Object> result;
-  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, Object::GetProperty(&it), Object);
-  return result;
-}
-
+}  // anonymous namespace
 
 RUNTIME_FUNCTION(Runtime_LoadFromSuper) {
   HandleScope scope(isolate);
@@ -272,50 +286,43 @@
                            LoadFromSuper(isolate, receiver, home_object, name));
 }
 
+namespace {
 
-static Object* StoreToSuper(Isolate* isolate, Handle<JSObject> home_object,
-                            Handle<Object> receiver, Handle<Name> name,
-                            Handle<Object> value, LanguageMode language_mode) {
-  if (home_object->IsAccessCheckNeeded() &&
-      !isolate->MayAccess(handle(isolate->context()), home_object)) {
-    isolate->ReportFailedAccessCheck(home_object);
-    RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
-  }
-
-  PrototypeIterator iter(isolate, home_object);
-  Handle<Object> proto = PrototypeIterator::GetCurrent(iter);
-  if (!proto->IsJSReceiver()) return isolate->heap()->undefined_value();
-
-  LookupIterator it(receiver, name, Handle<JSReceiver>::cast(proto));
+MaybeHandle<Object> StoreToSuper(Isolate* isolate, Handle<JSObject> home_object,
+                                 Handle<Object> receiver, Handle<Name> name,
+                                 Handle<Object> value,
+                                 LanguageMode language_mode) {
+  Handle<JSReceiver> holder;
+  ASSIGN_RETURN_ON_EXCEPTION(isolate, holder,
+                             GetSuperHolder(isolate, receiver, home_object,
+                                            SuperMode::kStore, name, 0),
+                             Object);
+  LookupIterator it(receiver, name, holder);
   MAYBE_RETURN(Object::SetSuperProperty(&it, value, language_mode,
                                         Object::CERTAINLY_NOT_STORE_FROM_KEYED),
-               isolate->heap()->exception());
-  return *value;
+               MaybeHandle<Object>());
+  return value;
 }
 
-
-static Object* StoreElementToSuper(Isolate* isolate,
-                                   Handle<JSObject> home_object,
-                                   Handle<Object> receiver, uint32_t index,
-                                   Handle<Object> value,
-                                   LanguageMode language_mode) {
-  if (home_object->IsAccessCheckNeeded() &&
-      !isolate->MayAccess(handle(isolate->context()), home_object)) {
-    isolate->ReportFailedAccessCheck(home_object);
-    RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
-  }
-
-  PrototypeIterator iter(isolate, home_object);
-  Handle<Object> proto = PrototypeIterator::GetCurrent(iter);
-  if (!proto->IsJSReceiver()) return isolate->heap()->undefined_value();
-
-  LookupIterator it(isolate, receiver, index, Handle<JSReceiver>::cast(proto));
+MaybeHandle<Object> StoreElementToSuper(Isolate* isolate,
+                                        Handle<JSObject> home_object,
+                                        Handle<Object> receiver, uint32_t index,
+                                        Handle<Object> value,
+                                        LanguageMode language_mode) {
+  Handle<JSReceiver> holder;
+  ASSIGN_RETURN_ON_EXCEPTION(
+      isolate, holder,
+      GetSuperHolder(isolate, receiver, home_object, SuperMode::kStore,
+                     MaybeHandle<Name>(), index),
+      Object);
+  LookupIterator it(isolate, receiver, index, holder);
   MAYBE_RETURN(Object::SetSuperProperty(&it, value, language_mode,
                                         Object::MAY_BE_STORE_FROM_KEYED),
-               isolate->heap()->exception());
-  return *value;
+               MaybeHandle<Object>());
+  return value;
 }
 
+}  // anonymous namespace
 
 RUNTIME_FUNCTION(Runtime_StoreToSuper_Strict) {
   HandleScope scope(isolate);
@@ -325,7 +332,8 @@
   CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
 
-  return StoreToSuper(isolate, home_object, receiver, name, value, STRICT);
+  RETURN_RESULT_OR_FAILURE(isolate, StoreToSuper(isolate, home_object, receiver,
+                                                 name, value, STRICT));
 }
 
 
@@ -337,14 +345,13 @@
   CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
 
-  return StoreToSuper(isolate, home_object, receiver, name, value, SLOPPY);
+  RETURN_RESULT_OR_FAILURE(isolate, StoreToSuper(isolate, home_object, receiver,
+                                                 name, value, SLOPPY));
 }
 
-
-static Object* StoreKeyedToSuper(Isolate* isolate, Handle<JSObject> home_object,
-                                 Handle<Object> receiver, Handle<Object> key,
-                                 Handle<Object> value,
-                                 LanguageMode language_mode) {
+static MaybeHandle<Object> StoreKeyedToSuper(
+    Isolate* isolate, Handle<JSObject> home_object, Handle<Object> receiver,
+    Handle<Object> key, Handle<Object> value, LanguageMode language_mode) {
   uint32_t index = 0;
 
   if (key->ToArrayIndex(&index)) {
@@ -352,8 +359,8 @@
                                language_mode);
   }
   Handle<Name> name;
-  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, name,
-                                     Object::ToName(isolate, key));
+  ASSIGN_RETURN_ON_EXCEPTION(isolate, name, Object::ToName(isolate, key),
+                             Object);
   // TODO(verwaest): Unify using LookupIterator.
   if (name->AsArrayIndex(&index)) {
     return StoreElementToSuper(isolate, home_object, receiver, index, value,
@@ -372,7 +379,9 @@
   CONVERT_ARG_HANDLE_CHECKED(Object, key, 2);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
 
-  return StoreKeyedToSuper(isolate, home_object, receiver, key, value, STRICT);
+  RETURN_RESULT_OR_FAILURE(
+      isolate,
+      StoreKeyedToSuper(isolate, home_object, receiver, key, value, STRICT));
 }
 
 
@@ -384,7 +393,9 @@
   CONVERT_ARG_HANDLE_CHECKED(Object, key, 2);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
 
-  return StoreKeyedToSuper(isolate, home_object, receiver, key, value, SLOPPY);
+  RETURN_RESULT_OR_FAILURE(
+      isolate,
+      StoreKeyedToSuper(isolate, home_object, receiver, key, value, SLOPPY));
 }
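
The runtime-classes rewrite above folds the duplicated access check and prototype walk into a single GetSuperHolder helper parameterized by a SuperMode enum, so loads and stores from super share one resolution and error path. A toy sketch of that shared-helper shape, assuming nothing about V8's handles or message templates (all names below are illustrative):

#include <iostream>
#include <stdexcept>
#include <string>

enum class SuperMode { kLoad, kStore };

// One resolver for both paths; only the error message depends on the mode.
std::string GetSuperHolderOrThrow(bool prototype_is_object, SuperMode mode,
                                  const std::string& property) {
  if (!prototype_is_object) {
    const char* what = mode == SuperMode::kLoad
                           ? "cannot read property '"
                           : "cannot write property '";
    throw std::runtime_error(what + property +
                             "' of a non-object prototype");
  }
  return "holder";  // Stand-in for the resolved receiver.
}

int main() {
  for (SuperMode mode : {SuperMode::kLoad, SuperMode::kStore}) {
    try {
      GetSuperHolderOrThrow(false, mode, "x");
    } catch (const std::exception& e) {
      std::cout << e.what() << "\n";
    }
  }
  return 0;
}
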
 
 
diff --git a/src/runtime/runtime-compiler.cc b/src/runtime/runtime-compiler.cc
index b5910e4..01ec73d 100644
--- a/src/runtime/runtime-compiler.cc
+++ b/src/runtime/runtime-compiler.cc
@@ -11,6 +11,7 @@
 #include "src/deoptimizer.h"
 #include "src/frames-inl.h"
 #include "src/full-codegen/full-codegen.h"
+#include "src/interpreter/bytecode-array-iterator.h"
 #include "src/isolate-inl.h"
 #include "src/messages.h"
 #include "src/v8threads.h"
@@ -172,6 +173,17 @@
 
   DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
   DCHECK(type == deoptimizer->bailout_type());
+  DCHECK_NULL(isolate->context());
+
+  // TODO(turbofan): For Crankshaft we restore the context before objects are
+  // being materialized, because it never de-materializes the context but it
+  // requires a context to materialize arguments objects. This is specific to
+  // Crankshaft and can be removed once only TurboFan goes through here.
+  if (!optimized_code->is_turbofanned()) {
+    JavaScriptFrameIterator top_it(isolate);
+    JavaScriptFrame* top_frame = top_it.frame();
+    isolate->set_context(Context::cast(top_frame->context()));
+  }
 
   // Make sure to materialize objects before causing any allocation.
   JavaScriptFrameIterator it(isolate);
@@ -179,9 +191,11 @@
   delete deoptimizer;
 
   // Ensure the context register is updated for materialized objects.
-  JavaScriptFrameIterator top_it(isolate);
-  JavaScriptFrame* top_frame = top_it.frame();
-  isolate->set_context(Context::cast(top_frame->context()));
+  if (optimized_code->is_turbofanned()) {
+    JavaScriptFrameIterator top_it(isolate);
+    JavaScriptFrame* top_frame = top_it.frame();
+    isolate->set_context(Context::cast(top_frame->context()));
+  }
 
   if (type == Deoptimizer::LAZY) {
     return isolate->heap()->undefined_value();
@@ -279,7 +293,20 @@
   // Reset the OSR loop nesting depth to disarm back edges.
   bytecode->set_osr_loop_nesting_level(0);
 
-  return BailoutId(iframe->GetBytecodeOffset());
+  // Translate the offset of the jump instruction to the jump target offset of
+  // that instruction so that the derived BailoutId points to the loop header.
+  // TODO(mstarzinger): This can be merged with {BytecodeBranchAnalysis} which
+  // already performs a pre-pass over the bytecode stream anyway.
+  int jump_offset = iframe->GetBytecodeOffset();
+  interpreter::BytecodeArrayIterator iterator(bytecode);
+  while (iterator.current_offset() + iterator.current_prefix_offset() <
+         jump_offset) {
+    iterator.Advance();
+  }
+  DCHECK(interpreter::Bytecodes::IsJump(iterator.current_bytecode()));
+  int jump_target_offset = iterator.GetJumpTargetOffset();
+
+  return BailoutId(jump_target_offset);
 }
 
 }  // namespace
@@ -335,10 +362,18 @@
       function->shared()->increment_deopt_count();
 
       if (result->is_turbofanned()) {
-        // TurboFanned OSR code cannot be installed into the function.
-        // But the function is obviously hot, so optimize it next time.
-        function->ReplaceCode(
-            isolate->builtins()->builtin(Builtins::kCompileOptimized));
+        // When we're waiting for concurrent optimization, set to compile on
+        // the next call - otherwise we'd run unoptimized once more
+        // and potentially compile for OSR another time as well.
+        if (function->IsMarkedForConcurrentOptimization()) {
+          if (FLAG_trace_osr) {
+            PrintF("[OSR - Re-marking ");
+            function->PrintName();
+            PrintF(" for non-concurrent optimization]\n");
+          }
+          function->ReplaceCode(
+              isolate->builtins()->builtin(Builtins::kCompileOptimized));
+        }
       } else {
         // Crankshafted OSR code can be installed into the function.
         function->ReplaceCode(*result);
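
The OSR change above walks the bytecode with a BytecodeArrayIterator until it reaches the back-edge jump the interpreter stopped at, then derives the BailoutId from that jump's target (the loop header) rather than from the jump's own offset. A toy model of that lookup, with a simplified instruction record in place of V8's iterator API:

#include <cassert>
#include <vector>

// Simplified instruction record; V8 instead walks a BytecodeArrayIterator and
// asks it for current_offset(), IsJump() and GetJumpTargetOffset().
struct Instruction {
  int offset;
  bool is_jump;
  int jump_target;  // Only meaningful when is_jump is true.
};

int OsrBailoutOffset(const std::vector<Instruction>& bytecode,
                     int jump_offset) {
  size_t i = 0;
  // Advance until we reach the back-edge jump the interpreter stopped at.
  while (bytecode[i].offset < jump_offset) ++i;
  assert(bytecode[i].offset == jump_offset && bytecode[i].is_jump);
  // The OSR entry is the loop header the jump targets, not the jump itself.
  return bytecode[i].jump_target;
}

int main() {
  // A loop header at offset 2 and its back-edge jump at offset 7.
  std::vector<Instruction> bytecode = {
      {0, false, 0}, {2, false, 0}, {5, false, 0}, {7, true, 2}};
  return OsrBailoutOffset(bytecode, 7) == 2 ? 0 : 1;
}
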
diff --git a/src/runtime/runtime-debug.cc b/src/runtime/runtime-debug.cc
index a8c465a..2d217b8 100644
--- a/src/runtime/runtime-debug.cc
+++ b/src/runtime/runtime-debug.cc
@@ -9,6 +9,7 @@
 #include "src/debug/debug-frames.h"
 #include "src/debug/debug-scopes.h"
 #include "src/debug/debug.h"
+#include "src/debug/liveedit.h"
 #include "src/frames-inl.h"
 #include "src/globals.h"
 #include "src/interpreter/bytecodes.h"
@@ -1521,7 +1522,8 @@
 RUNTIME_FUNCTION(Runtime_CollectGarbage) {
   SealHandleScope shs(isolate);
   DCHECK(args.length() == 1);
-  isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
+  isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+                                     GarbageCollectionReason::kRuntime);
   return isolate->heap()->undefined_value();
 }
 
diff --git a/src/runtime/runtime-forin.cc b/src/runtime/runtime-forin.cc
index 0d624e9..bd37cdc 100644
--- a/src/runtime/runtime-forin.cc
+++ b/src/runtime/runtime-forin.cc
@@ -140,17 +140,6 @@
   return MakeTriple(*cache_type, *cache_array, Smi::FromInt(cache_length));
 }
 
-
-RUNTIME_FUNCTION(Runtime_ForInDone) {
-  SealHandleScope scope(isolate);
-  DCHECK_EQ(2, args.length());
-  CONVERT_SMI_ARG_CHECKED(index, 0);
-  CONVERT_SMI_ARG_CHECKED(length, 1);
-  DCHECK_LE(0, index);
-  DCHECK_LE(index, length);
-  return isolate->heap()->ToBoolean(index == length);
-}
-
 RUNTIME_FUNCTION(Runtime_ForInHasProperty) {
   HandleScope scope(isolate);
   DCHECK_EQ(2, args.length());
@@ -188,15 +177,5 @@
                            HasEnumerableProperty(isolate, receiver, key));
 }
 
-
-RUNTIME_FUNCTION(Runtime_ForInStep) {
-  SealHandleScope scope(isolate);
-  DCHECK_EQ(1, args.length());
-  CONVERT_SMI_ARG_CHECKED(index, 0);
-  DCHECK_LE(0, index);
-  DCHECK_LT(index, Smi::kMaxValue);
-  return Smi::FromInt(index + 1);
-}
-
 }  // namespace internal
 }  // namespace v8
diff --git a/src/runtime/runtime-function.cc b/src/runtime/runtime-function.cc
index 298f1a1..fa50941 100644
--- a/src/runtime/runtime-function.cc
+++ b/src/runtime/runtime-function.cc
@@ -174,6 +174,7 @@
     target_shared->set_bytecode_array(source_shared->bytecode_array());
   }
   target_shared->set_scope_info(source_shared->scope_info());
+  target_shared->set_outer_scope_info(source_shared->outer_scope_info());
   target_shared->set_length(source_shared->length());
   target_shared->set_num_literals(source_shared->num_literals());
   target_shared->set_feedback_metadata(source_shared->feedback_metadata());
diff --git a/src/runtime/runtime-generator.cc b/src/runtime/runtime-generator.cc
index dcc48c5..bb63a3d 100644
--- a/src/runtime/runtime-generator.cc
+++ b/src/runtime/runtime-generator.cc
@@ -18,7 +18,7 @@
   DCHECK(args.length() == 2);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
   CONVERT_ARG_HANDLE_CHECKED(Object, receiver, 1);
-  CHECK(function->shared()->is_resumable());
+  CHECK(IsResumableFunction(function->shared()->kind()));
 
   Handle<FixedArray> operand_stack;
   if (function->shared()->HasBytecodeArray()) {
@@ -49,7 +49,7 @@
 
   JavaScriptFrameIterator stack_iterator(isolate);
   JavaScriptFrame* frame = stack_iterator.frame();
-  CHECK(frame->function()->shared()->is_resumable());
+  CHECK(IsResumableFunction(frame->function()->shared()->kind()));
   DCHECK_EQ(frame->function(), generator_object->function());
   DCHECK(frame->function()->shared()->is_compiled());
   DCHECK(!frame->function()->IsOptimized());
diff --git a/src/runtime/runtime-i18n.cc b/src/runtime/runtime-i18n.cc
index 8b9d92e..7fcb802 100644
--- a/src/runtime/runtime-i18n.cc
+++ b/src/runtime/runtime-i18n.cc
@@ -25,6 +25,8 @@
 #include "unicode/decimfmt.h"
 #include "unicode/dtfmtsym.h"
 #include "unicode/dtptngen.h"
+#include "unicode/fieldpos.h"
+#include "unicode/fpositer.h"
 #include "unicode/locid.h"
 #include "unicode/normalizer2.h"
 #include "unicode/numfmt.h"
@@ -322,7 +324,7 @@
   Handle<Symbol> marker = isolate->factory()->intl_impl_object_symbol();
 
   Handle<Object> impl = JSReceiver::GetDataProperty(obj, marker);
-  if (impl->IsTheHole(isolate)) {
+  if (!impl->IsJSObject()) {
     THROW_NEW_ERROR_RETURN_FAILURE(
         isolate, NewTypeError(MessageTemplate::kNotIntlObject, obj));
   }
@@ -393,6 +395,138 @@
                    result.length())));
 }
 
+namespace {
+// The list comes from third_party/icu/source/i18n/unicode/udat.h.
+// They're mapped to DateTimeFormat components listed at
+// https://tc39.github.io/ecma402/#sec-datetimeformat-abstracts .
+
+Handle<String> IcuDateFieldIdToDateType(int32_t field_id, Isolate* isolate) {
+  switch (field_id) {
+    case -1:
+      return isolate->factory()->literal_string();
+    case UDAT_YEAR_FIELD:
+    case UDAT_EXTENDED_YEAR_FIELD:
+    case UDAT_YEAR_NAME_FIELD:
+      return isolate->factory()->year_string();
+    case UDAT_MONTH_FIELD:
+    case UDAT_STANDALONE_MONTH_FIELD:
+      return isolate->factory()->month_string();
+    case UDAT_DATE_FIELD:
+      return isolate->factory()->day_string();
+    case UDAT_HOUR_OF_DAY1_FIELD:
+    case UDAT_HOUR_OF_DAY0_FIELD:
+    case UDAT_HOUR1_FIELD:
+    case UDAT_HOUR0_FIELD:
+      return isolate->factory()->hour_string();
+    case UDAT_MINUTE_FIELD:
+      return isolate->factory()->minute_string();
+    case UDAT_SECOND_FIELD:
+      return isolate->factory()->second_string();
+    case UDAT_DAY_OF_WEEK_FIELD:
+    case UDAT_DOW_LOCAL_FIELD:
+    case UDAT_STANDALONE_DAY_FIELD:
+      return isolate->factory()->weekday_string();
+    case UDAT_AM_PM_FIELD:
+      return isolate->factory()->dayperiod_string();
+    case UDAT_TIMEZONE_FIELD:
+    case UDAT_TIMEZONE_RFC_FIELD:
+    case UDAT_TIMEZONE_GENERIC_FIELD:
+    case UDAT_TIMEZONE_SPECIAL_FIELD:
+    case UDAT_TIMEZONE_LOCALIZED_GMT_OFFSET_FIELD:
+    case UDAT_TIMEZONE_ISO_FIELD:
+    case UDAT_TIMEZONE_ISO_LOCAL_FIELD:
+      return isolate->factory()->timeZoneName_string();
+    case UDAT_ERA_FIELD:
+      return isolate->factory()->era_string();
+    default:
+      // Other UDAT_*_FIELDs cannot show up because there is no way to specify
+      // them via options of Intl.DateTimeFormat.
+      UNREACHABLE();
+      // To prevent MSVC from issuing C4715 warning.
+      return Handle<String>();
+  }
+}
+
+bool AddElement(Handle<JSArray> array, int index, int32_t field_id,
+                const icu::UnicodeString& formatted, int32_t begin, int32_t end,
+                Isolate* isolate) {
+  HandleScope scope(isolate);
+  Factory* factory = isolate->factory();
+  Handle<JSObject> element = factory->NewJSObject(isolate->object_function());
+  Handle<String> value = IcuDateFieldIdToDateType(field_id, isolate);
+  JSObject::AddProperty(element, factory->type_string(), value, NONE);
+
+  icu::UnicodeString field(formatted.tempSubStringBetween(begin, end));
+  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+      isolate, value, factory->NewStringFromTwoByte(Vector<const uint16_t>(
+                          reinterpret_cast<const uint16_t*>(field.getBuffer()),
+                          field.length())),
+      false);
+
+  JSObject::AddProperty(element, factory->value_string(), value, NONE);
+  RETURN_ON_EXCEPTION_VALUE(
+      isolate, JSObject::AddDataElement(array, index, element, NONE), false);
+  return true;
+}
+
+}  // namespace
+
+RUNTIME_FUNCTION(Runtime_InternalDateFormatToParts) {
+  HandleScope scope(isolate);
+  Factory* factory = isolate->factory();
+
+  DCHECK(args.length() == 2);
+
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, date_format_holder, 0);
+  CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 1);
+
+  Handle<Object> value;
+  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, value, Object::ToNumber(date));
+
+  icu::SimpleDateFormat* date_format =
+      DateFormat::UnpackDateFormat(isolate, date_format_holder);
+  if (!date_format) return isolate->ThrowIllegalOperation();
+
+  icu::UnicodeString formatted;
+  icu::FieldPositionIterator fp_iter;
+  icu::FieldPosition fp;
+  UErrorCode status = U_ZERO_ERROR;
+  date_format->format(value->Number(), formatted, &fp_iter, status);
+  if (U_FAILURE(status)) return isolate->heap()->undefined_value();
+
+  Handle<JSArray> result = factory->NewJSArray(0);
+  int32_t length = formatted.length();
+  if (length == 0) return *result;
+
+  int index = 0;
+  int32_t previous_end_pos = 0;
+  while (fp_iter.next(fp)) {
+    int32_t begin_pos = fp.getBeginIndex();
+    int32_t end_pos = fp.getEndIndex();
+
+    if (previous_end_pos < begin_pos) {
+      if (!AddElement(result, index, -1, formatted, previous_end_pos, begin_pos,
+                      isolate)) {
+        return isolate->heap()->undefined_value();
+      }
+      ++index;
+    }
+    if (!AddElement(result, index, fp.getField(), formatted, begin_pos, end_pos,
+                    isolate)) {
+      return isolate->heap()->undefined_value();
+    }
+    previous_end_pos = end_pos;
+    ++index;
+  }
+  if (previous_end_pos < length) {
+    if (!AddElement(result, index, -1, formatted, previous_end_pos, length,
+                    isolate)) {
+      return isolate->heap()->undefined_value();
+    }
+  }
+  JSObject::ValidateElements(result);
+  return *result;
+}
 
 RUNTIME_FUNCTION(Runtime_InternalDateParse) {
   HandleScope scope(isolate);
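
Runtime_InternalDateFormatToParts above walks ICU's FieldPositionIterator over the formatted string, emits a {type, value} element for each recognized field, and fills any uncovered gap with a "literal" part so the parts concatenate back to the full string. A standalone sketch of that gap-filling split, using plain structs in place of ICU and JS objects (names are illustrative):

#include <iostream>
#include <string>
#include <vector>

// Toy stand-in for a FieldPositionIterator result: each field knows its type
// name and its [begin, end) range in the formatted string.
struct Field {
  std::string type;
  int begin;
  int end;
};

struct Part {
  std::string type;
  std::string value;
};

// Ranges not covered by any field become "literal" parts, mirroring the
// previous_end_pos bookkeeping in the runtime function above.
std::vector<Part> ToParts(const std::string& formatted,
                          const std::vector<Field>& fields) {
  std::vector<Part> parts;
  int previous_end = 0;
  for (const Field& field : fields) {
    if (previous_end < field.begin) {
      parts.push_back(
          {"literal", formatted.substr(previous_end, field.begin - previous_end)});
    }
    parts.push_back(
        {field.type, formatted.substr(field.begin, field.end - field.begin)});
    previous_end = field.end;
  }
  if (previous_end < static_cast<int>(formatted.size())) {
    parts.push_back({"literal", formatted.substr(previous_end)});
  }
  return parts;
}

int main() {
  // "12/31/2016": month, day and year fields; the slashes become literals.
  std::vector<Part> parts = ToParts(
      "12/31/2016", {{"month", 0, 2}, {"day", 3, 5}, {"year", 6, 10}});
  for (const Part& p : parts) std::cout << p.type << ": " << p.value << "\n";
  return 0;
}
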
diff --git a/src/runtime/runtime-internal.cc b/src/runtime/runtime-internal.cc
index 3de0f16..26882b5 100644
--- a/src/runtime/runtime-internal.cc
+++ b/src/runtime/runtime-internal.cc
@@ -120,18 +120,17 @@
       error, isolate->factory()->stack_trace_symbol());
   // Patch the stack trace (array of <receiver, function, code, position>).
   if (stack_trace_obj->IsJSArray()) {
-    Handle<FixedArray> stack_elements(
-        FixedArray::cast(JSArray::cast(*stack_trace_obj)->elements()));
-    DCHECK_EQ(1, stack_elements->length() % 4);
-    DCHECK(Code::cast(stack_elements->get(3))->kind() == Code::WASM_FUNCTION);
-    DCHECK(stack_elements->get(4)->IsSmi() &&
-           Smi::cast(stack_elements->get(4))->value() >= 0);
-    stack_elements->set(4, Smi::FromInt(-1 - byte_offset));
+    Handle<FrameArray> stack_elements(
+        FrameArray::cast(JSArray::cast(*stack_trace_obj)->elements()));
+    DCHECK(stack_elements->Code(0)->kind() == AbstractCode::WASM_FUNCTION);
+    DCHECK(stack_elements->Offset(0)->value() >= 0);
+    stack_elements->SetOffset(0, Smi::FromInt(-1 - byte_offset));
   }
-  Handle<Object> detailed_stack_trace_obj = JSReceiver::GetDataProperty(
-      error, isolate->factory()->detailed_stack_trace_symbol());
+
   // Patch the detailed stack trace (array of JSObjects with various
   // properties).
+  Handle<Object> detailed_stack_trace_obj = JSReceiver::GetDataProperty(
+      error, isolate->factory()->detailed_stack_trace_symbol());
   if (detailed_stack_trace_obj->IsJSArray()) {
     Handle<FixedArray> stack_elements(
         FixedArray::cast(JSArray::cast(*detailed_stack_trace_obj)->elements()));
@@ -235,8 +234,7 @@
 
 RUNTIME_FUNCTION(Runtime_ThrowInvalidStringLength) {
   HandleScope scope(isolate);
-  THROW_NEW_ERROR_RETURN_FAILURE(
-      isolate, NewRangeError(MessageTemplate::kInvalidStringLength));
+  THROW_NEW_ERROR_RETURN_FAILURE(isolate, NewInvalidStringLengthError());
 }
 
 RUNTIME_FUNCTION(Runtime_ThrowIteratorResultNotAnObject) {
@@ -272,6 +270,23 @@
       isolate, NewTypeError(MessageTemplate::kApplyNonFunction, object, type));
 }
 
+namespace {
+
+void PromiseRejectEvent(Isolate* isolate, Handle<JSObject> promise,
+                        Handle<Object> rejected_promise, Handle<Object> value,
+                        bool debug_event) {
+  if (isolate->debug()->is_active() && debug_event) {
+    isolate->debug()->OnPromiseReject(rejected_promise, value);
+  }
+  Handle<Symbol> key = isolate->factory()->promise_has_handler_symbol();
+  // Do not report if we actually have a handler.
+  if (JSReceiver::GetDataProperty(promise, key)->IsUndefined(isolate)) {
+    isolate->ReportPromiseReject(promise, value,
+                                 v8::kPromiseRejectWithNoHandler);
+  }
+}
+
+}  // namespace
 
 RUNTIME_FUNCTION(Runtime_PromiseRejectEvent) {
   DCHECK(args.length() == 3);
@@ -279,16 +294,27 @@
   CONVERT_ARG_HANDLE_CHECKED(JSObject, promise, 0);
   CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
   CONVERT_BOOLEAN_ARG_CHECKED(debug_event, 2);
-  if (debug_event) isolate->debug()->OnPromiseReject(promise, value);
-  Handle<Symbol> key = isolate->factory()->promise_has_handler_symbol();
-  // Do not report if we actually have a handler.
-  if (JSReceiver::GetDataProperty(promise, key)->IsUndefined(isolate)) {
-    isolate->ReportPromiseReject(promise, value,
-                                 v8::kPromiseRejectWithNoHandler);
-  }
+
+  PromiseRejectEvent(isolate, promise, promise, value, debug_event);
   return isolate->heap()->undefined_value();
 }
 
+RUNTIME_FUNCTION(Runtime_PromiseRejectEventFromStack) {
+  DCHECK(args.length() == 2);
+  HandleScope scope(isolate);
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, promise, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
+
+  Handle<Object> rejected_promise = promise;
+  if (isolate->debug()->is_active()) {
+    // If the Promise.reject call is caught, then this will return
+    // undefined, which will be interpreted by PromiseRejectEvent
+    // as being a caught exception event.
+    rejected_promise = isolate->GetPromiseOnStackOnThrow();
+  }
+  PromiseRejectEvent(isolate, promise, rejected_promise, value, true);
+  return isolate->heap()->undefined_value();
+}
 
 RUNTIME_FUNCTION(Runtime_PromiseRevokeReject) {
   DCHECK(args.length() == 1);
@@ -330,7 +356,7 @@
   CONVERT_SMI_ARG_CHECKED(size, 0);
   CHECK(IsAligned(size, kPointerSize));
   CHECK(size > 0);
-  CHECK(size <= Page::kMaxRegularHeapObjectSize);
+  CHECK(size <= kMaxRegularHeapObjectSize);
   return *isolate->factory()->NewFillerObject(size, false, NEW_SPACE);
 }
 
@@ -342,7 +368,7 @@
   CONVERT_SMI_ARG_CHECKED(flags, 1);
   CHECK(IsAligned(size, kPointerSize));
   CHECK(size > 0);
-  CHECK(size <= Page::kMaxRegularHeapObjectSize);
+  CHECK(size <= kMaxRegularHeapObjectSize);
   bool double_align = AllocateDoubleAlignFlag::decode(flags);
   AllocationSpace space = AllocateTargetSpace::decode(flags);
   return *isolate->factory()->NewFillerObject(size, double_align, space);
@@ -528,6 +554,21 @@
   }
 }
 
+RUNTIME_FUNCTION(Runtime_EnqueuePromiseResolveThenableJob) {
+  HandleScope scope(isolate);
+  DCHECK(args.length() == 6);
+  CONVERT_ARG_HANDLE_CHECKED(JSReceiver, resolution, 0);
+  CONVERT_ARG_HANDLE_CHECKED(JSReceiver, then, 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSFunction, resolve, 2);
+  CONVERT_ARG_HANDLE_CHECKED(JSFunction, reject, 3);
+  CONVERT_ARG_HANDLE_CHECKED(Object, before_debug_event, 4);
+  CONVERT_ARG_HANDLE_CHECKED(Object, after_debug_event, 5);
+  Handle<PromiseContainer> container = isolate->factory()->NewPromiseContainer(
+      resolution, then, resolve, reject, before_debug_event, after_debug_event);
+  isolate->EnqueueMicrotask(container);
+  return isolate->heap()->undefined_value();
+}
+
 RUNTIME_FUNCTION(Runtime_EnqueueMicrotask) {
   HandleScope scope(isolate);
   DCHECK(args.length() == 1);
diff --git a/src/runtime/runtime-literals.cc b/src/runtime/runtime-literals.cc
index a0dd3e8..ebdf04c 100644
--- a/src/runtime/runtime-literals.cc
+++ b/src/runtime/runtime-literals.cc
@@ -6,8 +6,9 @@
 
 #include "src/allocation-site-scopes.h"
 #include "src/arguments.h"
+#include "src/ast/ast.h"
+#include "src/ast/compile-time-value.h"
 #include "src/isolate-inl.h"
-#include "src/parsing/parser.h"
 #include "src/runtime/runtime.h"
 
 namespace v8 {
diff --git a/src/runtime/runtime-object.cc b/src/runtime/runtime-object.cc
index 7908c62..70ed23b 100644
--- a/src/runtime/runtime-object.cc
+++ b/src/runtime/runtime-object.cc
@@ -677,6 +677,38 @@
   return *object;
 }
 
+RUNTIME_FUNCTION(Runtime_DefineDataProperty) {
+  HandleScope scope(isolate);
+  DCHECK(args.length() == 5);
+  CONVERT_ARG_HANDLE_CHECKED(JSReceiver, receiver, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
+  CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
+  CONVERT_PROPERTY_ATTRIBUTES_CHECKED(attrs, 3);
+  CONVERT_SMI_ARG_CHECKED(set_function_name, 4);
+
+  if (set_function_name) {
+    DCHECK(value->IsJSFunction());
+    JSFunction::SetName(Handle<JSFunction>::cast(value), name,
+                        isolate->factory()->empty_string());
+  }
+
+  PropertyDescriptor desc;
+  desc.set_writable(!(attrs & ReadOnly));
+  desc.set_enumerable(!(attrs & DontEnum));
+  desc.set_configurable(!(attrs & DontDelete));
+  desc.set_value(value);
+
+  Maybe<bool> result = JSReceiver::DefineOwnProperty(isolate, receiver, name,
+                                                     &desc, Object::DONT_THROW);
+  RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
+  if (result.IsNothing()) {
+    DCHECK(isolate->has_pending_exception());
+    return isolate->heap()->exception();
+  }
+
+  return *receiver;
+}
+
 // Return property without being observable by accessors or interceptors.
 RUNTIME_FUNCTION(Runtime_GetDataProperty) {
   HandleScope scope(isolate);
@@ -928,5 +960,32 @@
   return *value;
 }
 
+RUNTIME_FUNCTION(Runtime_LoadModuleExport) {
+  HandleScope scope(isolate);
+  DCHECK(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
+  Handle<Module> module(isolate->context()->module());
+  return *Module::LoadExport(module, name);
+}
+
+RUNTIME_FUNCTION(Runtime_LoadModuleImport) {
+  HandleScope scope(isolate);
+  DCHECK(args.length() == 2);
+  CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Smi, module_request, 1);
+  Handle<Module> module(isolate->context()->module());
+  return *Module::LoadImport(module, name, module_request->value());
+}
+
+RUNTIME_FUNCTION(Runtime_StoreModuleExport) {
+  HandleScope scope(isolate);
+  DCHECK(args.length() == 2);
+  CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Object, value, 1);
+  Handle<Module> module(isolate->context()->module());
+  Module::StoreExport(module, name, value);
+  return isolate->heap()->undefined_value();
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/runtime/runtime-regexp.cc b/src/runtime/runtime-regexp.cc
index b36e5e6..977e6bc 100644
--- a/src/runtime/runtime-regexp.cc
+++ b/src/runtime/runtime-regexp.cc
@@ -794,7 +794,7 @@
   return regexp->source();
 }
 
-
+// TODO(jgruber): Remove this once all uses in regexp.js have been removed.
 RUNTIME_FUNCTION(Runtime_RegExpConstructResult) {
   HandleScope handle_scope(isolate);
   DCHECK(args.length() == 3);
diff --git a/src/runtime/runtime-scopes.cc b/src/runtime/runtime-scopes.cc
index 26bfb29..0c037db 100644
--- a/src/runtime/runtime-scopes.cc
+++ b/src/runtime/runtime-scopes.cc
@@ -44,7 +44,7 @@
 Object* DeclareGlobal(
     Isolate* isolate, Handle<JSGlobalObject> global, Handle<String> name,
     Handle<Object> value, PropertyAttributes attr, bool is_var,
-    bool is_function, RedeclarationType redeclaration_type,
+    bool is_function_declaration, RedeclarationType redeclaration_type,
     Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(),
     FeedbackVectorSlot slot = FeedbackVectorSlot::Invalid()) {
   Handle<ScriptContextTable> script_contexts(
@@ -60,7 +60,14 @@
   }
 
   // Do the lookup own properties only, see ES5 erratum.
-  LookupIterator it(global, name, global, LookupIterator::OWN_SKIP_INTERCEPTOR);
+  LookupIterator::Configuration lookup_config(
+      LookupIterator::Configuration::OWN_SKIP_INTERCEPTOR);
+  if (is_function_declaration) {
+    // For function declarations, use the interceptor on the declaration. For
+    // non-functions, use it only on initialization.
+    lookup_config = LookupIterator::Configuration::OWN;
+  }
+  LookupIterator it(global, name, global, lookup_config);
   Maybe<PropertyAttributes> maybe = JSReceiver::GetPropertyAttributes(&it);
   if (!maybe.IsJust()) return isolate->heap()->exception();
 
@@ -71,7 +78,7 @@
     // Skip var re-declarations.
     if (is_var) return isolate->heap()->undefined_value();
 
-    DCHECK(is_function);
+    DCHECK(is_function_declaration);
     if ((old_attributes & DONT_DELETE) != 0) {
       // Only allow reconfiguring globals to functions in user code (no
       // natives, which are marked as read-only).
@@ -83,9 +90,9 @@
       if (old_details.IsReadOnly() || old_details.IsDontEnum() ||
           (it.state() == LookupIterator::ACCESSOR &&
            it.GetAccessors()->IsAccessorPair())) {
-        // ES#sec-globaldeclarationinstantiation 5.d:
+        // ECMA-262 section 15.1.11 GlobalDeclarationInstantiation 5.d:
         // If hasRestrictedGlobal is true, throw a SyntaxError exception.
-        // ES#sec-evaldeclarationinstantiation 8.a.iv.1.b:
+        // ECMA-262 section 18.2.1.3 EvalDeclarationInstantiation 8.a.iv.1.b:
         // If fnDefinable is false, throw a TypeError exception.
         return ThrowRedeclarationError(isolate, name, redeclaration_type);
       }
@@ -102,6 +109,10 @@
     if (it.state() == LookupIterator::ACCESSOR) it.Delete();
   }
 
+  if (is_function_declaration) {
+    it.Restart();
+  }
+
   // Define or redefine own property.
   RETURN_FAILURE_ON_EXCEPTION(
       isolate, JSObject::DefineOwnPropertyIgnoreAttributes(&it, value, attr));
@@ -294,9 +305,8 @@
       DCHECK(context->IsBlockContext());
       object = isolate->factory()->NewJSObject(
           isolate->context_extension_function());
-      Handle<HeapObject> extension =
-          isolate->factory()->NewSloppyBlockWithEvalContextExtension(
-              handle(context->scope_info()), object);
+      Handle<HeapObject> extension = isolate->factory()->NewContextExtension(
+          handle(context->scope_info()), object);
       context->set_extension(*extension);
     } else {
       object = handle(context->extension_object(), isolate);
@@ -665,8 +675,6 @@
   Handle<Context> result =
       isolate->factory()->NewScriptContext(closure, scope_info);
 
-  result->InitializeGlobalSlots();
-
   DCHECK(function->context() == isolate->context());
   DCHECK(*global_object == result->global_object());
 
@@ -691,26 +699,41 @@
 
 RUNTIME_FUNCTION(Runtime_PushWithContext) {
   HandleScope scope(isolate);
-  DCHECK_EQ(2, args.length());
+  DCHECK_EQ(3, args.length());
   CONVERT_ARG_HANDLE_CHECKED(JSReceiver, extension_object, 0);
-  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 1);
+  CONVERT_ARG_HANDLE_CHECKED(ScopeInfo, scope_info, 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 2);
   Handle<Context> current(isolate->context());
-  Handle<Context> context =
-      isolate->factory()->NewWithContext(function, current, extension_object);
+  Handle<Context> context = isolate->factory()->NewWithContext(
+      function, current, scope_info, extension_object);
   isolate->set_context(*context);
   return *context;
 }
 
+RUNTIME_FUNCTION(Runtime_PushModuleContext) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(3, args.length());
+  CONVERT_ARG_HANDLE_CHECKED(Module, module, 0);
+  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 1);
+  CONVERT_ARG_HANDLE_CHECKED(ScopeInfo, scope_info, 2);
+  DCHECK(function->context() == isolate->context());
+
+  Handle<Context> context =
+      isolate->factory()->NewModuleContext(module, function, scope_info);
+  isolate->set_context(*context);
+  return *context;
+}
 
 RUNTIME_FUNCTION(Runtime_PushCatchContext) {
   HandleScope scope(isolate);
-  DCHECK_EQ(3, args.length());
+  DCHECK_EQ(4, args.length());
   CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
   CONVERT_ARG_HANDLE_CHECKED(Object, thrown_object, 1);
-  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 2);
+  CONVERT_ARG_HANDLE_CHECKED(ScopeInfo, scope_info, 2);
+  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 3);
   Handle<Context> current(isolate->context());
   Handle<Context> context = isolate->factory()->NewCatchContext(
-      function, current, name, thrown_object);
+      function, current, scope_info, name, thrown_object);
   isolate->set_context(*context);
   return *context;
 }
diff --git a/src/runtime/runtime-strings.cc b/src/runtime/runtime-strings.cc
index 517513e..f5bda59 100644
--- a/src/runtime/runtime-strings.cc
+++ b/src/runtime/runtime-strings.cc
@@ -103,140 +103,12 @@
   return Smi::FromInt(position);
 }
 
-
-template <typename schar, typename pchar>
-static int StringMatchBackwards(Vector<const schar> subject,
-                                Vector<const pchar> pattern, int idx) {
-  int pattern_length = pattern.length();
-  DCHECK(pattern_length >= 1);
-  DCHECK(idx + pattern_length <= subject.length());
-
-  if (sizeof(schar) == 1 && sizeof(pchar) > 1) {
-    for (int i = 0; i < pattern_length; i++) {
-      uc16 c = pattern[i];
-      if (c > String::kMaxOneByteCharCode) {
-        return -1;
-      }
-    }
-  }
-
-  pchar pattern_first_char = pattern[0];
-  for (int i = idx; i >= 0; i--) {
-    if (subject[i] != pattern_first_char) continue;
-    int j = 1;
-    while (j < pattern_length) {
-      if (pattern[j] != subject[i + j]) {
-        break;
-      }
-      j++;
-    }
-    if (j == pattern_length) {
-      return i;
-    }
-  }
-  return -1;
-}
-
-
 RUNTIME_FUNCTION(Runtime_StringLastIndexOf) {
-  HandleScope scope(isolate);
-  DCHECK(args.length() == 3);
-
-  CONVERT_ARG_HANDLE_CHECKED(String, sub, 0);
-  CONVERT_ARG_HANDLE_CHECKED(String, pat, 1);
-  CONVERT_ARG_HANDLE_CHECKED(Object, index, 2);
-
-  uint32_t start_index = 0;
-  if (!index->ToArrayIndex(&start_index)) return Smi::FromInt(-1);
-
-  uint32_t pat_length = pat->length();
-  uint32_t sub_length = sub->length();
-
-  if (start_index + pat_length > sub_length) {
-    start_index = sub_length - pat_length;
-  }
-
-  if (pat_length == 0) {
-    return Smi::FromInt(start_index);
-  }
-
-  sub = String::Flatten(sub);
-  pat = String::Flatten(pat);
-
-  int position = -1;
-  DisallowHeapAllocation no_gc;  // ensure vectors stay valid
-
-  String::FlatContent sub_content = sub->GetFlatContent();
-  String::FlatContent pat_content = pat->GetFlatContent();
-
-  if (pat_content.IsOneByte()) {
-    Vector<const uint8_t> pat_vector = pat_content.ToOneByteVector();
-    if (sub_content.IsOneByte()) {
-      position = StringMatchBackwards(sub_content.ToOneByteVector(), pat_vector,
-                                      start_index);
-    } else {
-      position = StringMatchBackwards(sub_content.ToUC16Vector(), pat_vector,
-                                      start_index);
-    }
-  } else {
-    Vector<const uc16> pat_vector = pat_content.ToUC16Vector();
-    if (sub_content.IsOneByte()) {
-      position = StringMatchBackwards(sub_content.ToOneByteVector(), pat_vector,
-                                      start_index);
-    } else {
-      position = StringMatchBackwards(sub_content.ToUC16Vector(), pat_vector,
-                                      start_index);
-    }
-  }
-
-  return Smi::FromInt(position);
-}
-
-
-RUNTIME_FUNCTION(Runtime_StringLocaleCompare) {
   HandleScope handle_scope(isolate);
-  DCHECK(args.length() == 2);
-
-  CONVERT_ARG_HANDLE_CHECKED(String, str1, 0);
-  CONVERT_ARG_HANDLE_CHECKED(String, str2, 1);
-
-  if (str1.is_identical_to(str2)) return Smi::FromInt(0);  // Equal.
-  int str1_length = str1->length();
-  int str2_length = str2->length();
-
-  // Decide trivial cases without flattening.
-  if (str1_length == 0) {
-    if (str2_length == 0) return Smi::FromInt(0);  // Equal.
-    return Smi::FromInt(-str2_length);
-  } else {
-    if (str2_length == 0) return Smi::FromInt(str1_length);
-  }
-
-  int end = str1_length < str2_length ? str1_length : str2_length;
-
-  // No need to flatten if we are going to find the answer on the first
-  // character.  At this point we know there is at least one character
-  // in each string, due to the trivial case handling above.
-  int d = str1->Get(0) - str2->Get(0);
-  if (d != 0) return Smi::FromInt(d);
-
-  str1 = String::Flatten(str1);
-  str2 = String::Flatten(str2);
-
-  DisallowHeapAllocation no_gc;
-  String::FlatContent flat1 = str1->GetFlatContent();
-  String::FlatContent flat2 = str2->GetFlatContent();
-
-  for (int i = 0; i < end; i++) {
-    if (flat1.Get(i) != flat2.Get(i)) {
-      return Smi::FromInt(flat1.Get(i) - flat2.Get(i));
-    }
-  }
-
-  return Smi::FromInt(str1_length - str2_length);
+  return String::LastIndexOf(isolate, args.at<Object>(0), args.at<Object>(1),
+                             isolate->factory()->undefined_value());
 }
 
-
 RUNTIME_FUNCTION(Runtime_SubString) {
   HandleScope scope(isolate);
   DCHECK(args.length() == 3);
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index 0d6cb0e..8100d2c 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -8,6 +8,7 @@
 
 #include "src/arguments.h"
 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
+#include "src/compiler.h"
 #include "src/deoptimizer.h"
 #include "src/frames-inl.h"
 #include "src/full-codegen/full-codegen.h"
@@ -419,8 +420,8 @@
   SealHandleScope shs(isolate);
   DCHECK(args.length() == 2 || args.length() == 3);
 #ifdef DEBUG
-  CONVERT_SMI_ARG_CHECKED(interval, 0);
-  CONVERT_SMI_ARG_CHECKED(timeout, 1);
+  CONVERT_INT32_ARG_CHECKED(interval, 0);
+  CONVERT_INT32_ARG_CHECKED(timeout, 1);
   isolate->heap()->set_allocation_timeout(timeout);
   FLAG_gc_interval = interval;
   if (args.length() == 3) {
@@ -456,7 +457,6 @@
   }
   args[0]->Print(os);
   if (args[0]->IsHeapObject()) {
-    os << "\n";
     HeapObject::cast(args[0])->map()->Print(os);
   }
 #else
@@ -768,7 +768,34 @@
   if (!maybe_compiled_module.ToHandle(&compiled_module)) {
     return isolate->heap()->undefined_value();
   }
-  return *wasm::CreateCompiledModuleObject(isolate, compiled_module);
+  return *wasm::CreateCompiledModuleObject(isolate, compiled_module,
+                                           wasm::ModuleOrigin::kWasmOrigin);
+}
+
+RUNTIME_FUNCTION(Runtime_ValidateWasmInstancesChain) {
+  HandleScope shs(isolate);
+  DCHECK(args.length() == 2);
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, module_obj, 0);
+  CONVERT_ARG_HANDLE_CHECKED(Smi, instance_count, 1);
+  wasm::testing::ValidateInstancesChain(isolate, module_obj,
+                                        instance_count->value());
+  return isolate->heap()->ToBoolean(true);
+}
+
+RUNTIME_FUNCTION(Runtime_ValidateWasmModuleState) {
+  HandleScope shs(isolate);
+  DCHECK(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, module_obj, 0);
+  wasm::testing::ValidateModuleState(isolate, module_obj);
+  return isolate->heap()->ToBoolean(true);
+}
+
+RUNTIME_FUNCTION(Runtime_ValidateWasmOrphanedInstance) {
+  HandleScope shs(isolate);
+  DCHECK(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(JSObject, instance_obj, 0);
+  wasm::testing::ValidateOrphanedInstance(isolate, instance_obj);
+  return isolate->heap()->ToBoolean(true);
 }
 
 }  // namespace internal
diff --git a/src/runtime/runtime-typedarray.cc b/src/runtime/runtime-typedarray.cc
index 04bf368..ba422bf 100644
--- a/src/runtime/runtime-typedarray.cc
+++ b/src/runtime/runtime-typedarray.cc
@@ -200,7 +200,6 @@
   size_t length = 0;
   if (source->IsJSTypedArray() &&
       JSTypedArray::cast(*source)->type() == array_type) {
-    length_obj = handle(JSTypedArray::cast(*source)->length(), isolate);
     length = JSTypedArray::cast(*source)->length_value();
   } else {
     CHECK(TryNumberToSize(*length_obj, &length));
@@ -246,6 +245,7 @@
   Handle<Object> byte_length_obj(
       isolate->factory()->NewNumberFromSize(byte_length));
   holder->set_byte_length(*byte_length_obj);
+  length_obj = isolate->factory()->NewNumberFromSize(length);
   holder->set_length(*length_obj);
 
   Handle<FixedTypedArrayBase> elements =
@@ -419,217 +419,5 @@
                                     obj->type() == kExternalInt32Array);
 }
 
-
-inline static bool NeedToFlipBytes(bool is_little_endian) {
-#ifdef V8_TARGET_LITTLE_ENDIAN
-  return !is_little_endian;
-#else
-  return is_little_endian;
-#endif
-}
-
-
-template <int n>
-inline void CopyBytes(uint8_t* target, uint8_t* source) {
-  for (int i = 0; i < n; i++) {
-    *(target++) = *(source++);
-  }
-}
-
-
-template <int n>
-inline void FlipBytes(uint8_t* target, uint8_t* source) {
-  source = source + (n - 1);
-  for (int i = 0; i < n; i++) {
-    *(target++) = *(source--);
-  }
-}
-
-
-template <typename T>
-inline static bool DataViewGetValue(Isolate* isolate,
-                                    Handle<JSDataView> data_view,
-                                    Handle<Object> byte_offset_obj,
-                                    bool is_little_endian, T* result) {
-  size_t byte_offset = 0;
-  if (!TryNumberToSize(*byte_offset_obj, &byte_offset)) {
-    return false;
-  }
-  Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(data_view->buffer()));
-
-  size_t data_view_byte_offset = NumberToSize(data_view->byte_offset());
-  size_t data_view_byte_length = NumberToSize(data_view->byte_length());
-  if (byte_offset + sizeof(T) > data_view_byte_length ||
-      byte_offset + sizeof(T) < byte_offset) {  // overflow
-    return false;
-  }
-
-  union Value {
-    T data;
-    uint8_t bytes[sizeof(T)];
-  };
-
-  Value value;
-  size_t buffer_offset = data_view_byte_offset + byte_offset;
-  DCHECK(NumberToSize(buffer->byte_length()) >= buffer_offset + sizeof(T));
-  uint8_t* source =
-      static_cast<uint8_t*>(buffer->backing_store()) + buffer_offset;
-  if (NeedToFlipBytes(is_little_endian)) {
-    FlipBytes<sizeof(T)>(value.bytes, source);
-  } else {
-    CopyBytes<sizeof(T)>(value.bytes, source);
-  }
-  *result = value.data;
-  return true;
-}
-
-
-template <typename T>
-static bool DataViewSetValue(Isolate* isolate, Handle<JSDataView> data_view,
-                             Handle<Object> byte_offset_obj,
-                             bool is_little_endian, T data) {
-  size_t byte_offset = 0;
-  if (!TryNumberToSize(*byte_offset_obj, &byte_offset)) {
-    return false;
-  }
-  Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(data_view->buffer()));
-
-  size_t data_view_byte_offset = NumberToSize(data_view->byte_offset());
-  size_t data_view_byte_length = NumberToSize(data_view->byte_length());
-  if (byte_offset + sizeof(T) > data_view_byte_length ||
-      byte_offset + sizeof(T) < byte_offset) {  // overflow
-    return false;
-  }
-
-  union Value {
-    T data;
-    uint8_t bytes[sizeof(T)];
-  };
-
-  Value value;
-  value.data = data;
-  size_t buffer_offset = data_view_byte_offset + byte_offset;
-  DCHECK(NumberToSize(buffer->byte_length()) >= buffer_offset + sizeof(T));
-  uint8_t* target =
-      static_cast<uint8_t*>(buffer->backing_store()) + buffer_offset;
-  if (NeedToFlipBytes(is_little_endian)) {
-    FlipBytes<sizeof(T)>(target, value.bytes);
-  } else {
-    CopyBytes<sizeof(T)>(target, value.bytes);
-  }
-  return true;
-}
-
-
-#define DATA_VIEW_GETTER(TypeName, Type, Converter)                        \
-  RUNTIME_FUNCTION(Runtime_DataViewGet##TypeName) {                        \
-    HandleScope scope(isolate);                                            \
-    DCHECK(args.length() == 3);                                            \
-    CONVERT_ARG_HANDLE_CHECKED(JSDataView, holder, 0);                     \
-    CONVERT_NUMBER_ARG_HANDLE_CHECKED(offset, 1);                          \
-    CONVERT_BOOLEAN_ARG_CHECKED(is_little_endian, 2);                      \
-    Type result;                                                           \
-    if (DataViewGetValue(isolate, holder, offset, is_little_endian,        \
-                         &result)) {                                       \
-      return *isolate->factory()->Converter(result);                       \
-    } else {                                                               \
-      THROW_NEW_ERROR_RETURN_FAILURE(                                      \
-          isolate,                                                         \
-          NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset)); \
-    }                                                                      \
-  }
-
-DATA_VIEW_GETTER(Uint8, uint8_t, NewNumberFromUint)
-DATA_VIEW_GETTER(Int8, int8_t, NewNumberFromInt)
-DATA_VIEW_GETTER(Uint16, uint16_t, NewNumberFromUint)
-DATA_VIEW_GETTER(Int16, int16_t, NewNumberFromInt)
-DATA_VIEW_GETTER(Uint32, uint32_t, NewNumberFromUint)
-DATA_VIEW_GETTER(Int32, int32_t, NewNumberFromInt)
-DATA_VIEW_GETTER(Float32, float, NewNumber)
-DATA_VIEW_GETTER(Float64, double, NewNumber)
-
-#undef DATA_VIEW_GETTER
-
-
-template <typename T>
-static T DataViewConvertValue(double value);
-
-
-template <>
-int8_t DataViewConvertValue<int8_t>(double value) {
-  return static_cast<int8_t>(DoubleToInt32(value));
-}
-
-
-template <>
-int16_t DataViewConvertValue<int16_t>(double value) {
-  return static_cast<int16_t>(DoubleToInt32(value));
-}
-
-
-template <>
-int32_t DataViewConvertValue<int32_t>(double value) {
-  return DoubleToInt32(value);
-}
-
-
-template <>
-uint8_t DataViewConvertValue<uint8_t>(double value) {
-  return static_cast<uint8_t>(DoubleToUint32(value));
-}
-
-
-template <>
-uint16_t DataViewConvertValue<uint16_t>(double value) {
-  return static_cast<uint16_t>(DoubleToUint32(value));
-}
-
-
-template <>
-uint32_t DataViewConvertValue<uint32_t>(double value) {
-  return DoubleToUint32(value);
-}
-
-
-template <>
-float DataViewConvertValue<float>(double value) {
-  return static_cast<float>(value);
-}
-
-
-template <>
-double DataViewConvertValue<double>(double value) {
-  return value;
-}
-
-
-#define DATA_VIEW_SETTER(TypeName, Type)                                   \
-  RUNTIME_FUNCTION(Runtime_DataViewSet##TypeName) {                        \
-    HandleScope scope(isolate);                                            \
-    DCHECK(args.length() == 4);                                            \
-    CONVERT_ARG_HANDLE_CHECKED(JSDataView, holder, 0);                     \
-    CONVERT_NUMBER_ARG_HANDLE_CHECKED(offset, 1);                          \
-    CONVERT_NUMBER_ARG_HANDLE_CHECKED(value, 2);                           \
-    CONVERT_BOOLEAN_ARG_CHECKED(is_little_endian, 3);                      \
-    Type v = DataViewConvertValue<Type>(value->Number());                  \
-    if (DataViewSetValue(isolate, holder, offset, is_little_endian, v)) {  \
-      return isolate->heap()->undefined_value();                           \
-    } else {                                                               \
-      THROW_NEW_ERROR_RETURN_FAILURE(                                      \
-          isolate,                                                         \
-          NewRangeError(MessageTemplate::kInvalidDataViewAccessorOffset)); \
-    }                                                                      \
-  }
-
-DATA_VIEW_SETTER(Uint8, uint8_t)
-DATA_VIEW_SETTER(Int8, int8_t)
-DATA_VIEW_SETTER(Uint16, uint16_t)
-DATA_VIEW_SETTER(Int16, int16_t)
-DATA_VIEW_SETTER(Uint32, uint32_t)
-DATA_VIEW_SETTER(Int32, int32_t)
-DATA_VIEW_SETTER(Float32, float)
-DATA_VIEW_SETTER(Float64, double)
-
-#undef DATA_VIEW_SETTER
 }  // namespace internal
 }  // namespace v8
diff --git a/src/runtime/runtime-wasm.cc b/src/runtime/runtime-wasm.cc
index 37608e6..ab69046 100644
--- a/src/runtime/runtime-wasm.cc
+++ b/src/runtime/runtime-wasm.cc
@@ -18,17 +18,11 @@
 namespace v8 {
 namespace internal {
 
-namespace {
-const int kWasmMemArrayBuffer = 2;
-}
-
-RUNTIME_FUNCTION(Runtime_WasmGrowMemory) {
+RUNTIME_FUNCTION(Runtime_WasmMemorySize) {
   HandleScope scope(isolate);
-  DCHECK_EQ(1, args.length());
-  uint32_t delta_pages = 0;
-  CHECK(args[0]->ToUint32(&delta_pages));
-  Handle<JSObject> module_object;
+  DCHECK_EQ(0, args.length());
 
+  Handle<JSObject> module_instance;
   {
     // Get the module JSObject
     DisallowHeapAllocation no_allocation;
@@ -37,77 +31,33 @@
         Memory::Address_at(entry + StandardFrameConstants::kCallerPCOffset);
     Code* code =
         isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
-    FixedArray* deopt_data = code->deoptimization_data();
-    DCHECK(deopt_data->length() == 2);
-    module_object = Handle<JSObject>::cast(handle(deopt_data->get(0), isolate));
-    CHECK(!module_object->IsNull(isolate));
+    Object* owning_instance = wasm::GetOwningWasmInstance(code);
+    CHECK_NOT_NULL(owning_instance);
+    module_instance = handle(JSObject::cast(owning_instance), isolate);
   }
+  return *isolate->factory()->NewNumberFromInt(
+      wasm::GetInstanceMemorySize(isolate, module_instance));
+}
 
-  Address old_mem_start, new_mem_start;
-  uint32_t old_size, new_size;
-
-  // Get mem buffer associated with module object
-  Handle<Object> obj(module_object->GetInternalField(kWasmMemArrayBuffer),
-                     isolate);
-
-  if (obj->IsUndefined(isolate)) {
-    // If module object does not have linear memory associated with it,
-    // Allocate new array buffer of given size.
-    old_mem_start = nullptr;
-    old_size = 0;
-    // TODO(gdeepti): Fix bounds check to take into account size of memtype.
-    new_size = delta_pages * wasm::WasmModule::kPageSize;
-    if (delta_pages > wasm::WasmModule::kMaxMemPages) {
-      return *isolate->factory()->NewNumberFromInt(-1);
-    }
-    new_mem_start =
-        static_cast<Address>(isolate->array_buffer_allocator()->Allocate(
-            static_cast<uint32_t>(new_size)));
-    if (new_mem_start == NULL) {
-      return *isolate->factory()->NewNumberFromInt(-1);
-    }
-#if DEBUG
-    // Double check the API allocator actually zero-initialized the memory.
-    for (size_t i = old_size; i < new_size; i++) {
-      DCHECK_EQ(0, new_mem_start[i]);
-    }
-#endif
-  } else {
-    Handle<JSArrayBuffer> old_buffer = Handle<JSArrayBuffer>::cast(obj);
-    old_mem_start = static_cast<Address>(old_buffer->backing_store());
-    old_size = old_buffer->byte_length()->Number();
-    // If the old memory was zero-sized, we should have been in the
-    // "undefined" case above.
-    DCHECK_NOT_NULL(old_mem_start);
-    DCHECK_NE(0, old_size);
-
-    new_size = old_size + delta_pages * wasm::WasmModule::kPageSize;
-    if (new_size >
-        wasm::WasmModule::kMaxMemPages * wasm::WasmModule::kPageSize) {
-      return *isolate->factory()->NewNumberFromInt(-1);
-    }
-    new_mem_start = static_cast<Address>(realloc(old_mem_start, new_size));
-    if (new_mem_start == NULL) {
-      return *isolate->factory()->NewNumberFromInt(-1);
-    }
-    old_buffer->set_is_external(true);
-    isolate->heap()->UnregisterArrayBuffer(*old_buffer);
-    // Zero initializing uninitialized memory from realloc
-    memset(new_mem_start + old_size, 0, new_size - old_size);
+RUNTIME_FUNCTION(Runtime_WasmGrowMemory) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(1, args.length());
+  CONVERT_UINT32_ARG_CHECKED(delta_pages, 0);
+  Handle<JSObject> module_instance;
+  {
+    // Get the module JSObject
+    DisallowHeapAllocation no_allocation;
+    const Address entry = Isolate::c_entry_fp(isolate->thread_local_top());
+    Address pc =
+        Memory::Address_at(entry + StandardFrameConstants::kCallerPCOffset);
+    Code* code =
+        isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
+    Object* owning_instance = wasm::GetOwningWasmInstance(code);
+    CHECK_NOT_NULL(owning_instance);
+    module_instance = handle(JSObject::cast(owning_instance), isolate);
   }
-
-  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
-  JSArrayBuffer::Setup(buffer, isolate, false, new_mem_start, new_size);
-  buffer->set_is_neuterable(false);
-
-  // Set new buffer to be wasm memory
-  module_object->SetInternalField(kWasmMemArrayBuffer, *buffer);
-
-  CHECK(wasm::UpdateWasmModuleMemory(module_object, old_mem_start,
-                                     new_mem_start, old_size, new_size));
-
-  return *isolate->factory()->NewNumberFromInt(old_size /
-                                               wasm::WasmModule::kPageSize);
+  return *isolate->factory()->NewNumberFromInt(
+      wasm::GrowInstanceMemory(isolate, module_instance, delta_pages));
 }
 
 RUNTIME_FUNCTION(Runtime_WasmThrowTypeError) {
@@ -116,5 +66,28 @@
   THROW_NEW_ERROR_RETURN_FAILURE(
       isolate, NewTypeError(MessageTemplate::kWasmTrapTypeError));
 }
+
+RUNTIME_FUNCTION(Runtime_WasmThrow) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(2, args.length());
+  CONVERT_SMI_ARG_CHECKED(lower, 0);
+  CONVERT_SMI_ARG_CHECKED(upper, 1);
+
+  const int32_t thrown_value = (upper << 16) | lower;
+
+  return isolate->Throw(*isolate->factory()->NewNumberFromInt(thrown_value));
+}
+
+RUNTIME_FUNCTION(Runtime_WasmGetCaughtExceptionValue) {
+  HandleScope scope(isolate);
+  DCHECK_EQ(1, args.length());
+  Object* exception = args[0];
+  // The unwinder will only deliver exceptions to wasm if the exception is a
+  // Number or a Smi (which we have just converted to a Number). This logic
+  // lives in Isolate::is_catchable_by_wasm(Object*).
+  CHECK(exception->IsNumber());
+  return exception;
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/runtime/runtime.cc b/src/runtime/runtime.cc
index 151e240..9d1cd39 100644
--- a/src/runtime/runtime.cc
+++ b/src/runtime/runtime.cc
@@ -5,6 +5,7 @@
 #include "src/runtime/runtime.h"
 
 #include "src/assembler.h"
+#include "src/base/hashmap.h"
 #include "src/contexts.h"
 #include "src/handles-inl.h"
 #include "src/heap/heap.h"
@@ -57,30 +58,61 @@
 #undef I
 #undef F
 
+namespace {
 
-void Runtime::InitializeIntrinsicFunctionNames(Isolate* isolate,
-                                               Handle<NameDictionary> dict) {
-  DCHECK(dict->NumberOfElements() == 0);
-  HandleScope scope(isolate);
-  for (int i = 0; i < kNumFunctions; ++i) {
-    const char* name = kIntrinsicFunctions[i].name;
-    if (name == NULL) continue;
-    Handle<NameDictionary> new_dict = NameDictionary::Add(
-        dict, isolate->factory()->InternalizeUtf8String(name),
-        Handle<Smi>(Smi::FromInt(i), isolate), PropertyDetails::Empty());
-    // The dictionary does not need to grow.
-    CHECK(new_dict.is_identical_to(dict));
+V8_DECLARE_ONCE(initialize_function_name_map_once);
+static const base::CustomMatcherHashMap* kRuntimeFunctionNameMap;
+
+struct IntrinsicFunctionIdentifier {
+  IntrinsicFunctionIdentifier(const unsigned char* data, const int length)
+      : data_(data), length_(length) {}
+
+  static bool Match(void* key1, void* key2) {
+    const IntrinsicFunctionIdentifier* lhs =
+        static_cast<IntrinsicFunctionIdentifier*>(key1);
+    const IntrinsicFunctionIdentifier* rhs =
+        static_cast<IntrinsicFunctionIdentifier*>(key2);
+    if (lhs->length_ != rhs->length_) return false;
+    return CompareCharsUnsigned(reinterpret_cast<const uint8_t*>(lhs->data_),
+                                reinterpret_cast<const uint8_t*>(rhs->data_),
+                                rhs->length_) == 0;
   }
+
+  uint32_t Hash() {
+    return StringHasher::HashSequentialString<uint8_t>(
+        data_, length_, v8::internal::kZeroHashSeed);
+  }
+
+  const unsigned char* data_;
+  const int length_;
+};
+
+void InitializeIntrinsicFunctionNames() {
+  base::CustomMatcherHashMap* function_name_map =
+      new base::CustomMatcherHashMap(IntrinsicFunctionIdentifier::Match);
+  for (size_t i = 0; i < arraysize(kIntrinsicFunctions); ++i) {
+    const Runtime::Function* function = &kIntrinsicFunctions[i];
+    IntrinsicFunctionIdentifier* identifier = new IntrinsicFunctionIdentifier(
+        reinterpret_cast<const unsigned char*>(function->name),
+        static_cast<int>(strlen(function->name)));
+    base::HashMap::Entry* entry =
+        function_name_map->InsertNew(identifier, identifier->Hash());
+    entry->value = const_cast<Runtime::Function*>(function);
+  }
+  kRuntimeFunctionNameMap = function_name_map;
 }
 
+}  // namespace
 
-const Runtime::Function* Runtime::FunctionForName(Handle<String> name) {
-  Heap* heap = name->GetHeap();
-  int entry = heap->intrinsic_function_names()->FindEntry(name);
-  if (entry != kNotFound) {
-    Object* smi_index = heap->intrinsic_function_names()->ValueAt(entry);
-    int function_index = Smi::cast(smi_index)->value();
-    return &(kIntrinsicFunctions[function_index]);
+const Runtime::Function* Runtime::FunctionForName(const unsigned char* name,
+                                                  int length) {
+  base::CallOnce(&initialize_function_name_map_once,
+                 &InitializeIntrinsicFunctionNames);
+  IntrinsicFunctionIdentifier identifier(name, length);
+  base::HashMap::Entry* entry =
+      kRuntimeFunctionNameMap->Lookup(&identifier, identifier.Hash());
+  if (entry) {
+    return reinterpret_cast<Function*>(entry->value);
   }
   return NULL;
 }
diff --git a/src/runtime/runtime.h b/src/runtime/runtime.h
index 38eb51d..cbdaf0f 100644
--- a/src/runtime/runtime.h
+++ b/src/runtime/runtime.h
@@ -11,7 +11,7 @@
 #include "src/base/platform/time.h"
 #include "src/objects.h"
 #include "src/unicode.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -204,12 +204,10 @@
 #define FOR_EACH_INTRINSIC_ERROR(F) F(ErrorToString, 1, 1)
 
 #define FOR_EACH_INTRINSIC_FORIN(F) \
-  F(ForInDone, 2, 1)                \
   F(ForInEnumerate, 1, 1)           \
   F(ForInFilter, 2, 1)              \
   F(ForInHasProperty, 2, 1)         \
-  F(ForInNext, 4, 1)                \
-  F(ForInStep, 1, 1)
+  F(ForInNext, 4, 1)
 
 #define FOR_EACH_INTRINSIC_INTERPRETER(F) \
   F(InterpreterNewClosure, 2, 1)          \
@@ -262,6 +260,7 @@
   F(GetImplFromInitializedIntlObject, 1, 1)  \
   F(CreateDateTimeFormat, 3, 1)              \
   F(InternalDateFormat, 2, 1)                \
+  F(InternalDateFormatToParts, 2, 1)         \
   F(InternalDateParse, 2, 1)                 \
   F(CreateNumberFormat, 3, 1)                \
   F(InternalNumberFormat, 2, 1)              \
@@ -291,6 +290,7 @@
   F(CheckIsBootstrapping, 0, 1)                     \
   F(CreateListFromArrayLike, 1, 1)                  \
   F(EnqueueMicrotask, 1, 1)                         \
+  F(EnqueuePromiseResolveThenableJob, 6, 1)         \
   F(GetAndResetRuntimeCallStats, -1 /* <= 2 */, 1)  \
   F(ExportExperimentalFromRuntime, 1, 1)            \
   F(ExportFromRuntime, 1, 1)                        \
@@ -304,6 +304,7 @@
   F(NewTypeError, 2, 1)                             \
   F(OrdinaryHasInstance, 2, 1)                      \
   F(PromiseRejectEvent, 3, 1)                       \
+  F(PromiseRejectEventFromStack, 2, 1)              \
   F(PromiseRevokeReject, 1, 1)                      \
   F(PromoteScheduledException, 0, 1)                \
   F(ReThrow, 1, 1)                                  \
@@ -394,6 +395,7 @@
   F(IsJSGlobalProxy, 1, 1)                           \
   F(DefineAccessorPropertyUnchecked, 5, 1)           \
   F(DefineDataPropertyInLiteral, 5, 1)               \
+  F(DefineDataProperty, 5, 1)                        \
   F(GetDataProperty, 2, 1)                           \
   F(GetConstructorName, 1, 1)                        \
   F(HasFastPackedElements, 1, 1)                     \
@@ -416,7 +418,10 @@
   F(HasInPrototypeChain, 2, 1)                       \
   F(CreateIterResultObject, 2, 1)                    \
   F(IsAccessCheckNeeded, 1, 1)                       \
-  F(CreateDataProperty, 3, 1)
+  F(CreateDataProperty, 3, 1)                        \
+  F(LoadModuleExport, 1, 1)                          \
+  F(LoadModuleImport, 2, 1)                          \
+  F(StoreModuleExport, 2, 1)
 
 #define FOR_EACH_INTRINSIC_OPERATORS(F) \
   F(Multiply, 2, 1)                     \
@@ -475,8 +480,9 @@
   F(NewClosure_Tenured, 1, 1)           \
   F(NewScriptContext, 2, 1)             \
   F(NewFunctionContext, 1, 1)           \
-  F(PushWithContext, 2, 1)              \
-  F(PushCatchContext, 3, 1)             \
+  F(PushModuleContext, 3, 1)            \
+  F(PushWithContext, 3, 1)              \
+  F(PushCatchContext, 4, 1)             \
   F(PushBlockContext, 2, 1)             \
   F(DeleteLookupSlot, 1, 1)             \
   F(LoadLookupSlot, 1, 1)               \
@@ -797,8 +803,7 @@
 #define FOR_EACH_INTRINSIC_STRINGS(F)     \
   F(StringReplaceOneCharWithString, 3, 1) \
   F(StringIndexOf, 3, 1)                  \
-  F(StringLastIndexOf, 3, 1)              \
-  F(StringLocaleCompare, 2, 1)            \
+  F(StringLastIndexOf, 2, 1)              \
   F(SubString, 3, 1)                      \
   F(StringAdd, 2, 1)                      \
   F(InternalizeString, 1, 1)              \
@@ -888,7 +893,10 @@
   F(SerializeWasmModule, 1, 1)                \
   F(DeserializeWasmModule, 1, 1)              \
   F(IsAsmWasmCode, 1, 1)                      \
-  F(IsNotAsmWasmCode, 1, 1)
+  F(IsNotAsmWasmCode, 1, 1)                   \
+  F(ValidateWasmInstancesChain, 2, 1)         \
+  F(ValidateWasmModuleState, 1, 1)            \
+  F(ValidateWasmOrphanedInstance, 1, 1)
 
 #define FOR_EACH_INTRINSIC_TYPEDARRAY(F)     \
   F(ArrayBufferGetByteLength, 1, 1)          \
@@ -905,27 +913,14 @@
   F(IsTypedArray, 1, 1)                      \
   F(IsSharedTypedArray, 1, 1)                \
   F(IsSharedIntegerTypedArray, 1, 1)         \
-  F(IsSharedInteger32TypedArray, 1, 1)       \
-  F(DataViewGetUint8, 3, 1)                  \
-  F(DataViewGetInt8, 3, 1)                   \
-  F(DataViewGetUint16, 3, 1)                 \
-  F(DataViewGetInt16, 3, 1)                  \
-  F(DataViewGetUint32, 3, 1)                 \
-  F(DataViewGetInt32, 3, 1)                  \
-  F(DataViewGetFloat32, 3, 1)                \
-  F(DataViewGetFloat64, 3, 1)                \
-  F(DataViewSetUint8, 4, 1)                  \
-  F(DataViewSetInt8, 4, 1)                   \
-  F(DataViewSetUint16, 4, 1)                 \
-  F(DataViewSetInt16, 4, 1)                  \
-  F(DataViewSetUint32, 4, 1)                 \
-  F(DataViewSetInt32, 4, 1)                  \
-  F(DataViewSetFloat32, 4, 1)                \
-  F(DataViewSetFloat64, 4, 1)
+  F(IsSharedInteger32TypedArray, 1, 1)
 
 #define FOR_EACH_INTRINSIC_WASM(F) \
   F(WasmGrowMemory, 1, 1)          \
-  F(WasmThrowTypeError, 0, 1)
+  F(WasmMemorySize, 0, 1)          \
+  F(WasmThrowTypeError, 0, 1)      \
+  F(WasmThrow, 2, 1)               \
+  F(WasmGetCaughtExceptionValue, 1, 1)
 
 #define FOR_EACH_INTRINSIC_RETURN_PAIR(F) \
   F(LoadLookupSlotForCall, 1, 2)
@@ -935,30 +930,26 @@
 
 // Most intrinsics are implemented in the runtime/ directory, but ICs are
 // implemented in ic.cc for now.
-#define FOR_EACH_INTRINSIC_IC(F)                 \
-  F(BinaryOpIC_Miss, 2, 1)                       \
-  F(BinaryOpIC_MissWithAllocationSite, 3, 1)     \
-  F(CallIC_Miss, 3, 1)                           \
-  F(CompareIC_Miss, 3, 1)                        \
-  F(ElementsTransitionAndStoreIC_Miss, 5, 1)     \
-  F(KeyedLoadIC_Miss, 4, 1)                      \
-  F(KeyedLoadIC_MissFromStubFailure, 4, 1)       \
-  F(KeyedStoreIC_Miss, 5, 1)                     \
-  F(KeyedStoreIC_MissFromStubFailure, 5, 1)      \
-  F(KeyedStoreIC_Slow, 5, 1)                     \
-  F(LoadElementWithInterceptor, 2, 1)            \
-  F(LoadGlobalIC_Miss, 2, 1)                     \
-  F(LoadGlobalIC_Slow, 2, 1)                     \
-  F(LoadIC_Miss, 4, 1)                           \
-  F(LoadIC_MissFromStubFailure, 4, 1)            \
-  F(LoadPropertyWithInterceptor, 3, 1)           \
-  F(LoadPropertyWithInterceptorOnly, 3, 1)       \
-  F(StoreCallbackProperty, 6, 1)                 \
-  F(StoreIC_Miss, 5, 1)                          \
-  F(StoreIC_MissFromStubFailure, 5, 1)           \
-  F(TransitionStoreIC_MissFromStubFailure, 6, 1) \
-  F(StorePropertyWithInterceptor, 3, 1)          \
-  F(ToBooleanIC_Miss, 1, 1)                      \
+#define FOR_EACH_INTRINSIC_IC(F)             \
+  F(BinaryOpIC_Miss, 2, 1)                   \
+  F(BinaryOpIC_MissWithAllocationSite, 3, 1) \
+  F(CallIC_Miss, 3, 1)                       \
+  F(CompareIC_Miss, 3, 1)                    \
+  F(ElementsTransitionAndStoreIC_Miss, 6, 1) \
+  F(KeyedLoadIC_Miss, 4, 1)                  \
+  F(KeyedLoadIC_MissFromStubFailure, 4, 1)   \
+  F(KeyedStoreIC_Miss, 5, 1)                 \
+  F(KeyedStoreIC_Slow, 5, 1)                 \
+  F(LoadElementWithInterceptor, 2, 1)        \
+  F(LoadGlobalIC_Miss, 2, 1)                 \
+  F(LoadGlobalIC_Slow, 2, 1)                 \
+  F(LoadIC_Miss, 4, 1)                       \
+  F(LoadPropertyWithInterceptor, 3, 1)       \
+  F(LoadPropertyWithInterceptorOnly, 3, 1)   \
+  F(StoreCallbackProperty, 6, 1)             \
+  F(StoreIC_Miss, 5, 1)                      \
+  F(StorePropertyWithInterceptor, 3, 1)      \
+  F(ToBooleanIC_Miss, 1, 1)                  \
   F(Unreachable, 0, 1)
 
 #define FOR_EACH_INTRINSIC_RETURN_OBJECT(F) \
@@ -1044,13 +1035,8 @@
 
   static const int kNotFound = -1;
 
-  // Add internalized strings for all the intrinsic function names to a
-  // StringDictionary.
-  static void InitializeIntrinsicFunctionNames(Isolate* isolate,
-                                               Handle<NameDictionary> dict);
-
-  // Get the intrinsic function with the given name, which must be internalized.
-  static const Function* FunctionForName(Handle<String> name);
+  // Get the intrinsic function with the given name.
+  static const Function* FunctionForName(const unsigned char* name, int length);
 
   // Get the intrinsic function with the given FunctionId.
   static const Function* FunctionForId(FunctionId id);
diff --git a/src/s390/code-stubs-s390.cc b/src/s390/code-stubs-s390.cc
index ce80384..b1bf02d 100644
--- a/src/s390/code-stubs-s390.cc
+++ b/src/s390/code-stubs-s390.cc
@@ -1726,7 +1726,6 @@
   // r4 : feedback vector
   // r5 : slot in feedback vector (Smi)
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
@@ -1749,7 +1748,7 @@
   Register weak_value = r9;
   __ LoadP(weak_value, FieldMemOperand(r7, WeakCell::kValueOffset));
   __ CmpP(r3, weak_value);
-  __ beq(&done_increment_count, Label::kNear);
+  __ beq(&done, Label::kNear);
   __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex);
   __ beq(&done, Label::kNear);
   __ LoadP(feedback_map, FieldMemOperand(r7, HeapObject::kMapOffset));
@@ -1772,7 +1771,7 @@
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7);
   __ CmpP(r3, r7);
   __ bne(&megamorphic);
-  __ b(&done_increment_count, Label::kNear);
+  __ b(&done, Label::kNear);
 
   __ bind(&miss);
 
@@ -1802,32 +1801,22 @@
   // slot.
   CreateAllocationSiteStub create_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ b(&done_initialize_count, Label::kNear);
+  __ b(&done, Label::kNear);
 
   __ bind(&not_array_function);
 
   CreateWeakCellStub weak_cell_stub(masm->isolate());
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ LoadSmiLiteral(r7, Smi::FromInt(1));
-  __ SmiToPtrArrayOffset(r6, r5);
-  __ AddP(r6, r4, r6);
-  __ StoreP(r7, FieldMemOperand(r6, count_offset), r0);
-  __ b(&done, Label::kNear);
+  __ bind(&done);
 
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  // Increment the call count for all function calls.
   __ SmiToPtrArrayOffset(r7, r5);
   __ AddP(r7, r4, r7);
 
   __ LoadP(r6, FieldMemOperand(r7, count_offset));
   __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
   __ StoreP(r6, FieldMemOperand(r7, count_offset), r0);
-
-  __ bind(&done);
 }
 
 void CallConstructStub::Generate(MacroAssembler* masm) {
@@ -1873,6 +1862,17 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot, Register temp) {
+  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
+  __ SmiToPtrArrayOffset(temp, slot);
+  __ AddP(feedback_vector, feedback_vector, temp);
+  __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
+  __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
+  __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
+}
+
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r3 - function
   // r5 - slot id
@@ -1885,12 +1885,7 @@
   __ mov(r2, Operand(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ SmiToPtrArrayOffset(r7, r5);
-  __ AddP(r4, r4, r7);
-  __ LoadP(r5, FieldMemOperand(r4, count_offset));
-  __ AddSmiLiteral(r5, r5, Smi::FromInt(1), r0);
-  __ StoreP(r5, FieldMemOperand(r4, count_offset), r0);
+  IncrementCallCount(masm, r4, r5, r1);
 
   __ LoadRR(r4, r6);
   __ LoadRR(r5, r3);
@@ -1902,7 +1897,7 @@
   // r3 - function
   // r5 - slot id (Smi)
   // r4 - vector
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1933,13 +1928,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(r3, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  const int count_offset = FixedArray::kHeaderSize + kPointerSize;
-  __ LoadP(r5, FieldMemOperand(r8, count_offset));
-  __ AddSmiLiteral(r5, r5, Smi::FromInt(1), r0);
-  __ StoreP(r5, FieldMemOperand(r8, count_offset), r0);
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, r4, r5, r1);
+
   __ mov(r2, Operand(argc));
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1979,6 +1972,11 @@
   __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0);
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, r4, r5, r1);
+
+  __ bind(&call_count_incremented);
   __ mov(r2, Operand(argc));
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -2005,10 +2003,6 @@
   __ CmpP(r6, ip);
   __ bne(&miss);
 
-  // Initialize the call counter.
-  __ LoadSmiLiteral(r7, Smi::FromInt(1));
-  __ StoreP(r7, FieldMemOperand(r8, count_offset), r0);
-
   // Store the function. Use a stub since we need a frame for allocation.
   // r4 - vector
   // r5 - slot
@@ -2016,9 +2010,13 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(masm->isolate());
+    __ Push(r4);
+    __ Push(r5);
     __ Push(cp, r3);
     __ CallStub(&create_stub);
     __ Pop(cp, r3);
+    __ Pop(r5);
+    __ Pop(r4);
   }
 
   __ b(&call_function);
@@ -2028,7 +2026,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ b(&call);
+  __ b(&call_count_incremented);
 }
 
 void CallICStub::GenerateMiss(MacroAssembler* masm) {
@@ -2204,297 +2202,6 @@
   __ bind(&done);
 }
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  lr: return address
-  //  sp[0]: to
-  //  sp[4]: from
-  //  sp[8]: string
-
-  // This stub is called from the native-call %_SubString(...), so
-  // nothing can be assumed about the arguments. It is tested that:
-  //  "string" is a sequential string,
-  //  both "from" and "to" are smis, and
-  //  0 <= from <= to <= string.length.
-  // If any of these assumptions fail, we call the runtime system.
-
-  const int kToOffset = 0 * kPointerSize;
-  const int kFromOffset = 1 * kPointerSize;
-  const int kStringOffset = 2 * kPointerSize;
-
-  __ LoadP(r4, MemOperand(sp, kToOffset));
-  __ LoadP(r5, MemOperand(sp, kFromOffset));
-
-  // If either to or from had the smi tag bit set, then fail to generic runtime
-  __ JumpIfNotSmi(r4, &runtime);
-  __ JumpIfNotSmi(r5, &runtime);
-  __ SmiUntag(r4);
-  __ SmiUntag(r5);
-  // Both r4 and r5 are untagged integers.
-
-  // We want to bailout to runtime here if From is negative.
-  __ blt(&runtime);  // From < 0.
-
-  __ CmpLogicalP(r5, r4);
-  __ bgt(&runtime);  // Fail if from > to.
-  __ SubP(r4, r4, r5);
-
-  // Make sure first argument is a string.
-  __ LoadP(r2, MemOperand(sp, kStringOffset));
-  __ JumpIfSmi(r2, &runtime);
-  Condition is_string = masm->IsObjectStringType(r2, r3);
-  __ b(NegateCondition(is_string), &runtime);
-
-  Label single_char;
-  __ CmpP(r4, Operand(1));
-  __ b(eq, &single_char);
-
-  // Short-cut for the case of trivial substring.
-  Label return_r2;
-  // r2: original string
-  // r4: result string length
-  __ LoadP(r6, FieldMemOperand(r2, String::kLengthOffset));
-  __ SmiUntag(r0, r6);
-  __ CmpLogicalP(r4, r0);
-  // Return original string.
-  __ beq(&return_r2);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ bgt(&runtime);
-  // Shorter than original string's length: an actual substring.
-
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into r7.
-  // r2: original string
-  // r3: instance type
-  // r4: length
-  // r5: from index (untagged)
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ mov(r0, Operand(kIsIndirectStringMask));
-  __ AndP(r0, r3);
-  __ beq(&seq_or_external_string);
-
-  __ mov(r0, Operand(kSlicedNotConsMask));
-  __ AndP(r0, r3);
-  __ bne(&sliced_string);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  __ LoadP(r7, FieldMemOperand(r2, ConsString::kSecondOffset));
-  __ CompareRoot(r7, Heap::kempty_stringRootIndex);
-  __ bne(&runtime);
-  __ LoadP(r7, FieldMemOperand(r2, ConsString::kFirstOffset));
-  // Update instance type.
-  __ LoadP(r3, FieldMemOperand(r7, HeapObject::kMapOffset));
-  __ LoadlB(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
-  __ b(&underlying_unpacked);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ LoadP(r7, FieldMemOperand(r2, SlicedString::kParentOffset));
-  __ LoadP(r6, FieldMemOperand(r2, SlicedString::kOffsetOffset));
-  __ SmiUntag(r3, r6);
-  __ AddP(r5, r3);  // Add offset to index.
-  // Update instance type.
-  __ LoadP(r3, FieldMemOperand(r7, HeapObject::kMapOffset));
-  __ LoadlB(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
-  __ b(&underlying_unpacked);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ LoadRR(r7, r2);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // r7: underlying subject string
-    // r3: instance type of underlying subject string
-    // r4: length
-    // r5: adjusted start index (untagged)
-    __ CmpP(r4, Operand(SlicedString::kMinLength));
-    // Short slice.  Copy instead of slicing.
-    __ blt(&copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ mov(r0, Operand(kStringEncodingMask));
-    __ AndP(r0, r3);
-    __ beq(&two_byte_slice);
-    __ AllocateOneByteSlicedString(r2, r4, r8, r9, &runtime);
-    __ b(&set_slice_header);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(r2, r4, r8, r9, &runtime);
-    __ bind(&set_slice_header);
-    __ SmiTag(r5);
-    __ StoreP(r7, FieldMemOperand(r2, SlicedString::kParentOffset));
-    __ StoreP(r5, FieldMemOperand(r2, SlicedString::kOffsetOffset));
-    __ b(&return_r2);
-
-    __ bind(&copy_routine);
-  }
-
-  // r7: underlying subject string
-  // r3: instance type of underlying subject string
-  // r4: length
-  // r5: adjusted start index (untagged)
-  Label two_byte_sequential, sequential_string, allocate_result;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ mov(r0, Operand(kExternalStringTag));
-  __ AndP(r0, r3);
-  __ beq(&sequential_string);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ mov(r0, Operand(kShortExternalStringTag));
-  __ AndP(r0, r3);
-  __ bne(&runtime);
-  __ LoadP(r7, FieldMemOperand(r7, ExternalString::kResourceDataOffset));
-  // r7 already points to the first character of underlying string.
-  __ b(&allocate_result);
-
-  __ bind(&sequential_string);
-  // Locate first character of underlying subject string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ AddP(r7, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&allocate_result);
-  // Sequential acii string.  Allocate the result.
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ mov(r0, Operand(kStringEncodingMask));
-  __ AndP(r0, r3);
-  __ beq(&two_byte_sequential);
-
-  // Allocate and copy the resulting one-byte string.
-  __ AllocateOneByteString(r2, r4, r6, r8, r9, &runtime);
-
-  // Locate first character of substring to copy.
-  __ AddP(r7, r5);
-  // Locate first character of result.
-  __ AddP(r3, r2, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
-  // r2: result string
-  // r3: first character of result string
-  // r4: result string length
-  // r7: first character of substring to copy
-  STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(masm, r3, r7, r4, r5,
-                                       String::ONE_BYTE_ENCODING);
-  __ b(&return_r2);
-
-  // Allocate and copy the resulting two-byte string.
-  __ bind(&two_byte_sequential);
-  __ AllocateTwoByteString(r2, r4, r6, r8, r9, &runtime);
-
-  // Locate first character of substring to copy.
-  __ ShiftLeftP(r3, r5, Operand(1));
-  __ AddP(r7, r3);
-  // Locate first character of result.
-  __ AddP(r3, r2, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  // r2: result string.
-  // r3: first character of result.
-  // r4: result length.
-  // r7: first character of substring to copy.
-  STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
-  StringHelper::GenerateCopyCharacters(masm, r3, r7, r4, r5,
-                                       String::TWO_BYTE_ENCODING);
-
-  __ bind(&return_r2);
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1, r5, r6);
-  __ Drop(3);
-  __ Ret();
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // r2: original string
-  // r3: instance type
-  // r4: length
-  // r5: from index (untagged)
-  __ SmiTag(r5, r5);
-  StringCharAtGenerator generator(r2, r5, r4, r2, &runtime, &runtime, &runtime,
-                                  RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ Drop(3);
-  __ Ret();
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in r2.
-  Label done;
-  Label is_number;
-  __ JumpIfSmi(r2, &is_number);
-
-  __ CompareObjectType(r2, r3, r3, FIRST_NONSTRING_TYPE);
-  // r2: receiver
-  // r3: receiver instance type
-  __ blt(&done);
-
-  Label not_heap_number;
-  __ CmpP(r3, Operand(HEAP_NUMBER_TYPE));
-  __ bne(&not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpP(r3, Operand(ODDBALL_TYPE));
-  __ bne(&not_oddball);
-  __ LoadP(r2, FieldMemOperand(r2, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r2);  // Push argument.
-  __ TailCallRuntime(Runtime::kToString);
-
-  __ bind(&done);
-  __ Ret();
-}
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in r2.
-  Label is_number;
-  __ JumpIfSmi(r2, &is_number);
-
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CompareObjectType(r2, r3, r3, LAST_NAME_TYPE);
-  // r2: receiver
-  // r3: receiver instance type
-  __ Ret(le);
-
-  Label not_heap_number;
-  __ CmpP(r3, Operand(HEAP_NUMBER_TYPE));
-  __ bne(&not_heap_number);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpP(r3, Operand(ODDBALL_TYPE));
-  __ bne(&not_oddball);
-  __ LoadP(r2, FieldMemOperand(r2, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ push(r2);  // Push argument.
-  __ TailCallRuntime(Runtime::kToName);
-}
 
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
@@ -3357,18 +3064,6 @@
   Label need_incremental;
   Label need_incremental_pop_scratch;
 
-  DCHECK((~Page::kPageAlignmentMask & 0xffff) == 0);
-  __ AndP(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
-  __ LoadP(
-      regs_.scratch1(),
-      MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
-  __ SubP(regs_.scratch1(), regs_.scratch1(), Operand(1));
-  __ StoreP(
-      regs_.scratch1(),
-      MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
-  __ CmpP(regs_.scratch1(), Operand::Zero());  // S390, we could do better here
-  __ blt(&need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -3785,7 +3480,7 @@
   __ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
 
   // Load the map into the correct register.
-  DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
   __ LoadRR(feedback, too_far);
 
   __ AddP(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4521,7 +4216,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ CmpP(r9, Operand(Page::kMaxRegularHeapObjectSize));
+    __ CmpP(r9, Operand(kMaxRegularHeapObjectSize));
     __ bgt(&too_big_for_new_space);
     {
       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
@@ -4896,7 +4591,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ CmpP(r9, Operand(Page::kMaxRegularHeapObjectSize));
+  __ CmpP(r9, Operand(kMaxRegularHeapObjectSize));
   __ bgt(&too_big_for_new_space);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
diff --git a/src/s390/interface-descriptors-s390.cc b/src/s390/interface-descriptors-s390.cc
index 4cdcd54..ca40a0c 100644
--- a/src/s390/interface-descriptors-s390.cc
+++ b/src/s390/interface-descriptors-s390.cc
@@ -38,11 +38,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return r5; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return r6; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return r5; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return r7; }
-
-const Register StoreTransitionDescriptor::MapRegister() { return r5; }
+const Register StoreTransitionDescriptor::SlotRegister() { return r6; }
+const Register StoreTransitionDescriptor::VectorRegister() { return r5; }
+const Register StoreTransitionDescriptor::MapRegister() { return r7; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r4; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r2; }
@@ -324,7 +322,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       r2,  // callee
@@ -359,7 +357,19 @@
       r2,  // argument count (not including receiver)
       r5,  // new target
       r3,  // constructor to call
-      r4   // address of the first argument
+      r4,  // allocation site feedback if available, undefined otherwise
+      r6   // address of the first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      r2,  // argument count (not including receiver)
+      r3,  // target to call checked to be Array function
+      r4,  // allocation site feedback if available, undefined otherwise
+      r5   // address of the first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/s390/macro-assembler-s390.cc b/src/s390/macro-assembler-s390.cc
index 8b708de..769d3dc 100644
--- a/src/s390/macro-assembler-s390.cc
+++ b/src/s390/macro-assembler-s390.cc
@@ -251,10 +251,7 @@
 void MacroAssembler::InNewSpace(Register object, Register scratch,
                                 Condition cond, Label* branch) {
   DCHECK(cond == eq || cond == ne);
-  // TODO(joransiu): check if we can merge mov Operand into AndP.
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cond, branch);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cond, branch);
 }
 
 void MacroAssembler::RecordWriteField(
@@ -1709,7 +1706,7 @@
 void MacroAssembler::Allocate(int object_size, Register result,
                               Register scratch1, Register scratch2,
                               Label* gc_required, AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
@@ -1965,7 +1962,7 @@
 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register scratch1, Register scratch2,
                                   AllocationFlags flags) {
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK(!AreAliased(result, scratch1, scratch2, ip));
 
   // Make object size into bytes.
diff --git a/src/s390/macro-assembler-s390.h b/src/s390/macro-assembler-s390.h
index b8ed3a0..7f2d042 100644
--- a/src/s390/macro-assembler-s390.h
+++ b/src/s390/macro-assembler-s390.h
@@ -194,6 +194,18 @@
   void Ret() { b(r14); }
   void Ret(Condition cond) { b(cond, r14); }
 
+  // Emit code that loads |parameter_index|'th parameter from the stack to
+  // the register according to the CallInterfaceDescriptor definition.
+  // |sp_to_caller_sp_offset_in_words| specifies the number of words pushed
+  // below the caller's sp.
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 0) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the sp register.
   void Drop(int count);
diff --git a/src/s390/simulator-s390.cc b/src/s390/simulator-s390.cc
index 91db782..78bc939 100644
--- a/src/s390/simulator-s390.cc
+++ b/src/s390/simulator-s390.cc
@@ -660,8 +660,8 @@
   last_debugger_input_ = input;
 }
 
-void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
-                            size_t size) {
+void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
+                            void* start_addr, size_t size) {
   intptr_t start = reinterpret_cast<intptr_t>(start_addr);
   int intra_line = (start & CachePage::kLineMask);
   start -= intra_line;
@@ -681,7 +681,8 @@
   }
 }
 
-CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
+CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                   void* page) {
   base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
   if (entry->value == NULL) {
     CachePage* new_page = new CachePage();
@@ -691,7 +692,8 @@
 }
 
 // Flush from start up to and not including start + size.
-void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
+void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
+                             intptr_t start, int size) {
   DCHECK(size <= CachePage::kPageSize);
   DCHECK(AllOnOnePage(start, size - 1));
   DCHECK((start & CachePage::kLineMask) == 0);
@@ -703,7 +705,8 @@
   memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
 }
 
-void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
+void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
+                            Instruction* instr) {
   intptr_t address = reinterpret_cast<intptr_t>(instr);
   void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
   void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@@ -1469,7 +1472,7 @@
 Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
   i_cache_ = isolate_->simulator_i_cache();
   if (i_cache_ == NULL) {
-    i_cache_ = new base::HashMap(&ICacheMatch);
+    i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
     isolate_->set_simulator_i_cache(i_cache_);
   }
   Initialize(isolate);
@@ -1609,7 +1612,8 @@
 };
 
 // static
-void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
+void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
+                         Redirection* first) {
   Redirection::DeleteChain(first);
   if (i_cache != nullptr) {
     for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
diff --git a/src/s390/simulator-s390.h b/src/s390/simulator-s390.h
index 7af00ee..1ce6bf7 100644
--- a/src/s390/simulator-s390.h
+++ b/src/s390/simulator-s390.h
@@ -211,7 +211,7 @@
   // Call on program start.
   static void Initialize(Isolate* isolate);
 
-  static void TearDown(base::HashMap* i_cache, Redirection* first);
+  static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
 
   // V8 generally calls into generated JS code with 5 parameters and into
   // generated RegExp code with 7 parameters. This is a convenience function,
@@ -233,7 +233,8 @@
   char* last_debugger_input() { return last_debugger_input_; }
 
   // ICache checking.
-  static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
+  static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
+                          size_t size);
 
   // Returns true if pc register contains one of the 'special_values' defined
   // below (bad_lr, end_sim_pc).
@@ -445,9 +446,12 @@
   void ExecuteInstruction(Instruction* instr, bool auto_incr_pc = true);
 
   // ICache.
-  static void CheckICache(base::HashMap* i_cache, Instruction* instr);
-  static void FlushOnePage(base::HashMap* i_cache, intptr_t start, int size);
-  static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
+  static void CheckICache(base::CustomMatcherHashMap* i_cache,
+                          Instruction* instr);
+  static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
+                           int size);
+  static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
+                                 void* page);
 
   // Runtime call support.
   static void* RedirectExternalReference(
@@ -482,7 +486,7 @@
   char* last_debugger_input_;
 
   // Icache simulation
-  base::HashMap* i_cache_;
+  base::CustomMatcherHashMap* i_cache_;
 
   // Registered breakpoints.
   Instruction* break_pc_;
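
The hash-map changes in the simulator above (and in the serializer files further down) split one class into two: maps that compare keys with a caller-supplied predicate, such as the i-cache constructed with &ICacheMatch, become base::CustomMatcherHashMap, while maps that only need pointer identity drop the explicit PointersMatch argument and rely on the plain base::HashMap default. A rough standalone sketch of that split using std::unordered_map instead of V8's base types; SamePage, kPageSize and the page-based hashing are assumptions of this sketch, not the real ICacheMatch logic.

// Illustration only: a pointer-identity map vs. a custom-matcher map.
#include <cstdint>
#include <cstdio>
#include <unordered_map>

constexpr uintptr_t kPageSize = 1u << 12;

// Custom matcher: two keys match when they fall on the same simulated page.
bool SamePage(void* a, void* b) {
  return (reinterpret_cast<uintptr_t>(a) & ~(kPageSize - 1)) ==
         (reinterpret_cast<uintptr_t>(b) & ~(kPageSize - 1));
}

struct PageHash {
  size_t operator()(void* key) const {
    return reinterpret_cast<uintptr_t>(key) / kPageSize;  // consistent with SamePage
  }
};
struct PageEqual {
  bool operator()(void* a, void* b) const { return SamePage(a, b); }
};

int main() {
  // Pointer-identity map: the role played by the plain base::HashMap default.
  std::unordered_map<void*, int> by_identity;

  // Custom-matcher map: the role played by CustomMatcherHashMap(&ICacheMatch).
  std::unordered_map<void*, int, PageHash, PageEqual> by_page;

  alignas(4096) static char page[kPageSize] = {};
  by_page[&page[0]] = 42;
  by_identity[&page[0]] = 42;

  // A different pointer on the same page hits in the matcher map only.
  std::printf("page map: %zu, identity map: %zu\n", by_page.count(&page[8]),
              by_identity.count(&page[8]));
  return 0;
}

Splitting the types presumably lets the common pointer-identity case avoid carrying and calling a matcher function at all; the sketch mirrors that by baking the comparison policy into the map's type.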
diff --git a/src/safepoint-table.h b/src/safepoint-table.h
index fbb0152..e0e9d95 100644
--- a/src/safepoint-table.h
+++ b/src/safepoint-table.h
@@ -8,7 +8,7 @@
 #include "src/allocation.h"
 #include "src/heap/heap.h"
 #include "src/v8memory.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/signature.h b/src/signature.h
index 3fa5f82..97238b6 100644
--- a/src/signature.h
+++ b/src/signature.h
@@ -5,7 +5,7 @@
 #ifndef V8_SIGNATURE_H_
 #define V8_SIGNATURE_H_
 
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/small-pointer-list.h b/src/small-pointer-list.h
index 9ece249..ac5ecaa 100644
--- a/src/small-pointer-list.h
+++ b/src/small-pointer-list.h
@@ -7,7 +7,7 @@
 
 #include "src/base/logging.h"
 #include "src/globals.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/snapshot/code-serializer.cc b/src/snapshot/code-serializer.cc
index 8d2f5d9..16044a5 100644
--- a/src/snapshot/code-serializer.cc
+++ b/src/snapshot/code-serializer.cc
@@ -98,6 +98,10 @@
     UNREACHABLE();
   }
 
+  if (ElideObject(obj)) {
+    return SerializeObject(*isolate()->factory()->undefined_value(),
+                           how_to_code, where_to_point, skip);
+  }
   // Past this point we should not see any (context-specific) maps anymore.
   CHECK(!obj->IsMap());
   // There should be no references to the global object embedded.
diff --git a/src/snapshot/code-serializer.h b/src/snapshot/code-serializer.h
index e82a7d5..b3c54d1 100644
--- a/src/snapshot/code-serializer.h
+++ b/src/snapshot/code-serializer.h
@@ -36,6 +36,7 @@
     UNREACHABLE();
   }
 
+  virtual bool ElideObject(Object* obj) { return false; }
   void SerializeGeneric(HeapObject* heap_object, HowToCode how_to_code,
                         WhereToPoint where_to_point);
 
@@ -73,6 +74,8 @@
     }
   }
 
+  bool ElideObject(Object* obj) override { return obj->IsWeakCell(); };
+
  private:
   WasmCompiledModuleSerializer(Isolate* isolate, uint32_t source_hash)
       : CodeSerializer(isolate, source_hash) {}
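
code-serializer.cc (above) now asks this virtual ElideObject hook before serializing an object and, when it answers true, writes undefined in its place; the wasm serializer overrides it to drop WeakCells. A minimal standalone sketch of the same hook pattern; Obj, BaseSerializer and WasmLikeSerializer are names invented for this illustration, not V8 classes.

// Illustration only: a base serializer consults a virtual predicate to decide
// whether to substitute a placeholder for an object.
#include <cstdio>
#include <string>
#include <vector>

struct Obj {
  std::string kind;  // e.g. "code", "weak_cell"
};

class BaseSerializer {
 public:
  virtual ~BaseSerializer() = default;

  void Serialize(const std::vector<Obj>& objects) {
    for (const Obj& obj : objects) {
      // If the subclass elides this object, emit the placeholder instead.
      const Obj& to_write = ElideObject(obj) ? undefined_ : obj;
      std::printf("writing: %s\n", to_write.kind.c_str());
    }
  }

 protected:
  // Default: nothing is elided, mirroring the base-class hook above.
  virtual bool ElideObject(const Obj&) { return false; }

 private:
  Obj undefined_{"undefined"};
};

class WasmLikeSerializer : public BaseSerializer {
 protected:
  // Drop weak cells, as the wasm override above does for WeakCell objects.
  bool ElideObject(const Obj& obj) override { return obj.kind == "weak_cell"; }
};

int main() {
  WasmLikeSerializer serializer;
  serializer.Serialize({{"code"}, {"weak_cell"}, {"string"}});
  return 0;
}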
diff --git a/src/snapshot/deserializer.cc b/src/snapshot/deserializer.cc
index 7a2df28..b90a2c5 100644
--- a/src/snapshot/deserializer.cc
+++ b/src/snapshot/deserializer.cc
@@ -414,7 +414,7 @@
     LargeObjectSpace* lo_space = isolate_->heap()->lo_space();
     Executability exec = static_cast<Executability>(source_.Get());
     AllocationResult result = lo_space->AllocateRaw(size, exec);
-    HeapObject* obj = HeapObject::cast(result.ToObjectChecked());
+    HeapObject* obj = result.ToObjectChecked();
     deserialized_large_objects_.Add(obj);
     return obj->address();
   } else if (space_index == MAP_SPACE) {
diff --git a/src/snapshot/natives.h b/src/snapshot/natives.h
index e447515..a9dc306 100644
--- a/src/snapshot/natives.h
+++ b/src/snapshot/natives.h
@@ -22,8 +22,15 @@
   TEST
 };
 
+// Extra handling for V8_EXPORT_PRIVATE in combination with USING_V8_SHARED
+// since definition of methods of classes marked as dllimport is not allowed.
 template <NativeType type>
+#ifdef USING_V8_SHARED
 class NativesCollection {
+#else
+class V8_EXPORT_PRIVATE NativesCollection {
+#endif  // USING_V8_SHARED
+
  public:
   // The following methods are implemented in js2c-generated code:
 
diff --git a/src/snapshot/serializer-common.cc b/src/snapshot/serializer-common.cc
index bb3cc5c..adfd6e4 100644
--- a/src/snapshot/serializer-common.cc
+++ b/src/snapshot/serializer-common.cc
@@ -14,7 +14,7 @@
 ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate) {
   map_ = isolate->external_reference_map();
   if (map_ != NULL) return;
-  map_ = new base::HashMap(base::HashMap::PointersMatch);
+  map_ = new base::HashMap();
   ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate);
   for (int i = 0; i < table->size(); ++i) {
     Address addr = table->address(i);
diff --git a/src/snapshot/serializer.cc b/src/snapshot/serializer.cc
index d7a7f89..f622a5b 100644
--- a/src/snapshot/serializer.cc
+++ b/src/snapshot/serializer.cc
@@ -403,9 +403,8 @@
         ExternalTwoByteString::cast(string)->resource()->data());
   }
 
-  AllocationSpace space = (allocation_size > Page::kMaxRegularHeapObjectSize)
-                              ? LO_SPACE
-                              : OLD_SPACE;
+  AllocationSpace space =
+      (allocation_size > kMaxRegularHeapObjectSize) ? LO_SPACE : OLD_SPACE;
   SerializePrologue(space, allocation_size, map);
 
   // Output the rest of the imaginary string.
diff --git a/src/snapshot/serializer.h b/src/snapshot/serializer.h
index ff2c6a9..0f87774 100644
--- a/src/snapshot/serializer.h
+++ b/src/snapshot/serializer.h
@@ -38,7 +38,7 @@
  private:
   class NameMap {
    public:
-    NameMap() : impl_(base::HashMap::PointersMatch) {}
+    NameMap() : impl_() {}
 
     ~NameMap() {
       for (base::HashMap::Entry* p = impl_.Start(); p != NULL;
diff --git a/src/snapshot/snapshot-common.cc b/src/snapshot/snapshot-common.cc
index fed45d1..959ac56 100644
--- a/src/snapshot/snapshot-common.cc
+++ b/src/snapshot/snapshot-common.cc
@@ -31,19 +31,6 @@
   return index < num_contexts;
 }
 
-
-uint32_t Snapshot::SizeOfFirstPage(Isolate* isolate, AllocationSpace space) {
-  DCHECK(space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE);
-  if (!isolate->snapshot_available()) {
-    return static_cast<uint32_t>(MemoryAllocator::PageAreaSize(space));
-  }
-  uint32_t size;
-  int offset = kFirstPageSizesOffset + (space - FIRST_PAGED_SPACE) * kInt32Size;
-  memcpy(&size, isolate->snapshot_blob()->data + offset, kInt32Size);
-  return size;
-}
-
-
 bool Snapshot::Initialize(Isolate* isolate) {
   if (!isolate->snapshot_available()) return false;
   base::ElapsedTimer timer;
@@ -89,25 +76,8 @@
   return Handle<Context>::cast(result);
 }
 
-void UpdateMaxRequirementPerPage(
-    uint32_t* requirements,
-    Vector<const SerializedData::Reservation> reservations) {
-  int space = 0;
-  uint32_t current_requirement = 0;
-  for (const auto& reservation : reservations) {
-    current_requirement += reservation.chunk_size();
-    if (reservation.is_last()) {
-      requirements[space] = std::max(requirements[space], current_requirement);
-      current_requirement = 0;
-      space++;
-    }
-  }
-  DCHECK_EQ(i::Serializer::kNumberOfSpaces, space);
-}
-
-void CalculateFirstPageSizes(const SnapshotData* startup_snapshot,
-                             const List<SnapshotData*>* context_snapshots,
-                             uint32_t* sizes_out) {
+void ProfileDeserialization(const SnapshotData* startup_snapshot,
+                            const List<SnapshotData*>* context_snapshots) {
   if (FLAG_profile_deserialization) {
     int startup_total = 0;
     PrintF("Deserialization will reserve:\n");
@@ -123,36 +93,6 @@
       PrintF("%10d bytes per context #%d\n", context_total, i);
     }
   }
-
-  uint32_t startup_requirements[i::Serializer::kNumberOfSpaces];
-  uint32_t context_requirements[i::Serializer::kNumberOfSpaces];
-  for (int space = 0; space < i::Serializer::kNumberOfSpaces; space++) {
-    startup_requirements[space] = 0;
-    context_requirements[space] = 0;
-  }
-
-  UpdateMaxRequirementPerPage(startup_requirements,
-                              startup_snapshot->Reservations());
-  for (const auto& context_snapshot : *context_snapshots) {
-    UpdateMaxRequirementPerPage(context_requirements,
-                                context_snapshot->Reservations());
-  }
-
-  for (int space = 0; space < i::Serializer::kNumberOfSpaces; space++) {
-    // If the space requirement for a page is less than a page size, we consider
-    // limiting the size of the first page in order to save memory on startup.
-    uint32_t required = startup_requirements[space] +
-                        2 * context_requirements[space] +
-                        Page::kObjectStartOffset;
-    // Add a small allowance to the code space for small scripts.
-    if (space == CODE_SPACE) required += 32 * KB;
-
-    if (space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE) {
-      uint32_t max_size =
-          MemoryAllocator::PageAreaSize(static_cast<AllocationSpace>(space));
-      sizes_out[space - FIRST_PAGED_SPACE] = std::min(required, max_size);
-    }
-  }
 }
 
 v8::StartupData Snapshot::CreateSnapshotBlob(
@@ -166,13 +106,9 @@
     total_length += context_snapshot->RawData().length();
   }
 
-  uint32_t first_page_sizes[kNumPagedSpaces];
-  CalculateFirstPageSizes(startup_snapshot, context_snapshots,
-                          first_page_sizes);
+  ProfileDeserialization(startup_snapshot, context_snapshots);
 
   char* data = new char[total_length];
-  memcpy(data + kFirstPageSizesOffset, first_page_sizes,
-         kNumPagedSpaces * kInt32Size);
   memcpy(data + kNumberOfContextsOffset, &num_contexts, kInt32Size);
   int payload_offset = StartupSnapshotOffset(num_contexts);
   int payload_length = startup_snapshot->RawData().length();
diff --git a/src/snapshot/snapshot.h b/src/snapshot/snapshot.h
index a541592..49a6092 100644
--- a/src/snapshot/snapshot.h
+++ b/src/snapshot/snapshot.h
@@ -67,9 +67,6 @@
 
   static bool EmbedsScript(Isolate* isolate);
 
-  static uint32_t SizeOfFirstPage(Isolate* isolate, AllocationSpace space);
-
-
   // To be implemented by the snapshot source.
   static const v8::StartupData* DefaultSnapshotBlob();
 
@@ -88,21 +85,16 @@
                                                int index);
 
   // Snapshot blob layout:
-  // [0 - 5] pre-calculated first page sizes for paged spaces
-  // [6] number of contexts N
-  // [7] offset to context 0
-  // [8] offset to context 1
+  // [0] number of contexts N
+  // [1] offset to context 0
+  // [2] offset to context 1
   // ...
   // ... offset to context N - 1
   // ... startup snapshot data
   // ... context 0 snapshot data
   // ... context 1 snapshot data
 
-  static const int kNumPagedSpaces = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
-
-  static const int kFirstPageSizesOffset = 0;
-  static const int kNumberOfContextsOffset =
-      kFirstPageSizesOffset + kNumPagedSpaces * kInt32Size;
+  static const int kNumberOfContextsOffset = 0;
   static const int kFirstContextOffsetOffset =
       kNumberOfContextsOffset + kInt32Size;
 
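
With the per-space first-page sizes gone, the header that CreateSnapshotBlob writes is just an int32 context count at offset 0 followed by one int32 offset per context, as the updated layout comment above describes. A minimal standalone sketch of reading such a header; the ReadInt32 helper and the fake 32-byte blob are constructs of this sketch, not V8 functions.

// Sketch of the simplified blob layout:
// [int32 N][int32 offset_0]...[int32 offset_{N-1}][payload...]
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

constexpr int kInt32Size = sizeof(int32_t);

static int32_t ReadInt32(const char* data, int offset) {
  int32_t value;
  std::memcpy(&value, data + offset, kInt32Size);  // unaligned-safe read
  return value;
}

int main() {
  // Build a fake blob with two contexts whose data starts at offsets 20 and 28.
  std::vector<char> blob(32, 0);
  int32_t num_contexts = 2, offsets[2] = {20, 28};
  std::memcpy(blob.data(), &num_contexts, kInt32Size);
  std::memcpy(blob.data() + kInt32Size, offsets, sizeof(offsets));

  // Reading mirrors the new layout: count at offset 0, then per-context offsets.
  int32_t n = ReadInt32(blob.data(), 0);
  for (int i = 0; i < n; i++) {
    int32_t context_offset = ReadInt32(blob.data(), kInt32Size * (1 + i));
    std::printf("context %d starts at byte %d\n", i, context_offset);
  }
  return 0;
}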
diff --git a/src/snapshot/startup-serializer.h b/src/snapshot/startup-serializer.h
index cc66f71..9c1c3b9 100644
--- a/src/snapshot/startup-serializer.h
+++ b/src/snapshot/startup-serializer.h
@@ -32,8 +32,7 @@
  private:
   class PartialCacheIndexMap : public AddressMapBase {
    public:
-    PartialCacheIndexMap()
-        : map_(base::HashMap::PointersMatch), next_index_(0) {}
+    PartialCacheIndexMap() : map_(), next_index_(0) {}
 
     // Lookup object in the map. Return its index if found, or create
     // a new entry with new_index as value, and return kInvalidIndex.
diff --git a/src/source-position-table.h b/src/source-position-table.h
index 76ae4a0..74c3b9e 100644
--- a/src/source-position-table.h
+++ b/src/source-position-table.h
@@ -8,7 +8,7 @@
 #include "src/assert-scope.h"
 #include "src/checks.h"
 #include "src/handles.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/tracing/trace-event.cc b/src/tracing/trace-event.cc
index 3e0a0fa..440af19 100644
--- a/src/tracing/trace-event.cc
+++ b/src/tracing/trace-event.cc
@@ -6,6 +6,7 @@
 
 #include <string.h>
 
+#include "src/counters.h"
 #include "src/isolate.h"
 #include "src/v8.h"
 
@@ -26,9 +27,11 @@
     v8::internal::tracing::AddTraceEvent(
         TRACE_EVENT_PHASE_END, p_data_->category_group_enabled, p_data_->name,
         v8::internal::tracing::kGlobalScope, v8::internal::tracing::kNoId,
-        v8::internal::tracing::kNoId, TRACE_EVENT_FLAG_COPY,
-        "runtime-call-stat",
-        TRACE_STR_COPY(p_data_->isolate->trace_event_stats_table()->Dump()));
+        v8::internal::tracing::kNoId, TRACE_EVENT_FLAG_NONE,
+        "runtime-call-stats", TRACE_STR_COPY(p_data_->isolate->counters()
+                                                 ->runtime_call_stats()
+                                                 ->Dump()
+                                                 .c_str()));
   } else {
     v8::internal::tracing::AddTraceEvent(
         TRACE_EVENT_PHASE_END, p_data_->category_group_enabled, p_data_->name,
@@ -37,14 +40,14 @@
   }
 }
 
-void CallStatsScopedTracer::Initialize(Isolate* isolate,
+void CallStatsScopedTracer::Initialize(v8::internal::Isolate* isolate,
                                        const uint8_t* category_group_enabled,
                                        const char* name) {
   data_.isolate = isolate;
   data_.category_group_enabled = category_group_enabled;
   data_.name = name;
   p_data_ = &data_;
-  TraceEventStatsTable* table = isolate->trace_event_stats_table();
+  RuntimeCallStats* table = isolate->counters()->runtime_call_stats();
   has_parent_scope_ = table->InUse();
   if (!has_parent_scope_) table->Reset();
   v8::internal::tracing::AddTraceEvent(
@@ -53,88 +56,6 @@
       TRACE_EVENT_FLAG_NONE, v8::internal::tracing::kNoId);
 }
 
-void TraceEventStatsTable::Enter(Isolate* isolate,
-                                 TraceEventCallStatsTimer* timer,
-                                 CounterId counter_id) {
-  TraceEventStatsTable* table = isolate->trace_event_stats_table();
-  RuntimeCallCounter* counter = &(table->*counter_id);
-  timer->Start(counter, table->current_timer_);
-  table->current_timer_ = timer;
-}
-
-void TraceEventStatsTable::Leave(Isolate* isolate,
-                                 TraceEventCallStatsTimer* timer) {
-  TraceEventStatsTable* table = isolate->trace_event_stats_table();
-  if (table->current_timer_ == timer) {
-    table->current_timer_ = timer->Stop();
-  }
-}
-
-void TraceEventStatsTable::Reset() {
-  in_use_ = true;
-  current_timer_ = nullptr;
-
-#define RESET_COUNTER(name) this->name.Reset();
-  FOR_EACH_MANUAL_COUNTER(RESET_COUNTER)
-#undef RESET_COUNTER
-
-#define RESET_COUNTER(name, nargs, result_size) this->Runtime_##name.Reset();
-  FOR_EACH_INTRINSIC(RESET_COUNTER)
-#undef RESET_COUNTER
-
-#define RESET_COUNTER(name) this->Builtin_##name.Reset();
-  BUILTIN_LIST_C(RESET_COUNTER)
-#undef RESET_COUNTER
-
-#define RESET_COUNTER(name) this->API_##name.Reset();
-  FOR_EACH_API_COUNTER(RESET_COUNTER)
-#undef RESET_COUNTER
-
-#define RESET_COUNTER(name) this->Handler_##name.Reset();
-  FOR_EACH_HANDLER_COUNTER(RESET_COUNTER)
-#undef RESET_COUNTER
-}
-
-const char* TraceEventStatsTable::Dump() {
-  buffer_.str(std::string());
-  buffer_.clear();
-  buffer_ << "{";
-#define DUMP_COUNTER(name) \
-  if (this->name.count > 0) this->name.Dump(buffer_);
-  FOR_EACH_MANUAL_COUNTER(DUMP_COUNTER)
-#undef DUMP_COUNTER
-
-#define DUMP_COUNTER(name, nargs, result_size) \
-  if (this->Runtime_##name.count > 0) this->Runtime_##name.Dump(buffer_);
-  FOR_EACH_INTRINSIC(DUMP_COUNTER)
-#undef DUMP_COUNTER
-
-#define DUMP_COUNTER(name) \
-  if (this->Builtin_##name.count > 0) this->Builtin_##name.Dump(buffer_);
-  BUILTIN_LIST_C(DUMP_COUNTER)
-#undef DUMP_COUNTER
-
-#define DUMP_COUNTER(name) \
-  if (this->API_##name.count > 0) this->API_##name.Dump(buffer_);
-  FOR_EACH_API_COUNTER(DUMP_COUNTER)
-#undef DUMP_COUNTER
-
-#define DUMP_COUNTER(name) \
-  if (this->Handler_##name.count > 0) this->Handler_##name.Dump(buffer_);
-  FOR_EACH_HANDLER_COUNTER(DUMP_COUNTER)
-#undef DUMP_COUNTER
-  buffer_ << "\"END\":[]}";
-  const std::string& buffer_str = buffer_.str();
-  size_t length = buffer_str.size();
-  if (length > len_) {
-    buffer_c_str_.reset(new char[length + 1]);
-    len_ = length;
-  }
-  strncpy(buffer_c_str_.get(), buffer_str.c_str(), length + 1);
-  in_use_ = false;
-  return buffer_c_str_.get();
-}
-
 }  // namespace tracing
 }  // namespace internal
 }  // namespace v8
diff --git a/src/tracing/trace-event.h b/src/tracing/trace-event.h
index 25ccd80..35d2e15 100644
--- a/src/tracing/trace-event.h
+++ b/src/tracing/trace-event.h
@@ -6,12 +6,12 @@
 #define SRC_TRACING_TRACE_EVENT_H_
 
 #include <stddef.h>
+#include <memory>
 
 #include "base/trace_event/common/trace_event_common.h"
 #include "include/v8-platform.h"
 #include "src/base/atomicops.h"
 #include "src/base/macros.h"
-#include "src/counters.h"
 
 // This header file defines implementation details of how the trace macros in
 // trace_event_common.h collect and store trace events. Anything not
@@ -121,8 +121,7 @@
 //                    const uint8_t* arg_types,
 //                    const uint64_t* arg_values,
 //                    unsigned int flags)
-#define TRACE_EVENT_API_ADD_TRACE_EVENT \
-  v8::internal::tracing::TraceEventHelper::GetCurrentPlatform()->AddTraceEvent
+#define TRACE_EVENT_API_ADD_TRACE_EVENT v8::internal::tracing::AddTraceEventImpl
 
 // Set the duration field of a COMPLETE trace event.
 // void TRACE_EVENT_API_UPDATE_TRACE_EVENT_DURATION(
@@ -281,7 +280,7 @@
     uint64_t cid_;                                                         \
   };                                                                       \
   INTERNAL_TRACE_EVENT_UID(ScopedContext)                                  \
-  INTERNAL_TRACE_EVENT_UID(scoped_context)(context.raw_id());
+  INTERNAL_TRACE_EVENT_UID(scoped_context)(context);
 
 #define TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() \
   base::NoBarrier_Load(&v8::internal::tracing::kRuntimeCallStatsTracingEnabled)
@@ -289,9 +288,6 @@
 #define TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name) \
   INTERNAL_TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name)
 
-#define TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(isolate, counter_id) \
-  INTERNAL_TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(isolate, counter_id)
-
 #define INTERNAL_TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name)  \
   {                                                                            \
     INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(                                    \
@@ -309,13 +305,11 @@
                     name);                                                     \
   }
 
-#define INTERNAL_TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(isolate,    \
-                                                               counter_id) \
-  v8::internal::tracing::CounterScope INTERNAL_TRACE_EVENT_UID(scope)(     \
-      isolate, counter_id);
-
 namespace v8 {
 namespace internal {
+
+class Isolate;
+
 namespace tracing {
 
 // Specify these values when the corresponding argument of AddTraceEvent is not
@@ -460,6 +454,28 @@
   const char* str_;
 };
 
+static V8_INLINE uint64_t AddTraceEventImpl(
+    char phase, const uint8_t* category_group_enabled, const char* name,
+    const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
+    const char** arg_names, const uint8_t* arg_types,
+    const uint64_t* arg_values, unsigned int flags) {
+  std::unique_ptr<ConvertableToTraceFormat> arg_convertables[2];
+  if (num_args > 0 && arg_types[0] == TRACE_VALUE_TYPE_CONVERTABLE) {
+    arg_convertables[0].reset(reinterpret_cast<ConvertableToTraceFormat*>(
+        static_cast<intptr_t>(arg_values[0])));
+  }
+  if (num_args > 1 && arg_types[1] == TRACE_VALUE_TYPE_CONVERTABLE) {
+    arg_convertables[1].reset(reinterpret_cast<ConvertableToTraceFormat*>(
+        static_cast<intptr_t>(arg_values[1])));
+  }
+  DCHECK(num_args <= 2);
+  v8::Platform* platform =
+      v8::internal::tracing::TraceEventHelper::GetCurrentPlatform();
+  return platform->AddTraceEvent(phase, category_group_enabled, name, scope, id,
+                                 bind_id, num_args, arg_names, arg_types,
+                                 arg_values, arg_convertables, flags);
+}
+
 // Define SetTraceValue for each allowed type. It stores the type and
 // value in the return arguments. This allows this API to avoid declaring any
 // structures so that it is portable to third_party libraries.
@@ -500,6 +516,19 @@
 #undef INTERNAL_DECLARE_SET_TRACE_VALUE
 #undef INTERNAL_DECLARE_SET_TRACE_VALUE_INT
 
+static V8_INLINE void SetTraceValue(ConvertableToTraceFormat* convertable_value,
+                                    unsigned char* type, uint64_t* value) {
+  *type = TRACE_VALUE_TYPE_CONVERTABLE;
+  *value = static_cast<uint64_t>(reinterpret_cast<intptr_t>(convertable_value));
+}
+
+template <typename T>
+static V8_INLINE typename std::enable_if<
+    std::is_convertible<T*, ConvertableToTraceFormat*>::value>::type
+SetTraceValue(std::unique_ptr<T> ptr, unsigned char* type, uint64_t* value) {
+  SetTraceValue(ptr.release(), type, value);
+}
+
 // These AddTraceEvent template
 // function is defined here instead of in the macro, because the arg_values
 // could be temporary objects, such as std::string. In order to store
@@ -512,36 +541,38 @@
                                         uint64_t id, uint64_t bind_id,
                                         unsigned int flags) {
   return TRACE_EVENT_API_ADD_TRACE_EVENT(phase, category_group_enabled, name,
-                                         scope, id, bind_id, kZeroNumArgs, NULL,
-                                         NULL, NULL, flags);
+                                         scope, id, bind_id, kZeroNumArgs,
+                                         nullptr, nullptr, nullptr, flags);
 }
 
 template <class ARG1_TYPE>
 static V8_INLINE uint64_t AddTraceEvent(
     char phase, const uint8_t* category_group_enabled, const char* name,
     const char* scope, uint64_t id, uint64_t bind_id, unsigned int flags,
-    const char* arg1_name, const ARG1_TYPE& arg1_val) {
+    const char* arg1_name, ARG1_TYPE&& arg1_val) {
   const int num_args = 1;
-  uint8_t arg_types[1];
-  uint64_t arg_values[1];
-  SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]);
+  uint8_t arg_type;
+  uint64_t arg_value;
+  SetTraceValue(std::forward<ARG1_TYPE>(arg1_val), &arg_type, &arg_value);
   return TRACE_EVENT_API_ADD_TRACE_EVENT(
       phase, category_group_enabled, name, scope, id, bind_id, num_args,
-      &arg1_name, arg_types, arg_values, flags);
+      &arg1_name, &arg_type, &arg_value, flags);
 }
 
 template <class ARG1_TYPE, class ARG2_TYPE>
 static V8_INLINE uint64_t AddTraceEvent(
     char phase, const uint8_t* category_group_enabled, const char* name,
     const char* scope, uint64_t id, uint64_t bind_id, unsigned int flags,
-    const char* arg1_name, const ARG1_TYPE& arg1_val, const char* arg2_name,
-    const ARG2_TYPE& arg2_val) {
+    const char* arg1_name, ARG1_TYPE&& arg1_val, const char* arg2_name,
+    ARG2_TYPE&& arg2_val) {
   const int num_args = 2;
   const char* arg_names[2] = {arg1_name, arg2_name};
   unsigned char arg_types[2];
   uint64_t arg_values[2];
-  SetTraceValue(arg1_val, &arg_types[0], &arg_values[0]);
-  SetTraceValue(arg2_val, &arg_types[1], &arg_values[1]);
+  SetTraceValue(std::forward<ARG1_TYPE>(arg1_val), &arg_types[0],
+                &arg_values[0]);
+  SetTraceValue(std::forward<ARG2_TYPE>(arg2_val), &arg_types[1],
+                &arg_values[1]);
   return TRACE_EVENT_API_ADD_TRACE_EVENT(
       phase, category_group_enabled, name, scope, id, bind_id, num_args,
       arg_names, arg_types, arg_values, flags);
@@ -634,136 +665,21 @@
     }
   }
 
-  void Initialize(Isolate* isolate, const uint8_t* category_group_enabled,
-                  const char* name);
+  void Initialize(v8::internal::Isolate* isolate,
+                  const uint8_t* category_group_enabled, const char* name);
 
  private:
   void AddEndTraceEvent();
   struct Data {
     const uint8_t* category_group_enabled;
     const char* name;
-    Isolate* isolate;
+    v8::internal::Isolate* isolate;
   };
   bool has_parent_scope_;
   Data* p_data_;
   Data data_;
 };
 
-// TraceEventCallStatsTimer is used to keep track of the stack of currently
-// active timers used for properly measuring the own time of a
-// RuntimeCallCounter.
-class TraceEventCallStatsTimer {
- public:
-  TraceEventCallStatsTimer() : counter_(nullptr), parent_(nullptr) {}
-  RuntimeCallCounter* counter() { return counter_; }
-  base::ElapsedTimer timer() { return timer_; }
-
- private:
-  friend class TraceEventStatsTable;
-
-  V8_INLINE void Start(RuntimeCallCounter* counter,
-                       TraceEventCallStatsTimer* parent) {
-    counter_ = counter;
-    parent_ = parent;
-    timer_.Start();
-  }
-
-  V8_INLINE TraceEventCallStatsTimer* Stop() {
-    base::TimeDelta delta = timer_.Elapsed();
-    timer_.Stop();
-    counter_->count++;
-    counter_->time += delta;
-    if (parent_ != nullptr) {
-      // Adjust parent timer so that it does not include sub timer's time.
-      parent_->counter_->time -= delta;
-    }
-    return parent_;
-  }
-
-  RuntimeCallCounter* counter_;
-  TraceEventCallStatsTimer* parent_;
-  base::ElapsedTimer timer_;
-};
-
-class TraceEventStatsTable {
- public:
-  typedef RuntimeCallCounter TraceEventStatsTable::*CounterId;
-
-#define CALL_RUNTIME_COUNTER(name) \
-  RuntimeCallCounter name = RuntimeCallCounter(#name);
-  FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
-#undef CALL_RUNTIME_COUNTER
-#define CALL_RUNTIME_COUNTER(name, nargs, ressize) \
-  RuntimeCallCounter Runtime_##name = RuntimeCallCounter(#name);
-  FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
-#undef CALL_RUNTIME_COUNTER
-#define CALL_BUILTIN_COUNTER(name) \
-  RuntimeCallCounter Builtin_##name = RuntimeCallCounter(#name);
-  BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
-#undef CALL_BUILTIN_COUNTER
-#define CALL_BUILTIN_COUNTER(name) \
-  RuntimeCallCounter API_##name = RuntimeCallCounter("API_" #name);
-  FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
-#undef CALL_BUILTIN_COUNTER
-#define CALL_BUILTIN_COUNTER(name) \
-  RuntimeCallCounter Handler_##name = RuntimeCallCounter(#name);
-  FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
-#undef CALL_BUILTIN_COUNTER
-
-  // Starting measuring the time for a function. This will establish the
-  // connection to the parent counter for properly calculating the own times.
-  static void Enter(Isolate* isolate, TraceEventCallStatsTimer* timer,
-                    CounterId counter_id);
-
-  // Leave a scope for a measured runtime function. This will properly add
-  // the time delta to the current_counter and subtract the delta from its
-  // parent.
-  static void Leave(Isolate* isolate, TraceEventCallStatsTimer* timer);
-
-  void Reset();
-  const char* Dump();
-
-  TraceEventStatsTable() {
-    Reset();
-    in_use_ = false;
-  }
-
-  TraceEventCallStatsTimer* current_timer() { return current_timer_; }
-  bool InUse() { return in_use_; }
-
- private:
-  std::stringstream buffer_;
-  std::unique_ptr<char[]> buffer_c_str_;
-  size_t len_ = 0;
-  // Counter to track recursive time events.
-  TraceEventCallStatsTimer* current_timer_ = nullptr;
-  bool in_use_;
-};
-
-class CounterScope {
- public:
-  CounterScope(Isolate* isolate, TraceEventStatsTable::CounterId counter_id)
-      : isolate_(nullptr) {
-    if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) {
-      isolate_ = isolate;
-      TraceEventStatsTable::Enter(isolate_, &timer_, counter_id);
-    }
-  }
-  ~CounterScope() {
-    // A non-nullptr isolate_ means the stats table already entered the scope
-    // and started the timer, we need to leave the scope and reset the timer
-    // even when we stop tracing, otherwise we have the risk to have a dangling
-    // pointer.
-    if (V8_UNLIKELY(isolate_ != nullptr)) {
-      TraceEventStatsTable::Leave(isolate_, &timer_);
-    }
-  }
-
- private:
-  Isolate* isolate_;
-  TraceEventCallStatsTimer timer_;
-};
-
 }  // namespace tracing
 }  // namespace internal
 }  // namespace v8
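
The new AddTraceEventImpl and the unique_ptr overload of SetTraceValue above hand a heap-allocated convertable argument across the generic (type tag, uint64_t value) slots: the pointer is released into the value slot and re-wrapped in a std::unique_ptr on the receiving side, so it is destroyed after the platform call. A self-contained sketch of that round trip; the Convertable struct, the kTypeConvertable tag and the function names below are stand-ins invented for the illustration.

// Illustration only: releasing a unique_ptr into a uint64_t argument slot and
// recovering ownership on the other side.
#include <cstdint>
#include <cstdio>
#include <memory>
#include <utility>

struct Convertable {
  const char* json = "";
  ~Convertable() { std::puts("convertable destroyed"); }
};

constexpr unsigned char kTypeConvertable = 8;

// Role of SetTraceValue: encode ownership of the pointer into the generic slot.
void SetValue(std::unique_ptr<Convertable> ptr, unsigned char* type,
              uint64_t* value) {
  *type = kTypeConvertable;
  *value = static_cast<uint64_t>(reinterpret_cast<intptr_t>(ptr.release()));
}

// Role of AddTraceEventImpl: recover ownership before handing off the event.
void AddEvent(unsigned char type, uint64_t value) {
  std::unique_ptr<Convertable> arg;
  if (type == kTypeConvertable) {
    arg.reset(reinterpret_cast<Convertable*>(static_cast<intptr_t>(value)));
  }
  if (arg) std::printf("event arg: %s\n", arg->json);
  // arg goes out of scope here, so the convertable cannot leak.
}

int main() {
  auto conv = std::make_unique<Convertable>();
  conv->json = "{\"duration\": 3}";
  unsigned char type = 0;
  uint64_t value = 0;
  SetValue(std::move(conv), &type, &value);
  AddEvent(type, value);
  return 0;
}

Packing the pointer into the existing uint64_t slot keeps the variadic SetTraceValue machinery unchanged while still giving the convertable argument single-owner semantics end to end.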
diff --git a/src/type-cache.cc b/src/type-cache.cc
deleted file mode 100644
index d05aaa1..0000000
--- a/src/type-cache.cc
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/type-cache.h"
-
-#include "src/base/lazy-instance.h"
-
-namespace v8 {
-namespace internal {
-
-namespace {
-
-base::LazyInstance<TypeCache>::type kCache = LAZY_INSTANCE_INITIALIZER;
-
-}  // namespace
-
-
-// static
-TypeCache const& TypeCache::Get() { return kCache.Get(); }
-
-}  // namespace internal
-}  // namespace v8
diff --git a/src/type-cache.h b/src/type-cache.h
deleted file mode 100644
index f83f3bd..0000000
--- a/src/type-cache.h
+++ /dev/null
@@ -1,174 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_TYPE_CACHE_H_
-#define V8_TYPE_CACHE_H_
-
-#include "src/types.h"
-
-namespace v8 {
-namespace internal {
-
-class TypeCache final {
- private:
-  // This has to be first for the initialization magic to work.
-  base::AccountingAllocator allocator;
-  Zone zone_;
-
- public:
-  static TypeCache const& Get();
-
-  TypeCache() : zone_(&allocator) {}
-
-  Type* const kInt8 =
-      CreateNative(CreateRange<int8_t>(), Type::UntaggedIntegral8());
-  Type* const kUint8 =
-      CreateNative(CreateRange<uint8_t>(), Type::UntaggedIntegral8());
-  Type* const kUint8Clamped = kUint8;
-  Type* const kInt16 =
-      CreateNative(CreateRange<int16_t>(), Type::UntaggedIntegral16());
-  Type* const kUint16 =
-      CreateNative(CreateRange<uint16_t>(), Type::UntaggedIntegral16());
-  Type* const kInt32 =
-      CreateNative(Type::Signed32(), Type::UntaggedIntegral32());
-  Type* const kUint32 =
-      CreateNative(Type::Unsigned32(), Type::UntaggedIntegral32());
-  Type* const kFloat32 = CreateNative(Type::Number(), Type::UntaggedFloat32());
-  Type* const kFloat64 = CreateNative(Type::Number(), Type::UntaggedFloat64());
-
-  Type* const kSmi = CreateNative(Type::SignedSmall(), Type::TaggedSigned());
-  Type* const kHoleySmi = Type::Union(kSmi, Type::Hole(), zone());
-  Type* const kHeapNumber = CreateNative(Type::Number(), Type::TaggedPointer());
-
-  Type* const kSingletonZero = CreateRange(0.0, 0.0);
-  Type* const kSingletonOne = CreateRange(1.0, 1.0);
-  Type* const kSingletonTen = CreateRange(10.0, 10.0);
-  Type* const kSingletonMinusOne = CreateRange(-1.0, -1.0);
-  Type* const kZeroOrUndefined =
-      Type::Union(kSingletonZero, Type::Undefined(), zone());
-  Type* const kTenOrUndefined =
-      Type::Union(kSingletonTen, Type::Undefined(), zone());
-  Type* const kMinusOneOrZero = CreateRange(-1.0, 0.0);
-  Type* const kMinusOneToOne = CreateRange(-1.0, 1.0);
-  Type* const kZeroOrOne = CreateRange(0.0, 1.0);
-  Type* const kZeroOrOneOrNaN = Type::Union(kZeroOrOne, Type::NaN(), zone());
-  Type* const kZeroToThirtyOne = CreateRange(0.0, 31.0);
-  Type* const kZeroToThirtyTwo = CreateRange(0.0, 32.0);
-  Type* const kZeroish =
-      Type::Union(kSingletonZero, Type::MinusZeroOrNaN(), zone());
-  Type* const kInteger = CreateRange(-V8_INFINITY, V8_INFINITY);
-  Type* const kIntegerOrMinusZero =
-      Type::Union(kInteger, Type::MinusZero(), zone());
-  Type* const kIntegerOrMinusZeroOrNaN =
-      Type::Union(kIntegerOrMinusZero, Type::NaN(), zone());
-  Type* const kPositiveInteger = CreateRange(0.0, V8_INFINITY);
-  Type* const kPositiveIntegerOrMinusZero =
-      Type::Union(kPositiveInteger, Type::MinusZero(), zone());
-  Type* const kPositiveIntegerOrMinusZeroOrNaN =
-      Type::Union(kPositiveIntegerOrMinusZero, Type::NaN(), zone());
-
-  Type* const kAdditiveSafeInteger =
-      CreateRange(-4503599627370496.0, 4503599627370496.0);
-  Type* const kSafeInteger = CreateRange(-kMaxSafeInteger, kMaxSafeInteger);
-  Type* const kAdditiveSafeIntegerOrMinusZero =
-      Type::Union(kAdditiveSafeInteger, Type::MinusZero(), zone());
-  Type* const kSafeIntegerOrMinusZero =
-      Type::Union(kSafeInteger, Type::MinusZero(), zone());
-  Type* const kPositiveSafeInteger = CreateRange(0.0, kMaxSafeInteger);
-
-  Type* const kUntaggedUndefined =
-      Type::Intersect(Type::Undefined(), Type::Untagged(), zone());
-
-  // Asm.js related types.
-  Type* const kAsmSigned = kInt32;
-  Type* const kAsmUnsigned = kUint32;
-  Type* const kAsmInt = Type::Union(kAsmSigned, kAsmUnsigned, zone());
-  Type* const kAsmFixnum = Type::Intersect(kAsmSigned, kAsmUnsigned, zone());
-  Type* const kAsmFloat = kFloat32;
-  Type* const kAsmDouble = kFloat64;
-  Type* const kAsmFloatQ = Type::Union(kAsmFloat, kUntaggedUndefined, zone());
-  Type* const kAsmDoubleQ = Type::Union(kAsmDouble, kUntaggedUndefined, zone());
-  // Not part of the Asm.js type hierarchy, but represents a part of what
-  // intish encompasses.
-  Type* const kAsmIntQ = Type::Union(kAsmInt, kUntaggedUndefined, zone());
-  Type* const kAsmFloatDoubleQ = Type::Union(kAsmFloatQ, kAsmDoubleQ, zone());
-  // Asm.js size unions.
-  Type* const kAsmSize8 = Type::Union(kInt8, kUint8, zone());
-  Type* const kAsmSize16 = Type::Union(kInt16, kUint16, zone());
-  Type* const kAsmSize32 =
-      Type::Union(Type::Union(kInt32, kUint32, zone()), kAsmFloat, zone());
-  Type* const kAsmSize64 = kFloat64;
-  // Asm.js other types.
-  Type* const kAsmComparable = Type::Union(
-      kAsmSigned,
-      Type::Union(kAsmUnsigned, Type::Union(kAsmDouble, kAsmFloat, zone()),
-                  zone()),
-      zone());
-  Type* const kAsmIntArrayElement =
-      Type::Union(Type::Union(kInt8, kUint8, zone()),
-                  Type::Union(Type::Union(kInt16, kUint16, zone()),
-                              Type::Union(kInt32, kUint32, zone()), zone()),
-                  zone());
-
-  // The FixedArray::length property always containts a smi in the range
-  // [0, FixedArray::kMaxLength].
-  Type* const kFixedArrayLengthType = CreateNative(
-      CreateRange(0.0, FixedArray::kMaxLength), Type::TaggedSigned());
-
-  // The FixedDoubleArray::length property always containts a smi in the range
-  // [0, FixedDoubleArray::kMaxLength].
-  Type* const kFixedDoubleArrayLengthType = CreateNative(
-      CreateRange(0.0, FixedDoubleArray::kMaxLength), Type::TaggedSigned());
-
-  // The JSArray::length property always contains a tagged number in the range
-  // [0, kMaxUInt32].
-  Type* const kJSArrayLengthType =
-      CreateNative(Type::Unsigned32(), Type::Tagged());
-
-  // The JSTyped::length property always contains a tagged number in the range
-  // [0, kMaxSmiValue].
-  Type* const kJSTypedArrayLengthType =
-      CreateNative(Type::UnsignedSmall(), Type::TaggedSigned());
-
-  // The String::length property always contains a smi in the range
-  // [0, String::kMaxLength].
-  Type* const kStringLengthType =
-      CreateNative(CreateRange(0.0, String::kMaxLength), Type::TaggedSigned());
-
-#define TYPED_ARRAY(TypeName, type_name, TYPE_NAME, ctype, size) \
-  Type* const k##TypeName##Array = CreateArray(k##TypeName);
-  TYPED_ARRAYS(TYPED_ARRAY)
-#undef TYPED_ARRAY
-
- private:
-  Type* CreateArray(Type* element) { return Type::Array(element, zone()); }
-
-  Type* CreateArrayFunction(Type* array) {
-    Type* arg1 = Type::Union(Type::Unsigned32(), Type::Object(), zone());
-    Type* arg2 = Type::Union(Type::Unsigned32(), Type::Undefined(), zone());
-    Type* arg3 = arg2;
-    return Type::Function(array, arg1, arg2, arg3, zone());
-  }
-
-  Type* CreateNative(Type* semantic, Type* representation) {
-    return Type::Intersect(semantic, representation, zone());
-  }
-
-  template <typename T>
-  Type* CreateRange() {
-    return CreateRange(std::numeric_limits<T>::min(),
-                       std::numeric_limits<T>::max());
-  }
-
-  Type* CreateRange(double min, double max) {
-    return Type::Range(min, max, zone());
-  }
-
-  Zone* zone() { return &zone_; }
-};
-
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_TYPE_CACHE_H_
diff --git a/src/type-feedback-vector-inl.h b/src/type-feedback-vector-inl.h
index 771021f..f70f018 100644
--- a/src/type-feedback-vector-inl.h
+++ b/src/type-feedback-vector-inl.h
@@ -5,6 +5,7 @@
 #ifndef V8_TYPE_FEEDBACK_VECTOR_INL_H_
 #define V8_TYPE_FEEDBACK_VECTOR_INL_H_
 
+#include "src/globals.h"
 #include "src/type-feedback-vector.h"
 
 namespace v8 {
@@ -52,7 +53,13 @@
 int TypeFeedbackMetadata::GetSlotSize(FeedbackVectorSlotKind kind) {
   DCHECK_NE(FeedbackVectorSlotKind::INVALID, kind);
   DCHECK_NE(FeedbackVectorSlotKind::KINDS_NUMBER, kind);
-  return kind == FeedbackVectorSlotKind::GENERAL ? 1 : 2;
+  if (kind == FeedbackVectorSlotKind::GENERAL ||
+      kind == FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC ||
+      kind == FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC) {
+    return 1;
+  }
+
+  return 2;
 }
 
 bool TypeFeedbackMetadata::SlotRequiresName(FeedbackVectorSlotKind kind) {
@@ -65,6 +72,8 @@
     case FeedbackVectorSlotKind::KEYED_LOAD_IC:
     case FeedbackVectorSlotKind::STORE_IC:
     case FeedbackVectorSlotKind::KEYED_STORE_IC:
+    case FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC:
+    case FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC:
     case FeedbackVectorSlotKind::GENERAL:
     case FeedbackVectorSlotKind::INVALID:
       return false;
@@ -77,22 +86,20 @@
 }
 
 bool TypeFeedbackVector::is_empty() const {
-  if (length() == 0) return true;
-  DCHECK(length() > kReservedIndexCount);
-  return false;
+  return length() == kReservedIndexCount;
 }
 
-
 int TypeFeedbackVector::slot_count() const {
-  if (length() == 0) return 0;
-  DCHECK(length() > kReservedIndexCount);
   return length() - kReservedIndexCount;
 }
 
 
 TypeFeedbackMetadata* TypeFeedbackVector::metadata() const {
-  return is_empty() ? TypeFeedbackMetadata::cast(GetHeap()->empty_fixed_array())
-                    : TypeFeedbackMetadata::cast(get(kMetadataIndex));
+  return TypeFeedbackMetadata::cast(get(kMetadataIndex));
+}
+
+int TypeFeedbackVector::invocation_count() const {
+  return Smi::cast(get(kInvocationCountIndex))->value();
 }
 
 // Conversion from an integer index to either a slot or an ic slot.
@@ -113,23 +120,93 @@
   set(GetIndex(slot), value, mode);
 }
 
+// Helper function to transform the feedback to BinaryOperationHint.
+BinaryOperationHint BinaryOperationHintFromFeedback(int type_feedback) {
+  switch (type_feedback) {
+    case BinaryOperationFeedback::kNone:
+      return BinaryOperationHint::kNone;
+    case BinaryOperationFeedback::kSignedSmall:
+      return BinaryOperationHint::kSignedSmall;
+    case BinaryOperationFeedback::kNumber:
+      return BinaryOperationHint::kNumberOrOddball;
+    case BinaryOperationFeedback::kString:
+      return BinaryOperationHint::kString;
+    case BinaryOperationFeedback::kAny:
+    default:
+      return BinaryOperationHint::kAny;
+  }
+  UNREACHABLE();
+  return BinaryOperationHint::kNone;
+}
 
-void TypeFeedbackVector::ComputeCounts(int* with_type_info, int* generic) {
+// Helper function to transform the feedback to CompareOperationHint.
+CompareOperationHint CompareOperationHintFromFeedback(int type_feedback) {
+  switch (type_feedback) {
+    case CompareOperationFeedback::kNone:
+      return CompareOperationHint::kNone;
+    case CompareOperationFeedback::kSignedSmall:
+      return CompareOperationHint::kSignedSmall;
+    case CompareOperationFeedback::kNumber:
+      return CompareOperationHint::kNumber;
+    default:
+      return CompareOperationHint::kAny;
+  }
+  UNREACHABLE();
+  return CompareOperationHint::kNone;
+}
+
+void TypeFeedbackVector::ComputeCounts(int* with_type_info, int* generic,
+                                       int* vector_ic_count,
+                                       bool code_is_interpreted) {
   Object* uninitialized_sentinel =
       TypeFeedbackVector::RawUninitializedSentinel(GetIsolate());
   Object* megamorphic_sentinel =
       *TypeFeedbackVector::MegamorphicSentinel(GetIsolate());
   int with = 0;
   int gen = 0;
+  int total = 0;
   TypeFeedbackMetadataIterator iter(metadata());
   while (iter.HasNext()) {
     FeedbackVectorSlot slot = iter.Next();
     FeedbackVectorSlotKind kind = iter.kind();
 
     Object* obj = Get(slot);
-    if (obj != uninitialized_sentinel &&
-        kind != FeedbackVectorSlotKind::GENERAL) {
-      if (obj->IsWeakCell() || obj->IsFixedArray() || obj->IsString()) {
+    if (kind == FeedbackVectorSlotKind::GENERAL) {
+      continue;
+    }
+    total++;
+
+    if (obj != uninitialized_sentinel) {
+      if (kind == FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC ||
+          kind == FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC) {
+        // If we are not running interpreted code, we need to ignore
+        // the special ic slots for binaryop/compare used by the
+        // interpreter.
+        // TODO(mvstanton): Remove code_is_interpreted when full code
+        // is retired from service.
+        if (!code_is_interpreted) continue;
+
+        DCHECK(obj->IsSmi());
+        int op_feedback = static_cast<int>(Smi::cast(obj)->value());
+        if (kind == FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC) {
+          CompareOperationHint hint =
+              CompareOperationHintFromFeedback(op_feedback);
+          if (hint == CompareOperationHint::kAny) {
+            gen++;
+          } else if (hint != CompareOperationHint::kNone) {
+            with++;
+          }
+        } else {
+          DCHECK(kind == FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC);
+          BinaryOperationHint hint =
+              BinaryOperationHintFromFeedback(op_feedback);
+          if (hint == BinaryOperationHint::kAny) {
+            gen++;
+          } else if (hint != BinaryOperationHint::kNone) {
+            with++;
+          }
+        }
+      } else if (obj->IsWeakCell() || obj->IsFixedArray() || obj->IsString()) {
         with++;
       } else if (obj == megamorphic_sentinel) {
         gen++;
@@ -139,6 +216,7 @@
 
   *with_type_info = with;
   *generic = gen;
+  *vector_ic_count = total;
 }
 
 Handle<Symbol> TypeFeedbackVector::UninitializedSentinel(Isolate* isolate) {
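For orientation, the counting above for the interpreter's BINARYOP/COMPARE slots boils down to: decode the Smi feedback into a hint, count kAny as generic, count any other initialized hint as having type info, and count every non-GENERAL slot toward the vector IC total. A minimal standalone sketch of that bucketing (illustrative only, not V8 source; Hint and CountInterpreterSlot are stand-ins for BinaryOperationHint and the loop body of ComputeCounts):

// Illustrative sketch, not V8 source.
#include <cstdint>

enum class Hint : uint8_t { kNone, kSignedSmall, kNumberOrOddball, kString, kAny };

struct Counts {
  int with_type_info = 0;   // slots that recorded some useful type feedback
  int generic = 0;          // slots that went fully generic (kAny)
  int vector_ic_count = 0;  // every non-GENERAL slot, initialized or not
};

void CountInterpreterSlot(Hint hint, Counts* counts) {
  counts->vector_ic_count++;
  if (hint == Hint::kAny) {
    counts->generic++;
  } else if (hint != Hint::kNone) {
    counts->with_type_info++;
  }
  // Hint::kNone means uninitialized feedback: it only contributes to the total.
}

For example, slots carrying the hints {kSignedSmall, kAny, kNone} yield with_type_info = 1, generic = 1, vector_ic_count = 3.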
diff --git a/src/type-feedback-vector.cc b/src/type-feedback-vector.cc
index 61f5e8b..30bc2d4 100644
--- a/src/type-feedback-vector.cc
+++ b/src/type-feedback-vector.cc
@@ -102,9 +102,7 @@
 
   Handle<UnseededNumberDictionary> names;
   if (name_count) {
-    names = UnseededNumberDictionary::New(
-        isolate, base::bits::RoundUpToPowerOfTwo32(name_count), TENURED,
-        USE_CUSTOM_MINIMUM_CAPACITY);
+    names = UnseededNumberDictionary::New(isolate, name_count, TENURED);
   }
 
   int name_index = 0;
@@ -114,7 +112,10 @@
     if (SlotRequiresName(kind)) {
       Handle<String> name = spec->GetName(name_index);
       DCHECK(!name.is_null());
-      names = UnseededNumberDictionary::AtNumberPut(names, i, name);
+      Handle<UnseededNumberDictionary> new_names =
+          UnseededNumberDictionary::AtNumberPut(names, i, name);
+      DCHECK_EQ(*new_names, *names);
+      names = new_names;
       name_index++;
     }
   }
@@ -202,6 +203,10 @@
       return "STORE_IC";
     case FeedbackVectorSlotKind::KEYED_STORE_IC:
       return "KEYED_STORE_IC";
+    case FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC:
+      return "INTERPRETER_BINARYOP_IC";
+    case FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC:
+      return "INTERPRETER_COMPARE_IC";
     case FeedbackVectorSlotKind::GENERAL:
       return "STUB";
     case FeedbackVectorSlotKind::KINDS_NUMBER:
@@ -230,11 +235,13 @@
   const int slot_count = metadata->slot_count();
   const int length = slot_count + kReservedIndexCount;
   if (length == kReservedIndexCount) {
-    return Handle<TypeFeedbackVector>::cast(factory->empty_fixed_array());
+    return Handle<TypeFeedbackVector>::cast(
+        factory->empty_type_feedback_vector());
   }
 
   Handle<FixedArray> array = factory->NewFixedArray(length, TENURED);
   array->set(kMetadataIndex, *metadata);
+  array->set(kInvocationCountIndex, Smi::FromInt(0));
 
   DisallowHeapAllocation no_gc;
 
@@ -250,12 +257,18 @@
     Object* value;
     if (kind == FeedbackVectorSlotKind::LOAD_GLOBAL_IC) {
       value = *factory->empty_weak_cell();
+    } else if (kind == FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC ||
+               kind == FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC) {
+      value = Smi::FromInt(0);
     } else {
       value = *uninitialized_sentinel;
     }
     array->set(index, value, SKIP_WRITE_BARRIER);
+
+    value = kind == FeedbackVectorSlotKind::CALL_IC ? Smi::FromInt(0)
+                                                    : *uninitialized_sentinel;
     for (int j = 1; j < entry_size; j++) {
-      array->set(index + j, *uninitialized_sentinel, SKIP_WRITE_BARRIER);
+      array->set(index + j, value, SKIP_WRITE_BARRIER);
     }
     i += entry_size;
   }
@@ -334,6 +347,13 @@
           nexus.Clear(shared->code());
           break;
         }
+        case FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC:
+        case FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC: {
+          DCHECK(Get(slot)->IsSmi());
+          // Don't clear these Smi slots.
+          // Set(slot, Smi::FromInt(0));
+          break;
+        }
         case FeedbackVectorSlotKind::GENERAL: {
           if (obj->IsHeapObject()) {
             InstanceType instance_type =
@@ -620,16 +640,25 @@
 
 int CallICNexus::ExtractCallCount() {
   Object* call_count = GetFeedbackExtra();
-  if (call_count->IsSmi()) {
-    int value = Smi::cast(call_count)->value();
-    return value;
-  }
-  return -1;
+  CHECK(call_count->IsSmi());
+  int value = Smi::cast(call_count)->value();
+  return value;
 }
 
+float CallICNexus::ComputeCallFrequency() {
+  double const invocation_count = vector()->invocation_count();
+  double const call_count = ExtractCallCount();
+  return static_cast<float>(call_count / invocation_count);
+}
 
 void CallICNexus::Clear(Code* host) { CallIC::Clear(GetIsolate(), host, this); }
 
+void CallICNexus::ConfigureUninitialized() {
+  Isolate* isolate = GetIsolate();
+  SetFeedback(*TypeFeedbackVector::UninitializedSentinel(isolate),
+              SKIP_WRITE_BARRIER);
+  SetFeedbackExtra(Smi::FromInt(0), SKIP_WRITE_BARRIER);
+}
 
 void CallICNexus::ConfigureMonomorphicArray() {
   Object* feedback = GetFeedback();
@@ -650,10 +679,13 @@
 
 
 void CallICNexus::ConfigureMegamorphic() {
-  FeedbackNexus::ConfigureMegamorphic();
+  SetFeedback(*TypeFeedbackVector::MegamorphicSentinel(GetIsolate()),
+              SKIP_WRITE_BARRIER);
+  Smi* count = Smi::cast(GetFeedbackExtra());
+  int new_count = count->value() + 1;
+  SetFeedbackExtra(Smi::FromInt(new_count), SKIP_WRITE_BARRIER);
 }
 
-
 void CallICNexus::ConfigureMegamorphic(int call_count) {
   SetFeedback(*TypeFeedbackVector::MegamorphicSentinel(GetIsolate()),
               SKIP_WRITE_BARRIER);
@@ -1020,5 +1052,38 @@
   }
   return IsPropertyNameFeedback(feedback) ? PROPERTY : ELEMENT;
 }
+
+InlineCacheState BinaryOpICNexus::StateFromFeedback() const {
+  BinaryOperationHint hint = GetBinaryOperationFeedback();
+  if (hint == BinaryOperationHint::kNone) {
+    return UNINITIALIZED;
+  } else if (hint == BinaryOperationHint::kAny) {
+    return GENERIC;
+  }
+
+  return MONOMORPHIC;
+}
+
+InlineCacheState CompareICNexus::StateFromFeedback() const {
+  CompareOperationHint hint = GetCompareOperationFeedback();
+  if (hint == CompareOperationHint::kNone) {
+    return UNINITIALIZED;
+  } else if (hint == CompareOperationHint::kAny) {
+    return GENERIC;
+  }
+
+  return MONOMORPHIC;
+}
+
+BinaryOperationHint BinaryOpICNexus::GetBinaryOperationFeedback() const {
+  int feedback = Smi::cast(GetFeedback())->value();
+  return BinaryOperationHintFromFeedback(feedback);
+}
+
+CompareOperationHint CompareICNexus::GetCompareOperationFeedback() const {
+  int feedback = Smi::cast(GetFeedback())->value();
+  return CompareOperationHintFromFeedback(feedback);
+}
+
 }  // namespace internal
 }  // namespace v8
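The new ComputeCallFrequency is simply the per-call-site count divided by how often the whole function was invoked (the invocation count now stored in the vector). A standalone illustration (not V8 source; the non-zero guard is an assumption of this sketch, not something the patch adds):

// Illustrative sketch, not V8 source.
#include <cassert>

float CallFrequency(int call_count, int invocation_count) {
  assert(invocation_count > 0);  // assumed here for the sketch
  return static_cast<float>(static_cast<double>(call_count) /
                            static_cast<double>(invocation_count));
}

A call site taken 30 times inside a function invoked 60 times reports a frequency of 0.5f.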
diff --git a/src/type-feedback-vector.h b/src/type-feedback-vector.h
index 5355ee7..af69499 100644
--- a/src/type-feedback-vector.h
+++ b/src/type-feedback-vector.h
@@ -10,7 +10,8 @@
 #include "src/base/logging.h"
 #include "src/elements-kind.h"
 #include "src/objects.h"
-#include "src/zone-containers.h"
+#include "src/type-hints.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -27,6 +28,8 @@
   KEYED_LOAD_IC,
   STORE_IC,
   KEYED_STORE_IC,
+  INTERPRETER_BINARYOP_IC,
+  INTERPRETER_COMPARE_IC,
 
   // This is a general purpose slot that occupies one feedback vector element.
   GENERAL,
@@ -67,6 +70,14 @@
     return AddSlot(FeedbackVectorSlotKind::KEYED_STORE_IC);
   }
 
+  FeedbackVectorSlot AddInterpreterBinaryOpICSlot() {
+    return AddSlot(FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC);
+  }
+
+  FeedbackVectorSlot AddInterpreterCompareICSlot() {
+    return AddSlot(FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC);
+  }
+
   FeedbackVectorSlot AddGeneralSlot() {
     return AddSlot(FeedbackVectorSlotKind::GENERAL);
   }
@@ -207,7 +218,7 @@
   static const char* Kind2String(FeedbackVectorSlotKind kind);
 
  private:
-  static const int kFeedbackVectorSlotKindBits = 4;
+  static const int kFeedbackVectorSlotKindBits = 5;
   STATIC_ASSERT(static_cast<int>(FeedbackVectorSlotKind::KINDS_NUMBER) <
                 (1 << kFeedbackVectorSlotKindBits));
 
@@ -222,11 +233,10 @@
 
 // The shape of the TypeFeedbackVector is an array with:
 // 0: feedback metadata
-// 1: ics_with_types
-// 2: ics_with_generic_info
-// 3: feedback slot #0
+// 1: invocation count
+// 2: feedback slot #0
 // ...
-// 3 + slot_count - 1: feedback slot #(slot_count-1)
+// 2 + slot_count - 1: feedback slot #(slot_count-1)
 //
 class TypeFeedbackVector : public FixedArray {
  public:
@@ -234,9 +244,11 @@
   static inline TypeFeedbackVector* cast(Object* obj);
 
   static const int kMetadataIndex = 0;
-  static const int kReservedIndexCount = 1;
+  static const int kInvocationCountIndex = 1;
+  static const int kReservedIndexCount = 2;
 
-  inline void ComputeCounts(int* with_type_info, int* generic);
+  inline void ComputeCounts(int* with_type_info, int* generic,
+                            int* vector_ic_count, bool code_is_interpreted);
 
   inline bool is_empty() const;
 
@@ -244,6 +256,7 @@
   inline int slot_count() const;
 
   inline TypeFeedbackMetadata* metadata() const;
+  inline int invocation_count() const;
 
   // Conversion from a slot to an integer index to the underlying array.
   static int GetIndex(FeedbackVectorSlot slot) {
@@ -461,6 +474,7 @@
 
   void Clear(Code* host);
 
+  void ConfigureUninitialized() override;
   void ConfigureMonomorphicArray();
   void ConfigureMonomorphic(Handle<JSFunction> function);
   void ConfigureMegamorphic() final;
@@ -481,6 +495,10 @@
   }
 
   int ExtractCallCount();
+
+  // Compute the call frequency based on the call count and the invocation
+  // count (taken from the type feedback vector).
+  float ComputeCallFrequency();
 };
 
 
@@ -548,6 +566,10 @@
       : FeedbackNexus(vector, slot) {
     DCHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
   }
+  explicit KeyedLoadICNexus(Isolate* isolate)
+      : FeedbackNexus(
+            TypeFeedbackVector::DummyVector(isolate),
+            FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot)) {}
   KeyedLoadICNexus(TypeFeedbackVector* vector, FeedbackVectorSlot slot)
       : FeedbackNexus(vector, slot) {
     DCHECK_EQ(FeedbackVectorSlotKind::KEYED_LOAD_IC, vector->GetKind(slot));
@@ -630,6 +652,72 @@
   InlineCacheState StateFromFeedback() const override;
   Name* FindFirstName() const override;
 };
+
+class BinaryOpICNexus final : public FeedbackNexus {
+ public:
+  BinaryOpICNexus(Handle<TypeFeedbackVector> vector, FeedbackVectorSlot slot)
+      : FeedbackNexus(vector, slot) {
+    DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC,
+              vector->GetKind(slot));
+  }
+  BinaryOpICNexus(TypeFeedbackVector* vector, FeedbackVectorSlot slot)
+      : FeedbackNexus(vector, slot) {
+    DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_BINARYOP_IC,
+              vector->GetKind(slot));
+  }
+
+  void Clear(Code* host);
+
+  InlineCacheState StateFromFeedback() const final;
+  BinaryOperationHint GetBinaryOperationFeedback() const;
+
+  int ExtractMaps(MapHandleList* maps) const final {
+    // BinaryOpICs don't record map feedback.
+    return 0;
+  }
+  MaybeHandle<Object> FindHandlerForMap(Handle<Map> map) const final {
+    return MaybeHandle<Code>();
+  }
+  bool FindHandlers(List<Handle<Object>>* code_list,
+                    int length = -1) const final {
+    return length == 0;
+  }
+};
+
+class CompareICNexus final : public FeedbackNexus {
+ public:
+  CompareICNexus(Handle<TypeFeedbackVector> vector, FeedbackVectorSlot slot)
+      : FeedbackNexus(vector, slot) {
+    DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC,
+              vector->GetKind(slot));
+  }
+  CompareICNexus(TypeFeedbackVector* vector, FeedbackVectorSlot slot)
+      : FeedbackNexus(vector, slot) {
+    DCHECK_EQ(FeedbackVectorSlotKind::INTERPRETER_COMPARE_IC,
+              vector->GetKind(slot));
+  }
+
+  void Clear(Code* host);
+
+  InlineCacheState StateFromFeedback() const final;
+  CompareOperationHint GetCompareOperationFeedback() const;
+
+  int ExtractMaps(MapHandleList* maps) const final {
+    // CompareICs don't record map feedback.
+    return 0;
+  }
+  MaybeHandle<Object> FindHandlerForMap(Handle<Map> map) const final {
+    return MaybeHandle<Code>();
+  }
+  bool FindHandlers(List<Handle<Object>>* code_list,
+                    int length = -1) const final {
+    return length == 0;
+  }
+};
+
+inline BinaryOperationHint BinaryOperationHintFromFeedback(int type_feedback);
+inline CompareOperationHint CompareOperationHintFromFeedback(int type_feedback);
+
 }  // namespace internal
 }  // namespace v8
 
diff --git a/src/type-hints.cc b/src/type-hints.cc
new file mode 100644
index 0000000..ff00eef
--- /dev/null
+++ b/src/type-hints.cc
@@ -0,0 +1,91 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/type-hints.h"
+
+namespace v8 {
+namespace internal {
+
+std::ostream& operator<<(std::ostream& os, BinaryOperationHint hint) {
+  switch (hint) {
+    case BinaryOperationHint::kNone:
+      return os << "None";
+    case BinaryOperationHint::kSignedSmall:
+      return os << "SignedSmall";
+    case BinaryOperationHint::kSigned32:
+      return os << "Signed32";
+    case BinaryOperationHint::kNumberOrOddball:
+      return os << "NumberOrOddball";
+    case BinaryOperationHint::kString:
+      return os << "String";
+    case BinaryOperationHint::kAny:
+      return os << "Any";
+  }
+  UNREACHABLE();
+  return os;
+}
+
+std::ostream& operator<<(std::ostream& os, CompareOperationHint hint) {
+  switch (hint) {
+    case CompareOperationHint::kNone:
+      return os << "None";
+    case CompareOperationHint::kSignedSmall:
+      return os << "SignedSmall";
+    case CompareOperationHint::kNumber:
+      return os << "Number";
+    case CompareOperationHint::kNumberOrOddball:
+      return os << "NumberOrOddball";
+    case CompareOperationHint::kAny:
+      return os << "Any";
+  }
+  UNREACHABLE();
+  return os;
+}
+
+std::ostream& operator<<(std::ostream& os, ToBooleanHint hint) {
+  switch (hint) {
+    case ToBooleanHint::kNone:
+      return os << "None";
+    case ToBooleanHint::kUndefined:
+      return os << "Undefined";
+    case ToBooleanHint::kBoolean:
+      return os << "Boolean";
+    case ToBooleanHint::kNull:
+      return os << "Null";
+    case ToBooleanHint::kSmallInteger:
+      return os << "SmallInteger";
+    case ToBooleanHint::kReceiver:
+      return os << "Receiver";
+    case ToBooleanHint::kString:
+      return os << "String";
+    case ToBooleanHint::kSymbol:
+      return os << "Symbol";
+    case ToBooleanHint::kHeapNumber:
+      return os << "HeapNumber";
+    case ToBooleanHint::kSimdValue:
+      return os << "SimdValue";
+    case ToBooleanHint::kAny:
+      return os << "Any";
+  }
+  UNREACHABLE();
+  return os;
+}
+
+std::ostream& operator<<(std::ostream& os, ToBooleanHints hints) {
+  if (hints == ToBooleanHint::kAny) return os << "Any";
+  if (hints == ToBooleanHint::kNone) return os << "None";
+  bool first = true;
+  for (ToBooleanHints::mask_type i = 0; i < sizeof(i) * 8; ++i) {
+    ToBooleanHint const hint = static_cast<ToBooleanHint>(1u << i);
+    if (hints & hint) {
+      if (!first) os << "|";
+      first = false;
+      os << hint;
+    }
+  }
+  return os;
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/type-hints.h b/src/type-hints.h
new file mode 100644
index 0000000..cdf4709
--- /dev/null
+++ b/src/type-hints.h
@@ -0,0 +1,72 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_TYPE_HINTS_H_
+#define V8_TYPE_HINTS_H_
+
+#include "src/base/flags.h"
+#include "src/utils.h"
+
+namespace v8 {
+namespace internal {
+
+// Type hints for a binary operation.
+enum class BinaryOperationHint : uint8_t {
+  kNone,
+  kSignedSmall,
+  kSigned32,
+  kNumberOrOddball,
+  kString,
+  kAny
+};
+
+inline size_t hash_value(BinaryOperationHint hint) {
+  return static_cast<unsigned>(hint);
+}
+
+std::ostream& operator<<(std::ostream&, BinaryOperationHint);
+
+// Type hints for a compare operation.
+enum class CompareOperationHint : uint8_t {
+  kNone,
+  kSignedSmall,
+  kNumber,
+  kNumberOrOddball,
+  kAny
+};
+
+inline size_t hash_value(CompareOperationHint hint) {
+  return static_cast<unsigned>(hint);
+}
+
+std::ostream& operator<<(std::ostream&, CompareOperationHint);
+
+// Type hints for the ToBoolean type conversion.
+enum class ToBooleanHint : uint16_t {
+  kNone = 0u,
+  kUndefined = 1u << 0,
+  kBoolean = 1u << 1,
+  kNull = 1u << 2,
+  kSmallInteger = 1u << 3,
+  kReceiver = 1u << 4,
+  kString = 1u << 5,
+  kSymbol = 1u << 6,
+  kHeapNumber = 1u << 7,
+  kSimdValue = 1u << 8,
+  kAny = kUndefined | kBoolean | kNull | kSmallInteger | kReceiver | kString |
+         kSymbol | kHeapNumber | kSimdValue
+};
+
+std::ostream& operator<<(std::ostream&, ToBooleanHint);
+
+typedef base::Flags<ToBooleanHint, uint16_t> ToBooleanHints;
+
+std::ostream& operator<<(std::ostream&, ToBooleanHints);
+
+DEFINE_OPERATORS_FOR_FLAGS(ToBooleanHints)
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_TYPE_HINTS_H_
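The ToBooleanHints printer added in type-hints.cc walks the set bits of the mask and joins the hint names with '|'. A self-contained sketch of that behaviour, assuming a plain uint16_t in place of base::Flags<ToBooleanHint> (illustrative only, not V8 source):

// Illustrative sketch, not V8 source.
#include <cstdint>
#include <iostream>
#include <string>

enum ToBooleanHint : uint16_t {
  kNone = 0u, kUndefined = 1u << 0, kBoolean = 1u << 1, kNull = 1u << 2,
  kSmallInteger = 1u << 3, kReceiver = 1u << 4, kString = 1u << 5,
  kSymbol = 1u << 6, kHeapNumber = 1u << 7, kSimdValue = 1u << 8,
};

std::string HintName(ToBooleanHint hint) {
  switch (hint) {
    case kUndefined: return "Undefined";
    case kBoolean: return "Boolean";
    case kNull: return "Null";
    case kSmallInteger: return "SmallInteger";
    case kReceiver: return "Receiver";
    case kString: return "String";
    case kSymbol: return "Symbol";
    case kHeapNumber: return "HeapNumber";
    case kSimdValue: return "SimdValue";
    default: return "?";
  }
}

int main() {
  uint16_t hints = kSmallInteger | kHeapNumber;  // feedback saw Smis and heap numbers
  bool first = true;
  for (int i = 0; i < 16; ++i) {
    uint16_t bit = static_cast<uint16_t>(1u << i);
    if (hints & bit) {
      if (!first) std::cout << "|";
      first = false;
      std::cout << HintName(static_cast<ToBooleanHint>(bit));
    }
  }
  std::cout << "\n";  // prints: SmallInteger|HeapNumber
}

Running it prints "SmallInteger|HeapNumber", matching what the operator<< above would emit for that mask.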
diff --git a/src/type-info.cc b/src/type-info.cc
index 8289d91..ce0ab6c 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -6,7 +6,6 @@
 
 #include "src/ast/ast.h"
 #include "src/code-stubs.h"
-#include "src/compiler.h"
 #include "src/ic/ic.h"
 #include "src/ic/stub-cache.h"
 #include "src/objects-inl.h"
@@ -192,58 +191,129 @@
   return Handle<AllocationSite>::null();
 }
 
+namespace {
 
-void TypeFeedbackOracle::CompareType(TypeFeedbackId id,
-                                     Type** left_type,
-                                     Type** right_type,
-                                     Type** combined_type) {
+AstType* CompareOpHintToType(CompareOperationHint hint) {
+  switch (hint) {
+    case CompareOperationHint::kNone:
+      return AstType::None();
+    case CompareOperationHint::kSignedSmall:
+      return AstType::SignedSmall();
+    case CompareOperationHint::kNumber:
+      return AstType::Number();
+    case CompareOperationHint::kNumberOrOddball:
+      return AstType::NumberOrOddball();
+    case CompareOperationHint::kAny:
+      return AstType::Any();
+  }
+  UNREACHABLE();
+  return AstType::None();
+}
+
+AstType* BinaryOpHintToType(BinaryOperationHint hint) {
+  switch (hint) {
+    case BinaryOperationHint::kNone:
+      return AstType::None();
+    case BinaryOperationHint::kSignedSmall:
+      return AstType::SignedSmall();
+    case BinaryOperationHint::kSigned32:
+      return AstType::Signed32();
+    case BinaryOperationHint::kNumberOrOddball:
+      return AstType::Number();
+    case BinaryOperationHint::kString:
+      return AstType::String();
+    case BinaryOperationHint::kAny:
+      return AstType::Any();
+  }
+  UNREACHABLE();
+  return AstType::None();
+}
+
+}  // end anonymous namespace
+
+void TypeFeedbackOracle::CompareType(TypeFeedbackId id, FeedbackVectorSlot slot,
+                                     AstType** left_type, AstType** right_type,
+                                     AstType** combined_type) {
   Handle<Object> info = GetInfo(id);
+  // A check for a valid slot is not sufficient here. InstanceOf collects
+  // type feedback in a General slot.
   if (!info->IsCode()) {
-    // For some comparisons we don't have ICs, e.g. LiteralCompareTypeof.
-    *left_type = *right_type = *combined_type = Type::None();
+    // For some comparisons we don't have type feedback, e.g.
+    // LiteralCompareTypeof.
+    *left_type = *right_type = *combined_type = AstType::None();
     return;
   }
-  Handle<Code> code = Handle<Code>::cast(info);
 
+  // Feedback from Ignition. The feedback slot will be allocated and initialized
+  // to AstType::None() even when ignition is not enabled. So it is safe to get
+  // feedback from the type feedback vector.
+  DCHECK(!slot.IsInvalid());
+  CompareICNexus nexus(feedback_vector_, slot);
+  *left_type = *right_type = *combined_type =
+      CompareOpHintToType(nexus.GetCompareOperationFeedback());
+
+  // Merge the feedback from full-codegen if available.
+  Handle<Code> code = Handle<Code>::cast(info);
   Handle<Map> map;
   Map* raw_map = code->FindFirstMap();
   if (raw_map != NULL) Map::TryUpdate(handle(raw_map)).ToHandle(&map);
 
   if (code->is_compare_ic_stub()) {
     CompareICStub stub(code->stub_key(), isolate());
-    *left_type = CompareICState::StateToType(zone(), stub.left());
-    *right_type = CompareICState::StateToType(zone(), stub.right());
-    *combined_type = CompareICState::StateToType(zone(), stub.state(), map);
+    AstType* left_type_from_ic =
+        CompareICState::StateToType(zone(), stub.left());
+    *left_type = AstType::Union(*left_type, left_type_from_ic, zone());
+    AstType* right_type_from_ic =
+        CompareICState::StateToType(zone(), stub.right());
+    *right_type = AstType::Union(*right_type, right_type_from_ic, zone());
+    AstType* combined_type_from_ic =
+        CompareICState::StateToType(zone(), stub.state(), map);
+    *combined_type =
+        AstType::Union(*combined_type, combined_type_from_ic, zone());
   }
 }
 
-
-void TypeFeedbackOracle::BinaryType(TypeFeedbackId id,
-                                    Type** left,
-                                    Type** right,
-                                    Type** result,
+void TypeFeedbackOracle::BinaryType(TypeFeedbackId id, FeedbackVectorSlot slot,
+                                    AstType** left, AstType** right,
+                                    AstType** result,
                                     Maybe<int>* fixed_right_arg,
                                     Handle<AllocationSite>* allocation_site,
                                     Token::Value op) {
   Handle<Object> object = GetInfo(id);
-  if (!object->IsCode()) {
-    // For some binary ops we don't have ICs, e.g. Token::COMMA, but for the
-    // operations covered by the BinaryOpIC we should always have them.
+  if (slot.IsInvalid()) {
+    // For some binary ops we don't have ICs or feedback slots,
+    // e.g. Token::COMMA, but for the operations covered by the BinaryOpIC we
+    // should always have them.
+    DCHECK(!object->IsCode());
     DCHECK(op < BinaryOpICState::FIRST_TOKEN ||
            op > BinaryOpICState::LAST_TOKEN);
-    *left = *right = *result = Type::None();
+    *left = *right = *result = AstType::None();
     *fixed_right_arg = Nothing<int>();
     *allocation_site = Handle<AllocationSite>::null();
     return;
   }
+
+  // Feedback from Ignition. The feedback slot will be allocated and initialized
+  // to AstType::None() even when ignition is not enabled. So it is safe to get
+  // feedback from the type feedback vector.
+  DCHECK(!slot.IsInvalid());
+  BinaryOpICNexus nexus(feedback_vector_, slot);
+  *left = *right = *result =
+      BinaryOpHintToType(nexus.GetBinaryOperationFeedback());
+  *fixed_right_arg = Nothing<int>();
+  *allocation_site = Handle<AllocationSite>::null();
+
+  if (!object->IsCode()) return;
+
+  // Merge the feedback from full-codegen if available.
   Handle<Code> code = Handle<Code>::cast(object);
   DCHECK_EQ(Code::BINARY_OP_IC, code->kind());
   BinaryOpICState state(isolate(), code->extra_ic_state());
   DCHECK_EQ(op, state.op());
 
-  *left = state.GetLeftType();
-  *right = state.GetRightType();
-  *result = state.GetResultType();
+  *left = AstType::Union(*left, state.GetLeftType(), zone());
+  *right = AstType::Union(*right, state.GetRightType(), zone());
+  *result = AstType::Union(*result, state.GetResultType(), zone());
   *fixed_right_arg = state.fixed_right_arg();
 
   AllocationSite* first_allocation_site = code->FindFirstAllocationSite();
@@ -254,14 +324,24 @@
   }
 }
 
-
-Type* TypeFeedbackOracle::CountType(TypeFeedbackId id) {
+AstType* TypeFeedbackOracle::CountType(TypeFeedbackId id,
+                                       FeedbackVectorSlot slot) {
   Handle<Object> object = GetInfo(id);
-  if (!object->IsCode()) return Type::None();
+  if (slot.IsInvalid()) {
+    DCHECK(!object->IsCode());
+    return AstType::None();
+  }
+
+  DCHECK(!slot.IsInvalid());
+  BinaryOpICNexus nexus(feedback_vector_, slot);
+  AstType* type = BinaryOpHintToType(nexus.GetBinaryOperationFeedback());
+
+  if (!object->IsCode()) return type;
+
   Handle<Code> code = Handle<Code>::cast(object);
   DCHECK_EQ(Code::BINARY_OP_IC, code->kind());
   BinaryOpICState state(isolate(), code->extra_ic_state());
-  return state.GetLeftType();
+  return AstType::Union(type, state.GetLeftType(), zone());
 }
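BinaryType, CompareType and CountType now seed the result from the Ignition feedback slot and then union in whatever the full-codegen IC recorded, so the merged type is at least as general as either source. A toy sketch of that merge with the type lattice reduced to a bitset, so union is a bitwise OR (TypeBits, the constants and MergeFeedback are illustrative stand-ins for AstType and AstType::Union, not V8 source):

// Illustrative sketch, not V8 source.
#include <cstdint>

using TypeBits = uint32_t;
constexpr TypeBits kSignedSmall = 1u << 0;
constexpr TypeBits kOtherNumber = 1u << 1;
constexpr TypeBits kNumber = kSignedSmall | kOtherNumber;

// Stands in for AstType::Union: merging feedback can only widen the type.
TypeBits MergeFeedback(TypeBits from_ignition, TypeBits from_full_codegen) {
  return from_ignition | from_full_codegen;
}

int main() {
  // Ignition only saw Smis; full-codegen's BinaryOpIC also saw heap numbers.
  TypeBits merged = MergeFeedback(kSignedSmall, kNumber);
  return merged == kNumber ? 0 : 1;  // the union is the wider Number type
}

This mirrors the AstType::Union calls above that combine the hint-derived type with state.GetLeftType(), state.GetRightType() and state.GetResultType().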
 
 
diff --git a/src/type-info.h b/src/type-info.h
index 4e8dc54..06a0c9e 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -6,11 +6,11 @@
 #define V8_TYPE_INFO_H_
 
 #include "src/allocation.h"
+#include "src/ast/ast-types.h"
 #include "src/contexts.h"
 #include "src/globals.h"
 #include "src/parsing/token.h"
-#include "src/types.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
@@ -77,20 +77,16 @@
   uint16_t ToBooleanTypes(TypeFeedbackId id);
 
   // Get type information for arithmetic operations and compares.
-  void BinaryType(TypeFeedbackId id,
-                  Type** left,
-                  Type** right,
-                  Type** result,
+  void BinaryType(TypeFeedbackId id, FeedbackVectorSlot slot, AstType** left,
+                  AstType** right, AstType** result,
                   Maybe<int>* fixed_right_arg,
                   Handle<AllocationSite>* allocation_site,
                   Token::Value operation);
 
-  void CompareType(TypeFeedbackId id,
-                   Type** left,
-                   Type** right,
-                   Type** combined);
+  void CompareType(TypeFeedbackId id, FeedbackVectorSlot slot, AstType** left,
+                   AstType** right, AstType** combined);
 
-  Type* CountType(TypeFeedbackId id);
+  AstType* CountType(TypeFeedbackId id, FeedbackVectorSlot slot);
 
   Zone* zone() const { return zone_; }
   Isolate* isolate() const { return isolate_; }
diff --git a/src/types.cc b/src/types.cc
deleted file mode 100644
index c978dac..0000000
--- a/src/types.cc
+++ /dev/null
@@ -1,1279 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <iomanip>
-
-#include "src/types.h"
-
-#include "src/handles-inl.h"
-#include "src/ostreams.h"
-
-namespace v8 {
-namespace internal {
-
-
-// NOTE: If code is marked as being a "shortcut", this means that removing
-// the code won't affect the semantics of the surrounding function definition.
-
-// static
-bool Type::IsInteger(i::Object* x) {
-  return x->IsNumber() && Type::IsInteger(x->Number());
-}
-
-// -----------------------------------------------------------------------------
-// Range-related helper functions.
-
-bool RangeType::Limits::IsEmpty() { return this->min > this->max; }
-
-RangeType::Limits RangeType::Limits::Intersect(Limits lhs, Limits rhs) {
-  DisallowHeapAllocation no_allocation;
-  Limits result(lhs);
-  if (lhs.min < rhs.min) result.min = rhs.min;
-  if (lhs.max > rhs.max) result.max = rhs.max;
-  return result;
-}
-
-RangeType::Limits RangeType::Limits::Union(Limits lhs, Limits rhs) {
-  DisallowHeapAllocation no_allocation;
-  if (lhs.IsEmpty()) return rhs;
-  if (rhs.IsEmpty()) return lhs;
-  Limits result(lhs);
-  if (lhs.min > rhs.min) result.min = rhs.min;
-  if (lhs.max < rhs.max) result.max = rhs.max;
-  return result;
-}
-
-bool Type::Overlap(RangeType* lhs, RangeType* rhs) {
-  DisallowHeapAllocation no_allocation;
-  return !RangeType::Limits::Intersect(RangeType::Limits(lhs),
-                                       RangeType::Limits(rhs))
-              .IsEmpty();
-}
-
-bool Type::Contains(RangeType* lhs, RangeType* rhs) {
-  DisallowHeapAllocation no_allocation;
-  return lhs->Min() <= rhs->Min() && rhs->Max() <= lhs->Max();
-}
-
-bool Type::Contains(RangeType* lhs, ConstantType* rhs) {
-  DisallowHeapAllocation no_allocation;
-  return IsInteger(*rhs->Value()) &&
-         lhs->Min() <= rhs->Value()->Number() &&
-         rhs->Value()->Number() <= lhs->Max();
-}
-
-bool Type::Contains(RangeType* range, i::Object* val) {
-  DisallowHeapAllocation no_allocation;
-  return IsInteger(val) &&
-         range->Min() <= val->Number() && val->Number() <= range->Max();
-}
-
-
-// -----------------------------------------------------------------------------
-// Min and Max computation.
-
-double Type::Min() {
-  DCHECK(this->SemanticIs(Number()));
-  if (this->IsBitset()) return BitsetType::Min(this->AsBitset());
-  if (this->IsUnion()) {
-    double min = +V8_INFINITY;
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      min = std::min(min, this->AsUnion()->Get(i)->Min());
-    }
-    return min;
-  }
-  if (this->IsRange()) return this->AsRange()->Min();
-  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
-  UNREACHABLE();
-  return 0;
-}
-
-double Type::Max() {
-  DCHECK(this->SemanticIs(Number()));
-  if (this->IsBitset()) return BitsetType::Max(this->AsBitset());
-  if (this->IsUnion()) {
-    double max = -V8_INFINITY;
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      max = std::max(max, this->AsUnion()->Get(i)->Max());
-    }
-    return max;
-  }
-  if (this->IsRange()) return this->AsRange()->Max();
-  if (this->IsConstant()) return this->AsConstant()->Value()->Number();
-  UNREACHABLE();
-  return 0;
-}
-
-
-// -----------------------------------------------------------------------------
-// Glb and lub computation.
-
-
-// The largest bitset subsumed by this type.
-Type::bitset BitsetType::Glb(Type* type) {
-  DisallowHeapAllocation no_allocation;
-  // Fast case.
-  if (IsBitset(type)) {
-    return type->AsBitset();
-  } else if (type->IsUnion()) {
-    SLOW_DCHECK(type->AsUnion()->Wellformed());
-    return type->AsUnion()->Get(0)->BitsetGlb() |
-           SEMANTIC(type->AsUnion()->Get(1)->BitsetGlb());  // Shortcut.
-  } else if (type->IsRange()) {
-    bitset glb = SEMANTIC(
-        BitsetType::Glb(type->AsRange()->Min(), type->AsRange()->Max()));
-    return glb | REPRESENTATION(type->BitsetLub());
-  } else {
-    return type->Representation();
-  }
-}
-
-
-// The smallest bitset subsuming this type, possibly not a proper one.
-Type::bitset BitsetType::Lub(Type* type) {
-  DisallowHeapAllocation no_allocation;
-  if (IsBitset(type)) return type->AsBitset();
-  if (type->IsUnion()) {
-    // Take the representation from the first element, which is always
-    // a bitset.
-    int bitset = type->AsUnion()->Get(0)->BitsetLub();
-    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
-      // Other elements only contribute their semantic part.
-      bitset |= SEMANTIC(type->AsUnion()->Get(i)->BitsetLub());
-    }
-    return bitset;
-  }
-  if (type->IsClass()) return type->AsClass()->Lub();
-  if (type->IsConstant()) return type->AsConstant()->Lub();
-  if (type->IsRange()) return type->AsRange()->Lub();
-  if (type->IsContext()) return kOtherInternal & kTaggedPointer;
-  if (type->IsArray()) return kOtherObject;
-  if (type->IsFunction()) return kFunction;
-  if (type->IsTuple()) return kOtherInternal;
-  UNREACHABLE();
-  return kNone;
-}
-
-Type::bitset BitsetType::Lub(i::Map* map) {
-  DisallowHeapAllocation no_allocation;
-  switch (map->instance_type()) {
-    case STRING_TYPE:
-    case ONE_BYTE_STRING_TYPE:
-    case CONS_STRING_TYPE:
-    case CONS_ONE_BYTE_STRING_TYPE:
-    case SLICED_STRING_TYPE:
-    case SLICED_ONE_BYTE_STRING_TYPE:
-    case EXTERNAL_STRING_TYPE:
-    case EXTERNAL_ONE_BYTE_STRING_TYPE:
-    case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
-    case SHORT_EXTERNAL_STRING_TYPE:
-    case SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE:
-    case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
-      return kOtherString;
-    case INTERNALIZED_STRING_TYPE:
-    case ONE_BYTE_INTERNALIZED_STRING_TYPE:
-    case EXTERNAL_INTERNALIZED_STRING_TYPE:
-    case EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
-    case EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
-    case SHORT_EXTERNAL_INTERNALIZED_STRING_TYPE:
-    case SHORT_EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE:
-    case SHORT_EXTERNAL_INTERNALIZED_STRING_WITH_ONE_BYTE_DATA_TYPE:
-      return kInternalizedString;
-    case SYMBOL_TYPE:
-      return kSymbol;
-    case ODDBALL_TYPE: {
-      Heap* heap = map->GetHeap();
-      if (map == heap->undefined_map()) return kUndefined;
-      if (map == heap->null_map()) return kNull;
-      if (map == heap->boolean_map()) return kBoolean;
-      if (map == heap->the_hole_map()) return kHole;
-      DCHECK(map == heap->uninitialized_map() ||
-             map == heap->no_interceptor_result_sentinel_map() ||
-             map == heap->termination_exception_map() ||
-             map == heap->arguments_marker_map() ||
-             map == heap->optimized_out_map() ||
-             map == heap->stale_register_map());
-      return kOtherInternal & kTaggedPointer;
-    }
-    case HEAP_NUMBER_TYPE:
-      return kNumber & kTaggedPointer;
-    case SIMD128_VALUE_TYPE:
-      return kSimd;
-    case JS_OBJECT_TYPE:
-    case JS_ARGUMENTS_TYPE:
-    case JS_ERROR_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_API_OBJECT_TYPE:
-    case JS_SPECIAL_API_OBJECT_TYPE:
-      if (map->is_undetectable()) return kOtherUndetectable;
-      return kOtherObject;
-    case JS_VALUE_TYPE:
-    case JS_MESSAGE_OBJECT_TYPE:
-    case JS_DATE_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_GENERATOR_OBJECT_TYPE:
-    case JS_MODULE_TYPE:
-    case JS_ARRAY_BUFFER_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:  // TODO(rossberg): there should be a RegExp type.
-    case JS_TYPED_ARRAY_TYPE:
-    case JS_DATA_VIEW_TYPE:
-    case JS_SET_TYPE:
-    case JS_MAP_TYPE:
-    case JS_SET_ITERATOR_TYPE:
-    case JS_MAP_ITERATOR_TYPE:
-    case JS_WEAK_MAP_TYPE:
-    case JS_WEAK_SET_TYPE:
-    case JS_PROMISE_TYPE:
-    case JS_BOUND_FUNCTION_TYPE:
-      DCHECK(!map->is_undetectable());
-      return kOtherObject;
-    case JS_FUNCTION_TYPE:
-      DCHECK(!map->is_undetectable());
-      return kFunction;
-    case JS_PROXY_TYPE:
-      DCHECK(!map->is_undetectable());
-      return kProxy;
-    case MAP_TYPE:
-    case ALLOCATION_SITE_TYPE:
-    case ACCESSOR_INFO_TYPE:
-    case SHARED_FUNCTION_INFO_TYPE:
-    case ACCESSOR_PAIR_TYPE:
-    case FIXED_ARRAY_TYPE:
-    case FIXED_DOUBLE_ARRAY_TYPE:
-    case BYTE_ARRAY_TYPE:
-    case BYTECODE_ARRAY_TYPE:
-    case TRANSITION_ARRAY_TYPE:
-    case FOREIGN_TYPE:
-    case SCRIPT_TYPE:
-    case CODE_TYPE:
-    case PROPERTY_CELL_TYPE:
-      return kOtherInternal & kTaggedPointer;
-
-    // Remaining instance types are unsupported for now. If any of them do
-    // require bit set types, they should get kOtherInternal & kTaggedPointer.
-    case MUTABLE_HEAP_NUMBER_TYPE:
-    case FREE_SPACE_TYPE:
-#define FIXED_TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
-  case FIXED_##TYPE##_ARRAY_TYPE:
-
-      TYPED_ARRAYS(FIXED_TYPED_ARRAY_CASE)
-#undef FIXED_TYPED_ARRAY_CASE
-    case FILLER_TYPE:
-    case ACCESS_CHECK_INFO_TYPE:
-    case INTERCEPTOR_INFO_TYPE:
-    case CALL_HANDLER_INFO_TYPE:
-    case FUNCTION_TEMPLATE_INFO_TYPE:
-    case OBJECT_TEMPLATE_INFO_TYPE:
-    case SIGNATURE_INFO_TYPE:
-    case TYPE_SWITCH_INFO_TYPE:
-    case ALLOCATION_MEMENTO_TYPE:
-    case TYPE_FEEDBACK_INFO_TYPE:
-    case ALIASED_ARGUMENTS_ENTRY_TYPE:
-    case BOX_TYPE:
-    case DEBUG_INFO_TYPE:
-    case BREAK_POINT_INFO_TYPE:
-    case CELL_TYPE:
-    case WEAK_CELL_TYPE:
-    case PROTOTYPE_INFO_TYPE:
-    case SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION_TYPE:
-      UNREACHABLE();
-      return kNone;
-  }
-  UNREACHABLE();
-  return kNone;
-}
-
-Type::bitset BitsetType::Lub(i::Object* value) {
-  DisallowHeapAllocation no_allocation;
-  if (value->IsNumber()) {
-    return Lub(value->Number()) &
-        (value->IsSmi() ? kTaggedSigned : kTaggedPointer);
-  }
-  return Lub(i::HeapObject::cast(value)->map());
-}
-
-Type::bitset BitsetType::Lub(double value) {
-  DisallowHeapAllocation no_allocation;
-  if (i::IsMinusZero(value)) return kMinusZero;
-  if (std::isnan(value)) return kNaN;
-  if (IsUint32Double(value) || IsInt32Double(value)) return Lub(value, value);
-  return kOtherNumber;
-}
-
-
-// Minimum values of plain numeric bitsets.
-const BitsetType::Boundary BitsetType::BoundariesArray[] = {
-    {kOtherNumber, kPlainNumber, -V8_INFINITY},
-    {kOtherSigned32, kNegative32, kMinInt},
-    {kNegative31, kNegative31, -0x40000000},
-    {kUnsigned30, kUnsigned30, 0},
-    {kOtherUnsigned31, kUnsigned31, 0x40000000},
-    {kOtherUnsigned32, kUnsigned32, 0x80000000},
-    {kOtherNumber, kPlainNumber, static_cast<double>(kMaxUInt32) + 1}};
-
-const BitsetType::Boundary* BitsetType::Boundaries() { return BoundariesArray; }
-
-size_t BitsetType::BoundariesSize() {
-  // Windows doesn't like arraysize here.
-  // return arraysize(BoundariesArray);
-  return 7;
-}
-
-Type::bitset BitsetType::ExpandInternals(Type::bitset bits) {
-  DisallowHeapAllocation no_allocation;
-  if (!(bits & SEMANTIC(kPlainNumber))) return bits;  // Shortcut.
-  const Boundary* boundaries = Boundaries();
-  for (size_t i = 0; i < BoundariesSize(); ++i) {
-    DCHECK(BitsetType::Is(boundaries[i].internal, boundaries[i].external));
-    if (bits & SEMANTIC(boundaries[i].internal))
-      bits |= SEMANTIC(boundaries[i].external);
-  }
-  return bits;
-}
-
-Type::bitset BitsetType::Lub(double min, double max) {
-  DisallowHeapAllocation no_allocation;
-  int lub = kNone;
-  const Boundary* mins = Boundaries();
-
-  for (size_t i = 1; i < BoundariesSize(); ++i) {
-    if (min < mins[i].min) {
-      lub |= mins[i-1].internal;
-      if (max < mins[i].min) return lub;
-    }
-  }
-  return lub | mins[BoundariesSize() - 1].internal;
-}
-
-Type::bitset BitsetType::NumberBits(bitset bits) {
-  return SEMANTIC(bits & kPlainNumber);
-}
-
-Type::bitset BitsetType::Glb(double min, double max) {
-  DisallowHeapAllocation no_allocation;
-  int glb = kNone;
-  const Boundary* mins = Boundaries();
-
-  // If the range does not touch 0, the bound is empty.
-  if (max < -1 || min > 0) return glb;
-
-  for (size_t i = 1; i + 1 < BoundariesSize(); ++i) {
-    if (min <= mins[i].min) {
-      if (max + 1 < mins[i + 1].min) break;
-      glb |= mins[i].external;
-    }
-  }
-  // OtherNumber also contains float numbers, so it can never be
-  // in the greatest lower bound.
-  return glb & ~(SEMANTIC(kOtherNumber));
-}
-
-double BitsetType::Min(bitset bits) {
-  DisallowHeapAllocation no_allocation;
-  DCHECK(Is(SEMANTIC(bits), kNumber));
-  const Boundary* mins = Boundaries();
-  bool mz = SEMANTIC(bits & kMinusZero);
-  for (size_t i = 0; i < BoundariesSize(); ++i) {
-    if (Is(SEMANTIC(mins[i].internal), bits)) {
-      return mz ? std::min(0.0, mins[i].min) : mins[i].min;
-    }
-  }
-  if (mz) return 0;
-  return std::numeric_limits<double>::quiet_NaN();
-}
-
-double BitsetType::Max(bitset bits) {
-  DisallowHeapAllocation no_allocation;
-  DCHECK(Is(SEMANTIC(bits), kNumber));
-  const Boundary* mins = Boundaries();
-  bool mz = SEMANTIC(bits & kMinusZero);
-  if (BitsetType::Is(SEMANTIC(mins[BoundariesSize() - 1].internal), bits)) {
-    return +V8_INFINITY;
-  }
-  for (size_t i = BoundariesSize() - 1; i-- > 0;) {
-    if (Is(SEMANTIC(mins[i].internal), bits)) {
-      return mz ?
-          std::max(0.0, mins[i+1].min - 1) : mins[i+1].min - 1;
-    }
-  }
-  if (mz) return 0;
-  return std::numeric_limits<double>::quiet_NaN();
-}
-
-
-// -----------------------------------------------------------------------------
-// Predicates.
-
-bool Type::SimplyEquals(Type* that) {
-  DisallowHeapAllocation no_allocation;
-  if (this->IsClass()) {
-    return that->IsClass()
-        && *this->AsClass()->Map() == *that->AsClass()->Map();
-  }
-  if (this->IsConstant()) {
-    return that->IsConstant()
-        && *this->AsConstant()->Value() == *that->AsConstant()->Value();
-  }
-  if (this->IsContext()) {
-    return that->IsContext()
-        && this->AsContext()->Outer()->Equals(that->AsContext()->Outer());
-  }
-  if (this->IsArray()) {
-    return that->IsArray()
-        && this->AsArray()->Element()->Equals(that->AsArray()->Element());
-  }
-  if (this->IsFunction()) {
-    if (!that->IsFunction()) return false;
-    FunctionType* this_fun = this->AsFunction();
-    FunctionType* that_fun = that->AsFunction();
-    if (this_fun->Arity() != that_fun->Arity() ||
-        !this_fun->Result()->Equals(that_fun->Result()) ||
-        !this_fun->Receiver()->Equals(that_fun->Receiver())) {
-      return false;
-    }
-    for (int i = 0, n = this_fun->Arity(); i < n; ++i) {
-      if (!this_fun->Parameter(i)->Equals(that_fun->Parameter(i))) return false;
-    }
-    return true;
-  }
-  if (this->IsTuple()) {
-    if (!that->IsTuple()) return false;
-    TupleType* this_tuple = this->AsTuple();
-    TupleType* that_tuple = that->AsTuple();
-    if (this_tuple->Arity() != that_tuple->Arity()) {
-      return false;
-    }
-    for (int i = 0, n = this_tuple->Arity(); i < n; ++i) {
-      if (!this_tuple->Element(i)->Equals(that_tuple->Element(i))) return false;
-    }
-    return true;
-  }
-  UNREACHABLE();
-  return false;
-}
-
-Type::bitset Type::Representation() {
-  return REPRESENTATION(this->BitsetLub());
-}
-
-
-// Check if [this] <= [that].
-bool Type::SlowIs(Type* that) {
-  DisallowHeapAllocation no_allocation;
-
-  // Fast bitset cases
-  if (that->IsBitset()) {
-    return BitsetType::Is(this->BitsetLub(), that->AsBitset());
-  }
-
-  if (this->IsBitset()) {
-    return BitsetType::Is(this->AsBitset(), that->BitsetGlb());
-  }
-
-  // Check the representations.
-  if (!BitsetType::Is(Representation(), that->Representation())) {
-    return false;
-  }
-
-  // Check the semantic part.
-  return SemanticIs(that);
-}
-
-
-// Check if SEMANTIC([this]) <= SEMANTIC([that]). The result of the method
-// should be independent of the representation axis of the types.
-bool Type::SemanticIs(Type* that) {
-  DisallowHeapAllocation no_allocation;
-
-  if (this == that) return true;
-
-  if (that->IsBitset()) {
-    return BitsetType::Is(SEMANTIC(this->BitsetLub()), that->AsBitset());
-  }
-  if (this->IsBitset()) {
-    return BitsetType::Is(SEMANTIC(this->AsBitset()), that->BitsetGlb());
-  }
-
-  // (T1 \/ ... \/ Tn) <= T  if  (T1 <= T) /\ ... /\ (Tn <= T)
-  if (this->IsUnion()) {
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      if (!this->AsUnion()->Get(i)->SemanticIs(that)) return false;
-    }
-    return true;
-  }
-
-  // T <= (T1 \/ ... \/ Tn)  if  (T <= T1) \/ ... \/ (T <= Tn)
-  if (that->IsUnion()) {
-    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
-      if (this->SemanticIs(that->AsUnion()->Get(i))) return true;
-      if (i > 1 && this->IsRange()) return false;  // Shortcut.
-    }
-    return false;
-  }
-
-  if (that->IsRange()) {
-    return (this->IsRange() && Contains(that->AsRange(), this->AsRange())) ||
-           (this->IsConstant() &&
-            Contains(that->AsRange(), this->AsConstant()));
-  }
-  if (this->IsRange()) return false;
-
-  return this->SimplyEquals(that);
-}
-
-// Most precise _current_ type of a value (usually its class).
-Type* Type::NowOf(i::Object* value, Zone* zone) {
-  if (value->IsSmi() ||
-      i::HeapObject::cast(value)->map()->instance_type() == HEAP_NUMBER_TYPE) {
-    return Of(value, zone);
-  }
-  return Class(i::handle(i::HeapObject::cast(value)->map()), zone);
-}
-
-bool Type::NowContains(i::Object* value) {
-  DisallowHeapAllocation no_allocation;
-  if (this->IsAny()) return true;
-  if (value->IsHeapObject()) {
-    i::Map* map = i::HeapObject::cast(value)->map();
-    for (Iterator<i::Map> it = this->Classes(); !it.Done(); it.Advance()) {
-      if (*it.Current() == map) return true;
-    }
-  }
-  return this->Contains(value);
-}
-
-bool Type::NowIs(Type* that) {
-  DisallowHeapAllocation no_allocation;
-
-  // TODO(rossberg): this is incorrect for
-  //   Union(Constant(V), T)->NowIs(Class(M))
-  // but fuzzing does not cover that!
-  if (this->IsConstant()) {
-    i::Object* object = *this->AsConstant()->Value();
-    if (object->IsHeapObject()) {
-      i::Map* map = i::HeapObject::cast(object)->map();
-      for (Iterator<i::Map> it = that->Classes(); !it.Done(); it.Advance()) {
-        if (*it.Current() == map) return true;
-      }
-    }
-  }
-  return this->Is(that);
-}
-
-
-// Check if [this] contains only (currently) stable classes.
-bool Type::NowStable() {
-  DisallowHeapAllocation no_allocation;
-  return !this->IsClass() || this->AsClass()->Map()->is_stable();
-}
-
-
-// Check if [this] and [that] overlap.
-bool Type::Maybe(Type* that) {
-  DisallowHeapAllocation no_allocation;
-
-  // Take care of the representation part (and also approximate
-  // the semantic part).
-  if (!BitsetType::IsInhabited(this->BitsetLub() & that->BitsetLub()))
-    return false;
-
-  return SemanticMaybe(that);
-}
-
-bool Type::SemanticMaybe(Type* that) {
-  DisallowHeapAllocation no_allocation;
-
-  // (T1 \/ ... \/ Tn) overlaps T  if  (T1 overlaps T) \/ ... \/ (Tn overlaps T)
-  if (this->IsUnion()) {
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      if (this->AsUnion()->Get(i)->SemanticMaybe(that)) return true;
-    }
-    return false;
-  }
-
-  // T overlaps (T1 \/ ... \/ Tn)  if  (T overlaps T1) \/ ... \/ (T overlaps Tn)
-  if (that->IsUnion()) {
-    for (int i = 0, n = that->AsUnion()->Length(); i < n; ++i) {
-      if (this->SemanticMaybe(that->AsUnion()->Get(i))) return true;
-    }
-    return false;
-  }
-
-  if (!BitsetType::SemanticIsInhabited(this->BitsetLub() & that->BitsetLub()))
-    return false;
-
-  if (this->IsBitset() && that->IsBitset()) return true;
-
-  if (this->IsClass() != that->IsClass()) return true;
-
-  if (this->IsRange()) {
-    if (that->IsConstant()) {
-      return Contains(this->AsRange(), that->AsConstant());
-    }
-    if (that->IsRange()) {
-      return Overlap(this->AsRange(), that->AsRange());
-    }
-    if (that->IsBitset()) {
-      bitset number_bits = BitsetType::NumberBits(that->AsBitset());
-      if (number_bits == BitsetType::kNone) {
-        return false;
-      }
-      double min = std::max(BitsetType::Min(number_bits), this->Min());
-      double max = std::min(BitsetType::Max(number_bits), this->Max());
-      return min <= max;
-    }
-  }
-  if (that->IsRange()) {
-    return that->SemanticMaybe(this);  // This case is handled above.
-  }
-
-  if (this->IsBitset() || that->IsBitset()) return true;
-
-  return this->SimplyEquals(that);
-}
-
-
-// Return the range in [this], or [NULL].
-Type* Type::GetRange() {
-  DisallowHeapAllocation no_allocation;
-  if (this->IsRange()) return this;
-  if (this->IsUnion() && this->AsUnion()->Get(1)->IsRange()) {
-    return this->AsUnion()->Get(1);
-  }
-  return NULL;
-}
-
-bool Type::Contains(i::Object* value) {
-  DisallowHeapAllocation no_allocation;
-  for (Iterator<i::Object> it = this->Constants(); !it.Done(); it.Advance()) {
-    if (*it.Current() == value) return true;
-  }
-  if (IsInteger(value)) {
-    Type* range = this->GetRange();
-    if (range != NULL && Contains(range->AsRange(), value)) return true;
-  }
-  return BitsetType::New(BitsetType::Lub(value))->Is(this);
-}
-
-bool UnionType::Wellformed() {
-  DisallowHeapAllocation no_allocation;
-  // This checks the invariants of the union representation:
-  // 1. There are at least two elements.
-  // 2. The first element is a bitset, no other element is a bitset.
-  // 3. At most one element is a range, and it must be the second one.
-  // 4. No element is itself a union.
-  // 5. No element (except the bitset) is a subtype of any other.
-  // 6. If there is a range, then the bitset type does not contain
-  //    plain number bits.
-  DCHECK(this->Length() >= 2);  // (1)
-  DCHECK(this->Get(0)->IsBitset());  // (2a)
-
-  for (int i = 0; i < this->Length(); ++i) {
-    if (i != 0) DCHECK(!this->Get(i)->IsBitset());  // (2b)
-    if (i != 1) DCHECK(!this->Get(i)->IsRange());   // (3)
-    DCHECK(!this->Get(i)->IsUnion());               // (4)
-    for (int j = 0; j < this->Length(); ++j) {
-      if (i != j && i != 0)
-        DCHECK(!this->Get(i)->SemanticIs(this->Get(j)));  // (5)
-    }
-  }
-  DCHECK(!this->Get(1)->IsRange() ||
-         (BitsetType::NumberBits(this->Get(0)->AsBitset()) ==
-          BitsetType::kNone));  // (6)
-  return true;
-}
-
-
-// -----------------------------------------------------------------------------
-// Union and intersection
-
-
-static bool AddIsSafe(int x, int y) {
-  return x >= 0 ?
-      y <= std::numeric_limits<int>::max() - x :
-      y >= std::numeric_limits<int>::min() - x;
-}
-
-Type* Type::Intersect(Type* type1, Type* type2, Zone* zone) {
-  // Fast case: bit sets.
-  if (type1->IsBitset() && type2->IsBitset()) {
-    return BitsetType::New(type1->AsBitset() & type2->AsBitset());
-  }
-
-  // Fast case: top or bottom types.
-  if (type1->IsNone() || type2->IsAny()) return type1;  // Shortcut.
-  if (type2->IsNone() || type1->IsAny()) return type2;  // Shortcut.
-
-  // Semi-fast case.
-  if (type1->Is(type2)) return type1;
-  if (type2->Is(type1)) return type2;
-
-  // Slow case: create union.
-
-  // Figure out the representation of the result first.
-  // The rest of the method should not change this representation and
-  // it should not make any decisions based on representations (i.e.,
-  // it should only use the semantic part of types).
-  const bitset representation =
-      type1->Representation() & type2->Representation();
-
-  // Semantic subtyping check - this is needed for consistency with the
-  // semi-fast case above - we should behave the same way regardless of
-  // representations. Intersection with a universal bitset should only update
-  // the representations.
-  if (type1->SemanticIs(type2)) {
-    type2 = Any();
-  } else if (type2->SemanticIs(type1)) {
-    type1 = Any();
-  }
-
-  bitset bits =
-      SEMANTIC(type1->BitsetGlb() & type2->BitsetGlb()) | representation;
-  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
-  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
-  if (!AddIsSafe(size1, size2)) return Any();
-  int size = size1 + size2;
-  if (!AddIsSafe(size, 2)) return Any();
-  size += 2;
-  Type* result_type = UnionType::New(size, zone);
-  UnionType* result = result_type->AsUnion();
-  size = 0;
-
-  // Deal with bitsets.
-  result->Set(size++, BitsetType::New(bits));
-
-  RangeType::Limits lims = RangeType::Limits::Empty();
-  size = IntersectAux(type1, type2, result, size, &lims, zone);
-
-  // If the range is not empty, then insert it into the union and
-  // remove the number bits from the bitset.
-  if (!lims.IsEmpty()) {
-    size = UpdateRange(RangeType::New(lims, representation, zone), result, size,
-                       zone);
-
-    // Remove the number bits.
-    bitset number_bits = BitsetType::NumberBits(bits);
-    bits &= ~number_bits;
-    result->Set(0, BitsetType::New(bits));
-  }
-  return NormalizeUnion(result_type, size, zone);
-}
-
-int Type::UpdateRange(Type* range, UnionType* result, int size, Zone* zone) {
-  if (size == 1) {
-    result->Set(size++, range);
-  } else {
-    // Make space for the range.
-    result->Set(size++, result->Get(1));
-    result->Set(1, range);
-  }
-
-  // Remove any components that just got subsumed.
-  for (int i = 2; i < size; ) {
-    if (result->Get(i)->SemanticIs(range)) {
-      result->Set(i, result->Get(--size));
-    } else {
-      ++i;
-    }
-  }
-  return size;
-}
-
-RangeType::Limits Type::ToLimits(bitset bits, Zone* zone) {
-  bitset number_bits = BitsetType::NumberBits(bits);
-
-  if (number_bits == BitsetType::kNone) {
-    return RangeType::Limits::Empty();
-  }
-
-  return RangeType::Limits(BitsetType::Min(number_bits),
-                           BitsetType::Max(number_bits));
-}
-
-RangeType::Limits Type::IntersectRangeAndBitset(Type* range, Type* bitset,
-                                                Zone* zone) {
-  RangeType::Limits range_lims(range->AsRange());
-  RangeType::Limits bitset_lims = ToLimits(bitset->AsBitset(), zone);
-  return RangeType::Limits::Intersect(range_lims, bitset_lims);
-}
-
-int Type::IntersectAux(Type* lhs, Type* rhs, UnionType* result, int size,
-                       RangeType::Limits* lims, Zone* zone) {
-  if (lhs->IsUnion()) {
-    for (int i = 0, n = lhs->AsUnion()->Length(); i < n; ++i) {
-      size =
-          IntersectAux(lhs->AsUnion()->Get(i), rhs, result, size, lims, zone);
-    }
-    return size;
-  }
-  if (rhs->IsUnion()) {
-    for (int i = 0, n = rhs->AsUnion()->Length(); i < n; ++i) {
-      size =
-          IntersectAux(lhs, rhs->AsUnion()->Get(i), result, size, lims, zone);
-    }
-    return size;
-  }
-
-  if (!BitsetType::SemanticIsInhabited(lhs->BitsetLub() & rhs->BitsetLub())) {
-    return size;
-  }
-
-  if (lhs->IsRange()) {
-    if (rhs->IsBitset()) {
-      RangeType::Limits lim = IntersectRangeAndBitset(lhs, rhs, zone);
-
-      if (!lim.IsEmpty()) {
-        *lims = RangeType::Limits::Union(lim, *lims);
-      }
-      return size;
-    }
-    if (rhs->IsClass()) {
-      *lims =
-          RangeType::Limits::Union(RangeType::Limits(lhs->AsRange()), *lims);
-    }
-    if (rhs->IsConstant() && Contains(lhs->AsRange(), rhs->AsConstant())) {
-      return AddToUnion(rhs, result, size, zone);
-    }
-    if (rhs->IsRange()) {
-      RangeType::Limits lim = RangeType::Limits::Intersect(
-          RangeType::Limits(lhs->AsRange()), RangeType::Limits(rhs->AsRange()));
-      if (!lim.IsEmpty()) {
-        *lims = RangeType::Limits::Union(lim, *lims);
-      }
-    }
-    return size;
-  }
-  if (rhs->IsRange()) {
-    // This case is handled symmetrically above.
-    return IntersectAux(rhs, lhs, result, size, lims, zone);
-  }
-  if (lhs->IsBitset() || rhs->IsBitset()) {
-    return AddToUnion(lhs->IsBitset() ? rhs : lhs, result, size, zone);
-  }
-  if (lhs->IsClass() != rhs->IsClass()) {
-    return AddToUnion(lhs->IsClass() ? rhs : lhs, result, size, zone);
-  }
-  if (lhs->SimplyEquals(rhs)) {
-    return AddToUnion(lhs, result, size, zone);
-  }
-  return size;
-}
-
-
-// Make sure that we produce a well-formed range and bitset:
-// If the range is non-empty, the number bits in the bitset should be
-// clear. Moreover, if we have a canonical range (such as Signed32),
-// we want to produce a bitset rather than a range.
-Type* Type::NormalizeRangeAndBitset(Type* range, bitset* bits, Zone* zone) {
-  // Fast path: If the bitset does not mention numbers, we can just keep the
-  // range.
-  bitset number_bits = BitsetType::NumberBits(*bits);
-  if (number_bits == 0) {
-    return range;
-  }
-
-  // If the range is semantically contained within the bitset, return None and
-  // leave the bitset untouched.
-  bitset range_lub = SEMANTIC(range->BitsetLub());
-  if (BitsetType::Is(range_lub, *bits)) {
-    return None();
-  }
-
-  // Slow path: reconcile the bitset range and the range.
-  double bitset_min = BitsetType::Min(number_bits);
-  double bitset_max = BitsetType::Max(number_bits);
-
-  double range_min = range->Min();
-  double range_max = range->Max();
-
-  // Remove the number bits from the bitset, they would just confuse us now.
-  // NOTE: bits contains OtherNumber iff bits contains PlainNumber, in which
-  // case we already returned after the subtype check above.
-  *bits &= ~number_bits;
-
-  if (range_min <= bitset_min && range_max >= bitset_max) {
-    // Bitset is contained within the range, just return the range.
-    return range;
-  }
-
-  if (bitset_min < range_min) {
-    range_min = bitset_min;
-  }
-  if (bitset_max > range_max) {
-    range_max = bitset_max;
-  }
-  return RangeType::New(range_min, range_max, BitsetType::kNone, zone);
-}
-
-Type* Type::Union(Type* type1, Type* type2, Zone* zone) {
-  // Fast case: bit sets.
-  if (type1->IsBitset() && type2->IsBitset()) {
-    return BitsetType::New(type1->AsBitset() | type2->AsBitset());
-  }
-
-  // Fast case: top or bottom types.
-  if (type1->IsAny() || type2->IsNone()) return type1;
-  if (type2->IsAny() || type1->IsNone()) return type2;
-
-  // Semi-fast case.
-  if (type1->Is(type2)) return type2;
-  if (type2->Is(type1)) return type1;
-
-  // Figure out the representation of the result.
-  // The rest of the method should not change this representation and
-  // it should not make any decisions based on representations (i.e.,
-  // it should only use the semantic part of types).
-  const bitset representation =
-      type1->Representation() | type2->Representation();
-
-  // Slow case: create union.
-  int size1 = type1->IsUnion() ? type1->AsUnion()->Length() : 1;
-  int size2 = type2->IsUnion() ? type2->AsUnion()->Length() : 1;
-  if (!AddIsSafe(size1, size2)) return Any();
-  int size = size1 + size2;
-  if (!AddIsSafe(size, 2)) return Any();
-  size += 2;
-  Type* result_type = UnionType::New(size, zone);
-  UnionType* result = result_type->AsUnion();
-  size = 0;
-
-  // Compute the new bitset.
-  bitset new_bitset = SEMANTIC(type1->BitsetGlb() | type2->BitsetGlb());
-
-  // Deal with ranges.
-  Type* range = None();
-  Type* range1 = type1->GetRange();
-  Type* range2 = type2->GetRange();
-  if (range1 != NULL && range2 != NULL) {
-    RangeType::Limits lims =
-        RangeType::Limits::Union(RangeType::Limits(range1->AsRange()),
-                                 RangeType::Limits(range2->AsRange()));
-    Type* union_range = RangeType::New(lims, representation, zone);
-    range = NormalizeRangeAndBitset(union_range, &new_bitset, zone);
-  } else if (range1 != NULL) {
-    range = NormalizeRangeAndBitset(range1, &new_bitset, zone);
-  } else if (range2 != NULL) {
-    range = NormalizeRangeAndBitset(range2, &new_bitset, zone);
-  }
-  new_bitset = SEMANTIC(new_bitset) | representation;
-  Type* bits = BitsetType::New(new_bitset);
-  result->Set(size++, bits);
-  if (!range->IsNone()) result->Set(size++, range);
-
-  size = AddToUnion(type1, result, size, zone);
-  size = AddToUnion(type2, result, size, zone);
-  return NormalizeUnion(result_type, size, zone);
-}
-
-
-// Add [type] to [result] unless [type] is bitset, range, or already subsumed.
-// Return new size of [result].
-int Type::AddToUnion(Type* type, UnionType* result, int size, Zone* zone) {
-  if (type->IsBitset() || type->IsRange()) return size;
-  if (type->IsUnion()) {
-    for (int i = 0, n = type->AsUnion()->Length(); i < n; ++i) {
-      size = AddToUnion(type->AsUnion()->Get(i), result, size, zone);
-    }
-    return size;
-  }
-  for (int i = 0; i < size; ++i) {
-    if (type->SemanticIs(result->Get(i))) return size;
-  }
-  result->Set(size++, type);
-  return size;
-}
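
The subsumption check in AddToUnion is what keeps unions small: an element is appended only if nothing already present covers it. A toy version over integer intervals, showing just that rule (the bitset/range/union special cases are left out; names are made up):

#include <cstdio>
#include <vector>

// Toy model of AddToUnion's subsumption rule: append an interval to the
// union unless some interval already present contains it.
struct Interval { double min, max; };

bool Contains(const Interval& outer, const Interval& inner) {
  return outer.min <= inner.min && inner.max <= outer.max;
}

void AddUnlessSubsumed(std::vector<Interval>* result, Interval candidate) {
  for (const Interval& existing : *result) {
    if (Contains(existing, candidate)) return;  // already subsumed
  }
  result->push_back(candidate);
}

int main() {
  std::vector<Interval> u;
  AddUnlessSubsumed(&u, {0, 10});
  AddUnlessSubsumed(&u, {2, 5});    // dropped: contained in [0, 10]
  AddUnlessSubsumed(&u, {20, 30});  // kept
  std::printf("%zu elements\n", u.size());  // prints 2 elements
  return 0;
}
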
-
-Type* Type::NormalizeUnion(Type* union_type, int size, Zone* zone) {
-  UnionType* unioned = union_type->AsUnion();
-  DCHECK(size >= 1);
-  DCHECK(unioned->Get(0)->IsBitset());
-  // If the union has just one element, return it.
-  if (size == 1) {
-    return unioned->Get(0);
-  }
-  bitset bits = unioned->Get(0)->AsBitset();
-  // If the union only consists of a range, we can get rid of the union.
-  if (size == 2 && SEMANTIC(bits) == BitsetType::kNone) {
-    bitset representation = REPRESENTATION(bits);
-    if (representation == unioned->Get(1)->Representation()) {
-      return unioned->Get(1);
-    }
-    if (unioned->Get(1)->IsRange()) {
-      return RangeType::New(unioned->Get(1)->AsRange()->Min(),
-                            unioned->Get(1)->AsRange()->Max(),
-                            unioned->Get(0)->AsBitset(), zone);
-    }
-  }
-  unioned->Shrink(size);
-  SLOW_DCHECK(unioned->Wellformed());
-  return union_type;
-}
-
-
-// -----------------------------------------------------------------------------
-// Component extraction
-
-// static
-Type* Type::Representation(Type* t, Zone* zone) {
-  return BitsetType::New(t->Representation());
-}
-
-
-// static
-Type* Type::Semantic(Type* t, Zone* zone) {
-  return Intersect(t, BitsetType::New(BitsetType::kSemantic), zone);
-}
-
-
-// -----------------------------------------------------------------------------
-// Iteration.
-
-int Type::NumClasses() {
-  DisallowHeapAllocation no_allocation;
-  if (this->IsClass()) {
-    return 1;
-  } else if (this->IsUnion()) {
-    int result = 0;
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      if (this->AsUnion()->Get(i)->IsClass()) ++result;
-    }
-    return result;
-  } else {
-    return 0;
-  }
-}
-
-int Type::NumConstants() {
-  DisallowHeapAllocation no_allocation;
-  if (this->IsConstant()) {
-    return 1;
-  } else if (this->IsUnion()) {
-    int result = 0;
-    for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-      if (this->AsUnion()->Get(i)->IsConstant()) ++result;
-    }
-    return result;
-  } else {
-    return 0;
-  }
-}
-
-template <class T>
-Type* Type::Iterator<T>::get_type() {
-  DCHECK(!Done());
-  return type_->IsUnion() ? type_->AsUnion()->Get(index_) : type_;
-}
-
-
-// C++ cannot specialise nested templates, so we have to go through this
-// contortion with an auxiliary template to simulate it.
-template <class T>
-struct TypeImplIteratorAux {
-  static bool matches(Type* type);
-  static i::Handle<T> current(Type* type);
-};
-
-template <>
-struct TypeImplIteratorAux<i::Map> {
-  static bool matches(Type* type) { return type->IsClass(); }
-  static i::Handle<i::Map> current(Type* type) {
-    return type->AsClass()->Map();
-  }
-};
-
-template <>
-struct TypeImplIteratorAux<i::Object> {
-  static bool matches(Type* type) { return type->IsConstant(); }
-  static i::Handle<i::Object> current(Type* type) {
-    return type->AsConstant()->Value();
-  }
-};
-
-template <class T>
-bool Type::Iterator<T>::matches(Type* type) {
-  return TypeImplIteratorAux<T>::matches(type);
-}
-
-template <class T>
-i::Handle<T> Type::Iterator<T>::Current() {
-  return TypeImplIteratorAux<T>::current(get_type());
-}
-
-template <class T>
-void Type::Iterator<T>::Advance() {
-  DisallowHeapAllocation no_allocation;
-  ++index_;
-  if (type_->IsUnion()) {
-    for (int n = type_->AsUnion()->Length(); index_ < n; ++index_) {
-      if (matches(type_->AsUnion()->Get(index_))) return;
-    }
-  } else if (index_ == 0 && matches(type_)) {
-    return;
-  }
-  index_ = -1;
-}
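
Whatever the precise language restriction the "contortion" comment alludes to, the workaround has a reusable shape: a free-standing traits template that is explicitly specialized per element type, with the member template simply forwarding to it. A self-contained sketch of that shape, with hypothetical names:

#include <iostream>
#include <string>

// Generic helper, specialized per element type; the member template below
// forwards to it instead of being specialized itself.
template <class T>
struct ElementTraits;

template <>
struct ElementTraits<int> {
  static const char* Kind() { return "int"; }
};

template <>
struct ElementTraits<std::string> {
  static const char* Kind() { return "string"; }
};

class Container {
 public:
  template <class T>
  const char* KindOf() {
    return ElementTraits<T>::Kind();  // dispatch through the helper
  }
};

int main() {
  Container c;
  std::cout << c.KindOf<int>() << " " << c.KindOf<std::string>() << "\n";
  return 0;
}
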
-
-
-// -----------------------------------------------------------------------------
-// Printing.
-
-const char* BitsetType::Name(bitset bits) {
-  switch (bits) {
-    case REPRESENTATION(kAny): return "Any";
-    #define RETURN_NAMED_REPRESENTATION_TYPE(type, value) \
-    case REPRESENTATION(k##type): return #type;
-    REPRESENTATION_BITSET_TYPE_LIST(RETURN_NAMED_REPRESENTATION_TYPE)
-    #undef RETURN_NAMED_REPRESENTATION_TYPE
-
-    #define RETURN_NAMED_SEMANTIC_TYPE(type, value) \
-    case SEMANTIC(k##type): return #type;
-    SEMANTIC_BITSET_TYPE_LIST(RETURN_NAMED_SEMANTIC_TYPE)
-    INTERNAL_BITSET_TYPE_LIST(RETURN_NAMED_SEMANTIC_TYPE)
-    #undef RETURN_NAMED_SEMANTIC_TYPE
-
-    default:
-      return NULL;
-  }
-}
-
-void BitsetType::Print(std::ostream& os,  // NOLINT
-                       bitset bits) {
-  DisallowHeapAllocation no_allocation;
-  const char* name = Name(bits);
-  if (name != NULL) {
-    os << name;
-    return;
-  }
-
-  // clang-format off
-  static const bitset named_bitsets[] = {
-#define BITSET_CONSTANT(type, value) REPRESENTATION(k##type),
-    REPRESENTATION_BITSET_TYPE_LIST(BITSET_CONSTANT)
-#undef BITSET_CONSTANT
-
-#define BITSET_CONSTANT(type, value) SEMANTIC(k##type),
-    INTERNAL_BITSET_TYPE_LIST(BITSET_CONSTANT)
-    SEMANTIC_BITSET_TYPE_LIST(BITSET_CONSTANT)
-#undef BITSET_CONSTANT
-  };
-  // clang-format on
-
-  bool is_first = true;
-  os << "(";
-  for (int i(arraysize(named_bitsets) - 1); bits != 0 && i >= 0; --i) {
-    bitset subset = named_bitsets[i];
-    if ((bits & subset) == subset) {
-      if (!is_first) os << " | ";
-      is_first = false;
-      os << Name(subset);
-      bits -= subset;
-    }
-  }
-  DCHECK(bits == 0);
-  os << ")";
-}
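
The printing loop above greedily emits any named subset that is fully contained in the bits that remain, clearing those bits as it goes. The same idea in a standalone form, with made-up flag names and values:

#include <cstdint>
#include <cstdio>

// Greedy decomposition of a flag word into named subsets, echoing the loop
// in BitsetType::Print above. Flag names and values are invented.
struct NamedBits { uint32_t bits; const char* name; };

void PrintFlags(uint32_t bits) {
  // Ordered so larger, combined subsets are tried first.
  static const NamedBits kNames[] = {
      {0x7, "ReadWriteExecute"}, {0x3, "ReadWrite"},
      {0x4, "Execute"}, {0x2, "Write"}, {0x1, "Read"}};
  bool first = true;
  std::printf("(");
  for (const NamedBits& n : kNames) {
    if (bits != 0 && (bits & n.bits) == n.bits) {
      if (!first) std::printf(" | ");
      first = false;
      std::printf("%s", n.name);
      bits &= ~n.bits;
    }
  }
  std::printf(")\n");
}

int main() {
  PrintFlags(0x5);  // prints (Execute | Read)
  return 0;
}
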
-
-void Type::PrintTo(std::ostream& os, PrintDimension dim) {
-  DisallowHeapAllocation no_allocation;
-  if (dim != REPRESENTATION_DIM) {
-    if (this->IsBitset()) {
-      BitsetType::Print(os, SEMANTIC(this->AsBitset()));
-    } else if (this->IsClass()) {
-      os << "Class(" << static_cast<void*>(*this->AsClass()->Map()) << " < ";
-      BitsetType::New(BitsetType::Lub(this))->PrintTo(os, dim);
-      os << ")";
-    } else if (this->IsConstant()) {
-      os << "Constant(" << Brief(*this->AsConstant()->Value()) << ")";
-    } else if (this->IsRange()) {
-      std::ostream::fmtflags saved_flags = os.setf(std::ios::fixed);
-      std::streamsize saved_precision = os.precision(0);
-      os << "Range(" << this->AsRange()->Min() << ", " << this->AsRange()->Max()
-         << ")";
-      os.flags(saved_flags);
-      os.precision(saved_precision);
-    } else if (this->IsContext()) {
-      os << "Context(";
-      this->AsContext()->Outer()->PrintTo(os, dim);
-      os << ")";
-    } else if (this->IsUnion()) {
-      os << "(";
-      for (int i = 0, n = this->AsUnion()->Length(); i < n; ++i) {
-        Type* type_i = this->AsUnion()->Get(i);
-        if (i > 0) os << " | ";
-        type_i->PrintTo(os, dim);
-      }
-      os << ")";
-    } else if (this->IsArray()) {
-      os << "Array(";
-      AsArray()->Element()->PrintTo(os, dim);
-      os << ")";
-    } else if (this->IsFunction()) {
-      if (!this->AsFunction()->Receiver()->IsAny()) {
-        this->AsFunction()->Receiver()->PrintTo(os, dim);
-        os << ".";
-      }
-      os << "(";
-      for (int i = 0; i < this->AsFunction()->Arity(); ++i) {
-        if (i > 0) os << ", ";
-        this->AsFunction()->Parameter(i)->PrintTo(os, dim);
-      }
-      os << ")->";
-      this->AsFunction()->Result()->PrintTo(os, dim);
-    } else if (this->IsTuple()) {
-      os << "<";
-      for (int i = 0, n = this->AsTuple()->Arity(); i < n; ++i) {
-        Type* type_i = this->AsTuple()->Element(i);
-        if (i > 0) os << ", ";
-        type_i->PrintTo(os, dim);
-      }
-      os << ">";
-    } else {
-      UNREACHABLE();
-    }
-  }
-  if (dim == BOTH_DIMS) os << "/";
-  if (dim != SEMANTIC_DIM) {
-    BitsetType::Print(os, REPRESENTATION(this->BitsetLub()));
-  }
-}
-
-
-#ifdef DEBUG
-void Type::Print() {
-  OFStream os(stdout);
-  PrintTo(os);
-  os << std::endl;
-}
-void BitsetType::Print(bitset bits) {
-  OFStream os(stdout);
-  Print(os, bits);
-  os << std::endl;
-}
-#endif
-
-BitsetType::bitset BitsetType::SignedSmall() {
-  return i::SmiValuesAre31Bits() ? kSigned31 : kSigned32;
-}
-
-BitsetType::bitset BitsetType::UnsignedSmall() {
-  return i::SmiValuesAre31Bits() ? kUnsigned30 : kUnsigned31;
-}
-
-#define CONSTRUCT_SIMD_TYPE(NAME, Name, name, lane_count, lane_type) \
-  Type* Type::Name(Isolate* isolate, Zone* zone) {                   \
-    return Class(i::handle(isolate->heap()->name##_map()), zone);    \
-  }
-SIMD128_TYPES(CONSTRUCT_SIMD_TYPE)
-#undef CONSTRUCT_SIMD_TYPE
-
-// -----------------------------------------------------------------------------
-// Instantiations.
-
-template class Type::Iterator<i::Map>;
-template class Type::Iterator<i::Object>;
-
-}  // namespace internal
-}  // namespace v8
diff --git a/src/types.h b/src/types.h
deleted file mode 100644
index 746cca7..0000000
--- a/src/types.h
+++ /dev/null
@@ -1,982 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_TYPES_H_
-#define V8_TYPES_H_
-
-#include "src/conversions.h"
-#include "src/handles.h"
-#include "src/objects.h"
-#include "src/ostreams.h"
-
-namespace v8 {
-namespace internal {
-
-// SUMMARY
-//
-// A simple type system for compiler-internal use. It is based entirely on
-// union types, and all subtyping hence amounts to set inclusion. Besides the
-// obvious primitive types and some predefined unions, the type language also
-// can express class types (a.k.a. specific maps) and singleton types (i.e.,
-// concrete constants).
-//
-// Types consist of two dimensions: semantic (value range) and representation.
-// Both are related through subtyping.
-//
-//
-// SEMANTIC DIMENSION
-//
-// The following equations and inequations hold for the semantic axis:
-//
-//   None <= T
-//   T <= Any
-//
-//   Number = Signed32 \/ Unsigned32 \/ Double
-//   Smi <= Signed32
-//   Name = String \/ Symbol
-//   UniqueName = InternalizedString \/ Symbol
-//   InternalizedString < String
-//
-//   Receiver = Object \/ Proxy
-//   Array < Object
-//   Function < Object
-//   RegExp < Object
-//   OtherUndetectable < Object
-//   DetectableReceiver = Receiver - OtherUndetectable
-//
-//   Class(map) < T   iff instance_type(map) < T
-//   Constant(x) < T  iff instance_type(map(x)) < T
-//   Array(T) < Array
-//   Function(R, S, T0, T1, ...) < Function
-//   Context(T) < Internal
-//
-// Both structural Array and Function types are invariant in all parameters;
-// relaxing this would make Union and Intersect operations more involved.
-// There is no subtyping relation between Array, Function, or Context types
-// and respective Constant types, since these types cannot be reconstructed
-// for arbitrary heap values.
-// Note also that Constant(x) < Class(map(x)) does _not_ hold, since x's map can
-// change! (Its instance type cannot, however.)
-// TODO(rossberg): the latter is not currently true for proxies, because of fix,
-// but will hold once we implement direct proxies.
-// However, we also define a 'temporal' variant of the subtyping relation that
-// considers the _current_ state only, i.e., Constant(x) <_now Class(map(x)).
-//
-//
-// REPRESENTATIONAL DIMENSION
-//
-// For the representation axis, the following holds:
-//
-//   None <= R
-//   R <= Any
-//
-//   UntaggedInt = UntaggedInt1 \/ UntaggedInt8 \/
-//                 UntaggedInt16 \/ UntaggedInt32
-//   UntaggedFloat = UntaggedFloat32 \/ UntaggedFloat64
-//   UntaggedNumber = UntaggedInt \/ UntaggedFloat
-//   Untagged = UntaggedNumber \/ UntaggedPtr
-//   Tagged = TaggedInt \/ TaggedPtr
-//
-// Subtyping relates the two dimensions, for example:
-//
-//   Number <= Tagged \/ UntaggedNumber
-//   Object <= TaggedPtr \/ UntaggedPtr
-//
-// That holds because the semantic type constructors defined by the API create
-// types that allow for all possible representations, and dually, the ones for
-// representation types initially include all semantic ranges. Representations
-// can then e.g. be narrowed for a given semantic type using intersection:
-//
-//   SignedSmall /\ TaggedInt       (a 'smi')
-//   Number /\ TaggedPtr            (a heap number)
-//
-//
-// RANGE TYPES
-//
-// A range type represents a continuous integer interval by its minimum and
-// maximum value.  Either value may be an infinity, in which case that infinity
-// itself is also included in the range.   A range never contains NaN or -0.
-//
-// If a value v happens to be an integer n, then Constant(v) is considered a
-// subtype of Range(n, n) (and therefore also a subtype of any larger range).
-// In order to avoid large unions, however, it is usually a good idea to use
-// Range rather than Constant.
-//
-//
-// PREDICATES
-//
-// There are two main functions for testing types:
-//
-//   T1->Is(T2)     -- tests whether T1 is included in T2 (i.e., T1 <= T2)
-//   T1->Maybe(T2)  -- tests whether T1 and T2 overlap (i.e., T1 /\ T2 =/= 0)
-//
-// Typically, the former is to be used to select representations (e.g., via
-// T->Is(SignedSmall())), and the latter to check whether a specific case needs
-// handling (e.g., via T->Maybe(Number())).
-//
-// There is no functionality to discover whether a type is a leaf in the
-// lattice. That is intentional. It should always be possible to refine the
-// lattice (e.g., splitting up number types further) without invalidating any
-// existing assumptions or tests.
-// Consequently, do not normally use Equals for type tests, always use Is!
-//
-// The NowIs operator implements state-sensitive subtyping, as described above.
-// Any compilation decision based on such temporary properties requires runtime
-// guarding!
-//
-//
-// PROPERTIES
-//
-// Various formal properties hold for constructors, operators, and predicates
-// over types. For example, constructors are injective and subtyping is a
-// complete partial order.
-//
-// See test/cctest/test-types.cc for a comprehensive executable specification,
-// especially with respect to the properties of the more exotic 'temporal'
-// constructors and predicates (those prefixed 'Now').
-//
-//
-// IMPLEMENTATION
-//
-// Internally, all 'primitive' types, and their unions, are represented as
-// bitsets. Bit 0 is reserved for tagging. Class is a heap pointer to the
-// respective map. Only structured types require allocation.
-// Note that the bitset representation is closed under both Union and Intersect.
-
-
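
Because all subtyping here amounts to set inclusion, the two predicates described under PREDICATES have a very small model: Is is subset inclusion and Maybe is non-empty intersection. A toy rendering with explicit value sets (std::set stands in for a type's set of values; nothing below is V8 API):

#include <algorithm>
#include <cstdio>
#include <set>

// Toy model: a "type" is literally its set of values. Then
//   Is(T1, T2)    <=>  T1 is a subset of T2
//   Maybe(T1, T2) <=>  T1 and T2 have a non-empty intersection
using ValueSet = std::set<int>;

bool Is(const ValueSet& t1, const ValueSet& t2) {
  return std::includes(t2.begin(), t2.end(), t1.begin(), t1.end());
}

bool Maybe(const ValueSet& t1, const ValueSet& t2) {
  for (int v : t1) {
    if (t2.count(v)) return true;
  }
  return false;
}

int main() {
  ValueSet signed_small = {-1, 0, 1};
  ValueSet number = {-2, -1, 0, 1, 2};
  ValueSet string_like = {100, 101};
  std::printf("%d %d %d\n", Is(signed_small, number),
              Maybe(number, string_like), Is(number, signed_small));
  // prints 1 0 0
  return 0;
}
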
-// -----------------------------------------------------------------------------
-// Values for bitset types
-
-// clang-format off
-
-#define MASK_BITSET_TYPE_LIST(V) \
-  V(Representation, 0xffc00000u) \
-  V(Semantic,       0x003ffffeu)
-
-#define REPRESENTATION(k) ((k) & BitsetType::kRepresentation)
-#define SEMANTIC(k)       ((k) & BitsetType::kSemantic)
-
-#define REPRESENTATION_BITSET_TYPE_LIST(V)    \
-  V(None,               0)                    \
-  V(UntaggedBit,        1u << 22 | kSemantic) \
-  V(UntaggedIntegral8,  1u << 23 | kSemantic) \
-  V(UntaggedIntegral16, 1u << 24 | kSemantic) \
-  V(UntaggedIntegral32, 1u << 25 | kSemantic) \
-  V(UntaggedFloat32,    1u << 26 | kSemantic) \
-  V(UntaggedFloat64,    1u << 27 | kSemantic) \
-  V(UntaggedSimd128,    1u << 28 | kSemantic) \
-  V(UntaggedPointer,    1u << 29 | kSemantic) \
-  V(TaggedSigned,       1u << 30 | kSemantic) \
-  V(TaggedPointer,      1u << 31 | kSemantic) \
-  \
-  V(UntaggedIntegral,   kUntaggedBit | kUntaggedIntegral8 |        \
-                        kUntaggedIntegral16 | kUntaggedIntegral32) \
-  V(UntaggedFloat,      kUntaggedFloat32 | kUntaggedFloat64)       \
-  V(UntaggedNumber,     kUntaggedIntegral | kUntaggedFloat)        \
-  V(Untagged,           kUntaggedNumber | kUntaggedPointer)        \
-  V(Tagged,             kTaggedSigned | kTaggedPointer)
-
-#define INTERNAL_BITSET_TYPE_LIST(V)                                      \
-  V(OtherUnsigned31, 1u << 1 | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(OtherUnsigned32, 1u << 2 | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(OtherSigned32,   1u << 3 | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(OtherNumber,     1u << 4 | REPRESENTATION(kTagged | kUntaggedNumber))
-
-#define SEMANTIC_BITSET_TYPE_LIST(V) \
-  V(Negative31,          1u << 5  | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(Null,                1u << 6  | REPRESENTATION(kTaggedPointer)) \
-  V(Undefined,           1u << 7  | REPRESENTATION(kTaggedPointer)) \
-  V(Boolean,             1u << 8  | REPRESENTATION(kTaggedPointer)) \
-  V(Unsigned30,          1u << 9  | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(MinusZero,           1u << 10 | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(NaN,                 1u << 11 | REPRESENTATION(kTagged | kUntaggedNumber)) \
-  V(Symbol,              1u << 12 | REPRESENTATION(kTaggedPointer)) \
-  V(InternalizedString,  1u << 13 | REPRESENTATION(kTaggedPointer)) \
-  V(OtherString,         1u << 14 | REPRESENTATION(kTaggedPointer)) \
-  V(Simd,                1u << 15 | REPRESENTATION(kTaggedPointer)) \
-  V(OtherObject,         1u << 17 | REPRESENTATION(kTaggedPointer)) \
-  V(OtherUndetectable,   1u << 16 | REPRESENTATION(kTaggedPointer)) \
-  V(Proxy,               1u << 18 | REPRESENTATION(kTaggedPointer)) \
-  V(Function,            1u << 19 | REPRESENTATION(kTaggedPointer)) \
-  V(Hole,                1u << 20 | REPRESENTATION(kTaggedPointer)) \
-  V(OtherInternal,       1u << 21 | REPRESENTATION(kTagged | kUntagged)) \
-  \
-  V(Signed31,                   kUnsigned30 | kNegative31) \
-  V(Signed32,                   kSigned31 | kOtherUnsigned31 | kOtherSigned32) \
-  V(Signed32OrMinusZero,        kSigned32 | kMinusZero) \
-  V(Signed32OrMinusZeroOrNaN,   kSigned32 | kMinusZero | kNaN) \
-  V(Negative32,                 kNegative31 | kOtherSigned32) \
-  V(Unsigned31,                 kUnsigned30 | kOtherUnsigned31) \
-  V(Unsigned32,                 kUnsigned30 | kOtherUnsigned31 | \
-                                kOtherUnsigned32) \
-  V(Unsigned32OrMinusZero,      kUnsigned32 | kMinusZero) \
-  V(Unsigned32OrMinusZeroOrNaN, kUnsigned32 | kMinusZero | kNaN) \
-  V(Integral32,                 kSigned32 | kUnsigned32) \
-  V(PlainNumber,                kIntegral32 | kOtherNumber) \
-  V(OrderedNumber,              kPlainNumber | kMinusZero) \
-  V(MinusZeroOrNaN,             kMinusZero | kNaN) \
-  V(Number,                     kOrderedNumber | kNaN) \
-  V(String,                     kInternalizedString | kOtherString) \
-  V(UniqueName,                 kSymbol | kInternalizedString) \
-  V(Name,                       kSymbol | kString) \
-  V(BooleanOrNumber,            kBoolean | kNumber) \
-  V(BooleanOrNullOrNumber,      kBooleanOrNumber | kNull) \
-  V(BooleanOrNullOrUndefined,   kBoolean | kNull | kUndefined) \
-  V(NullOrNumber,               kNull | kNumber) \
-  V(NullOrUndefined,            kNull | kUndefined) \
-  V(Undetectable,               kNullOrUndefined | kOtherUndetectable) \
-  V(NumberOrOddball,            kNumber | kNullOrUndefined | kBoolean | kHole) \
-  V(NumberOrSimdOrString,       kNumber | kSimd | kString) \
-  V(NumberOrString,             kNumber | kString) \
-  V(NumberOrUndefined,          kNumber | kUndefined) \
-  V(PlainPrimitive,             kNumberOrString | kBoolean | kNullOrUndefined) \
-  V(Primitive,                  kSymbol | kSimd | kPlainPrimitive) \
-  V(DetectableReceiver,         kFunction | kOtherObject | kProxy) \
-  V(Object,                     kFunction | kOtherObject | kOtherUndetectable) \
-  V(Receiver,                   kObject | kProxy) \
-  V(StringOrReceiver,           kString | kReceiver) \
-  V(Unique,                     kBoolean | kUniqueName | kNull | kUndefined | \
-                                kReceiver) \
-  V(Internal,                   kHole | kOtherInternal) \
-  V(NonInternal,                kPrimitive | kReceiver) \
-  V(NonNumber,                  kUnique | kString | kInternal) \
-  V(Any,                        0xfffffffeu)
-
-// clang-format on
-
-/*
- * The following diagrams show how integers (in the mathematical sense) are
- * divided among the different atomic numerical types.
- *
- *   ON    OS32     N31     U30     OU31    OU32     ON
- * ______[_______[_______[_______[_______[_______[_______
- *     -2^31   -2^30     0      2^30    2^31    2^32
- *
- * E.g., OtherUnsigned32 (OU32) covers all integers from 2^31 to 2^32-1.
- *
- * Some of the atomic numerical bitsets are internal only (see
- * INTERNAL_BITSET_TYPE_LIST).  To a types user, they should only occur in
- * union with certain other bitsets.  For instance, OtherNumber should only
- * occur as part of PlainNumber.
- */
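
Read literally, the diagram gives a partition of the integers that can be written as a plain classifier. One possible reading, purely for illustration (the real types are bitsets derived from Lub(min, max), not a chain of comparisons):

#include <cstdio>

// Classify an integral double by which atomic numerical interval from the
// diagram above it falls into. Illustrative only.
const char* ClassifyInteger(double v) {
  const double k2_30 = 1073741824.0;   // 2^30
  const double k2_31 = 2147483648.0;   // 2^31
  const double k2_32 = 4294967296.0;   // 2^32
  if (v < -k2_31) return "OtherNumber";
  if (v < -k2_30) return "OtherSigned32";
  if (v < 0) return "Negative31";
  if (v < k2_30) return "Unsigned30";
  if (v < k2_31) return "OtherUnsigned31";
  if (v < k2_32) return "OtherUnsigned32";
  return "OtherNumber";
}

int main() {
  std::printf("%s %s\n", ClassifyInteger(3000000000.0),  // OtherUnsigned32
              ClassifyInteger(-5.0));                    // Negative31
  return 0;
}
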
-
-#define PROPER_BITSET_TYPE_LIST(V) \
-  REPRESENTATION_BITSET_TYPE_LIST(V) \
-  SEMANTIC_BITSET_TYPE_LIST(V)
-
-#define BITSET_TYPE_LIST(V)          \
-  MASK_BITSET_TYPE_LIST(V)           \
-  REPRESENTATION_BITSET_TYPE_LIST(V) \
-  INTERNAL_BITSET_TYPE_LIST(V)       \
-  SEMANTIC_BITSET_TYPE_LIST(V)
-
-class Type;
-
-// -----------------------------------------------------------------------------
-// Bitset types (internal).
-
-class BitsetType {
- public:
-  typedef uint32_t bitset;  // Internal
-
-  enum : uint32_t {
-#define DECLARE_TYPE(type, value) k##type = (value),
-    BITSET_TYPE_LIST(DECLARE_TYPE)
-#undef DECLARE_TYPE
-        kUnusedEOL = 0
-  };
-
-  static bitset SignedSmall();
-  static bitset UnsignedSmall();
-
-  bitset Bitset() {
-    return static_cast<bitset>(reinterpret_cast<uintptr_t>(this) ^ 1u);
-  }
-
-  static bool IsInhabited(bitset bits) {
-    return SEMANTIC(bits) != kNone && REPRESENTATION(bits) != kNone;
-  }
-
-  static bool SemanticIsInhabited(bitset bits) {
-    return SEMANTIC(bits) != kNone;
-  }
-
-  static bool Is(bitset bits1, bitset bits2) {
-    return (bits1 | bits2) == bits2;
-  }
-
-  static double Min(bitset);
-  static double Max(bitset);
-
-  static bitset Glb(Type* type);  // greatest lower bound that's a bitset
-  static bitset Glb(double min, double max);
-  static bitset Lub(Type* type);  // least upper bound that's a bitset
-  static bitset Lub(i::Map* map);
-  static bitset Lub(i::Object* value);
-  static bitset Lub(double value);
-  static bitset Lub(double min, double max);
-  static bitset ExpandInternals(bitset bits);
-
-  static const char* Name(bitset);
-  static void Print(std::ostream& os, bitset);  // NOLINT
-#ifdef DEBUG
-  static void Print(bitset);
-#endif
-
-  static bitset NumberBits(bitset bits);
-
-  static bool IsBitset(Type* type) {
-    return reinterpret_cast<uintptr_t>(type) & 1;
-  }
-
-  static Type* NewForTesting(bitset bits) { return New(bits); }
-
- private:
-  friend class Type;
-
-  static Type* New(bitset bits) {
-    return reinterpret_cast<Type*>(static_cast<uintptr_t>(bits | 1u));
-  }
-
-  struct Boundary {
-    bitset internal;
-    bitset external;
-    double min;
-  };
-  static const Boundary BoundariesArray[];
-  static inline const Boundary* Boundaries();
-  static inline size_t BoundariesSize();
-};
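
BitsetType leans on the pointer tagging mentioned in the IMPLEMENTATION note: bit 0 distinguishes a bitset smuggled through a Type* from a real heap-allocated TypeBase. The scheme in miniature, standalone, with a plain struct standing in for the heap object:

#include <cstdint>
#include <cstdio>

// Miniature version of the bit-0 tagging used by BitsetType::New/IsBitset:
// a bitset value (which never uses bit 0 itself) is stored in a pointer-typed
// value with the low bit set, so it cannot be confused with an aligned object.
struct Node { int payload; };  // stand-in for a heap-allocated type

Node* EncodeBitset(uint32_t bits) {
  return reinterpret_cast<Node*>(static_cast<uintptr_t>(bits | 1u));
}

bool IsBitset(const Node* p) { return reinterpret_cast<uintptr_t>(p) & 1; }

uint32_t DecodeBitset(const Node* p) {
  return static_cast<uint32_t>(reinterpret_cast<uintptr_t>(p) ^ 1u);
}

int main() {
  Node real_node{42};
  Node* tagged = EncodeBitset(0x10);
  std::printf("%d %d %x\n", IsBitset(tagged), IsBitset(&real_node),
              DecodeBitset(tagged));
  // prints 1 0 10 (aligned objects always have bit 0 clear)
  return 0;
}
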
-
-// -----------------------------------------------------------------------------
-// Superclass for non-bitset types (internal).
-class TypeBase {
- protected:
-  friend class Type;
-
-  enum Kind {
-    kClass,
-    kConstant,
-    kContext,
-    kArray,
-    kFunction,
-    kTuple,
-    kUnion,
-    kRange
-  };
-
-  Kind kind() const { return kind_; }
-  explicit TypeBase(Kind kind) : kind_(kind) {}
-
-  static bool IsKind(Type* type, Kind kind) {
-    if (BitsetType::IsBitset(type)) return false;
-    TypeBase* base = reinterpret_cast<TypeBase*>(type);
-    return base->kind() == kind;
-  }
-
-  // The hacky conversion to/from Type*.
-  static Type* AsType(TypeBase* type) { return reinterpret_cast<Type*>(type); }
-  static TypeBase* FromType(Type* type) {
-    return reinterpret_cast<TypeBase*>(type);
-  }
-
- private:
-  Kind kind_;
-};
-
-// -----------------------------------------------------------------------------
-// Class types.
-
-class ClassType : public TypeBase {
- public:
-  i::Handle<i::Map> Map() { return map_; }
-
- private:
-  friend class Type;
-  friend class BitsetType;
-
-  static Type* New(i::Handle<i::Map> map, Zone* zone) {
-    return AsType(new (zone->New(sizeof(ClassType)))
-                      ClassType(BitsetType::Lub(*map), map));
-  }
-
-  static ClassType* cast(Type* type) {
-    DCHECK(IsKind(type, kClass));
-    return static_cast<ClassType*>(FromType(type));
-  }
-
-  ClassType(BitsetType::bitset bitset, i::Handle<i::Map> map)
-      : TypeBase(kClass), bitset_(bitset), map_(map) {}
-
-  BitsetType::bitset Lub() { return bitset_; }
-
-  BitsetType::bitset bitset_;
-  Handle<i::Map> map_;
-};
-
-// -----------------------------------------------------------------------------
-// Constant types.
-
-class ConstantType : public TypeBase {
- public:
-  i::Handle<i::Object> Value() { return object_; }
-
- private:
-  friend class Type;
-  friend class BitsetType;
-
-  static Type* New(i::Handle<i::Object> value, Zone* zone) {
-    BitsetType::bitset bitset = BitsetType::Lub(*value);
-    return AsType(new (zone->New(sizeof(ConstantType)))
-                      ConstantType(bitset, value));
-  }
-
-  static ConstantType* cast(Type* type) {
-    DCHECK(IsKind(type, kConstant));
-    return static_cast<ConstantType*>(FromType(type));
-  }
-
-  ConstantType(BitsetType::bitset bitset, i::Handle<i::Object> object)
-      : TypeBase(kConstant), bitset_(bitset), object_(object) {}
-
-  BitsetType::bitset Lub() { return bitset_; }
-
-  BitsetType::bitset bitset_;
-  Handle<i::Object> object_;
-};
-// TODO(neis): Also cache value if numerical.
-// TODO(neis): Allow restricting the representation.
-
-// -----------------------------------------------------------------------------
-// Range types.
-
-class RangeType : public TypeBase {
- public:
-  struct Limits {
-    double min;
-    double max;
-    Limits(double min, double max) : min(min), max(max) {}
-    explicit Limits(RangeType* range) : min(range->Min()), max(range->Max()) {}
-    bool IsEmpty();
-    static Limits Empty() { return Limits(1, 0); }
-    static Limits Intersect(Limits lhs, Limits rhs);
-    static Limits Union(Limits lhs, Limits rhs);
-  };
-
-  double Min() { return limits_.min; }
-  double Max() { return limits_.max; }
-
- private:
-  friend class Type;
-  friend class BitsetType;
-  friend class UnionType;
-
-  static Type* New(double min, double max, BitsetType::bitset representation,
-                   Zone* zone) {
-    return New(Limits(min, max), representation, zone);
-  }
-
-  static bool IsInteger(double x) {
-    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
-  }
-
-  static Type* New(Limits lim, BitsetType::bitset representation, Zone* zone) {
-    DCHECK(IsInteger(lim.min) && IsInteger(lim.max));
-    DCHECK(lim.min <= lim.max);
-    DCHECK(REPRESENTATION(representation) == representation);
-    BitsetType::bitset bits =
-        SEMANTIC(BitsetType::Lub(lim.min, lim.max)) | representation;
-
-    return AsType(new (zone->New(sizeof(RangeType))) RangeType(bits, lim));
-  }
-
-  static RangeType* cast(Type* type) {
-    DCHECK(IsKind(type, kRange));
-    return static_cast<RangeType*>(FromType(type));
-  }
-
-  RangeType(BitsetType::bitset bitset, Limits limits)
-      : TypeBase(kRange), bitset_(bitset), limits_(limits) {}
-
-  BitsetType::bitset Lub() { return bitset_; }
-
-  BitsetType::bitset bitset_;
-  Limits limits_;
-};
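
Limits::Empty() above encodes the empty interval as min > max, which pins down what Intersect and Union have to do. A plausible standalone sketch consistent with that convention (not necessarily the exact implementation from types.cc):

#include <algorithm>
#include <cstdio>

// Interval limits with the convention from above: min > max means empty.
struct Limits {
  double min, max;
  bool IsEmpty() const { return min > max; }
};

Limits Intersect(Limits lhs, Limits rhs) {
  return {std::max(lhs.min, rhs.min), std::min(lhs.max, rhs.max)};
}

Limits Union(Limits lhs, Limits rhs) {
  if (lhs.IsEmpty()) return rhs;
  if (rhs.IsEmpty()) return lhs;
  return {std::min(lhs.min, rhs.min), std::max(lhs.max, rhs.max)};
}

int main() {
  Limits a{0, 10}, b{20, 30};
  std::printf("%d\n", Intersect(a, b).IsEmpty());  // 1: disjoint intervals
  Limits u = Union(a, b);
  std::printf("[%g, %g]\n", u.min, u.max);         // [0, 30]
  return 0;
}
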
-
-// -----------------------------------------------------------------------------
-// Context types.
-
-class ContextType : public TypeBase {
- public:
-  Type* Outer() { return outer_; }
-
- private:
-  friend class Type;
-
-  static Type* New(Type* outer, Zone* zone) {
-    return AsType(new (zone->New(sizeof(ContextType))) ContextType(outer));
-  }
-
-  static ContextType* cast(Type* type) {
-    DCHECK(IsKind(type, kContext));
-    return static_cast<ContextType*>(FromType(type));
-  }
-
-  explicit ContextType(Type* outer) : TypeBase(kContext), outer_(outer) {}
-
-  Type* outer_;
-};
-
-// -----------------------------------------------------------------------------
-// Array types.
-
-class ArrayType : public TypeBase {
- public:
-  Type* Element() { return element_; }
-
- private:
-  friend class Type;
-
-  explicit ArrayType(Type* element) : TypeBase(kArray), element_(element) {}
-
-  static Type* New(Type* element, Zone* zone) {
-    return AsType(new (zone->New(sizeof(ArrayType))) ArrayType(element));
-  }
-
-  static ArrayType* cast(Type* type) {
-    DCHECK(IsKind(type, kArray));
-    return static_cast<ArrayType*>(FromType(type));
-  }
-
-  Type* element_;
-};
-
-// -----------------------------------------------------------------------------
-// Superclass for types with variable number of type fields.
-class StructuralType : public TypeBase {
- public:
-  int LengthForTesting() { return Length(); }
-
- protected:
-  friend class Type;
-
-  int Length() { return length_; }
-
-  Type* Get(int i) {
-    DCHECK(0 <= i && i < this->Length());
-    return elements_[i];
-  }
-
-  void Set(int i, Type* type) {
-    DCHECK(0 <= i && i < this->Length());
-    elements_[i] = type;
-  }
-
-  void Shrink(int length) {
-    DCHECK(2 <= length && length <= this->Length());
-    length_ = length;
-  }
-
-  StructuralType(Kind kind, int length, i::Zone* zone)
-      : TypeBase(kind), length_(length) {
-    elements_ = reinterpret_cast<Type**>(zone->New(sizeof(Type*) * length));
-  }
-
- private:
-  int length_;
-  Type** elements_;
-};
-
-// -----------------------------------------------------------------------------
-// Function types.
-
-class FunctionType : public StructuralType {
- public:
-  int Arity() { return this->Length() - 2; }
-  Type* Result() { return this->Get(0); }
-  Type* Receiver() { return this->Get(1); }
-  Type* Parameter(int i) { return this->Get(2 + i); }
-
-  void InitParameter(int i, Type* type) { this->Set(2 + i, type); }
-
- private:
-  friend class Type;
-
-  FunctionType(Type* result, Type* receiver, int arity, Zone* zone)
-      : StructuralType(kFunction, 2 + arity, zone) {
-    Set(0, result);
-    Set(1, receiver);
-  }
-
-  static Type* New(Type* result, Type* receiver, int arity, Zone* zone) {
-    return AsType(new (zone->New(sizeof(FunctionType)))
-                      FunctionType(result, receiver, arity, zone));
-  }
-
-  static FunctionType* cast(Type* type) {
-    DCHECK(IsKind(type, kFunction));
-    return static_cast<FunctionType*>(FromType(type));
-  }
-};
-
-// -----------------------------------------------------------------------------
-// Tuple types.
-
-class TupleType : public StructuralType {
- public:
-  int Arity() { return this->Length(); }
-  Type* Element(int i) { return this->Get(i); }
-
-  void InitElement(int i, Type* type) { this->Set(i, type); }
-
- private:
-  friend class Type;
-
-  TupleType(int length, Zone* zone) : StructuralType(kTuple, length, zone) {}
-
-  static Type* New(int length, Zone* zone) {
-    return AsType(new (zone->New(sizeof(TupleType))) TupleType(length, zone));
-  }
-
-  static TupleType* cast(Type* type) {
-    DCHECK(IsKind(type, kTuple));
-    return static_cast<TupleType*>(FromType(type));
-  }
-};
-
-// -----------------------------------------------------------------------------
-// Union types (internal).
-// A union is a structured type with the following invariants:
-// - its length is at least 2
-// - at most one field is a bitset, and it must go into index 0
-// - no field is a union
-// - no field is a subtype of any other field
-class UnionType : public StructuralType {
- private:
-  friend Type;
-  friend BitsetType;
-
-  UnionType(int length, Zone* zone) : StructuralType(kUnion, length, zone) {}
-
-  static Type* New(int length, Zone* zone) {
-    return AsType(new (zone->New(sizeof(UnionType))) UnionType(length, zone));
-  }
-
-  static UnionType* cast(Type* type) {
-    DCHECK(IsKind(type, kUnion));
-    return static_cast<UnionType*>(FromType(type));
-  }
-
-  bool Wellformed();
-};
-
-class Type {
- public:
-  typedef BitsetType::bitset bitset;  // Internal
-
-// Constructors.
-#define DEFINE_TYPE_CONSTRUCTOR(type, value) \
-  static Type* type() { return BitsetType::New(BitsetType::k##type); }
-  PROPER_BITSET_TYPE_LIST(DEFINE_TYPE_CONSTRUCTOR)
-#undef DEFINE_TYPE_CONSTRUCTOR
-
-  static Type* SignedSmall() {
-    return BitsetType::New(BitsetType::SignedSmall());
-  }
-  static Type* UnsignedSmall() {
-    return BitsetType::New(BitsetType::UnsignedSmall());
-  }
-
-  static Type* Class(i::Handle<i::Map> map, Zone* zone) {
-    return ClassType::New(map, zone);
-  }
-  static Type* Constant(i::Handle<i::Object> value, Zone* zone) {
-    return ConstantType::New(value, zone);
-  }
-  static Type* Range(double min, double max, Zone* zone) {
-    return RangeType::New(min, max, REPRESENTATION(BitsetType::kTagged |
-                                                   BitsetType::kUntaggedNumber),
-                          zone);
-  }
-  static Type* Context(Type* outer, Zone* zone) {
-    return ContextType::New(outer, zone);
-  }
-  static Type* Array(Type* element, Zone* zone) {
-    return ArrayType::New(element, zone);
-  }
-  static Type* Function(Type* result, Type* receiver, int arity, Zone* zone) {
-    return FunctionType::New(result, receiver, arity, zone);
-  }
-  static Type* Function(Type* result, Zone* zone) {
-    return Function(result, Any(), 0, zone);
-  }
-  static Type* Function(Type* result, Type* param0, Zone* zone) {
-    Type* function = Function(result, Any(), 1, zone);
-    function->AsFunction()->InitParameter(0, param0);
-    return function;
-  }
-  static Type* Function(Type* result, Type* param0, Type* param1, Zone* zone) {
-    Type* function = Function(result, Any(), 2, zone);
-    function->AsFunction()->InitParameter(0, param0);
-    function->AsFunction()->InitParameter(1, param1);
-    return function;
-  }
-  static Type* Function(Type* result, Type* param0, Type* param1, Type* param2,
-                        Zone* zone) {
-    Type* function = Function(result, Any(), 3, zone);
-    function->AsFunction()->InitParameter(0, param0);
-    function->AsFunction()->InitParameter(1, param1);
-    function->AsFunction()->InitParameter(2, param2);
-    return function;
-  }
-  static Type* Function(Type* result, int arity, Type** params, Zone* zone) {
-    Type* function = Function(result, Any(), arity, zone);
-    for (int i = 0; i < arity; ++i) {
-      function->AsFunction()->InitParameter(i, params[i]);
-    }
-    return function;
-  }
-  static Type* Tuple(Type* first, Type* second, Type* third, Zone* zone) {
-    Type* tuple = TupleType::New(3, zone);
-    tuple->AsTuple()->InitElement(0, first);
-    tuple->AsTuple()->InitElement(1, second);
-    tuple->AsTuple()->InitElement(2, third);
-    return tuple;
-  }
-
-#define CONSTRUCT_SIMD_TYPE(NAME, Name, name, lane_count, lane_type) \
-  static Type* Name(Isolate* isolate, Zone* zone);
-  SIMD128_TYPES(CONSTRUCT_SIMD_TYPE)
-#undef CONSTRUCT_SIMD_TYPE
-
-  static Type* Union(Type* type1, Type* type2, Zone* zone);
-  static Type* Intersect(Type* type1, Type* type2, Zone* zone);
-
-  static Type* Of(double value, Zone* zone) {
-    return BitsetType::New(BitsetType::ExpandInternals(BitsetType::Lub(value)));
-  }
-  static Type* Of(i::Object* value, Zone* zone) {
-    return BitsetType::New(BitsetType::ExpandInternals(BitsetType::Lub(value)));
-  }
-  static Type* Of(i::Handle<i::Object> value, Zone* zone) {
-    return Of(*value, zone);
-  }
-
-  // Extraction of components.
-  static Type* Representation(Type* t, Zone* zone);
-  static Type* Semantic(Type* t, Zone* zone);
-
-  // Predicates.
-  bool IsInhabited() { return BitsetType::IsInhabited(this->BitsetLub()); }
-
-  bool Is(Type* that) { return this == that || this->SlowIs(that); }
-  bool Maybe(Type* that);
-  bool Equals(Type* that) { return this->Is(that) && that->Is(this); }
-
-  // Equivalent to Constant(val)->Is(this), but avoiding allocation.
-  bool Contains(i::Object* val);
-  bool Contains(i::Handle<i::Object> val) { return this->Contains(*val); }
-
-  // State-dependent versions of the above that consider subtyping between
-  // a constant and its map class.
-  static Type* NowOf(i::Object* value, Zone* zone);
-  static Type* NowOf(i::Handle<i::Object> value, Zone* zone) {
-    return NowOf(*value, zone);
-  }
-  bool NowIs(Type* that);
-  bool NowContains(i::Object* val);
-  bool NowContains(i::Handle<i::Object> val) { return this->NowContains(*val); }
-
-  bool NowStable();
-
-  // Inspection.
-  bool IsRange() { return IsKind(TypeBase::kRange); }
-  bool IsClass() { return IsKind(TypeBase::kClass); }
-  bool IsConstant() { return IsKind(TypeBase::kConstant); }
-  bool IsContext() { return IsKind(TypeBase::kContext); }
-  bool IsArray() { return IsKind(TypeBase::kArray); }
-  bool IsFunction() { return IsKind(TypeBase::kFunction); }
-  bool IsTuple() { return IsKind(TypeBase::kTuple); }
-
-  ClassType* AsClass() { return ClassType::cast(this); }
-  ConstantType* AsConstant() { return ConstantType::cast(this); }
-  RangeType* AsRange() { return RangeType::cast(this); }
-  ContextType* AsContext() { return ContextType::cast(this); }
-  ArrayType* AsArray() { return ArrayType::cast(this); }
-  FunctionType* AsFunction() { return FunctionType::cast(this); }
-  TupleType* AsTuple() { return TupleType::cast(this); }
-
-  // Minimum and maximum of a numeric type.
-  // These functions do not distinguish between -0 and +0.  If the type equals
-  // kNaN, they return NaN; otherwise kNaN is ignored.  Only call these
-  // functions on subtypes of Number.
-  double Min();
-  double Max();
-
-  // Extracts a range from the type: if the type is a range or a union
-  // containing a range, that range is returned; otherwise, NULL is returned.
-  Type* GetRange();
-
-  static bool IsInteger(i::Object* x);
-  static bool IsInteger(double x) {
-    return nearbyint(x) == x && !i::IsMinusZero(x);  // Allows for infinities.
-  }
-
-  int NumClasses();
-  int NumConstants();
-
-  template <class T>
-  class Iterator {
-   public:
-    bool Done() const { return index_ < 0; }
-    i::Handle<T> Current();
-    void Advance();
-
-   private:
-    friend class Type;
-
-    Iterator() : index_(-1) {}
-    explicit Iterator(Type* type) : type_(type), index_(-1) { Advance(); }
-
-    inline bool matches(Type* type);
-    inline Type* get_type();
-
-    Type* type_;
-    int index_;
-  };
-
-  Iterator<i::Map> Classes() {
-    if (this->IsBitset()) return Iterator<i::Map>();
-    return Iterator<i::Map>(this);
-  }
-  Iterator<i::Object> Constants() {
-    if (this->IsBitset()) return Iterator<i::Object>();
-    return Iterator<i::Object>(this);
-  }
-
-  // Printing.
-
-  enum PrintDimension { BOTH_DIMS, SEMANTIC_DIM, REPRESENTATION_DIM };
-
-  void PrintTo(std::ostream& os, PrintDimension dim = BOTH_DIMS);  // NOLINT
-
-#ifdef DEBUG
-  void Print();
-#endif
-
-  // Helpers for testing.
-  bool IsBitsetForTesting() { return IsBitset(); }
-  bool IsUnionForTesting() { return IsUnion(); }
-  bitset AsBitsetForTesting() { return AsBitset(); }
-  UnionType* AsUnionForTesting() { return AsUnion(); }
-
- private:
-  // Friends.
-  template <class>
-  friend class Iterator;
-  friend BitsetType;
-  friend UnionType;
-
-  // Internal inspection.
-  bool IsKind(TypeBase::Kind kind) { return TypeBase::IsKind(this, kind); }
-
-  bool IsNone() { return this == None(); }
-  bool IsAny() { return this == Any(); }
-  bool IsBitset() { return BitsetType::IsBitset(this); }
-  bool IsUnion() { return IsKind(TypeBase::kUnion); }
-
-  bitset AsBitset() {
-    DCHECK(this->IsBitset());
-    return reinterpret_cast<BitsetType*>(this)->Bitset();
-  }
-  UnionType* AsUnion() { return UnionType::cast(this); }
-
-  bitset Representation();
-
-  // Auxiliary functions.
-  bool SemanticMaybe(Type* that);
-
-  bitset BitsetGlb() { return BitsetType::Glb(this); }
-  bitset BitsetLub() { return BitsetType::Lub(this); }
-
-  bool SlowIs(Type* that);
-  bool SemanticIs(Type* that);
-
-  static bool Overlap(RangeType* lhs, RangeType* rhs);
-  static bool Contains(RangeType* lhs, RangeType* rhs);
-  static bool Contains(RangeType* range, ConstantType* constant);
-  static bool Contains(RangeType* range, i::Object* val);
-
-  static int UpdateRange(Type* type, UnionType* result, int size, Zone* zone);
-
-  static RangeType::Limits IntersectRangeAndBitset(Type* range, Type* bits,
-                                                   Zone* zone);
-  static RangeType::Limits ToLimits(bitset bits, Zone* zone);
-
-  bool SimplyEquals(Type* that);
-
-  static int AddToUnion(Type* type, UnionType* result, int size, Zone* zone);
-  static int IntersectAux(Type* type, Type* other, UnionType* result, int size,
-                          RangeType::Limits* limits, Zone* zone);
-  static Type* NormalizeUnion(Type* unioned, int size, Zone* zone);
-  static Type* NormalizeRangeAndBitset(Type* range, bitset* bits, Zone* zone);
-};
-
-// -----------------------------------------------------------------------------
-// Type bounds. A simple struct to represent a pair of lower/upper types.
-
-struct Bounds {
-  Type* lower;
-  Type* upper;
-
-  Bounds()
-      :  // Make sure accessing uninitialized bounds crashes big-time.
-        lower(nullptr),
-        upper(nullptr) {}
-  explicit Bounds(Type* t) : lower(t), upper(t) {}
-  Bounds(Type* l, Type* u) : lower(l), upper(u) { DCHECK(lower->Is(upper)); }
-
-  // Unrestricted bounds.
-  static Bounds Unbounded() { return Bounds(Type::None(), Type::Any()); }
-
-  // Meet: both b1 and b2 are known to hold.
-  static Bounds Both(Bounds b1, Bounds b2, Zone* zone) {
-    Type* lower = Type::Union(b1.lower, b2.lower, zone);
-    Type* upper = Type::Intersect(b1.upper, b2.upper, zone);
-    // Lower bounds are considered approximate, correct as necessary.
-    if (!lower->Is(upper)) lower = upper;
-    return Bounds(lower, upper);
-  }
-
-  // Join: either b1 or b2 is known to hold.
-  static Bounds Either(Bounds b1, Bounds b2, Zone* zone) {
-    Type* lower = Type::Intersect(b1.lower, b2.lower, zone);
-    Type* upper = Type::Union(b1.upper, b2.upper, zone);
-    return Bounds(lower, upper);
-  }
-
-  static Bounds NarrowLower(Bounds b, Type* t, Zone* zone) {
-    Type* lower = Type::Union(b.lower, t, zone);
-    // Lower bounds are considered approximate, correct as necessary.
-    if (!lower->Is(b.upper)) lower = b.upper;
-    return Bounds(lower, b.upper);
-  }
-  static Bounds NarrowUpper(Bounds b, Type* t, Zone* zone) {
-    Type* lower = b.lower;
-    Type* upper = Type::Intersect(b.upper, t, zone);
-    // Lower bounds are considered approximate, correct as necessary.
-    if (!lower->Is(upper)) lower = upper;
-    return Bounds(lower, upper);
-  }
-
-  bool Narrows(Bounds that) {
-    return that.lower->Is(this->lower) && this->upper->Is(that.upper);
-  }
-};
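
Both() and Either() above are the meet and join of bound pairs: knowing both facts tightens the bounds (join the lowers, meet the uppers), while knowing only one of them loosens them. With the same set-as-type toy model as before (illustrative only; the real operations work on Type*):

#include <algorithm>
#include <cstdio>
#include <iterator>
#include <set>

// Toy model of Bounds::Both / Bounds::Either, with a "type" modelled as its
// set of values.
using ValueSet = std::set<int>;

ValueSet Union(const ValueSet& a, const ValueSet& b) {
  ValueSet r;
  std::set_union(a.begin(), a.end(), b.begin(), b.end(),
                 std::inserter(r, r.end()));
  return r;
}

ValueSet Intersect(const ValueSet& a, const ValueSet& b) {
  ValueSet r;
  std::set_intersection(a.begin(), a.end(), b.begin(), b.end(),
                        std::inserter(r, r.end()));
  return r;
}

struct Bounds { ValueSet lower, upper; };

// Both facts hold: lowers are joined, uppers are met (bounds get tighter).
// (The real code then corrects lower if it no longer fits under upper.)
Bounds Both(const Bounds& b1, const Bounds& b2) {
  return {Union(b1.lower, b2.lower), Intersect(b1.upper, b2.upper)};
}

// Either fact holds: lowers are met, uppers are joined (bounds get looser).
Bounds Either(const Bounds& b1, const Bounds& b2) {
  return {Intersect(b1.lower, b2.lower), Union(b1.upper, b2.upper)};
}

int main() {
  Bounds b1{{1}, {1, 2, 3}};
  Bounds b2{{2}, {2, 3, 4}};
  std::printf("Both upper size:   %zu\n", Both(b1, b2).upper.size());    // 2
  std::printf("Either upper size: %zu\n", Either(b1, b2).upper.size());  // 4
  return 0;
}
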
-
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_TYPES_H_
diff --git a/src/unicode-decoder.h b/src/unicode-decoder.h
index c030841..35d23a2 100644
--- a/src/unicode-decoder.h
+++ b/src/unicode-decoder.h
@@ -7,10 +7,11 @@
 
 #include <sys/types.h>
 #include "src/globals.h"
+#include "src/utils.h"
 
 namespace unibrow {
 
-class Utf8DecoderBase {
+class V8_EXPORT_PRIVATE Utf8DecoderBase {
  public:
   // Initialization done in subclass.
   inline Utf8DecoderBase();
diff --git a/src/unicode.cc b/src/unicode.cc
index db98be8..015f8a2 100644
--- a/src/unicode.cc
+++ b/src/unicode.cc
@@ -190,8 +190,7 @@
   }
 }
 
-
-static inline size_t NonASCIISequenceLength(byte first) {
+static inline uint8_t NonASCIISequenceLength(byte first) {
   // clang-format off
   static const uint8_t lengths[256] = {
       // The first 128 entries correspond to ASCII characters.
@@ -229,80 +228,137 @@
 // This method decodes an UTF-8 value according to RFC 3629.
 uchar Utf8::CalculateValue(const byte* str, size_t max_length, size_t* cursor) {
   size_t length = NonASCIISequenceLength(str[0]);
-  if (length == 0 || max_length < length) {
-    *cursor += 1;
-    return kBadChar;
+
+  // Check continuation characters.
+  size_t max_count = std::min(length, max_length);
+  size_t count = 1;
+  while (count < max_count && IsContinuationCharacter(str[count])) {
+    count++;
   }
-  if (length == 2) {
-    if (!IsContinuationCharacter(str[1])) {
-      *cursor += 1;
-      return kBadChar;
-    }
-    *cursor += 2;
-    return ((str[0] << 6) + str[1]) - 0x00003080;
-  }
+  *cursor += count;
+
+  // There must be enough continuation characters.
+  if (count != length) return kBadChar;
+
+  // Check overly long sequences & other conditions.
   if (length == 3) {
-    switch (str[0]) {
-      case 0xE0:
-        // Overlong three-byte sequence.
-        if (str[1] < 0xA0 || str[1] > 0xBF) {
-          *cursor += 1;
-          return kBadChar;
-        }
-        break;
-      case 0xED:
-        // High and low surrogate halves.
-        if (str[1] < 0x80 || str[1] > 0x9F) {
-          *cursor += 1;
-          return kBadChar;
-        }
-        break;
-      default:
-        if (!IsContinuationCharacter(str[1])) {
-          *cursor += 1;
-          return kBadChar;
-        }
-    }
-    if (!IsContinuationCharacter(str[2])) {
-      *cursor += 1;
+    if (str[0] == 0xE0 && (str[1] < 0xA0 || str[1] > 0xBF)) {
+      // Overlong three-byte sequence?
+      return kBadChar;
+    } else if (str[0] == 0xED && (str[1] < 0x80 || str[1] > 0x9F)) {
+      // High and low surrogate halves?
       return kBadChar;
     }
-    *cursor += 3;
-    return ((str[0] << 12) + (str[1] << 6) + str[2]) - 0x000E2080;
-  }
-  DCHECK(length == 4);
-  switch (str[0]) {
-    case 0xF0:
+  } else if (length == 4) {
+    if (str[0] == 0xF0 && (str[1] < 0x90 || str[1] > 0xBF)) {
       // Overlong four-byte sequence.
-      if (str[1] < 0x90 || str[1] > 0xBF) {
-        *cursor += 1;
-        return kBadChar;
-      }
-      break;
-    case 0xF4:
+      return kBadChar;
+    } else if (str[0] == 0xF4 && (str[1] < 0x80 || str[1] > 0x8F)) {
       // Code points outside of the unicode range.
-      if (str[1] < 0x80 || str[1] > 0x8F) {
-        *cursor += 1;
-        return kBadChar;
-      }
-      break;
-    default:
-      if (!IsContinuationCharacter(str[1])) {
-        *cursor += 1;
-        return kBadChar;
-      }
+      return kBadChar;
+    }
   }
-  if (!IsContinuationCharacter(str[2])) {
-    *cursor += 1;
+
+  // All errors have been handled, so we only have to assemble the result.
+  switch (length) {
+    case 1:
+      return str[0];
+    case 2:
+      return ((str[0] << 6) + str[1]) - 0x00003080;
+    case 3:
+      return ((str[0] << 12) + (str[1] << 6) + str[2]) - 0x000E2080;
+    case 4:
+      return ((str[0] << 18) + (str[1] << 12) + (str[2] << 6) + str[3]) -
+             0x03C82080;
+  }
+
+  UNREACHABLE();
+  return kBadChar;
+}
+
+uchar Utf8::ValueOfIncremental(byte next, Utf8IncrementalBuffer* buffer) {
+  DCHECK_NOT_NULL(buffer);
+
+  // The common case: 1-byte Utf8 (and no incomplete char in the buffer)
+  if (V8_LIKELY(next <= kMaxOneByteChar && *buffer == 0)) {
+    return static_cast<uchar>(next);
+  }
+
+  if (*buffer == 0) {
+    // We're at the start of a new character.
+    uint32_t kind = NonASCIISequenceLength(next);
+    if (kind >= 2 && kind <= 4) {
+      // Start of 2..4 byte character, and no buffer.
+
+      // The mask for the lower bits depends on the kind, and is
+      // 0x1F, 0x0F, 0x07 for kinds 2, 3, 4 respectively. We can get that
+      // with one shift.
+      uint8_t mask = 0x7f >> kind;
+
+      // Store the kind in the top nibble, and kind - 1 (i.e., remaining bytes)
+      // in 2nd nibble, and the value in the bottom three bytes. The 2nd
+      // nibble is intended as a counter of how many bytes are still needed.
+      *buffer = kind << 28 | (kind - 1) << 24 | (next & mask);
+      return kIncomplete;
+    } else {
+      // No buffer, and not the start of a 1-byte char (handled at the
+      // beginning), and not the start of a 2..4 byte char? Bad char.
+      *buffer = 0;
+      return kBadChar;
+    }
+  } else if (*buffer <= 0xff) {
+    // We have one unprocessed byte left (from the last else case in this if
+    // statement).
+    uchar previous = *buffer;
+    *buffer = 0;
+    uchar t = ValueOfIncremental(previous, buffer);
+    if (t == kIncomplete) {
+      // If we have an incomplete character, process both the previous and the
+      // next byte at once.
+      return ValueOfIncremental(next, buffer);
+    } else {
+      // Otherwise, process the previous byte and save the next byte for next
+      // time.
+      DCHECK_EQ(0, *buffer);
+      *buffer = next;
+      return t;
+    }
+  } else if (IsContinuationCharacter(next)) {
+    // We're inside of a character, as described by buffer.
+
+    // How many bytes (excluding this one) do we still expect?
+    uint8_t bytes_expected = *buffer >> 28;
+    uint8_t bytes_left = (*buffer >> 24) & 0x0f;
+    bytes_left--;
+    // Update the value.
+    uint32_t value = ((*buffer & 0xffffff) << 6) | (next & 0x3F);
+    if (bytes_left) {
+      *buffer = (bytes_expected << 28 | bytes_left << 24 | value);
+      return kIncomplete;
+    } else {
+      *buffer = 0;
+      bool sequence_was_too_long = (bytes_expected == 2 && value < 0x80) ||
+                                   (bytes_expected == 3 && value < 0x800);
+      return sequence_was_too_long ? kBadChar : value;
+    }
+  } else {
+    // Within a character, but not a continuation character? Then the
+    // previous char was a bad char. But we need to save the current
+    // one.
+    *buffer = next;
     return kBadChar;
   }
-  if (!IsContinuationCharacter(str[3])) {
-    *cursor += 1;
-    return kBadChar;
+}
+
+uchar Utf8::ValueOfIncrementalFinish(Utf8IncrementalBuffer* buffer) {
+  DCHECK_NOT_NULL(buffer);
+  if (*buffer == 0) {
+    return kBufferEmpty;
+  } else {
+    // Process left-over chars. An incomplete char at the end maps to kBadChar.
+    uchar t = ValueOfIncremental(0, buffer);
+    return (t == kIncomplete) ? kBadChar : t;
   }
-  *cursor += 4;
-  return ((str[0] << 18) + (str[1] << 12) + (str[2] << 6) + str[3]) -
-         0x03C82080;
 }
 
 bool Utf8::Validate(const byte* bytes, size_t length) {
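
The incremental decoder introduced above packs its whole state into one uint32_t: the sequence length in the top nibble, the count of bytes still expected in the next nibble, and the partial code point in the low 24 bits. A stripped-down standalone walk-through of that packing for a single well-formed 3-byte sequence (the 1-byte fast path, the pending-byte case, and error handling are all elided; names are illustrative):

#include <cstdint>
#include <cstdio>

// Illustrative re-derivation of the Utf8IncrementalBuffer layout described in
// the comments above: kind (sequence length) in bits 28..31, bytes still
// needed in bits 24..27, partial value in bits 0..23. Happy path only.
static const uint32_t kIncompleteSentinel = 0xFFFFFFFC;  // like kIncomplete

uint32_t StartSequence(uint8_t lead_byte, uint32_t kind /* 2..4 */) {
  uint8_t mask = 0x7f >> kind;  // 0x1F, 0x0F, 0x07 for kinds 2, 3, 4
  return kind << 28 | (kind - 1) << 24 | (lead_byte & mask);
}

uint32_t AddContinuation(uint8_t next, uint32_t* buffer) {
  uint32_t bytes_expected = *buffer >> 28;
  uint32_t bytes_left = ((*buffer >> 24) & 0x0f) - 1;
  uint32_t value = ((*buffer & 0xffffff) << 6) | (next & 0x3F);
  if (bytes_left) {
    *buffer = bytes_expected << 28 | bytes_left << 24 | value;
    return kIncompleteSentinel;
  }
  *buffer = 0;
  return value;  // complete code point (overlong-sequence checks elided)
}

int main() {
  // U+20AC (the euro sign) is encoded as E2 82 AC.
  uint32_t buffer = StartSequence(0xE2, 3);
  AddContinuation(0x82, &buffer);                      // still incomplete
  uint32_t code_point = AddContinuation(0xAC, &buffer);
  std::printf("U+%04X\n", code_point);                 // prints U+20AC
  return 0;
}
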
diff --git a/src/unicode.h b/src/unicode.h
index 35717bc..1299a8f 100644
--- a/src/unicode.h
+++ b/src/unicode.h
@@ -141,6 +141,8 @@
   // The unicode replacement character, used to signal invalid unicode
   // sequences (e.g. an orphan surrogate) when converting to a UTF-8 encoding.
   static const uchar kBadChar = 0xFFFD;
+  static const uchar kBufferEmpty = 0x0;
+  static const uchar kIncomplete = 0xFFFFFFFC;  // any non-valid code point.
   static const unsigned kMaxEncodedSize   = 4;
   static const unsigned kMaxOneByteChar   = 0x7f;
   static const unsigned kMaxTwoByteChar   = 0x7ff;
@@ -156,6 +158,11 @@
   static const unsigned kMax16BitCodeUnitSize  = 3;
   static inline uchar ValueOf(const byte* str, size_t length, size_t* cursor);
 
+  typedef uint32_t Utf8IncrementalBuffer;
+  static uchar ValueOfIncremental(byte next_byte,
+                                  Utf8IncrementalBuffer* buffer);
+  static uchar ValueOfIncrementalFinish(Utf8IncrementalBuffer* buffer);
+
   // Excludes non-characters from the set of valid code points.
   static inline bool IsValidCharacter(uchar c);
 
diff --git a/src/utils.cc b/src/utils.cc
index 16b5b7c..ef640c3 100644
--- a/src/utils.cc
+++ b/src/utils.cc
@@ -387,8 +387,8 @@
   }
 }
 
-
-MemCopyUint8Function memcopy_uint8_function = &MemCopyUint8Wrapper;
+V8_EXPORT_PRIVATE MemCopyUint8Function memcopy_uint8_function =
+    &MemCopyUint8Wrapper;
 MemCopyUint16Uint8Function memcopy_uint16_uint8_function =
     &MemCopyUint16Uint8Wrapper;
 // Defined in codegen-arm.cc.
@@ -398,7 +398,8 @@
     Isolate* isolate, MemCopyUint16Uint8Function stub);
 
 #elif V8_OS_POSIX && V8_HOST_ARCH_MIPS
-MemCopyUint8Function memcopy_uint8_function = &MemCopyUint8Wrapper;
+V8_EXPORT_PRIVATE MemCopyUint8Function memcopy_uint8_function =
+    &MemCopyUint8Wrapper;
 // Defined in codegen-mips.cc.
 MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
                                                 MemCopyUint8Function stub);
diff --git a/src/utils.h b/src/utils.h
index 8eca392..314ea9b 100644
--- a/src/utils.h
+++ b/src/utils.h
@@ -234,6 +234,10 @@
 }
 
 inline double Pow(double x, double y) {
+  if (y == 0.0) return 1.0;
+  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
+    return std::numeric_limits<double>::quiet_NaN();
+  }
 #if (defined(__MINGW64_VERSION_MAJOR) &&                              \
      (!defined(__MINGW64_VERSION_RC) || __MINGW64_VERSION_RC < 1)) || \
     defined(V8_OS_AIX)
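
The early-outs added to Pow exist because ECMAScript and the C library disagree in a few corners: C's pow() returns 1 for pow(±1, ±Infinity) and pow(1, NaN), while JavaScript exponentiation requires NaN there (and 1 whenever the exponent is ±0). A standalone comparison, where JsPow is just a local copy of the checks above wrapped around std::pow:

#include <cmath>
#include <cstdio>
#include <limits>

// JsPow mirrors the checks added above, then defers to the platform pow().
// Shown side by side with std::pow to make the behavioural difference visible.
double JsPow(double x, double y) {
  if (y == 0.0) return 1.0;
  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
    return std::numeric_limits<double>::quiet_NaN();
  }
  return std::pow(x, y);
}

int main() {
  const double kInf = std::numeric_limits<double>::infinity();
  std::printf("std::pow(-1, inf) = %g, JsPow(-1, inf) = %g\n",
              std::pow(-1.0, kInf), JsPow(-1.0, kInf));  // 1 vs nan
  std::printf("std::pow(1, nan)  = %g, JsPow(1, nan)  = %g\n",
              std::pow(1.0, std::nan("")), JsPow(1.0, std::nan("")));  // 1 vs nan
  return 0;
}
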
@@ -433,7 +437,7 @@
 const int kMinComplexMemCopy = 64;
 
 // Copy memory area. No restrictions.
-void MemMove(void* dest, const void* src, size_t size);
+V8_EXPORT_PRIVATE void MemMove(void* dest, const void* src, size_t size);
 typedef void (*MemMoveFunction)(void* dest, const void* src, size_t size);
 
 // Keep the distinction of "move" vs. "copy" for the benefit of other
@@ -444,7 +448,7 @@
 #elif defined(V8_HOST_ARCH_ARM)
 typedef void (*MemCopyUint8Function)(uint8_t* dest, const uint8_t* src,
                                      size_t size);
-extern MemCopyUint8Function memcopy_uint8_function;
+V8_EXPORT_PRIVATE extern MemCopyUint8Function memcopy_uint8_function;
 V8_INLINE void MemCopyUint8Wrapper(uint8_t* dest, const uint8_t* src,
                                    size_t chars) {
   memcpy(dest, src, chars);
@@ -455,7 +459,8 @@
   (*memcopy_uint8_function)(reinterpret_cast<uint8_t*>(dest),
                             reinterpret_cast<const uint8_t*>(src), size);
 }
-V8_INLINE void MemMove(void* dest, const void* src, size_t size) {
+V8_EXPORT_PRIVATE V8_INLINE void MemMove(void* dest, const void* src,
+                                         size_t size) {
   memmove(dest, src, size);
 }
 
@@ -473,7 +478,7 @@
 #elif defined(V8_HOST_ARCH_MIPS)
 typedef void (*MemCopyUint8Function)(uint8_t* dest, const uint8_t* src,
                                      size_t size);
-extern MemCopyUint8Function memcopy_uint8_function;
+V8_EXPORT_PRIVATE extern MemCopyUint8Function memcopy_uint8_function;
 V8_INLINE void MemCopyUint8Wrapper(uint8_t* dest, const uint8_t* src,
                                    size_t chars) {
   memcpy(dest, src, chars);
@@ -484,7 +489,8 @@
   (*memcopy_uint8_function)(reinterpret_cast<uint8_t*>(dest),
                             reinterpret_cast<const uint8_t*>(src), size);
 }
-V8_INLINE void MemMove(void* dest, const void* src, size_t size) {
+V8_EXPORT_PRIVATE V8_INLINE void MemMove(void* dest, const void* src,
+                                         size_t size) {
   memmove(dest, src, size);
 }
 #else
@@ -492,7 +498,8 @@
 V8_INLINE void MemCopy(void* dest, const void* src, size_t size) {
   memcpy(dest, src, size);
 }
-V8_INLINE void MemMove(void* dest, const void* src, size_t size) {
+V8_EXPORT_PRIVATE V8_INLINE void MemMove(void* dest, const void* src,
+                                         size_t size) {
   memmove(dest, src, size);
 }
 const int kMinComplexMemCopy = 16 * kPointerSize;
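The V8_EXPORT_PRIVATE annotations on MemMove and memcopy_uint8_function keep
these internal symbols reachable from test binaries when V8 is built as a
shared library (component build). Roughly, such a macro expands to the
platform's export/import attribute depending on whether the component is being
built or consumed; the sketch below is a simplified illustration under that
assumption, not the actual definition from V8's headers:

    #include <cstddef>
    #include <cstring>

    // Simplified illustration only; the real V8_EXPORT_PRIVATE definition
    // lives in V8's base headers and differs in detail.
    #if defined(_WIN32)
    #if defined(BUILDING_V8_SHARED)
    #define V8_EXPORT_PRIVATE_SKETCH __declspec(dllexport)
    #elif defined(USING_V8_SHARED)
    #define V8_EXPORT_PRIVATE_SKETCH __declspec(dllimport)
    #else
    #define V8_EXPORT_PRIVATE_SKETCH
    #endif
    #else
    // ELF/Mach-O toolchains use symbol visibility instead of dllexport/import.
    #define V8_EXPORT_PRIVATE_SKETCH __attribute__((visibility("default")))
    #endif

    // An internal helper annotated this way stays linkable from a test
    // executable even when the library itself is built as a component.
    V8_EXPORT_PRIVATE_SKETCH void MemMoveSketch(void* dest, const void* src,
                                                size_t size) {
      std::memmove(dest, src, size);
    }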
diff --git a/src/v8.gyp b/src/v8.gyp
index 1adb2fe..9a38247 100644
--- a/src/v8.gyp
+++ b/src/v8.gyp
@@ -34,10 +34,11 @@
     'warmup_script%': "",
     'v8_extra_library_files%': [],
     'v8_experimental_extra_library_files%': [],
+    'v8_enable_inspector%': 0,
     'mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)',
     'mkpeephole_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mkpeephole<(EXECUTABLE_SUFFIX)',
   },
-  'includes': ['../gypfiles/toolchain.gypi', '../gypfiles/features.gypi'],
+  'includes': ['../gypfiles/toolchain.gypi', '../gypfiles/features.gypi', 'inspector/inspector.gypi'],
   'targets': [
     {
       'target_name': 'v8',
@@ -60,12 +61,10 @@
             '..',
           ],
           'defines': [
-            'V8_SHARED',
             'BUILDING_V8_SHARED',
           ],
           'direct_dependent_settings': {
             'defines': [
-              'V8_SHARED',
               'USING_V8_SHARED',
             ],
           },
@@ -163,12 +162,10 @@
         }],
         ['component=="shared_library"', {
           'defines': [
-            'V8_SHARED',
             'BUILDING_V8_SHARED',
           ],
           'direct_dependent_settings': {
             'defines': [
-              'V8_SHARED',
               'USING_V8_SHARED',
             ],
           },
@@ -258,7 +255,6 @@
         ['component=="shared_library"', {
           'defines': [
             'BUILDING_V8_SHARED',
-            'V8_SHARED',
           ],
         }],
       ]
@@ -285,12 +281,10 @@
             }],
             ['component=="shared_library"', {
               'defines': [
-                'V8_SHARED',
                 'BUILDING_V8_SHARED',
               ],
               'direct_dependent_settings': {
                 'defines': [
-                  'V8_SHARED',
                   'USING_V8_SHARED',
                 ],
               },
@@ -451,10 +445,14 @@
         'ast/ast-numbering.h',
         'ast/ast-traversal-visitor.h',
         'ast/ast-type-bounds.h',
+        'ast/ast-types.cc',
+        'ast/ast-types.h',
         'ast/ast-value-factory.cc',
         'ast/ast-value-factory.h',
         'ast/ast.cc',
         'ast/ast.h',
+        'ast/compile-time-value.cc',
+        'ast/compile-time-value.h',
         'ast/context-slot-cache.cc',
         'ast/context-slot-cache.h',
         'ast/modules.cc',
@@ -462,7 +460,6 @@
         'ast/prettyprinter.cc',
         'ast/prettyprinter.h',
         'ast/scopeinfo.cc',
-        'ast/scopeinfo.h',
         'ast/scopes.cc',
         'ast/scopes.h',
         'ast/variables.cc',
@@ -498,12 +495,14 @@
         'builtins/builtins-handler.cc',
         'builtins/builtins-internal.cc',
         'builtins/builtins-interpreter.cc',
+        'builtins/builtins-iterator.cc',
         'builtins/builtins-json.cc',
         'builtins/builtins-math.cc',
         'builtins/builtins-number.cc',
         'builtins/builtins-object.cc',
         'builtins/builtins-proxy.cc',
         'builtins/builtins-reflect.cc',
+        'builtins/builtins-regexp.cc',
         'builtins/builtins-sharedarraybuffer.cc',
         'builtins/builtins-string.cc',
         'builtins/builtins-symbol.cc',
@@ -534,6 +533,8 @@
         'compilation-cache.h',
         'compilation-dependencies.cc',
         'compilation-dependencies.h',
+        'compilation-info.cc',
+        'compilation-info.h',
         'compilation-statistics.cc',
         'compilation-statistics.h',
         'compiler/access-builder.cc',
@@ -583,14 +584,14 @@
         'compiler/effect-control-linearizer.h',
         'compiler/escape-analysis.cc',
         'compiler/escape-analysis.h',
-        "compiler/escape-analysis-reducer.cc",
-        "compiler/escape-analysis-reducer.h",
+        'compiler/escape-analysis-reducer.cc',
+        'compiler/escape-analysis-reducer.h',
         'compiler/frame.cc',
         'compiler/frame.h',
         'compiler/frame-elider.cc',
         'compiler/frame-elider.h',
-        "compiler/frame-states.cc",
-        "compiler/frame-states.h",
+        'compiler/frame-states.cc',
+        'compiler/frame-states.h',
         'compiler/gap-resolver.cc',
         'compiler/gap-resolver.h',
         'compiler/graph-reducer.cc',
@@ -661,6 +662,8 @@
         'compiler/machine-operator-reducer.h',
         'compiler/machine-operator.cc',
         'compiler/machine-operator.h',
+        'compiler/machine-graph-verifier.cc',
+        'compiler/machine-graph-verifier.h',
         'compiler/memory-optimizer.cc',
         'compiler/memory-optimizer.h',
         'compiler/move-optimizer.cc',
@@ -720,10 +723,14 @@
         'compiler/store-store-elimination.h',
         'compiler/tail-call-optimization.cc',
         'compiler/tail-call-optimization.h',
+        'compiler/types.cc',
+        'compiler/types.h',
+        'compiler/type-cache.cc',
+        'compiler/type-cache.h',
         'compiler/type-hint-analyzer.cc',
         'compiler/type-hint-analyzer.h',
-        'compiler/type-hints.cc',
-        'compiler/type-hints.h',
+        'compiler/typed-optimization.cc',
+        'compiler/typed-optimization.h',
         'compiler/typer.cc',
         'compiler/typer.h',
         'compiler/unwinding-info-writer.h',
@@ -949,6 +956,7 @@
         'ic/call-optimization.h',
         'ic/handler-compiler.cc',
         'ic/handler-compiler.h',
+        'ic/handler-configuration.h',
         'ic/ic-inl.h',
         'ic/ic-state.cc',
         'ic/ic-state.h',
@@ -978,6 +986,8 @@
         'interpreter/bytecode-generator.h',
         'interpreter/bytecode-label.cc',
         'interpreter/bytecode-label.h',
+        'interpreter/bytecode-operands.cc',
+        'interpreter/bytecode-operands.h',
         'interpreter/bytecode-peephole-optimizer.cc',
         'interpreter/bytecode-peephole-optimizer.h',
         'interpreter/bytecode-peephole-table.h',
@@ -985,7 +995,6 @@
         'interpreter/bytecode-pipeline.h',
         'interpreter/bytecode-register.cc',
         'interpreter/bytecode-register.h',
-        'interpreter/bytecode-register-allocator.cc',
         'interpreter/bytecode-register-allocator.h',
         'interpreter/bytecode-register-optimizer.cc',
         'interpreter/bytecode-register-optimizer.h',
@@ -1023,6 +1032,9 @@
         'log-utils.h',
         'log.cc',
         'log.h',
+        'lookup-cache-inl.h',
+        'lookup-cache.cc',
+        'lookup-cache.h',
         'lookup.cc',
         'lookup.h',
         'macro-assembler.h',
@@ -1040,6 +1052,8 @@
         'objects.h',
         'ostreams.cc',
         'ostreams.h',
+        'parsing/duplicate-finder.cc',
+        'parsing/duplicate-finder.h',
         'parsing/expression-classifier.h',
         'parsing/func-name-inferrer.cc',
         'parsing/func-name-inferrer.h',
@@ -1091,6 +1105,8 @@
         'profiler/strings-storage.h',
         'profiler/tick-sample.cc',
         'profiler/tick-sample.h',
+        'profiler/tracing-cpu-profiler.cc',
+        'profiler/tracing-cpu-profiler.h',
         'profiler/unbound-queue-inl.h',
         'profiler/unbound-queue.h',
         'property-descriptor.cc',
@@ -1199,15 +1215,13 @@
         'transitions-inl.h',
         'transitions.cc',
         'transitions.h',
-        'type-cache.cc',
-        'type-cache.h',
         'type-feedback-vector-inl.h',
         'type-feedback-vector.cc',
         'type-feedback-vector.h',
+        'type-hints.cc',
+        'type-hints.h',
         'type-info.cc',
         'type-info.h',
-        'types.cc',
-        'types.h',
         'unicode-inl.h',
         'unicode.cc',
         'unicode.h',
@@ -1235,8 +1249,6 @@
         'wasm/ast-decoder.cc',
         'wasm/ast-decoder.h',
         'wasm/decoder.h',
-        'wasm/encoder.cc',
-        'wasm/encoder.h',
         'wasm/leb-helper.h',
         'wasm/module-decoder.cc',
         'wasm/module-decoder.h',
@@ -1253,16 +1265,22 @@
         'wasm/wasm-macro-gen.h',
         'wasm/wasm-module.cc',
         'wasm/wasm-module.h',
+        'wasm/wasm-module-builder.cc',
+        'wasm/wasm-module-builder.h',
         'wasm/wasm-interpreter.cc',
         'wasm/wasm-interpreter.h',
         'wasm/wasm-opcodes.cc',
         'wasm/wasm-opcodes.h',
         'wasm/wasm-result.cc',
         'wasm/wasm-result.h',
-        'zone.cc',
-        'zone.h',
-        'zone-allocator.h',
-        'zone-containers.h',
+        'zone/accounting-allocator.cc',
+        'zone/accounting-allocator.h',
+        'zone/zone-segment.cc',
+        'zone/zone-segment.h',
+        'zone/zone.cc',
+        'zone/zone.h',
+        'zone/zone-allocator.h',
+        'zone/zone-containers.h',
       ],
       'conditions': [
         ['want_separate_host_toolset==1', {
@@ -1399,6 +1417,8 @@
             'ia32/interface-descriptors-ia32.cc',
             'ia32/macro-assembler-ia32.cc',
             'ia32/macro-assembler-ia32.h',
+            'ia32/simulator-ia32.cc',
+            'ia32/simulator-ia32.h',
             'builtins/ia32/builtins-ia32.cc',
             'compiler/ia32/code-generator-ia32.cc',
             'compiler/ia32/instruction-codes-ia32.h',
@@ -1438,6 +1458,8 @@
             'x87/interface-descriptors-x87.cc',
             'x87/macro-assembler-x87.cc',
             'x87/macro-assembler-x87.h',
+            'x87/simulator-x87.cc',
+            'x87/simulator-x87.h',
             'builtins/x87/builtins-x87.cc',
             'compiler/x87/code-generator-x87.cc',
             'compiler/x87/instruction-codes-x87.h',
@@ -1546,9 +1568,15 @@
             'regexp/mips64/regexp-macro-assembler-mips64.h',
           ],
         }],
-        ['v8_target_arch=="x64" or v8_target_arch=="x32"', {
+        ['v8_target_arch=="x64"', {
           'sources': [  ### gcmole(arch:x64) ###
             'builtins/x64/builtins-x64.cc',
+            'compiler/x64/code-generator-x64.cc',
+            'compiler/x64/instruction-codes-x64.h',
+            'compiler/x64/instruction-scheduler-x64.cc',
+            'compiler/x64/instruction-selector-x64.cc',
+            'compiler/x64/unwinding-info-writer-x64.h',
+            'compiler/x64/unwinding-info-writer-x64.cc',
             'crankshaft/x64/lithium-codegen-x64.cc',
             'crankshaft/x64/lithium-codegen-x64.h',
             'crankshaft/x64/lithium-gap-resolver-x64.cc',
@@ -1565,11 +1593,15 @@
             'x64/cpu-x64.cc',
             'x64/deoptimizer-x64.cc',
             'x64/disasm-x64.cc',
+            'x64/eh-frame-x64.cc',
             'x64/frames-x64.cc',
             'x64/frames-x64.h',
             'x64/interface-descriptors-x64.cc',
             'x64/macro-assembler-x64.cc',
             'x64/macro-assembler-x64.h',
+            'x64/simulator-x64.cc',
+            'x64/simulator-x64.h',
+            'x64/sse-instr.h',
             'debug/x64/debug-x64.cc',
             'full-codegen/x64/full-codegen-x64.cc',
             'ic/x64/access-compiler-x64.cc',
@@ -1579,17 +1611,7 @@
             'ic/x64/stub-cache-x64.cc',
             'regexp/x64/regexp-macro-assembler-x64.cc',
             'regexp/x64/regexp-macro-assembler-x64.h',
-          ],
-        }],
-        ['v8_target_arch=="x64"', {
-          'sources': [
-            'compiler/x64/code-generator-x64.cc',
-            'compiler/x64/instruction-codes-x64.h',
-            'compiler/x64/instruction-scheduler-x64.cc',
-            'compiler/x64/instruction-selector-x64.cc',
-            'compiler/x64/unwinding-info-writer-x64.h',
-            'compiler/x64/unwinding-info-writer-x64.cc',
-            'x64/eh-frame-x64.cc',
+            'third_party/valgrind/valgrind.h',
           ],
         }],
         ['v8_target_arch=="ppc" or v8_target_arch=="ppc64"', {
@@ -1691,7 +1713,6 @@
         ['component=="shared_library"', {
           'defines': [
             'BUILDING_V8_SHARED',
-            'V8_SHARED',
           ],
         }],
         ['v8_postmortem_support=="true"', {
@@ -1723,6 +1744,16 @@
             'i18n.h',
           ],
         }],
+        ['v8_enable_inspector==1', {
+          'sources': [
+            '<@(inspector_all_sources)'
+          ],
+          'dependencies': [
+            'inspector/inspector.gyp:protocol_generated_sources',
+            'inspector/inspector.gyp:inspector_injected_script',
+            'inspector/inspector.gyp:inspector_debugger_script',
+          ],
+        }],
         ['OS=="win" and v8_enable_i18n_support==1', {
           'dependencies': [
             '<(icu_gyp_path):icudata',
@@ -1740,8 +1771,6 @@
         '..',
       ],
       'sources': [
-        'base/accounting-allocator.cc',
-        'base/accounting-allocator.h',
         'base/adapters.h',
         'base/atomic-utils.h',
         'base/atomicops.h',
@@ -1775,6 +1804,7 @@
         'base/functional.cc',
         'base/functional.h',
         'base/hashmap.h',
+        'base/hashmap-entry.h',
         'base/ieee754.cc',
         'base/ieee754.h',
         'base/iterator.h',
@@ -2171,17 +2201,16 @@
           'js/regexp.js',
           'js/arraybuffer.js',
           'js/typedarray.js',
-          'js/iterator-prototype.js',
           'js/collection.js',
           'js/weak-collection.js',
           'js/collection-iterator.js',
           'js/promise.js',
           'js/messages.js',
           'js/array-iterator.js',
-          'js/string-iterator.js',
           'js/templates.js',
           'js/spread.js',
           'js/proxy.js',
+          'js/async-await.js',
           'debug/mirrors.js',
           'debug/debug.js',
           'debug/liveedit.js',
@@ -2192,7 +2221,6 @@
           'js/harmony-atomics.js',
           'js/harmony-simd.js',
           'js/harmony-string-padding.js',
-          'js/harmony-async-await.js'
         ],
         'libraries_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
         'libraries_experimental_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
@@ -2202,8 +2230,8 @@
           ['v8_enable_i18n_support==1', {
             'library_files': ['js/i18n.js'],
             'experimental_library_files': [
+              'js/datetime-format-to-parts.js',
               'js/icu-case-mapping.js',
-              'js/intl-extra.js',
              ],
           }],
         ],
@@ -2402,7 +2430,10 @@
         '..',
        ],
       'sources': [
+        'interpreter/bytecode-operands.h',
+        'interpreter/bytecode-operands.cc',
         'interpreter/bytecode-peephole-table.h',
+        'interpreter/bytecode-traits.h',
         'interpreter/bytecodes.h',
         'interpreter/bytecodes.cc',
         'interpreter/mkpeephole.cc'
diff --git a/src/value-serializer.cc b/src/value-serializer.cc
index 0af4838..1d2e36d 100644
--- a/src/value-serializer.cc
+++ b/src/value-serializer.cc
@@ -7,16 +7,19 @@
 #include <type_traits>
 
 #include "src/base/logging.h"
+#include "src/conversions.h"
 #include "src/factory.h"
 #include "src/handles-inl.h"
 #include "src/isolate.h"
 #include "src/objects-inl.h"
 #include "src/objects.h"
+#include "src/transitions.h"
 
 namespace v8 {
 namespace internal {
 
 static const uint32_t kLatestVersion = 9;
+static const int kPretenureThreshold = 100 * KB;
 
 template <typename T>
 static size_t BytesNeededForVarint(T value) {
@@ -82,12 +85,54 @@
   // Regular expression, UTF-8 encoding. byteLength:uint32_t, raw data,
   // flags:uint32_t.
   kRegExp = 'R',
+  // Beginning of a JS map.
+  kBeginJSMap = ';',
+  // End of a JS map. length:uint32_t.
+  kEndJSMap = ':',
+  // Beginning of a JS set.
+  kBeginJSSet = '\'',
+  // End of a JS set. length:uint32_t.
+  kEndJSSet = ',',
+  // Array buffer. byteLength:uint32_t, then raw data.
+  kArrayBuffer = 'B',
+  // Array buffer (transferred). transferID:uint32_t
+  kArrayBufferTransfer = 't',
+  // View into an array buffer.
+  // subtag:ArrayBufferViewTag, byteOffset:uint32_t, byteLength:uint32_t
+  // For typed arrays, byteOffset and byteLength must be divisible by the size
+  // of the element.
+  // Note: kArrayBufferView is special, and should have an ArrayBuffer (or an
+  // ObjectReference to one) serialized just before it. This is a quirk arising
+  // from the previous stack-based implementation.
+  kArrayBufferView = 'V',
+  // Shared array buffer (transferred). transferID:uint32_t
+  kSharedArrayBufferTransfer = 'u',
 };
 
-ValueSerializer::ValueSerializer(Isolate* isolate)
+namespace {
+
+enum class ArrayBufferViewTag : uint8_t {
+  kInt8Array = 'b',
+  kUint8Array = 'B',
+  kUint8ClampedArray = 'C',
+  kInt16Array = 'w',
+  kUint16Array = 'W',
+  kInt32Array = 'd',
+  kUint32Array = 'D',
+  kFloat32Array = 'f',
+  kFloat64Array = 'F',
+  kDataView = '?',
+};
+
+}  // namespace
+
+ValueSerializer::ValueSerializer(Isolate* isolate,
+                                 v8::ValueSerializer::Delegate* delegate)
     : isolate_(isolate),
+      delegate_(delegate),
       zone_(isolate->allocator()),
-      id_map_(isolate->heap(), &zone_) {}
+      id_map_(isolate->heap(), &zone_),
+      array_buffer_transfer_map_(isolate->heap(), &zone_) {}
 
 ValueSerializer::~ValueSerializer() {}
 
@@ -150,6 +195,11 @@
                  reinterpret_cast<const uint8_t*>(chars.end()));
 }
 
+void ValueSerializer::WriteRawBytes(const void* source, size_t length) {
+  const uint8_t* begin = reinterpret_cast<const uint8_t*>(source);
+  buffer_.insert(buffer_.end(), begin, begin + length);
+}
+
 uint8_t* ValueSerializer::ReserveRawBytes(size_t bytes) {
   if (!bytes) return nullptr;
   auto old_size = buffer_.size();
@@ -157,6 +207,20 @@
   return &buffer_[old_size];
 }
 
+void ValueSerializer::WriteUint32(uint32_t value) {
+  WriteVarint<uint32_t>(value);
+}
+
+void ValueSerializer::WriteUint64(uint64_t value) {
+  WriteVarint<uint64_t>(value);
+}
+
+void ValueSerializer::TransferArrayBuffer(uint32_t transfer_id,
+                                          Handle<JSArrayBuffer> array_buffer) {
+  DCHECK(!array_buffer_transfer_map_.Find(array_buffer));
+  array_buffer_transfer_map_.Set(array_buffer, transfer_id);
+}
+
 Maybe<bool> ValueSerializer::WriteObject(Handle<Object> object) {
   if (object->IsSmi()) {
     WriteSmi(Smi::cast(*object));
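The WriteRawBytes/WriteUint32/WriteUint64 helpers added above expose the same
primitives the serializer already uses internally: a one-byte tag followed,
where needed, by base-128 varints for lengths, transfer ids, and counts. A
self-contained sketch of that varint layout (illustrative only, not the
serializer's actual template code):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // 7 payload bits per byte, most significant bit set on all but the last.
    std::vector<uint8_t> EncodeVarint(uint32_t value) {
      std::vector<uint8_t> out;
      do {
        uint8_t byte = value & 0x7f;
        value >>= 7;
        if (value) byte |= 0x80;
        out.push_back(byte);
      } while (value);
      return out;
    }

    uint32_t DecodeVarint(const std::vector<uint8_t>& bytes) {
      uint32_t value = 0;
      int shift = 0;
      for (uint8_t byte : bytes) {
        value |= static_cast<uint32_t>(byte & 0x7f) << shift;
        shift += 7;
        if (!(byte & 0x80)) break;
      }
      return value;
    }

    int main() {
      assert(DecodeVarint(EncodeVarint(0)) == 0);
      assert(DecodeVarint(EncodeVarint(300)) == 300);
      assert(DecodeVarint(EncodeVarint(0xffffffffu)) == 0xffffffffu);
      return 0;
    }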
@@ -172,15 +236,33 @@
     case MUTABLE_HEAP_NUMBER_TYPE:
       WriteHeapNumber(HeapNumber::cast(*object));
       return Just(true);
+    case JS_TYPED_ARRAY_TYPE:
+    case JS_DATA_VIEW_TYPE: {
+      // Despite being JSReceivers, these have their wrapped buffer serialized
+      // first. That makes this logic a little quirky, because it needs to
+      // happen before we assign object IDs.
+      // TODO(jbroman): It may be possible to avoid materializing a typed
+      // array's buffer here.
+      Handle<JSArrayBufferView> view = Handle<JSArrayBufferView>::cast(object);
+      if (!id_map_.Find(view)) {
+        Handle<JSArrayBuffer> buffer(
+            view->IsJSTypedArray()
+                ? Handle<JSTypedArray>::cast(view)->GetBuffer()
+                : handle(JSArrayBuffer::cast(view->buffer()), isolate_));
+        if (!WriteJSReceiver(buffer).FromMaybe(false)) return Nothing<bool>();
+      }
+      return WriteJSReceiver(view);
+    }
     default:
       if (object->IsString()) {
         WriteString(Handle<String>::cast(object));
         return Just(true);
       } else if (object->IsJSReceiver()) {
         return WriteJSReceiver(Handle<JSReceiver>::cast(object));
+      } else {
+        ThrowDataCloneError(MessageTemplate::kDataCloneError, object);
+        return Nothing<bool>();
       }
-      UNIMPLEMENTED();
-      return Nothing<bool>();
   }
 }
 
@@ -267,20 +349,27 @@
 
   // Eliminate callable and exotic objects, which should not be serialized.
   InstanceType instance_type = receiver->map()->instance_type();
-  if (receiver->IsCallable() || instance_type <= LAST_SPECIAL_RECEIVER_TYPE) {
+  if (receiver->IsCallable() || (instance_type <= LAST_SPECIAL_RECEIVER_TYPE &&
+                                 instance_type != JS_SPECIAL_API_OBJECT_TYPE)) {
+    ThrowDataCloneError(MessageTemplate::kDataCloneError, receiver);
     return Nothing<bool>();
   }
 
   // If we are at the end of the stack, abort. This function may recurse.
-  if (StackLimitCheck(isolate_).HasOverflowed()) return Nothing<bool>();
+  STACK_CHECK(isolate_, Nothing<bool>());
 
   HandleScope scope(isolate_);
   switch (instance_type) {
     case JS_ARRAY_TYPE:
       return WriteJSArray(Handle<JSArray>::cast(receiver));
     case JS_OBJECT_TYPE:
-    case JS_API_OBJECT_TYPE:
-      return WriteJSObject(Handle<JSObject>::cast(receiver));
+    case JS_API_OBJECT_TYPE: {
+      Handle<JSObject> js_object = Handle<JSObject>::cast(receiver);
+      return js_object->GetInternalFieldCount() ? WriteHostObject(js_object)
+                                                : WriteJSObject(js_object);
+    }
+    case JS_SPECIAL_API_OBJECT_TYPE:
+      return WriteHostObject(Handle<JSObject>::cast(receiver));
     case JS_DATE_TYPE:
       WriteJSDate(JSDate::cast(*receiver));
       return Just(true);
@@ -289,21 +378,76 @@
     case JS_REGEXP_TYPE:
       WriteJSRegExp(JSRegExp::cast(*receiver));
       return Just(true);
+    case JS_MAP_TYPE:
+      return WriteJSMap(Handle<JSMap>::cast(receiver));
+    case JS_SET_TYPE:
+      return WriteJSSet(Handle<JSSet>::cast(receiver));
+    case JS_ARRAY_BUFFER_TYPE:
+      return WriteJSArrayBuffer(JSArrayBuffer::cast(*receiver));
+    case JS_TYPED_ARRAY_TYPE:
+    case JS_DATA_VIEW_TYPE:
+      return WriteJSArrayBufferView(JSArrayBufferView::cast(*receiver));
     default:
-      UNIMPLEMENTED();
-      break;
+      ThrowDataCloneError(MessageTemplate::kDataCloneError, receiver);
+      return Nothing<bool>();
   }
   return Nothing<bool>();
 }
 
 Maybe<bool> ValueSerializer::WriteJSObject(Handle<JSObject> object) {
+  DCHECK_GT(object->map()->instance_type(), LAST_CUSTOM_ELEMENTS_RECEIVER);
+  const bool can_serialize_fast =
+      object->HasFastProperties() && object->elements()->length() == 0;
+  if (!can_serialize_fast) return WriteJSObjectSlow(object);
+
+  Handle<Map> map(object->map(), isolate_);
+  WriteTag(SerializationTag::kBeginJSObject);
+
+  // Write out fast properties as long as they are only data properties and the
+  // map doesn't change.
+  uint32_t properties_written = 0;
+  bool map_changed = false;
+  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
+    Handle<Name> key(map->instance_descriptors()->GetKey(i), isolate_);
+    if (!key->IsString()) continue;
+    PropertyDetails details = map->instance_descriptors()->GetDetails(i);
+    if (details.IsDontEnum()) continue;
+
+    Handle<Object> value;
+    if (V8_LIKELY(!map_changed)) map_changed = *map != object->map();
+    if (V8_LIKELY(!map_changed && details.type() == DATA)) {
+      FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
+      value = JSObject::FastPropertyAt(object, details.representation(),
+                                       field_index);
+    } else {
+      // This logic should essentially match WriteJSObjectPropertiesSlow.
+      // If the property is no longer found, do not serialize it.
+      // This could happen if a getter deleted the property.
+      LookupIterator it(isolate_, object, key, LookupIterator::OWN);
+      if (!it.IsFound()) continue;
+      if (!Object::GetProperty(&it).ToHandle(&value)) return Nothing<bool>();
+    }
+
+    if (!WriteObject(key).FromMaybe(false) ||
+        !WriteObject(value).FromMaybe(false)) {
+      return Nothing<bool>();
+    }
+    properties_written++;
+  }
+
+  WriteTag(SerializationTag::kEndJSObject);
+  WriteVarint<uint32_t>(properties_written);
+  return Just(true);
+}
+
+Maybe<bool> ValueSerializer::WriteJSObjectSlow(Handle<JSObject> object) {
   WriteTag(SerializationTag::kBeginJSObject);
   Handle<FixedArray> keys;
   uint32_t properties_written;
   if (!KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                ENUMERABLE_STRINGS)
            .ToHandle(&keys) ||
-      !WriteJSObjectProperties(object, keys).To(&properties_written)) {
+      !WriteJSObjectPropertiesSlow(object, keys).To(&properties_written)) {
     return Nothing<bool>();
   }
   WriteTag(SerializationTag::kEndJSObject);
@@ -331,7 +475,46 @@
     // format changes.
     WriteTag(SerializationTag::kBeginDenseJSArray);
     WriteVarint<uint32_t>(length);
-    for (uint32_t i = 0; i < length; i++) {
+    uint32_t i = 0;
+
+    // Fast paths. Note that FAST_ELEMENTS in particular can bail due to the
+    // structure of the elements changing.
+    switch (array->GetElementsKind()) {
+      case FAST_SMI_ELEMENTS: {
+        Handle<FixedArray> elements(FixedArray::cast(array->elements()),
+                                    isolate_);
+        for (; i < length; i++) WriteSmi(Smi::cast(elements->get(i)));
+        break;
+      }
+      case FAST_DOUBLE_ELEMENTS: {
+        Handle<FixedDoubleArray> elements(
+            FixedDoubleArray::cast(array->elements()), isolate_);
+        for (; i < length; i++) {
+          WriteTag(SerializationTag::kDouble);
+          WriteDouble(elements->get_scalar(i));
+        }
+        break;
+      }
+      case FAST_ELEMENTS: {
+        Handle<Object> old_length(array->length(), isolate_);
+        for (; i < length; i++) {
+          if (array->length() != *old_length ||
+              array->GetElementsKind() != FAST_ELEMENTS) {
+            // Fall back to slow path.
+            break;
+          }
+          Handle<Object> element(FixedArray::cast(array->elements())->get(i),
+                                 isolate_);
+          if (!WriteObject(element).FromMaybe(false)) return Nothing<bool>();
+        }
+        break;
+      }
+      default:
+        break;
+    }
+
+    // If there are elements remaining, serialize them slowly.
+    for (; i < length; i++) {
       // Serializing the array's elements can have arbitrary side effects, so we
       // cannot rely on still having fast elements, even if it did to begin
       // with.
@@ -342,6 +525,7 @@
         return Nothing<bool>();
       }
     }
+
     KeyAccumulator accumulator(isolate_, KeyCollectionMode::kOwnOnly,
                                ENUMERABLE_STRINGS);
     if (!accumulator.CollectOwnPropertyNames(array, array).FromMaybe(false)) {
@@ -350,7 +534,7 @@
     Handle<FixedArray> keys =
         accumulator.GetKeys(GetKeysConversion::kConvertToString);
     uint32_t properties_written;
-    if (!WriteJSObjectProperties(array, keys).To(&properties_written)) {
+    if (!WriteJSObjectPropertiesSlow(array, keys).To(&properties_written)) {
       return Nothing<bool>();
     }
     WriteTag(SerializationTag::kEndDenseJSArray);
@@ -364,7 +548,7 @@
     if (!KeyAccumulator::GetKeys(array, KeyCollectionMode::kOwnOnly,
                                  ENUMERABLE_STRINGS)
              .ToHandle(&keys) ||
-        !WriteJSObjectProperties(array, keys).To(&properties_written)) {
+        !WriteJSObjectPropertiesSlow(array, keys).To(&properties_written)) {
       return Nothing<bool>();
     }
     WriteTag(SerializationTag::kEndSparseJSArray);
@@ -401,6 +585,7 @@
                           v8::String::NO_NULL_TERMINATION);
   } else {
     DCHECK(inner_value->IsSymbol());
+    ThrowDataCloneError(MessageTemplate::kDataCloneError, value);
     return Nothing<bool>();
   }
   return Just(true);
@@ -417,7 +602,135 @@
   WriteVarint(static_cast<uint32_t>(regexp->GetFlags()));
 }
 
-Maybe<uint32_t> ValueSerializer::WriteJSObjectProperties(
+Maybe<bool> ValueSerializer::WriteJSMap(Handle<JSMap> map) {
+  // First copy the key-value pairs, since getters could mutate them.
+  Handle<OrderedHashMap> table(OrderedHashMap::cast(map->table()));
+  int length = table->NumberOfElements() * 2;
+  Handle<FixedArray> entries = isolate_->factory()->NewFixedArray(length);
+  {
+    DisallowHeapAllocation no_gc;
+    Oddball* the_hole = isolate_->heap()->the_hole_value();
+    int capacity = table->UsedCapacity();
+    int result_index = 0;
+    for (int i = 0; i < capacity; i++) {
+      Object* key = table->KeyAt(i);
+      if (key == the_hole) continue;
+      entries->set(result_index++, key);
+      entries->set(result_index++, table->ValueAt(i));
+    }
+    DCHECK_EQ(result_index, length);
+  }
+
+  // Then write it out.
+  WriteTag(SerializationTag::kBeginJSMap);
+  for (int i = 0; i < length; i++) {
+    if (!WriteObject(handle(entries->get(i), isolate_)).FromMaybe(false)) {
+      return Nothing<bool>();
+    }
+  }
+  WriteTag(SerializationTag::kEndJSMap);
+  WriteVarint<uint32_t>(length);
+  return Just(true);
+}
+
+Maybe<bool> ValueSerializer::WriteJSSet(Handle<JSSet> set) {
+  // First copy the element pointers, since getters could mutate them.
+  Handle<OrderedHashSet> table(OrderedHashSet::cast(set->table()));
+  int length = table->NumberOfElements();
+  Handle<FixedArray> entries = isolate_->factory()->NewFixedArray(length);
+  {
+    DisallowHeapAllocation no_gc;
+    Oddball* the_hole = isolate_->heap()->the_hole_value();
+    int capacity = table->UsedCapacity();
+    int result_index = 0;
+    for (int i = 0; i < capacity; i++) {
+      Object* key = table->KeyAt(i);
+      if (key == the_hole) continue;
+      entries->set(result_index++, key);
+    }
+    DCHECK_EQ(result_index, length);
+  }
+
+  // Then write it out.
+  WriteTag(SerializationTag::kBeginJSSet);
+  for (int i = 0; i < length; i++) {
+    if (!WriteObject(handle(entries->get(i), isolate_)).FromMaybe(false)) {
+      return Nothing<bool>();
+    }
+  }
+  WriteTag(SerializationTag::kEndJSSet);
+  WriteVarint<uint32_t>(length);
+  return Just(true);
+}
+
+Maybe<bool> ValueSerializer::WriteJSArrayBuffer(JSArrayBuffer* array_buffer) {
+  uint32_t* transfer_entry = array_buffer_transfer_map_.Find(array_buffer);
+  if (transfer_entry) {
+    DCHECK(array_buffer->was_neutered() || array_buffer->is_shared());
+    WriteTag(array_buffer->is_shared()
+                 ? SerializationTag::kSharedArrayBufferTransfer
+                 : SerializationTag::kArrayBufferTransfer);
+    WriteVarint(*transfer_entry);
+    return Just(true);
+  }
+
+  if (array_buffer->is_shared()) {
+    ThrowDataCloneError(
+        MessageTemplate::kDataCloneErrorSharedArrayBufferNotTransferred);
+    return Nothing<bool>();
+  }
+  if (array_buffer->was_neutered()) {
+    ThrowDataCloneError(MessageTemplate::kDataCloneErrorNeuteredArrayBuffer);
+    return Nothing<bool>();
+  }
+  double byte_length = array_buffer->byte_length()->Number();
+  if (byte_length > std::numeric_limits<uint32_t>::max()) {
+    ThrowDataCloneError(MessageTemplate::kDataCloneError, handle(array_buffer));
+    return Nothing<bool>();
+  }
+  WriteTag(SerializationTag::kArrayBuffer);
+  WriteVarint<uint32_t>(byte_length);
+  WriteRawBytes(array_buffer->backing_store(), byte_length);
+  return Just(true);
+}
+
+Maybe<bool> ValueSerializer::WriteJSArrayBufferView(JSArrayBufferView* view) {
+  WriteTag(SerializationTag::kArrayBufferView);
+  ArrayBufferViewTag tag = ArrayBufferViewTag::kInt8Array;
+  if (view->IsJSTypedArray()) {
+    switch (JSTypedArray::cast(view)->type()) {
+#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+  case kExternal##Type##Array:                          \
+    tag = ArrayBufferViewTag::k##Type##Array;           \
+    break;
+      TYPED_ARRAYS(TYPED_ARRAY_CASE)
+#undef TYPED_ARRAY_CASE
+    }
+  } else {
+    DCHECK(view->IsJSDataView());
+    tag = ArrayBufferViewTag::kDataView;
+  }
+  WriteVarint(static_cast<uint8_t>(tag));
+  WriteVarint(NumberToUint32(view->byte_offset()));
+  WriteVarint(NumberToUint32(view->byte_length()));
+  return Just(true);
+}
+
+Maybe<bool> ValueSerializer::WriteHostObject(Handle<JSObject> object) {
+  if (!delegate_) {
+    isolate_->Throw(*isolate_->factory()->NewError(
+        isolate_->error_function(), MessageTemplate::kDataCloneError, object));
+    return Nothing<bool>();
+  }
+  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate_);
+  Maybe<bool> result =
+      delegate_->WriteHostObject(v8_isolate, Utils::ToLocal(object));
+  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate_, Nothing<bool>());
+  DCHECK(!result.IsNothing());
+  return result;
+}
+
+Maybe<uint32_t> ValueSerializer::WriteJSObjectPropertiesSlow(
     Handle<JSObject> object, Handle<FixedArray> keys) {
   uint32_t properties_written = 0;
   int length = keys->length();
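WriteHostObject is the new escape hatch for embedder-defined objects: JS API
objects with internal fields and special API objects are handed to the
v8::ValueSerializer::Delegate instead of being rejected. Below is a hedged
sketch of what an embedder-side delegate might look like, inferred only from
the calls visible above (ThrowDataCloneError taking a Local<String>, and
WriteHostObject taking an Isolate* and Local<Object> and returning
Maybe<bool>); the authoritative interface is declared in include/v8.h and may
differ in detail:

    #include "include/v8.h"

    // Illustrative embedder delegate; not part of this patch.
    class ExampleSerializerDelegate : public v8::ValueSerializer::Delegate {
     public:
      void ThrowDataCloneError(v8::Local<v8::String> message) override {
        // An embedder would usually wrap this in its own error type.
        v8::Isolate* isolate = v8::Isolate::GetCurrent();
        isolate->ThrowException(v8::Exception::Error(message));
      }

      v8::Maybe<bool> WriteHostObject(v8::Isolate* isolate,
                                      v8::Local<v8::Object> object) override {
        // A real implementation would emit a tag plus payload describing
        // `object` through the serializer's WriteUint32/WriteRawBytes-style
        // hooks. This sketch simply declines, which surfaces as a clone error.
        ThrowDataCloneError(v8::String::NewFromUtf8(
            isolate, "Example: host object not supported"));
        return v8::Nothing<bool>();
      }
    };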
@@ -445,25 +758,56 @@
   return Just(properties_written);
 }
 
+void ValueSerializer::ThrowDataCloneError(
+    MessageTemplate::Template template_index) {
+  return ThrowDataCloneError(template_index,
+                             isolate_->factory()->empty_string());
+}
+
+void ValueSerializer::ThrowDataCloneError(
+    MessageTemplate::Template template_index, Handle<Object> arg0) {
+  Handle<String> message =
+      MessageTemplate::FormatMessage(isolate_, template_index, arg0);
+  if (delegate_) {
+    delegate_->ThrowDataCloneError(Utils::ToLocal(message));
+  } else {
+    isolate_->Throw(
+        *isolate_->factory()->NewError(isolate_->error_function(), message));
+  }
+  if (isolate_->has_scheduled_exception()) {
+    isolate_->PromoteScheduledException();
+  }
+}
+
 ValueDeserializer::ValueDeserializer(Isolate* isolate,
-                                     Vector<const uint8_t> data)
+                                     Vector<const uint8_t> data,
+                                     v8::ValueDeserializer::Delegate* delegate)
     : isolate_(isolate),
+      delegate_(delegate),
       position_(data.start()),
       end_(data.start() + data.length()),
-      id_map_(Handle<SeededNumberDictionary>::cast(
-          isolate->global_handles()->Create(
-              *SeededNumberDictionary::New(isolate, 0)))) {}
+      pretenure_(data.length() > kPretenureThreshold ? TENURED : NOT_TENURED),
+      id_map_(Handle<FixedArray>::cast(isolate->global_handles()->Create(
+          isolate_->heap()->empty_fixed_array()))) {}
 
 ValueDeserializer::~ValueDeserializer() {
   GlobalHandles::Destroy(Handle<Object>::cast(id_map_).location());
+
+  Handle<Object> transfer_map_handle;
+  if (array_buffer_transfer_map_.ToHandle(&transfer_map_handle)) {
+    GlobalHandles::Destroy(transfer_map_handle.location());
+  }
 }
 
 Maybe<bool> ValueDeserializer::ReadHeader() {
   if (position_ < end_ &&
       *position_ == static_cast<uint8_t>(SerializationTag::kVersion)) {
     ReadTag().ToChecked();
-    if (!ReadVarint<uint32_t>().To(&version_)) return Nothing<bool>();
-    if (version_ > kLatestVersion) return Nothing<bool>();
+    if (!ReadVarint<uint32_t>().To(&version_) || version_ > kLatestVersion) {
+      isolate_->Throw(*isolate_->factory()->NewError(
+          MessageTemplate::kDataCloneDeserializationVersionError));
+      return Nothing<bool>();
+    }
   }
   return Just(true);
 }
@@ -511,7 +855,7 @@
     if (position_ >= end_) return Nothing<T>();
     uint8_t byte = *position_;
     if (V8_LIKELY(shift < sizeof(T) * 8)) {
-      value |= (byte & 0x7f) << shift;
+      value |= static_cast<T>(byte & 0x7f) << shift;
       shift += 7;
     }
     has_another_byte = byte & 0x80;
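The static_cast<T> added to ReadVarint matters for 64-bit reads: without it,
byte & 0x7f is promoted to int, and shifting an int by 28 or more bits either
loses the upper bits of a uint64_t varint or is outright undefined behaviour.
A small illustration (hypothetical names, not V8 code):

    #include <cassert>
    #include <cstdint>

    int main() {
      uint8_t byte = 0x01;
      int shift = 35;  // A later byte of a uint64_t varint.

      // Without the cast, (byte & 0x7f) is an int; shifting it by 35 bits is
      // undefined behaviour and cannot produce the intended 64-bit value.
      // uint64_t bad = (byte & 0x7f) << shift;   // UB, typically wrong.

      // With the cast, the shift happens in the destination type.
      uint64_t good = static_cast<uint64_t>(byte & 0x7f) << shift;
      assert(good == (1ull << 35));
      return 0;
    }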
@@ -551,7 +895,67 @@
   return Just(Vector<const uint8_t>(start, size));
 }
 
+bool ValueDeserializer::ReadUint32(uint32_t* value) {
+  return ReadVarint<uint32_t>().To(value);
+}
+
+bool ValueDeserializer::ReadUint64(uint64_t* value) {
+  return ReadVarint<uint64_t>().To(value);
+}
+
+bool ValueDeserializer::ReadDouble(double* value) {
+  return ReadDouble().To(value);
+}
+
+bool ValueDeserializer::ReadRawBytes(size_t length, const void** data) {
+  if (length > static_cast<size_t>(end_ - position_)) return false;
+  *data = position_;
+  position_ += length;
+  return true;
+}
+
+void ValueDeserializer::TransferArrayBuffer(
+    uint32_t transfer_id, Handle<JSArrayBuffer> array_buffer) {
+  if (array_buffer_transfer_map_.is_null()) {
+    array_buffer_transfer_map_ =
+        Handle<SeededNumberDictionary>::cast(isolate_->global_handles()->Create(
+            *SeededNumberDictionary::New(isolate_, 0)));
+  }
+  Handle<SeededNumberDictionary> dictionary =
+      array_buffer_transfer_map_.ToHandleChecked();
+  const bool used_as_prototype = false;
+  Handle<SeededNumberDictionary> new_dictionary =
+      SeededNumberDictionary::AtNumberPut(dictionary, transfer_id, array_buffer,
+                                          used_as_prototype);
+  if (!new_dictionary.is_identical_to(dictionary)) {
+    GlobalHandles::Destroy(Handle<Object>::cast(dictionary).location());
+    array_buffer_transfer_map_ = Handle<SeededNumberDictionary>::cast(
+        isolate_->global_handles()->Create(*new_dictionary));
+  }
+}
+
 MaybeHandle<Object> ValueDeserializer::ReadObject() {
+  MaybeHandle<Object> result = ReadObjectInternal();
+
+  // ArrayBufferView is special in that it consumes the value before it, even
+  // after format version 0.
+  Handle<Object> object;
+  SerializationTag tag;
+  if (result.ToHandle(&object) && V8_UNLIKELY(object->IsJSArrayBuffer()) &&
+      PeekTag().To(&tag) && tag == SerializationTag::kArrayBufferView) {
+    ConsumeTag(SerializationTag::kArrayBufferView);
+    result = ReadJSArrayBufferView(Handle<JSArrayBuffer>::cast(object));
+  }
+
+  if (result.is_null() && !isolate_->has_pending_exception()) {
+    isolate_->Throw(*isolate_->factory()->NewError(
+        MessageTemplate::kDataCloneDeserializationError));
+  }
+
+  return result;
+}
+
+MaybeHandle<Object> ValueDeserializer::ReadObjectInternal() {
   SerializationTag tag;
   if (!ReadTag().To(&tag)) return MaybeHandle<Object>();
   switch (tag) {
@@ -570,17 +974,19 @@
     case SerializationTag::kInt32: {
       Maybe<int32_t> number = ReadZigZag<int32_t>();
       if (number.IsNothing()) return MaybeHandle<Object>();
-      return isolate_->factory()->NewNumberFromInt(number.FromJust());
+      return isolate_->factory()->NewNumberFromInt(number.FromJust(),
+                                                   pretenure_);
     }
     case SerializationTag::kUint32: {
       Maybe<uint32_t> number = ReadVarint<uint32_t>();
       if (number.IsNothing()) return MaybeHandle<Object>();
-      return isolate_->factory()->NewNumberFromUint(number.FromJust());
+      return isolate_->factory()->NewNumberFromUint(number.FromJust(),
+                                                    pretenure_);
     }
     case SerializationTag::kDouble: {
       Maybe<double> number = ReadDouble();
       if (number.IsNothing()) return MaybeHandle<Object>();
-      return isolate_->factory()->NewNumber(number.FromJust());
+      return isolate_->factory()->NewNumber(number.FromJust(), pretenure_);
     }
     case SerializationTag::kUtf8String:
       return ReadUtf8String();
@@ -606,8 +1012,25 @@
       return ReadJSValue(tag);
     case SerializationTag::kRegExp:
       return ReadJSRegExp();
+    case SerializationTag::kBeginJSMap:
+      return ReadJSMap();
+    case SerializationTag::kBeginJSSet:
+      return ReadJSSet();
+    case SerializationTag::kArrayBuffer:
+      return ReadJSArrayBuffer();
+    case SerializationTag::kArrayBufferTransfer: {
+      const bool is_shared = false;
+      return ReadTransferredJSArrayBuffer(is_shared);
+    }
+    case SerializationTag::kSharedArrayBufferTransfer: {
+      const bool is_shared = true;
+      return ReadTransferredJSArrayBuffer(is_shared);
+    }
     default:
-      return MaybeHandle<Object>();
+      // TODO(jbroman): Introduce an explicit tag for host objects to avoid
+      // having to treat every unknown tag as a potential host object.
+      position_--;
+      return ReadHostObject();
   }
 }
 
@@ -620,7 +1043,7 @@
       !ReadRawBytes(utf8_length).To(&utf8_bytes))
     return MaybeHandle<String>();
   return isolate_->factory()->NewStringFromUtf8(
-      Vector<const char>::cast(utf8_bytes));
+      Vector<const char>::cast(utf8_bytes), pretenure_);
 }
 
 MaybeHandle<String> ValueDeserializer::ReadTwoByteString() {
@@ -636,7 +1059,7 @@
   // string on the heap (regardless of alignment).
   Handle<SeqTwoByteString> string;
   if (!isolate_->factory()
-           ->NewRawTwoByteString(byte_length / sizeof(uc16))
+           ->NewRawTwoByteString(byte_length / sizeof(uc16), pretenure_)
            .ToHandle(&string))
     return MaybeHandle<String>();
 
@@ -646,19 +1069,59 @@
   return string;
 }
 
+bool ValueDeserializer::ReadExpectedString(Handle<String> expected) {
+  // In the case of failure, the position in the stream is reset.
+  const uint8_t* original_position = position_;
+
+  SerializationTag tag;
+  uint32_t byte_length;
+  Vector<const uint8_t> bytes;
+  if (!ReadTag().To(&tag) || !ReadVarint<uint32_t>().To(&byte_length) ||
+      byte_length >
+          static_cast<uint32_t>(std::numeric_limits<int32_t>::max()) ||
+      !ReadRawBytes(byte_length).To(&bytes)) {
+    position_ = original_position;
+    return false;
+  }
+
+  expected = String::Flatten(expected);
+  DisallowHeapAllocation no_gc;
+  String::FlatContent flat = expected->GetFlatContent();
+
+  // If the bytes are verbatim what is in the flattened string, then the string
+  // is successfully consumed.
+  if (tag == SerializationTag::kUtf8String && flat.IsOneByte()) {
+    Vector<const uint8_t> chars = flat.ToOneByteVector();
+    if (byte_length == chars.length() &&
+        String::IsAscii(chars.begin(), chars.length()) &&
+        memcmp(bytes.begin(), chars.begin(), byte_length) == 0) {
+      return true;
+    }
+  } else if (tag == SerializationTag::kTwoByteString && flat.IsTwoByte()) {
+    Vector<const uc16> chars = flat.ToUC16Vector();
+    if (byte_length == static_cast<unsigned>(chars.length()) * sizeof(uc16) &&
+        memcmp(bytes.begin(), chars.begin(), byte_length) == 0) {
+      return true;
+    }
+  }
+
+  position_ = original_position;
+  return false;
+}
+
 MaybeHandle<JSObject> ValueDeserializer::ReadJSObject() {
   // If we are at the end of the stack, abort. This function may recurse.
-  if (StackLimitCheck(isolate_).HasOverflowed()) return MaybeHandle<JSObject>();
+  STACK_CHECK(isolate_, MaybeHandle<JSObject>());
 
   uint32_t id = next_id_++;
   HandleScope scope(isolate_);
   Handle<JSObject> object =
-      isolate_->factory()->NewJSObject(isolate_->object_function());
+      isolate_->factory()->NewJSObject(isolate_->object_function(), pretenure_);
   AddObjectWithID(id, object);
 
   uint32_t num_properties;
   uint32_t expected_num_properties;
-  if (!ReadJSObjectProperties(object, SerializationTag::kEndJSObject)
+  if (!ReadJSObjectProperties(object, SerializationTag::kEndJSObject, true)
            .To(&num_properties) ||
       !ReadVarint<uint32_t>().To(&expected_num_properties) ||
       num_properties != expected_num_properties) {
@@ -671,21 +1134,22 @@
 
 MaybeHandle<JSArray> ValueDeserializer::ReadSparseJSArray() {
   // If we are at the end of the stack, abort. This function may recurse.
-  if (StackLimitCheck(isolate_).HasOverflowed()) return MaybeHandle<JSArray>();
+  STACK_CHECK(isolate_, MaybeHandle<JSArray>());
 
   uint32_t length;
   if (!ReadVarint<uint32_t>().To(&length)) return MaybeHandle<JSArray>();
 
   uint32_t id = next_id_++;
   HandleScope scope(isolate_);
-  Handle<JSArray> array = isolate_->factory()->NewJSArray(0);
+  Handle<JSArray> array = isolate_->factory()->NewJSArray(
+      0, TERMINAL_FAST_ELEMENTS_KIND, pretenure_);
   JSArray::SetLength(array, length);
   AddObjectWithID(id, array);
 
   uint32_t num_properties;
   uint32_t expected_num_properties;
   uint32_t expected_length;
-  if (!ReadJSObjectProperties(array, SerializationTag::kEndSparseJSArray)
+  if (!ReadJSObjectProperties(array, SerializationTag::kEndSparseJSArray, false)
            .To(&num_properties) ||
       !ReadVarint<uint32_t>().To(&expected_num_properties) ||
       !ReadVarint<uint32_t>().To(&expected_length) ||
@@ -699,7 +1163,7 @@
 
 MaybeHandle<JSArray> ValueDeserializer::ReadDenseJSArray() {
   // If we are at the end of the stack, abort. This function may recurse.
-  if (StackLimitCheck(isolate_).HasOverflowed()) return MaybeHandle<JSArray>();
+  STACK_CHECK(isolate_, MaybeHandle<JSArray>());
 
   uint32_t length;
   if (!ReadVarint<uint32_t>().To(&length)) return MaybeHandle<JSArray>();
@@ -707,7 +1171,8 @@
   uint32_t id = next_id_++;
   HandleScope scope(isolate_);
   Handle<JSArray> array = isolate_->factory()->NewJSArray(
-      FAST_HOLEY_ELEMENTS, length, length, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
+      FAST_HOLEY_ELEMENTS, length, length, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE,
+      pretenure_);
   AddObjectWithID(id, array);
 
   Handle<FixedArray> elements(FixedArray::cast(array->elements()), isolate_);
@@ -722,7 +1187,7 @@
   uint32_t num_properties;
   uint32_t expected_num_properties;
   uint32_t expected_length;
-  if (!ReadJSObjectProperties(array, SerializationTag::kEndDenseJSArray)
+  if (!ReadJSObjectProperties(array, SerializationTag::kEndDenseJSArray, false)
            .To(&num_properties) ||
       !ReadVarint<uint32_t>().To(&expected_num_properties) ||
       !ReadVarint<uint32_t>().To(&expected_length) ||
@@ -752,29 +1217,30 @@
   Handle<JSValue> value;
   switch (tag) {
     case SerializationTag::kTrueObject:
-      value = Handle<JSValue>::cast(
-          isolate_->factory()->NewJSObject(isolate_->boolean_function()));
+      value = Handle<JSValue>::cast(isolate_->factory()->NewJSObject(
+          isolate_->boolean_function(), pretenure_));
       value->set_value(isolate_->heap()->true_value());
       break;
     case SerializationTag::kFalseObject:
-      value = Handle<JSValue>::cast(
-          isolate_->factory()->NewJSObject(isolate_->boolean_function()));
+      value = Handle<JSValue>::cast(isolate_->factory()->NewJSObject(
+          isolate_->boolean_function(), pretenure_));
       value->set_value(isolate_->heap()->false_value());
       break;
     case SerializationTag::kNumberObject: {
       double number;
       if (!ReadDouble().To(&number)) return MaybeHandle<JSValue>();
-      value = Handle<JSValue>::cast(
-          isolate_->factory()->NewJSObject(isolate_->number_function()));
-      Handle<Object> number_object = isolate_->factory()->NewNumber(number);
+      value = Handle<JSValue>::cast(isolate_->factory()->NewJSObject(
+          isolate_->number_function(), pretenure_));
+      Handle<Object> number_object =
+          isolate_->factory()->NewNumber(number, pretenure_);
       value->set_value(*number_object);
       break;
     }
     case SerializationTag::kStringObject: {
       Handle<String> string;
       if (!ReadUtf8String().ToHandle(&string)) return MaybeHandle<JSValue>();
-      value = Handle<JSValue>::cast(
-          isolate_->factory()->NewJSObject(isolate_->string_function()));
+      value = Handle<JSValue>::cast(isolate_->factory()->NewJSObject(
+          isolate_->string_function(), pretenure_));
       value->set_value(*string);
       break;
     }
@@ -801,9 +1267,296 @@
   return regexp;
 }
 
+MaybeHandle<JSMap> ValueDeserializer::ReadJSMap() {
+  // If we are at the end of the stack, abort. This function may recurse.
+  STACK_CHECK(isolate_, MaybeHandle<JSMap>());
+
+  HandleScope scope(isolate_);
+  uint32_t id = next_id_++;
+  Handle<JSMap> map = isolate_->factory()->NewJSMap();
+  AddObjectWithID(id, map);
+
+  Handle<JSFunction> map_set = isolate_->map_set();
+  uint32_t length = 0;
+  while (true) {
+    SerializationTag tag;
+    if (!PeekTag().To(&tag)) return MaybeHandle<JSMap>();
+    if (tag == SerializationTag::kEndJSMap) {
+      ConsumeTag(SerializationTag::kEndJSMap);
+      break;
+    }
+
+    Handle<Object> argv[2];
+    if (!ReadObject().ToHandle(&argv[0]) || !ReadObject().ToHandle(&argv[1]) ||
+        Execution::Call(isolate_, map_set, map, arraysize(argv), argv)
+            .is_null()) {
+      return MaybeHandle<JSMap>();
+    }
+    length += 2;
+  }
+
+  uint32_t expected_length;
+  if (!ReadVarint<uint32_t>().To(&expected_length) ||
+      length != expected_length) {
+    return MaybeHandle<JSMap>();
+  }
+  DCHECK(HasObjectWithID(id));
+  return scope.CloseAndEscape(map);
+}
+
+MaybeHandle<JSSet> ValueDeserializer::ReadJSSet() {
+  // If we are at the end of the stack, abort. This function may recurse.
+  STACK_CHECK(isolate_, MaybeHandle<JSSet>());
+
+  HandleScope scope(isolate_);
+  uint32_t id = next_id_++;
+  Handle<JSSet> set = isolate_->factory()->NewJSSet();
+  AddObjectWithID(id, set);
+  Handle<JSFunction> set_add = isolate_->set_add();
+  uint32_t length = 0;
+  while (true) {
+    SerializationTag tag;
+    if (!PeekTag().To(&tag)) return MaybeHandle<JSSet>();
+    if (tag == SerializationTag::kEndJSSet) {
+      ConsumeTag(SerializationTag::kEndJSSet);
+      break;
+    }
+
+    Handle<Object> argv[1];
+    if (!ReadObject().ToHandle(&argv[0]) ||
+        Execution::Call(isolate_, set_add, set, arraysize(argv), argv)
+            .is_null()) {
+      return MaybeHandle<JSSet>();
+    }
+    length++;
+  }
+
+  uint32_t expected_length;
+  if (!ReadVarint<uint32_t>().To(&expected_length) ||
+      length != expected_length) {
+    return MaybeHandle<JSSet>();
+  }
+  DCHECK(HasObjectWithID(id));
+  return scope.CloseAndEscape(set);
+}
+
+MaybeHandle<JSArrayBuffer> ValueDeserializer::ReadJSArrayBuffer() {
+  uint32_t id = next_id_++;
+  uint32_t byte_length;
+  Vector<const uint8_t> bytes;
+  if (!ReadVarint<uint32_t>().To(&byte_length) ||
+      byte_length > static_cast<size_t>(end_ - position_)) {
+    return MaybeHandle<JSArrayBuffer>();
+  }
+  const bool should_initialize = false;
+  Handle<JSArrayBuffer> array_buffer =
+      isolate_->factory()->NewJSArrayBuffer(SharedFlag::kNotShared, pretenure_);
+  JSArrayBuffer::SetupAllocatingData(array_buffer, isolate_, byte_length,
+                                     should_initialize);
+  memcpy(array_buffer->backing_store(), position_, byte_length);
+  position_ += byte_length;
+  AddObjectWithID(id, array_buffer);
+  return array_buffer;
+}
+
+MaybeHandle<JSArrayBuffer> ValueDeserializer::ReadTransferredJSArrayBuffer(
+    bool is_shared) {
+  uint32_t id = next_id_++;
+  uint32_t transfer_id;
+  Handle<SeededNumberDictionary> transfer_map;
+  if (!ReadVarint<uint32_t>().To(&transfer_id) ||
+      !array_buffer_transfer_map_.ToHandle(&transfer_map)) {
+    return MaybeHandle<JSArrayBuffer>();
+  }
+  int index = transfer_map->FindEntry(isolate_, transfer_id);
+  if (index == SeededNumberDictionary::kNotFound) {
+    return MaybeHandle<JSArrayBuffer>();
+  }
+  Handle<JSArrayBuffer> array_buffer(
+      JSArrayBuffer::cast(transfer_map->ValueAt(index)), isolate_);
+  DCHECK_EQ(is_shared, array_buffer->is_shared());
+  AddObjectWithID(id, array_buffer);
+  return array_buffer;
+}
+
+MaybeHandle<JSArrayBufferView> ValueDeserializer::ReadJSArrayBufferView(
+    Handle<JSArrayBuffer> buffer) {
+  uint32_t buffer_byte_length = NumberToUint32(buffer->byte_length());
+  uint8_t tag = 0;
+  uint32_t byte_offset = 0;
+  uint32_t byte_length = 0;
+  if (!ReadVarint<uint8_t>().To(&tag) ||
+      !ReadVarint<uint32_t>().To(&byte_offset) ||
+      !ReadVarint<uint32_t>().To(&byte_length) ||
+      byte_offset > buffer_byte_length ||
+      byte_length > buffer_byte_length - byte_offset) {
+    return MaybeHandle<JSArrayBufferView>();
+  }
+  uint32_t id = next_id_++;
+  ExternalArrayType external_array_type = kExternalInt8Array;
+  unsigned element_size = 0;
+  switch (static_cast<ArrayBufferViewTag>(tag)) {
+    case ArrayBufferViewTag::kDataView: {
+      Handle<JSDataView> data_view =
+          isolate_->factory()->NewJSDataView(buffer, byte_offset, byte_length);
+      AddObjectWithID(id, data_view);
+      return data_view;
+    }
+#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+  case ArrayBufferViewTag::k##Type##Array:              \
+    external_array_type = kExternal##Type##Array;       \
+    element_size = size;                                \
+    break;
+      TYPED_ARRAYS(TYPED_ARRAY_CASE)
+#undef TYPED_ARRAY_CASE
+  }
+  if (byte_offset % element_size != 0 || byte_length % element_size != 0) {
+    return MaybeHandle<JSArrayBufferView>();
+  }
+  Handle<JSTypedArray> typed_array = isolate_->factory()->NewJSTypedArray(
+      external_array_type, buffer, byte_offset, byte_length / element_size,
+      pretenure_);
+  AddObjectWithID(id, typed_array);
+  return typed_array;
+}
+
+MaybeHandle<JSObject> ValueDeserializer::ReadHostObject() {
+  if (!delegate_) return MaybeHandle<JSObject>();
+  STACK_CHECK(isolate_, MaybeHandle<JSObject>());
+  uint32_t id = next_id_++;
+  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate_);
+  v8::Local<v8::Object> object;
+  if (!delegate_->ReadHostObject(v8_isolate).ToLocal(&object)) {
+    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate_, JSObject);
+    return MaybeHandle<JSObject>();
+  }
+  Handle<JSObject> js_object =
+      Handle<JSObject>::cast(Utils::OpenHandle(*object));
+  AddObjectWithID(id, js_object);
+  return js_object;
+}
+
+// Copies a vector of property values into an object, given the map that should
+// be used.
+static void CommitProperties(Handle<JSObject> object, Handle<Map> map,
+                             const std::vector<Handle<Object>>& properties) {
+  JSObject::AllocateStorageForMap(object, map);
+  DCHECK(!object->map()->is_dictionary_map());
+
+  DisallowHeapAllocation no_gc;
+  DescriptorArray* descriptors = object->map()->instance_descriptors();
+  for (unsigned i = 0; i < properties.size(); i++) {
+    object->WriteToField(i, descriptors->GetDetails(i), *properties[i]);
+  }
+}
+
 Maybe<uint32_t> ValueDeserializer::ReadJSObjectProperties(
-    Handle<JSObject> object, SerializationTag end_tag) {
-  for (uint32_t num_properties = 0;; num_properties++) {
+    Handle<JSObject> object, SerializationTag end_tag,
+    bool can_use_transitions) {
+  uint32_t num_properties = 0;
+
+  // Fast path (following map transitions).
+  if (can_use_transitions) {
+    bool transitioning = true;
+    Handle<Map> map(object->map(), isolate_);
+    DCHECK(!map->is_dictionary_map());
+    DCHECK(map->instance_descriptors()->IsEmpty());
+    std::vector<Handle<Object>> properties;
+    properties.reserve(8);
+
+    while (transitioning) {
+      // If there are no more properties, finish.
+      SerializationTag tag;
+      if (!PeekTag().To(&tag)) return Nothing<uint32_t>();
+      if (tag == end_tag) {
+        ConsumeTag(end_tag);
+        CommitProperties(object, map, properties);
+        CHECK_LT(properties.size(), std::numeric_limits<uint32_t>::max());
+        return Just(static_cast<uint32_t>(properties.size()));
+      }
+
+      // Determine the key to be used and the target map to transition to, if
+      // possible. Transitioning may abort if the key is not a string, or if no
+      // transition was found.
+      Handle<Object> key;
+      Handle<Map> target;
+      Handle<String> expected_key = TransitionArray::ExpectedTransitionKey(map);
+      if (!expected_key.is_null() && ReadExpectedString(expected_key)) {
+        key = expected_key;
+        target = TransitionArray::ExpectedTransitionTarget(map);
+      } else {
+        if (!ReadObject().ToHandle(&key)) return Nothing<uint32_t>();
+        if (key->IsString()) {
+          key =
+              isolate_->factory()->InternalizeString(Handle<String>::cast(key));
+          target = TransitionArray::FindTransitionToField(
+              map, Handle<String>::cast(key));
+          transitioning = !target.is_null();
+        } else {
+          transitioning = false;
+        }
+      }
+
+      // Read the value that corresponds to it.
+      Handle<Object> value;
+      if (!ReadObject().ToHandle(&value)) return Nothing<uint32_t>();
+
+      // If still transitioning and the value fits the field representation
+      // (though generalization may be required), store the value so that the
+      // values can all be copied at once. Otherwise, stop transitioning.
+      if (transitioning) {
+        int descriptor = static_cast<int>(properties.size());
+        PropertyDetails details =
+            target->instance_descriptors()->GetDetails(descriptor);
+        Representation expected_representation = details.representation();
+        if (value->FitsRepresentation(expected_representation)) {
+          if (expected_representation.IsHeapObject() &&
+              !target->instance_descriptors()
+                   ->GetFieldType(descriptor)
+                   ->NowContains(value)) {
+            Handle<FieldType> value_type =
+                value->OptimalType(isolate_, expected_representation);
+            Map::GeneralizeFieldType(target, descriptor,
+                                     expected_representation, value_type);
+          }
+          DCHECK(target->instance_descriptors()
+                     ->GetFieldType(descriptor)
+                     ->NowContains(value));
+          properties.push_back(value);
+          map = target;
+          continue;
+        } else {
+          transitioning = false;
+        }
+      }
+
+      // Fell out of transitioning fast path. Commit the properties gathered so
+      // far, and then start setting properties slowly instead.
+      DCHECK(!transitioning);
+      CHECK_LT(properties.size(), std::numeric_limits<uint32_t>::max());
+      CommitProperties(object, map, properties);
+      num_properties = static_cast<uint32_t>(properties.size());
+
+      bool success;
+      LookupIterator it = LookupIterator::PropertyOrElement(
+          isolate_, object, key, &success, LookupIterator::OWN);
+      if (!success ||
+          JSObject::DefineOwnPropertyIgnoreAttributes(&it, value, NONE)
+              .is_null()) {
+        return Nothing<uint32_t>();
+      }
+      num_properties++;
+    }
+
+    // At this point, transitioning should be done, but at least one property
+    // should have been written (in the zero-property case, there is an early
+    // return).
+    DCHECK(!transitioning);
+    DCHECK_GE(num_properties, 1u);
+  }
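// Illustrative sketch (hypothetical, not part of this diff): the fast path
// above follows map transitions so that objects serialized with the same key
// order share a shape and their values can be committed in one batch, with a
// per-property slow path as fallback. A standalone toy analogue, using a toy
// "shape" tree instead of V8's Map/DescriptorArray machinery:
#include <map>
#include <string>
#include <utility>
#include <vector>

// A node in a toy transition tree: one child per "next property name".
struct ToyShape {
  std::map<std::string, ToyShape*> transitions;  // owned by an outside arena
};

struct ToyObject {
  std::vector<int> fast_fields;                // committed in one batch
  std::map<std::string, int> slow_properties;  // fallback storage
};

// Follows shape transitions while every incoming key has one, then falls back
// to slow, per-property stores for the remainder (mirroring the two phases of
// ReadJSObjectProperties above).
void ReadToyProperties(const std::vector<std::pair<std::string, int>>& pairs,
                       ToyShape* shape, ToyObject* object) {
  std::vector<int> pending;  // values gathered while still transitioning
  size_t i = 0;
  for (; i < pairs.size(); i++) {
    auto it = shape->transitions.find(pairs[i].first);
    if (it == shape->transitions.end()) break;  // no transition: go slow
    pending.push_back(pairs[i].second);
    shape = it->second;
  }
  object->fast_fields = std::move(pending);  // the CommitProperties analogue
  for (; i < pairs.size(); i++) {
    object->slow_properties[pairs[i].first] = pairs[i].second;
  }
}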
+
+  // Slow path.
+  for (;; num_properties++) {
     SerializationTag tag;
     if (!PeekTag().To(&tag)) return Nothing<uint32_t>();
     if (tag == end_tag) {
@@ -828,15 +1581,16 @@
 }
 
 bool ValueDeserializer::HasObjectWithID(uint32_t id) {
-  return id_map_->Has(isolate_, id);
+  return id < static_cast<unsigned>(id_map_->length()) &&
+         !id_map_->get(id)->IsTheHole(isolate_);
 }
 
 MaybeHandle<JSReceiver> ValueDeserializer::GetObjectWithID(uint32_t id) {
-  int index = id_map_->FindEntry(isolate_, id);
-  if (index == SeededNumberDictionary::kNotFound) {
+  if (id >= static_cast<unsigned>(id_map_->length())) {
     return MaybeHandle<JSReceiver>();
   }
-  Object* value = id_map_->ValueAt(index);
+  Object* value = id_map_->get(id);
+  if (value->IsTheHole(isolate_)) return MaybeHandle<JSReceiver>();
   DCHECK(value->IsJSReceiver());
   return Handle<JSReceiver>(JSReceiver::cast(value), isolate_);
 }
@@ -844,16 +1598,13 @@
 void ValueDeserializer::AddObjectWithID(uint32_t id,
                                         Handle<JSReceiver> object) {
   DCHECK(!HasObjectWithID(id));
-  const bool used_as_prototype = false;
-  Handle<SeededNumberDictionary> new_dictionary =
-      SeededNumberDictionary::AtNumberPut(id_map_, id, object,
-                                          used_as_prototype);
+  Handle<FixedArray> new_array = FixedArray::SetAndGrow(id_map_, id, object);
 
   // If the dictionary was reallocated, update the global handle.
-  if (!new_dictionary.is_identical_to(id_map_)) {
+  if (!new_array.is_identical_to(id_map_)) {
     GlobalHandles::Destroy(Handle<Object>::cast(id_map_).location());
-    id_map_ = Handle<SeededNumberDictionary>::cast(
-        isolate_->global_handles()->Create(*new_dictionary));
+    id_map_ = Handle<FixedArray>::cast(
+        isolate_->global_handles()->Create(*new_array));
   }
 }
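// Illustrative sketch (hypothetical, not part of this diff): the back-reference
// table is now a dense array indexed by id, grown on write and probed with a
// "hole" sentinel, rather than a number dictionary. A standalone analogue of
// HasObjectWithID / GetObjectWithID / AddObjectWithID above:
#include <cstdint>
#include <vector>

class ToyIdMap {
 public:
  bool Has(uint32_t id) const {
    return id < slots_.size() && slots_[id] != nullptr;
  }
  void* Get(uint32_t id) const { return Has(id) ? slots_[id] : nullptr; }
  void Add(uint32_t id, void* object) {
    if (id >= slots_.size()) slots_.resize(id + 1, nullptr);  // SetAndGrow
    slots_[id] = object;
  }

 private:
  std::vector<void*> slots_;  // nullptr plays the role of the hole value
};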
 
@@ -878,8 +1629,7 @@
 
 MaybeHandle<Object>
 ValueDeserializer::ReadObjectUsingEntireBufferForLegacyFormat() {
-  if (version_ > 0) return MaybeHandle<Object>();
-
+  DCHECK_EQ(version_, 0);
   HandleScope scope(isolate_);
   std::vector<Handle<Object>> stack;
   while (position_ < end_) {
@@ -901,8 +1651,8 @@
 
         size_t begin_properties =
             stack.size() - 2 * static_cast<size_t>(num_properties);
-        Handle<JSObject> js_object =
-            isolate_->factory()->NewJSObject(isolate_->object_function());
+        Handle<JSObject> js_object = isolate_->factory()->NewJSObject(
+            isolate_->object_function(), pretenure_);
         if (num_properties &&
             !SetPropertiesFromKeyValuePairs(
                  isolate_, js_object, &stack[begin_properties], num_properties)
@@ -926,7 +1676,8 @@
           return MaybeHandle<Object>();
         }
 
-        Handle<JSArray> js_array = isolate_->factory()->NewJSArray(0);
+        Handle<JSArray> js_array = isolate_->factory()->NewJSArray(
+            0, TERMINAL_FAST_ELEMENTS_KIND, pretenure_);
         JSArray::SetLength(js_array, length);
         size_t begin_properties =
             stack.size() - 2 * static_cast<size_t>(num_properties);
@@ -941,9 +1692,12 @@
         new_object = js_array;
         break;
       }
-      case SerializationTag::kEndDenseJSArray:
+      case SerializationTag::kEndDenseJSArray: {
         // This was already broken in Chromium, and apparently wasn't missed.
+        isolate_->Throw(*isolate_->factory()->NewError(
+            MessageTemplate::kDataCloneDeserializationError));
         return MaybeHandle<Object>();
+      }
       default:
         if (!ReadObject().ToHandle(&new_object)) return MaybeHandle<Object>();
         break;
@@ -959,7 +1713,11 @@
 #endif
   position_ = end_;
 
-  if (stack.size() != 1) return MaybeHandle<Object>();
+  if (stack.size() != 1) {
+    isolate_->Throw(*isolate_->factory()->NewError(
+        MessageTemplate::kDataCloneDeserializationError));
+    return MaybeHandle<Object>();
+  }
   return scope.CloseAndEscape(stack[0]);
 }
 
diff --git a/src/value-serializer.h b/src/value-serializer.h
index ab9c664..27ce0c1 100644
--- a/src/value-serializer.h
+++ b/src/value-serializer.h
@@ -12,16 +12,21 @@
 #include "src/base/compiler-specific.h"
 #include "src/base/macros.h"
 #include "src/identity-map.h"
+#include "src/messages.h"
 #include "src/vector.h"
-#include "src/zone.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
 
 class HeapNumber;
 class Isolate;
+class JSArrayBuffer;
+class JSArrayBufferView;
 class JSDate;
+class JSMap;
 class JSRegExp;
+class JSSet;
 class JSValue;
 class Object;
 class Oddball;
@@ -37,7 +42,7 @@
  */
 class ValueSerializer {
  public:
-  explicit ValueSerializer(Isolate* isolate);
+  ValueSerializer(Isolate* isolate, v8::ValueSerializer::Delegate* delegate);
   ~ValueSerializer();
 
   /*
@@ -56,6 +61,23 @@
    */
   std::vector<uint8_t> ReleaseBuffer() { return std::move(buffer_); }
 
+  /*
+   * Marks an ArrayBuffer as having its contents transferred out of band.
+   * Pass the corresponding JSArrayBuffer in the deserializing context to
+   * ValueDeserializer::TransferArrayBuffer.
+   */
+  void TransferArrayBuffer(uint32_t transfer_id,
+                           Handle<JSArrayBuffer> array_buffer);
+
+  /*
+   * Publicly exposed wire format writing methods.
+   * These are intended for use within the delegate's WriteHostObject method.
+   */
+  void WriteUint32(uint32_t value);
+  void WriteUint64(uint64_t value);
+  void WriteRawBytes(const void* source, size_t length);
+  void WriteDouble(double value);
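// Illustrative sketch (hypothetical, not part of this diff): how an embedder's
// delegate might use the wire-format helpers declared above from its
// WriteHostObject hook. The tag constant and blob layout are assumptions, and
// the sketch assumes src/value-serializer.h is available.
#include <cstdint>
#include "src/value-serializer.h"

void WriteToyBlob(v8::internal::ValueSerializer* serializer,
                  const uint8_t* data, uint32_t size) {
  const uint32_t kToyBlobTag = 0xB10B;    // hypothetical host-object sub-tag
  serializer->WriteUint32(kToyBlobTag);
  serializer->WriteUint32(size);          // length prefix...
  serializer->WriteRawBytes(data, size);  // ...then the raw payload
}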
+
  private:
   // Writing the wire format.
   void WriteTag(SerializationTag tag);
@@ -63,7 +85,6 @@
   void WriteVarint(T value);
   template <typename T>
   void WriteZigZag(T value);
-  void WriteDouble(double value);
   void WriteOneByteString(Vector<const uint8_t> chars);
   void WriteTwoByteString(Vector<const uc16> chars);
   uint8_t* ReserveRawBytes(size_t bytes);
@@ -75,20 +96,35 @@
   void WriteString(Handle<String> string);
   Maybe<bool> WriteJSReceiver(Handle<JSReceiver> receiver) WARN_UNUSED_RESULT;
   Maybe<bool> WriteJSObject(Handle<JSObject> object) WARN_UNUSED_RESULT;
+  Maybe<bool> WriteJSObjectSlow(Handle<JSObject> object) WARN_UNUSED_RESULT;
   Maybe<bool> WriteJSArray(Handle<JSArray> array) WARN_UNUSED_RESULT;
   void WriteJSDate(JSDate* date);
   Maybe<bool> WriteJSValue(Handle<JSValue> value) WARN_UNUSED_RESULT;
   void WriteJSRegExp(JSRegExp* regexp);
+  Maybe<bool> WriteJSMap(Handle<JSMap> map) WARN_UNUSED_RESULT;
+  Maybe<bool> WriteJSSet(Handle<JSSet> map) WARN_UNUSED_RESULT;
+  Maybe<bool> WriteJSArrayBuffer(JSArrayBuffer* array_buffer);
+  Maybe<bool> WriteJSArrayBufferView(JSArrayBufferView* array_buffer);
+  Maybe<bool> WriteHostObject(Handle<JSObject> object) WARN_UNUSED_RESULT;
 
   /*
    * Reads the specified keys from the object and writes key-value pairs to the
    * buffer. Returns the number of keys actually written, which may be smaller
    * if some keys are not own properties when accessed.
    */
-  Maybe<uint32_t> WriteJSObjectProperties(
+  Maybe<uint32_t> WriteJSObjectPropertiesSlow(
       Handle<JSObject> object, Handle<FixedArray> keys) WARN_UNUSED_RESULT;
 
+  /*
+   * Asks the delegate to handle an error that occurred during data cloning, by
+   * throwing an exception appropriate for the host.
+   */
+  void ThrowDataCloneError(MessageTemplate::Template template_index);
+  V8_NOINLINE void ThrowDataCloneError(MessageTemplate::Template template_index,
+                                       Handle<Object> arg0);
+
   Isolate* const isolate_;
+  v8::ValueSerializer::Delegate* const delegate_;
   std::vector<uint8_t> buffer_;
   Zone zone_;
 
@@ -98,6 +134,9 @@
   IdentityMap<uint32_t> id_map_;
   uint32_t next_id_ = 0;
 
+  // A similar map, for transferred array buffers.
+  IdentityMap<uint32_t> array_buffer_transfer_map_;
+
   DISALLOW_COPY_AND_ASSIGN(ValueSerializer);
 };
 
@@ -107,7 +146,8 @@
  */
 class ValueDeserializer {
  public:
-  ValueDeserializer(Isolate* isolate, Vector<const uint8_t> data);
+  ValueDeserializer(Isolate* isolate, Vector<const uint8_t> data,
+                    v8::ValueDeserializer::Delegate* delegate);
   ~ValueDeserializer();
 
   /*
@@ -116,6 +156,13 @@
   Maybe<bool> ReadHeader() WARN_UNUSED_RESULT;
 
   /*
+   * Reads the underlying wire format version. Mostly useful for legacy code
+   * that reads old wire format versions. Must be called after
+   * ReadHeader.
+   */
+  uint32_t GetWireFormatVersion() const { return version_; }
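// Illustrative sketch (hypothetical, not part of this diff): a caller that
// reads the header first, then checks the wire format version and falls back
// to the legacy whole-buffer reader for version 0 data. Assumes
// src/value-serializer.h is available; the fallback policy here is an
// assumption, not the embedder's required behavior.
#include "src/value-serializer.h"

bool ToyDeserialize(v8::internal::ValueDeserializer* deserializer) {
  if (!deserializer->ReadHeader().FromMaybe(false)) return false;
  if (deserializer->GetWireFormatVersion() == 0) {
    // Pre-versioned data: only the legacy reader understands it.
    return !deserializer->ReadObjectUsingEntireBufferForLegacyFormat()
                .is_null();
  }
  return !deserializer->ReadObject().is_null();
}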
+
+  /*
    * Deserializes a V8 object from the buffer.
    */
   MaybeHandle<Object> ReadObject() WARN_UNUSED_RESULT;
@@ -130,6 +177,22 @@
   MaybeHandle<Object> ReadObjectUsingEntireBufferForLegacyFormat()
       WARN_UNUSED_RESULT;
 
+  /*
+   * Accepts the array buffer corresponding to the one passed previously to
+   * ValueSerializer::TransferArrayBuffer.
+   */
+  void TransferArrayBuffer(uint32_t transfer_id,
+                           Handle<JSArrayBuffer> array_buffer);
+
+  /*
+   * Publicly exposed wire format reading methods.
+   * These are intended for use within the delegate's ReadHostObject method.
+   */
+  bool ReadUint32(uint32_t* value) WARN_UNUSED_RESULT;
+  bool ReadUint64(uint64_t* value) WARN_UNUSED_RESULT;
+  bool ReadDouble(double* value) WARN_UNUSED_RESULT;
+  bool ReadRawBytes(size_t length, const void** data) WARN_UNUSED_RESULT;
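// Illustrative sketch (hypothetical, not part of this diff): the matching
// decode for the blob layout used in the serializer-side sketch earlier,
// built only on the read helpers declared above. Assumes
// src/value-serializer.h is available.
#include <cstdint>
#include <vector>
#include "src/value-serializer.h"

bool ReadToyBlob(v8::internal::ValueDeserializer* deserializer,
                 std::vector<uint8_t>* out) {
  uint32_t tag = 0, size = 0;
  const void* data = nullptr;
  if (!deserializer->ReadUint32(&tag) || tag != 0xB10B) return false;
  if (!deserializer->ReadUint32(&size)) return false;
  if (!deserializer->ReadRawBytes(size, &data)) return false;
  const uint8_t* bytes = static_cast<const uint8_t*>(data);
  out->assign(bytes, bytes + size);
  return true;
}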
+
  private:
   // Reading the wire format.
   Maybe<SerializationTag> PeekTag() const WARN_UNUSED_RESULT;
@@ -142,6 +205,14 @@
   Maybe<double> ReadDouble() WARN_UNUSED_RESULT;
   Maybe<Vector<const uint8_t>> ReadRawBytes(int size) WARN_UNUSED_RESULT;
 
+  // Reads a string if it matches the one provided.
+  // Returns true if this was the case. Otherwise, nothing is consumed.
+  bool ReadExpectedString(Handle<String> expected) WARN_UNUSED_RESULT;
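// Illustrative sketch (hypothetical, standalone): the "consume only on match"
// contract of ReadExpectedString above, shown on a plain byte cursor.
#include <cstring>

// Advances *pos past `expected` only when the next bytes match it exactly;
// otherwise the cursor is left untouched and false is returned.
bool ConsumeIfMatches(const char** pos, const char* end, const char* expected) {
  const std::size_t n = std::strlen(expected);
  if (static_cast<std::size_t>(end - *pos) < n) return false;
  if (std::memcmp(*pos, expected, n) != 0) return false;
  *pos += n;
  return true;
}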
+
+  // Like ReadObject, but skips logic for special cases in simulating the
+  // "stack machine".
+  MaybeHandle<Object> ReadObjectInternal() WARN_UNUSED_RESULT;
+
   // Reading V8 objects of specific kinds.
   // The tag is assumed to have already been read.
   MaybeHandle<String> ReadUtf8String() WARN_UNUSED_RESULT;
@@ -152,13 +223,22 @@
   MaybeHandle<JSDate> ReadJSDate() WARN_UNUSED_RESULT;
   MaybeHandle<JSValue> ReadJSValue(SerializationTag tag) WARN_UNUSED_RESULT;
   MaybeHandle<JSRegExp> ReadJSRegExp() WARN_UNUSED_RESULT;
+  MaybeHandle<JSMap> ReadJSMap() WARN_UNUSED_RESULT;
+  MaybeHandle<JSSet> ReadJSSet() WARN_UNUSED_RESULT;
+  MaybeHandle<JSArrayBuffer> ReadJSArrayBuffer() WARN_UNUSED_RESULT;
+  MaybeHandle<JSArrayBuffer> ReadTransferredJSArrayBuffer(bool is_shared)
+      WARN_UNUSED_RESULT;
+  MaybeHandle<JSArrayBufferView> ReadJSArrayBufferView(
+      Handle<JSArrayBuffer> buffer) WARN_UNUSED_RESULT;
+  MaybeHandle<JSObject> ReadHostObject() WARN_UNUSED_RESULT;
 
   /*
    * Reads key-value pairs into the object until the specified end tag is
    * encountered. If successful, returns the number of properties read.
    */
   Maybe<uint32_t> ReadJSObjectProperties(Handle<JSObject> object,
-                                         SerializationTag end_tag);
+                                         SerializationTag end_tag,
+                                         bool can_use_transitions);
 
   // Manipulating the map from IDs to reified objects.
   bool HasObjectWithID(uint32_t id);
@@ -166,12 +246,17 @@
   void AddObjectWithID(uint32_t id, Handle<JSReceiver> object);
 
   Isolate* const isolate_;
+  v8::ValueDeserializer::Delegate* const delegate_;
   const uint8_t* position_;
   const uint8_t* const end_;
+  PretenureFlag pretenure_;
   uint32_t version_ = 0;
-  Handle<SeededNumberDictionary> id_map_;  // Always a global handle.
   uint32_t next_id_ = 0;
 
+  // Always global handles.
+  Handle<FixedArray> id_map_;
+  MaybeHandle<SeededNumberDictionary> array_buffer_transfer_map_;
+
   DISALLOW_COPY_AND_ASSIGN(ValueDeserializer);
 };
 
diff --git a/src/wasm/ast-decoder.cc b/src/wasm/ast-decoder.cc
index 0f19250..02d1db5 100644
--- a/src/wasm/ast-decoder.cc
+++ b/src/wasm/ast-decoder.cc
@@ -7,7 +7,7 @@
 #include "src/bit-vector.h"
 #include "src/flags.h"
 #include "src/handles.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 #include "src/wasm/ast-decoder.h"
 #include "src/wasm/decoder.h"
@@ -36,6 +36,8 @@
     error("Invalid opcode (enable with --" #flag ")"); \
     break;                                             \
   }
+// TODO(titzer): this is only for intermediate migration.
+#define IMPLICIT_FUNCTION_END 1
 
 // An SsaEnv environment carries the current local variable renaming
 // as well as the current effect and control dependency in the TF graph.
@@ -68,62 +70,82 @@
   LocalType type;
 };
 
+struct TryInfo : public ZoneObject {
+  SsaEnv* catch_env;
+  TFNode* exception;
+
+  explicit TryInfo(SsaEnv* c) : catch_env(c), exception(nullptr) {}
+};
+
+struct MergeValues {
+  uint32_t arity;
+  union {
+    Value* array;
+    Value first;
+  } vals;  // Either multiple values or a single value.
+
+  Value& first() {
+    DCHECK_GT(arity, 0u);
+    return arity == 1 ? vals.first : vals.array[0];
+  }
+};
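// Illustrative sketch (hypothetical, standalone): the layout trick used by
// MergeValues above - keep a single result inline and only use a pointer to
// an externally owned array when there are several, discriminated by arity.
#include <cassert>
#include <cstdint>

struct ToyMerge {
  uint32_t arity;
  union {
    int* array;  // used when arity > 1; storage owned elsewhere (e.g. a zone)
    int first;   // used when arity == 1
  } vals;

  int& First() {
    assert(arity > 0);
    return arity == 1 ? vals.first : vals.array[0];
  }
};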
+
+static Value* NO_VALUE = nullptr;
+
+enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
+
 // An entry on the control stack (i.e. if, block, loop).
 struct Control {
   const byte* pc;
-  int stack_depth;         // stack height at the beginning of the construct.
-  SsaEnv* end_env;         // end environment for the construct.
-  SsaEnv* false_env;       // false environment (only for if).
-  SsaEnv* catch_env;       // catch environment (only for try with catch).
-  SsaEnv* finish_try_env;  // the environment where a try with finally lives.
-  TFNode* node;            // result node for the construct.
-  LocalType type;          // result type for the construct.
-  bool is_loop;            // true if this is the inner label of a loop.
+  ControlKind kind;
+  int stack_depth;    // stack height at the beginning of the construct.
+  SsaEnv* end_env;    // end environment for the construct.
+  SsaEnv* false_env;  // false environment (only for if).
+  TryInfo* try_info;  // Information used for compiling try statements.
+  int32_t previous_catch;  // The previous Control (on the stack) with a catch.
 
-  bool is_if() const { return *pc == kExprIf; }
+  // Values merged into the end of this control construct.
+  MergeValues merge;
 
-  bool is_try() const {
-    return *pc == kExprTryCatch || *pc == kExprTryCatchFinally ||
-           *pc == kExprTryFinally;
-  }
-
-  bool has_catch() const {
-    return *pc == kExprTryCatch || *pc == kExprTryCatchFinally;
-  }
-
-  bool has_finally() const {
-    return *pc == kExprTryCatchFinally || *pc == kExprTryFinally;
-  }
+  inline bool is_if() const { return kind == kControlIf; }
+  inline bool is_block() const { return kind == kControlBlock; }
+  inline bool is_loop() const { return kind == kControlLoop; }
+  inline bool is_try() const { return kind == kControlTry; }
 
   // Named constructors.
-  static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) {
-    return {pc,      stack_depth, end_env, nullptr, nullptr,
-            nullptr, nullptr,     kAstEnd, false};
+  static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env,
+                       int32_t previous_catch) {
+    return {pc,      kControlBlock, stack_depth,    end_env,
+            nullptr, nullptr,       previous_catch, {0, {NO_VALUE}}};
   }
 
   static Control If(const byte* pc, int stack_depth, SsaEnv* end_env,
-                    SsaEnv* false_env) {
-    return {pc,      stack_depth, end_env,  false_env, nullptr,
-            nullptr, nullptr,     kAstStmt, false};
+                    SsaEnv* false_env, int32_t previous_catch) {
+    return {pc,        kControlIf, stack_depth,    end_env,
+            false_env, nullptr,    previous_catch, {0, {NO_VALUE}}};
   }
 
-  static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) {
-    return {pc,      stack_depth, end_env, nullptr, nullptr,
-            nullptr, nullptr,     kAstEnd, true};
+  static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env,
+                      int32_t previous_catch) {
+    return {pc,      kControlLoop, stack_depth,    end_env,
+            nullptr, nullptr,      previous_catch, {0, {NO_VALUE}}};
   }
 
   static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env,
-                     SsaEnv* catch_env, SsaEnv* finish_try_env) {
-    return {pc,      stack_depth, end_env, nullptr, catch_env, finish_try_env,
-            nullptr, kAstEnd,     false};
+                     Zone* zone, SsaEnv* catch_env, int32_t previous_catch) {
+    DCHECK_NOT_NULL(catch_env);
+    TryInfo* try_info = new (zone) TryInfo(catch_env);
+    return {pc,      kControlTry, stack_depth,    end_env,
+            nullptr, try_info,    previous_catch, {0, {NO_VALUE}}};
   }
 };
 
 // Macros that build nodes only if there is a graph and the current SSA
 // environment is reachable from start. This avoids problems with malformed
 // TF graphs when decoding inputs that have unreachable code.
-#define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr)
-#define BUILD0(func) (build() ? builder_->func() : nullptr)
+#define BUILD(func, ...) \
+  (build() ? CheckForException(builder_->func(__VA_ARGS__)) : nullptr)
+#define BUILD0(func) (build() ? CheckForException(builder_->func()) : nullptr)
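// Illustrative sketch (hypothetical, standalone): the guard that BUILD/BUILD0
// express as macros - only call into the graph builder when a builder exists
// and the current SSA environment is reachable; otherwise yield nullptr.
struct ToyBuilder {
  // Intentionally simplified; a real builder allocates graph nodes in a zone.
  int* Int32Constant(int value) { return new int(value); }
};

struct ToyDecoder {
  ToyBuilder* builder = nullptr;  // null when only validating, not building
  bool reachable = true;          // false once code becomes unreachable
  bool build() const { return builder != nullptr && reachable; }

  int* BuildInt32Constant(int value) {
    return build() ? builder->Int32Constant(value) : nullptr;
  }
};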
 
 // Generic Wasm bytecode decoder with utilities for decoding operands,
 // lengths, etc.
@@ -150,17 +172,18 @@
       }
       return true;
     }
-    error(pc, pc + 1, "invalid local index");
+    error(pc, pc + 1, "invalid local index: %u", operand.index);
     return false;
   }
 
   inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
     ModuleEnv* m = module_;
     if (m && m->module && operand.index < m->module->globals.size()) {
-      operand.type = m->module->globals[operand.index].type;
+      operand.global = &m->module->globals[operand.index];
+      operand.type = operand.global->type;
       return true;
     }
-    error(pc, pc + 1, "invalid global index");
+    error(pc, pc + 1, "invalid global index: %u", operand.index);
     return false;
   }
 
@@ -175,16 +198,9 @@
 
   inline bool Validate(const byte* pc, CallFunctionOperand& operand) {
     if (Complete(pc, operand)) {
-      uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
-      if (operand.arity != expected) {
-        error(pc, pc + 1,
-              "arity mismatch in direct function call (expected %u, got %u)",
-              expected, operand.arity);
-        return false;
-      }
       return true;
     }
-    error(pc, pc + 1, "invalid function index");
+    error(pc, pc + 1, "invalid function index: %u", operand.index);
     return false;
   }
 
@@ -199,161 +215,28 @@
 
   inline bool Validate(const byte* pc, CallIndirectOperand& operand) {
     if (Complete(pc, operand)) {
-      uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
-      if (operand.arity != expected) {
-        error(pc, pc + 1,
-              "arity mismatch in indirect function call (expected %u, got %u)",
-              expected, operand.arity);
-        return false;
-      }
       return true;
     }
-    error(pc, pc + 1, "invalid signature index");
-    return false;
-  }
-
-  inline bool Complete(const byte* pc, CallImportOperand& operand) {
-    ModuleEnv* m = module_;
-    if (m && m->module && operand.index < m->module->import_table.size()) {
-      operand.sig = m->module->import_table[operand.index].sig;
-      return true;
-    }
-    return false;
-  }
-
-  inline bool Validate(const byte* pc, CallImportOperand& operand) {
-    if (Complete(pc, operand)) {
-      uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
-      if (operand.arity != expected) {
-        error(pc, pc + 1, "arity mismatch in import call (expected %u, got %u)",
-              expected, operand.arity);
-        return false;
-      }
-      return true;
-    }
-    error(pc, pc + 1, "invalid signature index");
+    error(pc, pc + 1, "invalid signature index: #%u", operand.index);
     return false;
   }
 
   inline bool Validate(const byte* pc, BreakDepthOperand& operand,
                        ZoneVector<Control>& control) {
-    if (operand.arity > 1) {
-      error(pc, pc + 1, "invalid arity for br or br_if");
-      return false;
-    }
     if (operand.depth < control.size()) {
       operand.target = &control[control.size() - operand.depth - 1];
       return true;
     }
-    error(pc, pc + 1, "invalid break depth");
+    error(pc, pc + 1, "invalid break depth: %u", operand.depth);
     return false;
   }
 
   bool Validate(const byte* pc, BranchTableOperand& operand,
                 size_t block_depth) {
-    if (operand.arity > 1) {
-      error(pc, pc + 1, "invalid arity for break");
-      return false;
-    }
-    // Verify table.
-    for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
-      uint32_t target = operand.read_entry(this, i);
-      if (target >= block_depth) {
-        error(operand.table + i * 2, "improper branch in br_table");
-        return false;
-      }
-    }
+    // TODO(titzer): add extra redundant validation for br_table here?
     return true;
   }
 
-  unsigned OpcodeArity(const byte* pc) {
-#define DECLARE_ARITY(name, ...)                          \
-  static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
-  static const int kArity_##name =                        \
-      static_cast<int>(arraysize(kTypes_##name) - 1);
-
-    FOREACH_SIGNATURE(DECLARE_ARITY);
-#undef DECLARE_ARITY
-
-    switch (static_cast<WasmOpcode>(*pc)) {
-      case kExprI8Const:
-      case kExprI32Const:
-      case kExprI64Const:
-      case kExprF64Const:
-      case kExprF32Const:
-      case kExprGetLocal:
-      case kExprGetGlobal:
-      case kExprNop:
-      case kExprUnreachable:
-      case kExprEnd:
-      case kExprBlock:
-      case kExprThrow:
-      case kExprTryCatch:
-      case kExprTryCatchFinally:
-      case kExprTryFinally:
-      case kExprFinally:
-      case kExprLoop:
-        return 0;
-
-      case kExprSetGlobal:
-      case kExprSetLocal:
-      case kExprElse:
-      case kExprCatch:
-        return 1;
-
-      case kExprBr: {
-        BreakDepthOperand operand(this, pc);
-        return operand.arity;
-      }
-      case kExprBrIf: {
-        BreakDepthOperand operand(this, pc);
-        return 1 + operand.arity;
-      }
-      case kExprBrTable: {
-        BranchTableOperand operand(this, pc);
-        return 1 + operand.arity;
-      }
-
-      case kExprIf:
-        return 1;
-      case kExprSelect:
-        return 3;
-
-      case kExprCallFunction: {
-        CallFunctionOperand operand(this, pc);
-        return operand.arity;
-      }
-      case kExprCallIndirect: {
-        CallIndirectOperand operand(this, pc);
-        return 1 + operand.arity;
-      }
-      case kExprCallImport: {
-        CallImportOperand operand(this, pc);
-        return operand.arity;
-      }
-      case kExprReturn: {
-        ReturnArityOperand operand(this, pc);
-        return operand.arity;
-      }
-
-#define DECLARE_OPCODE_CASE(name, opcode, sig) \
-  case kExpr##name:                            \
-    return kArity_##sig;
-
-        FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_SIMPLE_MEM_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE)
-        FOREACH_SIMD_OPCODE(DECLARE_OPCODE_CASE)
-#undef DECLARE_OPCODE_CASE
-      default:
-        UNREACHABLE();
-        return 0;
-    }
-  }
-
   unsigned OpcodeLength(const byte* pc) {
     switch (static_cast<WasmOpcode>(*pc)) {
 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
@@ -361,7 +244,7 @@
       FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
 #undef DECLARE_OPCODE_CASE
       {
-        MemoryAccessOperand operand(this, pc);
+        MemoryAccessOperand operand(this, pc, UINT32_MAX);
         return 1 + operand.length;
       }
       case kExprBr:
@@ -383,12 +266,17 @@
         CallIndirectOperand operand(this, pc);
         return 1 + operand.length;
       }
-      case kExprCallImport: {
-        CallImportOperand operand(this, pc);
+
+      case kExprTry:
+      case kExprIf:  // fall thru
+      case kExprLoop:
+      case kExprBlock: {
+        BlockTypeOperand operand(this, pc);
         return 1 + operand.length;
       }
 
       case kExprSetLocal:
+      case kExprTeeLocal:
       case kExprGetLocal:
       case kExprCatch: {
         LocalIndexOperand operand(this, pc);
@@ -396,7 +284,8 @@
       }
       case kExprBrTable: {
         BranchTableOperand operand(this, pc);
-        return 1 + operand.length;
+        BranchTableIterator iterator(this, operand);
+        return 1 + iterator.length();
       }
       case kExprI32Const: {
         ImmI32Operand operand(this, pc);
@@ -412,17 +301,14 @@
         return 5;
       case kExprF64Const:
         return 9;
-      case kExprReturn: {
-        ReturnArityOperand operand(this, pc);
-        return 1 + operand.length;
-      }
-
       default:
         return 1;
     }
   }
 };
 
+static const int32_t kNullCatch = -1;
+
 // The full WASM decoder for bytecode. Both verifies bytecode and generates
 // a TurboFan IR graph.
 class WasmFullDecoder : public WasmDecoder {
@@ -434,7 +320,9 @@
         base_(body.base),
         local_type_vec_(zone),
         stack_(zone),
-        control_(zone) {
+        control_(zone),
+        last_end_found_(false),
+        current_catch_(kNullCatch) {
     local_types_ = &local_type_vec_;
   }
 
@@ -447,7 +335,7 @@
     control_.clear();
 
     if (end_ < pc_) {
-      error(pc_, "function body end < start");
+      error("function body end < start");
       return false;
     }
 
@@ -457,23 +345,55 @@
 
     if (failed()) return TraceFailed();
 
+#if IMPLICIT_FUNCTION_END
+    // With implicit end support (old style), the function block
+    // remains on the stack. Other control blocks are an error.
+    if (control_.size() > 1) {
+      error(pc_, control_.back().pc, "unterminated control structure");
+      return TraceFailed();
+    }
+
+    // Assume an implicit end to the function body block.
+    if (control_.size() == 1) {
+      Control* c = &control_.back();
+      if (ssa_env_->go()) {
+        FallThruTo(c);
+      }
+
+      if (c->end_env->go()) {
+        // Push the end values onto the stack.
+        stack_.resize(c->stack_depth);
+        if (c->merge.arity == 1) {
+          stack_.push_back(c->merge.vals.first);
+        } else {
+          for (unsigned i = 0; i < c->merge.arity; i++) {
+            stack_.push_back(c->merge.vals.array[i]);
+          }
+        }
+
+        TRACE("  @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
+        SetEnv("function:end", c->end_env);
+        DoReturn();
+        TRACE("\n");
+      }
+    }
+#else
     if (!control_.empty()) {
       error(pc_, control_.back().pc, "unterminated control structure");
       return TraceFailed();
     }
 
-    if (ssa_env_->go()) {
-      TRACE("  @%-6d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
-      DoReturn();
-      if (failed()) return TraceFailed();
-      TRACE("\n");
+    if (!last_end_found_) {
+      error("function body must end with \"end\" opcode.");
+      return false;
     }
+#endif
 
     if (FLAG_trace_wasm_decode_time) {
       double ms = decode_timer.Elapsed().InMillisecondsF();
-      PrintF("wasm-decode ok (%0.3f ms)\n\n", ms);
+      PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms);
     } else {
-      TRACE("wasm-decode ok\n\n");
+      TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed");
     }
 
     return true;
@@ -526,6 +446,11 @@
   ZoneVector<LocalType> local_type_vec_;  // types of local variables.
   ZoneVector<Value> stack_;               // stack of values.
   ZoneVector<Control> control_;           // stack of blocks, loops, and ifs.
+  bool last_end_found_;
+
+  int32_t current_catch_;
+
+  TryInfo* current_try_info() { return control_[current_catch_].try_info; }
 
   inline bool build() { return builder_ && ssa_env_->go(); }
 
@@ -574,6 +499,8 @@
         return builder_->Float32Constant(0);
       case kAstF64:
         return builder_->Float64Constant(0);
+      case kAstS128:
+        return builder_->DefaultS128Value();
       default:
         UNREACHABLE();
         return nullptr;
@@ -603,8 +530,13 @@
     }
     // Decode local declarations, if any.
     uint32_t entries = consume_u32v("local decls count");
+    TRACE("local decls count: %u\n", entries);
     while (entries-- > 0 && pc_ < limit_) {
       uint32_t count = consume_u32v("local count");
+      if (count > kMaxNumWasmLocals) {
+        error(pc_ - 1, "local count too large");
+        return;
+      }
       byte code = consume_u8("local type");
       LocalType type;
       switch (code) {
@@ -620,6 +552,9 @@
         case kLocalF64:
           type = kAstF64;
           break;
+        case kLocalS128:
+          type = kAstS128;
+          break;
         default:
           error(pc_ - 1, "invalid local type");
           return;
@@ -636,82 +571,68 @@
           reinterpret_cast<const void*>(limit_), baserel(pc_),
           static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
 
+    {
+      // Set up initial function block.
+      SsaEnv* break_env = ssa_env_;
+      SetEnv("initial env", Steal(break_env));
+      PushBlock(break_env);
+      Control* c = &control_.back();
+      c->merge.arity = static_cast<uint32_t>(sig_->return_count());
+
+      if (c->merge.arity == 1) {
+        c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)};
+      } else if (c->merge.arity > 1) {
+        c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
+        for (unsigned i = 0; i < c->merge.arity; i++) {
+          c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)};
+        }
+      }
+    }
+
     if (pc_ >= limit_) return;  // Nothing to do.
 
     while (true) {  // decoding loop.
       unsigned len = 1;
       WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
-      TRACE("  @%-6d #%02x:%-20s|", startrel(pc_), opcode,
-            WasmOpcodes::ShortOpcodeName(opcode));
+      if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
+        TRACE("  @%-8d #%02x:%-20s|", startrel(pc_), opcode,
+              WasmOpcodes::ShortOpcodeName(opcode));
+      }
 
       FunctionSig* sig = WasmOpcodes::Signature(opcode);
       if (sig) {
-        // Fast case of a simple operator.
-        TFNode* node;
-        switch (sig->parameter_count()) {
-          case 1: {
-            Value val = Pop(0, sig->GetParam(0));
-            node = BUILD(Unop, opcode, val.node, position());
-            break;
-          }
-          case 2: {
-            Value rval = Pop(1, sig->GetParam(1));
-            Value lval = Pop(0, sig->GetParam(0));
-            node = BUILD(Binop, opcode, lval.node, rval.node, position());
-            break;
-          }
-          default:
-            UNREACHABLE();
-            node = nullptr;
-            break;
-        }
-        Push(GetReturnType(sig), node);
+        BuildSimpleOperator(opcode, sig);
       } else {
         // Complex bytecode.
         switch (opcode) {
           case kExprNop:
-            Push(kAstStmt, nullptr);
             break;
           case kExprBlock: {
             // The break environment is the outer environment.
+            BlockTypeOperand operand(this, pc_);
             SsaEnv* break_env = ssa_env_;
             PushBlock(break_env);
             SetEnv("block:start", Steal(break_env));
+            SetBlockType(&control_.back(), operand);
+            len = 1 + operand.length;
             break;
           }
           case kExprThrow: {
             CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
-            Pop(0, kAstI32);
-
-            // TODO(jpp): start exception propagation.
+            Value value = Pop(0, kAstI32);
+            BUILD(Throw, value.node);
             break;
           }
-          case kExprTryCatch: {
+          case kExprTry: {
             CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
+            BlockTypeOperand operand(this, pc_);
             SsaEnv* outer_env = ssa_env_;
             SsaEnv* try_env = Steal(outer_env);
-            SsaEnv* catch_env = Split(try_env);
-            PushTry(outer_env, catch_env, nullptr);
+            SsaEnv* catch_env = UnreachableEnv();
+            PushTry(outer_env, catch_env);
             SetEnv("try_catch:start", try_env);
-            break;
-          }
-          case kExprTryCatchFinally: {
-            CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
-            SsaEnv* outer_env = ssa_env_;
-            SsaEnv* try_env = Steal(outer_env);
-            SsaEnv* catch_env = Split(try_env);
-            SsaEnv* finally_env = Split(try_env);
-            PushTry(finally_env, catch_env, outer_env);
-            SetEnv("try_catch_finally:start", try_env);
-            break;
-          }
-          case kExprTryFinally: {
-            CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
-            SsaEnv* outer_env = ssa_env_;
-            SsaEnv* try_env = Steal(outer_env);
-            SsaEnv* finally_env = Split(outer_env);
-            PushTry(finally_env, nullptr, outer_env);
-            SetEnv("try_finally:start", try_env);
+            SetBlockType(&control_.back(), operand);
+            len = 1 + operand.length;
             break;
           }
           case kExprCatch: {
@@ -720,97 +641,57 @@
             len = 1 + operand.length;
 
             if (control_.empty()) {
-              error(pc_, "catch does not match a any try");
+              error("catch does not match any try");
               break;
             }
 
             Control* c = &control_.back();
-            if (!c->has_catch()) {
-              error(pc_, "catch does not match a try with catch");
+            if (!c->is_try()) {
+              error("catch does not match any try");
               break;
             }
 
-            if (c->catch_env == nullptr) {
+            if (c->try_info->catch_env == nullptr) {
               error(pc_, "catch already present for try with catch");
               break;
             }
 
-            Goto(ssa_env_, c->end_env);
+            if (ssa_env_->go()) {
+              MergeValuesInto(c);
+            }
+            stack_.resize(c->stack_depth);
 
-            SsaEnv* catch_env = c->catch_env;
-            c->catch_env = nullptr;
+            DCHECK_NOT_NULL(c->try_info);
+            SsaEnv* catch_env = c->try_info->catch_env;
+            c->try_info->catch_env = nullptr;
             SetEnv("catch:begin", catch_env);
+            current_catch_ = c->previous_catch;
 
             if (Validate(pc_, operand)) {
-              // TODO(jpp): figure out how thrown value is propagated. It is
-              // unlikely to be a value on the stack.
               if (ssa_env_->locals) {
-                ssa_env_->locals[operand.index] = nullptr;
+                TFNode* exception_as_i32 =
+                    BUILD(Catch, c->try_info->exception, position());
+                ssa_env_->locals[operand.index] = exception_as_i32;
               }
             }
 
-            PopUpTo(c->stack_depth);
-
-            break;
-          }
-          case kExprFinally: {
-            CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
-            if (control_.empty()) {
-              error(pc_, "finally does not match a any try");
-              break;
-            }
-
-            Control* c = &control_.back();
-            if (c->has_catch() && c->catch_env != nullptr) {
-              error(pc_, "missing catch for try with catch and finally");
-              break;
-            }
-
-            if (!c->has_finally()) {
-              error(pc_, "finally does not match a try with finally");
-              break;
-            }
-
-            if (c->finish_try_env == nullptr) {
-              error(pc_, "finally already present for try with finally");
-              break;
-            }
-
-            // ssa_env_ is either the env for either the try or the catch, but
-            // it does not matter: either way we need to direct the control flow
-            // to the end_env, which is the env for the finally.
-            // c->finish_try_env is the the environment enclosing the try block.
-            Goto(ssa_env_, c->end_env);
-
-            PopUpTo(c->stack_depth);
-
-            // The current environment becomes end_env, and finish_try_env
-            // becomes the new end_env. This ensures that any control flow
-            // leaving a try block up to now will do so by branching to the
-            // finally block. Setting the end_env to be finish_try_env ensures
-            // that kExprEnd below can handle the try block as it would any
-            // other block construct.
-            SsaEnv* finally_env = c->end_env;
-            c->end_env = c->finish_try_env;
-            SetEnv("finally:begin", finally_env);
-            c->finish_try_env = nullptr;
-
             break;
           }
           case kExprLoop: {
-            // The break environment is the outer environment.
-            SsaEnv* break_env = ssa_env_;
-            PushBlock(break_env);
-            SsaEnv* finish_try_env = Steal(break_env);
+            BlockTypeOperand operand(this, pc_);
+            SsaEnv* finish_try_env = Steal(ssa_env_);
             // The continue environment is the inner environment.
             PrepareForLoop(pc_, finish_try_env);
             SetEnv("loop:start", Split(finish_try_env));
             ssa_env_->SetNotMerged();
             PushLoop(finish_try_env);
+            SetBlockType(&control_.back(), operand);
+            len = 1 + operand.length;
             break;
           }
           case kExprIf: {
             // Condition on top of stack. Split environments for branches.
+            BlockTypeOperand operand(this, pc_);
             Value cond = Pop(0, kAstI32);
             TFNode* if_true = nullptr;
             TFNode* if_false = nullptr;
@@ -822,11 +703,13 @@
             true_env->control = if_true;
             PushIf(end_env, false_env);
             SetEnv("if:true", true_env);
+            SetBlockType(&control_.back(), operand);
+            len = 1 + operand.length;
             break;
           }
           case kExprElse: {
             if (control_.empty()) {
-              error(pc_, "else does not match any if");
+              error("else does not match any if");
               break;
             }
             Control* c = &control_.back();
@@ -838,31 +721,38 @@
               error(pc_, c->pc, "else already present for if");
               break;
             }
-            Value val = PopUpTo(c->stack_depth);
-            MergeInto(c->end_env, &c->node, &c->type, val);
+            FallThruTo(c);
             // Switch to environment for false branch.
+            stack_.resize(c->stack_depth);
             SetEnv("if_else:false", c->false_env);
             c->false_env = nullptr;  // record that an else is already seen
             break;
           }
           case kExprEnd: {
             if (control_.empty()) {
-              error(pc_, "end does not match any if or block");
-              break;
+              error("end does not match any if, try, or block");
+              return;
             }
             const char* name = "block:end";
             Control* c = &control_.back();
-            Value val = PopUpTo(c->stack_depth);
-            if (c->is_loop) {
-              // Loops always push control in pairs.
-              control_.pop_back();
-              c = &control_.back();
-              name = "loop:end";
-            } else if (c->is_if()) {
+            if (c->is_loop()) {
+              // A loop just leaves the values on the stack.
+              TypeCheckLoopFallThru(c);
+              PopControl();
+              SetEnv("loop:end", ssa_env_);
+              break;
+            }
+            if (c->is_if()) {
               if (c->false_env != nullptr) {
                 // End the true branch of a one-armed if.
                 Goto(c->false_env, c->end_env);
-                val = {val.pc, nullptr, kAstStmt};
+                if (ssa_env_->go() && stack_.size() != c->stack_depth) {
+                  error("end of if expected empty stack");
+                  stack_.resize(c->stack_depth);
+                }
+                if (c->merge.arity > 0) {
+                  error("non-void one-armed if");
+                }
                 name = "if:merge";
               } else {
                 // End the false branch of a two-armed if.
@@ -871,28 +761,41 @@
             } else if (c->is_try()) {
               name = "try:end";
 
-              // try blocks do not yield a value.
-              val = {val.pc, nullptr, kAstStmt};
-
-              // validate that catch/finally were seen.
-              if (c->catch_env != nullptr) {
-                error(pc_, "missing catch in try with catch");
-                break;
-              }
-
-              if (c->finish_try_env != nullptr) {
-                error(pc_, "missing finally in try with finally");
+              // validate that catch was seen.
+              if (c->try_info->catch_env != nullptr) {
+                error(pc_, "missing catch in try");
                 break;
               }
             }
-
-            if (ssa_env_->go()) {
-              MergeInto(c->end_env, &c->node, &c->type, val);
-            }
+            FallThruTo(c);
             SetEnv(name, c->end_env);
+
+            // Push the end values onto the stack.
             stack_.resize(c->stack_depth);
-            Push(c->type, c->node);
-            control_.pop_back();
+            if (c->merge.arity == 1) {
+              stack_.push_back(c->merge.vals.first);
+            } else {
+              for (unsigned i = 0; i < c->merge.arity; i++) {
+                stack_.push_back(c->merge.vals.array[i]);
+              }
+            }
+
+            PopControl();
+
+            if (control_.empty()) {
+              // If the last (implicit) control was popped, check we are at end.
+              if (pc_ + 1 != end_) {
+                error(pc_, pc_ + 1, "trailing code after function end");
+              }
+              last_end_found_ = true;
+              if (ssa_env_->go()) {
+                // The result of the block is the return value.
+                TRACE("  @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
+                DoReturn();
+                TRACE("\n");
+              }
+              return;
+            }
             break;
           }
           case kExprSelect: {
@@ -901,7 +804,7 @@
             Value tval = Pop();
             if (tval.type == kAstStmt || tval.type != fval.type) {
               if (tval.type != kAstEnd && fval.type != kAstEnd) {
-                error(pc_, "type mismatch in select");
+                error("type mismatch in select");
                 break;
               }
             }
@@ -923,39 +826,33 @@
           }
           case kExprBr: {
             BreakDepthOperand operand(this, pc_);
-            Value val = {pc_, nullptr, kAstStmt};
-            if (operand.arity) val = Pop();
             if (Validate(pc_, operand, control_)) {
-              BreakTo(operand.target, val);
+              BreakTo(operand.depth);
             }
             len = 1 + operand.length;
-            Push(kAstEnd, nullptr);
+            EndControl();
             break;
           }
           case kExprBrIf: {
             BreakDepthOperand operand(this, pc_);
-            Value cond = Pop(operand.arity, kAstI32);
-            Value val = {pc_, nullptr, kAstStmt};
-            if (operand.arity == 1) val = Pop();
-            if (Validate(pc_, operand, control_)) {
+            Value cond = Pop(0, kAstI32);
+            if (ok() && Validate(pc_, operand, control_)) {
               SsaEnv* fenv = ssa_env_;
               SsaEnv* tenv = Split(fenv);
               fenv->SetNotMerged();
               BUILD(Branch, cond.node, &tenv->control, &fenv->control);
               ssa_env_ = tenv;
-              BreakTo(operand.target, val);
+              BreakTo(operand.depth);
               ssa_env_ = fenv;
             }
             len = 1 + operand.length;
-            Push(kAstStmt, nullptr);
             break;
           }
           case kExprBrTable: {
             BranchTableOperand operand(this, pc_);
+            BranchTableIterator iterator(this, operand);
             if (Validate(pc_, operand, control_.size())) {
-              Value key = Pop(operand.arity, kAstI32);
-              Value val = {pc_, nullptr, kAstStmt};
-              if (operand.arity == 1) val = Pop();
+              Value key = Pop(0, kAstI32);
               if (failed()) break;
 
               SsaEnv* break_env = ssa_env_;
@@ -965,42 +862,43 @@
 
                 SsaEnv* copy = Steal(break_env);
                 ssa_env_ = copy;
-                for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
-                  uint16_t target = operand.read_entry(this, i);
+                while (iterator.has_next()) {
+                  uint32_t i = iterator.cur_index();
+                  const byte* pos = iterator.pc();
+                  uint32_t target = iterator.next();
+                  if (target >= control_.size()) {
+                    error(pos, "improper branch in br_table");
+                    break;
+                  }
                   ssa_env_ = Split(copy);
                   ssa_env_->control = (i == operand.table_count)
                                           ? BUILD(IfDefault, sw)
                                           : BUILD(IfValue, i, sw);
-                  int depth = target;
-                  Control* c = &control_[control_.size() - depth - 1];
-                  MergeInto(c->end_env, &c->node, &c->type, val);
+                  BreakTo(target);
                 }
               } else {
                 // Only a default target. Do the equivalent of br.
-                uint16_t target = operand.read_entry(this, 0);
-                int depth = target;
-                Control* c = &control_[control_.size() - depth - 1];
-                MergeInto(c->end_env, &c->node, &c->type, val);
+                const byte* pos = iterator.pc();
+                uint32_t target = iterator.next();
+                if (target >= control_.size()) {
+                  error(pos, "improper branch in br_table");
+                  break;
+                }
+                BreakTo(target);
               }
               // br_table ends the control flow like br.
               ssa_env_ = break_env;
-              Push(kAstStmt, nullptr);
             }
-            len = 1 + operand.length;
+            len = 1 + iterator.length();
             break;
           }
           case kExprReturn: {
-            ReturnArityOperand operand(this, pc_);
-            if (operand.arity != sig_->return_count()) {
-              error(pc_, pc_ + 1, "arity mismatch in return");
-            }
             DoReturn();
-            len = 1 + operand.length;
             break;
           }
           case kExprUnreachable: {
-            Push(kAstEnd, BUILD(Unreachable, position()));
-            ssa_env_->Kill(SsaEnv::kControlEnd);
+            BUILD(Unreachable, position());
+            EndControl();
             break;
           }
           case kExprI8Const: {
@@ -1050,11 +948,24 @@
             if (Validate(pc_, operand)) {
               Value val = Pop(0, local_type_vec_[operand.index]);
               if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
+            }
+            len = 1 + operand.length;
+            break;
+          }
+          case kExprTeeLocal: {
+            LocalIndexOperand operand(this, pc_);
+            if (Validate(pc_, operand)) {
+              Value val = Pop(0, local_type_vec_[operand.index]);
+              if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
               Push(val.type, val.node);
             }
             len = 1 + operand.length;
             break;
           }
+          case kExprDrop: {
+            Pop();
+            break;
+          }
           case kExprGetGlobal: {
             GlobalIndexOperand operand(this, pc_);
             if (Validate(pc_, operand)) {
@@ -1066,9 +977,13 @@
           case kExprSetGlobal: {
             GlobalIndexOperand operand(this, pc_);
             if (Validate(pc_, operand)) {
-              Value val = Pop(0, operand.type);
-              BUILD(SetGlobal, operand.index, val.node);
-              Push(val.type, val.node);
+              if (operand.global->mutability) {
+                Value val = Pop(0, operand.type);
+                BUILD(SetGlobal, operand.index, val.node);
+              } else {
+                error(pc_, pc_ + 1, "immutable global #%u cannot be assigned",
+                      operand.index);
+              }
             }
             len = 1 + operand.length;
             break;
@@ -1088,7 +1003,6 @@
           case kExprI32LoadMem:
             len = DecodeLoadMem(kAstI32, MachineType::Int32());
             break;
-
           case kExprI64LoadMem8S:
             len = DecodeLoadMem(kAstI64, MachineType::Int8());
             break;
@@ -1143,17 +1057,24 @@
           case kExprF64StoreMem:
             len = DecodeStoreMem(kAstF64, MachineType::Float64());
             break;
-
+          case kExprGrowMemory:
+            if (module_->origin != kAsmJsOrigin) {
+              Value val = Pop(0, kAstI32);
+              Push(kAstI32, BUILD(GrowMemory, val.node));
+            } else {
+              error("grow_memory is not supported for asmjs modules");
+            }
+            break;
           case kExprMemorySize:
-            Push(kAstI32, BUILD(MemSize, 0));
+            Push(kAstI32, BUILD(CurrentMemoryPages));
             break;
           case kExprCallFunction: {
             CallFunctionOperand operand(this, pc_);
             if (Validate(pc_, operand)) {
               TFNode** buffer = PopArgs(operand.sig);
-              TFNode* call =
-                  BUILD(CallDirect, operand.index, buffer, position());
-              Push(GetReturnType(operand.sig), call);
+              TFNode** rets = nullptr;
+              BUILD(CallDirect, operand.index, buffer, &rets, position());
+              PushReturns(operand.sig, rets);
             }
             len = 1 + operand.length;
             break;
@@ -1161,23 +1082,12 @@
           case kExprCallIndirect: {
             CallIndirectOperand operand(this, pc_);
             if (Validate(pc_, operand)) {
-              TFNode** buffer = PopArgs(operand.sig);
               Value index = Pop(0, kAstI32);
-              if (buffer) buffer[0] = index.node;
-              TFNode* call =
-                  BUILD(CallIndirect, operand.index, buffer, position());
-              Push(GetReturnType(operand.sig), call);
-            }
-            len = 1 + operand.length;
-            break;
-          }
-          case kExprCallImport: {
-            CallImportOperand operand(this, pc_);
-            if (Validate(pc_, operand)) {
               TFNode** buffer = PopArgs(operand.sig);
-              TFNode* call =
-                  BUILD(CallImport, operand.index, buffer, position());
-              Push(GetReturnType(operand.sig), call);
+              if (buffer) buffer[0] = index.node;
+              TFNode** rets = nullptr;
+              BUILD(CallIndirect, operand.index, buffer, &rets, position());
+              PushReturns(operand.sig, rets);
             }
             len = 1 + operand.length;
             break;
@@ -1187,20 +1097,34 @@
             len++;
             byte simd_index = *(pc_ + 1);
             opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index);
-            DecodeSimdOpcode(opcode);
+            TRACE("  @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix,
+                  simd_index, WasmOpcodes::ShortOpcodeName(opcode));
+            len += DecodeSimdOpcode(opcode);
             break;
           }
-          default:
-            error("Invalid opcode");
-            return;
+          default: {
+            // Deal with special asmjs opcodes.
+            if (module_ && module_->origin == kAsmJsOrigin) {
+              sig = WasmOpcodes::AsmjsSignature(opcode);
+              if (sig) {
+                BuildSimpleOperator(opcode, sig);
+              }
+            } else {
+              error("Invalid opcode");
+              return;
+            }
+          }
         }
-      }  // end complex bytecode
+      }
 
 #if DEBUG
       if (FLAG_trace_wasm_decoder) {
         for (size_t i = 0; i < stack_.size(); ++i) {
           Value& val = stack_[i];
           WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc);
+          if (WasmOpcodes::IsPrefixOpcode(opcode)) {
+            opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
+          }
           PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
                  static_cast<int>(val.pc - start_),
                  WasmOpcodes::ShortOpcodeName(opcode));
@@ -1215,7 +1139,8 @@
               PrintF("[%u]", operand.index);
               break;
             }
-            case kExprSetLocal: {
+            case kExprSetLocal:  // fallthru
+            case kExprTeeLocal: {
               LocalIndexOperand operand(this, val.pc);
               PrintF("[%u]", operand.index);
               break;
@@ -1234,7 +1159,21 @@
         return;
       }
     }  // end decode loop
-  }    // end DecodeFunctionBody()
+  }
+
+  void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); }
+
+  void SetBlockType(Control* c, BlockTypeOperand& operand) {
+    c->merge.arity = operand.arity;
+    if (c->merge.arity == 1) {
+      c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
+    } else if (c->merge.arity > 1) {
+      c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
+      for (unsigned i = 0; i < c->merge.arity; i++) {
+        c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
+      }
+    }
+  }
 
   TFNode** PopArgs(FunctionSig* sig) {
     if (build()) {
@@ -1260,27 +1199,35 @@
 
   void PushBlock(SsaEnv* end_env) {
     const int stack_depth = static_cast<int>(stack_.size());
-    control_.emplace_back(Control::Block(pc_, stack_depth, end_env));
+    control_.emplace_back(
+        Control::Block(pc_, stack_depth, end_env, current_catch_));
   }
 
   void PushLoop(SsaEnv* end_env) {
     const int stack_depth = static_cast<int>(stack_.size());
-    control_.emplace_back(Control::Loop(pc_, stack_depth, end_env));
+    control_.emplace_back(
+        Control::Loop(pc_, stack_depth, end_env, current_catch_));
   }
 
   void PushIf(SsaEnv* end_env, SsaEnv* false_env) {
     const int stack_depth = static_cast<int>(stack_.size());
-    control_.emplace_back(Control::If(pc_, stack_depth, end_env, false_env));
+    control_.emplace_back(
+        Control::If(pc_, stack_depth, end_env, false_env, current_catch_));
   }
 
-  void PushTry(SsaEnv* end_env, SsaEnv* catch_env, SsaEnv* finish_try_env) {
+  void PushTry(SsaEnv* end_env, SsaEnv* catch_env) {
     const int stack_depth = static_cast<int>(stack_.size());
-    control_.emplace_back(
-        Control::Try(pc_, stack_depth, end_env, catch_env, finish_try_env));
+    control_.emplace_back(Control::Try(pc_, stack_depth, end_env, zone_,
+                                       catch_env, current_catch_));
+    current_catch_ = static_cast<int32_t>(control_.size() - 1);
   }
 
+  void PopControl() { control_.pop_back(); }
+
   int DecodeLoadMem(LocalType type, MachineType mem_type) {
-    MemoryAccessOperand operand(this, pc_);
+    MemoryAccessOperand operand(this, pc_,
+                                ElementSizeLog2Of(mem_type.representation()));
+
     Value index = Pop(0, kAstI32);
     TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset,
                          operand.alignment, position());
@@ -1289,24 +1236,45 @@
   }
 
   int DecodeStoreMem(LocalType type, MachineType mem_type) {
-    MemoryAccessOperand operand(this, pc_);
+    MemoryAccessOperand operand(this, pc_,
+                                ElementSizeLog2Of(mem_type.representation()));
     Value val = Pop(1, type);
     Value index = Pop(0, kAstI32);
     BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment,
           val.node, position());
-    Push(type, val.node);
     return 1 + operand.length;
   }
 
-  void DecodeSimdOpcode(WasmOpcode opcode) {
-    FunctionSig* sig = WasmOpcodes::Signature(opcode);
-    compiler::NodeVector inputs(sig->parameter_count(), zone_);
-    for (size_t i = sig->parameter_count(); i > 0; i--) {
-      Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
-      inputs[i - 1] = val.node;
+  unsigned DecodeSimdOpcode(WasmOpcode opcode) {
+    unsigned len = 0;
+    switch (opcode) {
+      case kExprI32x4ExtractLane: {
+        uint8_t lane = this->checked_read_u8(pc_, 2, "lane number");
+        if (lane > 3) {
+          error(pc_, pc_ + 2, "invalid extract lane value");
+        }
+        TFNode* input = Pop(0, LocalType::kSimd128).node;
+        TFNode* node = BUILD(SimdExtractLane, opcode, lane, input);
+        Push(LocalType::kWord32, node);
+        len++;
+        break;
+      }
+      default: {
+        FunctionSig* sig = WasmOpcodes::Signature(opcode);
+        if (sig != nullptr) {
+          compiler::NodeVector inputs(sig->parameter_count(), zone_);
+          for (size_t i = sig->parameter_count(); i > 0; i--) {
+            Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
+            inputs[i - 1] = val.node;
+          }
+          TFNode* node = BUILD(SimdOp, opcode, inputs);
+          Push(GetReturnType(sig), node);
+        } else {
+          error("invalid simd opcode");
+        }
+      }
     }
-    TFNode* node = BUILD(SimdOp, opcode, inputs);
-    Push(GetReturnType(sig), node);
+    return len;
   }
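// [Editor's sketch, not part of this change] Byte layout assumed by the
// decoder above for a prefixed SIMD opcode with a lane immediate:
//   pc[0] = kSimdPrefix, pc[1] = SIMD sub-opcode, pc[2] = lane index
//   (0..3 for i32x4).
// The combined opcode is (kSimdPrefix << 8) | pc[1], and the returned {len}
// counts only the extra immediate bytes (1 for the lane byte), which the main
// decode loop adds on top of the prefix/sub-opcode length.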
 
   void DoReturn() {
@@ -1320,12 +1288,21 @@
       if (buffer) buffer[i] = val.node;
     }
 
-    Push(kAstEnd, BUILD(Return, count, buffer));
-    ssa_env_->Kill(SsaEnv::kControlEnd);
+    BUILD(Return, count, buffer);
+    EndControl();
   }
 
   void Push(LocalType type, TFNode* node) {
-    stack_.push_back({pc_, node, type});
+    if (type != kAstStmt && type != kAstEnd) {
+      stack_.push_back({pc_, node, type});
+    }
+  }
+
+  void PushReturns(FunctionSig* sig, TFNode** rets) {
+    for (size_t i = 0; i < sig->return_count(); i++) {
+      // When only verifying, {rets} will be null, so push null.
+      Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
+    }
   }
 
   const char* SafeOpcodeNameAt(const byte* pc) {
@@ -1334,6 +1311,10 @@
   }
 
   Value Pop(int index, LocalType expected) {
+    if (!ssa_env_->go()) {
+      // Unreachable code is essentially not typechecked.
+      return {pc_, nullptr, expected};
+    }
     Value val = Pop();
     if (val.type != expected) {
       if (val.type != kAstEnd) {
@@ -1346,6 +1327,10 @@
   }
 
   Value Pop() {
+    if (!ssa_env_->go()) {
+      // Unreachable code is essentially not typechecked.
+      return {pc_, nullptr, kAstEnd};
+    }
     size_t limit = control_.empty() ? 0 : control_.back().stack_depth;
     if (stack_.size() <= limit) {
       Value val = {pc_, nullptr, kAstStmt};
@@ -1358,6 +1343,10 @@
   }
 
   Value PopUpTo(int stack_depth) {
+    if (!ssa_env_->go()) {
+      // Unreachable code is essentially not typechecked.
+      return {pc_, nullptr, kAstEnd};
+    }
     if (stack_depth == stack_.size()) {
       Value val = {pc_, nullptr, kAstStmt};
       return val;
@@ -1375,34 +1364,82 @@
 
   int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
 
-  void BreakTo(Control* block, Value& val) {
-    if (block->is_loop) {
+  void BreakTo(unsigned depth) {
+    if (!ssa_env_->go()) return;
+    Control* c = &control_[control_.size() - depth - 1];
+    if (c->is_loop()) {
       // This is the inner loop block, which does not have a value.
-      Goto(ssa_env_, block->end_env);
+      Goto(ssa_env_, c->end_env);
     } else {
-      // Merge the value into the production for the block.
-      MergeInto(block->end_env, &block->node, &block->type, val);
+      // Merge the value(s) into the end of the block.
+      if (static_cast<size_t>(c->stack_depth + c->merge.arity) >
+          stack_.size()) {
+        error(
+            pc_, pc_,
+            "expected at least %d values on the stack for br to @%d, found %d",
+            c->merge.arity, startrel(c->pc),
+            static_cast<int>(stack_.size() - c->stack_depth));
+        return;
+      }
+      MergeValuesInto(c);
     }
   }
 
-  void MergeInto(SsaEnv* target, TFNode** node, LocalType* type, Value& val) {
+  void FallThruTo(Control* c) {
     if (!ssa_env_->go()) return;
-    DCHECK_NE(kAstEnd, val.type);
+    // Merge the value(s) into the end of the block.
+    int arity = static_cast<int>(c->merge.arity);
+    if (c->stack_depth + arity != stack_.size()) {
+      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
+            arity, startrel(c->pc));
+      return;
+    }
+    MergeValuesInto(c);
+  }
 
+  inline Value& GetMergeValueFromStack(Control* c, int i) {
+    return stack_[stack_.size() - c->merge.arity + i];
+  }
+
+  void TypeCheckLoopFallThru(Control* c) {
+    if (!ssa_env_->go()) return;
+    // Fallthru must match arity exactly.
+    int arity = static_cast<int>(c->merge.arity);
+    if (c->stack_depth + arity != stack_.size()) {
+      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
+            arity, startrel(c->pc));
+      return;
+    }
+    // Typecheck the values left on the stack.
+    for (unsigned i = 0; i < c->merge.arity; i++) {
+      Value& val = GetMergeValueFromStack(c, i);
+      Value& old =
+          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
+      if (val.type != old.type) {
+        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
+              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
+        return;
+      }
+    }
+  }
+
+  void MergeValuesInto(Control* c) {
+    SsaEnv* target = c->end_env;
     bool first = target->state == SsaEnv::kUnreachable;
     Goto(ssa_env_, target);
 
-    if (first) {
-      // first merge to this environment; set the type and the node.
-      *type = val.type;
-      *node = val.node;
-    } else if (val.type == *type && val.type != kAstStmt) {
-      // merge with the existing value for this block.
-      *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node);
-    } else {
-      // types don't match, or block is already a stmt.
-      *type = kAstStmt;
-      *node = nullptr;
+    for (unsigned i = 0; i < c->merge.arity; i++) {
+      Value& val = GetMergeValueFromStack(c, i);
+      Value& old =
+          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
+      if (val.type != old.type) {
+        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
+              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
+        return;
+      }
+      old.node =
+          first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
+                                                  old.node, val.node);
     }
   }
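// [Editor's note, illustrative] With the multi-value prototype, a block whose
// merge.arity is 2 keeps its merge values in c->merge.vals.array (see
// SetBlockType above); the loop here type-checks each of the top merge.arity
// stack values against the block's declared result types and merges each one
// into a phi in the block's end environment.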
 
@@ -1442,6 +1479,45 @@
     }
   }
 
+  TFNode* CheckForException(TFNode* node) {
+    if (node == nullptr) {
+      return nullptr;
+    }
+
+    const bool inside_try_scope = current_catch_ != kNullCatch;
+
+    if (!inside_try_scope) {
+      return node;
+    }
+
+    TFNode* if_success = nullptr;
+    TFNode* if_exception = nullptr;
+    if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
+      return node;
+    }
+
+    SsaEnv* success_env = Steal(ssa_env_);
+    success_env->control = if_success;
+
+    SsaEnv* exception_env = Split(success_env);
+    exception_env->control = if_exception;
+    TryInfo* try_info = current_try_info();
+    Goto(exception_env, try_info->catch_env);
+    TFNode* exception = try_info->exception;
+    if (exception == nullptr) {
+      DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
+      try_info->exception = if_exception;
+    } else {
+      DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
+      try_info->exception =
+          CreateOrMergeIntoPhi(kAstI32, try_info->catch_env->control,
+                               try_info->exception, if_exception);
+    }
+
+    SetEnv("if_success", success_env);
+    return node;
+  }
+
   void Goto(SsaEnv* from, SsaEnv* to) {
     DCHECK_NOT_NULL(to);
     if (!from->go()) return;
@@ -1630,16 +1706,15 @@
         case kExprLoop:
         case kExprIf:
         case kExprBlock:
-        case kExprTryCatch:
-        case kExprTryCatchFinally:
-        case kExprTryFinally:
+        case kExprTry:
+          length = OpcodeLength(pc);
           depth++;
-          DCHECK_EQ(1, OpcodeLength(pc));
           break;
-        case kExprSetLocal: {
+        case kExprSetLocal:  // fallthru
+        case kExprTeeLocal: {
           LocalIndexOperand operand(this, pc);
           if (assigned->length() > 0 &&
-              static_cast<int>(operand.index) < assigned->length()) {
+              operand.index < static_cast<uint32_t>(assigned->length())) {
             // Unverified code might have an out-of-bounds index.
             assigned->Add(operand.index);
           }
@@ -1664,11 +1739,33 @@
     DCHECK_EQ(pc_ - start_, offset);  // overflows cannot happen
     return offset;
   }
+
+  inline void BuildSimpleOperator(WasmOpcode opcode, FunctionSig* sig) {
+    TFNode* node;
+    switch (sig->parameter_count()) {
+      case 1: {
+        Value val = Pop(0, sig->GetParam(0));
+        node = BUILD(Unop, opcode, val.node, position());
+        break;
+      }
+      case 2: {
+        Value rval = Pop(1, sig->GetParam(1));
+        Value lval = Pop(0, sig->GetParam(0));
+        node = BUILD(Binop, opcode, lval.node, rval.node, position());
+        break;
+      }
+      default:
+        UNREACHABLE();
+        node = nullptr;
+        break;
+    }
+    Push(GetReturnType(sig), node);
+  }
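// [Editor's example, illustrative] kExprI32Add has signature (i32, i32) -> i32,
// so the two-parameter case above pops {rval} then {lval}, builds a Binop node,
// and pushes a single i32 result; a one-parameter opcode such as kExprI32Eqz
// takes the unary path instead.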
 };
 
 bool DecodeLocalDecls(AstLocalDecls& decls, const byte* start,
                       const byte* end) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   Zone tmp(&allocator);
   FunctionBody body = {nullptr, nullptr, nullptr, start, end};
   WasmFullDecoder decoder(&tmp, nullptr, body);
@@ -1686,7 +1783,7 @@
   }
 }
 
-DecodeResult VerifyWasmCode(base::AccountingAllocator* allocator,
+DecodeResult VerifyWasmCode(AccountingAllocator* allocator,
                             FunctionBody& body) {
   Zone zone(allocator);
   WasmFullDecoder decoder(&zone, nullptr, body);
@@ -1694,8 +1791,8 @@
   return decoder.toResult<DecodeStruct*>(nullptr);
 }
 
-DecodeResult BuildTFGraph(base::AccountingAllocator* allocator,
-                          TFBuilder* builder, FunctionBody& body) {
+DecodeResult BuildTFGraph(AccountingAllocator* allocator, TFBuilder* builder,
+                          FunctionBody& body) {
   Zone zone(allocator);
   WasmFullDecoder decoder(&zone, builder, body);
   decoder.Decode();
@@ -1707,18 +1804,13 @@
   return decoder.OpcodeLength(pc);
 }
 
-unsigned OpcodeArity(const byte* pc, const byte* end) {
-  WasmDecoder decoder(nullptr, nullptr, pc, end);
-  return decoder.OpcodeArity(pc);
-}
-
 void PrintAstForDebugging(const byte* start, const byte* end) {
-  base::AccountingAllocator allocator;
+  AccountingAllocator allocator;
   OFStream os(stdout);
   PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr);
 }
 
-bool PrintAst(base::AccountingAllocator* allocator, const FunctionBody& body,
+bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body,
               std::ostream& os,
               std::vector<std::tuple<uint32_t, int, int>>* offset_table) {
   Zone zone(allocator);
@@ -1777,68 +1869,57 @@
     }
 
     switch (opcode) {
-      case kExprIf:
       case kExprElse:
-      case kExprLoop:
-      case kExprBlock:
-      case kExprTryCatch:
-      case kExprTryCatchFinally:
-      case kExprTryFinally:
         os << "   // @" << i.pc_offset();
         control_depth++;
         break;
+      case kExprLoop:
+      case kExprIf:
+      case kExprBlock:
+      case kExprTry: {
+        BlockTypeOperand operand(&i, i.pc());
+        os << "   // @" << i.pc_offset();
+        for (unsigned i = 0; i < operand.arity; i++) {
+          os << " " << WasmOpcodes::TypeName(operand.read_entry(i));
+        }
+        control_depth++;
+        break;
+      }
       case kExprEnd:
         os << "   // @" << i.pc_offset();
         control_depth--;
         break;
       case kExprBr: {
         BreakDepthOperand operand(&i, i.pc());
-        os << "   // arity=" << operand.arity << " depth=" << operand.depth;
+        os << "   // depth=" << operand.depth;
         break;
       }
       case kExprBrIf: {
         BreakDepthOperand operand(&i, i.pc());
-        os << "   // arity=" << operand.arity << " depth" << operand.depth;
+        os << "   // depth=" << operand.depth;
         break;
       }
       case kExprBrTable: {
         BranchTableOperand operand(&i, i.pc());
-        os << "   // arity=" << operand.arity
-           << " entries=" << operand.table_count;
+        os << " // entries=" << operand.table_count;
         break;
       }
       case kExprCallIndirect: {
         CallIndirectOperand operand(&i, i.pc());
+        os << "   // sig #" << operand.index;
         if (decoder.Complete(i.pc(), operand)) {
-          os << "   // sig #" << operand.index << ": " << *operand.sig;
-        } else {
-          os << " // arity=" << operand.arity << " sig #" << operand.index;
-        }
-        break;
-      }
-      case kExprCallImport: {
-        CallImportOperand operand(&i, i.pc());
-        if (decoder.Complete(i.pc(), operand)) {
-          os << "   // import #" << operand.index << ": " << *operand.sig;
-        } else {
-          os << " // arity=" << operand.arity << " import #" << operand.index;
+          os << ": " << *operand.sig;
         }
         break;
       }
       case kExprCallFunction: {
         CallFunctionOperand operand(&i, i.pc());
+        os << " // function #" << operand.index;
         if (decoder.Complete(i.pc(), operand)) {
-          os << "   // function #" << operand.index << ": " << *operand.sig;
-        } else {
-          os << " // arity=" << operand.arity << " function #" << operand.index;
+          os << ": " << *operand.sig;
         }
         break;
       }
-      case kExprReturn: {
-        ReturnArityOperand operand(&i, i.pc());
-        os << "   // arity=" << operand.arity;
-        break;
-      }
       default:
         break;
       }
diff --git a/src/wasm/ast-decoder.h b/src/wasm/ast-decoder.h
index c4f6c16..8c2c2c4 100644
--- a/src/wasm/ast-decoder.h
+++ b/src/wasm/ast-decoder.h
@@ -21,6 +21,9 @@
 
 namespace wasm {
 
+const uint32_t kMaxNumWasmLocals = 8000000;
+struct WasmGlobal;
+
 // Helpers for decoding different kinds of operands which follow bytecodes.
 struct LocalIndexOperand {
   uint32_t index;
@@ -79,39 +82,111 @@
 struct GlobalIndexOperand {
   uint32_t index;
   LocalType type;
+  const WasmGlobal* global;
   unsigned length;
 
   inline GlobalIndexOperand(Decoder* decoder, const byte* pc) {
     index = decoder->checked_read_u32v(pc, 1, &length, "global index");
+    global = nullptr;
     type = kAstStmt;
   }
 };
 
+struct BlockTypeOperand {
+  uint32_t arity;
+  const byte* types;  // pointer to encoded types for the block.
+  unsigned length;
+
+  inline BlockTypeOperand(Decoder* decoder, const byte* pc) {
+    uint8_t val = decoder->checked_read_u8(pc, 1, "block type");
+    LocalType type = kAstStmt;
+    length = 1;
+    arity = 0;
+    types = nullptr;
+    if (decode_local_type(val, &type)) {
+      arity = type == kAstStmt ? 0 : 1;
+      types = pc + 1;
+    } else {
+      // Handle multi-value blocks.
+      if (!FLAG_wasm_mv_prototype) {
+        decoder->error(pc, pc + 1, "invalid block arity > 1");
+        return;
+      }
+      if (val != kMultivalBlock) {
+        decoder->error(pc, pc + 1, "invalid block type");
+        return;
+      }
+      // Decode and check the types vector of the block.
+      unsigned len = 0;
+      uint32_t count = decoder->checked_read_u32v(pc, 2, &len, "block arity");
+      // {count} is encoded as {arity-2}, so that a {0} count here corresponds
+      // to a block with 2 values. This makes invalid/redundant encodings
+      // impossible.
+      arity = count + 2;
+      length = 1 + len + arity;
+      types = pc + 1 + 1 + len;
+
+      for (uint32_t i = 0; i < arity; i++) {
+        uint32_t offset = 1 + 1 + len + i;
+        val = decoder->checked_read_u8(pc, offset, "block type");
+        decode_local_type(val, &type);
+        if (type == kAstStmt) {
+          decoder->error(pc, pc + offset, "invalid block type");
+          return;
+        }
+      }
+    }
+  }
+  // Decode a byte representing a local type. Return {false} if the encoded
+  // byte was invalid or {kMultivalBlock}.
+  bool decode_local_type(uint8_t val, LocalType* result) {
+    switch (static_cast<LocalTypeCode>(val)) {
+      case kLocalVoid:
+        *result = kAstStmt;
+        return true;
+      case kLocalI32:
+        *result = kAstI32;
+        return true;
+      case kLocalI64:
+        *result = kAstI64;
+        return true;
+      case kLocalF32:
+        *result = kAstF32;
+        return true;
+      case kLocalF64:
+        *result = kAstF64;
+        return true;
+      default:
+        *result = kAstStmt;
+        return false;
+    }
+  }
+  LocalType read_entry(unsigned index) {
+    DCHECK_LT(index, arity);
+    LocalType result;
+    CHECK(decode_local_type(types[index], &result));
+    return result;
+  }
+};
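// [Editor's worked example, assuming the encoding handled above] With
// FLAG_wasm_mv_prototype enabled, a block yielding two values (i32, f64) is
// encoded as:
//   pc[1] = kMultivalBlock
//   pc[2] = 0x00                     // u32v "block arity" = arity - 2, so arity == 2
//   pc[3] = kLocalI32, pc[4] = kLocalF64
// giving an operand length of 1 (type byte) + 1 (count varint) + 2 (entries)
// = 4 bytes (not counting the block opcode itself), with read_entry(0) ==
// kAstI32 and read_entry(1) == kAstF64.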
+
 struct Control;
 struct BreakDepthOperand {
-  uint32_t arity;
   uint32_t depth;
   Control* target;
   unsigned length;
   inline BreakDepthOperand(Decoder* decoder, const byte* pc) {
-    unsigned len1 = 0;
-    unsigned len2 = 0;
-    arity = decoder->checked_read_u32v(pc, 1, &len1, "argument count");
-    depth = decoder->checked_read_u32v(pc, 1 + len1, &len2, "break depth");
-    length = len1 + len2;
+    depth = decoder->checked_read_u32v(pc, 1, &length, "break depth");
     target = nullptr;
   }
 };
 
 struct CallIndirectOperand {
-  uint32_t arity;
   uint32_t index;
   FunctionSig* sig;
   unsigned length;
   inline CallIndirectOperand(Decoder* decoder, const byte* pc) {
     unsigned len1 = 0;
     unsigned len2 = 0;
-    arity = decoder->checked_read_u32v(pc, 1, &len1, "argument count");
     index = decoder->checked_read_u32v(pc, 1 + len1, &len2, "signature index");
     length = len1 + len2;
     sig = nullptr;
@@ -119,59 +194,32 @@
 };
 
 struct CallFunctionOperand {
-  uint32_t arity;
   uint32_t index;
   FunctionSig* sig;
   unsigned length;
   inline CallFunctionOperand(Decoder* decoder, const byte* pc) {
     unsigned len1 = 0;
     unsigned len2 = 0;
-    arity = decoder->checked_read_u32v(pc, 1, &len1, "argument count");
     index = decoder->checked_read_u32v(pc, 1 + len1, &len2, "function index");
     length = len1 + len2;
     sig = nullptr;
   }
 };
 
-struct CallImportOperand {
-  uint32_t arity;
-  uint32_t index;
-  FunctionSig* sig;
-  unsigned length;
-  inline CallImportOperand(Decoder* decoder, const byte* pc) {
-    unsigned len1 = 0;
-    unsigned len2 = 0;
-    arity = decoder->checked_read_u32v(pc, 1, &len1, "argument count");
-    index = decoder->checked_read_u32v(pc, 1 + len1, &len2, "import index");
-    length = len1 + len2;
-    sig = nullptr;
-  }
-};
-
 struct BranchTableOperand {
-  uint32_t arity;
   uint32_t table_count;
+  const byte* start;
   const byte* table;
-  unsigned length;
   inline BranchTableOperand(Decoder* decoder, const byte* pc) {
+    DCHECK_EQ(kExprBrTable, decoder->checked_read_u8(pc, 0, "opcode"));
+    start = pc + 1;
     unsigned len1 = 0;
-    unsigned len2 = 0;
-    arity = decoder->checked_read_u32v(pc, 1, &len1, "argument count");
-    table_count =
-        decoder->checked_read_u32v(pc, 1 + len1, &len2, "table count");
+    table_count = decoder->checked_read_u32v(pc, 1, &len1, "table count");
     if (table_count > (UINT_MAX / sizeof(uint32_t)) - 1 ||
-        len1 + len2 > UINT_MAX - (table_count + 1) * sizeof(uint32_t)) {
+        len1 > UINT_MAX - (table_count + 1) * sizeof(uint32_t)) {
       decoder->error(pc, "branch table size overflow");
     }
-    length = len1 + len2 + (table_count + 1) * sizeof(uint32_t);
-
-    uint32_t table_start = 1 + len1 + len2;
-    if (decoder->check(pc, table_start, (table_count + 1) * sizeof(uint32_t),
-                       "expected <table entries>")) {
-      table = pc + table_start;
-    } else {
-      table = nullptr;
-    }
+    table = pc + 1 + len1;
   }
   inline uint32_t read_entry(Decoder* decoder, unsigned i) {
     DCHECK(i <= table_count);
@@ -179,14 +227,58 @@
   }
 };
 
+// A helper to iterate over a branch table.
+class BranchTableIterator {
+ public:
+  unsigned cur_index() { return index_; }
+  bool has_next() { return index_ <= table_count_; }
+  uint32_t next() {
+    DCHECK(has_next());
+    index_++;
+    unsigned length = 0;
+    uint32_t result =
+        decoder_->checked_read_u32v(pc_, 0, &length, "branch table entry");
+    pc_ += length;
+    return result;
+  }
+  // The length in bytes of the entire {BranchTableOperand} (table count plus
+  // all entries), not including the opcode byte itself.
+  unsigned length() {
+    while (has_next()) next();
+    return static_cast<unsigned>(pc_ - start_);
+  }
+  const byte* pc() { return pc_; }
+
+  BranchTableIterator(Decoder* decoder, BranchTableOperand& operand)
+      : decoder_(decoder),
+        start_(operand.start),
+        pc_(operand.table),
+        index_(0),
+        table_count_(operand.table_count) {}
+
+ private:
+  Decoder* decoder_;
+  const byte* start_;
+  const byte* pc_;
+  uint32_t index_;        // the current index.
+  uint32_t table_count_;  // the count of entries, not including default.
+};
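// [Editor's sketch, not part of this change] Typical use, assuming {decoder}
// and {pc} point at a kExprBrTable opcode: build the operand, then walk the
// {table_count} branch targets plus the trailing default entry.
//
//   BranchTableOperand operand(decoder, pc);
//   BranchTableIterator iterator(decoder, operand);
//   while (iterator.has_next()) {
//     uint32_t depth = iterator.next();  // last value returned is the default
//     // ... record or validate {depth} ...
//   }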
+
 struct MemoryAccessOperand {
   uint32_t alignment;
   uint32_t offset;
   unsigned length;
-  inline MemoryAccessOperand(Decoder* decoder, const byte* pc) {
+  inline MemoryAccessOperand(Decoder* decoder, const byte* pc,
+                             uint32_t max_alignment) {
     unsigned alignment_length;
     alignment =
         decoder->checked_read_u32v(pc, 1, &alignment_length, "alignment");
+    if (max_alignment < alignment) {
+      decoder->error(pc, pc + 1,
+                     "invalid alignment; expected maximum alignment is %u, "
+                     "actual alignment is %u",
+                     max_alignment, alignment);
+    }
     unsigned offset_length;
     offset = decoder->checked_read_u32v(pc, 1 + alignment_length,
                                         &offset_length, "offset");
@@ -194,15 +286,6 @@
   }
 };
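// [Editor's note, based on the callers in ast-decoder.cc] {max_alignment} is
// ElementSizeLog2Of(mem_type.representation()), e.g. 2 for a 4-byte i32
// access and 3 for an 8-byte i64 access, so an encoded alignment field of 4
// on an i64 load is rejected by the check above.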
 
-struct ReturnArityOperand {
-  uint32_t arity;
-  unsigned length;
-
-  inline ReturnArityOperand(Decoder* decoder, const byte* pc) {
-    arity = decoder->checked_read_u32v(pc, 1, &length, "return count");
-  }
-};
-
 typedef compiler::WasmGraphBuilder TFBuilder;
 struct ModuleEnv;  // forward declaration of module interface.
 
@@ -228,25 +311,25 @@
   return os;
 }
 
-DecodeResult VerifyWasmCode(base::AccountingAllocator* allocator,
-                            FunctionBody& body);
-DecodeResult BuildTFGraph(base::AccountingAllocator* allocator,
-                          TFBuilder* builder, FunctionBody& body);
-bool PrintAst(base::AccountingAllocator* allocator, const FunctionBody& body,
+V8_EXPORT_PRIVATE DecodeResult VerifyWasmCode(AccountingAllocator* allocator,
+                                              FunctionBody& body);
+DecodeResult BuildTFGraph(AccountingAllocator* allocator, TFBuilder* builder,
+                          FunctionBody& body);
+bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body,
               std::ostream& os,
               std::vector<std::tuple<uint32_t, int, int>>* offset_table);
 
 // A simplified form of AST printing, e.g. from a debugger.
 void PrintAstForDebugging(const byte* start, const byte* end);
 
-inline DecodeResult VerifyWasmCode(base::AccountingAllocator* allocator,
+inline DecodeResult VerifyWasmCode(AccountingAllocator* allocator,
                                    ModuleEnv* module, FunctionSig* sig,
                                    const byte* start, const byte* end) {
   FunctionBody body = {module, sig, nullptr, start, end};
   return VerifyWasmCode(allocator, body);
 }
 
-inline DecodeResult BuildTFGraph(base::AccountingAllocator* allocator,
+inline DecodeResult BuildTFGraph(AccountingAllocator* allocator,
                                  TFBuilder* builder, ModuleEnv* module,
                                  FunctionSig* sig, const byte* start,
                                  const byte* end) {
@@ -276,9 +359,6 @@
 // Computes the length of the opcode at the given address.
 unsigned OpcodeLength(const byte* pc, const byte* end);
 
-// Computes the arity (number of sub-nodes) of the opcode at the given address.
-unsigned OpcodeArity(const byte* pc, const byte* end);
-
 // A simple forward iterator for bytecodes.
 class BytecodeIterator : public Decoder {
  public:
diff --git a/src/wasm/decoder.h b/src/wasm/decoder.h
index a6ede54..d5c9f43 100644
--- a/src/wasm/decoder.h
+++ b/src/wasm/decoder.h
@@ -12,7 +12,7 @@
 #include "src/signature.h"
 #include "src/utils.h"
 #include "src/wasm/wasm-result.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -208,6 +208,19 @@
 
   // Consume {size} bytes and send them to the bit bucket, advancing {pc_}.
   void consume_bytes(int size) {
+    TRACE("  +%d  %-20s: %d bytes\n", static_cast<int>(pc_ - start_), "skip",
+          size);
+    if (checkAvailable(size)) {
+      pc_ += size;
+    } else {
+      pc_ = limit_;
+    }
+  }
+
+  // As above, but with a {name} used only in the trace output: consume {size}
+  // bytes and send them to the bit bucket, advancing {pc_}.
+  void consume_bytes(uint32_t size, const char* name = "skip") {
+    TRACE("  +%d  %-20s: %d bytes\n", static_cast<int>(pc_ - start_), name,
+          size);
     if (checkAvailable(size)) {
       pc_ += size;
     } else {
diff --git a/src/wasm/encoder.cc b/src/wasm/encoder.cc
deleted file mode 100644
index ef0bddc..0000000
--- a/src/wasm/encoder.cc
+++ /dev/null
@@ -1,382 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/signature.h"
-
-#include "src/handles.h"
-#include "src/v8.h"
-#include "src/zone-containers.h"
-
-#include "src/wasm/ast-decoder.h"
-#include "src/wasm/encoder.h"
-#include "src/wasm/leb-helper.h"
-#include "src/wasm/wasm-macro-gen.h"
-#include "src/wasm/wasm-module.h"
-#include "src/wasm/wasm-opcodes.h"
-
-#include "src/v8memory.h"
-
-#if DEBUG
-#define TRACE(...)                                    \
-  do {                                                \
-    if (FLAG_trace_wasm_encoder) PrintF(__VA_ARGS__); \
-  } while (false)
-#else
-#define TRACE(...)
-#endif
-
-namespace v8 {
-namespace internal {
-namespace wasm {
-
-// Emit a section name and the size as a padded varint that can be patched
-// later.
-size_t EmitSection(WasmSection::Code code, ZoneBuffer& buffer) {
-  // Emit the section name.
-  const char* name = WasmSection::getName(code);
-  TRACE("emit section: %s\n", name);
-  size_t length = WasmSection::getNameLength(code);
-  buffer.write_size(length);  // Section name string size.
-  buffer.write(reinterpret_cast<const byte*>(name), length);
-
-  // Emit a placeholder for the length.
-  return buffer.reserve_u32v();
-}
-
-// Patch the size of a section after it's finished.
-void FixupSection(ZoneBuffer& buffer, size_t start) {
-  buffer.patch_u32v(start, static_cast<uint32_t>(buffer.offset() - start -
-                                                 kPaddedVarInt32Size));
-}
-
-WasmFunctionBuilder::WasmFunctionBuilder(WasmModuleBuilder* builder)
-    : builder_(builder),
-      locals_(builder->zone()),
-      signature_index_(0),
-      exported_(0),
-      body_(builder->zone()),
-      name_(builder->zone()) {}
-
-void WasmFunctionBuilder::EmitVarInt(uint32_t val) {
-  byte buffer[8];
-  byte* ptr = buffer;
-  LEBHelper::write_u32v(&ptr, val);
-  for (byte* p = buffer; p < ptr; p++) {
-    body_.push_back(*p);
-  }
-}
-
-void WasmFunctionBuilder::SetSignature(FunctionSig* sig) {
-  DCHECK(!locals_.has_sig());
-  locals_.set_sig(sig);
-  signature_index_ = builder_->AddSignature(sig);
-}
-
-uint32_t WasmFunctionBuilder::AddLocal(LocalType type) {
-  DCHECK(locals_.has_sig());
-  return locals_.AddLocals(1, type);
-}
-
-void WasmFunctionBuilder::EmitGetLocal(uint32_t local_index) {
-  EmitWithVarInt(kExprGetLocal, local_index);
-}
-
-void WasmFunctionBuilder::EmitSetLocal(uint32_t local_index) {
-  EmitWithVarInt(kExprSetLocal, local_index);
-}
-
-void WasmFunctionBuilder::EmitCode(const byte* code, uint32_t code_size) {
-  for (size_t i = 0; i < code_size; ++i) {
-    body_.push_back(code[i]);
-  }
-}
-
-void WasmFunctionBuilder::Emit(WasmOpcode opcode) {
-  body_.push_back(static_cast<byte>(opcode));
-}
-
-void WasmFunctionBuilder::EmitWithU8(WasmOpcode opcode, const byte immediate) {
-  body_.push_back(static_cast<byte>(opcode));
-  body_.push_back(immediate);
-}
-
-void WasmFunctionBuilder::EmitWithU8U8(WasmOpcode opcode, const byte imm1,
-                                       const byte imm2) {
-  body_.push_back(static_cast<byte>(opcode));
-  body_.push_back(imm1);
-  body_.push_back(imm2);
-}
-
-void WasmFunctionBuilder::EmitWithVarInt(WasmOpcode opcode,
-                                         uint32_t immediate) {
-  body_.push_back(static_cast<byte>(opcode));
-  EmitVarInt(immediate);
-}
-
-void WasmFunctionBuilder::EmitI32Const(int32_t value) {
-  // TODO(titzer): variable-length signed and unsigned i32 constants.
-  if (-128 <= value && value <= 127) {
-    EmitWithU8(kExprI8Const, static_cast<byte>(value));
-  } else {
-    byte code[] = {WASM_I32V_5(value)};
-    EmitCode(code, sizeof(code));
-  }
-}
-
-void WasmFunctionBuilder::SetExported() { exported_ = true; }
-
-void WasmFunctionBuilder::SetName(const char* name, int name_length) {
-  name_.clear();
-  if (name_length > 0) {
-    for (int i = 0; i < name_length; ++i) {
-      name_.push_back(*(name + i));
-    }
-  }
-}
-
-void WasmFunctionBuilder::WriteSignature(ZoneBuffer& buffer) const {
-  buffer.write_u32v(signature_index_);
-}
-
-void WasmFunctionBuilder::WriteExport(ZoneBuffer& buffer,
-                                      uint32_t func_index) const {
-  if (exported_) {
-    buffer.write_u32v(func_index);
-    buffer.write_size(name_.size());
-    if (name_.size() > 0) {
-      buffer.write(reinterpret_cast<const byte*>(&name_[0]), name_.size());
-    }
-  }
-}
-
-void WasmFunctionBuilder::WriteBody(ZoneBuffer& buffer) const {
-  size_t locals_size = locals_.Size();
-  buffer.write_size(locals_size + body_.size());
-  buffer.EnsureSpace(locals_size);
-  byte** ptr = buffer.pos_ptr();
-  locals_.Emit(*ptr);
-  (*ptr) += locals_size;  // UGLY: manual bump of position pointer
-  if (body_.size() > 0) {
-    buffer.write(&body_[0], body_.size());
-  }
-}
-
-WasmDataSegmentEncoder::WasmDataSegmentEncoder(Zone* zone, const byte* data,
-                                               uint32_t size, uint32_t dest)
-    : data_(zone), dest_(dest) {
-  for (size_t i = 0; i < size; ++i) {
-    data_.push_back(data[i]);
-  }
-}
-
-void WasmDataSegmentEncoder::Write(ZoneBuffer& buffer) const {
-  buffer.write_u32v(dest_);
-  buffer.write_u32v(static_cast<uint32_t>(data_.size()));
-  buffer.write(&data_[0], data_.size());
-}
-
-WasmModuleBuilder::WasmModuleBuilder(Zone* zone)
-    : zone_(zone),
-      signatures_(zone),
-      imports_(zone),
-      functions_(zone),
-      data_segments_(zone),
-      indirect_functions_(zone),
-      globals_(zone),
-      signature_map_(zone),
-      start_function_index_(-1) {}
-
-uint32_t WasmModuleBuilder::AddFunction() {
-  functions_.push_back(new (zone_) WasmFunctionBuilder(this));
-  return static_cast<uint32_t>(functions_.size() - 1);
-}
-
-WasmFunctionBuilder* WasmModuleBuilder::FunctionAt(size_t index) {
-  if (functions_.size() > index) {
-    return functions_.at(index);
-  } else {
-    return nullptr;
-  }
-}
-
-void WasmModuleBuilder::AddDataSegment(WasmDataSegmentEncoder* data) {
-  data_segments_.push_back(data);
-}
-
-bool WasmModuleBuilder::CompareFunctionSigs::operator()(FunctionSig* a,
-                                                        FunctionSig* b) const {
-  if (a->return_count() < b->return_count()) return true;
-  if (a->return_count() > b->return_count()) return false;
-  if (a->parameter_count() < b->parameter_count()) return true;
-  if (a->parameter_count() > b->parameter_count()) return false;
-  for (size_t r = 0; r < a->return_count(); r++) {
-    if (a->GetReturn(r) < b->GetReturn(r)) return true;
-    if (a->GetReturn(r) > b->GetReturn(r)) return false;
-  }
-  for (size_t p = 0; p < a->parameter_count(); p++) {
-    if (a->GetParam(p) < b->GetParam(p)) return true;
-    if (a->GetParam(p) > b->GetParam(p)) return false;
-  }
-  return false;
-}
-
-uint32_t WasmModuleBuilder::AddSignature(FunctionSig* sig) {
-  SignatureMap::iterator pos = signature_map_.find(sig);
-  if (pos != signature_map_.end()) {
-    return pos->second;
-  } else {
-    uint32_t index = static_cast<uint32_t>(signatures_.size());
-    signature_map_[sig] = index;
-    signatures_.push_back(sig);
-    return index;
-  }
-}
-
-void WasmModuleBuilder::AddIndirectFunction(uint32_t index) {
-  indirect_functions_.push_back(index);
-}
-
-uint32_t WasmModuleBuilder::AddImport(const char* name, int name_length,
-                                      FunctionSig* sig) {
-  imports_.push_back({AddSignature(sig), name, name_length});
-  return static_cast<uint32_t>(imports_.size() - 1);
-}
-
-void WasmModuleBuilder::MarkStartFunction(uint32_t index) {
-  start_function_index_ = index;
-}
-
-uint32_t WasmModuleBuilder::AddGlobal(LocalType type, bool exported) {
-  globals_.push_back(std::make_pair(type, exported));
-  return static_cast<uint32_t>(globals_.size() - 1);
-}
-
-void WasmModuleBuilder::WriteTo(ZoneBuffer& buffer) const {
-  uint32_t exports = 0;
-
-  // == Emit magic =============================================================
-  TRACE("emit magic\n");
-  buffer.write_u32(kWasmMagic);
-  buffer.write_u32(kWasmVersion);
-
-  // == Emit signatures ========================================================
-  if (signatures_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::Signatures, buffer);
-    buffer.write_size(signatures_.size());
-
-    for (FunctionSig* sig : signatures_) {
-      buffer.write_u8(kWasmFunctionTypeForm);
-      buffer.write_size(sig->parameter_count());
-      for (size_t j = 0; j < sig->parameter_count(); j++) {
-        buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(sig->GetParam(j)));
-      }
-      buffer.write_size(sig->return_count());
-      for (size_t j = 0; j < sig->return_count(); j++) {
-        buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(sig->GetReturn(j)));
-      }
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == Emit globals ===========================================================
-  if (globals_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::Globals, buffer);
-    buffer.write_size(globals_.size());
-
-    for (auto global : globals_) {
-      buffer.write_u32v(0);  // Length of the global name.
-      buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(global.first));
-      buffer.write_u8(global.second);
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == Emit imports ===========================================================
-  if (imports_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::ImportTable, buffer);
-    buffer.write_size(imports_.size());
-    for (auto import : imports_) {
-      buffer.write_u32v(import.sig_index);
-      buffer.write_u32v(import.name_length);
-      buffer.write(reinterpret_cast<const byte*>(import.name),
-                   import.name_length);
-      buffer.write_u32v(0);
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == Emit function signatures ===============================================
-  if (functions_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::FunctionSignatures, buffer);
-    buffer.write_size(functions_.size());
-    for (auto function : functions_) {
-      function->WriteSignature(buffer);
-      if (function->exported()) exports++;
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == emit function table ====================================================
-  if (indirect_functions_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::FunctionTable, buffer);
-    buffer.write_size(indirect_functions_.size());
-
-    for (auto index : indirect_functions_) {
-      buffer.write_u32v(index);
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == emit memory declaration ================================================
-  {
-    size_t start = EmitSection(WasmSection::Code::Memory, buffer);
-    buffer.write_u32v(16);  // min memory size
-    buffer.write_u32v(16);  // max memory size
-    buffer.write_u8(0);     // memory export
-    static_assert(kDeclMemorySize == 3, "memory size must match emit above");
-    FixupSection(buffer, start);
-  }
-
-  // == emit exports ===========================================================
-  if (exports > 0) {
-    size_t start = EmitSection(WasmSection::Code::ExportTable, buffer);
-    buffer.write_u32v(exports);
-    uint32_t index = 0;
-    for (auto function : functions_) {
-      function->WriteExport(buffer, index++);
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == emit start function index ==============================================
-  if (start_function_index_ >= 0) {
-    size_t start = EmitSection(WasmSection::Code::StartFunction, buffer);
-    buffer.write_u32v(start_function_index_);
-    FixupSection(buffer, start);
-  }
-
-  // == emit code ==============================================================
-  if (functions_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::FunctionBodies, buffer);
-    buffer.write_size(functions_.size());
-    for (auto function : functions_) {
-      function->WriteBody(buffer);
-    }
-    FixupSection(buffer, start);
-  }
-
-  // == emit data segments =====================================================
-  if (data_segments_.size() > 0) {
-    size_t start = EmitSection(WasmSection::Code::DataSegments, buffer);
-    buffer.write_size(data_segments_.size());
-
-    for (auto segment : data_segments_) {
-      segment->Write(buffer);
-    }
-    FixupSection(buffer, start);
-  }
-}
-}  // namespace wasm
-}  // namespace internal
-}  // namespace v8
diff --git a/src/wasm/encoder.h b/src/wasm/encoder.h
deleted file mode 100644
index eb8aa64..0000000
--- a/src/wasm/encoder.h
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2015 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_WASM_ENCODER_H_
-#define V8_WASM_ENCODER_H_
-
-#include "src/signature.h"
-#include "src/zone-containers.h"
-
-#include "src/wasm/leb-helper.h"
-#include "src/wasm/wasm-macro-gen.h"
-#include "src/wasm/wasm-module.h"
-#include "src/wasm/wasm-opcodes.h"
-#include "src/wasm/wasm-result.h"
-
-namespace v8 {
-namespace internal {
-namespace wasm {
-
-class ZoneBuffer : public ZoneObject {
- public:
-  static const uint32_t kInitialSize = 4096;
-  explicit ZoneBuffer(Zone* zone, size_t initial = kInitialSize)
-      : zone_(zone), buffer_(reinterpret_cast<byte*>(zone->New(initial))) {
-    pos_ = buffer_;
-    end_ = buffer_ + initial;
-  }
-
-  void write_u8(uint8_t x) {
-    EnsureSpace(1);
-    *(pos_++) = x;
-  }
-
-  void write_u16(uint16_t x) {
-    EnsureSpace(2);
-    WriteLittleEndianValue<uint16_t>(pos_, x);
-    pos_ += 2;
-  }
-
-  void write_u32(uint32_t x) {
-    EnsureSpace(4);
-    WriteLittleEndianValue<uint32_t>(pos_, x);
-    pos_ += 4;
-  }
-
-  void write_u32v(uint32_t val) {
-    EnsureSpace(kMaxVarInt32Size);
-    LEBHelper::write_u32v(&pos_, val);
-  }
-
-  void write_size(size_t val) {
-    EnsureSpace(kMaxVarInt32Size);
-    DCHECK_EQ(val, static_cast<uint32_t>(val));
-    LEBHelper::write_u32v(&pos_, static_cast<uint32_t>(val));
-  }
-
-  void write(const byte* data, size_t size) {
-    EnsureSpace(size);
-    memcpy(pos_, data, size);
-    pos_ += size;
-  }
-
-  size_t reserve_u32v() {
-    size_t off = offset();
-    EnsureSpace(kMaxVarInt32Size);
-    pos_ += kMaxVarInt32Size;
-    return off;
-  }
-
-  // Patch a (padded) u32v at the given offset to be the given value.
-  void patch_u32v(size_t offset, uint32_t val) {
-    byte* ptr = buffer_ + offset;
-    for (size_t pos = 0; pos != kPaddedVarInt32Size; ++pos) {
-      uint32_t next = val >> 7;
-      byte out = static_cast<byte>(val & 0x7f);
-      if (pos != kPaddedVarInt32Size - 1) {
-        *(ptr++) = 0x80 | out;
-        val = next;
-      } else {
-        *(ptr++) = out;
-      }
-    }
-  }
-
-  size_t offset() { return static_cast<size_t>(pos_ - buffer_); }
-  size_t size() { return static_cast<size_t>(pos_ - buffer_); }
-  const byte* begin() { return buffer_; }
-  const byte* end() { return pos_; }
-
-  void EnsureSpace(size_t size) {
-    if ((pos_ + size) > end_) {
-      size_t new_size = 4096 + (end_ - buffer_) * 3;
-      byte* new_buffer = reinterpret_cast<byte*>(zone_->New(new_size));
-      memcpy(new_buffer, buffer_, (pos_ - buffer_));
-      pos_ = new_buffer + (pos_ - buffer_);
-      buffer_ = new_buffer;
-      end_ = new_buffer + new_size;
-    }
-  }
-
-  byte** pos_ptr() { return &pos_; }
-
- private:
-  Zone* zone_;
-  byte* buffer_;
-  byte* pos_;
-  byte* end_;
-};
-
-class WasmModuleBuilder;
-
-class WasmFunctionBuilder : public ZoneObject {
- public:
-  // Building methods.
-  void SetSignature(FunctionSig* sig);
-  uint32_t AddLocal(LocalType type);
-  void EmitVarInt(uint32_t val);
-  void EmitCode(const byte* code, uint32_t code_size);
-  void Emit(WasmOpcode opcode);
-  void EmitGetLocal(uint32_t index);
-  void EmitSetLocal(uint32_t index);
-  void EmitI32Const(int32_t val);
-  void EmitWithU8(WasmOpcode opcode, const byte immediate);
-  void EmitWithU8U8(WasmOpcode opcode, const byte imm1, const byte imm2);
-  void EmitWithVarInt(WasmOpcode opcode, uint32_t immediate);
-  void SetExported();
-  void SetName(const char* name, int name_length);
-  bool exported() { return exported_; }
-
-  // Writing methods.
-  void WriteSignature(ZoneBuffer& buffer) const;
-  void WriteExport(ZoneBuffer& buffer, uint32_t func_index) const;
-  void WriteBody(ZoneBuffer& buffer) const;
-
- private:
-  explicit WasmFunctionBuilder(WasmModuleBuilder* builder);
-  friend class WasmModuleBuilder;
-  WasmModuleBuilder* builder_;
-  LocalDeclEncoder locals_;
-  uint32_t signature_index_;
-  bool exported_;
-  ZoneVector<uint8_t> body_;
-  ZoneVector<char> name_;
-};
-
-// TODO(titzer): kill!
-class WasmDataSegmentEncoder : public ZoneObject {
- public:
-  WasmDataSegmentEncoder(Zone* zone, const byte* data, uint32_t size,
-                         uint32_t dest);
-  void Write(ZoneBuffer& buffer) const;
-
- private:
-  ZoneVector<byte> data_;
-  uint32_t dest_;
-};
-
-struct WasmFunctionImport {
-  uint32_t sig_index;
-  const char* name;
-  int name_length;
-};
-
-class WasmModuleBuilder : public ZoneObject {
- public:
-  explicit WasmModuleBuilder(Zone* zone);
-
-  // Building methods.
-  uint32_t AddFunction();
-  uint32_t AddGlobal(LocalType type, bool exported);
-  WasmFunctionBuilder* FunctionAt(size_t index);
-  void AddDataSegment(WasmDataSegmentEncoder* data);
-  uint32_t AddSignature(FunctionSig* sig);
-  void AddIndirectFunction(uint32_t index);
-  void MarkStartFunction(uint32_t index);
-  uint32_t AddImport(const char* name, int name_length, FunctionSig* sig);
-
-  // Writing methods.
-  void WriteTo(ZoneBuffer& buffer) const;
-
-  struct CompareFunctionSigs {
-    bool operator()(FunctionSig* a, FunctionSig* b) const;
-  };
-  typedef ZoneMap<FunctionSig*, uint32_t, CompareFunctionSigs> SignatureMap;
-
-  Zone* zone() { return zone_; }
-
- private:
-  Zone* zone_;
-  ZoneVector<FunctionSig*> signatures_;
-  ZoneVector<WasmFunctionImport> imports_;
-  ZoneVector<WasmFunctionBuilder*> functions_;
-  ZoneVector<WasmDataSegmentEncoder*> data_segments_;
-  ZoneVector<uint32_t> indirect_functions_;
-  ZoneVector<std::pair<LocalType, bool>> globals_;
-  SignatureMap signature_map_;
-  int start_function_index_;
-};
-
-}  // namespace wasm
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_WASM_ENCODER_H_
diff --git a/src/wasm/module-decoder.cc b/src/wasm/module-decoder.cc
index 542c47c..9006561 100644
--- a/src/wasm/module-decoder.cc
+++ b/src/wasm/module-decoder.cc
@@ -27,6 +27,141 @@
 
 namespace {
 
+const char* kNameString = "name";
+const size_t kNameStringLength = 4;
+
+LocalType TypeOf(const WasmModule* module, const WasmInitExpr& expr) {
+  switch (expr.kind) {
+    case WasmInitExpr::kNone:
+      return kAstStmt;
+    case WasmInitExpr::kGlobalIndex:
+      return expr.val.global_index < module->globals.size()
+                 ? module->globals[expr.val.global_index].type
+                 : kAstStmt;
+    case WasmInitExpr::kI32Const:
+      return kAstI32;
+    case WasmInitExpr::kI64Const:
+      return kAstI64;
+    case WasmInitExpr::kF32Const:
+      return kAstF32;
+    case WasmInitExpr::kF64Const:
+      return kAstF64;
+    default:
+      UNREACHABLE();
+      return kAstStmt;
+  }
+}
+
+// An iterator over the sections in a WASM binary module.
+// Automatically skips all unknown sections.
+class WasmSectionIterator {
+ public:
+  explicit WasmSectionIterator(Decoder& decoder)
+      : decoder_(decoder),
+        section_code_(kUnknownSectionCode),
+        section_start_(decoder.pc()),
+        section_end_(decoder.pc()) {
+    next();
+  }
+
+  inline bool more() const {
+    return section_code_ != kUnknownSectionCode && decoder_.more();
+  }
+
+  inline WasmSectionCode section_code() const { return section_code_; }
+
+  inline const byte* section_start() const { return section_start_; }
+
+  inline uint32_t section_length() const {
+    return static_cast<uint32_t>(section_end_ - section_start_);
+  }
+
+  inline const byte* section_end() const { return section_end_; }
+
+  // Advances to the next section, checking that decoding the current section
+  // stopped at {section_end_}.
+  void advance() {
+    if (decoder_.pc() != section_end_) {
+      const char* msg = decoder_.pc() < section_end_ ? "shorter" : "longer";
+      decoder_.error(decoder_.pc(), decoder_.pc(),
+                     "section was %s than expected size "
+                     "(%u bytes expected, %zu decoded)",
+                     msg, section_length(),
+                     static_cast<size_t>(decoder_.pc() - section_start_));
+    }
+    next();
+  }
+
+ private:
+  Decoder& decoder_;
+  WasmSectionCode section_code_;
+  const byte* section_start_;
+  const byte* section_end_;
+
+  // Reads the section code/name at the current position and sets up
+  // the internal fields.
+  void next() {
+    while (true) {
+      if (!decoder_.more()) {
+        section_code_ = kUnknownSectionCode;
+        return;
+      }
+      uint8_t section_code = decoder_.consume_u8("section code");
+      // Read and check the section size.
+      uint32_t section_length = decoder_.consume_u32v("section length");
+      section_start_ = decoder_.pc();
+      if (decoder_.checkAvailable(section_length)) {
+        // Get the limit of the section within the module.
+        section_end_ = section_start_ + section_length;
+      } else {
+        // The section would extend beyond the end of the module.
+        section_end_ = section_start_;
+      }
+
+      if (section_code == kUnknownSectionCode) {
+        // Check for the known "names" section.
+        uint32_t string_length = decoder_.consume_u32v("section name length");
+        const byte* section_name_start = decoder_.pc();
+        decoder_.consume_bytes(string_length, "section name");
+        if (decoder_.failed() || decoder_.pc() > section_end_) {
+          TRACE("Section name of length %u couldn't be read\n", string_length);
+          section_code_ = kUnknownSectionCode;
+          return;
+        }
+
+        TRACE("  +%d  section name        : \"%.*s\"\n",
+              static_cast<int>(section_name_start - decoder_.start()),
+              string_length < 20 ? string_length : 20, section_name_start);
+
+        if (string_length == kNameStringLength &&
+            strncmp(reinterpret_cast<const char*>(section_name_start),
+                    kNameString, kNameStringLength) == 0) {
+          section_code = kNameSectionCode;
+        } else {
+          section_code = kUnknownSectionCode;
+        }
+      } else if (!IsValidSectionCode(section_code)) {
+        decoder_.error(decoder_.pc(), decoder_.pc(),
+                       "unknown section code #0x%02x", section_code);
+        section_code = kUnknownSectionCode;
+      }
+      section_code_ = static_cast<WasmSectionCode>(section_code);
+
+      TRACE("Section: %s\n", SectionName(section_code_));
+      if (section_code_ == kUnknownSectionCode &&
+          section_end_ > decoder_.pc()) {
+        // skip to the end of the unknown section.
+        uint32_t remaining =
+            static_cast<uint32_t>(section_end_ - decoder_.pc());
+        decoder_.consume_bytes(remaining, "section payload");
+        // fall through and continue to the next section.
+      } else {
+        return;
+      }
+    }
+  }
+};
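// [Editor's sketch, not part of this change] How a caller walks the sections,
// mirroring DecodeModule below: decode the payload of each known section and
// then call advance(), which checks that decoding stopped exactly at
// section_end().
//
//   WasmSectionIterator section_iter(decoder);
//   while (section_iter.more()) {
//     TRACE("section %s, %u bytes\n", SectionName(section_iter.section_code()),
//           section_iter.section_length());
//     // ... decode the section payload starting at decoder.pc() ...
//     section_iter.advance();
//   }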
+
 // The main logic for decoding the bytes of a module.
 class ModuleDecoder : public Decoder {
  public:
@@ -77,11 +212,9 @@
     module->min_mem_pages = 0;
     module->max_mem_pages = 0;
     module->mem_export = false;
-    module->mem_external = false;
     module->origin = origin_;
 
     const byte* pos = pc_;
-    int current_order = 0;
     uint32_t magic_word = consume_u32("wasm magic");
 #define BYTES(x) (x & 0xff), (x >> 8) & 0xff, (x >> 16) & 0xff, (x >> 24) & 0xff
     if (magic_word != kWasmMagic) {
@@ -89,7 +222,6 @@
             "expected magic word %02x %02x %02x %02x, "
             "found %02x %02x %02x %02x",
             BYTES(kWasmMagic), BYTES(magic_word));
-      goto done;
     }
 
     pos = pc_;
@@ -100,302 +232,367 @@
               "expected version %02x %02x %02x %02x, "
               "found %02x %02x %02x %02x",
               BYTES(kWasmVersion), BYTES(magic_version));
-        goto done;
       }
     }
 
-    // Decode the module sections.
-    while (pc_ < limit_) {
-      TRACE("DecodeSection\n");
-      pos = pc_;
+    WasmSectionIterator section_iter(*this);
 
-      // Read the section name.
-      uint32_t string_length = consume_u32v("section name length");
-      const byte* section_name_start = pc_;
-      consume_bytes(string_length);
-      if (failed()) {
-        TRACE("Section name of length %u couldn't be read\n", string_length);
-        break;
+    // ===== Type section ====================================================
+    if (section_iter.section_code() == kTypeSectionCode) {
+      uint32_t signatures_count = consume_u32v("signatures count");
+      module->signatures.reserve(SafeReserve(signatures_count));
+      for (uint32_t i = 0; ok() && i < signatures_count; ++i) {
+        TRACE("DecodeSignature[%d] module+%d\n", i,
+              static_cast<int>(pc_ - start_));
+        FunctionSig* s = consume_sig();
+        module->signatures.push_back(s);
       }
+      section_iter.advance();
+    }
 
-      TRACE("  +%d  section name        : \"%.*s\"\n",
-            static_cast<int>(section_name_start - start_),
-            string_length < 20 ? string_length : 20, section_name_start);
+    // ===== Import section ==================================================
+    if (section_iter.section_code() == kImportSectionCode) {
+      uint32_t import_table_count = consume_u32v("import table count");
+      module->import_table.reserve(SafeReserve(import_table_count));
+      for (uint32_t i = 0; ok() && i < import_table_count; ++i) {
+        TRACE("DecodeImportTable[%d] module+%d\n", i,
+              static_cast<int>(pc_ - start_));
 
-      WasmSection::Code section =
-          WasmSection::lookup(section_name_start, string_length);
-
-      // Read and check the section size.
-      uint32_t section_length = consume_u32v("section length");
-      if (!checkAvailable(section_length)) {
-        // The section would extend beyond the end of the module.
-        break;
-      }
-      const byte* section_start = pc_;
-      const byte* expected_section_end = pc_ + section_length;
-
-      current_order = CheckSectionOrder(current_order, section);
-
-      switch (section) {
-        case WasmSection::Code::End:
-          // Terminate section decoding.
-          limit_ = pc_;
-          break;
-        case WasmSection::Code::Memory: {
-          module->min_mem_pages = consume_u32v("min memory");
-          module->max_mem_pages = consume_u32v("max memory");
-          module->mem_export = consume_u8("export memory") != 0;
-          break;
+        module->import_table.push_back({
+            0,                  // module_name_length
+            0,                  // module_name_offset
+            0,                  // field_name_offset
+            0,                  // field_name_length
+            kExternalFunction,  // kind
+            0                   // index
+        });
+        WasmImport* import = &module->import_table.back();
+        const byte* pos = pc_;
+        import->module_name_offset =
+            consume_string(&import->module_name_length, true);
+        if (import->module_name_length == 0) {
+          error(pos, "import module name cannot be NULL");
         }
-        case WasmSection::Code::Signatures: {
-          uint32_t signatures_count = consume_u32v("signatures count");
-          module->signatures.reserve(SafeReserve(signatures_count));
-          // Decode signatures.
-          for (uint32_t i = 0; i < signatures_count; ++i) {
-            if (failed()) break;
-            TRACE("DecodeSignature[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-            FunctionSig* s = consume_sig();
-            module->signatures.push_back(s);
-          }
-          break;
-        }
-        case WasmSection::Code::FunctionSignatures: {
-          uint32_t functions_count = consume_u32v("functions count");
-          module->functions.reserve(SafeReserve(functions_count));
-          for (uint32_t i = 0; i < functions_count; ++i) {
-            module->functions.push_back({nullptr,  // sig
-                                         i,        // func_index
-                                         0,        // sig_index
-                                         0,        // name_offset
-                                         0,        // name_length
-                                         0,        // code_start_offset
-                                         0});      // code_end_offset
+        import->field_name_offset =
+            consume_string(&import->field_name_length, true);
+
+        import->kind = static_cast<WasmExternalKind>(consume_u8("import kind"));
+        switch (import->kind) {
+          case kExternalFunction: {
+            // ===== Imported function =======================================
+            import->index = static_cast<uint32_t>(module->functions.size());
+            module->num_imported_functions++;
+            module->functions.push_back({nullptr,        // sig
+                                         import->index,  // func_index
+                                         0,              // sig_index
+                                         0,              // name_offset
+                                         0,              // name_length
+                                         0,              // code_start_offset
+                                         0,              // code_end_offset
+                                         true,           // imported
+                                         false});        // exported
             WasmFunction* function = &module->functions.back();
             function->sig_index = consume_sig_index(module, &function->sig);
-          }
-          break;
-        }
-        case WasmSection::Code::FunctionBodies: {
-          const byte* pos = pc_;
-          uint32_t functions_count = consume_u32v("functions count");
-          if (functions_count != module->functions.size()) {
-            error(pos, pos, "function body count %u mismatch (%u expected)",
-                  functions_count,
-                  static_cast<uint32_t>(module->functions.size()));
             break;
           }
-          for (uint32_t i = 0; i < functions_count; ++i) {
-            WasmFunction* function = &module->functions[i];
-            uint32_t size = consume_u32v("body size");
-            function->code_start_offset = pc_offset();
-            function->code_end_offset = pc_offset() + size;
-
-            TRACE("  +%d  %-20s: (%d bytes)\n", pc_offset(), "function body",
-                  size);
-            pc_ += size;
-            if (pc_ > limit_) {
-              error(pc_, "function body extends beyond end of file");
-            }
-          }
-          break;
-        }
-        case WasmSection::Code::Names: {
-          const byte* pos = pc_;
-          uint32_t functions_count = consume_u32v("functions count");
-          if (functions_count != module->functions.size()) {
-            error(pos, pos, "function name count %u mismatch (%u expected)",
-                  functions_count,
-                  static_cast<uint32_t>(module->functions.size()));
+          case kExternalTable: {
+            // ===== Imported table ==========================================
+            import->index =
+                static_cast<uint32_t>(module->function_tables.size());
+            module->function_tables.push_back(
+                {0, 0, std::vector<int32_t>(), true, false});
+            expect_u8("element type", 0x20);
+            WasmIndirectFunctionTable* table = &module->function_tables.back();
+            consume_resizable_limits("element count", "elements", kMaxUInt32,
+                                     &table->size, &table->max_size);
             break;
           }
-
-          for (uint32_t i = 0; i < functions_count; ++i) {
-            WasmFunction* function = &module->functions[i];
-            function->name_offset =
-                consume_string(&function->name_length, false);
-
-            uint32_t local_names_count = consume_u32v("local names count");
-            for (uint32_t j = 0; j < local_names_count; j++) {
-              uint32_t unused = 0;
-              uint32_t offset = consume_string(&unused, false);
-              USE(unused);
-              USE(offset);
-            }
+          case kExternalMemory: {
+            // ===== Imported memory =========================================
+            // import->index =
+            //     static_cast<uint32_t>(module->memories.size());
+            // TODO(titzer): imported memories
+            break;
           }
-          break;
-        }
-        case WasmSection::Code::Globals: {
-          uint32_t globals_count = consume_u32v("globals count");
-          module->globals.reserve(SafeReserve(globals_count));
-          // Decode globals.
-          for (uint32_t i = 0; i < globals_count; ++i) {
-            if (failed()) break;
-            TRACE("DecodeGlobal[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-            // Add an uninitialized global and pass a pointer to it.
-            module->globals.push_back({0, 0, kAstStmt, 0, false});
+          case kExternalGlobal: {
+            // ===== Imported global =========================================
+            import->index = static_cast<uint32_t>(module->globals.size());
+            module->globals.push_back(
+                {kAstStmt, false, NO_INIT, 0, true, false});
             WasmGlobal* global = &module->globals.back();
-            DecodeGlobalInModule(global);
-          }
-          break;
-        }
-        case WasmSection::Code::DataSegments: {
-          uint32_t data_segments_count = consume_u32v("data segments count");
-          module->data_segments.reserve(SafeReserve(data_segments_count));
-          // Decode data segments.
-          for (uint32_t i = 0; i < data_segments_count; ++i) {
-            if (failed()) break;
-            TRACE("DecodeDataSegment[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-            module->data_segments.push_back({0,        // dest_addr
-                                             0,        // source_offset
-                                             0,        // source_size
-                                             false});  // init
-            WasmDataSegment* segment = &module->data_segments.back();
-            DecodeDataSegmentInModule(module, segment);
-          }
-          break;
-        }
-        case WasmSection::Code::FunctionTable: {
-          // An indirect function table requires functions first.
-          CheckForFunctions(module, section);
-          // Assume only one table for now.
-          static const uint32_t kSupportedTableCount = 1;
-          module->function_tables.reserve(SafeReserve(kSupportedTableCount));
-          // Decode function table.
-          for (uint32_t i = 0; i < kSupportedTableCount; ++i) {
-            if (failed()) break;
-            TRACE("DecodeFunctionTable[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-            module->function_tables.push_back({0, 0, std::vector<uint16_t>()});
-            DecodeFunctionTableInModule(module, &module->function_tables[i]);
-          }
-          break;
-        }
-        case WasmSection::Code::StartFunction: {
-          // Declares a start function for a module.
-          CheckForFunctions(module, section);
-          if (module->start_function_index >= 0) {
-            error("start function already declared");
+            global->type = consume_value_type();
+            global->mutability = consume_u8("mutability") != 0;
             break;
           }
-          WasmFunction* func;
-          const byte* pos = pc_;
-          module->start_function_index = consume_func_index(module, &func);
-          if (func && func->sig->parameter_count() > 0) {
-            error(pos, "invalid start function: non-zero parameter count");
+          default:
+            error(pos, pos, "unknown import kind 0x%02x", import->kind);
             break;
-          }
-          break;
         }
-        case WasmSection::Code::ImportTable: {
-          uint32_t import_table_count = consume_u32v("import table count");
-          module->import_table.reserve(SafeReserve(import_table_count));
-          // Decode import table.
-          for (uint32_t i = 0; i < import_table_count; ++i) {
-            if (failed()) break;
-            TRACE("DecodeImportTable[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-
-            module->import_table.push_back({nullptr,  // sig
-                                            0,        // sig_index
-                                            0,        // module_name_offset
-                                            0,        // module_name_length
-                                            0,        // function_name_offset
-                                            0});      // function_name_length
-            WasmImport* import = &module->import_table.back();
-
-            import->sig_index = consume_sig_index(module, &import->sig);
-            const byte* pos = pc_;
-            import->module_name_offset =
-                consume_string(&import->module_name_length, true);
-            if (import->module_name_length == 0) {
-              error(pos, "import module name cannot be NULL");
-            }
-            import->function_name_offset =
-                consume_string(&import->function_name_length, true);
-          }
-          break;
-        }
-        case WasmSection::Code::ExportTable: {
-          // Declares an export table.
-          CheckForFunctions(module, section);
-          uint32_t export_table_count = consume_u32v("export table count");
-          module->export_table.reserve(SafeReserve(export_table_count));
-          // Decode export table.
-          for (uint32_t i = 0; i < export_table_count; ++i) {
-            if (failed()) break;
-            TRACE("DecodeExportTable[%d] module+%d\n", i,
-                  static_cast<int>(pc_ - start_));
-
-            module->export_table.push_back({0,    // func_index
-                                            0,    // name_offset
-                                            0});  // name_length
-            WasmExport* exp = &module->export_table.back();
-
-            WasmFunction* func;
-            exp->func_index = consume_func_index(module, &func);
-            exp->name_offset = consume_string(&exp->name_length, true);
-          }
-          // Check for duplicate exports.
-          if (ok() && module->export_table.size() > 1) {
-            std::vector<WasmExport> sorted_exports(module->export_table);
-            const byte* base = start_;
-            auto cmp_less = [base](const WasmExport& a, const WasmExport& b) {
-              // Return true if a < b.
-              uint32_t len = a.name_length;
-              if (len != b.name_length) return len < b.name_length;
-              return memcmp(base + a.name_offset, base + b.name_offset, len) <
-                     0;
-            };
-            std::stable_sort(sorted_exports.begin(), sorted_exports.end(),
-                             cmp_less);
-            auto it = sorted_exports.begin();
-            WasmExport* last = &*it++;
-            for (auto end = sorted_exports.end(); it != end; last = &*it++) {
-              DCHECK(!cmp_less(*it, *last));  // Vector must be sorted.
-              if (!cmp_less(*last, *it)) {
-                const byte* pc = start_ + it->name_offset;
-                error(pc, pc,
-                      "Duplicate export name '%.*s' for functions %d and %d",
-                      it->name_length, pc, last->func_index, it->func_index);
-                break;
-              }
-            }
-          }
-          break;
-        }
-        case WasmSection::Code::Max:
-          // Skip unknown sections.
-          TRACE("Unknown section: '");
-          for (uint32_t i = 0; i != string_length; ++i) {
-            TRACE("%c", *(section_name_start + i));
-          }
-          TRACE("'\n");
-          consume_bytes(section_length);
-          break;
       }
-
-      if (pc_ != expected_section_end) {
-        const char* diff = pc_ < expected_section_end ? "shorter" : "longer";
-        size_t expected_length = static_cast<size_t>(section_length);
-        size_t actual_length = static_cast<size_t>(pc_ - section_start);
-        error(pc_, pc_,
-              "section \"%s\" %s (%zu bytes) than specified (%zu bytes)",
-              WasmSection::getName(section), diff, actual_length,
-              expected_length);
-        break;
-      }
+      section_iter.advance();
     }
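    // A minimal sketch of the import-entry layout the loop above consumes: two
    // length-prefixed names, a kind byte, then a kind-specific payload. The
    // concrete bytes are invented for illustration and use the external-kind
    // constant already referenced in this file:
    //
    //   const byte example_import_entry[] = {
    //       3, 'e', 'n', 'v',   // module name: "env" (length-prefixed)
    //       3, 'f', 'o', 'o',   // field name:  "foo" (length-prefixed)
    //       kExternalFunction,  // import kind
    //       0                   // function imports carry a signature index
    //   };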
 
-  done:
-    if (ok()) CalculateGlobalsOffsets(module);
+    // ===== Function section ================================================
+    if (section_iter.section_code() == kFunctionSectionCode) {
+      uint32_t functions_count = consume_u32v("functions count");
+      module->functions.reserve(SafeReserve(functions_count));
+      module->num_declared_functions = functions_count;
+      for (uint32_t i = 0; ok() && i < functions_count; ++i) {
+        uint32_t func_index = static_cast<uint32_t>(module->functions.size());
+        module->functions.push_back({nullptr,     // sig
+                                     func_index,  // func_index
+                                     0,           // sig_index
+                                     0,           // name_offset
+                                     0,           // name_length
+                                     0,           // code_start_offset
+                                     0,           // code_end_offset
+                                     false,       // imported
+                                     false});     // exported
+        WasmFunction* function = &module->functions.back();
+        function->sig_index = consume_sig_index(module, &function->sig);
+      }
+      section_iter.advance();
+    }
+
+    // ===== Table section ===================================================
+    if (section_iter.section_code() == kTableSectionCode) {
+      const byte* pos = pc_;
+      uint32_t table_count = consume_u32v("table count");
+      // Require at most one table for now.
+      if (table_count > 1) {
+        error(pos, pos, "invalid table count %d, maximum 1", table_count);
+      }
+
+      for (uint32_t i = 0; ok() && i < table_count; i++) {
+        module->function_tables.push_back(
+            {0, 0, std::vector<int32_t>(), false, false});
+        WasmIndirectFunctionTable* table = &module->function_tables.back();
+        expect_u8("table type", kWasmAnyFunctionTypeForm);
+        consume_resizable_limits("table elements", "elements", kMaxUInt32,
+                                 &table->size, &table->max_size);
+      }
+      section_iter.advance();
+    }
+
+    // ===== Memory section ==================================================
+    if (section_iter.section_code() == kMemorySectionCode) {
+      const byte* pos = pc_;
+      uint32_t memory_count = consume_u32v("memory count");
+      // Require at most one memory for now.
+      if (memory_count > 1) {
+        error(pos, pos, "invalid memory count %d, maximum 1", memory_count);
+      }
+
+      for (uint32_t i = 0; ok() && i < memory_count; i++) {
+        consume_resizable_limits("memory", "pages", WasmModule::kMaxLegalPages,
+                                 &module->min_mem_pages,
+                                 &module->max_mem_pages);
+      }
+      section_iter.advance();
+    }
+
+    // ===== Global section ==================================================
+    if (section_iter.section_code() == kGlobalSectionCode) {
+      uint32_t globals_count = consume_u32v("globals count");
+      module->globals.reserve(SafeReserve(globals_count));
+      for (uint32_t i = 0; ok() && i < globals_count; ++i) {
+        TRACE("DecodeGlobal[%d] module+%d\n", i,
+              static_cast<int>(pc_ - start_));
+        // Add an uninitialized global and pass a pointer to it.
+        module->globals.push_back({kAstStmt, false, NO_INIT, 0, false, false});
+        WasmGlobal* global = &module->globals.back();
+        DecodeGlobalInModule(module, i, global);
+      }
+      section_iter.advance();
+    }
+
+    // ===== Export section ==================================================
+    if (section_iter.section_code() == kExportSectionCode) {
+      uint32_t export_table_count = consume_u32v("export table count");
+      module->export_table.reserve(SafeReserve(export_table_count));
+      for (uint32_t i = 0; ok() && i < export_table_count; ++i) {
+        TRACE("DecodeExportTable[%d] module+%d\n", i,
+              static_cast<int>(pc_ - start_));
+
+        module->export_table.push_back({
+            0,                  // name_length
+            0,                  // name_offset
+            kExternalFunction,  // kind
+            0                   // index
+        });
+        WasmExport* exp = &module->export_table.back();
+
+        exp->name_offset = consume_string(&exp->name_length, true);
+        const byte* pos = pc();
+        exp->kind = static_cast<WasmExternalKind>(consume_u8("export kind"));
+        switch (exp->kind) {
+          case kExternalFunction: {
+            WasmFunction* func = nullptr;
+            exp->index = consume_func_index(module, &func);
+            module->num_exported_functions++;
+            if (func) func->exported = true;
+            break;
+          }
+          case kExternalTable: {
+            WasmIndirectFunctionTable* table = nullptr;
+            exp->index = consume_table_index(module, &table);
+            if (table) table->exported = true;
+            break;
+          }
+          case kExternalMemory: {
+            uint32_t index = consume_u32v("memory index");
+            if (index != 0) error("invalid memory index != 0");
+            module->mem_export = true;
+            break;
+          }
+          case kExternalGlobal: {
+            WasmGlobal* global = nullptr;
+            exp->index = consume_global_index(module, &global);
+            if (global) global->exported = true;
+            break;
+          }
+          default:
+            error(pos, pos, "invalid export kind 0x%02x", exp->kind);
+            break;
+        }
+      }
+      // Check for duplicate exports.
+      if (ok() && module->export_table.size() > 1) {
+        std::vector<WasmExport> sorted_exports(module->export_table);
+        const byte* base = start_;
+        auto cmp_less = [base](const WasmExport& a, const WasmExport& b) {
+          // Return true if a < b.
+          if (a.name_length != b.name_length) {
+            return a.name_length < b.name_length;
+          }
+          return memcmp(base + a.name_offset, base + b.name_offset,
+                        a.name_length) < 0;
+        };
+        std::stable_sort(sorted_exports.begin(), sorted_exports.end(),
+                         cmp_less);
+        auto it = sorted_exports.begin();
+        WasmExport* last = &*it++;
+        for (auto end = sorted_exports.end(); it != end; last = &*it++) {
+          DCHECK(!cmp_less(*it, *last));  // Vector must be sorted.
+          if (!cmp_less(*last, *it)) {
+            const byte* pc = start_ + it->name_offset;
+            error(pc, pc,
+                  "Duplicate export name '%.*s' for functions %d and %d",
+                  it->name_length, pc, last->index, it->index);
+            break;
+          }
+        }
+      }
+      section_iter.advance();
+    }
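    // The duplicate check above sorts a copy of the export table (shorter
    // names first, then byte-wise via memcmp) so that equal names end up
    // adjacent. The same idea on plain strings, sketched with made-up data:
    //
    //   bool HasDuplicateName(std::vector<std::string> names) {
    //     auto cmp_less = [](const std::string& a, const std::string& b) {
    //       if (a.size() != b.size()) return a.size() < b.size();
    //       return a < b;  // same length: compare contents
    //     };
    //     std::stable_sort(names.begin(), names.end(), cmp_less);
    //     return std::adjacent_find(names.begin(), names.end()) != names.end();
    //   }
    //
    //   HasDuplicateName({"a", "bb", "a"});  // true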
+
+    // ===== Start section ===================================================
+    if (section_iter.section_code() == kStartSectionCode) {
+      WasmFunction* func;
+      const byte* pos = pc_;
+      module->start_function_index = consume_func_index(module, &func);
+      if (func && func->sig->parameter_count() > 0) {
+        error(pos, "invalid start function: non-zero parameter count");
+      }
+      section_iter.advance();
+    }
+
+    // ===== Elements section ================================================
+    if (section_iter.section_code() == kElementSectionCode) {
+      uint32_t element_count = consume_u32v("element count");
+      for (uint32_t i = 0; ok() && i < element_count; ++i) {
+        uint32_t table_index = consume_u32v("table index");
+        if (table_index != 0) error("illegal table index != 0");
+        WasmInitExpr offset = consume_init_expr(module, kAstI32);
+        uint32_t num_elem = consume_u32v("number of elements");
+        std::vector<uint32_t> vector;
+        module->table_inits.push_back({table_index, offset, vector});
+        WasmTableInit* init = &module->table_inits.back();
+        init->entries.reserve(SafeReserve(num_elem));
+        for (uint32_t j = 0; ok() && j < num_elem; j++) {
+          WasmFunction* func = nullptr;
+          init->entries.push_back(consume_func_index(module, &func));
+        }
+      }
+
+      section_iter.advance();
+    }
+
+    // ===== Code section ====================================================
+    if (section_iter.section_code() == kCodeSectionCode) {
+      const byte* pos = pc_;
+      uint32_t functions_count = consume_u32v("functions count");
+      if (functions_count != module->num_declared_functions) {
+        error(pos, pos, "function body count %u mismatch (%u expected)",
+              functions_count, module->num_declared_functions);
+      }
+      for (uint32_t i = 0; ok() && i < functions_count; ++i) {
+        WasmFunction* function =
+            &module->functions[i + module->num_imported_functions];
+        uint32_t size = consume_u32v("body size");
+        function->code_start_offset = pc_offset();
+        function->code_end_offset = pc_offset() + size;
+        consume_bytes(size, "function body");
+      }
+      section_iter.advance();
+    }
+
+    // ===== Data section ====================================================
+    if (section_iter.section_code() == kDataSectionCode) {
+      uint32_t data_segments_count = consume_u32v("data segments count");
+      module->data_segments.reserve(SafeReserve(data_segments_count));
+      for (uint32_t i = 0; ok() && i < data_segments_count; ++i) {
+        TRACE("DecodeDataSegment[%d] module+%d\n", i,
+              static_cast<int>(pc_ - start_));
+        module->data_segments.push_back({
+            NO_INIT,  // dest_addr
+            0,        // source_offset
+            0         // source_size
+        });
+        WasmDataSegment* segment = &module->data_segments.back();
+        DecodeDataSegmentInModule(module, segment);
+      }
+      section_iter.advance();
+    }
+
+    // ===== Name section ====================================================
+    if (section_iter.section_code() == kNameSectionCode) {
+      const byte* pos = pc_;
+      uint32_t functions_count = consume_u32v("functions count");
+      if (functions_count != module->num_declared_functions) {
+        error(pos, pos, "function name count %u mismatch (%u expected)",
+              functions_count, module->num_declared_functions);
+      }
+
+      for (uint32_t i = 0; ok() && i < functions_count; ++i) {
+        WasmFunction* function =
+            &module->functions[i + module->num_imported_functions];
+        function->name_offset = consume_string(&function->name_length, false);
+
+        uint32_t local_names_count = consume_u32v("local names count");
+        for (uint32_t j = 0; ok() && j < local_names_count; j++) {
+          uint32_t unused = 0;
+          uint32_t offset = consume_string(&unused, false);
+          USE(unused);
+          USE(offset);
+        }
+      }
+      section_iter.advance();
+    }
+
+    // ===== Remaining sections ==============================================
+    if (section_iter.more() && ok()) {
+      error(pc(), pc(), "unexpected section: %s",
+            SectionName(section_iter.section_code()));
+    }
+
+    if (ok()) {
+      CalculateGlobalOffsets(module);
+      PreinitializeIndirectFunctionTables(module);
+    }
     const WasmModule* finished_module = module;
     ModuleResult result = toResult(finished_module);
-    if (FLAG_dump_wasm_module) {
-      DumpModule(module, result);
-    }
+    if (FLAG_dump_wasm_module) DumpModule(module, result);
     return result;
   }
 
@@ -405,27 +602,6 @@
     return count < kMaxReserve ? count : kMaxReserve;
   }
 
-  void CheckForFunctions(WasmModule* module, WasmSection::Code section) {
-    if (module->functions.size() == 0) {
-      error(pc_ - 1, nullptr, "functions must appear before section %s",
-            WasmSection::getName(section));
-    }
-  }
-
-  int CheckSectionOrder(int current_order, WasmSection::Code section) {
-    int next_order = WasmSection::getOrder(section);
-    if (next_order == 0) return current_order;
-    if (next_order == current_order) {
-      error(pc_, pc_, "section \"%s\" already defined",
-            WasmSection::getName(section));
-    }
-    if (next_order < current_order) {
-      error(pc_, pc_, "section \"%s\" out of order",
-            WasmSection::getName(section));
-    }
-    return next_order;
-  }
-
   // Decodes a single anonymous function starting at {start_}.
   FunctionResult DecodeSingleFunction(ModuleEnv* module_env,
                                       WasmFunction* function) {
@@ -451,6 +627,11 @@
     return ok() ? result : nullptr;
   }
 
+  WasmInitExpr DecodeInitExpr(const byte* start) {
+    pc_ = start;
+    return consume_init_expr(nullptr, kAstStmt);
+  }
+
  private:
   Zone* module_zone;
   ModuleResult result_;
@@ -459,15 +640,28 @@
   uint32_t off(const byte* ptr) { return static_cast<uint32_t>(ptr - start_); }
 
   // Decodes a single global entry inside a module starting at {pc_}.
-  void DecodeGlobalInModule(WasmGlobal* global) {
-    global->name_offset = consume_string(&global->name_length, false);
-    if (!unibrow::Utf8::Validate(start_ + global->name_offset,
-                                 global->name_length)) {
-      error("global name is not valid utf8");
+  void DecodeGlobalInModule(WasmModule* module, uint32_t index,
+                            WasmGlobal* global) {
+    global->type = consume_value_type();
+    global->mutability = consume_u8("mutability") != 0;
+    const byte* pos = pc();
+    global->init = consume_init_expr(module, kAstStmt);
+    switch (global->init.kind) {
+      case WasmInitExpr::kGlobalIndex:
+        if (global->init.val.global_index >= index) {
+          error("invalid global index in init expression");
+        } else if (module->globals[index].type != global->type) {
+          error("type mismatch in global initialization");
+        }
+        break;
+      default:
+        if (global->type != TypeOf(module, global->init)) {
+          error(pos, pos,
+                "type error in global initialization, expected %s, got %s",
+                WasmOpcodes::TypeName(global->type),
+                WasmOpcodes::TypeName(TypeOf(module, global->init)));
+        }
     }
-    global->type = consume_local_type();
-    global->offset = 0;
-    global->exported = consume_u8("exported") != 0;
   }
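  // One global entry, as read above, is a value-type byte, a mutability byte,
  // and an init expression terminated by an end opcode. Sketched with the
  // opcode/type constants this file already uses; the values are illustrative:
  //
  //   const byte example_global_entry[] = {
  //       kLocalI32,          // value type
  //       1,                  // mutability: mutable
  //       kExprI32Const, 42,  // init expression: i32.const 42
  //       kExprEnd            // terminator checked via expect_u8("end opcode")
  //   };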
 
   bool IsWithinLimit(uint32_t limit, uint32_t offset, uint32_t size) {
@@ -479,10 +673,10 @@
   // Decodes a single data segment entry inside a module starting at {pc_}.
   void DecodeDataSegmentInModule(WasmModule* module, WasmDataSegment* segment) {
     const byte* start = pc_;
-    segment->dest_addr = consume_u32v("destination");
+    expect_u8("linear memory index", 0);
+    segment->dest_addr = consume_init_expr(module, kAstI32);
     segment->source_size = consume_u32v("source size");
     segment->source_offset = static_cast<uint32_t>(pc_ - start_);
-    segment->init = true;
 
     // Validate the data is in the module.
     uint32_t module_limit = static_cast<uint32_t>(limit_ - start_);
@@ -491,40 +685,11 @@
       error(start, "segment out of bounds of module");
     }
 
-    // Validate that the segment will fit into the (minimum) memory.
-    uint32_t memory_limit =
-        WasmModule::kPageSize * (module ? module->min_mem_pages
-                                        : WasmModule::kMaxMemPages);
-    if (!IsWithinLimit(memory_limit, segment->dest_addr,
-                       segment->source_size)) {
-      error(start, "segment out of bounds of memory");
-    }
-
-    consume_bytes(segment->source_size);
-  }
-
-  // Decodes a single function table inside a module starting at {pc_}.
-  void DecodeFunctionTableInModule(WasmModule* module,
-                                   WasmIndirectFunctionTable* table) {
-    table->size = consume_u32v("function table entry count");
-    table->max_size = table->size;
-
-    if (table->max_size != table->size) {
-      error("invalid table maximum size");
-    }
-
-    for (uint32_t i = 0; i < table->size; ++i) {
-      uint16_t index = consume_u32v();
-      if (index >= module->functions.size()) {
-        error(pc_ - sizeof(index), "invalid function index");
-        break;
-      }
-      table->values.push_back(index);
-    }
+    consume_bytes(segment->source_size, "segment data");
   }
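  // Each data segment, as read above, is a linear-memory index (currently
  // required to be 0), an init expression for the destination address, a
  // source size, and then the raw payload bytes. An illustrative encoding,
  // using the opcode names from this file:
  //
  //   const byte example_data_segment[] = {
  //       0,                  // linear memory index
  //       kExprI32Const, 16,  // dest_addr: i32.const 16
  //       kExprEnd,
  //       2, 0xaa, 0xbb       // source size, then the segment data
  //   };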
 
   // Calculate individual global offsets and total size of globals table.
-  void CalculateGlobalsOffsets(WasmModule* module) {
+  void CalculateGlobalOffsets(WasmModule* module) {
     uint32_t offset = 0;
     if (module->globals.size() == 0) {
       module->globals_size = 0;
@@ -540,6 +705,30 @@
     module->globals_size = offset;
   }
 
+  // TODO(titzer): This only works when there are no overlapping
+  // initializations from global bases for entries.
+  void PreinitializeIndirectFunctionTables(WasmModule* module) {
+    // Fill all tables with invalid entries first.
+    for (WasmIndirectFunctionTable& table : module->function_tables) {
+      table.values.resize(table.size);
+      for (size_t i = 0; i < table.size; i++) {
+        table.values[i] = kInvalidFunctionIndex;
+      }
+    }
+    for (WasmTableInit& init : module->table_inits) {
+      if (init.offset.kind != WasmInitExpr::kI32Const) continue;
+      if (init.table_index >= module->function_tables.size()) continue;
+      WasmIndirectFunctionTable& table =
+          module->function_tables[init.table_index];
+      for (size_t i = 0; i < init.entries.size(); i++) {
+        size_t index = i + init.offset.val.i32_const;
+        if (index < table.values.size()) {
+          table.values[index] = init.entries[i];
+        }
+      }
+    }
+  }
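  // Only element segments with an i32.const offset are pre-applied here; the
  // rest keep kInvalidFunctionIndex and are resolved at instantiation time.
  // The fill rule, sketched on plain vectors with assumed sizes:
  //
  //   std::vector<int32_t> values(4, kInvalidFunctionIndex);
  //   std::vector<uint32_t> entries = {5, 6};  // one init at offset 1
  //   for (size_t i = 0; i < entries.size(); i++) {
  //     size_t index = i + 1;                  // i + offset.val.i32_const
  //     if (index < values.size()) values[index] = entries[i];
  //   }
  //   // values == {kInvalidFunctionIndex, 5, 6, kInvalidFunctionIndex}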
+
   // Verifies the body (code) of a given function.
   void VerifyFunctionBody(uint32_t func_num, ModuleEnv* menv,
                           WasmFunction* function) {
@@ -570,26 +759,18 @@
     }
   }
 
-  // Reads a single 32-bit unsigned integer interpreted as an offset, checking
-  // the offset is within bounds and advances.
-  uint32_t consume_offset(const char* name = nullptr) {
-    uint32_t offset = consume_u32(name ? name : "offset");
-    if (offset > static_cast<uint32_t>(limit_ - start_)) {
-      error(pc_ - sizeof(uint32_t), "offset out of bounds of module");
-    }
-    return offset;
-  }
-
   // Reads a length-prefixed string, checking that it is within bounds. Returns
   // the offset of the string, and the length as an out parameter.
   uint32_t consume_string(uint32_t* length, bool validate_utf8) {
     *length = consume_u32v("string length");
     uint32_t offset = pc_offset();
-    TRACE("  +%u  %-20s: (%u bytes)\n", offset, "string", *length);
-    if (validate_utf8 && !unibrow::Utf8::Validate(pc_, *length)) {
-      error(pc_, "no valid UTF-8 string");
+    const byte* string_start = pc_;
+    // Consume the bytes before validation to guarantee that the string is not
+    // out of bounds.
+    consume_bytes(*length, "string");
+    if (ok() && validate_utf8 &&
+        !unibrow::Utf8::Validate(string_start, *length)) {
+      error(string_start, "no valid UTF-8 string");
     }
-    consume_bytes(*length);
     return offset;
   }
 
@@ -607,25 +788,134 @@
   }
 
   uint32_t consume_func_index(WasmModule* module, WasmFunction** func) {
+    return consume_index("function index", module->functions, func);
+  }
+
+  uint32_t consume_global_index(WasmModule* module, WasmGlobal** global) {
+    return consume_index("global index", module->globals, global);
+  }
+
+  uint32_t consume_table_index(WasmModule* module,
+                               WasmIndirectFunctionTable** table) {
+    return consume_index("table index", module->function_tables, table);
+  }
+
+  template <typename T>
+  uint32_t consume_index(const char* name, std::vector<T>& vector, T** ptr) {
     const byte* pos = pc_;
-    uint32_t func_index = consume_u32v("function index");
-    if (func_index >= module->functions.size()) {
-      error(pos, pos, "function index %u out of bounds (%d functions)",
-            func_index, static_cast<int>(module->functions.size()));
-      *func = nullptr;
+    uint32_t index = consume_u32v(name);
+    if (index >= vector.size()) {
+      error(pos, pos, "%s %u out of bounds (%d entries)", name, index,
+            static_cast<int>(vector.size()));
+      *ptr = nullptr;
       return 0;
     }
-    *func = &module->functions[func_index];
-    return func_index;
+    *ptr = &vector[index];
+    return index;
+  }
+
+  void consume_resizable_limits(const char* name, const char* units,
+                                uint32_t max_value, uint32_t* initial,
+                                uint32_t* maximum) {
+    uint32_t flags = consume_u32v("resizable limits flags");
+    const byte* pos = pc();
+    *initial = consume_u32v("initial size");
+    if (*initial > max_value) {
+      error(pos, pos,
+            "initial %s size (%u %s) is larger than maximum allowable (%u)",
+            name, *initial, units, max_value);
+    }
+    if (flags & 1) {
+      pos = pc();
+      *maximum = consume_u32v("maximum size");
+      if (*maximum > max_value) {
+        error(pos, pos,
+              "maximum %s size (%u %s) is larger than maximum allowable (%u)",
+              name, *maximum, units, max_value);
+      }
+      if (*maximum < *initial) {
+        error(pos, pos, "maximum %s size (%u %s) is less than initial (%u %s)",
+              name, *maximum, units, *initial, units);
+      }
+    } else {
+      *maximum = 0;
+    }
+  }
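  // Resizable limits, as consumed above, are a flags value, an initial size,
  // and (only when bit 0 of the flags is set) a maximum size; without that
  // flag the maximum is recorded as 0. Two illustrative encodings with
  // made-up sizes:
  //
  //   const byte limits_with_max[] = {1, 16, 64};  // flags, initial, maximum
  //   const byte limits_no_max[]   = {0, 16};      // flags, initial only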
+
+  bool expect_u8(const char* name, uint8_t expected) {
+    const byte* pos = pc();
+    uint8_t value = consume_u8(name);
+    if (value != expected) {
+      error(pos, pos, "expected %s 0x%02x, got 0x%02x", name, expected, value);
+      return false;
+    }
+    return true;
+  }
+
+  WasmInitExpr consume_init_expr(WasmModule* module, LocalType expected) {
+    const byte* pos = pc();
+    uint8_t opcode = consume_u8("opcode");
+    WasmInitExpr expr;
+    unsigned len = 0;
+    switch (opcode) {
+      case kExprGetGlobal: {
+        GlobalIndexOperand operand(this, pc() - 1);
+        expr.kind = WasmInitExpr::kGlobalIndex;
+        expr.val.global_index = operand.index;
+        len = operand.length;
+        break;
+      }
+      case kExprI32Const: {
+        ImmI32Operand operand(this, pc() - 1);
+        expr.kind = WasmInitExpr::kI32Const;
+        expr.val.i32_const = operand.value;
+        len = operand.length;
+        break;
+      }
+      case kExprF32Const: {
+        ImmF32Operand operand(this, pc() - 1);
+        expr.kind = WasmInitExpr::kF32Const;
+        expr.val.f32_const = operand.value;
+        len = operand.length;
+        break;
+      }
+      case kExprI64Const: {
+        ImmI64Operand operand(this, pc() - 1);
+        expr.kind = WasmInitExpr::kI64Const;
+        expr.val.i64_const = operand.value;
+        len = operand.length;
+        break;
+      }
+      case kExprF64Const: {
+        ImmF64Operand operand(this, pc() - 1);
+        expr.kind = WasmInitExpr::kF64Const;
+        expr.val.f64_const = operand.value;
+        len = operand.length;
+        break;
+      }
+      default: {
+        error("invalid opcode in initialization expression");
+        expr.kind = WasmInitExpr::kNone;
+        expr.val.i32_const = 0;
+      }
+    }
+    consume_bytes(len, "init code");
+    if (!expect_u8("end opcode", kExprEnd)) {
+      expr.kind = WasmInitExpr::kNone;
+    }
+    if (expected != kAstStmt && TypeOf(module, expr) != kAstI32) {
+      error(pos, pos, "type error in init expression, expected %s, got %s",
+            WasmOpcodes::TypeName(expected),
+            WasmOpcodes::TypeName(TypeOf(module, expr)));
+    }
+    return expr;
   }
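  // An init expression, as consumed above, is a single constant-producing
  // opcode (an i32/i64/f32/f64 constant or get_global) with its immediate,
  // terminated by an end opcode; anything else is reported as an error and
  // yields WasmInitExpr::kNone. Two illustrative encodings:
  //
  //   const byte i32_init[]    = {kExprI32Const, 42, kExprEnd};
  //   const byte global_init[] = {kExprGetGlobal, 0, kExprEnd};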
 
   // Reads a single 8-bit integer, interpreting it as a local type.
-  LocalType consume_local_type() {
-    byte val = consume_u8("local type");
+  LocalType consume_value_type() {
+    byte val = consume_u8("value type");
     LocalTypeCode t = static_cast<LocalTypeCode>(val);
     switch (t) {
-      case kLocalVoid:
-        return kAstStmt;
       case kLocalI32:
         return kAstI32;
       case kLocalI64:
@@ -634,6 +924,8 @@
         return kAstF32;
       case kLocalF64:
         return kAstF64;
+      case kLocalS128:
+        return kAstS128;
       default:
         error(pc_ - 1, "invalid local type");
         return kAstStmt;
@@ -642,19 +934,12 @@
 
   // Parses a type entry, which is currently limited to functions only.
   FunctionSig* consume_sig() {
-    const byte* pos = pc_;
-    byte form = consume_u8("type form");
-    if (form != kWasmFunctionTypeForm) {
-      error(pos, pos, "expected function type form (0x%02x), got: 0x%02x",
-            kWasmFunctionTypeForm, form);
-      return nullptr;
-    }
+    if (!expect_u8("type form", kWasmFunctionTypeForm)) return nullptr;
     // parse parameter types
     uint32_t param_count = consume_u32v("param count");
     std::vector<LocalType> params;
-    for (uint32_t i = 0; i < param_count; ++i) {
-      LocalType param = consume_local_type();
-      if (param == kAstStmt) error(pc_ - 1, "invalid void parameter type");
+    for (uint32_t i = 0; ok() && i < param_count; ++i) {
+      LocalType param = consume_value_type();
       params.push_back(param);
     }
 
@@ -667,12 +952,16 @@
       return nullptr;
     }
     std::vector<LocalType> returns;
-    for (uint32_t i = 0; i < return_count; ++i) {
-      LocalType ret = consume_local_type();
-      if (ret == kAstStmt) error(pc_ - 1, "invalid void return type");
+    for (uint32_t i = 0; ok() && i < return_count; ++i) {
+      LocalType ret = consume_value_type();
       returns.push_back(ret);
     }
 
+    if (failed()) {
+      // Decoding failed, return void -> void
+      return new (module_zone) FunctionSig(0, 0, nullptr);
+    }
+
     // FunctionSig stores the return types first.
     LocalType* buffer =
         module_zone->NewArray<LocalType>(param_count + return_count);
@@ -711,7 +1000,7 @@
 };
 
 Vector<const byte> FindSection(const byte* module_start, const byte* module_end,
-                               WasmSection::Code code) {
+                               WasmSectionCode code) {
   Decoder decoder(module_start, module_end);
 
   uint32_t magic_word = decoder.consume_u32("wasm magic");
@@ -720,24 +1009,14 @@
   uint32_t magic_version = decoder.consume_u32("wasm version");
   if (magic_version != kWasmVersion) decoder.error("wrong wasm version");
 
-  while (decoder.more() && decoder.ok()) {
-    // Read the section name.
-    uint32_t string_length = decoder.consume_u32v("section name length");
-    const byte* section_name_start = decoder.pc();
-    decoder.consume_bytes(string_length);
-    if (decoder.failed()) break;
-
-    WasmSection::Code section =
-        WasmSection::lookup(section_name_start, string_length);
-
-    // Read and check the section size.
-    uint32_t section_length = decoder.consume_u32v("section length");
-
-    const byte* section_start = decoder.pc();
-    decoder.consume_bytes(section_length);
-    if (section == code && decoder.ok()) {
-      return Vector<const uint8_t>(section_start, section_length);
+  WasmSectionIterator section_iter(decoder);
+  while (section_iter.more()) {
+    if (section_iter.section_code() == code) {
+      return Vector<const uint8_t>(section_iter.section_start(),
+                                   section_iter.section_length());
     }
+    decoder.consume_bytes(section_iter.section_length(), "section payload");
+    section_iter.advance();
   }
 
   return Vector<const uint8_t>();
@@ -772,6 +1051,13 @@
   return decoder.DecodeFunctionSignature(start);
 }
 
+WasmInitExpr DecodeWasmInitExprForTesting(const byte* start, const byte* end) {
+  AccountingAllocator allocator;
+  Zone zone(&allocator);
+  ModuleDecoder decoder(&zone, start, end, kWasmOrigin);
+  return decoder.DecodeInitExpr(start);
+}
+
 FunctionResult DecodeWasmFunction(Isolate* isolate, Zone* zone,
                                   ModuleEnv* module_env,
                                   const byte* function_start,
@@ -789,15 +1075,26 @@
   return decoder.DecodeSingleFunction(module_env, function);
 }
 
-FunctionOffsetsResult DecodeWasmFunctionOffsets(const byte* module_start,
-                                                const byte* module_end) {
+FunctionOffsetsResult DecodeWasmFunctionOffsets(
+    const byte* module_start, const byte* module_end,
+    uint32_t num_imported_functions) {
+  // Find and decode the code section.
   Vector<const byte> code_section =
-      FindSection(module_start, module_end, WasmSection::Code::FunctionBodies);
+      FindSection(module_start, module_end, kCodeSectionCode);
   Decoder decoder(code_section.start(), code_section.end());
-  if (!code_section.start()) decoder.error("no code section");
+  FunctionOffsets table;
+  if (!code_section.start()) {
+    decoder.error("no code section");
+    return decoder.toResult(std::move(table));
+  }
+
+  // Reserve entries for the imported functions.
+  table.reserve(num_imported_functions);
+  for (uint32_t i = 0; i < num_imported_functions; i++) {
+    table.push_back(std::make_pair(0, 0));
+  }
 
   uint32_t functions_count = decoder.consume_u32v("functions count");
-  FunctionOffsets table;
   // Take care of invalid input here.
   if (functions_count < static_cast<unsigned>(code_section.length()) / 2)
     table.reserve(functions_count);
diff --git a/src/wasm/module-decoder.h b/src/wasm/module-decoder.h
index dd6bd3b..22a313c 100644
--- a/src/wasm/module-decoder.h
+++ b/src/wasm/module-decoder.h
@@ -12,9 +12,11 @@
 namespace internal {
 namespace wasm {
 // Decodes the bytes of a WASM module between {module_start} and {module_end}.
-ModuleResult DecodeWasmModule(Isolate* isolate, Zone* zone,
-                              const byte* module_start, const byte* module_end,
-                              bool verify_functions, ModuleOrigin origin);
+V8_EXPORT_PRIVATE ModuleResult DecodeWasmModule(Isolate* isolate, Zone* zone,
+                                                const byte* module_start,
+                                                const byte* module_end,
+                                                bool verify_functions,
+                                                ModuleOrigin origin);
 
 // Exposed for testing. Decodes a single function signature, allocating it
 // in the given zone. Returns {nullptr} upon failure.
@@ -30,8 +32,11 @@
 // Extracts the function offset table from the wasm module bytes.
 // Returns a vector with <offset, length> entries, or failure if the wasm bytes
 // are detected as invalid. Note that this validation is not complete.
-FunctionOffsetsResult DecodeWasmFunctionOffsets(const byte* module_start,
-                                                const byte* module_end);
+FunctionOffsetsResult DecodeWasmFunctionOffsets(
+    const byte* module_start, const byte* module_end,
+    uint32_t num_imported_functions);
+
+WasmInitExpr DecodeWasmInitExprForTesting(const byte* start, const byte* end);
 
 }  // namespace wasm
 }  // namespace internal
diff --git a/src/wasm/switch-logic.h b/src/wasm/switch-logic.h
index 8cef08b..160e0d6 100644
--- a/src/wasm/switch-logic.h
+++ b/src/wasm/switch-logic.h
@@ -5,8 +5,8 @@
 #ifndef V8_WASM_SWITCH_LOGIC_H
 #define V8_WASM_SWITCH_LOGIC_H
 
-#include "src/zone-containers.h"
-#include "src/zone.h"
+#include "src/zone/zone-containers.h"
+#include "src/zone/zone.h"
 
 namespace v8 {
 namespace internal {
diff --git a/src/wasm/wasm-debug.cc b/src/wasm/wasm-debug.cc
index 54e7100..42a8e5f 100644
--- a/src/wasm/wasm-debug.cc
+++ b/src/wasm/wasm-debug.cc
@@ -32,11 +32,15 @@
   FunctionOffsetsResult function_offsets;
   {
     DisallowHeapAllocation no_gc;
+    Handle<JSObject> wasm_object(debug_info->wasm_object(), isolate);
+    uint32_t num_imported_functions =
+        wasm::GetNumImportedFunctions(wasm_object);
     SeqOneByteString *wasm_bytes =
         wasm::GetWasmBytes(debug_info->wasm_object());
     const byte *bytes_start = wasm_bytes->GetChars();
     const byte *bytes_end = bytes_start + wasm_bytes->length();
-    function_offsets = wasm::DecodeWasmFunctionOffsets(bytes_start, bytes_end);
+    function_offsets = wasm::DecodeWasmFunctionOffsets(bytes_start, bytes_end,
+                                                       num_imported_functions);
   }
   DCHECK(function_offsets.ok());
   size_t array_size = 2 * kIntSize * function_offsets.val.size();
@@ -179,7 +183,7 @@
     Vector<const uint8_t> bytes_vec = GetFunctionBytes(debug_info, func_index);
     DisallowHeapAllocation no_gc;
 
-    base::AccountingAllocator allocator;
+    AccountingAllocator allocator;
     bool ok = PrintAst(
         &allocator, FunctionBodyForTesting(bytes_vec.start(), bytes_vec.end()),
         disassembly_os, nullptr);
@@ -208,7 +212,7 @@
     Vector<const uint8_t> bytes_vec = GetFunctionBytes(debug_info, func_index);
     DisallowHeapAllocation no_gc;
 
-    v8::base::AccountingAllocator allocator;
+    AccountingAllocator allocator;
     bool ok = PrintAst(
         &allocator, FunctionBodyForTesting(bytes_vec.start(), bytes_vec.end()),
         null_stream, &offset_table_vec);
diff --git a/src/wasm/wasm-external-refs.cc b/src/wasm/wasm-external-refs.cc
index 09294c2..4c4c91b 100644
--- a/src/wasm/wasm-external-refs.cc
+++ b/src/wasm/wasm-external-refs.cc
@@ -206,9 +206,6 @@
 void float64_pow_wrapper(double* param0, double* param1) {
   double x = ReadDoubleValue(param0);
   double y = ReadDoubleValue(param1);
-  if (std::isnan(y) || ((x == 1 || x == -1) && std::isinf(y))) {
-    WriteDoubleValue(param0, std::numeric_limits<double>::quiet_NaN());
-  }
   WriteDoubleValue(param0, Pow(x, y));
 }
 }  // namespace wasm
diff --git a/src/wasm/wasm-interpreter.cc b/src/wasm/wasm-interpreter.cc
index 7e3127d..2ac681e 100644
--- a/src/wasm/wasm-interpreter.cc
+++ b/src/wasm/wasm-interpreter.cc
@@ -10,8 +10,8 @@
 #include "src/wasm/wasm-external-refs.h"
 #include "src/wasm/wasm-module.h"
 
-#include "src/base/accounting-allocator.h"
-#include "src/zone-containers.h"
+#include "src/zone/accounting-allocator.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace internal {
@@ -654,6 +654,48 @@
   return bit_cast<int64_t>(a);
 }
 
+static inline int32_t ExecuteGrowMemory(uint32_t delta_pages,
+                                        WasmModuleInstance* instance) {
+  // TODO(ahaas): Move memory allocation to wasm-module.cc for better
+  // encapsulation.
+  if (delta_pages > wasm::WasmModule::kMaxMemPages) {
+    return -1;
+  }
+  uint32_t old_size = instance->mem_size;
+  uint32_t new_size;
+  byte* new_mem_start;
+  if (instance->mem_size == 0) {
+    if (delta_pages > wasm::WasmModule::kMaxMemPages) {
+      return -1;
+    }
+    // TODO(gdeepti): Fix bounds check to take into account size of memtype.
+    new_size = delta_pages * wasm::WasmModule::kPageSize;
+    new_mem_start = static_cast<byte*>(calloc(new_size, sizeof(byte)));
+    if (!new_mem_start) {
+      return -1;
+    }
+  } else {
+    DCHECK_NOT_NULL(instance->mem_start);
+    new_size = old_size + delta_pages * wasm::WasmModule::kPageSize;
+    if (new_size >
+        wasm::WasmModule::kMaxMemPages * wasm::WasmModule::kPageSize) {
+      return -1;
+    }
+    new_mem_start = static_cast<byte*>(realloc(instance->mem_start, new_size));
+    if (!new_mem_start) {
+      return -1;
+    }
+    // Zero-initialize the memory newly added by realloc.
+    memset(new_mem_start + old_size, 0, new_size - old_size);
+  }
+  instance->mem_start = new_mem_start;
+  instance->mem_size = new_size;
+  // Return the old memory size, in pages.
+  return static_cast<int32_t>(old_size / WasmModule::kPageSize);
+}
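// On success the helper above returns the previous memory size in pages (0 for
// a first allocation) and zero-fills the newly added region; on failure (page
// limit exceeded or the allocation fails) it returns -1 and leaves the
// instance unchanged. Expected results for a fresh instance, as a sketch:
//
//   ExecuteGrowMemory(1, instance);        // -> 0, memory is now 1 page
//   ExecuteGrowMemory(2, instance);        // -> 1, memory is now 3 pages
//   ExecuteGrowMemory(1 << 30, instance);  // -> -1, exceeds kMaxMemPages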
+
 enum InternalOpcode {
 #define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
   FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
@@ -680,54 +722,38 @@
  public:
   ControlTransferMap map_;
 
-  ControlTransfers(Zone* zone, size_t locals_encoded_size, const byte* start,
-                   const byte* end)
+  ControlTransfers(Zone* zone, ModuleEnv* env, AstLocalDecls* locals,
+                   const byte* start, const byte* end)
       : map_(zone) {
-    // A control reference including from PC, from value depth, and whether
-    // a value is explicitly passed (e.g. br/br_if/br_table with value).
-    struct CRef {
-      const byte* pc;
-      sp_t value_depth;
-      bool explicit_value;
-    };
-
     // Represents a control flow label.
     struct CLabel : public ZoneObject {
       const byte* target;
-      size_t value_depth;
-      ZoneVector<CRef> refs;
+      ZoneVector<const byte*> refs;
 
-      CLabel(Zone* zone, size_t v)
-          : target(nullptr), value_depth(v), refs(zone) {}
+      explicit CLabel(Zone* zone) : target(nullptr), refs(zone) {}
 
       // Bind this label to the given PC.
-      void Bind(ControlTransferMap* map, const byte* start, const byte* pc,
-                bool expect_value) {
+      void Bind(ControlTransferMap* map, const byte* start, const byte* pc) {
         DCHECK_NULL(target);
         target = pc;
-        for (auto from : refs) {
-          auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
-          auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
-          ControlTransfer::StackAction action = ControlTransfer::kNoAction;
-          if (expect_value && !from.explicit_value) {
-            action = spdiff == 0 ? ControlTransfer::kPushVoid
-                                 : ControlTransfer::kPopAndRepush;
-          }
-          pc_t offset = static_cast<size_t>(from.pc - start);
-          (*map)[offset] = {pcdiff, spdiff, action};
+        for (auto from_pc : refs) {
+          auto pcdiff = static_cast<pcdiff_t>(target - from_pc);
+          size_t offset = static_cast<size_t>(from_pc - start);
+          (*map)[offset] = pcdiff;
         }
       }
 
       // Reference this label from the given location.
-      void Ref(ControlTransferMap* map, const byte* start, CRef from) {
-        DCHECK_GE(from.value_depth, value_depth);
+      void Ref(ControlTransferMap* map, const byte* start,
+               const byte* from_pc) {
         if (target) {
-          auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
-          auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
-          pc_t offset = static_cast<size_t>(from.pc - start);
-          (*map)[offset] = {pcdiff, spdiff, ControlTransfer::kNoAction};
+          // Target being bound before a reference means this is a loop.
+          DCHECK_EQ(kExprLoop, *target);
+          auto pcdiff = static_cast<pcdiff_t>(target - from_pc);
+          size_t offset = static_cast<size_t>(from_pc - start);
+          (*map)[offset] = pcdiff;
         } else {
-          refs.push_back(from);
+          refs.push_back(from_pc);
         }
       }
     };
@@ -738,122 +764,104 @@
       CLabel* end_label;
       CLabel* else_label;
 
-      void Ref(ControlTransferMap* map, const byte* start, const byte* from_pc,
-               size_t from_value_depth, bool explicit_value) {
-        end_label->Ref(map, start, {from_pc, from_value_depth, explicit_value});
+      void Ref(ControlTransferMap* map, const byte* start,
+               const byte* from_pc) {
+        end_label->Ref(map, start, from_pc);
       }
     };
 
     // Compute the ControlTransfer map.
-    // This works by maintaining a stack of control constructs similar to the
+    // This algorithm maintains a stack of control constructs similar to the
     // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
     // bytecodes with their target, as well as determining whether the current
     // bytecodes are within the true or false block of an else.
-    // The value stack depth is tracked as {value_depth} and is needed to
-    // determine how many values to pop off the stack for explicit and
-    // implicit control flow.
-
     std::vector<Control> control_stack;
-    size_t value_depth = 0;
-    for (BytecodeIterator i(start + locals_encoded_size, end); i.has_next();
-         i.next()) {
+    CLabel* func_label = new (zone) CLabel(zone);
+    control_stack.push_back({start, func_label, nullptr});
+    for (BytecodeIterator i(start, end, locals); i.has_next(); i.next()) {
       WasmOpcode opcode = i.current();
-      TRACE("@%u: control %s (depth = %zu)\n", i.pc_offset(),
-            WasmOpcodes::OpcodeName(opcode), value_depth);
+      TRACE("@%u: control %s\n", i.pc_offset(),
+            WasmOpcodes::OpcodeName(opcode));
       switch (opcode) {
         case kExprBlock: {
-          TRACE("control @%u $%zu: Block\n", i.pc_offset(), value_depth);
-          CLabel* label = new (zone) CLabel(zone, value_depth);
+          TRACE("control @%u: Block\n", i.pc_offset());
+          CLabel* label = new (zone) CLabel(zone);
           control_stack.push_back({i.pc(), label, nullptr});
           break;
         }
         case kExprLoop: {
-          TRACE("control @%u $%zu: Loop\n", i.pc_offset(), value_depth);
-          CLabel* label1 = new (zone) CLabel(zone, value_depth);
-          CLabel* label2 = new (zone) CLabel(zone, value_depth);
-          control_stack.push_back({i.pc(), label1, nullptr});
-          control_stack.push_back({i.pc(), label2, nullptr});
-          label2->Bind(&map_, start, i.pc(), false);
+          TRACE("control @%u: Loop\n", i.pc_offset());
+          CLabel* label = new (zone) CLabel(zone);
+          control_stack.push_back({i.pc(), label, nullptr});
+          label->Bind(&map_, start, i.pc());
           break;
         }
         case kExprIf: {
-          TRACE("control @%u $%zu: If\n", i.pc_offset(), value_depth);
-          value_depth--;
-          CLabel* end_label = new (zone) CLabel(zone, value_depth);
-          CLabel* else_label = new (zone) CLabel(zone, value_depth);
+          TRACE("control @%u: If\n", i.pc_offset());
+          CLabel* end_label = new (zone) CLabel(zone);
+          CLabel* else_label = new (zone) CLabel(zone);
           control_stack.push_back({i.pc(), end_label, else_label});
-          else_label->Ref(&map_, start, {i.pc(), value_depth, false});
+          else_label->Ref(&map_, start, i.pc());
           break;
         }
         case kExprElse: {
           Control* c = &control_stack.back();
-          TRACE("control @%u $%zu: Else\n", i.pc_offset(), value_depth);
-          c->end_label->Ref(&map_, start, {i.pc(), value_depth, false});
-          value_depth = c->end_label->value_depth;
+          TRACE("control @%u: Else\n", i.pc_offset());
+          c->end_label->Ref(&map_, start, i.pc());
           DCHECK_NOT_NULL(c->else_label);
-          c->else_label->Bind(&map_, start, i.pc() + 1, false);
+          c->else_label->Bind(&map_, start, i.pc() + 1);
           c->else_label = nullptr;
           break;
         }
         case kExprEnd: {
           Control* c = &control_stack.back();
-          TRACE("control @%u $%zu: End\n", i.pc_offset(), value_depth);
+          TRACE("control @%u: End\n", i.pc_offset());
           if (c->end_label->target) {
             // only loops have bound labels.
             DCHECK_EQ(kExprLoop, *c->pc);
-            control_stack.pop_back();
-            c = &control_stack.back();
+          } else {
+            if (c->else_label) c->else_label->Bind(&map_, start, i.pc());
+            c->end_label->Bind(&map_, start, i.pc() + 1);
           }
-          if (c->else_label)
-            c->else_label->Bind(&map_, start, i.pc() + 1, true);
-          c->end_label->Ref(&map_, start, {i.pc(), value_depth, false});
-          c->end_label->Bind(&map_, start, i.pc() + 1, true);
-          value_depth = c->end_label->value_depth + 1;
           control_stack.pop_back();
           break;
         }
         case kExprBr: {
           BreakDepthOperand operand(&i, i.pc());
-          TRACE("control @%u $%zu: Br[arity=%u, depth=%u]\n", i.pc_offset(),
-                value_depth, operand.arity, operand.depth);
-          value_depth -= operand.arity;
-          control_stack[control_stack.size() - operand.depth - 1].Ref(
-              &map_, start, i.pc(), value_depth, operand.arity > 0);
-          value_depth++;
+          TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), operand.depth);
+          Control* c = &control_stack[control_stack.size() - operand.depth - 1];
+          c->Ref(&map_, start, i.pc());
           break;
         }
         case kExprBrIf: {
           BreakDepthOperand operand(&i, i.pc());
-          TRACE("control @%u $%zu: BrIf[arity=%u, depth=%u]\n", i.pc_offset(),
-                value_depth, operand.arity, operand.depth);
-          value_depth -= (operand.arity + 1);
-          control_stack[control_stack.size() - operand.depth - 1].Ref(
-              &map_, start, i.pc(), value_depth, operand.arity > 0);
-          value_depth++;
+          TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), operand.depth);
+          Control* c = &control_stack[control_stack.size() - operand.depth - 1];
+          c->Ref(&map_, start, i.pc());
           break;
         }
         case kExprBrTable: {
           BranchTableOperand operand(&i, i.pc());
-          TRACE("control @%u $%zu: BrTable[arity=%u count=%u]\n", i.pc_offset(),
-                value_depth, operand.arity, operand.table_count);
-          value_depth -= (operand.arity + 1);
-          for (uint32_t j = 0; j < operand.table_count + 1; ++j) {
-            uint32_t target = operand.read_entry(&i, j);
-            control_stack[control_stack.size() - target - 1].Ref(
-                &map_, start, i.pc() + j, value_depth, operand.arity > 0);
+          BranchTableIterator iterator(&i, operand);
+          TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
+                operand.table_count);
+          while (iterator.has_next()) {
+            uint32_t j = iterator.cur_index();
+            uint32_t target = iterator.next();
+            Control* c = &control_stack[control_stack.size() - target - 1];
+            c->Ref(&map_, start, i.pc() + j);
           }
-          value_depth++;
           break;
         }
         default: {
-          value_depth = value_depth - OpcodeArity(i.pc(), end) + 1;
           break;
         }
       }
     }
+    if (!func_label->target) func_label->Bind(&map_, start, end);
   }
 
-  ControlTransfer Lookup(pc_t from) {
+  pcdiff_t Lookup(pc_t from) {
     auto result = map_.find(from);
     if (result == map_.end()) {
       V8_Fatal(__FILE__, __LINE__, "no control target for pc %zu", from);
@@ -899,7 +907,7 @@
     if (function->func_index < interpreter_code_.size()) {
       InterpreterCode* code = &interpreter_code_[function->func_index];
       DCHECK_EQ(function, code->function);
-      return code;
+      return Preprocess(code);
     }
     return nullptr;
   }
@@ -923,9 +931,9 @@
     if (code->targets == nullptr && code->start) {
       // Compute the control targets map and the local declarations.
       CHECK(DecodeLocalDecls(code->locals, code->start, code->end));
-      code->targets =
-          new (zone_) ControlTransfers(zone_, code->locals.decls_encoded_size,
-                                       code->orig_start, code->orig_end);
+      ModuleEnv env = {module_, nullptr, kWasmOrigin};
+      code->targets = new (zone_) ControlTransfers(
+          zone_, &env, &code->locals, code->orig_start, code->orig_end);
     }
     return code;
   }
@@ -964,6 +972,7 @@
         instance_(instance),
         stack_(zone),
         frames_(zone),
+        blocks_(zone),
         state_(WasmInterpreter::STOPPED),
         break_pc_(kInvalidPc),
         trap_reason_(kTrapCount) {}
@@ -984,6 +993,9 @@
       stack_.push_back(args[i]);
     }
     frames_.back().ret_pc = InitLocals(code);
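+    // The function body is an implicit block whose arity is the return count.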
+    blocks_.push_back(
+        {0, stack_.size(), frames_.size(),
+         static_cast<uint32_t>(code->function->sig->return_count())});
     TRACE("  => PushFrame(#%u @%zu)\n", code->function->func_index,
           frames_.back().ret_pc);
   }
@@ -1032,11 +1044,11 @@
     return nullptr;
   }
 
-  virtual WasmVal GetReturnValue() {
+  virtual WasmVal GetReturnValue(int index) {
     if (state_ == WasmInterpreter::TRAPPED) return WasmVal(0xdeadbeef);
     CHECK_EQ(WasmInterpreter::FINISHED, state_);
-    CHECK_EQ(1, stack_.size());
-    return stack_[0];
+    CHECK_LT(static_cast<size_t>(index), stack_.size());
+    return stack_[index];
   }
 
   virtual pc_t GetBreakpointPc() { return break_pc_; }
@@ -1060,10 +1072,18 @@
     sp_t llimit() { return plimit() + code->locals.total_local_count; }
   };
 
+  struct Block {
+    pc_t pc;
+    sp_t sp;
+    size_t fp;
+    unsigned arity;
+  };
+
   CodeMap* codemap_;
   WasmModuleInstance* instance_;
   ZoneVector<WasmVal> stack_;
   ZoneVector<Frame> frames_;
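+  // Blocks currently open in this activation; unwound by DoBreak/DoReturn.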
+  ZoneVector<Block> blocks_;
   WasmInterpreter::State state_;
   pc_t break_pc_;
   TrapReason trap_reason_;
@@ -1088,6 +1108,9 @@
     DCHECK_GE(stack_.size(), arity);
     // The parameters will overlap the arguments already on the stack.
     frames_.push_back({code, 0, 0, stack_.size() - arity});
+    blocks_.push_back(
+        {0, stack_.size(), frames_.size(),
+         static_cast<uint32_t>(code->function->sig->return_count())});
     frames_.back().ret_pc = InitLocals(code);
     TRACE("  => push func#%u @%zu\n", code->function->func_index,
           frames_.back().ret_pc);
@@ -1126,21 +1149,38 @@
 
   bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
     if (pc == break_pc_) {
+      // Skip the previously hit breakpoint when resuming.
       break_pc_ = kInvalidPc;
       return true;
     }
     return false;
   }
 
-  bool DoReturn(InterpreterCode** code, pc_t* pc, pc_t* limit, WasmVal val) {
+  int LookupTarget(InterpreterCode* code, pc_t pc) {
+    return static_cast<int>(code->targets->Lookup(pc));
+  }
+
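+  // Unwinds the block stack by {depth} + 1 entries, moves the target block's
+  // result values down to its entry stack height, and returns the pc delta
+  // to the branch target.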
+  int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
+    size_t bp = blocks_.size() - depth - 1;
+    Block* target = &blocks_[bp];
+    DoStackTransfer(target->sp, target->arity);
+    blocks_.resize(bp);
+    return LookupTarget(code, pc);
+  }
+
+  bool DoReturn(InterpreterCode** code, pc_t* pc, pc_t* limit, size_t arity) {
     DCHECK_GT(frames_.size(), 0u);
-    stack_.resize(frames_.back().sp);
+    // Pop all blocks for this frame.
+    while (!blocks_.empty() && blocks_.back().fp == frames_.size()) {
+      blocks_.pop_back();
+    }
+
+    sp_t dest = frames_.back().sp;
     frames_.pop_back();
     if (frames_.size() == 0) {
-      // A return from the top frame terminates the execution.
+      // A return from the last frame terminates the execution.
       state_ = WasmInterpreter::FINISHED;
-      stack_.clear();
-      stack_.push_back(val);
+      DoStackTransfer(0, arity);
       TRACE("  => finish\n");
       return false;
     } else {
@@ -1149,16 +1189,8 @@
       *code = top->code;
       *pc = top->ret_pc;
       *limit = top->code->end - top->code->start;
-      if (top->code->start[top->call_pc] == kExprCallIndirect ||
-          (top->code->orig_start &&
-           top->code->orig_start[top->call_pc] == kExprCallIndirect)) {
-        // UGLY: An indirect call has the additional function index on the
-        // stack.
-        stack_.pop_back();
-      }
       TRACE("  => pop func#%u @%zu\n", (*code)->function->func_index, *pc);
-
-      stack_.push_back(val);
+      DoStackTransfer(dest, arity);
       return true;
     }
   }
@@ -1169,31 +1201,21 @@
     *limit = target->end - target->start;
   }
 
-  // Adjust the program counter {pc} and the stack contents according to the
-  // code's precomputed control transfer map. Returns the different between
-  // the new pc and the old pc.
-  int DoControlTransfer(InterpreterCode* code, pc_t pc) {
-    auto target = code->targets->Lookup(pc);
-    switch (target.action) {
-      case ControlTransfer::kNoAction:
-        TRACE("  action [sp-%u]\n", target.spdiff);
-        PopN(target.spdiff);
-        break;
-      case ControlTransfer::kPopAndRepush: {
-        WasmVal val = Pop();
-        TRACE("  action [pop x, sp-%u, push x]\n", target.spdiff - 1);
-        DCHECK_GE(target.spdiff, 1u);
-        PopN(target.spdiff - 1);
-        Push(pc, val);
-        break;
-      }
-      case ControlTransfer::kPushVoid:
-        TRACE("  action [sp-%u, push void]\n", target.spdiff);
-        PopN(target.spdiff);
-        Push(pc, WasmVal());
-        break;
+  // Copies the {arity} values on top of the stack down to {dest},
+  // dropping the values in-between.
+  void DoStackTransfer(sp_t dest, size_t arity) {
+    // before: |---------------| pop_count | arity |
+    //         ^ 0             ^ dest              ^ stack_.size()
+    //
+    // after:  |---------------| arity |
+    //         ^ 0                     ^ stack_.size()
+    DCHECK_LE(dest, stack_.size());
+    DCHECK_LE(dest + arity, stack_.size());
+    size_t pop_count = stack_.size() - dest - arity;
+    for (size_t i = 0; i < arity; i++) {
+      stack_[dest + i] = stack_[dest + pop_count + i];
     }
-    return target.pcdiff;
+    stack_.resize(stack_.size() - pop_count);
   }
 
   void Execute(InterpreterCode* code, pc_t pc, int max) {
@@ -1209,8 +1231,8 @@
       if (pc >= limit) {
         // Fell off end of code; do an implicit return.
         TRACE("@%-3zu: ImplicitReturn\n", pc);
-        WasmVal val = PopArity(code->function->sig->return_count());
-        if (!DoReturn(&code, &pc, &limit, val)) return;
+        if (!DoReturn(&code, &pc, &limit, code->function->sig->return_count()))
+          return;
         decoder.Reset(code->start, code->end);
         continue;
       }
@@ -1243,27 +1265,37 @@
 
       switch (orig) {
         case kExprNop:
-          Push(pc, WasmVal());
           break;
-        case kExprBlock:
+        case kExprBlock: {
+          BlockTypeOperand operand(&decoder, code->at(pc));
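+          // Record the entry stack height and result arity for breaks.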
+          blocks_.push_back({pc, stack_.size(), frames_.size(), operand.arity});
+          len = 1 + operand.length;
+          break;
+        }
         case kExprLoop: {
-          // Do nothing.
+          BlockTypeOperand operand(&decoder, code->at(pc));
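+          // A branch to a loop jumps back to its start and carries no values,
+          // so the block arity is 0.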
+          blocks_.push_back({pc, stack_.size(), frames_.size(), 0});
+          len = 1 + operand.length;
           break;
         }
         case kExprIf: {
+          BlockTypeOperand operand(&decoder, code->at(pc));
           WasmVal cond = Pop();
           bool is_true = cond.to<uint32_t>() != 0;
+          blocks_.push_back({pc, stack_.size(), frames_.size(), operand.arity});
           if (is_true) {
             // fall through to the true block.
+            len = 1 + operand.length;
             TRACE("  true => fallthrough\n");
           } else {
-            len = DoControlTransfer(code, pc);
+            len = LookupTarget(code, pc);
             TRACE("  false => @%zu\n", pc + len);
           }
           break;
         }
         case kExprElse: {
-          len = DoControlTransfer(code, pc);
+          blocks_.pop_back();
+          len = LookupTarget(code, pc);
           TRACE("  end => @%zu\n", pc + len);
           break;
         }
@@ -1276,42 +1308,34 @@
         }
         case kExprBr: {
           BreakDepthOperand operand(&decoder, code->at(pc));
-          WasmVal val = PopArity(operand.arity);
-          len = DoControlTransfer(code, pc);
+          len = DoBreak(code, pc, operand.depth);
           TRACE("  br => @%zu\n", pc + len);
-          if (operand.arity > 0) Push(pc, val);
           break;
         }
         case kExprBrIf: {
           BreakDepthOperand operand(&decoder, code->at(pc));
           WasmVal cond = Pop();
-          WasmVal val = PopArity(operand.arity);
           bool is_true = cond.to<uint32_t>() != 0;
           if (is_true) {
-            len = DoControlTransfer(code, pc);
+            len = DoBreak(code, pc, operand.depth);
             TRACE("  br_if => @%zu\n", pc + len);
-            if (operand.arity > 0) Push(pc, val);
           } else {
             TRACE("  false => fallthrough\n");
             len = 1 + operand.length;
-            Push(pc, WasmVal());
           }
           break;
         }
         case kExprBrTable: {
           BranchTableOperand operand(&decoder, code->at(pc));
           uint32_t key = Pop().to<uint32_t>();
-          WasmVal val = PopArity(operand.arity);
           if (key >= operand.table_count) key = operand.table_count;
-          len = DoControlTransfer(code, pc + key) + key;
-          TRACE("  br[%u] => @%zu\n", key, pc + len);
-          if (operand.arity > 0) Push(pc, val);
+          len = key + DoBreak(code, pc + key, operand.table[key]);
+          TRACE("  br[%u] => @%zu\n", key, pc + key + len);
           break;
         }
         case kExprReturn: {
-          ReturnArityOperand operand(&decoder, code->at(pc));
-          WasmVal val = PopArity(operand.arity);
-          if (!DoReturn(&code, &pc, &limit, val)) return;
+          size_t arity = code->function->sig->return_count();
+          if (!DoReturn(&code, &pc, &limit, arity)) return;
           decoder.Reset(code->start, code->end);
           continue;
         }
@@ -1320,8 +1344,7 @@
           return CommitPc(pc);
         }
         case kExprEnd: {
-          len = DoControlTransfer(code, pc);
-          DCHECK_EQ(1, len);
+          blocks_.pop_back();
           break;
         }
         case kExprI8Const: {
@@ -1364,10 +1387,21 @@
           LocalIndexOperand operand(&decoder, code->at(pc));
           WasmVal val = Pop();
           stack_[frames_.back().sp + operand.index] = val;
+          len = 1 + operand.length;
+          break;
+        }
+        case kExprTeeLocal: {
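+          // Like SetLocal, but leaves the value on the stack.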
+          LocalIndexOperand operand(&decoder, code->at(pc));
+          WasmVal val = Pop();
+          stack_[frames_.back().sp + operand.index] = val;
           Push(pc, val);
           len = 1 + operand.length;
           break;
         }
+        case kExprDrop: {
+          Pop();
+          break;
+        }
         case kExprCallFunction: {
           CallFunctionOperand operand(&decoder, code->at(pc));
           InterpreterCode* target = codemap()->GetCode(operand.index);
@@ -1378,9 +1412,7 @@
         }
         case kExprCallIndirect: {
           CallIndirectOperand operand(&decoder, code->at(pc));
-          size_t index = stack_.size() - operand.arity - 1;
-          DCHECK_LT(index, stack_.size());
-          uint32_t entry_index = stack_[index].to<uint32_t>();
+          uint32_t entry_index = Pop().to<uint32_t>();
           // Assume only one table for now.
           DCHECK_LE(module()->function_tables.size(), 1u);
           InterpreterCode* target = codemap()->GetIndirectCode(0, entry_index);
@@ -1395,10 +1427,6 @@
           decoder.Reset(code->start, code->end);
           continue;
         }
-        case kExprCallImport: {
-          UNIMPLEMENTED();
-          break;
-        }
         case kExprGetGlobal: {
           GlobalIndexOperand operand(&decoder, code->at(pc));
           const WasmGlobal* global = &module()->globals[operand.index];
@@ -1437,14 +1465,13 @@
           } else {
             UNREACHABLE();
           }
-          Push(pc, val);
           len = 1 + operand.length;
           break;
         }
 
 #define LOAD_CASE(name, ctype, mtype)                                       \
   case kExpr##name: {                                                       \
-    MemoryAccessOperand operand(&decoder, code->at(pc));                    \
+    MemoryAccessOperand operand(&decoder, code->at(pc), sizeof(ctype));     \
     uint32_t index = Pop().to<uint32_t>();                                  \
     size_t effective_mem_size = instance()->mem_size - sizeof(mtype);       \
     if (operand.offset > effective_mem_size ||                              \
@@ -1476,7 +1503,7 @@
 
 #define STORE_CASE(name, ctype, mtype)                                        \
   case kExpr##name: {                                                         \
-    MemoryAccessOperand operand(&decoder, code->at(pc));                      \
+    MemoryAccessOperand operand(&decoder, code->at(pc), sizeof(ctype));       \
     WasmVal val = Pop();                                                      \
     uint32_t index = Pop().to<uint32_t>();                                    \
     size_t effective_mem_size = instance()->mem_size - sizeof(mtype);         \
@@ -1486,7 +1513,6 @@
     }                                                                         \
     byte* addr = instance()->mem_start + operand.offset + index;              \
     WriteLittleEndianValue<mtype>(addr, static_cast<mtype>(val.to<ctype>())); \
-    Push(pc, val);                                                            \
     len = 1 + operand.length;                                                 \
     break;                                                                    \
   }
@@ -1546,9 +1572,14 @@
           ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
           ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
 #undef ASMJS_STORE_CASE
-
+        case kExprGrowMemory: {
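+          // Push the result of growing memory by {delta_pages} pages.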
+          uint32_t delta_pages = Pop().to<uint32_t>();
+          Push(pc, WasmVal(ExecuteGrowMemory(delta_pages, instance())));
+          break;
+        }
         case kExprMemorySize: {
-          Push(pc, WasmVal(static_cast<uint32_t>(instance()->mem_size)));
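+          // Report the memory size in pages rather than bytes.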
+          Push(pc, WasmVal(static_cast<uint32_t>(instance()->mem_size /
+                                                 WasmModule::kPageSize)));
           break;
         }
 #define EXECUTE_SIMPLE_BINOP(name, ctype, op)             \
@@ -1623,7 +1654,7 @@
 
   void Push(pc_t pc, WasmVal val) {
     // TODO(titzer): store PC as well?
-    stack_.push_back(val);
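+    // Void (statement) values do not occupy a stack slot.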
+    if (val.type != kAstStmt) stack_.push_back(val);
   }
 
   void TraceStack(const char* phase, pc_t pc) {
@@ -1700,7 +1731,7 @@
 // Implementation of the public interface of the interpreter.
 //============================================================================
 WasmInterpreter::WasmInterpreter(WasmModuleInstance* instance,
-                                 base::AccountingAllocator* allocator)
+                                 AccountingAllocator* allocator)
     : zone_(allocator),
       internals_(new (&zone_) WasmInterpreterInternals(&zone_, instance)) {}
 
@@ -1804,7 +1835,7 @@
 
 ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
     Zone* zone, const byte* start, const byte* end) {
-  ControlTransfers targets(zone, 0, start, end);
+  ControlTransfers targets(zone, nullptr, nullptr, start, end);
   return targets.map_;
 }
 
diff --git a/src/wasm/wasm-interpreter.h b/src/wasm/wasm-interpreter.h
index b106a20..b61e092 100644
--- a/src/wasm/wasm-interpreter.h
+++ b/src/wasm/wasm-interpreter.h
@@ -6,7 +6,7 @@
 #define V8_WASM_INTERPRETER_H_
 
 #include "src/wasm/wasm-opcodes.h"
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 namespace v8 {
 namespace base {
@@ -28,15 +28,7 @@
 
 const pc_t kInvalidPc = 0x80000000;
 
-// Visible for testing. A {ControlTransfer} helps the interpreter figure out
-// the target program counter and stack manipulations for a branch.
-struct ControlTransfer {
-  enum StackAction { kNoAction, kPopAndRepush, kPushVoid };
-  pcdiff_t pcdiff;  // adjustment to the program counter (positive or negative).
-  spdiff_t spdiff;  // number of elements to pop off the stack.
-  StackAction action;  // action to perform on the stack.
-};
-typedef ZoneMap<pc_t, ControlTransfer> ControlTransferMap;
+typedef ZoneMap<pc_t, pcdiff_t> ControlTransferMap;
 
 // Macro for defining union members.
 #define FOREACH_UNION_MEMBER(V) \
@@ -102,7 +94,7 @@
 };
 
 // An interpreter capable of executing WASM.
-class WasmInterpreter {
+class V8_EXPORT_PRIVATE WasmInterpreter {
  public:
   // State machine for a Thread:
   //                       +---------------Run()-----------+
@@ -132,15 +124,14 @@
     virtual int GetFrameCount() = 0;
     virtual const WasmFrame* GetFrame(int index) = 0;
     virtual WasmFrame* GetMutableFrame(int index) = 0;
-    virtual WasmVal GetReturnValue() = 0;
+    virtual WasmVal GetReturnValue(int index = 0) = 0;
 
     // Thread-specific breakpoints.
     bool SetBreakpoint(const WasmFunction* function, int pc, bool enabled);
     bool GetBreakpoint(const WasmFunction* function, int pc);
   };
 
-  WasmInterpreter(WasmModuleInstance* instance,
-                  base::AccountingAllocator* allocator);
+  WasmInterpreter(WasmModuleInstance* instance, AccountingAllocator* allocator);
   ~WasmInterpreter();
 
   //==========================================================================
@@ -190,9 +181,8 @@
   bool SetFunctionCodeForTesting(const WasmFunction* function,
                                  const byte* start, const byte* end);
 
-  // Computes the control targets for the given bytecode as {pc offset, sp
-  // offset}
-  // pairs. Used internally in the interpreter, but exposed for testing.
+  // Computes the control transfers for the given bytecode. Used internally in
+  // the interpreter, but exposed for testing.
   static ControlTransferMap ComputeControlTransfersForTesting(Zone* zone,
                                                               const byte* start,
                                                               const byte* end);
diff --git a/src/wasm/wasm-js.cc b/src/wasm/wasm-js.cc
index 10ae43c..254fd70 100644
--- a/src/wasm/wasm-js.cc
+++ b/src/wasm/wasm-js.cc
@@ -9,8 +9,6 @@
 #include "src/asmjs/asm-wasm-builder.h"
 #include "src/assert-scope.h"
 #include "src/ast/ast.h"
-#include "src/ast/scopes.h"
-#include "src/compiler.h"
 #include "src/execution.h"
 #include "src/factory.h"
 #include "src/handles.h"
@@ -18,7 +16,6 @@
 #include "src/objects.h"
 #include "src/parsing/parse-info.h"
 
-#include "src/wasm/encoder.h"
 #include "src/wasm/module-decoder.h"
 #include "src/wasm/wasm-js.h"
 #include "src/wasm/wasm-module.h"
@@ -31,6 +28,13 @@
 namespace v8 {
 
 namespace {
+i::Handle<i::String> v8_str(i::Isolate* isolate, const char* str) {
+  return isolate->factory()->NewStringFromAsciiChecked(str);
+}
+Local<String> v8_str(Isolate* isolate, const char* str) {
+  return Utils::ToLocal(v8_str(reinterpret_cast<i::Isolate*>(isolate), str));
+}
+
 struct RawBuffer {
   const byte* start;
   const byte* end;
@@ -80,7 +84,7 @@
   ErrorThrower thrower(isolate, "Wasm.verifyModule()");
 
   if (args.Length() < 1) {
-    thrower.Error("Argument 0 must be a buffer source");
+    thrower.TypeError("Argument 0 must be a buffer source");
     return;
   }
   RawBuffer buffer = GetRawBufferSource(args[0], &thrower);
@@ -104,7 +108,7 @@
   ErrorThrower thrower(isolate, "Wasm.verifyFunction()");
 
   if (args.Length() < 1) {
-    thrower.Error("Argument 0 must be a buffer source");
+    thrower.TypeError("Argument 0 must be a buffer source");
     return;
   }
   RawBuffer buffer = GetRawBufferSource(args[0], &thrower);
@@ -135,13 +139,11 @@
   // Decode but avoid a redundant pass over function bodies for verification.
   // Verification will happen during compilation.
   i::Zone zone(isolate->allocator());
-  internal::wasm::ModuleResult result = internal::wasm::DecodeWasmModule(
-      isolate, &zone, start, end, false, origin);
-
+  i::MaybeHandle<i::JSObject> module_object =
+      i::wasm::CreateModuleObjectFromBytes(isolate, start, end, thrower,
+                                           origin);
   i::MaybeHandle<i::JSObject> object;
-  if (result.failed()) {
-    thrower->Failed("", result);
-  } else {
+  if (!module_object.is_null()) {
     // Success. Instantiate the module and return the object.
     i::Handle<i::JSObject> ffi = i::Handle<i::JSObject>::null();
     if (args.Length() > 1 && args[1]->IsObject()) {
@@ -156,19 +158,12 @@
       memory = i::Handle<i::JSArrayBuffer>(i::JSArrayBuffer::cast(*mem_obj));
     }
 
-    i::MaybeHandle<i::FixedArray> compiled_module =
-        result.val->CompileFunctions(isolate, thrower);
-    if (!thrower->error()) {
-      DCHECK(!compiled_module.is_null());
-      object = i::wasm::WasmModule::Instantiate(
-          isolate, compiled_module.ToHandleChecked(), ffi, memory);
-      if (!object.is_null()) {
-        args.GetReturnValue().Set(v8::Utils::ToLocal(object.ToHandleChecked()));
-      }
+    object = i::wasm::WasmModule::Instantiate(
+        isolate, thrower, module_object.ToHandleChecked(), ffi, memory);
+    if (!object.is_null()) {
+      args.GetReturnValue().Set(v8::Utils::ToLocal(object.ToHandleChecked()));
     }
   }
-
-  if (result.val) delete result.val;
   return object;
 }
 
@@ -178,7 +173,7 @@
   ErrorThrower thrower(isolate, "Wasm.instantiateModule()");
 
   if (args.Length() < 1) {
-    thrower.Error("Argument 0 must be a buffer source");
+    thrower.TypeError("Argument 0 must be a buffer source");
     return;
   }
   RawBuffer buffer = GetRawBufferSource(args[0], &thrower);
@@ -197,20 +192,37 @@
   if (buffer.start == nullptr) return i::MaybeHandle<i::JSObject>();
 
   DCHECK(source->IsArrayBuffer() || source->IsTypedArray());
-  i::Zone zone(i_isolate->allocator());
-  i::wasm::ModuleResult result = i::wasm::DecodeWasmModule(
-      i_isolate, &zone, buffer.start, buffer.end, false, i::wasm::kWasmOrigin);
-  std::unique_ptr<const i::wasm::WasmModule> decoded_module(result.val);
-  if (result.failed()) {
-    thrower->Failed("", result);
-    return nothing;
-  }
-  i::MaybeHandle<i::FixedArray> compiled_module =
-      decoded_module->CompileFunctions(i_isolate, thrower);
-  if (compiled_module.is_null()) return nothing;
+  return i::wasm::CreateModuleObjectFromBytes(
+      i_isolate, buffer.start, buffer.end, thrower,
+      i::wasm::ModuleOrigin::kWasmOrigin);
+}
 
-  return i::wasm::CreateCompiledModuleObject(i_isolate,
-                                             compiled_module.ToHandleChecked());
+static bool ValidateModule(v8::Isolate* isolate,
+                           const v8::Local<v8::Value> source,
+                           ErrorThrower* thrower) {
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  i::MaybeHandle<i::JSObject> nothing;
+
+  RawBuffer buffer = GetRawBufferSource(source, thrower);
+  if (buffer.start == nullptr) return false;
+
+  DCHECK(source->IsArrayBuffer() || source->IsTypedArray());
+  return i::wasm::ValidateModuleBytes(i_isolate, buffer.start, buffer.end,
+                                      thrower,
+                                      i::wasm::ModuleOrigin::kWasmOrigin);
+}
+
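+// Returns true if {value} is an object tagged with the private brand {sym}.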
+bool BrandCheck(Isolate* isolate, i::Handle<i::Object> value,
+                i::Handle<i::Symbol> sym, const char* msg) {
+  if (value->IsJSObject()) {
+    i::Handle<i::JSObject> object = i::Handle<i::JSObject>::cast(value);
+    Maybe<bool> has_brand = i::JSObject::HasOwnProperty(object, sym);
+    if (has_brand.IsNothing()) return false;
+    if (has_brand.ToChecked()) return true;
+  }
+  v8::Local<v8::Value> e = v8::Exception::TypeError(v8_str(isolate, msg));
+  isolate->ThrowException(e);
+  return false;
 }
 
 void WebAssemblyCompile(const v8::FunctionCallbackInfo<v8::Value>& args) {
@@ -220,7 +232,7 @@
                        "WebAssembly.compile()");
 
   if (args.Length() < 1) {
-    thrower.Error("Argument 0 must be a buffer source");
+    thrower.TypeError("Argument 0 must be a buffer source");
     return;
   }
   i::MaybeHandle<i::JSObject> module_obj =
@@ -238,6 +250,25 @@
   return_value.Set(resolver->GetPromise());
 }
 
+void WebAssemblyValidate(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  v8::Isolate* isolate = args.GetIsolate();
+  HandleScope scope(isolate);
+  ErrorThrower thrower(reinterpret_cast<i::Isolate*>(isolate),
+                       "WebAssembly.validate()");
+
+  if (args.Length() < 1) {
+    thrower.TypeError("Argument 0 must be a buffer source");
+    return;
+  }
+
+  v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();
+  if (ValidateModule(isolate, args[0], &thrower)) {
+    return_value.Set(v8::True(isolate));
+  } else {
+    return_value.Set(v8::False(isolate));
+  }
+}
+
 void WebAssemblyModule(const v8::FunctionCallbackInfo<v8::Value>& args) {
   v8::Isolate* isolate = args.GetIsolate();
   HandleScope scope(isolate);
@@ -245,7 +276,7 @@
                        "WebAssembly.Module()");
 
   if (args.Length() < 1) {
-    thrower.Error("Argument 0 must be a buffer source");
+    thrower.TypeError("Argument 0 must be a buffer source");
     return;
   }
   i::MaybeHandle<i::JSObject> module_obj =
@@ -264,18 +295,15 @@
   ErrorThrower thrower(i_isolate, "WebAssembly.Instance()");
 
   if (args.Length() < 1) {
-    thrower.Error(
-        "Argument 0 must be provided, and must be a WebAssembly.Module object");
+    thrower.TypeError("Argument 0 must be a WebAssembly.Module");
     return;
   }
 
   Local<Context> context = isolate->GetCurrentContext();
   i::Handle<i::Context> i_context = Utils::OpenHandle(*context);
-  i::Handle<i::Symbol> module_sym(i_context->wasm_module_sym());
-  i::MaybeHandle<i::Object> source =
-      i::Object::GetProperty(Utils::OpenHandle(*args[0]), module_sym);
-  if (source.is_null() || source.ToHandleChecked()->IsUndefined(i_isolate)) {
-    thrower.Error("Argument 0 must be a WebAssembly.Module");
+  if (!BrandCheck(isolate, Utils::OpenHandle(*args[0]),
+                  i::Handle<i::Symbol>(i_context->wasm_module_sym()),
+                  "Argument 0 must be a WebAssembly.Module")) {
     return;
   }
 
@@ -285,13 +313,10 @@
       i::Handle<i::JSObject>::cast(v8::Utils::OpenHandle(*obj));
   if (module_obj->GetInternalFieldCount() < 1 ||
       !module_obj->GetInternalField(0)->IsFixedArray()) {
-    thrower.Error("Argument 0 is an invalid WebAssembly.Module");
+    thrower.TypeError("Argument 0 is an invalid WebAssembly.Module");
     return;
   }
 
-  i::Handle<i::FixedArray> compiled_code = i::Handle<i::FixedArray>(
-      i::FixedArray::cast(module_obj->GetInternalField(0)));
-
   i::Handle<i::JSReceiver> ffi = i::Handle<i::JSObject>::null();
   if (args.Length() > 1 && args[1]->IsObject()) {
     Local<Object> obj = Local<Object>::Cast(args[1]);
@@ -304,17 +329,211 @@
     i::Handle<i::Object> mem_obj = v8::Utils::OpenHandle(*obj);
     memory = i::Handle<i::JSArrayBuffer>(i::JSArrayBuffer::cast(*mem_obj));
   }
-  i::MaybeHandle<i::JSObject> instance =
-      i::wasm::WasmModule::Instantiate(i_isolate, compiled_code, ffi, memory);
+  i::MaybeHandle<i::JSObject> instance = i::wasm::WasmModule::Instantiate(
+      i_isolate, &thrower, module_obj, ffi, memory);
   if (instance.is_null()) {
-    thrower.Error("Could not instantiate module");
+    if (!thrower.error()) thrower.Error("Could not instantiate module");
     return;
   }
   v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();
   return_value.Set(Utils::ToLocal(instance.ToHandleChecked()));
 }
+
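+// Reads {property} from {object} as an integer and checks that it lies in
+// [lower_bound, upper_bound]; out-of-range values are reported through
+// {thrower} as a RangeError.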
+bool GetIntegerProperty(v8::Isolate* isolate, ErrorThrower* thrower,
+                        Local<Context> context, Local<v8::Object> object,
+                        Local<String> property, int* result, int lower_bound,
+                        int upper_bound) {
+  v8::MaybeLocal<v8::Value> maybe = object->Get(context, property);
+  v8::Local<v8::Value> value;
+  if (maybe.ToLocal(&value)) {
+    int64_t number;
+    if (!value->IntegerValue(context).To(&number)) return false;
+    if (number < static_cast<int64_t>(lower_bound)) {
+      thrower->RangeError("Property value %" PRId64
+                          " is below the lower bound %d",
+                          number, lower_bound);
+      return false;
+    }
+    if (number > static_cast<int64_t>(upper_bound)) {
+      thrower->RangeError("Property value %" PRId64
+                          " is above the upper bound %d",
+                          number, upper_bound);
+      return false;
+    }
+    *result = static_cast<int>(number);
+    return true;
+  }
+  return false;
+}
+
+void WebAssemblyTable(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  v8::Isolate* isolate = args.GetIsolate();
+  HandleScope scope(isolate);
+  ErrorThrower thrower(reinterpret_cast<i::Isolate*>(isolate),
+                       "WebAssembly.Table()");
+  if (args.Length() < 1 || !args[0]->IsObject()) {
+    thrower.TypeError("Argument 0 must be a table descriptor");
+    return;
+  }
+  Local<Context> context = isolate->GetCurrentContext();
+  Local<v8::Object> descriptor = args[0]->ToObject(context).ToLocalChecked();
+  // The descriptor's 'element'.
+  {
+    v8::MaybeLocal<v8::Value> maybe =
+        descriptor->Get(context, v8_str(isolate, "element"));
+    v8::Local<v8::Value> value;
+    if (!maybe.ToLocal(&value)) return;
+    v8::Local<v8::String> string;
+    if (!value->ToString(context).ToLocal(&string)) return;
+    bool equal;
+    if (!string->Equals(context, v8_str(isolate, "anyfunc")).To(&equal)) return;
+    if (!equal) {
+      thrower.TypeError("Descriptor property 'element' must be 'anyfunc'");
+      return;
+    }
+  }
+  const int max_table_size = 1 << 26;
+  // The descriptor's 'initial'.
+  int initial;
+  if (!GetIntegerProperty(isolate, &thrower, context, descriptor,
+                          v8_str(isolate, "initial"), &initial, 0,
+                          max_table_size)) {
+    return;
+  }
+  // The descriptor's 'maximum'.
+  int maximum = 0;
+  Local<String> maximum_key = v8_str(isolate, "maximum");
+  Maybe<bool> has_maximum = descriptor->Has(context, maximum_key);
+
+  if (has_maximum.IsNothing()) {
+    // There has been an exception; just return.
+    return;
+  }
+  if (has_maximum.FromJust()) {
+    if (!GetIntegerProperty(isolate, &thrower, context, descriptor, maximum_key,
+                            &maximum, initial, max_table_size)) {
+      return;
+    }
+  }
+
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  i::Handle<i::JSFunction> table_ctor(
+      i_isolate->native_context()->wasm_table_constructor());
+  i::Handle<i::JSObject> table_obj =
+      i_isolate->factory()->NewJSObject(table_ctor);
+  i::Handle<i::FixedArray> fixed_array =
+      i_isolate->factory()->NewFixedArray(initial);
+  i::Object* null = i_isolate->heap()->null_value();
+  for (int i = 0; i < initial; ++i) fixed_array->set(i, null);
+  table_obj->SetInternalField(0, *fixed_array);
+  table_obj->SetInternalField(
+      1, has_maximum.FromJust()
+             ? static_cast<i::Object*>(i::Smi::FromInt(maximum))
+             : static_cast<i::Object*>(i_isolate->heap()->undefined_value()));
+  i::Handle<i::Symbol> table_sym(i_isolate->native_context()->wasm_table_sym());
+  i::Object::SetProperty(table_obj, table_sym, table_obj, i::STRICT).Check();
+  v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();
+  return_value.Set(Utils::ToLocal(table_obj));
+}
+
+void WebAssemblyMemory(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  v8::Isolate* isolate = args.GetIsolate();
+  HandleScope scope(isolate);
+  ErrorThrower thrower(reinterpret_cast<i::Isolate*>(isolate),
+                       "WebAssembly.Memory()");
+  if (args.Length() < 1 || !args[0]->IsObject()) {
+    thrower.TypeError("Argument 0 must be a memory descriptor");
+    return;
+  }
+  Local<Context> context = isolate->GetCurrentContext();
+  Local<v8::Object> descriptor = args[0]->ToObject(context).ToLocalChecked();
+  // The descriptor's 'initial'.
+  int initial;
+  if (!GetIntegerProperty(isolate, &thrower, context, descriptor,
+                          v8_str(isolate, "initial"), &initial, 0, 65536)) {
+    return;
+  }
+  // The descriptor's 'maximum'.
+  int maximum = 0;
+  Local<String> maximum_key = v8_str(isolate, "maximum");
+  Maybe<bool> has_maximum = descriptor->Has(context, maximum_key);
+
+  if (has_maximum.IsNothing()) {
+    // There has been an exception; just return.
+    return;
+  }
+  if (has_maximum.FromJust()) {
+    if (!GetIntegerProperty(isolate, &thrower, context, descriptor, maximum_key,
+                            &maximum, initial, 65536)) {
+      return;
+    }
+  }
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  i::Handle<i::JSArrayBuffer> buffer =
+      i_isolate->factory()->NewJSArrayBuffer(i::SharedFlag::kNotShared);
+  size_t size = static_cast<size_t>(i::wasm::WasmModule::kPageSize) *
+                static_cast<size_t>(initial);
+  i::JSArrayBuffer::SetupAllocatingData(buffer, i_isolate, size);
+
+  i::Handle<i::JSObject> memory_obj = i::WasmJs::CreateWasmMemoryObject(
+      i_isolate, buffer, has_maximum.FromJust(), maximum);
+  v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();
+  return_value.Set(Utils::ToLocal(memory_obj));
+}
+void WebAssemblyTableGetLength(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  // TODO(rossberg)
+}
+void WebAssemblyTableGrow(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  // TODO(rossberg)
+}
+void WebAssemblyTableGet(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  // TODO(rossberg)
+}
+void WebAssemblyTableSet(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  // TODO(rossberg)
+}
+void WebAssemblyMemoryGrow(const v8::FunctionCallbackInfo<v8::Value>& args) {
+  // TODO(rossberg)
+}
+void WebAssemblyMemoryGetBuffer(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  v8::Isolate* isolate = args.GetIsolate();
+  Local<Context> context = isolate->GetCurrentContext();
+  i::Handle<i::Context> i_context = Utils::OpenHandle(*context);
+  if (!BrandCheck(isolate, Utils::OpenHandle(*args.This()),
+                  i::Handle<i::Symbol>(i_context->wasm_memory_sym()),
+                  "Receiver is not a WebAssembly.Memory")) {
+    return;
+  }
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  i::Handle<i::JSObject> receiver =
+      i::Handle<i::JSObject>::cast(Utils::OpenHandle(*args.This()));
+  i::Handle<i::Object> buffer(receiver->GetInternalField(0), i_isolate);
+  DCHECK(buffer->IsJSArrayBuffer());
+  v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();
+  return_value.Set(Utils::ToLocal(buffer));
+}
 }  // namespace
 
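+// Creates a WebAssembly.Memory object backed by {buffer}, storing the
+// optional maximum in an internal field and tagging the object with the
+// memory brand symbol.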
+i::Handle<i::JSObject> i::WasmJs::CreateWasmMemoryObject(
+    i::Isolate* i_isolate, i::Handle<i::JSArrayBuffer> buffer, bool has_maximum,
+    int maximum) {
+  i::Handle<i::JSFunction> memory_ctor(
+      i_isolate->native_context()->wasm_memory_constructor());
+  i::Handle<i::JSObject> memory_obj =
+      i_isolate->factory()->NewJSObject(memory_ctor);
+  memory_obj->SetInternalField(0, *buffer);
+  memory_obj->SetInternalField(
+      1, has_maximum
+             ? static_cast<i::Object*>(i::Smi::FromInt(maximum))
+             : static_cast<i::Object*>(i_isolate->heap()->undefined_value()));
+  i::Handle<i::Symbol> memory_sym(
+      i_isolate->native_context()->wasm_memory_sym());
+  i::Object::SetProperty(memory_obj, memory_sym, memory_obj, i::STRICT).Check();
+  return memory_obj;
+}
+
 // TODO(titzer): we use the API to create the function template because the
 // internal guts are too ugly to replicate here.
 static i::Handle<i::FunctionTemplateInfo> NewTemplate(i::Isolate* i_isolate,
@@ -325,12 +544,9 @@
 }
 
 namespace internal {
-static Handle<String> v8_str(Isolate* isolate, const char* str) {
-  return isolate->factory()->NewStringFromAsciiChecked(str);
-}
 
-static Handle<JSFunction> InstallFunc(Isolate* isolate, Handle<JSObject> object,
-                                      const char* str, FunctionCallback func) {
+Handle<JSFunction> InstallFunc(Isolate* isolate, Handle<JSObject> object,
+                               const char* str, FunctionCallback func) {
   Handle<String> name = v8_str(isolate, str);
   Handle<FunctionTemplateInfo> temp = NewTemplate(isolate, func);
   Handle<JSFunction> function =
@@ -341,6 +557,112 @@
   return function;
 }
 
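+// Installs {func} as a read-only accessor property named {str} on {object}.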
+Handle<JSFunction> InstallGetter(Isolate* isolate, Handle<JSObject> object,
+                                 const char* str, FunctionCallback func) {
+  Handle<String> name = v8_str(isolate, str);
+  Handle<FunctionTemplateInfo> temp = NewTemplate(isolate, func);
+  Handle<JSFunction> function =
+      ApiNatives::InstantiateFunction(temp).ToHandleChecked();
+  v8::PropertyAttribute attributes =
+      static_cast<v8::PropertyAttribute>(v8::DontDelete | v8::ReadOnly);
+  Utils::ToLocal(object)->SetAccessorProperty(Utils::ToLocal(name),
+                                              Utils::ToLocal(function),
+                                              Local<Function>(), attributes);
+  return function;
+}
+
+void WasmJs::InstallWasmModuleSymbolIfNeeded(Isolate* isolate,
+                                             Handle<JSGlobalObject> global,
+                                             Handle<Context> context) {
+  if (!context->get(Context::WASM_MODULE_SYM_INDEX)->IsSymbol() ||
+      !context->get(Context::WASM_INSTANCE_SYM_INDEX)->IsSymbol()) {
+    InstallWasmMapsIfNeeded(isolate, isolate->native_context());
+    InstallWasmConstructors(isolate, isolate->global_object(),
+                            isolate->native_context());
+  }
+}
+
+void WasmJs::InstallWasmConstructors(Isolate* isolate,
+                                     Handle<JSGlobalObject> global,
+                                     Handle<Context> context) {
+  Factory* factory = isolate->factory();
+  // Create private symbols.
+  Handle<Symbol> module_sym = factory->NewPrivateSymbol();
+  context->set_wasm_module_sym(*module_sym);
+
+  Handle<Symbol> instance_sym = factory->NewPrivateSymbol();
+  context->set_wasm_instance_sym(*instance_sym);
+
+  Handle<Symbol> table_sym = factory->NewPrivateSymbol();
+  context->set_wasm_table_sym(*table_sym);
+
+  Handle<Symbol> memory_sym = factory->NewPrivateSymbol();
+  context->set_wasm_memory_sym(*memory_sym);
+
+  // Bind the WebAssembly object.
+  Handle<String> name = v8_str(isolate, "WebAssembly");
+  Handle<JSFunction> cons = factory->NewFunction(name);
+  JSFunction::SetInstancePrototype(
+      cons, Handle<Object>(context->initial_object_prototype(), isolate));
+  cons->shared()->set_instance_class_name(*name);
+  Handle<JSObject> wasm_object = factory->NewJSObject(cons, TENURED);
+  PropertyAttributes attributes = static_cast<PropertyAttributes>(DONT_ENUM);
+  JSObject::AddProperty(global, name, wasm_object, attributes);
+
+  // Setup compile
+  InstallFunc(isolate, wasm_object, "compile", WebAssemblyCompile);
+
+  // Setup validate
+  InstallFunc(isolate, wasm_object, "validate", WebAssemblyValidate);
+
+  // Setup Module
+  Handle<JSFunction> module_constructor =
+      InstallFunc(isolate, wasm_object, "Module", WebAssemblyModule);
+  context->set_wasm_module_constructor(*module_constructor);
+  Handle<JSObject> module_proto =
+      factory->NewJSObject(module_constructor, TENURED);
+  i::Handle<i::Map> map = isolate->factory()->NewMap(
+      i::JS_OBJECT_TYPE, i::JSObject::kHeaderSize + i::kPointerSize);
+  JSFunction::SetInitialMap(module_constructor, map, module_proto);
+  JSObject::AddProperty(module_proto, isolate->factory()->constructor_string(),
+                        module_constructor, DONT_ENUM);
+
+  // Setup Instance
+  Handle<JSFunction> instance_constructor =
+      InstallFunc(isolate, wasm_object, "Instance", WebAssemblyInstance);
+  context->set_wasm_instance_constructor(*instance_constructor);
+
+  // Setup Table
+  Handle<JSFunction> table_constructor =
+      InstallFunc(isolate, wasm_object, "Table", WebAssemblyTable);
+  context->set_wasm_table_constructor(*table_constructor);
+  Handle<JSObject> table_proto =
+      factory->NewJSObject(table_constructor, TENURED);
+  map = isolate->factory()->NewMap(
+      i::JS_OBJECT_TYPE, i::JSObject::kHeaderSize + 2 * i::kPointerSize);
+  JSFunction::SetInitialMap(table_constructor, map, table_proto);
+  JSObject::AddProperty(table_proto, isolate->factory()->constructor_string(),
+                        table_constructor, DONT_ENUM);
+  InstallGetter(isolate, table_proto, "length", WebAssemblyTableGetLength);
+  InstallFunc(isolate, table_proto, "grow", WebAssemblyTableGrow);
+  InstallFunc(isolate, table_proto, "get", WebAssemblyTableGet);
+  InstallFunc(isolate, table_proto, "set", WebAssemblyTableSet);
+
+  // Setup Memory
+  Handle<JSFunction> memory_constructor =
+      InstallFunc(isolate, wasm_object, "Memory", WebAssemblyMemory);
+  context->set_wasm_memory_constructor(*memory_constructor);
+  Handle<JSObject> memory_proto =
+      factory->NewJSObject(memory_constructor, TENURED);
+  map = isolate->factory()->NewMap(
+      i::JS_OBJECT_TYPE, i::JSObject::kHeaderSize + 2 * i::kPointerSize);
+  JSFunction::SetInitialMap(memory_constructor, map, memory_proto);
+  JSObject::AddProperty(memory_proto, isolate->factory()->constructor_string(),
+                        memory_constructor, DONT_ENUM);
+  InstallFunc(isolate, memory_proto, "grow", WebAssemblyMemoryGrow);
+  InstallGetter(isolate, memory_proto, "buffer", WebAssemblyMemoryGetBuffer);
+}
+
 void WasmJs::Install(Isolate* isolate, Handle<JSGlobalObject> global) {
   if (!FLAG_expose_wasm && !FLAG_validate_asm) {
     return;
@@ -350,7 +672,7 @@
 
   // Setup wasm function map.
   Handle<Context> context(global->native_context(), isolate);
-  InstallWasmFunctionMap(isolate, context);
+  InstallWasmMapsIfNeeded(isolate, context);
 
   if (!FLAG_expose_wasm) {
     return;
@@ -383,39 +705,11 @@
       JSObject::AddProperty(wasm_object, name, value, attributes);
     }
   }
-
-  // Create private symbols.
-  Handle<Symbol> module_sym = isolate->factory()->NewPrivateSymbol();
-  Handle<Symbol> instance_sym = isolate->factory()->NewPrivateSymbol();
-  context->set_wasm_module_sym(*module_sym);
-  context->set_wasm_instance_sym(*instance_sym);
-
-  // Bind the WebAssembly object.
-  Handle<String> name = v8_str(isolate, "WebAssembly");
-  Handle<JSFunction> cons = factory->NewFunction(name);
-  JSFunction::SetInstancePrototype(
-      cons, Handle<Object>(context->initial_object_prototype(), isolate));
-  cons->shared()->set_instance_class_name(*name);
-  Handle<JSObject> wasm_object = factory->NewJSObject(cons, TENURED);
-  PropertyAttributes attributes = static_cast<PropertyAttributes>(DONT_ENUM);
-  JSObject::AddProperty(global, name, wasm_object, attributes);
-
-  // Install static methods on WebAssembly object.
-  InstallFunc(isolate, wasm_object, "compile", WebAssemblyCompile);
-  Handle<JSFunction> module_constructor =
-      InstallFunc(isolate, wasm_object, "Module", WebAssemblyModule);
-  Handle<JSFunction> instance_constructor =
-      InstallFunc(isolate, wasm_object, "Instance", WebAssemblyInstance);
-  i::Handle<i::Map> map = isolate->factory()->NewMap(
-      i::JS_OBJECT_TYPE, i::JSObject::kHeaderSize + i::kPointerSize);
-  module_constructor->set_prototype_or_initial_map(*map);
-  map->SetConstructor(*module_constructor);
-
-  context->set_wasm_module_constructor(*module_constructor);
-  context->set_wasm_instance_constructor(*instance_constructor);
+  InstallWasmConstructors(isolate, global, context);
 }
 
-void WasmJs::InstallWasmFunctionMap(Isolate* isolate, Handle<Context> context) {
+void WasmJs::InstallWasmMapsIfNeeded(Isolate* isolate,
+                                     Handle<Context> context) {
   if (!context->get(Context::WASM_FUNCTION_MAP_INDEX)->IsMap()) {
     // TODO(titzer): Move this to bootstrapper.cc??
     // TODO(titzer): Also make one for strict mode functions?
diff --git a/src/wasm/wasm-js.h b/src/wasm/wasm-js.h
index ded9a1a..4f26494 100644
--- a/src/wasm/wasm-js.h
+++ b/src/wasm/wasm-js.h
@@ -5,13 +5,8 @@
 #ifndef V8_WASM_JS_H_
 #define V8_WASM_JS_H_
 
-#ifndef V8_SHARED
 #include "src/allocation.h"
 #include "src/base/hashmap.h"
-#else
-#include "include/v8.h"
-#include "src/base/compiler-specific.h"
-#endif  // !V8_SHARED
 
 namespace v8 {
 namespace internal {
@@ -19,7 +14,19 @@
 class WasmJs {
  public:
   static void Install(Isolate* isolate, Handle<JSGlobalObject> global_object);
-  static void InstallWasmFunctionMap(Isolate* isolate, Handle<Context> context);
+
+  V8_EXPORT_PRIVATE static void InstallWasmModuleSymbolIfNeeded(
+      Isolate* isolate, Handle<JSGlobalObject> global, Handle<Context> context);
+
+  V8_EXPORT_PRIVATE static void InstallWasmMapsIfNeeded(
+      Isolate* isolate, Handle<Context> context);
+  static void InstallWasmConstructors(Isolate* isolate,
+                                      Handle<JSGlobalObject> global,
+                                      Handle<Context> context);
+
+  static Handle<JSObject> CreateWasmMemoryObject(Isolate* isolate,
+                                                 Handle<JSArrayBuffer> buffer,
+                                                 bool has_maximum, int maximum);
 };
 
 }  // namespace internal
diff --git a/src/wasm/wasm-macro-gen.h b/src/wasm/wasm-macro-gen.h
index abd57d5..fd10a39 100644
--- a/src/wasm/wasm-macro-gen.h
+++ b/src/wasm/wasm-macro-gen.h
@@ -7,7 +7,7 @@
 
 #include "src/wasm/wasm-opcodes.h"
 
-#include "src/zone-containers.h"
+#include "src/zone/zone-containers.h"
 
 #define U32_LE(v)                                    \
   static_cast<byte>(v), static_cast<byte>((v) >> 8), \
@@ -17,17 +17,17 @@
 
 #define WASM_MODULE_HEADER U32_LE(kWasmMagic), U32_LE(kWasmVersion)
 
-#define SIG_INDEX(v) U16_LE(v)
-// TODO(binji): make SIG_INDEX match this.
 #define IMPORT_SIG_INDEX(v) U32V_1(v)
 #define FUNC_INDEX(v) U32V_1(v)
+#define TABLE_INDEX(v) U32V_1(v)
 #define NO_NAME U32V_1(0)
 #define NAME_LENGTH(v) U32V_1(v)
+#define ENTRY_COUNT(v) U32V_1(v)
 
 #define ZERO_ALIGNMENT 0
 #define ZERO_OFFSET 0
 
-#define BR_TARGET(v) U32_LE(v)
+#define BR_TARGET(v) U32V_1(v)
 
 #define MASK_7 ((1 << 7) - 1)
 #define MASK_14 ((1 << 14) - 1)
@@ -62,36 +62,76 @@
 
 #define ARITY_0 0
 #define ARITY_1 1
+#define ARITY_2 2
 #define DEPTH_0 0
 #define DEPTH_1 1
+#define DEPTH_2 2
+#define ARITY_2 2
 
-#define WASM_BLOCK(...) kExprBlock, __VA_ARGS__, kExprEnd
-#define WASM_INFINITE_LOOP kExprLoop, kExprBr, ARITY_0, DEPTH_0, kExprEnd
-#define WASM_LOOP(...) kExprLoop, __VA_ARGS__, kExprEnd
-#define WASM_IF(cond, tstmt) cond, kExprIf, tstmt, kExprEnd
+#define WASM_BLOCK(...) kExprBlock, kLocalVoid, __VA_ARGS__, kExprEnd
+
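+// Typed block forms: the extra operand encodes the block's result type.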
+#define WASM_BLOCK_T(t, ...)                                       \
+  kExprBlock, static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t)), \
+      __VA_ARGS__, kExprEnd
+
+#define WASM_BLOCK_TT(t1, t2, ...)                                       \
+  kExprBlock, kMultivalBlock, 0,                                         \
+      static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t1)),              \
+      static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t2)), __VA_ARGS__, \
+      kExprEnd
+
+#define WASM_BLOCK_I(...) kExprBlock, kLocalI32, __VA_ARGS__, kExprEnd
+#define WASM_BLOCK_L(...) kExprBlock, kLocalI64, __VA_ARGS__, kExprEnd
+#define WASM_BLOCK_F(...) kExprBlock, kLocalF32, __VA_ARGS__, kExprEnd
+#define WASM_BLOCK_D(...) kExprBlock, kLocalF64, __VA_ARGS__, kExprEnd
+
+#define WASM_INFINITE_LOOP kExprLoop, kLocalVoid, kExprBr, DEPTH_0, kExprEnd
+
+#define WASM_LOOP(...) kExprLoop, kLocalVoid, __VA_ARGS__, kExprEnd
+#define WASM_LOOP_I(...) kExprLoop, kLocalI32, __VA_ARGS__, kExprEnd
+#define WASM_LOOP_L(...) kExprLoop, kLocalI64, __VA_ARGS__, kExprEnd
+#define WASM_LOOP_F(...) kExprLoop, kLocalF32, __VA_ARGS__, kExprEnd
+#define WASM_LOOP_D(...) kExprLoop, kLocalF64, __VA_ARGS__, kExprEnd
+
+#define WASM_IF(cond, tstmt) cond, kExprIf, kLocalVoid, tstmt, kExprEnd
+
 #define WASM_IF_ELSE(cond, tstmt, fstmt) \
-  cond, kExprIf, tstmt, kExprElse, fstmt, kExprEnd
+  cond, kExprIf, kLocalVoid, tstmt, kExprElse, fstmt, kExprEnd
+
+#define WASM_IF_ELSE_T(t, cond, tstmt, fstmt)                                \
+  cond, kExprIf, static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t)), tstmt, \
+      kExprElse, fstmt, kExprEnd
+
+#define WASM_IF_ELSE_TT(t1, t2, cond, tstmt, fstmt)                           \
+  cond, kExprIf, kMultivalBlock, 0,                                           \
+      static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t1)),                   \
+      static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(t2)), tstmt, kExprElse, \
+      fstmt, kExprEnd
+
+#define WASM_IF_ELSE_I(cond, tstmt, fstmt) \
+  cond, kExprIf, kLocalI32, tstmt, kExprElse, fstmt, kExprEnd
+#define WASM_IF_ELSE_L(cond, tstmt, fstmt) \
+  cond, kExprIf, kLocalI64, tstmt, kExprElse, fstmt, kExprEnd
+#define WASM_IF_ELSE_F(cond, tstmt, fstmt) \
+  cond, kExprIf, kLocalF32, tstmt, kExprElse, fstmt, kExprEnd
+#define WASM_IF_ELSE_D(cond, tstmt, fstmt) \
+  cond, kExprIf, kLocalF64, tstmt, kExprElse, fstmt, kExprEnd
+
 #define WASM_SELECT(tval, fval, cond) tval, fval, cond, kExprSelect
-#define WASM_BR(depth) kExprBr, ARITY_0, static_cast<byte>(depth)
-#define WASM_BR_IF(depth, cond) \
-  cond, kExprBrIf, ARITY_0, static_cast<byte>(depth)
-#define WASM_BRV(depth, val) val, kExprBr, ARITY_1, static_cast<byte>(depth)
-#define WASM_BRV_IF(depth, val, cond) \
-  val, cond, kExprBrIf, ARITY_1, static_cast<byte>(depth)
-#define WASM_BREAK(depth) kExprBr, ARITY_0, static_cast<byte>(depth + 1)
-#define WASM_CONTINUE(depth) kExprBr, ARITY_0, static_cast<byte>(depth)
-#define WASM_BREAKV(depth, val) \
-  val, kExprBr, ARITY_1, static_cast<byte>(depth + 1)
-#define WASM_RETURN0 kExprReturn, ARITY_0
-#define WASM_RETURN1(val) val, kExprReturn, ARITY_1
-#define WASM_RETURNN(count, ...) __VA_ARGS__, kExprReturn, count
+
+#define WASM_RETURN0 kExprReturn
+#define WASM_RETURN1(val) val, kExprReturn
+#define WASM_RETURNN(count, ...) __VA_ARGS__, kExprReturn
+
+#define WASM_BR(depth) kExprBr, static_cast<byte>(depth)
+#define WASM_BR_IF(depth, cond) cond, kExprBrIf, static_cast<byte>(depth)
+#define WASM_BR_IFD(depth, val, cond) \
+  val, cond, kExprBrIf, static_cast<byte>(depth), kExprDrop
+#define WASM_CONTINUE(depth) kExprBr, static_cast<byte>(depth)
 #define WASM_UNREACHABLE kExprUnreachable
 
 #define WASM_BR_TABLE(key, count, ...) \
-  key, kExprBrTable, ARITY_0, U32V_1(count), __VA_ARGS__
-
-#define WASM_BR_TABLEV(val, key, count, ...) \
-  val, key, kExprBrTable, ARITY_1, U32V_1(count), __VA_ARGS__
+  key, kExprBrTable, U32V_1(count), __VA_ARGS__
 
 #define WASM_CASE(x) static_cast<byte>(x), static_cast<byte>(x >> 8)
 #define WASM_CASE_BR(x) static_cast<byte>(x), static_cast<byte>(0x80 | (x) >> 8)
@@ -343,6 +383,8 @@
       static_cast<byte>(bit_cast<uint64_t>(val) >> 56)
 #define WASM_GET_LOCAL(index) kExprGetLocal, static_cast<byte>(index)
 #define WASM_SET_LOCAL(index, val) val, kExprSetLocal, static_cast<byte>(index)
+#define WASM_TEE_LOCAL(index, val) val, kExprTeeLocal, static_cast<byte>(index)
+#define WASM_DROP kExprDrop
 #define WASM_GET_GLOBAL(index) kExprGetGlobal, static_cast<byte>(index)
 #define WASM_SET_GLOBAL(index, val) \
   val, kExprSetGlobal, static_cast<byte>(index)
@@ -374,49 +416,25 @@
           v8::internal::wasm::WasmOpcodes::LoadStoreOpcodeOf(type, true)), \
       alignment, ZERO_OFFSET
 
-#define WASM_CALL_FUNCTION0(index) \
-  kExprCallFunction, 0, static_cast<byte>(index)
-#define WASM_CALL_FUNCTION1(index, a) \
-  a, kExprCallFunction, 1, static_cast<byte>(index)
-#define WASM_CALL_FUNCTION2(index, a, b) \
-  a, b, kExprCallFunction, 2, static_cast<byte>(index)
-#define WASM_CALL_FUNCTION3(index, a, b, c) \
-  a, b, c, kExprCallFunction, 3, static_cast<byte>(index)
-#define WASM_CALL_FUNCTION4(index, a, b, c, d) \
-  a, b, c, d, kExprCallFunction, 4, static_cast<byte>(index)
-#define WASM_CALL_FUNCTION5(index, a, b, c, d, e) \
-  kExprCallFunction, 5, static_cast<byte>(index)
-#define WASM_CALL_FUNCTIONN(arity, index, ...) \
-  __VA_ARGS__, kExprCallFunction, arity, static_cast<byte>(index)
+#define WASM_CALL_FUNCTION0(index) kExprCallFunction, static_cast<byte>(index)
+#define WASM_CALL_FUNCTION(index, ...) \
+  __VA_ARGS__, kExprCallFunction, static_cast<byte>(index)
 
-#define WASM_CALL_IMPORT0(index) kExprCallImport, 0, static_cast<byte>(index)
-#define WASM_CALL_IMPORT1(index, a) \
-  a, kExprCallImport, 1, static_cast<byte>(index)
-#define WASM_CALL_IMPORT2(index, a, b) \
-  a, b, kExprCallImport, 2, static_cast<byte>(index)
-#define WASM_CALL_IMPORT3(index, a, b, c) \
-  a, b, c, kExprCallImport, 3, static_cast<byte>(index)
-#define WASM_CALL_IMPORT4(index, a, b, c, d) \
-  a, b, c, d, kExprCallImport, 4, static_cast<byte>(index)
-#define WASM_CALL_IMPORT5(index, a, b, c, d, e) \
-  a, b, c, d, e, kExprCallImport, 5, static_cast<byte>(index)
-#define WASM_CALL_IMPORTN(arity, index, ...) \
-  __VA_ARGS__, kExprCallImport, U32V_1(arity), static_cast<byte>(index),
-
+// TODO(titzer): change usages of these macros to put func last.
 #define WASM_CALL_INDIRECT0(index, func) \
-  func, kExprCallIndirect, 0, static_cast<byte>(index)
+  func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECT1(index, func, a) \
-  func, a, kExprCallIndirect, 1, static_cast<byte>(index)
+  a, func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECT2(index, func, a, b) \
-  func, a, b, kExprCallIndirect, 2, static_cast<byte>(index)
+  a, b, func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECT3(index, func, a, b, c) \
-  func, a, b, c, kExprCallIndirect, 3, static_cast<byte>(index)
+  a, b, c, func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECT4(index, func, a, b, c, d) \
-  func, a, b, c, d, kExprCallIndirect, 4, static_cast<byte>(index)
+  a, b, c, d, func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECT5(index, func, a, b, c, d, e) \
-  func, a, b, c, d, e, kExprCallIndirect, 5, static_cast<byte>(index)
+  a, b, c, d, e, func, kExprCallIndirect, static_cast<byte>(index)
 #define WASM_CALL_INDIRECTN(arity, index, func, ...) \
-  func, __VA_ARGS__, kExprCallIndirect, U32V_1(arity), static_cast<byte>(index)
+  __VA_ARGS__, func, kExprCallIndirect, static_cast<byte>(index)
 
 #define WASM_NOT(x) x, kExprI32Eqz
 #define WASM_SEQ(...) __VA_ARGS__
@@ -424,11 +442,16 @@
 //------------------------------------------------------------------------------
 // Constructs that are composed of multiple bytecodes.
 //------------------------------------------------------------------------------
-#define WASM_WHILE(x, y) \
-  kExprLoop, x, kExprIf, y, kExprBr, ARITY_1, DEPTH_1, kExprEnd, kExprEnd
+#define WASM_WHILE(x, y)                                              \
+  kExprLoop, kLocalVoid, x, kExprIf, kLocalVoid, y, kExprBr, DEPTH_1, \
+      kExprEnd, kExprEnd
 #define WASM_INC_LOCAL(index)                                            \
   kExprGetLocal, static_cast<byte>(index), kExprI8Const, 1, kExprI32Add, \
-      kExprSetLocal, static_cast<byte>(index)
+      kExprTeeLocal, static_cast<byte>(index)
+#define WASM_INC_LOCAL_BYV(index, count)                    \
+  kExprGetLocal, static_cast<byte>(index), kExprI8Const,    \
+      static_cast<byte>(count), kExprI32Add, kExprTeeLocal, \
+      static_cast<byte>(index)
 #define WASM_INC_LOCAL_BY(index, count)                     \
   kExprGetLocal, static_cast<byte>(index), kExprI8Const,    \
       static_cast<byte>(count), kExprI32Add, kExprSetLocal, \
@@ -580,11 +603,17 @@
 #define WASM_I64_REINTERPRET_F64(x) x, kExprI64ReinterpretF64
 
 //------------------------------------------------------------------------------
+// Memory Operations.
+//------------------------------------------------------------------------------
+#define WASM_GROW_MEMORY(x) x, kExprGrowMemory
+#define WASM_MEMORY_SIZE kExprMemorySize
+
+//------------------------------------------------------------------------------
 // Simd Operations.
 //------------------------------------------------------------------------------
 #define WASM_SIMD_I32x4_SPLAT(x) x, kSimdPrefix, kExprI32x4Splat & 0xff
-#define WASM_SIMD_I32x4_EXTRACT_LANE(x, y) \
-  x, y, kSimdPrefix, kExprI32x4ExtractLane & 0xff
+#define WASM_SIMD_I32x4_EXTRACT_LANE(lane, x) \
+  x, kSimdPrefix, kExprI32x4ExtractLane & 0xff, static_cast<byte>(lane)
 
 #define SIG_ENTRY_v_v kWasmFunctionTypeForm, 0, 0
 #define SIZEOF_SIG_ENTRY_v_v 3
@@ -605,4 +634,13 @@
 #define SIZEOF_SIG_ENTRY_x_xx 6
 #define SIZEOF_SIG_ENTRY_x_xxx 7
 
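+// Branch constructs that carry a value; the value (and condition) bytes are
+// emitted before the branch opcode, as with the other macros in this file.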
+#define WASM_BRV(depth, val) val, kExprBr, static_cast<byte>(depth)
+#define WASM_BRV_IF(depth, val, cond) \
+  val, cond, kExprBrIf, static_cast<byte>(depth)
+#define WASM_BRV_IFD(depth, val, cond) \
+  val, cond, kExprBrIf, static_cast<byte>(depth), kExprDrop
+#define WASM_IFB(cond, ...) cond, kExprIf, kLocalVoid, __VA_ARGS__, kExprEnd
+#define WASM_BR_TABLEV(val, key, count, ...) \
+  val, key, kExprBrTable, U32V_1(count), __VA_ARGS__
+
 #endif  // V8_WASM_MACRO_GEN_H_
diff --git a/src/wasm/wasm-module-builder.cc b/src/wasm/wasm-module-builder.cc
new file mode 100644
index 0000000..084f5a0
--- /dev/null
+++ b/src/wasm/wasm-module-builder.cc
@@ -0,0 +1,459 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/signature.h"
+
+#include "src/handles.h"
+#include "src/v8.h"
+#include "src/zone/zone-containers.h"
+
+#include "src/wasm/ast-decoder.h"
+#include "src/wasm/leb-helper.h"
+#include "src/wasm/wasm-macro-gen.h"
+#include "src/wasm/wasm-module-builder.h"
+#include "src/wasm/wasm-module.h"
+#include "src/wasm/wasm-opcodes.h"
+
+#include "src/v8memory.h"
+
+#if DEBUG
+#define TRACE(...)                                    \
+  do {                                                \
+    if (FLAG_trace_wasm_encoder) PrintF(__VA_ARGS__); \
+  } while (false)
+#else
+#define TRACE(...)
+#endif
+
+namespace v8 {
+namespace internal {
+namespace wasm {
+
+// Emit a section code and the size as a padded varint that can be patched
+// later.
+size_t EmitSection(WasmSectionCode code, ZoneBuffer& buffer) {
+  // Emit the section code.
+  buffer.write_u8(code);
+
+  // Emit a placeholder for the length.
+  return buffer.reserve_u32v();
+}
+
+// Patch the size of a section after it's finished.
+void FixupSection(ZoneBuffer& buffer, size_t start) {
+  buffer.patch_u32v(start, static_cast<uint32_t>(buffer.offset() - start -
+                                                 kPaddedVarInt32Size));
+}
+
+WasmFunctionBuilder::WasmFunctionBuilder(WasmModuleBuilder* builder)
+    : builder_(builder),
+      locals_(builder->zone()),
+      signature_index_(0),
+      exported_(0),
+      func_index_(static_cast<uint32_t>(builder->functions_.size())),
+      body_(builder->zone()),
+      name_(builder->zone()),
+      i32_temps_(builder->zone()),
+      i64_temps_(builder->zone()),
+      f32_temps_(builder->zone()),
+      f64_temps_(builder->zone()),
+      direct_calls_(builder->zone()) {}
+
+void WasmFunctionBuilder::EmitVarInt(uint32_t val) {
+  byte buffer[8];
+  byte* ptr = buffer;
+  LEBHelper::write_u32v(&ptr, val);
+  for (byte* p = buffer; p < ptr; p++) {
+    body_.push_back(*p);
+  }
+}
+
+void WasmFunctionBuilder::SetSignature(FunctionSig* sig) {
+  DCHECK(!locals_.has_sig());
+  locals_.set_sig(sig);
+  signature_index_ = builder_->AddSignature(sig);
+}
+
+uint32_t WasmFunctionBuilder::AddLocal(LocalType type) {
+  DCHECK(locals_.has_sig());
+  return locals_.AddLocals(1, type);
+}
+
+void WasmFunctionBuilder::EmitGetLocal(uint32_t local_index) {
+  EmitWithVarInt(kExprGetLocal, local_index);
+}
+
+void WasmFunctionBuilder::EmitSetLocal(uint32_t local_index) {
+  EmitWithVarInt(kExprSetLocal, local_index);
+}
+
+void WasmFunctionBuilder::EmitTeeLocal(uint32_t local_index) {
+  EmitWithVarInt(kExprTeeLocal, local_index);
+}
+
+void WasmFunctionBuilder::EmitCode(const byte* code, uint32_t code_size) {
+  for (size_t i = 0; i < code_size; ++i) {
+    body_.push_back(code[i]);
+  }
+}
+
+void WasmFunctionBuilder::Emit(WasmOpcode opcode) {
+  body_.push_back(static_cast<byte>(opcode));
+}
+
+void WasmFunctionBuilder::EmitWithU8(WasmOpcode opcode, const byte immediate) {
+  body_.push_back(static_cast<byte>(opcode));
+  body_.push_back(immediate);
+}
+
+void WasmFunctionBuilder::EmitWithU8U8(WasmOpcode opcode, const byte imm1,
+                                       const byte imm2) {
+  body_.push_back(static_cast<byte>(opcode));
+  body_.push_back(imm1);
+  body_.push_back(imm2);
+}
+
+void WasmFunctionBuilder::EmitWithVarInt(WasmOpcode opcode,
+                                         uint32_t immediate) {
+  body_.push_back(static_cast<byte>(opcode));
+  EmitVarInt(immediate);
+}
+
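+// Emits constants in [-128, 127] as kExprI8Const with a single immediate
+// byte, and falls back to a fixed five-byte i32 encoding otherwise.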
+void WasmFunctionBuilder::EmitI32Const(int32_t value) {
+  // TODO(titzer): variable-length signed and unsigned i32 constants.
+  if (-128 <= value && value <= 127) {
+    EmitWithU8(kExprI8Const, static_cast<byte>(value));
+  } else {
+    byte code[] = {WASM_I32V_5(value)};
+    EmitCode(code, sizeof(code));
+  }
+}
+
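+// Records the current body offset and emits a padded placeholder; the final
+// callee index (adjusted for imports) is patched in by WriteBody().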
+void WasmFunctionBuilder::EmitDirectCallIndex(uint32_t index) {
+  DirectCallIndex call;
+  call.offset = body_.size();
+  call.direct_index = index;
+  direct_calls_.push_back(call);
+  byte code[] = {U32V_5(0)};
+  EmitCode(code, sizeof(code));
+}
+
+void WasmFunctionBuilder::SetExported() { exported_ = true; }
+
+void WasmFunctionBuilder::SetName(const char* name, int name_length) {
+  name_.clear();
+  if (name_length > 0) {
+    for (int i = 0; i < name_length; ++i) {
+      name_.push_back(*(name + i));
+    }
+  }
+}
+
+void WasmFunctionBuilder::WriteSignature(ZoneBuffer& buffer) const {
+  buffer.write_u32v(signature_index_);
+}
+
+void WasmFunctionBuilder::WriteExport(ZoneBuffer& buffer) const {
+  if (exported_) {
+    buffer.write_size(name_.size());
+    if (name_.size() > 0) {
+      buffer.write(reinterpret_cast<const byte*>(&name_[0]), name_.size());
+    }
+    buffer.write_u8(kExternalFunction);
+    buffer.write_u32v(func_index_ +
+                      static_cast<uint32_t>(builder_->imports_.size()));
+  }
+}
+
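+// Writes the body size, the encoded local declarations and the body bytes,
+// then patches recorded direct call sites with their final function indices.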
+void WasmFunctionBuilder::WriteBody(ZoneBuffer& buffer) const {
+  size_t locals_size = locals_.Size();
+  buffer.write_size(locals_size + body_.size());
+  buffer.EnsureSpace(locals_size);
+  byte** ptr = buffer.pos_ptr();
+  locals_.Emit(*ptr);
+  (*ptr) += locals_size;  // UGLY: manual bump of position pointer
+  if (body_.size() > 0) {
+    size_t base = buffer.offset();
+    buffer.write(&body_[0], body_.size());
+    for (DirectCallIndex call : direct_calls_) {
+      buffer.patch_u32v(
+          base + call.offset,
+          call.direct_index + static_cast<uint32_t>(builder_->imports_.size()));
+    }
+  }
+}
+
+WasmModuleBuilder::WasmModuleBuilder(Zone* zone)
+    : zone_(zone),
+      signatures_(zone),
+      imports_(zone),
+      functions_(zone),
+      data_segments_(zone),
+      indirect_functions_(zone),
+      globals_(zone),
+      signature_map_(zone),
+      start_function_index_(-1) {}
+
+WasmFunctionBuilder* WasmModuleBuilder::AddFunction(FunctionSig* sig) {
+  functions_.push_back(new (zone_) WasmFunctionBuilder(this));
+  // Add the signature if one was provided here.
+  if (sig) functions_.back()->SetSignature(sig);
+  return functions_.back();
+}
+
+void WasmModuleBuilder::AddDataSegment(const byte* data, uint32_t size,
+                                       uint32_t dest) {
+  data_segments_.push_back({ZoneVector<byte>(zone()), dest});
+  ZoneVector<byte>& vec = data_segments_.back().data;
+  for (uint32_t i = 0; i < size; i++) {
+    vec.push_back(data[i]);
+  }
+}
+
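+// Orders signatures lexicographically by return count, parameter count and
+// then the individual types, so identical signatures share one map entry.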
+bool WasmModuleBuilder::CompareFunctionSigs::operator()(FunctionSig* a,
+                                                        FunctionSig* b) const {
+  if (a->return_count() < b->return_count()) return true;
+  if (a->return_count() > b->return_count()) return false;
+  if (a->parameter_count() < b->parameter_count()) return true;
+  if (a->parameter_count() > b->parameter_count()) return false;
+  for (size_t r = 0; r < a->return_count(); r++) {
+    if (a->GetReturn(r) < b->GetReturn(r)) return true;
+    if (a->GetReturn(r) > b->GetReturn(r)) return false;
+  }
+  for (size_t p = 0; p < a->parameter_count(); p++) {
+    if (a->GetParam(p) < b->GetParam(p)) return true;
+    if (a->GetParam(p) > b->GetParam(p)) return false;
+  }
+  return false;
+}
+
+uint32_t WasmModuleBuilder::AddSignature(FunctionSig* sig) {
+  SignatureMap::iterator pos = signature_map_.find(sig);
+  if (pos != signature_map_.end()) {
+    return pos->second;
+  } else {
+    uint32_t index = static_cast<uint32_t>(signatures_.size());
+    signature_map_[sig] = index;
+    signatures_.push_back(sig);
+    return index;
+  }
+}
+
+void WasmModuleBuilder::AddIndirectFunction(uint32_t index) {
+  indirect_functions_.push_back(index);
+}
+
+uint32_t WasmModuleBuilder::AddImport(const char* name, int name_length,
+                                      FunctionSig* sig) {
+  imports_.push_back({AddSignature(sig), name, name_length});
+  return static_cast<uint32_t>(imports_.size() - 1);
+}
+
+void WasmModuleBuilder::MarkStartFunction(WasmFunctionBuilder* function) {
+  start_function_index_ = function->func_index();
+}
+
+uint32_t WasmModuleBuilder::AddGlobal(LocalType type, bool exported,
+                                      bool mutability) {
+  globals_.push_back({type, exported, mutability});
+  return static_cast<uint32_t>(globals_.size() - 1);
+}
+
+void WasmModuleBuilder::WriteTo(ZoneBuffer& buffer) const {
+  uint32_t exports = 0;
+
+  // == Emit magic number and version ==========================================
+  TRACE("emit magic\n");
+  buffer.write_u32(kWasmMagic);
+  buffer.write_u32(kWasmVersion);
+
+  // == Emit signatures ========================================================
+  if (signatures_.size() > 0) {
+    size_t start = EmitSection(kTypeSectionCode, buffer);
+    buffer.write_size(signatures_.size());
+
+    for (FunctionSig* sig : signatures_) {
+      buffer.write_u8(kWasmFunctionTypeForm);
+      buffer.write_size(sig->parameter_count());
+      for (size_t j = 0; j < sig->parameter_count(); j++) {
+        buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(sig->GetParam(j)));
+      }
+      buffer.write_size(sig->return_count());
+      for (size_t j = 0; j < sig->return_count(); j++) {
+        buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(sig->GetReturn(j)));
+      }
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == Emit imports ===========================================================
+  if (imports_.size() > 0) {
+    size_t start = EmitSection(kImportSectionCode, buffer);
+    buffer.write_size(imports_.size());
+    for (auto import : imports_) {
+      buffer.write_u32v(import.name_length);  // module name length
+      buffer.write(reinterpret_cast<const byte*>(import.name),  // module name
+                   import.name_length);
+      buffer.write_u32v(0);  // field name length
+      buffer.write_u8(kExternalFunction);
+      buffer.write_u32v(import.sig_index);
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == Emit function signatures ===============================================
+  bool has_names = false;
+  if (functions_.size() > 0) {
+    size_t start = EmitSection(kFunctionSectionCode, buffer);
+    buffer.write_size(functions_.size());
+    for (auto function : functions_) {
+      function->WriteSignature(buffer);
+      if (function->exported()) exports++;
+      if (function->name_.size() > 0) has_names = true;
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == emit function table ====================================================
+  if (indirect_functions_.size() > 0) {
+    size_t start = EmitSection(kTableSectionCode, buffer);
+    buffer.write_u8(1);  // table count
+    buffer.write_u8(kWasmAnyFunctionTypeForm);
+    buffer.write_u8(kResizableMaximumFlag);
+    buffer.write_size(indirect_functions_.size());
+    buffer.write_size(indirect_functions_.size());
+    FixupSection(buffer, start);
+  }
+
+  // == emit memory declaration ================================================
+  {
+    size_t start = EmitSection(kMemorySectionCode, buffer);
+    buffer.write_u8(1);  // memory count
+    buffer.write_u32v(kResizableMaximumFlag);
+    buffer.write_u32v(16);  // min memory size
+    buffer.write_u32v(16);  // max memory size
+    FixupSection(buffer, start);
+  }
+
+  // == Emit globals ===========================================================
+  if (globals_.size() > 0) {
+    size_t start = EmitSection(kGlobalSectionCode, buffer);
+    buffer.write_size(globals_.size());
+
+    for (auto global : globals_) {
+      buffer.write_u8(WasmOpcodes::LocalTypeCodeFor(global.type));
+      buffer.write_u8(global.mutability ? 1 : 0);
+      switch (global.type) {
+        case kAstI32: {
+          static const byte code[] = {WASM_I32V_1(0)};
+          buffer.write(code, sizeof(code));
+          break;
+        }
+        case kAstF32: {
+          static const byte code[] = {WASM_F32(0)};
+          buffer.write(code, sizeof(code));
+          break;
+        }
+        case kAstI64: {
+          static const byte code[] = {WASM_I64V_1(0)};
+          buffer.write(code, sizeof(code));
+          break;
+        }
+        case kAstF64: {
+          static const byte code[] = {WASM_F64(0.0)};
+          buffer.write(code, sizeof(code));
+          break;
+        }
+        default:
+          UNREACHABLE();
+      }
+      buffer.write_u8(kExprEnd);
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == emit exports ===========================================================
+  if (exports > 0) {
+    size_t start = EmitSection(kExportSectionCode, buffer);
+    buffer.write_u32v(exports);
+    for (auto function : functions_) function->WriteExport(buffer);
+    FixupSection(buffer, start);
+  }
+
+  // == emit start function index ==============================================
+  if (start_function_index_ >= 0) {
+    size_t start = EmitSection(kStartSectionCode, buffer);
+    buffer.write_u32v(start_function_index_ +
+                      static_cast<uint32_t>(imports_.size()));
+    FixupSection(buffer, start);
+  }
+
+  // == emit function table elements ===========================================
+  if (indirect_functions_.size() > 0) {
+    size_t start = EmitSection(kElementSectionCode, buffer);
+    buffer.write_u8(1);              // count of entries
+    buffer.write_u8(0);              // table index
+    buffer.write_u8(kExprI32Const);  // offset
+    buffer.write_u32v(0);
+    buffer.write_u8(kExprEnd);
+    buffer.write_size(indirect_functions_.size());  // element count
+
+    for (auto index : indirect_functions_) {
+      buffer.write_u32v(index + static_cast<uint32_t>(imports_.size()));
+    }
+
+    FixupSection(buffer, start);
+  }
+
+  // == emit code ==============================================================
+  if (functions_.size() > 0) {
+    size_t start = EmitSection(kCodeSectionCode, buffer);
+    buffer.write_size(functions_.size());
+    for (auto function : functions_) {
+      function->WriteBody(buffer);
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == emit data segments =====================================================
+  if (data_segments_.size() > 0) {
+    size_t start = EmitSection(kDataSectionCode, buffer);
+    buffer.write_size(data_segments_.size());
+
+    for (auto segment : data_segments_) {
+      buffer.write_u8(0);              // linear memory segment
+      buffer.write_u8(kExprI32Const);  // initializer expression for dest
+      buffer.write_u32v(segment.dest);
+      buffer.write_u8(kExprEnd);
+      buffer.write_u32v(static_cast<uint32_t>(segment.data.size()));
+      buffer.write(&segment.data[0], segment.data.size());
+    }
+    FixupSection(buffer, start);
+  }
+
+  // == Emit names =============================================================
+  if (has_names) {
+    // Emit the section code.
+    buffer.write_u8(kUnknownSectionCode);
+    // Emit a placeholder for the length.
+    size_t start = buffer.reserve_u32v();
+    // Emit the section string.
+    buffer.write_size(4);
+    buffer.write(reinterpret_cast<const byte*>("name"), 4);
+    // Emit the names.
+    buffer.write_size(functions_.size());
+    for (auto function : functions_) {
+      buffer.write_size(function->name_.size());
+      if (function->name_.size() > 0) {
+        buffer.write(reinterpret_cast<const byte*>(&function->name_[0]),
+                     function->name_.size());
+      }
+      buffer.write_u8(0);
+    }
+    FixupSection(buffer, start);
+  }
+}
+}  // namespace wasm
+}  // namespace internal
+}  // namespace v8
diff --git a/src/wasm/wasm-module-builder.h b/src/wasm/wasm-module-builder.h
new file mode 100644
index 0000000..dcaf6c8
--- /dev/null
+++ b/src/wasm/wasm-module-builder.h
@@ -0,0 +1,271 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_WASM_WASM_MODULE_BUILDER_H_
+#define V8_WASM_WASM_MODULE_BUILDER_H_
+
+#include "src/signature.h"
+#include "src/zone/zone-containers.h"
+
+#include "src/wasm/leb-helper.h"
+#include "src/wasm/wasm-macro-gen.h"
+#include "src/wasm/wasm-module.h"
+#include "src/wasm/wasm-opcodes.h"
+#include "src/wasm/wasm-result.h"
+
+namespace v8 {
+namespace internal {
+namespace wasm {
+
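+// A growable byte buffer allocated in a Zone, with helpers for writing
+// fixed-width little-endian values and LEB128 (varint) encoded integers.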
+class ZoneBuffer : public ZoneObject {
+ public:
+  static const uint32_t kInitialSize = 4096;
+  explicit ZoneBuffer(Zone* zone, size_t initial = kInitialSize)
+      : zone_(zone), buffer_(reinterpret_cast<byte*>(zone->New(initial))) {
+    pos_ = buffer_;
+    end_ = buffer_ + initial;
+  }
+
+  void write_u8(uint8_t x) {
+    EnsureSpace(1);
+    *(pos_++) = x;
+  }
+
+  void write_u16(uint16_t x) {
+    EnsureSpace(2);
+    WriteLittleEndianValue<uint16_t>(pos_, x);
+    pos_ += 2;
+  }
+
+  void write_u32(uint32_t x) {
+    EnsureSpace(4);
+    WriteLittleEndianValue<uint32_t>(pos_, x);
+    pos_ += 4;
+  }
+
+  void write_u32v(uint32_t val) {
+    EnsureSpace(kMaxVarInt32Size);
+    LEBHelper::write_u32v(&pos_, val);
+  }
+
+  void write_size(size_t val) {
+    EnsureSpace(kMaxVarInt32Size);
+    DCHECK_EQ(val, static_cast<uint32_t>(val));
+    LEBHelper::write_u32v(&pos_, static_cast<uint32_t>(val));
+  }
+
+  void write(const byte* data, size_t size) {
+    EnsureSpace(size);
+    memcpy(pos_, data, size);
+    pos_ += size;
+  }
+
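+  // Reserve space for a padded five-byte u32v whose value is not yet known;
+  // the returned offset can later be filled in via patch_u32v().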
+  size_t reserve_u32v() {
+    size_t off = offset();
+    EnsureSpace(kMaxVarInt32Size);
+    pos_ += kMaxVarInt32Size;
+    return off;
+  }
+
+  // Patch a (padded) u32v at the given offset to be the given value.
+  void patch_u32v(size_t offset, uint32_t val) {
+    byte* ptr = buffer_ + offset;
+    for (size_t pos = 0; pos != kPaddedVarInt32Size; ++pos) {
+      uint32_t next = val >> 7;
+      byte out = static_cast<byte>(val & 0x7f);
+      if (pos != kPaddedVarInt32Size - 1) {
+        *(ptr++) = 0x80 | out;
+        val = next;
+      } else {
+        *(ptr++) = out;
+      }
+    }
+  }
+
+  size_t offset() { return static_cast<size_t>(pos_ - buffer_); }
+  size_t size() { return static_cast<size_t>(pos_ - buffer_); }
+  const byte* begin() { return buffer_; }
+  const byte* end() { return pos_; }
+
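+  // Grow the zone-allocated backing store if fewer than 'size' bytes remain.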
+  void EnsureSpace(size_t size) {
+    if ((pos_ + size) > end_) {
+      size_t new_size = 4096 + size + (end_ - buffer_) * 3;
+      byte* new_buffer = reinterpret_cast<byte*>(zone_->New(new_size));
+      memcpy(new_buffer, buffer_, (pos_ - buffer_));
+      pos_ = new_buffer + (pos_ - buffer_);
+      buffer_ = new_buffer;
+      end_ = new_buffer + new_size;
+    }
+    DCHECK(pos_ + size <= end_);
+  }
+
+  byte** pos_ptr() { return &pos_; }
+
+ private:
+  Zone* zone_;
+  byte* buffer_;
+  byte* pos_;
+  byte* end_;
+};
+
+class WasmModuleBuilder;
+
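+// Builds the locals, body and export metadata of a single function.
+// Instances are created and owned by a WasmModuleBuilder.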
+class V8_EXPORT_PRIVATE WasmFunctionBuilder : public ZoneObject {
+ public:
+  // Building methods.
+  void SetSignature(FunctionSig* sig);
+  uint32_t AddLocal(LocalType type);
+  void EmitVarInt(uint32_t val);
+  void EmitCode(const byte* code, uint32_t code_size);
+  void Emit(WasmOpcode opcode);
+  void EmitGetLocal(uint32_t index);
+  void EmitSetLocal(uint32_t index);
+  void EmitTeeLocal(uint32_t index);
+  void EmitI32Const(int32_t val);
+  void EmitWithU8(WasmOpcode opcode, const byte immediate);
+  void EmitWithU8U8(WasmOpcode opcode, const byte imm1, const byte imm2);
+  void EmitWithVarInt(WasmOpcode opcode, uint32_t immediate);
+  void EmitDirectCallIndex(uint32_t index);
+  void SetExported();
+  void SetName(const char* name, int name_length);
+
+  void WriteSignature(ZoneBuffer& buffer) const;
+  void WriteExport(ZoneBuffer& buffer) const;
+  void WriteBody(ZoneBuffer& buffer) const;
+
+  bool exported() { return exported_; }
+  uint32_t func_index() { return func_index_; }
+  FunctionSig* signature();
+
+ private:
+  explicit WasmFunctionBuilder(WasmModuleBuilder* builder);
+  friend class WasmModuleBuilder;
+  friend class WasmTemporary;
+
+  struct DirectCallIndex {
+    size_t offset;
+    uint32_t direct_index;
+  };
+
+  WasmModuleBuilder* builder_;
+  LocalDeclEncoder locals_;
+  uint32_t signature_index_;
+  bool exported_;
+  uint32_t func_index_;
+  ZoneVector<uint8_t> body_;
+  ZoneVector<char> name_;
+  ZoneVector<uint32_t> i32_temps_;
+  ZoneVector<uint32_t> i64_temps_;
+  ZoneVector<uint32_t> f32_temps_;
+  ZoneVector<uint32_t> f64_temps_;
+  ZoneVector<DirectCallIndex> direct_calls_;
+};
+
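+// RAII helper that borrows a scratch local of the given type from a
+// WasmFunctionBuilder and returns it to the builder's free list on
+// destruction.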
+class WasmTemporary {
+ public:
+  WasmTemporary(WasmFunctionBuilder* builder, LocalType type) {
+    switch (type) {
+      case kAstI32:
+        temporary_ = &builder->i32_temps_;
+        break;
+      case kAstI64:
+        temporary_ = &builder->i64_temps_;
+        break;
+      case kAstF32:
+        temporary_ = &builder->f32_temps_;
+        break;
+      case kAstF64:
+        temporary_ = &builder->f64_temps_;
+        break;
+      default:
+        UNREACHABLE();
+        temporary_ = nullptr;
+    }
+    if (temporary_->size() == 0) {
+      // Allocate a new temporary.
+      index_ = builder->AddLocal(type);
+    } else {
+      // Reuse a previous temporary.
+      index_ = temporary_->back();
+      temporary_->pop_back();
+    }
+  }
+  ~WasmTemporary() {
+    temporary_->push_back(index_);  // return the temporary to the list.
+  }
+  uint32_t index() { return index_; }
+
+ private:
+  ZoneVector<uint32_t>* temporary_;
+  uint32_t index_;
+};
+
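+// Collects signatures, imports, functions, globals and data segments and
+// serializes them into the wasm binary module format via WriteTo().
+//
+// Rough usage sketch ('zone' and 'sig' are placeholders supplied by the
+// caller, not defined in this header):
+//
+//   WasmModuleBuilder builder(zone);
+//   WasmFunctionBuilder* f = builder.AddFunction(sig);
+//   f->SetExported();
+//   f->SetName("main", 4);
+//   byte code[] = {WASM_I32V_1(42)};
+//   f->EmitCode(code, sizeof(code));
+//   f->Emit(kExprEnd);
+//   ZoneBuffer buffer(zone);
+//   builder.WriteTo(buffer);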
+class V8_EXPORT_PRIVATE WasmModuleBuilder : public ZoneObject {
+ public:
+  explicit WasmModuleBuilder(Zone* zone);
+
+  // Building methods.
+  uint32_t AddImport(const char* name, int name_length, FunctionSig* sig);
+  void SetImportName(uint32_t index, const char* name, int name_length) {
+    imports_[index].name = name;
+    imports_[index].name_length = name_length;
+  }
+  WasmFunctionBuilder* AddFunction(FunctionSig* sig = nullptr);
+  uint32_t AddGlobal(LocalType type, bool exported, bool mutability = true);
+  void AddDataSegment(const byte* data, uint32_t size, uint32_t dest);
+  uint32_t AddSignature(FunctionSig* sig);
+  void AddIndirectFunction(uint32_t index);
+  void MarkStartFunction(WasmFunctionBuilder* builder);
+
+  // Writing methods.
+  void WriteTo(ZoneBuffer& buffer) const;
+
+  struct CompareFunctionSigs {
+    bool operator()(FunctionSig* a, FunctionSig* b) const;
+  };
+  typedef ZoneMap<FunctionSig*, uint32_t, CompareFunctionSigs> SignatureMap;
+
+  Zone* zone() { return zone_; }
+
+  FunctionSig* GetSignature(uint32_t index) { return signatures_[index]; }
+
+ private:
+  struct WasmFunctionImport {
+    uint32_t sig_index;
+    const char* name;
+    int name_length;
+  };
+
+  struct WasmGlobal {
+    LocalType type;
+    bool exported;
+    bool mutability;
+  };
+
+  struct WasmDataSegment {
+    ZoneVector<byte> data;
+    uint32_t dest;
+  };
+
+  friend class WasmFunctionBuilder;
+  Zone* zone_;
+  ZoneVector<FunctionSig*> signatures_;
+  ZoneVector<WasmFunctionImport> imports_;
+  ZoneVector<WasmFunctionBuilder*> functions_;
+  ZoneVector<WasmDataSegment> data_segments_;
+  ZoneVector<uint32_t> indirect_functions_;
+  ZoneVector<WasmGlobal> globals_;
+  SignatureMap signature_map_;
+  int start_function_index_;
+};
+
+inline FunctionSig* WasmFunctionBuilder::signature() {
+  return builder_->signatures_[signature_index_];
+}
+
+}  // namespace wasm
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_WASM_WASM_MODULE_BUILDER_H_
diff --git a/src/wasm/wasm-module.cc b/src/wasm/wasm-module.cc
index 94bf998..f4cf505 100644
--- a/src/wasm/wasm-module.cc
+++ b/src/wasm/wasm-module.cc
@@ -18,6 +18,7 @@
 #include "src/wasm/module-decoder.h"
 #include "src/wasm/wasm-debug.h"
 #include "src/wasm/wasm-function-name-table.h"
+#include "src/wasm/wasm-js.h"
 #include "src/wasm/wasm-module.h"
 #include "src/wasm/wasm-result.h"
 
@@ -27,179 +28,56 @@
 namespace internal {
 namespace wasm {
 
+#define TRACE(...)                                      \
+  do {                                                  \
+    if (FLAG_trace_wasm_instances) PrintF(__VA_ARGS__); \
+  } while (false)
+
+#define TRACE_CHAIN(instance)        \
+  do {                               \
+    instance->PrintInstancesChain(); \
+  } while (false)
+
+namespace {
+
+static const int kPlaceholderMarker = 1000000000;
+
 enum JSFunctionExportInternalField {
   kInternalModuleInstance,
   kInternalArity,
   kInternalSignature
 };
 
-static const int kPlaceholderMarker = 1000000000;
-
-static const char* wasmSections[] = {
-#define F(enumerator, order, string) string,
-    FOR_EACH_WASM_SECTION_TYPE(F)
-#undef F
-        "<unknown>"  // entry for "Max"
-};
-
-static uint8_t wasmSectionsLengths[]{
-#define F(enumerator, order, string) sizeof(string) - 1,
-    FOR_EACH_WASM_SECTION_TYPE(F)
-#undef F
-        9  // entry for "Max"
-};
-
-static uint8_t wasmSectionsOrders[]{
-#define F(enumerator, order, string) order,
-    FOR_EACH_WASM_SECTION_TYPE(F)
-#undef F
-        0  // entry for "Max"
-};
-
-static_assert(sizeof(wasmSections) / sizeof(wasmSections[0]) ==
-                  (size_t)WasmSection::Code::Max + 1,
-              "expected enum WasmSection::Code to be monotonic from 0");
-
-WasmSection::Code WasmSection::begin() { return (WasmSection::Code)0; }
-WasmSection::Code WasmSection::end() { return WasmSection::Code::Max; }
-WasmSection::Code WasmSection::next(WasmSection::Code code) {
-  return (WasmSection::Code)(1 + (uint32_t)code);
-}
-
-const char* WasmSection::getName(WasmSection::Code code) {
-  return wasmSections[(size_t)code];
-}
-
-size_t WasmSection::getNameLength(WasmSection::Code code) {
-  return wasmSectionsLengths[(size_t)code];
-}
-
-int WasmSection::getOrder(WasmSection::Code code) {
-  return wasmSectionsOrders[(size_t)code];
-}
-
-WasmSection::Code WasmSection::lookup(const byte* string, uint32_t length) {
-  // TODO(jfb) Linear search, it may be better to do a common-prefix search.
-  for (Code i = begin(); i != end(); i = next(i)) {
-    if (getNameLength(i) == length && 0 == memcmp(getName(i), string, length)) {
-      return i;
-    }
-  }
-  return Code::Max;
-}
-
-std::ostream& operator<<(std::ostream& os, const WasmModule& module) {
-  os << "WASM module with ";
-  os << (module.min_mem_pages * module.kPageSize) << " min mem";
-  os << (module.max_mem_pages * module.kPageSize) << " max mem";
-  os << module.functions.size() << " functions";
-  os << module.functions.size() << " globals";
-  os << module.functions.size() << " data segments";
-  return os;
-}
-
-std::ostream& operator<<(std::ostream& os, const WasmFunction& function) {
-  os << "WASM function with signature " << *function.sig;
-
-  os << " code bytes: "
-     << (function.code_end_offset - function.code_start_offset);
-  return os;
-}
-
-std::ostream& operator<<(std::ostream& os, const WasmFunctionName& pair) {
-  os << "#" << pair.function_->func_index << ":";
-  if (pair.function_->name_offset > 0) {
-    if (pair.module_) {
-      WasmName name = pair.module_->GetName(pair.function_->name_offset,
-                                            pair.function_->name_length);
-      os.write(name.start(), name.length());
-    } else {
-      os << "+" << pair.function_->func_index;
-    }
-  } else {
-    os << "?";
-  }
-  return os;
-}
-
-Handle<JSFunction> WrapExportCodeAsJSFunction(
-    Isolate* isolate, Handle<Code> export_code, Handle<String> name, int arity,
-    MaybeHandle<ByteArray> maybe_signature, Handle<JSObject> module_instance) {
-  Handle<SharedFunctionInfo> shared =
-      isolate->factory()->NewSharedFunctionInfo(name, export_code, false);
-  shared->set_length(arity);
-  shared->set_internal_formal_parameter_count(arity);
-  Handle<JSFunction> function = isolate->factory()->NewFunction(
-      isolate->wasm_function_map(), name, export_code);
-  function->set_shared(*shared);
-
-  function->SetInternalField(kInternalModuleInstance, *module_instance);
-  // add another Internal Field as the function arity
-  function->SetInternalField(kInternalArity, Smi::FromInt(arity));
-  // add another Internal Field as the signature of the foreign function
-  Handle<ByteArray> signature;
-  if (maybe_signature.ToHandle(&signature)) {
-    function->SetInternalField(kInternalSignature, *signature);
-  }
-  return function;
-}
-
-namespace {
 // Internal constants for the layout of the module object.
-const int kWasmModuleFunctionTable = 0;
-const int kWasmModuleCodeTable = 1;
-const int kWasmMemArrayBuffer = 2;
-const int kWasmGlobalsArrayBuffer = 3;
-// TODO(clemensh): Remove function name array, extract names from module bytes.
-const int kWasmFunctionNamesArray = 4;
-const int kWasmModuleBytesString = 5;
-const int kWasmDebugInfo = 6;
-const int kWasmModuleInternalFieldCount = 7;
-
-// TODO(mtrofin): Unnecessary once we stop using JS Heap for wasm code.
-// For now, each field is expected to have the type commented by its side.
-// The elements typed as "maybe" are optional. The others are mandatory. Since
-// the compiled module is either obtained from the current v8 instance, or from
-// a snapshot produced by a compatible (==identical) v8 instance, we simply
-// fail at instantiation time, in the face of invalid data.
-enum CompiledWasmObjectFields {
-  kFunctions,        // FixedArray of Code
-  kImportData,       // maybe FixedArray of FixedArray respecting the
-                     // WasmImportMetadata structure.
-  kExports,          // maybe FixedArray of FixedArray of WasmExportMetadata
-                     // structure
-  kStartupFunction,  // maybe FixedArray of WasmExportMetadata structure
-  kTableOfIndirectFunctionTables,  // maybe FixedArray of FixedArray of
-                                   // WasmIndirectFunctionTableMetadata
-  kModuleBytes,                    // maybe String
-  kFunctionNameTable,              // maybe ByteArray
-  kMinRequiredMemory,              // Smi. an uint32_t
-  // The following 2 are either together present or absent:
-  kDataSegmentsInfo,  // maybe FixedArray of FixedArray respecting the
-                      // WasmSegmentInfo structure
-  kDataSegments,      // maybe ByteArray.
-
-  kGlobalsSize,                 // Smi. an uint32_t
-  kExportMem,                   // Smi. bool
-  kOrigin,                      // Smi. ModuleOrigin
-  kCompiledWasmObjectTableSize  // Sentinel value.
+enum WasmInstanceObjectFields {
+  kWasmCompiledModule = 0,
+  kWasmModuleFunctionTable,
+  kWasmModuleCodeTable,
+  kWasmMemArrayBuffer,
+  kWasmGlobalsArrayBuffer,
+  // TODO(clemensh): Remove function name array, extract names from module
+  // bytes.
+  kWasmFunctionNamesArray,
+  kWasmModuleBytesString,
+  kWasmDebugInfo,
+  kWasmNumImportedFunctions,
+  kWasmModuleInternalFieldCount
 };
 
-enum WasmImportMetadata {
-  kModuleName,              // String
-  kFunctionName,            // maybe String
-  kOutputCount,             // Smi. an uint32_t
-  kSignature,               // ByteArray. A copy of the data in FunctionSig
-  kWasmImportDataTableSize  // Sentinel value.
+enum WasmImportData {
+  kModuleName,         // String
+  kFunctionName,       // maybe String
+  kOutputCount,        // Smi. a uint32_t
+  kSignature,          // ByteArray. A copy of the data in FunctionSig
+  kWasmImportDataSize  // Sentinel value.
 };
 
-enum WasmExportMetadata {
-  kExportCode,                  // Code
-  kExportName,                  // String
-  kExportArity,                 // Smi, an int
-  kExportedFunctionIndex,       // Smi, an uint32_t
-  kExportedSignature,           // ByteArray. A copy of the data in FunctionSig
-  kWasmExportMetadataTableSize  // Sentinel value.
+enum WasmExportData {
+  kExportName,             // String
+  kExportArity,            // Smi, an int
+  kExportedFunctionIndex,  // Smi, a uint32_t
+  kExportedSignature,      // ByteArray. A copy of the data in FunctionSig
+  kWasmExportDataSize      // Sentinel value.
 };
 
 enum WasmSegmentInfo {
@@ -208,31 +86,26 @@
   kWasmSegmentInfoSize  // Sentinel value.
 };
 
-enum WasmIndirectFunctionTableMetadata {
-  kSize,   // Smi. an uint32_t
-  kTable,  // FixedArray of indirect function table
-  kWasmIndirectFunctionTableMetadataSize  // Sentinel value.
+enum WasmIndirectFunctionTableData {
+  kSize,                              // Smi. a uint32_t
+  kTable,                             // FixedArray of indirect function table
+  kWasmIndirectFunctionTableDataSize  // Sentinel value.
 };
 
 uint32_t GetMinModuleMemSize(const WasmModule* module) {
   return WasmModule::kPageSize * module->min_mem_pages;
 }
 
-void LoadDataSegments(Handle<FixedArray> compiled_module, Address mem_addr,
-                      size_t mem_size) {
-  Isolate* isolate = compiled_module->GetIsolate();
-  MaybeHandle<ByteArray> maybe_data =
-      compiled_module->GetValue<ByteArray>(isolate, kDataSegments);
-  MaybeHandle<FixedArray> maybe_segments =
-      compiled_module->GetValue<FixedArray>(isolate, kDataSegmentsInfo);
+void LoadDataSegments(Handle<WasmCompiledModule> compiled_module,
+                      Address mem_addr, size_t mem_size) {
+  CHECK(compiled_module->has_data_segments() ==
+        compiled_module->has_data_segments_info());
 
-  // We either have both or neither.
-  CHECK(maybe_data.is_null() == maybe_segments.is_null());
   // If we have neither, we're done.
-  if (maybe_data.is_null()) return;
+  if (!compiled_module->has_data_segments()) return;
 
-  Handle<ByteArray> data = maybe_data.ToHandleChecked();
-  Handle<FixedArray> segments = maybe_segments.ToHandleChecked();
+  Handle<ByteArray> data = compiled_module->data_segments();
+  Handle<FixedArray> segments = compiled_module->data_segments_info();
 
   uint32_t last_extraction_pos = 0;
   for (int i = 0; i < segments->length(); ++i) {
@@ -250,12 +123,11 @@
 }
 
 void SaveDataSegmentInfo(Factory* factory, const WasmModule* module,
-                         Handle<FixedArray> compiled_module) {
+                         Handle<WasmCompiledModule> compiled_module) {
   Handle<FixedArray> segments = factory->NewFixedArray(
       static_cast<int>(module->data_segments.size()), TENURED);
   uint32_t data_size = 0;
   for (const WasmDataSegment& segment : module->data_segments) {
-    if (!segment.init) continue;
     if (segment.source_size == 0) continue;
     data_size += segment.source_size;
   }
@@ -264,11 +136,12 @@
   uint32_t last_insertion_pos = 0;
   for (uint32_t i = 0; i < module->data_segments.size(); ++i) {
     const WasmDataSegment& segment = module->data_segments[i];
-    if (!segment.init) continue;
     if (segment.source_size == 0) continue;
     Handle<ByteArray> js_segment =
         factory->NewByteArray(kWasmSegmentInfoSize * sizeof(uint32_t), TENURED);
-    js_segment->set_int(kDestAddr, segment.dest_addr);
+    // TODO(titzer): add support for global offsets for dest_addr
+    CHECK_EQ(WasmInitExpr::kI32Const, segment.dest_addr.kind);
+    js_segment->set_int(kDestAddr, segment.dest_addr.val.i32_const);
     js_segment->set_int(kSourceSize, segment.source_size);
     segments->set(i, *js_segment);
     data->copy_in(last_insertion_pos,
@@ -276,8 +149,8 @@
                   segment.source_size);
     last_insertion_pos += segment.source_size;
   }
-  compiled_module->set(kDataSegmentsInfo, *segments);
-  compiled_module->set(kDataSegments, *data);
+  compiled_module->set_data_segments_info(segments);
+  compiled_module->set_data_segments(data);
 }
 
 void PatchFunctionTable(Handle<Code> code,
@@ -315,8 +188,9 @@
   return buffer;
 }
 
-void RelocateInstanceCode(Handle<JSObject> instance, Address start,
-                          uint32_t prev_size, uint32_t new_size) {
+void RelocateInstanceCode(Handle<JSObject> instance, Address old_start,
+                          Address start, uint32_t prev_size,
+                          uint32_t new_size) {
   Handle<FixedArray> functions = Handle<FixedArray>(
       FixedArray::cast(instance->GetInternalField(kWasmModuleCodeTable)));
   for (int i = 0; i < functions->length(); ++i) {
@@ -325,7 +199,7 @@
     int mask = (1 << RelocInfo::WASM_MEMORY_REFERENCE) |
                (1 << RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
     for (RelocIterator it(*function, mask); !it.done(); it.next()) {
-      it.rinfo()->update_wasm_memory_reference(nullptr, start, prev_size,
+      it.rinfo()->update_wasm_memory_reference(old_start, start, prev_size,
                                                new_size);
     }
   }
@@ -347,7 +221,8 @@
   return mem_buffer;
 }
 
-void RelocateGlobals(Handle<JSObject> instance, Address globals_start) {
+void RelocateGlobals(Handle<JSObject> instance, Address old_start,
+                     Address globals_start) {
   Handle<FixedArray> functions = Handle<FixedArray>(
       FixedArray::cast(instance->GetInternalField(kWasmModuleCodeTable)));
   uint32_t function_count = static_cast<uint32_t>(functions->length());
@@ -356,7 +231,7 @@
     AllowDeferredHandleDereference embedding_raw_address;
     int mask = 1 << RelocInfo::WASM_GLOBAL_REFERENCE;
     for (RelocIterator it(*function, mask); !it.done(); it.next()) {
-      it.rinfo()->update_wasm_global_reference(nullptr, globals_start);
+      it.rinfo()->update_wasm_global_reference(old_start, globals_start);
     }
   }
 }
@@ -375,64 +250,41 @@
   return code;
 }
 
-// TODO(mtrofin): remove when we stop relying on placeholders.
-void InitializePlaceholders(Factory* factory,
-                            std::vector<Handle<Code>>* placeholders,
-                            size_t size) {
-  DCHECK(placeholders->empty());
-  placeholders->reserve(size);
-
-  for (uint32_t i = 0; i < size; ++i) {
-    placeholders->push_back(CreatePlaceholder(factory, i, Code::WASM_FUNCTION));
-  }
-}
-
 bool LinkFunction(Handle<Code> unlinked,
-                  const std::vector<Handle<Code>>& code_targets,
-                  Code::Kind kind) {
+                  std::vector<Handle<Code>>& code_table) {
   bool modified = false;
-  int mode_mask = RelocInfo::kCodeTargetMask;
+  int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
   AllowDeferredHandleDereference embedding_raw_address;
   for (RelocIterator it(*unlinked, mode_mask); !it.done(); it.next()) {
     RelocInfo::Mode mode = it.rinfo()->rmode();
     if (RelocInfo::IsCodeTarget(mode)) {
       Code* target =
           Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
-      if (target->kind() == kind &&
-          target->constant_pool_offset() >= kPlaceholderMarker) {
-        // Patch direct calls to placeholder code objects.
-        uint32_t index = target->constant_pool_offset() - kPlaceholderMarker;
-        CHECK(index < code_targets.size());
-        Handle<Code> new_target = code_targets[index];
-        if (target != *new_target) {
-          it.rinfo()->set_target_address(new_target->instruction_start(),
-                                         UPDATE_WRITE_BARRIER,
-                                         SKIP_ICACHE_FLUSH);
-          modified = true;
+      if (target->constant_pool_offset() < kPlaceholderMarker) continue;
+      switch (target->kind()) {
+        case Code::WASM_FUNCTION:        // fall through
+        case Code::WASM_TO_JS_FUNCTION:  // fall through
+        case Code::JS_TO_WASM_FUNCTION: {
+          // Patch direct calls to placeholder code objects.
+          uint32_t index = target->constant_pool_offset() - kPlaceholderMarker;
+          Handle<Code> new_target = code_table[index];
+          if (target != *new_target) {
+            it.rinfo()->set_target_address(new_target->instruction_start(),
+                                           UPDATE_WRITE_BARRIER,
+                                           SKIP_ICACHE_FLUSH);
+            modified = true;
+          }
+          break;
         }
+        default:
+          break;
       }
     }
   }
   return modified;
 }
 
-void LinkModuleFunctions(Isolate* isolate,
-                         std::vector<Handle<Code>>& functions) {
-  for (size_t i = 0; i < functions.size(); ++i) {
-    Handle<Code> code = functions[i];
-    LinkFunction(code, functions, Code::WASM_FUNCTION);
-  }
-}
-
-void LinkImports(Isolate* isolate, std::vector<Handle<Code>>& functions,
-                 const std::vector<Handle<Code>>& imports) {
-  for (uint32_t i = 0; i < functions.size(); ++i) {
-    Handle<Code> code = functions[i];
-    LinkFunction(code, imports, Code::WASM_TO_JS_FUNCTION);
-  }
-}
-
-void FlushAssemblyCache(Isolate* isolate, Handle<FixedArray> functions) {
+void FlushICache(Isolate* isolate, Handle<FixedArray> functions) {
   for (int i = 0; i < functions->length(); ++i) {
     Handle<Code> code = functions->GetValueChecked<Code>(isolate, i);
     Assembler::FlushICache(isolate, code->instruction_start(),
@@ -440,39 +292,146 @@
   }
 }
 
-}  // namespace
+// Fetches the compilation unit of a wasm function and executes its parallel
+// phase.
+bool FetchAndExecuteCompilationUnit(
+    Isolate* isolate,
+    std::vector<compiler::WasmCompilationUnit*>* compilation_units,
+    std::queue<compiler::WasmCompilationUnit*>* executed_units,
+    base::Mutex* result_mutex, base::AtomicNumber<size_t>* next_unit) {
+  DisallowHeapAllocation no_allocation;
+  DisallowHandleAllocation no_handles;
+  DisallowHandleDereference no_deref;
+  DisallowCodeDependencyChange no_dependency_change;
 
-WasmModule::WasmModule(byte* module_start)
-    : module_start(module_start),
-      module_end(nullptr),
-      min_mem_pages(0),
-      max_mem_pages(0),
-      mem_export(false),
-      mem_external(false),
-      start_function_index(-1),
-      origin(kWasmOrigin),
-      globals_size(0),
-      pending_tasks(new base::Semaphore(0)) {}
+  // - 1 because AtomicIncrement returns the value after the atomic increment.
+  size_t index = next_unit->Increment(1) - 1;
+  if (index >= compilation_units->size()) {
+    return false;
+  }
+
+  compiler::WasmCompilationUnit* unit = compilation_units->at(index);
+  if (unit != nullptr) {
+    unit->ExecuteCompilation();
+    base::LockGuard<base::Mutex> guard(result_mutex);
+    executed_units->push(unit);
+  }
+  return true;
+}
+
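+// Background task that keeps pulling compilation units off the shared list
+// until none remain, then signals the given semaphore.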
+class WasmCompilationTask : public CancelableTask {
+ public:
+  WasmCompilationTask(
+      Isolate* isolate,
+      std::vector<compiler::WasmCompilationUnit*>* compilation_units,
+      std::queue<compiler::WasmCompilationUnit*>* executed_units,
+      base::Semaphore* on_finished, base::Mutex* result_mutex,
+      base::AtomicNumber<size_t>* next_unit)
+      : CancelableTask(isolate),
+        isolate_(isolate),
+        compilation_units_(compilation_units),
+        executed_units_(executed_units),
+        on_finished_(on_finished),
+        result_mutex_(result_mutex),
+        next_unit_(next_unit) {}
+
+  void RunInternal() override {
+    while (FetchAndExecuteCompilationUnit(isolate_, compilation_units_,
+                                          executed_units_, result_mutex_,
+                                          next_unit_)) {
+    }
+    on_finished_->Signal();
+  }
+
+  Isolate* isolate_;
+  std::vector<compiler::WasmCompilationUnit*>* compilation_units_;
+  std::queue<compiler::WasmCompilationUnit*>* executed_units_;
+  base::Semaphore* on_finished_;
+  base::Mutex* result_mutex_;
+  base::AtomicNumber<size_t>* next_unit_;
+};
+
+static void RecordStats(Isolate* isolate, Code* code) {
+  isolate->counters()->wasm_generated_code_size()->Increment(code->body_size());
+  isolate->counters()->wasm_reloc_size()->Increment(
+      code->relocation_info()->length());
+}
+
+static void RecordStats(Isolate* isolate, Handle<FixedArray> functions) {
+  DisallowHeapAllocation no_gc;
+  for (int i = 0; i < functions->length(); ++i) {
+    RecordStats(isolate, Code::cast(functions->get(i)));
+  }
+}
+
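+// Returns the backing-store address of the owner's globals buffer, or nullptr
+// if no globals buffer has been allocated yet.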
+Address GetGlobalStartAddressFromCodeTemplate(Object* undefined,
+                                              JSObject* owner) {
+  Address old_address = nullptr;
+  Object* stored_value = owner->GetInternalField(kWasmGlobalsArrayBuffer);
+  if (stored_value != undefined) {
+    old_address = static_cast<Address>(
+        JSArrayBuffer::cast(stored_value)->backing_store());
+  }
+  return old_address;
+}
+
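+// Encodes the function imports of the module into a FixedArray of
+// (module name, function name, return count, signature) entries.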
+Handle<FixedArray> GetImportsData(Factory* factory, const WasmModule* module) {
+  Handle<FixedArray> ret = factory->NewFixedArray(
+      static_cast<int>(module->import_table.size()), TENURED);
+  for (size_t i = 0; i < module->import_table.size(); ++i) {
+    const WasmImport& import = module->import_table[i];
+    if (import.kind != kExternalFunction) continue;
+    WasmName module_name = module->GetNameOrNull(import.module_name_offset,
+                                                 import.module_name_length);
+    WasmName function_name = module->GetNameOrNull(import.field_name_offset,
+                                                   import.field_name_length);
+
+    Handle<String> module_name_string =
+        factory->InternalizeUtf8String(module_name);
+    Handle<String> function_name_string =
+        function_name.is_empty()
+            ? Handle<String>::null()
+            : factory->InternalizeUtf8String(function_name);
+    FunctionSig* fsig = module->functions[import.index].sig;
+    Handle<ByteArray> sig = factory->NewByteArray(
+        static_cast<int>(fsig->parameter_count() + fsig->return_count()),
+        TENURED);
+    sig->copy_in(0, reinterpret_cast<const byte*>(fsig->raw_data()),
+                 sig->length());
+    Handle<FixedArray> encoded_import =
+        factory->NewFixedArray(kWasmImportDataSize, TENURED);
+    encoded_import->set(kModuleName, *module_name_string);
+    if (!function_name_string.is_null()) {
+      encoded_import->set(kFunctionName, *function_name_string);
+    }
+    encoded_import->set(kOutputCount,
+                        Smi::FromInt(static_cast<int>(fsig->return_count())));
+    encoded_import->set(kSignature, *sig);
+    ret->set(static_cast<int>(i), *encoded_import);
+  }
+  return ret;
+}
 
 static MaybeHandle<JSFunction> ReportFFIError(
-    ErrorThrower& thrower, const char* error, uint32_t index,
+    ErrorThrower* thrower, const char* error, uint32_t index,
     Handle<String> module_name, MaybeHandle<String> function_name) {
   Handle<String> function_name_handle;
   if (function_name.ToHandle(&function_name_handle)) {
-    thrower.Error("Import #%d module=\"%.*s\" function=\"%.*s\" error: %s",
-                  index, module_name->length(), module_name->ToCString().get(),
-                  function_name_handle->length(),
-                  function_name_handle->ToCString().get(), error);
+    thrower->Error("Import #%d module=\"%.*s\" function=\"%.*s\" error: %s",
+                   index, module_name->length(), module_name->ToCString().get(),
+                   function_name_handle->length(),
+                   function_name_handle->ToCString().get(), error);
   } else {
-    thrower.Error("Import #%d module=\"%.*s\" error: %s", index,
-                  module_name->length(), module_name->ToCString().get(), error);
+    thrower->Error("Import #%d module=\"%.*s\" error: %s", index,
+                   module_name->length(), module_name->ToCString().get(),
+                   error);
   }
-  thrower.Error("Import ");
+  thrower->Error("Import ");
   return MaybeHandle<JSFunction>();
 }
 
 static MaybeHandle<JSReceiver> LookupFunction(
-    ErrorThrower& thrower, Factory* factory, Handle<JSReceiver> ffi,
+    ErrorThrower* thrower, Factory* factory, Handle<JSReceiver> ffi,
     uint32_t index, Handle<String> module_name,
     MaybeHandle<String> function_name) {
   if (ffi.is_null()) {
@@ -517,213 +476,88 @@
   return Handle<JSReceiver>::cast(function);
 }
 
-namespace {
-// Fetches the compilation unit of a wasm function and executes its parallel
-// phase.
-bool FetchAndExecuteCompilationUnit(
-    Isolate* isolate,
-    std::vector<compiler::WasmCompilationUnit*>* compilation_units,
-    std::queue<compiler::WasmCompilationUnit*>* executed_units,
-    base::Mutex* result_mutex, base::AtomicNumber<size_t>* next_unit) {
-  DisallowHeapAllocation no_allocation;
-  DisallowHandleAllocation no_handles;
-  DisallowHandleDereference no_deref;
-  DisallowCodeDependencyChange no_dependency_change;
+Handle<Code> CompileImportWrapper(Isolate* isolate,
+                                  const Handle<JSReceiver> ffi, int index,
+                                  Handle<FixedArray> import_data,
+                                  ErrorThrower* thrower) {
+  Handle<FixedArray> data =
+      import_data->GetValueChecked<FixedArray>(isolate, index);
+  Handle<String> module_name =
+      data->GetValueChecked<String>(isolate, kModuleName);
+  MaybeHandle<String> function_name =
+      data->GetValue<String>(isolate, kFunctionName);
 
-  // - 1 because AtomicIntrement returns the value after the atomic increment.
-  size_t index = next_unit->Increment(1) - 1;
-  if (index >= compilation_units->size()) {
-    return false;
-  }
+  // TODO(mtrofin): this is actually a uint32_t. We should rationalize it when
+  // we rationalize signed/unsigned handling.
+  int ret_count = Smi::cast(data->get(kOutputCount))->value();
+  CHECK_GE(ret_count, 0);
+  Handle<ByteArray> sig_data =
+      data->GetValueChecked<ByteArray>(isolate, kSignature);
+  int sig_data_size = sig_data->length();
+  int param_count = sig_data_size - ret_count;
+  CHECK_GE(param_count, 0);
 
-  compiler::WasmCompilationUnit* unit = compilation_units->at(index);
-  if (unit != nullptr) {
-    unit->ExecuteCompilation();
-    {
-      base::LockGuard<base::Mutex> guard(result_mutex);
-      executed_units->push(unit);
-    }
-  }
-  return true;
-}
-
-class WasmCompilationTask : public CancelableTask {
- public:
-  WasmCompilationTask(
-      Isolate* isolate,
-      std::vector<compiler::WasmCompilationUnit*>* compilation_units,
-      std::queue<compiler::WasmCompilationUnit*>* executed_units,
-      base::Semaphore* on_finished, base::Mutex* result_mutex,
-      base::AtomicNumber<size_t>* next_unit)
-      : CancelableTask(isolate),
-        isolate_(isolate),
-        compilation_units_(compilation_units),
-        executed_units_(executed_units),
-        on_finished_(on_finished),
-        result_mutex_(result_mutex),
-        next_unit_(next_unit) {}
-
-  void RunInternal() override {
-    while (FetchAndExecuteCompilationUnit(isolate_, compilation_units_,
-                                          executed_units_, result_mutex_,
-                                          next_unit_)) {
-    }
-    on_finished_->Signal();
-  }
-
-  Isolate* isolate_;
-  std::vector<compiler::WasmCompilationUnit*>* compilation_units_;
-  std::queue<compiler::WasmCompilationUnit*>* executed_units_;
-  base::Semaphore* on_finished_;
-  base::Mutex* result_mutex_;
-  base::AtomicNumber<size_t>* next_unit_;
-};
-
-static void RecordStats(Isolate* isolate, Code* code) {
-  isolate->counters()->wasm_generated_code_size()->Increment(code->body_size());
-  isolate->counters()->wasm_reloc_size()->Increment(
-      code->relocation_info()->length());
-}
-
-static void RecordStats(Isolate* isolate,
-                        const std::vector<Handle<Code>>& functions) {
-  for (Handle<Code> c : functions) RecordStats(isolate, *c);
-}
-
-static void RecordStats(Isolate* isolate, Handle<FixedArray> functions) {
-  DisallowHeapAllocation no_gc;
-  for (int i = 0; i < functions->length(); ++i) {
-    RecordStats(isolate, Code::cast(functions->get(i)));
-  }
-}
-
-Handle<FixedArray> GetImportsMetadata(Factory* factory,
-                                      const WasmModule* module) {
-  Handle<FixedArray> ret = factory->NewFixedArray(
-      static_cast<int>(module->import_table.size()), TENURED);
-  for (size_t i = 0; i < module->import_table.size(); ++i) {
-    const WasmImport& import = module->import_table[i];
-    WasmName module_name = module->GetNameOrNull(import.module_name_offset,
-                                                 import.module_name_length);
-    WasmName function_name = module->GetNameOrNull(import.function_name_offset,
-                                                   import.function_name_length);
-
-    Handle<String> module_name_string =
-        factory->InternalizeUtf8String(module_name);
-    Handle<String> function_name_string =
-        function_name.is_empty()
-            ? Handle<String>::null()
-            : factory->InternalizeUtf8String(function_name);
-    Handle<ByteArray> sig =
-        factory->NewByteArray(static_cast<int>(import.sig->parameter_count() +
-                                               import.sig->return_count()),
-                              TENURED);
-    sig->copy_in(0, reinterpret_cast<const byte*>(import.sig->raw_data()),
-                 sig->length());
-    Handle<FixedArray> encoded_import =
-        factory->NewFixedArray(kWasmImportDataTableSize, TENURED);
-    encoded_import->set(kModuleName, *module_name_string);
-    if (!function_name_string.is_null()) {
-      encoded_import->set(kFunctionName, *function_name_string);
-    }
-    encoded_import->set(
-        kOutputCount,
-        Smi::FromInt(static_cast<int>(import.sig->return_count())));
-    encoded_import->set(kSignature, *sig);
-    ret->set(static_cast<int>(i), *encoded_import);
-  }
-  return ret;
-}
-
-bool CompileWrappersToImportedFunctions(Isolate* isolate,
-                                        const Handle<JSReceiver> ffi,
-                                        std::vector<Handle<Code>>& imports,
-                                        Handle<FixedArray> import_data,
-                                        ErrorThrower* thrower) {
-  uint32_t import_count = static_cast<uint32_t>(import_data->length());
-  if (import_count > 0) {
-    imports.reserve(import_count);
-    for (uint32_t index = 0; index < import_count; ++index) {
-      Handle<FixedArray> data =
-          import_data->GetValueChecked<FixedArray>(isolate, index);
-      Handle<String> module_name =
-          data->GetValueChecked<String>(isolate, kModuleName);
-      MaybeHandle<String> function_name =
-          data->GetValue<String>(isolate, kFunctionName);
-
-      // TODO(mtrofin): this is an uint32_t, actually. We should rationalize
-      // it when we rationalize signed/unsigned stuff.
-      int ret_count = Smi::cast(data->get(kOutputCount))->value();
-      CHECK(ret_count >= 0);
-      Handle<ByteArray> sig_data =
-          data->GetValueChecked<ByteArray>(isolate, kSignature);
-      int sig_data_size = sig_data->length();
-      int param_count = sig_data_size - ret_count;
-      CHECK(param_count >= 0);
-
-      MaybeHandle<JSReceiver> function = LookupFunction(
-          *thrower, isolate->factory(), ffi, index, module_name, function_name);
-      if (function.is_null()) return false;
-      Handle<Code> code;
-      Handle<JSReceiver> target = function.ToHandleChecked();
-      bool isMatch = false;
-      Handle<Code> export_wrapper_code;
-      if (target->IsJSFunction()) {
-        Handle<JSFunction> func = Handle<JSFunction>::cast(target);
-        export_wrapper_code = handle(func->code());
-        if (export_wrapper_code->kind() == Code::JS_TO_WASM_FUNCTION) {
-          int exported_param_count =
-              Smi::cast(func->GetInternalField(kInternalArity))->value();
-          Handle<ByteArray> exportedSig = Handle<ByteArray>(
-              ByteArray::cast(func->GetInternalField(kInternalSignature)));
-          if (exported_param_count == param_count &&
-              exportedSig->length() == sig_data->length() &&
-              memcmp(exportedSig->data(), sig_data->data(),
-                     exportedSig->length()) == 0) {
-            isMatch = true;
-          }
-        }
+  MaybeHandle<JSReceiver> function = LookupFunction(
+      thrower, isolate->factory(), ffi, index, module_name, function_name);
+  if (function.is_null()) return Handle<Code>::null();
+  Handle<Code> code;
+  Handle<JSReceiver> target = function.ToHandleChecked();
+  bool isMatch = false;
+  Handle<Code> export_wrapper_code;
+  if (target->IsJSFunction()) {
+    Handle<JSFunction> func = Handle<JSFunction>::cast(target);
+    export_wrapper_code = handle(func->code());
+    if (export_wrapper_code->kind() == Code::JS_TO_WASM_FUNCTION) {
+      int exported_param_count =
+          Smi::cast(func->GetInternalField(kInternalArity))->value();
+      Handle<ByteArray> exportedSig = Handle<ByteArray>(
+          ByteArray::cast(func->GetInternalField(kInternalSignature)));
+      if (exported_param_count == param_count &&
+          exportedSig->length() == sig_data->length() &&
+          memcmp(exportedSig->data(), sig_data->data(),
+                 exportedSig->length()) == 0) {
+        isMatch = true;
       }
-      if (isMatch) {
-        int wasm_count = 0;
-        int const mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
-        for (RelocIterator it(*export_wrapper_code, mask); !it.done();
-             it.next()) {
-          RelocInfo* rinfo = it.rinfo();
-          Address target_address = rinfo->target_address();
-          Code* target = Code::GetCodeFromTargetAddress(target_address);
-          if (target->kind() == Code::WASM_FUNCTION) {
-            ++wasm_count;
-            code = handle(target);
-          }
-        }
-        DCHECK(wasm_count == 1);
-      } else {
-        // Copy the signature to avoid a raw pointer into a heap object when
-        // GC can happen.
-        Zone zone(isolate->allocator());
-        MachineRepresentation* reps =
-            zone.NewArray<MachineRepresentation>(sig_data_size);
-        memcpy(reps, sig_data->data(),
-               sizeof(MachineRepresentation) * sig_data_size);
-        FunctionSig sig(ret_count, param_count, reps);
-
-        code = compiler::CompileWasmToJSWrapper(isolate, target, &sig, index,
-                                                module_name, function_name);
-      }
-      imports.push_back(code);
     }
   }
-  return true;
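+  // If the import is itself an export of another WASM instance with an
+  // identical signature, call the underlying WASM code directly; otherwise
+  // compile a WASM-to-JS wrapper around the JS callable.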
+  if (isMatch) {
+    int wasm_count = 0;
+    int const mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
+    for (RelocIterator it(*export_wrapper_code, mask); !it.done(); it.next()) {
+      RelocInfo* rinfo = it.rinfo();
+      Address target_address = rinfo->target_address();
+      Code* target = Code::GetCodeFromTargetAddress(target_address);
+      if (target->kind() == Code::WASM_FUNCTION) {
+        ++wasm_count;
+        code = handle(target);
+      }
+    }
+    DCHECK(wasm_count == 1);
+    return code;
+  } else {
+    // Copy the signature to avoid a raw pointer into a heap object when
+    // GC can happen.
+    Zone zone(isolate->allocator());
+    MachineRepresentation* reps =
+        zone.NewArray<MachineRepresentation>(sig_data_size);
+    memcpy(reps, sig_data->data(),
+           sizeof(MachineRepresentation) * sig_data_size);
+    FunctionSig sig(ret_count, param_count, reps);
+
+    return compiler::CompileWasmToJSWrapper(isolate, target, &sig, index,
+                                            module_name, function_name);
+  }
 }
 
 void InitializeParallelCompilation(
     Isolate* isolate, const std::vector<WasmFunction>& functions,
     std::vector<compiler::WasmCompilationUnit*>& compilation_units,
-    ModuleEnv& module_env, ErrorThrower& thrower) {
+    ModuleEnv& module_env, ErrorThrower* thrower) {
   for (uint32_t i = FLAG_skip_compiling_wasm_funcs; i < functions.size(); ++i) {
-    compilation_units[i] = new compiler::WasmCompilationUnit(
-        &thrower, isolate, &module_env, &functions[i], i);
+    const WasmFunction* func = &functions[i];
+    compilation_units[i] =
+        func->imported ? nullptr : new compiler::WasmCompilationUnit(
+                                       thrower, isolate, &module_env, func, i);
   }
 }
 
@@ -812,7 +646,7 @@
   // 1) The main thread allocates a compilation unit for each wasm function
   //    and stores them in the vector {compilation_units}.
   InitializeParallelCompilation(isolate, module->functions, compilation_units,
-                                *module_env, *thrower);
+                                *module_env, thrower);
 
   // Objects for the synchronization with the background threads.
   base::Mutex result_mutex;
@@ -853,8 +687,8 @@
   for (uint32_t i = FLAG_skip_compiling_wasm_funcs;
        i < module->functions.size(); ++i) {
     const WasmFunction& func = module->functions[i];
+    if (func.imported) continue;  // Imports are compiled at instantiation time.
 
-    DCHECK_EQ(i, func.func_index);
     WasmName str = module->GetName(func.name_offset, func.name_length);
     Handle<Code> code = Handle<Code>::null();
     // Compile the function.
@@ -870,361 +704,161 @@
   }
 }
 
-void SetDebugSupport(Factory* factory, Handle<FixedArray> compiled_module,
-                     Handle<JSObject> js_object) {
-  Isolate* isolate = compiled_module->GetIsolate();
-  MaybeHandle<String> module_bytes_string =
-      compiled_module->GetValue<String>(isolate, kModuleBytes);
-  if (!module_bytes_string.is_null()) {
-    js_object->SetInternalField(kWasmModuleBytesString,
-                                *module_bytes_string.ToHandleChecked());
+void PatchDirectCalls(Handle<FixedArray> old_functions,
+                      Handle<FixedArray> new_functions, int start) {
+  DCHECK_EQ(new_functions->length(), old_functions->length());
+
+  DisallowHeapAllocation no_gc;
+  std::map<Code*, Code*> old_to_new_code;
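+  // Build a map from each old code object to its replacement so the
+  // relocation pass below can translate call targets with one lookup.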
+  for (int i = 0; i < new_functions->length(); ++i) {
+    old_to_new_code.insert(std::make_pair(Code::cast(old_functions->get(i)),
+                                          Code::cast(new_functions->get(i))));
   }
-  Handle<FixedArray> functions = Handle<FixedArray>(
-      FixedArray::cast(js_object->GetInternalField(kWasmModuleCodeTable)));
-
-  for (int i = FLAG_skip_compiling_wasm_funcs; i < functions->length(); ++i) {
-    Handle<Code> code = functions->GetValueChecked<Code>(isolate, i);
-    DCHECK(code->deoptimization_data() == nullptr ||
-           code->deoptimization_data()->length() == 0);
-    Handle<FixedArray> deopt_data = factory->NewFixedArray(2, TENURED);
-    if (!js_object.is_null()) {
-      deopt_data->set(0, *js_object);
-    }
-    deopt_data->set(1, Smi::FromInt(static_cast<int>(i)));
-    deopt_data->set_length(2);
-    code->set_deoptimization_data(*deopt_data);
-  }
-
-  MaybeHandle<ByteArray> function_name_table =
-      compiled_module->GetValue<ByteArray>(isolate, kFunctionNameTable);
-  if (!function_name_table.is_null()) {
-    js_object->SetInternalField(kWasmFunctionNamesArray,
-                                *function_name_table.ToHandleChecked());
-  }
-}
-
-bool SetupGlobals(Isolate* isolate, Handle<FixedArray> compiled_module,
-                  Handle<JSObject> instance, ErrorThrower* thrower) {
-  uint32_t globals_size = static_cast<uint32_t>(
-      Smi::cast(compiled_module->get(kGlobalsSize))->value());
-  if (globals_size > 0) {
-    Handle<JSArrayBuffer> globals_buffer =
-        NewArrayBuffer(isolate, globals_size);
-    if (globals_buffer.is_null()) {
-      thrower->Error("Out of memory: wasm globals");
-      return false;
-    }
-    RelocateGlobals(instance,
-                    static_cast<Address>(globals_buffer->backing_store()));
-    instance->SetInternalField(kWasmGlobalsArrayBuffer, *globals_buffer);
-  }
-  return true;
-}
-
-bool SetupInstanceHeap(Isolate* isolate, Handle<FixedArray> compiled_module,
-                       Handle<JSObject> instance, Handle<JSArrayBuffer> memory,
-                       ErrorThrower* thrower) {
-  uint32_t min_mem_pages = static_cast<uint32_t>(
-      Smi::cast(compiled_module->get(kMinRequiredMemory))->value());
-  isolate->counters()->wasm_min_mem_pages_count()->AddSample(min_mem_pages);
-  // TODO(wasm): re-enable counter for max_mem_pages when we use that field.
-
-  if (memory.is_null() && min_mem_pages > 0) {
-    memory = AllocateMemory(thrower, isolate, min_mem_pages);
-    if (memory.is_null()) {
-      return false;
-    }
-  }
-
-  if (!memory.is_null()) {
-    instance->SetInternalField(kWasmMemArrayBuffer, *memory);
-    Address mem_start = static_cast<Address>(memory->backing_store());
-    uint32_t mem_size = static_cast<uint32_t>(memory->byte_length()->Number());
-    RelocateInstanceCode(instance, mem_start,
-                         WasmModule::kPageSize * min_mem_pages, mem_size);
-    LoadDataSegments(compiled_module, mem_start, mem_size);
-  }
-  return true;
-}
-
-bool SetupImports(Isolate* isolate, Handle<FixedArray> compiled_module,
-                  Handle<JSObject> instance, ErrorThrower* thrower,
-                  Handle<JSReceiver> ffi) {
-  //-------------------------------------------------------------------------
-  // Compile wrappers to imported functions.
-  //-------------------------------------------------------------------------
-  std::vector<Handle<Code>> import_code;
-  MaybeHandle<FixedArray> maybe_import_data =
-      compiled_module->GetValue<FixedArray>(isolate, kImportData);
-  Handle<FixedArray> import_data;
-  if (maybe_import_data.ToHandle(&import_data)) {
-    if (!CompileWrappersToImportedFunctions(isolate, ffi, import_code,
-                                            import_data, thrower)) {
-      return false;
-    }
-  }
-
-  RecordStats(isolate, import_code);
-
-  Handle<FixedArray> code_table = Handle<FixedArray>(
-      FixedArray::cast(instance->GetInternalField(kWasmModuleCodeTable)));
-  // TODO(mtrofin): get the code off std::vector and on FixedArray, for
-  // consistency.
-  std::vector<Handle<Code>> function_code(code_table->length());
-  for (int i = 0; i < code_table->length(); ++i) {
-    Handle<Code> code = Handle<Code>(Code::cast(code_table->get(i)));
-    function_code[i] = code;
-  }
-
-  LinkImports(isolate, function_code, import_code);
-  return true;
-}
-
-bool SetupExportsObject(Handle<FixedArray> compiled_module, Isolate* isolate,
-                        Handle<JSObject> instance, ErrorThrower* thrower) {
-  Factory* factory = isolate->factory();
-  bool mem_export =
-      static_cast<bool>(Smi::cast(compiled_module->get(kExportMem))->value());
-  ModuleOrigin origin = static_cast<ModuleOrigin>(
-      Smi::cast(compiled_module->get(kOrigin))->value());
-
-  MaybeHandle<FixedArray> maybe_exports =
-      compiled_module->GetValue<FixedArray>(isolate, kExports);
-  if (!maybe_exports.is_null() || mem_export) {
-    PropertyDescriptor desc;
-    desc.set_writable(false);
-
-    Handle<JSObject> exports_object = instance;
-    if (origin == kWasmOrigin) {
-      // Create the "exports" object.
-      Handle<JSFunction> object_function = Handle<JSFunction>(
-          isolate->native_context()->object_function(), isolate);
-      exports_object = factory->NewJSObject(object_function, TENURED);
-      Handle<String> exports_name = factory->InternalizeUtf8String("exports");
-      JSObject::AddProperty(instance, exports_name, exports_object, READ_ONLY);
-    }
-    Handle<FixedArray> exports;
-    if (maybe_exports.ToHandle(&exports)) {
-      int exports_size = exports->length();
-      for (int i = 0; i < exports_size; ++i) {
-        if (thrower->error()) return false;
-        Handle<FixedArray> export_metadata =
-            exports->GetValueChecked<FixedArray>(isolate, i);
-        Handle<Code> export_code =
-            export_metadata->GetValueChecked<Code>(isolate, kExportCode);
-        RecordStats(isolate, *export_code);
-        Handle<String> name =
-            export_metadata->GetValueChecked<String>(isolate, kExportName);
-        int arity = Smi::cast(export_metadata->get(kExportArity))->value();
-        MaybeHandle<ByteArray> signature =
-            export_metadata->GetValue<ByteArray>(isolate, kExportedSignature);
-        Handle<JSFunction> function = WrapExportCodeAsJSFunction(
-            isolate, export_code, name, arity, signature, instance);
-        desc.set_value(function);
-        Maybe<bool> status = JSReceiver::DefineOwnProperty(
-            isolate, exports_object, name, &desc, Object::THROW_ON_ERROR);
-        if (!status.IsJust()) {
-          thrower->Error("export of %.*s failed.", name->length(),
-                         name->ToCString().get());
-          return false;
+  int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
+  AllowDeferredHandleDereference embedding_raw_address;
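+  // Walk the direct CODE_TARGET calls in each new function and redirect any
+  // call that still points at an old WASM or WASM-to-JS code object to its
+  // counterpart in {new_functions}.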
+  for (int i = start; i < new_functions->length(); ++i) {
+    Code* wasm_function = Code::cast(new_functions->get(i));
+    for (RelocIterator it(wasm_function, mode_mask); !it.done(); it.next()) {
+      Code* old_code =
+          Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
+      if (old_code->kind() == Code::WASM_TO_JS_FUNCTION ||
+          old_code->kind() == Code::WASM_FUNCTION) {
+        auto found = old_to_new_code.find(old_code);
+        DCHECK(found != old_to_new_code.end());
+        Code* new_code = found->second;
+        if (new_code != old_code) {
+          it.rinfo()->set_target_address(new_code->instruction_start(),
+                                         UPDATE_WRITE_BARRIER,
+                                         SKIP_ICACHE_FLUSH);
         }
       }
     }
-    if (mem_export) {
-      // Export the memory as a named property.
-      Handle<String> name = factory->InternalizeUtf8String("memory");
-      Handle<JSArrayBuffer> memory = Handle<JSArrayBuffer>(
-          JSArrayBuffer::cast(instance->GetInternalField(kWasmMemArrayBuffer)));
-      JSObject::AddProperty(exports_object, name, memory, READ_ONLY);
-    }
   }
-  return true;
 }
 
-}  // namespace
+static void ResetCompiledModule(Isolate* isolate, JSObject* owner,
+                                WasmCompiledModule* compiled_module) {
+  TRACE("Resetting %d\n", compiled_module->instance_id());
+  Object* undefined = *isolate->factory()->undefined_value();
+  uint32_t old_mem_size = compiled_module->has_heap()
+                              ? compiled_module->mem_size()
+                              : compiled_module->default_mem_size();
+  uint32_t default_mem_size = compiled_module->default_mem_size();
+  Object* mem_start = compiled_module->ptr_to_heap();
+  Address old_mem_address = nullptr;
+  Address globals_start =
+      GetGlobalStartAddressFromCodeTemplate(undefined, owner);
 
-MaybeHandle<FixedArray> WasmModule::CompileFunctions(
-    Isolate* isolate, ErrorThrower* thrower) const {
-  Factory* factory = isolate->factory();
-
-  MaybeHandle<FixedArray> nothing;
-
-  WasmModuleInstance temp_instance_for_compilation(this);
-  temp_instance_for_compilation.context = isolate->native_context();
-  temp_instance_for_compilation.mem_size = GetMinModuleMemSize(this);
-  temp_instance_for_compilation.mem_start = nullptr;
-  temp_instance_for_compilation.globals_start = nullptr;
-
-  MaybeHandle<FixedArray> indirect_table =
-      function_tables.size()
-          ? factory->NewFixedArray(static_cast<int>(function_tables.size()),
-                                   TENURED)
-          : MaybeHandle<FixedArray>();
-  for (uint32_t i = 0; i < function_tables.size(); ++i) {
-    Handle<FixedArray> values = wasm::BuildFunctionTable(isolate, i, this);
-    temp_instance_for_compilation.function_tables[i] = values;
-
-    Handle<FixedArray> metadata = isolate->factory()->NewFixedArray(
-        kWasmIndirectFunctionTableMetadataSize, TENURED);
-    metadata->set(kSize, Smi::FromInt(function_tables[i].size));
-    metadata->set(kTable, *values);
-    indirect_table.ToHandleChecked()->set(i, *metadata);
+  if (old_mem_size > 0) {
+    CHECK_NE(mem_start, undefined);
+    old_mem_address =
+        static_cast<Address>(JSArrayBuffer::cast(mem_start)->backing_store());
   }
+  int mode_mask = RelocInfo::ModeMask(RelocInfo::WASM_MEMORY_REFERENCE) |
+                  RelocInfo::ModeMask(RelocInfo::WASM_MEMORY_SIZE_REFERENCE) |
+                  RelocInfo::ModeMask(RelocInfo::WASM_GLOBAL_REFERENCE);
 
-  HistogramTimerScope wasm_compile_module_time_scope(
-      isolate->counters()->wasm_compile_module_time());
-
-  ModuleEnv module_env;
-  module_env.module = this;
-  module_env.instance = &temp_instance_for_compilation;
-  module_env.origin = origin;
-  InitializePlaceholders(factory, &module_env.placeholders, functions.size());
-
-  Handle<FixedArray> compiled_functions =
-      factory->NewFixedArray(static_cast<int>(functions.size()), TENURED);
-
-  temp_instance_for_compilation.import_code.resize(import_table.size());
-  for (uint32_t i = 0; i < import_table.size(); ++i) {
-    temp_instance_for_compilation.import_code[i] =
-        CreatePlaceholder(factory, i, Code::WASM_TO_JS_FUNCTION);
-  }
-  isolate->counters()->wasm_functions_per_module()->AddSample(
-      static_cast<int>(functions.size()));
-  if (FLAG_wasm_num_compilation_tasks != 0) {
-    CompileInParallel(isolate, this,
-                      temp_instance_for_compilation.function_code, thrower,
-                      &module_env);
-  } else {
-    CompileSequentially(isolate, this,
-                        temp_instance_for_compilation.function_code, thrower,
-                        &module_env);
-  }
-  if (thrower->error()) return nothing;
-
-  // At this point, compilation has completed. Update the code table.
-  for (size_t i = FLAG_skip_compiling_wasm_funcs;
-       i < temp_instance_for_compilation.function_code.size(); ++i) {
-    Code* code = *temp_instance_for_compilation.function_code[i];
-    compiled_functions->set(static_cast<int>(i), code);
-  }
-
-  // Create the compiled module object, and populate with compiled functions
-  // and information needed at instantiation time. This object needs to be
-  // serializable. Instantiation may occur off a deserialized version of this
-  // object.
-  Handle<FixedArray> ret =
-      factory->NewFixedArray(kCompiledWasmObjectTableSize, TENURED);
-  ret->set(kFunctions, *compiled_functions);
-  if (!indirect_table.is_null()) {
-    ret->set(kTableOfIndirectFunctionTables, *indirect_table.ToHandleChecked());
-  }
-  Handle<FixedArray> import_data = GetImportsMetadata(factory, this);
-  ret->set(kImportData, *import_data);
-
-  // Compile export functions.
-  int export_size = static_cast<int>(export_table.size());
-  Handle<Code> startup_fct;
-  if (export_size > 0) {
-    Handle<FixedArray> exports = factory->NewFixedArray(export_size, TENURED);
-    for (int i = 0; i < export_size; ++i) {
-      Handle<FixedArray> export_metadata =
-          factory->NewFixedArray(kWasmExportMetadataTableSize, TENURED);
-      const WasmExport& exp = export_table[i];
-      FunctionSig* funcSig = functions[exp.func_index].sig;
-      Handle<ByteArray> exportedSig =
-          factory->NewByteArray(static_cast<int>(funcSig->parameter_count() +
-                                                 funcSig->return_count()),
-                                TENURED);
-      exportedSig->copy_in(0,
-                           reinterpret_cast<const byte*>(funcSig->raw_data()),
-                           exportedSig->length());
-      export_metadata->set(kExportedSignature, *exportedSig);
-      WasmName str = GetName(exp.name_offset, exp.name_length);
-      Handle<String> name = factory->InternalizeUtf8String(str);
-      Handle<Code> code =
-          temp_instance_for_compilation.function_code[exp.func_index];
-      Handle<Code> export_code = compiler::CompileJSToWasmWrapper(
-          isolate, &module_env, code, exp.func_index);
-      if (thrower->error()) return nothing;
-      export_metadata->set(kExportCode, *export_code);
-      export_metadata->set(kExportName, *name);
-      export_metadata->set(
-          kExportArity, Smi::FromInt(static_cast<int>(
-                            functions[exp.func_index].sig->parameter_count())));
-      export_metadata->set(kExportedFunctionIndex,
-                           Smi::FromInt(static_cast<int>(exp.func_index)));
-      exports->set(i, *export_metadata);
-      if (exp.func_index == start_function_index) {
-        startup_fct = export_code;
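+  // Patch every function in the code table so its memory and global
+  // references no longer point into the dying instance's buffers.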
+  Object* fct_obj = compiled_module->ptr_to_code_table();
+  if (fct_obj != nullptr && fct_obj != undefined &&
+      (old_mem_size > 0 || globals_start != nullptr)) {
+    FixedArray* functions = FixedArray::cast(fct_obj);
+    for (int i = 0; i < functions->length(); ++i) {
+      Code* code = Code::cast(functions->get(i));
+      bool changed = false;
+      for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
+        RelocInfo::Mode mode = it.rinfo()->rmode();
+        if (RelocInfo::IsWasmMemoryReference(mode) ||
+            RelocInfo::IsWasmMemorySizeReference(mode)) {
+          it.rinfo()->update_wasm_memory_reference(
+              old_mem_address, nullptr, old_mem_size, default_mem_size);
+          changed = true;
+        } else {
+          CHECK(RelocInfo::IsWasmGlobalReference(mode));
+          it.rinfo()->update_wasm_global_reference(globals_start, nullptr);
+          changed = true;
+        }
+      }
+      if (changed) {
+        Assembler::FlushICache(isolate, code->instruction_start(),
+                               code->instruction_size());
       }
     }
-    ret->set(kExports, *exports);
   }
-
-  // Compile startup function, if we haven't already.
-  if (start_function_index >= 0) {
-    uint32_t index = static_cast<uint32_t>(start_function_index);
-    HandleScope scope(isolate);
-    if (startup_fct.is_null()) {
-      Handle<Code> code = temp_instance_for_compilation.function_code[index];
-      DCHECK_EQ(0, functions[index].sig->parameter_count());
-      startup_fct =
-          compiler::CompileJSToWasmWrapper(isolate, &module_env, code, index);
-    }
-    Handle<FixedArray> metadata =
-        factory->NewFixedArray(kWasmExportMetadataTableSize, TENURED);
-    metadata->set(kExportCode, *startup_fct);
-    metadata->set(kExportArity, Smi::FromInt(0));
-    metadata->set(kExportedFunctionIndex, Smi::FromInt(start_function_index));
-    ret->set(kStartupFunction, *metadata);
-  }
-
-  // TODO(wasm): saving the module bytes for debugging is wasteful. We should
-  // consider downloading this on-demand.
-  {
-    size_t module_bytes_len = module_end - module_start;
-    DCHECK_LE(module_bytes_len, static_cast<size_t>(kMaxInt));
-    Vector<const uint8_t> module_bytes_vec(module_start,
-                                           static_cast<int>(module_bytes_len));
-    Handle<String> module_bytes_string =
-        factory->NewStringFromOneByte(module_bytes_vec, TENURED)
-            .ToHandleChecked();
-    ret->set(kModuleBytes, *module_bytes_string);
-  }
-
-  Handle<ByteArray> function_name_table =
-      BuildFunctionNamesTable(isolate, module_env.module);
-  ret->set(kFunctionNameTable, *function_name_table);
-  ret->set(kMinRequiredMemory, Smi::FromInt(min_mem_pages));
-  if (data_segments.size() > 0) SaveDataSegmentInfo(factory, this, ret);
-  ret->set(kGlobalsSize, Smi::FromInt(globals_size));
-  ret->set(kExportMem, Smi::FromInt(mem_export));
-  ret->set(kOrigin, Smi::FromInt(origin));
-  return ret;
+  compiled_module->reset_heap();
 }
 
-void PatchJSWrapper(Isolate* isolate, Handle<Code> wrapper,
-                    Handle<Code> new_target) {
-  AllowDeferredHandleDereference embedding_raw_address;
-  bool seen = false;
-  for (RelocIterator it(*wrapper, 1 << RelocInfo::CODE_TARGET); !it.done();
-       it.next()) {
-    Code* target = Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
-    if (target->kind() == Code::WASM_FUNCTION) {
-      DCHECK(!seen);
-      seen = true;
-      it.rinfo()->set_target_address(new_target->instruction_start(),
-                                     UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
+static void InstanceFinalizer(const v8::WeakCallbackInfo<void>& data) {
+  JSObject** p = reinterpret_cast<JSObject**>(data.GetParameter());
+  JSObject* owner = *p;
+  WasmCompiledModule* compiled_module =
+      WasmCompiledModule::cast(owner->GetInternalField(kWasmCompiledModule));
+  TRACE("Finalizing %d {\n", compiled_module->instance_id());
+  Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
+  DCHECK(compiled_module->has_weak_module_object());
+  WeakCell* weak_module_obj = compiled_module->ptr_to_weak_module_object();
+
+  // weak_module_obj may have been cleared, meaning the module object
+  // was GC-ed. In that case, there won't be any new instances created,
+  // and we don't need to maintain the links between instances.
+  if (!weak_module_obj->cleared()) {
+    JSObject* module_obj = JSObject::cast(weak_module_obj->value());
+    WasmCompiledModule* current_template =
+        WasmCompiledModule::cast(module_obj->GetInternalField(0));
+
+    TRACE("chain before {\n");
+    TRACE_CHAIN(current_template);
+    TRACE("}\n");
+
+    DCHECK(!current_template->has_weak_prev_instance());
+    WeakCell* next = compiled_module->ptr_to_weak_next_instance();
+    WeakCell* prev = compiled_module->ptr_to_weak_prev_instance();
+
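+    // Unlink this instance from the doubly-linked list of instances hanging
+    // off the module object, fixing up the prev/next weak links around it.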
+    if (current_template == compiled_module) {
+      if (next == nullptr) {
+        ResetCompiledModule(isolate, owner, compiled_module);
+      } else {
+        DCHECK(next->value()->IsFixedArray());
+        module_obj->SetInternalField(0, next->value());
+        DCHECK_NULL(prev);
+        WasmCompiledModule::cast(next->value())->reset_weak_prev_instance();
+      }
+    } else {
+      DCHECK(!(prev == nullptr && next == nullptr));
+      // The only reason prev or next would be cleared is if the
+      // respective objects got collected, but if that happened,
+      // we would have relinked the list.
+      if (prev != nullptr) {
+        DCHECK(!prev->cleared());
+        if (next == nullptr) {
+          WasmCompiledModule::cast(prev->value())->reset_weak_next_instance();
+        } else {
+          WasmCompiledModule::cast(prev->value())
+              ->set_ptr_to_weak_next_instance(next);
+        }
+      }
+      if (next != nullptr) {
+        DCHECK(!next->cleared());
+        if (prev == nullptr) {
+          WasmCompiledModule::cast(next->value())->reset_weak_prev_instance();
+        } else {
+          WasmCompiledModule::cast(next->value())
+              ->set_ptr_to_weak_prev_instance(prev);
+        }
+      }
     }
+    TRACE("chain after {\n");
+    TRACE_CHAIN(WasmCompiledModule::cast(module_obj->GetInternalField(0)));
+    TRACE("}\n");
   }
-  CHECK(seen);
-  Assembler::FlushICache(isolate, wrapper->instruction_start(),
-                         wrapper->instruction_size());
+  compiled_module->reset_weak_owning_instance();
+  GlobalHandles::Destroy(reinterpret_cast<Object**>(p));
+  TRACE("}\n");
 }
 
 Handle<FixedArray> SetupIndirectFunctionTable(
     Isolate* isolate, Handle<FixedArray> wasm_functions,
-    Handle<FixedArray> indirect_table_template) {
+    Handle<FixedArray> indirect_table_template,
+    Handle<FixedArray> tables_to_replace) {
   Factory* factory = isolate->factory();
   Handle<FixedArray> cloned_indirect_tables =
       factory->CopyFixedArray(indirect_table_template);
@@ -1239,163 +873,633 @@
     Handle<FixedArray> cloned_table = factory->CopyFixedArray(orig_table);
     cloned_metadata->set(kTable, *cloned_table);
     // Patch the cloned code to refer to the cloned kTable.
-    for (int i = 0; i < wasm_functions->length(); ++i) {
+    Handle<FixedArray> table_to_replace =
+        tables_to_replace->GetValueChecked<FixedArray>(isolate, i)
+            ->GetValueChecked<FixedArray>(isolate, kTable);
+    for (int fct_index = 0; fct_index < wasm_functions->length(); ++fct_index) {
       Handle<Code> wasm_function =
-          wasm_functions->GetValueChecked<Code>(isolate, i);
-      PatchFunctionTable(wasm_function, orig_table, cloned_table);
+          wasm_functions->GetValueChecked<Code>(isolate, fct_index);
+      PatchFunctionTable(wasm_function, table_to_replace, cloned_table);
     }
   }
   return cloned_indirect_tables;
 }
 
-Handle<FixedArray> CloneModuleForInstance(Isolate* isolate,
-                                          Handle<FixedArray> original) {
-  Factory* factory = isolate->factory();
-  Handle<FixedArray> clone = factory->CopyFixedArray(original);
+}  // namespace
 
-  // Clone each wasm code object.
-  Handle<FixedArray> orig_wasm_functions =
-      original->GetValueChecked<FixedArray>(isolate, kFunctions);
-  Handle<FixedArray> clone_wasm_functions =
-      factory->CopyFixedArray(orig_wasm_functions);
-  clone->set(kFunctions, *clone_wasm_functions);
-  for (int i = 0; i < clone_wasm_functions->length(); ++i) {
-    Handle<Code> orig_code =
-        clone_wasm_functions->GetValueChecked<Code>(isolate, i);
-    Handle<Code> cloned_code = factory->CopyCode(orig_code);
-    clone_wasm_functions->set(i, *cloned_code);
+const char* SectionName(WasmSectionCode code) {
+  switch (code) {
+    case kUnknownSectionCode:
+      return "Unknown";
+    case kTypeSectionCode:
+      return "Type";
+    case kImportSectionCode:
+      return "Import";
+    case kFunctionSectionCode:
+      return "Function";
+    case kTableSectionCode:
+      return "Table";
+    case kMemorySectionCode:
+      return "Memory";
+    case kGlobalSectionCode:
+      return "Global";
+    case kExportSectionCode:
+      return "Export";
+    case kStartSectionCode:
+      return "Start";
+    case kCodeSectionCode:
+      return "Code";
+    case kElementSectionCode:
+      return "Element";
+    case kDataSectionCode:
+      return "Data";
+    case kNameSectionCode:
+      return "Name";
+    default:
+      return "<unknown>";
+  }
+}
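+// Example: SectionName(kCodeSectionCode) yields "Code"; values outside the
+// enum fall through to "<unknown>".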
+
+std::ostream& operator<<(std::ostream& os, const WasmModule& module) {
+  os << "WASM module with ";
+  os << (module.min_mem_pages * module.kPageSize) << " min mem, ";
+  os << (module.max_mem_pages * module.kPageSize) << " max mem, ";
+  os << module.functions.size() << " functions, ";
+  os << module.globals.size() << " globals, ";
+  os << module.data_segments.size() << " data segments";
+  return os;
+}
+
+std::ostream& operator<<(std::ostream& os, const WasmFunction& function) {
+  os << "WASM function with signature " << *function.sig;
+
+  os << " code bytes: "
+     << (function.code_end_offset - function.code_start_offset);
+  return os;
+}
+
+std::ostream& operator<<(std::ostream& os, const WasmFunctionName& pair) {
+  os << "#" << pair.function_->func_index << ":";
+  if (pair.function_->name_offset > 0) {
+    if (pair.module_) {
+      WasmName name = pair.module_->GetName(pair.function_->name_offset,
+                                            pair.function_->name_length);
+      os.write(name.start(), name.length());
+    } else {
+      os << "+" << pair.function_->func_index;
+    }
+  } else {
+    os << "?";
+  }
+  return os;
+}
+
+Handle<JSFunction> WrapExportCodeAsJSFunction(
+    Isolate* isolate, Handle<Code> export_code, Handle<String> name, int arity,
+    MaybeHandle<ByteArray> maybe_signature, Handle<JSObject> module_instance) {
+  Handle<SharedFunctionInfo> shared =
+      isolate->factory()->NewSharedFunctionInfo(name, export_code, false);
+  shared->set_length(arity);
+  shared->set_internal_formal_parameter_count(arity);
+  Handle<JSFunction> function = isolate->factory()->NewFunction(
+      isolate->wasm_function_map(), name, export_code);
+  function->set_shared(*shared);
+
+  function->SetInternalField(kInternalModuleInstance, *module_instance);
+  // Store the function arity in a second internal field.
+  function->SetInternalField(kInternalArity, Smi::FromInt(arity));
+  // Store the signature of the wrapped function in a third internal field,
+  // if one was provided.
+  Handle<ByteArray> signature;
+  if (maybe_signature.ToHandle(&signature)) {
+    function->SetInternalField(kInternalSignature, *signature);
+  }
+  return function;
+}
+
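+// Reads the owning instance out of a WASM function's deoptimization data;
+// slot 0 holds a WeakCell back to the instance (set during instantiation
+// below), and a non-WeakCell value means the function has no owner yet.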
+Object* GetOwningWasmInstance(Code* code) {
+  DCHECK(code->kind() == Code::WASM_FUNCTION);
+  DisallowHeapAllocation no_gc;
+  FixedArray* deopt_data = code->deoptimization_data();
+  DCHECK_NOT_NULL(deopt_data);
+  DCHECK(deopt_data->length() == 2);
+  Object* weak_link = deopt_data->get(0);
+  if (!weak_link->IsWeakCell()) return nullptr;
+  WeakCell* cell = WeakCell::cast(weak_link);
+  return cell->value();
+}
+
+uint32_t GetNumImportedFunctions(Handle<JSObject> wasm_object) {
+  return static_cast<uint32_t>(
+      Smi::cast(wasm_object->GetInternalField(kWasmNumImportedFunctions))
+          ->value());
+}
+
+WasmModule::WasmModule(byte* module_start)
+    : module_start(module_start),
+      module_end(nullptr),
+      min_mem_pages(0),
+      max_mem_pages(0),
+      mem_export(false),
+      start_function_index(-1),
+      origin(kWasmOrigin),
+      globals_size(0),
+      num_imported_functions(0),
+      num_declared_functions(0),
+      num_exported_functions(0),
+      pending_tasks(new base::Semaphore(0)) {}
+
+MaybeHandle<WasmCompiledModule> WasmModule::CompileFunctions(
+    Isolate* isolate, ErrorThrower* thrower) const {
+  Factory* factory = isolate->factory();
+
+  MaybeHandle<WasmCompiledModule> nothing;
+
+  WasmModuleInstance temp_instance(this);
+  temp_instance.context = isolate->native_context();
+  temp_instance.mem_size = GetMinModuleMemSize(this);
+  temp_instance.mem_start = nullptr;
+  temp_instance.globals_start = nullptr;
+
+  MaybeHandle<FixedArray> indirect_table =
+      function_tables.size()
+          ? factory->NewFixedArray(static_cast<int>(function_tables.size()),
+                                   TENURED)
+          : MaybeHandle<FixedArray>();
+  for (uint32_t i = 0; i < function_tables.size(); ++i) {
+    Handle<FixedArray> values = wasm::BuildFunctionTable(isolate, i, this);
+    temp_instance.function_tables[i] = values;
+
+    Handle<FixedArray> metadata = isolate->factory()->NewFixedArray(
+        kWasmIndirectFunctionTableDataSize, TENURED);
+    metadata->set(kSize, Smi::FromInt(function_tables[i].size));
+    metadata->set(kTable, *values);
+    indirect_table.ToHandleChecked()->set(i, *metadata);
   }
 
-  MaybeHandle<FixedArray> maybe_orig_exports =
-      original->GetValue<FixedArray>(isolate, kExports);
-  Handle<FixedArray> orig_exports;
-  if (maybe_orig_exports.ToHandle(&orig_exports)) {
-    Handle<FixedArray> cloned_exports = factory->CopyFixedArray(orig_exports);
-    clone->set(kExports, *cloned_exports);
-    for (int i = 0; i < orig_exports->length(); ++i) {
-      Handle<FixedArray> export_metadata =
-          orig_exports->GetValueChecked<FixedArray>(isolate, i);
-      Handle<FixedArray> clone_metadata =
-          factory->CopyFixedArray(export_metadata);
-      cloned_exports->set(i, *clone_metadata);
-      Handle<Code> orig_code =
-          export_metadata->GetValueChecked<Code>(isolate, kExportCode);
-      Handle<Code> cloned_code = factory->CopyCode(orig_code);
-      clone_metadata->set(kExportCode, *cloned_code);
-      // TODO(wasm): This is actually a uint32_t, but since FixedArray indexes
-      // in int, we are taking the risk of invalid values.
-      int exported_fct_index =
-          Smi::cast(export_metadata->get(kExportedFunctionIndex))->value();
-      CHECK_GE(exported_fct_index, 0);
-      CHECK_LT(exported_fct_index, clone_wasm_functions->length());
-      Handle<Code> new_target = clone_wasm_functions->GetValueChecked<Code>(
-          isolate, exported_fct_index);
-      PatchJSWrapper(isolate, cloned_code, new_target);
+  HistogramTimerScope wasm_compile_module_time_scope(
+      isolate->counters()->wasm_compile_module_time());
+
+  ModuleEnv module_env;
+  module_env.module = this;
+  module_env.instance = &temp_instance;
+  module_env.origin = origin;
+
+  // The {code_table} array contains import wrappers and functions (which
+  // are both included in {functions.size()}), and export wrappers.
+  int code_table_size =
+      static_cast<int>(functions.size() + num_exported_functions);
+  Handle<FixedArray> code_table =
+      factory->NewFixedArray(static_cast<int>(code_table_size), TENURED);
+
+  // Initialize the code table with placeholders.
+  for (uint32_t i = 0; i < functions.size(); i++) {
+    Code::Kind kind = Code::WASM_FUNCTION;
+    if (i < num_imported_functions) kind = Code::WASM_TO_JS_FUNCTION;
+    Handle<Code> placeholder = CreatePlaceholder(factory, i, kind);
+    code_table->set(static_cast<int>(i), *placeholder);
+    temp_instance.function_code[i] = placeholder;
+  }
+
+  isolate->counters()->wasm_functions_per_module()->AddSample(
+      static_cast<int>(functions.size()));
+  if (!FLAG_trace_wasm_decoder && FLAG_wasm_num_compilation_tasks != 0) {
+    // Avoid a race condition by collecting results into a second vector.
+    std::vector<Handle<Code>> results;
+    results.reserve(temp_instance.function_code.size());
+    for (size_t i = 0; i < temp_instance.function_code.size(); i++) {
+      results.push_back(temp_instance.function_code[i]);
+    }
+    CompileInParallel(isolate, this, results, thrower, &module_env);
+
+    for (size_t i = 0; i < results.size(); i++) {
+      temp_instance.function_code[i] = results[i];
+    }
+  } else {
+    CompileSequentially(isolate, this, temp_instance.function_code, thrower,
+                        &module_env);
+  }
+  if (thrower->error()) return nothing;
+
+  // At this point, compilation has completed. Update the code table.
+  for (size_t i = FLAG_skip_compiling_wasm_funcs;
+       i < temp_instance.function_code.size(); ++i) {
+    Code* code = *temp_instance.function_code[i];
+    code_table->set(static_cast<int>(i), code);
+  }
+
+  // Link the functions in the module.
+  for (size_t i = FLAG_skip_compiling_wasm_funcs;
+       i < temp_instance.function_code.size(); ++i) {
+    Handle<Code> code = temp_instance.function_code[i];
+    bool modified = LinkFunction(code, temp_instance.function_code);
+    if (modified) {
+      // TODO(mtrofin): do we need to flush the cache here?
+      Assembler::FlushICache(isolate, code->instruction_start(),
+                             code->instruction_size());
     }
   }
 
-  MaybeHandle<FixedArray> maybe_startup =
-      original->GetValue<FixedArray>(isolate, kStartupFunction);
-  if (!maybe_startup.is_null()) {
-    Handle<FixedArray> startup_metadata =
-        factory->CopyFixedArray(maybe_startup.ToHandleChecked());
-    Handle<Code> startup_fct_clone = factory->CopyCode(
-        startup_metadata->GetValueChecked<Code>(isolate, kExportCode));
-    startup_metadata->set(kExportCode, *startup_fct_clone);
-    clone->set(kStartupFunction, *startup_metadata);
-    // TODO(wasm): see todo above about int vs size_t indexing in FixedArray.
-    int startup_fct_index =
-        Smi::cast(startup_metadata->get(kExportedFunctionIndex))->value();
-    CHECK_GE(startup_fct_index, 0);
-    CHECK_LT(startup_fct_index, clone_wasm_functions->length());
-    Handle<Code> new_target =
-        clone_wasm_functions->GetValueChecked<Code>(isolate, startup_fct_index);
-    PatchJSWrapper(isolate, startup_fct_clone, new_target);
+  // Create the compiled module object, and populate with compiled functions
+  // and information needed at instantiation time. This object needs to be
+  // serializable. Instantiation may occur off a deserialized version of this
+  // object.
+  Handle<WasmCompiledModule> ret = WasmCompiledModule::New(
+      isolate, min_mem_pages, globals_size, mem_export, origin);
+  ret->set_code_table(code_table);
+  if (!indirect_table.is_null()) {
+    ret->set_indirect_function_tables(indirect_table.ToHandleChecked());
   }
-  return clone;
+  Handle<FixedArray> import_data = GetImportsData(factory, this);
+  ret->set_import_data(import_data);
+
+  // Compile exported function wrappers.
+  int export_size = static_cast<int>(num_exported_functions);
+  if (export_size > 0) {
+    Handle<FixedArray> exports = factory->NewFixedArray(export_size, TENURED);
+    int index = -1;
+
+    for (const WasmExport& exp : export_table) {
+      if (exp.kind != kExternalFunction)
+        continue;  // skip non-function exports.
+      index++;
+      Handle<FixedArray> export_data =
+          factory->NewFixedArray(kWasmExportDataSize, TENURED);
+      FunctionSig* funcSig = functions[exp.index].sig;
+      Handle<ByteArray> exportedSig =
+          factory->NewByteArray(static_cast<int>(funcSig->parameter_count() +
+                                                 funcSig->return_count()),
+                                TENURED);
+      exportedSig->copy_in(0,
+                           reinterpret_cast<const byte*>(funcSig->raw_data()),
+                           exportedSig->length());
+      export_data->set(kExportedSignature, *exportedSig);
+      WasmName str = GetName(exp.name_offset, exp.name_length);
+      Handle<String> name = factory->InternalizeUtf8String(str);
+      Handle<Code> code = code_table->GetValueChecked<Code>(isolate, exp.index);
+      Handle<Code> export_code = compiler::CompileJSToWasmWrapper(
+          isolate, &module_env, code, exp.index);
+      if (thrower->error()) return nothing;
+      export_data->set(kExportName, *name);
+      export_data->set(kExportArity,
+                       Smi::FromInt(static_cast<int>(
+                           functions[exp.index].sig->parameter_count())));
+      export_data->set(kExportedFunctionIndex,
+                       Smi::FromInt(static_cast<int>(exp.index)));
+      exports->set(index, *export_data);
+      code_table->set(static_cast<int>(functions.size() + index), *export_code);
+    }
+    ret->set_exports(exports);
+  }
+
+  // Record data for startup function.
+  if (start_function_index >= 0) {
+    HandleScope scope(isolate);
+    Handle<FixedArray> startup_data =
+        factory->NewFixedArray(kWasmExportDataSize, TENURED);
+    startup_data->set(kExportArity, Smi::FromInt(0));
+    startup_data->set(kExportedFunctionIndex,
+                      Smi::FromInt(start_function_index));
+    ret->set_startup_function(startup_data);
+  }
+
+  // TODO(wasm): saving the module bytes for debugging is wasteful. We should
+  // consider downloading this on-demand.
+  {
+    size_t module_bytes_len = module_end - module_start;
+    DCHECK_LE(module_bytes_len, static_cast<size_t>(kMaxInt));
+    Vector<const uint8_t> module_bytes_vec(module_start,
+                                           static_cast<int>(module_bytes_len));
+    Handle<String> module_bytes_string =
+        factory->NewStringFromOneByte(module_bytes_vec, TENURED)
+            .ToHandleChecked();
+    ret->set_module_bytes(module_bytes_string);
+  }
+
+  Handle<ByteArray> function_name_table =
+      BuildFunctionNamesTable(isolate, module_env.module);
+  ret->set_function_names(function_name_table);
+  if (data_segments.size() > 0) SaveDataSegmentInfo(factory, this, ret);
+  DCHECK_EQ(ret->default_mem_size(), temp_instance.mem_size);
+  return ret;
 }
 
-// Instantiates a wasm module as a JSObject.
-//  * allocates a backing store of {mem_size} bytes.
-//  * installs a named property "memory" for that buffer if exported
-//  * installs named properties on the object for exported functions
-//  * compiles wasm code to machine code
-MaybeHandle<JSObject> WasmModule::Instantiate(
-    Isolate* isolate, Handle<FixedArray> compiled_module,
-    Handle<JSReceiver> ffi, Handle<JSArrayBuffer> memory) {
+// Instantiates a WASM module, creating a WebAssembly.Instance from a
+// WebAssembly.Module.
+MaybeHandle<JSObject> WasmModule::Instantiate(Isolate* isolate,
+                                              ErrorThrower* thrower,
+                                              Handle<JSObject> module_object,
+                                              Handle<JSReceiver> ffi,
+                                              Handle<JSArrayBuffer> memory) {
+  MaybeHandle<JSObject> nothing;
   HistogramTimerScope wasm_instantiate_module_time_scope(
       isolate->counters()->wasm_instantiate_module_time());
-  ErrorThrower thrower(isolate, "WasmModule::Instantiate()");
   Factory* factory = isolate->factory();
 
-  compiled_module = CloneModuleForInstance(isolate, compiled_module);
+  //--------------------------------------------------------------------------
+  // Reuse the compiled module (if no owner), otherwise clone.
+  //--------------------------------------------------------------------------
+  Handle<WasmCompiledModule> compiled_module;
+  Handle<FixedArray> code_table;
+  Handle<FixedArray> old_code_table;
+  Handle<JSObject> owner;
+  // If we don't clone, this will be null(). Otherwise, this will
+  // be a weak link to the original. If we lose the original to GC,
+  // this will be cleared. We'll link the instances chain last.
+  MaybeHandle<WeakCell> link_to_original;
 
-  // These fields are compulsory.
-  Handle<FixedArray> code_table =
-      compiled_module->GetValueChecked<FixedArray>(isolate, kFunctions);
+  TRACE("Starting new module instantiation\n");
+  {
+    Handle<WasmCompiledModule> original(
+        WasmCompiledModule::cast(module_object->GetInternalField(0)), isolate);
+    // Always make a new copy of the code_table, since the old_code_table
+    // may still have placeholders for imports.
+    old_code_table = original->code_table();
+    code_table = factory->CopyFixedArray(old_code_table);
 
-  std::vector<Handle<Code>> functions(
-      static_cast<size_t>(code_table->length()));
-  for (int i = 0; i < code_table->length(); ++i) {
-    functions[static_cast<size_t>(i)] =
-        code_table->GetValueChecked<Code>(isolate, i);
+    if (original->has_weak_owning_instance()) {
+      WeakCell* tmp = original->ptr_to_weak_owning_instance();
+      DCHECK(!tmp->cleared());
+      // There is already an owner, clone everything.
+      owner = Handle<JSObject>(JSObject::cast(tmp->value()), isolate);
+      // Insert the latest clone in front.
+      TRACE("Cloning from %d\n", original->instance_id());
+      compiled_module = WasmCompiledModule::Clone(isolate, original);
+      // Replace the strong reference to point to the new instance here.
+      // This allows any of the other instances, including the original,
+      // to be collected.
+      module_object->SetInternalField(0, *compiled_module);
+      compiled_module->set_weak_module_object(original->weak_module_object());
+      link_to_original = factory->NewWeakCell(original);
+      // Don't link to original here. We remember the original
+      // as a weak link. If that link isn't cleared by the time we finish
+      // instantiating this instance, then we link it at that time.
+      compiled_module->reset_weak_next_instance();
+
+      // Clone the code for WASM functions and exports.
+      for (int i = 0; i < code_table->length(); ++i) {
+        Handle<Code> orig_code = code_table->GetValueChecked<Code>(isolate, i);
+        switch (orig_code->kind()) {
+          case Code::WASM_TO_JS_FUNCTION:
+            // Imports will be overwritten with newly compiled wrappers.
+            break;
+          case Code::JS_TO_WASM_FUNCTION:
+          case Code::WASM_FUNCTION: {
+            Handle<Code> code = factory->CopyCode(orig_code);
+            code_table->set(i, *code);
+            break;
+          }
+          default:
+            UNREACHABLE();
+        }
+      }
+      RecordStats(isolate, code_table);
+    } else {
+      // There was no owner, so we can reuse the original.
+      compiled_module = original;
+      TRACE("Reusing existing instance %d\n", compiled_module->instance_id());
+    }
+    compiled_module->set_code_table(code_table);
   }
-  LinkModuleFunctions(isolate, functions);
 
-  RecordStats(isolate, code_table);
-
-  MaybeHandle<JSObject> nothing;
-
+  //--------------------------------------------------------------------------
+  // Allocate the instance object.
+  //--------------------------------------------------------------------------
   Handle<Map> map = factory->NewMap(
       JS_OBJECT_TYPE,
       JSObject::kHeaderSize + kWasmModuleInternalFieldCount * kPointerSize);
-  Handle<JSObject> js_object = factory->NewJSObjectFromMap(map, TENURED);
-  js_object->SetInternalField(kWasmModuleCodeTable, *code_table);
+  Handle<JSObject> instance = factory->NewJSObjectFromMap(map, TENURED);
+  instance->SetInternalField(kWasmModuleCodeTable, *code_table);
 
-  if (!(SetupInstanceHeap(isolate, compiled_module, js_object, memory,
-                          &thrower) &&
-        SetupGlobals(isolate, compiled_module, js_object, &thrower) &&
-        SetupImports(isolate, compiled_module, js_object, &thrower, ffi) &&
-        SetupExportsObject(compiled_module, isolate, js_object, &thrower))) {
-    return nothing;
+  //--------------------------------------------------------------------------
+  // Set up the memory for the new instance.
+  //--------------------------------------------------------------------------
+  MaybeHandle<JSArrayBuffer> old_memory;
+  // TODO(titzer): handle imported memory properly.
+
+  uint32_t min_mem_pages = compiled_module->min_memory_pages();
+  isolate->counters()->wasm_min_mem_pages_count()->AddSample(min_mem_pages);
+  // TODO(wasm): re-enable counter for max_mem_pages when we use that field.
+
+  if (memory.is_null() && min_mem_pages > 0) {
+    memory = AllocateMemory(thrower, isolate, min_mem_pages);
+    if (memory.is_null()) return nothing;  // failed to allocate memory
   }
 
-  SetDebugSupport(factory, compiled_module, js_object);
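+  // Relocate the cloned code from the old memory base/size (the template's or
+  // the previous owner's) to the newly attached buffer, then remember that
+  // buffer on the compiled module.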
+  if (!memory.is_null()) {
+    instance->SetInternalField(kWasmMemArrayBuffer, *memory);
+    Address mem_start = static_cast<Address>(memory->backing_store());
+    uint32_t mem_size = static_cast<uint32_t>(memory->byte_length()->Number());
+    LoadDataSegments(compiled_module, mem_start, mem_size);
 
-  FlushAssemblyCache(isolate, code_table);
+    uint32_t old_mem_size = compiled_module->has_heap()
+                                ? compiled_module->mem_size()
+                                : compiled_module->default_mem_size();
+    Address old_mem_start =
+        compiled_module->has_heap()
+            ? static_cast<Address>(compiled_module->heap()->backing_store())
+            : nullptr;
+    RelocateInstanceCode(instance, old_mem_start, mem_start, old_mem_size,
+                         mem_size);
+    compiled_module->set_heap(memory);
+  }
 
-  MaybeHandle<FixedArray> maybe_indirect_tables =
-      compiled_module->GetValue<FixedArray>(isolate,
-                                            kTableOfIndirectFunctionTables);
-  Handle<FixedArray> indirect_tables_template;
-  if (maybe_indirect_tables.ToHandle(&indirect_tables_template)) {
-    Handle<FixedArray> indirect_tables = SetupIndirectFunctionTable(
-        isolate, code_table, indirect_tables_template);
-    for (int i = 0; i < indirect_tables->length(); ++i) {
-      Handle<FixedArray> metadata =
-          indirect_tables->GetValueChecked<FixedArray>(isolate, i);
-      uint32_t size = Smi::cast(metadata->get(kSize))->value();
-      Handle<FixedArray> table =
-          metadata->GetValueChecked<FixedArray>(isolate, kTable);
-      wasm::PopulateFunctionTable(table, size, &functions);
+  //--------------------------------------------------------------------------
+  // Set up the globals for the new instance.
+  //--------------------------------------------------------------------------
+  MaybeHandle<JSArrayBuffer> old_globals;
+  MaybeHandle<JSArrayBuffer> globals;
+  uint32_t globals_size = compiled_module->globals_size();
+  if (globals_size > 0) {
+    Handle<JSArrayBuffer> global_buffer = NewArrayBuffer(isolate, globals_size);
+    globals = global_buffer;
+    if (globals.is_null()) {
+      thrower->Error("Out of memory: wasm globals");
+      return nothing;
     }
-    js_object->SetInternalField(kWasmModuleFunctionTable, *indirect_tables);
+    Address old_address =
+        owner.is_null() ? nullptr : GetGlobalStartAddressFromCodeTemplate(
+                                        *isolate->factory()->undefined_value(),
+                                        JSObject::cast(*owner));
+    RelocateGlobals(instance, old_address,
+                    static_cast<Address>(global_buffer->backing_store()));
+    instance->SetInternalField(kWasmGlobalsArrayBuffer, *global_buffer);
   }
 
+  //--------------------------------------------------------------------------
+  // Compile the import wrappers for the new instance.
+  //--------------------------------------------------------------------------
+  // TODO(titzer): handle imported globals and function tables.
+  int num_imported_functions = 0;
+  if (compiled_module->has_import_data()) {
+    Handle<FixedArray> import_data = compiled_module->import_data();
+    num_imported_functions = import_data->length();
+    for (int index = 0; index < num_imported_functions; index++) {
+      Handle<Code> import_wrapper =
+          CompileImportWrapper(isolate, ffi, index, import_data, thrower);
+      if (thrower->error()) return nothing;
+      code_table->set(index, *import_wrapper);
+      RecordStats(isolate, *import_wrapper);
+    }
+  }
+
+  //--------------------------------------------------------------------------
+  // Set up the debug support for the new instance.
+  //--------------------------------------------------------------------------
+  // TODO(wasm): avoid referencing this stuff from the instance, use it off
+  // the compiled module instead. See the following 3 assignments:
+  if (compiled_module->has_module_bytes()) {
+    instance->SetInternalField(kWasmModuleBytesString,
+                               compiled_module->ptr_to_module_bytes());
+  }
+
+  if (compiled_module->has_function_names()) {
+    instance->SetInternalField(kWasmFunctionNamesArray,
+                               compiled_module->ptr_to_function_names());
+  }
+
+  {
+    Handle<Object> handle = factory->NewNumber(num_imported_functions);
+    instance->SetInternalField(kWasmNumImportedFunctions, *handle);
+  }
+
+  //--------------------------------------------------------------------------
+  // Set up the runtime support for the new instance.
+  //--------------------------------------------------------------------------
+  Handle<WeakCell> weak_link = isolate->factory()->NewWeakCell(instance);
+
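+  // Give every WASM function a 2-element deoptimization data array: slot 0 is
+  // a weak link back to this instance, slot 1 is the function index.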
+  for (int i = num_imported_functions + FLAG_skip_compiling_wasm_funcs;
+       i < code_table->length(); ++i) {
+    Handle<Code> code = code_table->GetValueChecked<Code>(isolate, i);
+    if (code->kind() == Code::WASM_FUNCTION) {
+      Handle<FixedArray> deopt_data =
+          isolate->factory()->NewFixedArray(2, TENURED);
+      deopt_data->set(0, *weak_link);
+      deopt_data->set(1, Smi::FromInt(static_cast<int>(i)));
+      deopt_data->set_length(2);
+      code->set_deoptimization_data(*deopt_data);
+    }
+  }
+
+  //--------------------------------------------------------------------------
+  // Set up the indirect function tables for the new instance.
+  //--------------------------------------------------------------------------
+  {
+    std::vector<Handle<Code>> functions(
+        static_cast<size_t>(code_table->length()));
+    for (int i = 0; i < code_table->length(); ++i) {
+      functions[i] = code_table->GetValueChecked<Code>(isolate, i);
+    }
+
+    if (compiled_module->has_indirect_function_tables()) {
+      Handle<FixedArray> indirect_tables_template =
+          compiled_module->indirect_function_tables();
+      Handle<FixedArray> to_replace =
+          owner.is_null() ? indirect_tables_template
+                          : handle(FixedArray::cast(owner->GetInternalField(
+                                kWasmModuleFunctionTable)));
+      Handle<FixedArray> indirect_tables = SetupIndirectFunctionTable(
+          isolate, code_table, indirect_tables_template, to_replace);
+      for (int i = 0; i < indirect_tables->length(); ++i) {
+        Handle<FixedArray> metadata =
+            indirect_tables->GetValueChecked<FixedArray>(isolate, i);
+        uint32_t size = Smi::cast(metadata->get(kSize))->value();
+        Handle<FixedArray> table =
+            metadata->GetValueChecked<FixedArray>(isolate, kTable);
+        PopulateFunctionTable(table, size, &functions);
+      }
+      instance->SetInternalField(kWasmModuleFunctionTable, *indirect_tables);
+    }
+  }
+
+  //--------------------------------------------------------------------------
+  // Set up the exports object for the new instance.
+  //--------------------------------------------------------------------------
+  bool mem_export = compiled_module->export_memory();
+  ModuleOrigin origin = compiled_module->origin();
+
+  if (compiled_module->has_exports() || mem_export) {
+    PropertyDescriptor desc;
+    desc.set_writable(false);
+
+    Handle<JSObject> exports_object = instance;
+    if (origin == kWasmOrigin) {
+      // Create the "exports" object.
+      Handle<JSFunction> object_function = Handle<JSFunction>(
+          isolate->native_context()->object_function(), isolate);
+      exports_object = factory->NewJSObject(object_function, TENURED);
+      Handle<String> exports_name = factory->InternalizeUtf8String("exports");
+      JSObject::AddProperty(instance, exports_name, exports_object, READ_ONLY);
+    }
+    int first_export = -1;
+    // TODO(wasm): another iteration over the code objects.
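+    // Export wrappers (JS_TO_WASM_FUNCTION code) are assumed to be laid out
+    // contiguously in the code table, so the wrapper for export i is found at
+    // first_export + i.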
+    for (int i = 0; i < code_table->length(); i++) {
+      Handle<Code> code = code_table->GetValueChecked<Code>(isolate, i);
+      if (code->kind() == Code::JS_TO_WASM_FUNCTION) {
+        first_export = i;
+        break;
+      }
+    }
+    if (compiled_module->has_exports()) {
+      Handle<FixedArray> exports = compiled_module->exports();
+      int export_size = exports->length();
+      for (int i = 0; i < export_size; ++i) {
+        Handle<FixedArray> export_data =
+            exports->GetValueChecked<FixedArray>(isolate, i);
+        Handle<String> name =
+            export_data->GetValueChecked<String>(isolate, kExportName);
+        int arity = Smi::cast(export_data->get(kExportArity))->value();
+        MaybeHandle<ByteArray> signature =
+            export_data->GetValue<ByteArray>(isolate, kExportedSignature);
+        Handle<Code> export_code =
+            code_table->GetValueChecked<Code>(isolate, first_export + i);
+        Handle<JSFunction> function = WrapExportCodeAsJSFunction(
+            isolate, export_code, name, arity, signature, instance);
+        desc.set_value(function);
+        Maybe<bool> status = JSReceiver::DefineOwnProperty(
+            isolate, exports_object, name, &desc, Object::THROW_ON_ERROR);
+        if (!status.IsJust()) {
+          thrower->Error("export of %.*s failed.", name->length(),
+                         name->ToCString().get());
+          return nothing;
+        }
+      }
+    }
+    if (mem_export) {
+      // Export the memory as a named property.
+      Handle<JSArrayBuffer> buffer = Handle<JSArrayBuffer>(
+          JSArrayBuffer::cast(instance->GetInternalField(kWasmMemArrayBuffer)));
+      Handle<Object> memory_object =
+          WasmJs::CreateWasmMemoryObject(isolate, buffer, false, 0);
+      // TODO(titzer): export the memory with the correct name.
+      Handle<String> name = factory->InternalizeUtf8String("memory");
+      JSObject::AddProperty(exports_object, name, memory_object, READ_ONLY);
+    }
+  }
+
+  if (num_imported_functions > 0 || !owner.is_null()) {
+    // If the code was cloned, or new imports were compiled, patch the direct
+    // calls.
+    PatchDirectCalls(old_code_table, code_table, num_imported_functions);
+  }
+
+  FlushICache(isolate, code_table);
+
+  //--------------------------------------------------------------------------
   // Run the start function if one was specified.
-  MaybeHandle<FixedArray> maybe_startup_fct =
-      compiled_module->GetValue<FixedArray>(isolate, kStartupFunction);
-  Handle<FixedArray> metadata;
-  if (maybe_startup_fct.ToHandle(&metadata)) {
+  //--------------------------------------------------------------------------
+  if (compiled_module->has_startup_function()) {
+    Handle<FixedArray> startup_data = compiled_module->startup_function();
     HandleScope scope(isolate);
+    int32_t start_index =
+        startup_data->GetValueChecked<Smi>(isolate, kExportedFunctionIndex)
+            ->value();
     Handle<Code> startup_code =
-        metadata->GetValueChecked<Code>(isolate, kExportCode);
-    int arity = Smi::cast(metadata->get(kExportArity))->value();
+        code_table->GetValueChecked<Code>(isolate, start_index);
+    int arity = Smi::cast(startup_data->get(kExportArity))->value();
     MaybeHandle<ByteArray> startup_signature =
-        metadata->GetValue<ByteArray>(isolate, kExportedSignature);
+        startup_data->GetValue<ByteArray>(isolate, kExportedSignature);
     Handle<JSFunction> startup_fct = WrapExportCodeAsJSFunction(
         isolate, startup_code, factory->InternalizeUtf8String("start"), arity,
-        startup_signature, js_object);
+        startup_signature, instance);
     RecordStats(isolate, *startup_code);
     // Call the JS function.
     Handle<Object> undefined = isolate->factory()->undefined_value();
@@ -1403,35 +1507,86 @@
         Execution::Call(isolate, startup_fct, undefined, 0, nullptr);
 
     if (retval.is_null()) {
-      thrower.Error("WASM.instantiateModule(): start function failed");
+      thrower->Error("WASM.instantiateModule(): start function failed");
       return nothing;
     }
   }
 
-  DCHECK(wasm::IsWasmObject(*js_object));
-  return js_object;
+  DCHECK(wasm::IsWasmObject(*instance));
+
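+  // Link this compiled module into the per-module chain of live instances
+  // (via weak prev/next cells) and register InstanceFinalizer as a weak
+  // callback on the instance's global handle.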
+  {
+    Handle<WeakCell> link_to_owner = factory->NewWeakCell(instance);
+
+    Handle<Object> global_handle = isolate->global_handles()->Create(*instance);
+    Handle<WeakCell> link_to_clone = factory->NewWeakCell(compiled_module);
+    {
+      DisallowHeapAllocation no_gc;
+      compiled_module->set_weak_owning_instance(link_to_owner);
+      Handle<WeakCell> next;
+      if (link_to_original.ToHandle(&next) && !next->cleared()) {
+        WasmCompiledModule* original = WasmCompiledModule::cast(next->value());
+        DCHECK(original->has_weak_owning_instance());
+        DCHECK(!original->weak_owning_instance()->cleared());
+        compiled_module->set_weak_next_instance(next);
+        original->set_weak_prev_instance(link_to_clone);
+      }
+
+      compiled_module->set_weak_owning_instance(link_to_owner);
+      instance->SetInternalField(kWasmCompiledModule, *compiled_module);
+      GlobalHandles::MakeWeak(global_handle.location(),
+                              global_handle.location(), &InstanceFinalizer,
+                              v8::WeakCallbackType::kFinalizer);
+    }
+  }
+  TRACE("Finishing instance %d\n", compiled_module->instance_id());
+  TRACE_CHAIN(WasmCompiledModule::cast(module_object->GetInternalField(0)));
+  return instance;
 }
 
-// TODO(mtrofin): remove this once we move to WASM_DIRECT_CALL
-Handle<Code> ModuleEnv::GetCodeOrPlaceholder(uint32_t index) const {
-  DCHECK(IsValidFunction(index));
-  if (!placeholders.empty()) return placeholders[index];
-  DCHECK_NOT_NULL(instance);
-  return instance->function_code[index];
+#if DEBUG
+uint32_t WasmCompiledModule::instance_id_counter_ = 0;
+#endif
+
+Handle<WasmCompiledModule> WasmCompiledModule::New(Isolate* isolate,
+                                                   uint32_t min_memory_pages,
+                                                   uint32_t globals_size,
+                                                   bool export_memory,
+                                                   ModuleOrigin origin) {
+  Handle<FixedArray> ret =
+      isolate->factory()->NewFixedArray(PropertyIndices::Count, TENURED);
+  // Globals size is expected to fit into an int without overflow. This is not
+  // supported by the spec at the moment; however, we don't support array
+  // buffer sizes over 1 GB, so, for now, we avoid allocating a HeapNumber for
+  // the globals size. The CHECK guards this assumption.
+  CHECK_GE(static_cast<int>(globals_size), 0);
+  ret->set(kID_min_memory_pages,
+           Smi::FromInt(static_cast<int>(min_memory_pages)));
+  ret->set(kID_globals_size, Smi::FromInt(static_cast<int>(globals_size)));
+  ret->set(kID_export_memory, Smi::FromInt(static_cast<int>(export_memory)));
+  ret->set(kID_origin, Smi::FromInt(static_cast<int>(origin)));
+  WasmCompiledModule::cast(*ret)->Init();
+  return handle(WasmCompiledModule::cast(*ret));
 }
 
-Handle<Code> ModuleEnv::GetImportCode(uint32_t index) {
-  DCHECK(IsValidImport(index));
-  return instance ? instance->import_code[index] : Handle<Code>::null();
+void WasmCompiledModule::Init() {
+#if DEBUG
+  set(kID_instance_id, Smi::FromInt(instance_id_counter_++));
+  TRACE("New compiled module id: %d\n", instance_id());
+#endif
 }
 
-compiler::CallDescriptor* ModuleEnv::GetCallDescriptor(Zone* zone,
-                                                       uint32_t index) {
-  DCHECK(IsValidFunction(index));
-  // Always make a direct call to whatever is in the table at that location.
-  // A wrapper will be generated for FFI calls.
-  const WasmFunction* function = &module->functions[index];
-  return GetWasmCallDescriptor(zone, function->sig);
+void WasmCompiledModule::PrintInstancesChain() {
+#if DEBUG
+  if (!FLAG_trace_wasm_instances) return;
+  for (WasmCompiledModule* current = this; current != nullptr;) {
+    PrintF("->%d", current->instance_id());
+    if (current->ptr_to_weak_next_instance() == nullptr) break;
+    CHECK(!current->ptr_to_weak_next_instance()->cleared());
+    current =
+        WasmCompiledModule::cast(current->ptr_to_weak_next_instance()->value());
+  }
+  PrintF("\n");
+#endif
 }
 
 Handle<Object> GetWasmFunctionNameOrNull(Isolate* isolate, Handle<Object> wasm,
@@ -1577,93 +1732,188 @@
   return ByteArray::cast(func_names_obj)->get_int(0);
 }
 
-Handle<JSObject> CreateCompiledModuleObject(
-    Isolate* isolate, Handle<FixedArray> compiled_module) {
-  Handle<JSFunction> module_cons(
-      isolate->native_context()->wasm_module_constructor());
-  Handle<JSObject> module_obj = isolate->factory()->NewJSObject(module_cons);
+Handle<JSObject> CreateCompiledModuleObject(Isolate* isolate,
+                                            Handle<FixedArray> compiled_module,
+                                            ModuleOrigin origin) {
+  Handle<JSObject> module_obj;
+  if (origin == ModuleOrigin::kWasmOrigin) {
+    Handle<JSFunction> module_cons(
+        isolate->native_context()->wasm_module_constructor());
+    module_obj = isolate->factory()->NewJSObject(module_cons);
+  } else {
+    DCHECK(origin == ModuleOrigin::kAsmJsOrigin);
+    Handle<Map> map = isolate->factory()->NewMap(
+        JS_OBJECT_TYPE, JSObject::kHeaderSize + kPointerSize);
+    module_obj = isolate->factory()->NewJSObjectFromMap(map, TENURED);
+  }
   module_obj->SetInternalField(0, *compiled_module);
-  Handle<Symbol> module_sym(isolate->native_context()->wasm_module_sym());
-  Object::SetProperty(module_obj, module_sym, module_obj, STRICT).Check();
+  if (origin == ModuleOrigin::kWasmOrigin) {
+    Handle<Symbol> module_sym(isolate->native_context()->wasm_module_sym());
+    Object::SetProperty(module_obj, module_sym, module_obj, STRICT).Check();
+  }
+  Handle<WeakCell> link_to_module = isolate->factory()->NewWeakCell(module_obj);
+  WasmCompiledModule::cast(*compiled_module)
+      ->set_weak_module_object(link_to_module);
   return module_obj;
 }
 
+MaybeHandle<JSObject> CreateModuleObjectFromBytes(Isolate* isolate,
+                                                  const byte* start,
+                                                  const byte* end,
+                                                  ErrorThrower* thrower,
+                                                  ModuleOrigin origin) {
+  MaybeHandle<JSObject> nothing;
+  Zone zone(isolate->allocator());
+  ModuleResult result =
+      DecodeWasmModule(isolate, &zone, start, end, false, origin);
+  std::unique_ptr<const WasmModule> decoded_module(result.val);
+  if (result.failed()) {
+    thrower->Failed("Wasm decoding failed", result);
+    return nothing;
+  }
+  MaybeHandle<FixedArray> compiled_module =
+      decoded_module->CompileFunctions(isolate, thrower);
+  if (compiled_module.is_null()) return nothing;
+
+  return CreateCompiledModuleObject(isolate, compiled_module.ToHandleChecked(),
+                                    origin);
+}
+
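+// Decodes the given byte range without compiling it; returns true iff the
+// module decodes successfully.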
+bool ValidateModuleBytes(Isolate* isolate, const byte* start, const byte* end,
+                         ErrorThrower* thrower, ModuleOrigin origin) {
+  Zone zone(isolate->allocator());
+  ModuleResult result =
+      DecodeWasmModule(isolate, &zone, start, end, false, origin);
+  if (result.ok()) {
+    DCHECK_NOT_NULL(result.val);
+    delete result.val;
+    return true;
+  }
+  return false;
+}
+
+MaybeHandle<JSArrayBuffer> GetInstanceMemory(Isolate* isolate,
+                                             Handle<JSObject> instance) {
+  Object* mem = instance->GetInternalField(kWasmMemArrayBuffer);
+  DCHECK(IsWasmObject(*instance));
+  if (mem->IsUndefined(isolate)) return MaybeHandle<JSArrayBuffer>();
+  return Handle<JSArrayBuffer>(JSArrayBuffer::cast(mem));
+}
+
+void SetInstanceMemory(Handle<JSObject> instance, JSArrayBuffer* buffer) {
+  DisallowHeapAllocation no_gc;
+  DCHECK(IsWasmObject(*instance));
+  instance->SetInternalField(kWasmMemArrayBuffer, buffer);
+  WasmCompiledModule* module =
+      WasmCompiledModule::cast(instance->GetInternalField(kWasmCompiledModule));
+  module->set_ptr_to_heap(buffer);
+}
+
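+// Returns the size of the instance's memory in 64k pages, or 0 if the
+// instance has no memory attached.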
+int32_t GetInstanceMemorySize(Isolate* isolate, Handle<JSObject> instance) {
+  MaybeHandle<JSArrayBuffer> maybe_mem_buffer =
+      GetInstanceMemory(isolate, instance);
+  Handle<JSArrayBuffer> buffer;
+  if (!maybe_mem_buffer.ToHandle(&buffer)) {
+    return 0;
+  } else {
+    return buffer->byte_length()->Number() / WasmModule::kPageSize;
+  }
+}
+
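+// Grows the instance's memory by {pages} 64k pages. Returns the previous
+// memory size in pages, or -1 on failure (e.g. allocation failure or
+// exceeding the maximum number of pages).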
+int32_t GrowInstanceMemory(Isolate* isolate, Handle<JSObject> instance,
+                           uint32_t pages) {
+  Address old_mem_start = nullptr;
+  uint32_t old_size = 0, new_size = 0;
+
+  MaybeHandle<JSArrayBuffer> maybe_mem_buffer =
+      GetInstanceMemory(isolate, instance);
+  Handle<JSArrayBuffer> old_buffer;
+  if (!maybe_mem_buffer.ToHandle(&old_buffer)) {
+    // If the module object does not have linear memory associated with it,
+    // allocate a new array buffer of the given size.
+    // TODO(gdeepti): Fix bounds check to take into account size of memtype.
+    new_size = pages * WasmModule::kPageSize;
+    // The code generated in the wasm compiler guarantees this precondition.
+    DCHECK(pages <= WasmModule::kMaxMemPages);
+  } else {
+    old_mem_start = static_cast<Address>(old_buffer->backing_store());
+    old_size = old_buffer->byte_length()->Number();
+    // If the old memory was zero-sized, we should have been in the
+    // "undefined" case above.
+    DCHECK_NOT_NULL(old_mem_start);
+    DCHECK_NE(0, old_size);
+    DCHECK(old_size + pages * WasmModule::kPageSize <=
+           std::numeric_limits<uint32_t>::max());
+    new_size = old_size + pages * WasmModule::kPageSize;
+  }
+
+  if (new_size <= old_size ||
+      WasmModule::kMaxMemPages * WasmModule::kPageSize <= new_size) {
+    return -1;
+  }
+  Handle<JSArrayBuffer> buffer = NewArrayBuffer(isolate, new_size);
+  if (buffer.is_null()) return -1;
+  Address new_mem_start = static_cast<Address>(buffer->backing_store());
+  if (old_size != 0) {
+    memcpy(new_mem_start, old_mem_start, old_size);
+  }
+  SetInstanceMemory(instance, *buffer);
+  if (!UpdateWasmModuleMemory(instance, old_mem_start, new_mem_start, old_size,
+                              new_size)) {
+    return -1;
+  }
+  DCHECK(old_size % WasmModule::kPageSize == 0);
+  return (old_size / WasmModule::kPageSize);
+}
+
 namespace testing {
 
-int32_t CompileAndRunWasmModule(Isolate* isolate, const byte* module_start,
-                                const byte* module_end, bool asm_js) {
-  HandleScope scope(isolate);
-  Zone zone(isolate->allocator());
-  ErrorThrower thrower(isolate, "CompileAndRunWasmModule");
-
-  // Decode the module, but don't verify function bodies, since we'll
-  // be compiling them anyway.
-  ModuleResult decoding_result =
-      DecodeWasmModule(isolate, &zone, module_start, module_end, false,
-                       asm_js ? kAsmJsOrigin : kWasmOrigin);
-
-  std::unique_ptr<const WasmModule> module(decoding_result.val);
-  if (decoding_result.failed()) {
-    // Module verification failed. throw.
-    thrower.Error("WASM.compileRun() failed: %s",
-                  decoding_result.error_msg.get());
-    return -1;
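+// Checks that the weak prev/next links rooted at {module_obj} form a
+// well-formed chain containing exactly {instance_count} instances.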
+void ValidateInstancesChain(Isolate* isolate, Handle<JSObject> module_obj,
+                            int instance_count) {
+  CHECK_GE(instance_count, 0);
+  DisallowHeapAllocation no_gc;
+  WasmCompiledModule* compiled_module =
+      WasmCompiledModule::cast(module_obj->GetInternalField(0));
+  CHECK_EQ(
+      JSObject::cast(compiled_module->ptr_to_weak_module_object()->value()),
+      *module_obj);
+  Object* prev = nullptr;
+  int found_instances = compiled_module->has_weak_owning_instance() ? 1 : 0;
+  WasmCompiledModule* current_instance = compiled_module;
+  while (current_instance->has_weak_next_instance()) {
+    CHECK((prev == nullptr && !current_instance->has_weak_prev_instance()) ||
+          current_instance->ptr_to_weak_prev_instance()->value() == prev);
+    CHECK_EQ(current_instance->ptr_to_weak_module_object()->value(),
+             *module_obj);
+    CHECK(
+        IsWasmObject(current_instance->ptr_to_weak_owning_instance()->value()));
+    prev = current_instance;
+    current_instance = WasmCompiledModule::cast(
+        current_instance->ptr_to_weak_next_instance()->value());
+    ++found_instances;
+    CHECK_LE(found_instances, instance_count);
   }
-
-  if (module->import_table.size() > 0) {
-    thrower.Error("Not supported: module has imports.");
-  }
-  if (module->export_table.size() == 0) {
-    thrower.Error("Not supported: module has no exports.");
-  }
-
-  if (thrower.error()) return -1;
-  MaybeHandle<FixedArray> compiled_module =
-      module->CompileFunctions(isolate, &thrower);
-
-  if (compiled_module.is_null()) return -1;
-  Handle<JSObject> instance =
-      WasmModule::Instantiate(isolate, compiled_module.ToHandleChecked(),
-                              Handle<JSReceiver>::null(),
-                              Handle<JSArrayBuffer>::null())
-          .ToHandleChecked();
-
-  return CallFunction(isolate, instance, &thrower, "main", 0, nullptr);
+  CHECK_EQ(found_instances, instance_count);
 }
 
-int32_t CallFunction(Isolate* isolate, Handle<JSObject> instance,
-                     ErrorThrower* thrower, const char* name, int argc,
-                     Handle<Object> argv[]) {
-  Handle<Name> exports = isolate->factory()->InternalizeUtf8String("exports");
-  Handle<JSObject> exports_object = Handle<JSObject>::cast(
-      JSObject::GetProperty(instance, exports).ToHandleChecked());
-  Handle<Name> main_name = isolate->factory()->NewStringFromAsciiChecked(name);
-  PropertyDescriptor desc;
-  Maybe<bool> property_found = JSReceiver::GetOwnPropertyDescriptor(
-      isolate, exports_object, main_name, &desc);
-  if (!property_found.FromMaybe(false)) return -1;
+void ValidateModuleState(Isolate* isolate, Handle<JSObject> module_obj) {
+  DisallowHeapAllocation no_gc;
+  WasmCompiledModule* compiled_module =
+      WasmCompiledModule::cast(module_obj->GetInternalField(0));
+  CHECK(compiled_module->has_weak_module_object());
+  CHECK_EQ(compiled_module->ptr_to_weak_module_object()->value(), *module_obj);
+  CHECK(!compiled_module->has_weak_prev_instance());
+  CHECK(!compiled_module->has_weak_next_instance());
+  CHECK(!compiled_module->has_weak_owning_instance());
+}
 
-  Handle<JSFunction> main_export = Handle<JSFunction>::cast(desc.value());
-
-  // Call the JS function.
-  Handle<Object> undefined = isolate->factory()->undefined_value();
-  MaybeHandle<Object> retval =
-      Execution::Call(isolate, main_export, undefined, argc, argv);
-
-  // The result should be a number.
-  if (retval.is_null()) {
-    thrower->Error("WASM.compileRun() failed: Invocation was null");
-    return -1;
-  }
-  Handle<Object> result = retval.ToHandleChecked();
-  if (result->IsSmi()) {
-    return Smi::cast(*result)->value();
-  }
-  if (result->IsHeapNumber()) {
-    return static_cast<int32_t>(HeapNumber::cast(*result)->value());
-  }
-  thrower->Error("WASM.compileRun() failed: Return value should be number");
-  return -1;
+void ValidateOrphanedInstance(Isolate* isolate, Handle<JSObject> instance) {
+  DisallowHeapAllocation no_gc;
+  CHECK(IsWasmObject(*instance));
+  WasmCompiledModule* compiled_module =
+      WasmCompiledModule::cast(instance->GetInternalField(kWasmCompiledModule));
+  CHECK(compiled_module->has_weak_module_object());
+  CHECK(compiled_module->ptr_to_weak_module_object()->cleared());
 }
 
 }  // namespace testing
diff --git a/src/wasm/wasm-module.h b/src/wasm/wasm-module.h
index 0c3df51..ac75042 100644
--- a/src/wasm/wasm-module.h
+++ b/src/wasm/wasm-module.h
@@ -27,84 +27,71 @@
 const size_t kMaxFunctionSize = 128 * 1024;
 const size_t kMaxStringSize = 256;
 const uint32_t kWasmMagic = 0x6d736100;
-const uint32_t kWasmVersion = 0x0b;
+const uint32_t kWasmVersion = 0x0c;
+
 const uint8_t kWasmFunctionTypeForm = 0x40;
+const uint8_t kWasmAnyFunctionTypeForm = 0x20;
 
-// WebAssembly sections are named as strings in the binary format, but
-// internally V8 uses an enum to handle them.
-//
-// Entries have the form F(enumerator, string).
-#define FOR_EACH_WASM_SECTION_TYPE(F)  \
-  F(Signatures, 1, "type")             \
-  F(ImportTable, 2, "import")          \
-  F(FunctionSignatures, 3, "function") \
-  F(FunctionTable, 4, "table")         \
-  F(Memory, 5, "memory")               \
-  F(ExportTable, 6, "export")          \
-  F(StartFunction, 7, "start")         \
-  F(FunctionBodies, 8, "code")         \
-  F(DataSegments, 9, "data")           \
-  F(Names, 10, "name")                 \
-  F(Globals, 0, "global")              \
-  F(End, 0, "end")
+enum WasmSectionCode {
+  kUnknownSectionCode = 0,   // Code for unknown sections
+  kTypeSectionCode = 1,      // Function signature declarations
+  kImportSectionCode = 2,    // Import declarations
+  kFunctionSectionCode = 3,  // Function declarations
+  kTableSectionCode = 4,     // Indirect function table and other tables
+  kMemorySectionCode = 5,    // Memory attributes
+  kGlobalSectionCode = 6,    // Global declarations
+  kExportSectionCode = 7,    // Exports
+  kStartSectionCode = 8,     // Start function declaration
+  kElementSectionCode = 9,   // Elements section
+  kCodeSectionCode = 10,     // Function code
+  kDataSectionCode = 11,     // Data segments
+  kNameSectionCode = 12,     // Name section (encoded as a string)
+};
 
-// Contants for the above section types: {LEB128 length, characters...}.
-#define WASM_SECTION_MEMORY 6, 'm', 'e', 'm', 'o', 'r', 'y'
-#define WASM_SECTION_SIGNATURES 4, 't', 'y', 'p', 'e'
-#define WASM_SECTION_GLOBALS 6, 'g', 'l', 'o', 'b', 'a', 'l'
-#define WASM_SECTION_DATA_SEGMENTS 4, 'd', 'a', 't', 'a'
-#define WASM_SECTION_FUNCTION_TABLE 5, 't', 'a', 'b', 'l', 'e'
-#define WASM_SECTION_END 3, 'e', 'n', 'd'
-#define WASM_SECTION_START_FUNCTION 5, 's', 't', 'a', 'r', 't'
-#define WASM_SECTION_IMPORT_TABLE 6, 'i', 'm', 'p', 'o', 'r', 't'
-#define WASM_SECTION_EXPORT_TABLE 6, 'e', 'x', 'p', 'o', 'r', 't'
-#define WASM_SECTION_FUNCTION_SIGNATURES \
-  8, 'f', 'u', 'n', 'c', 't', 'i', 'o', 'n'
-#define WASM_SECTION_FUNCTION_BODIES 4, 'c', 'o', 'd', 'e'
-#define WASM_SECTION_NAMES 4, 'n', 'a', 'm', 'e'
+inline bool IsValidSectionCode(uint8_t byte) {
+  return kTypeSectionCode <= byte && byte <= kDataSectionCode;
+}
 
-// Constants for the above section headers' size (LEB128 + characters).
-#define WASM_SECTION_MEMORY_SIZE ((size_t)7)
-#define WASM_SECTION_SIGNATURES_SIZE ((size_t)5)
-#define WASM_SECTION_GLOBALS_SIZE ((size_t)7)
-#define WASM_SECTION_DATA_SEGMENTS_SIZE ((size_t)5)
-#define WASM_SECTION_FUNCTION_TABLE_SIZE ((size_t)6)
-#define WASM_SECTION_END_SIZE ((size_t)4)
-#define WASM_SECTION_START_FUNCTION_SIZE ((size_t)6)
-#define WASM_SECTION_IMPORT_TABLE_SIZE ((size_t)7)
-#define WASM_SECTION_EXPORT_TABLE_SIZE ((size_t)7)
-#define WASM_SECTION_FUNCTION_SIGNATURES_SIZE ((size_t)9)
-#define WASM_SECTION_FUNCTION_BODIES_SIZE ((size_t)5)
-#define WASM_SECTION_NAMES_SIZE ((size_t)5)
+const char* SectionName(WasmSectionCode code);
 
 class WasmDebugInfo;
 
-struct WasmSection {
-  enum class Code : uint32_t {
-#define F(enumerator, order, string) enumerator,
-    FOR_EACH_WASM_SECTION_TYPE(F)
-#undef F
-        Max
-  };
-  static WasmSection::Code begin();
-  static WasmSection::Code end();
-  static WasmSection::Code next(WasmSection::Code code);
-  static const char* getName(Code code);
-  static int getOrder(Code code);
-  static size_t getNameLength(Code code);
-  static WasmSection::Code lookup(const byte* string, uint32_t length);
-};
-
-enum WasmFunctionDeclBit {
-  kDeclFunctionName = 0x01,
-  kDeclFunctionExport = 0x08
-};
-
 // Constants for fixed-size elements within a module.
-static const size_t kDeclMemorySize = 3;
-static const size_t kDeclDataSegmentSize = 13;
-
 static const uint32_t kMaxReturnCount = 1;
+static const uint8_t kResizableMaximumFlag = 1;
+static const int32_t kInvalidFunctionIndex = -1;
+
+enum WasmExternalKind {
+  kExternalFunction = 0,
+  kExternalTable = 1,
+  kExternalMemory = 2,
+  kExternalGlobal = 3
+};
+
+// Representation of an initializer expression.
+struct WasmInitExpr {
+  enum WasmInitKind {
+    kNone,
+    kGlobalIndex,
+    kI32Const,
+    kI64Const,
+    kF32Const,
+    kF64Const
+  } kind;
+
+  union {
+    int32_t i32_const;
+    int64_t i64_const;
+    float f32_const;
+    double f64_const;
+    uint32_t global_index;
+  } val;
+};
+
+#define NO_INIT                 \
+  {                             \
+    WasmInitExpr::kNone, { 0u } \
+  }
 
 // Static representation of a WASM function.
 struct WasmFunction {
@@ -115,54 +102,69 @@
   uint32_t name_length;  // length in bytes of the name.
   uint32_t code_start_offset;    // offset in the module bytes of code start.
   uint32_t code_end_offset;      // offset in the module bytes of code end.
-};
-
-// Static representation of an imported WASM function.
-struct WasmImport {
-  FunctionSig* sig;               // signature of the function.
-  uint32_t sig_index;             // index into the signature table.
-  uint32_t module_name_offset;    // offset in module bytes of the module name.
-  uint32_t module_name_length;    // length in bytes of the module name.
-  uint32_t function_name_offset;  // offset in module bytes of the import name.
-  uint32_t function_name_length;  // length in bytes of the import name.
-};
-
-// Static representation of an exported WASM function.
-struct WasmExport {
-  uint32_t func_index;   // index into the function table.
-  uint32_t name_offset;  // offset in module bytes of the name to export.
-  uint32_t name_length;  // length in bytes of the exported name.
+  bool imported;
+  bool exported;
 };
 
 // Static representation of a wasm global variable.
 struct WasmGlobal {
-  uint32_t name_offset;  // offset in the module bytes of the name, if any.
-  uint32_t name_length;  // length in bytes of the global name.
   LocalType type;        // type of the global.
-  uint32_t offset;       // offset from beginning of globals area.
-  bool exported;         // true if this global is exported.
+  bool mutability;       // {true} if mutable.
+  WasmInitExpr init;     // the initialization expression of the global.
+  uint32_t offset;       // offset into global memory.
+  bool imported;         // true if imported.
+  bool exported;         // true if exported.
 };
 
 // Static representation of a wasm data segment.
 struct WasmDataSegment {
-  uint32_t dest_addr;      // destination memory address of the data.
+  WasmInitExpr dest_addr;  // destination memory address of the data.
   uint32_t source_offset;  // start offset in the module bytes.
   uint32_t source_size;    // end offset in the module bytes.
-  bool init;               // true if loaded upon instantiation.
 };
 
 // Static representation of a wasm indirect call table.
 struct WasmIndirectFunctionTable {
-  uint32_t size;                 // initial table size.
-  uint32_t max_size;             // maximum table size.
-  std::vector<uint16_t> values;  // function table.
+  uint32_t size;                // initial table size.
+  uint32_t max_size;            // maximum table size.
+  std::vector<int32_t> values;  // function table, -1 indicating invalid.
+  bool imported;                // true if imported.
+  bool exported;                // true if exported.
+};
+
+// Static representation of how to initialize a table.
+struct WasmTableInit {
+  uint32_t table_index;
+  WasmInitExpr offset;
+  std::vector<uint32_t> entries;
+};
+
+// Static representation of a WASM import.
+struct WasmImport {
+  uint32_t module_name_length;  // length in bytes of the module name.
+  uint32_t module_name_offset;  // offset in module bytes of the module name.
+  uint32_t field_name_length;   // length in bytes of the import name.
+  uint32_t field_name_offset;   // offset in module bytes of the import name.
+  WasmExternalKind kind;        // kind of the import.
+  uint32_t index;               // index into the respective space.
+};
+
+// Static representation of a WASM export.
+struct WasmExport {
+  uint32_t name_length;   // length in bytes of the exported name.
+  uint32_t name_offset;   // offset in module bytes of the name to export.
+  WasmExternalKind kind;  // kind of the export.
+  uint32_t index;         // index into the respective space.
 };
 
 enum ModuleOrigin { kWasmOrigin, kAsmJsOrigin };
 
+class WasmCompiledModule;
+
 // Static representation of a module.
 struct WasmModule {
   static const uint32_t kPageSize = 0x10000;    // Page size, 64kb.
+  static const uint32_t kMaxLegalPages = 65536;  // Maximum legal pages
   static const uint32_t kMinMemPages = 1;       // Minimum memory size = 64kb
   static const uint32_t kMaxMemPages = 16384;   // Maximum memory size =  1gb
 
@@ -171,7 +173,6 @@
   uint32_t min_mem_pages;     // minimum size of the memory in 64k pages.
   uint32_t max_mem_pages;     // maximum size of the memory in 64k pages.
   bool mem_export;            // true if the memory is exported.
-  bool mem_external;          // true if the memory is external.
   // TODO(wasm): reconcile start function index being an int with
   // the fact that we index on uint32_t, so we may technically not be
   // able to represent some start_function_index -es.
@@ -180,12 +181,16 @@
 
   std::vector<WasmGlobal> globals;             // globals in this module.
   uint32_t globals_size;                       // size of globals table.
+  uint32_t num_imported_functions;             // number of imported functions.
+  uint32_t num_declared_functions;             // number of declared functions.
+  uint32_t num_exported_functions;             // number of exported functions.
   std::vector<FunctionSig*> signatures;        // signatures in this module.
   std::vector<WasmFunction> functions;         // functions in this module.
   std::vector<WasmDataSegment> data_segments;  // data segments in this module.
   std::vector<WasmIndirectFunctionTable> function_tables;  // function tables.
   std::vector<WasmImport> import_table;        // import table.
   std::vector<WasmExport> export_table;        // export table.
+  std::vector<WasmTableInit> table_inits;      // initializations of tables.
   // We store the semaphore here to extend its lifetime. In <libc-2.21, which we
   // use on the try bots, semaphore::Wait() can return while some compilation
   // tasks are still executing semaphore::Signal(). If the semaphore is cleaned
@@ -233,13 +238,12 @@
   }
 
   // Creates a new instantiation of the module in the given isolate.
-  static MaybeHandle<JSObject> Instantiate(Isolate* isolate,
-                                           Handle<FixedArray> compiled_module,
-                                           Handle<JSReceiver> ffi,
-                                           Handle<JSArrayBuffer> memory);
+  V8_EXPORT_PRIVATE static MaybeHandle<JSObject> Instantiate(
+      Isolate* isolate, ErrorThrower* thrower, Handle<JSObject> module_object,
+      Handle<JSReceiver> ffi, Handle<JSArrayBuffer> memory);
 
-  MaybeHandle<FixedArray> CompileFunctions(Isolate* isolate,
-                                           ErrorThrower* thrower) const;
+  MaybeHandle<WasmCompiledModule> CompileFunctions(Isolate* isolate,
+                                                   ErrorThrower* thrower) const;
 
  private:
   DISALLOW_COPY_AND_ASSIGN(WasmModule);
@@ -255,7 +259,6 @@
   Handle<JSArrayBuffer> globals_buffer;  // Handle to array buffer of globals.
   std::vector<Handle<FixedArray>> function_tables;  // indirect function tables.
   std::vector<Handle<Code>> function_code;  // code objects for each function.
-  std::vector<Handle<Code>> import_code;    // code objects for each import.
   // -- raw memory ------------------------------------------------------------
   byte* mem_start;  // start of linear memory.
   uint32_t mem_size;  // size of the linear memory.
@@ -266,7 +269,6 @@
       : module(m),
         function_tables(m->function_tables.size()),
         function_code(m->functions.size()),
-        import_code(m->import_table.size()),
         mem_start(nullptr),
         mem_size(0),
         globals_start(nullptr) {}
@@ -278,9 +280,6 @@
   const WasmModule* module;
   WasmModuleInstance* instance;
   ModuleOrigin origin;
-  // TODO(mtrofin): remove this once we introduce WASM_DIRECT_CALL
-  // reloc infos.
-  std::vector<Handle<Code>> placeholders;
 
   bool IsValidGlobal(uint32_t index) const {
     return module && index < module->globals.size();
@@ -291,9 +290,6 @@
   bool IsValidSignature(uint32_t index) const {
     return module && index < module->signatures.size();
   }
-  bool IsValidImport(uint32_t index) const {
-    return module && index < module->import_table.size();
-  }
   bool IsValidTable(uint32_t index) const {
     return module && index < module->function_tables.size();
   }
@@ -305,10 +301,6 @@
     DCHECK(IsValidFunction(index));
     return module->functions[index].sig;
   }
-  FunctionSig* GetImportSignature(uint32_t index) {
-    DCHECK(IsValidImport(index));
-    return module->import_table[index].sig;
-  }
   FunctionSig* GetSignature(uint32_t index) {
     DCHECK(IsValidSignature(index));
     return module->signatures[index];
@@ -320,14 +312,15 @@
 
   bool asm_js() { return origin == kAsmJsOrigin; }
 
-  Handle<Code> GetCodeOrPlaceholder(uint32_t index) const;
-  Handle<Code> GetImportCode(uint32_t index);
+  Handle<Code> GetFunctionCode(uint32_t index) {
+    DCHECK_NOT_NULL(instance);
+    return instance->function_code[index];
+  }
 
   static compiler::CallDescriptor* GetWasmCallDescriptor(Zone* zone,
                                                          FunctionSig* sig);
   static compiler::CallDescriptor* GetI32WasmCallDescriptor(
       Zone* zone, compiler::CallDescriptor* descriptor);
-  compiler::CallDescriptor* GetCallDescriptor(Zone* zone, uint32_t index);
 };
 
 // A helper for printing out the names of functions.
@@ -347,6 +340,128 @@
 typedef std::vector<std::pair<int, int>> FunctionOffsets;
 typedef Result<FunctionOffsets> FunctionOffsetsResult;
 
+class WasmCompiledModule : public FixedArray {
+ public:
+  static WasmCompiledModule* cast(Object* fixed_array) {
+    return reinterpret_cast<WasmCompiledModule*>(fixed_array);
+  }
+
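+// Defines typed accessors over a slot of the underlying FixedArray:
+// NAME()/maybe_NAME() handle getters, ptr_to_NAME() raw getter and
+// set_NAME()/set_ptr_to_NAME() setters, plus has_NAME() and reset_NAME().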
+#define WCM_OBJECT_OR_WEAK(TYPE, NAME, ID)                           \
+  Handle<TYPE> NAME() const { return handle(ptr_to_##NAME()); }      \
+                                                                     \
+  MaybeHandle<TYPE> maybe_##NAME() const {                           \
+    if (has_##NAME()) return NAME();                                 \
+    return MaybeHandle<TYPE>();                                      \
+  }                                                                  \
+                                                                     \
+  TYPE* ptr_to_##NAME() const {                                      \
+    Object* obj = get(ID);                                           \
+    if (!obj->Is##TYPE()) return nullptr;                            \
+    return TYPE::cast(obj);                                          \
+  }                                                                  \
+                                                                     \
+  void set_##NAME(Handle<TYPE> value) { set_ptr_to_##NAME(*value); } \
+                                                                     \
+  void set_ptr_to_##NAME(TYPE* value) { set(ID, value); }            \
+                                                                     \
+  bool has_##NAME() const { return get(ID)->Is##TYPE(); }            \
+                                                                     \
+  void reset_##NAME() { set_undefined(ID); }
+
+#define WCM_OBJECT(TYPE, NAME) WCM_OBJECT_OR_WEAK(TYPE, NAME, kID_##NAME)
+
+#define WCM_SMALL_NUMBER(TYPE, NAME)                               \
+  TYPE NAME() const {                                              \
+    return static_cast<TYPE>(Smi::cast(get(kID_##NAME))->value()); \
+  }
+
+#define WCM_WEAK_LINK(TYPE, NAME)                        \
+  WCM_OBJECT_OR_WEAK(WeakCell, weak_##NAME, kID_##NAME); \
+                                                         \
+  Handle<TYPE> NAME() const {                            \
+    return handle(TYPE::cast(weak_##NAME()->value()));   \
+  }
+
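+// The field layout of a WasmCompiledModule: each entry expands to both a
+// property index (kID_*) and accessors of the listed kind.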
+#define CORE_WCM_PROPERTY_TABLE(MACRO)                \
+  MACRO(OBJECT, FixedArray, code_table)               \
+  MACRO(OBJECT, FixedArray, import_data)              \
+  MACRO(OBJECT, FixedArray, exports)                  \
+  MACRO(OBJECT, FixedArray, startup_function)         \
+  MACRO(OBJECT, FixedArray, indirect_function_tables) \
+  MACRO(OBJECT, String, module_bytes)                 \
+  MACRO(OBJECT, ByteArray, function_names)            \
+  MACRO(SMALL_NUMBER, uint32_t, min_memory_pages)     \
+  MACRO(OBJECT, FixedArray, data_segments_info)       \
+  MACRO(OBJECT, ByteArray, data_segments)             \
+  MACRO(SMALL_NUMBER, uint32_t, globals_size)         \
+  MACRO(OBJECT, JSArrayBuffer, heap)                  \
+  MACRO(SMALL_NUMBER, bool, export_memory)            \
+  MACRO(SMALL_NUMBER, ModuleOrigin, origin)           \
+  MACRO(WEAK_LINK, WasmCompiledModule, next_instance) \
+  MACRO(WEAK_LINK, WasmCompiledModule, prev_instance) \
+  MACRO(WEAK_LINK, JSObject, owning_instance)         \
+  MACRO(WEAK_LINK, JSObject, module_object)
+
+#if DEBUG
+#define DEBUG_ONLY_TABLE(MACRO) MACRO(SMALL_NUMBER, uint32_t, instance_id)
+#else
+#define DEBUG_ONLY_TABLE(IGNORE)
+  uint32_t instance_id() const { return -1; }
+#endif
+
+#define WCM_PROPERTY_TABLE(MACRO) \
+  CORE_WCM_PROPERTY_TABLE(MACRO)  \
+  DEBUG_ONLY_TABLE(MACRO)
+
+ private:
+  enum PropertyIndices {
+#define INDICES(IGNORE1, IGNORE2, NAME) kID_##NAME,
+    WCM_PROPERTY_TABLE(INDICES) Count
+#undef INDICES
+  };
+
+ public:
+  static Handle<WasmCompiledModule> New(Isolate* isolate,
+                                        uint32_t min_memory_pages,
+                                        uint32_t globals_size,
+                                        bool export_memory,
+                                        ModuleOrigin origin);
+
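+  // Makes a shallow copy of {module} and clears its per-instance weak links,
+  // so the clone starts out unlinked from any instance chain.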
+  static Handle<WasmCompiledModule> Clone(Isolate* isolate,
+                                          Handle<WasmCompiledModule> module) {
+    Handle<WasmCompiledModule> ret = Handle<WasmCompiledModule>::cast(
+        isolate->factory()->CopyFixedArray(module));
+    ret->Init();
+    ret->reset_weak_owning_instance();
+    ret->reset_weak_next_instance();
+    ret->reset_weak_prev_instance();
+    return ret;
+  }
+
+  uint32_t mem_size() const {
+    DCHECK(has_heap());
+    return heap()->byte_length()->Number();
+  }
+
+  uint32_t default_mem_size() const {
+    return min_memory_pages() * WasmModule::kPageSize;
+  }
+
+#define DECLARATION(KIND, TYPE, NAME) WCM_##KIND(TYPE, NAME)
+  WCM_PROPERTY_TABLE(DECLARATION)
+#undef DECLARATION
+
+  void PrintInstancesChain();
+
+ private:
+#if DEBUG
+  static uint32_t instance_id_counter_;
+#endif
+  void Init();
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(WasmCompiledModule);
+};
+
 // Extract a function name from the given wasm object.
 // Returns "<WASM UNNAMED>" if the function is unnamed or the name is not a
 // valid UTF-8 string.
@@ -399,18 +514,38 @@
                            const std::vector<Handle<Code>>* code_table);
 
 Handle<JSObject> CreateCompiledModuleObject(Isolate* isolate,
-                                            Handle<FixedArray> compiled_module);
+                                            Handle<FixedArray> compiled_module,
+                                            ModuleOrigin origin);
+
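+// Decodes and compiles the given module bytes and wraps the result in a new
+// module JSObject; returns nothing if decoding or compilation fails.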
+V8_EXPORT_PRIVATE MaybeHandle<JSObject> CreateModuleObjectFromBytes(
+    Isolate* isolate, const byte* start, const byte* end, ErrorThrower* thrower,
+    ModuleOrigin origin);
+
+V8_EXPORT_PRIVATE bool ValidateModuleBytes(Isolate* isolate, const byte* start,
+                                           const byte* end,
+                                           ErrorThrower* thrower,
+                                           ModuleOrigin origin);
+
+// Get the number of imported functions for a WASM instance.
+uint32_t GetNumImportedFunctions(Handle<JSObject> wasm_object);
+
+// Assumed to be called with a code object associated to a wasm module instance.
+// Intended to be called from runtime functions.
+// Returns nullptr on failing to get owning instance.
+Object* GetOwningWasmInstance(Code* code);
+
+int32_t GetInstanceMemorySize(Isolate* isolate, Handle<JSObject> instance);
+
+int32_t GrowInstanceMemory(Isolate* isolate, Handle<JSObject> instance,
+                           uint32_t pages);
 
 namespace testing {
 
-// Decode, verify, and run the function labeled "main" in the
-// given encoded module. The module should have no imports.
-int32_t CompileAndRunWasmModule(Isolate* isolate, const byte* module_start,
-                                const byte* module_end, bool asm_js = false);
+void ValidateInstancesChain(Isolate* isolate, Handle<JSObject> module_obj,
+                            int instance_count);
+void ValidateModuleState(Isolate* isolate, Handle<JSObject> module_obj);
+void ValidateOrphanedInstance(Isolate* isolate, Handle<JSObject> instance);
 
-int32_t CallFunction(Isolate* isolate, Handle<JSObject> instance,
-                     ErrorThrower* thrower, const char* name, int argc,
-                     Handle<Object> argv[]);
 }  // namespace testing
 }  // namespace wasm
 }  // namespace internal
diff --git a/src/wasm/wasm-opcodes.cc b/src/wasm/wasm-opcodes.cc
index 8f54207..cd2dde4 100644
--- a/src/wasm/wasm-opcodes.cc
+++ b/src/wasm/wasm-opcodes.cc
@@ -38,6 +38,18 @@
   return "Unknown";
 }
 
+bool WasmOpcodes::IsPrefixOpcode(WasmOpcode opcode) {
+  switch (opcode) {
+#define CHECK_PREFIX(name, opcode) \
+  case k##name##Prefix:            \
+    return true;
+    FOREACH_PREFIX(CHECK_PREFIX)
+#undef CHECK_PREFIX
+    default:
+      return false;
+  }
+}
+
 std::ostream& operator<<(std::ostream& os, const FunctionSig& sig) {
   if (sig.return_count() == 0) os << "v";
   for (size_t i = 0; i < sig.return_count(); ++i) {
@@ -74,6 +86,7 @@
     nullptr, FOREACH_SIMD_SIGNATURE(DECLARE_SIMD_SIG_ENTRY)};
 
 static byte kSimpleExprSigTable[256];
+static byte kSimpleAsmjsExprSigTable[256];
 static byte kSimdExprSigTable[256];
 
 // Initialize the signature table.
@@ -81,14 +94,16 @@
 #define SET_SIG_TABLE(name, opcode, sig) \
   kSimpleExprSigTable[opcode] = static_cast<int>(kSigEnum_##sig) + 1;
   FOREACH_SIMPLE_OPCODE(SET_SIG_TABLE);
-  FOREACH_SIMPLE_MEM_OPCODE(SET_SIG_TABLE);
-  FOREACH_ASMJS_COMPAT_OPCODE(SET_SIG_TABLE);
 #undef SET_SIG_TABLE
+#define SET_ASMJS_SIG_TABLE(name, opcode, sig) \
+  kSimpleAsmjsExprSigTable[opcode] = static_cast<int>(kSigEnum_##sig) + 1;
+  FOREACH_ASMJS_COMPAT_OPCODE(SET_ASMJS_SIG_TABLE);
+#undef SET_ASMJS_SIG_TABLE
   byte simd_index;
 #define SET_SIG_TABLE(name, opcode, sig) \
   simd_index = opcode & 0xff;            \
   kSimdExprSigTable[simd_index] = static_cast<int>(kSigEnum_##sig) + 1;
-  FOREACH_SIMD_OPCODE(SET_SIG_TABLE)
+  FOREACH_SIMD_0_OPERAND_OPCODE(SET_SIG_TABLE)
 #undef SET_SIG_TABLE
 }
 
@@ -102,6 +117,10 @@
     return const_cast<FunctionSig*>(
         kSimpleExprSigs[kSimpleExprSigTable[static_cast<byte>(opcode)]]);
   }
+  FunctionSig* AsmjsSignature(WasmOpcode opcode) const {
+    return const_cast<FunctionSig*>(
+        kSimpleExprSigs[kSimpleAsmjsExprSigTable[static_cast<byte>(opcode)]]);
+  }
   FunctionSig* SimdSignature(WasmOpcode opcode) const {
     return const_cast<FunctionSig*>(
         kSimdExprSigs[kSimdExprSigTable[static_cast<byte>(opcode & 0xff)]]);
@@ -118,6 +137,10 @@
   }
 }
 
+FunctionSig* WasmOpcodes::AsmjsSignature(WasmOpcode opcode) {
+  return sig_table.Get().AsmjsSignature(opcode);
+}
+
 // TODO(titzer): pull WASM_64 up to a common header.
 #if !V8_TARGET_ARCH_32_BIT || V8_TARGET_ARCH_X64
 #define WASM_64 1
diff --git a/src/wasm/wasm-opcodes.h b/src/wasm/wasm-opcodes.h
index 4d66e56..03827b2 100644
--- a/src/wasm/wasm-opcodes.h
+++ b/src/wasm/wasm-opcodes.h
@@ -22,6 +22,9 @@
   kLocalS128 = 5
 };
 
+// Type code for multi-value block types.
+static const uint8_t kMultivalBlock = 0x41;
+
 // We reuse the internal machine type to represent WebAssembly AST types.
 // A typedef improves readability without adding a whole new type system.
 typedef MachineRepresentation LocalType;
@@ -44,7 +47,7 @@
 
 // Control expressions and blocks.
 #define FOREACH_CONTROL_OPCODE(V) \
-  V(Nop, 0x00, _)                 \
+  V(Unreachable, 0x00, _)         \
   V(Block, 0x01, _)               \
   V(Loop, 0x02, _)                \
   V(If, 0x03, _)                  \
@@ -54,13 +57,10 @@
   V(BrIf, 0x07, _)                \
   V(BrTable, 0x08, _)             \
   V(Return, 0x09, _)              \
-  V(Unreachable, 0x0a, _)         \
+  V(Nop, 0x0a, _)                 \
   V(Throw, 0xfa, _)               \
-  V(TryCatch, 0xfb, _)            \
-  V(TryCatchFinally, 0xfc, _)     \
-  V(TryFinally, 0xfd, _)          \
+  V(Try, 0xfb, _)                 \
   V(Catch, 0xfe, _)               \
-  V(Finally, 0xff, _)             \
   V(End, 0x0F, _)
 
 // Constants, locals, globals, and calls.
@@ -71,9 +71,10 @@
   V(F32Const, 0x13, _)         \
   V(GetLocal, 0x14, _)         \
   V(SetLocal, 0x15, _)         \
+  V(TeeLocal, 0x19, _)         \
+  V(Drop, 0x0b, _)             \
   V(CallFunction, 0x16, _)     \
   V(CallIndirect, 0x17, _)     \
-  V(CallImport, 0x18, _)       \
   V(I8Const, 0xcb, _)          \
   V(GetGlobal, 0xbb, _)        \
   V(SetGlobal, 0xbc, _)
@@ -273,141 +274,144 @@
   V(I32AsmjsSConvertF64, 0xe2, i_d)    \
   V(I32AsmjsUConvertF64, 0xe3, i_d)
 
-#define FOREACH_SIMD_OPCODE(V)         \
-  V(F32x4Splat, 0xe500, s_f)           \
-  V(F32x4ExtractLane, 0xe501, f_si)    \
-  V(F32x4ReplaceLane, 0xe502, s_sif)   \
-  V(F32x4Abs, 0xe503, s_s)             \
-  V(F32x4Neg, 0xe504, s_s)             \
-  V(F32x4Sqrt, 0xe505, s_s)            \
-  V(F32x4RecipApprox, 0xe506, s_s)     \
-  V(F32x4SqrtApprox, 0xe507, s_s)      \
-  V(F32x4Add, 0xe508, s_ss)            \
-  V(F32x4Sub, 0xe509, s_ss)            \
-  V(F32x4Mul, 0xe50a, s_ss)            \
-  V(F32x4Div, 0xe50b, s_ss)            \
-  V(F32x4Min, 0xe50c, s_ss)            \
-  V(F32x4Max, 0xe50d, s_ss)            \
-  V(F32x4MinNum, 0xe50e, s_ss)         \
-  V(F32x4MaxNum, 0xe50f, s_ss)         \
-  V(F32x4Eq, 0xe510, s_ss)             \
-  V(F32x4Ne, 0xe511, s_ss)             \
-  V(F32x4Lt, 0xe512, s_ss)             \
-  V(F32x4Le, 0xe513, s_ss)             \
-  V(F32x4Gt, 0xe514, s_ss)             \
-  V(F32x4Ge, 0xe515, s_ss)             \
-  V(F32x4Select, 0xe516, s_sss)        \
-  V(F32x4Swizzle, 0xe517, s_s)         \
-  V(F32x4Shuffle, 0xe518, s_ss)        \
-  V(F32x4FromInt32x4, 0xe519, s_s)     \
-  V(F32x4FromUint32x4, 0xe51a, s_s)    \
-  V(I32x4Splat, 0xe51b, s_i)           \
-  V(I32x4ExtractLane, 0xe51c, i_si)    \
-  V(I32x4ReplaceLane, 0xe51d, s_sii)   \
-  V(I32x4Neg, 0xe51e, s_s)             \
-  V(I32x4Add, 0xe51f, s_ss)            \
-  V(I32x4Sub, 0xe520, s_ss)            \
-  V(I32x4Mul, 0xe521, s_ss)            \
-  V(I32x4Min_s, 0xe522, s_ss)          \
-  V(I32x4Max_s, 0xe523, s_ss)          \
-  V(I32x4Shl, 0xe524, s_si)            \
-  V(I32x4Shr_s, 0xe525, s_si)          \
-  V(I32x4Eq, 0xe526, s_ss)             \
-  V(I32x4Ne, 0xe527, s_ss)             \
-  V(I32x4Lt_s, 0xe528, s_ss)           \
-  V(I32x4Le_s, 0xe529, s_ss)           \
-  V(I32x4Gt_s, 0xe52a, s_ss)           \
-  V(I32x4Ge_s, 0xe52b, s_ss)           \
-  V(I32x4Select, 0xe52c, s_sss)        \
-  V(I32x4Swizzle, 0xe52d, s_s)         \
-  V(I32x4Shuffle, 0xe52e, s_ss)        \
-  V(I32x4FromFloat32x4, 0xe52f, s_s)   \
-  V(I32x4Min_u, 0xe530, s_ss)          \
-  V(I32x4Max_u, 0xe531, s_ss)          \
-  V(I32x4Shr_u, 0xe532, s_ss)          \
-  V(I32x4Lt_u, 0xe533, s_ss)           \
-  V(I32x4Le_u, 0xe534, s_ss)           \
-  V(I32x4Gt_u, 0xe535, s_ss)           \
-  V(I32x4Ge_u, 0xe536, s_ss)           \
-  V(Ui32x4FromFloat32x4, 0xe537, s_s)  \
-  V(I16x8Splat, 0xe538, s_i)           \
-  V(I16x8ExtractLane, 0xe539, i_si)    \
-  V(I16x8ReplaceLane, 0xe53a, s_sii)   \
-  V(I16x8Neg, 0xe53b, s_s)             \
-  V(I16x8Add, 0xe53c, s_ss)            \
-  V(I16x8AddSaturate_s, 0xe53d, s_ss)  \
-  V(I16x8Sub, 0xe53e, s_ss)            \
-  V(I16x8SubSaturate_s, 0xe53f, s_ss)  \
-  V(I16x8Mul, 0xe540, s_ss)            \
-  V(I16x8Min_s, 0xe541, s_ss)          \
-  V(I16x8Max_s, 0xe542, s_ss)          \
-  V(I16x8Shl, 0xe543, s_si)            \
-  V(I16x8Shr_s, 0xe544, s_si)          \
-  V(I16x8Eq, 0xe545, s_ss)             \
-  V(I16x8Ne, 0xe546, s_ss)             \
-  V(I16x8Lt_s, 0xe547, s_ss)           \
-  V(I16x8Le_s, 0xe548, s_ss)           \
-  V(I16x8Gt_s, 0xe549, s_ss)           \
-  V(I16x8Ge_s, 0xe54a, s_ss)           \
-  V(I16x8Select, 0xe54b, s_sss)        \
-  V(I16x8Swizzle, 0xe54c, s_s)         \
-  V(I16x8Shuffle, 0xe54d, s_ss)        \
-  V(I16x8AddSaturate_u, 0xe54e, s_ss)  \
-  V(I16x8SubSaturate_u, 0xe54f, s_ss)  \
-  V(I16x8Min_u, 0xe550, s_ss)          \
-  V(I16x8Max_u, 0xe551, s_ss)          \
-  V(I16x8Shr_u, 0xe552, s_si)          \
-  V(I16x8Lt_u, 0xe553, s_ss)           \
-  V(I16x8Le_u, 0xe554, s_ss)           \
-  V(I16x8Gt_u, 0xe555, s_ss)           \
-  V(I16x8Ge_u, 0xe556, s_ss)           \
-  V(I8x16Splat, 0xe557, s_i)           \
-  V(I8x16ExtractLane, 0xe558, i_si)    \
-  V(I8x16ReplaceLane, 0xe559, s_sii)   \
-  V(I8x16Neg, 0xe55a, s_s)             \
-  V(I8x16Add, 0xe55b, s_ss)            \
-  V(I8x16AddSaturate_s, 0xe55c, s_ss)  \
-  V(I8x16Sub, 0xe55d, s_ss)            \
-  V(I8x16SubSaturate_s, 0xe55e, s_ss)  \
-  V(I8x16Mul, 0xe55f, s_ss)            \
-  V(I8x16Min_s, 0xe560, s_ss)          \
-  V(I8x16Max_s, 0xe561, s_ss)          \
-  V(I8x16Shl, 0xe562, s_si)            \
-  V(I8x16Shr_s, 0xe563, s_si)          \
-  V(I8x16Eq, 0xe564, s_ss)             \
-  V(I8x16Neq, 0xe565, s_ss)            \
-  V(I8x16Lt_s, 0xe566, s_ss)           \
-  V(I8x16Le_s, 0xe567, s_ss)           \
-  V(I8x16Gt_s, 0xe568, s_ss)           \
-  V(I8x16Ge_s, 0xe569, s_ss)           \
-  V(I8x16Select, 0xe56a, s_sss)        \
-  V(I8x16Swizzle, 0xe56b, s_s)         \
-  V(I8x16Shuffle, 0xe56c, s_ss)        \
-  V(I8x16AddSaturate_u, 0xe56d, s_ss)  \
-  V(I8x16Sub_saturate_u, 0xe56e, s_ss) \
-  V(I8x16Min_u, 0xe56f, s_ss)          \
-  V(I8x16Max_u, 0xe570, s_ss)          \
-  V(I8x16Shr_u, 0xe571, s_ss)          \
-  V(I8x16Lt_u, 0xe572, s_ss)           \
-  V(I8x16Le_u, 0xe573, s_ss)           \
-  V(I8x16Gt_u, 0xe574, s_ss)           \
-  V(I8x16Ge_u, 0xe575, s_ss)           \
-  V(S128And, 0xe576, s_ss)             \
-  V(S128Ior, 0xe577, s_ss)             \
-  V(S128Xor, 0xe578, s_ss)             \
+#define FOREACH_SIMD_0_OPERAND_OPCODE(V) \
+  V(F32x4Splat, 0xe500, s_f)             \
+  V(F32x4ReplaceLane, 0xe502, s_sif)     \
+  V(F32x4Abs, 0xe503, s_s)               \
+  V(F32x4Neg, 0xe504, s_s)               \
+  V(F32x4Sqrt, 0xe505, s_s)              \
+  V(F32x4RecipApprox, 0xe506, s_s)       \
+  V(F32x4SqrtApprox, 0xe507, s_s)        \
+  V(F32x4Add, 0xe508, s_ss)              \
+  V(F32x4Sub, 0xe509, s_ss)              \
+  V(F32x4Mul, 0xe50a, s_ss)              \
+  V(F32x4Div, 0xe50b, s_ss)              \
+  V(F32x4Min, 0xe50c, s_ss)              \
+  V(F32x4Max, 0xe50d, s_ss)              \
+  V(F32x4MinNum, 0xe50e, s_ss)           \
+  V(F32x4MaxNum, 0xe50f, s_ss)           \
+  V(F32x4Eq, 0xe510, s_ss)               \
+  V(F32x4Ne, 0xe511, s_ss)               \
+  V(F32x4Lt, 0xe512, s_ss)               \
+  V(F32x4Le, 0xe513, s_ss)               \
+  V(F32x4Gt, 0xe514, s_ss)               \
+  V(F32x4Ge, 0xe515, s_ss)               \
+  V(F32x4Select, 0xe516, s_sss)          \
+  V(F32x4Swizzle, 0xe517, s_s)           \
+  V(F32x4Shuffle, 0xe518, s_ss)          \
+  V(F32x4FromInt32x4, 0xe519, s_s)       \
+  V(F32x4FromUint32x4, 0xe51a, s_s)      \
+  V(I32x4Splat, 0xe51b, s_i)             \
+  V(I32x4ReplaceLane, 0xe51d, s_sii)     \
+  V(I32x4Neg, 0xe51e, s_s)               \
+  V(I32x4Add, 0xe51f, s_ss)              \
+  V(I32x4Sub, 0xe520, s_ss)              \
+  V(I32x4Mul, 0xe521, s_ss)              \
+  V(I32x4Min_s, 0xe522, s_ss)            \
+  V(I32x4Max_s, 0xe523, s_ss)            \
+  V(I32x4Shl, 0xe524, s_si)              \
+  V(I32x4Shr_s, 0xe525, s_si)            \
+  V(I32x4Eq, 0xe526, s_ss)               \
+  V(I32x4Ne, 0xe527, s_ss)               \
+  V(I32x4Lt_s, 0xe528, s_ss)             \
+  V(I32x4Le_s, 0xe529, s_ss)             \
+  V(I32x4Gt_s, 0xe52a, s_ss)             \
+  V(I32x4Ge_s, 0xe52b, s_ss)             \
+  V(I32x4Select, 0xe52c, s_sss)          \
+  V(I32x4Swizzle, 0xe52d, s_s)           \
+  V(I32x4Shuffle, 0xe52e, s_ss)          \
+  V(I32x4FromFloat32x4, 0xe52f, s_s)     \
+  V(I32x4Min_u, 0xe530, s_ss)            \
+  V(I32x4Max_u, 0xe531, s_ss)            \
+  V(I32x4Shr_u, 0xe532, s_ss)            \
+  V(I32x4Lt_u, 0xe533, s_ss)             \
+  V(I32x4Le_u, 0xe534, s_ss)             \
+  V(I32x4Gt_u, 0xe535, s_ss)             \
+  V(I32x4Ge_u, 0xe536, s_ss)             \
+  V(Ui32x4FromFloat32x4, 0xe537, s_s)    \
+  V(I16x8Splat, 0xe538, s_i)             \
+  V(I16x8ReplaceLane, 0xe53a, s_sii)     \
+  V(I16x8Neg, 0xe53b, s_s)               \
+  V(I16x8Add, 0xe53c, s_ss)              \
+  V(I16x8AddSaturate_s, 0xe53d, s_ss)    \
+  V(I16x8Sub, 0xe53e, s_ss)              \
+  V(I16x8SubSaturate_s, 0xe53f, s_ss)    \
+  V(I16x8Mul, 0xe540, s_ss)              \
+  V(I16x8Min_s, 0xe541, s_ss)            \
+  V(I16x8Max_s, 0xe542, s_ss)            \
+  V(I16x8Shl, 0xe543, s_si)              \
+  V(I16x8Shr_s, 0xe544, s_si)            \
+  V(I16x8Eq, 0xe545, s_ss)               \
+  V(I16x8Ne, 0xe546, s_ss)               \
+  V(I16x8Lt_s, 0xe547, s_ss)             \
+  V(I16x8Le_s, 0xe548, s_ss)             \
+  V(I16x8Gt_s, 0xe549, s_ss)             \
+  V(I16x8Ge_s, 0xe54a, s_ss)             \
+  V(I16x8Select, 0xe54b, s_sss)          \
+  V(I16x8Swizzle, 0xe54c, s_s)           \
+  V(I16x8Shuffle, 0xe54d, s_ss)          \
+  V(I16x8AddSaturate_u, 0xe54e, s_ss)    \
+  V(I16x8SubSaturate_u, 0xe54f, s_ss)    \
+  V(I16x8Min_u, 0xe550, s_ss)            \
+  V(I16x8Max_u, 0xe551, s_ss)            \
+  V(I16x8Shr_u, 0xe552, s_si)            \
+  V(I16x8Lt_u, 0xe553, s_ss)             \
+  V(I16x8Le_u, 0xe554, s_ss)             \
+  V(I16x8Gt_u, 0xe555, s_ss)             \
+  V(I16x8Ge_u, 0xe556, s_ss)             \
+  V(I8x16Splat, 0xe557, s_i)             \
+  V(I8x16ReplaceLane, 0xe559, s_sii)     \
+  V(I8x16Neg, 0xe55a, s_s)               \
+  V(I8x16Add, 0xe55b, s_ss)              \
+  V(I8x16AddSaturate_s, 0xe55c, s_ss)    \
+  V(I8x16Sub, 0xe55d, s_ss)              \
+  V(I8x16SubSaturate_s, 0xe55e, s_ss)    \
+  V(I8x16Mul, 0xe55f, s_ss)              \
+  V(I8x16Min_s, 0xe560, s_ss)            \
+  V(I8x16Max_s, 0xe561, s_ss)            \
+  V(I8x16Shl, 0xe562, s_si)              \
+  V(I8x16Shr_s, 0xe563, s_si)            \
+  V(I8x16Eq, 0xe564, s_ss)               \
+  V(I8x16Neq, 0xe565, s_ss)              \
+  V(I8x16Lt_s, 0xe566, s_ss)             \
+  V(I8x16Le_s, 0xe567, s_ss)             \
+  V(I8x16Gt_s, 0xe568, s_ss)             \
+  V(I8x16Ge_s, 0xe569, s_ss)             \
+  V(I8x16Select, 0xe56a, s_sss)          \
+  V(I8x16Swizzle, 0xe56b, s_s)           \
+  V(I8x16Shuffle, 0xe56c, s_ss)          \
+  V(I8x16AddSaturate_u, 0xe56d, s_ss)    \
+  V(I8x16Sub_saturate_u, 0xe56e, s_ss)   \
+  V(I8x16Min_u, 0xe56f, s_ss)            \
+  V(I8x16Max_u, 0xe570, s_ss)            \
+  V(I8x16Shr_u, 0xe571, s_ss)            \
+  V(I8x16Lt_u, 0xe572, s_ss)             \
+  V(I8x16Le_u, 0xe573, s_ss)             \
+  V(I8x16Gt_u, 0xe574, s_ss)             \
+  V(I8x16Ge_u, 0xe575, s_ss)             \
+  V(S128And, 0xe576, s_ss)               \
+  V(S128Ior, 0xe577, s_ss)               \
+  V(S128Xor, 0xe578, s_ss)               \
   V(S128Not, 0xe579, s_s)
 
+#define FOREACH_SIMD_1_OPERAND_OPCODE(V) \
+  V(F32x4ExtractLane, 0xe501, _)         \
+  V(I32x4ExtractLane, 0xe51c, _)         \
+  V(I16x8ExtractLane, 0xe539, _)         \
+  V(I8x16ExtractLane, 0xe558, _)
+
 // All opcodes.
-#define FOREACH_OPCODE(V)        \
-  FOREACH_CONTROL_OPCODE(V)      \
-  FOREACH_MISC_OPCODE(V)         \
-  FOREACH_SIMPLE_OPCODE(V)       \
-  FOREACH_SIMPLE_MEM_OPCODE(V)   \
-  FOREACH_STORE_MEM_OPCODE(V)    \
-  FOREACH_LOAD_MEM_OPCODE(V)     \
-  FOREACH_MISC_MEM_OPCODE(V)     \
-  FOREACH_ASMJS_COMPAT_OPCODE(V) \
-  FOREACH_SIMD_OPCODE(V)
+#define FOREACH_OPCODE(V)          \
+  FOREACH_CONTROL_OPCODE(V)        \
+  FOREACH_MISC_OPCODE(V)           \
+  FOREACH_SIMPLE_OPCODE(V)         \
+  FOREACH_SIMPLE_MEM_OPCODE(V)     \
+  FOREACH_STORE_MEM_OPCODE(V)      \
+  FOREACH_LOAD_MEM_OPCODE(V)       \
+  FOREACH_MISC_MEM_OPCODE(V)       \
+  FOREACH_ASMJS_COMPAT_OPCODE(V)   \
+  FOREACH_SIMD_0_OPERAND_OPCODE(V) \
+  FOREACH_SIMD_1_OPERAND_OPCODE(V)
 
 // All signatures.
 #define FOREACH_SIGNATURE(V)         \
@@ -443,12 +447,10 @@
 #define FOREACH_SIMD_SIGNATURE(V)                  \
   V(s_s, kAstS128, kAstS128)                       \
   V(s_f, kAstS128, kAstF32)                        \
-  V(f_si, kAstF32, kAstS128, kAstI32)              \
   V(s_sif, kAstS128, kAstS128, kAstI32, kAstF32)   \
   V(s_ss, kAstS128, kAstS128, kAstS128)            \
   V(s_sss, kAstS128, kAstS128, kAstS128, kAstS128) \
   V(s_i, kAstS128, kAstI32)                        \
-  V(i_si, kAstI32, kAstS128, kAstI32)              \
   V(s_sii, kAstS128, kAstS128, kAstI32, kAstI32)   \
   V(s_si, kAstS128, kAstS128, kAstI32)
 
@@ -489,6 +491,8 @@
   static const char* OpcodeName(WasmOpcode opcode);
   static const char* ShortOpcodeName(WasmOpcode opcode);
   static FunctionSig* Signature(WasmOpcode opcode);
+  static FunctionSig* AsmjsSignature(WasmOpcode opcode);
+  static bool IsPrefixOpcode(WasmOpcode opcode);
 
   static int TrapReasonToMessageId(TrapReason reason);
   static const char* TrapReasonMessage(TrapReason reason);
@@ -497,6 +501,8 @@
     return 1 << ElementSizeLog2Of(type.representation());
   }
 
+  static byte MemSize(LocalType type) { return 1 << ElementSizeLog2Of(type); }
+
   static LocalTypeCode LocalTypeCodeFor(LocalType type) {
     switch (type) {
       case kAstI32:
@@ -507,10 +513,10 @@
         return kLocalF32;
       case kAstF64:
         return kLocalF64;
-      case kAstStmt:
-        return kLocalVoid;
       case kAstS128:
         return kLocalS128;
+      case kAstStmt:
+        return kLocalVoid;
       default:
         UNREACHABLE();
         return kLocalVoid;
diff --git a/src/wasm/wasm-result.cc b/src/wasm/wasm-result.cc
index 30268ac..7d251f0 100644
--- a/src/wasm/wasm-result.cc
+++ b/src/wasm/wasm-result.cc
@@ -27,15 +27,13 @@
   return os;
 }
 
-void ErrorThrower::Error(const char* format, ...) {
+void ErrorThrower::Format(i::Handle<i::JSFunction> constructor,
+                          const char* format, va_list args) {
   // Only report the first error.
   if (error()) return;
 
   char buffer[256];
-  va_list arguments;
-  va_start(arguments, format);
-  base::OS::VSNPrintF(buffer, 255, format, arguments);
-  va_end(arguments);
+  base::OS::VSNPrintF(buffer, 255, format, args);
 
   std::ostringstream str;
   if (context_ != nullptr) {
@@ -43,12 +41,39 @@
   }
   str << buffer;
 
-  message_ = isolate_->factory()->NewStringFromAsciiChecked(str.str().c_str());
+  i::Handle<i::String> message =
+      isolate_->factory()->NewStringFromAsciiChecked(str.str().c_str());
+  exception_ = isolate_->factory()->NewError(constructor, message);
+}
+
+void ErrorThrower::Error(const char* format, ...) {
+  if (error()) return;
+  va_list arguments;
+  va_start(arguments, format);
+  Format(isolate_->error_function(), format, arguments);
+  va_end(arguments);
+}
+
+void ErrorThrower::TypeError(const char* format, ...) {
+  if (error()) return;
+  va_list arguments;
+  va_start(arguments, format);
+  Format(isolate_->type_error_function(), format, arguments);
+  va_end(arguments);
+}
+
+void ErrorThrower::RangeError(const char* format, ...) {
+  if (error()) return;
+  va_list arguments;
+  va_start(arguments, format);
+  CHECK(*isolate_->range_error_function() != *isolate_->type_error_function());
+  Format(isolate_->range_error_function(), format, arguments);
+  va_end(arguments);
 }
 
 ErrorThrower::~ErrorThrower() {
   if (error() && !isolate_->has_pending_exception()) {
-    isolate_->ScheduleThrow(*message_);
+    isolate_->ScheduleThrow(*exception_);
   }
 }
 }  // namespace wasm
diff --git a/src/wasm/wasm-result.h b/src/wasm/wasm-result.h
index f16c159..ecc54e5 100644
--- a/src/wasm/wasm-result.h
+++ b/src/wasm/wasm-result.h
@@ -22,19 +22,7 @@
 // Error codes for programmatic checking of the decoder's verification.
 enum ErrorCode {
   kSuccess,
-  kError,                 // TODO(titzer): remove me
-  kOutOfMemory,           // decoder ran out of memory
-  kEndOfCode,             // end of code reached prematurely
-  kInvalidOpcode,         // found invalid opcode
-  kUnreachableCode,       // found unreachable code
-  kImproperContinue,      // improperly nested continue
-  kImproperBreak,         // improperly nested break
-  kReturnCount,           // return count mismatch
-  kTypeError,             // type mismatch
-  kInvalidLocalIndex,     // invalid local
-  kInvalidGlobalIndex,    // invalid global
-  kInvalidFunctionIndex,  // invalid function
-  kInvalidMemType         // invalid memory type
+  kError,  // TODO(titzer): introduce real error codes
 };
 
 // The overall result of decoding a function or a module.
@@ -97,33 +85,37 @@
 std::ostream& operator<<(std::ostream& os, const ErrorCode& error_code);
 
 // A helper for generating error messages that bubble up to JS exceptions.
-class ErrorThrower {
+class V8_EXPORT_PRIVATE ErrorThrower {
  public:
-  ErrorThrower(Isolate* isolate, const char* context)
+  ErrorThrower(i::Isolate* isolate, const char* context)
       : isolate_(isolate), context_(context) {}
   ~ErrorThrower();
 
   PRINTF_FORMAT(2, 3) void Error(const char* fmt, ...);
+  PRINTF_FORMAT(2, 3) void TypeError(const char* fmt, ...);
+  PRINTF_FORMAT(2, 3) void RangeError(const char* fmt, ...);
 
   template <typename T>
   void Failed(const char* error, Result<T>& result) {
     std::ostringstream str;
     str << error << result;
-    return Error("%s", str.str().c_str());
+    Error("%s", str.str().c_str());
   }
 
-  i::Handle<i::String> Reify() {
-    auto result = message_;
-    message_ = i::Handle<i::String>();
+  i::Handle<i::Object> Reify() {
+    i::Handle<i::Object> result = exception_;
+    exception_ = i::Handle<i::Object>::null();
     return result;
   }
 
-  bool error() const { return !message_.is_null(); }
+  bool error() const { return !exception_.is_null(); }
 
  private:
-  Isolate* isolate_;
+  void Format(i::Handle<i::JSFunction> constructor, const char* fmt, va_list);
+
+  i::Isolate* isolate_;
   const char* context_;
-  i::Handle<i::String> message_;
+  i::Handle<i::Object> exception_;
 };
 }  // namespace wasm
 }  // namespace internal
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 9a0d18e..d202aad 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -79,6 +79,7 @@
   if (cross_compile) return;
 
   if (cpu.has_sse41() && FLAG_enable_sse4_1) supported_ |= 1u << SSE4_1;
+  if (cpu.has_ssse3() && FLAG_enable_ssse3) supported_ |= 1u << SSSE3;
   if (cpu.has_sse3() && FLAG_enable_sse3) supported_ |= 1u << SSE3;
   // SAHF is not generally available in long mode.
   if (cpu.has_sahf() && FLAG_enable_sahf) supported_ |= 1u << SAHF;
@@ -105,13 +106,15 @@
 void CpuFeatures::PrintTarget() { }
 void CpuFeatures::PrintFeatures() {
   printf(
-      "SSE3=%d SSE4_1=%d SAHF=%d AVX=%d FMA3=%d BMI1=%d BMI2=%d LZCNT=%d "
+      "SSE3=%d SSSE3=%d SSE4_1=%d SAHF=%d AVX=%d FMA3=%d BMI1=%d BMI2=%d "
+      "LZCNT=%d "
       "POPCNT=%d ATOM=%d\n",
-      CpuFeatures::IsSupported(SSE3), CpuFeatures::IsSupported(SSE4_1),
-      CpuFeatures::IsSupported(SAHF), CpuFeatures::IsSupported(AVX),
-      CpuFeatures::IsSupported(FMA3), CpuFeatures::IsSupported(BMI1),
-      CpuFeatures::IsSupported(BMI2), CpuFeatures::IsSupported(LZCNT),
-      CpuFeatures::IsSupported(POPCNT), CpuFeatures::IsSupported(ATOM));
+      CpuFeatures::IsSupported(SSE3), CpuFeatures::IsSupported(SSSE3),
+      CpuFeatures::IsSupported(SSE4_1), CpuFeatures::IsSupported(SAHF),
+      CpuFeatures::IsSupported(AVX), CpuFeatures::IsSupported(FMA3),
+      CpuFeatures::IsSupported(BMI1), CpuFeatures::IsSupported(BMI2),
+      CpuFeatures::IsSupported(LZCNT), CpuFeatures::IsSupported(POPCNT),
+      CpuFeatures::IsSupported(ATOM));
 }
 
 // -----------------------------------------------------------------------------
@@ -2834,6 +2837,77 @@
   emit(imm8);
 }
 
+void Assembler::pextrb(Register dst, XMMRegister src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(src, dst);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x14);
+  emit_sse_operand(src, dst);
+  emit(imm8);
+}
+
+void Assembler::pextrb(const Operand& dst, XMMRegister src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(src, dst);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x14);
+  emit_sse_operand(src, dst);
+  emit(imm8);
+}
+
+void Assembler::pinsrw(XMMRegister dst, Register src, int8_t imm8) {
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0xC4);
+  emit_sse_operand(dst, src);
+  emit(imm8);
+}
+
+void Assembler::pinsrw(XMMRegister dst, const Operand& src, int8_t imm8) {
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0xC4);
+  emit_sse_operand(dst, src);
+  emit(imm8);
+}
+
+void Assembler::pextrw(Register dst, XMMRegister src, int8_t imm8) {
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(src, dst);
+  emit(0x0F);
+  emit(0xC5);
+  emit_sse_operand(src, dst);
+  emit(imm8);
+}
+
+void Assembler::pextrw(const Operand& dst, XMMRegister src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  DCHECK(is_uint8(imm8));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(src, dst);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x15);
+  emit_sse_operand(src, dst);
+  emit(imm8);
+}
 
 void Assembler::pextrd(Register dst, XMMRegister src, int8_t imm8) {
   DCHECK(IsEnabled(SSE4_1));
@@ -2847,6 +2921,17 @@
   emit(imm8);
 }
 
+void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(src, dst);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x16);
+  emit_sse_operand(src, dst);
+  emit(imm8);
+}
 
 void Assembler::pinsrd(XMMRegister dst, Register src, int8_t imm8) {
   DCHECK(IsEnabled(SSE4_1));
@@ -2873,6 +2958,30 @@
   emit(imm8);
 }
 
+void Assembler::pinsrb(XMMRegister dst, Register src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x20);
+  emit_sse_operand(dst, src);
+  emit(imm8);
+}
+
+void Assembler::pinsrb(XMMRegister dst, const Operand& src, int8_t imm8) {
+  DCHECK(IsEnabled(SSE4_1));
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x3A);
+  emit(0x20);
+  emit_sse_operand(dst, src);
+  emit(imm8);
+}
+
 void Assembler::insertps(XMMRegister dst, XMMRegister src, byte imm8) {
   DCHECK(CpuFeatures::IsSupported(SSE4_1));
   DCHECK(is_uint8(imm8));
@@ -3202,6 +3311,15 @@
   emit(imm8);
 }
 
+void Assembler::psllw(XMMRegister reg, byte imm8) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(reg);
+  emit(0x0F);
+  emit(0x71);
+  emit_sse_operand(rsi, reg);  // rsi == 6
+  emit(imm8);
+}
 
 void Assembler::pslld(XMMRegister reg, byte imm8) {
   EnsureSpace ensure_space(this);
@@ -3213,6 +3331,15 @@
   emit(imm8);
 }
 
+void Assembler::psrlw(XMMRegister reg, byte imm8) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(reg);
+  emit(0x0F);
+  emit(0x71);
+  emit_sse_operand(rdx, reg);  // rdx == 2
+  emit(imm8);
+}
 
 void Assembler::psrld(XMMRegister reg, byte imm8) {
   EnsureSpace ensure_space(this);
@@ -3224,6 +3351,26 @@
   emit(imm8);
 }
 
+void Assembler::psraw(XMMRegister reg, byte imm8) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(reg);
+  emit(0x0F);
+  emit(0x71);
+  emit_sse_operand(rsp, reg);  // rsp == 4
+  emit(imm8);
+}
+
+void Assembler::psrad(XMMRegister reg, byte imm8) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(reg);
+  emit(0x0F);
+  emit(0x72);
+  emit_sse_operand(rsp, reg);  // rsp == 4
+  emit(imm8);
+}
+
 void Assembler::cmpps(XMMRegister dst, XMMRegister src, int8_t cmp) {
   EnsureSpace ensure_space(this);
   emit_optional_rex_32(dst, src);
@@ -3789,17 +3936,6 @@
 }
 
 
-void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
-  DCHECK(!IsEnabled(AVX));
-  EnsureSpace ensure_space(this);
-  emit(0x66);
-  emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x76);
-  emit_sse_operand(dst, src);
-}
-
-
 void Assembler::punpckldq(XMMRegister dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit(0x66);
@@ -3926,9 +4062,9 @@
   emit_sse_operand(src, dst);
 }
 
-
-void Assembler::vsd(byte op, XMMRegister dst, XMMRegister src1,
-                    XMMRegister src2, SIMDPrefix pp, LeadingOpcode m, VexW w) {
+void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1,
+                       XMMRegister src2, SIMDPrefix pp, LeadingOpcode m,
+                       VexW w) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit_vex_prefix(dst, src1, src2, kLIG, pp, m, w);
@@ -3936,10 +4072,9 @@
   emit_sse_operand(dst, src2);
 }
 
-
-void Assembler::vsd(byte op, XMMRegister dst, XMMRegister src1,
-                    const Operand& src2, SIMDPrefix pp, LeadingOpcode m,
-                    VexW w) {
+void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1,
+                       const Operand& src2, SIMDPrefix pp, LeadingOpcode m,
+                       VexW w) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit_vex_prefix(dst, src1, src2, kLIG, pp, m, w);
@@ -4409,78 +4544,81 @@
   emit_sse_operand(src, dst);
 }
 
-void Assembler::paddd(XMMRegister dst, XMMRegister src) {
+void Assembler::sse2_instr(XMMRegister dst, XMMRegister src, byte prefix,
+                           byte escape, byte opcode) {
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0xFE);
+  emit(escape);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::paddd(XMMRegister dst, const Operand& src) {
+void Assembler::sse2_instr(XMMRegister dst, const Operand& src, byte prefix,
+                           byte escape, byte opcode) {
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0xFE);
+  emit(escape);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::psubd(XMMRegister dst, XMMRegister src) {
+void Assembler::ssse3_instr(XMMRegister dst, XMMRegister src, byte prefix,
+                            byte escape1, byte escape2, byte opcode) {
+  DCHECK(IsEnabled(SSSE3));
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0xFA);
+  emit(escape1);
+  emit(escape2);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::psubd(XMMRegister dst, const Operand& src) {
+void Assembler::ssse3_instr(XMMRegister dst, const Operand& src, byte prefix,
+                            byte escape1, byte escape2, byte opcode) {
+  DCHECK(IsEnabled(SSSE3));
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0xFA);
+  emit(escape1);
+  emit(escape2);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
+void Assembler::sse4_instr(XMMRegister dst, XMMRegister src, byte prefix,
+                           byte escape1, byte escape2, byte opcode) {
   DCHECK(IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x38);
-  emit(0x40);
+  emit(escape1);
+  emit(escape2);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::pmulld(XMMRegister dst, const Operand& src) {
+void Assembler::sse4_instr(XMMRegister dst, const Operand& src, byte prefix,
+                           byte escape1, byte escape2, byte opcode) {
+  DCHECK(IsEnabled(SSE4_1));
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(prefix);
   emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x38);
-  emit(0x40);
+  emit(escape1);
+  emit(escape2);
+  emit(opcode);
   emit_sse_operand(dst, src);
 }
 
-void Assembler::pmuludq(XMMRegister dst, XMMRegister src) {
+void Assembler::lddqu(XMMRegister dst, const Operand& src) {
+  DCHECK(IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
-  emit(0x66);
+  emit(0xF2);
   emit_optional_rex_32(dst, src);
   emit(0x0F);
-  emit(0xF4);
-  emit_sse_operand(dst, src);
-}
-
-void Assembler::pmuludq(XMMRegister dst, const Operand& src) {
-  EnsureSpace ensure_space(this);
-  emit(0x66);
-  emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0xF4);
+  emit(0xF0);
   emit_sse_operand(dst, src);
 }
 
@@ -4494,24 +4632,6 @@
   emit(shift);
 }
 
-void Assembler::cvtps2dq(XMMRegister dst, XMMRegister src) {
-  EnsureSpace ensure_space(this);
-  emit(0x66);
-  emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x5B);
-  emit_sse_operand(dst, src);
-}
-
-void Assembler::cvtps2dq(XMMRegister dst, const Operand& src) {
-  EnsureSpace ensure_space(this);
-  emit(0x66);
-  emit_optional_rex_32(dst, src);
-  emit(0x0F);
-  emit(0x5B);
-  emit_sse_operand(dst, src);
-}
-
 void Assembler::pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
   EnsureSpace ensure_space(this);
   emit(0x66);
@@ -4522,6 +4642,16 @@
   emit(shuffle);
 }
 
+void Assembler::pshufd(XMMRegister dst, const Operand& src, uint8_t shuffle) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit_optional_rex_32(dst, src);
+  emit(0x0F);
+  emit(0x70);
+  emit_sse_operand(dst, src);
+  emit(shuffle);
+}
+
 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
   Register ireg = { reg.code() };
   emit_operand(ireg, adr);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index b2154fb..5de891c 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -40,6 +40,7 @@
 #include <deque>
 
 #include "src/assembler.h"
+#include "src/x64/sse-instr.h"
 
 namespace v8 {
 namespace internal {
@@ -1072,7 +1073,91 @@
 
   void movmskps(Register dst, XMMRegister src);
 
+  void vinstr(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2,
+              SIMDPrefix pp, LeadingOpcode m, VexW w);
+  void vinstr(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2,
+              SIMDPrefix pp, LeadingOpcode m, VexW w);
+
   // SSE2 instructions
+  void sse2_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape,
+                  byte opcode);
+  void sse2_instr(XMMRegister dst, const Operand& src, byte prefix, byte escape,
+                  byte opcode);
+#define DECLARE_SSE2_INSTRUCTION(instruction, prefix, escape, opcode) \
+  void instruction(XMMRegister dst, XMMRegister src) {                \
+    sse2_instr(dst, src, 0x##prefix, 0x##escape, 0x##opcode);         \
+  }                                                                   \
+  void instruction(XMMRegister dst, const Operand& src) {             \
+    sse2_instr(dst, src, 0x##prefix, 0x##escape, 0x##opcode);         \
+  }
+
+  SSE2_INSTRUCTION_LIST(DECLARE_SSE2_INSTRUCTION)
+#undef DECLARE_SSE2_INSTRUCTION
+
+#define DECLARE_SSE2_AVX_INSTRUCTION(instruction, prefix, escape, opcode)    \
+  void v##instruction(XMMRegister dst, XMMRegister src1, XMMRegister src2) { \
+    vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0);          \
+  }                                                                          \
+  void v##instruction(XMMRegister dst, XMMRegister src1,                     \
+                      const Operand& src2) {                                 \
+    vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape, kW0);          \
+  }
+
+  SSE2_INSTRUCTION_LIST(DECLARE_SSE2_AVX_INSTRUCTION)
+#undef DECLARE_SSE2_AVX_INSTRUCTION
+
+  // SSE3
+  void lddqu(XMMRegister dst, const Operand& src);
+
+  // SSSE3
+  void ssse3_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape1,
+                   byte escape2, byte opcode);
+  void ssse3_instr(XMMRegister dst, const Operand& src, byte prefix,
+                   byte escape1, byte escape2, byte opcode);
+
+#define DECLARE_SSSE3_INSTRUCTION(instruction, prefix, escape1, escape2,     \
+                                  opcode)                                    \
+  void instruction(XMMRegister dst, XMMRegister src) {                       \
+    ssse3_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
+  }                                                                          \
+  void instruction(XMMRegister dst, const Operand& src) {                    \
+    ssse3_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
+  }
+
+  SSSE3_INSTRUCTION_LIST(DECLARE_SSSE3_INSTRUCTION)
+#undef DECLARE_SSSE3_INSTRUCTION
+
+  // SSE4
+  void sse4_instr(XMMRegister dst, XMMRegister src, byte prefix, byte escape1,
+                  byte escape2, byte opcode);
+  void sse4_instr(XMMRegister dst, const Operand& src, byte prefix,
+                  byte escape1, byte escape2, byte opcode);
+#define DECLARE_SSE4_INSTRUCTION(instruction, prefix, escape1, escape2,     \
+                                 opcode)                                    \
+  void instruction(XMMRegister dst, XMMRegister src) {                      \
+    sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
+  }                                                                         \
+  void instruction(XMMRegister dst, const Operand& src) {                   \
+    sse4_instr(dst, src, 0x##prefix, 0x##escape1, 0x##escape2, 0x##opcode); \
+  }
+
+  SSE4_INSTRUCTION_LIST(DECLARE_SSE4_INSTRUCTION)
+#undef DECLARE_SSE4_INSTRUCTION
+
+#define DECLARE_SSE34_AVX_INSTRUCTION(instruction, prefix, escape1, escape2,  \
+                                      opcode)                                 \
+  void v##instruction(XMMRegister dst, XMMRegister src1, XMMRegister src2) {  \
+    vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0); \
+  }                                                                           \
+  void v##instruction(XMMRegister dst, XMMRegister src1,                      \
+                      const Operand& src2) {                                  \
+    vinstr(0x##opcode, dst, src1, src2, k##prefix, k##escape1##escape2, kW0); \
+  }
+
+  SSSE3_INSTRUCTION_LIST(DECLARE_SSE34_AVX_INSTRUCTION)
+  SSE4_INSTRUCTION_LIST(DECLARE_SSE34_AVX_INSTRUCTION)
+#undef DECLARE_SSE34_AVX_INSTRUCTION
+
   void movd(XMMRegister dst, Register src);
   void movd(XMMRegister dst, const Operand& src);
   void movd(Register dst, XMMRegister src);
@@ -1101,8 +1186,12 @@
 
   void psllq(XMMRegister reg, byte imm8);
   void psrlq(XMMRegister reg, byte imm8);
+  void psllw(XMMRegister reg, byte imm8);
   void pslld(XMMRegister reg, byte imm8);
+  void psrlw(XMMRegister reg, byte imm8);
   void psrld(XMMRegister reg, byte imm8);
+  void psraw(XMMRegister reg, byte imm8);
+  void psrad(XMMRegister reg, byte imm8);
 
   void cvttsd2si(Register dst, const Operand& src);
   void cvttsd2si(Register dst, XMMRegister src);
@@ -1155,7 +1244,6 @@
   void ucomisd(XMMRegister dst, XMMRegister src);
   void ucomisd(XMMRegister dst, const Operand& src);
   void cmpltsd(XMMRegister dst, XMMRegister src);
-  void pcmpeqd(XMMRegister dst, XMMRegister src);
 
   void movmskpd(Register dst, XMMRegister src);
 
@@ -1166,7 +1254,16 @@
   // SSE 4.1 instruction
   void insertps(XMMRegister dst, XMMRegister src, byte imm8);
   void extractps(Register dst, XMMRegister src, byte imm8);
+  void pextrb(Register dst, XMMRegister src, int8_t imm8);
+  void pextrb(const Operand& dst, XMMRegister src, int8_t imm8);
+  void pextrw(Register dst, XMMRegister src, int8_t imm8);
+  void pextrw(const Operand& dst, XMMRegister src, int8_t imm8);
   void pextrd(Register dst, XMMRegister src, int8_t imm8);
+  void pextrd(const Operand& dst, XMMRegister src, int8_t imm8);
+  void pinsrb(XMMRegister dst, Register src, int8_t imm8);
+  void pinsrb(XMMRegister dst, const Operand& src, int8_t imm8);
+  void pinsrw(XMMRegister dst, Register src, int8_t imm8);
+  void pinsrw(XMMRegister dst, const Operand& src, int8_t imm8);
   void pinsrd(XMMRegister dst, Register src, int8_t imm8);
   void pinsrd(XMMRegister dst, const Operand& src, int8_t imm8);
 
@@ -1208,18 +1305,9 @@
   void movups(XMMRegister dst, XMMRegister src);
   void movups(XMMRegister dst, const Operand& src);
   void movups(const Operand& dst, XMMRegister src);
-  void paddd(XMMRegister dst, XMMRegister src);
-  void paddd(XMMRegister dst, const Operand& src);
-  void psubd(XMMRegister dst, XMMRegister src);
-  void psubd(XMMRegister dst, const Operand& src);
-  void pmulld(XMMRegister dst, XMMRegister src);
-  void pmulld(XMMRegister dst, const Operand& src);
-  void pmuludq(XMMRegister dst, XMMRegister src);
-  void pmuludq(XMMRegister dst, const Operand& src);
   void psrldq(XMMRegister dst, uint8_t shift);
   void pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle);
-  void cvtps2dq(XMMRegister dst, XMMRegister src);
-  void cvtps2dq(XMMRegister dst, const Operand& src);
+  void pshufd(XMMRegister dst, const Operand& src, uint8_t shuffle);
   void cvtdq2ps(XMMRegister dst, XMMRegister src);
   void cvtdq2ps(XMMRegister dst, const Operand& src);
 
@@ -1421,7 +1509,6 @@
   AVX_P_3(vand, 0x54);
   AVX_P_3(vor, 0x56);
   AVX_P_3(vxor, 0x57);
-  AVX_3(vpcmpeqd, 0x76, vpd);
   AVX_3(vcvtsd2ss, 0x5a, vsd);
 
 #undef AVX_3
@@ -1440,102 +1527,98 @@
     emit(imm8);
   }
   void vcvtss2sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-    vsd(0x5a, dst, src1, src2, kF3, k0F, kWIG);
+    vinstr(0x5a, dst, src1, src2, kF3, k0F, kWIG);
   }
   void vcvtss2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(0x5a, dst, src1, src2, kF3, k0F, kWIG);
+    vinstr(0x5a, dst, src1, src2, kF3, k0F, kWIG);
   }
   void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, Register src2) {
     XMMRegister isrc2 = {src2.code()};
-    vsd(0x2a, dst, src1, isrc2, kF2, k0F, kW0);
+    vinstr(0x2a, dst, src1, isrc2, kF2, k0F, kW0);
   }
   void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(0x2a, dst, src1, src2, kF2, k0F, kW0);
+    vinstr(0x2a, dst, src1, src2, kF2, k0F, kW0);
   }
   void vcvtlsi2ss(XMMRegister dst, XMMRegister src1, Register src2) {
     XMMRegister isrc2 = {src2.code()};
-    vsd(0x2a, dst, src1, isrc2, kF3, k0F, kW0);
+    vinstr(0x2a, dst, src1, isrc2, kF3, k0F, kW0);
   }
   void vcvtlsi2ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(0x2a, dst, src1, src2, kF3, k0F, kW0);
+    vinstr(0x2a, dst, src1, src2, kF3, k0F, kW0);
   }
   void vcvtqsi2ss(XMMRegister dst, XMMRegister src1, Register src2) {
     XMMRegister isrc2 = {src2.code()};
-    vsd(0x2a, dst, src1, isrc2, kF3, k0F, kW1);
+    vinstr(0x2a, dst, src1, isrc2, kF3, k0F, kW1);
   }
   void vcvtqsi2ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(0x2a, dst, src1, src2, kF3, k0F, kW1);
+    vinstr(0x2a, dst, src1, src2, kF3, k0F, kW1);
   }
   void vcvtqsi2sd(XMMRegister dst, XMMRegister src1, Register src2) {
     XMMRegister isrc2 = {src2.code()};
-    vsd(0x2a, dst, src1, isrc2, kF2, k0F, kW1);
+    vinstr(0x2a, dst, src1, isrc2, kF2, k0F, kW1);
   }
   void vcvtqsi2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(0x2a, dst, src1, src2, kF2, k0F, kW1);
+    vinstr(0x2a, dst, src1, src2, kF2, k0F, kW1);
   }
   void vcvttss2si(Register dst, XMMRegister src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF3, k0F, kW0);
+    vinstr(0x2c, idst, xmm0, src, kF3, k0F, kW0);
   }
   void vcvttss2si(Register dst, const Operand& src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF3, k0F, kW0);
+    vinstr(0x2c, idst, xmm0, src, kF3, k0F, kW0);
   }
   void vcvttsd2si(Register dst, XMMRegister src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF2, k0F, kW0);
+    vinstr(0x2c, idst, xmm0, src, kF2, k0F, kW0);
   }
   void vcvttsd2si(Register dst, const Operand& src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF2, k0F, kW0);
+    vinstr(0x2c, idst, xmm0, src, kF2, k0F, kW0);
   }
   void vcvttss2siq(Register dst, XMMRegister src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF3, k0F, kW1);
+    vinstr(0x2c, idst, xmm0, src, kF3, k0F, kW1);
   }
   void vcvttss2siq(Register dst, const Operand& src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF3, k0F, kW1);
+    vinstr(0x2c, idst, xmm0, src, kF3, k0F, kW1);
   }
   void vcvttsd2siq(Register dst, XMMRegister src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF2, k0F, kW1);
+    vinstr(0x2c, idst, xmm0, src, kF2, k0F, kW1);
   }
   void vcvttsd2siq(Register dst, const Operand& src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2c, idst, xmm0, src, kF2, k0F, kW1);
+    vinstr(0x2c, idst, xmm0, src, kF2, k0F, kW1);
   }
   void vcvtsd2si(Register dst, XMMRegister src) {
     XMMRegister idst = {dst.code()};
-    vsd(0x2d, idst, xmm0, src, kF2, k0F, kW0);
+    vinstr(0x2d, idst, xmm0, src, kF2, k0F, kW0);
   }
   void vucomisd(XMMRegister dst, XMMRegister src) {
-    vsd(0x2e, dst, xmm0, src, k66, k0F, kWIG);
+    vinstr(0x2e, dst, xmm0, src, k66, k0F, kWIG);
   }
   void vucomisd(XMMRegister dst, const Operand& src) {
-    vsd(0x2e, dst, xmm0, src, k66, k0F, kWIG);
+    vinstr(0x2e, dst, xmm0, src, k66, k0F, kWIG);
   }
   void vroundss(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 RoundingMode mode) {
-    vsd(0x0a, dst, src1, src2, k66, k0F3A, kWIG);
+    vinstr(0x0a, dst, src1, src2, k66, k0F3A, kWIG);
     emit(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
   }
   void vroundsd(XMMRegister dst, XMMRegister src1, XMMRegister src2,
                 RoundingMode mode) {
-    vsd(0x0b, dst, src1, src2, k66, k0F3A, kWIG);
+    vinstr(0x0b, dst, src1, src2, k66, k0F3A, kWIG);
     emit(static_cast<byte>(mode) | 0x8);  // Mask precision exception.
   }
 
   void vsd(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-    vsd(op, dst, src1, src2, kF2, k0F, kWIG);
+    vinstr(op, dst, src1, src2, kF2, k0F, kWIG);
   }
   void vsd(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2) {
-    vsd(op, dst, src1, src2, kF2, k0F, kWIG);
+    vinstr(op, dst, src1, src2, kF2, k0F, kWIG);
   }
-  void vsd(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2,
-           SIMDPrefix pp, LeadingOpcode m, VexW w);
-  void vsd(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2,
-           SIMDPrefix pp, LeadingOpcode m, VexW w);
 
   void vmovss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
     vss(0x10, dst, src1, src2);
@@ -1616,6 +1699,101 @@
 
 #undef AVX_CMP_P
 
+  void vlddqu(XMMRegister dst, const Operand& src) {
+    vinstr(0xF0, dst, xmm0, src, kF2, k0F, kWIG);
+  }
+  void vpsllw(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {6};
+    vinstr(0x71, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpsrlw(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {2};
+    vinstr(0x71, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpsraw(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {4};
+    vinstr(0x71, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpslld(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {6};
+    vinstr(0x72, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpsrld(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {2};
+    vinstr(0x72, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpsrad(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    XMMRegister iop = {4};
+    vinstr(0x72, iop, dst, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+  void vpextrb(Register dst, XMMRegister src, int8_t imm8) {
+    XMMRegister idst = {dst.code()};
+    vinstr(0x14, src, xmm0, idst, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpextrb(const Operand& dst, XMMRegister src, int8_t imm8) {
+    vinstr(0x14, src, xmm0, dst, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpextrw(Register dst, XMMRegister src, int8_t imm8) {
+    XMMRegister idst = {dst.code()};
+    vinstr(0xc5, idst, xmm0, src, k66, k0F, kW0);
+    emit(imm8);
+  }
+  void vpextrw(const Operand& dst, XMMRegister src, int8_t imm8) {
+    vinstr(0x15, src, xmm0, dst, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpextrd(Register dst, XMMRegister src, int8_t imm8) {
+    XMMRegister idst = {dst.code()};
+    vinstr(0x16, src, xmm0, idst, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpextrd(const Operand& dst, XMMRegister src, int8_t imm8) {
+    vinstr(0x16, src, xmm0, dst, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpinsrb(XMMRegister dst, XMMRegister src1, Register src2, int8_t imm8) {
+    XMMRegister isrc = {src2.code()};
+    vinstr(0x20, dst, src1, isrc, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpinsrb(XMMRegister dst, XMMRegister src1, const Operand& src2,
+               int8_t imm8) {
+    vinstr(0x20, dst, src1, src2, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpinsrw(XMMRegister dst, XMMRegister src1, Register src2, int8_t imm8) {
+    XMMRegister isrc = {src2.code()};
+    vinstr(0xc4, dst, src1, isrc, k66, k0F, kW0);
+    emit(imm8);
+  }
+  void vpinsrw(XMMRegister dst, XMMRegister src1, const Operand& src2,
+               int8_t imm8) {
+    vinstr(0xc4, dst, src1, src2, k66, k0F, kW0);
+    emit(imm8);
+  }
+  void vpinsrd(XMMRegister dst, XMMRegister src1, Register src2, int8_t imm8) {
+    XMMRegister isrc = {src2.code()};
+    vinstr(0x22, dst, src1, isrc, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpinsrd(XMMRegister dst, XMMRegister src1, const Operand& src2,
+               int8_t imm8) {
+    vinstr(0x22, dst, src1, src2, k66, k0F3A, kW0);
+    emit(imm8);
+  }
+  void vpshufd(XMMRegister dst, XMMRegister src, int8_t imm8) {
+    vinstr(0x70, dst, xmm0, src, k66, k0F, kWIG);
+    emit(imm8);
+  }
+
   void vps(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
   void vps(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2);
   void vpd(byte op, XMMRegister dst, XMMRegister src1, XMMRegister src2);
@@ -1852,6 +2030,8 @@
   byte byte_at(int pos)  { return buffer_[pos]; }
   void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
 
+  Address pc() const { return pc_; }
+
  protected:
   // Call near indirect
   void call(const Operand& operand);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 4b5165a..2a962b3 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -1175,6 +1175,7 @@
   __ Pop(rdx);
   __ Pop(rdi);
   __ Pop(rax);
+  __ SmiToInteger32(rdx, rdx);
   __ SmiToInteger32(rax, rax);
 }
 
@@ -1189,7 +1190,6 @@
   // rdi : the function to call
   Isolate* isolate = masm->isolate();
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_initialize_count, done_increment_count;
 
   // Load the cache state into r11.
   __ SmiToInteger32(rdx, rdx);
@@ -1203,7 +1203,7 @@
   // type-feedback-vector.h).
   Label check_allocation_site;
   __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
-  __ j(equal, &done_increment_count, Label::kFar);
+  __ j(equal, &done, Label::kFar);
   __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
   __ j(equal, &done, Label::kFar);
   __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
@@ -1227,7 +1227,7 @@
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11);
   __ cmpp(rdi, r11);
   __ j(not_equal, &megamorphic);
-  __ jmp(&done_increment_count);
+  __ jmp(&done);
 
   __ bind(&miss);
 
@@ -1253,29 +1253,17 @@
 
   CreateAllocationSiteStub create_stub(isolate);
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ jmp(&done_initialize_count);
+  __ jmp(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(isolate);
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
-  __ bind(&done_initialize_count);
-  // Initialize the call counter.
-  __ SmiToInteger32(rdx, rdx);
-  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
-                       FixedArray::kHeaderSize + kPointerSize),
-          Smi::FromInt(1));
-  __ jmp(&done);
-
-  __ bind(&done_increment_count);
-
-  // Increment the call count for monomorphic function calls.
+  __ bind(&done);
+  // Increment the call count for all function calls.
   __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize),
                     Smi::FromInt(1));
-
-  __ bind(&done);
-  __ Integer32ToSmi(rdx, rdx);
 }
 
 
@@ -1294,7 +1282,6 @@
 
   GenerateRecordCallTarget(masm);
 
-  __ SmiToInteger32(rdx, rdx);
   Label feedback_register_initialized;
   // Put the AllocationSite from the feedback vector into rbx, or undefined.
   __ movp(rbx,
@@ -1321,6 +1308,12 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ SmiAddConstant(FieldOperand(feedback_vector, slot, times_pointer_size,
+                                 FixedArray::kHeaderSize + kPointerSize),
+                    Smi::FromInt(1));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // rdi - function
@@ -1334,9 +1327,7 @@
   __ movp(rax, Immediate(arg_count()));
 
   // Increment the call count for monomorphic function calls.
-  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
-                                 FixedArray::kHeaderSize + kPointerSize),
-                    Smi::FromInt(1));
+  IncrementCallCount(masm, rbx, rdx);
 
   __ movp(rbx, rcx);
   __ movp(rdx, rdi);
@@ -1352,7 +1343,7 @@
   // -- rbx - vector
   // -----------------------------------
   Isolate* isolate = masm->isolate();
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   StackArgumentsAccessor args(rsp, argc);
   ParameterCount actual(argc);
@@ -1383,12 +1374,10 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(rdi, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
-                                 FixedArray::kHeaderSize + kPointerSize),
-                    Smi::FromInt(1));
-
   __ bind(&call_function);
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, rbx, rdx);
+
   __ Set(rax, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1428,6 +1417,11 @@
           TypeFeedbackVector::MegamorphicSentinel(isolate));
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, rbx, rdx);
+
+  __ bind(&call_count_incremented);
   __ Set(rax, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -1453,11 +1447,6 @@
   __ cmpp(rcx, NativeContextOperand());
   __ j(not_equal, &miss);
 
-  // Initialize the call counter.
-  __ Move(FieldOperand(rbx, rdx, times_pointer_size,
-                       FixedArray::kHeaderSize + kPointerSize),
-          Smi::FromInt(1));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // rbx - vector
   // rdx - slot (needs to be in smi form)
@@ -1467,11 +1456,16 @@
     CreateWeakCellStub create_stub(isolate);
 
     __ Integer32ToSmi(rdx, rdx);
+    __ Push(rbx);
+    __ Push(rdx);
     __ Push(rdi);
     __ Push(rsi);
     __ CallStub(&create_stub);
     __ Pop(rsi);
     __ Pop(rdi);
+    __ Pop(rdx);
+    __ Pop(rbx);
+    __ SmiToInteger32(rdx, rdx);
   }
 
   __ jmp(&call_function);
@@ -1481,20 +1475,19 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ jmp(&call);
+  __ jmp(&call_count_incremented);
 
   // Unreachable
   __ int3();
 }
 
-
 void CallICStub::GenerateMiss(MacroAssembler* masm) {
   FrameScope scope(masm, StackFrame::INTERNAL);
 
   // Push the receiver and the function and feedback info.
+  __ Integer32ToSmi(rdx, rdx);
   __ Push(rdi);
   __ Push(rbx);
-  __ Integer32ToSmi(rdx, rdx);
   __ Push(rdx);
 
   // Call the entry.
@@ -1504,7 +1497,6 @@
   __ movp(rdi, rax);
 }
 
-
 bool CEntryStub::NeedsImmovableCode() {
   return false;
 }
@@ -2020,296 +2012,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  rsp[0]  : return address
-  //  rsp[8]  : to
-  //  rsp[16] : from
-  //  rsp[24] : string
-
-  enum SubStringStubArgumentIndices {
-    STRING_ARGUMENT_INDEX,
-    FROM_ARGUMENT_INDEX,
-    TO_ARGUMENT_INDEX,
-    SUB_STRING_ARGUMENT_COUNT
-  };
-
-  StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
-                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
-
-  // Make sure first argument is a string.
-  __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
-  STATIC_ASSERT(kSmiTag == 0);
-  __ testl(rax, Immediate(kSmiTagMask));
-  __ j(zero, &runtime);
-  Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
-  __ j(NegateCondition(is_string), &runtime);
-
-  // rax: string
-  // rbx: instance type
-  // Calculate length of sub string using the smi values.
-  __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
-  __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
-  __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
-
-  __ SmiSub(rcx, rcx, rdx);  // Overflow doesn't happen.
-  __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
-  Label not_original_string;
-  // Shorter than original string's length: an actual substring.
-  __ j(below, &not_original_string, Label::kNear);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ j(above, &runtime);
-  // Return original string.
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
-  __ bind(&not_original_string);
-
-  Label single_char;
-  __ SmiCompare(rcx, Smi::FromInt(1));
-  __ j(equal, &single_char);
-
-  __ SmiToInteger32(rcx, rcx);
-
-  // rax: string
-  // rbx: instance type
-  // rcx: sub string length
-  // rdx: from index (smi)
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into edi.
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ testb(rbx, Immediate(kIsIndirectStringMask));
-  __ j(zero, &seq_or_external_string, Label::kNear);
-
-  __ testb(rbx, Immediate(kSlicedNotConsMask));
-  __ j(not_zero, &sliced_string, Label::kNear);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  // Flat cons strings have an empty second part.
-  __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
-                 Heap::kempty_stringRootIndex);
-  __ j(not_equal, &runtime);
-  __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
-  // Update instance type.
-  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
-  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and correct start index by offset.
-  __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
-  __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
-  // Update instance type.
-  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
-  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the correct register.
-  __ movp(rdi, rax);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // rdi: underlying subject string
-    // rbx: instance type of underlying subject string
-    // rdx: adjusted start index (smi)
-    // rcx: length
-    // If coming from the make_two_character_string path, the string
-    // is too short to be sliced anyways.
-    __ cmpp(rcx, Immediate(SlicedString::kMinLength));
-    // Short slice.  Copy instead of slicing.
-    __ j(less, &copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ testb(rbx, Immediate(kStringEncodingMask));
-    __ j(zero, &two_byte_slice, Label::kNear);
-    __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
-    __ jmp(&set_slice_header, Label::kNear);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
-    __ bind(&set_slice_header);
-    __ Integer32ToSmi(rcx, rcx);
-    __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
-    __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
-           Immediate(String::kEmptyHashField));
-    __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
-    __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
-    __ IncrementCounter(counters->sub_string_native(), 1);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&copy_routine);
-  }
-
-  // rdi: underlying subject string
-  // rbx: instance type of underlying subject string
-  // rdx: adjusted start index (smi)
-  // rcx: length
-  // The subject string can only be external or sequential string of either
-  // encoding at this point.
-  Label two_byte_sequential, sequential_string;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ testb(rbx, Immediate(kExternalStringTag));
-  __ j(zero, &sequential_string);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ testb(rbx, Immediate(kShortExternalStringMask));
-  __ j(not_zero, &runtime);
-  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
-  // Move the pointer so that offset-wise, it looks like a sequential string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&sequential_string);
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ testb(rbx, Immediate(kStringEncodingMask));
-  __ j(zero, &two_byte_sequential);
-
-  // Allocate the result.
-  __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
-
-  // rax: result string
-  // rcx: result string length
-  {  // Locate character of sub string start.
-    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
-    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
-                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
-  }
-  // Locate first character of result.
-  __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
-
-  // rax: result string
-  // rcx: result length
-  // r14: first character of result
-  // rsi: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
-
-  __ bind(&two_byte_sequential);
-  // Allocate the result.
-  __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
-
-  // rax: result string
-  // rcx: result string length
-  {  // Locate character of sub string start.
-    SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
-    __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
-                        SeqOneByteString::kHeaderSize - kHeapObjectTag));
-  }
-  // Locate first character of result.
-  __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
-
-  // rax: result string
-  // rcx: result length
-  // rdi: first character of result
-  // r14: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // rax: string
-  // rbx: instance type
-  // rcx: sub string length (smi)
-  // rdx: from index (smi)
-  StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
-                                  &runtime, RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in rax.
-  Label is_number;
-  __ JumpIfSmi(rax, &is_number, Label::kNear);
-
-  Label not_string;
-  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
-  // rax: receiver
-  // rdi: receiver map
-  __ j(above_equal, &not_string, Label::kNear);
-  __ Ret();
-  __ bind(&not_string);
-
-  Label not_heap_number;
-  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(rdi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ PopReturnAddressTo(rcx);     // Pop return address.
-  __ Push(rax);                   // Push argument.
-  __ PushReturnAddressFrom(rcx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in rax.
-  Label is_number;
-  __ JumpIfSmi(rax, &is_number, Label::kNear);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CmpObjectType(rax, LAST_NAME_TYPE, rdi);
-  // rax: receiver
-  // rdi: receiver map
-  __ j(above, &not_name, Label::kNear);
-  __ Ret();
-  __ bind(&not_name);
-
-  Label not_heap_number;
-  __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex);
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(rdi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ PopReturnAddressTo(rcx);     // Pop return address.
-  __ Push(rax);                   // Push argument.
-  __ PushReturnAddressFrom(rcx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
                                                    Register right,
@@ -3172,17 +2874,6 @@
   Label need_incremental;
   Label need_incremental_pop_object;
 
-  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
-  __ andp(regs_.scratch0(), regs_.object());
-  __ movp(regs_.scratch1(),
-         Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset));
-  __ subp(regs_.scratch1(), Immediate(1));
-  __ movp(Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset),
-         regs_.scratch1());
-  __ j(negative, &need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(),
@@ -3575,7 +3266,7 @@
   __ jmp(feedback);
 
   __ bind(&transition_call);
-  DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister()));
+  DCHECK(receiver_map.is(StoreTransitionDescriptor::MapRegister()));
   __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   // The weak cell may have been cleared.
   __ JumpIfSmi(receiver_map, miss);
@@ -4308,7 +3999,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
+    __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
     __ j(greater, &too_big_for_new_space);
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
@@ -4671,7 +4362,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize));
+  __ cmpl(rcx, Immediate(kMaxRegularHeapObjectSize));
   __ j(greater, &too_big_for_new_space);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
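
The ToStringStub and ToNameStub bodies removed above are pure type dispatch: a Smi or HeapNumber is routed to NumberToStringStub, a string (or, for ToName, anything up to LAST_NAME_TYPE) is returned as-is, an Oddball returns its cached to_string field, and everything else tail-calls the runtime. A minimal standalone sketch of that dispatch order, using a toy object model rather than V8's tagged values:

#include <cstdio>
#include <string>

// Toy object model, just enough to mirror the dispatch order of the removed
// ToStringStub: Smi -> number path, String -> identity, HeapNumber -> number
// path, Oddball -> cached to_string, anything else -> runtime fallback.
enum class Kind { kSmi, kString, kHeapNumber, kOddball, kOther };

struct Object {
  Kind kind;
  std::string string_value;  // for kString, and an Oddball's cached to_string
  double number_value;       // for kSmi / kHeapNumber
};

std::string NumberToString(double v) { return std::to_string(v); }  // NumberToStringStub stand-in
std::string RuntimeToString(const Object&) { return "<Runtime::kToString>"; }

std::string ToString(const Object& o) {
  switch (o.kind) {
    case Kind::kSmi:
    case Kind::kHeapNumber:
      return NumberToString(o.number_value);  // __ TailCallStub(&stub)
    case Kind::kString:
      return o.string_value;                  // already a string: __ Ret()
    case Kind::kOddball:
      return o.string_value;                  // Oddball::kToStringOffset
    default:
      return RuntimeToString(o);              // __ TailCallRuntime(Runtime::kToString)
  }
}

int main() {
  std::printf("%s\n", ToString({Kind::kOddball, "undefined", 0}).c_str());
}
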
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 83f34d0..6adb820 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -11,6 +11,7 @@
 #include "src/base/compiler-specific.h"
 #include "src/base/lazy-instance.h"
 #include "src/disasm.h"
+#include "src/x64/sse-instr.h"
 
 namespace disasm {
 
@@ -875,6 +876,7 @@
   return 3;  // includes 0x0F
 }
 
+const char* sf_str[4] = {"", "rl", "ra", "ll"};
 
 int DisassemblerX64::AVXInstruction(byte* data) {
   byte opcode = *data;
@@ -949,6 +951,18 @@
         current += PrintRightOperand(current);
         AppendToBuffer(",%s", NameOfCPURegister(vvvv));
         break;
+#define DECLARE_SSE_AVX_DIS_CASE(instruction, notUsed1, notUsed2, notUsed3, \
+                                 opcode)                                    \
+  case 0x##opcode: {                                                        \
+    AppendToBuffer("v" #instruction " %s,%s,", NameOfXMMRegister(regop),    \
+                   NameOfXMMRegister(vvvv));                                \
+    current += PrintRightXMMOperand(current);                               \
+    break;                                                                  \
+  }
+
+        SSSE3_INSTRUCTION_LIST(DECLARE_SSE_AVX_DIS_CASE)
+        SSE4_INSTRUCTION_LIST(DECLARE_SSE_AVX_DIS_CASE)
+#undef DECLARE_SSE_AVX_DIS_CASE
       default:
         UnimplementedInstruction();
     }
@@ -968,6 +982,33 @@
         current += PrintRightXMMOperand(current);
         AppendToBuffer(",0x%x", *current++);
         break;
+      case 0x14:
+        AppendToBuffer("vpextrb ");
+        current += PrintRightByteOperand(current);
+        AppendToBuffer(",%s,0x%x,", NameOfXMMRegister(regop), *current++);
+        break;
+      case 0x15:
+        AppendToBuffer("vpextrw ");
+        current += PrintRightOperand(current);
+        AppendToBuffer(",%s,0x%x,", NameOfXMMRegister(regop), *current++);
+        break;
+      case 0x16:
+        AppendToBuffer("vpextrd ");
+        current += PrintRightOperand(current);
+        AppendToBuffer(",%s,0x%x,", NameOfXMMRegister(regop), *current++);
+        break;
+      case 0x20:
+        AppendToBuffer("vpinsrb %s,%s,", NameOfXMMRegister(regop),
+                       NameOfXMMRegister(vvvv));
+        current += PrintRightByteOperand(current);
+        AppendToBuffer(",0x%x", *current++);
+        break;
+      case 0x22:
+        AppendToBuffer("vpinsrd %s,%s,", NameOfXMMRegister(regop),
+                       NameOfXMMRegister(vvvv));
+        current += PrintRightOperand(current);
+        AppendToBuffer(",0x%x", *current++);
+        break;
       default:
         UnimplementedInstruction();
     }
@@ -1112,6 +1153,10 @@
                        NameOfXMMRegister(vvvv));
         current += PrintRightXMMOperand(current);
         break;
+      case 0xf0:
+        AppendToBuffer("vlddqu %s,", NameOfXMMRegister(regop));
+        current += PrintRightXMMOperand(current);
+        break;
       default:
         UnimplementedInstruction();
     }
@@ -1326,16 +1371,28 @@
                        NameOfXMMRegister(regop));
         current += PrintRightOperand(current);
         break;
-      case 0x73:
-        AppendToBuffer("%s %s,", regop == 6 ? "vpsllq" : "vpsrlq",
+      case 0x70:
+        AppendToBuffer("vpshufd %s,", NameOfXMMRegister(regop));
+        current += PrintRightXMMOperand(current);
+        AppendToBuffer(",0x%x", *current++);
+        break;
+      case 0x71:
+        AppendToBuffer("vps%sw %s,", sf_str[regop / 2],
                        NameOfXMMRegister(vvvv));
         current += PrintRightXMMOperand(current);
         AppendToBuffer(",%u", *current++);
         break;
-      case 0x76:
-        AppendToBuffer("vpcmpeqd %s,%s,", NameOfXMMRegister(regop),
+      case 0x72:
+        AppendToBuffer("vps%sd %s,", sf_str[regop / 2],
                        NameOfXMMRegister(vvvv));
         current += PrintRightXMMOperand(current);
+        AppendToBuffer(",%u", *current++);
+        break;
+      case 0x73:
+        AppendToBuffer("vps%sq %s,", sf_str[regop / 2],
+                       NameOfXMMRegister(vvvv));
+        current += PrintRightXMMOperand(current);
+        AppendToBuffer(",%u", *current++);
         break;
       case 0x7e:
         AppendToBuffer("vmov%c ", vex_w() ? 'q' : 'd');
@@ -1352,6 +1409,27 @@
         current += 1;
         break;
       }
+      case 0xc4:
+        AppendToBuffer("vpinsrw %s,%s,", NameOfXMMRegister(regop),
+                       NameOfXMMRegister(vvvv));
+        current += PrintRightOperand(current);
+        AppendToBuffer(",0x%x", *current++);
+        break;
+      case 0xc5:
+        AppendToBuffer("vpextrw %s,", NameOfCPURegister(regop));
+        current += PrintRightXMMOperand(current);
+        AppendToBuffer(",0x%x", *current++);
+        break;
+#define DECLARE_SSE_AVX_DIS_CASE(instruction, notUsed1, notUsed2, opcode) \
+  case 0x##opcode: {                                                      \
+    AppendToBuffer("v" #instruction " %s,%s,", NameOfXMMRegister(regop),  \
+                   NameOfXMMRegister(vvvv));                              \
+    current += PrintRightXMMOperand(current);                             \
+    break;                                                                \
+  }
+
+        SSE2_INSTRUCTION_LIST(DECLARE_SSE_AVX_DIS_CASE)
+#undef DECLARE_SSE_AVX_DIS_CASE
       default:
         UnimplementedInstruction();
     }
@@ -1363,7 +1441,6 @@
   return static_cast<int>(current - data);
 }
 
-
 // Returns number of bytes used, including *data.
 int DisassemblerX64::FPUInstruction(byte* data) {
   byte escape_opcode = *data;
@@ -1558,11 +1635,20 @@
     if (opcode == 0x38) {
       byte third_byte = *current;
       current = data + 3;
-      if (third_byte == 0x40) {
-        // pmulld xmm, xmm/m128
-        get_modrm(*current, &mod, &regop, &rm);
-        AppendToBuffer("pmulld %s,", NameOfXMMRegister(regop));
-        current += PrintRightXMMOperand(current);
+      get_modrm(*current, &mod, &regop, &rm);
+      switch (third_byte) {
+#define SSE34_DIS_CASE(instruction, notUsed1, notUsed2, notUsed3, opcode) \
+  case 0x##opcode: {                                                      \
+    AppendToBuffer(#instruction " %s,", NameOfXMMRegister(regop));        \
+    current += PrintRightXMMOperand(current);                             \
+    break;                                                                \
+  }
+
+        SSSE3_INSTRUCTION_LIST(SSE34_DIS_CASE)
+        SSE4_INSTRUCTION_LIST(SSE34_DIS_CASE)
+#undef SSE34_DIS_CASE
+        default:
+          UnimplementedInstruction();
       }
     } else if (opcode == 0x3A) {
       byte third_byte = *current;
@@ -1586,12 +1672,31 @@
         current += PrintRightXMMOperand(current);
         AppendToBuffer(",0x%x", (*current) & 3);
         current += 1;
+      } else if (third_byte == 0x14) {
+        get_modrm(*current, &mod, &regop, &rm);
+        AppendToBuffer("pextrb ");  // reg/m32, xmm, imm8
+        current += PrintRightOperand(current);
+        AppendToBuffer(",%s,%d", NameOfXMMRegister(regop), (*current) & 3);
+        current += 1;
+      } else if (third_byte == 0x15) {
+        get_modrm(*current, &mod, &regop, &rm);
+        AppendToBuffer("pextrw ");  // reg/m32, xmm, imm8
+        current += PrintRightOperand(current);
+        AppendToBuffer(",%s,%d", NameOfXMMRegister(regop), (*current) & 3);
+        current += 1;
       } else if (third_byte == 0x16) {
         get_modrm(*current, &mod, &regop, &rm);
         AppendToBuffer("pextrd ");  // reg/m32, xmm, imm8
         current += PrintRightOperand(current);
         AppendToBuffer(",%s,%d", NameOfXMMRegister(regop), (*current) & 3);
         current += 1;
+      } else if (third_byte == 0x20) {
+        get_modrm(*current, &mod, &regop, &rm);
+        AppendToBuffer("pinsrd ");  // xmm, reg/m32, imm8
+        AppendToBuffer(" %s,", NameOfXMMRegister(regop));
+        current += PrintRightOperand(current);
+        AppendToBuffer(",%d", (*current) & 3);
+        current += 1;
       } else if (third_byte == 0x21) {
         get_modrm(*current, &mod, &regop, &rm);
         // insertps xmm, xmm/m32, imm8
@@ -1666,15 +1771,20 @@
         current += PrintRightXMMOperand(current);
         AppendToBuffer(",0x%x", *current);
         current += 1;
+      } else if (opcode == 0x71) {
+        current += 1;
+        AppendToBuffer("ps%sw %s,%d", sf_str[regop / 2], NameOfXMMRegister(rm),
+                       *current & 0x7f);
+        current += 1;
       } else if (opcode == 0x72) {
         current += 1;
-        AppendToBuffer("%s %s,%d", (regop == 6) ? "pslld" : "psrld",
-                       NameOfXMMRegister(rm), *current & 0x7f);
+        AppendToBuffer("ps%sd %s,%d", sf_str[regop / 2], NameOfXMMRegister(rm),
+                       *current & 0x7f);
         current += 1;
       } else if (opcode == 0x73) {
         current += 1;
-        AppendToBuffer("%s %s,%d", (regop == 6) ? "psllq" : "psrlq",
-                       NameOfXMMRegister(rm), *current & 0x7f);
+        AppendToBuffer("ps%sq %s,%d", sf_str[regop / 2], NameOfXMMRegister(rm),
+                       *current & 0x7f);
         current += 1;
       } else if (opcode == 0xB1) {
         current += PrintOperands("cmpxchg", OPER_REG_OP_ORDER, current);
@@ -1692,16 +1802,86 @@
           mnemonic = "ucomisd";
         } else if (opcode == 0x2F) {
           mnemonic = "comisd";
+        } else if (opcode == 0x64) {
+          mnemonic = "pcmpgtb";
+        } else if (opcode == 0x65) {
+          mnemonic = "pcmpgtw";
+        } else if (opcode == 0x66) {
+          mnemonic = "pcmpgtd";
+        } else if (opcode == 0x74) {
+          mnemonic = "pcmpeqb";
+        } else if (opcode == 0x75) {
+          mnemonic = "pcmpeqw";
         } else if (opcode == 0x76) {
           mnemonic = "pcmpeqd";
         } else if (opcode == 0x62) {
           mnemonic = "punpckldq";
+        } else if (opcode == 0x63) {
+          mnemonic = "packsswb";
+        } else if (opcode == 0x67) {
+          mnemonic = "packuswb";
         } else if (opcode == 0x6A) {
           mnemonic = "punpckhdq";
+        } else if (opcode == 0x6B) {
+          mnemonic = "packssdw";
+        } else if (opcode == 0xC4) {
+          mnemonic = "pinsrw";
+        } else if (opcode == 0xC5) {
+          mnemonic = "pextrw";
+        } else if (opcode == 0xD1) {
+          mnemonic = "psrlw";
+        } else if (opcode == 0xD2) {
+          mnemonic = "psrld";
+        } else if (opcode == 0xD5) {
+          mnemonic = "pmullw";
+        } else if (opcode == 0xD7) {
+          mnemonic = "pmovmskb";
+        } else if (opcode == 0xD8) {
+          mnemonic = "psubusb";
+        } else if (opcode == 0xD9) {
+          mnemonic = "psubusw";
+        } else if (opcode == 0xDA) {
+          mnemonic = "pminub";
+        } else if (opcode == 0xDC) {
+          mnemonic = "paddusb";
+        } else if (opcode == 0xDD) {
+          mnemonic = "paddusw";
+        } else if (opcode == 0xDE) {
+          mnemonic = "pmaxub";
+        } else if (opcode == 0xE1) {
+          mnemonic = "psraw";
+        } else if (opcode == 0xE2) {
+          mnemonic = "psrad";
+        } else if (opcode == 0xE8) {
+          mnemonic = "psubsb";
+        } else if (opcode == 0xE9) {
+          mnemonic = "psubsw";
+        } else if (opcode == 0xEA) {
+          mnemonic = "pminsw";
+        } else if (opcode == 0xEC) {
+          mnemonic = "paddsb";
+        } else if (opcode == 0xED) {
+          mnemonic = "paddsw";
+        } else if (opcode == 0xEE) {
+          mnemonic = "pmaxsw";
+        } else if (opcode == 0xEF) {
+          mnemonic = "pxor";
+        } else if (opcode == 0xF1) {
+          mnemonic = "psllw";
+        } else if (opcode == 0xF2) {
+          mnemonic = "pslld";
         } else if (opcode == 0xF4) {
           mnemonic = "pmuludq";
+        } else if (opcode == 0xF8) {
+          mnemonic = "psubb";
+        } else if (opcode == 0xF9) {
+          mnemonic = "psubw";
         } else if (opcode == 0xFA) {
           mnemonic = "psubd";
+        } else if (opcode == 0xFC) {
+          mnemonic = "paddb";
+        } else if (opcode == 0xFD) {
+          mnemonic = "paddw";
         } else if (opcode == 0xFE) {
           mnemonic = "paddd";
         } else if (opcode == 0xC2) {
@@ -1780,6 +1960,11 @@
                      NameOfXMMRegister(regop),
                      NameOfXMMRegister(rm));
       current += 2;
+    } else if (opcode == 0xF0) {
+      int mod, regop, rm;
+      get_modrm(*current, &mod, &regop, &rm);
+      AppendToBuffer("lddqu %s,", NameOfXMMRegister(regop));
+      current += PrintRightOperand(current);
     } else {
       UnimplementedInstruction();
     }
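
The new sf_str table above ({"", "rl", "ra", "ll"}) turns the ModR/M reg field of the 0x71/0x72/0x73 shift groups into a mnemonic suffix: reg 2 gives "rl" (shift right logical), reg 4 gives "ra" (shift right arithmetic), reg 6 gives "ll" (shift left logical), so "ps" + sf_str[regop / 2] + width yields psrlw, psrad, psllq and so on, replacing the old two-way regop == 6 checks. A tiny standalone sketch of that mnemonic construction:

#include <cstdio>
#include <string>

// Mirrors the disassembler's sf_str table: index is regop / 2.
static const char* const sf_str[4] = {"", "rl", "ra", "ll"};

// Builds the shift mnemonic for the 0x71/0x72/0x73 opcode groups.
// width is 'w', 'd' or 'q'; regop is the ModR/M reg field (2, 4 or 6).
std::string ShiftMnemonic(int regop, char width) {
  return std::string("ps") + sf_str[regop / 2] + width;
}

int main() {
  std::printf("%s\n", ShiftMnemonic(2, 'w').c_str());  // psrlw
  std::printf("%s\n", ShiftMnemonic(4, 'd').c_str());  // psrad
  std::printf("%s\n", ShiftMnemonic(6, 'q').c_str());  // psllq
}
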
diff --git a/src/x64/interface-descriptors-x64.cc b/src/x64/interface-descriptors-x64.cc
index 7d39b42..9e48644 100644
--- a/src/x64/interface-descriptors-x64.cc
+++ b/src/x64/interface-descriptors-x64.cc
@@ -40,13 +40,9 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return rbx; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() { return rdi; }
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return rbx; }
-const Register VectorStoreTransitionDescriptor::MapRegister() { return r11; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return rbx; }
-
+const Register StoreTransitionDescriptor::SlotRegister() { return rdi; }
+const Register StoreTransitionDescriptor::VectorRegister() { return rbx; }
+const Register StoreTransitionDescriptor::MapRegister() { return r11; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return rbx; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return rax; }
@@ -356,7 +352,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       rdi,  // callee
@@ -391,7 +387,19 @@
       rax,  // argument count (not including receiver)
       rdx,  // new target
       rdi,  // constructor
-      rbx,  // address of first argument
+      rbx,  // allocation site feedback if available, undefined otherwise
+      rcx,  // address of first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      rax,  // argument count (not including receiver)
+      rdx,  // target to the call. It is checked to be Array function.
+      rbx,  // allocation site feedback
+      rcx,  // address of first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 6dacc01..0fd6333 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -253,9 +253,8 @@
                                 Condition cc,
                                 Label* branch,
                                 Label::Distance distance) {
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cc, branch, distance);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc, branch,
+                distance);
 }
 
 
@@ -3325,12 +3324,12 @@
     Movd(dst, src);
     return;
   }
-  DCHECK_EQ(1, imm8);
   if (CpuFeatures::IsSupported(SSE4_1)) {
     CpuFeatureScope sse_scope(this, SSE4_1);
     pextrd(dst, src, imm8);
     return;
   }
+  DCHECK_EQ(1, imm8);
   movq(dst, src);
   shrq(dst, Immediate(32));
 }
@@ -4974,7 +4973,7 @@
                               Label* gc_required,
                               AllocationFlags flags) {
   DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
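
The InNewSpace change above swaps the hand-built (IN_FROM_SPACE | IN_TO_SPACE) mask for the named MemoryChunk::kIsInNewSpaceMask. Assuming that constant is just the OR of those two page flags (which is what the removed lines computed inline), the check reduces to a single bit test on the page's flags word; a small sketch with hypothetical bit positions:

#include <cassert>

// Hypothetical flag bit positions; the real ones are enum values on MemoryChunk.
enum PageFlag { IN_FROM_SPACE = 3, IN_TO_SPACE = 4, OTHER_FLAG = 5 };

// Assumed definition of the named mask: the OR of the two flags the removed
// code combined by hand before calling CheckPageFlag.
constexpr int kIsInNewSpaceMask = (1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE);

// CheckPageFlag(object, scratch, mask, cc, branch) boils down to this test
// on the page's flags word.
bool InNewSpace(int page_flags) { return (page_flags & kIsInNewSpaceMask) != 0; }

int main() {
  assert(InNewSpace(1 << IN_FROM_SPACE));
  assert(InNewSpace(1 << IN_TO_SPACE));
  assert(!InNewSpace(1 << OTHER_FLAG));
}
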
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index d5e411f..a8d0c60 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -891,6 +891,18 @@
   // miss label if the weak cell was cleared.
   void LoadWeakValue(Register value, Handle<WeakCell> cell, Label* miss);
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack
+  // into |reg| according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed below
+  // the caller's sp (on x64 it's at least the return address).

+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 1) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    UNIMPLEMENTED();
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the rsp register.
   void Drop(int stack_elements);
diff --git a/src/x64/sse-instr.h b/src/x64/sse-instr.h
new file mode 100644
index 0000000..0095727
--- /dev/null
+++ b/src/x64/sse-instr.h
@@ -0,0 +1,69 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_SSE_INSTR_H_
+#define V8_SSE_INSTR_H_
+
+#define SSE2_INSTRUCTION_LIST(V) \
+  V(packsswb, 66, 0F, 63)        \
+  V(packssdw, 66, 0F, 6B)        \
+  V(packuswb, 66, 0F, 67)        \
+  V(paddb, 66, 0F, FC)           \
+  V(paddw, 66, 0F, FD)           \
+  V(paddd, 66, 0F, FE)           \
+  V(paddsb, 66, 0F, EC)          \
+  V(paddsw, 66, 0F, ED)          \
+  V(paddusb, 66, 0F, DC)         \
+  V(paddusw, 66, 0F, DD)         \
+  V(pcmpeqb, 66, 0F, 74)         \
+  V(pcmpeqw, 66, 0F, 75)         \
+  V(pcmpeqd, 66, 0F, 76)         \
+  V(pcmpgtb, 66, 0F, 64)         \
+  V(pcmpgtw, 66, 0F, 65)         \
+  V(pcmpgtd, 66, 0F, 66)         \
+  V(pmaxsw, 66, 0F, EE)          \
+  V(pmaxub, 66, 0F, DE)          \
+  V(pminsw, 66, 0F, EA)          \
+  V(pminub, 66, 0F, DA)          \
+  V(pmullw, 66, 0F, D5)          \
+  V(pmuludq, 66, 0F, F4)         \
+  V(psllw, 66, 0F, F1)           \
+  V(pslld, 66, 0F, F2)           \
+  V(psraw, 66, 0F, E1)           \
+  V(psrad, 66, 0F, E2)           \
+  V(psrlw, 66, 0F, D1)           \
+  V(psrld, 66, 0F, D2)           \
+  V(psubb, 66, 0F, F8)           \
+  V(psubw, 66, 0F, F9)           \
+  V(psubd, 66, 0F, FA)           \
+  V(psubsb, 66, 0F, E8)          \
+  V(psubsw, 66, 0F, E9)          \
+  V(psubusb, 66, 0F, D8)         \
+  V(psubusw, 66, 0F, D9)         \
+  V(pxor, 66, 0F, EF)            \
+  V(cvtps2dq, 66, 0F, 5B)
+
+#define SSSE3_INSTRUCTION_LIST(V) \
+  V(pabsb, 66, 0F, 38, 1C)        \
+  V(pabsw, 66, 0F, 38, 1D)        \
+  V(pabsd, 66, 0F, 38, 1E)        \
+  V(pshufb, 66, 0F, 38, 00)       \
+  V(psignb, 66, 0F, 38, 08)       \
+  V(psignw, 66, 0F, 38, 09)       \
+  V(psignd, 66, 0F, 38, 0A)
+
+#define SSE4_INSTRUCTION_LIST(V) \
+  V(packusdw, 66, 0F, 38, 2B)    \
+  V(pminsb, 66, 0F, 38, 38)      \
+  V(pminsd, 66, 0F, 38, 39)      \
+  V(pminuw, 66, 0F, 38, 3A)      \
+  V(pminud, 66, 0F, 38, 3B)      \
+  V(pmaxsb, 66, 0F, 38, 3C)      \
+  V(pmaxsd, 66, 0F, 38, 3D)      \
+  V(pmaxuw, 66, 0F, 38, 3E)      \
+  V(pmaxud, 66, 0F, 38, 3F)      \
+  V(pmulld, 66, 0F, 38, 40)      \
+  V(ptest, 66, 0F, 38, 17)
+
+#endif  // V8_SSE_INSTR_H_
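
sse-instr.h above is an X-macro header: each *_INSTRUCTION_LIST invokes a caller-supplied macro V once per instruction with its mnemonic and encoding bytes, and the disasm-x64.cc change in this same patch expands the lists into switch cases via DECLARE_SSE_AVX_DIS_CASE and SSE34_DIS_CASE. A minimal sketch of the pattern with a hypothetical two-entry list (the entries are copied from SSE2_INSTRUCTION_LIST; everything else is illustrative):

#include <cstdio>

// Hypothetical instruction list in the same shape as SSE2_INSTRUCTION_LIST:
// V(mnemonic, prefix, escape, opcode)
#define DEMO_INSTRUCTION_LIST(V) \
  V(paddd, 66, 0F, FE)           \
  V(psubd, 66, 0F, FA)

// Expand the list into a name lookup, the same way the disassembler expands
// it into "case 0x##opcode:" entries.
const char* MnemonicForOpcode(unsigned char op) {
  switch (op) {
#define DEMO_CASE(instruction, prefix, escape, opcode) \
  case 0x##opcode:                                     \
    return #instruction;
    DEMO_INSTRUCTION_LIST(DEMO_CASE)
#undef DEMO_CASE
    default:
      return "unknown";
  }
}

int main() {
  std::printf("%s\n", MnemonicForOpcode(0xFE));  // paddd
  std::printf("%s\n", MnemonicForOpcode(0xFA));  // psubd
}
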
diff --git a/src/x87/code-stubs-x87.cc b/src/x87/code-stubs-x87.cc
index 02de67a..e70cbad 100644
--- a/src/x87/code-stubs-x87.cc
+++ b/src/x87/code-stubs-x87.cc
@@ -1130,7 +1130,6 @@
   // edi : the function to call
   Isolate* isolate = masm->isolate();
   Label initialize, done, miss, megamorphic, not_array_function;
-  Label done_increment_count, done_initialize_count;
 
   // Load the cache state into ecx.
   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
@@ -1143,7 +1142,7 @@
   // type-feedback-vector.h).
   Label check_allocation_site;
   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
-  __ j(equal, &done_increment_count, Label::kFar);
+  __ j(equal, &done, Label::kFar);
   __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
   __ j(equal, &done, Label::kFar);
   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
@@ -1166,7 +1165,7 @@
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
   __ cmp(edi, ecx);
   __ j(not_equal, &megamorphic);
-  __ jmp(&done_increment_count, Label::kFar);
+  __ jmp(&done, Label::kFar);
 
   __ bind(&miss);
 
@@ -1195,26 +1194,17 @@
   // slot.
   CreateAllocationSiteStub create_stub(isolate);
   CallStubInRecordCallTarget(masm, &create_stub);
-  __ jmp(&done_initialize_count);
+  __ jmp(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(isolate);
   CallStubInRecordCallTarget(masm, &weak_cell_stub);
-  __ bind(&done_initialize_count);
 
-  // Initialize the call counter.
-  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-  __ jmp(&done);
-
-  __ bind(&done_increment_count);
-  // Increment the call count for monomorphic function calls.
+  __ bind(&done);
+  // Increment the call count for all function calls.
   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                       FixedArray::kHeaderSize + kPointerSize),
          Immediate(Smi::FromInt(1)));
-
-  __ bind(&done);
 }
 
 
@@ -1260,6 +1250,12 @@
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
 
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+                               Register slot) {
+  __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size,
+                      FixedArray::kHeaderSize + kPointerSize),
+         Immediate(Smi::FromInt(1)));
+}
 
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // edi - function
@@ -1275,9 +1271,7 @@
                            FixedArray::kHeaderSize));
 
   // Increment the call count for monomorphic function calls.
-  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
+  IncrementCallCount(masm, ebx, edx);
 
   __ mov(ebx, ecx);
   __ mov(edx, edi);
@@ -1293,7 +1287,7 @@
   // edx - slot id
   // ebx - vector
   Isolate* isolate = masm->isolate();
-  Label extra_checks_or_miss, call, call_function;
+  Label extra_checks_or_miss, call, call_function, call_count_incremented;
   int argc = arg_count();
   ParameterCount actual(argc);
 
@@ -1322,12 +1316,11 @@
   // convincing us that we have a monomorphic JSFunction.
   __ JumpIfSmi(edi, &extra_checks_or_miss);
 
-  // Increment the call count for monomorphic function calls.
-  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-
   __ bind(&call_function);
+
+  // Increment the call count for monomorphic function calls.
+  IncrementCallCount(masm, ebx, edx);
+
   __ Set(eax, argc);
   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                     tail_call_mode()),
@@ -1368,6 +1361,12 @@
       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
 
   __ bind(&call);
+
+  // Increment the call count for megamorphic function calls.
+  IncrementCallCount(masm, ebx, edx);
+
+  __ bind(&call_count_incremented);
+
   __ Set(eax, argc);
   __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
           RelocInfo::CODE_TARGET);
@@ -1393,11 +1392,6 @@
   __ cmp(ecx, NativeContextOperand());
   __ j(not_equal, &miss);
 
-  // Initialize the call counter.
-  __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                      FixedArray::kHeaderSize + kPointerSize),
-         Immediate(Smi::FromInt(1)));
-
   // Store the function. Use a stub since we need a frame for allocation.
   // ebx - vector
   // edx - slot
@@ -1405,11 +1399,15 @@
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
     CreateWeakCellStub create_stub(isolate);
+    __ push(ebx);
+    __ push(edx);
     __ push(edi);
     __ push(esi);
     __ CallStub(&create_stub);
     __ pop(esi);
     __ pop(edi);
+    __ pop(edx);
+    __ pop(ebx);
   }
 
   __ jmp(&call_function);
@@ -1419,7 +1417,7 @@
   __ bind(&miss);
   GenerateMiss(masm);
 
-  __ jmp(&call);
+  __ jmp(&call_count_incremented);
 
   // Unreachable
   __ int3();
@@ -1910,297 +1908,6 @@
 }
 
 
-void SubStringStub::Generate(MacroAssembler* masm) {
-  Label runtime;
-
-  // Stack frame on entry.
-  //  esp[0]: return address
-  //  esp[4]: to
-  //  esp[8]: from
-  //  esp[12]: string
-
-  // Make sure first argument is a string.
-  __ mov(eax, Operand(esp, 3 * kPointerSize));
-  STATIC_ASSERT(kSmiTag == 0);
-  __ JumpIfSmi(eax, &runtime);
-  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
-  __ j(NegateCondition(is_string), &runtime);
-
-  // eax: string
-  // ebx: instance type
-
-  // Calculate length of sub string using the smi values.
-  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
-  __ JumpIfNotSmi(ecx, &runtime);
-  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
-  __ JumpIfNotSmi(edx, &runtime);
-  __ sub(ecx, edx);
-  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
-  Label not_original_string;
-  // Shorter than original string's length: an actual substring.
-  __ j(below, &not_original_string, Label::kNear);
-  // Longer than original string's length or negative: unsafe arguments.
-  __ j(above, &runtime);
-  // Return original string.
-  Counters* counters = isolate()->counters();
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-  __ bind(&not_original_string);
-
-  Label single_char;
-  __ cmp(ecx, Immediate(Smi::FromInt(1)));
-  __ j(equal, &single_char);
-
-  // eax: string
-  // ebx: instance type
-  // ecx: sub string length (smi)
-  // edx: from index (smi)
-  // Deal with different string types: update the index if necessary
-  // and put the underlying string into edi.
-  Label underlying_unpacked, sliced_string, seq_or_external_string;
-  // If the string is not indirect, it can only be sequential or external.
-  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
-  STATIC_ASSERT(kIsIndirectStringMask != 0);
-  __ test(ebx, Immediate(kIsIndirectStringMask));
-  __ j(zero, &seq_or_external_string, Label::kNear);
-
-  Factory* factory = isolate()->factory();
-  __ test(ebx, Immediate(kSlicedNotConsMask));
-  __ j(not_zero, &sliced_string, Label::kNear);
-  // Cons string.  Check whether it is flat, then fetch first part.
-  // Flat cons strings have an empty second part.
-  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
-         factory->empty_string());
-  __ j(not_equal, &runtime);
-  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
-  // Update instance type.
-  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
-  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&sliced_string);
-  // Sliced string.  Fetch parent and adjust start index by offset.
-  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
-  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
-  // Update instance type.
-  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
-  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
-  __ jmp(&underlying_unpacked, Label::kNear);
-
-  __ bind(&seq_or_external_string);
-  // Sequential or external string.  Just move string to the expected register.
-  __ mov(edi, eax);
-
-  __ bind(&underlying_unpacked);
-
-  if (FLAG_string_slices) {
-    Label copy_routine;
-    // edi: underlying subject string
-    // ebx: instance type of underlying subject string
-    // edx: adjusted start index (smi)
-    // ecx: length (smi)
-    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
-    // Short slice.  Copy instead of slicing.
-    __ j(less, &copy_routine);
-    // Allocate new sliced string.  At this point we do not reload the instance
-    // type including the string encoding because we simply rely on the info
-    // provided by the original string.  It does not matter if the original
-    // string's encoding is wrong because we always have to recheck encoding of
-    // the newly created string's parent anyways due to externalized strings.
-    Label two_byte_slice, set_slice_header;
-    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
-    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
-    __ test(ebx, Immediate(kStringEncodingMask));
-    __ j(zero, &two_byte_slice, Label::kNear);
-    __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
-    __ jmp(&set_slice_header, Label::kNear);
-    __ bind(&two_byte_slice);
-    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
-    __ bind(&set_slice_header);
-    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
-    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
-           Immediate(String::kEmptyHashField));
-    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
-    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
-    __ IncrementCounter(counters->sub_string_native(), 1);
-    __ ret(3 * kPointerSize);
-
-    __ bind(&copy_routine);
-  }
-
-  // edi: underlying subject string
-  // ebx: instance type of underlying subject string
-  // edx: adjusted start index (smi)
-  // ecx: length (smi)
-  // The subject string can only be external or sequential string of either
-  // encoding at this point.
-  Label two_byte_sequential, runtime_drop_two, sequential_string;
-  STATIC_ASSERT(kExternalStringTag != 0);
-  STATIC_ASSERT(kSeqStringTag == 0);
-  __ test_b(ebx, Immediate(kExternalStringTag));
-  __ j(zero, &sequential_string);
-
-  // Handle external string.
-  // Rule out short external strings.
-  STATIC_ASSERT(kShortExternalStringTag != 0);
-  __ test_b(ebx, Immediate(kShortExternalStringMask));
-  __ j(not_zero, &runtime);
-  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
-  // Move the pointer so that offset-wise, it looks like a sequential string.
-  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
-  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
-  __ bind(&sequential_string);
-  // Stash away (adjusted) index and (underlying) string.
-  __ push(edx);
-  __ push(edi);
-  __ SmiUntag(ecx);
-  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
-  __ test_b(ebx, Immediate(kStringEncodingMask));
-  __ j(zero, &two_byte_sequential);
-
-  // Sequential one byte string.  Allocate the result.
-  __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
-
-  // eax: result string
-  // ecx: result string length
-  // Locate first character of result.
-  __ mov(edi, eax);
-  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-  // Load string argument and locate character of sub string start.
-  __ pop(edx);
-  __ pop(ebx);
-  __ SmiUntag(ebx);
-  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
-
-  // eax: result string
-  // ecx: result length
-  // edi: first character of result
-  // edx: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  __ bind(&two_byte_sequential);
-  // Sequential two-byte string.  Allocate the result.
-  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
-
-  // eax: result string
-  // ecx: result string length
-  // Locate first character of result.
-  __ mov(edi, eax);
-  __ add(edi,
-         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-  // Load string argument and locate character of sub string start.
-  __ pop(edx);
-  __ pop(ebx);
-  // As from is a smi it is 2 times the value which matches the size of a two
-  // byte character.
-  STATIC_ASSERT(kSmiTag == 0);
-  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
-  __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
-
-  // eax: result string
-  // ecx: result length
-  // edi: first character of result
-  // edx: character of sub string start
-  StringHelper::GenerateCopyCharacters(
-      masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
-  __ IncrementCounter(counters->sub_string_native(), 1);
-  __ ret(3 * kPointerSize);
-
-  // Drop pushed values on the stack before tail call.
-  __ bind(&runtime_drop_two);
-  __ Drop(2);
-
-  // Just jump to runtime to create the sub string.
-  __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString);
-
-  __ bind(&single_char);
-  // eax: string
-  // ebx: instance type
-  // ecx: sub string length (smi)
-  // edx: from index (smi)
-  StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
-                                  &runtime, RECEIVER_IS_STRING);
-  generator.GenerateFast(masm);
-  __ ret(3 * kPointerSize);
-  generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
-  // The ToString stub takes one argument in eax.
-  Label is_number;
-  __ JumpIfSmi(eax, &is_number, Label::kNear);
-
-  Label not_string;
-  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
-  // eax: receiver
-  // edi: receiver map
-  __ j(above_equal, &not_string, Label::kNear);
-  __ Ret();
-  __ bind(&not_string);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(edi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ pop(ecx);   // Pop return address.
-  __ push(eax);  // Push argument.
-  __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
-  // The ToName stub takes one argument in eax.
-  Label is_number;
-  __ JumpIfSmi(eax, &is_number, Label::kNear);
-
-  Label not_name;
-  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
-  __ CmpObjectType(eax, LAST_NAME_TYPE, edi);
-  // eax: receiver
-  // edi: receiver map
-  __ j(above, &not_name, Label::kNear);
-  __ Ret();
-  __ bind(&not_name);
-
-  Label not_heap_number;
-  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
-  __ j(not_equal, &not_heap_number, Label::kNear);
-  __ bind(&is_number);
-  NumberToStringStub stub(isolate());
-  __ TailCallStub(&stub);
-  __ bind(&not_heap_number);
-
-  Label not_oddball;
-  __ CmpInstanceType(edi, ODDBALL_TYPE);
-  __ j(not_equal, &not_oddball, Label::kNear);
-  __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
-  __ Ret();
-  __ bind(&not_oddball);
-
-  __ pop(ecx);   // Pop return address.
-  __ push(eax);  // Push argument.
-  __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToName);
-}
-
-
 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                    Register left,
                                                    Register right,
@@ -3040,17 +2747,6 @@
     Mode mode) {
   Label object_is_black, need_incremental, need_incremental_pop_object;
 
-  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
-  __ and_(regs_.scratch0(), regs_.object());
-  __ mov(regs_.scratch1(),
-         Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset));
-  __ sub(regs_.scratch1(), Immediate(1));
-  __ mov(Operand(regs_.scratch0(),
-                 MemoryChunk::kWriteBarrierCounterOffset),
-         regs_.scratch1());
-  __ j(negative, &need_incremental);
-
   // Let's look at the color of the object:  If it is not black we don't have
   // to inform the incremental marker.
   __ JumpIfBlack(regs_.object(),
@@ -3392,11 +3088,10 @@
   Label load_smi_map, compare_map;
   Label start_polymorphic;
   Label pop_and_miss;
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   __ push(receiver);
-  __ push(vector);
+  // Value, vector and slot are passed on the stack, so no need to save/restore
+  // them.
 
   Register receiver_map = receiver;
   Register cached_map = vector;
@@ -3417,12 +3112,9 @@
   Register handler = feedback;
   DCHECK(handler.is(StoreWithVectorDescriptor::ValueRegister()));
   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
-  __ pop(vector);
   __ pop(receiver);
   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), handler);
-  __ pop(handler);  // Pop "value".
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(handler);
 
   // Polymorphic, we have to loop from 2 to N
   __ bind(&start_polymorphic);
@@ -3446,11 +3138,8 @@
                                FixedArray::kHeaderSize + kPointerSize));
   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
-  __ mov(Operand::StaticVariable(virtual_register), handler);
-  __ pop(handler);  // Pop "value".
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(handler);
 
   __ bind(&prepare_next);
   __ add(counter, Immediate(Smi::FromInt(2)));
@@ -3460,7 +3149,6 @@
   // We exhausted our array of map handler pairs.
   __ bind(&pop_and_miss);
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ jmp(miss);
 
@@ -3476,8 +3164,6 @@
                                        Label* miss) {
   // The store ic value is on the stack.
   DCHECK(weak_cell.is(StoreWithVectorDescriptor::ValueRegister()));
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
 
   // feedback initially contains the feedback array
   Label compare_smi_map;
@@ -3493,11 +3179,8 @@
   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
-  // Put the store ic value back in it's register.
-  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
-  __ pop(weak_cell);  // Pop "value".
   // jump to the handler.
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(weak_cell);
 
   // In microbenchmarks, it made sense to unroll this code so that the call to
   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
@@ -3507,10 +3190,8 @@
   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
                                  FixedArray::kHeaderSize + kPointerSize));
   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), weak_cell);
-  __ pop(weak_cell);  // Pop "value".
   // jump to the handler.
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(weak_cell);
 }
 
 void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
@@ -3521,7 +3202,26 @@
   Register slot = StoreWithVectorDescriptor::SlotRegister();          // edi
   Label miss;
 
-  __ push(value);
+  if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
+    // Current stack layout:
+    // - esp[8]    -- value
+    // - esp[4]    -- slot
+    // - esp[0]    -- return address
+    STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
+    STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+    if (in_frame) {
+      __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
+      // If the vector is not on the stack, then insert the vector beneath
+      // return address in order to prepare for calling handler with
+      // StoreWithVector calling convention.
+      __ push(Operand(esp, 0));
+      __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
+      __ RecordComment("]");
+    } else {
+      __ mov(vector, Operand(esp, 1 * kPointerSize));
+    }
+    __ mov(slot, Operand(esp, 2 * kPointerSize));
+  }
 
   Register scratch = value;
   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
@@ -3545,19 +3245,9 @@
   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   __ j(not_equal, &miss);
 
-  __ pop(value);
-  __ push(slot);
-  __ push(vector);
   masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, slot,
                                                      no_reg);
-  __ pop(vector);
-  __ pop(slot);
-  Label no_pop_miss;
-  __ jmp(&no_pop_miss);
-
   __ bind(&miss);
-  __ pop(value);
-  __ bind(&no_pop_miss);
   StoreIC::GenerateMiss(masm);
 }
 
@@ -3579,17 +3269,13 @@
   Label load_smi_map, compare_map;
   Label transition_call;
   Label pop_and_miss;
-  ExternalReference virtual_register =
-      ExternalReference::virtual_handler_register(masm->isolate());
-  ExternalReference virtual_slot =
-      ExternalReference::virtual_slot_register(masm->isolate());
 
   __ push(receiver);
-  __ push(vector);
+  // Value, vector and slot are passed on the stack, so no need to save/restore
+  // them.
 
   Register receiver_map = receiver;
   Register cached_map = vector;
-  Register value = StoreDescriptor::ValueRegister();
 
   // Receiver might not be a heap object.
   __ JumpIfSmi(receiver, &load_smi_map);
@@ -3600,15 +3286,18 @@
   __ push(key);
   // Current stack layout:
   // - esp[0]    -- key
-  // - esp[4]    -- vector
-  // - esp[8]    -- receiver
-  // - esp[12]   -- value
-  // - esp[16]   -- return address
+  // - esp[4]    -- receiver
+  // - esp[8]    -- return address
+  // - esp[12]   -- vector
+  // - esp[16]   -- slot
+  // - esp[20]   -- value
   //
-  // Required stack layout for handler call:
+  // Required stack layout for handler call (see StoreWithVectorDescriptor):
   // - esp[0]    -- return address
-  // - receiver, key, value, vector, slot in registers.
-  // - handler in virtual register.
+  // - esp[4]    -- vector
+  // - esp[8]    -- slot
+  // - esp[12]   -- value
+  // - receiver, key, handler in registers.
   Register counter = key;
   __ mov(counter, Immediate(Smi::FromInt(0)));
   __ bind(&next_loop);
@@ -3623,43 +3312,57 @@
   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), feedback);
-  __ pop(value);
-  __ jmp(Operand::StaticVariable(virtual_register));
+  __ jmp(feedback);
 
   __ bind(&transition_call);
   // Current stack layout:
   // - esp[0]    -- key
-  // - esp[4]    -- vector
-  // - esp[8]    -- receiver
-  // - esp[12]   -- value
-  // - esp[16]   -- return address
+  // - esp[4]    -- receiver
+  // - esp[8]    -- return address
+  // - esp[12]   -- vector
+  // - esp[16]   -- slot
+  // - esp[20]   -- value
   //
-  // Required stack layout for handler call:
+  // Required stack layout for handler call (see StoreTransitionDescriptor):
   // - esp[0]    -- return address
-  // - receiver, key, value, map, vector in registers.
-  // - handler and slot in virtual registers.
-  __ mov(Operand::StaticVariable(virtual_slot), slot);
+  // - esp[4]    -- vector
+  // - esp[8]    -- slot
+  // - esp[12]   -- value
+  // - receiver, key, map, handler in registers.
   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
                                 FixedArray::kHeaderSize + 2 * kPointerSize));
   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
-  __ mov(Operand::StaticVariable(virtual_register), feedback);
 
   __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
   // The weak cell may have been cleared.
   __ JumpIfSmi(cached_map, &pop_and_miss);
-  DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
-  __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
+  DCHECK(!cached_map.is(StoreTransitionDescriptor::MapRegister()));
+  __ mov(StoreTransitionDescriptor::MapRegister(), cached_map);
 
-  // Pop key into place.
+  // Call store transition handler using StoreTransitionDescriptor calling
+  // convention.
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
-  __ pop(value);
-  __ jmp(Operand::StaticVariable(virtual_register));
+  // Ensure that the transition handler we are going to call has the same
+  // number of stack arguments which means that we don't have to adapt them
+  // before the call.
+  STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+  STATIC_ASSERT(StoreTransitionDescriptor::kStackArgumentsCount == 3);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kValue ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kValue);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kSlot ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kSlot);
+  STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
+                    StoreWithVectorDescriptor::kVector ==
+                StoreTransitionDescriptor::kParameterCount -
+                    StoreTransitionDescriptor::kVector);
+  __ jmp(feedback);
 
   __ bind(&prepare_next);
   __ add(counter, Immediate(Smi::FromInt(3)));
@@ -3669,7 +3372,6 @@
   // We exhausted our array of map handler pairs.
   __ bind(&pop_and_miss);
   __ pop(key);
-  __ pop(vector);
   __ pop(receiver);
   __ jmp(miss);
 
@@ -3686,7 +3388,26 @@
   Register slot = StoreWithVectorDescriptor::SlotRegister();          // edi
   Label miss;
 
-  __ push(value);
+  if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
+    // Current stack layout:
+    // - esp[8]    -- value
+    // - esp[4]    -- slot
+    // - esp[0]    -- return address
+    STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
+    STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
+    if (in_frame) {
+      __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
+      // If the vector is not on the stack, then insert the vector beneath
+      // return address in order to prepare for calling handler with
+      // StoreWithVector calling convention.
+      __ push(Operand(esp, 0));
+      __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
+      __ RecordComment("]");
+    } else {
+      __ mov(vector, Operand(esp, 1 * kPointerSize));
+    }
+    __ mov(slot, Operand(esp, 2 * kPointerSize));
+  }
 
   Register scratch = value;
   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
@@ -3711,8 +3432,6 @@
   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
   __ j(not_equal, &try_poly_name);
 
-  __ pop(value);
-
   Handle<Code> megamorphic_stub =
       KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
@@ -3729,7 +3448,6 @@
                              &miss);
 
   __ bind(&miss);
-  __ pop(value);
   KeyedStoreIC::GenerateMiss(masm);
 }
 
@@ -4358,7 +4076,7 @@
     // Fall back to %AllocateInNewSpace (if not too big).
     Label too_big_for_new_space;
     __ bind(&allocate);
-    __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+    __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
     __ j(greater, &too_big_for_new_space);
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
@@ -4745,7 +4463,7 @@
   // Fall back to %AllocateInNewSpace (if not too big).
   Label too_big_for_new_space;
   __ bind(&allocate);
-  __ cmp(ecx, Immediate(Page::kMaxRegularHeapObjectSize));
+  __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
   __ j(greater, &too_big_for_new_space);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
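
The CallICStub changes above fold the separate initialize-to-1 and increment paths into one IncrementCallCount helper: the Smi stored one pointer past the feedback slot is bumped on every path (monomorphic, megamorphic, and freshly initialized ICs alike), and the miss path jumps to call_count_incremented so the count is not bumped twice. A toy model of that counter layout, assuming only that the count lives at slot + 1, which is what the generated FieldOperand shows:

#include <cassert>
#include <vector>

// Toy model of a feedback vector: element |slot| holds the feedback
// (weak cell / sentinel), element |slot + 1| holds the call count.
struct FeedbackVector {
  std::vector<int> data;
  int& count_at(int slot) { return data[slot + 1]; }
};

// Equivalent of the IncrementCallCount helper: add 1 to the count slot,
// regardless of which IC state the call ends up in.
void IncrementCallCount(FeedbackVector& vector, int slot) {
  vector.count_at(slot) += 1;
}

int main() {
  FeedbackVector v{{0, 0, 0, 0}};
  IncrementCallCount(v, 0);  // monomorphic path
  IncrementCallCount(v, 0);  // megamorphic path now increments as well
  assert(v.count_at(0) == 2);
}
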
diff --git a/src/x87/interface-descriptors-x87.cc b/src/x87/interface-descriptors-x87.cc
index 4ef88e8..85b26ca 100644
--- a/src/x87/interface-descriptors-x87.cc
+++ b/src/x87/interface-descriptors-x87.cc
@@ -39,19 +39,11 @@
 
 const Register StoreWithVectorDescriptor::VectorRegister() { return ebx; }
 
-const Register VectorStoreTransitionDescriptor::SlotRegister() {
-  return no_reg;
-}
+const Register StoreTransitionDescriptor::SlotRegister() { return no_reg; }
 
+const Register StoreTransitionDescriptor::VectorRegister() { return ebx; }
 
-const Register VectorStoreTransitionDescriptor::VectorRegister() { return ebx; }
-
-
-const Register VectorStoreTransitionDescriptor::MapRegister() { return edi; }
-
-
-const Register StoreTransitionDescriptor::MapRegister() { return ebx; }
-
+const Register StoreTransitionDescriptor::MapRegister() { return edi; }
 
 const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
 const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }
@@ -363,7 +355,7 @@
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
 
-void ApiCallbackDescriptorBase::InitializePlatformSpecific(
+void ApiCallbackDescriptor::InitializePlatformSpecific(
     CallInterfaceDescriptorData* data) {
   Register registers[] = {
       edi,  // callee
@@ -398,7 +390,19 @@
       eax,  // argument count (not including receiver)
       edx,  // new target
       edi,  // constructor
-      ebx,  // address of first argument
+      ebx,  // allocation site feedback
+      ecx,  // address of first argument
+  };
+  data->InitializePlatformSpecific(arraysize(registers), registers);
+}
+
+void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  Register registers[] = {
+      eax,  // argument count (not including receiver)
+      edx,  // target to the call. It is checked to be Array function.
+      ebx,  // allocation site feedback
+      ecx,  // address of first argument
   };
   data->InitializePlatformSpecific(arraysize(registers), registers);
 }
diff --git a/src/x87/macro-assembler-x87.cc b/src/x87/macro-assembler-x87.cc
index 9ffbf9f..dafe985 100644
--- a/src/x87/macro-assembler-x87.cc
+++ b/src/x87/macro-assembler-x87.cc
@@ -167,9 +167,8 @@
 void MacroAssembler::InNewSpace(Register object, Register scratch, Condition cc,
                                 Label* condition_met,
                                 Label::Distance distance) {
-  const int mask =
-      (1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
-  CheckPageFlag(object, scratch, mask, cc, condition_met, distance);
+  CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cc,
+                condition_met, distance);
 }
 
 
@@ -1487,7 +1486,7 @@
                               Label* gc_required,
                               AllocationFlags flags) {
   DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
-  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(object_size <= kMaxRegularHeapObjectSize);
   DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
diff --git a/src/x87/macro-assembler-x87.h b/src/x87/macro-assembler-x87.h
index 13988ae..bdd3c03 100644
--- a/src/x87/macro-assembler-x87.h
+++ b/src/x87/macro-assembler-x87.h
@@ -787,6 +787,24 @@
   // may be bigger than 2^16 - 1.  Requires a scratch register.
   void Ret(int bytes_dropped, Register scratch);
 
+  // Emit code that loads the |parameter_index|'th parameter from the stack
+  // into |reg| according to the CallInterfaceDescriptor definition.
+  // |sp_to_ra_offset_in_words| specifies the number of words pushed below
+  // the caller's sp (on x87 it's at least the return address).
+  template <class Descriptor>
+  void LoadParameterFromStack(
+      Register reg, typename Descriptor::ParameterIndices parameter_index,
+      int sp_to_ra_offset_in_words = 1) {
+    DCHECK(Descriptor::kPassLastArgsOnStack);
+    DCHECK_LT(parameter_index, Descriptor::kParameterCount);
+    DCHECK_LE(Descriptor::kParameterCount - Descriptor::kStackArgumentsCount,
+              parameter_index);
+    int offset = (Descriptor::kParameterCount - parameter_index - 1 +
+                  sp_to_ra_offset_in_words) *
+                 kPointerSize;
+    mov(reg, Operand(esp, offset));
+  }
+
   // Emit code to discard a non-negative number of pointer-sized elements
   // from the stack, clobbering only the esp register.
   void Drop(int element_count);
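
LoadParameterFromStack above locates a descriptor parameter relative to esp: parameter i of kParameterCount sits (kParameterCount - i - 1) slots above the deepest stack argument, plus sp_to_ra_offset_in_words words for the return address and anything pushed since. A worked sketch of the same arithmetic for a hypothetical descriptor with five parameters of which the last three are on the stack, matching the value/slot/vector layout asserted for StoreWithVectorDescriptor earlier in this patch:

#include <cassert>

// Hypothetical descriptor layout: 5 parameters, last 3 passed on the stack.
constexpr int kParameterCount = 5;
constexpr int kStackArgumentsCount = 3;
constexpr int kPointerSize = 4;  // x87 is a 32-bit port

// Same formula as LoadParameterFromStack: distance from esp in bytes.
constexpr int StackOffset(int parameter_index,
                          int sp_to_ra_offset_in_words = 1) {
  return (kParameterCount - parameter_index - 1 + sp_to_ra_offset_in_words) *
         kPointerSize;
}

int main() {
  // With only the return address on top (offset of one word), the last
  // parameter (index 4, e.g. the vector) is right above it, and the first
  // stack parameter (index 2, e.g. the value) is the deepest.
  static_assert(StackOffset(4) == 1 * kPointerSize, "last parameter");
  static_assert(StackOffset(3) == 2 * kPointerSize, "middle stack parameter");
  static_assert(StackOffset(2) == 3 * kPointerSize, "first stack parameter");
  // One extra word pushed since entry shifts every offset by kPointerSize.
  assert(StackOffset(2, /*sp_to_ra_offset_in_words=*/2) == 4 * kPointerSize);
}
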
diff --git a/src/zone-allocator.h b/src/zone-allocator.h
deleted file mode 100644
index f46151e..0000000
--- a/src/zone-allocator.h
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_ZONE_ALLOCATOR_H_
-#define V8_ZONE_ALLOCATOR_H_
-
-#include <limits>
-
-#include "src/zone.h"
-
-namespace v8 {
-namespace internal {
-
-template<typename T>
-class zone_allocator {
- public:
-  typedef T* pointer;
-  typedef const T* const_pointer;
-  typedef T& reference;
-  typedef const T& const_reference;
-  typedef T value_type;
-  typedef size_t size_type;
-  typedef ptrdiff_t difference_type;
-  template<class O> struct rebind {
-    typedef zone_allocator<O> other;
-  };
-
-  explicit zone_allocator(Zone* zone) throw() : zone_(zone) {}
-  explicit zone_allocator(const zone_allocator& other) throw()
-      : zone_(other.zone_) {}
-  template<typename U> zone_allocator(const zone_allocator<U>& other) throw()
-      : zone_(other.zone_) {}
-  template<typename U> friend class zone_allocator;
-
-  pointer address(reference x) const {return &x;}
-  const_pointer address(const_reference x) const {return &x;}
-
-  pointer allocate(size_type n, const void* hint = 0) {
-    return static_cast<pointer>(zone_->NewArray<value_type>(
-            static_cast<int>(n)));
-  }
-  void deallocate(pointer p, size_type) { /* noop for Zones */ }
-
-  size_type max_size() const throw() {
-    return std::numeric_limits<int>::max() / sizeof(value_type);
-  }
-  void construct(pointer p, const T& val) {
-    new(static_cast<void*>(p)) T(val);
-  }
-  void destroy(pointer p) { p->~T(); }
-
-  bool operator==(zone_allocator const& other) const {
-    return zone_ == other.zone_;
-  }
-  bool operator!=(zone_allocator const& other) const {
-    return zone_ != other.zone_;
-  }
-
-  Zone* zone() { return zone_; }
-
- private:
-  zone_allocator();
-  Zone* zone_;
-};
-
-typedef zone_allocator<bool> ZoneBoolAllocator;
-typedef zone_allocator<int> ZoneIntAllocator;
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_ZONE_ALLOCATOR_H_
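
The deleted zone-allocator.h is a standard C++ Allocator adaptor over V8's Zone: allocate() forwards to the zone and deallocate() is a no-op, so containers built on it (the ZoneVector/ZoneMap wrappers in zone-containers.h below) are reclaimed wholesale when the zone dies. A minimal sketch of the same pattern against a hypothetical bump-pointer arena rather than V8's Zone:

#include <cstddef>
#include <cstdint>
#include <new>
#include <vector>

// Hypothetical bump-pointer arena standing in for V8's Zone.
class Arena {
 public:
  void* Allocate(size_t bytes) {
    size_t offset = used_;
    used_ += (bytes + 7) & ~size_t{7};                     // keep 8-byte alignment
    if (used_ > sizeof(buffer_)) throw std::bad_alloc();   // sketch: no growth
    return buffer_ + offset;
  }

 private:
  alignas(8) uint8_t buffer_[1 << 16];
  size_t used_ = 0;
};

// Same shape as the removed zone_allocator: allocate() goes to the arena,
// deallocate() is a no-op, so container memory lives as long as the arena.
template <typename T>
struct arena_allocator {
  using value_type = T;
  explicit arena_allocator(Arena* arena) : arena_(arena) {}
  template <typename U>
  arena_allocator(const arena_allocator<U>& other) : arena_(other.arena_) {}
  T* allocate(size_t n) {
    return static_cast<T*>(arena_->Allocate(n * sizeof(T)));
  }
  void deallocate(T*, size_t) {}  // no-op, exactly like zone_allocator
  bool operator==(const arena_allocator& o) const { return arena_ == o.arena_; }
  bool operator!=(const arena_allocator& o) const { return arena_ != o.arena_; }
  Arena* arena_;
};

int main() {
  Arena arena;
  arena_allocator<int> alloc(&arena);
  std::vector<int, arena_allocator<int>> numbers(alloc);
  for (int i = 0; i < 100; ++i) numbers.push_back(i);
  // No per-element frees: everything is released when |arena| goes away.
}
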
diff --git a/src/zone-containers.h b/src/zone-containers.h
deleted file mode 100644
index 79b168c..0000000
--- a/src/zone-containers.h
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_ZONE_CONTAINERS_H_
-#define V8_ZONE_CONTAINERS_H_
-
-#include <deque>
-#include <list>
-#include <map>
-#include <queue>
-#include <set>
-#include <stack>
-#include <vector>
-
-#include "src/zone-allocator.h"
-
-namespace v8 {
-namespace internal {
-
-// A wrapper subclass for std::vector to make it easy to construct one
-// that uses a zone allocator.
-template <typename T>
-class ZoneVector : public std::vector<T, zone_allocator<T>> {
- public:
-  // Constructs an empty vector.
-  explicit ZoneVector(Zone* zone)
-      : std::vector<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
-
-  // Constructs a new vector and fills it with {size} elements, each
-  // constructed via the default constructor.
-  ZoneVector(size_t size, Zone* zone)
-      : std::vector<T, zone_allocator<T>>(size, T(), zone_allocator<T>(zone)) {}
-
-  // Constructs a new vector and fills it with {size} elements, each
-  // having the value {def}.
-  ZoneVector(size_t size, T def, Zone* zone)
-      : std::vector<T, zone_allocator<T>>(size, def, zone_allocator<T>(zone)) {}
-};
-
-
-// A wrapper subclass std::deque to make it easy to construct one
-// that uses a zone allocator.
-template <typename T>
-class ZoneDeque : public std::deque<T, zone_allocator<T>> {
- public:
-  // Constructs an empty deque.
-  explicit ZoneDeque(Zone* zone)
-      : std::deque<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
-};
-
-
-// A wrapper subclass std::list to make it easy to construct one
-// that uses a zone allocator.
-// TODO(mstarzinger): This should be renamed to ZoneList once we got rid of our
-// own home-grown ZoneList that actually is a ZoneVector.
-template <typename T>
-class ZoneLinkedList : public std::list<T, zone_allocator<T>> {
- public:
-  // Constructs an empty list.
-  explicit ZoneLinkedList(Zone* zone)
-      : std::list<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
-};
-
-
-// A wrapper subclass std::priority_queue to make it easy to construct one
-// that uses a zone allocator.
-template <typename T, typename Compare = std::less<T>>
-class ZonePriorityQueue
-    : public std::priority_queue<T, ZoneVector<T>, Compare> {
- public:
-  // Constructs an empty list.
-  explicit ZonePriorityQueue(Zone* zone)
-      : std::priority_queue<T, ZoneVector<T>, Compare>(Compare(),
-                                                       ZoneVector<T>(zone)) {}
-};
-
-
-// A wrapper subclass for std::queue to make it easy to construct one
-// that uses a zone allocator.
-template <typename T>
-class ZoneQueue : public std::queue<T, ZoneDeque<T>> {
- public:
-  // Constructs an empty queue.
-  explicit ZoneQueue(Zone* zone)
-      : std::queue<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
-};
-
-
-// A wrapper subclass for std::stack to make it easy to construct one that uses
-// a zone allocator.
-template <typename T>
-class ZoneStack : public std::stack<T, ZoneDeque<T>> {
- public:
-  // Constructs an empty stack.
-  explicit ZoneStack(Zone* zone)
-      : std::stack<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
-};
-
-
-// A wrapper subclass for std::set to make it easy to construct one that uses
-// a zone allocator.
-template <typename K, typename Compare = std::less<K>>
-class ZoneSet : public std::set<K, Compare, zone_allocator<K>> {
- public:
-  // Constructs an empty set.
-  explicit ZoneSet(Zone* zone)
-      : std::set<K, Compare, zone_allocator<K>>(Compare(),
-                                                zone_allocator<K>(zone)) {}
-};
-
-
-// A wrapper subclass for std::map to make it easy to construct one that uses
-// a zone allocator.
-template <typename K, typename V, typename Compare = std::less<K>>
-class ZoneMap
-    : public std::map<K, V, Compare, zone_allocator<std::pair<const K, V>>> {
- public:
-  // Constructs an empty map.
-  explicit ZoneMap(Zone* zone)
-      : std::map<K, V, Compare, zone_allocator<std::pair<const K, V>>>(
-            Compare(), zone_allocator<std::pair<const K, V>>(zone)) {}
-};
-
-
-// Typedefs to shorten commonly used vectors.
-typedef ZoneVector<bool> BoolVector;
-typedef ZoneVector<int> IntVector;
-
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_ZONE_CONTAINERS_H_
diff --git a/src/zone.cc b/src/zone.cc
deleted file mode 100644
index a10b636..0000000
--- a/src/zone.cc
+++ /dev/null
@@ -1,280 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/zone.h"
-
-#include <cstring>
-
-#include "src/v8.h"
-
-#ifdef V8_USE_ADDRESS_SANITIZER
-#include <sanitizer/asan_interface.h>
-#endif  // V8_USE_ADDRESS_SANITIZER
-
-namespace v8 {
-namespace internal {
-
-namespace {
-
-#if V8_USE_ADDRESS_SANITIZER
-
-const size_t kASanRedzoneBytes = 24;  // Must be a multiple of 8.
-
-#else
-
-#define ASAN_POISON_MEMORY_REGION(start, size) \
-  do {                                         \
-    USE(start);                                \
-    USE(size);                                 \
-  } while (false)
-
-#define ASAN_UNPOISON_MEMORY_REGION(start, size) \
-  do {                                           \
-    USE(start);                                  \
-    USE(size);                                   \
-  } while (false)
-
-const size_t kASanRedzoneBytes = 0;
-
-#endif  // V8_USE_ADDRESS_SANITIZER
-
-}  // namespace
-
-
-// Segments represent chunks of memory: They have starting address
-// (encoded in the this pointer) and a size in bytes. Segments are
-// chained together forming a LIFO structure with the newest segment
-// available as segment_head_. Segments are allocated using malloc()
-// and de-allocated using free().
-
-class Segment {
- public:
-  void Initialize(Segment* next, size_t size) {
-    next_ = next;
-    size_ = size;
-  }
-
-  Segment* next() const { return next_; }
-  void clear_next() { next_ = nullptr; }
-
-  size_t size() const { return size_; }
-  size_t capacity() const { return size_ - sizeof(Segment); }
-
-  Address start() const { return address(sizeof(Segment)); }
-  Address end() const { return address(size_); }
-
- private:
-  // Computes the address of the nth byte in this segment.
-  Address address(size_t n) const { return Address(this) + n; }
-
-  Segment* next_;
-  size_t size_;
-};
-
-Zone::Zone(base::AccountingAllocator* allocator)
-    : allocation_size_(0),
-      segment_bytes_allocated_(0),
-      position_(0),
-      limit_(0),
-      allocator_(allocator),
-      segment_head_(nullptr) {}
-
-Zone::~Zone() {
-  DeleteAll();
-  DeleteKeptSegment();
-
-  DCHECK(segment_bytes_allocated_ == 0);
-}
-
-
-void* Zone::New(size_t size) {
-  // Round up the requested size to fit the alignment.
-  size = RoundUp(size, kAlignment);
-
-  // If the allocation size is divisible by 8 then we return an 8-byte aligned
-  // address.
-  if (kPointerSize == 4 && kAlignment == 4) {
-    position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4);
-  } else {
-    DCHECK(kAlignment >= kPointerSize);
-  }
-
-  // Check if the requested size is available without expanding.
-  Address result = position_;
-
-  const size_t size_with_redzone = size + kASanRedzoneBytes;
-  const uintptr_t limit = reinterpret_cast<uintptr_t>(limit_);
-  const uintptr_t position = reinterpret_cast<uintptr_t>(position_);
-  // position_ > limit_ can be true after the alignment correction above.
-  if (limit < position || size_with_redzone > limit - position) {
-    result = NewExpand(size_with_redzone);
-  } else {
-    position_ += size_with_redzone;
-  }
-
-  Address redzone_position = result + size;
-  DCHECK(redzone_position + kASanRedzoneBytes == position_);
-  ASAN_POISON_MEMORY_REGION(redzone_position, kASanRedzoneBytes);
-
-  // Check that the result has the proper alignment and return it.
-  DCHECK(IsAddressAligned(result, kAlignment, 0));
-  allocation_size_ += size;
-  return reinterpret_cast<void*>(result);
-}
-
-
-void Zone::DeleteAll() {
-#ifdef DEBUG
-  // Constant byte value used for zapping dead memory in debug mode.
-  static const unsigned char kZapDeadByte = 0xcd;
-#endif
-
-  // Find a segment with a suitable size to keep around.
-  Segment* keep = nullptr;
-  // Traverse the chained list of segments, zapping (in debug mode)
-  // and freeing every segment except the one we wish to keep.
-  for (Segment* current = segment_head_; current;) {
-    Segment* next = current->next();
-    if (!keep && current->size() <= kMaximumKeptSegmentSize) {
-      // Unlink the segment we wish to keep from the list.
-      keep = current;
-      keep->clear_next();
-    } else {
-      size_t size = current->size();
-#ifdef DEBUG
-      // Un-poison first so the zapping doesn't trigger ASan complaints.
-      ASAN_UNPOISON_MEMORY_REGION(current, size);
-      // Zap the entire current segment (including the header).
-      memset(current, kZapDeadByte, size);
-#endif
-      DeleteSegment(current, size);
-    }
-    current = next;
-  }
-
-  // If we have found a segment we want to keep, we must recompute the
-  // variables 'position' and 'limit' to prepare for future allocate
-  // attempts. Otherwise, we must clear the position and limit to
-  // force a new segment to be allocated on demand.
-  if (keep) {
-    Address start = keep->start();
-    position_ = RoundUp(start, kAlignment);
-    limit_ = keep->end();
-    // Un-poison so we can re-use the segment later.
-    ASAN_UNPOISON_MEMORY_REGION(start, keep->capacity());
-#ifdef DEBUG
-    // Zap the contents of the kept segment (but not the header).
-    memset(start, kZapDeadByte, keep->capacity());
-#endif
-  } else {
-    position_ = limit_ = 0;
-  }
-
-  allocation_size_ = 0;
-  // Update the head segment to be the kept segment (if any).
-  segment_head_ = keep;
-}
-
-
-void Zone::DeleteKeptSegment() {
-#ifdef DEBUG
-  // Constant byte value used for zapping dead memory in debug mode.
-  static const unsigned char kZapDeadByte = 0xcd;
-#endif
-
-  DCHECK(segment_head_ == nullptr || segment_head_->next() == nullptr);
-  if (segment_head_ != nullptr) {
-    size_t size = segment_head_->size();
-#ifdef DEBUG
-    // Un-poison first so the zapping doesn't trigger ASan complaints.
-    ASAN_UNPOISON_MEMORY_REGION(segment_head_, size);
-    // Zap the entire kept segment (including the header).
-    memset(segment_head_, kZapDeadByte, size);
-#endif
-    DeleteSegment(segment_head_, size);
-    segment_head_ = nullptr;
-  }
-
-  DCHECK(segment_bytes_allocated_ == 0);
-}
-
-
-// Creates a new segment, sets it size, and pushes it to the front
-// of the segment chain. Returns the new segment.
-Segment* Zone::NewSegment(size_t size) {
-  Segment* result = reinterpret_cast<Segment*>(allocator_->Allocate(size));
-  segment_bytes_allocated_ += size;
-  if (result != nullptr) {
-    result->Initialize(segment_head_, size);
-    segment_head_ = result;
-  }
-  return result;
-}
-
-
-// Deletes the given segment. Does not touch the segment chain.
-void Zone::DeleteSegment(Segment* segment, size_t size) {
-  segment_bytes_allocated_ -= size;
-  allocator_->Free(segment, size);
-}
-
-
-Address Zone::NewExpand(size_t size) {
-  // Make sure the requested size is already properly aligned and that
-  // there isn't enough room in the Zone to satisfy the request.
-  DCHECK_EQ(size, RoundDown(size, kAlignment));
-  DCHECK(limit_ < position_ ||
-         reinterpret_cast<uintptr_t>(limit_) -
-                 reinterpret_cast<uintptr_t>(position_) <
-             size);
-
-  // Compute the new segment size. We use a 'high water mark'
-  // strategy, where we increase the segment size every time we expand
-  // except that we employ a maximum segment size when we delete. This
-  // is to avoid excessive malloc() and free() overhead.
-  Segment* head = segment_head_;
-  const size_t old_size = (head == nullptr) ? 0 : head->size();
-  static const size_t kSegmentOverhead = sizeof(Segment) + kAlignment;
-  const size_t new_size_no_overhead = size + (old_size << 1);
-  size_t new_size = kSegmentOverhead + new_size_no_overhead;
-  const size_t min_new_size = kSegmentOverhead + size;
-  // Guard against integer overflow.
-  if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
-    V8::FatalProcessOutOfMemory("Zone");
-    return nullptr;
-  }
-  if (new_size < kMinimumSegmentSize) {
-    new_size = kMinimumSegmentSize;
-  } else if (new_size > kMaximumSegmentSize) {
-    // Limit the size of new segments to avoid growing the segment size
-    // exponentially, thus putting pressure on contiguous virtual address space.
-    // All the while making sure to allocate a segment large enough to hold the
-    // requested size.
-    new_size = Max(min_new_size, kMaximumSegmentSize);
-  }
-  if (new_size > INT_MAX) {
-    V8::FatalProcessOutOfMemory("Zone");
-    return nullptr;
-  }
-  Segment* segment = NewSegment(new_size);
-  if (segment == nullptr) {
-    V8::FatalProcessOutOfMemory("Zone");
-    return nullptr;
-  }
-
-  // Recompute 'top' and 'limit' based on the new segment.
-  Address result = RoundUp(segment->start(), kAlignment);
-  position_ = result + size;
-  // Check for address overflow.
-  // (Should not happen since the segment is guaranteed to accomodate
-  // size bytes + header and alignment padding)
-  DCHECK(reinterpret_cast<uintptr_t>(position_) >=
-         reinterpret_cast<uintptr_t>(result));
-  limit_ = segment->end();
-  DCHECK(position_ <= limit_);
-  return result;
-}
-
-}  // namespace internal
-}  // namespace v8
diff --git a/src/zone.h b/src/zone.h
deleted file mode 100644
index 29055cb..0000000
--- a/src/zone.h
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2012 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef V8_ZONE_H_
-#define V8_ZONE_H_
-
-#include <limits>
-
-#include "src/base/accounting-allocator.h"
-#include "src/base/hashmap.h"
-#include "src/base/logging.h"
-#include "src/globals.h"
-#include "src/list.h"
-#include "src/splay-tree.h"
-
-namespace v8 {
-namespace internal {
-
-// Forward declarations.
-class Segment;
-
-
-// The Zone supports very fast allocation of small chunks of
-// memory. The chunks cannot be deallocated individually, but instead
-// the Zone supports deallocating all chunks in one fast
-// operation. The Zone is used to hold temporary data structures like
-// the abstract syntax tree, which is deallocated after compilation.
-//
-// Note: There is no need to initialize the Zone; the first time an
-// allocation is attempted, a segment of memory will be requested
-// through a call to malloc().
-//
-// Note: The implementation is inherently not thread safe. Do not use
-// from multi-threaded code.
-class Zone final {
- public:
-  explicit Zone(base::AccountingAllocator* allocator);
-  ~Zone();
-
-  // Allocate 'size' bytes of memory in the Zone; expands the Zone by
-  // allocating new segments of memory on demand using malloc().
-  void* New(size_t size);
-
-  template <typename T>
-  T* NewArray(size_t length) {
-    DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
-    return static_cast<T*>(New(length * sizeof(T)));
-  }
-
-  // Deletes all objects and free all memory allocated in the Zone. Keeps one
-  // small (size <= kMaximumKeptSegmentSize) segment around if it finds one.
-  void DeleteAll();
-
-  // Deletes the last small segment kept around by DeleteAll(). You
-  // may no longer allocate in the Zone after a call to this method.
-  void DeleteKeptSegment();
-
-  // Returns true if more memory has been allocated in zones than
-  // the limit allows.
-  bool excess_allocation() const {
-    return segment_bytes_allocated_ > kExcessLimit;
-  }
-
-  size_t allocation_size() const { return allocation_size_; }
-
-  base::AccountingAllocator* allocator() const { return allocator_; }
-
- private:
-  // All pointers returned from New() have this alignment.  In addition, if the
-  // object being allocated has a size that is divisible by 8 then its alignment
-  // will be 8. ASan requires 8-byte alignment.
-#ifdef V8_USE_ADDRESS_SANITIZER
-  static const size_t kAlignment = 8;
-  STATIC_ASSERT(kPointerSize <= 8);
-#else
-  static const size_t kAlignment = kPointerSize;
-#endif
-
-  // Never allocate segments smaller than this size in bytes.
-  static const size_t kMinimumSegmentSize = 8 * KB;
-
-  // Never allocate segments larger than this size in bytes.
-  static const size_t kMaximumSegmentSize = 1 * MB;
-
-  // Never keep segments larger than this size in bytes around.
-  static const size_t kMaximumKeptSegmentSize = 64 * KB;
-
-  // Report zone excess when allocation exceeds this limit.
-  static const size_t kExcessLimit = 256 * MB;
-
-  // The number of bytes allocated in this zone so far.
-  size_t allocation_size_;
-
-  // The number of bytes allocated in segments.  Note that this number
-  // includes memory allocated from the OS but not yet allocated from
-  // the zone.
-  size_t segment_bytes_allocated_;
-
-  // Expand the Zone to hold at least 'size' more bytes and allocate
-  // the bytes. Returns the address of the newly allocated chunk of
-  // memory in the Zone. Should only be called if there isn't enough
-  // room in the Zone already.
-  Address NewExpand(size_t size);
-
-  // Creates a new segment, sets it size, and pushes it to the front
-  // of the segment chain. Returns the new segment.
-  inline Segment* NewSegment(size_t size);
-
-  // Deletes the given segment. Does not touch the segment chain.
-  inline void DeleteSegment(Segment* segment, size_t size);
-
-  // The free region in the current (front) segment is represented as
-  // the half-open interval [position, limit). The 'position' variable
-  // is guaranteed to be aligned as dictated by kAlignment.
-  Address position_;
-  Address limit_;
-
-  base::AccountingAllocator* allocator_;
-
-  Segment* segment_head_;
-};
-
-
-// ZoneObject is an abstraction that helps define classes of objects
-// allocated in the Zone. Use it as a base class; see ast.h.
-class ZoneObject {
- public:
-  // Allocate a new ZoneObject of 'size' bytes in the Zone.
-  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
-
-  // Ideally, the delete operator should be private instead of
-  // public, but unfortunately the compiler sometimes synthesizes
-  // (unused) destructors for classes derived from ZoneObject, which
-  // require the operator to be visible. MSVC requires the delete
-  // operator to be public.
-
-  // ZoneObjects should never be deleted individually; use
-  // Zone::DeleteAll() to delete all zone objects in one go.
-  void operator delete(void*, size_t) { UNREACHABLE(); }
-  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
-};
-
-
-// The ZoneScope is used to automatically call DeleteAll() on a
-// Zone when the ZoneScope is destroyed (i.e. goes out of scope)
-class ZoneScope final {
- public:
-  explicit ZoneScope(Zone* zone) : zone_(zone) { }
-  ~ZoneScope() { zone_->DeleteAll(); }
-
-  Zone* zone() const { return zone_; }
-
- private:
-  Zone* zone_;
-};
-
-
-// The ZoneAllocationPolicy is used to specialize generic data
-// structures to allocate themselves and their elements in the Zone.
-class ZoneAllocationPolicy final {
- public:
-  explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) { }
-  void* New(size_t size) { return zone()->New(size); }
-  static void Delete(void* pointer) {}
-  Zone* zone() const { return zone_; }
-
- private:
-  Zone* zone_;
-};
-
-
-// ZoneLists are growable lists with constant-time access to the
-// elements. The list itself and all its elements are allocated in the
-// Zone. ZoneLists cannot be deleted individually; you can delete all
-// objects in the Zone by calling Zone::DeleteAll().
-template <typename T>
-class ZoneList final : public List<T, ZoneAllocationPolicy> {
- public:
-  // Construct a new ZoneList with the given capacity; the length is
-  // always zero. The capacity must be non-negative.
-  ZoneList(int capacity, Zone* zone)
-      : List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) { }
-
-  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
-
-  // Construct a new ZoneList by copying the elements of the given ZoneList.
-  ZoneList(const ZoneList<T>& other, Zone* zone)
-      : List<T, ZoneAllocationPolicy>(other.length(),
-                                      ZoneAllocationPolicy(zone)) {
-    AddAll(other, zone);
-  }
-
-  // We add some convenience wrappers so that we can pass in a Zone
-  // instead of a (less convenient) ZoneAllocationPolicy.
-  void Add(const T& element, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::Add(element, ZoneAllocationPolicy(zone));
-  }
-  void AddAll(const List<T, ZoneAllocationPolicy>& other, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
-  }
-  void AddAll(const Vector<T>& other, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
-  }
-  void InsertAt(int index, const T& element, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::InsertAt(index, element,
-                                            ZoneAllocationPolicy(zone));
-  }
-  Vector<T> AddBlock(T value, int count, Zone* zone) {
-    return List<T, ZoneAllocationPolicy>::AddBlock(value, count,
-                                                   ZoneAllocationPolicy(zone));
-  }
-  void Allocate(int length, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::Allocate(length, ZoneAllocationPolicy(zone));
-  }
-  void Initialize(int capacity, Zone* zone) {
-    List<T, ZoneAllocationPolicy>::Initialize(capacity,
-                                              ZoneAllocationPolicy(zone));
-  }
-
-  void operator delete(void* pointer) { UNREACHABLE(); }
-  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
-};
-
-
-// A zone splay tree.  The config type parameter encapsulates the
-// different configurations of a concrete splay tree (see splay-tree.h).
-// The tree itself and all its elements are allocated in the Zone.
-template <typename Config>
-class ZoneSplayTree final : public SplayTree<Config, ZoneAllocationPolicy> {
- public:
-  explicit ZoneSplayTree(Zone* zone)
-      : SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
-  ~ZoneSplayTree() {
-    // Reset the root to avoid unneeded iteration over all tree nodes
-    // in the destructor.  For a zone-allocated tree, nodes will be
-    // freed by the Zone.
-    SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
-  }
-
-  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
-
-  void operator delete(void* pointer) { UNREACHABLE(); }
-  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
-};
-
-typedef base::TemplateHashMapImpl<ZoneAllocationPolicy> ZoneHashMap;
-
-}  // namespace internal
-}  // namespace v8
-
-#endif  // V8_ZONE_H_
diff --git a/src/zone/accounting-allocator.cc b/src/zone/accounting-allocator.cc
new file mode 100644
index 0000000..663ea32
--- /dev/null
+++ b/src/zone/accounting-allocator.cc
@@ -0,0 +1,45 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/zone/accounting-allocator.h"
+
+#include <cstdlib>
+
+#if V8_LIBC_BIONIC
+#include <malloc.h>  // NOLINT
+#endif
+
+namespace v8 {
+namespace internal {
+
+Segment* AccountingAllocator::AllocateSegment(size_t bytes) {
+  void* memory = malloc(bytes);
+  if (memory) {
+    base::AtomicWord current =
+        base::NoBarrier_AtomicIncrement(&current_memory_usage_, bytes);
+    base::AtomicWord max = base::NoBarrier_Load(&max_memory_usage_);
+    while (current > max) {
+      max = base::NoBarrier_CompareAndSwap(&max_memory_usage_, max, current);
+    }
+  }
+  return reinterpret_cast<Segment*>(memory);
+}
+
+void AccountingAllocator::FreeSegment(Segment* memory) {
+  base::NoBarrier_AtomicIncrement(
+      &current_memory_usage_, -static_cast<base::AtomicWord>(memory->size()));
+  memory->ZapHeader();
+  free(memory);
+}
+
+size_t AccountingAllocator::GetCurrentMemoryUsage() const {
+  return base::NoBarrier_Load(&current_memory_usage_);
+}
+
+size_t AccountingAllocator::GetMaxMemoryUsage() const {
+  return base::NoBarrier_Load(&max_memory_usage_);
+}
+
+}  // namespace internal
+}  // namespace v8
diff --git a/src/zone/accounting-allocator.h b/src/zone/accounting-allocator.h
new file mode 100644
index 0000000..31016a5
--- /dev/null
+++ b/src/zone/accounting-allocator.h
@@ -0,0 +1,41 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ZONE_ACCOUNTING_ALLOCATOR_H_
+#define V8_ZONE_ACCOUNTING_ALLOCATOR_H_
+
+#include "include/v8-platform.h"
+#include "src/base/atomic-utils.h"
+#include "src/base/atomicops.h"
+#include "src/base/macros.h"
+#include "src/base/platform/mutex.h"
+#include "src/base/platform/semaphore.h"
+#include "src/base/platform/time.h"
+#include "src/zone/zone-segment.h"
+
+namespace v8 {
+namespace internal {
+
+class V8_EXPORT_PRIVATE AccountingAllocator {
+ public:
+  AccountingAllocator() = default;
+  virtual ~AccountingAllocator() = default;
+
+  virtual Segment* AllocateSegment(size_t bytes);
+  virtual void FreeSegment(Segment* memory);
+
+  size_t GetCurrentMemoryUsage() const;
+  size_t GetMaxMemoryUsage() const;
+
+ private:
+  base::AtomicWord current_memory_usage_ = 0;
+  base::AtomicWord max_memory_usage_ = 0;
+
+  DISALLOW_COPY_AND_ASSIGN(AccountingAllocator);
+};
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_ZONE_ACCOUNTING_ALLOCATOR_H_
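
For reference, a minimal sketch (not from this patch; LoggingAllocator is a hypothetical name, v8::internal namespace assumed) of how the virtual AllocateSegment/FreeSegment hooks above can be specialized while keeping the base class's usage accounting intact:

class LoggingAllocator : public AccountingAllocator {
 public:
  Segment* AllocateSegment(size_t bytes) override {
    ++segment_allocations_;  // extra bookkeeping on top of the base counters
    return AccountingAllocator::AllocateSegment(bytes);
  }
  void FreeSegment(Segment* memory) override {
    ++segment_frees_;
    AccountingAllocator::FreeSegment(memory);
  }
  size_t segment_allocations() const { return segment_allocations_; }
  size_t segment_frees() const { return segment_frees_; }

 private:
  size_t segment_allocations_ = 0;
  size_t segment_frees_ = 0;
};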
diff --git a/src/zone/zone-allocator.h b/src/zone/zone-allocator.h
new file mode 100644
index 0000000..8370d73
--- /dev/null
+++ b/src/zone/zone-allocator.h
@@ -0,0 +1,74 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ZONE_ZONE_ALLOCATOR_H_
+#define V8_ZONE_ZONE_ALLOCATOR_H_
+#include <limits>
+
+#include "src/zone/zone.h"
+
+namespace v8 {
+namespace internal {
+
+template <typename T>
+class zone_allocator {
+ public:
+  typedef T* pointer;
+  typedef const T* const_pointer;
+  typedef T& reference;
+  typedef const T& const_reference;
+  typedef T value_type;
+  typedef size_t size_type;
+  typedef ptrdiff_t difference_type;
+  template <class O>
+  struct rebind {
+    typedef zone_allocator<O> other;
+  };
+
+  explicit zone_allocator(Zone* zone) throw() : zone_(zone) {}
+  explicit zone_allocator(const zone_allocator& other) throw()
+      : zone_(other.zone_) {}
+  template <typename U>
+  zone_allocator(const zone_allocator<U>& other) throw() : zone_(other.zone_) {}
+  template <typename U>
+  friend class zone_allocator;
+
+  pointer address(reference x) const { return &x; }
+  const_pointer address(const_reference x) const { return &x; }
+
+  pointer allocate(size_type n, const void* hint = 0) {
+    return static_cast<pointer>(
+        zone_->NewArray<value_type>(static_cast<int>(n)));
+  }
+  void deallocate(pointer p, size_type) { /* noop for Zones */
+  }
+
+  size_type max_size() const throw() {
+    return std::numeric_limits<int>::max() / sizeof(value_type);
+  }
+  void construct(pointer p, const T& val) {
+    new (static_cast<void*>(p)) T(val);
+  }
+  void destroy(pointer p) { p->~T(); }
+
+  bool operator==(zone_allocator const& other) const {
+    return zone_ == other.zone_;
+  }
+  bool operator!=(zone_allocator const& other) const {
+    return zone_ != other.zone_;
+  }
+
+  Zone* zone() { return zone_; }
+
+ private:
+  zone_allocator();
+  Zone* zone_;
+};
+
+typedef zone_allocator<bool> ZoneBoolAllocator;
+typedef zone_allocator<int> ZoneIntAllocator;
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_ZONE_ZONE_ALLOCATOR_H_
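
A minimal sketch (not part of the patch; ZoneAllocatorSketch is a hypothetical name, <vector> and the v8::internal namespace assumed) of what zone_allocator is for: it adapts a Zone to the standard allocator interface, so any std:: container can draw its storage from a Zone, and deallocate() can be a no-op because the Zone releases everything at once.

void ZoneAllocatorSketch() {
  AccountingAllocator platform_allocator;
  Zone zone(&platform_allocator);
  zone_allocator<int> alloc(&zone);
  std::vector<int, zone_allocator<int>> numbers(alloc);
  numbers.push_back(1);
  numbers.push_back(2);
  // No per-element frees are ever issued; destroying |zone| reclaims storage.
}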
diff --git a/src/zone/zone-containers.h b/src/zone/zone-containers.h
new file mode 100644
index 0000000..0aecd98
--- /dev/null
+++ b/src/zone/zone-containers.h
@@ -0,0 +1,138 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_SRC_ZONE_ZONE_CONTAINERS_H_
+#define V8_SRC_ZONE_ZONE_CONTAINERS_H_
+
+#include <deque>
+#include <list>
+#include <map>
+#include <queue>
+#include <set>
+#include <stack>
+#include <vector>
+
+#include "src/zone/zone-allocator.h"
+
+namespace v8 {
+namespace internal {
+
+// A wrapper subclass for std::vector to make it easy to construct one
+// that uses a zone allocator.
+template <typename T>
+class ZoneVector : public std::vector<T, zone_allocator<T>> {
+ public:
+  // Constructs an empty vector.
+  explicit ZoneVector(Zone* zone)
+      : std::vector<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
+
+  // Constructs a new vector and fills it with {size} elements, each
+  // constructed via the default constructor.
+  ZoneVector(size_t size, Zone* zone)
+      : std::vector<T, zone_allocator<T>>(size, T(), zone_allocator<T>(zone)) {}
+
+  // Constructs a new vector and fills it with {size} elements, each
+  // having the value {def}.
+  ZoneVector(size_t size, T def, Zone* zone)
+      : std::vector<T, zone_allocator<T>>(size, def, zone_allocator<T>(zone)) {}
+};
+
+// A wrapper subclass for std::deque to make it easy to construct one
+// that uses a zone allocator.
+template <typename T>
+class ZoneDeque : public std::deque<T, zone_allocator<T>> {
+ public:
+  // Constructs an empty deque.
+  explicit ZoneDeque(Zone* zone)
+      : std::deque<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
+};
+
+// A wrapper subclass for std::list to make it easy to construct one
+// that uses a zone allocator.
+// TODO(mstarzinger): This should be renamed to ZoneList once we get rid of our
+// own home-grown ZoneList that actually is a ZoneVector.
+template <typename T>
+class ZoneLinkedList : public std::list<T, zone_allocator<T>> {
+ public:
+  // Constructs an empty list.
+  explicit ZoneLinkedList(Zone* zone)
+      : std::list<T, zone_allocator<T>>(zone_allocator<T>(zone)) {}
+};
+
+// A wrapper subclass for std::priority_queue to make it easy to construct one
+// that uses a zone allocator.
+template <typename T, typename Compare = std::less<T>>
+class ZonePriorityQueue
+    : public std::priority_queue<T, ZoneVector<T>, Compare> {
+ public:
+  // Constructs an empty priority queue.
+  explicit ZonePriorityQueue(Zone* zone)
+      : std::priority_queue<T, ZoneVector<T>, Compare>(Compare(),
+                                                       ZoneVector<T>(zone)) {}
+};
+
+// A wrapper subclass for std::queue to make it easy to construct one
+// that uses a zone allocator.
+template <typename T>
+class ZoneQueue : public std::queue<T, ZoneDeque<T>> {
+ public:
+  // Constructs an empty queue.
+  explicit ZoneQueue(Zone* zone)
+      : std::queue<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
+};
+
+// A wrapper subclass for std::stack to make it easy to construct one that uses
+// a zone allocator.
+template <typename T>
+class ZoneStack : public std::stack<T, ZoneDeque<T>> {
+ public:
+  // Constructs an empty stack.
+  explicit ZoneStack(Zone* zone)
+      : std::stack<T, ZoneDeque<T>>(ZoneDeque<T>(zone)) {}
+};
+
+// A wrapper subclass for std::set to make it easy to construct one that uses
+// a zone allocator.
+template <typename K, typename Compare = std::less<K>>
+class ZoneSet : public std::set<K, Compare, zone_allocator<K>> {
+ public:
+  // Constructs an empty set.
+  explicit ZoneSet(Zone* zone)
+      : std::set<K, Compare, zone_allocator<K>>(Compare(),
+                                                zone_allocator<K>(zone)) {}
+};
+
+// A wrapper subclass for std::map to make it easy to construct one that uses
+// a zone allocator.
+template <typename K, typename V, typename Compare = std::less<K>>
+class ZoneMap
+    : public std::map<K, V, Compare, zone_allocator<std::pair<const K, V>>> {
+ public:
+  // Constructs an empty map.
+  explicit ZoneMap(Zone* zone)
+      : std::map<K, V, Compare, zone_allocator<std::pair<const K, V>>>(
+            Compare(), zone_allocator<std::pair<const K, V>>(zone)) {}
+};
+
+// A wrapper subclass for std::multimap to make it easy to construct one that
+// uses a zone allocator.
+template <typename K, typename V, typename Compare = std::less<K>>
+class ZoneMultimap
+    : public std::multimap<K, V, Compare,
+                           zone_allocator<std::pair<const K, V>>> {
+ public:
+  // Constructs an empty multimap.
+  explicit ZoneMultimap(Zone* zone)
+      : std::multimap<K, V, Compare, zone_allocator<std::pair<const K, V>>>(
+            Compare(), zone_allocator<std::pair<const K, V>>(zone)) {}
+};
+
+// Typedefs to shorten commonly used vectors.
+typedef ZoneVector<bool> BoolVector;
+typedef ZoneVector<int> IntVector;
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_SRC_ZONE_ZONE_CONTAINERS_H_
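
A minimal sketch (not part of the patch; ZoneContainersSketch is a hypothetical name, v8::internal namespace assumed) of the container wrappers above: each one only adds Zone-taking constructors, so the rest of the std:: interface is unchanged.

void ZoneContainersSketch(AccountingAllocator* allocator) {
  Zone zone(allocator);
  ZoneVector<int> ints(&zone);      // empty vector backed by the zone
  ints.push_back(42);
  ZoneVector<int> zeros(4, &zone);  // four default-constructed elements
  ZoneMap<int, int> squares(&zone);
  squares[7] = 49;
  // All element storage lives in |zone| and disappears with it.
}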
diff --git a/src/zone/zone-segment.cc b/src/zone/zone-segment.cc
new file mode 100644
index 0000000..f63b530
--- /dev/null
+++ b/src/zone/zone-segment.cc
@@ -0,0 +1,22 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/zone/zone-segment.h"
+
+namespace v8 {
+namespace internal {
+
+void Segment::ZapContents() {
+#ifdef DEBUG
+  memset(start(), kZapDeadByte, capacity());
+#endif
+}
+
+void Segment::ZapHeader() {
+#ifdef DEBUG
+  memset(this, kZapDeadByte, sizeof(Segment));
+#endif
+}
+}  // namespace internal
+}  // namespace v8
diff --git a/src/zone/zone-segment.h b/src/zone/zone-segment.h
new file mode 100644
index 0000000..d37cf56
--- /dev/null
+++ b/src/zone/zone-segment.h
@@ -0,0 +1,61 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ZONE_ZONE_SEGMENT_H_
+#define V8_ZONE_ZONE_SEGMENT_H_
+
+#include "src/v8.h"
+
+// Segments represent chunks of memory: They have starting address
+// (encoded in the this pointer) and a size in bytes. Segments are
+// chained together forming a LIFO structure with the newest segment
+// available as segment_head_. Segments are allocated using malloc()
+// and de-allocated using free().
+namespace v8 {
+namespace internal {
+
+// Forward declaration.
+class Zone;
+
+class Segment {
+ public:
+  void Initialize(Segment* next, size_t size, Zone* zone) {
+    next_ = next;
+    size_ = size;
+    zone_ = zone;
+  }
+
+  Zone* zone() const { return zone_; }
+  void set_zone(Zone* const zone) { zone_ = zone; }
+
+  Segment* next() const { return next_; }
+  void set_next(Segment* const next) { next_ = next; }
+
+  size_t size() const { return size_; }
+  size_t capacity() const { return size_ - sizeof(Segment); }
+
+  Address start() const { return address(sizeof(Segment)); }
+  Address end() const { return address(size_); }
+
+  // Zap the contents of the segment (but not the header).
+  void ZapContents();
+  // Zaps the header, which renders the segment unusable.
+  void ZapHeader();
+
+ private:
+#ifdef DEBUG
+  // Constant byte value used for zapping dead memory in debug mode.
+  static const unsigned char kZapDeadByte = 0xcd;
+#endif
+  // Computes the address of the nth byte in this segment.
+  Address address(size_t n) const { return Address(this) + n; }
+
+  Zone* zone_;
+  Segment* next_;
+  size_t size_;
+};
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_ZONE_ZONE_SEGMENT_H_
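
The layout implied by the accessors above, as an illustration (not part of the patch): the Segment object itself is the header at the front of the malloc()'ed block, and start()/end() bracket the payload a Zone hands out.

  // |<-------------------------- size() --------------------------->|
  // | Segment header (sizeof(Segment)) |        payload bytes        |
  // ^ this (malloc result)             ^ start()                     ^ end()
  //                                    |<-------- capacity() ------->|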
diff --git a/src/zone/zone.cc b/src/zone/zone.cc
new file mode 100644
index 0000000..4272e17
--- /dev/null
+++ b/src/zone/zone.cc
@@ -0,0 +1,224 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/zone/zone.h"
+
+#include <cstring>
+
+#include "src/v8.h"
+
+#ifdef V8_USE_ADDRESS_SANITIZER
+#include <sanitizer/asan_interface.h>
+#endif  // V8_USE_ADDRESS_SANITIZER
+
+namespace v8 {
+namespace internal {
+
+namespace {
+
+#if V8_USE_ADDRESS_SANITIZER
+
+const size_t kASanRedzoneBytes = 24;  // Must be a multiple of 8.
+
+#else
+
+#define ASAN_POISON_MEMORY_REGION(start, size) \
+  do {                                         \
+    USE(start);                                \
+    USE(size);                                 \
+  } while (false)
+
+#define ASAN_UNPOISON_MEMORY_REGION(start, size) \
+  do {                                           \
+    USE(start);                                  \
+    USE(size);                                   \
+  } while (false)
+
+const size_t kASanRedzoneBytes = 0;
+
+#endif  // V8_USE_ADDRESS_SANITIZER
+
+}  // namespace
+
+Zone::Zone(AccountingAllocator* allocator)
+    : allocation_size_(0),
+      segment_bytes_allocated_(0),
+      position_(0),
+      limit_(0),
+      allocator_(allocator),
+      segment_head_(nullptr) {}
+
+Zone::~Zone() {
+  DeleteAll();
+  DeleteKeptSegment();
+
+  DCHECK(segment_bytes_allocated_ == 0);
+}
+
+void* Zone::New(size_t size) {
+  // Round up the requested size to fit the alignment.
+  size = RoundUp(size, kAlignment);
+
+  // If the allocation size is divisible by 8 then we return an 8-byte aligned
+  // address.
+  if (kPointerSize == 4 && kAlignment == 4) {
+    position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4);
+  } else {
+    DCHECK(kAlignment >= kPointerSize);
+  }
+
+  // Check if the requested size is available without expanding.
+  Address result = position_;
+
+  const size_t size_with_redzone = size + kASanRedzoneBytes;
+  const uintptr_t limit = reinterpret_cast<uintptr_t>(limit_);
+  const uintptr_t position = reinterpret_cast<uintptr_t>(position_);
+  // position_ > limit_ can be true after the alignment correction above.
+  if (limit < position || size_with_redzone > limit - position) {
+    result = NewExpand(size_with_redzone);
+  } else {
+    position_ += size_with_redzone;
+  }
+
+  Address redzone_position = result + size;
+  DCHECK(redzone_position + kASanRedzoneBytes == position_);
+  ASAN_POISON_MEMORY_REGION(redzone_position, kASanRedzoneBytes);
+
+  // Check that the result has the proper alignment and return it.
+  DCHECK(IsAddressAligned(result, kAlignment, 0));
+  allocation_size_ += size;
+  return reinterpret_cast<void*>(result);
+}
+
+void Zone::DeleteAll() {
+  // Find a segment with a suitable size to keep around.
+  Segment* keep = nullptr;
+  // Traverse the chained list of segments, zapping (in debug mode)
+  // and freeing every segment except the one we wish to keep.
+  for (Segment* current = segment_head_; current;) {
+    Segment* next = current->next();
+    if (!keep && current->size() <= kMaximumKeptSegmentSize) {
+      // Unlink the segment we wish to keep from the list.
+      keep = current;
+      keep->set_next(nullptr);
+    } else {
+      size_t size = current->size();
+#ifdef DEBUG
+      // Un-poison first so the zapping doesn't trigger ASan complaints.
+      ASAN_UNPOISON_MEMORY_REGION(current, size);
+#endif
+      current->ZapContents();
+      segment_bytes_allocated_ -= size;
+      allocator_->FreeSegment(current);
+    }
+    current = next;
+  }
+
+  // If we have found a segment we want to keep, we must recompute the
+  // variables 'position' and 'limit' to prepare for future allocate
+  // attempts. Otherwise, we must clear the position and limit to
+  // force a new segment to be allocated on demand.
+  if (keep) {
+    Address start = keep->start();
+    position_ = RoundUp(start, kAlignment);
+    limit_ = keep->end();
+    // Un-poison so we can re-use the segment later.
+    ASAN_UNPOISON_MEMORY_REGION(start, keep->capacity());
+    keep->ZapContents();
+  } else {
+    position_ = limit_ = 0;
+  }
+
+  allocation_size_ = 0;
+  // Update the head segment to be the kept segment (if any).
+  segment_head_ = keep;
+}
+
+void Zone::DeleteKeptSegment() {
+  DCHECK(segment_head_ == nullptr || segment_head_->next() == nullptr);
+  if (segment_head_ != nullptr) {
+    size_t size = segment_head_->size();
+#ifdef DEBUG
+    // Un-poison first so the zapping doesn't trigger ASan complaints.
+    ASAN_UNPOISON_MEMORY_REGION(segment_head_, size);
+#endif
+    segment_head_->ZapContents();
+    segment_bytes_allocated_ -= size;
+    allocator_->FreeSegment(segment_head_);
+    segment_head_ = nullptr;
+  }
+
+  DCHECK(segment_bytes_allocated_ == 0);
+}
+
+// Creates a new segment, sets its size, and pushes it to the front
+// of the segment chain. Returns the new segment.
+Segment* Zone::NewSegment(size_t size) {
+  Segment* result = allocator_->AllocateSegment(size);
+  segment_bytes_allocated_ += size;
+  if (result != nullptr) {
+    result->Initialize(segment_head_, size, this);
+    segment_head_ = result;
+  }
+  return result;
+}
+
+Address Zone::NewExpand(size_t size) {
+  // Make sure the requested size is already properly aligned and that
+  // there isn't enough room in the Zone to satisfy the request.
+  DCHECK_EQ(size, RoundDown(size, kAlignment));
+  DCHECK(limit_ < position_ ||
+         reinterpret_cast<uintptr_t>(limit_) -
+                 reinterpret_cast<uintptr_t>(position_) <
+             size);
+
+  // Compute the new segment size. We use a 'high water mark'
+  // strategy, where we increase the segment size every time we expand
+  // except that we employ a maximum segment size when we delete. This
+  // is to avoid excessive malloc() and free() overhead.
+  Segment* head = segment_head_;
+  const size_t old_size = (head == nullptr) ? 0 : head->size();
+  static const size_t kSegmentOverhead = sizeof(Segment) + kAlignment;
+  const size_t new_size_no_overhead = size + (old_size << 1);
+  size_t new_size = kSegmentOverhead + new_size_no_overhead;
+  const size_t min_new_size = kSegmentOverhead + size;
+  // Guard against integer overflow.
+  if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
+    V8::FatalProcessOutOfMemory("Zone");
+    return nullptr;
+  }
+  if (new_size < kMinimumSegmentSize) {
+    new_size = kMinimumSegmentSize;
+  } else if (new_size > kMaximumSegmentSize) {
+    // Limit the size of new segments to avoid growing the segment size
+    // exponentially, thus putting pressure on contiguous virtual address space.
+    // All the while making sure to allocate a segment large enough to hold the
+    // requested size.
+    new_size = Max(min_new_size, kMaximumSegmentSize);
+  }
+  if (new_size > INT_MAX) {
+    V8::FatalProcessOutOfMemory("Zone");
+    return nullptr;
+  }
+  Segment* segment = NewSegment(new_size);
+  if (segment == nullptr) {
+    V8::FatalProcessOutOfMemory("Zone");
+    return nullptr;
+  }
+
+  // Recompute 'top' and 'limit' based on the new segment.
+  Address result = RoundUp(segment->start(), kAlignment);
+  position_ = result + size;
+  // Check for address overflow.
+  // (Should not happen since the segment is guaranteed to accommodate
+  // size bytes + header and alignment padding)
+  DCHECK(reinterpret_cast<uintptr_t>(position_) >=
+         reinterpret_cast<uintptr_t>(result));
+  limit_ = segment->end();
+  DCHECK(position_ <= limit_);
+  return result;
+}
+
+}  // namespace internal
+}  // namespace v8
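
A sketch (not part of the patch) of just the growth arithmetic in Zone::NewExpand above, with the overflow checks omitted; GrowthSketch is a hypothetical name and kAlignment is approximated by kPointerSize, its non-ASan value.

size_t GrowthSketch(size_t requested, size_t old_segment_size) {
  const size_t kOverhead = sizeof(Segment) + kPointerSize;  // header + alignment
  const size_t kMinSize = static_cast<size_t>(8 * KB);      // kMinimumSegmentSize
  const size_t kMaxSize = static_cast<size_t>(1 * MB);      // kMaximumSegmentSize
  // High water mark: grow by twice the previous segment plus the request...
  size_t new_size = kOverhead + requested + (old_segment_size << 1);
  if (new_size < kMinSize) return kMinSize;
  // ...but cap the growth, while still covering an oversized request.
  if (new_size > kMaxSize) return Max(kOverhead + requested, kMaxSize);
  return new_size;
}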
diff --git a/src/zone/zone.h b/src/zone/zone.h
new file mode 100644
index 0000000..9ff259e
--- /dev/null
+++ b/src/zone/zone.h
@@ -0,0 +1,243 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ZONE_ZONE_H_
+#define V8_ZONE_ZONE_H_
+
+#include <limits>
+
+#include "src/base/hashmap.h"
+#include "src/base/logging.h"
+#include "src/globals.h"
+#include "src/list.h"
+#include "src/splay-tree.h"
+#include "src/zone/accounting-allocator.h"
+
+namespace v8 {
+namespace internal {
+
+// The Zone supports very fast allocation of small chunks of
+// memory. The chunks cannot be deallocated individually, but instead
+// the Zone supports deallocating all chunks in one fast
+// operation. The Zone is used to hold temporary data structures like
+// the abstract syntax tree, which is deallocated after compilation.
+//
+// Note: There is no need to initialize the Zone; the first time an
+// allocation is attempted, a segment of memory will be requested
+// through a call to malloc().
+//
+// Note: The implementation is inherently not thread safe. Do not use
+// from multi-threaded code.
+class V8_EXPORT_PRIVATE Zone final {
+ public:
+  explicit Zone(AccountingAllocator* allocator);
+  ~Zone();
+
+  // Allocate 'size' bytes of memory in the Zone; expands the Zone by
+  // allocating new segments of memory on demand using malloc().
+  void* New(size_t size);
+
+  template <typename T>
+  T* NewArray(size_t length) {
+    DCHECK_LT(length, std::numeric_limits<size_t>::max() / sizeof(T));
+    return static_cast<T*>(New(length * sizeof(T)));
+  }
+
+  // Deletes all objects and free all memory allocated in the Zone. Keeps one
+  // small (size <= kMaximumKeptSegmentSize) segment around if it finds one.
+  void DeleteAll();
+
+  // Deletes the last small segment kept around by DeleteAll(). You
+  // may no longer allocate in the Zone after a call to this method.
+  void DeleteKeptSegment();
+
+  // Returns true if more memory has been allocated in zones than
+  // the limit allows.
+  bool excess_allocation() const {
+    return segment_bytes_allocated_ > kExcessLimit;
+  }
+
+  size_t allocation_size() const { return allocation_size_; }
+
+  AccountingAllocator* allocator() const { return allocator_; }
+
+ private:
+// All pointers returned from New() have this alignment.  In addition, if the
+// object being allocated has a size that is divisible by 8 then its alignment
+// will be 8. ASan requires 8-byte alignment.
+#ifdef V8_USE_ADDRESS_SANITIZER
+  static const size_t kAlignment = 8;
+  STATIC_ASSERT(kPointerSize <= 8);
+#else
+  static const size_t kAlignment = kPointerSize;
+#endif
+
+  // Never allocate segments smaller than this size in bytes.
+  static const size_t kMinimumSegmentSize = 8 * KB;
+
+  // Never allocate segments larger than this size in bytes.
+  static const size_t kMaximumSegmentSize = 1 * MB;
+
+  // Never keep segments larger than this size in bytes around.
+  static const size_t kMaximumKeptSegmentSize = 64 * KB;
+
+  // Report zone excess when allocation exceeds this limit.
+  static const size_t kExcessLimit = 256 * MB;
+
+  // The number of bytes allocated in this zone so far.
+  size_t allocation_size_;
+
+  // The number of bytes allocated in segments.  Note that this number
+  // includes memory allocated from the OS but not yet allocated from
+  // the zone.
+  size_t segment_bytes_allocated_;
+
+  // Expand the Zone to hold at least 'size' more bytes and allocate
+  // the bytes. Returns the address of the newly allocated chunk of
+  // memory in the Zone. Should only be called if there isn't enough
+  // room in the Zone already.
+  Address NewExpand(size_t size);
+
+  // Creates a new segment, sets its size, and pushes it to the front
+  // of the segment chain. Returns the new segment.
+  inline Segment* NewSegment(size_t size);
+
+  // The free region in the current (front) segment is represented as
+  // the half-open interval [position, limit). The 'position' variable
+  // is guaranteed to be aligned as dictated by kAlignment.
+  Address position_;
+  Address limit_;
+
+  AccountingAllocator* allocator_;
+
+  Segment* segment_head_;
+};
+
+// ZoneObject is an abstraction that helps define classes of objects
+// allocated in the Zone. Use it as a base class; see ast.h.
+class ZoneObject {
+ public:
+  // Allocate a new ZoneObject of 'size' bytes in the Zone.
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
+
+  // Ideally, the delete operator should be private instead of
+  // public, but unfortunately the compiler sometimes synthesizes
+  // (unused) destructors for classes derived from ZoneObject, which
+  // require the operator to be visible. MSVC requires the delete
+  // operator to be public.
+
+  // ZoneObjects should never be deleted individually; use
+  // Zone::DeleteAll() to delete all zone objects in one go.
+  void operator delete(void*, size_t) { UNREACHABLE(); }
+  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
+};
+
+// The ZoneScope is used to automatically call DeleteAll() on a
+// Zone when the ZoneScope is destroyed (i.e. goes out of scope)
+class ZoneScope final {
+ public:
+  explicit ZoneScope(Zone* zone) : zone_(zone) {}
+  ~ZoneScope() { zone_->DeleteAll(); }
+
+  Zone* zone() const { return zone_; }
+
+ private:
+  Zone* zone_;
+};
+
+// The ZoneAllocationPolicy is used to specialize generic data
+// structures to allocate themselves and their elements in the Zone.
+class ZoneAllocationPolicy final {
+ public:
+  explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) {}
+  void* New(size_t size) { return zone()->New(size); }
+  static void Delete(void* pointer) {}
+  Zone* zone() const { return zone_; }
+
+ private:
+  Zone* zone_;
+};
+
+// ZoneLists are growable lists with constant-time access to the
+// elements. The list itself and all its elements are allocated in the
+// Zone. ZoneLists cannot be deleted individually; you can delete all
+// objects in the Zone by calling Zone::DeleteAll().
+template <typename T>
+class ZoneList final : public List<T, ZoneAllocationPolicy> {
+ public:
+  // Construct a new ZoneList with the given capacity; the length is
+  // always zero. The capacity must be non-negative.
+  ZoneList(int capacity, Zone* zone)
+      : List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) {}
+
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
+
+  // Construct a new ZoneList by copying the elements of the given ZoneList.
+  ZoneList(const ZoneList<T>& other, Zone* zone)
+      : List<T, ZoneAllocationPolicy>(other.length(),
+                                      ZoneAllocationPolicy(zone)) {
+    AddAll(other, zone);
+  }
+
+  // We add some convenience wrappers so that we can pass in a Zone
+  // instead of a (less convenient) ZoneAllocationPolicy.
+  void Add(const T& element, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::Add(element, ZoneAllocationPolicy(zone));
+  }
+  void AddAll(const List<T, ZoneAllocationPolicy>& other, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+  }
+  void AddAll(const Vector<T>& other, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+  }
+  void InsertAt(int index, const T& element, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::InsertAt(index, element,
+                                            ZoneAllocationPolicy(zone));
+  }
+  Vector<T> AddBlock(T value, int count, Zone* zone) {
+    return List<T, ZoneAllocationPolicy>::AddBlock(value, count,
+                                                   ZoneAllocationPolicy(zone));
+  }
+  void Allocate(int length, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::Allocate(length, ZoneAllocationPolicy(zone));
+  }
+  void Initialize(int capacity, Zone* zone) {
+    List<T, ZoneAllocationPolicy>::Initialize(capacity,
+                                              ZoneAllocationPolicy(zone));
+  }
+
+  void operator delete(void* pointer) { UNREACHABLE(); }
+  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
+};
+
+// A zone splay tree.  The config type parameter encapsulates the
+// different configurations of a concrete splay tree (see splay-tree.h).
+// The tree itself and all its elements are allocated in the Zone.
+template <typename Config>
+class ZoneSplayTree final : public SplayTree<Config, ZoneAllocationPolicy> {
+ public:
+  explicit ZoneSplayTree(Zone* zone)
+      : SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
+  ~ZoneSplayTree() {
+    // Reset the root to avoid unneeded iteration over all tree nodes
+    // in the destructor.  For a zone-allocated tree, nodes will be
+    // freed by the Zone.
+    SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
+  }
+
+  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
+
+  void operator delete(void* pointer) { UNREACHABLE(); }
+  void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
+};
+
+typedef base::PointerTemplateHashMapImpl<ZoneAllocationPolicy> ZoneHashMap;
+
+typedef base::CustomMatcherTemplateHashMapImpl<ZoneAllocationPolicy>
+    CustomMatcherZoneHashMap;
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_ZONE_ZONE_H_
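
A short usage sketch (not part of the patch; SketchNode and UseZoneSketch are hypothetical names, v8::internal namespace assumed) tying the pieces in this header together:

class SketchNode : public ZoneObject {
 public:
  explicit SketchNode(int value) : value_(value) {}
  int value() const { return value_; }

 private:
  int value_;
};

void UseZoneSketch(AccountingAllocator* allocator) {
  Zone zone(allocator);
  // ZoneObject's operator new allocates straight out of the Zone.
  SketchNode* node = new (&zone) SketchNode(42);
  ZoneList<SketchNode*>* list = new (&zone) ZoneList<SketchNode*>(4, &zone);
  list->Add(node, &zone);
  // No individual deletes: ~Zone() hands every segment back to |allocator|.
}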
diff --git a/tools/callstats.html b/tools/callstats.html
index 76cc8c6..cb2e0be 100644
--- a/tools/callstats.html
+++ b/tools/callstats.html
@@ -247,8 +247,12 @@
     var selectedPage;
     var baselineVersion;
     var selectedEntry;
+    
+    // Marker to programmatically replace the defaultData.
+    var defaultData = /*default-data-start*/undefined/*default-data-end*/;
 
     function initialize() {
+      // Initialize the stats table and toggle lists.
       var original = $("column");
       var view = document.createElement('div');
       view.id = 'view';
@@ -303,6 +307,7 @@
       });
       initializeToggleList(versions.versions, $('versionSelector'));
       initializeToggleList(pages.values(), $('pageSelector'));
+      initializeToggleList(Group.groups.values(), $('groupSelector'));
       initializeToggleContentVisibility();
     }
 
@@ -317,7 +322,7 @@
         checkbox.type = 'checkbox';
         checkbox.checked = item.enabled;
         checkbox.item = item;
-        checkbox.addEventListener('click', handleToggleVersionEnable);
+        checkbox.addEventListener('click', handleToggleVersionOrPageEnable);
         li.appendChild(checkbox);
         li.appendChild(document.createTextNode(item.name));
         list.appendChild(li);
@@ -360,9 +365,8 @@
       });
       if (changeSelectedEntry) {
         showEntryDetail(selectedPage.getEntry(selectedEntry));
-      } else {
-        showImpactList(selectedPage);
       }
+      showImpactList(selectedPage);
     }
 
     function showPageInColumn(page, columnIndex) {
@@ -536,7 +540,7 @@
       table = $('detailView').querySelector('.versionDetailTable');
       tbody = document.createElement('tbody');
       if (entry !== undefined) {
-        $('detailView').querySelector('.versionDetail h3 span').innerHTML =
+        $('detailView').querySelector('.versionDetail h3 span').textContent =
           entry.name + ' in ' + entry.page.name;
         entries = versions.getPageVersions(entry.page).map(
           (page) => {
@@ -571,7 +575,7 @@
       }
       var version = entry.page.version;
       var showDiff = version !== baselineVersion;
-      $('detailView').querySelector('.pageDetail h3 span').innerHTML =
+      $('detailView').querySelector('.pageDetail h3 span').textContent =
         version.name;
       entries = version.pages.map((page) => {
           if (!page.enabled) return;
@@ -597,24 +601,24 @@
       });
       // show the total for all pages
       var tds = table.querySelectorAll('tfoot td');
-      tds[1].innerHTML = ms(entry.getTimeImpact(), showDiff);
+      tds[1].textContent = ms(entry.getTimeImpact(), showDiff);
       // Only show the percentage total if we are in diff mode:
-      tds[2].innerHTML = percent(entry.getTimePercentImpact(), showDiff);
-      tds[3].innerHTML = '';
-      tds[4].innerHTML = count(entry.getCountImpact(), showDiff);
+      tds[2].textContent = percent(entry.getTimePercentImpact(), showDiff);
+      tds[3].textContent = '';
+      tds[4].textContent = count(entry.getCountImpact(), showDiff);
       table.replaceChild(tbody, table.querySelector('tbody'));
     }
 
     function showImpactList(page) {
       var impactView = $('detailView').querySelector('.impactView');
-      impactView.querySelector('h3 span').innerHTML = page.version.name;
+      impactView.querySelector('h3 span').textContent = page.version.name;
 
       var table = impactView.querySelector('table');
       var tbody = document.createElement('tbody');
       var version = page.version;
       var entries = version.allEntries();
       if (selectedEntry !== undefined && selectedEntry.isGroup) {
-        impactView.querySelector('h3 span').innerHTML += " " + selectedEntry.name;
+        impactView.querySelector('h3 span').textContent += " " + selectedEntry.name;
         entries = entries.filter((entry) => {
           return entry.name == selectedEntry.name ||
             (entry.parent && entry.parent.name == selectedEntry.name)
@@ -662,7 +666,7 @@
       if (selectedGroup == undefined) {
         selectedGroup = groups[0];
       } else {
-        groups = groups.filter(each => each.name != selectedGroup.name);
+        groups = groups.filter(each => each.enabled && each.name != selectedGroup.name);
         groups.unshift(selectedGroup);
       }
       showPageGraph(groups, page);
@@ -806,7 +810,7 @@
         colors: groups.map(each => each.color)
       };
       var parentNode = $(id);
-      parentNode.querySelector('h2>span, h3>span').innerHTML = title;
+      parentNode.querySelector('h2>span, h3>span').textContent = title;
       var graphNode = parentNode.querySelector('.content');
 
       var chart = graphNode.chart;
@@ -856,8 +860,8 @@
 
     function showPopover(entry) {
       var popover = $('popover');
-      popover.querySelector('td.name').innerHTML = entry.name;
-      popover.querySelector('td.page').innerHTML = entry.page.name;
+      popover.querySelector('td.name').textContent = entry.name;
+      popover.querySelector('td.page').textContent = entry.page.name;
       setPopoverDetail(popover, entry, '');
       popover.querySelector('table').className = "";
       if (baselineVersion !== undefined) {
@@ -870,32 +874,32 @@
     function setPopoverDetail(popover, entry, prefix) {
       var node = (name) => popover.querySelector(prefix + name);
       if (entry == undefined) {
-        node('.version').innerHTML = baselineVersion.name;
-        node('.time').innerHTML = '-';
-        node('.timeVariance').innerHTML = '-';
-        node('.percent').innerHTML = '-';
-        node('.percentPerEntry').innerHTML = '-';
-        node('.percentVariance').innerHTML  = '-';
-        node('.count').innerHTML =  '-';
-        node('.countVariance').innerHTML = '-';
-        node('.timeImpact').innerHTML = '-';
-        node('.timePercentImpact').innerHTML = '-';
+        node('.version').textContent = baselineVersion.name;
+        node('.time').textContent = '-';
+        node('.timeVariance').textContent = '-';
+        node('.percent').textContent = '-';
+        node('.percentPerEntry').textContent = '-';
+        node('.percentVariance').textContent  = '-';
+        node('.count').textContent =  '-';
+        node('.countVariance').textContent = '-';
+        node('.timeImpact').textContent = '-';
+        node('.timePercentImpact').textContent = '-';
       } else {
-        node('.version').innerHTML = entry.page.version.name;
-        node('.time').innerHTML = ms(entry._time, false);
-        node('.timeVariance').innerHTML
+        node('.version').textContent = entry.page.version.name;
+        node('.time').textContent = ms(entry._time, false);
+        node('.timeVariance').textContent
             = percent(entry.timeVariancePercent, false);
-        node('.percent').innerHTML = percent(entry.timePercent, false);
-        node('.percentPerEntry').innerHTML
+        node('.percent').textContent = percent(entry.timePercent, false);
+        node('.percentPerEntry').textContent
             = percent(entry.timePercentPerEntry, false);
-        node('.percentVariance').innerHTML 
+        node('.percentVariance').textContent
             = percent(entry.timePercentVariancePercent, false);
-        node('.count').innerHTML = count(entry._count, false);
-        node('.countVariance').innerHTML
+        node('.count').textContent = count(entry._count, false);
+        node('.countVariance').textContent
             = percent(entry.timeVariancePercent, false);
-        node('.timeImpact').innerHTML
+        node('.timeImpact').textContent
             = ms(entry.getTimeImpact(false), false);
-        node('.timePercentImpact').innerHTML
+        node('.timePercentImpact').textContent
             = percent(entry.getTimeImpactVariancePercent(false), false);
       }
     }
@@ -927,7 +931,7 @@
     function addCodeSearchButton(entry, node) {
       if (entry.isGroup) return;
       var button = document.createElement("div");
-      button.innerHTML = '?'
+      button.textContent = '?'
       button.className = "codeSearch"
       button.addEventListener('click', handleCodeSearch);
       node.appendChild(button);
@@ -936,7 +940,11 @@
 
     function td(tr, content, className) {
       var td = document.createElement("td");
-      td.innerHTML = content;
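+      // Treat content that looks like markup as HTML; plain strings are
+      // assigned via textContent so they are not interpreted as HTML.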
+      if (content[0] == '<') {
+        td.innerHTML = content;
+      } else {
+        td.textContent = content;
+      }
       td.className = className
       tr.appendChild(td);
       return td
@@ -1002,7 +1010,25 @@
     // =========================================================================
     // EventHandlers
     function handleBodyLoad() {
-      $('uploadInput').focus(); 
+      $('uploadInput').focus();
+      if (defaultData) {
+        handleLoadJSON(defaultData);
+      } else if (window.location.protocol !== 'file:') {
+        tryLoadDefaultResults();
+      }
+    }
+
+    function tryLoadDefaultResults() {
+     // Try to load a results.json file adjacent to this file.
+     var xhr = new XMLHttpRequest();
+     // The markers on the following line make it easy for scripts to
+     // replace the url.
+     xhr.open('GET', /*results-url-start*/'results.json'/*results-url-end*/, true);
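+     // e.g. a post-processing script could rewrite the line above to
+     // (hypothetical URL):
+     //   xhr.open('GET', /*results-url-start*/'https://example.com/results.json'/*results-url-end*/, true);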
+     xhr.onreadystatechange = function(e) {
+       if(this.readyState !== XMLHttpRequest.DONE || this.status !== 200) return;
+       handleLoadText(this.responseText);
+     };
+     xhr.send();
     }
 
     function handleLoadFile() {
@@ -1011,14 +1037,23 @@
       var reader = new FileReader();
 
       reader.onload = function(evt) {
-        pages = new Pages();
-        versions = Versions.fromJSON(JSON.parse(this.result));
-        initialize()
-        showPage(versions.versions[0].pages[0]);
+        handleLoadText(this.result);
       }
       reader.readAsText(file);
     }
 
+    function handleLoadText(text) {
+      handleLoadJSON(JSON.parse(text));
+    }
+
+    function handleLoadJSON(json) {
+      pages = new Pages();
+      versions = Versions.fromJSON(json);
+      initialize()
+      showPage(versions.versions[0].pages[0]);
+      selectEntry(selectedPage.total);
+    }
+
     function handleToggleGroup(event) {
       var group = event.target.parentNode.parentNode.entry;
       toggleGroup(selectedPage.get(group.name));
@@ -1097,7 +1132,7 @@
       showPopover(entry);
     }
 
-    function handleToggleVersionEnable(event) {
+    function handleToggleVersionOrPageEnable(event) {
       var item = this.item ;
       if (item  === undefined) return;
       item .enabled = this.checked;
@@ -1106,6 +1141,9 @@
       if (page === undefined || !page.version.enabled) {
         page = versions.getEnabledPage(page.name);
       }
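+      // If the page itself was just disabled, advance to the next page of
+      // this version.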
+      if (!page.enabled) {
+        page = page.getNextPage();
+      }
       showPage(page);
     }
 
@@ -1190,13 +1228,17 @@
         }
         return -1;
       }
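+      // Returns the page following the given one, wrapping around to the
+      // first page at the end of the list.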
+      getNextPage(page) {
+        if (this.length == 0) return undefined;
+        return this.pages[(this.indexOf(page.name) + 1) % this.length];
+      }
       get(name) {
         var index = this.indexOf(name);
         if (0 <= index) return this.pages[index];
         return undefined
       }
       get length() {
-        return this.versions.length
+        return this.pages.length
       }
       getEntry(entry) {
         if (entry === undefined) return undefined;
@@ -1315,21 +1357,20 @@
       constructor(version, page) {
         this.page = page;
         this.page.add(this);
-        this.total = new GroupedEntry('Total', /.*Total.*/, '#BBB');
+        this.total = Group.groups.get('total').entry();
         this.total.isTotal = true;
-        this.unclassified = new UnclassifiedEntry(this, "#000")
+        this.unclassified = new UnclassifiedEntry(this)
         this.groups = [
           this.total,
-          new GroupedEntry('IC', /.*IC.*/, "#3366CC"),
-          new GroupedEntry('Optimize',
-            /StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*/, "#DC3912"),
-          new GroupedEntry('Compile', /.*Compile.*/, "#FFAA00"),
-          new GroupedEntry('Parse', /.*Parse.*/, "#FF6600"),
-          new GroupedEntry('Callback', /.*Callback$/, "#109618"),
-          new GroupedEntry('API', /.*API.*/, "#990099"),
-          new GroupedEntry('GC', /GC|AllocateInTargetSpace/, "#0099C6"),
-          new GroupedEntry('JavaScript', /JS_Execution/, "#DD4477"),
-          new GroupedEntry('Runtime', /.*/, "#88BB00"),
+          Group.groups.get('ic').entry(),
+          Group.groups.get('optimize').entry(),
+          Group.groups.get('compile').entry(),
+          Group.groups.get('parse').entry(),
+          Group.groups.get('callback').entry(),
+          Group.groups.get('api').entry(),
+          Group.groups.get('gc').entry(),
+          Group.groups.get('javascript').entry(),
+          Group.groups.get('runtime').entry(),
           this.unclassified
         ];
         this.entryDict = new Map();
@@ -1400,6 +1441,9 @@
         });
         return sum;
       }
+      getNextPage() {
+        return this.version.getNextPage(this);
+      }
     }
     PageVersion.fromJSON = function(version, name, data) {
       var page = new PageVersion(version, pages.get(name));
@@ -1496,16 +1540,43 @@
       return new Entry(position, ...data);
     }
 
-
-    class GroupedEntry extends Entry {
+    class Group {
       constructor(name, regexp, color) {
-        super(0, 'Group-' + name, 0, 0, 0, 0, 0, 0);
+        this.name = name;
         this.regexp = regexp;
         this.color = color;
+        this.enabled = true;
+      }
+      entry() { return new GroupedEntry(this) };
+    }
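+    // Central registry of all entry groups, shared across versions and pages
+    // so a group's color and enabled flag stay consistent everywhere.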
+    Group.groups = new Map();
+    Group.add = function(name, group) {
+      this.groups.set(name, group);
+    }
+    Group.add('total', new Group('Total', /.*Total.*/, '#BBB'));
+    Group.add('ic', new Group('IC', /.*IC.*/, "#3366CC"));
+    Group.add('optimize', new Group('Optimize',
+        /StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*/, "#DC3912"));
+    Group.add('compile', new Group('Compile', /.*Compile.*/, "#FFAA00"));
+    Group.add('parse', new Group('Parse', /.*Parse.*/, "#FF6600"));
+    Group.add('callback', new Group('Callback', /.*Callback$/, "#109618"));
+    Group.add('api', new Group('API', /.*API.*/, "#990099"));
+    Group.add('gc', new Group('GC', /GC|AllocateInTargetSpace/, "#0099C6"));
+    Group.add('javascript', new Group('JavaScript', /JS_Execution/, "#DD4477"));
+    Group.add('runtime', new Group('Runtime', /.*/, "#88BB00"));
+    Group.add('unclassified', new Group('Unclassified', /.*/, "#000"));
+
+    class GroupedEntry extends Entry {
+      constructor(group) {
+        super(0, 'Group-' + group.name, 0, 0, 0, 0, 0, 0);
+        this.group = group;
         this.entries = [];
       }
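+      // Delegate regexp, color and enabled state to the shared Group object,
+      // so toggling a group applies to every page and version at once.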
+      get regexp() { return this.group.regexp }
+      get color() { return this.group.color }
+      get enabled() { return this.group.enabled }
       add(entry) {
-        if (!entry.name.match(this.regexp)) return false;
+        if (!this.regexp.test(entry.name)) return false;
         this._time += entry.time;
         this._count += entry.count;
         // TODO: sum up variance
@@ -1567,8 +1638,8 @@
     }
 
     class UnclassifiedEntry extends GroupedEntry {
-      constructor(page, color) {
-        super('Unclassified', undefined, color);
+      constructor(page) {
+        super(Group.groups.get('unclassified'));
         this.page = page;
         this._time = undefined;
         this._count = undefined;
@@ -1630,14 +1701,21 @@
     </div>
     
     <div id="versionSelector" class="inline toggleContentVisibility">
-      <h2>Version Selector</h2>
+      <h2>Versions</h2>
       <div class="content hidden">
         <ul></ul>
       </div>
     </div>
     
     <div id="pageSelector" class="inline toggleContentVisibility">
-      <h2>Page Selector</h2>
+      <h2>Pages</h2>
+      <div class="content hidden">
+        <ul></ul>
+      </div>
+    </div>
+
+    <div id="groupSelector" class="inline toggleContentVisibility">
+      <h2>Groups</h2>
       <div class="content hidden">
         <ul></ul>
       </div>
diff --git a/tools/callstats.py b/tools/callstats.py
index 6339392..262f9a6 100755
--- a/tools/callstats.py
+++ b/tools/callstats.py
@@ -46,7 +46,7 @@
   print " ".join(map(fix_for_printing, cmd_args))
 
 
-def start_replay_server(args, sites):
+def start_replay_server(args, sites, discard_output=True):
   with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                    mode='wt', delete=False) as f:
     injection = f.name
@@ -65,8 +65,11 @@
   ]
   print "=" * 80
   print_command(cmd_args)
-  with open(os.devnull, 'w') as null:
-    server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
+  if discard_output:
+    with open(os.devnull, 'w') as null:
+      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
+  else:
+    server = subprocess.Popen(cmd_args)
   print "RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid)
   print "=" * 80
   return {'process': server, 'injection': injection}
@@ -123,6 +126,31 @@
   onLoad(window.location.href);
 })();"""
 
+def get_chrome_flags(js_flags, user_data_dir):
+  return [
+      "--no-default-browser-check",
+      "--no-sandbox",
+      "--disable-translate",
+      "--enable-benchmarking",
+      "--js-flags={}".format(js_flags),
+      "--no-first-run",
+      "--user-data-dir={}".format(user_data_dir),
+    ]
+
+def get_chrome_replay_flags(args):
+  http_port = 4080 + args.port_offset
+  https_port = 4443 + args.port_offset
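+  # Route all http/https traffic (except localhost itself) to the local
+  # web-page-replay ports, so pages are served from the recorded archive.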
+  return [
+      "--host-resolver-rules=MAP *:80 localhost:%s, "  \
+                            "MAP *:443 localhost:%s, " \
+                            "EXCLUDE localhost" % (
+                                http_port, https_port),
+      "--ignore-certificate-errors",
+      "--disable-seccomp-sandbox",
+      "--disable-web-security",
+      "--reduce-security-for-testing",
+      "--allow-insecure-localhost",
+    ]
 
 def run_site(site, domain, args, timeout=None):
   print "="*80
@@ -149,32 +177,11 @@
         js_flags = "--runtime-call-stats"
         if args.replay_wpr: js_flags += " --allow-natives-syntax"
         if args.js_flags: js_flags += " " + args.js_flags
-        chrome_flags = [
-            "--no-default-browser-check",
-            "--no-sandbox",
-            "--disable-translate",
-            "--js-flags={}".format(js_flags),
-            "--no-first-run",
-            "--user-data-dir={}".format(user_data_dir),
-        ]
+        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
         if args.replay_wpr:
-          http_port = 4080 + args.port_offset
-          https_port = 4443 + args.port_offset
-          chrome_flags += [
-              "--host-resolver-rules=MAP *:80 localhost:%s, "  \
-                                    "MAP *:443 localhost:%s, " \
-                                    "EXCLUDE localhost" % (
-                                        http_port, https_port),
-              "--ignore-certificate-errors",
-              "--disable-seccomp-sandbox",
-              "--disable-web-security",
-              "--reduce-security-for-testing",
-              "--allow-insecure-localhost",
-          ]
+          chrome_flags += get_chrome_replay_flags(args)
         else:
-          chrome_flags += [
-              "--single-process",
-          ]
+          chrome_flags += [ "--single-process", ]
         if args.chrome_flags:
           chrome_flags += args.chrome_flags.split()
         cmd_args = [
@@ -234,12 +241,15 @@
     sys.exit(1)
 
 
-def do_run(args):
+def read_sites(args):
   # Determine the websites to benchmark.
   if args.sites_file:
-    sites = read_sites_file(args)
-  else:
-    sites = [{'url': site, 'timeout': args.timeout} for site in args.sites]
+    return read_sites_file(args)
+  return [{'url': site, 'timeout': args.timeout} for site in args.sites]
+
+def do_run(args):
+  sites = read_sites(args)
+  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
   # Disambiguate domains, if needed.
   L = []
   domains = {}
@@ -266,18 +276,37 @@
       domains[domain] += 1
       entry[2] = domains[domain]
     L.append(entry)
-  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
   try:
     # Run them.
     for site, domain, count, timeout in L:
       if count is not None: domain = "{}%{}".format(domain, count)
-      print site, domain, timeout
+      print(site, domain, timeout)
       run_site(site, domain, args, timeout)
   finally:
     if replay_server:
       stop_replay_server(replay_server)
 
 
+def do_run_replay_server(args):
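+  # Starts only the web-page-replay server so recorded sites can be inspected
+  # manually, e.g. (hypothetical paths):
+  #   callstats.py replay --replay-wpr=archive.wpr \
+  #       --replay-bin=$CHROMIUM/src/third_party/webpagereplay/replay.py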
+  sites = read_sites(args)
+  print("- " * 40)
+  print("Available URLs:")
+  for site in sites:
+    print("    "+site['url'])
+  print("- " * 40)
+  print("Launch chromium with the following commands for debugging:")
+  flags = get_chrome_flags("'--runtime-call-stats --allow-natives-syntax'",
+                           "/var/tmp/`date +%s`")
+  flags += get_chrome_replay_flags(args)
+  print("    $CHROMIUM_DIR/out/Release/chomium " + (" ".join(flags)) + " <URL>")
+  print("- " * 40)
+  replay_server = start_replay_server(args, sites, discard_output=False)
+  try:
+    replay_server['process'].wait()
+  finally:
+    stop_replay_server(replay_server)
+
+
 # Calculate statistics.
 
 def statistics(data):
@@ -355,8 +384,15 @@
           entries[group_name]['time'] += time
           entries[group_name]['count'] += count
           break
+    # Calculate the V8-Total (all groups except Callback)
+    total_v8 = { 'time': 0, 'count': 0 }
+    for group_name, regexp in groups:
+      if group_name == 'Group-Callback': continue
+      total_v8['time'] += entries[group_name]['time']
+      total_v8['count'] += entries[group_name]['count']
+    entries['Group-Total-V8'] = total_v8
     # Append the sums as single entries to domain.
-    for key in entries :
+    for key in entries:
       if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
       domain[key]['time_list'].append(entries[key]['time'])
       domain[key]['count_list'].append(entries[key]['count'])
@@ -527,7 +563,7 @@
   subparsers = {}
   # Command: run.
   subparsers["run"] = subparser_adder.add_parser(
-      "run", help="run --help")
+      "run", help="Replay websites and collect runtime stats data.")
   subparsers["run"].set_defaults(
       func=do_run, error=subparsers["run"].error)
   subparsers["run"].add_argument(
@@ -537,37 +573,6 @@
       "--js-flags", type=str, default="",
       help="specify additional V8 flags")
   subparsers["run"].add_argument(
-      "--domain", type=str, default="",
-      help="specify the output file domain name")
-  subparsers["run"].add_argument(
-      "--no-url", dest="print_url", action="store_false", default=True,
-      help="do not include url in statistics file")
-  subparsers["run"].add_argument(
-      "-n", "--repeat", type=int, metavar="<num>",
-      help="specify iterations for each website (default: once)")
-  subparsers["run"].add_argument(
-      "-k", "--refresh", type=int, metavar="<num>", default=0,
-      help="specify refreshes for each iteration (default: 0)")
-  subparsers["run"].add_argument(
-      "--replay-wpr", type=str, metavar="<path>",
-      help="use the specified web page replay (.wpr) archive")
-  subparsers["run"].add_argument(
-      "--replay-bin", type=str, metavar="<path>",
-      help="specify the replay.py script typically located in " \
-           "$CHROMIUM/src/third_party/webpagereplay/replay.py")
-  subparsers["run"].add_argument(
-      "-r", "--retries", type=int, metavar="<num>",
-      help="specify retries if website is down (default: forever)")
-  subparsers["run"].add_argument(
-      "-f", "--sites-file", type=str, metavar="<path>",
-      help="specify file containing benchmark websites")
-  subparsers["run"].add_argument(
-      "-t", "--timeout", type=int, metavar="<seconds>", default=60,
-      help="specify seconds before chrome is killed")
-  subparsers["run"].add_argument(
-      "-p", "--port-offset", type=int, metavar="<offset>", default=0,
-      help="specify the offset for the replay server's default ports")
-  subparsers["run"].add_argument(
       "-u", "--user-data-dir", type=str, metavar="<path>",
       help="specify user data dir (default is temporary)")
   subparsers["run"].add_argument(
@@ -575,14 +580,56 @@
       default="/usr/bin/google-chrome",
       help="specify chrome executable to use")
   subparsers["run"].add_argument(
-      "-l", "--log-stderr", type=str, metavar="<path>",
-      help="specify where chrome's stderr should go (default: /dev/null)")
+      "-r", "--retries", type=int, metavar="<num>",
+      help="specify retries if website is down (default: forever)")
   subparsers["run"].add_argument(
-      "sites", type=str, metavar="<URL>", nargs="*",
-      help="specify benchmark website")
+      "--no-url", dest="print_url", action="store_false", default=True,
+      help="do not include url in statistics file")
+  subparsers["run"].add_argument(
+      "--domain", type=str, default="",
+      help="specify the output file domain name")
+  subparsers["run"].add_argument(
+      "-n", "--repeat", type=int, metavar="<num>",
+      help="specify iterations for each website (default: once)")
+
+  def add_replay_args(subparser):
+    subparser.add_argument(
+        "-k", "--refresh", type=int, metavar="<num>", default=0,
+        help="specify refreshes for each iteration (default: 0)")
+    subparser.add_argument(
+        "--replay-wpr", type=str, metavar="<path>",
+        help="use the specified web page replay (.wpr) archive")
+    subparser.add_argument(
+        "--replay-bin", type=str, metavar="<path>",
+        help="specify the replay.py script typically located in " \
+             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
+    subparser.add_argument(
+        "-f", "--sites-file", type=str, metavar="<path>",
+        help="specify file containing benchmark websites")
+    subparser.add_argument(
+        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
+        help="specify seconds before chrome is killed")
+    subparser.add_argument(
+        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
+        help="specify the offset for the replay server's default ports")
+    subparser.add_argument(
+        "-l", "--log-stderr", type=str, metavar="<path>",
+        help="specify where chrome's stderr should go (default: /dev/null)")
+    subparser.add_argument(
+        "sites", type=str, metavar="<URL>", nargs="*",
+        help="specify benchmark website")
+  add_replay_args(subparsers["run"])
+
+  # Command: replay-server
+  subparsers["replay"] = subparser_adder.add_parser(
+      "replay", help="Run the replay server for debugging purposes")
+  subparsers["replay"].set_defaults(
+      func=do_run_replay_server, error=subparsers["replay"].error)
+  add_replay_args(subparsers["replay"])
+
   # Command: stats.
   subparsers["stats"] = subparser_adder.add_parser(
-      "stats", help="stats --help")
+      "stats", help="Analize the results file create by the 'run' command.")
   subparsers["stats"].set_defaults(
       func=do_stats, error=subparsers["stats"].error)
   subparsers["stats"].add_argument(
@@ -599,11 +646,13 @@
       help="specify log files to parse")
   subparsers["stats"].add_argument(
       "--aggregate", dest="aggregate", action="store_true", default=False,
-      help="Create aggregated entries. Adds Group-* entries at the toplevel. " +
+      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
       "Additionally creates a Total page with all entries.")
+
   # Command: json.
   subparsers["json"] = subparser_adder.add_parser(
-      "json", help="json --help")
+      "json", help="Collect results file created by the 'run' command into" \
+          "a single json file.")
   subparsers["json"].set_defaults(
       func=do_json, error=subparsers["json"].error)
   subparsers["json"].add_argument(
@@ -611,8 +660,9 @@
       help="specify directories with log files to parse")
   subparsers["json"].add_argument(
       "--aggregate", dest="aggregate", action="store_true", default=False,
-      help="Create aggregated entries. Adds Group-* entries at the toplevel. " +
+      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
       "Additionally creates a Total page with all entries.")
+
   # Command: help.
   subparsers["help"] = subparser_adder.add_parser(
       "help", help="help information")
@@ -622,6 +672,7 @@
   subparsers["help"].add_argument(
       "help_cmd", type=str, metavar="<command>", nargs="?",
       help="command for which to display help")
+
   # Execute the command.
   args = parser.parse_args()
   setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
diff --git a/tools/clang/CMakeLists.txt b/tools/clang/CMakeLists.txt
index addcb56..f7c93c3 100644
--- a/tools/clang/CMakeLists.txt
+++ b/tools/clang/CMakeLists.txt
@@ -19,21 +19,15 @@
 endif()
 
 include_directories("${CMAKE_SOURCE_DIR}/include"
+                    "${CMAKE_SOURCE_DIR}/tools/clang/include"
                     "${CMAKE_BINARY_DIR}/include"
                     "${CMAKE_BINARY_DIR}/tools/clang/include")
 
 link_directories("${CMAKE_SOURCE_DIR}/lib"
+                 "${CMAKE_SOURCE_DIR}/tools/clang/lib"
                  "${CMAKE_BINARY_DIR}/lib"
                  "${CMAKE_BINARY_DIR}/tools/clang/lib")
 
-if (DEFINED LLVM_EXTERNAL_CLANG_SOURCE_DIR)
-  include_directories("${LLVM_EXTERNAL_CLANG_SOURCE_DIR}/include")
-  link_directories("${LLVM_EXTERNAL_CLANG_SOURCE_DIR}/lib")
-else ()
-  include_directories("${CMAKE_SOURCE_DIR}/tools/clang/include")
-  link_directories("${CMAKE_SOURCE_DIR}/tools/clang/lib")
-endif ()
-
 # Tests for all enabled tools can be run by building this target.
 add_custom_target(cr-check-all COMMAND ${CMAKE_CTEST_COMMAND} -V)
 
diff --git a/tools/clang/base_bind_rewriters/CMakeLists.txt b/tools/clang/base_bind_rewriters/CMakeLists.txt
index 7abeb72..2939061 100644
--- a/tools/clang/base_bind_rewriters/CMakeLists.txt
+++ b/tools/clang/base_bind_rewriters/CMakeLists.txt
@@ -2,11 +2,8 @@
   BitReader
   MCParser
   Option
-  Support
   X86AsmParser
   X86CodeGen
-  X86Desc
-  X86Info
   )
 
 add_llvm_executable(base_bind_rewriters
@@ -26,7 +23,6 @@
   clangSema
   clangSerialization
   clangTooling
-  clangToolingCore
   )
 
 cr_install(TARGETS base_bind_rewriters RUNTIME DESTINATION bin)
diff --git a/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp b/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
index f33cd39..ba5ba4a 100644
--- a/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
+++ b/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
@@ -143,7 +143,7 @@
 
     // Force parsing and AST building of the yet-uninstantiated function
     // template trace method bodies.
-    clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd].get();
+    clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
     sema.LateTemplateParser(sema.OpaqueParser, *lpt);
   }
 }
diff --git a/tools/clang/plugins/FindBadConstructsConsumer.cpp b/tools/clang/plugins/FindBadConstructsConsumer.cpp
index 00d4cb9..0fd85b1 100644
--- a/tools/clang/plugins/FindBadConstructsConsumer.cpp
+++ b/tools/clang/plugins/FindBadConstructsConsumer.cpp
@@ -967,7 +967,7 @@
       continue;
 
     // Parse and build AST for yet-uninstantiated template functions.
-    clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd].get();
+    clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
     sema.LateTemplateParser(sema.OpaqueParser, *lpt);
   }
 }
diff --git a/tools/clang/scripts/update.py b/tools/clang/scripts/update.py
index b09a980..100bdec 100755
--- a/tools/clang/scripts/update.py
+++ b/tools/clang/scripts/update.py
@@ -27,7 +27,7 @@
 # Do NOT CHANGE this if you don't know what you're doing -- see
 # https://chromium.googlesource.com/chromium/src/+/master/docs/updating_clang.md
 # Reverting problematic clang rolls is safe, though.
-CLANG_REVISION = '283753'
+CLANG_REVISION = '282487'
 
 use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
 if use_head_revision:
@@ -167,6 +167,14 @@
 
 def GetSvnRevision(svn_repo):
   """Returns current revision of the svn repo at svn_repo."""
+  if sys.platform == 'darwin':
+    # mac_files toolchain must be set for hermetic builds.
+    root = os.path.dirname(os.path.dirname(os.path.dirname(
+        os.path.dirname(__file__))))
+    sys.path.append(os.path.join(root, 'build'))
+    import mac_toolchain
+
+    mac_toolchain.SetToolchainEnvironment()
   svn_info = subprocess.check_output('svn info ' + svn_repo, shell=True)
   m = re.search(r'Revision: (\d+)', svn_info)
   return m.group(1)
@@ -304,17 +312,6 @@
   args.gcc_toolchain = gcc_dir
 
 
-def AddSvnToPathOnWin():
-  """Download svn.exe and add it to PATH."""
-  if sys.platform != 'win32':
-    return
-  svn_ver = 'svn-1.6.6-win'
-  svn_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, svn_ver)
-  if not os.path.exists(svn_dir):
-    DownloadAndUnpack(CDS_URL + '/tools/%s.zip' % svn_ver, LLVM_BUILD_TOOLS_DIR)
-  os.environ['PATH'] = svn_dir + os.pathsep + os.environ.get('PATH', '')
-
-
 def AddCMakeToPath():
   """Download CMake and add it to PATH."""
   if sys.platform == 'win32':
@@ -451,7 +448,6 @@
     return 1
 
   DownloadHostGcc(args)
-  AddSvnToPathOnWin()
   AddCMakeToPath()
   AddGnuWinToPath()
 
diff --git a/tools/dev/v8gen.py b/tools/dev/v8gen.py
index a63a427..f0fb74b 100755
--- a/tools/dev/v8gen.py
+++ b/tools/dev/v8gen.py
@@ -6,31 +6,35 @@
 """Script to generate V8's gn arguments based on common developer defaults
 or builder configurations.
 
-Goma is used by default if a goma folder is detected. The compiler proxy is
-assumed to run.
+Goma is used by default if detected. The compiler proxy is assumed to run.
 
-This script can be added to the PATH and be used on other v8 checkouts than
-the including one. It always runs for the checkout that nests the CWD.
+This script can be added to the PATH and be used on other checkouts. It always
+runs for the checkout nesting the CWD.
 
 Configurations of this script live in infra/mb/mb_config.pyl.
 
+Available actions are: {gen,list}. Omitting the action defaults to "gen".
+
 -------------------------------------------------------------------------------
 
 Examples:
 
-# Generate the x64.release config in out.gn/x64.release.
-v8gen.py x64.release
+# Generate the ia32.release config in out.gn/ia32.release.
+v8gen.py ia32.release
 
-# Generate into out.gn/foo and disable goma auto-detect.
-v8gen.py -b x64.release foo --no-goma
+# Generate into out.gn/foo without goma auto-detect.
+v8gen.py gen -b ia32.release foo --no-goma
 
 # Pass additional gn arguments after -- (don't use spaces within gn args).
-v8gen.py x64.optdebug -- v8_enable_slow_dchecks=true
+v8gen.py ia32.optdebug -- v8_enable_slow_dchecks=true
 
 # Generate gn arguments of 'V8 Linux64 - builder' from 'client.v8'. To switch
 # off goma usage here, the args.gn file must be edited manually.
 v8gen.py -m client.v8 -b 'V8 Linux64 - builder'
 
+# Show available configurations.
+v8gen.py list
+
 -------------------------------------------------------------------------------
 """
 
@@ -40,9 +44,15 @@
 import subprocess
 import sys
 
+CONFIG = os.path.join('infra', 'mb', 'mb_config.pyl')
 GOMA_DEFAULT = os.path.join(os.path.expanduser("~"), 'goma')
 OUT_DIR = 'out.gn'
 
+TOOLS_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(os.path.join(TOOLS_PATH, 'mb'))
+
+import mb
+
 
 def _sanitize_nonalpha(text):
   return re.sub(r'[^a-zA-Z0-9.]', '_', text)
@@ -57,30 +67,40 @@
     self._gn_args = args[index + 1:]
 
   def _parse_arguments(self, args):
-    parser = argparse.ArgumentParser(
+    self.parser = argparse.ArgumentParser(
       description=__doc__,
       formatter_class=argparse.RawTextHelpFormatter,
     )
-    parser.add_argument(
+
+    def add_common_options(p):
+      p.add_argument(
+          '-m', '--master', default='developer_default',
+          help='config group or master from mb_config.pyl - default: '
+               'developer_default')
+      p.add_argument(
+          '-v', '--verbosity', action='count',
+          help='print wrapped commands (use -vv to print output of wrapped '
+               'commands)')
+
+    subps = self.parser.add_subparsers()
+
+    # Command: gen.
+    gen_cmd = subps.add_parser(
+        'gen', help='generate a new set of build files (default)')
+    gen_cmd.set_defaults(func=self.cmd_gen)
+    add_common_options(gen_cmd)
+    gen_cmd.add_argument(
         'outdir', nargs='?',
         help='optional gn output directory')
-    parser.add_argument(
+    gen_cmd.add_argument(
         '-b', '--builder',
         help='build configuration or builder name from mb_config.pyl, e.g. '
              'x64.release')
-    parser.add_argument(
-        '-m', '--master', default='developer_default',
-        help='config group or master from mb_config.pyl - default: '
-             'developer_default')
-    parser.add_argument(
+    gen_cmd.add_argument(
         '-p', '--pedantic', action='store_true',
         help='run gn over command-line gn args to catch errors early')
-    parser.add_argument(
-        '-v', '--verbosity', action='count',
-        help='print wrapped commands (use -vv to print output of wrapped '
-             'commands)')
 
-    goma = parser.add_mutually_exclusive_group()
+    goma = gen_cmd.add_mutually_exclusive_group()
     goma.add_argument(
         '-g' , '--goma',
         action='store_true', default=None, dest='goma',
@@ -91,27 +111,83 @@
         help='don\'t use goma auto detection - goma might still be used if '
              'specified as a gn arg')
 
-    options = parser.parse_args(args)
+    # Command: list.
+    list_cmd = subps.add_parser(
+        'list', help='list available configurations')
+    list_cmd.set_defaults(func=self.cmd_list)
+    add_common_options(list_cmd)
 
-    if not options.outdir and not options.builder:
-      parser.error('please specify either an output directory or '
-                   'a builder/config name (-b), e.g. x64.release')
+    # Default to "gen" unless global help is requested.
+    if not args or args[0] not in subps.choices.keys() + ['-h', '--help']:
+      args = ['gen'] + args
 
-    if not options.outdir:
+    return self.parser.parse_args(args)
+
+  def cmd_gen(self):
+    if not self._options.outdir and not self._options.builder:
+      self.parser.error('please specify either an output directory or '
+                        'a builder/config name (-b), e.g. x64.release')
+
+    if not self._options.outdir:
       # Derive output directory from builder name.
-      options.outdir = _sanitize_nonalpha(options.builder)
+      self._options.outdir = _sanitize_nonalpha(self._options.builder)
     else:
       # Also, if this should work on windows, we might need to use \ where
       # outdir is used as path, while using / if it's used in a gn context.
-      if options.outdir.startswith('/'):
-        parser.error(
+      if self._options.outdir.startswith('/'):
+        self.parser.error(
             'only output directories relative to %s are supported' % OUT_DIR)
 
-    if not options.builder:
+    if not self._options.builder:
       # Derive builder from output directory.
-      options.builder = options.outdir
+      self._options.builder = self._options.outdir
 
-    return options
+    # Check for builder/config in mb config.
+    if self._options.builder not in self._mbw.masters[self._options.master]:
+      print '%s does not exist in %s for %s' % (
+          self._options.builder, CONFIG, self._options.master)
+      return 1
+
+    # TODO(machenbach): Check if the requested configurations has switched to
+    # gn at all.
+
+    # The directories are separated with slashes in a gn context (platform
+    # independent).
+    gn_outdir = '/'.join([OUT_DIR, self._options.outdir])
+
+    # Call MB to generate the basic configuration.
+    self._call_cmd([
+      sys.executable,
+      '-u', os.path.join('tools', 'mb', 'mb.py'),
+      'gen',
+      '-f', CONFIG,
+      '-m', self._options.master,
+      '-b', self._options.builder,
+      gn_outdir,
+    ])
+
+    # Handle extra gn arguments.
+    gn_args_path = os.path.join(OUT_DIR, self._options.outdir, 'args.gn')
+
+    # Append command-line args.
+    modified = self._append_gn_args(
+        'command-line', gn_args_path, '\n'.join(self._gn_args))
+
+    # Append goma args.
+    # TODO(machenbach): We currently can't remove existing goma args from the
+    # original config. E.g. to build like a bot that uses goma, but switch
+    # goma off.
+    modified |= self._append_gn_args(
+        'goma', gn_args_path, self._goma_args)
+
+    # Regenerate ninja files to check for errors in the additional gn args.
+    if modified and self._options.pedantic:
+      self._call_cmd(['gn', 'gen', gn_outdir])
+    return 0
+
+  def cmd_list(self):
+    print '\n'.join(sorted(self._mbw.masters[self._options.master]))
+    return 0
 
   def verbose_print_1(self, text):
     if self._options.verbosity >= 1:
@@ -189,6 +265,13 @@
       f.write('\n# Additional %s args:\n' % type)
       f.write(more_gn_args)
       f.write('\n')
+
+    # Artificially increment modification time as our modifications happen too
+    # fast. This makes sure that gn is properly rebuilding the ninja files.
+    mtime = os.path.getmtime(gn_args_path) + 1
+    with open(gn_args_path, 'aw'):
+      os.utime(gn_args_path, (mtime, mtime))
+
     return True
 
   def main(self):
@@ -199,39 +282,21 @@
       self.verbose_print_1('cd ' + workdir)
       os.chdir(workdir)
 
-    # The directories are separated with slashes in a gn context (platform
-    # independent).
-    gn_outdir = '/'.join([OUT_DIR, self._options.outdir])
+    # Initialize MB as a library.
+    self._mbw = mb.MetaBuildWrapper()
 
-    # Call MB to generate the basic configuration.
-    self._call_cmd([
-      sys.executable,
-      '-u', os.path.join('tools', 'mb', 'mb.py'),
-      'gen',
-      '-f', os.path.join('infra', 'mb', 'mb_config.pyl'),
-      '-m', self._options.master,
-      '-b', self._options.builder,
-      gn_outdir,
-    ])
+    # TODO(machenbach): Factor out common methods independent of mb arguments.
+    self._mbw.ParseArgs(['lookup', '-f', CONFIG])
+    self._mbw.ReadConfigFile()
 
-    # Handle extra gn arguments.
-    gn_args_path = os.path.join(OUT_DIR, self._options.outdir, 'args.gn')
+    if not self._options.master in self._mbw.masters:
+      print '%s not found in %s\n' % (self._options.master, CONFIG)
+      print 'Choose one of:\n%s\n' % (
+          '\n'.join(sorted(self._mbw.masters.keys())))
+      return 1
 
-    # Append command-line args.
-    modified = self._append_gn_args(
-        'command-line', gn_args_path, '\n'.join(self._gn_args))
+    return self._options.func()
 
-    # Append goma args.
-    # TODO(machenbach): We currently can't remove existing goma args from the
-    # original config. E.g. to build like a bot that uses goma, but switch
-    # goma off.
-    modified |= self._append_gn_args(
-        'goma', gn_args_path, self._goma_args)
-
-    # Regenerate ninja files to check for errors in the additional gn args.
-    if modified and self._options.pedantic:
-      self._call_cmd(['gn', 'gen', gn_outdir])
-    return 0
 
 if __name__ == "__main__":
   gen = GenerateGnArgs(sys.argv[1:])
diff --git a/tools/gcmole/gcmole.lua b/tools/gcmole/gcmole.lua
index bdbdf36..42cb2e3 100644
--- a/tools/gcmole/gcmole.lua
+++ b/tools/gcmole/gcmole.lua
@@ -183,6 +183,7 @@
 -------------------------------------------------------------------------------
 -- GYP file parsing
 
+-- TODO(machenbach): Remove this when deprecating gyp.
 local function ParseGYPFile()
    local result = {}
    local gyp_files = {
@@ -209,6 +210,32 @@
    return result
 end
 
+local function ParseGNFile()
+   local result = {}
+   local gn_files = {
+       { "BUILD.gn",             '"([^"]-%.cc)"',      ""         },
+       { "test/cctest/BUILD.gn", '"(test-[^"]-%.cc)"', "test/cctest/" }
+   }
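+   -- Source lists in these GN files are annotated with
+   -- "### gcmole(condition) ###" markers; everything up to the closing "]"
+   -- is scanned for .cc file names matching the pattern.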
+
+   for i = 1, #gn_files do
+      local filename = gn_files[i][1]
+      local pattern = gn_files[i][2]
+      local prefix = gn_files[i][3]
+      local gn_file = assert(io.open(filename), "failed to open GN file")
+      local gn = gn_file:read('*a')
+      for condition, sources in
+         gn:gmatch "### gcmole%((.-)%) ###(.-)%]" do
+         if result[condition] == nil then result[condition] = {} end
+         for file in sources:gmatch(pattern) do
+            table.insert(result[condition], prefix .. file)
+         end
+      end
+      gn_file:close()
+   end
+
+   return result
+end
+
 local function EvaluateCondition(cond, props)
    if cond == 'all' then return true end
 
@@ -230,13 +257,40 @@
    return list
 end
 
-local sources = ParseGYPFile()
+
+local gyp_sources = ParseGYPFile()
+local gn_sources = ParseGNFile()
+
+-- TODO(machenbach): Remove this comparison logic when deprecating gyp.
+local function CompareSources(sources1, sources2, what)
+  for condition, files1 in pairs(sources1) do
+    local files2 = sources2[condition]
+    assert(
+      files2 ~= nil,
+      "Missing gcmole condition in " .. what .. ": " .. condition)
+
+    -- Turn into set for speed.
+    files2_set = {}
+    for i, file in pairs(files2) do files2_set[file] = true end
+
+    for i, file in pairs(files1) do
+      assert(
+        files2_set[file] ~= nil,
+        "Missing file " .. file .. " in " .. what .. " for condition " ..
+        condition)
+    end
+  end
+end
+
+CompareSources(gyp_sources, gn_sources, "GN")
+CompareSources(gn_sources, gyp_sources, "GYP")
+
 
 local function FilesForArch(arch)
-   return BuildFileList(sources, { os = 'linux',
-                                   arch = arch,
-                                   mode = 'debug',
-                                   simulator = ''})
+   return BuildFileList(gn_sources, { os = 'linux',
+                                      arch = arch,
+                                      mode = 'debug',
+                                      simulator = ''})
 end
 
 local mtConfig = {}
diff --git a/tools/gcmole/run-gcmole.isolate b/tools/gcmole/run-gcmole.isolate
index caa4f99..0fba2a1 100644
--- a/tools/gcmole/run-gcmole.isolate
+++ b/tools/gcmole/run-gcmole.isolate
@@ -12,6 +12,7 @@
       'parallel.py',
       'run-gcmole.py',
       # The following contains all relevant source and gyp files.
+      '../../BUILD.gn',
       '../../base/',
       '../../include/',
       '../../src/',
diff --git a/tools/gen-inlining-tests.py b/tools/gen-inlining-tests.py
new file mode 100644
index 0000000..1a377e6
--- /dev/null
+++ b/tools/gen-inlining-tests.py
@@ -0,0 +1,566 @@
+#!/usr/bin/env python3
+
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+from collections import namedtuple
+import textwrap
+import sys
+
+SHARD_FILENAME_TEMPLATE = "test/mjsunit/compiler/inline-exception-{shard}.js"
+# Generates 2 files. Found by trial and error.
+SHARD_SIZE = 97
+
+PREAMBLE = """
+
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax --turbo --no-always-opt
+
+// This test file was generated by tools/gen-inlining-tests.py .
+
+// Global variables
+var deopt = undefined; // either true or false
+var counter = 0;
+
+function resetState() {
+  counter = 0;
+}
+
+function warmUp(f) {
+  try {
+    f();
+  } catch (ex) {
+    // ok
+  }
+  try {
+    f();
+  } catch (ex) {
+    // ok
+  }
+}
+
+function resetOptAndAssertResultEquals(expected, f) {
+  warmUp(f);
+  resetState();
+  // %DebugPrint(f);
+  eval("'dont optimize this function itself please, but do optimize f'");
+  %OptimizeFunctionOnNextCall(f);
+  assertEquals(expected, f());
+}
+
+function resetOptAndAssertThrowsWith(expected, f) {
+  warmUp(f);
+  resetState();
+  // %DebugPrint(f);
+  eval("'dont optimize this function itself please, but do optimize f'");
+  %OptimizeFunctionOnNextCall(f);
+  try {
+    var result = f();
+    fail("resetOptAndAssertThrowsWith",
+        "exception: " + expected,
+        "result: " + result);
+  } catch (ex) {
+    assertEquals(expected, ex);
+  }
+}
+
+function increaseAndReturn15() {
+  if (deopt) %DeoptimizeFunction(f);
+  counter++;
+  return 15;
+}
+
+function increaseAndThrow42() {
+  if (deopt) %DeoptimizeFunction(f);
+  counter++;
+  throw 42;
+}
+
+function increaseAndReturn15_noopt_inner() {
+  if (deopt) %DeoptimizeFunction(f);
+  counter++;
+  return 15;
+}
+
+%NeverOptimizeFunction(increaseAndReturn15_noopt_inner);
+
+function increaseAndThrow42_noopt_inner() {
+  if (deopt) %DeoptimizeFunction(f);
+  counter++;
+  throw 42;
+}
+
+%NeverOptimizeFunction(increaseAndThrow42_noopt_inner);
+
+// Alternative 1
+
+function returnOrThrow(doReturn) {
+  if (doReturn) {
+    return increaseAndReturn15();
+  } else {
+    return increaseAndThrow42();
+  }
+}
+
+// Alternative 2
+
+function increaseAndReturn15_calls_noopt() {
+  return increaseAndReturn15_noopt_inner();
+}
+
+function increaseAndThrow42_calls_noopt() {
+  return increaseAndThrow42_noopt_inner();
+}
+
+// Alternative 3.
+// When passed either {increaseAndReturn15} or {increaseAndThrow42}, it acts
+// as the other one.
+function invertFunctionCall(f) {
+  var result;
+  try {
+    result = f();
+  } catch (ex) {
+    return ex - 27;
+  }
+  throw result + 27;
+}
+
+// Alternative 4: constructor
+function increaseAndStore15Constructor() {
+  if (deopt) %DeoptimizeFunction(f);
+  ++counter;
+  this.x = 15;
+}
+
+function increaseAndThrow42Constructor() {
+  if (deopt) %DeoptimizeFunction(f);
+  ++counter;
+  this.x = 42;
+  throw this.x;
+}
+
+// Alternative 5: property
+var magic = {};
+Object.defineProperty(magic, 'prop', {
+  get: function () {
+    if (deopt) %DeoptimizeFunction(f);
+    return 15 + 0 * ++counter;
+  },
+
+  set: function(x) {
+    // argument should be 37
+    if (deopt) %DeoptimizeFunction(f);
+    counter -= 36 - x; // increments counter
+    throw 42;
+  }
+})
+
+// Generate type feedback.
+
+assertEquals(15, increaseAndReturn15_calls_noopt());
+assertThrowsEquals(function() { return increaseAndThrow42_noopt_inner() }, 42);
+
+assertEquals(15, (new increaseAndStore15Constructor()).x);
+assertThrowsEquals(function() {
+        return (new increaseAndThrow42Constructor()).x;
+    },
+    42);
+
+function runThisShard() {
+
+""".strip()
+
+def booltuples(n):
+  """booltuples(2) yields 4 tuples: (False, False), (False, True),
+  (True, False), (True, True)."""
+
+  assert isinstance(n, int)
+  if n <= 0:
+    yield ()
+  else:
+    for initial in booltuples(n-1):
+      yield initial + (False,)
+      yield initial + (True,)
+
+def fnname(flags):
+    assert len(FLAGLETTERS) == len(flags)
+
+    return "f_" + ''.join(
+          FLAGLETTERS[i] if b else '_'
+          for (i, b) in enumerate(flags))
+
+NUM_TESTS_PRINTED = 0
+NUM_TESTS_IN_SHARD = 0
+
+def printtest(flags):
+  """Print a test case. Takes a couple of boolean flags, on which the
+  printed Javascript code depends."""
+
+  assert all(isinstance(flag, bool) for flag in flags)
+
+  # The alternative flags are in reverse order so that if we take all possible
+  # tuples, ordered lexicographically from false to true, we get first the
+  # default, then alternative 1, then 2, etc.
+  (
+    alternativeFn5,      # use alternative #5 for returning/throwing:
+                         #   return/throw using property
+    alternativeFn4,      # use alternative #4 for returning/throwing:
+                         #   return/throw using constructor
+    alternativeFn3,      # use alternative #3 for returning/throwing:
+                         #   return/throw indirectly, based on function argument
+    alternativeFn2,      # use alternative #2 for returning/throwing:
+                         #   return/throw indirectly in unoptimized code,
+                         #   no branching
+    alternativeFn1,      # use alternative #1 for returning/throwing:
+                         #   return/throw indirectly, based on boolean arg
+    tryThrows,           # in try block, call throwing function
+    tryReturns,          # in try block, call returning function
+    tryFirstReturns,     # in try block, returning goes before throwing
+    tryResultToLocal,    # in try block, result goes to local variable
+    doCatch,             # include catch block
+    catchReturns,        # in catch block, return
+    catchWithLocal,      # in catch block, modify or return the local variable
+    catchThrows,         # in catch block, throw
+    doFinally,           # include finally block
+    finallyReturns,      # in finally block, return local variable
+    finallyThrows,       # in finally block, throw
+    endReturnLocal,      # at very end, return variable local
+    deopt,               # deopt inside inlined function
+  ) = flags
+
+  # BASIC RULES
+
+  # Only one alternative can be applied at any time.
+  if (alternativeFn1 + alternativeFn2 + alternativeFn3 + alternativeFn4
+      + alternativeFn5 > 1):
+    return
+
+  # In try, return or throw, or both.
+  if not (tryReturns or tryThrows): return
+
+  # Either doCatch or doFinally.
+  if not doCatch and not doFinally: return
+
+  # Catch flags only make sense when catching
+  if not doCatch and (catchReturns or catchWithLocal or catchThrows):
+    return
+
+  # Finally flags only make sense when finallying
+  if not doFinally and (finallyReturns or finallyThrows):
+    return
+
+  # tryFirstReturns is only relevant when both tryReturns and tryThrows are
+  # true.
+  if tryFirstReturns and not (tryReturns and tryThrows): return
+
+  # From the try and finally block, we can return or throw, but not both.
+  if catchReturns and catchThrows: return
+  if finallyReturns and finallyThrows: return
+
+  # If at the end we return the local, we need to have touched it.
+  if endReturnLocal and not (tryResultToLocal or catchWithLocal): return
+
+  # PRUNING
+
+  anyAlternative = any([alternativeFn1, alternativeFn2, alternativeFn3,
+      alternativeFn4, alternativeFn5])
+  specificAlternative = any([alternativeFn2, alternativeFn3])
+  rareAlternative = not specificAlternative
+
+  # If try returns and throws, then don't catchWithLocal, endReturnLocal, or
+  # deopt, or do any alternative.
+  if (tryReturns and tryThrows and
+      (catchWithLocal or endReturnLocal or deopt or anyAlternative)):
+    return
+  # We don't do any alternative if we do a finally.
+  if doFinally and anyAlternative: return
+  # We only use the local variable if we do alternative #2 or #3.
+  if ((tryResultToLocal or catchWithLocal or endReturnLocal) and
+      not specificAlternative):
+    return
+  # We don't need to test deopting into a finally.
+  if doFinally and deopt: return
+
+  # We're only interested in alternative #2 if we have endReturnLocal, no
+  # catchReturns, and no catchThrows, and deopt.
+  if (alternativeFn2 and
+      (not endReturnLocal or catchReturns or catchThrows or not deopt)):
+    return
+
+
+  # Flag check succeeded.
+
+  trueFlagNames = [name for (name, value) in flags._asdict().items() if value]
+  flagsMsgLine = "  // Variant flags: [{}]".format(', '.join(trueFlagNames))
+  write(textwrap.fill(flagsMsgLine, subsequent_indent='  //   '))
+  write("")
+
+  if not anyAlternative:
+    fragments = {
+      'increaseAndReturn15': 'increaseAndReturn15()',
+      'increaseAndThrow42': 'increaseAndThrow42()',
+    }
+  elif alternativeFn1:
+    fragments = {
+      'increaseAndReturn15': 'returnOrThrow(true)',
+      'increaseAndThrow42': 'returnOrThrow(false)',
+    }
+  elif alternativeFn2:
+    fragments = {
+      'increaseAndReturn15': 'increaseAndReturn15_calls_noopt()',
+      'increaseAndThrow42': 'increaseAndThrow42_calls_noopt()',
+    }
+  elif alternativeFn3:
+    fragments = {
+      'increaseAndReturn15': 'invertFunctionCall(increaseAndThrow42)',
+      'increaseAndThrow42': 'invertFunctionCall(increaseAndReturn15)',
+    }
+  elif alternativeFn4:
+    fragments = {
+      'increaseAndReturn15': '(new increaseAndStore15Constructor()).x',
+      'increaseAndThrow42': '(new increaseAndThrow42Constructor()).x',
+    }
+  else:
+    assert alternativeFn5
+    fragments = {
+      'increaseAndReturn15': 'magic.prop /* returns 15 */',
+      'increaseAndThrow42': '(magic.prop = 37 /* throws 42 */)',
+    }
+
+  # As we print code, we also maintain what the result should be. Variable
+  # {result} can be one of three things:
+  #
+  # - None, indicating returning JS null
+  # - ("return", n) with n an integer
+  # - ("throw", n), with n an integer
+
+  result = None
+  # We also maintain what the counter should be at the end.
+  # The counter is reset just before f is called.
+  counter = 0
+
+  write(    "  f = function {} () {{".format(fnname(flags)))
+  write(    "    var local = 888;")
+  write(    "    deopt = {};".format("true" if deopt else "false"))
+  local = 888
+  write(    "    try {")
+  write(    "      counter++;")
+  counter += 1
+  resultTo = "local +=" if tryResultToLocal else "return"
+  if tryReturns and not (tryThrows and not tryFirstReturns):
+    write(  "      {} 4 + {increaseAndReturn15};".format(resultTo, **fragments))
+    if result == None:
+      counter += 1
+      if tryResultToLocal:
+        local += 19
+      else:
+        result = ("return", 19)
+  if tryThrows:
+    write(  "      {} 4 + {increaseAndThrow42};".format(resultTo, **fragments))
+    if result == None:
+      counter += 1
+      result = ("throw", 42)
+  if tryReturns and tryThrows and not tryFirstReturns:
+    write(  "      {} 4 + {increaseAndReturn15};".format(resultTo, **fragments))
+    if result == None:
+      counter += 1
+      if tryResultToLocal:
+        local += 19
+      else:
+        result = ("return", 19)
+  write(    "      counter++;")
+  if result == None:
+    counter += 1
+
+  if doCatch:
+    write(  "    } catch (ex) {")
+    write(  "      counter++;")
+    if isinstance(result, tuple) and result[0] == 'throw':
+      counter += 1
+    if catchThrows:
+      write("      throw 2 + ex;")
+      if isinstance(result, tuple) and result[0] == "throw":
+        result = ('throw', 2 + result[1])
+    elif catchReturns and catchWithLocal:
+      write("      return 2 + local;")
+      if isinstance(result, tuple) and result[0] == "throw":
+        result = ('return', 2 + local)
+    elif catchReturns and not catchWithLocal:
+      write("      return 2 + ex;");
+      if isinstance(result, tuple) and result[0] == "throw":
+        result = ('return', 2 + result[1])
+    elif catchWithLocal:
+      write("      local += ex;");
+      if isinstance(result, tuple) and result[0] == "throw":
+        local += result[1]
+        result = None
+        counter += 1
+    else:
+      if isinstance(result, tuple) and result[0] == "throw":
+        result = None
+        counter += 1
+    write(  "      counter++;")
+
+  if doFinally:
+    write(  "    } finally {")
+    write(  "      counter++;")
+    counter += 1
+    if finallyThrows:
+      write("      throw 25;")
+      result = ('throw', 25)
+    elif finallyReturns:
+      write("      return 3 + local;")
+      result = ('return', 3 + local)
+    elif not finallyReturns and not finallyThrows:
+      write("      local += 2;")
+      local += 2
+      counter += 1
+    else: assert False # unreachable
+    write(  "      counter++;")
+
+  write(    "    }")
+  write(    "    counter++;")
+  if result == None:
+    counter += 1
+  if endReturnLocal:
+    write(  "    return 5 + local;")
+    if result == None:
+      result = ('return', 5 + local)
+  write(    "  }")
+
+  if result == None:
+    write(  "  resetOptAndAssertResultEquals(undefined, f);")
+  else:
+    tag, value = result
+    if tag == "return":
+      write(  "  resetOptAndAssertResultEquals({}, f);".format(value))
+    else:
+      assert tag == "throw"
+      write(  "  resetOptAndAssertThrowsWith({}, f);".format(value))
+
+  write(  "  assertEquals({}, counter);".format(counter))
+  write(  "")
+
+  global NUM_TESTS_PRINTED, NUM_TESTS_IN_SHARD
+  NUM_TESTS_PRINTED += 1
+  NUM_TESTS_IN_SHARD += 1
+
+FILE = None # to be initialised to an open file
+SHARD_NUM = 1
+
+def write(*args):
+  return print(*args, file=FILE)
+
+
+
+def rotateshard():
+  global FILE, NUM_TESTS_IN_SHARD, SHARD_SIZE
+  if MODE != 'shard':
+    return
+  if FILE != None and NUM_TESTS_IN_SHARD < SHARD_SIZE:
+    return
+  if FILE != None:
+    finishshard()
+    assert FILE == None
+  FILE = open(SHARD_FILENAME_TEMPLATE.format(shard=SHARD_NUM), 'w')
+  write_shard_header()
+  NUM_TESTS_IN_SHARD = 0
+
+def finishshard():
+  global FILE, SHARD_NUM, MODE
+  assert FILE
+  write_shard_footer()
+  if MODE == 'shard':
+    print("Wrote shard {}.".format(SHARD_NUM))
+    FILE.close()
+    FILE = None
+    SHARD_NUM += 1
+
+
+def write_shard_header():
+  if MODE == 'shard':
+    write("// Shard {}.".format(SHARD_NUM))
+    write("")
+  write(PREAMBLE)
+  write("")
+
+def write_shard_footer():
+  write("}")
+  write("%NeverOptimizeFunction(runThisShard);")
+  write("")
+  write("// {} tests in this shard.".format(NUM_TESTS_IN_SHARD))
+  write("// {} tests up to here.".format(NUM_TESTS_PRINTED))
+  write("")
+  write("runThisShard();")
+
+FLAGLETTERS="54321trflcrltfrtld"
+
+flagtuple = namedtuple('flagtuple', (
+  "alternativeFn5",
+  "alternativeFn4",
+  "alternativeFn3",
+  "alternativeFn2",
+  "alternativeFn1",
+  "tryThrows",
+  "tryReturns",
+  "tryFirstReturns",
+  "tryResultToLocal",
+  "doCatch",
+  "catchReturns",
+  "catchWithLocal",
+  "catchThrows",
+  "doFinally",
+  "finallyReturns",
+  "finallyThrows",
+  "endReturnLocal",
+  "deopt"
+  ))
+
+emptyflags = flagtuple(*((False,) * len(flagtuple._fields)))
+f1 = emptyflags._replace(tryReturns=True, doCatch=True)
+
+# You can test function printtest with f1.
+
+allFlagCombinations = [
+    flagtuple(*bools)
+    for bools in booltuples(len(flagtuple._fields))
+]
+
+if __name__ == '__main__':
+  global MODE
+  if sys.argv[1:] == []:
+    MODE = 'stdout'
+    print("// Printing all shards together to stdout.")
+    print("")
+    write_shard_header()
+    FILE = sys.stdout
+  elif sys.argv[1:] == ['--shard-and-overwrite']:
+    MODE = 'shard'
+  else:
+    print("Usage:")
+    print("")
+    print("  python {}".format(sys.argv[0]))
+    print("      print all tests to standard output")
+    print("  python {} --shard-and-overwrite".format(sys.argv[0]))
+    print("      print all tests to {}".format(SHARD_FILENAME_TEMPLATE))
+
+    print("")
+    print(sys.argv[1:])
+    print("")
+    sys.exit(1)
+
+  rotateshard()
+
+  for flags in allFlagCombinations:
+    printtest(flags)
+    rotateshard()
+
+  finishshard()
+
+  if MODE == 'shard':
+    print("Total: {} tests.".format(NUM_TESTS_PRINTED))
diff --git a/tools/gen-postmortem-metadata.py b/tools/gen-postmortem-metadata.py
index 1275bb5..5fd39f3 100644
--- a/tools/gen-postmortem-metadata.py
+++ b/tools/gen-postmortem-metadata.py
@@ -168,8 +168,6 @@
         'value': 'ScopeInfo::kStackLocalCount' },
     { 'name': 'scopeinfo_idx_ncontextlocals',
         'value': 'ScopeInfo::kContextLocalCount' },
-    { 'name': 'scopeinfo_idx_ncontextglobals',
-        'value': 'ScopeInfo::kContextGlobalCount' },
     { 'name': 'scopeinfo_idx_first_vars',
         'value': 'ScopeInfo::kVariablePartIndex' },
 
diff --git a/tools/grokdump.py b/tools/grokdump.py
index ab8f326..4525e7e 100755
--- a/tools/grokdump.py
+++ b/tools/grokdump.py
@@ -39,6 +39,7 @@
 import optparse
 import os
 import re
+import StringIO
 import sys
 import types
 import urllib
@@ -1745,10 +1746,12 @@
     frame_pointer = self.reader.ExceptionFP()
     self.styles[frame_pointer] = "frame"
     for slot in xrange(stack_top, stack_bottom, self.reader.PointerSize()):
-      self.styles[slot] = "stackaddress"
+      # stack address
+      self.styles[slot] = "sa"
     for slot in xrange(stack_top, stack_bottom, self.reader.PointerSize()):
       maybe_address = self.reader.ReadUIntPtr(slot)
-      self.styles[maybe_address] = "stackval"
+      # stack value
+      self.styles[maybe_address] = "sv"
       if slot == frame_pointer:
         self.styles[slot] = "frame"
         frame_pointer = maybe_address
@@ -1760,7 +1763,7 @@
   def get_style_class_string(self, address):
     style = self.get_style_class(address)
     if style != None:
-      return " class=\"%s\" " % style
+      return " class=%s " % style
     else:
       return ""
 
@@ -1875,11 +1878,13 @@
 .dmptable {
   border-collapse : collapse;
   border-spacing : 0px;
+  table-layout: fixed;
 }
 
 .codedump {
   border-collapse : collapse;
   border-spacing : 0px;
+  table-layout: fixed;
 }
 
 .addrcomments {
@@ -1932,11 +1937,11 @@
   background-color : cyan;
 }
 
-.stackaddress {
+.stackaddress, .sa {
   background-color : LightGray;
 }
 
-.stackval {
+.stackval, .sv {
   background-color : LightCyan;
 }
 
@@ -1944,16 +1949,17 @@
   background-color : cyan;
 }
 
-.commentinput {
+.commentinput, .ci {
   width : 20em;
 }
 
-a.nodump:visited {
+/* a.nodump */
+a.nd:visited {
   color : black;
   text-decoration : none;
 }
 
-a.nodump:link {
+a.nd:link {
   color : black;
   text-decoration : none;
 }
@@ -1984,6 +1990,7 @@
     send_comment(s.substring(index + address_len), event.srcElement.value);
   }
 }
+var c = comment;
 
 function send_comment(address, comment) {
   xmlhttp = new XMLHttpRequest();
@@ -2038,7 +2045,7 @@
 
 <body>
   <div class="header">
-    <form class="navigation" action="search.html">
+    <form class="navigation" action=/search.html">
       <a href="summary.html?%(query_dump)s">Context info</a>&nbsp;&nbsp;&nbsp;
       <a href="info.html?%(query_dump)s">Dump info</a>&nbsp;&nbsp;&nbsp;
       <a href="modules.html?%(query_dump)s">Modules</a>&nbsp;&nbsp;&nbsp;
@@ -2095,24 +2102,34 @@
       query_components = urlparse.parse_qs(parsedurl.query)
       if parsedurl.path == "/dumps.html":
         self.send_success_html_headers()
-        self.server.output_dumps(self.wfile)
+        out_buffer = StringIO.StringIO()
+        self.server.output_dumps(out_buffer)
+        self.wfile.write(out_buffer.getvalue())
       elif parsedurl.path == "/summary.html":
         self.send_success_html_headers()
-        self.formatter(query_components).output_summary(self.wfile)
+        out_buffer = StringIO.StringIO()
+        self.formatter(query_components).output_summary(out_buffer)
+        self.wfile.write(out_buffer.getvalue())
       elif parsedurl.path == "/info.html":
         self.send_success_html_headers()
-        self.formatter(query_components).output_info(self.wfile)
+        out_buffer = StringIO.StringIO()
+        self.formatter(query_components).output_info(out_buffer)
+        self.wfile.write(out_buffer.getvalue())
       elif parsedurl.path == "/modules.html":
         self.send_success_html_headers()
-        self.formatter(query_components).output_modules(self.wfile)
-      elif parsedurl.path == "/search.html":
+        out_buffer = StringIO.StringIO()
+        self.formatter(query_components).output_modules(out_buffer)
+        self.wfile.write(out_buffer.getvalue())
+      elif parsedurl.path == "/search.html" or parsedurl.path == "/s":
         address = query_components.get("val", [])
         if len(address) != 1:
           self.send_error(404, "Invalid params")
           return
         self.send_success_html_headers()
+        out_buffer = StringIO.StringIO()
         self.formatter(query_components).output_search_res(
-            self.wfile, address[0])
+            out_buffer, address[0])
+        self.wfile.write(out_buffer.getvalue())
       elif parsedurl.path == "/disasm.html":
         address = query_components.get("val", [])
         exact = query_components.get("exact", ["on"])
@@ -2120,15 +2137,19 @@
           self.send_error(404, "Invalid params")
           return
         self.send_success_html_headers()
+        out_buffer = StringIO.StringIO()
         self.formatter(query_components).output_disasm(
-            self.wfile, address[0], exact[0])
+            out_buffer, address[0], exact[0])
+        self.wfile.write(out_buffer.getvalue())
       elif parsedurl.path == "/data.html":
         address = query_components.get("val", [])
         datakind = query_components.get("type", ["address"])
         if len(address) == 1 and len(datakind) == 1:
           self.send_success_html_headers()
+          out_buffer = StringIO.StringIO()
           self.formatter(query_components).output_data(
-              self.wfile, address[0], datakind[0])
+              out_buffer, address[0], datakind[0])
+          self.wfile.write(out_buffer.getvalue())
         else:
           self.send_error(404,'Invalid params')
       elif parsedurl.path == "/setdumpdesc":
@@ -2235,8 +2256,8 @@
         straddress = "0x" + self.reader.FormatIntPtr(maybeaddress)
       style_class = ""
       if not self.reader.IsValidAddress(maybeaddress):
-        style_class = " class=\"nodump\""
-      return ("<a %s href=\"search.html?%s&amp;val=%s\">%s</a>" %
+        style_class = "class=nd"
+      return ("<a %s href=s?%s&amp;val=%s>%s</a>" %
               (style_class, self.encfilename, straddress, straddress))
 
   def output_header(self, f):
@@ -2247,7 +2268,7 @@
   def output_footer(self, f):
     f.write(WEB_FOOTER)
 
-  MAX_CONTEXT_STACK = 4096
+  MAX_CONTEXT_STACK = 2048
 
   def output_summary(self, f):
     self.output_header(f)
@@ -2257,9 +2278,10 @@
 
     # Output stack
     exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
-    stack_bottom = exception_thread.stack.start + \
-        min(exception_thread.stack.memory.data_size, self.MAX_CONTEXT_STACK)
     stack_top = self.reader.ExceptionSP()
+    stack_bottom = min(exception_thread.stack.start + \
+        exception_thread.stack.memory.data_size,
+        stack_top + self.MAX_CONTEXT_STACK)
     self.output_words(f, stack_top - 16, stack_bottom, stack_top, "Stack")
 
     f.write('</div>')
@@ -2268,14 +2290,14 @@
 
   def output_info(self, f):
     self.output_header(f)
-    f.write("<h3>Dump info</h3>\n")
+    f.write("<h3>Dump info</h3>")
     f.write("Description: ")
     self.server.output_dump_desc_field(f, self.dumpfilename)
-    f.write("<br>\n")
+    f.write("<br>")
     f.write("Filename: ")
-    f.write("<span class=\"code\">%s</span><br>\n" % (self.dumpfilename))
+    f.write("<span class=\"code\">%s</span><br>" % (self.dumpfilename))
     dt = datetime.datetime.fromtimestamp(self.reader.header.time_date_stampt)
-    f.write("Timestamp: %s<br>\n" % dt.strftime('%Y-%m-%d %H:%M:%S'))
+    f.write("Timestamp: %s<br>" % dt.strftime('%Y-%m-%d %H:%M:%S'))
     self.output_context(f, InspectionWebFormatter.CONTEXT_FULL)
     self.output_address_ranges(f)
     self.output_footer(f)
@@ -2286,22 +2308,22 @@
     def print_region(_reader, start, size, _location):
       regions[start] = size
     self.reader.ForEachMemoryRegion(print_region)
-    f.write("<h3>Available memory regions</h3>\n")
+    f.write("<h3>Available memory regions</h3>")
     f.write('<div class="code">')
-    f.write("<table class=\"regions\">\n")
+    f.write("<table class=\"regions\">")
     f.write("<thead><tr>")
     f.write("<th>Start address</th>")
     f.write("<th>End address</th>")
     f.write("<th>Number of bytes</th>")
-    f.write("</tr></thead>\n")
+    f.write("</tr></thead>")
     for start in sorted(regions):
       size = regions[start]
       f.write("<tr>")
       f.write("<td>%s</td>" % self.format_address(start))
       f.write("<td>&nbsp;%s</td>" % self.format_address(start + size))
       f.write("<td>&nbsp;%d</td>" % size)
-      f.write("</tr>\n")
-    f.write("</table>\n")
+      f.write("</tr>")
+    f.write("</table>")
     f.write('</div>')
     return
 
@@ -2311,19 +2333,19 @@
                                     module.version_info.dwFileVersionLS)
     product_version = GetVersionString(module.version_info.dwProductVersionMS,
                                        module.version_info.dwProductVersionLS)
-    f.write("<br>&nbsp;&nbsp;\n")
+    f.write("<br>&nbsp;&nbsp;")
     f.write("base: %s" % self.reader.FormatIntPtr(module.base_of_image))
-    f.write("<br>&nbsp;&nbsp;\n")
+    f.write("<br>&nbsp;&nbsp;")
     f.write("  end: %s" % self.reader.FormatIntPtr(module.base_of_image +
                                             module.size_of_image))
-    f.write("<br>&nbsp;&nbsp;\n")
+    f.write("<br>&nbsp;&nbsp;")
     f.write("  file version: %s" % file_version)
-    f.write("<br>&nbsp;&nbsp;\n")
+    f.write("<br>&nbsp;&nbsp;")
     f.write("  product version: %s" % product_version)
-    f.write("<br>&nbsp;&nbsp;\n")
+    f.write("<br>&nbsp;&nbsp;")
     time_date_stamp = datetime.datetime.fromtimestamp(module.time_date_stamp)
     f.write("  timestamp: %s" % time_date_stamp)
-    f.write("<br>\n");
+    f.write("<br>");
 
   def output_modules(self, f):
     self.output_header(f)
@@ -2337,16 +2359,16 @@
   def output_context(self, f, details):
     exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
     f.write("<h3>Exception context</h3>")
-    f.write('<div class="code">\n')
+    f.write('<div class="code">')
     f.write("Thread id: %d" % exception_thread.id)
-    f.write("&nbsp;&nbsp; Exception code: %08X<br/>\n" %
+    f.write("&nbsp;&nbsp; Exception code: %08X<br/>" %
             self.reader.exception.exception.code)
     if details == InspectionWebFormatter.CONTEXT_FULL:
       if self.reader.exception.exception.parameter_count > 0:
-        f.write("&nbsp;&nbsp; Exception parameters: \n")
+        f.write("&nbsp;&nbsp; Exception parameters: ")
         for i in xrange(0, self.reader.exception.exception.parameter_count):
           f.write("%08x" % self.reader.exception.exception.information[i])
-        f.write("<br><br>\n")
+        f.write("<br><br>")
 
     for r in CONTEXT_FOR_ARCH[self.reader.arch]:
       f.write(HTML_REG_FORMAT %
@@ -2357,7 +2379,7 @@
     else:
       f.write("<b>eflags</b>: %s" %
               bin(self.reader.exception_context.eflags)[2:])
-    f.write('</div>\n')
+    f.write('</div>')
     return
 
   def align_down(self, a, size):
@@ -2394,7 +2416,7 @@
                    highlight_address, desc):
     region = self.reader.FindRegion(highlight_address)
     if region is None:
-      f.write("<h3>Address 0x%x not found in the dump.</h3>\n" %
+      f.write("<h3>Address 0x%x not found in the dump.</h3>" %
               (highlight_address))
       return
     size = self.heap.PointerSize()
@@ -2415,10 +2437,10 @@
                 (self.encfilename, highlight_address))
 
     f.write("<h3>%s 0x%x - 0x%x, "
-            "highlighting <a href=\"#highlight\">0x%x</a> %s</h3>\n" %
+            "highlighting <a href=\"#highlight\">0x%x</a> %s</h3>" %
             (desc, start_address, end_address, highlight_address, expand))
     f.write('<div class="code">')
-    f.write("<table class=\"codedump\">\n")
+    f.write("<table class=codedump>")
 
     for j in xrange(0, end_address - start_address, size):
       slot = start_address + j
@@ -2440,33 +2462,31 @@
         if maybe_address:
           heap_object = self.format_object(maybe_address)
 
-      address_fmt = "%s&nbsp;</td>\n"
+      address_fmt = "%s&nbsp;</td>"
       if slot == highlight_address:
-        f.write("<tr class=\"highlight-line\">\n")
-        address_fmt = "<a id=\"highlight\"></a>%s&nbsp;</td>\n"
+        f.write("<tr class=highlight-line>")
+        address_fmt = "<a id=highlight></a>%s&nbsp;</td>"
       elif slot < highlight_address and highlight_address < slot + size:
-        f.write("<tr class=\"inexact-highlight-line\">\n")
-        address_fmt = "<a id=\"highlight\"></a>%s&nbsp;</td>\n"
+        f.write("<tr class=inexact-highlight-line>")
+        address_fmt = "<a id=highlight></a>%s&nbsp;</td>"
       else:
-        f.write("<tr>\n")
+        f.write("<tr>")
 
-      f.write("  <td>")
+      f.write("<td>")
       self.output_comment_box(f, "da-", slot)
-      f.write("</td>\n")
-      f.write("  ")
+      f.write("</td>")
       self.td_from_address(f, slot)
       f.write(address_fmt % self.format_address(slot))
-      f.write("  ")
       self.td_from_address(f, maybe_address)
-      f.write(":&nbsp;%s&nbsp;</td>\n" % straddress)
-      f.write("  <td>")
+      f.write(":&nbsp;%s&nbsp;</td>" % straddress)
+      f.write("<td>")
       if maybe_address != None:
         self.output_comment_box(
             f, "sv-" + self.reader.FormatIntPtr(slot), maybe_address)
-      f.write("  </td>\n")
-      f.write("  <td>%s</td>\n" % (heap_object or ''))
-      f.write("</tr>\n")
-    f.write("</table>\n")
+      f.write("</td>")
+      f.write("<td>%s</td>" % (heap_object or ''))
+      f.write("</tr>")
+    f.write("</table>")
     f.write("</div>")
     return
 
@@ -2565,7 +2585,7 @@
     f.write("<h3>Disassembling 0x%x - 0x%x, highlighting 0x%x %s</h3>" %
             (start_address, end_address, highlight_address, expand))
     f.write('<div class="code">')
-    f.write("<table class=\"codedump\">\n");
+    f.write("<table class=\"codedump\">");
     for i in xrange(len(lines)):
       line = lines[i]
       next_address = count
@@ -2574,7 +2594,7 @@
         next_address = next_line[0]
       self.format_disasm_line(
           f, start_address, line, next_address, highlight_address)
-    f.write("</table>\n")
+    f.write("</table>")
     f.write("</div>")
     return
 
@@ -2590,22 +2610,22 @@
       extra.append(cgi.escape(str(object_info)))
     if len(extra) == 0:
       return line
-    return ("%s <span class=\"disasmcomment\">;; %s</span>" %
+    return ("%s <span class=disasmcomment>;; %s</span>" %
             (line, ", ".join(extra)))
 
   def format_disasm_line(
       self, f, start, line, next_address, highlight_address):
     line_address = start + line[0]
-    address_fmt = "  <td>%s</td>\n"
+    address_fmt = "  <td>%s</td>"
     if line_address == highlight_address:
-      f.write("<tr class=\"highlight-line\">\n")
-      address_fmt = "  <td><a id=\"highlight\">%s</a></td>\n"
+      f.write("<tr class=highlight-line>")
+      address_fmt = "  <td><a id=highlight>%s</a></td>"
     elif (line_address < highlight_address and
           highlight_address < next_address + start):
-      f.write("<tr class=\"inexact-highlight-line\">\n")
-      address_fmt = "  <td><a id=\"highlight\">%s</a></td>\n"
+      f.write("<tr class=inexact-highlight-line>")
+      address_fmt = "  <td><a id=highlight>%s</a></td>"
     else:
-      f.write("<tr>\n")
+      f.write("<tr>")
     num_bytes = next_address - line[0]
     stack_slot = self.heap.stack_map.get(line_address)
     marker = ""
@@ -2630,22 +2650,26 @@
     code = self.annotate_disasm_addresses(code[op_offset:])
     f.write("  <td>")
     self.output_comment_box(f, "codel-", line_address)
-    f.write("</td>\n")
+    f.write("</td>")
     f.write(address_fmt % marker)
     f.write("  ")
     self.td_from_address(f, line_address)
-    f.write("%s (+0x%x)</td>\n" %
-            (self.format_address(line_address), line[0]))
-    f.write("  <td>:&nbsp;%s&nbsp;</td>\n" % opcodes)
-    f.write("  <td>%s</td>\n" % code)
-    f.write("</tr>\n")
+    f.write(self.format_address(line_address))
+    f.write(" (+0x%x)</td>" % line[0])
+    f.write("<td>:&nbsp;%s&nbsp;</td>" % opcodes)
+    f.write("<td>%s</td>" % code)
+    f.write("</tr>")
 
   def output_comment_box(self, f, prefix, address):
-    f.write("<input type=\"text\" class=\"commentinput\" "
-            "id=\"%s-address-0x%s\" onchange=\"comment()\" value=\"%s\">" %
+    comment = self.comments.get_comment(address)
+    value = ""
+    if comment:
+      value = " value=\"%s\"" % cgi.escape(comment)
+    f.write("<input type=text class=ci "
+            "id=%s-address-0x%s onchange=c()%s>" %
             (prefix,
              self.reader.FormatIntPtr(address),
-             cgi.escape(self.comments.get_comment(address)) or ""))
+             value))
 
   MAX_FOUND_RESULTS = 100
 
@@ -2655,27 +2679,27 @@
     if toomany:
       f.write("(found %i results, displaying only first %i)" %
               (len(results), self.MAX_FOUND_RESULTS))
-    f.write(": \n")
+    f.write(": ")
     results = sorted(results)
     results = results[:min(len(results), self.MAX_FOUND_RESULTS)]
     for address in results:
-      f.write("<span %s>%s</span>\n" %
+      f.write("<span %s>%s</span>" %
               (self.comments.get_style_class_string(address),
                self.format_address(address)))
     if toomany:
-      f.write("...\n")
+      f.write("...")
 
 
   def output_page_info(self, f, page_kind, page_address, my_page_address):
     if my_page_address == page_address and page_address != 0:
-      f.write("Marked first %s page.\n" % page_kind)
+      f.write("Marked first %s page." % page_kind)
     else:
       f.write("<span id=\"%spage\" style=\"display:none\">" % page_kind)
       f.write("Marked first %s page." % page_kind)
       f.write("</span>\n")
       f.write("<button onclick=\"onpage('%spage', '0x%x')\">" %
               (page_kind, my_page_address))
-      f.write("Mark as first %s page</button>\n" % page_kind)
+      f.write("Mark as first %s page</button>" % page_kind)
     return
 
   def output_search_res(self, f, straddress):
@@ -2687,11 +2711,11 @@
 
       f.write("Comment: ")
       self.output_comment_box(f, "search-", address)
-      f.write("<br>\n")
+      f.write("<br>")
 
       page_address = address & ~self.heap.PageAlignmentMask()
 
-      f.write("Page info: \n")
+      f.write("Page info: ")
       self.output_page_info(f, "old", self.padawan.known_first_old_page, \
                             page_address)
       self.output_page_info(f, "map", self.padawan.known_first_map_page, \
@@ -2705,27 +2729,27 @@
         self.output_words(f, address - 8, address + 32, address, "Dump")
 
         # Print as ASCII
-        f.write("<hr>\n")
+        f.write("<hr>")
         self.output_ascii(f, address, address + 256, address)
 
         # Print as code
-        f.write("<hr>\n")
+        f.write("<hr>")
         self.output_disasm_range(f, address - 16, address + 16, address, True)
 
       aligned_res, unaligned_res = self.reader.FindWordList(address)
 
       if len(aligned_res) > 0:
-        f.write("<h3>Occurrences of 0x%x at aligned addresses</h3>\n" %
+        f.write("<h3>Occurrences of 0x%x at aligned addresses</h3>" %
                 address)
         self.output_find_results(f, aligned_res)
 
       if len(unaligned_res) > 0:
-        f.write("<h3>Occurrences of 0x%x at unaligned addresses</h3>\n" % \
+        f.write("<h3>Occurrences of 0x%x at unaligned addresses</h3>" % \
                 address)
         self.output_find_results(f, unaligned_res)
 
       if len(aligned_res) + len(unaligned_res) == 0:
-        f.write("<h3>No occurences of 0x%x found in the dump</h3>\n" % address)
+        f.write("<h3>No occurences of 0x%x found in the dump</h3>" % address)
 
       self.output_footer(f)
 
diff --git a/tools/gyp/AUTHORS b/tools/gyp/AUTHORS
deleted file mode 100644
index 727df6d..0000000
--- a/tools/gyp/AUTHORS
+++ /dev/null
@@ -1,13 +0,0 @@
-# Names should be added to this file like so:
-# Name or Organization <email address>
-
-Google Inc.
-Bloomberg Finance L.P.
-Yandex LLC
-
-Steven Knight <knight@baldmt.com>
-Ryan Norton <rnorton10@gmail.com>
-David J. Sankel <david@sankelsoftware.com>
-Eric N. Vander Weele <ericvw@gmail.com>
-Tom Freudenberg <th.freudenberg@gmail.com>
-Julien Brianceau <jbriance@cisco.com>
diff --git a/tools/gyp/DEPS b/tools/gyp/DEPS
deleted file mode 100644
index 167fb77..0000000
--- a/tools/gyp/DEPS
+++ /dev/null
@@ -1,23 +0,0 @@
-# DEPS file for gclient use in buildbot execution of gyp tests.
-#
-# (You don't need to use gclient for normal GYP development work.)
-
-vars = {
-  "chromium_git": "https://chromium.googlesource.com/",
-}
-
-deps = {
-}
-
-deps_os = {
-  "win": {
-    "third_party/cygwin":
-      Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9",
-
-    "third_party/python_26":
-      Var("chromium_git") + "chromium/deps/python_26@5bb4080",
-
-    "src/third_party/pefile":
-       Var("chromium_git") + "external/pefile@72c6ae4",
-  },
-}
diff --git a/tools/gyp/LICENSE b/tools/gyp/LICENSE
deleted file mode 100644
index ab6b011..0000000
--- a/tools/gyp/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2009 Google Inc. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/tools/gyp/OWNERS b/tools/gyp/OWNERS
deleted file mode 100644
index 72e8ffc..0000000
--- a/tools/gyp/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py
deleted file mode 100644
index dde0253..0000000
--- a/tools/gyp/PRESUBMIT.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Top-level presubmit script for GYP.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
-for more details about the presubmit API built into gcl.
-"""
-
-
-PYLINT_BLACKLIST = [
-    # TODO: fix me.
-    # From SCons, not done in google style.
-    'test/lib/TestCmd.py',
-    'test/lib/TestCommon.py',
-    'test/lib/TestGyp.py',
-]
-
-
-PYLINT_DISABLED_WARNINGS = [
-    # TODO: fix me.
-    # Many tests include modules they don't use.
-    'W0611',
-    # Possible unbalanced tuple unpacking with sequence.
-    'W0632',
-    # Attempting to unpack a non-sequence.
-    'W0633',
-    # Include order doesn't properly include local files?
-    'F0401',
-    # Some use of built-in names.
-    'W0622',
-    # Some unused variables.
-    'W0612',
-    # Operator not preceded/followed by space.
-    'C0323',
-    'C0322',
-    # Unnecessary semicolon.
-    'W0301',
-    # Unused argument.
-    'W0613',
-    # String has no effect (docstring in wrong place).
-    'W0105',
-    # map/filter on lambda could be replaced by comprehension.
-    'W0110',
-    # Use of eval.
-    'W0123',
-    # Comma not followed by space.
-    'C0324',
-    # Access to a protected member.
-    'W0212',
-    # Bad indent.
-    'W0311',
-    # Line too long.
-    'C0301',
-    # Undefined variable.
-    'E0602',
-    # Not exception type specified.
-    'W0702',
-    # No member of that name.
-    'E1101',
-    # Dangerous default {}.
-    'W0102',
-    # Cyclic import.
-    'R0401',
-    # Others, too many to sort.
-    'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
-    'R0201', 'E0101', 'C0321',
-    # ************* Module copy
-    # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
-    'W0104',
-]
-
-
-def CheckChangeOnUpload(input_api, output_api):
-  report = []
-  report.extend(input_api.canned_checks.PanProjectChecks(
-      input_api, output_api))
-  return report
-
-
-def CheckChangeOnCommit(input_api, output_api):
-  report = []
-
-  # Accept any year number from 2009 to the current year.
-  current_year = int(input_api.time.strftime('%Y'))
-  allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
-  years_re = '(' + '|'.join(allowed_years) + ')'
-
-  # The (c) is deprecated, but tolerate it until it's removed from all files.
-  license = (
-      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
-      r'.*? Use of this source code is governed by a BSD-style license that '
-        r'can be\n'
-      r'.*? found in the LICENSE file\.\n'
-  ) % {
-      'year': years_re,
-  }
-
-  report.extend(input_api.canned_checks.PanProjectChecks(
-      input_api, output_api, license_header=license))
-  report.extend(input_api.canned_checks.CheckTreeIsOpen(
-      input_api, output_api,
-      'http://gyp-status.appspot.com/status',
-      'http://gyp-status.appspot.com/current'))
-
-  import os
-  import sys
-  old_sys_path = sys.path
-  try:
-    sys.path = ['pylib', 'test/lib'] + sys.path
-    blacklist = PYLINT_BLACKLIST
-    if sys.platform == 'win32':
-      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
-                   for x in PYLINT_BLACKLIST]
-    report.extend(input_api.canned_checks.RunPylint(
-        input_api,
-        output_api,
-        black_list=blacklist,
-        disabled_warnings=PYLINT_DISABLED_WARNINGS))
-  finally:
-    sys.path = old_sys_path
-  return report
-
-
-TRYBOTS = [
-    'linux_try',
-    'mac_try',
-    'win_try',
-]
-
-
-def GetPreferredTryMasters(_, change):
-  return {
-      'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
-  }
diff --git a/tools/gyp/README.md b/tools/gyp/README.md
deleted file mode 100644
index c0d73ac..0000000
--- a/tools/gyp/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-GYP can Generate Your Projects.
-===================================
-
-Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline.
diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py
deleted file mode 100755
index 9a2b71f..0000000
--- a/tools/gyp/buildbot/buildbot_run.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Argument-less script to select what to run on the buildbots."""
-
-import os
-import shutil
-import subprocess
-import sys
-
-
-BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
-TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
-ROOT_DIR = os.path.dirname(TRUNK_DIR)
-CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
-CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
-OUT_DIR = os.path.join(TRUNK_DIR, 'out')
-
-
-def CallSubProcess(*args, **kwargs):
-  """Wrapper around subprocess.call which treats errors as build exceptions."""
-  with open(os.devnull) as devnull_fd:
-    retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs)
-  if retcode != 0:
-    print '@@@STEP_EXCEPTION@@@'
-    sys.exit(1)
-
-
-def PrepareCmake():
-  """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
-  if os.environ['BUILDBOT_CLOBBER'] == '1':
-    print '@@@BUILD_STEP Clobber CMake checkout@@@'
-    shutil.rmtree(CMAKE_DIR)
-
-  # We always build CMake 2.8.8, so no need to do anything
-  # if the directory already exists.
-  if os.path.isdir(CMAKE_DIR):
-    return
-
-  print '@@@BUILD_STEP Initialize CMake checkout@@@'
-  os.mkdir(CMAKE_DIR)
-
-  print '@@@BUILD_STEP Sync CMake@@@'
-  CallSubProcess(
-      ['git', 'clone',
-       '--depth', '1',
-       '--single-branch',
-       '--branch', 'v2.8.8',
-       '--',
-       'git://cmake.org/cmake.git',
-       CMAKE_DIR],
-      cwd=CMAKE_DIR)
-
-  print '@@@BUILD_STEP Build CMake@@@'
-  CallSubProcess(
-      ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
-      cwd=CMAKE_DIR)
-
-  CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
-
-
-def GypTestFormat(title, format=None, msvs_version=None, tests=[]):
-  """Run the gyp tests for a given format, emitting annotator tags.
-
-  See annotator docs at:
-    https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
-  Args:
-    format: gyp format to test.
-  Returns:
-    0 for sucesss, 1 for failure.
-  """
-  if not format:
-    format = title
-
-  print '@@@BUILD_STEP ' + title + '@@@'
-  sys.stdout.flush()
-  env = os.environ.copy()
-  if msvs_version:
-    env['GYP_MSVS_VERSION'] = msvs_version
-  command = ' '.join(
-      [sys.executable, 'gyp/gyptest.py',
-       '--all',
-       '--passed',
-       '--format', format,
-       '--path', CMAKE_BIN_DIR,
-       '--chdir', 'gyp'] + tests)
-  retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
-  if retcode:
-    # Emit failure tag, and keep going.
-    print '@@@STEP_FAILURE@@@'
-    return 1
-  return 0
-
-
-def GypBuild():
-  # Dump out/ directory.
-  print '@@@BUILD_STEP cleanup@@@'
-  print 'Removing %s...' % OUT_DIR
-  shutil.rmtree(OUT_DIR, ignore_errors=True)
-  print 'Done.'
-
-  retcode = 0
-  if sys.platform.startswith('linux'):
-    retcode += GypTestFormat('ninja')
-    retcode += GypTestFormat('make')
-    PrepareCmake()
-    retcode += GypTestFormat('cmake')
-  elif sys.platform == 'darwin':
-    retcode += GypTestFormat('ninja')
-    retcode += GypTestFormat('xcode')
-    retcode += GypTestFormat('make')
-  elif sys.platform == 'win32':
-    retcode += GypTestFormat('ninja')
-    if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
-      retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja',
-                               msvs_version='2013',
-                               tests=[
-                                  r'test\generator-output\gyptest-actions.py',
-                                  r'test\generator-output\gyptest-relocate.py',
-                                  r'test\generator-output\gyptest-rules.py'])
-      retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013')
-  else:
-    raise Exception('Unknown platform')
-  if retcode:
-    # TODO(bradnelson): once the annotator supports a postscript (section for
-    #     after the build proper that could be used for cumulative failures),
-    #     use that instead of this. This isolates the final return value so
-    #     that it isn't misattributed to the last stage.
-    print '@@@BUILD_STEP failures@@@'
-    sys.exit(retcode)
-
-
-if __name__ == '__main__':
-  GypBuild()
diff --git a/tools/gyp/buildbot/commit_queue/OWNERS b/tools/gyp/buildbot/commit_queue/OWNERS
deleted file mode 100644
index b269c19..0000000
--- a/tools/gyp/buildbot/commit_queue/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-set noparent
-bradnelson@chromium.org
-bradnelson@google.com
-iannucci@chromium.org
-scottmg@chromium.org
-thakis@chromium.org
diff --git a/tools/gyp/buildbot/commit_queue/README b/tools/gyp/buildbot/commit_queue/README
deleted file mode 100644
index 9428497..0000000
--- a/tools/gyp/buildbot/commit_queue/README
+++ /dev/null
@@ -1,3 +0,0 @@
-cq_config.json describes the trybots that must pass in order
-to land a change through the commit queue.
-Comments are here as the file is strictly JSON.
diff --git a/tools/gyp/buildbot/commit_queue/cq_config.json b/tools/gyp/buildbot/commit_queue/cq_config.json
deleted file mode 100644
index 656c21e..0000000
--- a/tools/gyp/buildbot/commit_queue/cq_config.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-    "trybots": {
-        "launched": {
-            "tryserver.nacl": {
-                "gyp-presubmit": ["defaulttests"],
-                "gyp-linux": ["defaulttests"],
-                "gyp-mac": ["defaulttests"],
-                "gyp-win32": ["defaulttests"],
-                "gyp-win64": ["defaulttests"]
-            }
-        },
-        "triggered": {
-        }
-    }
-}
diff --git a/tools/gyp/codereview.settings b/tools/gyp/codereview.settings
deleted file mode 100644
index faf37f1..0000000
--- a/tools/gyp/codereview.settings
+++ /dev/null
@@ -1,10 +0,0 @@
-# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: codereview.chromium.org
-CC_LIST: gyp-developer@googlegroups.com
-VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
-TRY_ON_UPLOAD: False
-TRYSERVER_PROJECT: gyp
-TRYSERVER_PATCHLEVEL: 1
-TRYSERVER_ROOT: gyp
-TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
-PROJECT: gyp
diff --git a/tools/gyp/data/win/large-pdb-shim.cc b/tools/gyp/data/win/large-pdb-shim.cc
deleted file mode 100644
index 8bca510..0000000
--- a/tools/gyp/data/win/large-pdb-shim.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
-// then used during the final link for modules that have large PDBs. Otherwise,
-// the linker will generate a pdb with a page size of 1KB, which imposes a limit
-// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
-// (rather than the linker), this limit is avoided. With this in place PDBs may
-// grow to 2GB.
-//
-// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
diff --git a/tools/gyp/gyp b/tools/gyp/gyp
deleted file mode 100755
index 1b8b9bd..0000000
--- a/tools/gyp/gyp
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-base=$(dirname "$0")
-exec python "${base}/gyp_main.py" "$@"
diff --git a/tools/gyp/gyp.bat b/tools/gyp/gyp.bat
deleted file mode 100755
index c0b4ca2..0000000
--- a/tools/gyp/gyp.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-@rem Copyright (c) 2009 Google Inc. All rights reserved.
-@rem Use of this source code is governed by a BSD-style license that can be
-@rem found in the LICENSE file.
-
-@python "%~dp0gyp_main.py" %*
diff --git a/tools/gyp/gyp_main.py b/tools/gyp/gyp_main.py
deleted file mode 100755
index 25a6eba..0000000
--- a/tools/gyp/gyp_main.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-# Make sure we're using the version of pylib in this repo, not one installed
-# elsewhere on the system.
-sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
-import gyp
-
-if __name__ == '__main__':
-  sys.exit(gyp.script_main())
diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py
deleted file mode 100755
index 8e4fc47..0000000
--- a/tools/gyp/gyptest.py
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__doc__ = """
-gyptest.py -- test runner for GYP tests.
-"""
-
-import os
-import optparse
-import subprocess
-import sys
-
-class CommandRunner(object):
-  """
-  Executor class for commands, including "commands" implemented by
-  Python functions.
-  """
-  verbose = True
-  active = True
-
-  def __init__(self, dictionary={}):
-    self.subst_dictionary(dictionary)
-
-  def subst_dictionary(self, dictionary):
-    self._subst_dictionary = dictionary
-
-  def subst(self, string, dictionary=None):
-    """
-    Substitutes (via the format operator) the values in the specified
-    dictionary into the specified command.
-
-    The command can be an (action, string) tuple.  In all cases, we
-    perform substitution on strings and don't worry if something isn't
-    a string.  (It's probably a Python function to be executed.)
-    """
-    if dictionary is None:
-      dictionary = self._subst_dictionary
-    if dictionary:
-      try:
-        string = string % dictionary
-      except TypeError:
-        pass
-    return string
-
-  def display(self, command, stdout=None, stderr=None):
-    if not self.verbose:
-      return
-    if type(command) == type(()):
-      func = command[0]
-      args = command[1:]
-      s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
-    if type(command) == type([]):
-      # TODO:  quote arguments containing spaces
-      # TODO:  handle meta characters?
-      s = ' '.join(command)
-    else:
-      s = self.subst(command)
-    if not s.endswith('\n'):
-      s += '\n'
-    sys.stdout.write(s)
-    sys.stdout.flush()
-
-  def execute(self, command, stdout=None, stderr=None):
-    """
-    Executes a single command.
-    """
-    if not self.active:
-      return 0
-    if type(command) == type(''):
-      command = self.subst(command)
-      cmdargs = shlex.split(command)
-      if cmdargs[0] == 'cd':
-         command = (os.chdir,) + tuple(cmdargs[1:])
-    if type(command) == type(()):
-      func = command[0]
-      args = command[1:]
-      return func(*args)
-    else:
-      if stdout is sys.stdout:
-        # Same as passing sys.stdout, except python2.4 doesn't fail on it.
-        subout = None
-      else:
-        # Open pipe for anything else so Popen works on python2.4.
-        subout = subprocess.PIPE
-      if stderr is sys.stderr:
-        # Same as passing sys.stderr, except python2.4 doesn't fail on it.
-        suberr = None
-      elif stderr is None:
-        # Merge with stdout if stderr isn't specified.
-        suberr = subprocess.STDOUT
-      else:
-        # Open pipe for anything else so Popen works on python2.4.
-        suberr = subprocess.PIPE
-      p = subprocess.Popen(command,
-                           shell=(sys.platform == 'win32'),
-                           stdout=subout,
-                           stderr=suberr)
-      p.wait()
-      if stdout is None:
-        self.stdout = p.stdout.read()
-      elif stdout is not sys.stdout:
-        stdout.write(p.stdout.read())
-      if stderr not in (None, sys.stderr):
-        stderr.write(p.stderr.read())
-      return p.returncode
-
-  def run(self, command, display=None, stdout=None, stderr=None):
-    """
-    Runs a single command, displaying it first.
-    """
-    if display is None:
-      display = command
-    self.display(display)
-    return self.execute(command, stdout, stderr)
-
-
-class Unbuffered(object):
-  def __init__(self, fp):
-    self.fp = fp
-  def write(self, arg):
-    self.fp.write(arg)
-    self.fp.flush()
-  def __getattr__(self, attr):
-    return getattr(self.fp, attr)
-
-sys.stdout = Unbuffered(sys.stdout)
-sys.stderr = Unbuffered(sys.stderr)
-
-
-def is_test_name(f):
-  return f.startswith('gyptest') and f.endswith('.py')
-
-
-def find_all_gyptest_files(directory):
-  result = []
-  for root, dirs, files in os.walk(directory):
-    if '.svn' in dirs:
-      dirs.remove('.svn')
-    result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
-  result.sort()
-  return result
-
-
-def main(argv=None):
-  if argv is None:
-    argv = sys.argv
-
-  usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
-  parser = optparse.OptionParser(usage=usage)
-  parser.add_option("-a", "--all", action="store_true",
-            help="run all tests")
-  parser.add_option("-C", "--chdir", action="store", default=None,
-            help="chdir to the specified directory")
-  parser.add_option("-f", "--format", action="store", default='',
-            help="run tests with the specified formats")
-  parser.add_option("-G", '--gyp_option', action="append", default=[],
-            help="Add -G options to the gyp command line")
-  parser.add_option("-l", "--list", action="store_true",
-            help="list available tests and exit")
-  parser.add_option("-n", "--no-exec", action="store_true",
-            help="no execute, just print the command line")
-  parser.add_option("--passed", action="store_true",
-            help="report passed tests")
-  parser.add_option("--path", action="append", default=[],
-            help="additional $PATH directory")
-  parser.add_option("-q", "--quiet", action="store_true",
-            help="quiet, don't print test command lines")
-  opts, args = parser.parse_args(argv[1:])
-
-  if opts.chdir:
-    os.chdir(opts.chdir)
-
-  if opts.path:
-    extra_path = [os.path.abspath(p) for p in opts.path]
-    extra_path = os.pathsep.join(extra_path)
-    os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
-
-  if not args:
-    if not opts.all:
-      sys.stderr.write('Specify -a to get all tests.\n')
-      return 1
-    args = ['test']
-
-  tests = []
-  for arg in args:
-    if os.path.isdir(arg):
-      tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
-    else:
-      if not is_test_name(os.path.basename(arg)):
-        print >>sys.stderr, arg, 'is not a valid gyp test name.'
-        sys.exit(1)
-      tests.append(arg)
-
-  if opts.list:
-    for test in tests:
-      print test
-    sys.exit(0)
-
-  CommandRunner.verbose = not opts.quiet
-  CommandRunner.active = not opts.no_exec
-  cr = CommandRunner()
-
-  os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
-  if not opts.quiet:
-    sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
-
-  passed = []
-  failed = []
-  no_result = []
-
-  if opts.format:
-    format_list = opts.format.split(',')
-  else:
-    # TODO:  not duplicate this mapping from pylib/gyp/__init__.py
-    format_list = {
-      'aix5':     ['make'],
-      'freebsd7': ['make'],
-      'freebsd8': ['make'],
-      'openbsd5': ['make'],
-      'cygwin':   ['msvs'],
-      'win32':    ['msvs', 'ninja'],
-      'linux2':   ['make', 'ninja'],
-      'linux3':   ['make', 'ninja'],
-      'darwin':   ['make', 'ninja', 'xcode', 'xcode-ninja'],
-    }[sys.platform]
-
-  for format in format_list:
-    os.environ['TESTGYP_FORMAT'] = format
-    if not opts.quiet:
-      sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
-
-    gyp_options = []
-    for option in opts.gyp_option:
-      gyp_options += ['-G', option]
-    if gyp_options and not opts.quiet:
-      sys.stdout.write('Extra Gyp options: %s\n' % gyp_options)
-
-    for test in tests:
-      status = cr.run([sys.executable, test] + gyp_options,
-                      stdout=sys.stdout,
-                      stderr=sys.stderr)
-      if status == 2:
-        no_result.append(test)
-      elif status:
-        failed.append(test)
-      else:
-        passed.append(test)
-
-  if not opts.quiet:
-    def report(description, tests):
-      if tests:
-        if len(tests) == 1:
-          sys.stdout.write("\n%s the following test:\n" % description)
-        else:
-          fmt = "\n%s the following %d tests:\n"
-          sys.stdout.write(fmt % (description, len(tests)))
-        sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
-
-    if opts.passed:
-      report("Passed", passed)
-    report("Failed", failed)
-    report("No result from", no_result)
-
-  if failed:
-    return 1
-  else:
-    return 0
-
-
-if __name__ == "__main__":
-  sys.exit(main())
diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/tools/gyp/pylib/gyp/MSVSNew.py
deleted file mode 100644
index 593f0e5..0000000
--- a/tools/gyp/pylib/gyp/MSVSNew.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""New implementation of Visual Studio project generation."""
-
-import os
-import random
-
-import gyp.common
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for md5
-# and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
-# available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
-# preserving 2.4 compatibility.
-try:
-  import hashlib
-  _new_md5 = hashlib.md5
-except ImportError:
-  import md5
-  _new_md5 = md5.new
-
-
-# Initialize random number generator
-random.seed()
-
-# GUIDs for project types
-ENTRY_TYPE_GUIDS = {
-    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
-    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
-}
-
-#------------------------------------------------------------------------------
-# Helper functions
-
-
-def MakeGuid(name, seed='msvs_new'):
-  """Returns a GUID for the specified target name.
-
-  Args:
-    name: Target name.
-    seed: Seed for MD5 hash.
-  Returns:
-    A GUID-line string calculated from the name and seed.
-
-  This generates something which looks like a GUID, but depends only on the
-  name and seed.  This means the same name/seed will always generate the same
-  GUID, so that projects and solutions which refer to each other can explicitly
-  determine the GUID to refer to explicitly.  It also means that the GUID will
-  not change when the project for a target is rebuilt.
-  """
-  # Calculate a MD5 signature for the seed and name.
-  d = _new_md5(str(seed) + str(name)).hexdigest().upper()
-  # Convert most of the signature to GUID form (discard the rest)
-  guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
-          + '-' + d[20:32] + '}')
-  return guid
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolutionEntry(object):
-  def __cmp__(self, other):
-    # Sort by name then guid (so things are in order on vs2008).
-    return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
-
-
-class MSVSFolder(MSVSSolutionEntry):
-  """Folder in a Visual Studio project or solution."""
-
-  def __init__(self, path, name = None, entries = None,
-               guid = None, items = None):
-    """Initializes the folder.
-
-    Args:
-      path: Full path to the folder.
-      name: Name of the folder.
-      entries: List of folder entries to nest inside this folder.  May contain
-          Folder or Project objects.  May be None, if the folder is empty.
-      guid: GUID to use for folder, if not None.
-      items: List of solution items to include in the folder project.  May be
-          None, if the folder does not directly contain items.
-    """
-    if name:
-      self.name = name
-    else:
-      # Use last layer.
-      self.name = os.path.basename(path)
-
-    self.path = path
-    self.guid = guid
-
-    # Copy passed lists (or set to empty lists)
-    self.entries = sorted(list(entries or []))
-    self.items = list(items or [])
-
-    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
-
-  def get_guid(self):
-    if self.guid is None:
-      # Use consistent guids for folders (so things don't regenerate).
-      self.guid = MakeGuid(self.path, seed='msvs_folder')
-    return self.guid
-
-
-#------------------------------------------------------------------------------
-
-
-class MSVSProject(MSVSSolutionEntry):
-  """Visual Studio project."""
-
-  def __init__(self, path, name = None, dependencies = None, guid = None,
-               spec = None, build_file = None, config_platform_overrides = None,
-               fixpath_prefix = None):
-    """Initializes the project.
-
-    Args:
-      path: Absolute path to the project file.
-      name: Name of project.  If None, the name will be the same as the base
-          name of the project file.
-      dependencies: List of other Project objects this project is dependent
-          upon, if not None.
-      guid: GUID to use for project, if not None.
-      spec: Dictionary specifying how to build this project.
-      build_file: Filename of the .gyp file that the vcproj file comes from.
-      config_platform_overrides: optional dict of configuration platforms to
-          used in place of the default for this target.
-      fixpath_prefix: the path used to adjust the behavior of _fixpath
-    """
-    self.path = path
-    self.guid = guid
-    self.spec = spec
-    self.build_file = build_file
-    # Use project filename if name not specified
-    self.name = name or os.path.splitext(os.path.basename(path))[0]
-
-    # Copy passed lists (or set to empty lists)
-    self.dependencies = list(dependencies or [])
-
-    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
-
-    if config_platform_overrides:
-      self.config_platform_overrides = config_platform_overrides
-    else:
-      self.config_platform_overrides = {}
-    self.fixpath_prefix = fixpath_prefix
-    self.msbuild_toolset = None
-
-  def set_dependencies(self, dependencies):
-    self.dependencies = list(dependencies or [])
-
-  def get_guid(self):
-    if self.guid is None:
-      # Set GUID from path
-      # TODO(rspangler): This is fragile.
-      # 1. We can't just use the project filename sans path, since there could
-      #    be multiple projects with the same base name (for example,
-      #    foo/unittest.vcproj and bar/unittest.vcproj).
-      # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
-      #    GUID is the same whether it's included from base/base.sln or
-      #    foo/bar/baz/baz.sln.
-      # 3. The GUID needs to be the same each time this builder is invoked, so
-      #    that we don't need to rebuild the solution when the project changes.
-      # 4. We should be able to handle pre-built project files by reading the
-      #    GUID from the files.
-      self.guid = MakeGuid(self.name)
-    return self.guid
-
-  def set_msbuild_toolset(self, msbuild_toolset):
-    self.msbuild_toolset = msbuild_toolset
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolution(object):
-  """Visual Studio solution."""
-
-  def __init__(self, path, version, entries=None, variants=None,
-               websiteProperties=True):
-    """Initializes the solution.
-
-    Args:
-      path: Path to solution file.
-      version: Format version to emit.
-      entries: List of entries in solution.  May contain Folder or Project
-          objects.  May be None, if the folder is empty.
-      variants: List of build variant strings.  If none, a default list will
-          be used.
-      websiteProperties: Flag to decide if the website properties section
-          is generated.
-    """
-    self.path = path
-    self.websiteProperties = websiteProperties
-    self.version = version
-
-    # Copy passed lists (or set to empty lists)
-    self.entries = list(entries or [])
-
-    if variants:
-      # Copy passed list
-      self.variants = variants[:]
-    else:
-      # Use default
-      self.variants = ['Debug|Win32', 'Release|Win32']
-    # TODO(rspangler): Need to be able to handle a mapping of solution config
-    # to project config.  Should we be able to handle variants being a dict,
-    # or add a separate variant_map variable?  If it's a dict, we can't
-    # guarantee the order of variants since dict keys aren't ordered.
-
-
-    # TODO(rspangler): Automatically write to disk for now; should delay until
-    # node-evaluation time.
-    self.Write()
-
-
-  def Write(self, writer=gyp.common.WriteOnDiff):
-    """Writes the solution file to disk.
-
-    Raises:
-      IndexError: An entry appears multiple times.
-    """
-    # Walk the entry tree and collect all the folders and projects.
-    all_entries = set()
-    entries_to_check = self.entries[:]
-    while entries_to_check:
-      e = entries_to_check.pop(0)
-
-      # If this entry has been visited, nothing to do.
-      if e in all_entries:
-        continue
-
-      all_entries.add(e)
-
-      # If this is a folder, check its entries too.
-      if isinstance(e, MSVSFolder):
-        entries_to_check += e.entries
-
-    all_entries = sorted(all_entries)
-
-    # Open file and print header
-    f = writer(self.path)
-    f.write('Microsoft Visual Studio Solution File, '
-            'Format Version %s\r\n' % self.version.SolutionVersion())
-    f.write('# %s\r\n' % self.version.Description())
-
-    # Project entries
-    sln_root = os.path.split(self.path)[0]
-    for e in all_entries:
-      relative_path = gyp.common.RelativePath(e.path, sln_root)
-      # msbuild does not accept an empty folder_name.
-      # use '.' in case relative_path is empty.
-      folder_name = relative_path.replace('/', '\\') or '.'
-      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
-          e.entry_type_guid,          # Entry type GUID
-          e.name,                     # Folder name
-          folder_name,                # Folder name (again)
-          e.get_guid(),               # Entry GUID
-      ))
-
-      # TODO(rspangler): Need a way to configure this stuff
-      if self.websiteProperties:
-        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
-                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
-                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
-                '\tEndProjectSection\r\n')
-
-      if isinstance(e, MSVSFolder):
-        if e.items:
-          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
-          for i in e.items:
-            f.write('\t\t%s = %s\r\n' % (i, i))
-          f.write('\tEndProjectSection\r\n')
-
-      if isinstance(e, MSVSProject):
-        if e.dependencies:
-          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
-          for d in e.dependencies:
-            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
-          f.write('\tEndProjectSection\r\n')
-
-      f.write('EndProject\r\n')
-
-    # Global section
-    f.write('Global\r\n')
-
-    # Configurations (variants)
-    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
-    for v in self.variants:
-      f.write('\t\t%s = %s\r\n' % (v, v))
-    f.write('\tEndGlobalSection\r\n')
-
-    # Sort config guids for easier diffing of solution changes.
-    config_guids = []
-    config_guids_overrides = {}
-    for e in all_entries:
-      if isinstance(e, MSVSProject):
-        config_guids.append(e.get_guid())
-        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
-    config_guids.sort()
-
-    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
-    for g in config_guids:
-      for v in self.variants:
-        nv = config_guids_overrides[g].get(v, v)
-        # Pick which project configuration to build for this solution
-        # configuration.
-        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
-            g,              # Project GUID
-            v,              # Solution build configuration
-            nv,             # Project build config for that solution config
-        ))
-
-        # Enable project in this solution configuration.
-        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
-            g,              # Project GUID
-            v,              # Solution build configuration
-            nv,             # Project build config for that solution config
-        ))
-    f.write('\tEndGlobalSection\r\n')
-
-    # TODO(rspangler): Should be able to configure this stuff too (though I've
-    # never seen this be any different)
-    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
-    f.write('\t\tHideSolutionNode = FALSE\r\n')
-    f.write('\tEndGlobalSection\r\n')
-
-    # Folder mappings
-    # Omit this section if there are no folders
-    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
-      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
-      for e in all_entries:
-        if not isinstance(e, MSVSFolder):
-          continue        # Does not apply to projects, only folders
-        for subentry in e.entries:
-          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
-      f.write('\tEndGlobalSection\r\n')
-
-    f.write('EndGlobal\r\n')
-
-    f.close()
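
For reference, the solution writer deleted above was normally driven by the MSVS generator roughly as in the sketch below. This is illustrative only: the FakeVersion stub stands in for a real MSVSVersion object (Write() only needs SolutionVersion() and Description()), the module name gyp.MSVSNew is assumed from upstream gyp, and the entries list is left empty because the MSVSProject/MSVSFolder constructors are defined earlier in the deleted file.

import gyp.MSVSNew as MSVSNew   # assumed module name (upstream gyp layout)

class FakeVersion(object):
  """Stand-in for an MSVSVersion object; the strings are placeholders."""
  def SolutionVersion(self):
    return '11.00'
  def Description(self):
    return 'Visual Studio 2010'

# The constructor writes out.sln immediately (note the self.Write() call in
# __init__ above), using the default Debug|Win32 / Release|Win32 variants.
MSVSNew.MSVSSolution('out.sln', FakeVersion(), entries=[])
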
diff --git a/tools/gyp/pylib/gyp/MSVSProject.py b/tools/gyp/pylib/gyp/MSVSProject.py
deleted file mode 100644
index db1ceed..0000000
--- a/tools/gyp/pylib/gyp/MSVSProject.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-#------------------------------------------------------------------------------
-
-
-class Tool(object):
-  """Visual Studio tool."""
-
-  def __init__(self, name, attrs=None):
-    """Initializes the tool.
-
-    Args:
-      name: Tool name.
-      attrs: Dict of tool attributes; may be None.
-    """
-    self._attrs = attrs or {}
-    self._attrs['Name'] = name
-
-  def _GetSpecification(self):
-    """Creates an element for the tool.
-
-    Returns:
-      A new xml.dom.Element for the tool.
-    """
-    return ['Tool', self._attrs]
-
-class Filter(object):
-  """Visual Studio filter - that is, a virtual folder."""
-
-  def __init__(self, name, contents=None):
-    """Initializes the folder.
-
-    Args:
-      name: Filter (folder) name.
-      contents: List of filenames and/or Filter objects contained.
-    """
-    self.name = name
-    self.contents = list(contents or [])
-
-
-#------------------------------------------------------------------------------
-
-
-class Writer(object):
-  """Visual Studio XML project writer."""
-
-  def __init__(self, project_path, version, name, guid=None, platforms=None):
-    """Initializes the project.
-
-    Args:
-      project_path: Path to the project file.
-      version: Format version to emit.
-      name: Name of the project.
-      guid: GUID to use for project, if not None.
-      platforms: Array of strings, the supported platforms.  If None, ['Win32'].
-    """
-    self.project_path = project_path
-    self.version = version
-    self.name = name
-    self.guid = guid
-
-    # Default to Win32 for platforms.
-    if not platforms:
-      platforms = ['Win32']
-
-    # Initialize the specifications of the various sections.
-    self.platform_section = ['Platforms']
-    for platform in platforms:
-      self.platform_section.append(['Platform', {'Name': platform}])
-    self.tool_files_section = ['ToolFiles']
-    self.configurations_section = ['Configurations']
-    self.files_section = ['Files']
-
-    # Keep a dict keyed on filename to speed up access.
-    self.files_dict = dict()
-
-  def AddToolFile(self, path):
-    """Adds a tool file to the project.
-
-    Args:
-      path: Relative path from project to tool file.
-    """
-    self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
-
-  def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
-    """Returns the specification for a configuration.
-
-    Args:
-      config_type: Type of configuration node.
-      config_name: Configuration name.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
-    Returns:
-      The specification for the configuration, as a list usable by easy_xml.
-    """
-    # Handle defaults
-    if not attrs:
-      attrs = {}
-    if not tools:
-      tools = []
-
-    # Add configuration node and its attributes
-    node_attrs = attrs.copy()
-    node_attrs['Name'] = config_name
-    specification = [config_type, node_attrs]
-
-    # Add tool nodes and their attributes
-    if tools:
-      for t in tools:
-        if isinstance(t, Tool):
-          specification.append(t._GetSpecification())
-        else:
-          specification.append(Tool(t)._GetSpecification())
-    return specification
-
-
-  def AddConfig(self, name, attrs=None, tools=None):
-    """Adds a configuration to the project.
-
-    Args:
-      name: Configuration name.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
-    """
-    spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
-    self.configurations_section.append(spec)
-
-  def _AddFilesToNode(self, parent, files):
-    """Adds files and/or filters to the parent node.
-
-    Args:
-      parent: Destination node
-      files: A list of Filter objects and/or relative paths to files.
-
-    Will call itself recursively, if the files list contains Filter objects.
-    """
-    for f in files:
-      if isinstance(f, Filter):
-        node = ['Filter', {'Name': f.name}]
-        self._AddFilesToNode(node, f.contents)
-      else:
-        node = ['File', {'RelativePath': f}]
-        self.files_dict[f] = node
-      parent.append(node)
-
-  def AddFiles(self, files):
-    """Adds files to the project.
-
-    Args:
-      files: A list of Filter objects and/or relative paths to files.
-
-    This makes a copy of the file/filter tree at the time of this call.  If you
-    later add files to a Filter object which was passed into a previous call
-    to AddFiles(), it will not be reflected in this project.
-    """
-    self._AddFilesToNode(self.files_section, files)
-    # TODO(rspangler) This also doesn't handle adding files to an existing
-    # filter.  That is, it doesn't merge the trees.
-
-  def AddFileConfig(self, path, config, attrs=None, tools=None):
-    """Adds a configuration to a file.
-
-    Args:
-      path: Relative path to the file.
-      config: Name of configuration to add.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
-
-    Raises:
-      ValueError: Relative path does not match any file added via AddFiles().
-    """
-    # Find the file node with the right relative path
-    parent = self.files_dict.get(path)
-    if not parent:
-      raise ValueError('AddFileConfig: file "%s" not in project.' % path)
-
-    # Add the config to the file node
-    spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
-                                         tools)
-    parent.append(spec)
-
-  def WriteIfChanged(self):
-    """Writes the project file."""
-    # First create XML content definition
-    content = [
-        'VisualStudioProject',
-        {'ProjectType': 'Visual C++',
-         'Version': self.version.ProjectVersion(),
-         'Name': self.name,
-         'ProjectGUID': self.guid,
-         'RootNamespace': self.name,
-         'Keyword': 'Win32Proj'
-        },
-        self.platform_section,
-        self.tool_files_section,
-        self.configurations_section,
-        ['References'],  # empty section
-        self.files_section,
-        ['Globals']  # empty section
-    ]
-    easy_xml.WriteXmlIfChanged(content, self.project_path,
-                               encoding="Windows-1252")
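
For context, a typical use of the Writer deleted above looks roughly like the sketch below. It is illustrative only: the project name, file paths, GUID, and the 'ConfigurationType' attribute value are placeholders, and the FakeVersion stub stands in for a real MSVSVersion object (WriteIfChanged() only needs ProjectVersion()).

import gyp.MSVSProject as MSVSProject

class FakeVersion(object):
  """Stand-in for an MSVSVersion object; the version string is a placeholder."""
  def ProjectVersion(self):
    return '9.00'

writer = MSVSProject.Writer('foo.vcproj', FakeVersion(), 'foo',
                            guid='{00000000-0000-0000-0000-000000000000}')
writer.AddConfig('Debug|Win32',
                 attrs={'ConfigurationType': '1'},   # placeholder attribute
                 tools=[MSVSProject.Tool('VCCLCompilerTool',
                                         {'Optimization': '0'})])
# Files may be plain relative paths or nested inside Filter (virtual folder)
# objects; AddFileConfig must use the same path string passed to AddFiles.
writer.AddFiles([MSVSProject.Filter('src', ['src/foo.cc']), 'README'])
writer.AddFileConfig('src/foo.cc', 'Debug|Win32',
                     tools=['VCCLCompilerTool'])
writer.WriteIfChanged()
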
diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py
deleted file mode 100644
index 8ae1918..0000000
--- a/tools/gyp/pylib/gyp/MSVSSettings.py
+++ /dev/null
@@ -1,1097 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-r"""Code to validate and convert settings of the Microsoft build tools.
-
-This file contains code to validate and convert settings of the Microsoft
-build tools.  The function ConvertToMSBuildSettings(), ValidateMSVSSettings(),
-and ValidateMSBuildSettings() are the entry points.
-
-This file was created by comparing the projects created by Visual Studio 2008
-and Visual Studio 2010 for all available settings through the user interface.
-The MSBuild schemas were also considered.  They are typically found in the
-MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
-"""
-
-import sys
-import re
-
-# Dictionaries of settings validators. The key is the tool name, the value is
-# a dictionary mapping setting names to validation functions.
-_msvs_validators = {}
-_msbuild_validators = {}
-
-
-# A dictionary of settings converters. The key is the tool name, the value is
-# a dictionary mapping setting names to conversion functions.
-_msvs_to_msbuild_converters = {}
-
-
-# Tool name mapping from MSVS to MSBuild.
-_msbuild_name_of_tool = {}
-
-
-class _Tool(object):
-  """Represents a tool used by MSVS or MSBuild.
-
-  Attributes:
-      msvs_name: The name of the tool in MSVS.
-      msbuild_name: The name of the tool in MSBuild.
-  """
-
-  def __init__(self, msvs_name, msbuild_name):
-    self.msvs_name = msvs_name
-    self.msbuild_name = msbuild_name
-
-
-def _AddTool(tool):
-  """Adds a tool to the four dictionaries used to process settings.
-
-  This only defines the tool.  Each setting also needs to be added.
-
-  Args:
-    tool: The _Tool object to be added.
-  """
-  _msvs_validators[tool.msvs_name] = {}
-  _msbuild_validators[tool.msbuild_name] = {}
-  _msvs_to_msbuild_converters[tool.msvs_name] = {}
-  _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
-
-
-def _GetMSBuildToolSettings(msbuild_settings, tool):
-  """Returns an MSBuild tool dictionary.  Creates it if needed."""
-  return msbuild_settings.setdefault(tool.msbuild_name, {})
-
-
-class _Type(object):
-  """Type of settings (Base class)."""
-
-  def ValidateMSVS(self, value):
-    """Verifies that the value is legal for MSVS.
-
-    Args:
-      value: the value to check for this type.
-
-    Raises:
-      ValueError if value is not valid for MSVS.
-    """
-
-  def ValidateMSBuild(self, value):
-    """Verifies that the value is legal for MSBuild.
-
-    Args:
-      value: the value to check for this type.
-
-    Raises:
-      ValueError if value is not valid for MSBuild.
-    """
-
-  def ConvertToMSBuild(self, value):
-    """Returns the MSBuild equivalent of the MSVS value given.
-
-    Args:
-      value: the MSVS value to convert.
-
-    Returns:
-      the MSBuild equivalent.
-
-    Raises:
-      ValueError if value is not valid.
-    """
-    return value
-
-
-class _String(_Type):
-  """A setting that's just a string."""
-
-  def ValidateMSVS(self, value):
-    if not isinstance(value, basestring):
-      raise ValueError('expected string; got %r' % value)
-
-  def ValidateMSBuild(self, value):
-    if not isinstance(value, basestring):
-      raise ValueError('expected string; got %r' % value)
-
-  def ConvertToMSBuild(self, value):
-    # Convert the macros
-    return ConvertVCMacrosToMSBuild(value)
-
-
-class _StringList(_Type):
-  """A settings that's a list of strings."""
-
-  def ValidateMSVS(self, value):
-    if not isinstance(value, basestring) and not isinstance(value, list):
-      raise ValueError('expected string list; got %r' % value)
-
-  def ValidateMSBuild(self, value):
-    if not isinstance(value, basestring) and not isinstance(value, list):
-      raise ValueError('expected string list; got %r' % value)
-
-  def ConvertToMSBuild(self, value):
-    # Convert the macros
-    if isinstance(value, list):
-      return [ConvertVCMacrosToMSBuild(i) for i in value]
-    else:
-      return ConvertVCMacrosToMSBuild(value)
-
-
-class _Boolean(_Type):
-  """Boolean settings, can have the values 'false' or 'true'."""
-
-  def _Validate(self, value):
-    if value != 'true' and value != 'false':
-      raise ValueError('expected bool; got %r' % value)
-
-  def ValidateMSVS(self, value):
-    self._Validate(value)
-
-  def ValidateMSBuild(self, value):
-    self._Validate(value)
-
-  def ConvertToMSBuild(self, value):
-    self._Validate(value)
-    return value
-
-
-class _Integer(_Type):
-  """Integer settings."""
-
-  def __init__(self, msbuild_base=10):
-    _Type.__init__(self)
-    self._msbuild_base = msbuild_base
-
-  def ValidateMSVS(self, value):
-    # Try to convert, this will raise ValueError if invalid.
-    self.ConvertToMSBuild(value)
-
-  def ValidateMSBuild(self, value):
-    # Try to convert, this will raise ValueError if invalid.
-    int(value, self._msbuild_base)
-
-  def ConvertToMSBuild(self, value):
-    msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x'
-    return msbuild_format % int(value)
-
-
-class _Enumeration(_Type):
-  """Type of settings that is an enumeration.
-
-  In MSVS, the values are indexes like '0', '1', and '2'.
-  MSBuild uses text labels that are more representative, like 'Win32'.
-
-  Constructor args:
-    label_list: an array of MSBuild labels that correspond to the MSVS index.
-        In the rare cases where MSVS has skipped an index value, None is
-        used in the array to indicate the unused spot.
-    new: an array of labels that are new to MSBuild.
-  """
-
-  def __init__(self, label_list, new=None):
-    _Type.__init__(self)
-    self._label_list = label_list
-    self._msbuild_values = set(value for value in label_list
-                               if value is not None)
-    if new is not None:
-      self._msbuild_values.update(new)
-
-  def ValidateMSVS(self, value):
-    # Try to convert.  It will raise an exception if not valid.
-    self.ConvertToMSBuild(value)
-
-  def ValidateMSBuild(self, value):
-    if value not in self._msbuild_values:
-      raise ValueError('unrecognized enumerated value %s' % value)
-
-  def ConvertToMSBuild(self, value):
-    index = int(value)
-    if index < 0 or index >= len(self._label_list):
-      raise ValueError('index value (%d) not in expected range [0, %d)' %
-                       (index, len(self._label_list)))
-    label = self._label_list[index]
-    if label is None:
-      raise ValueError('converted value for %s not specified.' % value)
-    return label
-
-
-# Instantiate the various generic types.
-_boolean = _Boolean()
-_integer = _Integer()
-# For now, we don't do any special validation on these types:
-_string = _String()
-_file_name = _String()
-_folder_name = _String()
-_file_list = _StringList()
-_folder_list = _StringList()
-_string_list = _StringList()
-# Some boolean settings went from numerical values to boolean.  The
-# mapping is 0: default, 1: false, 2: true.
-_newly_boolean = _Enumeration(['', 'false', 'true'])
-
-
-def _Same(tool, name, setting_type):
-  """Defines a setting that has the same name in MSVS and MSBuild.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
-  _Renamed(tool, name, name, setting_type)
-
-
-def _Renamed(tool, msvs_name, msbuild_name, setting_type):
-  """Defines a setting for which the name has changed.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_name: the name of the MSVS setting.
-    msbuild_name: the name of the MSBuild setting.
-    setting_type: the type of this setting.
-  """
-
-  def _Translate(value, msbuild_settings):
-    msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
-    msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
-
-  _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
-  _msbuild_validators[tool.msbuild_name][msbuild_name] = (
-      setting_type.ValidateMSBuild)
-  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
-  _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
-                   setting_type)
-
-
-def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
-                     msbuild_settings_name, setting_type):
-  """Defines a setting that may have moved to a new section.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_settings_name: the MSVS name of the setting.
-    msbuild_tool_name: the name of the MSBuild tool to place the setting under.
-    msbuild_settings_name: the MSBuild name of the setting.
-    setting_type: the type of this setting.
-  """
-
-  def _Translate(value, msbuild_settings):
-    tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
-    tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
-
-  _msvs_validators[tool.msvs_name][msvs_settings_name] = (
-      setting_type.ValidateMSVS)
-  validator = setting_type.ValidateMSBuild
-  _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
-  _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
-
-
-def _MSVSOnly(tool, name, setting_type):
-  """Defines a setting that is only found in MSVS.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
-
-  def _Translate(unused_value, unused_msbuild_settings):
-    # Since this is for MSVS only settings, no translation will happen.
-    pass
-
-  _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
-  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _MSBuildOnly(tool, name, setting_type):
-  """Defines a setting that is only found in MSBuild.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
-
-  def _Translate(value, msbuild_settings):
-    # Let msbuild-only properties get translated as-is from msvs_settings.
-    tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
-    tool_settings[name] = value
-
-  _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
-  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _ConvertedToAdditionalOption(tool, msvs_name, flag):
-  """Defines a setting that's handled via a command line option in MSBuild.
-
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_name: the name of the MSVS setting that, when 'true', becomes a flag.
-    flag: the flag to append to the end of AdditionalOptions.
-  """
-
-  def _Translate(value, msbuild_settings):
-    if value == 'true':
-      tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
-      if 'AdditionalOptions' in tool_settings:
-        new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
-      else:
-        new_flags = flag
-      tool_settings['AdditionalOptions'] = new_flags
-  _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
-  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-def _CustomGeneratePreprocessedFile(tool, msvs_name):
-  def _Translate(value, msbuild_settings):
-    tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
-    if value == '0':
-      tool_settings['PreprocessToFile'] = 'false'
-      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
-    elif value == '1':  # /P
-      tool_settings['PreprocessToFile'] = 'true'
-      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
-    elif value == '2':  # /EP /P
-      tool_settings['PreprocessToFile'] = 'true'
-      tool_settings['PreprocessSuppressLineNumbers'] = 'true'
-    else:
-      raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
-  # Create a bogus validator that looks for '0', '1', or '2'
-  msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
-  _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
-  msbuild_validator = _boolean.ValidateMSBuild
-  msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
-  msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
-  msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
-  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
-fix_vc_macro_slashes_regex = re.compile(
-  r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
-)
-
-# Regular expression to detect keys that were generated by exclusion lists
-_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
-
-
-def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
-  """Verify that 'setting' is valid if it is generated from an exclusion list.
-
-  If the setting appears to be generated from an exclusion list, the root name
-  is checked.
-
-  Args:
-      setting:   A string that is the setting name to validate
-      settings:  A dictionary where the keys are valid settings
-      error_msg: The message to emit in the event of error
-      stderr:    The stream receiving the error messages.
-  """
-  # This may be unrecognized because it's an exclusion list. If the
-  # setting name has the _excluded suffix, then check the root name.
-  unrecognized = True
-  m = re.match(_EXCLUDED_SUFFIX_RE, setting)
-  if m:
-    root_setting = m.group(1)
-    unrecognized = root_setting not in settings
-
-  if unrecognized:
-    # We don't know this setting. Give a warning.
-    print >> stderr, error_msg
-
-
-def FixVCMacroSlashes(s):
-  """Replace macros which have excessive following slashes.
-
-  These macros are known to have a built-in trailing slash. Furthermore, many
-  scripts hiccup on processing paths with extra slashes in the middle.
-
-  This list is probably not exhaustive.  Add as needed.
-  """
-  if '$' in s:
-    s = fix_vc_macro_slashes_regex.sub(r'\1', s)
-  return s
-
-
-def ConvertVCMacrosToMSBuild(s):
-  """Convert the the MSVS macros found in the string to the MSBuild equivalent.
-
-  This list is probably not exhaustive.  Add as needed.
-  """
-  if '$' in s:
-    replace_map = {
-        '$(ConfigurationName)': '$(Configuration)',
-        '$(InputDir)': '%(RelativeDir)',
-        '$(InputExt)': '%(Extension)',
-        '$(InputFileName)': '%(Filename)%(Extension)',
-        '$(InputName)': '%(Filename)',
-        '$(InputPath)': '%(Identity)',
-        '$(ParentName)': '$(ProjectFileName)',
-        '$(PlatformName)': '$(Platform)',
-        '$(SafeInputName)': '%(Filename)',
-    }
-    for old, new in replace_map.iteritems():
-      s = s.replace(old, new)
-    s = FixVCMacroSlashes(s)
-  return s
-
-
-def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
-  """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
-
-  Args:
-      msvs_settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-
-  Returns:
-      A dictionary of MSBuild settings.  The key is either the MSBuild tool name
-      or the empty string (for the global settings).  The values are themselves
-      dictionaries of settings and their values.
-  """
-  msbuild_settings = {}
-  for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
-    if msvs_tool_name in _msvs_to_msbuild_converters:
-      msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
-      for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
-        if msvs_setting in msvs_tool:
-          # Invoke the translation function.
-          try:
-            msvs_tool[msvs_setting](msvs_value, msbuild_settings)
-          except ValueError, e:
-            print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
-                              '%s' % (msvs_tool_name, msvs_setting, e))
-        else:
-          _ValidateExclusionSetting(msvs_setting,
-                                    msvs_tool,
-                                    ('Warning: unrecognized setting %s/%s '
-                                     'while converting to MSBuild.' %
-                                     (msvs_tool_name, msvs_setting)),
-                                    stderr)
-    else:
-      print >> stderr, ('Warning: unrecognized tool %s while converting to '
-                        'MSBuild.' % msvs_tool_name)
-  return msbuild_settings
-
-
-def ValidateMSVSSettings(settings, stderr=sys.stderr):
-  """Validates that the names of the settings are valid for MSVS.
-
-  Args:
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
-  _ValidateSettings(_msvs_validators, settings, stderr)
-
-
-def ValidateMSBuildSettings(settings, stderr=sys.stderr):
-  """Validates that the names of the settings are valid for MSBuild.
-
-  Args:
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
-  _ValidateSettings(_msbuild_validators, settings, stderr)
-
-
-def _ValidateSettings(validators, settings, stderr):
-  """Validates that the settings are valid for MSBuild or MSVS.
-
-  We currently only validate the names of the settings, not their values.
-
-  Args:
-      validators: A dictionary of tools and their validators.
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
-  for tool_name in settings:
-    if tool_name in validators:
-      tool_validators = validators[tool_name]
-      for setting, value in settings[tool_name].iteritems():
-        if setting in tool_validators:
-          try:
-            tool_validators[setting](value)
-          except ValueError, e:
-            print >> stderr, ('Warning: for %s/%s, %s' %
-                              (tool_name, setting, e))
-        else:
-          _ValidateExclusionSetting(setting,
-                                    tool_validators,
-                                    ('Warning: unrecognized setting %s/%s' %
-                                     (tool_name, setting)),
-                                    stderr)
-
-    else:
-      print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
-
-
-# MSVS and MSBuild names of the tools.
-_compile = _Tool('VCCLCompilerTool', 'ClCompile')
-_link = _Tool('VCLinkerTool', 'Link')
-_midl = _Tool('VCMIDLTool', 'Midl')
-_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
-_lib = _Tool('VCLibrarianTool', 'Lib')
-_manifest = _Tool('VCManifestTool', 'Manifest')
-_masm = _Tool('MASM', 'MASM')
-
-
-_AddTool(_compile)
-_AddTool(_link)
-_AddTool(_midl)
-_AddTool(_rc)
-_AddTool(_lib)
-_AddTool(_manifest)
-_AddTool(_masm)
-# Add sections only found in the MSBuild settings.
-_msbuild_validators[''] = {}
-_msbuild_validators['ProjectReference'] = {}
-_msbuild_validators['ManifestResourceCompile'] = {}
-
-# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
-# ClCompile in MSBuild.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
-# the schema of the MSBuild ClCompile settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_compile, 'AdditionalIncludeDirectories', _folder_list)  # /I
-_Same(_compile, 'AdditionalOptions', _string_list)
-_Same(_compile, 'AdditionalUsingDirectories', _folder_list)  # /AI
-_Same(_compile, 'AssemblerListingLocation', _file_name)  # /Fa
-_Same(_compile, 'BrowseInformationFile', _file_name)
-_Same(_compile, 'BufferSecurityCheck', _boolean)  # /GS
-_Same(_compile, 'DisableLanguageExtensions', _boolean)  # /Za
-_Same(_compile, 'DisableSpecificWarnings', _string_list)  # /wd
-_Same(_compile, 'EnableFiberSafeOptimizations', _boolean)  # /GT
-_Same(_compile, 'EnablePREfast', _boolean)  # /analyze Visible='false'
-_Same(_compile, 'ExpandAttributedSource', _boolean)  # /Fx
-_Same(_compile, 'FloatingPointExceptions', _boolean)  # /fp:except
-_Same(_compile, 'ForceConformanceInForLoopScope', _boolean)  # /Zc:forScope
-_Same(_compile, 'ForcedIncludeFiles', _file_list)  # /FI
-_Same(_compile, 'ForcedUsingFiles', _file_list)  # /FU
-_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean)  # /doc
-_Same(_compile, 'IgnoreStandardIncludePath', _boolean)  # /X
-_Same(_compile, 'MinimalRebuild', _boolean)  # /Gm
-_Same(_compile, 'OmitDefaultLibName', _boolean)  # /Zl
-_Same(_compile, 'OmitFramePointers', _boolean)  # /Oy
-_Same(_compile, 'PreprocessorDefinitions', _string_list)  # /D
-_Same(_compile, 'ProgramDataBaseFileName', _file_name)  # /Fd
-_Same(_compile, 'RuntimeTypeInfo', _boolean)  # /GR
-_Same(_compile, 'ShowIncludes', _boolean)  # /showIncludes
-_Same(_compile, 'SmallerTypeCheck', _boolean)  # /RTCc
-_Same(_compile, 'StringPooling', _boolean)  # /GF
-_Same(_compile, 'SuppressStartupBanner', _boolean)  # /nologo
-_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean)  # /Zc:wchar_t
-_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean)  # /u
-_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list)  # /U
-_Same(_compile, 'UseFullPaths', _boolean)  # /FC
-_Same(_compile, 'WholeProgramOptimization', _boolean)  # /GL
-_Same(_compile, 'XMLDocumentationFileName', _file_name)
-_Same(_compile, 'CompileAsWinRT', _boolean)  # /ZW
-
-_Same(_compile, 'AssemblerOutput',
-      _Enumeration(['NoListing',
-                    'AssemblyCode',  # /FA
-                    'All',  # /FAcs
-                    'AssemblyAndMachineCode',  # /FAc
-                    'AssemblyAndSourceCode']))  # /FAs
-_Same(_compile, 'BasicRuntimeChecks',
-      _Enumeration(['Default',
-                    'StackFrameRuntimeCheck',  # /RTCs
-                    'UninitializedLocalUsageCheck',  # /RTCu
-                    'EnableFastChecks']))  # /RTC1
-_Same(_compile, 'BrowseInformation',
-      _Enumeration(['false',
-                    'true',  # /FR
-                    'true']))  # /Fr
-_Same(_compile, 'CallingConvention',
-      _Enumeration(['Cdecl',  # /Gd
-                    'FastCall',  # /Gr
-                    'StdCall',  # /Gz
-                    'VectorCall']))  # /Gv
-_Same(_compile, 'CompileAs',
-      _Enumeration(['Default',
-                    'CompileAsC',  # /TC
-                    'CompileAsCpp']))  # /TP
-_Same(_compile, 'DebugInformationFormat',
-      _Enumeration(['',  # Disabled
-                    'OldStyle',  # /Z7
-                    None,
-                    'ProgramDatabase',  # /Zi
-                    'EditAndContinue']))  # /ZI
-_Same(_compile, 'EnableEnhancedInstructionSet',
-      _Enumeration(['NotSet',
-                    'StreamingSIMDExtensions',  # /arch:SSE
-                    'StreamingSIMDExtensions2',  # /arch:SSE2
-                    'AdvancedVectorExtensions',  # /arch:AVX (vs2012+)
-                    'NoExtensions',  # /arch:IA32 (vs2012+)
-                    # This one only exists in the new msbuild format.
-                    'AdvancedVectorExtensions2',  # /arch:AVX2 (vs2013r2+)
-                    ]))
-_Same(_compile, 'ErrorReporting',
-      _Enumeration(['None',  # /errorReport:none
-                    'Prompt',  # /errorReport:prompt
-                    'Queue'],  # /errorReport:queue
-                   new=['Send']))  # /errorReport:send"
-_Same(_compile, 'ExceptionHandling',
-      _Enumeration(['false',
-                    'Sync',  # /EHsc
-                    'Async'],  # /EHa
-                   new=['SyncCThrow']))  # /EHs
-_Same(_compile, 'FavorSizeOrSpeed',
-      _Enumeration(['Neither',
-                    'Speed',  # /Ot
-                    'Size']))  # /Os
-_Same(_compile, 'FloatingPointModel',
-      _Enumeration(['Precise',  # /fp:precise
-                    'Strict',  # /fp:strict
-                    'Fast']))  # /fp:fast
-_Same(_compile, 'InlineFunctionExpansion',
-      _Enumeration(['Default',
-                    'OnlyExplicitInline',  # /Ob1
-                    'AnySuitable'],  # /Ob2
-                   new=['Disabled']))  # /Ob0
-_Same(_compile, 'Optimization',
-      _Enumeration(['Disabled',  # /Od
-                    'MinSpace',  # /O1
-                    'MaxSpeed',  # /O2
-                    'Full']))  # /Ox
-_Same(_compile, 'RuntimeLibrary',
-      _Enumeration(['MultiThreaded',  # /MT
-                    'MultiThreadedDebug',  # /MTd
-                    'MultiThreadedDLL',  # /MD
-                    'MultiThreadedDebugDLL']))  # /MDd
-_Same(_compile, 'StructMemberAlignment',
-      _Enumeration(['Default',
-                    '1Byte',  # /Zp1
-                    '2Bytes',  # /Zp2
-                    '4Bytes',  # /Zp4
-                    '8Bytes',  # /Zp8
-                    '16Bytes']))  # /Zp16
-_Same(_compile, 'WarningLevel',
-      _Enumeration(['TurnOffAllWarnings',  # /W0
-                    'Level1',  # /W1
-                    'Level2',  # /W2
-                    'Level3',  # /W3
-                    'Level4'],  # /W4
-                   new=['EnableAllWarnings']))  # /Wall
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
-         _boolean)  # /Gy
-_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
-         _boolean)  # /Oi
-_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean)  # /C
-_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name)  # /Fo
-_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean)  # /openmp
-_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
-         _file_name)  # Used with /Yc and /Yu
-_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
-         _file_name)  # /Fp
-_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
-         _Enumeration(['NotUsing',  # VS recognized '' for this value too.
-                       'Create',   # /Yc
-                       'Use']))  # /Yu
-_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean)  # /WX
-
-_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
-_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_compile, 'CompileAsManaged',
-             _Enumeration([], new=['false',
-                                   'true']))  # /clr
-_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean)  # /hotpatch
-_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean)  # /MP
-_MSBuildOnly(_compile, 'PreprocessOutputPath', _string)  # /Fi
-_MSBuildOnly(_compile, 'ProcessorNumber', _integer)  # the number of processors
-_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list)  # /we
-_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean)  # /FAu
-
-# Defines a setting that needs very customized processing
-_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
-
-
-# Directives for converting MSVS VCLinkerTool to MSBuild Link.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
-# the schema of the MSBuild Link settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_link, 'AdditionalDependencies', _file_list)
-_Same(_link, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
-#  /MANIFESTDEPENDENCY:
-_Same(_link, 'AdditionalManifestDependencies', _file_list)
-_Same(_link, 'AdditionalOptions', _string_list)
-_Same(_link, 'AddModuleNamesToAssembly', _file_list)  # /ASSEMBLYMODULE
-_Same(_link, 'AllowIsolation', _boolean)  # /ALLOWISOLATION
-_Same(_link, 'AssemblyLinkResource', _file_list)  # /ASSEMBLYLINKRESOURCE
-_Same(_link, 'BaseAddress', _string)  # /BASE
-_Same(_link, 'CLRUnmanagedCodeCheck', _boolean)  # /CLRUNMANAGEDCODECHECK
-_Same(_link, 'DelayLoadDLLs', _file_list)  # /DELAYLOAD
-_Same(_link, 'DelaySign', _boolean)  # /DELAYSIGN
-_Same(_link, 'EmbedManagedResourceFile', _file_list)  # /ASSEMBLYRESOURCE
-_Same(_link, 'EnableUAC', _boolean)  # /MANIFESTUAC
-_Same(_link, 'EntryPointSymbol', _string)  # /ENTRY
-_Same(_link, 'ForceSymbolReferences', _file_list)  # /INCLUDE
-_Same(_link, 'FunctionOrder', _file_name)  # /ORDER
-_Same(_link, 'GenerateDebugInformation', _boolean)  # /DEBUG
-_Same(_link, 'GenerateMapFile', _boolean)  # /MAP
-_Same(_link, 'HeapCommitSize', _string)
-_Same(_link, 'HeapReserveSize', _string)  # /HEAP
-_Same(_link, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
-_Same(_link, 'IgnoreEmbeddedIDL', _boolean)  # /IGNOREIDL
-_Same(_link, 'ImportLibrary', _file_name)  # /IMPLIB
-_Same(_link, 'KeyContainer', _file_name)  # /KEYCONTAINER
-_Same(_link, 'KeyFile', _file_name)  # /KEYFILE
-_Same(_link, 'ManifestFile', _file_name)  # /ManifestFile
-_Same(_link, 'MapExports', _boolean)  # /MAPINFO:EXPORTS
-_Same(_link, 'MapFileName', _file_name)
-_Same(_link, 'MergedIDLBaseFileName', _file_name)  # /IDLOUT
-_Same(_link, 'MergeSections', _string)  # /MERGE
-_Same(_link, 'MidlCommandFile', _file_name)  # /MIDL
-_Same(_link, 'ModuleDefinitionFile', _file_name)  # /DEF
-_Same(_link, 'OutputFile', _file_name)  # /OUT
-_Same(_link, 'PerUserRedirection', _boolean)
-_Same(_link, 'Profile', _boolean)  # /PROFILE
-_Same(_link, 'ProfileGuidedDatabase', _file_name)  # /PGD
-_Same(_link, 'ProgramDatabaseFile', _file_name)  # /PDB
-_Same(_link, 'RegisterOutput', _boolean)
-_Same(_link, 'SetChecksum', _boolean)  # /RELEASE
-_Same(_link, 'StackCommitSize', _string)
-_Same(_link, 'StackReserveSize', _string)  # /STACK
-_Same(_link, 'StripPrivateSymbols', _file_name)  # /PDBSTRIPPED
-_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean)  # /DELAY:UNLOAD
-_Same(_link, 'SuppressStartupBanner', _boolean)  # /NOLOGO
-_Same(_link, 'SwapRunFromCD', _boolean)  # /SWAPRUN:CD
-_Same(_link, 'TurnOffAssemblyGeneration', _boolean)  # /NOASSEMBLY
-_Same(_link, 'TypeLibraryFile', _file_name)  # /TLBOUT
-_Same(_link, 'TypeLibraryResourceID', _integer)  # /TLBID
-_Same(_link, 'UACUIAccess', _boolean)  # /uiAccess='true'
-_Same(_link, 'Version', _string)  # /VERSION
-
-_Same(_link, 'EnableCOMDATFolding', _newly_boolean)  # /OPT:ICF
-_Same(_link, 'FixedBaseAddress', _newly_boolean)  # /FIXED
-_Same(_link, 'LargeAddressAware', _newly_boolean)  # /LARGEADDRESSAWARE
-_Same(_link, 'OptimizeReferences', _newly_boolean)  # /OPT:REF
-_Same(_link, 'RandomizedBaseAddress', _newly_boolean)  # /DYNAMICBASE
-_Same(_link, 'TerminalServerAware', _newly_boolean)  # /TSAWARE
-
-_subsystem_enumeration = _Enumeration(
-    ['NotSet',
-     'Console',  # /SUBSYSTEM:CONSOLE
-     'Windows',  # /SUBSYSTEM:WINDOWS
-     'Native',  # /SUBSYSTEM:NATIVE
-     'EFI Application',  # /SUBSYSTEM:EFI_APPLICATION
-     'EFI Boot Service Driver',  # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
-     'EFI ROM',  # /SUBSYSTEM:EFI_ROM
-     'EFI Runtime',  # /SUBSYSTEM:EFI_RUNTIME_DRIVER
-     'WindowsCE'],  # /SUBSYSTEM:WINDOWSCE
-    new=['POSIX'])  # /SUBSYSTEM:POSIX
-
-_target_machine_enumeration = _Enumeration(
-    ['NotSet',
-     'MachineX86',  # /MACHINE:X86
-     None,
-     'MachineARM',  # /MACHINE:ARM
-     'MachineEBC',  # /MACHINE:EBC
-     'MachineIA64',  # /MACHINE:IA64
-     None,
-     'MachineMIPS',  # /MACHINE:MIPS
-     'MachineMIPS16',  # /MACHINE:MIPS16
-     'MachineMIPSFPU',  # /MACHINE:MIPSFPU
-     'MachineMIPSFPU16',  # /MACHINE:MIPSFPU16
-     None,
-     None,
-     None,
-     'MachineSH4',  # /MACHINE:SH4
-     None,
-     'MachineTHUMB',  # /MACHINE:THUMB
-     'MachineX64'])  # /MACHINE:X64
-
-_Same(_link, 'AssemblyDebug',
-      _Enumeration(['',
-                    'true',  # /ASSEMBLYDEBUG
-                    'false']))  # /ASSEMBLYDEBUG:DISABLE
-_Same(_link, 'CLRImageType',
-      _Enumeration(['Default',
-                    'ForceIJWImage',  # /CLRIMAGETYPE:IJW
-                    'ForcePureILImage',  # /Switch="CLRIMAGETYPE:PURE
-                    'ForceSafeILImage']))  # /Switch="CLRIMAGETYPE:SAFE
-_Same(_link, 'CLRThreadAttribute',
-      _Enumeration(['DefaultThreadingAttribute',  # /CLRTHREADATTRIBUTE:NONE
-                    'MTAThreadingAttribute',  # /CLRTHREADATTRIBUTE:MTA
-                    'STAThreadingAttribute']))  # /CLRTHREADATTRIBUTE:STA
-_Same(_link, 'DataExecutionPrevention',
-      _Enumeration(['',
-                    'false',  # /NXCOMPAT:NO
-                    'true']))  # /NXCOMPAT
-_Same(_link, 'Driver',
-      _Enumeration(['NotSet',
-                    'Driver',  # /Driver
-                    'UpOnly',  # /DRIVER:UPONLY
-                    'WDM']))  # /DRIVER:WDM
-_Same(_link, 'LinkTimeCodeGeneration',
-      _Enumeration(['Default',
-                    'UseLinkTimeCodeGeneration',  # /LTCG
-                    'PGInstrument',  # /LTCG:PGInstrument
-                    'PGOptimization',  # /LTCG:PGOptimize
-                    'PGUpdate']))  # /LTCG:PGUpdate
-_Same(_link, 'ShowProgress',
-      _Enumeration(['NotSet',
-                    'LinkVerbose',  # /VERBOSE
-                    'LinkVerboseLib'],  # /VERBOSE:Lib
-                   new=['LinkVerboseICF',  # /VERBOSE:ICF
-                        'LinkVerboseREF',  # /VERBOSE:REF
-                        'LinkVerboseSAFESEH',  # /VERBOSE:SAFESEH
-                        'LinkVerboseCLR']))  # /VERBOSE:CLR
-_Same(_link, 'SubSystem', _subsystem_enumeration)
-_Same(_link, 'TargetMachine', _target_machine_enumeration)
-_Same(_link, 'UACExecutionLevel',
-      _Enumeration(['AsInvoker',  # /level='asInvoker'
-                    'HighestAvailable',  # /level='highestAvailable'
-                    'RequireAdministrator']))  # /level='requireAdministrator'
-_Same(_link, 'MinimumRequiredVersion', _string)
-_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
-
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
-         _Enumeration(['NoErrorReport',  # /ERRORREPORT:NONE
-                       'PromptImmediately',  # /ERRORREPORT:PROMPT
-                       'QueueForNextLogin'],  # /ERRORREPORT:QUEUE
-                      new=['SendErrorReport']))  # /ERRORREPORT:SEND
-_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
-         _file_list)  # /NODEFAULTLIB
-_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean)  # /NOENTRY
-_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean)  # /SWAPRUN:NET
-
-_Moved(_link, 'GenerateManifest', '', _boolean)
-_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
-_Moved(_link, 'LinkIncremental', '', _newly_boolean)
-_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
-_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean)  # /SAFESEH
-_MSBuildOnly(_link, 'LinkDLL', _boolean)  # /DLL Visible='false'
-_MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
-_MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
-_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
-_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
-_MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
-_MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
-_MSBuildOnly(_link, 'ForceFileOutput',
-             _Enumeration([], new=['Enabled',  # /FORCE
-                                   # /FORCE:MULTIPLE
-                                   'MultiplyDefinedSymbolOnly',
-                                   'UndefinedSymbolOnly']))  # /FORCE:UNRESOLVED
-_MSBuildOnly(_link, 'CreateHotPatchableImage',
-             _Enumeration([], new=['Enabled',  # /FUNCTIONPADMIN
-                                   'X86Image',  # /FUNCTIONPADMIN:5
-                                   'X64Image',  # /FUNCTIONPADMIN:6
-                                   'ItaniumImage']))  # /FUNCTIONPADMIN:16
-_MSBuildOnly(_link, 'CLRSupportLastError',
-             _Enumeration([], new=['Enabled',  # /CLRSupportLastError
-                                   'Disabled',  # /CLRSupportLastError:NO
-                                   # /CLRSupportLastError:SYSTEMDLL
-                                   'SystemDlls']))
-
-
-# Directives for converting VCResourceCompilerTool to ResourceCompile.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
-# the schema of the MSBuild ResourceCompile settings.
-
-_Same(_rc, 'AdditionalOptions', _string_list)
-_Same(_rc, 'AdditionalIncludeDirectories', _folder_list)  # /I
-_Same(_rc, 'Culture', _Integer(msbuild_base=16))
-_Same(_rc, 'IgnoreStandardIncludePath', _boolean)  # /X
-_Same(_rc, 'PreprocessorDefinitions', _string_list)  # /D
-_Same(_rc, 'ResourceOutputFileName', _string)  # /fo
-_Same(_rc, 'ShowProgress', _boolean)  # /v
-# There is no UI in Visual Studio 2008 to set the following properties.
-# However, they are found in CL and other tools.  Include them here for
-# completeness, as they are very likely to have the same usage pattern.
-_Same(_rc, 'SuppressStartupBanner', _boolean)  # /nologo
-_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list)  # /u
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean)  # /n
-_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for converting VCMIDLTool to Midl.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
-# the schema of the MSBuild Midl settings.
-
-_Same(_midl, 'AdditionalIncludeDirectories', _folder_list)  # /I
-_Same(_midl, 'AdditionalOptions', _string_list)
-_Same(_midl, 'CPreprocessOptions', _string)  # /cpp_opt
-_Same(_midl, 'ErrorCheckAllocations', _boolean)  # /error allocation
-_Same(_midl, 'ErrorCheckBounds', _boolean)  # /error bounds_check
-_Same(_midl, 'ErrorCheckEnumRange', _boolean)  # /error enum
-_Same(_midl, 'ErrorCheckRefPointers', _boolean)  # /error ref
-_Same(_midl, 'ErrorCheckStubData', _boolean)  # /error stub_data
-_Same(_midl, 'GenerateStublessProxies', _boolean)  # /Oicf
-_Same(_midl, 'GenerateTypeLibrary', _boolean)
-_Same(_midl, 'HeaderFileName', _file_name)  # /h
-_Same(_midl, 'IgnoreStandardIncludePath', _boolean)  # /no_def_idir
-_Same(_midl, 'InterfaceIdentifierFileName', _file_name)  # /iid
-_Same(_midl, 'MkTypLibCompatible', _boolean)  # /mktyplib203
-_Same(_midl, 'OutputDirectory', _string)  # /out
-_Same(_midl, 'PreprocessorDefinitions', _string_list)  # /D
-_Same(_midl, 'ProxyFileName', _file_name)  # /proxy
-_Same(_midl, 'RedirectOutputAndErrors', _file_name)  # /o
-_Same(_midl, 'SuppressStartupBanner', _boolean)  # /nologo
-_Same(_midl, 'TypeLibraryName', _file_name)  # /tlb
-_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list)  # /U
-_Same(_midl, 'WarnAsError', _boolean)  # /WX
-
-_Same(_midl, 'DefaultCharType',
-      _Enumeration(['Unsigned',  # /char unsigned
-                    'Signed',  # /char signed
-                    'Ascii']))  # /char ascii7
-_Same(_midl, 'TargetEnvironment',
-      _Enumeration(['NotSet',
-                    'Win32',  # /env win32
-                    'Itanium',  # /env ia64
-                    'X64']))  # /env x64
-_Same(_midl, 'EnableErrorChecks',
-      _Enumeration(['EnableCustom',
-                    'None',  # /error none
-                    'All']))  # /error all
-_Same(_midl, 'StructMemberAlignment',
-      _Enumeration(['NotSet',
-                    '1',  # Zp1
-                    '2',  # Zp2
-                    '4',  # Zp4
-                    '8']))  # Zp8
-_Same(_midl, 'WarningLevel',
-      _Enumeration(['0',  # /W0
-                    '1',  # /W1
-                    '2',  # /W2
-                    '3',  # /W3
-                    '4']))  # /W4
-
-_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name)  # /dlldata
-_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
-         _boolean)  # /robust
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean)  # /app_config
-_MSBuildOnly(_midl, 'ClientStubFile', _file_name)  # /cstub
-_MSBuildOnly(_midl, 'GenerateClientFiles',
-             _Enumeration([], new=['Stub',  # /client stub
-                                   'None']))  # /client none
-_MSBuildOnly(_midl, 'GenerateServerFiles',
-             _Enumeration([], new=['Stub',  # /server stub
-                                   'None']))  # /server none
-_MSBuildOnly(_midl, 'LocaleID', _integer)  # /lcid DECIMAL
-_MSBuildOnly(_midl, 'ServerStubFile', _file_name)  # /sstub
-_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean)  # /no_warn
-_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_midl, 'TypeLibFormat',
-             _Enumeration([], new=['NewFormat',  # /newtlb
-                                   'OldFormat']))  # /oldtlb
-
-
-# Directives for converting VCLibrarianTool to Lib.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
-# the schema of the MSBuild Lib settings.
-
-_Same(_lib, 'AdditionalDependencies', _file_list)
-_Same(_lib, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
-_Same(_lib, 'AdditionalOptions', _string_list)
-_Same(_lib, 'ExportNamedFunctions', _string_list)  # /EXPORT
-_Same(_lib, 'ForceSymbolReferences', _string)  # /INCLUDE
-_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
-_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list)  # /NODEFAULTLIB
-_Same(_lib, 'ModuleDefinitionFile', _file_name)  # /DEF
-_Same(_lib, 'OutputFile', _file_name)  # /OUT
-_Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
-_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
-_Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
-_Same(_lib, 'TargetMachine', _target_machine_enumeration)
-
-# TODO(jeanluc) _link defines the same value that gets moved to
-# ProjectReference.  We may want to validate that they are consistent.
-_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-
-_MSBuildOnly(_lib, 'DisplayLibrary', _string)  # /LIST Visible='false'
-_MSBuildOnly(_lib, 'ErrorReporting',
-             _Enumeration([], new=['PromptImmediately',  # /ERRORREPORT:PROMPT
-                                   'QueueForNextLogin',  # /ERRORREPORT:QUEUE
-                                   'SendErrorReport',  # /ERRORREPORT:SEND
-                                   'NoErrorReport']))  # /ERRORREPORT:NONE
-_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
-_MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
-_MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
-_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
-_MSBuildOnly(_lib, 'Verbose', _boolean)
-
-
-# Directives for converting VCManifestTool to Mt.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
-# the schema of the MSBuild Mt settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_manifest, 'AdditionalManifestFiles', _file_list)  # /manifest
-_Same(_manifest, 'AdditionalOptions', _string_list)
-_Same(_manifest, 'AssemblyIdentity', _string)  # /identity:
-_Same(_manifest, 'ComponentFileName', _file_name)  # /dll
-_Same(_manifest, 'GenerateCatalogFiles', _boolean)  # /makecdfs
-_Same(_manifest, 'InputResourceManifests', _string)  # /inputresource
-_Same(_manifest, 'OutputManifestFile', _file_name)  # /out
-_Same(_manifest, 'RegistrarScriptFile', _file_name)  # /rgs
-_Same(_manifest, 'ReplacementsFile', _file_name)  # /replacements
-_Same(_manifest, 'SuppressStartupBanner', _boolean)  # /nologo
-_Same(_manifest, 'TypeLibraryFile', _file_name)  # /tlb:
-_Same(_manifest, 'UpdateFileHashes', _boolean)  # /hashupdate
-_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
-_Same(_manifest, 'VerboseOutput', _boolean)  # /verbose
-
-# Options that have moved location.
-_MovedAndRenamed(_manifest, 'ManifestResourceFile',
-                 'ManifestResourceCompile',
-                 'ResourceOutputFileName',
-                 _file_name)
-_Moved(_manifest, 'EmbedManifest', '', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
-_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
-_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
-_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean)  # /category
-_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
-             _file_name)  # /managedassemblyname
-_MSBuildOnly(_manifest, 'OutputResourceManifests', _string)  # /outputresource
-_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean)  # /nodependency
-_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for MASM.
-# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
-# MSBuild MASM settings.
-
-# Options that have the same name in MSVS and MSBuild.
-_Same(_masm, 'UseSafeExceptionHandlers', _boolean)  # /safeseh
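
To make the translation tables above concrete, here is a small usage sketch of the module being deleted; the setting values are illustrative, not taken from any project in this tree. MSVS-style VCCLCompilerTool settings are validated, converted, and then re-validated against the MSBuild schema.

import sys
import gyp.MSVSSettings as MSVSSettings

# Illustrative MSVS (VS2008-era) compiler settings.
msvs_settings = {
    'VCCLCompilerTool': {
        'Optimization': '2',                    # index 2 -> 'MaxSpeed'
        'WarnAsError': 'true',                  # renamed for MSBuild
        'AdditionalIncludeDirectories': '$(InputDir)include',
    },
}

MSVSSettings.ValidateMSVSSettings(msvs_settings, stderr=sys.stderr)
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings,
                                                         stderr=sys.stderr)
# msbuild_settings['ClCompile'] is now:
#   {'Optimization': 'MaxSpeed',
#    'TreatWarningAsError': 'true',
#    'AdditionalIncludeDirectories': '%(RelativeDir)include'}
MSVSSettings.ValidateMSBuildSettings(msbuild_settings, stderr=sys.stderr)
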
diff --git a/tools/gyp/pylib/gyp/MSVSSettings_test.py b/tools/gyp/pylib/gyp/MSVSSettings_test.py
deleted file mode 100755
index bf6ea6b..0000000
--- a/tools/gyp/pylib/gyp/MSVSSettings_test.py
+++ /dev/null
@@ -1,1483 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the MSVSSettings.py file."""
-
-import StringIO
-import unittest
-import gyp.MSVSSettings as MSVSSettings
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
-  def setUp(self):
-    self.stderr = StringIO.StringIO()
-
-  def _ExpectedWarnings(self, expected):
-    """Compares recorded lines to expected warnings."""
-    self.stderr.seek(0)
-    actual = self.stderr.read().split('\n')
-    actual = [line for line in actual if line]
-    self.assertEqual(sorted(expected), sorted(actual))
-
-  def testValidateMSVSSettings_tool_names(self):
-    """Tests that only MSVS tool names are allowed."""
-    MSVSSettings.ValidateMSVSSettings(
-        {'VCCLCompilerTool': {},
-         'VCLinkerTool': {},
-         'VCMIDLTool': {},
-         'foo': {},
-         'VCResourceCompilerTool': {},
-         'VCLibrarianTool': {},
-         'VCManifestTool': {},
-         'ClCompile': {}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: unrecognized tool foo',
-        'Warning: unrecognized tool ClCompile'])
-
-  def testValidateMSVSSettings_settings(self):
-    """Tests that for invalid MSVS settings."""
-    MSVSSettings.ValidateMSVSSettings(
-        {'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': ['string1', 'string2'],
-            'AdditionalUsingDirectories': 'folder1;folder2',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': '0',
-            'BasicRuntimeChecks': '5',
-            'BrowseInformation': 'fdkslj',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': '-1',
-            'CompileAs': '1',
-            'DebugInformationFormat': '2',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'string1;string2',
-            'EnableEnhancedInstructionSet': '1',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'Enableprefast': 'bogus',
-            'ErrorReporting': '1',
-            'ExceptionHandling': '1',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '1',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2',
-            'ForcedUsingFiles': 'file1;file2',
-            'GeneratePreprocessedFile': '1',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '1',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '1',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderThrough': 'a_file_name',
-            'PreprocessorDefinitions': 'string1;string2',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': '1',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1',
-            'SuppressStartupBanner': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '1',
-            'UseUnicodeResponseFiles': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '1',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name',
-            'ZZXYZ': 'bogus'},
-         'VCLinkerTool': {
-             'AdditionalDependencies': 'file1;file2',
-             'AdditionalDependencies_excluded': 'file3',
-             'AdditionalLibraryDirectories': 'folder1;folder2',
-             'AdditionalManifestDependencies': 'file1;file2',
-             'AdditionalOptions': 'a string1',
-             'AddModuleNamesToAssembly': 'file1;file2',
-             'AllowIsolation': 'true',
-             'AssemblyDebug': '2',
-             'AssemblyLinkResource': 'file1;file2',
-             'BaseAddress': 'a string1',
-             'CLRImageType': '2',
-             'CLRThreadAttribute': '2',
-             'CLRUnmanagedCodeCheck': 'true',
-             'DataExecutionPrevention': '2',
-             'DelayLoadDLLs': 'file1;file2',
-             'DelaySign': 'true',
-             'Driver': '2',
-             'EmbedManagedResourceFile': 'file1;file2',
-             'EnableCOMDATFolding': '2',
-             'EnableUAC': 'true',
-             'EntryPointSymbol': 'a string1',
-             'ErrorReporting': '2',
-             'FixedBaseAddress': '2',
-             'ForceSymbolReferences': 'file1;file2',
-             'FunctionOrder': 'a_file_name',
-             'GenerateDebugInformation': 'true',
-             'GenerateManifest': 'true',
-             'GenerateMapFile': 'true',
-             'HeapCommitSize': 'a string1',
-             'HeapReserveSize': 'a string1',
-             'IgnoreAllDefaultLibraries': 'true',
-             'IgnoreDefaultLibraryNames': 'file1;file2',
-             'IgnoreEmbeddedIDL': 'true',
-             'IgnoreImportLibrary': 'true',
-             'ImportLibrary': 'a_file_name',
-             'KeyContainer': 'a_file_name',
-             'KeyFile': 'a_file_name',
-             'LargeAddressAware': '2',
-             'LinkIncremental': '2',
-             'LinkLibraryDependencies': 'true',
-             'LinkTimeCodeGeneration': '2',
-             'ManifestFile': 'a_file_name',
-             'MapExports': 'true',
-             'MapFileName': 'a_file_name',
-             'MergedIDLBaseFileName': 'a_file_name',
-             'MergeSections': 'a string1',
-             'MidlCommandFile': 'a_file_name',
-             'ModuleDefinitionFile': 'a_file_name',
-             'OptimizeForWindows98': '1',
-             'OptimizeReferences': '2',
-             'OutputFile': 'a_file_name',
-             'PerUserRedirection': 'true',
-             'Profile': 'true',
-             'ProfileGuidedDatabase': 'a_file_name',
-             'ProgramDatabaseFile': 'a_file_name',
-             'RandomizedBaseAddress': '2',
-             'RegisterOutput': 'true',
-             'ResourceOnlyDLL': 'true',
-             'SetChecksum': 'true',
-             'ShowProgress': '2',
-             'StackCommitSize': 'a string1',
-             'StackReserveSize': 'a string1',
-             'StripPrivateSymbols': 'a_file_name',
-             'SubSystem': '2',
-             'SupportUnloadOfDelayLoadedDLL': 'true',
-             'SuppressStartupBanner': 'true',
-             'SwapRunFromCD': 'true',
-             'SwapRunFromNet': 'true',
-             'TargetMachine': '2',
-             'TerminalServerAware': '2',
-             'TurnOffAssemblyGeneration': 'true',
-             'TypeLibraryFile': 'a_file_name',
-             'TypeLibraryResourceID': '33',
-             'UACExecutionLevel': '2',
-             'UACUIAccess': 'true',
-             'UseLibraryDependencyInputs': 'true',
-             'UseUnicodeResponseFiles': 'true',
-             'Version': 'a string1'},
-         'VCMIDLTool': {
-             'AdditionalIncludeDirectories': 'folder1;folder2',
-             'AdditionalOptions': 'a string1',
-             'CPreprocessOptions': 'a string1',
-             'DefaultCharType': '1',
-             'DLLDataFileName': 'a_file_name',
-             'EnableErrorChecks': '1',
-             'ErrorCheckAllocations': 'true',
-             'ErrorCheckBounds': 'true',
-             'ErrorCheckEnumRange': 'true',
-             'ErrorCheckRefPointers': 'true',
-             'ErrorCheckStubData': 'true',
-             'GenerateStublessProxies': 'true',
-             'GenerateTypeLibrary': 'true',
-             'HeaderFileName': 'a_file_name',
-             'IgnoreStandardIncludePath': 'true',
-             'InterfaceIdentifierFileName': 'a_file_name',
-             'MkTypLibCompatible': 'true',
-             'notgood': 'bogus',
-             'OutputDirectory': 'a string1',
-             'PreprocessorDefinitions': 'string1;string2',
-             'ProxyFileName': 'a_file_name',
-             'RedirectOutputAndErrors': 'a_file_name',
-             'StructMemberAlignment': '1',
-             'SuppressStartupBanner': 'true',
-             'TargetEnvironment': '1',
-             'TypeLibraryName': 'a_file_name',
-             'UndefinePreprocessorDefinitions': 'string1;string2',
-             'ValidateParameters': 'true',
-             'WarnAsError': 'true',
-             'WarningLevel': '1'},
-         'VCResourceCompilerTool': {
-             'AdditionalOptions': 'a string1',
-             'AdditionalIncludeDirectories': 'folder1;folder2',
-             'Culture': '1003',
-             'IgnoreStandardIncludePath': 'true',
-             'notgood2': 'bogus',
-             'PreprocessorDefinitions': 'string1;string2',
-             'ResourceOutputFileName': 'a string1',
-             'ShowProgress': 'true',
-             'SuppressStartupBanner': 'true',
-             'UndefinePreprocessorDefinitions': 'string1;string2'},
-         'VCLibrarianTool': {
-             'AdditionalDependencies': 'file1;file2',
-             'AdditionalLibraryDirectories': 'folder1;folder2',
-             'AdditionalOptions': 'a string1',
-             'ExportNamedFunctions': 'string1;string2',
-             'ForceSymbolReferences': 'a string1',
-             'IgnoreAllDefaultLibraries': 'true',
-             'IgnoreSpecificDefaultLibraries': 'file1;file2',
-             'LinkLibraryDependencies': 'true',
-             'ModuleDefinitionFile': 'a_file_name',
-             'OutputFile': 'a_file_name',
-             'SuppressStartupBanner': 'true',
-             'UseUnicodeResponseFiles': 'true'},
-         'VCManifestTool': {
-             'AdditionalManifestFiles': 'file1;file2',
-             'AdditionalOptions': 'a string1',
-             'AssemblyIdentity': 'a string1',
-             'ComponentFileName': 'a_file_name',
-             'DependencyInformationFile': 'a_file_name',
-             'GenerateCatalogFiles': 'true',
-             'InputResourceManifests': 'a string1',
-             'ManifestResourceFile': 'a_file_name',
-             'OutputManifestFile': 'a_file_name',
-             'RegistrarScriptFile': 'a_file_name',
-             'ReplacementsFile': 'a_file_name',
-             'SuppressStartupBanner': 'true',
-             'TypeLibraryFile': 'a_file_name',
-             'UpdateFileHashes': 'truel',
-             'UpdateFileHashesSearchPath': 'a_file_name',
-             'UseFAT32Workaround': 'true',
-             'UseUnicodeResponseFiles': 'true',
-             'VerboseOutput': 'true'}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
-        'index value (5) not in expected range [0, 4)',
-        'Warning: for VCCLCompilerTool/BrowseInformation, '
-        "invalid literal for int() with base 10: 'fdkslj'",
-        'Warning: for VCCLCompilerTool/CallingConvention, '
-        'index value (-1) not in expected range [0, 4)',
-        'Warning: for VCCLCompilerTool/DebugInformationFormat, '
-        'converted value for 2 not specified.',
-        'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
-        'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
-        'Warning: for VCLinkerTool/TargetMachine, '
-        'converted value for 2 not specified.',
-        'Warning: unrecognized setting VCMIDLTool/notgood',
-        'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
-        'Warning: for VCManifestTool/UpdateFileHashes, '
-        "expected bool; got 'truel'"
-        ''])
-
-  def testValidateMSBuildSettings_settings(self):
-    """Tests that for invalid MSBuild settings."""
-    MSVSSettings.ValidateMSBuildSettings(
-        {'ClCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2',
-            'AdditionalOptions': ['string1', 'string2'],
-            'AdditionalUsingDirectories': 'folder1;folder2',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': 'NoListing',
-            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
-            'BrowseInformation': 'false',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'BuildingInIDE': 'true',
-            'CallingConvention': 'Cdecl',
-            'CompileAs': 'CompileAsC',
-            'CompileAsManaged': 'true',
-            'CreateHotpatchableImage': 'true',
-            'DebugInformationFormat': 'ProgramDatabase',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'string1;string2',
-            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'Enableprefast': 'bogus',
-            'ErrorReporting': 'Prompt',
-            'ExceptionHandling': 'SyncCThrow',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Neither',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Precise',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2',
-            'ForcedUsingFiles': 'file1;file2',
-            'FunctionLevelLinking': 'false',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'OnlyExplicitInline',
-            'IntrinsicFunctions': 'false',
-            'MinimalRebuild': 'true',
-            'MultiProcessorCompilation': 'true',
-            'ObjectFileName': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Disabled',
-            'PrecompiledHeader': 'NotUsing',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderOutputFile': 'a_file_name',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'string1;string2',
-            'PreprocessOutputPath': 'a string1',
-            'PreprocessSuppressLineNumbers': 'false',
-            'PreprocessToFile': 'false',
-            'ProcessorNumber': '33',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': 'MultiThreaded',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1Byte',
-            'SuppressStartupBanner': 'true',
-            'TrackerLogDirectory': 'a_folder',
-            'TreatSpecificWarningsAsErrors': 'string1;string2',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'string1;string2',
-            'UseFullPaths': 'true',
-            'UseUnicodeForAssemblerListing': 'true',
-            'WarningLevel': 'TurnOffAllWarnings',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name',
-            'ZZXYZ': 'bogus'},
-         'Link': {
-             'AdditionalDependencies': 'file1;file2',
-             'AdditionalLibraryDirectories': 'folder1;folder2',
-             'AdditionalManifestDependencies': 'file1;file2',
-             'AdditionalOptions': 'a string1',
-             'AddModuleNamesToAssembly': 'file1;file2',
-             'AllowIsolation': 'true',
-             'AssemblyDebug': '',
-             'AssemblyLinkResource': 'file1;file2',
-             'BaseAddress': 'a string1',
-             'BuildingInIDE': 'true',
-             'CLRImageType': 'ForceIJWImage',
-             'CLRSupportLastError': 'Enabled',
-             'CLRThreadAttribute': 'MTAThreadingAttribute',
-             'CLRUnmanagedCodeCheck': 'true',
-             'CreateHotPatchableImage': 'X86Image',
-             'DataExecutionPrevention': 'false',
-             'DelayLoadDLLs': 'file1;file2',
-             'DelaySign': 'true',
-             'Driver': 'NotSet',
-             'EmbedManagedResourceFile': 'file1;file2',
-             'EnableCOMDATFolding': 'false',
-             'EnableUAC': 'true',
-             'EntryPointSymbol': 'a string1',
-             'FixedBaseAddress': 'false',
-             'ForceFileOutput': 'Enabled',
-             'ForceSymbolReferences': 'file1;file2',
-             'FunctionOrder': 'a_file_name',
-             'GenerateDebugInformation': 'true',
-             'GenerateMapFile': 'true',
-             'HeapCommitSize': 'a string1',
-             'HeapReserveSize': 'a string1',
-             'IgnoreAllDefaultLibraries': 'true',
-             'IgnoreEmbeddedIDL': 'true',
-             'IgnoreSpecificDefaultLibraries': 'a_file_list',
-             'ImageHasSafeExceptionHandlers': 'true',
-             'ImportLibrary': 'a_file_name',
-             'KeyContainer': 'a_file_name',
-             'KeyFile': 'a_file_name',
-             'LargeAddressAware': 'false',
-             'LinkDLL': 'true',
-             'LinkErrorReporting': 'SendErrorReport',
-             'LinkStatus': 'true',
-             'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-             'ManifestFile': 'a_file_name',
-             'MapExports': 'true',
-             'MapFileName': 'a_file_name',
-             'MergedIDLBaseFileName': 'a_file_name',
-             'MergeSections': 'a string1',
-             'MidlCommandFile': 'a_file_name',
-             'MinimumRequiredVersion': 'a string1',
-             'ModuleDefinitionFile': 'a_file_name',
-             'MSDOSStubFileName': 'a_file_name',
-             'NoEntryPoint': 'true',
-             'OptimizeReferences': 'false',
-             'OutputFile': 'a_file_name',
-             'PerUserRedirection': 'true',
-             'PreventDllBinding': 'true',
-             'Profile': 'true',
-             'ProfileGuidedDatabase': 'a_file_name',
-             'ProgramDatabaseFile': 'a_file_name',
-             'RandomizedBaseAddress': 'false',
-             'RegisterOutput': 'true',
-             'SectionAlignment': '33',
-             'SetChecksum': 'true',
-             'ShowProgress': 'LinkVerboseREF',
-             'SpecifySectionAttributes': 'a string1',
-             'StackCommitSize': 'a string1',
-             'StackReserveSize': 'a string1',
-             'StripPrivateSymbols': 'a_file_name',
-             'SubSystem': 'Console',
-             'SupportNobindOfDelayLoadedDLL': 'true',
-             'SupportUnloadOfDelayLoadedDLL': 'true',
-             'SuppressStartupBanner': 'true',
-             'SwapRunFromCD': 'true',
-             'SwapRunFromNET': 'true',
-             'TargetMachine': 'MachineX86',
-             'TerminalServerAware': 'false',
-             'TrackerLogDirectory': 'a_folder',
-             'TreatLinkerWarningAsErrors': 'true',
-             'TurnOffAssemblyGeneration': 'true',
-             'TypeLibraryFile': 'a_file_name',
-             'TypeLibraryResourceID': '33',
-             'UACExecutionLevel': 'AsInvoker',
-             'UACUIAccess': 'true',
-             'Version': 'a string1'},
-         'ResourceCompile': {
-             'AdditionalIncludeDirectories': 'folder1;folder2',
-             'AdditionalOptions': 'a string1',
-             'Culture': '0x236',
-             'IgnoreStandardIncludePath': 'true',
-             'NullTerminateStrings': 'true',
-             'PreprocessorDefinitions': 'string1;string2',
-             'ResourceOutputFileName': 'a string1',
-             'ShowProgress': 'true',
-             'SuppressStartupBanner': 'true',
-             'TrackerLogDirectory': 'a_folder',
-             'UndefinePreprocessorDefinitions': 'string1;string2'},
-         'Midl': {
-             'AdditionalIncludeDirectories': 'folder1;folder2',
-             'AdditionalOptions': 'a string1',
-             'ApplicationConfigurationMode': 'true',
-             'ClientStubFile': 'a_file_name',
-             'CPreprocessOptions': 'a string1',
-             'DefaultCharType': 'Signed',
-             'DllDataFileName': 'a_file_name',
-             'EnableErrorChecks': 'EnableCustom',
-             'ErrorCheckAllocations': 'true',
-             'ErrorCheckBounds': 'true',
-             'ErrorCheckEnumRange': 'true',
-             'ErrorCheckRefPointers': 'true',
-             'ErrorCheckStubData': 'true',
-             'GenerateClientFiles': 'Stub',
-             'GenerateServerFiles': 'None',
-             'GenerateStublessProxies': 'true',
-             'GenerateTypeLibrary': 'true',
-             'HeaderFileName': 'a_file_name',
-             'IgnoreStandardIncludePath': 'true',
-             'InterfaceIdentifierFileName': 'a_file_name',
-             'LocaleID': '33',
-             'MkTypLibCompatible': 'true',
-             'OutputDirectory': 'a string1',
-             'PreprocessorDefinitions': 'string1;string2',
-             'ProxyFileName': 'a_file_name',
-             'RedirectOutputAndErrors': 'a_file_name',
-             'ServerStubFile': 'a_file_name',
-             'StructMemberAlignment': 'NotSet',
-             'SuppressCompilerWarnings': 'true',
-             'SuppressStartupBanner': 'true',
-             'TargetEnvironment': 'Itanium',
-             'TrackerLogDirectory': 'a_folder',
-             'TypeLibFormat': 'NewFormat',
-             'TypeLibraryName': 'a_file_name',
-             'UndefinePreprocessorDefinitions': 'string1;string2',
-             'ValidateAllParameters': 'true',
-             'WarnAsError': 'true',
-             'WarningLevel': '1'},
-         'Lib': {
-             'AdditionalDependencies': 'file1;file2',
-             'AdditionalLibraryDirectories': 'folder1;folder2',
-             'AdditionalOptions': 'a string1',
-             'DisplayLibrary': 'a string1',
-             'ErrorReporting': 'PromptImmediately',
-             'ExportNamedFunctions': 'string1;string2',
-             'ForceSymbolReferences': 'a string1',
-             'IgnoreAllDefaultLibraries': 'true',
-             'IgnoreSpecificDefaultLibraries': 'file1;file2',
-             'LinkTimeCodeGeneration': 'true',
-             'MinimumRequiredVersion': 'a string1',
-             'ModuleDefinitionFile': 'a_file_name',
-             'Name': 'a_file_name',
-             'OutputFile': 'a_file_name',
-             'RemoveObjects': 'file1;file2',
-             'SubSystem': 'Console',
-             'SuppressStartupBanner': 'true',
-             'TargetMachine': 'MachineX86i',
-             'TrackerLogDirectory': 'a_folder',
-             'TreatLibWarningAsErrors': 'true',
-             'UseUnicodeResponseFiles': 'true',
-             'Verbose': 'true'},
-         'Manifest': {
-             'AdditionalManifestFiles': 'file1;file2',
-             'AdditionalOptions': 'a string1',
-             'AssemblyIdentity': 'a string1',
-             'ComponentFileName': 'a_file_name',
-             'EnableDPIAwareness': 'fal',
-             'GenerateCatalogFiles': 'truel',
-             'GenerateCategoryTags': 'true',
-             'InputResourceManifests': 'a string1',
-             'ManifestFromManagedAssembly': 'a_file_name',
-             'notgood3': 'bogus',
-             'OutputManifestFile': 'a_file_name',
-             'OutputResourceManifests': 'a string1',
-             'RegistrarScriptFile': 'a_file_name',
-             'ReplacementsFile': 'a_file_name',
-             'SuppressDependencyElement': 'true',
-             'SuppressStartupBanner': 'true',
-             'TrackerLogDirectory': 'a_folder',
-             'TypeLibraryFile': 'a_file_name',
-             'UpdateFileHashes': 'true',
-             'UpdateFileHashesSearchPath': 'a_file_name',
-             'VerboseOutput': 'true'},
-         'ProjectReference': {
-             'LinkLibraryDependencies': 'true',
-             'UseLibraryDependencyInputs': 'true'},
-         'ManifestResourceCompile': {
-             'ResourceOutputFileName': 'a_file_name'},
-         '': {
-             'EmbedManifest': 'true',
-             'GenerateManifest': 'true',
-             'IgnoreImportLibrary': 'true',
-             'LinkIncremental': 'false'}},
-        self.stderr)
-    self._ExpectedWarnings([
-        'Warning: unrecognized setting ClCompile/Enableprefast',
-        'Warning: unrecognized setting ClCompile/ZZXYZ',
-        'Warning: unrecognized setting Manifest/notgood3',
-        'Warning: for Manifest/GenerateCatalogFiles, '
-        "expected bool; got 'truel'",
-        'Warning: for Lib/TargetMachine, unrecognized enumerated value '
-        'MachineX86i',
-        "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
-
-  def testConvertToMSBuildSettings_empty(self):
-    """Tests an empty conversion."""
-    msvs_settings = {}
-    expected_msbuild_settings = {}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_minimal(self):
-    """Tests a minimal conversion."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/foo',
-            'BasicRuntimeChecks': '0',
-            },
-        'VCLinkerTool': {
-            'LinkTimeCodeGeneration': '1',
-            'ErrorReporting': '1',
-            'DataExecutionPrevention': '2',
-            },
-        }
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/foo',
-            'BasicRuntimeChecks': 'Default',
-            },
-        'Link': {
-            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-            'LinkErrorReporting': 'PromptImmediately',
-            'DataExecutionPrevention': 'true',
-            },
-        }
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_warnings(self):
-    """Tests conversion that generates warnings."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': '1',
-            'AdditionalOptions': '2',
-            # These are incorrect values:
-            'BasicRuntimeChecks': '12',
-            'BrowseInformation': '21',
-            'UsePrecompiledHeader': '13',
-            'GeneratePreprocessedFile': '14'},
-        'VCLinkerTool': {
-            # These are incorrect values:
-            'Driver': '10',
-            'LinkTimeCodeGeneration': '31',
-            'ErrorReporting': '21',
-            'FixedBaseAddress': '6'},
-        'VCResourceCompilerTool': {
-            # Custom
-            'Culture': '1003'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': '1',
-            'AdditionalOptions': '2'},
-        'Link': {},
-        'ResourceCompile': {
-            # Custom
-            'Culture': '0x03eb'}}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([
-        'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
-        'MSBuild, index value (12) not in expected range [0, 4)',
-        'Warning: while converting VCCLCompilerTool/BrowseInformation to '
-        'MSBuild, index value (21) not in expected range [0, 3)',
-        'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
-        'MSBuild, index value (13) not in expected range [0, 3)',
-        'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
-        'MSBuild, value must be one of [0, 1, 2]; got 14',
-
-        'Warning: while converting VCLinkerTool/Driver to '
-        'MSBuild, index value (10) not in expected range [0, 4)',
-        'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
-        'MSBuild, index value (31) not in expected range [0, 5)',
-        'Warning: while converting VCLinkerTool/ErrorReporting to '
-        'MSBuild, index value (21) not in expected range [0, 3)',
-        'Warning: while converting VCLinkerTool/FixedBaseAddress to '
-        'MSBuild, index value (6) not in expected range [0, 3)',
-        ])
-
-  def testConvertToMSBuildSettings_full_synthetic(self):
-    """Tests conversion of all the MSBuild settings."""
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': '0',
-            'BasicRuntimeChecks': '1',
-            'BrowseInformation': '2',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': '0',
-            'CompileAs': '1',
-            'DebugInformationFormat': '4',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'd1;d2;d3',
-            'EnableEnhancedInstructionSet': '0',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': '1',
-            'ExceptionHandling': '2',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '0',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2;file3',
-            'ForcedUsingFiles': 'file1;file2;file3',
-            'GeneratePreprocessedFile': '1',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '2',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '3',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderThrough': 'a_file_name',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': '0',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1',
-            'SuppressStartupBanner': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '1',
-            'UseUnicodeResponseFiles': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '2',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name'},
-        'VCLinkerTool': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
-            'AdditionalManifestDependencies': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AddModuleNamesToAssembly': 'file1;file2;file3',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '0',
-            'AssemblyLinkResource': 'file1;file2;file3',
-            'BaseAddress': 'a_string',
-            'CLRImageType': '1',
-            'CLRThreadAttribute': '2',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '0',
-            'DelayLoadDLLs': 'file1;file2;file3',
-            'DelaySign': 'true',
-            'Driver': '1',
-            'EmbedManagedResourceFile': 'file1;file2;file3',
-            'EnableCOMDATFolding': '0',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a_string',
-            'ErrorReporting': '0',
-            'FixedBaseAddress': '1',
-            'ForceSymbolReferences': 'file1;file2;file3',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateManifest': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a_string',
-            'HeapReserveSize': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreDefaultLibraryNames': 'file1;file2;file3',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreImportLibrary': 'true',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': '2',
-            'LinkIncremental': '1',
-            'LinkLibraryDependencies': 'true',
-            'LinkTimeCodeGeneration': '2',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a_string',
-            'MidlCommandFile': 'a_file_name',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OptimizeForWindows98': '1',
-            'OptimizeReferences': '0',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': '1',
-            'RegisterOutput': 'true',
-            'ResourceOnlyDLL': 'true',
-            'SetChecksum': 'true',
-            'ShowProgress': '0',
-            'StackCommitSize': 'a_string',
-            'StackReserveSize': 'a_string',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': '2',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNet': 'true',
-            'TargetMachine': '3',
-            'TerminalServerAware': '2',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': '1',
-            'UACUIAccess': 'true',
-            'UseLibraryDependencyInputs': 'false',
-            'UseUnicodeResponseFiles': 'true',
-            'Version': 'a_string'},
-        'VCResourceCompilerTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'Culture': '1003',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ResourceOutputFileName': 'a_string',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
-        'VCMIDLTool': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'CPreprocessOptions': 'a_string',
-            'DefaultCharType': '0',
-            'DLLDataFileName': 'a_file_name',
-            'EnableErrorChecks': '2',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'MkTypLibCompatible': 'true',
-            'OutputDirectory': 'a_string',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'StructMemberAlignment': '3',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': '1',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'ValidateParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '4'},
-        'VCLibrarianTool': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'ExportNamedFunctions': 'd1;d2;d3',
-            'ForceSymbolReferences': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'LinkLibraryDependencies': 'true',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'UseUnicodeResponseFiles': 'true'},
-        'VCManifestTool': {
-            'AdditionalManifestFiles': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AssemblyIdentity': 'a_string',
-            'ComponentFileName': 'a_file_name',
-            'DependencyInformationFile': 'a_file_name',
-            'EmbedManifest': 'true',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'a_string',
-            'ManifestResourceFile': 'my_name',
-            'OutputManifestFile': 'a_file_name',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'UseFAT32Workaround': 'true',
-            'UseUnicodeResponseFiles': 'true',
-            'VerboseOutput': 'true'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string /J',
-            'AdditionalUsingDirectories': 'folder1;folder2;folder3',
-            'AssemblerListingLocation': 'a_file_name',
-            'AssemblerOutput': 'NoListing',
-            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
-            'BrowseInformation': 'true',
-            'BrowseInformationFile': 'a_file_name',
-            'BufferSecurityCheck': 'true',
-            'CallingConvention': 'Cdecl',
-            'CompileAs': 'CompileAsC',
-            'DebugInformationFormat': 'EditAndContinue',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'd1;d2;d3',
-            'EnableEnhancedInstructionSet': 'NotSet',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': 'Prompt',
-            'ExceptionHandling': 'Async',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Neither',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Strict',
-            'ForceConformanceInForLoopScope': 'true',
-            'ForcedIncludeFiles': 'file1;file2;file3',
-            'ForcedUsingFiles': 'file1;file2;file3',
-            'FunctionLevelLinking': 'true',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'AnySuitable',
-            'IntrinsicFunctions': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFileName': 'a_file_name',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Full',
-            'PrecompiledHeader': 'Create',
-            'PrecompiledHeaderFile': 'a_file_name',
-            'PrecompiledHeaderOutputFile': 'a_file_name',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'PreprocessSuppressLineNumbers': 'false',
-            'PreprocessToFile': 'true',
-            'ProgramDataBaseFileName': 'a_file_name',
-            'RuntimeLibrary': 'MultiThreaded',
-            'RuntimeTypeInfo': 'true',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '1Byte',
-            'SuppressStartupBanner': 'true',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'true',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'UseFullPaths': 'true',
-            'WarningLevel': 'Level2',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': 'a_file_name'},
-        'Link': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalManifestDependencies': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AddModuleNamesToAssembly': 'file1;file2;file3',
-            'AllowIsolation': 'true',
-            'AssemblyDebug': '',
-            'AssemblyLinkResource': 'file1;file2;file3',
-            'BaseAddress': 'a_string',
-            'CLRImageType': 'ForceIJWImage',
-            'CLRThreadAttribute': 'STAThreadingAttribute',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '',
-            'DelayLoadDLLs': 'file1;file2;file3',
-            'DelaySign': 'true',
-            'Driver': 'Driver',
-            'EmbedManagedResourceFile': 'file1;file2;file3',
-            'EnableCOMDATFolding': '',
-            'EnableUAC': 'true',
-            'EntryPointSymbol': 'a_string',
-            'FixedBaseAddress': 'false',
-            'ForceSymbolReferences': 'file1;file2;file3',
-            'FunctionOrder': 'a_file_name',
-            'GenerateDebugInformation': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': 'a_string',
-            'HeapReserveSize': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'ImportLibrary': 'a_file_name',
-            'KeyContainer': 'a_file_name',
-            'KeyFile': 'a_file_name',
-            'LargeAddressAware': 'true',
-            'LinkErrorReporting': 'NoErrorReport',
-            'LinkTimeCodeGeneration': 'PGInstrument',
-            'ManifestFile': 'a_file_name',
-            'MapExports': 'true',
-            'MapFileName': 'a_file_name',
-            'MergedIDLBaseFileName': 'a_file_name',
-            'MergeSections': 'a_string',
-            'MidlCommandFile': 'a_file_name',
-            'ModuleDefinitionFile': 'a_file_name',
-            'NoEntryPoint': 'true',
-            'OptimizeReferences': '',
-            'OutputFile': 'a_file_name',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': 'a_file_name',
-            'ProgramDatabaseFile': 'a_file_name',
-            'RandomizedBaseAddress': 'false',
-            'RegisterOutput': 'true',
-            'SetChecksum': 'true',
-            'ShowProgress': 'NotSet',
-            'StackCommitSize': 'a_string',
-            'StackReserveSize': 'a_string',
-            'StripPrivateSymbols': 'a_file_name',
-            'SubSystem': 'Windows',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'true',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNET': 'true',
-            'TargetMachine': 'MachineARM',
-            'TerminalServerAware': 'true',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'TypeLibraryResourceID': '33',
-            'UACExecutionLevel': 'HighestAvailable',
-            'UACUIAccess': 'true',
-            'Version': 'a_string'},
-        'ResourceCompile': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'Culture': '0x03eb',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ResourceOutputFileName': 'a_string',
-            'ShowProgress': 'true',
-            'SuppressStartupBanner': 'true',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
-        'Midl': {
-            'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'CPreprocessOptions': 'a_string',
-            'DefaultCharType': 'Unsigned',
-            'DllDataFileName': 'a_file_name',
-            'EnableErrorChecks': 'All',
-            'ErrorCheckAllocations': 'true',
-            'ErrorCheckBounds': 'true',
-            'ErrorCheckEnumRange': 'true',
-            'ErrorCheckRefPointers': 'true',
-            'ErrorCheckStubData': 'true',
-            'GenerateStublessProxies': 'true',
-            'GenerateTypeLibrary': 'true',
-            'HeaderFileName': 'a_file_name',
-            'IgnoreStandardIncludePath': 'true',
-            'InterfaceIdentifierFileName': 'a_file_name',
-            'MkTypLibCompatible': 'true',
-            'OutputDirectory': 'a_string',
-            'PreprocessorDefinitions': 'd1;d2;d3',
-            'ProxyFileName': 'a_file_name',
-            'RedirectOutputAndErrors': 'a_file_name',
-            'StructMemberAlignment': '4',
-            'SuppressStartupBanner': 'true',
-            'TargetEnvironment': 'Win32',
-            'TypeLibraryName': 'a_file_name',
-            'UndefinePreprocessorDefinitions': 'd1;d2;d3',
-            'ValidateAllParameters': 'true',
-            'WarnAsError': 'true',
-            'WarningLevel': '4'},
-        'Lib': {
-            'AdditionalDependencies': 'file1;file2;file3',
-            'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
-            'AdditionalOptions': 'a_string',
-            'ExportNamedFunctions': 'd1;d2;d3',
-            'ForceSymbolReferences': 'a_string',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
-            'ModuleDefinitionFile': 'a_file_name',
-            'OutputFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'UseUnicodeResponseFiles': 'true'},
-        'Manifest': {
-            'AdditionalManifestFiles': 'file1;file2;file3',
-            'AdditionalOptions': 'a_string',
-            'AssemblyIdentity': 'a_string',
-            'ComponentFileName': 'a_file_name',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'a_string',
-            'OutputManifestFile': 'a_file_name',
-            'RegistrarScriptFile': 'a_file_name',
-            'ReplacementsFile': 'a_file_name',
-            'SuppressStartupBanner': 'true',
-            'TypeLibraryFile': 'a_file_name',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'a_file_name',
-            'VerboseOutput': 'true'},
-        'ManifestResourceCompile': {
-            'ResourceOutputFileName': 'my_name'},
-        'ProjectReference': {
-            'LinkLibraryDependencies': 'true',
-            'UseLibraryDependencyInputs': 'false'},
-        '': {
-            'EmbedManifest': 'true',
-            'GenerateManifest': 'true',
-            'IgnoreImportLibrary': 'true',
-            'LinkIncremental': 'false'}}
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-  def testConvertToMSBuildSettings_actual(self):
-    """Tests the conversion of an actual project.
-
-    A VS2008 project with most of the options defined was created through the
-    VS2008 IDE.  It was then converted to VS2010.  The tool settings found in
-    the .vcproj and .vcxproj files were converted to the two dictionaries
-    msvs_settings and expected_msbuild_settings.
-
-    Note that for many settings, the VS2010 converter adds macros like
-    %(AdditionalIncludeDirectories) to make sure that inherited values are
-    included.  Since the Gyp projects we generate do not use inheritance,
-    we removed these macros.  They were:
-        ClCompile:
-            AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)'
-            AdditionalOptions:  ' %(AdditionalOptions)'
-            AdditionalUsingDirectories:  ';%(AdditionalUsingDirectories)'
-            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
-            ForcedIncludeFiles:  ';%(ForcedIncludeFiles)',
-            ForcedUsingFiles:  ';%(ForcedUsingFiles)',
-            PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
-            UndefinePreprocessorDefinitions:
-                ';%(UndefinePreprocessorDefinitions)',
-        Link:
-            AdditionalDependencies:  ';%(AdditionalDependencies)',
-            AdditionalLibraryDirectories:  ';%(AdditionalLibraryDirectories)',
-            AdditionalManifestDependencies:
-                ';%(AdditionalManifestDependencies)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            AddModuleNamesToAssembly:  ';%(AddModuleNamesToAssembly)',
-            AssemblyLinkResource:  ';%(AssemblyLinkResource)',
-            DelayLoadDLLs:  ';%(DelayLoadDLLs)',
-            EmbedManagedResourceFile:  ';%(EmbedManagedResourceFile)',
-            ForceSymbolReferences:  ';%(ForceSymbolReferences)',
-            IgnoreSpecificDefaultLibraries:
-                ';%(IgnoreSpecificDefaultLibraries)',
-        ResourceCompile:
-            AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
-        Manifest:
-            AdditionalManifestFiles:  ';%(AdditionalManifestFiles)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            InputResourceManifests:  ';%(InputResourceManifests)',
-    """
-    msvs_settings = {
-        'VCCLCompilerTool': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/more',
-            'AdditionalUsingDirectories': 'test',
-            'AssemblerListingLocation': '$(IntDir)\\a',
-            'AssemblerOutput': '1',
-            'BasicRuntimeChecks': '3',
-            'BrowseInformation': '1',
-            'BrowseInformationFile': '$(IntDir)\\e',
-            'BufferSecurityCheck': 'false',
-            'CallingConvention': '1',
-            'CompileAs': '1',
-            'DebugInformationFormat': '4',
-            'DefaultCharIsUnsigned': 'true',
-            'Detect64BitPortabilityProblems': 'true',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'abc',
-            'EnableEnhancedInstructionSet': '1',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnableFunctionLevelLinking': 'true',
-            'EnableIntrinsicFunctions': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': '2',
-            'ExceptionHandling': '2',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': '2',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': '1',
-            'ForceConformanceInForLoopScope': 'false',
-            'ForcedIncludeFiles': 'def',
-            'ForcedUsingFiles': 'ge',
-            'GeneratePreprocessedFile': '2',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': '1',
-            'KeepComments': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFile': '$(IntDir)\\b',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMP': 'true',
-            'Optimization': '3',
-            'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
-            'PrecompiledHeaderThrough': 'StdAfx.hd',
-            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
-            'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
-            'RuntimeLibrary': '3',
-            'RuntimeTypeInfo': 'false',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '3',
-            'SuppressStartupBanner': 'false',
-            'TreatWChar_tAsBuiltInType': 'false',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'wer',
-            'UseFullPaths': 'true',
-            'UsePrecompiledHeader': '0',
-            'UseUnicodeResponseFiles': 'false',
-            'WarnAsError': 'true',
-            'WarningLevel': '3',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': '$(IntDir)\\c'},
-        'VCLinkerTool': {
-            'AdditionalDependencies': 'zx',
-            'AdditionalLibraryDirectories': 'asd',
-            'AdditionalManifestDependencies': 's2',
-            'AdditionalOptions': '/mor2',
-            'AddModuleNamesToAssembly': 'd1',
-            'AllowIsolation': 'false',
-            'AssemblyDebug': '1',
-            'AssemblyLinkResource': 'd5',
-            'BaseAddress': '23423',
-            'CLRImageType': '3',
-            'CLRThreadAttribute': '1',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '0',
-            'DelayLoadDLLs': 'd4',
-            'DelaySign': 'true',
-            'Driver': '2',
-            'EmbedManagedResourceFile': 'd2',
-            'EnableCOMDATFolding': '1',
-            'EnableUAC': 'false',
-            'EntryPointSymbol': 'f5',
-            'ErrorReporting': '2',
-            'FixedBaseAddress': '1',
-            'ForceSymbolReferences': 'd3',
-            'FunctionOrder': 'fssdfsd',
-            'GenerateDebugInformation': 'true',
-            'GenerateManifest': 'false',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': '13',
-            'HeapReserveSize': '12',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreDefaultLibraryNames': 'flob;flok',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreImportLibrary': 'true',
-            'ImportLibrary': 'f4',
-            'KeyContainer': 'f7',
-            'KeyFile': 'f6',
-            'LargeAddressAware': '2',
-            'LinkIncremental': '0',
-            'LinkLibraryDependencies': 'false',
-            'LinkTimeCodeGeneration': '1',
-            'ManifestFile':
-            '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
-            'MapExports': 'true',
-            'MapFileName': 'd5',
-            'MergedIDLBaseFileName': 'f2',
-            'MergeSections': 'f5',
-            'MidlCommandFile': 'f1',
-            'ModuleDefinitionFile': 'sdsd',
-            'OptimizeForWindows98': '2',
-            'OptimizeReferences': '2',
-            'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
-            'ProgramDatabaseFile': 'Flob.pdb',
-            'RandomizedBaseAddress': '1',
-            'RegisterOutput': 'true',
-            'ResourceOnlyDLL': 'true',
-            'SetChecksum': 'false',
-            'ShowProgress': '1',
-            'StackCommitSize': '15',
-            'StackReserveSize': '14',
-            'StripPrivateSymbols': 'd3',
-            'SubSystem': '1',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'false',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNet': 'true',
-            'TargetMachine': '1',
-            'TerminalServerAware': '1',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'f3',
-            'TypeLibraryResourceID': '12',
-            'UACExecutionLevel': '2',
-            'UACUIAccess': 'true',
-            'UseLibraryDependencyInputs': 'true',
-            'UseUnicodeResponseFiles': 'false',
-            'Version': '333'},
-        'VCResourceCompilerTool': {
-            'AdditionalIncludeDirectories': 'f3',
-            'AdditionalOptions': '/more3',
-            'Culture': '3084',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
-            'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
-            'ShowProgress': 'true'},
-        'VCManifestTool': {
-            'AdditionalManifestFiles': 'sfsdfsd',
-            'AdditionalOptions': 'afdsdafsd',
-            'AssemblyIdentity': 'sddfdsadfsa',
-            'ComponentFileName': 'fsdfds',
-            'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
-            'EmbedManifest': 'false',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'asfsfdafs',
-            'ManifestResourceFile':
-            '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
-            'OutputManifestFile': '$(TargetPath).manifestdfs',
-            'RegistrarScriptFile': 'sdfsfd',
-            'ReplacementsFile': 'sdffsd',
-            'SuppressStartupBanner': 'false',
-            'TypeLibraryFile': 'sfsd',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'sfsd',
-            'UseFAT32Workaround': 'true',
-            'UseUnicodeResponseFiles': 'false',
-            'VerboseOutput': 'true'}}
-    expected_msbuild_settings = {
-        'ClCompile': {
-            'AdditionalIncludeDirectories': 'dir1',
-            'AdditionalOptions': '/more /J',
-            'AdditionalUsingDirectories': 'test',
-            'AssemblerListingLocation': '$(IntDir)a',
-            'AssemblerOutput': 'AssemblyCode',
-            'BasicRuntimeChecks': 'EnableFastChecks',
-            'BrowseInformation': 'true',
-            'BrowseInformationFile': '$(IntDir)e',
-            'BufferSecurityCheck': 'false',
-            'CallingConvention': 'FastCall',
-            'CompileAs': 'CompileAsC',
-            'DebugInformationFormat': 'EditAndContinue',
-            'DisableLanguageExtensions': 'true',
-            'DisableSpecificWarnings': 'abc',
-            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
-            'EnableFiberSafeOptimizations': 'true',
-            'EnablePREfast': 'true',
-            'ErrorReporting': 'Queue',
-            'ExceptionHandling': 'Async',
-            'ExpandAttributedSource': 'true',
-            'FavorSizeOrSpeed': 'Size',
-            'FloatingPointExceptions': 'true',
-            'FloatingPointModel': 'Strict',
-            'ForceConformanceInForLoopScope': 'false',
-            'ForcedIncludeFiles': 'def',
-            'ForcedUsingFiles': 'ge',
-            'FunctionLevelLinking': 'true',
-            'GenerateXMLDocumentationFiles': 'true',
-            'IgnoreStandardIncludePath': 'true',
-            'InlineFunctionExpansion': 'OnlyExplicitInline',
-            'IntrinsicFunctions': 'true',
-            'MinimalRebuild': 'true',
-            'ObjectFileName': '$(IntDir)b',
-            'OmitDefaultLibName': 'true',
-            'OmitFramePointers': 'true',
-            'OpenMPSupport': 'true',
-            'Optimization': 'Full',
-            'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
-            'PrecompiledHeaderFile': 'StdAfx.hd',
-            'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
-            'PreprocessKeepComments': 'true',
-            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
-            'PreprocessSuppressLineNumbers': 'true',
-            'PreprocessToFile': 'true',
-            'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
-            'RuntimeLibrary': 'MultiThreadedDebugDLL',
-            'RuntimeTypeInfo': 'false',
-            'ShowIncludes': 'true',
-            'SmallerTypeCheck': 'true',
-            'StringPooling': 'true',
-            'StructMemberAlignment': '4Bytes',
-            'SuppressStartupBanner': 'false',
-            'TreatWarningAsError': 'true',
-            'TreatWChar_tAsBuiltInType': 'false',
-            'UndefineAllPreprocessorDefinitions': 'true',
-            'UndefinePreprocessorDefinitions': 'wer',
-            'UseFullPaths': 'true',
-            'WarningLevel': 'Level3',
-            'WholeProgramOptimization': 'true',
-            'XMLDocumentationFileName': '$(IntDir)c'},
-        'Link': {
-            'AdditionalDependencies': 'zx',
-            'AdditionalLibraryDirectories': 'asd',
-            'AdditionalManifestDependencies': 's2',
-            'AdditionalOptions': '/mor2',
-            'AddModuleNamesToAssembly': 'd1',
-            'AllowIsolation': 'false',
-            'AssemblyDebug': 'true',
-            'AssemblyLinkResource': 'd5',
-            'BaseAddress': '23423',
-            'CLRImageType': 'ForceSafeILImage',
-            'CLRThreadAttribute': 'MTAThreadingAttribute',
-            'CLRUnmanagedCodeCheck': 'true',
-            'DataExecutionPrevention': '',
-            'DelayLoadDLLs': 'd4',
-            'DelaySign': 'true',
-            'Driver': 'UpOnly',
-            'EmbedManagedResourceFile': 'd2',
-            'EnableCOMDATFolding': 'false',
-            'EnableUAC': 'false',
-            'EntryPointSymbol': 'f5',
-            'FixedBaseAddress': 'false',
-            'ForceSymbolReferences': 'd3',
-            'FunctionOrder': 'fssdfsd',
-            'GenerateDebugInformation': 'true',
-            'GenerateMapFile': 'true',
-            'HeapCommitSize': '13',
-            'HeapReserveSize': '12',
-            'IgnoreAllDefaultLibraries': 'true',
-            'IgnoreEmbeddedIDL': 'true',
-            'IgnoreSpecificDefaultLibraries': 'flob;flok',
-            'ImportLibrary': 'f4',
-            'KeyContainer': 'f7',
-            'KeyFile': 'f6',
-            'LargeAddressAware': 'true',
-            'LinkErrorReporting': 'QueueForNextLogin',
-            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
-            'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
-            'MapExports': 'true',
-            'MapFileName': 'd5',
-            'MergedIDLBaseFileName': 'f2',
-            'MergeSections': 'f5',
-            'MidlCommandFile': 'f1',
-            'ModuleDefinitionFile': 'sdsd',
-            'NoEntryPoint': 'true',
-            'OptimizeReferences': 'true',
-            'OutputFile': '$(OutDir)$(ProjectName)2.exe',
-            'PerUserRedirection': 'true',
-            'Profile': 'true',
-            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
-            'ProgramDatabaseFile': 'Flob.pdb',
-            'RandomizedBaseAddress': 'false',
-            'RegisterOutput': 'true',
-            'SetChecksum': 'false',
-            'ShowProgress': 'LinkVerbose',
-            'StackCommitSize': '15',
-            'StackReserveSize': '14',
-            'StripPrivateSymbols': 'd3',
-            'SubSystem': 'Console',
-            'SupportUnloadOfDelayLoadedDLL': 'true',
-            'SuppressStartupBanner': 'false',
-            'SwapRunFromCD': 'true',
-            'SwapRunFromNET': 'true',
-            'TargetMachine': 'MachineX86',
-            'TerminalServerAware': 'false',
-            'TurnOffAssemblyGeneration': 'true',
-            'TypeLibraryFile': 'f3',
-            'TypeLibraryResourceID': '12',
-            'UACExecutionLevel': 'RequireAdministrator',
-            'UACUIAccess': 'true',
-            'Version': '333'},
-        'ResourceCompile': {
-            'AdditionalIncludeDirectories': 'f3',
-            'AdditionalOptions': '/more3',
-            'Culture': '0x0c0c',
-            'IgnoreStandardIncludePath': 'true',
-            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
-            'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
-            'ShowProgress': 'true'},
-        'Manifest': {
-            'AdditionalManifestFiles': 'sfsdfsd',
-            'AdditionalOptions': 'afdsdafsd',
-            'AssemblyIdentity': 'sddfdsadfsa',
-            'ComponentFileName': 'fsdfds',
-            'GenerateCatalogFiles': 'true',
-            'InputResourceManifests': 'asfsfdafs',
-            'OutputManifestFile': '$(TargetPath).manifestdfs',
-            'RegistrarScriptFile': 'sdfsfd',
-            'ReplacementsFile': 'sdffsd',
-            'SuppressStartupBanner': 'false',
-            'TypeLibraryFile': 'sfsd',
-            'UpdateFileHashes': 'true',
-            'UpdateFileHashesSearchPath': 'sfsd',
-            'VerboseOutput': 'true'},
-        'ProjectReference': {
-            'LinkLibraryDependencies': 'false',
-            'UseLibraryDependencyInputs': 'true'},
-        '': {
-            'EmbedManifest': 'false',
-            'GenerateManifest': 'false',
-            'IgnoreImportLibrary': 'true',
-            'LinkIncremental': ''
-            },
-        'ManifestResourceCompile': {
-            'ResourceOutputFileName':
-            '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
-        }
-    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
-        msvs_settings,
-        self.stderr)
-    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
-    self._ExpectedWarnings([])
-
-
-if __name__ == '__main__':
-  unittest.main()
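
The expected dictionaries above pin down how ConvertToMSBuildSettings renames each tool (VCCLCompilerTool becomes ClCompile, VCLinkerTool becomes Link, and so on) and maps MSVS enum-style numeric strings onto MSBuild's named values. A minimal sketch of that enum-mapping idea, using only value pairs taken from the test data; this is not the gyp implementation, which also validates settings and emits the warnings checked by _ExpectedWarnings:

    # Illustrative only: the value pairs come from the test data above; the
    # real MSVSSettings module covers every tool and setting.
    _CL_ENUM_MAPS = {
        'RuntimeLibrary': {'3': 'MultiThreadedDebugDLL'},
        'WarningLevel': {'3': 'Level3'},
        'StructMemberAlignment': {'3': '4Bytes'},
    }

    def convert_cl_setting(name, value):
        """Map one VCCLCompilerTool value onto its ClCompile spelling."""
        return _CL_ENUM_MAPS.get(name, {}).get(value, value)

    assert convert_cl_setting('RuntimeLibrary', '3') == 'MultiThreadedDebugDLL'
    assert convert_cl_setting('ShowIncludes', 'true') == 'true'
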
diff --git a/tools/gyp/pylib/gyp/MSVSToolFile.py b/tools/gyp/pylib/gyp/MSVSToolFile.py
deleted file mode 100644
index 74e529a..0000000
--- a/tools/gyp/pylib/gyp/MSVSToolFile.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-class Writer(object):
-  """Visual Studio XML tool file writer."""
-
-  def __init__(self, tool_file_path, name):
-    """Initializes the tool file.
-
-    Args:
-      tool_file_path: Path to the tool file.
-      name: Name of the tool file.
-    """
-    self.tool_file_path = tool_file_path
-    self.name = name
-    self.rules_section = ['Rules']
-
-  def AddCustomBuildRule(self, name, cmd, description,
-                         additional_dependencies,
-                         outputs, extensions):
-    """Adds a rule to the tool file.
-
-    Args:
-      name: Name of the rule.
-      description: Description of the rule.
-      cmd: Command line of the rule.
-      additional_dependencies: other files which may trigger the rule.
-      outputs: outputs of the rule.
-      extensions: extensions handled by the rule.
-    """
-    rule = ['CustomBuildRule',
-            {'Name': name,
-             'ExecutionDescription': description,
-             'CommandLine': cmd,
-             'Outputs': ';'.join(outputs),
-             'FileExtensions': ';'.join(extensions),
-             'AdditionalDependencies':
-                 ';'.join(additional_dependencies)
-            }]
-    self.rules_section.append(rule)
-
-  def WriteIfChanged(self):
-    """Writes the tool file."""
-    content = ['VisualStudioToolFile',
-               {'Version': '8.00',
-                'Name': self.name
-               },
-               self.rules_section
-               ]
-    easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
-                               encoding="Windows-1252")
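
The Writer above never touches XML syntax directly: it builds nested ['Tag', {attributes}, child, ...] lists and hands them to gyp.easy_xml. A self-contained sketch of that convention with a throwaway serializer; the rule name and command below are hypothetical, and gyp.easy_xml itself additionally handles pretty-printing, encodings, and write-if-changed behaviour:

    from xml.sax.saxutils import quoteattr

    def to_xml(spec):
        """Serialize a ['Tag', {attrs}, child, ...] list into an XML string."""
        tag, rest = spec[0], list(spec[1:])
        attrs = rest.pop(0) if rest and isinstance(rest[0], dict) else {}
        attr_text = ''.join(' %s=%s' % (k, quoteattr(v))
                            for k, v in sorted(attrs.items()))
        return '<%s%s>%s</%s>' % (
            tag, attr_text, ''.join(to_xml(c) for c in rest), tag)

    # Shape of the data the deleted Writer produces (values are made up).
    rule = ['CustomBuildRule', {'Name': 'lex', 'CommandLine': 'flex "$(InputPath)"'}]
    print(to_xml(['VisualStudioToolFile',
                  {'Version': '8.00', 'Name': 'demo_rules'},
                  ['Rules', rule]]))
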
diff --git a/tools/gyp/pylib/gyp/MSVSUserFile.py b/tools/gyp/pylib/gyp/MSVSUserFile.py
deleted file mode 100644
index 6c07e9a..0000000
--- a/tools/gyp/pylib/gyp/MSVSUserFile.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio user preferences file writer."""
-
-import os
-import re
-import socket # for gethostname
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-#------------------------------------------------------------------------------
-
-def _FindCommandInPath(command):
-  """If there are no slashes in the command given, this function
-     searches the PATH env to find the given command, and converts it
-     to an absolute path.  We have to do this because MSVS is looking
-     for an actual file to launch a debugger on, not just a command
-     line.  Note that this happens at GYP time, so anything needing to
-     be built needs to have a full path."""
-  if '/' in command or '\\' in command:
-    # If the command already has path elements (either relative or
-    # absolute), then assume it is constructed properly.
-    return command
-  else:
-    # Search through the path list and find an existing file that
-    # we can access.
-    paths = os.environ.get('PATH','').split(os.pathsep)
-    for path in paths:
-      item = os.path.join(path, command)
-      if os.path.isfile(item) and os.access(item, os.X_OK):
-        return item
-  return command
-
-def _QuoteWin32CommandLineArgs(args):
-  new_args = []
-  for arg in args:
-    # Replace all double-quotes with double-double-quotes to escape
-    # them for cmd shell, and then quote the whole thing if there
-    # are any.
-    if arg.find('"') != -1:
-      arg = '""'.join(arg.split('"'))
-      arg = '"%s"' % arg
-
-    # Otherwise, if there are any spaces, quote the whole arg.
-    elif re.search(r'[ \t\n]', arg):
-      arg = '"%s"' % arg
-    new_args.append(arg)
-  return new_args
-
-class Writer(object):
-  """Visual Studio XML user user file writer."""
-
-  def __init__(self, user_file_path, version, name):
-    """Initializes the user file.
-
-    Args:
-      user_file_path: Path to the user file.
-      version: Version info.
-      name: Name of the user file.
-    """
-    self.user_file_path = user_file_path
-    self.version = version
-    self.name = name
-    self.configurations = {}
-
-  def AddConfig(self, name):
-    """Adds a configuration to the project.
-
-    Args:
-      name: Configuration name.
-    """
-    self.configurations[name] = ['Configuration', {'Name': name}]
-
-  def AddDebugSettings(self, config_name, command, environment = {},
-                       working_directory=""):
-    """Adds a DebugSettings node to the user file for a particular config.
-
-    Args:
-      command: command line to run.  First element in the list is the
-        executable.  All elements of the command will be quoted if
-        necessary.
-      working_directory: directory in which to run the command. (optional)
-    """
-    command = _QuoteWin32CommandLineArgs(command)
-
-    abs_command = _FindCommandInPath(command[0])
-
-    if environment and isinstance(environment, dict):
-      env_list = ['%s="%s"' % (key, val)
-                  for (key,val) in environment.iteritems()]
-      environment = ' '.join(env_list)
-    else:
-      environment = ''
-
-    n_cmd = ['DebugSettings',
-             {'Command': abs_command,
-              'WorkingDirectory': working_directory,
-              'CommandArguments': " ".join(command[1:]),
-              'RemoteMachine': socket.gethostname(),
-              'Environment': environment,
-              'EnvironmentMerge': 'true',
-              # Currently these are all "dummy" values that we're just setting
-              # in the default manner that MSVS does it.  We could use some of
-              # these to add additional capabilities, I suppose, but they might
-              # not have parity with other platforms then.
-              'Attach': 'false',
-              'DebuggerType': '3',  # 'auto' debugger
-              'Remote': '1',
-              'RemoteCommand': '',
-              'HttpUrl': '',
-              'PDBPath': '',
-              'SQLDebugging': '',
-              'DebuggerFlavor': '0',
-              'MPIRunCommand': '',
-              'MPIRunArguments': '',
-              'MPIRunWorkingDirectory': '',
-              'ApplicationCommand': '',
-              'ApplicationArguments': '',
-              'ShimCommand': '',
-              'MPIAcceptMode': '',
-              'MPIAcceptFilter': ''
-             }]
-
-    # Find the config, and add it if it doesn't exist.
-    if config_name not in self.configurations:
-      self.AddConfig(config_name)
-
-    # Add the DebugSettings onto the appropriate config.
-    self.configurations[config_name].append(n_cmd)
-
-  def WriteIfChanged(self):
-    """Writes the user file."""
-    configs = ['Configurations']
-    for config, spec in sorted(self.configurations.iteritems()):
-      configs.append(spec)
-
-    content = ['VisualStudioUserFile',
-               {'Version': self.version.ProjectVersion(),
-                'Name': self.name
-               },
-               configs]
-    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
-                               encoding="Windows-1252")
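
Most of the subtlety in this file is the cmd-style argument quoting that _QuoteWin32CommandLineArgs applies before the command is written into the DebugSettings node. A compact restatement of its two rules, for illustration only; the deleted code applies the same logic element by element:

    import re

    def quote_win32_arg(arg):
        """Embedded double quotes are doubled and force quoting; otherwise
        whitespace alone is enough to trigger quoting."""
        if '"' in arg:
            return '"%s"' % arg.replace('"', '""')
        if re.search(r'[ \t\n]', arg):
            return '"%s"' % arg
        return arg

    assert quote_win32_arg('plain') == 'plain'
    assert quote_win32_arg('two words') == '"two words"'
    assert quote_win32_arg('say "hi"') == '"say ""hi"""'
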
diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py
deleted file mode 100644
index f5e0c1d..0000000
--- a/tools/gyp/pylib/gyp/MSVSUtil.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions shared amongst the Windows generators."""
-
-import copy
-import os
-
-
-# A dictionary mapping supported target types to extensions.
-TARGET_TYPE_EXT = {
-  'executable': 'exe',
-  'loadable_module': 'dll',
-  'shared_library': 'dll',
-  'static_library': 'lib',
-}
-
-
-def _GetLargePdbShimCcPath():
-  """Returns the path of the large_pdb_shim.cc file."""
-  this_dir = os.path.abspath(os.path.dirname(__file__))
-  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
-  win_data_dir = os.path.join(src_dir, 'data', 'win')
-  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
-  return large_pdb_shim_cc
-
-
-def _DeepCopySomeKeys(in_dict, keys):
-  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
-
-  Arguments:
-    in_dict: The dictionary to copy.
-    keys: The keys to be copied. If a key is in this list and doesn't exist in
-        |in_dict| this is not an error.
-  Returns:
-    The partially deep-copied dictionary.
-  """
-  d = {}
-  for key in keys:
-    if key not in in_dict:
-      continue
-    d[key] = copy.deepcopy(in_dict[key])
-  return d
-
-
-def _SuffixName(name, suffix):
-  """Add a suffix to the end of a target.
-
-  Arguments:
-    name: name of the target (foo#target)
-    suffix: the suffix to be added
-  Returns:
-    Target name with suffix added (foo_suffix#target)
-  """
-  parts = name.rsplit('#', 1)
-  parts[0] = '%s_%s' % (parts[0], suffix)
-  return '#'.join(parts)
-
-
-def _ShardName(name, number):
-  """Add a shard number to the end of a target.
-
-  Arguments:
-    name: name of the target (foo#target)
-    number: shard number
-  Returns:
-    Target name with shard added (foo_1#target)
-  """
-  return _SuffixName(name, str(number))
-
-
-def ShardTargets(target_list, target_dicts):
-  """Shard some targets apart to work around the linkers limits.
-
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-  Returns:
-    Tuple of the new sharded versions of the inputs.
-  """
-  # Gather the targets to shard, and how many pieces.
-  targets_to_shard = {}
-  for t in target_dicts:
-    shards = int(target_dicts[t].get('msvs_shard', 0))
-    if shards:
-      targets_to_shard[t] = shards
-  # Shard target_list.
-  new_target_list = []
-  for t in target_list:
-    if t in targets_to_shard:
-      for i in range(targets_to_shard[t]):
-        new_target_list.append(_ShardName(t, i))
-    else:
-      new_target_list.append(t)
-  # Shard target_dict.
-  new_target_dicts = {}
-  for t in target_dicts:
-    if t in targets_to_shard:
-      for i in range(targets_to_shard[t]):
-        name = _ShardName(t, i)
-        new_target_dicts[name] = copy.copy(target_dicts[t])
-        new_target_dicts[name]['target_name'] = _ShardName(
-             new_target_dicts[name]['target_name'], i)
-        sources = new_target_dicts[name].get('sources', [])
-        new_sources = []
-        for pos in range(i, len(sources), targets_to_shard[t]):
-          new_sources.append(sources[pos])
-        new_target_dicts[name]['sources'] = new_sources
-    else:
-      new_target_dicts[t] = target_dicts[t]
-  # Shard dependencies.
-  for t in sorted(new_target_dicts):
-    for deptype in ('dependencies', 'dependencies_original'):
-      dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
-      new_dependencies = []
-      for d in dependencies:
-        if d in targets_to_shard:
-          for i in range(targets_to_shard[d]):
-            new_dependencies.append(_ShardName(d, i))
-        else:
-          new_dependencies.append(d)
-      new_target_dicts[t][deptype] = new_dependencies
-
-  return (new_target_list, new_target_dicts)
-
-
-def _GetPdbPath(target_dict, config_name, vars):
-  """Returns the path to the PDB file that will be generated by a given
-  configuration.
-
-  The lookup proceeds as follows:
-    - Look for an explicit path in the VCLinkerTool configuration block.
-    - Look for an 'msvs_large_pdb_path' variable.
-    - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
-      specified.
-    - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
-
-  Arguments:
-    target_dict: The target dictionary to be searched.
-    config_name: The name of the configuration of interest.
-    vars: A dictionary of common GYP variables with generator-specific values.
-  Returns:
-    The path of the corresponding PDB file.
-  """
-  config = target_dict['configurations'][config_name]
-  msvs = config.setdefault('msvs_settings', {})
-
-  linker = msvs.get('VCLinkerTool', {})
-
-  pdb_path = linker.get('ProgramDatabaseFile')
-  if pdb_path:
-    return pdb_path
-
-  variables = target_dict.get('variables', {})
-  pdb_path = variables.get('msvs_large_pdb_path', None)
-  if pdb_path:
-    return pdb_path
-
-
-  pdb_base = target_dict.get('product_name', target_dict['target_name'])
-  pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
-  pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
-
-  return pdb_path
-
-
-def InsertLargePdbShims(target_list, target_dicts, vars):
-  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
-
-  This is a workaround for targets with PDBs greater than 1GB in size, the
-  limit for the 1KB pagesize PDBs created by the linker by default.
-
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    vars: A dictionary of common GYP variables with generator-specific values.
-  Returns:
-    Tuple of the shimmed version of the inputs.
-  """
-  # Determine which targets need shimming.
-  targets_to_shim = []
-  for t in target_dicts:
-    target_dict = target_dicts[t]
-
-    # We only want to shim targets that have msvs_large_pdb enabled.
-    if not int(target_dict.get('msvs_large_pdb', 0)):
-      continue
-    # This is intended for executable, shared_library and loadable_module
-    # targets where every configuration is set up to produce a PDB output.
-    # If any of these conditions is not true then the shim logic will fail
-    # below.
-    targets_to_shim.append(t)
-
-  large_pdb_shim_cc = _GetLargePdbShimCcPath()
-
-  for t in targets_to_shim:
-    target_dict = target_dicts[t]
-    target_name = target_dict.get('target_name')
-
-    base_dict = _DeepCopySomeKeys(target_dict,
-          ['configurations', 'default_configuration', 'toolset'])
-
-    # This is the dict for copying the source file (part of the GYP tree)
-    # to the intermediate directory of the project. This is necessary because
-    # we can't always build a relative path to the shim source file (on Windows
-    # GYP and the project may be on different drives), and Ninja hates absolute
-    # paths (it ends up generating the .obj and .obj.d alongside the source
-    # file, polluting GYP's tree).
-    copy_suffix = 'large_pdb_copy'
-    copy_target_name = target_name + '_' + copy_suffix
-    full_copy_target_name = _SuffixName(t, copy_suffix)
-    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
-    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
-    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
-    copy_dict = copy.deepcopy(base_dict)
-    copy_dict['target_name'] = copy_target_name
-    copy_dict['type'] = 'none'
-    copy_dict['sources'] = [ large_pdb_shim_cc ]
-    copy_dict['copies'] = [{
-      'destination': shim_cc_dir,
-      'files': [ large_pdb_shim_cc ]
-    }]
-
-    # This is the dict for the PDB generating shim target. It depends on the
-    # copy target.
-    shim_suffix = 'large_pdb_shim'
-    shim_target_name = target_name + '_' + shim_suffix
-    full_shim_target_name = _SuffixName(t, shim_suffix)
-    shim_dict = copy.deepcopy(base_dict)
-    shim_dict['target_name'] = shim_target_name
-    shim_dict['type'] = 'static_library'
-    shim_dict['sources'] = [ shim_cc_path ]
-    shim_dict['dependencies'] = [ full_copy_target_name ]
-
-    # Set up the shim to output its PDB to the same location as the final linker
-    # target.
-    for config_name, config in shim_dict.get('configurations').iteritems():
-      pdb_path = _GetPdbPath(target_dict, config_name, vars)
-
-      # A few keys that we don't want to propagate.
-      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
-        config.pop(key, None)
-
-      msvs = config.setdefault('msvs_settings', {})
-
-      # Update the compiler directives in the shim target.
-      compiler = msvs.setdefault('VCCLCompilerTool', {})
-      compiler['DebugInformationFormat'] = '3'
-      compiler['ProgramDataBaseFileName'] = pdb_path
-
-      # Set the explicit PDB path in the appropriate configuration of the
-      # original target.
-      config = target_dict['configurations'][config_name]
-      msvs = config.setdefault('msvs_settings', {})
-      linker = msvs.setdefault('VCLinkerTool', {})
-      linker['GenerateDebugInformation'] = 'true'
-      linker['ProgramDatabaseFile'] = pdb_path
-
-    # Add the new targets. They must go to the beginning of the list so that
-    # the dependency generation works as expected in ninja.
-    target_list.insert(0, full_copy_target_name)
-    target_list.insert(0, full_shim_target_name)
-    target_dicts[full_copy_target_name] = copy_dict
-    target_dicts[full_shim_target_name] = shim_dict
-
-    # Update the original target to depend on the shim target.
-    target_dict.setdefault('dependencies', []).append(full_shim_target_name)
-
-  return (target_list, target_dicts)
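
Two conventions above are worth keeping in mind when reading the shard and shim logic: target names carry a '#target' suffix that _SuffixName preserves, and ShardTargets splits a target's sources round-robin across the shards. A minimal sketch of both, with made-up target and file names:

    def suffix_name(name, suffix):
        """'foo#target' + 'bar' -> 'foo_bar#target', as in _SuffixName above."""
        parts = name.rsplit('#', 1)
        parts[0] = '%s_%s' % (parts[0], suffix)
        return '#'.join(parts)

    def shard_sources(sources, shards):
        """Round-robin split used by ShardTargets when distributing sources."""
        return [sources[i::shards] for i in range(shards)]

    assert suffix_name('base#target', 'large_pdb_shim') == 'base_large_pdb_shim#target'
    assert shard_sources(['a.cc', 'b.cc', 'c.cc', 'd.cc'], 2) == [
        ['a.cc', 'c.cc'], ['b.cc', 'd.cc']]
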
diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py
deleted file mode 100644
index edaf6ee..0000000
--- a/tools/gyp/pylib/gyp/MSVSVersion.py
+++ /dev/null
@@ -1,453 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Handle version information related to Visual Stuio."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-import gyp
-import glob
-
-
-class VisualStudioVersion(object):
-  """Information regarding a version of Visual Studio."""
-
-  def __init__(self, short_name, description,
-               solution_version, project_version, flat_sln, uses_vcxproj,
-               path, sdk_based, default_toolset=None):
-    self.short_name = short_name
-    self.description = description
-    self.solution_version = solution_version
-    self.project_version = project_version
-    self.flat_sln = flat_sln
-    self.uses_vcxproj = uses_vcxproj
-    self.path = path
-    self.sdk_based = sdk_based
-    self.default_toolset = default_toolset
-
-  def ShortName(self):
-    return self.short_name
-
-  def Description(self):
-    """Get the full description of the version."""
-    return self.description
-
-  def SolutionVersion(self):
-    """Get the version number of the sln files."""
-    return self.solution_version
-
-  def ProjectVersion(self):
-    """Get the version number of the vcproj or vcxproj files."""
-    return self.project_version
-
-  def FlatSolution(self):
-    return self.flat_sln
-
-  def UsesVcxproj(self):
-    """Returns true if this version uses a vcxproj file."""
-    return self.uses_vcxproj
-
-  def ProjectExtension(self):
-    """Returns the file extension for the project."""
-    return self.uses_vcxproj and '.vcxproj' or '.vcproj'
-
-  def Path(self):
-    """Returns the path to Visual Studio installation."""
-    return self.path
-
-  def ToolPath(self, tool):
-    """Returns the path to a given compiler tool. """
-    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
-
-  def DefaultToolset(self):
-    """Returns the msbuild toolset version that will be used in the absence
-    of a user override."""
-    return self.default_toolset
-
-  def _SetupScriptInternal(self, target_arch):
-    """Returns a command (with arguments) to be used to set up the
-    environment."""
-    # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
-    # depot_tools build tools and should run SetEnv.Cmd to set up the
-    # environment. The check for WindowsSDKDir alone is not sufficient because
-    # this is set by running vcvarsall.bat.
-    assert target_arch in ('x86', 'x64')
-    sdk_dir = os.environ.get('WindowsSDKDir')
-    if sdk_dir:
-      setup_path = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd'))
-    if self.sdk_based and sdk_dir and os.path.exists(setup_path):
-      return [setup_path, '/' + target_arch]
-    else:
-      # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
-      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
-      # isn't always.
-      if target_arch == 'x86':
-        if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
-            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
-            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
-          # VS2013 and later, non-Express have a x64-x86 cross that we want
-          # to prefer.
-          return [os.path.normpath(
-             os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
-        # Otherwise, the standard x86 compiler.
-        return [os.path.normpath(
-          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
-      else:
-        assert target_arch == 'x64'
-        arg = 'x86_amd64'
-        # Use the 64-on-64 compiler if we're not using an express
-        # edition and we're running on a 64bit OS.
-        if self.short_name[-1] != 'e' and (
-            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
-            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
-          arg = 'amd64'
-        return [os.path.normpath(
-            os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
-
-  def SetupScript(self, target_arch):
-    script_data = self._SetupScriptInternal(target_arch)
-    script_path = script_data[0]
-    if not os.path.exists(script_path):
-      raise Exception('%s is missing - make sure VC++ tools are installed.' %
-                      script_path)
-    return script_data
-
-
-def _RegistryQueryBase(sysdir, key, value):
-  """Use reg.exe to read a particular key.
-
-  While ideally we might use the win32 module, we would like gyp to be
-  Python-neutral; cygwin Python, for instance, lacks this module.
-
-  Arguments:
-    sysdir: The system subdirectory to attempt to launch reg.exe from.
-    key: The registry key to read from.
-    value: The particular value to read.
-  Return:
-    stdout from reg.exe, or None for failure.
-  """
-  # Skip if not on Windows, or if there is a Python Win32 setup issue.
-  if sys.platform not in ('win32', 'cygwin'):
-    return None
-  # Setup params to pass to and attempt to launch reg.exe
-  cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
-         'query', key]
-  if value:
-    cmd.extend(['/v', value])
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-  # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
-  # Note that the error text may be in [1] in some cases
-  text = p.communicate()[0]
-  # Check return code from reg.exe; officially 0==success and 1==error
-  if p.returncode:
-    return None
-  return text
-
-
-def _RegistryQuery(key, value=None):
-  r"""Use reg.exe to read a particular key through _RegistryQueryBase.
-
-  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
-  that fails, it falls back to System32.  Sysnative is available on Vista and
-  up, and on Windows Server 2003 and XP through KB patch 942589. Note
-  that Sysnative will always fail if using 64-bit python due to it being a
-  virtual directory and System32 will work correctly in the first place.
-
-  KB 942589 - http://support.microsoft.com/kb/942589/en-us.
-
-  Arguments:
-    key: The registry key.
-    value: The particular registry value to read (optional).
-  Return:
-    stdout from reg.exe, or None for failure.
-  """
-  text = None
-  try:
-    text = _RegistryQueryBase('Sysnative', key, value)
-  except OSError, e:
-    if e.errno == errno.ENOENT:
-      text = _RegistryQueryBase('System32', key, value)
-    else:
-      raise
-  return text
-
-
-def _RegistryGetValueUsingWinReg(key, value):
-  """Use the _winreg module to obtain the value of a registry key.
-
-  Args:
-    key: The registry key.
-    value: The particular registry value to read.
-  Return:
-    contents of the registry key's value, or None on failure.  Throws
-    ImportError if _winreg is unavailable.
-  """
-  import _winreg
-  try:
-    root, subkey = key.split('\\', 1)
-    assert root == 'HKLM'  # Only need HKLM for now.
-    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
-      return _winreg.QueryValueEx(hkey, value)[0]
-  except WindowsError:
-    return None
-
-
-def _RegistryGetValue(key, value):
-  """Use _winreg or reg.exe to obtain the value of a registry key.
-
-  Using _winreg is preferable because it solves an issue on some corporate
-  environments where access to reg.exe is locked down. However, we still need
-  to fallback to reg.exe for the case where the _winreg module is not available
-  (for example in cygwin python).
-
-  Args:
-    key: The registry key.
-    value: The particular registry value to read.
-  Return:
-    contents of the registry key's value, or None on failure.
-  """
-  try:
-    return _RegistryGetValueUsingWinReg(key, value)
-  except ImportError:
-    pass
-
-  # Fallback to reg.exe if we fail to import _winreg.
-  text = _RegistryQuery(key, value)
-  if not text:
-    return None
-  # Extract value.
-  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
-  if not match:
-    return None
-  return match.group(1)
-
-
-def _CreateVersion(name, path, sdk_based=False):
-  """Sets up MSVS project generation.
-
-  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
-  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
-  passed in that doesn't match a value in versions, Python will throw an error.
-  """
-  if path:
-    path = os.path.normpath(path)
-  versions = {
-      '2015': VisualStudioVersion('2015',
-                                  'Visual Studio 2015',
-                                  solution_version='12.00',
-                                  project_version='14.0',
-                                  flat_sln=False,
-                                  uses_vcxproj=True,
-                                  path=path,
-                                  sdk_based=sdk_based,
-                                  default_toolset='v140'),
-      '2013': VisualStudioVersion('2013',
-                                  'Visual Studio 2013',
-                                  solution_version='13.00',
-                                  project_version='12.0',
-                                  flat_sln=False,
-                                  uses_vcxproj=True,
-                                  path=path,
-                                  sdk_based=sdk_based,
-                                  default_toolset='v120'),
-      '2013e': VisualStudioVersion('2013e',
-                                   'Visual Studio 2013',
-                                   solution_version='13.00',
-                                   project_version='12.0',
-                                   flat_sln=True,
-                                   uses_vcxproj=True,
-                                   path=path,
-                                   sdk_based=sdk_based,
-                                   default_toolset='v120'),
-      '2012': VisualStudioVersion('2012',
-                                  'Visual Studio 2012',
-                                  solution_version='12.00',
-                                  project_version='4.0',
-                                  flat_sln=False,
-                                  uses_vcxproj=True,
-                                  path=path,
-                                  sdk_based=sdk_based,
-                                  default_toolset='v110'),
-      '2012e': VisualStudioVersion('2012e',
-                                   'Visual Studio 2012',
-                                   solution_version='12.00',
-                                   project_version='4.0',
-                                   flat_sln=True,
-                                   uses_vcxproj=True,
-                                   path=path,
-                                   sdk_based=sdk_based,
-                                   default_toolset='v110'),
-      '2010': VisualStudioVersion('2010',
-                                  'Visual Studio 2010',
-                                  solution_version='11.00',
-                                  project_version='4.0',
-                                  flat_sln=False,
-                                  uses_vcxproj=True,
-                                  path=path,
-                                  sdk_based=sdk_based),
-      '2010e': VisualStudioVersion('2010e',
-                                   'Visual C++ Express 2010',
-                                   solution_version='11.00',
-                                   project_version='4.0',
-                                   flat_sln=True,
-                                   uses_vcxproj=True,
-                                   path=path,
-                                   sdk_based=sdk_based),
-      '2008': VisualStudioVersion('2008',
-                                  'Visual Studio 2008',
-                                  solution_version='10.00',
-                                  project_version='9.00',
-                                  flat_sln=False,
-                                  uses_vcxproj=False,
-                                  path=path,
-                                  sdk_based=sdk_based),
-      '2008e': VisualStudioVersion('2008e',
-                                   'Visual Studio 2008',
-                                   solution_version='10.00',
-                                   project_version='9.00',
-                                   flat_sln=True,
-                                   uses_vcxproj=False,
-                                   path=path,
-                                   sdk_based=sdk_based),
-      '2005': VisualStudioVersion('2005',
-                                  'Visual Studio 2005',
-                                  solution_version='9.00',
-                                  project_version='8.00',
-                                  flat_sln=False,
-                                  uses_vcxproj=False,
-                                  path=path,
-                                  sdk_based=sdk_based),
-      '2005e': VisualStudioVersion('2005e',
-                                   'Visual Studio 2005',
-                                   solution_version='9.00',
-                                   project_version='8.00',
-                                   flat_sln=True,
-                                   uses_vcxproj=False,
-                                   path=path,
-                                   sdk_based=sdk_based),
-  }
-  return versions[str(name)]
-
-
-def _ConvertToCygpath(path):
-  """Convert to cygwin path if we are using cygwin."""
-  if sys.platform == 'cygwin':
-    p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
-    path = p.communicate()[0].strip()
-  return path
-
-
-def _DetectVisualStudioVersions(versions_to_check, force_express):
-  """Collect the list of installed visual studio versions.
-
-  Returns:
-    A list of visual studio versions installed in descending order of
-    usage preference.
-    Base this on the registry and a quick check if devenv.exe exists.
-    Only versions 8-14 are considered.
-    Possibilities are:
-      2005(e) - Visual Studio 2005 (8)
-      2008(e) - Visual Studio 2008 (9)
-      2010(e) - Visual Studio 2010 (10)
-      2012(e) - Visual Studio 2012 (11)
-      2013(e) - Visual Studio 2013 (12)
-      2015    - Visual Studio 2015 (14)
-    Where (e) is e for express editions of MSVS and blank otherwise.
-  """
-  version_to_year = {
-      '8.0': '2005',
-      '9.0': '2008',
-      '10.0': '2010',
-      '11.0': '2012',
-      '12.0': '2013',
-      '14.0': '2015',
-  }
-  versions = []
-  for version in versions_to_check:
-    # Old method of searching for which VS version is installed
-    # We don't use the 2010-encouraged-way because we also want to get the
-    # path to the binaries, which it doesn't offer.
-    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
-            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
-            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
-            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
-    for index in range(len(keys)):
-      path = _RegistryGetValue(keys[index], 'InstallDir')
-      if not path:
-        continue
-      path = _ConvertToCygpath(path)
-      # Check for full.
-      full_path = os.path.join(path, 'devenv.exe')
-      express_path = os.path.join(path, '*express.exe')
-      if not force_express and os.path.exists(full_path):
-        # Add this one.
-        versions.append(_CreateVersion(version_to_year[version],
-            os.path.join(path, '..', '..')))
-      # Check for express.
-      elif glob.glob(express_path):
-        # Add this one.
-        versions.append(_CreateVersion(version_to_year[version] + 'e',
-            os.path.join(path, '..', '..')))
-
-    # The old method above does not work when only SDK is installed.
-    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
-            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
-    for index in range(len(keys)):
-      path = _RegistryGetValue(keys[index], version)
-      if not path:
-        continue
-      path = _ConvertToCygpath(path)
-      if version != '14.0':  # There is no Express edition for 2015.
-        versions.append(_CreateVersion(version_to_year[version] + 'e',
-            os.path.join(path, '..'), sdk_based=True))
-
-  return versions
-
-
-def SelectVisualStudioVersion(version='auto', allow_fallback=True):
-  """Select which version of Visual Studio projects to generate.
-
-  Arguments:
-    version: Hook to allow caller to force a particular version (vs auto).
-  Returns:
-    An object representing a visual studio project format version.
-  """
-  # In auto mode, check environment variable for override.
-  if version == 'auto':
-    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
-  version_map = {
-    'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
-    '2005': ('8.0',),
-    '2005e': ('8.0',),
-    '2008': ('9.0',),
-    '2008e': ('9.0',),
-    '2010': ('10.0',),
-    '2010e': ('10.0',),
-    '2012': ('11.0',),
-    '2012e': ('11.0',),
-    '2013': ('12.0',),
-    '2013e': ('12.0',),
-    '2015': ('14.0',),
-  }
-  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
-  if override_path:
-    msvs_version = os.environ.get('GYP_MSVS_VERSION')
-    if not msvs_version:
-      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
-                       'set to a particular version (e.g. 2010e).')
-    return _CreateVersion(msvs_version, override_path, sdk_based=True)
-  version = str(version)
-  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
-  if not versions:
-    if not allow_fallback:
-      raise ValueError('Could not locate Visual Studio installation.')
-    if version == 'auto':
-      # Default to 2005 if we couldn't find anything
-      return _CreateVersion('2005', None)
-    else:
-      return _CreateVersion(version, None)
-  return versions[0]
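
Where the _winreg module is unavailable, _RegistryGetValue above falls back to scraping the output of reg.exe query with a regular expression. A small self-contained demonstration of that extraction; the sample output and the install path are hypothetical, but follow reg.exe's usual column layout:

    import re

    # Hypothetical reg.exe output; only the layout matters here.
    sample = ('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\VisualStudio\\14.0\r\n'
              '    InstallDir    REG_SZ    C:\\VS14\\Common7\\IDE\\\r\n')

    # Same pattern as the deleted fallback in _RegistryGetValue.
    match = re.search(r'REG_\w+\s+([^\r]+)\r\n', sample)
    assert match.group(1) == 'C:\\VS14\\Common7\\IDE\\'
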
diff --git a/tools/gyp/pylib/gyp/__init__.py b/tools/gyp/pylib/gyp/__init__.py
deleted file mode 100755
index 668f38b..0000000
--- a/tools/gyp/pylib/gyp/__init__.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import gyp.input
-import optparse
-import os.path
-import re
-import shlex
-import sys
-import traceback
-from gyp.common import GypError
-
-# Default debug modes for GYP
-debug = {}
-
-# List of "official" debug modes, but you can use anything you like.
-DEBUG_GENERAL = 'general'
-DEBUG_VARIABLES = 'variables'
-DEBUG_INCLUDES = 'includes'
-
-
-def DebugOutput(mode, message, *args):
-  if 'all' in gyp.debug or mode in gyp.debug:
-    ctx = ('unknown', 0, 'unknown')
-    try:
-      f = traceback.extract_stack(limit=2)
-      if f:
-        ctx = f[0][:3]
-    except:
-      pass
-    if args:
-      message %= args
-    print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
-                              ctx[1], ctx[2], message)
-
-def FindBuildFiles():
-  extension = '.gyp'
-  files = os.listdir(os.getcwd())
-  build_files = []
-  for file in files:
-    if file.endswith(extension):
-      build_files.append(file)
-  return build_files
-
-
-def Load(build_files, format, default_variables={},
-         includes=[], depth='.', params=None, check=False,
-         circular_check=True, duplicate_basename_check=True):
-  """
-  Loads one or more specified build files.
-  default_variables and includes will be copied before use.
-  Returns the generator for the specified format and the
-  data returned by loading the specified build files.
-  """
-  if params is None:
-    params = {}
-
-  if '-' in format:
-    format, params['flavor'] = format.split('-', 1)
-
-  default_variables = copy.copy(default_variables)
-
-  # Default variables provided by this program and its modules should be
-  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
-  # avoiding collisions with user and automatic variables.
-  default_variables['GENERATOR'] = format
-  default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
-
-  # Format can be a custom python file, or by default the name of a module
-  # within gyp.generator.
-  if format.endswith('.py'):
-    generator_name = os.path.splitext(format)[0]
-    path, generator_name = os.path.split(generator_name)
-
-    # Make sure the path to the custom generator is in sys.path
-    # Don't worry about removing it once we are done.  Keeping the path
-    # to each generator that is used in sys.path is likely harmless and
-    # arguably a good idea.
-    path = os.path.abspath(path)
-    if path not in sys.path:
-      sys.path.insert(0, path)
-  else:
-    generator_name = 'gyp.generator.' + format
-
-  # These parameters are passed in order (as opposed to by key)
-  # because ActivePython cannot handle key parameters to __import__.
-  generator = __import__(generator_name, globals(), locals(), generator_name)
-  for (key, val) in generator.generator_default_variables.items():
-    default_variables.setdefault(key, val)
-
-  # Give the generator the opportunity to set additional variables based on
-  # the params it will receive in the output phase.
-  if getattr(generator, 'CalculateVariables', None):
-    generator.CalculateVariables(default_variables, params)
-
-  # Give the generator the opportunity to set generator_input_info based on
-  # the params it will receive in the output phase.
-  if getattr(generator, 'CalculateGeneratorInputInfo', None):
-    generator.CalculateGeneratorInputInfo(params)
-
-  # Fetch the generator specific info that gets fed to input, we use getattr
-  # so we can default things and the generators only have to provide what
-  # they need.
-  generator_input_info = {
-    'non_configuration_keys':
-        getattr(generator, 'generator_additional_non_configuration_keys', []),
-    'path_sections':
-        getattr(generator, 'generator_additional_path_sections', []),
-    'extra_sources_for_rules':
-        getattr(generator, 'generator_extra_sources_for_rules', []),
-    'generator_supports_multiple_toolsets':
-        getattr(generator, 'generator_supports_multiple_toolsets', False),
-    'generator_wants_static_library_dependencies_adjusted':
-        getattr(generator,
-                'generator_wants_static_library_dependencies_adjusted', True),
-    'generator_wants_sorted_dependencies':
-        getattr(generator, 'generator_wants_sorted_dependencies', False),
-    'generator_filelist_paths':
-        getattr(generator, 'generator_filelist_paths', None),
-  }
-
-  # Process the input specific to this generator.
-  result = gyp.input.Load(build_files, default_variables, includes[:],
-                          depth, generator_input_info, check, circular_check,
-                          duplicate_basename_check,
-                          params['parallel'], params['root_targets'])
-  return [generator] + result
-
-def NameValueListToDict(name_value_list):
-  """
-  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
-  of the pairs.  If a string is simply NAME, then the value in the dictionary
-  is set to True.  If VALUE can be converted to an integer, it is.
-  """
-  result = { }
-  for item in name_value_list:
-    tokens = item.split('=', 1)
-    if len(tokens) == 2:
-      # If we can make it an int, use that, otherwise, use the string.
-      try:
-        token_value = int(tokens[1])
-      except ValueError:
-        token_value = tokens[1]
-      # Set the variable to the supplied value.
-      result[tokens[0]] = token_value
-    else:
-      # No value supplied, treat it as a boolean and set it.
-      result[tokens[0]] = True
-  return result
-
-def ShlexEnv(env_name):
-  flags = os.environ.get(env_name, [])
-  if flags:
-    flags = shlex.split(flags)
-  return flags
-
-def FormatOpt(opt, value):
-  if opt.startswith('--'):
-    return '%s=%s' % (opt, value)
-  return opt + value
-
-def RegenerateAppendFlag(flag, values, predicate, env_name, options):
-  """Regenerate a list of command line flags, for an option of action='append'.
-
-  The |env_name|, if given, is checked in the environment and used to generate
-  an initial list of options, then the options that were specified on the
-  command line (given in |values|) are appended.  This matches the handling of
-  environment variables and command line flags where command line flags override
-  the environment, while not requiring the environment to be set when the flags
-  are used again.
-  """
-  flags = []
-  if options.use_environment and env_name:
-    for flag_value in ShlexEnv(env_name):
-      value = FormatOpt(flag, predicate(flag_value))
-      if value in flags:
-        flags.remove(value)
-      flags.append(value)
-  if values:
-    for flag_value in values:
-      flags.append(FormatOpt(flag, predicate(flag_value)))
-  return flags
-
-def RegenerateFlags(options):
-  """Given a parsed options object, and taking the environment variables into
-  account, returns a list of flags that should regenerate an equivalent options
-  object (even in the absence of the environment variables.)
-
-  Any path options will be normalized relative to depth.
-
-  The format flag is not included, as it is assumed the calling generator will
-  set that as appropriate.
-  """
-  def FixPath(path):
-    path = gyp.common.FixIfRelativePath(path, options.depth)
-    if not path:
-      return os.path.curdir
-    return path
-
-  def Noop(value):
-    return value
-
-  # We always want to ignore the environment when regenerating, to avoid
-  # duplicate or changed flags in the environment at the time of regeneration.
-  flags = ['--ignore-environment']
-  for name, metadata in options._regeneration_metadata.iteritems():
-    opt = metadata['opt']
-    value = getattr(options, name)
-    value_predicate = metadata['type'] == 'path' and FixPath or Noop
-    action = metadata['action']
-    env_name = metadata['env_name']
-    if action == 'append':
-      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
-                                        env_name, options))
-    elif action in ('store', None):  # None is a synonym for 'store'.
-      if value:
-        flags.append(FormatOpt(opt, value_predicate(value)))
-      elif options.use_environment and env_name and os.environ.get(env_name):
-        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
-    elif action in ('store_true', 'store_false'):
-      if ((action == 'store_true' and value) or
-          (action == 'store_false' and not value)):
-        flags.append(opt)
-      elif options.use_environment and env_name:
-        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
-                             'for %s flag %r env_name %r' % (action, opt,
-                                                             env_name))
-    else:
-      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
-                           'flag %r' % (action, opt))
-
-  return flags
-
-class RegeneratableOptionParser(optparse.OptionParser):
-  def __init__(self):
-    self.__regeneratable_options = {}
-    optparse.OptionParser.__init__(self)
-
-  def add_option(self, *args, **kw):
-    """Add an option to the parser.
-
-    This accepts the same arguments as OptionParser.add_option, plus the
-    following:
-      regenerate: can be set to False to prevent this option from being included
-                  in regeneration.
-      env_name: name of environment variable that additional values for this
-                option come from.
-      type: adds type='path', to tell the regenerator that the values of
-            this option need to be made relative to options.depth
-    """
-    env_name = kw.pop('env_name', None)
-    if 'dest' in kw and kw.pop('regenerate', True):
-      dest = kw['dest']
-
-      # The path type is needed for regenerating, for optparse we can just treat
-      # it as a string.
-      type = kw.get('type')
-      if type == 'path':
-        kw['type'] = 'string'
-
-      self.__regeneratable_options[dest] = {
-          'action': kw.get('action'),
-          'type': type,
-          'env_name': env_name,
-          'opt': args[0],
-        }
-
-    optparse.OptionParser.add_option(self, *args, **kw)
-
-  def parse_args(self, *args):
-    values, args = optparse.OptionParser.parse_args(self, *args)
-    values._regeneration_metadata = self.__regeneratable_options
-    return values, args
-
-def gyp_main(args):
-  my_name = os.path.basename(sys.argv[0])
-
-  parser = RegeneratableOptionParser()
-  usage = 'usage: %s [options ...] [build_file ...]'
-  parser.set_usage(usage.replace('%s', '%prog'))
-  parser.add_option('--build', dest='configs', action='append',
-                    help='configuration for build after project generation')
-  parser.add_option('--check', dest='check', action='store_true',
-                    help='check format of gyp files')
-  parser.add_option('--config-dir', dest='config_dir', action='store',
-                    env_name='GYP_CONFIG_DIR', default=None,
-                    help='The location for configuration files like '
-                    'include.gypi.')
-  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
-                    action='append', default=[], help='turn on a debugging '
-                    'mode for debugging GYP.  Supported modes are "variables", '
-                    '"includes" and "general" or "all" for all of them.')
-  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
-                    env_name='GYP_DEFINES',
-                    help='sets variable VAR to value VAL')
-  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
-                    help='set DEPTH gyp variable to a relative path to PATH')
-  parser.add_option('-f', '--format', dest='formats', action='append',
-                    env_name='GYP_GENERATORS', regenerate=False,
-                    help='output formats to generate')
-  parser.add_option('-G', dest='generator_flags', action='append', default=[],
-                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
-                    help='sets generator flag FLAG to VAL')
-  parser.add_option('--generator-output', dest='generator_output',
-                    action='store', default=None, metavar='DIR', type='path',
-                    env_name='GYP_GENERATOR_OUTPUT',
-                    help='puts generated build files under DIR')
-  parser.add_option('--ignore-environment', dest='use_environment',
-                    action='store_false', default=True, regenerate=False,
-                    help='do not read options from environment variables')
-  parser.add_option('-I', '--include', dest='includes', action='append',
-                    metavar='INCLUDE', type='path',
-                    help='files to include in all loaded .gyp files')
-  # --no-circular-check disables the check for circular relationships between
-  # .gyp files.  These relationships should not exist, but they've only been
-  # observed to be harmful with the Xcode generator.  Chromium's .gyp files
-  # currently have some circular relationships on non-Mac platforms, so this
-  # option allows the strict behavior to be used on Macs and the lenient
-  # behavior to be used elsewhere.
-  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
-  parser.add_option('--no-circular-check', dest='circular_check',
-                    action='store_false', default=True, regenerate=False,
-                    help="don't check for circular relationships between files")
-  # --no-duplicate-basename-check disables the check for duplicate basenames
-  # in a static_library/shared_library project. Visual C++ 2008 generator
-  # doesn't support this configuration. Libtool on Mac also generates warnings
-  # when duplicate basenames are passed into Make generator on Mac.
-  # TODO(yukawa): Remove this option when these legacy generators are
-  # deprecated.
-  parser.add_option('--no-duplicate-basename-check',
-                    dest='duplicate_basename_check', action='store_false',
-                    default=True, regenerate=False,
-                    help="don't check for duplicate basenames")
-  parser.add_option('--no-parallel', action='store_true', default=False,
-                    help='Disable multiprocessing')
-  parser.add_option('-S', '--suffix', dest='suffix', default='',
-                    help='suffix to add to generated files')
-  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
-                    default=None, metavar='DIR', type='path',
-                    help='directory to use as the root of the source tree')
-  parser.add_option('-R', '--root-target', dest='root_targets',
-                    action='append', metavar='TARGET',
-                    help='include only TARGET and its deep dependencies')
-
-  options, build_files_arg = parser.parse_args(args)
-  build_files = build_files_arg
-
-  # Set up the configuration directory (defaults to ~/.gyp)
-  if not options.config_dir:
-    home = None
-    home_dot_gyp = None
-    if options.use_environment:
-      home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
-      if home_dot_gyp:
-        home_dot_gyp = os.path.expanduser(home_dot_gyp)
-
-    if not home_dot_gyp:
-      home_vars = ['HOME']
-      if sys.platform in ('cygwin', 'win32'):
-        home_vars.append('USERPROFILE')
-      for home_var in home_vars:
-        home = os.getenv(home_var)
-        if home != None:
-          home_dot_gyp = os.path.join(home, '.gyp')
-          if not os.path.exists(home_dot_gyp):
-            home_dot_gyp = None
-          else:
-            break
-  else:
-    home_dot_gyp = os.path.expanduser(options.config_dir)
-
-  if home_dot_gyp and not os.path.exists(home_dot_gyp):
-    home_dot_gyp = None
-
-  if not options.formats:
-    # If no format was given on the command line, then check the env variable.
-    generate_formats = []
-    if options.use_environment:
-      generate_formats = os.environ.get('GYP_GENERATORS', [])
-    if generate_formats:
-      generate_formats = re.split(r'[\s,]', generate_formats)
-    if generate_formats:
-      options.formats = generate_formats
-    else:
-      # Nothing in the variable, default based on platform.
-      if sys.platform == 'darwin':
-        options.formats = ['xcode']
-      elif sys.platform in ('win32', 'cygwin'):
-        options.formats = ['msvs']
-      else:
-        options.formats = ['make']
-
-  if not options.generator_output and options.use_environment:
-    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
-    if g_o:
-      options.generator_output = g_o
-
-  options.parallel = not options.no_parallel
-
-  for mode in options.debug:
-    gyp.debug[mode] = 1
-
-  # Do an extra check to avoid work when we're not debugging.
-  if DEBUG_GENERAL in gyp.debug:
-    DebugOutput(DEBUG_GENERAL, 'running with these options:')
-    for option, value in sorted(options.__dict__.items()):
-      if option[0] == '_':
-        continue
-      if isinstance(value, basestring):
-        DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
-      else:
-        DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)
-
-  if not build_files:
-    build_files = FindBuildFiles()
-  if not build_files:
-    raise GypError((usage + '\n\n%s: error: no build_file') %
-                   (my_name, my_name))
-
-  # TODO(mark): Chromium-specific hack!
-  # For Chromium, the gyp "depth" variable should always be a relative path
-  # to Chromium's top-level "src" directory.  If no depth variable was set
-  # on the command line, try to find a "src" directory by looking at the
-  # absolute path to each build file's directory.  The first "src" component
-  # found will be treated as though it were the path used for --depth.
-  if not options.depth:
-    for build_file in build_files:
-      build_file_dir = os.path.abspath(os.path.dirname(build_file))
-      build_file_dir_components = build_file_dir.split(os.path.sep)
-      components_len = len(build_file_dir_components)
-      for index in xrange(components_len - 1, -1, -1):
-        if build_file_dir_components[index] == 'src':
-          options.depth = os.path.sep.join(build_file_dir_components)
-          break
-        del build_file_dir_components[index]
-
-      # If the inner loop found something, break without advancing to another
-      # build file.
-      if options.depth:
-        break
-
-    if not options.depth:
-      raise GypError('Could not automatically locate src directory.  This is '
-                     'a temporary Chromium feature that will be removed.  Use '
-                     '--depth as a workaround.')
-
-  # If toplevel-dir is not set, we assume that depth is the root of our source
-  # tree.
-  if not options.toplevel_dir:
-    options.toplevel_dir = options.depth
-
-  # -D on the command line sets variable defaults - D isn't just for define,
-  # it's for default.  Perhaps there should be a way to force (-F?) a
-  # variable's value so that it can't be overridden by anything else.
-  cmdline_default_variables = {}
-  defines = []
-  if options.use_environment:
-    defines += ShlexEnv('GYP_DEFINES')
-  if options.defines:
-    defines += options.defines
-  cmdline_default_variables = NameValueListToDict(defines)
-  if DEBUG_GENERAL in gyp.debug:
-    DebugOutput(DEBUG_GENERAL,
-                "cmdline_default_variables: %s", cmdline_default_variables)
-
-  # Set up includes.
-  includes = []
-
-  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
-  # .gyp file that's loaded, before anything else is included.
-  if home_dot_gyp != None:
-    default_include = os.path.join(home_dot_gyp, 'include.gypi')
-    if os.path.exists(default_include):
-      print 'Using overrides found in ' + default_include
-      includes.append(default_include)
-
-  # Command-line --include files come after the default include.
-  if options.includes:
-    includes.extend(options.includes)
-
-  # Generator flags should be prefixed with the target generator since they
-  # are global across all generator runs.
-  gen_flags = []
-  if options.use_environment:
-    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
-  if options.generator_flags:
-    gen_flags += options.generator_flags
-  generator_flags = NameValueListToDict(gen_flags)
-  if DEBUG_GENERAL in gyp.debug.keys():
-    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
-
-  # Generate all requested formats (use a set in case we got one format request
-  # twice)
-  for format in set(options.formats):
-    params = {'options': options,
-              'build_files': build_files,
-              'generator_flags': generator_flags,
-              'cwd': os.getcwd(),
-              'build_files_arg': build_files_arg,
-              'gyp_binary': sys.argv[0],
-              'home_dot_gyp': home_dot_gyp,
-              'parallel': options.parallel,
-              'root_targets': options.root_targets,
-              'target_arch': cmdline_default_variables.get('target_arch', '')}
-
-    # Start with the default variables from the command line.
-    [generator, flat_list, targets, data] = Load(
-        build_files, format, cmdline_default_variables, includes, options.depth,
-        params, options.check, options.circular_check,
-        options.duplicate_basename_check)
-
-    # TODO(mark): Pass |data| for now because the generator needs a list of
-    # build files that came in.  In the future, maybe it should just accept
-    # a list, and not the whole data dict.
-    # NOTE: flat_list is the flattened dependency graph specifying the order
-    # that targets may be built.  Build systems that operate serially or that
-    # need to have dependencies defined before dependents reference them should
-    # generate targets in the order specified in flat_list.
-    generator.GenerateOutput(flat_list, targets, data, params)
-
-    if options.configs:
-      valid_configs = targets[flat_list[0]]['configurations'].keys()
-      for conf in options.configs:
-        if conf not in valid_configs:
-          raise GypError('Invalid config specified via --build: %s' % conf)
-      generator.PerformBuild(data, options.configs, params)
-
-  # Done
-  return 0
-
-
-def main(args):
-  try:
-    return gyp_main(args)
-  except GypError, e:
-    sys.stderr.write("gyp: %s\n" % e)
-    return 1
-
-# NOTE: setuptools generated console_scripts calls function with no arguments
-def script_main():
-  return main(sys.argv[1:])
-
-if __name__ == '__main__':
-  sys.exit(script_main())
diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py
deleted file mode 100644
index a1e1db5..0000000
--- a/tools/gyp/pylib/gyp/common.py
+++ /dev/null
@@ -1,615 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import with_statement
-
-import collections
-import errno
-import filecmp
-import os.path
-import re
-import tempfile
-import sys
-
-
-# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
-# among other "problems".
-class memoize(object):
-  def __init__(self, func):
-    self.func = func
-    self.cache = {}
-  def __call__(self, *args):
-    try:
-      return self.cache[args]
-    except KeyError:
-      result = self.func(*args)
-      self.cache[args] = result
-      return result
-
-
-class GypError(Exception):
-  """Error class representing an error, which is to be presented
-  to the user.  The main entry point will catch and display this.
-  """
-  pass
-
-
-def ExceptionAppend(e, msg):
-  """Append a message to the given exception's message."""
-  if not e.args:
-    e.args = (msg,)
-  elif len(e.args) == 1:
-    e.args = (str(e.args[0]) + ' ' + msg,)
-  else:
-    e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
-
-
-def FindQualifiedTargets(target, qualified_list):
-  """
-  Given a list of qualified targets, return the qualified targets for the
-  specified |target|.
-  """
-  return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
-
-
-def ParseQualifiedTarget(target):
-  # Splits a qualified target into a build file, target name and toolset.
-
-  # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
-  target_split = target.rsplit(':', 1)
-  if len(target_split) == 2:
-    [build_file, target] = target_split
-  else:
-    build_file = None
-
-  target_split = target.rsplit('#', 1)
-  if len(target_split) == 2:
-    [target, toolset] = target_split
-  else:
-    toolset = None
-
-  return [build_file, target, toolset]
-
-
-def ResolveTarget(build_file, target, toolset):
-  # This function resolves a target into a canonical form:
-  # - a fully defined build file, either absolute or relative to the current
-  # directory
-  # - a target name
-  # - a toolset
-  #
-  # build_file is the file relative to which 'target' is defined.
-  # target is the qualified target.
-  # toolset is the default toolset for that target.
-  [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
-
-  if parsed_build_file:
-    if build_file:
-      # If a relative path, parsed_build_file is relative to the directory
-      # containing build_file.  If build_file is not in the current directory,
-      # parsed_build_file is not a usable path as-is.  Resolve it by
-      # interpreting it as relative to build_file.  If parsed_build_file is
-      # absolute, it is usable as a path regardless of the current directory,
-      # and os.path.join will return it as-is.
-      build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
-                                                 parsed_build_file))
-      # Further (to handle cases like ../cwd), make it relative to cwd.
-      if not os.path.isabs(build_file):
-        build_file = RelativePath(build_file, '.')
-    else:
-      build_file = parsed_build_file
-
-  if parsed_toolset:
-    toolset = parsed_toolset
-
-  return [build_file, target, toolset]
-
-
-def BuildFile(fully_qualified_target):
-  # Extracts the build file from the fully qualified target.
-  return ParseQualifiedTarget(fully_qualified_target)[0]
-
-
-def GetEnvironFallback(var_list, default):
-  """Look up a key in the environment, with fallback to secondary keys
-  and finally falling back to a default value."""
-  for var in var_list:
-    if var in os.environ:
-      return os.environ[var]
-  return default
-
-
-def QualifiedTarget(build_file, target, toolset):
-  # "Qualified" means the file that a target was defined in and the target
-  # name, separated by a colon, suffixed by a # and the toolset name:
-  # /path/to/file.gyp:target_name#toolset
-  fully_qualified = build_file + ':' + target
-  if toolset:
-    fully_qualified = fully_qualified + '#' + toolset
-  return fully_qualified
-
-
-@memoize
-def RelativePath(path, relative_to, follow_path_symlink=True):
-  # Assuming both |path| and |relative_to| are relative to the current
-  # directory, returns a relative path that identifies path relative to
-  # relative_to.
-  # If |follow_path_symlink| is true (default) and |path| is a symlink, then
-  # this method returns a path to the real file represented by |path|. If it is
-  # false, this method returns a path to the symlink. If |path| is not a
-  # symlink, this option has no effect.
-
-  # Convert to normalized (and therefore absolute paths).
-  if follow_path_symlink:
-    path = os.path.realpath(path)
-  else:
-    path = os.path.abspath(path)
-  relative_to = os.path.realpath(relative_to)
-
-  # On Windows, we can't create a relative path to a different drive, so just
-  # use the absolute path.
-  if sys.platform == 'win32':
-    if (os.path.splitdrive(path)[0].lower() !=
-        os.path.splitdrive(relative_to)[0].lower()):
-      return path
-
-  # Split the paths into components.
-  path_split = path.split(os.path.sep)
-  relative_to_split = relative_to.split(os.path.sep)
-
-  # Determine how much of the prefix the two paths share.
-  prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
-
-  # Put enough ".." components to back up out of relative_to to the common
-  # prefix, and then append the part of path_split after the common prefix.
-  relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
-                   path_split[prefix_len:]
-
-  if len(relative_split) == 0:
-    # The paths were the same.
-    return ''
-
-  # Turn it back into a string and we're done.
-  return os.path.join(*relative_split)
-
-
-@memoize
-def InvertRelativePath(path, toplevel_dir=None):
-  """Given a path like foo/bar that is relative to toplevel_dir, return
-  the inverse relative path back to the toplevel_dir.
-
-  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
-  should always produce the empty string, unless the path contains symlinks.
-  """
-  if not path:
-    return path
-  toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
-  return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
-
-
-def FixIfRelativePath(path, relative_to):
-  # Like RelativePath but returns |path| unchanged if it is absolute.
-  if os.path.isabs(path):
-    return path
-  return RelativePath(path, relative_to)
-
-
-def UnrelativePath(path, relative_to):
-  # Assuming that |relative_to| is relative to the current directory, and |path|
-  # is a path relative to the dirname of |relative_to|, returns a path that
-  # identifies |path| relative to the current directory.
-  rel_dir = os.path.dirname(relative_to)
-  return os.path.normpath(os.path.join(rel_dir, path))
-
-
-# re objects used by EncodePOSIXShellArgument.  See IEEE 1003.1 XCU.2.2 at
-# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
-# and the documentation for various shells.
-
-# _quote is a pattern that should match any argument that needs to be quoted
-# with double-quotes by EncodePOSIXShellArgument.  It matches the following
-# characters appearing anywhere in an argument:
-#   \t, \n, space  parameter separators
-#   #              comments
-#   $              expansions (quoted to always expand within one argument)
-#   %              called out by IEEE 1003.1 XCU.2.2
-#   &              job control
-#   '              quoting
-#   (, )           subshell execution
-#   *, ?, [        pathname expansion
-#   ;              command delimiter
-#   <, >, |        redirection
-#   =              assignment
-#   {, }           brace expansion (bash)
-#   ~              tilde expansion
-# It also matches the empty string, because "" (or '') is the only way to
-# represent an empty string literal argument to a POSIX shell.
-#
-# This does not match the characters in _escape, because those need to be
-# backslash-escaped regardless of whether they appear in a double-quoted
-# string.
-_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
-
-# _escape is a pattern that should match any character that needs to be
-# escaped with a backslash, whether or not the argument matched the _quote
-# pattern.  _escape is used with re.sub to backslash anything in _escape's
-# first match group, hence the (parentheses) in the regular expression.
-#
-# _escape matches the following characters appearing anywhere in an argument:
-#   "  to prevent POSIX shells from interpreting this character for quoting
-#   \  to prevent POSIX shells from interpreting this character for escaping
-#   `  to prevent POSIX shells from interpreting this character for command
-#      substitution
-# Missing from this list is $, because the desired behavior of
-# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
-#
-# Also missing from this list is !, which bash will interpret as the history
-# expansion character when history is enabled.  bash does not enable history
-# by default in non-interactive shells, so this is not thought to be a problem.
-# ! was omitted from this list because bash interprets "\!" as a literal string
-# including the backslash character (avoiding history expansion but retaining
-# the backslash), which would not be correct for argument encoding.  Handling
-# this case properly would also be problematic because bash allows the history
-# character to be changed with the histchars shell variable.  Fortunately,
-# as history is not enabled in non-interactive shells and
-# EncodePOSIXShellArgument is only expected to encode for non-interactive
-# shells, there is no room for error here by ignoring !.
-_escape = re.compile(r'(["\\`])')
-
-def EncodePOSIXShellArgument(argument):
-  """Encodes |argument| suitably for consumption by POSIX shells.
-
-  argument may be quoted and escaped as necessary to ensure that POSIX shells
-  treat the returned value as a literal representing the argument passed to
-  this function.  Parameter (variable) expansions beginning with $ are allowed
-  to remain intact without escaping the $, to allow the argument to contain
-  references to variables to be expanded by the shell.
-  """
-
-  if not isinstance(argument, str):
-    argument = str(argument)
-
-  if _quote.search(argument):
-    quote = '"'
-  else:
-    quote = ''
-
-  encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
-
-  return encoded
-
-
-def EncodePOSIXShellList(list):
-  """Encodes |list| suitably for consumption by POSIX shells.
-
-  Returns EncodePOSIXShellArgument for each item in list, and joins them
-  together using the space character as an argument separator.
-  """
-
-  encoded_arguments = []
-  for argument in list:
-    encoded_arguments.append(EncodePOSIXShellArgument(argument))
-  return ' '.join(encoded_arguments)
-
-
-def DeepDependencyTargets(target_dicts, roots):
-  """Returns the recursive list of target dependencies."""
-  dependencies = set()
-  pending = set(roots)
-  while pending:
-    # Pluck out one.
-    r = pending.pop()
-    # Skip if visited already.
-    if r in dependencies:
-      continue
-    # Add it.
-    dependencies.add(r)
-    # Add its children.
-    spec = target_dicts[r]
-    pending.update(set(spec.get('dependencies', [])))
-    pending.update(set(spec.get('dependencies_original', [])))
-  return list(dependencies - set(roots))
-
-
-def BuildFileTargets(target_list, build_file):
-  """From a target_list, returns the subset from the specified build_file.
-  """
-  return [p for p in target_list if BuildFile(p) == build_file]
-
-
-def AllTargets(target_list, target_dicts, build_file):
-  """Returns all targets (direct and dependencies) for the specified build_file.
-  """
-  bftargets = BuildFileTargets(target_list, build_file)
-  deptargets = DeepDependencyTargets(target_dicts, bftargets)
-  return bftargets + deptargets
-
-
-def WriteOnDiff(filename):
-  """Write to a file only if the new contents differ.
-
-  Arguments:
-    filename: name of the file to potentially write to.
-  Returns:
-    A file like object which will write to temporary file and only overwrite
-    the target if it differs (on close).
-  """
-
-  class Writer(object):
-    """Wrapper around file which only overwrites the target if it differs."""
-    def __init__(self):
-      # Pick temporary file.
-      tmp_fd, self.tmp_path = tempfile.mkstemp(
-          suffix='.tmp',
-          prefix=os.path.split(filename)[1] + '.gyp.',
-          dir=os.path.split(filename)[0])
-      try:
-        self.tmp_file = os.fdopen(tmp_fd, 'wb')
-      except Exception:
-        # Don't leave turds behind.
-        os.unlink(self.tmp_path)
-        raise
-
-    def __getattr__(self, attrname):
-      # Delegate everything else to self.tmp_file
-      return getattr(self.tmp_file, attrname)
-
-    def close(self):
-      try:
-        # Close tmp file.
-        self.tmp_file.close()
-        # Determine if different.
-        same = False
-        try:
-          same = filecmp.cmp(self.tmp_path, filename, False)
-        except OSError, e:
-          if e.errno != errno.ENOENT:
-            raise
-
-        if same:
-          # The new file is identical to the old one, just get rid of the new
-          # one.
-          os.unlink(self.tmp_path)
-        else:
-          # The new file is different from the old one, or there is no old one.
-          # Rename the new file to the permanent name.
-          #
-          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
-          # file that can only be read by the owner, regardless of the umask.
-          # There's no reason to not respect the umask here, which means that
-          # an extra hoop is required to fetch it and reset the new file's mode.
-          #
-          # No way to get the umask without setting a new one?  Set a safe one
-          # and then set it back to the old value.
-          umask = os.umask(077)
-          os.umask(umask)
-          os.chmod(self.tmp_path, 0666 & ~umask)
-          if sys.platform == 'win32' and os.path.exists(filename):
-            # NOTE: on windows (but not cygwin) rename will not replace an
-            # existing file, so it must be preceded with a remove. Sadly there
-            # is no way to make the switch atomic.
-            os.remove(filename)
-          os.rename(self.tmp_path, filename)
-      except Exception:
-        # Don't leave turds behind.
-        os.unlink(self.tmp_path)
-        raise
-
-  return Writer()
-
-
-def EnsureDirExists(path):
-  """Make sure the directory for |path| exists."""
-  try:
-    os.makedirs(os.path.dirname(path))
-  except OSError:
-    pass
-
-
-def GetFlavor(params):
-  """Returns |params.flavor| if it's set, else the system's default flavor."""
-  flavors = {
-    'cygwin': 'win',
-    'win32': 'win',
-    'darwin': 'mac',
-  }
-
-  if 'flavor' in params:
-    return params['flavor']
-  if sys.platform in flavors:
-    return flavors[sys.platform]
-  if sys.platform.startswith('sunos'):
-    return 'solaris'
-  if sys.platform.startswith('freebsd'):
-    return 'freebsd'
-  if sys.platform.startswith('openbsd'):
-    return 'openbsd'
-  if sys.platform.startswith('netbsd'):
-    return 'netbsd'
-  if sys.platform.startswith('aix'):
-    return 'aix'
-
-  return 'linux'
-
-
-def CopyTool(flavor, out_path, generator_flags={}):
-  """Finds (flock|mac|win)_tool.py in the gyp directory and copies it
-  to |out_path|."""
-  # aix and solaris just need flock emulation. mac and win use more complicated
-  # support scripts.
-  prefix = {
-      'aix': 'flock',
-      'solaris': 'flock',
-      'mac': 'mac',
-      'win': 'win'
-      }.get(flavor, None)
-  if not prefix:
-    return
-
-  # Slurp input file.
-  source_path = os.path.join(
-      os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
-  with open(source_path) as source_file:
-    source = source_file.readlines()
-
-  # Set custom header flags.
-  header = '# Generated by gyp. Do not edit.\n'
-  mac_toolchain_dir =  generator_flags.get('mac_toolchain_dir', None)
-  if flavor == 'mac' and mac_toolchain_dir:
-    header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
-        % mac_toolchain_dir
-
-  # Add header and write it out.
-  tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
-  with open(tool_path, 'w') as tool_file:
-    tool_file.write(
-        ''.join([source[0], header] + source[1:]))
-
-  # Make file executable.
-  os.chmod(tool_path, 0755)
-
-
-# From Alex Martelli,
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
-# ASPN: Python Cookbook: Remove duplicates from a sequence
-# First comment, dated 2001/10/13.
-# (Also in the printed Python Cookbook.)
-
-def uniquer(seq, idfun=None):
-    if idfun is None:
-        idfun = lambda x: x
-    seen = {}
-    result = []
-    for item in seq:
-        marker = idfun(item)
-        if marker in seen: continue
-        seen[marker] = 1
-        result.append(item)
-    return result
-
-
-# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(collections.MutableSet):
-  def __init__(self, iterable=None):
-    self.end = end = []
-    end += [None, end, end]         # sentinel node for doubly linked list
-    self.map = {}                   # key --> [key, prev, next]
-    if iterable is not None:
-      self |= iterable
-
-  def __len__(self):
-    return len(self.map)
-
-  def __contains__(self, key):
-    return key in self.map
-
-  def add(self, key):
-    if key not in self.map:
-      end = self.end
-      curr = end[1]
-      curr[2] = end[1] = self.map[key] = [key, curr, end]
-
-  def discard(self, key):
-    if key in self.map:
-      key, prev_item, next_item = self.map.pop(key)
-      prev_item[2] = next_item
-      next_item[1] = prev_item
-
-  def __iter__(self):
-    end = self.end
-    curr = end[2]
-    while curr is not end:
-      yield curr[0]
-      curr = curr[2]
-
-  def __reversed__(self):
-    end = self.end
-    curr = end[1]
-    while curr is not end:
-      yield curr[0]
-      curr = curr[1]
-
-  # The second argument is an addition that causes a pylint warning.
-  def pop(self, last=True):  # pylint: disable=W0221
-    if not self:
-      raise KeyError('set is empty')
-    key = self.end[1][0] if last else self.end[2][0]
-    self.discard(key)
-    return key
-
-  def __repr__(self):
-    if not self:
-      return '%s()' % (self.__class__.__name__,)
-    return '%s(%r)' % (self.__class__.__name__, list(self))
-
-  def __eq__(self, other):
-    if isinstance(other, OrderedSet):
-      return len(self) == len(other) and list(self) == list(other)
-    return set(self) == set(other)
-
-  # Extensions to the recipe.
-  def update(self, iterable):
-    for i in iterable:
-      if i not in self:
-        self.add(i)
-
-
-class CycleError(Exception):
-  """An exception raised when an unexpected cycle is detected."""
-  def __init__(self, nodes):
-    self.nodes = nodes
-  def __str__(self):
-    return 'CycleError: cycle involving: ' + str(self.nodes)
-
-
-def TopologicallySorted(graph, get_edges):
-  r"""Topologically sort based on a user provided edge definition.
-
-  Args:
-    graph: A list of node names.
-    get_edges: A function mapping from node name to a hashable collection
-               of node names which this node has outgoing edges to.
-  Returns:
-    A list containing all of the nodes in graph in topological order.
-    It is assumed that calling get_edges once for each node and caching is
-    cheaper than repeatedly calling get_edges.
-  Raises:
-    CycleError in the event of a cycle.
-  Example:
-    graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
-    def GetEdges(node):
-      return re.findall(r'\$\(([^)]+)\)', graph[node])
-    print TopologicallySorted(graph.keys(), GetEdges)
-    ==>
-    ['a', 'c', 'b']
-  """
-  get_edges = memoize(get_edges)
-  visited = set()
-  visiting = set()
-  ordered_nodes = []
-  def Visit(node):
-    if node in visiting:
-      raise CycleError(visiting)
-    if node in visited:
-      return
-    visited.add(node)
-    visiting.add(node)
-    for neighbor in get_edges(node):
-      Visit(neighbor)
-    visiting.remove(node)
-    ordered_nodes.insert(0, node)
-  for node in sorted(graph):
-    Visit(node)
-  return ordered_nodes
-
-def CrossCompileRequested():
-  # TODO: figure out how to not build extra host objects in the
-  # non-cross-compile case when this is enabled, and enable unconditionally.
-  return (os.environ.get('GYP_CROSSCOMPILE') or
-          os.environ.get('AR_host') or
-          os.environ.get('CC_host') or
-          os.environ.get('CXX_host') or
-          os.environ.get('AR_target') or
-          os.environ.get('CC_target') or
-          os.environ.get('CXX_target'))
diff --git a/tools/gyp/pylib/gyp/common_test.py b/tools/gyp/pylib/gyp/common_test.py
deleted file mode 100755
index ad6f9a1..0000000
--- a/tools/gyp/pylib/gyp/common_test.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the common.py file."""
-
-import gyp.common
-import unittest
-import sys
-
-
-class TestTopologicallySorted(unittest.TestCase):
-  def test_Valid(self):
-    """Test that sorting works on a valid graph with one possible order."""
-    graph = {
-        'a': ['b', 'c'],
-        'b': [],
-        'c': ['d'],
-        'd': ['b'],
-        }
-    def GetEdge(node):
-      return tuple(graph[node])
-    self.assertEqual(
-      gyp.common.TopologicallySorted(graph.keys(), GetEdge),
-      ['a', 'c', 'd', 'b'])
-
-  def test_Cycle(self):
-    """Test that an exception is thrown on a cyclic graph."""
-    graph = {
-        'a': ['b'],
-        'b': ['c'],
-        'c': ['d'],
-        'd': ['a'],
-        }
-    def GetEdge(node):
-      return tuple(graph[node])
-    self.assertRaises(
-      gyp.common.CycleError, gyp.common.TopologicallySorted,
-      graph.keys(), GetEdge)
-
-
-class TestGetFlavor(unittest.TestCase):
-  """Test that gyp.common.GetFlavor works as intended"""
-  original_platform = ''
-
-  def setUp(self):
-    self.original_platform = sys.platform
-
-  def tearDown(self):
-    sys.platform = self.original_platform
-
-  def assertFlavor(self, expected, argument, param):
-    sys.platform = argument
-    self.assertEqual(expected, gyp.common.GetFlavor(param))
-
-  def test_platform_default(self):
-    self.assertFlavor('freebsd', 'freebsd9' , {})
-    self.assertFlavor('freebsd', 'freebsd10', {})
-    self.assertFlavor('openbsd', 'openbsd5' , {})
-    self.assertFlavor('solaris', 'sunos5'   , {});
-    self.assertFlavor('solaris', 'sunos'    , {});
-    self.assertFlavor('linux'  , 'linux2'   , {});
-    self.assertFlavor('linux'  , 'linux3'   , {});
-
-  def test_param(self):
-    self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/tools/gyp/pylib/gyp/easy_xml.py
deleted file mode 100644
index bf949b6..0000000
--- a/tools/gyp/pylib/gyp/easy_xml.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-import os
-
-
-def XmlToString(content, encoding='utf-8', pretty=False):
-  """ Converts the structured content into an XML string.
-
-  Visual Studio files have a lot of pre-defined structures.  This function makes
-  it easy to represent these structures as Python data structures, instead of
-  having to create a lot of function calls.
-
-  Each XML element of the content is represented as a list composed of:
-  1. The name of the element, a string,
-  2. The attributes of the element, a dictionary (optional), and
-  3+. The content of the element, if any.  Strings are simple text nodes and
-      lists are child elements.
-
-  Example 1:
-      <test/>
-  becomes
-      ['test']
-
-  Example 2:
-      <myelement a='value1' b='value2'>
-         <childtype>This is</childtype>
-         <childtype>it!</childtype>
-      </myelement>
-
-  becomes
-      ['myelement', {'a':'value1', 'b':'value2'},
-         ['childtype', 'This is'],
-         ['childtype', 'it!'],
-      ]
-
-  Args:
-    content:  The structured content to be converted.
-    encoding: The encoding to report on the first XML line.
-    pretty: True if we want pretty printing with indents and new lines.
-
-  Returns:
-    The XML content as a string.
-  """
-  # We create a huge list of all the elements of the file.
-  xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
-  if pretty:
-    xml_parts.append('\n')
-  _ConstructContentList(xml_parts, content, pretty)
-
-  # Convert it to a string
-  return ''.join(xml_parts)
-
-
-def _ConstructContentList(xml_parts, specification, pretty, level=0):
-  """ Appends the XML parts corresponding to the specification.
-
-  Args:
-    xml_parts: A list of XML parts to be appended to.
-    specification:  The specification of the element.  See EasyXml docs.
-    pretty: True if we want pretty printing with indents and new lines.
-    level: Indentation level.
-  """
-  # The first item in a specification is the name of the element.
-  if pretty:
-    indentation = '  ' * level
-    new_line = '\n'
-  else:
-    indentation = ''
-    new_line = ''
-  name = specification[0]
-  if not isinstance(name, str):
-    raise Exception('The first item of an EasyXml specification should be '
-                    'a string.  Specification was ' + str(specification))
-  xml_parts.append(indentation + '<' + name)
-
-  # Optionally in second position is a dictionary of the attributes.
-  rest = specification[1:]
-  if rest and isinstance(rest[0], dict):
-    for at, val in sorted(rest[0].iteritems()):
-      xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
-    rest = rest[1:]
-  if rest:
-    xml_parts.append('>')
-    all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
-    multi_line = not all_strings
-    if multi_line and new_line:
-      xml_parts.append(new_line)
-    for child_spec in rest:
-      # If it's a string, append a text node.
-      # Otherwise recurse over that child definition
-      if isinstance(child_spec, str):
-        xml_parts.append(_XmlEscape(child_spec))
-      else:
-        _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
-    if multi_line and indentation:
-      xml_parts.append(indentation)
-    xml_parts.append('</%s>%s' % (name, new_line))
-  else:
-    xml_parts.append('/>%s' % new_line)
-
-
-def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
-                      win32=False):
-  """ Writes the XML content to disk, touching the file only if it has changed.
-
-  Args:
-    content:  The structured content to be written.
-    path: Location of the file.
-    encoding: The encoding to report on the first line of the XML file.
-    pretty: True if we want pretty printing with indents and new lines.
-  """
-  xml_string = XmlToString(content, encoding, pretty)
-  if win32 and os.linesep != '\r\n':
-    xml_string = xml_string.replace('\n', '\r\n')
-
-  # Get the old content
-  try:
-    f = open(path, 'r')
-    existing = f.read()
-    f.close()
-  except:
-    existing = None
-
-  # It has changed, write it
-  if existing != xml_string:
-    f = open(path, 'w')
-    f.write(xml_string)
-    f.close()
-
-
-_xml_escape_map = {
-    '"': '&quot;',
-    "'": '&apos;',
-    '<': '&lt;',
-    '>': '&gt;',
-    '&': '&amp;',
-    '\n': '&#xA;',
-    '\r': '&#xD;',
-}
-
-
-_xml_escape_re = re.compile(
-    "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
-
-
-def _XmlEscape(value, attr=False):
-  """ Escape a string for inclusion in XML."""
-  def replace(match):
-    m = match.string[match.start() : match.end()]
-    # don't replace single quotes in attrs
-    if attr and m == "'":
-      return m
-    return _xml_escape_map[m]
-  return _xml_escape_re.sub(replace, value)
diff --git a/tools/gyp/pylib/gyp/easy_xml_test.py b/tools/gyp/pylib/gyp/easy_xml_test.py
deleted file mode 100755
index df64354..0000000
--- a/tools/gyp/pylib/gyp/easy_xml_test.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the easy_xml.py file. """
-
-import gyp.easy_xml as easy_xml
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
-  def setUp(self):
-    self.stderr = StringIO.StringIO()
-
-  def test_EasyXml_simple(self):
-    self.assertEqual(
-      easy_xml.XmlToString(['test']),
-      '<?xml version="1.0" encoding="utf-8"?><test/>')
-
-    self.assertEqual(
-      easy_xml.XmlToString(['test'], encoding='Windows-1252'),
-      '<?xml version="1.0" encoding="Windows-1252"?><test/>')
-
-  def test_EasyXml_simple_with_attributes(self):
-    self.assertEqual(
-      easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
-      '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
-
-  def test_EasyXml_escaping(self):
-    original = '<test>\'"\r&\nfoo'
-    converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
-    converted_apos = converted.replace("'", '&apos;')
-    self.assertEqual(
-      easy_xml.XmlToString(['test3', {'a': original}, original]),
-      '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
-      (converted, converted_apos))
-
-  def test_EasyXml_pretty(self):
-    self.assertEqual(
-      easy_xml.XmlToString(
-          ['test3',
-            ['GrandParent',
-              ['Parent1',
-                ['Child']
-              ],
-              ['Parent2']
-            ]
-          ],
-          pretty=True),
-      '<?xml version="1.0" encoding="utf-8"?>\n'
-      '<test3>\n'
-      '  <GrandParent>\n'
-      '    <Parent1>\n'
-      '      <Child/>\n'
-      '    </Parent1>\n'
-      '    <Parent2/>\n'
-      '  </GrandParent>\n'
-      '</test3>\n')
-
-
-  def test_EasyXml_complex(self):
-    # We want to create:
-    target = (
-      '<?xml version="1.0" encoding="utf-8"?>'
-      '<Project>'
-        '<PropertyGroup Label="Globals">'
-          '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
-          '<Keyword>Win32Proj</Keyword>'
-          '<RootNamespace>automated_ui_tests</RootNamespace>'
-        '</PropertyGroup>'
-        '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
-        '<PropertyGroup '
-            'Condition="\'$(Configuration)|$(Platform)\'=='
-                       '\'Debug|Win32\'" Label="Configuration">'
-          '<ConfigurationType>Application</ConfigurationType>'
-          '<CharacterSet>Unicode</CharacterSet>'
-        '</PropertyGroup>'
-      '</Project>')
-
-    xml = easy_xml.XmlToString(
-        ['Project',
-          ['PropertyGroup', {'Label': 'Globals'},
-            ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
-            ['Keyword', 'Win32Proj'],
-            ['RootNamespace', 'automated_ui_tests']
-          ],
-          ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
-          ['PropertyGroup',
-            {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
-             'Label': 'Configuration'},
-            ['ConfigurationType', 'Application'],
-            ['CharacterSet', 'Unicode']
-          ]
-        ])
-    self.assertEqual(xml, target)
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/flock_tool.py b/tools/gyp/pylib/gyp/flock_tool.py
deleted file mode 100755
index b38d866..0000000
--- a/tools/gyp/pylib/gyp/flock_tool.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""These functions are executed via gyp-flock-tool when using the Makefile
-generator.  Used on systems that don't have a built-in flock."""
-
-import fcntl
-import os
-import struct
-import subprocess
-import sys
-
-
-def main(args):
-  executor = FlockTool()
-  executor.Dispatch(args)
-
-
-class FlockTool(object):
-  """This class emulates the 'flock' command."""
-  def Dispatch(self, args):
-    """Dispatches a string command to a method."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    method = "Exec%s" % self._CommandifyName(args[0])
-    getattr(self, method)(*args[1:])
-
-  def _CommandifyName(self, name_string):
-    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
-    return name_string.title().replace('-', '')
-
-  def ExecFlock(self, lockfile, *cmd_list):
-    """Emulates the most basic behavior of Linux's flock(1)."""
-    # Rely on exception handling to report errors.
-    # Note that the stock python on SunOS has a bug
-    # where fcntl.flock(fd, LOCK_EX) always fails
-    # with EBADF, that's why we use this F_SETLK
-    # hack instead.
-    fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
-    if sys.platform.startswith('aix'):
-      # Python on AIX is compiled with LARGEFILE support, which changes the
-      # struct size.
-      op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
-    else:
-      op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
-    fcntl.fcntl(fd, fcntl.F_SETLK, op)
-    return subprocess.call(cmd_list)
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/gyp/pylib/gyp/generator/__init__.py b/tools/gyp/pylib/gyp/generator/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/pylib/gyp/generator/__init__.py
+++ /dev/null
diff --git a/tools/gyp/pylib/gyp/generator/analyzer.py b/tools/gyp/pylib/gyp/generator/analyzer.py
deleted file mode 100644
index 921c1a6..0000000
--- a/tools/gyp/pylib/gyp/generator/analyzer.py
+++ /dev/null
@@ -1,741 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
-the generator flag config_path) the path of a json file that dictates the files
-and targets to search for. The following keys are supported:
-files: list of paths (relative) of the files to search for.
-test_targets: unqualified target names to search for. Any target in this list
-that depends upon a file in |files| is output regardless of the type of target
-or chain of dependencies.
-additional_compile_targets: Unqualified targets to search for in addition to
-test_targets. Targets in the combined list that depend upon a file in |files|
-are not necessarily output. For example, if the target is of type none then the
-target is not output (but one of the descendants of the target will be).
-
-The following is output:
-error: only supplied if there is an error.
-compile_targets: minimal set of targets that directly or indirectly (for
-  targets of type none) depend on the files in |files| and is one of the
-  supplied targets or a target that one of the supplied targets depends on.
-  The expectation is this set of targets is passed into a build step. This list
-  always contains the output of test_targets as well.
-test_targets: set of targets from the supplied |test_targets| that either
-  directly or indirectly depend upon a file in |files|. This list is useful
-  if additional processing needs to be done for certain targets after the
-  build, such as running tests.
-status: outputs one of three values: none of the supplied files were found,
-  one of the include files changed so that it should be assumed everything
-  changed (in this case test_targets and compile_targets are not output) or at
-  least one file was found.
-invalid_targets: list of supplied targets that were not found.
-
-Example:
-Consider a graph like the following:
-  A     D
- / \
-B   C
-A depends upon both B and C, A is of type none and B and C are executables.
-D is an executable, has no dependencies and nothing depends on it.
-If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
-files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
-the following is output:
-|compile_targets| = ["B"] B must be built as it depends upon the changed file b.cc
-and the supplied target A depends upon it. A is not output as a build_target
-as it is of type none with no rules and actions.
-|test_targets| = ["B"] B directly depends upon the changed file b.cc.
-
-Even though the file d.cc, which D depends upon, has changed, D is not output
-as it was not supplied by way of |additional_compile_targets| or |test_targets|.
-
-If the generator flag analyzer_output_path is specified, output is written
-there. Otherwise output is written to stdout.
-
-In Gyp the "all" target is shorthand for the root targets in the files passed
-to gyp. For example, if file "a.gyp" contains targets "a1" and
-"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
-on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
-Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
-directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
-then the "all" target includes "b1" and "b2".
-"""
-
-import gyp.common
-import gyp.ninja_syntax as ninja_syntax
-import json
-import os
-import posixpath
-import sys
-
-debug = False
-
-found_dependency_string = 'Found dependency'
-no_dependency_string = 'No dependencies'
-# Status when it should be assumed that everything has changed.
-all_changed_string = 'Found dependency (all)'
-
-# MatchStatus is used to indicate if and how a target depends upon the supplied
-# sources.
-# The target's sources contain one of the supplied paths.
-MATCH_STATUS_MATCHES = 1
-# The target has a dependency on another target that contains one of the
-# supplied paths.
-MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
-# The target's sources weren't in the supplied paths and none of the target's
-# dependencies depend upon a target that matched.
-MATCH_STATUS_DOESNT_MATCH = 3
-# The target doesn't contain the source, but the dependent targets have not yet
-# been visited to determine a more specific status.
-MATCH_STATUS_TBD = 4
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
-                'LIB_DIR', 'SHARED_LIB_DIR']:
-  generator_default_variables[dirname] = '!!!'
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
-               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
-               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
-               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
-               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
-               'CONFIGURATION_NAME']:
-  generator_default_variables[unused] = ''
-
-
-def _ToGypPath(path):
-  """Converts a path to the format used by gyp."""
-  if os.sep == '\\' and os.altsep == '/':
-    return path.replace('\\', '/')
-  return path
-
-
-def _ResolveParent(path, base_path_components):
-  """Resolves |path|, which starts with at least one '../'. Returns an empty
-  string if the path shouldn't be considered. See _AddSources() for a
-  description of |base_path_components|."""
-  depth = 0
-  while path.startswith('../'):
-    depth += 1
-    path = path[3:]
-  # Relative includes may go outside the source tree. For example, an action may
-  # have inputs in /usr/include, which are not in the source tree.
-  if depth > len(base_path_components):
-    return ''
-  if depth == len(base_path_components):
-    return path
-  return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
-      '/' + path
-
-
-def _AddSources(sources, base_path, base_path_components, result):
-  """Extracts valid sources from |sources| and adds them to |result|. Each
-  source file is relative to |base_path|, but may contain '..'. To make
-  resolving '..' easier |base_path_components| contains each of the
-  directories in |base_path|. Additionally each source may contain variables.
-  Such sources are ignored as it is assumed dependencies on them are expressed
-  and tracked by some other means."""
-  # NOTE: gyp paths are always posix style.
-  for source in sources:
-    if not len(source) or source.startswith('!!!') or source.startswith('$'):
-      continue
-    # variable expansion may lead to //.
-    org_source = source
-    source = source[0] + source[1:].replace('//', '/')
-    if source.startswith('../'):
-      source = _ResolveParent(source, base_path_components)
-      if len(source):
-        result.append(source)
-      continue
-    result.append(base_path + source)
-    if debug:
-      print 'AddSource', org_source, result[len(result) - 1]
-
-
-def _ExtractSourcesFromAction(action, base_path, base_path_components,
-                              results):
-  if 'inputs' in action:
-    _AddSources(action['inputs'], base_path, base_path_components, results)
-
-
-def _ToLocalPath(toplevel_dir, path):
-  """Converts |path| to a path relative to |toplevel_dir|."""
-  if path == toplevel_dir:
-    return ''
-  if path.startswith(toplevel_dir + '/'):
-    return path[len(toplevel_dir) + len('/'):]
-  return path
-
-
-def _ExtractSources(target, target_dict, toplevel_dir):
-  # |target| is either absolute or relative and in the format of the OS. Gyp
-  # source paths are always posix. Convert |target| to a posix path relative to
-  # |toplevel_dir_|. This is done to make it easy to build source paths.
-  base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
-  base_path_components = base_path.split('/')
-
-  # Add a trailing '/' so that _AddSources() can easily build paths.
-  if len(base_path):
-    base_path += '/'
-
-  if debug:
-    print 'ExtractSources', target, base_path
-
-  results = []
-  if 'sources' in target_dict:
-    _AddSources(target_dict['sources'], base_path, base_path_components,
-                results)
-  # Include the inputs from any actions. Any changes to these affect the
-  # resulting output.
-  if 'actions' in target_dict:
-    for action in target_dict['actions']:
-      _ExtractSourcesFromAction(action, base_path, base_path_components,
-                                results)
-  if 'rules' in target_dict:
-    for rule in target_dict['rules']:
-      _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
-
-  return results
-
-
-class Target(object):
-  """Holds information about a particular target:
-  deps: set of Targets this Target depends upon. This is not recursive, only the
-    direct dependent Targets.
-  match_status: one of the MatchStatus values.
-  back_deps: set of Targets that have a dependency on this Target.
-  visited: used during iteration to indicate whether we've visited this target.
-    This is used for two iterations, once in building the set of Targets and
-    again in _GetBuildTargets().
-  name: fully qualified name of the target.
-  requires_build: True if the target type is such that it needs to be built.
-    See _DoesTargetTypeRequireBuild for details.
-  added_to_compile_targets: used when determining if the target was added to the
-    set of targets that needs to be built.
-  in_roots: true if this target is a descendant of one of the root nodes.
-  is_executable: true if the type of target is executable.
-  is_static_library: true if the type of target is static_library.
-  is_or_has_linked_ancestor: true if the target does a link (eg executable), or
-    if there is a target in back_deps that does a link."""
-  def __init__(self, name):
-    self.deps = set()
-    self.match_status = MATCH_STATUS_TBD
-    self.back_deps = set()
-    self.name = name
-    # TODO(sky): I don't like hanging this off Target. This state is specific
-    # to certain functions and should be isolated there.
-    self.visited = False
-    self.requires_build = False
-    self.added_to_compile_targets = False
-    self.in_roots = False
-    self.is_executable = False
-    self.is_static_library = False
-    self.is_or_has_linked_ancestor = False
-
-
-class Config(object):
-  """Details what we're looking for
-  files: set of files to search for
-  targets: see file description for details."""
-  def __init__(self):
-    self.files = []
-    self.targets = set()
-    self.additional_compile_target_names = set()
-    self.test_target_names = set()
-
-  def Init(self, params):
-    """Initializes Config. This is a separate method as it raises an exception
-    if there is a parse error."""
-    generator_flags = params.get('generator_flags', {})
-    config_path = generator_flags.get('config_path', None)
-    if not config_path:
-      return
-    try:
-      f = open(config_path, 'r')
-      config = json.load(f)
-      f.close()
-    except IOError:
-      raise Exception('Unable to open file ' + config_path)
-    except ValueError as e:
-      raise Exception('Unable to parse config file ' + config_path + str(e))
-    if not isinstance(config, dict):
-      raise Exception('config_path must be a JSON file containing a dictionary')
-    self.files = config.get('files', [])
-    self.additional_compile_target_names = set(
-      config.get('additional_compile_targets', []))
-    self.test_target_names = set(config.get('test_targets', []))
-
-
-def _WasBuildFileModified(build_file, data, files, toplevel_dir):
-  """Returns true if the build file |build_file| is either in |files| or
-  one of the files included by |build_file| is in |files|. |toplevel_dir| is
-  the root of the source tree."""
-  if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
-    if debug:
-      print 'gyp file modified', build_file
-    return True
-
-  # First element of included_files is the file itself.
-  if len(data[build_file]['included_files']) <= 1:
-    return False
-
-  for include_file in data[build_file]['included_files'][1:]:
-    # |included_files| are relative to the directory of the |build_file|.
-    rel_include_file = \
-        _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
-    if _ToLocalPath(toplevel_dir, rel_include_file) in files:
-      if debug:
-        print 'included gyp file modified, gyp_file=', build_file, \
-            'included file=', rel_include_file
-      return True
-  return False
-
-
-def _GetOrCreateTargetByName(targets, target_name):
-  """Creates or returns the Target at targets[target_name]. If there is no
-  Target for |target_name| one is created. Returns a tuple of whether a new
-  Target was created and the Target."""
-  if target_name in targets:
-    return False, targets[target_name]
-  target = Target(target_name)
-  targets[target_name] = target
-  return True, target
-
-
-def _DoesTargetTypeRequireBuild(target_dict):
-  """Returns true if the target type is such that it needs to be built."""
-  # If a 'none' target has rules or actions we assume it requires a build.
-  return bool(target_dict['type'] != 'none' or
-              target_dict.get('actions') or target_dict.get('rules'))
-
-
-def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
-                     build_files):
-  """Returns a tuple of the following:
-  . A dictionary mapping from fully qualified name to Target.
-  . A list of the targets that have a source file in |files|.
-  . Targets that constitute the 'all' target. See description at top of file
-    for details on the 'all' target.
-  This sets the |match_status| of the targets that contain any of the source
-  files in |files| to MATCH_STATUS_MATCHES.
-  |toplevel_dir| is the root of the source tree."""
-  # Maps from target name to Target.
-  name_to_target = {}
-
-  # Targets that matched.
-  matching_targets = []
-
-  # Queue of targets to visit.
-  targets_to_visit = target_list[:]
-
-  # Maps from build file to a boolean indicating whether the build file is in
-  # |files|.
-  build_file_in_files = {}
-
-  # Root targets across all files.
-  roots = set()
-
-  # Set of Targets in |build_files|.
-  build_file_targets = set()
-
-  while len(targets_to_visit) > 0:
-    target_name = targets_to_visit.pop()
-    created_target, target = _GetOrCreateTargetByName(name_to_target,
-                                                      target_name)
-    if created_target:
-      roots.add(target)
-    elif target.visited:
-      continue
-
-    target.visited = True
-    target.requires_build = _DoesTargetTypeRequireBuild(
-        target_dicts[target_name])
-    target_type = target_dicts[target_name]['type']
-    target.is_executable = target_type == 'executable'
-    target.is_static_library = target_type == 'static_library'
-    target.is_or_has_linked_ancestor = (target_type == 'executable' or
-                                        target_type == 'shared_library')
-
-    build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
-    if not build_file in build_file_in_files:
-      build_file_in_files[build_file] = \
-          _WasBuildFileModified(build_file, data, files, toplevel_dir)
-
-    if build_file in build_files:
-      build_file_targets.add(target)
-
-    # If a build file (or any of its included files) is modified we assume all
-    # targets in the file are modified.
-    if build_file_in_files[build_file]:
-      print 'matching target from modified build file', target_name
-      target.match_status = MATCH_STATUS_MATCHES
-      matching_targets.append(target)
-    else:
-      sources = _ExtractSources(target_name, target_dicts[target_name],
-                                toplevel_dir)
-      for source in sources:
-        if _ToGypPath(os.path.normpath(source)) in files:
-          print 'target', target_name, 'matches', source
-          target.match_status = MATCH_STATUS_MATCHES
-          matching_targets.append(target)
-          break
-
-    # Add dependencies to visit as well as updating back pointers for deps.
-    for dep in target_dicts[target_name].get('dependencies', []):
-      targets_to_visit.append(dep)
-
-      created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
-                                                                dep)
-      if not created_dep_target:
-        roots.discard(dep_target)
-
-      target.deps.add(dep_target)
-      dep_target.back_deps.add(target)
-
-  return name_to_target, matching_targets, roots & build_file_targets
-
-
-def _GetUnqualifiedToTargetMapping(all_targets, to_find):
-  """Returns a tuple of the following:
-  . mapping (dictionary) from unqualified name to Target for all the
-    Targets in |to_find|.
-  . any target names not found. If this is empty all targets were found."""
-  result = {}
-  if not to_find:
-    return {}, []
-  to_find = set(to_find)
-  for target_name in all_targets.keys():
-    extracted = gyp.common.ParseQualifiedTarget(target_name)
-    if len(extracted) > 1 and extracted[1] in to_find:
-      to_find.remove(extracted[1])
-      result[extracted[1]] = all_targets[target_name]
-      if not to_find:
-        return result, []
-  return result, [x for x in to_find]
-
-
-def _DoesTargetDependOnMatchingTargets(target):
-  """Returns true if |target| or any of its dependencies is one of the
-  targets containing the files supplied as input to analyzer. This updates
-  |matches| of the Targets as it recurses.
-  target: the Target to look for."""
-  if target.match_status == MATCH_STATUS_DOESNT_MATCH:
-    return False
-  if target.match_status == MATCH_STATUS_MATCHES or \
-      target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
-    return True
-  for dep in target.deps:
-    if _DoesTargetDependOnMatchingTargets(dep):
-      target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
-      print '\t', target.name, 'matches by dep', dep.name
-      return True
-  target.match_status = MATCH_STATUS_DOESNT_MATCH
-  return False
-
-
-def _GetTargetsDependingOnMatchingTargets(possible_targets):
-  """Returns the list of Targets in |possible_targets| that depend (either
-  directly or indirectly) on at least one of the targets containing the files
-  supplied as input to analyzer.
-  possible_targets: targets to search from."""
-  found = []
-  print 'Targets that matched by dependency:'
-  for target in possible_targets:
-    if _DoesTargetDependOnMatchingTargets(target):
-      found.append(target)
-  return found
-
-
-def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
-  """Recurses through all targets that depend on |target|, adding all targets
-  that need to be built (and are in |roots|) to |result|.
-  roots: set of root targets.
-  add_if_no_ancestor: If true and there are no ancestors of |target| then add
-  |target| to |result|. |target| must still be in |roots|.
-  result: targets that need to be built are added here."""
-  if target.visited:
-    return
-
-  target.visited = True
-  target.in_roots = target in roots
-
-  for back_dep_target in target.back_deps:
-    _AddCompileTargets(back_dep_target, roots, False, result)
-    target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
-    target.in_roots |= back_dep_target.in_roots
-    target.is_or_has_linked_ancestor |= (
-      back_dep_target.is_or_has_linked_ancestor)
-
-  # Always add 'executable' targets. Even though they may be built by other
-  # targets that depend upon them it makes detection of what is going to be
-  # built easier.
-  # And always add static_libraries that have no dependencies on them from
-  # linkables. This is necessary as the other dependencies on them may be
-  # static libraries themselves, which are not compile time dependencies.
-  if target.in_roots and \
-        (target.is_executable or
-         (not target.added_to_compile_targets and
-          (add_if_no_ancestor or target.requires_build)) or
-         (target.is_static_library and add_if_no_ancestor and
-          not target.is_or_has_linked_ancestor)):
-    print '\t\tadding to compile targets', target.name, 'executable', \
-           target.is_executable, 'added_to_compile_targets', \
-           target.added_to_compile_targets, 'add_if_no_ancestor', \
-           add_if_no_ancestor, 'requires_build', target.requires_build, \
-           'is_static_library', target.is_static_library, \
-           'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
-    result.add(target)
-    target.added_to_compile_targets = True
-
-
-def _GetCompileTargets(matching_targets, supplied_targets):
-  """Returns the set of Targets that require a build.
-  matching_targets: targets that changed and need to be built.
-  supplied_targets: set of targets supplied to analyzer to search from."""
-  result = set()
-  for target in matching_targets:
-    print 'finding compile targets for match', target.name
-    _AddCompileTargets(target, supplied_targets, True, result)
-  return result
-
-
-def _WriteOutput(params, **values):
-  """Writes the output, either to stdout or a file is specified."""
-  if 'error' in values:
-    print 'Error:', values['error']
-  if 'status' in values:
-    print values['status']
-  if 'targets' in values:
-    values['targets'].sort()
-    print 'Supplied targets that depend on changed files:'
-    for target in values['targets']:
-      print '\t', target
-  if 'invalid_targets' in values:
-    values['invalid_targets'].sort()
-    print 'The following targets were not found:'
-    for target in values['invalid_targets']:
-      print '\t', target
-  if 'build_targets' in values:
-    values['build_targets'].sort()
-    print 'Targets that require a build:'
-    for target in values['build_targets']:
-      print '\t', target
-  if 'compile_targets' in values:
-    values['compile_targets'].sort()
-    print 'Targets that need to be built:'
-    for target in values['compile_targets']:
-      print '\t', target
-  if 'test_targets' in values:
-    values['test_targets'].sort()
-    print 'Test targets:'
-    for target in values['test_targets']:
-      print '\t', target
-
-  output_path = params.get('generator_flags', {}).get(
-      'analyzer_output_path', None)
-  if not output_path:
-    print json.dumps(values)
-    return
-  try:
-    f = open(output_path, 'w')
-    f.write(json.dumps(values) + '\n')
-    f.close()
-  except IOError as e:
-    print 'Error writing to output file', output_path, str(e)
-
-
-def _WasGypIncludeFileModified(params, files):
-  """Returns true if one of the files in |files| is in the set of included
-  files."""
-  if params['options'].includes:
-    for include in params['options'].includes:
-      if _ToGypPath(os.path.normpath(include)) in files:
-        print 'Include file modified, assuming all changed', include
-        return True
-  return False
-
-
-def _NamesNotIn(names, mapping):
-  """Returns a list of the values in |names| that are not in |mapping|."""
-  return [name for name in names if name not in mapping]
-
-
-def _LookupTargets(names, mapping):
-  """Returns a list of the mapping[name] for each value in |names| that is in
-  |mapping|."""
-  return [mapping[name] for name in names if name in mapping]
-
-
-def CalculateVariables(default_variables, params):
-  """Calculate additional variables for use in the build (called by gyp)."""
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'mac':
-    default_variables.setdefault('OS', 'mac')
-  elif flavor == 'win':
-    default_variables.setdefault('OS', 'win')
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Windows Ninja generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
-    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-  else:
-    operating_system = flavor
-    if flavor == 'android':
-      operating_system = 'linux'  # Keep this legacy behavior for now.
-    default_variables.setdefault('OS', operating_system)
-
-
-class TargetCalculator(object):
-  """Calculates the matching test_targets and matching compile_targets."""
-  def __init__(self, files, additional_compile_target_names, test_target_names,
-               data, target_list, target_dicts, toplevel_dir, build_files):
-    self._additional_compile_target_names = set(additional_compile_target_names)
-    self._test_target_names = set(test_target_names)
-    self._name_to_target, self._changed_targets, self._root_targets = (
-      _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
-                       frozenset(files), build_files))
-    self._unqualified_mapping, self.invalid_targets = (
-      _GetUnqualifiedToTargetMapping(self._name_to_target,
-                                     self._supplied_target_names_no_all()))
-
-  def _supplied_target_names(self):
-    return self._additional_compile_target_names | self._test_target_names
-
-  def _supplied_target_names_no_all(self):
-    """Returns the supplied test targets without 'all'."""
-    result = self._supplied_target_names();
-    result.discard('all')
-    return result
-
-  def is_build_impacted(self):
-    """Returns true if the supplied files impact the build at all."""
-    return self._changed_targets
-
-  def find_matching_test_target_names(self):
-    """Returns the set of output test targets."""
-    assert self.is_build_impacted()
-    # Find the test targets first. 'all' is special cased to mean all the
-    # root targets. To handle 'all', the supplied |test_targets| are expanded
-    # to include the root targets during lookup. If any of the root targets
-    # match, we remove it and replace it with 'all'.
-    test_target_names_no_all = set(self._test_target_names)
-    test_target_names_no_all.discard('all')
-    test_targets_no_all = _LookupTargets(test_target_names_no_all,
-                                         self._unqualified_mapping)
-    test_target_names_contains_all = 'all' in self._test_target_names
-    if test_target_names_contains_all:
-      test_targets = [x for x in (set(test_targets_no_all) |
-                                  set(self._root_targets))]
-    else:
-      test_targets = [x for x in test_targets_no_all]
-    print 'supplied test_targets'
-    for target_name in self._test_target_names:
-      print '\t', target_name
-    print 'found test_targets'
-    for target in test_targets:
-      print '\t', target.name
-    print 'searching for matching test targets'
-    matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
-    matching_test_targets_contains_all = (test_target_names_contains_all and
-                                          set(matching_test_targets) &
-                                          set(self._root_targets))
-    if matching_test_targets_contains_all:
-      # Remove any of the targets for 'all' that were not explicitly supplied,
-      # 'all' is subsequently added to the matching names below.
-      matching_test_targets = [x for x in (set(matching_test_targets) &
-                                           set(test_targets_no_all))]
-    print 'matched test_targets'
-    for target in matching_test_targets:
-      print '\t', target.name
-    matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
-                             for target in matching_test_targets]
-    if matching_test_targets_contains_all:
-      matching_target_names.append('all')
-      print '\tall'
-    return matching_target_names
-
-  def find_matching_compile_target_names(self):
-    """Returns the set of output compile targets."""
-    assert self.is_build_impacted();
-    # Compile targets are found by searching up from changed targets.
-    # Reset the visited status for _GetBuildTargets.
-    for target in self._name_to_target.itervalues():
-      target.visited = False
-
-    supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
-                                      self._unqualified_mapping)
-    if 'all' in self._supplied_target_names():
-      supplied_targets = [x for x in (set(supplied_targets) |
-                                      set(self._root_targets))]
-    print 'Supplied test_targets & compile_targets'
-    for target in supplied_targets:
-      print '\t', target.name
-    print 'Finding compile targets'
-    compile_targets = _GetCompileTargets(self._changed_targets,
-                                         supplied_targets)
-    return [gyp.common.ParseQualifiedTarget(target.name)[1]
-            for target in compile_targets]
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  """Called by gyp as the final stage. Outputs results."""
-  config = Config()
-  try:
-    config.Init(params)
-
-    if not config.files:
-      raise Exception('Must specify files to analyze via config_path generator '
-                      'flag')
-
-    toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
-    if debug:
-      print 'toplevel_dir', toplevel_dir
-
-    if _WasGypIncludeFileModified(params, config.files):
-      result_dict = { 'status': all_changed_string,
-                      'test_targets': list(config.test_target_names),
-                      'compile_targets': list(
-                        config.additional_compile_target_names |
-                        config.test_target_names) }
-      _WriteOutput(params, **result_dict)
-      return
-
-    calculator = TargetCalculator(config.files,
-                                  config.additional_compile_target_names,
-                                  config.test_target_names, data,
-                                  target_list, target_dicts, toplevel_dir,
-                                  params['build_files'])
-    if not calculator.is_build_impacted():
-      result_dict = { 'status': no_dependency_string,
-                      'test_targets': [],
-                      'compile_targets': [] }
-      if calculator.invalid_targets:
-        result_dict['invalid_targets'] = calculator.invalid_targets
-      _WriteOutput(params, **result_dict)
-      return
-
-    test_target_names = calculator.find_matching_test_target_names()
-    compile_target_names = calculator.find_matching_compile_target_names()
-    found_at_least_one_target = compile_target_names or test_target_names
-    result_dict = { 'test_targets': test_target_names,
-                    'status': found_dependency_string if
-                        found_at_least_one_target else no_dependency_string,
-                    'compile_targets': list(
-                        set(compile_target_names) |
-                        set(test_target_names)) }
-    if calculator.invalid_targets:
-      result_dict['invalid_targets'] = calculator.invalid_targets
-    _WriteOutput(params, **result_dict)
-
-  except Exception as e:
-    _WriteOutput(params, error=str(e))
diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py
deleted file mode 100644
index a2b9629..0000000
--- a/tools/gyp/pylib/gyp/generator/cmake.py
+++ /dev/null
@@ -1,1248 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""cmake output module
-
-This module is under development and should be considered experimental.
-
-This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
-created for each configuration.
-
-This module's original purpose was to support editing in IDEs like KDevelop
-which use CMake for project management. It is also possible to use CMake to
-generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
-will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
-but build using CMake. As a result QtCreator editor is unaware of compiler
-defines. The generated CMakeLists.txt can also be used to build on Linux. There
-is currently no support for building on platforms other than Linux.
-
-The generated CMakeLists.txt should properly compile all projects. However,
-there is a mismatch between gyp and cmake with regard to linking. All attempts
-are made to work around this, but CMake sometimes sees -Wl,--start-group as a
-library and incorrectly repeats it. As a result the output of this generator
-should not be relied on for building.
-
-When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
-not be able to find the header file directories described in the generated
-CMakeLists.txt file.
-"""
-
-import multiprocessing
-import os
-import signal
-import string
-import subprocess
-import gyp.common
-import gyp.xcode_emulation
-
-generator_default_variables = {
-  'EXECUTABLE_PREFIX': '',
-  'EXECUTABLE_SUFFIX': '',
-  'STATIC_LIB_PREFIX': 'lib',
-  'STATIC_LIB_SUFFIX': '.a',
-  'SHARED_LIB_PREFIX': 'lib',
-  'SHARED_LIB_SUFFIX': '.so',
-  'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
-  'LIB_DIR': '${obj}.${TOOLSET}',
-  'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
-  'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
-  'PRODUCT_DIR': '${builddir}',
-  'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
-  'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
-  'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
-  'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
-  'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
-  'CONFIGURATION_NAME': '${configuration}',
-}
-
-FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
-
-generator_supports_multiple_toolsets = True
-generator_wants_static_library_dependencies_adjusted = True
-
-COMPILABLE_EXTENSIONS = {
-  '.c': 'cc',
-  '.cc': 'cxx',
-  '.cpp': 'cxx',
-  '.cxx': 'cxx',
-  '.s': 's', # cc
-  '.S': 's', # cc
-}
-
-
-def RemovePrefix(a, prefix):
-  """Returns 'a' without 'prefix' if it starts with 'prefix'."""
-  return a[len(prefix):] if a.startswith(prefix) else a
-
-
-def CalculateVariables(default_variables, params):
-  """Calculate additional variables for use in the build (called by gyp)."""
-  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
-
-def Compilable(filename):
-  """Return true if the file is compilable (should be in OBJS)."""
-  return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
-
-
-def Linkable(filename):
-  """Return true if the file is linkable (should be on the link line)."""
-  return filename.endswith('.o')
-
-
-def NormjoinPathForceCMakeSource(base_path, rel_path):
-  """Resolves rel_path against base_path and returns the result.
-
-  If rel_path is an absolute path it is returned unchanged.
-  Otherwise it is resolved against base_path and normalized.
-  If the result is a relative path, it is forced to be relative to the
-  CMakeLists.txt.
-  """
-  if os.path.isabs(rel_path):
-    return rel_path
-  if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
-    return rel_path
-  # TODO: do we need to check base_path for absolute variables as well?
-  return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
-                      os.path.normpath(os.path.join(base_path, rel_path)))
-
-
-def NormjoinPath(base_path, rel_path):
-  """Resolves rel_path against base_path and returns the result.
-  TODO: what is this really used for?
-  If rel_path begins with '$' it is returned unchanged.
-  Otherwise it is resolved against base_path if relative, then normalized.
-  """
-  if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
-    return rel_path
-  return os.path.normpath(os.path.join(base_path, rel_path))
-
-
-def CMakeStringEscape(a):
-  """Escapes the string 'a' for use inside a CMake string.
-
-  This means escaping
-  '\' otherwise it may be seen as modifying the next character
-  '"' otherwise it will end the string
-  ';' otherwise the string becomes a list
-
-  The following do not need to be escaped
-  '#' when the lexer is in string state, this does not start a comment
-
-  The following are yet unknown
-  '$' generator variables (like ${obj}) must not be escaped,
-      but text $ should be escaped
-      what is wanted is to know which $ come from generator variables
-  """
-  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
-
-
-def SetFileProperty(output, source_name, property_name, values, sep):
-  """Given a set of source file, sets the given property on them."""
-  output.write('set_source_files_properties(')
-  output.write(source_name)
-  output.write(' PROPERTIES ')
-  output.write(property_name)
-  output.write(' "')
-  for value in values:
-    output.write(CMakeStringEscape(value))
-    output.write(sep)
-  output.write('")\n')
-
-
-def SetFilesProperty(output, variable, property_name, values, sep):
-  """Given a set of source files, sets the given property on them."""
-  output.write('set_source_files_properties(')
-  WriteVariable(output, variable)
-  output.write(' PROPERTIES ')
-  output.write(property_name)
-  output.write(' "')
-  for value in values:
-    output.write(CMakeStringEscape(value))
-    output.write(sep)
-  output.write('")\n')
-
-
-def SetTargetProperty(output, target_name, property_name, values, sep=''):
-  """Given a target, sets the given property."""
-  output.write('set_target_properties(')
-  output.write(target_name)
-  output.write(' PROPERTIES ')
-  output.write(property_name)
-  output.write(' "')
-  for value in values:
-    output.write(CMakeStringEscape(value))
-    output.write(sep)
-  output.write('")\n')
-
-
-def SetVariable(output, variable_name, value):
-  """Sets a CMake variable."""
-  output.write('set(')
-  output.write(variable_name)
-  output.write(' "')
-  output.write(CMakeStringEscape(value))
-  output.write('")\n')
-
-
-def SetVariableList(output, variable_name, values):
-  """Sets a CMake variable to a list."""
-  if not values:
-    return SetVariable(output, variable_name, "")
-  if len(values) == 1:
-    return SetVariable(output, variable_name, values[0])
-  output.write('list(APPEND ')
-  output.write(variable_name)
-  output.write('\n  "')
-  output.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
-  output.write('")\n')
-
-
-def UnsetVariable(output, variable_name):
-  """Unsets a CMake variable."""
-  output.write('unset(')
-  output.write(variable_name)
-  output.write(')\n')
-
-
-def WriteVariable(output, variable_name, prepend=None):
-  if prepend:
-    output.write(prepend)
-  output.write('${')
-  output.write(variable_name)
-  output.write('}')
-
-
-class CMakeTargetType(object):
-  def __init__(self, command, modifier, property_modifier):
-    self.command = command
-    self.modifier = modifier
-    self.property_modifier = property_modifier
-
-
-cmake_target_type_from_gyp_target_type = {
-  'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
-  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
-  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
-  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
-  'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
-}
-
-
-def StringToCMakeTargetName(a):
-  """Converts the given string 'a' to a valid CMake target name.
-
-  All invalid characters are replaced by '_'.
-  Invalid for cmake: ' ', '/', '(', ')', '"'
-  Invalid for make: ':'
-  Invalid for unknown reasons but cause failures: '.'
-  """
-  return a.translate(string.maketrans(' /():."', '_______'))
-
-
-def WriteActions(target_name, actions, extra_sources, extra_deps,
-                 path_to_gyp, output):
-  """Write CMake for the 'actions' in the target.
-
-  Args:
-    target_name: the name of the CMake target being generated.
-    actions: the Gyp 'actions' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-    extra_deps: [<cmake_target>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
-  for action in actions:
-    action_name = StringToCMakeTargetName(action['action_name'])
-    action_target_name = '%s__%s' % (target_name, action_name)
-
-    inputs = action['inputs']
-    inputs_name = action_target_name + '__input'
-    SetVariableList(output, inputs_name,
-        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
-
-    outputs = action['outputs']
-    cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
-                     for out in outputs]
-    outputs_name = action_target_name + '__output'
-    SetVariableList(output, outputs_name, cmake_outputs)
-
-    # Build up a list of outputs.
-    # Collect the output dirs we'll need.
-    dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
-    if int(action.get('process_outputs_as_sources', False)):
-      extra_sources.extend(zip(cmake_outputs, outputs))
-
-    # add_custom_command
-    output.write('add_custom_command(OUTPUT ')
-    WriteVariable(output, outputs_name)
-    output.write('\n')
-
-    if len(dirs) > 0:
-      for directory in dirs:
-        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
-        output.write(directory)
-        output.write('\n')
-
-    output.write('  COMMAND ')
-    output.write(gyp.common.EncodePOSIXShellList(action['action']))
-    output.write('\n')
-
-    output.write('  DEPENDS ')
-    WriteVariable(output, inputs_name)
-    output.write('\n')
-
-    output.write('  WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
-    output.write(path_to_gyp)
-    output.write('\n')
-
-    output.write('  COMMENT ')
-    if 'message' in action:
-      output.write(action['message'])
-    else:
-      output.write(action_target_name)
-    output.write('\n')
-
-    output.write('  VERBATIM\n')
-    output.write(')\n')
-
-    # add_custom_target
-    output.write('add_custom_target(')
-    output.write(action_target_name)
-    output.write('\n  DEPENDS ')
-    WriteVariable(output, outputs_name)
-    output.write('\n  SOURCES ')
-    WriteVariable(output, inputs_name)
-    output.write('\n)\n')
-
-    extra_deps.append(action_target_name)
-
-
-def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
-  if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
-    if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
-      return rel_path
-  return NormjoinPathForceCMakeSource(base_path, rel_path)
-
-
-def WriteRules(target_name, rules, extra_sources, extra_deps,
-               path_to_gyp, output):
-  """Write CMake for the 'rules' in the target.
-
-  Args:
-    target_name: the name of the CMake target being generated.
-    rules: the Gyp 'rules' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-    extra_deps: [<cmake_target>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
-  for rule in rules:
-    rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
-
-    inputs = rule.get('inputs', [])
-    inputs_name = rule_name + '__input'
-    SetVariableList(output, inputs_name,
-        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
-    outputs = rule['outputs']
-    var_outputs = []
-
-    for count, rule_source in enumerate(rule.get('rule_sources', [])):
-      action_name = rule_name + '_' + str(count)
-
-      rule_source_dirname, rule_source_basename = os.path.split(rule_source)
-      rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
-
-      SetVariable(output, 'RULE_INPUT_PATH', rule_source)
-      SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
-      SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
-      SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
-      SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
-
-      # Build up a list of outputs.
-      # Collect the output dirs we'll need.
-      dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
-      # Create variables for the output, as 'local' variable will be unset.
-      these_outputs = []
-      for output_index, out in enumerate(outputs):
-        output_name = action_name + '_' + str(output_index)
-        SetVariable(output, output_name,
-                     NormjoinRulePathForceCMakeSource(path_to_gyp, out,
-                                                      rule_source))
-        if int(rule.get('process_outputs_as_sources', False)):
-          extra_sources.append(('${' + output_name + '}', out))
-        these_outputs.append('${' + output_name + '}')
-        var_outputs.append('${' + output_name + '}')
-
-      # add_custom_command
-      output.write('add_custom_command(OUTPUT\n')
-      for out in these_outputs:
-        output.write('  ')
-        output.write(out)
-        output.write('\n')
-
-      for directory in dirs:
-        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
-        output.write(directory)
-        output.write('\n')
-
-      output.write('  COMMAND ')
-      output.write(gyp.common.EncodePOSIXShellList(rule['action']))
-      output.write('\n')
-
-      output.write('  DEPENDS ')
-      WriteVariable(output, inputs_name)
-      output.write(' ')
-      output.write(NormjoinPath(path_to_gyp, rule_source))
-      output.write('\n')
-
-      # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
-      # The cwd is the current build directory.
-      output.write('  WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
-      output.write(path_to_gyp)
-      output.write('\n')
-
-      output.write('  COMMENT ')
-      if 'message' in rule:
-        output.write(rule['message'])
-      else:
-        output.write(action_name)
-      output.write('\n')
-
-      output.write('  VERBATIM\n')
-      output.write(')\n')
-
-      UnsetVariable(output, 'RULE_INPUT_PATH')
-      UnsetVariable(output, 'RULE_INPUT_DIRNAME')
-      UnsetVariable(output, 'RULE_INPUT_NAME')
-      UnsetVariable(output, 'RULE_INPUT_ROOT')
-      UnsetVariable(output, 'RULE_INPUT_EXT')
-
-    # add_custom_target
-    output.write('add_custom_target(')
-    output.write(rule_name)
-    output.write(' DEPENDS\n')
-    for out in var_outputs:
-      output.write('  ')
-      output.write(out)
-      output.write('\n')
-    output.write('SOURCES ')
-    WriteVariable(output, inputs_name)
-    output.write('\n')
-    for rule_source in rule.get('rule_sources', []):
-      output.write('  ')
-      output.write(NormjoinPath(path_to_gyp, rule_source))
-      output.write('\n')
-    output.write(')\n')
-
-    extra_deps.append(rule_name)
-
-
-def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
-  """Write CMake for the 'copies' in the target.
-
-  Args:
-    target_name: the name of the CMake target being generated.
-    copies: the Gyp 'copies' list for this target.
-    extra_deps: [<cmake_target>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
-  copy_name = target_name + '__copies'
-
-  # CMake gets upset with custom targets with OUTPUT which specify no output.
-  have_copies = any(copy['files'] for copy in copies)
-  if not have_copies:
-    output.write('add_custom_target(')
-    output.write(copy_name)
-    output.write(')\n')
-    extra_deps.append(copy_name)
-    return
-
-  class Copy(object):
-    def __init__(self, ext, command):
-      self.cmake_inputs = []
-      self.cmake_outputs = []
-      self.gyp_inputs = []
-      self.gyp_outputs = []
-      self.ext = ext
-      self.inputs_name = None
-      self.outputs_name = None
-      self.command = command
-
-  file_copy = Copy('', 'copy')
-  dir_copy = Copy('_dirs', 'copy_directory')
-
-  for copy in copies:
-    files = copy['files']
-    destination = copy['destination']
-    for src in files:
-      path = os.path.normpath(src)
-      basename = os.path.split(path)[1]
-      dst = os.path.join(destination, basename)
-
-      copy = file_copy if os.path.basename(src) else dir_copy
-
-      copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
-      copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
-      copy.gyp_inputs.append(src)
-      copy.gyp_outputs.append(dst)
-
-  for copy in (file_copy, dir_copy):
-    if copy.cmake_inputs:
-      copy.inputs_name = copy_name + '__input' + copy.ext
-      SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
-
-      copy.outputs_name = copy_name + '__output' + copy.ext
-      SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
-
-  # add_custom_command
-  output.write('add_custom_command(\n')
-
-  output.write('OUTPUT')
-  for copy in (file_copy, dir_copy):
-    if copy.outputs_name:
-      WriteVariable(output, copy.outputs_name, ' ')
-  output.write('\n')
-
-  for copy in (file_copy, dir_copy):
-    for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
-      # 'cmake -E copy src dst' will create the 'dst' directory if needed.
-      output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
-      output.write(src)
-      output.write(' ')
-      output.write(dst)
-      output.write("\n")
-
-  output.write('DEPENDS')
-  for copy in (file_copy, dir_copy):
-    if copy.inputs_name:
-      WriteVariable(output, copy.inputs_name, ' ')
-  output.write('\n')
-
-  output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
-  output.write(path_to_gyp)
-  output.write('\n')
-
-  output.write('COMMENT Copying for ')
-  output.write(target_name)
-  output.write('\n')
-
-  output.write('VERBATIM\n')
-  output.write(')\n')
-
-  # add_custom_target
-  output.write('add_custom_target(')
-  output.write(copy_name)
-  output.write('\n  DEPENDS')
-  for copy in (file_copy, dir_copy):
-    if copy.outputs_name:
-      WriteVariable(output, copy.outputs_name, ' ')
-  output.write('\n  SOURCES')
-  if file_copy.inputs_name:
-    WriteVariable(output, file_copy.inputs_name, ' ')
-  output.write('\n)\n')
-
-  extra_deps.append(copy_name)
-
-
-def CreateCMakeTargetBaseName(qualified_target):
-  """This is the name we would like the target to have."""
-  _, gyp_target_name, gyp_target_toolset = (
-      gyp.common.ParseQualifiedTarget(qualified_target))
-  cmake_target_base_name = gyp_target_name
-  if gyp_target_toolset and gyp_target_toolset != 'target':
-    cmake_target_base_name += '_' + gyp_target_toolset
-  return StringToCMakeTargetName(cmake_target_base_name)
-
-
-def CreateCMakeTargetFullName(qualified_target):
-  """An unambiguous name for the target."""
-  gyp_file, gyp_target_name, gyp_target_toolset = (
-      gyp.common.ParseQualifiedTarget(qualified_target))
-  cmake_target_full_name = gyp_file + ':' + gyp_target_name
-  if gyp_target_toolset and gyp_target_toolset != 'target':
-    cmake_target_full_name += '_' + gyp_target_toolset
-  return StringToCMakeTargetName(cmake_target_full_name)
-
-
-class CMakeNamer(object):
-  """Converts Gyp target names into CMake target names.
-
-  CMake requires that target names be globally unique. One way to ensure
-  this is to fully qualify the names of the targets. Unfortunately, this
-  ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
-  of just "chrome". If this generator were only interested in building, it
-  would be possible to fully qualify all target names, then create
-  unqualified target names which depend on all qualified targets which
-  should have had that name. This is more or less what the 'make' generator
-  does with aliases. However, one goal of this generator is to create CMake
-  files for use with IDEs, and fully qualified names are not as user
-  friendly.
-
-  Since target name collision is rare, we do the above only when required.
-
-  Toolset variants are always qualified from the base, as this is required for
-  building. However, it also makes sense for an IDE, as it is possible for
-  defines to be different.
-  """
-  def __init__(self, target_list):
-    self.cmake_target_base_names_conficting = set()
-
-    cmake_target_base_names_seen = set()
-    for qualified_target in target_list:
-      cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
-
-      if cmake_target_base_name not in cmake_target_base_names_seen:
-        cmake_target_base_names_seen.add(cmake_target_base_name)
-      else:
-        self.cmake_target_base_names_conficting.add(cmake_target_base_name)
-
-  def CreateCMakeTargetName(self, qualified_target):
-    base_name = CreateCMakeTargetBaseName(qualified_target)
-    if base_name in self.cmake_target_base_names_conficting:
-      return CreateCMakeTargetFullName(qualified_target)
-    return base_name
-
-
-def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
-                options, generator_flags, all_qualified_targets, flavor,
-                output):
-  # The make generator does this always.
-  # TODO: It would be nice to be able to tell CMake all dependencies.
-  circular_libs = generator_flags.get('circular', True)
-
-  if not generator_flags.get('standalone', False):
-    output.write('\n#')
-    output.write(qualified_target)
-    output.write('\n')
-
-  gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
-  rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
-  rel_gyp_dir = os.path.dirname(rel_gyp_file)
-
-  # Relative path from build dir to top dir.
-  build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
-  # Relative path from build dir to gyp dir.
-  build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
-
-  path_from_cmakelists_to_gyp = build_to_gyp
-
-  spec = target_dicts.get(qualified_target, {})
-  config = spec.get('configurations', {}).get(config_to_use, {})
-
-  xcode_settings = None
-  if flavor == 'mac':
-    xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
-
-  target_name = spec.get('target_name', '<missing target name>')
-  target_type = spec.get('type', '<missing target type>')
-  target_toolset = spec.get('toolset')
-
-  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
-  if cmake_target_type is None:
-    print ('Target %s has unknown target type %s, skipping.' %
-          (        target_name,               target_type  ) )
-    return
-
-  SetVariable(output, 'TARGET', target_name)
-  SetVariable(output, 'TOOLSET', target_toolset)
-
-  cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
-
-  extra_sources = []
-  extra_deps = []
-
-  # Actions must come first, since they can generate more OBJs for use below.
-  if 'actions' in spec:
-    WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
-                 path_from_cmakelists_to_gyp, output)
-
-  # Rules must be early like actions.
-  if 'rules' in spec:
-    WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
-               path_from_cmakelists_to_gyp, output)
-
-  # Copies
-  if 'copies' in spec:
-    WriteCopies(cmake_target_name, spec['copies'], extra_deps,
-                path_from_cmakelists_to_gyp, output)
-
-  # Target and sources
-  srcs = spec.get('sources', [])
-
-  # Gyp separates the sheep from the goats based on file extensions.
-  # A full separation is done here because of flag handling (see below).
-  s_sources = []
-  c_sources = []
-  cxx_sources = []
-  linkable_sources = []
-  other_sources = []
-  for src in srcs:
-    _, ext = os.path.splitext(src)
-    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src);
-
-    if src_type == 's':
-      s_sources.append(src_norm_path)
-    elif src_type == 'cc':
-      c_sources.append(src_norm_path)
-    elif src_type == 'cxx':
-      cxx_sources.append(src_norm_path)
-    elif Linkable(ext):
-      linkable_sources.append(src_norm_path)
-    else:
-      other_sources.append(src_norm_path)
-
-  for extra_source in extra_sources:
-    src, real_source = extra_source
-    _, ext = os.path.splitext(real_source)
-    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
-    if src_type == 's':
-      s_sources.append(src)
-    elif src_type == 'cc':
-      c_sources.append(src)
-    elif src_type == 'cxx':
-      cxx_sources.append(src)
-    elif Linkable(ext):
-      linkable_sources.append(src)
-    else:
-      other_sources.append(src)
-
-  s_sources_name = None
-  if s_sources:
-    s_sources_name = cmake_target_name + '__asm_srcs'
-    SetVariableList(output, s_sources_name, s_sources)
-
-  c_sources_name = None
-  if c_sources:
-    c_sources_name = cmake_target_name + '__c_srcs'
-    SetVariableList(output, c_sources_name, c_sources)
-
-  cxx_sources_name = None
-  if cxx_sources:
-    cxx_sources_name = cmake_target_name + '__cxx_srcs'
-    SetVariableList(output, cxx_sources_name, cxx_sources)
-
-  linkable_sources_name = None
-  if linkable_sources:
-    linkable_sources_name = cmake_target_name + '__linkable_srcs'
-    SetVariableList(output, linkable_sources_name, linkable_sources)
-
-  other_sources_name = None
-  if other_sources:
-    other_sources_name = cmake_target_name + '__other_srcs'
-    SetVariableList(output, other_sources_name, other_sources)
-
-  # CMake gets upset when executable targets provide no sources.
-  # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
-  dummy_sources_name = None
-  has_sources = (s_sources_name or
-                 c_sources_name or
-                 cxx_sources_name or
-                 linkable_sources_name or
-                 other_sources_name)
-  if target_type == 'executable' and not has_sources:
-    dummy_sources_name = cmake_target_name + '__dummy_srcs'
-    SetVariable(output, dummy_sources_name,
-                "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
-    output.write('if(NOT EXISTS "')
-    WriteVariable(output, dummy_sources_name)
-    output.write('")\n')
-    output.write('  file(WRITE "')
-    WriteVariable(output, dummy_sources_name)
-    output.write('" "")\n')
-    output.write("endif()\n")
-
-
-  # CMake is opposed to setting linker directories and considers the practice
-  # of setting linker directories dangerous. Instead, it favors the use of
-  # find_library and passing absolute paths to target_link_libraries.
-  # However, CMake does provide the command link_directories, which adds
-  # link directories to targets defined after it is called.
-  # As a result, link_directories must come before the target definition.
-  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
-  library_dirs = config.get('library_dirs')
-  if library_dirs is not None:
-    output.write('link_directories(')
-    for library_dir in library_dirs:
-      output.write(' ')
-      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
-      output.write('\n')
-    output.write(')\n')
-
-  output.write(cmake_target_type.command)
-  output.write('(')
-  output.write(cmake_target_name)
-
-  if cmake_target_type.modifier is not None:
-    output.write(' ')
-    output.write(cmake_target_type.modifier)
-
-  if s_sources_name:
-    WriteVariable(output, s_sources_name, ' ')
-  if c_sources_name:
-    WriteVariable(output, c_sources_name, ' ')
-  if cxx_sources_name:
-    WriteVariable(output, cxx_sources_name, ' ')
-  if linkable_sources_name:
-    WriteVariable(output, linkable_sources_name, ' ')
-  if other_sources_name:
-    WriteVariable(output, other_sources_name, ' ')
-  if dummy_sources_name:
-    WriteVariable(output, dummy_sources_name, ' ')
-
-  output.write(')\n')
-
-  # Let CMake know if the 'all' target should depend on this target.
-  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
-                             else 'FALSE')
-  SetTargetProperty(output, cmake_target_name,
-                      'EXCLUDE_FROM_ALL', exclude_from_all)
-  for extra_target_name in extra_deps:
-    SetTargetProperty(output, extra_target_name,
-                        'EXCLUDE_FROM_ALL', exclude_from_all)
-
-  # Output name and location.
-  if target_type != 'none':
-    # Link as 'C' if there are no other files
-    if not c_sources and not cxx_sources:
-      SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
-
-    # Mark uncompiled sources as uncompiled.
-    if other_sources_name:
-      output.write('set_source_files_properties(')
-      WriteVariable(output, other_sources_name, '')
-      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
-
-    # Mark object sources as linkable.
-    if linkable_sources_name:
-      output.write('set_source_files_properties(')
-      WriteVariable(output, linkable_sources_name, '')
-      output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
-
-    # Output directory
-    target_output_directory = spec.get('product_dir')
-    if target_output_directory is None:
-      if target_type in ('executable', 'loadable_module'):
-        target_output_directory = generator_default_variables['PRODUCT_DIR']
-      elif target_type == 'shared_library':
-        target_output_directory = '${builddir}/lib.${TOOLSET}'
-      elif spec.get('standalone_static_library', False):
-        target_output_directory = generator_default_variables['PRODUCT_DIR']
-      else:
-        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
-                                            options.toplevel_dir)
-        target_output_directory = '${obj}.${TOOLSET}'
-        target_output_directory = (
-            os.path.join(target_output_directory, base_path))
-
-    cmake_target_output_directory = NormjoinPathForceCMakeSource(
-                                        path_from_cmakelists_to_gyp,
-                                        target_output_directory)
-    SetTargetProperty(output,
-        cmake_target_name,
-        cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
-        cmake_target_output_directory)
-
-    # Output name
-    default_product_prefix = ''
-    default_product_name = target_name
-    default_product_ext = ''
-    if target_type == 'static_library':
-      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
-      default_product_name = RemovePrefix(default_product_name,
-                                          static_library_prefix)
-      default_product_prefix = static_library_prefix
-      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
-
-    elif target_type in ('loadable_module', 'shared_library'):
-      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
-      default_product_name = RemovePrefix(default_product_name,
-                                          shared_library_prefix)
-      default_product_prefix = shared_library_prefix
-      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
-
-    elif target_type != 'executable':
-      print ('ERROR: What output file should be generated?',
-              'type', target_type, 'target', target_name)
-
-    product_prefix = spec.get('product_prefix', default_product_prefix)
-    product_name = spec.get('product_name', default_product_name)
-    product_ext = spec.get('product_extension')
-    if product_ext:
-      product_ext = '.' + product_ext
-    else:
-      product_ext = default_product_ext
-
-    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
-    SetTargetProperty(output, cmake_target_name,
-                        cmake_target_type.property_modifier + '_OUTPUT_NAME',
-                        product_name)
-    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
-
-    # Make the output of this target referenceable as a source.
-    cmake_target_output_basename = product_prefix + product_name + product_ext
-    cmake_target_output = os.path.join(cmake_target_output_directory,
-                                       cmake_target_output_basename)
-    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
-
-    # Includes
-    includes = config.get('include_dirs')
-    if includes:
-      # This (target include directories) is what requires CMake 2.8.8
-      includes_name = cmake_target_name + '__include_dirs'
-      SetVariableList(output, includes_name,
-          [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
-           for include in includes])
-      output.write('set_property(TARGET ')
-      output.write(cmake_target_name)
-      output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
-      WriteVariable(output, includes_name, '')
-      output.write(')\n')
-
-    # Defines
-    defines = config.get('defines')
-    if defines is not None:
-      SetTargetProperty(output,
-                        cmake_target_name,
-                        'COMPILE_DEFINITIONS',
-                        defines,
-                        ';')
-
-    # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
-    # CMake currently does not have target C and CXX flags.
-    # So, instead of doing...
-
-    # cflags_c = config.get('cflags_c')
-    # if cflags_c is not None:
-    #   SetTargetProperty(output, cmake_target_name,
-    #                       'C_COMPILE_FLAGS', cflags_c, ' ')
-
-    # cflags_cc = config.get('cflags_cc')
-    # if cflags_cc is not None:
-    #   SetTargetProperty(output, cmake_target_name,
-    #                       'CXX_COMPILE_FLAGS', cflags_cc, ' ')
-
-    # Instead we must...
-    cflags = config.get('cflags', [])
-    cflags_c = config.get('cflags_c', [])
-    cflags_cxx = config.get('cflags_cc', [])
-    if xcode_settings:
-      cflags = xcode_settings.GetCflags(config_to_use)
-      cflags_c = xcode_settings.GetCflagsC(config_to_use)
-      cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
-      #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
-      #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
-
-    if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
-      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
-
-    elif c_sources and not (s_sources or cxx_sources):
-      flags = []
-      flags.extend(cflags)
-      flags.extend(cflags_c)
-      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
-    elif cxx_sources and not (s_sources or c_sources):
-      flags = []
-      flags.extend(cflags)
-      flags.extend(cflags_cxx)
-      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
-    else:
-      # TODO: This is broken, one cannot generally set properties on files,
-      # as other targets may require different properties on the same files.
-      if s_sources and cflags:
-        SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
-
-      if c_sources and (cflags or cflags_c):
-        flags = []
-        flags.extend(cflags)
-        flags.extend(cflags_c)
-        SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
-      if cxx_sources and (cflags or cflags_cxx):
-        flags = []
-        flags.extend(cflags)
-        flags.extend(cflags_cxx)
-        SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
-    # Linker flags
-    ldflags = config.get('ldflags')
-    if ldflags is not None:
-      SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
-
-    # XCode settings
-    xcode_settings = config.get('xcode_settings', {})
-    for xcode_setting, xcode_value in xcode_settings.viewitems():
-      SetTargetProperty(output, cmake_target_name,
-                        "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
-                        '' if isinstance(xcode_value, str) else ' ')
-
-  # Note on Dependencies and Libraries:
-  # CMake wants to handle link order, resolving the link line up front.
-  # Gyp does not retain or enforce specifying enough information to do so.
-  # So do as other gyp generators and use --start-group and --end-group.
-  # Give CMake as little information as possible so that it doesn't mess it up.
-
-  # Dependencies
-  rawDeps = spec.get('dependencies', [])
-
-  static_deps = []
-  shared_deps = []
-  other_deps = []
-  for rawDep in rawDeps:
-    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
-    dep_spec = target_dicts.get(rawDep, {})
-    dep_target_type = dep_spec.get('type', None)
-
-    if dep_target_type == 'static_library':
-      static_deps.append(dep_cmake_name)
-    elif dep_target_type == 'shared_library':
-      shared_deps.append(dep_cmake_name)
-    else:
-      other_deps.append(dep_cmake_name)
-
-  # Ensure all external dependencies are complete before internal dependencies;
-  # extra_deps currently only depend on their own deps, so they would otherwise
-  # run too early.
-  if static_deps or shared_deps or other_deps:
-    for extra_dep in extra_deps:
-      output.write('add_dependencies(')
-      output.write(extra_dep)
-      output.write('\n')
-      for deps in (static_deps, shared_deps, other_deps):
-        for dep in gyp.common.uniquer(deps):
-          output.write('  ')
-          output.write(dep)
-          output.write('\n')
-      output.write(')\n')
-
-  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
-  other_deps.extend(extra_deps)
-  if other_deps or (not linkable and (static_deps or shared_deps)):
-    output.write('add_dependencies(')
-    output.write(cmake_target_name)
-    output.write('\n')
-    for dep in gyp.common.uniquer(other_deps):
-      output.write('  ')
-      output.write(dep)
-      output.write('\n')
-    if not linkable:
-      for deps in (static_deps, shared_deps):
-        for lib_dep in gyp.common.uniquer(deps):
-          output.write('  ')
-          output.write(lib_dep)
-          output.write('\n')
-    output.write(')\n')
-
-  # Libraries
-  if linkable:
-    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
-    if external_libs or static_deps or shared_deps:
-      output.write('target_link_libraries(')
-      output.write(cmake_target_name)
-      output.write('\n')
-      if static_deps:
-        write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
-        if write_group:
-          output.write('-Wl,--start-group\n')
-        for dep in gyp.common.uniquer(static_deps):
-          output.write('  ')
-          output.write(dep)
-          output.write('\n')
-        if write_group:
-          output.write('-Wl,--end-group\n')
-      if shared_deps:
-        for dep in gyp.common.uniquer(shared_deps):
-          output.write('  ')
-          output.write(dep)
-          output.write('\n')
-      if external_libs:
-        for lib in gyp.common.uniquer(external_libs):
-          output.write('  "')
-          output.write(RemovePrefix(lib, "$(SDKROOT)"))
-          output.write('"\n')
-
-      output.write(')\n')
-
-  UnsetVariable(output, 'TOOLSET')
-  UnsetVariable(output, 'TARGET')
-
-
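# A minimal illustrative sketch (hypothetical target and library names) of the
# link grouping emitted above: static dependencies get wrapped in
# -Wl,--start-group/--end-group so CMake is not asked to resolve a link order
# that gyp never specified. sys.stdout stands in for the CMakeLists.txt writer.
import sys

def _write_link_libraries(output, cmake_target_name, static_deps, write_group):
  output.write('target_link_libraries(%s\n' % cmake_target_name)
  if write_group:
    output.write('-Wl,--start-group\n')
  for dep in static_deps:
    output.write('  %s\n' % dep)
  if write_group:
    output.write('-Wl,--end-group\n')
  output.write(')\n')

_write_link_libraries(sys.stdout, 'base', ['foo', 'bar'], write_group=True)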
-def GenerateOutputForConfig(target_list, target_dicts, data,
-                            params, config_to_use):
-  options = params['options']
-  generator_flags = params['generator_flags']
-  flavor = gyp.common.GetFlavor(params)
-
-  # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
-  # Each Gyp configuration creates a different CMakeLists.txt file
-  # to avoid incompatibilities between Gyp and CMake configurations.
-  generator_dir = os.path.relpath(options.generator_output or '.')
-
-  # output_dir: relative path from generator_dir to the build directory.
-  output_dir = generator_flags.get('output_dir', 'out')
-
-  # build_dir: relative path from source root to our output files.
-  # e.g. "out/Debug"
-  build_dir = os.path.normpath(os.path.join(generator_dir,
-                                            output_dir,
-                                            config_to_use))
-
-  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
-  output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
-  gyp.common.EnsureDirExists(output_file)
-
-  output = open(output_file, 'w')
-  output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
-  output.write('cmake_policy(VERSION 2.8.8)\n')
-
-  gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
-  output.write('project(')
-  output.write(project_target)
-  output.write(')\n')
-
-  SetVariable(output, 'configuration', config_to_use)
-
-  ar = None
-  cc = None
-  cxx = None
-
-  make_global_settings = data[gyp_file].get('make_global_settings', [])
-  build_to_top = gyp.common.InvertRelativePath(build_dir,
-                                               options.toplevel_dir)
-  for key, value in make_global_settings:
-    if key == 'AR':
-      ar = os.path.join(build_to_top, value)
-    if key == 'CC':
-      cc = os.path.join(build_to_top, value)
-    if key == 'CXX':
-      cxx = os.path.join(build_to_top, value)
-
-  ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
-  cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
-  cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
-
-  if ar:
-    SetVariable(output, 'CMAKE_AR', ar)
-  if cc:
-    SetVariable(output, 'CMAKE_C_COMPILER', cc)
-  if cxx:
-    SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
-
-  # The following appears to be as-yet undocumented.
-  # http://public.kitware.com/Bug/view.php?id=8392
-  output.write('enable_language(ASM)\n')
-  # ASM-ATT does not support .S files.
-  # output.write('enable_language(ASM-ATT)\n')
-
-  if cc:
-    SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
-
-  SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
-  SetVariable(output, 'obj', '${builddir}/obj')
-  output.write('\n')
-
-  # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
-  # CMake by default names the object resulting from foo.c to be foo.c.o.
-  # Gyp traditionally names the object resulting from foo.c foo.o.
-  # This should be irrelevant, but some targets extract .o files from .a
-  # and depend on the name of the extracted .o files.
-  output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
-  output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
-  output.write('\n')
-
-  # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
-  # resulting in 'Argument list too long' errors.
-  # However, rsp files don't work correctly on Mac.
-  if flavor != 'mac':
-    output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
-  output.write('\n')
-
-  namer = CMakeNamer(target_list)
-
-  # The list of targets upon which the 'all' target should depend.
-  # CMake has its own implicit 'all' target; one is not created explicitly.
-  all_qualified_targets = set()
-  for build_file in params['build_files']:
-    for qualified_target in gyp.common.AllTargets(target_list,
-                                                  target_dicts,
-                                                  os.path.normpath(build_file)):
-      all_qualified_targets.add(qualified_target)
-
-  for qualified_target in target_list:
-    if flavor == 'mac':
-      gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
-      spec = target_dicts[qualified_target]
-      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
-
-    WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
-                options, generator_flags, all_qualified_targets, flavor, output)
-
-  output.close()
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-  generator_flags = params['generator_flags']
-
-  # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
-  generator_dir = os.path.relpath(options.generator_output or '.')
-
-  # output_dir: relative path from generator_dir to the build directory.
-  output_dir = generator_flags.get('output_dir', 'out')
-
-  for config_name in configurations:
-    # build_dir: relative path from source root to our output files.
-    # e.g. "out/Debug"
-    build_dir = os.path.normpath(os.path.join(generator_dir,
-                                              output_dir,
-                                              config_name))
-    arguments = ['cmake', '-G', 'Ninja']
-    print 'Generating [%s]: %s' % (config_name, arguments)
-    subprocess.check_call(arguments, cwd=build_dir)
-
-    arguments = ['ninja', '-C', build_dir]
-    print 'Building [%s]: %s' % (config_name, arguments)
-    subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
-  # Ignore the interrupt signal so that the parent process catches it and
-  # kills all multiprocessing children.
-  signal.signal(signal.SIGINT, signal.SIG_IGN)
-
-  target_list, target_dicts, data, params, config_name = arglist
-  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  user_config = params.get('generator_flags', {}).get('config', None)
-  if user_config:
-    GenerateOutputForConfig(target_list, target_dicts, data,
-                            params, user_config)
-  else:
-    config_names = target_dicts[target_list[0]]['configurations'].keys()
-    if params['parallel']:
-      try:
-        pool = multiprocessing.Pool(len(config_names))
-        arglists = []
-        for config_name in config_names:
-          arglists.append((target_list, target_dicts, data,
-                           params, config_name))
-        pool.map(CallGenerateOutputForConfig, arglists)
-      except KeyboardInterrupt, e:
-        pool.terminate()
-        raise e
-    else:
-      for config_name in config_names:
-        GenerateOutputForConfig(target_list, target_dicts, data,
-                                params, config_name)
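# A self-contained sketch (toy worker, hypothetical names) of the fan-out used
# above: one worker process per configuration, with SIGINT ignored in the
# children so only the parent reacts to Ctrl-C and can terminate the pool.
import multiprocessing
import signal

def _generate_one(arglist):
  signal.signal(signal.SIGINT, signal.SIG_IGN)  # parent handles interrupts
  config_name = arglist[-1]
  print('would generate CMakeLists.txt for %s' % config_name)

if __name__ == '__main__':
  config_names = ['Debug', 'Release']
  arglists = [(None, None, None, None, name) for name in config_names]
  pool = multiprocessing.Pool(len(config_names))
  try:
    pool.map(_generate_one, arglists)  # one map over all configurations
  except KeyboardInterrupt:
    pool.terminate()
    raise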
diff --git a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
deleted file mode 100644
index 160eafe..0000000
--- a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import os
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import json
-import sys
-
-generator_supports_multiple_toolsets = True
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_filelist_paths = {
-}
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
-                'LIB_DIR', 'SHARED_LIB_DIR']:
-  # Some gyp steps fail if these are empty(!).
-  generator_default_variables[dirname] = 'dir'
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
-               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
-               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
-               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
-               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
-               'CONFIGURATION_NAME']:
-  generator_default_variables[unused] = ''
-
-
-def CalculateVariables(default_variables, params):
-  generator_flags = params.get('generator_flags', {})
-  for key, val in generator_flags.items():
-    default_variables.setdefault(key, val)
-  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Windows Ninja generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
-    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
-  """Calculate the generator specific info that gets fed to input (called by
-  gyp)."""
-  generator_flags = params.get('generator_flags', {})
-  if generator_flags.get('adjust_static_libraries', False):
-    global generator_wants_static_library_dependencies_adjusted
-    generator_wants_static_library_dependencies_adjusted = True
-
-  toplevel = params['options'].toplevel_dir
-  generator_dir = os.path.relpath(params['options'].generator_output or '.')
-  # output_dir: relative path from generator_dir to the build directory.
-  output_dir = generator_flags.get('output_dir', 'out')
-  qualified_out_dir = os.path.normpath(os.path.join(
-      toplevel, generator_dir, output_dir, 'gypfiles'))
-  global generator_filelist_paths
-  generator_filelist_paths = {
-      'toplevel': toplevel,
-      'qualified_out_dir': qualified_out_dir,
-  }
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  # Map of target -> list of targets it depends on.
-  edges = {}
-
-  # Queue of targets to visit.
-  targets_to_visit = target_list[:]
-
-  while len(targets_to_visit) > 0:
-    target = targets_to_visit.pop()
-    if target in edges:
-      continue
-    edges[target] = []
-
-    for dep in target_dicts[target].get('dependencies', []):
-      edges[target].append(dep)
-      targets_to_visit.append(dep)
-
-  try:
-    filepath = params['generator_flags']['output_dir']
-  except KeyError:
-    filepath = '.'
-  filename = os.path.join(filepath, 'dump.json')
-  f = open(filename, 'w')
-  json.dump(edges, f)
-  f.close()
-  print 'Wrote json to %s.' % filename
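# A small sketch (toy targets) of the traversal above: follow 'dependencies'
# transitively from the requested targets, record a map of target -> direct
# dependencies, then serialize it as JSON.
import json

target_dicts = {
  'app.gyp:app#target': {'dependencies': ['lib.gyp:lib#target']},
  'lib.gyp:lib#target': {'dependencies': []},
}
targets_to_visit = list(target_dicts.keys())

edges = {}
while targets_to_visit:
  target = targets_to_visit.pop()
  if target in edges:
    continue
  edges[target] = []
  for dep in target_dicts[target].get('dependencies', []):
    edges[target].append(dep)
    targets_to_visit.append(dep)

print(json.dumps(edges, indent=2))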
diff --git a/tools/gyp/pylib/gyp/generator/eclipse.py b/tools/gyp/pylib/gyp/generator/eclipse.py
deleted file mode 100644
index 3544347..0000000
--- a/tools/gyp/pylib/gyp/generator/eclipse.py
+++ /dev/null
@@ -1,425 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""GYP backend that generates Eclipse CDT settings files.
-
-This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
-files that can be imported into an Eclipse CDT project. The XML file contains a
-list of include paths and symbols (i.e. defines).
-
-Because a full .cproject definition is not created by this generator, it's not
-possible to properly define the include dirs and symbols for each file
-individually.  Instead, one set of includes/symbols is generated for the entire
-project.  This works fairly well (and is a vast improvement in general), but may
-still result in a few indexer issues here and there.
-
-This generator has no automated tests, so expect it to be broken.
-"""
-
-from xml.sax.saxutils import escape
-import os.path
-import subprocess
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import shlex
-import xml.etree.cElementTree as ET
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-
-for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
-  # Some gyp steps fail if these are empty(!), so we convert them to variables
-  generator_default_variables[dirname] = '$' + dirname
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
-               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
-               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
-               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
-               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
-               'CONFIGURATION_NAME']:
-  generator_default_variables[unused] = ''
-
-# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
-# part of the path when dealing with generated headers.  This value will be
-# replaced dynamically for each configuration.
-generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
-    '$SHARED_INTERMEDIATE_DIR'
-
-
-def CalculateVariables(default_variables, params):
-  generator_flags = params.get('generator_flags', {})
-  for key, val in generator_flags.items():
-    default_variables.setdefault(key, val)
-  flavor = gyp.common.GetFlavor(params)
-  default_variables.setdefault('OS', flavor)
-  if flavor == 'win':
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Eclipse generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
-    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
-  """Calculate the generator specific info that gets fed to input (called by
-  gyp)."""
-  generator_flags = params.get('generator_flags', {})
-  if generator_flags.get('adjust_static_libraries', False):
-    global generator_wants_static_library_dependencies_adjusted
-    generator_wants_static_library_dependencies_adjusted = True
-
-
-def GetAllIncludeDirectories(target_list, target_dicts,
-                             shared_intermediate_dirs, config_name, params,
-                             compiler_path):
-  """Calculate the set of include directories to be used.
-
-  Returns:
-    A list including all the include_dir's specified for every target followed
-    by any include directories that were added as cflag compiler options.
-  """
-
-  gyp_includes_set = set()
-  compiler_includes_list = []
-
-  # Find compiler's default include dirs.
-  if compiler_path:
-    command = shlex.split(compiler_path)
-    command.extend(['-E', '-xc++', '-v', '-'])
-    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
-                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    output = proc.communicate()[1]
-    # Extract the list of include dirs from the output, which has this format:
-    #   ...
-    #   #include "..." search starts here:
-    #   #include <...> search starts here:
-    #    /usr/include/c++/4.6
-    #    /usr/local/include
-    #   End of search list.
-    #   ...
-    in_include_list = False
-    for line in output.splitlines():
-      if line.startswith('#include'):
-        in_include_list = True
-        continue
-      if line.startswith('End of search list.'):
-        break
-      if in_include_list:
-        include_dir = line.strip()
-        if include_dir not in compiler_includes_list:
-          compiler_includes_list.append(include_dir)
-
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
-    generator_flags = params.get('generator_flags', {})
-  for target_name in target_list:
-    target = target_dicts[target_name]
-    if config_name in target['configurations']:
-      config = target['configurations'][config_name]
-
-      # Look for any include dirs that were explicitly added via cflags. This
-      # may be done in gyp files to force certain includes to come at the end.
-      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
-      # remove this.
-      if flavor == 'win':
-        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
-        cflags = msvs_settings.GetCflags(config_name)
-      else:
-        cflags = config['cflags']
-      for cflag in cflags:
-        if cflag.startswith('-I'):
-          include_dir = cflag[2:]
-          if include_dir not in compiler_includes_list:
-            compiler_includes_list.append(include_dir)
-
-      # Find standard gyp include dirs.
-      if config.has_key('include_dirs'):
-        include_dirs = config['include_dirs']
-        for shared_intermediate_dir in shared_intermediate_dirs:
-          for include_dir in include_dirs:
-            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
-                                              shared_intermediate_dir)
-            if not os.path.isabs(include_dir):
-              base_dir = os.path.dirname(target_name)
-
-              include_dir = base_dir + '/' + include_dir
-              include_dir = os.path.abspath(include_dir)
-
-            gyp_includes_set.add(include_dir)
-
-  # Generate a list that has all the include dirs.
-  all_includes_list = list(gyp_includes_set)
-  all_includes_list.sort()
-  for compiler_include in compiler_includes_list:
-    if not compiler_include in gyp_includes_set:
-      all_includes_list.append(compiler_include)
-
-  # All done.
-  return all_includes_list
-
-
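# A self-contained sketch of the parsing done above: given the stderr of
# "<compiler> -E -xc++ -v -", keep the lines between the "#include <...>
# search starts here:" marker and "End of search list.". The sample text is
# illustrative, not captured from a real compiler.
sample_output = """\
#include "..." search starts here:
#include <...> search starts here:
 /usr/include/c++/4.6
 /usr/local/include
End of search list.
"""

compiler_includes_list = []
in_include_list = False
for line in sample_output.splitlines():
  if line.startswith('#include'):
    in_include_list = True
    continue
  if line.startswith('End of search list.'):
    break
  if in_include_list:
    include_dir = line.strip()
    if include_dir not in compiler_includes_list:
      compiler_includes_list.append(include_dir)

print(compiler_includes_list)  # ['/usr/include/c++/4.6', '/usr/local/include']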
-def GetCompilerPath(target_list, data, options):
-  """Determine a command that can be used to invoke the compiler.
-
-  Returns:
-    If this is a gyp project that has explicit make settings, try to determine
-    the compiler from that.  Otherwise, see if a compiler was specified via the
-    CC_target environment variable.
-  """
-  # First, see if the compiler is configured in make's settings.
-  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
-  make_global_settings_dict = data[build_file].get('make_global_settings', {})
-  for key, value in make_global_settings_dict:
-    if key in ['CC', 'CXX']:
-      return os.path.join(options.toplevel_dir, value)
-
-  # Check to see if the compiler was specified as an environment variable.
-  for key in ['CC_target', 'CC', 'CXX']:
-    compiler = os.environ.get(key)
-    if compiler:
-      return compiler
-
-  return 'gcc'
-
-
-def GetAllDefines(target_list, target_dicts, data, config_name, params,
-                  compiler_path):
-  """Calculate the defines for a project.
-
-  Returns:
-    A dict that includes explicit defines declared in gyp files along with all of
-    the default defines that the compiler uses.
-  """
-
-  # Get defines declared in the gyp files.
-  all_defines = {}
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
-    generator_flags = params.get('generator_flags', {})
-  for target_name in target_list:
-    target = target_dicts[target_name]
-
-    if flavor == 'win':
-      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
-      extra_defines = msvs_settings.GetComputedDefines(config_name)
-    else:
-      extra_defines = []
-    if config_name in target['configurations']:
-      config = target['configurations'][config_name]
-      target_defines = config['defines']
-    else:
-      target_defines = []
-    for define in target_defines + extra_defines:
-      split_define = define.split('=', 1)
-      if len(split_define) == 1:
-        split_define.append('1')
-      if split_define[0].strip() in all_defines:
-        # Already defined
-        continue
-      all_defines[split_define[0].strip()] = split_define[1].strip()
-  # Get default compiler defines (if possible).
-  if flavor == 'win':
-    return all_defines  # Default defines already processed in the loop above.
-  if compiler_path:
-    command = shlex.split(compiler_path)
-    command.extend(['-E', '-dM', '-'])
-    cpp_proc = subprocess.Popen(args=command, cwd='.',
-                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-    cpp_output = cpp_proc.communicate()[0]
-    cpp_lines = cpp_output.split('\n')
-    for cpp_line in cpp_lines:
-      if not cpp_line.strip():
-        continue
-      cpp_line_parts = cpp_line.split(' ', 2)
-      key = cpp_line_parts[1]
-      if len(cpp_line_parts) >= 3:
-        val = cpp_line_parts[2]
-      else:
-        val = '1'
-      all_defines[key] = val
-
-  return all_defines
-
-
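# A sketch of the "-E -dM" parsing above on canned input: each line has the
# form "#define NAME VALUE"; a name with no value defaults to '1'.
sample_dM = '#define __linux__ 1\n#define __SIZEOF_INT__ 4\n#define __GNUC__\n'

all_defines = {}
for cpp_line in sample_dM.split('\n'):
  if not cpp_line.strip():
    continue
  cpp_line_parts = cpp_line.split(' ', 2)
  key = cpp_line_parts[1]
  val = cpp_line_parts[2] if len(cpp_line_parts) >= 3 else '1'
  all_defines[key] = val

print(all_defines)  # e.g. {'__linux__': '1', '__SIZEOF_INT__': '4', '__GNUC__': '1'}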
-def WriteIncludePaths(out, eclipse_langs, include_dirs):
-  """Write the includes section of a CDT settings export file."""
-
-  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
-            'settingswizards.IncludePaths">\n')
-  out.write('    <language name="holder for library settings"></language>\n')
-  for lang in eclipse_langs:
-    out.write('    <language name="%s">\n' % lang)
-    for include_dir in include_dirs:
-      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
-                include_dir)
-    out.write('    </language>\n')
-  out.write('  </section>\n')
-
-
-def WriteMacros(out, eclipse_langs, defines):
-  """Write the macros section of a CDT settings export file."""
-
-  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
-            'settingswizards.Macros">\n')
-  out.write('    <language name="holder for library settings"></language>\n')
-  for lang in eclipse_langs:
-    out.write('    <language name="%s">\n' % lang)
-    for key in sorted(defines.iterkeys()):
-      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
-                (escape(key), escape(defines[key])))
-    out.write('    </language>\n')
-  out.write('  </section>\n')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
-                            config_name):
-  options = params['options']
-  generator_flags = params.get('generator_flags', {})
-
-  # build_dir: relative path from source root to our output files.
-  # e.g. "out/Debug"
-  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
-                           config_name)
-
-  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
-  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
-  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
-                              os.path.join(toplevel_build, 'gen')]
-
-  GenerateCdtSettingsFile(target_list,
-                          target_dicts,
-                          data,
-                          params,
-                          config_name,
-                          os.path.join(toplevel_build,
-                                       'eclipse-cdt-settings.xml'),
-                          options,
-                          shared_intermediate_dirs)
-  GenerateClasspathFile(target_list,
-                        target_dicts,
-                        options.toplevel_dir,
-                        toplevel_build,
-                        os.path.join(toplevel_build,
-                                     'eclipse-classpath.xml'))
-
-
-def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
-                            config_name, out_name, options,
-                            shared_intermediate_dirs):
-  gyp.common.EnsureDirExists(out_name)
-  with open(out_name, 'w') as out:
-    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
-    out.write('<cdtprojectproperties>\n')
-
-    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
-                     'GNU C++', 'GNU C', 'Assembly']
-    compiler_path = GetCompilerPath(target_list, data, options)
-    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
-                                            shared_intermediate_dirs,
-                                            config_name, params, compiler_path)
-    WriteIncludePaths(out, eclipse_langs, include_dirs)
-    defines = GetAllDefines(target_list, target_dicts, data, config_name,
-                            params, compiler_path)
-    WriteMacros(out, eclipse_langs, defines)
-
-    out.write('</cdtprojectproperties>\n')
-
-
-def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
-                          toplevel_build, out_name):
-  '''Generates a classpath file suitable for symbol navigation and code
-  completion of Java code (such as in Android projects) by finding all
-  .java and .jar files used as action inputs.'''
-  gyp.common.EnsureDirExists(out_name)
-  result = ET.Element('classpath')
-
-  def AddElements(kind, paths):
-    # First, we need to normalize the paths so they are all relative to the
-    # toplevel dir.
-    rel_paths = set()
-    for path in paths:
-      if os.path.isabs(path):
-        rel_paths.add(os.path.relpath(path, toplevel_dir))
-      else:
-        rel_paths.add(path)
-
-    for path in sorted(rel_paths):
-      entry_element = ET.SubElement(result, 'classpathentry')
-      entry_element.set('kind', kind)
-      entry_element.set('path', path)
-
-  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
-  AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
-  # Include the standard JRE container and a dummy out folder
-  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
-  # Include a dummy out folder so that Eclipse doesn't use the default /bin
-  # folder in the root of the project.
-  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
-
-  ET.ElementTree(result).write(out_name)
-
-
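# A tiny sketch (hypothetical paths) of the classpath emission above: build
# <classpathentry> elements under a <classpath> root and serialize with
# ElementTree.
import xml.etree.ElementTree as ET

result = ET.Element('classpath')
for kind, path in [('src', 'java/src'),
                   ('lib', 'third_party/foo.jar'),
                   ('con', 'org.eclipse.jdt.launching.JRE_CONTAINER')]:
  entry_element = ET.SubElement(result, 'classpathentry')
  entry_element.set('kind', kind)
  entry_element.set('path', path)

print(ET.tostring(result).decode())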
-def GetJavaJars(target_list, target_dicts, toplevel_dir):
-  '''Generates a sequence of all .jars used as inputs.'''
-  for target_name in target_list:
-    target = target_dicts[target_name]
-    for action in target.get('actions', []):
-      for input_ in action['inputs']:
-        if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
-          if os.path.isabs(input_):
-            yield input_
-          else:
-            yield os.path.join(os.path.dirname(target_name), input_)
-
-
-def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
-  '''Generates a sequence of all likely java package root directories.'''
-  for target_name in target_list:
-    target = target_dicts[target_name]
-    for action in target.get('actions', []):
-      for input_ in action['inputs']:
-        if (os.path.splitext(input_)[1] == '.java' and
-            not input_.startswith('$')):
-          dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
-                                              input_))
-          # If there is a parent 'src' or 'java' folder, navigate up to it -
-          # these are canonical package root names in Chromium.  This will
-          # break if 'src' or 'java' exists in the package structure. This
-          # could be further improved by inspecting the java file for the
-          # package name if this proves to be too fragile in practice.
-          parent_search = dir_
-          while os.path.basename(parent_search) not in ['src', 'java']:
-            parent_search, _ = os.path.split(parent_search)
-            if not parent_search or parent_search == toplevel_dir:
-              # Didn't find a known root, just return the original path
-              yield dir_
-              break
-          else:
-            yield parent_search
-
-
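# A small sketch (hypothetical paths) of the package-root search above: walk up
# from a .java file's directory until a 'src' or 'java' component is found,
# falling back to the original directory when no known root exists.
import os

def _find_package_root(dir_, toplevel_dir):
  parent_search = dir_
  while os.path.basename(parent_search) not in ['src', 'java']:
    parent_search, _ = os.path.split(parent_search)
    if not parent_search or parent_search == toplevel_dir:
      return dir_  # no recognizable root; keep the original path
  return parent_search

print(_find_package_root('project/java/com/example/app', 'project'))  # project/java
print(_find_package_root('project/other/com/example', 'project'))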
-def GenerateOutput(target_list, target_dicts, data, params):
-  """Generate an XML settings file that can be imported into a CDT project."""
-
-  if params['options'].generator_output:
-    raise NotImplementedError("--generator_output not implemented for eclipse")
-
-  user_config = params.get('generator_flags', {}).get('config', None)
-  if user_config:
-    GenerateOutputForConfig(target_list, target_dicts, data, params,
-                            user_config)
-  else:
-    config_names = target_dicts[target_list[0]]['configurations'].keys()
-    for config_name in config_names:
-      GenerateOutputForConfig(target_list, target_dicts, data, params,
-                              config_name)
-
diff --git a/tools/gyp/pylib/gyp/generator/gypd.py b/tools/gyp/pylib/gyp/generator/gypd.py
deleted file mode 100644
index 3efdb99..0000000
--- a/tools/gyp/pylib/gyp/generator/gypd.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypd output module
-
-This module produces gyp input as its output.  Output files are given the
-.gypd extension to avoid overwriting the .gyp files that they are generated
-from.  Internal references to .gyp files (such as those found in
-"dependencies" sections) are not adjusted to point to .gypd files instead;
-unlike other paths, which are relative to the .gyp or .gypd file, such paths
-are relative to the directory from which gyp was run to create the .gypd file.
-
-This generator module is intended to be a sample and a debugging aid, hence
-the "d" for "debug" in .gypd.  It is useful to inspect the results of the
-various merges, expansions, and conditional evaluations performed by gyp
-and to see a representation of what would be fed to a generator module.
-
-It's not advisable to rename .gypd files produced by this module to .gyp,
-because they will have all merges, expansions, and evaluations already
-performed and the relevant constructs not present in the output; paths to
-dependencies may be wrong; and various sections that do not belong in .gyp
-files such as "included_files" and "*_excluded" will be present.
-Output will also be stripped of comments.  This is not intended to be a
-general-purpose gyp pretty-printer; for that, you probably just want to
-run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
-comments but won't do all of the other things done to this module's output.
-
-The specific formatting of the output generated by this module is subject
-to change.
-"""
-
-
-import gyp.common
-import errno
-import os
-import pprint
-
-
-# These variables should just be spit back out as variable references.
-_generator_identity_variables = [
-  'CONFIGURATION_NAME',
-  'EXECUTABLE_PREFIX',
-  'EXECUTABLE_SUFFIX',
-  'INTERMEDIATE_DIR',
-  'LIB_DIR',
-  'PRODUCT_DIR',
-  'RULE_INPUT_ROOT',
-  'RULE_INPUT_DIRNAME',
-  'RULE_INPUT_EXT',
-  'RULE_INPUT_NAME',
-  'RULE_INPUT_PATH',
-  'SHARED_INTERMEDIATE_DIR',
-  'SHARED_LIB_DIR',
-  'SHARED_LIB_PREFIX',
-  'SHARED_LIB_SUFFIX',
-  'STATIC_LIB_PREFIX',
-  'STATIC_LIB_SUFFIX',
-]
-
-# gypd doesn't define a default value for OS like many other generator
-# modules.  Specify "-D OS=whatever" on the command line to provide a value.
-generator_default_variables = {
-}
-
-# gypd supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# TODO(mark): This always uses <, which isn't right.  The input module should
-# notify the generator to tell it which phase it is operating in, and this
-# module should use < for the early phase and then switch to > for the late
-# phase.  Bonus points for carrying @ back into the output too.
-for v in _generator_identity_variables:
-  generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  output_files = {}
-  for qualified_target in target_list:
-    [input_file, target] = \
-        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
-
-    if input_file[-4:] != '.gyp':
-      continue
-    input_file_stem = input_file[:-4]
-    output_file = input_file_stem + params['options'].suffix + '.gypd'
-
-    if not output_file in output_files:
-      output_files[output_file] = input_file
-
-  for output_file, input_file in output_files.iteritems():
-    output = open(output_file, 'w')
-    pprint.pprint(data[input_file], output)
-    output.close()
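# A minimal sketch of the gypd idea: the merged .gyp data is written out with
# pprint, so it stays readable and can be loaded back as plain Python literals.
# The dict below is a toy stand-in for data[input_file].
import pprint

gyp_data = {'targets': [{'target_name': 'hello',
                         'type': 'executable',
                         'sources': ['hello.cc']}]}

text = pprint.pformat(gyp_data)
print(text)
assert eval(text) == gyp_data  # round-trips as Python literals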
diff --git a/tools/gyp/pylib/gyp/generator/gypsh.py b/tools/gyp/pylib/gyp/generator/gypsh.py
deleted file mode 100644
index bd405f4..0000000
--- a/tools/gyp/pylib/gyp/generator/gypsh.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypsh output module
-
-gypsh is a GYP shell.  It's not really a generator per se.  All it does is
-fire up an interactive Python session with a few local variables set to the
-variables passed to the generator.  Like gypd, it's intended as a debugging
-aid, to facilitate the exploration of .gyp structures after being processed
-by the input module.
-
-The expected usage is "gyp -f gypsh -D OS=desired_os".
-"""
-
-
-import code
-import sys
-
-
-# All of this stuff about generator variables was lovingly ripped from gypd.py.
-# That module has a much better description of what's going on and why.
-_generator_identity_variables = [
-  'EXECUTABLE_PREFIX',
-  'EXECUTABLE_SUFFIX',
-  'INTERMEDIATE_DIR',
-  'PRODUCT_DIR',
-  'RULE_INPUT_ROOT',
-  'RULE_INPUT_DIRNAME',
-  'RULE_INPUT_EXT',
-  'RULE_INPUT_NAME',
-  'RULE_INPUT_PATH',
-  'SHARED_INTERMEDIATE_DIR',
-]
-
-generator_default_variables = {
-}
-
-for v in _generator_identity_variables:
-  generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  locals = {
-        'target_list':  target_list,
-        'target_dicts': target_dicts,
-        'data':         data,
-      }
-
-  # Use a banner that looks like the stock Python one and like what
-  # code.interact uses by default, but tack on something to indicate what
-  # locals are available, and identify gypsh.
-  banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
-         (sys.version, sys.platform, repr(sorted(locals.keys())))
-
-  code.interact(banner, local=locals)
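# A sketch of the gypsh idea with toy inputs: build the banner as above and
# optionally drop into an interactive session whose locals are the generator
# inputs.
import code
import sys

gypsh_locals = {'target_list': [], 'target_dicts': {}, 'data': {}}
banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % (
    sys.version, sys.platform, repr(sorted(gypsh_locals.keys())))
print(banner)
if False:  # flip to True to actually start the shell
  code.interact(banner, local=gypsh_locals)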
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
deleted file mode 100644
index b7da768..0000000
--- a/tools/gyp/pylib/gyp/generator/make.py
+++ /dev/null
@@ -1,2218 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This is all roughly based on the Makefile system used by the Linux
-# kernel, but is a non-recursive make -- we put the entire dependency
-# graph in front of make and let it figure it out.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level Makefile.  This means that all
-# variables in .mk-files clobber one another.  Be careful to use :=
-# where appropriate for immediate evaluation, and similarly to watch
-# that you're not relying on a variable value to last between different
-# .mk files.
-#
-# TODOs:
-#
-# Global settings and utility functions are currently stuffed in the
-# toplevel Makefile.  It may make sense to generate some .mk files on
-# the side to keep the files readable.
-
-import os
-import re
-import sys
-import subprocess
-import gyp
-import gyp.common
-import gyp.xcode_emulation
-from gyp.common import GetEnvironFallback
-from gyp.common import GypError
-
-generator_default_variables = {
-  'EXECUTABLE_PREFIX': '',
-  'EXECUTABLE_SUFFIX': '',
-  'STATIC_LIB_PREFIX': 'lib',
-  'SHARED_LIB_PREFIX': 'lib',
-  'STATIC_LIB_SUFFIX': '.a',
-  'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
-  'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
-  'PRODUCT_DIR': '$(builddir)',
-  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',  # This gets expanded by Python.
-  'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',  # This gets expanded by Python.
-  'RULE_INPUT_PATH': '$(abspath $<)',
-  'RULE_INPUT_EXT': '$(suffix $<)',
-  'RULE_INPUT_NAME': '$(notdir $<)',
-  'CONFIGURATION_NAME': '$(BUILDTYPE)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# Request sorted dependencies in the order from dependents to dependencies.
-generator_wants_sorted_dependencies = False
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-
-def CalculateVariables(default_variables, params):
-  """Calculate additional variables for use in the build (called by gyp)."""
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'mac':
-    default_variables.setdefault('OS', 'mac')
-    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
-    default_variables.setdefault('SHARED_LIB_DIR',
-                                 generator_default_variables['PRODUCT_DIR'])
-    default_variables.setdefault('LIB_DIR',
-                                 generator_default_variables['PRODUCT_DIR'])
-
-    # Copy additional generator configuration data from Xcode, which is shared
-    # by the Mac Make generator.
-    import gyp.generator.xcode as xcode_generator
-    global generator_additional_non_configuration_keys
-    generator_additional_non_configuration_keys = getattr(xcode_generator,
-        'generator_additional_non_configuration_keys', [])
-    global generator_additional_path_sections
-    generator_additional_path_sections = getattr(xcode_generator,
-        'generator_additional_path_sections', [])
-    global generator_extra_sources_for_rules
-    generator_extra_sources_for_rules = getattr(xcode_generator,
-        'generator_extra_sources_for_rules', [])
-    COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
-  else:
-    operating_system = flavor
-    if flavor == 'android':
-      operating_system = 'linux'  # Keep this legacy behavior for now.
-    default_variables.setdefault('OS', operating_system)
-    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
-    default_variables.setdefault('SHARED_LIB_DIR', '$(builddir)/lib.$(TOOLSET)')
-    default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
-
-
-def CalculateGeneratorInputInfo(params):
-  """Calculate the generator specific info that gets fed to input (called by
-  gyp)."""
-  generator_flags = params.get('generator_flags', {})
-  android_ndk_version = generator_flags.get('android_ndk_version', None)
-  # Android NDK requires a strict link order.
-  if android_ndk_version:
-    global generator_wants_sorted_dependencies
-    generator_wants_sorted_dependencies = True
-
-  output_dir = params['options'].generator_output or \
-               params['options'].toplevel_dir
-  builddir_name = generator_flags.get('output_dir', 'out')
-  qualified_out_dir = os.path.normpath(os.path.join(
-    output_dir, builddir_name, 'gypfiles'))
-
-  global generator_filelist_paths
-  generator_filelist_paths = {
-    'toplevel': params['options'].toplevel_dir,
-    'qualified_out_dir': qualified_out_dir,
-  }
-
-
-# The .d checking code below uses these functions:
-# wildcard, sort, foreach, shell, wordlist
-# wildcard can handle spaces, the rest can't.
-# Since I could find no way to make foreach work with spaces in filenames
-# correctly, the .d files have spaces replaced with another character. The .d
-# file for
-#     Chromium\ Framework.framework/foo
-# is for example
-#     out/Release/.deps/out/Release/Chromium?Framework.framework/foo
-# This is the replacement character.
-SPACE_REPLACEMENT = '?'
-
-
-LINK_COMMANDS_LINUX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-
-# We support two kinds of shared objects (.so):
-# 1) shared_library, which is just bundling together many dependent libraries
-# into a link line.
-# 2) loadable_module, which is generating a module intended for dlopen().
-#
-# They differ only slightly:
-# In the former case, we want to package all dependent code into the .so.
-# In the latter case, we want to package just the API exposed by the
-# outermost module.
-# This means shared_library uses --whole-archive, while loadable_module doesn't.
-# (Note that --whole-archive is incompatible with the --start-group used in
-# normal linking.)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-"""
-
-LINK_COMMANDS_MAC = """\
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-LINK_COMMANDS_ANDROID = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-quiet_cmd_link_host = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-LINK_COMMANDS_AIX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-# Header of toplevel Makefile.
-# This should go into the build tree, but it's easier to keep it here for now.
-SHARED_HEADER = ("""\
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := %(srcdir)s
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= %(builddir)s
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
-  quiet=
-else
-  quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= %(default_configuration)s
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-%(make_global_settings)s
-
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= %(CC.host)s
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= %(CXX.host)s
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= %(LINK.host)s
-LDFLAGS.host ?=
-AR.host ?= %(AR.host)s
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
-unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info.  Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-#   foobar.o: DEP1 DEP2
-# into
-#   path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-#   foobar.o: DEP1 DEP2 \\
-#               DEP3
-# to
-#   DEP1:
-#   DEP2:
-#   DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters."""
-r"""
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
-  grep -v '^$$'                             |\
-  sed -e 1d -e 's|$$|:|'                     \
-    >> $(depfile)
-rm $(depfile).raw
-endef
-"""
-"""
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-%(extra_commands)s
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
-
-%(link_commands)s
-"""
-
-r"""
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always
-# use single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
-"""
-"""
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command.  Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would have false
-# positives, where one string reordered its arguments.
-#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
-#                       $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain """ + SPACE_REPLACEMENT + \
-                   """ instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
-                       $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-#   $? -- new prerequisites
-#   $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
-  @E=0;\\
-  for p in $(POSTBUILDS); do\\
-    eval $$p;\\
-    E=$$?;\\
-    if [ $$E -ne 0 ]; then\\
-      break;\\
-    fi;\\
-  done;\\
-  if [ $$E -ne 0 ]; then\\
-    rm -rf "$@";\\
-    exit $$E;\\
-  fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
-                                                     SPACE_REPLACEMENT + """ for
-# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
-                                     """ characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
-  @$(call exact_echo,  $($(quiet)cmd_$(1)))
-  @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
-  $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
-    @$(cmd_$(1))
-    @echo "  $(quiet_cmd_$(1)): Finished",
-    @$(cmd_$(1))
-  )
-  @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
-  @$(if $(2),$(fixup_dep))
-  $(if $(and $(3), $(POSTBUILDS)),
-    $(call do_postbuilds)
-  )
-)
-endef
-
-# Declare the "%(default_target)s" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: %(default_target)s
-%(default_target)s:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run.  Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-""")
-
-SHARED_HEADER_MAC_COMMANDS = """
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-"""
-
-
-def WriteRootHeaderSuffixRules(writer):
-  extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
-
-  writer.write('# Suffix rules, putting all outputs into $(obj).\n')
-  for ext in extensions:
-    writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
-    writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
-
-  writer.write('\n# Try building from generated source, too.\n')
-  for ext in extensions:
-    writer.write(
-        '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
-    writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
-  writer.write('\n')
-  for ext in extensions:
-    writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
-    writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
-  writer.write('\n')
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
-# Suffix rules, putting all outputs into $(obj).
-""")
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
-# Try building from generated source, too.
-""")
-
-
-SHARED_FOOTER = """\
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules.  $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
-  include $(d_files)
-endif
-"""
-
-header = """\
-# This file is generated by gyp; do not edit.
-
-"""
-
-# Maps every compilable file extension to the do_cmd that compiles it.
-COMPILABLE_EXTENSIONS = {
-  '.c': 'cc',
-  '.cc': 'cxx',
-  '.cpp': 'cxx',
-  '.cxx': 'cxx',
-  '.s': 'cc',
-  '.S': 'cc',
-}
-
-def Compilable(filename):
-  """Return true if the file is compilable (should be in OBJS)."""
-  for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
-    if res:
-      return True
-  return False
-
-
-def Linkable(filename):
-  """Return true if the file is linkable (should be on the link line)."""
-  return filename.endswith('.o')
-
-
-def Target(filename):
-  """Translate a compilable filename to its .o target."""
-  return os.path.splitext(filename)[0] + '.o'
-
-
-def EscapeShellArgument(s):
-  """Quotes an argument so that it will be interpreted literally by a POSIX
-     shell. Taken from
-     http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
-     """
-  return "'" + s.replace("'", "'\\''") + "'"
-
-
-def EscapeMakeVariableExpansion(s):
-  """Make has its own variable expansion syntax using $. We must escape it for
-     string to be interpreted literally."""
-  return s.replace('$', '$$')
-
-
-def EscapeCppDefine(s):
-  """Escapes a CPP define so that it will reach the compiler unaltered."""
-  s = EscapeShellArgument(s)
-  s = EscapeMakeVariableExpansion(s)
-  # '#' characters must be escaped even when embedded in a string, else Make
-  # will treat them as the start of a comment.
-  return s.replace('#', r'\#')
-
-
-def QuoteIfNecessary(string):
-  """TODO: Should this ideally be replaced with one or more of the above
-     functions?"""
-  if '"' in string:
-    string = '"' + string.replace('"', '\\"') + '"'
-  return string
-
-
-def StringToMakefileVariable(string):
-  """Convert a string to a value that is acceptable as a make variable name."""
-  return re.sub('[^a-zA-Z0-9_]', '_', string)
-
-
-srcdir_prefix = ''
-def Sourceify(path):
-  """Convert a path to its source directory form."""
-  if '$(' in path:
-    return path
-  if os.path.isabs(path):
-    return path
-  return srcdir_prefix + path
-
-
-def QuoteSpaces(s, quote=r'\ '):
-  return s.replace(' ', quote)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
-def _ValidateSourcesForOSX(spec, all_sources):
-  """Makes sure if duplicate basenames are not specified in the source list.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  """
-  if spec.get('type', None) != 'static_library':
-    return
-
-  basenames = {}
-  for source in all_sources:
-    name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
-    if not is_compiled_file:
-      continue
-    basename = os.path.basename(name)  # Don't include extension.
-    basenames.setdefault(basename, []).append(source)
-
-  error = ''
-  for basename, files in basenames.iteritems():
-    if len(files) > 1:
-      error += '  %s: %s\n' % (basename, ' '.join(files))
-
-  if error:
-    print('static library %s has several files with the same basename:\n' %
-          spec['target_name'] + error + 'libtool on OS X will generate' +
-          ' warnings for them.')
-    raise GypError('Duplicate basenames in sources section, see list above')
-
-
-# Map from qualified target to path to output.
-target_outputs = {}
-# Map from qualified target to any linkable output.  A subset
-# of target_outputs.  E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
-class MakefileWriter(object):
-  """MakefileWriter packages up the writing of one target-specific foobar.mk.
-
-  Its only real entry point is Write(); the class mostly exists for
-  namespacing.
-  """
-
-  def __init__(self, generator_flags, flavor):
-    self.generator_flags = generator_flags
-    self.flavor = flavor
-
-    self.suffix_rules_srcdir = {}
-    self.suffix_rules_objdir1 = {}
-    self.suffix_rules_objdir2 = {}
-
-    # Generate suffix rules for all compilable extensions.
-    for ext in COMPILABLE_EXTENSIONS.keys():
-      # Suffix rules for source folder.
-      self.suffix_rules_srcdir.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
-	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
-      # Suffix rules for generated source files.
-      self.suffix_rules_objdir1.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
-	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-      self.suffix_rules_objdir2.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
-	@$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
-
-  def Write(self, qualified_target, base_path, output_filename, spec, configs,
-            part_of_all):
-    """The main entry point: writes a .mk file for a single target.
-
-    Arguments:
-      qualified_target: target we're generating
-      base_path: path relative to source root we're building in, used to resolve
-                 target-relative paths
-      output_filename: output .mk file name to write
-      spec, configs: gyp info
-      part_of_all: flag indicating this target is part of 'all'
-    """
-    gyp.common.EnsureDirExists(output_filename)
-
-    self.fp = open(output_filename, 'w')
-
-    self.fp.write(header)
-
-    self.qualified_target = qualified_target
-    self.path = base_path
-    self.target = spec['target_name']
-    self.type = spec['type']
-    self.toolset = spec['toolset']
-
-    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
-    if self.flavor == 'mac':
-      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
-    else:
-      self.xcode_settings = None
-
-    deps, link_deps = self.ComputeDeps(spec)
-
-    # Some of the generation below can add extra output, sources, or
-    # link dependencies.  All of the out params of the functions that
-    # follow use names like extra_foo.
-    extra_outputs = []
-    extra_sources = []
-    extra_link_deps = []
-    extra_mac_bundle_resources = []
-    mac_bundle_deps = []
-
-    if self.is_mac_bundle:
-      self.output = self.ComputeMacBundleOutput(spec)
-      self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
-    else:
-      self.output = self.output_binary = self.ComputeOutput(spec)
-
-    self.is_standalone_static_library = bool(
-        spec.get('standalone_static_library', 0))
-    self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
-                                 'shared_library')
-    if (self.is_standalone_static_library or
-        self.type in self._INSTALLABLE_TARGETS):
-      self.alias = os.path.basename(self.output)
-      install_path = self._InstallableTargetInstallPath()
-    else:
-      self.alias = self.output
-      install_path = self.output
-
-    self.WriteLn("TOOLSET := " + self.toolset)
-    self.WriteLn("TARGET := " + self.target)
-
-    # Actions must come first, since they can generate more OBJs for use below.
-    if 'actions' in spec:
-      self.WriteActions(spec['actions'], extra_sources, extra_outputs,
-                        extra_mac_bundle_resources, part_of_all)
-
-    # Rules must be early like actions.
-    if 'rules' in spec:
-      self.WriteRules(spec['rules'], extra_sources, extra_outputs,
-                      extra_mac_bundle_resources, part_of_all)
-
-    if 'copies' in spec:
-      self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
-
-    # Bundle resources.
-    if self.is_mac_bundle:
-      all_mac_bundle_resources = (
-          spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
-      self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
-      self.WriteMacInfoPlist(mac_bundle_deps)
-
-    # Sources.
-    all_sources = spec.get('sources', []) + extra_sources
-    if all_sources:
-      if self.flavor == 'mac':
-        # libtool on OS X generates warnings for duplicate basenames in the same
-        # target.
-        _ValidateSourcesForOSX(spec, all_sources)
-      self.WriteSources(
-          configs, deps, all_sources, extra_outputs,
-          extra_link_deps, part_of_all,
-          gyp.xcode_emulation.MacPrefixHeader(
-              self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
-              self.Pchify))
-      sources = filter(Compilable, all_sources)
-      if sources:
-        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
-        extensions = set([os.path.splitext(s)[1] for s in sources])
-        for ext in extensions:
-          if ext in self.suffix_rules_srcdir:
-            self.WriteLn(self.suffix_rules_srcdir[ext])
-        self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
-        for ext in extensions:
-          if ext in self.suffix_rules_objdir1:
-            self.WriteLn(self.suffix_rules_objdir1[ext])
-        for ext in extensions:
-          if ext in self.suffix_rules_objdir2:
-            self.WriteLn(self.suffix_rules_objdir2[ext])
-        self.WriteLn('# End of this set of suffix rules')
-
-        # Add dependency from bundle to bundle binary.
-        if self.is_mac_bundle:
-          mac_bundle_deps.append(self.output_binary)
-
-    self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
-                     mac_bundle_deps, extra_outputs, part_of_all)
-
-    # Update global list of target outputs, used in dependency tracking.
-    target_outputs[qualified_target] = install_path
-
-    # Update global list of link dependencies.
-    if self.type in ('static_library', 'shared_library'):
-      target_link_deps[qualified_target] = self.output_binary
-
-    # Currently all versions have the same effect, but the behavior could
-    # differ in the future.
-    if self.generator_flags.get('android_ndk_version', None):
-      self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
-
-    self.fp.close()
-
-
-  def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
-    """Write a "sub-project" Makefile.
-
-    This is a small, wrapper Makefile that calls the top-level Makefile to build
-    the targets from a single gyp file (i.e. a sub-project).
-
-    Arguments:
-      output_filename: sub-project Makefile name to write
-      makefile_path: path to the top-level Makefile
-      targets: list of "all" targets for this sub-project
-      build_dir: build output directory, relative to the sub-project
-    """
-    gyp.common.EnsureDirExists(output_filename)
-    self.fp = open(output_filename, 'w')
-    self.fp.write(header)
-    # For consistency with other builders, put sub-project build output in the
-    # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
-    self.WriteLn('export builddir_name ?= %s' %
-                 os.path.join(os.path.dirname(output_filename), build_dir))
-    self.WriteLn('.PHONY: all')
-    self.WriteLn('all:')
-    if makefile_path:
-      makefile_path = ' -C ' + makefile_path
-    self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
-    self.fp.close()
-
-
-  def WriteActions(self, actions, extra_sources, extra_outputs,
-                   extra_mac_bundle_resources, part_of_all):
-    """Write Makefile code for any 'actions' from the gyp input.
-
-    extra_sources: a list that will be filled in with newly generated source
-                   files, if any
-    extra_outputs: a list that will be filled in with any outputs of these
-                   actions (used to make other pieces dependent on these
-                   actions)
-    part_of_all: flag indicating this target is part of 'all'
-    """
-    env = self.GetSortedXcodeEnv()
-    for action in actions:
-      name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
-                                                 action['action_name']))
-      self.WriteLn('### Rules for action "%s":' % action['action_name'])
-      inputs = action['inputs']
-      outputs = action['outputs']
-
-      # Build up a list of outputs.
-      # Collect the output dirs we'll need.
-      dirs = set()
-      for out in outputs:
-        dir = os.path.split(out)[0]
-        if dir:
-          dirs.add(dir)
-      if int(action.get('process_outputs_as_sources', False)):
-        extra_sources += outputs
-      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
-        extra_mac_bundle_resources += outputs
-
-      # Write the actual command.
-      action_commands = action['action']
-      if self.flavor == 'mac':
-        action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
-                          for command in action_commands]
-      command = gyp.common.EncodePOSIXShellList(action_commands)
-      if 'message' in action:
-        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
-      else:
-        self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
-      if len(dirs) > 0:
-        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
-      cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
-      # command and cd_action get written to a toplevel variable called
-      # cmd_foo. Toplevel variables can't handle things that change per
-      # makefile like $(TARGET), so hardcode the target.
-      command = command.replace('$(TARGET)', self.target)
-      cd_action = cd_action.replace('$(TARGET)', self.target)
-
-      # Set LD_LIBRARY_PATH in case the action runs an executable from this
-      # build which links to shared libs from this build.
-      # actions run on the host, so they should in theory only use host
-      # libraries, but until everything is made cross-compile safe, also use
-      # target libraries.
-      # TODO(piman): when everything is cross-compile safe, remove lib.target
-      self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
-                   '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
-                   'export LD_LIBRARY_PATH; '
-                   '%s%s'
-                   % (name, cd_action, command))
-      self.WriteLn()
-      outputs = map(self.Absolutify, outputs)
-      # The makefile rules are all relative to the top dir, but the gyp actions
-      # are defined relative to their containing dir.  This replaces the obj
-      # variable for the action rule with an absolute version so that the output
-      # goes in the right place.
-      # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
-      # it's superfluous for the "extra outputs", and this avoids accidentally
-      # writing duplicate dummy rules for those outputs.
-      # Same for environment.
-      self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
-      self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
-      self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
-
-      for input in inputs:
-        assert ' ' not in input, (
-            "Spaces in action input filenames not supported (%s)"  % input)
-      for output in outputs:
-        assert ' ' not in output, (
-            "Spaces in action output filenames not supported (%s)"  % output)
-
-      # See the comment in WriteCopies about expanding env vars.
-      outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
-      inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
-      self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
-                      part_of_all=part_of_all, command=name)
-
-      # Stuff the outputs in a variable so we can refer to them later.
-      outputs_variable = 'action_%s_outputs' % name
-      self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
-      extra_outputs.append('$(%s)' % outputs_variable)
-      self.WriteLn()
-
-    self.WriteLn()
-
-
-  def WriteRules(self, rules, extra_sources, extra_outputs,
-                 extra_mac_bundle_resources, part_of_all):
-    """Write Makefile code for any 'rules' from the gyp input.
-
-    extra_sources: a list that will be filled in with newly generated source
-                   files, if any
-    extra_outputs: a list that will be filled in with any outputs of these
-                   rules (used to make other pieces dependent on these rules)
-    part_of_all: flag indicating this target is part of 'all'
-    """
-    env = self.GetSortedXcodeEnv()
-    for rule in rules:
-      name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
-                                                 rule['rule_name']))
-      count = 0
-      self.WriteLn('### Generated for rule %s:' % name)
-
-      all_outputs = []
-
-      for rule_source in rule.get('rule_sources', []):
-        dirs = set()
-        (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
-        (rule_source_root, rule_source_ext) = \
-            os.path.splitext(rule_source_basename)
-
-        outputs = [self.ExpandInputRoot(out, rule_source_root,
-                                        rule_source_dirname)
-                   for out in rule['outputs']]
-
-        for out in outputs:
-          dir = os.path.dirname(out)
-          if dir:
-            dirs.add(dir)
-        if int(rule.get('process_outputs_as_sources', False)):
-          extra_sources += outputs
-        if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
-          extra_mac_bundle_resources += outputs
-        inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
-                                    rule.get('inputs', [])))
-        actions = ['$(call do_cmd,%s_%d)' % (name, count)]
-
-        if name == 'resources_grit':
-          # HACK: This is ugly.  Grit intentionally doesn't touch the
-          # timestamp of its output file when the file doesn't change,
-          # which is fine in hash-based dependency systems like scons
-          # and forge, but not kosher in the make world.  After some
-          # discussion, hacking around it here seems like the least
-          # amount of pain.
-          actions += ['@touch --no-create $@']
-
-        # See the comment in WriteCopies about expanding env vars.
-        outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
-        inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
-        outputs = map(self.Absolutify, outputs)
-        all_outputs += outputs
-        # Only write the 'obj' and 'builddir' rules for the "primary" output
-        # (:1); it's superfluous for the "extra outputs", and this avoids
-        # accidentally writing duplicate dummy rules for those outputs.
-        self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
-        self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
-        self.WriteMakeRule(outputs, inputs, actions,
-                           command="%s_%d" % (name, count))
-        # Spaces in rule filenames are not supported, but rule variables have
-        # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
-        # The spaces within the variables are valid, so remove the variables
-        # before checking.
-        variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
-        for output in outputs:
-          output = re.sub(variables_with_spaces, '', output)
-          assert ' ' not in output, (
-              "Spaces in rule filenames not yet supported (%s)"  % output)
-        self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
-        action = [self.ExpandInputRoot(ac, rule_source_root,
-                                       rule_source_dirname)
-                  for ac in rule['action']]
-        mkdirs = ''
-        if len(dirs) > 0:
-          mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
-        cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
-        # action, cd_action, and mkdirs get written to a toplevel variable
-        # called cmd_foo. Toplevel variables can't handle things that change
-        # per makefile like $(TARGET), so hardcode the target.
-        if self.flavor == 'mac':
-          action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
-                    for command in action]
-        action = gyp.common.EncodePOSIXShellList(action)
-        action = action.replace('$(TARGET)', self.target)
-        cd_action = cd_action.replace('$(TARGET)', self.target)
-        mkdirs = mkdirs.replace('$(TARGET)', self.target)
-
-        # Set LD_LIBRARY_PATH in case the rule runs an executable from this
-        # build which links to shared libs from this build.
-        # rules run on the host, so they should in theory only use host
-        # libraries, but until everything is made cross-compile safe, also use
-        # target libraries.
-        # TODO(piman): when everything is cross-compile safe, remove lib.target
-        self.WriteLn(
-            "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
-              "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
-              "export LD_LIBRARY_PATH; "
-              "%(cd_action)s%(mkdirs)s%(action)s" % {
-          'action': action,
-          'cd_action': cd_action,
-          'count': count,
-          'mkdirs': mkdirs,
-          'name': name,
-        })
-        self.WriteLn(
-            'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
-          'count': count,
-          'name': name,
-        })
-        self.WriteLn()
-        count += 1
-
-      outputs_variable = 'rule_%s_outputs' % name
-      self.WriteList(all_outputs, outputs_variable)
-      extra_outputs.append('$(%s)' % outputs_variable)
-
-      self.WriteLn('### Finished generating for rule: %s' % name)
-      self.WriteLn()
-    self.WriteLn('### Finished generating for all rules')
-    self.WriteLn('')
-
-
-  def WriteCopies(self, copies, extra_outputs, part_of_all):
-    """Write Makefile code for any 'copies' from the gyp input.
-
-    extra_outputs: a list that will be filled in with any outputs of this action
-                   (used to make other pieces dependent on this action)
-    part_of_all: flag indicating this target is part of 'all'
-    """
-    self.WriteLn('### Generated for copy rule.')
-
-    variable = StringToMakefileVariable(self.qualified_target + '_copies')
-    outputs = []
-    for copy in copies:
-      for path in copy['files']:
-        # Absolutify() may call normpath, and will strip trailing slashes.
-        path = Sourceify(self.Absolutify(path))
-        filename = os.path.split(path)[1]
-        output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
-                                                        filename)))
-
-        # If the output path has variables in it, which happens in practice for
-        # 'copies', writing the environment as target-local doesn't work,
-        # because the variables are already needed for the target name.
-        # Copying the environment variables into global make variables doesn't
-        # work either, because then the .d files will potentially contain spaces
-        # after variable expansion, and .d file handling cannot handle spaces.
-        # As a workaround, manually expand variables at gyp time. Since 'copies'
-        # can't run scripts, there's no need to write the env then.
-        # WriteDoCmd() will escape spaces for .d files.
-        env = self.GetSortedXcodeEnv()
-        output = gyp.xcode_emulation.ExpandEnvVars(output, env)
-        path = gyp.xcode_emulation.ExpandEnvVars(path, env)
-        self.WriteDoCmd([output], [path], 'copy', part_of_all)
-        outputs.append(output)
-    self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
-    extra_outputs.append('$(%s)' % variable)
-    self.WriteLn()
-
-
-  def WriteMacBundleResources(self, resources, bundle_deps):
-    """Writes Makefile code for 'mac_bundle_resources'."""
-    self.WriteLn('### Generated for mac_bundle_resources')
-
-    for output, res in gyp.xcode_emulation.GetMacBundleResources(
-        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
-        map(Sourceify, map(self.Absolutify, resources))):
-      _, ext = os.path.splitext(output)
-      if ext != '.xcassets':
-        # Make does not support '.xcassets' emulation.
-        self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
-                        part_of_all=True)
-        bundle_deps.append(output)
-
-
-  def WriteMacInfoPlist(self, bundle_deps):
-    """Write Makefile code for bundle Info.plist files."""
-    info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
-        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
-        lambda p: Sourceify(self.Absolutify(p)))
-    if not info_plist:
-      return
-    if defines:
-      # Create an intermediate file to store preprocessed results.
-      intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
-          os.path.basename(info_plist))
-      self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
-          quoter=EscapeCppDefine)
-      self.WriteMakeRule([intermediate_plist], [info_plist],
-          ['$(call do_cmd,infoplist)',
-           # "Convert" the plist so that any weird whitespace changes from the
-           # preprocessor do not affect the XML parser in mac_tool.
-           '@plutil -convert xml1 $@ $@'])
-      info_plist = intermediate_plist
-    # plists can contain envvars, which get substituted into the file.
-    self.WriteSortedXcodeEnv(
-        out, self.GetSortedXcodeEnv(additional_settings=extra_env))
-    self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
-                    part_of_all=True)
-    bundle_deps.append(out)
-
-
-  def WriteSources(self, configs, deps, sources,
-                   extra_outputs, extra_link_deps,
-                   part_of_all, precompiled_header):
-    """Write Makefile code for any 'sources' from the gyp input.
-    These are source files necessary to build the current target.
-
-    configs, deps, sources: input from gyp.
-    extra_outputs: a list of extra outputs this compilation should be
-                   dependent on; used to serialize actions/rules before
-                   compilation
-    extra_link_deps: a list that will be filled in with any outputs of
-                     compilation (to be used in link lines)
-    part_of_all: flag indicating this target is part of 'all'
-    """
-
-    # Write configuration-specific variables for CFLAGS, etc.
-    for configname in sorted(configs.keys()):
-      config = configs[configname]
-      self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
-          quoter=EscapeCppDefine)
-
-      if self.flavor == 'mac':
-        cflags = self.xcode_settings.GetCflags(configname)
-        cflags_c = self.xcode_settings.GetCflagsC(configname)
-        cflags_cc = self.xcode_settings.GetCflagsCC(configname)
-        cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
-        cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
-      else:
-        cflags = config.get('cflags')
-        cflags_c = config.get('cflags_c')
-        cflags_cc = config.get('cflags_cc')
-
-      self.WriteLn("# Flags passed to all source files.");
-      self.WriteList(cflags, 'CFLAGS_%s' % configname)
-      self.WriteLn("# Flags passed to only C files.");
-      self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
-      self.WriteLn("# Flags passed to only C++ files.");
-      self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
-      if self.flavor == 'mac':
-        self.WriteLn("# Flags passed to only ObjC files.");
-        self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
-        self.WriteLn("# Flags passed to only ObjC++ files.");
-        self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
-      includes = config.get('include_dirs')
-      if includes:
-        includes = map(Sourceify, map(self.Absolutify, includes))
-      self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
-
-    compilable = filter(Compilable, sources)
-    objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
-    self.WriteList(objs, 'OBJS')
-
-    for obj in objs:
-      assert ' ' not in obj, (
-          "Spaces in object filenames not supported (%s)"  % obj)
-    self.WriteLn('# Add to the list of files we specially track '
-                 'dependencies for.')
-    self.WriteLn('all_deps += $(OBJS)')
-    self.WriteLn()
-
-    # Make sure our dependencies are built first.
-    if deps:
-      self.WriteMakeRule(['$(OBJS)'], deps,
-                         comment = 'Make sure our dependencies are built '
-                                   'before any of us.',
-                         order_only = True)
-
-    # Make sure the actions and rules run first.
-    # If they generate any extra headers etc., the per-.o file dep tracking
-    # will catch the proper rebuilds, so order only is still ok here.
-    if extra_outputs:
-      self.WriteMakeRule(['$(OBJS)'], extra_outputs,
-                         comment = 'Make sure our actions/rules run '
-                                   'before any of us.',
-                         order_only = True)
-
-    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
-    if pchdeps:
-      self.WriteLn('# Dependencies from obj files to their precompiled headers')
-      for source, obj, gch in pchdeps:
-        self.WriteLn('%s: %s' % (obj, gch))
-      self.WriteLn('# End precompiled header dependencies')
-
-    if objs:
-      extra_link_deps.append('$(OBJS)')
-      self.WriteLn("""\
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.""")
-      self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
-      self.WriteLn("$(OBJS): GYP_CFLAGS := "
-                   "$(DEFS_$(BUILDTYPE)) "
-                   "$(INCS_$(BUILDTYPE)) "
-                   "%s " % precompiled_header.GetInclude('c') +
-                   "$(CFLAGS_$(BUILDTYPE)) "
-                   "$(CFLAGS_C_$(BUILDTYPE))")
-      self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
-                   "$(DEFS_$(BUILDTYPE)) "
-                   "$(INCS_$(BUILDTYPE)) "
-                   "%s " % precompiled_header.GetInclude('cc') +
-                   "$(CFLAGS_$(BUILDTYPE)) "
-                   "$(CFLAGS_CC_$(BUILDTYPE))")
-      if self.flavor == 'mac':
-        self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
-                     "$(DEFS_$(BUILDTYPE)) "
-                     "$(INCS_$(BUILDTYPE)) "
-                     "%s " % precompiled_header.GetInclude('m') +
-                     "$(CFLAGS_$(BUILDTYPE)) "
-                     "$(CFLAGS_C_$(BUILDTYPE)) "
-                     "$(CFLAGS_OBJC_$(BUILDTYPE))")
-        self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
-                     "$(DEFS_$(BUILDTYPE)) "
-                     "$(INCS_$(BUILDTYPE)) "
-                     "%s " % precompiled_header.GetInclude('mm') +
-                     "$(CFLAGS_$(BUILDTYPE)) "
-                     "$(CFLAGS_CC_$(BUILDTYPE)) "
-                     "$(CFLAGS_OBJCC_$(BUILDTYPE))")
-
-    self.WritePchTargets(precompiled_header.GetPchBuildCommands())
-
-    # If there are any object files in our input file list, link them into our
-    # output.
-    extra_link_deps += filter(Linkable, sources)
-
-    self.WriteLn()
-
-  def WritePchTargets(self, pch_commands):
-    """Writes make rules to compile prefix headers."""
-    if not pch_commands:
-      return
-
-    for gch, lang_flag, lang, input in pch_commands:
-      extra_flags = {
-        'c': '$(CFLAGS_C_$(BUILDTYPE))',
-        'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
-        'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
-        'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
-      }[lang]
-      var_name = {
-        'c': 'GYP_PCH_CFLAGS',
-        'cc': 'GYP_PCH_CXXFLAGS',
-        'm': 'GYP_PCH_OBJCFLAGS',
-        'mm': 'GYP_PCH_OBJCXXFLAGS',
-      }[lang]
-      self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
-                   "$(DEFS_$(BUILDTYPE)) "
-                   "$(INCS_$(BUILDTYPE)) "
-                   "$(CFLAGS_$(BUILDTYPE)) " +
-                   extra_flags)
-
-      self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
-      self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
-      self.WriteLn('')
-      assert ' ' not in gch, (
-          "Spaces in gch filenames not supported (%s)"  % gch)
-      self.WriteLn('all_deps += %s' % gch)
-      self.WriteLn('')
-
-
-  def ComputeOutputBasename(self, spec):
-    """Return the 'output basename' of a gyp spec.
-
-    E.g., the loadable module 'foobar' in directory 'baz' will produce
-      'libfoobar.so'
-    """
-    assert not self.is_mac_bundle
-
-    if self.flavor == 'mac' and self.type in (
-        'static_library', 'executable', 'shared_library', 'loadable_module'):
-      return self.xcode_settings.GetExecutablePath()
-
-    target = spec['target_name']
-    target_prefix = ''
-    target_ext = ''
-    if self.type == 'static_library':
-      if target[:3] == 'lib':
-        target = target[3:]
-      target_prefix = 'lib'
-      target_ext = '.a'
-    elif self.type in ('loadable_module', 'shared_library'):
-      if target[:3] == 'lib':
-        target = target[3:]
-      target_prefix = 'lib'
-      target_ext = '.so'
-    elif self.type == 'none':
-      target = '%s.stamp' % target
-    elif self.type != 'executable':
-      print ("ERROR: What output file should be generated?",
-             "type", self.type, "target", target)
-
-    target_prefix = spec.get('product_prefix', target_prefix)
-    target = spec.get('product_name', target)
-    product_ext = spec.get('product_extension')
-    if product_ext:
-      target_ext = '.' + product_ext
-
-    return target_prefix + target + target_ext
-
-
-  def _InstallImmediately(self):
-    return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
-          'static_library', 'executable', 'shared_library', 'loadable_module')
-
-
-  def ComputeOutput(self, spec):
-    """Return the 'output' (full output path) of a gyp spec.
-
-    E.g., the loadable module 'foobar' in directory 'baz' will produce
-      '$(obj)/baz/libfoobar.so'
-    """
-    assert not self.is_mac_bundle
-
-    path = os.path.join('$(obj).' + self.toolset, self.path)
-    if self.type == 'executable' or self._InstallImmediately():
-      path = '$(builddir)'
-    path = spec.get('product_dir', path)
-    return os.path.join(path, self.ComputeOutputBasename(spec))
-
-
-  def ComputeMacBundleOutput(self, spec):
-    """Return the 'output' (full output path) to a bundle output directory."""
-    assert self.is_mac_bundle
-    path = generator_default_variables['PRODUCT_DIR']
-    return os.path.join(path, self.xcode_settings.GetWrapperName())
-
-
-  def ComputeMacBundleBinaryOutput(self, spec):
-    """Return the 'output' (full output path) to the binary in a bundle."""
-    path = generator_default_variables['PRODUCT_DIR']
-    return os.path.join(path, self.xcode_settings.GetExecutablePath())
-
-
-  def ComputeDeps(self, spec):
-    """Compute the dependencies of a gyp spec.
-
-    Returns a tuple (deps, link_deps), where each is a list of
-    filenames that will need to be put in front of make for either
-    building (deps) or linking (link_deps).
-    """
-    deps = []
-    link_deps = []
-    if 'dependencies' in spec:
-      deps.extend([target_outputs[dep] for dep in spec['dependencies']
-                   if target_outputs[dep]])
-      for dep in spec['dependencies']:
-        if dep in target_link_deps:
-          link_deps.append(target_link_deps[dep])
-      deps.extend(link_deps)
-      # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
-      # This hack makes it work:
-      # link_deps.extend(spec.get('libraries', []))
-    return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
-  def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
-    self.WriteMakeRule([self.output_binary], extra_outputs,
-                       comment = 'Build our special outputs first.',
-                       order_only = True)
-
-
-  def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
-                  extra_outputs, part_of_all):
-    """Write Makefile code to produce the final target of the gyp spec.
-
-    spec, configs: input from gyp.
-    deps, link_deps: dependency lists; see ComputeDeps()
-    extra_outputs: any extra outputs that our target should depend on
-    part_of_all: flag indicating this target is part of 'all'
-    """
-
-    self.WriteLn('### Rules for final target.')
-
-    if extra_outputs:
-      self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
-      self.WriteMakeRule(extra_outputs, deps,
-                         comment=('Preserve order dependency of '
-                                  'special output on deps.'),
-                         order_only = True)
-
-    target_postbuilds = {}
-    if self.type != 'none':
-      for configname in sorted(configs.keys()):
-        config = configs[configname]
-        if self.flavor == 'mac':
-          ldflags = self.xcode_settings.GetLdflags(configname,
-              generator_default_variables['PRODUCT_DIR'],
-              lambda p: Sourceify(self.Absolutify(p)))
-
-          # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
-          gyp_to_build = gyp.common.InvertRelativePath(self.path)
-          target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
-              configname,
-              QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
-                                                        self.output))),
-              QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
-                                                        self.output_binary))))
-          if target_postbuild:
-            target_postbuilds[configname] = target_postbuild
-        else:
-          ldflags = config.get('ldflags', [])
-          # Compute an rpath for this output if needed.
-          if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
-            # We want to get the literal string "$ORIGIN" into the link command,
-            # so we need lots of escaping.
-            ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
-            ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
-                           self.toolset)
-        library_dirs = config.get('library_dirs', [])
-        ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
-        self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
-        if self.flavor == 'mac':
-          self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
-                         'LIBTOOLFLAGS_%s' % configname)
-      libraries = spec.get('libraries')
-      if libraries:
-        # Remove duplicate entries
-        libraries = gyp.common.uniquer(libraries)
-        if self.flavor == 'mac':
-          libraries = self.xcode_settings.AdjustLibraries(libraries)
-      self.WriteList(libraries, 'LIBS')
-      self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
-          QuoteSpaces(self.output_binary))
-      self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
-
-      if self.flavor == 'mac':
-        self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
-            QuoteSpaces(self.output_binary))
-
-    # Postbuild actions. Like actions, but implicitly depend on the target's
-    # output.
-    postbuilds = []
-    if self.flavor == 'mac':
-      if target_postbuilds:
-        postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
-      postbuilds.extend(
-          gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
-
-    if postbuilds:
-      # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
-      # so we must output their definitions first, since we declare variables
-      # using ":=".
-      self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
-
-      for configname in target_postbuilds:
-        self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
-            (QuoteSpaces(self.output),
-             configname,
-             gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
-
-      # Postbuilds expect to be run in the gyp file's directory, so insert an
-      # implicit postbuild to cd to there.
-      postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
-      for i in xrange(len(postbuilds)):
-        if not postbuilds[i].startswith('$'):
-          postbuilds[i] = EscapeShellArgument(postbuilds[i])
-      self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
-      self.WriteLn('%s: POSTBUILDS := %s' % (
-          QuoteSpaces(self.output), ' '.join(postbuilds)))
-
-    # A bundle directory depends on its dependencies such as bundle resources
-    # and bundle binary. When all dependencies have been built, the bundle
-    # needs to be packaged.
-    if self.is_mac_bundle:
-      # If the framework doesn't contain a binary, then nothing depends
-      # on the actions -- make the framework depend on them directly too.
-      self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
-
-      # Bundle dependencies. Note that the code below adds actions to this
-      # target, so if you move these two lines, move the lines below as well.
-      self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
-      self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
-
-      # After the framework is built, package it. Needs to happen before
-      # postbuilds, since postbuilds depend on this.
-      if self.type in ('shared_library', 'loadable_module'):
-        self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
-            self.xcode_settings.GetFrameworkVersion())
-
-      # Bundle postbuilds can depend on the whole bundle, so run them after
-      # the bundle is packaged, rather than right after the bundle binary is
-      # done.
-      if postbuilds:
-        self.WriteLn('\t@$(call do_postbuilds)')
-      postbuilds = []  # Don't write postbuilds for target's output.
-
-      # Needed by test/mac/gyptest-rebuild.py.
-      self.WriteLn('\t@true  # No-op, used by tests')
-
-      # Since this target depends on a binary and resources which are in
-      # nested subfolders, the framework directory will usually be older than
-      # its dependencies. To prevent this rule from executing on every build
-      # (expensive, especially with postbuilds), explicitly update the
-      # timestamp on the framework directory.
-      self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
-
-    if postbuilds:
-      assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
-          'on the bundle, not the binary (target \'%s\')' % self.target)
-      assert 'product_dir' not in spec, ('Postbuilds do not work with '
-          'custom product_dir')
-
-    if self.type == 'executable':
-      self.WriteLn('%s: LD_INPUTS := %s' % (
-          QuoteSpaces(self.output_binary),
-          ' '.join(map(QuoteSpaces, link_deps))))
-      if self.toolset == 'host' and self.flavor == 'android':
-        self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
-                        part_of_all, postbuilds=postbuilds)
-      else:
-        self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
-                        postbuilds=postbuilds)
-
-    elif self.type == 'static_library':
-      for link_dep in link_deps:
-        assert ' ' not in link_dep, (
-            "Spaces in alink input filenames not supported (%s)"  % link_dep)
-      if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
-          self.is_standalone_static_library):
-        self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
-                        part_of_all, postbuilds=postbuilds)
-      else:
-        self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
-                        postbuilds=postbuilds)
-    elif self.type == 'shared_library':
-      self.WriteLn('%s: LD_INPUTS := %s' % (
-            QuoteSpaces(self.output_binary),
-            ' '.join(map(QuoteSpaces, link_deps))))
-      self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
-                      postbuilds=postbuilds)
-    elif self.type == 'loadable_module':
-      for link_dep in link_deps:
-        assert ' ' not in link_dep, (
-            "Spaces in module input filenames not supported (%s)"  % link_dep)
-      if self.toolset == 'host' and self.flavor == 'android':
-        self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
-                        part_of_all, postbuilds=postbuilds)
-      else:
-        self.WriteDoCmd(
-            [self.output_binary], link_deps, 'solink_module', part_of_all,
-            postbuilds=postbuilds)
-    elif self.type == 'none':
-      # Write a stamp line.
-      self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
-                      postbuilds=postbuilds)
-    else:
-      print "WARNING: no output for", self.type, target
-
-    # Add an alias for each target (if there are any outputs).
-    # Installable target aliases are created below.
-    if ((self.output and self.output != self.target) and
-        (self.type not in self._INSTALLABLE_TARGETS)):
-      self.WriteMakeRule([self.target], [self.output],
-                         comment='Add target alias', phony = True)
-      if part_of_all:
-        self.WriteMakeRule(['all'], [self.target],
-                           comment = 'Add target alias to "all" target.',
-                           phony = True)
-
-    # Add special-case rules for our installable targets.
-    # 1) They need to install to the build dir or "product" dir.
-    # 2) They get shortcuts for building (e.g. "make chrome").
-    # 3) They are part of "make all".
-    if (self.type in self._INSTALLABLE_TARGETS or
-        self.is_standalone_static_library):
-      if self.type == 'shared_library':
-        file_desc = 'shared library'
-      elif self.type == 'static_library':
-        file_desc = 'static library'
-      else:
-        file_desc = 'executable'
-      install_path = self._InstallableTargetInstallPath()
-      installable_deps = [self.output]
-      if (self.flavor == 'mac' and not 'product_dir' in spec and
-          self.toolset == 'target'):
-        # On mac, products are created in install_path immediately.
-        assert install_path == self.output, '%s != %s' % (
-            install_path, self.output)
-
-      # Point the target alias to the final binary output.
-      self.WriteMakeRule([self.target], [install_path],
-                         comment='Add target alias', phony = True)
-      if install_path != self.output:
-        assert not self.is_mac_bundle  # See comment a few lines above.
-        self.WriteDoCmd([install_path], [self.output], 'copy',
-                        comment = 'Copy this to the %s output path.' %
-                        file_desc, part_of_all=part_of_all)
-        installable_deps.append(install_path)
-      if self.output != self.alias and self.alias != self.target:
-        self.WriteMakeRule([self.alias], installable_deps,
-                           comment = 'Short alias for building this %s.' %
-                           file_desc, phony = True)
-      if part_of_all:
-        self.WriteMakeRule(['all'], [install_path],
-                           comment = 'Add %s to "all" target.' % file_desc,
-                           phony = True)
-
-
-  def WriteList(self, value_list, variable=None, prefix='',
-                quoter=QuoteIfNecessary):
-    """Write a variable definition that is a list of values.
-
-    E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
-         foo = blaha blahb
-    but in a pretty-printed style.
-    """
-    values = ''
-    if value_list:
-      value_list = [quoter(prefix + l) for l in value_list]
-      values = ' \\\n\t' + ' \\\n\t'.join(value_list)
-    self.fp.write('%s :=%s\n\n' % (variable, values))
-
-
-  def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
-                 postbuilds=False):
-    """Write a Makefile rule that uses do_cmd.
-
-    This makes the outputs dependent on the command line that was run,
-    as well as supporting the V= make command line flag.
-    """
-    suffix = ''
-    if postbuilds:
-      assert ',' not in command
-      suffix = ',,1'  # Tell do_cmd to honor $POSTBUILDS
-    self.WriteMakeRule(outputs, inputs,
-                       actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
-                       comment = comment,
-                       command = command,
-                       force = True)
-    # Add our outputs to the list of targets we read depfiles from.
-    # all_deps is only used for deps file reading, and for deps files we replace
-    # spaces with ? because escaping doesn't work with make's $(sort) and
-    # other functions.
-    outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
-    self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
-
-  def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
-                    order_only=False, force=False, phony=False, command=None):
-    """Write a Makefile rule, with some extra tricks.
-
-    outputs: a list of outputs for the rule (note: this is not directly
-             supported by make; see comments below)
-    inputs: a list of inputs for the rule
-    actions: a list of shell commands to run for the rule
-    comment: a comment to put in the Makefile above the rule (also useful
-             for making this Python script's code self-documenting)
-    order_only: if true, makes the dependency order-only
-    force: if true, include FORCE_DO_CMD as an order-only dep
-    phony: if true, the rule does not actually generate the named output; the
-           output is just a name to run the rule
-    command: (optional) command name to generate unambiguous labels
-    """
-    outputs = map(QuoteSpaces, outputs)
-    inputs = map(QuoteSpaces, inputs)
-
-    if comment:
-      self.WriteLn('# ' + comment)
-    if phony:
-      self.WriteLn('.PHONY: ' + ' '.join(outputs))
-    if actions:
-      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
-    force_append = ' FORCE_DO_CMD' if force else ''
-
-    if order_only:
-      # Order only rule: Just write a simple rule.
-      # TODO(evanm): just make order_only a list of deps instead of this hack.
-      self.WriteLn('%s: | %s%s' %
-                   (' '.join(outputs), ' '.join(inputs), force_append))
-    elif len(outputs) == 1:
-      # Regular rule, one output: Just write a simple rule.
-      self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
-    else:
-      # Regular rule, more than one output: Multiple outputs are tricky in
-      # make. We will write three rules:
-      # - All outputs depend on an intermediate file.
-      # - Make .INTERMEDIATE depend on the intermediate.
-      # - The intermediate file depends on the inputs and executes the
-      #   actual command.
-      # The intermediate recipe will 'touch' the intermediate file, and the
-      # multi-output rule will have a do-nothing recipe.
-      intermediate = "%s.intermediate" % (command if command else self.target)
-      self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
-      self.WriteLn('\t@:')
-      self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
-      self.WriteLn('%s: %s%s' %
-                   (intermediate, ' '.join(inputs), force_append))
-      actions.insert(0, '$(call do_cmd,touch)')
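-      # For example (hypothetical values: outputs ['a', 'b'], inputs ['x'],
-      # command 'gen', force=True), the lines written above come out roughly as
-      #   a b: gen.intermediate
-      #     @:
-      #   .INTERMEDIATE: gen.intermediate
-      #   gen.intermediate: x FORCE_DO_CMD
-      # followed below by the touch-prefixed actions as tab-indented recipe
-      # lines.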
-
-    if actions:
-      for action in actions:
-        self.WriteLn('\t%s' % action)
-    self.WriteLn()
-
-
-  def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
-    """Write a set of LOCAL_XXX definitions for Android NDK.
-
-    These variable definitions will be used by Android NDK but do nothing for
-    non-Android applications.
-
-    Arguments:
-      module_name: Android NDK module name, which must be unique among all
-          module names.
-      all_sources: A list of source files (will be filtered by Compilable).
-      link_deps: A list of link dependencies, which must be sorted in
-          the order from dependencies to dependents.
-    """
-    if self.type not in ('executable', 'shared_library', 'static_library'):
-      return
-
-    self.WriteLn('# Variable definitions for Android applications')
-    self.WriteLn('include $(CLEAR_VARS)')
-    self.WriteLn('LOCAL_MODULE := ' + module_name)
-    self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
-                 '$(DEFS_$(BUILDTYPE)) '
-                 # LOCAL_CFLAGS is applied to both C and C++.  There is
-                 # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
-                 # sources.
-                 '$(CFLAGS_C_$(BUILDTYPE)) '
-                 # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
-                 # LOCAL_C_INCLUDES does not expect it.  So put it in
-                 # LOCAL_CFLAGS.
-                 '$(INCS_$(BUILDTYPE))')
-    # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
-    self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
-    self.WriteLn('LOCAL_C_INCLUDES :=')
-    self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
-
-    # Detect the C++ extension.
-    cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
-    default_cpp_ext = '.cpp'
-    for filename in all_sources:
-      ext = os.path.splitext(filename)[1]
-      if ext in cpp_ext:
-        cpp_ext[ext] += 1
-        if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
-          default_cpp_ext = ext
-    self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
-
-    self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
-                   'LOCAL_SRC_FILES')
-
-    # Filter out entries that do not match the prefix and suffix, and produce
-    # the resulting list with the prefix and suffix stripped.
-    def DepsToModules(deps, prefix, suffix):
-      modules = []
-      for filepath in deps:
-        filename = os.path.basename(filepath)
-        if filename.startswith(prefix) and filename.endswith(suffix):
-          modules.append(filename[len(prefix):-len(suffix)])
-      return modules
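-    # Hypothetical example: DepsToModules(['out/libfoo.so'], 'lib', '.so')
-    # would return ['foo'].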
-
-    # Retrieve the default value of 'SHARED_LIB_SUFFIX'
-    params = {'flavor': 'linux'}
-    default_variables = {}
-    CalculateVariables(default_variables, params)
-
-    self.WriteList(
-        DepsToModules(link_deps,
-                      generator_default_variables['SHARED_LIB_PREFIX'],
-                      default_variables['SHARED_LIB_SUFFIX']),
-        'LOCAL_SHARED_LIBRARIES')
-    self.WriteList(
-        DepsToModules(link_deps,
-                      generator_default_variables['STATIC_LIB_PREFIX'],
-                      generator_default_variables['STATIC_LIB_SUFFIX']),
-        'LOCAL_STATIC_LIBRARIES')
-
-    if self.type == 'executable':
-      self.WriteLn('include $(BUILD_EXECUTABLE)')
-    elif self.type == 'shared_library':
-      self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
-    elif self.type == 'static_library':
-      self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
-    self.WriteLn()
-
-
-  def WriteLn(self, text=''):
-    self.fp.write(text + '\n')
-
-
-  def GetSortedXcodeEnv(self, additional_settings=None):
-    return gyp.xcode_emulation.GetSortedXcodeEnv(
-        self.xcode_settings, "$(abs_builddir)",
-        os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
-        additional_settings)
-
-
-  def GetSortedXcodePostbuildEnv(self):
-    # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
-    # TODO(thakis): It would be nice to have some general mechanism instead.
-    strip_save_file = self.xcode_settings.GetPerTargetSetting(
-        'CHROMIUM_STRIP_SAVE_FILE', '')
-    # Even if strip_save_file is empty, explicitly write it. Else a postbuild
-    # might pick up an export from an earlier target.
-    return self.GetSortedXcodeEnv(
-        additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
-
-
-  def WriteSortedXcodeEnv(self, target, env):
-    for k, v in env:
-      # For
-      #  foo := a\ b
-      # the escaped space does the right thing. For
-      #  export foo := a\ b
-      # it does not -- the backslash is written to the env as a literal character.
-      # So don't escape spaces in |env[k]|.
-      self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
-
-
-  def Objectify(self, path):
-    """Convert a path to its output directory form."""
-    if '$(' in path:
-      path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
-    if not '$(obj)' in path:
-      path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
-    return path
-
-
-  def Pchify(self, path, lang):
-    """Convert a prefix header path to its output directory form."""
-    path = self.Absolutify(path)
-    if '$(' in path:
-      path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
-                          (self.toolset, lang))
-      return path
-    return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
-
-
-  def Absolutify(self, path):
-    """Convert a subdirectory-relative path into a base-relative path.
-    Skips over paths that contain variables."""
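-    # Hypothetical example: with self.path 'foo', Absolutify('bar/baz.c')
-    # would return 'foo/bar/baz.c', while Absolutify('$(builddir)/lib/')
-    # would return '$(builddir)/lib' with only the trailing slash stripped.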
-    if '$(' in path:
-      # Don't call normpath in this case, as it might collapse the
-      # path too aggressively if it features '..'. However it's still
-      # important to strip trailing slashes.
-      return path.rstrip('/')
-    return os.path.normpath(os.path.join(self.path, path))
-
-
-  def ExpandInputRoot(self, template, expansion, dirname):
-    if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
-      return template
-    path = template % {
-        'INPUT_ROOT': expansion,
-        'INPUT_DIRNAME': dirname,
-        }
-    return path
-
-
-  def _InstallableTargetInstallPath(self):
-    """Returns the location of the final output for an installable target."""
-    # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
-    # rely on this. Emulate this behavior for mac.
-    if (self.type == 'shared_library' and
-        (self.flavor != 'mac' or self.toolset != 'target')):
-      # Install all shared libs into a common directory (per toolset) for
-      # convenient access with LD_LIBRARY_PATH.
-      return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
-    return '$(builddir)/' + self.alias
-
-
-def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
-                              build_files):
-  """Write the target to regenerate the Makefile."""
-  options = params['options']
-  build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
-                      for filename in params['build_files_arg']]
-
-  gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
-                                            options.toplevel_dir)
-  if not gyp_binary.startswith(os.sep):
-    gyp_binary = os.path.join('.', gyp_binary)
-
-  root_makefile.write(
-      "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
-      "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
-      "%(makefile_name)s: %(deps)s\n"
-      "\t$(call do_cmd,regen_makefile)\n\n" % {
-          'makefile_name': makefile_name,
-          'deps': ' '.join(map(Sourceify, build_files)),
-          'cmd': gyp.common.EncodePOSIXShellList(
-                     [gyp_binary, '-fmake'] +
-                     gyp.RegenerateFlags(options) +
-                     build_files_args)})
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-  for config in configurations:
-    arguments = ['make']
-    if options.toplevel_dir and options.toplevel_dir != '.':
-      arguments += '-C', options.toplevel_dir
-    arguments.append('BUILDTYPE=' + config)
-    print 'Building [%s]: %s' % (config, arguments)
-    subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  options = params['options']
-  flavor = gyp.common.GetFlavor(params)
-  generator_flags = params.get('generator_flags', {})
-  builddir_name = generator_flags.get('output_dir', 'out')
-  android_ndk_version = generator_flags.get('android_ndk_version', None)
-  default_target = generator_flags.get('default_target', 'all')
-
-  def CalculateMakefilePath(build_file, base_name):
-    """Determine where to write a Makefile for a given gyp file."""
-    # Paths in gyp files are relative to the .gyp file, but we want
-    # paths relative to the source root for the master makefile.  Grab
-    # the path of the .gyp file as the base to relativize against.
-    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
-    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
-                                        options.depth)
-    # We write the file in the base_path directory.
-    output_file = os.path.join(options.depth, base_path, base_name)
-    if options.generator_output:
-      output_file = os.path.join(
-          options.depth, options.generator_output, base_path, base_name)
-    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
-                                        options.toplevel_dir)
-    return base_path, output_file
-
-  # TODO:  search for the first non-'Default' target.  This can go
-  # away when we add verification that all targets have the
-  # necessary configurations.
-  default_configuration = None
-  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
-  for target in target_list:
-    spec = target_dicts[target]
-    if spec['default_configuration'] != 'Default':
-      default_configuration = spec['default_configuration']
-      break
-  if not default_configuration:
-    default_configuration = 'Default'
-
-  srcdir = '.'
-  makefile_name = 'Makefile' + options.suffix
-  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
-  if options.generator_output:
-    global srcdir_prefix
-    makefile_path = os.path.join(
-        options.toplevel_dir, options.generator_output, makefile_name)
-    srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
-    srcdir_prefix = '$(srcdir)/'
-
-  flock_command = 'flock'
-  copy_archive_arguments = '-af'
-  header_params = {
-      'default_target': default_target,
-      'builddir': builddir_name,
-      'default_configuration': default_configuration,
-      'flock': flock_command,
-      'flock_index': 1,
-      'link_commands': LINK_COMMANDS_LINUX,
-      'extra_commands': '',
-      'srcdir': srcdir,
-      'copy_archive_args': copy_archive_arguments,
-    }
-  if flavor == 'mac':
-    flock_command = './gyp-mac-tool flock'
-    header_params.update({
-        'flock': flock_command,
-        'flock_index': 2,
-        'link_commands': LINK_COMMANDS_MAC,
-        'extra_commands': SHARED_HEADER_MAC_COMMANDS,
-    })
-  elif flavor == 'android':
-    header_params.update({
-        'link_commands': LINK_COMMANDS_ANDROID,
-    })
-  elif flavor == 'solaris':
-    header_params.update({
-        'flock': './gyp-flock-tool flock',
-        'flock_index': 2,
-    })
-  elif flavor == 'freebsd':
-    # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
-    header_params.update({
-        'flock': 'lockf',
-    })
-  elif flavor == 'openbsd':
-    copy_archive_arguments = '-pPRf'
-    header_params.update({
-        'copy_archive_args': copy_archive_arguments,
-    })
-  elif flavor == 'aix':
-    copy_archive_arguments = '-pPRf'
-    header_params.update({
-        'copy_archive_args': copy_archive_arguments,
-        'link_commands': LINK_COMMANDS_AIX,
-        'flock': './gyp-flock-tool flock',
-        'flock_index': 2,
-    })
-
-  header_params.update({
-    'CC.target':   GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
-    'AR.target':   GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
-    'CXX.target':  GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
-    'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
-    'CC.host':     GetEnvironFallback(('CC_host',), 'gcc'),
-    'AR.host':     GetEnvironFallback(('AR_host',), 'ar'),
-    'CXX.host':    GetEnvironFallback(('CXX_host',), 'g++'),
-    'LINK.host':   GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
-  })
-
-  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
-  make_global_settings_array = data[build_file].get('make_global_settings', [])
-  wrappers = {}
-  for key, value in make_global_settings_array:
-    if key.endswith('_wrapper'):
-      wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
-  make_global_settings = ''
-  for key, value in make_global_settings_array:
-    if re.match('.*_wrapper', key):
-      continue
-    if value[0] != '$':
-      value = '$(abspath %s)' % value
-    wrapper = wrappers.get(key)
-    if wrapper:
-      value = '%s %s' % (wrapper, value)
-      del wrappers[key]
-    if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
-      make_global_settings += (
-          'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
-      # Let gyp-time envvars win over global settings.
-      env_key = key.replace('.', '_')  # CC.host -> CC_host
-      if env_key in os.environ:
-        value = os.environ[env_key]
-      make_global_settings += '  %s = %s\n' % (key, value)
-      make_global_settings += 'endif\n'
-    else:
-      make_global_settings += '%s ?= %s\n' % (key, value)
-  # TODO(ukai): define cmd when only wrapper is specified in
-  # make_global_settings.
-
-  header_params['make_global_settings'] = make_global_settings
-
-  gyp.common.EnsureDirExists(makefile_path)
-  root_makefile = open(makefile_path, 'w')
-  root_makefile.write(SHARED_HEADER % header_params)
-  # Currently all versions have the same effect, but in the future the
-  # behavior could be different.
-  if android_ndk_version:
-    root_makefile.write(
-        '# Define LOCAL_PATH for build of Android applications.\n'
-        'LOCAL_PATH := $(call my-dir)\n'
-        '\n')
-  for toolset in toolsets:
-    root_makefile.write('TOOLSET := %s\n' % toolset)
-    WriteRootHeaderSuffixRules(root_makefile)
-
-  # Put build-time support tools next to the root Makefile.
-  dest_path = os.path.dirname(makefile_path)
-  gyp.common.CopyTool(flavor, dest_path)
-
-  # Find the list of targets that derive from the gyp file(s) being built.
-  needed_targets = set()
-  for build_file in params['build_files']:
-    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
-      needed_targets.add(target)
-
-  build_files = set()
-  include_list = set()
-  for qualified_target in target_list:
-    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
-        qualified_target)
-
-    this_make_global_settings = data[build_file].get('make_global_settings', [])
-    assert make_global_settings_array == this_make_global_settings, (
-        "make_global_settings needs to be the same for all targets. %s vs. %s" %
-        (this_make_global_settings, make_global_settings))
-
-    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
-    included_files = data[build_file]['included_files']
-    for included_file in included_files:
-      # The included_files entries are relative to the dir of the build file
-      # that included them, so we have to undo that and then make them relative
-      # to the root dir.
-      relative_include_file = gyp.common.RelativePath(
-          gyp.common.UnrelativePath(included_file, build_file),
-          options.toplevel_dir)
-      abs_include_file = os.path.abspath(relative_include_file)
-      # If the include file is from the ~/.gyp dir, we should use absolute path
-      # so that relocating the src dir doesn't break the path.
-      if (params['home_dot_gyp'] and
-          abs_include_file.startswith(params['home_dot_gyp'])):
-        build_files.add(abs_include_file)
-      else:
-        build_files.add(relative_include_file)
-
-    base_path, output_file = CalculateMakefilePath(build_file,
-        target + '.' + toolset + options.suffix + '.mk')
-
-    spec = target_dicts[qualified_target]
-    configs = spec['configurations']
-
-    if flavor == 'mac':
-      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
-    writer = MakefileWriter(generator_flags, flavor)
-    writer.Write(qualified_target, base_path, output_file, spec, configs,
-                 part_of_all=qualified_target in needed_targets)
-
-    # Our root_makefile lives at the source root.  Compute the relative path
-    # from there to the output_file for including.
-    mkfile_rel_path = gyp.common.RelativePath(output_file,
-                                              os.path.dirname(makefile_path))
-    include_list.add(mkfile_rel_path)
-
-  # Write out per-gyp (sub-project) Makefiles.
-  depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
-  for build_file in build_files:
-    # The paths in build_files were relativized above, so undo that before
-    # testing against the non-relativized items in target_list and before
-    # calculating the Makefile path.
-    build_file = os.path.join(depth_rel_path, build_file)
-    gyp_targets = [target_dicts[target]['target_name'] for target in target_list
-                   if target.startswith(build_file) and
-                   target in needed_targets]
-    # Only generate Makefiles for gyp files with targets.
-    if not gyp_targets:
-      continue
-    base_path, output_file = CalculateMakefilePath(build_file,
-        os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
-    makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
-                                                os.path.dirname(output_file))
-    writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
-                        builddir_name)
-
-
-  # Write out the sorted list of includes.
-  root_makefile.write('\n')
-  for include_file in sorted(include_list):
-    # We wrap each .mk include in an if statement so users can tell make to
-    # not load a file by setting NO_LOAD.  The make code below says: only
-    # load the .mk file if its filename doesn't start with a token in
-    # NO_LOAD.
-    root_makefile.write(
-        "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
-        "    $(findstring $(join ^,$(prefix)),\\\n"
-        "                 $(join ^," + include_file + ")))),)\n")
-    root_makefile.write("  include " + include_file + "\n")
-    root_makefile.write("endif\n")
-  root_makefile.write('\n')
-
-  if (not generator_flags.get('standalone')
-      and generator_flags.get('auto_regeneration', True)):
-    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
-
-  root_makefile.write(SHARED_FOOTER)
-
-  root_makefile.close()
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
deleted file mode 100644
index e60c025..0000000
--- a/tools/gyp/pylib/gyp/generator/msvs.py
+++ /dev/null
@@ -1,3499 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import ntpath
-import os
-import posixpath
-import re
-import subprocess
-import sys
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-import gyp.generator.ninja as ninja_generator
-import gyp.MSVSNew as MSVSNew
-import gyp.MSVSProject as MSVSProject
-import gyp.MSVSSettings as MSVSSettings
-import gyp.MSVSToolFile as MSVSToolFile
-import gyp.MSVSUserFile as MSVSUserFile
-import gyp.MSVSUtil as MSVSUtil
-import gyp.MSVSVersion as MSVSVersion
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-# TODO: Remove once bots are on 2.7, http://crbug.com/241769
-def _import_OrderedDict():
-  import collections
-  try:
-    return collections.OrderedDict
-  except AttributeError:
-    import gyp.ordered_dict
-    return gyp.ordered_dict.OrderedDict
-OrderedDict = _import_OrderedDict()
-
-
-# Regular expression for validating Visual Studio GUIDs.  If the GUID
-# contains lowercase hex letters, MSVS will be fine. However,
-# IncrediBuild BuildConsole will parse the solution file, but then
-# silently skip building the target, causing hard-to-track-down errors.
-# Note that this only happens with the BuildConsole, and does not occur
-# if IncrediBuild is executed from inside Visual Studio.  This regex
-# validates that the string looks like a GUID with all uppercase hex
-# letters.
-VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
-
-
-generator_default_variables = {
-    'EXECUTABLE_PREFIX': '',
-    'EXECUTABLE_SUFFIX': '.exe',
-    'STATIC_LIB_PREFIX': '',
-    'SHARED_LIB_PREFIX': '',
-    'STATIC_LIB_SUFFIX': '.lib',
-    'SHARED_LIB_SUFFIX': '.dll',
-    'INTERMEDIATE_DIR': '$(IntDir)',
-    'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
-    'OS': 'win',
-    'PRODUCT_DIR': '$(OutDir)',
-    'LIB_DIR': '$(OutDir)lib',
-    'RULE_INPUT_ROOT': '$(InputName)',
-    'RULE_INPUT_DIRNAME': '$(InputDir)',
-    'RULE_INPUT_EXT': '$(InputExt)',
-    'RULE_INPUT_NAME': '$(InputFileName)',
-    'RULE_INPUT_PATH': '$(InputPath)',
-    'CONFIGURATION_NAME': '$(ConfigurationName)',
-}
-
-
-# The msvs specific sections that hold paths
-generator_additional_path_sections = [
-    'msvs_cygwin_dirs',
-    'msvs_props',
-]
-
-
-generator_additional_non_configuration_keys = [
-    'msvs_cygwin_dirs',
-    'msvs_cygwin_shell',
-    'msvs_large_pdb',
-    'msvs_shard',
-    'msvs_external_builder',
-    'msvs_external_builder_out_dir',
-    'msvs_external_builder_build_cmd',
-    'msvs_external_builder_clean_cmd',
-    'msvs_external_builder_clcompile_cmd',
-    'msvs_enable_winrt',
-    'msvs_requires_importlibrary',
-    'msvs_enable_winphone',
-    'msvs_application_type_revision',
-    'msvs_target_platform_version',
-    'msvs_target_platform_minversion',
-]
-
-generator_filelist_paths = None
-
-# List of precompiled header related keys.
-precomp_keys = [
-    'msvs_precompiled_header',
-    'msvs_precompiled_source',
-]
-
-
-cached_username = None
-
-
-cached_domain = None
-
-
-# TODO(gspencer): Switch the os.environ calls to be
-# win32api.GetDomainName() and win32api.GetUserName() once the
-# python version in depot_tools has been updated to work on Vista
-# 64-bit.
-def _GetDomainAndUserName():
-  if sys.platform not in ('win32', 'cygwin'):
-    return ('DOMAIN', 'USERNAME')
-  global cached_username
-  global cached_domain
-  if not cached_domain or not cached_username:
-    domain = os.environ.get('USERDOMAIN')
-    username = os.environ.get('USERNAME')
-    if not domain or not username:
-      call = subprocess.Popen(['net', 'config', 'Workstation'],
-                              stdout=subprocess.PIPE)
-      config = call.communicate()[0]
-      username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
-      username_match = username_re.search(config)
-      if username_match:
-        username = username_match.group(1)
-      domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
-      domain_match = domain_re.search(config)
-      if domain_match:
-        domain = domain_match.group(1)
-    cached_domain = domain
-    cached_username = username
-  return (cached_domain, cached_username)
-
-fixpath_prefix = None
-
-
-def _NormalizedSource(source):
-  """Normalize the path.
-
-  But not if that gets rid of a variable, as this may expand to something
-  larger than one directory.
-
-  Arguments:
-      source: The path to be normalized.
-
-  Returns:
-      The normalized path.
-  """
-  normalized = os.path.normpath(source)
-  if source.count('$') == normalized.count('$'):
-    source = normalized
-  return source
-
-
-def _FixPath(path):
-  """Convert paths to a form that will make sense in a vcproj file.
-
-  Arguments:
-    path: The path to convert, may contain / etc.
-  Returns:
-    The path with all slashes made into backslashes.
-  """
-  if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
-    path = os.path.join(fixpath_prefix, path)
-  path = path.replace('/', '\\')
-  path = _NormalizedSource(path)
-  if path and path[-1] == '\\':
-    path = path[:-1]
-  return path
-
-
-def _FixPaths(paths):
-  """Fix each of the paths of the list."""
-  return [_FixPath(i) for i in paths]
-
-
-def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
-                                     list_excluded=True, msvs_version=None):
-  """Converts a list split source file paths into a vcproj folder hierarchy.
-
-  Arguments:
-    sources: A list of source file paths, each split into path components.
-    prefix: A list of path components to prepend to each of the sources.
-    excluded: A set of excluded files.
-    msvs_version: A MSVSVersion object.
-
-  Returns:
-    A hierarchy of filenames and MSVSProject.Filter objects that matches the
-    layout of the source tree.
-    For example:
-    _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
-                                     prefix=['joe'])
-    -->
-    [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
-     MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
-  """
-  if not prefix: prefix = []
-  result = []
-  excluded_result = []
-  folders = OrderedDict()
-  # Gather files into the final result, excluded, or folders.
-  for s in sources:
-    if len(s) == 1:
-      filename = _NormalizedSource('\\'.join(prefix + s))
-      if filename in excluded:
-        excluded_result.append(filename)
-      else:
-        result.append(filename)
-    elif msvs_version and not msvs_version.UsesVcxproj():
-      # For MSVS 2008 and earlier, we need to process all files before walking
-      # the sub folders.
-      if not folders.get(s[0]):
-        folders[s[0]] = []
-      folders[s[0]].append(s[1:])
-    else:
-      contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
-                                                  excluded=excluded,
-                                                  list_excluded=list_excluded,
-                                                  msvs_version=msvs_version)
-      contents = MSVSProject.Filter(s[0], contents=contents)
-      result.append(contents)
-  # Add a folder for excluded files.
-  if excluded_result and list_excluded:
-    excluded_folder = MSVSProject.Filter('_excluded_files',
-                                         contents=excluded_result)
-    result.append(excluded_folder)
-
-  if msvs_version and msvs_version.UsesVcxproj():
-    return result
-
-  # Populate all the folders.
-  for f in folders:
-    contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
-                                                excluded=excluded,
-                                                list_excluded=list_excluded,
-                                                msvs_version=msvs_version)
-    contents = MSVSProject.Filter(f, contents=contents)
-    result.append(contents)
-  return result
-
-
-def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
-  if not value: return
-  _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
-
-
-def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
-  # TODO(bradnelson): ugly hack, fix this more generally!!!
-  if 'Directories' in setting or 'Dependencies' in setting:
-    if type(value) == str:
-      value = value.replace('/', '\\')
-    else:
-      value = [i.replace('/', '\\') for i in value]
-  if not tools.get(tool_name):
-    tools[tool_name] = dict()
-  tool = tools[tool_name]
-  if 'CompileAsWinRT' == setting:
-    return
-  if tool.get(setting):
-    if only_if_unset: return
-    if type(tool[setting]) == list and type(value) == list:
-      tool[setting] += value
-    else:
-      raise TypeError(
-          'Appending "%s" to a non-list setting "%s" for tool "%s" is '
-          'not allowed, previous value: %s' % (
-              value, setting, tool_name, str(tool[setting])))
-  else:
-    tool[setting] = value
-
-
-def _ConfigPlatform(config_data):
-  return config_data.get('msvs_configuration_platform', 'Win32')
-
-
-def _ConfigBaseName(config_name, platform_name):
-  if config_name.endswith('_' + platform_name):
-    return config_name[0:-len(platform_name) - 1]
-  else:
-    return config_name
-
-
-def _ConfigFullName(config_name, config_data):
-  platform_name = _ConfigPlatform(config_data)
-  return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
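-# Hypothetical example: for a config named 'Debug_x64' whose config_data sets
-# 'msvs_configuration_platform' to 'x64', _ConfigFullName would return
-# 'Debug|x64', since _ConfigBaseName strips the trailing '_x64'.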
-
-
-def _ConfigWindowsTargetPlatformVersion(config_data):
-  ver = config_data.get('msvs_windows_sdk_version')
-
-  for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
-              r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
-    sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
-    if not sdk_dir:
-      continue
-    version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
-    # Find a matching entry in sdk_dir\include.
-    names = sorted([x for x in os.listdir(r'%s\include' % sdk_dir)
-                    if x.startswith(version)], reverse=True)
-    return names[0]
-
-
-def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
-                                quote_cmd, do_setup_env):
-
-  if [x for x in cmd if '$(InputDir)' in x]:
-    input_dir_preamble = (
-      'set INPUTDIR=$(InputDir)\n'
-      'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
-      'set INPUTDIR=%INPUTDIR:~0,-1%\n'
-      )
-  else:
-    input_dir_preamble = ''
-
-  if cygwin_shell:
-    # Find path to cygwin.
-    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
-    # Prepare command.
-    direct_cmd = cmd
-    direct_cmd = [i.replace('$(IntDir)',
-                            '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
-    direct_cmd = [i.replace('$(OutDir)',
-                            '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
-    direct_cmd = [i.replace('$(InputDir)',
-                            '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
-    if has_input_path:
-      direct_cmd = [i.replace('$(InputPath)',
-                              '`cygpath -m "${INPUTPATH}"`')
-                    for i in direct_cmd]
-    direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
-    # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
-    direct_cmd = ' '.join(direct_cmd)
-    # TODO(quote):  regularize quoting path names throughout the module
-    cmd = ''
-    if do_setup_env:
-      cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
-    cmd += 'set CYGWIN=nontsec&& '
-    if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
-      cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
-    if direct_cmd.find('INTDIR') >= 0:
-      cmd += 'set INTDIR=$(IntDir)&& '
-    if direct_cmd.find('OUTDIR') >= 0:
-      cmd += 'set OUTDIR=$(OutDir)&& '
-    if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
-      cmd += 'set INPUTPATH=$(InputPath) && '
-    cmd += 'bash -c "%(cmd)s"'
-    cmd = cmd % {'cygwin_dir': cygwin_dir,
-                 'cmd': direct_cmd}
-    return input_dir_preamble + cmd
-  else:
-    # Convert cat --> type to mimic unix.
-    if cmd[0] == 'cat':
-      command = ['type']
-    else:
-      command = [cmd[0].replace('/', '\\')]
-    # Add call before command to ensure that commands can be tied together one
-    # after the other without aborting in Incredibuild, since IB makes a bat
-    # file out of the raw command string, and some commands (like python) are
-    # actually batch files themselves.
-    command.insert(0, 'call')
-    # Fix the paths
-    # TODO(quote): This is a really ugly heuristic, and will miss path fixing
-    #              for arguments like "--arg=path" or "/opt:path".
-    # If the argument starts with a slash or dash, it's probably a command line
-    # switch
-    arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
-    arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
-    arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
-    if quote_cmd:
-      # Support a mode for using cmd directly.
-      # Convert any paths to native form (first element is used directly).
-      # TODO(quote):  regularize quoting path names throughout the module
-      arguments = ['"%s"' % i for i in arguments]
-    # Collapse into a single command.
-    return input_dir_preamble + ' '.join(command + arguments)
-
-
-def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
-  # Currently this weird argument munging is used to duplicate the way a
-  # python script would need to be run as part of the chrome tree.
-  # Eventually we should add some sort of rule_default option to set this
-  # per project. For now the behavior chrome needs is the default.
-  mcs = rule.get('msvs_cygwin_shell')
-  if mcs is None:
-    mcs = int(spec.get('msvs_cygwin_shell', 1))
-  elif isinstance(mcs, str):
-    mcs = int(mcs)
-  quote_cmd = int(rule.get('msvs_quote_cmd', 1))
-  return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
-                                     quote_cmd, do_setup_env=do_setup_env)
-
-
-def _AddActionStep(actions_dict, inputs, outputs, description, command):
-  """Merge action into an existing list of actions.
-
-  Care must be taken so that actions which have overlapping inputs either don't
-  get assigned to the same input, or get collapsed into one.
-
-  Arguments:
-    actions_dict: dictionary keyed on input name, which maps to a list of
-      dicts describing the actions attached to that input file.
-    inputs: list of inputs
-    outputs: list of outputs
-    description: description of the action
-    command: command line to execute
-  """
-  # Require there to be at least one input (call sites will ensure this).
-  assert inputs
-
-  action = {
-      'inputs': inputs,
-      'outputs': outputs,
-      'description': description,
-      'command': command,
-  }
-
-  # Pick where to stick this action.
-  # While less than optimal in terms of build time, attach them to the first
-  # input for now.
-  chosen_input = inputs[0]
-
-  # Add it there.
-  if chosen_input not in actions_dict:
-    actions_dict[chosen_input] = []
-  actions_dict[chosen_input].append(action)
-
-
-def _AddCustomBuildToolForMSVS(p, spec, primary_input,
-                               inputs, outputs, description, cmd):
-  """Add a custom build tool to execute something.
-
-  Arguments:
-    p: the target project
-    spec: the target project dict
-    primary_input: input file to attach the build tool to
-    inputs: list of inputs
-    outputs: list of outputs
-    description: description of the action
-    cmd: command line to execute
-  """
-  inputs = _FixPaths(inputs)
-  outputs = _FixPaths(outputs)
-  tool = MSVSProject.Tool(
-      'VCCustomBuildTool',
-      {'Description': description,
-       'AdditionalDependencies': ';'.join(inputs),
-       'Outputs': ';'.join(outputs),
-       'CommandLine': cmd,
-      })
-  # Add to the properties of primary input for each config.
-  for config_name, c_data in spec['configurations'].iteritems():
-    p.AddFileConfig(_FixPath(primary_input),
-                    _ConfigFullName(config_name, c_data), tools=[tool])
-
-
-def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
-  """Add actions accumulated into an actions_dict, merging as needed.
-
-  Arguments:
-    p: the target project
-    spec: the target project dict
-    actions_dict: dictionary keyed on input name, which maps to a list of
-        dicts describing the actions attached to that input file.
-  """
-  for primary_input in actions_dict:
-    inputs = OrderedSet()
-    outputs = OrderedSet()
-    descriptions = []
-    commands = []
-    for action in actions_dict[primary_input]:
-      inputs.update(OrderedSet(action['inputs']))
-      outputs.update(OrderedSet(action['outputs']))
-      descriptions.append(action['description'])
-      commands.append(action['command'])
-    # Add the custom build step for one input file.
-    description = ', and also '.join(descriptions)
-    command = '\r\n'.join(commands)
-    _AddCustomBuildToolForMSVS(p, spec,
-                               primary_input=primary_input,
-                               inputs=inputs,
-                               outputs=outputs,
-                               description=description,
-                               cmd=command)
-
-
-def _RuleExpandPath(path, input_file):
-  """Given the input file to which a rule applied, string substitute a path.
-
-  Arguments:
-    path: a path to string expand
-    input_file: the file to which the rule applied.
-  Returns:
-    The string substituted path.
-  """
-  path = path.replace('$(InputName)',
-                      os.path.splitext(os.path.split(input_file)[1])[0])
-  path = path.replace('$(InputDir)', os.path.dirname(input_file))
-  path = path.replace('$(InputExt)',
-                      os.path.splitext(os.path.split(input_file)[1])[1])
-  path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
-  path = path.replace('$(InputPath)', input_file)
-  return path
-
-
-def _FindRuleTriggerFiles(rule, sources):
-  """Find the list of files which a particular rule applies to.
-
-  Arguments:
-    rule: the rule in question
-    sources: the set of all known source files for this project
-  Returns:
-    The list of sources that trigger a particular rule.
-  """
-  return rule.get('rule_sources', [])
-
-
-def _RuleInputsAndOutputs(rule, trigger_file):
-  """Find the inputs and outputs generated by a rule.
-
-  Arguments:
-    rule: the rule in question.
-    trigger_file: the main trigger for this rule.
-  Returns:
-    The pair of (inputs, outputs) involved in this rule.
-  """
-  raw_inputs = _FixPaths(rule.get('inputs', []))
-  raw_outputs = _FixPaths(rule.get('outputs', []))
-  inputs = OrderedSet()
-  outputs = OrderedSet()
-  inputs.add(trigger_file)
-  for i in raw_inputs:
-    inputs.add(_RuleExpandPath(i, trigger_file))
-  for o in raw_outputs:
-    outputs.add(_RuleExpandPath(o, trigger_file))
-  return (inputs, outputs)
-
-
-def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
-  """Generate a native rules file.
-
-  Arguments:
-    p: the target project
-    rules: the set of rules to include
-    output_dir: the directory in which the project/gyp resides
-    spec: the project dict
-    options: global generator options
-  """
-  rules_filename = '%s%s.rules' % (spec['target_name'],
-                                   options.suffix)
-  rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
-                                   spec['target_name'])
-  # Add each rule.
-  for r in rules:
-    rule_name = r['rule_name']
-    rule_ext = r['extension']
-    inputs = _FixPaths(r.get('inputs', []))
-    outputs = _FixPaths(r.get('outputs', []))
-    # Skip a rule with no action and no inputs.
-    if 'action' not in r and not r.get('rule_sources', []):
-      continue
-    cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
-                                   do_setup_env=True)
-    rules_file.AddCustomBuildRule(name=rule_name,
-                                  description=r.get('message', rule_name),
-                                  extensions=[rule_ext],
-                                  additional_dependencies=inputs,
-                                  outputs=outputs,
-                                  cmd=cmd)
-  # Write out rules file.
-  rules_file.WriteIfChanged()
-
-  # Add rules file to project.
-  p.AddToolFile(rules_filename)
-
-
-def _Cygwinify(path):
-  path = path.replace('$(OutDir)', '$(OutDirCygwin)')
-  path = path.replace('$(IntDir)', '$(IntDirCygwin)')
-  return path
-
-
-def _GenerateExternalRules(rules, output_dir, spec,
-                           sources, options, actions_to_add):
-  """Generate an external makefile to do a set of rules.
-
-  Arguments:
-    rules: the list of rules to include
-    output_dir: path containing project and gyp files
-    spec: project specification data
-    sources: set of sources known
-    options: global generator options
-    actions_to_add: The list of actions we will add to.
-  """
-  filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
-  mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
-  # Find cygwin style versions of some paths.
-  mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
-  mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
-  # Gather stuff needed to emit all: target.
-  all_inputs = OrderedSet()
-  all_outputs = OrderedSet()
-  all_output_dirs = OrderedSet()
-  first_outputs = []
-  for rule in rules:
-    trigger_files = _FindRuleTriggerFiles(rule, sources)
-    for tf in trigger_files:
-      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
-      all_inputs.update(OrderedSet(inputs))
-      all_outputs.update(OrderedSet(outputs))
-      # Only use one target from each rule as the dependency for
-      # 'all' so we don't try to build each rule multiple times.
-      first_outputs.append(list(outputs)[0])
-      # Get the unique output directories for this rule.
-      output_dirs = [os.path.split(i)[0] for i in outputs]
-      for od in output_dirs:
-        all_output_dirs.add(od)
-  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
-  # Write out all: target, including mkdir for each output directory.
-  mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
-  for od in all_output_dirs:
-    if od:
-      mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
-  mk_file.write('\n')
-  # Define how each output is generated.
-  for rule in rules:
-    trigger_files = _FindRuleTriggerFiles(rule, sources)
-    for tf in trigger_files:
-      # Get all the inputs and outputs for this rule for this trigger file.
-      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
-      inputs = [_Cygwinify(i) for i in inputs]
-      outputs = [_Cygwinify(i) for i in outputs]
-      # Prepare the command line for this rule.
-      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
-      cmd = ['"%s"' % i for i in cmd]
-      cmd = ' '.join(cmd)
-      # Add it to the makefile.
-      mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
-      mk_file.write('\t%s\n\n' % cmd)
-  # Close up the file.
-  mk_file.close()
-
-  # Add makefile to list of sources.
-  sources.add(filename)
-  # Add a build action to call makefile.
-  cmd = ['make',
-         'OutDir=$(OutDir)',
-         'IntDir=$(IntDir)',
-         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
-         '-f', filename]
-  cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
-  # Insert makefile as 0'th input, so it gets the action attached there,
-  # as this is easier to understand from the IDE.
-  all_inputs = list(all_inputs)
-  all_inputs.insert(0, filename)
-  _AddActionStep(actions_to_add,
-                 inputs=_FixPaths(all_inputs),
-                 outputs=_FixPaths(all_outputs),
-                 description='Running external rules for %s' %
-                     spec['target_name'],
-                 command=cmd)
-
-
-def _EscapeEnvironmentVariableExpansion(s):
-  """Escapes % characters.
-
-  Escapes any % characters so that Windows-style environment variable
-  expansions will leave them alone.
-  See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
-  to understand why we have to do this.
-
-  Args:
-      s: The string to be escaped.
-
-  Returns:
-      The escaped string.
-  """
-  s = s.replace('%', '%%')
-  return s
-
-
-quote_replacer_regex = re.compile(r'(\\*)"')
-
-
-def _EscapeCommandLineArgumentForMSVS(s):
-  """Escapes a Windows command-line argument.
-
-  So that the Win32 CommandLineToArgv function will turn the escaped result back
-  into the original string.
-  See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
-  ("Parsing C++ Command-Line Arguments") to understand why we have to do
-  this.
-
-  Args:
-      s: the string to be escaped.
-  Returns:
-      the escaped string.
-  """
-
-  def _Replace(match):
-    # For a literal quote, CommandLineToArgv requires an odd number of
-    # backslashes preceding it, and it produces half as many literal backslashes
-    # (rounded down). So we need to produce 2n+1 backslashes.
-    return 2 * match.group(1) + '\\"'
-
-  # Escape all quotes so that they are interpreted literally.
-  s = quote_replacer_regex.sub(_Replace, s)
-  # Now add unescaped quotes so that any whitespace is interpreted literally.
-  s = '"' + s + '"'
-  return s
-
-
-delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
-
-
-def _EscapeVCProjCommandLineArgListItem(s):
-  """Escapes command line arguments for MSVS.
-
-  The VCProj format stores string lists in a single string using commas and
-  semi-colons as separators, which must be quoted if they are to be
-  interpreted literally. However, command-line arguments may already have
-  quotes, and the VCProj parser is ignorant of the backslash escaping
-  convention used by CommandLineToArgv, so the command-line quotes and the
-  VCProj quotes may not be the same quotes. So to store a general
-  command-line argument in a VCProj list, we need to parse the existing
-  quoting according to VCProj's convention and quote any delimiters that are
-  not already quoted by that convention. The quotes that we add will also be
-  seen by CommandLineToArgv, so if backslashes precede them then we also have
-  to escape those backslashes according to the CommandLineToArgv
-  convention.
-
-  Args:
-      s: the string to be escaped.
-  Returns:
-      the escaped string.
-  """
-
-  def _Replace(match):
-    # For a non-literal quote, CommandLineToArgv requires an even number of
-    # backslashes preceding it, and it produces half as many literal
-    # backslashes. So we need to produce 2n backslashes.
-    return 2 * match.group(1) + '"' + match.group(2) + '"'
-
-  segments = s.split('"')
-  # The unquoted segments are at the even-numbered indices.
-  for i in range(0, len(segments), 2):
-    segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
-  # Concatenate back into a single string
-  s = '"'.join(segments)
-  if len(segments) % 2 == 0:
-    # String ends while still quoted according to VCProj's convention. This
-    # means the delimiter and the next list item that follow this one in the
-    # .vcproj file will be misinterpreted as part of this item. There is nothing
-    # we can do about this. Adding an extra quote would correct the problem in
-    # the VCProj but cause the same problem on the final command-line. Moving
-    # the item to the end of the list does works, but that's only possible if
-    # there's only one such item. Let's just warn the user.
-    print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
-                          'quotes in ' + s)
-  return s
-
-
-def _EscapeCppDefineForMSVS(s):
-  """Escapes a CPP define so that it will reach the compiler unaltered."""
-  s = _EscapeEnvironmentVariableExpansion(s)
-  s = _EscapeCommandLineArgumentForMSVS(s)
-  s = _EscapeVCProjCommandLineArgListItem(s)
-  # cl.exe replaces literal # characters with = in preprocessor definitions for
-  # some reason. Octal-encode to work around that.
-  s = s.replace('#', '\\%03o' % ord('#'))
-  return s
-
-
-quote_replacer_regex2 = re.compile(r'(\\+)"')
-
-
-def _EscapeCommandLineArgumentForMSBuild(s):
-  """Escapes a Windows command-line argument for use by MSBuild."""
-
-  def _Replace(match):
-    return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
-
-  # Escape all quotes so that they are interpreted literally.
-  s = quote_replacer_regex2.sub(_Replace, s)
-  return s
-
-
-def _EscapeMSBuildSpecialCharacters(s):
-  escape_dictionary = {
-      '%': '%25',
-      '$': '%24',
-      '@': '%40',
-      "'": '%27',
-      ';': '%3B',
-      '?': '%3F',
-      '*': '%2A'
-      }
-  result = ''.join([escape_dictionary.get(c, c) for c in s])
-  return result
-
-
-def _EscapeCppDefineForMSBuild(s):
-  """Escapes a CPP define so that it will reach the compiler unaltered."""
-  s = _EscapeEnvironmentVariableExpansion(s)
-  s = _EscapeCommandLineArgumentForMSBuild(s)
-  s = _EscapeMSBuildSpecialCharacters(s)
-  # cl.exe replaces literal # characters with = in preprocessor definitions for
-  # some reason. Octal-encode to work around that.
-  s = s.replace('#', '\\%03o' % ord('#'))
-  return s
-
-
-def _GenerateRulesForMSVS(p, output_dir, options, spec,
-                          sources, excluded_sources,
-                          actions_to_add):
-  """Generate all the rules for a particular project.
-
-  Arguments:
-    p: the project
-    output_dir: directory to emit rules to
-    options: global options passed to the generator
-    spec: the specification for this project
-    sources: the set of all known source files in this project
-    excluded_sources: the set of sources excluded from normal processing
-    actions_to_add: deferred list of actions to add in
-  """
-  rules = spec.get('rules', [])
-  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
-  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
-  # Handle rules that use a native rules file.
-  if rules_native:
-    _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
-
-  # Handle external rules (non-native rules).
-  if rules_external:
-    _GenerateExternalRules(rules_external, output_dir, spec,
-                           sources, options, actions_to_add)
-  _AdjustSourcesForRules(rules, sources, excluded_sources, False)
-
-
-def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
-  # Add outputs generated by each rule (if applicable).
-  for rule in rules:
-    # Add in the outputs from this rule.
-    trigger_files = _FindRuleTriggerFiles(rule, sources)
-    for trigger_file in trigger_files:
-      # Remove trigger_file from excluded_sources to let the rule be triggered
-      # (e.g. rule trigger ax_enums.idl is added to excluded_sources
-      # because it's also in an action's inputs in the same project)
-      excluded_sources.discard(_FixPath(trigger_file))
-      # Done if not processing outputs as sources.
-      if int(rule.get('process_outputs_as_sources', False)):
-        inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
-        inputs = OrderedSet(_FixPaths(inputs))
-        outputs = OrderedSet(_FixPaths(outputs))
-        inputs.remove(_FixPath(trigger_file))
-        sources.update(inputs)
-        if not is_msbuild:
-          excluded_sources.update(inputs)
-        sources.update(outputs)
-
-
-def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
-  """Take inputs with actions attached out of the list of exclusions.
-
-  Arguments:
-    excluded_sources: list of source files not to be built.
-    actions_to_add: dict of actions keyed on source file they're attached to.
-  Returns:
-    excluded_sources with files that have actions attached removed.
-  """
-  must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
-  return [s for s in excluded_sources if s not in must_keep]
-
-
-def _GetDefaultConfiguration(spec):
-  return spec['configurations'][spec['default_configuration']]
-
-
-def _GetGuidOfProject(proj_path, spec):
-  """Get the guid for the project.
-
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    the guid.
-  Raises:
-    ValueError: if the specified GUID is invalid.
-  """
-  # Pluck out the default configuration.
-  default_config = _GetDefaultConfiguration(spec)
-  # Decide the guid of the project.
-  guid = default_config.get('msvs_guid')
-  if guid:
-    if VALID_MSVS_GUID_CHARS.match(guid) is None:
-      raise ValueError('Invalid MSVS guid: "%s".  Must match regex: "%s".' %
-                       (guid, VALID_MSVS_GUID_CHARS.pattern))
-    guid = '{%s}' % guid
-  guid = guid or MSVSNew.MakeGuid(proj_path)
-  return guid
-
-
-def _GetMsbuildToolsetOfProject(proj_path, spec, version):
-  """Get the platform toolset for the project.
-
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-    version: The MSVSVersion object.
-  Returns:
-    the platform toolset string or None.
-  """
-  # Pluck out the default configuration.
-  default_config = _GetDefaultConfiguration(spec)
-  toolset = default_config.get('msbuild_toolset')
-  if not toolset and version.DefaultToolset():
-    toolset = version.DefaultToolset()
-  return toolset
-
-
-def _GenerateProject(project, options, version, generator_flags):
-  """Generates a vcproj file.
-
-  Arguments:
-    project: the MSVSProject object.
-    options: global generator options.
-    version: the MSVSVersion object.
-    generator_flags: dict of generator-specific flags.
-  Returns:
-    A list of source files that cannot be found on disk.
-  """
-  default_config = _GetDefaultConfiguration(project.spec)
-
-  # Skip emitting anything if told to with msvs_existing_vcproj option.
-  if default_config.get('msvs_existing_vcproj'):
-    return []
-
-  if version.UsesVcxproj():
-    return _GenerateMSBuildProject(project, options, version, generator_flags)
-  else:
-    return _GenerateMSVSProject(project, options, version, generator_flags)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
-def _ValidateSourcesForMSVSProject(spec, version):
-  """Makes sure if duplicate basenames are not specified in the source list.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    version: The VisualStudioVersion object.
-  """
-  # This validation should not be applied to MSVC2010 and later.
-  assert not version.UsesVcxproj()
-
-  # TODO: Check if MSVC allows this for loadable_module targets.
-  if spec.get('type', None) not in ('static_library', 'shared_library'):
-    return
-  sources = spec.get('sources', [])
-  basenames = {}
-  for source in sources:
-    name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
-    if not is_compiled_file:
-      continue
-    basename = os.path.basename(name)  # Don't include extension.
-    basenames.setdefault(basename, []).append(source)
-
-  error = ''
-  for basename, files in basenames.iteritems():
-    if len(files) > 1:
-      error += '  %s: %s\n' % (basename, ' '.join(files))
-
-  if error:
-    print('static library %s has several files with the same basename:\n' %
-          spec['target_name'] + error + 'MSVC08 cannot handle that.')
-    raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def _GenerateMSVSProject(project, options, version, generator_flags):
-  """Generates a .vcproj file.  It may create .rules and .user files too.
-
-  Arguments:
-    project: The project object we will generate the file for.
-    options: Global options passed to the generator.
-    version: The VisualStudioVersion object.
-    generator_flags: dict of generator-specific flags.
-  """
-  spec = project.spec
-  gyp.common.EnsureDirExists(project.path)
-
-  platforms = _GetUniquePlatforms(spec)
-  p = MSVSProject.Writer(project.path, version, spec['target_name'],
-                         project.guid, platforms)
-
-  # Get directory project file is in.
-  project_dir = os.path.split(project.path)[0]
-  gyp_path = _NormalizedSource(project.build_file)
-  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
-  config_type = _GetMSVSConfigurationType(spec, project.build_file)
-  for config_name, config in spec['configurations'].iteritems():
-    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
-
-  # MSVC08 and prior versions cannot handle duplicate basenames in the same
-  # target.
-  # TODO: Take excluded sources into consideration if possible.
-  _ValidateSourcesForMSVSProject(spec, version)
-
-  # Prepare list of sources and excluded sources.
-  gyp_file = os.path.split(project.build_file)[1]
-  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
-                                                    gyp_file)
-
-  # Add rules.
-  actions_to_add = {}
-  _GenerateRulesForMSVS(p, project_dir, options, spec,
-                        sources, excluded_sources,
-                        actions_to_add)
-  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
-  sources, excluded_sources, excluded_idl = (
-      _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
-                                                sources, excluded_sources,
-                                                list_excluded, version))
-
-  # Add in files.
-  missing_sources = _VerifySourcesExist(sources, project_dir)
-  p.AddFiles(sources)
-
-  _AddToolFilesToMSVS(p, spec)
-  _HandlePreCompiledHeaders(p, sources, spec)
-  _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
-  _AddCopies(actions_to_add, spec)
-  _WriteMSVSUserFile(project.path, version, spec)
-
-  # NOTE: this stanza must appear after all actions have been decided.
-  # Don't exclude sources with actions attached, or they won't run.
-  excluded_sources = _FilterActionsFromExcluded(
-      excluded_sources, actions_to_add)
-  _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
-                              list_excluded)
-  _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
-
-  # Write it out.
-  p.WriteIfChanged()
-
-  return missing_sources
-
-
-def _GetUniquePlatforms(spec):
-  """Returns the list of unique platforms for this spec, e.g. ['win32', ...].
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The list of unique platform names used by this target's configurations.
-  """
-  # Gather list of unique platforms.
-  platforms = OrderedSet()
-  for configuration in spec['configurations']:
-    platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
-  platforms = list(platforms)
-  return platforms
-
-
-def _CreateMSVSUserFile(proj_path, version, spec):
-  """Generates a .user file for the user running this Gyp program.
-
-  Arguments:
-    proj_path: The path of the project file being created.  The .user file
-               shares the same path (with an appropriate suffix).
-    version: The VisualStudioVersion object.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The MSVSUserFile object created.
-  """
-  (domain, username) = _GetDomainAndUserName()
-  vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
-  user_file = MSVSUserFile.Writer(vcuser_filename, version,
-                                  spec['target_name'])
-  return user_file
-
-
-def _GetMSVSConfigurationType(spec, build_file):
-  """Returns the configuration type for this project.
-
-  It's a number defined by Microsoft.  May raise an exception.
-
-  Args:
-      spec: The target dictionary containing the properties of the target.
-      build_file: The path of the gyp file.
-  Returns:
-      A string containing the numeric configuration type (e.g. '2').
-  """
-  try:
-    config_type = {
-        'executable': '1',  # .exe
-        'shared_library': '2',  # .dll
-        'loadable_module': '2',  # .dll
-        'static_library': '4',  # .lib
-        'none': '10',  # Utility type
-        }[spec['type']]
-  except KeyError:
-    if spec.get('type'):
-      raise GypError('Target type %s is not a valid target type for '
-                     'target %s in %s.' %
-                     (spec['type'], spec['target_name'], build_file))
-    else:
-      raise GypError('Missing type field for target %s in %s.' %
-                     (spec['target_name'], build_file))
-  return config_type
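-
-# A minimal usage sketch of the mapping above, with a hypothetical spec dict
-# (illustrative only; real specs carry many more keys):
-#
-#   >>> _GetMSVSConfigurationType({'type': 'executable'}, 'foo.gyp')
-#   '1'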
-
-
-def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
-  """Adds a configuration to the MSVS project.
-
-  Many settings in a vcproj file are specific to a configuration.  This
-  function generates the part of the vcproj file that is configuration
-  specific.
-
-  Arguments:
-    p: The target project being generated.
-    spec: The target dictionary containing the properties of the target.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  """
-  # Get the information for this configuration
-  include_dirs, midl_include_dirs, resource_include_dirs = \
-      _GetIncludeDirs(config)
-  libraries = _GetLibraries(spec)
-  library_dirs = _GetLibraryDirs(config)
-  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
-  defines = _GetDefines(config)
-  defines = [_EscapeCppDefineForMSVS(d) for d in defines]
-  disabled_warnings = _GetDisabledWarnings(config)
-  prebuild = config.get('msvs_prebuild')
-  postbuild = config.get('msvs_postbuild')
-  def_file = _GetModuleDefinition(spec)
-  precompiled_header = config.get('msvs_precompiled_header')
-
-  # Prepare the list of tools as a dictionary.
-  tools = dict()
-  # Add in user specified msvs_settings.
-  msvs_settings = config.get('msvs_settings', {})
-  MSVSSettings.ValidateMSVSSettings(msvs_settings)
-
-  # Prevent default library inheritance from the environment.
-  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
-
-  for tool in msvs_settings:
-    settings = config['msvs_settings'][tool]
-    for setting in settings:
-      _ToolAppend(tools, tool, setting, settings[setting])
-  # Add the information to the appropriate tool
-  _ToolAppend(tools, 'VCCLCompilerTool',
-              'AdditionalIncludeDirectories', include_dirs)
-  _ToolAppend(tools, 'VCMIDLTool',
-              'AdditionalIncludeDirectories', midl_include_dirs)
-  _ToolAppend(tools, 'VCResourceCompilerTool',
-              'AdditionalIncludeDirectories', resource_include_dirs)
-  # Add in libraries.
-  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
-  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
-              library_dirs)
-  if out_file:
-    _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
-  # Add defines.
-  _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
-  _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
-              defines)
-  # Change program database directory to prevent collisions.
-  _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
-              '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
-  # Add disabled warnings.
-  _ToolAppend(tools, 'VCCLCompilerTool',
-              'DisableSpecificWarnings', disabled_warnings)
-  # Add Pre-build.
-  _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
-  # Add Post-build.
-  _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
-  # Turn on precompiled headers if appropriate.
-  if precompiled_header:
-    precompiled_header = os.path.split(precompiled_header)[1]
-    _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
-    _ToolAppend(tools, 'VCCLCompilerTool',
-                'PrecompiledHeaderThrough', precompiled_header)
-    _ToolAppend(tools, 'VCCLCompilerTool',
-                'ForcedIncludeFiles', precompiled_header)
-  # Loadable modules don't generate import libraries;
-  # tell dependent projects to not expect one.
-  if spec['type'] == 'loadable_module':
-    _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
-  # Set the module definition file if any.
-  if def_file:
-    _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
-
-  _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
-
-
-def _GetIncludeDirs(config):
-  """Returns the list of directories to be used for #include directives.
-
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of directory paths.
-  """
-  # TODO(bradnelson): include_dirs should really be flexible enough not to
-  #                   require this sort of thing.
-  include_dirs = (
-      config.get('include_dirs', []) +
-      config.get('msvs_system_include_dirs', []))
-  midl_include_dirs = (
-      config.get('midl_include_dirs', []) +
-      config.get('msvs_system_include_dirs', []))
-  resource_include_dirs = config.get('resource_include_dirs', include_dirs)
-  include_dirs = _FixPaths(include_dirs)
-  midl_include_dirs = _FixPaths(midl_include_dirs)
-  resource_include_dirs = _FixPaths(resource_include_dirs)
-  return include_dirs, midl_include_dirs, resource_include_dirs
-
-
-def _GetLibraryDirs(config):
-  """Returns the list of directories to be used for library search paths.
-
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of directory paths.
-  """
-
-  library_dirs = config.get('library_dirs', [])
-  library_dirs = _FixPaths(library_dirs)
-  return library_dirs
-
-
-def _GetLibraries(spec):
-  """Returns the list of libraries for this configuration.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The list of library file names to link against.
-  """
-  libraries = spec.get('libraries', [])
-  # Strip out -l, as it is not used on windows (but is needed so we can pass
-  # in libraries that are assumed to be in the default library path).
-  # Also remove duplicate entries, leaving only the last duplicate, while
-  # preserving order.
-  found = OrderedSet()
-  unique_libraries_list = []
-  for entry in reversed(libraries):
-    library = re.sub(r'^\-l', '', entry)
-    if not os.path.splitext(library)[1]:
-      library += '.lib'
-    if library not in found:
-      found.add(library)
-      unique_libraries_list.append(library)
-  unique_libraries_list.reverse()
-  return unique_libraries_list
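-
-# A minimal sketch of the de-duplication above, with hypothetical library
-# names; the later plain 'kernel32' entry wins over the earlier '-lkernel32'
-# form:
-#
-#   >>> _GetLibraries({'libraries': ['-lkernel32', 'foo.lib', 'kernel32']})
-#   ['foo.lib', 'kernel32.lib']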
-
-
-def _GetOutputFilePathAndTool(spec, msbuild):
-  """Returns the path and tool to use for this target.
-
-  Figures out the path of the file this spec will create and the name of
-  the VC tool that will create it.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    msbuild: True when generating for MSBuild, which changes the default
-             suffix to '$(TargetExt)'.
-  Returns:
-    A triple of (file path, name of the vc tool, name of the msbuild tool)
-  """
-  # Select a name for the output file.
-  out_file = ''
-  vc_tool = ''
-  msbuild_tool = ''
-  output_file_map = {
-      'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
-      'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
-      'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
-      'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
-  }
-  output_file_props = output_file_map.get(spec['type'])
-  if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
-    vc_tool, msbuild_tool, out_dir, suffix = output_file_props
-    if spec.get('standalone_static_library', 0):
-      out_dir = '$(OutDir)'
-    out_dir = spec.get('product_dir', out_dir)
-    product_extension = spec.get('product_extension')
-    if product_extension:
-      suffix = '.' + product_extension
-    elif msbuild:
-      suffix = '$(TargetExt)'
-    prefix = spec.get('product_prefix', '')
-    product_name = spec.get('product_name', '$(ProjectName)')
-    out_file = ntpath.join(out_dir, prefix + product_name + suffix)
-  return out_file, vc_tool, msbuild_tool
-
-
-def _GetOutputTargetExt(spec):
-  """Returns the extension for this target, including the dot.
-
-  If product_extension is specified, the target extension is set to it to
-  avoid MSB8012; otherwise None is returned. Ignores any target_extension
-  settings in the input files.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    A string with the extension, or None
-  """
-  target_extension = spec.get('product_extension')
-  if target_extension:
-    return '.' + target_extension
-  return None
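-
-# Illustrative sketch, assuming a hypothetical spec (only the key read by the
-# function above is shown):
-#
-#   >>> _GetOutputTargetExt({'product_extension': 'node'})
-#   '.node'
-#   >>> _GetOutputTargetExt({}) is None
-#   True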
-
-
-def _GetDefines(config):
-  """Returns the list of preprocessor definitions for this configuration.
-
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of preprocessor definitions.
-  """
-  defines = []
-  for d in config.get('defines', []):
-    if type(d) == list:
-      fd = '='.join([str(dpart) for dpart in d])
-    else:
-      fd = str(d)
-    defines.append(fd)
-  return defines
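-
-# A minimal sketch of the define formatting above (hypothetical values);
-# list-valued entries become NAME=VALUE pairs:
-#
-#   >>> _GetDefines({'defines': ['NDEBUG', ['VERSION', 3]]})
-#   ['NDEBUG', 'VERSION=3']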
-
-
-def _GetDisabledWarnings(config):
-  return [str(i) for i in config.get('msvs_disabled_warnings', [])]
-
-
-def _GetModuleDefinition(spec):
-  def_file = ''
-  if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
-    def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
-    if len(def_files) == 1:
-      def_file = _FixPath(def_files[0])
-    elif def_files:
-      raise ValueError(
-          'Multiple module definition files in one target, target %s lists '
-          'multiple .def files: %s' % (
-              spec['target_name'], ' '.join(def_files)))
-  return def_file
-
-
-def _ConvertToolsToExpectedForm(tools):
-  """Convert tools to a form expected by Visual Studio.
-
-  Arguments:
-    tools: A dictionary of settings; the tool name is the key.
-  Returns:
-    A list of Tool objects.
-  """
-  tool_list = []
-  for tool, settings in tools.iteritems():
-    # Collapse settings with lists.
-    settings_fixed = {}
-    for setting, value in settings.iteritems():
-      if type(value) == list:
-        if ((tool == 'VCLinkerTool' and
-             setting == 'AdditionalDependencies') or
-            setting == 'AdditionalOptions'):
-          settings_fixed[setting] = ' '.join(value)
-        else:
-          settings_fixed[setting] = ';'.join(value)
-      else:
-        settings_fixed[setting] = value
-    # Add in this tool.
-    tool_list.append(MSVSProject.Tool(tool, settings_fixed))
-  return tool_list
-
-
-def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
-  """Add to the project file the configuration specified by config.
-
-  Arguments:
-    p: The target project being generated.
-    spec: the target project dict.
-    tools: A dictionary of settings; the tool name is the key.
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-  """
-  attributes = _GetMSVSAttributes(spec, config, config_type)
-  # Add in this configuration.
-  tool_list = _ConvertToolsToExpectedForm(tools)
-  p.AddConfig(_ConfigFullName(config_name, config),
-              attrs=attributes, tools=tool_list)
-
-
-def _GetMSVSAttributes(spec, config, config_type):
-  # Prepare configuration attributes.
-  prepared_attrs = {}
-  source_attrs = config.get('msvs_configuration_attributes', {})
-  for a in source_attrs:
-    prepared_attrs[a] = source_attrs[a]
-  # Add props files.
-  vsprops_dirs = config.get('msvs_props', [])
-  vsprops_dirs = _FixPaths(vsprops_dirs)
-  if vsprops_dirs:
-    prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
-  # Set configuration type.
-  prepared_attrs['ConfigurationType'] = config_type
-  output_dir = prepared_attrs.get('OutputDirectory',
-                                  '$(SolutionDir)$(ConfigurationName)')
-  prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
-  if 'IntermediateDirectory' not in prepared_attrs:
-    intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
-    prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
-  else:
-    intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
-    intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
-    prepared_attrs['IntermediateDirectory'] = intermediate
-  return prepared_attrs
-
-
-def _AddNormalizedSources(sources_set, sources_array):
-  sources_set.update(_NormalizedSource(s) for s in sources_array)
-
-
-def _PrepareListOfSources(spec, generator_flags, gyp_file):
-  """Prepare list of sources and excluded sources.
-
-  Besides the sources specified directly in the spec, adds the gyp file so
-  that a change to it will cause a re-compile. Also adds appropriate sources
-  for actions and copies. Assumes later stage will un-exclude files which
-  have custom build steps attached.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    gyp_file: The name of the gyp file.
-  Returns:
-    A pair of (list of sources, list of excluded sources).
-    The sources will be relative to the gyp file.
-  """
-  sources = OrderedSet()
-  _AddNormalizedSources(sources, spec.get('sources', []))
-  excluded_sources = OrderedSet()
-  # Add in the gyp file.
-  if not generator_flags.get('standalone'):
-    sources.add(gyp_file)
-
-  # Add in 'action' inputs and outputs.
-  for a in spec.get('actions', []):
-    inputs = a['inputs']
-    inputs = [_NormalizedSource(i) for i in inputs]
-    # Add all inputs to sources and excluded sources.
-    inputs = OrderedSet(inputs)
-    sources.update(inputs)
-    if not spec.get('msvs_external_builder'):
-      excluded_sources.update(inputs)
-    if int(a.get('process_outputs_as_sources', False)):
-      _AddNormalizedSources(sources, a.get('outputs', []))
-  # Add in 'copies' inputs and outputs.
-  for cpy in spec.get('copies', []):
-    _AddNormalizedSources(sources, cpy.get('files', []))
-  return (sources, excluded_sources)
-
-
-def _AdjustSourcesAndConvertToFilterHierarchy(
-    spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
-  """Adjusts the list of sources and excluded sources.
-
-  Also converts the sets to lists.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    options: Global generator options.
-    gyp_dir: The directory of the gyp file being processed.
-    sources: A set of sources to be included for this project.
-    excluded_sources: A set of sources to be excluded for this project.
-    list_excluded: Whether excluded files should be listed in the project.
-    version: A MSVSVersion object.
-  Returns:
-    A trio of (list of sources, list of excluded sources,
-               path of excluded IDL file)
-  """
-  # Exclude excluded sources coming into the generator.
-  excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
-  # Add excluded sources into sources for good measure.
-  sources.update(excluded_sources)
-  # Convert to proper windows form.
-  # NOTE: sources goes from being a set to a list here.
-  # NOTE: excluded_sources goes from being a set to a list here.
-  sources = _FixPaths(sources)
-  # Convert to proper windows form.
-  excluded_sources = _FixPaths(excluded_sources)
-
-  excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
-
-  precompiled_related = _GetPrecompileRelatedFiles(spec)
-  # Find the excluded ones, minus the precompiled header related ones.
-  fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
-
-  # Convert to folders and the right slashes.
-  sources = [i.split('\\') for i in sources]
-  sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
-                                             list_excluded=list_excluded,
-                                             msvs_version=version)
-
-  # Prune filters with a single child to flatten ugly directory structures
-  # such as ../../src/modules/module1 etc.
-  if version.UsesVcxproj():
-    while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
-        and len(set([s.name for s in sources])) == 1:
-      assert all([len(s.contents) == 1 for s in sources])
-      sources = [s.contents[0] for s in sources]
-  else:
-    while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
-      sources = sources[0].contents
-
-  return sources, excluded_sources, excluded_idl
-
-
-def _IdlFilesHandledNonNatively(spec, sources):
-  # If any non-native rules use 'idl' as an extension exclude idl files.
-  # Gather a list here to use later.
-  using_idl = False
-  for rule in spec.get('rules', []):
-    if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
-      using_idl = True
-      break
-  if using_idl:
-    excluded_idl = [i for i in sources if i.endswith('.idl')]
-  else:
-    excluded_idl = []
-  return excluded_idl
-
-
-def _GetPrecompileRelatedFiles(spec):
-  # Gather a list of precompiled header related sources.
-  precompiled_related = []
-  for _, config in spec['configurations'].iteritems():
-    for k in precomp_keys:
-      f = config.get(k)
-      if f:
-        precompiled_related.append(_FixPath(f))
-  return precompiled_related
-
-
-def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
-                                list_excluded):
-  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
-  for file_name, excluded_configs in exclusions.iteritems():
-    if (not list_excluded and
-            len(excluded_configs) == len(spec['configurations'])):
-      # If we're not listing excluded files, then they won't appear in the
-      # project, so don't try to configure them to be excluded.
-      pass
-    else:
-      for config_name, config in excluded_configs:
-        p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
-                        {'ExcludedFromBuild': 'true'})
-
-
-def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
-  exclusions = {}
-  # Exclude excluded sources from being built.
-  for f in excluded_sources:
-    excluded_configs = []
-    for config_name, config in spec['configurations'].iteritems():
-      precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
-      # Don't do this for ones that are precompiled header related.
-      if f not in precomped:
-        excluded_configs.append((config_name, config))
-    exclusions[f] = excluded_configs
-  # If any non-native rules use 'idl' as an extension exclude idl files.
-  # Exclude them now.
-  for f in excluded_idl:
-    excluded_configs = []
-    for config_name, config in spec['configurations'].iteritems():
-      excluded_configs.append((config_name, config))
-    exclusions[f] = excluded_configs
-  return exclusions
-
-
-def _AddToolFilesToMSVS(p, spec):
-  # Add in tool files (rules).
-  tool_files = OrderedSet()
-  for _, config in spec['configurations'].iteritems():
-    for f in config.get('msvs_tool_files', []):
-      tool_files.add(f)
-  for f in tool_files:
-    p.AddToolFile(f)
-
-
-def _HandlePreCompiledHeaders(p, sources, spec):
-  # Pre-compiled header source stubs need a different compiler flag
-  # (generate precompiled header) and any source file not of the same
-  # kind (i.e. C vs. C++) as the precompiled header source stub needs
-  # to have use of precompiled headers disabled.
-  extensions_excluded_from_precompile = []
-  for config_name, config in spec['configurations'].iteritems():
-    source = config.get('msvs_precompiled_source')
-    if source:
-      source = _FixPath(source)
-      # UsePrecompiledHeader=1 marks this stub as the source that generates
-      # the precompiled header.
-      tool = MSVSProject.Tool('VCCLCompilerTool',
-                              {'UsePrecompiledHeader': '1'})
-      p.AddFileConfig(source, _ConfigFullName(config_name, config),
-                      {}, tools=[tool])
-      basename, extension = os.path.splitext(source)
-      if extension == '.c':
-        extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
-      else:
-        extensions_excluded_from_precompile = ['.c']
-  def DisableForSourceTree(source_tree):
-    for source in source_tree:
-      if isinstance(source, MSVSProject.Filter):
-        DisableForSourceTree(source.contents)
-      else:
-        basename, extension = os.path.splitext(source)
-        if extension in extensions_excluded_from_precompile:
-          for config_name, config in spec['configurations'].iteritems():
-            tool = MSVSProject.Tool('VCCLCompilerTool',
-                                    {'UsePrecompiledHeader': '0',
-                                     'ForcedIncludeFiles': '$(NOINHERIT)'})
-            p.AddFileConfig(_FixPath(source),
-                            _ConfigFullName(config_name, config),
-                            {}, tools=[tool])
-  # Do nothing if there was no precompiled source.
-  if extensions_excluded_from_precompile:
-    DisableForSourceTree(sources)
-
-
-def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
-  # Add actions.
-  actions = spec.get('actions', [])
-  # Don't setup_env every time. When all the actions are run together in one
-  # batch file in VS, the PATH will grow too long.
-  # Membership in this set means that the cygwin environment has been set up,
-  # and does not need to be set up again.
-  have_setup_env = set()
-  for a in actions:
-    # Attach actions to the gyp file if nothing else is there.
-    inputs = a.get('inputs') or [relative_path_of_gyp_file]
-    attached_to = inputs[0]
-    need_setup_env = attached_to not in have_setup_env
-    cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
-                                   do_setup_env=need_setup_env)
-    have_setup_env.add(attached_to)
-    # Add the action.
-    _AddActionStep(actions_to_add,
-                   inputs=inputs,
-                   outputs=a.get('outputs', []),
-                   description=a.get('message', a['action_name']),
-                   command=cmd)
-
-
-def _WriteMSVSUserFile(project_path, version, spec):
-  # Add run_as and test targets.
-  if 'run_as' in spec:
-    run_as = spec['run_as']
-    action = run_as.get('action', [])
-    environment = run_as.get('environment', [])
-    working_directory = run_as.get('working_directory', '.')
-  elif int(spec.get('test', 0)):
-    action = ['$(TargetPath)', '--gtest_print_time']
-    environment = []
-    working_directory = '.'
-  else:
-    return  # Nothing to add
-  # Write out the user file.
-  user_file = _CreateMSVSUserFile(project_path, version, spec)
-  for config_name, c_data in spec['configurations'].iteritems():
-    user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
-                               action, environment, working_directory)
-  user_file.WriteIfChanged()
-
-
-def _AddCopies(actions_to_add, spec):
-  copies = _GetCopies(spec)
-  for inputs, outputs, cmd, description in copies:
-    _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
-                   description=description, command=cmd)
-
-
-def _GetCopies(spec):
-  copies = []
-  # Add copies.
-  for cpy in spec.get('copies', []):
-    for src in cpy.get('files', []):
-      dst = os.path.join(cpy['destination'], os.path.basename(src))
-      # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
-      # outputs, so do the same for our generated command line.
-      if src.endswith('/'):
-        src_bare = src[:-1]
-        base_dir = posixpath.split(src_bare)[0]
-        outer_dir = posixpath.split(src_bare)[1]
-        cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
-            _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
-        copies.append(([src], ['dummy_copies', dst], cmd,
-                       'Copying %s to %s' % (src, dst)))
-      else:
-        cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
-            _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
-        copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
-  return copies
-
-
-def _GetPathDict(root, path):
-  # |path| will eventually be empty (in the recursive calls) if it was initially
-  # relative; otherwise it will eventually end up as '\', 'D:\', etc.
-  if not path or path.endswith(os.sep):
-    return root
-  parent, folder = os.path.split(path)
-  parent_dict = _GetPathDict(root, parent)
-  if folder not in parent_dict:
-    parent_dict[folder] = dict()
-  return parent_dict[folder]
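-
-# A small sketch of how the helper above builds the nested path dict
-# (hypothetical path, POSIX-style separators):
-#
-#   >>> root = {}
-#   >>> leaf = _GetPathDict(root, 'src/modules')
-#   >>> root
-#   {'src': {'modules': {}}}
-#   >>> leaf is root['src']['modules']
-#   True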
-
-
-def _DictsToFolders(base_path, bucket, flat):
-  # Convert to folders recursively.
-  children = []
-  for folder, contents in bucket.iteritems():
-    if type(contents) == dict:
-      folder_children = _DictsToFolders(os.path.join(base_path, folder),
-                                        contents, flat)
-      if flat:
-        children += folder_children
-      else:
-        folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
-                                             name='(' + folder + ')',
-                                             entries=folder_children)
-        children.append(folder_children)
-    else:
-      children.append(contents)
-  return children
-
-
-def _CollapseSingles(parent, node):
-  # Recursively explore the tree of dicts looking for projects which are
-  # the sole item in a folder which has the same name as the project. Bring
-  # such projects up one level.
-  if (type(node) == dict and
-      len(node) == 1 and
-      node.keys()[0] == parent + '.vcproj'):
-    return node[node.keys()[0]]
-  if type(node) != dict:
-    return node
-  for child in node:
-    node[child] = _CollapseSingles(child, node[child])
-  return node
-
-
-def _GatherSolutionFolders(sln_projects, project_objects, flat):
-  root = {}
-  # Convert into a tree of dicts on path.
-  for p in sln_projects:
-    gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
-    gyp_dir = os.path.dirname(gyp_file)
-    path_dict = _GetPathDict(root, gyp_dir)
-    path_dict[target + '.vcproj'] = project_objects[p]
-  # Walk down from the top until we hit a folder that has more than one entry.
-  # In practice, this strips the top-level "src/" dir from the hierarchy in
-  # the solution.
-  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
-    root = root[root.keys()[0]]
-  # Collapse singles.
-  root = _CollapseSingles('', root)
-  # Merge buckets until everything is a root entry.
-  return _DictsToFolders('', root, flat)
-
-
-def _GetPathOfProject(qualified_target, spec, options, msvs_version):
-  default_config = _GetDefaultConfiguration(spec)
-  proj_filename = default_config.get('msvs_existing_vcproj')
-  if not proj_filename:
-    proj_filename = (spec['target_name'] + options.suffix +
-                     msvs_version.ProjectExtension())
-
-  build_file = gyp.common.BuildFile(qualified_target)
-  proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
-  fix_prefix = None
-  if options.generator_output:
-    project_dir_path = os.path.dirname(os.path.abspath(proj_path))
-    proj_path = os.path.join(options.generator_output, proj_path)
-    fix_prefix = gyp.common.RelativePath(project_dir_path,
-                                         os.path.dirname(proj_path))
-  return proj_path, fix_prefix
-
-
-def _GetPlatformOverridesOfProject(spec):
-  # Prepare a dict indicating which project configurations are used for which
-  # solution configurations for this target.
-  config_platform_overrides = {}
-  for config_name, c in spec['configurations'].iteritems():
-    config_fullname = _ConfigFullName(config_name, c)
-    platform = c.get('msvs_target_platform', _ConfigPlatform(c))
-    fixed_config_fullname = '%s|%s' % (
-        _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
-    config_platform_overrides[config_fullname] = fixed_config_fullname
-  return config_platform_overrides
-
-
-def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
-  """Create a MSVSProject object for the targets found in target list.
-
-  Arguments:
-    target_list: the list of targets to generate project objects for.
-    target_dicts: the dictionary of specifications.
-    options: global generator options.
-    msvs_version: the MSVSVersion object.
-  Returns:
-    A dict of the created projects, keyed by qualified target.
-  """
-  global fixpath_prefix
-  # Generate each project.
-  projects = {}
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    if spec['toolset'] != 'target':
-      raise GypError(
-          'Multiple toolsets not supported in msvs build (target %s)' %
-          qualified_target)
-    proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
-                                                  options, msvs_version)
-    guid = _GetGuidOfProject(proj_path, spec)
-    overrides = _GetPlatformOverridesOfProject(spec)
-    build_file = gyp.common.BuildFile(qualified_target)
-    # Create object for this project.
-    obj = MSVSNew.MSVSProject(
-        proj_path,
-        name=spec['target_name'],
-        guid=guid,
-        spec=spec,
-        build_file=build_file,
-        config_platform_overrides=overrides,
-        fixpath_prefix=fixpath_prefix)
-    # Set project toolset if any (MS build only)
-    if msvs_version.UsesVcxproj():
-      obj.set_msbuild_toolset(
-          _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
-    projects[qualified_target] = obj
-  # Set all the dependencies, but not if we are using an external builder like
-  # ninja
-  for project in projects.values():
-    if not project.spec.get('msvs_external_builder'):
-      deps = project.spec.get('dependencies', [])
-      deps = [projects[d] for d in deps]
-      project.set_dependencies(deps)
-  return projects
-
-
-def _InitNinjaFlavor(params, target_list, target_dicts):
-  """Initialize targets for the ninja flavor.
-
-  This sets up the necessary variables in the targets to generate msvs projects
-  that use ninja as an external builder. The variables in the spec are only set
-  if they have not been set. This allows individual specs to override the
-  default values initialized here.
-
-  Arguments:
-    params: Params provided to the generator.
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-  """
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    if spec.get('msvs_external_builder'):
-      # The spec explicitly defined an external builder, so don't change it.
-      continue
-
-    path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
-
-    spec['msvs_external_builder'] = 'ninja'
-    if not spec.get('msvs_external_builder_out_dir'):
-      gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
-      gyp_dir = os.path.dirname(gyp_file)
-      configuration = '$(Configuration)'
-      if params.get('target_arch') == 'x64':
-        configuration += '_x64'
-      spec['msvs_external_builder_out_dir'] = os.path.join(
-          gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
-          ninja_generator.ComputeOutputDir(params),
-          configuration)
-    if not spec.get('msvs_external_builder_build_cmd'):
-      spec['msvs_external_builder_build_cmd'] = [
-        path_to_ninja,
-        '-C',
-        '$(OutDir)',
-        '$(ProjectName)',
-      ]
-    if not spec.get('msvs_external_builder_clean_cmd'):
-      spec['msvs_external_builder_clean_cmd'] = [
-        path_to_ninja,
-        '-C',
-        '$(OutDir)',
-        '-tclean',
-        '$(ProjectName)',
-      ]
-
-
-def CalculateVariables(default_variables, params):
-  """Generated variables that require params to be known."""
-
-  generator_flags = params.get('generator_flags', {})
-
-  # Select project file format version (if unset, default to auto detecting).
-  msvs_version = MSVSVersion.SelectVisualStudioVersion(
-      generator_flags.get('msvs_version', 'auto'))
-  # Stash msvs_version for later (so we don't have to probe the system twice).
-  params['msvs_version'] = msvs_version
-
-  # Set a variable so conditions can be based on msvs_version.
-  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
-  # To determine processor word size on Windows, in addition to checking
-  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
-  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-  # contains the actual word size of the system when running through WOW64).
-  if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
-      os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
-    default_variables['MSVS_OS_BITS'] = 64
-  else:
-    default_variables['MSVS_OS_BITS'] = 32
-
-  if gyp.common.GetFlavor(params) == 'ninja':
-    default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-  msvs_version = params['msvs_version']
-  devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
-
-  for build_file, build_file_dict in data.iteritems():
-    (build_file_root, build_file_ext) = os.path.splitext(build_file)
-    if build_file_ext != '.gyp':
-      continue
-    sln_path = build_file_root + options.suffix + '.sln'
-    if options.generator_output:
-      sln_path = os.path.join(options.generator_output, sln_path)
-
-    for config in configurations:
-      arguments = [devenv, sln_path, '/Build', config]
-      print 'Building [%s]: %s' % (config, arguments)
-      subprocess.check_call(arguments)
-
-
-def CalculateGeneratorInputInfo(params):
-  if params.get('flavor') == 'ninja':
-    toplevel = params['options'].toplevel_dir
-    qualified_out_dir = os.path.normpath(os.path.join(
-        toplevel, ninja_generator.ComputeOutputDir(params),
-        'gypfiles-msvs-ninja'))
-
-    global generator_filelist_paths
-    generator_filelist_paths = {
-        'toplevel': toplevel,
-        'qualified_out_dir': qualified_out_dir,
-    }
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  """Generate .sln and .vcproj files.
-
-  This is the entry point for this generator.
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    data: Dictionary containing per .gyp data.
-  """
-  global fixpath_prefix
-
-  options = params['options']
-
-  # Get the project file format version back out of where we stashed it in
-  # GeneratorCalculatedVariables.
-  msvs_version = params['msvs_version']
-
-  generator_flags = params.get('generator_flags', {})
-
-  # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
-  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
-
-  # Optionally use the large PDB workaround for targets marked with
-  # 'msvs_large_pdb': 1.
-  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
-        target_list, target_dicts, generator_default_variables)
-
-  # Optionally configure each spec to use ninja as the external builder.
-  if params.get('flavor') == 'ninja':
-    _InitNinjaFlavor(params, target_list, target_dicts)
-
-  # Prepare the set of configurations.
-  configs = set()
-  for qualified_target in target_list:
-    spec = target_dicts[qualified_target]
-    for config_name, config in spec['configurations'].iteritems():
-      configs.add(_ConfigFullName(config_name, config))
-  configs = list(configs)
-
-  # Figure out all the projects that will be generated and their guids
-  project_objects = _CreateProjectObjects(target_list, target_dicts, options,
-                                          msvs_version)
-
-  # Generate each project.
-  missing_sources = []
-  for project in project_objects.values():
-    fixpath_prefix = project.fixpath_prefix
-    missing_sources.extend(_GenerateProject(project, options, msvs_version,
-                                            generator_flags))
-  fixpath_prefix = None
-
-  for build_file in data:
-    # Validate build_file extension
-    if not build_file.endswith('.gyp'):
-      continue
-    sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
-    if options.generator_output:
-      sln_path = os.path.join(options.generator_output, sln_path)
-    # Get projects in the solution, and their dependents.
-    sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
-    sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
-    # Create folder hierarchy.
-    root_entries = _GatherSolutionFolders(
-        sln_projects, project_objects, flat=msvs_version.FlatSolution())
-    # Create solution.
-    sln = MSVSNew.MSVSSolution(sln_path,
-                               entries=root_entries,
-                               variants=configs,
-                               websiteProperties=False,
-                               version=msvs_version)
-    sln.Write()
-
-  if missing_sources:
-    error_message = "Missing input files:\n" + \
-                    '\n'.join(set(missing_sources))
-    if generator_flags.get('msvs_error_on_missing_sources', False):
-      raise GypError(error_message)
-    else:
-      print >> sys.stdout, "Warning: " + error_message
-
-
-def _GenerateMSBuildFiltersFile(filters_path, source_files,
-                                rule_dependencies, extension_to_rule_name):
-  """Generate the filters file.
-
-  This file is used by Visual Studio to organize the presentation of source
-  files into folders.
-
-  Arguments:
-      filters_path: The path of the file to be created.
-      source_files: The hierarchical structure of all the sources.
-      rule_dependencies: The set of additional dependencies declared by the
-          MSBuild rules.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
-  """
-  filter_group = []
-  source_group = []
-  _AppendFiltersForMSBuild('', source_files, rule_dependencies,
-                           extension_to_rule_name, filter_group, source_group)
-  if filter_group:
-    content = ['Project',
-               {'ToolsVersion': '4.0',
-                'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
-               },
-               ['ItemGroup'] + filter_group,
-               ['ItemGroup'] + source_group
-              ]
-    easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
-  elif os.path.exists(filters_path):
-    # We don't need this filter anymore.  Delete the old filter file.
-    os.unlink(filters_path)
-
-
-def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
-                             extension_to_rule_name,
-                             filter_group, source_group):
-  """Creates the list of filters and sources to be added in the filter file.
-
-  Args:
-      parent_filter_name: The name of the filter under which the sources are
-          found.
-      sources: The hierarchy of filters and sources to process.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
-      filter_group: The list to which filter entries will be appended.
-      source_group: The list to which source entries will be appended.
-  """
-  for source in sources:
-    if isinstance(source, MSVSProject.Filter):
-      # We have a sub-filter.  Create the name of that sub-filter.
-      if not parent_filter_name:
-        filter_name = source.name
-      else:
-        filter_name = '%s\\%s' % (parent_filter_name, source.name)
-      # Add the filter to the group.
-      filter_group.append(
-          ['Filter', {'Include': filter_name},
-           ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
-      # Recurse and add its dependents.
-      _AppendFiltersForMSBuild(filter_name, source.contents,
-                               rule_dependencies, extension_to_rule_name,
-                               filter_group, source_group)
-    else:
-      # It's a source.  Create a source entry.
-      _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
-                                               extension_to_rule_name)
-      source_entry = [element, {'Include': source}]
-      # Specify the filter it is part of, if any.
-      if parent_filter_name:
-        source_entry.append(['Filter', parent_filter_name])
-      source_group.append(source_entry)
-
-
-def _MapFileToMsBuildSourceType(source, rule_dependencies,
-                                extension_to_rule_name):
-  """Returns the group and element type of the source file.
-
-  Arguments:
-      source: The source file name.
-      rule_dependencies: The set of files that are declared as dependencies
-          of MSBuild rules.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
-
-  Returns:
-      A pair of (group this file should be part of, the label of element)
-  """
-  _, ext = os.path.splitext(source)
-  if ext in extension_to_rule_name:
-    group = 'rule'
-    element = extension_to_rule_name[ext]
-  elif ext in ['.cc', '.cpp', '.c', '.cxx']:
-    group = 'compile'
-    element = 'ClCompile'
-  elif ext in ['.h', '.hxx']:
-    group = 'include'
-    element = 'ClInclude'
-  elif ext == '.rc':
-    group = 'resource'
-    element = 'ResourceCompile'
-  elif ext == '.asm':
-    group = 'masm'
-    element = 'MASM'
-  elif ext == '.idl':
-    group = 'midl'
-    element = 'Midl'
-  elif source in rule_dependencies:
-    group = 'rule_dependency'
-    element = 'CustomBuild'
-  else:
-    group = 'none'
-    element = 'None'
-  return (group, element)
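-
-# Illustrative mapping examples (hypothetical file names, no custom rules):
-#
-#   >>> _MapFileToMsBuildSourceType('foo.cc', set(), {})
-#   ('compile', 'ClCompile')
-#   >>> _MapFileToMsBuildSourceType('app.rc', set(), {})
-#   ('resource', 'ResourceCompile')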
-
-
-def _GenerateRulesForMSBuild(output_dir, options, spec,
-                             sources, excluded_sources,
-                             props_files_of_rules, targets_files_of_rules,
-                             actions_to_add, rule_dependencies,
-                             extension_to_rule_name):
-  # MSBuild rules are implemented using three files: an XML file, a .targets
-  # file and a .props file.
-  # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
-  # for more details.
-  rules = spec.get('rules', [])
-  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
-  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
-  msbuild_rules = []
-  for rule in rules_native:
-    # Skip a rule with no action and no inputs.
-    if 'action' not in rule and not rule.get('rule_sources', []):
-      continue
-    msbuild_rule = MSBuildRule(rule, spec)
-    msbuild_rules.append(msbuild_rule)
-    rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
-    extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
-  if msbuild_rules:
-    base = spec['target_name'] + options.suffix
-    props_name = base + '.props'
-    targets_name = base + '.targets'
-    xml_name = base + '.xml'
-
-    props_files_of_rules.add(props_name)
-    targets_files_of_rules.add(targets_name)
-
-    props_path = os.path.join(output_dir, props_name)
-    targets_path = os.path.join(output_dir, targets_name)
-    xml_path = os.path.join(output_dir, xml_name)
-
-    _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
-    _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
-    _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
-
-  if rules_external:
-    _GenerateExternalRules(rules_external, output_dir, spec,
-                           sources, options, actions_to_add)
-  _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
-
-class MSBuildRule(object):
-  """Used to store information used to generate an MSBuild rule.
-
-  Attributes:
-    rule_name: The rule name, sanitized to use in XML.
-    target_name: The name of the target.
-    after_targets: The name of the AfterTargets element.
-    before_targets: The name of the BeforeTargets element.
-    depends_on: The name of the DependsOn element.
-    compute_output: The name of the ComputeOutput element.
-    dirs_to_make: The name of the DirsToMake element.
-    inputs: The name of the _inputs element.
-    tlog: The name of the _tlog element.
-    extension: The extension this rule applies to.
-    description: The message displayed when this rule is invoked.
-    additional_dependencies: A string listing additional dependencies.
-    outputs: The outputs of this rule.
-    command: The command used to run the rule.
-  """
-
-  def __init__(self, rule, spec):
-    self.display_name = rule['rule_name']
-    # Ensure that the rule name contains only letters, digits and underscores.
-    self.rule_name = re.sub(r'\W', '_', self.display_name)
-    # Create the various element names, following the example set by the
-    # Visual Studio 2008 to 2010 conversion.  I don't know if VS2010
-    # is sensitive to the exact names.
-    self.target_name = '_' + self.rule_name
-    self.after_targets = self.rule_name + 'AfterTargets'
-    self.before_targets = self.rule_name + 'BeforeTargets'
-    self.depends_on = self.rule_name + 'DependsOn'
-    self.compute_output = 'Compute%sOutput' % self.rule_name
-    self.dirs_to_make = self.rule_name + 'DirsToMake'
-    self.inputs = self.rule_name + '_inputs'
-    self.tlog = self.rule_name + '_tlog'
-    self.extension = rule['extension']
-    if not self.extension.startswith('.'):
-      self.extension = '.' + self.extension
-
-    self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
-        rule.get('message', self.rule_name))
-    old_additional_dependencies = _FixPaths(rule.get('inputs', []))
-    self.additional_dependencies = (
-        ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
-                  for i in old_additional_dependencies]))
-    old_outputs = _FixPaths(rule.get('outputs', []))
-    self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
-                             for i in old_outputs])
-    old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
-                                           do_setup_env=True)
-    self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
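-
-# A short sketch of the rule-name sanitization above (hypothetical rule name):
-#
-#   >>> re.sub(r'\W', '_', 'copy files (debug)')
-#   'copy_files__debug_'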
-
-
-def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
-  """Generate the .props file."""
-  content = ['Project',
-             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
-  for rule in msbuild_rules:
-    content.extend([
-        ['PropertyGroup',
-         {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
-          "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
-                                                    rule.after_targets)
-         },
-         [rule.before_targets, 'Midl'],
-         [rule.after_targets, 'CustomBuild'],
-        ],
-        ['PropertyGroup',
-         [rule.depends_on,
-          {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
-          '_SelectedFiles;$(%s)' % rule.depends_on
-         ],
-        ],
-        ['ItemDefinitionGroup',
-         [rule.rule_name,
-          ['CommandLineTemplate', rule.command],
-          ['Outputs', rule.outputs],
-          ['ExecutionDescription', rule.description],
-          ['AdditionalDependencies', rule.additional_dependencies],
-         ],
-        ]
-    ])
-  easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
-
-
-def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
-  """Generate the .targets file."""
-  content = ['Project',
-             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
-             }
-            ]
-  item_group = [
-      'ItemGroup',
-      ['PropertyPageSchema',
-       {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
-      ]
-    ]
-  for rule in msbuild_rules:
-    item_group.append(
-        ['AvailableItemName',
-         {'Include': rule.rule_name},
-         ['Targets', rule.target_name],
-        ])
-  content.append(item_group)
-
-  for rule in msbuild_rules:
-    content.append(
-        ['UsingTask',
-         {'TaskName': rule.rule_name,
-          'TaskFactory': 'XamlTaskFactory',
-          'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
-         },
-         ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
-        ])
-  for rule in msbuild_rules:
-    rule_name = rule.rule_name
-    target_outputs = '%%(%s.Outputs)' % rule_name
-    target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
-                     '$(MSBuildProjectFile)') % (rule_name, rule_name)
-    rule_inputs = '%%(%s.Identity)' % rule_name
-    extension_condition = ("'%(Extension)'=='.obj' or "
-                           "'%(Extension)'=='.res' or "
-                           "'%(Extension)'=='.rsc' or "
-                           "'%(Extension)'=='.lib'")
-    remove_section = [
-        'ItemGroup',
-        {'Condition': "'@(SelectedFiles)' != ''"},
-        [rule_name,
-         {'Remove': '@(%s)' % rule_name,
-          'Condition': "'%(Identity)' != '@(SelectedFiles)'"
-         }
-        ]
-    ]
-    inputs_section = [
-        'ItemGroup',
-        [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
-    ]
-    logging_section = [
-        'ItemGroup',
-        [rule.tlog,
-         {'Include': '%%(%s.Outputs)' % rule_name,
-          'Condition': ("'%%(%s.Outputs)' != '' and "
-                        "'%%(%s.ExcludedFromBuild)' != 'true'" %
-                        (rule_name, rule_name))
-         },
-         ['Source', "@(%s, '|')" % rule_name],
-         ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
-        ],
-    ]
-    message_section = [
-        'Message',
-        {'Importance': 'High',
-         'Text': '%%(%s.ExecutionDescription)' % rule_name
-        }
-    ]
-    write_tlog_section = [
-        'WriteLinesToFile',
-        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
-         "'true'" % (rule.tlog, rule.tlog),
-         'File': '$(IntDir)$(ProjectName).write.1.tlog',
-         'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
-                                                            rule.tlog)
-        }
-    ]
-    read_tlog_section = [
-        'WriteLinesToFile',
-        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
-         "'true'" % (rule.tlog, rule.tlog),
-         'File': '$(IntDir)$(ProjectName).read.1.tlog',
-         'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
-        }
-    ]
-    command_and_input_section = [
-        rule_name,
-        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
-         "'true'" % (rule_name, rule_name),
-         'EchoOff': 'true',
-         'StandardOutputImportance': 'High',
-         'StandardErrorImportance': 'High',
-         'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
-         'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
-         'Inputs': rule_inputs
-        }
-    ]
-    content.extend([
-        ['Target',
-         {'Name': rule.target_name,
-          'BeforeTargets': '$(%s)' % rule.before_targets,
-          'AfterTargets': '$(%s)' % rule.after_targets,
-          'Condition': "'@(%s)' != ''" % rule_name,
-          'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
-                                            rule.compute_output),
-          'Outputs': target_outputs,
-          'Inputs': target_inputs
-         },
-         remove_section,
-         inputs_section,
-         logging_section,
-         message_section,
-         write_tlog_section,
-         read_tlog_section,
-         command_and_input_section,
-        ],
-        ['PropertyGroup',
-         ['ComputeLinkInputsTargets',
-          '$(ComputeLinkInputsTargets);',
-          '%s;' % rule.compute_output
-         ],
-         ['ComputeLibInputsTargets',
-          '$(ComputeLibInputsTargets);',
-          '%s;' % rule.compute_output
-         ],
-        ],
-        ['Target',
-         {'Name': rule.compute_output,
-          'Condition': "'@(%s)' != ''" % rule_name
-         },
-         ['ItemGroup',
-          [rule.dirs_to_make,
-           {'Condition': "'@(%s)' != '' and "
-            "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
-            'Include': '%%(%s.Outputs)' % rule_name
-           }
-          ],
-          ['Link',
-           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
-            'Condition': extension_condition
-           }
-          ],
-          ['Lib',
-           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
-            'Condition': extension_condition
-           }
-          ],
-          ['ImpLib',
-           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
-            'Condition': extension_condition
-           }
-          ],
-         ],
-         ['MakeDir',
-          {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
-                           rule.dirs_to_make)
-          }
-         ]
-        ],
-    ])
-  easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
-
-
-def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
-  # Generate the .xml file
-  content = [
-      'ProjectSchemaDefinitions',
-      {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
-                 'assembly=Microsoft.Build.Framework'),
-       'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
-       'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
-       'xmlns:transformCallback':
-       'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
-      }
-  ]
-  for rule in msbuild_rules:
-    content.extend([
-        ['Rule',
-         {'Name': rule.rule_name,
-          'PageTemplate': 'tool',
-          'DisplayName': rule.display_name,
-          'Order': '200'
-         },
-         ['Rule.DataSource',
-          ['DataSource',
-           {'Persistence': 'ProjectFile',
-            'ItemType': rule.rule_name
-           }
-          ]
-         ],
-         ['Rule.Categories',
-          ['Category',
-           {'Name': 'General'},
-           ['Category.DisplayName',
-            ['sys:String', 'General'],
-           ],
-          ],
-          ['Category',
-           {'Name': 'Command Line',
-            'Subtype': 'CommandLine'
-           },
-           ['Category.DisplayName',
-            ['sys:String', 'Command Line'],
-           ],
-          ],
-         ],
-         ['StringListProperty',
-          {'Name': 'Inputs',
-           'Category': 'Command Line',
-           'IsRequired': 'true',
-           'Switch': ' '
-          },
-          ['StringListProperty.DataSource',
-           ['DataSource',
-            {'Persistence': 'ProjectFile',
-             'ItemType': rule.rule_name,
-             'SourceType': 'Item'
-            }
-           ]
-          ],
-         ],
-         ['StringProperty',
-          {'Name': 'CommandLineTemplate',
-           'DisplayName': 'Command Line',
-           'Visible': 'False',
-           'IncludeInCommandLine': 'False'
-          }
-         ],
-         ['DynamicEnumProperty',
-          {'Name': rule.before_targets,
-           'Category': 'General',
-           'EnumProvider': 'Targets',
-           'IncludeInCommandLine': 'False'
-          },
-          ['DynamicEnumProperty.DisplayName',
-           ['sys:String', 'Execute Before'],
-          ],
-          ['DynamicEnumProperty.Description',
-           ['sys:String', 'Specifies the targets for the build customization'
-            ' to run before.'
-           ],
-          ],
-          ['DynamicEnumProperty.ProviderSettings',
-           ['NameValuePair',
-            {'Name': 'Exclude',
-             'Value': '^%s|^Compute' % rule.before_targets
-            }
-           ]
-          ],
-          ['DynamicEnumProperty.DataSource',
-           ['DataSource',
-            {'Persistence': 'ProjectFile',
-             'HasConfigurationCondition': 'true'
-            }
-           ]
-          ],
-         ],
-         ['DynamicEnumProperty',
-          {'Name': rule.after_targets,
-           'Category': 'General',
-           'EnumProvider': 'Targets',
-           'IncludeInCommandLine': 'False'
-          },
-          ['DynamicEnumProperty.DisplayName',
-           ['sys:String', 'Execute After'],
-          ],
-          ['DynamicEnumProperty.Description',
-           ['sys:String', ('Specifies the targets for the build customization'
-                           ' to run after.')
-           ],
-          ],
-          ['DynamicEnumProperty.ProviderSettings',
-           ['NameValuePair',
-            {'Name': 'Exclude',
-             'Value': '^%s|^Compute' % rule.after_targets
-            }
-           ]
-          ],
-          ['DynamicEnumProperty.DataSource',
-           ['DataSource',
-            {'Persistence': 'ProjectFile',
-             'ItemType': '',
-             'HasConfigurationCondition': 'true'
-            }
-           ]
-          ],
-         ],
-         ['StringListProperty',
-          {'Name': 'Outputs',
-           'DisplayName': 'Outputs',
-           'Visible': 'False',
-           'IncludeInCommandLine': 'False'
-          }
-         ],
-         ['StringProperty',
-          {'Name': 'ExecutionDescription',
-           'DisplayName': 'Execution Description',
-           'Visible': 'False',
-           'IncludeInCommandLine': 'False'
-          }
-         ],
-         ['StringListProperty',
-          {'Name': 'AdditionalDependencies',
-           'DisplayName': 'Additional Dependencies',
-           'IncludeInCommandLine': 'False',
-           'Visible': 'false'
-          }
-         ],
-         ['StringProperty',
-          {'Subtype': 'AdditionalOptions',
-           'Name': 'AdditionalOptions',
-           'Category': 'Command Line'
-          },
-          ['StringProperty.DisplayName',
-           ['sys:String', 'Additional Options'],
-          ],
-          ['StringProperty.Description',
-           ['sys:String', 'Additional Options'],
-          ],
-         ],
-        ],
-        ['ItemType',
-         {'Name': rule.rule_name,
-          'DisplayName': rule.display_name
-         }
-        ],
-        ['FileExtension',
-         {'Name': '*' + rule.extension,
-          'ContentType': rule.rule_name
-         }
-        ],
-        ['ContentType',
-         {'Name': rule.rule_name,
-          'DisplayName': '',
-          'ItemType': rule.rule_name
-         }
-        ]
-    ])
-  easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
-
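Every content list handed to easy_xml.WriteXmlIfChanged in this file follows one convention: element name first, an optional attribute dict, then child lists or text. A toy serializer (not the real easy_xml, and ignoring escaping and pretty-printing) makes that shape concrete; the Target/Exec values are illustrative only:

def to_xml(spec):
    # spec = ['Tag', {attrs}?, child-list-or-text...]
    name, rest = spec[0], list(spec[1:])
    attrs = ''
    if rest and isinstance(rest[0], dict):
        attrs = ''.join(' %s="%s"' % kv for kv in sorted(rest[0].items()))
        rest = rest[1:]
    body = ''.join(to_xml(c) if isinstance(c, list) else str(c) for c in rest)
    return '<%s%s>%s</%s>' % (name, attrs, body, name)

print(to_xml(['Target', {'Name': 'Build'},
              ['Exec', {'Command': 'ninja -C out/Debug d8'}]]))
# <Target Name="Build"><Exec Command="ninja -C out/Debug d8"></Exec></Target>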
-
-def _GetConfigurationAndPlatform(name, settings):
-  configuration = name.rsplit('_', 1)[0]
-  platform = settings.get('msvs_configuration_platform', 'Win32')
-  return (configuration, platform)
-
-
-def _GetConfigurationCondition(name, settings):
-  return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
-          _GetConfigurationAndPlatform(name, settings))
-
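The condition string built above scopes a property to one configuration/platform pair. A hypothetical helper mirroring that logic (the platform normally comes from the configuration's msvs_configuration_platform setting):

def condition_for(config_name, platform='Win32'):
    # 'Debug_x64'.rsplit('_', 1)[0] -> 'Debug'
    configuration = config_name.rsplit('_', 1)[0]
    return "'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform)

print(condition_for('Debug_x64', 'x64'))
# '$(Configuration)|$(Platform)'=='Debug|x64'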
-
-def _GetMSBuildProjectConfigurations(configurations):
-  group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
-  for (name, settings) in sorted(configurations.iteritems()):
-    configuration, platform = _GetConfigurationAndPlatform(name, settings)
-    designation = '%s|%s' % (configuration, platform)
-    group.append(
-        ['ProjectConfiguration', {'Include': designation},
-         ['Configuration', configuration],
-         ['Platform', platform]])
-  return [group]
-
-
-def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
-  namespace = os.path.splitext(gyp_file_name)[0]
-  properties = [
-      ['PropertyGroup', {'Label': 'Globals'},
-        ['ProjectGuid', guid],
-        ['Keyword', 'Win32Proj'],
-        ['RootNamespace', namespace],
-        ['IgnoreWarnCompileDuplicatedFilename', 'true'],
-      ]
-    ]
-
-  if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
-     os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
-    properties[0].append(['PreferredToolArchitecture', 'x64'])
-
-  if spec.get('msvs_enable_winrt'):
-    properties[0].append(['DefaultLanguage', 'en-US'])
-    properties[0].append(['AppContainerApplication', 'true'])
-    if spec.get('msvs_application_type_revision'):
-      app_type_revision = spec.get('msvs_application_type_revision')
-      properties[0].append(['ApplicationTypeRevision', app_type_revision])
-    else:
-      properties[0].append(['ApplicationTypeRevision', '8.1'])
-
-    if spec.get('msvs_target_platform_version'):
-      target_platform_version = spec.get('msvs_target_platform_version')
-      properties[0].append(['WindowsTargetPlatformVersion',
-                            target_platform_version])
-      if spec.get('msvs_target_platform_minversion'):
-        target_platform_minversion = spec.get('msvs_target_platform_minversion')
-        properties[0].append(['WindowsTargetPlatformMinVersion',
-                              target_platform_minversion])
-      else:
-        properties[0].append(['WindowsTargetPlatformMinVersion',
-                              target_platform_version])
-    if spec.get('msvs_enable_winphone'):
-      properties[0].append(['ApplicationType', 'Windows Phone'])
-    else:
-      properties[0].append(['ApplicationType', 'Windows Store'])
-
-  platform_name = None
-  msvs_windows_sdk_version = None
-  for configuration in spec['configurations'].itervalues():
-    platform_name = platform_name or _ConfigPlatform(configuration)
-    msvs_windows_sdk_version = (msvs_windows_sdk_version or
-                    _ConfigWindowsTargetPlatformVersion(configuration))
-    if platform_name and msvs_windows_sdk_version:
-      break
-
-  if platform_name == 'ARM':
-    properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
-  if msvs_windows_sdk_version:
-    properties[0].append(['WindowsTargetPlatformVersion',
-                          str(msvs_windows_sdk_version)])
-
-  return properties
-
-def _GetMSBuildConfigurationDetails(spec, build_file):
-  properties = {}
-  for name, settings in spec['configurations'].iteritems():
-    msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
-    condition = _GetConfigurationCondition(name, settings)
-    character_set = msbuild_attributes.get('CharacterSet')
-    _AddConditionalProperty(properties, condition, 'ConfigurationType',
-                            msbuild_attributes['ConfigurationType'])
-    if character_set:
-      if 'msvs_enable_winrt' not in spec :
-        _AddConditionalProperty(properties, condition, 'CharacterSet',
-                                character_set)
-  return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
-
-
-def _GetMSBuildLocalProperties(msbuild_toolset):
-  # Currently the only local property we support is PlatformToolset
-  properties = {}
-  if msbuild_toolset:
-    properties = [
-        ['PropertyGroup', {'Label': 'Locals'},
-          ['PlatformToolset', msbuild_toolset],
-        ]
-      ]
-  return properties
-
-
-def _GetMSBuildPropertySheets(configurations):
-  user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
-  additional_props = {}
-  props_specified = False
-  for name, settings in sorted(configurations.iteritems()):
-    configuration = _GetConfigurationCondition(name, settings)
-    if settings.has_key('msbuild_props'):
-      additional_props[configuration] = _FixPaths(settings['msbuild_props'])
-      props_specified = True
-    else:
-     additional_props[configuration] = ''
-
-  if not props_specified:
-    return [
-        ['ImportGroup',
-         {'Label': 'PropertySheets'},
-         ['Import',
-          {'Project': user_props,
-           'Condition': "exists('%s')" % user_props,
-           'Label': 'LocalAppDataPlatform'
-          }
-         ]
-        ]
-    ]
-  else:
-    sheets = []
-    for condition, props in additional_props.iteritems():
-      import_group = [
-        'ImportGroup',
-        {'Label': 'PropertySheets',
-         'Condition': condition
-        },
-        ['Import',
-         {'Project': user_props,
-          'Condition': "exists('%s')" % user_props,
-          'Label': 'LocalAppDataPlatform'
-         }
-        ]
-      ]
-      for props_file in props:
-        import_group.append(['Import', {'Project':props_file}])
-      sheets.append(import_group)
-    return sheets
-
-def _ConvertMSVSBuildAttributes(spec, config, build_file):
-  config_type = _GetMSVSConfigurationType(spec, build_file)
-  msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
-  msbuild_attributes = {}
-  for a in msvs_attributes:
-    if a in ['IntermediateDirectory', 'OutputDirectory']:
-      directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
-      if not directory.endswith('\\'):
-        directory += '\\'
-      msbuild_attributes[a] = directory
-    elif a == 'CharacterSet':
-      msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
-    elif a == 'ConfigurationType':
-      msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
-    else:
-      print 'Warning: Do not know how to convert MSVS attribute ' + a
-  return msbuild_attributes
-
-
-def _ConvertMSVSCharacterSet(char_set):
-  if char_set.isdigit():
-    char_set = {
-        '0': 'MultiByte',
-        '1': 'Unicode',
-        '2': 'MultiByte',
-    }[char_set]
-  return char_set
-
-
-def _ConvertMSVSConfigurationType(config_type):
-  if config_type.isdigit():
-    config_type = {
-        '1': 'Application',
-        '2': 'DynamicLibrary',
-        '4': 'StaticLibrary',
-        '10': 'Utility'
-    }[config_type]
-  return config_type
-
-
-def _GetMSBuildAttributes(spec, config, build_file):
-  if 'msbuild_configuration_attributes' not in config:
-    msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
-
-  else:
-    config_type = _GetMSVSConfigurationType(spec, build_file)
-    config_type = _ConvertMSVSConfigurationType(config_type)
-    msbuild_attributes = config.get('msbuild_configuration_attributes', {})
-    msbuild_attributes.setdefault('ConfigurationType', config_type)
-    output_dir = msbuild_attributes.get('OutputDirectory',
-                                      '$(SolutionDir)$(Configuration)')
-    msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
-    if 'IntermediateDirectory' not in msbuild_attributes:
-      intermediate = _FixPath('$(Configuration)') + '\\'
-      msbuild_attributes['IntermediateDirectory'] = intermediate
-    if 'CharacterSet' in msbuild_attributes:
-      msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
-          msbuild_attributes['CharacterSet'])
-  if 'TargetName' not in msbuild_attributes:
-    prefix = spec.get('product_prefix', '')
-    product_name = spec.get('product_name', '$(ProjectName)')
-    target_name = prefix + product_name
-    msbuild_attributes['TargetName'] = target_name
-
-  if spec.get('msvs_external_builder'):
-    external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
-    msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
-
-  # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
-  # (depending on the tool used) to avoid MSB8012 warning.
-  msbuild_tool_map = {
-      'executable': 'Link',
-      'shared_library': 'Link',
-      'loadable_module': 'Link',
-      'static_library': 'Lib',
-  }
-  msbuild_tool = msbuild_tool_map.get(spec['type'])
-  if msbuild_tool:
-    msbuild_settings = config['finalized_msbuild_settings']
-    out_file = msbuild_settings[msbuild_tool].get('OutputFile')
-    if out_file:
-      msbuild_attributes['TargetPath'] = _FixPath(out_file)
-    target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
-    if target_ext:
-      msbuild_attributes['TargetExt'] = target_ext
-
-  return msbuild_attributes
-
-
-def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
-  # TODO(jeanluc) We could optimize out the following and do it only if
-  # there are actions.
-  # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
-  new_paths = []
-  cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
-  if cygwin_dirs:
-    cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
-    new_paths.append(cyg_path)
-    # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
-    # python_dir.
-    python_path = cyg_path.replace('cygwin\\bin', 'python_26')
-    new_paths.append(python_path)
-    if new_paths:
-      new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
-
-  properties = {}
-  for (name, configuration) in sorted(configurations.iteritems()):
-    condition = _GetConfigurationCondition(name, configuration)
-    attributes = _GetMSBuildAttributes(spec, configuration, build_file)
-    msbuild_settings = configuration['finalized_msbuild_settings']
-    _AddConditionalProperty(properties, condition, 'IntDir',
-                            attributes['IntermediateDirectory'])
-    _AddConditionalProperty(properties, condition, 'OutDir',
-                            attributes['OutputDirectory'])
-    _AddConditionalProperty(properties, condition, 'TargetName',
-                            attributes['TargetName'])
-
-    if attributes.get('TargetPath'):
-      _AddConditionalProperty(properties, condition, 'TargetPath',
-                              attributes['TargetPath'])
-    if attributes.get('TargetExt'):
-      _AddConditionalProperty(properties, condition, 'TargetExt',
-                              attributes['TargetExt'])
-
-    if new_paths:
-      _AddConditionalProperty(properties, condition, 'ExecutablePath',
-                              new_paths)
-    tool_settings = msbuild_settings.get('', {})
-    for name, value in sorted(tool_settings.iteritems()):
-      formatted_value = _GetValueFormattedForMSBuild('', name, value)
-      _AddConditionalProperty(properties, condition, name, formatted_value)
-  return _GetMSBuildPropertyGroup(spec, None, properties)
-
-
-def _AddConditionalProperty(properties, condition, name, value):
-  """Adds a property / conditional value pair to a dictionary.
-
-  Arguments:
-    properties: The dictionary to be modified.  The key is the name of the
-        property.  The value is itself a dictionary; its key is the value and
-        the value a list of conditions for which this value is true.
-    condition: The condition under which the named property has the value.
-    name: The name of the property.
-    value: The value of the property.
-  """
-  if name not in properties:
-    properties[name] = {}
-  values = properties[name]
-  if value not in values:
-    values[value] = []
-  conditions = values[value]
-  conditions.append(condition)
-
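The structure described in the docstring above is a two-level mapping, name -> value -> list of conditions. A small standalone sketch with made-up values shows how it accumulates and is read back:

from collections import defaultdict

# properties[name][value] is the list of conditions under which 'name' has 'value'.
properties = defaultdict(lambda: defaultdict(list))
properties['OutDir']['build\\Debug\\'].append(
    "'$(Configuration)|$(Platform)'=='Debug|Win32'")
properties['OutDir']['build\\Release\\'].append(
    "'$(Configuration)|$(Platform)'=='Release|Win32'")

for value, conditions in sorted(properties['OutDir'].items()):
    print('%s  when  %s' % (value, ' or '.join(conditions)))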
-
-# Regex for msvs variable references ( i.e. $(FOO) ).
-MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
-
-
-def _GetMSBuildPropertyGroup(spec, label, properties):
-  """Returns a PropertyGroup definition for the specified properties.
-
-  Arguments:
-    spec: The target project dict.
-    label: An optional label for the PropertyGroup.
-    properties: The dictionary to be converted.  The key is the name of the
-        property.  The value is itself a dictionary; its key is the value and
-        the value a list of conditions for which this value is true.
-  """
-  group = ['PropertyGroup']
-  if label:
-    group.append({'Label': label})
-  num_configurations = len(spec['configurations'])
-  def GetEdges(node):
-    # Use a definition of edges such that user_of_variable -> used_variable.
-    # This happens to be easier in this case, since a variable's
-    # definition contains all variables it references in a single string.
-    edges = set()
-    for value in sorted(properties[node].keys()):
-      # Add to edges all $(...) references to variables.
-      #
-      # Variable references that refer to names not in properties are excluded.
-      # These can exist, for instance, to refer to built-in definitions like
-      # $(SolutionDir).
-      #
-      # Self references are ignored. Self reference is used in a few places to
-      # append to the default value. I.e. PATH=$(PATH);other_path
-      edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
-                        if v in properties and v != node]))
-    return edges
-  properties_ordered = gyp.common.TopologicallySorted(
-      properties.keys(), GetEdges)
-  # Walk properties in the reverse of a topological sort on
-  # user_of_variable -> used_variable as this ensures variables are
-  # defined before they are used.
-  # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
-  for name in reversed(properties_ordered):
-    values = properties[name]
-    for value, conditions in sorted(values.iteritems()):
-      if len(conditions) == num_configurations:
-        # If the value is the same for all configurations,
-        # just add one unconditional entry.
-        group.append([name, value])
-      else:
-        for condition in conditions:
-          group.append([name, {'Condition': condition}, value])
-  return [group]
-
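The ordering requirement above (define a variable before anything that references it via $(...)) can be shown with toy data and a plain depth-first emit, rather than gyp.common.TopologicallySorted plus reversed(); the property names and values here are invented:

import re

VAR_REF = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
definitions = {
    'OutDir': '$(SolutionDir)$(Configuration)\\',
    'TargetPath': '$(OutDir)$(TargetName).exe',
    'TargetName': 'd8',
}

emitted, order = set(), []

def emit(name):
    # Define everything this value references before the value itself.
    for used in VAR_REF.findall(definitions[name]):
        if used in definitions and used != name and used not in emitted:
            emit(used)
    if name not in emitted:
        emitted.add(name)
        order.append(name)

for name in definitions:
    emit(name)
print(order)   # ['OutDir', 'TargetName', 'TargetPath'] -- used before user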
-
-def _GetMSBuildToolSettingsSections(spec, configurations):
-  groups = []
-  for (name, configuration) in sorted(configurations.iteritems()):
-    msbuild_settings = configuration['finalized_msbuild_settings']
-    group = ['ItemDefinitionGroup',
-             {'Condition': _GetConfigurationCondition(name, configuration)}
-            ]
-    for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
-      # Skip the tool named '' which is a holder of global settings handled
-      # by _GetMSBuildConfigurationGlobalProperties.
-      if tool_name:
-        if tool_settings:
-          tool = [tool_name]
-          for name, value in sorted(tool_settings.iteritems()):
-            formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
-                                                           value)
-            tool.append([name, formatted_value])
-          group.append(tool)
-    groups.append(group)
-  return groups
-
-
-def _FinalizeMSBuildSettings(spec, configuration):
-  if 'msbuild_settings' in configuration:
-    converted = False
-    msbuild_settings = configuration['msbuild_settings']
-    MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
-  else:
-    converted = True
-    msvs_settings = configuration.get('msvs_settings', {})
-    msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
-  include_dirs, midl_include_dirs, resource_include_dirs = \
-      _GetIncludeDirs(configuration)
-  libraries = _GetLibraries(spec)
-  library_dirs = _GetLibraryDirs(configuration)
-  out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
-  target_ext = _GetOutputTargetExt(spec)
-  defines = _GetDefines(configuration)
-  if converted:
-    # Visual Studio 2010 has TR1
-    defines = [d for d in defines if d != '_HAS_TR1=0']
-    # Warn of ignored settings
-    ignored_settings = ['msvs_tool_files']
-    for ignored_setting in ignored_settings:
-      value = configuration.get(ignored_setting)
-      if value:
-        print ('Warning: The automatic conversion to MSBuild does not handle '
-               '%s.  Ignoring setting of %s' % (ignored_setting, str(value)))
-
-  defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
-  disabled_warnings = _GetDisabledWarnings(configuration)
-  prebuild = configuration.get('msvs_prebuild')
-  postbuild = configuration.get('msvs_postbuild')
-  def_file = _GetModuleDefinition(spec)
-  precompiled_header = configuration.get('msvs_precompiled_header')
-
-  # Add the information to the appropriate tool
-  # TODO(jeanluc) We could optimize and generate these settings only if
-  # the corresponding files are found, e.g. don't generate ResourceCompile
-  # if you don't have any resources.
-  _ToolAppend(msbuild_settings, 'ClCompile',
-              'AdditionalIncludeDirectories', include_dirs)
-  _ToolAppend(msbuild_settings, 'Midl',
-              'AdditionalIncludeDirectories', midl_include_dirs)
-  _ToolAppend(msbuild_settings, 'ResourceCompile',
-              'AdditionalIncludeDirectories', resource_include_dirs)
-  # Add in libraries; note that even for empty libraries, we want this
-  # set, to prevent inheriting default libraries from the environment.
-  _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
-                  libraries)
-  _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
-              library_dirs)
-  if out_file:
-    _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
-                only_if_unset=True)
-  if target_ext:
-    _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
-                only_if_unset=True)
-  # Add defines.
-  _ToolAppend(msbuild_settings, 'ClCompile',
-              'PreprocessorDefinitions', defines)
-  _ToolAppend(msbuild_settings, 'ResourceCompile',
-              'PreprocessorDefinitions', defines)
-  # Add disabled warnings.
-  _ToolAppend(msbuild_settings, 'ClCompile',
-              'DisableSpecificWarnings', disabled_warnings)
-  # Turn on precompiled headers if appropriate.
-  if precompiled_header:
-    precompiled_header = os.path.split(precompiled_header)[1]
-    _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
-    _ToolAppend(msbuild_settings, 'ClCompile',
-                'PrecompiledHeaderFile', precompiled_header)
-    _ToolAppend(msbuild_settings, 'ClCompile',
-                'ForcedIncludeFiles', [precompiled_header])
-  else:
-    _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
-  # Turn off WinRT compilation
-  _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
-  # Turn on import libraries if appropriate
-  if spec.get('msvs_requires_importlibrary'):
-   _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
-  # Loadable modules don't generate import libraries;
-  # tell dependent projects to not expect one.
-  if spec['type'] == 'loadable_module':
-    _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
-  # Set the module definition file if any.
-  if def_file:
-    _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
-  configuration['finalized_msbuild_settings'] = msbuild_settings
-  if prebuild:
-    _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
-  if postbuild:
-    _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
-
-
-def _GetValueFormattedForMSBuild(tool_name, name, value):
-  if type(value) == list:
-    # For some settings, VS2010 does not automatically extend the settings.
-    # TODO(jeanluc) Is this what we want?
-    if name in ['AdditionalIncludeDirectories',
-                'AdditionalLibraryDirectories',
-                'AdditionalOptions',
-                'DelayLoadDLLs',
-                'DisableSpecificWarnings',
-                'PreprocessorDefinitions']:
-      value.append('%%(%s)' % name)
-    # For most tools, entries in a list should be separated with ';' but some
-    # settings use a space.  Check for those first.
-    exceptions = {
-        'ClCompile': ['AdditionalOptions'],
-        'Link': ['AdditionalOptions'],
-        'Lib': ['AdditionalOptions']}
-    if tool_name in exceptions and name in exceptions[tool_name]:
-      char = ' '
-    else:
-      char = ';'
-    formatted_value = char.join(
-        [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
-  else:
-    formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
-  return formatted_value
-
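The two separator rules and the '%(Name)' inherit marker handled above, shown on made-up values:

defines = ['V8_TARGET_ARCH_X64', 'NDEBUG', '%(PreprocessorDefinitions)']
print(';'.join(defines))
# V8_TARGET_ARCH_X64;NDEBUG;%(PreprocessorDefinitions)   <- ';'-separated, keeps MSBuild defaults

extra_options = ['/bigobj', '/WX-']
print(' '.join(extra_options))
# /bigobj /WX-   <- AdditionalOptions on ClCompile/Link/Lib is space-separated instead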
-
-def _VerifySourcesExist(sources, root_dir):
-  """Verifies that all source files exist on disk.
-
-  Checks that all regular source files, i.e. not created at run time,
-  exist on disk.  Missing files cause needless recompilation but no other
-  visible errors.
-
-  Arguments:
-    sources: A recursive list of Filter/file names.
-    root_dir: The root directory for the relative path names.
-  Returns:
-    A list of source files that cannot be found on disk.
-  """
-  missing_sources = []
-  for source in sources:
-    if isinstance(source, MSVSProject.Filter):
-      missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
-    else:
-      if '$' not in source:
-        full_path = os.path.join(root_dir, source)
-        if not os.path.exists(full_path):
-          missing_sources.append(full_path)
-  return missing_sources
-
-
-def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
-                       extension_to_rule_name, actions_spec,
-                       sources_handled_by_action, list_excluded):
-  groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
-            'rule_dependency']
-  grouped_sources = {}
-  for g in groups:
-    grouped_sources[g] = []
-
-  _AddSources2(spec, sources, exclusions, grouped_sources,
-               rule_dependencies, extension_to_rule_name,
-               sources_handled_by_action, list_excluded)
-  sources = []
-  for g in groups:
-    if grouped_sources[g]:
-      sources.append(['ItemGroup'] + grouped_sources[g])
-  if actions_spec:
-    sources.append(['ItemGroup'] + actions_spec)
-  return sources
-
-
-def _AddSources2(spec, sources, exclusions, grouped_sources,
-                 rule_dependencies, extension_to_rule_name,
-                 sources_handled_by_action,
-                 list_excluded):
-  extensions_excluded_from_precompile = []
-  for source in sources:
-    if isinstance(source, MSVSProject.Filter):
-      _AddSources2(spec, source.contents, exclusions, grouped_sources,
-                   rule_dependencies, extension_to_rule_name,
-                   sources_handled_by_action,
-                   list_excluded)
-    else:
-      if not source in sources_handled_by_action:
-        detail = []
-        excluded_configurations = exclusions.get(source, [])
-        if len(excluded_configurations) == len(spec['configurations']):
-          detail.append(['ExcludedFromBuild', 'true'])
-        else:
-          for config_name, configuration in sorted(excluded_configurations):
-            condition = _GetConfigurationCondition(config_name, configuration)
-            detail.append(['ExcludedFromBuild',
-                           {'Condition': condition},
-                           'true'])
-        # Add precompile if needed
-        for config_name, configuration in spec['configurations'].iteritems():
-          precompiled_source = configuration.get('msvs_precompiled_source', '')
-          if precompiled_source != '':
-            precompiled_source = _FixPath(precompiled_source)
-            if not extensions_excluded_from_precompile:
-              # If the precompiled header is generated by a C source, we must
-              # not try to use it for C++ sources, and vice versa.
-              basename, extension = os.path.splitext(precompiled_source)
-              if extension == '.c':
-                extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
-              else:
-                extensions_excluded_from_precompile = ['.c']
-
-          if precompiled_source == source:
-            condition = _GetConfigurationCondition(config_name, configuration)
-            detail.append(['PrecompiledHeader',
-                           {'Condition': condition},
-                           'Create'
-                          ])
-          else:
-            # Turn off precompiled header usage for source files of a
-            # different type than the file that generated the
-            # precompiled header.
-            for extension in extensions_excluded_from_precompile:
-              if source.endswith(extension):
-                detail.append(['PrecompiledHeader', ''])
-                detail.append(['ForcedIncludeFiles', ''])
-
-        group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
-                                                     extension_to_rule_name)
-        grouped_sources[group].append([element, {'Include': source}] + detail)
-
-
-def _GetMSBuildProjectReferences(project):
-  references = []
-  if project.dependencies:
-    group = ['ItemGroup']
-    for dependency in project.dependencies:
-      guid = dependency.guid
-      project_dir = os.path.split(project.path)[0]
-      relative_path = gyp.common.RelativePath(dependency.path, project_dir)
-      project_ref = ['ProjectReference',
-          {'Include': relative_path},
-          ['Project', guid],
-          ['ReferenceOutputAssembly', 'false']
-          ]
-      for config in dependency.spec.get('configurations', {}).itervalues():
-        # If it's disabled in any config, turn it off in the reference.
-        if config.get('msvs_2010_disable_uldi_when_referenced', 0):
-          project_ref.append(['UseLibraryDependencyInputs', 'false'])
-          break
-      group.append(project_ref)
-    references.append(group)
-  return references
-
-
-def _GenerateMSBuildProject(project, options, version, generator_flags):
-  spec = project.spec
-  configurations = spec['configurations']
-  project_dir, project_file_name = os.path.split(project.path)
-  gyp.common.EnsureDirExists(project.path)
-  # Prepare list of sources and excluded sources.
-  gyp_path = _NormalizedSource(project.build_file)
-  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
-  gyp_file = os.path.split(project.build_file)[1]
-  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
-                                                    gyp_file)
-  # Add rules.
-  actions_to_add = {}
-  props_files_of_rules = set()
-  targets_files_of_rules = set()
-  rule_dependencies = set()
-  extension_to_rule_name = {}
-  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
-
-  # Don't generate rules if we are using an external builder like ninja.
-  if not spec.get('msvs_external_builder'):
-    _GenerateRulesForMSBuild(project_dir, options, spec,
-                             sources, excluded_sources,
-                             props_files_of_rules, targets_files_of_rules,
-                             actions_to_add, rule_dependencies,
-                             extension_to_rule_name)
-  else:
-    rules = spec.get('rules', [])
-    _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
-  sources, excluded_sources, excluded_idl = (
-      _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
-                                                project_dir, sources,
-                                                excluded_sources,
-                                                list_excluded, version))
-
-  # Don't add actions if we are using an external builder like ninja.
-  if not spec.get('msvs_external_builder'):
-    _AddActions(actions_to_add, spec, project.build_file)
-    _AddCopies(actions_to_add, spec)
-
-    # NOTE: this stanza must appear after all actions have been decided.
-    # Don't exclude sources with actions attached, or they won't run.
-    excluded_sources = _FilterActionsFromExcluded(
-        excluded_sources, actions_to_add)
-
-  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
-  actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
-      spec, actions_to_add)
-
-  _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
-                              rule_dependencies,
-                              extension_to_rule_name)
-  missing_sources = _VerifySourcesExist(sources, project_dir)
-
-  for configuration in configurations.itervalues():
-    _FinalizeMSBuildSettings(spec, configuration)
-
-  # Add attributes to root element
-
-  import_default_section = [
-      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
-  import_cpp_props_section = [
-      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
-  import_cpp_targets_section = [
-      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
-  import_masm_props_section = [
-      ['Import',
-        {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
-  import_masm_targets_section = [
-      ['Import',
-        {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
-  macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
-
-  content = [
-      'Project',
-      {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
-       'ToolsVersion': version.ProjectVersion(),
-       'DefaultTargets': 'Build'
-      }]
-
-  content += _GetMSBuildProjectConfigurations(configurations)
-  content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
-  content += import_default_section
-  content += _GetMSBuildConfigurationDetails(spec, project.build_file)
-  if spec.get('msvs_enable_winphone'):
-   content += _GetMSBuildLocalProperties('v120_wp81')
-  else:
-   content += _GetMSBuildLocalProperties(project.msbuild_toolset)
-  content += import_cpp_props_section
-  content += import_masm_props_section
-  content += _GetMSBuildExtensions(props_files_of_rules)
-  content += _GetMSBuildPropertySheets(configurations)
-  content += macro_section
-  content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
-                                                      project.build_file)
-  content += _GetMSBuildToolSettingsSections(spec, configurations)
-  content += _GetMSBuildSources(
-      spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
-      actions_spec, sources_handled_by_action, list_excluded)
-  content += _GetMSBuildProjectReferences(project)
-  content += import_cpp_targets_section
-  content += import_masm_targets_section
-  content += _GetMSBuildExtensionTargets(targets_files_of_rules)
-
-  if spec.get('msvs_external_builder'):
-    content += _GetMSBuildExternalBuilderTargets(spec)
-
-  # TODO(jeanluc) File a bug to get rid of runas.  We had in MSVS:
-  # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
-
-  easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
-
-  return missing_sources
-
-
-def _GetMSBuildExternalBuilderTargets(spec):
-  """Return a list of MSBuild targets for external builders.
-
-  The "Build" and "Clean" targets are always generated.  If the spec contains
-  'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
-  be generated, to support building selected C/C++ files.
-
-  Arguments:
-    spec: The gyp target spec.
-  Returns:
-    List of MSBuild 'Target' specs.
-  """
-  build_cmd = _BuildCommandLineForRuleRaw(
-      spec, spec['msvs_external_builder_build_cmd'],
-      False, False, False, False)
-  build_target = ['Target', {'Name': 'Build'}]
-  build_target.append(['Exec', {'Command': build_cmd}])
-
-  clean_cmd = _BuildCommandLineForRuleRaw(
-      spec, spec['msvs_external_builder_clean_cmd'],
-      False, False, False, False)
-  clean_target = ['Target', {'Name': 'Clean'}]
-  clean_target.append(['Exec', {'Command': clean_cmd}])
-
-  targets = [build_target, clean_target]
-
-  if spec.get('msvs_external_builder_clcompile_cmd'):
-    clcompile_cmd = _BuildCommandLineForRuleRaw(
-        spec, spec['msvs_external_builder_clcompile_cmd'],
-        False, False, False, False)
-    clcompile_target = ['Target', {'Name': 'ClCompile'}]
-    clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
-    targets.append(clcompile_target)
-
-  return targets
-
-
-def _GetMSBuildExtensions(props_files_of_rules):
-  extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
-  for props_file in props_files_of_rules:
-    extensions.append(['Import', {'Project': props_file}])
-  return [extensions]
-
-
-def _GetMSBuildExtensionTargets(targets_files_of_rules):
-  targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
-  for targets_file in sorted(targets_files_of_rules):
-    targets_node.append(['Import', {'Project': targets_file}])
-  return [targets_node]
-
-
-def _GenerateActionsForMSBuild(spec, actions_to_add):
-  """Add actions accumulated into an actions_to_add, merging as needed.
-
-  Arguments:
-    spec: the target project dict
-    actions_to_add: dictionary keyed on input name, which maps to a list of
-        dicts describing the actions attached to that input file.
-
-  Returns:
-    A pair of (action specification, the sources handled by this action).
-  """
-  sources_handled_by_action = OrderedSet()
-  actions_spec = []
-  for primary_input, actions in actions_to_add.iteritems():
-    inputs = OrderedSet()
-    outputs = OrderedSet()
-    descriptions = []
-    commands = []
-    for action in actions:
-      inputs.update(OrderedSet(action['inputs']))
-      outputs.update(OrderedSet(action['outputs']))
-      descriptions.append(action['description'])
-      cmd = action['command']
-      # For most actions, add 'call' so that actions that invoke batch files
-      # return and continue executing.  msbuild_use_call provides a way to
-      # disable this but I have not seen any adverse effect from doing that
-      # for everything.
-      if action.get('msbuild_use_call', True):
-        cmd = 'call ' + cmd
-      commands.append(cmd)
-    # Add the custom build action for one input file.
-    description = ', and also '.join(descriptions)
-
-    # We can't join the commands simply with && because the command line will
-    # get too long. See also _AddActions: cygwin's setup_env mustn't be called
-    # for every invocation or the command that sets the PATH will grow too
-    # long.
-    command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
-                           for c in commands])
-    _AddMSBuildAction(spec,
-                      primary_input,
-                      inputs,
-                      outputs,
-                      command,
-                      description,
-                      sources_handled_by_action,
-                      actions_spec)
-  return actions_spec, sources_handled_by_action
-
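The joining scheme in the comment above is easiest to see on a concrete pair of commands; the two script names here are made up:

commands = ['call python gen_sources.py', 'call python gen_headers.py']
# Each step is followed by an errorlevel check so a failing command aborts the
# whole CustomBuild instead of silently continuing to the next one.
command = '\r\n'.join(c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
                      for c in commands)
print(command.replace('\r\n', '\n'))   # printable preview of the batch body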
-
-def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
-                      sources_handled_by_action, actions_spec):
-  command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
-  primary_input = _FixPath(primary_input)
-  inputs_array = _FixPaths(inputs)
-  outputs_array = _FixPaths(outputs)
-  additional_inputs = ';'.join([i for i in inputs_array
-                                if i != primary_input])
-  outputs = ';'.join(outputs_array)
-  sources_handled_by_action.add(primary_input)
-  action_spec = ['CustomBuild', {'Include': primary_input}]
-  action_spec.extend(
-      # TODO(jeanluc) 'Document' for all or just if as_sources?
-      [['FileType', 'Document'],
-       ['Command', command],
-       ['Message', description],
-       ['Outputs', outputs]
-      ])
-  if additional_inputs:
-    action_spec.append(['AdditionalInputs', additional_inputs])
-  actions_spec.append(action_spec)
diff --git a/tools/gyp/pylib/gyp/generator/msvs_test.py b/tools/gyp/pylib/gyp/generator/msvs_test.py
deleted file mode 100755
index c0b021d..0000000
--- a/tools/gyp/pylib/gyp/generator/msvs_test.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the msvs.py file. """
-
-import gyp.generator.msvs as msvs
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
-  def setUp(self):
-    self.stderr = StringIO.StringIO()
-
-  def test_GetLibraries(self):
-    self.assertEqual(
-      msvs._GetLibraries({}),
-      [])
-    self.assertEqual(
-      msvs._GetLibraries({'libraries': []}),
-      [])
-    self.assertEqual(
-      msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
-      ['a.lib'])
-    self.assertEqual(
-      msvs._GetLibraries({'libraries': ['-la']}),
-      ['a.lib'])
-    self.assertEqual(
-      msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
-                                   '-lb.lib', 'd.lib', 'a.lib']}),
-      ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py
deleted file mode 100644
index 9cfc706..0000000
--- a/tools/gyp/pylib/gyp/generator/ninja.py
+++ /dev/null
@@ -1,2476 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import copy
-import hashlib
-import json
-import multiprocessing
-import os.path
-import re
-import signal
-import subprocess
-import sys
-import gyp
-import gyp.common
-from gyp.common import OrderedSet
-import gyp.msvs_emulation
-import gyp.MSVSUtil as MSVSUtil
-import gyp.xcode_emulation
-from cStringIO import StringIO
-
-from gyp.common import GetEnvironFallback
-import gyp.ninja_syntax as ninja_syntax
-
-generator_default_variables = {
-  'EXECUTABLE_PREFIX': '',
-  'EXECUTABLE_SUFFIX': '',
-  'STATIC_LIB_PREFIX': 'lib',
-  'STATIC_LIB_SUFFIX': '.a',
-  'SHARED_LIB_PREFIX': 'lib',
-
-  # Gyp expects the following variables to be expandable by the build
-  # system to the appropriate locations.  Ninja prefers paths to be
-  # known at gyp time.  To resolve this, introduce special
-  # variables starting with $! and $| (which begin with a $ so gyp knows they
-  # should be treated specially, but are otherwise invalid
-  # ninja/shell variables) that are passed to gyp here but expanded
-  # before writing out into the target .ninja files; see
-  # ExpandSpecial.
-  # $! is used for variables that represent a path and that can only appear at
-  # the start of a string, while $| is used for variables that can appear
-  # anywhere in a string.
-  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
-  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
-  'PRODUCT_DIR': '$!PRODUCT_DIR',
-  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
-
-  # Special variables that may be used by gyp 'rule' targets.
-  # We generate definitions for these variables on the fly when processing a
-  # rule.
-  'RULE_INPUT_ROOT': '${root}',
-  'RULE_INPUT_DIRNAME': '${dirname}',
-  'RULE_INPUT_PATH': '${source}',
-  'RULE_INPUT_EXT': '${ext}',
-  'RULE_INPUT_NAME': '${name}',
-}
-
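A rough preview of what the $! and $| placeholders turn into, assuming the build directory is the current working directory and the active configuration is Debug; the path is illustrative, and ExpandSpecial further down also handles backslashes and an explicit product_dir:

path = '$!PRODUCT_DIR/gen/$|CONFIGURATION_NAME/generated.cc'
expanded = path.replace('$!PRODUCT_DIR/', '').replace('$|CONFIGURATION_NAME', 'Debug')
print(expanded)   # gen/Debug/generated.cc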
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-def StripPrefix(arg, prefix):
-  if arg.startswith(prefix):
-    return arg[len(prefix):]
-  return arg
-
-
-def QuoteShellArgument(arg, flavor):
-  """Quote a string such that it will be interpreted as a single argument
-  by the shell."""
-  # Rather than attempting to enumerate the bad shell characters, just
-  # whitelist common OK ones and quote anything else.
-  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
-    return arg  # No quoting necessary.
-  if flavor == 'win':
-    return gyp.msvs_emulation.QuoteForRspFile(arg)
-  return "'" + arg.replace("'", "'" + '"\'"' + "'")  + "'"
-
-
-def Define(d, flavor):
-  """Takes a preprocessor define and returns a -D parameter that's ninja- and
-  shell-escaped."""
-  if flavor == 'win':
-    # cl.exe replaces literal # characters with = in preprocessor definitions for
-    # some reason. Octal-encode to work around that.
-    d = d.replace('#', '\\%03o' % ord('#'))
-  return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
-
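A standalone sketch of the non-Windows quoting branch above (hypothetical define values): safe strings pass through the whitelist regex untouched, anything else is single-quoted with embedded quotes spelled '"'"'.

import re

def quote_posix(arg):
    if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
        return arg   # nothing to quote
    return "'" + arg.replace("'", "'\"'\"'") + "'"

print(quote_posix('-DV8_TARGET_ARCH_X64'))       # unchanged
print(quote_posix('-DGREETING="hello world"'))   # '-DGREETING="hello world"'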
-
-def AddArch(output, arch):
-  """Adds an arch string to an output path."""
-  output, extension = os.path.splitext(output)
-  return '%s.%s%s' % (output, arch, extension)
-
-
-class Target(object):
-  """Target represents the paths used within a single gyp target.
-
-  Conceptually, building a single target A is a series of steps:
-
-  1) actions/rules/copies  generates source/resources/etc.
-  2) compiles              generates .o files
-  3) link                  generates a binary (library/executable)
-  4) bundle                merges the above in a mac bundle
-
-  (Any of these steps can be optional.)
-
-  From a build ordering perspective, a dependent target B could just
-  depend on the last output of this series of steps.
-
-  But some dependent commands sometimes need to reach inside the box.
-  For example, when linking B it needs to get the path to the static
-  library generated by A.
-
-  This object stores those paths.  To keep things simple, member
-  variables only store concrete paths to single files, while methods
-  compute derived values like "the last output of the target".
-  """
-  def __init__(self, type):
-    # Gyp type ("static_library", etc.) of this target.
-    self.type = type
-    # File representing whether any input dependencies necessary for
-    # dependent actions have completed.
-    self.preaction_stamp = None
-    # File representing whether any input dependencies necessary for
-    # dependent compiles have completed.
-    self.precompile_stamp = None
-    # File representing the completion of actions/rules/copies, if any.
-    self.actions_stamp = None
-    # Path to the output of the link step, if any.
-    self.binary = None
-    # Path to the file representing the completion of building the bundle,
-    # if any.
-    self.bundle = None
-    # On Windows, incremental linking requires linking against all the .objs
-    # that compose a .lib (rather than the .lib itself). That list is stored
-    # here. In this case, we also need to save the compile_deps for the target,
-    # so that the the target that directly depends on the .objs can also depend
-    # on those.
-    self.component_objs = None
-    self.compile_deps = None
-    # Windows only. The import .lib is the output of a build step, but
-    # because dependents only link against the lib (not both the lib and the
-    # dll) we keep track of the import library here.
-    self.import_lib = None
-
-  def Linkable(self):
-    """Return true if this is a target that can be linked against."""
-    return self.type in ('static_library', 'shared_library')
-
-  def UsesToc(self, flavor):
-    """Return true if the target should produce a restat rule based on a TOC
-    file."""
-    # For bundles, the .TOC should be produced for the binary, not for
-    # FinalOutput(). But the naive approach would put the TOC file into the
-    # bundle, so don't do this for bundles for now.
-    if flavor == 'win' or self.bundle:
-      return False
-    return self.type in ('shared_library', 'loadable_module')
-
-  def PreActionInput(self, flavor):
-    """Return the path, if any, that should be used as a dependency of
-    any dependent action step."""
-    if self.UsesToc(flavor):
-      return self.FinalOutput() + '.TOC'
-    return self.FinalOutput() or self.preaction_stamp
-
-  def PreCompileInput(self):
-    """Return the path, if any, that should be used as a dependency of
-    any dependent compile step."""
-    return self.actions_stamp or self.precompile_stamp
-
-  def FinalOutput(self):
-    """Return the last output of the target, which depends on all prior
-    steps."""
-    return self.bundle or self.binary or self.actions_stamp
-
-
-# A small discourse on paths as used within the Ninja build:
-# All files we produce (both at gyp and at build time) appear in the
-# build directory (e.g. out/Debug).
-#
-# Paths within a given .gyp file are always relative to the directory
-# containing the .gyp file.  Call these "gyp paths".  This includes
-# sources as well as the starting directory a given gyp rule/action
-# expects to be run from.  We call the path from the source root to
-# the gyp file the "base directory" within the per-.gyp-file
-# NinjaWriter code.
-#
-# All paths as written into the .ninja files are relative to the build
-# directory.  Call these paths "ninja paths".
-#
-# We translate between these two notions of paths with two helper
-# functions:
-#
-# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
-#   into the equivalent ninja path.
-#
-# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
-#   an output file; the result can be namespaced such that it is unique
-#   to the input file name as well as the output target name.
-
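The gyp-path to ninja-path translation described above boils down to relative-path joins. A toy version with hard-coded example directories (not the NinjaWriter API):

import os

# Illustrative layout: build output in out/Debug, a .gyp file in src/base.
build_dir, base_dir = 'out/Debug', 'src/base'
build_to_base = os.path.join('..', '..', base_dir)   # out/Debug -> source root -> src/base

def gyp_path_to_ninja(gyp_path):
    # gyp paths are relative to the .gyp file; ninja paths are relative to build_dir.
    return os.path.normpath(os.path.join(build_to_base, gyp_path))

print(gyp_path_to_ninja('platform/mutex.cc'))
# ../../src/base/platform/mutex.cc (POSIX separators; normpath flips them on Windows)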
-class NinjaWriter(object):
-  def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
-               output_file, toplevel_build, output_file_name, flavor,
-               toplevel_dir=None):
-    """
-    base_dir: path from source root to directory containing this gyp file,
-              by gyp semantics, all input paths are relative to this
-    build_dir: path from source root to build output
-    toplevel_dir: path to the toplevel directory
-    """
-
-    self.hash_for_rules = hash_for_rules
-    self.target_outputs = target_outputs
-    self.base_dir = base_dir
-    self.build_dir = build_dir
-    self.ninja = ninja_syntax.Writer(output_file)
-    self.toplevel_build = toplevel_build
-    self.output_file_name = output_file_name
-
-    self.flavor = flavor
-    self.abs_build_dir = None
-    if toplevel_dir is not None:
-      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
-                                                        build_dir))
-    self.obj_ext = '.obj' if flavor == 'win' else '.o'
-    if flavor == 'win':
-      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
-      self.win_env = {}
-      for arch in ('x86', 'x64'):
-        self.win_env[arch] = 'environment.' + arch
-
-    # Relative path from build output dir to base dir.
-    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
-    self.build_to_base = os.path.join(build_to_top, base_dir)
-    # Relative path from base dir to build dir.
-    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
-    self.base_to_build = os.path.join(base_to_top, build_dir)
-
-  def ExpandSpecial(self, path, product_dir=None):
-    """Expand specials like $!PRODUCT_DIR in |path|.
-
-    If |product_dir| is None, assumes the cwd is already the product
-    dir.  Otherwise, |product_dir| is the relative path to the product
-    dir.
-    """
-
-    PRODUCT_DIR = '$!PRODUCT_DIR'
-    if PRODUCT_DIR in path:
-      if product_dir:
-        path = path.replace(PRODUCT_DIR, product_dir)
-      else:
-        path = path.replace(PRODUCT_DIR + '/', '')
-        path = path.replace(PRODUCT_DIR + '\\', '')
-        path = path.replace(PRODUCT_DIR, '.')
-
-    INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
-    if INTERMEDIATE_DIR in path:
-      int_dir = self.GypPathToUniqueOutput('gen')
-      # GypPathToUniqueOutput generates a path relative to the product dir,
-      # so insert product_dir in front if it is provided.
-      path = path.replace(INTERMEDIATE_DIR,
-                          os.path.join(product_dir or '', int_dir))
-
-    CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
-    path = path.replace(CONFIGURATION_NAME, self.config_name)
-
-    return path
-
-  def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
-    if self.flavor == 'win':
-      path = self.msvs_settings.ConvertVSMacros(
-          path, config=self.config_name)
-    path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
-    path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
-                        dirname)
-    path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
-    path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
-    path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
-    return path
-
-  def GypPathToNinja(self, path, env=None):
-    """Translate a gyp path to a ninja path, optionally expanding environment
-    variable references in |path| with |env|.
-
-    See the above discourse on path conversions."""
-    if env:
-      if self.flavor == 'mac':
-        path = gyp.xcode_emulation.ExpandEnvVars(path, env)
-      elif self.flavor == 'win':
-        path = gyp.msvs_emulation.ExpandMacros(path, env)
-    if path.startswith('$!'):
-      expanded = self.ExpandSpecial(path)
-      if self.flavor == 'win':
-        expanded = os.path.normpath(expanded)
-      return expanded
-    if '$|' in path:
-      path = self.ExpandSpecial(path)
-    assert '$' not in path, path
-    return os.path.normpath(os.path.join(self.build_to_base, path))
-
-  def GypPathToUniqueOutput(self, path, qualified=True):
-    """Translate a gyp path to a ninja path for writing output.
-
-    If qualified is True, qualify the resulting filename with the name
-    of the target.  This is necessary when e.g. compiling the same
-    path twice for two separate output targets.
-
-    See the above discourse on path conversions."""
-
-    path = self.ExpandSpecial(path)
-    assert not path.startswith('$'), path
-
-    # Translate the path following this scheme:
-    #   Input: foo/bar.gyp, target targ, references baz/out.o
-    #   Output: obj/foo/baz/targ.out.o (if qualified)
-    #           obj/foo/baz/out.o (otherwise)
-    #     (and obj.host instead of obj for cross-compiles)
-    #
-    # Why this scheme and not some other one?
-    # 1) for a given input, you can compute all derived outputs by matching
-    #    its path, even if the input is brought via a gyp file with '..'.
-    # 2) simple files like libraries and stamps have a simple filename.
-
-    obj = 'obj'
-    if self.toolset != 'target':
-      obj += '.' + self.toolset
-
-    path_dir, path_basename = os.path.split(path)
-    assert not os.path.isabs(path_dir), (
-        "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
-
-    if qualified:
-      path_basename = self.name + '.' + path_basename
-    return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
-                                         path_basename))
-
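Spelling out the comment's own example (foo/bar.gyp, target 'targ', reference baz/out.o), with POSIX separators shown:

import os

obj, base_dir, target_name = 'obj', 'foo', 'targ'
path_dir, path_basename = os.path.split('baz/out.o')
qualified = os.path.normpath(
    os.path.join(obj, base_dir, path_dir, target_name + '.' + path_basename))
print(qualified)   # obj/foo/baz/targ.out.o ('obj.host' instead of 'obj' when cross-compiling)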
-  def WriteCollapsedDependencies(self, name, targets, order_only=None):
-    """Given a list of targets, return a path for a single file
-    representing the result of building all the targets or None.
-
-    Uses a stamp file if necessary."""
-
-    assert targets == filter(None, targets), targets
-    if len(targets) == 0:
-      assert not order_only
-      return None
-    if len(targets) > 1 or order_only:
-      stamp = self.GypPathToUniqueOutput(name + '.stamp')
-      targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
-      self.ninja.newline()
-    return targets[0]
-
-  def _SubninjaNameForArch(self, arch):
-    output_file_base = os.path.splitext(self.output_file_name)[0]
-    return '%s.%s.ninja' % (output_file_base, arch)
-
-  def WriteSpec(self, spec, config_name, generator_flags):
-    """The main entry point for NinjaWriter: write the build rules for a spec.
-
-    Returns a Target object, which represents the output paths for this spec.
-    Returns None if there are no outputs (e.g. a settings-only 'none' type
-    target)."""
-
-    self.config_name = config_name
-    self.name = spec['target_name']
-    self.toolset = spec['toolset']
-    config = spec['configurations'][config_name]
-    self.target = Target(spec['type'])
-    self.is_standalone_static_library = bool(
-        spec.get('standalone_static_library', 0))
-    # Track if this target contains any C++ files, to decide if gcc or g++
-    # should be used for linking.
-    self.uses_cpp = False
-
-    self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/')
-
-    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
-    self.xcode_settings = self.msvs_settings = None
-    if self.flavor == 'mac':
-      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
-      mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
-      if mac_toolchain_dir:
-        self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
-
-    if self.flavor == 'win':
-      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
-                                                           generator_flags)
-      arch = self.msvs_settings.GetArch(config_name)
-      self.ninja.variable('arch', self.win_env[arch])
-      self.ninja.variable('cc', '$cl_' + arch)
-      self.ninja.variable('cxx', '$cl_' + arch)
-      self.ninja.variable('cc_host', '$cl_' + arch)
-      self.ninja.variable('cxx_host', '$cl_' + arch)
-      self.ninja.variable('asm', '$ml_' + arch)
-
-    if self.flavor == 'mac':
-      self.archs = self.xcode_settings.GetActiveArchs(config_name)
-      if len(self.archs) > 1:
-        self.arch_subninjas = dict(
-            (arch, ninja_syntax.Writer(
-                OpenOutput(os.path.join(self.toplevel_build,
-                                        self._SubninjaNameForArch(arch)),
-                           'w')))
-            for arch in self.archs)
-
-    # Compute predepends for all rules.
-    # actions_depends is the dependencies this target depends on before running
-    # any of its action/rule/copy steps.
-    # compile_depends is the dependencies this target depends on before running
-    # any of its compile steps.
-    actions_depends = []
-    compile_depends = []
-    # TODO(evan): it is rather confusing which things are lists and which
-    # are strings.  Fix these.
-    if 'dependencies' in spec:
-      for dep in spec['dependencies']:
-        if dep in self.target_outputs:
-          target = self.target_outputs[dep]
-          actions_depends.append(target.PreActionInput(self.flavor))
-          compile_depends.append(target.PreCompileInput())
-      actions_depends = filter(None, actions_depends)
-      compile_depends = filter(None, compile_depends)
-      actions_depends = self.WriteCollapsedDependencies('actions_depends',
-                                                        actions_depends)
-      compile_depends = self.WriteCollapsedDependencies('compile_depends',
-                                                        compile_depends)
-      self.target.preaction_stamp = actions_depends
-      self.target.precompile_stamp = compile_depends
-
-    # Write out actions, rules, and copies.  These must happen before we
-    # compile any sources, so compute a list of predependencies for sources
-    # while we do it.
-    extra_sources = []
-    mac_bundle_depends = []
-    self.target.actions_stamp = self.WriteActionsRulesCopies(
-        spec, extra_sources, actions_depends, mac_bundle_depends)
-
-    # If we have actions/rules/copies, we depend directly on those, but
-    # otherwise we depend on dependent target's actions/rules/copies etc.
-    # We never need to explicitly depend on previous target's link steps,
-    # because no compile ever depends on them.
-    compile_depends_stamp = (self.target.actions_stamp or compile_depends)
-
-    # Write out the compilation steps, if any.
-    link_deps = []
-    sources = extra_sources + spec.get('sources', [])
-    if sources:
-      if self.flavor == 'mac' and len(self.archs) > 1:
-        # Write subninja file containing compile and link commands scoped to
-        # a single arch if a fat binary is being built.
-        for arch in self.archs:
-          self.ninja.subninja(self._SubninjaNameForArch(arch))
-
-      pch = None
-      if self.flavor == 'win':
-        gyp.msvs_emulation.VerifyMissingSources(
-            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
-        pch = gyp.msvs_emulation.PrecompiledHeader(
-            self.msvs_settings, config_name, self.GypPathToNinja,
-            self.GypPathToUniqueOutput, self.obj_ext)
-      else:
-        pch = gyp.xcode_emulation.MacPrefixHeader(
-            self.xcode_settings, self.GypPathToNinja,
-            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
-      link_deps = self.WriteSources(
-          self.ninja, config_name, config, sources, compile_depends_stamp, pch,
-          spec)
-      # Some actions/rules output 'sources' that are already object files.
-      obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
-      if obj_outputs:
-        if self.flavor != 'mac' or len(self.archs) == 1:
-          link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
-        else:
-          print "Warning: Actions/rules writing object files don't work with " \
-                "multiarch targets, dropping. (target %s)" % spec['target_name']
-    elif self.flavor == 'mac' and len(self.archs) > 1:
-      link_deps = collections.defaultdict(list)
-
-    compile_deps = self.target.actions_stamp or actions_depends
-    if self.flavor == 'win' and self.target.type == 'static_library':
-      self.target.component_objs = link_deps
-      self.target.compile_deps = compile_deps
-
-    # Write out a link step, if needed.
-    output = None
-    is_empty_bundle = not link_deps and not mac_bundle_depends
-    if link_deps or self.target.actions_stamp or actions_depends:
-      output = self.WriteTarget(spec, config_name, config, link_deps,
-                                compile_deps)
-      if self.is_mac_bundle:
-        mac_bundle_depends.append(output)
-
-    # Bundle all of the above together, if needed.
-    if self.is_mac_bundle:
-      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
-
-    if not output:
-      return None
-
-    assert self.target.FinalOutput(), output
-    return self.target
-
-  def _WinIdlRule(self, source, prebuild, outputs):
-    """Handle the implicit VS .idl rule for one source file. Fills |outputs|
-    with files that are generated."""
-    outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
-        source, self.config_name)
-    outdir = self.GypPathToNinja(outdir)
-    def fix_path(path, rel=None):
-      path = os.path.join(outdir, path)
-      dirname, basename = os.path.split(source)
-      root, ext = os.path.splitext(basename)
-      path = self.ExpandRuleVariables(
-          path, root, dirname, source, ext, basename)
-      if rel:
-        path = os.path.relpath(path, rel)
-      return path
-    vars = [(name, fix_path(value, outdir)) for name, value in vars]
-    output = [fix_path(p) for p in output]
-    vars.append(('outdir', outdir))
-    vars.append(('idlflags', flags))
-    input = self.GypPathToNinja(source)
-    self.ninja.build(output, 'idl', input,
-        variables=vars, order_only=prebuild)
-    outputs.extend(output)
-
-  def WriteWinIdlFiles(self, spec, prebuild):
-    """Writes rules to match MSVS's implicit idl handling."""
-    assert self.flavor == 'win'
-    if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
-      return []
-    outputs = []
-    for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
-      self._WinIdlRule(source, prebuild, outputs)
-    return outputs
-
-  def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
-                              mac_bundle_depends):
-    """Write out the Actions, Rules, and Copies steps.  Return a path
-    representing the outputs of these steps."""
-    outputs = []
-    if self.is_mac_bundle:
-      mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
-    else:
-      mac_bundle_resources = []
-    extra_mac_bundle_resources = []
-
-    if 'actions' in spec:
-      outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
-                                   extra_mac_bundle_resources)
-    if 'rules' in spec:
-      outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
-                                 mac_bundle_resources,
-                                 extra_mac_bundle_resources)
-    if 'copies' in spec:
-      outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
-
-    if 'sources' in spec and self.flavor == 'win':
-      outputs += self.WriteWinIdlFiles(spec, prebuild)
-
-    if self.xcode_settings and self.xcode_settings.IsIosFramework():
-      self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
-
-    stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
-
-    if self.is_mac_bundle:
-      xcassets = self.WriteMacBundleResources(
-          extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
-      partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
-      self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
-
-    return stamp
-
-  def GenerateDescription(self, verb, message, fallback):
-    """Generate and return a description of a build step.
-
-    |verb| is the short summary, e.g. ACTION or RULE.
-    |message| is a hand-written description, or None if not available.
-    |fallback| is the gyp-level name of the step, usable as a fallback.
-    """
-    if self.toolset != 'target':
-      verb += '(%s)' % self.toolset
-    if message:
-      return '%s %s' % (verb, self.ExpandSpecial(message))
-    else:
-      return '%s %s: %s' % (verb, self.name, fallback)
-
-  def WriteActions(self, actions, extra_sources, prebuild,
-                   extra_mac_bundle_resources):
-    # Actions cd into the base directory.
-    env = self.GetToolchainEnv()
-    all_outputs = []
-    for action in actions:
-      # First write out a rule for the action.
-      name = '%s_%s' % (action['action_name'], self.hash_for_rules)
-      description = self.GenerateDescription('ACTION',
-                                             action.get('message', None),
-                                             name)
-      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
-                   if self.flavor == 'win' else False)
-      args = action['action']
-      depfile = action.get('depfile', None)
-      if depfile:
-        depfile = self.ExpandSpecial(depfile, self.base_to_build)
-      pool = 'console' if int(action.get('ninja_use_console', 0)) else None
-      rule_name, _ = self.WriteNewNinjaRule(name, args, description,
-                                            is_cygwin, env, pool,
-                                            depfile=depfile)
-
-      inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
-      if int(action.get('process_outputs_as_sources', False)):
-        extra_sources += action['outputs']
-      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
-        extra_mac_bundle_resources += action['outputs']
-      outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
-
-      # Then write out an edge using the rule.
-      self.ninja.build(outputs, rule_name, inputs,
-                       order_only=prebuild)
-      all_outputs += outputs
-
-      self.ninja.newline()
-
-    return all_outputs
-
-  def WriteRules(self, rules, extra_sources, prebuild,
-                 mac_bundle_resources, extra_mac_bundle_resources):
-    env = self.GetToolchainEnv()
-    all_outputs = []
-    for rule in rules:
-      # Skip a rule with no action and no inputs.
-      if 'action' not in rule and not rule.get('rule_sources', []):
-        continue
-
-      # First write out a rule for the rule action.
-      name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
-
-      args = rule['action']
-      description = self.GenerateDescription(
-          'RULE',
-          rule.get('message', None),
-          ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
-      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
-                   if self.flavor == 'win' else False)
-      pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
-      rule_name, args = self.WriteNewNinjaRule(
-          name, args, description, is_cygwin, env, pool)
-
-      # TODO: if the command references the outputs directly, we should
-      # simplify it to just use $out.
-
-      # Rules can potentially make use of some special variables which
-      # must vary per source file.
-      # Compute the list of variables we'll need to provide.
-      special_locals = ('source', 'root', 'dirname', 'ext', 'name')
-      needed_variables = set(['source'])
-      for argument in args:
-        for var in special_locals:
-          if '${%s}' % var in argument:
-            needed_variables.add(var)
-      needed_variables = sorted(needed_variables)
-
-      def cygwin_munge(path):
-        # pylint: disable=cell-var-from-loop
-        if is_cygwin:
-          return path.replace('\\', '/')
-        return path
-
-      inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
-
-      # If there are n source files matching the rule, and m additional rule
-      # inputs, then adding 'inputs' to each build edge written below will
-      # write m * n inputs. Collapsing reduces this to m + n.
-      sources = rule.get('rule_sources', [])
-      num_inputs = len(inputs)
-      if prebuild:
-        num_inputs += 1
-      if num_inputs > 2 and len(sources) > 2:
-        inputs = [self.WriteCollapsedDependencies(
-          rule['rule_name'], inputs, order_only=prebuild)]
-        prebuild = []
-
-      # For each source file, write an edge that generates all the outputs.
-      for source in sources:
-        source = os.path.normpath(source)
-        dirname, basename = os.path.split(source)
-        root, ext = os.path.splitext(basename)
-
-        # Gather the list of inputs and outputs, expanding $vars if possible.
-        outputs = [self.ExpandRuleVariables(o, root, dirname,
-                                            source, ext, basename)
-                   for o in rule['outputs']]
-
-        if int(rule.get('process_outputs_as_sources', False)):
-          extra_sources += outputs
-
-        was_mac_bundle_resource = source in mac_bundle_resources
-        if was_mac_bundle_resource or \
-            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
-          extra_mac_bundle_resources += outputs
-          # Note: This is n_resources * n_outputs_in_rule.  Put to-be-removed
-          # items in a set and remove them all in a single pass if this becomes
-          # a performance issue.
-          if was_mac_bundle_resource:
-            mac_bundle_resources.remove(source)
-
-        extra_bindings = []
-        for var in needed_variables:
-          if var == 'root':
-            extra_bindings.append(('root', cygwin_munge(root)))
-          elif var == 'dirname':
-            # '$dirname' is a parameter to the rule action, which means
-            # it shouldn't be converted to a Ninja path.  But we don't
-            # want $!PRODUCT_DIR in there either.
-            dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
-            extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
-          elif var == 'source':
-            # '$source' is a parameter to the rule action, which means
-            # it shouldn't be converted to a Ninja path.  But we don't
-            # want $!PRODUCT_DIR in there either.
-            source_expanded = self.ExpandSpecial(source, self.base_to_build)
-            extra_bindings.append(('source', cygwin_munge(source_expanded)))
-          elif var == 'ext':
-            extra_bindings.append(('ext', ext))
-          elif var == 'name':
-            extra_bindings.append(('name', cygwin_munge(basename)))
-          else:
-            assert var == None, repr(var)
-
-        outputs = [self.GypPathToNinja(o, env) for o in outputs]
-        if self.flavor == 'win':
-          # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
-          extra_bindings.append(('unique_name',
-              hashlib.md5(outputs[0]).hexdigest()))
-
-        self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
-                         implicit=inputs,
-                         order_only=prebuild,
-                         variables=extra_bindings)
-
-        all_outputs.extend(outputs)
-
-    return all_outputs
-
-  def WriteCopies(self, copies, prebuild, mac_bundle_depends):
-    outputs = []
-    if self.xcode_settings:
-      extra_env = self.xcode_settings.GetPerTargetSettings()
-      env = self.GetToolchainEnv(additional_settings=extra_env)
-    else:
-      env = self.GetToolchainEnv()
-    for copy in copies:
-      for path in copy['files']:
-        # Normalize the path so trailing slashes don't confuse us.
-        path = os.path.normpath(path)
-        basename = os.path.split(path)[1]
-        src = self.GypPathToNinja(path, env)
-        dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
-                                  env)
-        outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
-        if self.is_mac_bundle:
-          # gyp has mac_bundle_resources to copy things into a bundle's
-          # Resources folder, but there's no built-in way to copy files to other
-          # places in the bundle. Hence, some targets use copies for this. Check
-          # if this file is copied into the current bundle, and if so add it to
-          # the bundle depends so that dependent targets get rebuilt if the copy
-          # input changes.
-          if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
-            mac_bundle_depends.append(dst)
-
-    return outputs
-
-  def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
-    """Prebuild steps to generate hmap files and copy headers to destination."""
-    framework = self.ComputeMacBundleOutput()
-    all_sources = spec['sources']
-    copy_headers = spec['mac_framework_headers']
-    output = self.GypPathToUniqueOutput('headers.hmap')
-    self.xcode_settings.header_map_path = output
-    all_headers = map(self.GypPathToNinja,
-                      filter(lambda x:x.endswith(('.h')), all_sources))
-    variables = [('framework', framework),
-                 ('copy_headers', map(self.GypPathToNinja, copy_headers))]
-    outputs.extend(self.ninja.build(
-        output, 'compile_ios_framework_headers', all_headers,
-        variables=variables, order_only=prebuild))
-
-  def WriteMacBundleResources(self, resources, bundle_depends):
-    """Writes ninja edges for 'mac_bundle_resources'."""
-    xcassets = []
-
-    extra_env = self.xcode_settings.GetPerTargetSettings()
-    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
-    env = self.ComputeExportEnvString(env)
-    isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
-
-    for output, res in gyp.xcode_emulation.GetMacBundleResources(
-        generator_default_variables['PRODUCT_DIR'],
-        self.xcode_settings, map(self.GypPathToNinja, resources)):
-      output = self.ExpandSpecial(output)
-      if os.path.splitext(output)[-1] != '.xcassets':
-        self.ninja.build(output, 'mac_tool', res,
-                         variables=[('mactool_cmd', 'copy-bundle-resource'), \
-                                    ('env', env), ('binary', isBinary)])
-        bundle_depends.append(output)
-      else:
-        xcassets.append(res)
-    return xcassets
-
-  def WriteMacXCassets(self, xcassets, bundle_depends):
-    """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
-
-    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
-    It assumes that the assets catalogs define at least one imageset and
-    thus an Assets.car file will be generated in the application resources
-    directory. If this is not the case, then the build will probably be done
-    at each invocation of ninja."""
-    if not xcassets:
-      return
-
-    extra_arguments = {}
-    settings_to_arg = {
-        'XCASSETS_APP_ICON': 'app-icon',
-        'XCASSETS_LAUNCH_IMAGE': 'launch-image',
-    }
-    settings = self.xcode_settings.xcode_settings[self.config_name]
-    for settings_key, arg_name in settings_to_arg.iteritems():
-      value = settings.get(settings_key)
-      if value:
-        extra_arguments[arg_name] = value
-
-    partial_info_plist = None
-    if extra_arguments:
-      partial_info_plist = self.GypPathToUniqueOutput(
-          'assetcatalog_generated_info.plist')
-      extra_arguments['output-partial-info-plist'] = partial_info_plist
-
-    outputs = []
-    outputs.append(
-        os.path.join(
-            self.xcode_settings.GetBundleResourceFolder(),
-            'Assets.car'))
-    if partial_info_plist:
-      outputs.append(partial_info_plist)
-
-    keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
-    extra_env = self.xcode_settings.GetPerTargetSettings()
-    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
-    env = self.ComputeExportEnvString(env)
-
-    bundle_depends.extend(self.ninja.build(
-        outputs, 'compile_xcassets', xcassets,
-        variables=[('env', env), ('keys', keys)]))
-    return partial_info_plist
-
-  def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
-    """Write build rules for bundle Info.plist files."""
-    info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
-        generator_default_variables['PRODUCT_DIR'],
-        self.xcode_settings, self.GypPathToNinja)
-    if not info_plist:
-      return
-    out = self.ExpandSpecial(out)
-    if defines:
-      # Create an intermediate file to store preprocessed results.
-      intermediate_plist = self.GypPathToUniqueOutput(
-          os.path.basename(info_plist))
-      defines = ' '.join([Define(d, self.flavor) for d in defines])
-      info_plist = self.ninja.build(
-          intermediate_plist, 'preprocess_infoplist', info_plist,
-          variables=[('defines',defines)])
-
-    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
-    env = self.ComputeExportEnvString(env)
-
-    if partial_info_plist:
-      intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
-      info_plist = self.ninja.build(
-          intermediate_plist, 'merge_infoplist',
-          [partial_info_plist, info_plist])
-
-    keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
-    keys = QuoteShellArgument(json.dumps(keys), self.flavor)
-    isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
-    self.ninja.build(out, 'copy_infoplist', info_plist,
-                     variables=[('env', env), ('keys', keys),
-                                ('binary', isBinary)])
-    bundle_depends.append(out)
-
-  def WriteSources(self, ninja_file, config_name, config, sources, predepends,
-                   precompiled_header, spec):
-    """Write build rules to compile all of |sources|."""
-    if self.toolset == 'host':
-      self.ninja.variable('ar', '$ar_host')
-      self.ninja.variable('cc', '$cc_host')
-      self.ninja.variable('cxx', '$cxx_host')
-      self.ninja.variable('ld', '$ld_host')
-      self.ninja.variable('ldxx', '$ldxx_host')
-      self.ninja.variable('nm', '$nm_host')
-      self.ninja.variable('readelf', '$readelf_host')
-
-    if self.flavor != 'mac' or len(self.archs) == 1:
-      return self.WriteSourcesForArch(
-          self.ninja, config_name, config, sources, predepends,
-          precompiled_header, spec)
-    else:
-      return dict((arch, self.WriteSourcesForArch(
-            self.arch_subninjas[arch], config_name, config, sources, predepends,
-            precompiled_header, spec, arch=arch))
-          for arch in self.archs)
-
-  def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
-                          predepends, precompiled_header, spec, arch=None):
-    """Write build rules to compile all of |sources|."""
-
-    extra_defines = []
-    if self.flavor == 'mac':
-      cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
-      cflags_c = self.xcode_settings.GetCflagsC(config_name)
-      cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
-      cflags_objc = ['$cflags_c'] + \
-                    self.xcode_settings.GetCflagsObjC(config_name)
-      cflags_objcc = ['$cflags_cc'] + \
-                     self.xcode_settings.GetCflagsObjCC(config_name)
-    elif self.flavor == 'win':
-      asmflags = self.msvs_settings.GetAsmflags(config_name)
-      cflags = self.msvs_settings.GetCflags(config_name)
-      cflags_c = self.msvs_settings.GetCflagsC(config_name)
-      cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
-      extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      # See comment at cc_command for why there's two .pdb files.
-      pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
-          config_name, self.ExpandSpecial)
-      if not pdbpath_c:
-        obj = 'obj'
-        if self.toolset != 'target':
-          obj += '.' + self.toolset
-        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
-        pdbpath_c = pdbpath + '.c.pdb'
-        pdbpath_cc = pdbpath + '.cc.pdb'
-      self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
-      self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
-      self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
-    else:
-      cflags = config.get('cflags', [])
-      cflags_c = config.get('cflags_c', [])
-      cflags_cc = config.get('cflags_cc', [])
-
-    # Respect environment variables related to build, but target-specific
-    # flags can still override them.
-    if self.toolset == 'target':
-      cflags_c = (os.environ.get('CPPFLAGS', '').split() +
-                  os.environ.get('CFLAGS', '').split() + cflags_c)
-      cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
-                   os.environ.get('CXXFLAGS', '').split() + cflags_cc)
-    elif self.toolset == 'host':
-      cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
-                  os.environ.get('CFLAGS_host', '').split() + cflags_c)
-      cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
-                   os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
-
-    defines = config.get('defines', []) + extra_defines
-    self.WriteVariableList(ninja_file, 'defines',
-                           [Define(d, self.flavor) for d in defines])
-    if self.flavor == 'win':
-      self.WriteVariableList(ninja_file, 'asmflags',
-                             map(self.ExpandSpecial, asmflags))
-      self.WriteVariableList(ninja_file, 'rcflags',
-          [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
-           for f in self.msvs_settings.GetRcflags(config_name,
-                                                  self.GypPathToNinja)])
-
-    include_dirs = config.get('include_dirs', [])
-
-    env = self.GetToolchainEnv()
-    if self.flavor == 'win':
-      include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
-                                                          config_name)
-    self.WriteVariableList(ninja_file, 'includes',
-        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
-         for i in include_dirs])
-
-    if self.flavor == 'win':
-      midl_include_dirs = config.get('midl_include_dirs', [])
-      midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
-          midl_include_dirs, config_name)
-      self.WriteVariableList(ninja_file, 'midl_includes',
-          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
-           for i in midl_include_dirs])
-
-    pch_commands = precompiled_header.GetPchBuildCommands(arch)
-    if self.flavor == 'mac':
-      # Most targets use no precompiled headers, so only write these if needed.
-      for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
-                       ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
-        include = precompiled_header.GetInclude(ext, arch)
-        if include: ninja_file.variable(var, include)
-
-    arflags = config.get('arflags', [])
-
-    self.WriteVariableList(ninja_file, 'cflags',
-                           map(self.ExpandSpecial, cflags))
-    self.WriteVariableList(ninja_file, 'cflags_c',
-                           map(self.ExpandSpecial, cflags_c))
-    self.WriteVariableList(ninja_file, 'cflags_cc',
-                           map(self.ExpandSpecial, cflags_cc))
-    if self.flavor == 'mac':
-      self.WriteVariableList(ninja_file, 'cflags_objc',
-                             map(self.ExpandSpecial, cflags_objc))
-      self.WriteVariableList(ninja_file, 'cflags_objcc',
-                             map(self.ExpandSpecial, cflags_objcc))
-    self.WriteVariableList(ninja_file, 'arflags',
-                           map(self.ExpandSpecial, arflags))
-    ninja_file.newline()
-    outputs = []
-    has_rc_source = False
-    for source in sources:
-      filename, ext = os.path.splitext(source)
-      ext = ext[1:]
-      obj_ext = self.obj_ext
-      if ext in ('cc', 'cpp', 'cxx'):
-        command = 'cxx'
-        self.uses_cpp = True
-      elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
-        command = 'cc'
-      elif ext == 's' and self.flavor != 'win':  # Doesn't generate .o.d files.
-        command = 'cc_s'
-      elif (self.flavor == 'win' and ext == 'asm' and
-            not self.msvs_settings.HasExplicitAsmRules(spec)):
-        command = 'asm'
-        # Add the _asm suffix as msvs is capable of handling .cc and
-        # .asm files of the same name without collision.
-        obj_ext = '_asm.obj'
-      elif self.flavor == 'mac' and ext == 'm':
-        command = 'objc'
-      elif self.flavor == 'mac' and ext == 'mm':
-        command = 'objcxx'
-        self.uses_cpp = True
-      elif self.flavor == 'win' and ext == 'rc':
-        command = 'rc'
-        obj_ext = '.res'
-        has_rc_source = True
-      else:
-        # Ignore unhandled extensions.
-        continue
-      input = self.GypPathToNinja(source)
-      output = self.GypPathToUniqueOutput(filename + obj_ext)
-      if arch is not None:
-        output = AddArch(output, arch)
-      implicit = precompiled_header.GetObjDependencies([input], [output], arch)
-      variables = []
-      if self.flavor == 'win':
-        variables, output, implicit = precompiled_header.GetFlagsModifications(
-            input, output, implicit, command, cflags_c, cflags_cc,
-            self.ExpandSpecial)
-      ninja_file.build(output, command, input,
-                       implicit=[gch for _, _, gch in implicit],
-                       order_only=predepends, variables=variables)
-      outputs.append(output)
-
-    if has_rc_source:
-      resource_include_dirs = config.get('resource_include_dirs', include_dirs)
-      self.WriteVariableList(ninja_file, 'resource_includes',
-          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
-           for i in resource_include_dirs])
-
-    self.WritePchTargets(ninja_file, pch_commands)
-
-    ninja_file.newline()
-    return outputs
-
-  def WritePchTargets(self, ninja_file, pch_commands):
-    """Writes ninja rules to compile prefix headers."""
-    if not pch_commands:
-      return
-
-    for gch, lang_flag, lang, input in pch_commands:
-      var_name = {
-        'c': 'cflags_pch_c',
-        'cc': 'cflags_pch_cc',
-        'm': 'cflags_pch_objc',
-        'mm': 'cflags_pch_objcc',
-      }[lang]
-
-      map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
-      cmd = map.get(lang)
-      ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
-
-  def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
-    """Write out a link step. Fills out target.binary. """
-    if self.flavor != 'mac' or len(self.archs) == 1:
-      return self.WriteLinkForArch(
-          self.ninja, spec, config_name, config, link_deps, compile_deps)
-    else:
-      output = self.ComputeOutput(spec)
-      inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
-                                      config_name, config, link_deps[arch],
-                                      compile_deps, arch=arch)
-                for arch in self.archs]
-      extra_bindings = []
-      build_output = output
-      if not self.is_mac_bundle:
-        self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
-      # TODO(yyanagisawa): more work needed to fix:
-      # https://code.google.com/p/gyp/issues/detail?id=411
-      if (spec['type'] in ('shared_library', 'loadable_module') and
-          not self.is_mac_bundle):
-        extra_bindings.append(('lib', output))
-        self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
-            variables=extra_bindings)
-      else:
-        self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
-      return output
-
-  def WriteLinkForArch(self, ninja_file, spec, config_name, config,
-                       link_deps, compile_deps, arch=None):
-    """Write out a link step. Fills out target.binary. """
-    command = {
-      'executable':      'link',
-      'loadable_module': 'solink_module',
-      'shared_library':  'solink',
-    }[spec['type']]
-    command_suffix = ''
-
-    implicit_deps = set()
-    solibs = set()
-    order_deps = set()
-
-    if compile_deps:
-      # Normally, the compiles of the target already depend on compile_deps,
-      # but a shared_library target might have no sources and only link together
-      # a few static_library deps, so the link step also needs to depend
-      # on compile_deps to make sure actions in the shared_library target
-      # get run before the link.
-      order_deps.add(compile_deps)
-
-    if 'dependencies' in spec:
-      # Two kinds of dependencies:
-      # - Linkable dependencies (like a .a or a .so): add them to the link line.
-      # - Non-linkable dependencies (like a rule that generates a file
-      #   and writes a stamp file): add them to implicit_deps
-      extra_link_deps = set()
-      for dep in spec['dependencies']:
-        target = self.target_outputs.get(dep)
-        if not target:
-          continue
-        linkable = target.Linkable()
-        if linkable:
-          new_deps = []
-          if (self.flavor == 'win' and
-              target.component_objs and
-              self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
-            new_deps = target.component_objs
-            if target.compile_deps:
-              order_deps.add(target.compile_deps)
-          elif self.flavor == 'win' and target.import_lib:
-            new_deps = [target.import_lib]
-          elif target.UsesToc(self.flavor):
-            solibs.add(target.binary)
-            implicit_deps.add(target.binary + '.TOC')
-          else:
-            new_deps = [target.binary]
-          for new_dep in new_deps:
-            if new_dep not in extra_link_deps:
-              extra_link_deps.add(new_dep)
-              link_deps.append(new_dep)
-
-        final_output = target.FinalOutput()
-        if not linkable or final_output != target.binary:
-          implicit_deps.add(final_output)
-
-    extra_bindings = []
-    if self.uses_cpp and self.flavor != 'win':
-      extra_bindings.append(('ld', '$ldxx'))
-
-    output = self.ComputeOutput(spec, arch)
-    if arch is None and not self.is_mac_bundle:
-      self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
-    is_executable = spec['type'] == 'executable'
-    # The ldflags config key is not used on mac or win. On those platforms
-    # linker flags are set via xcode_settings and msvs_settings, respectively.
-    env_ldflags = os.environ.get('LDFLAGS', '').split()
-    if self.flavor == 'mac':
-      ldflags = self.xcode_settings.GetLdflags(config_name,
-          self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
-          self.GypPathToNinja, arch)
-      ldflags = env_ldflags + ldflags
-    elif self.flavor == 'win':
-      manifest_base_name = self.GypPathToUniqueOutput(
-          self.ComputeOutputFileName(spec))
-      ldflags, intermediate_manifest, manifest_files = \
-          self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
-                                        self.ExpandSpecial, manifest_base_name,
-                                        output, is_executable,
-                                        self.toplevel_build)
-      ldflags = env_ldflags + ldflags
-      self.WriteVariableList(ninja_file, 'manifests', manifest_files)
-      implicit_deps = implicit_deps.union(manifest_files)
-      if intermediate_manifest:
-        self.WriteVariableList(
-            ninja_file, 'intermediatemanifest', [intermediate_manifest])
-      command_suffix = _GetWinLinkRuleNameSuffix(
-          self.msvs_settings.IsEmbedManifest(config_name))
-      def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
-      if def_file:
-        implicit_deps.add(def_file)
-    else:
-      # Respect environment variables related to build, but target-specific
-      # flags can still override them.
-      ldflags = env_ldflags + config.get('ldflags', [])
-      if is_executable and len(solibs):
-        rpath = 'lib/'
-        if self.toolset != 'target':
-          rpath += self.toolset
-          ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
-        else:
-          ldflags.append('-Wl,-rpath=%s' % self.target_rpath)
-        ldflags.append('-Wl,-rpath-link=%s' % rpath)
-    self.WriteVariableList(ninja_file, 'ldflags',
-                           map(self.ExpandSpecial, ldflags))
-
-    library_dirs = config.get('library_dirs', [])
-    if self.flavor == 'win':
-      library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
-                      for l in library_dirs]
-      library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
-                                                       self.flavor)
-                      for l in library_dirs]
-    else:
-      library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
-                                         self.flavor)
-                      for l in library_dirs]
-
-    libraries = gyp.common.uniquer(map(self.ExpandSpecial,
-                                       spec.get('libraries', [])))
-    if self.flavor == 'mac':
-      libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
-    elif self.flavor == 'win':
-      libraries = self.msvs_settings.AdjustLibraries(libraries)
-
-    self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
-
-    linked_binary = output
-
-    if command in ('solink', 'solink_module'):
-      extra_bindings.append(('soname', os.path.split(output)[1]))
-      extra_bindings.append(('lib',
-                            gyp.common.EncodePOSIXShellArgument(output)))
-      if self.flavor != 'win':
-        link_file_list = output
-        if self.is_mac_bundle:
-          # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
-          # 'Dependency Framework.framework.rsp'
-          link_file_list = self.xcode_settings.GetWrapperName()
-        if arch:
-          link_file_list += '.' + arch
-        link_file_list += '.rsp'
-        # If an rspfile contains spaces, ninja surrounds the filename with
-        # quotes around it and then passes it to open(), creating a file with
-        # quotes in its name (and when looking for the rsp file, the name
-        # makes it through bash which strips the quotes) :-/
-        link_file_list = link_file_list.replace(' ', '_')
-        extra_bindings.append(
-          ('link_file_list',
-            gyp.common.EncodePOSIXShellArgument(link_file_list)))
-      if self.flavor == 'win':
-        extra_bindings.append(('binary', output))
-        if ('/NOENTRY' not in ldflags and
-            not self.msvs_settings.GetNoImportLibrary(config_name)):
-          self.target.import_lib = output + '.lib'
-          extra_bindings.append(('implibflag',
-                                 '/IMPLIB:%s' % self.target.import_lib))
-          pdbname = self.msvs_settings.GetPDBName(
-              config_name, self.ExpandSpecial, output + '.pdb')
-          output = [output, self.target.import_lib]
-          if pdbname:
-            output.append(pdbname)
-      elif not self.is_mac_bundle:
-        output = [output, output + '.TOC']
-      else:
-        command = command + '_notoc'
-    elif self.flavor == 'win':
-      extra_bindings.append(('binary', output))
-      pdbname = self.msvs_settings.GetPDBName(
-          config_name, self.ExpandSpecial, output + '.pdb')
-      if pdbname:
-        output = [output, pdbname]
-
-
-    if len(solibs):
-      extra_bindings.append(('solibs',
-          gyp.common.EncodePOSIXShellList(sorted(solibs))))
-
-    ninja_file.build(output, command + command_suffix, link_deps,
-                     implicit=sorted(implicit_deps),
-                     order_only=list(order_deps),
-                     variables=extra_bindings)
-    return linked_binary
-
-  def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
-    extra_link_deps = any(self.target_outputs.get(dep).Linkable()
-                          for dep in spec.get('dependencies', [])
-                          if dep in self.target_outputs)
-    if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
-      # TODO(evan): don't call this function for 'none' target types, as
-      # it doesn't do anything, and we fake out a 'binary' with a stamp file.
-      self.target.binary = compile_deps
-      self.target.type = 'none'
-    elif spec['type'] == 'static_library':
-      self.target.binary = self.ComputeOutput(spec)
-      if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
-          self.is_standalone_static_library):
-        self.ninja.build(self.target.binary, 'alink_thin', link_deps,
-                         order_only=compile_deps)
-      else:
-        variables = []
-        if self.xcode_settings:
-          libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
-          if libtool_flags:
-            variables.append(('libtool_flags', libtool_flags))
-        if self.msvs_settings:
-          libflags = self.msvs_settings.GetLibFlags(config_name,
-                                                    self.GypPathToNinja)
-          variables.append(('libflags', libflags))
-
-        if self.flavor != 'mac' or len(self.archs) == 1:
-          self.AppendPostbuildVariable(variables, spec,
-                                       self.target.binary, self.target.binary)
-          self.ninja.build(self.target.binary, 'alink', link_deps,
-                           order_only=compile_deps, variables=variables)
-        else:
-          inputs = []
-          for arch in self.archs:
-            output = self.ComputeOutput(spec, arch)
-            self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
-                                            order_only=compile_deps,
-                                            variables=variables)
-            inputs.append(output)
-          # TODO: It's not clear if libtool_flags should be passed to the alink
-          # call that combines single-arch .a files into a fat .a file.
-          self.AppendPostbuildVariable(variables, spec,
-                                       self.target.binary, self.target.binary)
-          self.ninja.build(self.target.binary, 'alink', inputs,
-                           # FIXME: test proving order_only=compile_deps isn't
-                           # needed.
-                           variables=variables)
-    else:
-      self.target.binary = self.WriteLink(spec, config_name, config, link_deps,
-                                          compile_deps)
-    return self.target.binary
-
-  def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
-    assert self.is_mac_bundle
-    package_framework = spec['type'] in ('shared_library', 'loadable_module')
-    output = self.ComputeMacBundleOutput()
-    if is_empty:
-      output += '.stamp'
-    variables = []
-    self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
-                                 is_command_start=not package_framework)
-    if package_framework and not is_empty:
-      if spec['type'] == 'shared_library' and self.xcode_settings.isIOS:
-        self.ninja.build(output, 'package_ios_framework', mac_bundle_depends,
-                         variables=variables)
-      else:
-        variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
-        self.ninja.build(output, 'package_framework', mac_bundle_depends,
-                         variables=variables)
-    else:
-      self.ninja.build(output, 'stamp', mac_bundle_depends,
-                       variables=variables)
-    self.target.bundle = output
-    return output
-
-  def GetToolchainEnv(self, additional_settings=None):
-    """Returns the variables toolchain would set for build steps."""
-    env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
-    if self.flavor == 'win':
-      env = self.GetMsvsToolchainEnv(
-          additional_settings=additional_settings)
-    return env
-
-  def GetMsvsToolchainEnv(self, additional_settings=None):
-    """Returns the variables Visual Studio would set for build steps."""
-    return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
-                                             config=self.config_name)
-
-  def GetSortedXcodeEnv(self, additional_settings=None):
-    """Returns the variables Xcode would set for build steps."""
-    assert self.abs_build_dir
-    abs_build_dir = self.abs_build_dir
-    return gyp.xcode_emulation.GetSortedXcodeEnv(
-        self.xcode_settings, abs_build_dir,
-        os.path.join(abs_build_dir, self.build_to_base), self.config_name,
-        additional_settings)
-
-  def GetSortedXcodePostbuildEnv(self):
-    """Returns the variables Xcode would set for postbuild steps."""
-    postbuild_settings = {}
-    # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
-    # TODO(thakis): It would be nice to have some general mechanism instead.
-    strip_save_file = self.xcode_settings.GetPerTargetSetting(
-        'CHROMIUM_STRIP_SAVE_FILE')
-    if strip_save_file:
-      postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
-    return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
-
-  def AppendPostbuildVariable(self, variables, spec, output, binary,
-                              is_command_start=False):
-    """Adds a 'postbuild' variable if there is a postbuild for |output|."""
-    postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
-    if postbuild:
-      variables.append(('postbuilds', postbuild))
-
-  def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
-    """Returns a shell command that runs all the postbuilds, and removes
-    |output| if any of them fails. If |is_command_start| is False, then the
-    returned string will start with ' && '."""
-    if not self.xcode_settings or spec['type'] == 'none' or not output:
-      return ''
-    output = QuoteShellArgument(output, self.flavor)
-    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
-    if output_binary is not None:
-      postbuilds = self.xcode_settings.AddImplicitPostbuilds(
-          self.config_name,
-          os.path.normpath(os.path.join(self.base_to_build, output)),
-          QuoteShellArgument(
-              os.path.normpath(os.path.join(self.base_to_build, output_binary)),
-              self.flavor),
-          postbuilds, quiet=True)
-
-    if not postbuilds:
-      return ''
-    # Postbuilds expect to be run in the gyp file's directory, so insert an
-    # implicit postbuild to cd to there.
-    postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
-        ['cd', self.build_to_base]))
-    env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
-    # G will be non-null if any postbuild fails. Run all postbuilds in a
-    # subshell.
-    commands = env + ' (' + \
-        ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
-    command_string = (commands + '); G=$$?; '
-                      # Remove the final output if any postbuild failed.
-                      '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
-    if is_command_start:
-      return '(' + command_string + ' && '
-    else:
-      return '$ && (' + command_string
-
-  def ComputeExportEnvString(self, env):
-    """Given an environment, returns a string looking like
-        'export FOO=foo; export BAR="${FOO} bar;'
-    that exports |env| to the shell."""
-    export_str = []
-    for k, v in env:
-      export_str.append('export %s=%s;' %
-          (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
-    return ' '.join(export_str)
-
-  def ComputeMacBundleOutput(self):
-    """Return the 'output' (full output path) to a bundle output directory."""
-    assert self.is_mac_bundle
-    path = generator_default_variables['PRODUCT_DIR']
-    return self.ExpandSpecial(
-        os.path.join(path, self.xcode_settings.GetWrapperName()))
-
-  def ComputeOutputFileName(self, spec, type=None):
-    """Compute the filename of the final output for the current target."""
-    if not type:
-      type = spec['type']
-
-    default_variables = copy.copy(generator_default_variables)
-    CalculateVariables(default_variables, {'flavor': self.flavor})
-
-    # Compute filename prefix: the product prefix, or a default for
-    # the product type.
-    DEFAULT_PREFIX = {
-      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
-      'shared_library': default_variables['SHARED_LIB_PREFIX'],
-      'static_library': default_variables['STATIC_LIB_PREFIX'],
-      'executable': default_variables['EXECUTABLE_PREFIX'],
-      }
-    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
-
-    # Compute filename extension: the product extension, or a default
-    # for the product type.
-    DEFAULT_EXTENSION = {
-        'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
-        'shared_library': default_variables['SHARED_LIB_SUFFIX'],
-        'static_library': default_variables['STATIC_LIB_SUFFIX'],
-        'executable': default_variables['EXECUTABLE_SUFFIX'],
-      }
-    extension = spec.get('product_extension')
-    if extension:
-      extension = '.' + extension
-    else:
-      extension = DEFAULT_EXTENSION.get(type, '')
-
-    if 'product_name' in spec:
-      # If we were given an explicit name, use that.
-      target = spec['product_name']
-    else:
-      # Otherwise, derive a name from the target name.
-      target = spec['target_name']
-      if prefix == 'lib':
-        # Snip out an extra 'lib' from libs if appropriate.
-        target = StripPrefix(target, 'lib')
-
-    if type in ('static_library', 'loadable_module', 'shared_library',
-                        'executable'):
-      return '%s%s%s' % (prefix, target, extension)
-    elif type == 'none':
-      return '%s.stamp' % target
-    else:
-      raise Exception('Unhandled output type %s' % type)
-
-  def ComputeOutput(self, spec, arch=None):
-    """Compute the path for the final output of the spec."""
-    type = spec['type']
-
-    if self.flavor == 'win':
-      override = self.msvs_settings.GetOutputName(self.config_name,
-                                                  self.ExpandSpecial)
-      if override:
-        return override
-
-    if arch is None and self.flavor == 'mac' and type in (
-        'static_library', 'executable', 'shared_library', 'loadable_module'):
-      filename = self.xcode_settings.GetExecutablePath()
-    else:
-      filename = self.ComputeOutputFileName(spec, type)
-
-    if arch is None and 'product_dir' in spec:
-      path = os.path.join(spec['product_dir'], filename)
-      return self.ExpandSpecial(path)
-
-    # Some products go into the output root, libraries go into shared library
-    # dir, and everything else goes into the normal place.
-    type_in_output_root = ['executable', 'loadable_module']
-    if self.flavor == 'mac' and self.toolset == 'target':
-      type_in_output_root += ['shared_library', 'static_library']
-    elif self.flavor == 'win' and self.toolset == 'target':
-      type_in_output_root += ['shared_library']
-
-    if arch is not None:
-      # Make sure partial executables don't end up in a bundle or the regular
-      # output directory.
-      archdir = 'arch'
-      if self.toolset != 'target':
-        archdir = os.path.join('arch', '%s' % self.toolset)
-      return os.path.join(archdir, AddArch(filename, arch))
-    elif type in type_in_output_root or self.is_standalone_static_library:
-      return filename
-    elif type == 'shared_library':
-      libdir = 'lib'
-      if self.toolset != 'target':
-        libdir = os.path.join('lib', '%s' % self.toolset)
-      return os.path.join(libdir, filename)
-    else:
-      return self.GypPathToUniqueOutput(filename, qualified=False)
-
-  def WriteVariableList(self, ninja_file, var, values):
-    assert not isinstance(values, str)
-    if values is None:
-      values = []
-    ninja_file.variable(var, ' '.join(values))
-
-  def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
-                        depfile=None):
-    """Write out a new ninja "rule" statement for a given command.
-
-    Returns the name of the new rule, and a copy of |args| with variables
-    expanded."""
-
-    if self.flavor == 'win':
-      args = [self.msvs_settings.ConvertVSMacros(
-                  arg, self.base_to_build, config=self.config_name)
-              for arg in args]
-      description = self.msvs_settings.ConvertVSMacros(
-          description, config=self.config_name)
-    elif self.flavor == 'mac':
-      # |env| is an empty list on non-mac.
-      args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
-      description = gyp.xcode_emulation.ExpandEnvVars(description, env)
-
-    # TODO: we shouldn't need to qualify names; we do it because
-    # currently the ninja rule namespace is global, but it really
-    # should be scoped to the subninja.
-    rule_name = self.name
-    if self.toolset == 'target':
-      rule_name += '.' + self.toolset
-    rule_name += '.' + name
-    rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
-
-    # Remove variable references, but not if they refer to the magic rule
-    # variables.  This is not quite right, as it also protects these for
-    # actions, not just for rules where they are valid. Good enough.
-    protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
-    protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
-    description = re.sub(protect + r'\$', '_', description)
-
-    # gyp dictates that commands are run from the base directory.
-    # cd into the directory before running, and adjust paths in
-    # the arguments to point to the proper locations.
-    rspfile = None
-    rspfile_content = None
-    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
-    if self.flavor == 'win':
-      rspfile = rule_name + '.$unique_name.rsp'
-      # The cygwin case handles this inside the bash sub-shell.
-      run_in = '' if is_cygwin else ' ' + self.build_to_base
-      if is_cygwin:
-        rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
-            args, self.build_to_base)
-      else:
-        rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
-      command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
-                 rspfile + run_in)
-    else:
-      env = self.ComputeExportEnvString(env)
-      command = gyp.common.EncodePOSIXShellList(args)
-      command = 'cd %s; ' % self.build_to_base + env + command
-
-    # GYP rules/actions express being no-ops by not touching their outputs.
-    # Avoid executing downstream dependencies in this case by specifying
-    # restat=1 to ninja.
-    self.ninja.rule(rule_name, command, description, depfile=depfile,
-                    restat=True, pool=pool,
-                    rspfile=rspfile, rspfile_content=rspfile_content)
-    self.ninja.newline()
-
-    return rule_name, args
-
-
-def CalculateVariables(default_variables, params):
-  """Calculate additional variables for use in the build (called by gyp)."""
-  global generator_additional_non_configuration_keys
-  global generator_additional_path_sections
-  flavor = gyp.common.GetFlavor(params)
-  if flavor == 'mac':
-    default_variables.setdefault('OS', 'mac')
-    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
-    default_variables.setdefault('SHARED_LIB_DIR',
-                                 generator_default_variables['PRODUCT_DIR'])
-    default_variables.setdefault('LIB_DIR',
-                                 generator_default_variables['PRODUCT_DIR'])
-
-    # Copy additional generator configuration data from Xcode, which is shared
-    # by the Mac Ninja generator.
-    import gyp.generator.xcode as xcode_generator
-    generator_additional_non_configuration_keys = getattr(xcode_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(xcode_generator,
-        'generator_additional_path_sections', [])
-    global generator_extra_sources_for_rules
-    generator_extra_sources_for_rules = getattr(xcode_generator,
-        'generator_extra_sources_for_rules', [])
-  elif flavor == 'win':
-    exts = gyp.MSVSUtil.TARGET_TYPE_EXT
-    default_variables.setdefault('OS', 'win')
-    default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
-    default_variables['STATIC_LIB_PREFIX'] = ''
-    default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
-    default_variables['SHARED_LIB_PREFIX'] = ''
-    default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
-
-    # Copy additional generator configuration data from VS, which is shared
-    # by the Windows Ninja generator.
-    import gyp.generator.msvs as msvs_generator
-    generator_additional_non_configuration_keys = getattr(msvs_generator,
-        'generator_additional_non_configuration_keys', [])
-    generator_additional_path_sections = getattr(msvs_generator,
-        'generator_additional_path_sections', [])
-
-    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-  else:
-    operating_system = flavor
-    if flavor == 'android':
-      operating_system = 'linux'  # Keep this legacy behavior for now.
-    default_variables.setdefault('OS', operating_system)
-    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
-    default_variables.setdefault('SHARED_LIB_DIR',
-                                 os.path.join('$!PRODUCT_DIR', 'lib'))
-    default_variables.setdefault('LIB_DIR',
-                                 os.path.join('$!PRODUCT_DIR', 'obj'))
-
-def ComputeOutputDir(params):
-  """Returns the path from the toplevel_dir to the build output directory."""
-  # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to ninja easier; ninja doesn't put anything here.
-  generator_dir = os.path.relpath(params['options'].generator_output or '.')
-
-  # output_dir: relative path from generator_dir to the build directory.
-  output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
-
-  # Relative path from source root to our output files.  e.g. "out"
-  return os.path.normpath(os.path.join(generator_dir, output_dir))
-
-
-def CalculateGeneratorInputInfo(params):
-  """Called by __init__ to initialize generator values based on params."""
-  # E.g. "out/gypfiles"
-  toplevel = params['options'].toplevel_dir
-  qualified_out_dir = os.path.normpath(os.path.join(
-      toplevel, ComputeOutputDir(params), 'gypfiles'))
-
-  global generator_filelist_paths
-  generator_filelist_paths = {
-      'toplevel': toplevel,
-      'qualified_out_dir': qualified_out_dir,
-  }
-
-
-def OpenOutput(path, mode='w'):
-  """Open |path| for writing, creating directories if necessary."""
-  gyp.common.EnsureDirExists(path)
-  return open(path, mode)
-
-
-def CommandWithWrapper(cmd, wrappers, prog):
-  wrapper = wrappers.get(cmd, '')
-  if wrapper:
-    return wrapper + ' ' + prog
-  return prog
-
-
-def GetDefaultConcurrentLinks():
-  """Returns a best-guess for a number of concurrent links."""
-  pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
-  if pool_size:
-    return pool_size
-
-  if sys.platform in ('win32', 'cygwin'):
-    import ctypes
-
-    class MEMORYSTATUSEX(ctypes.Structure):
-      _fields_ = [
-        ("dwLength", ctypes.c_ulong),
-        ("dwMemoryLoad", ctypes.c_ulong),
-        ("ullTotalPhys", ctypes.c_ulonglong),
-        ("ullAvailPhys", ctypes.c_ulonglong),
-        ("ullTotalPageFile", ctypes.c_ulonglong),
-        ("ullAvailPageFile", ctypes.c_ulonglong),
-        ("ullTotalVirtual", ctypes.c_ulonglong),
-        ("ullAvailVirtual", ctypes.c_ulonglong),
-        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
-      ]
-
-    stat = MEMORYSTATUSEX()
-    stat.dwLength = ctypes.sizeof(stat)
-    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
-
-    # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
-    # on a 64 GB machine.
-    mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30)))  # total / 5GB
-    hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
-    return min(mem_limit, hard_cap)
-  elif sys.platform.startswith('linux'):
-    if os.path.exists("/proc/meminfo"):
-      with open("/proc/meminfo") as meminfo:
-        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
-        for line in meminfo:
-          match = memtotal_re.match(line)
-          if not match:
-            continue
-          # Allow 8 GB per link on Linux because Gold is quite memory-hungry.
-          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
-    return 1
-  elif sys.platform == 'darwin':
-    try:
-      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
-      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
-      # 4GB per ld process allows for some more bloat.
-      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
-    except:
-      return 1
-  else:
-    # TODO(scottmg): Implement this for other platforms.
-    return 1
-
-
-def _GetWinLinkRuleNameSuffix(embed_manifest):
-  """Returns the suffix used to select an appropriate linking rule depending on
-  whether the manifest embedding is enabled."""
-  return '_embed' if embed_manifest else ''
-
-
-def _AddWinLinkRules(master_ninja, embed_manifest):
-  """Adds link rules for Windows platform to |master_ninja|."""
-  def FullLinkCommand(ldcmd, out, binary_type):
-    resource_name = {
-      'exe': '1',
-      'dll': '2',
-    }[binary_type]
-    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
-           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
-           '$manifests' % {
-               'python': sys.executable,
-               'out': out,
-               'ldcmd': ldcmd,
-               'resname': resource_name,
-               'embed': embed_manifest }
-  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
-  use_separate_mspdbsrv = (
-      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
-  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
-  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
-            '$ld /nologo $implibflag /DLL /OUT:$binary '
-            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
-  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
-  master_ninja.rule('solink' + rule_name_suffix,
-                    description=dlldesc, command=dllcmd,
-                    rspfile='$binary.rsp',
-                    rspfile_content='$libs $in_newline $ldflags',
-                    restat=True,
-                    pool='link_pool')
-  master_ninja.rule('solink_module' + rule_name_suffix,
-                    description=dlldesc, command=dllcmd,
-                    rspfile='$binary.rsp',
-                    rspfile_content='$libs $in_newline $ldflags',
-                    restat=True,
-                    pool='link_pool')
-  # Note that ldflags goes at the end so that it has the option of
-  # overriding default settings earlier in the command line.
-  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
-             '$ld /nologo /OUT:$binary @$binary.rsp' %
-              (sys.executable, use_separate_mspdbsrv))
-  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
-  master_ninja.rule('link' + rule_name_suffix,
-                    description='LINK%s $binary' % rule_name_suffix.upper(),
-                    command=exe_cmd,
-                    rspfile='$binary.rsp',
-                    rspfile_content='$in_newline $libs $ldflags',
-                    pool='link_pool')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
-                            config_name):
-  options = params['options']
-  flavor = gyp.common.GetFlavor(params)
-  generator_flags = params.get('generator_flags', {})
-
-  # build_dir: relative path from source root to our output files.
-  # e.g. "out/Debug"
-  build_dir = os.path.normpath(
-      os.path.join(ComputeOutputDir(params), config_name))
-
-  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
-  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
-  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
-
-  # Put build-time support tools in out/{config_name}.
-  gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
-
-  # Grab make settings for CC/CXX.
-  # The rules are
-  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
-  #   gyp, the environment variable.
-  # - If there is no 'make_global_settings' for CC.host/CXX.host or
-  #   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
-  #   to cc/cxx.
-  if flavor == 'win':
-    ar = 'lib.exe'
-    # cc and cxx must be set to the correct architecture by overriding with one
-    # of cl_x86 or cl_x64 below.
-    cc = 'UNSET'
-    cxx = 'UNSET'
-    ld = 'link.exe'
-    ld_host = '$ld'
-  else:
-    ar = 'ar'
-    cc = 'cc'
-    cxx = 'c++'
-    ld = '$cc'
-    ldxx = '$cxx'
-    ld_host = '$cc_host'
-    ldxx_host = '$cxx_host'
-
-  ar_host = ar
-  cc_host = None
-  cxx_host = None
-  cc_host_global_setting = None
-  cxx_host_global_setting = None
-  clang_cl = None
-  nm = 'nm'
-  nm_host = 'nm'
-  readelf = 'readelf'
-  readelf_host = 'readelf'
-
-  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
-  make_global_settings = data[build_file].get('make_global_settings', [])
-  build_to_root = gyp.common.InvertRelativePath(build_dir,
-                                                options.toplevel_dir)
-  wrappers = {}
-  for key, value in make_global_settings:
-    if key == 'AR':
-      ar = os.path.join(build_to_root, value)
-    if key == 'AR.host':
-      ar_host = os.path.join(build_to_root, value)
-    if key == 'CC':
-      cc = os.path.join(build_to_root, value)
-      if cc.endswith('clang-cl'):
-        clang_cl = cc
-    if key == 'CXX':
-      cxx = os.path.join(build_to_root, value)
-    if key == 'CC.host':
-      cc_host = os.path.join(build_to_root, value)
-      cc_host_global_setting = value
-    if key == 'CXX.host':
-      cxx_host = os.path.join(build_to_root, value)
-      cxx_host_global_setting = value
-    if key == 'LD':
-      ld = os.path.join(build_to_root, value)
-    if key == 'LD.host':
-      ld_host = os.path.join(build_to_root, value)
-    if key == 'NM':
-      nm = os.path.join(build_to_root, value)
-    if key == 'NM.host':
-      nm_host = os.path.join(build_to_root, value)
-    if key == 'READELF':
-      readelf = os.path.join(build_to_root, value)
-    if key == 'READELF.host':
-      readelf_host = os.path.join(build_to_root, value)
-    if key.endswith('_wrapper'):
-      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
-
-  # Support wrappers from environment variables too.
-  for key, value in os.environ.iteritems():
-    if key.lower().endswith('_wrapper'):
-      key_prefix = key[:-len('_wrapper')]
-      key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
-      wrappers[key_prefix] = os.path.join(build_to_root, value)
-
-  mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
-  if mac_toolchain_dir:
-    wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
-
-  if flavor == 'win':
-    configs = [target_dicts[qualified_target]['configurations'][config_name]
-               for qualified_target in target_list]
-    shared_system_includes = None
-    if not generator_flags.get('ninja_use_custom_environment_files', 0):
-      shared_system_includes = \
-          gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
-              configs, generator_flags)
-    cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
-        toplevel_build, generator_flags, shared_system_includes, OpenOutput)
-    for arch, path in sorted(cl_paths.iteritems()):
-      if clang_cl:
-        # If we have selected clang-cl, use that instead.
-        path = clang_cl
-      command = CommandWithWrapper('CC', wrappers,
-          QuoteShellArgument(path, 'win'))
-      if clang_cl:
-        # Use clang-cl to cross-compile for x86 or x86_64.
-        command += (' -m32' if arch == 'x86' else ' -m64')
-      master_ninja.variable('cl_' + arch, command)
-
-  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
-  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
-  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
-  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
-
-  if flavor == 'win':
-    master_ninja.variable('ld', ld)
-    master_ninja.variable('idl', 'midl.exe')
-    master_ninja.variable('ar', ar)
-    master_ninja.variable('rc', 'rc.exe')
-    master_ninja.variable('ml_x86', 'ml.exe')
-    master_ninja.variable('ml_x64', 'ml64.exe')
-    master_ninja.variable('mt', 'mt.exe')
-  else:
-    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
-    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
-    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
-    if flavor != 'mac':
-      # Mac does not use readelf/nm for .TOC generation, so avoid polluting
-      # the master ninja with extra unused variables.
-      master_ninja.variable(
-          'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
-      master_ninja.variable(
-          'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
-
-  if generator_supports_multiple_toolsets:
-    if not cc_host:
-      cc_host = cc
-    if not cxx_host:
-      cxx_host = cxx
-
-    master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
-    master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
-    master_ninja.variable('readelf_host',
-                          GetEnvironFallback(['READELF_host'], readelf_host))
-    cc_host = GetEnvironFallback(['CC_host'], cc_host)
-    cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
-
-    # The environment variable could be used in 'make_global_settings', like
-    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform them here.
-    if '$(CC)' in cc_host and cc_host_global_setting:
-      cc_host = cc_host_global_setting.replace('$(CC)', cc)
-    if '$(CXX)' in cxx_host and cxx_host_global_setting:
-      cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
-    master_ninja.variable('cc_host',
-                          CommandWithWrapper('CC.host', wrappers, cc_host))
-    master_ninja.variable('cxx_host',
-                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
-    if flavor == 'win':
-      master_ninja.variable('ld_host', ld_host)
-    else:
-      master_ninja.variable('ld_host', CommandWithWrapper(
-          'LINK', wrappers, ld_host))
-      master_ninja.variable('ldxx_host', CommandWithWrapper(
-          'LINK', wrappers, ldxx_host))
-
-  master_ninja.newline()
-
-  master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
-  master_ninja.newline()
-
-  deps = 'msvc' if flavor == 'win' else 'gcc'
-
-  if flavor != 'win':
-    master_ninja.rule(
-      'cc',
-      description='CC $out',
-      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
-              '$cflags_pch_c -c $in -o $out'),
-      depfile='$out.d',
-      deps=deps)
-    master_ninja.rule(
-      'cc_s',
-      description='CC $out',
-      command=('$cc $defines $includes $cflags $cflags_c '
-              '$cflags_pch_c -c $in -o $out'))
-    master_ninja.rule(
-      'cxx',
-      description='CXX $out',
-      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
-              '$cflags_pch_cc -c $in -o $out'),
-      depfile='$out.d',
-      deps=deps)
-  else:
-    # TODO(scottmg) Separate pdb names is a test to see if it works around
-    # http://crbug.com/142362. It seems there's a race between the creation of
-    # the .pdb by the precompiled header step for .cc and the compilation of
-    # .c files. This should be handled by mspdbsrv, but rarely errors out with
-    #   c1xx : fatal error C1033: cannot open program database
-    # By making the rules target separate pdb files this might be avoided.
-    cc_command = ('ninja -t msvc -e $arch ' +
-                  '-- '
-                  '$cc /nologo /showIncludes /FC '
-                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
-    cxx_command = ('ninja -t msvc -e $arch ' +
-                   '-- '
-                   '$cxx /nologo /showIncludes /FC '
-                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
-    master_ninja.rule(
-      'cc',
-      description='CC $out',
-      command=cc_command,
-      rspfile='$out.rsp',
-      rspfile_content='$defines $includes $cflags $cflags_c',
-      deps=deps)
-    master_ninja.rule(
-      'cxx',
-      description='CXX $out',
-      command=cxx_command,
-      rspfile='$out.rsp',
-      rspfile_content='$defines $includes $cflags $cflags_cc',
-      deps=deps)
-    master_ninja.rule(
-      'idl',
-      description='IDL $in',
-      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
-               '$tlb $h $dlldata $iid $proxy $in '
-               '$midl_includes $idlflags' % sys.executable))
-    master_ninja.rule(
-      'rc',
-      description='RC $in',
-      # Note: $in must be last otherwise rc.exe complains.
-      command=('%s gyp-win-tool rc-wrapper '
-               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
-               sys.executable))
-    master_ninja.rule(
-      'asm',
-      description='ASM $out',
-      command=('%s gyp-win-tool asm-wrapper '
-               '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
-               sys.executable))
-
-  if flavor != 'mac' and flavor != 'win':
-    master_ninja.rule(
-      'alink',
-      description='AR $out',
-      command='rm -f $out && $ar rcs $arflags $out $in')
-    master_ninja.rule(
-      'alink_thin',
-      description='AR $out',
-      command='rm -f $out && $ar rcsT $arflags $out $in')
-
-    # This allows targets that only need to depend on $lib's API to declare an
-    # order-only dependency on $lib.TOC and avoid relinking such downstream
-    # dependencies when $lib changes only in non-public ways.
-    # The resulting string leaves an uninterpolated %(suffix)s which
-    # is used in the final substitution below.
-    mtime_preserving_solink_base = (
-        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
-        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
-        '%(solink)s && %(extract_toc)s > $lib.tmp && '
-        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
-        'fi; fi'
-        % { 'solink':
-              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
-            'extract_toc':
-              ('{ $readelf -d $lib | grep SONAME ; '
-               '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
-
-    master_ninja.rule(
-      'solink',
-      description='SOLINK $lib',
-      restat=True,
-      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
-      rspfile='$link_file_list',
-      rspfile_content=
-          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
-      pool='link_pool')
-    master_ninja.rule(
-      'solink_module',
-      description='SOLINK(module) $lib',
-      restat=True,
-      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
-      rspfile='$link_file_list',
-      rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
-      pool='link_pool')
-    master_ninja.rule(
-      'link',
-      description='LINK $out',
-      command=('$ld $ldflags -o $out '
-               '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
-      pool='link_pool')
-  elif flavor == 'win':
-    master_ninja.rule(
-        'alink',
-        description='LIB $out',
-        command=('%s gyp-win-tool link-wrapper $arch False '
-                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
-                 sys.executable),
-        rspfile='$out.rsp',
-        rspfile_content='$in_newline $libflags')
-    _AddWinLinkRules(master_ninja, embed_manifest=True)
-    _AddWinLinkRules(master_ninja, embed_manifest=False)
-  else:
-    master_ninja.rule(
-      'objc',
-      description='OBJC $out',
-      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
-               '$cflags_pch_objc -c $in -o $out'),
-      depfile='$out.d',
-      deps=deps)
-    master_ninja.rule(
-      'objcxx',
-      description='OBJCXX $out',
-      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
-               '$cflags_pch_objcc -c $in -o $out'),
-      depfile='$out.d',
-      deps=deps)
-    master_ninja.rule(
-      'alink',
-      description='LIBTOOL-STATIC $out, POSTBUILDS',
-      command='rm -f $out && '
-              './gyp-mac-tool filter-libtool libtool $libtool_flags '
-              '-static -o $out $in'
-              '$postbuilds')
-    master_ninja.rule(
-      'lipo',
-      description='LIPO $out, POSTBUILDS',
-      command='rm -f $out && lipo -create $in -output $out$postbuilds')
-    master_ninja.rule(
-      'solipo',
-      description='SOLIPO $out, POSTBUILDS',
-      command=(
-          'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
-          '%(extract_toc)s > $lib.TOC'
-          % { 'extract_toc':
-                '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
-                'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
-
-
-    # Record the public interface of $lib in $lib.TOC. See the corresponding
-    # comment in the posix section above for details.
-    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
-    mtime_preserving_solink_base = (
-        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
-             # Always force dependent targets to relink if this library
-             # reexports something. Handling this correctly would require
-             # recursive TOC dumping but this is rare in practice, so punt.
-             'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
-          '%(solink)s && %(extract_toc)s > $lib.TOC; '
-        'else '
-          '%(solink)s && %(extract_toc)s > $lib.tmp && '
-          'if ! cmp -s $lib.tmp $lib.TOC; then '
-            'mv $lib.tmp $lib.TOC ; '
-          'fi; '
-        'fi'
-        % { 'solink': solink_base,
-            'extract_toc':
-              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
-              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
-
-
-    solink_suffix = '@$link_file_list$postbuilds'
-    master_ninja.rule(
-      'solink',
-      description='SOLINK $lib, POSTBUILDS',
-      restat=True,
-      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
-                                              'type': '-shared'},
-      rspfile='$link_file_list',
-      rspfile_content='$in $solibs $libs',
-      pool='link_pool')
-    master_ninja.rule(
-      'solink_notoc',
-      description='SOLINK $lib, POSTBUILDS',
-      restat=True,
-      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
-      rspfile='$link_file_list',
-      rspfile_content='$in $solibs $libs',
-      pool='link_pool')
-
-    master_ninja.rule(
-      'solink_module',
-      description='SOLINK(module) $lib, POSTBUILDS',
-      restat=True,
-      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
-                                              'type': '-bundle'},
-      rspfile='$link_file_list',
-      rspfile_content='$in $solibs $libs',
-      pool='link_pool')
-    master_ninja.rule(
-      'solink_module_notoc',
-      description='SOLINK(module) $lib, POSTBUILDS',
-      restat=True,
-      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
-      rspfile='$link_file_list',
-      rspfile_content='$in $solibs $libs',
-      pool='link_pool')
-
-    master_ninja.rule(
-      'link',
-      description='LINK $out, POSTBUILDS',
-      command=('$ld $ldflags -o $out '
-               '$in $solibs $libs$postbuilds'),
-      pool='link_pool')
-    master_ninja.rule(
-      'preprocess_infoplist',
-      description='PREPROCESS INFOPLIST $out',
-      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
-               'plutil -convert xml1 $out $out'))
-    master_ninja.rule(
-      'copy_infoplist',
-      description='COPY INFOPLIST $in',
-      command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
-    master_ninja.rule(
-      'merge_infoplist',
-      description='MERGE INFOPLISTS $in',
-      command='$env ./gyp-mac-tool merge-info-plist $out $in')
-    master_ninja.rule(
-      'compile_xcassets',
-      description='COMPILE XCASSETS $in',
-      command='$env ./gyp-mac-tool compile-xcassets $keys $in')
-    master_ninja.rule(
-      'compile_ios_framework_headers',
-      description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in',
-      command='$env ./gyp-mac-tool compile-ios-framework-header-map $out '
-              '$framework $in && $env ./gyp-mac-tool '
-              'copy-ios-framework-headers $framework $copy_headers')
-    master_ninja.rule(
-      'mac_tool',
-      description='MACTOOL $mactool_cmd $in',
-      command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
-    master_ninja.rule(
-      'package_framework',
-      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
-      command='./gyp-mac-tool package-framework $out $version$postbuilds '
-              '&& touch $out')
-    master_ninja.rule(
-      'package_ios_framework',
-      description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS',
-      command='./gyp-mac-tool package-ios-framework $out $postbuilds '
-              '&& touch $out')
-  if flavor == 'win':
-    master_ninja.rule(
-      'stamp',
-      description='STAMP $out',
-      command='%s gyp-win-tool stamp $out' % sys.executable)
-    master_ninja.rule(
-      'copy',
-      description='COPY $in $out',
-      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
-  else:
-    master_ninja.rule(
-      'stamp',
-      description='STAMP $out',
-      command='${postbuilds}touch $out')
-    master_ninja.rule(
-      'copy',
-      description='COPY $in $out',
-      command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
-  master_ninja.newline()
-
-  all_targets = set()
-  for build_file in params['build_files']:
-    for target in gyp.common.AllTargets(target_list,
-                                        target_dicts,
-                                        os.path.normpath(build_file)):
-      all_targets.add(target)
-  all_outputs = set()
-
-  # target_outputs is a map from qualified target name to a Target object.
-  target_outputs = {}
-  # target_short_names is a map from target short name to a list of Target
-  # objects.
-  target_short_names = {}
-
-  # Short names of targets that were skipped because they didn't contain
-  # anything interesting.
-  # NOTE: there may be overlap between this and non_empty_target_names.
-  empty_target_names = set()
-
-  # Set of non-empty short target names.
-  # NOTE: there may be overlap between this and empty_target_names.
-  non_empty_target_names = set()
-
-  for qualified_target in target_list:
-    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
-    build_file, name, toolset = \
-        gyp.common.ParseQualifiedTarget(qualified_target)
-
-    this_make_global_settings = data[build_file].get('make_global_settings', [])
-    assert make_global_settings == this_make_global_settings, (
-        "make_global_settings needs to be the same for all targets. %s vs. %s" %
-        (this_make_global_settings, make_global_settings))
-
-    spec = target_dicts[qualified_target]
-    if flavor == 'mac':
-      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
-    # If build_file is a symlink, we must not follow it because there's a chance
-    # it could point to a path above toplevel_dir, and we cannot correctly deal
-    # with that case at the moment.
-    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
-                                         False)
-
-    qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
-                                                           toolset)
-    hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
-
-    base_path = os.path.dirname(build_file)
-    obj = 'obj'
-    if toolset != 'target':
-      obj += '.' + toolset
-    output_file = os.path.join(obj, base_path, name + '.ninja')
-
-    ninja_output = StringIO()
-    writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
-                         ninja_output,
-                         toplevel_build, output_file,
-                         flavor, toplevel_dir=options.toplevel_dir)
-
-    target = writer.WriteSpec(spec, config_name, generator_flags)
-
-    if ninja_output.tell() > 0:
-      # Only create files for ninja files that actually have contents.
-      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
-        ninja_file.write(ninja_output.getvalue())
-      ninja_output.close()
-      master_ninja.subninja(output_file)
-
-    if target:
-      if name != target.FinalOutput() and spec['toolset'] == 'target':
-        target_short_names.setdefault(name, []).append(target)
-      target_outputs[qualified_target] = target
-      if qualified_target in all_targets:
-        all_outputs.add(target.FinalOutput())
-      non_empty_target_names.add(name)
-    else:
-      empty_target_names.add(name)
-
-  if target_short_names:
-    # Write a short name to build this target.  This benefits both the
-    # "build chrome" case as well as the gyp tests, which expect to be
-    # able to run actions and build libraries by their short name.
-    master_ninja.newline()
-    master_ninja.comment('Short names for targets.')
-    for short_name in sorted(target_short_names):
-      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
-                                               target_short_names[short_name]])
-
-  # Write phony targets for any empty targets that weren't written yet. As
-  # short names are not necessarily unique, only do this for short names that
-  # haven't already been output for another target.
-  empty_target_names = empty_target_names - non_empty_target_names
-  if empty_target_names:
-    master_ninja.newline()
-    master_ninja.comment('Empty targets (output for completeness).')
-    for name in sorted(empty_target_names):
-      master_ninja.build(name, 'phony')
-
-  if all_outputs:
-    master_ninja.newline()
-    master_ninja.build('all', 'phony', sorted(all_outputs))
-    master_ninja.default(generator_flags.get('default_target', 'all'))
-
-  master_ninja_file.close()
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-  for config in configurations:
-    builddir = os.path.join(options.toplevel_dir, 'out', config)
-    arguments = ['ninja', '-C', builddir]
-    print 'Building [%s]: %s' % (config, arguments)
-    subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
-  # Ignore the interrupt signal so that the parent process catches it and
-  # kills all multiprocessing children.
-  signal.signal(signal.SIGINT, signal.SIG_IGN)
-
-  (target_list, target_dicts, data, params, config_name) = arglist
-  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  # Update target_dicts for iOS device builds.
-  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
-      target_dicts)
-
-  user_config = params.get('generator_flags', {}).get('config', None)
-  if gyp.common.GetFlavor(params) == 'win':
-    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
-    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
-        target_list, target_dicts, generator_default_variables)
-
-  if user_config:
-    GenerateOutputForConfig(target_list, target_dicts, data, params,
-                            user_config)
-  else:
-    config_names = target_dicts[target_list[0]]['configurations'].keys()
-    if params['parallel']:
-      try:
-        pool = multiprocessing.Pool(len(config_names))
-        arglists = []
-        for config_name in config_names:
-          arglists.append(
-              (target_list, target_dicts, data, params, config_name))
-        pool.map(CallGenerateOutputForConfig, arglists)
-      except KeyboardInterrupt, e:
-        pool.terminate()
-        raise e
-    else:
-      for config_name in config_names:
-        GenerateOutputForConfig(target_list, target_dicts, data, params,
-                                config_name)
diff --git a/tools/gyp/pylib/gyp/generator/ninja_test.py b/tools/gyp/pylib/gyp/generator/ninja_test.py
deleted file mode 100644
index 1767b2f..0000000
--- a/tools/gyp/pylib/gyp/generator/ninja_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the ninja.py file. """
-
-import gyp.generator.ninja as ninja
-import unittest
-import StringIO
-import sys
-import TestCommon
-
-
-class TestPrefixesAndSuffixes(unittest.TestCase):
-  def test_BinaryNamesWindows(self):
-    # These cannot run on non-Windows as they require a VS installation to
-    # correctly handle variable expansion.
-    if sys.platform.startswith('win'):
-      writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
-          'build.ninja', 'win')
-      spec = { 'target_name': 'wee' }
-      self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
-          endswith('.exe'))
-      self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
-          endswith('.dll'))
-      self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
-          endswith('.lib'))
-
-  def test_BinaryNamesLinux(self):
-    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
-        'build.ninja', 'linux')
-    spec = { 'target_name': 'wee' }
-    self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
-                                                            'executable'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
-        startswith('lib'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
-        startswith('lib'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
-        endswith('.so'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
-        endswith('.a'))
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py
deleted file mode 100644
index db99d6a..0000000
--- a/tools/gyp/pylib/gyp/generator/xcode.py
+++ /dev/null
@@ -1,1311 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import filecmp
-import gyp.common
-import gyp.xcodeproj_file
-import gyp.xcode_ninja
-import errno
-import os
-import sys
-import posixpath
-import re
-import shutil
-import subprocess
-import tempfile
-
-
-# Project files generated by this module will use _intermediate_var as a
-# custom Xcode setting whose value is a DerivedSources-like directory that's
-# project-specific and configuration-specific.  The normal choice,
-# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
-# as it is likely that multiple targets within a single project file will want
-# to access the same set of generated files.  The other option,
-# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
-# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
-# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
-_intermediate_var = 'INTERMEDIATE_DIR'
-
-# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
-# targets that share the same BUILT_PRODUCTS_DIR.
-_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
-
-_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
-
-generator_default_variables = {
-  'EXECUTABLE_PREFIX': '',
-  'EXECUTABLE_SUFFIX': '',
-  'STATIC_LIB_PREFIX': 'lib',
-  'SHARED_LIB_PREFIX': 'lib',
-  'STATIC_LIB_SUFFIX': '.a',
-  'SHARED_LIB_SUFFIX': '.dylib',
-  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
-  # It is specific to each build environment.  It is only guaranteed to exist
-  # and be constant within the context of a project, corresponding to a single
-  # input file.  Some build environments may allow their intermediate directory
-  # to be shared on a wider scale, but this is not guaranteed.
-  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
-  'OS': 'mac',
-  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
-  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
-  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
-  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
-  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
-  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
-  'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
-  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
-  'CONFIGURATION_NAME': '$(CONFIGURATION)',
-}
-
-# The Xcode-specific sections that hold paths.
-generator_additional_path_sections = [
-  'mac_bundle_resources',
-  'mac_framework_headers',
-  'mac_framework_private_headers',
-  # 'mac_framework_dirs', input already handles _dirs endings.
-]
-
-# The Xcode-specific keys that exist on targets and aren't moved down to
-# configurations.
-generator_additional_non_configuration_keys = [
-  'ios_app_extension',
-  'ios_watch_app',
-  'ios_watchkit_extension',
-  'mac_bundle',
-  'mac_bundle_resources',
-  'mac_framework_headers',
-  'mac_framework_private_headers',
-  'mac_xctest_bundle',
-  'mac_xcuitest_bundle',
-  'xcode_create_dependents_test_runner',
-]
-
-# We want to let any rules apply to files that are resources also.
-generator_extra_sources_for_rules = [
-  'mac_bundle_resources',
-  'mac_framework_headers',
-  'mac_framework_private_headers',
-]
-
-generator_filelist_paths = None
-
-# Xcode's standard set of library directories, which don't need to be duplicated
-# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
-xcode_standard_library_dirs = frozenset([
-  '$(SDKROOT)/usr/lib',
-  '$(SDKROOT)/usr/local/lib',
-])
-
-def CreateXCConfigurationList(configuration_names):
-  xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
-  if len(configuration_names) == 0:
-    configuration_names = ['Default']
-  for configuration_name in configuration_names:
-    xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
-        'name': configuration_name})
-    xccl.AppendProperty('buildConfigurations', xcbc)
-  xccl.SetProperty('defaultConfigurationName', configuration_names[0])
-  return xccl
-
-
-class XcodeProject(object):
-  def __init__(self, gyp_path, path, build_file_dict):
-    self.gyp_path = gyp_path
-    self.path = path
-    self.project = gyp.xcodeproj_file.PBXProject(path=path)
-    projectDirPath = gyp.common.RelativePath(
-                         os.path.dirname(os.path.abspath(self.gyp_path)),
-                         os.path.dirname(path) or '.')
-    self.project.SetProperty('projectDirPath', projectDirPath)
-    self.project_file = \
-        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
-    self.build_file_dict = build_file_dict
-
-    # TODO(mark): add destructor that cleans up self.path if created_dir is
-    # True and things didn't complete successfully.  Or do something even
-    # better with "try"?
-    self.created_dir = False
-    try:
-      os.makedirs(self.path)
-      self.created_dir = True
-    except OSError, e:
-      if e.errno != errno.EEXIST:
-        raise
-
-  def Finalize1(self, xcode_targets, serialize_all_tests):
-    # Collect a list of all of the build configuration names used by the
-    # various targets in the file.  It is strongly advised that every target
-    # in a project (even across multiple project files) use the same set of
-    # configuration names.
-    configurations = []
-    for xct in self.project.GetProperty('targets'):
-      xccl = xct.GetProperty('buildConfigurationList')
-      xcbcs = xccl.GetProperty('buildConfigurations')
-      for xcbc in xcbcs:
-        name = xcbc.GetProperty('name')
-        if name not in configurations:
-          configurations.append(name)
-
-    # Replace the XCConfigurationList attached to the PBXProject object with
-    # a new one specifying all of the configuration names used by the various
-    # targets.
-    try:
-      xccl = CreateXCConfigurationList(configurations)
-      self.project.SetProperty('buildConfigurationList', xccl)
-    except:
-      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
-      raise
-
-    # The need for this setting is explained above where _intermediate_var is
-    # defined.  The comments below about wanting to avoid project-wide build
-    # settings apply here too, but this needs to be set on a project-wide basis
-    # so that files relative to the _intermediate_var setting can be displayed
-    # properly in the Xcode UI.
-    #
-    # Note that for configuration-relative files such as anything relative to
-    # _intermediate_var, for the purposes of UI tree view display, Xcode will
-    # only resolve the configuration name once, when the project file is
-    # opened.  If the active build configuration is changed, the project file
-    # must be closed and reopened if it is desired for the tree view to update.
-    # This is filed as Apple radar 6588391.
-    xccl.SetBuildSetting(_intermediate_var,
-                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
-    xccl.SetBuildSetting(_shared_intermediate_var,
-                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
-
-    # Set user-specified project-wide build settings and config files.  This
-    # is intended to be used very sparingly.  Really, almost everything should
-    # go into target-specific build settings sections.  The project-wide
-    # settings are only intended to be used in cases where Xcode attempts to
-    # resolve variable references in a project context as opposed to a target
-    # context, such as when resolving sourceTree references while building up
-    # the tree view for UI display.
-    # Any values set globally are applied to all configurations, then any
-    # per-configuration values are applied.
-    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
-      xccl.SetBuildSetting(xck, xcv)
-    if 'xcode_config_file' in self.build_file_dict:
-      config_ref = self.project.AddOrGetFileInRootGroup(
-          self.build_file_dict['xcode_config_file'])
-      xccl.SetBaseConfiguration(config_ref)
-    build_file_configurations = self.build_file_dict.get('configurations', {})
-    if build_file_configurations:
-      for config_name in configurations:
-        build_file_configuration_named = \
-            build_file_configurations.get(config_name, {})
-        if build_file_configuration_named:
-          xcc = xccl.ConfigurationNamed(config_name)
-          for xck, xcv in build_file_configuration_named.get('xcode_settings',
-                                                             {}).iteritems():
-            xcc.SetBuildSetting(xck, xcv)
-          if 'xcode_config_file' in build_file_configuration_named:
-            config_ref = self.project.AddOrGetFileInRootGroup(
-                build_file_configurations[config_name]['xcode_config_file'])
-            xcc.SetBaseConfiguration(config_ref)
-
-    # Sort the targets based on how they appeared in the input.
-    # TODO(mark): Like a lot of other things here, this assumes internal
-    # knowledge of PBXProject - in this case, of its "targets" property.
-
-    # ordinary_targets are ordinary targets that are already in the project
-    # file. run_test_targets are the targets that run unittests and should be
-    # used for the Run All Tests target.  support_targets are the action/rule
-    # targets used by GYP file targets, just kept for the assert check.
-    ordinary_targets = []
-    run_test_targets = []
-    support_targets = []
-
-    # targets is the full list of targets in the project.
-    targets = []
-
-    # Does the project define its own "all" target?
-    has_custom_all = False
-
-    # targets_for_all is the list of ordinary_targets that should be listed
-    # in this project's "All" target.  It includes each non_runtest_target
-    # that does not have suppress_wildcard set.
-    targets_for_all = []
-
-    for target in self.build_file_dict['targets']:
-      target_name = target['target_name']
-      toolset = target['toolset']
-      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
-                                                    toolset)
-      xcode_target = xcode_targets[qualified_target]
-      # Make sure that the target being added to the sorted list is already in
-      # the unsorted list.
-      assert xcode_target in self.project._properties['targets']
-      targets.append(xcode_target)
-      ordinary_targets.append(xcode_target)
-      if xcode_target.support_target:
-        support_targets.append(xcode_target.support_target)
-        targets.append(xcode_target.support_target)
-
-      if not int(target.get('suppress_wildcard', False)):
-        targets_for_all.append(xcode_target)
-
-      if target_name.lower() == 'all':
-        has_custom_all = True
-
-      # If this target has a 'run_as' attribute, add its target to the
-      # targets, and add it to the test targets.
-      if target.get('run_as'):
-        # Make a target to run something.  It should have one
-        # dependency, the parent xcode target.
-        xccl = CreateXCConfigurationList(configurations)
-        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
-              'name':                   'Run ' + target_name,
-              'productName':            xcode_target.GetProperty('productName'),
-              'buildConfigurationList': xccl,
-            },
-            parent=self.project)
-        run_target.AddDependency(xcode_target)
-
-        command = target['run_as']
-        script = ''
-        if command.get('working_directory'):
-          script = script + 'cd "%s"\n' % \
-                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
-                       command.get('working_directory'))
-
-        if command.get('environment'):
-          script = script + "\n".join(
-            ['export %s="%s"' %
-             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
-             for (key, val) in command.get('environment').iteritems()]) + "\n"
-
-        # Some tests end up using sockets, files on disk, etc. and can get
-        # confused if more than one test runs at a time.  The generator
-        # flag 'xcode_serialize_all_test_runs' controls whether all tests
-        # are forced to run serially.  It defaults to True.  To get serial
-        # runs, this little bit of Python does the same as the Linux flock
-        # utility to make sure only one test runs at a time.
-        command_prefix = ''
-        if serialize_all_tests:
-          command_prefix = \
-"""python -c "import fcntl, subprocess, sys
-file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
-fcntl.flock(file.fileno(), fcntl.LOCK_EX)
-sys.exit(subprocess.call(sys.argv[1:]))" """
-
-        # If we were unable to exec for some reason, we want to exit with an
-        # error, and fix up variable references to use shell syntax instead
-        # of Xcode syntax.
-        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
-                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
-                     gyp.common.EncodePOSIXShellList(command.get('action')))
-
-        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
-              'shellScript':      script,
-              'showEnvVarsInLog': 0,
-            })
-        run_target.AppendProperty('buildPhases', ssbp)
-
-        # Add the run target to the project file.
-        targets.append(run_target)
-        run_test_targets.append(run_target)
-        xcode_target.test_runner = run_target
-
-
-    # Make sure that the list of targets being replaced is the same length as
-    # the one replacing it, but allow for the added test runner targets.
-    assert len(self.project._properties['targets']) == \
-      len(ordinary_targets) + len(support_targets)
-
-    self.project._properties['targets'] = targets
-
-    # Get rid of unnecessary levels of depth in groups like the Source group.
-    self.project.RootGroupsTakeOverOnlyChildren(True)
-
-    # Sort the groups nicely.  Do this after sorting the targets, because the
-    # Products group is sorted based on the order of the targets.
-    self.project.SortGroups()
-
-    # Create an "All" target if there's more than one target in this project
-    # file and the project didn't define its own "All" target.  Put a generated
-    # "All" target first so that people opening up the project for the first
-    # time will build everything by default.
-    if len(targets_for_all) > 1 and not has_custom_all:
-      xccl = CreateXCConfigurationList(configurations)
-      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
-          {
-            'buildConfigurationList': xccl,
-            'name':                   'All',
-          },
-          parent=self.project)
-
-      for target in targets_for_all:
-        all_target.AddDependency(target)
-
-      # TODO(mark): This is evil because it relies on internal knowledge of
-      # PBXProject._properties.  It's important to get the "All" target first,
-      # though.
-      self.project._properties['targets'].insert(0, all_target)
-
-    # The same, but for run_test_targets.
-    if len(run_test_targets) > 1:
-      xccl = CreateXCConfigurationList(configurations)
-      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
-          {
-            'buildConfigurationList': xccl,
-            'name':                   'Run All Tests',
-          },
-          parent=self.project)
-      for run_test_target in run_test_targets:
-        run_all_tests_target.AddDependency(run_test_target)
-
-      # Insert after the "All" target, which must exist if there is more than
-      # one run_test_target.
-      self.project._properties['targets'].insert(1, run_all_tests_target)
-
-  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
-    # Finalize2 needs to happen in a separate step because the process of
-    # updating references to other projects depends on the ordering of targets
-    # within remote project files.  Finalize1 is responsible for sorting duty,
-    # and once all project files are sorted, Finalize2 can come in and update
-    # these references.
-
-    # To support making a "test runner" target that will run all the tests
-    # that are direct dependents of any given target, we look for
-    # xcode_create_dependents_test_runner being set on an Aggregate target,
-    # and generate a second target that will run the test runners found under
-    # the marked target.
-    for bf_tgt in self.build_file_dict['targets']:
-      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
-        tgt_name = bf_tgt['target_name']
-        toolset = bf_tgt['toolset']
-        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
-                                                      tgt_name, toolset)
-        xcode_target = xcode_targets[qualified_target]
-        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
-          # Collect all the run test targets.
-          all_run_tests = []
-          pbxtds = xcode_target.GetProperty('dependencies')
-          for pbxtd in pbxtds:
-            pbxcip = pbxtd.GetProperty('targetProxy')
-            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
-            if hasattr(dependency_xct, 'test_runner'):
-              all_run_tests.append(dependency_xct.test_runner)
-
-          # Directly depend on all the runners as they depend on the target
-          # that builds them.
-          if len(all_run_tests) > 0:
-            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
-                  'name':        'Run %s Tests' % tgt_name,
-                  'productName': tgt_name,
-                },
-                parent=self.project)
-            for run_test_target in all_run_tests:
-              run_all_target.AddDependency(run_test_target)
-
-            # Insert the test runner after the related target.
-            idx = self.project._properties['targets'].index(xcode_target)
-            self.project._properties['targets'].insert(idx + 1, run_all_target)
-
-    # Update all references to other projects, to make sure that the lists of
-    # remote products are complete.  Otherwise, Xcode will fill them in when
-    # it opens the project file, which will result in unnecessary diffs.
-    # TODO(mark): This is evil because it relies on internal knowledge of
-    # PBXProject._other_pbxprojects.
-    for other_pbxproject in self.project._other_pbxprojects.keys():
-      self.project.AddOrGetProjectReference(other_pbxproject)
-
-    self.project.SortRemoteProductReferences()
-
-    # Give everything an ID.
-    self.project_file.ComputeIDs()
-
-    # Make sure that no two objects in the project file have the same ID.  If
-    # multiple objects wind up with the same ID, upon loading the file, Xcode
-    # will only recognize one object (the last one in the file?) and the
-    # results are unpredictable.
-    self.project_file.EnsureNoIDCollisions()
-
-  def Write(self):
-    # Write the project file to a temporary location first.  Xcode watches for
-    # changes to the project file and presents a UI sheet offering to reload
-    # the project when it does change.  However, in some cases, especially when
-    # multiple projects are open or when Xcode is busy, things don't work so
-    # seamlessly.  Sometimes, Xcode is able to detect that a project file has
-    # changed but can't unload it because something else is referencing it.
-    # To mitigate this problem, and to avoid even having Xcode present the UI
-    # sheet when an open project is rewritten for inconsequential changes, the
-    # project file is written to a temporary file in the xcodeproj directory
-    # first.  The new temporary file is then compared to the existing project
-    # file, if any.  If they differ, the new file replaces the old; otherwise,
-    # the new project file is simply deleted.  Xcode properly detects a file
-    # being renamed over an open project file as a change and so it remains
-    # able to present the "project file changed" sheet under this system.
-    # Writing to a temporary file first also avoids the possible problem of
-    # Xcode rereading an incomplete project file.
-    (output_fd, new_pbxproj_path) = \
-        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
-                         dir=self.path)
-
-    try:
-      output_file = os.fdopen(output_fd, 'wb')
-
-      self.project_file.Print(output_file)
-      output_file.close()
-
-      pbxproj_path = os.path.join(self.path, 'project.pbxproj')
-
-      same = False
-      try:
-        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
-      except OSError, e:
-        if e.errno != errno.ENOENT:
-          raise
-
-      if same:
-        # The new file is identical to the old one, just get rid of the new
-        # one.
-        os.unlink(new_pbxproj_path)
-      else:
-        # The new file is different from the old one, or there is no old one.
-        # Rename the new file to the permanent name.
-        #
-        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
-        # file that can only be read by the owner, regardless of the umask.
-        # There's no reason to not respect the umask here, which means that
-        # an extra hoop is required to fetch it and reset the new file's mode.
-        #
-        # No way to get the umask without setting a new one?  Set a safe one
-        # and then set it back to the old value.
-        umask = os.umask(077)
-        os.umask(umask)
-
-        os.chmod(new_pbxproj_path, 0666 & ~umask)
-        os.rename(new_pbxproj_path, pbxproj_path)
-
-    except Exception:
-      # Don't leave turds behind.  In fact, if this code was responsible for
-      # creating the xcodeproj directory, get rid of that too.
-      os.unlink(new_pbxproj_path)
-      if self.created_dir:
-        shutil.rmtree(self.path, True)
-      raise
-
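The write-to-temp / compare / rename-over pattern used by Write() above can be captured in a small standalone helper. The sketch below is only illustrative (write_if_changed is a hypothetical name and contents is assumed to be bytes); it mirrors the umask handling and the cleanup-on-error behavior of the generator:

import filecmp
import os
import tempfile

def write_if_changed(path, contents):
  # Write to a temporary file in the destination directory so the final
  # rename stays on the same filesystem and replaces the target atomically.
  fd, tmp_path = tempfile.mkstemp(prefix=os.path.basename(path) + '.',
                                  suffix='.tmp', dir=os.path.dirname(path))
  try:
    with os.fdopen(fd, 'wb') as f:
      f.write(contents)
    if os.path.exists(path) and filecmp.cmp(path, tmp_path, shallow=False):
      os.unlink(tmp_path)  # Identical: leave the existing file untouched.
      return False
    # mkstemp creates the file with mode 0600; widen it to 0666 minus the
    # umask, which is what a plain open() for writing would have produced.
    umask = os.umask(0)
    os.umask(umask)
    os.chmod(tmp_path, 0o666 & ~umask)
    os.rename(tmp_path, path)  # Rename over the old file (or create it).
    return True
  except Exception:
    if os.path.exists(tmp_path):
      os.unlink(tmp_path)  # Don't leave the temporary file behind.
    raise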
-
-def AddSourceToTarget(source, type, pbxp, xct):
-  # TODO(mark): Perhaps source_extensions and library_extensions can be made a
-  # little bit fancier.
-  source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
-
-  # .o is conceptually more of a "source" than a "library," but Xcode thinks
-  # of "sources" as things to compile and "libraries" (or "frameworks") as
-  # things to link with. Adding an object file to an Xcode target's frameworks
-  # phase works properly.
-  library_extensions = ['a', 'dylib', 'framework', 'o']
-
-  basename = posixpath.basename(source)
-  (root, ext) = posixpath.splitext(basename)
-  if ext:
-    ext = ext[1:].lower()
-
-  if ext in source_extensions and type != 'none':
-    xct.SourcesPhase().AddFile(source)
-  elif ext in library_extensions and type != 'none':
-    xct.FrameworksPhase().AddFile(source)
-  else:
-    # Files that aren't added to a sources or frameworks build phase can still
-    # go into the project file, just not as part of a build phase.
-    pbxp.AddOrGetFileInRootGroup(source)
-
-
-def AddResourceToTarget(resource, pbxp, xct):
-  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
-  # where it's used.
-  xct.ResourcesPhase().AddFile(resource)
-
-
-def AddHeaderToTarget(header, pbxp, xct, is_public):
-  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
-  # where it's used.
-  settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
-  xct.HeadersPhase().AddFile(header, settings)
-
-
-_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
-def ExpandXcodeVariables(string, expansions):
-  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
-
-  In some rare cases, it is appropriate to expand Xcode variables when a
-  project file is generated.  For any substring $(VAR) in string, if VAR is a
-  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
-  Any $(VAR) substring in string for which VAR is not a key in the expansions
-  dict will remain in the returned string.
-  """
-
-  matches = _xcode_variable_re.findall(string)
-  if matches == None:
-    return string
-
-  matches.reverse()
-  for match in matches:
-    (to_replace, variable) = match
-    if not variable in expansions:
-      continue
-
-    replacement = expansions[variable]
-    string = re.sub(re.escape(to_replace), replacement, string)
-
-  return string
-
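A quick illustration of that behavior with invented values: a name present in the expansions dict is substituted, while an unknown $(VAR) passes through untouched.

expansions = {'INPUT_FILE_BASE': 'two', 'INPUT_FILE_SUFFIX': '.ext'}
print(ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', expansions))
# -> two.cc
print(ExpandXcodeVariables('$(BUILT_PRODUCTS_DIR)/gen.h', expansions))
# -> $(BUILT_PRODUCTS_DIR)/gen.h  (not a key in expansions, so it survives)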
-
-_xcode_define_re = re.compile(r'([\\\"\' ])')
-def EscapeXcodeDefine(s):
-  """We must escape the defines that we give to XCode so that it knows not to
-     split on spaces and to respect backslash and quote literals. However, we
-     must not quote the define, or Xcode will incorrectly intepret variables
-     especially $(inherited)."""
-  return re.sub(_xcode_define_re, r'\\\1', s)
-
-
-def PerformBuild(data, configurations, params):
-  options = params['options']
-
-  for build_file, build_file_dict in data.iteritems():
-    (build_file_root, build_file_ext) = os.path.splitext(build_file)
-    if build_file_ext != '.gyp':
-      continue
-    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
-    if options.generator_output:
-      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
-
-  for config in configurations:
-    arguments = ['xcodebuild', '-project', xcodeproj_path]
-    arguments += ['-configuration', config]
-    print "Building [%s]: %s" % (config, arguments)
-    subprocess.check_call(arguments)
-
-
-def CalculateGeneratorInputInfo(params):
-  toplevel = params['options'].toplevel_dir
-  if params.get('flavor') == 'ninja':
-    generator_dir = os.path.relpath(params['options'].generator_output or '.')
-    output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
-    output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
-    qualified_out_dir = os.path.normpath(os.path.join(
-        toplevel, output_dir, 'gypfiles-xcode-ninja'))
-  else:
-    output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
-    qualified_out_dir = os.path.normpath(os.path.join(
-        toplevel, output_dir, 'gypfiles'))
-
-  global generator_filelist_paths
-  generator_filelist_paths = {
-      'toplevel': toplevel,
-      'qualified_out_dir': qualified_out_dir,
-  }
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  # Optionally configure each spec to use ninja as the external builder.
-  ninja_wrapper = params.get('flavor') == 'ninja'
-  if ninja_wrapper:
-    (target_list, target_dicts, data) = \
-        gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
-
-  options = params['options']
-  generator_flags = params.get('generator_flags', {})
-  parallel_builds = generator_flags.get('xcode_parallel_builds', True)
-  serialize_all_tests = \
-      generator_flags.get('xcode_serialize_all_test_runs', True)
-  upgrade_check_project_version = \
-      generator_flags.get('xcode_upgrade_check_project_version', None)
-
-  # Format upgrade_check_project_version with leading zeros as needed.
-  if upgrade_check_project_version:
-    upgrade_check_project_version = str(upgrade_check_project_version)
-    while len(upgrade_check_project_version) < 4:
-      upgrade_check_project_version = '0' + upgrade_check_project_version
-
-  skip_excluded_files = \
-      not generator_flags.get('xcode_list_excluded_files', True)
-  xcode_projects = {}
-  for build_file, build_file_dict in data.iteritems():
-    (build_file_root, build_file_ext) = os.path.splitext(build_file)
-    if build_file_ext != '.gyp':
-      continue
-    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
-    if options.generator_output:
-      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
-    xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
-    xcode_projects[build_file] = xcp
-    pbxp = xcp.project
-
-    # Set project-level attributes from multiple options
-    project_attributes = {}
-    if parallel_builds:
-      project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
-    if upgrade_check_project_version:
-      project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
-      project_attributes['LastTestingUpgradeCheck'] = \
-          upgrade_check_project_version
-      project_attributes['LastSwiftUpdateCheck'] = \
-          upgrade_check_project_version
-    pbxp.SetProperty('attributes', project_attributes)
-
-    # Add gyp/gypi files to project
-    if not generator_flags.get('standalone'):
-      main_group = pbxp.GetProperty('mainGroup')
-      build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
-      main_group.AppendChild(build_group)
-      for included_file in build_file_dict['included_files']:
-        build_group.AddOrGetFileByPath(included_file, False)
-
-  xcode_targets = {}
-  xcode_target_to_target_dict = {}
-  for qualified_target in target_list:
-    [build_file, target_name, toolset] = \
-        gyp.common.ParseQualifiedTarget(qualified_target)
-
-    spec = target_dicts[qualified_target]
-    if spec['toolset'] != 'target':
-      raise Exception(
-          'Multiple toolsets not supported in xcode build (target %s)' %
-          qualified_target)
-    configuration_names = [spec['default_configuration']]
-    for configuration_name in sorted(spec['configurations'].keys()):
-      if configuration_name not in configuration_names:
-        configuration_names.append(configuration_name)
-    xcp = xcode_projects[build_file]
-    pbxp = xcp.project
-
-    # Set up the configurations for the target according to the list of names
-    # supplied.
-    xccl = CreateXCConfigurationList(configuration_names)
-
-    # Create an XCTarget subclass object for the target. The type with
-    # "+bundle" appended will be used if the target has "mac_bundle" set.
-    # loadable_modules not in a mac_bundle are mapped to
-    # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
-    # to create a single-file mh_bundle.
-    _types = {
-      'executable':                  'com.apple.product-type.tool',
-      'loadable_module':             'com.googlecode.gyp.xcode.bundle',
-      'shared_library':              'com.apple.product-type.library.dynamic',
-      'static_library':              'com.apple.product-type.library.static',
-      'mac_kernel_extension':        'com.apple.product-type.kernel-extension',
-      'executable+bundle':           'com.apple.product-type.application',
-      'loadable_module+bundle':      'com.apple.product-type.bundle',
-      'loadable_module+xctest':      'com.apple.product-type.bundle.unit-test',
-      'loadable_module+xcuitest':    'com.apple.product-type.bundle.ui-testing',
-      'shared_library+bundle':       'com.apple.product-type.framework',
-      'executable+extension+bundle': 'com.apple.product-type.app-extension',
-      'executable+watch+extension+bundle':
-          'com.apple.product-type.watchkit-extension',
-      'executable+watch+bundle':
-          'com.apple.product-type.application.watchapp',
-      'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
-    }
-
-    target_properties = {
-      'buildConfigurationList': xccl,
-      'name':                   target_name,
-    }
-
-    type = spec['type']
-    is_xctest = int(spec.get('mac_xctest_bundle', 0))
-    is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0))
-    is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
-    is_app_extension = int(spec.get('ios_app_extension', 0))
-    is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
-    is_watch_app = int(spec.get('ios_watch_app', 0))
-    if type != 'none':
-      type_bundle_key = type
-      if is_xcuitest:
-        type_bundle_key += '+xcuitest'
-        assert type == 'loadable_module', (
-            'mac_xcuitest_bundle targets must have type loadable_module '
-            '(target %s)' % target_name)
-      elif is_xctest:
-        type_bundle_key += '+xctest'
-        assert type == 'loadable_module', (
-            'mac_xctest_bundle targets must have type loadable_module '
-            '(target %s)' % target_name)
-      elif is_app_extension:
-        assert is_bundle, ('ios_app_extension flag requires mac_bundle '
-            '(target %s)' % target_name)
-        type_bundle_key += '+extension+bundle'
-      elif is_watchkit_extension:
-        assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
-            '(target %s)' % target_name)
-        type_bundle_key += '+watch+extension+bundle'
-      elif is_watch_app:
-        assert is_bundle, ('ios_watch_app flag requires mac_bundle '
-            '(target %s)' % target_name)
-        type_bundle_key += '+watch+bundle'
-      elif is_bundle:
-        type_bundle_key += '+bundle'
-
-      xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
-      try:
-        target_properties['productType'] = _types[type_bundle_key]
-      except KeyError, e:
-        gyp.common.ExceptionAppend(e, "-- unknown product type while "
-                                   "writing target %s" % target_name)
-        raise
-    else:
-      xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
-      assert not is_bundle, (
-          'mac_bundle targets cannot have type none (target "%s")' %
-          target_name)
-      assert not is_xcuitest, (
-          'mac_xcuitest_bundle targets cannot have type none (target "%s")' %
-          target_name)
-      assert not is_xctest, (
-          'mac_xctest_bundle targets cannot have type none (target "%s")' %
-          target_name)
-
-    target_product_name = spec.get('product_name')
-    if target_product_name is not None:
-      target_properties['productName'] = target_product_name
-
-    xct = xctarget_type(target_properties, parent=pbxp,
-                        force_outdir=spec.get('product_dir'),
-                        force_prefix=spec.get('product_prefix'),
-                        force_extension=spec.get('product_extension'))
-    pbxp.AppendProperty('targets', xct)
-    xcode_targets[qualified_target] = xct
-    xcode_target_to_target_dict[xct] = spec
-
-    spec_actions = spec.get('actions', [])
-    spec_rules = spec.get('rules', [])
-
-    # Xcode has some "issues" with checking dependencies for the "Compile
-    # sources" step with any source files/headers generated by actions/rules.
-    # To work around this, if a target is building anything directly (not
-    # type "none"), then a second target is used to run the GYP actions/rules
-    # and is made a dependency of this target.  This way the work is done
-    # before the dependency checks for what should be recompiled.
-    support_xct = None
-    # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
-    # logic all happens in ninja.  Don't bother creating the extra targets in
-    # that case.
-    if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
-      support_xccl = CreateXCConfigurationList(configuration_names)
-      support_target_suffix = generator_flags.get(
-          'support_target_suffix', ' Support')
-      support_target_properties = {
-        'buildConfigurationList': support_xccl,
-        'name':                   target_name + support_target_suffix,
-      }
-      if target_product_name:
-        support_target_properties['productName'] = \
-            target_product_name + ' Support'
-      support_xct = \
-          gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
-                                                parent=pbxp)
-      pbxp.AppendProperty('targets', support_xct)
-      xct.AddDependency(support_xct)
-    # Hang the support target off the main target so it can be tested/found
-    # by the generator during Finalize.
-    xct.support_target = support_xct
-
-    prebuild_index = 0
-
-    # Add custom shell script phases for "actions" sections.
-    for action in spec_actions:
-      # There's no need to write anything into the script to ensure that the
-      # output directories already exist, because Xcode will look at the
-      # declared outputs and automatically ensure that they exist for us.
-
-      # Do we have a message to print when this action runs?
-      message = action.get('message')
-      if message:
-        message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
-      else:
-        message = ''
-
-      # Turn the list into a string that can be passed to a shell.
-      action_string = gyp.common.EncodePOSIXShellList(action['action'])
-
-      # Convert Xcode-type variable references to sh-compatible environment
-      # variable references.
-      message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
-      action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
-        action_string)
-
-      script = ''
-      # Include the optional message
-      if message_sh:
-        script += message_sh + '\n'
-      # Be sure the script runs in exec, and that if exec fails, the script
-      # exits signalling an error.
-      script += 'exec ' + action_string_sh + '\nexit 1\n'
-      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
-            'inputPaths': action['inputs'],
-            'name': 'Action "' + action['action_name'] + '"',
-            'outputPaths': action['outputs'],
-            'shellScript': script,
-            'showEnvVarsInLog': 0,
-          })
-
-      if support_xct:
-        support_xct.AppendProperty('buildPhases', ssbp)
-      else:
-        # TODO(mark): this assumes too much knowledge of the internals of
-        # xcodeproj_file; some of these smarts should move into xcodeproj_file
-        # itself.
-        xct._properties['buildPhases'].insert(prebuild_index, ssbp)
-        prebuild_index = prebuild_index + 1
-
-      # TODO(mark): Should verify that at most one of these is specified.
-      if int(action.get('process_outputs_as_sources', False)):
-        for output in action['outputs']:
-          AddSourceToTarget(output, type, pbxp, xct)
-
-      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
-        for output in action['outputs']:
-          AddResourceToTarget(output, pbxp, xct)
-
-    # tgt_mac_bundle_resources holds the list of bundle resources so
-    # the rule processing can check against it.
-    if is_bundle:
-      tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
-    else:
-      tgt_mac_bundle_resources = []
-
-    # Add custom shell script phases driving "make" for "rules" sections.
-    #
-    # Xcode's built-in rule support is almost powerful enough to use directly,
-    # but there are a few significant deficiencies that render them unusable.
-    # There are workarounds for some of its inadequacies, but in aggregate,
-    # the workarounds added complexity to the generator, and some workarounds
-    # actually require input files to be crafted more carefully than I'd like.
-    # Consequently, until Xcode rules are made more capable, "rules" input
-    # sections will be handled in Xcode output by shell script build phases
-    # performed prior to the compilation phase.
-    #
-    # The following problems with Xcode rules were found.  The numbers are
-    # Apple radar IDs.  I hope that these shortcomings are addressed; I really
-    # liked having the rules handled directly in Xcode during the period that
-    # I was prototyping this.
-    #
-    # 6588600 Xcode compiles custom script rule outputs too soon, compilation
-    #         fails.  This occurs when rule outputs from distinct inputs are
-    #         interdependent.  The only workaround is to put rules and their
-    #         inputs in a separate target from the one that compiles the rule
-    #         outputs.  This requires input file cooperation and it means that
-    #         process_outputs_as_sources is unusable.
-    # 6584932 Need to declare that custom rule outputs should be excluded from
-    #         compilation.  A possible workaround is to lie to Xcode about a
-    #         rule's output, giving it a dummy file it doesn't know how to
-    #         compile.  The rule action script would need to touch the dummy.
-    # 6584839 I need a way to declare additional inputs to a custom rule.
-    #         A possible workaround is a shell script phase prior to
-    #         compilation that touches a rule's primary input files if any
-    #         would-be additional inputs are newer than the output.  Modifying
-    #         the source tree - even just modification times - feels dirty.
-    # 6564240 Xcode "custom script" build rules always dump all environment
-#         variables.  This is a low-priority problem and is not a
-    #         show-stopper.
-    rules_by_ext = {}
-    for rule in spec_rules:
-      rules_by_ext[rule['extension']] = rule
-
-      # First, some definitions:
-      #
-      # A "rule source" is a file that was listed in a target's "sources"
-      # list and will have a rule applied to it on the basis of matching the
-      # rule's "extensions" attribute.  Rule sources are direct inputs to
-      # rules.
-      #
-      # Rule definitions may specify additional inputs in their "inputs"
-      # attribute.  These additional inputs are used for dependency tracking
-      # purposes.
-      #
-      # A "concrete output" is a rule output with input-dependent variables
-      # resolved.  For example, given a rule with:
-      #   'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
-      # if the target's "sources" list contained "one.ext" and "two.ext",
-      # the "concrete output" for rule input "two.ext" would be "two.cc".  If
-      # a rule specifies multiple outputs, each input file that the rule is
-      # applied to will have the same number of concrete outputs.
-      #
-      # If any concrete outputs are outdated or missing relative to their
-      # corresponding rule_source or to any specified additional input, the
-      # rule action must be performed to generate the concrete outputs.
-
-      # concrete_outputs_by_rule_source will have an item at the same index
-      # as the rule['rule_sources'] that it corresponds to.  Each item is a
-      # list of all of the concrete outputs for the rule_source.
-      concrete_outputs_by_rule_source = []
-
-      # concrete_outputs_all is a flat list of all concrete outputs that this
-      # rule is able to produce, given the known set of input files
-      # (rule_sources) that apply to it.
-      concrete_outputs_all = []
-
-      # messages & actions are keyed by the same indices as rule['rule_sources']
-      # and concrete_outputs_by_rule_source.  They contain the message and
-      # action to perform after resolving input-dependent variables.  The
-      # message is optional, in which case None is stored for each rule source.
-      messages = []
-      actions = []
-
-      for rule_source in rule.get('rule_sources', []):
-        rule_source_dirname, rule_source_basename = \
-            posixpath.split(rule_source)
-        (rule_source_root, rule_source_ext) = \
-            posixpath.splitext(rule_source_basename)
-
-        # These are the same variable names that Xcode uses for its own native
-        # rule support.  Because Xcode's rule engine is not being used, they
-        # need to be expanded as they are written to the makefile.
-        rule_input_dict = {
-          'INPUT_FILE_BASE':   rule_source_root,
-          'INPUT_FILE_SUFFIX': rule_source_ext,
-          'INPUT_FILE_NAME':   rule_source_basename,
-          'INPUT_FILE_PATH':   rule_source,
-          'INPUT_FILE_DIRNAME': rule_source_dirname,
-        }
-
-        concrete_outputs_for_this_rule_source = []
-        for output in rule.get('outputs', []):
-          # Fortunately, Xcode and make both use $(VAR) format for their
-          # variables, so the expansion is the only transformation necessary.
-          # Any remaining $(VAR)-type variables in the string can be given
-          # directly to make, which will pick up the correct settings from
-          # what Xcode puts into the environment.
-          concrete_output = ExpandXcodeVariables(output, rule_input_dict)
-          concrete_outputs_for_this_rule_source.append(concrete_output)
-
-          # Add all concrete outputs to the project.
-          pbxp.AddOrGetFileInRootGroup(concrete_output)
-
-        concrete_outputs_by_rule_source.append( \
-            concrete_outputs_for_this_rule_source)
-        concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
-
-        # TODO(mark): Should verify that at most one of these is specified.
-        if int(rule.get('process_outputs_as_sources', False)):
-          for output in concrete_outputs_for_this_rule_source:
-            AddSourceToTarget(output, type, pbxp, xct)
-
-        # If the file came from the mac_bundle_resources list or if the rule
-        # is marked to process outputs as bundle resource, do so.
-        was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
-        if was_mac_bundle_resource or \
-            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
-          for output in concrete_outputs_for_this_rule_source:
-            AddResourceToTarget(output, pbxp, xct)
-
-        # Do we have a message to print when this rule runs?
-        message = rule.get('message')
-        if message:
-          message = gyp.common.EncodePOSIXShellArgument(message)
-          message = ExpandXcodeVariables(message, rule_input_dict)
-        messages.append(message)
-
-        # Turn the list into a string that can be passed to a shell.
-        action_string = gyp.common.EncodePOSIXShellList(rule['action'])
-
-        action = ExpandXcodeVariables(action_string, rule_input_dict)
-        actions.append(action)
-
-      if len(concrete_outputs_all) > 0:
-        # TODO(mark): There's a possibility for collision here.  Consider
-        # target "t" rule "A_r" and target "t_A" rule "r".
-        makefile_name = '%s.make' % re.sub(
-            '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
-        makefile_path = os.path.join(xcode_projects[build_file].path,
-                                     makefile_name)
-        # TODO(mark): try/close?  Write to a temporary file and swap it only
-        # if it's got changes?
-        makefile = open(makefile_path, 'wb')
-
-        # make will build the first target in the makefile by default.  By
-        # convention, it's called "all".  List all (or at least one)
-        # concrete output for each rule source as a prerequisite of the "all"
-        # target.
-        makefile.write('all: \\\n')
-        for concrete_output_index in \
-            xrange(0, len(concrete_outputs_by_rule_source)):
-          # Only list the first (index [0]) concrete output of each input
-          # in the "all" target.  Otherwise, a parallel make (-j > 1) would
-          # attempt to process each input multiple times simultaneously.
-          # Otherwise, "all" could just contain the entire list of
-          # concrete_outputs_all.
-          concrete_output = \
-              concrete_outputs_by_rule_source[concrete_output_index][0]
-          if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
-            eol = ''
-          else:
-            eol = ' \\'
-          makefile.write('    %s%s\n' % (concrete_output, eol))
-
-        for (rule_source, concrete_outputs, message, action) in \
-            zip(rule['rule_sources'], concrete_outputs_by_rule_source,
-                messages, actions):
-          makefile.write('\n')
-
-          # Add a rule that declares it can build each concrete output of a
-          # rule source.  Collect the names of the directories that are
-          # required.
-          concrete_output_dirs = []
-          for concrete_output_index in xrange(0, len(concrete_outputs)):
-            concrete_output = concrete_outputs[concrete_output_index]
-            if concrete_output_index == 0:
-              bol = ''
-            else:
-              bol = '    '
-            makefile.write('%s%s \\\n' % (bol, concrete_output))
-
-            concrete_output_dir = posixpath.dirname(concrete_output)
-            if (concrete_output_dir and
-                concrete_output_dir not in concrete_output_dirs):
-              concrete_output_dirs.append(concrete_output_dir)
-
-          makefile.write('    : \\\n')
-
-          # The prerequisites for this rule are the rule source itself and
-          # the set of additional rule inputs, if any.
-          prerequisites = [rule_source]
-          prerequisites.extend(rule.get('inputs', []))
-          for prerequisite_index in xrange(0, len(prerequisites)):
-            prerequisite = prerequisites[prerequisite_index]
-            if prerequisite_index == len(prerequisites) - 1:
-              eol = ''
-            else:
-              eol = ' \\'
-            makefile.write('    %s%s\n' % (prerequisite, eol))
-
-          # Make sure that output directories exist before executing the rule
-          # action.
-          if len(concrete_output_dirs) > 0:
-            makefile.write('\t@mkdir -p "%s"\n' %
-                           '" "'.join(concrete_output_dirs))
-
-          # The rule message and action have already had the necessary variable
-          # substitutions performed.
-          if message:
-            # Mark it with note: so Xcode picks it up in build output.
-            makefile.write('\t@echo note: %s\n' % message)
-          makefile.write('\t%s\n' % action)
-
-        makefile.close()
-
-        # It might be nice to ensure that needed output directories exist
-        # here rather than in each target in the Makefile, but that wouldn't
-        # work if there ever was a concrete output that had an input-dependent
-        # variable anywhere other than in the leaf position.
-
-        # Don't declare any inputPaths or outputPaths.  If they're present,
-        # Xcode will provide a slight optimization by only running the script
-        # phase if any output is missing or outdated relative to any input.
-        # Unfortunately, it will also assume that all outputs are touched by
-        # the script, and if the outputs serve as files in a compilation
-        # phase, they will be unconditionally rebuilt.  Since make might not
-        # rebuild everything that could be declared here as an output, this
-        # extra compilation activity is unnecessary.  With inputPaths and
-        # outputPaths not supplied, make will always be called, but it knows
-        # enough to not do anything when everything is up-to-date.
-
-        # To help speed things up, pass -j COUNT to make so it does some work
-        # in parallel.  Don't use ncpus because Xcode will build ncpus targets
-        # in parallel and if each target happens to have a rules step, there
-        # would be ncpus^2 things going.  With a machine that has 2 quad-core
-        # Xeons, a build can quickly run out of processes based on
-        # scheduling/other tasks, and randomly failing builds are no good.
-        script = \
-"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
-if [ "${JOB_COUNT}" -gt 4 ]; then
-  JOB_COUNT=4
-fi
-exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
-exit 1
-""" % makefile_name
-        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
-              'name': 'Rule "' + rule['rule_name'] + '"',
-              'shellScript': script,
-              'showEnvVarsInLog': 0,
-            })
-
-        if support_xct:
-          support_xct.AppendProperty('buildPhases', ssbp)
-        else:
-          # TODO(mark): this assumes too much knowledge of the internals of
-          # xcodeproj_file; some of these smarts should move into xcodeproj_file
-          # itself.
-          xct._properties['buildPhases'].insert(prebuild_index, ssbp)
-          prebuild_index = prebuild_index + 1
-
-      # Extra rule inputs also go into the project file.  Concrete outputs were
-      # already added when they were computed.
-      groups = ['inputs', 'inputs_excluded']
-      if skip_excluded_files:
-        groups = [x for x in groups if not x.endswith('_excluded')]
-      for group in groups:
-        for item in rule.get(group, []):
-          pbxp.AddOrGetFileInRootGroup(item)
-
-    # Add "sources".
-    for source in spec.get('sources', []):
-      (source_root, source_extension) = posixpath.splitext(source)
-      if source_extension[1:] not in rules_by_ext:
-        # AddSourceToTarget will add the file to a root group if it's not
-        # already there.
-        AddSourceToTarget(source, type, pbxp, xct)
-      else:
-        pbxp.AddOrGetFileInRootGroup(source)
-
-    # Add "mac_bundle_resources" and "mac_framework_private_headers" if
-    # it's a bundle of any type.
-    if is_bundle:
-      for resource in tgt_mac_bundle_resources:
-        (resource_root, resource_extension) = posixpath.splitext(resource)
-        if resource_extension[1:] not in rules_by_ext:
-          AddResourceToTarget(resource, pbxp, xct)
-        else:
-          pbxp.AddOrGetFileInRootGroup(resource)
-
-      for header in spec.get('mac_framework_private_headers', []):
-        AddHeaderToTarget(header, pbxp, xct, False)
-
-    # Add "mac_framework_headers". These can be valid for both frameworks
-    # and static libraries.
-    if is_bundle or type == 'static_library':
-      for header in spec.get('mac_framework_headers', []):
-        AddHeaderToTarget(header, pbxp, xct, True)
-
-    # Add "copies".
-    pbxcp_dict = {}
-    for copy_group in spec.get('copies', []):
-      dest = copy_group['destination']
-      if dest[0] not in ('/', '$'):
-        # Relative paths are relative to $(SRCROOT).
-        dest = '$(SRCROOT)/' + dest
-
-      code_sign = int(copy_group.get('xcode_code_sign', 0))
-      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
-
-      # Coalesce multiple "copies" sections in the same target with the same
-      # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
-      # they'll wind up with ID collisions.
-      pbxcp = pbxcp_dict.get(dest, None)
-      if pbxcp is None:
-        pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
-              'name': 'Copy to ' + copy_group['destination']
-            },
-            parent=xct)
-        pbxcp.SetDestination(dest)
-
-        # TODO(mark): The usual comment about this knowing too much about
-        # gyp.xcodeproj_file internals applies.
-        xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
-
-        pbxcp_dict[dest] = pbxcp
-
-      for file in copy_group['files']:
-        pbxcp.AddFile(file, settings)
-
-    # Excluded files can also go into the project file.
-    if not skip_excluded_files:
-      for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
-                  'mac_framework_private_headers']:
-        excluded_key = key + '_excluded'
-        for item in spec.get(excluded_key, []):
-          pbxp.AddOrGetFileInRootGroup(item)
-
-    # So can "inputs" and "outputs" sections of "actions" groups.
-    groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
-    if skip_excluded_files:
-      groups = [x for x in groups if not x.endswith('_excluded')]
-    for action in spec.get('actions', []):
-      for group in groups:
-        for item in action.get(group, []):
-          # Exclude anything in BUILT_PRODUCTS_DIR.  They're products, not
-          # sources.
-          if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
-            pbxp.AddOrGetFileInRootGroup(item)
-
-    for postbuild in spec.get('postbuilds', []):
-      action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
-      script = 'exec ' + action_string_sh + '\nexit 1\n'
-
-      # Make the postbuild step depend on the output of ld or ar from this
-      # target. Apparently putting the script step after the link step isn't
-      # sufficient to ensure proper ordering in all cases. With an input
-      # declared but no outputs, the script step should run every time, as
-      # desired.
-      ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
-            'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
-            'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
-            'shellScript': script,
-            'showEnvVarsInLog': 0,
-          })
-      xct.AppendProperty('buildPhases', ssbp)
-
-    # Add dependencies before libraries, because adding a dependency may imply
-    # adding a library.  It's preferable to keep dependencies listed first
-    # during a link phase so that they can override symbols that would
-    # otherwise be provided by libraries, which will usually include system
-    # libraries.  On some systems, ld is finicky and even requires the
-    # libraries to be ordered in such a way that unresolved symbols in
-    # earlier-listed libraries may only be resolved by later-listed libraries.
-    # The Mac linker doesn't work that way, but other platforms do, and so
-    # their linker invocations need to be constructed in this way.  There's
-    # no compelling reason for Xcode's linker invocations to differ.
-
-    if 'dependencies' in spec:
-      for dependency in spec['dependencies']:
-        xct.AddDependency(xcode_targets[dependency])
-        # The support project also gets the dependencies (in case they are
-        # needed for the actions/rules to work).
-        if support_xct:
-          support_xct.AddDependency(xcode_targets[dependency])
-
-    if 'libraries' in spec:
-      for library in spec['libraries']:
-        xct.FrameworksPhase().AddFile(library)
-        # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
-        # I wish Xcode handled this automatically.
-        library_dir = posixpath.dirname(library)
-        if library_dir not in xcode_standard_library_dirs and (
-            not xct.HasBuildSetting(_library_search_paths_var) or
-            library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
-          xct.AppendBuildSetting(_library_search_paths_var, library_dir)
-
-    for configuration_name in configuration_names:
-      configuration = spec['configurations'][configuration_name]
-      xcbc = xct.ConfigurationNamed(configuration_name)
-      for include_dir in configuration.get('mac_framework_dirs', []):
-        xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
-      for include_dir in configuration.get('include_dirs', []):
-        xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
-      for library_dir in configuration.get('library_dirs', []):
-        if library_dir not in xcode_standard_library_dirs and (
-            not xcbc.HasBuildSetting(_library_search_paths_var) or
-            library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
-          xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
-
-      if 'defines' in configuration:
-        for define in configuration['defines']:
-          set_define = EscapeXcodeDefine(define)
-          xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
-      if 'xcode_settings' in configuration:
-        for xck, xcv in configuration['xcode_settings'].iteritems():
-          xcbc.SetBuildSetting(xck, xcv)
-      if 'xcode_config_file' in configuration:
-        config_ref = pbxp.AddOrGetFileInRootGroup(
-            configuration['xcode_config_file'])
-        xcbc.SetBaseConfiguration(config_ref)
-
-  build_files = []
-  for build_file, build_file_dict in data.iteritems():
-    if build_file.endswith('.gyp'):
-      build_files.append(build_file)
-
-  for build_file in build_files:
-    xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
-
-  for build_file in build_files:
-    xcode_projects[build_file].Finalize2(xcode_targets,
-                                         xcode_target_to_target_dict)
-
-  for build_file in build_files:
-    xcode_projects[build_file].Write()
diff --git a/tools/gyp/pylib/gyp/generator/xcode_test.py b/tools/gyp/pylib/gyp/generator/xcode_test.py
deleted file mode 100644
index 260324a..0000000
--- a/tools/gyp/pylib/gyp/generator/xcode_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the xcode.py file. """
-
-import gyp.generator.xcode as xcode
-import unittest
-import sys
-
-
-class TestEscapeXcodeDefine(unittest.TestCase):
-  if sys.platform == 'darwin':
-    def test_InheritedRemainsUnescaped(self):
-      self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
-
-    def test_Escaping(self):
-      self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py
deleted file mode 100644
index 22eb333..0000000
--- a/tools/gyp/pylib/gyp/input.py
+++ /dev/null
@@ -1,2899 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from compiler.ast import Const
-from compiler.ast import Dict
-from compiler.ast import Discard
-from compiler.ast import List
-from compiler.ast import Module
-from compiler.ast import Node
-from compiler.ast import Stmt
-import compiler
-import gyp.common
-import gyp.simple_copy
-import multiprocessing
-import optparse
-import os.path
-import re
-import shlex
-import signal
-import subprocess
-import sys
-import threading
-import time
-import traceback
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-
-# A list of types that are treated as linkable.
-linkable_types = [
-  'executable',
-  'shared_library',
-  'loadable_module',
-  'mac_kernel_extension',
-]
-
-# A list of sections that contain links to other targets.
-dependency_sections = ['dependencies', 'export_dependent_settings']
-
-# base_path_sections is a list of sections defined by GYP that contain
-# pathnames.  The generators can provide more keys; the two lists are merged
-# into path_sections, but you should call IsPathSection instead of using either
-# list directly.
-base_path_sections = [
-  'destination',
-  'files',
-  'include_dirs',
-  'inputs',
-  'libraries',
-  'outputs',
-  'sources',
-]
-path_sections = set()
-
-# These per-process dictionaries are used to cache build file data when loading
-# in parallel mode.
-per_process_data = {}
-per_process_aux_data = {}
-
-def IsPathSection(section):
-  # If section ends in one of the '=+?!' characters, it's applied to a section
-  # without the trailing characters.  '/' is notably absent from this list,
-  # because there's no way for a regular expression to be treated as a path.
-  while section and section[-1:] in '=+?!':
-    section = section[:-1]
-
-  if section in path_sections:
-    return True
-
-  # Sections matching the regexp '_(dir|file|path)s?$' are also
-  # considered PathSections.  Manual string matching is used instead of the
-  # regexp because it is much faster, and this can be called hundreds of
-  # thousands of times, so micro performance matters.
-  if "_" in section:
-    tail = section[-6:]
-    if tail[-1] == 's':
-      tail = tail[:-1]
-    if tail[-5:] in ('_file', '_path'):
-      return True
-    return tail[-4:] == '_dir'
-
-  return False
-
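A few example calls; these rely only on the '_dir'/'_file'/'_path' suffix heuristic, since path_sections starts empty and is only filled in (from base_path_sections plus generator-supplied keys) once the generator globals are set up.

print(IsPathSection('include_dirs'))        # True  ('_dir' plus the plural 's')
print(IsPathSection('mac_framework_dirs'))  # True
print(IsPathSection('xcode_config_file'))   # True  ('_file' suffix)
print(IsPathSection('cflags!'))             # False ('!' is stripped, then the same checks run)
print(IsPathSection('defines'))             # False (no recognized suffix)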
-# base_non_configuration_keys is a list of key names that belong in the target
-# itself and should not be propagated into its configurations.  It is merged
-# with a list that can come from the generator to
-# create non_configuration_keys.
-base_non_configuration_keys = [
-  # Sections that must exist inside targets and not configurations.
-  'actions',
-  'configurations',
-  'copies',
-  'default_configuration',
-  'dependencies',
-  'dependencies_original',
-  'libraries',
-  'postbuilds',
-  'product_dir',
-  'product_extension',
-  'product_name',
-  'product_prefix',
-  'rules',
-  'run_as',
-  'sources',
-  'standalone_static_library',
-  'suppress_wildcard',
-  'target_name',
-  'toolset',
-  'toolsets',
-  'type',
-
-  # Sections that can be found inside targets or configurations, but that
-  # should not be propagated from targets into their configurations.
-  'variables',
-]
-non_configuration_keys = []
-
-# Keys that do not belong inside a configuration dictionary.
-invalid_configuration_keys = [
-  'actions',
-  'all_dependent_settings',
-  'configurations',
-  'dependencies',
-  'direct_dependent_settings',
-  'libraries',
-  'link_settings',
-  'sources',
-  'standalone_static_library',
-  'target_name',
-  'type',
-]
-
-# Controls whether or not the generator supports multiple toolsets.
-multiple_toolsets = False
-
-# Paths for converting filelist paths to output paths: {
-#   toplevel,
-#   qualified_output_dir,
-# }
-generator_filelist_paths = None
-
-def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
-  """Return a list of all build files included into build_file_path.
-
-  The returned list will contain build_file_path as well as all other files
-  that it included, either directly or indirectly.  Note that the list may
-  contain files that were included into a conditional section that evaluated
-  to false and was not merged into build_file_path's dict.
-
-  aux_data is a dict containing a key for each build file or included build
-  file.  Those keys provide access to dicts whose "included" keys contain
-  lists of all other files included by the build file.
-
-  included should be left at its default None value by external callers.  It
-  is used for recursion.
-
-  The returned list will not contain any duplicate entries.  Each build file
-  in the list will be relative to the current directory.
-  """
-
-  if included == None:
-    included = []
-
-  if build_file_path in included:
-    return included
-
-  included.append(build_file_path)
-
-  for included_build_file in aux_data[build_file_path].get('included', []):
-    GetIncludedBuildFiles(included_build_file, aux_data, included)
-
-  return included
-
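For example, with aux_data describing a small chain of includes (file names invented for illustration), the recursion flattens the whole include graph without duplicates:

aux_data = {
  'a.gyp':  {'included': ['b.gypi']},
  'b.gypi': {'included': ['c.gypi', 'a.gyp']},  # The cycle back to a.gyp is harmless.
  'c.gypi': {},
}
print(GetIncludedBuildFiles('a.gyp', aux_data))
# -> ['a.gyp', 'b.gypi', 'c.gypi']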
-
-def CheckedEval(file_contents):
-  """Return the eval of a gyp file.
-
-  The gyp file is restricted to dictionaries and lists only, and
-  repeated keys are not allowed.
-
-  Note that this is slower than eval() is.
-  """
-
-  ast = compiler.parse(file_contents)
-  assert isinstance(ast, Module)
-  c1 = ast.getChildren()
-  assert c1[0] is None
-  assert isinstance(c1[1], Stmt)
-  c2 = c1[1].getChildren()
-  assert isinstance(c2[0], Discard)
-  c3 = c2[0].getChildren()
-  assert len(c3) == 1
-  return CheckNode(c3[0], [])
-
-
-def CheckNode(node, keypath):
-  if isinstance(node, Dict):
-    c = node.getChildren()
-    dict = {}
-    for n in range(0, len(c), 2):
-      assert isinstance(c[n], Const)
-      key = c[n].getChildren()[0]
-      if key in dict:
-        raise GypError("Key '" + key + "' repeated at level " +
-              repr(len(keypath) + 1) + " with key path '" +
-              '.'.join(keypath) + "'")
-      kp = list(keypath)  # Make a copy of the list for descending this node.
-      kp.append(key)
-      dict[key] = CheckNode(c[n + 1], kp)
-    return dict
-  elif isinstance(node, List):
-    c = node.getChildren()
-    children = []
-    for index, child in enumerate(c):
-      kp = list(keypath)  # Copy list.
-      kp.append(repr(index))
-      children.append(CheckNode(child, kp))
-    return children
-  elif isinstance(node, Const):
-    return node.getChildren()[0]
-  else:
-    raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
-         "': " + repr(node))
-
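Under the Python 2 compiler module that this file targets, the restricted evaluation behaves like this; the duplicate-key error is the main thing a plain eval() would silently accept.

print(CheckedEval("{'targets': [{'target_name': 'foo', 'type': 'none'}]}"))
# -> {'targets': [{'target_name': 'foo', 'type': 'none'}]}

try:
  CheckedEval("{'target_name': 'foo', 'target_name': 'bar'}")
except GypError as e:
  print(e)  # Key 'target_name' repeated at level 1 with key path ''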
-
-def LoadOneBuildFile(build_file_path, data, aux_data, includes,
-                     is_target, check):
-  if build_file_path in data:
-    return data[build_file_path]
-
-  if os.path.exists(build_file_path):
-    build_file_contents = open(build_file_path).read()
-  else:
-    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
-
-  build_file_data = None
-  try:
-    if check:
-      build_file_data = CheckedEval(build_file_contents)
-    else:
-      build_file_data = eval(build_file_contents, {'__builtins__': None},
-                             None)
-  except SyntaxError, e:
-    e.filename = build_file_path
-    raise
-  except Exception, e:
-    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
-    raise
-
-  if type(build_file_data) is not dict:
-    raise GypError("%s does not evaluate to a dictionary." % build_file_path)
-
-  data[build_file_path] = build_file_data
-  aux_data[build_file_path] = {}
-
-  # Scan for includes and merge them in.
-  if ('skip_includes' not in build_file_data or
-      not build_file_data['skip_includes']):
-    try:
-      if is_target:
-        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
-                                      aux_data, includes, check)
-      else:
-        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
-                                      aux_data, None, check)
-    except Exception, e:
-      gyp.common.ExceptionAppend(e,
-                                 'while reading includes of ' + build_file_path)
-      raise
-
-  return build_file_data
-
-
-def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
-                                  includes, check):
-  includes_list = []
-  if includes != None:
-    includes_list.extend(includes)
-  if 'includes' in subdict:
-    for include in subdict['includes']:
-      # "include" is specified relative to subdict_path, so compute the real
-      # path to include by appending the provided "include" to the directory
-      # in which subdict_path resides.
-      relative_include = \
-          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
-      includes_list.append(relative_include)
-    # Unhook the includes list, it's no longer needed.
-    del subdict['includes']
-
-  # Merge in the included files.
-  for include in includes_list:
-    if not 'included' in aux_data[subdict_path]:
-      aux_data[subdict_path]['included'] = []
-    aux_data[subdict_path]['included'].append(include)
-
-    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
-
-    MergeDicts(subdict,
-               LoadOneBuildFile(include, data, aux_data, None, False, check),
-               subdict_path, include)
-
-  # Recurse into subdictionaries.
-  for k, v in subdict.iteritems():
-    if type(v) is dict:
-      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
-                                    None, check)
-    elif type(v) is list:
-      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
-                                    check)
-
-
-# This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
-  for item in sublist:
-    if type(item) is dict:
-      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
-                                    None, check)
-    elif type(item) is list:
-      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
-
-# Processes toolsets in all the targets. This recurses into condition entries
-# since they can contain toolsets as well.
-def ProcessToolsetsInDict(data):
-  if 'targets' in data:
-    target_list = data['targets']
-    new_target_list = []
-    for target in target_list:
-      # If this target already has an explicit 'toolset', and no 'toolsets'
-      # list, don't modify it further.
-      if 'toolset' in target and 'toolsets' not in target:
-        new_target_list.append(target)
-        continue
-      if multiple_toolsets:
-        toolsets = target.get('toolsets', ['target'])
-      else:
-        toolsets = ['target']
-      # Make sure this 'toolsets' definition is only processed once.
-      if 'toolsets' in target:
-        del target['toolsets']
-      if len(toolsets) > 0:
-        # Optimization: only do copies if more than one toolset is specified.
-        for build in toolsets[1:]:
-          new_target = gyp.simple_copy.deepcopy(target)
-          new_target['toolset'] = build
-          new_target_list.append(new_target)
-        target['toolset'] = toolsets[0]
-        new_target_list.append(target)
-    data['targets'] = new_target_list
-  if 'conditions' in data:
-    for condition in data['conditions']:
-      if type(condition) is list:
-        for condition_dict in condition[1:]:
-          if type(condition_dict) is dict:
-            ProcessToolsetsInDict(condition_dict)
-
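A small worked example of the expansion, assuming the module-level multiple_toolsets flag has been set to True by the loader:

data = {'targets': [{'target_name': 'foo', 'toolsets': ['host', 'target']}]}
ProcessToolsetsInDict(data)
print(data['targets'])
# -> [{'target_name': 'foo', 'toolset': 'target'},
#     {'target_name': 'foo', 'toolset': 'host'}]
# The 'toolsets' list is consumed; every toolset after the first gets a deep
# copy of the target, and the first-listed toolset stays on the original dict.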
-
-# TODO(mark): I don't love this name.  It just means that it's going to load
-# a build file that contains targets and is expected to provide a targets dict
-# that contains the targets...
-def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
-                        depth, check, load_dependencies):
-  # If depth is set, predefine the DEPTH variable to be a relative path from
-  # this build file's directory to the directory identified by depth.
-  if depth:
-    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
-    # temporary measure. This should really be addressed by keeping all paths
-    # in POSIX until actual project generation.
-    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
-    if d == '':
-      variables['DEPTH'] = '.'
-    else:
-      variables['DEPTH'] = d.replace('\\', '/')
-
-  # The 'target_build_files' key is only set when loading target build files in
-  # the non-parallel code path, where LoadTargetBuildFile is called
-  # recursively.  In the parallel code path, we don't need to check whether the
-  # |build_file_path| has already been loaded, because the 'scheduled' set in
-  # ParallelState guarantees that we never load the same |build_file_path|
-  # twice.
-  if 'target_build_files' in data:
-    if build_file_path in data['target_build_files']:
-      # Already loaded.
-      return False
-    data['target_build_files'].add(build_file_path)
-
-  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
-                  "Loading Target Build File '%s'", build_file_path)
-
-  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
-                                     includes, True, check)
-
-  # Store DEPTH for later use in generators.
-  build_file_data['_DEPTH'] = depth
-
-  # Set up the included_files key indicating which .gyp files contributed to
-  # this target dict.
-  if 'included_files' in build_file_data:
-    raise GypError(build_file_path + ' must not contain included_files key')
-
-  included = GetIncludedBuildFiles(build_file_path, aux_data)
-  build_file_data['included_files'] = []
-  for included_file in included:
-    # included_file is relative to the current directory, but it needs to
-    # be made relative to build_file_path's directory.
-    included_relative = \
-        gyp.common.RelativePath(included_file,
-                                os.path.dirname(build_file_path))
-    build_file_data['included_files'].append(included_relative)
-
-  # Do a first round of toolsets expansion so that conditions can be defined
-  # per toolset.
-  ProcessToolsetsInDict(build_file_data)
-
-  # Apply "pre"/"early" variable expansions and condition evaluations.
-  ProcessVariablesAndConditionsInDict(
-      build_file_data, PHASE_EARLY, variables, build_file_path)
-
-  # Since some toolsets might have been defined conditionally, perform
-  # a second round of toolsets expansion now.
-  ProcessToolsetsInDict(build_file_data)
-
-  # Look at each project's target_defaults dict, and merge settings into
-  # targets.
-  if 'target_defaults' in build_file_data:
-    if 'targets' not in build_file_data:
-      raise GypError("Unable to find targets in build file %s" %
-                     build_file_path)
-
-    index = 0
-    while index < len(build_file_data['targets']):
-      # This procedure needs to give the impression that target_defaults is
-      # used as defaults, and the individual targets inherit from that.
-      # The individual targets need to be merged into the defaults.  Make
-      # a deep copy of the defaults for each target, merge the target dict
-      # as found in the input file into that copy, and then hook up the
-      # copy with the target-specific data merged into it as the replacement
-      # target dict.
-      old_target_dict = build_file_data['targets'][index]
-      new_target_dict = gyp.simple_copy.deepcopy(
-        build_file_data['target_defaults'])
-      MergeDicts(new_target_dict, old_target_dict,
-                 build_file_path, build_file_path)
-      build_file_data['targets'][index] = new_target_dict
-      index += 1
-
-    # No longer needed.
-    del build_file_data['target_defaults']
-
-  # Look for dependencies.  This means that dependency resolution occurs
-  # after "pre" conditionals and variable expansion, but before "post" -
-  # in other words, you can't put a "dependencies" section inside a "post"
-  # conditional within a target.
-
-  dependencies = []
-  if 'targets' in build_file_data:
-    for target_dict in build_file_data['targets']:
-      if 'dependencies' not in target_dict:
-        continue
-      for dependency in target_dict['dependencies']:
-        dependencies.append(
-            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
-
-  if load_dependencies:
-    for dependency in dependencies:
-      try:
-        LoadTargetBuildFile(dependency, data, aux_data, variables,
-                            includes, depth, check, load_dependencies)
-      except Exception, e:
-        gyp.common.ExceptionAppend(
-          e, 'while loading dependencies of %s' % build_file_path)
-        raise
-  else:
-    return (build_file_path, dependencies)
-
-def CallLoadTargetBuildFile(global_flags,
-                            build_file_path, variables,
-                            includes, depth, check,
-                            generator_input_info):
-  """Wrapper around LoadTargetBuildFile for parallel processing.
-
-     This wrapper is used when LoadTargetBuildFile is executed in
-     a worker process.
-  """
-
-  try:
-    signal.signal(signal.SIGINT, signal.SIG_IGN)
-
-    # Apply globals so that the worker process behaves the same.
-    for key, value in global_flags.iteritems():
-      globals()[key] = value
-
-    SetGeneratorGlobals(generator_input_info)
-    result = LoadTargetBuildFile(build_file_path, per_process_data,
-                                 per_process_aux_data, variables,
-                                 includes, depth, check, False)
-    if not result:
-      return result
-
-    (build_file_path, dependencies) = result
-
-    # We can safely pop the build_file_data from per_process_data because it
-    # will never be referenced by this process again, so we don't need to keep
-    # it in the cache.
-    build_file_data = per_process_data.pop(build_file_path)
-
-    # This gets serialized and sent back to the main process via a pipe.
-    # It's handled in LoadTargetBuildFileCallback.
-    return (build_file_path,
-            build_file_data,
-            dependencies)
-  except GypError, e:
-    sys.stderr.write("gyp: %s\n" % e)
-    return None
-  except Exception, e:
-    print >>sys.stderr, 'Exception:', e
-    print >>sys.stderr, traceback.format_exc()
-    return None
-
-
-class ParallelProcessingError(Exception):
-  pass
-
-
-class ParallelState(object):
-  """Class to keep track of state when processing input files in parallel.
-
-  If build files are loaded in parallel, use this to keep track of
-  state during farming out and processing parallel jobs. It's stored
-  in a global so that the callback function can have access to it.
-  """
-
-  def __init__(self):
-    # The multiprocessing pool.
-    self.pool = None
-    # The condition variable used to protect this object and notify
-    # the main loop when there might be more data to process.
-    self.condition = None
-    # The "data" dict that was passed to LoadTargetBuildFileParallel
-    self.data = None
-    # The number of parallel calls outstanding; decremented when a response
-    # was received.
-    self.pending = 0
-    # The set of all build files that have been scheduled, so we don't
-    # schedule the same one twice.
-    self.scheduled = set()
-    # A list of dependency build file paths that haven't been scheduled yet.
-    self.dependencies = []
-    # Flag to indicate if there was an error in a child process.
-    self.error = False
-
-  def LoadTargetBuildFileCallback(self, result):
-    """Handle the results of running LoadTargetBuildFile in another process.
-    """
-    self.condition.acquire()
-    if not result:
-      self.error = True
-      self.condition.notify()
-      self.condition.release()
-      return
-    (build_file_path0, build_file_data0, dependencies0) = result
-    self.data[build_file_path0] = build_file_data0
-    self.data['target_build_files'].add(build_file_path0)
-    for new_dependency in dependencies0:
-      if new_dependency not in self.scheduled:
-        self.scheduled.add(new_dependency)
-        self.dependencies.append(new_dependency)
-    self.pending -= 1
-    self.condition.notify()
-    self.condition.release()
-
-
-def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
-                                 check, generator_input_info):
-  parallel_state = ParallelState()
-  parallel_state.condition = threading.Condition()
-  # Make copies of the build_files argument that we can modify while working.
-  parallel_state.dependencies = list(build_files)
-  parallel_state.scheduled = set(build_files)
-  parallel_state.pending = 0
-  parallel_state.data = data
-
-  try:
-    parallel_state.condition.acquire()
-    while parallel_state.dependencies or parallel_state.pending:
-      if parallel_state.error:
-        break
-      if not parallel_state.dependencies:
-        parallel_state.condition.wait()
-        continue
-
-      dependency = parallel_state.dependencies.pop()
-
-      parallel_state.pending += 1
-      global_flags = {
-        'path_sections': globals()['path_sections'],
-        'non_configuration_keys': globals()['non_configuration_keys'],
-        'multiple_toolsets': globals()['multiple_toolsets']}
-
-      if not parallel_state.pool:
-        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
-      parallel_state.pool.apply_async(
-          CallLoadTargetBuildFile,
-          args = (global_flags, dependency,
-                  variables, includes, depth, check, generator_input_info),
-          callback = parallel_state.LoadTargetBuildFileCallback)
-  except KeyboardInterrupt, e:
-    parallel_state.pool.terminate()
-    raise e
-
-  parallel_state.condition.release()
-
-  parallel_state.pool.close()
-  parallel_state.pool.join()
-  parallel_state.pool = None
-
-  if parallel_state.error:
-    sys.exit(1)
-
-# Look for the bracket that matches the first bracket seen in a
-# string, and return the start and end as a tuple.  For example, if
-# the input is something like "<(foo <(bar)) blah", then it would
-# return (1, 13), indicating the entire string except for the leading
-# "<" and trailing " blah".
-LBRACKETS = set('{[(')
-BRACKETS = {'}': '{', ']': '[', ')': '('}
-def FindEnclosingBracketGroup(input_str):
-  stack = []
-  start = -1
-  for index, char in enumerate(input_str):
-    if char in LBRACKETS:
-      stack.append(char)
-      if start == -1:
-        start = index
-    elif char in BRACKETS:
-      if not stack:
-        return (-1, -1)
-      if stack.pop() != BRACKETS[char]:
-        return (-1, -1)
-      if not stack:
-        return (start, index + 1)
-  return (-1, -1)
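# Hedged examples of the bracket matcher above, restating the case documented
# in the comment before LBRACKETS:
#
#   FindEnclosingBracketGroup('<(foo <(bar)) blah')  -> (1, 13)
#   FindEnclosingBracketGroup('no brackets at all')  -> (-1, -1)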
-
-
-def IsStrCanonicalInt(string):
-  """Returns True if |string| is in its canonical integer form.
-
-  The canonical form is such that str(int(string)) == string.
-  """
-  if type(string) is str:
-    # This function is called a lot so for maximum performance, avoid
-    # involving regexps which would otherwise make the code much
-    # shorter. Regexps would need twice the time of this function.
-    if string:
-      if string == "0":
-        return True
-      if string[0] == "-":
-        string = string[1:]
-        if not string:
-          return False
-      if '1' <= string[0] <= '9':
-        return string.isdigit()
-
-  return False
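# A few hedged examples of the canonical-int check above, following the
# str(int(string)) == string rule from its docstring:
#
#   IsStrCanonicalInt('0')    -> True
#   IsStrCanonicalInt('-42')  -> True
#   IsStrCanonicalInt('007')  -> False  (leading zero is not canonical)
#   IsStrCanonicalInt(' 7')   -> False  (leading whitespace)
#   IsStrCanonicalInt(7)      -> False  (only str input is considered)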
-
-
-# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
-# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
-# In the last case, the inner "<()" is captured in match['content'].
-early_variable_re = re.compile(
-    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '>' instead of '<'.
-late_variable_re = re.compile(
-    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '^' instead of '<'.
-latelate_variable_re = re.compile(
-    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
-    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
-    r'\((?P<is_array>\s*\[?)'
-    r'(?P<content>.*?)(\]?)\))')
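# Hedged illustration of the capture groups defined above (example strings
# invented for this sketch):
#
#   '<(foo)'                 -> type '<',   command_string None, content 'foo'
#   '<!@(python gen.py x)'   -> type '<!@', command_string None,
#                               content 'python gen.py x'
#   '<!pymod_do_main(m a b)' -> type '<!',  command_string 'pymod_do_main',
#                               content 'm a b'
#   '<([1, 2])'              -> type '<',   is_array '[', content '1, 2'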
-
-# Global cache of results from running commands so they don't have to be run
-# more than once.
-cached_command_results = {}
-
-
-def FixupPlatformCommand(cmd):
-  if sys.platform == 'win32':
-    if type(cmd) is list:
-      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
-    else:
-      cmd = re.sub('^cat ', 'type ', cmd)
-  return cmd
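# Hedged examples of the platform fixup above: only a leading "cat " (with the
# trailing space) is rewritten, and only on win32; everything else passes
# through unchanged.
#
#   FixupPlatformCommand('cat foo.txt')      -> 'type foo.txt'   (on win32)
#   FixupPlatformCommand(['cat', 'foo.txt']) -> ['cat', 'foo.txt']
#       (unchanged even on win32, because cmd[0] is 'cat' without a space)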
-
-
-PHASE_EARLY = 0
-PHASE_LATE = 1
-PHASE_LATELATE = 2
-
-
-def ExpandVariables(input, phase, variables, build_file):
-  # Look for the pattern that gets expanded into variables
-  if phase == PHASE_EARLY:
-    variable_re = early_variable_re
-    expansion_symbol = '<'
-  elif phase == PHASE_LATE:
-    variable_re = late_variable_re
-    expansion_symbol = '>'
-  elif phase == PHASE_LATELATE:
-    variable_re = latelate_variable_re
-    expansion_symbol = '^'
-  else:
-    assert False
-
-  input_str = str(input)
-  if IsStrCanonicalInt(input_str):
-    return int(input_str)
-
-  # Do a quick scan to determine if an expensive regex search is warranted.
-  if expansion_symbol not in input_str:
-    return input_str
-
-  # Get the entire list of matches as a list of MatchObject instances.
-  # (using findall here would return strings instead of MatchObjects).
-  matches = list(variable_re.finditer(input_str))
-  if not matches:
-    return input_str
-
-  output = input_str
-  # Reverse the list of matches so that replacements are done right-to-left.
-  # That ensures that earlier replacements won't mess up the string in a
-  # way that causes later calls to find the earlier substituted text instead
-  # of what's intended for replacement.
-  matches.reverse()
-  for match_group in matches:
-    match = match_group.groupdict()
-    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
-    # match['replace'] is the substring to look for, match['type']
-    # is the character code for the replacement type (< > <! >! <| >| <@
-    # >@ <!@ >!@), match['is_array'] contains a '[' for command
-    # arrays, and match['content'] is the name of the variable (< >)
-    # or command to run (<! >!). match['command_string'] is an optional
-    # command string. Currently, only 'pymod_do_main' is supported.
-
-    # run_command is true if a ! variant is used.
-    run_command = '!' in match['type']
-    command_string = match['command_string']
-
-    # file_list is true if a | variant is used.
-    file_list = '|' in match['type']
-
-    # Capture these now so we can adjust them later.
-    replace_start = match_group.start('replace')
-    replace_end = match_group.end('replace')
-
-    # Find the ending paren, and re-evaluate the contained string.
-    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
-
-    # Adjust the replacement range to match the entire command
-    # found by FindEnclosingBracketGroup (since the variable_re
-    # probably doesn't match the entire command if it contained
-    # nested variables).
-    replace_end = replace_start + c_end
-
-    # Find the "real" replacement, matching the appropriate closing
-    # paren, and adjust the replacement start and end.
-    replacement = input_str[replace_start:replace_end]
-
-    # Figure out what the contents of the variable parens are.
-    contents_start = replace_start + c_start + 1
-    contents_end = replace_end - 1
-    contents = input_str[contents_start:contents_end]
-
-    # Do filter substitution now for <|().
-    # Admittedly, this is different than the evaluation order in other
-    # contexts. However, since filtration has no chance to run on <|(),
-    # this seems like the only obvious way to give them access to filters.
-    if file_list:
-      processed_variables = gyp.simple_copy.deepcopy(variables)
-      ProcessListFiltersInDict(contents, processed_variables)
-      # Recurse to expand variables in the contents
-      contents = ExpandVariables(contents, phase,
-                                 processed_variables, build_file)
-    else:
-      # Recurse to expand variables in the contents
-      contents = ExpandVariables(contents, phase, variables, build_file)
-
-    # Strip off leading/trailing whitespace so that variable matches are
-    # simpler below (and because they are rarely needed).
-    contents = contents.strip()
-
-    # expand_to_list is true if an @ variant is used.  In that case,
-    # the expansion should result in a list.  Note that the caller
-    # must be expecting a list in return, which not all callers are,
-    # because not all are working in list context.  Also, for list
-    # expansions, there can be no other text besides the variable
-    # expansion in the input string.
-    expand_to_list = '@' in match['type'] and input_str == replacement
-
-    if run_command or file_list:
-      # Find the build file's directory, so commands can be run or file lists
-      # generated relative to it.
-      build_file_dir = os.path.dirname(build_file)
-      if build_file_dir == '' and not file_list:
-        # If build_file is just a leaf filename indicating a file in the
-        # current directory, build_file_dir might be an empty string.  Set
-        # it to None to signal to subprocess.Popen that it should run the
-        # command in the current directory.
-        build_file_dir = None
-
-    # Support <|(listfile.txt ...) which generates a file
-    # containing items from a gyp list, generated at gyp time.
-    # This works around actions/rules which have more inputs than will
-    # fit on the command line.
-    if file_list:
-      if type(contents) is list:
-        contents_list = contents
-      else:
-        contents_list = contents.split(' ')
-      replacement = contents_list[0]
-      if os.path.isabs(replacement):
-        raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
-
-      if not generator_filelist_paths:
-        path = os.path.join(build_file_dir, replacement)
-      else:
-        if os.path.isabs(build_file_dir):
-          toplevel = generator_filelist_paths['toplevel']
-          rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
-        else:
-          rel_build_file_dir = build_file_dir
-        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
-        path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
-        gyp.common.EnsureDirExists(path)
-
-      replacement = gyp.common.RelativePath(path, build_file_dir)
-      f = gyp.common.WriteOnDiff(path)
-      for i in contents_list[1:]:
-        f.write('%s\n' % i)
-      f.close()
-
-    elif run_command:
-      use_shell = True
-      if match['is_array']:
-        contents = eval(contents)
-        use_shell = False
-
-      # Check for a cached value to avoid executing commands, or generating
-      # file lists more than once. The cache key contains the command to be
-      # run as well as the directory to run it from, to account for commands
-      # that depend on their current directory.
-      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
-      # someone could author a set of GYP files where each time the command
-      # is invoked it produces different output by design. When the need
-      # arises, the syntax should be extended to support disabling caching of
-      # a command's output so that it is run every time.
-      cache_key = (str(contents), build_file_dir)
-      cached_value = cached_command_results.get(cache_key, None)
-      if cached_value is None:
-        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                        "Executing command '%s' in directory '%s'",
-                        contents, build_file_dir)
-
-        replacement = ''
-
-        if command_string == 'pymod_do_main':
-          # <!pymod_do_main(modulename param eters) loads |modulename| as a
-          # python module and then calls that module's DoMain() function,
-          # passing ["param", "eters"] as a single list argument. For modules
-          # that don't load quickly, this can be faster than
-          # <!(python modulename param eters). Do this in |build_file_dir|.
-          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
-          if build_file_dir:  # build_file_dir may be None (see above).
-            os.chdir(build_file_dir)
-          try:
-
-            parsed_contents = shlex.split(contents)
-            try:
-              py_module = __import__(parsed_contents[0])
-            except ImportError as e:
-              raise GypError("Error importing pymod_do_main"
-                             "module (%s): %s" % (parsed_contents[0], e))
-            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
-          finally:
-            os.chdir(oldwd)
-          assert replacement != None
-        elif command_string:
-          raise GypError("Unknown command string '%s' in '%s'." %
-                         (command_string, contents))
-        else:
-          # Fix up command with platform specific workarounds.
-          contents = FixupPlatformCommand(contents)
-          try:
-            p = subprocess.Popen(contents, shell=use_shell,
-                                 stdout=subprocess.PIPE,
-                                 stderr=subprocess.PIPE,
-                                 stdin=subprocess.PIPE,
-                                 cwd=build_file_dir)
-          except Exception, e:
-            raise GypError("%s while executing command '%s' in %s" %
-                           (e, contents, build_file))
-
-          p_stdout, p_stderr = p.communicate('')
-
-          if p.wait() != 0 or p_stderr:
-            sys.stderr.write(p_stderr)
-            # Simulate check_call behavior, since check_call only exists
-            # in python 2.5 and later.
-            raise GypError("Call to '%s' returned exit status %d while in %s." %
-                           (contents, p.returncode, build_file))
-          replacement = p_stdout.rstrip()
-
-        cached_command_results[cache_key] = replacement
-      else:
-        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                        "Had cache value for command '%s' in directory '%s'",
-                        contents, build_file_dir)
-        replacement = cached_value
-
-    else:
-      if not contents in variables:
-        if contents[-1] in ['!', '/']:
-          # In order to allow cross-compiles (nacl) to happen more naturally,
-          # we will allow references to >(sources/) etc. to resolve to
-          # an empty list if undefined. This allows actions to:
-          # 'action!': [
-          #   '>@(_sources!)',
-          # ],
-          # 'action/': [
-          #   '>@(_sources/)',
-          # ],
-          replacement = []
-        else:
-          raise GypError('Undefined variable ' + contents +
-                         ' in ' + build_file)
-      else:
-        replacement = variables[contents]
-
-    if type(replacement) is list:
-      for item in replacement:
-        if not contents[-1] == '/' and type(item) not in (str, int):
-          raise GypError('Variable ' + contents +
-                         ' must expand to a string or list of strings; ' +
-                         'list contains a ' +
-                         item.__class__.__name__)
-      # Run through the list and handle variable expansions in it.  Since
-      # the list is guaranteed not to contain dicts, this won't do anything
-      # with conditions sections.
-      ProcessVariablesAndConditionsInList(replacement, phase, variables,
-                                          build_file)
-    elif type(replacement) not in (str, int):
-      raise GypError('Variable ' + contents +
-                     ' must expand to a string or list of strings; ' +
-                     'found a ' + replacement.__class__.__name__)
-
-    if expand_to_list:
-      # Expanding in list context.  It's guaranteed that there's only one
-      # replacement to do in |input_str| and that it's this replacement.  See
-      # above.
-      if type(replacement) is list:
-        # If it's already a list, make a copy.
-        output = replacement[:]
-      else:
-        # Split it the same way sh would split arguments.
-        output = shlex.split(str(replacement))
-    else:
-      # Expanding in string context.
-      encoded_replacement = ''
-      if type(replacement) is list:
-        # When expanding a list into string context, turn the list items
-        # into a string in a way that will work with a subprocess call.
-        #
-        # TODO(mark): This isn't completely correct.  This should
-        # call a generator-provided function that observes the
-        # proper list-to-argument quoting rules on a specific
-        # platform instead of just calling the POSIX encoding
-        # routine.
-        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
-      else:
-        encoded_replacement = replacement
-
-      output = output[:replace_start] + str(encoded_replacement) + \
-               output[replace_end:]
-    # Prepare for the next match iteration.
-    input_str = output
-
-  if output == input:
-    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                    "Found only identity matches on %r, avoiding infinite "
-                    "recursion.",
-                    output)
-  else:
-    # Look for more matches now that we've replaced some, to deal with
-    # expanding local variables (variables defined in the same
-    # variables block as this one).
-    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
-    if type(output) is list:
-      if output and type(output[0]) is list:
-        # Leave output alone if it's a list of lists.
-        # We don't want such lists to be stringified.
-        pass
-      else:
-        new_output = []
-        for item in output:
-          new_output.append(
-              ExpandVariables(item, phase, variables, build_file))
-        output = new_output
-    else:
-      output = ExpandVariables(output, phase, variables, build_file)
-
-  # Convert all strings that are canonically-represented integers into integers.
-  if type(output) is list:
-    for index in xrange(0, len(output)):
-      if IsStrCanonicalInt(output[index]):
-        output[index] = int(output[index])
-  elif IsStrCanonicalInt(output):
-    output = int(output)
-
-  return output
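# A minimal hedged sketch of the expansion entry point above for the simplest
# forms (variable names and values invented for this example):
#
#   variables = {'foo': 'bar', 'srcs': ['a.cc', 'b.cc']}
#   ExpandVariables('<(foo)',   PHASE_EARLY, variables, 'x.gyp') -> 'bar'
#   ExpandVariables('-I<(foo)', PHASE_EARLY, variables, 'x.gyp') -> '-Ibar'
#   ExpandVariables('<@(srcs)', PHASE_EARLY, variables, 'x.gyp')
#       -> ['a.cc', 'b.cc']  (list context: '@' and nothing else in the input)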
-
-# The same condition is often evaluated over and over again so it
-# makes sense to cache as much as possible between evaluations.
-cached_conditions_asts = {}
-
-def EvalCondition(condition, conditions_key, phase, variables, build_file):
-  """Returns the dict that should be used or None if the result was
-  that nothing should be used."""
-  if type(condition) is not list:
-    raise GypError(conditions_key + ' must be a list')
-  if len(condition) < 2:
-    # It's possible that condition[0] won't work in which case this
-    # attempt will raise its own IndexError.  That's probably fine.
-    raise GypError(conditions_key + ' ' + condition[0] +
-                   ' must be at least length 2, not ' + str(len(condition)))
-
-  i = 0
-  result = None
-  while i < len(condition):
-    cond_expr = condition[i]
-    true_dict = condition[i + 1]
-    if type(true_dict) is not dict:
-      raise GypError('{} {} must be followed by a dictionary, not {}'.format(
-        conditions_key, cond_expr, type(true_dict)))
-    if len(condition) > i + 2 and type(condition[i + 2]) is dict:
-      false_dict = condition[i + 2]
-      i = i + 3
-      if i != len(condition):
-        raise GypError('{} {} has {} unexpected trailing items'.format(
-          conditions_key, cond_expr, len(condition) - i))
-    else:
-      false_dict = None
-      i = i + 2
-    if result == None:
-      result = EvalSingleCondition(
-          cond_expr, true_dict, false_dict, phase, variables, build_file)
-
-  return result
-
-
-def EvalSingleCondition(
-    cond_expr, true_dict, false_dict, phase, variables, build_file):
-  """Returns true_dict if cond_expr evaluates to true, and false_dict
-  otherwise."""
-  # Do expansions on the condition itself.  Since the condition can naturally
-  # contain variable references without needing to resort to GYP expansion
-  # syntax, this is of dubious value for variables, but someone might want to
-  # use a command expansion directly inside a condition.
-  cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
-                                       build_file)
-  if type(cond_expr_expanded) not in (str, int):
-    raise ValueError(
-          'Variable expansion in this context permits str and int ' + \
-            'only, found ' + cond_expr_expanded.__class__.__name__)
-
-  try:
-    if cond_expr_expanded in cached_conditions_asts:
-      ast_code = cached_conditions_asts[cond_expr_expanded]
-    else:
-      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
-      cached_conditions_asts[cond_expr_expanded] = ast_code
-    if eval(ast_code, {'__builtins__': None}, variables):
-      return true_dict
-    return false_dict
-  except SyntaxError, e:
-    syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
-                               'at character %d.' %
-                               (str(e.args[0]), e.text, build_file, e.offset),
-                               e.filename, e.lineno, e.offset, e.text)
-    raise syntax_error
-  except NameError, e:
-    gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
-                               (cond_expr_expanded, build_file))
-    raise GypError(e)
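# Hedged illustration of the condition shapes consumed above (keys and values
# invented for this sketch):
#
#   ['OS=="linux"', {'defines': ['LINUX']}]
#       -> the true_dict is merged when the expression evaluates true
#   ['OS=="win"', {'defines': ['WIN']}, {'defines': ['NOT_WIN']}]
#       -> an optional third dict is merged when the expression is false
#   ['OS=="mac"', {...}, 'OS=="ios"', {...}, {...}]
#       -> longer lists act like an if/elif/else chain; the first expression
#          that yields a dict wins (see the result == None check above)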
-
-
-def ProcessConditionsInDict(the_dict, phase, variables, build_file):
-  # Process a 'conditions' or 'target_conditions' section in the_dict,
-  # depending on phase.
-  # early -> conditions
-  # late -> target_conditions
-  # latelate -> no conditions
-  #
-  # Each item in a conditions list consists of cond_expr, a string expression
-  # evaluated as the condition, and true_dict, a dict that will be merged into
-  # the_dict if cond_expr evaluates to true.  Optionally, a third item,
-  # false_dict, may be present.  false_dict is merged into the_dict if
-  # cond_expr evaluates to false.
-  #
-  # Any dict merged into the_dict will be recursively processed for nested
-  # conditionals and other expansions, also according to phase, immediately
-  # prior to being merged.
-
-  if phase == PHASE_EARLY:
-    conditions_key = 'conditions'
-  elif phase == PHASE_LATE:
-    conditions_key = 'target_conditions'
-  elif phase == PHASE_LATELATE:
-    return
-  else:
-    assert False
-
-  if not conditions_key in the_dict:
-    return
-
-  conditions_list = the_dict[conditions_key]
-  # Unhook the conditions list, it's no longer needed.
-  del the_dict[conditions_key]
-
-  for condition in conditions_list:
-    merge_dict = EvalCondition(condition, conditions_key, phase, variables,
-                               build_file)
-
-    if merge_dict != None:
-      # Expand variables and nested conditionals in the merge_dict before
-      # merging it.
-      ProcessVariablesAndConditionsInDict(merge_dict, phase,
-                                          variables, build_file)
-
-      MergeDicts(the_dict, merge_dict, build_file, build_file)
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
-  # Any keys with plain string values in the_dict become automatic variables.
-  # The variable name is the key name with a "_" character prepended.
-  for key, value in the_dict.iteritems():
-    if type(value) in (str, int, list):
-      variables['_' + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
-  # Any key in the_dict's "variables" dict, if it has one, becomes a
-  # variable.  The variable name is the key name in the "variables" dict.
-  # Variables that end with the % character are set only if they are unset in
-  # the variables dict.  the_dict_key is the name of the key that accesses
-  # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
-  # (it could be a list or it could be parentless because it is a root dict),
-  # the_dict_key will be None.
-  for key, value in the_dict.get('variables', {}).iteritems():
-    if type(value) not in (str, int, list):
-      continue
-
-    if key.endswith('%'):
-      variable_name = key[:-1]
-      if variable_name in variables:
-        # If the variable is already set, don't set it.
-        continue
-      if the_dict_key == 'variables' and variable_name in the_dict:
-        # If the variable is set without a % in the_dict, and the_dict is a
-        # variables dict (making |variables| a variables sub-dict of a
-        # variables dict), use the_dict's definition.
-        value = the_dict[variable_name]
-    else:
-      variable_name = key
-
-    variables[variable_name] = value
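# Hedged example of the '%' handling above (variable name invented): an entry
# {'use_goma%': 0} in a 'variables' dict only sets 'use_goma' when it is not
# already present in |variables| (e.g. set by an enclosing scope), whereas a
# plain {'use_goma': 1} always overrides.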
-
-
-def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
-                                        build_file, the_dict_key=None):
-  """Handle all variable and command expansion and conditional evaluation.
-
-  This function is the public entry point for all variable expansions and
-  conditional evaluations.  The variables_in dictionary will not be modified
-  by this function.
-  """
-
-  # Make a copy of the variables_in dict that can be modified during the
-  # loading of automatics and the loading of the variables dict.
-  variables = variables_in.copy()
-  LoadAutomaticVariablesFromDict(variables, the_dict)
-
-  if 'variables' in the_dict:
-    # Make sure all the local variables are added to the variables
-    # list before we process them so that you can reference one
-    # variable from another.  They will be fully expanded by recursion
-    # in ExpandVariables.
-    for key, value in the_dict['variables'].iteritems():
-      variables[key] = value
-
-    # Handle the associated variables dict first, so that any variable
-    # references within can be resolved prior to using them as variables.
-    # Pass a copy of the variables dict to avoid having it be tainted.
-    # Otherwise, it would have extra automatics added for everything that
-    # should just be an ordinary variable in this scope.
-    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
-                                        variables, build_file, 'variables')
-
-  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
-  for key, value in the_dict.iteritems():
-    # Skip "variables", which was already processed if present.
-    if key != 'variables' and type(value) is str:
-      expanded = ExpandVariables(value, phase, variables, build_file)
-      if type(expanded) not in (str, int):
-        raise ValueError(
-              'Variable expansion in this context permits str and int ' + \
-              'only, found ' + expanded.__class__.__name__ + ' for ' + key)
-      the_dict[key] = expanded
-
-  # Variable expansion may have resulted in changes to automatics.  Reload.
-  # TODO(mark): Optimization: only reload if no changes were made.
-  variables = variables_in.copy()
-  LoadAutomaticVariablesFromDict(variables, the_dict)
-  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
-  # Process conditions in this dict.  This is done after variable expansion
-  # so that conditions may take advantage of expanded variables.  For example,
-  # if the_dict contains:
-  #   {'type':       '<(library_type)',
-  #    'conditions': [['_type=="static_library"', { ... }]]},
-  # _type, as used in the condition, will only be set to the value of
-  # library_type if variable expansion is performed before condition
-  # processing.  However, condition processing should occur prior to recursion
-  # so that variables (both automatic and "variables" dict type) may be
-  # adjusted by conditions sections, merged into the_dict, and have the
-  # intended impact on contained dicts.
-  #
-  # This arrangement means that a "conditions" section containing a "variables"
-  # section will only have those variables effective in subdicts, not in
-  # the_dict.  The workaround is to put a "conditions" section within a
-  # "variables" section.  For example:
-  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
-  #    'defines':    ['<(define)'],
-  #    'my_subdict': {'defines': ['<(define)']}},
-  # will not result in "IS_MAC" being appended to the "defines" list in the
-  # current scope but would result in it being appended to the "defines" list
-  # within "my_subdict".  By comparison:
-  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
-  #    'defines':    ['<(define)'],
-  #    'my_subdict': {'defines': ['<(define)']}},
-  # will append "IS_MAC" to both "defines" lists.
-
-  # Evaluate conditions sections, allowing variable expansions within them
-  # as well as nested conditionals.  This will process a 'conditions' or
-  # 'target_conditions' section, perform appropriate merging and recursive
-  # conditional and variable processing, and then remove the conditions section
-  # from the_dict if it is present.
-  ProcessConditionsInDict(the_dict, phase, variables, build_file)
-
-  # Conditional processing may have resulted in changes to automatics or the
-  # variables dict.  Reload.
-  variables = variables_in.copy()
-  LoadAutomaticVariablesFromDict(variables, the_dict)
-  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
-  # Recurse into child dicts, or process child lists which may result in
-  # further recursion into descendant dicts.
-  for key, value in the_dict.iteritems():
-    # Skip "variables" and string values, which were already processed if
-    # present.
-    if key == 'variables' or type(value) is str:
-      continue
-    if type(value) is dict:
-      # Pass a copy of the variables dict so that subdicts can't influence
-      # parents.
-      ProcessVariablesAndConditionsInDict(value, phase, variables,
-                                          build_file, key)
-    elif type(value) is list:
-      # The list itself can't influence the variables dict, and
-      # ProcessVariablesAndConditionsInList will make copies of the variables
-      # dict if it needs to pass it to something that can influence it.  No
-      # copy is necessary here.
-      ProcessVariablesAndConditionsInList(value, phase, variables,
-                                          build_file)
-    elif type(value) is not int:
-      raise TypeError('Unknown type ' + value.__class__.__name__ + \
-                      ' for ' + key)
-
-
-def ProcessVariablesAndConditionsInList(the_list, phase, variables,
-                                        build_file):
-  # Iterate using an index so that new values can be assigned into the_list.
-  index = 0
-  while index < len(the_list):
-    item = the_list[index]
-    if type(item) is dict:
-      # Make a copy of the variables dict so that it won't influence anything
-      # outside of its own scope.
-      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
-    elif type(item) is list:
-      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
-    elif type(item) is str:
-      expanded = ExpandVariables(item, phase, variables, build_file)
-      if type(expanded) in (str, int):
-        the_list[index] = expanded
-      elif type(expanded) is list:
-        the_list[index:index+1] = expanded
-        index += len(expanded)
-
-        # index now identifies the next item to examine.  Continue right now
-        # without falling into the index increment below.
-        continue
-      else:
-        raise ValueError(
-              'Variable expansion in this context permits strings and ' + \
-              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
-              str(index))
-    elif type(item) is not int:
-      raise TypeError('Unknown type ' + item.__class__.__name__ + \
-                      ' at index ' + str(index))
-    index = index + 1
-
-
-def BuildTargetsDict(data):
-  """Builds a dict mapping fully-qualified target names to their target dicts.
-
-  |data| is a dict mapping loaded build files by pathname relative to the
-  current directory.  Values in |data| are build file contents.  For each
-  |data| value with a "targets" key, the value of the "targets" key is taken
-  as a list containing target dicts.  Each target's fully-qualified name is
-  constructed from the pathname of the build file (|data| key) and its
-  "target_name" property.  These fully-qualified names are used as the keys
-  in the returned dict.  These keys provide access to the target dicts,
-  the dicts in the "targets" lists.
-  """
-
-  targets = {}
-  for build_file in data['target_build_files']:
-    for target in data[build_file].get('targets', []):
-      target_name = gyp.common.QualifiedTarget(build_file,
-                                               target['target_name'],
-                                               target['toolset'])
-      if target_name in targets:
-        raise GypError('Duplicate target definitions for ' + target_name)
-      targets[target_name] = target
-
-  return targets
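# Hedged note on the keys produced above: gyp.common.QualifiedTarget is
# expected to join its pieces as '<build_file>:<target_name>#<toolset>', e.g.
# (path and names invented) 'src/d8.gyp:d8#target'.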
-
-
-def QualifyDependencies(targets):
-  """Make dependency links fully-qualified relative to the current directory.
-
-  |targets| is a dict mapping fully-qualified target names to their target
-  dicts.  For each target in this dict, keys known to contain dependency
-  links are examined, and any dependencies referenced will be rewritten
-  so that they are fully-qualified and relative to the current directory.
-  All rewritten dependencies are suitable for use as keys to |targets| or a
-  similar dict.
-  """
-
-  all_dependency_sections = [dep + op
-                             for dep in dependency_sections
-                             for op in ('', '!', '/')]
-
-  for target, target_dict in targets.iteritems():
-    target_build_file = gyp.common.BuildFile(target)
-    toolset = target_dict['toolset']
-    for dependency_key in all_dependency_sections:
-      dependencies = target_dict.get(dependency_key, [])
-      for index in xrange(0, len(dependencies)):
-        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
-            target_build_file, dependencies[index], toolset)
-        if not multiple_toolsets:
-          # Ignore toolset specification in the dependency if it is specified.
-          dep_toolset = toolset
-        dependency = gyp.common.QualifiedTarget(dep_file,
-                                                dep_target,
-                                                dep_toolset)
-        dependencies[index] = dependency
-
-        # Make sure anything appearing in a list other than "dependencies" also
-        # appears in the "dependencies" list.
-        if dependency_key != 'dependencies' and \
-           dependency not in target_dict['dependencies']:
-          raise GypError('Found ' + dependency + ' in ' + dependency_key +
-                         ' of ' + target + ', but not in dependencies')
-
-
-def ExpandWildcardDependencies(targets, data):
-  """Expands dependencies specified as build_file:*.
-
-  For each target in |targets|, examines sections containing links to other
-  targets.  If any such section contains a link of the form build_file:*, it
-  is taken as a wildcard link, and is expanded to list each target in
-  build_file.  The |data| dict provides access to build file dicts.
-
-  Any target that does not wish to be included by wildcard can provide an
-  optional "suppress_wildcard" key in its target dict.  When present and
-  true, a wildcard dependency link will not include such targets.
-
-  All dependency names, including the keys to |targets| and the values in each
-  dependency list, must be qualified when this function is called.
-  """
-
-  for target, target_dict in targets.iteritems():
-    toolset = target_dict['toolset']
-    target_build_file = gyp.common.BuildFile(target)
-    for dependency_key in dependency_sections:
-      dependencies = target_dict.get(dependency_key, [])
-
-      # Loop this way instead of "for dependency in" or "for index in xrange"
-      # because the dependencies list will be modified within the loop body.
-      index = 0
-      while index < len(dependencies):
-        (dependency_build_file, dependency_target, dependency_toolset) = \
-            gyp.common.ParseQualifiedTarget(dependencies[index])
-        if dependency_target != '*' and dependency_toolset != '*':
-          # Not a wildcard.  Keep it moving.
-          index = index + 1
-          continue
-
-        if dependency_build_file == target_build_file:
-          # It's an error for a target to depend on all other targets in
-          # the same file, because a target cannot depend on itself.
-          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
-                         target + ' referring to same build file')
-
-        # Take the wildcard out and adjust the index so that the next
-        # dependency in the list will be processed the next time through the
-        # loop.
-        del dependencies[index]
-        index = index - 1
-
-        # Loop through the targets in the other build file, adding them to
-        # this target's list of dependencies in place of the removed
-        # wildcard.
-        dependency_target_dicts = data[dependency_build_file]['targets']
-        for dependency_target_dict in dependency_target_dicts:
-          if int(dependency_target_dict.get('suppress_wildcard', False)):
-            continue
-          dependency_target_name = dependency_target_dict['target_name']
-          if (dependency_target != '*' and
-              dependency_target != dependency_target_name):
-            continue
-          dependency_target_toolset = dependency_target_dict['toolset']
-          if (dependency_toolset != '*' and
-              dependency_toolset != dependency_target_toolset):
-            continue
-          dependency = gyp.common.QualifiedTarget(dependency_build_file,
-                                                  dependency_target_name,
-                                                  dependency_target_toolset)
-          index = index + 1
-          dependencies.insert(index, dependency)
-
-        index = index + 1
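# Hedged example of the wildcard expansion above (file and target names
# invented): a qualified dependency 'base/base.gyp:*#target' is replaced by
# one entry per target defined in base/base.gyp that does not set
# 'suppress_wildcard', e.g. 'base/base.gyp:base#target' and
# 'base/base.gyp:base_unittests#target'.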
-
-
-def Unify(l):
-  """Removes duplicate elements from l, keeping the first element."""
-  seen = {}
-  return [seen.setdefault(e, e) for e in l if e not in seen]
-
-
-def RemoveDuplicateDependencies(targets):
-  """Makes sure every dependency appears only once in all targets's dependency
-  lists."""
-  for target_name, target_dict in targets.iteritems():
-    for dependency_key in dependency_sections:
-      dependencies = target_dict.get(dependency_key, [])
-      if dependencies:
-        target_dict[dependency_key] = Unify(dependencies)
-
-
-def Filter(l, item):
-  """Removes item from l."""
-  res = {}
-  return [res.setdefault(e, e) for e in l if e != item]
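# Hedged examples for the two small list helpers above (order-preserving
# de-duplication and removal of a single value):
#
#   Unify(['a', 'b', 'a', 'c'])       -> ['a', 'b', 'c']
#   Filter(['a', 'b', 'a', 'c'], 'a') -> ['b', 'c']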
-
-
-def RemoveSelfDependencies(targets):
-  """Remove self dependencies from targets that have the prune_self_dependency
-  variable set."""
-  for target_name, target_dict in targets.iteritems():
-    for dependency_key in dependency_sections:
-      dependencies = target_dict.get(dependency_key, [])
-      if dependencies:
-        for t in dependencies:
-          if t == target_name:
-            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
-              target_dict[dependency_key] = Filter(dependencies, target_name)
-
-
-def RemoveLinkDependenciesFromNoneTargets(targets):
-  """Remove dependencies having the 'link_dependency' attribute from the 'none'
-  targets."""
-  for target_name, target_dict in targets.iteritems():
-    for dependency_key in dependency_sections:
-      dependencies = target_dict.get(dependency_key, [])
-      if dependencies:
-        for t in dependencies:
-          if target_dict.get('type', None) == 'none':
-            if targets[t].get('variables', {}).get('link_dependency', 0):
-              target_dict[dependency_key] = \
-                  Filter(target_dict[dependency_key], t)
-
-
-class DependencyGraphNode(object):
-  """
-
-  Attributes:
-    ref: A reference to an object that this DependencyGraphNode represents.
-    dependencies: List of DependencyGraphNodes on which this one depends.
-    dependents: List of DependencyGraphNodes that depend on this one.
-  """
-
-  class CircularException(GypError):
-    pass
-
-  def __init__(self, ref):
-    self.ref = ref
-    self.dependencies = []
-    self.dependents = []
-
-  def __repr__(self):
-    return '<DependencyGraphNode: %r>' % self.ref
-
-  def FlattenToList(self):
-    # flat_list is the sorted list of dependencies - actually, the list items
-    # are the "ref" attributes of DependencyGraphNodes.  Every target will
-    # appear in flat_list after all of its dependencies, and before all of its
-    # dependents.
-    flat_list = OrderedSet()
-
-    def ExtractNodeRef(node):
-      """Extracts the object that the node represents from the given node."""
-      return node.ref
-
-    # in_degree_zeros is the list of DependencyGraphNodes that have no
-    # dependencies not in flat_list.  Initially, it is a copy of the children
-    # of this node, because when the graph was built, nodes with no
-    # dependencies were made implicit dependents of the root node.
-    in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
-
-    while in_degree_zeros:
-      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
-      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
-      # as work progresses, so that the next node to process from the list can
-      # always be accessed at a consistent position.
-      node = in_degree_zeros.pop()
-      flat_list.add(node.ref)
-
-      # Look at dependents of the node just added to flat_list.  Some of them
-      # may now belong in in_degree_zeros.
-      for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
-        is_in_degree_zero = True
-        # TODO: We want to check through the
-        # node_dependent.dependencies list but if it's long and we
-        # always start at the beginning, then we get O(n^2) behaviour.
-        for node_dependent_dependency in (sorted(node_dependent.dependencies,
-                                                 key=ExtractNodeRef)):
-          if not node_dependent_dependency.ref in flat_list:
-            # The dependent has one or more dependencies not in flat_list.  There
-            # will be more chances to add it to flat_list when examining
-            # it again as a dependent of those other dependencies, provided
-            # that there are no cycles.
-            is_in_degree_zero = False
-            break
-
-        if is_in_degree_zero:
-          # All of the dependent's dependencies are already in flat_list.  Add
-          # it to in_degree_zeros where it will be processed in a future
-          # iteration of the outer loop.
-          in_degree_zeros += [node_dependent]
-
-    return list(flat_list)
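  # Hedged sketch of the flattening above for a tiny graph (target names
  # invented): if A depends on B and B depends on C, and C is therefore the
  # only dependent of the root node, FlattenToList() on the root is expected
  # to yield ['C', 'B', 'A']: every ref appears after all of its dependencies
  # and before all of its dependents.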
-
-  def FindCycles(self):
-    """
-    Returns a list of cycles in the graph, where each cycle is its own list.
-    """
-    results = []
-    visited = set()
-
-    def Visit(node, path):
-      for child in node.dependents:
-        if child in path:
-          results.append([child] + path[:path.index(child) + 1])
-        elif not child in visited:
-          visited.add(child)
-          Visit(child, [child] + path)
-
-    visited.add(self)
-    Visit(self, [self])
-
-    return results
-
-  def DirectDependencies(self, dependencies=None):
-    """Returns a list of just direct dependencies."""
-    if dependencies == None:
-      dependencies = []
-
-    for dependency in self.dependencies:
-      # Check for None, corresponding to the root node.
-      if dependency.ref != None and dependency.ref not in dependencies:
-        dependencies.append(dependency.ref)
-
-    return dependencies
-
-  def _AddImportedDependencies(self, targets, dependencies=None):
-    """Given a list of direct dependencies, adds indirect dependencies that
-    other dependencies have declared to export their settings.
-
-    This method does not operate on self.  Rather, it operates on the list
-    of dependencies in the |dependencies| argument.  For each dependency in
-    that list, if any declares that it exports the settings of one of its
-    own dependencies, those dependencies whose settings are "passed through"
-    are added to the list.  As new items are added to the list, they too will
-    be processed, so it is possible to import settings through multiple levels
-    of dependencies.
-
-    This method is not terribly useful on its own; it depends on being
-    "primed" with a list of direct dependencies such as one provided by
-    DirectDependencies.  DirectAndImportedDependencies is intended to be the
-    public entry point.
-    """
-
-    if dependencies == None:
-      dependencies = []
-
-    index = 0
-    while index < len(dependencies):
-      dependency = dependencies[index]
-      dependency_dict = targets[dependency]
-      # Add any dependencies whose settings should be imported to the list
-      # if not already present.  Newly-added items will be checked for
-      # their own imports when the list iteration reaches them.
-      # Rather than simply appending new items, insert them after the
-      # dependency that exported them.  This is done to more closely match
-      # the depth-first method used by DeepDependencies.
-      add_index = 1
-      for imported_dependency in \
-          dependency_dict.get('export_dependent_settings', []):
-        if imported_dependency not in dependencies:
-          dependencies.insert(index + add_index, imported_dependency)
-          add_index = add_index + 1
-      index = index + 1
-
-    return dependencies
-
-  def DirectAndImportedDependencies(self, targets, dependencies=None):
-    """Returns a list of a target's direct dependencies and all indirect
-    dependencies that a dependency has advertised settings should be exported
-    through the dependency for.
-    """
-
-    dependencies = self.DirectDependencies(dependencies)
-    return self._AddImportedDependencies(targets, dependencies)
-
-  def DeepDependencies(self, dependencies=None):
-    """Returns an OrderedSet of all of a target's dependencies, recursively."""
-    if dependencies is None:
-      # Using a list to get ordered output and a set to do fast "is it
-      # already added" checks.
-      dependencies = OrderedSet()
-
-    for dependency in self.dependencies:
-      # Check for None, corresponding to the root node.
-      if dependency.ref is None:
-        continue
-      if dependency.ref not in dependencies:
-        dependency.DeepDependencies(dependencies)
-        dependencies.add(dependency.ref)
-
-    return dependencies
-
-  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
-                                dependencies=None, initial=True):
-    """Returns an OrderedSet of dependency targets that are linked
-    into this target.
-
-    This function has a split personality, depending on the setting of
-    |initial|.  Outside callers should always leave |initial| at its default
-    setting.
-
-    When adding a target to the list of dependencies, this function will
-    recurse into itself with |initial| set to False, to collect dependencies
-    that are linked into the linkable target for which the list is being built.
-
-    If |include_shared_libraries| is False, the resulting dependencies will not
-    include shared_library targets that are linked into this target.
-    """
-    if dependencies is None:
-      # Using a list to get ordered output and a set to do fast "is it
-      # already added" checks.
-      dependencies = OrderedSet()
-
-    # Check for None, corresponding to the root node.
-    if self.ref is None:
-      return dependencies
-
-    # It's kind of sucky that |targets| has to be passed into this function,
-    # but that's presently the easiest way to access the target dicts so that
-    # this function can find target types.
-
-    if 'target_name' not in targets[self.ref]:
-      raise GypError("Missing 'target_name' field in target.")
-
-    if 'type' not in targets[self.ref]:
-      raise GypError("Missing 'type' field in target %s" %
-                     targets[self.ref]['target_name'])
-
-    target_type = targets[self.ref]['type']
-
-    is_linkable = target_type in linkable_types
-
-    if initial and not is_linkable:
-      # If this is the first target being examined and it's not linkable,
-      # return an empty list of link dependencies, because the link
-      # dependencies are intended to apply to the target itself (initial is
-      # True) and this target won't be linked.
-      return dependencies
-
-    # Don't traverse 'none' targets if explicitly excluded.
-    if (target_type == 'none' and
-        not targets[self.ref].get('dependencies_traverse', True)):
-      dependencies.add(self.ref)
-      return dependencies
-
-    # Executables, mac kernel extensions and loadable modules are already fully
-    # and finally linked. Nothing else can be a link dependency of them, there
-    # can only be dependencies in the sense that a dependent target might run
-    # an executable or load the loadable_module.
-    if not initial and target_type in ('executable', 'loadable_module',
-                                       'mac_kernel_extension'):
-      return dependencies
-
-    # Shared libraries are already fully linked.  They should only be included
-    # in |dependencies| when adjusting static library dependencies (in order to
-    # link against the shared_library's import lib), but should not be included
-    # in |dependencies| when propagating link_settings.
-    # The |include_shared_libraries| flag controls which of these two cases we
-    # are handling.
-    if (not initial and target_type == 'shared_library' and
-        not include_shared_libraries):
-      return dependencies
-
-    # The target is linkable, add it to the list of link dependencies.
-    if self.ref not in dependencies:
-      dependencies.add(self.ref)
-      if initial or not is_linkable:
-        # If this is a subsequent target and it's linkable, don't look any
-        # further for linkable dependencies, as they'll already be linked into
-        # this linkable target.  Always look at dependencies of the initial
-        # target, and always look at dependencies of non-linkables.
-        for dependency in self.dependencies:
-          dependency._LinkDependenciesInternal(targets,
-                                               include_shared_libraries,
-                                               dependencies, False)
-
-    return dependencies
-
-  def DependenciesForLinkSettings(self, targets):
-    """
-    Returns a list of dependency targets whose link_settings should be merged
-    into this target.
-    """
-
-    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
-    # link_settings are propagated.  So for now, we will allow it, unless the
-    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
-    # False.  Once chrome is fixed, we can remove this flag.
-    include_shared_libraries = \
-        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
-    return self._LinkDependenciesInternal(targets, include_shared_libraries)
-
-  def DependenciesToLinkAgainst(self, targets):
-    """
-    Returns a list of dependency targets that are linked into this target.
-    """
-    return self._LinkDependenciesInternal(targets, True)
-
-
-def BuildDependencyList(targets):
-  # Create a DependencyGraphNode for each target.  Put it into a dict for easy
-  # access.
-  dependency_nodes = {}
-  for target, spec in targets.iteritems():
-    if target not in dependency_nodes:
-      dependency_nodes[target] = DependencyGraphNode(target)
-
-  # Set up the dependency links.  Targets that have no dependencies are treated
-  # as dependent on root_node.
-  root_node = DependencyGraphNode(None)
-  for target, spec in targets.iteritems():
-    target_node = dependency_nodes[target]
-    target_build_file = gyp.common.BuildFile(target)
-    dependencies = spec.get('dependencies')
-    if not dependencies:
-      target_node.dependencies = [root_node]
-      root_node.dependents.append(target_node)
-    else:
-      for dependency in dependencies:
-        dependency_node = dependency_nodes.get(dependency)
-        if not dependency_node:
-          raise GypError("Dependency '%s' not found while "
-                         "trying to load target %s" % (dependency, target))
-        target_node.dependencies.append(dependency_node)
-        dependency_node.dependents.append(target_node)
-
-  flat_list = root_node.FlattenToList()
-
-  # If there's anything left unvisited, there must be a circular dependency
-  # (cycle).
-  if len(flat_list) != len(targets):
-    if not root_node.dependents:
-      # If all targets have dependencies, add the first target as a dependent
-      # of root_node so that the cycle can be discovered from root_node.
-      target = targets.keys()[0]
-      target_node = dependency_nodes[target]
-      target_node.dependencies.append(root_node)
-      root_node.dependents.append(target_node)
-
-    cycles = []
-    for cycle in root_node.FindCycles():
-      paths = [node.ref for node in cycle]
-      cycles.append('Cycle: %s' % ' -> '.join(paths))
-    raise DependencyGraphNode.CircularException(
-        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
-
-  return [dependency_nodes, flat_list]
-
-
-def VerifyNoGYPFileCircularDependencies(targets):
-  # Create a DependencyGraphNode for each gyp file containing a target.  Put
-  # it into a dict for easy access.
-  dependency_nodes = {}
-  for target in targets.iterkeys():
-    build_file = gyp.common.BuildFile(target)
-    if not build_file in dependency_nodes:
-      dependency_nodes[build_file] = DependencyGraphNode(build_file)
-
-  # Set up the dependency links.
-  for target, spec in targets.iteritems():
-    build_file = gyp.common.BuildFile(target)
-    build_file_node = dependency_nodes[build_file]
-    target_dependencies = spec.get('dependencies', [])
-    for dependency in target_dependencies:
-      try:
-        dependency_build_file = gyp.common.BuildFile(dependency)
-      except GypError, e:
-        gyp.common.ExceptionAppend(
-            e, 'while computing dependencies of .gyp file %s' % build_file)
-        raise
-
-      if dependency_build_file == build_file:
-        # A .gyp file is allowed to refer back to itself.
-        continue
-      dependency_node = dependency_nodes.get(dependency_build_file)
-      if not dependency_node:
-        raise GypError("Dependency '%s' not found" % dependency_build_file)
-      if dependency_node not in build_file_node.dependencies:
-        build_file_node.dependencies.append(dependency_node)
-        dependency_node.dependents.append(build_file_node)
-
-
-  # Files that have no dependencies are treated as dependent on root_node.
-  root_node = DependencyGraphNode(None)
-  for build_file_node in dependency_nodes.itervalues():
-    if len(build_file_node.dependencies) == 0:
-      build_file_node.dependencies.append(root_node)
-      root_node.dependents.append(build_file_node)
-
-  flat_list = root_node.FlattenToList()
-
-  # If there's anything left unvisited, there must be a circular dependency
-  # (cycle).
-  if len(flat_list) != len(dependency_nodes):
-    if not root_node.dependents:
-      # If all files have dependencies, add the first file as a dependent
-      # of root_node so that the cycle can be discovered from root_node.
-      file_node = dependency_nodes.values()[0]
-      file_node.dependencies.append(root_node)
-      root_node.dependents.append(file_node)
-    cycles = []
-    for cycle in root_node.FindCycles():
-      paths = [node.ref for node in cycle]
-      cycles.append('Cycle: %s' % ' -> '.join(paths))
-    raise DependencyGraphNode.CircularException(
-        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
-
-
-def DoDependentSettings(key, flat_list, targets, dependency_nodes):
-  # key should be one of all_dependent_settings, direct_dependent_settings,
-  # or link_settings.
-
-  for target in flat_list:
-    target_dict = targets[target]
-    build_file = gyp.common.BuildFile(target)
-
-    if key == 'all_dependent_settings':
-      dependencies = dependency_nodes[target].DeepDependencies()
-    elif key == 'direct_dependent_settings':
-      dependencies = \
-          dependency_nodes[target].DirectAndImportedDependencies(targets)
-    elif key == 'link_settings':
-      dependencies = \
-          dependency_nodes[target].DependenciesForLinkSettings(targets)
-    else:
-      raise GypError("DoDependentSettings doesn't know how to determine "
-                      'dependencies for ' + key)
-
-    for dependency in dependencies:
-      dependency_dict = targets[dependency]
-      if not key in dependency_dict:
-        continue
-      dependency_build_file = gyp.common.BuildFile(dependency)
-      MergeDicts(target_dict, dependency_dict[key],
-                 build_file, dependency_build_file)
-
-
-def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
-                                    sort_dependencies):
-  # Recompute target "dependencies" properties.  For each static library
-  # target, remove "dependencies" entries referring to other static libraries,
-  # unless the dependency has the "hard_dependency" attribute set.  For each
-  # linkable target, add a "dependencies" entry referring to all of the
-  # target's computed list of link dependencies (including static libraries)
-  # if no such entry is already present.
-  for target in flat_list:
-    target_dict = targets[target]
-    target_type = target_dict['type']
-
-    if target_type == 'static_library':
-      if not 'dependencies' in target_dict:
-        continue
-
-      target_dict['dependencies_original'] = target_dict.get(
-          'dependencies', [])[:]
-
-      # A static library should not depend on another static library unless
-      # the dependency relationship is "hard," which should only be done when
-      # a dependent relies on some side effect other than just the build
-      # product, like a rule or action output. Further, if a target has a
-      # non-hard dependency, but that dependency exports a hard dependency,
-      # the non-hard dependency can safely be removed, but the exported hard
-      # dependency must be added to the target to keep the same dependency
-      # ordering.
-      dependencies = \
-          dependency_nodes[target].DirectAndImportedDependencies(targets)
-      index = 0
-      while index < len(dependencies):
-        dependency = dependencies[index]
-        dependency_dict = targets[dependency]
-
-        # Remove every non-hard static library dependency and remove every
-        # non-static library dependency that isn't a direct dependency.
-        if (dependency_dict['type'] == 'static_library' and \
-            not dependency_dict.get('hard_dependency', False)) or \
-           (dependency_dict['type'] != 'static_library' and \
-            not dependency in target_dict['dependencies']):
-          # Take the dependency out of the list, and don't increment index
-          # because the next dependency to analyze will shift into the index
-          # formerly occupied by the one being removed.
-          del dependencies[index]
-        else:
-          index = index + 1
-
-      # Update the dependencies. If the dependencies list is empty, it's not
-      # needed, so unhook it.
-      if len(dependencies) > 0:
-        target_dict['dependencies'] = dependencies
-      else:
-        del target_dict['dependencies']
-
-    elif target_type in linkable_types:
-      # Get a list of dependency targets that should be linked into this
-      # target.  Add them to the dependencies list if they're not already
-      # present.
-
-      link_dependencies = \
-          dependency_nodes[target].DependenciesToLinkAgainst(targets)
-      for dependency in link_dependencies:
-        if dependency == target:
-          continue
-        if not 'dependencies' in target_dict:
-          target_dict['dependencies'] = []
-        if not dependency in target_dict['dependencies']:
-          target_dict['dependencies'].append(dependency)
-      # Sort the dependencies list in the order from dependents to dependencies.
-      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
-      # Note: flat_list is already sorted in the order from dependencies to
-      # dependents.
-      if sort_dependencies and 'dependencies' in target_dict:
-        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
-                                       if dep in target_dict['dependencies']]
-
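For reference, a hypothetical before/after pair illustrating the adjustment performed by AdjustStaticLibraryDependencies above; the target names and the expected result are invented for illustration and are not part of this change.

targets_before = {
  'liba': {'type': 'static_library', 'dependencies': ['libb']},
  'libb': {'type': 'static_library'},
  'app':  {'type': 'executable',     'dependencies': ['liba']},
}
# Expected outcome: 'libb' is not a hard dependency, so 'liba' drops it,
# while 'app' (a linkable target) picks it up as a link dependency.
targets_after = {
  'liba': {'type': 'static_library'},
  'libb': {'type': 'static_library'},
  'app':  {'type': 'executable',     'dependencies': ['liba', 'libb']},
}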
-
-# Initialize this here to speed up MakePathRelative.
-exception_re = re.compile(r'''["']?[-/$<>^]''')
-
-
-def MakePathRelative(to_file, fro_file, item):
-  # If item is a relative path, it's relative to the build file dict that it's
-  # coming from.  Fix it up to make it relative to the build file dict that
-  # it's going into.
-  # Exception: any |item| that begins with these special characters is
-  # returned without modification.
-  #   /   Used when a path is already absolute (shortcut optimization;
-  #       such paths would be returned as absolute anyway)
-  #   $   Used for build environment variables
-  #   -   Used for some build environment flags (such as -lapr-1 in a
-  #       "libraries" section)
-  #   <   Used for our own variable and command expansions (see ExpandVariables)
-  #   >   Used for our own variable and command expansions (see ExpandVariables)
-  #   ^   Used for our own variable and command expansions (see ExpandVariables)
-  #
-  #   "/' Used when a value is quoted.  If these are present, then we
-  #       check the second character instead.
-  #
-  if to_file == fro_file or exception_re.match(item):
-    return item
-  else:
-    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
-    # temporary measure. This should really be addressed by keeping all paths
-    # in POSIX until actual project generation.
-    ret = os.path.normpath(os.path.join(
-        gyp.common.RelativePath(os.path.dirname(fro_file),
-                                os.path.dirname(to_file)),
-                                item)).replace('\\', '/')
-    if item[-1] == '/':
-      ret += '/'
-    return ret
-
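A minimal standalone sketch of the rebasing rule implemented by MakePathRelative above, using os.path.relpath in place of gyp.common.RelativePath; the build-file names and the 'src/foo.cc' item are hypothetical.

import os

def rebase(to_file, fro_file, item):
  # Only the plain case is handled here; items beginning (possibly after a
  # quote) with '/', '$', '-', '<', '>' or '^' would be returned unchanged
  # by MakePathRelative.
  rel = os.path.relpath(os.path.dirname(fro_file) or '.',
                        os.path.dirname(to_file) or '.')
  return os.path.normpath(os.path.join(rel, item)).replace('\\', '/')

print(rebase('app/app.gyp', 'third_party/lib/lib.gyp', 'src/foo.cc'))
# -> ../third_party/lib/src/foo.cc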
-def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
-  # Python documentation recommends that objects which do not support hashing
-  # set this value to None. Python library objects follow this rule.
-  is_hashable = lambda val: val.__hash__
-
-  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
-  def is_in_set_or_list(x, s, l):
-    if is_hashable(x):
-      return x in s
-    return x in l
-
-  prepend_index = 0
-
-  # Make membership testing of hashables in |to| (in particular, strings)
-  # faster.
-  hashable_to_set = set(x for x in to if is_hashable(x))
-  for item in fro:
-    singleton = False
-    if type(item) in (str, int):
-      # The cheap and easy case.
-      if is_paths:
-        to_item = MakePathRelative(to_file, fro_file, item)
-      else:
-        to_item = item
-
-      if not (type(item) is str and item.startswith('-')):
-        # Any string that doesn't begin with a "-" is a singleton - it can
-        # only appear once in a list, to be enforced by the list merge append
-        # or prepend.
-        singleton = True
-    elif type(item) is dict:
-      # Make a copy of the dictionary, continuing to look for paths to fix.
-      # The other intelligent aspects of merge processing won't apply because
-      # item is being merged into an empty dict.
-      to_item = {}
-      MergeDicts(to_item, item, to_file, fro_file)
-    elif type(item) is list:
-      # Recurse, making a copy of the list.  If the list contains any
-      # descendant dicts, path fixing will occur.  Note that here, custom
-      # values for is_paths and append are dropped; those are only to be
-      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
-      # matter anyway because the new |to_item| list is empty.
-      to_item = []
-      MergeLists(to_item, item, to_file, fro_file)
-    else:
-      raise TypeError(
-          'Attempt to merge list item of unsupported type ' + \
-          item.__class__.__name__)
-
-    if append:
-      # If appending a singleton that's already in the list, don't append.
-      # This ensures that the earliest occurrence of the item will stay put.
-      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
-        to.append(to_item)
-        if is_hashable(to_item):
-          hashable_to_set.add(to_item)
-    else:
-      # If prepending a singleton that's already in the list, remove the
-      # existing instance and proceed with the prepend.  This ensures that the
-      # item appears at the earliest possible position in the list.
-      while singleton and to_item in to:
-        to.remove(to_item)
-
-      # Don't just insert everything at index 0.  That would prepend the new
-      # items to the list in reverse order, which would be an unwelcome
-      # surprise.
-      to.insert(prepend_index, to_item)
-      if is_hashable(to_item):
-        hashable_to_set.add(to_item)
-      prepend_index = prepend_index + 1
-
-
-def MergeDicts(to, fro, to_file, fro_file):
-  # I wanted to name the parameter "from" but it's a Python keyword...
-  for k, v in fro.iteritems():
-    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
-    # copy semantics.  Something else may want to merge from the |fro| dict
-    # later, and having the same dict ref pointed to twice in the tree isn't
-    # what anyone wants considering that the dicts may subsequently be
-    # modified.
-    if k in to:
-      bad_merge = False
-      if type(v) in (str, int):
-        if type(to[k]) not in (str, int):
-          bad_merge = True
-      elif type(v) is not type(to[k]):
-        bad_merge = True
-
-      if bad_merge:
-        raise TypeError(
-            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
-            ' into incompatible type ' + to[k].__class__.__name__ + \
-            ' for key ' + k)
-    if type(v) in (str, int):
-      # Overwrite the existing value, if any.  Cheap and easy.
-      is_path = IsPathSection(k)
-      if is_path:
-        to[k] = MakePathRelative(to_file, fro_file, v)
-      else:
-        to[k] = v
-    elif type(v) is dict:
-      # Recurse, guaranteeing copies will be made of objects that require it.
-      if not k in to:
-        to[k] = {}
-      MergeDicts(to[k], v, to_file, fro_file)
-    elif type(v) is list:
-      # Lists in dicts can be merged with different policies, depending on
-      # how the key in the "from" dict (k, the from-key) is written.
-      #
-      # If the from-key has          ...the to-list will have this action
-      # this character appended:...     applied when receiving the from-list:
-      #                           =  replace
-      #                           +  prepend
-      #                           ?  set, only if to-list does not yet exist
-      #                      (none)  append
-      #
-      # This logic is list-specific, but since it relies on the associated
-      # dict key, it's checked in this dict-oriented function.
-      ext = k[-1]
-      append = True
-      if ext == '=':
-        list_base = k[:-1]
-        lists_incompatible = [list_base, list_base + '?']
-        to[list_base] = []
-      elif ext == '+':
-        list_base = k[:-1]
-        lists_incompatible = [list_base + '=', list_base + '?']
-        append = False
-      elif ext == '?':
-        list_base = k[:-1]
-        lists_incompatible = [list_base, list_base + '=', list_base + '+']
-      else:
-        list_base = k
-        lists_incompatible = [list_base + '=', list_base + '?']
-
-      # Some combinations of merge policies appearing together are meaningless.
-      # It's stupid to replace and append simultaneously, for example.  Append
-      # and prepend are the only policies that can coexist.
-      for list_incompatible in lists_incompatible:
-        if list_incompatible in fro:
-          raise GypError('Incompatible list policies ' + k + ' and ' +
-                         list_incompatible)
-
-      if list_base in to:
-        if ext == '?':
-          # If the key ends in "?", the list will only be merged if it doesn't
-          # already exist.
-          continue
-        elif type(to[list_base]) is not list:
-          # This may not have been checked above if merging in a list with an
-          # extension character.
-          raise TypeError(
-              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
-              ' into incompatible type ' + to[list_base].__class__.__name__ + \
-              ' for key ' + list_base + '(' + k + ')')
-      else:
-        to[list_base] = []
-
-      # Call MergeLists, which will make copies of objects that require it.
-      # MergeLists can recurse back into MergeDicts, although this will only be
-      # to make copies of dicts (with paths fixed); there will be no
-      # subsequent dict "merging" once entering a list because lists are
-      # always replaced, appended to, or prepended to.
-      is_paths = IsPathSection(list_base)
-      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
-    else:
-      raise TypeError(
-          'Attempt to merge dict value of unsupported type ' + \
-          v.__class__.__name__ + ' for key ' + k)
-
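A minimal standalone sketch of the '=', '+', '?' and plain-append list policies described in the comments above; it does not call gyp.input.MergeDicts, and it omits path fixing and singleton handling.

def merge_list_key(to, key, fro_list):
  ext = key[-1]
  if ext == '=':                           # replace
    to[key[:-1]] = list(fro_list)
  elif ext == '+':                         # prepend
    to[key[:-1]] = list(fro_list) + to.get(key[:-1], [])
  elif ext == '?':                         # set only if not already present
    to.setdefault(key[:-1], list(fro_list))
  else:                                    # plain key: append
    to.setdefault(key, []).extend(fro_list)

d = {'defines': ['A']}
merge_list_key(d, 'defines+', ['B'])       # -> ['B', 'A']
merge_list_key(d, 'defines=', ['C'])       # -> ['C']
merge_list_key(d, 'defines?', ['D'])       # unchanged, 'defines' already set
assert d['defines'] == ['C']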
-
-def MergeConfigWithInheritance(new_configuration_dict, build_file,
-                               target_dict, configuration, visited):
-  # Skip if previously visited.
-  if configuration in visited:
-    return
-
-  # Look at this configuration.
-  configuration_dict = target_dict['configurations'][configuration]
-
-  # Merge in parents.
-  for parent in configuration_dict.get('inherit_from', []):
-    MergeConfigWithInheritance(new_configuration_dict, build_file,
-                               target_dict, parent, visited + [configuration])
-
-  # Merge it into the new config.
-  MergeDicts(new_configuration_dict, configuration_dict,
-             build_file, build_file)
-
-  # Drop abstract.
-  if 'abstract' in new_configuration_dict:
-    del new_configuration_dict['abstract']
-
-
-def SetUpConfigurations(target, target_dict):
-  # key_suffixes is a list of key suffixes that might appear on key names.
-  # These suffixes are handled in conditional evaluations (for =, +, and ?)
-  # and rules/exclude processing (for ! and /).  Keys with these suffixes
-  # should be treated the same as keys without.
-  key_suffixes = ['=', '+', '?', '!', '/']
-
-  build_file = gyp.common.BuildFile(target)
-
-  # Provide a single configuration by default if none exists.
-  # TODO(mark): Signal an error if default_configurations exists but
-  # configurations does not.
-  if not 'configurations' in target_dict:
-    target_dict['configurations'] = {'Default': {}}
-  if not 'default_configuration' in target_dict:
-    concrete = [i for (i, config) in target_dict['configurations'].iteritems()
-                if not config.get('abstract')]
-    target_dict['default_configuration'] = sorted(concrete)[0]
-
-  merged_configurations = {}
-  configs = target_dict['configurations']
-  for (configuration, old_configuration_dict) in configs.iteritems():
-    # Skip abstract configurations (saves work only).
-    if old_configuration_dict.get('abstract'):
-      continue
-    # Configurations inherit (most) settings from the enclosing target scope.
-    # Get the inheritance relationship right by making a copy of the target
-    # dict.
-    new_configuration_dict = {}
-    for (key, target_val) in target_dict.iteritems():
-      key_ext = key[-1:]
-      if key_ext in key_suffixes:
-        key_base = key[:-1]
-      else:
-        key_base = key
-      if not key_base in non_configuration_keys:
-        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
-
-    # Merge in configuration (with all its parents first).
-    MergeConfigWithInheritance(new_configuration_dict, build_file,
-                               target_dict, configuration, [])
-
-    merged_configurations[configuration] = new_configuration_dict
-
-  # Put the new configurations back into the target dict as a configuration.
-  for configuration in merged_configurations.keys():
-    target_dict['configurations'][configuration] = (
-        merged_configurations[configuration])
-
-  # Now drop all the abstract ones.
-  for configuration in target_dict['configurations'].keys():
-    old_configuration_dict = target_dict['configurations'][configuration]
-    if old_configuration_dict.get('abstract'):
-      del target_dict['configurations'][configuration]
-
-  # Now that all of the target's configurations have been built, go through
-  # the target dict's keys and remove everything that's been moved into a
-  # "configurations" section.
-  delete_keys = []
-  for key in target_dict:
-    key_ext = key[-1:]
-    if key_ext in key_suffixes:
-      key_base = key[:-1]
-    else:
-      key_base = key
-    if not key_base in non_configuration_keys:
-      delete_keys.append(key)
-  for key in delete_keys:
-    del target_dict[key]
-
-  # Check the configurations to see if they contain invalid keys.
-  for configuration in target_dict['configurations'].keys():
-    configuration_dict = target_dict['configurations'][configuration]
-    for key in configuration_dict.keys():
-      if key in invalid_configuration_keys:
-        raise GypError('%s not allowed in the %s configuration, found in '
-                       'target %s' % (key, configuration, target))
-
-
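A hypothetical target dict illustrating the configuration handling above: 'Common' is abstract, so 'Debug' pulls it in through 'inherit_from' and the abstract entry is then dropped from the final 'configurations' dict. The names and values are invented for illustration.

target_dict = {
  'target_name': 'example',
  'type': 'static_library',
  'configurations': {
    'Common': {'abstract': 1, 'defines': ['COMMON']},
    'Debug':  {'inherit_from': ['Common'], 'defines': ['DEBUG']},
  },
}
# After SetUpConfigurations only 'Debug' remains, and with list-append
# semantics its 'defines' would contain both 'COMMON' and 'DEBUG'.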
-
-def ProcessListFiltersInDict(name, the_dict):
-  """Process regular expression and exclusion-based filters on lists.
-
-  An exclusion list is in a dict key named with a trailing "!", like
-  "sources!".  Every item in such a list is removed from the associated
-  main list, which in this example, would be "sources".  Removed items are
-  placed into a "sources_excluded" list in the dict.
-
-  Regular expression (regex) filters are contained in dict keys named with a
-  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
-  filters in a dict take the form:
-    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
-                  ['include', '_mac\\.cc$'] ],
-  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
-  _win.cc.  The second filter then includes all files ending in _mac.cc that
-  are now or were once in the "sources" list.  Items matching an "exclude"
-  filter are subject to the same processing as would occur if they were listed
-  by name in an exclusion list (ending in "!").  Items matching an "include"
-  filter are brought back into the main list if previously excluded by an
-  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
-  patterns can still cause items to be excluded after matching an "include".
-  """
-
-  # Look through the dictionary for any lists whose keys end in "!" or "/".
-  # These are lists that will be treated as exclude lists and regular
-  # expression-based exclude/include lists.  Collect the lists that are
-  # needed first, looking for the lists that they operate on, and assemble
-  # them into |lists|.  This is done in a separate loop up front, because
-  # the _included and _excluded keys need to be added to the_dict, and that
-  # can't be done while iterating through it.
-
-  lists = []
-  del_lists = []
-  for key, value in the_dict.iteritems():
-    operation = key[-1]
-    if operation != '!' and operation != '/':
-      continue
-
-    if type(value) is not list:
-      raise ValueError(name + ' key ' + key + ' must be list, not ' + \
-                       value.__class__.__name__)
-
-    list_key = key[:-1]
-    if list_key not in the_dict:
-      # This happens when there's a list like "sources!" but no corresponding
-      # "sources" list.  Since there's nothing for it to operate on, queue up
-      # the "sources!" list for deletion now.
-      del_lists.append(key)
-      continue
-
-    if type(the_dict[list_key]) is not list:
-      value = the_dict[list_key]
-      raise ValueError(name + ' key ' + list_key + \
-                       ' must be list, not ' + \
-                       value.__class__.__name__ + ' when applying ' + \
-                       {'!': 'exclusion', '/': 'regex'}[operation])
-
-    if not list_key in lists:
-      lists.append(list_key)
-
-  # Delete the lists that are known to be unneeded at this point.
-  for del_list in del_lists:
-    del the_dict[del_list]
-
-  for list_key in lists:
-    the_list = the_dict[list_key]
-
-    # Initialize the list_actions list, which is parallel to the_list.  Each
-    # item in list_actions identifies whether the corresponding item in
-    # the_list should be excluded, unconditionally preserved (included), or
-    # whether no exclusion or inclusion has been applied.  Items for which
-    # no exclusion or inclusion has been applied (yet) have value -1, items
-    # excluded have value 0, and items included have value 1.  Includes and
-    # excludes override previous actions.  All items in list_actions are
-    # initialized to -1 because no excludes or includes have been processed
-    # yet.
-    list_actions = list((-1,) * len(the_list))
-
-    exclude_key = list_key + '!'
-    if exclude_key in the_dict:
-      for exclude_item in the_dict[exclude_key]:
-        for index in xrange(0, len(the_list)):
-          if exclude_item == the_list[index]:
-            # This item matches the exclude_item, so set its action to 0
-            # (exclude).
-            list_actions[index] = 0
-
-      # The "whatever!" list is no longer needed, dump it.
-      del the_dict[exclude_key]
-
-    regex_key = list_key + '/'
-    if regex_key in the_dict:
-      for regex_item in the_dict[regex_key]:
-        [action, pattern] = regex_item
-        pattern_re = re.compile(pattern)
-
-        if action == 'exclude':
-          # This item matches an exclude regex, so set its value to 0 (exclude).
-          action_value = 0
-        elif action == 'include':
-          # This item matches an include regex, so set its value to 1 (include).
-          action_value = 1
-        else:
-          # This is an action that doesn't make any sense.
-          raise ValueError('Unrecognized action ' + action + ' in ' + name + \
-                           ' key ' + regex_key)
-
-        for index in xrange(0, len(the_list)):
-          list_item = the_list[index]
-          if list_actions[index] == action_value:
-            # Even if the regex matches, nothing will change so continue (regex
-            # searches are expensive).
-            continue
-          if pattern_re.search(list_item):
-            # Regular expression match.
-            list_actions[index] = action_value
-
-      # The "whatever/" list is no longer needed, dump it.
-      del the_dict[regex_key]
-
-    # Add excluded items to the excluded list.
-    #
-    # Note that exclude_key ("sources!") is different from excluded_key
-    # ("sources_excluded").  The exclude_key list is input and it was already
-    # processed and deleted; the excluded_key list is output and it's about
-    # to be created.
-    excluded_key = list_key + '_excluded'
-    if excluded_key in the_dict:
-      raise GypError(name + ' key ' + excluded_key +
-                     ' must not be present prior '
-                     'to applying exclusion/regex filters for ' + list_key)
-
-    excluded_list = []
-
-    # Go backwards through the list_actions list so that as items are deleted,
-    # the indices of items that haven't been seen yet don't shift.  That means
-    # that things need to be prepended to excluded_list to maintain them in the
-    # same order that they existed in the_list.
-    for index in xrange(len(list_actions) - 1, -1, -1):
-      if list_actions[index] == 0:
-        # Dump anything with action 0 (exclude).  Keep anything with action 1
-        # (include) or -1 (no include or exclude seen for the item).
-        excluded_list.insert(0, the_list[index])
-        del the_list[index]
-
-    # If anything was excluded, put the excluded list into the_dict at
-    # excluded_key.
-    if len(excluded_list) > 0:
-      the_dict[excluded_key] = excluded_list
-
-  # Now recurse into subdicts and lists that may contain dicts.
-  for key, value in the_dict.iteritems():
-    if type(value) is dict:
-      ProcessListFiltersInDict(key, value)
-    elif type(value) is list:
-      ProcessListFiltersInList(key, value)
-
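A hypothetical dict showing the exclusion ("sources!") and regex ("sources/") filters described in the docstring above; the expected result is spelled out in comments rather than by calling ProcessListFiltersInDict directly.

the_dict = {
  'sources':  ['a.cc', 'b_linux.cc', 'c_mac.cc', 'd_win.cc'],
  'sources!': ['a.cc'],
  'sources/': [['exclude', '_(linux|mac|win)\\.cc$'],
               ['include', '_mac\\.cc$']],
}
# Expected after ProcessListFiltersInDict('example', the_dict):
#   the_dict['sources']          == ['c_mac.cc']
#   the_dict['sources_excluded'] == ['a.cc', 'b_linux.cc', 'd_win.cc']
# and the 'sources!' / 'sources/' keys have been deleted.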
-
-def ProcessListFiltersInList(name, the_list):
-  for item in the_list:
-    if type(item) is dict:
-      ProcessListFiltersInDict(name, item)
-    elif type(item) is list:
-      ProcessListFiltersInList(name, item)
-
-
-def ValidateTargetType(target, target_dict):
-  """Ensures the 'type' field on the target is one of the known types.
-
-  Arguments:
-    target: string, name of target.
-    target_dict: dict, target spec.
-
-  Raises an exception on error.
-  """
-  VALID_TARGET_TYPES = ('executable', 'loadable_module',
-                        'static_library', 'shared_library',
-                        'mac_kernel_extension', 'none')
-  target_type = target_dict.get('type', None)
-  if target_type not in VALID_TARGET_TYPES:
-    raise GypError("Target %s has an invalid target type '%s'.  "
-                   "Must be one of %s." %
-                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
-  if (target_dict.get('standalone_static_library', 0) and
-      not target_type == 'static_library'):
-    raise GypError('Target %s has type %s but standalone_static_library flag is'
-                   ' only valid for static_library type.' % (target,
-                                                             target_type))
-
-
-def ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check):
-  if not duplicate_basename_check:
-    return
-  if target_dict.get('type', None) != 'static_library':
-    return
-  sources = target_dict.get('sources', [])
-  basenames = {}
-  for source in sources:
-    name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
-    if not is_compiled_file:
-      continue
-    basename = os.path.basename(name)  # Don't include extension.
-    basenames.setdefault(basename, []).append(source)
-
-  error = ''
-  for basename, files in basenames.iteritems():
-    if len(files) > 1:
-      error += '  %s: %s\n' % (basename, ' '.join(files))
-
-  if error:
-    print('static library %s has several files with the same basename:\n' %
-          target + error + 'libtool on Mac cannot handle that. Use '
-          '--no-duplicate-basename-check to disable this validation.')
-    raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
-  """Ensures that the rules sections in target_dict are valid and consistent,
-  and determines which sources they apply to.
-
-  Arguments:
-    target: string, name of target.
-    target_dict: dict, target spec containing "rules" and "sources" lists.
-    extra_sources_for_rules: a list of keys to scan for rule matches in
-        addition to 'sources'.
-  """
-
-  # Dicts to map between values found in rules' 'rule_name' and 'extension'
-  # keys and the rule dicts themselves.
-  rule_names = {}
-  rule_extensions = {}
-
-  rules = target_dict.get('rules', [])
-  for rule in rules:
-    # Make sure that there's no conflict among rule names and extensions.
-    rule_name = rule['rule_name']
-    if rule_name in rule_names:
-      raise GypError('rule %s exists in duplicate, target %s' %
-                     (rule_name, target))
-    rule_names[rule_name] = rule
-
-    rule_extension = rule['extension']
-    if rule_extension.startswith('.'):
-      rule_extension = rule_extension[1:]
-    if rule_extension in rule_extensions:
-      raise GypError(('extension %s associated with multiple rules, ' +
-                      'target %s rules %s and %s') %
-                     (rule_extension, target,
-                      rule_extensions[rule_extension]['rule_name'],
-                      rule_name))
-    rule_extensions[rule_extension] = rule
-
-    # Make sure rule_sources isn't already there.  It's going to be
-    # created below if needed.
-    if 'rule_sources' in rule:
-      raise GypError(
-            'rule_sources must not exist in input, target %s rule %s' %
-            (target, rule_name))
-
-    rule_sources = []
-    source_keys = ['sources']
-    source_keys.extend(extra_sources_for_rules)
-    for source_key in source_keys:
-      for source in target_dict.get(source_key, []):
-        (source_root, source_extension) = os.path.splitext(source)
-        if source_extension.startswith('.'):
-          source_extension = source_extension[1:]
-        if source_extension == rule_extension:
-          rule_sources.append(source)
-
-    if len(rule_sources) > 0:
-      rule['rule_sources'] = rule_sources
-
-
-def ValidateRunAsInTarget(target, target_dict, build_file):
-  target_name = target_dict.get('target_name')
-  run_as = target_dict.get('run_as')
-  if not run_as:
-    return
-  if type(run_as) is not dict:
-    raise GypError("The 'run_as' in target %s from file %s should be a "
-                   "dictionary." %
-                   (target_name, build_file))
-  action = run_as.get('action')
-  if not action:
-    raise GypError("The 'run_as' in target %s from file %s must have an "
-                   "'action' section." %
-                   (target_name, build_file))
-  if type(action) is not list:
-    raise GypError("The 'action' for 'run_as' in target %s from file %s "
-                   "must be a list." %
-                   (target_name, build_file))
-  working_directory = run_as.get('working_directory')
-  if working_directory and type(working_directory) is not str:
-    raise GypError("The 'working_directory' for 'run_as' in target %s "
-                   "in file %s should be a string." %
-                   (target_name, build_file))
-  environment = run_as.get('environment')
-  if environment and type(environment) is not dict:
-    raise GypError("The 'environment' for 'run_as' in target %s "
-                   "in file %s should be a dictionary." %
-                   (target_name, build_file))
-
-
-def ValidateActionsInTarget(target, target_dict, build_file):
-  '''Validates the inputs to the actions in a target.'''
-  target_name = target_dict.get('target_name')
-  actions = target_dict.get('actions', [])
-  for action in actions:
-    action_name = action.get('action_name')
-    if not action_name:
-      raise GypError("Anonymous action in target %s.  "
-                     "An action must have an 'action_name' field." %
-                     target_name)
-    inputs = action.get('inputs', None)
-    if inputs is None:
-      raise GypError('Action in target %s has no inputs.' % target_name)
-    action_command = action.get('action')
-    if action_command and not action_command[0]:
-      raise GypError("Empty action as command in target %s." % target_name)
-
-
-def TurnIntIntoStrInDict(the_dict):
-  """Given dict the_dict, recursively converts all integers into strings.
-  """
-  # Use items instead of iteritems because there's no need to try to look at
-  # reinserted keys and their associated values.
-  for k, v in the_dict.items():
-    if type(v) is int:
-      v = str(v)
-      the_dict[k] = v
-    elif type(v) is dict:
-      TurnIntIntoStrInDict(v)
-    elif type(v) is list:
-      TurnIntIntoStrInList(v)
-
-    if type(k) is int:
-      del the_dict[k]
-      the_dict[str(k)] = v
-
-
-def TurnIntIntoStrInList(the_list):
-  """Given list the_list, recursively converts all integers into strings.
-  """
-  for index in xrange(0, len(the_list)):
-    item = the_list[index]
-    if type(item) is int:
-      the_list[index] = str(item)
-    elif type(item) is dict:
-      TurnIntIntoStrInDict(item)
-    elif type(item) is list:
-      TurnIntIntoStrInList(item)
-
-
-def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
-                         data):
-  """Return only the targets that are deep dependencies of |root_targets|."""
-  qualified_root_targets = []
-  for target in root_targets:
-    target = target.strip()
-    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
-    if not qualified_targets:
-      raise GypError("Could not find target %s" % target)
-    qualified_root_targets.extend(qualified_targets)
-
-  wanted_targets = {}
-  for target in qualified_root_targets:
-    wanted_targets[target] = targets[target]
-    for dependency in dependency_nodes[target].DeepDependencies():
-      wanted_targets[dependency] = targets[dependency]
-
-  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
-
-  # Prune unwanted targets from each build_file's data dict.
-  for build_file in data['target_build_files']:
-    if not 'targets' in data[build_file]:
-      continue
-    new_targets = []
-    for target in data[build_file]['targets']:
-      qualified_name = gyp.common.QualifiedTarget(build_file,
-                                                  target['target_name'],
-                                                  target['toolset'])
-      if qualified_name in wanted_targets:
-        new_targets.append(target)
-    data[build_file]['targets'] = new_targets
-
-  return wanted_targets, wanted_flat_list
-
-
-def VerifyNoCollidingTargets(targets):
-  """Verify that no two targets in the same directory share the same name.
-
-  Arguments:
-    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
-  """
-  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
-  used = {}
-  for target in targets:
-    # Separate out 'path/to/file.gyp', 'target_name' from
-    # 'path/to/file.gyp:target_name'.
-    path, name = target.rsplit(':', 1)
-    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
-    subdir, gyp = os.path.split(path)
-    # Use '.' for the current directory '', so that the error messages make
-    # more sense.
-    if not subdir:
-      subdir = '.'
-    # Prepare a key like 'path/to:target_name'.
-    key = subdir + ':' + name
-    if key in used:
-      # Complain if this target is already used.
-      raise GypError('Duplicate target name "%s" in directory "%s" used both '
-                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
-    used[key] = gyp
-
-
-def SetGeneratorGlobals(generator_input_info):
-  # Set up path_sections and non_configuration_keys with the default data plus
-  # the generator-specific data.
-  global path_sections
-  path_sections = set(base_path_sections)
-  path_sections.update(generator_input_info['path_sections'])
-
-  global non_configuration_keys
-  non_configuration_keys = base_non_configuration_keys[:]
-  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
-
-  global multiple_toolsets
-  multiple_toolsets = generator_input_info[
-      'generator_supports_multiple_toolsets']
-
-  global generator_filelist_paths
-  generator_filelist_paths = generator_input_info['generator_filelist_paths']
-
-
-def Load(build_files, variables, includes, depth, generator_input_info, check,
-         circular_check, duplicate_basename_check, parallel, root_targets):
-  SetGeneratorGlobals(generator_input_info)
-  # A generator can have lists other than 'sources' processed for rules.
-  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
-
-  # Load build files.  This loads every target-containing build file into
-  # the |data| dictionary such that the keys to |data| are build file names,
-  # and the values are the entire build file contents after "early" or "pre"
-  # processing has been done and includes have been resolved.
-  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
-  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
-  # track of the keys corresponding to "target" files.
-  data = {'target_build_files': set()}
-  # Normalize paths everywhere.  This is important because paths will be
-  # used as keys to the data dict and for references between input files.
-  build_files = set(map(os.path.normpath, build_files))
-  if parallel:
-    LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
-                                 check, generator_input_info)
-  else:
-    aux_data = {}
-    for build_file in build_files:
-      try:
-        LoadTargetBuildFile(build_file, data, aux_data,
-                            variables, includes, depth, check, True)
-      except Exception, e:
-        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
-        raise
-
-  # Build a dict to access each target's subdict by qualified name.
-  targets = BuildTargetsDict(data)
-
-  # Fully qualify all dependency links.
-  QualifyDependencies(targets)
-
-  # Remove self-dependencies from targets that have 'prune_self_dependencies'
-  # set to 1.
-  RemoveSelfDependencies(targets)
-
-  # Expand dependencies specified as build_file:*.
-  ExpandWildcardDependencies(targets, data)
-
-  # Remove all dependencies marked as 'link_dependency' from the targets of
-  # type 'none'.
-  RemoveLinkDependenciesFromNoneTargets(targets)
-
-  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
-  for target_name, target_dict in targets.iteritems():
-    tmp_dict = {}
-    for key_base in dependency_sections:
-      for op in ('', '!', '/'):
-        key = key_base + op
-        if key in target_dict:
-          tmp_dict[key] = target_dict[key]
-          del target_dict[key]
-    ProcessListFiltersInDict(target_name, tmp_dict)
-    # Write the results back to |target_dict|.
-    for key in tmp_dict:
-      target_dict[key] = tmp_dict[key]
-
-  # Make sure every dependency appears at most once.
-  RemoveDuplicateDependencies(targets)
-
-  if circular_check:
-    # Make sure that any targets in a.gyp don't contain dependencies in other
-    # .gyp files that further depend on a.gyp.
-    VerifyNoGYPFileCircularDependencies(targets)
-
-  [dependency_nodes, flat_list] = BuildDependencyList(targets)
-
-  if root_targets:
-    # Remove, from |targets| and |flat_list|, the targets that are not deep
-    # dependencies of the targets specified in |root_targets|.
-    targets, flat_list = PruneUnwantedTargets(
-        targets, flat_list, dependency_nodes, root_targets, data)
-
-  # Check that no two targets in the same directory have the same name.
-  VerifyNoCollidingTargets(flat_list)
-
-  # Handle dependent settings of various types.
-  for settings_type in ['all_dependent_settings',
-                        'direct_dependent_settings',
-                        'link_settings']:
-    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
-
-    # Take out the dependent settings now that they've been published to all
-    # of the targets that require them.
-    for target in flat_list:
-      if settings_type in targets[target]:
-        del targets[target][settings_type]
-
-  # Make sure static libraries don't declare dependencies on other static
-  # libraries, but that linkables depend on all unlinked static libraries
-  # that they need so that their link steps will be correct.
-  gii = generator_input_info
-  if gii['generator_wants_static_library_dependencies_adjusted']:
-    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
-                                    gii['generator_wants_sorted_dependencies'])
-
-  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
-  for target in flat_list:
-    target_dict = targets[target]
-    build_file = gyp.common.BuildFile(target)
-    ProcessVariablesAndConditionsInDict(
-        target_dict, PHASE_LATE, variables, build_file)
-
-  # Move everything that can go into a "configurations" section into one.
-  for target in flat_list:
-    target_dict = targets[target]
-    SetUpConfigurations(target, target_dict)
-
-  # Apply exclude (!) and regex (/) list filters.
-  for target in flat_list:
-    target_dict = targets[target]
-    ProcessListFiltersInDict(target, target_dict)
-
-  # Apply "latelate" variable expansions and condition evaluations.
-  for target in flat_list:
-    target_dict = targets[target]
-    build_file = gyp.common.BuildFile(target)
-    ProcessVariablesAndConditionsInDict(
-        target_dict, PHASE_LATELATE, variables, build_file)
-
-  # Make sure that the rules make sense, and build up rule_sources lists as
-  # needed.  Not all generators will need to use the rule_sources lists, but
-  # some may, and it seems best to build the list in a common spot.
-  # Also validate actions and run_as elements in targets.
-  for target in flat_list:
-    target_dict = targets[target]
-    build_file = gyp.common.BuildFile(target)
-    ValidateTargetType(target, target_dict)
-    ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check)
-    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
-    ValidateRunAsInTarget(target, target_dict, build_file)
-    ValidateActionsInTarget(target, target_dict, build_file)
-
-  # Generators might not expect ints.  Turn them into strs.
-  TurnIntIntoStrInDict(data)
-
-  # TODO(mark): Return |data| for now because the generator needs a list of
-  # build files that came in.  In the future, maybe it should just accept
-  # a list, and not the whole data dict.
-  return [flat_list, targets, data]
diff --git a/tools/gyp/pylib/gyp/input_test.py b/tools/gyp/pylib/gyp/input_test.py
deleted file mode 100755
index 4234fbb..0000000
--- a/tools/gyp/pylib/gyp/input_test.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the input.py file."""
-
-import gyp.input
-import unittest
-import sys
-
-
-class TestFindCycles(unittest.TestCase):
-  def setUp(self):
-    self.nodes = {}
-    for x in ('a', 'b', 'c', 'd', 'e'):
-      self.nodes[x] = gyp.input.DependencyGraphNode(x)
-
-  def _create_dependency(self, dependent, dependency):
-    dependent.dependencies.append(dependency)
-    dependency.dependents.append(dependent)
-
-  def test_no_cycle_empty_graph(self):
-    for label, node in self.nodes.iteritems():
-      self.assertEquals([], node.FindCycles())
-
-  def test_no_cycle_line(self):
-    self._create_dependency(self.nodes['a'], self.nodes['b'])
-    self._create_dependency(self.nodes['b'], self.nodes['c'])
-    self._create_dependency(self.nodes['c'], self.nodes['d'])
-
-    for label, node in self.nodes.iteritems():
-      self.assertEquals([], node.FindCycles())
-
-  def test_no_cycle_dag(self):
-    self._create_dependency(self.nodes['a'], self.nodes['b'])
-    self._create_dependency(self.nodes['a'], self.nodes['c'])
-    self._create_dependency(self.nodes['b'], self.nodes['c'])
-
-    for label, node in self.nodes.iteritems():
-      self.assertEquals([], node.FindCycles())
-
-  def test_cycle_self_reference(self):
-    self._create_dependency(self.nodes['a'], self.nodes['a'])
-
-    self.assertEquals([[self.nodes['a'], self.nodes['a']]],
-                      self.nodes['a'].FindCycles())
-
-  def test_cycle_two_nodes(self):
-    self._create_dependency(self.nodes['a'], self.nodes['b'])
-    self._create_dependency(self.nodes['b'], self.nodes['a'])
-
-    self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
-                      self.nodes['a'].FindCycles())
-    self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
-                      self.nodes['b'].FindCycles())
-
-  def test_two_cycles(self):
-    self._create_dependency(self.nodes['a'], self.nodes['b'])
-    self._create_dependency(self.nodes['b'], self.nodes['a'])
-
-    self._create_dependency(self.nodes['b'], self.nodes['c'])
-    self._create_dependency(self.nodes['c'], self.nodes['b'])
-
-    cycles = self.nodes['a'].FindCycles()
-    self.assertTrue(
-       [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
-    self.assertTrue(
-       [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
-    self.assertEquals(2, len(cycles))
-
-  def test_big_cycle(self):
-    self._create_dependency(self.nodes['a'], self.nodes['b'])
-    self._create_dependency(self.nodes['b'], self.nodes['c'])
-    self._create_dependency(self.nodes['c'], self.nodes['d'])
-    self._create_dependency(self.nodes['d'], self.nodes['e'])
-    self._create_dependency(self.nodes['e'], self.nodes['a'])
-
-    self.assertEquals([[self.nodes['a'],
-                        self.nodes['b'],
-                        self.nodes['c'],
-                        self.nodes['d'],
-                        self.nodes['e'],
-                        self.nodes['a']]],
-                      self.nodes['a'].FindCycles())
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py
deleted file mode 100755
index 055d79c..0000000
--- a/tools/gyp/pylib/gyp/mac_tool.py
+++ /dev/null
@@ -1,712 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import struct
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
-  executor = MacTool()
-  exit_code = executor.Dispatch(args)
-  if exit_code is not None:
-    sys.exit(exit_code)
-
-
-class MacTool(object):
-  """This class performs all the Mac tooling steps. The methods can either be
-  executed directly, or dispatched from an argument list."""
-
-  def Dispatch(self, args):
-    """Dispatches a string command to a method."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    method = "Exec%s" % self._CommandifyName(args[0])
-    return getattr(self, method)(*args[1:])
-
-  def _CommandifyName(self, name_string):
-    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
-    return name_string.title().replace('-', '')
-
-  def ExecCopyBundleResource(self, source, dest, convert_to_binary):
-    """Copies a resource file to the bundle/Resources directory, performing any
-    necessary compilation on each resource."""
-    convert_to_binary = convert_to_binary == 'True'
-    extension = os.path.splitext(source)[1].lower()
-    if os.path.isdir(source):
-      # Copy tree.
-      # TODO(thakis): This copies file attributes like mtime, while the
-      # single-file branch below doesn't. This should probably be changed to
-      # be consistent with the single-file branch.
-      if os.path.exists(dest):
-        shutil.rmtree(dest)
-      shutil.copytree(source, dest)
-    elif extension == '.xib':
-      return self._CopyXIBFile(source, dest)
-    elif extension == '.storyboard':
-      return self._CopyXIBFile(source, dest)
-    elif extension == '.strings' and not convert_to_binary:
-      self._CopyStringsFile(source, dest)
-    else:
-      if os.path.exists(dest):
-        os.unlink(dest)
-      shutil.copy(source, dest)
-
-    if convert_to_binary and extension in ('.plist', '.strings'):
-      self._ConvertToBinary(dest)
-
-  def _CopyXIBFile(self, source, dest):
-    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
-    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
-    base = os.path.dirname(os.path.realpath(__file__))
-    if os.path.relpath(source):
-      source = os.path.join(base, source)
-    if os.path.relpath(dest):
-      dest = os.path.join(base, dest)
-
-    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
-
-    if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
-      args.extend(['--auto-activate-custom-fonts'])
-      if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
-        args.extend([
-            '--target-device', 'iphone', '--target-device', 'ipad',
-            '--minimum-deployment-target',
-            os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
-        ])
-      else:
-        args.extend([
-            '--target-device', 'mac',
-            '--minimum-deployment-target',
-            os.environ['MACOSX_DEPLOYMENT_TARGET'],
-        ])
-
-    args.extend(['--output-format', 'human-readable-text', '--compile', dest,
-        source])
-
-    ibtool_section_re = re.compile(r'/\*.*\*/')
-    ibtool_re = re.compile(r'.*note:.*is clipping its content')
-    ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
-    current_section_header = None
-    for line in ibtoolout.stdout:
-      if ibtool_section_re.match(line):
-        current_section_header = line
-      elif not ibtool_re.match(line):
-        if current_section_header:
-          sys.stdout.write(current_section_header)
-          current_section_header = None
-        sys.stdout.write(line)
-    return ibtoolout.returncode
-
-  def _ConvertToBinary(self, dest):
-    subprocess.check_call([
-        'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
-  def _CopyStringsFile(self, source, dest):
-    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
-    input_code = self._DetectInputEncoding(source) or "UTF-8"
-
-    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
-    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
-    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
-    #     semicolon in dictionary.
-    # on invalid files. Do the same kind of validation.
-    import CoreFoundation
-    s = open(source, 'rb').read()
-    d = CoreFoundation.CFDataCreate(None, s, len(s))
-    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
-    if error:
-      return
-
-    fp = open(dest, 'wb')
-    fp.write(s.decode(input_code).encode('UTF-16'))
-    fp.close()
-
-  def _DetectInputEncoding(self, file_name):
-    """Reads the first few bytes from file_name and tries to guess the text
-    encoding. Returns None as a guess if it can't detect it."""
-    fp = open(file_name, 'rb')
-    try:
-      header = fp.read(3)
-    except Exception:
-      fp.close()
-      return None
-    fp.close()
-    if header.startswith("\xFE\xFF"):
-      return "UTF-16"
-    elif header.startswith("\xFF\xFE"):
-      return "UTF-16"
-    elif header.startswith("\xEF\xBB\xBF"):
-      return "UTF-8"
-    else:
-      return None
-
-  def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
-    """Copies the |source| Info.plist to the destination directory |dest|."""
-    # Read the source Info.plist into memory.
-    fd = open(source, 'r')
-    lines = fd.read()
-    fd.close()
-
-    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
-    plist = plistlib.readPlistFromString(lines)
-    if keys:
-      plist = dict(plist.items() + json.loads(keys[0]).items())
-    lines = plistlib.writePlistToString(plist)
-
-    # Go through all the environment variables and replace them as variables in
-    # the file.
-    IDENT_RE = re.compile(r'[_/\s]')
-    for key in os.environ:
-      if key.startswith('_'):
-        continue
-      evar = '${%s}' % key
-      evalue = os.environ[key]
-      lines = string.replace(lines, evar, evalue)
-
-      # Xcode supports various suffixes on environment variables, which are
-      # all undocumented. :rfc1034identifier is used in the standard project
-      # template these days, and :identifier was used earlier. They are used to
-      # convert non-url characters into things that look like valid urls --
-      # except that the replacement character for :identifier, '_' isn't valid
-      # in a URL either -- oops, hence :rfc1034identifier was born.
-      evar = '${%s:identifier}' % key
-      evalue = IDENT_RE.sub('_', os.environ[key])
-      lines = string.replace(lines, evar, evalue)
-
-      evar = '${%s:rfc1034identifier}' % key
-      evalue = IDENT_RE.sub('-', os.environ[key])
-      lines = string.replace(lines, evar, evalue)
-
-    # Remove any keys with values that haven't been replaced.
-    lines = lines.split('\n')
-    for i in range(len(lines)):
-      if lines[i].strip().startswith("<string>${"):
-        lines[i] = None
-        lines[i - 1] = None
-    lines = '\n'.join(filter(lambda x: x is not None, lines))
-
-    # Write out the file with variables replaced.
-    fd = open(dest, 'w')
-    fd.write(lines)
-    fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
-    # "compiled".
-    self._WritePkgInfo(dest)
-
-    if convert_to_binary == 'True':
-      self._ConvertToBinary(dest)
-
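-  # Illustrative sketch (not part of the original gyp source): the
-  # ':identifier' and ':rfc1034identifier' suffixes handled above reduce to
-  # the IDENT_RE substitution, e.g. for a hypothetical PRODUCT_NAME 'My App':
-  #   >>> import re
-  #   >>> re.sub(r'[_/\s]', '_', 'My App')   # ${PRODUCT_NAME:identifier}
-  #   'My_App'
-  #   >>> re.sub(r'[_/\s]', '-', 'My App')   # ${PRODUCT_NAME:rfc1034identifier}
-  #   'My-App'
-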
-  def _WritePkgInfo(self, info_plist):
-    """This writes the PkgInfo file from the data stored in Info.plist."""
-    plist = plistlib.readPlist(info_plist)
-    if not plist:
-      return
-
-    # Only create PkgInfo for executable types.
-    package_type = plist['CFBundlePackageType']
-    if package_type != 'APPL':
-      return
-
-    # The format of PkgInfo is eight characters, representing the bundle type
-    # and bundle signature, each four characters. If that is missing, four
-    # '?' characters are used instead.
-    signature_code = plist.get('CFBundleSignature', '????')
-    if len(signature_code) != 4:  # Wrong length resets everything, too.
-      signature_code = '?' * 4
-
-    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
-    fp = open(dest, 'w')
-    fp.write('%s%s' % (package_type, signature_code))
-    fp.close()
-
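-  # Illustrative sketch (assumption, not part of the original gyp source):
-  # for an Info.plist with CFBundlePackageType 'APPL' and no (or malformed)
-  # CFBundleSignature, the PkgInfo written above contains exactly the eight
-  # characters 'APPL????'.
-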
-  def ExecFlock(self, lockfile, *cmd_list):
-    """Emulates the most basic behavior of Linux's flock(1)."""
-    # Rely on exception handling to report errors.
-    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
-    fcntl.flock(fd, fcntl.LOCK_EX)
-    return subprocess.call(cmd_list)
-
-  def ExecFilterLibtool(self, *cmd_list):
-    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
-    symbols'."""
-    libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
-                            r'file: .* has no symbols$')
-    libtool_re5 = re.compile(
-        r'^.*libtool: warning for library: ' +
-        r'.* the table of contents is empty ' +
-        r'\(no object file members in the library define global symbols\)$')
-    env = os.environ.copy()
-    # Ref:
-    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
-    # The problem with this flag is that it resets the file mtime on the file to
-    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
-    env['ZERO_AR_DATE'] = '1'
-    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
-    _, err = libtoolout.communicate()
-    for line in err.splitlines():
-      if not libtool_re.match(line) and not libtool_re5.match(line):
-        print >>sys.stderr, line
-    # Unconditionally touch the output .a file on the command line if present
-    # and the command succeeded. A bit hacky.
-    if not libtoolout.returncode:
-      for i in range(len(cmd_list) - 1):
-        if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
-          os.utime(cmd_list[i+1], None)
-          break
-    return libtoolout.returncode
-
-  def ExecPackageIosFramework(self, framework):
-    # Find the name of the binary based on the part before the ".framework".
-    binary = os.path.basename(framework).split('.')[0]
-    module_path = os.path.join(framework, 'Modules')
-    if not os.path.exists(module_path):
-      os.mkdir(module_path)
-    module_template = 'framework module %s {\n' \
-                      '  umbrella header "%s.h"\n' \
-                      '\n' \
-                      '  export *\n' \
-                      '  module * { export * }\n' \
-                      '}\n' % (binary, binary)
-
-    module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
-    module_file.write(module_template)
-    module_file.close()
-
-  def ExecPackageFramework(self, framework, version):
-    """Takes a path to Something.framework and the Current version of that and
-    sets up all the symlinks."""
-    # Find the name of the binary based on the part before the ".framework".
-    binary = os.path.basename(framework).split('.')[0]
-
-    CURRENT = 'Current'
-    RESOURCES = 'Resources'
-    VERSIONS = 'Versions'
-
-    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
-      # Binary-less frameworks don't seem to contain symlinks (see e.g.
-      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
-      return
-
-    # Move into the framework directory to set the symlinks correctly.
-    pwd = os.getcwd()
-    os.chdir(framework)
-
-    # Set up the Current version.
-    self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
-    # Set up the root symlinks.
-    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
-    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
-    # Back to where we were before!
-    os.chdir(pwd)
-
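-  # Illustrative sketch (not part of the original gyp source): for a
-  # hypothetical Foo.framework with version 'A', the calls above create
-  #   Versions/Current -> A
-  #   Foo              -> Versions/Current/Foo
-  #   Resources        -> Versions/Current/Resources
-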
-  def _Relink(self, dest, link):
-    """Creates a symlink to |dest| named |link|. If |link| already exists,
-    it is overwritten."""
-    if os.path.lexists(link):
-      os.remove(link)
-    os.symlink(dest, link)
-
-  def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
-    framework_name = os.path.basename(framework).split('.')[0]
-    all_headers = map(os.path.abspath, all_headers)
-    filelist = {}
-    for header in all_headers:
-      filename = os.path.basename(header)
-      filelist[filename] = header
-      filelist[os.path.join(framework_name, filename)] = header
-    WriteHmap(out, filelist)
-
-  def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
-    header_path = os.path.join(framework, 'Headers')
-    if not os.path.exists(header_path):
-      os.makedirs(header_path)
-    for header in copy_headers:
-      shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
-
-  def ExecCompileXcassets(self, keys, *inputs):
-    """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain any imageset.
-    """
-    command_line = [
-      'xcrun', 'actool', '--output-format', 'human-readable-text',
-      '--compress-pngs', '--notices', '--warnings', '--errors',
-    ]
-    is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
-    if is_iphone_target:
-      platform = os.environ['CONFIGURATION'].split('-')[-1]
-      if platform not in ('iphoneos', 'iphonesimulator'):
-        platform = 'iphonesimulator'
-      command_line.extend([
-          '--platform', platform, '--target-device', 'iphone',
-          '--target-device', 'ipad', '--minimum-deployment-target',
-          os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
-          os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
-      ])
-    else:
-      command_line.extend([
-          '--platform', 'macosx', '--target-device', 'mac',
-          '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
-          '--compile',
-          os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
-      ])
-    if keys:
-      keys = json.loads(keys)
-      for key, value in keys.iteritems():
-        arg_name = '--' + key
-        if isinstance(value, bool):
-          if value:
-            command_line.append(arg_name)
-        elif isinstance(value, list):
-          for v in value:
-            command_line.append(arg_name)
-            command_line.append(str(v))
-        else:
-          command_line.append(arg_name)
-          command_line.append(str(value))
-    # Note: actool crashes if input paths are relative, so use os.path.abspath
-    # to get absolute path names for inputs.
-    command_line.extend(map(os.path.abspath, inputs))
-    subprocess.check_call(command_line)
-
-  def ExecMergeInfoPlist(self, output, *inputs):
-    """Merge multiple .plist files into a single .plist file."""
-    merged_plist = {}
-    for path in inputs:
-      plist = self._LoadPlistMaybeBinary(path)
-      self._MergePlist(merged_plist, plist)
-    plistlib.writePlist(merged_plist, output)
-
-  def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
-    """Code sign a bundle.
-
-    This function tries to code sign an iOS bundle, following the same
-    algorithm as Xcode:
-      1. pick the provisioning profile that best matches the bundle identifier,
-         and copy it into the bundle as embedded.mobileprovision,
-      2. copy Entitlements.plist from user or SDK next to the bundle,
-      3. code sign the bundle.
-    """
-    substitutions, overrides = self._InstallProvisioningProfile(
-        provisioning, self._GetCFBundleIdentifier())
-    entitlements_path = self._InstallEntitlements(
-        entitlements, substitutions, overrides)
-
-    args = ['codesign', '--force', '--sign', key]
-    if preserve == 'True':
-      args.extend(['--deep', '--preserve-metadata=identifier,entitlements'])
-    else:
-      args.extend(['--entitlements', entitlements_path])
-    args.extend(['--timestamp=none', path])
-    subprocess.check_call(args)
-
-  def _InstallProvisioningProfile(self, profile, bundle_identifier):
-    """Installs embedded.mobileprovision into the bundle.
-
-    Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use, if empty or the file is missing, the best file installed
-        will be used
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
-    Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
-    """
-    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
-        profile, bundle_identifier)
-    target_path = os.path.join(
-        os.environ['BUILT_PRODUCTS_DIR'],
-        os.environ['CONTENTS_FOLDER_PATH'],
-        'embedded.mobileprovision')
-    shutil.copy2(source_path, target_path)
-    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
-    return substitutions, provisioning_data['Entitlements']
-
-  def _FindProvisioningProfile(self, profile, bundle_identifier):
-    """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
-    Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use, if empty or the file is missing, the best file installed
-        will be used
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
-    Returns:
-      A tuple of the path to the selected provisioning profile, the data of
-      the embedded plist in the provisioning profile and the team identifier
-      to use for code signing.
-
-    Raises:
-      SystemExit: if no .mobileprovision can be used to sign the bundle.
-    """
-    profiles_dir = os.path.join(
-        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
-    if not os.path.isdir(profiles_dir):
-      print >>sys.stderr, (
-          'cannot find mobile provisioning for %s' % bundle_identifier)
-      sys.exit(1)
-    provisioning_profiles = None
-    if profile:
-      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
-      if os.path.exists(profile_path):
-        provisioning_profiles = [profile_path]
-    if not provisioning_profiles:
-      provisioning_profiles = glob.glob(
-          os.path.join(profiles_dir, '*.mobileprovision'))
-    valid_provisioning_profiles = {}
-    for profile_path in provisioning_profiles:
-      profile_data = self._LoadProvisioningProfile(profile_path)
-      app_id_pattern = profile_data.get(
-          'Entitlements', {}).get('application-identifier', '')
-      for team_identifier in profile_data.get('TeamIdentifier', []):
-        app_id = '%s.%s' % (team_identifier, bundle_identifier)
-        if fnmatch.fnmatch(app_id, app_id_pattern):
-          valid_provisioning_profiles[app_id_pattern] = (
-              profile_path, profile_data, team_identifier)
-    if not valid_provisioning_profiles:
-      print >>sys.stderr, (
-          'cannot find mobile provisioning for %s' % bundle_identifier)
-      sys.exit(1)
-    # If the user has multiple provisioning profiles installed that can be
-    # used for ${bundle_identifier}, pick the most specific one (i.e. the
-    # provisioning profile whose pattern is the longest).
-    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
-    return valid_provisioning_profiles[selected_key]
-
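-  # Illustrative sketch (assumption, not part of the original gyp source):
-  # if two installed profiles declare application-identifier patterns
-  # 'T123.*' and 'T123.com.example.*', a bundle identifier of
-  # 'com.example.app' matches both, and the longer (more specific) pattern
-  # 'T123.com.example.*' wins in the max() above.
-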
-  def _LoadProvisioningProfile(self, profile_path):
-    """Extracts the plist embedded in a provisioning profile.
-
-    Args:
-      profile_path: string, path to the .mobileprovision file
-
-    Returns:
-      Content of the plist embedded in the provisioning profile as a dictionary.
-    """
-    with tempfile.NamedTemporaryFile() as temp:
-      subprocess.check_call([
-          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
-      return self._LoadPlistMaybeBinary(temp.name)
-
-  def _MergePlist(self, merged_plist, plist):
-    """Merge |plist| into |merged_plist|."""
-    for key, value in plist.iteritems():
-      if isinstance(value, dict):
-        merged_value = merged_plist.get(key, {})
-        if isinstance(merged_value, dict):
-          self._MergePlist(merged_value, value)
-          merged_plist[key] = merged_value
-        else:
-          merged_plist[key] = value
-      else:
-        merged_plist[key] = value
-
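-  # Illustrative sketch (not part of the original gyp source): nested
-  # dictionaries are merged recursively and later plists win on conflicts,
-  # e.g.
-  #   merged = {'A': {'x': 1}}
-  #   self._MergePlist(merged, {'A': {'y': 2}, 'B': 3})
-  #   # merged == {'A': {'x': 1, 'y': 2}, 'B': 3}
-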
-  def _LoadPlistMaybeBinary(self, plist_path):
-    """Loads into a memory a plist possibly encoded in binary format.
-
-    This is a wrapper around plistlib.readPlist that tries to convert the
-    plist to the XML format if it can't be parsed (assuming that it is in
-    the binary format).
-
-    Args:
-      plist_path: string, path to a plist file, in XML or binary format
-
-    Returns:
-      Content of the plist as a dictionary.
-    """
-    try:
-      # First, try to read the file using plistlib that only supports XML,
-      # and if an exception is raised, convert a temporary copy to XML and
-      # load that copy.
-      return plistlib.readPlist(plist_path)
-    except:
-      pass
-    with tempfile.NamedTemporaryFile() as temp:
-      shutil.copy2(plist_path, temp.name)
-      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
-      return plistlib.readPlist(temp.name)
-
-  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
-    """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
-    Args:
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-      app_identifier_prefix: string, value for AppIdentifierPrefix
-
-    Returns:
-      Dictionary of substitutions to apply when generating Entitlements.plist.
-    """
-    return {
-      'CFBundleIdentifier': bundle_identifier,
-      'AppIdentifierPrefix': app_identifier_prefix,
-    }
-
-  def _GetCFBundleIdentifier(self):
-    """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
-    Returns:
-      Value of CFBundleIdentifier in the Info.plist located in the bundle.
-    """
-    info_plist_path = os.path.join(
-        os.environ['TARGET_BUILD_DIR'],
-        os.environ['INFOPLIST_PATH'])
-    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
-    return info_plist_data['CFBundleIdentifier']
-
-  def _InstallEntitlements(self, entitlements, substitutions, overrides):
-    """Generates and install the ${BundleName}.xcent entitlements file.
-
-    Expands variables "$(variable)" pattern in the source entitlements file,
-    add extra entitlements defined in the .mobileprovision file and the copy
-    the generated plist to "${BundlePath}.xcent".
-
-    Args:
-      entitlements: string, optional, path to the Entitlements.plist template
-        to use, defaults to "${SDKROOT}/Entitlements.plist"
-      substitutions: dictionary, variable substitutions
-      overrides: dictionary, values to add to the entitlements
-
-    Returns:
-      Path to the generated entitlements file.
-    """
-    source_path = entitlements
-    target_path = os.path.join(
-        os.environ['BUILT_PRODUCTS_DIR'],
-        os.environ['PRODUCT_NAME'] + '.xcent')
-    if not source_path:
-      source_path = os.path.join(
-          os.environ['SDKROOT'],
-          'Entitlements.plist')
-    shutil.copy2(source_path, target_path)
-    data = self._LoadPlistMaybeBinary(target_path)
-    data = self._ExpandVariables(data, substitutions)
-    if overrides:
-      for key in overrides:
-        if key not in data:
-          data[key] = overrides[key]
-    plistlib.writePlist(data, target_path)
-    return target_path
-
-  def _ExpandVariables(self, data, substitutions):
-    """Expands variables "$(variable)" in data.
-
-    Args:
-      data: object, can be either string, list or dictionary
-      substitutions: dictionary, variable substitutions to perform
-
-    Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
-      by the corresponding value found in substitutions, or left intact if
-      the key was not found.
-    """
-    if isinstance(data, str):
-      for key, value in substitutions.iteritems():
-        data = data.replace('$(%s)' % key, value)
-      return data
-    if isinstance(data, list):
-      return [self._ExpandVariables(v, substitutions) for v in data]
-    if isinstance(data, dict):
-      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
-    return data
-
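-# Illustrative sketch (not part of the original gyp source): _ExpandVariables
-# above only rewrites values (dictionary keys are left untouched); expanding
-#   {'application-identifier': '$(AppIdentifierPrefix)$(CFBundleIdentifier)'}
-# with {'AppIdentifierPrefix': 'T123.', 'CFBundleIdentifier': 'com.example.app'}
-# yields {'application-identifier': 'T123.com.example.app'}.
-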
-def NextGreaterPowerOf2(x):
-  return 2**(x).bit_length()
-
-def WriteHmap(output_name, filelist):
-  """Generates a header map based on |filelist|.
-
-  Per Mark Mentovai:
-    A header map is structured essentially as a hash table, keyed by names used
-    in #includes, and providing pathnames to the actual files.
-
-  The implementation below and the comment above comes from inspecting:
-    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
-  while also looking at the implementation in clang in:
-    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
-  """
-  magic = 1751998832
-  version = 1
-  _reserved = 0
-  count = len(filelist)
-  capacity = NextGreaterPowerOf2(count)
-  strings_offset = 24 + (12 * capacity)
-  max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
-
-  out = open(output_name, "wb")
-  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
-                        count, capacity, max_value_length))
-
-  # Create empty hashmap buckets.
-  buckets = [None] * capacity
-  for file, path in filelist.items():
-    key = 0
-    for c in file:
-      key += ord(c.lower()) * 13
-
-    # Fill next empty bucket.
-    while buckets[key & capacity - 1] is not None:
-      key = key + 1
-    buckets[key & capacity - 1] = (file, path)
-
-  next_offset = 1
-  for bucket in buckets:
-    if bucket is None:
-      out.write(struct.pack('<LLL', 0, 0, 0))
-    else:
-      (file, path) = bucket
-      key_offset = next_offset
-      prefix_offset = key_offset + len(file) + 1
-      suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
-      next_offset = suffix_offset + len(os.path.basename(path)) + 1
-      out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
-
-  # Pad byte since next offset starts at 1.
-  out.write(struct.pack('<x'))
-
-  for bucket in buckets:
-    if bucket is not None:
-      (file, path) = bucket
-      out.write(struct.pack('<%ds' % len(file), file))
-      out.write(struct.pack('<s', '\0'))
-      base = os.path.dirname(path) + os.sep
-      out.write(struct.pack('<%ds' % len(base), base))
-      out.write(struct.pack('<s', '\0'))
-      path = os.path.basename(path)
-      out.write(struct.pack('<%ds' % len(path), path))
-      out.write(struct.pack('<s', '\0'))
-
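-# Illustrative sketch (not part of the original gyp source): the bucket index
-# used by WriteHmap above is a simple case-insensitive multiplicative hash,
-# masked to the power-of-two table size, e.g. with capacity 8:
-#   >>> key = sum(ord(c.lower()) * 13 for c in 'Foo.h')
-#   >>> key & (8 - 1)
-#   2
-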
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py
deleted file mode 100644
index e4a85a9..0000000
--- a/tools/gyp/pylib/gyp/msvs_emulation.py
+++ /dev/null
@@ -1,1094 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module helps emulate Visual Studio 2008 behavior on top of other
-build systems, primarily ninja.
-"""
-
-import os
-import re
-import subprocess
-import sys
-
-from gyp.common import OrderedSet
-import gyp.MSVSUtil
-import gyp.MSVSVersion
-
-
-windows_quoter_regex = re.compile(r'(\\*)"')
-
-
-def QuoteForRspFile(arg):
-  """Quote a command line argument so that it appears as one argument when
-  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
-  Windows programs)."""
-  # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
-  # threads. This is actually the quoting rules for CommandLineToArgvW, not
-  # for the shell, because the shell doesn't do anything in Windows. This
-  # works more or less because most programs (including the compiler, etc.)
-  # use that function to handle command line arguments.
-
-  # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
-  # preceding it, and results in n backslashes + the quote. So we substitute
-  # in 2* what we match, +1 more, plus the quote.
-  arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
-
-  # %'s also need to be doubled otherwise they're interpreted as batch
-  # positional arguments. Also make sure to escape the % so that they're
-  # passed literally through escaping so they can be singled to just the
-  # original %. Otherwise, trying to pass the literal representation that
-  # looks like an environment variable to the shell (e.g. %PATH%) would fail.
-  arg = arg.replace('%', '%%')
-
-  # These commands are used in rsp files, so no escaping for the shell (via ^)
-  # is necessary.
-
-  # Finally, wrap the whole thing in quotes so that the above quote rule
-  # applies and whitespace isn't a word break.
-  return '"' + arg + '"'
-
-
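-# Illustrative sketch (not part of the original gyp source), showing the
-# literal strings QuoteForRspFile produces for a couple of inputs:
-#   QuoteForRspFile('/DFOO=%PATH%')  ->  "/DFOO=%%PATH%%"
-#   QuoteForRspFile('say "hi"')      ->  "say \"hi\""
-# (the outer quotes are part of the output; %'s are doubled and quotes gain a
-# backslash so CommandLineToArgvW reads them back literally).
-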
-def EncodeRspFileList(args):
-  """Process a list of arguments using QuoteCmdExeArgument."""
-  # Note that the first argument is assumed to be the command. Don't add
-  # quotes around it because then built-ins like 'echo', etc. won't work.
-  # Take care to normpath only the path in the case of 'call ../x.bat' because
-  # otherwise the whole thing is incorrectly interpreted as a path and not
-  # normalized correctly.
-  if not args: return ''
-  if args[0].startswith('call '):
-    call, program = args[0].split(' ', 1)
-    program = call + ' ' + os.path.normpath(program)
-  else:
-    program = os.path.normpath(args[0])
-  return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
-
-
-def _GenericRetrieve(root, default, path):
-  """Given a list of dictionary keys |path| and a tree of dicts |root|, find
-  value at path, or return |default| if any part of the path doesn't exist."""
-  if not root:
-    return default
-  if not path:
-    return root
-  return _GenericRetrieve(root.get(path[0]), default, path[1:])
-
-
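-# Illustrative sketch (not part of the original gyp source):
-#   >>> _GenericRetrieve({'a': {'b': 1}}, 42, ['a', 'b'])
-#   1
-#   >>> _GenericRetrieve({'a': {'b': 1}}, 42, ['a', 'missing'])
-#   42
-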
-def _AddPrefix(element, prefix):
-  """Add |prefix| to |element| or each subelement if element is iterable."""
-  if element is None:
-    return element
-  # Note, not Iterable because we don't want to handle strings like that.
-  if isinstance(element, list) or isinstance(element, tuple):
-    return [prefix + e for e in element]
-  else:
-    return prefix + element
-
-
-def _DoRemapping(element, map):
-  """If |element| then remap it through |map|. If |element| is iterable then
-  each item will be remapped. Any elements not found will be removed."""
-  if map is not None and element is not None:
-    if not callable(map):
-      map = map.get # Assume it's a dict, otherwise a callable to do the remap.
-    if isinstance(element, list) or isinstance(element, tuple):
-      element = filter(None, [map(elem) for elem in element])
-    else:
-      element = map(element)
-  return element
-
-
-def _AppendOrReturn(append, element):
-  """If |append| is None, simply return |element|. If |append| is not None,
-  then add |element| to it, adding each item in |element| if it's a list or
-  tuple."""
-  if append is not None and element is not None:
-    if isinstance(element, list) or isinstance(element, tuple):
-      append.extend(element)
-    else:
-      append.append(element)
-  else:
-    return element
-
-
-def _FindDirectXInstallation():
-  """Try to find an installation location for the DirectX SDK. Check for the
-  standard environment variable, and if that doesn't exist, try to find
-  via the registry. May return None if not found in either location."""
-  # Return previously calculated value, if there is one
-  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
-    return _FindDirectXInstallation.dxsdk_dir
-
-  dxsdk_dir = os.environ.get('DXSDK_DIR')
-  if not dxsdk_dir:
-    # Setup params to pass to and attempt to launch reg.exe.
-    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    for line in p.communicate()[0].splitlines():
-      if 'InstallPath' in line:
-        dxsdk_dir = line.split('    ')[3] + "\\"
-
-  # Cache return value
-  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
-  return dxsdk_dir
-
-
-def GetGlobalVSMacroEnv(vs_version):
-  """Get a dict of variables mapping internal VS macro names to their gyp
-  equivalents. Returns all variables that are independent of the target."""
-  env = {}
-  # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
-  # Visual Studio is actually installed.
-  if vs_version.Path():
-    env['$(VSInstallDir)'] = vs_version.Path()
-    env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
-  # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
-  # set. This happens when the SDK is sync'd via src-internal, rather than
-  # by typical end-user installation of the SDK. If it's not set, we don't
-  # want to leave the unexpanded variable in the path, so simply strip it.
-  dxsdk_dir = _FindDirectXInstallation()
-  env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
-  # Try to find an installation location for the Windows DDK by checking
-  # the WDK_DIR environment variable, may be None.
-  env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
-  return env
-
-def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
-  """Finds msvs_system_include_dirs that are common to all targets, removes
-  them from all targets, and returns an OrderedSet containing them."""
-  all_system_includes = OrderedSet(
-      configs[0].get('msvs_system_include_dirs', []))
-  for config in configs[1:]:
-    system_includes = config.get('msvs_system_include_dirs', [])
-    all_system_includes = all_system_includes & OrderedSet(system_includes)
-  if not all_system_includes:
-    return None
-  # Expand macros in all_system_includes.
-  env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
-  expanded_system_includes = OrderedSet([ExpandMacros(include, env)
-                                         for include in all_system_includes])
-  if any(['$' in include for include in expanded_system_includes]):
-    # Some path relies on target-specific variables, bail.
-    return None
-
-  # Remove system includes shared by all targets from the targets.
-  for config in configs:
-    includes = config.get('msvs_system_include_dirs', [])
-    if includes:  # Don't insert a msvs_system_include_dirs key if not needed.
-      # This must check the unexpanded includes list:
-      new_includes = [i for i in includes if i not in all_system_includes]
-      config['msvs_system_include_dirs'] = new_includes
-  return expanded_system_includes
-
-
-class MsvsSettings(object):
-  """A class that understands the gyp 'msvs_...' values (especially the
-  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
-  class helps map those settings to command line options."""
-
-  def __init__(self, spec, generator_flags):
-    self.spec = spec
-    self.vs_version = GetVSVersion(generator_flags)
-
-    supported_fields = [
-        ('msvs_configuration_attributes', dict),
-        ('msvs_settings', dict),
-        ('msvs_system_include_dirs', list),
-        ('msvs_disabled_warnings', list),
-        ('msvs_precompiled_header', str),
-        ('msvs_precompiled_source', str),
-        ('msvs_configuration_platform', str),
-        ('msvs_target_platform', str),
-        ]
-    configs = spec['configurations']
-    for field, default in supported_fields:
-      setattr(self, field, {})
-      for configname, config in configs.iteritems():
-        getattr(self, field)[configname] = config.get(field, default())
-
-    self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
-
-    unsupported_fields = [
-        'msvs_prebuild',
-        'msvs_postbuild',
-    ]
-    unsupported = []
-    for field in unsupported_fields:
-      for config in configs.values():
-        if field in config:
-          unsupported += ["%s not supported (target %s)." %
-                          (field, spec['target_name'])]
-    if unsupported:
-      raise Exception('\n'.join(unsupported))
-
-  def GetExtension(self):
-    """Returns the extension for the target, with no leading dot.
-
-    Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
-    the target type.
-    """
-    ext = self.spec.get('product_extension', None)
-    if ext:
-      return ext
-    return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
-
-  def GetVSMacroEnv(self, base_to_build=None, config=None):
-    """Get a dict of variables mapping internal VS macro names to their gyp
-    equivalents."""
-    target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
-    target_name = self.spec.get('product_prefix', '') + \
-        self.spec.get('product_name', self.spec['target_name'])
-    target_dir = base_to_build + '\\' if base_to_build else ''
-    target_ext = '.' + self.GetExtension()
-    target_file_name = target_name + target_ext
-
-    replacements = {
-        '$(InputName)': '${root}',
-        '$(InputPath)': '${source}',
-        '$(IntDir)': '$!INTERMEDIATE_DIR',
-        '$(OutDir)\\': target_dir,
-        '$(PlatformName)': target_platform,
-        '$(ProjectDir)\\': '',
-        '$(ProjectName)': self.spec['target_name'],
-        '$(TargetDir)\\': target_dir,
-        '$(TargetExt)': target_ext,
-        '$(TargetFileName)': target_file_name,
-        '$(TargetName)': target_name,
-        '$(TargetPath)': os.path.join(target_dir, target_file_name),
-    }
-    replacements.update(GetGlobalVSMacroEnv(self.vs_version))
-    return replacements
-
-  def ConvertVSMacros(self, s, base_to_build=None, config=None):
-    """Convert from VS macro names to something equivalent."""
-    env = self.GetVSMacroEnv(base_to_build, config=config)
-    return ExpandMacros(s, env)
-
-  def AdjustLibraries(self, libraries):
-    """Strip -l from library if it's specified with that."""
-    libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
-    return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
-
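-  # Illustrative sketch (not part of the original gyp source):
-  #   self.AdjustLibraries(['-lkernel32', 'user32', 'ole32.lib'])
-  #   -> ['kernel32.lib', 'user32.lib', 'ole32.lib']
-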
-  def _GetAndMunge(self, field, path, default, prefix, append, map):
-    """Retrieve a value from |field| at |path| or return |default|. If
-    |append| is specified, and the item is found, it will be appended to that
-    object instead of returned. If |map| is specified, results will be
-    remapped through |map| before being returned or appended."""
-    result = _GenericRetrieve(field, default, path)
-    result = _DoRemapping(result, map)
-    result = _AddPrefix(result, prefix)
-    return _AppendOrReturn(append, result)
-
-  class _GetWrapper(object):
-    def __init__(self, parent, field, base_path, append=None):
-      self.parent = parent
-      self.field = field
-      self.base_path = [base_path]
-      self.append = append
-    def __call__(self, name, map=None, prefix='', default=None):
-      return self.parent._GetAndMunge(self.field, self.base_path + [name],
-          default=default, prefix=prefix, append=self.append, map=map)
-
-  def GetArch(self, config):
-    """Get architecture based on msvs_configuration_platform and
-    msvs_target_platform. Returns either 'x86' or 'x64'."""
-    configuration_platform = self.msvs_configuration_platform.get(config, '')
-    platform = self.msvs_target_platform.get(config, '')
-    if not platform: # If no specific override, use the configuration's.
-      platform = configuration_platform
-    # Map from platform to architecture.
-    return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
-
-  def _TargetConfig(self, config):
-    """Returns the target-specific configuration."""
-    # There are two levels of architecture/platform specification in VS. The
-    # first level is globally for the configuration (this is what we consider
-    # "the" config at the gyp level, which will be something like 'Debug' or
-    # 'Release_x64'), and a second target-specific configuration, which is an
-    # override for the global one. |config| is remapped here to take into
-    # account the local target-specific overrides to the global configuration.
-    arch = self.GetArch(config)
-    if arch == 'x64' and not config.endswith('_x64'):
-      config += '_x64'
-    if arch == 'x86' and config.endswith('_x64'):
-      config = config.rsplit('_', 1)[0]
-    return config
-
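-  # Illustrative sketch (assumption, not part of the original gyp source):
-  # with msvs_target_platform set to 'x64' for the 'Debug' configuration,
-  # the remapping above yields
-  #   self._TargetConfig('Debug')  ->  'Debug_x64'
-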
-  def _Setting(self, path, config,
-              default=None, prefix='', append=None, map=None):
-    """_GetAndMunge for msvs_settings."""
-    return self._GetAndMunge(
-        self.msvs_settings[config], path, default, prefix, append, map)
-
-  def _ConfigAttrib(self, path, config,
-                   default=None, prefix='', append=None, map=None):
-    """_GetAndMunge for msvs_configuration_attributes."""
-    return self._GetAndMunge(
-        self.msvs_configuration_attributes[config],
-        path, default, prefix, append, map)
-
-  def AdjustIncludeDirs(self, include_dirs, config):
-    """Updates include_dirs to expand VS specific paths, and adds the system
-    include dirs used for platform SDK and similar."""
-    config = self._TargetConfig(config)
-    includes = include_dirs + self.msvs_system_include_dirs[config]
-    includes.extend(self._Setting(
-      ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
-    return [self.ConvertVSMacros(p, config=config) for p in includes]
-
-  def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
-    """Updates midl_include_dirs to expand VS specific paths, and adds the
-    system include dirs used for platform SDK and similar."""
-    config = self._TargetConfig(config)
-    includes = midl_include_dirs + self.msvs_system_include_dirs[config]
-    includes.extend(self._Setting(
-      ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
-    return [self.ConvertVSMacros(p, config=config) for p in includes]
-
-  def GetComputedDefines(self, config):
-    """Returns the set of defines that are injected to the defines list based
-    on other VS settings."""
-    config = self._TargetConfig(config)
-    defines = []
-    if self._ConfigAttrib(['CharacterSet'], config) == '1':
-      defines.extend(('_UNICODE', 'UNICODE'))
-    if self._ConfigAttrib(['CharacterSet'], config) == '2':
-      defines.append('_MBCS')
-    defines.extend(self._Setting(
-        ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
-    return defines
-
-  def GetCompilerPdbName(self, config, expand_special):
-    """Get the pdb file name that should be used for compiler invocations, or
-    None if there's no explicit name specified."""
-    config = self._TargetConfig(config)
-    pdbname = self._Setting(
-        ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
-    if pdbname:
-      pdbname = expand_special(self.ConvertVSMacros(pdbname))
-    return pdbname
-
-  def GetMapFileName(self, config, expand_special):
-    """Gets the explicitly overriden map file name for a target or returns None
-    if it's not set."""
-    config = self._TargetConfig(config)
-    map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
-    if map_file:
-      map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
-    return map_file
-
-  def GetOutputName(self, config, expand_special):
-    """Gets the explicitly overridden output name for a target or returns None
-    if it's not overridden."""
-    config = self._TargetConfig(config)
-    type = self.spec['type']
-    root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
-    # TODO(scottmg): Handle OutputDirectory without OutputFile.
-    output_file = self._Setting((root, 'OutputFile'), config)
-    if output_file:
-      output_file = expand_special(self.ConvertVSMacros(
-          output_file, config=config))
-    return output_file
-
-  def GetPDBName(self, config, expand_special, default):
-    """Gets the explicitly overridden pdb name for a target or returns
-    default if it's not overridden, or if no pdb will be generated."""
-    config = self._TargetConfig(config)
-    output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
-    generate_debug_info = self._Setting(
-        ('VCLinkerTool', 'GenerateDebugInformation'), config)
-    if generate_debug_info == 'true':
-      if output_file:
-        return expand_special(self.ConvertVSMacros(output_file, config=config))
-      else:
-        return default
-    else:
-      return None
-
-  def GetNoImportLibrary(self, config):
-    """If NoImportLibrary: true, ninja will not expect the output to include
-    an import library."""
-    config = self._TargetConfig(config)
-    noimplib = self._Setting(('NoImportLibrary',), config)
-    return noimplib == 'true'
-
-  def GetAsmflags(self, config):
-    """Returns the flags that need to be added to ml invocations."""
-    config = self._TargetConfig(config)
-    asmflags = []
-    safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
-    if safeseh == 'true':
-      asmflags.append('/safeseh')
-    return asmflags
-
-  def GetCflags(self, config):
-    """Returns the flags that need to be added to .c and .cc compilations."""
-    config = self._TargetConfig(config)
-    cflags = []
-    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
-    cl = self._GetWrapper(self, self.msvs_settings[config],
-                          'VCCLCompilerTool', append=cflags)
-    cl('Optimization',
-       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
-    cl('InlineFunctionExpansion', prefix='/Ob')
-    cl('DisableSpecificWarnings', prefix='/wd')
-    cl('StringPooling', map={'true': '/GF'})
-    cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
-    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
-    cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
-    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
-    cl('FloatingPointModel',
-        map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
-        default='0')
-    cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
-    cl('WholeProgramOptimization', map={'true': '/GL'})
-    cl('WarningLevel', prefix='/W')
-    cl('WarnAsError', map={'true': '/WX'})
-    cl('CallingConvention',
-        map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
-    cl('DebugInformationFormat',
-        map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
-    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
-    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
-    cl('MinimalRebuild', map={'true': '/Gm'})
-    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
-    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
-    cl('RuntimeLibrary',
-        map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
-    cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
-    cl('DefaultCharIsUnsigned', map={'true': '/J'})
-    cl('TreatWChar_tAsBuiltInType',
-        map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
-    cl('EnablePREfast', map={'true': '/analyze'})
-    cl('AdditionalOptions', prefix='')
-    cl('EnableEnhancedInstructionSet',
-        map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
-        prefix='/arch:')
-    cflags.extend(['/FI' + f for f in self._Setting(
-        ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
-    if self.vs_version.short_name in ('2013', '2013e', '2015'):
-      # New flag required in 2013 to maintain previous PDB behavior.
-      cflags.append('/FS')
-    # ninja handles parallelism by itself, don't have the compiler do it too.
-    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
-    return cflags
-
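-  # Illustrative sketch (assumption, not part of the original gyp source):
-  # each cl(...) call above reads one VCCLCompilerTool setting, remaps it and
-  # appends the prefixed flag, e.g. an msvs_settings entry of
-  #   'VCCLCompilerTool': {'Optimization': '0', 'WarningLevel': '4'}
-  # contributes '/Od' and '/W4' to cflags.
-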
-  def _GetPchFlags(self, config, extension):
-    """Get the flags to be added to the cflags for precompiled header support.
-    """
-    config = self._TargetConfig(config)
-    # The PCH is only built once by a particular source file. Usage of PCH must
-    # only be for the same language (i.e. C vs. C++), so only include the pch
-    # flags when the language matches.
-    if self.msvs_precompiled_header[config]:
-      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
-      if _LanguageMatchesForPch(source_ext, extension):
-        pch = self.msvs_precompiled_header[config]
-        pchbase = os.path.split(pch)[1]
-        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pchbase + '.pch']
-    return []
-
-  def GetCflagsC(self, config):
-    """Returns the flags that need to be added to .c compilations."""
-    config = self._TargetConfig(config)
-    return self._GetPchFlags(config, '.c')
-
-  def GetCflagsCC(self, config):
-    """Returns the flags that need to be added to .cc compilations."""
-    config = self._TargetConfig(config)
-    return ['/TP'] + self._GetPchFlags(config, '.cc')
-
-  def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
-    """Get and normalize the list of paths in AdditionalLibraryDirectories
-    setting."""
-    config = self._TargetConfig(config)
-    libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
-                             config, default=[])
-    libpaths = [os.path.normpath(
-                    gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
-                for p in libpaths]
-    return ['/LIBPATH:"' + p + '"' for p in libpaths]
-
-  def GetLibFlags(self, config, gyp_to_build_path):
-    """Returns the flags that need to be added to lib commands."""
-    config = self._TargetConfig(config)
-    libflags = []
-    lib = self._GetWrapper(self, self.msvs_settings[config],
-                          'VCLibrarianTool', append=libflags)
-    libflags.extend(self._GetAdditionalLibraryDirectories(
-        'VCLibrarianTool', config, gyp_to_build_path))
-    lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
-    lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
-        prefix='/MACHINE:')
-    lib('AdditionalOptions')
-    return libflags
-
-  def GetDefFile(self, gyp_to_build_path):
-    """Returns the .def file from sources, if any.  Otherwise returns None."""
-    spec = self.spec
-    if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
-      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
-      if len(def_files) == 1:
-        return gyp_to_build_path(def_files[0])
-      elif len(def_files) > 1:
-        raise Exception("Multiple .def files")
-    return None
-
-  def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
-    """.def files get implicitly converted to a ModuleDefinitionFile for the
-    linker in the VS generator. Emulate that behaviour here."""
-    def_file = self.GetDefFile(gyp_to_build_path)
-    if def_file:
-      ldflags.append('/DEF:"%s"' % def_file)
-
-  def GetPGDName(self, config, expand_special):
-    """Gets the explicitly overridden pgd name for a target or returns None
-    if it's not overridden."""
-    config = self._TargetConfig(config)
-    output_file = self._Setting(
-        ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
-    if output_file:
-      output_file = expand_special(self.ConvertVSMacros(
-          output_file, config=config))
-    return output_file
-
-  def GetLdflags(self, config, gyp_to_build_path, expand_special,
-                 manifest_base_name, output_name, is_executable, build_dir):
-    """Returns the flags that need to be added to link commands, and the
-    manifest files."""
-    config = self._TargetConfig(config)
-    ldflags = []
-    ld = self._GetWrapper(self, self.msvs_settings[config],
-                          'VCLinkerTool', append=ldflags)
-    self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
-    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
-    ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
-       prefix='/MACHINE:')
-    ldflags.extend(self._GetAdditionalLibraryDirectories(
-        'VCLinkerTool', config, gyp_to_build_path))
-    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
-    ld('TreatLinkerWarningAsErrors', prefix='/WX',
-       map={'true': '', 'false': ':NO'})
-    out = self.GetOutputName(config, expand_special)
-    if out:
-      ldflags.append('/OUT:' + out)
-    pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
-    if pdb:
-      ldflags.append('/PDB:' + pdb)
-    pgd = self.GetPGDName(config, expand_special)
-    if pgd:
-      ldflags.append('/PGD:' + pgd)
-    map_file = self.GetMapFileName(config, expand_special)
-    ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
-        else '/MAP'})
-    ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
-    ld('AdditionalOptions', prefix='')
-
-    minimum_required_version = self._Setting(
-        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
-    if minimum_required_version:
-      minimum_required_version = ',' + minimum_required_version
-    ld('SubSystem',
-       map={'1': 'CONSOLE%s' % minimum_required_version,
-            '2': 'WINDOWS%s' % minimum_required_version},
-       prefix='/SUBSYSTEM:')
-
-    stack_reserve_size = self._Setting(
-        ('VCLinkerTool', 'StackReserveSize'), config, default='')
-    if stack_reserve_size:
-      stack_commit_size = self._Setting(
-          ('VCLinkerTool', 'StackCommitSize'), config, default='')
-      if stack_commit_size:
-        stack_commit_size = ',' + stack_commit_size
-      ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
-
-    ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
-    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
-    ld('BaseAddress', prefix='/BASE:')
-    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
-    ld('RandomizedBaseAddress',
-        map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
-    ld('DataExecutionPrevention',
-        map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
-    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
-    ld('ForceSymbolReferences', prefix='/INCLUDE:')
-    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
-    ld('LinkTimeCodeGeneration',
-        map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
-             '4': ':PGUPDATE'},
-        prefix='/LTCG')
-    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
-    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
-    ld('EntryPointSymbol', prefix='/ENTRY:')
-    ld('Profile', map={'true': '/PROFILE'})
-    ld('LargeAddressAware',
-        map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
-    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
-    ld('AdditionalDependencies', prefix='')
-
-    if self.GetArch(config) == 'x86':
-      safeseh_default = 'true'
-    else:
-      safeseh_default = None
-    ld('ImageHasSafeExceptionHandlers',
-        map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
-        default=safeseh_default)
-
-    # If the base address is not specifically controlled, DYNAMICBASE should
-    # be on by default.
-    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
-                        ldflags)
-    if not base_flags:
-      ldflags.append('/DYNAMICBASE')
-
-    # If the NXCOMPAT flag has not been specified, default to on. Despite the
-    # documentation that says this only defaults to on when the subsystem is
-    # Vista or greater (which applies to the linker), the IDE defaults it on
-    # unless it's explicitly off.
-    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
-      ldflags.append('/NXCOMPAT')
-
-    have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
-    manifest_flags, intermediate_manifest, manifest_files = \
-        self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
-                                 is_executable and not have_def_file, build_dir)
-    ldflags.extend(manifest_flags)
-    return ldflags, intermediate_manifest, manifest_files
-
-  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
-                          allow_isolation, build_dir):
-    """Returns a 3-tuple:
-    - the set of flags that need to be added to the link to generate
-      a default manifest
-    - the intermediate manifest that the linker will generate that should be
-      used to assert it doesn't add anything to the merged one.
-    - the list of all the manifest files to be merged by the manifest tool and
-      included into the link."""
-    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
-                                      config,
-                                      default='true')
-    if generate_manifest != 'true':
-      # This means not only that the linker should not generate the intermediate
-      # manifest but also that the manifest tool should do nothing even when
-      # additional manifests are specified.
-      return ['/MANIFEST:NO'], [], []
-
-    output_name = name + '.intermediate.manifest'
-    flags = [
-      '/MANIFEST',
-      '/ManifestFile:' + output_name,
-    ]
-
-    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
-    # include into the list of manifests. This allows us to avoid the need to
-    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
-    # still used, and the intermediate manifest is used to assert that the
-    # final manifest we get from merging all the additional manifest files
-    # (plus the one we generate here) isn't modified by merging the
-    # intermediate into it.
-
-    # Always NO, because we generate a manifest file that has what we want.
-    flags.append('/MANIFESTUAC:NO')
-
-    config = self._TargetConfig(config)
-    enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
-                               default='true')
-    manifest_files = []
-    generated_manifest_outer = \
-"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
-"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
-"</assembly>"
-    if enable_uac == 'true':
-      execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
-                                      config, default='0')
-      execution_level_map = {
-        '0': 'asInvoker',
-        '1': 'highestAvailable',
-        '2': 'requireAdministrator'
-      }
-
-      ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
-                                default='false')
-
-      inner = '''
-<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-  <security>
-    <requestedPrivileges>
-      <requestedExecutionLevel level='%s' uiAccess='%s' />
-    </requestedPrivileges>
-  </security>
-</trustInfo>''' % (execution_level_map[execution_level], ui_access)
-    else:
-      inner = ''
-
-    generated_manifest_contents = generated_manifest_outer % inner
-    generated_name = name + '.generated.manifest'
-    # Need to join with the build_dir here as we're writing it during
-    # generation time, but we return the un-joined version because the build
-    # will occur in that directory. We only write the file if the contents
-    # have changed so that simply regenerating the project files doesn't
-    # cause a relink.
-    build_dir_generated_name = os.path.join(build_dir, generated_name)
-    gyp.common.EnsureDirExists(build_dir_generated_name)
-    f = gyp.common.WriteOnDiff(build_dir_generated_name)
-    f.write(generated_manifest_contents)
-    f.close()
-    manifest_files = [generated_name]
-
-    if allow_isolation:
-      flags.append('/ALLOWISOLATION')
-
-    manifest_files += self._GetAdditionalManifestFiles(config,
-                                                       gyp_to_build_path)
-    return flags, output_name, manifest_files
-
-  def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
-    """Gets additional manifest files that are added to the default one
-    generated by the linker."""
-    files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
-                          default=[])
-    if isinstance(files, str):
-      files = files.split(';')
-    return [os.path.normpath(
-                gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
-            for f in files]
-
-  def IsUseLibraryDependencyInputs(self, config):
-    """Returns whether the target should be linked via Use Library Dependency
-    Inputs (using component .objs of a given .lib)."""
-    config = self._TargetConfig(config)
-    uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
-    return uldi == 'true'
-
-  def IsEmbedManifest(self, config):
-    """Returns whether manifest should be linked into binary."""
-    config = self._TargetConfig(config)
-    embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
-                          default='true')
-    return embed == 'true'
-
-  def IsLinkIncremental(self, config):
-    """Returns whether the target should be linked incrementally."""
-    config = self._TargetConfig(config)
-    link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
-    return link_inc != '1'
-
-  def GetRcflags(self, config, gyp_to_ninja_path):
-    """Returns the flags that need to be added to invocations of the resource
-    compiler."""
-    config = self._TargetConfig(config)
-    rcflags = []
-    rc = self._GetWrapper(self, self.msvs_settings[config],
-        'VCResourceCompilerTool', append=rcflags)
-    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
-    rcflags.append('/I' + gyp_to_ninja_path('.'))
-    rc('PreprocessorDefinitions', prefix='/d')
-    # /l arg must be in hex without leading '0x'
-    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
-    return rcflags
-
-  def BuildCygwinBashCommandLine(self, args, path_to_base):
-    """Build a command line that runs args via cygwin bash. We assume that all
-    incoming paths are in Windows normpath'd form, so they need to be
-    converted to posix style for the part of the command line that's passed to
-    bash. We also have to do some Visual Studio macro emulation here because
-    various rules use magic VS names for things. Also note that rules that
-    contain ninja variables cannot be fixed here (for example ${source}), so
-    the outer generator needs to make sure that the paths that are written out
-    are in posix style, if the command line will be used here."""
-    cygwin_dir = os.path.normpath(
-        os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
-    cd = ('cd %s' % path_to_base).replace('\\', '/')
-    args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
-    args = ["'%s'" % a.replace("'", "'\\''") for a in args]
-    bash_cmd = ' '.join(args)
-    cmd = (
-        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
-        'bash -c "%s ; %s"' % (cd, bash_cmd))
-    return cmd
-
-  def IsRuleRunUnderCygwin(self, rule):
-    """Determine if an action should be run under cygwin. If the variable is
-    unset, or set to 1 we use cygwin."""
-    return int(rule.get('msvs_cygwin_shell',
-                        self.spec.get('msvs_cygwin_shell', 1))) != 0
-
-  def _HasExplicitRuleForExtension(self, spec, extension):
-    """Determine if there's an explicit rule for a particular extension."""
-    for rule in spec.get('rules', []):
-      if rule['extension'] == extension:
-        return True
-    return False
-
-  def _HasExplicitIdlActions(self, spec):
-    """Determine if an action should not run midl for .idl files."""
-    return any([action.get('explicit_idl_action', 0)
-                for action in spec.get('actions', [])])
-
-  def HasExplicitIdlRulesOrActions(self, spec):
-    """Determine if there's an explicit rule or action for idl files. When
-    there isn't we need to generate implicit rules to build MIDL .idl files."""
-    return (self._HasExplicitRuleForExtension(spec, 'idl') or
-            self._HasExplicitIdlActions(spec))
-
-  def HasExplicitAsmRules(self, spec):
-    """Determine if there's an explicit rule for asm files. When there isn't we
-    need to generate implicit rules to assemble .asm files."""
-    return self._HasExplicitRuleForExtension(spec, 'asm')
-
-  def GetIdlBuildData(self, source, config):
-    """Determine the implicit outputs for an idl file. Returns output
-    directory, outputs, and variables and flags that are required."""
-    config = self._TargetConfig(config)
-    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
-    def midl(name, default=None):
-      return self.ConvertVSMacros(midl_get(name, default=default),
-                                  config=config)
-    tlb = midl('TypeLibraryName', default='${root}.tlb')
-    header = midl('HeaderFileName', default='${root}.h')
-    dlldata = midl('DLLDataFileName', default='dlldata.c')
-    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
-    proxy = midl('ProxyFileName', default='${root}_p.c')
-    # Note that .tlb is not included in the outputs as it is not always
-    # generated depending on the content of the input idl file.
-    outdir = midl('OutputDirectory', default='')
-    output = [header, dlldata, iid, proxy]
-    variables = [('tlb', tlb),
-                 ('h', header),
-                 ('dlldata', dlldata),
-                 ('iid', iid),
-                 ('proxy', proxy)]
-    # TODO(scottmg): Are there configuration settings to set these flags?
-    target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
-    flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
-    return outdir, output, variables, flags
-
-
-def _LanguageMatchesForPch(source_ext, pch_source_ext):
-  c_exts = ('.c',)
-  cc_exts = ('.cc', '.cxx', '.cpp')
-  return ((source_ext in c_exts and pch_source_ext in c_exts) or
-          (source_ext in cc_exts and pch_source_ext in cc_exts))
-
-
-class PrecompiledHeader(object):
-  """Helper to generate dependencies and build rules to handle generation of
-  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
-  """
-  def __init__(
-      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
-    self.settings = settings
-    self.config = config
-    pch_source = self.settings.msvs_precompiled_source[self.config]
-    self.pch_source = gyp_to_build_path(pch_source)
-    filename, _ = os.path.splitext(pch_source)
-    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
-
-  def _PchHeader(self):
-    """Get the header that will appear in an #include line for all source
-    files."""
-    return self.settings.msvs_precompiled_header[self.config]
-
-  def GetObjDependencies(self, sources, objs, arch):
-    """Given a list of sources files and the corresponding object files,
-    returns a list of the pch files that should be depended upon. The
-    additional wrapping in the return value is for interface compatibility
-    with make.py on Mac, and xcode_emulation.py."""
-    assert arch is None
-    if not self._PchHeader():
-      return []
-    pch_ext = os.path.splitext(self.pch_source)[1]
-    for source in sources:
-      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
-        return [(None, None, self.output_obj)]
-    return []
-
-  def GetPchBuildCommands(self, arch):
-    """Not used on Windows as there are no additional build steps required
-    (instead, existing steps are modified in GetFlagsModifications below)."""
-    return []
-
-  def GetFlagsModifications(self, input, output, implicit, command,
-                            cflags_c, cflags_cc, expand_special):
-    """Get the modified cflags and implicit dependencies that should be used
-    for the pch compilation step."""
-    if input == self.pch_source:
-      pch_output = ['/Yc' + self._PchHeader()]
-      if command == 'cxx':
-        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
-                self.output_obj, [])
-      elif command == 'cc':
-        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
-                self.output_obj, [])
-    return [], output, implicit
-
-
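As a loose sketch of the PCH logic above (hypothetical helper names, not tied to the class): the precompiled object only becomes a dependency if at least one source shares the language of the PCH source, and the PCH-creating compile itself gets a /Yc flag appended.

import os

def _language_matches_for_pch(source_ext, pch_source_ext):
    c_exts = ('.c',)
    cc_exts = ('.cc', '.cxx', '.cpp')
    return ((source_ext in c_exts and pch_source_ext in c_exts) or
            (source_ext in cc_exts and pch_source_ext in cc_exts))

def needs_pch_dependency(sources, pch_source):
    # Mirrors GetObjDependencies: the precompiled object only matters if at
    # least one source is written in the same language as the PCH source.
    pch_ext = os.path.splitext(pch_source)[1]
    return any(_language_matches_for_pch(os.path.splitext(s)[1], pch_ext)
               for s in sources)

print(needs_pch_dependency(['a.cc', 'b.cpp'], 'precompile.cc'))  # True
print(needs_pch_dependency(['a.c'], 'precompile.cc'))            # False

# The compile of the PCH source itself has '/Yc<header>' appended to its
# cflags (see GetFlagsModifications above), e.g. ['/O2', '/Ycpch.h'].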
-vs_version = None
-def GetVSVersion(generator_flags):
-  global vs_version
-  if not vs_version:
-    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
-        generator_flags.get('msvs_version', 'auto'),
-        allow_fallback=False)
-  return vs_version
-
-def _GetVsvarsSetupArgs(generator_flags, arch):
-  vs = GetVSVersion(generator_flags)
-  return vs.SetupScript()
-
-def ExpandMacros(string, expansions):
-  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
-  for the canonical way to retrieve a suitable dict."""
-  if '$' in string:
-    for old, new in expansions.iteritems():
-      assert '$(' not in new, new
-      string = string.replace(old, new)
-  return string
-
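A small usage sketch of the expansion above, with a fabricated expansions dict (in the real flow the dict comes from MsvsSettings.GetVSMacroEnv):

def expand_macros(string, expansions):
    # Same logic as ExpandMacros above, written to run on py2 and py3.
    if '$' in string:
        for old, new in expansions.items():
            assert '$(' not in new, new
            string = string.replace(old, new)
    return string

expansions = {'$(OutDir)': 'out/Release', '$(TargetName)': 'd8'}
print(expand_macros('$(OutDir)\\$(TargetName).exe', expansions))
# -> out/Release\d8.exe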
-def _ExtractImportantEnvironment(output_of_set):
-  """Extracts environment variables required for the toolchain to run from
-  a textual dump output by the cmd.exe 'set' command."""
-  envvars_to_save = (
-      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
-      'include',
-      'lib',
-      'libpath',
-      'path',
-      'pathext',
-      'systemroot',
-      'temp',
-      'tmp',
-      )
-  env = {}
-  # This occasionally happens and leads to misleading SYSTEMROOT error messages
-  # if not caught here.
-  if output_of_set.count('=') == 0:
-    raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
-  for line in output_of_set.splitlines():
-    for envvar in envvars_to_save:
-      if re.match(envvar + '=', line.lower()):
-        var, setting = line.split('=', 1)
-        if envvar == 'path':
-          # Our own rules (for running gyp-win-tool) and other actions in
-          # Chromium rely on python being in the path. Add the path to this
-          # python here so that if it's not in the path when ninja is run
-          # later, python will still be found.
-          setting = os.path.dirname(sys.executable) + os.pathsep + setting
-        env[var.upper()] = setting
-        break
-  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
-    if required not in env:
-      raise Exception('Environment variable "%s" '
-                      'required to be set to valid path' % required)
-  return env
-
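A simplified, self-contained sketch of the parsing above, applied to a fabricated 'set' dump (it omits the goma_.* pattern and the python-path injection): only whitelisted variables survive, and their names are upper-cased.

import re

# Simplified whitelist; the real list above also includes a goma_.* pattern.
ENVVARS_TO_SAVE = ('include', 'lib', 'libpath', 'path', 'pathext',
                   'systemroot', 'temp', 'tmp')

def extract_important_environment(output_of_set):
    env = {}
    for line in output_of_set.splitlines():
        for envvar in ENVVARS_TO_SAVE:
            if re.match(envvar + '=', line.lower()):
                var, setting = line.split('=', 1)
                env[var.upper()] = setting
                break
    return env

fake_set_output = '\n'.join([
    'INCLUDE=C:\\VS\\include',
    'Path=C:\\VS\\bin;C:\\Windows\\system32',
    'PROMPT=$P$G',          # not whitelisted, dropped
    'SystemRoot=C:\\Windows',
])
print(extract_important_environment(fake_set_output))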
-def _FormatAsEnvironmentBlock(envvar_dict):
-  """Format as an 'environment block' directly suitable for CreateProcess.
-  Briefly this is a list of key=value\0, terminated by an additional \0. See
-  CreateProcess documentation for more details."""
-  block = ''
-  nul = '\0'
-  for key, value in envvar_dict.iteritems():
-    block += key + '=' + value + nul
-  block += nul
-  return block
-
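To make the block format concrete, a minimal round-trip sketch: format a dict the way the function above does, then parse it back the way gyp-win-tool's _GetEnv does (drop the two trailing NULs, split on NUL, split each pair on the first '='). All names here are local to the example.

def format_as_environment_block(envvar_dict):
    block = ''
    nul = '\0'
    for key, value in envvar_dict.items():
        block += key + '=' + value + nul
    block += nul
    return block

def parse_environment_block(block):
    # Mirrors _GetEnv in win_tool.py: drop list terminator and trailing NUL.
    pairs = block[:-2].split('\0')
    return dict(item.split('=', 1) for item in pairs)

env = {'SYSTEMROOT': 'C:\\Windows', 'TEMP': 'C:\\Temp'}
block = format_as_environment_block(env)
assert parse_environment_block(block) == env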
-def _ExtractCLPath(output_of_where):
-  """Gets the path to cl.exe based on the output of calling the environment
-  setup batch file, followed by the equivalent of `where`."""
-  # Take the first line, as that's the first found in the PATH.
-  for line in output_of_where.strip().splitlines():
-    if line.startswith('LOC:'):
-      return line[len('LOC:'):].strip()
-
-def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
-                             system_includes, open_out):
-  """It's not sufficient to have the absolute path to the compiler, linker,
-  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
-  need to support both x86 and x64 compilers within the same build (to support
-  msvs_target_platform hackery). Different architectures require a different
-  compiler binary, and different supporting environment variables (INCLUDE,
-  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
-  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
-  sets up the environment, and then we do not prefix the compiler with
-  an absolute path, instead preferring something like "cl.exe" in the rule
-  which will then run whichever the environment setup has put in the path.
-  When the following procedure to generate environment files does not
-  meet your requirement (e.g. for custom toolchains), you can pass
-  "-G ninja_use_custom_environment_files" to the gyp to suppress file
-  generation and use custom environment files prepared by yourself."""
-  archs = ('x86', 'x64')
-  if generator_flags.get('ninja_use_custom_environment_files', 0):
-    cl_paths = {}
-    for arch in archs:
-      cl_paths[arch] = 'cl.exe'
-    return cl_paths
-  vs = GetVSVersion(generator_flags)
-  cl_paths = {}
-  for arch in archs:
-    # Extract environment variables for subprocesses.
-    args = vs.SetupScript(arch)
-    args.extend(('&&', 'set'))
-    popen = subprocess.Popen(
-        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    variables, _ = popen.communicate()
-    if popen.returncode != 0:
-      raise Exception('"%s" failed with error %d' % (args, popen.returncode))
-    env = _ExtractImportantEnvironment(variables)
-
-    # Inject system includes from gyp files into INCLUDE.
-    if system_includes:
-      system_includes = system_includes | OrderedSet(
-                                              env.get('INCLUDE', '').split(';'))
-      env['INCLUDE'] = ';'.join(system_includes)
-
-    env_block = _FormatAsEnvironmentBlock(env)
-    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
-    f.write(env_block)
-    f.close()
-
-    # Find cl.exe location for this architecture.
-    args = vs.SetupScript(arch)
-    args.extend(('&&',
-      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
-    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
-    output, _ = popen.communicate()
-    cl_paths[arch] = _ExtractCLPath(output)
-  return cl_paths
-
-def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
-  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
-  generator: Check that all regular source files, i.e. not created at run time,
-  exist on disk. Missing files cause needless recompilation when building via
-  VS, and we want this check to match for people/bots that build using ninja,
-  so they're not surprised when the VS build fails."""
-  if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
-    no_specials = filter(lambda x: '$' not in x, sources)
-    relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
-    missing = filter(lambda x: not os.path.exists(x), relative)
-    if missing:
-      # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
-      # path for a slightly less crazy looking output.
-      cleaned_up = [os.path.normpath(x) for x in missing]
-      raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
-
-# Sets some values in default_variables, which are required for many
-# generators, run on Windows.
-def CalculateCommonVariables(default_variables, params):
-  generator_flags = params.get('generator_flags', {})
-
-  # Set a variable so conditions can be based on msvs_version.
-  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
-  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
-  # To determine processor word size on Windows, in addition to checking
-  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
-  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-  # contains the actual word size of the system when running thru WOW64).
-  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
-      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
-    default_variables['MSVS_OS_BITS'] = 64
-  else:
-    default_variables['MSVS_OS_BITS'] = 32
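A minimal sketch of the word-size check described above, with a fabricated environment:

import os

def msvs_os_bits(environ=os.environ):
    # A 32-bit Python on 64-bit Windows reports PROCESSOR_ARCHITECTURE=x86,
    # but PROCESSOR_ARCHITEW6432=AMD64 reveals the real OS word size.
    if ('64' in environ.get('PROCESSOR_ARCHITECTURE', '') or
        '64' in environ.get('PROCESSOR_ARCHITEW6432', '')):
        return 64
    return 32

print(msvs_os_bits({'PROCESSOR_ARCHITECTURE': 'x86',
                    'PROCESSOR_ARCHITEW6432': 'AMD64'}))  # -> 64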
diff --git a/tools/gyp/pylib/gyp/ninja_syntax.py b/tools/gyp/pylib/gyp/ninja_syntax.py
deleted file mode 100644
index d2948f0..0000000
--- a/tools/gyp/pylib/gyp/ninja_syntax.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# This file comes from
-#   https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
-# Do not edit!  Edit the upstream one instead.
-
-"""Python module for generating .ninja files.
-
-Note that this is emphatically not a required piece of Ninja; it's
-just a helpful utility for build-file-generation systems that already
-use Python.
-"""
-
-import textwrap
-import re
-
-def escape_path(word):
-    return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
-
-class Writer(object):
-    def __init__(self, output, width=78):
-        self.output = output
-        self.width = width
-
-    def newline(self):
-        self.output.write('\n')
-
-    def comment(self, text):
-        for line in textwrap.wrap(text, self.width - 2):
-            self.output.write('# ' + line + '\n')
-
-    def variable(self, key, value, indent=0):
-        if value is None:
-            return
-        if isinstance(value, list):
-            value = ' '.join(filter(None, value))  # Filter out empty strings.
-        self._line('%s = %s' % (key, value), indent)
-
-    def pool(self, name, depth):
-        self._line('pool %s' % name)
-        self.variable('depth', depth, indent=1)
-
-    def rule(self, name, command, description=None, depfile=None,
-             generator=False, pool=None, restat=False, rspfile=None,
-             rspfile_content=None, deps=None):
-        self._line('rule %s' % name)
-        self.variable('command', command, indent=1)
-        if description:
-            self.variable('description', description, indent=1)
-        if depfile:
-            self.variable('depfile', depfile, indent=1)
-        if generator:
-            self.variable('generator', '1', indent=1)
-        if pool:
-            self.variable('pool', pool, indent=1)
-        if restat:
-            self.variable('restat', '1', indent=1)
-        if rspfile:
-            self.variable('rspfile', rspfile, indent=1)
-        if rspfile_content:
-            self.variable('rspfile_content', rspfile_content, indent=1)
-        if deps:
-            self.variable('deps', deps, indent=1)
-
-    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
-              variables=None):
-        outputs = self._as_list(outputs)
-        all_inputs = self._as_list(inputs)[:]
-        out_outputs = list(map(escape_path, outputs))
-        all_inputs = list(map(escape_path, all_inputs))
-
-        if implicit:
-            implicit = map(escape_path, self._as_list(implicit))
-            all_inputs.append('|')
-            all_inputs.extend(implicit)
-        if order_only:
-            order_only = map(escape_path, self._as_list(order_only))
-            all_inputs.append('||')
-            all_inputs.extend(order_only)
-
-        self._line('build %s: %s' % (' '.join(out_outputs),
-                                        ' '.join([rule] + all_inputs)))
-
-        if variables:
-            if isinstance(variables, dict):
-                iterator = iter(variables.items())
-            else:
-                iterator = iter(variables)
-
-            for key, val in iterator:
-                self.variable(key, val, indent=1)
-
-        return outputs
-
-    def include(self, path):
-        self._line('include %s' % path)
-
-    def subninja(self, path):
-        self._line('subninja %s' % path)
-
-    def default(self, paths):
-        self._line('default %s' % ' '.join(self._as_list(paths)))
-
-    def _count_dollars_before_index(self, s, i):
-      """Returns the number of '$' characters right in front of s[i]."""
-      dollar_count = 0
-      dollar_index = i - 1
-      while dollar_index > 0 and s[dollar_index] == '$':
-        dollar_count += 1
-        dollar_index -= 1
-      return dollar_count
-
-    def _line(self, text, indent=0):
-        """Write 'text' word-wrapped at self.width characters."""
-        leading_space = '  ' * indent
-        while len(leading_space) + len(text) > self.width:
-            # The text is too wide; wrap if possible.
-
-            # Find the rightmost space that would obey our width constraint and
-            # that's not an escaped space.
-            available_space = self.width - len(leading_space) - len(' $')
-            space = available_space
-            while True:
-              space = text.rfind(' ', 0, space)
-              if space < 0 or \
-                 self._count_dollars_before_index(text, space) % 2 == 0:
-                break
-
-            if space < 0:
-                # No such space; just use the first unescaped space we can find.
-                space = available_space - 1
-                while True:
-                  space = text.find(' ', space + 1)
-                  if space < 0 or \
-                     self._count_dollars_before_index(text, space) % 2 == 0:
-                    break
-            if space < 0:
-                # Give up on breaking.
-                break
-
-            self.output.write(leading_space + text[0:space] + ' $\n')
-            text = text[space+1:]
-
-            # Subsequent lines are continuations, so indent them.
-            leading_space = '  ' * (indent+2)
-
-        self.output.write(leading_space + text + '\n')
-
-    def _as_list(self, input):
-        if input is None:
-            return []
-        if isinstance(input, list):
-            return input
-        return [input]
-
-
-def escape(string):
-    """Escape a string such that it can be embedded into a Ninja file without
-    further interpretation."""
-    assert '\n' not in string, 'Ninja syntax does not allow newlines'
-    # We only have one special metacharacter: '$'.
-    return string.replace('$', '$$')
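For context, a small usage sketch of the Writer above, assuming the module is importable (e.g. "from ninja_syntax import Writer"); it emits a comment, a rule and a build statement, escaping the spaces in the paths as '$ ':

import sys
# Assumes the Writer class defined above is available in scope.

n = Writer(sys.stdout, width=60)
n.comment('Minimal example of generated ninja syntax.')
n.rule('cxx',
       command='g++ -MMD -MF $out.d -c $in -o $out',
       description='CXX $out',
       depfile='$out.d', deps='gcc')
n.build('obj/my file.o', 'cxx', inputs='src/my file.cc',
        variables={'cflags': '-O2'})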
diff --git a/tools/gyp/pylib/gyp/ordered_dict.py b/tools/gyp/pylib/gyp/ordered_dict.py
deleted file mode 100644
index a1e89f9..0000000
--- a/tools/gyp/pylib/gyp/ordered_dict.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# Unmodified from http://code.activestate.com/recipes/576693/
-# other than to add MIT license header (as specified on page, but not in code).
-# Linked from Python documentation here:
-# http://docs.python.org/2/library/collections.html#collections.OrderedDict
-#
-# This should be deleted once Py2.7 is available on all bots, see
-# http://crbug.com/241769.
-#
-# Copyright (c) 2009 Raymond Hettinger.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-
-try:
-    from thread import get_ident as _get_ident
-except ImportError:
-    from dummy_thread import get_ident as _get_ident
-
-try:
-    from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
-    pass
-
-
-class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as for regular dictionaries.
-
-    # The internal self.__map dictionary maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
-
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  Signature is the same as for
-        regular dictionaries, but keyword arguments are not recommended
-        because their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []                     # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link which goes at the end of the linked
-        # list, and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which is
-        # then removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, key = self.__map.pop(key)
-        link_prev[1] = link_next
-        link_next[0] = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        root = self.__root
-        curr = root[1]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[1]
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        root = self.__root
-        curr = root[0]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[0]
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        try:
-            for node in self.__map.itervalues():
-                del node[:]
-            root = self.__root
-            root[:] = [root, root, None]
-            self.__map.clear()
-        except AttributeError:
-            pass
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root[0]
-            link_prev = link[0]
-            link_prev[1] = root
-            root[0] = link_prev
-        else:
-            link = root[1]
-            link_next = link[1]
-            root[1] = link_next
-            link_next[0] = root
-        key = link[2]
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) items in od'
-        for k in self:
-            yield (k, self[k])
-
-    # Suppress 'OrderedDict.update: Method has no argument':
-    # pylint: disable=E0211
-    def update(*args, **kwds):
-        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
-
-        If E is a dict instance, does:           for k in E: od[k] = E[k]
-        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
-        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
-        In either case, this is followed by:     for k, v in F.items(): od[k] = v
-
-        '''
-        if len(args) > 2:
-            raise TypeError('update() takes at most 2 positional '
-                            'arguments (%d given)' % (len(args),))
-        elif not args:
-            raise TypeError('update() takes at least 1 argument (0 given)')
-        self = args[0]
-        # Make progressively weaker assumptions about "other"
-        other = ()
-        if len(args) == 2:
-            other = args[1]
-        if isinstance(other, dict):
-            for key in other:
-                self[key] = other[key]
-        elif hasattr(other, 'keys'):
-            for key in other.keys():
-                self[key] = other[key]
-        else:
-            for key, value in other:
-                self[key] = value
-        for key, value in kwds.items():
-            self[key] = value
-
-    __update = update  # let subclasses override update without breaking __init__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-        If key is not found, d is returned if given, otherwise KeyError is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
-
-    def __repr__(self, _repr_running={}):
-        'od.__repr__() <==> repr(od)'
-        call_key = id(self), _get_ident()
-        if call_key in _repr_running:
-            return '...'
-        _repr_running[call_key] = 1
-        try:
-            if not self:
-                return '%s()' % (self.__class__.__name__,)
-            return '%s(%r)' % (self.__class__.__name__, self.items())
-        finally:
-            del _repr_running[call_key]
-
-    def __reduce__(self):
-        'Return state information for pickling'
-        items = [[k, self[k]] for k in self]
-        inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def copy(self):
-        'od.copy() -> a shallow copy of od'
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
-        and values equal to v (which defaults to None).
-
-        '''
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-    def __eq__(self, other):
-        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-        while comparison to a regular mapping is order-insensitive.
-
-        '''
-        if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
-        return dict.__eq__(self, other)
-
-    def __ne__(self, other):
-        return not self == other
-
-    # -- the following methods are only used in Python 2.7 --
-
-    def viewkeys(self):
-        "od.viewkeys() -> a set-like object providing a view on od's keys"
-        return KeysView(self)
-
-    def viewvalues(self):
-        "od.viewvalues() -> an object providing a view on od's values"
-        return ValuesView(self)
-
-    def viewitems(self):
-        "od.viewitems() -> a set-like object providing a view on od's items"
-        return ItemsView(self)
-
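For illustration, the behavior this backport provides; in modern Python you would simply use collections.OrderedDict (or rely on plain dicts preserving insertion order in 3.7+):

from collections import OrderedDict  # stdlib equivalent of the backport

od = OrderedDict()
od['zebra'] = 1
od['apple'] = 2
od['mango'] = 3

print(list(od.keys()))         # insertion order: ['zebra', 'apple', 'mango']
print(od.popitem(last=False))  # FIFO pop: ('zebra', 1)
print(od.popitem())            # LIFO pop: ('mango', 3)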
diff --git a/tools/gyp/pylib/gyp/simple_copy.py b/tools/gyp/pylib/gyp/simple_copy.py
deleted file mode 100644
index 74c98c5..0000000
--- a/tools/gyp/pylib/gyp/simple_copy.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A clone of the default copy.deepcopy that doesn't handle cyclic
-structures or complex types except for dicts and lists. This is
-because gyp copies such large structures that even small per-copy overhead
-ends up taking seconds in a project the size of Chromium."""
-
-class Error(Exception):
-  pass
-
-__all__ = ["Error", "deepcopy"]
-
-def deepcopy(x):
-  """Deep copy operation on gyp objects such as strings, ints, dicts
-  and lists. More than twice as fast as copy.deepcopy but much less
-  generic."""
-
-  try:
-    return _deepcopy_dispatch[type(x)](x)
-  except KeyError:
-    raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy '
-                'or expand simple_copy support.' % type(x))
-
-_deepcopy_dispatch = d = {}
-
-def _deepcopy_atomic(x):
-  return x
-
-for x in (type(None), int, long, float,
-          bool, str, unicode, type):
-  d[x] = _deepcopy_atomic
-
-def _deepcopy_list(x):
-  return [deepcopy(a) for a in x]
-d[list] = _deepcopy_list
-
-def _deepcopy_dict(x):
-  y = {}
-  for key, value in x.iteritems():
-    y[deepcopy(key)] = deepcopy(value)
-  return y
-d[dict] = _deepcopy_dict
-
-del d
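A self-contained sketch of the same dispatch-table idea, trimmed to the types gyp actually copies; unsupported types raise instead of being silently handled:

class Error(Exception):
    pass

_dispatch = {}

def deepcopy(x):
    try:
        return _dispatch[type(x)](x)
    except KeyError:
        raise Error('Unsupported type %s for deepcopy.' % type(x))

# Atomic values are returned as-is; containers are copied recursively.
for t in (type(None), int, float, bool, str):
    _dispatch[t] = lambda x: x
_dispatch[list] = lambda x: [deepcopy(a) for a in x]
_dispatch[dict] = lambda x: dict((deepcopy(k), deepcopy(v))
                                 for k, v in x.items())

spec = {'target_name': 'd8', 'sources': ['d8.cc'], 'flags': {'opt': 2}}
copy_of_spec = deepcopy(spec)
copy_of_spec['sources'].append('shell.cc')
print(spec['sources'])   # ['d8.cc'] -- original untouched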
diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py
deleted file mode 100755
index 1c843a0..0000000
--- a/tools/gyp/pylib/gyp/win_tool.py
+++ /dev/null
@@ -1,322 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions for Windows builds.
-
-These functions are executed via gyp-win-tool when using the ninja generator.
-"""
-
-import os
-import re
-import shutil
-import subprocess
-import stat
-import string
-import sys
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-
-# A regex matching an argument corresponding to the output filename passed to
-# link.exe.
-_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
-
-def main(args):
-  executor = WinTool()
-  exit_code = executor.Dispatch(args)
-  if exit_code is not None:
-    sys.exit(exit_code)
-
-
-class WinTool(object):
-  """This class performs all the Windows tooling steps. The methods can either
-  be executed directly, or dispatched from an argument list."""
-
-  def _UseSeparateMspdbsrv(self, env, args):
-    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
-    shared one."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    if args[0] != 'link.exe':
-      return
-
-    # Use the output filename passed to the linker to generate an endpoint name
-    # for mspdbsrv.exe.
-    endpoint_name = None
-    for arg in args:
-      m = _LINK_EXE_OUT_ARG.match(arg)
-      if m:
-        endpoint_name = re.sub(r'\W+', '',
-            '%s_%d' % (m.group('out'), os.getpid()))
-        break
-
-    if endpoint_name is None:
-      return
-
-    # Adds the appropriate environment variable. This will be read by link.exe
-    # to know which instance of mspdbsrv.exe it should connect to (if it's
-    # not set then the default endpoint is used).
-    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
-
-  def Dispatch(self, args):
-    """Dispatches a string command to a method."""
-    if len(args) < 1:
-      raise Exception("Not enough arguments")
-
-    method = "Exec%s" % self._CommandifyName(args[0])
-    return getattr(self, method)(*args[1:])
-
-  def _CommandifyName(self, name_string):
-    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
-    return name_string.title().replace('-', '')
-
-  def _GetEnv(self, arch):
-    """Gets the saved environment from a file for a given architecture."""
-    # The environment is saved as an "environment block" (see CreateProcess
-    # and msvs_emulation for details). We convert to a dict here.
-    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
-    pairs = open(arch).read()[:-2].split('\0')
-    kvs = [item.split('=', 1) for item in pairs]
-    return dict(kvs)
-
-  def ExecStamp(self, path):
-    """Simple stamp command."""
-    open(path, 'w').close()
-
-  def ExecRecursiveMirror(self, source, dest):
-    """Emulation of rm -rf out && cp -af in out."""
-    if os.path.exists(dest):
-      if os.path.isdir(dest):
-        def _on_error(fn, path, excinfo):
-          # The operation failed, possibly because the file is set to
-          # read-only. If that's why, make it writable and try the op again.
-          if not os.access(path, os.W_OK):
-            os.chmod(path, stat.S_IWRITE)
-          fn(path)
-        shutil.rmtree(dest, onerror=_on_error)
-      else:
-        if not os.access(dest, os.W_OK):
-          # Attempt to make the file writable before deleting it.
-          os.chmod(dest, stat.S_IWRITE)
-        os.unlink(dest)
-
-    if os.path.isdir(source):
-      shutil.copytree(source, dest)
-    else:
-      shutil.copy2(source, dest)
-
-  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
-    """Filter diagnostic output from link that looks like:
-    '   Creating library ui.dll.lib and object ui.dll.exp'
-    This happens when there are exports from the dll or exe.
-    """
-    env = self._GetEnv(arch)
-    if use_separate_mspdbsrv == 'True':
-      self._UseSeparateMspdbsrv(env, args)
-    if sys.platform == 'win32':
-      args = list(args)  # *args is a tuple by default, which is read-only.
-      args[0] = args[0].replace('/', '\\')
-    # https://docs.python.org/2/library/subprocess.html:
-    # "On Unix with shell=True [...] if args is a sequence, the first item
-    # specifies the command string, and any additional items will be treated as
-    # additional arguments to the shell itself.  That is to say, Popen does the
-    # equivalent of:
-    #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
-    # For that reason, since going through the shell doesn't seem necessary on
-    # non-Windows don't do that there.
-    link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
-                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = link.communicate()
-    for line in out.splitlines():
-      if (not line.startswith('   Creating library ') and
-          not line.startswith('Generating code') and
-          not line.startswith('Finished generating code')):
-        print line
-    return link.returncode
-
-  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
-                            mt, rc, intermediate_manifest, *manifests):
-    """A wrapper for handling creating a manifest resource and then executing
-    a link command."""
-    # The 'normal' way to do manifests is to have link generate a manifest
-    # based on gathering dependencies from the object files, then merge that
-    # manifest with other manifests supplied as sources, convert the merged
-    # manifest to a resource, and then *relink*, including the compiled
-    # version of the manifest resource. This breaks incremental linking, and
-    # is generally overly complicated. Instead, we merge all the manifests
-    # provided (along with one that includes what would normally be in the
-    # linker-generated one, see msvs_emulation.py), and include that into the
-    # first and only link. We still tell link to generate a manifest, but we
-    # only use that to assert that our simpler process did not miss anything.
-    variables = {
-      'python': sys.executable,
-      'arch': arch,
-      'out': out,
-      'ldcmd': ldcmd,
-      'resname': resname,
-      'mt': mt,
-      'rc': rc,
-      'intermediate_manifest': intermediate_manifest,
-      'manifests': ' '.join(manifests),
-    }
-    add_to_ld = ''
-    if manifests:
-      subprocess.check_call(
-          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
-          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
-      if embed_manifest == 'True':
-        subprocess.check_call(
-            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
-          ' %(out)s.manifest.rc %(resname)s' % variables)
-        subprocess.check_call(
-            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
-            '%(out)s.manifest.rc' % variables)
-        add_to_ld = ' %(out)s.manifest.res' % variables
-    subprocess.check_call(ldcmd + add_to_ld)
-
-    # Run mt.exe on the theoretically complete manifest we generated, merging
-    # it with the one the linker generated to confirm that the linker
-    # generated one does not add anything. This is strictly unnecessary for
-    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
-    # used in a #pragma comment.
-    if manifests:
-      # Merge the intermediate one with ours to .assert.manifest, then check
-      # that .assert.manifest is identical to ours.
-      subprocess.check_call(
-          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
-          '-manifest %(out)s.manifest %(intermediate_manifest)s '
-          '-out:%(out)s.assert.manifest' % variables)
-      assert_manifest = '%(out)s.assert.manifest' % variables
-      our_manifest = '%(out)s.manifest' % variables
-      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
-      # and sometimes doesn't unfortunately.
-      with open(our_manifest, 'rb') as our_f:
-        with open(assert_manifest, 'rb') as assert_f:
-          our_data = our_f.read().translate(None, string.whitespace)
-          assert_data = assert_f.read().translate(None, string.whitespace)
-      if our_data != assert_data:
-        os.unlink(out)
-        def dump(filename):
-          sys.stderr.write('%s\n-----\n' % filename)
-          with open(filename, 'rb') as f:
-            sys.stderr.write(f.read() + '\n-----\n')
-        dump(intermediate_manifest)
-        dump(our_manifest)
-        dump(assert_manifest)
-        sys.stderr.write(
-            'Linker generated manifest "%s" added to final manifest "%s" '
-            '(result in "%s"). '
-            'Were /MANIFEST switches used in #pragma statements? ' % (
-              intermediate_manifest, our_manifest, assert_manifest))
-        return 1
-
-  def ExecManifestWrapper(self, arch, *args):
-    """Run manifest tool with environment set. Strip out undesirable warning
-    (some XML blocks are recognized by the OS loader, but not the manifest
-    tool)."""
-    env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    for line in out.splitlines():
-      if line and 'manifest authoring warning 81010002' not in line:
-        print line
-    return popen.returncode
-
-  def ExecManifestToRc(self, arch, *args):
-    """Creates a resource file pointing a SxS assembly manifest.
-    |args| is tuple containing path to resource file, path to manifest file
-    and resource name which can be "1" (for executables) or "2" (for DLLs)."""
-    manifest_path, resource_path, resource_name = args
-    with open(resource_path, 'wb') as output:
-      output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
-        resource_name,
-        os.path.abspath(manifest_path).replace('\\', '/')))
-
-  def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
-                      *flags):
-    """Filter noisy filenames output from MIDL compile step that isn't
-    quietable via command line flags.
-    """
-    args = ['midl', '/nologo'] + list(flags) + [
-        '/out', outdir,
-        '/tlb', tlb,
-        '/h', h,
-        '/dlldata', dlldata,
-        '/iid', iid,
-        '/proxy', proxy,
-        idl]
-    env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    # Filter junk out of stdout, and write filtered versions. Output we want
-    # to filter is pairs of lines that look like this:
-    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
-    # objidl.idl
-    lines = out.splitlines()
-    prefixes = ('Processing ', '64 bit Processing ')
-    processing = set(os.path.basename(x)
-                     for x in lines if x.startswith(prefixes))
-    for line in lines:
-      if not line.startswith(prefixes) and line not in processing:
-        print line
-    return popen.returncode
-
-  def ExecAsmWrapper(self, arch, *args):
-    """Filter logo banner from invocations of asm.exe."""
-    env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    for line in out.splitlines():
-      if (not line.startswith('Copyright (C) Microsoft Corporation') and
-          not line.startswith('Microsoft (R) Macro Assembler') and
-          not line.startswith(' Assembling: ') and
-          line):
-        print line
-    return popen.returncode
-
-  def ExecRcWrapper(self, arch, *args):
-    """Filter logo banner from invocations of rc.exe. Older versions of RC
-    don't support the /nologo flag."""
-    env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
-    for line in out.splitlines():
-      if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
-          not line.startswith('Copyright (C) Microsoft Corporation') and
-          line):
-        print line
-    return popen.returncode
-
-  def ExecActionWrapper(self, arch, rspfile, *dir):
-    """Runs an action command line from a response file using the environment
-    for |arch|. If |dir| is supplied, use that as the working directory."""
-    env = self._GetEnv(arch)
-    # TODO(scottmg): This is a temporary hack to get some specific variables
-    # through to actions that are set after gyp-time. http://crbug.com/333738.
-    for k, v in os.environ.iteritems():
-      if k not in env:
-        env[k] = v
-    args = open(rspfile).read()
-    dir = dir[0] if dir else None
-    return subprocess.call(args, shell=True, env=env, cwd=dir)
-
-  def ExecClCompile(self, project_dir, selected_files):
-    """Executed by msvs-ninja projects when the 'ClCompile' target is used to
-    build selected C/C++ files."""
-    project_dir = os.path.relpath(project_dir, BASE_DIR)
-    selected_files = selected_files.split(';')
-    ninja_targets = [os.path.join(project_dir, filename) + '^^'
-        for filename in selected_files]
-    cmd = ['ninja.exe']
-    cmd.extend(ninja_targets)
-    return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
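As a standalone sketch of the output filtering ExecLinkWrapper performs above (the sample linker output is fabricated):

def filter_link_output(out):
    # Drop link.exe's informational lines, keep real warnings/errors,
    # mirroring the filtering in ExecLinkWrapper above.
    kept = []
    for line in out.splitlines():
        if (not line.startswith('   Creating library ') and
            not line.startswith('Generating code') and
            not line.startswith('Finished generating code')):
            kept.append(line)
    return kept

sample = '\n'.join([
    '   Creating library ui.dll.lib and object ui.dll.exp',
    'Generating code',
    'foo.obj : warning LNK4217: locally defined symbol imported',
    'Finished generating code',
])
print(filter_link_output(sample))  # only the LNK4217 warning survives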
diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py
deleted file mode 100644
index 5e971b5..0000000
--- a/tools/gyp/pylib/gyp/xcode_emulation.py
+++ /dev/null
@@ -1,1729 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module contains classes that help to emulate xcodebuild behavior on top of
-other build systems, such as make and ninja.
-"""
-
-import copy
-import gyp.common
-import os
-import os.path
-import re
-import shlex
-import subprocess
-import sys
-import tempfile
-from gyp.common import GypError
-
-# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
-# "xcodebuild" is called too quickly (it has been found to return incorrect
-# version number).
-XCODE_VERSION_CACHE = None
-
-# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
-# corresponding to the installed version of Xcode.
-XCODE_ARCHS_DEFAULT_CACHE = None
-
-
-def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
-  """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
-  and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
-  mapping = {'$(ARCHS_STANDARD)': archs}
-  if archs_including_64_bit:
-    mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
-  return mapping
-
-class XcodeArchsDefault(object):
-  """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
-  macros and implementing filtering by VALID_ARCHS. The expansion of macros
-  depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
-  on the version of Xcode.
-  """
-
-  # Match variable like $(ARCHS_STANDARD).
-  variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
-
-  def __init__(self, default, mac, iphonesimulator, iphoneos):
-    self._default = (default,)
-    self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
-
-  def _VariableMapping(self, sdkroot):
-    """Returns the dictionary of variable mapping depending on the SDKROOT."""
-    sdkroot = sdkroot.lower()
-    if 'iphoneos' in sdkroot:
-      return self._archs['ios']
-    elif 'iphonesimulator' in sdkroot:
-      return self._archs['iossim']
-    else:
-      return self._archs['mac']
-
-  def _ExpandArchs(self, archs, sdkroot):
-    """Expands variables references in ARCHS, and remove duplicates."""
-    variable_mapping = self._VariableMapping(sdkroot)
-    expanded_archs = []
-    for arch in archs:
-      if self.variable_pattern.match(arch):
-        variable = arch
-        try:
-          variable_expansion = variable_mapping[variable]
-          for arch in variable_expansion:
-            if arch not in expanded_archs:
-              expanded_archs.append(arch)
-        except KeyError as e:
-          print 'Warning: Ignoring unsupported variable "%s".' % variable
-      elif arch not in expanded_archs:
-        expanded_archs.append(arch)
-    return expanded_archs
-
-  def ActiveArchs(self, archs, valid_archs, sdkroot):
-    """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
-    is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
-    values present in VALID_ARCHS are kept)."""
-    expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
-    if valid_archs:
-      filtered_archs = []
-      for arch in expanded_archs:
-        if arch in valid_archs:
-          filtered_archs.append(arch)
-      expanded_archs = filtered_archs
-    return expanded_archs
-
-
-def GetXcodeArchsDefault():
-  """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
-  installed version of Xcode. The default values used by Xcode for ARCHS
-  and the expansion of the variables depends on the version of Xcode used.
-
-  All versions prior to Xcode 5.0, and Xcode 5.1 and later, use
-  $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
-  $(ARCHS_STANDARD_INCLUDING_64_BIT). That variable was added in Xcode 5.0
-  and deprecated with Xcode 5.1.
-
-  For the "macosx" SDKROOT, every version starting with Xcode 5.0 includes
-  the 64-bit architecture as part of $(ARCHS_STANDARD) and defaults to
-  building only it.
-
-  For the "iphoneos" and "iphonesimulator" SDKROOTs, 64-bit architectures are
-  part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1,
-  they are also part of $(ARCHS_STANDARD).
-
-  All those rules are encoded in the construction of the |XcodeArchsDefault|
-  object used for the detected version of Xcode. The object is cached for
-  performance reasons."""
-  global XCODE_ARCHS_DEFAULT_CACHE
-  if XCODE_ARCHS_DEFAULT_CACHE:
-    return XCODE_ARCHS_DEFAULT_CACHE
-  xcode_version, _ = XcodeVersion()
-  if xcode_version < '0500':
-    XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
-        '$(ARCHS_STANDARD)',
-        XcodeArchsVariableMapping(['i386']),
-        XcodeArchsVariableMapping(['i386']),
-        XcodeArchsVariableMapping(['armv7']))
-  elif xcode_version < '0510':
-    XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
-        '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
-        XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
-        XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
-        XcodeArchsVariableMapping(
-            ['armv7', 'armv7s'],
-            ['armv7', 'armv7s', 'arm64']))
-  else:
-    XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
-        '$(ARCHS_STANDARD)',
-        XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
-        XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
-        XcodeArchsVariableMapping(
-            ['armv7', 'armv7s', 'arm64'],
-            ['armv7', 'armv7s', 'arm64']))
-  return XCODE_ARCHS_DEFAULT_CACHE
-
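To make the version-dependent expansion concrete, a small sketch that builds the Xcode 5.1+ mapping directly (assuming the XcodeArchsDefault and XcodeArchsVariableMapping definitions above are in scope) and resolves ARCHS for an iOS SDKROOT:

# Assumes XcodeArchsDefault / XcodeArchsVariableMapping defined above.
archs_default = XcodeArchsDefault(
    '$(ARCHS_STANDARD)',
    XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
    XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
    XcodeArchsVariableMapping(['armv7', 'armv7s', 'arm64'],
                              ['armv7', 'armv7s', 'arm64']))

# ARCHS unset -> the default $(ARCHS_STANDARD) is expanded for the SDKROOT.
print(archs_default.ActiveArchs(None, None, 'iphoneos'))
# -> ['armv7', 'armv7s', 'arm64']

# VALID_ARCHS filters the expansion.
print(archs_default.ActiveArchs(['$(ARCHS_STANDARD)'], ['arm64'], 'iphoneos'))
# -> ['arm64']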
-
-class XcodeSettings(object):
-  """A class that understands the gyp 'xcode_settings' object."""
-
-  # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
-  # at class-level for efficiency.
-  _sdk_path_cache = {}
-  _platform_path_cache = {}
-  _sdk_root_cache = {}
-
-  # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
-  # cached at class-level for efficiency.
-  _plist_cache = {}
-
-  # Populated lazily by GetIOSPostbuilds.  Shared by all XcodeSettings, so
-  # cached at class-level for efficiency.
-  _codesigning_key_cache = {}
-
-  def __init__(self, spec):
-    self.spec = spec
-
-    self.isIOS = False
-    self.mac_toolchain_dir = None
-    self.header_map_path = None
-
-    # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
-    # This means self.xcode_settings[config] always contains all settings
-    # for that config -- the per-target settings as well. Settings that are
-    # the same for all configs are implicitly per-target settings.
-    self.xcode_settings = {}
-    configs = spec['configurations']
-    for configname, config in configs.iteritems():
-      self.xcode_settings[configname] = config.get('xcode_settings', {})
-      self._ConvertConditionalKeys(configname)
-      if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
-                                             None):
-        self.isIOS = True
-
-    # This is only non-None temporarily during the execution of some methods.
-    self.configname = None
-
-    # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
-    self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
-
-  def _ConvertConditionalKeys(self, configname):
-    """Converts or warns on conditional keys.  Xcode supports conditional keys,
-    such as CODE_SIGN_IDENTITY[sdk=iphoneos*].  This is a partial implementation
-    with some keys converted while the rest force a warning."""
-    settings = self.xcode_settings[configname]
-    conditional_keys = [key for key in settings if key.endswith(']')]
-    for key in conditional_keys:
-      # If you need more, speak up at http://crbug.com/122592
-      if key.endswith("[sdk=iphoneos*]"):
-        if configname.endswith("iphoneos"):
-          new_key = key.split("[")[0]
-          settings[new_key] = settings[key]
-      else:
-        print 'Warning: Conditional keys not implemented, ignoring:', \
-              ' '.join(conditional_keys)
-      del settings[key]
-
-  def _Settings(self):
-    assert self.configname
-    return self.xcode_settings[self.configname]
-
-  def _Test(self, test_key, cond_key, default):
-    return self._Settings().get(test_key, default) == cond_key
-
-  def _Appendf(self, lst, test_key, format_str, default=None):
-    if test_key in self._Settings():
-      lst.append(format_str % str(self._Settings()[test_key]))
-    elif default:
-      lst.append(format_str % str(default))
-
-  def _WarnUnimplemented(self, test_key):
-    if test_key in self._Settings():
-      print 'Warning: Ignoring not yet implemented key "%s".' % test_key
-
-  def IsBinaryOutputFormat(self, configname):
-    default = "binary" if self.isIOS else "xml"
-    format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
-                                                 default)
-    return format == "binary"
-
-  def IsIosFramework(self):
-    return self.spec['type'] == 'shared_library' and self._IsBundle() and \
-        self.isIOS
-
-  def _IsBundle(self):
-    return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest() or \
-        self._IsXCUiTest()
-
-  def _IsXCTest(self):
-    return int(self.spec.get('mac_xctest_bundle', 0)) != 0
-
-  def _IsXCUiTest(self):
-    return int(self.spec.get('mac_xcuitest_bundle', 0)) != 0
-
-  def _IsIosAppExtension(self):
-    return int(self.spec.get('ios_app_extension', 0)) != 0
-
-  def _IsIosWatchKitExtension(self):
-    return int(self.spec.get('ios_watchkit_extension', 0)) != 0
-
-  def _IsIosWatchApp(self):
-    return int(self.spec.get('ios_watch_app', 0)) != 0
-
-  def GetFrameworkVersion(self):
-    """Returns the framework version of the current target. Only valid for
-    bundles."""
-    assert self._IsBundle()
-    return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
-
-  def GetWrapperExtension(self):
-    """Returns the bundle extension (.app, .framework, .plugin, etc).  Only
-    valid for bundles."""
-    assert self._IsBundle()
-    if self.spec['type'] in ('loadable_module', 'shared_library'):
-      default_wrapper_extension = {
-        'loadable_module': 'bundle',
-        'shared_library': 'framework',
-      }[self.spec['type']]
-      wrapper_extension = self.GetPerTargetSetting(
-          'WRAPPER_EXTENSION', default=default_wrapper_extension)
-      return '.' + self.spec.get('product_extension', wrapper_extension)
-    elif self.spec['type'] == 'executable':
-      if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
-        return '.' + self.spec.get('product_extension', 'appex')
-      else:
-        return '.' + self.spec.get('product_extension', 'app')
-    else:
-      assert False, "Don't know extension for '%s', target '%s'" % (
-          self.spec['type'], self.spec['target_name'])
-
-  def GetProductName(self):
-    """Returns PRODUCT_NAME."""
-    return self.spec.get('product_name', self.spec['target_name'])
-
-  def GetFullProductName(self):
-    """Returns FULL_PRODUCT_NAME."""
-    if self._IsBundle():
-      return self.GetWrapperName()
-    else:
-      return self._GetStandaloneBinaryPath()
-
-  def GetWrapperName(self):
-    """Returns the directory name of the bundle represented by this target.
-    Only valid for bundles."""
-    assert self._IsBundle()
-    return self.GetProductName() + self.GetWrapperExtension()
-
-  def GetBundleContentsFolderPath(self):
-    """Returns the qualified path to the bundle's contents folder. E.g.
-    Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
-    if self.isIOS:
-      return self.GetWrapperName()
-    assert self._IsBundle()
-    if self.spec['type'] == 'shared_library':
-      return os.path.join(
-          self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
-    else:
-      # loadable_modules have a 'Contents' folder like executables.
-      return os.path.join(self.GetWrapperName(), 'Contents')
-
-  def GetBundleResourceFolder(self):
-    """Returns the qualified path to the bundle's resource folder. E.g.
-    Chromium.app/Contents/Resources. Only valid for bundles."""
-    assert self._IsBundle()
-    if self.isIOS:
-      return self.GetBundleContentsFolderPath()
-    return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
-
-  def GetBundlePlistPath(self):
-    """Returns the qualified path to the bundle's plist file. E.g.
-    Chromium.app/Contents/Info.plist. Only valid for bundles."""
-    assert self._IsBundle()
-    if self.spec['type'] in ('executable', 'loadable_module') or \
-        self.IsIosFramework():
-      return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
-    else:
-      return os.path.join(self.GetBundleContentsFolderPath(),
-                          'Resources', 'Info.plist')
-
-  def GetProductType(self):
-    """Returns the PRODUCT_TYPE of this target."""
-    if self._IsIosAppExtension():
-      assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
-          '(target %s)' % self.spec['target_name'])
-      return 'com.apple.product-type.app-extension'
-    if self._IsIosWatchKitExtension():
-      assert self._IsBundle(), ('ios_watchkit_extension flag requires '
-          'mac_bundle (target %s)' % self.spec['target_name'])
-      return 'com.apple.product-type.watchkit-extension'
-    if self._IsIosWatchApp():
-      assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
-          '(target %s)' % self.spec['target_name'])
-      return 'com.apple.product-type.application.watchapp'
-    if self._IsXCUiTest():
-      assert self._IsBundle(), ('mac_xcuitest_bundle flag requires mac_bundle '
-          '(target %s)' % self.spec['target_name'])
-      return 'com.apple.product-type.bundle.ui-testing'
-    if self._IsBundle():
-      return {
-        'executable': 'com.apple.product-type.application',
-        'loadable_module': 'com.apple.product-type.bundle',
-        'shared_library': 'com.apple.product-type.framework',
-      }[self.spec['type']]
-    else:
-      return {
-        'executable': 'com.apple.product-type.tool',
-        'loadable_module': 'com.apple.product-type.library.dynamic',
-        'shared_library': 'com.apple.product-type.library.dynamic',
-        'static_library': 'com.apple.product-type.library.static',
-      }[self.spec['type']]
-
-  def GetMachOType(self):
-    """Returns the MACH_O_TYPE of this target."""
-    # Weird, but matches Xcode.
-    if not self._IsBundle() and self.spec['type'] == 'executable':
-      return ''
-    return {
-      'executable': 'mh_execute',
-      'static_library': 'staticlib',
-      'shared_library': 'mh_dylib',
-      'loadable_module': 'mh_bundle',
-    }[self.spec['type']]
-
-  def _GetBundleBinaryPath(self):
-    """Returns the name of the bundle binary of this target.
-    E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
-    assert self._IsBundle()
-    if self.spec['type'] in ('shared_library',) or self.isIOS:
-      path = self.GetBundleContentsFolderPath()
-    elif self.spec['type'] in ('executable', 'loadable_module'):
-      path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
-    return os.path.join(path, self.GetExecutableName())
-
-  def _GetStandaloneExecutableSuffix(self):
-    if 'product_extension' in self.spec:
-      return '.' + self.spec['product_extension']
-    return {
-      'executable': '',
-      'static_library': '.a',
-      'shared_library': '.dylib',
-      'loadable_module': '.so',
-    }[self.spec['type']]
-
-  def _GetStandaloneExecutablePrefix(self):
-    return self.spec.get('product_prefix', {
-      'executable': '',
-      'static_library': 'lib',
-      'shared_library': 'lib',
-      # Non-bundled loadable_modules are called foo.so for some reason
-      # (that is, .so and no prefix) with the xcode build -- match that.
-      'loadable_module': '',
-    }[self.spec['type']])
-
-  def _GetStandaloneBinaryPath(self):
-    """Returns the name of the non-bundle binary represented by this target.
-    E.g. hello_world. Only valid for non-bundles."""
-    assert not self._IsBundle()
-    assert self.spec['type'] in (
-        'executable', 'shared_library', 'static_library', 'loadable_module'), (
-        'Unexpected type %s' % self.spec['type'])
-    target = self.spec['target_name']
-    if self.spec['type'] == 'static_library':
-      if target[:3] == 'lib':
-        target = target[3:]
-    elif self.spec['type'] in ('loadable_module', 'shared_library'):
-      if target[:3] == 'lib':
-        target = target[3:]
-
-    target_prefix = self._GetStandaloneExecutablePrefix()
-    target = self.spec.get('product_name', target)
-    target_ext = self._GetStandaloneExecutableSuffix()
-    return target_prefix + target + target_ext
-
-  def GetExecutableName(self):
-    """Returns the executable name of the bundle represented by this target.
-    E.g. Chromium."""
-    if self._IsBundle():
-      return self.spec.get('product_name', self.spec['target_name'])
-    else:
-      return self._GetStandaloneBinaryPath()
-
-  def GetExecutablePath(self):
-    """Returns the directory name of the bundle represented by this target. E.g.
-    Chromium.app/Contents/MacOS/Chromium."""
-    if self._IsBundle():
-      return self._GetBundleBinaryPath()
-    else:
-      return self._GetStandaloneBinaryPath()
-
-  def GetActiveArchs(self, configname):
-    """Returns the architectures this target should be built for."""
-    config_settings = self.xcode_settings[configname]
-    xcode_archs_default = GetXcodeArchsDefault()
-    return xcode_archs_default.ActiveArchs(
-        config_settings.get('ARCHS'),
-        config_settings.get('VALID_ARCHS'),
-        config_settings.get('SDKROOT'))
-
-  def _GetSdkVersionInfoItem(self, sdk, infoitem):
-    # xcodebuild requires Xcode and can't run on Command Line Tools-only
-    # systems from 10.7 onward.
-    # Since the CLT has no SDK paths anyway, returning None is the
-    # most sensible route and should still do the right thing.
-    try:
-      return GetStdout(['xcrun', '--sdk', sdk, infoitem])
-    except:
-      pass
-
-  def _SdkRoot(self, configname):
-    if configname is None:
-      configname = self.configname
-    return self.GetPerConfigSetting('SDKROOT', configname, default='')
-
-  def _XcodePlatformPath(self, configname=None):
-    sdk_root = self._SdkRoot(configname)
-    if sdk_root not in XcodeSettings._platform_path_cache:
-      platform_path = self._GetSdkVersionInfoItem(sdk_root,
-                                                  '--show-sdk-platform-path')
-      XcodeSettings._platform_path_cache[sdk_root] = platform_path
-    return XcodeSettings._platform_path_cache[sdk_root]
-
-  def _SdkPath(self, configname=None):
-    sdk_root = self._SdkRoot(configname)
-    if sdk_root.startswith('/'):
-      return sdk_root
-    return self._XcodeSdkPath(sdk_root)
-
-  def _XcodeSdkPath(self, sdk_root):
-    if sdk_root not in XcodeSettings._sdk_path_cache:
-      sdk_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-path')
-      XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
-      if sdk_root:
-        XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
-    return XcodeSettings._sdk_path_cache[sdk_root]
-
-  def _AppendPlatformVersionMinFlags(self, lst):
-    self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
-    if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
-      # TODO: Implement this better?
-      sdk_path_basename = os.path.basename(self._SdkPath())
-      if sdk_path_basename.lower().startswith('iphonesimulator'):
-        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
-                      '-mios-simulator-version-min=%s')
-      else:
-        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
-                      '-miphoneos-version-min=%s')
-
-  def GetCflags(self, configname, arch=None):
-    """Returns flags that need to be added to .c, .cc, .m, and .mm
-    compilations."""
-    # These functions (and the similar ones below) do not offer complete
-    # emulation of all xcode_settings keys. They're implemented on demand.
-
-    self.configname = configname
-    cflags = []
-
-    sdk_root = self._SdkPath()
-    if 'SDKROOT' in self._Settings() and sdk_root:
-      cflags.append('-isysroot %s' % sdk_root)
-
-    if self.header_map_path:
-      cflags.append('-I%s' % self.header_map_path)
-
-    if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
-      cflags.append('-Wconstant-conversion')
-
-    if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
-      cflags.append('-funsigned-char')
-
-    if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
-      cflags.append('-fasm-blocks')
-
-    if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
-      if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
-        cflags.append('-mdynamic-no-pic')
-    else:
-      pass
-      # TODO: In this case, it depends on the target. Xcode passes
-      # -mdynamic-no-pic by default for executables and possibly static libs,
-      # according to mento.
-
-    if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
-      cflags.append('-mpascal-strings')
-
-    self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
-
-    if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
-      dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
-      if dbg_format == 'dwarf':
-        cflags.append('-gdwarf-2')
-      elif dbg_format == 'stabs':
-        raise NotImplementedError('stabs debug format is not supported yet.')
-      elif dbg_format == 'dwarf-with-dsym':
-        cflags.append('-gdwarf-2')
-      else:
-        raise NotImplementedError('Unknown debug format %s' % dbg_format)
-
-    if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
-      cflags.append('-fstrict-aliasing')
-    elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
-      cflags.append('-fno-strict-aliasing')
-
-    if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
-      cflags.append('-fvisibility=hidden')
-
-    if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
-      cflags.append('-Werror')
-
-    if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
-      cflags.append('-Wnewline-eof')
-
-    # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
-    # llvm-gcc. It also requires a fairly recent libtool, and
-    # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
-    # path to the libLTO.dylib that matches the used clang.
-    if self._Test('LLVM_LTO', 'YES', default='NO'):
-      cflags.append('-flto')
-
-    self._AppendPlatformVersionMinFlags(cflags)
-
-    # TODO:
-    if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
-      self._WarnUnimplemented('COPY_PHASE_STRIP')
-    self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
-    self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
-
-    # TODO: This is exported correctly, but assigning to it is not supported.
-    self._WarnUnimplemented('MACH_O_TYPE')
-    self._WarnUnimplemented('PRODUCT_TYPE')
-
-    if arch is not None:
-      archs = [arch]
-    else:
-      assert self.configname
-      archs = self.GetActiveArchs(self.configname)
-    if len(archs) != 1:
-      # TODO: Supporting fat binaries will be annoying.
-      self._WarnUnimplemented('ARCHS')
-      archs = ['i386']
-    cflags.append('-arch ' + archs[0])
-
-    if archs[0] in ('i386', 'x86_64'):
-      if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
-        cflags.append('-msse3')
-      if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
-                    default='NO'):
-        cflags.append('-mssse3')  # Note 3rd 's'.
-      if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
-        cflags.append('-msse4.1')
-      if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
-        cflags.append('-msse4.2')
-
-    cflags += self._Settings().get('WARNING_CFLAGS', [])
-
-    platform_root = self._XcodePlatformPath(configname)
-    if platform_root and self._IsXCTest():
-      cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
-
-    if sdk_root:
-      framework_root = sdk_root
-    else:
-      framework_root = ''
-    config = self.spec['configurations'][self.configname]
-    framework_dirs = config.get('mac_framework_dirs', [])
-    for directory in framework_dirs:
-      cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
-
-    self.configname = None
-    return cflags
-
-  def GetCflagsC(self, configname):
-    """Returns flags that need to be added to .c, and .m compilations."""
-    self.configname = configname
-    cflags_c = []
-    if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
-      cflags_c.append('-ansi')
-    else:
-      self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
-    cflags_c += self._Settings().get('OTHER_CFLAGS', [])
-    self.configname = None
-    return cflags_c
-
-  def GetCflagsCC(self, configname):
-    """Returns flags that need to be added to .cc, and .mm compilations."""
-    self.configname = configname
-    cflags_cc = []
-
-    clang_cxx_language_standard = self._Settings().get(
-        'CLANG_CXX_LANGUAGE_STANDARD')
-    # Note: Don't map c++0x to c++11, so that c++0x can be used with older
-    # clangs that don't understand c++11 yet (like Xcode 4.2's).
-    if clang_cxx_language_standard:
-      cflags_cc.append('-std=%s' % clang_cxx_language_standard)
-
-    self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
-    if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
-      cflags_cc.append('-fno-rtti')
-    if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
-      cflags_cc.append('-fno-exceptions')
-    if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
-      cflags_cc.append('-fvisibility-inlines-hidden')
-    if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
-      cflags_cc.append('-fno-threadsafe-statics')
-    # Note: This flag is a no-op for clang, it only has an effect for gcc.
-    if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
-      cflags_cc.append('-Wno-invalid-offsetof')
-
-    other_ccflags = []
-
-    for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
-      # TODO: More general variable expansion. Missing in many other places too.
-      if flag in ('$inherited', '$(inherited)', '${inherited}'):
-        flag = '$OTHER_CFLAGS'
-      if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
-        other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
-      else:
-        other_ccflags.append(flag)
-    cflags_cc += other_ccflags
-
-    self.configname = None
-    return cflags_cc
-
-  def _AddObjectiveCGarbageCollectionFlags(self, flags):
-    gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
-    if gc_policy == 'supported':
-      flags.append('-fobjc-gc')
-    elif gc_policy == 'required':
-      flags.append('-fobjc-gc-only')
-
-  def _AddObjectiveCARCFlags(self, flags):
-    if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
-      flags.append('-fobjc-arc')
-
-  def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
-    if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
-                  'YES', default='NO'):
-      flags.append('-Wobjc-missing-property-synthesis')
-
-  def GetCflagsObjC(self, configname):
-    """Returns flags that need to be added to .m compilations."""
-    self.configname = configname
-    cflags_objc = []
-    self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
-    self._AddObjectiveCARCFlags(cflags_objc)
-    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
-    self.configname = None
-    return cflags_objc
-
-  def GetCflagsObjCC(self, configname):
-    """Returns flags that need to be added to .mm compilations."""
-    self.configname = configname
-    cflags_objcc = []
-    self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
-    self._AddObjectiveCARCFlags(cflags_objcc)
-    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
-    if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
-      cflags_objcc.append('-fobjc-call-cxx-cdtors')
-    self.configname = None
-    return cflags_objcc
-
-  def GetInstallNameBase(self):
-    """Return DYLIB_INSTALL_NAME_BASE for this target."""
-    # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
-    if (self.spec['type'] != 'shared_library' and
-        (self.spec['type'] != 'loadable_module' or self._IsBundle())):
-      return None
-    install_base = self.GetPerTargetSetting(
-        'DYLIB_INSTALL_NAME_BASE',
-        default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
-    return install_base
-
-  def _StandardizePath(self, path):
-    """Do :standardizepath processing for path."""
-    # I'm not quite sure what :standardizepath does. Just call normpath(),
-    # but don't let @executable_path/../foo collapse to foo.
-    if '/' in path:
-      prefix, rest = '', path
-      if path.startswith('@'):
-        prefix, rest = path.split('/', 1)
-      rest = os.path.normpath(rest)  # :standardizepath
-      path = os.path.join(prefix, rest)
-    return path
-
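The :standardizepath emulation above boils down to normalizing everything after an optional @-prefix. A minimal, self-contained sketch of that behavior (the helper name is illustrative, not part of gyp):

import os

def standardize_path(path):
    # Normalize the path, but keep an @executable_path/@loader_path/@rpath
    # prefix from being collapsed away by normpath().
    if '/' in path:
        prefix, rest = '', path
        if path.startswith('@'):
            prefix, rest = path.split('/', 1)
        path = os.path.join(prefix, os.path.normpath(rest))
    return path

# '@executable_path/../Frameworks/./Foo' -> '@executable_path/../Frameworks/Foo'
print(standardize_path('@executable_path/../Frameworks/./Foo'))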
-  def GetInstallName(self):
-    """Return LD_DYLIB_INSTALL_NAME for this target."""
-    # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
-    if (self.spec['type'] != 'shared_library' and
-        (self.spec['type'] != 'loadable_module' or self._IsBundle())):
-      return None
-
-    default_install_name = \
-        '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
-    install_name = self.GetPerTargetSetting(
-        'LD_DYLIB_INSTALL_NAME', default=default_install_name)
-
-    # Hardcode support for the variables used in chromium for now, to
-    # unblock people using the make build.
-    if '$' in install_name:
-      assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
-          '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
-          'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
-          'yet in target \'%s\' (got \'%s\')' %
-              (self.spec['target_name'], install_name))
-
-      install_name = install_name.replace(
-          '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
-          self._StandardizePath(self.GetInstallNameBase()))
-      if self._IsBundle():
-        # These are only valid for bundles, hence the |if|.
-        install_name = install_name.replace(
-            '$(WRAPPER_NAME)', self.GetWrapperName())
-        install_name = install_name.replace(
-            '$(PRODUCT_NAME)', self.GetProductName())
-      else:
-        assert '$(WRAPPER_NAME)' not in install_name
-        assert '$(PRODUCT_NAME)' not in install_name
-
-      install_name = install_name.replace(
-          '$(EXECUTABLE_PATH)', self.GetExecutablePath())
-    return install_name
-
-  def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
-    """Checks if ldflag contains a filename and if so remaps it from
-    gyp-directory-relative to build-directory-relative."""
-    # This list is expanded on demand.
-    # They get matched as:
-    #   -exported_symbols_list file
-    #   -Wl,exported_symbols_list file
-    #   -Wl,exported_symbols_list,file
-    LINKER_FILE = r'(\S+)'
-    WORD = r'\S+'
-    linker_flags = [
-      ['-exported_symbols_list', LINKER_FILE],    # Needed for NaCl.
-      ['-unexported_symbols_list', LINKER_FILE],
-      ['-reexported_symbols_list', LINKER_FILE],
-      ['-sectcreate', WORD, WORD, LINKER_FILE],   # Needed for remoting.
-    ]
-    for flag_pattern in linker_flags:
-      regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
-      m = regex.match(ldflag)
-      if m:
-        ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
-                 ldflag[m.end(1):]
-    # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
-    # TODO(thakis): Update ffmpeg.gyp):
-    if ldflag.startswith('-L'):
-      ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
-    return ldflag
-
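The flag-pattern matching above can be exercised in isolation. The sketch below uses the same regex construction, restricted to a single pattern and a made-up path mapper, so the remapping of the filename argument is easy to see:

import re

def map_symbols_list_flag(ldflag, gyp_to_build_path):
    # Same construction as above, restricted to -exported_symbols_list.
    regex = re.compile(r'(?:-Wl,)?' +
                       '[ ,]'.join(['-exported_symbols_list', r'(\S+)']))
    m = regex.match(ldflag)
    if m:
        ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
                 ldflag[m.end(1):]
    return ldflag

# Hypothetical gyp-to-build-path mapper, for illustration only.
def to_build(path):
    return '../../' + path

print(map_symbols_list_flag('-Wl,-exported_symbols_list,foo.syms', to_build))
# -> '-Wl,-exported_symbols_list,../../foo.syms'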
-  def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
-    """Returns flags that need to be passed to the linker.
-
-    Args:
-        configname: The name of the configuration to get ld flags for.
-        product_dir: The directory where products such as static and dynamic
-            libraries are placed. This is added to the library search path.
-        gyp_to_build_path: A function that converts paths relative to the
-            current gyp file to paths relative to the build directory.
-    """
-    self.configname = configname
-    ldflags = []
-
-    # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
-    # can contain entries that depend on this. Explicitly absolutify these.
-    for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
-      ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
-
-    if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
-      ldflags.append('-Wl,-dead_strip')
-
-    if self._Test('PREBINDING', 'YES', default='NO'):
-      ldflags.append('-Wl,-prebind')
-
-    self._Appendf(
-        ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
-    self._Appendf(
-        ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
-
-    self._AppendPlatformVersionMinFlags(ldflags)
-
-    if 'SDKROOT' in self._Settings() and self._SdkPath():
-      ldflags.append('-isysroot ' + self._SdkPath())
-
-    for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
-      ldflags.append('-L' + gyp_to_build_path(library_path))
-
-    if 'ORDER_FILE' in self._Settings():
-      ldflags.append('-Wl,-order_file ' +
-                     '-Wl,' + gyp_to_build_path(
-                                  self._Settings()['ORDER_FILE']))
-
-    if arch is not None:
-      archs = [arch]
-    else:
-      assert self.configname
-      archs = self.GetActiveArchs(self.configname)
-    if len(archs) != 1:
-      # TODO: Supporting fat binaries will be annoying.
-      self._WarnUnimplemented('ARCHS')
-      archs = ['i386']
-    ldflags.append('-arch ' + archs[0])
-
-    # Xcode adds the product directory by default.
-    # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
-    ldflags.append('-L' + (product_dir if product_dir != '.' else './'))
-
-    install_name = self.GetInstallName()
-    if install_name and self.spec['type'] != 'loadable_module':
-      ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
-
-    for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
-      ldflags.append('-Wl,-rpath,' + rpath)
-
-    sdk_root = self._SdkPath()
-    if not sdk_root:
-      sdk_root = ''
-    config = self.spec['configurations'][self.configname]
-    framework_dirs = config.get('mac_framework_dirs', [])
-    for directory in framework_dirs:
-      ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
-
-    platform_root = self._XcodePlatformPath(configname)
-    if sdk_root and platform_root and self._IsXCTest():
-      ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
-      ldflags.append('-framework XCTest')
-
-    is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
-    if sdk_root and is_extension:
-      # Adds the link flags for extensions. These flags are common for all
-      # extensions and provide the loader and main function.
-      # They reflect the options Xcode itself uses when building extensions.
-      if XcodeVersion()[0] < '0900':
-        ldflags.append('-lpkstart')
-        ldflags.append(sdk_root +
-            '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
-      else:
-        ldflags.append('-e _NSExtensionMain')
-      ldflags.append('-fapplication-extension')
-
-    self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
-    self.configname = None
-    return ldflags
-
-  def GetLibtoolflags(self, configname):
-    """Returns flags that need to be passed to the static linker.
-
-    Args:
-        configname: The name of the configuration to get libtool flags for.
-    """
-    self.configname = configname
-    libtoolflags = []
-
-    for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
-      libtoolflags.append(libtoolflag)
-    # TODO(thakis): ARCHS?
-
-    self.configname = None
-    return libtoolflags
-
-  def GetPerTargetSettings(self):
-    """Gets a dict of all the per-target settings. This will only fetch keys
-    whose values are the same across all configurations."""
-    first_pass = True
-    result = {}
-    for configname in sorted(self.xcode_settings.keys()):
-      if first_pass:
-        result = dict(self.xcode_settings[configname])
-        first_pass = False
-      else:
-        for key, value in self.xcode_settings[configname].iteritems():
-          if key not in result:
-            continue
-          elif result[key] != value:
-            del result[key]
-    return result
-
-  def GetPerConfigSetting(self, setting, configname, default=None):
-    if configname in self.xcode_settings:
-      return self.xcode_settings[configname].get(setting, default)
-    else:
-      return self.GetPerTargetSetting(setting, default)
-
-  def GetPerTargetSetting(self, setting, default=None):
-    """Tries to get xcode_settings.setting from spec. Assumes that the setting
-       has the same value in all configurations and throws otherwise."""
-    is_first_pass = True
-    result = None
-    for configname in sorted(self.xcode_settings.keys()):
-      if is_first_pass:
-        result = self.xcode_settings[configname].get(setting, None)
-        is_first_pass = False
-      else:
-        assert result == self.xcode_settings[configname].get(setting, None), (
-            "Expected per-target setting for '%s', got per-config setting "
-            "(target %s)" % (setting, self.spec['target_name']))
-    if result is None:
-      return default
-    return result
-
-  def _GetStripPostbuilds(self, configname, output_binary, quiet):
-    """Returns a list of shell commands necessary to strip this target's
-    binary. These should be run as postbuilds before the actual postbuilds
-    run."""
-    self.configname = configname
-
-    result = []
-    if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
-        self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
-
-      default_strip_style = 'debugging'
-      if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension())
-          and self._IsBundle()):
-        default_strip_style = 'non-global'
-      elif self.spec['type'] == 'executable':
-        default_strip_style = 'all'
-
-      strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
-      strip_flags = {
-        'all': '',
-        'non-global': '-x',
-        'debugging': '-S',
-      }[strip_style]
-
-      explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
-      if explicit_strip_flags:
-        strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
-
-      if not quiet:
-        result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
-      result.append('strip %s %s' % (strip_flags, output_binary))
-
-    self.configname = None
-    return result
-
-  def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
-    """Returns a list of shell commands necessary to massage this target's
-    debug information. These should be run as postbuilds before the actual
-    postbuilds run."""
-    self.configname = configname
-
-    # For static libraries, no dSYMs are created.
-    result = []
-    if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
-        self._Test(
-            'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
-        self.spec['type'] != 'static_library'):
-      if not quiet:
-        result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
-      result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
-
-    self.configname = None
-    return result
-
-  def _GetTargetPostbuilds(self, configname, output, output_binary,
-                           quiet=False):
-    """Returns a list of shell commands that contain the shell commands
-    to run as postbuilds for this target, before the actual postbuilds."""
-    # dSYMs need to build before stripping happens.
-    return (
-        self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
-        self._GetStripPostbuilds(configname, output_binary, quiet))
-
-  def _GetIOSPostbuilds(self, configname, output_binary):
-    """Returns a list of shell commands to codesign the iOS output binary so it
-    can be deployed to a device.  This should be run as the very last step of
-    the build."""
-    if not (self.isIOS and
-        (self.spec['type'] == 'executable' or self._IsXCTest()) or
-         self.IsIosFramework()):
-      return []
-
-    postbuilds = []
-    product_name = self.GetFullProductName()
-    settings = self.xcode_settings[configname]
-
-    # Xcode expects XCTests to be copied into the TEST_HOST dir.
-    if self._IsXCTest():
-      source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
-      test_host = os.path.dirname(settings.get('TEST_HOST'))
-      xctest_destination = os.path.join(test_host, 'PlugIns', product_name)
-      postbuilds.extend(['ditto %s %s' % (source, xctest_destination)])
-
-    key = self._GetIOSCodeSignIdentityKey(settings)
-    if not key:
-      return postbuilds
-
-    # Warn for any unimplemented signing xcode keys.
-    unimpl = ['OTHER_CODE_SIGN_FLAGS']
-    unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
-    if unimpl:
-      print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
-          ', '.join(sorted(unimpl)))
-
-    if self._IsXCTest():
-      # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
-      test_host = os.path.dirname(settings.get('TEST_HOST'))
-      frameworks_dir = os.path.join(test_host, 'Frameworks')
-      platform_root = self._XcodePlatformPath(configname)
-      frameworks = \
-          ['Developer/Library/PrivateFrameworks/IDEBundleInjection.framework',
-           'Developer/Library/Frameworks/XCTest.framework']
-      for framework in frameworks:
-        source = os.path.join(platform_root, framework)
-        destination = os.path.join(frameworks_dir, os.path.basename(framework))
-        postbuilds.extend(['ditto %s %s' % (source, destination)])
-
-        # Then re-sign everything with 'preserve=True'
-        postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
-            os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
-            settings.get('CODE_SIGN_ENTITLEMENTS', ''),
-            settings.get('PROVISIONING_PROFILE', ''), destination, True)
-        ])
-      plugin_dir = os.path.join(test_host, 'PlugIns')
-      targets = [os.path.join(plugin_dir, product_name), test_host]
-      for target in targets:
-        postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
-            os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
-            settings.get('CODE_SIGN_ENTITLEMENTS', ''),
-            settings.get('PROVISIONING_PROFILE', ''), target, True)
-        ])
-
-    postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
-        os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
-        settings.get('CODE_SIGN_ENTITLEMENTS', ''),
-        settings.get('PROVISIONING_PROFILE', ''),
-        os.path.join("${BUILT_PRODUCTS_DIR}", product_name), False)
-    ])
-    return postbuilds
-
-  def _GetIOSCodeSignIdentityKey(self, settings):
-    identity = settings.get('CODE_SIGN_IDENTITY')
-    if not identity:
-      return None
-    if identity not in XcodeSettings._codesigning_key_cache:
-      output = subprocess.check_output(
-          ['security', 'find-identity', '-p', 'codesigning', '-v'])
-      for line in output.splitlines():
-        if identity in line:
-          fingerprint = line.split()[1]
-          cache = XcodeSettings._codesigning_key_cache
-          assert identity not in cache or fingerprint == cache[identity], (
-              "Multiple codesigning fingerprints for identity: %s" % identity)
-          XcodeSettings._codesigning_key_cache[identity] = fingerprint
-    return XcodeSettings._codesigning_key_cache.get(identity, '')
-
-  def AddImplicitPostbuilds(self, configname, output, output_binary,
-                            postbuilds=[], quiet=False):
-    """Returns a list of shell commands that should run before and after
-    |postbuilds|."""
-    assert output_binary is not None
-    pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
-    post = self._GetIOSPostbuilds(configname, output_binary)
-    return pre + postbuilds + post
-
-  def _AdjustLibrary(self, library, config_name=None):
-    if library.endswith('.framework'):
-      l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
-    else:
-      m = self.library_re.match(library)
-      if m:
-        l = '-l' + m.group(1)
-      else:
-        l = library
-
-    sdk_root = self._SdkPath(config_name)
-    if not sdk_root:
-      sdk_root = ''
-    # Xcode 7 started shipping ".tbd" (text-based stub) files instead of
-    # ".dylib" without providing real support for them. What it does, for
-    # "/usr/lib" libraries, is pass "-L/usr/lib -lname", which depends on the
-    # library order and causes collisions when building Chrome.
-    #
-    # Instead, substitute ".dylib" with ".tbd" in the generated project when
-    # both of the following conditions are true:
-    # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
-    # - the ".dylib" file does not exist but a ".tbd" file does.
-    library = l.replace('$(SDKROOT)', sdk_root)
-    if l.startswith('$(SDKROOT)'):
-      basename, ext = os.path.splitext(library)
-      if ext == '.dylib' and not os.path.exists(library):
-        tbd_library = basename + '.tbd'
-        if os.path.exists(tbd_library):
-          library = tbd_library
-    return library
-
-  def AdjustLibraries(self, libraries, config_name=None):
-    """Transforms entries like 'Cocoa.framework' in libraries into entries like
-    '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
-    """
-    libraries = [self._AdjustLibrary(library, config_name)
-                 for library in libraries]
-    return libraries
-
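A self-contained sketch of the mapping AdjustLibraries performs, leaving out the SDKROOT and .tbd handling (the regex below is an assumption standing in for the class's library_re):

import os
import re

_dylib_re = re.compile(r'^lib(.+)\.dylib$')

def adjust_library(library):
    # 'Cocoa.framework' -> '-framework Cocoa', 'libcrypto.dylib' -> '-lcrypto',
    # everything else is passed through unchanged.
    if library.endswith('.framework'):
        return '-framework ' + os.path.splitext(os.path.basename(library))[0]
    m = _dylib_re.match(library)
    if m:
        return '-l' + m.group(1)
    return library

print([adjust_library(l) for l in ['Cocoa.framework', 'libcrypto.dylib', '-lfoo']])
# ['-framework Cocoa', '-lcrypto', '-lfoo']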
-  def _BuildMachineOSBuild(self):
-    return GetStdout(['sw_vers', '-buildVersion'])
-
-  def _XcodeIOSDeviceFamily(self, configname):
-    family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
-    return [int(x) for x in family.split(',')]
-
-  def GetExtraPlistItems(self, configname=None):
-    """Returns a dictionary with extra items to insert into Info.plist."""
-    if configname not in XcodeSettings._plist_cache:
-      cache = {}
-      cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
-
-      xcode, xcode_build = XcodeVersion()
-      cache['DTXcode'] = xcode
-      cache['DTXcodeBuild'] = xcode_build
-      compiler = self.xcode_settings[configname].get('GCC_VERSION')
-      if compiler is not None:
-        cache['DTCompiler'] = compiler
-
-      sdk_root = self._SdkRoot(configname)
-      if not sdk_root:
-        sdk_root = self._DefaultSdkRoot()
-      sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version')
-      cache['DTSDKName'] = sdk_root + (sdk_version or '')
-      if xcode >= '0720':
-        cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
-            sdk_root, '--show-sdk-build-version')
-      elif xcode >= '0430':
-        cache['DTSDKBuild'] = sdk_version
-      else:
-        cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
-
-      if self.isIOS:
-        cache['MinimumOSVersion'] = self.xcode_settings[configname].get(
-            'IPHONEOS_DEPLOYMENT_TARGET')
-        cache['DTPlatformName'] = sdk_root
-        cache['DTPlatformVersion'] = sdk_version
-
-        if configname.endswith("iphoneos"):
-          cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
-          cache['DTPlatformBuild'] = cache['DTSDKBuild']
-        else:
-          cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
-          # This is weird, but Xcode sets DTPlatformBuild to an empty field
-          # for simulator builds.
-          cache['DTPlatformBuild'] = ""
-      XcodeSettings._plist_cache[configname] = cache
-
-    # Include extra plist items that are per-target, not per global
-    # XcodeSettings.
-    items = dict(XcodeSettings._plist_cache[configname])
-    if self.isIOS:
-      items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
-    return items
-
-  def _DefaultSdkRoot(self):
-    """Returns the default SDKROOT to use.
-
-    Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
-    project, then the environment variable was empty. Starting with this
-    version, Xcode uses the name of the newest SDK installed.
-    """
-    xcode_version, xcode_build = XcodeVersion()
-    if xcode_version < '0500':
-      return ''
-    default_sdk_path = self._XcodeSdkPath('')
-    default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
-    if default_sdk_root:
-      return default_sdk_root
-    try:
-      all_sdks = GetStdout(['xcodebuild', '-showsdks'])
-    except:
-      # If xcodebuild fails, there will be no valid SDKs
-      return ''
-    for line in all_sdks.splitlines():
-      items = line.split()
-      if len(items) >= 3 and items[-2] == '-sdk':
-        sdk_root = items[-1]
-        sdk_path = self._XcodeSdkPath(sdk_root)
-        if sdk_path == default_sdk_path:
-          return sdk_root
-    return ''
-
-
-class MacPrefixHeader(object):
-  """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
-
-  This feature consists of several pieces:
-  * If GCC_PREFIX_HEADER is present, all compilations in that project get an
-    additional |-include path_to_prefix_header| cflag.
-  * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
-    instead compiled, and all other compilations in the project get an
-    additional |-include path_to_compiled_header| instead.
-    + Compiled prefix headers have the extension gch. There is one gch file for
-      every language used in the project (c, cc, m, mm), since gch files for
-      different languages aren't compatible.
-    + gch files themselves are built with the target's normal cflags, but they
-      obviously don't get the |-include| flag. Instead, they need a -x flag that
-      describes their language.
-    + All o files in the target need to depend on the gch file, to make sure
-      it's built before any o file is built.
-
-  This class helps with some of these tasks, but it needs help from the build
-  system for writing dependencies to the gch files, for writing build commands
-  for the gch files, and for figuring out the location of the gch files.
-  """
-  def __init__(self, xcode_settings,
-               gyp_path_to_build_path, gyp_path_to_build_output):
-    """If xcode_settings is None, all methods on this class are no-ops.
-
-    Args:
-        gyp_path_to_build_path: A function that takes a gyp-relative path,
-            and returns a path relative to the build directory.
-        gyp_path_to_build_output: A function that takes a gyp-relative path and
-            a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
-            to where the output of precompiling that path for that language
-            should be placed (without the trailing '.gch').
-    """
-    # This doesn't support per-configuration prefix headers. Good enough
-    # for now.
-    self.header = None
-    self.compile_headers = False
-    if xcode_settings:
-      self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
-      self.compile_headers = xcode_settings.GetPerTargetSetting(
-          'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
-    self.compiled_headers = {}
-    if self.header:
-      if self.compile_headers:
-        for lang in ['c', 'cc', 'm', 'mm']:
-          self.compiled_headers[lang] = gyp_path_to_build_output(
-              self.header, lang)
-      self.header = gyp_path_to_build_path(self.header)
-
-  def _CompiledHeader(self, lang, arch):
-    assert self.compile_headers
-    h = self.compiled_headers[lang]
-    if arch:
-      h += '.' + arch
-    return h
-
-  def GetInclude(self, lang, arch=None):
-    """Gets the cflags to include the prefix header for language |lang|."""
-    if self.compile_headers and lang in self.compiled_headers:
-      return '-include %s' % self._CompiledHeader(lang, arch)
-    elif self.header:
-      return '-include %s' % self.header
-    else:
-      return ''
-
-  def _Gch(self, lang, arch):
-    """Returns the file name of the compiled prefix header for language |lang|."""
-    assert self.compile_headers
-    return self._CompiledHeader(lang, arch) + '.gch'
-
-  def GetObjDependencies(self, sources, objs, arch=None):
-    """Given a list of source files and the corresponding object files, returns
-    a list of (source, object, gch) tuples, where |gch| is the build-directory
-    relative path to the gch file each object file depends on.  |sources[i]|
-    has to be the source file belonging to |objs[i]|."""
-    if not self.header or not self.compile_headers:
-      return []
-
-    result = []
-    for source, obj in zip(sources, objs):
-      ext = os.path.splitext(source)[1]
-      lang = {
-        '.c': 'c',
-        '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
-        '.m': 'm',
-        '.mm': 'mm',
-      }.get(ext, None)
-      if lang:
-        result.append((source, obj, self._Gch(lang, arch)))
-    return result
-
-  def GetPchBuildCommands(self, arch=None):
-    """Returns [(path_to_gch, language_flag, language, header)].
-    |path_to_gch| and |header| are relative to the build directory.
-    """
-    if not self.header or not self.compile_headers:
-      return []
-    return [
-      (self._Gch('c', arch), '-x c-header', 'c', self.header),
-      (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
-      (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
-      (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
-    ]
-
-
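To make the per-language gch bookkeeping concrete, the sketch below mirrors the extension-to-language mapping used by GetObjDependencies; the source list and output path are made up for the example:

import os

EXT_TO_LANG = {'.c': 'c', '.cc': 'cc', '.cpp': 'cc', '.cxx': 'cc',
               '.m': 'm', '.mm': 'mm'}

sources = ['src/foo.c', 'src/bar.mm', 'src/baz.cc']  # hypothetical inputs
for source in sources:
    lang = EXT_TO_LANG.get(os.path.splitext(source)[1])
    if lang:
        # Every object built from |source| depends on the per-language gch,
        # here named out/prefix.h-<lang>.gch purely for illustration.
        print('%s -> out/prefix.h-%s.gch' % (source, lang))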
-def XcodeVersion():
-  """Returns a tuple of version and build version of installed Xcode."""
-  # `xcodebuild -version` output looks like
-  #    Xcode 4.6.3
-  #    Build version 4H1503
-  # or like
-  #    Xcode 3.2.6
-  #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
-  #    BuildVersion: 10M2518
-  # Convert that to '0463', '4H1503'.
-  global XCODE_VERSION_CACHE
-  if XCODE_VERSION_CACHE:
-    return XCODE_VERSION_CACHE
-  try:
-    version_list = GetStdout(['xcodebuild', '-version']).splitlines()
-    # In some circumstances xcodebuild exits 0 but doesn't return
-    # the right results; for example, a user on 10.7 or 10.8 with
-    # a bogus path set via xcode-select.
-    # In that case this may be a CLT-only install, so fall back to
-    # checking that version.
-    if len(version_list) < 2:
-      raise GypError("xcodebuild returned unexpected results")
-  except:
-    version = CLTVersion()
-    if version:
-      version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
-    else:
-      raise GypError("No Xcode or CLT version detected!")
-    # The CLT has no build information, so we return an empty string.
-    version_list = [version, '']
-  version = version_list[0]
-  build = version_list[-1]
-  # Be careful to convert "4.2" to "0420":
-  version = version.split()[-1].replace('.', '')
-  version = (version + '0' * (3 - len(version))).zfill(4)
-  if build:
-    build = build.split()[-1]
-  XCODE_VERSION_CACHE = (version, build)
-  return XCODE_VERSION_CACHE
-
-
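The version normalization in XcodeVersion() is easy to get wrong; the sketch below isolates just that conversion so the '4.2' -> '0420' and '4.6.3' -> '0463' cases from the comment can be checked directly:

def normalize_xcode_version(version_line):
    # Same padding as above: 'Xcode 4.6.3' -> '0463', 'Xcode 4.2' -> '0420'.
    version = version_line.split()[-1].replace('.', '')
    return (version + '0' * (3 - len(version))).zfill(4)

assert normalize_xcode_version('Xcode 4.6.3') == '0463'
assert normalize_xcode_version('Xcode 4.2') == '0420'
assert normalize_xcode_version('Xcode 8.0') == '0800'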
-# This function is ported from the logic in Homebrew's CLT version check.
-def CLTVersion():
-  """Returns the version of command-line tools from pkgutil."""
-  # pkgutil output looks like
-  #   package-id: com.apple.pkg.CLTools_Executables
-  #   version: 5.0.1.0.1.1382131676
-  #   volume: /
-  #   location: /
-  #   install-time: 1382544035
-  #   groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
-  STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
-  FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
-  MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
-  regex = re.compile('version: (?P<version>.+)')
-  for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
-    try:
-      output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
-      return re.search(regex, output).groupdict()['version']
-    except:
-      continue
-
-
-def GetStdout(cmdlist):
-  """Returns the content of standard output returned by invoking |cmdlist|.
-  Raises |GypError| if the command exits with a non-zero return code."""
-  job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
-  out = job.communicate()[0]
-  if job.returncode != 0:
-    sys.stderr.write(out + '\n')
-    raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
-  return out.rstrip('\n')
-
-
-def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
-  """Merges the global xcode_settings dictionary into each configuration of the
-  target represented by spec. For keys that are in both the global and the local
-  xcode_settings dict, the local key takes precedence.
-  """
-  # The xcode generator special-cases global xcode_settings and does something
-  # that amounts to merging in the global xcode_settings into each local
-  # xcode_settings dict.
-  global_xcode_settings = global_dict.get('xcode_settings', {})
-  for config in spec['configurations'].values():
-    if 'xcode_settings' in config:
-      new_settings = global_xcode_settings.copy()
-      new_settings.update(config['xcode_settings'])
-      config['xcode_settings'] = new_settings
-
-
-def IsMacBundle(flavor, spec):
-  """Returns whether |spec| should be treated as a bundle.
-
-  Bundles are directories with a certain subdirectory structure, instead of
-  just a single file. Bundle rules not only produce a binary but also package
-  resources into that directory."""
-  is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \
-      int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \
-      (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
-
-  if is_mac_bundle:
-    assert spec['type'] != 'none', (
-        'mac_bundle targets cannot have type none (target "%s")' %
-        spec['target_name'])
-  return is_mac_bundle
-
-
-def GetMacBundleResources(product_dir, xcode_settings, resources):
-  """Yields (output, resource) pairs for every resource in |resources|.
-  Only call this for mac bundle targets.
-
-  Args:
-      product_dir: Path to the directory containing the output bundle,
-          relative to the build directory.
-      xcode_settings: The XcodeSettings of the current target.
-      resources: A list of bundle resources, relative to the build directory.
-  """
-  dest = os.path.join(product_dir,
-                      xcode_settings.GetBundleResourceFolder())
-  for res in resources:
-    output = dest
-
-    # The make generator doesn't support it, so forbid it everywhere
-    # to keep the generators more interchangeable.
-    assert ' ' not in res, (
-      "Spaces in resource filenames not supported (%s)"  % res)
-
-    # Split into (path,file).
-    res_parts = os.path.split(res)
-
-    # Now split the path into (prefix,maybe.lproj).
-    lproj_parts = os.path.split(res_parts[0])
-    # If the resource lives in a .lproj bundle, add that to the destination.
-    if lproj_parts[1].endswith('.lproj'):
-      output = os.path.join(output, lproj_parts[1])
-
-    output = os.path.join(output, res_parts[1])
-    # Compiled XIB files are referred to by .nib.
-    if output.endswith('.xib'):
-      output = os.path.splitext(output)[0] + '.nib'
-    # Compiled storyboard files are referred to by .storyboardc.
-    if output.endswith('.storyboard'):
-      output = os.path.splitext(output)[0] + '.storyboardc'
-
-    yield output, res
-
-
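A worked example of the destination mapping performed by GetMacBundleResources, with an illustrative Resources folder and resource path:

import os

def resource_dest(dest_root, res):
    # Keep an enclosing .lproj directory and map compiled .xib/.storyboard
    # resources to their .nib/.storyboardc outputs, as in the generator above.
    output = dest_root
    res_dir, res_file = os.path.split(res)
    lproj = os.path.split(res_dir)[1]
    if lproj.endswith('.lproj'):
        output = os.path.join(output, lproj)
    output = os.path.join(output, res_file)
    if output.endswith('.xib'):
        output = os.path.splitext(output)[0] + '.nib'
    if output.endswith('.storyboard'):
        output = os.path.splitext(output)[0] + '.storyboardc'
    return output

print(resource_dest('App.app/Contents/Resources', 'res/en.lproj/Main.xib'))
# -> App.app/Contents/Resources/en.lproj/Main.nib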
-def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
-  """Returns (info_plist, dest_plist, defines, extra_env), where:
-  * |info_plist| is the source plist path, relative to the
-    build directory,
-  * |dest_plist| is the destination plist path, relative to the
-    build directory,
-  * |defines| is a list of preprocessor defines (empty if the plist
-    shouldn't be preprocessed),
-  * |extra_env| is a dict of env variables that should be exported when
-    invoking |mac_tool copy-info-plist|.
-
-  Only call this for mac bundle targets.
-
-  Args:
-      product_dir: Path to the directory containing the output bundle,
-          relative to the build directory.
-      xcode_settings: The XcodeSettings of the current target.
-      gyp_path_to_build_path: A function that converts paths relative to the
-          current gyp file to paths relative to the build directory.
-  """
-  info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
-  if not info_plist:
-    return None, None, [], {}
-
-  # The make generator doesn't support it, so forbid it everywhere
-  # to keep the generators more interchangeable.
-  assert ' ' not in info_plist, (
-    "Spaces in Info.plist filenames not supported (%s)"  % info_plist)
-
-  info_plist = gyp_path_to_build_path(info_plist)
-
-  # If explicitly set to preprocess the plist, invoke the C preprocessor and
-  # specify any defines as -D flags.
-  if xcode_settings.GetPerTargetSetting(
-      'INFOPLIST_PREPROCESS', default='NO') == 'YES':
-    # Create an intermediate file based on the path.
-    defines = shlex.split(xcode_settings.GetPerTargetSetting(
-        'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
-  else:
-    defines = []
-
-  dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
-  extra_env = xcode_settings.GetPerTargetSettings()
-
-  return info_plist, dest_plist, defines, extra_env
-
-
-def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
-                additional_settings=None):
-  """Return the environment variables that Xcode would set. See
-  http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
-  for a full list.
-
-  Args:
-      xcode_settings: An XcodeSettings object. If this is None, this function
-          returns an empty dict.
-      built_products_dir: Absolute path to the built products dir.
-      srcroot: Absolute path to the source root.
-      configuration: The build configuration name.
-      additional_settings: An optional dict with more values to add to the
-          result.
-  """
-  if not xcode_settings: return {}
-
-  # This function is considered a friend of XcodeSettings, so let it reach into
-  # its implementation details.
-  spec = xcode_settings.spec
-
-  # These are filled in on an as-needed basis.
-  env = {
-    'BUILT_FRAMEWORKS_DIR' : built_products_dir,
-    'BUILT_PRODUCTS_DIR' : built_products_dir,
-    'CONFIGURATION' : configuration,
-    'PRODUCT_NAME' : xcode_settings.GetProductName(),
-    # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
-    'SRCROOT' : srcroot,
-    'SOURCE_ROOT': '${SRCROOT}',
-    # This is not true for static libraries, but currently the env is only
-    # written for bundles:
-    'TARGET_BUILD_DIR' : built_products_dir,
-    'TEMP_DIR' : '${TMPDIR}',
-    'XCODE_VERSION_ACTUAL' : XcodeVersion()[0],
-  }
-  if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
-    env['SDKROOT'] = xcode_settings._SdkPath(configuration)
-  else:
-    env['SDKROOT'] = ''
-
-  if xcode_settings.mac_toolchain_dir:
-    env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir
-
-  if spec['type'] in (
-      'executable', 'static_library', 'shared_library', 'loadable_module'):
-    env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
-    env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
-    env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
-    mach_o_type = xcode_settings.GetMachOType()
-    if mach_o_type:
-      env['MACH_O_TYPE'] = mach_o_type
-    env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
-  if xcode_settings._IsBundle():
-    env['CONTENTS_FOLDER_PATH'] = \
-      xcode_settings.GetBundleContentsFolderPath()
-    env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
-        xcode_settings.GetBundleResourceFolder()
-    env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
-    env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
-
-  install_name = xcode_settings.GetInstallName()
-  if install_name:
-    env['LD_DYLIB_INSTALL_NAME'] = install_name
-  install_name_base = xcode_settings.GetInstallNameBase()
-  if install_name_base:
-    env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
-  if XcodeVersion()[0] >= '0500' and not env.get('SDKROOT'):
-    sdk_root = xcode_settings._SdkRoot(configuration)
-    if not sdk_root:
-      sdk_root = xcode_settings._XcodeSdkPath('')
-    env['SDKROOT'] = sdk_root
-
-  if not additional_settings:
-    additional_settings = {}
-  else:
-    # Flatten lists to strings.
-    for k in additional_settings:
-      if not isinstance(additional_settings[k], str):
-        additional_settings[k] = ' '.join(additional_settings[k])
-  additional_settings.update(env)
-
-  for k in additional_settings:
-    additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
-
-  return additional_settings
-
-
-def _NormalizeEnvVarReferences(str):
-  """Takes a string containing variable references in the form ${FOO}, $(FOO),
-  or $FOO, and returns a string with all variable references in the form ${FOO}.
-  """
-  # $FOO -> ${FOO}
-  str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
-
-  # $(FOO) -> ${FOO}
-  matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
-  for match in matches:
-    to_replace, variable = match
-    assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
-    str = str.replace(to_replace, '${' + variable + '}')
-
-  return str
-
-
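A standalone sketch of the normalization done by _NormalizeEnvVarReferences, using re.sub for both forms so the three reference styles can be compared side by side:

import re

def normalize_refs(value):
    # $FOO -> ${FOO}, then $(FOO) -> ${FOO}; ${FOO} is already canonical.
    value = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', value)
    value = re.sub(r'\$\(([a-zA-Z0-9\-_]+)\)', r'${\1}', value)
    return value

print(normalize_refs('$(PRODUCT_NAME).app in $BUILT_PRODUCTS_DIR'))
# -> ${PRODUCT_NAME}.app in ${BUILT_PRODUCTS_DIR}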
-def ExpandEnvVars(string, expansions):
-  """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
-  expansions list. If the variable expands to something that references
-  another variable, that variable is expanded as well if it's in |expansions| --
-  until no variables present in |expansions| are left."""
-  for k, v in reversed(expansions):
-    string = string.replace('${' + k + '}', v)
-    string = string.replace('$(' + k + ')', v)
-    string = string.replace('$' + k, v)
-  return string
-
-
-def _TopologicallySortedEnvVarKeys(env):
-  """Takes a dict |env| whose values are strings that can refer to other keys,
-  for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
-  env such that key2 is after key1 in L if env[key2] refers to env[key1].
-
-  Throws an Exception in case of dependency cycles.
-  """
-  # Since environment variables can refer to other variables, the evaluation
-  # order is important. Below is the logic to compute the dependency graph
-  # and sort it.
-  regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
-  def GetEdges(node):
-    # Use a definition of edges such that user_of_variable -> used_variable.
-    # This happens to be easier in this case, since a variable's
-    # definition contains all variables it references in a single string.
-    # We can then reverse the result of the topological sort at the end.
-    # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
-    matches = set([v for v in regex.findall(env[node]) if v in env])
-    for dependee in matches:
-      assert '${' not in dependee, 'Nested variables not supported: ' + dependee
-    return matches
-
-  try:
-    # Topologically sort, and then reverse, because we used an edge definition
-    # that's inverted from the expected result of this function (see comment
-    # above).
-    order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
-    order.reverse()
-    return order
-  except gyp.common.CycleError, e:
-    raise GypError(
-        'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
-
-
-def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
-                      configuration, additional_settings=None):
-  env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
-                    additional_settings)
-  return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
-
-
-def GetSpecPostbuildCommands(spec, quiet=False):
-  """Returns the list of postbuilds explicitly defined on |spec|, in a form
-  executable by a shell."""
-  postbuilds = []
-  for postbuild in spec.get('postbuilds', []):
-    if not quiet:
-      postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
-            spec['target_name'], postbuild['postbuild_name']))
-    postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
-  return postbuilds
-
-
-def _HasIOSTarget(targets):
-  """Returns true if any target contains the iOS specific key
-  IPHONEOS_DEPLOYMENT_TARGET."""
-  for target_dict in targets.values():
-    for config in target_dict['configurations'].values():
-      if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
-        return True
-  return False
-
-
-def _AddIOSDeviceConfigurations(targets):
-  """Clone all targets and append -iphoneos to the name. Configure these targets
-  to build for iOS devices and use correct architectures for those builds."""
-  for target_dict in targets.itervalues():
-    toolset = target_dict['toolset']
-    configs = target_dict['configurations']
-    for config_name, simulator_config_dict in dict(configs).iteritems():
-      iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
-      configs[config_name + '-iphoneos'] = iphoneos_config_dict
-      configs[config_name + '-iphonesimulator'] = simulator_config_dict
-      if toolset == 'target':
-        simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator'
-        iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
-  return targets
-
-def CloneConfigurationForDeviceAndEmulator(target_dicts):
-  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
-  targets for iOS device builds."""
-  if _HasIOSTarget(target_dicts):
-    return _AddIOSDeviceConfigurations(target_dicts)
-  return target_dicts
diff --git a/tools/gyp/pylib/gyp/xcode_ninja.py b/tools/gyp/pylib/gyp/xcode_ninja.py
deleted file mode 100644
index bc76fff..0000000
--- a/tools/gyp/pylib/gyp/xcode_ninja.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode-ninja wrapper project file generator.
-
-This updates the data structures passed to the Xcode gyp generator to build
-with ninja instead. The Xcode project itself is transformed into a list of
-executable targets, each with a build step to build with ninja, and a target
-with every source and resource file.  This appears to sidestep some of the
-major performance headaches experienced using complex projects and a large number
-of targets within Xcode.
-"""
-
-import errno
-import gyp.generator.ninja
-import os
-import re
-import xml.sax.saxutils
-
-
-def _WriteWorkspace(main_gyp, sources_gyp, params):
-  """ Create a workspace to wrap main and sources gyp paths. """
-  (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
-  workspace_path = build_file_root + '.xcworkspace'
-  options = params['options']
-  if options.generator_output:
-    workspace_path = os.path.join(options.generator_output, workspace_path)
-  try:
-    os.makedirs(workspace_path)
-  except OSError, e:
-    if e.errno != errno.EEXIST:
-      raise
-  output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
-                  '<Workspace version = "1.0">\n'
-  for gyp_name in [main_gyp, sources_gyp]:
-    name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
-    name = xml.sax.saxutils.quoteattr("group:" + name)
-    output_string += '  <FileRef location = %s></FileRef>\n' % name
-  output_string += '</Workspace>\n'
-
-  workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
-
-  try:
-    with open(workspace_file, 'r') as input_file:
-      input_string = input_file.read()
-      if input_string == output_string:
-        return
-  except IOError:
-    # Ignore errors if the file doesn't exist.
-    pass
-
-  with open(workspace_file, 'w') as output_file:
-    output_file.write(output_string)
-
-def _TargetFromSpec(old_spec, params):
-  """ Create fake target for xcode-ninja wrapper. """
-  # Determine ninja top level build dir (e.g. /path/to/out).
-  ninja_toplevel = None
-  jobs = 0
-  if params:
-    options = params['options']
-    ninja_toplevel = \
-        os.path.join(options.toplevel_dir,
-                     gyp.generator.ninja.ComputeOutputDir(params))
-    jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
-
-  target_name = old_spec.get('target_name')
-  product_name = old_spec.get('product_name', target_name)
-  product_extension = old_spec.get('product_extension')
-
-  ninja_target = {}
-  ninja_target['target_name'] = target_name
-  ninja_target['product_name'] = product_name
-  if product_extension:
-    ninja_target['product_extension'] = product_extension
-  ninja_target['toolset'] = old_spec.get('toolset')
-  ninja_target['default_configuration'] = old_spec.get('default_configuration')
-  ninja_target['configurations'] = {}
-
-  # Tell Xcode to look in |ninja_toplevel| for build products.
-  new_xcode_settings = {}
-  if ninja_toplevel:
-    new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
-        "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
-
-  if 'configurations' in old_spec:
-    for config in old_spec['configurations'].iterkeys():
-      old_xcode_settings = \
-        old_spec['configurations'][config].get('xcode_settings', {})
-      if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
-        new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
-        new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
-            old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
-      for key in ['BUNDLE_LOADER', 'TEST_HOST']:
-        if key in old_xcode_settings:
-          new_xcode_settings[key] = old_xcode_settings[key]
-
-      ninja_target['configurations'][config] = {}
-      ninja_target['configurations'][config]['xcode_settings'] = \
-          new_xcode_settings
-
-  ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
-  ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0)
-  ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
-  ninja_target['ios_watchkit_extension'] = \
-      old_spec.get('ios_watchkit_extension', 0)
-  ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
-  ninja_target['type'] = old_spec['type']
-  if ninja_toplevel:
-    ninja_target['actions'] = [
-      {
-        'action_name': 'Compile and copy %s via ninja' % target_name,
-        'inputs': [],
-        'outputs': [],
-        'action': [
-          'env',
-          'PATH=%s' % os.environ['PATH'],
-          'ninja',
-          '-C',
-          new_xcode_settings['CONFIGURATION_BUILD_DIR'],
-          target_name,
-        ],
-        'message': 'Compile and copy %s via ninja' % target_name,
-      },
-    ]
-    if jobs > 0:
-      ninja_target['actions'][0]['action'].extend(('-j', jobs))
-  return ninja_target
-
-def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
-  """Limit targets for Xcode wrapper.
-
-  Xcode sometimes performs poorly with too many targets, so only include
-  proper executable targets, with filters to customize.
-  Arguments:
-    target_extras: Regular expression to always add, matching any target.
-    executable_target_pattern: Regular expression limiting executable targets.
-    spec: Specifications for target.
-  """
-  target_name = spec.get('target_name')
-  # Always include targets matching target_extras.
-  if target_extras is not None and re.search(target_extras, target_name):
-    return True
-
-  # Otherwise just show executable targets and xc_tests.
-  if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
-      (spec.get('type', '') == 'executable' and
-       spec.get('product_extension', '') != 'bundle')):
-
-    # If there is a filter and the target does not match, exclude the target.
-    if executable_target_pattern is not None:
-      if not re.search(executable_target_pattern, target_name):
-        return False
-    return True
-  return False
-
-def CreateWrapper(target_list, target_dicts, data, params):
-  """Initialize targets for the ninja wrapper.
-
-  This sets up the necessary variables in the targets to generate Xcode projects
-  that use ninja as an external builder.
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    data: Dict of flattened build files keyed on gyp path.
-    params: Dict of global options for gyp.
-  """
-  orig_gyp = params['build_files'][0]
-  for gyp_name, gyp_dict in data.iteritems():
-    if gyp_name == orig_gyp:
-      depth = gyp_dict['_DEPTH']
-
-  # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
-  # and prepend .ninja before the .gyp extension.
-  generator_flags = params.get('generator_flags', {})
-  main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
-  if main_gyp is None:
-    (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
-    main_gyp = build_file_root + ".ninja" + build_file_ext
-
-  # Create new |target_list|, |target_dicts| and |data| data structures.
-  new_target_list = []
-  new_target_dicts = {}
-  new_data = {}
-
-  # Set base keys needed for |data|.
-  new_data[main_gyp] = {}
-  new_data[main_gyp]['included_files'] = []
-  new_data[main_gyp]['targets'] = []
-  new_data[main_gyp]['xcode_settings'] = \
-      data[orig_gyp].get('xcode_settings', {})
-
-  # Normally the xcode-ninja generator includes only valid executable targets.
-  # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
-  # executable targets that match the pattern. (Default all)
-  executable_target_pattern = \
-      generator_flags.get('xcode_ninja_executable_target_pattern', None)
-
-  # For including other non-executable targets, add the matching target name
-  # to the |xcode_ninja_target_pattern| regular expression. (Default none)
-  target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
-
-  for old_qualified_target in target_list:
-    spec = target_dicts[old_qualified_target]
-    if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
-      # Add to new_target_list.
-      target_name = spec.get('target_name')
-      new_target_name = '%s:%s#target' % (main_gyp, target_name)
-      new_target_list.append(new_target_name)
-
-      # Add to new_target_dicts.
-      new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
-
-      # Add to new_data.
-      for old_target in data[old_qualified_target.split(':')[0]]['targets']:
-        if old_target['target_name'] == target_name:
-          new_data_target = {}
-          new_data_target['target_name'] = old_target['target_name']
-          new_data_target['toolset'] = old_target['toolset']
-          new_data[main_gyp]['targets'].append(new_data_target)
-
-  # Create sources target.
-  sources_target_name = 'sources_for_indexing'
-  sources_target = _TargetFromSpec(
-    { 'target_name' : sources_target_name,
-      'toolset': 'target',
-      'default_configuration': 'Default',
-      'mac_bundle': '0',
-      'type': 'executable'
-    }, None)
-
-  # Tell Xcode to look everywhere for headers.
-  sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
-
-  # Put excluded files into the sources target so they can be opened in Xcode.
-  skip_excluded_files = \
-      not generator_flags.get('xcode_ninja_list_excluded_files', True)
-
-  sources = []
-  for target, target_dict in target_dicts.iteritems():
-    base = os.path.dirname(target)
-    files = target_dict.get('sources', []) + \
-            target_dict.get('mac_bundle_resources', [])
-
-    if not skip_excluded_files:
-      files.extend(target_dict.get('sources_excluded', []) +
-                   target_dict.get('mac_bundle_resources_excluded', []))
-
-    for action in target_dict.get('actions', []):
-      files.extend(action.get('inputs', []))
-
-      if not skip_excluded_files:
-        files.extend(action.get('inputs_excluded', []))
-
-    # Remove files starting with $. These are mostly intermediate files for the
-    # build system.
-    files = [ file for file in files if not file.startswith('$')]
-
-    # Make sources relative to root build file.
-    relative_path = os.path.dirname(main_gyp)
-    sources += [ os.path.relpath(os.path.join(base, file), relative_path)
-                    for file in files ]
-
-  sources_target['sources'] = sorted(set(sources))
-
-  # Put sources_to_index in its own gyp.
-  sources_gyp = \
-      os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
-  fully_qualified_target_name = \
-      '%s:%s#target' % (sources_gyp, sources_target_name)
-
-  # Add to new_target_list, new_target_dicts and new_data.
-  new_target_list.append(fully_qualified_target_name)
-  new_target_dicts[fully_qualified_target_name] = sources_target
-  new_data_target = {}
-  new_data_target['target_name'] = sources_target['target_name']
-  new_data_target['_DEPTH'] = depth
-  new_data_target['toolset'] = "target"
-  new_data[sources_gyp] = {}
-  new_data[sources_gyp]['targets'] = []
-  new_data[sources_gyp]['included_files'] = []
-  new_data[sources_gyp]['xcode_settings'] = \
-      data[orig_gyp].get('xcode_settings', {})
-  new_data[sources_gyp]['targets'].append(new_data_target)
-
-  # Write workspace to file.
-  _WriteWorkspace(main_gyp, sources_gyp, params)
-  return (new_target_list, new_target_dicts, new_data)
diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py
deleted file mode 100644
index 1bc90c7..0000000
--- a/tools/gyp/pylib/gyp/xcodeproj_file.py
+++ /dev/null
@@ -1,2931 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode project file generator.
-
-This module is both an Xcode project file generator and a documentation of the
-Xcode project file format.  Knowledge of the project file format was gained
-based on extensive experience with Xcode, and by making changes to projects in
-Xcode.app and observing the resultant changes in the associated project files.
-
-XCODE PROJECT FILES
-
-The generator targets the file format as written by Xcode 3.2 (specifically,
-3.2.6), but past experience has taught that the format has not changed
-significantly in the past several years, and future versions of Xcode are able
-to read older project files.
-
-Xcode project files are "bundled": the project "file" from an end-user's
-perspective is actually a directory with an ".xcodeproj" extension.  The
-project file from this module's perspective is actually a file inside this
-directory, always named "project.pbxproj".  This file contains a complete
-description of the project and is all that is needed to use the xcodeproj.
-Other files contained in the xcodeproj directory are simply used to store
-per-user settings, such as the state of various UI elements in the Xcode
-application.
-
-The project.pbxproj file is a property list, stored in a format almost
-identical to the NeXTstep property list format.  The file is able to carry
-Unicode data, and is encoded in UTF-8.  The root element in the property list
-is a dictionary that contains several properties of minimal interest, and two
-properties of immense interest.  The most important property is a dictionary
-named "objects".  The entire structure of the project is represented by the
-children of this property.  The objects dictionary is keyed by unique 96-bit
-values represented by 24 uppercase hexadecimal characters.  Each value in the
-objects dictionary is itself a dictionary, describing an individual object.
-
-Each object in the dictionary is a member of a class, which is identified by
-the "isa" property of each object.  A variety of classes are represented in a
-project file.  Objects can refer to other objects by ID, using the 24-character
-hexadecimal object key.  A project's objects form a tree, with a root object
-of class PBXProject at the root.  As an example, the PBXProject object serves
-as parent to an XCConfigurationList object defining the build configurations
-used in the project, a PBXGroup object serving as a container for all files
-referenced in the project, and a list of target objects, each of which defines
-a target in the project.  There are several different types of target object,
-such as PBXNativeTarget and PBXAggregateTarget.  In this module, this
-relationship is expressed by having each target type derive from an abstract
-base named XCTarget.
-
-The project.pbxproj file's root dictionary also contains a property, sibling to
-the "objects" dictionary, named "rootObject".  The value of rootObject is a
-24-character object key referring to the root PBXProject object in the
-objects dictionary.
-
-In Xcode, every file used as input to a target or produced as a final product
-of a target must appear somewhere in the hierarchy rooted at the PBXGroup
-object referenced by the PBXProject's mainGroup property.  A PBXGroup is
-generally represented as a folder in the Xcode application.  PBXGroups can
-contain other PBXGroups as well as PBXFileReferences, which are pointers to
-actual files.
-
-Each XCTarget contains a list of build phases, represented in this module by
-the abstract base XCBuildPhase.  Examples of concrete XCBuildPhase derivations
-are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
-"Compile Sources" and "Link Binary With Libraries" phases displayed in the
-Xcode application.  Files used as input to these phases (for example, source
-files in the former case and libraries and frameworks in the latter) are
-represented by PBXBuildFile objects, referenced by elements of "files" lists
-in XCTarget objects.  Each PBXBuildFile object refers to a PBXFileReference
-object as a "weak" reference: it does not "own" the PBXFileReference, which is
-owned by the root object's mainGroup or a descendant group.  In most cases, the
-layer of indirection between an XCBuildPhase and a PBXFileReference via a
-PBXBuildFile appears extraneous, but there's actually one reason for this:
-file-specific compiler flags are added to the PBXBuildFile object so as to
-allow a single file to be a member of multiple targets while having distinct
-compiler flags for each.  These flags can be modified in the Xcode application
-in the "Build" tab of a File Info window.
-
-When a project is open in the Xcode application, Xcode will rewrite it.  As
-such, this module is careful to adhere to the formatting used by Xcode, to
-avoid insignificant changes appearing in the file when it is used in the
-Xcode application.  This will keep version control repositories happy, and
-makes it possible to compare a project file used in Xcode to one generated by
-this module to determine if any significant changes were made in the
-application.
-
-Xcode has its own way of assigning 24-character identifiers to each object,
-which is not duplicated here.  Because the identifier is only generated
-once, when an object is created, and is then left unchanged, there is no need
-to attempt to duplicate Xcode's behavior in this area.  The generator is free
-to select any identifier, even at random, to refer to the objects it creates,
-and Xcode will retain those identifiers and use them when subsequently
-rewriting the project file.  However, the generator would choose new random
-identifiers each time the project files are generated, leading to difficulties
-comparing "used" project files to "pristine" ones produced by this module,
-and causing the appearance of changes as every object identifier is changed
-when updated projects are checked in to a version control repository.  To
-mitigate this problem, this module chooses identifiers in a more deterministic
-way, by hashing a description of each object as well as its parent and ancestor
-objects.  This strategy should result in minimal "shift" in IDs as successive
-generations of project files are produced.
-
-THIS MODULE
-
-This module introduces several classes, all derived from the XCObject class.
-Nearly all of the "brains" are built into the XCObject class, which understands
-how to create and modify objects, maintain the proper tree structure, compute
-identifiers, and print objects.  For the most part, classes derived from
-XCObject need only provide a _schema class object, a dictionary that
-expresses what properties objects of the class may contain.
-
-Given this structure, it's possible to build a minimal project file by creating
-objects of the appropriate types and making the proper connections:
-
-  config_list = XCConfigurationList()
-  group = PBXGroup()
-  project = PBXProject({'buildConfigurationList': config_list,
-                        'mainGroup': group})
-
-With the project object set up, it can be added to an XCProjectFile object.
-XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
-subclass that does not actually correspond to a class type found in a project
-file.  Rather, it is used to represent the project file's root dictionary.
-Printing an XCProjectFile will print the entire project file, including the
-full "objects" dictionary.
-
-  project_file = XCProjectFile({'rootObject': project})
-  project_file.ComputeIDs()
-  project_file.Print()
-
-Xcode project files are always encoded in UTF-8.  This module will accept
-strings of either the str class or the unicode class.  Strings of class str
-are assumed to already be encoded in UTF-8.  Obviously, if you're just using
-ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
-Strings of class unicode are handled properly and encoded in UTF-8 when
-a project file is output.
-"""
-
-import gyp.common
-import posixpath
-import re
-import struct
-import sys
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for sha
-# and other secure hashes.  In 2.6, sha is deprecated.  Import hashlib if
-# available, avoiding a deprecation warning under 2.6.  Import sha otherwise,
-# preserving 2.4 compatibility.
-try:
-  import hashlib
-  _new_sha1 = hashlib.sha1
-except ImportError:
-  import sha
-  _new_sha1 = sha.new
-
-
-# See XCObject._EncodeString.  This pattern is used to determine when a string
-# can be printed unquoted.  Strings that match this pattern may be printed
-# unquoted.  Strings that do not match must be quoted and may be further
-# transformed to be properly encoded.  Note that this expression matches the
-# characters listed with "+", for 1 or more occurrences: if a string is empty,
-# it must not match this pattern, because it needs to be encoded as "".
-_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
-
-# Strings that match this pattern are quoted regardless of what _unquoted says.
-# Oddly, Xcode will quote any string with a run of three or more underscores.
-_quoted = re.compile('___')
-
-# This pattern should match any character that needs to be escaped by
-# XCObject._EncodeString.  See that function.
-_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
-
-
-# Used by SourceTreeAndPathFromPath
-_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
-
-def SourceTreeAndPathFromPath(input_path):
-  """Given input_path, returns a tuple with sourceTree and path values.
-
-  Examples:
-    input_path     (source_tree, output_path)
-    '$(VAR)/path'  ('VAR', 'path')
-    '$(VAR)'       ('VAR', None)
-    'path'         (None, 'path')
-  """
-
-  source_group_match = _path_leading_variable.match(input_path)
-  if source_group_match:
-    source_tree = source_group_match.group(1)
-    output_path = source_group_match.group(3)  # This may be None.
-  else:
-    source_tree = None
-    output_path = input_path
-
-  return (source_tree, output_path)
-
-def ConvertVariablesToShellSyntax(input_string):
-  return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
-
-class XCObject(object):
-  """The abstract base of all class types used in Xcode project files.
-
-  Class variables:
-    _schema: A dictionary defining the properties of this class.  The keys to
-             _schema are string property keys as used in project files.  Values
-             are a list of four or five elements:
-             [ is_list, property_type, is_strong, is_required, default ]
-             is_list: True if the property described is a list, as opposed
-                      to a single element.
-             property_type: The type to use as the value of the property,
-                            or if is_list is True, the type to use for each
-                            element of the value's list.  property_type must
-                            be an XCObject subclass, or one of the built-in
-                            types str, int, or dict.
-             is_strong: If property_type is an XCObject subclass, is_strong
-                        is True to assert that this class "owns," or serves
-                        as parent, to the property value (or, if is_list is
-                        True, values).  is_strong must be False if
-                        property_type is not an XCObject subclass.
-             is_required: True if the property is required for the class.
-                          Note that is_required being True does not preclude
-                          an empty string ("", in the case of property_type
-                          str) or list ([], in the case of is_list True) from
-                          being set for the property.
-             default: Optional.  If is_required is True, default may be set
-                      to provide a default value for objects that do not supply
-                      their own value.  If is_required is True and default
-                      is not provided, users of the class must supply their own
-                      value for the property.
-             Note that although the values of the array are expressed in
-             boolean terms, subclasses provide values as integers to conserve
-             horizontal space.
-    _should_print_single_line: False in XCObject.  Subclasses whose objects
-                               should be written to the project file in the
-                               alternate single-line format, such as
-                               PBXFileReference and PBXBuildFile, should
-                               set this to True.
-    _encode_transforms: Used by _EncodeString to encode unprintable characters.
-                        The index into this list is the ordinal of the
-                        character to transform; each value is a string
-                        used to represent the character in the output.  XCObject
-                        provides an _encode_transforms list suitable for most
-                        XCObject subclasses.
-    _alternate_encode_transforms: Provided for subclasses that wish to use
-                                  the alternate encoding rules.  Xcode seems
-                                  to use these rules when printing objects in
-                                  single-line format.  Subclasses that desire
-                                  this behavior should set _encode_transforms
-                                  to _alternate_encode_transforms.
-    _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
-                to construct this object's ID.  Most classes that need custom
-                hashing behavior should do it by overriding Hashables,
-                but in some cases an object's parent may wish to push a
-                hashable value into its child, and it can do so by appending
-                to _hashables.
-  Attributes:
-    id: The object's identifier, a 24-character uppercase hexadecimal string.
-        Usually, objects being created should not set id until the entire
-        project file structure is built.  At that point, UpdateIDs() should
-        be called on the root object to assign deterministic values for id to
-        each object in the tree.
-    parent: The object's parent.  This is set by a parent XCObject when a child
-            object is added to it.
-    _properties: The object's property dictionary.  An object's properties are
-                 described by its class' _schema variable.
-  """
-
-  _schema = {}
-  _should_print_single_line = False
-
-  # See _EncodeString.
-  _encode_transforms = []
-  i = 0
-  while i < ord(' '):
-    _encode_transforms.append('\\U%04x' % i)
-    i = i + 1
-  _encode_transforms[7] = '\\a'
-  _encode_transforms[8] = '\\b'
-  _encode_transforms[9] = '\\t'
-  _encode_transforms[10] = '\\n'
-  _encode_transforms[11] = '\\v'
-  _encode_transforms[12] = '\\f'
-  _encode_transforms[13] = '\\n'
-
-  _alternate_encode_transforms = list(_encode_transforms)
-  _alternate_encode_transforms[9] = chr(9)
-  _alternate_encode_transforms[10] = chr(10)
-  _alternate_encode_transforms[11] = chr(11)
-
-  def __init__(self, properties=None, id=None, parent=None):
-    self.id = id
-    self.parent = parent
-    self._properties = {}
-    self._hashables = []
-    self._SetDefaultsFromSchema()
-    self.UpdateProperties(properties)
-
-  def __repr__(self):
-    try:
-      name = self.Name()
-    except NotImplementedError:
-      return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
-    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
-  def Copy(self):
-    """Make a copy of this object.
-
-    The new object will have its own copy of lists and dicts.  Any XCObject
-    objects owned by this object (marked "strong") will be copied in the
-    new object, even those found in lists.  If this object has any weak
-    references to other XCObjects, the same references are added to the new
-    object without making a copy.
-    """
-
-    that = self.__class__(id=self.id, parent=self.parent)
-    for key, value in self._properties.iteritems():
-      is_strong = self._schema[key][2]
-
-      if isinstance(value, XCObject):
-        if is_strong:
-          new_value = value.Copy()
-          new_value.parent = that
-          that._properties[key] = new_value
-        else:
-          that._properties[key] = value
-      elif isinstance(value, str) or isinstance(value, unicode) or \
-           isinstance(value, int):
-        that._properties[key] = value
-      elif isinstance(value, list):
-        if is_strong:
-          # If is_strong is True, each element is an XCObject, so it's safe to
-          # call Copy.
-          that._properties[key] = []
-          for item in value:
-            new_item = item.Copy()
-            new_item.parent = that
-            that._properties[key].append(new_item)
-        else:
-          that._properties[key] = value[:]
-      elif isinstance(value, dict):
-        # dicts are never strong.
-        if is_strong:
-          raise TypeError('Strong dict for key ' + key + ' in ' + \
-                          self.__class__.__name__)
-        else:
-          that._properties[key] = value.copy()
-      else:
-        raise TypeError('Unexpected type ' + value.__class__.__name__ + \
-                        ' for key ' + key + ' in ' + self.__class__.__name__)
-
-    return that
-
-  def Name(self):
-    """Return the name corresponding to an object.
-
-    Not all objects necessarily need to be nameable, and not all that do have
-    a "name" property.  Override as needed.
-    """
-
-    # If the schema indicates that "name" is required, try to access the
-    # property even if it doesn't exist.  This will result in a KeyError
-    # being raised for the property that should be present, which seems more
-    # appropriate than NotImplementedError in this case.
-    if 'name' in self._properties or \
-        ('name' in self._schema and self._schema['name'][3]):
-      return self._properties['name']
-
-    raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
-
-  def Comment(self):
-    """Return a comment string for the object.
-
-    Most objects just use their name as the comment, but PBXProject uses
-    different values.
-
-    The returned comment is not escaped and does not have any comment marker
-    strings applied to it.
-    """
-
-    return self.Name()
-
-  def Hashables(self):
-    hashables = [self.__class__.__name__]
-
-    name = self.Name()
-    if name != None:
-      hashables.append(name)
-
-    hashables.extend(self._hashables)
-
-    return hashables
-
-  def HashablesForChild(self):
-    return None
-
-  def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
-    """Set "id" properties deterministically.
-
-    An object's "id" property is set based on a hash of its class type and
-    name, as well as the class type and name of all ancestor objects.  As
-    such, it is only advisable to call ComputeIDs once an entire project file
-    tree is built.
-
-    If recursive is True, recurse into all descendant objects and update their
-    hashes.
-
-    If overwrite is True, any existing value set in the "id" property will be
-    replaced.
-    """
-
-    def _HashUpdate(hash, data):
-      """Update hash with data's length and contents.
-
-      If the hash were updated only with the value of data, it would be
-      possible for clowns to induce collisions by manipulating the names of
-      their objects.  By adding the length, it's far less likely that
-      ID collisions will be encountered, intentionally or not.
-      """
-
-      hash.update(struct.pack('>i', len(data)))
-      hash.update(data)
-
-    if seed_hash is None:
-      seed_hash = _new_sha1()
-
-    hash = seed_hash.copy()
-
-    hashables = self.Hashables()
-    assert len(hashables) > 0
-    for hashable in hashables:
-      _HashUpdate(hash, hashable)
-
-    if recursive:
-      hashables_for_child = self.HashablesForChild()
-      if hashables_for_child is None:
-        child_hash = hash
-      else:
-        assert len(hashables_for_child) > 0
-        child_hash = seed_hash.copy()
-        for hashable in hashables_for_child:
-          _HashUpdate(child_hash, hashable)
-
-      for child in self.Children():
-        child.ComputeIDs(recursive, overwrite, child_hash)
-
-    if overwrite or self.id is None:
-      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
-      # 160 bits.  Instead of throwing out 64 bits of the digest, xor them
-      # into the portion that gets used.
-      assert hash.digest_size % 4 == 0
-      digest_int_count = hash.digest_size / 4
-      digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
-      id_ints = [0, 0, 0]
-      for index in xrange(0, digest_int_count):
-        id_ints[index % 3] ^= digest_ints[index]
-      self.id = '%08X%08X%08X' % tuple(id_ints)
-
-  def EnsureNoIDCollisions(self):
-    """Verifies that no two objects have the same ID.  Checks all descendants.
-    """
-
-    ids = {}
-    descendants = self.Descendants()
-    for descendant in descendants:
-      if descendant.id in ids:
-        other = ids[descendant.id]
-        raise KeyError(
-              'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
-              (descendant.id, str(descendant._properties),
-               str(other._properties), self._properties['rootObject'].Name()))
-      ids[descendant.id] = descendant
-
-  def Children(self):
-    """Returns a list of all of this object's owned (strong) children."""
-
-    children = []
-    for property, attributes in self._schema.iteritems():
-      (is_list, property_type, is_strong) = attributes[0:3]
-      if is_strong and property in self._properties:
-        if not is_list:
-          children.append(self._properties[property])
-        else:
-          children.extend(self._properties[property])
-    return children
-
-  def Descendants(self):
-    """Returns a list of all of this object's descendants, including this
-    object.
-    """
-
-    children = self.Children()
-    descendants = [self]
-    for child in children:
-      descendants.extend(child.Descendants())
-    return descendants
-
-  def PBXProjectAncestor(self):
-    # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
-    if self.parent:
-      return self.parent.PBXProjectAncestor()
-    return None
-
-  def _EncodeComment(self, comment):
-    """Encodes a comment to be placed in the project file output, mimicing
-    Xcode behavior.
-    """
-
-    # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".  If
-    # the string already contains a "*/", it is turned into "(*)/".  This keeps
-    # the file writer from outputting something that would be treated as the
-    # end of a comment in the middle of something intended to be entirely a
-    # comment.
-
-    return '/* ' + comment.replace('*/', '(*)/') + ' */'
-
-  def _EncodeTransform(self, match):
-    # This function works closely with _EncodeString.  It will only be called
-    # by re.sub with match.group(0) containing a character matched by the
-    # _escaped expression.
-    char = match.group(0)
-
-    # Backslashes (\) and quotation marks (") are always replaced with a
-    # backslash-escaped version of the same.  Everything else gets its
-    # replacement from the class' _encode_transforms array.
-    if char == '\\':
-      return '\\\\'
-    if char == '"':
-      return '\\"'
-    return self._encode_transforms[ord(char)]
-
-  def _EncodeString(self, value):
-    """Encodes a string to be placed in the project file output, mimicing
-    Xcode behavior.
-    """
-
-    # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
-    # $ (dollar sign), . (period), and _ (underscore) is present.  Also use
-    # quotation marks to represent empty strings.
-    #
-    # Escape " (double-quote) and \ (backslash) by preceding them with a
-    # backslash.
-    #
-    # Some characters below the printable ASCII range are encoded specially:
-    #     7 ^G BEL is encoded as "\a"
-    #     8 ^H BS  is encoded as "\b"
-    #    11 ^K VT  is encoded as "\v"
-    #    12 ^L NP  is encoded as "\f"
-    #   127 ^? DEL is passed through as-is without escaping
-    #  - In PBXFileReference and PBXBuildFile objects:
-    #     9 ^I HT  is passed through as-is without escaping
-    #    10 ^J NL  is passed through as-is without escaping
-    #    13 ^M CR  is passed through as-is without escaping
-    #  - In other objects:
-    #     9 ^I HT  is encoded as "\t"
-    #    10 ^J NL  is encoded as "\n"
-    #    13 ^M CR  is encoded as "\n" rendering it indistinguishable from
-    #              10 ^J NL
-    # All other characters within the ASCII control character range (0 through
-    # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
-    # in hexadecimal.  For example, character 14 (^N SO) is encoded as "\U000e".
-    # Characters above the ASCII range are passed through to the output encoded
-    # as UTF-8 without any escaping.  These mappings are contained in the
-    # class' _encode_transforms list.
-
-    if _unquoted.search(value) and not _quoted.search(value):
-      return value
-
-    return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
-
-  def _XCPrint(self, file, tabs, line):
-    file.write('\t' * tabs + line)
-
-  def _XCPrintableValue(self, tabs, value, flatten_list=False):
-    """Returns a representation of value that may be printed in a project file,
-    mimicking Xcode's behavior.
-
-    _XCPrintableValue can handle str and int values, XCObjects (which are
-    made printable by returning their id property), and list and dict objects
-    composed of any of the above types.  When printing a list or dict, and
-    _should_print_single_line is False, the tabs parameter is used to determine
-    how much to indent the lines corresponding to the items in the list or
-    dict.
-
-    If flatten_list is True, single-element lists will be transformed into
-    strings.
-    """
-
-    printable = ''
-    comment = None
-
-    if self._should_print_single_line:
-      sep = ' '
-      element_tabs = ''
-      end_tabs = ''
-    else:
-      sep = '\n'
-      element_tabs = '\t' * (tabs + 1)
-      end_tabs = '\t' * tabs
-
-    if isinstance(value, XCObject):
-      printable += value.id
-      comment = value.Comment()
-    elif isinstance(value, str):
-      printable += self._EncodeString(value)
-    elif isinstance(value, unicode):
-      printable += self._EncodeString(value.encode('utf-8'))
-    elif isinstance(value, int):
-      printable += str(value)
-    elif isinstance(value, list):
-      if flatten_list and len(value) <= 1:
-        if len(value) == 0:
-          printable += self._EncodeString('')
-        else:
-          printable += self._EncodeString(value[0])
-      else:
-        printable = '(' + sep
-        for item in value:
-          printable += element_tabs + \
-                       self._XCPrintableValue(tabs + 1, item, flatten_list) + \
-                       ',' + sep
-        printable += end_tabs + ')'
-    elif isinstance(value, dict):
-      printable = '{' + sep
-      for item_key, item_value in sorted(value.iteritems()):
-        printable += element_tabs + \
-            self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
-            self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
-            sep
-      printable += end_tabs + '}'
-    else:
-      raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
-
-    if comment != None:
-      printable += ' ' + self._EncodeComment(comment)
-
-    return printable
-
-  def _XCKVPrint(self, file, tabs, key, value):
-    """Prints a key and value, members of an XCObject's _properties dictionary,
-    to file.
-
-    tabs is an int identifying the indentation level.  If the class'
-    _should_print_single_line variable is True, tabs is ignored and the
-    key-value pair will be followed by a space instead of a newline.
-    """
-
-    if self._should_print_single_line:
-      printable = ''
-      after_kv = ' '
-    else:
-      printable = '\t' * tabs
-      after_kv = '\n'
-
-    # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
-    # objects without comments.  Sometimes it prints them with comments, but
-    # the majority of the time, it doesn't.  To avoid unnecessary changes to
-    # the project file after Xcode opens it, don't write comments for
-    # remoteGlobalIDString.  This is a sucky hack and it would certainly be
-    # cleaner to extend the schema to indicate whether or not a comment should
-    # be printed, but since this is the only case where the problem occurs and
-    # Xcode itself can't seem to make up its mind, the hack will suffice.
-    #
-    # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
-    if key == 'remoteGlobalIDString' and isinstance(self,
-                                                    PBXContainerItemProxy):
-      value_to_print = value.id
-    else:
-      value_to_print = value
-
-    # PBXBuildFile's settings property is represented in the output as a dict,
-    # but a hack here has it represented as a string. Arrange to strip off the
-    # quotes so that it shows up in the output as expected.
-    if key == 'settings' and isinstance(self, PBXBuildFile):
-      strip_value_quotes = True
-    else:
-      strip_value_quotes = False
-
-    # In another one-off, let's set flatten_list on buildSettings properties
-    # of XCBuildConfiguration objects, because that's how Xcode treats them.
-    if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
-      flatten_list = True
-    else:
-      flatten_list = False
-
-    try:
-      printable_key = self._XCPrintableValue(tabs, key, flatten_list)
-      printable_value = self._XCPrintableValue(tabs, value_to_print,
-                                               flatten_list)
-      if strip_value_quotes and len(printable_value) > 1 and \
-          printable_value[0] == '"' and printable_value[-1] == '"':
-        printable_value = printable_value[1:-1]
-      printable += printable_key + ' = ' + printable_value + ';' + after_kv
-    except TypeError, e:
-      gyp.common.ExceptionAppend(e,
-                                 'while printing key "%s"' % key)
-      raise
-
-    self._XCPrint(file, 0, printable)
-
-  def Print(self, file=sys.stdout):
-    """Prints a reprentation of this object to file, adhering to Xcode output
-    formatting.
-    """
-
-    self.VerifyHasRequiredProperties()
-
-    if self._should_print_single_line:
-      # When printing an object in a single line, Xcode doesn't put any space
-      # between the beginning of a dictionary (or presumably a list) and the
-      # first contained item, so you wind up with snippets like
-      #   ...CDEF = {isa = PBXFileReference; fileRef = 0123...
-      # If it were me, I would have put a space in there after the opening
-      # curly, but I guess this is just another one of those inconsistencies
-      # between how Xcode prints PBXFileReference and PBXBuildFile objects as
-      # compared to other objects.  Mimic Xcode's behavior here by using an
-      # empty string for sep.
-      sep = ''
-      end_tabs = 0
-    else:
-      sep = '\n'
-      end_tabs = 2
-
-    # Start the object.  For example, '\t\tPBXProject = {\n'.
-    self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
-
-    # "isa" isn't in the _properties dictionary, it's an intrinsic property
-    # of the class which the object belongs to.  Xcode always outputs "isa"
-    # as the first element of an object dictionary.
-    self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
-
-    # The remaining elements of an object dictionary are sorted alphabetically.
-    for property, value in sorted(self._properties.iteritems()):
-      self._XCKVPrint(file, 3, property, value)
-
-    # End the object.
-    self._XCPrint(file, end_tabs, '};\n')
-
-  def UpdateProperties(self, properties, do_copy=False):
-    """Merge the supplied properties into the _properties dictionary.
-
-    The input properties must adhere to the class schema or a KeyError or
-    TypeError exception will be raised.  If adding an object of an XCObject
-    subclass and the schema indicates a strong relationship, the object's
-    parent will be set to this object.
-
-    If do_copy is True, then lists, dicts, strong-owned XCObjects, and
-    strong-owned XCObjects in lists will be copied instead of having their
-    references added.
-    """
-
-    if properties is None:
-      return
-
-    for property, value in properties.iteritems():
-      # Make sure the property is in the schema.
-      if not property in self._schema:
-        raise KeyError(property + ' not in ' + self.__class__.__name__)
-
-      # Make sure the property conforms to the schema.
-      (is_list, property_type, is_strong) = self._schema[property][0:3]
-      if is_list:
-        if value.__class__ != list:
-          raise TypeError(
-                property + ' of ' + self.__class__.__name__ + \
-                ' must be list, not ' + value.__class__.__name__)
-        for item in value:
-          if not isinstance(item, property_type) and \
-             not (item.__class__ == unicode and property_type == str):
-            # Accept unicode where str is specified.  str is treated as
-            # UTF-8-encoded.
-            raise TypeError(
-                  'item of ' + property + ' of ' + self.__class__.__name__ + \
-                  ' must be ' + property_type.__name__ + ', not ' + \
-                  item.__class__.__name__)
-      elif not isinstance(value, property_type) and \
-           not (value.__class__ == unicode and property_type == str):
-        # Accept unicode where str is specified.  str is treated as
-        # UTF-8-encoded.
-        raise TypeError(
-              property + ' of ' + self.__class__.__name__ + ' must be ' + \
-              property_type.__name__ + ', not ' + value.__class__.__name__)
-
-      # Checks passed, perform the assignment.
-      if do_copy:
-        if isinstance(value, XCObject):
-          if is_strong:
-            self._properties[property] = value.Copy()
-          else:
-            self._properties[property] = value
-        elif isinstance(value, str) or isinstance(value, unicode) or \
-             isinstance(value, int):
-          self._properties[property] = value
-        elif isinstance(value, list):
-          if is_strong:
-            # If is_strong is True, each element is an XCObject, so it's safe
-            # to call Copy.
-            self._properties[property] = []
-            for item in value:
-              self._properties[property].append(item.Copy())
-          else:
-            self._properties[property] = value[:]
-        elif isinstance(value, dict):
-          self._properties[property] = value.copy()
-        else:
-          raise TypeError("Don't know how to copy a " + \
-                          value.__class__.__name__ + ' object for ' + \
-                          property + ' in ' + self.__class__.__name__)
-      else:
-        self._properties[property] = value
-
-      # Set up the child's back-reference to this object.  Don't use |value|
-      # any more because it may not be right if do_copy is true.
-      if is_strong:
-        if not is_list:
-          self._properties[property].parent = self
-        else:
-          for item in self._properties[property]:
-            item.parent = self
-
-  def HasProperty(self, key):
-    return key in self._properties
-
-  def GetProperty(self, key):
-    return self._properties[key]
-
-  def SetProperty(self, key, value):
-    self.UpdateProperties({key: value})
-
-  def DelProperty(self, key):
-    if key in self._properties:
-      del self._properties[key]
-
-  def AppendProperty(self, key, value):
-    # TODO(mark): Support ExtendProperty too (and make this call that)?
-
-    # Schema validation.
-    if not key in self._schema:
-      raise KeyError(key + ' not in ' + self.__class__.__name__)
-
-    (is_list, property_type, is_strong) = self._schema[key][0:3]
-    if not is_list:
-      raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
-    if not isinstance(value, property_type):
-      raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
-                      ' must be ' + property_type.__name__ + ', not ' + \
-                      value.__class__.__name__)
-
-    # If the property doesn't exist yet, create a new empty list to receive the
-    # item.
-    if not key in self._properties:
-      self._properties[key] = []
-
-    # Set up the ownership link.
-    if is_strong:
-      value.parent = self
-
-    # Store the item.
-    self._properties[key].append(value)
-
-  def VerifyHasRequiredProperties(self):
-    """Ensure that all properties identified as required by the schema are
-    set.
-    """
-
-    # TODO(mark): A stronger verification mechanism is needed.  Some
-    # subclasses need to perform validation beyond what the schema can enforce.
-    for property, attributes in self._schema.iteritems():
-      (is_list, property_type, is_strong, is_required) = attributes[0:4]
-      if is_required and not property in self._properties:
-        raise KeyError(self.__class__.__name__ + ' requires ' + property)
-
-  def _SetDefaultsFromSchema(self):
-    """Assign object default values according to the schema.  This will not
-    overwrite properties that have already been set."""
-
-    defaults = {}
-    for property, attributes in self._schema.iteritems():
-      (is_list, property_type, is_strong, is_required) = attributes[0:4]
-      if is_required and len(attributes) >= 5 and \
-          not property in self._properties:
-        default = attributes[4]
-
-        defaults[property] = default
-
-    if len(defaults) > 0:
-      # Use do_copy=True so that each new object gets its own copy of strong
-      # objects, lists, and dicts.
-      self.UpdateProperties(defaults, do_copy=True)
-
-
-class XCHierarchicalElement(XCObject):
-  """Abstract base for PBXGroup and PBXFileReference.  Not represented in a
-  project file."""
-
-  # TODO(mark): Do name and path belong here?  Probably so.
-  # If path is set and name is not, name may have a default value.  Name will
-  # be set to the basename of path, if the basename of path is different from
-  # the full value of path.  If path is already just a leaf name, name will
-  # not be set.
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'comments':       [0, str, 0, 0],
-    'fileEncoding':   [0, str, 0, 0],
-    'includeInIndex': [0, int, 0, 0],
-    'indentWidth':    [0, int, 0, 0],
-    'lineEnding':     [0, int, 0, 0],
-    'sourceTree':     [0, str, 0, 1, '<group>'],
-    'tabWidth':       [0, int, 0, 0],
-    'usesTabs':       [0, int, 0, 0],
-    'wrapsLines':     [0, int, 0, 0],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCObject.__init__(self, properties, id, parent)
-    if 'path' in self._properties and not 'name' in self._properties:
-      path = self._properties['path']
-      name = posixpath.basename(path)
-      if name != '' and path != name:
-        self.SetProperty('name', name)
-
-    if 'path' in self._properties and \
-        (not 'sourceTree' in self._properties or \
-         self._properties['sourceTree'] == '<group>'):
-      # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
-      # the variable out and make the path be relative to that variable by
-      # assigning the variable name as the sourceTree.
-      (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
-      if source_tree != None:
-        self._properties['sourceTree'] = source_tree
-      if path != None:
-        self._properties['path'] = path
-      if source_tree != None and path is None and \
-         not 'name' in self._properties:
-        # The path was of the form "$(SDKROOT)" with no path following it.
-        # This object is now relative to that variable, so it has no path
-        # attribute of its own.  It does, however, keep a name.
-        del self._properties['path']
-        self._properties['name'] = source_tree
-
-  def Name(self):
-    if 'name' in self._properties:
-      return self._properties['name']
-    elif 'path' in self._properties:
-      return self._properties['path']
-    else:
-      # This happens in the case of the root PBXGroup.
-      return None
-
-  def Hashables(self):
-    """Custom hashables for XCHierarchicalElements.
-
-    XCHierarchicalElements are special.  Generally, their hashes shouldn't
-    change if the paths don't change.  The normal XCObject implementation of
-    Hashables adds a hashable for each object, which means that if
-    the hierarchical structure changes (possibly due to changes caused when
-    TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
-    the hashes will change.  For example, if a project file initially contains
-    a/b/f1 and the groups a and b are collapsed into a single group a/b, f1 will have a single parent
-    a/b.  If someone later adds a/f2 to the project file, a/b can no longer be
-    collapsed, and f1 winds up with parent b and grandparent a.  That would
-    be sufficient to change f1's hash.
-
-    To counteract this problem, hashables for all XCHierarchicalElements except
-    for the main group (which has neither a name nor a path) are taken to be
-    just the set of path components.  Because hashables are inherited from
-    parents, this provides assurance that a/b/f1 has the same set of hashables
-    whether its parent is b or a/b.
-
-    The main group is a special case.  As it is permitted to have no name or
-    path, it is permitted to use the standard XCObject hash mechanism.  This
-    is not considered a problem because there can be only one main group.
-    """
-
-    if self == self.PBXProjectAncestor()._properties['mainGroup']:
-      # super
-      return XCObject.Hashables(self)
-
-    hashables = []
-
-    # Put the name in first, ensuring that if TakeOverOnlyChild collapses
-    # children into a top-level group like "Source", the name always goes
-    # into the list of hashables without interfering with path components.
-    if 'name' in self._properties:
-      # Make it less likely for people to manipulate hashes by following the
-      # pattern of always pushing an object type value onto the list first.
-      hashables.append(self.__class__.__name__ + '.name')
-      hashables.append(self._properties['name'])
-
-    # NOTE: This still has the problem that if an absolute path is encountered,
-    # including paths with a sourceTree, they'll still inherit their parents'
-    # hashables, even though the paths aren't relative to their parents.  This
-    # is not expected to be much of a problem in practice.
-    path = self.PathFromSourceTreeAndPath()
-    if path != None:
-      components = path.split(posixpath.sep)
-      for component in components:
-        hashables.append(self.__class__.__name__ + '.path')
-        hashables.append(component)
-
-    hashables.extend(self._hashables)
-
-    return hashables
-
-  def Compare(self, other):
-    # Allow comparison of these types.  PBXGroup has the highest sort rank;
-    # PBXVariantGroup is treated as equal to PBXFileReference.
-    valid_class_types = {
-      PBXFileReference: 'file',
-      PBXGroup:         'group',
-      PBXVariantGroup:  'file',
-    }
-    self_type = valid_class_types[self.__class__]
-    other_type = valid_class_types[other.__class__]
-
-    if self_type == other_type:
-      # If the two objects are of the same sort rank, compare their names.
-      return cmp(self.Name(), other.Name())
-
-    # Otherwise, sort groups before everything else.
-    if self_type == 'group':
-      return -1
-    return 1
-
-  def CompareRootGroup(self, other):
-    # This function should be used only to compare direct children of the
-    # containing PBXProject's mainGroup.  These groups should appear in the
-    # listed order.
-    # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
-    # generator should have a way of influencing this list rather than having
-    # to hardcode for the generator here.
-    order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
-             'Build']
-
-    # If the groups aren't in the listed order, do a name comparison.
-    # Otherwise, groups in the listed order should come before those that
-    # aren't.
-    self_name = self.Name()
-    other_name = other.Name()
-    self_in = isinstance(self, PBXGroup) and self_name in order
-    other_in = isinstance(other, PBXGroup) and other_name in order
-    if not self_in and not other_in:
-      return self.Compare(other)
-    if self_name in order and not other_name in order:
-      return -1
-    if other_name in order and not self_name in order:
-      return 1
-
-    # If both groups are in the listed order, go by the defined order.
-    self_index = order.index(self_name)
-    other_index = order.index(other_name)
-    if self_index < other_index:
-      return -1
-    if self_index > other_index:
-      return 1
-    return 0
-
-  def PathFromSourceTreeAndPath(self):
-    # Turn the object's sourceTree and path properties into a single flat
-    # string of a form comparable to the path parameter.  If there's a
-    # sourceTree property other than "<group>", wrap it in $(...) for the
-    # comparison.
-    components = []
-    if self._properties['sourceTree'] != '<group>':
-      components.append('$(' + self._properties['sourceTree'] + ')')
-    if 'path' in self._properties:
-      components.append(self._properties['path'])
-
-    if len(components) > 0:
-      return posixpath.join(*components)
-
-    return None
-
-  def FullPath(self):
-    # Returns a full path to self relative to the project file, or relative
-    # to some other source tree.  Start with self, and walk up the chain of
-    # parents prepending their paths, if any, until no more parents are
-    # available (project-relative path) or until a path relative to some
-    # source tree is found.
-    xche = self
-    path = None
-    while isinstance(xche, XCHierarchicalElement) and \
-          (path is None or \
-           (not path.startswith('/') and not path.startswith('$'))):
-      this_path = xche.PathFromSourceTreeAndPath()
-      if this_path != None and path != None:
-        path = posixpath.join(this_path, path)
-      elif this_path != None:
-        path = this_path
-      xche = xche.parent
-
-    return path
-
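# A minimal usage sketch of the sourceTree/path splitting implemented above,
# assuming gyp's pylib directory is on sys.path; PBXFileReference (defined
# further down in this module) serves as a concrete XCHierarchicalElement,
# and the SDK path is invented for illustration.
from gyp import xcodeproj_file

ref = xcodeproj_file.PBXFileReference({'path': '$(SDKROOT)/usr/lib/libz.dylib'})
# The "$(SDKROOT)/" prefix becomes the sourceTree, the remainder stays in
# path, and name falls back to the path's basename.
assert ref._properties['sourceTree'] == 'SDKROOT'
assert ref._properties['path'] == 'usr/lib/libz.dylib'
assert ref.Name() == 'libz.dylib'
# PathFromSourceTreeAndPath() re-joins the two pieces; FullPath() would also
# prepend ancestor group paths if this reference had a parent.
assert ref.PathFromSourceTreeAndPath() == '$(SDKROOT)/usr/lib/libz.dylib'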
-
-class PBXGroup(XCHierarchicalElement):
-  """
-  Attributes:
-    _children_by_path: Maps pathnames of children of this PBXGroup to the
-      actual child XCHierarchicalElement objects.
-    _variant_children_by_name_and_path: Maps (name, path) tuples of
-      PBXVariantGroup children to the actual child PBXVariantGroup objects.
-  """
-
-  _schema = XCHierarchicalElement._schema.copy()
-  _schema.update({
-    'children': [1, XCHierarchicalElement, 1, 1, []],
-    'name':     [0, str,                   0, 0],
-    'path':     [0, str,                   0, 0],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCHierarchicalElement.__init__(self, properties, id, parent)
-    self._children_by_path = {}
-    self._variant_children_by_name_and_path = {}
-    for child in self._properties.get('children', []):
-      self._AddChildToDicts(child)
-
-  def Hashables(self):
-    # super
-    hashables = XCHierarchicalElement.Hashables(self)
-
-    # It is not sufficient to rely on just the name and parent to build a
-    # unique hashable: a node could have two child PBXGroups sharing a common
-    # name.  To add entropy, the hashables are extended with the names of all
-    # of this group's children.
-    for child in self._properties.get('children', []):
-      child_name = child.Name()
-      if child_name != None:
-        hashables.append(child_name)
-
-    return hashables
-
-  def HashablesForChild(self):
-    # To avoid a circular reference the hashables used to compute a child id do
-    # not include the child names.
-    return XCHierarchicalElement.Hashables(self)
-
-  def _AddChildToDicts(self, child):
-    # Sets up this PBXGroup object's dicts to reference the child properly.
-    child_path = child.PathFromSourceTreeAndPath()
-    if child_path:
-      if child_path in self._children_by_path:
-        raise ValueError('Found multiple children with path ' + child_path)
-      self._children_by_path[child_path] = child
-
-    if isinstance(child, PBXVariantGroup):
-      child_name = child._properties.get('name', None)
-      key = (child_name, child_path)
-      if key in self._variant_children_by_name_and_path:
-        raise ValueError('Found multiple PBXVariantGroup children with ' + \
-                         'name ' + str(child_name) + ' and path ' + \
-                         str(child_path))
-      self._variant_children_by_name_and_path[key] = child
-
-  def AppendChild(self, child):
-    # Callers should use this instead of calling
-    # AppendProperty('children', child) directly because this function
-    # maintains the group's dicts.
-    self.AppendProperty('children', child)
-    self._AddChildToDicts(child)
-
-  def GetChildByName(self, name):
-    # This is not currently optimized with a dict as GetChildByPath is because
-    # it has few callers.  Most callers probably want GetChildByPath.  This
-    # function is only useful to get children that have names but no paths,
-    # which is rare.  The children of the main group ("Source", "Products",
-    # etc.) are pretty much the only case where this is likely to come up.
-    #
-    # TODO(mark): Maybe this should raise an error if more than one child is
-    # present with the same name.
-    if not 'children' in self._properties:
-      return None
-
-    for child in self._properties['children']:
-      if child.Name() == name:
-        return child
-
-    return None
-
-  def GetChildByPath(self, path):
-    if not path:
-      return None
-
-    if path in self._children_by_path:
-      return self._children_by_path[path]
-
-    return None
-
-  def GetChildByRemoteObject(self, remote_object):
-    # This method is a little bit esoteric.  Given a remote_object, which
-    # should be a PBXFileReference in another project file, this method will
-    # return this group's PBXReferenceProxy object serving as a local proxy
-    # for the remote PBXFileReference.
-    #
-    # This function might benefit from a dict optimization as GetChildByPath
-    # for some workloads, but profiling shows that it's not currently a
-    # problem.
-    if not 'children' in self._properties:
-      return None
-
-    for child in self._properties['children']:
-      if not isinstance(child, PBXReferenceProxy):
-        continue
-
-      container_proxy = child._properties['remoteRef']
-      if container_proxy._properties['remoteGlobalIDString'] == remote_object:
-        return child
-
-    return None
-
-  def AddOrGetFileByPath(self, path, hierarchical):
-    """Returns an existing or new file reference corresponding to path.
-
-    If hierarchical is True, this method will create or use the necessary
-    hierarchical group structure corresponding to path.  Otherwise, it will
-    look in and create an item in the current group only.
-
-    If an existing matching reference is found, it is returned, otherwise, a
-    new one will be created, added to the correct group, and returned.
-
-    If path identifies a directory by virtue of carrying a trailing slash,
-    this method returns a PBXFileReference of "folder" type.  If path
-    identifies a variant, by virtue of it identifying a file inside a directory
-    with an ".lproj" extension, this method returns a PBXVariantGroup
-    containing the variant named by path, and possibly other variants.  For
-    all other paths, a "normal" PBXFileReference will be returned.
-    """
-
-    # Adding or getting a directory?  Directories end with a trailing slash.
-    is_dir = False
-    if path.endswith('/'):
-      is_dir = True
-    path = posixpath.normpath(path)
-    if is_dir:
-      path = path + '/'
-
-    # Adding or getting a variant?  Variants are files inside directories
-    # with an ".lproj" extension.  Xcode uses variants for localization.  For
-    # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
-    # MainMenu.nib inside path/to, and give it a variant named Language.  In
-    # this example, grandparent would be set to path/to and parent_root would
-    # be set to Language.
-    variant_name = None
-    parent = posixpath.dirname(path)
-    grandparent = posixpath.dirname(parent)
-    parent_basename = posixpath.basename(parent)
-    (parent_root, parent_ext) = posixpath.splitext(parent_basename)
-    if parent_ext == '.lproj':
-      variant_name = parent_root
-    if grandparent == '':
-      grandparent = None
-
-    # Putting a directory inside a variant group is not currently supported.
-    assert not is_dir or variant_name is None
-
-    path_split = path.split(posixpath.sep)
-    if len(path_split) == 1 or \
-       ((is_dir or variant_name != None) and len(path_split) == 2) or \
-       not hierarchical:
-      # The PBXFileReference or PBXVariantGroup will be added to or gotten from
-      # this PBXGroup, no recursion necessary.
-      if variant_name is None:
-        # Add or get a PBXFileReference.
-        file_ref = self.GetChildByPath(path)
-        if file_ref != None:
-          assert file_ref.__class__ == PBXFileReference
-        else:
-          file_ref = PBXFileReference({'path': path})
-          self.AppendChild(file_ref)
-      else:
-        # Add or get a PBXVariantGroup.  The variant group name is the same
-        # as the basename (MainMenu.nib in the example above).  grandparent
-        # specifies the path to the variant group itself, and path_split[-2:]
-        # is the path of the specific variant relative to its group.
-        variant_group_name = posixpath.basename(path)
-        variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
-            variant_group_name, grandparent)
-        variant_path = posixpath.sep.join(path_split[-2:])
-        variant_ref = variant_group_ref.GetChildByPath(variant_path)
-        if variant_ref != None:
-          assert variant_ref.__class__ == PBXFileReference
-        else:
-          variant_ref = PBXFileReference({'name': variant_name,
-                                          'path': variant_path})
-          variant_group_ref.AppendChild(variant_ref)
-        # The caller is interested in the variant group, not the specific
-        # variant file.
-        file_ref = variant_group_ref
-      return file_ref
-    else:
-      # Hierarchical recursion.  Add or get a PBXGroup corresponding to the
-      # outermost path component, and then recurse into it, chopping off that
-      # path component.
-      next_dir = path_split[0]
-      group_ref = self.GetChildByPath(next_dir)
-      if group_ref != None:
-        assert group_ref.__class__ == PBXGroup
-      else:
-        group_ref = PBXGroup({'path': next_dir})
-        self.AppendChild(group_ref)
-      return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
-                                          hierarchical)
-
-  def AddOrGetVariantGroupByNameAndPath(self, name, path):
-    """Returns an existing or new PBXVariantGroup for name and path.
-
-    If a PBXVariantGroup identified by the name and path arguments is already
-    present as a child of this object, it is returned.  Otherwise, a new
-    PBXVariantGroup with the correct properties is created, added as a child,
-    and returned.
-
-    This method will generally be called by AddOrGetFileByPath, which knows
-    when to create a variant group based on the structure of the pathnames
-    passed to it.
-    """
-
-    key = (name, path)
-    if key in self._variant_children_by_name_and_path:
-      variant_group_ref = self._variant_children_by_name_and_path[key]
-      assert variant_group_ref.__class__ == PBXVariantGroup
-      return variant_group_ref
-
-    variant_group_properties = {'name': name}
-    if path != None:
-      variant_group_properties['path'] = path
-    variant_group_ref = PBXVariantGroup(variant_group_properties)
-    self.AppendChild(variant_group_ref)
-
-    return variant_group_ref
-
-  def TakeOverOnlyChild(self, recurse=False):
-    """If this PBXGroup has only one child and it's also a PBXGroup, take
-    it over by making all of its children this object's children.
-
-    This function will continue to take over only children when those children
-    are groups.  If there are three PBXGroups representing a, b, and c, with
-    c inside b and b inside a, and a and b have no other children, this will
-    result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
-    If recurse is True, this function will recurse into children and ask them
-    to collapse themselves by taking over only children as well.  Assuming
-    an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
-    (d1, d2, and f are files, the rest are groups), recursion will result in
-    a group for a/b/c containing a group for d3/e.
-    """
-
-    # At this stage, check that child class types are PBXGroup exactly,
-    # instead of using isinstance.  The only subclass of PBXGroup,
-    # PBXVariantGroup, should not participate in reparenting in the same way:
-    # reparenting by merging different object types would be wrong.
-    while len(self._properties['children']) == 1 and \
-          self._properties['children'][0].__class__ == PBXGroup:
-      # Loop to take over the innermost only-child group possible.
-
-      child = self._properties['children'][0]
-
-      # Assume the child's properties, including its children.  Save a copy
-      # of this object's old properties, because they'll still be needed.
-      # This object retains its existing id and parent attributes.
-      old_properties = self._properties
-      self._properties = child._properties
-      self._children_by_path = child._children_by_path
-
-      if not 'sourceTree' in self._properties or \
-         self._properties['sourceTree'] == '<group>':
-        # The child was relative to its parent.  Fix up the path.  Note that
-        # children with a sourceTree other than "<group>" are not relative to
-        # their parents, so no path fix-up is needed in that case.
-        if 'path' in old_properties:
-          if 'path' in self._properties:
-            # Both the original parent and child have paths set.
-            self._properties['path'] = posixpath.join(old_properties['path'],
-                                                      self._properties['path'])
-          else:
-            # Only the original parent has a path, use it.
-            self._properties['path'] = old_properties['path']
-        if 'sourceTree' in old_properties:
-          # The original parent had a sourceTree set, use it.
-          self._properties['sourceTree'] = old_properties['sourceTree']
-
-      # If the original parent had a name set, keep using it.  If the original
-      # parent didn't have a name but the child did, let the child's name
-      # live on.  If the name attribute seems unnecessary now, get rid of it.
-      if 'name' in old_properties and old_properties['name'] != None and \
-         old_properties['name'] != self.Name():
-        self._properties['name'] = old_properties['name']
-      if 'name' in self._properties and 'path' in self._properties and \
-         self._properties['name'] == self._properties['path']:
-        del self._properties['name']
-
-      # Notify all children of their new parent.
-      for child in self._properties['children']:
-        child.parent = self
-
-    # If asked to recurse, recurse.
-    if recurse:
-      for child in self._properties['children']:
-        if child.__class__ == PBXGroup:
-          child.TakeOverOnlyChild(recurse)
-
-  def SortGroup(self):
-    self._properties['children'] = \
-        sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
-
-    # Recurse.
-    for child in self._properties['children']:
-      if isinstance(child, PBXGroup):
-        child.SortGroup()
-
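# A minimal sketch of hierarchical file addition and group collapsing in
# PBXGroup, assuming gyp's pylib directory is on sys.path; the group name and
# file paths below are invented for illustration.
from gyp import xcodeproj_file

source = xcodeproj_file.PBXGroup({'name': 'Source'})

# With hierarchical=True, intermediate groups "a" and "b" are created on
# demand, and a second call with the same path returns the same reference.
ref = source.AddOrGetFileByPath('a/b/main.cc', True)
assert source.AddOrGetFileByPath('a/b/main.cc', True) is ref

# A file inside an .lproj directory comes back as a PBXVariantGroup that
# collects the individual language variants.
strings = source.AddOrGetFileByPath('res/en.lproj/Localizable.strings', True)
assert isinstance(strings, xcodeproj_file.PBXVariantGroup)

# TakeOverOnlyChild collapses single-child group chains: the "a" group
# created above absorbs "b", leaving one group whose path is "a/b".
source.TakeOverOnlyChild(recurse=True)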
-
-class XCFileLikeElement(XCHierarchicalElement):
-  # Abstract base for objects that can be used as the fileRef property of
-  # PBXBuildFile.
-
-  def PathHashables(self):
-    # A PBXBuildFile that refers to this object will call this method to
-    # obtain additional hashables specific to this XCFileLikeElement.  Don't
-    # just use this object's hashables; they're not specific and unique enough
-    # on their own (without access to the parent hashables).  Instead, provide
-    # hashables that identify this object by path by getting its hashables as
-    # well as the hashables of ancestor XCHierarchicalElement objects.
-
-    hashables = []
-    xche = self
-    while xche != None and isinstance(xche, XCHierarchicalElement):
-      xche_hashables = xche.Hashables()
-      for index in xrange(0, len(xche_hashables)):
-        hashables.insert(index, xche_hashables[index])
-      xche = xche.parent
-    return hashables
-
-
-class XCContainerPortal(XCObject):
-  # Abstract base for objects that can be used as the containerPortal property
-  # of PBXContainerItemProxy.
-  pass
-
-
-class XCRemoteObject(XCObject):
-  # Abstract base for objects that can be used as the remoteGlobalIDString
-  # property of PBXContainerItemProxy.
-  pass
-
-
-class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
-  _schema = XCFileLikeElement._schema.copy()
-  _schema.update({
-    'explicitFileType':  [0, str, 0, 0],
-    'lastKnownFileType': [0, str, 0, 0],
-    'name':              [0, str, 0, 0],
-    'path':              [0, str, 0, 1],
-  })
-
-  # Weird output rules for PBXFileReference.
-  _should_print_single_line = True
-  # super
-  _encode_transforms = XCFileLikeElement._alternate_encode_transforms
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCFileLikeElement.__init__(self, properties, id, parent)
-    if 'path' in self._properties and self._properties['path'].endswith('/'):
-      self._properties['path'] = self._properties['path'][:-1]
-      is_dir = True
-    else:
-      is_dir = False
-
-    if 'path' in self._properties and \
-        not 'lastKnownFileType' in self._properties and \
-        not 'explicitFileType' in self._properties:
-      # TODO(mark): This is the replacement for a replacement for a quick hack.
-      # It is no longer incredibly sucky, but this list needs to be extended.
-      extension_map = {
-        'a':           'archive.ar',
-        'app':         'wrapper.application',
-        'bdic':        'file',
-        'bundle':      'wrapper.cfbundle',
-        'c':           'sourcecode.c.c',
-        'cc':          'sourcecode.cpp.cpp',
-        'cpp':         'sourcecode.cpp.cpp',
-        'css':         'text.css',
-        'cxx':         'sourcecode.cpp.cpp',
-        'dart':        'sourcecode',
-        'dylib':       'compiled.mach-o.dylib',
-        'framework':   'wrapper.framework',
-        'gyp':         'sourcecode',
-        'gypi':        'sourcecode',
-        'h':           'sourcecode.c.h',
-        'hxx':         'sourcecode.cpp.h',
-        'icns':        'image.icns',
-        'java':        'sourcecode.java',
-        'js':          'sourcecode.javascript',
-        'kext':        'wrapper.kext',
-        'm':           'sourcecode.c.objc',
-        'mm':          'sourcecode.cpp.objcpp',
-        'nib':         'wrapper.nib',
-        'o':           'compiled.mach-o.objfile',
-        'pdf':         'image.pdf',
-        'pl':          'text.script.perl',
-        'plist':       'text.plist.xml',
-        'pm':          'text.script.perl',
-        'png':         'image.png',
-        'py':          'text.script.python',
-        'r':           'sourcecode.rez',
-        'rez':         'sourcecode.rez',
-        's':           'sourcecode.asm',
-        'storyboard':  'file.storyboard',
-        'strings':     'text.plist.strings',
-        'swift':       'sourcecode.swift',
-        'ttf':         'file',
-        'xcassets':    'folder.assetcatalog',
-        'xcconfig':    'text.xcconfig',
-        'xcdatamodel': 'wrapper.xcdatamodel',
-        'xcdatamodeld':'wrapper.xcdatamodeld',
-        'xib':         'file.xib',
-        'y':           'sourcecode.yacc',
-      }
-
-      prop_map = {
-        'dart':        'explicitFileType',
-        'gyp':         'explicitFileType',
-        'gypi':        'explicitFileType',
-      }
-
-      if is_dir:
-        file_type = 'folder'
-        prop_name = 'lastKnownFileType'
-      else:
-        basename = posixpath.basename(self._properties['path'])
-        (root, ext) = posixpath.splitext(basename)
-        # Check the map using a lowercase extension.
-        # TODO(mark): Maybe it should try with the original case first and fall
-        # back to lowercase, in case there are any instances where case
-        # matters.  There currently aren't.
-        if ext != '':
-          ext = ext[1:].lower()
-
-        # TODO(mark): "text" is the default value, but "file" is appropriate
-        # for unrecognized files not containing text.  Xcode seems to choose
-        # based on content.
-        file_type = extension_map.get(ext, 'text')
-        prop_name = prop_map.get(ext, 'lastKnownFileType')
-
-      self._properties[prop_name] = file_type
-
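# A short sketch of the extension-based typing above, assuming gyp's pylib
# directory is on sys.path; the file names are invented for illustration.
from gyp import xcodeproj_file

objcpp = xcodeproj_file.PBXFileReference({'path': 'src/widget.mm'})
assert objcpp._properties['lastKnownFileType'] == 'sourcecode.cpp.objcpp'

# A trailing slash marks a directory: the slash is stripped and the type
# becomes "folder".
assets = xcodeproj_file.PBXFileReference({'path': 'assets/'})
assert assets._properties['path'] == 'assets'
assert assets._properties['lastKnownFileType'] == 'folder'

# Extensions listed in prop_map ("dart", "gyp", "gypi") are recorded under
# explicitFileType rather than lastKnownFileType.
gyp_ref = xcodeproj_file.PBXFileReference({'path': 'hello.gyp'})
assert gyp_ref._properties['explicitFileType'] == 'sourcecode'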
-
-class PBXVariantGroup(PBXGroup, XCFileLikeElement):
-  """PBXVariantGroup is used by Xcode to represent localizations."""
-  # No additions to the schema relative to PBXGroup.
-  pass
-
-
-# PBXReferenceProxy is also an XCFileLikeElement subclass.  It is defined below
-# because it uses PBXContainerItemProxy, defined below.
-
-
-class XCBuildConfiguration(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'baseConfigurationReference': [0, PBXFileReference, 0, 0],
-    'buildSettings':              [0, dict, 0, 1, {}],
-    'name':                       [0, str,  0, 1],
-  })
-
-  def HasBuildSetting(self, key):
-    return key in self._properties['buildSettings']
-
-  def GetBuildSetting(self, key):
-    return self._properties['buildSettings'][key]
-
-  def SetBuildSetting(self, key, value):
-    # TODO(mark): If a list, copy?
-    self._properties['buildSettings'][key] = value
-
-  def AppendBuildSetting(self, key, value):
-    if not key in self._properties['buildSettings']:
-      self._properties['buildSettings'][key] = []
-    self._properties['buildSettings'][key].append(value)
-
-  def DelBuildSetting(self, key):
-    if key in self._properties['buildSettings']:
-      del self._properties['buildSettings'][key]
-
-  def SetBaseConfiguration(self, value):
-    self._properties['baseConfigurationReference'] = value
-
-
-class XCConfigurationList(XCObject):
-  # _configs is the default list of configurations.
-  _configs = [ XCBuildConfiguration({'name': 'Debug'}),
-               XCBuildConfiguration({'name': 'Release'}) ]
-
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'buildConfigurations':           [1, XCBuildConfiguration, 1, 1, _configs],
-    'defaultConfigurationIsVisible': [0, int,                  0, 1, 1],
-    'defaultConfigurationName':      [0, str,                  0, 1, 'Release'],
-  })
-
-  def Name(self):
-    return 'Build configuration list for ' + \
-           self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
-
-  def ConfigurationNamed(self, name):
-    """Convenience accessor to obtain an XCBuildConfiguration by name."""
-    for configuration in self._properties['buildConfigurations']:
-      if configuration._properties['name'] == name:
-        return configuration
-
-    raise KeyError(name)
-
-  def DefaultConfiguration(self):
-    """Convenience accessor to obtain the default XCBuildConfiguration."""
-    return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
-
-  def HasBuildSetting(self, key):
-    """Determines the state of a build setting in all XCBuildConfiguration
-    child objects.
-
-    If all child objects have key in their build settings, and the value is the
-    same in all child objects, returns 1.
-
-    If no child objects have the key in their build settings, returns 0.
-
-    If some, but not all, child objects have the key in their build settings,
-    or if any children have different values for the key, returns -1.
-    """
-
-    has = None
-    value = None
-    for configuration in self._properties['buildConfigurations']:
-      configuration_has = configuration.HasBuildSetting(key)
-      if has is None:
-        has = configuration_has
-      elif has != configuration_has:
-        return -1
-
-      if configuration_has:
-        configuration_value = configuration.GetBuildSetting(key)
-        if value is None:
-          value = configuration_value
-        elif value != configuration_value:
-          return -1
-
-    if not has:
-      return 0
-
-    return 1
-
-  def GetBuildSetting(self, key):
-    """Gets the build setting for key.
-
-    All child XCConfiguration objects must have the same value set for the
-    setting, or a ValueError will be raised.
-    """
-
-    # TODO(mark): This is wrong for build settings that are lists.  The list
-    # contents should be compared (and a list copy returned?)
-
-    value = None
-    for configuration in self._properties['buildConfigurations']:
-      configuration_value = configuration.GetBuildSetting(key)
-      if value is None:
-        value = configuration_value
-      else:
-        if value != configuration_value:
-          raise ValueError('Variant values for ' + key)
-
-    return value
-
-  def SetBuildSetting(self, key, value):
-    """Sets the build setting for key to value in all child
-    XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.SetBuildSetting(key, value)
-
-  def AppendBuildSetting(self, key, value):
-    """Appends value to the build setting for key, which is treated as a list,
-    in all child XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.AppendBuildSetting(key, value)
-
-  def DelBuildSetting(self, key):
-    """Deletes the build setting key from all child XCBuildConfiguration
-    objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.DelBuildSetting(key)
-
-  def SetBaseConfiguration(self, value):
-    """Sets the build configuration in all child XCBuildConfiguration objects.
-    """
-
-    for configuration in self._properties['buildConfigurations']:
-      configuration.SetBaseConfiguration(value)
-
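# A minimal sketch of the tri-state HasBuildSetting() contract described
# above, assuming gyp's pylib directory is on sys.path; the setting names are
# arbitrary examples.
from gyp import xcodeproj_file

configs = xcodeproj_file.XCConfigurationList()  # default Debug + Release

configs.SetBuildSetting('GCC_C_LANGUAGE_STANDARD', 'c99')
assert configs.HasBuildSetting('GCC_C_LANGUAGE_STANDARD') == 1  # everywhere
assert configs.HasBuildSetting('OTHER_CFLAGS') == 0             # nowhere

configs.ConfigurationNamed('Debug').SetBuildSetting('OTHER_CFLAGS', '-g')
assert configs.HasBuildSetting('OTHER_CFLAGS') == -1            # Debug only

# GetBuildSetting() requires a single consistent value and raises ValueError
# when the configurations disagree.
assert configs.GetBuildSetting('GCC_C_LANGUAGE_STANDARD') == 'c99'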
-
-class PBXBuildFile(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'fileRef':  [0, XCFileLikeElement, 0, 1],
-    'settings': [0, str,               0, 0],  # hack, it's a dict
-  })
-
-  # Weird output rules for PBXBuildFile.
-  _should_print_single_line = True
-  _encode_transforms = XCObject._alternate_encode_transforms
-
-  def Name(self):
-    # Example: "main.cc in Sources"
-    return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # It is not sufficient to just rely on Name() to get the
-    # XCFileLikeElement's name, because that is not a complete pathname.
-    # PathHashables returns hashables unique enough that no two
-    # PBXBuildFiles should wind up with the same set of hashables, unless
-    # someone adds the same file multiple times to the same target.  That
-    # would be considered invalid anyway.
-    hashables.extend(self._properties['fileRef'].PathHashables())
-
-    return hashables
-
-
-class XCBuildPhase(XCObject):
-  """Abstract base for build phase classes.  Not represented in a project
-  file.
-
-  Attributes:
-    _files_by_path: A dict mapping each path in the files list (keys) to the
-      corresponding PBXBuildFile children (values).
-    _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
-      to the corresponding PBXBuildFile children (values).
-  """
-
-  # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
-  # actually have a "files" list.  XCBuildPhase should not have "files" but
-  # another abstract subclass of it should provide this, and concrete build
-  # phase types that do have "files" lists should be derived from that new
-  # abstract subclass.  XCBuildPhase should only provide buildActionMask and
-  # runOnlyForDeploymentPostprocessing, and not files or the various
-  # file-related methods and attributes.
-
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'buildActionMask':                    [0, int,          0, 1, 0x7fffffff],
-    'files':                              [1, PBXBuildFile, 1, 1, []],
-    'runOnlyForDeploymentPostprocessing': [0, int,          0, 1, 0],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None):
-    # super
-    XCObject.__init__(self, properties, id, parent)
-
-    self._files_by_path = {}
-    self._files_by_xcfilelikeelement = {}
-    for pbxbuildfile in self._properties.get('files', []):
-      self._AddBuildFileToDicts(pbxbuildfile)
-
-  def FileGroup(self, path):
-    # Subclasses must override this by returning a two-element tuple.  The
-    # first item in the tuple should be the PBXGroup to which "path" should be
-    # added, either as a child or deeper descendant.  The second item should
-    # be a boolean indicating whether files should be added into hierarchical
-    # groups or one single flat group.
-    raise NotImplementedError(
-          self.__class__.__name__ + ' must implement FileGroup')
-
-  def _AddPathToDict(self, pbxbuildfile, path):
-    """Adds path to the dict tracking paths belonging to this build phase.
-
-    If the path is already a member of this build phase, raises an exception.
-    """
-
-    if path in self._files_by_path:
-      raise ValueError('Found multiple build files with path ' + path)
-    self._files_by_path[path] = pbxbuildfile
-
-  def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
-    """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
-
-    If path is specified, then it is the path that is being added to the
-    phase, and pbxbuildfile must contain either a PBXFileReference directly
-    referencing that path, or it must contain a PBXVariantGroup that itself
-    contains a PBXFileReference referencing the path.
-
-    If path is not specified, either the PBXFileReference's path or the paths
-    of all children of the PBXVariantGroup are taken as being added to the
-    phase.
-
-    If the path is already present in the phase, raises an exception.
-
-    If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
-    are already present in the phase, referenced by a different PBXBuildFile
-    object, raises an exception.  This does not raise an exception when
-    a PBXFileReference or PBXVariantGroup reappear and are referenced by the
-    same PBXBuildFile that has already introduced them, because in the case
-    of PBXVariantGroup objects, they may correspond to multiple paths that are
-    not all added simultaneously.  When this situation occurs, the path needs
-    to be added to _files_by_path, but nothing needs to change in
-    _files_by_xcfilelikeelement, and the caller should have avoided adding
-    the PBXBuildFile if it is already present in the list of children.
-    """
-
-    xcfilelikeelement = pbxbuildfile._properties['fileRef']
-
-    paths = []
-    if path != None:
-      # It's best when the caller provides the path.
-      if isinstance(xcfilelikeelement, PBXVariantGroup):
-        paths.append(path)
-    else:
-      # If the caller didn't provide a path, there can be either multiple
-      # paths (PBXVariantGroup) or one.
-      if isinstance(xcfilelikeelement, PBXVariantGroup):
-        for variant in xcfilelikeelement._properties['children']:
-          paths.append(variant.FullPath())
-      else:
-        paths.append(xcfilelikeelement.FullPath())
-
-    # Add the paths first, because if something's going to raise, the
-    # messages provided by _AddPathToDict are more useful owing to its
-    # having access to a real pathname and not just an object's Name().
-    for a_path in paths:
-      self._AddPathToDict(pbxbuildfile, a_path)
-
-    # If another PBXBuildFile references this XCFileLikeElement, there's a
-    # problem.
-    if xcfilelikeelement in self._files_by_xcfilelikeelement and \
-       self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
-      raise ValueError('Found multiple build files for ' + \
-                       xcfilelikeelement.Name())
-    self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
-
-  def AppendBuildFile(self, pbxbuildfile, path=None):
-    # Callers should use this instead of calling
-    # AppendProperty('files', pbxbuildfile) directly because this function
-    # maintains the object's dicts.  Better yet, callers can just call AddFile
-    # with a pathname and not worry about building their own PBXBuildFile
-    # objects.
-    self.AppendProperty('files', pbxbuildfile)
-    self._AddBuildFileToDicts(pbxbuildfile, path)
-
-  def AddFile(self, path, settings=None):
-    (file_group, hierarchical) = self.FileGroup(path)
-    file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
-
-    if file_ref in self._files_by_xcfilelikeelement and \
-       isinstance(file_ref, PBXVariantGroup):
-      # There's already a PBXBuildFile in this phase corresponding to the
-      # PBXVariantGroup.  path just provides a new variant that belongs to
-      # the group.  Add the path to the dict.
-      pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
-      self._AddBuildFileToDicts(pbxbuildfile, path)
-    else:
-      # Add a new PBXBuildFile to get file_ref into the phase.
-      if settings is None:
-        pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
-      else:
-        pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
-      self.AppendBuildFile(pbxbuildfile, path)
-
-
-class PBXHeadersBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Headers'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXResourcesBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Resources'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXSourcesBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Sources'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXFrameworksBuildPhase(XCBuildPhase):
-  # No additions to the schema relative to XCBuildPhase.
-
-  def Name(self):
-    return 'Frameworks'
-
-  def FileGroup(self, path):
-    (root, ext) = posixpath.splitext(path)
-    if ext != '':
-      ext = ext[1:].lower()
-    if ext == 'o':
-      # .o files are added to Xcode Frameworks phases, but conceptually they
-      # aren't frameworks; they're more like sources or intermediates.
-      # Redirect them to show up in one of those other groups.
-      return self.PBXProjectAncestor().RootGroupForPath(path)
-    else:
-      return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
-  _schema = XCBuildPhase._schema.copy()
-  _schema.update({
-    'inputPaths':       [1, str, 0, 1, []],
-    'name':             [0, str, 0, 0],
-    'outputPaths':      [1, str, 0, 1, []],
-    'shellPath':        [0, str, 0, 1, '/bin/sh'],
-    'shellScript':      [0, str, 0, 1],
-    'showEnvVarsInLog': [0, int, 0, 0],
-  })
-
-  def Name(self):
-    if 'name' in self._properties:
-      return self._properties['name']
-
-    return 'ShellScript'
-
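# A brief sketch of the schema defaults and Name() fallback above, assuming
# gyp's pylib directory is on sys.path; the script and paths are placeholders.
from gyp import xcodeproj_file

phase = xcodeproj_file.PBXShellScriptBuildPhase({
    'name': 'Generate version header',
    'shellScript': 'python gen_version.py > "${DERIVED_FILE_DIR}/version.h"',
    'outputPaths': ['$(DERIVED_FILE_DIR)/version.h'],
})
assert phase.Name() == 'Generate version header'
assert phase._properties['shellPath'] == '/bin/sh'  # schema default

# Without an explicit name, Name() falls back to the generic label.
assert xcodeproj_file.PBXShellScriptBuildPhase().Name() == 'ShellScript'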
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
-  _schema = XCBuildPhase._schema.copy()
-  _schema.update({
-    'dstPath':          [0, str, 0, 1],
-    'dstSubfolderSpec': [0, int, 0, 1],
-    'name':             [0, str, 0, 0],
-  })
-
-  # path_tree_re matches "$(DIR)/path" or just "$(DIR)".  Match group 1 is
-  # "DIR", match group 3 is "path" or None.
-  path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
-  # path_tree_to_subfolder maps names of Xcode variables to the associated
-  # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
-  path_tree_to_subfolder = {
-    'BUILT_FRAMEWORKS_DIR': 10,  # Frameworks Directory
-    'BUILT_PRODUCTS_DIR': 16,  # Products Directory
-    # Other types that can be chosen via the Xcode UI.
-    # TODO(mark): Map Xcode variable names to these.
-    # : 1,  # Wrapper
-    # : 6,  # Executables: 6
-    # : 7,  # Resources
-    # : 15,  # Java Resources
-    # : 11,  # Shared Frameworks
-    # : 12,  # Shared Support
-    # : 13,  # PlugIns
-  }
-
-  def Name(self):
-    if 'name' in self._properties:
-      return self._properties['name']
-
-    return 'CopyFiles'
-
-  def FileGroup(self, path):
-    return self.PBXProjectAncestor().RootGroupForPath(path)
-
-  def SetDestination(self, path):
-    """Set the dstSubfolderSpec and dstPath properties from path.
-
-    path may be specified in the same notation used for XCHierarchicalElements,
-    specifically, "$(DIR)/path".
-    """
-
-    path_tree_match = self.path_tree_re.search(path)
-    if path_tree_match:
-      # Everything else needs to be relative to an Xcode variable.
-      path_tree = path_tree_match.group(1)
-      relative_path = path_tree_match.group(3)
-
-      if path_tree in self.path_tree_to_subfolder:
-        subfolder = self.path_tree_to_subfolder[path_tree]
-        if relative_path is None:
-          relative_path = ''
-      else:
-        # The path starts with an unrecognized Xcode variable
-        # name like $(SRCROOT).  Xcode will still handle this
-        # as an "absolute path" that starts with the variable.
-        subfolder = 0
-        relative_path = path
-    elif path.startswith('/'):
-      # Special case.  Absolute paths are in dstSubfolderSpec 0.
-      subfolder = 0
-      relative_path = path[1:]
-    else:
-      raise ValueError('Can\'t use path %s in a %s' % \
-                       (path, self.__class__.__name__))
-
-    self._properties['dstPath'] = relative_path
-    self._properties['dstSubfolderSpec'] = subfolder
-
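# A minimal sketch of SetDestination()'s path handling, assuming gyp's pylib
# directory is on sys.path; the destination paths are invented.
from gyp import xcodeproj_file

copy_phase = xcodeproj_file.PBXCopyFilesBuildPhase({'name': 'Copy plugins'})

# "$(BUILT_PRODUCTS_DIR)/..." maps to dstSubfolderSpec 16 (Products Directory)
# with the remainder as dstPath.
copy_phase.SetDestination('$(BUILT_PRODUCTS_DIR)/plugins')
assert copy_phase._properties['dstSubfolderSpec'] == 16
assert copy_phase._properties['dstPath'] == 'plugins'

# An unrecognized variable falls back to dstSubfolderSpec 0 with the whole
# string as dstPath; absolute paths also use spec 0, minus the leading slash.
copy_phase.SetDestination('$(SRCROOT)/staging')
assert copy_phase._properties['dstPath'] == '$(SRCROOT)/staging'
copy_phase.SetDestination('/Library/Frameworks')
assert copy_phase._properties['dstPath'] == 'Library/Frameworks'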
-
-class PBXBuildRule(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'compilerSpec': [0, str, 0, 1],
-    'filePatterns': [0, str, 0, 0],
-    'fileType':     [0, str, 0, 1],
-    'isEditable':   [0, int, 0, 1, 1],
-    'outputFiles':  [1, str, 0, 1, []],
-    'script':       [0, str, 0, 0],
-  })
-
-  def Name(self):
-    # Not very inspired, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.append(self._properties['fileType'])
-    if 'filePatterns' in self._properties:
-      hashables.append(self._properties['filePatterns'])
-    return hashables
-
-
-class PBXContainerItemProxy(XCObject):
-  # When referencing an item in this project file, containerPortal is the
-  # PBXProject root object of this project file.  When referencing an item in
-  # another project file, containerPortal is a PBXFileReference identifying
-  # the other project file.
-  #
-  # When serving as a proxy to an XCTarget (in this project file or another),
-  # proxyType is 1.  When serving as a proxy to a PBXFileReference (in another
-  # project file), proxyType is 2.  Type 2 is used for references to the
-  # products of the other project file's targets.
-  #
-  # Xcode is weird about remoteGlobalIDString.  Usually, it's printed without
-  # a comment, indicating that it's tracked internally simply as a string, but
-  # sometimes it's printed with a comment (usually when the object is initially
-  # created), indicating that it's tracked as a project file object at least
-  # sometimes.  This module always tracks it as an object, but contains a hack
-  # to prevent it from printing the comment in the project file output.  See
-  # _XCKVPrint.
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'containerPortal':      [0, XCContainerPortal, 0, 1],
-    'proxyType':            [0, int,               0, 1],
-    'remoteGlobalIDString': [0, XCRemoteObject,    0, 1],
-    'remoteInfo':           [0, str,               0, 1],
-  })
-
-  def __repr__(self):
-    props = self._properties
-    name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
-    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
-  def Name(self):
-    # Admittedly not the best name, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.extend(self._properties['containerPortal'].Hashables())
-    hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
-    return hashables
-
-
-class PBXTargetDependency(XCObject):
-  # The "target" property accepts an XCTarget object, and obviously not
-  # NoneType.  But XCTarget is defined below, so it can't be put into the
-  # schema yet.  The definition of PBXTargetDependency can't be moved below
-  # XCTarget because XCTarget's own schema references PBXTargetDependency.
-  # Python doesn't deal well with this circular relationship, and doesn't have
-  # a real way to do forward declarations.  To work around, the type of
-  # the "target" property is reset below, after XCTarget is defined.
-  #
-  # At least one of "name" and "target" is required.
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'name':        [0, str,                   0, 0],
-    'target':      [0, None.__class__,        0, 0],
-    'targetProxy': [0, PBXContainerItemProxy, 1, 1],
-  })
-
-  def __repr__(self):
-    name = self._properties.get('name') or self._properties['target'].Name()
-    return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
-  def Name(self):
-    # Admittedly not the best name, but it's what Xcode uses.
-    return self.__class__.__name__
-
-  def Hashables(self):
-    # super
-    hashables = XCObject.Hashables(self)
-
-    # Use the hashables of the weak objects that this object refers to.
-    hashables.extend(self._properties['targetProxy'].Hashables())
-    return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
-  _schema = XCFileLikeElement._schema.copy()
-  _schema.update({
-    'fileType':  [0, str,                   0, 1],
-    'path':      [0, str,                   0, 1],
-    'remoteRef': [0, PBXContainerItemProxy, 1, 1],
-  })
-
-
-class XCTarget(XCRemoteObject):
-  # An XCTarget is really just an XCObject; the XCRemoteObject base is only
-  # there to allow PBXProject to be used in the remoteGlobalIDString property
-  # of PBXContainerItemProxy.
-  #
-  # Setting a "name" property at instantiation may also affect "productName",
-  # which may in turn affect the "PRODUCT_NAME" build setting in children of
-  # "buildConfigurationList".  See __init__ below.
-  _schema = XCRemoteObject._schema.copy()
-  _schema.update({
-    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
-                               XCConfigurationList()],
-    'buildPhases':            [1, XCBuildPhase,        1, 1, []],
-    'dependencies':           [1, PBXTargetDependency, 1, 1, []],
-    'name':                   [0, str,                 0, 1],
-    'productName':            [0, str,                 0, 1],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None,
-               force_outdir=None, force_prefix=None, force_extension=None):
-    # super
-    XCRemoteObject.__init__(self, properties, id, parent)
-
-    # Set up additional defaults not expressed in the schema.  If a "name"
-    # property was supplied, set "productName" if it is not present.  Also set
-    # the "PRODUCT_NAME" build setting in each configuration, but only if
-    # the setting is not present in any build configuration.
-    if 'name' in self._properties:
-      if not 'productName' in self._properties:
-        self.SetProperty('productName', self._properties['name'])
-
-    if 'productName' in self._properties:
-      if 'buildConfigurationList' in self._properties:
-        configs = self._properties['buildConfigurationList']
-        if configs.HasBuildSetting('PRODUCT_NAME') == 0:
-          configs.SetBuildSetting('PRODUCT_NAME',
-                                  self._properties['productName'])
-
-  def AddDependency(self, other):
-    pbxproject = self.PBXProjectAncestor()
-    other_pbxproject = other.PBXProjectAncestor()
-    if pbxproject == other_pbxproject:
-      # Add a dependency to another target in the same project file.
-      container = PBXContainerItemProxy({'containerPortal':      pbxproject,
-                                         'proxyType':            1,
-                                         'remoteGlobalIDString': other,
-                                         'remoteInfo':           other.Name()})
-      dependency = PBXTargetDependency({'target':      other,
-                                        'targetProxy': container})
-      self.AppendProperty('dependencies', dependency)
-    else:
-      # Add a dependency to a target in a different project file.
-      other_project_ref = \
-          pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
-      container = PBXContainerItemProxy({
-            'containerPortal':      other_project_ref,
-            'proxyType':            1,
-            'remoteGlobalIDString': other,
-            'remoteInfo':           other.Name(),
-          })
-      dependency = PBXTargetDependency({'name':        other.Name(),
-                                        'targetProxy': container})
-      self.AppendProperty('dependencies', dependency)
-
-  # Proxy all of these through to the build configuration list.
-
-  def ConfigurationNamed(self, name):
-    return self._properties['buildConfigurationList'].ConfigurationNamed(name)
-
-  def DefaultConfiguration(self):
-    return self._properties['buildConfigurationList'].DefaultConfiguration()
-
-  def HasBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].HasBuildSetting(key)
-
-  def GetBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].GetBuildSetting(key)
-
-  def SetBuildSetting(self, key, value):
-    return self._properties['buildConfigurationList'].SetBuildSetting(key, \
-                                                                      value)
-
-  def AppendBuildSetting(self, key, value):
-    return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
-                                                                         value)
-
-  def DelBuildSetting(self, key):
-    return self._properties['buildConfigurationList'].DelBuildSetting(key)
-
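# A brief sketch of the productName/PRODUCT_NAME defaulting and the
# build-setting proxying above, assuming gyp's pylib directory is on
# sys.path; the target name and settings are invented.
from gyp import xcodeproj_file

target = xcodeproj_file.XCTarget({'name': 'hello'})

# "productName" is defaulted from "name", and PRODUCT_NAME is pushed into the
# default Debug/Release configurations.
assert target._properties['productName'] == 'hello'
assert target.HasBuildSetting('PRODUCT_NAME') == 1
assert target.GetBuildSetting('PRODUCT_NAME') == 'hello'

# The setting accessors simply forward to the buildConfigurationList.
target.SetBuildSetting('GCC_OPTIMIZATION_LEVEL', '0')
release = target.ConfigurationNamed('Release')
assert release.GetBuildSetting('GCC_OPTIMIZATION_LEVEL') == '0'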
-
-# Redefine the type of the "target" property.  See PBXTargetDependency._schema
-# above.
-PBXTargetDependency._schema['target'][1] = XCTarget
-
-
-class PBXNativeTarget(XCTarget):
-  # buildPhases is overridden in the schema to be able to set defaults.
-  #
-  # NOTE: Contrary to most objects, it is advisable to set parent when
-  # constructing PBXNativeTarget.  A parent of an XCTarget must be a PBXProject
-  # object.  A parent reference is required for a PBXNativeTarget during
-  # construction to be able to set up the target defaults for productReference,
-  # because a PBXBuildFile object must be created for the target and it must
-  # be added to the PBXProject's mainGroup hierarchy.
-  _schema = XCTarget._schema.copy()
-  _schema.update({
-    'buildPhases':      [1, XCBuildPhase,     1, 1,
-                         [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
-    'buildRules':       [1, PBXBuildRule,     1, 1, []],
-    'productReference': [0, PBXFileReference, 0, 1],
-    'productType':      [0, str,              0, 1],
-  })
-
-  # Mapping from Xcode product-types to settings.  The settings are:
-  #  filetype : used for explicitFileType in the project file
-  #  prefix : the prefix for the file name
-  #  suffix : the suffix for the file name
-  _product_filetypes = {
-    'com.apple.product-type.application':           ['wrapper.application',
-                                                     '', '.app'],
-    'com.apple.product-type.application.watchapp':  ['wrapper.application',
-                                                     '', '.app'],
-    'com.apple.product-type.watchkit-extension':    ['wrapper.app-extension',
-                                                     '', '.appex'],
-    'com.apple.product-type.app-extension':         ['wrapper.app-extension',
-                                                     '', '.appex'],
-    'com.apple.product-type.bundle':            ['wrapper.cfbundle',
-                                                 '', '.bundle'],
-    'com.apple.product-type.framework':         ['wrapper.framework',
-                                                 '', '.framework'],
-    'com.apple.product-type.library.dynamic':   ['compiled.mach-o.dylib',
-                                                 'lib', '.dylib'],
-    'com.apple.product-type.library.static':    ['archive.ar',
-                                                 'lib', '.a'],
-    'com.apple.product-type.tool':              ['compiled.mach-o.executable',
-                                                 '', ''],
-    'com.apple.product-type.bundle.unit-test':  ['wrapper.cfbundle',
-                                                 '', '.xctest'],
-    'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle',
-                                                 '', '.xctest'],
-    'com.googlecode.gyp.xcode.bundle':          ['compiled.mach-o.dylib',
-                                                 '', '.so'],
-    'com.apple.product-type.kernel-extension':  ['wrapper.kext',
-                                                 '', '.kext'],
-  }
-
-  def __init__(self, properties=None, id=None, parent=None,
-               force_outdir=None, force_prefix=None, force_extension=None):
-    # super
-    XCTarget.__init__(self, properties, id, parent)
-
-    if 'productName' in self._properties and \
-       'productType' in self._properties and \
-       not 'productReference' in self._properties and \
-       self._properties['productType'] in self._product_filetypes:
-      products_group = None
-      pbxproject = self.PBXProjectAncestor()
-      if pbxproject != None:
-        products_group = pbxproject.ProductsGroup()
-
-      if products_group != None:
-        (filetype, prefix, suffix) = \
-            self._product_filetypes[self._properties['productType']]
-        # Xcode does not have a distinct type for loadable modules that are
-        # pure BSD targets (not in a bundle wrapper). GYP allows such modules
-        # to be specified by setting a target type to loadable_module without
-        # having mac_bundle set. These are mapped to the pseudo-product type
-        # com.googlecode.gyp.xcode.bundle.
-        #
-        # By picking up this special type and converting it to a dynamic
-        # library (com.apple.product-type.library.dynamic) with fix-ups,
-        # single-file loadable modules can be produced.
-        #
-        # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
-        # (as opposed to mh_dylib). In order for linking to succeed,
-        # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
-        # cleared. They are meaningless for type mh_bundle.
-        #
-        # Finally, the .so extension is forcibly applied over the default
-        # (.dylib), unless another forced extension is already selected.
-        # .dylib is plainly wrong, and .bundle is used by loadable_modules in
-        # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
-        # choice because it's used as the extension on many other systems that
-        # don't distinguish between linkable shared libraries and non-linkable
-        # loadable modules, but there's precedent: Python loadable modules on
-        # Mac OS X use an .so extension.
-        if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
-          self._properties['productType'] = \
-              'com.apple.product-type.library.dynamic'
-          self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
-          self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
-          self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
-          if force_extension is None:
-            force_extension = suffix[1:]
-
-        if self._properties['productType'] == \
-           'com.apple.product-type.bundle.unit-test' or \
-           self._properties['productType'] == \
-           'com.apple.product-type.bundle.ui-testing':
-          if force_extension is None:
-            force_extension = suffix[1:]
-
-        if force_extension is not None:
-          # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
-          # Extension override.
-          suffix = '.' + force_extension
-          if filetype.startswith('wrapper.'):
-            self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
-          else:
-            self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
-
-          if filetype.startswith('compiled.mach-o.executable'):
-            product_name = self._properties['productName']
-            product_name += suffix
-            suffix = ''
-            self.SetProperty('productName', product_name)
-            self.SetBuildSetting('PRODUCT_NAME', product_name)
-
-        # Xcode handles most prefixes based on the target type, however there
-        # are exceptions.  If a "BSD Dynamic Library" target is added in the
-        # Xcode UI, Xcode sets EXECUTABLE_PREFIX.  This check duplicates that
-        # behavior.
-        if force_prefix is not None:
-          prefix = force_prefix
-        if filetype.startswith('wrapper.'):
-          self.SetBuildSetting('WRAPPER_PREFIX', prefix)
-        else:
-          self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
-
-        if force_outdir is not None:
-          self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
-
-        # TODO(tvl): Remove the below hack.
-        #    http://code.google.com/p/gyp/issues/detail?id=122
-
-        # Some targets include the prefix in the target_name.  These targets
-        # really should just add a product_name setting that doesn't include
-        # the prefix.  For example:
-        #  target_name = 'libevent', product_name = 'event'
-        # This check cleans up for them.
-        product_name = self._properties['productName']
-        prefix_len = len(prefix)
-        if prefix_len and (product_name[:prefix_len] == prefix):
-          product_name = product_name[prefix_len:]
-          self.SetProperty('productName', product_name)
-          self.SetBuildSetting('PRODUCT_NAME', product_name)
-
-        ref_props = {
-          'explicitFileType': filetype,
-          'includeInIndex':   0,
-          'path':             prefix + product_name + suffix,
-          'sourceTree':       'BUILT_PRODUCTS_DIR',
-        }
-        file_ref = PBXFileReference(ref_props)
-        products_group.AppendChild(file_ref)
-        self.SetProperty('productReference', file_ref)
-
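For orientation, the fix-up described in the comment above amounts to a handful of build-setting overrides. A minimal illustrative sketch (a plain dict, not the real SetBuildSetting plumbing):

    # Hypothetical summary of the settings applied to a bare loadable_module
    # once its productType is rewritten to the dynamic-library type.
    bare_loadable_module_overrides = {
        'MACH_O_TYPE': 'mh_bundle',           # bundle file type, not mh_dylib
        'DYLIB_CURRENT_VERSION': '',          # meaningless for mh_bundle
        'DYLIB_COMPATIBILITY_VERSION': '',    # likewise cleared
        'EXECUTABLE_EXTENSION': 'so',         # forced over the default .dylib
    }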
-  def GetBuildPhaseByType(self, type):
-    if not 'buildPhases' in self._properties:
-      return None
-
-    the_phase = None
-    for phase in self._properties['buildPhases']:
-      if isinstance(phase, type):
-        # Some phases may be present in multiples in a well-formed project file,
-        # but phases like PBXSourcesBuildPhase may only be present singly, and
-        # the singular-phase accessors rely on this function.  Loop
-        # over the entire list of phases and assert if more than one of the
-        # desired type is found.
-        assert the_phase is None
-        the_phase = phase
-
-    return the_phase
-
-  def HeadersPhase(self):
-    headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
-    if headers_phase is None:
-      headers_phase = PBXHeadersBuildPhase()
-
-      # The headers phase should come before the resources, sources, and
-      # frameworks phases, if any.
-      insert_at = len(self._properties['buildPhases'])
-      for index in xrange(0, len(self._properties['buildPhases'])):
-        phase = self._properties['buildPhases'][index]
-        if isinstance(phase, PBXResourcesBuildPhase) or \
-           isinstance(phase, PBXSourcesBuildPhase) or \
-           isinstance(phase, PBXFrameworksBuildPhase):
-          insert_at = index
-          break
-
-      self._properties['buildPhases'].insert(insert_at, headers_phase)
-      headers_phase.parent = self
-
-    return headers_phase
-
-  def ResourcesPhase(self):
-    resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
-    if resources_phase is None:
-      resources_phase = PBXResourcesBuildPhase()
-
-      # The resources phase should come before the sources and frameworks
-      # phases, if any.
-      insert_at = len(self._properties['buildPhases'])
-      for index in xrange(0, len(self._properties['buildPhases'])):
-        phase = self._properties['buildPhases'][index]
-        if isinstance(phase, PBXSourcesBuildPhase) or \
-           isinstance(phase, PBXFrameworksBuildPhase):
-          insert_at = index
-          break
-
-      self._properties['buildPhases'].insert(insert_at, resources_phase)
-      resources_phase.parent = self
-
-    return resources_phase
-
-  def SourcesPhase(self):
-    sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
-    if sources_phase is None:
-      sources_phase = PBXSourcesBuildPhase()
-      self.AppendProperty('buildPhases', sources_phase)
-
-    return sources_phase
-
-  def FrameworksPhase(self):
-    frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
-    if frameworks_phase is None:
-      frameworks_phase = PBXFrameworksBuildPhase()
-      self.AppendProperty('buildPhases', frameworks_phase)
-
-    return frameworks_phase
-
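As a reading aid for the phase accessors above, a short illustrative sketch of the ordering they maintain; 'target' is a hypothetical PBXNativeTarget instance:

    target.SourcesPhase()    # appends a PBXSourcesBuildPhase
    target.HeadersPhase()    # inserts ahead of resources/sources/frameworks
    target.ResourcesPhase()  # inserts ahead of sources/frameworks
    # target._properties['buildPhases'] now reads:
    #   [PBXHeadersBuildPhase, PBXResourcesBuildPhase, PBXSourcesBuildPhase]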
-  def AddDependency(self, other):
-    # super
-    XCTarget.AddDependency(self, other)
-
-    static_library_type = 'com.apple.product-type.library.static'
-    shared_library_type = 'com.apple.product-type.library.dynamic'
-    framework_type = 'com.apple.product-type.framework'
-    if isinstance(other, PBXNativeTarget) and \
-       'productType' in self._properties and \
-       self._properties['productType'] != static_library_type and \
-       'productType' in other._properties and \
-       (other._properties['productType'] == static_library_type or \
-        ((other._properties['productType'] == shared_library_type or \
-          other._properties['productType'] == framework_type) and \
-         ((not other.HasBuildSetting('MACH_O_TYPE')) or
-          other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
-
-      file_ref = other.GetProperty('productReference')
-
-      pbxproject = self.PBXProjectAncestor()
-      other_pbxproject = other.PBXProjectAncestor()
-      if pbxproject != other_pbxproject:
-        other_project_product_group = \
-            pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
-        file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
-
-      self.FrameworksPhase().AppendProperty('files',
-                                            PBXBuildFile({'fileRef': file_ref}))
-
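The linking condition above is dense; as a reading aid, the same rule restated as a standalone predicate. The helper name and parameters are illustrative, not part of this module:

    def _should_link_product(self_type, other_type, other_mach_o_type=None):
        # Link the dependency's product through the frameworks phase unless
        # this target is itself a static library, and only when the dependency
        # is a static library, or a dylib/framework that is not an mh_bundle.
        # other_mach_o_type is None when MACH_O_TYPE is not set.
        static = 'com.apple.product-type.library.static'
        dynamic = 'com.apple.product-type.library.dynamic'
        framework = 'com.apple.product-type.framework'
        if self_type == static:
            return False
        if other_type == static:
            return True
        return (other_type in (dynamic, framework) and
                other_mach_o_type != 'mh_bundle')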
-
-class PBXAggregateTarget(XCTarget):
-  pass
-
-
-class PBXProject(XCContainerPortal):
-  # A PBXProject is really just an XCObject, the XCContainerPortal thing is
-  # just to allow PBXProject to be used in the containerPortal property of
-  # PBXContainerItemProxy.
-  """
-
-  Attributes:
-    path: "sample.xcodeproj".  TODO(mark) Document me!
-    _other_pbxprojects: A dictionary, keyed by other PBXProject objects.  Each
-                        value is a reference to the dict in the
-                        projectReferences list associated with the keyed
-                        PBXProject.
-  """
-
-  _schema = XCContainerPortal._schema.copy()
-  _schema.update({
-    'attributes':             [0, dict,                0, 0],
-    'buildConfigurationList': [0, XCConfigurationList, 1, 1,
-                               XCConfigurationList()],
-    'compatibilityVersion':   [0, str,                 0, 1, 'Xcode 3.2'],
-    'hasScannedForEncodings': [0, int,                 0, 1, 1],
-    'mainGroup':              [0, PBXGroup,            1, 1, PBXGroup()],
-    'projectDirPath':         [0, str,                 0, 1, ''],
-    'projectReferences':      [1, dict,                0, 0],
-    'projectRoot':            [0, str,                 0, 1, ''],
-    'targets':                [1, XCTarget,            1, 1, []],
-  })
-
-  def __init__(self, properties=None, id=None, parent=None, path=None):
-    self.path = path
-    self._other_pbxprojects = {}
-    # super
-    return XCContainerPortal.__init__(self, properties, id, parent)
-
-  def Name(self):
-    name = self.path
-    if name[-10:] == '.xcodeproj':
-      name = name[:-10]
-    return posixpath.basename(name)
-
-  def Path(self):
-    return self.path
-
-  def Comment(self):
-    return 'Project object'
-
-  def Children(self):
-    # super
-    children = XCContainerPortal.Children(self)
-
-    # Add children that the schema doesn't know about.  Maybe there's a more
-    # elegant way around this, but this is the only case where we need to own
-    # objects in a dictionary (that is itself in a list), and three lines for
-    # a one-off isn't that big a deal.
-    if 'projectReferences' in self._properties:
-      for reference in self._properties['projectReferences']:
-        children.append(reference['ProductGroup'])
-
-    return children
-
-  def PBXProjectAncestor(self):
-    return self
-
-  def _GroupByName(self, name):
-    if not 'mainGroup' in self._properties:
-      self.SetProperty('mainGroup', PBXGroup())
-
-    main_group = self._properties['mainGroup']
-    group = main_group.GetChildByName(name)
-    if group is None:
-      group = PBXGroup({'name': name})
-      main_group.AppendChild(group)
-
-    return group
-
-  # SourceGroup and ProductsGroup are created by default in Xcode's own
-  # templates.
-  def SourceGroup(self):
-    return self._GroupByName('Source')
-
-  def ProductsGroup(self):
-    return self._GroupByName('Products')
-
-  # IntermediatesGroup is used to collect source-like files that are generated
-  # by rules or script phases and are placed in intermediate directories such
-  # as DerivedSources.
-  def IntermediatesGroup(self):
-    return self._GroupByName('Intermediates')
-
-  # FrameworksGroup and ProjectsGroup are top-level groups used to collect
-  # frameworks and projects.
-  def FrameworksGroup(self):
-    return self._GroupByName('Frameworks')
-
-  def ProjectsGroup(self):
-    return self._GroupByName('Projects')
-
-  def RootGroupForPath(self, path):
-    """Returns a PBXGroup child of this object to which path should be added.
-
-    This method is intended to choose between SourceGroup and
-    IntermediatesGroup on the basis of whether path is present in a source
-    directory or an intermediates directory.  For the purposes of this
-    determination, any path located within a derived file directory such as
-    PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
-    directory.
-
-    The returned value is a two-element tuple.  The first element is the
-    PBXGroup, and the second element specifies whether that group should be
-    organized hierarchically (True) or as a single flat list (False).
-    """
-
-    # TODO(mark): make this a class variable and bind to self on call?
-    # Also, this list is nowhere near exhaustive.
-    # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
-    # gyp.generator.xcode.  There should probably be some way for that module
-    # to push the names in, rather than having to hard-code them here.
-    source_tree_groups = {
-      'DERIVED_FILE_DIR':         (self.IntermediatesGroup, True),
-      'INTERMEDIATE_DIR':         (self.IntermediatesGroup, True),
-      'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
-      'SHARED_INTERMEDIATE_DIR':  (self.IntermediatesGroup, True),
-    }
-
-    (source_tree, path) = SourceTreeAndPathFromPath(path)
-    if source_tree != None and source_tree in source_tree_groups:
-      (group_func, hierarchical) = source_tree_groups[source_tree]
-      group = group_func()
-      return (group, hierarchical)
-
-    # TODO(mark): make additional choices based on file extension.
-
-    return (self.SourceGroup(), True)
-
-  def AddOrGetFileInRootGroup(self, path):
-    """Returns a PBXFileReference corresponding to path in the correct group
-    according to RootGroupForPath's heuristics.
-
-    If an existing PBXFileReference for path exists, it will be returned.
-    Otherwise, one will be created and returned.
-    """
-
-    (group, hierarchical) = self.RootGroupForPath(path)
-    return group.AddOrGetFileByPath(path, hierarchical)
-
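A brief usage sketch of the grouping heuristic, assuming a hypothetical populated PBXProject named 'project'; paths rooted in derived-file variables land in the Intermediates group, everything else in Source:

    group, hierarchical = project.RootGroupForPath('$(INTERMEDIATE_DIR)/gen.cc')
    # group is project.IntermediatesGroup(), hierarchical is True

    file_ref = project.AddOrGetFileInRootGroup('src/widget.cc')
    # widget.cc is filed (hierarchically) under the Source group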
-  def RootGroupsTakeOverOnlyChildren(self, recurse=False):
-    """Calls TakeOverOnlyChild for all groups in the main group."""
-
-    for group in self._properties['mainGroup']._properties['children']:
-      if isinstance(group, PBXGroup):
-        group.TakeOverOnlyChild(recurse)
-
-  def SortGroups(self):
-    # Sort the children of the mainGroup (like "Source" and "Products")
-    # according to their defined order.
-    self._properties['mainGroup']._properties['children'] = \
-        sorted(self._properties['mainGroup']._properties['children'],
-               cmp=lambda x,y: x.CompareRootGroup(y))
-
-    # Sort everything else by putting group before files, and going
-    # alphabetically by name within sections of groups and files.  SortGroup
-    # is recursive.
-    for group in self._properties['mainGroup']._properties['children']:
-      if not isinstance(group, PBXGroup):
-        continue
-
-      if group.Name() == 'Products':
-        # The Products group is a special case.  Instead of sorting
-        # alphabetically, sort things in the order of the targets that
-        # produce the products.  To do this, just build up a new list of
-        # products based on the targets.
-        products = []
-        for target in self._properties['targets']:
-          if not isinstance(target, PBXNativeTarget):
-            continue
-          product = target._properties['productReference']
-          # Make sure that the product is already in the products group.
-          assert product in group._properties['children']
-          products.append(product)
-
-        # Make sure that this process doesn't miss anything that was already
-        # in the products group.
-        assert len(products) == len(group._properties['children'])
-        group._properties['children'] = products
-      else:
-        group.SortGroup()
-
-  def AddOrGetProjectReference(self, other_pbxproject):
-    """Add a reference to another project file (via PBXProject object) to this
-    one.
-
-    Returns [ProductGroup, ProjectRef].  ProductGroup is a PBXGroup object in
-    this project file that contains a PBXReferenceProxy object for each
-    product of each PBXNativeTarget in the other project file.  ProjectRef is
-    a PBXFileReference to the other project file.
-
-    If this project file already references the other project file, the
-    existing ProductGroup and ProjectRef are returned.  The ProductGroup will
-    still be updated if necessary.
-    """
-
-    if not 'projectReferences' in self._properties:
-      self._properties['projectReferences'] = []
-
-    product_group = None
-    project_ref = None
-
-    if not other_pbxproject in self._other_pbxprojects:
-      # This project file isn't yet linked to the other one.  Establish the
-      # link.
-      product_group = PBXGroup({'name': 'Products'})
-
-      # ProductGroup is strong.
-      product_group.parent = self
-
-      # There's nothing unique about this PBXGroup, and if left alone, it will
-      # wind up with the same set of hashables as all other PBXGroup objects
-      # owned by the projectReferences list.  Add the hashables of the
-      # remote PBXProject that it's related to.
-      product_group._hashables.extend(other_pbxproject.Hashables())
-
-      # The other project reports its path as relative to the same directory
-      # that this project's path is relative to.  The other project's path
-      # is not necessarily already relative to this project.  Figure out the
-      # pathname that this project needs to use to refer to the other one.
-      this_path = posixpath.dirname(self.Path())
-      projectDirPath = self.GetProperty('projectDirPath')
-      if projectDirPath:
-        if posixpath.isabs(projectDirPath[0]):
-          this_path = projectDirPath
-        else:
-          this_path = posixpath.join(this_path, projectDirPath)
-      other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
-      # ProjectRef is weak (it's owned by the mainGroup hierarchy).
-      project_ref = PBXFileReference({
-            'lastKnownFileType': 'wrapper.pb-project',
-            'path':              other_path,
-            'sourceTree':        'SOURCE_ROOT',
-          })
-      self.ProjectsGroup().AppendChild(project_ref)
-
-      ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
-      self._other_pbxprojects[other_pbxproject] = ref_dict
-      self.AppendProperty('projectReferences', ref_dict)
-
-      # Xcode seems to sort this list case-insensitively
-      self._properties['projectReferences'] = \
-          sorted(self._properties['projectReferences'], cmp=lambda x,y:
-                 cmp(x['ProjectRef'].Name().lower(),
-                     y['ProjectRef'].Name().lower()))
-    else:
-      # The link already exists.  Pull out the relevant data.
-      project_ref_dict = self._other_pbxprojects[other_pbxproject]
-      product_group = project_ref_dict['ProductGroup']
-      project_ref = project_ref_dict['ProjectRef']
-
-    self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
-    inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
-    targets = other_pbxproject.GetProperty('targets')
-    if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
-      dir_path = project_ref._properties['path']
-      product_group._hashables.extend(dir_path)
-
-    return [product_group, project_ref]
-
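A minimal usage sketch, assuming two hypothetical, already-populated PBXProject objects; the call is idempotent, returning the stored ProductGroup/ProjectRef pair on later invocations and merely refreshing the product references:

    product_group, project_ref = this_project.AddOrGetProjectReference(other_project)
    again_group, again_ref = this_project.AddOrGetProjectReference(other_project)
    # again_group is product_group; again_ref is project_ref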
-  def _AllSymrootsUnique(self, target, inherit_unique_symroot):
-    # Returns True if all configurations have a unique 'SYMROOT' attribute.
-    # The value of inherit_unique_symroot decides whether a configuration is
-    # assumed to inherit a unique 'SYMROOT' attribute from its parent when it
-    # doesn't define an explicit value for 'SYMROOT'.
-    symroots = self._DefinedSymroots(target)
-    for s in self._DefinedSymroots(target):
-      if (s is not None and not self._IsUniqueSymrootForTarget(s) or
-          s is None and not inherit_unique_symroot):
-        return False
-    return True if symroots else inherit_unique_symroot
-
-  def _DefinedSymroots(self, target):
-    # Returns all values for the 'SYMROOT' attribute defined in all
-    # configurations for this target. If any configuration doesn't define the
-    # 'SYMROOT' attribute, None is added to the returned set. If none of the
-    # configurations defines the 'SYMROOT' attribute, an empty set is
-    # returned.
-    config_list = target.GetProperty('buildConfigurationList')
-    symroots = set()
-    for config in config_list.GetProperty('buildConfigurations'):
-      setting = config.GetProperty('buildSettings')
-      if 'SYMROOT' in setting:
-        symroots.add(setting['SYMROOT'])
-      else:
-        symroots.add(None)
-    if len(symroots) == 1 and None in symroots:
-      return set()
-    return symroots
-
-  def _IsUniqueSymrootForTarget(self, symroot):
-    # This method returns True if all configurations in target contain a
-    # 'SYMROOT' attribute that is unique for the given target. A value is
-    # unique, if the Xcode macro '$SRCROOT' appears in it in any form.
-    uniquifier = ['$SRCROOT', '$(SRCROOT)']
-    if any(x in symroot for x in uniquifier):
-      return True
-    return False
-
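The uniqueness test above hinges only on whether an $SRCROOT form appears somewhere in the value; a couple of illustrative cases, assuming a hypothetical 'project' instance:

    project._IsUniqueSymrootForTarget('$(SRCROOT)/xcodebuild')  # True: per-target
    project._IsUniqueSymrootForTarget('sub/$SRCROOT/out')       # True: per-target
    project._IsUniqueSymrootForTarget('/tmp/shared_symroot')    # False: shared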
-  def _SetUpProductReferences(self, other_pbxproject, product_group,
-                              project_ref):
-    # TODO(mark): This only adds references to products in other_pbxproject
-    # when they don't exist in this pbxproject.  Perhaps it should also
-    # remove references from this pbxproject that are no longer present in
-    # other_pbxproject.  Perhaps it should update various properties if they
-    # change.
-    for target in other_pbxproject._properties['targets']:
-      if not isinstance(target, PBXNativeTarget):
-        continue
-
-      other_fileref = target._properties['productReference']
-      if product_group.GetChildByRemoteObject(other_fileref) is None:
-        # Xcode sets remoteInfo to the name of the target and not the name
-        # of its product, despite this proxy being a reference to the product.
-        container_item = PBXContainerItemProxy({
-              'containerPortal':      project_ref,
-              'proxyType':            2,
-              'remoteGlobalIDString': other_fileref,
-              'remoteInfo':           target.Name()
-            })
-        # TODO(mark): Does sourceTree get copied straight over from the other
-        # project?  Can the other project ever have lastKnownFileType here
-        # instead of explicitFileType?  (Use it if so?)  Can path ever be
-        # unset?  (I don't think so.)  Can other_fileref have name set, and
-        # does it impact the PBXReferenceProxy if so?  These are the questions
-        # that perhaps will be answered one day.
-        reference_proxy = PBXReferenceProxy({
-              'fileType':   other_fileref._properties['explicitFileType'],
-              'path':       other_fileref._properties['path'],
-              'sourceTree': other_fileref._properties['sourceTree'],
-              'remoteRef':  container_item,
-            })
-
-        product_group.AppendChild(reference_proxy)
-
-  def SortRemoteProductReferences(self):
-    # For each remote project file, sort the associated ProductGroup in the
-    # same order that the targets are sorted in the remote project file.  This
-    # is the sort order used by Xcode.
-
-    def CompareProducts(x, y, remote_products):
-      # x and y are PBXReferenceProxy objects.  Go through their associated
-      # PBXContainerItem to get the remote PBXFileReference, which will be
-      # present in the remote_products list.
-      x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
-      y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
-      x_index = remote_products.index(x_remote)
-      y_index = remote_products.index(y_remote)
-
-      # Use the order of each remote PBXFileReference in remote_products to
-      # determine the sort order.
-      return cmp(x_index, y_index)
-
-    for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
-      # Build up a list of products in the remote project file, ordered the
-      # same as the targets that produce them.
-      remote_products = []
-      for target in other_pbxproject._properties['targets']:
-        if not isinstance(target, PBXNativeTarget):
-          continue
-        remote_products.append(target._properties['productReference'])
-
-      # Sort the PBXReferenceProxy children according to the list of remote
-      # products.
-      product_group = ref_dict['ProductGroup']
-      product_group._properties['children'] = sorted(
-          product_group._properties['children'],
-          cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp))
-
-
-class XCProjectFile(XCObject):
-  _schema = XCObject._schema.copy()
-  _schema.update({
-    'archiveVersion': [0, int,        0, 1, 1],
-    'classes':        [0, dict,       0, 1, {}],
-    'objectVersion':  [0, int,        0, 1, 46],
-    'rootObject':     [0, PBXProject, 1, 1],
-  })
-
-  def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
-    # Although XCProjectFile is implemented here as an XCObject, it's not a
-    # proper object in the Xcode sense, and it certainly doesn't have its own
-    # ID.  Pass through an attempt to update IDs to the real root object.
-    if recursive:
-      self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
-
-  def Print(self, file=sys.stdout):
-    self.VerifyHasRequiredProperties()
-
-    # Add the special "objects" property, which will be caught and handled
-    # separately during printing.  This structure allows a fairly standard
-    # loop to do the normal printing.
-    self._properties['objects'] = {}
-    self._XCPrint(file, 0, '// !$*UTF8*$!\n')
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '{ ')
-    else:
-      self._XCPrint(file, 0, '{\n')
-    for property, value in sorted(self._properties.iteritems(),
-                                  cmp=lambda x, y: cmp(x, y)):
-      if property == 'objects':
-        self._PrintObjects(file)
-      else:
-        self._XCKVPrint(file, 1, property, value)
-    self._XCPrint(file, 0, '}\n')
-    del self._properties['objects']
-
-  def _PrintObjects(self, file):
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, 'objects = {')
-    else:
-      self._XCPrint(file, 1, 'objects = {\n')
-
-    objects_by_class = {}
-    for object in self.Descendants():
-      if object == self:
-        continue
-      class_name = object.__class__.__name__
-      if not class_name in objects_by_class:
-        objects_by_class[class_name] = []
-      objects_by_class[class_name].append(object)
-
-    for class_name in sorted(objects_by_class):
-      self._XCPrint(file, 0, '\n')
-      self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
-      for object in sorted(objects_by_class[class_name],
-                           cmp=lambda x, y: cmp(x.id, y.id)):
-        object.Print(file)
-      self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
-
-    if self._should_print_single_line:
-      self._XCPrint(file, 0, '}; ')
-    else:
-      self._XCPrint(file, 1, '};\n')
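For orientation, a hedged sketch of how a generator might drive XCProjectFile to serialize a project; 'root_project' stands in for a fully populated PBXProject and the output path is hypothetical:

    project_file = XCProjectFile({'rootObject': root_project})
    project_file.ComputeIDs()
    with open('sample.xcodeproj/project.pbxproj', 'w') as output:
        project_file.Print(output)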
diff --git a/tools/gyp/pylib/gyp/xml_fix.py b/tools/gyp/pylib/gyp/xml_fix.py
deleted file mode 100644
index 5de8481..0000000
--- a/tools/gyp/pylib/gyp/xml_fix.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Applies a fix to CR LF TAB handling in xml.dom.
-
-Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
-Working around this: http://bugs.python.org/issue5752
-TODO(bradnelson): Consider dropping this when we drop XP support.
-"""
-
-
-import xml.dom.minidom
-
-
-def _Replacement_write_data(writer, data, is_attrib=False):
-  """Writes datachars to writer."""
-  data = data.replace("&", "&amp;").replace("<", "&lt;")
-  data = data.replace("\"", "&quot;").replace(">", "&gt;")
-  if is_attrib:
-    data = data.replace(
-        "\r", "&#xD;").replace(
-        "\n", "&#xA;").replace(
-        "\t", "&#x9;")
-  writer.write(data)
-
-
-def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
-  # indent = current indentation
-  # addindent = indentation to add to higher levels
-  # newl = newline string
-  writer.write(indent+"<" + self.tagName)
-
-  attrs = self._get_attributes()
-  a_names = attrs.keys()
-  a_names.sort()
-
-  for a_name in a_names:
-    writer.write(" %s=\"" % a_name)
-    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
-    writer.write("\"")
-  if self.childNodes:
-    writer.write(">%s" % newl)
-    for node in self.childNodes:
-      node.writexml(writer, indent + addindent, addindent, newl)
-    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
-  else:
-    writer.write("/>%s" % newl)
-
-
-class XmlFix(object):
-  """Object to manage temporary patching of xml.dom.minidom."""
-
-  def __init__(self):
-    # Preserve current xml.dom.minidom functions.
-    self.write_data = xml.dom.minidom._write_data
-    self.writexml = xml.dom.minidom.Element.writexml
-    # Inject replacement versions of a function and a method.
-    xml.dom.minidom._write_data = _Replacement_write_data
-    xml.dom.minidom.Element.writexml = _Replacement_writexml
-
-  def Cleanup(self):
-    if self.write_data:
-      xml.dom.minidom._write_data = self.write_data
-      xml.dom.minidom.Element.writexml = self.writexml
-      self.write_data = None
-
-  def __del__(self):
-    self.Cleanup()
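The deleted module patches xml.dom.minidom only for the lifetime of an XmlFix instance; a minimal usage sketch under that assumption:

    import xml.dom.minidom

    fix = XmlFix()
    try:
        doc = xml.dom.minidom.Document()
        root = doc.createElement('Settings')
        root.setAttribute('Flags', 'a\r\nb\tc')  # CR/LF/TAB kept as entities
        doc.appendChild(root)
        xml_text = root.toxml()
    finally:
        fix.Cleanup()  # restore the original minidom behaviour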
diff --git a/tools/gyp/samples/samples b/tools/gyp/samples/samples
deleted file mode 100755
index 804b618..0000000
--- a/tools/gyp/samples/samples
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os.path
-import shutil
-import sys
-
-
-gyps = [
-    'app/app.gyp',
-    'base/base.gyp',
-    'build/temp_gyp/googleurl.gyp',
-    'build/all.gyp',
-    'build/common.gypi',
-    'build/external_code.gypi',
-    'chrome/test/security_tests/security_tests.gyp',
-    'chrome/third_party/hunspell/hunspell.gyp',
-    'chrome/chrome.gyp',
-    'media/media.gyp',
-    'net/net.gyp',
-    'printing/printing.gyp',
-    'sdch/sdch.gyp',
-    'skia/skia.gyp',
-    'testing/gmock.gyp',
-    'testing/gtest.gyp',
-    'third_party/bzip2/bzip2.gyp',
-    'third_party/icu38/icu38.gyp',
-    'third_party/libevent/libevent.gyp',
-    'third_party/libjpeg/libjpeg.gyp',
-    'third_party/libpng/libpng.gyp',
-    'third_party/libxml/libxml.gyp',
-    'third_party/libxslt/libxslt.gyp',
-    'third_party/lzma_sdk/lzma_sdk.gyp',
-    'third_party/modp_b64/modp_b64.gyp',
-    'third_party/npapi/npapi.gyp',
-    'third_party/sqlite/sqlite.gyp',
-    'third_party/zlib/zlib.gyp',
-    'v8/tools/gyp/v8.gyp',
-    'webkit/activex_shim/activex_shim.gyp',
-    'webkit/activex_shim_dll/activex_shim_dll.gyp',
-    'webkit/build/action_csspropertynames.py',
-    'webkit/build/action_cssvaluekeywords.py',
-    'webkit/build/action_jsconfig.py',
-    'webkit/build/action_makenames.py',
-    'webkit/build/action_maketokenizer.py',
-    'webkit/build/action_useragentstylesheets.py',
-    'webkit/build/rule_binding.py',
-    'webkit/build/rule_bison.py',
-    'webkit/build/rule_gperf.py',
-    'webkit/tools/test_shell/test_shell.gyp',
-    'webkit/webkit.gyp',
-]
-
-
-def Main(argv):
-  if len(argv) != 3 or argv[1] not in ['push', 'pull']:
-    print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
-    return 1
-
-  path_to_chrome = argv[2]
-
-  for g in gyps:
-    chrome_file = os.path.join(path_to_chrome, g)
-    local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
-    if argv[1] == 'push':
-      print 'Copying %s to %s' % (local_file, chrome_file)
-      shutil.copyfile(local_file, chrome_file)
-    elif argv[1] == 'pull':
-      print 'Copying %s to %s' % (chrome_file, local_file)
-      shutil.copyfile(chrome_file, local_file)
-    else:
-      assert False
-
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(Main(sys.argv))
diff --git a/tools/gyp/samples/samples.bat b/tools/gyp/samples/samples.bat
deleted file mode 100644
index 778d9c9..0000000
--- a/tools/gyp/samples/samples.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-@rem Copyright (c) 2009 Google Inc. All rights reserved.
-@rem Use of this source code is governed by a BSD-style license that can be
-@rem found in the LICENSE file.
-
-@python %~dp0/samples %*
diff --git a/tools/gyp/setup.py b/tools/gyp/setup.py
deleted file mode 100755
index 75a4255..0000000
--- a/tools/gyp/setup.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from setuptools import setup
-
-setup(
-  name='gyp',
-  version='0.1',
-  description='Generate Your Projects',
-  author='Chromium Authors',
-  author_email='chromium-dev@googlegroups.com',
-  url='http://code.google.com/p/gyp',
-  package_dir = {'': 'pylib'},
-  packages=['gyp', 'gyp.generator'],
-  entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
-)
diff --git a/tools/gyp/test/actions-bare/gyptest-bare.py b/tools/gyp/test/actions-bare/gyptest-bare.py
deleted file mode 100755
index e3d6db1..0000000
--- a/tools/gyp/test/actions-bare/gyptest-bare.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies actions which are not depended on by other targets get executed.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('bare.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('bare.gyp', chdir='relocate/src')
-
-file_content = 'Hello from bare.py\n'
-
-test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-bare/src/bare.gyp b/tools/gyp/test/actions-bare/src/bare.gyp
deleted file mode 100644
index 3d28f09..0000000
--- a/tools/gyp/test/actions-bare/src/bare.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'bare',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [
-            'bare.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/out.txt',
-          ],
-          'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-bare/src/bare.py b/tools/gyp/test/actions-bare/src/bare.py
deleted file mode 100755
index 1230750..0000000
--- a/tools/gyp/test/actions-bare/src/bare.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write('Hello from bare.py\n')
-f.close()
diff --git a/tools/gyp/test/actions-depfile/depfile.gyp b/tools/gyp/test/actions-depfile/depfile.gyp
deleted file mode 100644
index dc2397d..0000000
--- a/tools/gyp/test/actions-depfile/depfile.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'depfile_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'depfile_action',
-          'inputs': [
-            'input.txt',
-          ],
-          'outputs': [
-            'output.txt',
-          ],
-          'depfile': 'depfile.d',
-          'action': [ ]
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-depfile/gyptest-all.py b/tools/gyp/test/actions-depfile/gyptest-all.py
deleted file mode 100644
index 23f6f4a..0000000
--- a/tools/gyp/test/actions-depfile/gyptest-all.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Verifies that depfile fields are output in ninja rules."""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-if test.format == 'ninja':
-  test.run_gyp('depfile.gyp')
-  contents = open(test.built_file_path('obj/depfile_target.ninja')).read()
-
-  expected = 'depfile = depfile.d'
-  if expected not in contents:
-    test.fail_test()
-  test.pass_test()
diff --git a/tools/gyp/test/actions-depfile/input.txt b/tools/gyp/test/actions-depfile/input.txt
deleted file mode 100644
index 3f9177e..0000000
--- a/tools/gyp/test/actions-depfile/input.txt
+++ /dev/null
@@ -1 +0,0 @@
-input
diff --git a/tools/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py b/tools/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py
deleted file mode 100755
index ebc7f4f..0000000
--- a/tools/gyp/test/actions-multiple-outputs-with-dependencies/gyptest-action.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies actions with multiple outputs & dependencies will correctly rebuild.
-
-This is a regression test for crrev.com/1177163002.
-"""
-
-import TestGyp
-import os
-import sys
-import time
-
-if sys.platform in ('darwin', 'win32'):
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-test = TestGyp.TestGyp()
-
-TESTDIR='relocate/src'
-test.run_gyp('action.gyp', chdir='src')
-test.relocate('src', TESTDIR)
-
-def build_and_check(content):
-  test.write(TESTDIR + '/input.txt', content)
-  test.build('action.gyp', 'upper', chdir=TESTDIR)
-  test.built_file_must_match('result.txt', content, chdir=TESTDIR)
-
-build_and_check('Content for first build.')
-
-# Ninja works with timestamps and the test above is fast enough that the
-# 'updated' file may end up with the same timestamp as the original, meaning
-# that ninja may not always recognize the input file has changed.
-if test.format == 'ninja':
-  time.sleep(1)
-
-build_and_check('An updated input file.')
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp b/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp
deleted file mode 100644
index a305d65..0000000
--- a/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/action.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'upper',
-      'type': 'none',
-      'actions': [{
-        'action_name': 'upper_action',
-        'inputs': ['<(PRODUCT_DIR)/out2.txt'],
-        'outputs': ['<(PRODUCT_DIR)/result.txt'],
-        'action': ['python', 'rcopy.py', '<@(_inputs)', '<@(_outputs)'],
-      }],
-    },
-    {
-      'target_name': 'lower',
-      'type': 'none',
-      'actions': [{
-        'action_name': 'lower_action',
-        'inputs': ['input.txt'],
-        'outputs': ['<(PRODUCT_DIR)/out1.txt', '<(PRODUCT_DIR)/out2.txt'],
-        'action': ['python', 'rcopy.py', '<@(_inputs)', '<@(_outputs)'],
-      }],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py b/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py
deleted file mode 100644
index fb02959..0000000
--- a/tools/gyp/test/actions-multiple-outputs-with-dependencies/src/rcopy.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-"""A slightly odd 'cp' implementation for this test.
-
-This 'cp' can have many targets, but only one source. 'cp src dest1 dest2'
-will copy the file 'src' to both 'dest1' and 'dest2'."""
-
-with open(sys.argv[1], 'r') as f:
-  src = f.read()
-for dest in sys.argv[2:]:
-  with open(dest, 'w') as f:
-    f.write(src)
-
diff --git a/tools/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py b/tools/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py
deleted file mode 100755
index 72a7040..0000000
--- a/tools/gyp/test/actions-multiple-outputs/gyptest-multiple-outputs.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies actions with multiple outputs will correctly rebuild.
-"""
-
-import TestGyp
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('multiple-outputs.gyp', chdir='src')
-
-chdir = 'relocate/src'
-test.relocate('src', chdir)
-
-def build_and_check():
-  # Build + check that both outputs exist.
-  test.build('multiple-outputs.gyp', chdir=chdir)
-  test.built_file_must_exist('out1.txt', chdir=chdir)
-  test.built_file_must_exist('out2.txt', chdir=chdir)
-
-# Plain build.
-build_and_check()
-
-# Remove either + rebuild. Both should exist (again).
-os.remove(test.built_file_path('out1.txt', chdir=chdir))
-build_and_check()
-
-# Remove the other + rebuild. Both should exist (again).
-os.remove(test.built_file_path('out2.txt', chdir=chdir))
-build_and_check()
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp b/tools/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp
deleted file mode 100644
index 7a3d74b..0000000
--- a/tools/gyp/test/actions-multiple-outputs/src/multiple-outputs.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'multiple-outputs',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [],
-          'outputs': [
-            '<(PRODUCT_DIR)/out1.txt',
-            '<(PRODUCT_DIR)/out2.txt',
-          ],
-          'action': ['python', 'touch.py', '<@(_outputs)'],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-multiple-outputs/src/touch.py b/tools/gyp/test/actions-multiple-outputs/src/touch.py
deleted file mode 100644
index bc61267..0000000
--- a/tools/gyp/test/actions-multiple-outputs/src/touch.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-"""Cross-platform touch."""
-
-for fname in sys.argv[1:]:
-  if os.path.exists(fname):
-    os.utime(fname, None)
-  else:
-    open(fname, 'w').close()
diff --git a/tools/gyp/test/actions-multiple/gyptest-all.py b/tools/gyp/test/actions-multiple/gyptest-all.py
deleted file mode 100755
index 2a083de..0000000
--- a/tools/gyp/test/actions-multiple/gyptest-all.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies two actions can be attached to the same input files.
-"""
-
-import sys
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('actions.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Test of fine-grained dependencies for generators that can build individual
-# files on demand.
-# In particular:
-#   - TargetA depends on TargetB.
-#   - TargetA and TargetB are 'none' type with actions attached.
-#   - TargetA has multiple actions.
-#   - An output from one of the actions in TargetA (not the first listed),
-#     is requested as the build target.
-# Ensure that TargetB gets built.
-#
-# This sub-test can only be done with generators/build tools that can
-# be asked to build individual files rather than whole targets (make, ninja).
-if test.format in ['make', 'ninja']:
-  # Select location of target based on generator.
-  if test.format == 'make':
-    target = 'multi2.txt'
-  elif test.format == 'ninja':
-    if sys.platform in ['win32', 'cygwin']:
-      target = '..\\..\\multi2.txt'
-    else:
-      target = '../../multi2.txt'
-  else:
-    assert False
-  test.build('actions.gyp', chdir='relocate/src', target=target)
-  test.must_contain('relocate/src/multi2.txt', 'hello there')
-  test.must_contain('relocate/src/multi_dep.txt', 'hello there')
-
-
-# Test that two actions can be attached to the same inputs.
-test.build('actions.gyp', test.ALL, chdir='relocate/src')
-test.must_contain('relocate/src/output1.txt', 'hello there')
-test.must_contain('relocate/src/output2.txt', 'hello there')
-test.must_contain('relocate/src/output3.txt', 'hello there')
-test.must_contain('relocate/src/output4.txt', 'hello there')
-
-# Test that process_outputs_as_sources works in conjunction with merged
-# actions.
-test.run_built_executable(
-    'multiple_action_source_filter',
-    chdir='relocate/src',
-    stdout=(
-        '{\n'
-        'bar\n'
-        'car\n'
-        'dar\n'
-        'ear\n'
-        '}\n'
-    ),
-)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-multiple/src/actions.gyp b/tools/gyp/test/actions-multiple/src/actions.gyp
deleted file mode 100644
index c70a58f..0000000
--- a/tools/gyp/test/actions-multiple/src/actions.gyp
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    # Have a long string so that action commands exceed the 512-character
-    # command-line limit on XP.
-    'long_string':
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-        'abcdefghijklmnopqrstuvwxyz0123456789'
-  },
-  'targets': [
-    {
-      'target_name': 'multiple_action_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'output1.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action2',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'output2.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action3',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'output3.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action4',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'output4.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'multiple_action_source_filter',
-      'type': 'executable',
-      'sources': [
-        'main.c',
-        # TODO(bradnelson): add foo.c here once this issue is fixed:
-        #     http://code.google.com/p/gyp/issues/detail?id=175
-      ],
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [
-            'foo.c',
-            'filter.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/output1.c',
-          ],
-          'process_outputs_as_sources': 1,
-          'action': [
-            'python', 'filter.py', 'foo', 'bar', 'foo.c', '<@(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action2',
-          'inputs': [
-            'foo.c',
-            'filter.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/output2.c',
-          ],
-          'process_outputs_as_sources': 1,
-          'action': [
-            'python', 'filter.py', 'foo', 'car', 'foo.c', '<@(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action3',
-          'inputs': [
-            'foo.c',
-            'filter.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/output3.c',
-          ],
-          'process_outputs_as_sources': 1,
-          'action': [
-            'python', 'filter.py', 'foo', 'dar', 'foo.c', '<@(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action4',
-          'inputs': [
-            'foo.c',
-            'filter.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/output4.c',
-          ],
-          'process_outputs_as_sources': 1,
-          'action': [
-            'python', 'filter.py', 'foo', 'ear', 'foo.c', '<@(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'multiple_dependent_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'multi1.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'action2',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'multi2.txt',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'dependencies': [
-        'multiple_required_target',
-      ],
-    },
-    {
-      'target_name': 'multiple_required_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'multi_dep',
-          'inputs': [
-            'copy.py',
-            'input.txt',
-          ],
-          'outputs': [
-            'multi_dep.txt',
-          ],
-          'process_outputs_as_sources': 1,
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-multiple/src/copy.py b/tools/gyp/test/actions-multiple/src/copy.py
deleted file mode 100755
index 0774679..0000000
--- a/tools/gyp/test/actions-multiple/src/copy.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import shutil
-import sys
-
-shutil.copyfile(sys.argv[1], sys.argv[2])
diff --git a/tools/gyp/test/actions-multiple/src/filter.py b/tools/gyp/test/actions-multiple/src/filter.py
deleted file mode 100755
index f61a5fa..0000000
--- a/tools/gyp/test/actions-multiple/src/filter.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import sys
-
-data = open(sys.argv[3], 'r').read()
-fh = open(sys.argv[4], 'w')
-fh.write(data.replace(sys.argv[1], sys.argv[2]))
-fh.close()
diff --git a/tools/gyp/test/actions-multiple/src/foo.c b/tools/gyp/test/actions-multiple/src/foo.c
deleted file mode 100644
index 23c4ef7..0000000
--- a/tools/gyp/test/actions-multiple/src/foo.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-void foo(void) {
-  printf("foo\n");
-}
diff --git a/tools/gyp/test/actions-multiple/src/input.txt b/tools/gyp/test/actions-multiple/src/input.txt
deleted file mode 100644
index c7c7da3..0000000
--- a/tools/gyp/test/actions-multiple/src/input.txt
+++ /dev/null
@@ -1 +0,0 @@
-hello there
diff --git a/tools/gyp/test/actions-multiple/src/main.c b/tools/gyp/test/actions-multiple/src/main.c
deleted file mode 100644
index 0a420b9..0000000
--- a/tools/gyp/test/actions-multiple/src/main.c
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-void bar(void);
-void car(void);
-void dar(void);
-void ear(void);
-
-int main() {
-  printf("{\n");
-  bar();
-  car();
-  dar();
-  ear();
-  printf("}\n");
-  return 0;
-}
diff --git a/tools/gyp/test/actions-none/gyptest-none.py b/tools/gyp/test/actions-none/gyptest-none.py
deleted file mode 100755
index 933cfad..0000000
--- a/tools/gyp/test/actions-none/gyptest-none.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies actions can be in 'none' type targets with source files.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('none_with_source_files.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('none_with_source_files.gyp', chdir='relocate/src')
-
-file_content = 'foo.cc\n'
-
-test.built_file_must_match('fake.out', file_content, chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-none/src/fake_cross.py b/tools/gyp/test/actions-none/src/fake_cross.py
deleted file mode 100644
index 2913f66..0000000
--- a/tools/gyp/test/actions-none/src/fake_cross.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-import sys
-
-fh = open(sys.argv[-1], 'wb')
-for filename in sys.argv[1:-1]:
-  fh.write(open(filename).read())
-fh.close()
diff --git a/tools/gyp/test/actions-none/src/foo.cc b/tools/gyp/test/actions-none/src/foo.cc
deleted file mode 100644
index c6c6174..0000000
--- a/tools/gyp/test/actions-none/src/foo.cc
+++ /dev/null
@@ -1 +0,0 @@
-foo.cc
diff --git a/tools/gyp/test/actions-none/src/none_with_source_files.gyp b/tools/gyp/test/actions-none/src/none_with_source_files.gyp
deleted file mode 100644
index e2aaebc..0000000
--- a/tools/gyp/test/actions-none/src/none_with_source_files.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test that 'none' type targets can have .cc files in them.
-
-{
-  'targets': [
-    {
-      'target_name': 'none_with_sources',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'foo.cc',
-      ],
-      'actions': [
-        {
-          'action_name': 'fake_cross',
-          'inputs': [
-            'fake_cross.py',
-            '<@(_sources)',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/fake.out',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<@(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        }
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-subdir/gyptest-action.py b/tools/gyp/test/actions-subdir/gyptest-action.py
deleted file mode 100755
index 09cfef1..0000000
--- a/tools/gyp/test/actions-subdir/gyptest-action.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test actions that output to PRODUCT_DIR.
-"""
-
-import TestGyp
-
-# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
-test = TestGyp.TestGyp(formats=['!xcode'])
-
-test.run_gyp('none.gyp', chdir='src')
-
-test.build('none.gyp', test.ALL, chdir='src')
-
-file_content = 'Hello from make-file.py\n'
-subdir_file_content = 'Hello from make-subdir-file.py\n'
-
-test.built_file_must_match('file.out', file_content, chdir='src')
-test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
-
-test.pass_test()
diff --git a/tools/gyp/test/actions-subdir/src/make-file.py b/tools/gyp/test/actions-subdir/src/make-file.py
deleted file mode 100755
index 74e5581..0000000
--- a/tools/gyp/test/actions-subdir/src/make-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = 'Hello from make-file.py\n'
-
-open(sys.argv[1], 'wb').write(contents)
diff --git a/tools/gyp/test/actions-subdir/src/none.gyp b/tools/gyp/test/actions-subdir/src/none.gyp
deleted file mode 100644
index 23f8d25..0000000
--- a/tools/gyp/test/actions-subdir/src/none.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'file',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'make-file',
-          'inputs': [
-            'make-file.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/file.out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        }
-      ],
-      'dependencies': [
-        'subdir/subdir.gyp:subdir_file',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py b/tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
deleted file mode 100755
index 80ce19a..0000000
--- a/tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = 'Hello from make-subdir-file.py\n'
-
-open(sys.argv[1], 'wb').write(contents)
diff --git a/tools/gyp/test/actions-subdir/src/subdir/subdir.gyp b/tools/gyp/test/actions-subdir/src/subdir/subdir.gyp
deleted file mode 100644
index 0315d4e..0000000
--- a/tools/gyp/test/actions-subdir/src/subdir/subdir.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'subdir_file',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'make-subdir-file',
-          'inputs': [
-            'make-subdir-file.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/subdir_file.out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        }
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions/generated-header/action.py b/tools/gyp/test/actions/generated-header/action.py
deleted file mode 100644
index 9be9879..0000000
--- a/tools/gyp/test/actions/generated-header/action.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-outfile = sys.argv[1]
-open(outfile, 'w').write('const char kFoo[] = "%s";' % sys.argv[2])
diff --git a/tools/gyp/test/actions/generated-header/main.cc b/tools/gyp/test/actions/generated-header/main.cc
deleted file mode 100644
index 7973781..0000000
--- a/tools/gyp/test/actions/generated-header/main.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-#include "MyHeader.h"
-
-int main() {
-  printf("%s\n", kFoo);
-}
diff --git a/tools/gyp/test/actions/generated-header/test.gyp b/tools/gyp/test/actions/generated-header/test.gyp
deleted file mode 100644
index 209b951..0000000
--- a/tools/gyp/test/actions/generated-header/test.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'generate_header',
-      'type': 'none',
-      'actions': [
-        {
-          'inputs': [ ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/MyHeader.h',
-          ],
-          'action_name': 'generate header',
-          'action': ['python', './action.py',
-                     '<(SHARED_INTERMEDIATE_DIR)/MyHeader.h', 'foobar output' ],
-        },
-      ],
-      'msvs_cygwin_shell': 0,
-    },
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': [
-        'generate_header',
-      ],
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'sources': [ 'main.cc' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions/gyptest-all.py b/tools/gyp/test/actions/gyptest-all.py
deleted file mode 100755
index c8833a5..0000000
--- a/tools/gyp/test/actions/gyptest-all.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple actions when using an explicit build target of 'all'.
-"""
-
-import glob
-import os
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_all')
-
-test.run_gyp('actions.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Some gyp files use an action that mentions an output but never
-# writes it as a means to making the action run on every build.  That
-# doesn't mesh well with ninja's semantics.  TODO(evan): figure out
-# how to work always-run actions in to ninja.
-if test.format in ['ninja', 'xcode-ninja']:
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-else:
-  # Test that an "always run" action increases a counter on multiple
-  # invocations, and that a dependent action updates in step.
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
-
-  # The "always run" action only counts to 2, but the dependent target
-  # will count forever if it's allowed to run. This verifies that the
-  # dependent target only runs when the "always run" action generates
-  # new output, not just because the "always run" ran.
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
-
-expect = """\
-Hello from program.c
-Hello from make-prog1.py
-Hello from make-prog2.py
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir1'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-
-test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
-
-
-expect = "Hello from generate_main.py\n"
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir3'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('null_input', chdir=chdir, stdout=expect)
-
-
-# Clean out files which may have been created if test.ALL was run.
-def clean_dep_files():
-  for file in (glob.glob('relocate/src/dep_*.txt') +
-               glob.glob('relocate/src/deps_all_done_*.txt')):
-    if os.path.exists(file):
-      os.remove(file)
-
-# Confirm our clean.
-clean_dep_files()
-test.must_not_exist('relocate/src/dep_1.txt')
-test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
-
-# Make sure all deps finish before an action is run on a 'None' target.
-# If using the Make builder, add -j to make things more difficult.
-arguments = []
-if test.format == 'make':
-  arguments = ['-j']
-test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
-           arguments=arguments)
-test.must_exist('relocate/src/deps_all_done_first_123.txt')
-
-# Try again with a target that has deps in reverse.  Output files from
-# previous tests deleted.  Confirm this execution did NOT run the ALL
-# target which would mess up our dep tests.
-clean_dep_files()
-test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
-           arguments=arguments)
-test.must_exist('relocate/src/deps_all_done_first_321.txt')
-test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/actions/gyptest-default.py b/tools/gyp/test/actions/gyptest-default.py
deleted file mode 100755
index 70c99ec..0000000
--- a/tools/gyp/test/actions/gyptest-default.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple actions when using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_default')
-
-test.run_gyp('actions.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Some gyp files use an action that mentions an output but never
-# writes it as a means to making the action run on every build.  That
-# doesn't mesh well with ninja's semantics.  TODO(evan): figure out
-# how to work always-run actions in to ninja.
-if test.format in ['ninja', 'xcode-ninja']:
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-else:
-  # Test that an "always run" action increases a counter on multiple
-  # invocations, and that a dependent action updates in step.
-  test.build('actions.gyp', chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
-  test.build('actions.gyp', chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
-
-  # The "always run" action only counts to 2, but the dependent target
-  # will count forever if it's allowed to run. This verifies that the
-  # dependent target only runs when the "always run" action generates
-  # new output, not just because the "always run" ran.
-  test.build('actions.gyp', test.ALL, chdir='relocate/src')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
-  test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
-
-expect = """\
-Hello from program.c
-Hello from make-prog1.py
-Hello from make-prog2.py
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir1'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-
-test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
-
-
-expect = "Hello from generate_main.py\n"
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir3'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('null_input', chdir=chdir, stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/actions/gyptest-errors.py b/tools/gyp/test/actions/gyptest-errors.py
deleted file mode 100755
index e1ef883..0000000
--- a/tools/gyp/test/actions/gyptest-errors.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies behavior for different action configuration errors:
-exit status of 1, and the expected error message must be in stderr.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_errors')
-
-
-test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
-expect = [
-  "Anonymous action in target broken_actions2.  An action must have an 'action_name' field.",
-]
-test.must_contain_all_lines(test.stderr(), expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/actions/gyptest-generated-header.py b/tools/gyp/test/actions/gyptest-generated-header.py
deleted file mode 100644
index cd5bd69..0000000
--- a/tools/gyp/test/actions/gyptest-generated-header.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that dependencies on generated headers work, even if the header has
-a mixed-case file name.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-CHDIR = 'generated-header'
-
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', 'program', chdir=CHDIR)
-test.up_to_date('test.gyp', 'program', chdir=CHDIR)
-
-expect = 'foobar output\n'
-test.run_built_executable('program', chdir=CHDIR, stdout=expect)
-
-# Change what's written to the generated header, regyp and rebuild, and check
-# that the change makes it to the executable and that the build is clean.
-test.sleep()
-test.write('generated-header/test.gyp',
-           test.read('generated-header/test.gyp').replace('foobar', 'barbaz'))
-
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', 'program', chdir=CHDIR)
-test.up_to_date('test.gyp', 'program', chdir=CHDIR)
-
-expect = 'barbaz output\n'
-test.run_built_executable('program', chdir=CHDIR, stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/actions/src/action_missing_name.gyp b/tools/gyp/test/actions/src/action_missing_name.gyp
deleted file mode 100644
index 00424c3..0000000
--- a/tools/gyp/test/actions/src/action_missing_name.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'broken_actions2',
-      'type': 'none',
-      'actions': [
-        {
-          'inputs': [
-            'no_name.input',
-          ],
-          'action': [
-            'python',
-            '-c',
-            'print \'missing name\'',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions/src/actions.gyp b/tools/gyp/test/actions/src/actions.gyp
deleted file mode 100644
index 5d2db19..0000000
--- a/tools/gyp/test/actions/src/actions.gyp
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_all_actions',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/executable.gyp:*',
-        'subdir2/none.gyp:*',
-        'subdir3/null_input.gyp:*',
-      ],
-    },
-    {
-      'target_name': 'depend_on_always_run_action',
-      'type': 'none',
-      'dependencies': [ 'subdir1/executable.gyp:counter' ],
-      'actions': [
-        {
-          'action_name': 'use_always_run_output',
-          'inputs': [
-            'subdir1/actions-out/action-counter.txt',
-            'subdir1/counter.py',
-          ],
-          'outputs': [
-            'subdir1/actions-out/action-counter_2.txt',
-          ],
-          'action': [
-            'python', 'subdir1/counter.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-
-    # Three deps which don't finish immediately.
-    # Each one has a small delay then creates a file.
-    # Delays are 1.0, 1.1, and 2.0 seconds.
-    {
-      'target_name': 'dep_1',
-      'type': 'none',
-      'actions': [{
-        'inputs': [ 'actions.gyp' ],
-        'outputs': [ 'dep_1.txt' ],
-        'action_name': 'dep_1',
-        'action': [ 'python', '-c',
-                    'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-    {
-      'target_name': 'dep_2',
-      'type': 'none',
-      'actions': [{
-        'inputs': [ 'actions.gyp' ],
-        'outputs': [ 'dep_2.txt' ],
-        'action_name': 'dep_2',
-        'action': [ 'python', '-c',
-                    'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-    {
-      'target_name': 'dep_3',
-      'type': 'none',
-      'actions': [{
-        'inputs': [ 'actions.gyp' ],
-        'outputs': [ 'dep_3.txt' ],
-        'action_name': 'dep_3',
-        'action': [ 'python', '-c',
-                    'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-
-    # An action which assumes the deps have completed.
-    # Does NOT list the output files of it's deps as inputs.
-    # On success create the file deps_all_done_first.txt.
-    {
-      'target_name': 'action_with_dependencies_123',
-      'type': 'none',
-      'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
-      'actions': [{
-        'inputs': [ 'actions.gyp' ],
-        'outputs': [ 'deps_all_done_first_123.txt' ],
-        'action_name': 'action_with_dependencies_123',
-        'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-    # Same as above but with deps in reverse.
-    {
-      'target_name': 'action_with_dependencies_321',
-      'type': 'none',
-      'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
-      'actions': [{
-        'inputs': [ 'actions.gyp' ],
-        'outputs': [ 'deps_all_done_first_321.txt' ],
-        'action_name': 'action_with_dependencies_321',
-        'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-
-  ],
-}
diff --git a/tools/gyp/test/actions/src/confirm-dep-files.py b/tools/gyp/test/actions/src/confirm-dep-files.py
deleted file mode 100755
index 3b84630..0000000
--- a/tools/gyp/test/actions/src/confirm-dep-files.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Confirms presence of files generated by our targets we depend on.
-If they exist, create a new file.
-
-Note target's input files are explicitly NOT defined in the gyp file
-so they can't easily be passed to this script as args.
-"""
-
-import os
-import sys
-
-outfile = sys.argv[1]  # Example value we expect: deps_all_done_first_123.txt
-if (os.path.exists("dep_1.txt") and
-    os.path.exists("dep_2.txt") and
-    os.path.exists("dep_3.txt")):
-  open(outfile, "w")
diff --git a/tools/gyp/test/actions/src/subdir1/counter.py b/tools/gyp/test/actions/src/subdir1/counter.py
deleted file mode 100755
index d888f2e..0000000
--- a/tools/gyp/test/actions/src/subdir1/counter.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-import time
-
-output = sys.argv[1]
-persistoutput = "%s.persist" % sys.argv[1]
-
-count = 0
-try:
-  count = open(persistoutput, 'r').read()
-except:
-  pass
-count = int(count) + 1
-
-if len(sys.argv) > 2:
-  max_count = int(sys.argv[2])
-  if count > max_count:
-    count = max_count
-
-oldcount = 0
-try:
-  oldcount = open(output, 'r').read()
-except:
-  pass
-
-# Save the count in a file that is undeclared, and thus hidden, to gyp. We need
-# to do this because, prior to running commands, some build systems deletes
-# any declared outputs, so we would lose our count if we just wrote to the
-# given output file.
-open(persistoutput, 'w').write('%d' % (count))
-
-# Only write the given output file if the count has changed.
-if int(oldcount) != count:
-  open(output, 'w').write('%d' % (count))
-  # Sleep so the next run changes the file time sufficiently to make the build
-  # detect the file as changed.
-  time.sleep(1)
-
-sys.exit(0)
diff --git a/tools/gyp/test/actions/src/subdir1/executable.gyp b/tools/gyp/test/actions/src/subdir1/executable.gyp
deleted file mode 100644
index 6a1ce4f..0000000
--- a/tools/gyp/test/actions/src/subdir1/executable.gyp
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-      ],
-      'actions': [
-        {
-          'action_name': 'make-prog1',
-          'inputs': [
-            'make-prog1.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/prog1.c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-        {
-          'action_name': 'make-prog2',
-          'inputs': [
-            'make-prog2.py',
-          ],
-          'outputs': [
-            'actions-out/prog2.c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'counter',
-      'type': 'none',
-      'actions': [
-        {
-          # This action should always run, regardless of whether or not it's
-          # inputs or the command-line change. We do this by creating a dummy
-          # first output, which is always missing, thus causing the build to
-          # always try to recreate it. Actual output files should be listed
-          # after the dummy one, and dependent targets should list the real
-          # output(s) in their inputs
-          # (see '../actions.gyp:depend_on_always_run_action').
-          'action_name': 'action_counter',
-          'inputs': [
-            'counter.py',
-          ],
-          'outputs': [
-            'actions-out/action-counter.txt.always',
-            'actions-out/action-counter.txt',
-          ],
-          'action': [
-            'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions/src/subdir1/make-prog1.py b/tools/gyp/test/actions/src/subdir1/make-prog1.py
deleted file mode 100755
index 7ea1d8a..0000000
--- a/tools/gyp/test/actions/src/subdir1/make-prog1.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = r"""
-#include <stdio.h>
-
-void prog1(void)
-{
-  printf("Hello from make-prog1.py\n");
-}
-"""
-
-open(sys.argv[1], 'w').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/actions/src/subdir1/make-prog2.py b/tools/gyp/test/actions/src/subdir1/make-prog2.py
deleted file mode 100755
index 0bfe497..0000000
--- a/tools/gyp/test/actions/src/subdir1/make-prog2.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = r"""
-#include <stdio.h>
-
-void prog2(void)
-{
-  printf("Hello from make-prog2.py\n");
-}
-"""
-
-open(sys.argv[1], 'w').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/actions/src/subdir1/program.c b/tools/gyp/test/actions/src/subdir1/program.c
deleted file mode 100644
index c093153..0000000
--- a/tools/gyp/test/actions/src/subdir1/program.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-extern void prog1(void);
-extern void prog2(void);
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  prog1();
-  prog2();
-  return 0;
-}
diff --git a/tools/gyp/test/actions/src/subdir2/make-file.py b/tools/gyp/test/actions/src/subdir2/make-file.py
deleted file mode 100755
index fff0653..0000000
--- a/tools/gyp/test/actions/src/subdir2/make-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = "Hello from make-file.py\n"
-
-open(sys.argv[1], 'wb').write(contents)
diff --git a/tools/gyp/test/actions/src/subdir2/none.gyp b/tools/gyp/test/actions/src/subdir2/none.gyp
deleted file mode 100644
index 2caa97d..0000000
--- a/tools/gyp/test/actions/src/subdir2/none.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'file',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'make-file',
-          'inputs': [
-            'make-file.py',
-          ],
-          'outputs': [
-            'file.out',
-            # TODO:  enhance testing infrastructure to test this
-            # without having to hard-code the intermediate dir paths.
-            #'<(INTERMEDIATE_DIR)/file.out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        }
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/actions/src/subdir3/generate_main.py b/tools/gyp/test/actions/src/subdir3/generate_main.py
deleted file mode 100755
index 804d38d..0000000
--- a/tools/gyp/test/actions/src/subdir3/generate_main.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = """
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from generate_main.py\\n");
-  return 0;
-}
-"""
-
-open(sys.argv[1], 'w').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/actions/src/subdir3/null_input.gyp b/tools/gyp/test/actions/src/subdir3/null_input.gyp
deleted file mode 100644
index 9b0bea5..0000000
--- a/tools/gyp/test/actions/src/subdir3/null_input.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'null_input',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'generate_main',
-          'process_outputs_as_sources': 1,
-          'inputs': [],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/main.c',
-          ],
-          'action': [
-            # TODO:  we can't just use <(_outputs) here?!
-            'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/additional-targets/gyptest-additional.py b/tools/gyp/test/additional-targets/gyptest-additional.py
deleted file mode 100755
index 466283e..0000000
--- a/tools/gyp/test/additional-targets/gyptest-additional.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple actions when using an explicit build target of 'all'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all.gyp',
-             '-G', 'xcode_ninja_target_pattern=^all_targets$',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Build all.
-test.build('all.gyp', chdir='relocate/src')
-
-if test.format=='xcode':
-  chdir = 'relocate/src/dir1'
-else:
-  chdir = 'relocate/src'
-
-# Output is as expected.
-file_content = 'Hello from emit.py\n'
-test.built_file_must_match('out2.txt', file_content, chdir=chdir)
-
-test.built_file_must_not_exist('out.txt', chdir='relocate/src')
-test.built_file_must_not_exist('foolib1',
-                               type=test.SHARED_LIB,
-                               chdir=chdir)
-
-# xcode-ninja doesn't generate separate workspaces for sub-gyps by design
-if test.format == 'xcode-ninja':
-  test.pass_test()
-
-# TODO(mmoss) Make consistent with msvs, with 'dir1' before 'out/Default'?
-if test.format in ('make', 'ninja', 'cmake'):
-  chdir='relocate/src'
-else:
-  chdir='relocate/src/dir1'
-
-# Build the action explicitly.
-test.build('actions.gyp', 'action1_target', chdir=chdir)
-
-# Check that things got run.
-file_content = 'Hello from emit.py\n'
-test.built_file_must_exist('out.txt', chdir=chdir)
-
-# Build the shared library explicitly.
-test.build('actions.gyp', 'foolib1', chdir=chdir)
-
-test.built_file_must_exist('foolib1',
-                           type=test.SHARED_LIB,
-                           chdir=chdir,
-                           subdir='dir1')
-
-test.pass_test()
diff --git a/tools/gyp/test/additional-targets/src/all.gyp b/tools/gyp/test/additional-targets/src/all.gyp
deleted file mode 100644
index 21c8308..0000000
--- a/tools/gyp/test/additional-targets/src/all.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'all_targets',
-      'type': 'none',
-      'dependencies': ['dir1/actions.gyp:*'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/additional-targets/src/dir1/actions.gyp b/tools/gyp/test/additional-targets/src/dir1/actions.gyp
deleted file mode 100644
index 5089c80..0000000
--- a/tools/gyp/test/additional-targets/src/dir1/actions.gyp
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'action1_target',
-      'type': 'none',
-      'suppress_wildcard': 1,
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [
-            'emit.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/out.txt',
-          ],
-          'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'action2_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action2',
-          'inputs': [
-            'emit.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/out2.txt',
-          ],
-          'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'foolib1',
-      'type': 'shared_library',
-      'suppress_wildcard': 1,
-      'sources': ['lib1.c'],
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/additional-targets/src/dir1/emit.py b/tools/gyp/test/additional-targets/src/dir1/emit.py
deleted file mode 100755
index fd31387..0000000
--- a/tools/gyp/test/additional-targets/src/dir1/emit.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write('Hello from emit.py\n')
-f.close()
diff --git a/tools/gyp/test/additional-targets/src/dir1/lib1.c b/tools/gyp/test/additional-targets/src/dir1/lib1.c
deleted file mode 100644
index df4cb10..0000000
--- a/tools/gyp/test/additional-targets/src/dir1/lib1.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-int func1(void) {
-  return 42;
-}
diff --git a/tools/gyp/test/analyzer/common.gypi b/tools/gyp/test/analyzer/common.gypi
deleted file mode 100644
index 7c664e4..0000000
--- a/tools/gyp/test/analyzer/common.gypi
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-}
diff --git a/tools/gyp/test/analyzer/gyptest-analyzer.py b/tools/gyp/test/analyzer/gyptest-analyzer.py
deleted file mode 100644
index 72de218..0000000
--- a/tools/gyp/test/analyzer/gyptest-analyzer.py
+++ /dev/null
@@ -1,425 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for analyzer
-"""
-
-import json
-import TestGyp
-
-found = 'Found dependency'
-found_all = 'Found dependency (all)'
-not_found = 'No dependencies'
-
-
-def _CreateConfigFile(files, additional_compile_targets, test_targets=[]):
-  """Creates the analyzer config file, which is used as the input to analyzer.
-  See description of analyzer.py for description of the arguments."""
-  f = open('test_file', 'w')
-  to_write = {'files': files,
-              'test_targets': test_targets,
-              'additional_compile_targets': additional_compile_targets }
-  json.dump(to_write, f)
-  f.close()
-
-
-def _CreateBogusConfigFile():
-  f = open('test_file','w')
-  f.write('bogus')
-  f.close()
-
-
-def _ReadOutputFileContents():
-  f = open('analyzer_output', 'r')
-  result = json.load(f)
-  f.close()
-  return result
-
-
-# NOTE: this would be clearer if it subclassed TestGypCustom, but that trips
-# over a bug in pylint (E1002).
-test = TestGyp.TestGypCustom(format='analyzer')
-
-def CommonArgs():
-  return ('-Gconfig_path=test_file',
-           '-Ganalyzer_output_path=analyzer_output')
-
-
-def run_analyzer(*args, **kw):
-  """Runs the test specifying a particular config and output path."""
-  args += CommonArgs()
-  test.run_gyp('test.gyp', *args, **kw)
-
-
-def run_analyzer2(*args, **kw):
-  """Same as run_analyzer(), but passes in test2.gyp instead of test.gyp."""
-  args += CommonArgs()
-  test.run_gyp('test2.gyp', *args, **kw)
-
-
-def run_analyzer3(*args, **kw):
-  """Same as run_analyzer(), but passes in test3.gyp instead of test.gyp."""
-  args += CommonArgs()
-  test.run_gyp('test3.gyp', *args, **kw)
-
-
-def run_analyzer4(*args, **kw):
-  """Same as run_analyzer(), but passes in test3.gyp instead of test.gyp."""
-  args += CommonArgs()
-  test.run_gyp('test4.gyp', *args, **kw)
-
-
-def EnsureContains(matched=False, compile_targets=set(), test_targets=set()):
-  """Verifies output contains |compile_targets|."""
-  result = _ReadOutputFileContents()
-  if 'error' in result:
-    print 'unexpected error', result.get('error')
-    test.fail_test()
-
-  if 'invalid_targets' in result:
-    print 'unexpected invalid_targets', result.get('invalid_targets')
-    test.fail_test()
-
-  actual_compile_targets = set(result['compile_targets'])
-  if actual_compile_targets != compile_targets:
-    print 'actual compile_targets:', actual_compile_targets, \
-           '\nexpected compile_targets:', compile_targets
-    test.fail_test()
-
-  actual_test_targets = set(result['test_targets'])
-  if actual_test_targets != test_targets:
-    print 'actual test_targets:', actual_test_targets, \
-           '\nexpected test_targets:', test_targets
-    test.fail_test()
-
-  if matched and result['status'] != found:
-    print 'expected', found, 'got', result['status']
-    test.fail_test()
-  elif not matched and result['status'] != not_found:
-    print 'expected', not_found, 'got', result['status']
-    test.fail_test()
-
-
-def EnsureMatchedAll(compile_targets, test_targets=set()):
-  result = _ReadOutputFileContents()
-  if 'error' in result:
-    print 'unexpected error', result.get('error')
-    test.fail_test()
-
-  if 'invalid_targets' in result:
-    print 'unexpected invalid_targets', result.get('invalid_targets')
-    test.fail_test()
-
-  if result['status'] != found_all:
-    print 'expected', found_all, 'got', result['status']
-    test.fail_test()
-
-  actual_compile_targets = set(result['compile_targets'])
-  if actual_compile_targets != compile_targets:
-    print ('actual compile_targets:', actual_compile_targets,
-           '\nexpected compile_targets:', compile_targets)
-    test.fail_test()
-
-  actual_test_targets = set(result['test_targets'])
-  if actual_test_targets != test_targets:
-    print ('actual test_targets:', actual_test_targets,
-           '\nexpected test_targets:', test_targets)
-    test.fail_test()
-
-
-def EnsureError(expected_error_string):
-  """Verifies output contains the error string."""
-  result = _ReadOutputFileContents()
-  if result.get('error', '').find(expected_error_string) == -1:
-    print 'actual error:', result.get('error', ''), '\nexpected error:', \
-        expected_error_string
-    test.fail_test()
-
-
-def EnsureStdoutContains(expected_error_string):
-  if test.stdout().find(expected_error_string) == -1:
-    print 'actual stdout:', test.stdout(), '\nexpected stdout:', \
-        expected_error_string
-    test.fail_test()
-
-
-def EnsureInvalidTargets(expected_invalid_targets):
-  """Verifies output contains invalid_targets."""
-  result = _ReadOutputFileContents()
-  actual_invalid_targets = set(result['invalid_targets'])
-  if actual_invalid_targets != expected_invalid_targets:
-    print 'actual invalid_targets:', actual_invalid_targets, \
-        '\nexpected :', expected_invalid_targets
-    test.fail_test()
-
-
-# Two targets, A and B (both static_libraries) and A depends upon B. If a file
-# in B changes, then both A and B are output. It is not strictly necessary that
-# A is compiled in this case, only B.
-_CreateConfigFile(['b.c'], ['all'])
-test.run_gyp('static_library_test.gyp', *CommonArgs())
-EnsureContains(matched=True, compile_targets={'a' ,'b'})
-
-# Verifies config_path must be specified.
-test.run_gyp('test.gyp')
-EnsureStdoutContains('Must specify files to analyze via config_path')
-
-# Verifies config_path must point to a valid file.
-test.run_gyp('test.gyp', '-Gconfig_path=bogus_file',
-             '-Ganalyzer_output_path=analyzer_output')
-EnsureError('Unable to open file bogus_file')
-
-# Verify 'invalid_targets' is present when bad target is specified.
-_CreateConfigFile(['exe2.c'], ['bad_target'])
-run_analyzer()
-EnsureInvalidTargets({'bad_target'})
-
-# Verifies config_path must point to a valid json file.
-_CreateBogusConfigFile()
-run_analyzer()
-EnsureError('Unable to parse config file test_file')
-
-# Trivial test of a source.
-_CreateConfigFile(['foo.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Conditional source that is excluded.
-_CreateConfigFile(['conditional_source.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=False)
-
-# Conditional source that is included by way of argument.
-_CreateConfigFile(['conditional_source.c'], ['all'])
-run_analyzer('-Dtest_variable=1')
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Two unknown files.
-_CreateConfigFile(['unknown1.c', 'unoknow2.cc'], ['all'])
-run_analyzer()
-EnsureContains()
-
-# Two unknown files.
-_CreateConfigFile(['unknown1.c', 'subdir/subdir_sourcex.c'], ['all'])
-run_analyzer()
-EnsureContains()
-
-# Included dependency
-_CreateConfigFile(['unknown1.c', 'subdir/subdir_source.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe', 'exe3'})
-
-# Included inputs to actions.
-_CreateConfigFile(['action_input.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Don't consider outputs.
-_CreateConfigFile(['action_output.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=False)
-
-# Rule inputs.
-_CreateConfigFile(['rule_input.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Ignore path specified with PRODUCT_DIR.
-_CreateConfigFile(['product_dir_input.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=False)
-
-# Path specified via a variable.
-_CreateConfigFile(['subdir/subdir_source2.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Verifies paths with // are fixed up correctly.
-_CreateConfigFile(['parent_source.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe', 'exe3'})
-
-# Verifies relative paths are resolved correctly.
-_CreateConfigFile(['subdir/subdir_source.h'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Verifies relative paths in inputs are resolved correctly.
-_CreateConfigFile(['rel_path1.h'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Various permutations when passing in targets.
-_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'],
-                  ['all'], ['exe', 'exe3'])
-run_analyzer()
-EnsureContains(matched=True, test_targets={'exe3'},
-               compile_targets={'exe2', 'exe3'})
-
-_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'], ['all'], ['exe'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
-
-# Verifies duplicates are ignored.
-_CreateConfigFile(['exe2.c', 'subdir/subdir2b_source.c'], ['all'],
-                  ['exe', 'exe'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
-
-_CreateConfigFile(['exe2.c'], ['all'], ['exe'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2'})
-
-_CreateConfigFile(['exe2.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2'})
-
-_CreateConfigFile(['subdir/subdir2b_source.c', 'exe2.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2', 'exe3'})
-
-_CreateConfigFile(['subdir/subdir2b_source.c'], ['all'], ['exe3'])
-run_analyzer()
-EnsureContains(matched=True, test_targets={'exe3'}, compile_targets={'exe3'})
-
-_CreateConfigFile(['exe2.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2'})
-
-_CreateConfigFile(['foo.c'], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe'})
-
-# Assertions when modifying build (gyp/gypi) files, especially when said files
-# are included.
-_CreateConfigFile(['subdir2/d.cc'], ['all'], ['exe', 'exe2', 'foo', 'exe3'])
-run_analyzer2()
-EnsureContains(matched=True, test_targets={'exe', 'foo'},
-               compile_targets={'exe', 'foo'})
-
-_CreateConfigFile(['subdir2/subdir.includes.gypi'], ['all'],
-                ['exe', 'exe2', 'foo', 'exe3'])
-run_analyzer2()
-EnsureContains(matched=True, test_targets={'exe', 'foo'},
-               compile_targets={'exe', 'foo'})
-
-_CreateConfigFile(['subdir2/subdir.gyp'], ['all'],
-                  ['exe', 'exe2', 'foo', 'exe3'])
-run_analyzer2()
-EnsureContains(matched=True, test_targets={'exe', 'foo'},
-               compile_targets={'exe', 'foo'})
-
-_CreateConfigFile(['test2.includes.gypi'], ['all'],
-                  ['exe', 'exe2', 'foo', 'exe3'])
-run_analyzer2()
-EnsureContains(matched=True, test_targets={'exe', 'exe2', 'exe3'},
-               compile_targets={'exe', 'exe2', 'exe3'})
-
-# Verify modifying a file included makes all targets dirty.
-_CreateConfigFile(['common.gypi'], ['all'], ['exe', 'exe2', 'foo', 'exe3'])
-run_analyzer2('-Icommon.gypi')
-EnsureMatchedAll({'all', 'exe', 'exe2', 'foo', 'exe3'},
-                 {'exe', 'exe2', 'foo', 'exe3'})
-
-# Assertions from test3.gyp.
-_CreateConfigFile(['d.c', 'f.c'], ['all'], ['a'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
-
-_CreateConfigFile(['f.c'], ['all'], ['a'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
-
-_CreateConfigFile(['f.c'], ['all'])
-run_analyzer3()
-EnsureContains(matched=True, compile_targets={'a', 'b'})
-
-_CreateConfigFile(['c.c', 'e.c'], ['all'])
-run_analyzer3()
-EnsureContains(matched=True, compile_targets={'a', 'b', 'c', 'e'})
-
-_CreateConfigFile(['d.c'], ['all'], ['a'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
-
-_CreateConfigFile(['a.c'], ['all'], ['a', 'b'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
-
-_CreateConfigFile(['a.c'], ['all'], ['a', 'b'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
-
-_CreateConfigFile(['d.c'], ['all'], ['a', 'b'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a', 'b'},
-               compile_targets={'a', 'b'})
-
-_CreateConfigFile(['f.c'], ['all'], ['a'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a', 'b'})
-
-_CreateConfigFile(['a.c'], ['all'], ['a'])
-run_analyzer3()
-EnsureContains(matched=True, test_targets={'a'}, compile_targets={'a'})
-
-_CreateConfigFile(['a.c'], ['all'])
-run_analyzer3()
-EnsureContains(matched=True, compile_targets={'a'})
-
-_CreateConfigFile(['d.c'], ['all'])
-run_analyzer3()
-EnsureContains(matched=True, compile_targets={'a', 'b'})
-
-# Assertions around test4.gyp.
-_CreateConfigFile(['f.c'], ['all'])
-run_analyzer4()
-EnsureContains(matched=True, compile_targets={'e', 'f'})
-
-_CreateConfigFile(['d.c'], ['all'])
-run_analyzer4()
-EnsureContains(matched=True, compile_targets={'a', 'b', 'c', 'd'})
-
-_CreateConfigFile(['i.c'], ['all'])
-run_analyzer4()
-EnsureContains(matched=True, compile_targets={'h', 'i'})
-
-# Assertions where 'all' is not supplied in compile_targets.
-
-_CreateConfigFile(['exe2.c'], [], ['exe2'])
-run_analyzer()
-EnsureContains(matched=True, test_targets={'exe2'}, compile_targets={'exe2'})
-
-_CreateConfigFile(['exe20.c'], [], ['exe2'])
-run_analyzer()
-EnsureContains(matched=False)
-
-
-_CreateConfigFile(['exe2.c', 'exe3.c'], [], ['exe2', 'exe3'])
-run_analyzer()
-EnsureContains(matched=True, test_targets={'exe2', 'exe3'},
-               compile_targets={'exe2', 'exe3'})
-
-_CreateConfigFile(['exe2.c', 'exe3.c'], ['exe3'], ['exe2'])
-run_analyzer()
-EnsureContains(matched=True, test_targets={'exe2'},
-               compile_targets={'exe2', 'exe3'})
-
-_CreateConfigFile(['exe3.c'], ['exe2'], ['exe2'])
-run_analyzer()
-EnsureContains(matched=False)
-
-# Assertions with 'all' listed as a test_target.
-_CreateConfigFile(['exe3.c'], [], ['all'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe3', 'all'},
-               test_targets={'all'})
-
-_CreateConfigFile(['exe2.c'], [], ['all', 'exe2'])
-run_analyzer()
-EnsureContains(matched=True, compile_targets={'exe2', 'all'},
-               test_targets={'all', 'exe2'})
-
-test.pass_test()
diff --git a/tools/gyp/test/analyzer/static_library_test.gyp b/tools/gyp/test/analyzer/static_library_test.gyp
deleted file mode 100644
index 2c8e4bd..0000000
--- a/tools/gyp/test/analyzer/static_library_test.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# These gyp files create the following dependencies:
-#
-# test.gyp:
-#   #a -> b
-#     a.c
-#   #b
-#     b.c
-#  a and b are static libraries.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-      ],
-      'dependencies': [
-        'b',
-      ],
-    },
-    {
-      'target_name': 'b',
-      'type': 'static_library',
-      'sources': [
-        'b.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/subdir/subdir.gyp b/tools/gyp/test/analyzer/subdir/subdir.gyp
deleted file mode 100644
index bfa2df4..0000000
--- a/tools/gyp/test/analyzer/subdir/subdir.gyp
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'trailing_dir_path': '../',
-   },
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'sources': [
-        'subdir_source.c',
-        '<(trailing_dir_path)/parent_source.c',
-      ],
-    },
-    {
-      'target_name': 'subdir2a',
-      'type': 'static_library',
-      'sources': [
-        'subdir2_source.c',
-      ],
-      'dependencies': [
-        'subdir2b',
-      ],
-    },
-    {
-      'target_name': 'subdir2b',
-      'type': 'static_library',
-      'sources': [
-        'subdir2b_source.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/subdir/subdir2/subdir2.gyp b/tools/gyp/test/analyzer/subdir/subdir2/subdir2.gyp
deleted file mode 100644
index e5aaa92..0000000
--- a/tools/gyp/test/analyzer/subdir/subdir2/subdir2.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'subdir2',
-      'type': 'static_library',
-      'sources': [
-        '../subdir_source.h',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/subdir2/subdir.gyp b/tools/gyp/test/analyzer/subdir2/subdir.gyp
deleted file mode 100644
index d6c709c..0000000
--- a/tools/gyp/test/analyzer/subdir2/subdir.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'sources': [
-        'subdir_source.c',
-      ],
-      'includes': [
-        'subdir.includes.gypi',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/subdir2/subdir.includes.gypi b/tools/gyp/test/analyzer/subdir2/subdir.includes.gypi
deleted file mode 100644
index 324e92b..0000000
--- a/tools/gyp/test/analyzer/subdir2/subdir.includes.gypi
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'sources': [
-    'd.cc'
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test.gyp b/tools/gyp/test/analyzer/test.gyp
deleted file mode 100644
index c25ca73..0000000
--- a/tools/gyp/test/analyzer/test.gyp
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# These gyp files create the following dependencies:
-#
-# test.gyp:
-#   #exe -> subdir/subdir.gyp#foo, subdir/subdir2/subdir2.gyp#subdir2
-#     foo.c
-#     subdir/subdir_source2.c
-#     conditional_source.c (if test_variable==1)
-#     action_input.c
-#     action_output.c
-#     rule_input.c
-#     rule_output.pdf
-#   #exe2
-#     exe2.c
-#   #exe3 -> subdir/subdir.gyp#foo, subdir/subdir.gyp#subdir2a
-#     exe3.c
-#   #allx (type none) -> exe, exe3
-# 
-# subdir/subdir.gyp
-#   #foo
-#     subdir/subdir_source.c
-#     parent_source.c
-#   #subdir2a -> subdir2b
-#     subdir/subdir2_source.c
-#   #subdir2b
-#     subdir/subdir2b_source.c
-# 
-# subdir/subdir2/subdir2.gyp
-#   #subdir2
-#     subdir/subdir_source.h
-
-{
-  'variables': {
-    'test_variable%': 0,
-    'variable_path': 'subdir',
-   },
-  'targets': [
-    {
-      'target_name': 'exe',
-      'type': 'executable',
-      'dependencies': [
-        'subdir/subdir.gyp:foo',
-        'subdir/subdir2/subdir2.gyp:subdir2',
-      ],
-      'sources': [
-        'foo.c',
-        '<(variable_path)/subdir_source2.c',
-      ],
-      'conditions': [
-        ['test_variable==1', {
-          'sources': [
-            'conditional_source.c',
-          ],
-        }],
-      ],
-      'actions': [
-        {
-          'action_name': 'action',
-          'inputs': [
-            '<(PRODUCT_DIR)/product_dir_input.c',
-            'action_input.c',
-            '../bad_path1.h',
-            '../../bad_path2.h',
-            './rel_path1.h',
-          ],
-          'outputs': [
-            'action_output.c',
-          ],
-        },
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule',
-          'extension': 'pdf',
-          'inputs': [
-            'rule_input.c',
-          ],
-          'outputs': [
-            'rule_output.pdf',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'exe2',
-      'type': 'executable',
-      'sources': [
-        'exe2.c',
-      ],
-    },
-    {
-      'target_name': 'exe3',
-      'type': 'executable',
-      'dependencies': [
-        'subdir/subdir.gyp:foo',
-        'subdir/subdir.gyp:subdir2a',
-      ],
-      'sources': [
-        'exe3.c',
-      ],
-    },
-    {
-      'target_name': 'allx',
-      'type': 'none',
-      'dependencies': [
-        'exe',
-        'exe3',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test2.gyp b/tools/gyp/test/analyzer/test2.gyp
deleted file mode 100644
index 782b6e6..0000000
--- a/tools/gyp/test/analyzer/test2.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'exe',
-      'type': 'executable',
-      'dependencies': [
-        'subdir2/subdir.gyp:foo',
-      ],
-    },
-    {
-      'target_name': 'exe2',
-      'type': 'executable',
-      'includes': [
-        'test2.includes.gypi',
-      ],
-    },
-  ],
-  'includes': [
-    'test2.toplevel_includes.gypi',
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test2.includes.gypi b/tools/gyp/test/analyzer/test2.includes.gypi
deleted file mode 100644
index 3e21de2..0000000
--- a/tools/gyp/test/analyzer/test2.includes.gypi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'sources': [
-    'a.cc',
-    'b.cc'
-  ],
-  'includes': [
-    'test2.includes.includes.gypi',
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test2.includes.includes.gypi b/tools/gyp/test/analyzer/test2.includes.includes.gypi
deleted file mode 100644
index de3a025..0000000
--- a/tools/gyp/test/analyzer/test2.includes.includes.gypi
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'sources': [
-    'c.cc'
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test2.toplevel_includes.gypi b/tools/gyp/test/analyzer/test2.toplevel_includes.gypi
deleted file mode 100644
index 54fa453..0000000
--- a/tools/gyp/test/analyzer/test2.toplevel_includes.gypi
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'exe3',
-      'type': 'executable',
-      'sources': [
-        'e.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test3.gyp b/tools/gyp/test/analyzer/test3.gyp
deleted file mode 100644
index e52f6bc..0000000
--- a/tools/gyp/test/analyzer/test3.gyp
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'allx',
-      'type': 'none',
-      'dependencies': [
-        'a',
-        'b',
-      ],
-    },
-    {
-      'target_name': 'a',
-      'type': 'executable',
-      'sources': [
-        'a.c',
-      ],
-      'dependencies': [
-        'c',
-        'd',
-      ],
-    },
-    {
-      'target_name': 'b',
-      'type': 'executable',
-      'sources': [
-        'b.c',
-      ],
-      'dependencies': [
-        'd',
-        'e',
-      ],
-    },
-    {
-      'target_name': 'c',
-      'type': 'executable',
-      'sources': [
-        'c.c',
-      ],
-    },
-    {
-      'target_name': 'd',
-      'type': 'none',
-      'sources': [
-        'd.c',
-      ],
-      'dependencies': [
-        'f',
-        'g',
-      ],
-    },
-    {
-      'target_name': 'e',
-      'type': 'executable',
-      'sources': [
-        'e.c',
-      ],
-    },
-    {
-      'target_name': 'f',
-      'type': 'static_library',
-      'sources': [
-        'f.c',
-      ],
-    },
-    {
-      'target_name': 'g',
-      'type': 'executable',
-      'sources': [
-        'g.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test4.gyp b/tools/gyp/test/analyzer/test4.gyp
deleted file mode 100644
index 91cea56..0000000
--- a/tools/gyp/test/analyzer/test4.gyp
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'executable',
-      'sources': [
-        'a.c',
-      ],
-      'dependencies': [
-        'b',
-        'c',
-      ],
-    },
-    {
-      'target_name': 'b',
-      'type': 'executable',
-      'sources': [
-        'b.c',
-      ],
-      'dependencies': [
-        'd',
-      ],
-    },
-    {
-      'target_name': 'c',
-      'type': 'executable',
-      'sources': [
-        'c.c',
-      ],
-      'dependencies': [
-        'b',
-        'd',
-      ],
-    },
-    {
-      'target_name': 'd',
-      'type': 'executable',
-      'sources': [
-        'd.c',
-      ],
-    },
-    {
-      'target_name': 'e',
-      'type': 'executable',
-      'dependencies': [
-        'test5.gyp:f',
-      ],
-    },
-    {
-      'target_name': 'h',
-      'type': 'none',
-      'dependencies': [
-        'i',
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule',
-          'extension': 'pdf',
-          'inputs': [
-            'rule_input.c',
-          ],
-          'outputs': [
-            'rule_output.pdf',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'i',
-      'type': 'static_library',
-      'sources': [
-        'i.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/analyzer/test5.gyp b/tools/gyp/test/analyzer/test5.gyp
deleted file mode 100644
index f3ea5b0..0000000
--- a/tools/gyp/test/analyzer/test5.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'f',
-      'type': 'executable',
-      'sources': [
-        'f.c',
-      ],
-    },
-    {
-      'target_name': 'g',
-      'type': 'executable',
-      'sources': [
-        'g.c',
-      ],
-      'dependencies': [
-        'f',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/arflags/gyptest-arflags.py b/tools/gyp/test/arflags/gyptest-arflags.py
deleted file mode 100644
index a5cbcac..0000000
--- a/tools/gyp/test/arflags/gyptest-arflags.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that building a target with invalid arflags fails.
-"""
-
-import os
-import sys
-import TestGyp
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-test = TestGyp.TestGyp(formats=['ninja'])
-test.run_gyp('test.gyp')
-expected_status = 0 if sys.platform in ['darwin', 'win32'] else 1
-test.build('test.gyp', target='lib', status=expected_status)
-test.pass_test()
diff --git a/tools/gyp/test/arflags/lib.cc b/tools/gyp/test/arflags/lib.cc
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/arflags/lib.cc
+++ /dev/null
diff --git a/tools/gyp/test/arflags/test.gyp b/tools/gyp/test/arflags/test.gyp
deleted file mode 100644
index f7430fa..0000000
--- a/tools/gyp/test/arflags/test.gyp
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'lib',
-      'type': 'static_library',
-      'sources': ['lib.cc'],
-      'arflags': ['--nonexistent'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/assembly/gyptest-assembly.py b/tools/gyp/test/assembly/gyptest-assembly.py
deleted file mode 100755
index 8a84310..0000000
--- a/tools/gyp/test/assembly/gyptest-assembly.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-A basic test of compiling assembler files.
-"""
-
-import sys
-import TestGyp
-
-if sys.platform != 'win32':
-  # TODO(bradnelson): get this working for windows.
-  test = TestGyp.TestGyp(formats=['!msvs'])
-
-  test.run_gyp('assembly.gyp', chdir='src')
-
-  test.relocate('src', 'relocate/src')
-
-  test.build('assembly.gyp', test.ALL, chdir='relocate/src')
-
-  expect = """\
-Hello from program.c
-Got 42.
-"""
-  test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-  test.pass_test()
diff --git a/tools/gyp/test/assembly/gyptest-override.py b/tools/gyp/test/assembly/gyptest-override.py
deleted file mode 100644
index e84a23e..0000000
--- a/tools/gyp/test/assembly/gyptest-override.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure that manual rules on Windows override the built in ones.
-"""
-
-import sys
-import TestGyp
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'src'
-  test.run_gyp('override.gyp', chdir=CHDIR)
-  test.build('override.gyp', test.ALL, chdir=CHDIR)
-  expect = """\
-Hello from program.c
-Got 42.
-"""
-  test.run_built_executable('program', chdir=CHDIR, stdout=expect)
-  test.pass_test()
diff --git a/tools/gyp/test/assembly/src/as.bat b/tools/gyp/test/assembly/src/as.bat
deleted file mode 100644
index b796db9..0000000
--- a/tools/gyp/test/assembly/src/as.bat
+++ /dev/null
@@ -1,4 +0,0 @@
-@echo off
-:: Mock windows assembler.
-cl /MD /c %1 /Fo"%2"
-
diff --git a/tools/gyp/test/assembly/src/assembly.gyp b/tools/gyp/test/assembly/src/assembly.gyp
deleted file mode 100644
index 565cb0f..0000000
--- a/tools/gyp/test/assembly/src/assembly.gyp
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'conditions': [
-      ['OS=="win"', {
-        'defines': ['PLATFORM_WIN'],
-      }],
-      ['OS=="mac" or OS=="ios"', {
-        'defines': ['PLATFORM_MAC'],
-      }],
-      ['OS=="linux"', {
-        'defines': ['PLATFORM_LINUX'],
-      }],
-      ['OS=="android"', {
-        'defines': ['PLATFORM_ANDROID'],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': ['lib1'],
-      'sources': [
-        'program.c',
-      ],
-    },
-    {
-      'target_name': 'lib1',
-      'type': 'static_library',
-      'sources': [
-        'lib1.S',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="win"', {
-      'target_defaults': {
-        'rules': [
-          {
-            'rule_name': 'assembler',
-            'msvs_cygwin_shell': 0,
-            'extension': 'S',
-            'inputs': [
-              'as.bat',
-            ],
-            'outputs': [
-              '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
-            ],
-            'action':
-              ['as.bat', 'lib1.c', '<(_outputs)'],
-            'message': 'Building assembly file <(RULE_INPUT_PATH)',
-            'process_outputs_as_sources': 1,
-          },
-        ],
-      },
-    },],
-  ],
-}
diff --git a/tools/gyp/test/assembly/src/lib1.S b/tools/gyp/test/assembly/src/lib1.S
deleted file mode 100644
index 7de9f19..0000000
--- a/tools/gyp/test/assembly/src/lib1.S
+++ /dev/null
@@ -1,15 +0,0 @@
-#if PLATFORM_WINDOWS || PLATFORM_MAC
-# define IDENTIFIER(n)  _##n
-#else /* Linux */
-# define IDENTIFIER(n)  n
-#endif
-
-.globl IDENTIFIER(lib1_function)
-IDENTIFIER(lib1_function):
-#if !defined(PLATFORM_ANDROID)
-  movl $42, %eax
-  ret
-#else /* Android (assuming ARM) */
-  mov r0, #42
-  bx lr
-#endif
diff --git a/tools/gyp/test/assembly/src/lib1.c b/tools/gyp/test/assembly/src/lib1.c
deleted file mode 100644
index be21ecd..0000000
--- a/tools/gyp/test/assembly/src/lib1.c
+++ /dev/null
@@ -1,3 +0,0 @@
-int lib1_function(void) {
-  return 42;
-}
diff --git a/tools/gyp/test/assembly/src/override.gyp b/tools/gyp/test/assembly/src/override.gyp
deleted file mode 100644
index 39a4072..0000000
--- a/tools/gyp/test/assembly/src/override.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        'program.c',
-        'override_asm.asm',
-      ],
-      'rules': [
-      {
-        # Test that if there's a specific .asm rule, it overrides the
-        # built in one on Windows.
-        'rule_name': 'assembler',
-        'msvs_cygwin_shell': 0,
-        'extension': 'asm',
-        'inputs': [
-          'as.bat',
-        ],
-        'outputs': [
-          'output.obj',
-        ],
-        'action': ['as.bat', 'lib1.c', '<(_outputs)'],
-        'message': 'Building assembly file <(RULE_INPUT_PATH)',
-        'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/assembly/src/override_asm.asm b/tools/gyp/test/assembly/src/override_asm.asm
deleted file mode 100644
index be93b23..0000000
--- a/tools/gyp/test/assembly/src/override_asm.asm
+++ /dev/null
@@ -1,8 +0,0 @@
-; Copyright (c) 2012 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-; This is a placeholder. It should not be referenced if overrides work
-; correctly.
-
-Bad stuff that shouldn't assemble.
diff --git a/tools/gyp/test/assembly/src/program.c b/tools/gyp/test/assembly/src/program.c
deleted file mode 100644
index eee8627..0000000
--- a/tools/gyp/test/assembly/src/program.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-extern int lib1_function(void);
-
-int main(void)
-{
-  fprintf(stdout, "Hello from program.c\n");
-  fflush(stdout);
-  fprintf(stdout, "Got %d.\n", lib1_function());
-  fflush(stdout);
-  return 0;
-}
diff --git a/tools/gyp/test/build-option/gyptest-build.py b/tools/gyp/test/build-option/gyptest-build.py
deleted file mode 100755
index 34a9e11..0000000
--- a/tools/gyp/test/build-option/gyptest-build.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_default')
-
-if test.format == 'xcode-ninja':
-  # The xcode-ninja generator doesn't support --build
-  # cf. https://code.google.com/p/gyp/issues/detail?id=453
-  test.skip_test()
-
-test.run_gyp('hello.gyp', '--build=Default')
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-test.up_to_date('hello.gyp', test.DEFAULT)
-
-test.pass_test()
diff --git a/tools/gyp/test/build-option/hello.c b/tools/gyp/test/build-option/hello.c
deleted file mode 100644
index f6ad129..0000000
--- a/tools/gyp/test/build-option/hello.c
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/build-option/hello.gyp b/tools/gyp/test/build-option/hello.gyp
deleted file mode 100644
index 1974d51..0000000
--- a/tools/gyp/test/build-option/hello.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/builddir/gyptest-all.py b/tools/gyp/test/builddir/gyptest-all.py
deleted file mode 100755
index a26543f..0000000
--- a/tools/gyp/test/builddir/gyptest-all.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify the settings that cause a set of programs to be created in
-a specific build directory, and that no intermediate built files
-get created outside of that build directory hierarchy even when
-referred to with deeply-nested ../../.. paths.
-"""
-
-import TestGyp
-
-# TODO(mmoss): Make only supports (theoretically) a single, global build
-# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
-# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
-# generators support, so this doesn't work yet for make.
-# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
-# the "--depth" location, which is one level above 'src', but then this test
-# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
-# its sources. I'm not sure if make is wrong for writing outside the current
-# directory, or if the test is wrong for assuming everything generated is under
-# the current directory.
-# Ninja and CMake do not support setting the build directory.
-test = TestGyp.TestGyp(formats=['!make', '!ninja', '!cmake'])
-
-test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
-if test.format == 'msvs':
-  if test.uses_msbuild:
-    test.must_contain('src/prog1.vcxproj',
-      '<OutDir>..\\builddir\\Default\\</OutDir>')
-  else:
-    test.must_contain('src/prog1.vcproj',
-      'OutputDirectory="..\\builddir\\Default\\"')
-
-test.relocate('src', 'relocate/src')
-
-test.subdir('relocate/builddir')
-
-# Make sure that all the built ../../etc. files only get put under builddir,
-# by making all of relocate read-only and then making only builddir writable.
-test.writable('relocate', False)
-test.writable('relocate/builddir', True)
-
-# Suppress the test infrastructure's setting SYMROOT on the command line.
-test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
-
-expect1 = """\
-Hello from prog1.c
-Hello from func1.c
-"""
-
-expect2 = """\
-Hello from subdir2/prog2.c
-Hello from func2.c
-"""
-
-expect3 = """\
-Hello from subdir2/subdir3/prog3.c
-Hello from func3.c
-"""
-
-expect4 = """\
-Hello from subdir2/subdir3/subdir4/prog4.c
-Hello from func4.c
-"""
-
-expect5 = """\
-Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
-Hello from func5.c
-"""
-
-def run_builddir(prog, expect):
-  dir = 'relocate/builddir/Default/'
-  test.run(program=test.workpath(dir + prog), stdout=expect)
-
-run_builddir('prog1', expect1)
-run_builddir('prog2', expect2)
-run_builddir('prog3', expect3)
-run_builddir('prog4', expect4)
-run_builddir('prog5', expect5)
-
-test.pass_test()
diff --git a/tools/gyp/test/builddir/gyptest-default.py b/tools/gyp/test/builddir/gyptest-default.py
deleted file mode 100755
index 4904cda..0000000
--- a/tools/gyp/test/builddir/gyptest-default.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify the settings that cause a set of programs to be created in
-a specific build directory, and that no intermediate built files
-get created outside of that build directory hierarchy even when
-referred to with deeply-nested ../../.. paths.
-"""
-
-import TestGyp
-
-# TODO(mmoss): Make only supports (theoretically) a single, global build
-# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
-# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
-# generators support, so this doesn't work yet for make.
-# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
-# the "--depth" location, which is one level above 'src', but then this test
-# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
-# its sources. I'm not sure if make is wrong for writing outside the current
-# directory, or if the test is wrong for assuming everything generated is under
-# the current directory.
-# Ninja and CMake do not support setting the build directory.
-test = TestGyp.TestGyp(formats=['!make', '!ninja', '!cmake'])
-
-test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
-if test.format == 'msvs':
-  if test.uses_msbuild:
-    test.must_contain('src/prog1.vcxproj',
-      '<OutDir>..\\builddir\\Default\\</OutDir>')
-  else:
-    test.must_contain('src/prog1.vcproj',
-      'OutputDirectory="..\\builddir\\Default\\"')
-
-test.relocate('src', 'relocate/src')
-
-test.subdir('relocate/builddir')
-
-# Make sure that all the built ../../etc. files only get put under builddir,
-# by making all of relocate read-only and then making only builddir writable.
-test.writable('relocate', False)
-test.writable('relocate/builddir', True)
-
-# Suppress the test infrastructure's setting SYMROOT on the command line.
-test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
-
-expect1 = """\
-Hello from prog1.c
-Hello from func1.c
-"""
-
-expect2 = """\
-Hello from subdir2/prog2.c
-Hello from func2.c
-"""
-
-expect3 = """\
-Hello from subdir2/subdir3/prog3.c
-Hello from func3.c
-"""
-
-expect4 = """\
-Hello from subdir2/subdir3/subdir4/prog4.c
-Hello from func4.c
-"""
-
-expect5 = """\
-Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
-Hello from func5.c
-"""
-
-def run_builddir(prog, expect):
-  dir = 'relocate/builddir/Default/'
-  test.run(program=test.workpath(dir + prog), stdout=expect)
-
-run_builddir('prog1', expect1)
-run_builddir('prog2', expect2)
-run_builddir('prog3', expect3)
-run_builddir('prog4', expect4)
-run_builddir('prog5', expect5)
-
-test.pass_test()
diff --git a/tools/gyp/test/builddir/src/builddir.gypi b/tools/gyp/test/builddir/src/builddir.gypi
deleted file mode 100644
index ce175db..0000000
--- a/tools/gyp/test/builddir/src/builddir.gypi
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'configurations': {
-      'Default': {
-        'msvs_configuration_attributes': {
-          'OutputDirectory': '<(DEPTH)\\builddir/Default',
-        },
-      },
-    },
-  },
-  'xcode_settings': {
-    'SYMROOT': '<(DEPTH)/builddir',
-  },
-}
diff --git a/tools/gyp/test/builddir/src/func1.c b/tools/gyp/test/builddir/src/func1.c
deleted file mode 100644
index b8e6a06..0000000
--- a/tools/gyp/test/builddir/src/func1.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func1(void)
-{
-  printf("Hello from func1.c\n");
-}
diff --git a/tools/gyp/test/builddir/src/func2.c b/tools/gyp/test/builddir/src/func2.c
deleted file mode 100644
index 14aabac..0000000
--- a/tools/gyp/test/builddir/src/func2.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func2(void)
-{
-  printf("Hello from func2.c\n");
-}
diff --git a/tools/gyp/test/builddir/src/func3.c b/tools/gyp/test/builddir/src/func3.c
deleted file mode 100644
index 3b4edea..0000000
--- a/tools/gyp/test/builddir/src/func3.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func3(void)
-{
-  printf("Hello from func3.c\n");
-}
diff --git a/tools/gyp/test/builddir/src/func4.c b/tools/gyp/test/builddir/src/func4.c
deleted file mode 100644
index 732891b..0000000
--- a/tools/gyp/test/builddir/src/func4.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func4(void)
-{
-  printf("Hello from func4.c\n");
-}
diff --git a/tools/gyp/test/builddir/src/func5.c b/tools/gyp/test/builddir/src/func5.c
deleted file mode 100644
index 18fdfab..0000000
--- a/tools/gyp/test/builddir/src/func5.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func5(void)
-{
-  printf("Hello from func5.c\n");
-}
diff --git a/tools/gyp/test/builddir/src/prog1.c b/tools/gyp/test/builddir/src/prog1.c
deleted file mode 100644
index a32aaf0..0000000
--- a/tools/gyp/test/builddir/src/prog1.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void func1(void);
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  func1();
-  return 0;
-}
diff --git a/tools/gyp/test/builddir/src/prog1.gyp b/tools/gyp/test/builddir/src/prog1.gyp
deleted file mode 100644
index 5b96f03..0000000
--- a/tools/gyp/test/builddir/src/prog1.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    'builddir.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'pull_in_all',
-      'type': 'none',
-      'dependencies': [
-        'prog1',
-        'subdir2/prog2.gyp:prog2',
-        'subdir2/subdir3/prog3.gyp:prog3',
-        'subdir2/subdir3/subdir4/prog4.gyp:prog4',
-        'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
-      ],
-    },
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'sources': [
-        'prog1.c',
-        'func1.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/prog2.c b/tools/gyp/test/builddir/src/subdir2/prog2.c
deleted file mode 100644
index 9d682cd..0000000
--- a/tools/gyp/test/builddir/src/subdir2/prog2.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void func2(void);
-
-int main(void)
-{
-  printf("Hello from subdir2/prog2.c\n");
-  func2();
-  return 0;
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/prog2.gyp b/tools/gyp/test/builddir/src/subdir2/prog2.gyp
deleted file mode 100644
index 96299b6..0000000
--- a/tools/gyp/test/builddir/src/subdir2/prog2.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../builddir.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'sources': [
-        'prog2.c',
-        '../func2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c b/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c
deleted file mode 100644
index da74965..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void func3(void);
-
-int main(void)
-{
-  printf("Hello from subdir2/subdir3/prog3.c\n");
-  func3();
-  return 0;
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp b/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
deleted file mode 100644
index d7df43c..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../../builddir.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog3',
-      'type': 'executable',
-      'sources': [
-        'prog3.c',
-        '../../func3.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c b/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
deleted file mode 100644
index 5787d5f..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void func4(void);
-
-int main(void)
-{
-  printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
-  func4();
-  return 0;
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp b/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
deleted file mode 100644
index 862a8a1..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../../../builddir.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog4',
-      'type': 'executable',
-      'sources': [
-        'prog4.c',
-        '../../../func4.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c b/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
deleted file mode 100644
index c6e2ab5..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void func5(void);
-
-int main(void)
-{
-  printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
-  func5();
-  return 0;
-}
diff --git a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp b/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
deleted file mode 100644
index fe1c9cb..0000000
--- a/tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../../../../builddir.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog5',
-      'type': 'executable',
-      'sources': [
-        'prog5.c',
-        '../../../../func5.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/cflags/cflags.c b/tools/gyp/test/cflags/cflags.c
deleted file mode 100644
index 0a02ba9..0000000
--- a/tools/gyp/test/cflags/cflags.c
+++ /dev/null
@@ -1,15 +0,0 @@
-/* Copyright (c) 2010 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-#ifdef FOO
-  printf("FOO defined\n");
-#else
-  printf("FOO not defined\n");
-#endif
-  return 0;
-}
diff --git a/tools/gyp/test/cflags/cflags.gyp b/tools/gyp/test/cflags/cflags.gyp
deleted file mode 100644
index 2840dc6..0000000
--- a/tools/gyp/test/cflags/cflags.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'cflags',
-      'type': 'executable',
-      'sources': [
-        'cflags.c',
-      ],
-    },
-    {
-      'target_name': 'cflags_host',
-      'toolsets': ['host'],
-      'type': 'executable',
-      'sources': [
-        'cflags.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/cflags/gyptest-cflags.py b/tools/gyp/test/cflags/gyptest-cflags.py
deleted file mode 100755
index f4efccb..0000000
--- a/tools/gyp/test/cflags/gyptest-cflags.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies the use of the environment during regeneration when the gyp file
-changes, specifically via build of an executable with C preprocessor
-definition specified by CFLAGS.
-
-In this test, gyp and build both run in same local environment.
-"""
-
-import TestGyp
-
-# CPPFLAGS works in ninja but not make; CFLAGS works in both
-FORMATS = ('make', 'ninja')
-
-test = TestGyp.TestGyp(formats=FORMATS)
-
-# First set CFLAGS to blank in case the platform doesn't support unsetenv.
-with TestGyp.LocalEnv({'CFLAGS': '',
-                       'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('cflags.gyp')
-  test.build('cflags.gyp')
-
-expect = """FOO not defined\n"""
-test.run_built_executable('cflags', stdout=expect)
-test.run_built_executable('cflags_host', stdout=expect)
-
-test.sleep()
-
-with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1',
-                       'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('cflags.gyp')
-  test.build('cflags.gyp')
-
-expect = """FOO defined\n"""
-test.run_built_executable('cflags', stdout=expect)
-
-# Environment variable CFLAGS shouldn't influence the flags for the host.
-expect = """FOO not defined\n"""
-test.run_built_executable('cflags_host', stdout=expect)
-
-test.sleep()
-
-with TestGyp.LocalEnv({'CFLAGS_host': '-DFOO=1',
-                       'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('cflags.gyp')
-  test.build('cflags.gyp')
-
-# Environment variable CFLAGS_host should influence the flags for the host.
-expect = """FOO defined\n"""
-test.run_built_executable('cflags_host', stdout=expect)
-
-test.sleep()
-
-with TestGyp.LocalEnv({'CFLAGS': ''}):
-  test.run_gyp('cflags.gyp')
-  test.build('cflags.gyp')
-
-expect = """FOO not defined\n"""
-test.run_built_executable('cflags', stdout=expect)
-
-test.sleep()
-
-with TestGyp.LocalEnv({'CFLAGS': '-DFOO=1'}):
-  test.run_gyp('cflags.gyp')
-  test.build('cflags.gyp')
-
-expect = """FOO defined\n"""
-test.run_built_executable('cflags', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/compilable/gyptest-headers.py b/tools/gyp/test/compilable/gyptest-headers.py
deleted file mode 100755
index 9176021..0000000
--- a/tools/gyp/test/compilable/gyptest-headers.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that .hpp files are ignored when included in the source list on all
-platforms.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('headers.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('headers.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from program.c
-Hello from lib1.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/compilable/src/headers.gyp b/tools/gyp/test/compilable/src/headers.gyp
deleted file mode 100644
index b6c2a88..0000000
--- a/tools/gyp/test/compilable/src/headers.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': [
-        'lib1'
-      ],
-      'sources': [
-        'program.cpp',
-      ],
-    },
-    {
-      'target_name': 'lib1',
-      'type': 'static_library',
-      'sources': [
-        'lib1.hpp',
-        'lib1.cpp',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/compilable/src/lib1.cpp b/tools/gyp/test/compilable/src/lib1.cpp
deleted file mode 100644
index 51bc31a..0000000
--- a/tools/gyp/test/compilable/src/lib1.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-#include "lib1.hpp"
-
-void lib1_function(void) {
-  fprintf(stdout, "Hello from lib1.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/compilable/src/lib1.hpp b/tools/gyp/test/compilable/src/lib1.hpp
deleted file mode 100644
index 72e63e8..0000000
--- a/tools/gyp/test/compilable/src/lib1.hpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#ifndef _lib1_hpp
-#define _lib1_hpp
-
-extern void lib1_function(void);
-
-#endif
diff --git a/tools/gyp/test/compilable/src/program.cpp b/tools/gyp/test/compilable/src/program.cpp
deleted file mode 100644
index 8af2c9b..0000000
--- a/tools/gyp/test/compilable/src/program.cpp
+++ /dev/null
@@ -1,9 +0,0 @@
-#include <stdio.h>
-#include "lib1.hpp"
-
-int main(void) {
-  fprintf(stdout, "Hello from program.c\n");
-  fflush(stdout);
-  lib1_function();
-  return 0;
-}
diff --git a/tools/gyp/test/compiler-override/compiler-exe.gyp b/tools/gyp/test/compiler-override/compiler-exe.gyp
deleted file mode 100644
index c2f3002..0000000
--- a/tools/gyp/test/compiler-override/compiler-exe.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'test.c',
-        'cxxtest.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/compiler-override/compiler-global-settings.gyp.in b/tools/gyp/test/compiler-override/compiler-global-settings.gyp.in
deleted file mode 100644
index ca13a53..0000000
--- a/tools/gyp/test/compiler-override/compiler-global-settings.gyp.in
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  # PYTHON and PWD are replaced by the test code before this
-  # gyp file runs
-  'make_global_settings': [
-    ['CC', r'$PYTHON $PWD/my_cc.py FOO'],
-    ['CXX', r'$PYTHON $PWD/my_cxx.py FOO'],
-    ['CC.host', r'$PYTHON $PWD/my_cc.py BAR'],
-    ['CXX.host', r'$PYTHON $PWD/my_cxx.py BAR'],
-
-    ['LD', r'$PYTHON $PWD/my_ld.py FOO_LINK'],
-    ['LD.host', r'$PYTHON $PWD/my_ld.py BAR_LINK'],
-    ['LINK', r'$PYTHON $PWD/my_ld.py FOO_LINK'],
-    ['LINK.host', r'$PYTHON $PWD/my_ld.py BAR_LINK'],
-  ],
-
-  # The above global settings should mean that
-  # that these targets are built using the fake
-  # toolchain above.
-  'targets': [
-    {
-      'toolset': '$TOOLSET',
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'test.c',
-        'cxxtest.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/compiler-override/compiler-host.gyp b/tools/gyp/test/compiler-override/compiler-host.gyp
deleted file mode 100644
index ab3d247..0000000
--- a/tools/gyp/test/compiler-override/compiler-host.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'toolset': 'host',
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'test.c',
-        'cxxtest.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/compiler-override/compiler-shared-lib.gyp b/tools/gyp/test/compiler-override/compiler-shared-lib.gyp
deleted file mode 100644
index d3e4316..0000000
--- a/tools/gyp/test/compiler-override/compiler-shared-lib.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello-lib',
-      'type': 'shared_library',
-      'sources': [
-        'test.c',
-        'cxxtest.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/compiler-override/cxxtest.cc b/tools/gyp/test/compiler-override/cxxtest.cc
deleted file mode 100644
index 517a353..0000000
--- a/tools/gyp/test/compiler-override/cxxtest.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Deliberate C syntax error as this file should never be passed to
-// the actual compiler
-#error Should not be passed to a real compiler
diff --git a/tools/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py b/tools/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py
deleted file mode 100644
index 2361d0c..0000000
--- a/tools/gyp/test/compiler-override/gyptest-compiler-env-toolchain.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""
-Verifies that the user can override the compiler and linker using
-CC/CXX/NM/READELF environment variables.
-"""
-
-import TestGyp
-import os
-import copy
-import sys
-
-here = os.path.dirname(os.path.abspath(__file__))
-
-if sys.platform == 'win32':
-  # cross compiling not supported by ninja on windows
-  # and make not supported on windows at all.
-  sys.exit(0)
-
-# Clear any existing compiler related env vars.
-for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host',
-            'NM_target', 'READELF_target']:
-  if key in os.environ:
-    del os.environ[key]
-
-
-def CheckCompiler(test, gypfile, check_for, run_gyp):
-  if run_gyp:
-    test.run_gyp(gypfile)
-  test.build(gypfile)
-
-  test.must_contain_all_lines(test.stdout(), check_for)
-
-
-test = TestGyp.TestGyp(formats=['ninja'])
-# Must set the test format to something with a flavor (the part after the '-')
-# in order to test the desired behavior. Since we want to run a non-host
-# toolchain, we have to set the flavor to something that the ninja generator
-# doesn't know about, so it doesn't default to the host-specific tools (e.g.,
-# 'otool' on mac to generate the .TOC).
-#
-# Note that we can't just pass format=['ninja-some_toolchain'] to the
-# constructor above, because then this test wouldn't be recognized as a ninja
-# format test.
-test.formats = ['ninja-my_flavor' if f == 'ninja' else f for f in test.formats]
-
-
-def TestTargetOverideSharedLib():
-  # The std output from nm and readelf is redirected to files, so we can't
-  # expect their output to appear. Instead, check for the files they create to
-  # see if they actually ran.
-  expected = ['my_cc.py', 'my_cxx.py', 'FOO']
-
-  # Check that CC, CXX, NM, READELF, set target compiler
-  env = {'CC': 'python %s/my_cc.py FOO' % here,
-         'CXX': 'python %s/my_cxx.py FOO' % here,
-         'NM': 'python %s/my_nm.py' % here,
-         'READELF': 'python %s/my_readelf.py' % here}
-
-  with TestGyp.LocalEnv(env):
-    CheckCompiler(test, 'compiler-shared-lib.gyp', expected, True)
-    test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
-    test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
-    test.unlink(test.built_file_path('RAN_MY_NM'))
-    test.unlink(test.built_file_path('RAN_MY_READELF'))
-
-  # Run the same tests once the eviron has been restored.  The generated
-  # projects should have embedded all the settings in the project files so the
-  # results should be the same.
-  CheckCompiler(test, 'compiler-shared-lib.gyp', expected, False)
-  test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
-  test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
-
-
-TestTargetOverideSharedLib()
-test.pass_test()
diff --git a/tools/gyp/test/compiler-override/gyptest-compiler-env.py b/tools/gyp/test/compiler-override/gyptest-compiler-env.py
deleted file mode 100755
index bb38b6e..0000000
--- a/tools/gyp/test/compiler-override/gyptest-compiler-env.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""
-Verifies that the user can override the compiler and linker using CC/CXX/LD
-environment variables.
-"""
-
-import TestGyp
-import os
-import copy
-import sys
-
-here = os.path.dirname(os.path.abspath(__file__))
-
-if sys.platform == 'win32':
-  # cross compiling not supported by ninja on windows
-  # and make not supported on windows at all.
-  sys.exit(0)
-
-# Clear any existing compiler related env vars.
-for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host']:
-  if key in os.environ:
-    del os.environ[key]
-
-
-def CheckCompiler(test, gypfile, check_for, run_gyp):
-  if run_gyp:
-    test.run_gyp(gypfile)
-  test.build(gypfile)
-
-  test.must_contain_all_lines(test.stdout(), check_for)
-
-
-test = TestGyp.TestGyp(formats=['ninja', 'make'])
-
-def TestTargetOveride():
-  expected = ['my_cc.py', 'my_cxx.py', 'FOO' ]
-
-  # ninja just uses $CC / $CXX as linker.
-  if test.format not in ['ninja', 'xcode-ninja']:
-    expected.append('FOO_LINK')
-
-  # Check that CC, CXX and LD set target compiler
-  oldenv = os.environ.copy()
-  try:
-    os.environ['CC'] = 'python %s/my_cc.py FOO' % here
-    os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
-    os.environ['LINK'] = 'python %s/my_ld.py FOO_LINK' % here
-
-    CheckCompiler(test, 'compiler-exe.gyp', expected, True)
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  # Run the same tests once the eviron has been restored.  The
-  # generated should have embedded all the settings in the
-  # project files so the results should be the same.
-  CheckCompiler(test, 'compiler-exe.gyp', expected, False)
-
-
-def TestTargetOverideCompilerOnly():
-  # Same test again but with that CC, CXX and not LD
-  oldenv = os.environ.copy()
-  try:
-    os.environ['CC'] = 'python %s/my_cc.py FOO' % here
-    os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
-
-    CheckCompiler(test, 'compiler-exe.gyp',
-                  ['my_cc.py', 'my_cxx.py', 'FOO'],
-                  True)
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  # Run the same tests once the eviron has been restored.  The
-  # generated should have embedded all the settings in the
-  # project files so the results should be the same.
-  CheckCompiler(test, 'compiler-exe.gyp',
-                ['my_cc.py', 'my_cxx.py', 'FOO'],
-                False)
-
-
-def TestHostOveride():
-  expected = ['my_cc.py', 'my_cxx.py', 'HOST' ]
-  if test.format != 'ninja':  # ninja just uses $CC / $CXX as linker.
-    expected.append('HOST_LINK')
-
-  # Check that CC_host sets host compilee
-  oldenv = os.environ.copy()
-  try:
-    os.environ['CC_host'] = 'python %s/my_cc.py HOST' % here
-    os.environ['CXX_host'] = 'python %s/my_cxx.py HOST' % here
-    os.environ['LINK_host'] = 'python %s/my_ld.py HOST_LINK' % here
-    CheckCompiler(test, 'compiler-host.gyp', expected, True)
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  # Run the same tests once the eviron has been restored.  The
-  # generated should have embedded all the settings in the
-  # project files so the results should be the same.
-  CheckCompiler(test, 'compiler-host.gyp', expected, False)
-
-
-TestTargetOveride()
-TestTargetOverideCompilerOnly()
-
-test.pass_test()
diff --git a/tools/gyp/test/compiler-override/gyptest-compiler-global-settings.py b/tools/gyp/test/compiler-override/gyptest-compiler-global-settings.py
deleted file mode 100755
index d58fc7c..0000000
--- a/tools/gyp/test/compiler-override/gyptest-compiler-global-settings.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""
-Verifies that make_global_settings can be used to override the
-compiler settings.
-"""
-
-import TestGyp
-import os
-import copy
-import sys
-from string import Template
-
-
-if sys.platform == 'win32':
-  # cross compiling not support by ninja on windows
-  # and make not supported on windows at all.
-  sys.exit(0)
-
-test = TestGyp.TestGyp(formats=['ninja', 'make'])
-
-gypfile = 'compiler-global-settings.gyp'
-
-replacements = { 'PYTHON': '/usr/bin/python', 'PWD': os.getcwd()}
-
-# Process the .in gyp file to produce the final gyp file
-# since we need to include absolute paths in the make_global_settings
-# section.
-replacements['TOOLSET'] = 'target'
-s = Template(open(gypfile + '.in').read())
-output = open(gypfile, 'w')
-output.write(s.substitute(replacements))
-output.close()
-
-old_env = dict(os.environ)
-os.environ['GYP_CROSSCOMPILE'] = '1'
-test.run_gyp(gypfile)
-os.environ.clear()
-os.environ.update(old_env)
-
-test.build(gypfile)
-test.must_contain_all_lines(test.stdout(), ['my_cc.py', 'my_cxx.py', 'FOO'])
-
-# The xcode generator chokes on the 'host' toolset. Skip the rest of
-# this test (cf. https://code.google.com/p/gyp/issues/detail?id=454).
-if test.format == 'xcode-ninja':
-  test.pass_test()
-
-# Same again but with the host toolset.
-replacements['TOOLSET'] = 'host'
-s = Template(open(gypfile + '.in').read())
-output = open(gypfile, 'w')
-output.write(s.substitute(replacements))
-output.close()
-
-old_env = dict(os.environ)
-os.environ['GYP_CROSSCOMPILE'] = '1'
-test.run_gyp(gypfile)
-os.environ.clear()
-os.environ.update(old_env)
-
-test.build(gypfile)
-test.must_contain_all_lines(test.stdout(), ['my_cc.py', 'my_cxx.py', 'BAR'])
-
-# Check that CC_host overrides make_global_settings
-old_env = dict(os.environ)
-os.environ['CC_host'] = '%s %s/my_cc.py SECRET' % (replacements['PYTHON'],
-                                                   replacements['PWD'])
-test.run_gyp(gypfile)
-os.environ.clear()
-os.environ.update(old_env)
-
-test.build(gypfile)
-test.must_contain_all_lines(test.stdout(), ['SECRET', 'my_cxx.py', 'BAR'])
-
-test.pass_test()
diff --git a/tools/gyp/test/compiler-override/my_cc.py b/tools/gyp/test/compiler-override/my_cc.py
deleted file mode 100755
index e2f0bdd..0000000
--- a/tools/gyp/test/compiler-override/my_cc.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
diff --git a/tools/gyp/test/compiler-override/my_cxx.py b/tools/gyp/test/compiler-override/my_cxx.py
deleted file mode 100755
index e2f0bdd..0000000
--- a/tools/gyp/test/compiler-override/my_cxx.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
diff --git a/tools/gyp/test/compiler-override/my_ld.py b/tools/gyp/test/compiler-override/my_ld.py
deleted file mode 100755
index e2f0bdd..0000000
--- a/tools/gyp/test/compiler-override/my_ld.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
diff --git a/tools/gyp/test/compiler-override/my_nm.py b/tools/gyp/test/compiler-override/my_nm.py
deleted file mode 100755
index f0f1efc..0000000
--- a/tools/gyp/test/compiler-override/my_nm.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
-with open('RAN_MY_NM', 'w') as f:
-  f.write('RAN_MY_NM')
diff --git a/tools/gyp/test/compiler-override/my_readelf.py b/tools/gyp/test/compiler-override/my_readelf.py
deleted file mode 100755
index 40e303c..0000000
--- a/tools/gyp/test/compiler-override/my_readelf.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
-with open('RAN_MY_READELF', 'w') as f:
-  f.write('RAN_MY_READELF')
diff --git a/tools/gyp/test/compiler-override/test.c b/tools/gyp/test/compiler-override/test.c
deleted file mode 100644
index 517a353..0000000
--- a/tools/gyp/test/compiler-override/test.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Deliberate C syntax error as this file should never be passed to
-// the actual compiler
-#error Should not be passed to a real compiler
diff --git a/tools/gyp/test/conditions/elseif/elseif.gyp b/tools/gyp/test/conditions/elseif/elseif.gyp
deleted file mode 100644
index 6367ff7..0000000
--- a/tools/gyp/test/conditions/elseif/elseif.gyp
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'variables': { 'test_var': 0 },
-      'target_name': 'program0',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'includes': [ 'elseif_conditions.gypi' ],
-    },
-    {
-      'variables': { 'test_var': 1 },
-      'target_name': 'program1',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'includes': [ 'elseif_conditions.gypi' ],
-    },
-    {
-      'variables': { 'test_var': 2 },
-      'target_name': 'program2',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'includes': [ 'elseif_conditions.gypi' ],
-    },
-    {
-      'variables': { 'test_var': 3 },
-      'target_name': 'program3',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'includes': [ 'elseif_conditions.gypi' ],
-    },
-    {
-      'variables': { 'test_var': 4 },
-      'target_name': 'program4',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'includes': [ 'elseif_conditions.gypi' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/conditions/elseif/elseif_bad1.gyp b/tools/gyp/test/conditions/elseif/elseif_bad1.gyp
deleted file mode 100644
index 35c8455..0000000
--- a/tools/gyp/test/conditions/elseif/elseif_bad1.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Trigger an error because of two consecutive string conditions.
-
-{
-  'targets': [
-    {
-      'variables': { 'test_var': 0 },
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'conditions': [
-        ['test_var==0', 'test_var==1', {
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/conditions/elseif/elseif_bad2.gyp b/tools/gyp/test/conditions/elseif/elseif_bad2.gyp
deleted file mode 100644
index b529f29..0000000
--- a/tools/gyp/test/conditions/elseif/elseif_bad2.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Trigger an error because of two consecutive string conditions, even if the
-# conditions are not actually evaluated.
-
-{
-  'targets': [
-    {
-      'variables': { 'test_var': 0 },
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'conditions': [
-        ['test_var==0', {
-        }, 'test_var==1', 'test_var==2', {
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/conditions/elseif/elseif_bad3.gyp b/tools/gyp/test/conditions/elseif/elseif_bad3.gyp
deleted file mode 100644
index 126e186..0000000
--- a/tools/gyp/test/conditions/elseif/elseif_bad3.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Trigger an error because there are unexpected trailing items in a condition.
-
-{
-  'targets': [
-    {
-      'variables': { 'test_var': 0 },
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'conditions': [
-        ['test_var==0' {
-        }, 'test_var==1', {
-        }, {
-        }, 'test_var==2', {
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/conditions/elseif/elseif_conditions.gypi b/tools/gyp/test/conditions/elseif/elseif_conditions.gypi
deleted file mode 100644
index 4310ccc..0000000
--- a/tools/gyp/test/conditions/elseif/elseif_conditions.gypi
+++ /dev/null
@@ -1,15 +0,0 @@
-{
-  'conditions': [
-    ['test_var==0', {
-      'defines': ['FOO="first_if"'],
-    }, 'test_var==1', {
-      'defines': ['FOO="first_else_if"'],
-    }, 'test_var==2', {
-      'defines': ['FOO="second_else_if"'],
-    }, 'test_var==3', {
-      'defines': ['FOO="third_else_if"'],
-    }, {
-      'defines': ['FOO="last_else"'],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/conditions/elseif/gyptest_elseif.py b/tools/gyp/test/conditions/elseif/gyptest_elseif.py
deleted file mode 100644
index 9d030cf..0000000
--- a/tools/gyp/test/conditions/elseif/gyptest_elseif.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that "else-if" conditions work.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('elseif.gyp')
-test.build('elseif.gyp', test.ALL)
-test.run_built_executable(
-    'program0', stdout='first_if\n')
-test.run_built_executable(
-    'program1', stdout='first_else_if\n')
-test.run_built_executable(
-    'program2', stdout='second_else_if\n')
-test.run_built_executable(
-    'program3', stdout='third_else_if\n')
-test.run_built_executable(
-    'program4', stdout='last_else\n')
-
-# Verify that bad condition blocks fail at gyp time.
-test.run_gyp('elseif_bad1.gyp', status=1, stderr=None)
-test.run_gyp('elseif_bad2.gyp', status=1, stderr=None)
-test.run_gyp('elseif_bad3.gyp', status=1, stderr=None)
-
-test.pass_test()
diff --git a/tools/gyp/test/conditions/elseif/program.cc b/tools/gyp/test/conditions/elseif/program.cc
deleted file mode 100644
index 147fe2f..0000000
--- a/tools/gyp/test/conditions/elseif/program.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int main() {
-  printf("%s\n", FOO);
-  return 0;
-}
diff --git a/tools/gyp/test/configurations/basics/configurations.c b/tools/gyp/test/configurations/basics/configurations.c
deleted file mode 100644
index 39e13c9..0000000
--- a/tools/gyp/test/configurations/basics/configurations.c
+++ /dev/null
@@ -1,15 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-#ifdef FOO
-  printf("Foo configuration\n");
-#endif
-#ifdef DEBUG
-  printf("Debug configuration\n");
-#endif
-#ifdef RELEASE
-  printf("Release configuration\n");
-#endif
-  return 0;
-}
diff --git a/tools/gyp/test/configurations/basics/configurations.gyp b/tools/gyp/test/configurations/basics/configurations.gyp
deleted file mode 100644
index 93f1d8d..0000000
--- a/tools/gyp/test/configurations/basics/configurations.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'executable',
-      'sources': [
-        'configurations.c',
-      ],
-      'configurations': {
-        'Debug': {
-          'defines': [
-            'DEBUG',
-          ],
-        },
-        'Release': {
-          'defines': [
-            'RELEASE',
-          ],
-        },
-        'Foo': {
-          'defines': [
-            'FOO',
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/basics/gyptest-configurations.py b/tools/gyp/test/configurations/basics/gyptest-configurations.py
deleted file mode 100755
index 27cd2e8..0000000
--- a/tools/gyp/test/configurations/basics/gyptest-configurations.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable in three different configurations.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('configurations.gyp')
-
-test.set_configuration('Release')
-test.build('configurations.gyp')
-test.run_built_executable('configurations', stdout="Release configuration\n")
-
-test.set_configuration('Debug')
-test.build('configurations.gyp')
-test.run_built_executable('configurations', stdout="Debug configuration\n")
-
-test.set_configuration('Foo')
-test.build('configurations.gyp')
-test.run_built_executable('configurations', stdout="Foo configuration\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/configurations/inheritance/configurations.c b/tools/gyp/test/configurations/inheritance/configurations.c
deleted file mode 100644
index ebb9f84..0000000
--- a/tools/gyp/test/configurations/inheritance/configurations.c
+++ /dev/null
@@ -1,21 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-#ifdef BASE
-  printf("Base configuration\n");
-#endif
-#ifdef COMMON
-  printf("Common configuration\n");
-#endif
-#ifdef COMMON2
-  printf("Common2 configuration\n");
-#endif
-#ifdef DEBUG
-  printf("Debug configuration\n");
-#endif
-#ifdef RELEASE
-  printf("Release configuration\n");
-#endif
-  return 0;
-}
diff --git a/tools/gyp/test/configurations/inheritance/configurations.gyp b/tools/gyp/test/configurations/inheritance/configurations.gyp
deleted file mode 100644
index 9441376..0000000
--- a/tools/gyp/test/configurations/inheritance/configurations.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'configurations': {
-      'Base': {
-         'abstract': 1,
-         'defines': ['BASE'],
-      },
-      'Common': {
-         'abstract': 1,
-         'inherit_from': ['Base'],
-         'defines': ['COMMON'],
-      },
-      'Common2': {
-         'abstract': 1,
-         'defines': ['COMMON2'],
-      },
-      'Debug': {
-        'inherit_from': ['Common', 'Common2'],
-        'defines': ['DEBUG'],
-      },
-      'Release': {
-        'inherit_from': ['Common', 'Common2'],
-        'defines': ['RELEASE'],
-      },
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'executable',
-      'sources': [
-        'configurations.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/inheritance/duplicates.gyp b/tools/gyp/test/configurations/inheritance/duplicates.gyp
deleted file mode 100644
index 6930ce3..0000000
--- a/tools/gyp/test/configurations/inheritance/duplicates.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'default_configuration': 'A',
-    'configurations': {
-      'A': {
-        'defines': ['SOMETHING'],
-      },
-      'B': {
-        'inherit_from': ['A'],
-      },
-    },
-    'cflags': ['-g'],
-  },
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'executable',
-      'sources': [
-        'configurations.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/inheritance/duplicates.gypd.golden b/tools/gyp/test/configurations/inheritance/duplicates.gypd.golden
deleted file mode 100644
index 719b708..0000000
--- a/tools/gyp/test/configurations/inheritance/duplicates.gypd.golden
+++ /dev/null
@@ -1,12 +0,0 @@
-{'_DEPTH': '.',
- 'included_files': ['duplicates.gyp'],
- 'targets': [{'configurations': {'A': {'cflags': ['-g'],
-                                       'defines': ['SOMETHING']},
-                                 'B': {'cflags': ['-g'],
-                                       'defines': ['SOMETHING'],
-                                       'inherit_from': ['A']}},
-              'default_configuration': 'A',
-              'sources': ['configurations.c'],
-              'target_name': 'configurations',
-              'toolset': 'target',
-              'type': 'executable'}]}
diff --git a/tools/gyp/test/configurations/inheritance/gyptest-duplicates.py b/tools/gyp/test/configurations/inheritance/gyptest-duplicates.py
deleted file mode 100755
index 46687b4..0000000
--- a/tools/gyp/test/configurations/inheritance/gyptest-duplicates.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that configurations do not duplicate other settings.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(format='gypd')
-
-test.run_gyp('duplicates.gyp')
-
-# Verify the duplicates.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('duplicates.gypd').replace(
-    '\r', '').replace('\\\\', '/')
-expect = test.read('duplicates.gypd.golden').replace('\r', '')
-if not test.match(contents, expect):
-  print "Unexpected contents of `duplicates.gypd'"
-  test.diff(expect, contents, 'duplicates.gypd ')
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/configurations/inheritance/gyptest-inheritance.py b/tools/gyp/test/configurations/inheritance/gyptest-inheritance.py
deleted file mode 100755
index 22c73a3..0000000
--- a/tools/gyp/test/configurations/inheritance/gyptest-inheritance.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable in three different configurations.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('configurations.gyp')
-
-test.set_configuration('Release')
-test.build('configurations.gyp')
-test.run_built_executable('configurations',
-                          stdout=('Base configuration\n'
-                                  'Common configuration\n'
-                                  'Common2 configuration\n'
-                                  'Release configuration\n'))
-
-test.set_configuration('Debug')
-test.build('configurations.gyp')
-test.run_built_executable('configurations',
-                          stdout=('Base configuration\n'
-                                  'Common configuration\n'
-                                  'Common2 configuration\n'
-                                  'Debug configuration\n'))
-
-test.pass_test()
diff --git a/tools/gyp/test/configurations/invalid/actions.gyp b/tools/gyp/test/configurations/invalid/actions.gyp
deleted file mode 100644
index a6e4208..0000000
--- a/tools/gyp/test/configurations/invalid/actions.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'actions': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/all_dependent_settings.gyp b/tools/gyp/test/configurations/invalid/all_dependent_settings.gyp
deleted file mode 100644
index b16a245..0000000
--- a/tools/gyp/test/configurations/invalid/all_dependent_settings.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'all_dependent_settings': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/configurations.gyp b/tools/gyp/test/configurations/invalid/configurations.gyp
deleted file mode 100644
index 2cfc960..0000000
--- a/tools/gyp/test/configurations/invalid/configurations.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'configurations': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/dependencies.gyp b/tools/gyp/test/configurations/invalid/dependencies.gyp
deleted file mode 100644
index 74633f3..0000000
--- a/tools/gyp/test/configurations/invalid/dependencies.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'dependencies': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/direct_dependent_settings.gyp b/tools/gyp/test/configurations/invalid/direct_dependent_settings.gyp
deleted file mode 100644
index 8a0f2e9..0000000
--- a/tools/gyp/test/configurations/invalid/direct_dependent_settings.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'direct_dependent_settings': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/gyptest-configurations.py b/tools/gyp/test/configurations/invalid/gyptest-configurations.py
deleted file mode 100755
index bd844b9..0000000
--- a/tools/gyp/test/configurations/invalid/gyptest-configurations.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable in three different configurations.
-"""
-
-import TestGyp
-
-# Keys that do not belong inside a configuration dictionary.
-invalid_configuration_keys = [
-  'actions',
-  'all_dependent_settings',
-  'configurations',
-  'dependencies',
-  'direct_dependent_settings',
-  'libraries',
-  'link_settings',
-  'sources',
-  'standalone_static_library',
-  'target_name',
-  'type',
-]
-
-test = TestGyp.TestGyp()
-
-for test_key in invalid_configuration_keys:
-  test.run_gyp('%s.gyp' % test_key, status=1, stderr=None)
-  expect = ['%s not allowed in the Debug configuration, found in target '
-            '%s.gyp:configurations#target' % (test_key, test_key)]
-  test.must_contain_all_lines(test.stderr(), expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/configurations/invalid/libraries.gyp b/tools/gyp/test/configurations/invalid/libraries.gyp
deleted file mode 100644
index c4014ed..0000000
--- a/tools/gyp/test/configurations/invalid/libraries.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'libraries': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/link_settings.gyp b/tools/gyp/test/configurations/invalid/link_settings.gyp
deleted file mode 100644
index 2f0e1c4..0000000
--- a/tools/gyp/test/configurations/invalid/link_settings.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'link_settings': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/sources.gyp b/tools/gyp/test/configurations/invalid/sources.gyp
deleted file mode 100644
index b38cca0..0000000
--- a/tools/gyp/test/configurations/invalid/sources.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'sources': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/standalone_static_library.gyp b/tools/gyp/test/configurations/invalid/standalone_static_library.gyp
deleted file mode 100644
index 2edb9fe..0000000
--- a/tools/gyp/test/configurations/invalid/standalone_static_library.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'standalone_static_library': 1,
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/target_name.gyp b/tools/gyp/test/configurations/invalid/target_name.gyp
deleted file mode 100644
index 83baad9..0000000
--- a/tools/gyp/test/configurations/invalid/target_name.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'target_name': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/invalid/type.gyp b/tools/gyp/test/configurations/invalid/type.gyp
deleted file mode 100644
index bc55898..0000000
--- a/tools/gyp/test/configurations/invalid/type.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'none',
-      'configurations': {
-        'Debug': {
-          'type': [
-          ],
-        },
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/target_platform/configurations.gyp b/tools/gyp/test/configurations/target_platform/configurations.gyp
deleted file mode 100644
index d15429f..0000000
--- a/tools/gyp/test/configurations/target_platform/configurations.gyp
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'configurations': {
-      'Debug_Win32': {
-        'msvs_configuration_platform': 'Win32',
-      },
-      'Debug_x64': {
-        'msvs_configuration_platform': 'x64',
-      },
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'left',
-      'type': 'static_library',
-      'sources': [
-        'left.c',
-      ],
-      'configurations': {
-        'Debug_Win32': {
-          'msvs_target_platform': 'x64',
-        },
-      },
-    },
-    {
-      'target_name': 'right',
-      'type': 'static_library',
-      'sources': [
-        'right.c',
-      ],
-    },
-    {
-      'target_name': 'front_left',
-      'type': 'executable',
-      'dependencies': ['left'],
-      'sources': [
-        'front.c',
-      ],
-      'configurations': {
-        'Debug_Win32': {
-          'msvs_target_platform': 'x64',
-        },
-      },
-    },
-    {
-      'target_name': 'front_right',
-      'type': 'executable',
-      'dependencies': ['right'],
-      'sources': [
-        'front.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/target_platform/front.c b/tools/gyp/test/configurations/target_platform/front.c
deleted file mode 100644
index 7a91689..0000000
--- a/tools/gyp/test/configurations/target_platform/front.c
+++ /dev/null
@@ -1,8 +0,0 @@
-#include <stdio.h>
-
-const char *message(void);
-
-int main(void) {
-  printf("%s\n", message());
-  return 0;
-}
diff --git a/tools/gyp/test/configurations/target_platform/gyptest-target_platform.py b/tools/gyp/test/configurations/target_platform/gyptest-target_platform.py
deleted file mode 100755
index ae4e9e5..0000000
--- a/tools/gyp/test/configurations/target_platform/gyptest-target_platform.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests the msvs specific msvs_target_platform option.
-"""
-
-import TestGyp
-import TestCommon
-
-
-def RunX64(exe, stdout):
-  try:
-    test.run_built_executable(exe, stdout=stdout)
-  except WindowsError, e:
-    # Assume the exe is 64-bit if it can't load on 32-bit systems.
-    # Both versions of the error are required because different versions
-    # of python seem to return different errors for invalid exe type.
-    if e.errno != 193 and '[Error 193]' not in str(e):
-      raise
-
-
-test = TestGyp.TestGyp(formats=['msvs'])
-
-test.run_gyp('configurations.gyp')
-
-test.set_configuration('Debug|x64')
-test.build('configurations.gyp', rebuild=True)
-RunX64('front_left', stdout=('left\n'))
-RunX64('front_right', stdout=('right\n'))
-
-test.set_configuration('Debug|Win32')
-test.build('configurations.gyp', rebuild=True)
-RunX64('front_left', stdout=('left\n'))
-test.run_built_executable('front_right', stdout=('right\n'))
-
-test.pass_test()
diff --git a/tools/gyp/test/configurations/target_platform/left.c b/tools/gyp/test/configurations/target_platform/left.c
deleted file mode 100644
index 1ce2ea1..0000000
--- a/tools/gyp/test/configurations/target_platform/left.c
+++ /dev/null
@@ -1,3 +0,0 @@
-const char *message(void) {
-  return "left";
-}
diff --git a/tools/gyp/test/configurations/target_platform/right.c b/tools/gyp/test/configurations/target_platform/right.c
deleted file mode 100644
index b157849..0000000
--- a/tools/gyp/test/configurations/target_platform/right.c
+++ /dev/null
@@ -1,3 +0,0 @@
-const char *message(void) {
-  return "right";
-}
diff --git a/tools/gyp/test/configurations/x64/configurations.c b/tools/gyp/test/configurations/x64/configurations.c
deleted file mode 100644
index 3701843..0000000
--- a/tools/gyp/test/configurations/x64/configurations.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-int main(void) {
-  if (sizeof(void*) == 4) {
-    printf("Running Win32\n");
-  } else if (sizeof(void*) == 8) {
-    printf("Running x64\n");
-  } else {
-    printf("Unexpected platform\n");
-  }
-  return 0;
-}
diff --git a/tools/gyp/test/configurations/x64/configurations.gyp b/tools/gyp/test/configurations/x64/configurations.gyp
deleted file mode 100644
index 8b0139f..0000000
--- a/tools/gyp/test/configurations/x64/configurations.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'configurations': {
-      'Debug': {
-        'msvs_configuration_platform': 'Win32',
-      },
-      'Debug_x64': {
-        'inherit_from': ['Debug'],
-        'msvs_configuration_platform': 'x64',
-      },
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'configurations',
-      'type': 'executable',
-      'sources': [
-        'configurations.c',
-      ],
-    },
-    {
-      'target_name': 'configurations64',
-      'type': 'executable',
-      'sources': [
-        'configurations.c',
-      ],
-      'configurations': {
-        'Debug': {
-          'msvs_target_platform': 'x64',
-        },
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/configurations/x64/gyptest-x86.py b/tools/gyp/test/configurations/x64/gyptest-x86.py
deleted file mode 100755
index 8675d8f..0000000
--- a/tools/gyp/test/configurations/x64/gyptest-x86.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable in three different configurations.
-"""
-
-import TestGyp
-
-import sys
-
-formats = ['msvs']
-if sys.platform == 'win32':
-  formats += ['ninja']
-test = TestGyp.TestGyp(formats=formats)
-
-test.run_gyp('configurations.gyp')
-test.set_configuration('Debug|Win32')
-test.build('configurations.gyp', test.ALL)
-
-for machine, suffix in [('14C machine (x86)', ''),
-                        ('8664 machine (x64)', '64')]:
-  output = test.run_dumpbin(
-      '/headers', test.built_file_path('configurations%s.exe' % suffix))
-  if machine not in output:
-    test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-all.py b/tools/gyp/test/copies/gyptest-all.py
deleted file mode 100755
index aeccf33..0000000
--- a/tools/gyp/test/copies/gyptest-all.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies using an explicit build target of 'all'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('copies.gyp',
-             '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('copies.gyp', test.ALL, chdir='relocate/src')
-
-test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
-
-test.built_file_must_match('copies-out/file2',
-                           'file2 contents\n',
-                           chdir='relocate/src')
-
-test.built_file_must_match('copies-out/directory/file3',
-                           'file3 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/directory/file4',
-                           'file4 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/directory/subdir/file5',
-                           'file5 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/subdir/file6',
-                           'file6 contents\n',
-                           chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-attribs.py b/tools/gyp/test/copies/gyptest-attribs.py
deleted file mode 100644
index 70d717a..0000000
--- a/tools/gyp/test/copies/gyptest-attribs.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that copying files preserves file attributes.
-"""
-
-import TestGyp
-
-import os
-import stat
-import sys
-
-
-def check_attribs(path, expected_exec_bit):
-  out_path = test.built_file_path(path, chdir='src')
-
-  in_stat = os.stat(os.path.join('src', path))
-  out_stat = os.stat(out_path)
-  if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
-    test.fail_test()
-
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('copies-attribs.gyp', chdir='src')
-
-test.build('copies-attribs.gyp', chdir='src')
-
-if sys.platform != 'win32':
-  out_path = test.built_file_path('executable-file.sh', chdir='src')
-  test.must_contain(out_path,
-                    '#!/bin/bash\n'
-                    '\n'
-                    'echo echo echo echo cho ho o o\n')
-  check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-default.py b/tools/gyp/test/copies/gyptest-default.py
deleted file mode 100755
index a916869..0000000
--- a/tools/gyp/test/copies/gyptest-default.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies using the build tool default.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('copies.gyp',
-             '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('copies.gyp', chdir='relocate/src')
-
-test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
-
-test.built_file_must_match('copies-out/file2',
-                           'file2 contents\n',
-                           chdir='relocate/src')
-
-test.built_file_must_match('copies-out/directory/file3',
-                           'file3 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/directory/file4',
-                           'file4 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/directory/subdir/file5',
-                           'file5 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out/subdir/file6',
-                           'file6 contents\n',
-                           chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-samedir.py b/tools/gyp/test/copies/gyptest-samedir.py
deleted file mode 100755
index 923ca61..0000000
--- a/tools/gyp/test/copies/gyptest-samedir.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies where two copies sections in the same target have the
-same destination directory.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-test.run_gyp('copies-samedir.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('copies-samedir.gyp', 'copies_samedir', chdir='relocate/src')
-
-test.built_file_must_match('copies-out-samedir/file1',
-                           'file1 contents\n',
-                           chdir='relocate/src')
-
-test.built_file_must_match('copies-out-samedir/file2',
-                           'file2 contents\n',
-                           chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-slash.py b/tools/gyp/test/copies/gyptest-slash.py
deleted file mode 100755
index f7a2e54..0000000
--- a/tools/gyp/test/copies/gyptest-slash.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies with a trailing slash in the destination directory.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-test.run_gyp('copies-slash.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('copies-slash.gyp', chdir='relocate/src')
-
-test.built_file_must_match('copies-out-slash/directory/file3',
-                           'file3 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out-slash/directory/file4',
-                           'file4 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out-slash/directory/subdir/file5',
-                           'file5 contents\n',
-                           chdir='relocate/src')
-
-test.built_file_must_match('copies-out-slash-2/directory/file3',
-                           'file3 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out-slash-2/directory/file4',
-                           'file4 contents\n',
-                           chdir='relocate/src')
-test.built_file_must_match('copies-out-slash-2/directory/subdir/file5',
-                           'file5 contents\n',
-                           chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-sourceless-shared-lib.py b/tools/gyp/test/copies/gyptest-sourceless-shared-lib.py
deleted file mode 100644
index 6ec2e51..0000000
--- a/tools/gyp/test/copies/gyptest-sourceless-shared-lib.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies copies in sourceless shared_library targets are executed.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-test.run_gyp('copies-sourceless-shared-lib.gyp', chdir='src')
-test.relocate('src', 'relocate/src')
-test.build('copies-sourceless-shared-lib.gyp', chdir='relocate/src')
-test.built_file_must_match('copies-out/file1',
-                           'file1 contents\n',
-                           chdir='relocate/src')
-test.pass_test()
diff --git a/tools/gyp/test/copies/gyptest-updir.py b/tools/gyp/test/copies/gyptest-updir.py
deleted file mode 100755
index a34ae70..0000000
--- a/tools/gyp/test/copies/gyptest-updir.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies where the destination is one level above an expansion that
-yields a make variable.
-"""
-
-import sys
-
-import TestGyp
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-test = TestGyp.TestGyp()
-test.run_gyp('copies-updir.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('copies-updir.gyp', 'copies_up', chdir='relocate/src')
-
-test.built_file_must_match('../copies-out-updir/file1',
-                           'file1 contents\n',
-                           chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/copies/src/copies-attribs.gyp b/tools/gyp/test/copies/src/copies-attribs.gyp
deleted file mode 100644
index 073e0d0..0000000
--- a/tools/gyp/test/copies/src/copies-attribs.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'copies1',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)',
-          'files': [
-            'executable-file.sh',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/copies/src/copies-samedir.gyp b/tools/gyp/test/copies/src/copies-samedir.gyp
deleted file mode 100644
index 2919ce5..0000000
--- a/tools/gyp/test/copies/src/copies-samedir.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'copies_samedir',
-      'type': 'none',
-      'dependencies': [
-        'copies_samedir_dependency',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out-samedir',
-          'files': [
-            'file1',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copies_samedir_dependency',
-      'type': 'none',
-      'direct_dependent_settings': {
-        'copies': [
-          {
-            'destination': '<(PRODUCT_DIR)/copies-out-samedir',
-            'files': [
-              'file2',
-            ],
-          },
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/copies/src/copies-slash.gyp b/tools/gyp/test/copies/src/copies-slash.gyp
deleted file mode 100644
index 9bf54bd..0000000
--- a/tools/gyp/test/copies/src/copies-slash.gyp
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    # A trailing slash on the destination directory should be ignored.
-    {
-      'target_name': 'copies_recursive_trailing_slash',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out-slash/',
-          'files': [
-            'directory/',
-          ],
-        },
-      ],
-    },
-    # Even if the source directory is below <(PRODUCT_DIR).
-    {
-      'target_name': 'copies_recursive_trailing_slash_in_product_dir',
-      'type': 'none',
-      'dependencies': [ ':copies_recursive_trailing_slash' ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out-slash-2/',
-          'files': [
-            '<(PRODUCT_DIR)/copies-out-slash/directory/',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/copies/src/copies-sourceless-shared-lib.gyp b/tools/gyp/test/copies/src/copies-sourceless-shared-lib.gyp
deleted file mode 100644
index 7908f71..0000000
--- a/tools/gyp/test/copies/src/copies-sourceless-shared-lib.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-    {
-      'target_name': 'mysolib',
-      'type': 'shared_library',
-      'dependencies': [ 'mylib' ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [ 'file1' ],
-        },
-      ],
-      # link.exe gets confused by sourceless shared libraries and needs this
-      # to become unconfused.
-      'msvs_settings': { 'VCLinkerTool': { 'TargetMachine': '1', }, },
-    },
-  ],
-}
diff --git a/tools/gyp/test/copies/src/copies-updir.gyp b/tools/gyp/test/copies/src/copies-updir.gyp
deleted file mode 100644
index bd3bfdd..0000000
--- a/tools/gyp/test/copies/src/copies-updir.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'copies_up',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/../copies-out-updir',
-          'files': [
-            'file1',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/copies/src/copies.gyp b/tools/gyp/test/copies/src/copies.gyp
deleted file mode 100644
index ce2e0ca..0000000
--- a/tools/gyp/test/copies/src/copies.gyp
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'copies1',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': 'copies-out',
-          'files': [
-            'file1',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copies2',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [
-            'file2',
-          ],
-        },
-      ],
-    },
-    # Copy a directory tree.
-    {
-      'target_name': 'copies_recursive',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [
-            'directory/',
-          ],
-        },
-      ],
-    },
-    # Copy a directory from deeper in the tree (this should not reproduce the
-    # entire directory path in the destination, only the final directory).
-    {
-      'target_name': 'copies_recursive_depth',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [
-            'parentdir/subdir/',
-          ],
-        },
-      ],
-    },
-    # Verify that a null 'files' list doesn't gag the generators.
-    {
-      'target_name': 'copies_null',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-null',
-          'files': [],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/copies/src/directory/file3 b/tools/gyp/test/copies/src/directory/file3
deleted file mode 100644
index 43f16f3..0000000
--- a/tools/gyp/test/copies/src/directory/file3
+++ /dev/null
@@ -1 +0,0 @@
-file3 contents
diff --git a/tools/gyp/test/copies/src/directory/file4 b/tools/gyp/test/copies/src/directory/file4
deleted file mode 100644
index 5f7270a..0000000
--- a/tools/gyp/test/copies/src/directory/file4
+++ /dev/null
@@ -1 +0,0 @@
-file4 contents
diff --git a/tools/gyp/test/copies/src/directory/subdir/file5 b/tools/gyp/test/copies/src/directory/subdir/file5
deleted file mode 100644
index 41f4718..0000000
--- a/tools/gyp/test/copies/src/directory/subdir/file5
+++ /dev/null
@@ -1 +0,0 @@
-file5 contents
diff --git a/tools/gyp/test/copies/src/executable-file.sh b/tools/gyp/test/copies/src/executable-file.sh
deleted file mode 100755
index 796953a..0000000
--- a/tools/gyp/test/copies/src/executable-file.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo echo echo echo cho ho o o
diff --git a/tools/gyp/test/copies/src/file1 b/tools/gyp/test/copies/src/file1
deleted file mode 100644
index 84d55c5..0000000
--- a/tools/gyp/test/copies/src/file1
+++ /dev/null
@@ -1 +0,0 @@
-file1 contents
diff --git a/tools/gyp/test/copies/src/file2 b/tools/gyp/test/copies/src/file2
deleted file mode 100644
index af1b8ae..0000000
--- a/tools/gyp/test/copies/src/file2
+++ /dev/null
@@ -1 +0,0 @@
-file2 contents
diff --git a/tools/gyp/test/copies/src/foo.c b/tools/gyp/test/copies/src/foo.c
deleted file mode 100644
index 99a4c10..0000000
--- a/tools/gyp/test/copies/src/foo.c
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-int f() { return 42; }
-
-#ifdef _MSC_VER
-// link.exe gets confused by sourceless shared libraries and needs this
-// to become unconfused.
-int __stdcall _DllMainCRTStartup(
-    unsigned hInst, unsigned reason, void* reserved) {
-  return 1;
-}
-#endif
diff --git a/tools/gyp/test/copies/src/parentdir/subdir/file6 b/tools/gyp/test/copies/src/parentdir/subdir/file6
deleted file mode 100644
index f5d5757..0000000
--- a/tools/gyp/test/copies/src/parentdir/subdir/file6
+++ /dev/null
@@ -1 +0,0 @@
-file6 contents
diff --git a/tools/gyp/test/custom-generator/gyptest-custom-generator.py b/tools/gyp/test/custom-generator/gyptest-custom-generator.py
deleted file mode 100755
index 85fd072..0000000
--- a/tools/gyp/test/custom-generator/gyptest-custom-generator.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Test that custom generators can be passed to --format
-"""
-
-import TestGyp
-
-test = TestGyp.TestGypCustom(format='mygenerator.py')
-test.run_gyp('test.gyp')
-
-# mygenerator.py should generate a file called MyBuildFile containing
-# "Testing..." alongside the gyp file.
-test.must_match('MyBuildFile', 'Testing...\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/custom-generator/mygenerator.py b/tools/gyp/test/custom-generator/mygenerator.py
deleted file mode 100644
index 8eb4c2d..0000000
--- a/tools/gyp/test/custom-generator/mygenerator.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Custom gyp generator that doesn't do much."""
-
-import gyp.common
-
-generator_default_variables = {}
-
-def GenerateOutput(target_list, target_dicts, data, params):
-  f = open("MyBuildFile", "wb")
-  f.write("Testing...\n")
-  f.close()
diff --git a/tools/gyp/test/custom-generator/test.gyp b/tools/gyp/test/custom-generator/test.gyp
deleted file mode 100644
index aa5f864..0000000
--- a/tools/gyp/test/custom-generator/test.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'exe',
-      'type': 'executable',
-      'sources': [
-        'main.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/cxxflags/cxxflags.cc b/tools/gyp/test/cxxflags/cxxflags.cc
deleted file mode 100644
index e70e39d..0000000
--- a/tools/gyp/test/cxxflags/cxxflags.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-/* Copyright (c) 2010 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-#ifdef ABC
-  printf("With define\n");
-#else
-  printf("No define\n");
-#endif
-  return 0;
-}
diff --git a/tools/gyp/test/cxxflags/cxxflags.gyp b/tools/gyp/test/cxxflags/cxxflags.gyp
deleted file mode 100644
index a082d49..0000000
--- a/tools/gyp/test/cxxflags/cxxflags.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'cxxflags',
-      'type': 'executable',
-      'sources': [
-        'cxxflags.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/cxxflags/gyptest-cxxflags.py b/tools/gyp/test/cxxflags/gyptest-cxxflags.py
deleted file mode 100755
index 117a180..0000000
--- a/tools/gyp/test/cxxflags/gyptest-cxxflags.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies the use of the environment during regeneration when the gyp file
-changes, specifically via build of an executable with C++ flags specified by
-CXXFLAGS.
-
-In this test, gyp happens within a local environment, but build outside of it.
-"""
-
-import TestGyp
-
-FORMATS = ('ninja',)
-
-test = TestGyp.TestGyp(formats=FORMATS)
-
-# We reset the environ after calling gyp. When the auto-regeneration happens,
-# the same define should be reused anyway.
-with TestGyp.LocalEnv({'CXXFLAGS': ''}):
-  test.run_gyp('cxxflags.gyp')
-
-test.build('cxxflags.gyp')
-
-expect = """\
-No define
-"""
-test.run_built_executable('cxxflags', stdout=expect)
-
-test.sleep()
-
-with TestGyp.LocalEnv({'CXXFLAGS': '-DABC'}):
-  test.run_gyp('cxxflags.gyp')
-
-test.build('cxxflags.gyp')
-
-expect = """\
-With define
-"""
-test.run_built_executable('cxxflags', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/defines-escaping/defines-escaping.c b/tools/gyp/test/defines-escaping/defines-escaping.c
deleted file mode 100644
index a0aa4c2..0000000
--- a/tools/gyp/test/defines-escaping/defines-escaping.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2010 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf(TEST_FORMAT, TEST_ARGS);
-  return 0;
-}
diff --git a/tools/gyp/test/defines-escaping/defines-escaping.gyp b/tools/gyp/test/defines-escaping/defines-escaping.gyp
deleted file mode 100644
index 6f0f3fd..0000000
--- a/tools/gyp/test/defines-escaping/defines-escaping.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'defines_escaping',
-      'type': 'executable',
-      'sources': [
-        'defines-escaping.c',
-      ],
-      'defines': [
-        'TEST_FORMAT="<(test_format)"',
-        'TEST_ARGS=<(test_args)',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/defines-escaping/gyptest-defines-escaping.py b/tools/gyp/test/defines-escaping/gyptest-defines-escaping.py
deleted file mode 100755
index eb18a3d..0000000
--- a/tools/gyp/test/defines-escaping/gyptest-defines-escaping.py
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable with C++ define specified by a gyp define using
-various special characters such as quotes, commas, etc.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# Tests string literals, percents, and backslash escapes.
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s\n' """
-      r"""test_args='"Simple test of %s with a literal"'""")
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.build('defines-escaping.gyp')
-
-expect = """
-Simple test of %s with a literal
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test multiple comma-and-space-separated string literals.
-try:
-  os.environ['GYP_DEFINES'] = \
-      r"""test_format='\n%s and %s\n' test_args='"foo", "bar"'"""
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = """
-foo and bar
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test string literals containing quotes.
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s %s %s %s %s\n' """
-      r"""test_args='"\"These,\"","""
-                r""" "\"words,\"","""
-                r""" "\"are,\"","""
-                r""" "\"in,\"","""
-                r""" "\"quotes.\""'""")
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = """
-"These," "words," "are," "in," "quotes."
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test string literals containing single quotes.
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s %s %s %s %s\n' """
-      r"""test_args="\"'These,'\","""
-                r""" \"'words,'\","""
-                r""" \"'are,'\","""
-                r""" \"'in,'\","""
-                r""" \"'quotes.'\"" """)
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = """
-'These,' 'words,' 'are,' 'in,' 'quotes.'
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test string literals containing different numbers of backslashes before quotes
-# (to exercise Windows' quoting behaviour).
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s\n%s\n%s\n' """
-      r"""test_args='"\\\"1 visible slash\\\"","""
-                r""" "\\\\\"2 visible slashes\\\\\"","""
-                r""" "\\\\\\\"3 visible slashes\\\\\\\""'""")
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = r"""
-\"1 visible slash\"
-\\"2 visible slashes\\"
-\\\"3 visible slashes\\\"
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test that various scary sequences are passed unfettered.
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s\n' """
-      r"""test_args='"$foo, &quot; `foo`;"'""")
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = """
-$foo, &quot; `foo`;
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# VisualStudio 2010 can't handle passing %PATH%
-if not (test.format == 'msvs' and test.uses_msbuild):
-  try:
-    os.environ['GYP_DEFINES'] = (
-        """test_format='%s' """
-        """test_args='"%PATH%"'""")
-    test.run_gyp('defines-escaping.gyp')
-  finally:
-    del os.environ['GYP_DEFINES']
-
-  test.sleep()
-  test.touch('defines-escaping.c')
-  test.build('defines-escaping.gyp')
-
-  expect = "%PATH%"
-  test.run_built_executable('defines_escaping', stdout=expect)
-
-
-# Test commas and semi-colons preceded by backslashes (to exercise Windows'
-# quoting behaviour).
-try:
-  os.environ['GYP_DEFINES'] = (
-      r"""test_format='\n%s\n%s\n' """
-      r"""test_args='"\\, \\\\;","""
-                # Same thing again, but enclosed in visible quotes.
-                r""" "\"\\, \\\\;\""'""")
-  test.run_gyp('defines-escaping.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines-escaping.c')
-test.build('defines-escaping.gyp')
-
-expect = r"""
-\, \\;
-"\, \\;"
-"""
-test.run_built_executable('defines_escaping', stdout=expect)
-
-# We deliberately do not test having an odd number of quotes in a string
-# literal because that isn't feasible in MSVS.
-
-test.pass_test()
diff --git a/tools/gyp/test/defines/defines-env.gyp b/tools/gyp/test/defines/defines-env.gyp
deleted file mode 100644
index 1781546..0000000
--- a/tools/gyp/test/defines/defines-env.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'value%': '5',
-  },
-  'targets': [
-    {
-      'target_name': 'defines',
-      'type': 'executable',
-      'sources': [
-        'defines.c',
-      ],
-      'defines': [
-        'VALUE=<(value)',
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/defines/defines.c b/tools/gyp/test/defines/defines.c
deleted file mode 100644
index dda1392..0000000
--- a/tools/gyp/test/defines/defines.c
+++ /dev/null
@@ -1,23 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-#ifdef FOO
-  printf("FOO is defined\n");
-#endif
-  printf("VALUE is %d\n", VALUE);
-
-#ifdef PAREN_VALUE
-  printf("2*PAREN_VALUE is %d\n", 2*PAREN_VALUE);
-#endif
-
-#ifdef HASH_VALUE
-  printf("HASH_VALUE is %s\n", HASH_VALUE);
-#endif
-
-  return 0;
-}
diff --git a/tools/gyp/test/defines/defines.gyp b/tools/gyp/test/defines/defines.gyp
deleted file mode 100644
index 90a755e..0000000
--- a/tools/gyp/test/defines/defines.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'defines',
-      'type': 'executable',
-      'sources': [
-        'defines.c',
-      ],
-      'defines': [
-        'FOO',
-        'VALUE=1',
-        'PAREN_VALUE=(1+2+3)',
-        'HASH_VALUE="a#1"',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="fakeos"', {
-      'targets': [
-        {
-          'target_name': 'fakeosprogram',
-          'type': 'executable',
-          'sources': [
-            'defines.c',
-          ],
-          'defines': [
-            'FOO',
-            'VALUE=1',
-          ],
-        },
-      ],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/defines/gyptest-define-override.py b/tools/gyp/test/defines/gyptest-define-override.py
deleted file mode 100755
index 9730455..0000000
--- a/tools/gyp/test/defines/gyptest-define-override.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a default gyp define can be overridden.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# CMake loudly warns about passing '#' to the compiler and drops the define.
-expect_stderr = ''
-if test.format == 'cmake':
-  expect_stderr = (
-"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
-""" compiler command line because many compilers do not support it.\n"""
-"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
-"""Consider defining the macro in a (configured) header file.\n\n""")
-
-# Command-line define
-test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
-test.build('defines.gyp', stderr=expect_stderr)
-test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
-# Clean up the exe so subsequent tests don't find an old exe.
-os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
-
-# Without "OS" override, fokeosprogram shouldn't be built.
-test.run_gyp('defines.gyp')
-test.build('defines.gyp', stderr=expect_stderr)
-test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
-
-# Environment define
-os.environ['GYP_DEFINES'] = 'OS=fakeos'
-test.run_gyp('defines.gyp')
-test.build('defines.gyp', stderr=expect_stderr)
-test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
-
-test.pass_test()
diff --git a/tools/gyp/test/defines/gyptest-defines-env-regyp.py b/tools/gyp/test/defines/gyptest-defines-env-regyp.py
deleted file mode 100755
index f2d931c..0000000
--- a/tools/gyp/test/defines/gyptest-defines-env-regyp.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable with C++ define specified by a gyp define, and
-the use of the environment during regeneration when the gyp file changes.
-"""
-
-import os
-import TestGyp
-
-# Regenerating build files when a gyp file changes is currently only supported
-# by the make generator.
-test = TestGyp.TestGyp(formats=['make'])
-
-try:
-  os.environ['GYP_DEFINES'] = 'value=50'
-  test.run_gyp('defines.gyp')
-finally:
-  # We clear the environ after calling gyp.  When the auto-regeneration happens,
-  # the same define should be reused anyway.  Reset to empty string first in
-  # case the platform doesn't support unsetenv.
-  os.environ['GYP_DEFINES'] = ''
-  del os.environ['GYP_DEFINES']
-
-test.build('defines.gyp')
-
-expect = """\
-FOO is defined
-VALUE is 1
-2*PAREN_VALUE is 12
-HASH_VALUE is a#1
-"""
-test.run_built_executable('defines', stdout=expect)
-
-# Sleep so that the changed gyp file will have a newer timestamp than the
-# previously generated build files.
-test.sleep()
-test.write('defines.gyp', test.read('defines-env.gyp'))
-
-test.build('defines.gyp', test.ALL)
-
-expect = """\
-VALUE is 50
-"""
-test.run_built_executable('defines', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/defines/gyptest-defines-env.py b/tools/gyp/test/defines/gyptest-defines-env.py
deleted file mode 100755
index 6b4e717..0000000
--- a/tools/gyp/test/defines/gyptest-defines-env.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable with C++ define specified by a gyp define.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# With the value only given in environment, it should be used.
-try:
-  os.environ['GYP_DEFINES'] = 'value=10'
-  test.run_gyp('defines-env.gyp')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.build('defines-env.gyp')
-
-expect = """\
-VALUE is 10
-"""
-test.run_built_executable('defines', stdout=expect)
-
-
-# With the value given in both command line and environment,
-# command line should take precedence.
-try:
-  os.environ['GYP_DEFINES'] = 'value=20'
-  test.run_gyp('defines-env.gyp', '-Dvalue=25')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines.c')
-test.build('defines-env.gyp')
-
-expect = """\
-VALUE is 25
-"""
-test.run_built_executable('defines', stdout=expect)
-
-
-# With the value only given in environment, it should be ignored if
-# --ignore-environment is specified.
-try:
-  os.environ['GYP_DEFINES'] = 'value=30'
-  test.run_gyp('defines-env.gyp', '--ignore-environment')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines.c')
-test.build('defines-env.gyp')
-
-expect = """\
-VALUE is 5
-"""
-test.run_built_executable('defines', stdout=expect)
-
-
-# With the value given in both command line and environment, and
-# --ignore-environment also specified, command line should still be used.
-try:
-  os.environ['GYP_DEFINES'] = 'value=40'
-  test.run_gyp('defines-env.gyp', '--ignore-environment', '-Dvalue=45')
-finally:
-  del os.environ['GYP_DEFINES']
-
-test.sleep()
-test.touch('defines.c')
-test.build('defines-env.gyp')
-
-expect = """\
-VALUE is 45
-"""
-test.run_built_executable('defines', stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/defines/gyptest-defines.py b/tools/gyp/test/defines/gyptest-defines.py
deleted file mode 100755
index 77a3af5..0000000
--- a/tools/gyp/test/defines/gyptest-defines.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of an executable with C++ defines.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('defines.gyp')
-
-expect = """\
-FOO is defined
-VALUE is 1
-2*PAREN_VALUE is 12
-"""
-
-#CMake loudly warns about passing '#' to the compiler and drops the define.
-expect_stderr = ''
-if test.format == 'cmake':
-  expect_stderr = (
-"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
-""" compiler command line because many compilers do not support it.\n"""
-"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
-"""Consider defining the macro in a (configured) header file.\n\n""")
-else:
-  expect += """HASH_VALUE is a#1
-"""
-
-test.build('defines.gyp', stderr=expect_stderr)
-
-test.run_built_executable('defines', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/a.c b/tools/gyp/test/dependencies/a.c
deleted file mode 100755
index 3bba111..0000000
--- a/tools/gyp/test/dependencies/a.c
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-extern int funcB();
-
-int funcA() {
-  return funcB();
-}
diff --git a/tools/gyp/test/dependencies/adso/all_dependent_settings_order.gyp b/tools/gyp/test/dependencies/adso/all_dependent_settings_order.gyp
deleted file mode 100644
index 89817d6..0000000
--- a/tools/gyp/test/dependencies/adso/all_dependent_settings_order.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'none',
-      'sources': ['a.cc'],
-      'all_dependent_settings': {'sources': ['a.cc']},
-    },
-    {
-      'target_name': 'b',
-      'type': 'none',
-      'sources': ['b.cc'],
-      'all_dependent_settings': {'sources': ['b.cc']},
-      'dependencies': ['a'],
-    },
-
-    {
-      'target_name': 'c',
-      'type': 'none',
-      'sources': ['c.cc'],
-      'all_dependent_settings': {'sources': ['c.cc']},
-      'dependencies': ['b', 'a'],
-    },
-    {
-      'target_name': 'd',
-      'type': 'none',
-      'sources': ['d.cc'],
-      'dependencies': ['c', 'a', 'b'],
-      'actions': [
-        {
-          'action_name': 'write_sources',
-          'inputs': ['write_args.py'],
-          'outputs': ['<(PRODUCT_DIR)/out.txt'],
-          'action': [
-            'python',
-            'write_args.py',
-            '<(PRODUCT_DIR)/out.txt',
-            '>@(_sources)'
-          ],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/adso/write_args.py b/tools/gyp/test/dependencies/adso/write_args.py
deleted file mode 100755
index cc87cf5..0000000
--- a/tools/gyp/test/dependencies/adso/write_args.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write(' '.join(sys.argv[2:]))
-f.close()
diff --git a/tools/gyp/test/dependencies/b/b.c b/tools/gyp/test/dependencies/b/b.c
deleted file mode 100755
index b5e771b..0000000
--- a/tools/gyp/test/dependencies/b/b.c
+++ /dev/null
@@ -1,3 +0,0 @@
-int funcB() {
-  return 2;
-}
diff --git a/tools/gyp/test/dependencies/b/b.gyp b/tools/gyp/test/dependencies/b/b.gyp
deleted file mode 100755
index 893dc64..0000000
--- a/tools/gyp/test/dependencies/b/b.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'b',
-      'type': 'static_library',
-      'sources': [
-        'b.c',
-      ],
-    },
-    {
-      'target_name': 'b3',
-      'type': 'static_library',
-      'sources': [
-        'b3.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/b/b3.c b/tools/gyp/test/dependencies/b/b3.c
deleted file mode 100755
index 287f67f..0000000
--- a/tools/gyp/test/dependencies/b/b3.c
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-int funcB() {
-  return 3;
-}
diff --git a/tools/gyp/test/dependencies/c/c.c b/tools/gyp/test/dependencies/c/c.c
deleted file mode 100644
index 4949daf..0000000
--- a/tools/gyp/test/dependencies/c/c.c
+++ /dev/null
@@ -1,4 +0,0 @@
-int funcC() {
-  return 3
-  // Intentional syntax error. This file should never be compiled, so this
-  // shouldn't be a problem.
diff --git a/tools/gyp/test/dependencies/c/c.gyp b/tools/gyp/test/dependencies/c/c.gyp
deleted file mode 100644
index eabebea..0000000
--- a/tools/gyp/test/dependencies/c/c.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'c_unused',
-      'type': 'static_library',
-      'sources': [
-        'c.c',
-      ],
-    },
-    {
-      'target_name': 'd',
-      'type': 'static_library',
-      'sources': [
-        'd.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/c/d.c b/tools/gyp/test/dependencies/c/d.c
deleted file mode 100644
index 05465fc..0000000
--- a/tools/gyp/test/dependencies/c/d.c
+++ /dev/null
@@ -1,3 +0,0 @@
-int funcD() {
-  return 4;
-}
diff --git a/tools/gyp/test/dependencies/double_dependency.gyp b/tools/gyp/test/dependencies/double_dependency.gyp
deleted file mode 100644
index c4a2d00..0000000
--- a/tools/gyp/test/dependencies/double_dependency.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'double_dependency',
-      'type': 'shared_library',
-      'dependencies': [
-        'double_dependent.gyp:double_dependent',
-      ],
-      'conditions': [
-        ['1==1', {
-          'dependencies': [
-            'double_dependent.gyp:*',
-          ],
-        }],
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/dependencies/double_dependent.gyp b/tools/gyp/test/dependencies/double_dependent.gyp
deleted file mode 100644
index 334caff..0000000
--- a/tools/gyp/test/dependencies/double_dependent.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'double_dependent',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/extra_targets.gyp b/tools/gyp/test/dependencies/extra_targets.gyp
deleted file mode 100644
index c1a26de..0000000
--- a/tools/gyp/test/dependencies/extra_targets.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-      ],
-      # This only depends on the "d" target; other targets in c.gyp
-      # should not become part of the build (unlike with 'c/c.gyp:*').
-      'dependencies': ['c/c.gyp:d'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/gyptest-all-dependent-settings-order.py b/tools/gyp/test/dependencies/gyptest-all-dependent-settings-order.py
deleted file mode 100644
index 715f322..0000000
--- a/tools/gyp/test/dependencies/gyptest-all-dependent-settings-order.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests that all_dependent_settings are processed in topological order.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all_dependent_settings_order.gyp', chdir='adso')
-test.build('all_dependent_settings_order.gyp', chdir='adso')
-test.built_file_must_match('out.txt', 'd.cc a.cc b.cc c.cc',
-                           chdir='adso')
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-double-dependency.py b/tools/gyp/test/dependencies/gyptest-double-dependency.py
deleted file mode 100644
index 7692740..0000000
--- a/tools/gyp/test/dependencies/gyptest-double-dependency.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that pulling in a dependency a second time in a conditional works for
-shared_library targets. Regression test for http://crbug.com/122588
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('double_dependency.gyp')
-
-# If running gyp worked, all is well.
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-extra-targets.py b/tools/gyp/test/dependencies/gyptest-extra-targets.py
deleted file mode 100755
index 09b00d9..0000000
--- a/tools/gyp/test/dependencies/gyptest-extra-targets.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that dependencies don't pull unused targets into the build.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('extra_targets.gyp',
-             '-G', 'xcode_ninja_target_pattern=^a$')
-
-# This should fail if it tries to build 'c_unused' since 'c/c.c' has a syntax
-# error and won't compile.
-test.build('extra_targets.gyp', test.ALL)
-
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-indirect-module-dependency.py b/tools/gyp/test/dependencies/gyptest-indirect-module-dependency.py
deleted file mode 100644
index d001b57..0000000
--- a/tools/gyp/test/dependencies/gyptest-indirect-module-dependency.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure that we cause downstream modules to get built when we depend on the
-parent targets.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-CHDIR = 'module-dep'
-test.run_gyp('indirect-module-dependency.gyp', chdir=CHDIR)
-test.build('indirect-module-dependency.gyp', 'an_exe', chdir=CHDIR)
-test.built_file_must_exist(
-    test.built_file_basename('a_module', test.LOADABLE_MODULE), chdir=CHDIR)
-
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-lib-only.py b/tools/gyp/test/dependencies/gyptest-lib-only.py
deleted file mode 100755
index 3a99a7f4..0000000
--- a/tools/gyp/test/dependencies/gyptest-lib-only.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that a link time only dependency will get pulled into the set of built
-targets, even if no executable uses it.
-"""
-
-import TestGyp
-
-import sys
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('lib_only.gyp')
-
-test.build('lib_only.gyp', test.ALL)
-
-test.built_file_must_exist('a', type=test.STATIC_LIB)
-
-# TODO(bradnelson/mark):
-# On linux and windows a library target will at least pull its link dependencies
-# into the generated project, since not doing so confuses users.
-# This is not currently implemented on mac, which has the opposite behavior.
-if sys.platform == 'darwin':
-  if test.format == 'xcode':
-    test.built_file_must_not_exist('b', type=test.STATIC_LIB)
-  else:
-    assert test.format in ('make', 'ninja', 'xcode-ninja')
-    test.built_file_must_exist('b', type=test.STATIC_LIB)
-else:
-  # Make puts the resulting library in a directory matching the input gyp file;
-  # for the 'b' library, that is in the 'b' subdirectory.
-  test.built_file_must_exist('b', type=test.STATIC_LIB, subdir='b')
-
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-none-traversal.py b/tools/gyp/test/dependencies/gyptest-none-traversal.py
deleted file mode 100755
index c09063d..0000000
--- a/tools/gyp/test/dependencies/gyptest-none-traversal.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that static library dependencies don't traverse none targets, unless
-explicitly specified.
-"""
-
-import TestGyp
-
-import sys
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('none_traversal.gyp')
-
-test.build('none_traversal.gyp', test.ALL)
-
-test.run_built_executable('needs_chain', stdout="2\n")
-test.run_built_executable('doesnt_need_chain', stdout="3\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/gyptest-sharedlib-linksettings.py b/tools/gyp/test/dependencies/gyptest-sharedlib-linksettings.py
deleted file mode 100644
index 87428af..0000000
--- a/tools/gyp/test/dependencies/gyptest-sharedlib-linksettings.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that link_settings in a shared_library are not propagated to targets
-that depend on the shared_library, but are used in the shared_library itself.
-"""
-
-import TestGyp
-import sys
-
-CHDIR='sharedlib-linksettings'
-
-test = TestGyp.TestGyp()
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', test.ALL, chdir=CHDIR)
-test.run_built_executable('program', stdout="1\n2\n", chdir=CHDIR)
-test.pass_test()
diff --git a/tools/gyp/test/dependencies/lib_only.gyp b/tools/gyp/test/dependencies/lib_only.gyp
deleted file mode 100755
index f6c84de..0000000
--- a/tools/gyp/test/dependencies/lib_only.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-      ],
-      'dependencies': ['b/b.gyp:b'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/main.c b/tools/gyp/test/dependencies/main.c
deleted file mode 100644
index 185bd48..0000000
--- a/tools/gyp/test/dependencies/main.c
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-extern int funcA();
-
-int main() {
-  printf("%d\n", funcA());
-  return 0;
-}
diff --git a/tools/gyp/test/dependencies/module-dep/a.cc b/tools/gyp/test/dependencies/module-dep/a.cc
deleted file mode 100644
index 231fc7a..0000000
--- a/tools/gyp/test/dependencies/module-dep/a.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int some_function() {
-  return 0;
-}
diff --git a/tools/gyp/test/dependencies/module-dep/dll.cc b/tools/gyp/test/dependencies/module-dep/dll.cc
deleted file mode 100644
index e1eea02..0000000
--- a/tools/gyp/test/dependencies/module-dep/dll.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#if defined(_MSC_VER)
-__declspec(dllexport)
-#endif
-    void SomeFunction() {
-}
diff --git a/tools/gyp/test/dependencies/module-dep/exe.cc b/tools/gyp/test/dependencies/module-dep/exe.cc
deleted file mode 100644
index b3039ac..0000000
--- a/tools/gyp/test/dependencies/module-dep/exe.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp b/tools/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp
deleted file mode 100644
index f3fb532..0000000
--- a/tools/gyp/test/dependencies/module-dep/indirect-module-dependency.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'an_exe',
-      'type': 'executable',
-      'sources': ['exe.cc'],
-      'dependencies': [
-        'a_dll',
-      ],
-    },
-    {
-      'target_name': 'a_dll',
-      'type': 'shared_library',
-      'sources': ['dll.cc'],
-      'dependencies': [
-        'a_lib',
-      ],
-    },
-    {
-      'target_name': 'a_lib',
-      'type': 'static_library',
-      'dependencies': [
-        'a_module',
-      ],
-      'sources': ['a.cc'],
-    },
-    {
-      'target_name': 'a_module',
-      'type': 'loadable_module',
-      'sources': ['a.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/dependencies/none_traversal.gyp b/tools/gyp/test/dependencies/none_traversal.gyp
deleted file mode 100755
index 3d8ab30..0000000
--- a/tools/gyp/test/dependencies/none_traversal.gyp
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'needs_chain',
-      'type': 'executable',
-      'sources': [
-        'a.c',
-        'main.c',
-      ],
-      'dependencies': ['chain'],
-    },
-    {
-      'target_name': 'chain',
-      'type': 'none',
-      'dependencies': ['b/b.gyp:b'],
-    },
-    {
-      'target_name': 'doesnt_need_chain',
-      'type': 'executable',
-      'sources': [
-        'main.c',
-      ],
-      'dependencies': ['no_chain', 'other_chain'],
-    },
-    {
-      'target_name': 'no_chain',
-      'type': 'none',
-      'sources': [
-      ],
-      'dependencies': ['b/b.gyp:b'],
-      'dependencies_traverse': 0,
-    },
-    {
-      'target_name': 'other_chain',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-      ],
-      'dependencies': ['b/b.gyp:b3'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependencies/sharedlib-linksettings/program.c b/tools/gyp/test/dependencies/sharedlib-linksettings/program.c
deleted file mode 100644
index b7c15ed..0000000
--- a/tools/gyp/test/dependencies/sharedlib-linksettings/program.c
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-/*
- * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
- * program.
- */
-#ifdef TEST_DEFINE
-#error TEST_DEFINE is already defined!
-#endif
-
-#define TEST_DEFINE 2
-
-extern int staticLibFunc();
-
-int main() {
-  printf("%d\n", staticLibFunc());
-  printf("%d\n", TEST_DEFINE);
-  return 0;
-}
diff --git a/tools/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c b/tools/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c
deleted file mode 100644
index 3199bcc..0000000
--- a/tools/gyp/test/dependencies/sharedlib-linksettings/sharedlib.c
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-int sharedLibFunc() {
-  /*
-   * This will fail to compile if TEST_DEFINE was not obtained from sharedlib's
-   * link_settings.
-   */
-  return TEST_DEFINE;
-}
diff --git a/tools/gyp/test/dependencies/sharedlib-linksettings/staticlib.c b/tools/gyp/test/dependencies/sharedlib-linksettings/staticlib.c
deleted file mode 100644
index e889b41..0000000
--- a/tools/gyp/test/dependencies/sharedlib-linksettings/staticlib.c
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-/*
- * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
- * staticlib.
- */
-#ifdef TEST_DEFINE
-#error TEST_DEFINE is defined!
-#endif
-
-#ifdef _WIN32
-__declspec(dllimport)
-#else
-extern
-#endif
-int sharedLibFunc();
-
-int staticLibFunc() {
-  return sharedLibFunc();
-}
diff --git a/tools/gyp/test/dependencies/sharedlib-linksettings/test.gyp b/tools/gyp/test/dependencies/sharedlib-linksettings/test.gyp
deleted file mode 100644
index 830ce32..0000000
--- a/tools/gyp/test/dependencies/sharedlib-linksettings/test.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'allow_sharedlib_linksettings_propagation': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'sharedlib',
-      'type': 'shared_library',
-      'sources': [ 'sharedlib.c' ],
-      'link_settings': {
-        'defines': [ 'TEST_DEFINE=1' ],
-      },
-      'conditions': [
-        ['OS=="linux"', {
-          # Support 64-bit shared libs (also works fine for 32-bit).
-          'cflags': ['-fPIC'],
-        }],
-      ],
-    },
-    {
-      'target_name': 'staticlib',
-      'type': 'static_library',
-      'sources': [ 'staticlib.c' ],
-      'dependencies': [ 'sharedlib' ],
-    },
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [ 'program.c' ],
-      'dependencies': [ 'staticlib' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependency-copy/gyptest-copy.py b/tools/gyp/test/dependency-copy/gyptest-copy.py
deleted file mode 100755
index 5ba7c73..0000000
--- a/tools/gyp/test/dependency-copy/gyptest-copy.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies dependencies do the copy step.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('copies.gyp', chdir='src')
-
-test.build('copies.gyp', 'proj2', chdir='src')
-
-test.run_built_executable('proj1',
-                          chdir='src',
-                          stdout="Hello from file1.c\n")
-test.run_built_executable('proj2',
-                          chdir='src',
-                          stdout="Hello from file2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/dependency-copy/src/copies.gyp b/tools/gyp/test/dependency-copy/src/copies.gyp
deleted file mode 100644
index 4176b18..0000000
--- a/tools/gyp/test/dependency-copy/src/copies.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'proj1',
-      'type': 'executable',
-      'sources': [
-        'file1.c',
-      ],
-    },
-    {
-      'target_name': 'proj2',
-      'type': 'executable',
-      'sources': [
-        'file2.c',
-      ],
-      'dependencies': [
-        'proj1',
-      ]
-    },
-  ],
-}
diff --git a/tools/gyp/test/dependency-copy/src/file1.c b/tools/gyp/test/dependency-copy/src/file1.c
deleted file mode 100644
index d7c3159..0000000
--- a/tools/gyp/test/dependency-copy/src/file1.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from file1.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/dependency-copy/src/file2.c b/tools/gyp/test/dependency-copy/src/file2.c
deleted file mode 100644
index cf40f57..0000000
--- a/tools/gyp/test/dependency-copy/src/file2.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from file2.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/determinism/determinism.gyp b/tools/gyp/test/determinism/determinism.gyp
deleted file mode 100644
index 8134674..0000000
--- a/tools/gyp/test/determinism/determinism.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'determinism',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'depfile_action',
-          'inputs': [
-            'input.txt',
-          ],
-          'outputs': [
-            'output.txt',
-          ],
-          'depfile': 'depfile.d',
-          'action': [ ]
-        },
-      ],
-    },
-    {
-      'target_name': 'determinism2',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'depfile_action',
-          'inputs': [
-            'input.txt',
-          ],
-          'outputs': [
-            'output.txt',
-          ],
-          'depfile': 'depfile.d',
-          'action': [ ]
-        },
-      ],
-    },
-    {
-      'target_name': 'determinism3',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'depfile_action',
-          'inputs': [
-            'input.txt',
-          ],
-          'outputs': [
-            'output.txt',
-          ],
-          'depfile': 'depfile.d',
-          'action': [ ]
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/determinism/empty-targets.gyp b/tools/gyp/test/determinism/empty-targets.gyp
deleted file mode 100644
index a4ccdd7..0000000
--- a/tools/gyp/test/determinism/empty-targets.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'empty_target1',
-      'type': 'none',
-    },
-    {
-      'target_name': 'empty_target2',
-      'type': 'none',
-    },
-    {
-      'target_name': 'empty_target3',
-      'type': 'none',
-    },
-    {
-      'target_name': 'empty_target4',
-      'type': 'none',
-    },
-    {
-      'target_name': 'empty_target5',
-      'type': 'none',
-    },
-    {
-      'target_name': 'empty_target6',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/determinism/gyptest-determinism.py b/tools/gyp/test/determinism/gyptest-determinism.py
deleted file mode 100644
index 670cb4b..0000000
--- a/tools/gyp/test/determinism/gyptest-determinism.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies builds are the same even with different PYTHONHASHSEEDs.
-Tests target_short_names and FlattenToList.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp()
-if test.format == 'ninja':
-  os.environ["PYTHONHASHSEED"] = "1"
-  test.run_gyp('determinism.gyp')
-  base = open(test.built_file_path('build.ninja')).read()
-
-  for i in range(1,5):
-    os.environ["PYTHONHASHSEED"] = str(i)
-    test.run_gyp('determinism.gyp')
-    contents = open(test.built_file_path('build.ninja')).read()
-    if base != contents:
-      test.fail_test()
-
-  del os.environ["PYTHONHASHSEED"]
-  test.pass_test()
diff --git a/tools/gyp/test/determinism/gyptest-empty-target-names.py b/tools/gyp/test/determinism/gyptest-empty-target-names.py
deleted file mode 100644
index cf49f50..0000000
--- a/tools/gyp/test/determinism/gyptest-empty-target-names.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies builds are the same even with different PYTHONHASHSEEDs.
-Tests both solibs and implicit_deps.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp()
-if test.format == 'ninja':
-  os.environ["PYTHONHASHSEED"] = "1"
-  test.run_gyp('empty-targets.gyp')
-  base = open(test.built_file_path('build.ninja')).read()
-
-  for i in range(1,5):
-    os.environ["PYTHONHASHSEED"] = str(i)
-    test.run_gyp('empty-targets.gyp')
-    contents = open(test.built_file_path('build.ninja')).read()
-    if base != contents:
-      test.fail_test()
-
-  del os.environ["PYTHONHASHSEED"]
-  test.pass_test()
diff --git a/tools/gyp/test/determinism/gyptest-needed-variables.py b/tools/gyp/test/determinism/gyptest-needed-variables.py
deleted file mode 100644
index 7b97cca..0000000
--- a/tools/gyp/test/determinism/gyptest-needed-variables.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies builds are the same even with different PYTHONHASHSEEDs.
-Tests needed_variables.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp()
-if test.format == 'ninja':
-  os.environ["PYTHONHASHSEED"] = "1"
-  test.run_gyp('needed-variables.gyp')
-  base = open(test.built_file_path('test.ninja', subdir='obj')).read()
-
-  for i in range(1,5):
-    os.environ["PYTHONHASHSEED"] = str(i)
-    test.run_gyp('needed-variables.gyp')
-    contents = open(test.built_file_path('test.ninja', subdir='obj')).read()
-    if base != contents:
-      test.fail_test()
-
-  del os.environ["PYTHONHASHSEED"]
-  test.pass_test()
diff --git a/tools/gyp/test/determinism/gyptest-solibs.py b/tools/gyp/test/determinism/gyptest-solibs.py
deleted file mode 100644
index de9588d..0000000
--- a/tools/gyp/test/determinism/gyptest-solibs.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies builds are the same even with different PYTHONHASHSEEDs.
-Tests all_targets, implicit_deps and solibs.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp()
-if test.format == 'ninja':
-  os.environ["PYTHONHASHSEED"] = "1"
-  test.run_gyp('solibs.gyp')
-  base1 = open(test.built_file_path('c.ninja', subdir='obj')).read()
-  base2 = open(test.built_file_path('build.ninja')).read()
-
-  for i in range(1,5):
-    os.environ["PYTHONHASHSEED"] = str(i)
-    test.run_gyp('solibs.gyp')
-    contents1 = open(test.built_file_path('c.ninja', subdir='obj')).read()
-    contents2 = open(test.built_file_path('build.ninja')).read()
-    if base1 != contents1:
-      test.fail_test()
-    if base2 != contents2:
-      print base2
-      test.fail_test()
-
-  del os.environ["PYTHONHASHSEED"]
-  test.pass_test()
diff --git a/tools/gyp/test/determinism/main.cc b/tools/gyp/test/determinism/main.cc
deleted file mode 100644
index 2cd74d3..0000000
--- a/tools/gyp/test/determinism/main.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-extern int foo();
-
-int main() {
-  return foo();
-}
diff --git a/tools/gyp/test/determinism/needed-variables.gyp b/tools/gyp/test/determinism/needed-variables.gyp
deleted file mode 100644
index 022165b..0000000
--- a/tools/gyp/test/determinism/needed-variables.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'executable',
-      'sources': ['rule.ext'],
-      'rules': [{
-        'rule_name': 'rule',
-        'extension': 'ext',
-        'inputs': [ 'rule.py', ],
-        'action': [
-          'python',
-          'rule.py',
-          '<(RULE_INPUT_ROOT)',
-          '<(RULE_INPUT_EXT)',
-          '<(RULE_INPUT_DIRNAME)',
-          '<(RULE_INPUT_NAME)',
-          '<(RULE_INPUT_PATH)',
-        ],
-        'outputs': [ 'hello_world.txt' ],
-        'sources': ['rule.ext'],
-        'message': 'Processing <(RULE_INPUT_PATH)',
-        'process_outputs_as_sources': 1,
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-  ],
-}
diff --git a/tools/gyp/test/determinism/rule.py b/tools/gyp/test/determinism/rule.py
deleted file mode 100644
index 310a981..0000000
--- a/tools/gyp/test/determinism/rule.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-
-print 'Hello World'
diff --git a/tools/gyp/test/determinism/solib.cc b/tools/gyp/test/determinism/solib.cc
deleted file mode 100644
index 0856cd4..0000000
--- a/tools/gyp/test/determinism/solib.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-#ifdef _MSC_VER
-__declspec(dllexport)
-#else
-__attribute__((visibility("default")))
-#endif
-int foo() {
-  return 42;
-}
diff --git a/tools/gyp/test/determinism/solibs.gyp b/tools/gyp/test/determinism/solibs.gyp
deleted file mode 100644
index 9ae3246..0000000
--- a/tools/gyp/test/determinism/solibs.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This test both tests solibs and implicit_deps.
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'shared_library',
-      'sources': [ 'solib.cc' ],
-    },
-    {
-      'target_name': 'b',
-      'type': 'shared_library',
-      'sources': [ 'solib.cc' ],
-    },
-    {
-      'target_name': 'c',
-      'type': 'executable',
-      'sources': [ 'main.cc' ],
-      'dependencies': [ 'a', 'b' ],
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/empty-target/empty-target.gyp b/tools/gyp/test/empty-target/empty-target.gyp
deleted file mode 100644
index feefa28..0000000
--- a/tools/gyp/test/empty-target/empty-target.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'empty_target',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/empty-target/gyptest-empty-target.py b/tools/gyp/test/empty-target/gyptest-empty-target.py
deleted file mode 100644
index ecadd4a..0000000
--- a/tools/gyp/test/empty-target/gyptest-empty-target.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target with nothing succeeds.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp()
-test.run_gyp('empty-target.gyp')
-test.build('empty-target.gyp', target='empty_target')
-test.pass_test()
diff --git a/tools/gyp/test/errors/dependency_cycle.gyp b/tools/gyp/test/errors/dependency_cycle.gyp
deleted file mode 100644
index eef44bc..0000000
--- a/tools/gyp/test/errors/dependency_cycle.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target0',
-      'type': 'none',
-      'dependencies': [ 'target1' ],
-    },
-    {
-      'target_name': 'target1',
-      'type': 'none',
-      'dependencies': [ 'target2' ],
-    },
-    {
-      'target_name': 'target2',
-      'type': 'none',
-      'dependencies': [ 'target0' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/errors/duplicate_basenames.gyp b/tools/gyp/test/errors/duplicate_basenames.gyp
deleted file mode 100644
index b3dceb3..0000000
--- a/tools/gyp/test/errors/duplicate_basenames.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'sources': ['foo.c', 'foo.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/errors/duplicate_node.gyp b/tools/gyp/test/errors/duplicate_node.gyp
deleted file mode 100644
index d609609..0000000
--- a/tools/gyp/test/errors/duplicate_node.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    { 'target_name' : 'foo', 'type': 'executable' },
-  ],
-  'targets': [
-    { 'target_name' : 'bar', 'type': 'executable' },
-  ]
-}
diff --git a/tools/gyp/test/errors/duplicate_rule.gyp b/tools/gyp/test/errors/duplicate_rule.gyp
deleted file mode 100644
index dab98e9..0000000
--- a/tools/gyp/test/errors/duplicate_rule.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'executable',
-      'rules': [
-        {
-          'rule_name': 'bar',
-          'extension': '',
-        },
-        {
-          'rule_name': 'bar',
-          'extension': '',
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/errors/duplicate_targets.gyp b/tools/gyp/test/errors/duplicate_targets.gyp
deleted file mode 100644
index aec470e..0000000
--- a/tools/gyp/test/errors/duplicate_targets.gyp
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo'
-    },
-    {
-      'target_name': 'foo'
-    },
-  ]
-}
diff --git a/tools/gyp/test/errors/error_command.gyp b/tools/gyp/test/errors/error_command.gyp
deleted file mode 100644
index 1736fc9..0000000
--- a/tools/gyp/test/errors/error_command.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': '<!(["python", "-c", "import sys; sys.exit(3)"])',
-    },
-  ]
-}
diff --git a/tools/gyp/test/errors/file_cycle0.gyp b/tools/gyp/test/errors/file_cycle0.gyp
deleted file mode 100644
index 3bfafb6..0000000
--- a/tools/gyp/test/errors/file_cycle0.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'top',
-      'type': 'none',
-      'dependencies': [ 'file_cycle1.gyp:middle' ],
-    },
-    {
-      'target_name': 'bottom',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/errors/file_cycle1.gyp b/tools/gyp/test/errors/file_cycle1.gyp
deleted file mode 100644
index fbd7a0d..0000000
--- a/tools/gyp/test/errors/file_cycle1.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'middle',
-      'type': 'none',
-      'dependencies': [ 'file_cycle0.gyp:bottom' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/errors/gyptest-errors.py b/tools/gyp/test/errors/gyptest-errors.py
deleted file mode 100755
index 0296f80..0000000
--- a/tools/gyp/test/errors/gyptest-errors.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that two targets with the same name generates an error.
-"""
-
-import os
-import sys
-
-import TestGyp
-import TestCmd
-
-# TODO(sbc): Remove the use of match_re below, done because scons
-# error messages were not consistent with other generators.
-# Also remove input.py:generator_wants_absolute_build_file_paths.
-
-test = TestGyp.TestGyp()
-
-stderr = ('gyp: Duplicate target definitions for '
-          '.*duplicate_targets.gyp:foo#target\n')
-test.run_gyp('duplicate_targets.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re)
-
-stderr = ('.*: Unable to find targets in build file .*missing_targets.gyp.*')
-test.run_gyp('missing_targets.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-stderr = ('gyp: rule bar exists in duplicate, target '
-          '.*duplicate_rule.gyp:foo#target\n')
-test.run_gyp('duplicate_rule.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re)
-
-stderr = ("gyp: Key 'targets' repeated at level 1 with key path '' while "
-          "reading .*duplicate_node.gyp.*")
-test.run_gyp('duplicate_node.gyp', '--check', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-stderr = (".*target0.*target1.*target2.*target0.*")
-test.run_gyp('dependency_cycle.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-stderr = (".*file_cycle0.*file_cycle1.*file_cycle0.*")
-test.run_gyp('file_cycle0.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-test.run_gyp('duplicate_basenames.gyp', status=1, stderr=stderr)
-
-# Check if '--no-duplicate-basename-check' works.
-if ((test.format == 'make' and sys.platform == 'darwin') or
-    (test.format == 'msvs' and
-        int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check',
-               status=1, stderr=stderr)
-else:
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check')
-
-stderr = ("gyp: Dependency '.*missing_dep.gyp:missing.gyp#target' not found "
-          "while trying to load target .*missing_dep.gyp:foo#target\n")
-test.run_gyp('missing_dep.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re)
-
-# Make sure invalid <!() command invocations say what command it was and
-# mention the gyp file name. Use a "random" command name to trigger an ENOENT.
-stderr = (".*invalid-command-name-egtyevNif3.*netDurj9.*missing_command.gyp.*")
-test.run_gyp('missing_command.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-# Make sure <!() commands that error out result in a message that mentions
-# the command and gyp file name
-stderr = (".*python.*-c.*import sys.*sys.exit.*3.*error_command.gyp.*")
-test.run_gyp('error_command.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re_dotall)
-
-test.pass_test()
diff --git a/tools/gyp/test/errors/missing_command.gyp b/tools/gyp/test/errors/missing_command.gyp
deleted file mode 100644
index c93d954..0000000
--- a/tools/gyp/test/errors/missing_command.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': '<!(["invalid-command-name-egtyevNif3", "netDurj9"])',
-    },
-  ]
-}
diff --git a/tools/gyp/test/errors/missing_dep.gyp b/tools/gyp/test/errors/missing_dep.gyp
deleted file mode 100644
index 08746be..0000000
--- a/tools/gyp/test/errors/missing_dep.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'dependencies': [
-        'missing.gyp'
-      ]
-    },
-  ]
-}
diff --git a/tools/gyp/test/errors/missing_targets.gyp b/tools/gyp/test/errors/missing_targets.gyp
deleted file mode 100644
index 13d4f92..0000000
--- a/tools/gyp/test/errors/missing_targets.gyp
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-  },
-}
diff --git a/tools/gyp/test/escaping/colon/test.gyp b/tools/gyp/test/escaping/colon/test.gyp
deleted file mode 100644
index 715f954..0000000
--- a/tools/gyp/test/escaping/colon/test.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'colon',
-      'type': 'executable',
-      'sources': [
-        'a:b.c',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/',
-          # MSVS2008 gets confused if the same file is in 'sources' and 'copies'
-          'files': [ 'a:b.c-d', ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/escaping/gyptest-colon.py b/tools/gyp/test/escaping/gyptest-colon.py
deleted file mode 100644
index 61a0e24..0000000
--- a/tools/gyp/test/escaping/gyptest-colon.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests that filenames that contain colons are handled correctly.
-(This is important for absolute paths on Windows.)
-"""
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-# TODO: Make colons in filenames work with make, if required.
-test = TestGyp.TestGyp(formats=['!make'])
-CHDIR = 'colon'
-
-source_name = 'colon/a:b.c'
-copies_name = 'colon/a:b.c-d'
-if sys.platform == 'win32':
-  # Windows uses : as drive separator and doesn't allow it in regular filenames.
-  # Use abspath() to create a path that contains a colon instead.
-  abs_source = os.path.abspath('colon/file.c')
-  test.write('colon/test.gyp',
-             test.read('colon/test.gyp').replace("'a:b.c'", repr(abs_source)))
-  source_name = abs_source
-
-  abs_copies = os.path.abspath('colon/file.txt')
-  test.write('colon/test.gyp',
-             test.read('colon/test.gyp').replace("'a:b.c-d'", repr(abs_copies)))
-  copies_name = abs_copies
-
-# Create the file dynamically, Windows is unhappy if a file with a colon in
-# its name is checked in.
-test.write(source_name, 'int main() {}')
-test.write(copies_name, 'foo')
-
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', test.ALL, chdir=CHDIR)
-test.built_file_must_exist(os.path.basename(copies_name), chdir=CHDIR)
-test.pass_test()
diff --git a/tools/gyp/test/exclusion/exclusion.gyp b/tools/gyp/test/exclusion/exclusion.gyp
deleted file mode 100644
index 1232dab..0000000
--- a/tools/gyp/test/exclusion/exclusion.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-        'bogus.c',
-        'also/not/real.c',
-        'also/not/real2.c',
-      ],
-      'sources!': [
-        'bogus.c',
-        'also/not/real.c',
-        'also/not/real2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/exclusion/gyptest-exclusion.py b/tools/gyp/test/exclusion/gyptest-exclusion.py
deleted file mode 100755
index 1fc32bf..0000000
--- a/tools/gyp/test/exclusion/gyptest-exclusion.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that exclusions (e.g. sources!) are respected.  Excluded sources
-that do not exist should not prevent the build from succeeding.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('exclusion.gyp')
-test.build('exclusion.gyp')
-
-# executables
-test.built_file_must_exist('hello' + test._exe, test.EXECUTABLE, bare=True)
-
-test.pass_test()
diff --git a/tools/gyp/test/exclusion/hello.c b/tools/gyp/test/exclusion/hello.c
deleted file mode 100644
index 6e7dc8e..0000000
--- a/tools/gyp/test/exclusion/hello.c
+++ /dev/null
@@ -1,15 +0,0 @@
-/* Copyright (c) 2010 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int func1(void) {
-  return 42;
-}
-
-int main(void) {
-  printf("Hello, world!\n");
-  printf("%d\n", func1());
-  return 0;
-}
diff --git a/tools/gyp/test/external-cross-compile/gyptest-cross.py b/tools/gyp/test/external-cross-compile/gyptest-cross.py
deleted file mode 100755
index a837ec5..0000000
--- a/tools/gyp/test/external-cross-compile/gyptest-cross.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that actions can be + a source scanner can be used to implement,
-cross-compiles (for Native Client at this point).
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('cross.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('cross.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-From test1.cc
-From test2.c
-From test3.cc
-From test4.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/external-cross-compile/src/bogus1.cc b/tools/gyp/test/external-cross-compile/src/bogus1.cc
deleted file mode 100644
index 1b8d011..0000000
--- a/tools/gyp/test/external-cross-compile/src/bogus1.cc
+++ /dev/null
@@ -1 +0,0 @@
-From bogus1.cc
diff --git a/tools/gyp/test/external-cross-compile/src/bogus2.c b/tools/gyp/test/external-cross-compile/src/bogus2.c
deleted file mode 100644
index cbf4a12..0000000
--- a/tools/gyp/test/external-cross-compile/src/bogus2.c
+++ /dev/null
@@ -1 +0,0 @@
-From bogus2.c
diff --git a/tools/gyp/test/external-cross-compile/src/cross.gyp b/tools/gyp/test/external-cross-compile/src/cross.gyp
deleted file mode 100644
index aeda76b..0000000
--- a/tools/gyp/test/external-cross-compile/src/cross.gyp
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': ['cross_compile.gypi'],
-  'target_defaults': {
-    'variables': {
-      'nix_lame%': 0,
-    },
-    'target_conditions': [
-      ['nix_lame==1', {
-        'sources/': [
-          ['exclude', 'lame'],
-        ],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': [
-        'program_inc',
-      ],
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'sources': [
-        'program.cc',
-      ],
-    },
-    {
-      'target_name': 'program_inc',
-      'type': 'none',
-      'dependencies': ['cross_program'],
-      'actions': [
-        {
-          'action_name': 'program_inc',
-          'inputs': ['<(SHARED_INTERMEDIATE_DIR)/cross_program.fake'],
-          'outputs': ['<(SHARED_INTERMEDIATE_DIR)/cross_program.h'],
-          'action': ['python', 'tochar.py', '<@(_inputs)', '<@(_outputs)'],
-        },
-      ],
-      # Allows the test to run without hermetic cygwin on windows.
-      'msvs_cygwin_shell': 0,
-    },
-    {
-      'target_name': 'cross_program',
-      'type': 'none',
-      'variables': {
-        'cross': 1,
-        'nix_lame': 1,
-      },
-      'dependencies': ['cross_lib'],
-      'sources': [
-        'test1.cc',
-        'test2.c',
-        'very_lame.cc',
-        '<(SHARED_INTERMEDIATE_DIR)/cross_lib.fake',
-      ],
-    },
-    {
-      'target_name': 'cross_lib',
-      'type': 'none',
-      'variables': {
-        'cross': 1,
-        'nix_lame': 1,
-      },
-      'sources': [
-        'test3.cc',
-        'test4.c',
-        'bogus1.cc',
-        'bogus2.c',
-        'sort_of_lame.cc',
-      ],
-      'sources!': [
-        'bogus1.cc',
-        'bogus2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/external-cross-compile/src/cross_compile.gypi b/tools/gyp/test/external-cross-compile/src/cross_compile.gypi
deleted file mode 100644
index 36e6519..0000000
--- a/tools/gyp/test/external-cross-compile/src/cross_compile.gypi
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  'target_defaults': {
-    'variables': {
-      'cross%': 0,
-    },
-    'target_conditions': [
-      ['cross==1', {
-        'actions': [
-          {
-            'action_name': 'cross compile >(_target_name)',
-            'inputs': ['^@(_sources)'],
-            'outputs': ['<(SHARED_INTERMEDIATE_DIR)/>(_target_name).fake'],
-            'action': [
-              'python', 'fake_cross.py', '>@(_outputs)', '^@(_sources)',
-            ],
-            # Allows the test to run without hermetic cygwin on windows.
-            'msvs_cygwin_shell': 0,
-          },
-        ],
-      }],
-    ],
-  },
-}
diff --git a/tools/gyp/test/external-cross-compile/src/fake_cross.py b/tools/gyp/test/external-cross-compile/src/fake_cross.py
deleted file mode 100644
index 05eacc6..0000000
--- a/tools/gyp/test/external-cross-compile/src/fake_cross.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-fh = open(sys.argv[1], 'w')
-
-filenames = sys.argv[2:]
-
-for filename in filenames:
-  subfile = open(filename)
-  data = subfile.read()
-  subfile.close()
-  fh.write(data)
-
-fh.close()
diff --git a/tools/gyp/test/external-cross-compile/src/program.cc b/tools/gyp/test/external-cross-compile/src/program.cc
deleted file mode 100644
index 5172ae9..0000000
--- a/tools/gyp/test/external-cross-compile/src/program.cc
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-static char data[] = {
-#include "cross_program.h"
-};
-
-int main(void) {
-  fwrite(data, 1, sizeof(data), stdout);
-  return 0;
-}
diff --git a/tools/gyp/test/external-cross-compile/src/test1.cc b/tools/gyp/test/external-cross-compile/src/test1.cc
deleted file mode 100644
index b584c31..0000000
--- a/tools/gyp/test/external-cross-compile/src/test1.cc
+++ /dev/null
@@ -1 +0,0 @@
-From test1.cc
diff --git a/tools/gyp/test/external-cross-compile/src/test2.c b/tools/gyp/test/external-cross-compile/src/test2.c
deleted file mode 100644
index 367ae19..0000000
--- a/tools/gyp/test/external-cross-compile/src/test2.c
+++ /dev/null
@@ -1 +0,0 @@
-From test2.c
diff --git a/tools/gyp/test/external-cross-compile/src/test3.cc b/tools/gyp/test/external-cross-compile/src/test3.cc
deleted file mode 100644
index 9eb6473..0000000
--- a/tools/gyp/test/external-cross-compile/src/test3.cc
+++ /dev/null
@@ -1 +0,0 @@
-From test3.cc
diff --git a/tools/gyp/test/external-cross-compile/src/test4.c b/tools/gyp/test/external-cross-compile/src/test4.c
deleted file mode 100644
index 8ecc33e..0000000
--- a/tools/gyp/test/external-cross-compile/src/test4.c
+++ /dev/null
@@ -1 +0,0 @@
-From test4.c
diff --git a/tools/gyp/test/external-cross-compile/src/tochar.py b/tools/gyp/test/external-cross-compile/src/tochar.py
deleted file mode 100644
index c0780d9..0000000
--- a/tools/gyp/test/external-cross-compile/src/tochar.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-src = open(sys.argv[1])
-dst = open(sys.argv[2], 'w')
-for ch in src.read():
-  dst.write('%d,\n' % ord(ch))
-src.close()
-dst.close()
diff --git a/tools/gyp/test/generator-output/actions/actions.gyp b/tools/gyp/test/generator-output/actions/actions.gyp
deleted file mode 100644
index dded59a..0000000
--- a/tools/gyp/test/generator-output/actions/actions.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_all_actions',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/executable.gyp:*',
-        'subdir2/none.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/actions/build/README.txt b/tools/gyp/test/generator-output/actions/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/actions/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt b/tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/actions-out/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/actions/subdir1/build/README.txt b/tools/gyp/test/generator-output/actions/subdir1/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/actions/subdir1/executable.gyp b/tools/gyp/test/generator-output/actions/subdir1/executable.gyp
deleted file mode 100644
index 6bdd60a..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/executable.gyp
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-      ],
-      'actions': [
-        {
-          'action_name': 'make-prog1',
-          'inputs': [
-            'make-prog1.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/prog1.c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-        {
-          'action_name': 'make-prog2',
-          'inputs': [
-            'make-prog2.py',
-          ],
-          'outputs': [
-            'actions-out/prog2.c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/actions/subdir1/make-prog1.py b/tools/gyp/test/generator-output/actions/subdir1/make-prog1.py
deleted file mode 100755
index 7ea1d8a..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/make-prog1.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = r"""
-#include <stdio.h>
-
-void prog1(void)
-{
-  printf("Hello from make-prog1.py\n");
-}
-"""
-
-open(sys.argv[1], 'w').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/generator-output/actions/subdir1/make-prog2.py b/tools/gyp/test/generator-output/actions/subdir1/make-prog2.py
deleted file mode 100755
index 0bfe497..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/make-prog2.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = r"""
-#include <stdio.h>
-
-void prog2(void)
-{
-  printf("Hello from make-prog2.py\n");
-}
-"""
-
-open(sys.argv[1], 'w').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/generator-output/actions/subdir1/program.c b/tools/gyp/test/generator-output/actions/subdir1/program.c
deleted file mode 100644
index c093153..0000000
--- a/tools/gyp/test/generator-output/actions/subdir1/program.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-extern void prog1(void);
-extern void prog2(void);
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  prog1();
-  prog2();
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt b/tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/actions/subdir2/actions-out/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/actions/subdir2/build/README.txt b/tools/gyp/test/generator-output/actions/subdir2/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/actions/subdir2/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/actions/subdir2/make-file.py b/tools/gyp/test/generator-output/actions/subdir2/make-file.py
deleted file mode 100755
index fff0653..0000000
--- a/tools/gyp/test/generator-output/actions/subdir2/make-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = "Hello from make-file.py\n"
-
-open(sys.argv[1], 'wb').write(contents)
diff --git a/tools/gyp/test/generator-output/actions/subdir2/none.gyp b/tools/gyp/test/generator-output/actions/subdir2/none.gyp
deleted file mode 100644
index f98f527..0000000
--- a/tools/gyp/test/generator-output/actions/subdir2/none.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'file',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'make-file',
-          'inputs': [
-            'make-file.py',
-          ],
-          'outputs': [
-            'actions-out/file.out',
-            # TODO:  enhance testing infrastructure to test this
-            # without having to hard-code the intermediate dir paths.
-            #'<(INTERMEDIATE_DIR)/file.out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        }
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/copies/build/README.txt b/tools/gyp/test/generator-output/copies/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/copies/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/copies/copies-out/README.txt b/tools/gyp/test/generator-output/copies/copies-out/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/copies/copies-out/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/copies/copies.gyp b/tools/gyp/test/generator-output/copies/copies.gyp
deleted file mode 100644
index 479a3d9..0000000
--- a/tools/gyp/test/generator-output/copies/copies.gyp
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_subdir',
-      'type': 'none',
-      'dependencies': [
-        'subdir/subdir.gyp:*',
-      ],
-    },
-    {
-      'target_name': 'copies1',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': 'copies-out',
-          'files': [
-            'file1',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copies2',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [
-            'file2',
-          ],
-        },
-      ],
-    },
-    # Verify that a null 'files' list doesn't gag the generators.
-    {
-      'target_name': 'copies_null',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-null',
-          'files': [],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/copies/file1 b/tools/gyp/test/generator-output/copies/file1
deleted file mode 100644
index 84d55c5..0000000
--- a/tools/gyp/test/generator-output/copies/file1
+++ /dev/null
@@ -1 +0,0 @@
-file1 contents
diff --git a/tools/gyp/test/generator-output/copies/file2 b/tools/gyp/test/generator-output/copies/file2
deleted file mode 100644
index af1b8ae..0000000
--- a/tools/gyp/test/generator-output/copies/file2
+++ /dev/null
@@ -1 +0,0 @@
-file2 contents
diff --git a/tools/gyp/test/generator-output/copies/subdir/build/README.txt b/tools/gyp/test/generator-output/copies/subdir/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/copies/subdir/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt b/tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/copies/subdir/copies-out/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/copies/subdir/file3 b/tools/gyp/test/generator-output/copies/subdir/file3
deleted file mode 100644
index 43f16f3..0000000
--- a/tools/gyp/test/generator-output/copies/subdir/file3
+++ /dev/null
@@ -1 +0,0 @@
-file3 contents
diff --git a/tools/gyp/test/generator-output/copies/subdir/file4 b/tools/gyp/test/generator-output/copies/subdir/file4
deleted file mode 100644
index 5f7270a..0000000
--- a/tools/gyp/test/generator-output/copies/subdir/file4
+++ /dev/null
@@ -1 +0,0 @@
-file4 contents
diff --git a/tools/gyp/test/generator-output/copies/subdir/subdir.gyp b/tools/gyp/test/generator-output/copies/subdir/subdir.gyp
deleted file mode 100644
index af031d2..0000000
--- a/tools/gyp/test/generator-output/copies/subdir/subdir.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'copies3',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': 'copies-out',
-          'files': [
-            'file3',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copies4',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/copies-out',
-          'files': [
-            'file4',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/gyptest-actions.py b/tools/gyp/test/generator-output/gyptest-actions.py
deleted file mode 100755
index 47121d0..0000000
--- a/tools/gyp/test/generator-output/gyptest-actions.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies --generator-output= behavior when using actions.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# All the generated files should go under 'gypfiles'. The source directory
-# ('actions') should be untouched.
-test.writable(test.workpath('actions'), False)
-test.run_gyp('actions.gyp',
-             '--generator-output=' + test.workpath('gypfiles'),
-             chdir='actions')
-
-test.writable(test.workpath('actions'), True)
-
-test.relocate('actions', 'relocate/actions')
-test.relocate('gypfiles', 'relocate/gypfiles')
-
-test.writable(test.workpath('relocate/actions'), False)
-
-# Some of the action outputs use "pure" relative paths (i.e. without prefixes
-# like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under
-# 'gypfiles', such outputs will still be created relative to the original .gyp
-# sources. Projects probably wouldn't normally do this, since it kind of defeats
-# the purpose of '--generator-output', but it is supported behaviour.
-test.writable(test.workpath('relocate/actions/build'), True)
-test.writable(test.workpath('relocate/actions/subdir1/build'), True)
-test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True)
-test.writable(test.workpath('relocate/actions/subdir2/build'), True)
-test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True)
-
-test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles')
-
-expect = """\
-Hello from program.c
-Hello from make-prog1.py
-Hello from make-prog2.py
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/actions/subdir1'
-else:
-  chdir = 'relocate/gypfiles'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-test.must_match('relocate/actions/subdir2/actions-out/file.out',
-                "Hello from make-file.py\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-copies.py b/tools/gyp/test/generator-output/gyptest-copies.py
deleted file mode 100755
index 262dfc3..0000000
--- a/tools/gyp/test/generator-output/gyptest-copies.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies file copies with --generator-output using an explicit build
-target of 'all'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.writable(test.workpath('copies'), False)
-
-test.run_gyp('copies.gyp',
-             '--generator-output=' + test.workpath('gypfiles'),
-             '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
-             chdir='copies')
-
-test.writable(test.workpath('copies'), True)
-
-test.relocate('copies', 'relocate/copies')
-test.relocate('gypfiles', 'relocate/gypfiles')
-
-test.writable(test.workpath('relocate/copies'), False)
-
-test.writable(test.workpath('relocate/copies/build'), True)
-test.writable(test.workpath('relocate/copies/copies-out'), True)
-test.writable(test.workpath('relocate/copies/subdir/build'), True)
-test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
-
-test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
-
-test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
-                "file1 contents\n")
-
-if test.format == 'xcode':
-  chdir = 'relocate/copies/build'
-elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
-  chdir = 'relocate/gypfiles/out'
-else:
-  chdir = 'relocate/gypfiles'
-test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
-
-test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
-                "file3 contents\n")
-
-if test.format == 'xcode':
-  chdir = 'relocate/copies/subdir/build'
-elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
-  chdir = 'relocate/gypfiles/out'
-else:
-  chdir = 'relocate/gypfiles'
-test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-depth.py b/tools/gyp/test/generator-output/gyptest-depth.py
deleted file mode 100755
index ee59a11..0000000
--- a/tools/gyp/test/generator-output/gyptest-depth.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a project hierarchy created when the --generator-output=
-and --depth= options is used to put the build configuration files in a separate
-directory tree.
-"""
-
-import TestGyp
-import os
-
-# This is a regression test for the make generator only.
-test = TestGyp.TestGyp(formats=['make'])
-
-test.writable(test.workpath('src'), False)
-
-toplevel_dir = os.path.basename(test.workpath())
-
-test.run_gyp(os.path.join(toplevel_dir, 'src', 'prog1.gyp'),
-             '-Dset_symroot=1',
-             '--generator-output=gypfiles',
-             depth=toplevel_dir,
-             chdir='..')
-
-test.writable(test.workpath('src/build'), True)
-test.writable(test.workpath('src/subdir2/build'), True)
-test.writable(test.workpath('src/subdir3/build'), True)
-
-test.build('prog1.gyp', test.ALL, chdir='gypfiles')
-
-chdir = 'gypfiles'
-
-expect = """\
-Hello from %s
-Hello from inc.h
-Hello from inc1/include1.h
-Hello from inc2/include2.h
-Hello from inc3/include3.h
-Hello from subdir2/deeper/deeper.h
-"""
-
-if test.format == 'xcode':
-  chdir = 'src'
-test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
-
-if test.format == 'xcode':
-  chdir = 'src/subdir2'
-test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
-
-if test.format == 'xcode':
-  chdir = 'src/subdir3'
-test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-mac-bundle.py b/tools/gyp/test/generator-output/gyptest-mac-bundle.py
deleted file mode 100644
index 8d19eed..0000000
--- a/tools/gyp/test/generator-output/gyptest-mac-bundle.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies mac bundles work with --generator-output.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=[])
-
-  MAC_BUNDLE_DIR = 'mac-bundle'
-  GYPFILES_DIR = 'gypfiles'
-  test.writable(test.workpath(MAC_BUNDLE_DIR), False)
-  test.run_gyp('test.gyp',
-               '--generator-output=' + test.workpath(GYPFILES_DIR),
-               chdir=MAC_BUNDLE_DIR)
-  test.writable(test.workpath(MAC_BUNDLE_DIR), True)
-
-  test.build('test.gyp', test.ALL, chdir=GYPFILES_DIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-relocate.py b/tools/gyp/test/generator-output/gyptest-relocate.py
deleted file mode 100755
index b867a6c..0000000
--- a/tools/gyp/test/generator-output/gyptest-relocate.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a project hierarchy created with the --generator-output=
-option can be built even when it's relocated to a different path.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.writable(test.workpath('src'), False)
-
-test.run_gyp('prog1.gyp',
-             '-Dset_symroot=1',
-             '--generator-output=' + test.workpath('gypfiles'),
-             chdir='src')
-
-test.writable(test.workpath('src'), True)
-
-test.relocate('src', 'relocate/src')
-test.relocate('gypfiles', 'relocate/gypfiles')
-
-test.writable(test.workpath('relocate/src'), False)
-
-test.writable(test.workpath('relocate/src/build'), True)
-test.writable(test.workpath('relocate/src/subdir2/build'), True)
-test.writable(test.workpath('relocate/src/subdir3/build'), True)
-
-test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')
-
-chdir = 'relocate/gypfiles'
-
-expect = """\
-Hello from %s
-Hello from inc.h
-Hello from inc1/include1.h
-Hello from inc2/include2.h
-Hello from inc3/include3.h
-Hello from subdir2/deeper/deeper.h
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src'
-test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir2'
-test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir3'
-test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-rules.py b/tools/gyp/test/generator-output/gyptest-rules.py
deleted file mode 100755
index a3ff8bd..0000000
--- a/tools/gyp/test/generator-output/gyptest-rules.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies --generator-output= behavior when using rules.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.writable(test.workpath('rules'), False)
-
-test.run_gyp('rules.gyp',
-             '--generator-output=' + test.workpath('gypfiles'),
-             chdir='rules')
-
-test.writable(test.workpath('rules'), True)
-
-test.relocate('rules', 'relocate/rules')
-test.relocate('gypfiles', 'relocate/gypfiles')
-
-test.writable(test.workpath('relocate/rules'), False)
-
-test.writable(test.workpath('relocate/rules/build'), True)
-test.writable(test.workpath('relocate/rules/subdir1/build'), True)
-test.writable(test.workpath('relocate/rules/subdir2/build'), True)
-test.writable(test.workpath('relocate/rules/subdir2/rules-out'), True)
-
-test.build('rules.gyp', test.ALL, chdir='relocate/gypfiles')
-
-expect = """\
-Hello from program.c
-Hello from function1.in1
-Hello from function2.in1
-Hello from define3.in0
-Hello from define4.in0
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/rules/subdir1'
-else:
-  chdir = 'relocate/gypfiles'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-test.must_match('relocate/rules/subdir2/rules-out/file1.out',
-                "Hello from file1.in0\n")
-test.must_match('relocate/rules/subdir2/rules-out/file2.out',
-                "Hello from file2.in0\n")
-test.must_match('relocate/rules/subdir2/rules-out/file3.out',
-                "Hello from file3.in1\n")
-test.must_match('relocate/rules/subdir2/rules-out/file4.out',
-                "Hello from file4.in1\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-subdir2-deep.py b/tools/gyp/test/generator-output/gyptest-subdir2-deep.py
deleted file mode 100755
index ec7862d..0000000
--- a/tools/gyp/test/generator-output/gyptest-subdir2-deep.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target from a .gyp file a few subdirectories
-deep when the --generator-output= option is used to put the build
-configuration files in a separate directory tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.writable(test.workpath('src'), False)
-
-test.writable(test.workpath('src/subdir2/deeper/build'), True)
-
-test.run_gyp('deeper.gyp',
-             '-Dset_symroot=1',
-             '--generator-output=' + test.workpath('gypfiles'),
-             chdir='src/subdir2/deeper')
-
-test.build('deeper.gyp', test.ALL, chdir='gypfiles')
-
-chdir = 'gypfiles'
-
-if test.format == 'xcode':
-  chdir = 'src/subdir2/deeper'
-test.run_built_executable('deeper',
-                          chdir=chdir,
-                          stdout="Hello from deeper.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-symlink.py b/tools/gyp/test/generator-output/gyptest-symlink.py
deleted file mode 100755
index 8b29b34..0000000
--- a/tools/gyp/test/generator-output/gyptest-symlink.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target when the --generator-output= option is used to put
-the build configuration files in a separate directory tree referenced by a
-symlink.
-"""
-
-import TestGyp
-import os
-
-test = TestGyp.TestGyp()
-if not hasattr(os, 'symlink'):
-  test.skip_test('Missing os.symlink -- skipping test.\n')
-
-test.writable(test.workpath('src'), False)
-
-test.writable(test.workpath('src/subdir2/deeper/build'), True)
-
-test.subdir(test.workpath('build'))
-test.subdir(test.workpath('build/deeper'))
-test.symlink('build/deeper', test.workpath('symlink'))
-
-test.writable(test.workpath('build/deeper'), True)
-test.run_gyp('deeper.gyp',
-             '-Dset_symroot=2',
-             '--generator-output=' + test.workpath('symlink'),
-             chdir='src/subdir2/deeper')
-
-chdir = 'symlink'
-test.build('deeper.gyp', test.ALL, chdir=chdir)
-
-if test.format == 'xcode':
-  chdir = 'src/subdir2/deeper'
-test.run_built_executable('deeper',
-                          chdir=chdir,
-                          stdout="Hello from deeper.c\n")
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/gyptest-top-all.py b/tools/gyp/test/generator-output/gyptest-top-all.py
deleted file mode 100755
index b177677..0000000
--- a/tools/gyp/test/generator-output/gyptest-top-all.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a project hierarchy created when the --generator-output=
-option is used to put the build configuration files in a separate
-directory tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.writable(test.workpath('src'), False)
-
-test.run_gyp('prog1.gyp',
-             '-Dset_symroot=1',
-             '--generator-output=' + test.workpath('gypfiles'),
-             chdir='src')
-
-test.writable(test.workpath('src/build'), True)
-test.writable(test.workpath('src/subdir2/build'), True)
-test.writable(test.workpath('src/subdir3/build'), True)
-
-test.build('prog1.gyp', test.ALL, chdir='gypfiles')
-
-chdir = 'gypfiles'
-
-expect = """\
-Hello from %s
-Hello from inc.h
-Hello from inc1/include1.h
-Hello from inc2/include2.h
-Hello from inc3/include3.h
-Hello from subdir2/deeper/deeper.h
-"""
-
-if test.format == 'xcode':
-  chdir = 'src'
-test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
-
-if test.format == 'xcode':
-  chdir = 'src/subdir2'
-test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
-
-if test.format == 'xcode':
-  chdir = 'src/subdir3'
-test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
-
-test.pass_test()
diff --git a/tools/gyp/test/generator-output/mac-bundle/Info.plist b/tools/gyp/test/generator-output/mac-bundle/Info.plist
deleted file mode 100644
index 8cb142e..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/Info.plist
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>ause</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/generator-output/mac-bundle/app.order b/tools/gyp/test/generator-output/mac-bundle/app.order
deleted file mode 100644
index 4eb9e89..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/app.order
+++ /dev/null
@@ -1 +0,0 @@
-_main
diff --git a/tools/gyp/test/generator-output/mac-bundle/header.h b/tools/gyp/test/generator-output/mac-bundle/header.h
deleted file mode 100644
index 7ed7775..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/header.h
+++ /dev/null
@@ -1 +0,0 @@
-int f();
diff --git a/tools/gyp/test/generator-output/mac-bundle/main.c b/tools/gyp/test/generator-output/mac-bundle/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/generator-output/mac-bundle/resource.sb b/tools/gyp/test/generator-output/mac-bundle/resource.sb
deleted file mode 100644
index 731befc..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/resource.sb
+++ /dev/null
@@ -1 +0,0 @@
-A text file.
diff --git a/tools/gyp/test/generator-output/mac-bundle/test.gyp b/tools/gyp/test/generator-output/mac-bundle/test.gyp
deleted file mode 100644
index 35ac674..0000000
--- a/tools/gyp/test/generator-output/mac-bundle/test.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'mac_bundle_resources': [
-        'resource.sb',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'ORDER_FILE': 'app.order',
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/rules/build/README.txt b/tools/gyp/test/generator-output/rules/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/rules/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/rules/copy-file.py b/tools/gyp/test/generator-output/rules/copy-file.py
deleted file mode 100755
index 938c336..0000000
--- a/tools/gyp/test/generator-output/rules/copy-file.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-contents = open(sys.argv[1], 'r').read()
-open(sys.argv[2], 'wb').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/generator-output/rules/rules.gyp b/tools/gyp/test/generator-output/rules/rules.gyp
deleted file mode 100644
index dded59a..0000000
--- a/tools/gyp/test/generator-output/rules/rules.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_all_actions',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/executable.gyp:*',
-        'subdir2/none.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir1/build/README.txt b/tools/gyp/test/generator-output/rules/subdir1/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/rules/subdir1/define3.in0 b/tools/gyp/test/generator-output/rules/subdir1/define3.in0
deleted file mode 100644
index cc29c64..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/define3.in0
+++ /dev/null
@@ -1 +0,0 @@
-#define STRING3 "Hello from define3.in0\n"
diff --git a/tools/gyp/test/generator-output/rules/subdir1/define4.in0 b/tools/gyp/test/generator-output/rules/subdir1/define4.in0
deleted file mode 100644
index c9b0467..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/define4.in0
+++ /dev/null
@@ -1 +0,0 @@
-#define STRING4 "Hello from define4.in0\n"
diff --git a/tools/gyp/test/generator-output/rules/subdir1/executable.gyp b/tools/gyp/test/generator-output/rules/subdir1/executable.gyp
deleted file mode 100644
index 42bee4d..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/executable.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-        'function1.in1',
-        'function2.in1',
-        'define3.in0',
-        'define4.in0',
-      ],
-      'include_dirs': [
-        '<(INTERMEDIATE_DIR)',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file_0',
-          'extension': 'in0',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            # TODO:  fix Make to support generated files not
-            # in a variable-named path like <(INTERMEDIATE_DIR)
-            #'<(RULE_INPUT_ROOT).c',
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 0,
-        },
-        {
-          'rule_name': 'copy_file_1',
-          'extension': 'in1',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            # TODO:  fix Make to support generated files not
-            # in a variable-named path like <(INTERMEDIATE_DIR)
-            #'<(RULE_INPUT_ROOT).c',
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir1/function1.in1 b/tools/gyp/test/generator-output/rules/subdir1/function1.in1
deleted file mode 100644
index 545e7ca..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/function1.in1
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void function1(void)
-{
-  printf("Hello from function1.in1\n");
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir1/function2.in1 b/tools/gyp/test/generator-output/rules/subdir1/function2.in1
deleted file mode 100644
index 6bad43f..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/function2.in1
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void function2(void)
-{
-  printf("Hello from function2.in1\n");
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir1/program.c b/tools/gyp/test/generator-output/rules/subdir1/program.c
deleted file mode 100644
index 56b3206..0000000
--- a/tools/gyp/test/generator-output/rules/subdir1/program.c
+++ /dev/null
@@ -1,18 +0,0 @@
-#include <stdio.h>
-#include "define3.h"
-#include "define4.h"
-
-extern void function1(void);
-extern void function2(void);
-extern void function3(void);
-extern void function4(void);
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  function1();
-  function2();
-  printf("%s", STRING3);
-  printf("%s", STRING4);
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir2/build/README.txt b/tools/gyp/test/generator-output/rules/subdir2/build/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/rules/subdir2/file1.in0 b/tools/gyp/test/generator-output/rules/subdir2/file1.in0
deleted file mode 100644
index 7aca64f..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/file1.in0
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file1.in0
diff --git a/tools/gyp/test/generator-output/rules/subdir2/file2.in0 b/tools/gyp/test/generator-output/rules/subdir2/file2.in0
deleted file mode 100644
index 80a281a..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/file2.in0
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file2.in0
diff --git a/tools/gyp/test/generator-output/rules/subdir2/file3.in1 b/tools/gyp/test/generator-output/rules/subdir2/file3.in1
deleted file mode 100644
index 60ae2e7..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/file3.in1
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file3.in1
diff --git a/tools/gyp/test/generator-output/rules/subdir2/file4.in1 b/tools/gyp/test/generator-output/rules/subdir2/file4.in1
deleted file mode 100644
index 5a3c307..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/file4.in1
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file4.in1
diff --git a/tools/gyp/test/generator-output/rules/subdir2/none.gyp b/tools/gyp/test/generator-output/rules/subdir2/none.gyp
deleted file mode 100644
index 664cbd9..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/none.gyp
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'files',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in0',
-        'file2.in0',
-        'file3.in1',
-        'file4.in1',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file_0',
-          'extension': 'in0',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            'rules-out/<(RULE_INPUT_ROOT).out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 0,
-        },
-        {
-          'rule_name': 'copy_file_1',
-          'extension': 'in1',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            'rules-out/<(RULE_INPUT_ROOT).out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt b/tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
deleted file mode 100644
index 1b052c9..0000000
--- a/tools/gyp/test/generator-output/rules/subdir2/rules-out/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/src/build/README.txt b/tools/gyp/test/generator-output/src/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/src/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/src/inc.h b/tools/gyp/test/generator-output/src/inc.h
deleted file mode 100644
index 57aa1a5..0000000
--- a/tools/gyp/test/generator-output/src/inc.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INC_STRING      "inc.h"
diff --git a/tools/gyp/test/generator-output/src/inc1/include1.h b/tools/gyp/test/generator-output/src/inc1/include1.h
deleted file mode 100644
index 1d59065..0000000
--- a/tools/gyp/test/generator-output/src/inc1/include1.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INCLUDE1_STRING "inc1/include1.h"
diff --git a/tools/gyp/test/generator-output/src/prog1.c b/tools/gyp/test/generator-output/src/prog1.c
deleted file mode 100644
index bf7c2a1..0000000
--- a/tools/gyp/test/generator-output/src/prog1.c
+++ /dev/null
@@ -1,18 +0,0 @@
-#include <stdio.h>
-
-#include "inc.h"
-#include "include1.h"
-#include "include2.h"
-#include "include3.h"
-#include "deeper.h"
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  printf("Hello from %s\n", INC_STRING);
-  printf("Hello from %s\n", INCLUDE1_STRING);
-  printf("Hello from %s\n", INCLUDE2_STRING);
-  printf("Hello from %s\n", INCLUDE3_STRING);
-  printf("Hello from %s\n", DEEPER_STRING);
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/src/prog1.gyp b/tools/gyp/test/generator-output/src/prog1.gyp
deleted file mode 100644
index d50e6fb..0000000
--- a/tools/gyp/test/generator-output/src/prog1.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    'symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'dependencies': [
-        'subdir2/prog2.gyp:prog2',
-      ],
-      'include_dirs': [
-        '.',
-        'inc1',
-        'subdir2/inc2',
-        'subdir3/inc3',
-        'subdir2/deeper',
-      ],
-      'sources': [
-        'prog1.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/src/subdir2/build/README.txt b/tools/gyp/test/generator-output/src/subdir2/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt b/tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/deeper/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c b/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c
deleted file mode 100644
index 843505c..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from deeper.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp b/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
deleted file mode 100644
index 8648770..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../../symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'deeper',
-      'type': 'executable',
-      'sources': [
-        'deeper.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h b/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h
deleted file mode 100644
index f6484a0..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/deeper/deeper.h
+++ /dev/null
@@ -1 +0,0 @@
-#define DEEPER_STRING   "subdir2/deeper/deeper.h"
diff --git a/tools/gyp/test/generator-output/src/subdir2/inc2/include2.h b/tools/gyp/test/generator-output/src/subdir2/inc2/include2.h
deleted file mode 100644
index 1ccfa5d..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/inc2/include2.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INCLUDE2_STRING "inc2/include2.h"
diff --git a/tools/gyp/test/generator-output/src/subdir2/prog2.c b/tools/gyp/test/generator-output/src/subdir2/prog2.c
deleted file mode 100644
index d80d871..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/prog2.c
+++ /dev/null
@@ -1,18 +0,0 @@
-#include <stdio.h>
-
-#include "inc.h"
-#include "include1.h"
-#include "include2.h"
-#include "include3.h"
-#include "deeper.h"
-
-int main(void)
-{
-  printf("Hello from prog2.c\n");
-  printf("Hello from %s\n", INC_STRING);
-  printf("Hello from %s\n", INCLUDE1_STRING);
-  printf("Hello from %s\n", INCLUDE2_STRING);
-  printf("Hello from %s\n", INCLUDE3_STRING);
-  printf("Hello from %s\n", DEEPER_STRING);
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/src/subdir2/prog2.gyp b/tools/gyp/test/generator-output/src/subdir2/prog2.gyp
deleted file mode 100644
index 7176ed8..0000000
--- a/tools/gyp/test/generator-output/src/subdir2/prog2.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'include_dirs': [
-        '..',
-        '../inc1',
-        'inc2',
-        '../subdir3/inc3',
-        'deeper',
-      ],
-      'dependencies': [
-        '../subdir3/prog3.gyp:prog3',
-      ],
-      'sources': [
-        'prog2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/src/subdir3/build/README.txt b/tools/gyp/test/generator-output/src/subdir3/build/README.txt
deleted file mode 100644
index 90ef886..0000000
--- a/tools/gyp/test/generator-output/src/subdir3/build/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-A place-holder for this Xcode build output directory, so that the
-test script can verify that .xcodeproj files are not created in
-their normal location by making the src/ read-only, and then
-selectively making this build directory writable.
diff --git a/tools/gyp/test/generator-output/src/subdir3/inc3/include3.h b/tools/gyp/test/generator-output/src/subdir3/inc3/include3.h
deleted file mode 100644
index bf53bf1..0000000
--- a/tools/gyp/test/generator-output/src/subdir3/inc3/include3.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INCLUDE3_STRING "inc3/include3.h"
diff --git a/tools/gyp/test/generator-output/src/subdir3/prog3.c b/tools/gyp/test/generator-output/src/subdir3/prog3.c
deleted file mode 100644
index c72233d..0000000
--- a/tools/gyp/test/generator-output/src/subdir3/prog3.c
+++ /dev/null
@@ -1,18 +0,0 @@
-#include <stdio.h>
-
-#include "inc.h"
-#include "include1.h"
-#include "include2.h"
-#include "include3.h"
-#include "deeper.h"
-
-int main(void)
-{
-  printf("Hello from prog3.c\n");
-  printf("Hello from %s\n", INC_STRING);
-  printf("Hello from %s\n", INCLUDE1_STRING);
-  printf("Hello from %s\n", INCLUDE2_STRING);
-  printf("Hello from %s\n", INCLUDE3_STRING);
-  printf("Hello from %s\n", DEEPER_STRING);
-  return 0;
-}
diff --git a/tools/gyp/test/generator-output/src/subdir3/prog3.gyp b/tools/gyp/test/generator-output/src/subdir3/prog3.gyp
deleted file mode 100644
index 46c5e00..0000000
--- a/tools/gyp/test/generator-output/src/subdir3/prog3.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog3',
-      'type': 'executable',
-      'include_dirs': [
-        '..',
-        '../inc1',
-        '../subdir2/inc2',
-        'inc3',
-        '../subdir2/deeper',
-      ],
-      'sources': [
-        'prog3.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/generator-output/src/symroot.gypi b/tools/gyp/test/generator-output/src/symroot.gypi
deleted file mode 100644
index 5199164..0000000
--- a/tools/gyp/test/generator-output/src/symroot.gypi
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'set_symroot%': 0,
-  },
-  'conditions': [
-    ['set_symroot == 1', {
-      'xcode_settings': {
-        'SYMROOT': '<(DEPTH)/build',
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/gyp-defines/defines.gyp b/tools/gyp/test/gyp-defines/defines.gyp
deleted file mode 100644
index f59bbd2..0000000
--- a/tools/gyp/test/gyp-defines/defines.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'inputs': [],
-          'outputs': [ 'action.txt' ],
-          'action': [
-            'python',
-            'echo.py',
-            '<(key)',
-            '<(_outputs)',
-          ],
-          'msvs_cygwin_shell': 0,
-        }
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/gyp-defines/echo.py b/tools/gyp/test/gyp-defines/echo.py
deleted file mode 100644
index b85add1..0000000
--- a/tools/gyp/test/gyp-defines/echo.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[2], 'w+')
-f.write(sys.argv[1])
-f.close()
diff --git a/tools/gyp/test/gyp-defines/gyptest-multiple-values.py b/tools/gyp/test/gyp-defines/gyptest-multiple-values.py
deleted file mode 100644
index 67735cc..0000000
--- a/tools/gyp/test/gyp-defines/gyptest-multiple-values.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that when multiple values are supplied for a gyp define, the last one
-is used.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-os.environ['GYP_DEFINES'] = 'key=value1 key=value2 key=value3'
-test.run_gyp('defines.gyp')
-
-test.build('defines.gyp')
-test.must_contain('action.txt', 'value3')
-
-# The last occurrence of a repeated set should take precedence over other
-# values.
-os.environ['GYP_DEFINES'] = 'key=repeated_value key=value1 key=repeated_value'
-test.run_gyp('defines.gyp')
-
-if test.format == 'msvs' and not test.uses_msbuild:
-  # msvs versions before 2010 don't detect build rule changes not reflected
-  # in file system timestamps. Rebuild to see differences.
-  test.build('defines.gyp', rebuild=True)
-else:
-  test.build('defines.gyp')
-test.must_contain('action.txt', 'repeated_value')
-
-test.pass_test()
diff --git a/tools/gyp/test/gyp-defines/gyptest-regyp.py b/tools/gyp/test/gyp-defines/gyptest-regyp.py
deleted file mode 100644
index 0895d81..0000000
--- a/tools/gyp/test/gyp-defines/gyptest-regyp.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that when the same value is repeated for a gyp define, duplicates are
-stripped from the regeneration rule.
-"""
-
-import os
-import TestGyp
-
-# Regenerating build files when a gyp file changes is currently only supported
-# by the make generator.
-test = TestGyp.TestGyp(formats=['make'])
-
-os.environ['GYP_DEFINES'] = 'key=repeated_value key=value1 key=repeated_value'
-test.run_gyp('defines.gyp')
-test.build('defines.gyp')
-
-# The last occurrence of a repeated set should take precedence over other
-# values. See gyptest-multiple-values.py.
-test.must_contain('action.txt', 'repeated_value')
-
-# So the regeneration rule needs to use the correct order.
-test.must_not_contain(
-    'Makefile', '"-Dkey=repeated_value" "-Dkey=value1" "-Dkey=repeated_value"')
-test.must_contain('Makefile', '"-Dkey=value1" "-Dkey=repeated_value"')
-
-# Sleep so that the changed gyp file will have a newer timestamp than the
-# previously generated build files.
-test.sleep()
-os.utime("defines.gyp", None)
-
-test.build('defines.gyp')
-test.must_contain('action.txt', 'repeated_value')
-
-test.pass_test()
diff --git a/tools/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py b/tools/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
deleted file mode 100755
index ba51528..0000000
--- a/tools/gyp/test/hard_dependency/gyptest-exported-hard-dependency.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that a hard_dependency that is exported is pulled in as a dependency
-for a target if the target is a static library and if the generator will
-remove dependencies between static libraries.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-if test.format == 'dump_dependency_json':
-  test.skip_test('Skipping test; dependency JSON does not adjust ' \
-                 'static libraries.\n')
-
-test.run_gyp('hard_dependency.gyp', chdir='src')
-
-chdir = 'relocate/src'
-test.relocate('src', chdir)
-
-test.build('hard_dependency.gyp', 'c', chdir=chdir)
-
-# The 'a' static library should be built, as it has actions with side-effects
-# that are necessary to compile 'c'. Even though 'c' does not directly depend
-# on 'a', because 'a' is a hard_dependency that 'b' exports, 'c' should import
-# it as a hard_dependency and ensure it is built before building 'c'.
-test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_exist('c', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_not_exist('d', type=test.STATIC_LIB, chdir=chdir)
-
-test.pass_test()
diff --git a/tools/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py b/tools/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py
deleted file mode 100755
index 10774ca..0000000
--- a/tools/gyp/test/hard_dependency/gyptest-no-exported-hard-dependency.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that a hard_dependency that is not exported is not pulled in as a
-dependency for a target if the target does not explicitly specify a dependency
-and none of its dependencies export the hard_dependency.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-if test.format == 'dump_dependency_json':
-  test.skip_test('Skipping test; dependency JSON does not adjust ' \
-                 'static libraries.\n')
-
-test.run_gyp('hard_dependency.gyp', chdir='src')
-
-chdir = 'relocate/src'
-test.relocate('src', chdir)
-
-test.build('hard_dependency.gyp', 'd', chdir=chdir)
-
-# Because 'c' does not export a hard_dependency, only the target 'd' should
-# be built. This is because the 'd' target does not need the generated headers
-# in order to be compiled.
-test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_not_exist('c', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_exist('d', type=test.STATIC_LIB, chdir=chdir)
-
-test.pass_test()
diff --git a/tools/gyp/test/hard_dependency/src/a.c b/tools/gyp/test/hard_dependency/src/a.c
deleted file mode 100644
index 0fa0223..0000000
--- a/tools/gyp/test/hard_dependency/src/a.c
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include "a.h"
-
-int funcA() {
-  return 42;
-}
diff --git a/tools/gyp/test/hard_dependency/src/a.h b/tools/gyp/test/hard_dependency/src/a.h
deleted file mode 100644
index 854a065..0000000
--- a/tools/gyp/test/hard_dependency/src/a.h
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#ifndef A_H_
-#define A_H_
-
-#include "generated.h"
-
-int funcA();
-
-#endif  // A_H_
diff --git a/tools/gyp/test/hard_dependency/src/b.c b/tools/gyp/test/hard_dependency/src/b.c
deleted file mode 100644
index 0baace9..0000000
--- a/tools/gyp/test/hard_dependency/src/b.c
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include "a.h"
-
-int funcB() {
-  return funcA();
-}
diff --git a/tools/gyp/test/hard_dependency/src/b.h b/tools/gyp/test/hard_dependency/src/b.h
deleted file mode 100644
index 22b48ce..0000000
--- a/tools/gyp/test/hard_dependency/src/b.h
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#ifndef B_H_
-#define B_H_
-
-#include "a.h"
-
-int funcB();
-
-#endif  // B_H_
diff --git a/tools/gyp/test/hard_dependency/src/c.c b/tools/gyp/test/hard_dependency/src/c.c
deleted file mode 100644
index 7d00682..0000000
--- a/tools/gyp/test/hard_dependency/src/c.c
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include "b.h"
-#include "c.h"
-
-int funcC() {
-  return funcB();
-}
diff --git a/tools/gyp/test/hard_dependency/src/c.h b/tools/gyp/test/hard_dependency/src/c.h
deleted file mode 100644
index f4ea7fe..0000000
--- a/tools/gyp/test/hard_dependency/src/c.h
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#ifndef C_H_
-#define C_H_
-
-int funcC();
-
-#endif  // C_H_
diff --git a/tools/gyp/test/hard_dependency/src/d.c b/tools/gyp/test/hard_dependency/src/d.c
deleted file mode 100644
index d016c3c..0000000
--- a/tools/gyp/test/hard_dependency/src/d.c
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include "c.h"
-
-int funcD() {
-  return funcC();
-}
diff --git a/tools/gyp/test/hard_dependency/src/emit.py b/tools/gyp/test/hard_dependency/src/emit.py
deleted file mode 100755
index 2df74b7..0000000
--- a/tools/gyp/test/hard_dependency/src/emit.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write('/* Hello World */\n')
-f.close()
diff --git a/tools/gyp/test/hard_dependency/src/hard_dependency.gyp b/tools/gyp/test/hard_dependency/src/hard_dependency.gyp
deleted file mode 100644
index 4479c5f..0000000
--- a/tools/gyp/test/hard_dependency/src/hard_dependency.gyp
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-        'a.h',
-      ],
-      'hard_dependency': 1,
-      'actions': [
-        {
-          'action_name': 'generate_headers',
-          'inputs': [
-            'emit.py'
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/generated.h'
-          ],
-          'action': [
-            'python',
-            'emit.py',
-            '<(SHARED_INTERMEDIATE_DIR)/generated.h',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '<(SHARED_INTERMEDIATE_DIR)',
-        ],
-      },
-    },
-    {
-      'target_name': 'b',
-      'type': 'static_library',
-      'sources': [
-        'b.c',
-        'b.h',
-      ],
-      'dependencies': [
-        'a',
-      ],
-      'export_dependent_settings': [
-        'a',
-      ],
-    },
-    {
-      'target_name': 'c',
-      'type': 'static_library',
-      'sources': [
-        'c.c',
-        'c.h',
-      ],
-      'dependencies': [
-        'b',
-      ],
-    },
-    {
-      'target_name': 'd',
-      'type': 'static_library',
-      'sources': [
-        'd.c',
-      ],
-      'dependencies': [
-        'c',
-      ],
-    }
-  ],
-}
diff --git a/tools/gyp/test/hello/gyptest-all.py b/tools/gyp/test/hello/gyptest-all.py
deleted file mode 100755
index 1739b68..0000000
--- a/tools/gyp/test/hello/gyptest-all.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using an explicit build target of 'all'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_all')
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp', test.ALL)
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-test.up_to_date('hello.gyp', test.ALL)
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/gyptest-default.py b/tools/gyp/test/hello/gyptest-default.py
deleted file mode 100755
index 22377e7..0000000
--- a/tools/gyp/test/hello/gyptest-default.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_default')
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp')
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-test.up_to_date('hello.gyp', test.DEFAULT)
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/gyptest-disable-regyp.py b/tools/gyp/test/hello/gyptest-disable-regyp.py
deleted file mode 100755
index 1e4b306..0000000
--- a/tools/gyp/test/hello/gyptest-disable-regyp.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that Makefiles don't get rebuilt when a source gyp file changes and
-the disable_regeneration generator flag is set.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('hello.gyp', '-Gauto_regeneration=0')
-
-test.build('hello.gyp', test.ALL)
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-# Sleep so that the changed gyp file will have a newer timestamp than the
-# previously generated build files.
-test.sleep()
-test.write('hello.gyp', test.read('hello2.gyp'))
-
-test.build('hello.gyp', test.ALL)
-
-# Should still be the old executable, as regeneration was disabled.
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/gyptest-regyp-output.py b/tools/gyp/test/hello/gyptest-regyp-output.py
deleted file mode 100644
index fd88a85..0000000
--- a/tools/gyp/test/hello/gyptest-regyp-output.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that Makefiles get rebuilt when a source gyp file changes and
---generator-output is used.
-"""
-
-import TestGyp
-
-# Regenerating build files when a gyp file changes is currently only supported
-# by the make generator, and --generator-output is not supported by ninja, so we
-# can only test for make.
-test = TestGyp.TestGyp(formats=['make'])
-
-CHDIR='generator-output'
-
-test.run_gyp('hello.gyp', '--generator-output=%s' % CHDIR)
-
-test.build('hello.gyp', test.ALL, chdir=CHDIR)
-
-test.run_built_executable('hello', stdout="Hello, world!\n", chdir=CHDIR)
-
-# Sleep so that the changed gyp file will have a newer timestamp than the
-# previously generated build files.
-test.sleep()
-test.write('hello.gyp', test.read('hello2.gyp'))
-
-test.build('hello.gyp', test.ALL, chdir=CHDIR)
-
-test.run_built_executable('hello', stdout="Hello, two!\n", chdir=CHDIR)
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/gyptest-regyp.py b/tools/gyp/test/hello/gyptest-regyp.py
deleted file mode 100755
index b513edc..0000000
--- a/tools/gyp/test/hello/gyptest-regyp.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that Makefiles get rebuilt when a source gyp file changes.
-"""
-
-import TestGyp
-
-# Regenerating build files when a gyp file changes is currently only supported
-# by the make generator.
-test = TestGyp.TestGyp(formats=['make'])
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp', test.ALL)
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-# Sleep so that the changed gyp file will have a newer timestamp than the
-# previously generated build files.
-test.sleep()
-test.write('hello.gyp', test.read('hello2.gyp'))
-
-test.build('hello.gyp', test.ALL)
-
-test.run_built_executable('hello', stdout="Hello, two!\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/gyptest-target.py b/tools/gyp/test/hello/gyptest-target.py
deleted file mode 100755
index 1abaf70..0000000
--- a/tools/gyp/test/hello/gyptest-target.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using an explicit build target of 'hello'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_target')
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp', 'hello')
-
-test.run_built_executable('hello', stdout="Hello, world!\n")
-
-test.up_to_date('hello.gyp', 'hello')
-
-test.pass_test()
diff --git a/tools/gyp/test/hello/hello.c b/tools/gyp/test/hello/hello.c
deleted file mode 100644
index 0a4c806..0000000
--- a/tools/gyp/test/hello/hello.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/hello/hello.gyp b/tools/gyp/test/hello/hello.gyp
deleted file mode 100644
index 1974d51..0000000
--- a/tools/gyp/test/hello/hello.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/hello/hello2.c b/tools/gyp/test/hello/hello2.c
deleted file mode 100644
index b14299c..0000000
--- a/tools/gyp/test/hello/hello2.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, two!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/hello/hello2.gyp b/tools/gyp/test/hello/hello2.gyp
deleted file mode 100644
index 25b08ca..0000000
--- a/tools/gyp/test/hello/hello2.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py b/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py
deleted file mode 100755
index 82e39f9..0000000
--- a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-arg.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies inclusion of $HOME/.gyp/include.gypi works when --config-dir is
-specified.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-os.environ['HOME'] = os.path.abspath('home2')
-
-test.run_gyp('all.gyp', '--config-dir=~/.gyp_new', chdir='src')
-
-# After relocating, we should still be able to build (build file shouldn't
-# contain relative reference to ~/.gyp/include.gypi)
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('printfoo',
-                          chdir='relocate/src',
-                          stdout='FOO is fromhome3\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py b/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py
deleted file mode 100755
index 6f4b299..0000000
--- a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-config-env.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies inclusion of $HOME/.gyp_new/include.gypi works when GYP_CONFIG_DIR
-is set.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-os.environ['HOME'] = os.path.abspath('home')
-os.environ['GYP_CONFIG_DIR'] = os.path.join(os.path.abspath('home2'),
-                                            '.gyp_new')
-
-test.run_gyp('all.gyp', chdir='src')
-
-# After relocating, we should still be able to build (build file shouldn't
-# contain relative reference to ~/.gyp_new/include.gypi)
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('printfoo',
-                          chdir='relocate/src',
-                          stdout='FOO is fromhome3\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py b/tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
deleted file mode 100755
index fdf8b14..0000000
--- a/tools/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies inclusion of $HOME/.gyp/include.gypi works properly with relocation
-and with regeneration.
-"""
-
-import os
-import TestGyp
-
-# Regenerating build files when a gyp file changes is currently only supported
-# by the make generator.
-test = TestGyp.TestGyp(formats=['make'])
-
-os.environ['HOME'] = os.path.abspath('home')
-
-test.run_gyp('all.gyp', chdir='src')
-
-# After relocating, we should still be able to build (build file shouldn't
-# contain relative reference to ~/.gyp/include.gypi)
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('printfoo',
-                          chdir='relocate/src',
-                          stdout='FOO is fromhome\n')
-
-# Building should notice any changes to ~/.gyp/include.gypi and regyp.
-test.sleep()
-
-test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('printfoo',
-                          chdir='relocate/src',
-                          stdout='FOO is fromhome2\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/home_dot_gyp/gyptest-home-includes.py b/tools/gyp/test/home_dot_gyp/gyptest-home-includes.py
deleted file mode 100755
index 8ad5255..0000000
--- a/tools/gyp/test/home_dot_gyp/gyptest-home-includes.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies inclusion of $HOME/.gyp/include.gypi works.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-os.environ['HOME'] = os.path.abspath('home')
-
-test.run_gyp('all.gyp', chdir='src')
-
-# After relocating, we should still be able to build (build file shouldn't
-# contain relative reference to ~/.gyp/include.gypi)
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('printfoo',
-                          chdir='relocate/src',
-                          stdout='FOO is fromhome\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi b/tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi
deleted file mode 100644
index fcfb39b..0000000
--- a/tools/gyp/test/home_dot_gyp/home/.gyp/include.gypi
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  'variables': {
-    'foo': '"fromhome"',
-  },
-}
diff --git a/tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi b/tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
deleted file mode 100644
index f0d84b3..0000000
--- a/tools/gyp/test/home_dot_gyp/home2/.gyp/include.gypi
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  'variables': {
-    'foo': '"fromhome2"',
-  },
-}
diff --git a/tools/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi b/tools/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi
deleted file mode 100644
index 4094dfd..0000000
--- a/tools/gyp/test/home_dot_gyp/home2/.gyp_new/include.gypi
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-  'variables': {
-    'foo': '"fromhome3"',
-  },
-}
diff --git a/tools/gyp/test/home_dot_gyp/src/all.gyp b/tools/gyp/test/home_dot_gyp/src/all.gyp
deleted file mode 100644
index 14b6aea..0000000
--- a/tools/gyp/test/home_dot_gyp/src/all.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'foo%': '"fromdefault"',
-  },
-  'targets': [
-    {
-      'target_name': 'printfoo',
-      'type': 'executable',
-      'sources': [
-        'printfoo.c',
-      ],
-      'defines': [
-        'FOO=<(foo)',
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/home_dot_gyp/src/printfoo.c b/tools/gyp/test/home_dot_gyp/src/printfoo.c
deleted file mode 100644
index 9bb6718..0000000
--- a/tools/gyp/test/home_dot_gyp/src/printfoo.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("FOO is %s\n", FOO);
-  return 0;
-}
diff --git a/tools/gyp/test/include_dirs/gyptest-all.py b/tools/gyp/test/include_dirs/gyptest-all.py
deleted file mode 100755
index d64bc6a..0000000
--- a/tools/gyp/test/include_dirs/gyptest-all.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies use of include_dirs when using an explicit build target of 'all'.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('includes.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('includes.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from includes.c
-Hello from inc.h
-Hello from include1.h
-Hello from subdir/inc2/include2.h
-Hello from shadow2/shadow.h
-"""
-test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
-
-if test.format == 'xcode':
-  chdir='relocate/src/subdir'
-else:
-  chdir='relocate/src'
-
-expect = """\
-Hello from subdir/subdir_includes.c
-Hello from subdir/inc.h
-Hello from include1.h
-Hello from subdir/inc2/include2.h
-"""
-test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
-
-test.pass_test()
diff --git a/tools/gyp/test/include_dirs/gyptest-default.py b/tools/gyp/test/include_dirs/gyptest-default.py
deleted file mode 100755
index fc61415..0000000
--- a/tools/gyp/test/include_dirs/gyptest-default.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies use of include_dirs when using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('includes.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('includes.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from includes.c
-Hello from inc.h
-Hello from include1.h
-Hello from subdir/inc2/include2.h
-Hello from shadow2/shadow.h
-"""
-test.run_built_executable('includes', stdout=expect, chdir='relocate/src')
-
-if test.format == 'xcode':
-  chdir='relocate/src/subdir'
-else:
-  chdir='relocate/src'
-
-expect = """\
-Hello from subdir/subdir_includes.c
-Hello from subdir/inc.h
-Hello from include1.h
-Hello from subdir/inc2/include2.h
-"""
-test.run_built_executable('subdir_includes', stdout=expect, chdir=chdir)
-
-test.pass_test()
diff --git a/tools/gyp/test/include_dirs/src/inc.h b/tools/gyp/test/include_dirs/src/inc.h
deleted file mode 100644
index 0398d69..0000000
--- a/tools/gyp/test/include_dirs/src/inc.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INC_STRING "inc.h"
diff --git a/tools/gyp/test/include_dirs/src/inc1/include1.h b/tools/gyp/test/include_dirs/src/inc1/include1.h
deleted file mode 100644
index 43356b5..0000000
--- a/tools/gyp/test/include_dirs/src/inc1/include1.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INCLUDE1_STRING "include1.h"
diff --git a/tools/gyp/test/include_dirs/src/includes.c b/tools/gyp/test/include_dirs/src/includes.c
deleted file mode 100644
index 6e2a23c..0000000
--- a/tools/gyp/test/include_dirs/src/includes.c
+++ /dev/null
@@ -1,19 +0,0 @@
-#include <stdio.h>
-
-#include "inc.h"
-#include "include1.h"
-#include "include2.h"
-#include "shadow.h"
-
-int main(void)
-{
-  printf("Hello from includes.c\n");
-  printf("Hello from %s\n", INC_STRING);
-  printf("Hello from %s\n", INCLUDE1_STRING);
-  printf("Hello from %s\n", INCLUDE2_STRING);
-  /* Test that include_dirs happen first: The gyp file has a -Ishadow1
-     cflag and an include_dir of shadow2.  Including shadow.h should get
-     the shadow.h from the include_dir. */
-  printf("Hello from %s\n", SHADOW_STRING);
-  return 0;
-}
diff --git a/tools/gyp/test/include_dirs/src/includes.gyp b/tools/gyp/test/include_dirs/src/includes.gyp
deleted file mode 100644
index 3592690..0000000
--- a/tools/gyp/test/include_dirs/src/includes.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'includes',
-      'type': 'executable',
-      'dependencies': [
-        'subdir/subdir_includes.gyp:subdir_includes',
-      ],
-      'cflags': [
-        '-Ishadow1',
-      ],
-      'include_dirs': [
-        '.',
-        'inc1',
-        'shadow2',
-        'subdir/inc2',
-      ],
-      'sources': [
-        'includes.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/include_dirs/src/shadow1/shadow.h b/tools/gyp/test/include_dirs/src/shadow1/shadow.h
deleted file mode 100644
index 80f6de2..0000000
--- a/tools/gyp/test/include_dirs/src/shadow1/shadow.h
+++ /dev/null
@@ -1 +0,0 @@
-#define SHADOW_STRING "shadow1/shadow.h"
diff --git a/tools/gyp/test/include_dirs/src/shadow2/shadow.h b/tools/gyp/test/include_dirs/src/shadow2/shadow.h
deleted file mode 100644
index fad5ccd..0000000
--- a/tools/gyp/test/include_dirs/src/shadow2/shadow.h
+++ /dev/null
@@ -1 +0,0 @@
-#define SHADOW_STRING "shadow2/shadow.h"
diff --git a/tools/gyp/test/include_dirs/src/subdir/inc.h b/tools/gyp/test/include_dirs/src/subdir/inc.h
deleted file mode 100644
index 0a68d7b..0000000
--- a/tools/gyp/test/include_dirs/src/subdir/inc.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INC_STRING "subdir/inc.h"
diff --git a/tools/gyp/test/include_dirs/src/subdir/inc2/include2.h b/tools/gyp/test/include_dirs/src/subdir/inc2/include2.h
deleted file mode 100644
index 721577e..0000000
--- a/tools/gyp/test/include_dirs/src/subdir/inc2/include2.h
+++ /dev/null
@@ -1 +0,0 @@
-#define INCLUDE2_STRING "subdir/inc2/include2.h"
diff --git a/tools/gyp/test/include_dirs/src/subdir/subdir_includes.c b/tools/gyp/test/include_dirs/src/subdir/subdir_includes.c
deleted file mode 100644
index 4623543..0000000
--- a/tools/gyp/test/include_dirs/src/subdir/subdir_includes.c
+++ /dev/null
@@ -1,14 +0,0 @@
-#include <stdio.h>
-
-#include "inc.h"
-#include "include1.h"
-#include "include2.h"
-
-int main(void)
-{
-  printf("Hello from subdir/subdir_includes.c\n");
-  printf("Hello from %s\n", INC_STRING);
-  printf("Hello from %s\n", INCLUDE1_STRING);
-  printf("Hello from %s\n", INCLUDE2_STRING);
-  return 0;
-}
diff --git a/tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp b/tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
deleted file mode 100644
index 257d052..0000000
--- a/tools/gyp/test/include_dirs/src/subdir/subdir_includes.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'subdir_includes',
-      'type': 'executable',
-      'include_dirs': [
-        '.',
-        '../inc1',
-        'inc2',
-      ],
-      'sources': [
-        'subdir_includes.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/intermediate_dir/gyptest-intermediate-dir.py b/tools/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
deleted file mode 100755
index bf4b91a..0000000
--- a/tools/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that targets have independent INTERMEDIATE_DIRs.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('test.gyp', chdir='src')
-
-test.build('test.gyp', 'target1', chdir='src')
-# Check stuff exists.
-intermediate_file1 = test.read('src/outfile.txt')
-test.must_contain(intermediate_file1, 'target1')
-
-shared_intermediate_file1 = test.read('src/shared_outfile.txt')
-test.must_contain(shared_intermediate_file1, 'shared_target1')
-
-test.run_gyp('test2.gyp', chdir='src')
-
-# Force the shared intermediate to be rebuilt.
-test.sleep()
-test.touch('src/shared_infile.txt')
-test.build('test2.gyp', 'target2', chdir='src')
-# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
-# file did.
-intermediate_file2 = test.read('src/outfile.txt')
-test.must_contain(intermediate_file1, 'target1')
-test.must_contain(intermediate_file2, 'target2')
-
-shared_intermediate_file2 = test.read('src/shared_outfile.txt')
-if shared_intermediate_file1 != shared_intermediate_file2:
-  test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
-
-test.must_contain(shared_intermediate_file1, 'shared_target2')
-test.must_contain(shared_intermediate_file2, 'shared_target2')
-
-test.pass_test()
diff --git a/tools/gyp/test/intermediate_dir/src/script.py b/tools/gyp/test/intermediate_dir/src/script.py
deleted file mode 100755
index 7abc7ee..0000000
--- a/tools/gyp/test/intermediate_dir/src/script.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Takes 3 arguments. Writes the 1st argument to the file in the 2nd argument,
-# and writes the absolute path to the file in the 2nd argument to the file in
-# the 3rd argument.
-
-import os
-import shlex
-import sys
-
-if len(sys.argv) == 3 and ' ' in sys.argv[2]:
-  sys.argv[2], fourth = shlex.split(sys.argv[2].replace('\\', '\\\\'))
-  sys.argv.append(fourth)
-
-#print >>sys.stderr, sys.argv
-
-with open(sys.argv[2], 'w') as f:
-  f.write(sys.argv[1])
-
-with open(sys.argv[3], 'w') as f:
-  f.write(os.path.abspath(sys.argv[2]))
diff --git a/tools/gyp/test/intermediate_dir/src/shared_infile.txt b/tools/gyp/test/intermediate_dir/src/shared_infile.txt
deleted file mode 100644
index e2aba15..0000000
--- a/tools/gyp/test/intermediate_dir/src/shared_infile.txt
+++ /dev/null
@@ -1 +0,0 @@
-dummy input
diff --git a/tools/gyp/test/intermediate_dir/src/test.gyp b/tools/gyp/test/intermediate_dir/src/test.gyp
deleted file mode 100644
index b61e7e8..0000000
--- a/tools/gyp/test/intermediate_dir/src/test.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target1',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'intermediate',
-          'inputs': [],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/intermediate_out.txt',
-            'outfile.txt',
-          ],
-          'action': [
-            'python', 'script.py', 'target1', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'shared_intermediate',
-          'inputs': [
-            'shared_infile.txt',
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/intermediate_out.txt',
-            'shared_outfile.txt',
-          ],
-          'action': [
-            'python', 'script.py', 'shared_target1', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/intermediate_dir/src/test2.gyp b/tools/gyp/test/intermediate_dir/src/test2.gyp
deleted file mode 100644
index 41f5564..0000000
--- a/tools/gyp/test/intermediate_dir/src/test2.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target2',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'intermediate',
-          'inputs': [],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/intermediate_out.txt',
-            'outfile.txt',
-          ],
-          'action': [
-            'python', 'script.py', 'target2', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'shared_intermediate',
-          'inputs': [
-            'shared_infile.txt',
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/intermediate_out.txt',
-            'shared_outfile.txt',
-          ],
-          'action': [
-            'python', 'script.py', 'shared_target2', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings b/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
deleted file mode 100644
index 452e7fa..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
+++ /dev/null
@@ -1,3 +0,0 @@
-/* Localized versions of Info.plist keys */
-
-NSHumanReadableCopyright = "Copyright ©2011 Google Inc."
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings b/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings
deleted file mode 100644
index 35bd33a..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/InfoPlist.strings
+++ /dev/null
@@ -1,3 +0,0 @@
-/* Localized versions of Info.plist keys */
-
-NSHumanReadableCopyright = "Copyright ©2011 Google Inc.";
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist b/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist
deleted file mode 100644
index 6b94882..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/LanguageMap.plist
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>en</key>
-  <string>en</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib b/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib
deleted file mode 100644
index 21b6044..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/MainMenu.xib
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="9060" systemVersion="15B42" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="9051"/>
-        <capability name="Aspect ratio constraints" minToolsVersion="5.1"/>
-        <capability name="Constraints with non-1.0 multipliers" minToolsVersion="5.1"/>
-    </dependencies>
-    <objects>
-        <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
-        <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
-        <viewController id="Ssz-5V-cv2">
-            <view key="view" contentMode="scaleToFill" id="tRS-Cx-RH3">
-            </view>
-            <point key="canvasLocation" x="548" y="1086"/>
-        </viewController>
-    </objects>
-</document>
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard b/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
deleted file mode 100644
index 723bc85..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="1.0" toolsVersion="1906" systemVersion="11A511" targetRuntime="iOS.CocoaTouch" nextObjectID="6" propertyAccessControl="none" initialViewController="2">
-    <dependencies>
-        <development defaultVersion="4200" identifier="xcode"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="902"/>
-    </dependencies>
-    <scenes>
-        <scene sceneID="5">
-            <objects>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="4" sceneMemberID="firstResponder"/>
-                <viewController id="2" customClass="ViewController" sceneMemberID="viewController">
-                    <view key="view" contentMode="scaleToFill" id="3">
-                        <rect key="frame" x="0.0" y="20" width="320" height="460"/>
-                        <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
-                        <subviews/>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-                    </view>
-                </viewController>
-            </objects>
-        </scene>
-    </scenes>
-    <simulatedMetricsContainer key="defaultSimulatedMetrics">
-        <simulatedStatusBarMetrics key="statusBar"/>
-        <simulatedOrientationMetrics key="orientation"/>
-        <simulatedScreenMetrics key="destination"/>
-    </simulatedMetricsContainer>
-</document>
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index 2db2b1c..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "mac",
-      "size" : "16x16",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "16x16",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "32x32",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "32x32",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "128x128",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "128x128",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "256x256",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "256x256",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "512x512",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "512x512",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json b/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
deleted file mode 100644
index 0a87b6e..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "universal",
-      "scale" : "1x",
-      "filename" : "super_sylvain.png"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "2x",
-      "filename" : "super_sylvain@2x.png"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "3x",
-      "filename" : "super_sylvain@3x.png"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png b/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
deleted file mode 100644
index 0ba7691..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png b/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
deleted file mode 100644
index edfa6a5..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png b/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
deleted file mode 100644
index e0652ef..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist b/tools/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist
deleted file mode 100644
index bb90043..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/TestApp-Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>ause</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/check_no_signature.py b/tools/gyp/test/ios/app-bundle/TestApp/check_no_signature.py
deleted file mode 100644
index 4f6e340..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/check_no_signature.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/python
-
-import os
-import subprocess
-import sys
-
-p = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],os.environ['EXECUTABLE_PATH'])
-proc = subprocess.Popen(['codesign', '-v', p],
-                        stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
-o = proc.communicate()[0].strip()
-if "code object is not signed at all" not in o:
-  sys.stderr.write('File should not already be signed.')
-  sys.exit(1)
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/main.m b/tools/gyp/test/ios/app-bundle/TestApp/main.m
deleted file mode 100644
index ec93e0e..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/main.m
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-int main(int argc, char *argv[])
-{
-  NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
-  int retVal  = UIApplicationMain(argc, argv, nil, nil);
-  [pool release];
-  return retVal;
-}
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m b/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
deleted file mode 100644
index 28bb117..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#if defined(__LP64__)
-# error 64-bit build
-#endif
diff --git a/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m b/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
deleted file mode 100644
index e6d2558..0000000
--- a/tools/gyp/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#if !defined(__LP64__)
-# error 32-bit build
-#endif
diff --git a/tools/gyp/test/ios/app-bundle/test-archs.gyp b/tools/gyp/test/ios/app-bundle/test-archs.gyp
deleted file mode 100644
index fa935c4..0000000
--- a/tools/gyp/test/ios/app-bundle/test-archs.gyp
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-  ],
-  'target_defaults': {
-    'mac_bundle_resources': [
-      'TestApp/English.lproj/InfoPlist.strings',
-      'TestApp/English.lproj/MainMenu.xib',
-    ],
-    'link_settings': {
-      'libraries': [
-        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-        '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-      ],
-    },
-    'xcode_settings': {
-      'OTHER_CFLAGS': [
-        '-fobjc-abi-version=2',
-      ],
-      'CODE_SIGNING_REQUIRED': 'NO',
-      'SDKROOT': 'iphoneos',  # -isysroot
-      'TARGETED_DEVICE_FAMILY': '1,2',
-      'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-      'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
-      'CONFIGURATION_BUILD_DIR':'build/Default',
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'TestNoArchs',
-      'product_name': 'TestNoArchs',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/only-compile-in-32-bits.m',
-      ],
-      'xcode_settings': {
-        'VALID_ARCHS': [
-          'i386',
-          'x86_64',
-          'arm64',
-          'armv7',
-        ],
-      }
-    },
-    {
-      'target_name': 'TestArch32Bits',
-      'product_name': 'TestArch32Bits',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/only-compile-in-32-bits.m',
-      ],
-      'xcode_settings': {
-        'ARCHS': [
-          '$(ARCHS_STANDARD)',
-        ],
-        'VALID_ARCHS': [
-          'i386',
-          'armv7',
-        ],
-      },
-    },
-    {
-      'target_name': 'TestArch64Bits',
-      'product_name': 'TestArch64Bits',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/only-compile-in-64-bits.m',
-      ],
-      'xcode_settings': {
-        'ARCHS': [
-          '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
-        ],
-        'VALID_ARCHS': [
-          'x86_64',
-          'arm64',
-        ],
-      },
-    },
-    {
-      'target_name': 'TestMultiArchs',
-      'product_name': 'TestMultiArchs',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'xcode_settings': {
-        'ARCHS': [
-          '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
-        ],
-        'VALID_ARCHS': [
-          'x86_64',
-          'i386',
-          'arm64',
-          'armv7',
-        ],
-      }
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/test-assets-catalog.gyp b/tools/gyp/test/ios/app-bundle/test-assets-catalog.gyp
deleted file mode 100644
index 9a12d07..0000000
--- a/tools/gyp/test/ios/app-bundle/test-assets-catalog.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'conditions': [
-    ['"<(GENERATOR)"=="ninja"', {
-      'make_global_settings': [
-        ['CC', '/usr/bin/clang'],
-        ['CXX', '/usr/bin/clang++'],
-      ],
-    }],
-  ],
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Assets Catalog Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-        'TestApp/English.lproj/Main_iPhone.storyboard',
-        'TestApp/Images.xcassets',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/test-crosscompile.gyp b/tools/gyp/test/ios/app-bundle/test-crosscompile.gyp
deleted file mode 100644
index d904958..0000000
--- a/tools/gyp/test/ios/app-bundle/test-crosscompile.gyp
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-  ],
-  'targets': [
-    # This target will not be built, but is here so that ninja Xcode emulation
-    # understand this is a multi-platform (ios + mac) build.
-    {
-      'target_name': 'TestDummy',
-      'product_name': 'TestDummy',
-      'toolsets': ['target'],
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'tool_main.cc',
-      ],
-      'xcode_settings': {
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'TARGETED_DEVICE_FAMILY': '1,2',
-        'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
-      },
-    },
-    {
-      'target_name': 'TestHost',
-      'product_name': 'TestHost',
-      'toolsets': ['host'],
-      'type': 'executable',
-      'mac_bundle': 0,
-      'sources': [
-        'tool_main.cc',
-      ],
-      'xcode_settings': {
-        'SDKROOT': 'macosx',
-        'ARCHS': [
-          '$(ARCHS_STANDARD)',
-          'x86_64',
-        ],
-        'VALID_ARCHS': [
-          'x86_64',
-        ],
-      }
-    }
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/test-device.gyp b/tools/gyp/test/ios/app-bundle/test-device.gyp
deleted file mode 100644
index a0cfff7..0000000
--- a/tools/gyp/test/ios/app-bundle/test-device.gyp
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'conditions': [
-    ['"<(GENERATOR)"=="xcode"', {
-      'target_defaults': {
-        'configurations': {
-          'Default': {
-            'xcode_settings': {
-              'SDKROOT': 'iphonesimulator',
-              'CONFIGURATION_BUILD_DIR':'build/Default',
-            }
-          },
-          'Default-iphoneos': {
-            'xcode_settings': {
-              'SDKROOT': 'iphoneos',
-              'CONFIGURATION_BUILD_DIR':'build/Default-iphoneos',
-            }
-          },
-        },
-      },
-    }, {
-      'target_defaults': {
-        'configurations': {
-          'Default': {
-            'xcode_settings': {
-              'SDKROOT': 'iphonesimulator',
-            }
-          },
-        },
-      },
-    }],
-  ],
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'TARGETED_DEVICE_FAMILY': '1,2',
-        'INFOPLIST_OUTPUT_FORMAT':'xml',
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
-        'CODE_SIGNING_REQUIRED': 'NO',
-        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
-
-      },
-    },
-    {
-      'target_name': 'sig_test',
-      'product_name': 'sigtest',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'Verify no signature',
-          'action': [
-            'python',
-            'TestApp/check_no_signature.py'
-          ],
-        },
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
-        'INFOPLIST_OUTPUT_FORMAT':'xml',
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
-        'CONFIGURATION_BUILD_DIR':'buildsig/Default',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/test.gyp b/tools/gyp/test/ios/app-bundle/test.gyp
deleted file mode 100644
index 544c589..0000000
--- a/tools/gyp/test/ios/app-bundle/test.gyp
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'conditions': [
-    ['"<(GENERATOR)"=="ninja"', {
-      'make_global_settings': [
-        ['CC', '/usr/bin/clang'],
-        ['CXX', '/usr/bin/clang++'],
-      ],
-    }],
-  ],
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
-        'TestApp/English.lproj/LanguageMap.plist',
-        'TestApp/English.lproj/MainMenu.xib',
-        'TestApp/English.lproj/Main_iPhone.storyboard',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-    },
-    {
-      'target_name': 'test_app_xml',
-      'product_name': 'Test App Gyp XML',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-        'TestApp/English.lproj/Main_iPhone.storyboard',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'INFOPLIST_OUTPUT_FORMAT':'xml',
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/app-bundle/tool_main.cc b/tools/gyp/test/ios/app-bundle/tool_main.cc
deleted file mode 100644
index 9dc3c94..0000000
--- a/tools/gyp/test/ios/app-bundle/tool_main.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/ios/deployment-target/check-version-min.c b/tools/gyp/test/ios/deployment-target/check-version-min.c
deleted file mode 100644
index 761c529..0000000
--- a/tools/gyp/test/ios/deployment-target/check-version-min.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/* Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <Availability.h>
-
-/* GYPTEST_MAC_VERSION_MIN: should be set to the corresponding value of
- * xcode setting 'MACOSX_DEPLOYMENT_TARGET', otherwise both should be
- * left undefined.
- *
- * GYPTEST_IOS_VERSION_MIN: should be set to the corresponding value of
- * xcode setting 'IPHONEOS_DEPLOYMENT_TARGET', otherwise both should be
- * left undefined.
- */
-
-#if defined(GYPTEST_MAC_VERSION_MIN)
-# if GYPTEST_MAC_VERSION_MIN != __MAC_OS_X_VERSION_MIN_REQUIRED
-#  error __MAC_OS_X_VERSION_MIN_REQUIRED has wrong value
-# endif
-#elif defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
-# error __MAC_OS_X_VERSION_MIN_REQUIRED should be undefined
-#endif
-
-#if defined(GYPTEST_IOS_VERSION_MIN)
-# if GYPTEST_IOS_VERSION_MIN != __IPHONE_OS_VERSION_MIN_REQUIRED
-#  error __IPHONE_OS_VERSION_MIN_REQUIRED has wrong value
-# endif
-#elif defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
-# error __IPHONE_OS_VERSION_MIN_REQUIRED should be undefined
-#endif
-
-int main() { return 0; }
-
diff --git a/tools/gyp/test/ios/deployment-target/deployment-target.gyp b/tools/gyp/test/ios/deployment-target/deployment-target.gyp
deleted file mode 100644
index bdc1439..0000000
--- a/tools/gyp/test/ios/deployment-target/deployment-target.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-  'targets': [
-    {
-      'target_name': 'version-min-4.3',
-      'type': 'static_library',
-      'sources': [ 'check-version-min.c', ],
-      'defines': [ 'GYPTEST_IOS_VERSION_MIN=40300', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '4.3',
-      },
-    },
-    {
-      'target_name': 'version-min-5.0',
-      'type': 'static_library',
-      'sources': [ 'check-version-min.c', ],
-      'defines': [ 'GYPTEST_IOS_VERSION_MIN=50000', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
-      },
-    }
-  ],
-}
-
diff --git a/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.h b/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.h
deleted file mode 100644
index 1c92509..0000000
--- a/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.h
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface ActionViewController : UIViewController
-
-@end
diff --git a/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.m b/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.m
deleted file mode 100644
index d37bacd..0000000
--- a/tools/gyp/test/ios/extension/ActionExtension/ActionViewController.m
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "ActionViewController.h"
-#import <MobileCoreServices/MobileCoreServices.h>
-
-@interface ActionViewController ()
-
-@end
-
-@implementation ActionViewController
-
-- (void)viewDidLoad {
-  [super viewDidLoad];
-}
-
-- (void)didReceiveMemoryWarning {
-  [super didReceiveMemoryWarning];
-  // Dispose of any resources that can be recreated.
-}
-
-- (IBAction)done {
-  // Return any edited content to the host app.
-  // This template doesn't do anything, so we just echo the passed in items.
-  [self.extensionContext
-      completeRequestReturningItems:self.extensionContext.inputItems
-      completionHandler:nil];
-}
-
-@end
diff --git a/tools/gyp/test/ios/extension/ActionExtension/Info.plist b/tools/gyp/test/ios/extension/ActionExtension/Info.plist
deleted file mode 100644
index f89cd79..0000000
--- a/tools/gyp/test/ios/extension/ActionExtension/Info.plist
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>en</string>
-	<key>CFBundleDisplayName</key>
-	<string>ActionExtension</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.gyptest.extension.ActionExtension</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>XPC!</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSExtension</key>
-	<dict>
-		<key>NSExtensionAttributes</key>
-		<dict>
-			<key>NSExtensionActivationRule</key>
-			<string>TRUEPREDICATE</string>
-			<key>NSExtensionPointName</key>
-			<string>com.apple.ui-services</string>
-			<key>NSExtensionPointVersion</key>
-			<string>1.0</string>
-		</dict>
-		<key>NSExtensionMainStoryboard</key>
-		<string>MainInterface</string>
-		<key>NSExtensionPointIdentifier</key>
-		<string>com.apple.ui-services</string>
-	</dict>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard b/tools/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard
deleted file mode 100644
index 5aa5818..0000000
--- a/tools/gyp/test/ios/extension/ActionExtension/MainInterface.storyboard
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6148" systemVersion="14A229a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="ObA-dk-sSI">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6147"/>
-    </dependencies>
-    <scenes>
-        <!--Action View Controller - Image-->
-        <scene sceneID="7MM-of-jgj">
-            <objects>
-                <viewController title="Image" id="ObA-dk-sSI" customClass="ActionViewController" customModuleProvider="" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="qkL-Od-lgU"/>
-                        <viewControllerLayoutGuide type="bottom" id="n38-gi-rB5"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="zMn-AG-sqS">
-                        <rect key="frame" x="0.0" y="0.0" width="320" height="528"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <subviews>
-                            <imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="9ga-4F-77Z">
-                                <rect key="frame" x="0.0" y="64" width="320" height="464"/>
-                            </imageView>
-                            <navigationBar contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="NOA-Dm-cuz">
-                                <rect key="frame" x="0.0" y="20" width="320" height="44"/>
-                                <items>
-                                    <navigationItem id="3HJ-uW-3hn">
-                                        <barButtonItem key="leftBarButtonItem" title="Done" style="done" id="WYi-yp-eM6">
-                                            <connections>
-                                                <action selector="done" destination="ObA-dk-sSI" id="Qdu-qn-U6V"/>
-                                            </connections>
-                                        </barButtonItem>
-                                    </navigationItem>
-                                </items>
-                            </navigationBar>
-                        </subviews>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
-                        <constraints>
-                            <constraint firstAttribute="trailing" secondItem="NOA-Dm-cuz" secondAttribute="trailing" id="A05-Pj-hrr"/>
-                            <constraint firstItem="9ga-4F-77Z" firstAttribute="top" secondItem="NOA-Dm-cuz" secondAttribute="bottom" id="Fps-3D-QQW"/>
-                            <constraint firstItem="NOA-Dm-cuz" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="HxO-8t-aoh"/>
-                            <constraint firstAttribute="trailing" secondItem="9ga-4F-77Z" secondAttribute="trailing" id="Ozw-Hg-0yh"/>
-                            <constraint firstItem="9ga-4F-77Z" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="XH5-ld-ONA"/>
-                            <constraint firstItem="n38-gi-rB5" firstAttribute="top" secondItem="9ga-4F-77Z" secondAttribute="bottom" id="eQg-nn-Zy4"/>
-                            <constraint firstItem="NOA-Dm-cuz" firstAttribute="top" secondItem="qkL-Od-lgU" secondAttribute="bottom" id="we0-1t-bgp"/>
-                        </constraints>
-                    </view>
-                    <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
-                    <size key="freeformSize" width="320" height="528"/>
-                    <connections>
-                        <outlet property="imageView" destination="9ga-4F-77Z" id="5y6-5w-9QO"/>
-                        <outlet property="view" destination="zMn-AG-sqS" id="Qma-de-2ek"/>
-                    </connections>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="X47-rx-isc" userLabel="First Responder" sceneMemberID="firstResponder"/>
-            </objects>
-            <point key="canvasLocation" x="252" y="-124"/>
-        </scene>
-    </scenes>
-    <simulatedMetricsContainer key="defaultSimulatedMetrics">
-        <simulatedStatusBarMetrics key="statusBar"/>
-        <simulatedOrientationMetrics key="orientation"/>
-        <simulatedScreenMetrics key="destination" type="retina4"/>
-    </simulatedMetricsContainer>
-</document>
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h b/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h
deleted file mode 100644
index 510e230..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.h
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface AppDelegate : UIResponder <UIApplicationDelegate>
-
-@property (strong, nonatomic) UIWindow *window;
-
-@end
-
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m b/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m
deleted file mode 100644
index 1197bc1..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/AppDelegate.m
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "AppDelegate.h"
-
-@interface AppDelegate ()
-
-@end
-
-@implementation AppDelegate
-
-- (BOOL)application:(UIApplication*)application
-    didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
-  // Override point for customization after application launch.
-  return YES;
-}
-
-@end
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard b/tools/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard
deleted file mode 100644
index e8f3cfb..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/Base.lproj/Main.storyboard
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6162" systemVersion="14A238h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6160"/>
-    </dependencies>
-    <scenes>
-        <!--View Controller-->
-        <scene sceneID="tne-QT-ifu">
-            <objects>
-                <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
-                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
-                        <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-                    </view>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
-            </objects>
-        </scene>
-    </scenes>
-</document>
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index f697f61..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "iphone",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "60x60",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json b/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
deleted file mode 100644
index 4458b40..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "images" : [
-    {
-      "orientation" : "portrait",
-      "idiom" : "iphone",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "iphone",
-      "subtype" : "retina4",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "1x"
-    },
-    {
-      "orientation" : "landscape",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "1x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "landscape",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/Info.plist b/tools/gyp/test/ios/extension/ExtensionContainer/Info.plist
deleted file mode 100644
index 31ccf4c..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/Info.plist
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-    <key>CFBundleDevelopmentRegion</key>
-    <string>en</string>
-    <key>CFBundleExecutable</key>
-    <string>ExtensionContainer</string>
-    <key>CFBundleIdentifier</key>
-    <string>com.google.gyptest.extension</string>
-    <key>CFBundleInfoDictionaryVersion</key>
-    <string>6.0</string>
-    <key>CFBundleName</key>
-    <string>${PRODUCT_NAME}</string>
-    <key>CFBundlePackageType</key>
-    <string>APPL</string>
-    <key>CFBundleShortVersionString</key>
-    <string>1.0</string>
-    <key>CFBundleSignature</key>
-    <string>????</string>
-    <key>CFBundleVersion</key>
-    <string>1</string>
-    <key>LSRequiresIPhoneOS</key>
-    <true/>
-    <key>UIMainStoryboardFile</key>
-    <string>Main</string>
-    <key>UIRequiredDeviceCapabilities</key>
-    <array>
-        <string>armv7</string>
-    </array>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.h b/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.h
deleted file mode 100644
index fad7754..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface ViewController : UIViewController
-
-
-@end
-
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.m b/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.m
deleted file mode 100644
index 3810fa9..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/ViewController.m
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "ViewController.h"
-
-@interface ViewController ()
-
-
-@end
-
-@implementation ViewController
-
-- (void)viewDidLoad {
-  [super viewDidLoad];
-  // Do any additional setup after loading the view, typically from a nib.
-}
-
-- (void)didReceiveMemoryWarning {
-  [super didReceiveMemoryWarning];
-  // Dispose of any resources that can be recreated.
-}
-
-@end
diff --git a/tools/gyp/test/ios/extension/ExtensionContainer/main.m b/tools/gyp/test/ios/extension/ExtensionContainer/main.m
deleted file mode 100644
index 47aecb5..0000000
--- a/tools/gyp/test/ios/extension/ExtensionContainer/main.m
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-//
-#import <UIKit/UIKit.h>
-#import "AppDelegate.h"
-
-int main(int argc, char* argv[]) {
-  @autoreleasepool {
-    return UIApplicationMain(argc, argv, nil,
-        NSStringFromClass([AppDelegate class]));
-  }
-}
diff --git a/tools/gyp/test/ios/extension/extension.gyp b/tools/gyp/test/ios/extension/extension.gyp
deleted file mode 100644
index 91c0684..0000000
--- a/tools/gyp/test/ios/extension/extension.gyp
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-  'targets': [
-    {
-      'target_name': 'ExtensionContainer',
-      'product_name': 'ExtensionContainer',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'ExtensionContainer/Base.lproj/Main.storyboard',
-      ],
-      'sources': [
-        'ExtensionContainer/AppDelegate.h',
-        'ExtensionContainer/AppDelegate.m',
-        'ExtensionContainer/ViewController.h',
-        'ExtensionContainer/ViewController.m',
-        'ExtensionContainer/main.m',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/ExtensionContainer.app/PlugIns',
-          'files': [
-            '<(PRODUCT_DIR)/ActionExtension.appex',
-      ]}],
-      'dependencies': [
-        'ActionExtension'
-      ],
-
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'ExtensionContainer/Info.plist',
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'ARCHS': [ 'armv7' ],
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
-        'CODE_SIGNING_REQUIRED': 'NO',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-    },
-    {
-      'target_name': 'ActionExtension',
-      'product_name': 'ActionExtension',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'ios_app_extension': 1,
-      'sources': [
-        'ActionExtension/ActionViewController.h',
-        'ActionExtension/ActionViewController.m',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-          '$(SDKROOT)/System/Library/Frameworks/MobileCoreServices.framework',
-        ],
-      },
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'ActionExtension/Info.plist',
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'ARCHS': [ 'armv7' ],
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
-        'CODE_SIGNING_REQUIRED': 'NO',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/ios/framework/framework.gyp b/tools/gyp/test/ios/framework/framework.gyp
deleted file mode 100644
index 2c6fdd5..0000000
--- a/tools/gyp/test/ios/framework/framework.gyp
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'iOSFramework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [
-        'iOSFramework/iOSFramework.h',
-        'iOSFramework/Thing.h',
-        'iOSFramework/Thing.m',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'mac_framework_headers': [
-        # Using two headers here tests mac_tool.py NextGreaterPowerOf2.
-        'iOSFramework/iOSFramework.h',
-        'iOSFramework/Thing.h',
-      ],
-      'mac_framework_dirs': [
-        '$(SDKROOT)/../../Library/Frameworks',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'INFOPLIST_FILE': 'iOSFramework/Info.plist',
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/ios/framework/iOSFramework/Info.plist b/tools/gyp/test/ios/framework/iOSFramework/Info.plist
deleted file mode 100644
index d3de8ee..0000000
--- a/tools/gyp/test/ios/framework/iOSFramework/Info.plist
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>en</string>
-	<key>CFBundleExecutable</key>
-	<string>$(EXECUTABLE_NAME)</string>
-	<key>CFBundleIdentifier</key>
-	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>$(PRODUCT_NAME)</string>
-	<key>CFBundlePackageType</key>
-	<string>FMWK</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>$(CURRENT_PROJECT_VERSION)</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/framework/iOSFramework/Thing.h b/tools/gyp/test/ios/framework/iOSFramework/Thing.h
deleted file mode 100644
index a34e908..0000000
--- a/tools/gyp/test/ios/framework/iOSFramework/Thing.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import <Foundation/Foundation.h>
-#import <UIKit/UIKit.h>
-
-@interface Thing : NSObject
-
-+ (instancetype)thing;
-
-- (void)sayHello;
-
-@end
diff --git a/tools/gyp/test/ios/framework/iOSFramework/Thing.m b/tools/gyp/test/ios/framework/iOSFramework/Thing.m
deleted file mode 100644
index 5b2b549..0000000
--- a/tools/gyp/test/ios/framework/iOSFramework/Thing.m
+++ /dev/null
@@ -1,22 +0,0 @@
-#import "Thing.h"
-
-@interface Thing ()
-
-@end
-
-@implementation Thing
-
-+ (instancetype)thing {
-  static Thing* thing = nil;
-  static dispatch_once_t onceToken;
-  dispatch_once(&onceToken, ^{
-      thing = [[[self class] alloc] init];
-  });
-  return thing;
-}
-
-- (void)sayHello {
-  NSLog(@"Hello World");
-}
-
-@end
diff --git a/tools/gyp/test/ios/framework/iOSFramework/iOSFramework.h b/tools/gyp/test/ios/framework/iOSFramework/iOSFramework.h
deleted file mode 100644
index e86b524..0000000
--- a/tools/gyp/test/ios/framework/iOSFramework/iOSFramework.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#import <UIKit/UIKit.h>
-
-//! Project version number for iOSFramework.
-FOUNDATION_EXPORT double iOSFrameworkVersionNumber;
-
-//! Project version string for iOSFramework.
-FOUNDATION_EXPORT const unsigned char iOSFrameworkVersionString[];
-
-#import <iOSFramework/Thing.h>
diff --git a/tools/gyp/test/ios/gyptest-app-ios-assets-catalog.py b/tools/gyp/test/ios/gyptest-app-ios-assets-catalog.py
deleted file mode 100755
index efd96ac..0000000
--- a/tools/gyp/test/ios/gyptest-app-ios-assets-catalog.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ios app bundles are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import os.path
-import sys
-
-# Xcode support for asset catalogs was introduced in Xcode 6.0
-if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
-  test_gyp_path = 'test-assets-catalog.gyp'
-  test_app_path = 'Test App Assets Catalog Gyp.app'
-
-  test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
-  test.run_gyp(test_gyp_path, chdir='app-bundle')
-  test.build(test_gyp_path, test.ALL, chdir='app-bundle')
-
-  # Test that the extension is .bundle
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'Test App Assets Catalog Gyp'),
-      chdir='app-bundle')
-
-  # Info.plist
-  info_plist = test.built_file_path(
-      os.path.join(test_app_path, 'Info.plist'),
-      chdir='app-bundle')
-  # Resources
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'English.lproj/InfoPlist.strings'),
-      chdir='app-bundle')
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'English.lproj/MainMenu.nib'),
-      chdir='app-bundle')
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'English.lproj/Main_iPhone.storyboardc'),
-      chdir='app-bundle')
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'Assets.car'),
-      chdir='app-bundle')
-
-  # Packaging
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'PkgInfo'),
-      chdir='app-bundle')
-  test.built_file_must_match(
-      os.path.join(test_app_path, 'PkgInfo'), 'APPLause',
-      chdir='app-bundle')
-
-  test.pass_test()
diff --git a/tools/gyp/test/ios/gyptest-app-ios.py b/tools/gyp/test/ios/gyptest-app-ios.py
deleted file mode 100755
index f905254..0000000
--- a/tools/gyp/test/ios/gyptest-app-ios.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ios app bundles are built correctly.
-"""
-
-import TestGyp
-
-import subprocess
-import sys
-
-def CheckFileXMLPropertyList(file):
-  output = subprocess.check_output(['file', file])
-  # The double space after XML is intentional.
-  if not 'XML  document text' in output:
-    print 'File: Expected XML  document text, got %s' % output
-    test.fail_test()
-
-def CheckFileBinaryPropertyList(file):
-  output = subprocess.check_output(['file', file])
-  if not 'Apple binary property list' in output:
-    print 'File: Expected Apple binary property list, got %s' % output
-    test.fail_test()
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
-
-  test.run_gyp('test.gyp', chdir='app-bundle')
-
-  test.build('test.gyp', test.ALL, chdir='app-bundle')
-
-  # Test that the extension is .bundle
-  test.built_file_must_exist('Test App Gyp.app/Test App Gyp',
-                             chdir='app-bundle')
-
-  # Info.plist
-  info_plist = test.built_file_path('Test App Gyp.app/Info.plist',
-                                    chdir='app-bundle')
-  test.built_file_must_exist(info_plist)
-  CheckFileBinaryPropertyList(info_plist)
-
-  # XML Info.plist
-  info_plist = test.built_file_path('Test App Gyp XML.app/Info.plist',
-                                    chdir='app-bundle')
-  CheckFileXMLPropertyList(info_plist)
-
-  # Resources
-  strings_file = test.built_file_path(
-      'Test App Gyp.app/English.lproj/InfoPlist.strings',
-      chdir='app-bundle')
-  test.built_file_must_exist(strings_file)
-  CheckFileBinaryPropertyList(strings_file)
-
-  extra_plist_file = test.built_file_path(
-      'Test App Gyp.app/English.lproj/LanguageMap.plist',
-      chdir='app-bundle')
-  test.built_file_must_exist(extra_plist_file)
-  CheckFileBinaryPropertyList(extra_plist_file)
-
-  test.built_file_must_exist(
-      'Test App Gyp.app/English.lproj/MainMenu.nib',
-      chdir='app-bundle')
-  test.built_file_must_exist(
-      'Test App Gyp.app/English.lproj/Main_iPhone.storyboardc',
-      chdir='app-bundle')
-
-  # Packaging
-  test.built_file_must_exist('Test App Gyp.app/PkgInfo',
-                             chdir='app-bundle')
-  test.built_file_must_match('Test App Gyp.app/PkgInfo', 'APPLause',
-                             chdir='app-bundle')
-
-  test.pass_test()
diff --git a/tools/gyp/test/ios/gyptest-archs.py b/tools/gyp/test/ios/gyptest-archs.py
deleted file mode 100644
index c653d99..0000000
--- a/tools/gyp/test/ios/gyptest-archs.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that device and simulator bundles are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import collections
-import sys
-
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  test_cases = [
-    ('Default', 'TestArch32Bits', ['i386']),
-    ('Default-iphoneos', 'TestArch32Bits', ['armv7']),
-  ]
-
-  if TestMac.Xcode.Version() < '0510':
-    test_cases.extend([
-        ('Default', 'TestNoArchs', ['i386']),
-        ('Default-iphoneos', 'TestNoArchs', ['armv7'])])
-
-  if TestMac.Xcode.Version() >= '0500':
-    test_cases.extend([
-        ('Default', 'TestArch64Bits', ['x86_64']),
-        ('Default', 'TestMultiArchs', ['i386', 'x86_64']),
-        ('Default-iphoneos', 'TestArch64Bits', ['arm64']),
-        ('Default-iphoneos', 'TestMultiArchs', ['armv7', 'arm64'])])
-
-  test.run_gyp('test-archs.gyp', chdir='app-bundle')
-  for configuration, target, archs in test_cases:
-    is_device_build = configuration.endswith('-iphoneos')
-
-    kwds = collections.defaultdict(list)
-    if test.format == 'xcode':
-      if is_device_build:
-        configuration, sdk = configuration.split('-')
-        kwds['arguments'].extend(['-sdk', sdk])
-      if TestMac.Xcode.Version() < '0500':
-        kwds['arguments'].extend(['-arch', archs[0]])
-
-    test.set_configuration(configuration)
-    filename = '%s.app/%s' % (target, target)
-    test.build('test-archs.gyp', target, chdir='app-bundle', **kwds)
-    result_file = test.built_file_path(filename, chdir='app-bundle')
-
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, archs)
-
-  test.pass_test()
diff --git a/tools/gyp/test/ios/gyptest-crosscompile.py b/tools/gyp/test/ios/gyptest-crosscompile.py
deleted file mode 100644
index a081683..0000000
--- a/tools/gyp/test/ios/gyptest-crosscompile.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that tools are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import sys
-import os
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  oldenv = os.environ.copy()
-  try:
-    os.environ['GYP_CROSSCOMPILE'] = '1'
-    test.run_gyp('test-crosscompile.gyp', chdir='app-bundle')
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  test.set_configuration('Default')
-  test.build('test-crosscompile.gyp', 'TestHost', chdir='app-bundle')
-  result_file = test.built_file_path('TestHost', chdir='app-bundle')
-  test.must_exist(result_file)
-  TestMac.CheckFileType(test, result_file, ['x86_64'])
-
-  test.pass_test()
diff --git a/tools/gyp/test/ios/gyptest-deployment-target.py b/tools/gyp/test/ios/gyptest-deployment-target.py
deleted file mode 100644
index 6c09d9d..0000000
--- a/tools/gyp/test/ios/gyptest-deployment-target.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that IPHONEOS_DEPLOYMENT_TARGET works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
-
-  test.run_gyp('deployment-target.gyp', chdir='deployment-target')
-
-  test.build('deployment-target.gyp', test.ALL, chdir='deployment-target')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/ios/gyptest-extension.py b/tools/gyp/test/ios/gyptest-extension.py
deleted file mode 100755
index c5e76d9..0000000
--- a/tools/gyp/test/ios/gyptest-extension.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ios app extensions are built correctly.
-"""
-
-import TestGyp
-import TestMac
-import subprocess
-import sys
-
-def CheckStrip(p, expected):
-  if expected not in subprocess.check_output(['nm','-gU', p]):
-    print expected + " shouldn't get stripped out."
-    test.fail_test()
-
-def CheckEntrypoint(p, expected):
-  if expected not in subprocess.check_output(['nm', p]):
-    print expected + "not found."
-    test.fail_test()
-
-if sys.platform == 'darwin' and TestMac.Xcode.Version()>="0600":
-
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  test.run_gyp('extension.gyp', chdir='extension')
-
-  test.build('extension.gyp', 'ExtensionContainer', chdir='extension')
-
-  # Test that the extension is .appex
-  test.built_file_must_exist(
-      'ExtensionContainer.app/PlugIns/ActionExtension.appex',
-      chdir='extension')
-
-  path = test.built_file_path(
-      'ExtensionContainer.app/PlugIns/ActionExtension.appex/ActionExtension',
-      chdir='extension')
-  CheckStrip(path, "ActionViewController")
-  CheckEntrypoint(path, "_NSExtensionMain")
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/ios/gyptest-framework.py b/tools/gyp/test/ios/gyptest-framework.py
deleted file mode 100755
index a6dd857..0000000
--- a/tools/gyp/test/ios/gyptest-framework.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2016 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ios app frameworks are built correctly.
-"""
-
-import TestGyp
-import TestMac
-import subprocess
-import sys
-
-if sys.platform == 'darwin' and TestMac.Xcode.Version()>="0700":
-
-  test = TestGyp.TestGyp(formats=['ninja'])
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  test.run_gyp('framework.gyp', chdir='framework')
-
-  test.build('framework.gyp', 'iOSFramework', chdir='framework')
-
-  test.built_file_must_exist(
-      'iOSFramework.framework/Headers/iOSFramework.h',
-      chdir='framework')
-  test.built_file_must_exist(
-      'iOSFramework.framework/Headers/Thing.h',
-      chdir='framework')
-  test.built_file_must_exist(
-      'iOSFramework.framework/iOSFramework',
-      chdir='framework')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/ios/gyptest-per-config-settings.py b/tools/gyp/test/ios/gyptest-per-config-settings.py
deleted file mode 100644
index 7313e56..0000000
--- a/tools/gyp/test/ios/gyptest-per-config-settings.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that device and simulator bundles are built correctly.
-"""
-
-import plistlib
-import TestGyp
-import os
-import struct
-import subprocess
-import sys
-import tempfile
-import TestMac
-
-def CheckFileType(file, expected):
-  proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
-  o = proc.communicate()[0].strip()
-  assert not proc.returncode
-  if not expected in o:
-    print 'File: Expected %s, got %s' % (expected, o)
-    test.fail_test()
-
-def HasCerts():
-  # Because the bots do not have certs, don't check them if there are no
-  # certs available.
-  proc = subprocess.Popen(['security','find-identity','-p', 'codesigning',
-                           '-v'], stdout=subprocess.PIPE)
-  return "0 valid identities found" not in proc.communicate()[0].strip()
-
-def CheckSignature(file):
-  proc = subprocess.Popen(['codesign', '-v', file], stdout=subprocess.PIPE)
-  o = proc.communicate()[0].strip()
-  assert not proc.returncode
-  if "code object is not signed at all" in o:
-    print 'File %s not properly signed.' % (file)
-    test.fail_test()
-
-def CheckEntitlements(file, expected_entitlements):
-  with tempfile.NamedTemporaryFile() as temp:
-    proc = subprocess.Popen(['codesign', '--display', '--entitlements',
-                             temp.name, file], stdout=subprocess.PIPE)
-    o = proc.communicate()[0].strip()
-    assert not proc.returncode
-    data = temp.read()
-  entitlements = ParseEntitlements(data)
-  if not entitlements:
-    print 'No valid entitlements found in %s.' % (file)
-    test.fail_test()
-  if entitlements != expected_entitlements:
-    print 'Unexpected entitlements found in %s.' % (file)
-    test.fail_test()
-
-def ParseEntitlements(data):
-  if len(data) < 8:
-    return None
-  magic, length = struct.unpack('>II', data[:8])
-  if magic != 0xfade7171 or length != len(data):
-    return None
-  return data[8:]
-
-def GetXcodeVersionValue(type):
-  args = ['xcodebuild', '-version', '-sdk', 'iphoneos', type]
-  job = subprocess.Popen(args, stdout=subprocess.PIPE)
-  return job.communicate()[0].strip()
-
-def GetMachineBuild():
-  args = ['sw_vers', '-buildVersion']
-  job = subprocess.Popen(args, stdout=subprocess.PIPE)
-  return job.communicate()[0].strip()
-
-def CheckPlistvalue(plist, key, expected):
-  if key not in plist:
-    print '%s not set in plist' % key
-    test.fail_test()
-    return
-  actual = plist[key]
-  if actual != expected:
-    print 'File: Expected %s, got %s for %s' % (expected, actual, key)
-    test.fail_test()
-
-def CheckPlistNotSet(plist, key):
-  if key in plist:
-    print '%s should not be set in plist' % key
-    test.fail_test()
-    return
-
-def ConvertBinaryPlistToXML(path):
-  proc = subprocess.call(['plutil', '-convert', 'xml1', path],
-                         stdout=subprocess.PIPE)
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  test.run_gyp('test-device.gyp', chdir='app-bundle')
-
-  test_configs = ['Default-iphoneos', 'Default']
-  for configuration in test_configs:
-    test.set_configuration(configuration)
-    test.build('test-device.gyp', 'test_app', chdir='app-bundle')
-    result_file = test.built_file_path('Test App Gyp.app/Test App Gyp',
-                                       chdir='app-bundle')
-    test.must_exist(result_file)
-    info_plist = test.built_file_path('Test App Gyp.app/Info.plist',
-                                      chdir='app-bundle')
-    plist = plistlib.readPlist(info_plist)
-    xcode_version = TestMac.Xcode.Version()
-    if xcode_version >= '0720':
-      if len(plist) != 23:
-        print 'plist should have 23 entries, but it has %s' % len(plist)
-        test.fail_test()
-
-    # Values that will hopefully never change.
-    CheckPlistvalue(plist, 'CFBundleDevelopmentRegion', 'English')
-    CheckPlistvalue(plist, 'CFBundleExecutable', 'Test App Gyp')
-    CheckPlistvalue(plist, 'CFBundleIdentifier', 'com.google.Test App Gyp')
-    CheckPlistvalue(plist, 'CFBundleInfoDictionaryVersion', '6.0')
-    CheckPlistvalue(plist, 'CFBundleName', 'Test App Gyp')
-    CheckPlistvalue(plist, 'CFBundlePackageType', 'APPL')
-    CheckPlistvalue(plist, 'CFBundleShortVersionString', '1.0')
-    CheckPlistvalue(plist, 'CFBundleSignature', 'ause')
-    CheckPlistvalue(plist, 'CFBundleVersion', '1')
-    CheckPlistvalue(plist, 'NSMainNibFile', 'MainMenu')
-    CheckPlistvalue(plist, 'NSPrincipalClass', 'NSApplication')
-    CheckPlistvalue(plist, 'UIDeviceFamily', [1, 2])
-
-    # Values that get pulled from xcodebuild.
-    machine_build = GetMachineBuild()
-    platform_version = GetXcodeVersionValue('ProductVersion')
-    sdk_build = GetXcodeVersionValue('ProductBuildVersion')
-    xcode_build = TestMac.Xcode.Build()
-
-    # Xcode keeps changing what gets included in executable plists, and it
-    # changes between device and simulator builds.  Allow the strictest tests for
-    # Xcode 7.2 and above.
-    if xcode_version >= '0720':
-      CheckPlistvalue(plist, 'BuildMachineOSBuild', machine_build)
-      CheckPlistvalue(plist, 'DTCompiler', 'com.apple.compilers.llvm.clang.1_0')
-      CheckPlistvalue(plist, 'DTPlatformVersion', platform_version)
-      CheckPlistvalue(plist, 'DTSDKBuild', sdk_build)
-      CheckPlistvalue(plist, 'DTXcode', xcode_version)
-      CheckPlistvalue(plist, 'DTXcodeBuild', xcode_build)
-      CheckPlistvalue(plist, 'MinimumOSVersion', '8.0')
-
-
-    if configuration == 'Default-iphoneos':
-      platform_name = 'iphoneos'
-      CheckFileType(result_file, 'armv7')
-      CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneOS'])
-      # Apple keeps changing their mind.
-      if xcode_version >= '0720':
-        CheckPlistvalue(plist, 'DTPlatformBuild', sdk_build)
-    else:
-      platform_name = 'iphonesimulator'
-      CheckFileType(result_file, 'i386')
-      CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneSimulator'])
-      if xcode_version >= '0720':
-        CheckPlistvalue(plist, 'DTPlatformBuild', '')
-
-    CheckPlistvalue(plist, 'DTPlatformName', platform_name)
-    CheckPlistvalue(plist, 'DTSDKName', platform_name + platform_version)
-
-
-    if HasCerts() and configuration == 'Default-iphoneos':
-      test.build('test-device.gyp', 'sig_test', chdir='app-bundle')
-      result_file = test.built_file_path('sigtest.app/sigtest',
-                                         chdir='app-bundle')
-      CheckSignature(result_file)
-      info_plist = test.built_file_path('sigtest.app/Info.plist',
-                                        chdir='app-bundle')
-
-      plist = plistlib.readPlist(info_plist)
-      CheckPlistvalue(plist, 'UIDeviceFamily', [1])
-
-      entitlements_file = test.built_file_path('sig_test.xcent',
-                                               chdir='app-bundle')
-      if os.path.isfile(entitlements_file):
-        expected_entitlements = open(entitlements_file).read()
-        CheckEntitlements(result_file, expected_entitlements)
-
-  test.pass_test()
diff --git a/tools/gyp/test/ios/gyptest-watch.py b/tools/gyp/test/ios/gyptest-watch.py
deleted file mode 100755
index f5c4601..0000000
--- a/tools/gyp/test/ios/gyptest-watch.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ios watch extensions and apps are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-if sys.platform == 'darwin' and TestMac.Xcode.Version() >= "0620":
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  test.run_gyp('watch.gyp', chdir='watch')
-
-  test.build(
-      'watch.gyp',
-      'WatchContainer',
-      chdir='watch')
-
-  # Test that the extension exists
-  test.built_file_must_exist(
-      'WatchContainer.app/PlugIns/WatchKitExtension.appex',
-      chdir='watch')
-
-  # Test that the watch app exists
-  test.built_file_must_exist(
-      'WatchContainer.app/PlugIns/WatchKitExtension.appex/WatchApp.app',
-      chdir='watch')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/ios/gyptest-xcode-ninja.py b/tools/gyp/test/ios/gyptest-xcode-ninja.py
deleted file mode 100644
index 609db8c..0000000
--- a/tools/gyp/test/ios/gyptest-xcode-ninja.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that the xcode-ninja GYP_GENERATOR runs and builds correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  # Run ninja and xcode-ninja
-  test.formats = ['ninja', 'xcode-ninja']
-  test.run_gyp('test.gyp', chdir='app-bundle')
-
-  # If it builds the target, it works.
-  test.build('test.ninja.gyp', chdir='app-bundle')
-  test.pass_test()
diff --git a/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index 562c5ef..0000000
--- a/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
-  "images" : [
-    {
-      "size" : "14.5x14.5",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "notificationCenter",
-      "subtype" : "38mm"
-    },
-    {
-      "size" : "18x18",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "notificationCenter",
-      "subtype" : "42mm"
-    },
-    {
-      "size" : "29x29",
-      "idiom" : "watch",
-      "role" : "companionSettings",
-      "scale" : "2x"
-    },
-    {
-      "size" : "29.3x29.3",
-      "idiom" : "watch",
-      "role" : "companionSettings",
-      "scale" : "3x"
-    },
-    {
-      "size" : "40x40",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "appLauncher",
-      "subtype" : "38mm"
-    },
-    {
-      "size" : "44x44",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "appLauncher",
-      "subtype" : "42mm"
-    },
-    {
-      "size" : "86x86",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "quickLook",
-      "subtype" : "38mm"
-    },
-    {
-      "size" : "98x98",
-      "idiom" : "watch",
-      "scale" : "2x",
-      "role" : "quickLook",
-      "subtype" : "42mm"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json b/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json
deleted file mode 100644
index ed123fe..0000000
--- a/tools/gyp/test/ios/watch/WatchApp/Images.xcassets/LaunchImage.launchimage/Contents.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
-  "images" : [
-    {
-      "orientation" : "portrait",
-      "idiom" : "watch",
-      "extent" : "full-screen",
-      "minimum-system-version" : "8.0",
-      "subtype" : "38mm",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "watch",
-      "extent" : "full-screen",
-      "minimum-system-version" : "8.0",
-      "subtype" : "42mm",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchApp/Info.plist b/tools/gyp/test/ios/watch/WatchApp/Info.plist
deleted file mode 100644
index 3cf65b8..0000000
--- a/tools/gyp/test/ios/watch/WatchApp/Info.plist
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>CFBundleDevelopmentRegion</key>
-  <string>en</string>
-  <key>CFBundleDisplayName</key>
-  <string>WatchApp</string>
-  <key>CFBundleExecutable</key>
-  <string>$(EXECUTABLE_NAME)</string>
-  <key>CFBundleIdentifier</key>
-  <string>com.google.gyptest.watch.watchapp</string>
-  <key>CFBundleInfoDictionaryVersion</key>
-  <string>6.0</string>
-  <key>CFBundleName</key>
-  <string>$(PRODUCT_NAME)</string>
-  <key>CFBundlePackageType</key>
-  <string>APPL</string>
-  <key>CFBundleShortVersionString</key>
-  <string>1.0</string>
-  <key>CFBundleSignature</key>
-  <string>????</string>
-  <key>CFBundleVersion</key>
-  <string>1</string>
-  <key>UISupportedInterfaceOrientations</key>
-  <array>
-    <string>UIInterfaceOrientationPortrait</string>
-    <string>UIInterfaceOrientationPortraitUpsideDown</string>
-  </array>
-  <key>WKCompanionAppBundleIdentifier</key>
-  <string>com.google.gyptest.watch</string>
-  <key>WKWatchKitApp</key>
-  <true/>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/watch/WatchApp/Interface.storyboard b/tools/gyp/test/ios/watch/WatchApp/Interface.storyboard
deleted file mode 100644
index 5f52cb6..0000000
--- a/tools/gyp/test/ios/watch/WatchApp/Interface.storyboard
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder.WatchKit.Storyboard" version="3.0" toolsVersion="6221" systemVersion="13E28" targetRuntime="watchKit" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="AgC-eL-Hgc">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6213"/>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBWatchKitPlugin" version="3733"/>
-    </dependencies>
-    <scenes>
-        <!--Interface Controller-->
-        <scene sceneID="aou-V4-d1y">
-            <objects>
-                <controller id="AgC-eL-Hgc" customClass="InterfaceController" customModuleProvider=""/>
-            </objects>
-        </scene>
-    </scenes>
-</document>
diff --git a/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.h b/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.h
deleted file mode 100644
index 510e230..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.h
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface AppDelegate : UIResponder <UIApplicationDelegate>
-
-@property (strong, nonatomic) UIWindow *window;
-
-@end
-
diff --git a/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.m b/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.m
deleted file mode 100644
index 1197bc1..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/AppDelegate.m
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "AppDelegate.h"
-
-@interface AppDelegate ()
-
-@end
-
-@implementation AppDelegate
-
-- (BOOL)application:(UIApplication*)application
-    didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
-  // Override point for customization after application launch.
-  return YES;
-}
-
-@end
diff --git a/tools/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard b/tools/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard
deleted file mode 100644
index e8f3cfb..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/Base.lproj/Main.storyboard
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6162" systemVersion="14A238h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6160"/>
-    </dependencies>
-    <scenes>
-        <!--View Controller-->
-        <scene sceneID="tne-QT-ifu">
-            <objects>
-                <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
-                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
-                        <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-                    </view>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
-            </objects>
-        </scene>
-    </scenes>
-</document>
diff --git a/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index f697f61..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "iphone",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "60x60",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json b/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
deleted file mode 100644
index 4458b40..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/Images.xcassets/LaunchImage.launchimage/Contents.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "images" : [
-    {
-      "orientation" : "portrait",
-      "idiom" : "iphone",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "iphone",
-      "subtype" : "retina4",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "1x"
-    },
-    {
-      "orientation" : "landscape",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "1x"
-    },
-    {
-      "orientation" : "portrait",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    },
-    {
-      "orientation" : "landscape",
-      "idiom" : "ipad",
-      "extent" : "full-screen",
-      "minimum-system-version" : "7.0",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchContainer/Info.plist b/tools/gyp/test/ios/watch/WatchContainer/Info.plist
deleted file mode 100644
index a40319c..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/Info.plist
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-    <key>CFBundleDevelopmentRegion</key>
-    <string>en</string>
-    <key>CFBundleExecutable</key>
-    <string>WatchContainer</string>
-    <key>CFBundleIdentifier</key>
-    <string>com.google.gyptest.watch</string>
-    <key>CFBundleInfoDictionaryVersion</key>
-    <string>6.0</string>
-    <key>CFBundleName</key>
-    <string>${PRODUCT_NAME}</string>
-    <key>CFBundlePackageType</key>
-    <string>APPL</string>
-    <key>CFBundleShortVersionString</key>
-    <string>1.0</string>
-    <key>CFBundleSignature</key>
-    <string>????</string>
-    <key>CFBundleVersion</key>
-    <string>1</string>
-    <key>LSRequiresIPhoneOS</key>
-    <true/>
-    <key>UIMainStoryboardFile</key>
-    <string>Main</string>
-    <key>UIRequiredDeviceCapabilities</key>
-    <array>
-        <string>armv7</string>
-    </array>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/watch/WatchContainer/ViewController.h b/tools/gyp/test/ios/watch/WatchContainer/ViewController.h
deleted file mode 100644
index fad7754..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/ViewController.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface ViewController : UIViewController
-
-
-@end
-
diff --git a/tools/gyp/test/ios/watch/WatchContainer/ViewController.m b/tools/gyp/test/ios/watch/WatchContainer/ViewController.m
deleted file mode 100644
index 3810fa9..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/ViewController.m
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "ViewController.h"
-
-@interface ViewController ()
-
-
-@end
-
-@implementation ViewController
-
-- (void)viewDidLoad {
-  [super viewDidLoad];
-  // Do any additional setup after loading the view, typically from a nib.
-}
-
-- (void)didReceiveMemoryWarning {
-  [super didReceiveMemoryWarning];
-  // Dispose of any resources that can be recreated.
-}
-
-@end
diff --git a/tools/gyp/test/ios/watch/WatchContainer/main.m b/tools/gyp/test/ios/watch/WatchContainer/main.m
deleted file mode 100644
index 47aecb5..0000000
--- a/tools/gyp/test/ios/watch/WatchContainer/main.m
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-//
-#import <UIKit/UIKit.h>
-#import "AppDelegate.h"
-
-int main(int argc, char* argv[]) {
-  @autoreleasepool {
-    return UIApplicationMain(argc, argv, nil,
-        NSStringFromClass([AppDelegate class]));
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json b/tools/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json
deleted file mode 100644
index f80d950..0000000
--- a/tools/gyp/test/ios/watch/WatchKitExtension/Images.xcassets/MyImage.imageset/Contents.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "universal",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "3x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
diff --git a/tools/gyp/test/ios/watch/WatchKitExtension/Info.plist b/tools/gyp/test/ios/watch/WatchKitExtension/Info.plist
deleted file mode 100644
index 7a35464..0000000
--- a/tools/gyp/test/ios/watch/WatchKitExtension/Info.plist
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>CFBundleDevelopmentRegion</key>
-  <string>en</string>
-  <key>CFBundleDisplayName</key>
-  <string>WatchContainer WatchKit Extension</string>
-  <key>CFBundleExecutable</key>
-  <string>$(EXECUTABLE_NAME)</string>
-  <key>CFBundleIdentifier</key>
-  <string>com.google.gyptest.watch.watchkitextension</string>
-  <key>CFBundleInfoDictionaryVersion</key>
-  <string>6.0</string>
-  <key>CFBundleName</key>
-  <string>$(PRODUCT_NAME)</string>
-  <key>CFBundlePackageType</key>
-  <string>XPC!</string>
-  <key>CFBundleShortVersionString</key>
-  <string>1.0</string>
-  <key>CFBundleSignature</key>
-  <string>????</string>
-  <key>CFBundleVersion</key>
-  <string>1.0</string>
-  <key>NSExtension</key>
-  <dict>
-    <key>NSExtensionAttributes</key>
-    <dict>
-      <key>WKAppBundleIdentifier</key>
-      <string>com.google.gyptest.watch.watchapp</string>
-    </dict>
-    <key>NSExtensionPointIdentifier</key>
-    <string>com.apple.watchkit</string>
-  </dict>
-  <key>RemoteInterfacePrincipalClass</key>
-  <string>InterfaceController</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h b/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h
deleted file mode 100644
index c3395eb..0000000
--- a/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.h
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Foundation/Foundation.h>
-#import <WatchKit/WatchKit.h>
-
-@interface InterfaceController : WKInterfaceController
-@end
-
diff --git a/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m b/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m
deleted file mode 100644
index 564b7d1..0000000
--- a/tools/gyp/test/ios/watch/WatchKitExtension/InterfaceController.m
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "InterfaceController.h"
-
-@implementation InterfaceController
-
-- (instancetype)initWithContext:(id)context {
-  if ((self = [super initWithContext:context])) {
-    // -initWithContext:
-  }
-  return self;
-}
-
-- (void)willActivate {
-  // -willActivate
-}
-
-- (void)didDeactivate {
-  // -didDeactivate
-}
-
-@end
-
diff --git a/tools/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard b/tools/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard
deleted file mode 100644
index 5aa5818..0000000
--- a/tools/gyp/test/ios/watch/WatchKitExtension/MainInterface.storyboard
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6148" systemVersion="14A229a" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" initialViewController="ObA-dk-sSI">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6147"/>
-    </dependencies>
-    <scenes>
-        <!--Action View Controller - Image-->
-        <scene sceneID="7MM-of-jgj">
-            <objects>
-                <viewController title="Image" id="ObA-dk-sSI" customClass="ActionViewController" customModuleProvider="" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="qkL-Od-lgU"/>
-                        <viewControllerLayoutGuide type="bottom" id="n38-gi-rB5"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="zMn-AG-sqS">
-                        <rect key="frame" x="0.0" y="0.0" width="320" height="528"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <subviews>
-                            <imageView userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="9ga-4F-77Z">
-                                <rect key="frame" x="0.0" y="64" width="320" height="464"/>
-                            </imageView>
-                            <navigationBar contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="NOA-Dm-cuz">
-                                <rect key="frame" x="0.0" y="20" width="320" height="44"/>
-                                <items>
-                                    <navigationItem id="3HJ-uW-3hn">
-                                        <barButtonItem key="leftBarButtonItem" title="Done" style="done" id="WYi-yp-eM6">
-                                            <connections>
-                                                <action selector="done" destination="ObA-dk-sSI" id="Qdu-qn-U6V"/>
-                                            </connections>
-                                        </barButtonItem>
-                                    </navigationItem>
-                                </items>
-                            </navigationBar>
-                        </subviews>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
-                        <constraints>
-                            <constraint firstAttribute="trailing" secondItem="NOA-Dm-cuz" secondAttribute="trailing" id="A05-Pj-hrr"/>
-                            <constraint firstItem="9ga-4F-77Z" firstAttribute="top" secondItem="NOA-Dm-cuz" secondAttribute="bottom" id="Fps-3D-QQW"/>
-                            <constraint firstItem="NOA-Dm-cuz" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="HxO-8t-aoh"/>
-                            <constraint firstAttribute="trailing" secondItem="9ga-4F-77Z" secondAttribute="trailing" id="Ozw-Hg-0yh"/>
-                            <constraint firstItem="9ga-4F-77Z" firstAttribute="leading" secondItem="zMn-AG-sqS" secondAttribute="leading" id="XH5-ld-ONA"/>
-                            <constraint firstItem="n38-gi-rB5" firstAttribute="top" secondItem="9ga-4F-77Z" secondAttribute="bottom" id="eQg-nn-Zy4"/>
-                            <constraint firstItem="NOA-Dm-cuz" firstAttribute="top" secondItem="qkL-Od-lgU" secondAttribute="bottom" id="we0-1t-bgp"/>
-                        </constraints>
-                    </view>
-                    <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
-                    <size key="freeformSize" width="320" height="528"/>
-                    <connections>
-                        <outlet property="imageView" destination="9ga-4F-77Z" id="5y6-5w-9QO"/>
-                        <outlet property="view" destination="zMn-AG-sqS" id="Qma-de-2ek"/>
-                    </connections>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="X47-rx-isc" userLabel="First Responder" sceneMemberID="firstResponder"/>
-            </objects>
-            <point key="canvasLocation" x="252" y="-124"/>
-        </scene>
-    </scenes>
-    <simulatedMetricsContainer key="defaultSimulatedMetrics">
-        <simulatedStatusBarMetrics key="statusBar"/>
-        <simulatedOrientationMetrics key="orientation"/>
-        <simulatedScreenMetrics key="destination" type="retina4"/>
-    </simulatedMetricsContainer>
-</document>
diff --git a/tools/gyp/test/ios/watch/watch.gyp b/tools/gyp/test/ios/watch/watch.gyp
deleted file mode 100644
index 49be555..0000000
--- a/tools/gyp/test/ios/watch/watch.gyp
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-  'target_defaults': {
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'SDKROOT': 'iphoneos',
-        'IPHONEOS_DEPLOYMENT_TARGET': '8.2',
-        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
-      }
-  },
-  'targets': [
-    {
-      'target_name': 'WatchContainer',
-      'product_name': 'WatchContainer',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'WatchContainer/Base.lproj/Main.storyboard',
-      ],
-      'sources': [
-        'WatchContainer/AppDelegate.h',
-        'WatchContainer/AppDelegate.m',
-        'WatchContainer/ViewController.h',
-        'WatchContainer/ViewController.m',
-        'WatchContainer/main.m',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/WatchContainer.app/PlugIns',
-          'files': [
-            '<(PRODUCT_DIR)/WatchKitExtension.appex',
-      ]}],
-      'dependencies': [
-        'WatchKitExtension'
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'WatchContainer/Info.plist',
-      },
-    },
-    {
-      'target_name': 'WatchKitExtension',
-      'product_name': 'WatchKitExtension',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'ios_watchkit_extension': 1,
-      'sources': [
-        'WatchKitExtension/InterfaceController.h',
-        'WatchKitExtension/InterfaceController.m',
-      ],
-      'mac_bundle_resources': [
-        'WatchKitExtension/Images.xcassets',
-        '<(PRODUCT_DIR)/WatchApp.app',
-      ],
-      'dependencies': [
-        'WatchApp'
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/WatchKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'WatchKitExtension/Info.plist',
-        'SKIP_INSTALL': 'YES',
-        'COPY_PHASE_STRIP': 'NO',
-      },
-    },
-    {
-      'target_name': 'WatchApp',
-      'product_name': 'WatchApp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'ios_watch_app': 1,
-      'mac_bundle_resources': [
-        'WatchApp/Images.xcassets',
-        'WatchApp/Interface.storyboard',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'WatchApp/Info.plist',
-        'SKIP_INSTALL': 'YES',
-        'COPY_PHASE_STRIP': 'NO',
-        'TARGETED_DEVICE_FAMILY': '4',
-        'TARGETED_DEVICE_FAMILY[sdk=iphonesimulator*]': '1,4',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/ios/xctests/App/AppDelegate.h b/tools/gyp/test/ios/xctests/App/AppDelegate.h
deleted file mode 100644
index f8efce9..0000000
--- a/tools/gyp/test/ios/xctests/App/AppDelegate.h
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface AppDelegate : UIResponder<UIApplicationDelegate>
-
-@property(strong, nonatomic) UIWindow* window;
-
-@end
diff --git a/tools/gyp/test/ios/xctests/App/AppDelegate.m b/tools/gyp/test/ios/xctests/App/AppDelegate.m
deleted file mode 100644
index 825dda7..0000000
--- a/tools/gyp/test/ios/xctests/App/AppDelegate.m
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "AppDelegate.h"
-
-@interface AppDelegate ()
-
-@end
-
-@implementation AppDelegate
-
-- (BOOL)application:(UIApplication*)application
-    didFinishLaunchingWithOptions:(NSDictionary*)launchOptions {
-  return YES;
-}
-
-@end
diff --git a/tools/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib b/tools/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib
deleted file mode 100644
index 063dc5e..0000000
--- a/tools/gyp/test/ios/xctests/App/Base.lproj/LaunchScreen.xib
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="6214" systemVersion="14A314h" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6207"/>
-        <capability name="Constraints with non-1.0 multipliers" minToolsVersion="5.1"/>
-    </dependencies>
-    <objects>
-        <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
-        <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
-        <view contentMode="scaleToFill" id="iN0-l3-epB">
-            <rect key="frame" x="0.0" y="0.0" width="480" height="480"/>
-            <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-            <subviews>
-                <label opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="  Copyright (c) 2014 Google. All rights reserved." textAlignment="center" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" minimumFontSize="9" translatesAutoresizingMaskIntoConstraints="NO" id="8ie-xW-0ye">
-                    <rect key="frame" x="20" y="439" width="441" height="21"/>
-                    <fontDescription key="fontDescription" type="system" pointSize="17"/>
-                    <color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
-                    <nil key="highlightedColor"/>
-                </label>
-                <label opaque="NO" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="App" textAlignment="center" lineBreakMode="middleTruncation" baselineAdjustment="alignBaselines" minimumFontSize="18" translatesAutoresizingMaskIntoConstraints="NO" id="kId-c2-rCX">
-                    <rect key="frame" x="20" y="140" width="441" height="43"/>
-                    <fontDescription key="fontDescription" type="boldSystem" pointSize="36"/>
-                    <color key="textColor" cocoaTouchSystemColor="darkTextColor"/>
-                    <nil key="highlightedColor"/>
-                </label>
-            </subviews>
-            <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-            <constraints>
-                <constraint firstItem="kId-c2-rCX" firstAttribute="centerY" secondItem="iN0-l3-epB" secondAttribute="bottom" multiplier="1/3" constant="1" id="5cJ-9S-tgC"/>
-                <constraint firstAttribute="centerX" secondItem="kId-c2-rCX" secondAttribute="centerX" id="Koa-jz-hwk"/>
-                <constraint firstAttribute="bottom" secondItem="8ie-xW-0ye" secondAttribute="bottom" constant="20" id="Kzo-t9-V3l"/>
-                <constraint firstItem="8ie-xW-0ye" firstAttribute="leading" secondItem="iN0-l3-epB" secondAttribute="leading" constant="20" symbolic="YES" id="MfP-vx-nX0"/>
-                <constraint firstAttribute="centerX" secondItem="8ie-xW-0ye" secondAttribute="centerX" id="ZEH-qu-HZ9"/>
-                <constraint firstItem="kId-c2-rCX" firstAttribute="leading" secondItem="iN0-l3-epB" secondAttribute="leading" constant="20" symbolic="YES" id="fvb-Df-36g"/>
-            </constraints>
-            <nil key="simulatedStatusBarMetrics"/>
-            <freeformSimulatedSizeMetrics key="simulatedDestinationMetrics"/>
-            <point key="canvasLocation" x="548" y="455"/>
-        </view>
-    </objects>
-</document>
diff --git a/tools/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard b/tools/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard
deleted file mode 100644
index f56d2f3..0000000
--- a/tools/gyp/test/ios/xctests/App/Base.lproj/Main.storyboard
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="6211" systemVersion="14A298i" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
-    <dependencies>
-        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="6204"/>
-    </dependencies>
-    <scenes>
-        <!--View Controller-->
-        <scene sceneID="tne-QT-ifu">
-            <objects>
-                <viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
-                    <layoutGuides>
-                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
-                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
-                    </layoutGuides>
-                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
-                        <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
-                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
-                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
-                    </view>
-                </viewController>
-                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
-            </objects>
-        </scene>
-    </scenes>
-</document>
diff --git a/tools/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index 36d2c80..0000000
--- a/tools/gyp/test/ios/xctests/App/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "iphone",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "29x29",
-      "scale" : "3x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "40x40",
-      "scale" : "3x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "60x60",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "iphone",
-      "size" : "60x60",
-      "scale" : "3x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "29x29",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "40x40",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "ipad",
-      "size" : "76x76",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/ios/xctests/App/Info.plist b/tools/gyp/test/ios/xctests/App/Info.plist
deleted file mode 100644
index 3f938f6..0000000
--- a/tools/gyp/test/ios/xctests/App/Info.plist
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>en</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.gyptest.App</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSRequiresIPhoneOS</key>
-	<true/>
-	<key>UILaunchStoryboardName</key>
-	<string>LaunchScreen</string>
-	<key>UIMainStoryboardFile</key>
-	<string>Main</string>
-	<key>UIRequiredDeviceCapabilities</key>
-	<array>
-		<string>armv7</string>
-	</array>
-	<key>UISupportedInterfaceOrientations</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-	<key>UISupportedInterfaceOrientations~ipad</key>
-	<array>
-		<string>UIInterfaceOrientationPortrait</string>
-		<string>UIInterfaceOrientationPortraitUpsideDown</string>
-		<string>UIInterfaceOrientationLandscapeLeft</string>
-		<string>UIInterfaceOrientationLandscapeRight</string>
-	</array>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/xctests/App/ViewController.h b/tools/gyp/test/ios/xctests/App/ViewController.h
deleted file mode 100644
index 95a281e..0000000
--- a/tools/gyp/test/ios/xctests/App/ViewController.h
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface ViewController : UIViewController
-
-@end
diff --git a/tools/gyp/test/ios/xctests/App/ViewController.m b/tools/gyp/test/ios/xctests/App/ViewController.m
deleted file mode 100644
index d38e3c5..0000000
--- a/tools/gyp/test/ios/xctests/App/ViewController.m
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "ViewController.h"
-
-@interface ViewController ()
-
-@end
-
-@implementation ViewController
-
-- (void)viewDidLoad {
-  [super viewDidLoad];
-}
-
-- (void)didReceiveMemoryWarning {
-  [super didReceiveMemoryWarning];
-}
-
-@end
diff --git a/tools/gyp/test/ios/xctests/App/main.m b/tools/gyp/test/ios/xctests/App/main.m
deleted file mode 100644
index 8336807..0000000
--- a/tools/gyp/test/ios/xctests/App/main.m
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-#import "AppDelegate.h"
-
-int main(int argc, char* argv[]) {
-  @autoreleasepool {
-    return UIApplicationMain(
-        argc, argv, nil, NSStringFromClass([AppDelegate class]));
-  }
-}
diff --git a/tools/gyp/test/ios/xctests/AppTests/AppTests.m b/tools/gyp/test/ios/xctests/AppTests/AppTests.m
deleted file mode 100644
index 22121b0..0000000
--- a/tools/gyp/test/ios/xctests/AppTests/AppTests.m
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-#import <XCTest/XCTest.h>
-
-@interface AppTests : XCTestCase
-
-@end
-
-@implementation AppTests
-
-- (void)setUp {
-    [super setUp];
-}
-
-- (void)tearDown {
-    [super tearDown];
-}
-
-- (void)testExample {
-    XCTAssert(YES, @"Pass");
-}
-
-- (void)testPerformanceExample {
-    [self measureBlock:^{
-    }];
-}
-
-@end
diff --git a/tools/gyp/test/ios/xctests/AppTests/Info.plist b/tools/gyp/test/ios/xctests/AppTests/Info.plist
deleted file mode 100644
index d43ff4b..0000000
--- a/tools/gyp/test/ios/xctests/AppTests/Info.plist
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>en</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.gyptest.AppTests</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>BNDL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/ios/xctests/gyptest-xctests.py b/tools/gyp/test/ios/xctests/gyptest-xctests.py
deleted file mode 100644
index 42610d1..0000000
--- a/tools/gyp/test/ios/xctests/gyptest-xctests.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that iOS XCTests can be built correctly.
-"""
-
-import TestGyp
-
-import os
-import subprocess
-import sys
-
-def HasCerts():
-  # Because the bots do not have certs, don't check them if there are no
-  # certs available.
-  proc = subprocess.Popen(['security','find-identity','-p', 'codesigning',
-                           '-v'], stdout=subprocess.PIPE)
-  return "0 valid identities found" not in proc.communicate()[0].strip()
-
-if sys.platform == "darwin":
-  test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
-  test.run_gyp('xctests.gyp')
-  test_configs = ['Default']
-  # TODO(crbug.com/557418): Enable this once xcodebuild works for iOS devices.
-  #if HasCerts() and test.format == 'xcode':
-  #  test_configs.append('Default-iphoneos')
-  for config in test_configs:
-    test.set_configuration(config)
-    test.build('xctests.gyp', test.ALL)
-    test.built_file_must_exist('app_under_test.app/app_under_test')
-    test.built_file_must_exist('app_tests.xctest/app_tests')
-    if 'ninja' in test.format:
-      test.built_file_must_exist('obj/AppTests/app_tests.AppTests.i386.o')
-      test.built_file_must_exist('obj/AppTests/app_tests.AppTests.x86_64.o')
-    elif test.format == 'xcode':
-      xcode_object_path = os.path.join('..', 'xctests.build',
-                                       'Default-iphonesimulator',
-                                       'app_tests.build', 'Objects-normal',
-                                       '%s', 'AppTests.o')
-      test.built_file_must_exist(xcode_object_path % 'i386')
-      test.built_file_must_exist(xcode_object_path % 'x86_64')
-  test.pass_test()
diff --git a/tools/gyp/test/ios/xctests/xctests.gyp b/tools/gyp/test/ios/xctests/xctests.gyp
deleted file mode 100644
index 8d4d639..0000000
--- a/tools/gyp/test/ios/xctests/xctests.gyp
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'conditions': [
-    ['"<(GENERATOR)"=="ninja"', {
-      'make_global_settings': [
-        ['CC', '/usr/bin/clang'],
-        ['CXX', '/usr/bin/clang++'],
-      ],
-    }]
-  ],
-  'target_defaults': {
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-fobjc-abi-version=2',
-        ],
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'SDKROOT': 'iphonesimulator',  # -isysroot
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-        'IPHONEOS_DEPLOYMENT_TARGET': '9.0',
-        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
-      }
-  },
-  'targets': [
-    {
-      'target_name': 'app_under_test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'App/Base.lproj/LaunchScreen.xib',
-        'App/Base.lproj/Main.storyboard',
-      ],
-      'sources': [
-        'App/AppDelegate.h',
-        'App/AppDelegate.m',
-        'App/ViewController.h',
-        'App/ViewController.m',
-        'App/main.m',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'App/Info.plist',
-      },
-    },
-    {
-      'target_name': 'app_tests',
-      'type': 'loadable_module',
-      'mac_xctest_bundle': 1,
-      'sources': [
-        'AppTests/AppTests.m',
-      ],
-      'dependencies': [
-        'app_under_test'
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-        ],
-      },
-      'xcode_settings': {
-        'WRAPPER_EXTENSION': 'xctest',
-        'INFOPLIST_FILE': 'AppTests/Info.plist',
-        'BUNDLE_LOADER': '$(BUILT_PRODUCTS_DIR)/app_under_test.app/app_under_test',
-        'TEST_HOST': '$(BUNDLE_LOADER)',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/lib/README.txt b/tools/gyp/test/lib/README.txt
deleted file mode 100644
index b3d7245..0000000
--- a/tools/gyp/test/lib/README.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-Supporting modules for GYP testing.
-
-    TestCmd.py
-    TestCommon.py
-
-        Modules for generic testing of command-line utilities,
-        specifically including the ability to copy a test configuration
-        to temporary directories (with default cleanup on exit) as part
-        of running test scripts that invoke commands, compare actual
-        against expected output, etc.
-
-        Our copies of these come from the SCons project,
-        http://www.scons.org/.
-
-    TestGyp.py
-
-        Modules for GYP-specific tests, of course.
diff --git a/tools/gyp/test/lib/TestCmd.py b/tools/gyp/test/lib/TestCmd.py
deleted file mode 100644
index 7140361..0000000
--- a/tools/gyp/test/lib/TestCmd.py
+++ /dev/null
@@ -1,1597 +0,0 @@
-"""
-TestCmd.py:  a testing framework for commands and scripts.
-
-The TestCmd module provides a framework for portable automated testing
-of executable commands and scripts (in any language, not just Python),
-especially commands and scripts that require file system interaction.
-
-In addition to running tests and evaluating conditions, the TestCmd
-module manages and cleans up one or more temporary workspace
-directories, and provides methods for creating files and directories in
-those workspace directories from in-line data, here-documents), allowing
-tests to be completely self-contained.
-
-A TestCmd environment object is created via the usual invocation:
-
-    import TestCmd
-    test = TestCmd.TestCmd()
-
-There are a bunch of keyword arguments available at instantiation:
-
-    test = TestCmd.TestCmd(description = 'string',
-                           program = 'program_or_script_to_test',
-                           interpreter = 'script_interpreter',
-                           workdir = 'prefix',
-                           subdir = 'subdir',
-                           verbose = Boolean,
-                           match = default_match_function,
-                           diff = default_diff_function,
-                           combine = Boolean)
-
-There are a bunch of methods that let you do different things:
-
-    test.verbose_set(1)
-
-    test.description_set('string')
-
-    test.program_set('program_or_script_to_test')
-
-    test.interpreter_set('script_interpreter')
-    test.interpreter_set(['script_interpreter', 'arg'])
-
-    test.workdir_set('prefix')
-    test.workdir_set('')
-
-    test.workpath('file')
-    test.workpath('subdir', 'file')
-
-    test.subdir('subdir', ...)
-
-    test.rmdir('subdir', ...)
-
-    test.write('file', "contents\n")
-    test.write(['subdir', 'file'], "contents\n")
-
-    test.read('file')
-    test.read(['subdir', 'file'])
-    test.read('file', mode)
-    test.read(['subdir', 'file'], mode)
-
-    test.writable('dir', 1)
-    test.writable('dir', None)
-
-    test.preserve(condition, ...)
-
-    test.cleanup(condition)
-
-    test.command_args(program = 'program_or_script_to_run',
-                      interpreter = 'script_interpreter',
-                      arguments = 'arguments to pass to program')
-
-    test.run(program = 'program_or_script_to_run',
-             interpreter = 'script_interpreter',
-             arguments = 'arguments to pass to program',
-             chdir = 'directory_to_chdir_to',
-             stdin = 'input to feed to the program\n')
-             universal_newlines = True)
-
-    p = test.start(program = 'program_or_script_to_run',
-                   interpreter = 'script_interpreter',
-                   arguments = 'arguments to pass to program',
-                   universal_newlines = None)
-
-    test.finish(self, p)
-
-    test.pass_test()
-    test.pass_test(condition)
-    test.pass_test(condition, function)
-
-    test.fail_test()
-    test.fail_test(condition)
-    test.fail_test(condition, function)
-    test.fail_test(condition, function, skip)
-
-    test.no_result()
-    test.no_result(condition)
-    test.no_result(condition, function)
-    test.no_result(condition, function, skip)
-
-    test.stdout()
-    test.stdout(run)
-
-    test.stderr()
-    test.stderr(run)
-
-    test.symlink(target, link)
-
-    test.banner(string)
-    test.banner(string, width)
-
-    test.diff(actual, expected)
-
-    test.match(actual, expected)
-
-    test.match_exact("actual 1\nactual 2\n", "expected 1\nexpected 2\n")
-    test.match_exact(["actual 1\n", "actual 2\n"],
-                     ["expected 1\n", "expected 2\n"])
-
-    test.match_re("actual 1\nactual 2\n", regex_string)
-    test.match_re(["actual 1\n", "actual 2\n"], list_of_regexes)
-
-    test.match_re_dotall("actual 1\nactual 2\n", regex_string)
-    test.match_re_dotall(["actual 1\n", "actual 2\n"], list_of_regexes)
-
-    test.tempdir()
-    test.tempdir('temporary-directory')
-
-    test.sleep()
-    test.sleep(seconds)
-
-    test.where_is('foo')
-    test.where_is('foo', 'PATH1:PATH2')
-    test.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
-
-    test.unlink('file')
-    test.unlink('subdir', 'file')
-
-The TestCmd module provides pass_test(), fail_test(), and no_result()
-unbound functions that report test results for use with the Aegis change
-management system.  These methods terminate the test immediately,
-reporting PASSED, FAILED, or NO RESULT respectively, and exiting with
-status 0 (success), 1 or 2 respectively.  This allows for a distinction
-between an actual failed test and a test that could not be properly
-evaluated because of an external condition (such as a full file system
-or incorrect permissions).
-
-    import TestCmd
-
-    TestCmd.pass_test()
-    TestCmd.pass_test(condition)
-    TestCmd.pass_test(condition, function)
-
-    TestCmd.fail_test()
-    TestCmd.fail_test(condition)
-    TestCmd.fail_test(condition, function)
-    TestCmd.fail_test(condition, function, skip)
-
-    TestCmd.no_result()
-    TestCmd.no_result(condition)
-    TestCmd.no_result(condition, function)
-    TestCmd.no_result(condition, function, skip)
-
-The TestCmd module also provides unbound functions that handle matching
-in the same way as the match_*() methods described above.
-
-    import TestCmd
-
-    test = TestCmd.TestCmd(match = TestCmd.match_exact)
-
-    test = TestCmd.TestCmd(match = TestCmd.match_re)
-
-    test = TestCmd.TestCmd(match = TestCmd.match_re_dotall)
-
-The TestCmd module provides unbound functions that can be used for the
-"diff" argument to TestCmd.TestCmd instantiation:
-
-    import TestCmd
-
-    test = TestCmd.TestCmd(match = TestCmd.match_re,
-                           diff = TestCmd.diff_re)
-
-    test = TestCmd.TestCmd(diff = TestCmd.simple_diff)
-
-The "diff" argument can also be used with standard difflib functions:
-
-    import difflib
-
-    test = TestCmd.TestCmd(diff = difflib.context_diff)
-
-    test = TestCmd.TestCmd(diff = difflib.unified_diff)
-
-Lastly, the where_is() method also exists in an unbound function
-version.
-
-    import TestCmd
-
-    TestCmd.where_is('foo')
-    TestCmd.where_is('foo', 'PATH1:PATH2')
-    TestCmd.where_is('foo', 'PATH1;PATH2', '.suffix3;.suffix4')
-"""
-
-# Copyright 2000-2010 Steven Knight
-# This module is free software, and you may redistribute it and/or modify
-# it under the same terms as Python itself, so long as this copyright message
-# and disclaimer are retained in their original form.
-#
-# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
-# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
-# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-#
-# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-# PARTICULAR PURPOSE.  THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
-# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
-# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-__author__ = "Steven Knight <knight at baldmt dot com>"
-__revision__ = "TestCmd.py 0.37.D001 2010/01/11 16:55:50 knight"
-__version__ = "0.37"
-
-import errno
-import os
-import os.path
-import re
-import shutil
-import stat
-import string
-import sys
-import tempfile
-import time
-import traceback
-import types
-import UserList
-
-__all__ = [
-    'diff_re',
-    'fail_test',
-    'no_result',
-    'pass_test',
-    'match_exact',
-    'match_re',
-    'match_re_dotall',
-    'python_executable',
-    'TestCmd'
-]
-
-try:
-    import difflib
-except ImportError:
-    __all__.append('simple_diff')
-
-def is_List(e):
-    return type(e) is types.ListType \
-        or isinstance(e, UserList.UserList)
-
-try:
-    from UserString import UserString
-except ImportError:
-    class UserString:
-        pass
-
-if hasattr(types, 'UnicodeType'):
-    def is_String(e):
-        return type(e) is types.StringType \
-            or type(e) is types.UnicodeType \
-            or isinstance(e, UserString)
-else:
-    def is_String(e):
-        return type(e) is types.StringType or isinstance(e, UserString)
-
-tempfile.template = 'testcmd.'
-if os.name in ('posix', 'nt'):
-    tempfile.template = 'testcmd.' + str(os.getpid()) + '.'
-else:
-    tempfile.template = 'testcmd.'
-
-re_space = re.compile('\s')
-
-_Cleanup = []
-
-_chain_to_exitfunc = None
-
-def _clean():
-    global _Cleanup
-    cleanlist = filter(None, _Cleanup)
-    del _Cleanup[:]
-    cleanlist.reverse()
-    for test in cleanlist:
-        test.cleanup()
-    if _chain_to_exitfunc:
-        _chain_to_exitfunc()
-
-try:
-    import atexit
-except ImportError:
-    # TODO(1.5): atexit requires python 2.0, so chain sys.exitfunc
-    try:
-        _chain_to_exitfunc = sys.exitfunc
-    except AttributeError:
-        pass
-    sys.exitfunc = _clean
-else:
-    atexit.register(_clean)
-
-try:
-    zip
-except NameError:
-    def zip(*lists):
-        result = []
-        for i in xrange(min(map(len, lists))):
-            result.append(tuple(map(lambda l, i=i: l[i], lists)))
-        return result
-
-class Collector:
-    def __init__(self, top):
-        self.entries = [top]
-    def __call__(self, arg, dirname, names):
-        pathjoin = lambda n, d=dirname: os.path.join(d, n)
-        self.entries.extend(map(pathjoin, names))
-
-def _caller(tblist, skip):
-    string = ""
-    arr = []
-    for file, line, name, text in tblist:
-        if file[-10:] == "TestCmd.py":
-                break
-        arr = [(file, line, name, text)] + arr
-    atfrom = "at"
-    for file, line, name, text in arr[skip:]:
-        if name in ("?", "<module>"):
-            name = ""
-        else:
-            name = " (" + name + ")"
-        string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
-        atfrom = "\tfrom"
-    return string
-
-def fail_test(self = None, condition = 1, function = None, skip = 0):
-    """Cause the test to fail.
-
-    By default, the fail_test() method reports that the test FAILED
-    and exits with a status of 1.  If a condition argument is supplied,
-    the test fails only if the condition is true.
-    """
-    if not condition:
-        return
-    if not function is None:
-        function()
-    of = ""
-    desc = ""
-    sep = " "
-    if not self is None:
-        if self.program:
-            of = " of " + self.program
-            sep = "\n\t"
-        if self.description:
-            desc = " [" + self.description + "]"
-            sep = "\n\t"
-
-    at = _caller(traceback.extract_stack(), skip)
-    sys.stderr.write("FAILED test" + of + desc + sep + at)
-
-    sys.exit(1)
-
-def no_result(self = None, condition = 1, function = None, skip = 0):
-    """Causes a test to exit with no valid result.
-
-    By default, the no_result() method reports NO RESULT for the test
-    and exits with a status of 2.  If a condition argument is supplied,
-    the test fails only if the condition is true.
-    """
-    if not condition:
-        return
-    if not function is None:
-        function()
-    of = ""
-    desc = ""
-    sep = " "
-    if not self is None:
-        if self.program:
-            of = " of " + self.program
-            sep = "\n\t"
-        if self.description:
-            desc = " [" + self.description + "]"
-            sep = "\n\t"
-
-    if os.environ.get('TESTCMD_DEBUG_SKIPS'):
-        at = _caller(traceback.extract_stack(), skip)
-        sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
-    else:
-        sys.stderr.write("NO RESULT\n")
-
-    sys.exit(2)
-
-def pass_test(self = None, condition = 1, function = None):
-    """Causes a test to pass.
-
-    By default, the pass_test() method reports PASSED for the test
-    and exits with a status of 0.  If a condition argument is supplied,
-    the test passes only if the condition is true.
-    """
-    if not condition:
-        return
-    if not function is None:
-        function()
-    sys.stderr.write("PASSED\n")
-    sys.exit(0)
-
-def match_exact(lines = None, matches = None):
-    """
-    """
-    if not is_List(lines):
-        lines = string.split(lines, "\n")
-    if not is_List(matches):
-        matches = string.split(matches, "\n")
-    if len(lines) != len(matches):
-        return
-    for i in range(len(lines)):
-        if lines[i] != matches[i]:
-            return
-    return 1
-
-def match_re(lines = None, res = None):
-    """
-    """
-    if not is_List(lines):
-        lines = string.split(lines, "\n")
-    if not is_List(res):
-        res = string.split(res, "\n")
-    if len(lines) != len(res):
-        return
-    for i in range(len(lines)):
-        s = "^" + res[i] + "$"
-        try:
-            expr = re.compile(s)
-        except re.error, e:
-            msg = "Regular expression error in %s: %s"
-            raise re.error, msg % (repr(s), e[0])
-        if not expr.search(lines[i]):
-            return
-    return 1
-
-def match_re_dotall(lines = None, res = None):
-    """
-    """
-    if not type(lines) is type(""):
-        lines = string.join(lines, "\n")
-    if not type(res) is type(""):
-        res = string.join(res, "\n")
-    s = "^" + res + "$"
-    try:
-        expr = re.compile(s, re.DOTALL)
-    except re.error, e:
-        msg = "Regular expression error in %s: %s"
-        raise re.error, msg % (repr(s), e[0])
-    if expr.match(lines):
-        return 1
-
-try:
-    import difflib
-except ImportError:
-    pass
-else:
-    def simple_diff(a, b, fromfile='', tofile='',
-                    fromfiledate='', tofiledate='', n=3, lineterm='\n'):
-        """
-        A function with the same calling signature as difflib.context_diff
-        (diff -c) and difflib.unified_diff (diff -u) but which prints
-        output like the simple, unadorned 'diff" command.
-        """
-        sm = difflib.SequenceMatcher(None, a, b)
-        def comma(x1, x2):
-            return x1+1 == x2 and str(x2) or '%s,%s' % (x1+1, x2)
-        result = []
-        for op, a1, a2, b1, b2 in sm.get_opcodes():
-            if op == 'delete':
-                result.append("%sd%d" % (comma(a1, a2), b1))
-                result.extend(map(lambda l: '< ' + l, a[a1:a2]))
-            elif op == 'insert':
-                result.append("%da%s" % (a1, comma(b1, b2)))
-                result.extend(map(lambda l: '> ' + l, b[b1:b2]))
-            elif op == 'replace':
-                result.append("%sc%s" % (comma(a1, a2), comma(b1, b2)))
-                result.extend(map(lambda l: '< ' + l, a[a1:a2]))
-                result.append('---')
-                result.extend(map(lambda l: '> ' + l, b[b1:b2]))
-        return result
-
-def diff_re(a, b, fromfile='', tofile='',
-                fromfiledate='', tofiledate='', n=3, lineterm='\n'):
-    """
-    A simple "diff" of two sets of lines when the expected lines
-    are regular expressions.  This is a really dumb thing that
-    just compares each line in turn, so it doesn't look for
-    chunks of matching lines and the like--but at least it lets
-    you know exactly which line first didn't compare correctly.
-    """
-    result = []
-    diff = len(a) - len(b)
-    if diff < 0:
-        a = a + ['']*(-diff)
-    elif diff > 0:
-        b = b + ['']*diff
-    i = 0
-    for aline, bline in zip(a, b):
-        s = "^" + aline + "$"
-        try:
-            expr = re.compile(s)
-        except re.error, e:
-            msg = "Regular expression error in %s: %s"
-            raise re.error, msg % (repr(s), e[0])
-        if not expr.search(bline):
-            result.append("%sc%s" % (i+1, i+1))
-            result.append('< ' + repr(a[i]))
-            result.append('---')
-            result.append('> ' + repr(b[i]))
-        i = i+1
-    return result
-
-if os.name == 'java':
-
-    python_executable = os.path.join(sys.prefix, 'jython')
-
-else:
-
-    python_executable = sys.executable
-
-if sys.platform == 'win32':
-
-    default_sleep_seconds = 2
-
-    def where_is(file, path=None, pathext=None):
-        if path is None:
-            path = os.environ['PATH']
-        if is_String(path):
-            path = string.split(path, os.pathsep)
-        if pathext is None:
-            pathext = os.environ['PATHEXT']
-        if is_String(pathext):
-            pathext = string.split(pathext, os.pathsep)
-        for ext in pathext:
-            if string.lower(ext) == string.lower(file[-len(ext):]):
-                pathext = ['']
-                break
-        for dir in path:
-            f = os.path.join(dir, file)
-            for ext in pathext:
-                fext = f + ext
-                if os.path.isfile(fext):
-                    return fext
-        return None
-
-else:
-
-    def where_is(file, path=None, pathext=None):
-        if path is None:
-            path = os.environ['PATH']
-        if is_String(path):
-            path = string.split(path, os.pathsep)
-        for dir in path:
-            f = os.path.join(dir, file)
-            if os.path.isfile(f):
-                try:
-                    st = os.stat(f)
-                except OSError:
-                    continue
-                if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
-                    return f
-        return None
-
-    default_sleep_seconds = 1
-
-
-
-try:
-    import subprocess
-except ImportError:
-    # The subprocess module doesn't exist in this version of Python,
-    # so we're going to cobble up something that looks just enough
-    # like its API for our purposes below.
-    import new
-
-    subprocess = new.module('subprocess')
-
-    subprocess.PIPE = 'PIPE'
-    subprocess.STDOUT = 'STDOUT'
-    subprocess.mswindows = (sys.platform == 'win32')
-
-    try:
-        import popen2
-        popen2.Popen3
-    except AttributeError:
-        class Popen3:
-            universal_newlines = 1
-            def __init__(self, command, **kw):
-                if sys.platform == 'win32' and command[0] == '"':
-                    command = '"' + command + '"'
-                (stdin, stdout, stderr) = os.popen3(' ' + command)
-                self.stdin = stdin
-                self.stdout = stdout
-                self.stderr = stderr
-            def close_output(self):
-                self.stdout.close()
-                self.resultcode = self.stderr.close()
-            def wait(self):
-                resultcode = self.resultcode
-                if os.WIFEXITED(resultcode):
-                    return os.WEXITSTATUS(resultcode)
-                elif os.WIFSIGNALED(resultcode):
-                    return os.WTERMSIG(resultcode)
-                else:
-                    return None
-
-    else:
-        try:
-            popen2.Popen4
-        except AttributeError:
-            # A cribbed Popen4 class, with some retrofitted code from
-            # the Python 1.5 Popen3 class methods to do certain things
-            # by hand.
-            class Popen4(popen2.Popen3):
-                childerr = None
-
-                def __init__(self, cmd, bufsize=-1):
-                    p2cread, p2cwrite = os.pipe()
-                    c2pread, c2pwrite = os.pipe()
-                    self.pid = os.fork()
-                    if self.pid == 0:
-                        # Child
-                        os.dup2(p2cread, 0)
-                        os.dup2(c2pwrite, 1)
-                        os.dup2(c2pwrite, 2)
-                        for i in range(3, popen2.MAXFD):
-                            try:
-                                os.close(i)
-                            except: pass
-                        try:
-                            os.execvp(cmd[0], cmd)
-                        finally:
-                            os._exit(1)
-                        # Shouldn't come here, I guess
-                        os._exit(1)
-                    os.close(p2cread)
-                    self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
-                    os.close(c2pwrite)
-                    self.fromchild = os.fdopen(c2pread, 'r', bufsize)
-                    popen2._active.append(self)
-
-            popen2.Popen4 = Popen4
-
-        class Popen3(popen2.Popen3, popen2.Popen4):
-            universal_newlines = 1
-            def __init__(self, command, **kw):
-                if kw.get('stderr') == 'STDOUT':
-                    apply(popen2.Popen4.__init__, (self, command, 1))
-                else:
-                    apply(popen2.Popen3.__init__, (self, command, 1))
-                self.stdin = self.tochild
-                self.stdout = self.fromchild
-                self.stderr = self.childerr
-            def wait(self, *args, **kw):
-                resultcode = apply(popen2.Popen3.wait, (self,)+args, kw)
-                if os.WIFEXITED(resultcode):
-                    return os.WEXITSTATUS(resultcode)
-                elif os.WIFSIGNALED(resultcode):
-                    return os.WTERMSIG(resultcode)
-                else:
-                    return None
-
-    subprocess.Popen = Popen3
-
-
-
-# From Josiah Carlson,
-# ASPN : Python Cookbook : Module to allow Asynchronous subprocess use on Windows and Posix platforms
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440554
-
-PIPE = subprocess.PIPE
-
-if subprocess.mswindows:
-    from win32file import ReadFile, WriteFile
-    from win32pipe import PeekNamedPipe
-    import msvcrt
-else:
-    import select
-    import fcntl
-
-    try:                    fcntl.F_GETFL
-    except AttributeError:  fcntl.F_GETFL = 3
-
-    try:                    fcntl.F_SETFL
-    except AttributeError:  fcntl.F_SETFL = 4
-
-class Popen(subprocess.Popen):
-    def recv(self, maxsize=None):
-        return self._recv('stdout', maxsize)
-
-    def recv_err(self, maxsize=None):
-        return self._recv('stderr', maxsize)
-
-    def send_recv(self, input='', maxsize=None):
-        return self.send(input), self.recv(maxsize), self.recv_err(maxsize)
-
-    def get_conn_maxsize(self, which, maxsize):
-        if maxsize is None:
-            maxsize = 1024
-        elif maxsize < 1:
-            maxsize = 1
-        return getattr(self, which), maxsize
-
-    def _close(self, which):
-        getattr(self, which).close()
-        setattr(self, which, None)
-
-    if subprocess.mswindows:
-        def send(self, input):
-            if not self.stdin:
-                return None
-
-            try:
-                x = msvcrt.get_osfhandle(self.stdin.fileno())
-                (errCode, written) = WriteFile(x, input)
-            except ValueError:
-                return self._close('stdin')
-            except (subprocess.pywintypes.error, Exception), why:
-                if why[0] in (109, errno.ESHUTDOWN):
-                    return self._close('stdin')
-                raise
-
-            return written
-
-        def _recv(self, which, maxsize):
-            conn, maxsize = self.get_conn_maxsize(which, maxsize)
-            if conn is None:
-                return None
-
-            try:
-                x = msvcrt.get_osfhandle(conn.fileno())
-                (read, nAvail, nMessage) = PeekNamedPipe(x, 0)
-                if maxsize < nAvail:
-                    nAvail = maxsize
-                if nAvail > 0:
-                    (errCode, read) = ReadFile(x, nAvail, None)
-            except ValueError:
-                return self._close(which)
-            except (subprocess.pywintypes.error, Exception), why:
-                if why[0] in (109, errno.ESHUTDOWN):
-                    return self._close(which)
-                raise
-
-            #if self.universal_newlines:
-            #    read = self._translate_newlines(read)
-            return read
-
-    else:
-        def send(self, input):
-            if not self.stdin:
-                return None
-
-            if not select.select([], [self.stdin], [], 0)[1]:
-                return 0
-
-            try:
-                written = os.write(self.stdin.fileno(), input)
-            except OSError, why:
-                if why[0] == errno.EPIPE: #broken pipe
-                    return self._close('stdin')
-                raise
-
-            return written
-
-        def _recv(self, which, maxsize):
-            conn, maxsize = self.get_conn_maxsize(which, maxsize)
-            if conn is None:
-                return None
-
-            try:
-                flags = fcntl.fcntl(conn, fcntl.F_GETFL)
-            except TypeError:
-                flags = None
-            else:
-                if not conn.closed:
-                    fcntl.fcntl(conn, fcntl.F_SETFL, flags| os.O_NONBLOCK)
-
-            try:
-                if not select.select([conn], [], [], 0)[0]:
-                    return ''
-
-                r = conn.read(maxsize)
-                if not r:
-                    return self._close(which)
-
-                #if self.universal_newlines:
-                #    r = self._translate_newlines(r)
-                return r
-            finally:
-                if not conn.closed and not flags is None:
-                    fcntl.fcntl(conn, fcntl.F_SETFL, flags)
-
-disconnect_message = "Other end disconnected!"
-
-def recv_some(p, t=.1, e=1, tr=5, stderr=0):
-    if tr < 1:
-        tr = 1
-    x = time.time()+t
-    y = []
-    r = ''
-    pr = p.recv
-    if stderr:
-        pr = p.recv_err
-    while time.time() < x or r:
-        r = pr()
-        if r is None:
-            if e:
-                raise Exception(disconnect_message)
-            else:
-                break
-        elif r:
-            y.append(r)
-        else:
-            time.sleep(max((x-time.time())/tr, 0))
-    return ''.join(y)
-
-# TODO(3.0):  rewrite to use memoryview()
-def send_all(p, data):
-    while len(data):
-        sent = p.send(data)
-        if sent is None:
-            raise Exception(disconnect_message)
-        data = buffer(data, sent)
-
-
-
-try:
-    object
-except NameError:
-    class object:
-        pass
-
-
-
-class TestCmd(object):
-    """Class TestCmd
-    """
-
-    def __init__(self, description = None,
-                       program = None,
-                       interpreter = None,
-                       workdir = None,
-                       subdir = None,
-                       verbose = None,
-                       match = None,
-                       diff = None,
-                       combine = 0,
-                       universal_newlines = 1):
-        self._cwd = os.getcwd()
-        self.description_set(description)
-        self.program_set(program)
-        self.interpreter_set(interpreter)
-        if verbose is None:
-            try:
-                verbose = max( 0, int(os.environ.get('TESTCMD_VERBOSE', 0)) )
-            except ValueError:
-                verbose = 0
-        self.verbose_set(verbose)
-        self.combine = combine
-        self.universal_newlines = universal_newlines
-        if match is not None:
-            self.match_function = match
-        else:
-            self.match_function = match_re
-        if diff is not None:
-            self.diff_function = diff
-        else:
-            try:
-                difflib
-            except NameError:
-                pass
-            else:
-                self.diff_function = simple_diff
-                #self.diff_function = difflib.context_diff
-                #self.diff_function = difflib.unified_diff
-        self._dirlist = []
-        self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
-        if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '':
-            self._preserve['pass_test'] = os.environ['PRESERVE']
-            self._preserve['fail_test'] = os.environ['PRESERVE']
-            self._preserve['no_result'] = os.environ['PRESERVE']
-        else:
-            try:
-                self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
-            except KeyError:
-                pass
-            try:
-                self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
-            except KeyError:
-                pass
-            try:
-                self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
-            except KeyError:
-                pass
-        self._stdout = []
-        self._stderr = []
-        self.status = None
-        self.condition = 'no_result'
-        self.workdir_set(workdir)
-        self.subdir(subdir)
-
-    def __del__(self):
-        self.cleanup()
-
-    def __repr__(self):
-        return "%x" % id(self)
-
-    banner_char = '='
-    banner_width = 80
-
-    def banner(self, s, width=None):
-        if width is None:
-            width = self.banner_width
-        return s + self.banner_char * (width - len(s))
-
-    if os.name == 'posix':
-
-        def escape(self, arg):
-            "escape shell special characters"
-            slash = '\\'
-            special = '"$'
-
-            arg = string.replace(arg, slash, slash+slash)
-            for c in special:
-                arg = string.replace(arg, c, slash+c)
-
-            if re_space.search(arg):
-                arg = '"' + arg + '"'
-            return arg
-
-    else:
-
-        # Windows does not allow special characters in file names
-        # anyway, so no need for an escape function, we will just quote
-        # the arg.
-        def escape(self, arg):
-            if re_space.search(arg):
-                arg = '"' + arg + '"'
-            return arg
-
-    def canonicalize(self, path):
-        if is_List(path):
-            path = apply(os.path.join, tuple(path))
-        if not os.path.isabs(path):
-            path = os.path.join(self.workdir, path)
-        return path
-
-    def chmod(self, path, mode):
-        """Changes permissions on the specified file or directory
-        path name."""
-        path = self.canonicalize(path)
-        os.chmod(path, mode)
-
-    def cleanup(self, condition = None):
-        """Removes any temporary working directories for the specified
-        TestCmd environment.  If the environment variable PRESERVE was
-        set when the TestCmd environment was created, temporary working
-        directories are not removed.  If any of the environment variables
-        PRESERVE_PASS, PRESERVE_FAIL, or PRESERVE_NO_RESULT were set
-        when the TestCmd environment was created, then temporary working
-        directories are not removed if the test passed, failed, or had
-        no result, respectively.  Temporary working directories are also
-        preserved for conditions specified via the preserve method.
-
-        Typically, this method is not called directly, but is used when
-        the script exits to clean up temporary working directories as
-        appropriate for the exit status.
-        """
-        if not self._dirlist:
-            return
-        os.chdir(self._cwd)
-        self.workdir = None
-        if condition is None:
-            condition = self.condition
-        if self._preserve[condition]:
-            for dir in self._dirlist:
-                print "Preserved directory", dir
-        else:
-            list = self._dirlist[:]
-            list.reverse()
-            for dir in list:
-                self.writable(dir, 1)
-                shutil.rmtree(dir, ignore_errors = 1)
-            self._dirlist = []
-
-        try:
-            global _Cleanup
-            _Cleanup.remove(self)
-        except (AttributeError, ValueError):
-            pass
-
-    def command_args(self, program = None,
-                           interpreter = None,
-                           arguments = None):
-        if program:
-            if type(program) == type('') and not os.path.isabs(program):
-                program = os.path.join(self._cwd, program)
-        else:
-            program = self.program
-            if not interpreter:
-                interpreter = self.interpreter
-        if not type(program) in [type([]), type(())]:
-            program = [program]
-        cmd = list(program)
-        if interpreter:
-            if not type(interpreter) in [type([]), type(())]:
-                interpreter = [interpreter]
-            cmd = list(interpreter) + cmd
-        if arguments:
-            if type(arguments) == type(''):
-                arguments = string.split(arguments)
-            cmd.extend(arguments)
-        return cmd
-
-    def description_set(self, description):
-        """Set the description of the functionality being tested.
-        """
-        self.description = description
-
-    try:
-        difflib
-    except NameError:
-        def diff(self, a, b, name, *args, **kw):
-            print self.banner('Expected %s' % name)
-            print a
-            print self.banner('Actual %s' % name)
-            print b
-    else:
-        def diff(self, a, b, name, *args, **kw):
-            print self.banner(name)
-            args = (a.splitlines(), b.splitlines()) + args
-            lines = apply(self.diff_function, args, kw)
-            for l in lines:
-                print l
-
-    def fail_test(self, condition = 1, function = None, skip = 0):
-        """Cause the test to fail.
-        """
-        if not condition:
-            return
-        self.condition = 'fail_test'
-        fail_test(self = self,
-                  condition = condition,
-                  function = function,
-                  skip = skip)
-
-    def interpreter_set(self, interpreter):
-        """Set the program to be used to interpret the program
-        under test as a script.
-        """
-        self.interpreter = interpreter
-
-    def match(self, lines, matches):
-        """Compare actual and expected file contents.
-        """
-        return self.match_function(lines, matches)
-
-    def match_exact(self, lines, matches):
-        """Compare actual and expected file contents.
-        """
-        return match_exact(lines, matches)
-
-    def match_re(self, lines, res):
-        """Compare actual and expected file contents.
-        """
-        return match_re(lines, res)
-
-    def match_re_dotall(self, lines, res):
-        """Compare actual and expected file contents.
-        """
-        return match_re_dotall(lines, res)
-
-    def no_result(self, condition = 1, function = None, skip = 0):
-        """Report that the test could not be run.
-        """
-        if not condition:
-            return
-        self.condition = 'no_result'
-        no_result(self = self,
-                  condition = condition,
-                  function = function,
-                  skip = skip)
-
-    def pass_test(self, condition = 1, function = None):
-        """Cause the test to pass.
-        """
-        if not condition:
-            return
-        self.condition = 'pass_test'
-        pass_test(self = self, condition = condition, function = function)
-
-    def preserve(self, *conditions):
-        """Arrange for the temporary working directories for the
-        specified TestCmd environment to be preserved for one or more
-        conditions.  If no conditions are specified, arranges for
-        the temporary working directories to be preserved for all
-        conditions.
-        """
-        if conditions is ():
-            conditions = ('pass_test', 'fail_test', 'no_result')
-        for cond in conditions:
-            self._preserve[cond] = 1
-
-    def program_set(self, program):
-        """Set the executable program or script to be tested.
-        """
-        if program and not os.path.isabs(program):
-            program = os.path.join(self._cwd, program)
-        self.program = program
-
-    def read(self, file, mode = 'rb'):
-        """Reads and returns the contents of the specified file name.
-        The file name may be a list, in which case the elements are
-        concatenated with the os.path.join() method.  The file is
-        assumed to be under the temporary working directory unless it
-        is an absolute path name.  The I/O mode for the file may
-        be specified; it must begin with an 'r'.  The default is
-        'rb' (binary read).
-        """
-        file = self.canonicalize(file)
-        if mode[0] != 'r':
-            raise ValueError, "mode must begin with 'r'"
-        with open(file, mode) as f:
-            result = f.read()
-        return result
-
-    def rmdir(self, dir):
-        """Removes the specified dir name.
-        The dir name may be a list, in which case the elements are
-        concatenated with the os.path.join() method.  The dir is
-        assumed to be under the temporary working directory unless it
-        is an absolute path name.
-        The dir must be empty.
-        """
-        dir = self.canonicalize(dir)
-        os.rmdir(dir)
-
-    def start(self, program = None,
-                    interpreter = None,
-                    arguments = None,
-                    universal_newlines = None,
-                    **kw):
-        """
-        Starts a program or script for the test environment.
-
-        The specified program will have the original directory
-        prepended unless it is enclosed in a [list].
-        """
-        cmd = self.command_args(program, interpreter, arguments)
-        cmd_string = string.join(map(self.escape, cmd), ' ')
-        if self.verbose:
-            sys.stderr.write(cmd_string + "\n")
-        if universal_newlines is None:
-            universal_newlines = self.universal_newlines
-
-        # On Windows, if we make stdin a pipe when we plan to send 
-        # no input, and the test program exits before
-        # Popen calls msvcrt.open_osfhandle, that call will fail.
-        # So don't use a pipe for stdin if we don't need one.
-        stdin = kw.get('stdin', None)
-        if stdin is not None:
-            stdin = subprocess.PIPE
-
-        combine = kw.get('combine', self.combine)
-        if combine:
-            stderr_value = subprocess.STDOUT
-        else:
-            stderr_value = subprocess.PIPE
-
-        return Popen(cmd,
-                     stdin=stdin,
-                     stdout=subprocess.PIPE,
-                     stderr=stderr_value,
-                     universal_newlines=universal_newlines)
-
-    def finish(self, popen, **kw):
-        """
-        Finishes and waits for the process being run under control of
-        the specified popen argument, recording the exit status,
-        standard output and error output.
-        """
-        popen.stdin.close()
-        self.status = popen.wait()
-        if not self.status:
-            self.status = 0
-        self._stdout.append(popen.stdout.read())
-        if popen.stderr:
-            stderr = popen.stderr.read()
-        else:
-            stderr = ''
-        self._stderr.append(stderr)
-
-    def run(self, program = None,
-                  interpreter = None,
-                  arguments = None,
-                  chdir = None,
-                  stdin = None,
-                  universal_newlines = None):
-        """Runs a test of the program or script for the test
-        environment.  Standard output and error output are saved for
-        future retrieval via the stdout() and stderr() methods.
-
-        The specified program will have the original directory
-        prepended unless it is enclosed in a [list].
-        """
-        if chdir:
-            oldcwd = os.getcwd()
-            if not os.path.isabs(chdir):
-                chdir = os.path.join(self.workpath(chdir))
-            if self.verbose:
-                sys.stderr.write("chdir(" + chdir + ")\n")
-            os.chdir(chdir)
-        p = self.start(program,
-                       interpreter,
-                       arguments,
-                       universal_newlines,
-                       stdin=stdin)
-        if stdin:
-            if is_List(stdin):
-                for line in stdin:
-                    p.stdin.write(line)
-            else:
-                p.stdin.write(stdin)
-            p.stdin.close()
-
-        out = p.stdout.read()
-        if p.stderr is None:
-            err = ''
-        else:
-            err = p.stderr.read()
-        try:
-            close_output = p.close_output
-        except AttributeError:
-            p.stdout.close()
-            if not p.stderr is None:
-                p.stderr.close()
-        else:
-            close_output()
-
-        self._stdout.append(out)
-        self._stderr.append(err)
-
-        self.status = p.wait()
-        if not self.status:
-            self.status = 0
-
-        if chdir:
-            os.chdir(oldcwd)
-        if self.verbose >= 2:
-            write = sys.stdout.write
-            write('============ STATUS: %d\n' % self.status)
-            out = self.stdout()
-            if out or self.verbose >= 3:
-                write('============ BEGIN STDOUT (len=%d):\n' % len(out))
-                write(out)
-                write('============ END STDOUT\n')
-            err = self.stderr()
-            if err or self.verbose >= 3:
-                write('============ BEGIN STDERR (len=%d)\n' % len(err))
-                write(err)
-                write('============ END STDERR\n')
-
-    def sleep(self, seconds = default_sleep_seconds):
-        """Sleeps at least the specified number of seconds.  If no
-        number is specified, sleeps at least the minimum number of
-        seconds necessary to advance file time stamps on the current
-        system.  Sleeping more seconds is all right.
-        """
-        time.sleep(seconds)
-
-    def stderr(self, run = None):
-        """Returns the error output from the specified run number.
-        If there is no specified run number, then returns the error
-        output of the last run.  If the run number is less than zero,
-        then returns the error output from that many runs back from the
-        current run.
-        """
-        if not run:
-            run = len(self._stderr)
-        elif run < 0:
-            run = len(self._stderr) + run
-        run = run - 1
-        return self._stderr[run]
-
-    def stdout(self, run = None):
-        """Returns the standard output from the specified run number.
-        If there is no specified run number, then returns the standard
-        output of the last run.  If the run number is less than zero,
-        then returns the standard output from that many runs back from
-        the current run.
-        """
-        if not run:
-            run = len(self._stdout)
-        elif run < 0:
-            run = len(self._stdout) + run
-        run = run - 1
-        return self._stdout[run]
-
-    def subdir(self, *subdirs):
-        """Create new subdirectories under the temporary working
-        directory, one for each argument.  An argument may be a list,
-        in which case the list elements are concatenated using the
-        os.path.join() method.  Subdirectories multiple levels deep
-        must be created using a separate argument for each level:
-
-                test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
-
-        Returns the number of subdirectories actually created.
-        """
-        count = 0
-        for sub in subdirs:
-            if sub is None:
-                continue
-            if is_List(sub):
-                sub = apply(os.path.join, tuple(sub))
-            new = os.path.join(self.workdir, sub)
-            try:
-                os.mkdir(new)
-            except OSError:
-                pass
-            else:
-                count = count + 1
-        return count
-
-    def symlink(self, target, link):
-        """Creates a symlink to the specified target.
-        The link name may be a list, in which case the elements are
-        concatenated with the os.path.join() method.  The link is
-        assumed to be under the temporary working directory unless it
-        is an absolute path name. The target is *not* assumed to be
-        under the temporary working directory.
-        """
-        link = self.canonicalize(link)
-        os.symlink(target, link)
-
-    def tempdir(self, path=None):
-        """Creates a temporary directory.
-        A unique directory name is generated if no path name is specified.
-        The directory is created, and will be removed when the TestCmd
-        object is destroyed.
-        """
-        if path is None:
-            try:
-                path = tempfile.mktemp(prefix=tempfile.template)
-            except TypeError:
-                path = tempfile.mktemp()
-        os.mkdir(path)
-
-        # Symlinks in the path will report things
-        # differently from os.getcwd(), so chdir there
-        # and back to fetch the canonical path.
-        cwd = os.getcwd()
-        try:
-            os.chdir(path)
-            path = os.getcwd()
-        finally:
-            os.chdir(cwd)
-
-        # Uppercase the drive letter since the case of drive
-        # letters is pretty much random on win32:
-        drive,rest = os.path.splitdrive(path)
-        if drive:
-            path = string.upper(drive) + rest
-
-        #
-        self._dirlist.append(path)
-        global _Cleanup
-        try:
-            _Cleanup.index(self)
-        except ValueError:
-            _Cleanup.append(self)
-
-        return path
-
-    def touch(self, path, mtime=None):
-        """Updates the modification time on the specified file or
-        directory path name.  The default is to update to the
-        current time if no explicit modification time is specified.
-        """
-        path = self.canonicalize(path)
-        atime = os.path.getatime(path)
-        if mtime is None:
-            mtime = time.time()
-        os.utime(path, (atime, mtime))
-
-    def unlink(self, file):
-        """Unlinks the specified file name.
-        The file name may be a list, in which case the elements are
-        concatenated with the os.path.join() method.  The file is
-        assumed to be under the temporary working directory unless it
-        is an absolute path name.
-        """
-        file = self.canonicalize(file)
-        os.unlink(file)
-
-    def verbose_set(self, verbose):
-        """Set the verbose level.
-        """
-        self.verbose = verbose
-
-    def where_is(self, file, path=None, pathext=None):
-        """Find an executable file.
-        """
-        if is_List(file):
-            file = apply(os.path.join, tuple(file))
-        if not os.path.isabs(file):
-            file = where_is(file, path, pathext)
-        return file
-
-    def workdir_set(self, path):
-        """Creates a temporary working directory with the specified
-        path name.  If the path is a null string (''), a unique
-        directory name is created.
-        """
-        if (path != None):
-            if path == '':
-                path = None
-            path = self.tempdir(path)
-        self.workdir = path
-
-    def workpath(self, *args):
-        """Returns the absolute path name to a subdirectory or file
-        within the current temporary working directory.  Concatenates
-        the temporary working directory name with the specified
-        arguments using the os.path.join() method.
-        """
-        return apply(os.path.join, (self.workdir,) + tuple(args))
-
-    def readable(self, top, read=1):
-        """Make the specified directory tree readable (read == 1)
-        or not (read == None).
-
-        This method has no effect on Windows systems, which use a
-        completely different mechanism to control file readability.
-        """
-
-        if sys.platform == 'win32':
-            return
-
-        if read:
-            def do_chmod(fname):
-                try: st = os.stat(fname)
-                except OSError: pass
-                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IREAD))
-        else:
-            def do_chmod(fname):
-                try: st = os.stat(fname)
-                except OSError: pass
-                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IREAD))
-
-        if os.path.isfile(top):
-            # If it's a file, that's easy, just chmod it.
-            do_chmod(top)
-        elif read:
-            # It's a directory and we're trying to turn on read
-            # permission, so it's also pretty easy, just chmod the
-            # directory and then chmod every entry on our walk down the
-            # tree.  Because os.path.walk() is top-down, we'll enable
-            # read permission on any directories that have it disabled
-            # before os.path.walk() tries to list their contents.
-            do_chmod(top)
-
-            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
-                for n in names:
-                    do_chmod(os.path.join(dirname, n))
-
-            os.path.walk(top, chmod_entries, None)
-        else:
-            # It's a directory and we're trying to turn off read
-            # permission, which means we have to chmod the directories
-            # in the tree bottom-up, lest disabling read permission from
-            # the top down get in the way of being able to get at lower
-            # parts of the tree.  But os.path.walk() visits things top
-            # down, so we just use an object to collect a list of all
-            # of the entries in the tree, reverse the list, and then
-            # chmod the reversed (bottom-up) list.
-            col = Collector(top)
-            os.path.walk(top, col, None)
-            col.entries.reverse()
-            for d in col.entries: do_chmod(d)
-
-    def writable(self, top, write=1):
-        """Make the specified directory tree writable (write == 1)
-        or not (write == None).
-        """
-
-        if sys.platform == 'win32':
-
-            if write:
-                def do_chmod(fname):
-                    try: os.chmod(fname, stat.S_IWRITE)
-                    except OSError: pass
-            else:
-                def do_chmod(fname):
-                    try: os.chmod(fname, stat.S_IREAD)
-                    except OSError: pass
-
-        else:
-
-            if write:
-                def do_chmod(fname):
-                    try: st = os.stat(fname)
-                    except OSError: pass
-                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|0200))
-            else:
-                def do_chmod(fname):
-                    try: st = os.stat(fname)
-                    except OSError: pass
-                    else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~0200))
-
-        if os.path.isfile(top):
-            do_chmod(top)
-        else:
-            col = Collector(top)
-            os.path.walk(top, col, None)
-            for d in col.entries: do_chmod(d)
-
-    def executable(self, top, execute=1):
-        """Make the specified directory tree executable (execute == 1)
-        or not (execute == None).
-
-        This method has no effect on Windows systems, which use a
-        completely different mechanism to control file executability.
-        """
-
-        if sys.platform == 'win32':
-            return
-
-        if execute:
-            def do_chmod(fname):
-                try: st = os.stat(fname)
-                except OSError: pass
-                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]|stat.S_IEXEC))
-        else:
-            def do_chmod(fname):
-                try: st = os.stat(fname)
-                except OSError: pass
-                else: os.chmod(fname, stat.S_IMODE(st[stat.ST_MODE]&~stat.S_IEXEC))
-
-        if os.path.isfile(top):
-            # If it's a file, that's easy, just chmod it.
-            do_chmod(top)
-        elif execute:
-            # It's a directory and we're trying to turn on execute
-            # permission, so it's also pretty easy, just chmod the
-            # directory and then chmod every entry on our walk down the
-            # tree.  Because os.path.walk() is top-down, we'll enable
-            # execute permission on any directories that have it disabled
-            # before os.path.walk() tries to list their contents.
-            do_chmod(top)
-
-            def chmod_entries(arg, dirname, names, do_chmod=do_chmod):
-                for n in names:
-                    do_chmod(os.path.join(dirname, n))
-
-            os.path.walk(top, chmod_entries, None)
-        else:
-            # It's a directory and we're trying to turn off execute
-            # permission, which means we have to chmod the directories
-            # in the tree bottom-up, lest disabling execute permission from
-            # the top down get in the way of being able to get at lower
-            # parts of the tree.  But os.path.walk() visits things top
-            # down, so we just use an object to collect a list of all
-            # of the entries in the tree, reverse the list, and then
-            # chmod the reversed (bottom-up) list.
-            col = Collector(top)
-            os.path.walk(top, col, None)
-            col.entries.reverse()
-            for d in col.entries: do_chmod(d)
-
-    def write(self, file, content, mode = 'wb'):
-        """Writes the specified content text (second argument) to the
-        specified file name (first argument).  The file name may be
-        a list, in which case the elements are concatenated with the
-        os.path.join() method.  The file is created under the temporary
-        working directory.  Any subdirectories in the path must already
-        exist.  The I/O mode for the file may be specified; it must
-        begin with a 'w'.  The default is 'wb' (binary write).
-        """
-        file = self.canonicalize(file)
-        if mode[0] != 'w':
-            raise ValueError, "mode must begin with 'w'"
-        with open(file, mode) as f:
-            f.write(content)
-
-# Local Variables:
-# tab-width:4
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=4 shiftwidth=4:
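The TestCmd.py module deleted above checks a program's actual output against expected output either verbatim (match_exact) or line by line against regular expressions anchored to the whole line (match_re). Below is a minimal sketch of that matching idea, written as present-day Python and returning booleans where the deleted helpers return 1 or None; it is an illustration, not the deleted harness itself.

import re

def match_exact(lines, matches):
    # Either argument may be a newline-joined string or a list of lines.
    if isinstance(lines, str):
        lines = lines.split("\n")
    if isinstance(matches, str):
        matches = matches.split("\n")
    # Every actual line must equal the corresponding expected line.
    return len(lines) == len(matches) and all(
        a == b for a, b in zip(lines, matches))

def match_re(lines, res):
    # Each expected entry is a regular expression that must match a whole line.
    if isinstance(lines, str):
        lines = lines.split("\n")
    if isinstance(res, str):
        res = res.split("\n")
    if len(lines) != len(res):
        return False
    return all(re.match("^" + pattern + "$", line)
               for line, pattern in zip(lines, res))

if __name__ == "__main__":
    assert match_exact("a\nb", ["a", "b"])
    assert match_re("hello 42\nbye", [r"hello \d+", "bye"])
    assert not match_re("hello x", [r"hello \d+"])

Writing expected output as one pattern per line of real output lets a test absorb run-specific details such as absolute paths without failing on them.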
diff --git a/tools/gyp/test/lib/TestCommon.py b/tools/gyp/test/lib/TestCommon.py
deleted file mode 100644
index 2f526a6..0000000
--- a/tools/gyp/test/lib/TestCommon.py
+++ /dev/null
@@ -1,582 +0,0 @@
-"""
-TestCommon.py:  a testing framework for commands and scripts
-                with commonly useful error handling
-
-The TestCommon module provides a simple, high-level interface for writing
-tests of executable commands and scripts, especially commands and scripts
-that interact with the file system.  All methods throw exceptions and
-exit on failure, with useful error messages.  This makes a number of
-explicit checks unnecessary, making the test scripts themselves simpler
-to write and easier to read.
-
-The TestCommon class is a subclass of the TestCmd class.  In essence,
-TestCommon is a wrapper that handles common TestCmd error conditions in
-useful ways.  You can use TestCommon directly, or subclass it for your
-program and add additional (or override) methods to tailor it to your
-program's specific needs.  Alternatively, the TestCommon class serves
-as a useful example of how to define your own TestCmd subclass.
-
-As a subclass of TestCmd, TestCommon provides access to all of the
-variables and methods from the TestCmd module.  Consequently, you can
-use any variable or method documented in the TestCmd module without
-having to explicitly import TestCmd.
-
-A TestCommon environment object is created via the usual invocation:
-
-    import TestCommon
-    test = TestCommon.TestCommon()
-
-You can use all of the TestCmd keyword arguments when instantiating a
-TestCommon object; see the TestCmd documentation for details.
-
-Here is an overview of the methods and keyword arguments that are
-provided by the TestCommon class:
-
-    test.must_be_writable('file1', ['file2', ...])
-
-    test.must_contain('file', 'required text\n')
-
-    test.must_contain_all_lines(output, lines, ['title', find])
-
-    test.must_contain_any_line(output, lines, ['title', find])
-
-    test.must_exist('file1', ['file2', ...])
-
-    test.must_match('file', "expected contents\n")
-
-    test.must_not_be_writable('file1', ['file2', ...])
-
-    test.must_not_contain('file', 'banned text\n')
-
-    test.must_not_contain_any_line(output, lines, ['title', find])
-
-    test.must_not_exist('file1', ['file2', ...])
-
-    test.run(options = "options to be prepended to arguments",
-             stdout = "expected standard output from the program",
-             stderr = "expected error output from the program",
-             status = expected_status,
-             match = match_function)
-
-The TestCommon module also provides the following variables
-
-    TestCommon.python_executable
-    TestCommon.exe_suffix
-    TestCommon.obj_suffix
-    TestCommon.shobj_prefix
-    TestCommon.shobj_suffix
-    TestCommon.lib_prefix
-    TestCommon.lib_suffix
-    TestCommon.dll_prefix
-    TestCommon.dll_suffix
-
-"""
-
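As the overview above describes, a test script builds one TestCommon object and then chains assertions that exit the script with FAILED, PASSED, or NO RESULT. Here is a minimal sketch of such a script, assuming a hypothetical program named hello that prints "hello world" and writes hello.log; neither the program nor its output is taken from this patch.

import TestCommon

# Create the environment; workdir='' requests a fresh temporary directory,
# and TestCommon chdirs into it.
test = TestCommon.TestCommon(program='hello', workdir='')

# run() exits with FAILED unless the exit status is 0 and stdout matches
# the expected pattern (line-by-line match_re matching by default).
test.run(stdout='hello world\n')

# Check a side effect of the program, then report PASSED and clean up.
test.must_exist('hello.log')
test.pass_test()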
-# Copyright 2000-2010 Steven Knight
-# This module is free software, and you may redistribute it and/or modify
-# it under the same terms as Python itself, so long as this copyright message
-# and disclaimer are retained in their original form.
-#
-# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
-# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
-# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-#
-# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-# PARTICULAR PURPOSE.  THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
-# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
-# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-__author__ = "Steven Knight <knight at baldmt dot com>"
-__revision__ = "TestCommon.py 0.37.D001 2010/01/11 16:55:50 knight"
-__version__ = "0.37"
-
-import copy
-import os
-import os.path
-import stat
-import string
-import sys
-import types
-import UserList
-
-from TestCmd import *
-from TestCmd import __all__
-
-__all__.extend([ 'TestCommon',
-                 'exe_suffix',
-                 'obj_suffix',
-                 'shobj_prefix',
-                 'shobj_suffix',
-                 'lib_prefix',
-                 'lib_suffix',
-                 'dll_prefix',
-                 'dll_suffix',
-               ])
-
-# Variables that describe the prefixes and suffixes on this system.
-if sys.platform == 'win32':
-    exe_suffix    = '.exe'
-    obj_suffix    = '.obj'
-    shobj_suffix  = '.obj'
-    shobj_prefix  = ''
-    lib_prefix    = ''
-    lib_suffix    = '.lib'
-    dll_prefix    = ''
-    dll_suffix    = '.dll'
-    module_prefix = ''
-    module_suffix = '.dll'
-elif sys.platform == 'cygwin':
-    exe_suffix    = '.exe'
-    obj_suffix    = '.o'
-    shobj_suffix  = '.os'
-    shobj_prefix  = ''
-    lib_prefix    = 'lib'
-    lib_suffix    = '.a'
-    dll_prefix    = ''
-    dll_suffix    = '.dll'
-    module_prefix = ''
-    module_suffix = '.dll'
-elif string.find(sys.platform, 'irix') != -1:
-    exe_suffix    = ''
-    obj_suffix    = '.o'
-    shobj_suffix  = '.o'
-    shobj_prefix  = ''
-    lib_prefix    = 'lib'
-    lib_suffix    = '.a'
-    dll_prefix    = 'lib'
-    dll_suffix    = '.so'
-    module_prefix = 'lib'
-    module_suffix = '.so'
-elif string.find(sys.platform, 'darwin') != -1:
-    exe_suffix    = ''
-    obj_suffix    = '.o'
-    shobj_suffix  = '.os'
-    shobj_prefix  = ''
-    lib_prefix    = 'lib'
-    lib_suffix    = '.a'
-    dll_prefix    = 'lib'
-    dll_suffix    = '.dylib'
-    module_prefix = ''
-    module_suffix = '.so'
-elif string.find(sys.platform, 'sunos') != -1:
-    exe_suffix    = ''
-    obj_suffix    = '.o'
-    shobj_suffix  = '.os'
-    shobj_prefix  = 'so_'
-    lib_prefix    = 'lib'
-    lib_suffix    = '.a'
-    dll_prefix    = 'lib'
-    dll_suffix    = '.so'
-    module_prefix = ''
-    module_suffix = '.so'
-else:
-    exe_suffix    = ''
-    obj_suffix    = '.o'
-    shobj_suffix  = '.os'
-    shobj_prefix  = ''
-    lib_prefix    = 'lib'
-    lib_suffix    = '.a'
-    dll_prefix    = 'lib'
-    dll_suffix    = '.so'
-    module_prefix = 'lib'
-    module_suffix = '.so'
-
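These per-platform prefixes and suffixes exist so a test can predict the file name of a build product (executable, object file, static or shared library) without hard-coding platform conventions. A short sketch of how they get combined; the artifact_name helper and the two-way platform split are illustrative assumptions, since the deleted module only exports the variables and leaves the concatenation to each test.

import sys

# Simplified two-way split; the deleted module distinguishes several platforms.
if sys.platform == 'win32':
    exe_suffix, lib_prefix, lib_suffix = '.exe', '', '.lib'
else:
    exe_suffix, lib_prefix, lib_suffix = '', 'lib', '.a'

def artifact_name(base, kind):
    # Hypothetical helper: build the platform-specific name of a build output.
    if kind == 'exe':
        return base + exe_suffix
    if kind == 'lib':
        return lib_prefix + base + lib_suffix
    raise ValueError('unknown artifact kind: %r' % kind)

# e.g. 'hello' and 'libfoo.a' on Linux, 'hello.exe' and 'foo.lib' on Windows.
print(artifact_name('hello', 'exe'))
print(artifact_name('foo', 'lib'))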
-def is_List(e):
-    return type(e) is types.ListType \
-        or isinstance(e, UserList.UserList)
-
-def is_writable(f):
-    mode = os.stat(f)[stat.ST_MODE]
-    return mode & stat.S_IWUSR
-
-def separate_files(flist):
-    existing = []
-    missing = []
-    for f in flist:
-        if os.path.exists(f):
-            existing.append(f)
-        else:
-            missing.append(f)
-    return existing, missing
-
-def _failed(self, status = 0):
-    if self.status is None or status is None:
-        return None
-    try:
-        return _status(self) not in status
-    except TypeError:
-        # status wasn't an iterable
-        return _status(self) != status
-
-def _status(self):
-    return self.status
-
-class TestCommon(TestCmd):
-
-    # Additional methods from the Perl Test::Cmd::Common module
-    # that we may wish to add in the future:
-    #
-    #  $test->subdir('subdir', ...);
-    #
-    #  $test->copy('src_file', 'dst_file');
-
-    def __init__(self, **kw):
-        """Initialize a new TestCommon instance.  This involves just
-        calling the base class initialization, and then changing directory
-        to the workdir.
-        """
-        apply(TestCmd.__init__, [self], kw)
-        os.chdir(self.workdir)
-
-    def must_be_writable(self, *files):
-        """Ensures that the specified file(s) exist and are writable.
-        An individual file can be specified as a list of directory names,
-        in which case the pathname will be constructed by concatenating
-        them.  Exits FAILED if any of the files does not exist or is
-        not writable.
-        """
-        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
-        existing, missing = separate_files(files)
-        unwritable = filter(lambda x, iw=is_writable: not iw(x), existing)
-        if missing:
-            print "Missing files: `%s'" % string.join(missing, "', `")
-        if unwritable:
-            print "Unwritable files: `%s'" % string.join(unwritable, "', `")
-        self.fail_test(missing + unwritable)
-
-    def must_contain(self, file, required, mode = 'rb'):
-        """Ensures that the specified file contains the required text.
-        """
-        file_contents = self.read(file, mode)
-        contains = (string.find(file_contents, required) != -1)
-        if not contains:
-            print "File `%s' does not contain required string." % file
-            print self.banner('Required string ')
-            print required
-            print self.banner('%s contents ' % file)
-            print file_contents
-            self.fail_test(not contains)
-
-    def must_contain_all_lines(self, output, lines, title=None, find=None):
-        """Ensures that the specified output string (first argument)
-        contains all of the specified lines (second argument).
-
-        An optional third argument can be used to describe the type
-        of output being searched, and only shows up in failure output.
-
-        An optional fourth argument can be used to supply a different
-        function, of the form "find(line, output)", to use when searching
-        for lines in the output.
-        """
-        if find is None:
-            find = lambda o, l: string.find(o, l) != -1
-        missing = []
-        for line in lines:
-            if not find(output, line):
-                missing.append(line)
-
-        if missing:
-            if title is None:
-                title = 'output'
-            sys.stdout.write("Missing expected lines from %s:\n" % title)
-            for line in missing:
-                sys.stdout.write('    ' + repr(line) + '\n')
-            sys.stdout.write(self.banner(title + ' '))
-            sys.stdout.write(output)
-            self.fail_test()
-
-    def must_contain_any_line(self, output, lines, title=None, find=None):
-        """Ensures that the specified output string (first argument)
-        contains at least one of the specified lines (second argument).
-
-        An optional third argument can be used to describe the type
-        of output being searched, and only shows up in failure output.
-
-        An optional fourth argument can be used to supply a different
-        function, of the form "find(line, output)", to use when searching
-        for lines in the output.
-        """
-        if find is None:
-            find = lambda o, l: string.find(o, l) != -1
-        for line in lines:
-            if find(output, line):
-                return
-
-        if title is None:
-            title = 'output'
-        sys.stdout.write("Missing any expected line from %s:\n" % title)
-        for line in lines:
-            sys.stdout.write('    ' + repr(line) + '\n')
-        sys.stdout.write(self.banner(title + ' '))
-        sys.stdout.write(output)
-        self.fail_test()
-
-    def must_contain_lines(self, lines, output, title=None):
-        # Deprecated; retain for backwards compatibility.
-        return self.must_contain_all_lines(output, lines, title)
-
-    def must_exist(self, *files):
-        """Ensures that the specified file(s) must exist.  An individual
-        file can be specified as a list of directory names, in which case the
-        pathname will be constructed by concatenating them.  Exits FAILED
-        if any of the files does not exist.
-        """
-        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
-        missing = filter(lambda x: not os.path.exists(x), files)
-        if missing:
-            print "Missing files: `%s'" % string.join(missing, "', `")
-            self.fail_test(missing)
-
-    def must_match(self, file, expect, mode = 'rb'):
-        """Matches the contents of the specified file (first argument)
-        against the expected contents (second argument).  The expected
-        contents are a list of lines or a string which will be split
-        on newlines.
-        """
-        file_contents = self.read(file, mode)
-        try:
-            self.fail_test(not self.match(file_contents, expect))
-        except KeyboardInterrupt:
-            raise
-        except:
-            print "Unexpected contents of `%s'" % file
-            self.diff(expect, file_contents, 'contents ')
-            raise
-
-    def must_not_contain(self, file, banned, mode = 'rb'):
-        """Ensures that the specified file doesn't contain the banned text.
-        """
-        file_contents = self.read(file, mode)
-        contains = (string.find(file_contents, banned) != -1)
-        if contains:
-            print "File `%s' contains banned string." % file
-            print self.banner('Banned string ')
-            print banned
-            print self.banner('%s contents ' % file)
-            print file_contents
-            self.fail_test(contains)
-
-    def must_not_contain_any_line(self, output, lines, title=None, find=None):
-        """Ensures that the specified output string (first argument)
-        does not contain any of the specified lines (second argument).
-
-        An optional third argument can be used to describe the type
-        of output being searched, and only shows up in failure output.
-
-        An optional fourth argument can be used to supply a different
-        function, of the form "find(line, output)", to use when searching
-        for lines in the output.
-        """
-        if find is None:
-            find = lambda o, l: string.find(o, l) != -1
-        unexpected = []
-        for line in lines:
-            if find(output, line):
-                unexpected.append(line)
-
-        if unexpected:
-            if title is None:
-                title = 'output'
-            sys.stdout.write("Unexpected lines in %s:\n" % title)
-            for line in unexpected:
-                sys.stdout.write('    ' + repr(line) + '\n')
-            sys.stdout.write(self.banner(title + ' '))
-            sys.stdout.write(output)
-            self.fail_test()
-
-    def must_not_contain_lines(self, lines, output, title=None):
-        return self.must_not_contain_any_line(output, lines, title)
-
-    def must_not_exist(self, *files):
-        """Ensures that the specified file(s) must not exist.
-        An individual file can be specified as a list of directory names, in
-        which case the pathname will be constructed by concatenating them.
-        Exits FAILED if any of the files exists.
-        """
-        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
-        existing = filter(os.path.exists, files)
-        if existing:
-            print "Unexpected files exist: `%s'" % string.join(existing, "', `")
-            self.fail_test(existing)
-
-    def must_not_be_writable(self, *files):
-        """Ensures that the specified file(s) exist and are not writable.
-        An individual file can be specified as a list of directory names,
-        in which case the pathname will be constructed by concatenating
-        them.  Exits FAILED if any of the files does not exist or is
-        writable.
-        """
-        files = map(lambda x: is_List(x) and apply(os.path.join, x) or x, files)
-        existing, missing = separate_files(files)
-        writable = filter(is_writable, existing)
-        if missing:
-            print "Missing files: `%s'" % string.join(missing, "', `")
-        if writable:
-            print "Writable files: `%s'" % string.join(writable, "', `")
-        self.fail_test(missing + writable)
-
-    def _complete(self, actual_stdout, expected_stdout,
-                        actual_stderr, expected_stderr, status, match):
-        """
-        Post-processes running a subcommand, checking for failure
-        status and displaying output appropriately.
-        """
-        if _failed(self, status):
-            expect = ''
-            if status != 0:
-                expect = " (expected %s)" % str(status)
-            print "%s returned %s%s" % (self.program, str(_status(self)), expect)
-            print self.banner('STDOUT ')
-            print actual_stdout
-            print self.banner('STDERR ')
-            print actual_stderr
-            self.fail_test()
-        if not expected_stdout is None and not match(actual_stdout, expected_stdout):
-            self.diff(expected_stdout, actual_stdout, 'STDOUT ')
-            if actual_stderr:
-                print self.banner('STDERR ')
-                print actual_stderr
-            self.fail_test()
-        if not expected_stderr is None and not match(actual_stderr, expected_stderr):
-            print self.banner('STDOUT ')
-            print actual_stdout
-            self.diff(expected_stderr, actual_stderr, 'STDERR ')
-            self.fail_test()
-
-    def start(self, program = None,
-                    interpreter = None,
-                    arguments = None,
-                    universal_newlines = None,
-                    **kw):
-        """
-        Starts a program or script for the test environment.
-
-        This handles the "options" keyword argument and exceptions.
-        """
-        options = kw.pop('options', None)
-        if options:
-            if arguments is None:
-                arguments = options
-            else:
-                arguments = options + " " + arguments
-
-        try:
-            return apply(TestCmd.start,
-                         (self, program, interpreter, arguments, universal_newlines),
-                         kw)
-        except KeyboardInterrupt:
-            raise
-        except Exception, e:
-            print self.banner('STDOUT ')
-            try:
-                print self.stdout()
-            except IndexError:
-                pass
-            print self.banner('STDERR ')
-            try:
-                print self.stderr()
-            except IndexError:
-                pass
-            cmd_args = self.command_args(program, interpreter, arguments)
-            sys.stderr.write('Exception trying to execute: %s\n' % cmd_args)
-            raise e
-
-    def finish(self, popen, stdout = None, stderr = '', status = 0, **kw):
-        """
-        Finishes and waits for the process being run under control of
-        the specified popen argument.  Additional arguments are similar
-        to those of the run() method:
-
-                stdout  The expected standard output from
-                        the command.  A value of None means
-                        don't test standard output.
-
-                stderr  The expected error output from
-                        the command.  A value of None means
-                        don't test error output.
-
-                status  The expected exit status from the
-                        command.  A value of None means don't
-                        test exit status.
-        """
-        apply(TestCmd.finish, (self, popen,), kw)
-        match = kw.get('match', self.match)
-        self._complete(self.stdout(), stdout,
-                       self.stderr(), stderr, status, match)
-
-    def run(self, options = None, arguments = None,
-                  stdout = None, stderr = '', status = 0, **kw):
-        """Runs the program under test, checking that the test succeeded.
-
-        The arguments are the same as the base TestCmd.run() method,
-        with the addition of:
-
-                options Extra options that get appended to the beginning
-                        of the arguments.
-
-                stdout  The expected standard output from
-                        the command.  A value of None means
-                        don't test standard output.
-
-                stderr  The expected error output from
-                        the command.  A value of None means
-                        don't test error output.
-
-                status  The expected exit status from the
-                        command.  A value of None means don't
-                        test exit status.
-
-        By default, this expects a successful exit (status = 0), does
-        not test standard output (stdout = None), and expects that error
-        output is empty (stderr = "").
-        """
-        if options:
-            if arguments is None:
-                arguments = options
-            else:
-                arguments = options + " " + arguments
-        kw['arguments'] = arguments
-        match = kw.pop('match', self.match)
-        apply(TestCmd.run, [self], kw)
-        self._complete(self.stdout(), stdout,
-                       self.stderr(), stderr, status, match)
-
-    def skip_test(self, message="Skipping test.\n"):
-        """Skips a test.
-
-        Proper test-skipping behavior is dependent on the external
-        TESTCOMMON_PASS_SKIPS environment variable.  If set, we treat
-        the skip as a PASS (exit 0), and otherwise treat it as NO RESULT.
-        In either case, we print the specified message as an indication
-        that the substance of the test was skipped.
-
-        (This was originally added to support development under Aegis.
-        Technically, skipping a test is a NO RESULT, but Aegis would
-        treat that as a test failure and prevent the change from going to
-        the next step.  Since we didn't want to force anyone using Aegis
-        to have to install absolutely every tool used by the tests, we
-        would actually report to Aegis that a skipped test has PASSED
-        so that the workflow isn't held up.)
-        """
-        if message:
-            sys.stdout.write(message)
-            sys.stdout.flush()
-        pass_skips = os.environ.get('TESTCOMMON_PASS_SKIPS')
-        if pass_skips in [None, 0, '0']:
-            # skip=1 means skip this function when showing where this
-            # result came from.  Users only care about the line where the
-            # script called test.skip_test(), not the line number where
-            # we call test.no_result().
-            self.no_result(skip=1)
-        else:
-            # We're under the development directory for this change,
-            # so this is an Aegis invocation; pass the test (exit 0).
-            self.pass_test()
-
-# Local Variables:
-# tab-width:4
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=4 shiftwidth=4:
diff --git a/tools/gyp/test/lib/TestGyp.py b/tools/gyp/test/lib/TestGyp.py
deleted file mode 100644
index fc7b135..0000000
--- a/tools/gyp/test/lib/TestGyp.py
+++ /dev/null
@@ -1,1193 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-TestGyp.py:  a testing framework for GYP integration tests.
-"""
-
-import collections
-from contextlib import contextmanager
-import errno
-import itertools
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-
-import TestCmd
-import TestCommon
-from TestCommon import __all__
-
-__all__.extend([
-  'TestGyp',
-])
-
-
-def remove_debug_line_numbers(contents):
-  """Function to remove the line numbers from the debug output
-  of gyp and thus reduce the extreme fragility of the stdout
-  comparison tests.
-  """
-  lines = contents.splitlines()
-  # split each line on ":"
-  lines = [l.split(":", 3) for l in lines]
-  # join each line back together while ignoring the
-  # 3rd column which is the line number
-  lines = [len(l) > 3 and ":".join(l[3:]) or l for l in lines]
-  return "\n".join(lines)
-
-
-def match_modulo_line_numbers(contents_a, contents_b):
-  """File contents matcher that ignores line numbers."""
-  contents_a = remove_debug_line_numbers(contents_a)
-  contents_b = remove_debug_line_numbers(contents_b)
-  return TestCommon.match_exact(contents_a, contents_b)
-
-
-@contextmanager
-def LocalEnv(local_env):
-  """Context manager to provide a local OS environment."""
-  old_env = os.environ.copy()
-  os.environ.update(local_env)
-  try:
-    yield
-  finally:
-    os.environ.clear()
-    os.environ.update(old_env)
-
-
-class TestGypBase(TestCommon.TestCommon):
-  """
-  Class for controlling end-to-end tests of gyp generators.
-
-  Instantiating this class will create a temporary directory and
-  arrange for its destruction (via the TestCmd superclass) and
-  copy all of the non-gyptest files in the directory hierarchy of the
-  executing script.
-
-  The default behavior is to test the 'gyp' or 'gyp.bat' file in the
-  current directory.  An alternative may be specified explicitly on
-  instantiation, or by setting the TESTGYP_GYP environment variable.
-
-  This class should be subclassed for each supported gyp generator
-  (format).  Various abstract methods below define calling signatures
-  used by the test scripts to invoke builds on the generated build
-  configuration and to run executables generated by those builds.
-  """
-
-  formats = []
-  build_tool = None
-  build_tool_list = []
-
-  _exe = TestCommon.exe_suffix
-  _obj = TestCommon.obj_suffix
-  shobj_ = TestCommon.shobj_prefix
-  _shobj = TestCommon.shobj_suffix
-  lib_ = TestCommon.lib_prefix
-  _lib = TestCommon.lib_suffix
-  dll_ = TestCommon.dll_prefix
-  _dll = TestCommon.dll_suffix
-  module_ = TestCommon.module_prefix
-  _module = TestCommon.module_suffix
-
-  # Constants to represent different targets.
-  ALL = '__all__'
-  DEFAULT = '__default__'
-
-  # Constants for different target types.
-  EXECUTABLE = '__executable__'
-  STATIC_LIB = '__static_lib__'
-  SHARED_LIB = '__shared_lib__'
-  LOADABLE_MODULE = '__loadable_module__'
-
-  def __init__(self, gyp=None, *args, **kw):
-    self.origin_cwd = os.path.abspath(os.path.dirname(sys.argv[0]))
-    self.extra_args = sys.argv[1:]
-
-    if not gyp:
-      gyp = os.environ.get('TESTGYP_GYP')
-      if not gyp:
-        if sys.platform == 'win32':
-          gyp = 'gyp.bat'
-        else:
-          gyp = 'gyp'
-    self.gyp = os.path.abspath(gyp)
-    self.no_parallel = False
-
-    self.formats = [self.format]
-
-    self.initialize_build_tool()
-
-    kw.setdefault('match', TestCommon.match_exact)
-
-    # Put test output in out/testworkarea by default.
-    # Use temporary names so there are no collisions.
-    workdir = os.path.join('out', kw.get('workdir', 'testworkarea'))
-    # Create work area if it doesn't already exist.
-    if not os.path.isdir(workdir):
-      os.makedirs(workdir)
-
-    kw['workdir'] = tempfile.mktemp(prefix='testgyp.', dir=workdir)
-
-    formats = kw.pop('formats', [])
-
-    super(TestGypBase, self).__init__(*args, **kw)
-
-    real_format = self.format.split('-')[-1]
-    excluded_formats = set([f for f in formats if f[0] == '!'])
-    included_formats = set(formats) - excluded_formats
-    if ('!'+real_format in excluded_formats or
-        included_formats and real_format not in included_formats):
-      msg = 'Invalid test for %r format; skipping test.\n'
-      self.skip_test(msg % self.format)
-
-    self.copy_test_configuration(self.origin_cwd, self.workdir)
-    self.set_configuration(None)
-
-    # Set $HOME so that gyp doesn't read the user's actual
-    # ~/.gyp/include.gypi file, which may contain variables
-    # and other settings that would change the output.
-    os.environ['HOME'] = self.workpath()
-    # Clear $GYP_DEFINES for the same reason.
-    if 'GYP_DEFINES' in os.environ:
-      del os.environ['GYP_DEFINES']
-    # Override the user's language settings, which could
-    # otherwise make the output vary from what is expected.
-    os.environ['LC_ALL'] = 'C'
-
-  def built_file_must_exist(self, name, type=None, **kw):
-    """
-    Fails the test if the specified built file name does not exist.
-    """
-    return self.must_exist(self.built_file_path(name, type, **kw))
-
-  def built_file_must_not_exist(self, name, type=None, **kw):
-    """
-    Fails the test if the specified built file name exists.
-    """
-    return self.must_not_exist(self.built_file_path(name, type, **kw))
-
-  def built_file_must_match(self, name, contents, **kw):
-    """
-    Fails the test if the contents of the specified built file name
-    do not match the specified contents.
-    """
-    return self.must_match(self.built_file_path(name, **kw), contents)
-
-  def built_file_must_not_match(self, name, contents, **kw):
-    """
-    Fails the test if the contents of the specified built file name
-    match the specified contents.
-    """
-    return self.must_not_match(self.built_file_path(name, **kw), contents)
-
-  def built_file_must_not_contain(self, name, contents, **kw):
-    """
-    Fails the test if the specified built file name contains the specified
-    contents.
-    """
-    return self.must_not_contain(self.built_file_path(name, **kw), contents)
-
-  def copy_test_configuration(self, source_dir, dest_dir):
-    """
-    Copies the test configuration from the specified source_dir
-    (the directory in which the test script lives) to the
-    specified dest_dir (a temporary working directory).
-
-    This ignores all files and directories that begin with
-    the string 'gyptest', and all '.svn' subdirectories.
-    """
-    for root, dirs, files in os.walk(source_dir):
-      if '.svn' in dirs:
-        dirs.remove('.svn')
-      dirs = [ d for d in dirs if not d.startswith('gyptest') ]
-      files = [ f for f in files if not f.startswith('gyptest') ]
-      for dirname in dirs:
-        source = os.path.join(root, dirname)
-        destination = source.replace(source_dir, dest_dir)
-        os.mkdir(destination)
-        if sys.platform != 'win32':
-          shutil.copystat(source, destination)
-      for filename in files:
-        source = os.path.join(root, filename)
-        destination = source.replace(source_dir, dest_dir)
-        shutil.copy2(source, destination)
-
-    # The gyp tests are run with HOME pointing to |dest_dir| to provide a
-    # hermetic environment. Symlink login.keychain and the 'Provisioning
-    # Profiles' folder to allow codesign to access the data required for
-    # signing binaries.
-    if sys.platform == 'darwin':
-      old_keychain = GetDefaultKeychainPath()
-      old_provisioning_profiles = os.path.join(
-          os.environ['HOME'], 'Library', 'MobileDevice',
-          'Provisioning Profiles')
-
-      new_keychain = os.path.join(dest_dir, 'Library', 'Keychains')
-      MakeDirs(new_keychain)
-      os.symlink(old_keychain, os.path.join(new_keychain, 'login.keychain'))
-
-      if os.path.exists(old_provisioning_profiles):
-        new_provisioning_profiles = os.path.join(
-            dest_dir, 'Library', 'MobileDevice')
-        MakeDirs(new_provisioning_profiles)
-        os.symlink(old_provisioning_profiles,
-            os.path.join(new_provisioning_profiles, 'Provisioning Profiles'))
-
-  def initialize_build_tool(self):
-    """
-    Initializes the .build_tool attribute.
-
-    Searches the .build_tool_list for an executable name on the user's
-    $PATH.  The first tool on the list is used as-is if nothing is found
-    on the current $PATH.
-    """
-    for build_tool in self.build_tool_list:
-      if not build_tool:
-        continue
-      if os.path.isabs(build_tool):
-        self.build_tool = build_tool
-        return
-      build_tool = self.where_is(build_tool)
-      if build_tool:
-        self.build_tool = build_tool
-        return
-
-    if self.build_tool_list:
-      self.build_tool = self.build_tool_list[0]
-
-  def relocate(self, source, destination):
-    """
-    Renames (relocates) the specified source (usually a directory)
-    to the specified destination, creating the destination directory
-    first if necessary.
-
-    Note:  Don't use this as a generic "rename" operation.  In the
-    future, "relocating" parts of a GYP tree may affect the state of
-    the test to modify the behavior of later method calls.
-    """
-    destination_dir = os.path.dirname(destination)
-    if not os.path.exists(destination_dir):
-      self.subdir(destination_dir)
-    os.rename(source, destination)
-
-  def report_not_up_to_date(self):
-    """
-    Reports that a build is not up-to-date.
-
-    This provides common reporting for formats that have complicated
-    conditions for checking whether a build is up-to-date.  Formats
-    that expect exact output from the command (make) can
-    just set stdout= when they call the run_build() method.
-    """
-    print "Build is not up-to-date:"
-    print self.banner('STDOUT ')
-    print self.stdout()
-    stderr = self.stderr()
-    if stderr:
-      print self.banner('STDERR ')
-      print stderr
-
-  def run_gyp(self, gyp_file, *args, **kw):
-    """
-    Runs gyp against the specified gyp_file with the specified args.
-    """
-
-    # When running gyp and comparing its output, we use a comparator
-    # that ignores the line numbers that gyp logs in its debug output.
-    if kw.pop('ignore_line_numbers', False):
-      kw.setdefault('match', match_modulo_line_numbers)
-
-    # TODO:  --depth=. works around Chromium-specific tree climbing.
-    depth = kw.pop('depth', '.')
-    run_args = ['--depth='+depth]
-    run_args.extend(['--format='+f for f in self.formats])
-    run_args.append(gyp_file)
-    if self.no_parallel:
-      run_args += ['--no-parallel']
-    # TODO: if extra_args contains a '--build' flag
-    # we really want that to only apply to the last format (self.format).
-    run_args.extend(self.extra_args)
-    # Default xcode_ninja_target_pattern to ^.*$ to fix xcode-ninja tests
-    xcode_ninja_target_pattern = kw.pop('xcode_ninja_target_pattern', '.*')
-    run_args.extend(
-      ['-G', 'xcode_ninja_target_pattern=%s' % xcode_ninja_target_pattern])
-    run_args.extend(args)
-    return self.run(program=self.gyp, arguments=run_args, **kw)
-
-  def run(self, *args, **kw):
-    """
-    Executes a program by calling the superclass .run() method.
-
-    This exists to provide a common place to filter out keyword
-    arguments implemented in this layer, without having to update
-    the tool-specific subclasses or clutter the tests themselves
-    with platform-specific code.
-    """
-    if kw.has_key('SYMROOT'):
-      del kw['SYMROOT']
-    super(TestGypBase, self).run(*args, **kw)
-
-  def set_configuration(self, configuration):
-    """
-    Sets the configuration, to be used for invoking the build
-    tool and testing potential built output.
-    """
-    self.configuration = configuration
-
-  def configuration_dirname(self):
-    if self.configuration:
-      return self.configuration.split('|')[0]
-    else:
-      return 'Default'
-
-  def configuration_buildname(self):
-    if self.configuration:
-      return self.configuration
-    else:
-      return 'Default'
-
-  #
-  # Abstract methods to be defined by format-specific subclasses.
-  #
-
-  def build(self, gyp_file, target=None, **kw):
-    """
-    Runs a build of the specified target against the configuration
-    generated from the specified gyp_file.
-
-    A 'target' argument of None or the special value TestGyp.DEFAULT
-    specifies the default argument for the underlying build tool.
-    A 'target' argument of TestGyp.ALL specifies the 'all' target
-    (if any) of the underlying build tool.
-    """
-    raise NotImplementedError
-
-  def built_file_path(self, name, type=None, **kw):
-    """
-    Returns a path to the specified file name, of the specified type.
-    """
-    raise NotImplementedError
-
-  def built_file_basename(self, name, type=None, **kw):
-    """
-    Returns the base name of the specified file name, of the specified type.
-
-    A bare=True keyword argument specifies that prefixes and suffixes shouldn't
-    be applied.
-    """
-    if not kw.get('bare'):
-      if type == self.EXECUTABLE:
-        name = name + self._exe
-      elif type == self.STATIC_LIB:
-        name = self.lib_ + name + self._lib
-      elif type == self.SHARED_LIB:
-        name = self.dll_ + name + self._dll
-      elif type == self.LOADABLE_MODULE:
-        name = self.module_ + name + self._module
-    return name
-
-  def run_built_executable(self, name, *args, **kw):
-    """
-    Runs an executable program built from a gyp-generated configuration.
-
-    The specified name should be independent of any particular generator.
-    Subclasses should find the output executable in the appropriate
-    output build directory, tack on any necessary executable suffix, etc.
-    """
-    raise NotImplementedError
-
-  def up_to_date(self, gyp_file, target=None, **kw):
-    """
-    Verifies that a build of the specified target is up to date.
-
-    The subclass should implement this by calling build()
-    (or a reasonable equivalent), checking whatever conditions
-    will tell it the build was an "up to date" null build, and
-    failing if it isn't.
-    """
-    raise NotImplementedError
-
-
-class TestGypGypd(TestGypBase):
-  """
-  Subclass for testing the GYP 'gypd' generator (spit out the
-  internal data structure as pretty-printed Python).
-  """
-  format = 'gypd'
-  def __init__(self, gyp=None, *args, **kw):
-    super(TestGypGypd, self).__init__(*args, **kw)
-    # gypd implies the use of 'golden' files, so parallelizing conflicts as it
-    # causes ordering changes.
-    self.no_parallel = True
-
-
-class TestGypCustom(TestGypBase):
-  """
-  Subclass for testing the GYP with custom generator
-  """
-
-  def __init__(self, gyp=None, *args, **kw):
-    self.format = kw.pop("format")
-    super(TestGypCustom, self).__init__(*args, **kw)
-
-
-class TestGypCMake(TestGypBase):
-  """
-  Subclass for testing the GYP CMake generator, using cmake's ninja backend.
-  """
-  format = 'cmake'
-  build_tool_list = ['cmake']
-  ALL = 'all'
-
-  def cmake_build(self, gyp_file, target=None, **kw):
-    arguments = kw.get('arguments', [])[:]
-
-    self.build_tool_list = ['cmake']
-    self.initialize_build_tool()
-
-    chdir = os.path.join(kw.get('chdir', '.'),
-                         'out',
-                         self.configuration_dirname())
-    kw['chdir'] = chdir
-
-    arguments.append('-G')
-    arguments.append('Ninja')
-
-    kw['arguments'] = arguments
-
-    stderr = kw.get('stderr', None)
-    if stderr:
-      kw['stderr'] = stderr.split('$$$')[0]
-
-    self.run(program=self.build_tool, **kw)
-
-  def ninja_build(self, gyp_file, target=None, **kw):
-    arguments = kw.get('arguments', [])[:]
-
-    self.build_tool_list = ['ninja']
-    self.initialize_build_tool()
-
-    # Add a -C output/path to the command line.
-    arguments.append('-C')
-    arguments.append(os.path.join('out', self.configuration_dirname()))
-
-    if target not in (None, self.DEFAULT):
-      arguments.append(target)
-
-    kw['arguments'] = arguments
-
-    stderr = kw.get('stderr', None)
-    if stderr:
-      stderrs = stderr.split('$$$')
-      kw['stderr'] = stderrs[1] if len(stderrs) > 1 else ''
-
-    return self.run(program=self.build_tool, **kw)
-
-  def build(self, gyp_file, target=None, status=0, **kw):
-    # Two tools must be run to build: cmake and then ninja.
-    # Allow cmake to succeed when the overall expectation is to fail.
-    if status is None:
-      kw['status'] = None
-    else:
-      if not isinstance(status, collections.Iterable): status = (status,)
-      kw['status'] = list(itertools.chain((0,), status))
-    self.cmake_build(gyp_file, target, **kw)
-    kw['status'] = status
-    self.ninja_build(gyp_file, target, **kw)
-
-  def run_built_executable(self, name, *args, **kw):
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    if sys.platform == 'darwin':
-      configuration = self.configuration_dirname()
-      os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
-    return self.run(program=program, *args, **kw)
-
-  def built_file_path(self, name, type=None, **kw):
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    result.append('out')
-    result.append(self.configuration_dirname())
-    if type == self.STATIC_LIB:
-      if sys.platform != 'darwin':
-        result.append('obj.target')
-    elif type == self.SHARED_LIB:
-      if sys.platform != 'darwin' and sys.platform != 'win32':
-        result.append('lib.target')
-    subdir = kw.get('subdir')
-    if subdir and type != self.SHARED_LIB:
-      result.append(subdir)
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-  def up_to_date(self, gyp_file, target=None, **kw):
-    result = self.ninja_build(gyp_file, target, **kw)
-    if not result:
-      stdout = self.stdout()
-      if 'ninja: no work to do' not in stdout:
-        self.report_not_up_to_date()
-        self.fail_test()
-    return result
-
-
-class TestGypMake(TestGypBase):
-  """
-  Subclass for testing the GYP Make generator.
-  """
-  format = 'make'
-  build_tool_list = ['make']
-  ALL = 'all'
-  def build(self, gyp_file, target=None, **kw):
-    """
-    Runs a Make build using the Makefiles generated from the specified
-    gyp_file.
-    """
-    arguments = kw.get('arguments', [])[:]
-    if self.configuration:
-      arguments.append('BUILDTYPE=' + self.configuration)
-    if target not in (None, self.DEFAULT):
-      arguments.append(target)
-    # Sub-directory builds provide per-gyp Makefiles (i.e.
-    # Makefile.gyp_filename), so use that if there is no Makefile.
-    chdir = kw.get('chdir', '')
-    if not os.path.exists(os.path.join(chdir, 'Makefile')):
-      print "NO Makefile in " + os.path.join(chdir, 'Makefile')
-      arguments.insert(0, '-f')
-      arguments.insert(1, os.path.splitext(gyp_file)[0] + '.Makefile')
-    kw['arguments'] = arguments
-    return self.run(program=self.build_tool, **kw)
-  def up_to_date(self, gyp_file, target=None, **kw):
-    """
-    Verifies that a build of the specified Make target is up to date.
-    """
-    if target in (None, self.DEFAULT):
-      message_target = 'all'
-    else:
-      message_target = target
-    kw['stdout'] = "make: Nothing to be done for `%s'.\n" % message_target
-    return self.build(gyp_file, target, **kw)
-  def run_built_executable(self, name, *args, **kw):
-    """
-    Runs an executable built by Make.
-    """
-    configuration = self.configuration_dirname()
-    libdir = os.path.join('out', configuration, 'lib')
-    # TODO(piman): when everything is cross-compile safe, remove lib.target
-    if sys.platform == 'darwin':
-      # Mac puts target shared libraries right in the product directory.
-      configuration = self.configuration_dirname()
-      os.environ['DYLD_LIBRARY_PATH'] = (
-          libdir + '.host:' + os.path.join('out', configuration))
-    else:
-      os.environ['LD_LIBRARY_PATH'] = libdir + '.host:' + libdir + '.target'
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    return self.run(program=program, *args, **kw)
-  def built_file_path(self, name, type=None, **kw):
-    """
-    Returns a path to the specified file name, of the specified type,
-    as built by Make.
-
-    Built files are in the subdirectory 'out/{configuration}'.
-    The default is 'out/Default'.
-
-    A chdir= keyword argument specifies the source directory
-    relative to which  the output subdirectory can be found.
-
-    "type" values of STATIC_LIB or SHARED_LIB append the necessary
-    prefixes and suffixes to a platform-independent library base name.
-
-    A subdir= keyword argument specifies a library subdirectory within
-    the default 'obj.target'.
-    """
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    configuration = self.configuration_dirname()
-    result.extend(['out', configuration])
-    if type == self.STATIC_LIB and sys.platform != 'darwin':
-      result.append('obj.target')
-    elif type == self.SHARED_LIB and sys.platform != 'darwin':
-      result.append('lib.target')
-    subdir = kw.get('subdir')
-    if subdir and type != self.SHARED_LIB:
-      result.append(subdir)
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-
-def ConvertToCygpath(path):
-  """Convert to cygwin path if we are using cygwin."""
-  if sys.platform == 'cygwin':
-    p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
-    path = p.communicate()[0].strip()
-  return path
-
-
-def MakeDirs(new_dir):
-  """A wrapper around os.makedirs() that emulates "mkdir -p"."""
-  try:
-    os.makedirs(new_dir)
-  except OSError as e:
-    if e.errno != errno.EEXIST:
-      raise
-
-def GetDefaultKeychainPath():
-  """Get the keychain path, for used before updating HOME."""
-  assert sys.platform == 'darwin'
-  # Format is:
-  # $ security default-keychain
-  #     "/Some/Path/To/default.keychain"
-  path = subprocess.check_output(['security', 'default-keychain']).strip()
-  return path[1:-1]
-
-def FindMSBuildInstallation(msvs_version = 'auto'):
-  """Returns path to MSBuild for msvs_version or latest available.
-
-  Looks in the registry to find install location of MSBuild.
-  MSBuild before v4.0 will not build C++ projects, so only use newer versions.
-  """
-  import TestWin
-  registry = TestWin.Registry()
-
-  msvs_to_msbuild = {
-      '2013': r'12.0',
-      '2012': r'4.0',  # Really v4.0.30319 which comes with .NET 4.5.
-      '2010': r'4.0'}
-
-  msbuild_basekey = r'HKLM\SOFTWARE\Microsoft\MSBuild\ToolsVersions'
-  if not registry.KeyExists(msbuild_basekey):
-    print 'Error: could not find MSBuild base registry entry'
-    return None
-
-  msbuild_version = None
-  if msvs_version in msvs_to_msbuild:
-    msbuild_test_version = msvs_to_msbuild[msvs_version]
-    if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
-      msbuild_version = msbuild_test_version
-    else:
-      print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
-             'but corresponding MSBuild "%s" was not found.' %
-             (msvs_version, msbuild_test_version))
-  if not msbuild_version:
-    for msvs_version in sorted(msvs_to_msbuild, reverse=True):
-      msbuild_test_version = msvs_to_msbuild[msvs_version]
-      if registry.KeyExists(msbuild_basekey + '\\' + msbuild_test_version):
-        msbuild_version = msbuild_test_version
-        break
-  if not msbuild_version:
-    print 'Error: could not find MSBuild registry entry'
-    return None
-
-  msbuild_path = registry.GetValue(msbuild_basekey + '\\' + msbuild_version,
-                                   'MSBuildToolsPath')
-  if not msbuild_path:
-    print 'Error: could not get MSBuild registry entry value'
-    return None
-
-  return os.path.join(msbuild_path, 'MSBuild.exe')
-
-
-def FindVisualStudioInstallation():
-  """Returns appropriate values for .build_tool and .uses_msbuild fields
-  of TestGypBase for Visual Studio.
-
-  We use the value specified by GYP_MSVS_VERSION.  If not specified, we
-  search %PATH% and %PATHEXT% for a devenv.{exe,bat,...} executable.
-  Failing that, we search for likely deployment paths.
-  """
-  possible_roots = ['%s:\\Program Files%s' % (chr(drive), suffix)
-                    for drive in range(ord('C'), ord('Z') + 1)
-                    for suffix in ['', ' (x86)']]
-  possible_paths = {
-      '2015': r'Microsoft Visual Studio 14.0\Common7\IDE\devenv.com',
-      '2013': r'Microsoft Visual Studio 12.0\Common7\IDE\devenv.com',
-      '2012': r'Microsoft Visual Studio 11.0\Common7\IDE\devenv.com',
-      '2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com',
-      '2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com',
-      '2005': r'Microsoft Visual Studio 8\Common7\IDE\devenv.com'}
-
-  possible_roots = [ConvertToCygpath(r) for r in possible_roots]
-
-  msvs_version = 'auto'
-  for flag in (f for f in sys.argv if f.startswith('msvs_version=')):
-    msvs_version = flag.split('=')[-1]
-  msvs_version = os.environ.get('GYP_MSVS_VERSION', msvs_version)
-
-  if msvs_version in possible_paths:
-    # Check that the path to the specified GYP_MSVS_VERSION exists.
-    path = possible_paths[msvs_version]
-    for r in possible_roots:
-      build_tool = os.path.join(r, path)
-      if os.path.exists(build_tool):
-        uses_msbuild = msvs_version >= '2010'
-        msbuild_path = FindMSBuildInstallation(msvs_version)
-        return build_tool, uses_msbuild, msbuild_path
-    else:
-      print ('Warning: Environment variable GYP_MSVS_VERSION specifies "%s" '
-              'but corresponding "%s" was not found.' % (msvs_version, path))
-  # Neither GYP_MSVS_VERSION nor the path help us out.  Iterate through
-  # the choices looking for a match.
-  for version in sorted(possible_paths, reverse=True):
-    path = possible_paths[version]
-    for r in possible_roots:
-      build_tool = os.path.join(r, path)
-      if os.path.exists(build_tool):
-        uses_msbuild = msvs_version >= '2010'
-        msbuild_path = FindMSBuildInstallation(msvs_version)
-        return build_tool, uses_msbuild, msbuild_path
-  print 'Error: could not find devenv'
-  sys.exit(1)
-
-class TestGypOnMSToolchain(TestGypBase):
-  """
-  Common subclass for testing generators that target the Microsoft Visual
-  Studio toolchain (cl, link, dumpbin, etc.)
-  """
-  @staticmethod
-  def _ComputeVsvarsPath(devenv_path):
-    devenv_dir = os.path.split(devenv_path)[0]
-    vsvars_path = os.path.join(devenv_path, '../../Tools/vsvars32.bat')
-    return vsvars_path
-
-  def initialize_build_tool(self):
-    super(TestGypOnMSToolchain, self).initialize_build_tool()
-    if sys.platform in ('win32', 'cygwin'):
-      build_tools = FindVisualStudioInstallation()
-      self.devenv_path, self.uses_msbuild, self.msbuild_path = build_tools
-      self.vsvars_path = TestGypOnMSToolchain._ComputeVsvarsPath(
-          self.devenv_path)
-
-  def run_dumpbin(self, *dumpbin_args):
-    """Run the dumpbin tool with the specified arguments, and capturing and
-    returning stdout."""
-    assert sys.platform in ('win32', 'cygwin')
-    cmd = os.environ.get('COMSPEC', 'cmd.exe')
-    arguments = [cmd, '/c', self.vsvars_path, '&&', 'dumpbin']
-    arguments.extend(dumpbin_args)
-    proc = subprocess.Popen(arguments, stdout=subprocess.PIPE)
-    output = proc.communicate()[0]
-    assert not proc.returncode
-    return output
-
-class TestGypNinja(TestGypOnMSToolchain):
-  """
-  Subclass for testing the GYP Ninja generator.
-  """
-  format = 'ninja'
-  build_tool_list = ['ninja']
-  ALL = 'all'
-  DEFAULT = 'all'
-
-  def run_gyp(self, gyp_file, *args, **kw):
-    TestGypBase.run_gyp(self, gyp_file, *args, **kw)
-
-  def build(self, gyp_file, target=None, **kw):
-    arguments = kw.get('arguments', [])[:]
-
-    # Add a -C output/path to the command line.
-    arguments.append('-C')
-    arguments.append(os.path.join('out', self.configuration_dirname()))
-
-    if target is None:
-      target = 'all'
-    arguments.append(target)
-
-    kw['arguments'] = arguments
-    return self.run(program=self.build_tool, **kw)
-
-  def run_built_executable(self, name, *args, **kw):
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    if sys.platform == 'darwin':
-      configuration = self.configuration_dirname()
-      os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
-    return self.run(program=program, *args, **kw)
-
-  def built_file_path(self, name, type=None, **kw):
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    result.append('out')
-    result.append(self.configuration_dirname())
-    if type == self.STATIC_LIB:
-      if sys.platform != 'darwin':
-        result.append('obj')
-    elif type == self.SHARED_LIB:
-      if sys.platform != 'darwin' and sys.platform != 'win32':
-        result.append('lib')
-    subdir = kw.get('subdir')
-    if subdir and type != self.SHARED_LIB:
-      result.append(subdir)
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-  def up_to_date(self, gyp_file, target=None, **kw):
-    result = self.build(gyp_file, target, **kw)
-    if not result:
-      stdout = self.stdout()
-      if 'ninja: no work to do' not in stdout:
-        self.report_not_up_to_date()
-        self.fail_test()
-    return result
-
-
-class TestGypMSVS(TestGypOnMSToolchain):
-  """
-  Subclass for testing the GYP Visual Studio generator.
-  """
-  format = 'msvs'
-
-  u = r'=== Build: 0 succeeded, 0 failed, (\d+) up-to-date, 0 skipped ==='
-  up_to_date_re = re.compile(u, re.M)
-
-  # Initial None element will indicate to our .initialize_build_tool()
-  # method below that 'devenv' was not found on %PATH%.
-  #
-  # Note:  we must use devenv.com to be able to capture build output.
-  # Directly executing devenv.exe only sends output to BuildLog.htm.
-  build_tool_list = [None, 'devenv.com']
-
-  def initialize_build_tool(self):
-    super(TestGypMSVS, self).initialize_build_tool()
-    self.build_tool = self.devenv_path
-
-  def build(self, gyp_file, target=None, rebuild=False, clean=False, **kw):
-    """
-    Runs a Visual Studio build using the configuration generated
-    from the specified gyp_file.
-    """
-    configuration = self.configuration_buildname()
-    if clean:
-      build = '/Clean'
-    elif rebuild:
-      build = '/Rebuild'
-    else:
-      build = '/Build'
-    arguments = kw.get('arguments', [])[:]
-    arguments.extend([gyp_file.replace('.gyp', '.sln'),
-                      build, configuration])
-    # Note:  the Visual Studio generator doesn't add an explicit 'all'
-    # target, so we just treat it the same as the default.
-    if target not in (None, self.ALL, self.DEFAULT):
-      arguments.extend(['/Project', target])
-    if self.configuration:
-      arguments.extend(['/ProjectConfig', self.configuration])
-    kw['arguments'] = arguments
-    return self.run(program=self.build_tool, **kw)
-  def up_to_date(self, gyp_file, target=None, **kw):
-    """
-    Verifies that a build of the specified Visual Studio target is up to date.
-
-    Beware that VS2010 will behave strangely if you build under
-    C:\USERS\yourname\AppData\Local. It will cause needless work.  The output
-    will be "1 succeeded and 0 up to date".  MSBuild tracing reveals that:
-    "Project 'C:\Users\...\AppData\Local\...vcxproj' not up to date because
-    'C:\PROGRAM FILES (X86)\MICROSOFT VISUAL STUDIO 10.0\VC\BIN\1033\CLUI.DLL'
-    was modified at 02/21/2011 17:03:30, which is newer than '' which was
-    modified at 01/01/0001 00:00:00.
-
-    The workaround is to specify a workdir when instantiating the test, e.g.
-    test = TestGyp.TestGyp(workdir='workarea')
-    """
-    result = self.build(gyp_file, target, **kw)
-    if not result:
-      stdout = self.stdout()
-
-      m = self.up_to_date_re.search(stdout)
-      up_to_date = m and int(m.group(1)) > 0
-      if not up_to_date:
-        self.report_not_up_to_date()
-        self.fail_test()
-    return result
-  def run_built_executable(self, name, *args, **kw):
-    """
-    Runs an executable built by Visual Studio.
-    """
-    configuration = self.configuration_dirname()
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    return self.run(program=program, *args, **kw)
-  def built_file_path(self, name, type=None, **kw):
-    """
-    Returns a path to the specified file name, of the specified type,
-    as built by Visual Studio.
-
-    Built files are in a subdirectory that matches the configuration
-    name.  The default is 'Default'.
-
-    A chdir= keyword argument specifies the source directory
-    relative to which  the output subdirectory can be found.
-
-    "type" values of STATIC_LIB or SHARED_LIB append the necessary
-    prefixes and suffixes to a platform-independent library base name.
-    """
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    result.append(self.configuration_dirname())
-    if type == self.STATIC_LIB:
-      result.append('lib')
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-
-class TestGypMSVSNinja(TestGypNinja):
-  """
-  Subclass for testing the GYP Visual Studio Ninja generator.
-  """
-  format = 'msvs-ninja'
-
-  def initialize_build_tool(self):
-    super(TestGypMSVSNinja, self).initialize_build_tool()
-    # When using '--build', make sure ninja is first in the format list.
-    self.formats.insert(0, 'ninja')
-
-  def build(self, gyp_file, target=None, rebuild=False, clean=False, **kw):
-    """
-    Runs a Visual Studio build using the configuration generated
-    from the specified gyp_file.
-    """
-    arguments = kw.get('arguments', [])[:]
-    if target in (None, self.ALL, self.DEFAULT):
-      # Note: the Visual Studio generator doesn't add an explicit 'all' target.
-      # This will build each project. This will work if projects are hermetic,
-      # but may fail if they are not (a project may run more than once).
-      # It would be nice to supply an all.metaproj for MSBuild.
-      arguments.extend([gyp_file.replace('.gyp', '.sln')])
-    else:
-      # MSBuild documentation claims that one can specify a sln but then build a
-      # project target like 'msbuild a.sln /t:proj:target' but this format only
-      # supports 'Clean', 'Rebuild', and 'Publish' (with none meaning Default).
-      # This limitation is due to the .sln -> .sln.metaproj conversion.
-      # The ':' is not special, 'proj:target' is a target in the metaproj.
-      arguments.extend([target+'.vcxproj'])
-
-    if clean:
-      build = 'Clean'
-    elif rebuild:
-      build = 'Rebuild'
-    else:
-      build = 'Build'
-    arguments.extend(['/target:'+build])
-    configuration = self.configuration_buildname()
-    config = configuration.split('|')
-    arguments.extend(['/property:Configuration='+config[0]])
-    if len(config) > 1:
-      arguments.extend(['/property:Platform='+config[1]])
-    arguments.extend(['/property:BuildInParallel=false'])
-    arguments.extend(['/verbosity:minimal'])
-
-    kw['arguments'] = arguments
-    return self.run(program=self.msbuild_path, **kw)
-
-
-class TestGypXcode(TestGypBase):
-  """
-  Subclass for testing the GYP Xcode generator.
-  """
-  format = 'xcode'
-  build_tool_list = ['xcodebuild']
-
-  phase_script_execution = ("\n"
-                            "PhaseScriptExecution /\\S+/Script-[0-9A-F]+\\.sh\n"
-                            "    cd /\\S+\n"
-                            "    /bin/sh -c /\\S+/Script-[0-9A-F]+\\.sh\n"
-                            "(make: Nothing to be done for `all'\\.\n)?")
-
-  strip_up_to_date_expressions = [
-    # Various actions or rules can run even when the overall build target
-    # is up to date.  Strip those phases' GYP-generated output.
-    re.compile(phase_script_execution, re.S),
-
-    # The message from distcc_pump can trail the "BUILD SUCCEEDED"
-    # message, so strip that, too.
-    re.compile('__________Shutting down distcc-pump include server\n', re.S),
-  ]
-
-  up_to_date_endings = (
-    'Checking Dependencies...\n** BUILD SUCCEEDED **\n', # Xcode 3.0/3.1
-    'Check dependencies\n** BUILD SUCCEEDED **\n\n',     # Xcode 3.2
-    'Check dependencies\n\n\n** BUILD SUCCEEDED **\n\n', # Xcode 4.2
-    'Check dependencies\n\n** BUILD SUCCEEDED **\n\n',   # Xcode 5.0
-  )
-
-  def build(self, gyp_file, target=None, **kw):
-    """
-    Runs an xcodebuild using the .xcodeproj generated from the specified
-    gyp_file.
-    """
-    # Be sure we're working with a copy of 'arguments' since we modify it.
-    # The caller may not be expecting it to be modified.
-    arguments = kw.get('arguments', [])[:]
-    arguments.extend(['-project', gyp_file.replace('.gyp', '.xcodeproj')])
-    if target == self.ALL:
-      arguments.append('-alltargets',)
-    elif target not in (None, self.DEFAULT):
-      arguments.extend(['-target', target])
-    if self.configuration:
-      arguments.extend(['-configuration', self.configuration])
-    symroot = kw.get('SYMROOT', '$SRCROOT/build')
-    if symroot:
-      arguments.append('SYMROOT='+symroot)
-    kw['arguments'] = arguments
-
-    # Work around spurious stderr output from Xcode 4, http://crbug.com/181012
-    match = kw.pop('match', self.match)
-    def match_filter_xcode(actual, expected):
-      if actual:
-        if not TestCmd.is_List(actual):
-          actual = actual.split('\n')
-        if not TestCmd.is_List(expected):
-          expected = expected.split('\n')
-        actual = [a for a in actual
-                    if 'No recorder, buildTask: <Xcode3BuildTask:' not in a and
-                       'Beginning test session' not in a and
-                       'Writing diagnostic log' not in a and
-                       'Logs/Test/' not in a]
-      return match(actual, expected)
-    kw['match'] = match_filter_xcode
-
-    return self.run(program=self.build_tool, **kw)
-  def up_to_date(self, gyp_file, target=None, **kw):
-    """
-    Verifies that a build of the specified Xcode target is up to date.
-    """
-    result = self.build(gyp_file, target, **kw)
-    if not result:
-      output = self.stdout()
-      for expression in self.strip_up_to_date_expressions:
-        output = expression.sub('', output)
-      if not output.endswith(self.up_to_date_endings):
-        self.report_not_up_to_date()
-        self.fail_test()
-    return result
-  def run_built_executable(self, name, *args, **kw):
-    """
-    Runs an executable built by xcodebuild.
-    """
-    configuration = self.configuration_dirname()
-    os.environ['DYLD_LIBRARY_PATH'] = os.path.join('build', configuration)
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    return self.run(program=program, *args, **kw)
-  def built_file_path(self, name, type=None, **kw):
-    """
-    Returns a path to the specified file name, of the specified type,
-    as built by Xcode.
-
-    Built files are in the subdirectory 'build/{configuration}'.
-    The default is 'build/Default'.
-
-    A chdir= keyword argument specifies the source directory
-    relative to which  the output subdirectory can be found.
-
-    "type" values of STATIC_LIB or SHARED_LIB append the necessary
-    prefixes and suffixes to a platform-independent library base name.
-    """
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    configuration = self.configuration_dirname()
-    result.extend(['build', configuration])
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-
-class TestGypXcodeNinja(TestGypXcode):
-  """
-  Subclass for testing the GYP Xcode Ninja generator.
-  """
-  format = 'xcode-ninja'
-
-  def initialize_build_tool(self):
-    super(TestGypXcodeNinja, self).initialize_build_tool()
-    # When using '--build', make sure ninja is first in the format list.
-    self.formats.insert(0, 'ninja')
-
-  def build(self, gyp_file, target=None, **kw):
-    """
-    Runs an xcodebuild using the .xcodeproj generated from the specified
-    gyp_file.
-    """
-    build_config = self.configuration
-    if build_config and build_config.endswith(('-iphoneos',
-                                               '-iphonesimulator')):
-      build_config, sdk = self.configuration.split('-')
-      kw['arguments'] = kw.get('arguments', []) + ['-sdk', sdk]
-
-    with self._build_configuration(build_config):
-      return super(TestGypXcodeNinja, self).build(
-        gyp_file.replace('.gyp', '.ninja.gyp'), target, **kw)
-
-  @contextmanager
-  def _build_configuration(self, build_config):
-    config = self.configuration
-    self.configuration = build_config
-    try:
-      yield
-    finally:
-      self.configuration = config
-
-  def built_file_path(self, name, type=None, **kw):
-    result = []
-    chdir = kw.get('chdir')
-    if chdir:
-      result.append(chdir)
-    result.append('out')
-    result.append(self.configuration_dirname())
-    subdir = kw.get('subdir')
-    if subdir and type != self.SHARED_LIB:
-      result.append(subdir)
-    result.append(self.built_file_basename(name, type, **kw))
-    return self.workpath(*result)
-
-  def up_to_date(self, gyp_file, target=None, **kw):
-    result = self.build(gyp_file, target, **kw)
-    if not result:
-      stdout = self.stdout()
-      if 'ninja: no work to do' not in stdout:
-        self.report_not_up_to_date()
-        self.fail_test()
-    return result
-
-  def run_built_executable(self, name, *args, **kw):
-    """
-    Runs an executable built by xcodebuild + ninja.
-    """
-    configuration = self.configuration_dirname()
-    os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
-    # Enclosing the name in a list avoids prepending the original dir.
-    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
-    return self.run(program=program, *args, **kw)
-
-
-format_class_list = [
-  TestGypGypd,
-  TestGypCMake,
-  TestGypMake,
-  TestGypMSVS,
-  TestGypMSVSNinja,
-  TestGypNinja,
-  TestGypXcode,
-  TestGypXcodeNinja,
-]
-
-def TestGyp(*args, **kw):
-  """
-  Returns an appropriate TestGyp* instance for a specified GYP format.
-  """
-  format = kw.pop('format', os.environ.get('TESTGYP_FORMAT'))
-  for format_class in format_class_list:
-    if format == format_class.format:
-      return format_class(*args, **kw)
-  raise Exception, "unknown format %r" % format
diff --git a/tools/gyp/test/lib/TestMac.py b/tools/gyp/test/lib/TestMac.py
deleted file mode 100644
index 68605d7..0000000
--- a/tools/gyp/test/lib/TestMac.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-TestMac.py:  a collection of helper functions shared between tests on Mac OS X.
-"""
-
-import re
-import subprocess
-
-__all__ = ['Xcode', 'CheckFileType']
-
-
-def CheckFileType(test, file, archs):
-  """Check that |file| contains exactly |archs| or fails |test|."""
-  proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
-  o = proc.communicate()[0].strip()
-  assert not proc.returncode
-  if len(archs) == 1:
-    pattern = re.compile('^Non-fat file: (.*) is architecture: (.*)$')
-  else:
-    pattern = re.compile('^Architectures in the fat file: (.*) are: (.*)$')
-  match = pattern.match(o)
-  if match is None:
-    print 'Output does not match expected pattern: %s' % (pattern.pattern)
-    test.fail_test()
-  else:
-    found_file, found_archs = match.groups()
-    if found_file != file or set(found_archs.split()) != set(archs):
-      print 'Expected file %s with arch %s, got %s with arch %s' % (
-          file, ' '.join(archs), found_file, found_archs)
-      test.fail_test()
-
-
-class XcodeInfo(object):
-  """Simplify access to Xcode informations."""
-
-  def __init__(self):
-    self._cache = {}
-
-  def _XcodeVersion(self):
-    lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
-    version = ''.join(lines[0].split()[-1].split('.'))
-    version = (version + '0' * (3 - len(version))).zfill(4)
-    return version, lines[-1].split()[-1]
-
-  def Version(self):
-    if 'Version' not in self._cache:
-      self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
-    return self._cache['Version']
-
-  def Build(self):
-    if 'Build' not in self._cache:
-      self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
-    return self._cache['Build']
-
-  def SDKBuild(self):
-    if 'SDKBuild' not in self._cache:
-      self._cache['SDKBuild'] = subprocess.check_output(
-          ['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion'])
-      self._cache['SDKBuild'] = self._cache['SDKBuild'].rstrip('\n')
-    return self._cache['SDKBuild']
-
-  def SDKVersion(self):
-    if 'SDKVersion' not in self._cache:
-      self._cache['SDKVersion'] = subprocess.check_output(
-          ['xcodebuild', '-version', '-sdk', '', 'SDKVersion'])
-      self._cache['SDKVersion'] = self._cache['SDKVersion'].rstrip('\n')
-    return self._cache['SDKVersion']
-
-
-Xcode = XcodeInfo()
diff --git a/tools/gyp/test/lib/TestWin.py b/tools/gyp/test/lib/TestWin.py
deleted file mode 100644
index 7627197..0000000
--- a/tools/gyp/test/lib/TestWin.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-TestWin.py:  a collection of helpers for testing on Windows.
-"""
-
-import errno
-import os
-import re
-import sys
-import subprocess
-
-class Registry(object):
-  def _QueryBase(self, sysdir, key, value):
-    """Use reg.exe to read a particular key.
-
-    While ideally we might use the win32 module, we would like gyp to be
-    python neutral, so for instance cygwin python lacks this module.
-
-    Arguments:
-      sysdir: The system subdirectory to attempt to launch reg.exe from.
-      key: The registry key to read from.
-      value: The particular value to read.
-    Return:
-      stdout from reg.exe, or None for failure.
-    """
-    # Skip if not on Windows or Python Win32 setup issue
-    if sys.platform not in ('win32', 'cygwin'):
-      return None
-    # Setup params to pass to and attempt to launch reg.exe
-    cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
-           'query', key]
-    if value:
-      cmd.extend(['/v', value])
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    # Get the stdout from reg.exe, reading to the end so p.returncode is valid
-    # Note that the error text may be in [1] in some cases
-    text = p.communicate()[0]
-    # Check return code from reg.exe; officially 0==success and 1==error
-    if p.returncode:
-      return None
-    return text
-
-  def Query(self, key, value=None):
-    r"""Use reg.exe to read a particular key through _QueryBase.
-
-    First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
-    that fails, it falls back to System32.  Sysnative is available on Vista and
-    up, and on Windows Server 2003 and XP through KB patch 942589.  Note that
-    Sysnative will always fail when using 64-bit Python, since it is a virtual
-    directory; in that case System32 works correctly in the first place.
-
-    KB 942589 - http://support.microsoft.com/kb/942589/en-us.
-
-    Arguments:
-      key: The registry key.
-      value: The particular registry value to read (optional).
-    Return:
-      stdout from reg.exe, or None for failure.
-    """
-    text = None
-    try:
-      text = self._QueryBase('Sysnative', key, value)
-    except OSError, e:
-      if e.errno == errno.ENOENT:
-        text = self._QueryBase('System32', key, value)
-      else:
-        raise
-    return text
-
-  def GetValue(self, key, value):
-    """Use reg.exe to obtain the value of a registry key.
-
-    Args:
-      key: The registry key.
-      value: The particular registry value to read.
-    Return:
-      contents of the registry key's value, or None on failure.
-    """
-    text = self.Query(key, value)
-    if not text:
-      return None
-    # Extract value.
-    match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
-    if not match:
-      return None
-    return match.group(1)
-
-  def KeyExists(self, key):
-    """Use reg.exe to see if a key exists.
-
-    Args:
-      key: The registry key to check.
-    Return:
-      True if the key exists, False otherwise.
-    """
-    if not self.Query(key):
-      return False
-    return True
diff --git a/tools/gyp/test/library/gyptest-shared-obj-install-path.py b/tools/gyp/test/library/gyptest-shared-obj-install-path.py
deleted file mode 100755
index af33536..0000000
--- a/tools/gyp/test/library/gyptest-shared-obj-install-path.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that .so files that are order-only dependencies are specified by
-their install location rather than by their alias.
-"""
-
-# Python 2.5 needs this for the with statement.
-from __future__ import with_statement
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['make'])
-
-test.run_gyp('shared_dependency.gyp',
-             chdir='src')
-test.relocate('src', 'relocate/src')
-
-test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
-
-makefile_path = 'relocate/src/Makefile'
-
-with open(makefile_path) as makefile:
-  make_contents = makefile.read()
-
-# If we remove the code to generate lib1, Make should still be able
-# to build lib2 since lib1.so already exists.
-make_contents = make_contents.replace('include lib1.target.mk', '')
-with open(makefile_path, 'w') as makefile:
-  makefile.write(make_contents)
-
-test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/library/gyptest-shared.py b/tools/gyp/test/library/gyptest-shared.py
deleted file mode 100755
index a1d2985..0000000
--- a/tools/gyp/test/library/gyptest-shared.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple build of a "Hello, world!" program with shared libraries,
-including verifying that libraries are rebuilt correctly when functions
-move between libraries.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=shared_library',
-             '-Dmoveable_function=lib1',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib1_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=shared_library',
-             '-Dmoveable_function=lib2',
-             chdir='relocate/src')
-
-# Update program.c to force a rebuild.
-test.sleep()
-contents = test.read('relocate/src/program.c')
-contents = contents.replace('Hello', 'Hello again')
-test.write('relocate/src/program.c', contents)
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello again from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib2_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=shared_library',
-             '-Dmoveable_function=lib1',
-             chdir='relocate/src')
-
-# Update program.c to force a rebuild.
-test.sleep()
-contents = test.read('relocate/src/program.c')
-contents = contents.replace('again', 'again again')
-test.write('relocate/src/program.c', contents)
-
-# TODO(sgk):  we have to force a rebuild of lib2 so that it weeds out
-# the "moved" module.  This should be done in gyp by adding a dependency
-# on the generated .vcproj file itself.
-test.touch('relocate/src/lib2.c')
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello again again from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib1_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/library/gyptest-static.py b/tools/gyp/test/library/gyptest-static.py
deleted file mode 100755
index 4bc71c4..0000000
--- a/tools/gyp/test/library/gyptest-static.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple build of a "Hello, world!" program with static libraries,
-including verifying that libraries are rebuilt correctly when functions
-move between libraries.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=static_library',
-             '-Dmoveable_function=lib1',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib1_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=static_library',
-             '-Dmoveable_function=lib2',
-             chdir='relocate/src')
-
-# Update program.c to force a rebuild.
-test.sleep()
-contents = test.read('relocate/src/program.c')
-contents = contents.replace('Hello', 'Hello again')
-test.write('relocate/src/program.c', contents)
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello again from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib2_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.run_gyp('library.gyp',
-             '-Dlibrary=static_library',
-             '-Dmoveable_function=lib1',
-             chdir='relocate/src')
-
-# Update program.c and lib2.c to force a rebuild.
-test.sleep()
-contents = test.read('relocate/src/program.c')
-contents = contents.replace('again', 'again again')
-test.write('relocate/src/program.c', contents)
-
-# TODO(sgk):  we have to force a rebuild of lib2 so that it weeds out
-# the "moved" module.  This should be done in gyp by adding a dependency
-# on the generated .vcproj file itself.
-test.touch('relocate/src/lib2.c')
-
-test.build('library.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello again again from program.c
-Hello from lib1.c
-Hello from lib2.c
-Hello from lib1_moveable.c
-"""
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/library/src/lib1.c b/tools/gyp/test/library/src/lib1.c
deleted file mode 100644
index 3866b1b..0000000
--- a/tools/gyp/test/library/src/lib1.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void lib1_function(void)
-{
-  fprintf(stdout, "Hello from lib1.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/library/src/lib1_moveable.c b/tools/gyp/test/library/src/lib1_moveable.c
deleted file mode 100644
index 5d3cc1d..0000000
--- a/tools/gyp/test/library/src/lib1_moveable.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void moveable_function(void)
-{
-  fprintf(stdout, "Hello from lib1_moveable.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/library/src/lib2.c b/tools/gyp/test/library/src/lib2.c
deleted file mode 100644
index 21dda72..0000000
--- a/tools/gyp/test/library/src/lib2.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void lib2_function(void)
-{
-  fprintf(stdout, "Hello from lib2.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/library/src/lib2_moveable.c b/tools/gyp/test/library/src/lib2_moveable.c
deleted file mode 100644
index f645071..0000000
--- a/tools/gyp/test/library/src/lib2_moveable.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void moveable_function(void)
-{
-  fprintf(stdout, "Hello from lib2_moveable.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/library/src/library.gyp b/tools/gyp/test/library/src/library.gyp
deleted file mode 100644
index bc35516..0000000
--- a/tools/gyp/test/library/src/library.gyp
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'moveable_function%': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': [
-        'lib1',
-        'lib2',
-      ],
-      'sources': [
-        'program.c',
-      ],
-    },
-    {
-      'target_name': 'lib1',
-      'type': '<(library)',
-      'sources': [
-        'lib1.c',
-      ],
-      'conditions': [
-        ['moveable_function=="lib1"', {
-          'sources': [
-            'lib1_moveable.c',
-          ],
-        }],
-      ],
-    },
-    {
-      'target_name': 'lib2',
-      'type': '<(library)',
-      'sources': [
-        'lib2.c',
-      ],
-      'conditions': [
-        ['moveable_function=="lib2"', {
-          'sources': [
-            'lib2_moveable.c',
-          ],
-        }],
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        # Support 64-bit shared libs (also works fine for 32-bit).
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/library/src/program.c b/tools/gyp/test/library/src/program.c
deleted file mode 100644
index d460f60..0000000
--- a/tools/gyp/test/library/src/program.c
+++ /dev/null
@@ -1,15 +0,0 @@
-#include <stdio.h>
-
-extern void lib1_function(void);
-extern void lib2_function(void);
-extern void moveable_function(void);
-
-int main(void)
-{
-  fprintf(stdout, "Hello from program.c\n");
-  fflush(stdout);
-  lib1_function();
-  lib2_function();
-  moveable_function();
-  return 0;
-}
diff --git a/tools/gyp/test/library/src/shared_dependency.gyp b/tools/gyp/test/library/src/shared_dependency.gyp
deleted file mode 100644
index 7d29f5d..0000000
--- a/tools/gyp/test/library/src/shared_dependency.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'lib1',
-      'type': 'shared_library',
-      'sources': [
-        'lib1.c',
-      ],
-    },
-    {
-      'target_name': 'lib2',
-      'type': 'shared_library',
-      'sources': [
-        'lib2.c',
-      ],
-      'dependencies': [
-        'lib1',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        # Support 64-bit shared libs (also works fine for 32-bit).
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/library_dirs/gyptest-library-dirs.py b/tools/gyp/test/library_dirs/gyptest-library-dirs.py
deleted file mode 100644
index e725dd1..0000000
--- a/tools/gyp/test/library_dirs/gyptest-library-dirs.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies library_dirs (in link_settings) are properly found.
-"""
-
-import sys
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-lib_dir = test.tempdir('secret_location')
-
-test.run_gyp('test.gyp',
-             '-D', 'abs_path_to_secret_library_location={0}'.format(lib_dir),
-             chdir='subdir')
-
-# Must build each target independently, since they are not in each others'
-# 'dependencies' (test.ALL does NOT work here for some builders, and in any case
-# would not ensure the correct ordering).
-test.build('test.gyp', 'mylib', chdir='subdir')
-test.build('test.gyp', 'libraries-search-path-test', chdir='subdir')
-
-expect = """Hello world
-"""
-test.run_built_executable(
-    'libraries-search-path-test', chdir='subdir', stdout=expect)
-
-if sys.platform in ('win32', 'cygwin'):
-  test.run_gyp('test-win.gyp',
-               '-D',
-               'abs_path_to_secret_library_location={0}'.format(lib_dir),
-               chdir='subdir')
-
-  test.build('test.gyp', 'mylib', chdir='subdir')
-  test.build('test-win.gyp',
-             'libraries-search-path-test-lib-suffix',
-             chdir='subdir')
-
-  test.run_built_executable(
-        'libraries-search-path-test-lib-suffix', chdir='subdir', stdout=expect)
-
-
-test.pass_test()
-test.cleanup()
diff --git a/tools/gyp/test/library_dirs/subdir/README.txt b/tools/gyp/test/library_dirs/subdir/README.txt
deleted file mode 100644
index 4031ded..0000000
--- a/tools/gyp/test/library_dirs/subdir/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-Make things live in a subdirectory, to make sure that DEPTH works correctly.
diff --git a/tools/gyp/test/library_dirs/subdir/hello.cc b/tools/gyp/test/library_dirs/subdir/hello.cc
deleted file mode 100644
index 5dbbd48..0000000
--- a/tools/gyp/test/library_dirs/subdir/hello.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <iostream>
-#include "mylib.h"
-
-int main() {
-  std::cout << "Hello " << my_foo(99) << std::endl;
-  return 0;
-}
diff --git a/tools/gyp/test/library_dirs/subdir/mylib.cc b/tools/gyp/test/library_dirs/subdir/mylib.cc
deleted file mode 100644
index 654f3d0..0000000
--- a/tools/gyp/test/library_dirs/subdir/mylib.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "mylib.h"
-
-std::string my_foo(int x) {
-  return std::string("world");
-}
diff --git a/tools/gyp/test/library_dirs/subdir/mylib.h b/tools/gyp/test/library_dirs/subdir/mylib.h
deleted file mode 100644
index 84b4022..0000000
--- a/tools/gyp/test/library_dirs/subdir/mylib.h
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
-#define TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
-
-#include <string>
-
-std::string my_foo(int);
-
-#endif  // TEST_LIBRARY_DIRS_SUBDIR_MYLIB_H
diff --git a/tools/gyp/test/library_dirs/subdir/test-win.gyp b/tools/gyp/test/library_dirs/subdir/test-win.gyp
deleted file mode 100644
index 033b6f7..0000000
--- a/tools/gyp/test/library_dirs/subdir/test-win.gyp
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      # This creates a static library and puts it in a nonstandard location for
-      # libraries-search-path-test.
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'standalone_static_library': 1,
-      # This directory is NOT in the default library search locations. It also
-      # MUST be passed in on the gyp command line:
-      #
-      #  -D abs_path_to_secret_library_location=/some_absolute_path
-      #
-      # The gyptest itself (../gyptest-library-dirs.py) provides this.
-      'product_dir': '<(abs_path_to_secret_library_location)',
-      'sources': [
-        'mylib.cc',
-      ],
-    },
-    {
-      'target_name': 'libraries-search-path-test-lib-suffix',
-      'type': 'executable',
-      'dependencies': [
-        # It is important to NOT list the mylib as a dependency here, because
-        # some build systems will track it down based on its product_dir,
-        # such that the link succeeds even without the library_dirs below.
-        #
-        # The point of this weird structuring is to ensure that 'library_dirs'
-        # works as advertised, such that just '-lmylib' (or its equivalent)
-        # works based on the directories that library_dirs puts in the library
-        # link path.
-        #
-        # If 'mylib' was listed as a proper dependency here, the build system
-        # would find it and link with its path on disk.
-        #
-        # Note that this implies 'mylib' must already be built when building
-        # 'libraries-search-path-test' (see ../gyptest-library-dirs.py).
-        #
-        #'mylib',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-      # Note that without this, the mylib library would not be found and
-      # successfully linked.
-      'library_dirs': [
-        '<(abs_path_to_secret_library_location)',
-      ],
-      'link_settings': {
-        'libraries': [
-          '-lmylib.lib',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/library_dirs/subdir/test.gyp b/tools/gyp/test/library_dirs/subdir/test.gyp
deleted file mode 100644
index f83d7f2..0000000
--- a/tools/gyp/test/library_dirs/subdir/test.gyp
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      # This creates a static library and puts it in a nonstandard location for
-      # libraries-search-path-test.
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'standalone_static_library': 1,
-      # This directory is NOT in the default library search locations. It also
-      # MUST be passed in on the gyp command line:
-      #
-      #  -D abs_path_to_secret_library_location=/some_absolute_path
-      #
-      # The gyptest itself (../gyptest-library-dirs.py) provides this.
-      'product_dir': '<(abs_path_to_secret_library_location)',
-      'sources': [
-        'mylib.cc',
-      ],
-    },
-    {
-      'target_name': 'libraries-search-path-test',
-      'type': 'executable',
-      'dependencies': [
-        # It is important to NOT list the mylib as a dependency here, because
-        # some build systems will track it down based on its product_dir,
-        # such that the link succeeds even without the library_dirs below.
-        #
-        # The point of this weird structuring is to ensure that 'library_dirs'
-        # works as advertised, such that just '-lmylib' (or its equivalent)
-        # works based on the directories that library_dirs puts in the library
-        # link path.
-        #
-        # If 'mylib' was listed as a proper dependency here, the build system
-        # would find it and link with its path on disk.
-        #
-        # Note that this implies 'mylib' must already be built when building
-        # 'libraries-search-path-test' (see ../gyptest-library-dirs.py).
-        #
-        #'mylib',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-      # Note that without this, the mylib library would not be found and
-      # successfully linked.
-      'library_dirs': [
-        '<(abs_path_to_secret_library_location)',
-      ],
-      'link_settings': {
-        'conditions': [
-          ['OS=="linux"', {
-            'libraries': [
-              '-lmylib',
-            ],
-          }, { # else
-            'libraries': [
-              '<(STATIC_LIB_PREFIX)mylib<(STATIC_LIB_SUFFIX)',
-            ],
-          }],
-        ],  # conditions
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/link-dependency/gyptest-link-dependency.py b/tools/gyp/test/link-dependency/gyptest-link-dependency.py
deleted file mode 100755
index 3a8300d..0000000
--- a/tools/gyp/test/link-dependency/gyptest-link-dependency.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that a target marked as 'link_dependency==1' isn't being pulled into
-the 'none' target's dependency (which would otherwise lead to a dependency
-cycle in ninja).
-"""
-
-import TestGyp
-
-# See https://codereview.chromium.org/177043010/#msg15 for why this doesn't
-# work with cmake.
-test = TestGyp.TestGyp(formats=['!cmake'])
-
-test.run_gyp('test.gyp')
-test.build('test.gyp', 'main')
-
-# If running gyp worked, all is well.
-test.pass_test()
diff --git a/tools/gyp/test/link-dependency/main.c b/tools/gyp/test/link-dependency/main.c
deleted file mode 100644
index 543d8b6..0000000
--- a/tools/gyp/test/link-dependency/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-int main() {
-  void *p = malloc(1);
-  printf("p: %p\n", p);
-  return 0;
-}
diff --git a/tools/gyp/test/link-dependency/mymalloc.c b/tools/gyp/test/link-dependency/mymalloc.c
deleted file mode 100644
index f80bc02..0000000
--- a/tools/gyp/test/link-dependency/mymalloc.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdlib.h>
-
-// The windows ninja generator is expecting an import library to get generated,
-// but it doesn't if there are no exports.
-#ifdef _MSC_VER
-__declspec(dllexport) void foo() {}
-#endif
-
-void *malloc(size_t size) {
-  (void)size;
-  return (void*)0xdeadbeef;
-}
diff --git a/tools/gyp/test/link-dependency/test.gyp b/tools/gyp/test/link-dependency/test.gyp
deleted file mode 100644
index 47cec15..0000000
--- a/tools/gyp/test/link-dependency/test.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-{
-  'variables': {
-    'custom_malloc%' : 1,
-  },
-  'target_defaults': {
-    'conditions': [
-      ['custom_malloc==1', {
-        'dependencies': [
-          'malloc',
-        ],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'main',
-      'type': 'none',
-      'dependencies': [ 'main_initial',],
-    },
-    {
-      'target_name': 'main_initial',
-      'type': 'executable',
-      'product_name': 'main',
-      'sources': [ 'main.c' ],
-    },
-    {
-      'target_name': 'malloc',
-      'type': 'shared_library',
-      'variables': {
-        'prune_self_dependency': 1,
-        # Targets with type 'none' won't depend on this target.
-        'link_dependency': 1,
-      },  
-      'sources': [ 'mymalloc.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/link-objects/base.c b/tools/gyp/test/link-objects/base.c
deleted file mode 100644
index 3327459..0000000
--- a/tools/gyp/test/link-objects/base.c
+++ /dev/null
@@ -1,6 +0,0 @@
-void extra();
-
-int main(void) {
-  extra();
-  return 0;
-}
diff --git a/tools/gyp/test/link-objects/extra.c b/tools/gyp/test/link-objects/extra.c
deleted file mode 100644
index 1d7ee09..0000000
--- a/tools/gyp/test/link-objects/extra.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#include <stdio.h>
-
-void extra() {
-  printf("PASS\n");
-}
diff --git a/tools/gyp/test/link-objects/gyptest-all.py b/tools/gyp/test/link-objects/gyptest-all.py
deleted file mode 100755
index 45bd6e1..0000000
--- a/tools/gyp/test/link-objects/gyptest-all.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Put an object file on the sources list.
-Expect the result to link ok.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform != 'darwin':
-  # Currently only works under the linux make build.
-  test = TestGyp.TestGyp(formats=['make'])
-
-  test.run_gyp('link-objects.gyp')
-
-  test.build('link-objects.gyp', test.ALL)
-
-  test.run_built_executable('link-objects', stdout="PASS\n")
-
-  test.up_to_date('link-objects.gyp', test.ALL)
-
-  test.pass_test()
diff --git a/tools/gyp/test/link-objects/link-objects.gyp b/tools/gyp/test/link-objects/link-objects.gyp
deleted file mode 100644
index ab72855..0000000
--- a/tools/gyp/test/link-objects/link-objects.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'link-objects',
-      'type': 'executable',
-      'actions': [
-        {
-          'action_name': 'build extra object',
-          'inputs': ['extra.c'],
-          'outputs': ['extra.o'],
-          'action': ['gcc', '-o', 'extra.o', '-c', 'extra.c'],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-      'sources': [
-        'base.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/linux/gyptest-implicit-rpath.py b/tools/gyp/test/linux/gyptest-implicit-rpath.py
deleted file mode 100644
index dd7718c..0000000
--- a/tools/gyp/test/linux/gyptest-implicit-rpath.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that the implicit rpath is added only when needed.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-
-if sys.platform.startswith('linux'):
-  test = TestGyp.TestGyp(formats=['ninja', 'make'])
-
-  CHDIR = 'implicit-rpath'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  def GetRpaths(p):
-    p = test.built_file_path(p, chdir=CHDIR)
-    r = re.compile(r'Library rpath: \[([^\]]+)\]')
-    proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-    assert not proc.returncode
-    return r.findall(o)
-
-  if test.format == 'ninja':
-    expect = '$ORIGIN/lib/'
-  elif test.format == 'make':
-    expect = '$ORIGIN/lib.target/'
-  else:
-    test.fail_test()
-
-  if GetRpaths('shared_executable') != [expect]:
-    test.fail_test()
-
-  if GetRpaths('shared_executable_no_so_suffix') != [expect]:
-    test.fail_test()
-
-  if GetRpaths('static_executable'):
-    test.fail_test()
-
-  test.pass_test()
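Editor's note: this rpath test (and the target-rpath test below) recovers DT_RPATH/DT_RUNPATH entries by scraping "readelf -d" output. A stand-alone Python 3 sketch of that extraction, assuming readelf is on PATH; the function name and example paths are illustrative, not from the deleted harness:

    import re
    import subprocess

    def get_rpaths(path):
        # "readelf -d" prints dynamic-section lines such as:
        #   0x000000000000000f (RPATH)  Library rpath: [$ORIGIN/lib/]
        out = subprocess.run(['readelf', '-d', path],
                             capture_output=True, text=True, check=True).stdout
        # Accept both "Library rpath" (DT_RPATH) and "Library runpath" (DT_RUNPATH).
        return re.findall(r'Library r(?:un)?path: \[([^\]]+)\]', out)

    # Example: a ninja-built shared_executable is expected to carry an
    # $ORIGIN-relative rpath pointing at its shared-library directory.
    # assert get_rpaths('out/Default/shared_executable') == ['$ORIGIN/lib/']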
diff --git a/tools/gyp/test/linux/gyptest-ldflags-duplicates.py b/tools/gyp/test/linux/gyptest-ldflags-duplicates.py
deleted file mode 100644
index 43a4607..0000000
--- a/tools/gyp/test/linux/gyptest-ldflags-duplicates.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies duplicate ldflags are not removed.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform.startswith('linux'):
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'ldflags-duplicates'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/linux/gyptest-target-rpath.py b/tools/gyp/test/linux/gyptest-target-rpath.py
deleted file mode 100644
index 2950a20..0000000
--- a/tools/gyp/test/linux/gyptest-target-rpath.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Check target_rpath generator flag for ninja.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-
-if sys.platform.startswith('linux'):
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'target-rpath'
-  test.run_gyp('test.gyp', '-G', 'target_rpath=/usr/lib/gyptest/', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  def GetRpaths(p):
-    p = test.built_file_path(p, chdir=CHDIR)
-    r = re.compile(r'Library rpath: \[([^\]]+)\]')
-    proc = subprocess.Popen(['readelf', '-d', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-    assert not proc.returncode
-    return r.findall(o)
-
-  expect = '/usr/lib/gyptest/'
-
-  if GetRpaths('shared_executable') != [expect]:
-    test.fail_test()
-
-  if GetRpaths('shared_executable_no_so_suffix') != [expect]:
-    test.fail_test()
-
-  if GetRpaths('static_executable'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/linux/implicit-rpath/file.c b/tools/gyp/test/linux/implicit-rpath/file.c
deleted file mode 100644
index 56757a7..0000000
--- a/tools/gyp/test/linux/implicit-rpath/file.c
+++ /dev/null
@@ -1 +0,0 @@
-void f() {}
diff --git a/tools/gyp/test/linux/implicit-rpath/main.c b/tools/gyp/test/linux/implicit-rpath/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/linux/implicit-rpath/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/linux/implicit-rpath/test.gyp b/tools/gyp/test/linux/implicit-rpath/test.gyp
deleted file mode 100644
index b546106..0000000
--- a/tools/gyp/test/linux/implicit-rpath/test.gyp
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'shared',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'shared_no_so_suffix',
-      'product_extension': 'so.0.1',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'static',
-      'type': 'static_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'shared_executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'shared',
-      ]
-    },
-    {
-      'target_name': 'shared_executable_no_so_suffix',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'shared_no_so_suffix',
-      ]
-    },
-    {
-      'target_name': 'static_executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'static',
-      ]
-    },
-  ],
-}
diff --git a/tools/gyp/test/linux/ldflags-duplicates/check-ldflags.py b/tools/gyp/test/linux/ldflags-duplicates/check-ldflags.py
deleted file mode 100755
index 0515da9..0000000
--- a/tools/gyp/test/linux/ldflags-duplicates/check-ldflags.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies duplicate ldflags are not removed.
-"""
-
-import sys
-
-def CheckContainsFlags(args, substring):
-  if args.find(substring) is -1:
-    print 'ERROR: Linker arguments "%s" are missing in "%s"' % (substring, args)
-    return False;
-  return True;
-
-if __name__ == '__main__':
-  args = " ".join(sys.argv)
-  print  "args = " +args
-  if not CheckContainsFlags(args, 'lib1.a -Wl,--no-whole-archive') \
-    or not CheckContainsFlags(args, 'lib2.a -Wl,--no-whole-archive'):
-    sys.exit(1);
-  sys.exit(0)
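Editor's note: the deleted check-ldflags.py wrapper is Python 2 (print statements) and compares the result of str.find() with "is", which only works by accident for small cached integers. A Python 3 sketch of the same duplicate-ldflags check, offered as an illustration rather than a drop-in replacement:

    #!/usr/bin/env python3
    import sys

    def contains_flags(args, substring):
        # str.find() returns -1 when absent; compare with ==, not "is".
        if args.find(substring) == -1:
            print('ERROR: linker arguments "%s" are missing "%s"' % (args, substring))
            return False
        return True

    if __name__ == '__main__':
        args = ' '.join(sys.argv)
        ok = (contains_flags(args, 'lib1.a -Wl,--no-whole-archive') and
              contains_flags(args, 'lib2.a -Wl,--no-whole-archive'))
        sys.exit(0 if ok else 1)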
diff --git a/tools/gyp/test/linux/ldflags-duplicates/lib1.c b/tools/gyp/test/linux/ldflags-duplicates/lib1.c
deleted file mode 100644
index a1322e7..0000000
--- a/tools/gyp/test/linux/ldflags-duplicates/lib1.c
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void foo() {
-}
diff --git a/tools/gyp/test/linux/ldflags-duplicates/lib2.c b/tools/gyp/test/linux/ldflags-duplicates/lib2.c
deleted file mode 100644
index 8e7a082..0000000
--- a/tools/gyp/test/linux/ldflags-duplicates/lib2.c
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void bar() {
-}
diff --git a/tools/gyp/test/linux/ldflags-duplicates/main.c b/tools/gyp/test/linux/ldflags-duplicates/main.c
deleted file mode 100644
index b3039ac..0000000
--- a/tools/gyp/test/linux/ldflags-duplicates/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/linux/ldflags-duplicates/test.gyp b/tools/gyp/test/linux/ldflags-duplicates/test.gyp
deleted file mode 100644
index c36835b..0000000
--- a/tools/gyp/test/linux/ldflags-duplicates/test.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['LINK_wrapper', './check-ldflags.py'],
-  ],
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'executable',
-      'ldflags': [
-        '-Wl,--whole-archive <(PRODUCT_DIR)/lib1.a',
-        '-Wl,--no-whole-archive',
-
-        '-Wl,--whole-archive <(PRODUCT_DIR)/lib2.a',
-        '-Wl,--no-whole-archive',
-      ],
-      'dependencies': [
-        'lib1',
-        'lib2',
-      ],
-      'sources': [
-        'main.c',
-      ],
-    },
-    {
-      'target_name': 'lib1',
-      'type': 'static_library',
-      'standalone_static_library': 1,
-      'sources': [
-        'lib1.c',
-      ],
-    },
-    {
-      'target_name': 'lib2',
-      'type': 'static_library',
-      'standalone_static_library': 1,
-      'sources': [
-        'lib2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/linux/target-rpath/file.c b/tools/gyp/test/linux/target-rpath/file.c
deleted file mode 100644
index 56757a7..0000000
--- a/tools/gyp/test/linux/target-rpath/file.c
+++ /dev/null
@@ -1 +0,0 @@
-void f() {}
diff --git a/tools/gyp/test/linux/target-rpath/main.c b/tools/gyp/test/linux/target-rpath/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/linux/target-rpath/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/linux/target-rpath/test.gyp b/tools/gyp/test/linux/target-rpath/test.gyp
deleted file mode 100644
index b546106..0000000
--- a/tools/gyp/test/linux/target-rpath/test.gyp
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'shared',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'shared_no_so_suffix',
-      'product_extension': 'so.0.1',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'static',
-      'type': 'static_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'shared_executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'shared',
-      ]
-    },
-    {
-      'target_name': 'shared_executable_no_so_suffix',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'shared_no_so_suffix',
-      ]
-    },
-    {
-      'target_name': 'static_executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'dependencies': [
-        'static',
-      ]
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/action-envvars/action/action.gyp b/tools/gyp/test/mac/action-envvars/action/action.gyp
deleted file mode 100644
index d9d6574..0000000
--- a/tools/gyp/test/mac/action-envvars/action/action.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'action',
-      'type': 'none',
-      'actions': [
-        {
-          'inputs': [ ],
-          'outputs': [
-            '<(PRODUCT_DIR)/result',
-            '<(SHARED_INTERMEDIATE_DIR)/tempfile',
-          ],
-          'action_name': 'Test action',
-          'action': ['./action.sh', '<(SHARED_INTERMEDIATE_DIR)/tempfile' ],
-        },
-        {
-          'inputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/tempfile',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/other_result',
-          ],
-          'action_name': 'Other test action',
-          'action': ['cp', '<(SHARED_INTERMEDIATE_DIR)/tempfile',
-                           '<(PRODUCT_DIR)/other_result' ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/action-envvars/action/action.sh b/tools/gyp/test/mac/action-envvars/action/action.sh
deleted file mode 100755
index 48d5f6b..0000000
--- a/tools/gyp/test/mac/action-envvars/action/action.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-echo 'Test output' > "${BUILT_PRODUCTS_DIR}/result"
-echo 'Other output' > "$1"
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings b/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
deleted file mode 100644
index 452e7fa..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist-error.strings
+++ /dev/null
@@ -1,3 +0,0 @@
-/* Localized versions of Info.plist keys */
-
-NSHumanReadableCopyright = "Copyright ©2011 Google Inc."
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings b/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings
deleted file mode 100644
index 35bd33a..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/InfoPlist.strings
+++ /dev/null
@@ -1,3 +0,0 @@
-/* Localized versions of Info.plist keys */
-
-NSHumanReadableCopyright = "Copyright ©2011 Google Inc.";
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib b/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib
deleted file mode 100644
index 4524596..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/MainMenu.xib
+++ /dev/null
@@ -1,4119 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<archive type="com.apple.InterfaceBuilder3.Cocoa.XIB" version="7.10">
-	<data>
-		<int key="IBDocument.SystemTarget">1060</int>
-		<string key="IBDocument.SystemVersion">10A324</string>
-		<string key="IBDocument.InterfaceBuilderVersion">719</string>
-		<string key="IBDocument.AppKitVersion">1015</string>
-		<string key="IBDocument.HIToolboxVersion">418.00</string>
-		<object class="NSMutableDictionary" key="IBDocument.PluginVersions">
-			<string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin</string>
-			<string key="NS.object.0">719</string>
-		</object>
-		<object class="NSMutableArray" key="IBDocument.EditedObjectIDs">
-			<bool key="EncodedWithXMLCoder">YES</bool>
-			<integer value="371"/>
-			<integer value="29"/>
-		</object>
-		<object class="NSArray" key="IBDocument.PluginDependencies">
-			<bool key="EncodedWithXMLCoder">YES</bool>
-			<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-		</object>
-		<object class="NSMutableDictionary" key="IBDocument.Metadata">
-			<bool key="EncodedWithXMLCoder">YES</bool>
-			<object class="NSArray" key="dict.sortedKeys" id="0">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-			</object>
-			<object class="NSMutableArray" key="dict.values">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-			</object>
-		</object>
-		<object class="NSMutableArray" key="IBDocument.RootObjects" id="1048">
-			<bool key="EncodedWithXMLCoder">YES</bool>
-			<object class="NSCustomObject" id="1021">
-				<string key="NSClassName">NSApplication</string>
-			</object>
-			<object class="NSCustomObject" id="1014">
-				<string key="NSClassName">FirstResponder</string>
-			</object>
-			<object class="NSCustomObject" id="1050">
-				<string key="NSClassName">NSApplication</string>
-			</object>
-			<object class="NSMenu" id="649796088">
-				<string key="NSTitle">AMainMenu</string>
-				<object class="NSMutableArray" key="NSMenuItems">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-					<object class="NSMenuItem" id="694149608">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">TestApp</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSKeyEquivModMask">1048576</int>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<object class="NSCustomResource" key="NSOnImage" id="35465992">
-							<string key="NSClassName">NSImage</string>
-							<string key="NSResourceName">NSMenuCheckmark</string>
-						</object>
-						<object class="NSCustomResource" key="NSMixedImage" id="502551668">
-							<string key="NSClassName">NSImage</string>
-							<string key="NSResourceName">NSMenuMixedState</string>
-						</object>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="110575045">
-							<string key="NSTitle">TestApp</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="238522557">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">About TestApp</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="304266470">
-									<reference key="NSMenu" ref="110575045"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="609285721">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Preferences…</string>
-									<string key="NSKeyEquiv">,</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="481834944">
-									<reference key="NSMenu" ref="110575045"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1046388886">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Services</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="752062318">
-										<string key="NSTitle">Services</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-										</object>
-										<string key="NSName">_NSServicesMenu</string>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="646227648">
-									<reference key="NSMenu" ref="110575045"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="755159360">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Hide TestApp</string>
-									<string key="NSKeyEquiv">h</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="342932134">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Hide Others</string>
-									<string key="NSKeyEquiv">h</string>
-									<int key="NSKeyEquivModMask">1572864</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="908899353">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Show All</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1056857174">
-									<reference key="NSMenu" ref="110575045"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="632727374">
-									<reference key="NSMenu" ref="110575045"/>
-									<string key="NSTitle">Quit TestApp</string>
-									<string key="NSKeyEquiv">q</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-							</object>
-							<string key="NSName">_NSAppleMenu</string>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="379814623">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">File</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSKeyEquivModMask">1048576</int>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="720053764">
-							<string key="NSTitle">File</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="705341025">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">New</string>
-									<string key="NSKeyEquiv">n</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="722745758">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Open…</string>
-									<string key="NSKeyEquiv">o</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1025936716">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Open Recent</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="1065607017">
-										<string key="NSTitle">Open Recent</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="759406840">
-												<reference key="NSMenu" ref="1065607017"/>
-												<string key="NSTitle">Clear Menu</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-										<string key="NSName">_NSRecentDocumentsMenu</string>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="425164168">
-									<reference key="NSMenu" ref="720053764"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="776162233">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Close</string>
-									<string key="NSKeyEquiv">w</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1023925487">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Save</string>
-									<string key="NSKeyEquiv">s</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="117038363">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Save As…</string>
-									<string key="NSKeyEquiv">S</string>
-									<int key="NSKeyEquivModMask">1179648</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="579971712">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Revert to Saved</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1010469920">
-									<reference key="NSMenu" ref="720053764"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="294629803">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Page Setup...</string>
-									<string key="NSKeyEquiv">P</string>
-									<int key="NSKeyEquivModMask">1179648</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSToolTip"/>
-								</object>
-								<object class="NSMenuItem" id="49223823">
-									<reference key="NSMenu" ref="720053764"/>
-									<string key="NSTitle">Print…</string>
-									<string key="NSKeyEquiv">p</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-							</object>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="952259628">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">Edit</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSKeyEquivModMask">1048576</int>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="789758025">
-							<string key="NSTitle">Edit</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="1058277027">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Undo</string>
-									<string key="NSKeyEquiv">z</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="790794224">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Redo</string>
-									<string key="NSKeyEquiv">Z</string>
-									<int key="NSKeyEquivModMask">1179648</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="1040322652">
-									<reference key="NSMenu" ref="789758025"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="296257095">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Cut</string>
-									<string key="NSKeyEquiv">x</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="860595796">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Copy</string>
-									<string key="NSKeyEquiv">c</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="29853731">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Paste</string>
-									<string key="NSKeyEquiv">v</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="82994268">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Paste and Match Style</string>
-									<string key="NSKeyEquiv">V</string>
-									<int key="NSKeyEquivModMask">1572864</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="437104165">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Delete</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="583158037">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Select All</string>
-									<string key="NSKeyEquiv">a</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="212016141">
-									<reference key="NSMenu" ref="789758025"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="892235320">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Find</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="963351320">
-										<string key="NSTitle">Find</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="447796847">
-												<reference key="NSMenu" ref="963351320"/>
-												<string key="NSTitle">Find…</string>
-												<string key="NSKeyEquiv">f</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">1</int>
-											</object>
-											<object class="NSMenuItem" id="326711663">
-												<reference key="NSMenu" ref="963351320"/>
-												<string key="NSTitle">Find Next</string>
-												<string key="NSKeyEquiv">g</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">2</int>
-											</object>
-											<object class="NSMenuItem" id="270902937">
-												<reference key="NSMenu" ref="963351320"/>
-												<string key="NSTitle">Find Previous</string>
-												<string key="NSKeyEquiv">G</string>
-												<int key="NSKeyEquivModMask">1179648</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">3</int>
-											</object>
-											<object class="NSMenuItem" id="159080638">
-												<reference key="NSMenu" ref="963351320"/>
-												<string key="NSTitle">Use Selection for Find</string>
-												<string key="NSKeyEquiv">e</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">7</int>
-											</object>
-											<object class="NSMenuItem" id="88285865">
-												<reference key="NSMenu" ref="963351320"/>
-												<string key="NSTitle">Jump to Selection</string>
-												<string key="NSKeyEquiv">j</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="972420730">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Spelling and Grammar</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="769623530">
-										<string key="NSTitle">Spelling and Grammar</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="679648819">
-												<reference key="NSMenu" ref="769623530"/>
-												<string key="NSTitle">Show Spelling and Grammar</string>
-												<string key="NSKeyEquiv">:</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="96193923">
-												<reference key="NSMenu" ref="769623530"/>
-												<string key="NSTitle">Check Document Now</string>
-												<string key="NSKeyEquiv">;</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="859480356">
-												<reference key="NSMenu" ref="769623530"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="948374510">
-												<reference key="NSMenu" ref="769623530"/>
-												<string key="NSTitle">Check Spelling While Typing</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="967646866">
-												<reference key="NSMenu" ref="769623530"/>
-												<string key="NSTitle">Check Grammar With Spelling</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="795346622">
-												<reference key="NSMenu" ref="769623530"/>
-												<string key="NSTitle">Correct Spelling Automatically</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="507821607">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Substitutions</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="698887838">
-										<string key="NSTitle">Substitutions</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="65139061">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Show Substitutions</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="19036812">
-												<reference key="NSMenu" ref="698887838"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="605118523">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Smart Copy/Paste</string>
-												<string key="NSKeyEquiv">f</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">1</int>
-											</object>
-											<object class="NSMenuItem" id="197661976">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Smart Quotes</string>
-												<string key="NSKeyEquiv">g</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">2</int>
-											</object>
-											<object class="NSMenuItem" id="672708820">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Smart Dashes</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="708854459">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Smart Links</string>
-												<string key="NSKeyEquiv">G</string>
-												<int key="NSKeyEquivModMask">1179648</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">3</int>
-											</object>
-											<object class="NSMenuItem" id="537092702">
-												<reference key="NSMenu" ref="698887838"/>
-												<string key="NSTitle">Text Replacement</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="288088188">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Transformations</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="579392910">
-										<string key="NSTitle">Transformations</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="1060694897">
-												<reference key="NSMenu" ref="579392910"/>
-												<string key="NSTitle">Make Upper Case</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="879586729">
-												<reference key="NSMenu" ref="579392910"/>
-												<string key="NSTitle">Make Lower Case</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="56570060">
-												<reference key="NSMenu" ref="579392910"/>
-												<string key="NSTitle">Capitalize</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="676164635">
-									<reference key="NSMenu" ref="789758025"/>
-									<string key="NSTitle">Speech</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="785027613">
-										<string key="NSTitle">Speech</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="731782645">
-												<reference key="NSMenu" ref="785027613"/>
-												<string key="NSTitle">Start Speaking</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="680220178">
-												<reference key="NSMenu" ref="785027613"/>
-												<string key="NSTitle">Stop Speaking</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-							</object>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="302598603">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">Format</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="941447902">
-							<string key="NSTitle">Format</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="792887677">
-									<reference key="NSMenu" ref="941447902"/>
-									<string key="NSTitle">Font</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="786677654">
-										<string key="NSTitle">Font</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="159677712">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Show Fonts</string>
-												<string key="NSKeyEquiv">t</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="305399458">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Bold</string>
-												<string key="NSKeyEquiv">b</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">2</int>
-											</object>
-											<object class="NSMenuItem" id="814362025">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Italic</string>
-												<string key="NSKeyEquiv">i</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">1</int>
-											</object>
-											<object class="NSMenuItem" id="330926929">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Underline</string>
-												<string key="NSKeyEquiv">u</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="533507878">
-												<reference key="NSMenu" ref="786677654"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="158063935">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Bigger</string>
-												<string key="NSKeyEquiv">+</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">3</int>
-											</object>
-											<object class="NSMenuItem" id="885547335">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Smaller</string>
-												<string key="NSKeyEquiv">-</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<int key="NSTag">4</int>
-											</object>
-											<object class="NSMenuItem" id="901062459">
-												<reference key="NSMenu" ref="786677654"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="767671776">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Kern</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<string key="NSAction">submenuAction:</string>
-												<object class="NSMenu" key="NSSubmenu" id="175441468">
-													<string key="NSTitle">Kern</string>
-													<object class="NSMutableArray" key="NSMenuItems">
-														<bool key="EncodedWithXMLCoder">YES</bool>
-														<object class="NSMenuItem" id="252969304">
-															<reference key="NSMenu" ref="175441468"/>
-															<string key="NSTitle">Use Default</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="766922938">
-															<reference key="NSMenu" ref="175441468"/>
-															<string key="NSTitle">Use None</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="677519740">
-															<reference key="NSMenu" ref="175441468"/>
-															<string key="NSTitle">Tighten</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="238351151">
-															<reference key="NSMenu" ref="175441468"/>
-															<string key="NSTitle">Loosen</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-													</object>
-												</object>
-											</object>
-											<object class="NSMenuItem" id="691570813">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Ligature</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<string key="NSAction">submenuAction:</string>
-												<object class="NSMenu" key="NSSubmenu" id="1058217995">
-													<string key="NSTitle">Ligature</string>
-													<object class="NSMutableArray" key="NSMenuItems">
-														<bool key="EncodedWithXMLCoder">YES</bool>
-														<object class="NSMenuItem" id="706297211">
-															<reference key="NSMenu" ref="1058217995"/>
-															<string key="NSTitle">Use Default</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="568384683">
-															<reference key="NSMenu" ref="1058217995"/>
-															<string key="NSTitle">Use None</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="663508465">
-															<reference key="NSMenu" ref="1058217995"/>
-															<string key="NSTitle">Use All</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-													</object>
-												</object>
-											</object>
-											<object class="NSMenuItem" id="769124883">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Baseline</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<string key="NSAction">submenuAction:</string>
-												<object class="NSMenu" key="NSSubmenu" id="18263474">
-													<string key="NSTitle">Baseline</string>
-													<object class="NSMutableArray" key="NSMenuItems">
-														<bool key="EncodedWithXMLCoder">YES</bool>
-														<object class="NSMenuItem" id="257962622">
-															<reference key="NSMenu" ref="18263474"/>
-															<string key="NSTitle">Use Default</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="644725453">
-															<reference key="NSMenu" ref="18263474"/>
-															<string key="NSTitle">Superscript</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="1037576581">
-															<reference key="NSMenu" ref="18263474"/>
-															<string key="NSTitle">Subscript</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="941806246">
-															<reference key="NSMenu" ref="18263474"/>
-															<string key="NSTitle">Raise</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="1045724900">
-															<reference key="NSMenu" ref="18263474"/>
-															<string key="NSTitle">Lower</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-													</object>
-												</object>
-											</object>
-											<object class="NSMenuItem" id="739652853">
-												<reference key="NSMenu" ref="786677654"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="1012600125">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Show Colors</string>
-												<string key="NSKeyEquiv">C</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="214559597">
-												<reference key="NSMenu" ref="786677654"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="596732606">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Copy Style</string>
-												<string key="NSKeyEquiv">c</string>
-												<int key="NSKeyEquivModMask">1572864</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="393423671">
-												<reference key="NSMenu" ref="786677654"/>
-												<string key="NSTitle">Paste Style</string>
-												<string key="NSKeyEquiv">v</string>
-												<int key="NSKeyEquivModMask">1572864</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-										<string key="NSName">_NSFontMenu</string>
-									</object>
-								</object>
-								<object class="NSMenuItem" id="215659978">
-									<reference key="NSMenu" ref="941447902"/>
-									<string key="NSTitle">Text</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-									<string key="NSAction">submenuAction:</string>
-									<object class="NSMenu" key="NSSubmenu" id="446991534">
-										<string key="NSTitle">Text</string>
-										<object class="NSMutableArray" key="NSMenuItems">
-											<bool key="EncodedWithXMLCoder">YES</bool>
-											<object class="NSMenuItem" id="875092757">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Align Left</string>
-												<string key="NSKeyEquiv">{</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="630155264">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Center</string>
-												<string key="NSKeyEquiv">|</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="945678886">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Justify</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="512868991">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Align Right</string>
-												<string key="NSKeyEquiv">}</string>
-												<int key="NSKeyEquivModMask">1048576</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="163117631">
-												<reference key="NSMenu" ref="446991534"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="31516759">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Writing Direction</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-												<string key="NSAction">submenuAction:</string>
-												<object class="NSMenu" key="NSSubmenu" id="956096989">
-													<string key="NSTitle">Writing Direction</string>
-													<object class="NSMutableArray" key="NSMenuItems">
-														<bool key="EncodedWithXMLCoder">YES</bool>
-														<object class="NSMenuItem" id="257099033">
-															<reference key="NSMenu" ref="956096989"/>
-															<bool key="NSIsDisabled">YES</bool>
-															<string key="NSTitle">Paragraph</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="551969625">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CURlZmF1bHQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="249532473">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CUxlZnQgdG8gUmlnaHQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="607364498">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CVJpZ2h0IHRvIExlZnQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="508151438">
-															<reference key="NSMenu" ref="956096989"/>
-															<bool key="NSIsDisabled">YES</bool>
-															<bool key="NSIsSeparator">YES</bool>
-															<string key="NSTitle"/>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="981751889">
-															<reference key="NSMenu" ref="956096989"/>
-															<bool key="NSIsDisabled">YES</bool>
-															<string key="NSTitle">Selection</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="380031999">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CURlZmF1bHQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="825984362">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CUxlZnQgdG8gUmlnaHQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-														<object class="NSMenuItem" id="560145579">
-															<reference key="NSMenu" ref="956096989"/>
-															<string type="base64-UTF8" key="NSTitle">CVJpZ2h0IHRvIExlZnQ</string>
-															<string key="NSKeyEquiv"/>
-															<int key="NSMnemonicLoc">2147483647</int>
-															<reference key="NSOnImage" ref="35465992"/>
-															<reference key="NSMixedImage" ref="502551668"/>
-														</object>
-													</object>
-												</object>
-											</object>
-											<object class="NSMenuItem" id="908105787">
-												<reference key="NSMenu" ref="446991534"/>
-												<bool key="NSIsDisabled">YES</bool>
-												<bool key="NSIsSeparator">YES</bool>
-												<string key="NSTitle"/>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="644046920">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Show Ruler</string>
-												<string key="NSKeyEquiv"/>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="231811626">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Copy Ruler</string>
-												<string key="NSKeyEquiv">c</string>
-												<int key="NSKeyEquivModMask">1310720</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-											<object class="NSMenuItem" id="883618387">
-												<reference key="NSMenu" ref="446991534"/>
-												<string key="NSTitle">Paste Ruler</string>
-												<string key="NSKeyEquiv">v</string>
-												<int key="NSKeyEquivModMask">1310720</int>
-												<int key="NSMnemonicLoc">2147483647</int>
-												<reference key="NSOnImage" ref="35465992"/>
-												<reference key="NSMixedImage" ref="502551668"/>
-											</object>
-										</object>
-									</object>
-								</object>
-							</object>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="586577488">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">View</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSKeyEquivModMask">1048576</int>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="466310130">
-							<string key="NSTitle">View</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="102151532">
-									<reference key="NSMenu" ref="466310130"/>
-									<string key="NSTitle">Show Toolbar</string>
-									<string key="NSKeyEquiv">t</string>
-									<int key="NSKeyEquivModMask">1572864</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="237841660">
-									<reference key="NSMenu" ref="466310130"/>
-									<string key="NSTitle">Customize Toolbar…</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-							</object>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="713487014">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">Window</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSKeyEquivModMask">1048576</int>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="835318025">
-							<string key="NSTitle">Window</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="1011231497">
-									<reference key="NSMenu" ref="835318025"/>
-									<string key="NSTitle">Minimize</string>
-									<string key="NSKeyEquiv">m</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="575023229">
-									<reference key="NSMenu" ref="835318025"/>
-									<string key="NSTitle">Zoom</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="299356726">
-									<reference key="NSMenu" ref="835318025"/>
-									<bool key="NSIsDisabled">YES</bool>
-									<bool key="NSIsSeparator">YES</bool>
-									<string key="NSTitle"/>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-								<object class="NSMenuItem" id="625202149">
-									<reference key="NSMenu" ref="835318025"/>
-									<string key="NSTitle">Bring All to Front</string>
-									<string key="NSKeyEquiv"/>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-							</object>
-							<string key="NSName">_NSWindowsMenu</string>
-						</object>
-					</object>
-					<object class="NSMenuItem" id="448692316">
-						<reference key="NSMenu" ref="649796088"/>
-						<string key="NSTitle">Help</string>
-						<string key="NSKeyEquiv"/>
-						<int key="NSMnemonicLoc">2147483647</int>
-						<reference key="NSOnImage" ref="35465992"/>
-						<reference key="NSMixedImage" ref="502551668"/>
-						<string key="NSAction">submenuAction:</string>
-						<object class="NSMenu" key="NSSubmenu" id="992780483">
-							<string key="NSTitle">Help</string>
-							<object class="NSMutableArray" key="NSMenuItems">
-								<bool key="EncodedWithXMLCoder">YES</bool>
-								<object class="NSMenuItem" id="105068016">
-									<reference key="NSMenu" ref="992780483"/>
-									<string key="NSTitle">TestApp Help</string>
-									<string key="NSKeyEquiv">?</string>
-									<int key="NSKeyEquivModMask">1048576</int>
-									<int key="NSMnemonicLoc">2147483647</int>
-									<reference key="NSOnImage" ref="35465992"/>
-									<reference key="NSMixedImage" ref="502551668"/>
-								</object>
-							</object>
-							<string key="NSName">_NSHelpMenu</string>
-						</object>
-					</object>
-				</object>
-				<string key="NSName">_NSMainMenu</string>
-			</object>
-			<object class="NSWindowTemplate" id="972006081">
-				<int key="NSWindowStyleMask">15</int>
-				<int key="NSWindowBacking">2</int>
-				<string key="NSWindowRect">{{335, 390}, {480, 360}}</string>
-				<int key="NSWTFlags">1954021376</int>
-				<string key="NSWindowTitle">TestApp</string>
-				<string key="NSWindowClass">NSWindow</string>
-				<nil key="NSViewClass"/>
-				<string key="NSWindowContentMaxSize">{1.79769e+308, 1.79769e+308}</string>
-				<object class="NSView" key="NSWindowView" id="439893737">
-					<reference key="NSNextResponder"/>
-					<int key="NSvFlags">256</int>
-					<string key="NSFrameSize">{480, 360}</string>
-					<reference key="NSSuperview"/>
-				</object>
-				<string key="NSScreenRect">{{0, 0}, {1920, 1178}}</string>
-				<string key="NSMaxSize">{1.79769e+308, 1.79769e+308}</string>
-			</object>
-			<object class="NSCustomObject" id="976324537">
-				<string key="NSClassName">TestAppAppDelegate</string>
-			</object>
-			<object class="NSCustomObject" id="755631768">
-				<string key="NSClassName">NSFontManager</string>
-			</object>
-		</object>
-		<object class="IBObjectContainer" key="IBDocument.Objects">
-			<object class="NSMutableArray" key="connectionRecords">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performMiniaturize:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1011231497"/>
-					</object>
-					<int key="connectionID">37</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">arrangeInFront:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="625202149"/>
-					</object>
-					<int key="connectionID">39</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">print:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="49223823"/>
-					</object>
-					<int key="connectionID">86</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">runPageLayout:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="294629803"/>
-					</object>
-					<int key="connectionID">87</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">clearRecentDocuments:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="759406840"/>
-					</object>
-					<int key="connectionID">127</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">orderFrontStandardAboutPanel:</string>
-						<reference key="source" ref="1021"/>
-						<reference key="destination" ref="238522557"/>
-					</object>
-					<int key="connectionID">142</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performClose:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="776162233"/>
-					</object>
-					<int key="connectionID">193</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleContinuousSpellChecking:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="948374510"/>
-					</object>
-					<int key="connectionID">222</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">undo:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1058277027"/>
-					</object>
-					<int key="connectionID">223</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">copy:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="860595796"/>
-					</object>
-					<int key="connectionID">224</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">checkSpelling:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="96193923"/>
-					</object>
-					<int key="connectionID">225</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">paste:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="29853731"/>
-					</object>
-					<int key="connectionID">226</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">stopSpeaking:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="680220178"/>
-					</object>
-					<int key="connectionID">227</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">cut:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="296257095"/>
-					</object>
-					<int key="connectionID">228</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">showGuessPanel:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="679648819"/>
-					</object>
-					<int key="connectionID">230</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">redo:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="790794224"/>
-					</object>
-					<int key="connectionID">231</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">selectAll:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="583158037"/>
-					</object>
-					<int key="connectionID">232</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">startSpeaking:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="731782645"/>
-					</object>
-					<int key="connectionID">233</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">delete:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="437104165"/>
-					</object>
-					<int key="connectionID">235</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performZoom:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="575023229"/>
-					</object>
-					<int key="connectionID">240</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performFindPanelAction:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="447796847"/>
-					</object>
-					<int key="connectionID">241</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">centerSelectionInVisibleArea:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="88285865"/>
-					</object>
-					<int key="connectionID">245</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleGrammarChecking:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="967646866"/>
-					</object>
-					<int key="connectionID">347</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleSmartInsertDelete:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="605118523"/>
-					</object>
-					<int key="connectionID">355</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleAutomaticQuoteSubstitution:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="197661976"/>
-					</object>
-					<int key="connectionID">356</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleAutomaticLinkDetection:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="708854459"/>
-					</object>
-					<int key="connectionID">357</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">saveDocument:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1023925487"/>
-					</object>
-					<int key="connectionID">362</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">saveDocumentAs:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="117038363"/>
-					</object>
-					<int key="connectionID">363</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">revertDocumentToSaved:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="579971712"/>
-					</object>
-					<int key="connectionID">364</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">runToolbarCustomizationPalette:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="237841660"/>
-					</object>
-					<int key="connectionID">365</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleToolbarShown:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="102151532"/>
-					</object>
-					<int key="connectionID">366</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">hide:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="755159360"/>
-					</object>
-					<int key="connectionID">367</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">hideOtherApplications:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="342932134"/>
-					</object>
-					<int key="connectionID">368</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">unhideAllApplications:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="908899353"/>
-					</object>
-					<int key="connectionID">370</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">newDocument:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="705341025"/>
-					</object>
-					<int key="connectionID">373</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">openDocument:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="722745758"/>
-					</object>
-					<int key="connectionID">374</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">addFontTrait:</string>
-						<reference key="source" ref="755631768"/>
-						<reference key="destination" ref="305399458"/>
-					</object>
-					<int key="connectionID">421</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">addFontTrait:</string>
-						<reference key="source" ref="755631768"/>
-						<reference key="destination" ref="814362025"/>
-					</object>
-					<int key="connectionID">422</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">modifyFont:</string>
-						<reference key="source" ref="755631768"/>
-						<reference key="destination" ref="885547335"/>
-					</object>
-					<int key="connectionID">423</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">orderFrontFontPanel:</string>
-						<reference key="source" ref="755631768"/>
-						<reference key="destination" ref="159677712"/>
-					</object>
-					<int key="connectionID">424</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">modifyFont:</string>
-						<reference key="source" ref="755631768"/>
-						<reference key="destination" ref="158063935"/>
-					</object>
-					<int key="connectionID">425</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">raiseBaseline:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="941806246"/>
-					</object>
-					<int key="connectionID">426</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">lowerBaseline:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1045724900"/>
-					</object>
-					<int key="connectionID">427</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">copyFont:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="596732606"/>
-					</object>
-					<int key="connectionID">428</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">subscript:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1037576581"/>
-					</object>
-					<int key="connectionID">429</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">superscript:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="644725453"/>
-					</object>
-					<int key="connectionID">430</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">tightenKerning:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="677519740"/>
-					</object>
-					<int key="connectionID">431</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">underline:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="330926929"/>
-					</object>
-					<int key="connectionID">432</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">orderFrontColorPanel:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1012600125"/>
-					</object>
-					<int key="connectionID">433</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">useAllLigatures:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="663508465"/>
-					</object>
-					<int key="connectionID">434</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">loosenKerning:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="238351151"/>
-					</object>
-					<int key="connectionID">435</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">pasteFont:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="393423671"/>
-					</object>
-					<int key="connectionID">436</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">unscript:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="257962622"/>
-					</object>
-					<int key="connectionID">437</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">useStandardKerning:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="252969304"/>
-					</object>
-					<int key="connectionID">438</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">useStandardLigatures:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="706297211"/>
-					</object>
-					<int key="connectionID">439</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">turnOffLigatures:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="568384683"/>
-					</object>
-					<int key="connectionID">440</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">turnOffKerning:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="766922938"/>
-					</object>
-					<int key="connectionID">441</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">terminate:</string>
-						<reference key="source" ref="1050"/>
-						<reference key="destination" ref="632727374"/>
-					</object>
-					<int key="connectionID">449</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleAutomaticSpellingCorrection:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="795346622"/>
-					</object>
-					<int key="connectionID">456</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">orderFrontSubstitutionsPanel:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="65139061"/>
-					</object>
-					<int key="connectionID">458</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleAutomaticDashSubstitution:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="672708820"/>
-					</object>
-					<int key="connectionID">461</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleAutomaticTextReplacement:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="537092702"/>
-					</object>
-					<int key="connectionID">463</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">uppercaseWord:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="1060694897"/>
-					</object>
-					<int key="connectionID">464</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">capitalizeWord:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="56570060"/>
-					</object>
-					<int key="connectionID">467</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">lowercaseWord:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="879586729"/>
-					</object>
-					<int key="connectionID">468</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">pasteAsPlainText:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="82994268"/>
-					</object>
-					<int key="connectionID">486</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performFindPanelAction:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="326711663"/>
-					</object>
-					<int key="connectionID">487</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performFindPanelAction:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="270902937"/>
-					</object>
-					<int key="connectionID">488</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">performFindPanelAction:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="159080638"/>
-					</object>
-					<int key="connectionID">489</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">showHelp:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="105068016"/>
-					</object>
-					<int key="connectionID">493</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBOutletConnection" key="connection">
-						<string key="label">delegate</string>
-						<reference key="source" ref="1021"/>
-						<reference key="destination" ref="976324537"/>
-					</object>
-					<int key="connectionID">495</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">alignCenter:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="630155264"/>
-					</object>
-					<int key="connectionID">518</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">pasteRuler:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="883618387"/>
-					</object>
-					<int key="connectionID">519</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">toggleRuler:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="644046920"/>
-					</object>
-					<int key="connectionID">520</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">alignRight:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="512868991"/>
-					</object>
-					<int key="connectionID">521</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">copyRuler:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="231811626"/>
-					</object>
-					<int key="connectionID">522</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">alignJustified:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="945678886"/>
-					</object>
-					<int key="connectionID">523</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">alignLeft:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="875092757"/>
-					</object>
-					<int key="connectionID">524</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeBaseWritingDirectionNatural:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="551969625"/>
-					</object>
-					<int key="connectionID">525</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeBaseWritingDirectionLeftToRight:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="249532473"/>
-					</object>
-					<int key="connectionID">526</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeBaseWritingDirectionRightToLeft:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="607364498"/>
-					</object>
-					<int key="connectionID">527</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeTextWritingDirectionNatural:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="380031999"/>
-					</object>
-					<int key="connectionID">528</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeTextWritingDirectionLeftToRight:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="825984362"/>
-					</object>
-					<int key="connectionID">529</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBActionConnection" key="connection">
-						<string key="label">makeTextWritingDirectionRightToLeft:</string>
-						<reference key="source" ref="1014"/>
-						<reference key="destination" ref="560145579"/>
-					</object>
-					<int key="connectionID">530</int>
-				</object>
-				<object class="IBConnectionRecord">
-					<object class="IBOutletConnection" key="connection">
-						<string key="label">window</string>
-						<reference key="source" ref="976324537"/>
-						<reference key="destination" ref="972006081"/>
-					</object>
-					<int key="connectionID">532</int>
-				</object>
-			</object>
-			<object class="IBMutableOrderedSet" key="objectRecords">
-				<object class="NSArray" key="orderedObjects">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-					<object class="IBObjectRecord">
-						<int key="objectID">0</int>
-						<reference key="object" ref="0"/>
-						<reference key="children" ref="1048"/>
-						<nil key="parent"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">-2</int>
-						<reference key="object" ref="1021"/>
-						<reference key="parent" ref="0"/>
-						<string key="objectName">File's Owner</string>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">-1</int>
-						<reference key="object" ref="1014"/>
-						<reference key="parent" ref="0"/>
-						<string key="objectName">First Responder</string>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">-3</int>
-						<reference key="object" ref="1050"/>
-						<reference key="parent" ref="0"/>
-						<string key="objectName">Application</string>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">29</int>
-						<reference key="object" ref="649796088"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="713487014"/>
-							<reference ref="694149608"/>
-							<reference ref="952259628"/>
-							<reference ref="379814623"/>
-							<reference ref="586577488"/>
-							<reference ref="302598603"/>
-							<reference ref="448692316"/>
-						</object>
-						<reference key="parent" ref="0"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">19</int>
-						<reference key="object" ref="713487014"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="835318025"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">56</int>
-						<reference key="object" ref="694149608"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="110575045"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">217</int>
-						<reference key="object" ref="952259628"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="789758025"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">83</int>
-						<reference key="object" ref="379814623"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="720053764"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">81</int>
-						<reference key="object" ref="720053764"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="1023925487"/>
-							<reference ref="117038363"/>
-							<reference ref="49223823"/>
-							<reference ref="722745758"/>
-							<reference ref="705341025"/>
-							<reference ref="1025936716"/>
-							<reference ref="294629803"/>
-							<reference ref="776162233"/>
-							<reference ref="425164168"/>
-							<reference ref="579971712"/>
-							<reference ref="1010469920"/>
-						</object>
-						<reference key="parent" ref="379814623"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">75</int>
-						<reference key="object" ref="1023925487"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">80</int>
-						<reference key="object" ref="117038363"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">78</int>
-						<reference key="object" ref="49223823"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">72</int>
-						<reference key="object" ref="722745758"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">82</int>
-						<reference key="object" ref="705341025"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">124</int>
-						<reference key="object" ref="1025936716"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="1065607017"/>
-						</object>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">77</int>
-						<reference key="object" ref="294629803"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">73</int>
-						<reference key="object" ref="776162233"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">79</int>
-						<reference key="object" ref="425164168"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">112</int>
-						<reference key="object" ref="579971712"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">74</int>
-						<reference key="object" ref="1010469920"/>
-						<reference key="parent" ref="720053764"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">125</int>
-						<reference key="object" ref="1065607017"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="759406840"/>
-						</object>
-						<reference key="parent" ref="1025936716"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">126</int>
-						<reference key="object" ref="759406840"/>
-						<reference key="parent" ref="1065607017"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">205</int>
-						<reference key="object" ref="789758025"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="437104165"/>
-							<reference ref="583158037"/>
-							<reference ref="1058277027"/>
-							<reference ref="212016141"/>
-							<reference ref="296257095"/>
-							<reference ref="29853731"/>
-							<reference ref="860595796"/>
-							<reference ref="1040322652"/>
-							<reference ref="790794224"/>
-							<reference ref="892235320"/>
-							<reference ref="972420730"/>
-							<reference ref="676164635"/>
-							<reference ref="507821607"/>
-							<reference ref="288088188"/>
-							<reference ref="82994268"/>
-						</object>
-						<reference key="parent" ref="952259628"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">202</int>
-						<reference key="object" ref="437104165"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">198</int>
-						<reference key="object" ref="583158037"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">207</int>
-						<reference key="object" ref="1058277027"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">214</int>
-						<reference key="object" ref="212016141"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">199</int>
-						<reference key="object" ref="296257095"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">203</int>
-						<reference key="object" ref="29853731"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">197</int>
-						<reference key="object" ref="860595796"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">206</int>
-						<reference key="object" ref="1040322652"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">215</int>
-						<reference key="object" ref="790794224"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">218</int>
-						<reference key="object" ref="892235320"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="963351320"/>
-						</object>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">216</int>
-						<reference key="object" ref="972420730"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="769623530"/>
-						</object>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">200</int>
-						<reference key="object" ref="769623530"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="948374510"/>
-							<reference ref="96193923"/>
-							<reference ref="679648819"/>
-							<reference ref="967646866"/>
-							<reference ref="859480356"/>
-							<reference ref="795346622"/>
-						</object>
-						<reference key="parent" ref="972420730"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">219</int>
-						<reference key="object" ref="948374510"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">201</int>
-						<reference key="object" ref="96193923"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">204</int>
-						<reference key="object" ref="679648819"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">220</int>
-						<reference key="object" ref="963351320"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="270902937"/>
-							<reference ref="88285865"/>
-							<reference ref="159080638"/>
-							<reference ref="326711663"/>
-							<reference ref="447796847"/>
-						</object>
-						<reference key="parent" ref="892235320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">213</int>
-						<reference key="object" ref="270902937"/>
-						<reference key="parent" ref="963351320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">210</int>
-						<reference key="object" ref="88285865"/>
-						<reference key="parent" ref="963351320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">221</int>
-						<reference key="object" ref="159080638"/>
-						<reference key="parent" ref="963351320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">208</int>
-						<reference key="object" ref="326711663"/>
-						<reference key="parent" ref="963351320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">209</int>
-						<reference key="object" ref="447796847"/>
-						<reference key="parent" ref="963351320"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">57</int>
-						<reference key="object" ref="110575045"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="238522557"/>
-							<reference ref="755159360"/>
-							<reference ref="908899353"/>
-							<reference ref="632727374"/>
-							<reference ref="646227648"/>
-							<reference ref="609285721"/>
-							<reference ref="481834944"/>
-							<reference ref="304266470"/>
-							<reference ref="1046388886"/>
-							<reference ref="1056857174"/>
-							<reference ref="342932134"/>
-						</object>
-						<reference key="parent" ref="694149608"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">58</int>
-						<reference key="object" ref="238522557"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">134</int>
-						<reference key="object" ref="755159360"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">150</int>
-						<reference key="object" ref="908899353"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">136</int>
-						<reference key="object" ref="632727374"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">144</int>
-						<reference key="object" ref="646227648"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">129</int>
-						<reference key="object" ref="609285721"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">143</int>
-						<reference key="object" ref="481834944"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">236</int>
-						<reference key="object" ref="304266470"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">131</int>
-						<reference key="object" ref="1046388886"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="752062318"/>
-						</object>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">149</int>
-						<reference key="object" ref="1056857174"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">145</int>
-						<reference key="object" ref="342932134"/>
-						<reference key="parent" ref="110575045"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">130</int>
-						<reference key="object" ref="752062318"/>
-						<reference key="parent" ref="1046388886"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">24</int>
-						<reference key="object" ref="835318025"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="299356726"/>
-							<reference ref="625202149"/>
-							<reference ref="575023229"/>
-							<reference ref="1011231497"/>
-						</object>
-						<reference key="parent" ref="713487014"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">92</int>
-						<reference key="object" ref="299356726"/>
-						<reference key="parent" ref="835318025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">5</int>
-						<reference key="object" ref="625202149"/>
-						<reference key="parent" ref="835318025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">239</int>
-						<reference key="object" ref="575023229"/>
-						<reference key="parent" ref="835318025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">23</int>
-						<reference key="object" ref="1011231497"/>
-						<reference key="parent" ref="835318025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">295</int>
-						<reference key="object" ref="586577488"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="466310130"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">296</int>
-						<reference key="object" ref="466310130"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="102151532"/>
-							<reference ref="237841660"/>
-						</object>
-						<reference key="parent" ref="586577488"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">297</int>
-						<reference key="object" ref="102151532"/>
-						<reference key="parent" ref="466310130"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">298</int>
-						<reference key="object" ref="237841660"/>
-						<reference key="parent" ref="466310130"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">211</int>
-						<reference key="object" ref="676164635"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="785027613"/>
-						</object>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">212</int>
-						<reference key="object" ref="785027613"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="680220178"/>
-							<reference ref="731782645"/>
-						</object>
-						<reference key="parent" ref="676164635"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">195</int>
-						<reference key="object" ref="680220178"/>
-						<reference key="parent" ref="785027613"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">196</int>
-						<reference key="object" ref="731782645"/>
-						<reference key="parent" ref="785027613"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">346</int>
-						<reference key="object" ref="967646866"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">348</int>
-						<reference key="object" ref="507821607"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="698887838"/>
-						</object>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">349</int>
-						<reference key="object" ref="698887838"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="605118523"/>
-							<reference ref="197661976"/>
-							<reference ref="708854459"/>
-							<reference ref="65139061"/>
-							<reference ref="19036812"/>
-							<reference ref="672708820"/>
-							<reference ref="537092702"/>
-						</object>
-						<reference key="parent" ref="507821607"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">350</int>
-						<reference key="object" ref="605118523"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">351</int>
-						<reference key="object" ref="197661976"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">354</int>
-						<reference key="object" ref="708854459"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">371</int>
-						<reference key="object" ref="972006081"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="439893737"/>
-						</object>
-						<reference key="parent" ref="0"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">372</int>
-						<reference key="object" ref="439893737"/>
-						<reference key="parent" ref="972006081"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">375</int>
-						<reference key="object" ref="302598603"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="941447902"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">376</int>
-						<reference key="object" ref="941447902"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="792887677"/>
-							<reference ref="215659978"/>
-						</object>
-						<reference key="parent" ref="302598603"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">377</int>
-						<reference key="object" ref="792887677"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="786677654"/>
-						</object>
-						<reference key="parent" ref="941447902"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">388</int>
-						<reference key="object" ref="786677654"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="159677712"/>
-							<reference ref="305399458"/>
-							<reference ref="814362025"/>
-							<reference ref="330926929"/>
-							<reference ref="533507878"/>
-							<reference ref="158063935"/>
-							<reference ref="885547335"/>
-							<reference ref="901062459"/>
-							<reference ref="767671776"/>
-							<reference ref="691570813"/>
-							<reference ref="769124883"/>
-							<reference ref="739652853"/>
-							<reference ref="1012600125"/>
-							<reference ref="214559597"/>
-							<reference ref="596732606"/>
-							<reference ref="393423671"/>
-						</object>
-						<reference key="parent" ref="792887677"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">389</int>
-						<reference key="object" ref="159677712"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">390</int>
-						<reference key="object" ref="305399458"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">391</int>
-						<reference key="object" ref="814362025"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">392</int>
-						<reference key="object" ref="330926929"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">393</int>
-						<reference key="object" ref="533507878"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">394</int>
-						<reference key="object" ref="158063935"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">395</int>
-						<reference key="object" ref="885547335"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">396</int>
-						<reference key="object" ref="901062459"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">397</int>
-						<reference key="object" ref="767671776"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="175441468"/>
-						</object>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">398</int>
-						<reference key="object" ref="691570813"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="1058217995"/>
-						</object>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">399</int>
-						<reference key="object" ref="769124883"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="18263474"/>
-						</object>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">400</int>
-						<reference key="object" ref="739652853"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">401</int>
-						<reference key="object" ref="1012600125"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">402</int>
-						<reference key="object" ref="214559597"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">403</int>
-						<reference key="object" ref="596732606"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">404</int>
-						<reference key="object" ref="393423671"/>
-						<reference key="parent" ref="786677654"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">405</int>
-						<reference key="object" ref="18263474"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="257962622"/>
-							<reference ref="644725453"/>
-							<reference ref="1037576581"/>
-							<reference ref="941806246"/>
-							<reference ref="1045724900"/>
-						</object>
-						<reference key="parent" ref="769124883"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">406</int>
-						<reference key="object" ref="257962622"/>
-						<reference key="parent" ref="18263474"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">407</int>
-						<reference key="object" ref="644725453"/>
-						<reference key="parent" ref="18263474"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">408</int>
-						<reference key="object" ref="1037576581"/>
-						<reference key="parent" ref="18263474"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">409</int>
-						<reference key="object" ref="941806246"/>
-						<reference key="parent" ref="18263474"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">410</int>
-						<reference key="object" ref="1045724900"/>
-						<reference key="parent" ref="18263474"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">411</int>
-						<reference key="object" ref="1058217995"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="706297211"/>
-							<reference ref="568384683"/>
-							<reference ref="663508465"/>
-						</object>
-						<reference key="parent" ref="691570813"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">412</int>
-						<reference key="object" ref="706297211"/>
-						<reference key="parent" ref="1058217995"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">413</int>
-						<reference key="object" ref="568384683"/>
-						<reference key="parent" ref="1058217995"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">414</int>
-						<reference key="object" ref="663508465"/>
-						<reference key="parent" ref="1058217995"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">415</int>
-						<reference key="object" ref="175441468"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="252969304"/>
-							<reference ref="766922938"/>
-							<reference ref="677519740"/>
-							<reference ref="238351151"/>
-						</object>
-						<reference key="parent" ref="767671776"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">416</int>
-						<reference key="object" ref="252969304"/>
-						<reference key="parent" ref="175441468"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">417</int>
-						<reference key="object" ref="766922938"/>
-						<reference key="parent" ref="175441468"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">418</int>
-						<reference key="object" ref="677519740"/>
-						<reference key="parent" ref="175441468"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">419</int>
-						<reference key="object" ref="238351151"/>
-						<reference key="parent" ref="175441468"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">420</int>
-						<reference key="object" ref="755631768"/>
-						<reference key="parent" ref="0"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">450</int>
-						<reference key="object" ref="288088188"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="579392910"/>
-						</object>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">451</int>
-						<reference key="object" ref="579392910"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="1060694897"/>
-							<reference ref="879586729"/>
-							<reference ref="56570060"/>
-						</object>
-						<reference key="parent" ref="288088188"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">452</int>
-						<reference key="object" ref="1060694897"/>
-						<reference key="parent" ref="579392910"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">453</int>
-						<reference key="object" ref="859480356"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">454</int>
-						<reference key="object" ref="795346622"/>
-						<reference key="parent" ref="769623530"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">457</int>
-						<reference key="object" ref="65139061"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">459</int>
-						<reference key="object" ref="19036812"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">460</int>
-						<reference key="object" ref="672708820"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">462</int>
-						<reference key="object" ref="537092702"/>
-						<reference key="parent" ref="698887838"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">465</int>
-						<reference key="object" ref="879586729"/>
-						<reference key="parent" ref="579392910"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">466</int>
-						<reference key="object" ref="56570060"/>
-						<reference key="parent" ref="579392910"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">485</int>
-						<reference key="object" ref="82994268"/>
-						<reference key="parent" ref="789758025"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">490</int>
-						<reference key="object" ref="448692316"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="992780483"/>
-						</object>
-						<reference key="parent" ref="649796088"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">491</int>
-						<reference key="object" ref="992780483"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="105068016"/>
-						</object>
-						<reference key="parent" ref="448692316"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">492</int>
-						<reference key="object" ref="105068016"/>
-						<reference key="parent" ref="992780483"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">494</int>
-						<reference key="object" ref="976324537"/>
-						<reference key="parent" ref="0"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">496</int>
-						<reference key="object" ref="215659978"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="446991534"/>
-						</object>
-						<reference key="parent" ref="941447902"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">497</int>
-						<reference key="object" ref="446991534"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="875092757"/>
-							<reference ref="630155264"/>
-							<reference ref="945678886"/>
-							<reference ref="512868991"/>
-							<reference ref="163117631"/>
-							<reference ref="31516759"/>
-							<reference ref="908105787"/>
-							<reference ref="644046920"/>
-							<reference ref="231811626"/>
-							<reference ref="883618387"/>
-						</object>
-						<reference key="parent" ref="215659978"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">498</int>
-						<reference key="object" ref="875092757"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">499</int>
-						<reference key="object" ref="630155264"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">500</int>
-						<reference key="object" ref="945678886"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">501</int>
-						<reference key="object" ref="512868991"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">502</int>
-						<reference key="object" ref="163117631"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">503</int>
-						<reference key="object" ref="31516759"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="956096989"/>
-						</object>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">504</int>
-						<reference key="object" ref="908105787"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">505</int>
-						<reference key="object" ref="644046920"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">506</int>
-						<reference key="object" ref="231811626"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">507</int>
-						<reference key="object" ref="883618387"/>
-						<reference key="parent" ref="446991534"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">508</int>
-						<reference key="object" ref="956096989"/>
-						<object class="NSMutableArray" key="children">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<reference ref="257099033"/>
-							<reference ref="551969625"/>
-							<reference ref="249532473"/>
-							<reference ref="607364498"/>
-							<reference ref="508151438"/>
-							<reference ref="981751889"/>
-							<reference ref="380031999"/>
-							<reference ref="825984362"/>
-							<reference ref="560145579"/>
-						</object>
-						<reference key="parent" ref="31516759"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">509</int>
-						<reference key="object" ref="257099033"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">510</int>
-						<reference key="object" ref="551969625"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">511</int>
-						<reference key="object" ref="249532473"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">512</int>
-						<reference key="object" ref="607364498"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">513</int>
-						<reference key="object" ref="508151438"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">514</int>
-						<reference key="object" ref="981751889"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">515</int>
-						<reference key="object" ref="380031999"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">516</int>
-						<reference key="object" ref="825984362"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-					<object class="IBObjectRecord">
-						<int key="objectID">517</int>
-						<reference key="object" ref="560145579"/>
-						<reference key="parent" ref="956096989"/>
-					</object>
-				</object>
-			</object>
-			<object class="NSMutableDictionary" key="flattenedProperties">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<object class="NSArray" key="dict.sortedKeys">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-					<string>-3.IBPluginDependency</string>
-					<string>112.IBPluginDependency</string>
-					<string>112.ImportedFromIB2</string>
-					<string>124.IBPluginDependency</string>
-					<string>124.ImportedFromIB2</string>
-					<string>125.IBPluginDependency</string>
-					<string>125.ImportedFromIB2</string>
-					<string>125.editorWindowContentRectSynchronizationRect</string>
-					<string>126.IBPluginDependency</string>
-					<string>126.ImportedFromIB2</string>
-					<string>129.IBPluginDependency</string>
-					<string>129.ImportedFromIB2</string>
-					<string>130.IBPluginDependency</string>
-					<string>130.ImportedFromIB2</string>
-					<string>130.editorWindowContentRectSynchronizationRect</string>
-					<string>131.IBPluginDependency</string>
-					<string>131.ImportedFromIB2</string>
-					<string>134.IBPluginDependency</string>
-					<string>134.ImportedFromIB2</string>
-					<string>136.IBPluginDependency</string>
-					<string>136.ImportedFromIB2</string>
-					<string>143.IBPluginDependency</string>
-					<string>143.ImportedFromIB2</string>
-					<string>144.IBPluginDependency</string>
-					<string>144.ImportedFromIB2</string>
-					<string>145.IBPluginDependency</string>
-					<string>145.ImportedFromIB2</string>
-					<string>149.IBPluginDependency</string>
-					<string>149.ImportedFromIB2</string>
-					<string>150.IBPluginDependency</string>
-					<string>150.ImportedFromIB2</string>
-					<string>19.IBPluginDependency</string>
-					<string>19.ImportedFromIB2</string>
-					<string>195.IBPluginDependency</string>
-					<string>195.ImportedFromIB2</string>
-					<string>196.IBPluginDependency</string>
-					<string>196.ImportedFromIB2</string>
-					<string>197.IBPluginDependency</string>
-					<string>197.ImportedFromIB2</string>
-					<string>198.IBPluginDependency</string>
-					<string>198.ImportedFromIB2</string>
-					<string>199.IBPluginDependency</string>
-					<string>199.ImportedFromIB2</string>
-					<string>200.IBEditorWindowLastContentRect</string>
-					<string>200.IBPluginDependency</string>
-					<string>200.ImportedFromIB2</string>
-					<string>200.editorWindowContentRectSynchronizationRect</string>
-					<string>201.IBPluginDependency</string>
-					<string>201.ImportedFromIB2</string>
-					<string>202.IBPluginDependency</string>
-					<string>202.ImportedFromIB2</string>
-					<string>203.IBPluginDependency</string>
-					<string>203.ImportedFromIB2</string>
-					<string>204.IBPluginDependency</string>
-					<string>204.ImportedFromIB2</string>
-					<string>205.IBEditorWindowLastContentRect</string>
-					<string>205.IBPluginDependency</string>
-					<string>205.ImportedFromIB2</string>
-					<string>205.editorWindowContentRectSynchronizationRect</string>
-					<string>206.IBPluginDependency</string>
-					<string>206.ImportedFromIB2</string>
-					<string>207.IBPluginDependency</string>
-					<string>207.ImportedFromIB2</string>
-					<string>208.IBPluginDependency</string>
-					<string>208.ImportedFromIB2</string>
-					<string>209.IBPluginDependency</string>
-					<string>209.ImportedFromIB2</string>
-					<string>210.IBPluginDependency</string>
-					<string>210.ImportedFromIB2</string>
-					<string>211.IBPluginDependency</string>
-					<string>211.ImportedFromIB2</string>
-					<string>212.IBPluginDependency</string>
-					<string>212.ImportedFromIB2</string>
-					<string>212.editorWindowContentRectSynchronizationRect</string>
-					<string>213.IBPluginDependency</string>
-					<string>213.ImportedFromIB2</string>
-					<string>214.IBPluginDependency</string>
-					<string>214.ImportedFromIB2</string>
-					<string>215.IBPluginDependency</string>
-					<string>215.ImportedFromIB2</string>
-					<string>216.IBPluginDependency</string>
-					<string>216.ImportedFromIB2</string>
-					<string>217.IBPluginDependency</string>
-					<string>217.ImportedFromIB2</string>
-					<string>218.IBPluginDependency</string>
-					<string>218.ImportedFromIB2</string>
-					<string>219.IBPluginDependency</string>
-					<string>219.ImportedFromIB2</string>
-					<string>220.IBEditorWindowLastContentRect</string>
-					<string>220.IBPluginDependency</string>
-					<string>220.ImportedFromIB2</string>
-					<string>220.editorWindowContentRectSynchronizationRect</string>
-					<string>221.IBPluginDependency</string>
-					<string>221.ImportedFromIB2</string>
-					<string>23.IBPluginDependency</string>
-					<string>23.ImportedFromIB2</string>
-					<string>236.IBPluginDependency</string>
-					<string>236.ImportedFromIB2</string>
-					<string>239.IBPluginDependency</string>
-					<string>239.ImportedFromIB2</string>
-					<string>24.IBEditorWindowLastContentRect</string>
-					<string>24.IBPluginDependency</string>
-					<string>24.ImportedFromIB2</string>
-					<string>24.editorWindowContentRectSynchronizationRect</string>
-					<string>29.IBEditorWindowLastContentRect</string>
-					<string>29.IBPluginDependency</string>
-					<string>29.ImportedFromIB2</string>
-					<string>29.WindowOrigin</string>
-					<string>29.editorWindowContentRectSynchronizationRect</string>
-					<string>295.IBPluginDependency</string>
-					<string>296.IBEditorWindowLastContentRect</string>
-					<string>296.IBPluginDependency</string>
-					<string>296.editorWindowContentRectSynchronizationRect</string>
-					<string>297.IBPluginDependency</string>
-					<string>298.IBPluginDependency</string>
-					<string>346.IBPluginDependency</string>
-					<string>346.ImportedFromIB2</string>
-					<string>348.IBPluginDependency</string>
-					<string>348.ImportedFromIB2</string>
-					<string>349.IBEditorWindowLastContentRect</string>
-					<string>349.IBPluginDependency</string>
-					<string>349.ImportedFromIB2</string>
-					<string>349.editorWindowContentRectSynchronizationRect</string>
-					<string>350.IBPluginDependency</string>
-					<string>350.ImportedFromIB2</string>
-					<string>351.IBPluginDependency</string>
-					<string>351.ImportedFromIB2</string>
-					<string>354.IBPluginDependency</string>
-					<string>354.ImportedFromIB2</string>
-					<string>371.IBEditorWindowLastContentRect</string>
-					<string>371.IBPluginDependency</string>
-					<string>371.IBWindowTemplateEditedContentRect</string>
-					<string>371.NSWindowTemplate.visibleAtLaunch</string>
-					<string>371.editorWindowContentRectSynchronizationRect</string>
-					<string>371.windowTemplate.maxSize</string>
-					<string>372.IBPluginDependency</string>
-					<string>375.IBPluginDependency</string>
-					<string>376.IBEditorWindowLastContentRect</string>
-					<string>376.IBPluginDependency</string>
-					<string>377.IBPluginDependency</string>
-					<string>388.IBEditorWindowLastContentRect</string>
-					<string>388.IBPluginDependency</string>
-					<string>389.IBPluginDependency</string>
-					<string>390.IBPluginDependency</string>
-					<string>391.IBPluginDependency</string>
-					<string>392.IBPluginDependency</string>
-					<string>393.IBPluginDependency</string>
-					<string>394.IBPluginDependency</string>
-					<string>395.IBPluginDependency</string>
-					<string>396.IBPluginDependency</string>
-					<string>397.IBPluginDependency</string>
-					<string>398.IBPluginDependency</string>
-					<string>399.IBPluginDependency</string>
-					<string>400.IBPluginDependency</string>
-					<string>401.IBPluginDependency</string>
-					<string>402.IBPluginDependency</string>
-					<string>403.IBPluginDependency</string>
-					<string>404.IBPluginDependency</string>
-					<string>405.IBPluginDependency</string>
-					<string>406.IBPluginDependency</string>
-					<string>407.IBPluginDependency</string>
-					<string>408.IBPluginDependency</string>
-					<string>409.IBPluginDependency</string>
-					<string>410.IBPluginDependency</string>
-					<string>411.IBPluginDependency</string>
-					<string>412.IBPluginDependency</string>
-					<string>413.IBPluginDependency</string>
-					<string>414.IBPluginDependency</string>
-					<string>415.IBPluginDependency</string>
-					<string>416.IBPluginDependency</string>
-					<string>417.IBPluginDependency</string>
-					<string>418.IBPluginDependency</string>
-					<string>419.IBPluginDependency</string>
-					<string>450.IBPluginDependency</string>
-					<string>451.IBEditorWindowLastContentRect</string>
-					<string>451.IBPluginDependency</string>
-					<string>452.IBPluginDependency</string>
-					<string>453.IBPluginDependency</string>
-					<string>454.IBPluginDependency</string>
-					<string>457.IBPluginDependency</string>
-					<string>459.IBPluginDependency</string>
-					<string>460.IBPluginDependency</string>
-					<string>462.IBPluginDependency</string>
-					<string>465.IBPluginDependency</string>
-					<string>466.IBPluginDependency</string>
-					<string>485.IBPluginDependency</string>
-					<string>490.IBPluginDependency</string>
-					<string>491.IBEditorWindowLastContentRect</string>
-					<string>491.IBPluginDependency</string>
-					<string>492.IBPluginDependency</string>
-					<string>496.IBPluginDependency</string>
-					<string>497.IBEditorWindowLastContentRect</string>
-					<string>497.IBPluginDependency</string>
-					<string>498.IBPluginDependency</string>
-					<string>499.IBPluginDependency</string>
-					<string>5.IBPluginDependency</string>
-					<string>5.ImportedFromIB2</string>
-					<string>500.IBPluginDependency</string>
-					<string>501.IBPluginDependency</string>
-					<string>502.IBPluginDependency</string>
-					<string>503.IBPluginDependency</string>
-					<string>504.IBPluginDependency</string>
-					<string>505.IBPluginDependency</string>
-					<string>506.IBPluginDependency</string>
-					<string>507.IBPluginDependency</string>
-					<string>508.IBEditorWindowLastContentRect</string>
-					<string>508.IBPluginDependency</string>
-					<string>509.IBPluginDependency</string>
-					<string>510.IBPluginDependency</string>
-					<string>511.IBPluginDependency</string>
-					<string>512.IBPluginDependency</string>
-					<string>513.IBPluginDependency</string>
-					<string>514.IBPluginDependency</string>
-					<string>515.IBPluginDependency</string>
-					<string>516.IBPluginDependency</string>
-					<string>517.IBPluginDependency</string>
-					<string>56.IBPluginDependency</string>
-					<string>56.ImportedFromIB2</string>
-					<string>57.IBEditorWindowLastContentRect</string>
-					<string>57.IBPluginDependency</string>
-					<string>57.ImportedFromIB2</string>
-					<string>57.editorWindowContentRectSynchronizationRect</string>
-					<string>58.IBPluginDependency</string>
-					<string>58.ImportedFromIB2</string>
-					<string>72.IBPluginDependency</string>
-					<string>72.ImportedFromIB2</string>
-					<string>73.IBPluginDependency</string>
-					<string>73.ImportedFromIB2</string>
-					<string>74.IBPluginDependency</string>
-					<string>74.ImportedFromIB2</string>
-					<string>75.IBPluginDependency</string>
-					<string>75.ImportedFromIB2</string>
-					<string>77.IBPluginDependency</string>
-					<string>77.ImportedFromIB2</string>
-					<string>78.IBPluginDependency</string>
-					<string>78.ImportedFromIB2</string>
-					<string>79.IBPluginDependency</string>
-					<string>79.ImportedFromIB2</string>
-					<string>80.IBPluginDependency</string>
-					<string>80.ImportedFromIB2</string>
-					<string>81.IBEditorWindowLastContentRect</string>
-					<string>81.IBPluginDependency</string>
-					<string>81.ImportedFromIB2</string>
-					<string>81.editorWindowContentRectSynchronizationRect</string>
-					<string>82.IBPluginDependency</string>
-					<string>82.ImportedFromIB2</string>
-					<string>83.IBPluginDependency</string>
-					<string>83.ImportedFromIB2</string>
-					<string>92.IBPluginDependency</string>
-					<string>92.ImportedFromIB2</string>
-				</object>
-				<object class="NSMutableArray" key="dict.values">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{522, 812}, {146, 23}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{436, 809}, {64, 6}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{753, 187}, {275, 113}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{608, 612}, {275, 83}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{547, 180}, {254, 283}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{187, 434}, {243, 243}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{608, 612}, {167, 43}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{753, 217}, {238, 103}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{608, 612}, {241, 103}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{654, 239}, {194, 73}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{525, 802}, {197, 73}}</string>
-					<string>{{380, 836}, {512, 20}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{74, 862}</string>
-					<string>{{6, 978}, {478, 20}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{604, 269}, {231, 43}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{475, 832}, {234, 43}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{746, 287}, {220, 133}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{608, 612}, {215, 63}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{380, 496}, {480, 360}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{380, 496}, {480, 360}}</string>
-					<integer value="1"/>
-					<string>{{33, 99}, {480, 360}}</string>
-					<string>{3.40282e+38, 3.40282e+38}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{591, 420}, {83, 43}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{523, 2}, {178, 283}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{753, 197}, {170, 63}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{725, 289}, {246, 23}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{674, 260}, {204, 183}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>{{878, 180}, {164, 173}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{286, 129}, {275, 183}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{23, 794}, {245, 183}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{452, 109}, {196, 203}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>{{145, 474}, {199, 203}}</string>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-					<string>com.apple.InterfaceBuilder.CocoaPlugin</string>
-					<integer value="1"/>
-				</object>
-			</object>
-			<object class="NSMutableDictionary" key="unlocalizedProperties">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<reference key="dict.sortedKeys" ref="0"/>
-				<object class="NSMutableArray" key="dict.values">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-				</object>
-			</object>
-			<nil key="activeLocalization"/>
-			<object class="NSMutableDictionary" key="localizations">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<reference key="dict.sortedKeys" ref="0"/>
-				<object class="NSMutableArray" key="dict.values">
-					<bool key="EncodedWithXMLCoder">YES</bool>
-				</object>
-			</object>
-			<nil key="sourceID"/>
-			<int key="maxID">532</int>
-		</object>
-		<object class="IBClassDescriber" key="IBDocument.Classes">
-			<object class="NSMutableArray" key="referencedPartialClassDescriptions">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<object class="IBPartialClassDescription">
-					<string key="className">TestAppAppDelegate</string>
-					<string key="superclassName">NSObject</string>
-					<object class="NSMutableDictionary" key="outlets">
-						<string key="NS.key.0">window</string>
-						<string key="NS.object.0">NSWindow</string>
-					</object>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBProjectSource</string>
-						<string key="minorKey">TestAppAppDelegate.h</string>
-					</object>
-				</object>
-			</object>
-			<object class="NSMutableArray" key="referencedPartialClassDescriptionsV3.2+">
-				<bool key="EncodedWithXMLCoder">YES</bool>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<string key="superclassName">NSResponder</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="822405504">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSApplication.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="850738725">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSApplicationScripting.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="624831158">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSColorPanel.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSHelpManager.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSPageLayout.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSApplication</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSUserInterfaceItemSearching.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSBrowser</string>
-					<string key="superclassName">NSControl</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSBrowser.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSControl</string>
-					<string key="superclassName">NSView</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="310914472">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSControl.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSDocument</string>
-					<string key="superclassName">NSObject</string>
-					<object class="NSMutableDictionary" key="actions">
-						<bool key="EncodedWithXMLCoder">YES</bool>
-						<object class="NSArray" key="dict.sortedKeys">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<string>printDocument:</string>
-							<string>revertDocumentToSaved:</string>
-							<string>runPageLayout:</string>
-							<string>saveDocument:</string>
-							<string>saveDocumentAs:</string>
-							<string>saveDocumentTo:</string>
-						</object>
-						<object class="NSMutableArray" key="dict.values">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-						</object>
-					</object>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDocument.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSDocument</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDocumentScripting.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSDocumentController</string>
-					<string key="superclassName">NSObject</string>
-					<object class="NSMutableDictionary" key="actions">
-						<bool key="EncodedWithXMLCoder">YES</bool>
-						<object class="NSArray" key="dict.sortedKeys">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<string>clearRecentDocuments:</string>
-							<string>newDocument:</string>
-							<string>openDocument:</string>
-							<string>saveAllDocuments:</string>
-						</object>
-						<object class="NSMutableArray" key="dict.values">
-							<bool key="EncodedWithXMLCoder">YES</bool>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-							<string>id</string>
-						</object>
-					</object>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDocumentController.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSFontManager</string>
-					<string key="superclassName">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="946436764">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSFontManager.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSFormatter</string>
-					<string key="superclassName">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSFormatter.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSMatrix</string>
-					<string key="superclassName">NSControl</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSMatrix.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSMenu</string>
-					<string key="superclassName">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="1056362899">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSMenu.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSMenuItem</string>
-					<string key="superclassName">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="472958451">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSMenuItem.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSMovieView</string>
-					<string key="superclassName">NSView</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSMovieView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSAccessibility.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="822405504"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="850738725"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="624831158"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="310914472"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDictionaryController.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDragging.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="946436764"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSFontPanel.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSKeyValueBinding.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<reference key="sourceIdentifier" ref="1056362899"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSNibLoading.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSOutlineView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSPasteboard.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSSavePanel.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="809545482">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSTableView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSToolbarItem.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier" id="260078765">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSArchiver.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSClassDescription.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSError.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSFileManager.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSKeyValueCoding.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSKeyValueObserving.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSKeyedArchiver.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSObject.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSObjectScripting.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSPortCoder.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSRunLoop.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSScriptClassDescription.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSScriptKeyValueCoding.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSScriptObjectSpecifiers.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSScriptWhoseTests.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSThread.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSURL.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSURLConnection.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">Foundation.framework/Headers/NSURLDownload.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSResponder</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSInterfaceStyle.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSResponder</string>
-					<string key="superclassName">NSObject</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSResponder.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSTableView</string>
-					<string key="superclassName">NSControl</string>
-					<reference key="sourceIdentifier" ref="809545482"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSText</string>
-					<string key="superclassName">NSView</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSText.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSTextView</string>
-					<string key="superclassName">NSText</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSTextView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSView</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSClipView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSView</string>
-					<reference key="sourceIdentifier" ref="472958451"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSView</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSRulerView.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSView</string>
-					<string key="superclassName">NSResponder</string>
-					<reference key="sourceIdentifier" ref="260078765"/>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSWindow</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSDrawer.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSWindow</string>
-					<string key="superclassName">NSResponder</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSWindow.h</string>
-					</object>
-				</object>
-				<object class="IBPartialClassDescription">
-					<string key="className">NSWindow</string>
-					<object class="IBClassDescriptionSource" key="sourceIdentifier">
-						<string key="majorKey">IBFrameworkSource</string>
-						<string key="minorKey">AppKit.framework/Headers/NSWindowScripting.h</string>
-					</object>
-				</object>
-			</object>
-		</object>
-		<int key="IBDocument.localizationMode">0</int>
-		<object class="NSMutableDictionary" key="IBDocument.PluginDeclaredDependencyDefaults">
-			<string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin.macosx</string>
-			<integer value="1060" key="NS.object.0"/>
-		</object>
-		<object class="NSMutableDictionary" key="IBDocument.PluginDeclaredDevelopmentDependencies">
-			<string key="NS.key.0">com.apple.InterfaceBuilder.CocoaPlugin.InterfaceBuilder3</string>
-			<integer value="3000" key="NS.object.0"/>
-		</object>
-		<bool key="IBDocument.PluginDeclaredDependenciesTrackSystemTargetVersion">YES</bool>
-		<string key="IBDocument.LastKnownRelativeProjectPath">../TestApp.xcodeproj</string>
-		<int key="IBDocument.defaultPropertyAccessControl">3</int>
-	</data>
-</archive>
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings b/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
deleted file mode 100644
index 5807837..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings b/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
deleted file mode 100644
index eeb3837..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json b/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
deleted file mode 100644
index 2db2b1c..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/AppIcon.appiconset/Contents.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "mac",
-      "size" : "16x16",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "16x16",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "32x32",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "32x32",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "128x128",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "128x128",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "256x256",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "256x256",
-      "scale" : "2x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "512x512",
-      "scale" : "1x"
-    },
-    {
-      "idiom" : "mac",
-      "size" : "512x512",
-      "scale" : "2x"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json b/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
deleted file mode 100644
index 0a87b6e..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/Contents.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "images" : [
-    {
-      "idiom" : "universal",
-      "scale" : "1x",
-      "filename" : "super_sylvain.png"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "2x",
-      "filename" : "super_sylvain@2x.png"
-    },
-    {
-      "idiom" : "universal",
-      "scale" : "3x",
-      "filename" : "super_sylvain@3x.png"
-    }
-  ],
-  "info" : {
-    "version" : 1,
-    "author" : "xcode"
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png b/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
deleted file mode 100644
index 0ba7691..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png b/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
deleted file mode 100644
index edfa6a5..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@2x.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png b/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
deleted file mode 100644
index e0652ef..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/Images.xcassets/image.imageset/super_sylvain@3x.png
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist b/tools/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist
deleted file mode 100644
index e005852..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/TestApp-Info.plist
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>BuildMachineOSBuild</key>
-	<string>Doesn't matter, will be overwritten</string>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME:rfc1034identifier}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>ause</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h b/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h
deleted file mode 100644
index 518645e..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.h
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Cocoa/Cocoa.h>
-
-@interface TestAppAppDelegate : NSObject <NSApplicationDelegate> {
-    NSWindow *window;
-}
-
-@property (assign) IBOutlet NSWindow *window;
-
-@end
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m b/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m
deleted file mode 100644
index 9aafa42..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/TestAppAppDelegate.m
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "TestAppAppDelegate.h"
-
-@implementation TestAppAppDelegate
-
-@synthesize window;
-
-- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
-  // Insert code here to initialize your application
-}
-
-@end
diff --git a/tools/gyp/test/mac/app-bundle/TestApp/main.m b/tools/gyp/test/mac/app-bundle/TestApp/main.m
deleted file mode 100644
index df6a12d..0000000
--- a/tools/gyp/test/mac/app-bundle/TestApp/main.m
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Cocoa/Cocoa.h>
-
-int main(int argc, char *argv[])
-{
-    return NSApplicationMain(argc,  (const char **) argv);
-}
diff --git a/tools/gyp/test/mac/app-bundle/empty.c b/tools/gyp/test/mac/app-bundle/empty.c
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/app-bundle/empty.c
+++ /dev/null
diff --git a/tools/gyp/test/mac/app-bundle/test-assets-catalog.gyp b/tools/gyp/test/mac/app-bundle/test-assets-catalog.gyp
deleted file mode 100644
index 25f94a1..0000000
--- a/tools/gyp/test/mac/app-bundle/test-assets-catalog.gyp
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'dep_framework',
-      'product_name': 'Dependency Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'empty.c', ],
-    },
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Assets Catalog Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'dependencies': [ 'dep_framework', ],
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/TestApp_Prefix.pch',
-        'TestApp/TestAppAppDelegate.h',
-        'TestApp/TestAppAppDelegate.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',  # UTF-8
-        'TestApp/English.lproj/utf-16be.strings',
-        'TestApp/English.lproj/utf-16le.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-        'TestApp/Images.xcassets',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-        'MACOSX_DEPLOYMENT_TARGET': '10.9',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/app-bundle/test-error.gyp b/tools/gyp/test/mac/app-bundle/test-error.gyp
deleted file mode 100644
index 370772c..0000000
--- a/tools/gyp/test/mac/app-bundle/test-error.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/TestApp_Prefix.pch',
-        'TestApp/TestAppAppDelegate.h',
-        'TestApp/TestAppAppDelegate.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist-error.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/app-bundle/test.gyp b/tools/gyp/test/mac/app-bundle/test.gyp
deleted file mode 100644
index 21973c3..0000000
--- a/tools/gyp/test/mac/app-bundle/test.gyp
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'dep_framework',
-      'product_name': 'Dependency Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'empty.c', ],
-    },
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App Gyp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'dependencies': [ 'dep_framework', ],
-      'sources': [
-        'TestApp/main.m',
-        'TestApp/TestApp_Prefix.pch',
-        'TestApp/TestAppAppDelegate.h',
-        'TestApp/TestAppAppDelegate.m',
-      ],
-      'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',  # UTF-8
-        'TestApp/English.lproj/utf-16be.strings',
-        'TestApp/English.lproj/utf-16le.strings',
-        'TestApp/English.lproj/MainMenu.xib',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/archs/empty_main.cc b/tools/gyp/test/mac/archs/empty_main.cc
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/archs/empty_main.cc
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/archs/file.mm b/tools/gyp/test/mac/archs/file.mm
deleted file mode 100644
index d0b39d1..0000000
--- a/tools/gyp/test/mac/archs/file.mm
+++ /dev/null
@@ -1 +0,0 @@
-MyInt f() { return 0; }
diff --git a/tools/gyp/test/mac/archs/file_a.cc b/tools/gyp/test/mac/archs/file_a.cc
deleted file mode 100644
index 7307873..0000000
--- a/tools/gyp/test/mac/archs/file_a.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "file_a.h"
-
-void DependentFunctionA() {
-}
diff --git a/tools/gyp/test/mac/archs/file_a.h b/tools/gyp/test/mac/archs/file_a.h
deleted file mode 100644
index 7439d13..0000000
--- a/tools/gyp/test/mac/archs/file_a.h
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
-#define _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
-
-void DependentFunctionA();
-
-#endif // _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_A_H_
diff --git a/tools/gyp/test/mac/archs/file_b.cc b/tools/gyp/test/mac/archs/file_b.cc
deleted file mode 100644
index 72d59cb..0000000
--- a/tools/gyp/test/mac/archs/file_b.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "file_b.h"
-
-void DependentFunctionB() {
-}
diff --git a/tools/gyp/test/mac/archs/file_b.h b/tools/gyp/test/mac/archs/file_b.h
deleted file mode 100644
index eb272ec..0000000
--- a/tools/gyp/test/mac/archs/file_b.h
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
-#define _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
-
-void DependentFunctionB();
-
-#endif // _INCLUDED_TEST_MAC_DEPENDENCIES_FILE_B_H_
diff --git a/tools/gyp/test/mac/archs/file_c.cc b/tools/gyp/test/mac/archs/file_c.cc
deleted file mode 100644
index ca39f7a..0000000
--- a/tools/gyp/test/mac/archs/file_c.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "file_a.h"
-#include "file_b.h"
-
-void PublicFunctionC() {
-  DependentFunctionA();
-  DependentFunctionB();
-}
diff --git a/tools/gyp/test/mac/archs/file_d.cc b/tools/gyp/test/mac/archs/file_d.cc
deleted file mode 100644
index c40911c..0000000
--- a/tools/gyp/test/mac/archs/file_d.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "file_a.h"
-#include "file_b.h"
-
-void PublicFunctionD() {
-  DependentFunctionA();
-  DependentFunctionB();
-}
diff --git a/tools/gyp/test/mac/archs/header.h b/tools/gyp/test/mac/archs/header.h
deleted file mode 100644
index 0716e50..0000000
--- a/tools/gyp/test/mac/archs/header.h
+++ /dev/null
@@ -1 +0,0 @@
-typedef int MyInt;
diff --git a/tools/gyp/test/mac/archs/my_file.cc b/tools/gyp/test/mac/archs/my_file.cc
deleted file mode 100644
index 94216a7..0000000
--- a/tools/gyp/test/mac/archs/my_file.cc
+++ /dev/null
@@ -1,4 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-int x = 1;
diff --git a/tools/gyp/test/mac/archs/my_main_file.cc b/tools/gyp/test/mac/archs/my_main_file.cc
deleted file mode 100644
index f1fa06f..0000000
--- a/tools/gyp/test/mac/archs/my_main_file.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-#include <stdio.h>
-extern int x;
-int main() {
-  printf("hello, world %d\n", x);
-}
-
diff --git a/tools/gyp/test/mac/archs/test-archs-multiarch.gyp b/tools/gyp/test/mac/archs/test-archs-multiarch.gyp
deleted file mode 100644
index 567e8a6..0000000
--- a/tools/gyp/test/mac/archs/test-archs-multiarch.gyp
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'static_32_64',
-      'type': 'static_library',
-      'sources': [ 'my_file.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'shared_32_64',
-      'type': 'shared_library',
-      'sources': [ 'my_file.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'shared_32_64_bundle',
-      'product_name': 'My Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'my_file.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'module_32_64',
-      'type': 'loadable_module',
-      'sources': [ 'my_file.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'module_32_64_bundle',
-      'product_name': 'My Bundle',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'sources': [ 'my_file.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'exe_32_64',
-      'type': 'executable',
-      'sources': [ 'empty_main.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    {
-      'target_name': 'exe_32_64_bundle',
-      'product_name': 'Test App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'empty_main.cc' ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-    # This only needs to compile.
-    {
-      'target_name': 'precompiled_prefix_header_mm_32_64',
-      'type': 'shared_library',
-      'sources': [ 'file.mm', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-    # This does not compile but should not cause generation errors.
-    {
-      'target_name': 'exe_32_64_no_sources',
-      'type': 'executable',
-      'dependencies': [
-        'static_32_64',
-      ],
-      'sources': [],
-      'xcode_settings': {
-        'ARCHS': ['i386', 'x86_64'],
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/mac/archs/test-archs-x86_64.gyp b/tools/gyp/test/mac/archs/test-archs-x86_64.gyp
deleted file mode 100644
index d11a896..0000000
--- a/tools/gyp/test/mac/archs/test-archs-x86_64.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-  {
-   'target_name': 'lib',
-   'product_name': 'Test64',
-   'type': 'static_library',
-   'sources': [ 'my_file.cc' ],
-   'xcode_settings': {
-     'ARCHS': [ 'x86_64' ],
-   },
-  },
-  {
-   'target_name': 'exe',
-   'product_name': 'Test64',
-   'type': 'executable',
-   'dependencies': [ 'lib' ],
-   'sources': [ 'my_main_file.cc' ],
-   'xcode_settings': {
-     'ARCHS': [ 'x86_64' ],
-   },
-  },
- ]
-}
diff --git a/tools/gyp/test/mac/archs/test-dependencies.gyp b/tools/gyp/test/mac/archs/test-dependencies.gyp
deleted file mode 100644
index 0431f5f..0000000
--- a/tools/gyp/test/mac/archs/test-dependencies.gyp
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'xcode_settings': {
-      'ARCHS': ['i386', 'x86_64'],
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'target_a',
-      'type': 'static_library',
-      'sources': [
-        'file_a.cc',
-        'file_a.h',
-      ],
-    },
-    {
-      'target_name': 'target_b',
-      'type': 'static_library',
-      'sources': [
-        'file_b.cc',
-        'file_b.h',
-      ],
-    },
-    {
-      'target_name': 'target_c_standalone_helper',
-      'type': 'loadable_module',
-      'hard_dependency': 1,
-      'dependencies': [
-        'target_a',
-        'target_b',
-      ],
-      'sources': [
-        'file_c.cc',
-      ],
-    },
-    {
-      'target_name': 'target_c_standalone',
-      'type': 'none',
-      'dependencies': [
-        'target_c_standalone_helper',
-      ],
-      'actions': [
-        {
-          'action_name': 'Package C',
-          'inputs': [],
-          'outputs': [
-            '<(PRODUCT_DIR)/libc_standalone.a',
-          ],
-          'action': [
-            'touch',
-            '<@(_outputs)',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'target_d_standalone_helper',
-      'type': 'shared_library',
-      'dependencies': [
-        'target_a',
-        'target_b',
-      ],
-      'sources': [
-        'file_d.cc',
-      ],
-    },
-    {
-      'target_name': 'target_d_standalone',
-      'type': 'none',
-      'dependencies': [
-        'target_d_standalone_helper',
-      ],
-      'actions': [
-        {
-          'action_name': 'Package D',
-          'inputs': [],
-          'outputs': [
-            '<(PRODUCT_DIR)/libd_standalone.a',
-          ],
-          'action': [
-            'touch',
-            '<@(_outputs)',
-          ],
-        },
-      ],
-    }
-  ],
-}
diff --git a/tools/gyp/test/mac/archs/test-no-archs.gyp b/tools/gyp/test/mac/archs/test-no-archs.gyp
deleted file mode 100644
index 8f3b6b4..0000000
--- a/tools/gyp/test/mac/archs/test-no-archs.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-  {
-   'target_name': 'lib',
-   'product_name': 'Test',
-   'type': 'static_library',
-   'sources': [ 'my_file.cc' ],
-  },
-  {
-   'target_name': 'exe',
-   'product_name': 'Test',
-   'type': 'executable',
-   'dependencies': [ 'lib' ],
-   'sources': [ 'my_main_file.cc' ],
-  },
- ]
-}
diff --git a/tools/gyp/test/mac/archs/test-valid-archs.gyp b/tools/gyp/test/mac/archs/test-valid-archs.gyp
deleted file mode 100644
index c90ec1f..0000000
--- a/tools/gyp/test/mac/archs/test-valid-archs.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-  {
-    'target_name': 'lib',
-    'product_name': 'Test',
-    'type': 'static_library',
-    'sources': [ 'my_file.cc' ],
-    'xcode_settings': {
-      'ARCHS': ['i386', 'x86_64', 'unknown-arch'],
-      'VALID_ARCHS': ['x86_64'],
-    },
-  },
-  {
-    'target_name': 'exe',
-    'product_name': 'Test',
-    'type': 'executable',
-    'dependencies': [ 'lib' ],
-    'sources': [ 'my_main_file.cc' ],
-    'xcode_settings': {
-      'ARCHS': ['i386', 'x86_64', 'unknown-arch'],
-      'VALID_ARCHS': ['x86_64'],
-    },
-  }]
-}
diff --git a/tools/gyp/test/mac/bundle-resources/change.sh b/tools/gyp/test/mac/bundle-resources/change.sh
deleted file mode 100755
index 6d0fe6c..0000000
--- a/tools/gyp/test/mac/bundle-resources/change.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-tr a-z A-Z < "${1}" > "${2}"
diff --git a/tools/gyp/test/mac/bundle-resources/executable-file.sh b/tools/gyp/test/mac/bundle-resources/executable-file.sh
deleted file mode 100755
index 796953a..0000000
--- a/tools/gyp/test/mac/bundle-resources/executable-file.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo echo echo echo cho ho o o
diff --git a/tools/gyp/test/mac/bundle-resources/secret.txt b/tools/gyp/test/mac/bundle-resources/secret.txt
deleted file mode 100644
index 8baef1b..0000000
--- a/tools/gyp/test/mac/bundle-resources/secret.txt
+++ /dev/null
@@ -1 +0,0 @@
-abc
diff --git a/tools/gyp/test/mac/bundle-resources/test.gyp b/tools/gyp/test/mac/bundle-resources/test.gyp
deleted file mode 100644
index af034ce..0000000
--- a/tools/gyp/test/mac/bundle-resources/test.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'resource',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'secret.txt',
-        'executable-file.sh',
-      ],
-    },
-    # A rule with process_outputs_as_mac_bundle_resources should copy files
-    # into the Resources folder.
-    {
-      'target_name': 'source_rule',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'secret.txt',
-      ],
-      'rules': [
-        {
-          'rule_name': 'bundlerule',
-          'extension': 'txt',
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
-          ],
-          'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
-          'message': 'Running rule on <(RULE_INPUT_PATH)',
-          'process_outputs_as_mac_bundle_resources': 1,
-        },
-      ],
-    },
-    # So should an ordinary rule acting on mac_bundle_resources.
-    {
-      'target_name': 'resource_rule',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'secret.txt',
-      ],
-      'rules': [
-        {
-          'rule_name': 'bundlerule',
-          'extension': 'txt',
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
-          ],
-          'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
-          'message': 'Running rule on <(RULE_INPUT_PATH)',
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/cflags/ccfile.cc b/tools/gyp/test/mac/cflags/ccfile.cc
deleted file mode 100644
index 1a54d18..0000000
--- a/tools/gyp/test/mac/cflags/ccfile.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifdef CFLAG
-#error CFLAG should not be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/ccfile_withcflags.cc b/tools/gyp/test/mac/cflags/ccfile_withcflags.cc
deleted file mode 100644
index de078a0..0000000
--- a/tools/gyp/test/mac/cflags/ccfile_withcflags.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/cfile.c b/tools/gyp/test/mac/cflags/cfile.c
deleted file mode 100644
index 0af9d0a..0000000
--- a/tools/gyp/test/mac/cflags/cfile.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifdef CCFLAG
-#error CCFLAG should not be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/cppfile.cpp b/tools/gyp/test/mac/cflags/cppfile.cpp
deleted file mode 100644
index 1a54d18..0000000
--- a/tools/gyp/test/mac/cflags/cppfile.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifdef CFLAG
-#error CFLAG should not be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/cppfile_withcflags.cpp b/tools/gyp/test/mac/cflags/cppfile_withcflags.cpp
deleted file mode 100644
index de078a0..0000000
--- a/tools/gyp/test/mac/cflags/cppfile_withcflags.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/cxxfile.cxx b/tools/gyp/test/mac/cflags/cxxfile.cxx
deleted file mode 100644
index 1a54d18..0000000
--- a/tools/gyp/test/mac/cflags/cxxfile.cxx
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifdef CFLAG
-#error CFLAG should not be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/cxxfile_withcflags.cxx b/tools/gyp/test/mac/cflags/cxxfile_withcflags.cxx
deleted file mode 100644
index de078a0..0000000
--- a/tools/gyp/test/mac/cflags/cxxfile_withcflags.cxx
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/mfile.m b/tools/gyp/test/mac/cflags/mfile.m
deleted file mode 100644
index 0af9d0a..0000000
--- a/tools/gyp/test/mac/cflags/mfile.m
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifdef CCFLAG
-#error CCFLAG should not be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/mmfile.mm b/tools/gyp/test/mac/cflags/mmfile.mm
deleted file mode 100644
index 1a54d18..0000000
--- a/tools/gyp/test/mac/cflags/mmfile.mm
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifdef CFLAG
-#error CFLAG should not be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/mmfile_withcflags.mm b/tools/gyp/test/mac/cflags/mmfile_withcflags.mm
deleted file mode 100644
index de078a0..0000000
--- a/tools/gyp/test/mac/cflags/mmfile_withcflags.mm
+++ /dev/null
@@ -1,7 +0,0 @@
-#ifndef CFLAG
-#error CFLAG should be set
-#endif
-
-#ifndef CCFLAG
-#error CCFLAG should be set
-#endif
diff --git a/tools/gyp/test/mac/cflags/test.gyp b/tools/gyp/test/mac/cflags/test.gyp
deleted file mode 100644
index d330a54..0000000
--- a/tools/gyp/test/mac/cflags/test.gyp
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'mytarget',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile.cc',
-        'cppfile.cpp',
-        'cxxfile.cxx',
-        'mmfile.mm',
-      ],
-      'xcode_settings': {
-        # Normally, defines would go in 'defines' instead. This is just for
-        # testing.
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'OTHER_CPLUSPLUSFLAGS': [
-          '-DCCFLAG',
-        ],
-        'GCC_C_LANGUAGE_STANDARD': 'c99',
-      },
-    },
-    {
-      'target_name': 'mytarget_reuse_cflags',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile_withcflags.cc',
-        'cppfile_withcflags.cpp',
-        'cxxfile_withcflags.cxx',
-        'mmfile_withcflags.mm',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'OTHER_CPLUSPLUSFLAGS': [
-          '$OTHER_CFLAGS',
-          '-DCCFLAG',
-        ],
-        # This is a C-only flag, to check these don't get added to C++ files.
-        'GCC_C_LANGUAGE_STANDARD': 'c99',
-      },
-    },
-    {
-      'target_name': 'mytarget_inherit_cflags',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile_withcflags.cc',
-        'cppfile_withcflags.cpp',
-        'cxxfile_withcflags.cxx',
-        'mmfile_withcflags.mm',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'OTHER_CPLUSPLUSFLAGS': [
-          '$inherited',
-          '-DCCFLAG',
-        ],
-        'GCC_C_LANGUAGE_STANDARD': 'c99',
-      },
-    },
-    {
-      'target_name': 'mytarget_inherit_cflags_parens',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile_withcflags.cc',
-        'cppfile_withcflags.cpp',
-        'cxxfile_withcflags.cxx',
-        'mmfile_withcflags.mm',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'OTHER_CPLUSPLUSFLAGS': [
-          '$(inherited)',
-          '-DCCFLAG',
-        ],
-        'GCC_C_LANGUAGE_STANDARD': 'c99',
-      },
-    },
-    {
-      'target_name': 'mytarget_inherit_cflags_braces',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile_withcflags.cc',
-        'cppfile_withcflags.cpp',
-        'cxxfile_withcflags.cxx',
-        'mmfile_withcflags.mm',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'OTHER_CPLUSPLUSFLAGS': [
-          '${inherited}',
-          '-DCCFLAG',
-        ],
-        'GCC_C_LANGUAGE_STANDARD': 'c99',
-      },
-    },
-    {
-      'target_name': 'ansi_standard',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-      ],
-      'xcode_settings': {
-        'OTHER_CFLAGS': [
-          '-DCFLAG',
-        ],
-        'GCC_C_LANGUAGE_STANDARD': 'ansi',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/clang-cxx-language-standard/c++11.cc b/tools/gyp/test/mac/clang-cxx-language-standard/c++11.cc
deleted file mode 100644
index 756dc1c..0000000
--- a/tools/gyp/test/mac/clang-cxx-language-standard/c++11.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-static_assert(__cplusplus == 201103L, "wrong c++ standard version");
-
-int main() { return 0; }
-
diff --git a/tools/gyp/test/mac/clang-cxx-language-standard/c++98.cc b/tools/gyp/test/mac/clang-cxx-language-standard/c++98.cc
deleted file mode 100644
index a6a00c7..0000000
--- a/tools/gyp/test/mac/clang-cxx-language-standard/c++98.cc
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#if __cplusplus != 199711L
-#error wrong c++ standard version
-#endif
-
-enum cxx11_keywords {
-  alignas,
-  alignof,
-  char16_t,
-  char32_t,
-  constexpr,
-  decltype,
-  noexcept,
-  nullptr,
-  override,
-  static_assert,
-  thread_local,
-};
-
-int main() { return 0; }
-
diff --git a/tools/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp b/tools/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp
deleted file mode 100644
index eb60bbd..0000000
--- a/tools/gyp/test/mac/clang-cxx-language-standard/clang-cxx-language-standard.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-  'targets': [
-    {
-      'target_name': 'c++98',
-      'type': 'executable',
-      'sources': [ 'c++98.cc', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'CLANG_CXX_LANGUAGE_STANDARD': 'c++98',
-      },
-    },
-    {
-      'target_name': 'c++11',
-      'type': 'executable',
-      'sources': [ 'c++11.cc', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'CLANG_CXX_LANGUAGE_STANDARD': 'c++0x',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp b/tools/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp
deleted file mode 100644
index 67006e5..0000000
--- a/tools/gyp/test/mac/clang-cxx-library/clang-cxx-library.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-  'targets': [
-    {
-      'target_name': 'libc++',
-      'type': 'executable',
-      'sources': [ 'libc++.cc', ],
-      'xcode_settings': {
-        'CC': 'clang',
-        # libc++ requires OS X 10.7+.
-        'MACOSX_DEPLOYMENT_TARGET': '10.7',
-        'CLANG_CXX_LIBRARY': 'libc++',
-      },
-    },
-    {
-      'target_name': 'libstdc++',
-      'type': 'executable',
-      'sources': [ 'libstdc++.cc', ],
-      'xcode_settings': {
-        'CC': 'clang',
-        'CLANG_CXX_LIBRARY': 'libstdc++',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/clang-cxx-library/libc++.cc b/tools/gyp/test/mac/clang-cxx-library/libc++.cc
deleted file mode 100644
index b8d6e6b..0000000
--- a/tools/gyp/test/mac/clang-cxx-library/libc++.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <string>
-#ifndef _LIBCPP_VERSION
-#error expected std library: libc++
-#endif
-
-int main() { std::string x; return x.size(); }
-
diff --git a/tools/gyp/test/mac/clang-cxx-library/libstdc++.cc b/tools/gyp/test/mac/clang-cxx-library/libstdc++.cc
deleted file mode 100644
index 474dbf3..0000000
--- a/tools/gyp/test/mac/clang-cxx-library/libstdc++.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <string>
-#ifndef __GLIBCXX__
-#error expected std library: libstdc++
-#endif
-
-int main() { std::string x; return x.size(); }
-
diff --git a/tools/gyp/test/mac/copy-dylib/empty.c b/tools/gyp/test/mac/copy-dylib/empty.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/copy-dylib/empty.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/copy-dylib/test.gyp b/tools/gyp/test/mac/copy-dylib/test.gyp
deleted file mode 100644
index 4210c51..0000000
--- a/tools/gyp/test/mac/copy-dylib/test.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'my_dylib',
-      'type': 'shared_library',
-      'sources': [ 'empty.c', ],
-    },
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'dependencies': [ 'my_dylib', ],
-      'sources': [
-        'empty.c',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/Test App.app/Contents/Resources',
-          'files': [
-            '<(PRODUCT_DIR)/libmy_dylib.dylib',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/debuginfo/file.c b/tools/gyp/test/mac/debuginfo/file.c
deleted file mode 100644
index 9cddaf1..0000000
--- a/tools/gyp/test/mac/debuginfo/file.c
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void f() {}
-int main() {}
diff --git a/tools/gyp/test/mac/debuginfo/test.gyp b/tools/gyp/test/mac/debuginfo/test.gyp
deleted file mode 100644
index 3faf6b5..0000000
--- a/tools/gyp/test/mac/debuginfo/test.gyp
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'nonbundle_static_library',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'nonbundle_shared_library',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'nonbundle_loadable_module',
-      'type': 'loadable_module',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'nonbundle_executable',
-      'type': 'executable',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-
-    {
-      'target_name': 'bundle_shared_library',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'bundle_loadable_module',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'my_app',
-      'product_name': 'My App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings b/tools/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings
deleted file mode 100644
index b92732c..0000000
--- a/tools/gyp/test/mac/depend-on-bundle/English.lproj/InfoPlist.strings
+++ /dev/null
@@ -1 +0,0 @@
-/* Localized versions of Info.plist keys */
diff --git a/tools/gyp/test/mac/depend-on-bundle/Info.plist b/tools/gyp/test/mac/depend-on-bundle/Info.plist
deleted file mode 100644
index 5e05a51..0000000
--- a/tools/gyp/test/mac/depend-on-bundle/Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>FMWK</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/depend-on-bundle/bundle.c b/tools/gyp/test/mac/depend-on-bundle/bundle.c
deleted file mode 100644
index d64ff8c..0000000
--- a/tools/gyp/test/mac/depend-on-bundle/bundle.c
+++ /dev/null
@@ -1 +0,0 @@
-int f() { return 42; }
diff --git a/tools/gyp/test/mac/depend-on-bundle/executable.c b/tools/gyp/test/mac/depend-on-bundle/executable.c
deleted file mode 100644
index 931bce6..0000000
--- a/tools/gyp/test/mac/depend-on-bundle/executable.c
+++ /dev/null
@@ -1,4 +0,0 @@
-int f();
-int main() {
-  return f();
-}
diff --git a/tools/gyp/test/mac/depend-on-bundle/test.gyp b/tools/gyp/test/mac/depend-on-bundle/test.gyp
deleted file mode 100644
index e00b105..0000000
--- a/tools/gyp/test/mac/depend-on-bundle/test.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'my_bundle',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'bundle.c' ],
-      'mac_bundle_resources': [
-        'English.lproj/InfoPlist.strings',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-      }
-    },
-    {
-      'target_name': 'dependent_on_bundle',
-      'type': 'executable',
-      'sources': [ 'executable.c' ],
-      'dependencies': [
-        'my_bundle',
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/deployment-target/check-version-min.c b/tools/gyp/test/mac/deployment-target/check-version-min.c
deleted file mode 100644
index 761c529..0000000
--- a/tools/gyp/test/mac/deployment-target/check-version-min.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/* Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <Availability.h>
-
-/* GYPTEST_MAC_VERSION_MIN: should be set to the corresponding value of
- * xcode setting 'MACOSX_DEPLOYMENT_TARGET', otherwise both should be
- * left undefined.
- *
- * GYPTEST_IOS_VERSION_MIN: should be set to the corresponding value of
- * xcode setting 'IPHONEOS_DEPLOYMENT_TARGET', otherwise both should be
- * left undefined.
- */
-
-#if defined(GYPTEST_MAC_VERSION_MIN)
-# if GYPTEST_MAC_VERSION_MIN != __MAC_OS_X_VERSION_MIN_REQUIRED
-#  error __MAC_OS_X_VERSION_MIN_REQUIRED has wrong value
-# endif
-#elif defined(__MAC_OS_X_VERSION_MIN_REQUIRED)
-# error __MAC_OS_X_VERSION_MIN_REQUIRED should be undefined
-#endif
-
-#if defined(GYPTEST_IOS_VERSION_MIN)
-# if GYPTEST_IOS_VERSION_MIN != __IPHONE_OS_VERSION_MIN_REQUIRED
-#  error __IPHONE_OS_VERSION_MIN_REQUIRED has wrong value
-# endif
-#elif defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
-# error __IPHONE_OS_VERSION_MIN_REQUIRED should be undefined
-#endif
-
-int main() { return 0; }
-
diff --git a/tools/gyp/test/mac/deployment-target/deployment-target.gyp b/tools/gyp/test/mac/deployment-target/deployment-target.gyp
deleted file mode 100644
index 47e0565..0000000
--- a/tools/gyp/test/mac/deployment-target/deployment-target.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'macosx-version-min-10.5',
-      'type': 'executable',
-      'sources': [ 'check-version-min.c', ],
-      'defines': [ 'GYPTEST_MAC_VERSION_MIN=1050', ],
-      'xcode_settings': {
-        'SDKROOT': 'macosx',
-        'MACOSX_DEPLOYMENT_TARGET': '10.5',
-      },
-    },
-    {
-      'target_name': 'macosx-version-min-10.6',
-      'type': 'executable',
-      'sources': [ 'check-version-min.c', ],
-      'defines': [ 'GYPTEST_MAC_VERSION_MIN=1060', ],
-      'xcode_settings': {
-        'SDKROOT': 'macosx',
-        'MACOSX_DEPLOYMENT_TARGET': '10.6',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/framework-dirs/calculate.c b/tools/gyp/test/mac/framework-dirs/calculate.c
deleted file mode 100644
index 7dc9d2d..0000000
--- a/tools/gyp/test/mac/framework-dirs/calculate.c
+++ /dev/null
@@ -1,15 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-int CalculatePerformExpression(char* expr,
-                               int significantDigits,
-                               int flags,
-                               char* answer);
-
-int main() {
-  char buffer[1024];
-  return CalculatePerformExpression("42", 1, 0, buffer);
-}
-
diff --git a/tools/gyp/test/mac/framework-dirs/framework-dirs.gyp b/tools/gyp/test/mac/framework-dirs/framework-dirs.gyp
deleted file mode 100644
index bf1cbde..0000000
--- a/tools/gyp/test/mac/framework-dirs/framework-dirs.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'calculate',
-      'type': 'executable',
-      'sources': [
-        'calculate.c',
-      ],
-      'libraries': [
-        '/System/Library/PrivateFrameworks/Calculate.framework',
-      ],
-      'mac_framework_dirs': [
-        '/System/Library/PrivateFrameworks',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/framework-headers/myframework.h b/tools/gyp/test/mac/framework-headers/myframework.h
deleted file mode 100644
index 961fc70..0000000
--- a/tools/gyp/test/mac/framework-headers/myframework.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Foundation/Foundation.h>
-
-@interface TestObject : NSObject
-@end
diff --git a/tools/gyp/test/mac/framework-headers/myframework.m b/tools/gyp/test/mac/framework-headers/myframework.m
deleted file mode 100644
index 13d53a3..0000000
--- a/tools/gyp/test/mac/framework-headers/myframework.m
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "myframework.h"
-
-@implementation TestObject
-@end
diff --git a/tools/gyp/test/mac/framework-headers/test.gyp b/tools/gyp/test/mac/framework-headers/test.gyp
deleted file mode 100644
index 70ed007..0000000
--- a/tools/gyp/test/mac/framework-headers/test.gyp
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {    
-      'target_name': 'test_framework_headers_framework',
-      'product_name': 'TestFramework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [
-        'myframework.h',
-        'myframework.m',
-      ],
-      'mac_framework_headers': [
-        'myframework.h',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-        ],
-      },
-    },{    
-      'target_name': 'test_framework_headers_static',
-      'product_name': 'TestLibrary',
-      'type': 'static_library',
-      'xcode_settings': {
-        'PUBLIC_HEADERS_FOLDER_PATH': 'include',
-      },      
-      'sources': [
-        'myframework.h',
-        'myframework.m',
-      ],
-      'mac_framework_headers': [
-        'myframework.h',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-        ],
-      },      
-    },  
-  ],
-}
diff --git a/tools/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings b/tools/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings
deleted file mode 100644
index 88f65cf..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/English.lproj/InfoPlist.strings
+++ /dev/null
@@ -1,2 +0,0 @@
-/* Localized versions of Info.plist keys */
-
diff --git a/tools/gyp/test/mac/framework/TestFramework/Info.plist b/tools/gyp/test/mac/framework/TestFramework/Info.plist
deleted file mode 100644
index a791b3e..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME:identifier}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>FMWK</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/framework/TestFramework/ObjCVector.h b/tools/gyp/test/mac/framework/TestFramework/ObjCVector.h
deleted file mode 100644
index c245096..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/ObjCVector.h
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Cocoa/Cocoa.h>
-
-#ifdef __cplusplus
-struct ObjCVectorImp;
-#else
-typedef struct _ObjCVectorImpT ObjCVectorImp;
-#endif
-
-@interface ObjCVector : NSObject {
- @private
-  ObjCVectorImp* imp_;
-}
-
-- (id)init;
-
-- (void)addObject:(id)obj;
-- (void)addObject:(id)obj atIndex:(NSUInteger)index;
-
-- (void)removeObject:(id)obj;
-- (void)removeObjectAtIndex:(NSUInteger)index;
-
-- (id)objectAtIndex:(NSUInteger)index;
-
-@end
diff --git a/tools/gyp/test/mac/framework/TestFramework/ObjCVector.mm b/tools/gyp/test/mac/framework/TestFramework/ObjCVector.mm
deleted file mode 100644
index cbf431f..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/ObjCVector.mm
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "ObjCVectorInternal.h"
-#import "ObjCVector.h"
-
-#include <vector>
-
-@interface ObjCVector (Private)
-- (std::vector<id>::iterator)makeIterator:(NSUInteger)index;
-@end
-
-@implementation ObjCVector
-
-- (id)init {
-  if ((self = [super init])) {
-    imp_ = new ObjCVectorImp();
-  }
-  return self;
-}
-
-- (void)dealloc {
-  delete imp_;
-  [super dealloc];
-}
-
-- (void)addObject:(id)obj {
-  imp_->v.push_back([obj retain]);
-}
-
-- (void)addObject:(id)obj atIndex:(NSUInteger)index {
-  imp_->v.insert([self makeIterator:index], [obj retain]);
-}
-
-- (void)removeObject:(id)obj {
-  for (std::vector<id>::iterator it = imp_->v.begin();
-       it != imp_->v.end();
-       ++it) {
-    if ([*it isEqual:obj]) {
-      [*it autorelease];
-      imp_->v.erase(it);
-      return;
-    }
-  }
-}
-
-- (void)removeObjectAtIndex:(NSUInteger)index {
-  [imp_->v[index] autorelease];
-  imp_->v.erase([self makeIterator:index]);
-}
-
-- (id)objectAtIndex:(NSUInteger)index {
-  return imp_->v[index];
-}
-
-- (std::vector<id>::iterator)makeIterator:(NSUInteger)index {
-  std::vector<id>::iterator it = imp_->v.begin();
-  it += index;
-  return it;
-}
-
-@end
diff --git a/tools/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h b/tools/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h
deleted file mode 100644
index fb6c982..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/ObjCVectorInternal.h
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <vector>
-
-struct ObjCVectorImp {
-  std::vector<id> v;
-};
diff --git a/tools/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch b/tools/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch
deleted file mode 100644
index 394f41d..0000000
--- a/tools/gyp/test/mac/framework/TestFramework/TestFramework_Prefix.pch
+++ /dev/null
@@ -1,7 +0,0 @@
-//
-// Prefix header for all source files of the 'TestFramework' target in the 'TestFramework' project.
-//
-
-#ifdef __OBJC__
-    #import <Cocoa/Cocoa.h>
-#endif
diff --git a/tools/gyp/test/mac/framework/empty.c b/tools/gyp/test/mac/framework/empty.c
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/framework/empty.c
+++ /dev/null
diff --git a/tools/gyp/test/mac/framework/framework.gyp b/tools/gyp/test/mac/framework/framework.gyp
deleted file mode 100644
index 52b4f37..0000000
--- a/tools/gyp/test/mac/framework/framework.gyp
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'dep_framework',
-      'product_name': 'Dependency Bundle',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'empty.c', ],
-    },
-    {    
-      'target_name': 'test_framework',
-      'product_name': 'Test Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'dependencies': [ 'dep_framework', ],
-      'sources': [
-        'TestFramework/ObjCVector.h',
-        'TestFramework/ObjCVectorInternal.h',
-        'TestFramework/ObjCVector.mm',
-      ],
-      'mac_bundle_resources': [
-        'TestFramework/English.lproj/InfoPlist.strings',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Cocoa.framework',
-        ],
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestFramework/Info.plist',
-        'GCC_DYNAMIC_NO_PIC': 'NO',
-      },
-      'copies': [
-        # Test copying to a file that has envvars in its dest path.
-        # Needs to be in a mac_bundle target, else CONTENTS_FOLDER_PATH isn't
-        # set.
-        {
-          'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Libraries',
-          'files': [
-            'empty.c',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copy_target',
-      'type': 'none',
-      'dependencies': [ 'test_framework', 'dep_framework', ],
-      'copies': [
-        # Test copying directories with spaces in src and dest paths.
-        {
-          'destination': '<(PRODUCT_DIR)/Test Framework.framework/foo',
-          'files': [
-            '<(PRODUCT_DIR)/Dependency Bundle.framework',
-          ],
-        },
-      ],
-      'actions': [
-        {
-          'action_name': 'aektschn',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/touched_file'],
-          'action': ['touch', '${BUILT_PRODUCTS_DIR}/action_file'],
-        },
-      ],
-    },
-    {
-      'target_name': 'copy_embedded',
-      'type': 'none',
-      'dependencies': [ 'test_framework' ],
-      'copies': [
-        # Test copying framework to FRAMEWORK directory.
-        {
-          'destination': '$(BUILT_FRAMEWORKS_DIR)/Embedded',
-          'files': [
-            '<(PRODUCT_DIR)/Test Framework.framework',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'copy_target_code_sign',
-      'type': 'none',
-      'dependencies': [ 'test_framework', 'dep_framework', ],
-      'copies': [
-        # Test copying directories with spaces in src and dest paths.
-        {
-          'destination': '<(PRODUCT_DIR)/Test Framework.framework/foo',
-          'files': [
-            '<(PRODUCT_DIR)/Dependency Bundle.framework',
-          ],
-         'xcode_code_sign': 1,
-        },
-      ],
-      'actions': [
-        {
-          'action_name': 'aektschn',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/touched_file'],
-          'action': ['touch', '${BUILT_PRODUCTS_DIR}/action_file'],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/global-settings/src/dir1/dir1.gyp b/tools/gyp/test/mac/global-settings/src/dir1/dir1.gyp
deleted file mode 100644
index 153e34d..0000000
--- a/tools/gyp/test/mac/global-settings/src/dir1/dir1.gyp
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'dir1_target',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/global-settings/src/dir2/dir2.gyp b/tools/gyp/test/mac/global-settings/src/dir2/dir2.gyp
deleted file mode 100644
index cda46c8..0000000
--- a/tools/gyp/test/mac/global-settings/src/dir2/dir2.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'dir2_target',
-      'type': 'none',
-      'dependencies': [
-        '../dir1/dir1.gyp:dir1_target',
-      ],
-      'actions': [
-        {
-          'inputs': [ ],
-          'outputs': [ '<(PRODUCT_DIR)/file.txt' ],
-          'action_name': 'Test action',
-          'action': ['cp', 'file.txt', '${BUILT_PRODUCTS_DIR}/file.txt' ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/global-settings/src/dir2/file.txt b/tools/gyp/test/mac/global-settings/src/dir2/file.txt
deleted file mode 100644
index 58da2d8..0000000
--- a/tools/gyp/test/mac/global-settings/src/dir2/file.txt
+++ /dev/null
@@ -1 +0,0 @@
-File.
diff --git a/tools/gyp/test/mac/gyptest-action-envvars.py b/tools/gyp/test/mac/gyptest-action-envvars.py
deleted file mode 100644
index c84eeaa..0000000
--- a/tools/gyp/test/mac/gyptest-action-envvars.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that env vars work with actions, with relative directory paths.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  # The xcode-ninja generator handles gypfiles which are not at the
-  # project root incorrectly.
-  # cf. https://code.google.com/p/gyp/issues/detail?id=460
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  CHDIR = 'action-envvars'
-  test.run_gyp('action/action.gyp', chdir=CHDIR)
-  test.build('action/action.gyp', 'action', chdir=CHDIR, SYMROOT='../build')
-
-  result_file = test.built_file_path('result', chdir=CHDIR)
-  test.must_exist(result_file)
-  test.must_contain(result_file, 'Test output')
-
-  other_result_file = test.built_file_path('other_result', chdir=CHDIR)
-  test.must_exist(other_result_file)
-  test.must_contain(other_result_file, 'Other output')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-app-assets-catalog.py b/tools/gyp/test/mac/gyptest-app-assets-catalog.py
deleted file mode 100755
index ca76b51..0000000
--- a/tools/gyp/test/mac/gyptest-app-assets-catalog.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that app bundles are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import os
-import plistlib
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-def ExpectEq(expected, actual):
-  if expected != actual:
-    print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
-    test.fail_test()
-
-def ls(path):
-  '''Returns a list of all files in a directory, relative to the directory.'''
-  result = []
-  for dirpath, _, files in os.walk(path):
-    for f in files:
-      result.append(os.path.join(dirpath, f)[len(path) + 1:])
-  return result
-
-# Xcode supports for assets catalog was introduced in Xcode 6.0
-if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
-  test_gyp_path = 'test-assets-catalog.gyp'
-  test_app_path = 'Test App Assets Catalog Gyp.app'
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-  test.run_gyp(test_gyp_path, chdir='app-bundle')
-  test.build(test_gyp_path, test.ALL, chdir='app-bundle')
-
-  # Binary
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'Contents/MacOS/Test App Assets Catalog Gyp'),
-      chdir='app-bundle')
-
-  # Info.plist
-  info_plist = test.built_file_path(
-      os.path.join(test_app_path, 'Contents/Info.plist'),
-      chdir='app-bundle')
-  test.must_exist(info_plist)
-  test.must_contain(
-      info_plist,
-      'com.google.Test-App-Assets-Catalog-Gyp')  # Variable expansion
-  test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}');
-
-  if test.format != 'make':
-    # TODO: Synthesized plist entries aren't hooked up in the make generator.
-    machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
-    plist = plistlib.readPlist(info_plist)
-    ExpectEq(machine, plist['BuildMachineOSBuild'])
-
-    expected = ''
-    version = TestMac.Xcode.SDKVersion()
-    expected = 'macosx' + version
-    ExpectEq(expected, plist['DTSDKName'])
-    sdkbuild = TestMac.Xcode.SDKBuild()
-    if not sdkbuild:
-      # Above command doesn't work in Xcode 4.2.
-      sdkbuild = plist['BuildMachineOSBuild']
-    ExpectEq(sdkbuild, plist['DTSDKBuild'])
-    ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
-    ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
-
-  # Resources
-  strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
-  for f in strings_files:
-    strings = test.built_file_path(
-        os.path.join(test_app_path, 'Contents/Resources/English.lproj', f),
-        chdir='app-bundle')
-    test.must_exist(strings)
-    # Xcode writes UTF-16LE with a BOM.
-    contents = open(strings, 'rb').read()
-    if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
-      test.fail_test()
-
-  test.built_file_must_exist(
-      os.path.join(
-          test_app_path, 'Contents/Resources/English.lproj/MainMenu.nib'),
-      chdir='app-bundle')
-
-  # make does not support .xcassets files
-  extra_content_files = []
-  if test.format != 'make':
-    extra_content_files = ['Contents/Resources/Assets.car']
-    for f in extra_content_files:
-      test.built_file_must_exist(
-          os.path.join(test_app_path, f),
-          chdir='app-bundle')
-
-  # Packaging
-  test.built_file_must_exist(
-      os.path.join(test_app_path, 'Contents/PkgInfo'),
-      chdir='app-bundle')
-  test.built_file_must_match(
-      os.path.join(test_app_path, 'Contents/PkgInfo'), 'APPLause',
-      chdir='app-bundle')
-
-  # Check that no other files get added to the bundle.
-  if set(ls(test.built_file_path(test_app_path, chdir='app-bundle'))) != \
-     set(['Contents/MacOS/Test App Assets Catalog Gyp',
-          'Contents/Info.plist',
-          'Contents/Resources/English.lproj/MainMenu.nib',
-          'Contents/PkgInfo',
-          ] + extra_content_files +
-         [os.path.join('Contents/Resources/English.lproj', f)
-             for f in strings_files]):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-app-error.py b/tools/gyp/test/mac/gyptest-app-error.py
deleted file mode 100755
index c6fe33f..0000000
--- a/tools/gyp/test/mac/gyptest-app-error.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that invalid strings files cause the build to fail.
-"""
-
-import TestCmd
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  expected_error = 'Old-style plist parser: missing semicolon in dictionary'
-  saw_expected_error = [False]  # Python2 has no "nonlocal" keyword.
-  def match(a, b):
-    if a == b:
-      return True
-    if not TestCmd.is_List(a):
-      a = a.split('\n')
-    if not TestCmd.is_List(b):
-      b = b.split('\n')
-    if expected_error in '\n'.join(a) + '\n'.join(b):
-      saw_expected_error[0] = True
-      return True
-    return False
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'], match=match)
-
-  test.run_gyp('test-error.gyp', chdir='app-bundle')
-
-  test.build('test-error.gyp', test.ALL, chdir='app-bundle')
-
-  # Ninja pipes stderr of subprocesses to stdout.
-  if test.format in ['ninja', 'xcode-ninja'] \
-      and expected_error in test.stdout():
-    saw_expected_error[0] = True
-
-  if saw_expected_error[0]:
-    test.pass_test()
-  else:
-    test.fail_test()
diff --git a/tools/gyp/test/mac/gyptest-app.py b/tools/gyp/test/mac/gyptest-app.py
deleted file mode 100755
index be92d01..0000000
--- a/tools/gyp/test/mac/gyptest-app.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that app bundles are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import os
-import plistlib
-import subprocess
-import sys
-
-
-if sys.platform in ('darwin', 'win32'):
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-def CheckFileXMLPropertyList(file):
-  output = subprocess.check_output(['file', file])
-  # The double space after XML is intentional.
-  if not 'XML  document text' in output:
-    print 'File: Expected XML  document text, got %s' % output
-    test.fail_test()
-
-def ExpectEq(expected, actual):
-  if expected != actual:
-    print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
-    test.fail_test()
-
-def ls(path):
-  '''Returns a list of all files in a directory, relative to the directory.'''
-  result = []
-  for dirpath, _, files in os.walk(path):
-    for f in files:
-      result.append(os.path.join(dirpath, f)[len(path) + 1:])
-  return result
-
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='app-bundle')
-
-  test.build('test.gyp', test.ALL, chdir='app-bundle')
-
-  # Binary
-  test.built_file_must_exist('Test App Gyp.app/Contents/MacOS/Test App Gyp',
-                             chdir='app-bundle')
-
-  # Info.plist
-  info_plist = test.built_file_path('Test App Gyp.app/Contents/Info.plist',
-                                    chdir='app-bundle')
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, 'com.google.Test-App-Gyp')  # Variable expansion
-  test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}');
-  CheckFileXMLPropertyList(info_plist)
-
-  if test.format != 'make':
-    # TODO: Synthesized plist entries aren't hooked up in the make generator.
-    machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
-    plist = plistlib.readPlist(info_plist)
-    ExpectEq(machine, plist['BuildMachineOSBuild'])
-
-    # Prior to Xcode 5.0.0, SDKROOT (and thus DTSDKName) was only defined if
-    # set in the Xcode project file. Starting with that version, it is always
-    # defined.
-    expected = ''
-    if TestMac.Xcode.Version() >= '0500':
-      version = TestMac.Xcode.SDKVersion()
-      expected = 'macosx' + version
-    ExpectEq(expected, plist['DTSDKName'])
-    sdkbuild = TestMac.Xcode.SDKBuild()
-    if not sdkbuild:
-      # Above command doesn't work in Xcode 4.2.
-      sdkbuild = plist['BuildMachineOSBuild']
-    ExpectEq(sdkbuild, plist['DTSDKBuild'])
-    ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
-    ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
-
-  # Resources
-  strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
-  for f in strings_files:
-    strings = test.built_file_path(
-        os.path.join('Test App Gyp.app/Contents/Resources/English.lproj', f),
-        chdir='app-bundle')
-    test.must_exist(strings)
-    # Xcode writes UTF-16LE with a BOM.
-    contents = open(strings, 'rb').read()
-    if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
-      test.fail_test()
-
-  test.built_file_must_exist(
-      'Test App Gyp.app/Contents/Resources/English.lproj/MainMenu.nib',
-      chdir='app-bundle')
-
-  # Packaging
-  test.built_file_must_exist('Test App Gyp.app/Contents/PkgInfo',
-                             chdir='app-bundle')
-  test.built_file_must_match('Test App Gyp.app/Contents/PkgInfo', 'APPLause',
-                             chdir='app-bundle')
-
-  # Check that no other files get added to the bundle.
-  if set(ls(test.built_file_path('Test App Gyp.app', chdir='app-bundle'))) != \
-     set(['Contents/MacOS/Test App Gyp',
-          'Contents/Info.plist',
-          'Contents/Resources/English.lproj/MainMenu.nib',
-          'Contents/PkgInfo',
-          ] +
-         [os.path.join('Contents/Resources/English.lproj', f)
-             for f in strings_files]):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-archs.py b/tools/gyp/test/mac/gyptest-archs.py
deleted file mode 100644
index 106a928..0000000
--- a/tools/gyp/test/mac/gyptest-archs.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests things related to ARCHS.
-"""
-
-import TestGyp
-import TestMac
-
-import re
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test-no-archs.gyp', chdir='archs')
-  test.build('test-no-archs.gyp', test.ALL, chdir='archs')
-  result_file = test.built_file_path('Test', chdir='archs')
-  test.must_exist(result_file)
-
-  if TestMac.Xcode.Version() >= '0500':
-    expected_type = ['x86_64']
-  else:
-    expected_type = ['i386']
-  TestMac.CheckFileType(test, result_file, expected_type)
-
-  test.run_gyp('test-valid-archs.gyp', chdir='archs')
-  test.build('test-valid-archs.gyp', test.ALL, chdir='archs')
-  result_file = test.built_file_path('Test', chdir='archs')
-  test.must_exist(result_file)
-  TestMac.CheckFileType(test, result_file, ['x86_64'])
-
-  test.run_gyp('test-archs-x86_64.gyp', chdir='archs')
-  test.build('test-archs-x86_64.gyp', test.ALL, chdir='archs')
-  result_file = test.built_file_path('Test64', chdir='archs')
-  test.must_exist(result_file)
-  TestMac.CheckFileType(test, result_file, ['x86_64'])
-
-  test.run_gyp('test-dependencies.gyp', chdir='archs')
-  test.build('test-dependencies.gyp', target=test.ALL, chdir='archs')
-  products = ['c_standalone', 'd_standalone']
-  for product in products:
-    result_file = test.built_file_path(
-        product, chdir='archs', type=test.STATIC_LIB)
-    test.must_exist(result_file)
-
-  if test.format != 'make':
-    # Build all targets except 'exe_32_64_no_sources', which does build
-    # but should not cause an error when generating ninja files.
-    targets = [
-        'static_32_64', 'shared_32_64', 'shared_32_64_bundle',
-        'module_32_64', 'module_32_64_bundle',
-        'exe_32_64', 'exe_32_64_bundle', 'precompiled_prefix_header_mm_32_64',
-    ]
-
-    test.run_gyp('test-archs-multiarch.gyp', chdir='archs')
-
-    for target in targets:
-      test.build('test-archs-multiarch.gyp', target=target, chdir='archs')
-
-    result_file = test.built_file_path(
-        'static_32_64', chdir='archs', type=test.STATIC_LIB)
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
-
-    result_file = test.built_file_path(
-        'shared_32_64', chdir='archs', type=test.SHARED_LIB)
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
-
-    result_file = test.built_file_path('My Framework.framework/My Framework',
-                                       chdir='archs')
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
-    # Check that symbol "_x" made it into both versions of the binary:
-    if not all(['D _x' in subprocess.check_output(
-        ['nm', '-arch', arch, result_file]) for arch in ['i386', 'x86_64']]):
-      # This can only fail flakily, due to process ordering issues. If it
-      # does fail flakily, then something is broken; it's not the test's fault.
-      test.fail_test()
-
-    result_file = test.built_file_path(
-        'exe_32_64', chdir='archs', type=test.EXECUTABLE)
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
-
-    result_file = test.built_file_path('Test App.app/Contents/MacOS/Test App',
-                                       chdir='archs')
-    test.must_exist(result_file)
-    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
diff --git a/tools/gyp/test/mac/gyptest-bundle-resources.py b/tools/gyp/test/mac/gyptest-bundle-resources.py
deleted file mode 100644
index e9eddb7..0000000
--- a/tools/gyp/test/mac/gyptest-bundle-resources.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies things related to bundle resources.
-"""
-
-import TestGyp
-
-import os
-import stat
-import sys
-
-if sys.platform in ('darwin'):
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-def check_attribs(path, expected_exec_bit):
-  out_path = test.built_file_path(
-      os.path.join('resource.app/Contents/Resources', path), chdir=CHDIR)
-
-  in_stat = os.stat(os.path.join(CHDIR, path))
-  out_stat = os.stat(out_path)
-  if in_stat.st_mtime == out_stat.st_mtime:
-    test.fail_test()
-  if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
-    test.fail_test()
-
-
-if sys.platform == 'darwin':
-  # set |match| to ignore build stderr output.
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'bundle-resources'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  test.built_file_must_match('resource.app/Contents/Resources/secret.txt',
-                             'abc\n', chdir=CHDIR)
-  test.built_file_must_match('source_rule.app/Contents/Resources/secret.txt',
-                             'ABC\n', chdir=CHDIR)
-
-  test.built_file_must_match(
-      'resource.app/Contents/Resources/executable-file.sh',
-      '#!/bin/bash\n'
-      '\n'
-      'echo echo echo echo cho ho o o\n', chdir=CHDIR)
-
-  check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
-  check_attribs('secret.txt', expected_exec_bit=0)
-
-  # TODO(thakis): This currently fails with make.
-  if test.format != 'make':
-    test.built_file_must_match(
-        'resource_rule.app/Contents/Resources/secret.txt', 'ABC\n', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-cflags.py b/tools/gyp/test/mac/gyptest-cflags.py
deleted file mode 100644
index 17afd15..0000000
--- a/tools/gyp/test/mac/gyptest-cflags.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that compile-time flags work.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-  CHDIR = 'cflags'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-clang-cxx-language-standard.py b/tools/gyp/test/mac/gyptest-clang-cxx-language-standard.py
deleted file mode 100644
index 75c6c74..0000000
--- a/tools/gyp/test/mac/gyptest-clang-cxx-language-standard.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that CLANG_CXX_LANGUAGE_STANDARD works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
-
-  test.run_gyp('clang-cxx-language-standard.gyp',
-               chdir='clang-cxx-language-standard')
-
-  test.build('clang-cxx-language-standard.gyp', test.ALL,
-             chdir='clang-cxx-language-standard')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/mac/gyptest-clang-cxx-library.py b/tools/gyp/test/mac/gyptest-clang-cxx-library.py
deleted file mode 100644
index 39a11c7..0000000
--- a/tools/gyp/test/mac/gyptest-clang-cxx-library.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that CLANG_CXX_LIBRARY works.
-"""
-
-import TestGyp
-import TestMac
-
-import os
-import sys
-
-if sys.platform == 'darwin':
-  # Xcode 4.2 on OS X 10.6 doesn't install the libc++ headers; don't run this
-  # test there.
-  if TestMac.Xcode.Version() <= '0420':
-    sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
-  test.run_gyp('clang-cxx-library.gyp', chdir='clang-cxx-library')
-  test.build('clang-cxx-library.gyp', test.ALL, chdir='clang-cxx-library')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/mac/gyptest-copies.py b/tools/gyp/test/mac/gyptest-copies.py
deleted file mode 100755
index 4146441..0000000
--- a/tools/gyp/test/mac/gyptest-copies.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that 'copies' with app bundles are handled correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-import time
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('framework.gyp', chdir='framework')
-
-  test.build('framework.gyp', 'copy_target', chdir='framework')
-
-  # Check that the copy succeeded.
-  test.built_file_must_exist(
-      'Test Framework.framework/foo/Dependency Bundle.framework',
-      chdir='framework')
-  test.built_file_must_exist(
-      'Test Framework.framework/foo/Dependency Bundle.framework/Versions/A',
-      chdir='framework')
-  test.built_file_must_exist(
-      'Test Framework.framework/Versions/A/Libraries/empty.c',
-      chdir='framework')
-
-  # Verify BUILT_FRAMEWORKS_DIR is set and working.
-  test.build('framework.gyp', 'copy_embedded', chdir='framework')
-
-  test.built_file_must_exist(
-      'Embedded/Test Framework.framework', chdir='framework')
-
-  # Check that rebuilding the target a few times works.
-  dep_bundle = test.built_file_path('Dependency Bundle.framework',
-                                    chdir='framework')
-  mtime = os.path.getmtime(dep_bundle)
-  atime = os.path.getatime(dep_bundle)
-  for i in range(3):
-    os.utime(dep_bundle, (atime + i * 1000, mtime + i * 1000))
-    test.build('framework.gyp', 'copy_target', chdir='framework')
-
-
-  # Check that actions ran.
-  test.built_file_must_exist('action_file', chdir='framework')
-
-  # Test that a copy with the "Code Sign on Copy" flag on succeeds.
-  test.build('framework.gyp', 'copy_target_code_sign', chdir='framework')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-copy-dylib.py b/tools/gyp/test/mac/gyptest-copy-dylib.py
deleted file mode 100644
index 253623d..0000000
--- a/tools/gyp/test/mac/gyptest-copy-dylib.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that dylibs can be copied into app bundles.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='copy-dylib')
-
-  test.build('test.gyp', 'test_app', chdir='copy-dylib')
-
-  test.built_file_must_exist(
-      'Test App.app/Contents/Resources/libmy_dylib.dylib', chdir='copy-dylib')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-debuginfo.py b/tools/gyp/test/mac/gyptest-debuginfo.py
deleted file mode 100755
index a0e9438..0000000
--- a/tools/gyp/test/mac/gyptest-debuginfo.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests things related to debug information generation.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='debuginfo')
-
-  test.build('test.gyp', test.ALL, chdir='debuginfo')
-
-  test.built_file_must_exist('libnonbundle_shared_library.dylib.dSYM',
-                             chdir='debuginfo')
-  test.built_file_must_exist('nonbundle_loadable_module.so.dSYM',
-                             chdir='debuginfo')
-  test.built_file_must_exist('nonbundle_executable.dSYM',
-                             chdir='debuginfo')
-
-  test.built_file_must_exist('bundle_shared_library.framework.dSYM',
-                             chdir='debuginfo')
-  test.built_file_must_exist('bundle_loadable_module.bundle.dSYM',
-                             chdir='debuginfo')
-  test.built_file_must_exist('My App.app.dSYM',
-                             chdir='debuginfo')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-depend-on-bundle.py b/tools/gyp/test/mac/gyptest-depend-on-bundle.py
deleted file mode 100644
index 486fbfe..0000000
--- a/tools/gyp/test/mac/gyptest-depend-on-bundle.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a dependency on a bundle causes the whole bundle to be built.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='depend-on-bundle')
-
-  test.build('test.gyp', 'dependent_on_bundle', chdir='depend-on-bundle')
-
-  # Binary itself.
-  test.built_file_must_exist('dependent_on_bundle', chdir='depend-on-bundle')
-
-  # Bundle dependency.
-  test.built_file_must_exist(
-      'my_bundle.framework/Versions/A/my_bundle',
-      chdir='depend-on-bundle')
-  test.built_file_must_exist(  # package_framework
-      'my_bundle.framework/my_bundle',
-      chdir='depend-on-bundle')
-  test.built_file_must_exist(  # plist
-      'my_bundle.framework/Versions/A/Resources/Info.plist',
-      chdir='depend-on-bundle')
-  test.built_file_must_exist(
-      'my_bundle.framework/Versions/A/Resources/English.lproj/'  # Resources
-      'InfoPlist.strings',
-      chdir='depend-on-bundle')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-deployment-target.py b/tools/gyp/test/mac/gyptest-deployment-target.py
deleted file mode 100644
index afa6c77..0000000
--- a/tools/gyp/test/mac/gyptest-deployment-target.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that MACOSX_DEPLOYMENT_TARGET works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode'])
-
-  test.run_gyp('deployment-target.gyp', chdir='deployment-target')
-
-  test.build('deployment-target.gyp', test.ALL, chdir='deployment-target')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/mac/gyptest-framework-dirs.py b/tools/gyp/test/mac/gyptest-framework-dirs.py
deleted file mode 100644
index a1ae54c..0000000
--- a/tools/gyp/test/mac/gyptest-framework-dirs.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that it is possible to build an object that depends on a
-PrivateFramework.
-"""
-
-import os
-import sys
-import TestGyp
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'framework-dirs'
-  test.run_gyp('framework-dirs.gyp', chdir=CHDIR)
-  test.build('framework-dirs.gyp', 'calculate', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-framework-headers.py b/tools/gyp/test/mac/gyptest-framework-headers.py
deleted file mode 100644
index aa13a74..0000000
--- a/tools/gyp/test/mac/gyptest-framework-headers.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that mac_framework_headers works properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  # TODO(thakis): Make this work with ninja, make. http://crbug.com/129013
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  CHDIR = 'framework-headers'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  # Test that headers are installed for frameworks
-  test.build('test.gyp', 'test_framework_headers_framework', chdir=CHDIR)
-
-  test.built_file_must_exist(
-    'TestFramework.framework/Versions/A/TestFramework', chdir=CHDIR)
-
-  test.built_file_must_exist(
-    'TestFramework.framework/Versions/A/Headers/myframework.h', chdir=CHDIR)
-
-  # Test that headers are installed for static libraries.
-  test.build('test.gyp', 'test_framework_headers_static', chdir=CHDIR)
-
-  test.built_file_must_exist('libTestLibrary.a', chdir=CHDIR)
-
-  test.built_file_must_exist('include/myframework.h', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-framework.py b/tools/gyp/test/mac/gyptest-framework.py
deleted file mode 100755
index 553cc9f..0000000
--- a/tools/gyp/test/mac/gyptest-framework.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that framework bundles are built correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-def ls(path):
-  '''Returns a list of all files in a directory, relative to the directory.'''
-  result = []
-  for dirpath, _, files in os.walk(path):
-    for f in files:
-      result.append(os.path.join(dirpath, f)[len(path) + 1:])
-  return result
-
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('framework.gyp', chdir='framework')
-
-  test.build('framework.gyp', 'test_framework', chdir='framework')
-
-  # Binary
-  test.built_file_must_exist(
-      'Test Framework.framework/Versions/A/Test Framework',
-      chdir='framework')
-
-  # Info.plist
-  info_plist = test.built_file_path(
-      'Test Framework.framework/Versions/A/Resources/Info.plist',
-      chdir='framework')
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, 'com.yourcompany.Test_Framework')
-
-  # Resources
-  test.built_file_must_exist(
-      'Test Framework.framework/Versions/A/Resources/English.lproj/'
-      'InfoPlist.strings',
-      chdir='framework')
-
-  # Symlinks created by packaging process
-  test.built_file_must_exist('Test Framework.framework/Versions/Current',
-                             chdir='framework')
-  test.built_file_must_exist('Test Framework.framework/Resources',
-                             chdir='framework')
-  test.built_file_must_exist('Test Framework.framework/Test Framework',
-                             chdir='framework')
-  # PkgInfo.
-  test.built_file_must_not_exist(
-      'Test Framework.framework/Versions/A/Resources/PkgInfo',
-      chdir='framework')
-
-  # Check that no other files get added to the bundle.
-  if set(ls(test.built_file_path('Test Framework.framework',
-                                 chdir='framework'))) != \
-     set(['Versions/A/Test Framework',
-          'Versions/A/Resources/Info.plist',
-          'Versions/A/Resources/English.lproj/InfoPlist.strings',
-          'Test Framework',
-          'Versions/A/Libraries/empty.c',  # Written by a gyp action.
-          ]):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-global-settings.py b/tools/gyp/test/mac/gyptest-global-settings.py
deleted file mode 100644
index f4ed166..0000000
--- a/tools/gyp/test/mac/gyptest-global-settings.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that the global xcode_settings processing doesn't throw.
-Regression test for http://crbug.com/109163
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  # The xcode-ninja generator handles gypfiles which are not at the
-  # project root incorrectly.
-  # cf. https://code.google.com/p/gyp/issues/detail?id=460
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  test.run_gyp('src/dir2/dir2.gyp', chdir='global-settings', depth='src')
-  # run_gyp shouldn't throw.
-
-  # Check that BUILT_PRODUCTS_DIR was set correctly, too.
-  test.build('dir2/dir2.gyp', 'dir2_target', chdir='global-settings/src',
-             SYMROOT='../build')
-  test.built_file_must_exist('file.txt', chdir='global-settings/src')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-identical-name.py b/tools/gyp/test/mac/gyptest-identical-name.py
deleted file mode 100644
index 0d358df..0000000
--- a/tools/gyp/test/mac/gyptest-identical-name.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that libraries (in identical-name) are properly handled by Xcode.
-
-The names for all libraries participating in this build are:
-libtestlib.a - identical-name/testlib
-libtestlib.a - identical-name/proxy/testlib
-libproxy.a   - identical-name/proxy
-The first two libs produce a hash collision in Xcode when Gyp is executed,
-because they have the same name and would be copied to the same directory with
-Xcode default settings.
-For this scenario to work one needs to change the Xcode variables SYMROOT and
-CONFIGURATION_BUILD_DIR. Setting these to per-lib-unique directories avoids
-copying the libs into the same directory.
-
-The test consists of two steps. The first one verifies that by setting both
-vars, there is no hash collision anymore during Gyp execution and that the libs
-can actually be built. The second one verifies that there is still a hash
-collision if the vars are not set and thus the current behavior is preserved.
-"""
-
-import TestGyp
-
-import sys
-
-def IgnoreOutput(string, expected_string):
-  return True
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-
-  test.run_gyp('test.gyp', chdir='identical-name')
-  test.build('test.gyp', test.ALL, chdir='identical-name')
-
-  test.run_gyp('test-should-fail.gyp', chdir='identical-name')
-  test.built_file_must_not_exist('test-should-fail.xcodeproj')
-
-  test.pass_test()
-
diff --git a/tools/gyp/test/mac/gyptest-infoplist-process.py b/tools/gyp/test/mac/gyptest-infoplist-process.py
deleted file mode 100755
index 2e51684..0000000
--- a/tools/gyp/test/mac/gyptest-infoplist-process.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies the Info.plist preprocessor functionality.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'infoplist-process'
-  INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
-
-  # First process both keys.
-  test.set_configuration('One')
-  test.run_gyp('test1.gyp', chdir=CHDIR)
-  test.build('test1.gyp', test.ALL, chdir=CHDIR)
-  info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, 'Foo')
-  test.must_contain(info_plist, 'Bar')
-
-  # Then process a single key.
-  test.set_configuration('Two')
-  test.run_gyp('test2.gyp', chdir=CHDIR)
-  test.build('test2.gyp', chdir=CHDIR)
-  info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, 'com.google.Test')  # Normal expansion works.
-  test.must_contain(info_plist, 'Foo (Bar)')
-  test.must_contain(info_plist, 'PROCESSED_KEY2')
-
-  # Then turn off the processor.
-  test.set_configuration('Three')
-  test.run_gyp('test3.gyp', chdir=CHDIR)
-  test.build('test3.gyp', chdir=CHDIR)
-  info_plist = test.built_file_path('Test App.app/Contents/Info.plist',
-                                    chdir=CHDIR)
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, 'com.google.Test')  # Normal expansion works.
-  test.must_contain(info_plist, 'PROCESSED_KEY1')
-  test.must_contain(info_plist, 'PROCESSED_KEY2')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-installname.py b/tools/gyp/test/mac/gyptest-installname.py
deleted file mode 100644
index 3fc7152..0000000
--- a/tools/gyp/test/mac/gyptest-installname.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that LD_DYLIB_INSTALL_NAME and DYLIB_INSTALL_NAME_BASE are handled
-correctly.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'installname'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  def GetInstallname(p):
-    p = test.built_file_path(p, chdir=CHDIR)
-    r = re.compile(r'cmd LC_ID_DYLIB.*?name (.*?) \(offset \d+\)', re.DOTALL)
-    proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-    assert not proc.returncode
-    m = r.search(o)
-    assert m
-    return m.group(1)
-
-  if (GetInstallname('libdefault_installname.dylib') !=
-      '/usr/local/lib/libdefault_installname.dylib'):
-    test.fail_test()
-
-  if (GetInstallname('My Framework.framework/My Framework') !=
-      '/Library/Frameworks/My Framework.framework/'
-      'Versions/A/My Framework'):
-    test.fail_test()
-
-  if (GetInstallname('libexplicit_installname.dylib') !=
-      'Trapped in a dynamiclib factory'):
-    test.fail_test()
-
-  if (GetInstallname('libexplicit_installname_base.dylib') !=
-      '@executable_path/../../../libexplicit_installname_base.dylib'):
-    test.fail_test()
-
-  if (GetInstallname('My Other Framework.framework/My Other Framework') !=
-      '@executable_path/../../../My Other Framework.framework/'
-      'Versions/A/My Other Framework'):
-    test.fail_test()
-
-  if (GetInstallname('libexplicit_installname_with_base.dylib') !=
-      '/usr/local/lib/libexplicit_installname_with_base.dylib'):
-    test.fail_test()
-
-  if (GetInstallname('libexplicit_installname_with_explicit_base.dylib') !=
-      '@executable_path/../libexplicit_installname_with_explicit_base.dylib'):
-    test.fail_test()
-
-  if (GetInstallname('libboth_base_and_installname.dylib') !=
-      'Still trapped in a dynamiclib factory'):
-    test.fail_test()
-
-  if (GetInstallname('install_name_with_info_plist.framework/'
-                     'install_name_with_info_plist') !=
-      '/Library/Frameworks/install_name_with_info_plist.framework/'
-      'Versions/A/install_name_with_info_plist'):
-    test.fail_test()
-
-  if ('DYLIB_INSTALL_NAME_BASE:standardizepath: command not found' in
-          test.stdout()):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-kext.py b/tools/gyp/test/mac/gyptest-kext.py
deleted file mode 100755
index 56790bd..0000000
--- a/tools/gyp/test/mac/gyptest-kext.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that kext bundles are built correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import os
-import plistlib
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-  test.run_gyp('kext.gyp', chdir='kext')
-  test.build('kext.gyp', test.ALL, chdir='kext')
-  test.built_file_must_exist('GypKext.kext/Contents/MacOS/GypKext',
-                             chdir='kext')
-  test.built_file_must_exist('GypKext.kext/Contents/Info.plist',
-                             chdir='kext')
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py b/tools/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py
deleted file mode 100644
index 48afcd4..0000000
--- a/tools/gyp/test/mac/gyptest-ldflags-passed-to-libtool.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that OTHER_LDFLAGS is passed to libtool.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
-                         match = lambda a, b: True)
-
-  build_error_code = {
-    'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
-    'make': 2,
-    'ninja': 1,
-    'xcode-ninja': [1, 65],
-  }[test.format]
-
-  CHDIR = 'ldflags-libtool'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', 'ldflags_passed_to_libtool', chdir=CHDIR,
-             status=build_error_code)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-ldflags.py b/tools/gyp/test/mac/gyptest-ldflags.py
deleted file mode 100644
index af44b8c..0000000
--- a/tools/gyp/test/mac/gyptest-ldflags.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that filenames passed to various linker flags are converted into
-build-directory relative paths correctly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  # The xcode-ninja generator handles gypfiles which are not at the
-  # project root incorrectly.
-  # cf. https://code.google.com/p/gyp/issues/detail?id=460
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  CHDIR = 'ldflags'
-  test.run_gyp('subdirectory/test.gyp', chdir=CHDIR)
-
-  test.build('subdirectory/test.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
-
-
-# These flags from `man ld` could show up in OTHER_LDFLAGS and need path
-# translation.
-#
-# Done:
-#      -exported_symbols_list filename
-#      -unexported_symbols_list file
-#      -reexported_symbols_list file
-#      -sectcreate segname sectname file
-#
-# Will be done on demand:
-#      -weak_library path_to_library
-#      -reexport_library path_to_library
-#      -lazy_library path_to_library
-#      -upward_library path_to_library
-#      -syslibroot rootdir
-#      -framework name[,suffix]
-#      -weak_framework name[,suffix]
-#      -reexport_framework name[,suffix]
-#      -lazy_framework name[,suffix]
-#      -upward_framework name[,suffix]
-#      -force_load path_to_archive
-#      -filelist file[,dirname]
-#      -dtrace file
-#      -order_file file                     # should use ORDER_FILE
-#      -exported_symbols_order file
-#      -bundle_loader executable            # should use BUNDLE_LOADER
-#      -alias_list filename
-#      -seg_addr_table filename
-#      -dylib_file install_name:file_name
-#      -interposable_list filename
-#      -object_path_lto filename
-#
-#
-# obsolete:
-#      -sectorder segname sectname orderfile
-#      -seg_addr_table_filename path
-#
-#
-# ??:
-#      -map map_file_path
-#      -sub_library library_name
-#      -sub_umbrella framework_name
diff --git a/tools/gyp/test/mac/gyptest-libraries.py b/tools/gyp/test/mac/gyptest-libraries.py
deleted file mode 100755
index d5b64bd..0000000
--- a/tools/gyp/test/mac/gyptest-libraries.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies libraries (in link_settings) are properly found.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  # The xcode-ninja generator handles gypfiles which are not at the
-  # project root incorrectly.
-  # cf. https://code.google.com/p/gyp/issues/detail?id=460
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  test.run_gyp('subdir/test.gyp', chdir='libraries')
-
-  test.build('subdir/test.gyp', test.ALL, chdir='libraries')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-libtool-zero.py b/tools/gyp/test/mac/gyptest-libtool-zero.py
deleted file mode 100644
index ae5b7e6..0000000
--- a/tools/gyp/test/mac/gyptest-libtool-zero.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies libraries have proper mtime.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'libtool-zero'
-
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', 'mylib', chdir=CHDIR)
-
-  test.up_to_date('test.gyp', 'mylib', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py b/tools/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py
deleted file mode 100644
index 7a60ca2..0000000
--- a/tools/gyp/test/mac/gyptest-loadable-module-bundle-product-extension.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests that loadable_modules don't collide when using the same name with
-different file extensions.
-"""
-
-import TestGyp
-
-import os
-import struct
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'loadable-module-bundle-product-extension'
-  test.run_gyp('test.gyp',
-               '-G', 'xcode_ninja_target_pattern=^.*$',
-               chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  test.must_exist(test.built_file_path('Collide.foo', chdir=CHDIR))
-  test.must_exist(test.built_file_path('Collide.bar', chdir=CHDIR))
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-loadable-module.py b/tools/gyp/test/mac/gyptest-loadable-module.py
deleted file mode 100755
index 158a930..0000000
--- a/tools/gyp/test/mac/gyptest-loadable-module.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests that a loadable_module target is built correctly.
-"""
-
-import TestGyp
-
-import os
-import struct
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'loadable-module'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  # Binary.
-  binary = test.built_file_path(
-      'test_loadable_module.plugin/Contents/MacOS/test_loadable_module',
-      chdir=CHDIR)
-  test.must_exist(binary)
-  MH_BUNDLE = 8
-  if struct.unpack('4I', open(binary, 'rb').read(16))[3] != MH_BUNDLE:
-    test.fail_test()
-
-  # Info.plist.
-  info_plist = test.built_file_path(
-      'test_loadable_module.plugin/Contents/Info.plist', chdir=CHDIR)
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, """
-	<key>CFBundleExecutable</key>
-	<string>test_loadable_module</string>
-""")
-
-  # PkgInfo.
-  test.built_file_must_not_exist(
-      'test_loadable_module.plugin/Contents/PkgInfo', chdir=CHDIR)
-  test.built_file_must_not_exist(
-      'test_loadable_module.plugin/Contents/Resources', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-lto.py b/tools/gyp/test/mac/gyptest-lto.py
deleted file mode 100644
index 5171544..0000000
--- a/tools/gyp/test/mac/gyptest-lto.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that LTO flags work.
-"""
-
-import TestGyp
-
-import os
-import re
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-  CHDIR = 'lto'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  def ObjPath(srcpath, target):
-    # TODO: Move this into TestGyp if it's needed elsewhere.
-    if test.format == 'xcode':
-      return os.path.join(CHDIR, 'build', 'test.build', 'Default',
-                          target + '.build', 'Objects-normal', 'x86_64',
-                          srcpath + '.o')
-    elif 'ninja' in test.format:  # ninja, xcode-ninja
-      return os.path.join(CHDIR, 'out', 'Default', 'obj',
-                          target + '.' + srcpath + '.o')
-    elif test.format == 'make':
-      return os.path.join(CHDIR, 'out', 'Default', 'obj.target',
-                          target, srcpath + '.o')
-
-  def ObjType(p, t_expected):
-    r = re.compile(r'nsyms\s+(\d+)')
-    o = subprocess.check_output(['file', p])
-    objtype = 'unknown'
-    if ': Mach-O ' in o:
-      objtype = 'mach-o'
-    elif ': LLVM bit-code ' in o:
-      objtype = 'llvm'
-    if objtype != t_expected:
-      print 'Expected %s, got %s' % (t_expected, objtype)
-      test.fail_test()
-
-  ObjType(ObjPath('cfile', 'lto'), 'llvm')
-  ObjType(ObjPath('ccfile', 'lto'), 'llvm')
-  ObjType(ObjPath('mfile', 'lto'), 'llvm')
-  ObjType(ObjPath('mmfile', 'lto'), 'llvm')
-  ObjType(ObjPath('asmfile', 'lto'), 'mach-o')
-
-  ObjType(ObjPath('cfile', 'lto_static'), 'llvm')
-  ObjType(ObjPath('ccfile', 'lto_static'), 'llvm')
-  ObjType(ObjPath('mfile', 'lto_static'), 'llvm')
-  ObjType(ObjPath('mmfile', 'lto_static'), 'llvm')
-  ObjType(ObjPath('asmfile', 'lto_static'), 'mach-o')
-
-  test.pass_test()
-
-  # TODO: Probably test for -object_path_lto too, else dsymutil won't be
-  # useful maybe?
diff --git a/tools/gyp/test/mac/gyptest-missing-cfbundlesignature.py b/tools/gyp/test/mac/gyptest-missing-cfbundlesignature.py
deleted file mode 100644
index 43cab77..0000000
--- a/tools/gyp/test/mac/gyptest-missing-cfbundlesignature.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that an Info.plist without a CFBundleSignature works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='missing-cfbundlesignature')
-  test.build('test.gyp', test.ALL, chdir='missing-cfbundlesignature')
-
-  test.built_file_must_match('mytarget.app/Contents/PkgInfo', 'APPL????',
-                             chdir='missing-cfbundlesignature')
-
-  test.built_file_must_match('myothertarget.app/Contents/PkgInfo', 'APPL????',
-                             chdir='missing-cfbundlesignature')
-
-  test.built_file_must_match('thirdtarget.app/Contents/PkgInfo', 'APPL????',
-                             chdir='missing-cfbundlesignature')
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-non-strs-flattened-to-env.py b/tools/gyp/test/mac/gyptest-non-strs-flattened-to-env.py
deleted file mode 100644
index b802619..0000000
--- a/tools/gyp/test/mac/gyptest-non-strs-flattened-to-env.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that list xcode_settings are flattened before being exported to the
-environment.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'non-strs-flattened-to-env'
-  INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
-
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-  info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
-  test.must_exist(info_plist)
-  test.must_contain(info_plist, '''\
-\t<key>My Variable</key>
-\t<string>some expansion</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>CFlags</key>
-\t<string>-fstack-protector-all -fno-strict-aliasing -DS="A Space"</string>''')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-objc-arc.py b/tools/gyp/test/mac/gyptest-objc-arc.py
deleted file mode 100755
index b3192a1..0000000
--- a/tools/gyp/test/mac/gyptest-objc-arc.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ARC objc settings are handled correctly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  # set |match| to ignore build stderr output.
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
-                         match = lambda a, b: True)
-
-  CHDIR = 'objc-arc'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', 'arc_enabled', chdir=CHDIR)
-  test.build('test.gyp', 'arc_disabled', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-objc-gc.py b/tools/gyp/test/mac/gyptest-objc-gc.py
deleted file mode 100644
index 0cec458..0000000
--- a/tools/gyp/test/mac/gyptest-objc-gc.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that GC objc settings are handled correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import sys
-
-if sys.platform == 'darwin':
-  # set |match| to ignore build stderr output.
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
-                         match = lambda a, b: True)
-
-  # Xcode 5.1 removed support for garbage-collection:
-  #   error: garbage collection is no longer supported
-  if TestMac.Xcode.Version() < '0510':
-
-    CHDIR = 'objc-gc'
-    test.run_gyp('test.gyp', chdir=CHDIR)
-
-    build_error_code = {
-      'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
-      'make': 2,
-      'ninja': 1,
-    }[test.format]
-
-    test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
-    test.build(
-        'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
-
-    test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
-    test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
-
-    test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
-    test.run_built_executable(
-        'gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
-
-    test.build('test.gyp', 'gc_exe', chdir=CHDIR)
-    test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
-
-    test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
-    test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild-copy-bundle.py b/tools/gyp/test/mac/gyptest-postbuild-copy-bundle.py
deleted file mode 100644
index 1f04d1c..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild-copy-bundle.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a postbuild copying a dependent framework into an app bundle is
-rerun if the resources in the framework change.
-"""
-
-import TestGyp
-
-import os.path
-import sys
-
-if sys.platform == 'darwin':
-  # TODO(thakis): Make this pass with the make generator, http://crbug.com/95529
-  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
-
-  CHDIR = 'postbuild-copy-bundle'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  app_bundle_dir = test.built_file_path('Test App.app', chdir=CHDIR)
-  bundled_framework_dir = os.path.join(
-      app_bundle_dir, 'Contents', 'My Framework.framework', 'Resources')
-  final_plist_path = os.path.join(bundled_framework_dir, 'Info.plist')
-  final_resource_path = os.path.join(bundled_framework_dir, 'resource_file.sb')
-  final_copies_path = os.path.join(
-      app_bundle_dir, 'Contents', 'My Framework.framework', 'Versions', 'A',
-      'Libraries', 'copied.txt')
-
-  # Check that the dependency was built and copied into the app bundle:
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-  test.must_exist(final_resource_path)
-  test.must_match(final_resource_path,
-                  'This is included in the framework bundle.\n')
-
-  test.must_exist(final_plist_path)
-  test.must_contain(final_plist_path, '''\
-\t<key>RandomKey</key>
-\t<string>RandomValue</string>''')
-
-  # Touch the dependency's bundle resource, and check that the modification
-  # makes it all the way into the app bundle:
-  test.sleep()
-  test.write('postbuild-copy-bundle/resource_file.sb', 'New text\n')
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-
-  test.must_exist(final_resource_path)
-  test.must_match(final_resource_path, 'New text\n')
-
-  # Check the same for the plist file.
-  test.sleep()
-  contents = test.read('postbuild-copy-bundle/Framework-Info.plist')
-  contents = contents.replace('RandomValue', 'NewRandomValue')
-  test.write('postbuild-copy-bundle/Framework-Info.plist', contents)
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-
-  test.must_exist(final_plist_path)
-  test.must_contain(final_plist_path, '''\
-\t<key>RandomKey</key>
-\t<string>NewRandomValue</string>''')
-
-  # Check the same for the copies section, test for http://crbug.com/157077
-  test.sleep()
-  contents = test.read('postbuild-copy-bundle/copied.txt')
-  contents = contents.replace('old', 'new')
-  test.write('postbuild-copy-bundle/copied.txt', contents)
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-
-  test.must_exist(final_copies_path)
-  test.must_contain(final_copies_path, 'new copied file')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild-defaults.py b/tools/gyp/test/mac/gyptest-postbuild-defaults.py
deleted file mode 100644
index 892a0c4..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild-defaults.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a postbuild invoking |defaults| works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'postbuild-defaults'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  result_file = test.built_file_path('result', chdir=CHDIR)
-  test.must_exist(result_file)
-  test.must_contain(result_file, '''\
-Test
-${PRODUCT_NAME}
-''')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild-fail.py b/tools/gyp/test/mac/gyptest-postbuild-fail.py
deleted file mode 100755
index 9cd5d47..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild-fail.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a failing postbuild step lets the build fail.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  # set |match| to ignore build stderr output.
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
-                         match = lambda a, b: True)
-
-  test.run_gyp('test.gyp', chdir='postbuild-fail')
-
-  build_error_code = {
-    'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
-    'make': 2,
-    'ninja': 1,
-    'xcode-ninja': [1, 65],
-  }[test.format]
-
-
-  # If a postbuild fails, all postbuilds should be re-run on the next build.
-  # In Xcode 3, even if the first postbuild fails the other postbuilds were
-  # still executed. In Xcode 4, postbuilds are stopped after the first
-  # failing postbuild. This test checks for the Xcode 4 behavior.
-
-  # Ignore this test on Xcode 3.
-  import subprocess
-  job = subprocess.Popen(['xcodebuild', '-version'],
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.STDOUT)
-  out, err = job.communicate()
-  if job.returncode != 0:
-    print out
-    raise Exception('Error %d running xcodebuild' % job.returncode)
-  if out.startswith('Xcode 3.'):
-    test.pass_test()
-
-  # Non-bundles
-  test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
-             status=build_error_code)
-  test.built_file_must_not_exist('static_touch',
-                                 chdir='postbuild-fail')
-  # Check for non-up-to-date-ness by checking if building again produces an
-  # error.
-  test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
-             status=build_error_code)
-
-
-  # Bundles
-  test.build('test.gyp', 'bundle', chdir='postbuild-fail',
-             status=build_error_code)
-  test.built_file_must_not_exist('dynamic_touch',
-                                 chdir='postbuild-fail')
-  # Check for non-up-to-date-ness by checking if building again produces an
-  # error.
-  test.build('test.gyp', 'bundle', chdir='postbuild-fail',
-             status=build_error_code)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild-multiple-configurations.py b/tools/gyp/test/mac/gyptest-postbuild-multiple-configurations.py
deleted file mode 100644
index 84694f3..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild-multiple-configurations.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a postbuild work in projects with multiple configurations.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'postbuild-multiple-configurations'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  for configuration in ['Debug', 'Release']:
-    test.set_configuration(configuration)
-    test.build('test.gyp', test.ALL, chdir=CHDIR)
-    test.built_file_must_exist('postbuild-file', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild-static-library.py b/tools/gyp/test/mac/gyptest-postbuild-static-library.py
deleted file mode 100644
index 8f9a6eb..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild-static-library.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a postbuilds on static libraries work, and that sourceless
-libraries don't cause failures at gyp time.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['make', 'xcode'])
-
-  CHDIR = 'postbuild-static-library'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', 'my_lib', chdir=CHDIR)
-  # Building my_sourceless_lib doesn't work with make. gyp should probably
-  # forbid sourceless static libraries, since they're pretty pointless.
-  # But they shouldn't cause gyp time exceptions.
-
-  test.built_file_must_exist('postbuild-file', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-postbuild.py b/tools/gyp/test/mac/gyptest-postbuild.py
deleted file mode 100755
index 684e7b8..0000000
--- a/tools/gyp/test/mac/gyptest-postbuild.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that postbuild steps work.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='postbuilds')
-
-  test.build('test.gyp', test.ALL, chdir='postbuilds')
-
-  # See comment in test/subdirectory/gyptest-subdir-default.py
-  if test.format == 'xcode':
-    chdir = 'postbuilds/subdirectory'
-  else:
-    chdir = 'postbuilds'
-
-  # Created by the postbuild scripts
-  test.built_file_must_exist('el.a_touch',
-                             type=test.STATIC_LIB,
-                             chdir='postbuilds')
-  test.built_file_must_exist('el.a_gyp_touch',
-                             type=test.STATIC_LIB,
-                             chdir='postbuilds')
-  test.built_file_must_exist('nest_el.a_touch',
-                             type=test.STATIC_LIB,
-                             chdir=chdir)
-  test.built_file_must_exist(
-      'dyna.framework/Versions/A/dyna_touch',
-      chdir='postbuilds')
-  test.built_file_must_exist(
-      'dyna.framework/Versions/A/dyna_gyp_touch',
-      chdir='postbuilds')
-  test.built_file_must_exist(
-      'nest_dyna.framework/Versions/A/nest_dyna_touch',
-      chdir=chdir)
-  test.built_file_must_exist('dyna_standalone.dylib_gyp_touch',
-                             type=test.SHARED_LIB,
-                             chdir='postbuilds')
-  test.built_file_must_exist('copied_file.txt', chdir='postbuilds')
-  test.built_file_must_exist('copied_file_2.txt', chdir=chdir)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-prefixheader.py b/tools/gyp/test/mac/gyptest-prefixheader.py
deleted file mode 100755
index 0cf85f9..0000000
--- a/tools/gyp/test/mac/gyptest-prefixheader.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that GCC_PREFIX_HEADER works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-  test.run_gyp('test.gyp', chdir='prefixheader')
-
-  test.build('test.gyp', test.ALL, chdir='prefixheader')
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-rebuild.py b/tools/gyp/test/mac/gyptest-rebuild.py
deleted file mode 100755
index e615d06..0000000
--- a/tools/gyp/test/mac/gyptest-rebuild.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that app bundles are rebuilt correctly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'rebuild'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-
-  # Touch a source file, rebuild, and check that the app target is up-to-date.
-  test.touch('rebuild/main.c')
-  test.build('test.gyp', 'test_app', chdir=CHDIR)
-
-  test.up_to_date('test.gyp', 'test_app', chdir=CHDIR)
-
-  # Xcode runs postbuilds on every build, so targets with postbuilds are
-  # never marked as up_to_date.
-  if test.format != 'xcode':
-    # Same for a framework bundle.
-    test.build('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
-    test.up_to_date('test.gyp', 'test_framework_postbuilds', chdir=CHDIR)
-
-    # Test that an app bundle with a postbuild that touches the app binary needs
-    # to be built only once.
-    test.build('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
-    test.up_to_date('test.gyp', 'test_app_postbuilds', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-rpath.py b/tools/gyp/test/mac/gyptest-rpath.py
deleted file mode 100644
index ef415cd..0000000
--- a/tools/gyp/test/mac/gyptest-rpath.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that LD_DYLIB_INSTALL_NAME and DYLIB_INSTALL_NAME_BASE are handled
-correctly.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'rpath'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  def GetRpaths(p):
-    p = test.built_file_path(p, chdir=CHDIR)
-    r = re.compile(r'cmd LC_RPATH.*?path (.*?) \(offset \d+\)', re.DOTALL)
-    proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-    assert not proc.returncode
-    return r.findall(o)
-
-  if GetRpaths('libdefault_rpath.dylib') != []:
-    test.fail_test()
-
-  if GetRpaths('libexplicit_rpath.dylib') != ['@executable_path/.']:
-    test.fail_test()
-
-  if (GetRpaths('libexplicit_rpaths_escaped.dylib') !=
-      ['First rpath', 'Second rpath']):
-    test.fail_test()
-
-  if GetRpaths('My Framework.framework/My Framework') != ['@loader_path/.']:
-    test.fail_test()
-
-  if GetRpaths('executable') != ['@executable_path/.']:
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-sdkroot.py b/tools/gyp/test/mac/gyptest-sdkroot.py
deleted file mode 100644
index f7d41cd..0000000
--- a/tools/gyp/test/mac/gyptest-sdkroot.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that setting SDKROOT works.
-"""
-
-import TestGyp
-
-import os
-import subprocess
-import sys
-
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  def GetSDKPath(sdk):
-    """Return SDKROOT if the SDK version |sdk| is installed or empty string."""
-    DEVNULL = open(os.devnull, 'wb')
-    try:
-      proc = subprocess.Popen(
-          ['xcodebuild', '-version', '-sdk', 'macosx' + sdk, 'Path'],
-          stdout=subprocess.PIPE, stderr=DEVNULL)
-      return proc.communicate()[0].rstrip('\n')
-    finally:
-      DEVNULL.close()
-
-  def SelectSDK():
-    """Select the oldest SDK installed (greater than 10.6)."""
-    for sdk in ['10.6', '10.7', '10.8', '10.9']:
-      path = GetSDKPath(sdk)
-      if path:
-        return True, sdk, path
-    return False, '', ''
-
-  # Make sure this works on the bots, which only have the 10.6 sdk, and on
-  # dev machines which usually don't have the 10.6 sdk.
-  sdk_found, sdk, sdk_path = SelectSDK()
-  if not sdk_found:
-    test.fail_test()
-
-  test.write('sdkroot/test.gyp', test.read('sdkroot/test.gyp') % sdk)
-
-  test.run_gyp('test.gyp', '-D', 'sdk_path=%s' % sdk_path,
-               chdir='sdkroot')
-  test.build('test.gyp', test.ALL, chdir='sdkroot')
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-sourceless-module.py b/tools/gyp/test/mac/gyptest-sourceless-module.py
deleted file mode 100644
index c34bc54..0000000
--- a/tools/gyp/test/mac/gyptest-sourceless-module.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that bundles that have no 'sources' (pure resource containers) work.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='sourceless-module')
-
-  # Just needs to build without errors.
-  test.build('test.gyp', 'empty_bundle', chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'empty_bundle.bundle', chdir='sourceless-module')
-
-  # Needs to build, and contain a resource.
-  test.build('test.gyp', 'resource_bundle', chdir='sourceless-module')
-
-  test.built_file_must_exist(
-      'resource_bundle.bundle/Contents/Resources/foo.manifest',
-      chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
-      chdir='sourceless-module')
-
-  # Build an app containing an actionless bundle.
-  test.build(
-      'test.gyp',
-      'bundle_dependent_on_resource_bundle_no_actions',
-      chdir='sourceless-module')
-
-  test.built_file_must_exist(
-      'bundle_dependent_on_resource_bundle_no_actions.app/Contents/Resources/'
-          'mac_resource_bundle_no_actions.bundle/Contents/Resources/empty.txt',
-      chdir='sourceless-module')
-
-  # Needs to build and cause the bundle to be built.
-  test.build(
-      'test.gyp', 'dependent_on_resource_bundle', chdir='sourceless-module')
-
-  test.built_file_must_exist(
-      'resource_bundle.bundle/Contents/Resources/foo.manifest',
-      chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
-      chdir='sourceless-module')
-
-  # TODO(thakis): shared_libraries that have no sources but depend on static
-  # libraries currently only work with the ninja generator.  This is used by
-  # chrome/mac's components build.
-  if test.format == 'ninja':
-    # Check that an executable depending on a resource framework links fine too.
-    test.build(
-       'test.gyp', 'dependent_on_resource_framework', chdir='sourceless-module')
-
-    test.built_file_must_exist(
-        'resource_framework.framework/Resources/foo.manifest',
-        chdir='sourceless-module')
-    test.built_file_must_exist(
-        'resource_framework.framework/resource_framework',
-        chdir='sourceless-module')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-strip-default.py b/tools/gyp/test/mac/gyptest-strip-default.py
deleted file mode 100644
index f73fa11..0000000
--- a/tools/gyp/test/mac/gyptest-strip-default.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that the default STRIP_STYLEs match between different generators.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-import time
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR='strip'
-  test.run_gyp('test-defaults.gyp', chdir=CHDIR)
-
-  test.build('test-defaults.gyp', test.ALL, chdir=CHDIR)
-
-  # Lightweight check if stripping was done.
-  def OutPath(s):
-    return test.built_file_path(s, chdir=CHDIR)
-
-  def CheckNsyms(p, o_expected):
-    proc = subprocess.Popen(['nm', '-aU', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-
-    # Filter out mysterious "00 0000   OPT radr://5614542" symbol which
-    # is apparently only printed on the bots (older toolchain?).
-    # Yes, "radr", not "rdar".
-    o = ''.join(filter(lambda s: 'radr://5614542' not in s, o.splitlines(True)))
-
-    o = o.replace('A', 'T')
-    o = re.sub(r'^[a-fA-F0-9]+', 'XXXXXXXX', o, flags=re.MULTILINE)
-    assert not proc.returncode
-    if o != o_expected:
-      print 'Stripping: Expected symbols """\n%s""", got """\n%s"""' % (
-          o_expected, o)
-      test.fail_test()
-
-  CheckNsyms(OutPath('libsingle_dylib.dylib'),
-"""\
-XXXXXXXX S _ci
-XXXXXXXX S _i
-XXXXXXXX T _the_function
-XXXXXXXX t _the_hidden_function
-XXXXXXXX T _the_used_function
-XXXXXXXX T _the_visible_function
-""")
-  CheckNsyms(OutPath('single_so.so'),
-"""\
-XXXXXXXX S _ci
-XXXXXXXX S _i
-XXXXXXXX T _the_function
-XXXXXXXX t _the_hidden_function
-XXXXXXXX T _the_used_function
-XXXXXXXX T _the_visible_function
-""")
-  CheckNsyms(OutPath('single_exe'),
-"""\
-XXXXXXXX T __mh_execute_header
-""")
-
-  CheckNsyms(test.built_file_path(
-      'bundle_dylib.framework/Versions/A/bundle_dylib', chdir=CHDIR),
-"""\
-XXXXXXXX S _ci
-XXXXXXXX S _i
-XXXXXXXX T _the_function
-XXXXXXXX t _the_hidden_function
-XXXXXXXX T _the_used_function
-XXXXXXXX T _the_visible_function
-""")
-  CheckNsyms(test.built_file_path(
-      'bundle_so.bundle/Contents/MacOS/bundle_so', chdir=CHDIR),
-"""\
-XXXXXXXX S _ci
-XXXXXXXX S _i
-XXXXXXXX T _the_function
-XXXXXXXX T _the_used_function
-XXXXXXXX T _the_visible_function
-""")
-  CheckNsyms(test.built_file_path(
-      'bundle_exe.app/Contents/MacOS/bundle_exe', chdir=CHDIR),
-"""\
-XXXXXXXX T __mh_execute_header
-""")
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-strip.py b/tools/gyp/test/mac/gyptest-strip.py
deleted file mode 100755
index a729521..0000000
--- a/tools/gyp/test/mac/gyptest-strip.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that stripping works.
-"""
-
-import TestGyp
-import TestMac
-
-import re
-import subprocess
-import sys
-import time
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='strip')
-
-  test.build('test.gyp', test.ALL, chdir='strip')
-
-  # Lightweight check if stripping was done.
-  def OutPath(s):
-    return test.built_file_path(s, type=test.SHARED_LIB, chdir='strip')
-
-  def CheckNsyms(p, n_expected):
-    r = re.compile(r'nsyms\s+(\d+)')
-    o = subprocess.check_output(['otool', '-l', p])
-    m = r.search(o)
-    n = int(m.group(1))
-    if n != n_expected:
-      print 'Stripping: Expected %d symbols, got %d' % (n_expected, n)
-      test.fail_test()
-
-  # Starting with Xcode 5.0, clang adds an additional symbols to the compiled
-  # file when using a relative path to the input file. So when using ninja
-  # with Xcode 5.0 or higher, take this additional symbol into consideration
-  # for unstripped builds (it is stripped by all strip commands).
-  expected_extra_symbol_count = 0
-  if test.format in ['ninja', 'xcode-ninja'] \
-      and TestMac.Xcode.Version() >= '0500':
-    expected_extra_symbol_count = 1
-
-  # The actual numbers here are not interesting, they just need to be the same
-  # in both the xcode and the make build.
-  CheckNsyms(OutPath('no_postprocess'), 29 + expected_extra_symbol_count)
-  CheckNsyms(OutPath('no_strip'), 29 + expected_extra_symbol_count)
-  CheckNsyms(OutPath('strip_all'), 0)
-  CheckNsyms(OutPath('strip_nonglobal'), 6)
-  CheckNsyms(OutPath('strip_debugging'), 7)
-  CheckNsyms(OutPath('strip_all_custom_flags'), 0)
-  CheckNsyms(test.built_file_path(
-      'strip_all_bundle.framework/Versions/A/strip_all_bundle', chdir='strip'),
-      0)
-  CheckNsyms(OutPath('strip_save'), 7)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-swift-library.py b/tools/gyp/test/mac/gyptest-swift-library.py
deleted file mode 100644
index dde7a62..0000000
--- a/tools/gyp/test/mac/gyptest-swift-library.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a swift framework builds correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import collections
-import sys
-import subprocess
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  # Ensures that the given symbol is present in the given file, by running nm.
-  def CheckHasSymbolName(path, symbol):
-    output = subprocess.check_output(['nm', '-j', path])
-    idx = output.find(symbol)
-    if idx == -1:
-      print 'Swift: Could not find symobl: %s' % symbol
-      test.fail_test()
-
-  test_cases = []
-
-  # Run this for iOS on XCode 6.0 or greater
-  if TestMac.Xcode.Version() >= '0600':
-    test_cases.append(('Default', 'iphoneos'))
-    test_cases.append(('Default', 'iphonesimulator'))
-
-  # Run it for Mac on XCode 6.1 or greater
-  if TestMac.Xcode.Version() >= '0610':
-    test_cases.append(('Default', None))
-
-  # Generate the project.
-  test.run_gyp('test.gyp', chdir='swift-library')
-
-  # Build and verify for each configuration.
-  for configuration, sdk in test_cases:
-    kwds = collections.defaultdict(list)
-    if test.format == 'xcode':
-      if sdk is not None:
-        kwds['arguments'].extend(['-sdk', sdk])
-
-    test.set_configuration(configuration)
-    test.build('test.gyp', 'SwiftFramework', chdir='swift-library', **kwds)
-
-    filename = 'SwiftFramework.framework/SwiftFramework'
-    result_file = test.built_file_path(filename, chdir='swift-library')
-
-    test.must_exist(result_file)
-
-    # Check to make sure that our swift class (GypSwiftTest) is present in the
-    # built binary
-    CheckHasSymbolName(result_file, "C14SwiftFramework12GypSwiftTest")
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-type-envvars.py b/tools/gyp/test/mac/gyptest-type-envvars.py
deleted file mode 100755
index a5203c5..0000000
--- a/tools/gyp/test/mac/gyptest-type-envvars.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that MACH_O_TYPE etc are set correctly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp',
-               '-G', 'xcode_ninja_target_pattern=^(?!nonbundle_none).*$',
-               chdir='type_envvars')
-
-  test.build('test.gyp', test.ALL, chdir='type_envvars')
-
-  # The actual test is done by postbuild scripts during |test.build()|.
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-unicode-settings.py b/tools/gyp/test/mac/gyptest-unicode-settings.py
deleted file mode 100644
index a71b3bd..0000000
--- a/tools/gyp/test/mac/gyptest-unicode-settings.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that unicode strings in 'xcode_settings' work.
-Also checks that ASCII control characters are escaped properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-  test.run_gyp('test.gyp', chdir='unicode-settings')
-  test.build('test.gyp', test.ALL, chdir='unicode-settings')
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xcode-env-order.py b/tools/gyp/test/mac/gyptest-xcode-env-order.py
deleted file mode 100755
index 6e7ca24..0000000
--- a/tools/gyp/test/mac/gyptest-xcode-env-order.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that dependent Xcode settings are processed correctly.
-"""
-
-import TestGyp
-import TestMac
-
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'xcode-env-order'
-  INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
-
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-  # Env vars in 'copies' filenames.
-  test.built_file_must_exist('Test-copy-brace/main.c', chdir=CHDIR)
-  test.built_file_must_exist('Test-copy-paren/main.c', chdir=CHDIR)
-  test.built_file_must_exist('Test-copy-bare/main.c', chdir=CHDIR)
-
-  # Env vars in 'actions' filenames and inline actions
-  test.built_file_must_exist('action-copy-brace.txt', chdir=CHDIR)
-  test.built_file_must_exist('action-copy-paren.txt', chdir=CHDIR)
-  test.built_file_must_exist('action-copy-bare.txt', chdir=CHDIR)
-
-  # Env vars in 'rules' filenames and inline actions
-  test.built_file_must_exist('rule-copy-brace.txt', chdir=CHDIR)
-  test.built_file_must_exist('rule-copy-paren.txt', chdir=CHDIR)
-  # TODO: see comment in test.gyp for this file.
-  #test.built_file_must_exist('rule-copy-bare.txt', chdir=CHDIR)
-
-  # Env vars in Info.plist.
-  info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
-  test.must_exist(info_plist)
-
-  test.must_contain(info_plist, '''\
-\t<key>BraceProcessedKey1</key>
-\t<string>D:/Source/Project/Test</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>BraceProcessedKey2</key>
-\t<string>/Source/Project/Test</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>BraceProcessedKey3</key>
-\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
-
-  test.must_contain(info_plist, '''\
-\t<key>ParenProcessedKey1</key>
-\t<string>D:/Source/Project/Test</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>ParenProcessedKey2</key>
-\t<string>/Source/Project/Test</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>ParenProcessedKey3</key>
-\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
-
-  test.must_contain(info_plist, '''\
-\t<key>BareProcessedKey1</key>
-\t<string>D:/Source/Project/Test</string>''')
-  test.must_contain(info_plist, '''\
-\t<key>BareProcessedKey2</key>
-\t<string>/Source/Project/Test</string>''')
-  # NOTE: For bare variables, $PRODUCT_TYPE is not replaced! It _is_ replaced
-  # if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
-  # this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
-  # only for Xcode.
-  if test.format == 'xcode' and TestMac.Xcode.Version() < '0500':
-    test.must_contain(info_plist, '''\
-\t<key>BareProcessedKey3</key>
-\t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
-  else:
-    # The bug has been fixed by Xcode version 5.0.0.
-    test.must_contain(info_plist, '''\
-\t<key>BareProcessedKey3</key>
-\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
-
-  test.must_contain(info_plist, '''\
-\t<key>MixedProcessedKey</key>
-\t<string>/Source/Project:Test:mh_execute</string>''')
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xcode-gcc-clang.py b/tools/gyp/test/mac/gyptest-xcode-gcc-clang.py
deleted file mode 100644
index 981c3fc..0000000
--- a/tools/gyp/test/mac/gyptest-xcode-gcc-clang.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that xcode-style GCC_... settings that require clang are handled
-properly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  CHDIR = 'xcode-gcc'
-  test.run_gyp('test-clang.gyp', chdir=CHDIR)
-
-  test.build('test-clang.gyp', 'aliasing_yes', chdir=CHDIR)
-  test.run_built_executable('aliasing_yes', chdir=CHDIR, stdout="1\n")
-  test.build('test-clang.gyp', 'aliasing_no', chdir=CHDIR)
-  test.run_built_executable('aliasing_no', chdir=CHDIR, stdout="0\n")
-
-  # The default behavior changed: strict aliasing used to be off, now it's on
-  # by default. The important part is that this is identical for all generators
-  # (which it is). TODO(thakis): Enable this once the bots have a newer Xcode.
-  #test.build('test-clang.gyp', 'aliasing_default', chdir=CHDIR)
-  #test.run_built_executable('aliasing_default', chdir=CHDIR, stdout="1\n")
-  # For now, just check the generated ninja file:
-  if test.format == 'ninja':
-    contents = open(test.built_file_path('obj/aliasing_default.ninja',
-                                         chdir=CHDIR)).read()
-    if 'strict-aliasing' in contents:
-      test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xcode-gcc.py b/tools/gyp/test/mac/gyptest-xcode-gcc.py
deleted file mode 100644
index dee4bd5..0000000
--- a/tools/gyp/test/mac/gyptest-xcode-gcc.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that xcode-style GCC_... settings are handled properly.
-"""
-
-import TestGyp
-
-import os
-import subprocess
-import sys
-
-def IgnoreOutput(string, expected_string):
-  return True
-
-def CompilerVersion(compiler):
-  stdout = subprocess.check_output([compiler, '-v'], stderr=subprocess.STDOUT)
-  return stdout.rstrip('\n')
-
-def CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
-  # "clang" does not support the "-Winvalid-offsetof" flag, and silently
-  # ignore it. Starting with Xcode 5.0.0, "gcc" is just a "clang" binary with
-  # some hard-coded include path hack, so use the output of "-v" to detect if
-  # the compiler supports the flag or not.
-  return 'clang' not in CompilerVersion('/usr/bin/cc')
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  if test.format == 'xcode-ninja':
-    test.skip_test()
-
-  CHDIR = 'xcode-gcc'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-
-  # List of targets that'll pass. It expects targets of the same name with
-  # '-fail' appended that'll fail to build.
-  targets = [
-    'warn_about_missing_newline',
-  ]
-
-  # clang doesn't warn on invalid offsetofs, it silently ignores
-  # -Wno-invalid-offsetof.
-  if CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
-    targets.append('warn_about_invalid_offsetof_macro')
-
-  for target in targets:
-    test.build('test.gyp', target, chdir=CHDIR)
-    test.built_file_must_exist(target, chdir=CHDIR)
-    fail_target = target + '-fail'
-    test.build('test.gyp', fail_target, chdir=CHDIR, status=None,
-               stderr=None, match=IgnoreOutput)
-    test.built_file_must_not_exist(fail_target, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xcode-support-actions.py b/tools/gyp/test/mac/gyptest-xcode-support-actions.py
deleted file mode 100755
index ecc1402..0000000
--- a/tools/gyp/test/mac/gyptest-xcode-support-actions.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that support actions are properly created.
-"""
-
-import TestGyp
-
-import os
-import subprocess
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  CHDIR = 'xcode-support-actions'
-
-  test.run_gyp('test.gyp', '-Gsupport_target_suffix=_customsuffix', chdir=CHDIR)
-  test.build('test.gyp', target='target_customsuffix', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xctest.py b/tools/gyp/test/mac/gyptest-xctest.py
deleted file mode 100644
index a46a5fb..0000000
--- a/tools/gyp/test/mac/gyptest-xctest.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that xctest targets are correctly configured.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  # Ignore this test if Xcode 5 is not installed
-  import subprocess
-  job = subprocess.Popen(['xcodebuild', '-version'],
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.STDOUT)
-  out, err = job.communicate()
-  if job.returncode != 0:
-    raise Exception('Error %d running xcodebuild' % job.returncode)
-  xcode_version, build_number = out.splitlines()
-  # Convert the version string from 'Xcode 5.0' to ['5','0'].
-  xcode_version = xcode_version.split()[-1].split('.')
-  if xcode_version < ['5']:
-    test.pass_test()
-
-  CHDIR = 'xctest'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', chdir=CHDIR, arguments=['-scheme', 'classes', 'test'])
-
-  test.built_file_must_match('tests.xctest/Contents/Resources/resource.txt',
-                             'foo\n', chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/mac/gyptest-xcuitest.py b/tools/gyp/test/mac/gyptest-xcuitest.py
deleted file mode 100755
index 4e6067e..0000000
--- a/tools/gyp/test/mac/gyptest-xcuitest.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that xcuitest targets are correctly configured.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['xcode'])
-
-  # Ignore this test if Xcode 5 is not installed
-  import subprocess
-  job = subprocess.Popen(['xcodebuild', '-version'],
-                         stdout=subprocess.PIPE,
-                         stderr=subprocess.STDOUT)
-  out, err = job.communicate()
-  if job.returncode != 0:
-    raise Exception('Error %d running xcodebuild' % job.returncode)
-  xcode_version, build_number = out.splitlines()
-  # Convert the version string from 'Xcode 5.0' to ['5','0'].
-  xcode_version = xcode_version.split()[-1].split('.')
-  if xcode_version < ['7']:
-    test.pass_test()
-
-  CHDIR = 'xcuitest'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', chdir=CHDIR, arguments=[
-    '-target', 'tests',
-    '-sdk', 'iphonesimulator',
-  ])
-
-  test.pass_test()
diff --git a/tools/gyp/test/mac/identical-name/proxy/proxy.cc b/tools/gyp/test/mac/identical-name/proxy/proxy.cc
deleted file mode 100644
index 8e1782d..0000000
--- a/tools/gyp/test/mac/identical-name/proxy/proxy.cc
+++ /dev/null
@@ -1,2 +0,0 @@
-// Empty file
-
diff --git a/tools/gyp/test/mac/identical-name/proxy/proxy.gyp b/tools/gyp/test/mac/identical-name/proxy/proxy.gyp
deleted file mode 100644
index 38f44af..0000000
--- a/tools/gyp/test/mac/identical-name/proxy/proxy.gyp
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  'includes': ['../test.gypi'],
-  'targets': [{
-    'target_name': 'testlib',
-    'type': 'none',
-    'dependencies': ['testlib/testlib.gyp:testlib'],
-    'sources': ['proxy.cc'],
-  }],
-}
diff --git a/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.cc b/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.cc
deleted file mode 100644
index 8e1782d..0000000
--- a/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.cc
+++ /dev/null
@@ -1,2 +0,0 @@
-// Empty file
-
diff --git a/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp b/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp
deleted file mode 100644
index ed1c62e..0000000
--- a/tools/gyp/test/mac/identical-name/proxy/testlib/testlib.gyp
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  'includes': ['../../test.gypi'],
-  'targets': [{
-    'target_name': 'testlib',
-    'type': 'static_library',
-    'sources': ['testlib.cc'],
-  }],
-}
diff --git a/tools/gyp/test/mac/identical-name/test-should-fail.gyp b/tools/gyp/test/mac/identical-name/test-should-fail.gyp
deleted file mode 100644
index 72bfc7a..0000000
--- a/tools/gyp/test/mac/identical-name/test-should-fail.gyp
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  'targets': [{
-    'target_name': 'test',
-    'type': 'executable',
-    'dependencies': [
-      'testlib/testlib.gyp:proxy',
-      'proxy/proxy.gyp:testlib',
-    ],
-  }],
-}
diff --git a/tools/gyp/test/mac/identical-name/test.gyp b/tools/gyp/test/mac/identical-name/test.gyp
deleted file mode 100644
index 717220e..0000000
--- a/tools/gyp/test/mac/identical-name/test.gyp
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-  'includes': ['test.gypi'],
-  'targets': [{
-    'target_name': 'test',
-    'type': 'executable',
-    'dependencies': [
-      'testlib/testlib.gyp:proxy',
-      'proxy/proxy.gyp:testlib',
-    ],
-  }],
-}
\ No newline at end of file
diff --git a/tools/gyp/test/mac/identical-name/test.gypi b/tools/gyp/test/mac/identical-name/test.gypi
deleted file mode 100644
index 61b7c2b..0000000
--- a/tools/gyp/test/mac/identical-name/test.gypi
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  'target_defaults': {
-    'xcode_settings': {
-      'SYMROOT': '<(DEPTH)/$SRCROOT/',
-    },
-  },
-}
diff --git a/tools/gyp/test/mac/identical-name/testlib/main.cc b/tools/gyp/test/mac/identical-name/testlib/main.cc
deleted file mode 100644
index 5c2fa9b..0000000
--- a/tools/gyp/test/mac/identical-name/testlib/main.cc
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int argc, char **argv) {
-  return 0;
-}
diff --git a/tools/gyp/test/mac/identical-name/testlib/testlib.gyp b/tools/gyp/test/mac/identical-name/testlib/testlib.gyp
deleted file mode 100644
index aa8b851..0000000
--- a/tools/gyp/test/mac/identical-name/testlib/testlib.gyp
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-  'includes': ['../test.gypi'],
-  'targets': [{
-    'target_name': 'proxy',
-    'type': 'static_library',
-    'sources': ['void.cc'],
-    'dependencies': ['testlib'],
-    'export_dependent_settings': ['testlib'],
-  }, {
-    'target_name': 'testlib',
-    'type': 'static_library',
-    'sources': ['main.cc'],
-  }],
-}
diff --git a/tools/gyp/test/mac/identical-name/testlib/void.cc b/tools/gyp/test/mac/identical-name/testlib/void.cc
deleted file mode 100644
index 8e1782d..0000000
--- a/tools/gyp/test/mac/identical-name/testlib/void.cc
+++ /dev/null
@@ -1,2 +0,0 @@
-// Empty file
-
diff --git a/tools/gyp/test/mac/infoplist-process/Info.plist b/tools/gyp/test/mac/infoplist-process/Info.plist
deleted file mode 100644
index cb65721..0000000
--- a/tools/gyp/test/mac/infoplist-process/Info.plist
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-	<key>ProcessedKey1</key>
-        <string>PROCESSED_KEY1</string>
-	<key>ProcessedKey2</key>
-        <string>PROCESSED_KEY2</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/infoplist-process/main.c b/tools/gyp/test/mac/infoplist-process/main.c
deleted file mode 100644
index 1bf4b2a..0000000
--- a/tools/gyp/test/mac/infoplist-process/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/mac/infoplist-process/test1.gyp b/tools/gyp/test/mac/infoplist-process/test1.gyp
deleted file mode 100644
index bc625a9..0000000
--- a/tools/gyp/test/mac/infoplist-process/test1.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'configurations': {
-        'One': {
-        },
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'INFOPLIST_PREPROCESS': 'YES',
-        'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1=Foo PROCESSED_KEY2=Bar',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/infoplist-process/test2.gyp b/tools/gyp/test/mac/infoplist-process/test2.gyp
deleted file mode 100644
index ecfbc9f..0000000
--- a/tools/gyp/test/mac/infoplist-process/test2.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'configurations': {
-        'Two': {
-        },
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'INFOPLIST_PREPROCESS': 'YES',
-        'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1="Foo (Bar)"',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/infoplist-process/test3.gyp b/tools/gyp/test/mac/infoplist-process/test3.gyp
deleted file mode 100644
index be8fe75..0000000
--- a/tools/gyp/test/mac/infoplist-process/test3.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'configurations': {
-        'Three': {
-        },
-      },
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'INFOPLIST_PREPROCESS': 'NO',
-        'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'PROCESSED_KEY1=Foo',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/installname/Info.plist b/tools/gyp/test/mac/installname/Info.plist
deleted file mode 100644
index 5e05a51..0000000
--- a/tools/gyp/test/mac/installname/Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>FMWK</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/installname/file.c b/tools/gyp/test/mac/installname/file.c
deleted file mode 100644
index a39fce0..0000000
--- a/tools/gyp/test/mac/installname/file.c
+++ /dev/null
@@ -1 +0,0 @@
-int f() { return 0; }
diff --git a/tools/gyp/test/mac/installname/main.c b/tools/gyp/test/mac/installname/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/installname/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/installname/test.gyp b/tools/gyp/test/mac/installname/test.gyp
deleted file mode 100644
index 60c867f..0000000
--- a/tools/gyp/test/mac/installname/test.gyp
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'default_installname',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'default_bundle_installname',
-      'product_name': 'My Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'explicit_installname',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'LD_DYLIB_INSTALL_NAME': 'Trapped in a dynamiclib factory',
-      },
-    },
-    {
-      'target_name': 'explicit_installname_base',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
-
-      },
-    },
-    {
-      'target_name': 'explicit_installname_base_bundle',
-      'product_name': 'My Other Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
-
-      },
-    },
-    {
-      'target_name': 'both_base_and_installname',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        # LD_DYLIB_INSTALL_NAME wins.
-        'LD_DYLIB_INSTALL_NAME': 'Still trapped in a dynamiclib factory',
-        'DYLIB_INSTALL_NAME_BASE': '@executable_path/../../..',
-      },
-    },
-    {
-      'target_name': 'explicit_installname_with_base',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
-      },
-    },
-    {
-      'target_name': 'explicit_installname_with_explicit_base',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'DYLIB_INSTALL_NAME_BASE': '@executable_path/..',
-        'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
-      },
-    },
-    {
-      'target_name': 'executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'xcode_settings': {
-        'LD_DYLIB_INSTALL_NAME': 'Should be ignored for not shared_lib',
-      },
-    },
-    # Regression test for http://crbug.com/113918
-    {
-      'target_name': 'install_name_with_info_plist',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'LD_DYLIB_INSTALL_NAME': '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/kext/GypKext/GypKext-Info.plist b/tools/gyp/test/mac/kext/GypKext/GypKext-Info.plist
deleted file mode 100644
index 8422609..0000000
--- a/tools/gyp/test/mac/kext/GypKext/GypKext-Info.plist
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>BuildMachineOSBuild</key>
-	<string>Doesn't matter, will be overwritten</string>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME:rfc1034identifier}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>KEXT</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>ause</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>OSBundleLibraries</key>
-	<dict>
-		<key>com.apple.kpi.libkern</key>
-		<string>10.0</string>
-	</dict>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/kext/GypKext/GypKext.c b/tools/gyp/test/mac/kext/GypKext/GypKext.c
deleted file mode 100644
index 9b611b0..0000000
--- a/tools/gyp/test/mac/kext/GypKext/GypKext.c
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <sys/systm.h>
-#include <mach/mach_types.h>
-
-kern_return_t GypKext_start(kmod_info_t* ki, void* d) {
-  printf("GypKext has started.\n");
-  return KERN_SUCCESS;
-}
-
-kern_return_t GypKext_stop(kmod_info_t* ki, void* d) {
-  printf("GypKext has stopped.\n");
-  return KERN_SUCCESS;
-}
diff --git a/tools/gyp/test/mac/kext/kext.gyp b/tools/gyp/test/mac/kext/kext.gyp
deleted file mode 100644
index 5b93087..0000000
--- a/tools/gyp/test/mac/kext/kext.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'gypkext',
-      'product_name': 'GypKext',
-      'type': 'mac_kernel_extension',
-      'sources': [
-        'GypKext/GypKext.c',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'GypKext/GypKext-Info.plist',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/ldflags-libtool/file.c b/tools/gyp/test/mac/ldflags-libtool/file.c
deleted file mode 100644
index 56757a7..0000000
--- a/tools/gyp/test/mac/ldflags-libtool/file.c
+++ /dev/null
@@ -1 +0,0 @@
-void f() {}
diff --git a/tools/gyp/test/mac/ldflags-libtool/test.gyp b/tools/gyp/test/mac/ldflags-libtool/test.gyp
deleted file mode 100644
index 4e7aa07..0000000
--- a/tools/gyp/test/mac/ldflags-libtool/test.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'ldflags_passed_to_libtool',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'OTHER_LDFLAGS': [
-          '-fblorfen-horf-does-not-exist',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/ldflags/subdirectory/Info.plist b/tools/gyp/test/mac/ldflags/subdirectory/Info.plist
deleted file mode 100644
index 5f5e9ab..0000000
--- a/tools/gyp/test/mac/ldflags/subdirectory/Info.plist
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/ldflags/subdirectory/file.c b/tools/gyp/test/mac/ldflags/subdirectory/file.c
deleted file mode 100644
index 90c4554..0000000
--- a/tools/gyp/test/mac/ldflags/subdirectory/file.c
+++ /dev/null
@@ -1,2 +0,0 @@
-void f() {}
-void g() {}
diff --git a/tools/gyp/test/mac/ldflags/subdirectory/symbol_list.def b/tools/gyp/test/mac/ldflags/subdirectory/symbol_list.def
deleted file mode 100644
index 0ab7543..0000000
--- a/tools/gyp/test/mac/ldflags/subdirectory/symbol_list.def
+++ /dev/null
@@ -1 +0,0 @@
-_f
diff --git a/tools/gyp/test/mac/ldflags/subdirectory/test.gyp b/tools/gyp/test/mac/ldflags/subdirectory/test.gyp
deleted file mode 100644
index db00c74..0000000
--- a/tools/gyp/test/mac/ldflags/subdirectory/test.gyp
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'raw',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'OTHER_LDFLAGS': [
-          '-exported_symbols_list symbol_list.def',
-          '-sectcreate __TEXT __info_plist Info.plist',
-        ],
-      },
-    },
-    # TODO(thakis): This form should ideally be supported, too. (But
-    # -Wlfoo,bar,baz is cleaner so people should use that anyway.)
-    #{
-    #  'target_name': 'raw_sep',
-    #  'type': 'shared_library',
-    #  'sources': [ 'file.c', ],
-    #  'xcode_settings': {
-    #    'OTHER_LDFLAGS': [
-    #      '-exported_symbols_list', 'symbol_list.def',
-    #      '-sectcreate', '__TEXT', '__info_plist', 'Info.plist',
-    #    ],
-    #  },
-    #},
-    {
-      'target_name': 'wl_space',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'OTHER_LDFLAGS': [
-          # Works because clang passes unknown files on to the linker.
-          '-Wl,-exported_symbols_list symbol_list.def',
-        ],
-      },
-    },
-    # TODO(thakis): This form should ideally be supported, too. (But
-    # -Wlfoo,bar,baz is cleaner so people should use that anyway.)
-    #{
-    #  'target_name': 'wl_space_sep',
-    #  'type': 'shared_library',
-    #  'sources': [ 'file.c', ],
-    #  'xcode_settings': {
-    #    'OTHER_LDFLAGS': [
-    #      # Works because clang passes unknown files on to the linker.
-    #      '-Wl,-exported_symbols_list', 'symbol_list.def',
-    #    ],
-    #  },
-    #},
-    {
-      'target_name': 'wl_comma',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'OTHER_LDFLAGS': [
-          '-Wl,-exported_symbols_list,symbol_list.def',
-          '-Wl,-sectcreate,__TEXT,__info_plist,Info.plist',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/libraries/subdir/README.txt b/tools/gyp/test/mac/libraries/subdir/README.txt
deleted file mode 100644
index 4031ded..0000000
--- a/tools/gyp/test/mac/libraries/subdir/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-Make things live in a subdirectory, to make sure that DEPTH works correctly.
diff --git a/tools/gyp/test/mac/libraries/subdir/hello.cc b/tools/gyp/test/mac/libraries/subdir/hello.cc
deleted file mode 100644
index a43554c..0000000
--- a/tools/gyp/test/mac/libraries/subdir/hello.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <iostream>
-
-int main() {
-  std::cout << "Hello, world!" << std::endl;
-  return 0;
-}
diff --git a/tools/gyp/test/mac/libraries/subdir/mylib.c b/tools/gyp/test/mac/libraries/subdir/mylib.c
deleted file mode 100644
index e771991..0000000
--- a/tools/gyp/test/mac/libraries/subdir/mylib.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int my_foo(int x) {
-  return x + 1;
-}
diff --git a/tools/gyp/test/mac/libraries/subdir/test.gyp b/tools/gyp/test/mac/libraries/subdir/test.gyp
deleted file mode 100644
index 59fef51..0000000
--- a/tools/gyp/test/mac/libraries/subdir/test.gyp
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'libraries-test',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-      ],
-      'link_settings': {
-        'libraries': [
-          'libcrypto.dylib',
-        ],
-      },
-    },
-    {
-      # This creates a static library and puts it in a nonstandard location for
-      # libraries-search-path-test.
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'sources': [
-        'mylib.c',
-      ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Make a secret location',
-          'action': [
-            'mkdir',
-            '-p',
-            '${SRCROOT}/../secret_location',
-          ],
-        },
-        {
-          'postbuild_name': 'Copy to secret location, with secret name',
-          'action': [
-            'cp',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-            '${SRCROOT}/../secret_location/libmysecretlib.a',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'libraries-search-path-test',
-      'type': 'executable',
-      'dependencies': [ 'mylib' ],
-      'sources': [
-        'hello.cc',
-      ],
-      'xcode_settings': {
-        'LIBRARY_SEARCH_PATHS': [
-          '<(DEPTH)/secret_location',
-        ],
-      },
-      'link_settings': {
-        'libraries': [
-          'libmysecretlib.a',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/libtool-zero/mylib.c b/tools/gyp/test/mac/libtool-zero/mylib.c
deleted file mode 100644
index b26d61b..0000000
--- a/tools/gyp/test/mac/libtool-zero/mylib.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int my_foo(int x) {
-  return x + 1;
-}
diff --git a/tools/gyp/test/mac/libtool-zero/test.gyp b/tools/gyp/test/mac/libtool-zero/test.gyp
deleted file mode 100644
index 0d6ee55..0000000
--- a/tools/gyp/test/mac/libtool-zero/test.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'sources': [
-        'mylib.c',
-      ],
-      'xcode_settings': {
-        'ARCHS': [ 'i386', 'x86_64' ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/loadable-module-bundle-product-extension/src.cc b/tools/gyp/test/mac/loadable-module-bundle-product-extension/src.cc
deleted file mode 100644
index 3d878e9..0000000
--- a/tools/gyp/test/mac/loadable-module-bundle-product-extension/src.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int test() {
-  return 1337;
-}
diff --git a/tools/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp b/tools/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp
deleted file mode 100644
index 684a2c0..0000000
--- a/tools/gyp/test/mac/loadable-module-bundle-product-extension/test.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [{
-    'target_name': 'test',
-    'type': 'none',
-    'dependencies': ['child_one', 'child_two'],
-  }, {
-    'target_name': 'child_one',
-    'product_name': 'Collide',
-    'product_extension': 'bar',
-    'sources': ['src.cc'],
-    'type': 'loadable_module',
-    'mac_bundle': 1,
-  }, {
-    'target_name': 'child_two',
-    'product_name': 'Collide',
-    'product_extension': 'foo',
-    'sources': ['src.cc'],
-    'type': 'loadable_module',
-    'mac_bundle': 1,
-  }],
-}
diff --git a/tools/gyp/test/mac/loadable-module/Info.plist b/tools/gyp/test/mac/loadable-module/Info.plist
deleted file mode 100644
index f6607ae..0000000
--- a/tools/gyp/test/mac/loadable-module/Info.plist
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.test_loadable_module</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>BRPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1.0</string>
-	<key>CFPlugInDynamicRegisterFunction</key>
-	<string></string>
-	<key>CFPlugInDynamicRegistration</key>
-	<string>NO</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/loadable-module/module.c b/tools/gyp/test/mac/loadable-module/module.c
deleted file mode 100644
index 9584538..0000000
--- a/tools/gyp/test/mac/loadable-module/module.c
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int SuperFly() {
-  return 42;
-}
-
-const char* SuperFoo() {
-  return "Hello World";
-}
diff --git a/tools/gyp/test/mac/loadable-module/test.gyp b/tools/gyp/test/mac/loadable-module/test.gyp
deleted file mode 100644
index 3c8a530..0000000
--- a/tools/gyp/test/mac/loadable-module/test.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_loadable_module',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'sources': [ 'module.c' ],
-      'product_extension': 'plugin',
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/lto/asmfile.S b/tools/gyp/test/mac/lto/asmfile.S
deleted file mode 100644
index ea23759..0000000
--- a/tools/gyp/test/mac/lto/asmfile.S
+++ /dev/null
@@ -1,2 +0,0 @@
-.globl _asfun
-ret
diff --git a/tools/gyp/test/mac/lto/ccfile.cc b/tools/gyp/test/mac/lto/ccfile.cc
deleted file mode 100644
index 2503afd..0000000
--- a/tools/gyp/test/mac/lto/ccfile.cc
+++ /dev/null
@@ -1 +0,0 @@
-void ccfun() {}
diff --git a/tools/gyp/test/mac/lto/cfile.c b/tools/gyp/test/mac/lto/cfile.c
deleted file mode 100644
index d02ef4b..0000000
--- a/tools/gyp/test/mac/lto/cfile.c
+++ /dev/null
@@ -1 +0,0 @@
-void cfun() {}
diff --git a/tools/gyp/test/mac/lto/mfile.m b/tools/gyp/test/mac/lto/mfile.m
deleted file mode 100644
index 85b7d93..0000000
--- a/tools/gyp/test/mac/lto/mfile.m
+++ /dev/null
@@ -1 +0,0 @@
-void mfun() {}
diff --git a/tools/gyp/test/mac/lto/mmfile.mm b/tools/gyp/test/mac/lto/mmfile.mm
deleted file mode 100644
index beaa359..0000000
--- a/tools/gyp/test/mac/lto/mmfile.mm
+++ /dev/null
@@ -1 +0,0 @@
-void mmfun() {}
diff --git a/tools/gyp/test/mac/lto/test.gyp b/tools/gyp/test/mac/lto/test.gyp
deleted file mode 100644
index 0a8e851..0000000
--- a/tools/gyp/test/mac/lto/test.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'lto',
-      'type': 'shared_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile.cc',
-        'mmfile.mm',
-        'asmfile.S',
-      ],
-      'xcode_settings': {
-        'LLVM_LTO': 'YES',
-      },
-    },
-    {
-      'target_name': 'lto_static',
-      'type': 'static_library',
-      'sources': [
-        'cfile.c',
-        'mfile.m',
-        'ccfile.cc',
-        'mmfile.mm',
-        'asmfile.S',
-      ],
-      'xcode_settings': {
-        'LLVM_LTO': 'YES',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/missing-cfbundlesignature/Info.plist b/tools/gyp/test/mac/missing-cfbundlesignature/Info.plist
deleted file mode 100644
index 0c31674..0000000
--- a/tools/gyp/test/mac/missing-cfbundlesignature/Info.plist
+++ /dev/null
@@ -1,10 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist b/tools/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist
deleted file mode 100644
index 4709528..0000000
--- a/tools/gyp/test/mac/missing-cfbundlesignature/Other-Info.plist
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-        <key>CFBundleSignature</key>
-        <string>F</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist b/tools/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist
deleted file mode 100644
index 5b61fe2..0000000
--- a/tools/gyp/test/mac/missing-cfbundlesignature/Third-Info.plist
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-        <key>CFBundleSignature</key>
-        <string>some really long string</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/missing-cfbundlesignature/file.c b/tools/gyp/test/mac/missing-cfbundlesignature/file.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/missing-cfbundlesignature/file.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/missing-cfbundlesignature/test.gyp b/tools/gyp/test/mac/missing-cfbundlesignature/test.gyp
deleted file mode 100644
index b50cc27..0000000
--- a/tools/gyp/test/mac/missing-cfbundlesignature/test.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'mytarget',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-      },
-    },
-    {
-      'target_name': 'myothertarget',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Other-Info.plist',
-      },
-    },
-    {
-      'target_name': 'thirdtarget',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Third-Info.plist',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/non-strs-flattened-to-env/Info.plist b/tools/gyp/test/mac/non-strs-flattened-to-env/Info.plist
deleted file mode 100644
index 11fc4b6..0000000
--- a/tools/gyp/test/mac/non-strs-flattened-to-env/Info.plist
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-        <!-- Not a valid plist file since it's missing so much. That's fine. -->
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>My Variable</key>
-	<string>${MY_VAR}</string>
-	<key>CFlags</key>
-	<string>${OTHER_CFLAGS}</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/non-strs-flattened-to-env/main.c b/tools/gyp/test/mac/non-strs-flattened-to-env/main.c
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/mac/non-strs-flattened-to-env/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/mac/non-strs-flattened-to-env/test.gyp b/tools/gyp/test/mac/non-strs-flattened-to-env/test.gyp
deleted file mode 100644
index aaf821c..0000000
--- a/tools/gyp/test/mac/non-strs-flattened-to-env/test.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'main.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'MY_VAR': 'some expansion',
-        'OTHER_CFLAGS': [
-          # Just some (more than one) random flags.
-          '-fstack-protector-all',
-          '-fno-strict-aliasing',
-          '-DS="A Space"',  # Would normally be in 'defines'
-        ],
-      },
-      'include_dirs': [
-        '$(SDKROOT)/usr/include/libxml2',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/objc-arc/c-file.c b/tools/gyp/test/mac/objc-arc/c-file.c
deleted file mode 100644
index 6536132..0000000
--- a/tools/gyp/test/mac/objc-arc/c-file.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#if __has_feature(objc_arc)
-#error "C files shouldn't be ARC'd!"
-#endif
-
-void c_fun() {}
-
diff --git a/tools/gyp/test/mac/objc-arc/cc-file.cc b/tools/gyp/test/mac/objc-arc/cc-file.cc
deleted file mode 100644
index 95e14ea..0000000
--- a/tools/gyp/test/mac/objc-arc/cc-file.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-#if __has_feature(objc_arc)
-#error "C++ files shouldn't be ARC'd!"
-#endif
-
-void cc_fun() {}
diff --git a/tools/gyp/test/mac/objc-arc/m-file-no-arc.m b/tools/gyp/test/mac/objc-arc/m-file-no-arc.m
deleted file mode 100644
index 8ffaabf..0000000
--- a/tools/gyp/test/mac/objc-arc/m-file-no-arc.m
+++ /dev/null
@@ -1,5 +0,0 @@
-#if __has_feature(objc_arc)
-#error "ObjC files without CLANG_ENABLE_OBJC_ARC should not be ARC'd!"
-#endif
-
-void m_fun() {}
diff --git a/tools/gyp/test/mac/objc-arc/m-file.m b/tools/gyp/test/mac/objc-arc/m-file.m
deleted file mode 100644
index 9689b1f..0000000
--- a/tools/gyp/test/mac/objc-arc/m-file.m
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !__has_feature(objc_arc)
-#error "ObjC files with CLANG_ENABLE_OBJC_ARC should be ARC'd!"
-#endif
-
-void m_fun() {}
diff --git a/tools/gyp/test/mac/objc-arc/mm-file-no-arc.mm b/tools/gyp/test/mac/objc-arc/mm-file-no-arc.mm
deleted file mode 100644
index 0dac539..0000000
--- a/tools/gyp/test/mac/objc-arc/mm-file-no-arc.mm
+++ /dev/null
@@ -1,5 +0,0 @@
-#if __has_feature(objc_arc)
-#error "ObjC++ files without CLANG_ENABLE_OBJC_ARC should not be ARC'd!"
-#endif
-
-void mm_fun() {}
diff --git a/tools/gyp/test/mac/objc-arc/mm-file.mm b/tools/gyp/test/mac/objc-arc/mm-file.mm
deleted file mode 100644
index 9467e96..0000000
--- a/tools/gyp/test/mac/objc-arc/mm-file.mm
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !__has_feature(objc_arc)
-#error "ObjC++ files with CLANG_ENABLE_OBJC_ARC should be ARC'd!"
-#endif
-
-void mm_fun() {}
diff --git a/tools/gyp/test/mac/objc-arc/test.gyp b/tools/gyp/test/mac/objc-arc/test.gyp
deleted file mode 100644
index 59cf0e2..0000000
--- a/tools/gyp/test/mac/objc-arc/test.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-
-  'targets': [
-    {
-      'target_name': 'arc_enabled',
-      'type': 'static_library',
-      'sources': [
-        'c-file.c',
-        'cc-file.cc',
-        'm-file.m',
-        'mm-file.mm',
-      ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'MACOSX_DEPLOYMENT_TARGET': '10.6',
-        'ARCHS': [ 'x86_64' ],  # For the non-fragile objc ABI.
-        'CLANG_ENABLE_OBJC_ARC': 'YES',
-      },
-    },
-
-    {
-      'target_name': 'arc_disabled',
-      'type': 'static_library',
-      'sources': [
-        'c-file.c',
-        'cc-file.cc',
-        'm-file-no-arc.m',
-        'mm-file-no-arc.mm',
-      ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'MACOSX_DEPLOYMENT_TARGET': '10.6',
-        'ARCHS': [ 'x86_64' ],  # For the non-fragile objc ABI.
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/objc-gc/c-file.c b/tools/gyp/test/mac/objc-gc/c-file.c
deleted file mode 100644
index 2855a00..0000000
--- a/tools/gyp/test/mac/objc-gc/c-file.c
+++ /dev/null
@@ -1 +0,0 @@
-void c_fun() {}
diff --git a/tools/gyp/test/mac/objc-gc/cc-file.cc b/tools/gyp/test/mac/objc-gc/cc-file.cc
deleted file mode 100644
index 71e47a0..0000000
--- a/tools/gyp/test/mac/objc-gc/cc-file.cc
+++ /dev/null
@@ -1 +0,0 @@
-void cc_fun() {}
diff --git a/tools/gyp/test/mac/objc-gc/main.m b/tools/gyp/test/mac/objc-gc/main.m
deleted file mode 100644
index 1a87f8e..0000000
--- a/tools/gyp/test/mac/objc-gc/main.m
+++ /dev/null
@@ -1,6 +0,0 @@
-#import <Foundation/Foundation.h>
-
-int main() {
-  printf("gc on: %d\n", [NSGarbageCollector defaultCollector] != NULL);
-  return 0;
-}
diff --git a/tools/gyp/test/mac/objc-gc/needs-gc-mm.mm b/tools/gyp/test/mac/objc-gc/needs-gc-mm.mm
deleted file mode 100644
index fc3fee9..0000000
--- a/tools/gyp/test/mac/objc-gc/needs-gc-mm.mm
+++ /dev/null
@@ -1 +0,0 @@
-void objcpp_fun() { }
diff --git a/tools/gyp/test/mac/objc-gc/needs-gc.m b/tools/gyp/test/mac/objc-gc/needs-gc.m
deleted file mode 100644
index ca77976..0000000
--- a/tools/gyp/test/mac/objc-gc/needs-gc.m
+++ /dev/null
@@ -1 +0,0 @@
-void objc_fun() { }
diff --git a/tools/gyp/test/mac/objc-gc/test.gyp b/tools/gyp/test/mac/objc-gc/test.gyp
deleted file mode 100644
index 4d827c1..0000000
--- a/tools/gyp/test/mac/objc-gc/test.gyp
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    # For some reason, static_library targets that are built with gc=required
-    # and then linked to executables that don't use gc, the linker doesn't
-    # complain. For shared_libraries it does, so use that.
-    {
-      'target_name': 'no_gc_lib',
-      'type': 'shared_library',
-      'sources': [
-        'c-file.c',
-        'cc-file.cc',
-        'needs-gc-mm.mm',
-        'needs-gc.m',
-      ],
-    },
-    {
-      'target_name': 'gc_lib',
-      'type': 'shared_library',
-      'sources': [
-        'c-file.c',
-        'cc-file.cc',
-        'needs-gc-mm.mm',
-        'needs-gc.m',
-      ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'supported',
-      },
-    },
-    {
-      'target_name': 'gc_req_lib',
-      'type': 'shared_library',
-      'sources': [
-        'c-file.c',
-        'cc-file.cc',
-        'needs-gc-mm.mm',
-        'needs-gc.m',
-      ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'required',
-      },
-    },
-
-    {
-      'target_name': 'gc_exe_fails',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'no_gc_lib' ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'required',
-      },
-      'libraries': [ 'Foundation.framework' ],
-    },
-    {
-      'target_name': 'gc_req_exe',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'gc_lib' ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'required',
-      },
-      'libraries': [ 'Foundation.framework' ],
-    },
-    {
-      'target_name': 'gc_exe_req_lib',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'gc_req_lib' ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'supported',
-      },
-      'libraries': [ 'Foundation.framework' ],
-    },
-    {
-      'target_name': 'gc_exe',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'gc_lib' ],
-      'xcode_settings': {
-        'GCC_ENABLE_OBJC_GC': 'supported',
-      },
-      'libraries': [ 'Foundation.framework' ],
-    },
-    {
-      'target_name': 'gc_off_exe_req_lib',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'gc_req_lib' ],
-      'libraries': [ 'Foundation.framework' ],
-    },
-    {
-      'target_name': 'gc_off_exe',
-      'type': 'executable',
-      'sources': [ 'main.m' ],
-      'dependencies': [ 'gc_lib' ],
-      'libraries': [ 'Foundation.framework' ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist b/tools/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist
deleted file mode 100644
index ec36829..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/Framework-Info.plist
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>FMWK</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-	<key>RandomKey</key>
-	<string>RandomValue</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist b/tools/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist
deleted file mode 100644
index 98fd515..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/TestApp-Info.plist
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/copied.txt b/tools/gyp/test/mac/postbuild-copy-bundle/copied.txt
deleted file mode 100644
index 1784138..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/copied.txt
+++ /dev/null
@@ -1 +0,0 @@
-old copied file
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/empty.c b/tools/gyp/test/mac/postbuild-copy-bundle/empty.c
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/empty.c
+++ /dev/null
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/main.c b/tools/gyp/test/mac/postbuild-copy-bundle/main.c
deleted file mode 100644
index 21c1963..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/main.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-int main() {}
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh b/tools/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh
deleted file mode 100755
index 930fec6..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/postbuild-copy-framework.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-rsync -acC --delete "$1" "$2"
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/resource_file.sb b/tools/gyp/test/mac/postbuild-copy-bundle/resource_file.sb
deleted file mode 100644
index 42057fa..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/resource_file.sb
+++ /dev/null
@@ -1 +0,0 @@
-This is included in the framework bundle.
diff --git a/tools/gyp/test/mac/postbuild-copy-bundle/test.gyp b/tools/gyp/test/mac/postbuild-copy-bundle/test.gyp
deleted file mode 100644
index a03e643..0000000
--- a/tools/gyp/test/mac/postbuild-copy-bundle/test.gyp
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_bundle',
-      'product_name': 'My Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'empty.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Framework-Info.plist',
-      },
-      'mac_bundle_resources': [
-        'resource_file.sb',
-      ],
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Libraries',
-          'files': [ 'copied.txt' ],
-        },
-      ],
-    },
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'dependencies': [
-        'test_bundle',
-      ],
-      'sources': [ 'main.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp-Info.plist',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'Copy dependent framework into app',
-          'action': [
-            './postbuild-copy-framework.sh',
-            '${BUILT_PRODUCTS_DIR}/My Framework.framework',
-            '${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}/',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/postbuild-defaults/Info.plist b/tools/gyp/test/mac/postbuild-defaults/Info.plist
deleted file mode 100644
index d3f54d7..0000000
--- a/tools/gyp/test/mac/postbuild-defaults/Info.plist
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-        <!-- Not a valid plist file since it's missing so much. That's fine. -->
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleName</key>
-        <string>${PRODUCT_NAME}</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/postbuild-defaults/main.c b/tools/gyp/test/mac/postbuild-defaults/main.c
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/mac/postbuild-defaults/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh b/tools/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh
deleted file mode 100755
index 56af2a8..0000000
--- a/tools/gyp/test/mac/postbuild-defaults/postbuild-defaults.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-# This is the built Info.plist in the output directory.
-PLIST="${BUILT_PRODUCTS_DIR}"/Test.app/Contents/Info  # No trailing .plist
-echo $(defaults read "${PLIST}" "CFBundleName") > "${BUILT_PRODUCTS_DIR}/result"
-
-# This is the source Info.plist next to this script file.
-PLIST="${SRCROOT}"/Info  # No trailing .plist
-echo $(defaults read "${PLIST}" "CFBundleName") \
-    >> "${BUILT_PRODUCTS_DIR}/result"
diff --git a/tools/gyp/test/mac/postbuild-defaults/test.gyp b/tools/gyp/test/mac/postbuild-defaults/test.gyp
deleted file mode 100644
index be0a075..0000000
--- a/tools/gyp/test/mac/postbuild-defaults/test.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'main.c', ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild that calls defaults',
-          'action': [
-            './postbuild-defaults.sh',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/postbuild-fail/file.c b/tools/gyp/test/mac/postbuild-fail/file.c
deleted file mode 100644
index 91695b1..0000000
--- a/tools/gyp/test/mac/postbuild-fail/file.c
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// That's right, this is copyrighted.
-void f() {}
diff --git a/tools/gyp/test/mac/postbuild-fail/postbuild-fail.sh b/tools/gyp/test/mac/postbuild-fail/postbuild-fail.sh
deleted file mode 100755
index dc1a60d..0000000
--- a/tools/gyp/test/mac/postbuild-fail/postbuild-fail.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/bash
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-exit 1
diff --git a/tools/gyp/test/mac/postbuild-fail/test.gyp b/tools/gyp/test/mac/postbuild-fail/test.gyp
deleted file mode 100644
index e63283d..0000000
--- a/tools/gyp/test/mac/postbuild-fail/test.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'nonbundle',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild Fail',
-          'action': [ './postbuild-fail.sh', ],
-        },
-        {
-          'postbuild_name': 'Runs after failing postbuild',
-          'action': [ './touch-static.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'bundle',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild Fail',
-          'action': [ './postbuild-fail.sh', ],
-        },
-        {
-          'postbuild_name': 'Runs after failing postbuild',
-          'action': [ './touch-dynamic.sh', ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/postbuild-fail/touch-dynamic.sh b/tools/gyp/test/mac/postbuild-fail/touch-dynamic.sh
deleted file mode 100755
index a388a64..0000000
--- a/tools/gyp/test/mac/postbuild-fail/touch-dynamic.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-touch "${BUILT_PRODUCTS_DIR}/dynamic_touch"
diff --git a/tools/gyp/test/mac/postbuild-fail/touch-static.sh b/tools/gyp/test/mac/postbuild-fail/touch-static.sh
deleted file mode 100755
index 97ecaa6..0000000
--- a/tools/gyp/test/mac/postbuild-fail/touch-static.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-touch "${BUILT_PRODUCTS_DIR}/static_touch"
diff --git a/tools/gyp/test/mac/postbuild-multiple-configurations/main.c b/tools/gyp/test/mac/postbuild-multiple-configurations/main.c
deleted file mode 100644
index 21c1963..0000000
--- a/tools/gyp/test/mac/postbuild-multiple-configurations/main.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-int main() {}
diff --git a/tools/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh b/tools/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh
deleted file mode 100755
index b6170cf..0000000
--- a/tools/gyp/test/mac/postbuild-multiple-configurations/postbuild-touch-file.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-touch "${BUILT_PRODUCTS_DIR}/postbuild-file"
diff --git a/tools/gyp/test/mac/postbuild-multiple-configurations/test.gyp b/tools/gyp/test/mac/postbuild-multiple-configurations/test.gyp
deleted file mode 100644
index c350b20..0000000
--- a/tools/gyp/test/mac/postbuild-multiple-configurations/test.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'target_defaults': {
-    'configurations': {
-       'Debug': {},
-       'Release': {},
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'random_target',
-      'type': 'executable',
-      'sources': [ 'main.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Touch a file.',
-          'action': [
-            './postbuild-touch-file.sh',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/postbuild-static-library/empty.c b/tools/gyp/test/mac/postbuild-static-library/empty.c
deleted file mode 100644
index 9554336..0000000
--- a/tools/gyp/test/mac/postbuild-static-library/empty.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-void f() {}
diff --git a/tools/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh b/tools/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh
deleted file mode 100755
index 37de4de..0000000
--- a/tools/gyp/test/mac/postbuild-static-library/postbuild-touch-file.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-touch "${BUILT_PRODUCTS_DIR}/$1"
diff --git a/tools/gyp/test/mac/postbuild-static-library/test.gyp b/tools/gyp/test/mac/postbuild-static-library/test.gyp
deleted file mode 100644
index 9ef55a0..0000000
--- a/tools/gyp/test/mac/postbuild-static-library/test.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'my_lib',
-      'type': 'static_library',
-      'sources': [ 'empty.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild that touches a file',
-          'action': [
-            './postbuild-touch-file.sh', 'postbuild-file'
-          ],
-        },
-      ],
-    },
-
-    {
-      'target_name': 'my_sourceless_lib',
-      'type': 'static_library',
-      'dependencies': [ 'my_lib' ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild that touches a file',
-          'action': [
-            './postbuild-touch-file.sh', 'postbuild-file-sourceless'
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/postbuilds/copy.sh b/tools/gyp/test/mac/postbuilds/copy.sh
deleted file mode 100755
index ecad038..0000000
--- a/tools/gyp/test/mac/postbuilds/copy.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-cp "$@"
diff --git a/tools/gyp/test/mac/postbuilds/file.c b/tools/gyp/test/mac/postbuilds/file.c
deleted file mode 100644
index 653e71f..0000000
--- a/tools/gyp/test/mac/postbuilds/file.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-void f() {}
diff --git a/tools/gyp/test/mac/postbuilds/file_g.c b/tools/gyp/test/mac/postbuilds/file_g.c
deleted file mode 100644
index 0f7849d..0000000
--- a/tools/gyp/test/mac/postbuilds/file_g.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-void g() {}
diff --git a/tools/gyp/test/mac/postbuilds/file_h.c b/tools/gyp/test/mac/postbuilds/file_h.c
deleted file mode 100644
index 521d1f4..0000000
--- a/tools/gyp/test/mac/postbuilds/file_h.c
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-void h() {}
diff --git a/tools/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh b/tools/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh
deleted file mode 100755
index c623c8b..0000000
--- a/tools/gyp/test/mac/postbuilds/script/shared_library_postbuild.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-lib="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
-nm ${lib} > /dev/null  # Just make sure this works.
-
-pattern="${1}"
-
-if [ $pattern != "a|b" ]; then
-  echo "Parameter quoting is broken"
-  exit 1
-fi
-
-if [ "${2}" != "arg with spaces" ]; then
-  echo "Parameter space escaping is broken"
-  exit 1
-fi
-
-touch "${lib}"_touch
diff --git a/tools/gyp/test/mac/postbuilds/script/static_library_postbuild.sh b/tools/gyp/test/mac/postbuilds/script/static_library_postbuild.sh
deleted file mode 100755
index 2bf09b3..0000000
--- a/tools/gyp/test/mac/postbuilds/script/static_library_postbuild.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-lib="${BUILT_PRODUCTS_DIR}/${FULL_PRODUCT_NAME}"
-nm ${lib} > /dev/null  # Just make sure this works.
-
-pattern="${1}"
-
-if [ $pattern != "a|b" ]; then
-  echo "Parameter quote escaping is broken"
-  exit 1
-fi
-
-if [ "${2}" != "arg with spaces" ]; then
-  echo "Parameter space escaping is broken"
-  exit 1
-fi
-
-touch "${lib}"_touch.a
diff --git a/tools/gyp/test/mac/postbuilds/subdirectory/copied_file.txt b/tools/gyp/test/mac/postbuilds/subdirectory/copied_file.txt
deleted file mode 100644
index a634f85..0000000
--- a/tools/gyp/test/mac/postbuilds/subdirectory/copied_file.txt
+++ /dev/null
@@ -1 +0,0 @@
-This file should be copied to the products dir.
diff --git a/tools/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp b/tools/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp
deleted file mode 100644
index 6d4f239..0000000
--- a/tools/gyp/test/mac/postbuilds/subdirectory/nested_target.gyp
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'nest_el',
-      'type': 'static_library',
-      'sources': [ '../file_g.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Static library postbuild',
-          'variables': {
-            'some_regex': 'a|b',
-          },
-          'action': [
-            '../script/static_library_postbuild.sh',
-            '<(some_regex)',
-            'arg with spaces',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'nest_dyna',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ '../file_h.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Dynamic library postbuild',
-          'variables': {
-            'some_regex': 'a|b',
-          },
-          'action': [
-            '../script/shared_library_postbuild.sh',
-            '<(some_regex)',
-            'arg with spaces',
-          ],
-        },
-        {
-          'postbuild_name': 'Test paths relative to gyp file',
-          'action': [
-            '../copy.sh',
-            './copied_file.txt',
-            '${BUILT_PRODUCTS_DIR}/copied_file_2.txt',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/postbuilds/test.gyp b/tools/gyp/test/mac/postbuilds/test.gyp
deleted file mode 100644
index 7c0b523..0000000
--- a/tools/gyp/test/mac/postbuilds/test.gyp
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'el',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Static library postbuild',
-          'variables': {
-            'some_regex': 'a|b',
-          },
-          'action': [
-            'script/static_library_postbuild.sh',
-            '<(some_regex)',
-            'arg with spaces',
-          ],
-        },
-        {
-          'postbuild_name': 'Test variable in gyp file',
-          'action': [
-            'cp',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch.a',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'dyna',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'dependencies': [
-        'subdirectory/nested_target.gyp:nest_dyna',
-        'subdirectory/nested_target.gyp:nest_el',
-      ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Dynamic library postbuild',
-          'variables': {
-            'some_regex': 'a|b',
-          },
-          'action': [
-            'script/shared_library_postbuild.sh',
-            '<(some_regex)',
-            'arg with spaces',
-          ],
-        },
-        {
-          'postbuild_name': 'Test variable in gyp file',
-          'action': [
-            'cp',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch',
-          ],
-        },
-        {
-          'postbuild_name': 'Test paths relative to gyp file',
-          'action': [
-            './copy.sh',
-            'subdirectory/copied_file.txt',
-            '${BUILT_PRODUCTS_DIR}',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'dyna_standalone',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Test variable in gyp file',
-          'action': [
-            'cp',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-            '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}_gyp_touch.dylib',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'EmptyBundle',
-      'product_extension': 'bundle',
-      'type': 'executable',
-      'mac_bundle': 1,
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/prefixheader/file.c b/tools/gyp/test/mac/prefixheader/file.c
deleted file mode 100644
index d0b39d1..0000000
--- a/tools/gyp/test/mac/prefixheader/file.c
+++ /dev/null
@@ -1 +0,0 @@
-MyInt f() { return 0; }
diff --git a/tools/gyp/test/mac/prefixheader/file.cc b/tools/gyp/test/mac/prefixheader/file.cc
deleted file mode 100644
index d0b39d1..0000000
--- a/tools/gyp/test/mac/prefixheader/file.cc
+++ /dev/null
@@ -1 +0,0 @@
-MyInt f() { return 0; }
diff --git a/tools/gyp/test/mac/prefixheader/file.m b/tools/gyp/test/mac/prefixheader/file.m
deleted file mode 100644
index d0b39d1..0000000
--- a/tools/gyp/test/mac/prefixheader/file.m
+++ /dev/null
@@ -1 +0,0 @@
-MyInt f() { return 0; }
diff --git a/tools/gyp/test/mac/prefixheader/file.mm b/tools/gyp/test/mac/prefixheader/file.mm
deleted file mode 100644
index d0b39d1..0000000
--- a/tools/gyp/test/mac/prefixheader/file.mm
+++ /dev/null
@@ -1 +0,0 @@
-MyInt f() { return 0; }
diff --git a/tools/gyp/test/mac/prefixheader/header.h b/tools/gyp/test/mac/prefixheader/header.h
deleted file mode 100644
index 0716e50..0000000
--- a/tools/gyp/test/mac/prefixheader/header.h
+++ /dev/null
@@ -1 +0,0 @@
-typedef int MyInt;
diff --git a/tools/gyp/test/mac/prefixheader/test.gyp b/tools/gyp/test/mac/prefixheader/test.gyp
deleted file mode 100644
index 7e6b1af..0000000
--- a/tools/gyp/test/mac/prefixheader/test.gyp
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'prefix_header_c',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-      },
-    },
-    {
-      'target_name': 'precompiled_prefix_header_c',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-
-    {
-      'target_name': 'prefix_header_cc',
-      'type': 'static_library',
-      'sources': [ 'file.cc', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-      },
-    },
-    {
-      'target_name': 'precompiled_prefix_header_cc',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.cc', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-
-    {
-      'target_name': 'prefix_header_m',
-      'type': 'static_library',
-      'sources': [ 'file.m', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-      },
-    },
-    {
-      'target_name': 'precompiled_prefix_header_m',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.m', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-
-    {
-      'target_name': 'prefix_header_mm',
-      'type': 'static_library',
-      'sources': [ 'file.mm', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-      },
-    },
-    {
-      'target_name': 'precompiled_prefix_header_mm',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.mm', ],
-      'xcode_settings': {
-        'GCC_PREFIX_HEADER': 'header.h',
-        'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/rebuild/TestApp-Info.plist b/tools/gyp/test/mac/rebuild/TestApp-Info.plist
deleted file mode 100644
index 98fd515..0000000
--- a/tools/gyp/test/mac/rebuild/TestApp-Info.plist
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/rebuild/delay-touch.sh b/tools/gyp/test/mac/rebuild/delay-touch.sh
deleted file mode 100755
index 7caf105..0000000
--- a/tools/gyp/test/mac/rebuild/delay-touch.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-set -e
-
-sleep 1  # mtime resolution is 1 sec on unix.
-touch "$1"
diff --git a/tools/gyp/test/mac/rebuild/empty.c b/tools/gyp/test/mac/rebuild/empty.c
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/rebuild/empty.c
+++ /dev/null
diff --git a/tools/gyp/test/mac/rebuild/main.c b/tools/gyp/test/mac/rebuild/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/rebuild/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/rebuild/test.gyp b/tools/gyp/test/mac/rebuild/test.gyp
deleted file mode 100644
index 15b4e4e..0000000
--- a/tools/gyp/test/mac/rebuild/test.gyp
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp-Info.plist',
-      },
-    },
-    {
-      'target_name': 'test_app_postbuilds',
-      'product_name': 'Test App 2',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-      ],
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'TestApp-Info.plist',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild that touches the app binary',
-          'action': [
-            './delay-touch.sh', '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'test_framework_postbuilds',
-      'product_name': 'Test Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [
-        'empty.c',
-      ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'Postbuild that touches the framework binary',
-          'action': [
-            './delay-touch.sh', '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/rpath/file.c b/tools/gyp/test/mac/rpath/file.c
deleted file mode 100644
index 56757a7..0000000
--- a/tools/gyp/test/mac/rpath/file.c
+++ /dev/null
@@ -1 +0,0 @@
-void f() {}
diff --git a/tools/gyp/test/mac/rpath/main.c b/tools/gyp/test/mac/rpath/main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/rpath/main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/rpath/test.gyp b/tools/gyp/test/mac/rpath/test.gyp
deleted file mode 100644
index 7255cb7..0000000
--- a/tools/gyp/test/mac/rpath/test.gyp
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'default_rpath',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-    },
-    {
-      'target_name': 'explicit_rpath',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'LD_RUNPATH_SEARCH_PATHS': ['@executable_path/.'],
-      },
-    },
-    {
-      'target_name': 'explicit_rpaths_escaped',
-      'type': 'shared_library',
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        # Xcode requires spaces to be escaped, else it ends up adding two
-        # independent rpaths.
-        'LD_RUNPATH_SEARCH_PATHS': ['First\\ rpath', 'Second\\ rpath'],
-      },
-    },
-    {
-      'target_name': 'explicit_rpaths_bundle',
-      'product_name': 'My Framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c' ],
-      'xcode_settings': {
-        'LD_RUNPATH_SEARCH_PATHS': ['@loader_path/.'],
-      },
-    },
-    {
-      'target_name': 'executable',
-      'type': 'executable',
-      'sources': [ 'main.c' ],
-      'xcode_settings': {
-        'LD_RUNPATH_SEARCH_PATHS': ['@executable_path/.'],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/sdkroot/file.cc b/tools/gyp/test/mac/sdkroot/file.cc
deleted file mode 100644
index 13ae971..0000000
--- a/tools/gyp/test/mac/sdkroot/file.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-#include <map>
-using std::map;
-
-int main() {
-}
diff --git a/tools/gyp/test/mac/sdkroot/test.gyp b/tools/gyp/test/mac/sdkroot/test.gyp
deleted file mode 100644
index 2fc11a0..0000000
--- a/tools/gyp/test/mac/sdkroot/test.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'mytarget',
-      'type': 'executable',
-      'sources': [ 'file.cc', ],
-      'xcode_settings': {
-        'SDKROOT': 'macosx%s',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_shorthand.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'absolute',
-      'type': 'executable',
-      'sources': [ 'file.cc', ],
-      'xcode_settings': {
-        'SDKROOT': '<(sdk_path)',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_shorthand.sh', ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/sdkroot/test_shorthand.sh b/tools/gyp/test/mac/sdkroot/test_shorthand.sh
deleted file mode 100755
index ac4ac22..0000000
--- a/tools/gyp/test/mac/sdkroot/test_shorthand.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-found=false
-for sdk in 10.6 10.7 10.8 10.9 ; do
-  if expected=$(xcodebuild -version -sdk macosx$sdk Path 2>/dev/null) ; then
-    found=true
-    break
-  fi
-done
-if ! $found ; then
-  echo >&2 "cannot find installed SDK"
-  exit 1
-fi
-
-test $SDKROOT = $expected
diff --git a/tools/gyp/test/mac/sourceless-module/empty.c b/tools/gyp/test/mac/sourceless-module/empty.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/mac/sourceless-module/empty.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/mac/sourceless-module/empty.txt b/tools/gyp/test/mac/sourceless-module/empty.txt
deleted file mode 100644
index 139597f..0000000
--- a/tools/gyp/test/mac/sourceless-module/empty.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/tools/gyp/test/mac/sourceless-module/fun.c b/tools/gyp/test/mac/sourceless-module/fun.c
deleted file mode 100644
index d64ff8c..0000000
--- a/tools/gyp/test/mac/sourceless-module/fun.c
+++ /dev/null
@@ -1 +0,0 @@
-int f() { return 42; }
diff --git a/tools/gyp/test/mac/sourceless-module/test.gyp b/tools/gyp/test/mac/sourceless-module/test.gyp
deleted file mode 100644
index cbbe63d..0000000
--- a/tools/gyp/test/mac/sourceless-module/test.gyp
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'empty_bundle',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-    },
-    {
-      'target_name': 'resource_bundle',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'actions': [
-        {
-          'action_name': 'Add Resource',
-          'inputs': [],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
-          ],
-          'action': [
-            'touch', '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
-          ],
-          'process_outputs_as_mac_bundle_resources': 1,
-        },
-      ],
-    },
-    {
-      'target_name': 'dependent_on_resource_bundle',
-      'type': 'executable',
-      'sources': [ 'empty.c' ],
-      'dependencies': [
-        'resource_bundle',
-      ],
-    },
-
-    {
-      'target_name': 'alib',
-      'type': 'static_library',
-      'sources': [ 'fun.c' ]
-    },
-    { # No sources, but depends on a static_library so must be linked.
-      'target_name': 'resource_framework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'dependencies': [
-        'alib',
-      ],
-      'actions': [
-        {
-          'action_name': 'Add Resource',
-          'inputs': [],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
-          ],
-          'action': [
-            'touch', '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
-          ],
-          'process_outputs_as_mac_bundle_resources': 1,
-        },
-      ],
-    },
-    {
-      'target_name': 'dependent_on_resource_framework',
-      'type': 'executable',
-      'sources': [ 'empty.c' ],
-      'dependencies': [
-        'resource_framework',
-      ],
-    },
-
-    { # No actions, but still have resources.
-      'target_name': 'mac_resource_bundle_no_actions',
-      'product_extension': 'bundle',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'mac_bundle_resources': [
-        'empty.txt',
-      ],
-    },
-    {
-      'target_name': 'bundle_dependent_on_resource_bundle_no_actions',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'empty.c' ],
-      'dependencies': [
-        'mac_resource_bundle_no_actions',
-      ],
-      'mac_bundle_resources': [
-        '<(PRODUCT_DIR)/mac_resource_bundle_no_actions.bundle',
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/strip/file.c b/tools/gyp/test/mac/strip/file.c
deleted file mode 100644
index a4c504d..0000000
--- a/tools/gyp/test/mac/strip/file.c
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-static void the_static_function() {}
-__attribute__((used)) void the_used_function() {}
-
-__attribute__((visibility("hidden"))) __attribute__((used))
-    void the_hidden_function() {}
-__attribute__((visibility("default"))) __attribute__((used))
-    void the_visible_function() {}
-
-extern const int eci;
-__attribute__((used)) int i;
-__attribute__((used)) const int ci = 34623;
-
-void the_function() {
-  the_static_function();
-  the_used_function();
-  the_hidden_function();
-  the_visible_function();
-}
diff --git a/tools/gyp/test/mac/strip/main.c b/tools/gyp/test/mac/strip/main.c
deleted file mode 100644
index b2291a6..0000000
--- a/tools/gyp/test/mac/strip/main.c
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-static void the_static_function() {}
-__attribute__((used)) void the_used_function() {}
-
-__attribute__((visibility("hidden"))) __attribute__((used))
-void the_hidden_function() {}
-__attribute__((visibility("default"))) __attribute__((used))
-void the_visible_function() {}
-
-void the_function() {}
-
-extern const int eci;
-__attribute__((used)) int i;
-__attribute__((used)) const int ci = 34623;
-
-int main() {
-  the_function();
-  the_static_function();
-  the_used_function();
-  the_hidden_function();
-  the_visible_function();
-}
diff --git a/tools/gyp/test/mac/strip/strip.saves b/tools/gyp/test/mac/strip/strip.saves
deleted file mode 100644
index b60ca62..0000000
--- a/tools/gyp/test/mac/strip/strip.saves
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file would list symbols that should not be stripped.
diff --git a/tools/gyp/test/mac/strip/subdirectory/nested_file.c b/tools/gyp/test/mac/strip/subdirectory/nested_file.c
deleted file mode 100644
index 50daa6c..0000000
--- a/tools/gyp/test/mac/strip/subdirectory/nested_file.c
+++ /dev/null
@@ -1 +0,0 @@
-void nested_f() {}
diff --git a/tools/gyp/test/mac/strip/subdirectory/nested_strip.saves b/tools/gyp/test/mac/strip/subdirectory/nested_strip.saves
deleted file mode 100644
index d434c0e..0000000
--- a/tools/gyp/test/mac/strip/subdirectory/nested_strip.saves
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file would list symbols that should not be stripped.
diff --git a/tools/gyp/test/mac/strip/subdirectory/subdirectory.gyp b/tools/gyp/test/mac/strip/subdirectory/subdirectory.gyp
deleted file mode 100644
index 5d0d190..0000000
--- a/tools/gyp/test/mac/strip/subdirectory/subdirectory.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'nested_strip_save',
-      'type': 'shared_library',
-      'sources': [ 'nested_file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
-        'CHROMIUM_STRIP_SAVE_FILE': 'nested_strip.saves',
-      },
-    },
-    {
-      'target_name': 'nested_strip_save_postbuild',
-      'type': 'shared_library',
-      'sources': [ 'nested_file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
-        'CHROMIUM_STRIP_SAVE_FILE': 'nested_strip.saves',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'Action that reads CHROMIUM_STRIP_SAVE_FILE',
-          'action': [
-            './test_reading_save_file_from_postbuild.sh',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh b/tools/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh
deleted file mode 100755
index 9769436..0000000
--- a/tools/gyp/test/mac/strip/subdirectory/test_reading_save_file_from_postbuild.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-set -e
-
-test -f ${CHROMIUM_STRIP_SAVE_FILE}
diff --git a/tools/gyp/test/mac/strip/test-defaults.gyp b/tools/gyp/test/mac/strip/test-defaults.gyp
deleted file mode 100644
index e688b95..0000000
--- a/tools/gyp/test/mac/strip/test-defaults.gyp
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-  ],
-  'target_defaults': {
-    'xcode_settings': {
-      'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-      'DEPLOYMENT_POSTPROCESSING': 'YES',
-      'STRIP_INSTALLED_PRODUCT': 'YES',
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'single_dylib',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-    },
-    {
-      'target_name': 'single_so',
-      'type': 'loadable_module',
-      'sources': [ 'file.c', ],
-    },
-    {
-      'target_name': 'single_exe',
-      'type': 'executable',
-      'sources': [ 'main.c', ],
-    },
-
-    {
-      'target_name': 'bundle_dylib',
-      'type': 'shared_library',
-      'mac_bundle': '1',
-      'sources': [ 'file.c', ],
-    },
-    {
-      'target_name': 'bundle_so',
-      'type': 'loadable_module',
-      'mac_bundle': '1',
-      'sources': [ 'file.c', ],
-    },
-    {
-      'target_name': 'bundle_exe',
-      'type': 'executable',
-      'mac_bundle': '1',
-      'sources': [ 'main.c', ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/strip/test.gyp b/tools/gyp/test/mac/strip/test.gyp
deleted file mode 100644
index 2558aa9..0000000
--- a/tools/gyp/test/mac/strip/test.gyp
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# These xcode_settings affect stripping:
-# "Deployment postprocessing involves stripping the binary, and setting
-# its file mode, owner, and group."
-#'DEPLOYMENT_POSTPROCESSING': 'YES',
-
-# "Specifies whether to strip symbol information from the binary.
-# Prerequisite: $DEPLOYMENT_POSTPROCESSING = YES" "Default Value: 'NO'"
-#'STRIP_INSTALLED_PRODUCT': 'YES',
-
-# "Values:
-# * all: Strips the binary completely, removing the symbol table and
-#        relocation information
-# * non-global: Strips nonglobal symbols but saves external symbols.
-# * debugging: Strips debugging symbols but saves local and global
-#              symbols."
-# (maps to no flag, -x, -S in that order)
-#'STRIP_STYLE': 'non-global',
-
-# "Additional strip flags"
-#'STRIPFLAGS': '-c',
-
-# "YES: Copied binaries are stripped of debugging symbols. This does
-# not cause the binary produced by the linker to be stripped. Use
-# 'STRIP_INSTALLED_PRODUCT (Strip Linked Product)' to have the linker
-# strip the binary."
-#'COPY_PHASE_STRIP': 'NO',
-{
-  'targets': [
-    {
-      'target_name': 'no_postprocess',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'NO',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-      },
-    },
-    {
-      'target_name': 'no_strip',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'NO',
-      },
-    },
-    {
-      'target_name': 'strip_all',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIP_STYLE': 'all',
-      },
-    },
-    {
-      'target_name': 'strip_nonglobal',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIP_STYLE': 'non-global',
-      },
-    },
-    {
-      'target_name': 'strip_debugging',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIP_STYLE': 'debugging',
-      },
-    },
-    {
-      'target_name': 'strip_all_custom_flags',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIP_STYLE': 'all',
-        'STRIPFLAGS': '-c',
-      },
-    },
-    {
-      'target_name': 'strip_all_bundle',
-      'type': 'shared_library',
-      'mac_bundle': '1',
-      'sources': [ 'file.c', ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIP_STYLE': 'all',
-      },
-    },
-    {
-      'target_name': 'strip_save',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'dependencies': [
-        'subdirectory/subdirectory.gyp:nested_strip_save',
-        'subdirectory/subdirectory.gyp:nested_strip_save_postbuild',
-      ],
-      'xcode_settings': {
-        'DEPLOYMENT_POSTPROCESSING': 'YES',
-        'STRIP_INSTALLED_PRODUCT': 'YES',
-        'STRIPFLAGS': '-s $(CHROMIUM_STRIP_SAVE_FILE)',
-        'CHROMIUM_STRIP_SAVE_FILE': 'strip.saves',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/swift-library/Info.plist b/tools/gyp/test/mac/swift-library/Info.plist
deleted file mode 100644
index 804990c..0000000
--- a/tools/gyp/test/mac/swift-library/Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>CFBundleDevelopmentRegion</key>
-  <string>English</string>
-  <key>CFBundleExecutable</key>
-  <string>${EXECUTABLE_NAME}</string>
-  <key>CFBundleIconFile</key>
-  <string></string>
-  <key>CFBundleIdentifier</key>
-  <string>com.yourcompany.${PRODUCT_NAME:identifier}</string>
-  <key>CFBundleInfoDictionaryVersion</key>
-  <string>6.0</string>
-  <key>CFBundleName</key>
-  <string>${PRODUCT_NAME}</string>
-  <key>CFBundlePackageType</key>
-  <string>FMWK</string>
-  <key>CFBundleShortVersionString</key>
-  <string>1.0</string>
-  <key>CFBundleSignature</key>
-  <string>????</string>
-  <key>CFBundleVersion</key>
-  <string>1</string>
-  <key>NSPrincipalClass</key>
-  <string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/swift-library/file.swift b/tools/gyp/test/mac/swift-library/file.swift
deleted file mode 100644
index 88db7da..0000000
--- a/tools/gyp/test/mac/swift-library/file.swift
+++ /dev/null
@@ -1,9 +0,0 @@
-import Foundation
-
-public class GypSwiftTest {
-  let myProperty = false
-
-  init() {
-    self.myProperty = true
-  }
-}
\ No newline at end of file
diff --git a/tools/gyp/test/mac/swift-library/test.gyp b/tools/gyp/test/mac/swift-library/test.gyp
deleted file mode 100644
index 373a677..0000000
--- a/tools/gyp/test/mac/swift-library/test.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'SwiftFramework',
-      'product_name': 'SwiftFramework',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'CODE_SIGNING_REQUIRED': 'NO',
-        'CONFIGURATION_BUILD_DIR':'build/Default',
-      },
-      'sources': [
-        'file.swift',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/type_envvars/file.c b/tools/gyp/test/mac/type_envvars/file.c
deleted file mode 100644
index 9cddaf1..0000000
--- a/tools/gyp/test/mac/type_envvars/file.c
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void f() {}
-int main() {}
diff --git a/tools/gyp/test/mac/type_envvars/test.gyp b/tools/gyp/test/mac/type_envvars/test.gyp
deleted file mode 100644
index 4656700..0000000
--- a/tools/gyp/test/mac/type_envvars/test.gyp
+++ /dev/null
@@ -1,100 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'my_app',
-      'product_name': 'My App',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_bundle_executable.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'bundle_loadable_module',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_bundle_loadable_module.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'bundle_shared_library',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_bundle_shared_library.sh', ],
-        },
-      ],
-    },
-    # Types 'static_library' and 'none' can't exist as bundles.
-
-    {
-      'target_name': 'nonbundle_executable',
-      'type': 'executable',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_nonbundle_executable.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'nonbundle_loadable_module',
-      'type': 'loadable_module',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_nonbundle_loadable_module.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'nonbundle_shared_library',
-      'type': 'shared_library',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_nonbundle_shared_library.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'nonbundle_static_library',
-      'type': 'static_library',
-      'sources': [ 'file.c', ],
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_nonbundle_static_library.sh', ],
-        },
-      ],
-    },
-    {
-      'target_name': 'nonbundle_none',
-      'type': 'none',
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_nonbundle_none.sh', ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/type_envvars/test_bundle_executable.sh b/tools/gyp/test/mac/type_envvars/test_bundle_executable.sh
deleted file mode 100755
index 5cd740c..0000000
--- a/tools/gyp/test/mac/type_envvars/test_bundle_executable.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = mh_execute
-test $PRODUCT_TYPE = com.apple.product-type.application
-test "${PRODUCT_NAME}" = "My App"
-test "${FULL_PRODUCT_NAME}" = "My App.app"
-
-test "${EXECUTABLE_NAME}" = "My App"
-test "${EXECUTABLE_PATH}" = "My App.app/Contents/MacOS/My App"
-test "${WRAPPER_NAME}" = "My App.app"
-
-[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
-[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh b/tools/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh
deleted file mode 100755
index ea985f5..0000000
--- a/tools/gyp/test/mac/type_envvars/test_bundle_loadable_module.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = mh_bundle
-test $PRODUCT_TYPE = com.apple.product-type.bundle
-test $PRODUCT_NAME = bundle_loadable_module
-test $FULL_PRODUCT_NAME = bundle_loadable_module.bundle
-
-test $EXECUTABLE_NAME = bundle_loadable_module
-test $EXECUTABLE_PATH = \
-    "bundle_loadable_module.bundle/Contents/MacOS/bundle_loadable_module"
-test $WRAPPER_NAME = bundle_loadable_module.bundle
-
-[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
-[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_bundle_shared_library.sh b/tools/gyp/test/mac/type_envvars/test_bundle_shared_library.sh
deleted file mode 100755
index bf49d45..0000000
--- a/tools/gyp/test/mac/type_envvars/test_bundle_shared_library.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = mh_dylib
-test $PRODUCT_TYPE = com.apple.product-type.framework
-test $PRODUCT_NAME = bundle_shared_library
-test $FULL_PRODUCT_NAME = bundle_shared_library.framework
-
-test $EXECUTABLE_NAME = bundle_shared_library
-test $EXECUTABLE_PATH = \
-    "bundle_shared_library.framework/Versions/A/bundle_shared_library"
-test $WRAPPER_NAME = bundle_shared_library.framework
-
-test $DYLIB_INSTALL_NAME_BASE = "/Library/Frameworks"
-test $LD_DYLIB_INSTALL_NAME = \
-    "/Library/Frameworks/bundle_shared_library.framework/Versions/A/bundle_shared_library"
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_check_sdkroot.sh b/tools/gyp/test/mac/type_envvars/test_check_sdkroot.sh
deleted file mode 100755
index 1297dbe..0000000
--- a/tools/gyp/test/mac/type_envvars/test_check_sdkroot.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-# `xcodebuild -version` output looks like
-#    Xcode 4.6.3
-#    Build version 4H1503
-# or like
-#    Xcode 4.2
-#    Build version 4C199
-# or like
-#    Xcode 3.2.6
-#    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
-#    BuildVersion: 10M2518
-# Convert that to '0463', '0420' and '0326' respectively.
-function xcodeversion() {
-  xcodebuild -version | awk '/Xcode ([0-9]+\.[0-9]+(\.[0-9]+)?)/ {
-    version = $2
-    gsub(/\./, "", version)
-    if (length(version) < 3) {
-      version = version "0"
-    }
-    if (length(version) < 4) {
-      version = "0" version
-    }
-  }
-  END { print version }'
-}
-
-# Returns true if |string1| is smaller than |string2|.
-# This function assumes that both strings represent Xcode version numbers
-# as returned by |xcodeversion|.
-function smaller() {
-  local min="$(echo -ne "${1}\n${2}\n" | sort -n | head -n1)"
-  test "${min}" != "${2}"
-}
-
-if [[ "$(xcodeversion)" < "0500" ]]; then
-  # Xcode version is older than 5.0, check that SDKROOT is set but empty.
-  [[ -z "${SDKROOT}" && -z "${SDKROOT-_}" ]]
-else
-  # Xcode version is newer than 5.0, check that SDKROOT is set.
-  [[ "${SDKROOT}" == "$(xcodebuild -version -sdk '' Path)" ]]
-fi
diff --git a/tools/gyp/test/mac/type_envvars/test_nonbundle_executable.sh b/tools/gyp/test/mac/type_envvars/test_nonbundle_executable.sh
deleted file mode 100755
index 25afcbe..0000000
--- a/tools/gyp/test/mac/type_envvars/test_nonbundle_executable.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-# For some reason, Xcode doesn't set MACH_O_TYPE for non-bundle executables.
-# Check for "not set", not just "empty":
-[[ ! $MACH_O_TYPE && ${MACH_O_TYPE-_} ]]
-test $PRODUCT_TYPE = com.apple.product-type.tool
-test $PRODUCT_NAME = nonbundle_executable
-test $FULL_PRODUCT_NAME = nonbundle_executable
-
-test $EXECUTABLE_NAME = nonbundle_executable
-test $EXECUTABLE_PATH = nonbundle_executable
-[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
-
-[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
-[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh b/tools/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh
deleted file mode 100755
index 9b58426..0000000
--- a/tools/gyp/test/mac/type_envvars/test_nonbundle_loadable_module.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = mh_bundle
-test $PRODUCT_TYPE = com.apple.product-type.library.dynamic
-test $PRODUCT_NAME = nonbundle_loadable_module
-test $FULL_PRODUCT_NAME = nonbundle_loadable_module.so
-
-test $EXECUTABLE_NAME = nonbundle_loadable_module.so
-test $EXECUTABLE_PATH = nonbundle_loadable_module.so
-[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
-
-test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
-test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/nonbundle_loadable_module.so"
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_nonbundle_none.sh b/tools/gyp/test/mac/type_envvars/test_nonbundle_none.sh
deleted file mode 100755
index 871af1b..0000000
--- a/tools/gyp/test/mac/type_envvars/test_nonbundle_none.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-# Check for "not set", not just "empty":
-[[ ! $MACH_O_TYPE && ${MACH_O_TYPE-_} ]]
-[[ ! $PRODUCT_TYPE && ${PRODUCT_TYPE-_} ]]
-test $PRODUCT_NAME = nonbundle_none
-[[ ! $FULL_PRODUCT_NAME && ${FULL_PRODUCT_NAME-_} ]]
-
-[[ ! $EXECUTABLE_NAME && ${EXECUTABLE_NAME-_} ]]
-[[ ! $EXECUTABLE_PATH && ${EXECUTABLE_PATH-_} ]]
-[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
-
-[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
-[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh b/tools/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh
deleted file mode 100755
index cbb118b..0000000
--- a/tools/gyp/test/mac/type_envvars/test_nonbundle_shared_library.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = mh_dylib
-test $PRODUCT_TYPE = com.apple.product-type.library.dynamic
-test $PRODUCT_NAME = nonbundle_shared_library
-test $FULL_PRODUCT_NAME = libnonbundle_shared_library.dylib
-
-test $EXECUTABLE_NAME = libnonbundle_shared_library.dylib
-test $EXECUTABLE_PATH = libnonbundle_shared_library.dylib
-[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
-
-test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
-test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/libnonbundle_shared_library.dylib"
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh b/tools/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh
deleted file mode 100755
index 86c04a9..0000000
--- a/tools/gyp/test/mac/type_envvars/test_nonbundle_static_library.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-
-test $MACH_O_TYPE = staticlib
-test $PRODUCT_TYPE = com.apple.product-type.library.static
-test $PRODUCT_NAME = nonbundle_static_library
-test $FULL_PRODUCT_NAME = libnonbundle_static_library.a
-
-test $EXECUTABLE_NAME = libnonbundle_static_library.a
-test $EXECUTABLE_PATH = libnonbundle_static_library.a
-[[ ! $WRAPPER_NAME && ${WRAPPER_NAME-_} ]]
-
-[[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
-[[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
-
-"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/tools/gyp/test/mac/unicode-settings/file.cc b/tools/gyp/test/mac/unicode-settings/file.cc
deleted file mode 100644
index b2f9976..0000000
--- a/tools/gyp/test/mac/unicode-settings/file.cc
+++ /dev/null
@@ -1,2 +0,0 @@
-int main() {
-}
diff --git a/tools/gyp/test/mac/unicode-settings/test.gyp b/tools/gyp/test/mac/unicode-settings/test.gyp
deleted file mode 100644
index b331ae4..0000000
--- a/tools/gyp/test/mac/unicode-settings/test.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'myapp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [ 'file.cc', ],
-      'xcode_settings': {
-        'BUNDLE_DISPLAY_NAME': 'α\011',
-      },
-      'postbuilds': [
-        {
-          'postbuild_name': 'envtest',
-          'action': [ './test_bundle_display_name.sh', ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/unicode-settings/test_bundle_display_name.sh b/tools/gyp/test/mac/unicode-settings/test_bundle_display_name.sh
deleted file mode 100755
index 95dd626..0000000
--- a/tools/gyp/test/mac/unicode-settings/test_bundle_display_name.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-test "${BUNDLE_DISPLAY_NAME}" = 'α	'
diff --git a/tools/gyp/test/mac/xcode-env-order/Info.plist b/tools/gyp/test/mac/xcode-env-order/Info.plist
deleted file mode 100644
index e11f21e..0000000
--- a/tools/gyp/test/mac/xcode-env-order/Info.plist
+++ /dev/null
@@ -1,56 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>LSMinimumSystemVersion</key>
-	<string>${MACOSX_DEPLOYMENT_TARGET}</string>
-	<key>NSMainNibFile</key>
-	<string>MainMenu</string>
-	<key>NSPrincipalClass</key>
-	<string>NSApplication</string>
-
-	<key>BraceProcessedKey1</key>
-	<string>${BRACE_DEPENDENT_KEY1}</string>
-	<key>BraceProcessedKey2</key>
-	<string>${BRACE_DEPENDENT_KEY2}</string>
-	<key>BraceProcessedKey3</key>
-	<string>${BRACE_DEPENDENT_KEY3}</string>
-
-	<key>ParenProcessedKey1</key>
-	<string>${PAREN_DEPENDENT_KEY1}</string>
-	<key>ParenProcessedKey2</key>
-	<string>${PAREN_DEPENDENT_KEY2}</string>
-	<key>ParenProcessedKey3</key>
-	<string>${PAREN_DEPENDENT_KEY3}</string>
-
-	<key>BareProcessedKey1</key>
-	<string>${BARE_DEPENDENT_KEY1}</string>
-	<key>BareProcessedKey2</key>
-	<string>${BARE_DEPENDENT_KEY2}</string>
-	<key>BareProcessedKey3</key>
-	<string>${BARE_DEPENDENT_KEY3}</string>
-
-	<key>MixedProcessedKey</key>
-	<string>${MIXED_DEPENDENT_KEY}</string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/xcode-env-order/file.ext1 b/tools/gyp/test/mac/xcode-env-order/file.ext1
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/xcode-env-order/file.ext1
+++ /dev/null
diff --git a/tools/gyp/test/mac/xcode-env-order/file.ext2 b/tools/gyp/test/mac/xcode-env-order/file.ext2
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/xcode-env-order/file.ext2
+++ /dev/null
diff --git a/tools/gyp/test/mac/xcode-env-order/file.ext3 b/tools/gyp/test/mac/xcode-env-order/file.ext3
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/xcode-env-order/file.ext3
+++ /dev/null
diff --git a/tools/gyp/test/mac/xcode-env-order/main.c b/tools/gyp/test/mac/xcode-env-order/main.c
deleted file mode 100644
index 1bf4b2a..0000000
--- a/tools/gyp/test/mac/xcode-env-order/main.c
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/mac/xcode-env-order/test.gyp b/tools/gyp/test/mac/xcode-env-order/test.gyp
deleted file mode 100644
index 8f975f7..0000000
--- a/tools/gyp/test/mac/xcode-env-order/test.gyp
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'test_app',
-      'product_name': 'Test',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'main.c',
-        'file.ext1',
-        'file.ext2',
-        'file.ext3',
-      ],
-      # Env vars in copies.
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/${PRODUCT_NAME}-copy-brace',
-          'files': [ 'main.c', ],  # ${SOURCE_ROOT} doesn't work with xcode
-        },
-        {
-          'destination': '<(PRODUCT_DIR)/$(PRODUCT_NAME)-copy-paren',
-          'files': [ '$(SOURCE_ROOT)/main.c', ],
-        },
-        {
-          'destination': '<(PRODUCT_DIR)/$PRODUCT_NAME-copy-bare',
-          'files': [ 'main.c', ],  # $SOURCE_ROOT doesn't work with xcode
-        },
-      ],
-      # Env vars in actions. The $FOO's are here to test that env vars that
-      # aren't defined are handled in some way that doesn't break the build.
-      'actions': [
-        {
-          'action_name': 'Action copy braces ${PRODUCT_NAME} ${FOO}',
-          'description': 'Action copy braces ${PRODUCT_NAME} ${FOO}',
-          'inputs': [ '${SOURCE_ROOT}/main.c' ],
-          # Referencing ${PRODUCT_NAME} in action outputs doesn't work with
-          # the Xcode generator (PRODUCT_NAME expands to "Test Support").
-          'outputs': [ '<(PRODUCT_DIR)/action-copy-brace.txt' ],
-          'action': [ 'cp', '${SOURCE_ROOT}/main.c',
-                      '<(PRODUCT_DIR)/action-copy-brace.txt' ],
-        },
-        {
-          'action_name': 'Action copy parens $(PRODUCT_NAME) $(FOO)',
-          'description': 'Action copy parens $(PRODUCT_NAME) $(FOO)',
-          'inputs': [ '$(SOURCE_ROOT)/main.c' ],
-          # Referencing $(PRODUCT_NAME) in action outputs doesn't work with
-          # the Xcode generator (PRODUCT_NAME expands to "Test Support").
-          'outputs': [ '<(PRODUCT_DIR)/action-copy-paren.txt' ],
-          'action': [ 'cp', '$(SOURCE_ROOT)/main.c',
-                      '<(PRODUCT_DIR)/action-copy-paren.txt' ],
-        },
-        {
-          'action_name': 'Action copy bare $PRODUCT_NAME $FOO',
-          'description': 'Action copy bare $PRODUCT_NAME $FOO',
-          'inputs': [ '$SOURCE_ROOT/main.c' ],
-          # Referencing $PRODUCT_NAME in action outputs doesn't work with
-          # the Xcode generator (PRODUCT_NAME expands to "Test Support").
-          'outputs': [ '<(PRODUCT_DIR)/action-copy-bare.txt' ],
-          'action': [ 'cp', '$SOURCE_ROOT/main.c',
-                      '<(PRODUCT_DIR)/action-copy-bare.txt' ],
-        },
-      ],
-      # Env vars in xcode_settings.
-      'xcode_settings': {
-        'INFOPLIST_FILE': 'Info.plist',
-        'STRING_KEY': '/Source/Project',
-
-        'BRACE_DEPENDENT_KEY2': '${STRING_KEY}/${PRODUCT_NAME}',
-        'BRACE_DEPENDENT_KEY1': 'D:${BRACE_DEPENDENT_KEY2}',
-        'BRACE_DEPENDENT_KEY3': '${PRODUCT_TYPE}:${BRACE_DEPENDENT_KEY1}',
-
-        'PAREN_DEPENDENT_KEY2': '$(STRING_KEY)/$(PRODUCT_NAME)',
-        'PAREN_DEPENDENT_KEY1': 'D:$(PAREN_DEPENDENT_KEY2)',
-        'PAREN_DEPENDENT_KEY3': '$(PRODUCT_TYPE):$(PAREN_DEPENDENT_KEY1)',
-
-        'BARE_DEPENDENT_KEY2': '$STRING_KEY/$PRODUCT_NAME',
-        'BARE_DEPENDENT_KEY1': 'D:$BARE_DEPENDENT_KEY2',
-        'BARE_DEPENDENT_KEY3': '$PRODUCT_TYPE:$BARE_DEPENDENT_KEY1',
-
-        'MIXED_DEPENDENT_KEY': '${STRING_KEY}:$(PRODUCT_NAME):$MACH_O_TYPE',
-      },
-      # Env vars in rules. The $FOO's are here to test that env vars that
-      # aren't defined are handled in some way that doesn't break the build.
-      'rules': [
-        {
-          'rule_name': 'brace_rule',
-          'message': 'Rule braces ${PRODUCT_NAME} ${FOO} <(RULE_INPUT_NAME)',
-          'extension': 'ext1',
-          'inputs': [ '${SOURCE_ROOT}/main.c' ],
-          'outputs': [ '<(PRODUCT_DIR)/rule-copy-brace.txt' ],
-          'action': [ 'cp', '${SOURCE_ROOT}/main.c',
-                      '<(PRODUCT_DIR)/rule-copy-brace.txt' ],
-        },
-        {
-          'rule_name': 'paren_rule',
-          'message': 'Rule parens $(PRODUCT_NAME) $(FOO) <(RULE_INPUT_NAME)',
-          'extension': 'ext2',
-          'inputs': [ '$(SOURCE_ROOT)/main.c' ],
-          'outputs': [ '<(PRODUCT_DIR)/rule-copy-paren.txt' ],
-          'action': [ 'cp', '$(SOURCE_ROOT)/main.c',
-                      '<(PRODUCT_DIR)/rule-copy-paren.txt' ],
-        },
-        # TODO: Fails in xcode. Looks like a bug in the xcode generator though
-        #       (which uses makefiles for rules, and thinks $PRODUCT_NAME is
-        #       $(P)RODUCT_NAME).
-        #{
-        #  'rule_name': 'bare_rule',
-        #  'message': 'Rule copy bare $PRODUCT_NAME $FOO',
-        #  'extension': 'ext3',
-        #  'inputs': [ '$SOURCE_ROOT/main.c' ],
-        #  'outputs': [ '<(PRODUCT_DIR)/rule-copy-bare.txt' ],
-        #  'action': [ 'cp', '$SOURCE_ROOT/main.c',
-        #              '<(PRODUCT_DIR)/rule-copy-bare.txt' ],
-        #},
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/aliasing.cc b/tools/gyp/test/mac/xcode-gcc/aliasing.cc
deleted file mode 100644
index 16a41ef..0000000
--- a/tools/gyp/test/mac/xcode-gcc/aliasing.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-#include <stdio.h>
-
-void check(int* h, long* k) {
-  *h = 1;
-  *k = 0;
-  printf("%d\n", *h);
-}
-
-int main(void) {
-  long k;
-  check((int*)&k, &k);
-  return 0;
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/test-clang.gyp b/tools/gyp/test/mac/xcode-gcc/test-clang.gyp
deleted file mode 100644
index 9f4a98a..0000000
--- a/tools/gyp/test/mac/xcode-gcc/test-clang.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'make_global_settings': [
-    ['CC', '/usr/bin/clang'],
-    ['CXX', '/usr/bin/clang++'],
-  ],
-
-  'targets': [
-    {
-      'target_name': 'aliasing_yes',
-      'type': 'executable',
-      'sources': [ 'aliasing.cc', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'GCC_STRICT_ALIASING': 'YES',
-        'GCC_OPTIMIZATION_LEVEL': 2,
-      },
-    },
-    {
-      'target_name': 'aliasing_no',
-      'type': 'executable',
-      'sources': [ 'aliasing.cc', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'GCC_STRICT_ALIASING': 'NO',
-        'GCC_OPTIMIZATION_LEVEL': 2,
-      },
-    },
-    {
-      'target_name': 'aliasing_default',
-      'type': 'executable',
-      'sources': [ 'aliasing.cc', ],
-      'xcode_settings': {
-        'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
-        'GCC_OPTIMIZATION_LEVEL': 2,
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/xcode-gcc/test.gyp b/tools/gyp/test/mac/xcode-gcc/test.gyp
deleted file mode 100644
index 1ca8b21..0000000
--- a/tools/gyp/test/mac/xcode-gcc/test.gyp
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'target_defaults': {
-    'xcode_settings': {
-      'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',
-    },
-  },
-
-  'variables': {
-    # Non-failing tests should check that these trivial files in every language
-    # still compile correctly.
-    'valid_sources': [
-      'valid_c.c',
-      'valid_cc.cc',
-      'valid_m.m',
-      'valid_mm.mm',
-    ],
-  },
-
-  # Targets come in pairs: 'foo' and 'foo-fail', with the former building with
-  # no warnings and the latter not.
-  'targets': [
-    # GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO (default: YES):
-    {
-      'target_name': 'warn_about_invalid_offsetof_macro',
-      'type': 'executable',
-      'sources': [
-        'warn_about_invalid_offsetof_macro.cc',
-        '<@(valid_sources)',
-      ],
-      'xcode_settings': {
-        'GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO': 'NO',
-      },
-    },
-    {
-      'target_name': 'warn_about_invalid_offsetof_macro-fail',
-      'type': 'executable',
-      'sources': [ 'warn_about_invalid_offsetof_macro.cc', ],
-    },
-    # GCC_WARN_ABOUT_MISSING_NEWLINE (default: NO):
-    {
-      'target_name': 'warn_about_missing_newline',
-      'type': 'executable',
-      'sources': [
-        'warn_about_missing_newline.c',
-        '<@(valid_sources)',
-      ],
-    },
-    {
-      'target_name': 'warn_about_missing_newline-fail',
-      'type': 'executable',
-      'sources': [ 'warn_about_missing_newline.c', ],
-      'xcode_settings': {
-        'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/valid_c.c b/tools/gyp/test/mac/xcode-gcc/valid_c.c
deleted file mode 100644
index 2b10ac3..0000000
--- a/tools/gyp/test/mac/xcode-gcc/valid_c.c
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file exists to test that valid C files compile correctly.
-
-void FunctionInCFile(void) {
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/valid_cc.cc b/tools/gyp/test/mac/xcode-gcc/valid_cc.cc
deleted file mode 100644
index 31cddc3..0000000
--- a/tools/gyp/test/mac/xcode-gcc/valid_cc.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file exists to test that valid C++ files compile correctly.
-
-void FunctionInCCFile() {
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/valid_m.m b/tools/gyp/test/mac/xcode-gcc/valid_m.m
deleted file mode 100644
index 95bddb2..0000000
--- a/tools/gyp/test/mac/xcode-gcc/valid_m.m
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file exists to test that valid Objective-C files compile correctly.
-
-void FunctionInMFile(void) {
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/valid_mm.mm b/tools/gyp/test/mac/xcode-gcc/valid_mm.mm
deleted file mode 100644
index a7db7e3..0000000
--- a/tools/gyp/test/mac/xcode-gcc/valid_mm.mm
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file exists to test that valid Objective-C++ files compile correctly.
-
-void FunctionInMMFile() {
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc b/tools/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc
deleted file mode 100644
index 4a4612b..0000000
--- a/tools/gyp/test/mac/xcode-gcc/warn_about_invalid_offsetof_macro.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#define offsetof(st, m) ((unsigned)((char*)&((st*)0)->m - (char*)0))
-
-struct MyStruct {
-  virtual void MyFunc() = 0;
-  int my_member;
-};
-
-int main() {
-  unsigned x = offsetof(MyStruct, my_member);
-  return x ? 0 : 1;
-}
diff --git a/tools/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c b/tools/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c
deleted file mode 100644
index 6faf089..0000000
--- a/tools/gyp/test/mac/xcode-gcc/warn_about_missing_newline.c
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Important: Don't terminate this file with a newline.
-int main() {
-  return 0;
-}
\ No newline at end of file
diff --git a/tools/gyp/test/mac/xcode-support-actions/source.c b/tools/gyp/test/mac/xcode-support-actions/source.c
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/mac/xcode-support-actions/source.c
+++ /dev/null
diff --git a/tools/gyp/test/mac/xcode-support-actions/test.gyp b/tools/gyp/test/mac/xcode-support-actions/test.gyp
deleted file mode 100644
index ad81b8c..0000000
--- a/tools/gyp/test/mac/xcode-support-actions/test.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'target',
-      'product_name': 'Product',
-      'type': 'shared_library',
-      'mac_bundle': 1,
-      'sources': [
-        '<(PRODUCT_DIR)/copy.c',
-      ],
-      'actions': [
-        {
-          'action_name': 'Helper',
-          'description': 'Helps',
-          'inputs': [ 'source.c' ],
-          'outputs': [ '<(PRODUCT_DIR)/copy.c' ],
-          'action': [ 'cp', '${SOURCE_ROOT}/source.c',
-                      '<(PRODUCT_DIR)/copy.c' ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/mac/xctest/MyClass.h b/tools/gyp/test/mac/xctest/MyClass.h
deleted file mode 100644
index dde13aa..0000000
--- a/tools/gyp/test/mac/xctest/MyClass.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <Foundation/Foundation.h>
-
-@interface MyClass : NSObject
-@end
diff --git a/tools/gyp/test/mac/xctest/MyClass.m b/tools/gyp/test/mac/xctest/MyClass.m
deleted file mode 100644
index df11471..0000000
--- a/tools/gyp/test/mac/xctest/MyClass.m
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "MyClass.h"
-
-@implementation MyClass
-@end
diff --git a/tools/gyp/test/mac/xctest/TestCase.m b/tools/gyp/test/mac/xctest/TestCase.m
deleted file mode 100644
index 36846a1..0000000
--- a/tools/gyp/test/mac/xctest/TestCase.m
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <XCTest/XCTest.h>
-#import "MyClass.h"
-
-@interface TestCase : XCTestCase
-@end
-
-@implementation TestCase
-- (void)testFoo {
-  MyClass *foo = [[MyClass alloc] init];
-  XCTAssertNotNil(foo, @"expected non-nil object");
-}
-@end
diff --git a/tools/gyp/test/mac/xctest/resource.txt b/tools/gyp/test/mac/xctest/resource.txt
deleted file mode 100644
index 257cc56..0000000
--- a/tools/gyp/test/mac/xctest/resource.txt
+++ /dev/null
@@ -1 +0,0 @@
-foo
diff --git a/tools/gyp/test/mac/xctest/test.gyp b/tools/gyp/test/mac/xctest/test.gyp
deleted file mode 100644
index ac25656..0000000
--- a/tools/gyp/test/mac/xctest/test.gyp
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'classes',
-      'type': 'static_library',
-      'sources': [
-        'MyClass.h',
-        'MyClass.m',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-        ],
-      },
-    },
-    {
-      'target_name': 'tests',
-      'type': 'loadable_module',
-      'mac_xctest_bundle': 1,
-      'sources': [
-        'TestCase.m',
-      ],
-      'dependencies': [
-        'classes',
-      ],
-      'mac_bundle_resources': [
-        'resource.txt',
-      ],
-      'xcode_settings': {
-        'WRAPPER_EXTENSION': 'xctest',
-        'FRAMEWORK_SEARCH_PATHS': [
-          '$(inherited)',
-          '$(DEVELOPER_FRAMEWORKS_DIR)',
-        ],
-        'OTHER_LDFLAGS': [
-          '$(inherited)',
-          '-ObjC',
-        ],
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme b/tools/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
deleted file mode 100644
index 6bd1bb9..0000000
--- a/tools/gyp/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Scheme
-   LastUpgradeVersion = "0500"
-   version = "1.3">
-   <BuildAction
-      parallelizeBuildables = "YES"
-      buildImplicitDependencies = "YES">
-      <BuildActionEntries>
-         <BuildActionEntry
-            buildForTesting = "YES"
-            buildForRunning = "YES"
-            buildForProfiling = "YES"
-            buildForArchiving = "YES"
-            buildForAnalyzing = "YES">
-            <BuildableReference
-               BuildableIdentifier = "primary"
-               BlueprintIdentifier = "D3B79173B4570A3C70A902FF"
-               BuildableName = "libclasses.a"
-               BlueprintName = "classes"
-               ReferencedContainer = "container:test.xcodeproj">
-            </BuildableReference>
-         </BuildActionEntry>
-      </BuildActionEntries>
-   </BuildAction>
-   <TestAction
-      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
-      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
-      shouldUseLaunchSchemeArgsEnv = "YES"
-      buildConfiguration = "Default">
-      <Testables>
-         <TestableReference
-            skipped = "NO">
-            <BuildableReference
-               BuildableIdentifier = "primary"
-               BlueprintIdentifier = "2ACDAB234B9E5D65CACBCF9C"
-               BuildableName = "tests.xctest"
-               BlueprintName = "tests"
-               ReferencedContainer = "container:test.xcodeproj">
-            </BuildableReference>
-         </TestableReference>
-      </Testables>
-   </TestAction>
-   <LaunchAction
-      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
-      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
-      launchStyle = "0"
-      useCustomWorkingDirectory = "NO"
-      buildConfiguration = "Default"
-      ignoresPersistentStateOnLaunch = "NO"
-      debugDocumentVersioning = "YES"
-      allowLocationSimulation = "YES">
-      <AdditionalOptions>
-      </AdditionalOptions>
-   </LaunchAction>
-   <ProfileAction
-      shouldUseLaunchSchemeArgsEnv = "YES"
-      savedToolIdentifier = ""
-      useCustomWorkingDirectory = "NO"
-      buildConfiguration = "Default"
-      debugDocumentVersioning = "YES">
-   </ProfileAction>
-   <AnalyzeAction
-      buildConfiguration = "Default">
-   </AnalyzeAction>
-   <ArchiveAction
-      buildConfiguration = "Default"
-      revealArchiveInOrganizer = "YES">
-   </ArchiveAction>
-</Scheme>
diff --git a/tools/gyp/test/mac/xcuitest/Info.plist b/tools/gyp/test/mac/xcuitest/Info.plist
deleted file mode 100644
index ae8852b..0000000
--- a/tools/gyp/test/mac/xcuitest/Info.plist
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>${EXECUTABLE_NAME}</string>
-	<key>CFBundleIconFile</key>
-	<string></string>
-	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME}</string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleName</key>
-	<string>${PRODUCT_NAME}</string>
-	<key>CFBundlePackageType</key>
-	<string>BNDL</string>
-	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string>1</string>
-	<key>NSPrincipalClass</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/tools/gyp/test/mac/xcuitest/MyAppDelegate.h b/tools/gyp/test/mac/xcuitest/MyAppDelegate.h
deleted file mode 100644
index 445be2c..0000000
--- a/tools/gyp/test/mac/xcuitest/MyAppDelegate.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-@interface MyAppDelegate : NSObject<UIApplicationDelegate>
-@end
diff --git a/tools/gyp/test/mac/xcuitest/MyAppDelegate.m b/tools/gyp/test/mac/xcuitest/MyAppDelegate.m
deleted file mode 100644
index 6ad60fa..0000000
--- a/tools/gyp/test/mac/xcuitest/MyAppDelegate.m
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import "MyAppDelegate.h"
-
-
-@implementation MyAppDelegate
-@synthesize window;
-
-- (BOOL)application:(UIApplication *)application
-    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
-  self.window = [[UIWindow alloc] init];
-  self.window.rootViewController = [[UIViewController alloc] init];
-  [self.window makeKeyAndVisible];
-  return YES;
-}
-
-@end
diff --git a/tools/gyp/test/mac/xcuitest/TestCase.m b/tools/gyp/test/mac/xcuitest/TestCase.m
deleted file mode 100644
index 1f32b7af..0000000
--- a/tools/gyp/test/mac/xcuitest/TestCase.m
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <XCTest/XCTest.h>
-
-@interface TestCase : XCTestCase
-@end
-
-@implementation TestCase
-- (void)testFoo {
-  XCUIApplication *foo = [[XCUIApplication alloc] init];
-  XCTAssertNotNil(foo, @"expected non-nil object");
-}
-@end
diff --git a/tools/gyp/test/mac/xcuitest/main.m b/tools/gyp/test/mac/xcuitest/main.m
deleted file mode 100644
index e7cb62e..0000000
--- a/tools/gyp/test/mac/xcuitest/main.m
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#import <UIKit/UIKit.h>
-
-#import "MyAppDelegate.h"
-
-int main(int argc, char * argv[]) {
-  @autoreleasepool {
-    UIApplicationMain(argc, argv,
-                      nil, NSStringFromClass([MyAppDelegate class]));
-  }
-  return 1;
-}
diff --git a/tools/gyp/test/mac/xcuitest/resource.txt b/tools/gyp/test/mac/xcuitest/resource.txt
deleted file mode 100644
index 257cc56..0000000
--- a/tools/gyp/test/mac/xcuitest/resource.txt
+++ /dev/null
@@ -1 +0,0 @@
-foo
diff --git a/tools/gyp/test/mac/xcuitest/test.gyp b/tools/gyp/test/mac/xcuitest/test.gyp
deleted file mode 100644
index 80cdf90..0000000
--- a/tools/gyp/test/mac/xcuitest/test.gyp
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'xcode_settings': {
-      'SDKROOT': 'iphoneos',
-      'FRAMEWORK_SEARCH_PATHS': [
-        '$(inherited)',
-        '$(DEVELOPER_FRAMEWORKS_DIR)',
-      ],
-      'OTHER_LDFLAGS': [
-        '$(inherited)',
-        '-ObjC',
-      ],
-      'GCC_PREFIX_HEADER': '',
-      'CLANG_ENABLE_OBJC_ARC': 'YES',
-      'INFOPLIST_FILE': 'Info.plist',
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'testApp',
-      'type': 'executable',
-      'mac_bundle': 1,
-      'sources': [
-        'MyAppDelegate.h',
-        'MyAppDelegate.m',
-        'main.m',
-      ],
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
-          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
-        ],
-      },
-    },
-    {
-      'target_name': 'tests',
-      'type': 'loadable_module',
-      'mac_bundle': 1,
-      'mac_xcuitest_bundle': 1,
-      'sources': [
-        'TestCase.m',
-      ],
-      'dependencies': [
-        'testApp',
-      ],
-      'mac_bundle_resources': [
-        'resource.txt',
-      ],
-      'variables': {
-        # This must *not* be set for xctest ui tests.
-        'xctest_host': '',
-      },
-      'link_settings': {
-        'libraries': [
-          '$(SDKROOT)/System/Library/Frameworks/XCTest.framework',
-        ]
-      },
-      'xcode_settings': {
-        'WRAPPER_EXTENSION': 'xctest',
-        'TEST_TARGET_NAME': 'testApp',
-      },
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/make/dependencies.gyp b/tools/gyp/test/make/dependencies.gyp
deleted file mode 100644
index e2bee24..0000000
--- a/tools/gyp/test/make/dependencies.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'main',
-      'type': 'executable',
-      'sources': [
-        'main.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make/gyptest-dependencies.py b/tools/gyp/test/make/gyptest-dependencies.py
deleted file mode 100755
index d215f76..0000000
--- a/tools/gyp/test/make/gyptest-dependencies.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that .d files and all.deps are properly generated.
-"""
-
-import TestGyp
-
-# .d files are only used by the make build.
-test = TestGyp.TestGyp(formats=['make'])
-
-test.run_gyp('dependencies.gyp')
-
-test.build('dependencies.gyp', test.ALL)
-
-deps_file = test.built_file_path(".deps/out/Default/obj.target/main/main.o.d")
-test.must_contain(deps_file, "main.h")
-
-# Build a second time to make sure we generate all.deps.
-test.build('dependencies.gyp', test.ALL)
-
-test.pass_test()
diff --git a/tools/gyp/test/make/gyptest-noload.py b/tools/gyp/test/make/gyptest-noload.py
deleted file mode 100755
index 1f51033..0000000
--- a/tools/gyp/test/make/gyptest-noload.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests the use of the NO_LOAD flag which makes loading sub .mk files
-optional.
-"""
-
-# Python 2.5 needs this for the with statement.
-from __future__ import with_statement
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['make'])
-
-test.run_gyp('all.gyp', chdir='noload')
-
-test.relocate('noload', 'relocate/noload')
-
-test.build('build/all.gyp', test.ALL, chdir='relocate/noload')
-test.run_built_executable('exe', chdir='relocate/noload',
-                          stdout='Hello from shared.c.\n')
-
-# Just sanity test that NO_LOAD=lib doesn't break anything.
-test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
-           arguments=['NO_LOAD=lib'])
-test.run_built_executable('exe', chdir='relocate/noload',
-                          stdout='Hello from shared.c.\n')
-test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
-           arguments=['NO_LOAD=z'])
-test.run_built_executable('exe', chdir='relocate/noload',
-                          stdout='Hello from shared.c.\n')
-
-# Make sure we can rebuild without reloading the sub .mk file.
-with open('relocate/noload/main.c', 'a') as src_file:
-  src_file.write("\n")
-test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
-           arguments=['NO_LOAD=lib'])
-test.run_built_executable('exe', chdir='relocate/noload',
-                          stdout='Hello from shared.c.\n')
-
-# Change shared.c, but verify that it doesn't get rebuild if we don't load it.
-with open('relocate/noload/lib/shared.c', 'w') as shared_file:
-  shared_file.write(
-      '#include "shared.h"\n'
-      'const char kSharedStr[] = "modified";\n'
-  )
-test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
-           arguments=['NO_LOAD=lib'])
-test.run_built_executable('exe', chdir='relocate/noload',
-                          stdout='Hello from shared.c.\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/make/main.cc b/tools/gyp/test/make/main.cc
deleted file mode 100644
index 3b9a705..0000000
--- a/tools/gyp/test/make/main.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-#include "main.h"
-
-int main(void) {
-  printf("hello world\n");
-  return 0;
-}
diff --git a/tools/gyp/test/make/main.h b/tools/gyp/test/make/main.h
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/make/main.h
+++ /dev/null
diff --git a/tools/gyp/test/make/noload/all.gyp b/tools/gyp/test/make/noload/all.gyp
deleted file mode 100644
index 1617a9e..0000000
--- a/tools/gyp/test/make/noload/all.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'exe',
-      'type': 'executable',
-      'sources': [
-        'main.c',
-      ],
-      'dependencies': [
-        'lib/shared.gyp:shared',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make/noload/lib/shared.c b/tools/gyp/test/make/noload/lib/shared.c
deleted file mode 100644
index 51776c5..0000000
--- a/tools/gyp/test/make/noload/lib/shared.c
+++ /dev/null
@@ -1,3 +0,0 @@
-#include "shared.h"
-
-const char kSharedStr[] = "shared.c";
diff --git a/tools/gyp/test/make/noload/lib/shared.gyp b/tools/gyp/test/make/noload/lib/shared.gyp
deleted file mode 100644
index 8a8841b..0000000
--- a/tools/gyp/test/make/noload/lib/shared.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'shared',
-      'type': 'shared_library',
-      'sources': [
-        'shared.c',
-        'shared.h',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make/noload/lib/shared.h b/tools/gyp/test/make/noload/lib/shared.h
deleted file mode 100644
index a21da75..0000000
--- a/tools/gyp/test/make/noload/lib/shared.h
+++ /dev/null
@@ -1 +0,0 @@
-extern const char kSharedStr[];
diff --git a/tools/gyp/test/make/noload/main.c b/tools/gyp/test/make/noload/main.c
deleted file mode 100644
index 26ec188..0000000
--- a/tools/gyp/test/make/noload/main.c
+++ /dev/null
@@ -1,9 +0,0 @@
-#include <stdio.h>
-
-#include "lib/shared.h"
-
-int main(void)
-{
-  printf("Hello from %s.\n", kSharedStr);
-  return 0;
-}
diff --git a/tools/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py b/tools/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py
deleted file mode 100644
index e6676de..0000000
--- a/tools/gyp/test/make_global_settings/ar/gyptest-make_global_settings_ar.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies 'AR' in make_global_settings.
-"""
-
-import os
-import sys
-import TestGyp
-
-def resolve_path(test, path):
-  if path is None:
-    return None
-  elif test.format == 'make':
-    return '$(abspath %s)' % path
-  elif test.format in ['ninja', 'xcode-ninja']:
-    return os.path.join('..', '..', path)
-  else:
-    test.fail_test()
-
-
-def verify_ar_target(test, ar=None, rel_path=False):
-  if rel_path:
-    ar_expected = resolve_path(test, ar)
-  else:
-    ar_expected = ar
-  # Resolve default values
-  if ar_expected is None:
-    if test.format == 'make':
-      # Make generator hasn't set the default value for AR.
-      # You can remove the following assertion as long as it doesn't
-      # break existing projects.
-      test.must_not_contain('Makefile', 'AR ?= ')
-      return
-    elif test.format in ['ninja', 'xcode-ninja']:
-      if sys.platform == 'win32':
-        ar_expected = 'lib.exe'
-      else:
-        ar_expected = 'ar'
-  if test.format == 'make':
-    test.must_contain('Makefile', 'AR ?= %s' % ar_expected)
-  elif test.format in ['ninja', 'xcode-ninja']:
-    test.must_contain('out/Default/build.ninja', 'ar = %s' % ar_expected)
-  else:
-    test.fail_test()
-
-
-def verify_ar_host(test, ar=None, rel_path=False):
-  if rel_path:
-    ar_expected = resolve_path(test, ar)
-  else:
-    ar_expected = ar
-  # Resolve default values
-  if ar_expected is None:
-    if sys.platform == 'win32':
-      ar_expected = 'lib.exe'
-    else:
-      ar_expected = 'ar'
-  if test.format == 'make':
-    test.must_contain('Makefile', 'AR.host ?= %s' % ar_expected)
-  elif test.format in ['ninja', 'xcode-ninja']:
-    test.must_contain('out/Default/build.ninja', 'ar_host = %s' % ar_expected)
-  else:
-    test.fail_test()
-
-
-test_format = ['ninja']
-if sys.platform in ('linux2', 'darwin'):
-  test_format += ['make']
-
-test = TestGyp.TestGyp(formats=test_format)
-
-# Check default values
-test.run_gyp('make_global_settings_ar.gyp')
-verify_ar_target(test)
-
-
-# Check default values with GYP_CROSSCOMPILE enabled.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ar.gyp')
-verify_ar_target(test)
-verify_ar_host(test)
-
-
-# Test 'AR' in 'make_global_settings'.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ar.gyp', '-Dcustom_ar_target=my_ar')
-verify_ar_target(test, ar='my_ar', rel_path=True)
-
-
-# Test 'AR'/'AR.host' in 'make_global_settings'.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ar.gyp',
-               '-Dcustom_ar_target=my_ar_target1',
-               '-Dcustom_ar_host=my_ar_host1')
-verify_ar_target(test, ar='my_ar_target1', rel_path=True)
-verify_ar_host(test, ar='my_ar_host1', rel_path=True)
-
-
-# Test $AR and $AR_host environment variables.
-with TestGyp.LocalEnv({'AR': 'my_ar_target2',
-                       'AR_host': 'my_ar_host2'}):
-  test.run_gyp('make_global_settings_ar.gyp')
-# Ninja generator resolves $AR in gyp phase. Make generator doesn't.
-if test.format == 'ninja':
-  if sys.platform == 'win32':
-    # TODO(yukawa): Make sure if this is an expected result or not.
-    verify_ar_target(test, ar='lib.exe', rel_path=False)
-  else:
-    verify_ar_target(test, ar='my_ar_target2', rel_path=False)
-verify_ar_host(test, ar='my_ar_host2', rel_path=False)
-
-
-# Test 'AR' in 'make_global_settings' with $AR_host environment variable.
-with TestGyp.LocalEnv({'AR_host': 'my_ar_host3'}):
-  test.run_gyp('make_global_settings_ar.gyp',
-               '-Dcustom_ar_target=my_ar_target3')
-verify_ar_target(test, ar='my_ar_target3', rel_path=True)
-verify_ar_host(test, ar='my_ar_host3', rel_path=False)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp b/tools/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp
deleted file mode 100644
index 3430d82..0000000
--- a/tools/gyp/test/make_global_settings/ar/make_global_settings_ar.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style licence that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'custom_ar_target%': '',
-    'custom_ar_host%': '',
-  },
-  'conditions': [
-    ['"<(custom_ar_target)"!=""', {
-      'make_global_settings': [
-        ['AR', '<(custom_ar_target)'],
-      ],
-    }],
-    ['"<(custom_ar_host)"!=""', {
-      'make_global_settings': [
-        ['AR.host', '<(custom_ar_host)'],
-      ],
-    }],
-  ],
-  'targets': [
-    {
-      'target_name': 'make_global_settings_ar_test',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py b/tools/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py
deleted file mode 100644
index 63d1d0a..0000000
--- a/tools/gyp/test/make_global_settings/basics/gyptest-make_global_settings.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies make_global_settings.
-"""
-
-import os
-import sys
-import TestGyp
-
-test_format = ['ninja']
-if sys.platform in ('linux2', 'darwin'):
-  test_format += ['make']
-
-test = TestGyp.TestGyp(formats=test_format)
-
-test.run_gyp('make_global_settings.gyp')
-
-if test.format == 'make':
-  cc_expected = """ifneq (,$(filter $(origin CC), undefined default))
-  CC = $(abspath clang)
-endif
-"""
-  if sys.platform == 'linux2':
-    link_expected = """
-LINK ?= $(abspath clang)
-"""
-  elif sys.platform == 'darwin':
-    link_expected = """
-LINK ?= $(abspath clang)
-"""
-  test.must_contain('Makefile', cc_expected)
-  test.must_contain('Makefile', link_expected)
-if test.format == 'ninja':
-  cc_expected = 'cc = ' + os.path.join('..', '..', 'clang')
-  ld_expected = 'ld = $cc'
-  if sys.platform == 'win32':
-    ld_expected = 'link.exe'
-  test.must_contain('out/Default/build.ninja', cc_expected)
-  test.must_contain('out/Default/build.ninja', ld_expected)
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/basics/make_global_settings.gyp b/tools/gyp/test/make_global_settings/basics/make_global_settings.gyp
deleted file mode 100644
index 47dbc85..0000000
--- a/tools/gyp/test/make_global_settings/basics/make_global_settings.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style licence that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', 'clang'],
-    ['LINK', 'clang'],
-  ],
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py b/tools/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py
deleted file mode 100644
index 70d6906..0000000
--- a/tools/gyp/test/make_global_settings/env-wrapper/gyptest-wrapper.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies *_wrapper in environment.
-"""
-
-import os
-import sys
-import TestGyp
-
-test_format = ['ninja']
-
-os.environ['CC_wrapper'] = 'distcc'
-os.environ['LINK_wrapper'] = 'distlink'
-os.environ['CC.host_wrapper'] = 'ccache'
-
-test = TestGyp.TestGyp(formats=test_format)
-
-old_env = dict(os.environ)
-os.environ['GYP_CROSSCOMPILE'] = '1'
-test.run_gyp('wrapper.gyp')
-os.environ.clear()
-os.environ.update(old_env)
-
-if test.format == 'ninja':
-  cc_expected = ('cc = ' + os.path.join('..', '..', 'distcc') + ' ' +
-                 os.path.join('..', '..', 'clang'))
-  cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
-                      os.path.join('..', '..', 'clang'))
-  ld_expected = 'ld = ../../distlink $cc'
-  if sys.platform != 'win32':
-    ldxx_expected = 'ldxx = ../../distlink $cxx'
-
-  if sys.platform == 'win32':
-     ld_expected = 'link.exe'
-  test.must_contain('out/Default/build.ninja', cc_expected)
-  test.must_contain('out/Default/build.ninja', cc_host_expected)
-  test.must_contain('out/Default/build.ninja', ld_expected)
-  if sys.platform != 'win32':
-    test.must_contain('out/Default/build.ninja', ldxx_expected)
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/env-wrapper/wrapper.gyp b/tools/gyp/test/make_global_settings/env-wrapper/wrapper.gyp
deleted file mode 100644
index 1698d71..0000000
--- a/tools/gyp/test/make_global_settings/env-wrapper/wrapper.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', 'clang'],
-    ['CC.host', 'clang'],
-  ],
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/bar.cc b/tools/gyp/test/make_global_settings/full-toolchain/bar.cc
deleted file mode 100644
index afb422b..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/bar.cc
+++ /dev/null
@@ -1 +0,0 @@
-#error Not a real source file
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/foo.c b/tools/gyp/test/make_global_settings/full-toolchain/foo.c
deleted file mode 100644
index afb422b..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/foo.c
+++ /dev/null
@@ -1 +0,0 @@
-#error Not a real source file
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py b/tools/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py
deleted file mode 100644
index eebda7d..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/gyptest-make_global_settings.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies make_global_settings works with the full toolchain.
-"""
-
-import os
-import sys
-import TestGyp
-
-if sys.platform == 'win32':
-  # cross compiling not supported by ninja on windows
-  # and make not supported on windows at all.
-  sys.exit(0)
-
-test = TestGyp.TestGyp(formats=['ninja'])
-# Must set the test format to something with a flavor (the part after the '-')
-# in order to test the desired behavior. Since we want to run a non-host
-# toolchain, we have to set the flavor to something that the ninja generator
-# doesn't know about, so it doesn't default to the host-specific tools (e.g.,
-# 'otool' on mac to generate the .TOC).
-#
-# Note that we can't just pass format=['ninja-some_toolchain'] to the
-# constructor above, because then this test wouldn't be recognized as a ninja
-# format test.
-test.formats = ['ninja-my_flavor' if f == 'ninja' else f for f in test.formats]
-
-gyp_file = 'make_global_settings.gyp'
-
-test.run_gyp(gyp_file,
-             # Teach the .gyp file about the location of my_nm.py and
-             # my_readelf.py, and the python executable.
-             '-Dworkdir=%s' % test.workdir,
-             '-Dpython=%s' % sys.executable)
-test.build(gyp_file,
-           arguments=['-v'] if test.format == 'ninja-my_flavor' else [])
-
-expected = ['MY_CC', 'MY_CXX']
-test.must_contain_all_lines(test.stdout(), expected)
-
-test.must_contain(test.built_file_path('RAN_MY_NM'), 'RAN_MY_NM')
-test.must_contain(test.built_file_path('RAN_MY_READELF'), 'RAN_MY_READELF')
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp b/tools/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp
deleted file mode 100644
index 2c32663..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/make_global_settings.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style licence that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', '/bin/echo MY_CC'],
-    ['CXX', '/bin/echo MY_CXX'],
-    ['NM', '<(python) <(workdir)/my_nm.py'],
-    ['READELF', '<(python) <(workdir)/my_readelf.py'],
-  ],
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'shared_library',
-      'sources': [
-        'foo.c',
-        'bar.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/my_nm.py b/tools/gyp/test/make_global_settings/full-toolchain/my_nm.py
deleted file mode 100755
index f0f1efc..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/my_nm.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
-with open('RAN_MY_NM', 'w') as f:
-  f.write('RAN_MY_NM')
diff --git a/tools/gyp/test/make_global_settings/full-toolchain/my_readelf.py b/tools/gyp/test/make_global_settings/full-toolchain/my_readelf.py
deleted file mode 100755
index 40e303c..0000000
--- a/tools/gyp/test/make_global_settings/full-toolchain/my_readelf.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-print sys.argv
-with open('RAN_MY_READELF', 'w') as f:
-  f.write('RAN_MY_READELF')
diff --git a/tools/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py b/tools/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py
deleted file mode 100644
index c5a2e96..0000000
--- a/tools/gyp/test/make_global_settings/ld/gyptest-make_global_settings_ld.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies 'LD' in make_global_settings.
-"""
-
-import os
-import sys
-import TestGyp
-
-def resolve_path(test, path):
-  if path is None:
-    return None
-  elif test.format == 'make':
-    return '$(abspath %s)' % path
-  elif test.format in ['ninja', 'xcode-ninja']:
-    return os.path.join('..', '..', path)
-  else:
-    test.fail_test()
-
-
-def verify_ld_target(test, ld=None, rel_path=False):
-  if rel_path:
-    ld_expected = resolve_path(test, ld)
-  else:
-    ld_expected = ld
-  # Resolve default values
-  if ld_expected is None:
-    if test.format == 'make':
-      # Make generator hasn't set the default value for LD.
-      # You can remove the following assertion as long as it doesn't
-      # break existing projects.
-      test.must_not_contain('Makefile', 'LD ?= ')
-      return
-    elif test.format in ['ninja', 'xcode-ninja']:
-      if sys.platform == 'win32':
-        ld_expected = 'link.exe'
-      else:
-        ld_expected = '$cc'
-  if test.format == 'make':
-    test.must_contain('Makefile', 'LD ?= %s' % ld_expected)
-  elif test.format in ['ninja', 'xcode-ninja']:
-    test.must_contain('out/Default/build.ninja', 'ld = %s' % ld_expected)
-  else:
-    test.fail_test()
-
-
-def verify_ld_host(test, ld=None, rel_path=False):
-  if rel_path:
-    ld_expected = resolve_path(test, ld)
-  else:
-    ld_expected = ld
-  # Resolve default values
-  if ld_expected is None:
-    if test.format == 'make':
-      # Make generator hasn't set the default value for LD.host.
-      # You can remove the following assertion as long as it doesn't
-      # break existing projects.
-      test.must_not_contain('Makefile', 'LD.host ?= ')
-      return
-    elif test.format in ['ninja', 'xcode-ninja']:
-      if sys.platform == 'win32':
-        ld_expected = '$ld'
-      else:
-        ld_expected = '$cc_host'
-  if test.format == 'make':
-    test.must_contain('Makefile', 'LD.host ?= %s' % ld_expected)
-  elif test.format in ['ninja', 'xcode-ninja']:
-    test.must_contain('out/Default/build.ninja', 'ld_host = %s' % ld_expected)
-  else:
-    test.fail_test()
-
-
-test_format = ['ninja']
-if sys.platform in ('linux2', 'darwin'):
-  test_format += ['make']
-
-test = TestGyp.TestGyp(formats=test_format)
-
-# Check default values
-test.run_gyp('make_global_settings_ld.gyp')
-verify_ld_target(test)
-
-
-# Check default values with GYP_CROSSCOMPILE enabled.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ld.gyp')
-verify_ld_target(test)
-verify_ld_host(test)
-
-
-# Test 'LD' in 'make_global_settings'.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ld.gyp', '-Dcustom_ld_target=my_ld')
-verify_ld_target(test, ld='my_ld', rel_path=True)
-
-
-# Test 'LD'/'LD.host' in 'make_global_settings'.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1'}):
-  test.run_gyp('make_global_settings_ld.gyp',
-               '-Dcustom_ld_target=my_ld_target1',
-               '-Dcustom_ld_host=my_ld_host1')
-verify_ld_target(test, ld='my_ld_target1', rel_path=True)
-verify_ld_host(test, ld='my_ld_host1', rel_path=True)
-
-
-# Unlike other environment variables such as $AR/$AR_host, $CC/$CC_host,
-# and $CXX/$CXX_host, neither Make generator nor Ninja generator recognizes
-# $LD/$LD_host environment variables as of r1935. This may or may not be
-# intentional, but here we leave a test case to verify this behavior just for
-# the record.
-# If you want to support $LD/$LD_host, please revise the following test case as
-# well as the generator.
-with TestGyp.LocalEnv({'GYP_CROSSCOMPILE': '1',
-                       'LD': 'my_ld_target2',
-                       'LD_host': 'my_ld_host2'}):
-  test.run_gyp('make_global_settings_ld.gyp')
-if test.format == 'make':
-  test.must_not_contain('Makefile', 'my_ld_target2')
-  test.must_not_contain('Makefile', 'my_ld_host2')
-elif test.format == 'ninja':
-  test.must_not_contain('out/Default/build.ninja', 'my_ld_target2')
-  test.must_not_contain('out/Default/build.ninja', 'my_ld_host2')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp b/tools/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp
deleted file mode 100644
index 6837c77..0000000
--- a/tools/gyp/test/make_global_settings/ld/make_global_settings_ld.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style licence that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'custom_ld_target%': '',
-    'custom_ld_host%': '',
-  },
-  'conditions': [
-    ['"<(custom_ld_target)"!=""', {
-      'make_global_settings': [
-        ['LD', '<(custom_ld_target)'],
-      ],
-    }],
-    ['"<(custom_ld_host)"!=""', {
-      'make_global_settings': [
-        ['LD.host', '<(custom_ld_host)'],
-      ],
-    }],
-  ],
-  'targets': [
-    {
-      'target_name': 'make_global_settings_ld_test',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py b/tools/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py
deleted file mode 100644
index eb1ebfd..0000000
--- a/tools/gyp/test/make_global_settings/wrapper/gyptest-wrapper.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies *_wrapper in make_global_settings.
-"""
-
-import os
-import sys
-import TestGyp
-
-test_format = ['ninja']
-if sys.platform in ('linux2', 'darwin'):
-  test_format += ['make']
-
-test = TestGyp.TestGyp(formats=test_format)
-
-old_env = dict(os.environ)
-os.environ['GYP_CROSSCOMPILE'] = '1'
-test.run_gyp('wrapper.gyp')
-os.environ.clear()
-os.environ.update(old_env)
-
-if test.format == 'make':
-  cc_expected = """ifneq (,$(filter $(origin CC), undefined default))
-  CC = $(abspath distcc) $(abspath clang)
-endif
-"""
-  link_expected = 'LINK ?= $(abspath distlink) $(abspath clang++)'
-  test.must_contain('Makefile', cc_expected)
-  test.must_contain('Makefile', link_expected)
-if test.format == 'ninja':
-  cc_expected = ('cc = ' + os.path.join('..', '..', 'distcc') + ' ' +
-                 os.path.join('..', '..', 'clang'))
-  cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
-                      os.path.join('..', '..', 'clang'))
-  ld_expected = 'ld = ../../distlink $cc'
-  if sys.platform == 'win32':
-     ld_expected = 'link.exe'
-  test.must_contain('out/Default/build.ninja', cc_expected)
-  test.must_contain('out/Default/build.ninja', cc_host_expected)
-  test.must_contain('out/Default/build.ninja', ld_expected)
-
-test.pass_test()
diff --git a/tools/gyp/test/make_global_settings/wrapper/wrapper.gyp b/tools/gyp/test/make_global_settings/wrapper/wrapper.gyp
deleted file mode 100644
index 3d4cd04..0000000
--- a/tools/gyp/test/make_global_settings/wrapper/wrapper.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'make_global_settings': [
-    ['CC', 'clang'],
-    ['CC_wrapper', 'distcc'],
-    ['LINK', 'clang++'],
-    ['LINK_wrapper', 'distlink'],
-    ['CC.host', 'clang'],
-    ['CC.host_wrapper', 'ccache'],
-  ],
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'static_library',
-      'sources': [ 'foo.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/many-actions/file0 b/tools/gyp/test/many-actions/file0
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/many-actions/file0
+++ /dev/null
diff --git a/tools/gyp/test/many-actions/file1 b/tools/gyp/test/many-actions/file1
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/many-actions/file1
+++ /dev/null
diff --git a/tools/gyp/test/many-actions/file2 b/tools/gyp/test/many-actions/file2
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/many-actions/file2
+++ /dev/null
diff --git a/tools/gyp/test/many-actions/file3 b/tools/gyp/test/many-actions/file3
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/many-actions/file3
+++ /dev/null
diff --git a/tools/gyp/test/many-actions/file4 b/tools/gyp/test/many-actions/file4
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/many-actions/file4
+++ /dev/null
diff --git a/tools/gyp/test/many-actions/gyptest-many-actions-unsorted.py b/tools/gyp/test/many-actions/gyptest-many-actions-unsorted.py
deleted file mode 100644
index 90d3c92..0000000
--- a/tools/gyp/test/many-actions/gyptest-many-actions-unsorted.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure lots of actions in the same target don't cause exceeding command
-line length.
-"""
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('many-actions-unsorted.gyp')
-
-test.build('many-actions-unsorted.gyp', test.ALL)
-for i in range(15):
-  test.built_file_must_exist('generated_%d.h' % i)
-
-# Make sure the optimized cygwin setup doesn't cause problems for incremental
-# builds.
-test.touch('file1')
-test.build('many-actions-unsorted.gyp', test.ALL)
-
-test.touch('file0')
-test.build('many-actions-unsorted.gyp', test.ALL)
-
-test.touch('file2')
-test.touch('file3')
-test.touch('file4')
-test.build('many-actions-unsorted.gyp', test.ALL)
-
-test.pass_test()
diff --git a/tools/gyp/test/many-actions/gyptest-many-actions.py b/tools/gyp/test/many-actions/gyptest-many-actions.py
deleted file mode 100644
index f2e719b..0000000
--- a/tools/gyp/test/many-actions/gyptest-many-actions.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure lots of actions in the same target don't cause exceeding command
-line length.
-"""
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('many-actions.gyp')
-test.build('many-actions.gyp', test.ALL)
-for i in range(200):
-  test.built_file_must_exist('generated_%d.h' % i)
-test.pass_test()
diff --git a/tools/gyp/test/many-actions/many-actions-unsorted.gyp b/tools/gyp/test/many-actions/many-actions-unsorted.gyp
deleted file mode 100644
index eec79fe..0000000
--- a/tools/gyp/test/many-actions/many-actions-unsorted.gyp
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'msvs_cygwin_dirs': ['../../../../<(DEPTH)/third_party/cygwin'],
-  },
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'none',
-      'actions': [
-        # Notice that the inputs go 0, 1, ..., 0, 1, .... This is to test
-        # a regression in the msvs generator in _AddActions.
-        {
-          'action_name': 'do_0',
-          'inputs': ['file0'],
-          'outputs': ['<(PRODUCT_DIR)/generated_0.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_0.h',
-          ],
-        },
-        {
-          'action_name': 'do_1',
-          'inputs': ['file1'],
-          'outputs': ['<(PRODUCT_DIR)/generated_1.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_1.h',
-          ],
-        },
-        {
-          'action_name': 'do_2',
-          'inputs': ['file2'],
-          'outputs': ['<(PRODUCT_DIR)/generated_2.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_2.h',
-          ],
-        },
-        {
-          'action_name': 'do_3',
-          'inputs': ['file3'],
-          'outputs': ['<(PRODUCT_DIR)/generated_3.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_3.h',
-          ],
-        },
-        {
-          'action_name': 'do_4',
-          'inputs': ['file4'],
-          'outputs': ['<(PRODUCT_DIR)/generated_4.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_4.h',
-          ],
-        },
-        {
-          'action_name': 'do_5',
-          'inputs': ['file0'],
-          'outputs': ['<(PRODUCT_DIR)/generated_5.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_5.h',
-          ],
-        },
-        {
-          'action_name': 'do_6',
-          'inputs': ['file1'],
-          'outputs': ['<(PRODUCT_DIR)/generated_6.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_6.h',
-          ],
-        },
-        {
-          'action_name': 'do_7',
-          'inputs': ['file2'],
-          'outputs': ['<(PRODUCT_DIR)/generated_7.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_7.h',
-          ],
-        },
-        {
-          'action_name': 'do_8',
-          'inputs': ['file3'],
-          'outputs': ['<(PRODUCT_DIR)/generated_8.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_8.h',
-          ],
-        },
-        {
-          'action_name': 'do_9',
-          'inputs': ['file4'],
-          'outputs': ['<(PRODUCT_DIR)/generated_9.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_9.h',
-          ],
-        },
-        {
-          'action_name': 'do_10',
-          'inputs': ['file0'],
-          'outputs': ['<(PRODUCT_DIR)/generated_10.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_10.h',
-          ],
-        },
-        {
-          'action_name': 'do_11',
-          'inputs': ['file1'],
-          'outputs': ['<(PRODUCT_DIR)/generated_11.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_11.h',
-          ],
-        },
-        {
-          'action_name': 'do_12',
-          'inputs': ['file2'],
-          'outputs': ['<(PRODUCT_DIR)/generated_12.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_12.h',
-          ],
-        },
-        {
-          'action_name': 'do_13',
-          'inputs': ['file3'],
-          'outputs': ['<(PRODUCT_DIR)/generated_13.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_13.h',
-          ],
-        },
-        {
-          'action_name': 'do_14',
-          'inputs': ['file4'],
-          'outputs': ['<(PRODUCT_DIR)/generated_14.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_14.h',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/many-actions/many-actions.gyp b/tools/gyp/test/many-actions/many-actions.gyp
deleted file mode 100644
index 38545d2..0000000
--- a/tools/gyp/test/many-actions/many-actions.gyp
+++ /dev/null
@@ -1,1817 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'msvs_cygwin_dirs': ['../../../../<(DEPTH)/third_party/cygwin'],
-  },
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'do_0',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_0.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_0.h',
-          ],
-        },
-        {
-          'action_name': 'do_1',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_1.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_1.h',
-          ],
-        },
-        {
-          'action_name': 'do_2',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_2.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_2.h',
-          ],
-        },
-        {
-          'action_name': 'do_3',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_3.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_3.h',
-          ],
-        },
-        {
-          'action_name': 'do_4',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_4.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_4.h',
-          ],
-        },
-        {
-          'action_name': 'do_5',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_5.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_5.h',
-          ],
-        },
-        {
-          'action_name': 'do_6',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_6.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_6.h',
-          ],
-        },
-        {
-          'action_name': 'do_7',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_7.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_7.h',
-          ],
-        },
-        {
-          'action_name': 'do_8',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_8.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_8.h',
-          ],
-        },
-        {
-          'action_name': 'do_9',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_9.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_9.h',
-          ],
-        },
-        {
-          'action_name': 'do_10',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_10.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_10.h',
-          ],
-        },
-        {
-          'action_name': 'do_11',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_11.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_11.h',
-          ],
-        },
-        {
-          'action_name': 'do_12',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_12.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_12.h',
-          ],
-        },
-        {
-          'action_name': 'do_13',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_13.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_13.h',
-          ],
-        },
-        {
-          'action_name': 'do_14',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_14.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_14.h',
-          ],
-        },
-        {
-          'action_name': 'do_15',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_15.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_15.h',
-          ],
-        },
-        {
-          'action_name': 'do_16',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_16.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_16.h',
-          ],
-        },
-        {
-          'action_name': 'do_17',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_17.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_17.h',
-          ],
-        },
-        {
-          'action_name': 'do_18',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_18.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_18.h',
-          ],
-        },
-        {
-          'action_name': 'do_19',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_19.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_19.h',
-          ],
-        },
-        {
-          'action_name': 'do_20',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_20.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_20.h',
-          ],
-        },
-        {
-          'action_name': 'do_21',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_21.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_21.h',
-          ],
-        },
-        {
-          'action_name': 'do_22',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_22.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_22.h',
-          ],
-        },
-        {
-          'action_name': 'do_23',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_23.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_23.h',
-          ],
-        },
-        {
-          'action_name': 'do_24',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_24.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_24.h',
-          ],
-        },
-        {
-          'action_name': 'do_25',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_25.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_25.h',
-          ],
-        },
-        {
-          'action_name': 'do_26',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_26.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_26.h',
-          ],
-        },
-        {
-          'action_name': 'do_27',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_27.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_27.h',
-          ],
-        },
-        {
-          'action_name': 'do_28',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_28.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_28.h',
-          ],
-        },
-        {
-          'action_name': 'do_29',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_29.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_29.h',
-          ],
-        },
-        {
-          'action_name': 'do_30',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_30.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_30.h',
-          ],
-        },
-        {
-          'action_name': 'do_31',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_31.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_31.h',
-          ],
-        },
-        {
-          'action_name': 'do_32',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_32.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_32.h',
-          ],
-        },
-        {
-          'action_name': 'do_33',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_33.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_33.h',
-          ],
-        },
-        {
-          'action_name': 'do_34',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_34.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_34.h',
-          ],
-        },
-        {
-          'action_name': 'do_35',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_35.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_35.h',
-          ],
-        },
-        {
-          'action_name': 'do_36',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_36.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_36.h',
-          ],
-        },
-        {
-          'action_name': 'do_37',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_37.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_37.h',
-          ],
-        },
-        {
-          'action_name': 'do_38',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_38.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_38.h',
-          ],
-        },
-        {
-          'action_name': 'do_39',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_39.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_39.h',
-          ],
-        },
-        {
-          'action_name': 'do_40',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_40.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_40.h',
-          ],
-        },
-        {
-          'action_name': 'do_41',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_41.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_41.h',
-          ],
-        },
-        {
-          'action_name': 'do_42',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_42.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_42.h',
-          ],
-        },
-        {
-          'action_name': 'do_43',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_43.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_43.h',
-          ],
-        },
-        {
-          'action_name': 'do_44',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_44.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_44.h',
-          ],
-        },
-        {
-          'action_name': 'do_45',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_45.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_45.h',
-          ],
-        },
-        {
-          'action_name': 'do_46',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_46.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_46.h',
-          ],
-        },
-        {
-          'action_name': 'do_47',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_47.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_47.h',
-          ],
-        },
-        {
-          'action_name': 'do_48',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_48.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_48.h',
-          ],
-        },
-        {
-          'action_name': 'do_49',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_49.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_49.h',
-          ],
-        },
-        {
-          'action_name': 'do_50',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_50.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_50.h',
-          ],
-        },
-        {
-          'action_name': 'do_51',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_51.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_51.h',
-          ],
-        },
-        {
-          'action_name': 'do_52',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_52.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_52.h',
-          ],
-        },
-        {
-          'action_name': 'do_53',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_53.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_53.h',
-          ],
-        },
-        {
-          'action_name': 'do_54',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_54.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_54.h',
-          ],
-        },
-        {
-          'action_name': 'do_55',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_55.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_55.h',
-          ],
-        },
-        {
-          'action_name': 'do_56',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_56.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_56.h',
-          ],
-        },
-        {
-          'action_name': 'do_57',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_57.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_57.h',
-          ],
-        },
-        {
-          'action_name': 'do_58',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_58.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_58.h',
-          ],
-        },
-        {
-          'action_name': 'do_59',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_59.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_59.h',
-          ],
-        },
-        {
-          'action_name': 'do_60',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_60.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_60.h',
-          ],
-        },
-        {
-          'action_name': 'do_61',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_61.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_61.h',
-          ],
-        },
-        {
-          'action_name': 'do_62',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_62.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_62.h',
-          ],
-        },
-        {
-          'action_name': 'do_63',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_63.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_63.h',
-          ],
-        },
-        {
-          'action_name': 'do_64',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_64.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_64.h',
-          ],
-        },
-        {
-          'action_name': 'do_65',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_65.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_65.h',
-          ],
-        },
-        {
-          'action_name': 'do_66',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_66.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_66.h',
-          ],
-        },
-        {
-          'action_name': 'do_67',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_67.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_67.h',
-          ],
-        },
-        {
-          'action_name': 'do_68',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_68.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_68.h',
-          ],
-        },
-        {
-          'action_name': 'do_69',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_69.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_69.h',
-          ],
-        },
-        {
-          'action_name': 'do_70',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_70.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_70.h',
-          ],
-        },
-        {
-          'action_name': 'do_71',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_71.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_71.h',
-          ],
-        },
-        {
-          'action_name': 'do_72',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_72.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_72.h',
-          ],
-        },
-        {
-          'action_name': 'do_73',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_73.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_73.h',
-          ],
-        },
-        {
-          'action_name': 'do_74',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_74.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_74.h',
-          ],
-        },
-        {
-          'action_name': 'do_75',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_75.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_75.h',
-          ],
-        },
-        {
-          'action_name': 'do_76',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_76.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_76.h',
-          ],
-        },
-        {
-          'action_name': 'do_77',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_77.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_77.h',
-          ],
-        },
-        {
-          'action_name': 'do_78',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_78.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_78.h',
-          ],
-        },
-        {
-          'action_name': 'do_79',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_79.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_79.h',
-          ],
-        },
-        {
-          'action_name': 'do_80',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_80.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_80.h',
-          ],
-        },
-        {
-          'action_name': 'do_81',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_81.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_81.h',
-          ],
-        },
-        {
-          'action_name': 'do_82',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_82.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_82.h',
-          ],
-        },
-        {
-          'action_name': 'do_83',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_83.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_83.h',
-          ],
-        },
-        {
-          'action_name': 'do_84',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_84.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_84.h',
-          ],
-        },
-        {
-          'action_name': 'do_85',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_85.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_85.h',
-          ],
-        },
-        {
-          'action_name': 'do_86',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_86.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_86.h',
-          ],
-        },
-        {
-          'action_name': 'do_87',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_87.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_87.h',
-          ],
-        },
-        {
-          'action_name': 'do_88',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_88.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_88.h',
-          ],
-        },
-        {
-          'action_name': 'do_89',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_89.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_89.h',
-          ],
-        },
-        {
-          'action_name': 'do_90',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_90.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_90.h',
-          ],
-        },
-        {
-          'action_name': 'do_91',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_91.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_91.h',
-          ],
-        },
-        {
-          'action_name': 'do_92',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_92.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_92.h',
-          ],
-        },
-        {
-          'action_name': 'do_93',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_93.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_93.h',
-          ],
-        },
-        {
-          'action_name': 'do_94',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_94.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_94.h',
-          ],
-        },
-        {
-          'action_name': 'do_95',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_95.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_95.h',
-          ],
-        },
-        {
-          'action_name': 'do_96',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_96.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_96.h',
-          ],
-        },
-        {
-          'action_name': 'do_97',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_97.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_97.h',
-          ],
-        },
-        {
-          'action_name': 'do_98',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_98.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_98.h',
-          ],
-        },
-        {
-          'action_name': 'do_99',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_99.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_99.h',
-          ],
-        },
-        {
-          'action_name': 'do_100',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_100.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_100.h',
-          ],
-        },
-        {
-          'action_name': 'do_101',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_101.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_101.h',
-          ],
-        },
-        {
-          'action_name': 'do_102',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_102.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_102.h',
-          ],
-        },
-        {
-          'action_name': 'do_103',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_103.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_103.h',
-          ],
-        },
-        {
-          'action_name': 'do_104',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_104.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_104.h',
-          ],
-        },
-        {
-          'action_name': 'do_105',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_105.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_105.h',
-          ],
-        },
-        {
-          'action_name': 'do_106',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_106.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_106.h',
-          ],
-        },
-        {
-          'action_name': 'do_107',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_107.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_107.h',
-          ],
-        },
-        {
-          'action_name': 'do_108',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_108.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_108.h',
-          ],
-        },
-        {
-          'action_name': 'do_109',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_109.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_109.h',
-          ],
-        },
-        {
-          'action_name': 'do_110',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_110.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_110.h',
-          ],
-        },
-        {
-          'action_name': 'do_111',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_111.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_111.h',
-          ],
-        },
-        {
-          'action_name': 'do_112',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_112.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_112.h',
-          ],
-        },
-        {
-          'action_name': 'do_113',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_113.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_113.h',
-          ],
-        },
-        {
-          'action_name': 'do_114',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_114.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_114.h',
-          ],
-        },
-        {
-          'action_name': 'do_115',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_115.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_115.h',
-          ],
-        },
-        {
-          'action_name': 'do_116',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_116.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_116.h',
-          ],
-        },
-        {
-          'action_name': 'do_117',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_117.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_117.h',
-          ],
-        },
-        {
-          'action_name': 'do_118',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_118.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_118.h',
-          ],
-        },
-        {
-          'action_name': 'do_119',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_119.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_119.h',
-          ],
-        },
-        {
-          'action_name': 'do_120',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_120.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_120.h',
-          ],
-        },
-        {
-          'action_name': 'do_121',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_121.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_121.h',
-          ],
-        },
-        {
-          'action_name': 'do_122',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_122.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_122.h',
-          ],
-        },
-        {
-          'action_name': 'do_123',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_123.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_123.h',
-          ],
-        },
-        {
-          'action_name': 'do_124',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_124.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_124.h',
-          ],
-        },
-        {
-          'action_name': 'do_125',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_125.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_125.h',
-          ],
-        },
-        {
-          'action_name': 'do_126',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_126.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_126.h',
-          ],
-        },
-        {
-          'action_name': 'do_127',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_127.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_127.h',
-          ],
-        },
-        {
-          'action_name': 'do_128',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_128.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_128.h',
-          ],
-        },
-        {
-          'action_name': 'do_129',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_129.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_129.h',
-          ],
-        },
-        {
-          'action_name': 'do_130',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_130.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_130.h',
-          ],
-        },
-        {
-          'action_name': 'do_131',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_131.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_131.h',
-          ],
-        },
-        {
-          'action_name': 'do_132',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_132.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_132.h',
-          ],
-        },
-        {
-          'action_name': 'do_133',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_133.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_133.h',
-          ],
-        },
-        {
-          'action_name': 'do_134',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_134.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_134.h',
-          ],
-        },
-        {
-          'action_name': 'do_135',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_135.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_135.h',
-          ],
-        },
-        {
-          'action_name': 'do_136',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_136.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_136.h',
-          ],
-        },
-        {
-          'action_name': 'do_137',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_137.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_137.h',
-          ],
-        },
-        {
-          'action_name': 'do_138',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_138.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_138.h',
-          ],
-        },
-        {
-          'action_name': 'do_139',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_139.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_139.h',
-          ],
-        },
-        {
-          'action_name': 'do_140',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_140.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_140.h',
-          ],
-        },
-        {
-          'action_name': 'do_141',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_141.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_141.h',
-          ],
-        },
-        {
-          'action_name': 'do_142',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_142.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_142.h',
-          ],
-        },
-        {
-          'action_name': 'do_143',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_143.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_143.h',
-          ],
-        },
-        {
-          'action_name': 'do_144',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_144.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_144.h',
-          ],
-        },
-        {
-          'action_name': 'do_145',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_145.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_145.h',
-          ],
-        },
-        {
-          'action_name': 'do_146',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_146.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_146.h',
-          ],
-        },
-        {
-          'action_name': 'do_147',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_147.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_147.h',
-          ],
-        },
-        {
-          'action_name': 'do_148',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_148.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_148.h',
-          ],
-        },
-        {
-          'action_name': 'do_149',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_149.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_149.h',
-          ],
-        },
-        {
-          'action_name': 'do_150',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_150.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_150.h',
-          ],
-        },
-        {
-          'action_name': 'do_151',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_151.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_151.h',
-          ],
-        },
-        {
-          'action_name': 'do_152',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_152.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_152.h',
-          ],
-        },
-        {
-          'action_name': 'do_153',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_153.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_153.h',
-          ],
-        },
-        {
-          'action_name': 'do_154',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_154.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_154.h',
-          ],
-        },
-        {
-          'action_name': 'do_155',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_155.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_155.h',
-          ],
-        },
-        {
-          'action_name': 'do_156',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_156.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_156.h',
-          ],
-        },
-        {
-          'action_name': 'do_157',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_157.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_157.h',
-          ],
-        },
-        {
-          'action_name': 'do_158',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_158.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_158.h',
-          ],
-        },
-        {
-          'action_name': 'do_159',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_159.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_159.h',
-          ],
-        },
-        {
-          'action_name': 'do_160',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_160.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_160.h',
-          ],
-        },
-        {
-          'action_name': 'do_161',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_161.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_161.h',
-          ],
-        },
-        {
-          'action_name': 'do_162',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_162.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_162.h',
-          ],
-        },
-        {
-          'action_name': 'do_163',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_163.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_163.h',
-          ],
-        },
-        {
-          'action_name': 'do_164',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_164.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_164.h',
-          ],
-        },
-        {
-          'action_name': 'do_165',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_165.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_165.h',
-          ],
-        },
-        {
-          'action_name': 'do_166',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_166.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_166.h',
-          ],
-        },
-        {
-          'action_name': 'do_167',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_167.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_167.h',
-          ],
-        },
-        {
-          'action_name': 'do_168',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_168.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_168.h',
-          ],
-        },
-        {
-          'action_name': 'do_169',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_169.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_169.h',
-          ],
-        },
-        {
-          'action_name': 'do_170',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_170.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_170.h',
-          ],
-        },
-        {
-          'action_name': 'do_171',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_171.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_171.h',
-          ],
-        },
-        {
-          'action_name': 'do_172',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_172.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_172.h',
-          ],
-        },
-        {
-          'action_name': 'do_173',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_173.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_173.h',
-          ],
-        },
-        {
-          'action_name': 'do_174',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_174.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_174.h',
-          ],
-        },
-        {
-          'action_name': 'do_175',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_175.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_175.h',
-          ],
-        },
-        {
-          'action_name': 'do_176',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_176.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_176.h',
-          ],
-        },
-        {
-          'action_name': 'do_177',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_177.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_177.h',
-          ],
-        },
-        {
-          'action_name': 'do_178',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_178.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_178.h',
-          ],
-        },
-        {
-          'action_name': 'do_179',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_179.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_179.h',
-          ],
-        },
-        {
-          'action_name': 'do_180',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_180.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_180.h',
-          ],
-        },
-        {
-          'action_name': 'do_181',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_181.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_181.h',
-          ],
-        },
-        {
-          'action_name': 'do_182',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_182.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_182.h',
-          ],
-        },
-        {
-          'action_name': 'do_183',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_183.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_183.h',
-          ],
-        },
-        {
-          'action_name': 'do_184',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_184.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_184.h',
-          ],
-        },
-        {
-          'action_name': 'do_185',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_185.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_185.h',
-          ],
-        },
-        {
-          'action_name': 'do_186',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_186.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_186.h',
-          ],
-        },
-        {
-          'action_name': 'do_187',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_187.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_187.h',
-          ],
-        },
-        {
-          'action_name': 'do_188',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_188.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_188.h',
-          ],
-        },
-        {
-          'action_name': 'do_189',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_189.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_189.h',
-          ],
-        },
-        {
-          'action_name': 'do_190',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_190.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_190.h',
-          ],
-        },
-        {
-          'action_name': 'do_191',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_191.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_191.h',
-          ],
-        },
-        {
-          'action_name': 'do_192',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_192.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_192.h',
-          ],
-        },
-        {
-          'action_name': 'do_193',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_193.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_193.h',
-          ],
-        },
-        {
-          'action_name': 'do_194',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_194.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_194.h',
-          ],
-        },
-        {
-          'action_name': 'do_195',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_195.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_195.h',
-          ],
-        },
-        {
-          'action_name': 'do_196',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_196.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_196.h',
-          ],
-        },
-        {
-          'action_name': 'do_197',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_197.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_197.h',
-          ],
-        },
-        {
-          'action_name': 'do_198',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_198.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_198.h',
-          ],
-        },
-        {
-          'action_name': 'do_199',
-          'inputs': [],
-          'outputs': ['<(PRODUCT_DIR)/generated_199.h'],
-          'action': [
-            'touch',
-            '<(PRODUCT_DIR)/generated_199.h',
-          ],
-        },
-      ],
-    },
-  ],
-}
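The deleted file above spells out the same 'touch' action once per generated_N.h header (do_61 through do_199 in the portion shown here). Purely as an illustrative sketch — this helper is hypothetical and not part of the gyp tree — the list could be produced programmatically instead of by hand:

# Hypothetical helper: rebuild the list of identical touch actions that the
# deleted many-actions .gyp file enumerated one entry at a time.
def make_touch_actions(count=199):
    actions = []
    for i in range(1, count + 1):
        header = '<(PRODUCT_DIR)/generated_%d.h' % i
        actions.append({
            'action_name': 'do_%d' % i,
            'inputs': [],
            'outputs': [header],
            'action': ['touch', header],
        })
    return actions

# The first entry matches the shape of the hand-written ones above.
assert make_touch_actions(1)[0]['outputs'] == ['<(PRODUCT_DIR)/generated_1.h']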
diff --git a/tools/gyp/test/module/gyptest-default.py b/tools/gyp/test/module/gyptest-default.py
deleted file mode 100755
index 7fecf3c..0000000
--- a/tools/gyp/test/module/gyptest-default.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple build of a "Hello, world!" program with loadable modules. The
-default for all platforms should be to output the loadable modules to the same
-path as the executable.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('module.gyp', chdir='src')
-
-test.build('module.gyp', test.ALL, chdir='src')
-
-expect = """\
-Hello from program.c
-Hello from lib1.c
-Hello from lib2.c
-"""
-test.run_built_executable('program', chdir='src', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/module/src/lib1.c b/tools/gyp/test/module/src/lib1.c
deleted file mode 100644
index 8de0e94..0000000
--- a/tools/gyp/test/module/src/lib1.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void module_main(void)
-{
-  fprintf(stdout, "Hello from lib1.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/module/src/lib2.c b/tools/gyp/test/module/src/lib2.c
deleted file mode 100644
index 266396d..0000000
--- a/tools/gyp/test/module/src/lib2.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void module_main(void)
-{
-  fprintf(stdout, "Hello from lib2.c\n");
-  fflush(stdout);
-}
diff --git a/tools/gyp/test/module/src/module.gyp b/tools/gyp/test/module/src/module.gyp
deleted file mode 100644
index 2bc398b..0000000
--- a/tools/gyp/test/module/src/module.gyp
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'conditions': [
-      ['OS=="win"', {
-        'defines': ['PLATFORM_WIN'],
-      }],
-      ['OS=="mac" or OS=="ios"', {
-        'defines': ['PLATFORM_MAC'],
-      }],
-      ['OS=="linux"', {
-        'defines': ['PLATFORM_LINUX'],
-        # Support 64-bit shared libs (also works fine for 32-bit).
-        'cflags': ['-fPIC'],
-        'libraries': ['-ldl'],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'dependencies': [
-        'lib1',
-        'lib2',
-      ],
-      'sources': [
-        'program.c',
-      ],
-    },
-    {
-      'target_name': 'lib1',
-      'type': 'loadable_module',
-      'product_name': 'lib1',
-      'product_prefix': '',
-      'sources': [
-        'lib1.c',
-      ],
-    },
-    {
-      'target_name': 'lib2',
-      'product_name': 'lib2',
-      'product_prefix': '',
-      'type': 'loadable_module',
-      'sources': [
-        'lib2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/module/src/program.c b/tools/gyp/test/module/src/program.c
deleted file mode 100644
index 7cc3dd3..0000000
--- a/tools/gyp/test/module/src/program.c
+++ /dev/null
@@ -1,111 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-
-#if defined(PLATFORM_WIN)
-#include <windows.h>
-#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
-#include <dlfcn.h>
-#include <libgen.h>
-#include <string.h>
-#include <sys/param.h>
-#define MAX_PATH PATH_MAX
-#endif
-
-#if defined(PLATFORM_WIN)
-#define MODULE_SUFFIX ".dll"
-#elif defined(PLATFORM_MAC)
-#define MODULE_SUFFIX ".so"
-#elif defined(PLATFORM_LINUX)
-#define MODULE_SUFFIX ".so"
-#endif
-
-typedef void (*module_symbol)(void);
-char bin_path[MAX_PATH + 1];
-
-
-void CallModule(const char* module) {
-  char module_path[MAX_PATH + 1];
-  const char* module_function = "module_main";
-  module_symbol funcptr;
-#if defined(PLATFORM_WIN)
-  HMODULE dl;
-  char drive[_MAX_DRIVE];
-  char dir[_MAX_DIR];
-
-  if (_splitpath_s(bin_path, drive, _MAX_DRIVE, dir, _MAX_DIR,
-                    NULL, 0, NULL, 0)) {
-    fprintf(stderr, "Failed to split executable path.\n");
-    return;
-  }
-  if (_makepath_s(module_path, MAX_PATH, drive, dir, module, MODULE_SUFFIX)) {
-    fprintf(stderr, "Failed to calculate module path.\n");
-    return;
-  }
-
-  dl = LoadLibrary(module_path);
-  if (!dl) {
-    fprintf(stderr, "Failed to open module: %s\n", module_path);
-    return;
-  }
-
-  funcptr = (module_symbol) GetProcAddress(dl, module_function);
-  if (!funcptr) {
-    fprintf(stderr, "Failed to find symbol: %s\n", module_function);
-    return;
-  }
-  funcptr();
-
-  FreeLibrary(dl);
-#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
-  void* dl;
-  char* path_copy = strdup(bin_path);
-  char* bin_dir = dirname(path_copy);
-  int path_size = snprintf(module_path, MAX_PATH, "%s/%s%s", bin_dir, module,
-                           MODULE_SUFFIX);
-  free(path_copy);
-  if (path_size < 0 || path_size > MAX_PATH) {
-    fprintf(stderr, "Failed to calculate module path.\n");
-    return;
-  }
-  module_path[path_size] = 0;
-
-  dl = dlopen(module_path, RTLD_LAZY);
-  if (!dl) {
-    fprintf(stderr, "Failed to open module: %s\n", module_path);
-    return;
-  }
-
-  funcptr = dlsym(dl, module_function);
-  if (!funcptr) {
-    fprintf(stderr, "Failed to find symbol: %s\n", module_function);
-    return;
-  }
-  funcptr();
-
-  dlclose(dl);
-#endif
-}
-
-int main(int argc, char *argv[])
-{
-  fprintf(stdout, "Hello from program.c\n");
-  fflush(stdout);
-
-#if defined(PLATFORM_WIN)
-  if (!GetModuleFileName(NULL, bin_path, MAX_PATH)) {
-    fprintf(stderr, "Failed to determine executable path.\n");
-    return 1;
-  }
-#elif defined(PLATFORM_MAC) || defined(PLATFORM_LINUX)
-  // Using argv[0] should be OK here since we control how the tests run, and
-  // can avoid exec and such issues that make it unreliable.
-  if (!realpath(argv[0], bin_path)) {
-    fprintf(stderr, "Failed to determine executable path (%s).\n", argv[0]);
-    return 1;
-  }
-#endif
-
-  CallModule("lib1");
-  CallModule("lib2");
-  return 0;
-}
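program.c above resolves the module path next to the executable, then loads it with LoadLibrary/GetProcAddress on Windows and dlopen/dlsym on Mac and Linux before calling module_main. A minimal Python sketch of the same load-and-call pattern, using ctypes (illustrative only; it assumes lib1 and lib2 sit beside the script and export module_main):

# Illustrative sketch only -- mirrors the deleted program.c via ctypes.
import ctypes
import os
import sys

def call_module(name):
    suffix = '.dll' if sys.platform == 'win32' else '.so'
    bin_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
    lib = ctypes.CDLL(os.path.join(bin_dir, name + suffix))  # dlopen / LoadLibrary
    lib.module_main()  # dlsym / GetProcAddress plus the call itself

if __name__ == '__main__':
    for module in ('lib1', 'lib2'):
        call_module(module)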
diff --git a/tools/gyp/test/msvs/buildevents/buildevents.gyp b/tools/gyp/test/msvs/buildevents/buildevents.gyp
deleted file mode 100644
index e0304dd..0000000
--- a/tools/gyp/test/msvs/buildevents/buildevents.gyp
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'main',
-      'type': 'executable',
-      'sources': [ 'main.cc', ],
-      'msvs_prebuild': r'echo starting',
-      'msvs_postbuild': r'echo finished',
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py b/tools/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
deleted file mode 100755
index 208f434..0000000
--- a/tools/gyp/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that msvs_prebuild and msvs_postbuild can be specified in both
-VS 2008 and 2010.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
-
-test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2008')
-test.must_contain('main.vcproj', 'Name="VCPreBuildEventTool"')
-test.must_contain('main.vcproj', 'Name="VCPostBuildEventTool"')
-
-test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2010')
-test.must_contain('main.vcxproj', '<PreBuildEvent>')
-test.must_contain('main.vcxproj', '<PostBuildEvent>')
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py b/tools/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py
deleted file mode 100755
index be4ec99..0000000
--- a/tools/gyp/test/msvs/buildevents/gyptest-ninja-warnings.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ninja errors out when encountering msvs_prebuild/msvs_postbuild.
-"""
-
-import sys
-import TestCmd
-import TestGyp
-
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  test.run_gyp('buildevents.gyp',
-      status=1,
-      stderr=r'.*msvs_prebuild not supported \(target main\).*',
-      match=TestCmd.match_re_dotall)
-
-  test.run_gyp('buildevents.gyp',
-      status=1,
-      stderr=r'.*msvs_postbuild not supported \(target main\).*',
-      match=TestCmd.match_re_dotall)
-
-  test.pass_test()
diff --git a/tools/gyp/test/msvs/buildevents/main.cc b/tools/gyp/test/msvs/buildevents/main.cc
deleted file mode 100644
index 03c0285..0000000
--- a/tools/gyp/test/msvs/buildevents/main.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {}
diff --git a/tools/gyp/test/msvs/config_attrs/gyptest-config_attrs.py b/tools/gyp/test/msvs/config_attrs/gyptest-config_attrs.py
deleted file mode 100644
index d560374..0000000
--- a/tools/gyp/test/msvs/config_attrs/gyptest-config_attrs.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that msvs_configuration_attributes and
-msbuild_configuration_attributes are applied by using
-them to set the OutputDirectory.
-"""
-
-import TestGyp
-import os
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-
-test = TestGyp.TestGyp(workdir='workarea_all',formats=['msvs'])
-
-vc_version = 'VC90'
-
-if os.getenv('GYP_MSVS_VERSION'):
-  vc_version = ['VC90','VC100'][int(os.getenv('GYP_MSVS_VERSION')) >= 2010]
-
-expected_exe_file = os.path.join(test.workdir, vc_version, 'hello.exe')
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp')
-
-test.must_exist(expected_exe_file)
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/config_attrs/hello.c b/tools/gyp/test/msvs/config_attrs/hello.c
deleted file mode 100644
index faadc75..0000000
--- a/tools/gyp/test/msvs/config_attrs/hello.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/config_attrs/hello.gyp b/tools/gyp/test/msvs/config_attrs/hello.gyp
deleted file mode 100644
index 810a80e..0000000
--- a/tools/gyp/test/msvs/config_attrs/hello.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-      'msvs_configuration_attributes': {
-        'OutputDirectory':'$(SolutionDir)VC90/'
-      },
-      'msbuild_configuration_attributes': {
-        'OutputDirectory':'$(SolutionDir)VC100/',
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/express/base/base.gyp b/tools/gyp/test/msvs/express/base/base.gyp
deleted file mode 100644
index b7c9fc6..0000000
--- a/tools/gyp/test/msvs/express/base/base.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-      ],
-    },
-    {
-      'target_name': 'b',
-      'type': 'static_library',
-      'sources': [
-        'b.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/express/express.gyp b/tools/gyp/test/msvs/express/express.gyp
deleted file mode 100644
index 917abe2..0000000
--- a/tools/gyp/test/msvs/express/express.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'express',
-      'type': 'executable',
-      'dependencies': [
-        'base/base.gyp:a',
-        'base/base.gyp:b',
-      ],
-      'sources': [
-        'main.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/express/gyptest-express.py b/tools/gyp/test/msvs/express/gyptest-express.py
deleted file mode 100755
index 54c06f6..0000000
--- a/tools/gyp/test/msvs/express/gyptest-express.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that flat solutions get generated for Express versions of
-Visual Studio.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['msvs'])
-
-test.run_gyp('express.gyp', '-G', 'msvs_version=2005')
-test.must_contain('express.sln', '(base)')
-
-test.run_gyp('express.gyp', '-G', 'msvs_version=2008')
-test.must_contain('express.sln', '(base)')
-
-test.run_gyp('express.gyp', '-G', 'msvs_version=2005e')
-test.must_not_contain('express.sln', '(base)')
-
-test.run_gyp('express.gyp', '-G', 'msvs_version=2008e')
-test.must_not_contain('express.sln', '(base)')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/external_builder/external.gyp b/tools/gyp/test/msvs/external_builder/external.gyp
deleted file mode 100644
index abe5b58..0000000
--- a/tools/gyp/test/msvs/external_builder/external.gyp
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    # the test driver switches this flag when testing external builder
-    'use_external_builder%': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'external',
-      'type': 'executable',
-      'sources': [
-        'hello.cpp',
-        'hello.z',
-      ],
-      'rules': [
-        {
-          'rule_name': 'test_rule',
-          'extension': 'z',
-          'outputs': [
-            'msbuild_rule.out',
-          ],
-          'action': [
-            'python',
-            'msbuild_rule.py',
-            '<(RULE_INPUT_PATH)',
-            'a', 'b', 'c',
-          ],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'actions': [
-        {
-          'action_name': 'test action',
-          'inputs': [
-            'msbuild_action.py',
-          ],
-          'outputs': [
-            'msbuild_action.out',
-          ],
-          'action': [
-            'python',
-            '<@(_inputs)',
-            'x', 'y', 'z',
-          ],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'conditions': [
-        ['use_external_builder==1', {
-          'msvs_external_builder': 'test',
-          'msvs_external_builder_build_cmd': [
-            'python',
-            'external_builder.py',
-            'build', '1', '2', '3',
-          ],
-          'msvs_external_builder_clean_cmd': [
-            'python',
-            'external_builder.py',
-            'clean', '4', '5',
-          ],
-        }],
-      ],
-    },
-  ],
-}
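In external.gyp above, the trailing '%' in 'use_external_builder%' marks a default that defines supplied at gyp time can override; the driver test below flips it through the GYP_DEFINES environment variable. A hedged sketch of doing the same for a single invocation (it assumes a gyp executable is on PATH):

# Illustrative only: set the variable for one run, the same way the
# gyptest driver below does it via GYP_DEFINES.
import os
import subprocess

env = dict(os.environ, GYP_DEFINES='use_external_builder=1')
subprocess.check_call(['gyp', 'external.gyp'], env=env)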
diff --git a/tools/gyp/test/msvs/external_builder/external_builder.py b/tools/gyp/test/msvs/external_builder/external_builder.py
deleted file mode 100644
index ddfc1e5..0000000
--- a/tools/gyp/test/msvs/external_builder/external_builder.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-with open('external_builder.out', 'w') as f:
-  f.write(' '.join(sys.argv))
-
diff --git a/tools/gyp/test/msvs/external_builder/gyptest-all.py b/tools/gyp/test/msvs/external_builder/gyptest-all.py
deleted file mode 100644
index 72faa7a..0000000
--- a/tools/gyp/test/msvs/external_builder/gyptest-all.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that msvs_external_builder being set will invoke the provided
-msvs_external_builder_build_cmd and msvs_external_builder_clean_cmd, and will
-not invoke MSBuild actions and rules.
-"""
-
-import os
-import sys
-import TestGyp
-
-if int(os.environ.get('GYP_MSVS_VERSION', 0)) < 2010:
-  sys.exit(0)
-
-test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
-
-# without the flag set
-test.run_gyp('external.gyp')
-test.build('external.gyp', target='external')
-test.must_not_exist('external_builder.out')
-test.must_exist('msbuild_rule.out')
-test.must_exist('msbuild_action.out')
-test.must_match('msbuild_rule.out', 'msbuild_rule.py hello.z a b c')
-test.must_match('msbuild_action.out', 'msbuild_action.py x y z')
-os.remove('msbuild_rule.out')
-os.remove('msbuild_action.out')
-
-# with the flag set, using Build
-try:
-  os.environ['GYP_DEFINES'] = 'use_external_builder=1'
-  test.run_gyp('external.gyp')
-  test.build('external.gyp', target='external')
-finally:
-  del os.environ['GYP_DEFINES']
-test.must_not_exist('msbuild_rule.out')
-test.must_not_exist('msbuild_action.out')
-test.must_exist('external_builder.out')
-test.must_match('external_builder.out', 'external_builder.py build 1 2 3')
-os.remove('external_builder.out')
-
-# with the flag set, using Clean
-try:
-  os.environ['GYP_DEFINES'] = 'use_external_builder=1'
-  test.run_gyp('external.gyp')
-  test.build('external.gyp', target='external', clean=True)
-finally:
-  del os.environ['GYP_DEFINES']
-test.must_not_exist('msbuild_rule.out')
-test.must_not_exist('msbuild_action.out')
-test.must_exist('external_builder.out')
-test.must_match('external_builder.out', 'external_builder.py clean 4 5')
-os.remove('external_builder.out')
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/external_builder/hello.cpp b/tools/gyp/test/msvs/external_builder/hello.cpp
deleted file mode 100644
index bc0c026..0000000
--- a/tools/gyp/test/msvs/external_builder/hello.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int main(void) {
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/external_builder/hello.z b/tools/gyp/test/msvs/external_builder/hello.z
deleted file mode 100644
index aa47882..0000000
--- a/tools/gyp/test/msvs/external_builder/hello.z
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-This file will be passed to the test rule.
-
diff --git a/tools/gyp/test/msvs/external_builder/msbuild_action.py b/tools/gyp/test/msvs/external_builder/msbuild_action.py
deleted file mode 100644
index 632d786..0000000
--- a/tools/gyp/test/msvs/external_builder/msbuild_action.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-with open('msbuild_action.out', 'w') as f:
-  f.write(' '.join(sys.argv))
-
diff --git a/tools/gyp/test/msvs/external_builder/msbuild_rule.py b/tools/gyp/test/msvs/external_builder/msbuild_rule.py
deleted file mode 100644
index 0d6e315..0000000
--- a/tools/gyp/test/msvs/external_builder/msbuild_rule.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys, os.path
-
-sys.argv[1] = os.path.basename(sys.argv[1])
-
-with open('msbuild_rule.out', 'w') as f:
-  f.write(' '.join(sys.argv))
-
diff --git a/tools/gyp/test/msvs/filters/filters.gyp b/tools/gyp/test/msvs/filters/filters.gyp
deleted file mode 100644
index a4106dc..0000000
--- a/tools/gyp/test/msvs/filters/filters.gyp
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'no_source_files',
-      'type': 'none',
-      'sources': [ ],
-    },
-    {
-      'target_name': 'one_source_file',
-      'type': 'executable',
-      'sources': [
-        '../folder/a.c',
-      ],
-    },
-    {
-      'target_name': 'two_source_files',
-      'type': 'executable',
-      'sources': [
-        '../folder/a.c',
-        '../folder/b.c',
-      ],
-    },
-    {
-      'target_name': 'three_files_in_two_folders',
-      'type': 'executable',
-      'sources': [
-        '../folder1/a.c',
-        '../folder1/b.c',
-        '../folder2/c.c',
-      ],
-    },
-    {
-      'target_name': 'nested_folders',
-      'type': 'executable',
-      'sources': [
-        '../folder1/nested/a.c',
-        '../folder2/d.c',
-        '../folder1/nested/b.c',
-        '../folder1/other/c.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/filters/gyptest-filters-2008.py b/tools/gyp/test/msvs/filters/gyptest-filters-2008.py
deleted file mode 100644
index 41ca085..0000000
--- a/tools/gyp/test/msvs/filters/gyptest-filters-2008.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that extra filters are pruned correctly for Visual Studio 2008.
-"""
-
-import re
-import TestGyp
-
-
-def strip_ws(str):
-    return re.sub('^ +', '', str, flags=re.M).replace('\n', '')
-
-
-test = TestGyp.TestGyp(formats=['msvs'])
-
-test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2008')
-
-test.must_contain('no_source_files.vcproj', '<Files/>')
-
-test.must_contain('one_source_file.vcproj', strip_ws('''\
-<Files>
-  <File RelativePath="..\\folder\\a.c"/>
-</Files>
-'''))
-
-test.must_contain('two_source_files.vcproj', strip_ws('''\
-<Files>
-  <File RelativePath="..\\folder\\a.c"/>
-  <File RelativePath="..\\folder\\b.c"/>
-</Files>
-'''))
-
-test.must_contain('three_files_in_two_folders.vcproj', strip_ws('''\
-<Files>
-  <Filter Name="folder1">
-    <File RelativePath="..\\folder1\\a.c"/>
-    <File RelativePath="..\\folder1\\b.c"/>
-  </Filter>
-  <Filter Name="folder2">
-    <File RelativePath="..\\folder2\\c.c"/>
-  </Filter>
-</Files>
-'''))
-
-test.must_contain('nested_folders.vcproj', strip_ws('''\
-<Files>
-  <Filter Name="folder1">
-    <Filter Name="nested">
-      <File RelativePath="..\\folder1\\nested\\a.c"/>
-      <File RelativePath="..\\folder1\\nested\\b.c"/>
-    </Filter>
-    <Filter Name="other">
-      <File RelativePath="..\\folder1\\other\\c.c"/>
-    </Filter>
-  </Filter>
-  <Filter Name="folder2">
-    <File RelativePath="..\\folder2\\d.c"/>
-  </Filter>
-</Files>
-'''))
-
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/filters/gyptest-filters-2010.py b/tools/gyp/test/msvs/filters/gyptest-filters-2010.py
deleted file mode 100644
index d8131d5..0000000
--- a/tools/gyp/test/msvs/filters/gyptest-filters-2010.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that extra filters are pruned correctly for Visual Studio 2010
-and later.
-"""
-
-import TestGyp
-
-
-test = TestGyp.TestGyp(formats=['msvs'])
-
-test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2010')
-
-test.must_not_exist('no_source_files.vcxproj.filters')
-
-test.must_not_exist('one_source_file.vcxproj.filters')
-
-test.must_not_exist('two_source_files.vcxproj.filters')
-
-test.must_contain('three_files_in_two_folders.vcxproj.filters', '''\
-  <ItemGroup>
-    <ClCompile Include="..\\folder1\\a.c">
-      <Filter>folder1</Filter>
-    </ClCompile>
-    <ClCompile Include="..\\folder1\\b.c">
-      <Filter>folder1</Filter>
-    </ClCompile>
-    <ClCompile Include="..\\folder2\\c.c">
-      <Filter>folder2</Filter>
-    </ClCompile>
-  </ItemGroup>
-'''.replace('\n', '\r\n'))
-
-test.must_contain('nested_folders.vcxproj.filters', '''\
-  <ItemGroup>
-    <ClCompile Include="..\\folder1\\nested\\a.c">
-      <Filter>folder1\\nested</Filter>
-    </ClCompile>
-    <ClCompile Include="..\\folder2\\d.c">
-      <Filter>folder2</Filter>
-    </ClCompile>
-    <ClCompile Include="..\\folder1\\nested\\b.c">
-      <Filter>folder1\\nested</Filter>
-    </ClCompile>
-    <ClCompile Include="..\\folder1\\other\\c.c">
-      <Filter>folder1\\other</Filter>
-    </ClCompile>
-  </ItemGroup>
-'''.replace('\n', '\r\n'))
-
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/list_excluded/gyptest-all.py b/tools/gyp/test/msvs/list_excluded/gyptest-all.py
deleted file mode 100644
index 5a370f6..0000000
--- a/tools/gyp/test/msvs/list_excluded/gyptest-all.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that msvs_list_excluded_files=0 doesn't list files that would
-normally be in _excluded_files, and that those files are still listed when
-the flag is not set.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
-
-
-# with the flag set to 0
-try:
-  os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=0'
-  test.run_gyp('hello_exclude.gyp')
-finally:
-  del os.environ['GYP_GENERATOR_FLAGS']
-if test.uses_msbuild:
-  test.must_not_contain('hello.vcxproj', 'hello_mac')
-else:
-  test.must_not_contain('hello.vcproj', 'hello_mac')
-
-
-# with the flag not set
-test.run_gyp('hello_exclude.gyp')
-if test.uses_msbuild:
-  test.must_contain('hello.vcxproj', 'hello_mac')
-else:
-  test.must_contain('hello.vcproj', 'hello_mac')
-
-
-# with the flag explicitly set to 1
-try:
-  os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=1'
-  test.run_gyp('hello_exclude.gyp')
-finally:
-  del os.environ['GYP_GENERATOR_FLAGS']
-if test.uses_msbuild:
-  test.must_contain('hello.vcxproj', 'hello_mac')
-else:
-  test.must_contain('hello.vcproj', 'hello_mac')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/list_excluded/hello.cpp b/tools/gyp/test/msvs/list_excluded/hello.cpp
deleted file mode 100644
index bc0c026..0000000
--- a/tools/gyp/test/msvs/list_excluded/hello.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int main(void) {
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/list_excluded/hello_exclude.gyp b/tools/gyp/test/msvs/list_excluded/hello_exclude.gyp
deleted file mode 100644
index aa160f2..0000000
--- a/tools/gyp/test/msvs/list_excluded/hello_exclude.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.cpp',
-        'hello_mac.cpp',
-      ],
-      'conditions': [
-        ['OS!="mac"', {'sources!': ['hello_mac.cpp']}],
-      ]
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/list_excluded/hello_mac.cpp b/tools/gyp/test/msvs/list_excluded/hello_mac.cpp
deleted file mode 100644
index b9f6242..0000000
--- a/tools/gyp/test/msvs/list_excluded/hello_mac.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int hello2() {
-  printf("Hello, two!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/missing_sources/gyptest-missing.py b/tools/gyp/test/msvs/missing_sources/gyptest-missing.py
deleted file mode 100644
index 62a99ef..0000000
--- a/tools/gyp/test/msvs/missing_sources/gyptest-missing.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that missing 'sources' files are treated as fatal errors when the
-generator flag 'msvs_error_on_missing_sources' is set.
-"""
-
-import TestGyp
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'], workdir='workarea_all')
-
-  # With the flag not set
-  test.run_gyp('hello_missing.gyp')
-
-  # With the flag explicitly set to 0
-  try:
-    os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_error_on_missing_sources=0'
-    test.run_gyp('hello_missing.gyp')
-  finally:
-    del os.environ['GYP_GENERATOR_FLAGS']
-
-  # With the flag explicitly set to 1
-  try:
-    os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_error_on_missing_sources=1'
-    # Test to make sure GYP raises an exception (exit status 1). Since this will
-    # also print a backtrace, ensure that TestGyp is not checking that stderr is
-    # empty by specifying None, which means do not perform any checking.
-    # Instead, stderr is checked below to ensure it contains the expected
-    # output.
-    test.run_gyp('hello_missing.gyp', status=1, stderr=None)
-  finally:
-    del os.environ['GYP_GENERATOR_FLAGS']
-  test.must_contain_any_line(test.stderr(),
-                            ["Missing input files:"])
-
-  test.pass_test()
diff --git a/tools/gyp/test/msvs/missing_sources/hello_missing.gyp b/tools/gyp/test/msvs/missing_sources/hello_missing.gyp
deleted file mode 100644
index c08926b..0000000
--- a/tools/gyp/test/msvs/missing_sources/hello_missing.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello_missing.cpp',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/multiple_actions_error_handling/action_fail.py b/tools/gyp/test/msvs/multiple_actions_error_handling/action_fail.py
deleted file mode 100644
index 286fc4e..0000000
--- a/tools/gyp/test/msvs/multiple_actions_error_handling/action_fail.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-sys.exit(1)
diff --git a/tools/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py b/tools/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py
deleted file mode 100644
index 3554373..0000000
--- a/tools/gyp/test/msvs/multiple_actions_error_handling/action_succeed.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-sys.exit(0)
diff --git a/tools/gyp/test/msvs/multiple_actions_error_handling/actions.gyp b/tools/gyp/test/msvs/multiple_actions_error_handling/actions.gyp
deleted file mode 100644
index ab99e92..0000000
--- a/tools/gyp/test/msvs/multiple_actions_error_handling/actions.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'actions-test',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'first action (fails)',
-          'inputs': [
-            'action_fail.py',
-          ],
-          'outputs': [
-            'ALWAYS_OUT_OF_DATE',
-          ],
-          'action': [
-            'python', '<@(_inputs)'
-          ],
-          'msvs_cygwin_shell': 0,
-        },
-        {
-          'action_name': 'second action (succeeds)',
-          'inputs': [
-            'action_succeed.py',
-          ],
-          'outputs': [
-            'ALWAYS_OUT_OF_DATE',
-          ],
-          'action': [
-            'python', '<@(_inputs)'
-          ],
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/multiple_actions_error_handling/gyptest.py b/tools/gyp/test/msvs/multiple_actions_error_handling/gyptest.py
deleted file mode 100644
index 3aa6b8f..0000000
--- a/tools/gyp/test/msvs/multiple_actions_error_handling/gyptest.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that failing actions make the build fail reliably, even when there
-are multiple actions in one project.
-"""
-
-import os
-import sys
-import TestGyp
-import TestCmd
-
-test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
-
-test.run_gyp('actions.gyp')
-test.build('actions.gyp',
-           target='actions-test',
-           status=1,
-           stdout=r'.*"cmd\.exe" exited with code 1\..*',
-           match=TestCmd.match_re_dotall)
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/props/AppName.props b/tools/gyp/test/msvs/props/AppName.props
deleted file mode 100644
index b688f66..0000000
--- a/tools/gyp/test/msvs/props/AppName.props
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-  <PropertyGroup Label="UserMacros">
-    <AppName>Greet</AppName>
-  </PropertyGroup>
-  <PropertyGroup>
-    <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
-  </PropertyGroup>
-  <ItemGroup>
-    <BuildMacro Include="AppName">
-      <Value>$(AppName)</Value>
-    </BuildMacro>
-  </ItemGroup>
-</Project>
diff --git a/tools/gyp/test/msvs/props/AppName.vsprops b/tools/gyp/test/msvs/props/AppName.vsprops
deleted file mode 100644
index 84b9af3..0000000
--- a/tools/gyp/test/msvs/props/AppName.vsprops
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="Windows-1252"?>
-<VisualStudioPropertySheet
-    ProjectType="Visual C++"
-    Version="8.00"
-    Name="Common"
-    >
-    <UserMacro
-        Name="AppName"
-        Value="Greet"
-    />
-</VisualStudioPropertySheet>
diff --git a/tools/gyp/test/msvs/props/gyptest-props.py b/tools/gyp/test/msvs/props/gyptest-props.py
deleted file mode 100644
index abd4df2..0000000
--- a/tools/gyp/test/msvs/props/gyptest-props.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies props files are added by using a
-props file to set the name of the built executable.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_all', formats=['msvs'])
-
-test.run_gyp('hello.gyp')
-
-test.build('hello.gyp')
-
-test.built_file_must_exist('Greet.exe')
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/props/hello.c b/tools/gyp/test/msvs/props/hello.c
deleted file mode 100644
index faadc75..0000000
--- a/tools/gyp/test/msvs/props/hello.c
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/props/hello.gyp b/tools/gyp/test/msvs/props/hello.gyp
deleted file mode 100644
index 5a58317..0000000
--- a/tools/gyp/test/msvs/props/hello.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'product_name': '$(AppName)',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-      'msvs_props': [
-        '$(SolutionDir)AppName.vsprops'
-      ],
-      'msbuild_props': [
-        '$(SolutionDir)AppName.props'
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/dummy.bar b/tools/gyp/test/msvs/rules_stdout_stderr/dummy.bar
deleted file mode 100644
index 2517869..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/dummy.bar
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-A dummy file with the .bar extension (used for stderr rule).
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/dummy.foo b/tools/gyp/test/msvs/rules_stdout_stderr/dummy.foo
deleted file mode 100644
index 6a7990b..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/dummy.foo
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-A dummy file with the .foo extension (used for stdout rule).
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py b/tools/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py
deleted file mode 100644
index 804505a..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/gyptest-rules-stdout-stderr.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Verifies that stdout and stderr from rules get logged in the build's
-stdout."""
-
-import sys
-import TestGyp
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs'])
-
-  test.run_gyp('rules-stdout-stderr.gyp')
-  test.build('rules-stdout-stderr.gyp', test.ALL)
-
-  expected_stdout_lines = [
-    'testing stdout',
-    'This will go to stdout',
-
-    # Note: stderr output from rules will go to the build's stdout.
-    'testing stderr',
-    'This will go to stderr',
-  ]
-  test.must_contain_all_lines(test.stdout(), expected_stdout_lines)
-
-  test.pass_test()
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py b/tools/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py
deleted file mode 100644
index f486062..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/rule_stderr.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-print >>sys.stderr, "This will go to stderr"
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py b/tools/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py
deleted file mode 100644
index 2b58d2a..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/rule_stdout.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-print "This will go to stdout"
diff --git a/tools/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp b/tools/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp
deleted file mode 100644
index ce93643..0000000
--- a/tools/gyp/test/msvs/rules_stdout_stderr/rules-stdout-stderr.gyp
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'none',
-      'sources': [
-        'dummy.foo',
-        'dummy.bar',
-      ],
-      'rules': [
-        {
-          'rule_name': 'test_stdout',
-          'extension': 'foo',
-          'message': 'testing stdout',
-          'msvs_cygwin_shell': 0,
-          'inputs': [
-            'rule_stdout.py',
-          ],
-          'outputs': [
-            'dummy.foo_output',
-          ],
-          'action': [
-            'python',
-            'rule_stdout.py',
-            '<(RULE_INPUT_PATH)',
-          ],
-        },
-        {
-          'rule_name': 'test_stderr',
-          'extension': 'bar',
-          'message': 'testing stderr',
-          'msvs_cygwin_shell': 0,
-          'inputs': [
-            'rule_stderr.py',
-          ],
-          'outputs': [
-            'dummy.bar_output',
-          ],
-          'action': [
-            'python',
-            'rule_stderr.py',
-            '<(RULE_INPUT_PATH)',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/shared_output/common.gypi b/tools/gyp/test/msvs/shared_output/common.gypi
deleted file mode 100644
index c6fa341..0000000
--- a/tools/gyp/test/msvs/shared_output/common.gypi
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'default_configuration': 'Baz',
-    'configurations': {
-      'Baz': {
-        'msvs_configuration_attributes': {
-          'OutputDirectory': '<(DEPTH)/foo',
-          'IntermediateDirectory': '$(OutDir)/bar',
-        },
-      },
-    },
-  },
-}
diff --git a/tools/gyp/test/msvs/shared_output/gyptest-shared_output.py b/tools/gyp/test/msvs/shared_output/gyptest-shared_output.py
deleted file mode 100644
index 270b280..0000000
--- a/tools/gyp/test/msvs/shared_output/gyptest-shared_output.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test checking that IntermediateDirectory can be defined in terms of
-OutputDirectory. We previously had emitted the definition of
-IntermediateDirectory before the definition of OutputDirectory.
-This is required so that $(IntDir) can be based on $(OutDir).
-"""
-
-import TestGyp
-import os
-
-# NOTE: This test really is vcbuild/msbuild specific (not applicable to windows
-#       ninja), as it is testing the msvs output location when opening an .sln
-#       other than all.sln.
-test = TestGyp.TestGyp(workdir='workarea_shared_output', formats=['msvs'])
-
-test.run_gyp('hello.gyp')
-test.set_configuration('Baz')
-
-test.build('there/there.gyp', test.ALL)
-test.must_exist(os.path.join(test.workdir, 'foo', 'there.exe'))
-test.must_exist(os.path.join(test.workdir, 'foo', 'bar', 'there.obj'))
-
-test.build('hello.gyp', test.ALL)
-test.must_exist(os.path.join(test.workdir, 'foo', 'hello.exe'))
-test.must_exist(os.path.join(test.workdir, 'foo', 'bar', 'hello.obj'))
-
-if test.format == 'msvs':
-  if test.uses_msbuild:
-    test.must_contain('pull_in_there.vcxproj',
-      '<IntDir>$(OutDir)bar\\</IntDir>')
-  else:
-    test.must_contain('pull_in_there.vcproj',
-      'IntermediateDirectory="$(OutDir)bar\\"')
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/shared_output/hello.c b/tools/gyp/test/msvs/shared_output/hello.c
deleted file mode 100644
index 698e4fd..0000000
--- a/tools/gyp/test/msvs/shared_output/hello.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-int main(void) {
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/shared_output/hello.gyp b/tools/gyp/test/msvs/shared_output/hello.gyp
deleted file mode 100644
index f80e5cf..0000000
--- a/tools/gyp/test/msvs/shared_output/hello.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': ['common.gypi'],
-  'targets': [
-    {
-      'target_name': 'pull_in_there',
-      'type': 'none',
-      'dependencies': ['there/there.gyp:*'],
-    },
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/shared_output/there/there.c b/tools/gyp/test/msvs/shared_output/there/there.c
deleted file mode 100644
index 698e4fd..0000000
--- a/tools/gyp/test/msvs/shared_output/there/there.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/*
- * Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-int main(void) {
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/shared_output/there/there.gyp b/tools/gyp/test/msvs/shared_output/there/there.gyp
deleted file mode 100644
index 56feff3..0000000
--- a/tools/gyp/test/msvs/shared_output/there/there.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': ['../common.gypi'],
-  'targets': [
-    {
-      'target_name': 'there',
-      'type': 'executable',
-      'sources': [
-        'there.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/uldi2010/gyptest-all.py b/tools/gyp/test/msvs/uldi2010/gyptest-all.py
deleted file mode 100644
index cc248fb..0000000
--- a/tools/gyp/test/msvs/uldi2010/gyptest-all.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that uldi can be disabled on a per-project-reference basis in vs2010.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
-
-test.run_gyp('hello.gyp')
-
-if test.uses_msbuild:
-  test.must_contain('hello.vcxproj', '<UseLibraryDependencyInputs>false')
-
-test.pass_test()
diff --git a/tools/gyp/test/msvs/uldi2010/hello.c b/tools/gyp/test/msvs/uldi2010/hello.c
deleted file mode 100644
index 06e6a02..0000000
--- a/tools/gyp/test/msvs/uldi2010/hello.c
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-extern int hello2();
-
-int main(void) {
-  printf("Hello, world!\n");
-  hello2();
-  return 0;
-}
diff --git a/tools/gyp/test/msvs/uldi2010/hello.gyp b/tools/gyp/test/msvs/uldi2010/hello.gyp
deleted file mode 100644
index a2bf2ba..0000000
--- a/tools/gyp/test/msvs/uldi2010/hello.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-      'dependencies': [
-        'hellolib',
-      ]
-    },
-    {
-      'target_name': 'hellolib',
-      'type': 'static_library',
-      'sources': [
-        'hello2.c',
-      ],
-      'msvs_2010_disable_uldi_when_referenced': 1,
-    },
-  ],
-}
diff --git a/tools/gyp/test/msvs/uldi2010/hello2.c b/tools/gyp/test/msvs/uldi2010/hello2.c
deleted file mode 100644
index e2f2323..0000000
--- a/tools/gyp/test/msvs/uldi2010/hello2.c
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int hello2() {
-  printf("Hello, two!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/multiple-targets/gyptest-all.py b/tools/gyp/test/multiple-targets/gyptest-all.py
deleted file mode 100755
index 3ef5009..0000000
--- a/tools/gyp/test/multiple-targets/gyptest-all.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('multiple.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('multiple.gyp', test.ALL, chdir='relocate/src', stderr=None)
-
-expect1 = """\
-hello from prog1.c
-hello from common.c
-"""
-
-expect2 = """\
-hello from prog2.c
-hello from common.c
-"""
-
-test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
-test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/multiple-targets/gyptest-default.py b/tools/gyp/test/multiple-targets/gyptest-default.py
deleted file mode 100755
index db15d79..0000000
--- a/tools/gyp/test/multiple-targets/gyptest-default.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('multiple.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('multiple.gyp', chdir='relocate/src')
-
-expect1 = """\
-hello from prog1.c
-hello from common.c
-"""
-
-expect2 = """\
-hello from prog2.c
-hello from common.c
-"""
-
-test.run_built_executable('prog1', stdout=expect1, chdir='relocate/src')
-test.run_built_executable('prog2', stdout=expect2, chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/multiple-targets/src/common.c b/tools/gyp/test/multiple-targets/src/common.c
deleted file mode 100644
index f1df7c1..0000000
--- a/tools/gyp/test/multiple-targets/src/common.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-void common(void)
-{
-  printf("hello from common.c\n");
-  return;
-}
diff --git a/tools/gyp/test/multiple-targets/src/multiple.gyp b/tools/gyp/test/multiple-targets/src/multiple.gyp
deleted file mode 100644
index 3db4ea3..0000000
--- a/tools/gyp/test/multiple-targets/src/multiple.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'sources': [
-        'prog1.c',
-        'common.c',
-      ],
-    },
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'sources': [
-        'prog2.c',
-        'common.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/multiple-targets/src/prog1.c b/tools/gyp/test/multiple-targets/src/prog1.c
deleted file mode 100644
index fbf8d4c..0000000
--- a/tools/gyp/test/multiple-targets/src/prog1.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void common(void);
-
-int main(void)
-{
-  printf("hello from prog1.c\n");
-  common();
-  return 0;
-}
diff --git a/tools/gyp/test/multiple-targets/src/prog2.c b/tools/gyp/test/multiple-targets/src/prog2.c
deleted file mode 100644
index a94b5c1..0000000
--- a/tools/gyp/test/multiple-targets/src/prog2.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void common(void);
-
-int main(void)
-{
-  printf("hello from prog2.c\n");
-  common();
-  return 0;
-}
diff --git a/tools/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py b/tools/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py
deleted file mode 100644
index 7147fd2..0000000
--- a/tools/gyp/test/ninja/action-rule-hash/gyptest-action-rule-hash.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that running gyp in a different directory does not cause actions and
-rules to rerun.
-"""
-
-import os
-import sys
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['ninja'])
-# The xcode-ninja generator handles gypfiles which are not at the
-# project root incorrectly.
-# cf. https://code.google.com/p/gyp/issues/detail?id=460
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('subdir/action-rule-hash.gyp')
-test.build('subdir/action-rule-hash.gyp', test.ALL)
-test.up_to_date('subdir/action-rule-hash.gyp')
-
-# Verify that everything is still up-to-date when we re-invoke gyp from a
-# different directory.
-test.run_gyp('action-rule-hash.gyp', '--depth=../', chdir='subdir')
-test.up_to_date('subdir/action-rule-hash.gyp')
-
-test.pass_test()
diff --git a/tools/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp b/tools/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp
deleted file mode 100644
index 0e88a30..0000000
--- a/tools/gyp/test/ninja/action-rule-hash/subdir/action-rule-hash.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        '<(INTERMEDIATE_DIR)/main.cc',
-      ],
-      'actions': [
-        {
-          'action_name': 'emit_main_cc',
-          'inputs': ['emit.py'],
-          'outputs': ['<(INTERMEDIATE_DIR)/main.cc'],
-          'action': [
-            'python',
-            'emit.py',
-            '<(INTERMEDIATE_DIR)/main.cc',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/action-rule-hash/subdir/emit.py b/tools/gyp/test/ninja/action-rule-hash/subdir/emit.py
deleted file mode 100644
index fcb715a..0000000
--- a/tools/gyp/test/ninja/action-rule-hash/subdir/emit.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write('int main() {\n')
-f.write('  return 0;\n')
-f.write('}\n')
-f.close()
diff --git a/tools/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py b/tools/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py
deleted file mode 100755
index cb59d7e..0000000
--- a/tools/gyp/test/ninja/action_dependencies/gyptest-action-dependencies.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that building an object file correctly depends on running actions in
-dependent targets, but not the targets themselves.
-"""
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-# NOTE(piman): This test will not work with other generators because:
-# - it explicitly tests the optimization, which is not implemented (yet?) on
-# other generators
-# - it relies on the exact path to output object files, which is generator
-# dependent, and actually, relies on the ability to build only that object file,
-# which I don't think is available on all generators.
-# TODO(piman): Extend to other generators when possible.
-test = TestGyp.TestGyp(formats=['ninja'])
-# xcode-ninja doesn't support building single object files by design.
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('action_dependencies.gyp', chdir='src')
-
-chdir = 'relocate/src'
-test.relocate('src', chdir)
-
-objext = '.obj' if sys.platform == 'win32' else '.o'
-
-test.build('action_dependencies.gyp',
-           os.path.join('obj', 'b.b' + objext),
-           chdir=chdir)
-
-# The 'a' actions should be run (letting b.c compile), but the a static library
-# should not be built.
-test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_exist(os.path.join('obj', 'b.b' + objext), chdir=chdir)
-
-test.build('action_dependencies.gyp',
-           os.path.join('obj', 'c.c' + objext),
-           chdir=chdir)
-
-# 'a' and 'b' should be built, so that the 'c' action succeeds, letting c.c
-# compile
-test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir)
-test.built_file_must_exist('b', type=test.EXECUTABLE, chdir=chdir)
-test.built_file_must_exist(os.path.join('obj', 'c.c' + objext), chdir=chdir)
-
-
-test.pass_test()
diff --git a/tools/gyp/test/ninja/action_dependencies/src/a.c b/tools/gyp/test/ninja/action_dependencies/src/a.c
deleted file mode 100644
index 4d7af9b..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/a.c
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include "a.h"
-
-int funcA() {
-  return 42;
-}
diff --git a/tools/gyp/test/ninja/action_dependencies/src/a.h b/tools/gyp/test/ninja/action_dependencies/src/a.h
deleted file mode 100644
index 335db56..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/a.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#ifndef A_H_
-#define A_H_
-
-#include "a/generated.h"
-
-int funcA();
-
-#endif  // A_H_
diff --git a/tools/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp b/tools/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp
deleted file mode 100644
index 5baa7a7..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/action_dependencies.gyp
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'static_library',
-      'sources': [
-        'a.c',
-        'a.h',
-      ],
-      'actions': [
-        {
-          'action_name': 'generate_headers',
-          'inputs': [
-            'emit.py'
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/a/generated.h'
-          ],
-          'action': [
-            'python',
-            'emit.py',
-            '<(SHARED_INTERMEDIATE_DIR)/a/generated.h',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '<(SHARED_INTERMEDIATE_DIR)',
-        ],
-      },
-    },
-    {
-      'target_name': 'b',
-      'type': 'executable',
-      'sources': [
-        'b.c',
-        'b.h',
-      ],
-      'dependencies': [
-        'a',
-      ],
-    },
-    {
-      'target_name': 'c',
-      'type': 'static_library',
-      'sources': [
-        'c.c',
-        'c.h',
-      ],
-      'dependencies': [
-        'b',
-      ],
-      'actions': [
-        {
-          'action_name': 'generate_headers',
-          'inputs': [
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/c/generated.h'
-          ],
-          'action': [
-            '<(PRODUCT_DIR)/b',
-            '<(SHARED_INTERMEDIATE_DIR)/c/generated.h',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '<(SHARED_INTERMEDIATE_DIR)',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/action_dependencies/src/b.c b/tools/gyp/test/ninja/action_dependencies/src/b.c
deleted file mode 100644
index 8244646..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/b.c
+++ /dev/null
@@ -1,18 +0,0 @@
-/* Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-#include "b.h"
-
-int main(int argc, char** argv) {
-  FILE* f;
-  if (argc < 2)
-    return 1;
-  f = fopen(argv[1], "wt");
-  fprintf(f, "#define VALUE %d\n", funcA());
-  fclose(f);
-  return 0;
-}
diff --git a/tools/gyp/test/ninja/action_dependencies/src/b.h b/tools/gyp/test/ninja/action_dependencies/src/b.h
deleted file mode 100644
index 91362cd..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/b.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#ifndef B_H_
-#define B_H_
-
-#include "a.h"
-
-int funcB();
-
-#endif  // B_H_
diff --git a/tools/gyp/test/ninja/action_dependencies/src/c.c b/tools/gyp/test/ninja/action_dependencies/src/c.c
deleted file mode 100644
index b412087..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/c.c
+++ /dev/null
@@ -1,10 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include "c.h"
-
-int funcC() {
-  return VALUE;
-}
diff --git a/tools/gyp/test/ninja/action_dependencies/src/c.h b/tools/gyp/test/ninja/action_dependencies/src/c.h
deleted file mode 100644
index c81a45b..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/c.h
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#ifndef C_H_
-#define C_H_
-
-#include "c/generated.h"
-
-int funcC();
-
-#endif  // C_H_
diff --git a/tools/gyp/test/ninja/action_dependencies/src/emit.py b/tools/gyp/test/ninja/action_dependencies/src/emit.py
deleted file mode 100755
index 2df74b7..0000000
--- a/tools/gyp/test/ninja/action_dependencies/src/emit.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'wb')
-f.write('/* Hello World */\n')
-f.close()
diff --git a/tools/gyp/test/ninja/chained-dependency/chained-dependency.gyp b/tools/gyp/test/ninja/chained-dependency/chained-dependency.gyp
deleted file mode 100644
index 3fe68ae..0000000
--- a/tools/gyp/test/ninja/chained-dependency/chained-dependency.gyp
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    # This first target generates a header.
-    {
-      'target_name': 'generate_header',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'actions': [
-        {
-          'action_name': 'generate header',
-          'inputs': [],
-          'outputs': ['<(SHARED_INTERMEDIATE_DIR)/generated/header.h'],
-          'action': [
-            'python', '-c', 'open(<(_outputs), "w")'
-          ]
-        },
-      ],
-      'all_dependent_settings': {
-        'include_dirs': [
-          '<(SHARED_INTERMEDIATE_DIR)',
-        ],
-      },
-    },
-
-    # This intermediate target does nothing other than pull in a
-    # dependency on the above generated target.
-    {
-      'target_name': 'chain',
-      'type': 'none',
-      'dependencies': [
-        'generate_header',
-      ],
-    },
-
-    # This final target is:
-    # - a static library (so gyp doesn't transitively pull in dependencies);
-    # - that relies on the generated file two dependencies away.
-    {
-      'target_name': 'chained',
-      'type': 'static_library',
-      'dependencies': [
-        'chain',
-      ],
-      'sources': [
-        'chained.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/chained-dependency/chained.c b/tools/gyp/test/ninja/chained-dependency/chained.c
deleted file mode 100644
index c1ff1a7..0000000
--- a/tools/gyp/test/ninja/chained-dependency/chained.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#include "generated/header.h"
-
-int main(void) {
-  return 0;
-}
diff --git a/tools/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py b/tools/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py
deleted file mode 100755
index d8763f1..0000000
--- a/tools/gyp/test/ninja/chained-dependency/gyptest-chained-dependency.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that files generated by two-steps-removed actions are built before
-dependent compile steps.
-"""
-
-import os
-import sys
-import TestGyp
-
-# This test is Ninja-specific in that:
-# - the bug only showed nondeterministically in parallel builds;
-# - it relies on a ninja-specific output file path.
-
-test = TestGyp.TestGyp(formats=['ninja'])
-# xcode-ninja doesn't support building single object files by design.
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('chained-dependency.gyp')
-objext = '.obj' if sys.platform == 'win32' else '.o'
-test.build('chained-dependency.gyp',
-           os.path.join('obj', 'chained.chained' + objext))
-# The test passes if the .o file builds successfully.
-test.pass_test()
diff --git a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py b/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py
deleted file mode 100644
index 0bdca66..0000000
--- a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/gyptest-empty-and-non-empty-duplicate-name.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies a phony target isn't output if a target exists with the same name that
-was output.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['ninja'])
-
-# Reset xcode_ninja_target_pattern to its default for this test.
-test.run_gyp('test.gyp', '-G', 'xcode_ninja_target_pattern=^$')
-
-# Check for both \r and \n to cover both windows and linux.
-test.must_not_contain('out/Default/build.ninja', 'build empty_target: phony\r')
-test.must_not_contain('out/Default/build.ninja', 'build empty_target: phony\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp b/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp
deleted file mode 100644
index 1b9fc42..0000000
--- a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/subdir/included.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'empty_target',
-      'type': 'executable',
-      'sources': [
-        'test.cc',
-      ],
-    },
-    {
-      'target_name': 'included_empty_target',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp b/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp
deleted file mode 100644
index 9aa6287..0000000
--- a/tools/gyp/test/ninja/empty-and-non-empty-duplicate-name/test.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'All',
-      'type': 'none',
-      'dependencies': [
-        'subdir/included.gyp:included_empty_target'
-      ]
-    },
-    {
-      'target_name': 'empty_target',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py b/tools/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
deleted file mode 100644
index f56dbe5..0000000
--- a/tools/gyp/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure paths are normalized with VS macros properly expanded on Windows.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  test.run_gyp('normalize-paths.gyp')
-
-  # We can't use existence tests because any case will pass, so we check the
-  # contents of ninja files directly since that's what we're most concerned
-  # with anyway.
-  subninja = open(test.built_file_path('obj/some_target.ninja')).read()
-  if '$!product_dir' in subninja:
-    test.fail_test()
-  if 'out\\Default' in subninja:
-    test.fail_test()
-
-  second = open(test.built_file_path('obj/second.ninja')).read()
-  if ('..\\..\\things\\AnotherName.exe' in second or
-      'AnotherName.exe' not in second):
-    test.fail_test()
-
-  copytarget = open(test.built_file_path('obj/copy_target.ninja')).read()
-  if '$(VSInstallDir)' in copytarget:
-    test.fail_test()
-
-  action = open(test.built_file_path('obj/action.ninja')).read()
-  if '..\\..\\out\\Default' in action:
-    test.fail_test()
-  if '..\\..\\SomethingElse' in action or 'SomethingElse' not in action:
-    test.fail_test()
-  if '..\\..\\SomeOtherInput' in action or 'SomeOtherInput' not in action:
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/ninja/normalize-paths-win/hello.cc b/tools/gyp/test/ninja/normalize-paths-win/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/ninja/normalize-paths-win/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp b/tools/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp
deleted file mode 100644
index 544d064..0000000
--- a/tools/gyp/test/ninja/normalize-paths-win/normalize-paths.gyp
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'Some_Target',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '<(PRODUCT_DIR)/stuff/AnotherName.exe',
-        },
-      },
-      'sources': [
-        'HeLLo.cc',
-        'blOrP.idl',
-      ],
-    },
-    {
-      'target_name': 'second',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\things\\AnotherName.exe',
-        },
-      },
-      'sources': [
-        'HeLLo.cc',
-      ],
-    },
-    {
-      'target_name': 'Copy_Target',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)',
-          'files': [
-            '$(VSInstallDir)\\bin\\cl.exe',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'action',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'actions': [
-        {
-          'inputs': [
-            '$(IntDir)\\SomeInput',
-            '$(OutDir)\\SomeOtherInput',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/ReSuLt',
-            '<(SHARED_INTERMEDIATE_DIR)/TempFile',
-            '$(OutDir)\SomethingElse',
-          ],
-          'action_name': 'Test action',
-          # Unfortunately, we can't normalize this field because it's
-          # free-form. Fortunately, ninja doesn't inspect it at all (only the
-          # inputs and outputs) so it's not mandatory.
-          'action': [],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/s-needs-no-depfiles/empty.s b/tools/gyp/test/ninja/s-needs-no-depfiles/empty.s
deleted file mode 100644
index 218d892..0000000
--- a/tools/gyp/test/ninja/s-needs-no-depfiles/empty.s
+++ /dev/null
@@ -1 +0,0 @@
-# This file intentionally left blank.
diff --git a/tools/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py b/tools/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py
deleted file mode 100755
index 77a3245..0000000
--- a/tools/gyp/test/ninja/s-needs-no-depfiles/gyptest-s-needs-no-depfiles.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that .s files don't always trigger a rebuild, as would happen if depfiles
-were used for them (since clang & gcc ignore -MMD when building .s->.o on
-linux).
-"""
-
-import os
-import sys
-import TestCommon
-import TestGyp
-
-# NOTE(fischman): Each generator uses depfiles (or not) differently, so this is
-# a ninja-specific test.
-test = TestGyp.TestGyp(formats=['ninja'])
-
-if sys.platform == 'win32' or sys.platform == 'win64':
-  # This test is about clang/gcc vs. depfiles; VS gets a pass.
-  test.pass_test()
-  sys.exit(0)
-
-test.run_gyp('s-needs-no-depfiles.gyp')
-
-# Build the library, grab its timestamp, rebuild the library, ensure timestamp
-# hasn't changed.
-test.build('s-needs-no-depfiles.gyp', 'empty')
-empty_dll = test.built_file_path('empty', test.SHARED_LIB)
-test.built_file_must_exist(empty_dll)
-pre_stat = os.stat(test.built_file_path(empty_dll))
-test.sleep()
-test.build('s-needs-no-depfiles.gyp', 'empty')
-post_stat = os.stat(test.built_file_path(empty_dll))
-
-if pre_stat.st_mtime != post_stat.st_mtime:
-  test.fail_test()
-else:
-  test.pass_test()
diff --git a/tools/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp b/tools/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp
deleted file mode 100644
index bd66b1a..0000000
--- a/tools/gyp/test/ninja/s-needs-no-depfiles/s-needs-no-depfiles.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'empty',
-      'type': 'shared_library',
-      'sources': [ 'empty.s' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py b/tools/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
deleted file mode 100755
index 1b8e812..0000000
--- a/tools/gyp/test/ninja/solibs_avoid_relinking/gyptest-solibs-avoid-relinking.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that relinking a solib doesn't relink a dependent executable if the
-solib's public API hasn't changed.
-"""
-
-import os
-import sys
-import TestCommon
-import TestGyp
-
-# NOTE(fischman): This test will not work with other generators because the
-# API-hash-based-mtime-preservation optimization is only implemented in
-# ninja.py.  It could be extended to the make.py generator as well pretty
-# easily, probably.
-# (also, it tests ninja-specific out paths, which would have to be generalized
-# if this was extended to other generators).
-test = TestGyp.TestGyp(formats=['ninja'])
-
-if not os.environ.get('ProgramFiles(x86)'):
-  # TODO(scottmg)
-  print 'Skipping test on x86, http://crbug.com/365833'
-  test.pass_test()
-
-test.run_gyp('solibs_avoid_relinking.gyp')
-
-# Build the executable, grab its timestamp, touch the solib's source, rebuild
-# executable, ensure timestamp hasn't changed.
-test.build('solibs_avoid_relinking.gyp', 'b')
-test.built_file_must_exist('b' + TestCommon.exe_suffix)
-pre_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
-os.utime(os.path.join(test.workdir, 'solib.cc'),
-         (pre_stat.st_atime, pre_stat.st_mtime + 100))
-test.sleep()
-test.build('solibs_avoid_relinking.gyp', 'b')
-post_stat = os.stat(test.built_file_path('b' + TestCommon.exe_suffix))
-
-if pre_stat.st_mtime != post_stat.st_mtime:
-  test.fail_test()
-else:
-  test.pass_test()
diff --git a/tools/gyp/test/ninja/solibs_avoid_relinking/main.cc b/tools/gyp/test/ninja/solibs_avoid_relinking/main.cc
deleted file mode 100644
index 2cd74d3..0000000
--- a/tools/gyp/test/ninja/solibs_avoid_relinking/main.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-extern int foo();
-
-int main() {
-  return foo();
-}
diff --git a/tools/gyp/test/ninja/solibs_avoid_relinking/solib.cc b/tools/gyp/test/ninja/solibs_avoid_relinking/solib.cc
deleted file mode 100644
index 0856cd4..0000000
--- a/tools/gyp/test/ninja/solibs_avoid_relinking/solib.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-#ifdef _MSC_VER
-__declspec(dllexport)
-#else
-__attribute__((visibility("default")))
-#endif
-int foo() {
-  return 42;
-}
diff --git a/tools/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp b/tools/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp
deleted file mode 100644
index e816351..0000000
--- a/tools/gyp/test/ninja/solibs_avoid_relinking/solibs_avoid_relinking.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'shared_library',
-      'sources': [ 'solib.cc' ],
-      # Incremental linking enabled so that .lib timestamp is maintained when
-      # exports are unchanged.
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        }
-      },
-    },
-    {
-      'target_name': 'b',
-      'type': 'executable',
-      'sources': [ 'main.cc' ],
-      'dependencies': [ 'a' ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        }
-      },
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/ninja/use-console/foo.bar b/tools/gyp/test/ninja/use-console/foo.bar
deleted file mode 100644
index 07c476a..0000000
--- a/tools/gyp/test/ninja/use-console/foo.bar
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-This is a dummy file for rule/action input.
diff --git a/tools/gyp/test/ninja/use-console/gyptest-use-console.py b/tools/gyp/test/ninja/use-console/gyptest-use-console.py
deleted file mode 100644
index f76fcd9..0000000
--- a/tools/gyp/test/ninja/use-console/gyptest-use-console.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure 'ninja_use_console' is supported in actions and rules.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['ninja'])
-
-test.run_gyp('use-console.gyp')
-
-no_pool = open(test.built_file_path('obj/no_pool.ninja')).read()
-if 'pool =' in no_pool:
-  test.fail_test()
-
-action_pool = open(test.built_file_path('obj/action_pool.ninja')).read()
-if 'pool = console' not in action_pool:
-  test.fail_test()
-
-rule_pool = open(test.built_file_path('obj/rule_pool.ninja')).read()
-if 'pool = console' not in rule_pool:
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/ninja/use-console/use-console.gyp b/tools/gyp/test/ninja/use-console/use-console.gyp
deleted file mode 100644
index 84e6318..0000000
--- a/tools/gyp/test/ninja/use-console/use-console.gyp
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'no_pool',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'some_action',
-          'action': ['echo', 'hello'],
-          'inputs': ['foo.bar'],
-          'outputs': ['dummy'],
-        },
-      ],
-      'rules': [
-        {
-          'rule_name': 'some_rule',
-          'extension': 'bar',
-          'action': ['echo', 'hello'],
-          'outputs': ['dummy'],
-        },
-      ],
-      'sources': [
-        'foo.bar',
-      ],
-    },
-    {
-      'target_name': 'action_pool',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'some_action',
-          'action': ['echo', 'hello'],
-          'inputs': ['foo.bar'],
-          'outputs': ['dummy'],
-          'ninja_use_console': 1,
-        },
-      ],
-    },
-    {
-      'target_name': 'rule_pool',
-      'type': 'none',
-      'rules': [
-        {
-          'rule_name': 'some_rule',
-          'extension': 'bar',
-          'action': ['echo', 'hello'],
-          'outputs': ['dummy'],
-          'ninja_use_console': 1,
-        },
-      ],
-      'sources': [
-        'foo.bar',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py b/tools/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py
deleted file mode 100644
index 0c44b1d..0000000
--- a/tools/gyp/test/ninja/use-custom-environment-files/gyptest-use-custom-environment-files.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure environment files can be suppressed.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  test.run_gyp('use-custom-environment-files.gyp',
-               '-G', 'ninja_use_custom_environment_files')
-
-  # Make sure environment files do not exist.
-  if os.path.exists(test.built_file_path('environment.x86')):
-    test.fail_test()
-  if os.path.exists(test.built_file_path('environment.x64')):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc b/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp b/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp
deleted file mode 100644
index dbc95a9..0000000
--- a/tools/gyp/test/ninja/use-custom-environment-files/use-custom-environment-files.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_use_custom_environment_files',
-      'type': 'executable',
-      'sources': [
-        'use-custom-environment-files.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/no-cpp/gyptest-no-cpp.py b/tools/gyp/test/no-cpp/gyptest-no-cpp.py
deleted file mode 100644
index d37e3c1..0000000
--- a/tools/gyp/test/no-cpp/gyptest-no-cpp.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that C-only targets aren't linked against libstdc++.
-"""
-
-import TestGyp
-
-import re
-import subprocess
-import sys
-
-# set |match| to ignore build stderr output.
-test = TestGyp.TestGyp(match = lambda a, b: True)
-if sys.platform != 'win32' and test.format != 'make':
-  # TODO: This doesn't pass with make.
-  # TODO: Does a test like this make sense with Windows?
-
-  CHDIR = 'src'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', 'no_cpp', chdir=CHDIR)
-
-  def LinksLibStdCpp(path):
-    path = test.built_file_path(path, chdir=CHDIR)
-    if sys.platform == 'darwin':
-      proc = subprocess.Popen(['otool', '-L', path], stdout=subprocess.PIPE)
-    else:
-      proc = subprocess.Popen(['ldd', path], stdout=subprocess.PIPE)
-    output = proc.communicate()[0]
-    assert not proc.returncode
-    return 'libstdc++' in output or 'libc++' in output
-
-  if LinksLibStdCpp('no_cpp'):
-    test.fail_test()
-
-  build_error_code = {
-    'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
-    'make': 2,
-    'ninja': 1,
-    'cmake': 0,  # CMake picks the compiler driver based on transitive checks.
-    'xcode-ninja': [1, 65],
-  }[test.format]
-
-  test.build('test.gyp', 'no_cpp_dep_on_cc_lib', chdir=CHDIR,
-             status=build_error_code)
-
-  test.pass_test()
diff --git a/tools/gyp/test/no-cpp/src/call-f-main.c b/tools/gyp/test/no-cpp/src/call-f-main.c
deleted file mode 100644
index 8b95c59..0000000
--- a/tools/gyp/test/no-cpp/src/call-f-main.c
+++ /dev/null
@@ -1,2 +0,0 @@
-void* f();
-int main() { f(); }
diff --git a/tools/gyp/test/no-cpp/src/empty-main.c b/tools/gyp/test/no-cpp/src/empty-main.c
deleted file mode 100644
index 237c8ce..0000000
--- a/tools/gyp/test/no-cpp/src/empty-main.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() {}
diff --git a/tools/gyp/test/no-cpp/src/f.cc b/tools/gyp/test/no-cpp/src/f.cc
deleted file mode 100644
index 02f50f2..0000000
--- a/tools/gyp/test/no-cpp/src/f.cc
+++ /dev/null
@@ -1,3 +0,0 @@
-extern "C" { void* f(); }
-
-void* f() { return new int; }
diff --git a/tools/gyp/test/no-cpp/src/test.gyp b/tools/gyp/test/no-cpp/src/test.gyp
deleted file mode 100644
index 417015e..0000000
--- a/tools/gyp/test/no-cpp/src/test.gyp
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'no_cpp',
-      'type': 'executable',
-      'sources': [ 'empty-main.c' ],
-    },
-    # A static_library with a cpp file and a linkable with only .c files
-    # depending on it causes a linker error:
-    {
-      'target_name': 'cpp_lib',
-      'type': 'static_library',
-      'sources': [ 'f.cc' ],
-    },
-    {
-      'target_name': 'no_cpp_dep_on_cc_lib',
-      'type': 'executable',
-      'dependencies': [ 'cpp_lib' ],
-      'sources': [ 'call-f-main.c' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/no-output/gyptest-no-output.py b/tools/gyp/test/no-output/gyptest-no-output.py
deleted file mode 100755
index bf9a0b5..0000000
--- a/tools/gyp/test/no-output/gyptest-no-output.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verified things don't explode when there are targets without outputs.
-"""
-
-import TestGyp
-
-# TODO(evan): in ninja when there are no targets, there is no 'all'
-# target either.  Disabling this test for now.
-test = TestGyp.TestGyp(formats=['!ninja'])
-
-test.run_gyp('nooutput.gyp', chdir='src')
-test.relocate('src', 'relocate/src')
-test.build('nooutput.gyp', chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/no-output/src/nooutput.gyp b/tools/gyp/test/no-output/src/nooutput.gyp
deleted file mode 100644
index c40124e..0000000
--- a/tools/gyp/test/no-output/src/nooutput.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'no_output',
-      'type': 'none',
-      'direct_dependent_settings': {
-        'defines': [
-          'NADA',
-        ],
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/product/gyptest-product.py b/tools/gyp/test/product/gyptest-product.py
deleted file mode 100755
index 53eb5c3..0000000
--- a/tools/gyp/test/product/gyptest-product.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('product.gyp')
-test.build('product.gyp')
-
-# executables
-test.built_file_must_exist('alt1' + test._exe, test.EXECUTABLE, bare=True)
-test.built_file_must_exist('hello2.stuff', test.EXECUTABLE, bare=True)
-test.built_file_must_exist('yoalt3.stuff', test.EXECUTABLE, bare=True)
-
-# shared libraries
-test.built_file_must_exist(test.dll_ + 'alt4' + test._dll,
-                           test.SHARED_LIB, bare=True)
-test.built_file_must_exist(test.dll_ + 'hello5.stuff',
-                           test.SHARED_LIB, bare=True)
-test.built_file_must_exist('yoalt6.stuff', test.SHARED_LIB, bare=True)
-
-# static libraries
-test.built_file_must_exist(test.lib_ + 'alt7' + test._lib,
-                           test.STATIC_LIB, bare=True)
-test.built_file_must_exist(test.lib_ + 'hello8.stuff',
-                           test.STATIC_LIB, bare=True)
-test.built_file_must_exist('yoalt9.stuff', test.STATIC_LIB, bare=True)
-
-# alternate product_dir
-test.built_file_must_exist('bob/yoalt10.stuff', test.EXECUTABLE, bare=True)
-test.built_file_must_exist('bob/yoalt11.stuff', test.EXECUTABLE, bare=True)
-test.built_file_must_exist('bob/yoalt12.stuff', test.EXECUTABLE, bare=True)
-
-test.pass_test()
diff --git a/tools/gyp/test/product/hello.c b/tools/gyp/test/product/hello.c
deleted file mode 100644
index 41fdff0..0000000
--- a/tools/gyp/test/product/hello.c
+++ /dev/null
@@ -1,15 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-int func1(void) {
-  return 42;
-}
-
-int main(void) {
-  printf("Hello, world!\n");
-  printf("%d\n", func1());
-  return 0;
-}
diff --git a/tools/gyp/test/product/product.gyp b/tools/gyp/test/product/product.gyp
deleted file mode 100644
index c25eaaa..0000000
--- a/tools/gyp/test/product/product.gyp
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello1',
-      'product_name': 'alt1',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello2',
-      'product_extension': 'stuff',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello3',
-      'product_name': 'alt3',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-
-    {
-      'target_name': 'hello4',
-      'product_name': 'alt4',
-      'type': 'shared_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello5',
-      'product_extension': 'stuff',
-      'type': 'shared_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello6',
-      'product_name': 'alt6',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'type': 'shared_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-
-    {
-      'target_name': 'hello7',
-      'product_name': 'alt7',
-      'type': 'static_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello8',
-      'product_extension': 'stuff',
-      'type': 'static_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello9',
-      'product_name': 'alt9',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'type': 'static_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello10',
-      'product_name': 'alt10',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'product_dir': '<(PRODUCT_DIR)/bob',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello11',
-      'product_name': 'alt11',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'product_dir': '<(PRODUCT_DIR)/bob',
-      'type': 'shared_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello12',
-      'product_name': 'alt12',
-      'product_extension': 'stuff',
-      'product_prefix': 'yo',
-      'product_dir': '<(PRODUCT_DIR)/bob',
-      'type': 'static_library',
-      'sources': [
-        'hello.c',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="linux"', {
-      'target_defaults': {
-        'cflags': ['-fPIC'],
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/prune_targets/gyptest-prune-targets.py b/tools/gyp/test/prune_targets/gyptest-prune-targets.py
deleted file mode 100644
index b2c90f7..0000000
--- a/tools/gyp/test/prune_targets/gyptest-prune-targets.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies --root-target removes the unnecessary targets.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-# The xcode-ninja generator has its own logic for which targets to include
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-build_error_code = {
-  'cmake': 1,
-  'make': 2,
-  'msvs': 1,
-  'ninja': 1,
-  'xcode': 65,
-}[test.format]
-
-# By default, everything will be included.
-test.run_gyp('test1.gyp')
-test.build('test2.gyp', 'lib1')
-test.build('test2.gyp', 'lib2')
-test.build('test2.gyp', 'lib3')
-test.build('test2.gyp', 'lib_indirect')
-test.build('test1.gyp', 'program1')
-test.build('test1.gyp', 'program2')
-test.build('test1.gyp', 'program3')
-
-# With deep dependencies of program1 only.
-test.run_gyp('test1.gyp', '--root-target=program1')
-test.build('test2.gyp', 'lib1')
-test.build('test2.gyp', 'lib2', status=build_error_code, stderr=None)
-test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
-test.build('test2.gyp', 'lib_indirect')
-test.build('test1.gyp', 'program1')
-test.build('test1.gyp', 'program2', status=build_error_code, stderr=None)
-test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
-
-# With deep dependencies of program2 only.
-test.run_gyp('test1.gyp', '--root-target=program2')
-test.build('test2.gyp', 'lib1', status=build_error_code, stderr=None)
-test.build('test2.gyp', 'lib2')
-test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
-test.build('test2.gyp', 'lib_indirect')
-test.build('test1.gyp', 'program1', status=build_error_code, stderr=None)
-test.build('test1.gyp', 'program2')
-test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
-
-# With deep dependencies of program1 and program2.
-test.run_gyp('test1.gyp', '--root-target=program1', '--root-target=program2')
-test.build('test2.gyp', 'lib1')
-test.build('test2.gyp', 'lib2')
-test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
-test.build('test2.gyp', 'lib_indirect')
-test.build('test1.gyp', 'program1')
-test.build('test1.gyp', 'program2')
-test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
-
-test.pass_test()
diff --git a/tools/gyp/test/prune_targets/lib1.cc b/tools/gyp/test/prune_targets/lib1.cc
deleted file mode 100644
index 692b7de..0000000
--- a/tools/gyp/test/prune_targets/lib1.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void libfunc1() {
-}
diff --git a/tools/gyp/test/prune_targets/lib2.cc b/tools/gyp/test/prune_targets/lib2.cc
deleted file mode 100644
index aed394a..0000000
--- a/tools/gyp/test/prune_targets/lib2.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void libfunc2() {
-}
diff --git a/tools/gyp/test/prune_targets/lib3.cc b/tools/gyp/test/prune_targets/lib3.cc
deleted file mode 100644
index af0f717..0000000
--- a/tools/gyp/test/prune_targets/lib3.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void libfunc3() {
-}
diff --git a/tools/gyp/test/prune_targets/lib_indirect.cc b/tools/gyp/test/prune_targets/lib_indirect.cc
deleted file mode 100644
index 92d9ea4..0000000
--- a/tools/gyp/test/prune_targets/lib_indirect.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void libfunc_indirect() {
-}
diff --git a/tools/gyp/test/prune_targets/program.cc b/tools/gyp/test/prune_targets/program.cc
deleted file mode 100644
index c9ac070..0000000
--- a/tools/gyp/test/prune_targets/program.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/prune_targets/test1.gyp b/tools/gyp/test/prune_targets/test1.gyp
deleted file mode 100644
index b65ec19..0000000
--- a/tools/gyp/test/prune_targets/test1.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program1',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'dependencies': [ 'test2.gyp:lib1' ],
-    },
-    {
-      'target_name': 'program2',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'dependencies': [ 'test2.gyp:lib2' ],
-    },
-    {
-      'target_name': 'program3',
-      'type': 'executable',
-      'sources': [ 'program.cc' ],
-      'dependencies': [ 'test2.gyp:lib3' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/prune_targets/test2.gyp b/tools/gyp/test/prune_targets/test2.gyp
deleted file mode 100644
index 16f0fd3..0000000
--- a/tools/gyp/test/prune_targets/test2.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'lib1',
-      'type': 'static_library',
-      'sources': [ 'lib1.cc' ],
-      'dependencies': [ 'lib_indirect' ],
-    },
-    {
-      'target_name': 'lib2',
-      'type': 'static_library',
-      'sources': [ 'lib2.cc' ],
-      'dependencies': [ 'lib_indirect' ],
-    },
-    {
-      'target_name': 'lib3',
-      'type': 'static_library',
-      'sources': [ 'lib3.cc' ],
-    },
-    {
-      'target_name': 'lib_indirect',
-      'type': 'static_library',
-      'sources': [ 'lib_indirect.cc' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/relative/foo/a/a.cc b/tools/gyp/test/relative/foo/a/a.cc
deleted file mode 100644
index 7d1c953..0000000
--- a/tools/gyp/test/relative/foo/a/a.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/relative/foo/a/a.gyp b/tools/gyp/test/relative/foo/a/a.gyp
deleted file mode 100644
index 66316ac..0000000
--- a/tools/gyp/test/relative/foo/a/a.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'executable',
-      'sources': ['a.cc'],
-      'dependencies': [
-        '../../foo/b/b.gyp:b',
-        'c/c.gyp:c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/relative/foo/a/c/c.cc b/tools/gyp/test/relative/foo/a/c/c.cc
deleted file mode 100644
index 9d22471..0000000
--- a/tools/gyp/test/relative/foo/a/c/c.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-int func() {
-  return 0;
-}
diff --git a/tools/gyp/test/relative/foo/a/c/c.gyp b/tools/gyp/test/relative/foo/a/c/c.gyp
deleted file mode 100644
index c1f087d..0000000
--- a/tools/gyp/test/relative/foo/a/c/c.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'c',
-      'type': 'static_library',
-      'sources': ['c.cc'],
-      'dependencies': [
-        '../../b/b.gyp:b',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/relative/foo/b/b.cc b/tools/gyp/test/relative/foo/b/b.cc
deleted file mode 100644
index 011d59c..0000000
--- a/tools/gyp/test/relative/foo/b/b.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
- * Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-int func2() {
-  return 0;
-}
diff --git a/tools/gyp/test/relative/foo/b/b.gyp b/tools/gyp/test/relative/foo/b/b.gyp
deleted file mode 100644
index 0ebe453..0000000
--- a/tools/gyp/test/relative/foo/b/b.gyp
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'b',
-      'type': 'static_library',
-      'sources': ['b.cc'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/relative/gyptest-default.py b/tools/gyp/test/relative/gyptest-default.py
deleted file mode 100755
index 2d657aa..0000000
--- a/tools/gyp/test/relative/gyptest-default.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using the default build target.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_default', formats=['msvs'])
-
-# Run from down in foo.
-test.run_gyp('a.gyp', chdir='foo/a')
-sln = test.workpath('foo/a/a.sln')
-sln_data = open(sln, 'rb').read()
-vcproj = sln_data.count('b.vcproj')
-vcxproj = sln_data.count('b.vcxproj')
-if (vcproj, vcxproj) not in [(1, 0), (0, 1)]:
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/rename/filecase/file.c b/tools/gyp/test/rename/filecase/file.c
deleted file mode 100644
index 76e8197..0000000
--- a/tools/gyp/test/rename/filecase/file.c
+++ /dev/null
@@ -1 +0,0 @@
-int main() { return 0; }
diff --git a/tools/gyp/test/rename/filecase/test-casesensitive.gyp b/tools/gyp/test/rename/filecase/test-casesensitive.gyp
deleted file mode 100644
index 48eaa6e..0000000
--- a/tools/gyp/test/rename/filecase/test-casesensitive.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'filecaserename_sensitive',
-      'type': 'executable',
-      'sources': [
-        'FiLe.c',
-        'fIlE.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rename/filecase/test.gyp b/tools/gyp/test/rename/filecase/test.gyp
deleted file mode 100644
index eaee933..0000000
--- a/tools/gyp/test/rename/filecase/test.gyp
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'filecaserename',
-      'type': 'executable',
-      'sources': [
-        'file.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rename/gyptest-filecase.py b/tools/gyp/test/rename/gyptest-filecase.py
deleted file mode 100644
index daed518..0000000
--- a/tools/gyp/test/rename/gyptest-filecase.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that files whose file case changes get rebuilt correctly.
-"""
-
-import os
-import TestGyp
-
-test = TestGyp.TestGyp()
-CHDIR = 'filecase'
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-os.rename('filecase/file.c', 'filecase/fIlE.c')
-test.write('filecase/test.gyp',
-           test.read('filecase/test.gyp').replace('file.c', 'fIlE.c'))
-test.run_gyp('test.gyp', chdir=CHDIR)
-test.build('test.gyp', test.ALL, chdir=CHDIR)
-
-
-# Check that having files that differ just in their case still work on
-# case-sensitive file systems.
-test.write('filecase/FiLe.c', 'int f(); int main() { return f(); }')
-test.write('filecase/fIlE.c', 'int f() { return 42; }')
-is_case_sensitive = test.read('filecase/FiLe.c') != test.read('filecase/fIlE.c')
-if is_case_sensitive:
-  test.run_gyp('test-casesensitive.gyp', chdir=CHDIR)
-  test.build('test-casesensitive.gyp', test.ALL, chdir=CHDIR)
-
-test.pass_test()
diff --git a/tools/gyp/test/restat/gyptest-restat.py b/tools/gyp/test/restat/gyptest-restat.py
deleted file mode 100644
index 8737904..0000000
--- a/tools/gyp/test/restat/gyptest-restat.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that dependent rules are executed iff a dependency action modifies its
-outputs.
-"""
-
-import TestGyp
-import os
-
-test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-test.run_gyp('restat.gyp', chdir='src')
-
-chdir = 'relocate/src'
-test.relocate('src', chdir)
-
-# Building 'dependent' the first time generates 'side_effect', but building it
-# the second time doesn't, because 'create_intermediate' doesn't update its
-# output.
-test.build('restat.gyp', 'dependent', chdir=chdir)
-test.built_file_must_exist('side_effect', chdir=chdir)
-os.remove(test.built_file_path('side_effect', chdir=chdir))
-test.build('restat.gyp', 'dependent', chdir=chdir)
-test.built_file_must_not_exist('side_effect', chdir=chdir)
-
-test.pass_test()
diff --git a/tools/gyp/test/restat/src/create_intermediate.py b/tools/gyp/test/restat/src/create_intermediate.py
deleted file mode 100644
index a4d7450..0000000
--- a/tools/gyp/test/restat/src/create_intermediate.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-"""
-Create argv[1] iff it doesn't already exist.
-"""
-
-outfile = sys.argv[1]
-if os.path.exists(outfile):
-  sys.exit()
-open(outfile, "wb").close()
diff --git a/tools/gyp/test/restat/src/restat.gyp b/tools/gyp/test/restat/src/restat.gyp
deleted file mode 100644
index ff020e0..0000000
--- a/tools/gyp/test/restat/src/restat.gyp
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'create_intermediate',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'actions': [
-        {
-          'action_name': 'create_intermediate',
-          'inputs': [
-            'create_intermediate.py',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/intermediate',
-            'ALWAYS.run.ALWAYS',
-          ],
-          'action': [
-            'python', 'create_intermediate.py', '<(PRODUCT_DIR)/intermediate',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'dependent',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'dependencies': [
-        'create_intermediate',
-      ],
-      'actions': [
-        {
-          'action_name': 'dependent',
-          'inputs': [
-            '<(PRODUCT_DIR)/intermediate',
-          ],
-          'outputs': [
-            '<(PRODUCT_DIR)/dependent'
-          ],
-          'action': [
-            'python', 'touch.py', '<(PRODUCT_DIR)/dependent', '<(PRODUCT_DIR)/side_effect',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/restat/src/touch.py b/tools/gyp/test/restat/src/touch.py
deleted file mode 100644
index 7cd781a..0000000
--- a/tools/gyp/test/restat/src/touch.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-"""Cross-platform touch."""
-
-for fname in sys.argv[1:]:
-  if os.path.exists(fname):
-    os.utime(fname, None)
-  else:
-    open(fname, 'w').close()
diff --git a/tools/gyp/test/rules-dirname/gyptest-dirname.py b/tools/gyp/test/rules-dirname/gyptest-dirname.py
deleted file mode 100755
index 9b8949b..0000000
--- a/tools/gyp/test/rules-dirname/gyptest-dirname.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple rules when using an explicit build target of 'all'.
-"""
-
-import TestGyp
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-test = TestGyp.TestGyp(formats=['make', 'ninja', 'xcode', 'msvs'])
-
-test.run_gyp('actions.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('actions.gyp', chdir='relocate/src')
-
-expect = """\
-no dir here
-hi c
-hello baz
-"""
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('gencc_int_output', chdir=chdir, stdout=expect)
-if test.format == 'msvs':
-  test.run_built_executable('gencc_int_output_external', chdir=chdir,
-                            stdout=expect)
-
-test.must_match('relocate/src/subdir/foo/bar/baz.dirname',
-                os.path.join('foo', 'bar'))
-test.must_match('relocate/src/subdir/a/b/c.dirname',
-                os.path.join('a', 'b'))
-
-# FIXME the xcode and make generators incorrectly convert RULE_INPUT_PATH
-# to an absolute path, making the tests below fail!
-if test.format != 'xcode' and test.format != 'make':
-  test.must_match('relocate/src/subdir/foo/bar/baz.path',
-                  os.path.join('foo', 'bar', 'baz.printvars'))
-  test.must_match('relocate/src/subdir/a/b/c.path',
-                  os.path.join('a', 'b', 'c.printvars'))
-
-test.pass_test()
diff --git a/tools/gyp/test/rules-dirname/src/actions.gyp b/tools/gyp/test/rules-dirname/src/actions.gyp
deleted file mode 100644
index c5693c6..0000000
--- a/tools/gyp/test/rules-dirname/src/actions.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_all_actions',
-      'type': 'none',
-      'dependencies': [
-        'subdir/input-rule-dirname.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules-dirname/src/copy-file.py b/tools/gyp/test/rules-dirname/src/copy-file.py
deleted file mode 100755
index 9774ccc..0000000
--- a/tools/gyp/test/rules-dirname/src/copy-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-
-contents = open(sys.argv[1], 'r').read()
-open(sys.argv[2], 'wb').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/rules-dirname/src/subdir/a/b/c.gencc b/tools/gyp/test/rules-dirname/src/subdir/a/b/c.gencc
deleted file mode 100644
index 29cb5f7..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/a/b/c.gencc
+++ /dev/null
@@ -1,8 +0,0 @@
-// -*- mode: c++ -*-
-#include <stdio.h>
-
-namespace gen {
-  void c() {
-    printf("hi c\n");
-  }
-}
diff --git a/tools/gyp/test/rules-dirname/src/subdir/a/b/c.printvars b/tools/gyp/test/rules-dirname/src/subdir/a/b/c.printvars
deleted file mode 100644
index cc4561d..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/a/b/c.printvars
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file for testing build rules
diff --git a/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc b/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc
deleted file mode 100644
index 90b4ce9..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.gencc
+++ /dev/null
@@ -1,8 +0,0 @@
-// -*- mode: c++ -*-
-#include <stdio.h>
-
-namespace gen {
-  void baz() {
-    printf("hello baz\n");
-  }
-}
diff --git a/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars b/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars
deleted file mode 100644
index cc4561d..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/foo/bar/baz.printvars
+++ /dev/null
@@ -1 +0,0 @@
-# Empty file for testing build rules
diff --git a/tools/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp b/tools/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp
deleted file mode 100644
index da749a2..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/input-rule-dirname.gyp
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'print_rule_input_dirname',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'foo/bar/baz.printvars',
-        'a/b/c.printvars',
-      ],
-      'rules': [
-        {
-          'rule_name': 'printvars',
-          'extension': 'printvars',
-          'inputs': [
-            'printvars.py',
-          ],
-          'outputs': [
-            '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).dirname',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(RULE_INPUT_DIRNAME)', '<@(_outputs)',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'print_rule_input_path',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'foo/bar/baz.printvars',
-        'a/b/c.printvars',
-      ],
-      'rules': [
-        {
-          'rule_name': 'printvars',
-          'extension': 'printvars',
-          'inputs': [
-            'printvars.py',
-          ],
-          'outputs': [
-            '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).path',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'gencc_int_output',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'nodir.gencc',
-        'foo/bar/baz.gencc',
-        'a/b/c.gencc',
-        'main.cc',
-      ],
-      'rules': [
-        {
-          'rule_name': 'gencc',
-          'extension': 'gencc',
-          'inputs': [
-            '<(DEPTH)/copy-file.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="win"', {
-      'targets': [
-        {
-          'target_name': 'gencc_int_output_external',
-          'type': 'executable',
-          'msvs_cygwin_shell': 0,
-          'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
-          'sources': [
-            'nodir.gencc',
-            'foo/bar/baz.gencc',
-            'a/b/c.gencc',
-            'main.cc',
-          ],
-          'dependencies': [
-            'cygwin',
-          ],
-          'rules': [
-            {
-              'rule_name': 'gencc',
-              'extension': 'gencc',
-              'msvs_external_rule': 1,
-              'inputs': [
-                '<(DEPTH)/copy-file.py',
-              ],
-              'outputs': [
-                '<(INTERMEDIATE_DIR)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
-              ],
-              'action': [
-                'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-              ],
-              'process_outputs_as_sources': 1,
-            },
-          ],
-        },
-        {
-          'target_name': 'cygwin',
-          'type': 'none',
-          'actions': [
-            {
-              'action_name': 'setup_mount',
-              'msvs_cygwin_shell': 0,
-              'inputs': [
-                '../../../../../../<(DEPTH)/third_party/cygwin/setup_mount.bat',
-              ],
-              # Visual Studio requires an output file, or else the
-              # custom build step won't run.
-              'outputs': [
-                '<(INTERMEDIATE_DIR)/_always_run_setup_mount.marker',
-              ],
-              'action': ['<@(_inputs)'],
-            },
-          ],
-        },
-      ],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/rules-dirname/src/subdir/main.cc b/tools/gyp/test/rules-dirname/src/subdir/main.cc
deleted file mode 100644
index 3bb8e01..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/main.cc
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-namespace gen {
-  extern void nodir();
-  extern void c();
-  extern void baz();
-}
-
-int main() {
-  gen::nodir();
-  gen::c();
-  gen::baz();
-}
diff --git a/tools/gyp/test/rules-dirname/src/subdir/nodir.gencc b/tools/gyp/test/rules-dirname/src/subdir/nodir.gencc
deleted file mode 100644
index 720f589..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/nodir.gencc
+++ /dev/null
@@ -1,8 +0,0 @@
-// -*- mode: c++ -*-
-#include <stdio.h>
-
-namespace gen {
-  void nodir() {
-    printf("no dir here\n");
-  }
-}
diff --git a/tools/gyp/test/rules-dirname/src/subdir/printvars.py b/tools/gyp/test/rules-dirname/src/subdir/printvars.py
deleted file mode 100755
index ef3d92e..0000000
--- a/tools/gyp/test/rules-dirname/src/subdir/printvars.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Prints interesting vars
-"""
-
-import sys;
-
-out = open(sys.argv[2], 'w')
-out.write(sys.argv[1]);
diff --git a/tools/gyp/test/rules-rebuild/gyptest-all.py b/tools/gyp/test/rules-rebuild/gyptest-all.py
deleted file mode 100755
index aaaa2a6..0000000
--- a/tools/gyp/test/rules-rebuild/gyptest-all.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a rule that generates multiple outputs rebuilds
-correctly when the inputs change.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_all')
-
-test.run_gyp('same_target.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-
-test.build('same_target.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in!
-Hello from prog2.in!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.sleep()
-contents = test.read(['relocate', 'src', 'prog1.in'])
-contents = contents.replace('!', ' AGAIN!')
-test.write(['relocate', 'src', 'prog1.in'], contents)
-
-test.build('same_target.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in AGAIN!
-Hello from prog2.in!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.sleep()
-contents = test.read(['relocate', 'src', 'prog2.in'])
-contents = contents.replace('!', ' AGAIN!')
-test.write(['relocate', 'src', 'prog2.in'], contents)
-
-test.build('same_target.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in AGAIN!
-Hello from prog2.in AGAIN!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/rules-rebuild/gyptest-default.py b/tools/gyp/test/rules-rebuild/gyptest-default.py
deleted file mode 100755
index ac3f020..0000000
--- a/tools/gyp/test/rules-rebuild/gyptest-default.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a rule that generates multiple outputs rebuilds
-correctly when the inputs change.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(workdir='workarea_default')
-
-test.run_gyp('same_target.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-
-test.build('same_target.gyp', chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in!
-Hello from prog2.in!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.sleep()
-contents = test.read(['relocate', 'src', 'prog1.in'])
-contents = contents.replace('!', ' AGAIN!')
-test.write(['relocate', 'src', 'prog1.in'], contents)
-
-test.build('same_target.gyp', chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in AGAIN!
-Hello from prog2.in!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.sleep()
-contents = test.read(['relocate', 'src', 'prog2.in'])
-contents = contents.replace('!', ' AGAIN!')
-test.write(['relocate', 'src', 'prog2.in'], contents)
-
-test.build('same_target.gyp', chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from prog1.in AGAIN!
-Hello from prog2.in AGAIN!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-# Test that modifying a rule's inputs (specifically, make-sources.py) causes
-# the targets to be built.
-
-test.sleep()
-contents = test.read(['relocate', 'src', 'make-sources.py'])
-contents = contents.replace('%s', 'the amazing %s')
-test.write(['relocate', 'src', 'make-sources.py'], contents)
-
-test.build('same_target.gyp', chdir='relocate/src')
-
-expect = """\
-Hello from main.c
-Hello from the amazing prog1.in AGAIN!
-Hello from the amazing prog2.in AGAIN!
-"""
-
-test.run_built_executable('program', chdir='relocate/src', stdout=expect)
-
-test.up_to_date('same_target.gyp', 'program', chdir='relocate/src')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/rules-rebuild/src/main.c b/tools/gyp/test/rules-rebuild/src/main.c
deleted file mode 100644
index bd8fbb2..0000000
--- a/tools/gyp/test/rules-rebuild/src/main.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-extern void prog1(void);
-extern void prog2(void);
-
-int main(void)
-{
-  printf("Hello from main.c\n");
-  prog1();
-  prog2();
-  return 0;
-}
diff --git a/tools/gyp/test/rules-rebuild/src/make-sources.py b/tools/gyp/test/rules-rebuild/src/make-sources.py
deleted file mode 100755
index 7ec0227..0000000
--- a/tools/gyp/test/rules-rebuild/src/make-sources.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-assert len(sys.argv) == 4, sys.argv
-
-(in_file, c_file, h_file) = sys.argv[1:]
-
-def write_file(filename, contents):
-  open(filename, 'wb').write(contents)
-
-write_file(c_file, open(in_file, 'rb').read())
-
-write_file(h_file, '#define NAME "%s"\n' % in_file)
-
-sys.exit(0)
diff --git a/tools/gyp/test/rules-rebuild/src/prog1.in b/tools/gyp/test/rules-rebuild/src/prog1.in
deleted file mode 100644
index 191b00e..0000000
--- a/tools/gyp/test/rules-rebuild/src/prog1.in
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-#include "prog1.h"
-
-void prog1(void)
-{
-  printf("Hello from %s!\n", NAME);
-}
diff --git a/tools/gyp/test/rules-rebuild/src/prog2.in b/tools/gyp/test/rules-rebuild/src/prog2.in
deleted file mode 100644
index 7bfac51..0000000
--- a/tools/gyp/test/rules-rebuild/src/prog2.in
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-#include "prog2.h"
-
-void prog2(void)
-{
-  printf("Hello from %s!\n", NAME);
-}
diff --git a/tools/gyp/test/rules-rebuild/src/same_target.gyp b/tools/gyp/test/rules-rebuild/src/same_target.gyp
deleted file mode 100644
index 22ba560..0000000
--- a/tools/gyp/test/rules-rebuild/src/same_target.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'main.c',
-        'prog1.in',
-        'prog2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'make_sources',
-          'extension': 'in',
-          'inputs': [
-            'make-sources.py',
-          ],
-          'outputs': [
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).h',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_NAME)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py b/tools/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py
deleted file mode 100755
index a57c36d..0000000
--- a/tools/gyp/test/rules-use-built-dependencies/gyptest-use-built-dependencies.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that rules which use built dependencies work correctly.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('use-built-dependencies-rule.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-test.build('use-built-dependencies-rule.gyp', chdir='relocate/src')
-
-test.built_file_must_exist('main_output', chdir='relocate/src')
-test.built_file_must_match('main_output', 'output', chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/rules-use-built-dependencies/src/main.cc b/tools/gyp/test/rules-use-built-dependencies/src/main.cc
deleted file mode 100644
index 937d284..0000000
--- a/tools/gyp/test/rules-use-built-dependencies/src/main.cc
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-#include <stdio.h>
-
-int main(int argc, char *argv[]) {
-    if (argc < 2) {
-        return 2;
-    }
-    FILE* file;
-    file = fopen(argv[1], "wb");
-    const char output[] = "output";
-    fwrite(output, 1, sizeof(output) - 1, file);
-    fclose(file);
-    return 0;
-}
-
diff --git a/tools/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp b/tools/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp
deleted file mode 100644
index 92bfeda..0000000
--- a/tools/gyp/test/rules-use-built-dependencies/src/use-built-dependencies-rule.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'main',
-      'toolsets': ['host'],
-      'type': 'executable',
-      'sources': [
-        'main.cc',
-      ],
-    },
-    {
-      'target_name': 'post',
-      'toolsets': ['host'],
-      'type': 'none',
-      'dependencies': [
-        'main',
-      ],
-      'sources': [
-        # As this test is written it could easily be made into an action.
-        # An acutal use case would have a number of these 'sources'.
-        '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)main<(EXECUTABLE_SUFFIX)',
-      ],
-      'rules': [
-        {
-          'rule_name': 'generate_output',
-          'extension': '<(EXECUTABLE_SUFFIX)',
-          'outputs': [ '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT)_output', ],
-          'msvs_cygwin_shell': 0,
-          'action': [
-            '<(RULE_INPUT_PATH)',
-            '<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT)_output',
-          ],
-          'message': 'Generating output for <(RULE_INPUT_ROOT)'
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules-variables/gyptest-rules-variables.py b/tools/gyp/test/rules-variables/gyptest-rules-variables.py
deleted file mode 100755
index c1825e0..0000000
--- a/tools/gyp/test/rules-variables/gyptest-rules-variables.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies rules related variables are expanded.
-"""
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['ninja'])
-
-test.relocate('src', 'relocate/src')
-
-test.run_gyp('variables.gyp', chdir='relocate/src')
-
-test.build('variables.gyp', chdir='relocate/src')
-
-test.run_built_executable('all_rule_variables',
-                          chdir='relocate/src',
-                          stdout="input_root\ninput_dirname\ninput_path\n" +
-                          "input_ext\ninput_name\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/rules-variables/src/input_ext.c b/tools/gyp/test/rules-variables/src/input_ext.c
deleted file mode 100644
index f41e73e..0000000
--- a/tools/gyp/test/rules-variables/src/input_ext.c
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-void input_ext() {
-  printf("input_ext\n");
-}
diff --git a/tools/gyp/test/rules-variables/src/input_name/test.c b/tools/gyp/test/rules-variables/src/input_name/test.c
deleted file mode 100644
index e28b74d..0000000
--- a/tools/gyp/test/rules-variables/src/input_name/test.c
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-void input_name() {
-  printf("input_name\n");
-}
diff --git a/tools/gyp/test/rules-variables/src/input_path/subdir/test.c b/tools/gyp/test/rules-variables/src/input_path/subdir/test.c
deleted file mode 100644
index 403dbbd..0000000
--- a/tools/gyp/test/rules-variables/src/input_path/subdir/test.c
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-void input_path() {
-  printf("input_path\n");
-}
diff --git a/tools/gyp/test/rules-variables/src/subdir/input_dirname.c b/tools/gyp/test/rules-variables/src/subdir/input_dirname.c
deleted file mode 100644
index 40cecd8..0000000
--- a/tools/gyp/test/rules-variables/src/subdir/input_dirname.c
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-void input_dirname() {
-  printf("input_dirname\n");
-}
diff --git a/tools/gyp/test/rules-variables/src/subdir/test.c b/tools/gyp/test/rules-variables/src/subdir/test.c
deleted file mode 100644
index 6c0280b..0000000
--- a/tools/gyp/test/rules-variables/src/subdir/test.c
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern void input_root();
-extern void input_dirname();
-extern void input_path();
-extern void input_ext();
-extern void input_name();
-
-int main() {
-  input_root();
-  input_dirname();
-  input_path();
-  input_ext();
-  input_name();
-  return 0;
-}
diff --git a/tools/gyp/test/rules-variables/src/test.input_root.c b/tools/gyp/test/rules-variables/src/test.input_root.c
deleted file mode 100644
index 33a7740..0000000
--- a/tools/gyp/test/rules-variables/src/test.input_root.c
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2011 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-void input_root() {
-  printf("input_root\n");
-}
diff --git a/tools/gyp/test/rules-variables/src/variables.gyp b/tools/gyp/test/rules-variables/src/variables.gyp
deleted file mode 100644
index 6debba1..0000000
--- a/tools/gyp/test/rules-variables/src/variables.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    # This test shouldn't ever actually need to execute its rules: there's no
-    # command line that generates any output anyway. However, there's something
-    # slightly broken in either ninja or (maybe more likely?) on the win32 VM
-    # gypbots that breaks dependency checking and causes this rule to want to
-    # run. When it does run, the cygwin path is wrong, so the do-nothing step
-    # fails.
-    # TODO: Investigate and fix whatever's actually failing and remove this.
-    'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
-  },
-  'targets': [
-    {
-      'target_name': 'all_rule_variables',
-      'type': 'executable',
-      'sources': [
-        'subdir/test.c',
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule_variable',
-          'extension': 'c',
-          'outputs': [
-            '<(RULE_INPUT_ROOT).input_root.c',
-            '<(RULE_INPUT_DIRNAME)/input_dirname.c',
-            'input_path/<(RULE_INPUT_PATH)',
-            'input_ext<(RULE_INPUT_EXT)',
-            'input_name/<(RULE_INPUT_NAME)',
-          ],
-          'action': [],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/gyptest-all.py b/tools/gyp/test/rules/gyptest-all.py
deleted file mode 100755
index e6e637e..0000000
--- a/tools/gyp/test/rules/gyptest-all.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple rules when using an explicit build target of 'all'.
-"""
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('no_action_with_rules_fails.gyp', chdir='src/noaction', status=1,
-             stderr=None)
-
-test.run_gyp('actions.gyp',
-             '-G', 'xcode_ninja_target_pattern=^pull_in_all_actions$',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('actions.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from program.c
-Hello from function1.in
-Hello from function2.in
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir1'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-expect = """\
-Hello from program.c
-Hello from function3.in
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir3'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program2', chdir=chdir, stdout=expect)
-
-test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
-
-test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
-
-test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
-test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')
-
-test.must_match('relocate/src/external/file1.external_rules.out',
-                'Hello from file1.in\n')
-test.must_match('relocate/src/external/file2.external_rules.out',
-                'Hello from file2.in\n')
-
-expect = """\
-Hello from program.c
-Got 41.
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir4'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program4', chdir=chdir, stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/rules/gyptest-default.py b/tools/gyp/test/rules/gyptest-default.py
deleted file mode 100755
index 65b79da..0000000
--- a/tools/gyp/test/rules/gyptest-default.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simple rules when using an explicit build target of 'all'.
-"""
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('actions.gyp',
-             '-G', 'xcode_ninja_target_pattern=^pull_in_all_actions$',
-             chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('actions.gyp', chdir='relocate/src')
-
-expect = """\
-Hello from program.c
-Hello from function1.in
-Hello from function2.in
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir1'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program', chdir=chdir, stdout=expect)
-
-expect = """\
-Hello from program.c
-Hello from function3.in
-"""
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir3'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('program2', chdir=chdir, stdout=expect)
-
-test.must_match('relocate/src/subdir2/file1.out', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out', 'Hello from file2.in\n')
-
-test.must_match('relocate/src/subdir2/file1.out2', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out2', 'Hello from file2.in\n')
-
-test.must_match('relocate/src/subdir2/file1.out4', 'Hello from file1.in\n')
-test.must_match('relocate/src/subdir2/file2.out4', 'Hello from file2.in\n')
-test.must_match('relocate/src/subdir2/file1.copy', 'Hello from file1.in\n')
-
-test.must_match('relocate/src/external/file1.external_rules.out',
-                'Hello from file1.in\n')
-test.must_match('relocate/src/external/file2.external_rules.out',
-                'Hello from file2.in\n')
-
-test.pass_test()
diff --git a/tools/gyp/test/rules/gyptest-input-root.py b/tools/gyp/test/rules/gyptest-input-root.py
deleted file mode 100755
index 92bade6..0000000
--- a/tools/gyp/test/rules/gyptest-input-root.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that RULE_INPUT_ROOT isn't turned into a path in rule actions
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('input-root.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('input-root.gyp', target='test', chdir='relocate/src')
-
-expect = """\
-Hello somefile
-"""
-
-test.run_built_executable('test', chdir='relocate/src', stdout=expect)
-test.pass_test()
diff --git a/tools/gyp/test/rules/gyptest-special-variables.py b/tools/gyp/test/rules/gyptest-special-variables.py
deleted file mode 100644
index 05ea7ce..0000000
--- a/tools/gyp/test/rules/gyptest-special-variables.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Verifies that VS variables that require special variables are expanded
-correctly. """
-
-import sys
-import TestGyp
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-
-  test.run_gyp('special-variables.gyp', chdir='src')
-  test.build('special-variables.gyp', test.ALL, chdir='src')
-  test.pass_test()
diff --git a/tools/gyp/test/rules/src/actions.gyp b/tools/gyp/test/rules/src/actions.gyp
deleted file mode 100644
index 84376a7..0000000
--- a/tools/gyp/test/rules/src/actions.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'pull_in_all_actions',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/executable.gyp:*',
-        'subdir2/both_rule_and_action_input.gyp:*',
-        'subdir2/never_used.gyp:*',
-        'subdir2/no_inputs.gyp:*',
-        'subdir2/no_action.gyp:*',
-        'subdir2/none.gyp:*',
-        'subdir3/executable2.gyp:*',
-        'subdir4/build-asm.gyp:*',
-        'external/external.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/an_asm.S b/tools/gyp/test/rules/src/an_asm.S
deleted file mode 100644
index eeb1345..0000000
--- a/tools/gyp/test/rules/src/an_asm.S
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Fake asm file.
-int main() {}
diff --git a/tools/gyp/test/rules/src/as.bat b/tools/gyp/test/rules/src/as.bat
deleted file mode 100644
index 903c31a..0000000
--- a/tools/gyp/test/rules/src/as.bat
+++ /dev/null
@@ -1,7 +0,0 @@
-@echo off
-:: Copyright (c) 2011 Google Inc. All rights reserved.
-:: Use of this source code is governed by a BSD-style license that can be
-:: found in the LICENSE file.
-
-:: Fake assembler for Windows
-cl /TP /c %1 /Fo%2
diff --git a/tools/gyp/test/rules/src/copy-file.py b/tools/gyp/test/rules/src/copy-file.py
deleted file mode 100755
index 5a5feae..0000000
--- a/tools/gyp/test/rules/src/copy-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-
-contents = open(sys.argv[1], 'r').read()
-open(sys.argv[2], 'wb').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/rules/src/external/external.gyp b/tools/gyp/test/rules/src/external/external.gyp
deleted file mode 100644
index b28174f..0000000
--- a/tools/gyp/test/rules/src/external/external.gyp
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test that the case where there are no inputs (other than the
-# file the rule applies to).
-{
-  'target_defaults': {
-    'msvs_cygwin_dirs': ['../../../../../../<(DEPTH)/third_party/cygwin'],
-  },
-  'targets': [
-    {
-      'target_name': 'external_rules',
-      'type': 'none',
-      'sources': [
-        'file1.in',
-        'file2.in',
-      ],
-      'conditions': [
-        ['OS=="win"', {
-          'dependencies': [
-            'cygwin',
-          ],
-        }],
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file',
-          'extension': 'in',
-          'msvs_external_rule': 1,
-          'outputs': [
-            '<(RULE_INPUT_ROOT).external_rules.out',
-          ],
-          'action': [
-            'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-        },
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="win"', {
-      'targets': [
-        {
-          'target_name': 'cygwin',
-          'type': 'none',
-          'actions': [
-            {
-              'action_name': 'setup_mount',
-              'msvs_cygwin_shell': 0,
-              'inputs': [
-                '../../../../../../<(DEPTH)/third_party/cygwin/setup_mount.bat',
-              ],
-              # Visual Studio requires an output file, or else the
-              # custom build step won't run.
-              'outputs': [
-                '<(INTERMEDIATE_DIR)/_always_run_setup_mount.marker',
-              ],
-              'action': ['<@(_inputs)'],
-            },
-          ],
-        },
-      ],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/rules/src/external/file1.in b/tools/gyp/test/rules/src/external/file1.in
deleted file mode 100644
index 86ac3ad..0000000
--- a/tools/gyp/test/rules/src/external/file1.in
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file1.in
diff --git a/tools/gyp/test/rules/src/external/file2.in b/tools/gyp/test/rules/src/external/file2.in
deleted file mode 100644
index bf83d8e..0000000
--- a/tools/gyp/test/rules/src/external/file2.in
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file2.in
diff --git a/tools/gyp/test/rules/src/input-root.gyp b/tools/gyp/test/rules/src/input-root.gyp
deleted file mode 100644
index b6600e7..0000000
--- a/tools/gyp/test/rules/src/input-root.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test',
-      'type': 'executable',
-      'sources': [ 'somefile.ext', ],
-      'rules': [{
-        'rule_name': 'rule',
-        'extension': 'ext',
-        'inputs': [ 'rule.py', ],
-        'outputs': [ '<(RULE_INPUT_ROOT).cc', ],
-        'action': [ 'python', 'rule.py', '<(RULE_INPUT_ROOT)', ],
-        'message': 'Processing <(RULE_INPUT_PATH)',
-        'process_outputs_as_sources': 1,
-        # Allows the test to run without hermetic cygwin on windows.
-        'msvs_cygwin_shell': 0,
-      }],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/noaction/file1.in b/tools/gyp/test/rules/src/noaction/file1.in
deleted file mode 100644
index 86ac3ad..0000000
--- a/tools/gyp/test/rules/src/noaction/file1.in
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file1.in
diff --git a/tools/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp b/tools/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp
deleted file mode 100644
index 9b6a656..0000000
--- a/tools/gyp/test/rules/src/noaction/no_action_with_rules_fails.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test the case where there's no action but there are input rules that should
-# be processed results in a gyp failure.
-{
-  'targets': [
-    {
-      'target_name': 'extension_does_match_sources_but_no_action',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'assembled',
-          'extension': 'in',
-          'outputs': [
-            '<(RULE_INPUT_ROOT).in',
-          ],
-          'conditions': [
-            # Always fails.
-            [ '"true"=="false"', {
-              'action': [
-                'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-              ],
-              'process_outputs_as_sources': 1,
-              'message': 'test_rule',
-            }],
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/rule.py b/tools/gyp/test/rules/src/rule.py
deleted file mode 100755
index 8a1f36d..0000000
--- a/tools/gyp/test/rules/src/rule.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1] + ".cc", "w")
-f.write("""\
-#include <stdio.h>
-
-int main() {
-  puts("Hello %s");
-  return 0;
-}
-""" % sys.argv[1])
-f.close()
diff --git a/tools/gyp/test/rules/src/somefile.ext b/tools/gyp/test/rules/src/somefile.ext
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/rules/src/somefile.ext
+++ /dev/null
diff --git a/tools/gyp/test/rules/src/special-variables.gyp b/tools/gyp/test/rules/src/special-variables.gyp
deleted file mode 100644
index d1443af..0000000
--- a/tools/gyp/test/rules/src/special-variables.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'rules': [
-        {
-          'rule_name': 'assembler (gnu-compatible)',
-          'msvs_cygwin_shell': 0,
-          'msvs_quote_cmd': 0,
-          'extension': 'S',
-          'inputs': [
-            'as.bat',
-          ],
-          'outputs': [
-            '$(IntDir)/$(InputName).obj',
-          ],
-          'action': [
-            'as.bat',
-            '$(InputPath)',
-            '$(IntDir)/$(InputName).obj',
-          ],
-          'message': 'Building assembly language file $(InputPath)',
-          'process_outputs_as_sources': 1,
-        },
-      ],
-      'target_name': 'test',
-      'type': 'static_library',
-      'sources': [ 'an_asm.S' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir1/executable.gyp b/tools/gyp/test/rules/src/subdir1/executable.gyp
deleted file mode 100644
index c34cce5..0000000
--- a/tools/gyp/test/rules/src/subdir1/executable.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-        'function1.in',
-        'function2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file',
-          'extension': 'in',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            # TODO:  fix Make to support generated files not
-            # in a variable-named path like <(INTERMEDIATE_DIR)
-            #'<(RULE_INPUT_ROOT).c',
-            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).c',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir1/function1.in b/tools/gyp/test/rules/src/subdir1/function1.in
deleted file mode 100644
index 60ff289..0000000
--- a/tools/gyp/test/rules/src/subdir1/function1.in
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void function1(void)
-{
-  printf("Hello from function1.in\n");
-}
diff --git a/tools/gyp/test/rules/src/subdir1/function2.in b/tools/gyp/test/rules/src/subdir1/function2.in
deleted file mode 100644
index 0fcfc03..0000000
--- a/tools/gyp/test/rules/src/subdir1/function2.in
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void function2(void)
-{
-  printf("Hello from function2.in\n");
-}
diff --git a/tools/gyp/test/rules/src/subdir1/program.c b/tools/gyp/test/rules/src/subdir1/program.c
deleted file mode 100644
index 6b11ff9..0000000
--- a/tools/gyp/test/rules/src/subdir1/program.c
+++ /dev/null
@@ -1,12 +0,0 @@
-#include <stdio.h>
-
-extern void function1(void);
-extern void function2(void);
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  function1();
-  function2();
-  return 0;
-}
diff --git a/tools/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp b/tools/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp
deleted file mode 100644
index e5e6f3e..0000000
--- a/tools/gyp/test/rules/src/subdir2/both_rule_and_action_input.gyp
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Tests that if a rule input is also an action input, both the rule and action
-# are executed
-{
-  'targets': [
-    {
-      'target_name': 'files_both_rule_and_action_input',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-        'file1.in',
-        'file2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file',
-          'extension': 'in',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            '<(RULE_INPUT_ROOT).out4',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-        },
-      ],
-      'actions': [
-        {
-          'action_name': 'copy_file1_in',
-          'inputs': [
-            '../copy-file.py',
-            'file1.in',
-          ],
-          'outputs': [
-            'file1.copy',
-          ],
-          'action': [
-            'python', '<@(_inputs)', '<(_outputs)'
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir2/file1.in b/tools/gyp/test/rules/src/subdir2/file1.in
deleted file mode 100644
index 86ac3ad..0000000
--- a/tools/gyp/test/rules/src/subdir2/file1.in
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file1.in
diff --git a/tools/gyp/test/rules/src/subdir2/file2.in b/tools/gyp/test/rules/src/subdir2/file2.in
deleted file mode 100644
index bf83d8e..0000000
--- a/tools/gyp/test/rules/src/subdir2/file2.in
+++ /dev/null
@@ -1 +0,0 @@
-Hello from file2.in
diff --git a/tools/gyp/test/rules/src/subdir2/never_used.gyp b/tools/gyp/test/rules/src/subdir2/never_used.gyp
deleted file mode 100644
index 17f6f55..0000000
--- a/tools/gyp/test/rules/src/subdir2/never_used.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test that the case where there is a rule that doesn't apply to anything.
-{
-  'targets': [
-    {
-      'target_name': 'files_no_input2',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in',
-        'file2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file3',
-          'extension': 'in2',
-          'outputs': [
-            '<(RULE_INPUT_ROOT).out3',
-          ],
-          'action': [
-            'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir2/no_action.gyp b/tools/gyp/test/rules/src/subdir2/no_action.gyp
deleted file mode 100644
index ffa1cef..0000000
--- a/tools/gyp/test/rules/src/subdir2/no_action.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test that the case where an action is only specified under a conditional is
-# evaluated appropriately.
-{
-  'targets': [
-    {
-      'target_name': 'extension_does_not_match_sources_and_no_action',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in',
-        'file2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'assemble',
-          'extension': 'asm',
-          'outputs': [
-            '<(RULE_INPUT_ROOT).fail',
-          ],
-          'conditions': [
-            # Always fails.
-            [ '"true"=="false"', {
-              'action': [
-                'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-              ],
-              'process_outputs_as_sources': 1,
-              'message': 'test_rule',
-            }],
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir2/no_inputs.gyp b/tools/gyp/test/rules/src/subdir2/no_inputs.gyp
deleted file mode 100644
index e61a1a3..0000000
--- a/tools/gyp/test/rules/src/subdir2/no_inputs.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Test that the case where there are no inputs (other than the
-# file the rule applies to).
-{
-  'targets': [
-    {
-      'target_name': 'files_no_input',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in',
-        'file2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file2',
-          'extension': 'in',
-          'outputs': [
-            '<(RULE_INPUT_ROOT).out2',
-          ],
-          'action': [
-            'python', '../copy-file.py', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir2/none.gyp b/tools/gyp/test/rules/src/subdir2/none.gyp
deleted file mode 100644
index 38bcdab..0000000
--- a/tools/gyp/test/rules/src/subdir2/none.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'files',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'file1.in',
-        'file2.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file',
-          'extension': 'in',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            '<(RULE_INPUT_ROOT).out',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir2/program.c b/tools/gyp/test/rules/src/subdir2/program.c
deleted file mode 100644
index e5db175..0000000
--- a/tools/gyp/test/rules/src/subdir2/program.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Copyright (c) 2014 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/rules/src/subdir3/executable2.gyp b/tools/gyp/test/rules/src/subdir3/executable2.gyp
deleted file mode 100644
index a2a528f..0000000
--- a/tools/gyp/test/rules/src/subdir3/executable2.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This one tests that rules are properly written if extensions are different
-# between the target's sources (program.c) and the generated files
-# (function3.cc)
-
-{
-  'targets': [
-    {
-      'target_name': 'program2',
-      'type': 'executable',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'program.c',
-        'function3.in',
-      ],
-      'rules': [
-        {
-          'rule_name': 'copy_file',
-          'extension': 'in',
-          'inputs': [
-            '../copy-file.py',
-          ],
-          'outputs': [
-            '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).cc',
-          ],
-          'action': [
-            'python', '<(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-          ],
-          'process_outputs_as_sources': 1,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir3/function3.in b/tools/gyp/test/rules/src/subdir3/function3.in
deleted file mode 100644
index 99f46ab..0000000
--- a/tools/gyp/test/rules/src/subdir3/function3.in
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-extern "C" void function3(void)
-{
-  printf("Hello from function3.in\n");
-}
diff --git a/tools/gyp/test/rules/src/subdir3/program.c b/tools/gyp/test/rules/src/subdir3/program.c
deleted file mode 100644
index c38eead..0000000
--- a/tools/gyp/test/rules/src/subdir3/program.c
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <stdio.h>
-
-extern void function3(void);
-
-int main(void)
-{
-  printf("Hello from program.c\n");
-  function3();
-  return 0;
-}
diff --git a/tools/gyp/test/rules/src/subdir4/asm-function.assem b/tools/gyp/test/rules/src/subdir4/asm-function.assem
deleted file mode 100644
index ed47cad..0000000
--- a/tools/gyp/test/rules/src/subdir4/asm-function.assem
+++ /dev/null
@@ -1,10 +0,0 @@
-#if PLATFORM_WINDOWS || PLATFORM_MAC
-# define IDENTIFIER(n)  _##n
-#else /* Linux */
-# define IDENTIFIER(n)  n
-#endif
-
-.globl IDENTIFIER(asm_function)
-IDENTIFIER(asm_function):
-  movl $41, %eax
-  ret
diff --git a/tools/gyp/test/rules/src/subdir4/build-asm.gyp b/tools/gyp/test/rules/src/subdir4/build-asm.gyp
deleted file mode 100644
index fe0fe93..0000000
--- a/tools/gyp/test/rules/src/subdir4/build-asm.gyp
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This one tests that assembly files ended as .s and .S are compiled.
-
-{
-  'target_defaults': {
-    'conditions': [
-      ['OS=="win"', {
-        'defines': ['PLATFORM_WIN'],
-      }],
-      ['OS=="mac"', {
-        'defines': ['PLATFORM_MAC'],
-      }],
-      ['OS=="linux"', {
-        'defines': ['PLATFORM_LINUX'],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'program4',
-      'type': 'executable',
-      'sources': [
-        'asm-function.assem',
-        'program.c',
-      ],
-      'conditions': [
-        ['OS=="linux" or OS=="mac"', {
-          'rules': [
-            {
-              'rule_name': 'convert_assem',
-              'extension': 'assem',
-              'inputs': [],
-              'outputs': [
-                '<(SHARED_INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).S',
-              ],
-              'action': [
-                'bash', '-c', 'cp <(RULE_INPUT_PATH) <@(_outputs)',
-              ],
-              'process_outputs_as_sources': 1,
-            },
-          ],
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/rules/src/subdir4/program.c b/tools/gyp/test/rules/src/subdir4/program.c
deleted file mode 100644
index ad647f4..0000000
--- a/tools/gyp/test/rules/src/subdir4/program.c
+++ /dev/null
@@ -1,19 +0,0 @@
-#include <stdio.h>
-
-// Use the assembly function in linux and mac where it is built.
-#if PLATFORM_LINUX || PLATFORM_MAC
-extern int asm_function(void);
-#else
-int asm_function() {
-  return 41;
-}
-#endif
-
-int main(void)
-{
-  fprintf(stdout, "Hello from program.c\n");
-  fflush(stdout);
-  fprintf(stdout, "Got %d.\n", asm_function());
-  fflush(stdout);
-  return 0;
-}
diff --git a/tools/gyp/test/same-gyp-name/gyptest-all.py b/tools/gyp/test/same-gyp-name/gyptest-all.py
deleted file mode 100755
index cda1a72..0000000
--- a/tools/gyp/test/same-gyp-name/gyptest-all.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Build a .gyp that depends on 2 gyp files with the same name.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-expect1 = """\
-Hello from main1.cc
-"""
-
-expect2 = """\
-Hello from main2.cc
-"""
-
-if test.format == 'xcode':
-  chdir1 = 'relocate/src/subdir1'
-  chdir2 = 'relocate/src/subdir2'
-else:
-  chdir1 = chdir2 = 'relocate/src'
-
-test.run_built_executable('program1', chdir=chdir1, stdout=expect1)
-test.run_built_executable('program2', chdir=chdir2, stdout=expect2)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-gyp-name/gyptest-default.py b/tools/gyp/test/same-gyp-name/gyptest-default.py
deleted file mode 100755
index 5e4bba0..0000000
--- a/tools/gyp/test/same-gyp-name/gyptest-default.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Build a .gyp that depends on 2 gyp files with the same name.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', chdir='relocate/src')
-
-expect1 = """\
-Hello from main1.cc
-"""
-
-expect2 = """\
-Hello from main2.cc
-"""
-
-if test.format == 'xcode':
-  chdir1 = 'relocate/src/subdir1'
-  chdir2 = 'relocate/src/subdir2'
-else:
-  chdir1 = chdir2 = 'relocate/src'
-
-test.run_built_executable('program1', chdir=chdir1, stdout=expect1)
-test.run_built_executable('program2', chdir=chdir2, stdout=expect2)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-gyp-name/gyptest-library.py b/tools/gyp/test/same-gyp-name/gyptest-library.py
deleted file mode 100644
index 957a4a5..0000000
--- a/tools/gyp/test/same-gyp-name/gyptest-library.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a dependency on two gyp files with the same name do not create a
-uid collision in the resulting generated xcode file.
-"""
-
-import TestGyp
-
-import sys
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('test.gyp', chdir='library')
-
-test.pass_test()
diff --git a/tools/gyp/test/same-gyp-name/library/one/sub.gyp b/tools/gyp/test/same-gyp-name/library/one/sub.gyp
deleted file mode 100644
index 1bed941..0000000
--- a/tools/gyp/test/same-gyp-name/library/one/sub.gyp
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'one',
-      'type': 'static_library',
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/library/test.gyp b/tools/gyp/test/same-gyp-name/library/test.gyp
deleted file mode 100644
index 552a77e..0000000
--- a/tools/gyp/test/same-gyp-name/library/test.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'duplicate_names',
-      'type': 'shared_library',
-      'dependencies': [
-        'one/sub.gyp:one',
-        'two/sub.gyp:two',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/library/two/sub.gyp b/tools/gyp/test/same-gyp-name/library/two/sub.gyp
deleted file mode 100644
index 934c98a..0000000
--- a/tools/gyp/test/same-gyp-name/library/two/sub.gyp
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
-  'targets': [
-    {
-      'target_name': 'two',
-      'type': 'static_library',
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/src/all.gyp b/tools/gyp/test/same-gyp-name/src/all.gyp
deleted file mode 100644
index 229f02e..0000000
--- a/tools/gyp/test/same-gyp-name/src/all.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'all_exes',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/executable.gyp:*',
-        'subdir2/executable.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp b/tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp
deleted file mode 100644
index 82483b4..0000000
--- a/tools/gyp/test/same-gyp-name/src/subdir1/executable.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program1',
-      'type': 'executable',
-      'sources': [
-        'main1.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/src/subdir1/main1.cc b/tools/gyp/test/same-gyp-name/src/subdir1/main1.cc
deleted file mode 100644
index 3645558..0000000
--- a/tools/gyp/test/same-gyp-name/src/subdir1/main1.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-int main() {
-  printf("Hello from main1.cc\n");
-  return 0;
-}
diff --git a/tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp b/tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp
deleted file mode 100644
index e353701..0000000
--- a/tools/gyp/test/same-gyp-name/src/subdir2/executable.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program2',
-      'type': 'executable',
-      'sources': [
-        'main2.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-gyp-name/src/subdir2/main2.cc b/tools/gyp/test/same-gyp-name/src/subdir2/main2.cc
deleted file mode 100644
index 0c724de..0000000
--- a/tools/gyp/test/same-gyp-name/src/subdir2/main2.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-int main() {
-  printf("Hello from main2.cc\n");
-  return 0;
-}
diff --git a/tools/gyp/test/same-rule-output-file-name/gyptest-all.py b/tools/gyp/test/same-rule-output-file-name/gyptest-all.py
deleted file mode 100644
index 964e6b7..0000000
--- a/tools/gyp/test/same-rule-output-file-name/gyptest-all.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Tests the use of rules with the same output file name.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('subdirs.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('subdirs.gyp', test.ALL, chdir='relocate/src')
-test.must_exist('relocate/src/subdir1/rule.txt')
-test.must_exist('relocate/src/subdir2/rule.txt')
-
-test.pass_test()
diff --git a/tools/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp b/tools/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp
deleted file mode 100644
index bff381a..0000000
--- a/tools/gyp/test/same-rule-output-file-name/src/subdir1/subdir1.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target1',
-      'type': 'none',
-      'sources': [
-        '../touch.py'
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule1',
-          'extension': 'py',
-          'inputs': [],
-          'outputs': [
-            'rule.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp b/tools/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp
deleted file mode 100644
index 12a3560..0000000
--- a/tools/gyp/test/same-rule-output-file-name/src/subdir2/subdir2.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target2',
-      'type': 'none',
-      'sources': [
-        '../touch.py'
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule2',
-          'extension': 'py',
-          'inputs': [],
-          'outputs': [
-            'rule.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-rule-output-file-name/src/subdirs.gyp b/tools/gyp/test/same-rule-output-file-name/src/subdirs.gyp
deleted file mode 100644
index 25259a3..0000000
--- a/tools/gyp/test/same-rule-output-file-name/src/subdirs.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'subdirs',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/subdir1.gyp:*',
-        'subdir2/subdir2.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-rule-output-file-name/src/touch.py b/tools/gyp/test/same-rule-output-file-name/src/touch.py
deleted file mode 100644
index 2291e9c..0000000
--- a/tools/gyp/test/same-rule-output-file-name/src/touch.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'w+')
-f.write('Hello from touch.py\n')
-f.close()
diff --git a/tools/gyp/test/same-source-file-name/gyptest-all.py b/tools/gyp/test/same-source-file-name/gyptest-all.py
deleted file mode 100755
index 4c21502..0000000
--- a/tools/gyp/test/same-source-file-name/gyptest-all.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Build a .gyp with two targets that share a common .c source file.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', test.ALL, chdir='relocate/src')
-
-expect1 = """\
-Hello from prog1.c
-Hello prog1 from func.c
-"""
-
-expect2 = """\
-Hello from prog2.c
-Hello prog2 from func.c
-"""
-
-test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
-test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-source-file-name/gyptest-default.py b/tools/gyp/test/same-source-file-name/gyptest-default.py
deleted file mode 100755
index 98757c2..0000000
--- a/tools/gyp/test/same-source-file-name/gyptest-default.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Build a .gyp with two targets that share a common .c source file.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('all.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('all.gyp', chdir='relocate/src')
-
-expect1 = """\
-Hello from prog1.c
-Hello prog1 from func.c
-"""
-
-expect2 = """\
-Hello from prog2.c
-Hello prog2 from func.c
-"""
-
-test.run_built_executable('prog1', chdir='relocate/src', stdout=expect1)
-test.run_built_executable('prog2', chdir='relocate/src', stdout=expect2)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-source-file-name/gyptest-pass-executable.py b/tools/gyp/test/same-source-file-name/gyptest-pass-executable.py
deleted file mode 100755
index 1a3dcda..0000000
--- a/tools/gyp/test/same-source-file-name/gyptest-pass-executable.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that gyp does not fail on executable targets which have several files
-with the same basename.
-"""
-
-import TestGyp
-
-# While MSVS supports building executables that contain several files with the
-# same name, the msvs gyp generator does not.
-test = TestGyp.TestGyp(formats=['!msvs'])
-
-test.run_gyp('double-executable.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('double-executable.gyp', test.ALL, chdir='relocate/src')
-
-expect = """\
-Hello from prog3.c
-Hello prog3 from func.c
-Hello prog3 from subdir1/func.c
-Hello prog3 from subdir2/func.c
-"""
-
-test.run_built_executable('prog3', chdir='relocate/src', stdout=expect)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-source-file-name/gyptest-pass-shared.py b/tools/gyp/test/same-source-file-name/gyptest-pass-shared.py
deleted file mode 100755
index a498f1a..0000000
--- a/tools/gyp/test/same-source-file-name/gyptest-pass-shared.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that gyp does not fail on shared_library targets which have several files
-with the same basename.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('double-shared.gyp', chdir='src')
-
-test.pass_test()
diff --git a/tools/gyp/test/same-source-file-name/gyptest-static.py b/tools/gyp/test/same-source-file-name/gyptest-static.py
deleted file mode 100755
index 7fa2772..0000000
--- a/tools/gyp/test/same-source-file-name/gyptest-static.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that gyp fails on static_library targets which have several files with
-the same basename.
-"""
-
-import os
-import sys
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# Fails by default for the compatibility with legacy generators such as
-# VCProj generator for Visual C++ 2008 and Makefile generator on Mac.
-# TODO: Update expected behavior when these legacy generators are deprecated.
-test.run_gyp('double-static.gyp', chdir='src', status=1, stderr=None)
-
-if ((test.format == 'make' and sys.platform == 'darwin') or
-    (test.format == 'msvs' and
-        int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
-               chdir='src', status=1, stderr=None)
-else:
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
-               chdir='src')
-  test.build('double-static.gyp', test.ALL, chdir='src')
-
-test.pass_test()
diff --git a/tools/gyp/test/same-source-file-name/src/all.gyp b/tools/gyp/test/same-source-file-name/src/all.gyp
deleted file mode 100644
index 4fe052c..0000000
--- a/tools/gyp/test/same-source-file-name/src/all.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'defines': [
-        'PROG="prog1"',
-      ],
-      'sources': [
-        'prog1.c',
-        'func.c',
-      ],
-    },
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'defines': [
-        'PROG="prog2"',
-      ],
-      'sources': [
-        'prog2.c',
-        'func.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-source-file-name/src/double-executable.gyp b/tools/gyp/test/same-source-file-name/src/double-executable.gyp
deleted file mode 100644
index 477bd87..0000000
--- a/tools/gyp/test/same-source-file-name/src/double-executable.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'prog3',
-      'type': 'executable',
-      'sources': [
-        'prog3.c',
-        'func.c',
-        'subdir1/func.c',
-        'subdir2/func.c',
-      ],
-      'defines': [
-        'PROG="prog3"',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-source-file-name/src/double-shared.gyp b/tools/gyp/test/same-source-file-name/src/double-shared.gyp
deleted file mode 100644
index 438b50f..0000000
--- a/tools/gyp/test/same-source-file-name/src/double-shared.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'lib',
-      'product_name': 'test_shared_lib',
-      'type': 'shared_library',
-      'sources': [
-        'prog2.c',
-        'func.c',
-        'subdir1/func.c',
-        'subdir2/func.c',
-      ],
-      'defines': [
-        'PROG="prog2"',
-      ],
-      'conditions': [
-        ['OS=="linux"', {
-          'cflags': ['-fPIC'],
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-source-file-name/src/double-static.gyp b/tools/gyp/test/same-source-file-name/src/double-static.gyp
deleted file mode 100644
index e49c0e1..0000000
--- a/tools/gyp/test/same-source-file-name/src/double-static.gyp
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'lib',
-      'product_name': 'test_static_lib',
-      'type': 'static_library',
-      'sources': [
-        'prog1.c',
-        'func.c',
-        'subdir1/func.c',
-        'subdir2/func.c',
-      ],
-      'defines': [
-        'PROG="prog1"',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-source-file-name/src/func.c b/tools/gyp/test/same-source-file-name/src/func.c
deleted file mode 100644
index e069c69..0000000
--- a/tools/gyp/test/same-source-file-name/src/func.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void func(void)
-{
-  printf("Hello %s from func.c\n", PROG);
-}
diff --git a/tools/gyp/test/same-source-file-name/src/prog1.c b/tools/gyp/test/same-source-file-name/src/prog1.c
deleted file mode 100644
index 604e2b9..0000000
--- a/tools/gyp/test/same-source-file-name/src/prog1.c
+++ /dev/null
@@ -1,16 +0,0 @@
-#include <stdio.h>
-
-extern void func(void);
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  func();
-  /*
-   * Uncomment to test same-named files in different directories,
-   * which Visual Studio doesn't support.
-  subdir1_func();
-  subdir2_func();
-   */
-  return 0;
-}
diff --git a/tools/gyp/test/same-source-file-name/src/prog2.c b/tools/gyp/test/same-source-file-name/src/prog2.c
deleted file mode 100644
index 466ee35..0000000
--- a/tools/gyp/test/same-source-file-name/src/prog2.c
+++ /dev/null
@@ -1,16 +0,0 @@
-#include <stdio.h>
-
-extern void func(void);
-
-int main(void)
-{
-  printf("Hello from prog2.c\n");
-  func();
-  /*
-   * Uncomment to test same-named files in different directories,
-   * which Visual Studio doesn't support.
-  subdir1_func();
-  subdir2_func();
-   */
-  return 0;
-}
diff --git a/tools/gyp/test/same-source-file-name/src/prog3.c b/tools/gyp/test/same-source-file-name/src/prog3.c
deleted file mode 100644
index 34d495c..0000000
--- a/tools/gyp/test/same-source-file-name/src/prog3.c
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-extern void func(void);
-extern void subdir1_func(void);
-extern void subdir2_func(void);
-
-int main(void)
-{
-  printf("Hello from prog3.c\n");
-  func();
-  subdir1_func();
-  subdir2_func();
-  return 0;
-}
diff --git a/tools/gyp/test/same-source-file-name/src/subdir1/func.c b/tools/gyp/test/same-source-file-name/src/subdir1/func.c
deleted file mode 100644
index b73450d..0000000
--- a/tools/gyp/test/same-source-file-name/src/subdir1/func.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void subdir1_func(void)
-{
-  printf("Hello %s from subdir1/func.c\n", PROG);
-}
diff --git a/tools/gyp/test/same-source-file-name/src/subdir2/func.c b/tools/gyp/test/same-source-file-name/src/subdir2/func.c
deleted file mode 100644
index 0248b57..0000000
--- a/tools/gyp/test/same-source-file-name/src/subdir2/func.c
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <stdio.h>
-
-void subdir2_func(void)
-{
-  printf("Hello %s from subdir2/func.c\n", PROG);
-}
diff --git a/tools/gyp/test/same-target-name-different-directory/gyptest-all.py b/tools/gyp/test/same-target-name-different-directory/gyptest-all.py
deleted file mode 100644
index 755691b..0000000
--- a/tools/gyp/test/same-target-name-different-directory/gyptest-all.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test cases when multiple targets in different directories have the same name.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(formats=['ninja', 'make'])
-
-# xcode-ninja fails to generate a project due to id collisions
-# cf. https://code.google.com/p/gyp/issues/detail?id=461
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('subdirs.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Test that we build all targets.
-test.build('subdirs.gyp', 'target', chdir='relocate/src')
-test.must_exist('relocate/src/subdir1/action1.txt')
-test.must_exist('relocate/src/subdir2/action2.txt')
-
-# Test that we build all targets using the correct actions, even if they have
-# the same names.
-test.build('subdirs.gyp', 'target_same_action_name', chdir='relocate/src')
-test.must_exist('relocate/src/subdir1/action.txt')
-test.must_exist('relocate/src/subdir2/action.txt')
-
-# Test that we build all targets using the correct rules, even if they have
-# the same names.
-test.build('subdirs.gyp', 'target_same_rule_name', chdir='relocate/src')
-test.must_exist('relocate/src/subdir1/rule.txt')
-test.must_exist('relocate/src/subdir2/rule.txt')
-
-test.pass_test()
diff --git a/tools/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp b/tools/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp
deleted file mode 100644
index d4ec2e6..0000000
--- a/tools/gyp/test/same-target-name-different-directory/src/subdir1/subdir1.gyp
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action1',
-          'inputs': [],
-          'outputs': [
-            'action1.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'target_same_action_name',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action',
-          'inputs': [],
-          'outputs': [
-            'action.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'target_same_rule_name',
-      'type': 'none',
-      'sources': [
-        '../touch.py'
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule',
-          'extension': 'py',
-          'inputs': [],
-          'outputs': [
-            'rule.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp b/tools/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp
deleted file mode 100644
index 9006d45..0000000
--- a/tools/gyp/test/same-target-name-different-directory/src/subdir2/subdir2.gyp
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'target',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action2',
-          'inputs': [],
-          'outputs': [
-            'action2.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'target_same_action_name',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'action',
-          'inputs': [],
-          'outputs': [
-            'action.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-    {
-      'target_name': 'target_same_rule_name',
-      'type': 'none',
-      'sources': [
-        '../touch.py'
-      ],
-      'rules': [
-        {
-          'rule_name': 'rule',
-          'extension': 'py',
-          'inputs': [],
-          'outputs': [
-            'rule.txt',
-          ],
-          'action': [
-            'python', '../touch.py', '<(_outputs)',
-          ],
-          # Allows the test to run without hermetic cygwin on windows.
-          'msvs_cygwin_shell': 0,
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-target-name-different-directory/src/subdirs.gyp b/tools/gyp/test/same-target-name-different-directory/src/subdirs.gyp
deleted file mode 100644
index 65413e7..0000000
--- a/tools/gyp/test/same-target-name-different-directory/src/subdirs.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'subdirs',
-      'type': 'none',
-      'dependencies': [
-        'subdir1/subdir1.gyp:*',
-        'subdir2/subdir2.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-target-name-different-directory/src/touch.py b/tools/gyp/test/same-target-name-different-directory/src/touch.py
deleted file mode 100644
index 2291e9c..0000000
--- a/tools/gyp/test/same-target-name-different-directory/src/touch.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-f = open(sys.argv[1], 'w+')
-f.write('Hello from touch.py\n')
-f.close()
diff --git a/tools/gyp/test/same-target-name/gyptest-same-target-name.py b/tools/gyp/test/same-target-name/gyptest-same-target-name.py
deleted file mode 100755
index bfe5540..0000000
--- a/tools/gyp/test/same-target-name/gyptest-same-target-name.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Check that duplicate targets in a directory gives an error.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# Require that gyp files with duplicate targets spit out an error.
-test.run_gyp('all.gyp', chdir='src', status=1, stderr=None)
-
-test.pass_test()
diff --git a/tools/gyp/test/same-target-name/src/all.gyp b/tools/gyp/test/same-target-name/src/all.gyp
deleted file mode 100644
index ac16976..0000000
--- a/tools/gyp/test/same-target-name/src/all.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'all_exes',
-      'type': 'none',
-      'dependencies': [
-        'executable1.gyp:*',
-        'executable2.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-target-name/src/executable1.gyp b/tools/gyp/test/same-target-name/src/executable1.gyp
deleted file mode 100644
index 3c492c1..0000000
--- a/tools/gyp/test/same-target-name/src/executable1.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        'main1.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/same-target-name/src/executable2.gyp b/tools/gyp/test/same-target-name/src/executable2.gyp
deleted file mode 100644
index 41e84a6..0000000
--- a/tools/gyp/test/same-target-name/src/executable2.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        'main2.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/sanitize-rule-names/blah.S b/tools/gyp/test/sanitize-rule-names/blah.S
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/sanitize-rule-names/blah.S
+++ /dev/null
diff --git a/tools/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py b/tools/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py
deleted file mode 100644
index 968a0ce..0000000
--- a/tools/gyp/test/sanitize-rule-names/gyptest-sanitize-rule-names.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure rule names with non-"normal" characters in them don't cause
-broken build files. This test was originally causing broken .ninja files.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-test.run_gyp('sanitize-rule-names.gyp')
-test.build('sanitize-rule-names.gyp', test.ALL)
-test.pass_test()
diff --git a/tools/gyp/test/sanitize-rule-names/hello.cc b/tools/gyp/test/sanitize-rule-names/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/sanitize-rule-names/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp b/tools/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp
deleted file mode 100644
index 184253e..0000000
--- a/tools/gyp/test/sanitize-rule-names/sanitize-rule-names.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 's_test',
-      'type': 'executable',
-      'rules': [
-        {
-          # Make sure this rule name doesn't cause an invalid ninja file.
-          'rule_name': 'rule name with odd characters ()/',
-          'extension': 'S',
-          'outputs': ['outfile'],
-          'msvs_cygwin_shell': 0,
-          'msvs_quote_cmd': 0,
-          'action': ['python', 'script.py', '<(RULE_INPUT_PATH)', 'outfile'],
-        },
-      ],
-      'sources': [
-        'blah.S',
-        'hello.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/sanitize-rule-names/script.py b/tools/gyp/test/sanitize-rule-names/script.py
deleted file mode 100644
index ae2efa1..0000000
--- a/tools/gyp/test/sanitize-rule-names/script.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import shutil
-import sys
-
-shutil.copyfile(*sys.argv[1:])
diff --git a/tools/gyp/test/self-dependency/common.gypi b/tools/gyp/test/self-dependency/common.gypi
deleted file mode 100644
index aae221a..0000000
--- a/tools/gyp/test/self-dependency/common.gypi
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# A common file that other .gyp files include.
-# Makes every target in the project depend on dep.gyp:dep.
-{
-  'target_defaults': {
-    'dependencies': [
-      'dep.gyp:dep',
-    ],
-  },
-}
diff --git a/tools/gyp/test/self-dependency/dep.gyp b/tools/gyp/test/self-dependency/dep.gyp
deleted file mode 100644
index 2b6c9dd..0000000
--- a/tools/gyp/test/self-dependency/dep.gyp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# dep.gyp contains a target dep, on which all the targets in the project
-# depend. This means there's a self-dependency of dep on itself, which is
-# pruned by setting prune_self_dependency to 1.
-
-{
-  'includes': [
-    'common.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'dep',
-      'type': 'none',
-      'variables': {
-        # Without this GYP will report a cycle in dependency graph.
-        'prune_self_dependency': 1,
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/self-dependency/gyptest-self-dependency.py b/tools/gyp/test/self-dependency/gyptest-self-dependency.py
deleted file mode 100755
index 82fab27..0000000
--- a/tools/gyp/test/self-dependency/gyptest-self-dependency.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that pulling in a dependency a second time in a conditional works for
-shared_library targets. Regression test for http://crbug.com/122588
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('self_dependency.gyp')
-
-# If running gyp worked, all is well.
-test.pass_test()
diff --git a/tools/gyp/test/self-dependency/self_dependency.gyp b/tools/gyp/test/self-dependency/self_dependency.gyp
deleted file mode 100644
index 0ca76c6..0000000
--- a/tools/gyp/test/self-dependency/self_dependency.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    'common.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'a',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/sibling/gyptest-all.py b/tools/gyp/test/sibling/gyptest-all.py
deleted file mode 100755
index 318e1a3..0000000
--- a/tools/gyp/test/sibling/gyptest-all.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# The xcode-ninja generator handles gypfiles which are not at the
-# project root incorrectly.
-# cf. https://code.google.com/p/gyp/issues/detail?id=460
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('build/all.gyp', chdir='src')
-
-test.build('build/all.gyp', test.ALL, chdir='src')
-
-chdir = 'src/build'
-
-# The top-level Makefile is in the directory where gyp was run.
-# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
-# file? What about when passing in multiple .gyp files? Would sub-project
-# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
-if test.format in ('make', 'ninja', 'cmake'):
-  chdir = 'src'
-
-if test.format == 'xcode':
-  chdir = 'src/prog1'
-test.run_built_executable('program1',
-                          chdir=chdir,
-                          stdout="Hello from prog1.c\n")
-
-if test.format == 'xcode':
-  chdir = 'src/prog2'
-test.run_built_executable('program2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/sibling/gyptest-relocate.py b/tools/gyp/test/sibling/gyptest-relocate.py
deleted file mode 100755
index 05fa9d9..0000000
--- a/tools/gyp/test/sibling/gyptest-relocate.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# The xcode-ninja generator handles gypfiles which are not at the
-# project root incorrectly.
-# cf. https://code.google.com/p/gyp/issues/detail?id=460
-if test.format == 'xcode-ninja':
-  test.skip_test()
-
-test.run_gyp('build/all.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('build/all.gyp', test.ALL, chdir='relocate/src')
-
-chdir = 'relocate/src/build'
-
-# The top-level Makefile is in the directory where gyp was run.
-# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
-# file? What about when passing in multiple .gyp files? Would sub-project
-# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
-if test.format in ('make', 'ninja', 'cmake'):
-  chdir = 'relocate/src'
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/prog1'
-test.run_built_executable('program1',
-                          chdir=chdir,
-                          stdout="Hello from prog1.c\n")
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/prog2'
-test.run_built_executable('program2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/sibling/src/build/all.gyp b/tools/gyp/test/sibling/src/build/all.gyp
deleted file mode 100644
index 79c80c9..0000000
--- a/tools/gyp/test/sibling/src/build/all.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'All',
-      'type': 'none',
-      'dependencies': [
-        '../prog1/prog1.gyp:*',
-        '../prog2/prog2.gyp:*',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/sibling/src/prog1/prog1.c b/tools/gyp/test/sibling/src/prog1/prog1.c
deleted file mode 100644
index 218e994..0000000
--- a/tools/gyp/test/sibling/src/prog1/prog1.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/sibling/src/prog1/prog1.gyp b/tools/gyp/test/sibling/src/prog1/prog1.gyp
deleted file mode 100644
index 4532e4b..0000000
--- a/tools/gyp/test/sibling/src/prog1/prog1.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program1',
-      'type': 'executable',
-      'sources': [
-        'prog1.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/sibling/src/prog2/prog2.c b/tools/gyp/test/sibling/src/prog2/prog2.c
deleted file mode 100644
index 12a3188..0000000
--- a/tools/gyp/test/sibling/src/prog2/prog2.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog2.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/sibling/src/prog2/prog2.gyp b/tools/gyp/test/sibling/src/prog2/prog2.gyp
deleted file mode 100644
index 4cf7f6e..0000000
--- a/tools/gyp/test/sibling/src/prog2/prog2.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program2',
-      'type': 'executable',
-      'sources': [
-        'prog2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/small/gyptest-small.py b/tools/gyp/test/small/gyptest-small.py
deleted file mode 100755
index e7562cb..0000000
--- a/tools/gyp/test/small/gyptest-small.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Runs small tests.
-"""
-
-import imp
-import os
-import sys
-import unittest
-
-import TestGyp
-
-
-test = TestGyp.TestGyp()
-
-# Add pylib to the import path (so tests can import their dependencies).
-# This is consistant with the path.append done in the top file "gyp".
-sys.path.insert(0, os.path.join(test._cwd, 'pylib'))
-
-# Add new test suites here.
-files_to_test = [
-    'pylib/gyp/MSVSSettings_test.py',
-    'pylib/gyp/easy_xml_test.py',
-    'pylib/gyp/generator/msvs_test.py',
-    'pylib/gyp/generator/ninja_test.py',
-    'pylib/gyp/generator/xcode_test.py',
-    'pylib/gyp/common_test.py',
-    'pylib/gyp/input_test.py',
-]
-
-# Collect all the suites from the above files.
-suites = []
-for filename in files_to_test:
-  # Carve the module name out of the path.
-  name = os.path.splitext(os.path.split(filename)[1])[0]
-  # Find the complete module path.
-  full_filename = os.path.join(test._cwd, filename)
-  # Load the module.
-  module = imp.load_source(name, full_filename)
-  # Add it to the list of test suites.
-  suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))
-# Create combined suite.
-all_tests = unittest.TestSuite(suites)
-
-# Run all the tests.
-result = unittest.TextTestRunner(verbosity=2).run(all_tests)
-if result.failures or result.errors:
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/standalone-static-library/gyptest-standalone-static-library.py b/tools/gyp/test/standalone-static-library/gyptest-standalone-static-library.py
deleted file mode 100644
index 50535ab..0000000
--- a/tools/gyp/test/standalone-static-library/gyptest-standalone-static-library.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies build of a static_library with the standalone_static_library flag set.
-"""
-
-import os
-import subprocess
-import sys
-import TestGyp
-
-# standalone_static_library currently means two things: a specific output
-# location for the built target and non-thin archive files.
-test = TestGyp.TestGyp()
-
-# Verify that types other than static_library cause a failure.
-test.run_gyp('invalid.gyp', status=1, stderr=None)
-target_str = 'invalid.gyp:bad#target'
-err = ['gyp: Target %s has type executable but standalone_static_library flag '
-       'is only valid for static_library type.' % target_str]
-test.must_contain_all_lines(test.stderr(), err)
-
-# Build a valid standalone_static_library.
-test.run_gyp('mylib.gyp')
-test.build('mylib.gyp', target='prog')
-
-# Verify that the static library is copied to the correct location.
-# We expect the library to be copied to $PRODUCT_DIR.
-standalone_static_library_dir = test.EXECUTABLE
-path_to_lib = os.path.split(
-    test.built_file_path('mylib', type=standalone_static_library_dir))[0]
-lib_name = test.built_file_basename('mylib', type=test.STATIC_LIB)
-path = os.path.join(path_to_lib, lib_name)
-test.must_exist(path)
-
-# Verify that the program runs properly.
-expect = 'hello from mylib.c\n'
-test.run_built_executable('prog', stdout=expect)
-
-# Verify that libmylib.a contains symbols.  "ar -x" fails on a 'thin' archive.
-supports_thick = ('make', 'ninja', 'cmake')
-if test.format in supports_thick and sys.platform.startswith('linux'):
-  retcode = subprocess.call(['ar', '-x', path])
-  assert retcode == 0
-
-test.pass_test()
diff --git a/tools/gyp/test/standalone-static-library/invalid.gyp b/tools/gyp/test/standalone-static-library/invalid.gyp
deleted file mode 100644
index 54b3211..0000000
--- a/tools/gyp/test/standalone-static-library/invalid.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'bad',
-      'type': 'executable',
-      'standalone_static_library': 1,
-      'sources': [
-        'prog.c',
-      ],
-    },
-  ],
-}
\ No newline at end of file
diff --git a/tools/gyp/test/standalone-static-library/mylib.c b/tools/gyp/test/standalone-static-library/mylib.c
deleted file mode 100644
index 108be61..0000000
--- a/tools/gyp/test/standalone-static-library/mylib.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-void print(void)
-{
-  printf("hello from mylib.c\n");
-  return;
-}
diff --git a/tools/gyp/test/standalone-static-library/mylib.gyp b/tools/gyp/test/standalone-static-library/mylib.gyp
deleted file mode 100644
index 2d191de..0000000
--- a/tools/gyp/test/standalone-static-library/mylib.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'mylib',
-      'type': 'static_library',
-      'standalone_static_library': 1,
-      'sources': [
-        'mylib.c',
-      ],
-    },
-    {
-      'target_name': 'prog',
-      'type': 'executable',
-      'sources': [
-        'prog.c',
-      ],
-      'dependencies': [
-        'mylib',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/standalone-static-library/prog.c b/tools/gyp/test/standalone-static-library/prog.c
deleted file mode 100644
index 8af5c90..0000000
--- a/tools/gyp/test/standalone-static-library/prog.c
+++ /dev/null
@@ -1,7 +0,0 @@
-extern void print(void);
-
-int main(void)
-{
-  print();
-  return 0;
-}
diff --git a/tools/gyp/test/standalone/gyptest-standalone.py b/tools/gyp/test/standalone/gyptest-standalone.py
deleted file mode 100644
index 8714370..0000000
--- a/tools/gyp/test/standalone/gyptest-standalone.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that a project hierarchy created with the --generator-output=
-option can be built even when it's relocated to a different path.
-"""
-
-import TestGyp
-import os
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('standalone.gyp', '-Gstandalone')
-
-# Look at all the files in the tree to make sure none
-# of them reference the gyp file.
-for root, dirs, files in os.walk("."):
-  for file in files:
-    # ignore ourself
-    if os.path.splitext(__file__)[0] in file:
-      continue
-    file = os.path.join(root, file)
-    contents = open(file).read()
-    if 'standalone.gyp' in contents:
-      print 'gyp file referenced in generated output: %s' % file
-      test.fail_test()
-
-
-test.pass_test()
diff --git a/tools/gyp/test/standalone/standalone.gyp b/tools/gyp/test/standalone/standalone.gyp
deleted file mode 100644
index b2a6785..0000000
--- a/tools/gyp/test/standalone/standalone.gyp
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name' : 'foo',
-      'type' : 'executable'
-    },
-  ]
-}
diff --git a/tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py b/tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py
deleted file mode 100755
index 9dfb8b0..0000000
--- a/tools/gyp/test/subdirectory/gyptest-SYMROOT-all.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target and a subsidiary dependent target from a
-.gyp file in a subdirectory, without specifying an explicit output build
-directory, and using the generated solution or project file at the top
-of the tree as the entry point.
-
-The configuration sets the Xcode SYMROOT variable and uses --depth=
-to make Xcode behave like the other build tools--that is, put all
-built targets in a single output build directory at the top of the tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Suppress the test infrastructure's setting SYMROOT on the command line.
-test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')
-
-test.run_built_executable('prog1',
-                          stdout="Hello from prog1.c\n",
-                          chdir='relocate/src')
-test.run_built_executable('prog2',
-                          stdout="Hello from prog2.c\n",
-                          chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py b/tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py
deleted file mode 100755
index 8796650..0000000
--- a/tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target and a subsidiary dependent target from a
-.gyp file in a subdirectory, without specifying an explicit output build
-directory, and using the generated solution or project file at the top
-of the tree as the entry point.
-
-The configuration sets the Xcode SYMROOT variable and uses --depth=
-to make Xcode behave like the other build tools--that is, put all
-built targets in a single output build directory at the top of the tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-# Suppress the test infrastructure's setting SYMROOT on the command line.
-test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
-
-test.run_built_executable('prog1',
-                          stdout="Hello from prog1.c\n",
-                          chdir='relocate/src')
-
-test.run_built_executable('prog2',
-                          stdout="Hello from prog2.c\n",
-                          chdir='relocate/src')
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-subdir-all.py b/tools/gyp/test/subdirectory/gyptest-subdir-all.py
deleted file mode 100755
index d5c4584..0000000
--- a/tools/gyp/test/subdirectory/gyptest-subdir-all.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a subsidiary dependent target from a .gyp file in a
-subdirectory, without specifying an explicit output build directory,
-and using the subdirectory's solution or project file as the entry point.
-"""
-
-import TestGyp
-
-# Ninja doesn't support relocation.
-# CMake produces a single CMakeLists.txt in the output directory.
-test = TestGyp.TestGyp(formats=['!ninja', '!cmake'])
-
-test.run_gyp('prog1.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-chdir = 'relocate/src/subdir'
-target = test.ALL
-
-test.build('prog2.gyp', target, chdir=chdir)
-
-test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
-
-test.run_built_executable('prog2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-subdir-default.py b/tools/gyp/test/subdirectory/gyptest-subdir-default.py
deleted file mode 100755
index 2cb6659..0000000
--- a/tools/gyp/test/subdirectory/gyptest-subdir-default.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a subsidiary dependent target from a .gyp file in a
-subdirectory, without specifying an explicit output build directory,
-and using the subdirectory's solution or project file as the entry point.
-"""
-
-import TestGyp
-import errno
-
-# Ninja doesn't support relocation.
-# CMake produces a single CMakeLists.txt in the output directory.
-test = TestGyp.TestGyp(formats=['!ninja', '!cmake'])
-
-test.run_gyp('prog1.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-chdir = 'relocate/src/subdir'
-
-test.build('prog2.gyp', chdir=chdir)
-
-test.built_file_must_not_exist('prog1', type=test.EXECUTABLE, chdir=chdir)
-
-test.run_built_executable('prog2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-subdir2-deep.py b/tools/gyp/test/subdirectory/gyptest-subdir2-deep.py
deleted file mode 100755
index 4854898..0000000
--- a/tools/gyp/test/subdirectory/gyptest-subdir2-deep.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a project rooted several layers under src_dir works.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('prog3.gyp', chdir='src/subdir/subdir2')
-
-test.relocate('src', 'relocate/src')
-
-test.build('prog3.gyp', test.ALL, chdir='relocate/src/subdir/subdir2')
-
-test.run_built_executable('prog3',
-                          chdir='relocate/src/subdir/subdir2',
-                          stdout="Hello from prog3.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-top-all.py b/tools/gyp/test/subdirectory/gyptest-top-all.py
deleted file mode 100755
index b3c25b1..0000000
--- a/tools/gyp/test/subdirectory/gyptest-top-all.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target and a subsidiary dependent target from a
-.gyp file in a subdirectory, without specifying an explicit output build
-directory, and using the generated solution or project file at the top
-of the tree as the entry point.
-
-There is a difference here in the default behavior of the underlying
-build tools.  Specifically, when building the entire "solution", Xcode
-puts the output of each project relative to the .xcodeproj directory,
-while Visual Studio (and our implementation of Make) put it
-in a build directory relative to the "solution"--that is, the entry-point
-from which you built the entire tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('prog1.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('prog1.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable('prog1',
-                          stdout="Hello from prog1.c\n",
-                          chdir='relocate/src')
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('prog2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/gyptest-top-default.py b/tools/gyp/test/subdirectory/gyptest-top-default.py
deleted file mode 100755
index 2448dd9..0000000
--- a/tools/gyp/test/subdirectory/gyptest-top-default.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a target and a subsidiary dependent target from a
-.gyp file in a subdirectory, without specifying an explicit output build
-directory, and using the generated solution or project file at the top
-of the tree as the entry point.
-
-There is a difference here in the default behavior of the underlying
-build tools.  Specifically, when building the entire "solution", Xcode
-puts the output of each project relative to the .xcodeproj directory,
-while Visual Studio (and our implementation of Make) put it
-in a build directory relative to the "solution"--that is, the entry-point
-from which you built the entire tree.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('prog1.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('prog1.gyp', chdir='relocate/src')
-
-test.run_built_executable('prog1',
-                          stdout="Hello from prog1.c\n",
-                          chdir='relocate/src')
-
-if test.format == 'xcode':
-  chdir = 'relocate/src/subdir'
-else:
-  chdir = 'relocate/src'
-test.run_built_executable('prog2',
-                          chdir=chdir,
-                          stdout="Hello from prog2.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/subdirectory/src/prog1.c b/tools/gyp/test/subdirectory/src/prog1.c
deleted file mode 100644
index 218e994..0000000
--- a/tools/gyp/test/subdirectory/src/prog1.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/subdirectory/src/prog1.gyp b/tools/gyp/test/subdirectory/src/prog1.gyp
deleted file mode 100644
index 2aa66ce..0000000
--- a/tools/gyp/test/subdirectory/src/prog1.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    'symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'dependencies': [
-        'subdir/prog2.gyp:prog2',
-      ],
-      'sources': [
-        'prog1.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/subdirectory/src/subdir/prog2.c b/tools/gyp/test/subdirectory/src/subdir/prog2.c
deleted file mode 100644
index 12a3188..0000000
--- a/tools/gyp/test/subdirectory/src/subdir/prog2.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog2.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/subdirectory/src/subdir/prog2.gyp b/tools/gyp/test/subdirectory/src/subdir/prog2.gyp
deleted file mode 100644
index c6cd35f..0000000
--- a/tools/gyp/test/subdirectory/src/subdir/prog2.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'sources': [
-        'prog2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c b/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
deleted file mode 100644
index a326dc6..0000000
--- a/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog3.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp b/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
deleted file mode 100644
index b49fb59..0000000
--- a/tools/gyp/test/subdirectory/src/subdir/subdir2/prog3.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': [
-    '../../symroot.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'prog3',
-      'type': 'executable',
-      'sources': [
-        'prog3.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/subdirectory/src/symroot.gypi b/tools/gyp/test/subdirectory/src/symroot.gypi
deleted file mode 100644
index 5199164..0000000
--- a/tools/gyp/test/subdirectory/src/symroot.gypi
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'set_symroot%': 0,
-  },
-  'conditions': [
-    ['set_symroot == 1', {
-      'xcode_settings': {
-        'SYMROOT': '<(DEPTH)/build',
-      },
-    }],
-  ],
-}
diff --git a/tools/gyp/test/symlinks/gyptest-symlinks.py b/tools/gyp/test/symlinks/gyptest-symlinks.py
deleted file mode 100755
index f0c2d51..0000000
--- a/tools/gyp/test/symlinks/gyptest-symlinks.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that RelativePath(s, d) doesn't return a path starting with '..' when
-s is textually below d, but is also a symlink to a file that is not below d.
-
-Returning .. in this case would break the Ninja generator in such a case,
-because it computes output directories by concatenating paths, and concat'ing
-a path starting with .. can unexpectedly erase other parts of the path. It's
-difficult to test this directly since the test harness assumes toplevel_dir is
-the root of the repository, but this test should at least verify that the
-required behavior doesn't change.
-"""
-
-import TestGyp
-import os
-import sys
-import tempfile
-
-if sys.platform != 'win32':
-  test = TestGyp.TestGyp()
-
-  # Copy hello.gyp and hello.c to temporary named files, which will then be
-  # symlinked back and processed. Note that we don't ask gyp to touch the
-  # original files at all; they are only there as source material for the copy.
-  # That's why hello.gyp references symlink_hello.c instead of hello.c.
-  with tempfile.NamedTemporaryFile() as gyp_file:
-    with tempfile.NamedTemporaryFile() as c_file:
-      with open('hello.gyp') as orig_gyp_file:
-        gyp_file.write(orig_gyp_file.read())
-        gyp_file.flush()
-      with open('hello.c') as orig_c_file:
-        c_file.write(orig_c_file.read())
-        c_file.flush()
-      # We need to flush the files because we want to read them before closing
-      # them, since when they are closed they will be deleted.
-
-      # Don't proceed with the test on a system that doesn't let you read from
-      # a still-open temporary file.
-      if os.path.getsize(gyp_file.name) == 0:
-        raise OSError("Copy to temporary file didn't work.")
-
-      symlink_gyp = test.built_file_path('symlink_hello.gyp')
-      symlink_c = test.built_file_path('symlink_hello.c')
-      outdir = os.path.dirname(symlink_gyp)
-
-      # Make sure the outdir exists.
-      try:
-        os.makedirs(outdir)
-      except OSError:
-        if not os.path.isdir(outdir):
-          raise
-      os.symlink(gyp_file.name, symlink_gyp)
-      os.symlink(c_file.name, symlink_c)
-
-      # Run gyp on the symlinked files.
-      test.run_gyp(symlink_gyp, chdir=outdir)
-      test.build(symlink_gyp, chdir=outdir)
-      test.run_built_executable('symlink_hello', stdout="Hello, world!\n",
-                                chdir=outdir)
-
-      test.pass_test()
diff --git a/tools/gyp/test/symlinks/hello.c b/tools/gyp/test/symlinks/hello.c
deleted file mode 100644
index c63204b..0000000
--- a/tools/gyp/test/symlinks/hello.c
+++ /dev/null
@@ -1,12 +0,0 @@
-/* Copyright (c) 2015 Google Inc. All rights reserved.
-   Use of this source code is governed by a BSD-style license that can be
-   found in the LICENSE file.
-*/
-
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello, world!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/symlinks/hello.gyp b/tools/gyp/test/symlinks/hello.gyp
deleted file mode 100644
index 81d9f18..0000000
--- a/tools/gyp/test/symlinks/hello.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'symlink_hello',
-      'type': 'executable',
-      'sources': [
-        'symlink_hello.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/target/gyptest-target.py b/tools/gyp/test/target/gyptest-target.py
deleted file mode 100644
index 4338db7..0000000
--- a/tools/gyp/test/target/gyptest-target.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies simplest-possible build of a "Hello, world!" program
-using non-default extension. In particular, verifies how
-target_extension is used to avoid MSB8012 for msvs.
-"""
-
-import sys
-import TestGyp
-
-if sys.platform in ('win32', 'cygwin'):
-  test = TestGyp.TestGyp()
-
-  test.run_gyp('target.gyp')
-  test.build('target.gyp')
-
-  # executables
-  test.built_file_must_exist('hello1.stuff', test.EXECUTABLE, bare=True)
-  test.built_file_must_exist('hello2.exe', test.EXECUTABLE, bare=True)
-  test.built_file_must_not_exist('hello2.stuff', test.EXECUTABLE, bare=True)
-
-  # check msvs log for errors
-  if test.format == "msvs":
-    log_file = "obj\\hello1\\hello1.log"
-    test.built_file_must_exist(log_file)
-    test.built_file_must_not_contain(log_file, "MSB8012")
-
-    log_file = "obj\\hello2\\hello2.log"
-    test.built_file_must_exist(log_file)
-    test.built_file_must_not_contain(log_file, "MSB8012")
-
-  test.pass_test()
diff --git a/tools/gyp/test/target/hello.c b/tools/gyp/test/target/hello.c
deleted file mode 100644
index 3d535d3..0000000
--- a/tools/gyp/test/target/hello.c
+++ /dev/null
@@ -1,7 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-void main(void) {
-  printf("Hello, world!\n");
-}
diff --git a/tools/gyp/test/target/target.gyp b/tools/gyp/test/target/target.gyp
deleted file mode 100644
index c87e30f..0000000
--- a/tools/gyp/test/target/target.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello1',
-      'product_extension': 'stuff',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    },
-    {
-      'target_name': 'hello2',
-      'target_extension': 'stuff',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-      ],
-    }
-  ]
-}
diff --git a/tools/gyp/test/toolsets/gyptest-toolsets.py b/tools/gyp/test/toolsets/gyptest-toolsets.py
deleted file mode 100755
index f80fce7..0000000
--- a/tools/gyp/test/toolsets/gyptest-toolsets.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that toolsets are correctly applied
-"""
-import os
-import sys
-import TestGyp
-
-if sys.platform.startswith('linux'):
-
-  test = TestGyp.TestGyp(formats=['make', 'ninja'])
-
-  oldenv = os.environ.copy()
-  try:
-    os.environ['GYP_CROSSCOMPILE'] = '1'
-    test.run_gyp('toolsets.gyp')
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  test.build('toolsets.gyp', test.ALL)
-
-  test.run_built_executable('host-main', stdout="Host\nShared: Host\n")
-  test.run_built_executable('target-main', stdout="Target\nShared: Target\n")
-
-  test.pass_test()
diff --git a/tools/gyp/test/toolsets/main.cc b/tools/gyp/test/toolsets/main.cc
deleted file mode 100644
index bc47da9..0000000
--- a/tools/gyp/test/toolsets/main.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-#include <stdio.h>
-
-const char *GetToolset();
-const char *GetToolsetShared();
-
-int main(void) {
-  printf("%s\n", GetToolset());
-  printf("Shared: %s\n", GetToolsetShared());
-}
diff --git a/tools/gyp/test/toolsets/toolsets.cc b/tools/gyp/test/toolsets/toolsets.cc
deleted file mode 100644
index a45fa02..0000000
--- a/tools/gyp/test/toolsets/toolsets.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2009 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-const char *GetToolset() {
-#ifdef TARGET
-  return "Target";
-#else
-  return "Host";
-#endif
-}
diff --git a/tools/gyp/test/toolsets/toolsets.gyp b/tools/gyp/test/toolsets/toolsets.gyp
deleted file mode 100644
index 3bc3a78..0000000
--- a/tools/gyp/test/toolsets/toolsets.gyp
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'target_conditions': [
-      ['_toolset=="target"', {'defines': ['TARGET']}]
-    ]
-  },
-  'targets': [
-    {
-      'target_name': 'toolsets',
-      'type': 'static_library',
-      'toolsets': ['target', 'host'],
-      'sources': [
-        'toolsets.cc',
-      ],
-    },
-    {
-      'target_name': 'host-main',
-      'type': 'executable',
-      'toolsets': ['host'],
-      'dependencies': ['toolsets', 'toolsets_shared'],
-      'sources': [
-        'main.cc',
-      ],
-    },
-    {
-      'target_name': 'target-main',
-      'type': 'executable',
-      'dependencies': ['toolsets', 'toolsets_shared'],
-      'sources': [
-        'main.cc',
-      ],
-    },
-    # This tests that build systems can handle a shared library being build for
-    # both host and target.
-    {
-      'target_name': 'janus',
-      'type': 'shared_library',
-      'toolsets': ['target', 'host'],
-      'sources': [
-        'toolsets.cc',
-      ],
-      'cflags': [ '-fPIC' ],
-    },
-    {
-      'target_name': 'toolsets_shared',
-      'type': 'shared_library',
-      'toolsets': ['target', 'host'],
-      'target_conditions': [
-        # Ensure target and host have different shared_library names
-        ['_toolset=="host"', {'product_extension': 'host'}],
-      ],
-      'sources': [
-        'toolsets_shared.cc',
-      ],
-      'cflags': [ '-fPIC' ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/toolsets/toolsets_shared.cc b/tools/gyp/test/toolsets/toolsets_shared.cc
deleted file mode 100644
index 794af2c..0000000
--- a/tools/gyp/test/toolsets/toolsets_shared.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright (c) 2013 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-const char *GetToolsetShared() {
-#ifdef TARGET
-  return "Target";
-#else
-  return "Host";
-#endif
-}
diff --git a/tools/gyp/test/toplevel-dir/gyptest-toplevel-dir.py b/tools/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
deleted file mode 100755
index 9e69512..0000000
--- a/tools/gyp/test/toplevel-dir/gyptest-toplevel-dir.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies building a subsidiary dependent target from a .gyp file in a
-subdirectory, without specifying an explicit output build directory,
-and using the subdirectory's solution or project file as the entry point.
-"""
-
-import TestGyp
-import errno
-
-test = TestGyp.TestGyp(formats=['ninja', 'make'])
-
-# We want our Makefile to be one dir up from main.gyp.
-test.run_gyp('main.gyp', '--toplevel-dir=..', chdir='src/sub1')
-
-toplevel_dir = 'src'
-
-test.build('sub1/main.gyp', test.ALL, chdir=toplevel_dir)
-
-test.built_file_must_exist('prog1', type=test.EXECUTABLE, chdir=toplevel_dir)
-
-test.run_built_executable('prog1',
-                          chdir=toplevel_dir,
-                          stdout="Hello from prog1.c\n")
-
-test.pass_test()
diff --git a/tools/gyp/test/toplevel-dir/src/sub1/main.gyp b/tools/gyp/test/toplevel-dir/src/sub1/main.gyp
deleted file mode 100644
index 3321901..0000000
--- a/tools/gyp/test/toplevel-dir/src/sub1/main.gyp
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'prog1',
-      'type': 'executable',
-      'dependencies': [
-        '<(DEPTH)/../sub2/prog2.gyp:prog2',
-      ],
-      'sources': [
-        'prog1.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/toplevel-dir/src/sub1/prog1.c b/tools/gyp/test/toplevel-dir/src/sub1/prog1.c
deleted file mode 100644
index 218e994..0000000
--- a/tools/gyp/test/toplevel-dir/src/sub1/prog1.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog1.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/toplevel-dir/src/sub2/prog2.c b/tools/gyp/test/toplevel-dir/src/sub2/prog2.c
deleted file mode 100644
index 12a3188..0000000
--- a/tools/gyp/test/toplevel-dir/src/sub2/prog2.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <stdio.h>
-
-int main(void)
-{
-  printf("Hello from prog2.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/toplevel-dir/src/sub2/prog2.gyp b/tools/gyp/test/toplevel-dir/src/sub2/prog2.gyp
deleted file mode 100644
index 5934548..0000000
--- a/tools/gyp/test/toplevel-dir/src/sub2/prog2.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'prog2',
-      'type': 'executable',
-      'sources': [
-        'prog2.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/commands-repeated.gyp b/tools/gyp/test/variables/commands/commands-repeated.gyp
deleted file mode 100644
index 822ae4f..0000000
--- a/tools/gyp/test/variables/commands/commands-repeated.gyp
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a simple test file to make sure that variable substitution
-# happens correctly.  Run "run_tests.py" using python to generate the
-# output from this gyp file.
-
-{
-  'variables': {
-    'pi': 'import math; print math.pi',
-    'third_letters': "<(other_letters)HIJK",
-    'letters_list': 'ABCD',
-    'other_letters': '<(letters_list)EFG',
-    'check_included': '<(included_variable)',
-    'check_lists': [
-      '<(included_variable)',
-      '<(third_letters)',
-    ],
-    'check_int': 5,
-    'check_str_int': '6',
-    'check_list_int': [
-      7,
-      '8',
-      9,
-    ],
-    'not_int_1': ' 10',
-    'not_int_2': '11 ',
-    'not_int_3': '012',
-    'not_int_4': '13.0',
-    'not_int_5': '+14',
-    'negative_int': '-15',
-    'zero_int': '0',
-  },
-  'includes': [
-    'commands.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'none',
-      'variables': {
-        'var1': '<!(["python", "-c", "<(pi)"])',
-        'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
-        'var3': '<!(python -c "print \'<(letters_list)\'")',
-        'var4': '<(<!(python -c "print \'letters_list\'"))',
-        'var5': 'letters_',
-        'var6': 'list',
-        'var7': '<(check_int)',
-        'var8': '<(check_int)blah',
-        'var9': '<(check_str_int)',
-        'var10': '<(check_list_int)',
-        'var11': ['<@(check_list_int)'],
-        'var12': '<(not_int_1)',
-        'var13': '<(not_int_2)',
-        'var14': '<(not_int_3)',
-        'var15': '<(not_int_4)',
-        'var16': '<(not_int_5)',
-        'var17': '<(negative_int)',
-        'var18': '<(zero_int)',
-        # A second set with different names to make sure they only execute the
-        # commands once.
-        'var1prime': '<!(["python", "-c", "<(pi)"])',
-        'var2prime': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
-        'var3prime': '<!(python -c "print \'<(letters_list)\'")',
-        'var4prime': '<(<!(python -c "print \'letters_list\'"))',
-      },
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'variables': {
-            'var7': '<!(echo <(var5)<(var6))',
-          },
-          'inputs' : [
-            '<(var2)',
-          ],
-          'outputs': [
-            '<(var4)',
-            '<(var7)',
-          ],
-          'action': [
-            'echo',
-            '<(_inputs)',
-            '<(_outputs)',
-          ],
-        },
-        # Again with the same vars to make sure the right things happened.
-        {
-          'action_name': 'test_action_prime',
-          'variables': {
-            'var7': '<!(echo <(var5)<(var6))',
-          },
-          'inputs' : [
-            '<(var2)',
-          ],
-          'outputs': [
-            '<(var4)',
-            '<(var7)',
-          ],
-          'action': [
-            'echo',
-            '<(_inputs)',
-            '<(_outputs)',
-          ],
-        },
-        # And one more time with the other vars...
-        {
-          'action_name': 'test_action_prime_prime',
-          'variables': {
-            'var7': '<!(echo <(var5)<(var6))',
-          },
-          'inputs' : [
-            '<(var2prime)',
-          ],
-          'outputs': [
-            '<(var4prime)',
-            '<(var7)',
-          ],
-          'action': [
-            'echo',
-            '<(_inputs)',
-            '<(_outputs)',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/commands-repeated.gyp.stdout b/tools/gyp/test/variables/commands/commands-repeated.gyp.stdout
deleted file mode 100644
index 56c393f..0000000
--- a/tools/gyp/test/variables/commands/commands-repeated.gyp.stdout
+++ /dev/null
@@ -1,136 +0,0 @@
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print \'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print math.pi"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print math.pi"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print math.pi"]', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<(<!(python -c "print \'letters_list\'")', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<!(python -c "print \'letters_list\'")', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'letters_list'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print math.pi"]', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command '['python', '-c', 'import math; print math.pi']' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print \'<(letters_list)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print \'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print math.pi"', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "import math; print math.pi"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<(<!(python -c "print \'letters_list\'")', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<!(python -c "print \'letters_list\'")', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print 'letters_list'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print \'<(letters_list)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'ABCD\'"', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python -c "print 'ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'echo letters_list' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'echo letters_list' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2prime', 'is_array': '', 'replace': '<(var2prime)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4prime', 'is_array': '', 'replace': '<(var4prime)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
diff --git a/tools/gyp/test/variables/commands/commands-repeated.gypd.golden b/tools/gyp/test/variables/commands/commands-repeated.gypd.golden
deleted file mode 100644
index b29db5e..0000000
--- a/tools/gyp/test/variables/commands/commands-repeated.gypd.golden
+++ /dev/null
@@ -1,77 +0,0 @@
-{'_DEPTH': '.',
- 'included_files': ['commands-repeated.gyp', 'commands.gypi'],
- 'targets': [{'actions': [{'action': ['echo',
-                                      '"3.14159265359 ABCD"',
-                                      'ABCD letters_list'],
-                           'action_name': 'test_action',
-                           'inputs': ['3.14159265359 ABCD'],
-                           'outputs': ['ABCD', 'letters_list'],
-                           'variables': {'var7': 'letters_list'}},
-                          {'action': ['echo',
-                                      '"3.14159265359 ABCD"',
-                                      'ABCD letters_list'],
-                           'action_name': 'test_action_prime',
-                           'inputs': ['3.14159265359 ABCD'],
-                           'outputs': ['ABCD', 'letters_list'],
-                           'variables': {'var7': 'letters_list'}},
-                          {'action': ['echo',
-                                      '"3.14159265359 ABCD"',
-                                      'ABCD letters_list'],
-                           'action_name': 'test_action_prime_prime',
-                           'inputs': ['3.14159265359 ABCD'],
-                           'outputs': ['ABCD', 'letters_list'],
-                           'variables': {'var7': 'letters_list'}}],
-              'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'target_name': 'foo',
-              'toolset': 'target',
-              'type': 'none',
-              'variables': {'var1': '3.14159265359',
-                            'var10': '7 8 9',
-                            'var11': ['7', '8', '9'],
-                            'var12': ' 10',
-                            'var13': '11 ',
-                            'var14': '012',
-                            'var15': '13.0',
-                            'var16': '+14',
-                            'var17': '-15',
-                            'var18': '0',
-                            'var1prime': '3.14159265359',
-                            'var2': '3.14159265359 ABCD',
-                            'var2prime': '3.14159265359 ABCD',
-                            'var3': 'ABCD',
-                            'var3prime': 'ABCD',
-                            'var4': 'ABCD',
-                            'var4prime': 'ABCD',
-                            'var5': 'letters_',
-                            'var6': 'list',
-                            'var7': '5',
-                            'var8': '5blah',
-                            'var9': '6'}},
-             {'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'target_name': 'dummy',
-              'toolset': 'target',
-              'type': 'none'}],
- 'variables': {'check_included': 'XYZ',
-               'check_int': '5',
-               'check_list_int': ['7', '8', '9'],
-               'check_lists': ['XYZ', 'ABCDEFGHIJK'],
-               'check_str_int': '6',
-               'default_empty_files%': '',
-               'default_empty_str%': '',
-               'default_int%': '0',
-               'default_int_files%': '0',
-               'default_str%': 'my_str',
-               'included_variable': 'XYZ',
-               'letters_list': 'ABCD',
-               'negative_int': '-15',
-               'not_int_1': ' 10',
-               'not_int_2': '11 ',
-               'not_int_3': '012',
-               'not_int_4': '13.0',
-               'not_int_5': '+14',
-               'other_letters': 'ABCDEFG',
-               'pi': 'import math; print math.pi',
-               'third_letters': 'ABCDEFGHIJK',
-               'zero_int': '0'}}
diff --git a/tools/gyp/test/variables/commands/commands.gyp b/tools/gyp/test/variables/commands/commands.gyp
deleted file mode 100644
index 78376ed..0000000
--- a/tools/gyp/test/variables/commands/commands.gyp
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a simple test file to make sure that variable substitution
-# happens correctly.  Run "run_tests.py" using python to generate the
-# output from this gyp file.
-
-{
-  'variables': {
-    'pi': 'import math; print math.pi',
-    'third_letters': "<(other_letters)HIJK",
-    'letters_list': 'ABCD',
-    'other_letters': '<(letters_list)EFG',
-    'check_included': '<(included_variable)',
-    'check_lists': [
-      '<(included_variable)',
-      '<(third_letters)',
-    ],
-    'check_int': 5,
-    'check_str_int': '6',
-    'check_list_int': [
-      7,
-      '8',
-      9,
-    ],
-    'not_int_1': ' 10',
-    'not_int_2': '11 ',
-    'not_int_3': '012',
-    'not_int_4': '13.0',
-    'not_int_5': '+14',
-    'negative_int': '-15',
-    'zero_int': '0',
-  },
-  'includes': [
-    'commands.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'none',
-      'variables': {
-        'var1': '<!(["python", "-c", "<(pi)"])',
-        'var2': '<!(python -c "print \'<!(python -c "<(pi)") <(letters_list)\'")',
-        'var3': '<!(python -c "print \'<(letters_list)\'")',
-        'var4': '<(<!(python -c "print \'letters_list\'"))',
-        'var5': 'letters_',
-        'var6': 'list',
-        'var7': '<(check_int)',
-        'var8': '<(check_int)blah',
-        'var9': '<(check_str_int)',
-        'var10': '<(check_list_int)',
-        'var11': ['<@(check_list_int)'],
-        'var12': '<(not_int_1)',
-        'var13': '<(not_int_2)',
-        'var14': '<(not_int_3)',
-        'var15': '<(not_int_4)',
-        'var16': '<(not_int_5)',
-        'var17': '<(negative_int)',
-        'var18': '<(zero_int)',
-        'var19': ['<!@(python test.py)'],
-        'var20': '<!(python test.py)',
-        'var21': '<(default_str)',
-        'var22': '<(default_empty_str)',
-        'var23': '<(default_int)',
-        'var24': '<(default_empty_files)',
-        'var25': '<(default_int_files)',
-      },
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'variables': {
-            'var7': '<!(echo <(var5)<(var6))',
-          },
-          'inputs' : [
-            '<(var2)',
-          ],
-          'outputs': [
-            '<(var4)',
-            '<(var7)',
-          ],
-          'action': [
-            'echo',
-            '<(_inputs)',
-            '<(_outputs)',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/commands.gyp.ignore-env.stdout b/tools/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
deleted file mode 100644
index a345920..0000000
--- a/tools/gyp/test/variables/commands/commands.gyp.ignore-env.stdout
+++ /dev/null
@@ -1,96 +0,0 @@
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_files', 'is_array': '', 'replace': '<(default_empty_files)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int_files', 'is_array': '', 'replace': '<(default_int_files)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!(python test.py)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:838:ExpandVariables Executing command 'python test.py' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'sample\\path\\foo.cpp', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_str', 'is_array': '', 'replace': '<(default_str)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'my_str', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_str', 'is_array': '', 'replace': '<(default_empty_str)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int', 'is_array': '', 'replace': '<(default_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<(<!(python -c "print \'letters_list\'")', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<!(python -c "print \'letters_list\'")', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'letters_list'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print math.pi"]', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print \'<(letters_list)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print \'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print math.pi"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print math.pi"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!@(python test.py)', 'type': '<!@', 'command_string': None}
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python test.py' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output ['samplepathfoo.cpp'], recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
diff --git a/tools/gyp/test/variables/commands/commands.gyp.stdout b/tools/gyp/test/variables/commands/commands.gyp.stdout
deleted file mode 100644
index a345920..0000000
--- a/tools/gyp/test/variables/commands/commands.gyp.stdout
+++ /dev/null
@@ -1,96 +0,0 @@
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFG', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'included_variable', 'is_array': '', 'replace': '<(included_variable)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'XYZ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'third_letters', 'is_array': '', 'replace': '<(third_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(other_letters)HIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'other_letters', 'is_array': '', 'replace': '<(other_letters)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '<(letters_list)EFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCDEFGHIJK', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_files', 'is_array': '', 'replace': '<(default_empty_files)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int_files', 'is_array': '', 'replace': '<(default_int_files)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!(python test.py)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:838:ExpandVariables Executing command 'python test.py' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'sample\\path\\foo.cpp', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_str', 'is_array': '', 'replace': '<(default_str)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'my_str', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_empty_str', 'is_array': '', 'replace': '<(default_empty_str)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'default_int', 'is_array': '', 'replace': '<(default_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '<!(python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<(<!(python -c "print \'letters_list\'")', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'letters_list\'"', 'is_array': '', 'replace': '<!(python -c "print \'letters_list\'")', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'letters_list'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '"python", "-c", "<(pi', 'is_array': '[', 'replace': '<!(["python", "-c", "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '["python", "-c", "import math; print math.pi"]', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command '['python', '-c', 'import math; print math.pi']' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<(letters_list', 'is_array': '', 'replace': '<!(python -c "print \'<(letters_list)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print 'ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'letters_list', 'is_array': '', 'replace': '<(letters_list)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "print \'<!(python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "print \'<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python -c "<(pi', 'is_array': '', 'replace': '<!(python -c "<(pi)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'pi', 'is_array': '', 'replace': '<(pi)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "import math; print math.pi"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "import math; print math.pi"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'python -c "print \'3.14159265359 ABCD\'"', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'python -c "print '3.14159265359 ABCD'"' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_str_int', 'is_array': '', 'replace': '<(check_str_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '6', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_int', 'is_array': '', 'replace': '<(check_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '5blah', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_4', 'is_array': '', 'replace': '<(not_int_4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '13.0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_3', 'is_array': '', 'replace': '<(not_int_3)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '012', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'negative_int', 'is_array': '', 'replace': '<(negative_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '-15', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_5', 'is_array': '', 'replace': '<(not_int_5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '+14', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<(check_list_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '7 8 9', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_2', 'is_array': '', 'replace': '<(not_int_2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '11 ', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'not_int_1', 'is_array': '', 'replace': '<(not_int_1)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output ' 10', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'zero_int', 'is_array': '', 'replace': '<(zero_int)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '0', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'check_list_int', 'is_array': '', 'replace': '<@(check_list_int)', 'type': '<@', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output [7, 8, 9], recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'python test.py', 'is_array': '', 'replace': '<!@(python test.py)', 'type': '<!@', 'command_string': None}
-VARIABLES:input.py:889:ExpandVariables Had cache value for command 'python test.py' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output ['samplepathfoo.cpp'], recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var6', 'is_array': '', 'replace': '<(var6)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'echo <(var5', 'is_array': '', 'replace': '<!(echo <(var5)', 'type': '<!', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var5', 'is_array': '', 'replace': '<(var5)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'echo letters_list', recursing.
-VARIABLES:input.py:838:ExpandVariables Executing command 'echo letters_list' in directory 'None'
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_inputs', 'is_array': '', 'replace': '<(_inputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var2', 'is_array': '', 'replace': '<(var2)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output '3.14159265359 ABCD', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output '"3.14159265359 ABCD"', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': '_outputs', 'is_array': '', 'replace': '<(_outputs)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var4', 'is_array': '', 'replace': '<(var4)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD', recursing.
-VARIABLES:input.py:724:ExpandVariables Matches: {'content': 'var7', 'is_array': '', 'replace': '<(var7)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:964:ExpandVariables Found output 'letters_list', recursing.
-VARIABLES:input.py:964:ExpandVariables Found output 'ABCD letters_list', recursing.
diff --git a/tools/gyp/test/variables/commands/commands.gypd.golden b/tools/gyp/test/variables/commands/commands.gypd.golden
deleted file mode 100644
index 9e5cf89..0000000
--- a/tools/gyp/test/variables/commands/commands.gypd.golden
+++ /dev/null
@@ -1,66 +0,0 @@
-{'_DEPTH': '.',
- 'included_files': ['commands.gyp', 'commands.gypi'],
- 'targets': [{'actions': [{'action': ['echo',
-                                      '"3.14159265359 ABCD"',
-                                      'ABCD letters_list'],
-                           'action_name': 'test_action',
-                           'inputs': ['3.14159265359 ABCD'],
-                           'outputs': ['ABCD', 'letters_list'],
-                           'variables': {'var7': 'letters_list'}}],
-              'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'target_name': 'foo',
-              'toolset': 'target',
-              'type': 'none',
-              'variables': {'var1': '3.14159265359',
-                            'var10': '7 8 9',
-                            'var11': ['7', '8', '9'],
-                            'var12': ' 10',
-                            'var13': '11 ',
-                            'var14': '012',
-                            'var15': '13.0',
-                            'var16': '+14',
-                            'var17': '-15',
-                            'var18': '0',
-                            'var19': ['samplepathfoo.cpp'],
-                            'var2': '3.14159265359 ABCD',
-                            'var20': 'sample\\path\\foo.cpp',
-                            'var21': 'my_str',
-                            'var22': '',
-                            'var23': '0',
-                            'var24': '',
-                            'var25': '0',
-                            'var3': 'ABCD',
-                            'var4': 'ABCD',
-                            'var5': 'letters_',
-                            'var6': 'list',
-                            'var7': '5',
-                            'var8': '5blah',
-                            'var9': '6'}},
-             {'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'target_name': 'dummy',
-              'toolset': 'target',
-              'type': 'none'}],
- 'variables': {'check_included': 'XYZ',
-               'check_int': '5',
-               'check_list_int': ['7', '8', '9'],
-               'check_lists': ['XYZ', 'ABCDEFGHIJK'],
-               'check_str_int': '6',
-               'default_empty_files%': '',
-               'default_empty_str%': '',
-               'default_int%': '0',
-               'default_int_files%': '0',
-               'default_str%': 'my_str',
-               'included_variable': 'XYZ',
-               'letters_list': 'ABCD',
-               'negative_int': '-15',
-               'not_int_1': ' 10',
-               'not_int_2': '11 ',
-               'not_int_3': '012',
-               'not_int_4': '13.0',
-               'not_int_5': '+14',
-               'other_letters': 'ABCDEFG',
-               'pi': 'import math; print math.pi',
-               'third_letters': 'ABCDEFGHIJK',
-               'zero_int': '0'}}
diff --git a/tools/gyp/test/variables/commands/commands.gypi b/tools/gyp/test/variables/commands/commands.gypi
deleted file mode 100644
index 839cb30..0000000
--- a/tools/gyp/test/variables/commands/commands.gypi
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This file is included from commands.gyp to test evaluation order of includes.
-{
-  'variables': {
-    'included_variable': 'XYZ',
-
-    'default_str%': 'my_str',
-    'default_empty_str%': '',
-    'default_int%': 0,
-
-    'default_empty_files%': '',
-    'default_int_files%': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'dummy',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/gyptest-commands-ignore-env.py b/tools/gyp/test/variables/commands/gyptest-commands-ignore-env.py
deleted file mode 100755
index 1cf3308..0000000
--- a/tools/gyp/test/variables/commands/gyptest-commands-ignore-env.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that environment variables are ignored when --ignore-environment is
-specified.
-"""
-
-import os
-
-import TestGyp
-
-test = TestGyp.TestGyp(format='gypd')
-
-os.environ['GYP_DEFINES'] = 'FOO=BAR'
-os.environ['GYP_GENERATORS'] = 'foo'
-os.environ['GYP_GENERATOR_FLAGS'] = 'genflag=foo'
-os.environ['GYP_GENERATOR_OUTPUT'] = 'somedir'
-
-expect = test.read('commands.gyp.ignore-env.stdout').replace('\r\n', '\n')
-
-test.run_gyp('commands.gyp',
-             '--debug', 'variables',
-             '--ignore-environment',
-             stdout=expect, ignore_line_numbers=True)
-
-# Verify the commands.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('commands.gypd').replace('\r', '')
-expect = test.read('commands.gypd.golden').replace('\r', '')
-if not test.match(contents, expect):
-  print "Unexpected contents of `commands.gypd'"
-  test.diff(expect, contents, 'commands.gypd ')
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py b/tools/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py
deleted file mode 100755
index 21e0487..0000000
--- a/tools/gyp/test/variables/commands/gyptest-commands-repeated-multidir.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test variable expansion of '<!()' syntax commands where they are evaluated
-more than once from different directories.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-# This tests GYP's cache of commands, ensuring that the directory a command is
-# run from is part of its cache key. Parallelism may lead to multiple cache
-# lookups failing, resulting in the command being run multiple times by
-# chance, not by GYP's logic. Turn off parallelism to ensure that the logic is
-# being tested.
-test.run_gyp('repeated_multidir/main.gyp', '--no-parallel')
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/commands/gyptest-commands-repeated.py b/tools/gyp/test/variables/commands/gyptest-commands-repeated.py
deleted file mode 100755
index b95fe2d..0000000
--- a/tools/gyp/test/variables/commands/gyptest-commands-repeated.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test variable expansion of '<!()' syntax commands where they are evaluated
-more than once.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp(format='gypd')
-
-expect = test.read('commands-repeated.gyp.stdout').replace('\r\n', '\n')
-
-test.run_gyp('commands-repeated.gyp',
-             '--debug', 'variables',
-             stdout=expect, ignore_line_numbers=True)
-
-# Verify the commands-repeated.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('commands-repeated.gypd').replace('\r\n', '\n')
-expect = test.read('commands-repeated.gypd.golden').replace('\r\n', '\n')
-if not test.match(contents, expect):
-  print "Unexpected contents of `commands-repeated.gypd'"
-  test.diff(expect, contents, 'commands-repeated.gypd ')
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/commands/gyptest-commands.py b/tools/gyp/test/variables/commands/gyptest-commands.py
deleted file mode 100755
index ef1af8c..0000000
--- a/tools/gyp/test/variables/commands/gyptest-commands.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test variable expansion of '<!()' syntax commands.
-"""
-
-import os
-
-import TestGyp
-
-test = TestGyp.TestGyp(format='gypd')
-
-expect = test.read('commands.gyp.stdout').replace('\r', '')
-
-test.run_gyp('commands.gyp',
-             '--debug', 'variables',
-             stdout=expect, ignore_line_numbers=True)
-
-# Verify the commands.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('commands.gypd').replace('\r', '')
-expect = test.read('commands.gypd.golden').replace('\r', '')
-if not test.match(contents, expect):
-  print "Unexpected contents of `commands.gypd'"
-  test.diff(expect, contents, 'commands.gypd ')
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp b/tools/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp
deleted file mode 100644
index 328fc30..0000000
--- a/tools/gyp/test/variables/commands/repeated_multidir/dir_1/test_1.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'expected_value': 'dir_1',
-    'target_name': 'target_1',
-  },
-  'includes': [
-    '../repeated_command_common.gypi',
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp b/tools/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp
deleted file mode 100644
index 18e0c62..0000000
--- a/tools/gyp/test/variables/commands/repeated_multidir/dir_2/test_2.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'expected_value': 'dir_2',
-    'target_name': 'target_2',
-  },
-  'includes': [
-    '../repeated_command_common.gypi',
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/repeated_multidir/main.gyp b/tools/gyp/test/variables/commands/repeated_multidir/main.gyp
deleted file mode 100644
index 5beeeb7..0000000
--- a/tools/gyp/test/variables/commands/repeated_multidir/main.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'main',
-      'type': 'none',
-      'dependencies': [
-        'dir_1/test_1.gyp:target_1',
-        'dir_2/test_2.gyp:target_2',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py b/tools/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py
deleted file mode 100755
index ace9ed6..0000000
--- a/tools/gyp/test/variables/commands/repeated_multidir/print_cwd_basename.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import os.path
-
-print os.path.basename(os.getcwd())
diff --git a/tools/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi b/tools/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi
deleted file mode 100644
index 7436677..0000000
--- a/tools/gyp/test/variables/commands/repeated_multidir/repeated_command_common.gypi
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    # This command will be run from the directories of the .gyp files that
-    # include this .gypi, the subdirectories dir_1 and dir_2, so use a
-    # relative path from those directories to the script.
-    'observed_value': '<!(python ../print_cwd_basename.py)',
-  },
-  'targets': [
-    {
-      'target_name': '<(target_name)',
-      'type': 'none',
-      'conditions': [
-        ['observed_value != expected_value', {
-          # Attempt to expand an undefined variable. This triggers a GYP
-          # error.
-          'assertion': '<(observed_value_must_equal_expected_value)',
-        }],
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/commands/test.py b/tools/gyp/test/variables/commands/test.py
deleted file mode 100644
index 4d9ca6d..0000000
--- a/tools/gyp/test/variables/commands/test.py
+++ /dev/null
@@ -1 +0,0 @@
-print "sample\\path\\foo.cpp"
diff --git a/tools/gyp/test/variables/commands/update_golden b/tools/gyp/test/variables/commands/update_golden
deleted file mode 100755
index 4fcf1eb..0000000
--- a/tools/gyp/test/variables/commands/update_golden
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-python ../../../gyp --debug variables --format gypd --depth . commands.gyp > commands.gyp.stdout
-python ../../../gyp --ignore-environment --debug variables --format gypd --depth . commands.gyp > commands.gyp.ignore-env.stdout
-cp -f commands.gypd commands.gypd.golden
-python ../../../gyp --debug variables --format gypd --depth . commands-repeated.gyp > commands-repeated.gyp.stdout
-cp -f commands-repeated.gypd commands-repeated.gypd.golden
diff --git a/tools/gyp/test/variables/empty/empty.gyp b/tools/gyp/test/variables/empty/empty.gyp
deleted file mode 100644
index 207be06..0000000
--- a/tools/gyp/test/variables/empty/empty.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'includes': ['empty.gypi'],
-  'targets': [
-    {
-      'target_name': 'empty',
-      'type': 'none',
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/empty/empty.gypi b/tools/gyp/test/variables/empty/empty.gypi
deleted file mode 100644
index e95031f..0000000
--- a/tools/gyp/test/variables/empty/empty.gypi
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    '': '',
-  },
-}
diff --git a/tools/gyp/test/variables/empty/gyptest-empty.py b/tools/gyp/test/variables/empty/gyptest-empty.py
deleted file mode 100755
index 4cbe166..0000000
--- a/tools/gyp/test/variables/empty/gyptest-empty.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that empty variable names don't cause infinite loops.
-"""
-
-import os
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('empty.gyp')
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/filelist/filelist.gyp.stdout b/tools/gyp/test/variables/filelist/filelist.gyp.stdout
deleted file mode 100644
index 595a19c..0000000
--- a/tools/gyp/test/variables/filelist/filelist.gyp.stdout
+++ /dev/null
@@ -1,26 +0,0 @@
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names.txt <@(names', 'is_array': '', 'replace': '<|(names.txt <@(names)', 'type': '<|', 'command_string': None}
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names', 'is_array': '', 'replace': '<@(names)', 'type': '<@', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'names.txt John Jacob Jingleheimer Schmidt', recursing.
-VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'names.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'cat <(names_listfile', 'is_array': '', 'replace': '<!@(cat <(names_listfile)', 'type': '<!@', 'command_string': None}
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'names_listfile', 'is_array': '', 'replace': '<(names_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'cat names.txt', recursing.
-VARIABLES:input.py:676:ExpandVariables Executing command 'cat names.txt' in directory 'src'
-VARIABLES:input.py:797:ExpandVariables Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources.txt <@(_sources', 'is_array': '', 'replace': '<|(sources.txt <@(_sources)', 'type': '<|', 'command_string': None}
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': '_sources', 'is_array': '', 'replace': '<@(_sources)', 'type': '<@', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt John Jacob Jingleheimer Schmidt', recursing.
-VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'sources.txt', recursing.
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'cat <(sources_listfile', 'is_array': '', 'replace': '<!@(cat <(sources_listfile)', 'type': '<!@', 'command_string': None}
-VARIABLES:input.py:562:ExpandVariables Matches: {'content': 'sources_listfile', 'is_array': '', 'replace': '<(sources_listfile)', 'type': '<', 'command_string': None}
-VARIABLES:input.py:797:ExpandVariables Found output 'cat sources.txt', recursing.
-VARIABLES:input.py:676:ExpandVariables Executing command 'cat sources.txt' in directory 'src'
-VARIABLES:input.py:797:ExpandVariables Found output ['John', 'Jacob', 'Jingleheimer', 'Schmidt'], recursing.
diff --git a/tools/gyp/test/variables/filelist/filelist.gypd.golden b/tools/gyp/test/variables/filelist/filelist.gypd.golden
deleted file mode 100644
index 09d9116..0000000
--- a/tools/gyp/test/variables/filelist/filelist.gypd.golden
+++ /dev/null
@@ -1,43 +0,0 @@
-{'_DEPTH': '.',
- 'included_files': ['filelist.gyp'],
- 'targets': [{'actions': [{'action': ['python', 'dummy.py', 'names.txt'],
-                           'action_name': 'test_action',
-                           'inputs': ['names.txt',
-                                      'John',
-                                      'Jacob',
-                                      'Jingleheimer',
-                                      'Schmidt'],
-                           'outputs': ['dummy_foo']}],
-              'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'target_name': 'foo',
-              'toolset': 'target',
-              'type': 'none',
-              'variables': {'names_listfile': 'names.txt'}},
-             {'actions': [{'action': ['python', 'dummy.py', 'sources.txt'],
-                           'action_name': 'test_action',
-                           'inputs': ['sources.txt',
-                                      'John',
-                                      'Jacob',
-                                      'Jingleheimer',
-                                      'Schmidt'],
-                           'outputs': ['dummy_foo']}],
-              'configurations': {'Default': {}},
-              'default_configuration': 'Default',
-              'sources': ['John', 'Jacob', 'Jingleheimer', 'Schmidt'],
-              'sources_excluded': ['Astor', 'Jerome', 'Schultz'],
-              'target_name': 'bar',
-              'toolset': 'target',
-              'type': 'none',
-              'variables': {'sources_listfile': 'sources.txt'}}],
- 'variables': {'names': ['John',
-                         'Jacob',
-                         'Astor',
-                         'Jingleheimer',
-                         'Jerome',
-                         'Schmidt',
-                         'Schultz'],
-               'names!': ['Astor'],
-               'names/': [['exclude', 'Sch.*'],
-                          ['include', '.*dt'],
-                          ['exclude', 'Jer.*']]}}
diff --git a/tools/gyp/test/variables/filelist/gyptest-filelist-golden.py b/tools/gyp/test/variables/filelist/gyptest-filelist-golden.py
deleted file mode 100644
index 55eaf9d..0000000
--- a/tools/gyp/test/variables/filelist/gyptest-filelist-golden.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test variable expansion of '<|(list.txt ...)' syntax commands.
-"""
-
-import os
-import sys
-
-import TestGyp
-
-test = TestGyp.TestGyp(format='gypd')
-
-expect = test.read('filelist.gyp.stdout')
-if sys.platform == 'win32':
-  expect = expect.replace('/', r'\\').replace('\r\n', '\n')
-
-test.run_gyp('src/filelist.gyp',
-             '--debug', 'variables',
-             stdout=expect, ignore_line_numbers=True)
-
-# Verify the filelist.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('src/filelist.gypd').replace(
-    '\r', '').replace('\\\\', '/')
-expect = test.read('filelist.gypd.golden').replace('\r', '')
-if not test.match(contents, expect):
-  print "Unexpected contents of `src/filelist.gypd'"
-  test.diff(expect, contents, 'src/filelist.gypd ')
-  test.fail_test()
-
-contents = test.read('src/names.txt')
-expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
-if not test.match(contents, expect):
-  print "Unexpected contents of `src/names.txt'"
-  test.diff(expect, contents, 'src/names.txt ')
-  test.fail_test()
-
-test.pass_test()
-
diff --git a/tools/gyp/test/variables/filelist/gyptest-filelist.py b/tools/gyp/test/variables/filelist/gyptest-filelist.py
deleted file mode 100755
index 84a6cba..0000000
--- a/tools/gyp/test/variables/filelist/gyptest-filelist.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test variable expansion of '<|(list.txt ...)' syntax commands.
-"""
-
-import os
-import sys
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-CHDIR = 'src'
-test.run_gyp('filelist2.gyp', chdir=CHDIR)
-
-test.build('filelist2.gyp', 'foo', chdir=CHDIR)
-contents = test.read('src/dummy_foo').replace('\r', '')
-expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
-if not test.match(contents, expect):
-  print "Unexpected contents of `src/dummy_foo'"
-  test.diff(expect, contents, 'src/dummy_foo')
-  test.fail_test()
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/filelist/src/dummy.py b/tools/gyp/test/variables/filelist/src/dummy.py
deleted file mode 100644
index e41fc9f..0000000
--- a/tools/gyp/test/variables/filelist/src/dummy.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-
-open(sys.argv[1], 'w').write(open(sys.argv[2]).read())
diff --git a/tools/gyp/test/variables/filelist/src/filelist.gyp b/tools/gyp/test/variables/filelist/src/filelist.gyp
deleted file mode 100644
index df48eb3..0000000
--- a/tools/gyp/test/variables/filelist/src/filelist.gyp
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a test to make sure that <|(foo.txt a b c) generates
-# a pre-calculated file list at gyp time and returns foo.txt.
-# This feature is useful to work around limits in the number of arguments that
-# can be passed to rule/action.
-
-{
-  'variables': {
-    'names': [
-      'John',
-      'Jacob',
-      'Astor',
-      'Jingleheimer',
-      'Jerome',
-      'Schmidt',
-      'Schultz',
-    ],
-    'names!': [
-      'Astor',
-    ],
-    'names/': [
-      ['exclude', 'Sch.*'],
-      ['include', '.*dt'],
-      ['exclude', 'Jer.*'],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'none',
-      'variables': {
-        'names_listfile': '<|(names.txt <@(names))',
-      },
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'inputs' : [
-            '<(names_listfile)',
-            '<!@(cat <(names_listfile))',
-          ],
-          'outputs': [
-            'dummy_foo',
-          ],
-          'action': [
-            'python', 'dummy.py', '<(names_listfile)',
-          ],
-        },
-      ],
-    },
-    {
-      'target_name': 'bar',
-      'type': 'none',
-      'sources': [
-        'John',
-        'Jacob',
-        'Astor',
-        'Jingleheimer',
-        'Jerome',
-        'Schmidt',
-        'Schultz',
-      ],
-      'sources!': [
-        'Astor',
-      ],
-      'sources/': [
-        ['exclude', 'Sch.*'],
-        ['include', '.*dt'],
-        ['exclude', 'Jer.*'],
-      ],
-      'variables': {
-        'sources_listfile': '<|(sources.txt <@(_sources))',
-      },
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'inputs' : [
-            '<(sources_listfile)',
-            '<!@(cat <(sources_listfile))',
-          ],
-          'outputs': [
-            'dummy_foo',
-          ],
-          'action': [
-            'python', 'dummy.py', '<(sources_listfile)',
-          ],
-        },
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/filelist/src/filelist2.gyp b/tools/gyp/test/variables/filelist/src/filelist2.gyp
deleted file mode 100644
index ec215db..0000000
--- a/tools/gyp/test/variables/filelist/src/filelist2.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This is a test to make sure that <|(foo.txt a b c) generates
-# a pre-calculated file list at gyp time and returns foo.txt.
-# This feature is useful to work around limits in the number of arguments that
-# can be passed to rule/action.
-
-{
-  'variables': {
-    'names': [
-      'John',
-      'Jacob',
-      'Jingleheimer',
-      'Schmidt',
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'none',
-      'variables': {
-        'names_listfile': '<|(names.txt <@(names))',
-      },
-      'actions': [
-        {
-          'action_name': 'test_action',
-          'msvs_cygwin_shell': 0,
-          'inputs' : [ '<(names_listfile)' ],
-          'outputs': [ 'dummy_foo' ],
-          'action': [
-            'python', 'dummy.py', '<@(_outputs)', '<(names_listfile)',
-          ],
-        },
-      ],
-    },
-  ],
-}
-
diff --git a/tools/gyp/test/variables/filelist/update_golden b/tools/gyp/test/variables/filelist/update_golden
deleted file mode 100755
index b4d489a..0000000
--- a/tools/gyp/test/variables/filelist/update_golden
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-python ../../../gyp --debug variables --debug general --format gypd --depth . src/filelist.gyp > filelist.gyp.stdout
-cp -f src/filelist.gypd filelist.gypd.golden
diff --git a/tools/gyp/test/variables/latelate/gyptest-latelate.py b/tools/gyp/test/variables/latelate/gyptest-latelate.py
deleted file mode 100755
index 2d77dfe..0000000
--- a/tools/gyp/test/variables/latelate/gyptest-latelate.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ^(latelate) style variables work.
-"""
-
-import TestGyp
-
-test = TestGyp.TestGyp()
-
-test.run_gyp('latelate.gyp', chdir='src')
-
-test.relocate('src', 'relocate/src')
-
-test.build('latelate.gyp', test.ALL, chdir='relocate/src')
-
-test.run_built_executable(
-    'program', chdir='relocate/src', stdout='program.cc\n')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/latelate/src/latelate.gyp b/tools/gyp/test/variables/latelate/src/latelate.gyp
deleted file mode 100644
index 312f376..0000000
--- a/tools/gyp/test/variables/latelate/src/latelate.gyp
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'target_conditions': [
-      ['has_lame==1', {
-        'sources/': [
-          ['exclude', 'lame'],
-        ],
-      }],
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'variables': {
-        'has_lame': 1,
-      },
-      'include_dirs': [
-        '<(SHARED_INTERMEDIATE_DIR)',
-      ],
-      'defines': [
-        'FOO="^(_sources)"',
-      ],
-      'sources': [
-        'program.cc',
-        'this_is_lame.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/variables/latelate/src/program.cc b/tools/gyp/test/variables/latelate/src/program.cc
deleted file mode 100644
index 97c98ae..0000000
--- a/tools/gyp/test/variables/latelate/src/program.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-/*
- * Copyright (c) 2012 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-
-#include <stdio.h>
-
-
-int main(void) {
-  printf(FOO "\n");
-  return 0;
-}
diff --git a/tools/gyp/test/variables/variable-in-path/C1/hello.cc b/tools/gyp/test/variables/variable-in-path/C1/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/variables/variable-in-path/C1/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py b/tools/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py
deleted file mode 100644
index b73a279..0000000
--- a/tools/gyp/test/variables/variable-in-path/gyptest-variable-in-path.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure <(CONFIGURATION_NAME) variable is correctly expanded.
-"""
-
-import TestGyp
-
-import sys
-
-test = TestGyp.TestGyp()
-test.set_configuration('C1')
-
-test.run_gyp('variable-in-path.gyp')
-test.build('variable-in-path.gyp', 'hello1')
-test.build('variable-in-path.gyp', 'hello2')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/variables/variable-in-path/variable-in-path.gyp b/tools/gyp/test/variables/variable-in-path/variable-in-path.gyp
deleted file mode 100644
index 908d21e..0000000
--- a/tools/gyp/test/variables/variable-in-path/variable-in-path.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello1',
-      'type': 'executable',
-      'sources': [
-        '<(CONFIGURATION_NAME)/hello.cc',
-      ],
-    },
-    {
-      'target_name': 'hello2',
-      'type': 'executable',
-      'sources': [
-        './<(CONFIGURATION_NAME)/hello.cc',
-      ],
-    },
-  ],
-  'target_defaults': {
-    'default_configuration': 'C1',
-    'configurations': {
-      'C1': {
-      },
-      'C2': {
-      },
-    },
-  },
-}
diff --git a/tools/gyp/test/win/asm-files/asm-files.gyp b/tools/gyp/test/win/asm-files/asm-files.gyp
deleted file mode 100644
index b1f132c..0000000
--- a/tools/gyp/test/win/asm-files/asm-files.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'sources_with_asm',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-        'b.s',
-        'c.S',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/asm-files/b.s b/tools/gyp/test/win/asm-files/b.s
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/asm-files/b.s
+++ /dev/null
diff --git a/tools/gyp/test/win/asm-files/c.S b/tools/gyp/test/win/asm-files/c.S
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/asm-files/c.S
+++ /dev/null
diff --git a/tools/gyp/test/win/asm-files/hello.cc b/tools/gyp/test/win/asm-files/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/win/asm-files/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/batch-file-action/batch-file-action.gyp b/tools/gyp/test/win/batch-file-action/batch-file-action.gyp
deleted file mode 100644
index e4db9af..0000000
--- a/tools/gyp/test/win/batch-file-action/batch-file-action.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_batch',
-      'type': 'none',
-      'actions': [
-        {
-          'action_name': 'copy_to_output',
-          'inputs': ['infile'],
-          'outputs': ['outfile'],
-          'action': ['somecmd.bat', 'infile', 'outfile'],
-          'msvs_cygwin_shell': 0,
-        }
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/batch-file-action/infile b/tools/gyp/test/win/batch-file-action/infile
deleted file mode 100644
index 3f9177e..0000000
--- a/tools/gyp/test/win/batch-file-action/infile
+++ /dev/null
@@ -1 +0,0 @@
-input
diff --git a/tools/gyp/test/win/batch-file-action/somecmd.bat b/tools/gyp/test/win/batch-file-action/somecmd.bat
deleted file mode 100644
index d487753..0000000
--- a/tools/gyp/test/win/batch-file-action/somecmd.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-@echo off
-:: The redirs to nul are important. %2 can end up being an unterminated "'d
-:: string, so the remainder of the command line becomes the target file name,
-:: which in turn fails because it's a filename containing >, nul, etc.
-copy /y %1 %2 >nul 2>nul
diff --git a/tools/gyp/test/win/command-quote/a.S b/tools/gyp/test/win/command-quote/a.S
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/command-quote/a.S
+++ /dev/null
diff --git a/tools/gyp/test/win/command-quote/bat with spaces.bat b/tools/gyp/test/win/command-quote/bat with spaces.bat
deleted file mode 100644
index dc3508f..0000000
--- a/tools/gyp/test/win/command-quote/bat with spaces.bat
+++ /dev/null
@@ -1,7 +0,0 @@
-@echo off
-
-:: Copyright (c) 2012 Google Inc. All rights reserved.
-:: Use of this source code is governed by a BSD-style license that can be
-:: found in the LICENSE file.
-
-copy %1 %2
diff --git a/tools/gyp/test/win/command-quote/command-quote.gyp b/tools/gyp/test/win/command-quote/command-quote.gyp
deleted file mode 100644
index faf7246..0000000
--- a/tools/gyp/test/win/command-quote/command-quote.gyp
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'msvs_cygwin_dirs': ['../../../../../<(DEPTH)/third_party/cygwin'],
-  },
-  'targets': [
-    {
-      'target_name': 'test_batch',
-      'type': 'none',
-      'rules': [
-      {
-        'rule_name': 'build_with_batch',
-        'msvs_cygwin_shell': 0,
-        'extension': 'S',
-        'outputs': ['output.obj'],
-        'action': ['call go.bat', '<(RULE_INPUT_PATH)', 'output.obj'],
-      },],
-      'sources': ['a.S'],
-    },
-    {
-      'target_name': 'test_call_separate',
-      'type': 'none',
-      'rules': [
-      {
-        'rule_name': 'build_with_batch2',
-        'msvs_cygwin_shell': 0,
-        'extension': 'S',
-        'outputs': ['output2.obj'],
-        'action': ['call', 'go.bat', '<(RULE_INPUT_PATH)', 'output2.obj'],
-      },],
-      'sources': ['a.S'],
-    },
-    {
-      'target_name': 'test_with_spaces',
-      'type': 'none',
-      'rules': [
-      {
-        'rule_name': 'build_with_batch3',
-        'msvs_cygwin_shell': 0,
-        'extension': 'S',
-        'outputs': ['output3.obj'],
-        'action': ['bat with spaces.bat', '<(RULE_INPUT_PATH)', 'output3.obj'],
-      },],
-      'sources': ['a.S'],
-    },
-    {
-      'target_name': 'test_with_double_quotes',
-      'type': 'none',
-      'rules': [
-      {
-        'rule_name': 'build_with_batch3',
-        'msvs_cygwin_shell': 1,
-        'extension': 'S',
-        'outputs': ['output4.obj'],
-        'arguments': ['-v'],
-        'action': ['python', '-c', 'import shutil; '
-          'shutil.copy("<(RULE_INPUT_PATH)", "output4.obj")'],
-      },],
-      'sources': ['a.S'],
-    },
-    {
-      'target_name': 'test_with_single_quotes',
-      'type': 'none',
-      'rules': [
-      {
-        'rule_name': 'build_with_batch3',
-        'msvs_cygwin_shell': 1,
-        'extension': 'S',
-        'outputs': ['output5.obj'],
-        'action': ['python', '-c', "import shutil; "
-          "shutil.copy('<(RULE_INPUT_PATH)', 'output5.obj')"],
-      },],
-      'sources': ['a.S'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/command-quote/go.bat b/tools/gyp/test/win/command-quote/go.bat
deleted file mode 100644
index dc3508f..0000000
--- a/tools/gyp/test/win/command-quote/go.bat
+++ /dev/null
@@ -1,7 +0,0 @@
-@echo off
-
-:: Copyright (c) 2012 Google Inc. All rights reserved.
-:: Use of this source code is governed by a BSD-style license that can be
-:: found in the LICENSE file.
-
-copy %1 %2
diff --git a/tools/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp b/tools/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp
deleted file mode 100644
index 3dff4c4..0000000
--- a/tools/gyp/test/win/command-quote/subdir/and/another/in-subdir.gyp
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_batch_depth',
-      'type': 'none',
-      'variables': {
-        # Taken from native_client/build/common.gypi. Seems unintentional (a
-        # string in a 1 element list)? But since it works on other generators,
-        # I guess it should work here too.
-        'filepath': [ 'call <(DEPTH)/../../../go.bat' ],
-      },
-      'rules': [
-      {
-        'rule_name': 'build_with_batch4',
-        'msvs_cygwin_shell': 0,
-        'extension': 'S',
-        'outputs': ['output4.obj'],
-        'action': ['<@(filepath)', '<(RULE_INPUT_PATH)', 'output4.obj'],
-      },],
-      'sources': ['<(DEPTH)\\..\\..\\..\\a.S'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/additional-include-dirs.cc b/tools/gyp/test/win/compiler-flags/additional-include-dirs.cc
deleted file mode 100644
index f1e11dd..0000000
--- a/tools/gyp/test/win/compiler-flags/additional-include-dirs.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// No path qualification to test compiler include dir specification.
-#include "header.h"
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/additional-include-dirs.gyp b/tools/gyp/test/win/compiler-flags/additional-include-dirs.gyp
deleted file mode 100644
index 42c7e84..0000000
--- a/tools/gyp/test/win/compiler-flags/additional-include-dirs.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_incs',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'AdditionalIncludeDirectories': [
-            'subdir',
-          ],
-        }
-      },
-      'sources': ['additional-include-dirs.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/additional-options.cc b/tools/gyp/test/win/compiler-flags/additional-options.cc
deleted file mode 100644
index c79572b..0000000
--- a/tools/gyp/test/win/compiler-flags/additional-options.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  // Generate a warning that will appear at level 4, but not level 1
-  // (truncation and unused local).
-  char c = 123456;
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/additional-options.gyp b/tools/gyp/test/win/compiler-flags/additional-options.gyp
deleted file mode 100644
index 6a365a2..0000000
--- a/tools/gyp/test/win/compiler-flags/additional-options.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_additional_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '4',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['additional-options.cc'],
-    },
-    {
-      'target_name': 'test_additional_one',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '4',
-          'WarnAsError': 'true',
-          'AdditionalOptions': [ '/W1' ],
-        }
-      },
-      'sources': ['additional-options.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/analysis.gyp b/tools/gyp/test/win/compiler-flags/analysis.gyp
deleted file mode 100644
index 97e9422..0000000
--- a/tools/gyp/test/win/compiler-flags/analysis.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_analysis_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnablePREfast': 'true',
-          'WarnAsError': 'true',
-        },
-      },
-      'sources': ['uninit.cc'],
-    },
-    {
-      'target_name': 'test_analysis_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnablePREfast': 'false',
-          'WarnAsError': 'true',
-        },
-      },
-      'sources': ['uninit.cc'],
-    },
-    {
-      'target_name': 'test_analysis_unspec',
-      'type': 'executable',
-      'sources': ['uninit.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'true',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/buffer-security-check.gyp b/tools/gyp/test/win/compiler-flags/buffer-security-check.gyp
deleted file mode 100644
index cc5a12b..0000000
--- a/tools/gyp/test/win/compiler-flags/buffer-security-check.gyp
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Turn debug information on so that we can see the name of the buffer
-    # security check cookie in the disassembly.
-    {
-      'target_name': 'test_bsc_unset',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-      'sources': ['buffer-security.cc'],
-    },
-    {
-      'target_name': 'test_bsc_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'BufferSecurityCheck': 'false',
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-      'sources': ['buffer-security.cc'],
-    },
-    {
-      'target_name': 'test_bsc_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'BufferSecurityCheck': 'true',
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-      'sources': ['buffer-security.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/buffer-security.cc b/tools/gyp/test/win/compiler-flags/buffer-security.cc
deleted file mode 100644
index e8a48a2..0000000
--- a/tools/gyp/test/win/compiler-flags/buffer-security.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <malloc.h>
-#include <string.h>
-
-int main() {
-  char* stuff = reinterpret_cast<char*>(_alloca(256));
-  strcpy(stuff, "blah");
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention-cdecl.def b/tools/gyp/test/win/compiler-flags/calling-convention-cdecl.def
deleted file mode 100644
index dc1dba0..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention-cdecl.def
+++ /dev/null
@@ -1,6 +0,0 @@
-; Copyright (c) 2014 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-EXPORTS
-  foo
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention-fastcall.def b/tools/gyp/test/win/compiler-flags/calling-convention-fastcall.def
deleted file mode 100644
index 2c61afe..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention-fastcall.def
+++ /dev/null
@@ -1,6 +0,0 @@
-; Copyright (c) 2014 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-EXPORTS
-  @foo@0
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention-stdcall.def b/tools/gyp/test/win/compiler-flags/calling-convention-stdcall.def
deleted file mode 100644
index 6c7e05e..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention-stdcall.def
+++ /dev/null
@@ -1,6 +0,0 @@
-; Copyright (c) 2014 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-EXPORTS
-  _foo@0
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention-vectorcall.def b/tools/gyp/test/win/compiler-flags/calling-convention-vectorcall.def
deleted file mode 100644
index 4ef119c..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention-vectorcall.def
+++ /dev/null
@@ -1,6 +0,0 @@
-; Copyright (c) 2014 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-EXPORTS
-  foo@@0
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention.cc b/tools/gyp/test/win/compiler-flags/calling-convention.cc
deleted file mode 100644
index 0d78a0c..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern "C" void foo() {
-}
diff --git a/tools/gyp/test/win/compiler-flags/calling-convention.gyp b/tools/gyp/test/win/compiler-flags/calling-convention.gyp
deleted file mode 100644
index 5069c55..0000000
--- a/tools/gyp/test/win/compiler-flags/calling-convention.gyp
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_cdecl',
-      'type': 'loadable_module',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'CallingConvention': 0,
-        },
-      },
-      'sources': [
-        'calling-convention.cc',
-        'calling-convention-cdecl.def',
-      ],
-    },
-    {
-      'target_name': 'test_fastcall',
-      'type': 'loadable_module',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'CallingConvention': 1,
-        },
-      },
-      'sources': [
-        'calling-convention.cc',
-        'calling-convention-fastcall.def',
-      ],
-    },
-    {
-      'target_name': 'test_stdcall',
-      'type': 'loadable_module',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'CallingConvention': 2,
-        },
-      },
-      'sources': [
-        'calling-convention.cc',
-        'calling-convention-stdcall.def',
-      ],
-    },
-  ],
-  'conditions': [
-    ['MSVS_VERSION[0:4]>="2013"', {
-      'targets': [
-        {
-          'target_name': 'test_vectorcall',
-          'type': 'loadable_module',
-          'msvs_settings': {
-            'VCCLCompilerTool': {
-              'CallingConvention': 3,
-            },
-          },
-          'sources': [
-            'calling-convention.cc',
-            'calling-convention-vectorcall.def',
-          ],
-        },
-      ],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/win/compiler-flags/character-set-mbcs.cc b/tools/gyp/test/win/compiler-flags/character-set-mbcs.cc
deleted file mode 100644
index 3286304..0000000
--- a/tools/gyp/test/win/compiler-flags/character-set-mbcs.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _MBCS
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/character-set-unicode.cc b/tools/gyp/test/win/compiler-flags/character-set-unicode.cc
deleted file mode 100644
index 32e6972..0000000
--- a/tools/gyp/test/win/compiler-flags/character-set-unicode.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _UNICODE
-#error
-#endif
-
-#ifndef UNICODE
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/character-set.gyp b/tools/gyp/test/win/compiler-flags/character-set.gyp
deleted file mode 100644
index 3dc4555..0000000
--- a/tools/gyp/test/win/compiler-flags/character-set.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_cs_notset',
-      'product_name': 'test_cs_notset',
-      'type': 'executable',
-      'msvs_configuration_attributes': {
-        'CharacterSet': '0'
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_cs_unicode',
-      'product_name': 'test_cs_unicode',
-      'type': 'executable',
-      'msvs_configuration_attributes': {
-        'CharacterSet': '1'
-      },
-      'sources': ['character-set-unicode.cc'],
-    },
-    {
-      'target_name': 'test_cs_mbcs',
-      'product_name': 'test_cs_mbcs',
-      'type': 'executable',
-      'msvs_configuration_attributes': {
-        'CharacterSet': '2'
-      },
-      'sources': ['character-set-mbcs.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/compile-as-managed.cc b/tools/gyp/test/win/compiler-flags/compile-as-managed.cc
deleted file mode 100644
index a29c71e..0000000
--- a/tools/gyp/test/win/compiler-flags/compile-as-managed.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <vcclr.h>
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/compile-as-managed.gyp b/tools/gyp/test/win/compiler-flags/compile-as-managed.gyp
deleted file mode 100644
index 3bacbbc..0000000
--- a/tools/gyp/test/win/compiler-flags/compile-as-managed.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test-compile-as-managed',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'CompileAsManaged': 'true',
-          'ExceptionHandling': '0' # /clr is incompatible with /EHs
-        }
-      },
-      'sources': ['compile-as-managed.cc'],
-    },
-    {
-      'target_name': 'test-compile-as-unmanaged',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'CompileAsManaged': 'false',
-        }
-      },
-      'sources': ['compile-as-managed.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/compile-as-winrt.cc b/tools/gyp/test/win/compiler-flags/compile-as-winrt.cc
deleted file mode 100644
index da9954f..0000000
--- a/tools/gyp/test/win/compiler-flags/compile-as-winrt.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2016 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-using namespace Platform;
-
-int main() {
-  wchar_t msg[] = L"Test";
-  String^ str1 = ref new String(msg);
-  auto str2 = String::Concat(str1, " Concat");
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/compile-as-winrt.gyp b/tools/gyp/test/win/compiler-flags/compile-as-winrt.gyp
deleted file mode 100644
index 8978e50..0000000
--- a/tools/gyp/test/win/compiler-flags/compile-as-winrt.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2016 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test-compile-as-winrt',
-      'type': 'executable',
-      'msvs_windows_sdk_version': 'v10.0',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'AdditionalUsingDirectories': ['$(VCInstallDir)vcpackages;$(WindowsSdkDir)UnionMetadata;%(AdditionalUsingDirectories)'],
-          'CompileAsWinRT': 'true'
-        }
-      },
-      'sources': ['compile-as-winrt.cc']
-    }
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/debug-format.gyp b/tools/gyp/test/win/compiler-flags/debug-format.gyp
deleted file mode 100644
index daaed23..0000000
--- a/tools/gyp/test/win/compiler-flags/debug-format.gyp
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test-debug-format-off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '0'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test-debug-format-oldstyle',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '1'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test-debug-format-pdb',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test-debug-format-editcontinue',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '4'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.cc b/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.cc
deleted file mode 100644
index beeca2a..0000000
--- a/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-template <bool>
-struct CompileAssert {
-};
-
-#define COMPILE_ASSERT(expr, msg) \
-  typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
-
-int main() {
-  COMPILE_ASSERT(char(-1) > 0, default_char_is_unsigned);
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp b/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp
deleted file mode 100644
index 941e581..0000000
--- a/tools/gyp/test/win/compiler-flags/default-char-is-unsigned.gyp
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_default_char_is_unsigned',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DefaultCharIsUnsigned': 'true',
-        },
-      },
-      'sources': [
-        'default-char-is-unsigned.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/compiler-flags/disable-specific-warnings.cc b/tools/gyp/test/win/compiler-flags/disable-specific-warnings.cc
deleted file mode 100644
index d312f5f..0000000
--- a/tools/gyp/test/win/compiler-flags/disable-specific-warnings.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  // Causes level 1 warning (C4700)
-  int i;
-  return i;
-}
diff --git a/tools/gyp/test/win/compiler-flags/disable-specific-warnings.gyp b/tools/gyp/test/win/compiler-flags/disable-specific-warnings.gyp
deleted file mode 100644
index d81d694..0000000
--- a/tools/gyp/test/win/compiler-flags/disable-specific-warnings.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_disable_specific_warnings_set',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'true',
-          'DisableSpecificWarnings': ['4700']
-        }
-      },
-      'sources': ['disable-specific-warnings.cc']
-    },
-    {
-      'target_name': 'test_disable_specific_warnings_unset',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'true'
-        }
-      },
-      'sources': ['disable-specific-warnings.cc']
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc b/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc
deleted file mode 100644
index 432ef54..0000000
--- a/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.cc
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-static const char* GetArchOption() {
-#if _M_IX86_FP == 0
-  return "IA32";
-#elif _M_IX86_FP == 1
-  return "SSE";
-#elif _M_IX86_FP == 2
-#  if defined(__AVX2__)
-  return "AVX2";
-#  elif defined(__AVX__)
-  return "AVX";
-#  else
-  return "SSE2";
-#  endif
-#else
-  return "UNSUPPORTED OPTION";
-#endif
-}
-
-int main() {
-  printf("/arch:%s\n", GetArchOption());
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp b/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
deleted file mode 100644
index 9c49edc..0000000
--- a/tools/gyp/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'sse_extensions',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableEnhancedInstructionSet': '1',  # StreamingSIMDExtensions
-        }
-      },
-      'sources': ['enable-enhanced-instruction-set.cc'],
-    },
-    {
-      'target_name': 'sse2_extensions',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableEnhancedInstructionSet': '2',  # StreamingSIMDExtensions2
-        }
-      },
-      'sources': ['enable-enhanced-instruction-set.cc'],
-    },
-  ],
-  'conditions': [
-    ['MSVS_VERSION[0:4]>"2010"', {
-      'targets': [
-        {
-          'target_name': 'avx_extensions',
-          'type': 'executable',
-          'msvs_settings': {
-            'VCCLCompilerTool': {
-              'EnableEnhancedInstructionSet': '3',  # AdvancedVectorExtensions
-            }
-          },
-          'sources': ['enable-enhanced-instruction-set.cc'],
-        },
-        {
-          'target_name': 'no_extensions',
-          'type': 'executable',
-          'msvs_settings': {
-            'VCCLCompilerTool': {
-              'EnableEnhancedInstructionSet': '4',  # NoExtensions
-            }
-          },
-          'sources': ['enable-enhanced-instruction-set.cc'],
-        },
-      ],
-    }],
-    ['MSVS_VERSION[0:4]>="2013"', {
-      'targets': [
-        {
-          'target_name': 'avx2_extensions',
-          'type': 'executable',
-          'msvs_settings': {
-            'VCCLCompilerTool': {
-              'EnableEnhancedInstructionSet': '5',  # AdvancedVectorExtensions2
-            }
-          },
-          'sources': ['enable-enhanced-instruction-set.cc'],
-        },
-      ],
-    }],
-  ],
-}
diff --git a/tools/gyp/test/win/compiler-flags/exception-handling-on.cc b/tools/gyp/test/win/compiler-flags/exception-handling-on.cc
deleted file mode 100644
index 5d9a3af..0000000
--- a/tools/gyp/test/win/compiler-flags/exception-handling-on.cc
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <excpt.h>
-#include <stdlib.h>
-
-void fail() {
-   try {
-      int i = 0, j = 1;
-      j /= i;
-   } catch(...) {
-     exit(1);
-   }
-}
-
-int main() {
-   __try {
-      fail();
-   } __except(EXCEPTION_EXECUTE_HANDLER) {
-     return 2;
-   }
-   return 3;
-}
diff --git a/tools/gyp/test/win/compiler-flags/exception-handling.gyp b/tools/gyp/test/win/compiler-flags/exception-handling.gyp
deleted file mode 100644
index c266768..0000000
--- a/tools/gyp/test/win/compiler-flags/exception-handling.gyp
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Optimization disabled so that the exception-causing code is not removed
-    # (divide by zero was getting optimized away in VS2010).
-    {
-      'target_name': 'test_eh_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'ExceptionHandling': '0',
-          'WarnAsError': 'true',
-          'Optimization': '0',
-        }
-      },
-      'sources': ['exception-handling-on.cc'],
-    },
-    {
-      'target_name': 'test_eh_s',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'ExceptionHandling': '1',
-          'WarnAsError': 'true',
-          'Optimization': '0',
-        }
-      },
-      'sources': ['exception-handling-on.cc'],
-    },
-    {
-      'target_name': 'test_eh_a',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'ExceptionHandling': '2',
-          'WarnAsError': 'true',
-          'Optimization': '0',
-        }
-      },
-      'sources': ['exception-handling-on.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/floating-point-model-fast.cc b/tools/gyp/test/win/compiler-flags/floating-point-model-fast.cc
deleted file mode 100644
index 9d22152..0000000
--- a/tools/gyp/test/win/compiler-flags/floating-point-model-fast.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifdef _M_FP_PRECISE
-#error
-#endif
-
-#ifdef _M_FP_STRICT
-#error
-#endif
-
-#ifndef _M_FP_FAST
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/floating-point-model-precise.cc b/tools/gyp/test/win/compiler-flags/floating-point-model-precise.cc
deleted file mode 100644
index 1191a74..0000000
--- a/tools/gyp/test/win/compiler-flags/floating-point-model-precise.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _M_FP_PRECISE
-#error
-#endif
-
-#ifdef _M_FP_STRICT
-#error
-#endif
-
-#ifdef _M_FP_FAST
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/floating-point-model-strict.cc b/tools/gyp/test/win/compiler-flags/floating-point-model-strict.cc
deleted file mode 100644
index 1ffde36..0000000
--- a/tools/gyp/test/win/compiler-flags/floating-point-model-strict.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifdef _M_FP_PRECISE
-#error
-#endif
-
-#ifndef _M_FP_STRICT
-#error
-#endif
-
-#ifdef _M_FP_FAST
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/floating-point-model.gyp b/tools/gyp/test/win/compiler-flags/floating-point-model.gyp
deleted file mode 100644
index 857b275..0000000
--- a/tools/gyp/test/win/compiler-flags/floating-point-model.gyp
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test-floating-point-model-default',
-      'type': 'executable',
-      'sources': ['floating-point-model-precise.cc'],
-    },
-    {
-      'target_name': 'test-floating-point-model-precise',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FloatingPointModel': '0'
-        }
-      },
-      'sources': ['floating-point-model-precise.cc'],
-    },
-    {
-      'target_name': 'test-floating-point-model-strict',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FloatingPointModel': '1'
-        }
-      },
-      'sources': ['floating-point-model-strict.cc'],
-    },
-    {
-      'target_name': 'test-floating-point-model-fast',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FloatingPointModel': '2'
-        }
-      },
-      'sources': ['floating-point-model-fast.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc b/tools/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc
deleted file mode 100644
index 85cb0f3..0000000
--- a/tools/gyp/test/win/compiler-flags/force-include-files-with-precompiled.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int main() {
-  std::string s;
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/force-include-files.cc b/tools/gyp/test/win/compiler-flags/force-include-files.cc
deleted file mode 100644
index 4a93de5..0000000
--- a/tools/gyp/test/win/compiler-flags/force-include-files.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  std::list<std::vector<std::string> > l;
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/force-include-files.gyp b/tools/gyp/test/win/compiler-flags/force-include-files.gyp
deleted file mode 100644
index 2031546..0000000
--- a/tools/gyp/test/win/compiler-flags/force-include-files.gyp
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_force_include_files',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'ForcedIncludeFiles': ['string', 'vector', 'list'],
-        },
-      },
-      'sources': [
-        'force-include-files.cc',
-      ],
-    },
-    {
-      'target_name': 'test_force_include_with_precompiled',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'ForcedIncludeFiles': ['string'],
-        },
-      },
-      'msvs_precompiled_header': 'stdio.h',
-      'msvs_precompiled_source': 'precomp.cc',
-      'msvs_disabled_warnings': [ 4530, ],
-      'sources': [
-        'force-include-files-with-precompiled.cc',
-        'precomp.cc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/compiler-flags/function-level-linking.cc b/tools/gyp/test/win/compiler-flags/function-level-linking.cc
deleted file mode 100644
index 4952272..0000000
--- a/tools/gyp/test/win/compiler-flags/function-level-linking.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int comdat_function() {
-  return 1;
-}
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/function-level-linking.gyp b/tools/gyp/test/win/compiler-flags/function-level-linking.gyp
deleted file mode 100644
index 5858586..0000000
--- a/tools/gyp/test/win/compiler-flags/function-level-linking.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_fll_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'false'
-        }
-      },
-      'sources': ['function-level-linking.cc'],
-    },
-    {
-      'target_name': 'test_fll_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-        }
-      },
-      'sources': ['function-level-linking.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/hello.cc b/tools/gyp/test/win/compiler-flags/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/win/compiler-flags/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/optimizations.gyp b/tools/gyp/test/win/compiler-flags/optimizations.gyp
deleted file mode 100644
index e63096f..0000000
--- a/tools/gyp/test/win/compiler-flags/optimizations.gyp
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_opt_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '0'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_lev_size',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '1'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_lev_speed',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '2'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_lev_max',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '3'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_unset',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_fpo',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'OmitFramePointers': 'true'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_fpo_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'OmitFramePointers': 'false'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_intrinsic',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableIntrinsicFunctions': 'true'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_intrinsic_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableIntrinsicFunctions': 'false'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_inline_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'InlineFunctionExpansion': '0'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_inline_manual',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'InlineFunctionExpansion': '1'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_inline_auto',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'InlineFunctionExpansion': '2'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_neither',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FavorSizeOrSpeed': '0'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_speed',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FavorSizeOrSpeed': '1'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_size',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'FavorSizeOrSpeed': '2'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_wpo',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WholeProgramOptimization': 'true'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_sp',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'StringPooling': 'true'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_sp_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'StringPooling': 'false'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_fso',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFiberSafeOptimizations': 'true'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_opt_fso_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFiberSafeOptimizations': 'false'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/pdbname-override.gyp b/tools/gyp/test/win/compiler-flags/pdbname-override.gyp
deleted file mode 100644
index dad20e0..0000000
--- a/tools/gyp/test/win/compiler-flags/pdbname-override.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_pdbname',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-        'pdbname.cc',
-      ],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-          'ProgramDataBaseFileName': '<(PRODUCT_DIR)/compiler_generated.pdb',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/linker_generated.pdb',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/pdbname.cc b/tools/gyp/test/win/compiler-flags/pdbname.cc
deleted file mode 100644
index 0fe05d5..0000000
--- a/tools/gyp/test/win/compiler-flags/pdbname.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int some_function() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/pdbname.gyp b/tools/gyp/test/win/compiler-flags/pdbname.gyp
deleted file mode 100644
index 8fcf754..0000000
--- a/tools/gyp/test/win/compiler-flags/pdbname.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_pdbname',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-        'pdbname.cc',
-      ],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/precomp.cc b/tools/gyp/test/win/compiler-flags/precomp.cc
deleted file mode 100644
index d16bac8..0000000
--- a/tools/gyp/test/win/compiler-flags/precomp.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-#include <stdio.h>
diff --git a/tools/gyp/test/win/compiler-flags/rtti-on.cc b/tools/gyp/test/win/compiler-flags/rtti-on.cc
deleted file mode 100644
index 2d3ad03..0000000
--- a/tools/gyp/test/win/compiler-flags/rtti-on.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _CPPRTTI
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/rtti.gyp b/tools/gyp/test/win/compiler-flags/rtti.gyp
deleted file mode 100644
index 704cd58..0000000
--- a/tools/gyp/test/win/compiler-flags/rtti.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_rtti_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeTypeInfo': 'false',
-          'WarnAsError': 'true'
-        }
-      },
-      'sources': ['rtti-on.cc'],
-    },
-    {
-      'target_name': 'test_rtti_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeTypeInfo': 'true',
-          'WarnAsError': 'true'
-        }
-      },
-      'sources': ['rtti-on.cc'],
-    },
-    {
-      'target_name': 'test_rtti_unset',
-      'type': 'executable',
-      'msvs_settings': {
-      },
-      'sources': ['rtti-on.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-checks.cc b/tools/gyp/test/win/compiler-flags/runtime-checks.cc
deleted file mode 100644
index fdb811d..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-checks.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef __MSVC_RUNTIME_CHECKS
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-checks.gyp b/tools/gyp/test/win/compiler-flags/runtime-checks.gyp
deleted file mode 100644
index 8ea3092..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-checks.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_brc_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '0',
-        }
-      },
-      'sources': ['runtime-checks.cc'],
-    },
-    {
-      'target_name': 'test_brc_1',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'Optimization': '0',
-          'BasicRuntimeChecks': '3'
-        }
-      },
-      'sources': ['runtime-checks.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-library-md.cc b/tools/gyp/test/win/compiler-flags/runtime-library-md.cc
deleted file mode 100644
index 87c8302..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-library-md.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _MT
-#error
-#endif
-
-#ifdef _DEBUG
-#error
-#endif
-
-#ifndef _DLL
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-library-mdd.cc b/tools/gyp/test/win/compiler-flags/runtime-library-mdd.cc
deleted file mode 100644
index 9f175e4..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-library-mdd.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _MT
-#error
-#endif
-
-#ifndef _DEBUG
-#error
-#endif
-
-#ifndef _DLL
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-library-mt.cc b/tools/gyp/test/win/compiler-flags/runtime-library-mt.cc
deleted file mode 100644
index 27e62b6..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-library-mt.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _MT
-#error
-#endif
-
-#ifdef _DEBUG
-#error
-#endif
-
-#ifdef _DLL
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-library-mtd.cc b/tools/gyp/test/win/compiler-flags/runtime-library-mtd.cc
deleted file mode 100644
index a9921db..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-library-mtd.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _MT
-#error
-#endif
-
-#ifndef _DEBUG
-#error
-#endif
-
-#ifdef _DLL
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/runtime-library.gyp b/tools/gyp/test/win/compiler-flags/runtime-library.gyp
deleted file mode 100644
index 04afc39..0000000
--- a/tools/gyp/test/win/compiler-flags/runtime-library.gyp
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_rl_md',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeLibrary': '2'
-        }
-      },
-      'sources': ['runtime-library-md.cc'],
-    },
-    {
-      'target_name': 'test_rl_mdd',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeLibrary': '3'
-        }
-      },
-      'sources': ['runtime-library-mdd.cc'],
-    },
-    {
-      'target_name': 'test_rl_mt',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeLibrary': '0'
-        }
-      },
-      'sources': ['runtime-library-mt.cc'],
-    },
-    {
-      'target_name': 'test_rl_mtd',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'RuntimeLibrary': '1'
-        }
-      },
-      'sources': ['runtime-library-mtd.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/subdir/header.h b/tools/gyp/test/win/compiler-flags/subdir/header.h
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/compiler-flags/subdir/header.h
+++ /dev/null
diff --git a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp b/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp
deleted file mode 100644
index 456fe04..0000000
--- a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2010 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_treat_wchar_t_as_built_in_type_negative',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'TreatWChar_tAsBuiltInType': 'false',
-        },
-      },
-      'sources': [
-        'treat-wchar-t-as-built-in-type1.cc',
-      ],
-    },
-    {
-      'target_name': 'test_treat_wchar_t_as_built_in_type_positive',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'TreatWChar_tAsBuiltInType': 'true',
-        },
-      },
-      'sources': [
-        'treat-wchar-t-as-built-in-type2.cc',
-      ],
-    },
-
-  ],
-}
diff --git a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc b/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc
deleted file mode 100644
index fc1ed0b..0000000
--- a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type1.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifdef _NATIVE_WCHAR_T_DEFINED
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc b/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc
deleted file mode 100644
index 28ab94f..0000000
--- a/tools/gyp/test/win/compiler-flags/treat-wchar-t-as-built-in-type2.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef _NATIVE_WCHAR_T_DEFINED
-#error
-#endif
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/uninit.cc b/tools/gyp/test/win/compiler-flags/uninit.cc
deleted file mode 100644
index a9d5f5d..0000000
--- a/tools/gyp/test/win/compiler-flags/uninit.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Should trigger C6001: using uninitialized memory <variable> for |i|.
-int f(bool b) {
-  int i;
-  if (b)
-    i = 0;
-  return i;
-}
-
-int main() {}
diff --git a/tools/gyp/test/win/compiler-flags/warning-as-error.cc b/tools/gyp/test/win/compiler-flags/warning-as-error.cc
deleted file mode 100644
index fd2130a..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-as-error.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  // Cause a warning, even at /W1
-  int export;
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-as-error.gyp b/tools/gyp/test/win/compiler-flags/warning-as-error.gyp
deleted file mode 100644
index d71f261..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-as-error.gyp
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_warn_as_error_false',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'false'
-        }
-      },
-      'sources': ['warning-as-error.cc']
-    },
-    {
-      'target_name': 'test_warn_as_error_true',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'true'
-        }
-      },
-      'sources': ['warning-as-error.cc']
-    },
-    {
-      'target_name': 'test_warn_as_error_unset',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-        }
-      },
-      'sources': ['warning-as-error.cc']
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-level.gyp b/tools/gyp/test/win/compiler-flags/warning-level.gyp
deleted file mode 100644
index 2297aa7..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-level.gyp
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Level 1
-    {
-      'target_name': 'test_wl1_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '1',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level1.cc'],
-    },
-    {
-      'target_name': 'test_wl1_pass',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '1',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level2.cc'],
-    },
-
-    # Level 2
-    {
-      'target_name': 'test_wl2_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '2',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level2.cc'],
-    },
-    {
-      'target_name': 'test_wl2_pass',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '2',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level3.cc'],
-    },
-
-    # Level 3
-    {
-      'target_name': 'test_wl3_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '3',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level3.cc'],
-    },
-    {
-      'target_name': 'test_wl3_pass',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '3',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level4.cc'],
-    },
-
-
-    # Level 4
-    {
-      'target_name': 'test_wl4_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '4',
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level4.cc'],
-    },
-
-    # Default level
-    {
-      'target_name': 'test_def_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarnAsError': 'true',
-        }
-      },
-      'sources': ['warning-level1.cc'],
-    },
-    {
-      'target_name': 'test_def_pass',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-        }
-      },
-      'sources': ['warning-level2.cc'],
-    },
-
-  ]
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-level1.cc b/tools/gyp/test/win/compiler-flags/warning-level1.cc
deleted file mode 100644
index 119578d..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-level1.cc
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  int export; // Cause a level 1 warning (C4237).
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-level2.cc b/tools/gyp/test/win/compiler-flags/warning-level2.cc
deleted file mode 100644
index 9a26703..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-level2.cc
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int f(int x) {
-  return 0;
-}
-
-int main() {
-  double x = 10.1;
-  // Cause a level 2 warning (C4243).
-  return f(x);
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-level3.cc b/tools/gyp/test/win/compiler-flags/warning-level3.cc
deleted file mode 100644
index e0a9f3c..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-level3.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Cause a level 3 warning (C4359).
-struct __declspec(align(8)) C8 { __int64 i; };
-struct __declspec(align(4)) C4 { C8 m8; };
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/compiler-flags/warning-level4.cc b/tools/gyp/test/win/compiler-flags/warning-level4.cc
deleted file mode 100644
index 48a4fb7..0000000
--- a/tools/gyp/test/win/compiler-flags/warning-level4.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  const int i = -1;
-  // Cause a level 4 warning (C4245).
-  unsigned int j = i;
-  return 0;
-}
diff --git a/tools/gyp/test/win/enable-winrt/dllmain.cc b/tools/gyp/test/win/enable-winrt/dllmain.cc
deleted file mode 100644
index dedd83c..0000000
--- a/tools/gyp/test/win/enable-winrt/dllmain.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-#include <wrl.h>
-#include <wrl/wrappers/corewrappers.h>
-#include <windows.graphics.display.h>
-
-using namespace Microsoft::WRL;
-using namespace Microsoft::WRL::Wrappers;
-using namespace ABI::Windows::Foundation;
-using namespace ABI::Windows::Graphics::Display;
-
-bool TryToUseSomeWinRT() {
-  ComPtr<IDisplayPropertiesStatics> dp;
-  HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
-  HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
-  if (SUCCEEDED(hr)) {
-    float dpi = 96.0f;
-    if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
-      return true;
-    }
-  }
-  return false;
-}
-
-BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
-  return TRUE;
-}
diff --git a/tools/gyp/test/win/enable-winrt/enable-winrt.gyp b/tools/gyp/test/win/enable-winrt/enable-winrt.gyp
deleted file mode 100644
index 69f7018..0000000
--- a/tools/gyp/test/win/enable-winrt/enable-winrt.gyp
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'enable_winrt_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_missing_dll',
-      'type': 'shared_library',
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_winphone_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_enable_winphone': 1,
-      'sources': [
-        'dllmain.cc',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'AdditionalDependencies': [
-            '%(AdditionalDependencies)',
-          ],
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py b/tools/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py
deleted file mode 100644
index 8c8c365..0000000
--- a/tools/gyp/test/win/generator-output-different-drive/gyptest-generator-output-different-drive.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that the generator output can be written to a different drive on Windows.
-"""
-
-import os
-import TestGyp
-import string
-import subprocess
-import sys
-
-
-if sys.platform == 'win32':
-  import win32api
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  def GetFirstFreeDriveLetter():
-    """ Returns the first unused Windows drive letter in [A, Z] """
-    all_letters = [c for c in string.uppercase]
-    in_use = win32api.GetLogicalDriveStrings()
-    free = list(set(all_letters) - set(in_use))
-    return free[0]
-
-  output_dir = os.path.join('different-drive', 'output')
-  if not os.path.isdir(os.path.abspath(output_dir)):
-    os.makedirs(os.path.abspath(output_dir))
-  output_drive = GetFirstFreeDriveLetter()
-  subprocess.call(['subst', '%c:' % output_drive, os.path.abspath(output_dir)])
-  try:
-    test.run_gyp('prog.gyp', '--generator-output=%s' % (
-        os.path.join(output_drive, 'output')))
-    test.build('prog.gyp', test.ALL, chdir=os.path.join(output_drive, 'output'))
-    test.built_file_must_exist('program', chdir=os.path.join(output_drive,
-                                                             'output'),
-                               type=test.EXECUTABLE)
-    test.pass_test()
-  finally:
-    subprocess.call(['subst', '%c:' % output_drive, '/D'])
diff --git a/tools/gyp/test/win/generator-output-different-drive/prog.c b/tools/gyp/test/win/generator-output-different-drive/prog.c
deleted file mode 100644
index 7937f5d..0000000
--- a/tools/gyp/test/win/generator-output-different-drive/prog.c
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <stdio.h>
-
-int main(void) {
-  printf("Hello from prog.c\n");
-  return 0;
-}
diff --git a/tools/gyp/test/win/generator-output-different-drive/prog.gyp b/tools/gyp/test/win/generator-output-different-drive/prog.gyp
deleted file mode 100644
index 92f53e5..0000000
--- a/tools/gyp/test/win/generator-output-different-drive/prog.gyp
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        'prog.c',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/gyptest-asm-files.py b/tools/gyp/test/win/gyptest-asm-files.py
deleted file mode 100644
index 007b52e..0000000
--- a/tools/gyp/test/win/gyptest-asm-files.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure .s files aren't passed to cl.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'asm-files'
-  test.run_gyp('asm-files.gyp', chdir=CHDIR)
-  # The compiler will error out if it's passed the .s files, so just make sure
-  # the build succeeds. The compiler doesn't directly support building
-  # assembler files on Windows; they have to be built explicitly with a
-  # third-party tool.
-  test.build('asm-files.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-additional-include-dirs.py b/tools/gyp/test/win/gyptest-cl-additional-include-dirs.py
deleted file mode 100644
index 1fabfa9..0000000
--- a/tools/gyp/test/win/gyptest-cl-additional-include-dirs.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure additional include dirs are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('additional-include-dirs.gyp', chdir=CHDIR)
-  test.build('additional-include-dirs.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-additional-options.py b/tools/gyp/test/win/gyptest-cl-additional-options.py
deleted file mode 100644
index e9aea10..0000000
--- a/tools/gyp/test/win/gyptest-cl-additional-options.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure additional manual compiler flags are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('additional-options.gyp', chdir=CHDIR)
-
-  # Warning level not overridden, must fail.
-  test.build('additional-options.gyp', 'test_additional_none', chdir=CHDIR,
-      status=1)
-
-  # Warning level is overridden, must succeed.
-  test.build('additional-options.gyp', 'test_additional_one', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-analysis.py b/tools/gyp/test/win/gyptest-cl-analysis.py
deleted file mode 100644
index 7b3b989..0000000
--- a/tools/gyp/test/win/gyptest-cl-analysis.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure PREfast (code analysis) setting is extracted properly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if (sys.platform == 'win32' and
-    int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2012):
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('analysis.gyp', chdir=CHDIR)
-
-  # Analysis enabled, should fail.
-  test.build('analysis.gyp', 'test_analysis_on', chdir=CHDIR, status=1)
-
-  # Analysis not enabled, or unspecified, should pass.
-  test.build('analysis.gyp', 'test_analysis_off', chdir=CHDIR)
-  test.build('analysis.gyp', 'test_analysis_unspec', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-buffer-security-check.py b/tools/gyp/test/win/gyptest-cl-buffer-security-check.py
deleted file mode 100644
index e22869c..0000000
--- a/tools/gyp/test/win/gyptest-cl-buffer-security-check.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure buffer security check setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('buffer-security-check.gyp', chdir=CHDIR)
-  test.build('buffer-security-check.gyp', chdir=CHDIR)
-
-  def GetDisassemblyOfMain(exe):
-    # The standard library uses buffer security checks independent of our
-    # buffer security settings, so we extract just our code (i.e. main()) to
-    # check against.
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    output = test.run_dumpbin('/disasm', full_path)
-    result = []
-    in_main = False
-    for line in output.splitlines():
-      if line == '_main:':
-        in_main = True
-      elif in_main:
-        # Disassembly of next function starts.
-        if line.startswith('_'):
-          break
-        result.append(line)
-    return '\n'.join(result)
-
-  # Buffer security checks are on by default, make sure security_cookie
-  # appears in the disassembly of our code.
-  if 'security_cookie' not in GetDisassemblyOfMain('test_bsc_unset.exe'):
-    test.fail_test()
-
-  # Explicitly on.
-  if 'security_cookie' not in GetDisassemblyOfMain('test_bsc_on.exe'):
-    test.fail_test()
-
-  # Explicitly off, shouldn't be a reference to the security cookie.
-  if 'security_cookie' in GetDisassemblyOfMain('test_bsc_off.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-calling-convention.py b/tools/gyp/test/win/gyptest-cl-calling-convention.py
deleted file mode 100644
index b5fdc47..0000000
--- a/tools/gyp/test/win/gyptest-cl-calling-convention.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure calling convention setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('calling-convention.gyp', chdir=CHDIR)
-  test.build('calling-convention.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-character-set.py b/tools/gyp/test/win/gyptest-cl-character-set.py
deleted file mode 100644
index 7fabb67..0000000
--- a/tools/gyp/test/win/gyptest-cl-character-set.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure character set setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('character-set.gyp', chdir=CHDIR)
-  test.build('character-set.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-compile-as-managed.py b/tools/gyp/test/win/gyptest-cl-compile-as-managed.py
deleted file mode 100644
index 0d7b420..0000000
--- a/tools/gyp/test/win/gyptest-cl-compile-as-managed.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure compile as managed (clr) settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('compile-as-managed.gyp', chdir=CHDIR)
-  test.build('compile-as-managed.gyp', "test-compile-as-managed", chdir=CHDIR)
-  # Must fail.
-  test.build('compile-as-managed.gyp', "test-compile-as-unmanaged",
-    chdir=CHDIR, status=1)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-compile-as-winrt.py b/tools/gyp/test/win/gyptest-cl-compile-as-winrt.py
deleted file mode 100644
index 3e0168b..0000000
--- a/tools/gyp/test/win/gyptest-cl-compile-as-winrt.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2016 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import TestGyp
-
-import os
-import sys
-
-if (sys.platform == 'win32' and
-    int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2015):
-  test = TestGyp.TestGyp(formats=['msvs'])
-
-  CHDIR = 'compiler-flags'
-
-  test.run_gyp('compile-as-winrt.gyp', chdir=CHDIR)
-
-  test.build('compile-as-winrt.gyp', 'test-compile-as-winrt', chdir=CHDIR)
-
-  test.pass_test()
\ No newline at end of file
diff --git a/tools/gyp/test/win/gyptest-cl-debug-format.py b/tools/gyp/test/win/gyptest-cl-debug-format.py
deleted file mode 100644
index 6c68a61..0000000
--- a/tools/gyp/test/win/gyptest-cl-debug-format.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure debug format settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('debug-format.gyp', chdir=CHDIR)
-
-  # While there are ways to verify via .pdb contents, the .pdb doesn't include
-  # which style the debug information was created from, so we resort to just
-  # verifying the flags are correct on the command line.
-
-  ninja_file = test.built_file_path('obj/test-debug-format-off.ninja',
-      chdir=CHDIR)
-  test.must_not_contain(ninja_file, '/Z7')
-  test.must_not_contain(ninja_file, '/Zi')
-  test.must_not_contain(ninja_file, '/ZI')
-
-  ninja_file = test.built_file_path('obj/test-debug-format-oldstyle.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Z7')
-
-  ninja_file = test.built_file_path('obj/test-debug-format-pdb.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Zi')
-
-  ninja_file = test.built_file_path('obj/test-debug-format-editcontinue.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/ZI')
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-default-char-is-unsigned.py b/tools/gyp/test/win/gyptest-cl-default-char-is-unsigned.py
deleted file mode 100644
index d20f674..0000000
--- a/tools/gyp/test/win/gyptest-cl-default-char-is-unsigned.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure DefaultCharIsUnsigned option is functional.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('default-char-is-unsigned.gyp', chdir=CHDIR)
-  test.build('default-char-is-unsigned.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-disable-specific-warnings.py b/tools/gyp/test/win/gyptest-cl-disable-specific-warnings.py
deleted file mode 100644
index cb253af..0000000
--- a/tools/gyp/test/win/gyptest-cl-disable-specific-warnings.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure disable specific warnings is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('disable-specific-warnings.gyp', chdir=CHDIR)
-
-  # The source file contains a warning, so if WarnAsError is true and
-  # DisableSpecificWarnings for the warning in question is set, then the build
-  # should succeed, otherwise it must fail.
-
-  test.build('disable-specific-warnings.gyp',
-             'test_disable_specific_warnings_set',
-             chdir=CHDIR)
-  test.build('disable-specific-warnings.gyp',
-             'test_disable_specific_warnings_unset',
-             chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py b/tools/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py
deleted file mode 100644
index 78a924a..0000000
--- a/tools/gyp/test/win/gyptest-cl-enable-enhanced-instruction-set.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test VCCLCompilerTool EnableEnhancedInstructionSet setting.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('enable-enhanced-instruction-set.gyp', chdir=CHDIR)
-
-  test.build('enable-enhanced-instruction-set.gyp', test.ALL, chdir=CHDIR)
-
-  test.run_built_executable('sse_extensions', chdir=CHDIR,
-                            stdout='/arch:SSE\n')
-  test.run_built_executable('sse2_extensions', chdir=CHDIR,
-                            stdout='/arch:SSE2\n')
-
-  # /arch:AVX introduced in VS2010, but MSBuild support lagged until 2012.
-  if os.path.exists(test.built_file_path('avx_extensions')):
-    test.run_built_executable('avx_extensions', chdir=CHDIR,
-                              stdout='/arch:AVX\n')
-
-  # /arch:IA32 introduced in VS2012.
-  if os.path.exists(test.built_file_path('no_extensions')):
-    test.run_built_executable('no_extensions', chdir=CHDIR,
-                              stdout='/arch:IA32\n')
-
-  # /arch:AVX2 introduced in VS2013r2.
-  if os.path.exists(test.built_file_path('avx2_extensions')):
-    test.run_built_executable('avx2_extensions', chdir=CHDIR,
-                              stdout='/arch:AVX2\n')
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-exception-handling.py b/tools/gyp/test/win/gyptest-cl-exception-handling.py
deleted file mode 100644
index 5738a54..0000000
--- a/tools/gyp/test/win/gyptest-cl-exception-handling.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure exception handling settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('exception-handling.gyp', chdir=CHDIR)
-
-  # Must fail.
-  test.build('exception-handling.gyp', 'test_eh_off', chdir=CHDIR,
-      status=1)
-
-  # Must succeed.
-  test.build('exception-handling.gyp', 'test_eh_s', chdir=CHDIR)
-  test.build('exception-handling.gyp', 'test_eh_a', chdir=CHDIR)
-
-  # Error code must be 1 if EHa, and 2 if EHsc.
-  test.run_built_executable('test_eh_a', chdir=CHDIR, status=1)
-  test.run_built_executable('test_eh_s', chdir=CHDIR, status=2)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-floating-point-model.py b/tools/gyp/test/win/gyptest-cl-floating-point-model.py
deleted file mode 100644
index 86ff478..0000000
--- a/tools/gyp/test/win/gyptest-cl-floating-point-model.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure floating point model settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('floating-point-model.gyp', chdir=CHDIR)
-  test.build('floating-point-model.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-force-include-files.py b/tools/gyp/test/win/gyptest-cl-force-include-files.py
deleted file mode 100644
index b73b8bd..0000000
--- a/tools/gyp/test/win/gyptest-cl-force-include-files.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure ForcedIncludeFiles option is functional.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('force-include-files.gyp', chdir=CHDIR)
-  test.build('force-include-files.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-function-level-linking.py b/tools/gyp/test/win/gyptest-cl-function-level-linking.py
deleted file mode 100644
index 17c29e2..0000000
--- a/tools/gyp/test/win/gyptest-cl-function-level-linking.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure function-level linking setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('function-level-linking.gyp', chdir=CHDIR)
-  test.build('function-level-linking.gyp', test.ALL, chdir=CHDIR)
-
-  def CheckForSectionString(binary, search_for, should_exist):
-    output = test.run_dumpbin('/headers', binary)
-    if should_exist and search_for not in output:
-      print 'Did not find "%s" in %s' % (search_for, binary)
-      test.fail_test()
-    elif not should_exist and search_for in output:
-      print 'Found "%s" in %s (and shouldn\'t have)' % (search_for, binary)
-      test.fail_test()
-
-  def Object(proj, obj):
-    sep = '.' if test.format == 'ninja' else '\\'
-    return 'obj\\%s%s%s' % (proj, sep, obj)
-
-  look_for = '''COMDAT; sym= "int __cdecl comdat_function'''
-
-  # When function level linking is on, the functions should be listed as
-  # separate comdat entries.
-
-  CheckForSectionString(
-      test.built_file_path(Object('test_fll_on', 'function-level-linking.obj'),
-                           chdir=CHDIR),
-      look_for,
-      should_exist=True)
-
-  CheckForSectionString(
-      test.built_file_path(Object('test_fll_off', 'function-level-linking.obj'),
-                           chdir=CHDIR),
-      look_for,
-      should_exist=False)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-optimizations.py b/tools/gyp/test/win/gyptest-cl-optimizations.py
deleted file mode 100644
index 31341f7..0000000
--- a/tools/gyp/test/win/gyptest-cl-optimizations.py
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure optimization settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('optimizations.gyp', chdir=CHDIR)
-
-  # It's hard to map flags to output contents in a non-fragile way (especially
-  # handling both 2008/2010), so just verify the correct ninja command line
-  # contents.
-
-  ninja_file = test.built_file_path('obj/test_opt_off.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, 'cflags = /Od')
-
-  ninja_file = test.built_file_path('obj/test_opt_lev_size.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, 'cflags = /O1')
-
-  ninja_file = test.built_file_path('obj/test_opt_lev_speed.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, 'cflags = /O2')
-
-  ninja_file = test.built_file_path('obj/test_opt_lev_max.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, 'cflags = /Ox')
-
-  ninja_file = test.built_file_path('obj/test_opt_unset.ninja', chdir=CHDIR)
-  test.must_not_contain(ninja_file, '/Od')
-  test.must_not_contain(ninja_file, '/O1')
-  test.must_not_contain(ninja_file, '/Ox')
-  # Set by default if none specified.
-  test.must_contain(ninja_file, '/O2')
-
-  ninja_file = test.built_file_path('obj/test_opt_fpo.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, '/Oy')
-  test.must_not_contain(ninja_file, '/Oy-')
-
-  ninja_file = test.built_file_path('obj/test_opt_fpo_off.ninja', chdir=CHDIR)
-  test.must_contain(ninja_file, '/Oy-')
-
-  ninja_file = test.built_file_path('obj/test_opt_intrinsic.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Oi')
-  test.must_not_contain(ninja_file, '/Oi-')
-
-  ninja_file = test.built_file_path('obj/test_opt_intrinsic_off.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Oi-')
-
-  ninja_file = test.built_file_path('obj/test_opt_inline_off.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Ob0')
-
-  ninja_file = test.built_file_path('obj/test_opt_inline_manual.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Ob1')
-
-  ninja_file = test.built_file_path('obj/test_opt_inline_auto.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Ob2')
-
-  ninja_file = test.built_file_path('obj/test_opt_neither.ninja',
-      chdir=CHDIR)
-  test.must_not_contain(ninja_file, '/Os')
-  test.must_not_contain(ninja_file, '/Ot')
-
-  ninja_file = test.built_file_path('obj/test_opt_size.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Os')
-
-  ninja_file = test.built_file_path('obj/test_opt_speed.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/Ot')
-
-  ninja_file = test.built_file_path('obj/test_opt_wpo.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/GL')
-
-  ninja_file = test.built_file_path('obj/test_opt_sp.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/GF')
-
-  ninja_file = test.built_file_path('obj/test_opt_sp_off.ninja',
-      chdir=CHDIR)
-  test.must_not_contain(ninja_file, '/GF')
-
-  ninja_file = test.built_file_path('obj/test_opt_fso.ninja',
-      chdir=CHDIR)
-  test.must_contain(ninja_file, '/GT')
-
-  ninja_file = test.built_file_path('obj/test_opt_fso_off.ninja',
-      chdir=CHDIR)
-  test.must_not_contain(ninja_file, '/GT')
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-pdbname-override.py b/tools/gyp/test/win/gyptest-cl-pdbname-override.py
deleted file mode 100644
index da9b49a..0000000
--- a/tools/gyp/test/win/gyptest-cl-pdbname-override.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure pdb is named as expected (shared between .cc files).
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('pdbname-override.gyp', chdir=CHDIR)
-  test.build('pdbname-override.gyp', test.ALL, chdir=CHDIR)
-
-  # Confirm that the pdb generated by the compiler was renamed (and we also
-  # have the linker generated one).
-  test.built_file_must_exist('compiler_generated.pdb', chdir=CHDIR)
-  test.built_file_must_exist('linker_generated.pdb', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-pdbname.py b/tools/gyp/test/win/gyptest-cl-pdbname.py
deleted file mode 100644
index f09ac23..0000000
--- a/tools/gyp/test/win/gyptest-cl-pdbname.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure pdb is named as expected (shared between .cc files).
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('pdbname.gyp', chdir=CHDIR)
-  test.build('pdbname.gyp', test.ALL, chdir=CHDIR)
-
-  # Confirm that the default behaviour is to name the .pdb per-target (rather
-  # than per .cc file).
-  test.built_file_must_exist('obj/test_pdbname.cc.pdb', chdir=CHDIR)
-
-  # Confirm that there is a .pdb alongside the executable.
-  test.built_file_must_exist('test_pdbname.exe', chdir=CHDIR)
-  test.built_file_must_exist('test_pdbname.exe.pdb', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-rtti.py b/tools/gyp/test/win/gyptest-cl-rtti.py
deleted file mode 100644
index d49a094..0000000
--- a/tools/gyp/test/win/gyptest-cl-rtti.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure RTTI setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('rtti.gyp', chdir=CHDIR)
-
-  # Must fail.
-  test.build('rtti.gyp', 'test_rtti_off', chdir=CHDIR, status=1)
-
-  # Must succeed.
-  test.build('rtti.gyp', 'test_rtti_on', chdir=CHDIR)
-
-  # Must succeed.
-  test.build('rtti.gyp', 'test_rtti_unset', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-runtime-checks.py b/tools/gyp/test/win/gyptest-cl-runtime-checks.py
deleted file mode 100644
index 4fd529f..0000000
--- a/tools/gyp/test/win/gyptest-cl-runtime-checks.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure RTC setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('runtime-checks.gyp', chdir=CHDIR)
-
-  # Runtime checks disabled, should fail.
-  test.build('runtime-checks.gyp', 'test_brc_none', chdir=CHDIR, status=1)
-
-  # Runtime checks enabled, should pass.
-  test.build('runtime-checks.gyp', 'test_brc_1', chdir=CHDIR)
-
-  # TODO(scottmg): There are other less frequently used/partial options, but
-  # it's not clear how to verify them, so ignore for now.
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-runtime-library.py b/tools/gyp/test/win/gyptest-cl-runtime-library.py
deleted file mode 100644
index 53c1492..0000000
--- a/tools/gyp/test/win/gyptest-cl-runtime-library.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure runtime C library setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('runtime-library.gyp', chdir=CHDIR)
-  test.build('runtime-library.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py b/tools/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py
deleted file mode 100644
index ca35fb5..0000000
--- a/tools/gyp/test/win/gyptest-cl-treat-wchar-t-as-built-in-type.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure TreatWChar_tAsBuiltInType option is functional.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('treat-wchar-t-as-built-in-type.gyp', chdir=CHDIR)
-  test.build('treat-wchar-t-as-built-in-type.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-warning-as-error.py b/tools/gyp/test/win/gyptest-cl-warning-as-error.py
deleted file mode 100644
index d4ef1b3..0000000
--- a/tools/gyp/test/win/gyptest-cl-warning-as-error.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure warning-as-error is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('warning-as-error.gyp', chdir=CHDIR)
-
-  # The source file contains a warning, so if WarnAsError is false (or
-  # default, which is also false), then the build should succeed, otherwise it
-  # must fail.
-
-  test.build('warning-as-error.gyp', 'test_warn_as_error_false', chdir=CHDIR)
-  test.build('warning-as-error.gyp', 'test_warn_as_error_unset', chdir=CHDIR)
-  test.build('warning-as-error.gyp', 'test_warn_as_error_true', chdir=CHDIR,
-    status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-cl-warning-level.py b/tools/gyp/test/win/gyptest-cl-warning-level.py
deleted file mode 100644
index 62a5b39..0000000
--- a/tools/gyp/test/win/gyptest-cl-warning-level.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure warning level is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'compiler-flags'
-  test.run_gyp('warning-level.gyp', chdir=CHDIR)
-
-  # A separate target for each warning level: one pass (compiling a file
-  # containing a warning that's above the specified level); and one fail
-  # (compiling a file at the specified level). No pass for 4 of course,
-  # because it would have to have no warnings. The default warning level is
-  # equivalent to level 1.
-
-  test.build('warning-level.gyp', 'test_wl1_fail', chdir=CHDIR, status=1)
-  test.build('warning-level.gyp', 'test_wl1_pass', chdir=CHDIR)
-
-  test.build('warning-level.gyp', 'test_wl2_fail', chdir=CHDIR, status=1)
-  test.build('warning-level.gyp', 'test_wl2_pass', chdir=CHDIR)
-
-  test.build('warning-level.gyp', 'test_wl3_fail', chdir=CHDIR, status=1)
-  test.build('warning-level.gyp', 'test_wl3_pass', chdir=CHDIR)
-
-  test.build('warning-level.gyp', 'test_wl4_fail', chdir=CHDIR, status=1)
-
-  test.build('warning-level.gyp', 'test_def_fail', chdir=CHDIR, status=1)
-  test.build('warning-level.gyp', 'test_def_pass', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-command-quote.py b/tools/gyp/test/win/gyptest-command-quote.py
deleted file mode 100644
index bd93ac5..0000000
--- a/tools/gyp/test/win/gyptest-command-quote.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-
-Make sure the program in a command can be a called batch file, or an
-application in the path. Specifically, this means not quoting something like
-"call x.bat", lest the shell look for a program named "call x.bat", rather
-than calling "x.bat".
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'command-quote'
-  test.run_gyp('command-quote.gyp', chdir=CHDIR)
-
-  test.build('command-quote.gyp', 'test_batch', chdir=CHDIR)
-  test.build('command-quote.gyp', 'test_call_separate', chdir=CHDIR)
-  test.build('command-quote.gyp', 'test_with_double_quotes', chdir=CHDIR)
-  test.build('command-quote.gyp', 'test_with_single_quotes', chdir=CHDIR)
-
-  # We confirm that this fails because other generators don't handle spaces in
-  # inputs, so it's preferable not to have it work here.
-  test.build('command-quote.gyp', 'test_with_spaces', chdir=CHDIR, status=1)
-
-  CHDIR = 'command-quote/subdir/and/another'
-  test.run_gyp('in-subdir.gyp', chdir=CHDIR)
-  test.build('in-subdir.gyp', 'test_batch_depth', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-crosscompile-ar.py b/tools/gyp/test/win/gyptest-crosscompile-ar.py
deleted file mode 100644
index dc75d96..0000000
--- a/tools/gyp/test/win/gyptest-crosscompile-ar.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that ar_host is set correctly when enabling cross-compile on windows.
-"""
-
-import TestGyp
-
-import sys
-import os
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'lib-crosscompile'
-  oldenv = os.environ.copy()
-  try:
-    os.environ['GYP_CROSSCOMPILE'] = '1'
-    test.run_gyp('use_host_ar.gyp', chdir=CHDIR)
-  finally:
-    os.environ.clear()
-    os.environ.update(oldenv)
-
-  test.build('use_host_ar.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
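The cross-compile test above sets GYP_CROSSCOMPILE temporarily and restores the whole environment in a finally block. The same pattern can be expressed as a reusable context manager; a small sketch using only the standard library:

    import contextlib
    import os

    @contextlib.contextmanager
    def temp_env(name, value):
        # Set one environment variable for the duration of the with-block,
        # then restore the previous environment, mirroring the try/finally above.
        saved = os.environ.copy()
        os.environ[name] = value
        try:
            yield
        finally:
            os.environ.clear()
            os.environ.update(saved)

    with temp_env('GYP_CROSSCOMPILE', '1'):
        pass  # the real test runs gyp here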
diff --git a/tools/gyp/test/win/gyptest-lib-ltcg.py b/tools/gyp/test/win/gyptest-lib-ltcg.py
deleted file mode 100644
index d1d7bad..0000000
--- a/tools/gyp/test/win/gyptest-lib-ltcg.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure LTCG setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'lib-flags'
-  test.run_gyp('ltcg.gyp', chdir=CHDIR)
-  test.build('ltcg.gyp', test.ALL, chdir=CHDIR)
-  test.must_not_contain_any_line(test.stdout(), ['restarting link with /LTCG'])
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-additional-deps.py b/tools/gyp/test/win/gyptest-link-additional-deps.py
deleted file mode 100644
index 62c5736..0000000
--- a/tools/gyp/test/win/gyptest-link-additional-deps.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure additional library dependencies are handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('additional-deps.gyp', chdir=CHDIR)
-  test.build('additional-deps.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-additional-options.py b/tools/gyp/test/win/gyptest-link-additional-options.py
deleted file mode 100644
index 7e57ae4..0000000
--- a/tools/gyp/test/win/gyptest-link-additional-options.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure additional options are handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('additional-options.gyp', chdir=CHDIR)
-  test.build('additional-options.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-aslr.py b/tools/gyp/test/win/gyptest-link-aslr.py
deleted file mode 100644
index e765017..0000000
--- a/tools/gyp/test/win/gyptest-link-aslr.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure aslr setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('aslr.gyp', chdir=CHDIR)
-  test.build('aslr.gyp', test.ALL, chdir=CHDIR)
-
-  def HasDynamicBase(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    output = test.run_dumpbin('/headers', full_path)
-    return '                   Dynamic base' in output
-
-  # Default is to be on.
-  if not HasDynamicBase('test_aslr_default.exe'):
-    test.fail_test()
-  if HasDynamicBase('test_aslr_no.exe'):
-    test.fail_test()
-  if not HasDynamicBase('test_aslr_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
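The ASLR test relies on dumpbin /headers reporting 'Dynamic base' among the DLL characteristics. A rough standalone equivalent, assuming dumpbin.exe is on PATH (for example, in a Visual Studio developer prompt) and reusing one of the binary names from the test:

    import subprocess

    def has_dynamic_base(exe_path):
        # dumpbin /headers prints 'Dynamic base' when the image was linked
        # with /DYNAMICBASE (ASLR enabled).
        output = subprocess.check_output(['dumpbin', '/headers', exe_path])
        return b'Dynamic base' in output

    print(has_dynamic_base('test_aslr_yes.exe'))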
diff --git a/tools/gyp/test/win/gyptest-link-base-address.py b/tools/gyp/test/win/gyptest-link-base-address.py
deleted file mode 100644
index d58527a..0000000
--- a/tools/gyp/test/win/gyptest-link-base-address.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure the base address setting is extracted properly.
-"""
-
-import TestGyp
-
-import re
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('base-address.gyp', chdir=CHDIR)
-  test.build('base-address.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    return test.run_dumpbin('/headers', full_path)
-
-  # Extract the image base address from the headers output.
-  image_base_reg_ex = re.compile(r'.*\s+([0-9]+) image base.*', re.DOTALL)
-
-  exe_headers = GetHeaders('test_base_specified_exe.exe')
-  exe_match = image_base_reg_ex.match(exe_headers)
-
-  if not exe_match or not exe_match.group(1):
-    test.fail_test()
-  if exe_match.group(1) != '420000':
-    test.fail_test()
-
-  dll_headers = GetHeaders('test_base_specified_dll.dll')
-  dll_match = image_base_reg_ex.match(dll_headers)
-
-  if not dll_match or not dll_match.group(1):
-    test.fail_test()
-  if dll_match.group(1) != '10420000':
-    test.fail_test()
-
-  default_exe_headers = GetHeaders('test_base_default_exe.exe')
-  default_exe_match = image_base_reg_ex.match(default_exe_headers)
-
-  if not default_exe_match or not default_exe_match.group(1):
-    test.fail_test()
-  if default_exe_match.group(1) != '400000':
-    test.fail_test()
-
-  default_dll_headers = GetHeaders('test_base_default_dll.dll')
-  default_dll_match = image_base_reg_ex.match(default_dll_headers)
-
-  if not default_dll_match or not default_dll_match.group(1):
-    test.fail_test()
-  if default_dll_match.group(1) != '10000000':
-    test.fail_test()
-
-  test.pass_test()
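The base-address test pulls the image base out of the dumpbin header dump with a regular expression. The same extraction applied to an inline sample (the header text below is made up but shaped like dumpbin output), so the regex can be exercised without running dumpbin:

    import re

    sample_headers = """
    OPTIONAL HEADER VALUES
              400000 image base (00400000 to 0041FFFF)
    """

    image_base_re = re.compile(r'.*\s+([0-9]+) image base.*', re.DOTALL)
    match = image_base_re.match(sample_headers)
    print(match.group(1) if match else 'no image base found')  # -> 400000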
diff --git a/tools/gyp/test/win/gyptest-link-debug-info.py b/tools/gyp/test/win/gyptest-link-debug-info.py
deleted file mode 100644
index 33e8ac4..0000000
--- a/tools/gyp/test/win/gyptest-link-debug-info.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure debug info setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('debug-info.gyp', chdir=CHDIR)
-  test.build('debug-info.gyp', test.ALL, chdir=CHDIR)
-
-  suffix = '.exe.pdb' if test.format == 'ninja' else '.pdb'
-  test.built_file_must_not_exist('test_debug_off%s' % suffix, chdir=CHDIR)
-  test.built_file_must_exist('test_debug_on%s' % suffix, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-default-libs.py b/tools/gyp/test/win/gyptest-link-default-libs.py
deleted file mode 100644
index 5edf467..0000000
--- a/tools/gyp/test/win/gyptest-link-default-libs.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure we include the default libs.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('no-default-libs.gyp', chdir=CHDIR)
-  test.build('no-default-libs.gyp', test.ALL, chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-deffile.py b/tools/gyp/test/win/gyptest-link-deffile.py
deleted file mode 100644
index 94df874..0000000
--- a/tools/gyp/test/win/gyptest-link-deffile.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure a .def file is handled in the link.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-
-  # Multiple .def files don't make any sense; should fail at generate time.
-  test.run_gyp('deffile-multiple.gyp', chdir=CHDIR, stderr=None, status=1)
-
-  test.run_gyp('deffile.gyp', chdir=CHDIR)
-  test.build('deffile.gyp', test.ALL, chdir=CHDIR)
-
-  def HasExport(binary, export):
-    full_path = test.built_file_path(binary, chdir=CHDIR)
-    output = test.run_dumpbin('/exports', full_path)
-    return export in output
-
-  # Make sure we only have the export when the .def file is in use.
-
-  if HasExport('test_deffile_dll_notexported.dll', 'AnExportedFunction'):
-    test.fail_test()
-  if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
-    test.fail_test()
-
-  if HasExport('test_deffile_exe_notexported.exe', 'AnExportedFunction'):
-    test.fail_test()
-  if not HasExport('test_deffile_exe_ok.exe', 'AnExportedFunction'):
-    test.fail_test()
-
-  test.pass_test()
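The .def-file test checks for exported symbols with dumpbin /exports. A rough standalone version of that check, again assuming dumpbin.exe is on PATH and reusing names from the test:

    import subprocess

    def has_export(binary_path, symbol):
        # dumpbin /exports prints the export table; a substring check is
        # enough for a named export such as AnExportedFunction.
        output = subprocess.check_output(['dumpbin', '/exports', binary_path])
        return symbol.encode('ascii') in output

    print(has_export('test_deffile_dll_ok.dll', 'AnExportedFunction'))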
diff --git a/tools/gyp/test/win/gyptest-link-defrelink.py b/tools/gyp/test/win/gyptest-link-defrelink.py
deleted file mode 100644
index cec0ea1..0000000
--- a/tools/gyp/test/win/gyptest-link-defrelink.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure a relink is performed when a .def file is touched.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  target = 'test_deffile_dll_ok'
-  def_contents = test.read('linker-flags/deffile.def')
-
-  # This first build makes sure everything is up to date.
-  test.run_gyp('deffile.gyp', chdir=CHDIR)
-  test.build('deffile.gyp', target, chdir=CHDIR)
-  test.up_to_date('deffile.gyp', target, chdir=CHDIR)
-
-  def HasExport(binary, export):
-    full_path = test.built_file_path(binary, chdir=CHDIR)
-    output = test.run_dumpbin('/exports', full_path)
-    return export in output
-
-  # Verify that only one function is exported.
-  if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
-    test.fail_test()
-  if HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
-    test.fail_test()
-
-  # Add AnotherExportedFunction to the def file, then rebuild.  If it doesn't
-  # relink the DLL, then the subsequent check for AnotherExportedFunction will
-  # fail.
-  new_def_contents = def_contents + "\n    AnotherExportedFunction"
-  test.write('linker-flags/deffile.def', new_def_contents)
-  test.build('deffile.gyp', target, chdir=CHDIR)
-  test.up_to_date('deffile.gyp', target, chdir=CHDIR)
-
-  if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
-    test.fail_test()
-  if not HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-delay-load-dlls.py b/tools/gyp/test/win/gyptest-link-delay-load-dlls.py
deleted file mode 100644
index 3880247..0000000
--- a/tools/gyp/test/win/gyptest-link-delay-load-dlls.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure delay load setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('delay-load-dlls.gyp', chdir=CHDIR)
-  test.build('delay-load-dlls.gyp', test.ALL, chdir=CHDIR)
-
-  prefix = 'contains the following delay load imports:'
-  shell32_look_for = prefix + '\r\n\r\n    SHELL32.dll'
-
-  output = test.run_dumpbin(
-      '/all', test.built_file_path('test_dld_none.exe', chdir=CHDIR))
-  if prefix in output:
-    test.fail_test()
-
-  output = test.run_dumpbin(
-      '/all', test.built_file_path('test_dld_shell32.exe', chdir=CHDIR))
-  if shell32_look_for not in output:
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-embed-manifest.py b/tools/gyp/test/win/gyptest-link-embed-manifest.py
deleted file mode 100644
index 5b9d2c2..0000000
--- a/tools/gyp/test/win/gyptest-link-embed-manifest.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Yandex LLC. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure manifests are embedded in binaries properly. Handling of
-AdditionalManifestFiles is tested too.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  import pywintypes
-  import win32api
-  import winerror
-
-  RT_MANIFEST = 24
-
-  class LoadLibrary(object):
-    """Context manager for loading and releasing binaries in Windows.
-    Yields the handle of the binary loaded."""
-    def __init__(self, path):
-      self._path = path
-      self._handle = None
-
-    def __enter__(self):
-      self._handle = win32api.LoadLibrary(self._path)
-      return self._handle
-
-    def __exit__(self, type, value, traceback):
-      win32api.FreeLibrary(self._handle)
-
-
-  def extract_manifest(path, resource_name):
-    """Reads manifest from |path| and returns it as a string.
-    Returns None if there is no such manifest."""
-    with LoadLibrary(path) as handle:
-      try:
-        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
-      except pywintypes.error as error:
-        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
-          return None
-        else:
-          raise
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'linker-flags'
-  test.run_gyp('embed-manifest.gyp', chdir=CHDIR)
-  test.build('embed-manifest.gyp', test.ALL, chdir=CHDIR)
-
-  # The following binaries must contain a manifest embedded.
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'test_manifest_exe.exe', chdir=CHDIR), 1))
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'test_manifest_exe_inc.exe', chdir=CHDIR), 1))
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'test_manifest_dll.dll', chdir=CHDIR), 2))
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'test_manifest_dll_inc.dll', chdir=CHDIR), 2))
-
-  # Must contain the Win7 support GUID, but not the Vista one (from
-  # extra2.manifest).
-  test.fail_test(
-    '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
-    extract_manifest(test.built_file_path('test_manifest_extra1.exe',
-                                            chdir=CHDIR), 1))
-  test.fail_test(
-    'e2011457-1546-43c5-a5fe-008deee3d3f0' in
-    extract_manifest(test.built_file_path('test_manifest_extra1.exe',
-                                            chdir=CHDIR), 1))
-  # Must contain both.
-  test.fail_test(
-    '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
-    extract_manifest(test.built_file_path('test_manifest_extra2.exe',
-                                            chdir=CHDIR), 1))
-  test.fail_test(
-    'e2011457-1546-43c5-a5fe-008deee3d3f0' not in
-    extract_manifest(test.built_file_path('test_manifest_extra2.exe',
-                                            chdir=CHDIR), 1))
-
-  # Same as extra2, but using list syntax instead.
-  test.fail_test(
-    '35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in
-    extract_manifest(test.built_file_path('test_manifest_extra_list.exe',
-                                          chdir=CHDIR), 1))
-  test.fail_test(
-    'e2011457-1546-43c5-a5fe-008deee3d3f0' not in
-    extract_manifest(test.built_file_path('test_manifest_extra_list.exe',
-                                          chdir=CHDIR), 1))
-
-  # Test that incremental linking doesn't force manifest embedding.
-  test.fail_test(extract_manifest(test.built_file_path(
-    'test_manifest_exe_inc_no_embed.exe', chdir=CHDIR), 1))
-
-  test.pass_test()
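The embed-manifest test wraps win32api.LoadLibrary in a context manager and reads the RT_MANIFEST resource with win32api.LoadResource. The same idea condensed into one helper, assuming pywin32 is installed and using a placeholder binary path:

    import pywintypes
    import win32api
    import winerror

    RT_MANIFEST = 24

    def read_embedded_manifest(path, resource_id=1):
        # Resource id 1 is used for executables and 2 for DLLs in these tests.
        # Returns None when the binary has no embedded manifest.
        handle = win32api.LoadLibrary(path)
        try:
            return win32api.LoadResource(handle, RT_MANIFEST, resource_id)
        except pywintypes.error as error:
            if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
                return None
            raise
        finally:
            win32api.FreeLibrary(handle)

    print(read_embedded_manifest('app.exe'))  # 'app.exe' is a placeholder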
diff --git a/tools/gyp/test/win/gyptest-link-enable-uac.py b/tools/gyp/test/win/gyptest-link-enable-uac.py
deleted file mode 100644
index 131e07e..0000000
--- a/tools/gyp/test/win/gyptest-link-enable-uac.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that embedding UAC information into the manifest works.
-"""
-
-import TestGyp
-
-import sys
-from xml.dom.minidom import parseString
-
-if sys.platform == 'win32':
-  import pywintypes
-  import win32api
-  import winerror
-
-  RT_MANIFEST = 24
-
-  class LoadLibrary(object):
-    """Context manager for loading and releasing binaries in Windows.
-    Yields the handle of the binary loaded."""
-    def __init__(self, path):
-      self._path = path
-      self._handle = None
-
-    def __enter__(self):
-      self._handle = win32api.LoadLibrary(self._path)
-      return self._handle
-
-    def __exit__(self, type, value, traceback):
-      win32api.FreeLibrary(self._handle)
-
-
-  def extract_manifest(path, resource_name):
-    """Reads manifest from |path| and returns it as a string.
-    Returns None if there is no such manifest."""
-    with LoadLibrary(path) as handle:
-      try:
-        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
-      except pywintypes.error as error:
-        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
-          return None
-        else:
-          raise
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'linker-flags'
-  test.run_gyp('enable-uac.gyp', chdir=CHDIR)
-  test.build('enable-uac.gyp', test.ALL, chdir=CHDIR)
-
-  # The following binaries must contain a manifest embedded.
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'enable_uac.exe', chdir=CHDIR), 1))
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'enable_uac_no.exe', chdir=CHDIR), 1))
-  test.fail_test(not extract_manifest(test.built_file_path(
-    'enable_uac_admin.exe', chdir=CHDIR), 1))
-
-  # Verify that <requestedExecutionLevel level="asInvoker" uiAccess="false" />
-  # is present.
-  manifest = parseString(extract_manifest(
-      test.built_file_path('enable_uac.exe', chdir=CHDIR), 1))
-  execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
-  test.fail_test(len(execution_level) != 1)
-  execution_level = execution_level[0].attributes
-  test.fail_test(not (
-      execution_level.has_key('level') and
-      execution_level.has_key('uiAccess') and
-      execution_level['level'].nodeValue == 'asInvoker' and
-      execution_level['uiAccess'].nodeValue == 'false'))
-
-  # Verify that <requestedExecutionLevel> is not in the manifest.
-  manifest = parseString(extract_manifest(
-      test.built_file_path('enable_uac_no.exe', chdir=CHDIR), 1))
-  execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
-  test.fail_test(len(execution_level) != 0)
-
-  # Verify that <requestedExecutionLevel level="requireAdministrator"
-  # uiAccess="true" /> is present.
-  manifest = parseString(extract_manifest(
-      test.built_file_path('enable_uac_admin.exe', chdir=CHDIR), 1))
-  execution_level = manifest.getElementsByTagName('requestedExecutionLevel')
-  test.fail_test(len(execution_level) != 1)
-  execution_level = execution_level[0].attributes
-  test.fail_test(not (
-      execution_level.has_key('level') and
-      execution_level.has_key('uiAccess') and
-      execution_level['level'].nodeValue == 'requireAdministrator' and
-      execution_level['uiAccess'].nodeValue == 'true'))
-
-  test.pass_test()
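The UAC test parses the extracted manifest with xml.dom.minidom and inspects the requestedExecutionLevel element. The same check against an inline, simplified manifest fragment (real manifests carry XML namespaces that are omitted here):

    from xml.dom.minidom import parseString

    sample = (
        '<assembly><trustInfo><security><requestedPrivileges>'
        '<requestedExecutionLevel level="asInvoker" uiAccess="false"/>'
        '</requestedPrivileges></security></trustInfo></assembly>'
    )

    doc = parseString(sample)
    levels = doc.getElementsByTagName('requestedExecutionLevel')
    attrs = levels[0].attributes
    # Expect exactly one element with level="asInvoker" and uiAccess="false".
    print(attrs['level'].nodeValue, attrs['uiAccess'].nodeValue)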
diff --git a/tools/gyp/test/win/gyptest-link-enable-winrt-app-revision.py b/tools/gyp/test/win/gyptest-link-enable-winrt-app-revision.py
deleted file mode 100644
index e5c5a71..0000000
--- a/tools/gyp/test/win/gyptest-link-enable-winrt-app-revision.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure msvs_application_type_revision works correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-import struct
-
-CHDIR = 'winrt-app-type-revision'
-
-print 'This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466'
-sys.exit(0)
-
-if (sys.platform == 'win32' and
-    int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
-  test = TestGyp.TestGyp(formats=['msvs'])
-
-  test.run_gyp('winrt-app-type-revision.gyp', chdir=CHDIR)
-
-  test.build('winrt-app-type-revision.gyp', 'enable_winrt_81_revision_dll',
-             chdir=CHDIR)
-
-  # Revision is set to 8.2 which is invalid for 2013 projects so compilation
-  # must fail.
-  test.build('winrt-app-type-revision.gyp', 'enable_winrt_82_revision_dll',
-             chdir=CHDIR, status=1)
-
-  # Revision is set to an invalid value for 2013 projects so compilation
-  # must fail.
-  test.build('winrt-app-type-revision.gyp', 'enable_winrt_invalid_revision_dll',
-             chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py b/tools/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py
deleted file mode 100644
index cd9244e..0000000
--- a/tools/gyp/test/win/gyptest-link-enable-winrt-target-platform-version.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure msvs_target_platform_version works correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-import struct
-
-CHDIR = 'winrt-target-platform-version'
-
-print 'This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466'
-sys.exit(0)
-
-if (sys.platform == 'win32' and
-    int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2015):
-  test = TestGyp.TestGyp(formats=['msvs'])
-
-  test.run_gyp('winrt-target-platform-version.gyp', chdir=CHDIR)
-
-  test.build('winrt-target-platform-version.gyp',
-             'enable_winrt_10_platversion_dll', chdir=CHDIR)
-
-  # Target Platform without Minimum Target Platform version defaults to a valid
-  # Target Platform and compiles.
-  test.build('winrt-target-platform-version.gyp',
-             'enable_winrt_10_platversion_nominver_dll', chdir=CHDIR)
-
-  # Target Platform is set to 9.0 which is invalid for 2015 projects so
-  # compilation must fail.
-  test.build('winrt-target-platform-version.gyp',
-             'enable_winrt_9_platversion_dll', chdir=CHDIR, status=1)
-
-  # Missing Target Platform for 2015 projects must fail.
-  test.build('winrt-target-platform-version.gyp',
-             'enable_winrt_missing_platversion_dll', chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-enable-winrt.py b/tools/gyp/test/win/gyptest-link-enable-winrt.py
deleted file mode 100644
index 283863c..0000000
--- a/tools/gyp/test/win/gyptest-link-enable-winrt.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure msvs_enable_winrt works correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-import struct
-
-CHDIR = 'enable-winrt'
-
-print 'This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466'
-sys.exit(0)
-
-if (sys.platform == 'win32' and
-    int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2013):
-  test = TestGyp.TestGyp(formats=['msvs'])
-
-  test.run_gyp('enable-winrt.gyp', chdir=CHDIR)
-
-  test.build('enable-winrt.gyp', 'enable_winrt_dll', chdir=CHDIR)
-
-  test.build('enable-winrt.gyp', 'enable_winrt_missing_dll', chdir=CHDIR,
-             status=1)
-
-  test.build('enable-winrt.gyp', 'enable_winrt_winphone_dll', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-entrypointsymbol.py b/tools/gyp/test/win/gyptest-link-entrypointsymbol.py
deleted file mode 100644
index e88174a..0000000
--- a/tools/gyp/test/win/gyptest-link-entrypointsymbol.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure entrypointsymbol setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('entrypointsymbol.gyp', chdir=CHDIR)
-
-  test.build('entrypointsymbol.gyp', 'test_ok', chdir=CHDIR)
-  test.build('entrypointsymbol.gyp', 'test_fail', chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-fixed-base.py b/tools/gyp/test/win/gyptest-link-fixed-base.py
deleted file mode 100644
index 725a870..0000000
--- a/tools/gyp/test/win/gyptest-link-fixed-base.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure fixed base setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('fixed-base.gyp', chdir=CHDIR)
-  test.build('fixed-base.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    return test.run_dumpbin('/headers', full_path)
-
-  # For an exe the default is fixed; for a dll it is not.
-  if 'Relocations stripped' not in GetHeaders('test_fixed_default_exe.exe'):
-    test.fail_test()
-  if 'Relocations stripped' in GetHeaders('test_fixed_default_dll.dll'):
-    test.fail_test()
-
-  # Explicitly not fixed.
-  if 'Relocations stripped' in GetHeaders('test_fixed_no.exe'):
-    test.fail_test()
-
-  # Explicitly fixed.
-  if 'Relocations stripped' not in GetHeaders('test_fixed_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-force-symbol-reference.py b/tools/gyp/test/win/gyptest-link-force-symbol-reference.py
deleted file mode 100644
index 235e94f..0000000
--- a/tools/gyp/test/win/gyptest-link-force-symbol-reference.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure ForceSymbolReference is translated properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('force-symbol-reference.gyp', chdir=CHDIR)
-  test.build('force-symbol-reference.gyp', test.ALL, chdir=CHDIR)
-
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_force_reference.exe', chdir=CHDIR))
-  if '?x@@YAHXZ:' not in output or '?y@@YAHXZ:' not in output:
-    test.fail_test()
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-generate-manifest.py b/tools/gyp/test/win/gyptest-link-generate-manifest.py
deleted file mode 100644
index 77c9228..0000000
--- a/tools/gyp/test/win/gyptest-link-generate-manifest.py
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure we generate a manifest file when linking binaries, including
-handling AdditionalManifestFiles.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  import pywintypes
-  import win32api
-  import winerror
-
-  RT_MANIFEST = 24
-
-  class LoadLibrary(object):
-    """Context manager for loading and releasing binaries in Windows.
-    Yields the handle of the binary loaded."""
-    def __init__(self, path):
-      self._path = path
-      self._handle = None
-
-    def __enter__(self):
-      self._handle = win32api.LoadLibrary(self._path)
-      return self._handle
-
-    def __exit__(self, type, value, traceback):
-      win32api.FreeLibrary(self._handle)
-
-  def extract_manifest(path, resource_name):
-    """Reads manifest from |path| and returns it as a string.
-    Returns None if there is no such manifest."""
-    with LoadLibrary(path) as handle:
-      try:
-        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
-      except pywintypes.error as error:
-        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
-          return None
-        else:
-          raise
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
-  test.build('generate-manifest.gyp', test.ALL, chdir=CHDIR)
-
-  # Make sure that generation of .generated.manifest does not cause a relink.
-  test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
-  test.up_to_date('generate-manifest.gyp', test.ALL, chdir=CHDIR)
-
-  def test_manifest(filename, generate_manifest, embedded_manifest,
-                    extra_manifest):
-    exe_file = test.built_file_path(filename, chdir=CHDIR)
-    if not generate_manifest:
-      test.must_not_exist(exe_file + '.manifest')
-      manifest = extract_manifest(exe_file, 1)
-      test.fail_test(manifest)
-      return
-    if embedded_manifest:
-      manifest = extract_manifest(exe_file, 1)
-      test.fail_test(not manifest)
-    else:
-      test.must_exist(exe_file + '.manifest')
-      manifest = test.read(exe_file + '.manifest')
-      test.fail_test(not manifest)
-      test.fail_test(extract_manifest(exe_file, 1))
-    if generate_manifest:
-      test.must_contain_any_line(manifest, 'requestedExecutionLevel')
-    if extra_manifest:
-      test.must_contain_any_line(manifest,
-                                 '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
-      test.must_contain_any_line(manifest,
-                                 'e2011457-1546-43c5-a5fe-008deee3d3f0')
-
-  test_manifest('test_generate_manifest_true.exe',
-                generate_manifest=True,
-                embedded_manifest=False,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_false.exe',
-                generate_manifest=False,
-                embedded_manifest=False,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_default.exe',
-                generate_manifest=True,
-                embedded_manifest=False,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_true_as_embedded.exe',
-                generate_manifest=True,
-                embedded_manifest=True,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_false_as_embedded.exe',
-                generate_manifest=False,
-                embedded_manifest=True,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_default_as_embedded.exe',
-                generate_manifest=True,
-                embedded_manifest=True,
-                extra_manifest=False)
-  test_manifest('test_generate_manifest_true_with_extra_manifest.exe',
-                generate_manifest=True,
-                embedded_manifest=False,
-                extra_manifest=True)
-  test_manifest('test_generate_manifest_false_with_extra_manifest.exe',
-                generate_manifest=False,
-                embedded_manifest=False,
-                extra_manifest=True)
-  test_manifest('test_generate_manifest_true_with_extra_manifest_list.exe',
-                generate_manifest=True,
-                embedded_manifest=False,
-                extra_manifest=True)
-  test_manifest('test_generate_manifest_false_with_extra_manifest_list.exe',
-                generate_manifest=False,
-                embedded_manifest=False,
-                extra_manifest=True)
-  test_manifest('test_generate_manifest_default_embed_default.exe',
-                generate_manifest=True,
-                embedded_manifest=True,
-                extra_manifest=False)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-incremental.py b/tools/gyp/test/win/gyptest-link-incremental.py
deleted file mode 100644
index e7184e1..0000000
--- a/tools/gyp/test/win/gyptest-link-incremental.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure incremental linking setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('incremental.gyp', chdir=CHDIR)
-  test.build('incremental.gyp', test.ALL, chdir=CHDIR)
-
-  def HasILTTables(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    output = test.run_dumpbin('/disasm', full_path)
-    return '@ILT+' in output
-
-  # Default or unset is to be on.
-  if not HasILTTables('test_incremental_unset.exe'):
-    test.fail_test()
-  if not HasILTTables('test_incremental_default.exe'):
-    test.fail_test()
-  if HasILTTables('test_incremental_no.exe'):
-    test.fail_test()
-  if not HasILTTables('test_incremental_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-large-address-aware.py b/tools/gyp/test/win/gyptest-link-large-address-aware.py
deleted file mode 100644
index ea433f2..0000000
--- a/tools/gyp/test/win/gyptest-link-large-address-aware.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure largeaddressaware setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('large-address-aware.gyp', chdir=CHDIR)
-  test.build('large-address-aware.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
-
-  MARKER = 'Application can handle large (>2GB) addresses'
-
-  # Explicitly off.
-  if MARKER in GetHeaders('test_large_address_aware_no.exe'):
-    test.fail_test()
-
-  # Explicitly on.
-  if MARKER not in GetHeaders('test_large_address_aware_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-large-pdb.py b/tools/gyp/test/win/gyptest-link-large-pdb.py
deleted file mode 100644
index 4604745..0000000
--- a/tools/gyp/test/win/gyptest-link-large-pdb.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure msvs_large_pdb works correctly.
-"""
-
-import TestGyp
-
-import struct
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-
-CHDIR = 'large-pdb'
-
-
-def CheckImageAndPdb(test, image_basename, expected_page_size,
-                     pdb_basename=None):
-  if not pdb_basename:
-    pdb_basename = image_basename + '.pdb'
-  test.built_file_must_exist(image_basename, chdir=CHDIR)
-  test.built_file_must_exist(pdb_basename, chdir=CHDIR)
-
-  # We expect the PDB to have the given page size. For full details of the
-  # header look here: https://code.google.com/p/pdbparser/wiki/MSF_Format
-  # We read the little-endian 4-byte unsigned integer at position 32 of the
-  # file.
-  pdb_path = test.built_file_path(pdb_basename, chdir=CHDIR)
-  pdb_file = open(pdb_path, 'rb')
-  pdb_file.seek(32, 0)
-  page_size = struct.unpack('<I', pdb_file.read(4))[0]
-  if page_size != expected_page_size:
-    print "Expected page size of %d, got %d for PDB file `%s'." % (
-        expected_page_size, page_size, pdb_path)
-
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  test.run_gyp('large-pdb.gyp', chdir=CHDIR)
-
-  test.build('large-pdb.gyp', 'large_pdb_exe', chdir=CHDIR)
-  CheckImageAndPdb(test, 'large_pdb_exe.exe', 4096)
-
-  test.build('large-pdb.gyp', 'small_pdb_exe', chdir=CHDIR)
-  CheckImageAndPdb(test, 'small_pdb_exe.exe', 1024)
-
-  test.build('large-pdb.gyp', 'large_pdb_dll', chdir=CHDIR)
-  CheckImageAndPdb(test, 'large_pdb_dll.dll', 4096)
-
-  test.build('large-pdb.gyp', 'small_pdb_dll', chdir=CHDIR)
-  CheckImageAndPdb(test, 'small_pdb_dll.dll', 1024)
-
-  test.build('large-pdb.gyp', 'large_pdb_implicit_exe', chdir=CHDIR)
-  CheckImageAndPdb(test, 'large_pdb_implicit_exe.exe', 4096)
-
-  # This target has a different PDB name because it uses an
-  # 'msvs_large_pdb_path' variable.
-  test.build('large-pdb.gyp', 'large_pdb_variable_exe', chdir=CHDIR)
-  CheckImageAndPdb(test, 'large_pdb_variable_exe.exe', 4096,
-                   pdb_basename='foo.pdb')
-
-  # This target has a different output name because it uses 'product_name'.
-  test.build('large-pdb.gyp', 'large_pdb_product_exe', chdir=CHDIR)
-  CheckImageAndPdb(test, 'bar.exe', 4096)
-
-  test.pass_test()
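The large-pdb test verifies msvs_large_pdb by reading the MSF page size straight out of the .pdb header: a little-endian 4-byte unsigned integer at byte offset 32. A standalone sketch of that read, using the 'foo.pdb' name from the test as a placeholder:

    import struct

    def pdb_page_size(pdb_path):
        # The MSF header stores the page size at byte offset 32 as a
        # little-endian 4-byte unsigned integer.
        with open(pdb_path, 'rb') as pdb_file:
            pdb_file.seek(32)
            return struct.unpack('<I', pdb_file.read(4))[0]

    print(pdb_page_size('foo.pdb'))  # expect 4096 when msvs_large_pdb is set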
diff --git a/tools/gyp/test/win/gyptest-link-library-adjust.py b/tools/gyp/test/win/gyptest-link-library-adjust.py
deleted file mode 100644
index 71d1c09..0000000
--- a/tools/gyp/test/win/gyptest-link-library-adjust.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure link_settings containing -lblah.lib is remapped to just blah.lib.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('library-adjust.gyp', chdir=CHDIR)
-  test.build('library-adjust.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-library-directories.py b/tools/gyp/test/win/gyptest-link-library-directories.py
deleted file mode 100644
index 8308e14..0000000
--- a/tools/gyp/test/win/gyptest-link-library-directories.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure libpath is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-
-  # Build subdirectory library.
-  test.run_gyp('subdir/library.gyp', chdir=CHDIR)
-  test.build('subdir/library.gyp', test.ALL, chdir=CHDIR)
-
-  # And then try to link the main project against the library using only
-  # LIBPATH to find it.
-  test.run_gyp('library-directories.gyp', chdir=CHDIR)
-
-  # Without additional paths specified, should fail.
-  test.build('library-directories.gyp', 'test_libdirs_none', chdir=CHDIR,
-      status=1)
-
-  # With the additional library directory, should pass.
-  test.build('library-directories.gyp', 'test_libdirs_with', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-ltcg.py b/tools/gyp/test/win/gyptest-link-ltcg.py
deleted file mode 100644
index 5271e09..0000000
--- a/tools/gyp/test/win/gyptest-link-ltcg.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure LTCG is working properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('ltcg.gyp', chdir=CHDIR)
-
-  # Here we expect LTCG to inline functions across compilation units.
-  # Note: This marker is embedded in 'inline_test_main.cc'
-  INLINE_MARKER = '==== inlined ===='
-
-  # link.exe generates the following lines when LTCG is enabled.
-  # Note: Future link.exe may or may not generate them. Update as needed.
-  LTCG_LINKER_MESSAGES = ['Generating code', 'Finished generating code']
-
-  # test 'LinkTimeCodeGenerationOptionDefault'
-  test.build('ltcg.gyp', 'test_ltcg_off', chdir=CHDIR)
-  test.run_built_executable('test_ltcg_off', chdir=CHDIR)
-  test.must_not_contain_any_line(test.stdout(), [INLINE_MARKER])
-
-  # test 'LinkTimeCodeGenerationOptionUse'
-  test.build('ltcg.gyp', 'test_ltcg_on', chdir=CHDIR)
-  if test.format == 'ninja':
-    # Make sure ninja win_tool.py filters out noisy lines.
-    test.must_not_contain_any_line(test.stdout(), LTCG_LINKER_MESSAGES)
-  elif test.format == 'msvs':
-    test.must_contain_any_line(test.stdout(), LTCG_LINKER_MESSAGES)
-  test.run_built_executable('test_ltcg_on', chdir=CHDIR)
-  test.must_contain_any_line(test.stdout(), [INLINE_MARKER])
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-mapfile.py b/tools/gyp/test/win/gyptest-link-mapfile.py
deleted file mode 100644
index 00c1dea..0000000
--- a/tools/gyp/test/win/gyptest-link-mapfile.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure mapfile settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('mapfile.gyp', chdir=CHDIR)
-  test.build('mapfile.gyp', test.ALL, chdir=CHDIR)
-
-  map_file = test.built_file_path('test_mapfile_unset.map', chdir=CHDIR)
-  test.must_not_exist(map_file)
-
-  map_file = test.built_file_path('test_mapfile_generate.map', chdir=CHDIR)
-  test.must_exist(map_file)
-  test.must_contain(map_file, '?AnExportedFunction@@YAXXZ')
-  test.must_not_contain(map_file, 'void __cdecl AnExportedFunction(void)')
-
-  map_file = test.built_file_path('test_mapfile_generate_exports.map',
-          chdir=CHDIR)
-  test.must_exist(map_file)
-  test.must_contain(map_file, 'void __cdecl AnExportedFunction(void)')
-
-  map_file = test.built_file_path('test_mapfile_generate_filename.map',
-          chdir=CHDIR)
-  test.must_not_exist(map_file)
-
-  map_file = test.built_file_path('custom_file_name.map', chdir=CHDIR)
-  test.must_exist(map_file)
-  test.must_contain(map_file, '?AnExportedFunction@@YAXXZ')
-  test.must_not_contain(map_file, 'void __cdecl AnExportedFunction(void)')
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-nodefaultlib.py b/tools/gyp/test/win/gyptest-link-nodefaultlib.py
deleted file mode 100644
index f00760b..0000000
--- a/tools/gyp/test/win/gyptest-link-nodefaultlib.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure nodefaultlib setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('nodefaultlib.gyp', chdir=CHDIR)
-
-  test.build('nodefaultlib.gyp', 'test_ok', chdir=CHDIR)
-  test.build('nodefaultlib.gyp', 'test_fail', chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-noimportlib.py b/tools/gyp/test/win/gyptest-link-noimportlib.py
deleted file mode 100644
index d12e0ad..0000000
--- a/tools/gyp/test/win/gyptest-link-noimportlib.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure that the (custom) NoImportLibrary flag is handled correctly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'importlib'
-  test.run_gyp('noimplib.gyp', chdir=CHDIR)
-  test.build('noimplib.gyp', test.ALL, chdir=CHDIR)
-
-  # The target has an entry point, but no exports. Ordinarily, ninja expects
-  # all DLLs to export some symbols (with the exception of /NOENTRY resource-
-  # only DLLs). When the NoImportLibrary flag is set, this is suppressed. If
-  # this is not working correctly, ninja keeps expecting a .lib that is never
-  # generated, so the build will not be up to date.
-  test.up_to_date('noimplib.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-nxcompat.py b/tools/gyp/test/win/gyptest-link-nxcompat.py
deleted file mode 100644
index 6600743..0000000
--- a/tools/gyp/test/win/gyptest-link-nxcompat.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure nxcompat setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('nxcompat.gyp', chdir=CHDIR)
-  test.build('nxcompat.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
-
-  # NXCOMPAT is on by default.
-  if 'NX compatible' not in GetHeaders('test_nxcompat_default.exe'):
-    test.fail_test()
-
-  # Explicitly off, should not be marked NX compatible.
-  if 'NX compatible' in GetHeaders('test_nxcompat_no.exe'):
-    test.fail_test()
-
-  # Explicitly on.
-  if 'NX compatible' not in GetHeaders('test_nxcompat_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-opt-icf.py b/tools/gyp/test/win/gyptest-link-opt-icf.py
deleted file mode 100644
index 3c48ef6..0000000
--- a/tools/gyp/test/win/gyptest-link-opt-icf.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure comdat folding optimization setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('opt-icf.gyp', chdir=CHDIR)
-  test.build('opt-icf.gyp', chdir=CHDIR)
-
-  # We're specifying /DEBUG so the default is to not merge identical
-  # functions, so all of the similar_functions should be preserved.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_opticf_default.exe', chdir=CHDIR))
-  if output.count('similar_function') != 6: # 3 definitions, 3 calls.
-    test.fail_test()
-
-  # Explicitly off, all functions preserved separately.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_opticf_no.exe', chdir=CHDIR))
-  if output.count('similar_function') != 6: # 3 definitions, 3 calls.
-    test.fail_test()
-
-  # Explicitly on, all but one removed.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_opticf_yes.exe', chdir=CHDIR))
-  if output.count('similar_function') != 4: # 1 definition, 3 calls.
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-opt-ref.py b/tools/gyp/test/win/gyptest-link-opt-ref.py
deleted file mode 100644
index 586b7af..0000000
--- a/tools/gyp/test/win/gyptest-link-opt-ref.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure reference optimization setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('opt-ref.gyp', chdir=CHDIR)
-  test.build('opt-ref.gyp', chdir=CHDIR)
-
-  # We're specifying /DEBUG so the default is to not remove unused functions.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_optref_default.exe', chdir=CHDIR))
-  if 'unused_function' not in output:
-    test.fail_test()
-
-  # Explicitly off, unused_function preserved.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_optref_no.exe', chdir=CHDIR))
-  if 'unused_function' not in output:
-    test.fail_test()
-
-  # Explicitly on, should be removed.
-  output = test.run_dumpbin(
-      '/disasm', test.built_file_path('test_optref_yes.exe', chdir=CHDIR))
-  if 'unused_function' in output:
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-ordering.py b/tools/gyp/test/win/gyptest-link-ordering.py
deleted file mode 100644
index a2527fa..0000000
--- a/tools/gyp/test/win/gyptest-link-ordering.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure the link order of object files is the same between msvs and ninja.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('link-ordering.gyp', chdir=CHDIR)
-  test.build('link-ordering.gyp', test.ALL, chdir=CHDIR)
-
-  def GetDisasm(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    # Get disassembly and drop int3 padding between functions.
-    return '\n'.join(
-        x for x in test.run_dumpbin('/disasm', full_path).splitlines()
-                   if 'CC' not in x)
-
-  # This is the full dump that we expect. The source files in the .gyp match
-  # this order which is what determines the ordering in the binary.
-
-  expected_disasm_basic = '''
-_mainCRTStartup:
-  00401000: B8 05 00 00 00     mov         eax,5
-  00401005: C3                 ret
-?z@@YAHXZ:
-  00401010: B8 03 00 00 00     mov         eax,3
-  00401015: C3                 ret
-?x@@YAHXZ:
-  00401020: B8 01 00 00 00     mov         eax,1
-  00401025: C3                 ret
-?y@@YAHXZ:
-  00401030: B8 02 00 00 00     mov         eax,2
-  00401035: C3                 ret
-_main:
-  00401040: 33 C0              xor         eax,eax
-  00401042: C3                 ret
-'''
-
-  if expected_disasm_basic not in GetDisasm('test_ordering_exe.exe'):
-    print GetDisasm('test_ordering_exe.exe')
-    test.fail_test()
-
-  # Similar to above. The VS generator handles subdirectories differently.
-
-  expected_disasm_subdirs = '''
-_mainCRTStartup:
-  00401000: B8 05 00 00 00     mov         eax,5
-  00401005: C3                 ret
-_main:
-  00401010: 33 C0              xor         eax,eax
-  00401012: C3                 ret
-?y@@YAHXZ:
-  00401020: B8 02 00 00 00     mov         eax,2
-  00401025: C3                 ret
-?z@@YAHXZ:
-  00401030: B8 03 00 00 00     mov         eax,3
-  00401035: C3                 ret
-'''
-
-  if expected_disasm_subdirs not in GetDisasm('test_ordering_subdirs.exe'):
-    print GetDisasm('test_ordering_subdirs.exe')
-    test.fail_test()
-
-  # Similar, but with directories mixed into folders (crt and main at the same
-  # level, but with a subdir in the middle).
-
-  expected_disasm_subdirs_mixed = '''
-_mainCRTStartup:
-  00401000: B8 05 00 00 00     mov         eax,5
-  00401005: C3                 ret
-?x@@YAHXZ:
-  00401010: B8 01 00 00 00     mov         eax,1
-  00401015: C3                 ret
-_main:
-  00401020: 33 C0              xor         eax,eax
-  00401022: C3                 ret
-?z@@YAHXZ:
-  00401030: B8 03 00 00 00     mov         eax,3
-  00401035: C3                 ret
-?y@@YAHXZ:
-  00401040: B8 02 00 00 00     mov         eax,2
-  00401045: C3                 ret
-'''
-
-  if (expected_disasm_subdirs_mixed not in
-      GetDisasm('test_ordering_subdirs_mixed.exe')):
-    print GetDisasm('test_ordering_subdirs_mixed.exe')
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-outputfile.py b/tools/gyp/test/win/gyptest-link-outputfile.py
deleted file mode 100644
index b98cdff..0000000
--- a/tools/gyp/test/win/gyptest-link-outputfile.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure linker OutputFile setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('outputfile.gyp', chdir=CHDIR)
-  test.build('outputfile.gyp', test.ALL, chdir=CHDIR)
-
-  test.built_file_must_exist('blorp.exe', chdir=CHDIR)
-  test.built_file_must_exist('blorp.dll', chdir=CHDIR)
-  test.built_file_must_exist('subdir/blorp.exe', chdir=CHDIR)
-  test.built_file_must_exist('blorp.lib', chdir=CHDIR)
-  test.built_file_must_exist('subdir/blorp.lib', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-pdb-no-output.py b/tools/gyp/test/win/gyptest-link-pdb-no-output.py
deleted file mode 100644
index 6da0aea..0000000
--- a/tools/gyp/test/win/gyptest-link-pdb-no-output.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Ensure that when debug information is not output, a pdb is not expected.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-  CHDIR = 'linker-flags'
-  test.run_gyp('pdb-output.gyp', chdir=CHDIR)
-  test.build('pdb-output.gyp', 'test_pdb_output_disabled', chdir=CHDIR)
-  # Make sure that the build doesn't expect a PDB to be generated when there
-  # will be none.
-  test.up_to_date('pdb-output.gyp', 'test_pdb_output_disabled', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-pdb-output.py b/tools/gyp/test/win/gyptest-link-pdb-output.py
deleted file mode 100644
index 27245f7..0000000
--- a/tools/gyp/test/win/gyptest-link-pdb-output.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Ensure that ninja includes the .pdb as an output file from linking.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-  CHDIR = 'linker-flags'
-  test.run_gyp('pdb-output.gyp', chdir=CHDIR)
-  # Note, building the pdbs rather than ALL or gyp target.
-  test.build('pdb-output.gyp', 'output_exe.pdb', chdir=CHDIR)
-  test.build('pdb-output.gyp', 'output_dll.pdb', chdir=CHDIR)
-
-  def FindFile(pdb):
-    full_path = test.built_file_path(pdb, chdir=CHDIR)
-    return os.path.isfile(full_path)
-
-  if not FindFile('output_exe.pdb'):
-    test.fail_test()
-  if not FindFile('output_dll.pdb'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-pdb.py b/tools/gyp/test/win/gyptest-link-pdb.py
deleted file mode 100644
index 26d744d..0000000
--- a/tools/gyp/test/win/gyptest-link-pdb.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that the 'ProgramDatabaseFile' attribute in VCLinker is extracted
-properly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'linker-flags'
-  test.run_gyp('program-database.gyp', chdir=CHDIR)
-  test.build('program-database.gyp', test.ALL, chdir=CHDIR)
-
-  def FindFile(pdb):
-    full_path = test.built_file_path(pdb, chdir=CHDIR)
-    return os.path.isfile(full_path)
-
-  # Verify the specified PDB is created when ProgramDatabaseFile
-  # is provided.
-  if not FindFile('name_outdir.pdb'):
-    test.fail_test()
-  if not FindFile('name_proddir.pdb'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-pgo.py b/tools/gyp/test/win/gyptest-link-pgo.py
deleted file mode 100644
index d742047..0000000
--- a/tools/gyp/test/win/gyptest-link-pgo.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure PGO is working properly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('pgo.gyp', chdir=CHDIR)
-
-  def IsPGOAvailable():
-    """Returns true if the Visual Studio available here supports PGO."""
-    test.build('pgo.gyp', 'gen_linker_option', chdir=CHDIR)
-    tmpfile = test.read(test.built_file_path('linker_options.txt', chdir=CHDIR))
-    return any('PGOPTIMIZE' in line for line in tmpfile.splitlines())
-
-  # Test generated build files look fine.
-  if test.format == 'ninja':
-    ninja = test.built_file_path('obj/test_pgo_instrument.ninja', chdir=CHDIR)
-    test.must_contain(ninja, '/LTCG:PGINSTRUMENT')
-    test.must_contain(ninja, 'test_pgo.pgd')
-    ninja = test.built_file_path('obj/test_pgo_optimize.ninja', chdir=CHDIR)
-    test.must_contain(ninja, '/LTCG:PGOPTIMIZE')
-    test.must_contain(ninja, 'test_pgo.pgd')
-    ninja = test.built_file_path('obj/test_pgo_update.ninja', chdir=CHDIR)
-    test.must_contain(ninja, '/LTCG:PGUPDATE')
-    test.must_contain(ninja, 'test_pgo.pgd')
-  elif test.format == 'msvs':
-    LTCG_FORMAT = '<LinkTimeCodeGeneration>%s</LinkTimeCodeGeneration>'
-    vcproj = test.workpath('linker-flags/test_pgo_instrument.vcxproj')
-    test.must_contain(vcproj, LTCG_FORMAT % 'PGInstrument')
-    test.must_contain(vcproj, 'test_pgo.pgd')
-    vcproj = test.workpath('linker-flags/test_pgo_optimize.vcxproj')
-    test.must_contain(vcproj, LTCG_FORMAT % 'PGOptimization')
-    test.must_contain(vcproj, 'test_pgo.pgd')
-    vcproj = test.workpath('linker-flags/test_pgo_update.vcxproj')
-    test.must_contain(vcproj, LTCG_FORMAT % 'PGUpdate')
-    test.must_contain(vcproj, 'test_pgo.pgd')
-
-  # When PGO is available, try building binaries with PGO.
-  if IsPGOAvailable():
-    pgd_path = test.built_file_path('test_pgo.pgd', chdir=CHDIR)
-
-    # Test if 'PGInstrument' generates PGD (Profile-Guided Database) file.
-    if os.path.exists(pgd_path):
-      test.unlink(pgd_path)
-    test.must_not_exist(pgd_path)
-    test.build('pgo.gyp', 'test_pgo_instrument', chdir=CHDIR)
-    test.must_exist(pgd_path)
-
-    # Test if 'PGOptimize' works well
-    test.build('pgo.gyp', 'test_pgo_optimize', chdir=CHDIR)
-    test.must_contain_any_line(test.stdout(), ['profiled functions'])
-
-    # Test if 'PGUpdate' works well
-    test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
-    # With 'PGUpdate', linker should not complain that sources are changed after
-    # the previous training run.
-    test.touch(test.workpath('linker-flags/inline_test_main.cc'))
-    test.unlink(test.built_file_path('test_pgo_update.exe', chdir=CHDIR))
-    test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
-    test.must_contain_any_line(test.stdout(), ['profiled functions'])
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-profile.py b/tools/gyp/test/win/gyptest-link-profile.py
deleted file mode 100644
index 4dbc9ae..0000000
--- a/tools/gyp/test/win/gyptest-link-profile.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that the 'Profile' attribute in VCLinker is extracted properly.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  CHDIR = 'linker-flags'
-  test.run_gyp('profile.gyp', chdir=CHDIR)
-  test.build('profile.gyp', test.ALL, chdir=CHDIR)
-
-  def GetSummary(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    return test.run_dumpbin(full_path)
-
-  # '.idata' section will be missing when /PROFILE is enabled.
-  if '.idata' in GetSummary('test_profile_true.exe'):
-    test.fail_test()
-
-  if not '.idata' in GetSummary('test_profile_false.exe'):
-    test.fail_test()
-
-  if not '.idata' in GetSummary('test_profile_default.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-restat-importlib.py b/tools/gyp/test/win/gyptest-link-restat-importlib.py
deleted file mode 100644
index 76b5c3c..0000000
--- a/tools/gyp/test/win/gyptest-link-restat-importlib.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure we don't cause unnecessary builds due to import libs appearing
-to be out of date.
-"""
-
-import TestGyp
-
-import os
-import sys
-import time
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  if not os.environ.get('ProgramFiles(x86)'):
-    # TODO(scottmg)
-    print 'Skipping test on x86, http://crbug.com/365833'
-    test.pass_test()
-
-  CHDIR = 'importlib'
-  test.run_gyp('importlib.gyp', chdir=CHDIR)
-  test.build('importlib.gyp', test.ALL, chdir=CHDIR)
-
-  # Delay briefly so that this touch doesn't end up with the same timestamp
-  # as the previous run.
-  test.sleep()
-
-  # Touch the .cc file; the .dll will rebuild, but the import libs timestamp
-  # won't be updated.
-  test.touch('importlib/has-exports.cc')
-  test.build('importlib.gyp', 'test_importlib', chdir=CHDIR)
-
-  # This is the important part. The .dll above will relink and have an updated
-  # timestamp; however, the import .lib's timestamp won't be updated. So we
-  # have to handle restating inputs in ninja so the final binary doesn't
-  # continually relink (due to thinking the .lib isn't up to date).
-  test.up_to_date('importlib.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-safeseh.py b/tools/gyp/test/win/gyptest-link-safeseh.py
deleted file mode 100644
index 31a2567..0000000
--- a/tools/gyp/test/win/gyptest-link-safeseh.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure safeseh setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp()
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('safeseh.gyp', chdir=CHDIR)
-  test.build('safeseh.gyp', test.ALL, chdir=CHDIR)
-
-  def HasSafeExceptionHandlers(exe):
-    full_path = test.built_file_path(exe, chdir=CHDIR)
-    output = test.run_dumpbin('/LOADCONFIG', full_path)
-    return '    Safe Exception Handler Table' in output
-
-  # From MSDN: http://msdn.microsoft.com/en-us/library/9a89h429.aspx
-  #   If /SAFESEH is not specified, the linker will produce an image with a
-  #   table of safe exception handlers if all modules are compatible with
-  #   the safe exception handling feature. If any modules were not
-  #   compatible with the safe exception handling feature, the resulting image
-  #   will not contain a table of safe exception handlers.
-  #   However, the msvs IDE passes /SAFESEH to the linker by default, if
-  #   ImageHasSafeExceptionHandlers is not set to false in the vcxproj file.
-  #   We emulate this behavior in msvs_emulation.py, so 'test_safeseh_default'
-  #   and 'test_safeseh_yes' are built identically.
-  if not HasSafeExceptionHandlers('test_safeseh_default.exe'):
-    test.fail_test()
-  if HasSafeExceptionHandlers('test_safeseh_no.exe'):
-    test.fail_test()
-  if not HasSafeExceptionHandlers('test_safeseh_yes.exe'):
-    test.fail_test()
-  if HasSafeExceptionHandlers('test_safeseh_x64.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-shard.py b/tools/gyp/test/win/gyptest-link-shard.py
deleted file mode 100644
index 9af9328..0000000
--- a/tools/gyp/test/win/gyptest-link-shard.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure msvs_shard works correctly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'shard'
-  test.run_gyp('shard.gyp', chdir=CHDIR)
-  test.build('shard.gyp', test.ALL, chdir=CHDIR)
-
-  test.built_file_must_exist('shard_0.lib', chdir=CHDIR)
-  test.built_file_must_exist('shard_1.lib', chdir=CHDIR)
-  test.built_file_must_exist('shard_2.lib', chdir=CHDIR)
-  test.built_file_must_exist('shard_3.lib', chdir=CHDIR)
-
-  test.run_gyp('shard_ref.gyp', chdir=CHDIR)
-  test.build('shard_ref.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-stacksize.py b/tools/gyp/test/win/gyptest-link-stacksize.py
deleted file mode 100644
index 2e952d2..0000000
--- a/tools/gyp/test/win/gyptest-link-stacksize.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure StackReserveSize and StackCommitSize settings are extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('stacksize.gyp', chdir=CHDIR)
-  test.build('stacksize.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
-
-  # Verify default sizes as reported by dumpbin:
-  #     100000h = 1MB
-  #     1000h   = 4KB
-  default_headers = GetHeaders('test_default.exe')
-  if '100000 size of stack reserve' not in default_headers:
-    test.fail_test()
-  if '1000 size of stack commit' not in default_headers:
-    test.fail_test()
-
-  # Verify that reserved size is changed, but commit size is unchanged:
-  #     200000h = 2MB
-  #     1000h   = 4KB
-  set_reserved_size_headers = GetHeaders('test_set_reserved_size.exe')
-  if '200000 size of stack reserve' not in set_reserved_size_headers:
-    test.fail_test()
-  if '1000 size of stack commit' not in set_reserved_size_headers:
-    test.fail_test()
-
-  # Verify that setting the commit size, without the reserve size, has no
-  # effect:
-  #     100000h = 1MB
-  #     1000h   = 4KB
-  set_commit_size_headers = GetHeaders('test_set_commit_size.exe')
-  if '100000 size of stack reserve' not in set_commit_size_headers:
-    test.fail_test()
-  if '1000 size of stack commit' not in set_commit_size_headers:
-    test.fail_test()
-
-  # Verify that setting both works:
-  #     200000h = 2MB
-  #     2000h   = 8KB
-  set_both_headers = GetHeaders('test_set_both.exe')
-  if '200000 size of stack reserve' not in set_both_headers:
-    test.fail_test()
-  if '2000 size of stack commit' not in set_both_headers:
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-subsystem.py b/tools/gyp/test/win/gyptest-link-subsystem.py
deleted file mode 100644
index a94ba36..0000000
--- a/tools/gyp/test/win/gyptest-link-subsystem.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure subsystem setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('subsystem.gyp', chdir=CHDIR)
-
-  test.build('subsystem.gyp', 'test_console_ok', chdir=CHDIR)
-  test.build('subsystem.gyp', 'test_console_fail', chdir=CHDIR, status=1)
-  test.build('subsystem.gyp', 'test_windows_ok', chdir=CHDIR)
-  test.build('subsystem.gyp', 'test_windows_fail', chdir=CHDIR, status=1)
-
-  test.build('subsystem.gyp', 'test_console_xp', chdir=CHDIR)
-  test.build('subsystem.gyp', 'test_windows_xp', chdir=CHDIR)
-  # Make sure we are targeting XP.
-  def GetHeaders(exe):
-    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
-  if '5.01 subsystem version' not in GetHeaders('test_console_xp.exe'):
-    test.fail_test()
-  if '5.01 subsystem version' not in GetHeaders('test_windows_xp.exe'):
-    test.fail_test()
-
-  # TODO(scottmg): There are other subsystems (WinCE, etc.) that we don't use.
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-target-machine.py b/tools/gyp/test/win/gyptest-link-target-machine.py
deleted file mode 100644
index 5a15f3f..0000000
--- a/tools/gyp/test/win/gyptest-link-target-machine.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure TargetMachine setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('target-machine.gyp', chdir=CHDIR)
-  # The .cc file is compiled as x86 (the default), so the link/libs that are
-  # x64 need to fail.
-  test.build('target-machine.gyp', 'test_target_link_x86', chdir=CHDIR)
-  test.build(
-      'target-machine.gyp', 'test_target_link_x64', chdir=CHDIR, status=1)
-  test.build('target-machine.gyp', 'test_target_lib_x86', chdir=CHDIR)
-  test.build('target-machine.gyp', 'test_target_lib_x64', chdir=CHDIR, status=1)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-tsaware.py b/tools/gyp/test/win/gyptest-link-tsaware.py
deleted file mode 100644
index d34b3c2..0000000
--- a/tools/gyp/test/win/gyptest-link-tsaware.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure tsaware setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('tsaware.gyp', chdir=CHDIR)
-  test.build('tsaware.gyp', test.ALL, chdir=CHDIR)
-
-  def GetHeaders(exe):
-    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
-
-  # Explicitly off, should not be marked Terminal Server Aware.
-  if 'Terminal Server Aware' in GetHeaders('test_tsaware_no.exe'):
-    test.fail_test()
-
-  # Explicitly on.
-  if 'Terminal Server Aware' not in GetHeaders('test_tsaware_yes.exe'):
-    test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-uldi-depending-on-module.py b/tools/gyp/test/win/gyptest-link-uldi-depending-on-module.py
deleted file mode 100644
index 75c9503..0000000
--- a/tools/gyp/test/win/gyptest-link-uldi-depending-on-module.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure that when ULDI is on, we cause downstream modules to get built
-when we depend on the component objs.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'uldi'
-  test.run_gyp('uldi-depending-on-module.gyp', chdir=CHDIR)
-  test.build('uldi-depending-on-module.gyp', 'an_exe', chdir=CHDIR)
-  test.built_file_must_exist('a_module.dll', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-uldi.py b/tools/gyp/test/win/gyptest-link-uldi.py
deleted file mode 100644
index 62c5892..0000000
--- a/tools/gyp/test/win/gyptest-link-uldi.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure that when ULDI is on, we link .objs that make up .libs rather than
-the .libs themselves.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'uldi'
-  test.run_gyp('uldi.gyp', chdir=CHDIR)
-  # When linking with ULDI, the duplicated function from the lib will be an
-  # error.
-  test.build('uldi.gyp', 'final_uldi', chdir=CHDIR, status=1)
-  # And when in libs, the duplicated function will be silently dropped, so the
-  # build succeeds.
-  test.build('uldi.gyp', 'final_no_uldi', chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-unsupported-manifest.py b/tools/gyp/test/win/gyptest-link-unsupported-manifest.py
deleted file mode 100644
index 8f7e12b..0000000
--- a/tools/gyp/test/win/gyptest-link-unsupported-manifest.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure we error out if #pragma comments are used to modify manifests.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  # This assertion only applies to the ninja build.
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('unsupported-manifest.gyp', chdir=CHDIR)
-
-  # Just needs to fail to build.
-  test.build('unsupported-manifest.gyp',
-      'test_unsupported', chdir=CHDIR, status=1)
-  test.must_not_exist(test.built_file_path('test_unsupported.exe', chdir=CHDIR))
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-link-update-manifest.py b/tools/gyp/test/win/gyptest-link-update-manifest.py
deleted file mode 100644
index 4f8b2b9..0000000
--- a/tools/gyp/test/win/gyptest-link-update-manifest.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure binary is relinked when manifest settings are changed.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  import pywintypes
-  import win32api
-  import winerror
-
-  RT_MANIFEST = 24
-
-  class LoadLibrary(object):
-    """Context manager for loading and releasing binaries in Windows.
-    Yields the handle of the binary loaded."""
-    def __init__(self, path):
-      self._path = path
-      self._handle = None
-
-    def __enter__(self):
-      self._handle = win32api.LoadLibrary(self._path)
-      return self._handle
-
-    def __exit__(self, type, value, traceback):
-      win32api.FreeLibrary(self._handle)
-
-  def extract_manifest(path, resource_name):
-    """Reads manifest from |path| and returns it as a string.
-    Returns None if there is no such manifest."""
-    with LoadLibrary(path) as handle:
-      try:
-        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
-      except pywintypes.error as error:
-        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
-          return None
-        else:
-          raise
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-
-  gyp_template = '''
-{
- 'targets': [
-    {
-      'target_name': 'test_update_manifest',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'UACExecutionLevel': '%(uac_execution_level)d',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-          'AdditionalManifestFiles': '%(additional_manifest_files)s',
-        },
-      },
-    },
-  ],
-}
-'''
-
-  gypfile = 'update-manifest.gyp'
-
-  def WriteAndUpdate(uac_execution_level, additional_manifest_files, do_build):
-    with open(os.path.join(CHDIR, gypfile), 'wb') as f:
-      f.write(gyp_template % {
-        'uac_execution_level': uac_execution_level,
-        'additional_manifest_files': additional_manifest_files,
-      })
-    test.run_gyp(gypfile, chdir=CHDIR)
-    if do_build:
-      test.build(gypfile, chdir=CHDIR)
-      exe_file = test.built_file_path('test_update_manifest.exe', chdir=CHDIR)
-      return extract_manifest(exe_file, 1)
-
-  manifest = WriteAndUpdate(0, '', True)
-  test.fail_test('asInvoker' not in manifest)
-  test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' in manifest)
-
-  # Make sure that updating .gyp and regenerating doesn't cause a rebuild.
-  WriteAndUpdate(0, '', False)
-  test.up_to_date(gypfile, test.ALL, chdir=CHDIR)
-
-  # But make sure that changing a manifest property does cause a relink.
-  manifest = WriteAndUpdate(2, '', True)
-  test.fail_test('requireAdministrator' not in manifest)
-
-  # Adding a manifest causes a rebuild.
-  manifest = WriteAndUpdate(2, 'extra.manifest', True)
-  test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in manifest)
diff --git a/tools/gyp/test/win/gyptest-link-warnings-as-errors.py b/tools/gyp/test/win/gyptest-link-warnings-as-errors.py
deleted file mode 100644
index d6a6473..0000000
--- a/tools/gyp/test/win/gyptest-link-warnings-as-errors.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure linker warnings-as-errors setting is extracted properly.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'linker-flags'
-  test.run_gyp('warn-as-error.gyp', chdir=CHDIR)
-
-  test.build('warn-as-error.gyp', 'test_on', chdir=CHDIR, status=1)
-  test.build('warn-as-error.gyp', 'test_off', chdir=CHDIR)
-  test.build('warn-as-error.gyp', 'test_default', chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-long-command-line.py b/tools/gyp/test/win/gyptest-long-command-line.py
deleted file mode 100644
index 8f8b7a3..0000000
--- a/tools/gyp/test/win/gyptest-long-command-line.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure long command lines work.
-"""
-
-import TestGyp
-
-import subprocess
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja', 'msvs'])
-
-  CHDIR = 'long-command-line'
-  test.run_gyp('long-command-line.gyp', chdir=CHDIR)
-  test.build('long-command-line.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-projectname.py b/tools/gyp/test/win/gyptest-macro-projectname.py
deleted file mode 100644
index e411cc0..0000000
--- a/tools/gyp/test/win/gyptest-macro-projectname.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(ProjectName) is handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('projectname.gyp', chdir=CHDIR)
-  test.build('projectname.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('test_expansions_plus_something.exe', chdir=CHDIR)
-  test.built_file_must_exist(
-      'test_with_product_name_plus_something.exe', chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-targetext.py b/tools/gyp/test/win/gyptest-macro-targetext.py
deleted file mode 100644
index 450710d..0000000
--- a/tools/gyp/test/win/gyptest-macro-targetext.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(TargetExt) is handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('targetext.gyp', chdir=CHDIR)
-  test.build('targetext.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('executable.exe', chdir=CHDIR)
-  test.built_file_must_exist('loadable_module.dll', chdir=CHDIR)
-  test.built_file_must_exist('shared_library.dll', chdir=CHDIR)
-  test.built_file_must_exist('static_library.lib', chdir=CHDIR)
-  test.built_file_must_exist('product_extension.library', chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-targetfilename.py b/tools/gyp/test/win/gyptest-macro-targetfilename.py
deleted file mode 100644
index be4b637..0000000
--- a/tools/gyp/test/win/gyptest-macro-targetfilename.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(TargetFileName) is handled.
-"""
-
-import TestGyp
-
-import os
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-  if not (test.format == 'msvs' and
-          int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
-    CHDIR = 'vs-macros'
-    test.run_gyp('targetfilename.gyp', chdir=CHDIR)
-    test.build('targetfilename.gyp', test.ALL, chdir=CHDIR)
-    test.built_file_must_exist('test_targetfilename_executable.exe', chdir=CHDIR)
-    test.built_file_must_exist('test_targetfilename_loadable_module.dll',
-                              chdir=CHDIR)
-    test.built_file_must_exist('test_targetfilename_shared_library.dll',
-                              chdir=CHDIR)
-    test.built_file_must_exist('test_targetfilename_static_library.lib',
-                              chdir=CHDIR)
-    test.built_file_must_exist('test_targetfilename_product_extension.foo',
-                              chdir=CHDIR)
-    test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-targetname.py b/tools/gyp/test/win/gyptest-macro-targetname.py
deleted file mode 100644
index b111801..0000000
--- a/tools/gyp/test/win/gyptest-macro-targetname.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(TargetName) and $(TargetDir) are handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('targetname.gyp', chdir=CHDIR)
-  test.build('targetname.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('test_targetname_plus_something1.exe',
-          chdir=CHDIR)
-  test.built_file_must_exist(
-          'prod_prefixtest_targetname_with_prefix_plus_something2.exe',
-          chdir=CHDIR)
-  test.built_file_must_exist('prod_name_plus_something3.exe', chdir=CHDIR)
-  test.built_file_must_exist('prod_prefixprod_name_plus_something4.exe',
-          chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-targetpath.py b/tools/gyp/test/win/gyptest-macro-targetpath.py
deleted file mode 100644
index fe7eac1..0000000
--- a/tools/gyp/test/win/gyptest-macro-targetpath.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(TargetPath) is handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('targetpath.gyp', chdir=CHDIR)
-  test.build('targetpath.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('test_targetpath_executable.exe', chdir=CHDIR)
-  test.built_file_must_exist('test_targetpath_loadable_module.dll',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetpath_shared_library.dll',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetpath_static_library.lib',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetpath_product_extension.foo',
-                             chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macro-vcinstalldir.py b/tools/gyp/test/win/gyptest-macro-vcinstalldir.py
deleted file mode 100644
index 37396e1..0000000
--- a/tools/gyp/test/win/gyptest-macro-vcinstalldir.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure macro expansion of $(VCInstallDir) is handled, and specifically
-always / terminated for compatibility.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('vcinstalldir.gyp', chdir=CHDIR)
-  # This fails on VS because the trailing slash escapes the trailing quote.
-  test.build('vcinstalldir.gyp', 'test_slash_trailing', chdir=CHDIR, status=1)
-  test.build('vcinstalldir.gyp', 'test_slash_dir', chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macros-containing-gyp.py b/tools/gyp/test/win/gyptest-macros-containing-gyp.py
deleted file mode 100644
index f6eaf63..0000000
--- a/tools/gyp/test/win/gyptest-macros-containing-gyp.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Handle VS macro expansion containing gyp variables.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('containing-gyp.gyp', chdir=CHDIR)
-  test.build('containing-gyp.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py b/tools/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py
deleted file mode 100644
index 3d6fa74..0000000
--- a/tools/gyp/test/win/gyptest-macros-in-inputs-and-outputs.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Handle macro expansion in inputs and outputs of rules.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('input-output-macros.gyp', chdir=CHDIR)
-
-  test.build('input-output-macros.gyp', 'test_expansions', chdir=CHDIR)
-
-  test.built_file_must_exist('stuff.blah.something',
-      content='Random data file.\nModified.',
-      chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-midl-excluded.py b/tools/gyp/test/win/gyptest-midl-excluded.py
deleted file mode 100644
index 70059ab..0000000
--- a/tools/gyp/test/win/gyptest-midl-excluded.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Test that .idl files in actions and non-native rules are excluded.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'idl-excluded'
-  test.run_gyp('idl-excluded.gyp', chdir=CHDIR)
-  test.build('idl-excluded.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-midl-includedirs.py b/tools/gyp/test/win/gyptest-midl-includedirs.py
deleted file mode 100644
index 05f6370..0000000
--- a/tools/gyp/test/win/gyptest-midl-includedirs.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verify that 'midl_include_dirs' is handled.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'idl-includedirs'
-  test.run_gyp('idl-includedirs.gyp', chdir=CHDIR)
-  test.build('idl-includedirs.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-midl-rules.py b/tools/gyp/test/win/gyptest-midl-rules.py
deleted file mode 100644
index 591a507..0000000
--- a/tools/gyp/test/win/gyptest-midl-rules.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Handle default .idl build rules.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'idl-rules'
-  test.run_gyp('basic-idl.gyp', chdir=CHDIR)
-  for platform in ['Win32', 'x64']:
-    test.set_configuration('Debug|%s' % platform)
-    test.build('basic-idl.gyp', test.ALL, chdir=CHDIR)
-
-    # Make sure ninja win_tool.py filters out noisy lines.
-    if test.format == 'ninja' and 'Processing' in test.stdout():
-      test.fail_test()
-
-    test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-ml-safeseh.py b/tools/gyp/test/win/gyptest-ml-safeseh.py
deleted file mode 100644
index ec702b9..0000000
--- a/tools/gyp/test/win/gyptest-ml-safeseh.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure the /safeseh option can be passed to ml.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  CHDIR = 'ml-safeseh'
-  test.run_gyp('ml-safeseh.gyp', chdir=CHDIR)
-  test.build('ml-safeseh.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-quoting-commands.py b/tools/gyp/test/win/gyptest-quoting-commands.py
deleted file mode 100644
index b40f99f..0000000
--- a/tools/gyp/test/win/gyptest-quoting-commands.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure batch files run as actions. Regression test for previously missing
-trailing quote on command line. cmd typically will implicitly insert a missing
-quote, but if the command ends in a quote, it will not insert another, so the
-command can sometimes become unterminated.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'batch-file-action'
-  test.run_gyp('batch-file-action.gyp', chdir=CHDIR)
-  test.build('batch-file-action.gyp', test.ALL, chdir=CHDIR)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-rc-build.py b/tools/gyp/test/win/gyptest-rc-build.py
deleted file mode 100644
index a6e4d36..0000000
--- a/tools/gyp/test/win/gyptest-rc-build.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure we build and include .rc files.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  print "This test is currently disabled: https://crbug.com/483696."
-  sys.exit(0)
-
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'rc-build'
-  test.run_gyp('hello.gyp', chdir=CHDIR)
-  test.build('hello.gyp', test.ALL, chdir=CHDIR)
-  test.up_to_date('hello.gyp', 'resource_only_dll', chdir=CHDIR)
-  test.run_built_executable('with_resources', chdir=CHDIR, status=4)
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/gyptest-system-include.py b/tools/gyp/test/win/gyptest-system-include.py
deleted file mode 100644
index 9a47d98..0000000
--- a/tools/gyp/test/win/gyptest-system-include.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Checks that msvs_system_include_dirs works.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'system-include'
-  test.run_gyp('test.gyp', chdir=CHDIR)
-  test.build('test.gyp', test.ALL, chdir=CHDIR)
-  test.pass_test()
diff --git a/tools/gyp/test/win/idl-excluded/bad.idl b/tools/gyp/test/win/idl-excluded/bad.idl
deleted file mode 100644
index 38554e9..0000000
--- a/tools/gyp/test/win/idl-excluded/bad.idl
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-This is a dummy .idl file that will trigger an error if it is not excluded from
-the build.
diff --git a/tools/gyp/test/win/idl-excluded/copy-file.py b/tools/gyp/test/win/idl-excluded/copy-file.py
deleted file mode 100644
index 5a5feae..0000000
--- a/tools/gyp/test/win/idl-excluded/copy-file.py
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-
-contents = open(sys.argv[1], 'r').read()
-open(sys.argv[2], 'wb').write(contents)
-
-sys.exit(0)
diff --git a/tools/gyp/test/win/idl-excluded/idl-excluded.gyp b/tools/gyp/test/win/idl-excluded/idl-excluded.gyp
deleted file mode 100644
index 972b7de..0000000
--- a/tools/gyp/test/win/idl-excluded/idl-excluded.gyp
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'exclude_with_action',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [{
-        'action_name': 'copy_action',
-        'inputs': [
-          'copy-file.py',
-          'bad.idl',
-        ],
-        'outputs': [
-          '<(INTERMEDIATE_DIR)/bad.idl',
-        ],
-        'action': [
-          'python', '<@(_inputs)', '<@(_outputs)',
-        ],
-      }],
-    },
-    {
-      'target_name': 'exclude_with_rule',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'sources': [
-        'bad.idl',
-      ],
-      'rules': [{
-        'rule_name': 'copy_rule',
-        'extension': 'idl',
-        'inputs': [
-          'copy-file.py',
-        ],
-        'outputs': [
-          '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).idl',
-        ],
-        'action': [
-          'python', '<@(_inputs)', '<(RULE_INPUT_PATH)', '<@(_outputs)',
-        ],
-      }],
-    },
-    {
-      'target_name': 'program',
-      'type': 'executable',
-      'sources': [
-        'program.cc',
-      ],
-      'dependencies': [
-        'exclude_with_action',
-        'exclude_with_rule',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/idl-excluded/program.cc b/tools/gyp/test/win/idl-excluded/program.cc
deleted file mode 100644
index 9dc3c94..0000000
--- a/tools/gyp/test/win/idl-excluded/program.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/idl-includedirs/hello.cc b/tools/gyp/test/win/idl-includedirs/hello.cc
deleted file mode 100644
index 9dc3c94..0000000
--- a/tools/gyp/test/win/idl-includedirs/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/idl-includedirs/idl-includedirs.gyp b/tools/gyp/test/win/idl-includedirs/idl-includedirs.gyp
deleted file mode 100644
index fcec063..0000000
--- a/tools/gyp/test/win/idl-includedirs/idl-includedirs.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_midl_include_dirs',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-        'subdir/foo.idl',
-        'subdir/bar.idl',
-      ],
-      'midl_include_dirs': [
-        'subdir',
-      ],
-      'msvs_settings': {
-        'VCMIDLTool': {
-          'OutputDirectory': '<(INTERMEDIATE_DIR)',
-          'DLLDataFileName': '$(InputName)_dlldata.h',
-         },
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/idl-includedirs/subdir/bar.idl b/tools/gyp/test/win/idl-includedirs/subdir/bar.idl
deleted file mode 100644
index d4e6cbb..0000000
--- a/tools/gyp/test/win/idl-includedirs/subdir/bar.idl
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-import "oaidl.idl";
-
-[
-  object,
-  uuid(A03D1421-B1EC-11D0-8C3A-00C04FC31D3F),
-]
-interface Bar : IUnknown {
-  HRESULT BarFunction();
-};
diff --git a/tools/gyp/test/win/idl-includedirs/subdir/foo.idl b/tools/gyp/test/win/idl-includedirs/subdir/foo.idl
deleted file mode 100644
index c8c65b9..0000000
--- a/tools/gyp/test/win/idl-includedirs/subdir/foo.idl
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-import "oaidl.idl";
-import "bar.idl";
-
-[
-  object,
-  uuid(9C1100DD-51D4-4827-AE9F-3B8FAC4AED72),
-]
-interface Foo : IUnknown {
-  HRESULT FooFunction(Bar* bar);
-};
diff --git a/tools/gyp/test/win/idl-rules/Window.idl b/tools/gyp/test/win/idl-rules/Window.idl
deleted file mode 100644
index d8ea01b..0000000
--- a/tools/gyp/test/win/idl-rules/Window.idl
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-[
-    WillBeGarbageCollected,
-] interface Window {
-    void alert();
-};
diff --git a/tools/gyp/test/win/idl-rules/basic-idl.gyp b/tools/gyp/test/win/idl-rules/basic-idl.gyp
deleted file mode 100644
index b74622a..0000000
--- a/tools/gyp/test/win/idl-rules/basic-idl.gyp
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'midl_out_dir': '<(SHARED_INTERMEDIATE_DIR)',
-  },
-  'target_defaults': {
-    'configurations': {
-      'Debug': {
-        'msvs_configuration_platform': 'Win32',
-      },
-      'Debug_x64': {
-        'inherit_from': ['Debug'],
-        'msvs_configuration_platform': 'x64',
-      },
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'idl_test',
-      'type': 'executable',
-      'sources': [
-        'history_indexer.idl',
-        '<(midl_out_dir)/history_indexer.h',
-        '<(midl_out_dir)/history_indexer_i.c',
-        'history_indexer_user.cc',
-      ],
-      'libraries': ['ole32.lib'],
-      'include_dirs': [
-        '<(midl_out_dir)',
-      ],
-      'msvs_settings': {
-        'VCMIDLTool': {
-          'OutputDirectory': '<(midl_out_dir)',
-          'HeaderFileName': '<(RULE_INPUT_ROOT).h',
-         },
-      },
-    },
-    {
-      'target_name': 'idl_explicit_action',
-      'type': 'none',
-      'sources': [
-        'Window.idl',
-      ],
-      'actions': [{
-        'action_name': 'blink_idl',
-        'explicit_idl_action': 1,
-        'msvs_cygwin_shell': 0,
-        'inputs': [
-          'Window.idl',
-          'idl_compiler.py',
-        ],
-        'outputs': [
-          'Window.cpp',
-          'Window.h',
-        ],
-        'action': [
-          'python',
-          'idl_compiler.py',
-          'Window.idl',
-        ],
-      }],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/idl-rules/history_indexer.idl b/tools/gyp/test/win/idl-rules/history_indexer.idl
deleted file mode 100644
index e866ce6..0000000
--- a/tools/gyp/test/win/idl-rules/history_indexer.idl
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-import "oaidl.idl";
-import "ocidl.idl";
-
-[
-  object,
-  uuid(9C1100DD-51D4-4827-AE9F-3B8FAC4AED72),
-  oleautomation,
-  nonextensible,
-  pointer_default(unique)
-]
-interface IChromeHistoryIndexer : IUnknown {
-  HRESULT SomeFunction([in] VARIANT begin_time, [in] VARIANT end_time);
-};
diff --git a/tools/gyp/test/win/idl-rules/history_indexer_user.cc b/tools/gyp/test/win/idl-rules/history_indexer_user.cc
deleted file mode 100644
index 071a9ff..0000000
--- a/tools/gyp/test/win/idl-rules/history_indexer_user.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "history_indexer.h"
-
-// Use the thing in the IDL.
-int main() {
-  IChromeHistoryIndexer** indexer = 0;
-  IID fake_iid;
-  CoCreateInstance(fake_iid, NULL, CLSCTX_INPROC,
-                   __uuidof(IChromeHistoryIndexer),
-                   reinterpret_cast<void**>(indexer));
-  return 0;
-}
diff --git a/tools/gyp/test/win/idl-rules/idl_compiler.py b/tools/gyp/test/win/idl-rules/idl_compiler.py
deleted file mode 100644
index a12b274..0000000
--- a/tools/gyp/test/win/idl-rules/idl_compiler.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# mock, just outputs empty .h/.cpp files
-
-import os
-import sys
-
-if len(sys.argv) == 2:
-  basename, ext = os.path.splitext(sys.argv[1])
-  with open('%s.h' % basename, 'w') as f:
-    f.write('// %s.h\n' % basename)
-  with open('%s.cpp' % basename, 'w') as f:
-    f.write('// %s.cpp\n' % basename)
diff --git a/tools/gyp/test/win/importlib/dll_no_exports.cc b/tools/gyp/test/win/importlib/dll_no_exports.cc
deleted file mode 100644
index 96dd797..0000000
--- a/tools/gyp/test/win/importlib/dll_no_exports.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-
-BOOL APIENTRY DllMain(HMODULE module, DWORD reason, LPVOID reserved) {
-  return TRUE;
-}
diff --git a/tools/gyp/test/win/importlib/has-exports.cc b/tools/gyp/test/win/importlib/has-exports.cc
deleted file mode 100644
index 3f62d6c..0000000
--- a/tools/gyp/test/win/importlib/has-exports.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-__declspec(dllexport) void some_function() {
-}
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/importlib/hello.cc b/tools/gyp/test/win/importlib/hello.cc
deleted file mode 100644
index 66ff68c..0000000
--- a/tools/gyp/test/win/importlib/hello.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-__declspec(dllimport) void some_function();
-
-int main() {
-  some_function();
-}
diff --git a/tools/gyp/test/win/importlib/importlib.gyp b/tools/gyp/test/win/importlib/importlib.gyp
deleted file mode 100644
index ab15b18..0000000
--- a/tools/gyp/test/win/importlib/importlib.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_importlib',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        }
-      },
-      'sources': ['has-exports.cc'],
-    },
-
-    {
-      'target_name': 'test_linkagainst',
-      'type': 'executable',
-      'dependencies': ['test_importlib'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/importlib/noimplib.gyp b/tools/gyp/test/win/importlib/noimplib.gyp
deleted file mode 100644
index 0245058..0000000
--- a/tools/gyp/test/win/importlib/noimplib.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'no_import_library',
-      'type': 'loadable_module',
-      'msvs_settings': {
-        'NoImportLibrary': 'true',
-      },
-      'sources': ['dll_no_exports.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/large-pdb/dllmain.cc b/tools/gyp/test/win/large-pdb/dllmain.cc
deleted file mode 100644
index 1487562..0000000
--- a/tools/gyp/test/win/large-pdb/dllmain.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-
-BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
-  return TRUE;
-}
diff --git a/tools/gyp/test/win/large-pdb/large-pdb.gyp b/tools/gyp/test/win/large-pdb/large-pdb.gyp
deleted file mode 100644
index 2a241a5..0000000
--- a/tools/gyp/test/win/large-pdb/large-pdb.gyp
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'large_pdb_exe',
-      'type': 'executable',
-      'msvs_large_pdb': 1,
-      'sources': [
-        'main.cc',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_exe.exe.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'small_pdb_exe',
-      'type': 'executable',
-      'msvs_large_pdb': 0,
-      'sources': [
-        'main.cc',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_exe.exe.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'large_pdb_dll',
-      'type': 'shared_library',
-      'msvs_large_pdb': 1,
-      'sources': [
-        'dllmain.cc',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_dll.dll.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'small_pdb_dll',
-      'type': 'shared_library',
-      'msvs_large_pdb': 0,
-      'sources': [
-        'dllmain.cc',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_dll.dll.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'large_pdb_implicit_exe',
-      'type': 'executable',
-      'msvs_large_pdb': 1,
-      'sources': [
-        'main.cc',
-      ],
-      # No PDB file is specified. However, the msvs_large_pdb mechanism should
-      # default to the appropriate <(PRODUCT_DIR)/<(TARGET_NAME).exe.pdb.
-    },
-    {
-      'target_name': 'large_pdb_variable_exe',
-      'type': 'executable',
-      'msvs_large_pdb': 1,
-      'sources': [
-        'main.cc',
-      ],
-      # No PDB file is specified. However, the msvs_large_pdb_path variable
-      # explicitly sets one.
-      'variables': {
-        'msvs_large_pdb_path': '<(PRODUCT_DIR)/foo.pdb',
-      },
-    },
-    {
-      'target_name': 'large_pdb_product_exe',
-      'product_name': 'bar',
-      'type': 'executable',
-      'msvs_large_pdb': 1,
-      'sources': [
-        'main.cc',
-      ],
-      # No PDB file is specified. However, we've specified a product name so
-      # it should use <(PRODUCT_DIR)/bar.exe.pdb.
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/large-pdb/main.cc b/tools/gyp/test/win/large-pdb/main.cc
deleted file mode 100644
index c3da8e9..0000000
--- a/tools/gyp/test/win/large-pdb/main.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main(void) {
-  return 0;
-}
diff --git a/tools/gyp/test/win/lib-crosscompile/answer.cc b/tools/gyp/test/win/lib-crosscompile/answer.cc
deleted file mode 100644
index a6ffa16..0000000
--- a/tools/gyp/test/win/lib-crosscompile/answer.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "answer.h"
-
-int answer() {
-  return 42;
-}
diff --git a/tools/gyp/test/win/lib-crosscompile/answer.h b/tools/gyp/test/win/lib-crosscompile/answer.h
deleted file mode 100644
index 82312d5..0000000
--- a/tools/gyp/test/win/lib-crosscompile/answer.h
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int answer();
\ No newline at end of file
diff --git a/tools/gyp/test/win/lib-crosscompile/use_host_ar.gyp b/tools/gyp/test/win/lib-crosscompile/use_host_ar.gyp
deleted file mode 100644
index 4747bc6..0000000
--- a/tools/gyp/test/win/lib-crosscompile/use_host_ar.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'lib_answer',
-      'type': 'static_library',
-      'toolsets': ['host'],
-      'msvs_settings': {
-        'msvs_cygwin_shell': 0,
-      },
-      'sources': ['answer.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/lib-flags/answer.cc b/tools/gyp/test/win/lib-flags/answer.cc
deleted file mode 100644
index a6ffa16..0000000
--- a/tools/gyp/test/win/lib-flags/answer.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "answer.h"
-
-int answer() {
-  return 42;
-}
diff --git a/tools/gyp/test/win/lib-flags/answer.h b/tools/gyp/test/win/lib-flags/answer.h
deleted file mode 100644
index 82312d5..0000000
--- a/tools/gyp/test/win/lib-flags/answer.h
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int answer();
\ No newline at end of file
diff --git a/tools/gyp/test/win/lib-flags/ltcg.gyp b/tools/gyp/test/win/lib-flags/ltcg.gyp
deleted file mode 100644
index c183107..0000000
--- a/tools/gyp/test/win/lib-flags/ltcg.gyp
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'lib_answer',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WholeProgramOptimization': 'true',  # /GL
-        },
-        'VCLibrarianTool': {
-          'LinkTimeCodeGeneration': 'true',    # /LTCG
-        },
-      },
-      'sources': ['answer.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/a/x.cc b/tools/gyp/test/win/linker-flags/a/x.cc
deleted file mode 100644
index f5f763b..0000000
--- a/tools/gyp/test/win/linker-flags/a/x.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int x() {
-  return 1;
-}
diff --git a/tools/gyp/test/win/linker-flags/a/z.cc b/tools/gyp/test/win/linker-flags/a/z.cc
deleted file mode 100644
index 8a43501..0000000
--- a/tools/gyp/test/win/linker-flags/a/z.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int z() {
-  return 3;
-}
diff --git a/tools/gyp/test/win/linker-flags/additional-deps.cc b/tools/gyp/test/win/linker-flags/additional-deps.cc
deleted file mode 100644
index 7dfb589..0000000
--- a/tools/gyp/test/win/linker-flags/additional-deps.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <winsock2.h>
-
-int main() {
-  WSAStartup(0, 0);
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/additional-deps.gyp b/tools/gyp/test/win/linker-flags/additional-deps.gyp
deleted file mode 100644
index 55afe64..0000000
--- a/tools/gyp/test/win/linker-flags/additional-deps.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_deps_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_deps_few',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'AdditionalDependencies': [
-            'wininet.lib',
-            'ws2_32.lib',
-          ]
-        }
-      },
-      'sources': ['additional-deps.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/additional-options.gyp b/tools/gyp/test/win/linker-flags/additional-options.gyp
deleted file mode 100644
index cab3994..0000000
--- a/tools/gyp/test/win/linker-flags/additional-options.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_additional_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_additional_few',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'AdditionalOptions': [
-            '/dynamicbase:no',
-          ]
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/aslr.gyp b/tools/gyp/test/win/linker-flags/aslr.gyp
deleted file mode 100644
index b3aefd5..0000000
--- a/tools/gyp/test/win/linker-flags/aslr.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_aslr_default',
-      'type': 'executable',
-      'msvs_settings': {
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_aslr_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'RandomizedBaseAddress': '1',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_aslr_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'RandomizedBaseAddress': '2',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/b/y.cc b/tools/gyp/test/win/linker-flags/b/y.cc
deleted file mode 100644
index bd88411..0000000
--- a/tools/gyp/test/win/linker-flags/b/y.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int y() {
-  return 2;
-}
diff --git a/tools/gyp/test/win/linker-flags/base-address.gyp b/tools/gyp/test/win/linker-flags/base-address.gyp
deleted file mode 100644
index 873ebfe..0000000
--- a/tools/gyp/test/win/linker-flags/base-address.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_base_specified_exe',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'BaseAddress': '0x00420000',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_base_specified_dll',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'BaseAddress': '0x10420000',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_base_default_exe',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_base_default_dll',
-      'type': 'shared_library',
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/debug-info.gyp b/tools/gyp/test/win/linker-flags/debug-info.gyp
deleted file mode 100644
index d47d0ec..0000000
--- a/tools/gyp/test/win/linker-flags/debug-info.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_debug_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'false'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_debug_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/deffile-multiple.gyp b/tools/gyp/test/win/linker-flags/deffile-multiple.gyp
deleted file mode 100644
index c74a9af..0000000
--- a/tools/gyp/test/win/linker-flags/deffile-multiple.gyp
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_deffile_multiple_fail',
-      'type': 'shared_library',
-      'sources': [
-          'deffile.cc',
-          'deffile.def',
-          'deffile2.def',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/deffile.cc b/tools/gyp/test/win/linker-flags/deffile.cc
deleted file mode 100644
index fa203b3..0000000
--- a/tools/gyp/test/win/linker-flags/deffile.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void AnExportedFunction() {
-}
-
-void AnotherExportedFunction() {
-}
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/deffile.def b/tools/gyp/test/win/linker-flags/deffile.def
deleted file mode 100644
index ba9d399..0000000
--- a/tools/gyp/test/win/linker-flags/deffile.def
+++ /dev/null
@@ -1,8 +0,0 @@
-; Copyright (c) 2012 Google Inc. All rights reserved.
-; Use of this source code is governed by a BSD-style license that can be
-; found in the LICENSE file.
-
-LIBRARY test_deffile_ok
-
-EXPORTS
-        AnExportedFunction
diff --git a/tools/gyp/test/win/linker-flags/deffile.gyp b/tools/gyp/test/win/linker-flags/deffile.gyp
deleted file mode 100644
index 7b241d5..0000000
--- a/tools/gyp/test/win/linker-flags/deffile.gyp
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_deffile_dll_ok',
-      'type': 'shared_library',
-      'sources': [
-          'deffile.cc',
-          'deffile.def',
-      ],
-    },
-    {
-      'target_name': 'test_deffile_dll_notexported',
-      'type': 'shared_library',
-      'sources': [
-          'deffile.cc',
-      ],
-    },
-    {
-      'target_name': 'test_deffile_exe_ok',
-      'type': 'executable',
-      'sources': [
-          'deffile.cc',
-          'deffile.def',
-      ],
-    },
-    {
-      'target_name': 'test_deffile_exe_notexported',
-      'type': 'executable',
-      'sources': [
-          'deffile.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/delay-load-dlls.gyp b/tools/gyp/test/win/linker-flags/delay-load-dlls.gyp
deleted file mode 100644
index 671cbaa..0000000
--- a/tools/gyp/test/win/linker-flags/delay-load-dlls.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_dld_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-        }
-      },
-      'sources': ['delay-load.cc'],
-      'libraries': [
-        'delayimp.lib',
-        'shell32.lib',
-      ],
-    },
-    {
-      'target_name': 'test_dld_shell32',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'DelayLoadDLLs': ['shell32.dll']
-        }
-      },
-      'sources': ['delay-load.cc'],
-      'libraries': [
-        'delayimp.lib',
-        'shell32.lib',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/delay-load.cc b/tools/gyp/test/win/linker-flags/delay-load.cc
deleted file mode 100644
index 2be34aa..0000000
--- a/tools/gyp/test/win/linker-flags/delay-load.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <shlobj.h>
-
-int main() {
-  SHCreateDirectory(0, 0);
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/embed-manifest.gyp b/tools/gyp/test/win/linker-flags/embed-manifest.gyp
deleted file mode 100644
index fefb2f5..0000000
--- a/tools/gyp/test/win/linker-flags/embed-manifest.gyp
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright (c) 2013 Yandex LLC. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_manifest_exe',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '1',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_dll',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '1',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra1',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-          'AdditionalManifestFiles': 'extra.manifest',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra2',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra_list',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-          'AdditionalManifestFiles': [
-            'extra.manifest',
-            'extra2.manifest'
-          ],
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_dll_inc',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_exe_inc',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_exe_inc_no_embed',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkIncremental': '2',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        }
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/enable-uac.gyp b/tools/gyp/test/win/linker-flags/enable-uac.gyp
deleted file mode 100644
index 4e58c86..0000000
--- a/tools/gyp/test/win/linker-flags/enable-uac.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'enable_uac',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'enable_uac_no',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'false',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-    {
-      'target_name': 'enable_uac_admin',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'UACExecutionLevel': 2,
-          'UACUIAccess': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        }
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/entrypointsymbol.cc b/tools/gyp/test/win/linker-flags/entrypointsymbol.cc
deleted file mode 100644
index b567bc8..0000000
--- a/tools/gyp/test/win/linker-flags/entrypointsymbol.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// The entry point specified by link.exe /ENTRY option.
-extern "C" void MainEntryPoint() {
-}
-
-// Still needed because the linker checks for existence of one of main, wmain,
-// WinMain, or wMain to offer informative diagnostics.
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/entrypointsymbol.gyp b/tools/gyp/test/win/linker-flags/entrypointsymbol.gyp
deleted file mode 100644
index 7f2c142..0000000
--- a/tools/gyp/test/win/linker-flags/entrypointsymbol.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_ok',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EntryPointSymbol': 'MainEntryPoint',
-        }
-      },
-      'sources': ['entrypointsymbol.cc'],
-    },
-    {
-      'target_name': 'test_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EntryPointSymbol': 'MainEntryPoint',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/extra.manifest b/tools/gyp/test/win/linker-flags/extra.manifest
deleted file mode 100644
index 2e436dc..0000000
--- a/tools/gyp/test/win/linker-flags/extra.manifest
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>

-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">

-

-  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">

-    <application>

-      <!--This Id value indicates the application supports Windows 7 functionality-->

-      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>

-    </application>

-  </compatibility>

-  

-</assembly>

diff --git a/tools/gyp/test/win/linker-flags/extra2.manifest b/tools/gyp/test/win/linker-flags/extra2.manifest
deleted file mode 100644
index bfb570c..0000000
--- a/tools/gyp/test/win/linker-flags/extra2.manifest
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>

-<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">

-

-  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">

-    <application>

-      <!--This Id value indicates the application supports Windows Vista functionality -->

-      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>

-    </application>

-  </compatibility>

-  

-</assembly>

diff --git a/tools/gyp/test/win/linker-flags/fixed-base.gyp b/tools/gyp/test/win/linker-flags/fixed-base.gyp
deleted file mode 100644
index cc2982e..0000000
--- a/tools/gyp/test/win/linker-flags/fixed-base.gyp
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Disable DYNAMICBASE for these tests because it implies/doesn't imply
-    # FIXED in certain cases so it complicates the test for FIXED.
-    {
-      'target_name': 'test_fixed_default_exe',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'RandomizedBaseAddress': '1',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_fixed_default_dll',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'RandomizedBaseAddress': '1',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_fixed_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'FixedBaseAddress': '1',
-          'RandomizedBaseAddress': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_fixed_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'FixedBaseAddress': '2',
-          'RandomizedBaseAddress': '1',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/force-symbol-reference.gyp b/tools/gyp/test/win/linker-flags/force-symbol-reference.gyp
deleted file mode 100644
index d6d02a6..0000000
--- a/tools/gyp/test/win/linker-flags/force-symbol-reference.gyp
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_force_reference_lib',
-      'type': 'static_library',
-      'sources': ['x.cc', 'y.cc'],
-    },
-    {
-      'target_name': 'test_force_reference',
-      'type': 'executable',
-      # Turn on debug info to get symbols in disasm for the test code, and
-      # turn on opt:ref to drop unused symbols to make sure we wouldn't
-      # otherwise have the symbols.
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'AdditionalOptions': [
-            '/OPT:REF',
-          ],
-          'ForceSymbolReferences': [
-            '?x@@YAHXZ',
-            '?y@@YAHXZ',
-          ],
-        },
-      },
-      'sources': ['hello.cc'],
-      'dependencies': [
-        'test_force_reference_lib',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/generate-manifest.gyp b/tools/gyp/test/win/linker-flags/generate-manifest.gyp
deleted file mode 100644
index 34a68d1..0000000
--- a/tools/gyp/test/win/linker-flags/generate-manifest.gyp
+++ /dev/null
@@ -1,166 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_generate_manifest_true',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_false',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'false',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_default',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_true_as_embedded',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_false_as_embedded',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'false',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_default_as_embedded',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'true',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_true_with_extra_manifest',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_false_with_extra_manifest',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'false',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_true_with_extra_manifest_list',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'true',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': [
-            'extra.manifest',
-            'extra2.manifest',
-          ],
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_false_with_extra_manifest_list',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-          'GenerateManifest': 'false',
-        },
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': [
-            'extra.manifest',
-            'extra2.manifest',
-          ],
-        },
-      },
-    },
-    {
-      'target_name': 'test_generate_manifest_default_embed_default',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'EnableUAC': 'true',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/hello.cc b/tools/gyp/test/win/linker-flags/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/win/linker-flags/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/incremental.gyp b/tools/gyp/test/win/linker-flags/incremental.gyp
deleted file mode 100644
index 59f3103..0000000
--- a/tools/gyp/test/win/linker-flags/incremental.gyp
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Turn on debug information so the incremental linking tables have a
-    # visible symbolic name in the disassembly.
-    {
-      'target_name': 'test_incremental_unset',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_incremental_default',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '0',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_incremental_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '1',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_incremental_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '2',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/inline_test.cc b/tools/gyp/test/win/linker-flags/inline_test.cc
deleted file mode 100644
index a9f177e..0000000
--- a/tools/gyp/test/win/linker-flags/inline_test.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "inline_test.h"
-
-#include <intrin.h>
-#pragma intrinsic(_ReturnAddress)
-
-bool IsFunctionInlined(void* caller_return_address) {
-  return _ReturnAddress() == caller_return_address;
-}
diff --git a/tools/gyp/test/win/linker-flags/inline_test.h b/tools/gyp/test/win/linker-flags/inline_test.h
deleted file mode 100644
index 117913c..0000000
--- a/tools/gyp/test/win/linker-flags/inline_test.h
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-bool IsFunctionInlined(void* current_return_address);
diff --git a/tools/gyp/test/win/linker-flags/inline_test_main.cc b/tools/gyp/test/win/linker-flags/inline_test_main.cc
deleted file mode 100644
index 23cafe8..0000000
--- a/tools/gyp/test/win/linker-flags/inline_test_main.cc
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "inline_test.h"
-
-#include <intrin.h>
-#include <stdio.h>
-
-#pragma intrinsic(_ReturnAddress)
-
-int main() {
-  if (IsFunctionInlined(_ReturnAddress()))
-    puts("==== inlined ====\n");
-}
diff --git a/tools/gyp/test/win/linker-flags/large-address-aware.gyp b/tools/gyp/test/win/linker-flags/large-address-aware.gyp
deleted file mode 100644
index fa56d37..0000000
--- a/tools/gyp/test/win/linker-flags/large-address-aware.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_large_address_aware_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LargeAddressAware': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_large_address_aware_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LargeAddressAware': '2',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/library-adjust.cc b/tools/gyp/test/win/linker-flags/library-adjust.cc
deleted file mode 100644
index 7dfb589..0000000
--- a/tools/gyp/test/win/linker-flags/library-adjust.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <winsock2.h>
-
-int main() {
-  WSAStartup(0, 0);
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/library-adjust.gyp b/tools/gyp/test/win/linker-flags/library-adjust.gyp
deleted file mode 100644
index 10e9996..0000000
--- a/tools/gyp/test/win/linker-flags/library-adjust.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_adjust',
-      'type': 'executable',
-      'libraries': [
-        '-lws2_32.lib'
-      ],
-      'sources': ['library-adjust.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/library-directories-define.cc b/tools/gyp/test/win/linker-flags/library-directories-define.cc
deleted file mode 100644
index 211ef06..0000000
--- a/tools/gyp/test/win/linker-flags/library-directories-define.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int library_function() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/library-directories-reference.cc b/tools/gyp/test/win/linker-flags/library-directories-reference.cc
deleted file mode 100644
index 3350978..0000000
--- a/tools/gyp/test/win/linker-flags/library-directories-reference.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern int library_function();
-
-int main() {
-  library_function();
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/library-directories.gyp b/tools/gyp/test/win/linker-flags/library-directories.gyp
deleted file mode 100644
index 25395d6..0000000
--- a/tools/gyp/test/win/linker-flags/library-directories.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_libdirs_none',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'AdditionalDependencies': [
-            'test_lib.lib',
-          ],
-        },
-      },
-      'sources': ['library-directories-reference.cc'],
-    },
-    {
-      'target_name': 'test_libdirs_with',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          # NOTE: Don't use this for general dependencies between gyp
-          # libraries (use 'dependencies' instead). This is done here only for
-          # testing.
-          #
-          # This setting should only be used to depend on third party prebuilt
-          # libraries that are stored as binaries at a known location.
-          'AdditionalLibraryDirectories': [
-            '<(DEPTH)/out/Default/obj/subdir', # ninja style
-            '<(DEPTH)/subdir/Default/lib', # msvs style
-          ],
-          'AdditionalDependencies': [
-            'test_lib.lib',
-          ],
-        },
-      },
-      'sources': ['library-directories-reference.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/link-ordering.gyp b/tools/gyp/test/win/linker-flags/link-ordering.gyp
deleted file mode 100644
index 66f4430..0000000
--- a/tools/gyp/test/win/linker-flags/link-ordering.gyp
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_ordering_exe',
-      'type': 'executable',
-      # These are so the names of the functions appear in the disassembly.
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-          'Optimization': '2',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '1',
-          'GenerateManifest': 'false',
-          # Minimize the disassembly to just our code.
-          'AdditionalOptions': [
-            '/NODEFAULTLIB',
-          ],
-        },
-      },
-      'sources': [
-        # Explicitly sorted the same way as the disassembly in the test .py.
-        'main-crt.c',
-        'z.cc',
-        'x.cc',
-        'y.cc',
-        'hello.cc',
-      ],
-    },
-
-    {
-      'target_name': 'test_ordering_subdirs',
-      'type': 'executable',
-      # These are so the names of the functions appear in the disassembly.
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-          'Optimization': '2',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '1',
-          'GenerateManifest': 'false',
-          # Minimize the disassembly to just our code.
-          'AdditionalOptions': [
-            '/NODEFAULTLIB',
-          ],
-        },
-      },
-      'sources': [
-        # Explicitly sorted the same way as the disassembly in the test .py.
-        'main-crt.c',
-        'hello.cc',
-        'b/y.cc',
-        'a/z.cc',
-      ],
-    },
-
-
-    {
-      'target_name': 'test_ordering_subdirs_mixed',
-      'type': 'executable',
-      # These are so the names of the functions appear in the disassembly.
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-          'Optimization': '2',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '1',
-          'GenerateManifest': 'false',
-          # Minimize the disassembly to just our code.
-          'AdditionalOptions': [
-            '/NODEFAULTLIB',
-          ],
-        },
-      },
-      'sources': [
-        # Explicitly sorted the same way as the disassembly in the test .py.
-        'main-crt.c',
-        'a/x.cc',
-        'hello.cc',
-        'a/z.cc',
-        'y.cc',
-      ],
-    },
-
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/link-warning.cc b/tools/gyp/test/win/linker-flags/link-warning.cc
deleted file mode 100644
index 4b34277..0000000
--- a/tools/gyp/test/win/linker-flags/link-warning.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This will cause LNK4254.
-#pragma comment(linker, "/merge:.data=.text")
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/ltcg.gyp b/tools/gyp/test/win/linker-flags/ltcg.gyp
deleted file mode 100644
index ddb0d9b..0000000
--- a/tools/gyp/test/win/linker-flags/ltcg.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_ltcg_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WholeProgramOptimization': 'false',
-        },
-        'VCLinkerTool': {
-          'LinkTimeCodeGeneration': '0',
-        },
-      },
-      'sources': [
-        'inline_test.h',
-        'inline_test.cc',
-        'inline_test_main.cc',
-      ],
-    },
-    {
-      'target_name': 'test_ltcg_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WholeProgramOptimization': 'true',  # /GL
-        },
-        'VCLinkerTool': {
-          'LinkTimeCodeGeneration': '1',       # /LTCG
-        },
-      },
-      'sources': [
-        'inline_test.h',
-        'inline_test.cc',
-        'inline_test_main.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/main-crt.c b/tools/gyp/test/win/linker-flags/main-crt.c
deleted file mode 100644
index bdc80c5..0000000
--- a/tools/gyp/test/win/linker-flags/main-crt.c
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Stub so we can link with /NODEFAULTLIB when checking disasm.
-int mainCRTStartup() {
-  return 5;
-}
diff --git a/tools/gyp/test/win/linker-flags/manifest-in-comment.cc b/tools/gyp/test/win/linker-flags/manifest-in-comment.cc
deleted file mode 100644
index ae54ae5..0000000
--- a/tools/gyp/test/win/linker-flags/manifest-in-comment.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#pragma comment(linker,                                                  \
-                "\"/manifestdependency:type='Win32' "                    \
-                "name='Test.Research.SampleAssembly' version='6.0.0.0' " \
-                "processorArchitecture='X86' "                           \
-                "publicKeyToken='0000000000000000' language='*'\"")
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/mapfile.cc b/tools/gyp/test/win/linker-flags/mapfile.cc
deleted file mode 100644
index cebccb2..0000000
--- a/tools/gyp/test/win/linker-flags/mapfile.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-__declspec(dllexport)
-void AnExportedFunction() {
-    // We need an exported function to verify that /MAPINFO:EXPORTS works.
-}
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/mapfile.gyp b/tools/gyp/test/win/linker-flags/mapfile.gyp
deleted file mode 100644
index 14206fe..0000000
--- a/tools/gyp/test/win/linker-flags/mapfile.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_mapfile_unset',
-      'type': 'executable',
-      'sources': ['mapfile.cc'],
-    },
-    {
-      'target_name': 'test_mapfile_generate',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateMapFile': 'true',
-        },
-      },
-      'sources': ['mapfile.cc'],
-    },
-    {
-      'target_name': 'test_mapfile_generate_exports',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateMapFile': 'true',
-          'MapExports': 'true',
-        },
-      },
-      'sources': ['mapfile.cc'],
-    },
-    {
-      'target_name': 'test_mapfile_generate_filename',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'GenerateMapFile': 'true',
-          'MapFileName': '<(PRODUCT_DIR)/custom_file_name.map',
-        },
-      },
-      'sources': ['mapfile.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/no-default-libs.cc b/tools/gyp/test/win/linker-flags/no-default-libs.cc
deleted file mode 100644
index e306846..0000000
--- a/tools/gyp/test/win/linker-flags/no-default-libs.cc
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Reference something in kernel32.dll. This will fail to link, verifying that
-// GYP provides no default import library configuration.
-// Note that we don't include Windows.h, as that will result in generating
-// linker directives in the object file through #pragma comment(lib, ...).
-typedef short BOOL;
-
-extern "C" __declspec(dllimport)
-BOOL CopyFileW(const wchar_t*, const wchar_t*, BOOL);
-
-
-int main() {
-  CopyFileW(0, 0, 0); // kernel32
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/no-default-libs.gyp b/tools/gyp/test/win/linker-flags/no-default-libs.gyp
deleted file mode 100644
index 77838ce..0000000
--- a/tools/gyp/test/win/linker-flags/no-default-libs.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_default',
-      'type': 'executable',
-      'sources': ['no-default-libs.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/nodefaultlib.cc b/tools/gyp/test/win/linker-flags/nodefaultlib.cc
deleted file mode 100644
index 24b6eca..0000000
--- a/tools/gyp/test/win/linker-flags/nodefaultlib.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Include entry point function that's excluded by removing C runtime libraries.
-extern "C" void mainCRTStartup() {
-}
-
-// Still needed because the linker checks for existence of one of main, wmain,
-// WinMain, or wMain to offer informative diagnostics.
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/nodefaultlib.gyp b/tools/gyp/test/win/linker-flags/nodefaultlib.gyp
deleted file mode 100644
index 4fb452a..0000000
--- a/tools/gyp/test/win/linker-flags/nodefaultlib.gyp
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_ok',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'IgnoreDefaultLibraryNames':
-              ['libcmtd.lib', 'libcmt.lib', 'msvcrt.lib', 'msvcrtd.lib'],
-        }
-      },
-      'sources': ['nodefaultlib.cc'],
-    },
-    {
-      'target_name': 'test_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'IgnoreDefaultLibraryNames':
-              ['libcmtd.lib', 'libcmt.lib', 'msvcrt.lib', 'msvcrtd.lib'],
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/nxcompat.gyp b/tools/gyp/test/win/linker-flags/nxcompat.gyp
deleted file mode 100644
index fa4118c..0000000
--- a/tools/gyp/test/win/linker-flags/nxcompat.gyp
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_nxcompat_default',
-      'type': 'executable',
-      'msvs_settings': {
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_nxcompat_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'DataExecutionPrevention': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_nxcompat_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'DataExecutionPrevention': '2',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/opt-icf.cc b/tools/gyp/test/win/linker-flags/opt-icf.cc
deleted file mode 100644
index 1f12156..0000000
--- a/tools/gyp/test/win/linker-flags/opt-icf.cc
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-void similar_function0(char* x) {
-  while (*x) {
-    ++x;
-  }
-}
-
-void similar_function1(char* p) {
-  while (*p) {
-    ++p;
-  }
-}
-
-void similar_function2(char* q) {
-  while (*q) {
-    ++q;
-  }
-}
-
-int main() {
-  char* x = "hello";
-  similar_function0(x);
-  similar_function1(x);
-  similar_function2(x);
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/opt-icf.gyp b/tools/gyp/test/win/linker-flags/opt-icf.gyp
deleted file mode 100644
index effe802..0000000
--- a/tools/gyp/test/win/linker-flags/opt-icf.gyp
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Have to turn on function level linking here to get the function packaged
-    # as a COMDAT so that it's eligible for merging. Also turn on debug
-    # information so that the symbol names for the code appear in the dump.
-    # Finally, specify non-incremental linking so that there's not a bunch of
-    # extra "similar_function"s in the output (the ILT jump table).
-    {
-      'target_name': 'test_opticf_default',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-          'Optimization': '0',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'LinkIncremental': '1',
-        },
-      },
-      'sources': ['opt-icf.cc'],
-    },
-    {
-      'target_name': 'test_opticf_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-          'Optimization': '0',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'EnableCOMDATFolding': '1',
-          'LinkIncremental': '1',
-        },
-      },
-      'sources': ['opt-icf.cc'],
-    },
-    {
-      'target_name': 'test_opticf_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-          'Optimization': '0',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'EnableCOMDATFolding': '2',
-          'LinkIncremental': '1',
-        },
-      },
-      'sources': ['opt-icf.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/opt-ref.cc b/tools/gyp/test/win/linker-flags/opt-ref.cc
deleted file mode 100644
index afaa328..0000000
--- a/tools/gyp/test/win/linker-flags/opt-ref.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int unused_function() {
-  return 0;
-}
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/opt-ref.gyp b/tools/gyp/test/win/linker-flags/opt-ref.gyp
deleted file mode 100644
index 69d0281..0000000
--- a/tools/gyp/test/win/linker-flags/opt-ref.gyp
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Have to turn on function level linking here to get the function packaged
-    # as a COMDAT so that it's eligible for optimizing away. Also turn on
-    # debug information so that the symbol names for the code appear in the
-    # dump (so we can verify if they are included in the final exe).
-    {
-      'target_name': 'test_optref_default',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-      'sources': ['opt-ref.cc'],
-    },
-    {
-      'target_name': 'test_optref_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'OptimizeReferences': '1',
-        },
-      },
-      'sources': ['opt-ref.cc'],
-    },
-    {
-      'target_name': 'test_optref_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'EnableFunctionLevelLinking': 'true',
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'OptimizeReferences': '2',
-        },
-      },
-      'sources': ['opt-ref.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/outputfile.gyp b/tools/gyp/test/win/linker-flags/outputfile.gyp
deleted file mode 100644
index 1022ec2..0000000
--- a/tools/gyp/test/win/linker-flags/outputfile.gyp
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_output_exe',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\blorp.exe'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_output_exe2',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\subdir\\blorp.exe'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_output_dll',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\blorp.dll'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_output_lib',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'OutputFile': '$(OutDir)\\blorp.lib'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_output_lib2',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'OutputFile': '$(OutDir)\\subdir\\blorp.lib'
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/pdb-output.gyp b/tools/gyp/test/win/linker-flags/pdb-output.gyp
deleted file mode 100644
index 1a03c67..0000000
--- a/tools/gyp/test/win/linker-flags/pdb-output.gyp
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_pdb_output_exe',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': 'output_exe.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'test_pdb_output_dll',
-      'type': 'shared_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': 'output_dll.pdb',
-        },
-      },
-    },
-    {
-      'target_name': 'test_pdb_output_disabled',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '0'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'false',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/pgo.gyp b/tools/gyp/test/win/linker-flags/pgo.gyp
deleted file mode 100644
index da32639..0000000
--- a/tools/gyp/test/win/linker-flags/pgo.gyp
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'pgd_basename': 'test_pgo',
-  },
-  'targets': [
-    # In the PGO (Profile-Guided Optimization) build flow, we need to build the
-    # target binary multiple times. To implement this flow with gyp, here we
-    # define multiple 'executable' targets, each of which represents one build
-    # particular build/profile stage. On tricky part to do this is that these
-    # 'executable' targets should share the code itself so that profile data
-    # can be reused among these 'executable' files. In other words, the only
-    # differences among below 'executable' targets are:
-    #   1) PGO (Profile-Guided Optimization) database, and
-    #   2) linker options.
-    # The following static library contains all the logic including entry point.
-    # Basically we don't need to rebuild this target once we enter profiling
-    # phase of PGO.
-    {
-      'target_name': 'test_pgo_main',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WholeProgramOptimization': 'true',  # /GL
-        },
-        'VCLibrarianTool': {
-          'LinkTimeCodeGeneration': 'true',
-        },
-      },
-      'link_settings': {
-        'msvs_settings': {
-          'VCLinkerTool': {
-            'ProfileGuidedDatabase': '$(OutDir)\\<(pgd_basename).pgd',
-            'TargetMachine': '1',  # x86 - 32
-            'SubSystem': '1',      # /SUBSYSTEM:CONSOLE
-
-            # Tell ninja generator not to pass /ManifestFile:<filename> option
-            # to the linker, because it causes LNK1268 error in PGO biuld.
-            'GenerateManifest': 'false',
-            # We need to specify 'libcmt.lib' here so that the linker can pick
-            # up a valid entry point.
-            'AdditionalDependencies': [
-              'libcmt.lib',
-            ],
-          },
-        },
-      },
-      'sources': [
-        'inline_test.h',
-        'inline_test.cc',
-        'inline_test_main.cc',
-      ],
-    },
-    {
-      'target_name': 'test_pgo_instrument',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkTimeCodeGeneration': '2',
-        },
-      },
-      'dependencies': [
-        'test_pgo_main',
-      ],
-    },
-    {
-      'target_name': 'gen_profile_guided_database',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'action_main',
-          'inputs': [],
-          'outputs': [
-            '$(OutDir)\\<(pgd_basename).pgd',
-          ],
-          'action': [
-            'python', 'update_pgd.py',
-            '--vcbindir', '$(VCInstallDir)bin',
-            '--exe', '$(OutDir)\\test_pgo_instrument.exe',
-            '--pgd', '$(OutDir)\\<(pgd_basename).pgd',
-          ],
-        },
-      ],
-      'dependencies': [
-        'test_pgo_instrument',
-      ],
-    },
-    {
-      'target_name': 'test_pgo_optimize',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkTimeCodeGeneration': '3',
-        },
-      },
-      'sources': [
-        '$(OutDir)\\<(pgd_basename).pgd',
-      ],
-      'dependencies': [
-        'test_pgo_main',
-        'gen_profile_guided_database',
-      ],
-    },
-    {
-      'target_name': 'test_pgo_update',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'LinkTimeCodeGeneration': '4',
-        },
-      },
-      'sources': [
-        '$(OutDir)\\<(pgd_basename).pgd',
-      ],
-      'dependencies': [
-        'test_pgo_main',
-      ],
-    },
-    # A helper target to dump link.exe's command line options. We can use the
-    # output to determine if PGO (Profile-Guided Optimization) is available on
-    # the test environment.
-    {
-      'target_name': 'gen_linker_option',
-      'type': 'none',
-      'msvs_cygwin_shell': 0,
-      'actions': [
-        {
-          'action_name': 'action_main',
-          'inputs': [],
-          'outputs': [
-            '$(OutDir)\\linker_options.txt',
-          ],
-          'action': [
-            'cmd.exe', '/c link.exe > $(OutDir)\\linker_options.txt & exit 0',
-          ],
-        },
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/profile.gyp b/tools/gyp/test/win/linker-flags/profile.gyp
deleted file mode 100644
index d60a700..0000000
--- a/tools/gyp/test/win/linker-flags/profile.gyp
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Verify that 'Profile' option correctly makes it to LINK steup in Ninja
-    {
-      'target_name': 'test_profile_true',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'Profile': 'true',
-          'GenerateDebugInformation': 'true',
-        },
-      },
-    },
-    {
-      'target_name': 'test_profile_false',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'Profile': 'false',
-          'GenerateDebugInformation': 'true',
-        },
-      },
-    },
-    {
-      'target_name': 'test_profile_default',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/program-database.gyp b/tools/gyp/test/win/linker-flags/program-database.gyp
deleted file mode 100644
index 6e60ac0..0000000
--- a/tools/gyp/test/win/linker-flags/program-database.gyp
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    # Verify that 'ProgramDatabaseFile' option correctly makes it to LINK
-    # step in Ninja.
-    {
-      # Verify that VC macros and windows paths work correctly.
-      'target_name': 'test_pdb_outdir',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '$(OutDir)\\name_outdir.pdb',
-        },
-      },
-    },
-    {
-      # Verify that GYP macros and POSIX paths work correctly.
-      'target_name': 'test_pdb_proddir',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3'
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-          'ProgramDatabaseFile': '<(PRODUCT_DIR)/name_proddir.pdb',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/safeseh.gyp b/tools/gyp/test/win/linker-flags/safeseh.gyp
deleted file mode 100644
index d4a6207..0000000
--- a/tools/gyp/test/win/linker-flags/safeseh.gyp
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'target_defaults': {
-    'configurations': {
-      'Default': {
-        'msvs_configuration_platform': 'Win32',
-      },
-      'Default_x64': {
-        'inherit_from': ['Default'],
-        'msvs_configuration_platform': 'x64',
-      },
-    },
-  },
-  'targets': [
-    {
-      'target_name': 'test_safeseh_default',
-      'type': 'executable',
-      'msvs_settings': {
-        # By default, msvs passes /SAFESEH for Link, but not for MASM.  In
-        # order for test_safeseh_default to link successfully, we need to
-        # explicitly specify /SAFESEH for MASM.
-        'MASM': {
-          'UseSafeExceptionHandlers': 'true',
-        },
-      },
-      'sources': [
-        'safeseh_hello.cc',
-        'safeseh_zero.asm',
-      ],
-    },
-    {
-      'target_name': 'test_safeseh_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'ImageHasSafeExceptionHandlers': 'false',
-        },
-      },
-      'sources': [
-        'safeseh_hello.cc',
-        'safeseh_zero.asm',
-      ],
-    },
-    {
-      'target_name': 'test_safeseh_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'ImageHasSafeExceptionHandlers': 'true',
-        },
-        'MASM': {
-          'UseSafeExceptionHandlers': 'true',
-        },
-      },
-      'sources': [
-        'safeseh_hello.cc',
-        'safeseh_zero.asm',
-      ],
-    },
-    {
-      # x64 targets cannot have ImageHasSafeExceptionHandlers or
-      # UseSafeExceptionHandlers set.
-      'target_name': 'test_safeseh_x64',
-      'type': 'executable',
-      'configurations': {
-        'Default': {
-          'msvs_target_platform': 'x64',
-        },
-      },
-      'sources': [
-        'safeseh_hello.cc',
-        'safeseh_zero64.asm',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/safeseh_hello.cc b/tools/gyp/test/win/linker-flags/safeseh_hello.cc
deleted file mode 100644
index 6141300..0000000
--- a/tools/gyp/test/win/linker-flags/safeseh_hello.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern "C" {
-int zero(void);
-}
-
-int main() {
-  return zero();
-}
diff --git a/tools/gyp/test/win/linker-flags/safeseh_zero.asm b/tools/gyp/test/win/linker-flags/safeseh_zero.asm
deleted file mode 100644
index 62da0df..0000000
--- a/tools/gyp/test/win/linker-flags/safeseh_zero.asm
+++ /dev/null
@@ -1,10 +0,0 @@
-.MODEL FLAT, C
-.CODE
-
-PUBLIC  zero
-zero    PROC
-        xor     eax, eax
-        ret     0
-zero    ENDP
-
-END
diff --git a/tools/gyp/test/win/linker-flags/safeseh_zero64.asm b/tools/gyp/test/win/linker-flags/safeseh_zero64.asm
deleted file mode 100644
index a4740c0..0000000
--- a/tools/gyp/test/win/linker-flags/safeseh_zero64.asm
+++ /dev/null
@@ -1,9 +0,0 @@
-.CODE
-
-PUBLIC  zero
-zero    PROC
-        xor     eax, eax
-        ret     0
-zero    ENDP
-
-END
diff --git a/tools/gyp/test/win/linker-flags/stacksize.gyp b/tools/gyp/test/win/linker-flags/stacksize.gyp
deleted file mode 100644
index bba44ca..0000000
--- a/tools/gyp/test/win/linker-flags/stacksize.gyp
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'test_default',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_set_reserved_size',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'StackReserveSize': 2097152,  # 2MB
-        }
-      },
-    },
-    {
-      'target_name': 'test_set_commit_size',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'StackCommitSize': 8192,  # 8KB
-        }
-      },
-    },
-    {
-      'target_name': 'test_set_both',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'StackReserveSize': 2097152,  # 2MB
-          'StackCommitSize': 8192,  # 8KB
-        }
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/subdir/library.gyp b/tools/gyp/test/win/linker-flags/subdir/library.gyp
deleted file mode 100644
index 519577f..0000000
--- a/tools/gyp/test/win/linker-flags/subdir/library.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_lib',
-      'type': 'static_library',
-      'sources': ['../library-directories-define.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/subsystem-windows.cc b/tools/gyp/test/win/linker-flags/subsystem-windows.cc
deleted file mode 100644
index ac99da8..0000000
--- a/tools/gyp/test/win/linker-flags/subsystem-windows.cc
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-
-int CALLBACK WinMain(HINSTANCE, HINSTANCE, LPSTR, int) {
-  return 0;
-}
diff --git a/tools/gyp/test/win/linker-flags/subsystem.gyp b/tools/gyp/test/win/linker-flags/subsystem.gyp
deleted file mode 100644
index 63f072a..0000000
--- a/tools/gyp/test/win/linker-flags/subsystem.gyp
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_console_ok',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '1'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_console_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '1'
-        }
-      },
-      'sources': ['subsystem-windows.cc'],
-    },
-    {
-      'target_name': 'test_windows_ok',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '2'
-        }
-      },
-      'sources': ['subsystem-windows.cc'],
-    },
-    {
-      'target_name': 'test_windows_fail',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '2'
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_console_xp',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '1',
-          'MinimumRequiredVersion': '5.01',  # XP.
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_windows_xp',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'SubSystem': '2',
-          'MinimumRequiredVersion': '5.01',  # XP.
-        }
-      },
-      'sources': ['subsystem-windows.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/target-machine.gyp b/tools/gyp/test/win/linker-flags/target-machine.gyp
deleted file mode 100644
index 3027192..0000000
--- a/tools/gyp/test/win/linker-flags/target-machine.gyp
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_target_link_x86',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TargetMachine': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_target_link_x64',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TargetMachine': '17',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_target_lib_x86',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'TargetMachine': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_target_lib_x64',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'TargetMachine': '17',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/tsaware.gyp b/tools/gyp/test/win/linker-flags/tsaware.gyp
deleted file mode 100644
index 7ffc742..0000000
--- a/tools/gyp/test/win/linker-flags/tsaware.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_tsaware_no',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TerminalServerAware': '1',
-        }
-      },
-      'sources': ['hello.cc'],
-    },
-    {
-      'target_name': 'test_tsaware_yes',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TerminalServerAware': '2',
-        },
-      },
-      'sources': ['hello.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/unsupported-manifest.gyp b/tools/gyp/test/win/linker-flags/unsupported-manifest.gyp
deleted file mode 100644
index 5549e7c..0000000
--- a/tools/gyp/test/win/linker-flags/unsupported-manifest.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_unsupported',
-      'type': 'executable',
-      'sources': ['manifest-in-comment.cc'],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/linker-flags/update_pgd.py b/tools/gyp/test/win/linker-flags/update_pgd.py
deleted file mode 100644
index 176e9e5..0000000
--- a/tools/gyp/test/win/linker-flags/update_pgd.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from optparse import OptionParser
-import glob
-import os
-import subprocess
-
-parser = OptionParser()
-parser.add_option('--exe', dest='exe')
-parser.add_option('--vcbindir', dest='vcbindir')
-parser.add_option('--pgd', dest='pgd')
-(options, args) = parser.parse_args()
-
-# Instrumented binaries fail to run unless the Visual C++'s bin dir is included
-# in the PATH environment variable.
-os.environ['PATH'] = os.environ['PATH'] + os.pathsep + options.vcbindir
-
-# Run Instrumented binary.  The profile will be recorded into *.pgc file.
-subprocess.call([options.exe])
-
-# Merge *.pgc files into a *.pgd (Profile-Guided Database) file.
-subprocess.call(['pgomgr', '/merge', options.pgd])
-
-# *.pgc files are no longer necessary. Clear all of them.
-pgd_file = os.path.abspath(options.pgd)
-pgd_dir = os.path.dirname(pgd_file)
-(pgd_basename, _) = os.path.splitext(os.path.basename(pgd_file))
-pgc_filepattern = os.path.join(pgd_dir, '%s!*.pgc' % pgd_basename)
-pgc_files= glob.glob(pgc_filepattern)
-for pgc_file in pgc_files:
-  os.unlink(pgc_file)
diff --git a/tools/gyp/test/win/linker-flags/warn-as-error.gyp b/tools/gyp/test/win/linker-flags/warn-as-error.gyp
deleted file mode 100644
index 83c67e9..0000000
--- a/tools/gyp/test/win/linker-flags/warn-as-error.gyp
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_on',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TreatLinkerWarningAsErrors': 'true',
-        }
-      },
-      'sources': ['link-warning.cc'],
-    },
-    {
-      'target_name': 'test_off',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'TreatLinkerWarningAsErrors': 'false',
-        }
-      },
-      'sources': ['link-warning.cc'],
-    },
-    {
-      'target_name': 'test_default',
-      'type': 'executable',
-      'sources': ['link-warning.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/linker-flags/x.cc b/tools/gyp/test/win/linker-flags/x.cc
deleted file mode 100644
index f5f763b..0000000
--- a/tools/gyp/test/win/linker-flags/x.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int x() {
-  return 1;
-}
diff --git a/tools/gyp/test/win/linker-flags/y.cc b/tools/gyp/test/win/linker-flags/y.cc
deleted file mode 100644
index bd88411..0000000
--- a/tools/gyp/test/win/linker-flags/y.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int y() {
-  return 2;
-}
diff --git a/tools/gyp/test/win/linker-flags/z.cc b/tools/gyp/test/win/linker-flags/z.cc
deleted file mode 100644
index 8a43501..0000000
--- a/tools/gyp/test/win/linker-flags/z.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int z() {
-  return 3;
-}
diff --git a/tools/gyp/test/win/long-command-line/function.cc b/tools/gyp/test/win/long-command-line/function.cc
deleted file mode 100644
index af44b2c..0000000
--- a/tools/gyp/test/win/long-command-line/function.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int func() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/long-command-line/hello.cc b/tools/gyp/test/win/long-command-line/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/win/long-command-line/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/long-command-line/long-command-line.gyp b/tools/gyp/test/win/long-command-line/long-command-line.gyp
deleted file mode 100644
index 964c94f..0000000
--- a/tools/gyp/test/win/long-command-line/long-command-line.gyp
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'longexe',
-      'type': 'executable',
-      'msvs_settings': {
-        # Use this as a simple way to get a long command.
-        'VCCLCompilerTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-        'VCLinkerTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-      },
-      'sources': [
-        'hello.cc',
-      ],
-    },
-    {
-      'target_name': 'longlib',
-      'type': 'static_library',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-        'VCLibrarianTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-      },
-      'sources': [
-        'function.cc',
-      ],
-    },
-    {
-      'target_name': 'longdll',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-        'VCLinkerTool': {
-          'AdditionalOptions': '/nologo ' * 8000,
-        },
-      },
-      'sources': [
-        'hello.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/ml-safeseh/a.asm b/tools/gyp/test/win/ml-safeseh/a.asm
deleted file mode 100644
index 62da0df..0000000
--- a/tools/gyp/test/win/ml-safeseh/a.asm
+++ /dev/null
@@ -1,10 +0,0 @@
-.MODEL FLAT, C
-.CODE
-
-PUBLIC  zero
-zero    PROC
-        xor     eax, eax
-        ret     0
-zero    ENDP
-
-END
diff --git a/tools/gyp/test/win/ml-safeseh/hello.cc b/tools/gyp/test/win/ml-safeseh/hello.cc
deleted file mode 100644
index 6141300..0000000
--- a/tools/gyp/test/win/ml-safeseh/hello.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern "C" {
-int zero(void);
-}
-
-int main() {
-  return zero();
-}
diff --git a/tools/gyp/test/win/ml-safeseh/ml-safeseh.gyp b/tools/gyp/test/win/ml-safeseh/ml-safeseh.gyp
deleted file mode 100644
index bf8618f..0000000
--- a/tools/gyp/test/win/ml-safeseh/ml-safeseh.gyp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'ml_safeseh',
-      'type': 'executable',
-      'sources': [
-        'hello.cc',
-        'a.asm',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'ImageHasSafeExceptionHandlers': 'true',
-        },
-        'MASM': {
-          'UseSafeExceptionHandlers': 'true',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/precompiled/gyptest-all.py b/tools/gyp/test/win/precompiled/gyptest-all.py
deleted file mode 100644
index 9fb5e62..0000000
--- a/tools/gyp/test/win/precompiled/gyptest-all.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that precompiled headers can be specified.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'win32':
-    test = TestGyp.TestGyp(formats=['msvs', 'ninja'], workdir='workarea_all')
-    test.run_gyp('hello.gyp')
-    test.build('hello.gyp', 'hello')
-    test.run_built_executable('hello', stdout="Hello, world!\nHello, two!\n")
-    test.up_to_date('hello.gyp', test.ALL)
-    test.pass_test()
diff --git a/tools/gyp/test/win/precompiled/hello.c b/tools/gyp/test/win/precompiled/hello.c
deleted file mode 100644
index ffb47bf..0000000
--- a/tools/gyp/test/win/precompiled/hello.c
+++ /dev/null
@@ -1,14 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-// Note the abscence of a stdio.h include.  This will be inserted because of the
-// precompiled header.
-
-extern int hello2();
-
-int main(void) {
-  printf("Hello, world!\n");
-  hello2();
-  return 0;
-}
diff --git a/tools/gyp/test/win/precompiled/hello.gyp b/tools/gyp/test/win/precompiled/hello.gyp
deleted file mode 100644
index 5f82c53..0000000
--- a/tools/gyp/test/win/precompiled/hello.gyp
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.c',
-        'hello2.c',
-        'precomp.c',
-      ],
-      'msvs_precompiled_header': 'stdio.h',
-      'msvs_precompiled_source': 'precomp.c',
-
-      # Required so that the printf actually causes a build failure
-      # if the pch isn't included.
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'WarningLevel': '3',
-          'WarnAsError': 'true',
-        },
-      },
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/precompiled/hello2.c b/tools/gyp/test/win/precompiled/hello2.c
deleted file mode 100644
index d6d5311..0000000
--- a/tools/gyp/test/win/precompiled/hello2.c
+++ /dev/null
@@ -1,13 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-// Unlike hello.c, this file specifies the headers.
-
-#include <windows.h>
-#include <stdio.h>
-
-int hello2() {
-  printf("Hello, two!\n");
-  return 0;
-}
diff --git a/tools/gyp/test/win/precompiled/precomp.c b/tools/gyp/test/win/precompiled/precomp.c
deleted file mode 100644
index 517c61a..0000000
--- a/tools/gyp/test/win/precompiled/precomp.c
+++ /dev/null
@@ -1,8 +0,0 @@
-/* Copyright (c) 2011 Google Inc. All rights reserved.
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file. */
-
-// The precompiled header does not have to be the first one in the file.
-
-#include <windows.h>
-#include <stdio.h>
diff --git a/tools/gyp/test/win/rc-build/Resource.h b/tools/gyp/test/win/rc-build/Resource.h
deleted file mode 100644
index 137acf3..0000000
--- a/tools/gyp/test/win/rc-build/Resource.h
+++ /dev/null
@@ -1,26 +0,0 @@
-//{{NO_DEPENDENCIES}}
-// Microsoft Visual C++ generated include file.
-// Used by hello.rc
-//
-
-#define IDS_APP_TITLE			103
-
-#define IDR_MAINFRAME			128
-#define IDI_HELLO			107
-#define IDI_SMALL				108
-#define IDC_HELLO			109
-#ifndef IDC_STATIC
-#define IDC_STATIC				-1
-#endif
-// Next default values for new objects
-//
-#ifdef APSTUDIO_INVOKED
-#ifndef APSTUDIO_READONLY_SYMBOLS
-
-#define _APS_NO_MFC					130
-#define _APS_NEXT_RESOURCE_VALUE	129
-#define _APS_NEXT_COMMAND_VALUE		32771
-#define _APS_NEXT_CONTROL_VALUE		1000
-#define _APS_NEXT_SYMED_VALUE		110
-#endif
-#endif
diff --git a/tools/gyp/test/win/rc-build/hello.cpp b/tools/gyp/test/win/rc-build/hello.cpp
deleted file mode 100644
index f552ca1..0000000
--- a/tools/gyp/test/win/rc-build/hello.cpp
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#define WIN32_LEAN_AND_MEAN
-#include <windows.h>
-#include <tchar.h>
-
-#include "resource.h"
-
-#define MAX_LOADSTRING 100
-
-TCHAR szTitle[MAX_LOADSTRING];
-TCHAR szWindowClass[MAX_LOADSTRING];
-
-int APIENTRY _tWinMain(
-    HINSTANCE hInstance,
-    HINSTANCE hPrevInstance,
-    LPTSTR    lpCmdLine,
-    int       nCmdShow) {
-  // Make sure we can load some resources.
-  int count = 0;
-  LoadString(hInstance, IDS_APP_TITLE, szTitle, MAX_LOADSTRING);
-  if (szTitle[0] != 0) ++count;
-  LoadString(hInstance, IDC_HELLO, szWindowClass, MAX_LOADSTRING);
-  if (szWindowClass[0] != 0) ++count;
-  if (LoadIcon(hInstance, MAKEINTRESOURCE(IDI_SMALL)) != NULL) ++count;
-  if (LoadIcon(hInstance, MAKEINTRESOURCE(IDI_HELLO)) != NULL) ++count;
-  return count;
-}
diff --git a/tools/gyp/test/win/rc-build/hello.gyp b/tools/gyp/test/win/rc-build/hello.gyp
deleted file mode 100644
index 3a66357..0000000
--- a/tools/gyp/test/win/rc-build/hello.gyp
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'with_resources',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-        'VCResourceCompilerTool': {
-          'Culture' : '1033',
-        },
-      },
-      'sources': [
-        'hello.cpp',
-        'hello.rc',
-      ],
-      'libraries': [
-        'kernel32.lib',
-        'user32.lib',
-      ],
-    },
-    {
-      'target_name': 'with_resources_subdir',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-        'VCResourceCompilerTool': {
-          'Culture' : '1033',
-        },
-      },
-      'sources': [
-        'hello.cpp',
-        'subdir/hello2.rc',
-      ],
-      'libraries': [
-        'kernel32.lib',
-        'user32.lib',
-      ],
-    },
-    {
-      'target_name': 'with_include_subdir',
-      'type': 'executable',
-      'msvs_settings': {
-        'VCCLCompilerTool': {
-          'DebugInformationFormat': '3',
-        },
-        'VCLinkerTool': {
-          'GenerateDebugInformation': 'true',
-        },
-        'VCResourceCompilerTool': {
-          'Culture' : '1033',
-        },
-      },
-      'resource_include_dirs': [
-        '$(ProjectDir)\\subdir',
-      ],
-      'sources': [
-        'hello.cpp',
-        'hello3.rc',
-      ],
-      'libraries': [
-        'kernel32.lib',
-        'user32.lib',
-      ],
-    },
-    {
-      'target_name': 'resource_only_dll',
-      'type': 'shared_library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'ResourceOnlyDLL': 'true',
-        },
-      },
-      'sources': [
-        'hello.rc',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/rc-build/hello.h b/tools/gyp/test/win/rc-build/hello.h
deleted file mode 100644
index e60f2eb..0000000
--- a/tools/gyp/test/win/rc-build/hello.h
+++ /dev/null
@@ -1,3 +0,0 @@
-#pragma once
-
-#include "resource.h"
diff --git a/tools/gyp/test/win/rc-build/hello.ico b/tools/gyp/test/win/rc-build/hello.ico
deleted file mode 100644
index d551aa3..0000000
--- a/tools/gyp/test/win/rc-build/hello.ico
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/win/rc-build/hello.rc b/tools/gyp/test/win/rc-build/hello.rc
deleted file mode 100644
index c9a7af6..0000000
--- a/tools/gyp/test/win/rc-build/hello.rc
+++ /dev/null
@@ -1,86 +0,0 @@
-//Microsoft Visual C++ generated resource script.
-//
-#include "resource.h"
-
-#define APSTUDIO_READONLY_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 2 resource.
-//
-#ifndef APSTUDIO_INVOKED
-#include "targetver.h"
-#endif
-#define APSTUDIO_HIDDEN_SYMBOLS
-#include "windows.h"
-#undef APSTUDIO_HIDDEN_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-#undef APSTUDIO_READONLY_SYMBOLS
-
-#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
-LANGUAGE 9, 1
-#pragma code_page(932)
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Icon
-//
-
-// Icon with lowest ID value placed first to ensure application icon
-// remains consistent on all systems.
-
-IDI_HELLO       ICON         "hello.ico"
-IDI_SMALL               ICON         "small.ico"
-
-#ifdef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// TEXTINCLUDE
-//
-1 TEXTINCLUDE
-BEGIN
-    "resource.h\0"
-END
-
-2 TEXTINCLUDE
-BEGIN
-	"#ifndef APSTUDIO_INVOKED\r\n"
-    "#include ""targetver.h""\r\n"
-    "#endif\r\n"
-    "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "#include ""windows.h""\r\n"
-    "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "\0"
-END
-
-3 TEXTINCLUDE
-BEGIN
-    "\r\n"
-    "\0"
-END
-
-#endif    // APSTUDIO_INVOKED
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// String Table
-//
-
-STRINGTABLE
-BEGIN
-   IDC_HELLO   "HELLO"
-   IDS_APP_TITLE       "hello"
-END
-
-#endif
-/////////////////////////////////////////////////////////////////////////////
-
-
-
-#ifndef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 3 resource.
-//
-
-/////////////////////////////////////////////////////////////////////////////
-#endif    // not APSTUDIO_INVOKED
diff --git a/tools/gyp/test/win/rc-build/hello3.rc b/tools/gyp/test/win/rc-build/hello3.rc
deleted file mode 100644
index c74dede..0000000
--- a/tools/gyp/test/win/rc-build/hello3.rc
+++ /dev/null
@@ -1,87 +0,0 @@
-//Microsoft Visual C++ generated resource script.
-//
-#include "include.h"
-#include "resource.h"
-
-#define APSTUDIO_READONLY_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 2 resource.
-//
-#ifndef APSTUDIO_INVOKED
-#include "targetver.h"
-#endif
-#define APSTUDIO_HIDDEN_SYMBOLS
-#include "windows.h"
-#undef APSTUDIO_HIDDEN_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-#undef APSTUDIO_READONLY_SYMBOLS
-
-#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
-LANGUAGE 9, 1
-#pragma code_page(932)
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Icon
-//
-
-// Icon with lowest ID value placed first to ensure application icon
-// remains consistent on all systems.
-
-IDI_HELLO       ICON         "hello.ico"
-IDI_SMALL               ICON         "small.ico"
-
-#ifdef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// TEXTINCLUDE
-//
-1 TEXTINCLUDE
-BEGIN
-    "resource.h\0"
-END
-
-2 TEXTINCLUDE
-BEGIN
-	"#ifndef APSTUDIO_INVOKED\r\n"
-    "#include ""targetver.h""\r\n"
-    "#endif\r\n"
-    "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "#include ""windows.h""\r\n"
-    "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "\0"
-END
-
-3 TEXTINCLUDE
-BEGIN
-    "\r\n"
-    "\0"
-END
-
-#endif    // APSTUDIO_INVOKED
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// String Table
-//
-
-STRINGTABLE
-BEGIN
-   IDC_HELLO   "HELLO"
-   IDS_APP_TITLE       "hello"
-END
-
-#endif
-/////////////////////////////////////////////////////////////////////////////
-
-
-
-#ifndef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 3 resource.
-//
-
-/////////////////////////////////////////////////////////////////////////////
-#endif    // not APSTUDIO_INVOKED
diff --git a/tools/gyp/test/win/rc-build/small.ico b/tools/gyp/test/win/rc-build/small.ico
deleted file mode 100644
index d551aa3..0000000
--- a/tools/gyp/test/win/rc-build/small.ico
+++ /dev/null
Binary files differ
diff --git a/tools/gyp/test/win/rc-build/subdir/hello2.rc b/tools/gyp/test/win/rc-build/subdir/hello2.rc
deleted file mode 100644
index 4c8eab1..0000000
--- a/tools/gyp/test/win/rc-build/subdir/hello2.rc
+++ /dev/null
@@ -1,87 +0,0 @@
-//Microsoft Visual C++ generated resource script.
-//
-#include "subdir/include.h"
-#include "resource.h"
-
-#define APSTUDIO_READONLY_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 2 resource.
-//
-#ifndef APSTUDIO_INVOKED
-#include "targetver.h"
-#endif
-#define APSTUDIO_HIDDEN_SYMBOLS
-#include "windows.h"
-#undef APSTUDIO_HIDDEN_SYMBOLS
-/////////////////////////////////////////////////////////////////////////////
-#undef APSTUDIO_READONLY_SYMBOLS
-
-#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
-LANGUAGE 9, 1
-#pragma code_page(932)
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Icon
-//
-
-// Icon with lowest ID value placed first to ensure application icon
-// remains consistent on all systems.
-
-IDI_HELLO       ICON         "hello.ico"
-IDI_SMALL               ICON         "small.ico"
-
-#ifdef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// TEXTINCLUDE
-//
-1 TEXTINCLUDE
-BEGIN
-    "resource.h\0"
-END
-
-2 TEXTINCLUDE
-BEGIN
-	"#ifndef APSTUDIO_INVOKED\r\n"
-    "#include ""targetver.h""\r\n"
-    "#endif\r\n"
-    "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "#include ""windows.h""\r\n"
-    "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"
-    "\0"
-END
-
-3 TEXTINCLUDE
-BEGIN
-    "\r\n"
-    "\0"
-END
-
-#endif    // APSTUDIO_INVOKED
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// String Table
-//
-
-STRINGTABLE
-BEGIN
-   IDC_HELLO   "HELLO"
-   IDS_APP_TITLE       "hello"
-END
-
-#endif
-/////////////////////////////////////////////////////////////////////////////
-
-
-
-#ifndef APSTUDIO_INVOKED
-/////////////////////////////////////////////////////////////////////////////
-//
-// Generated from the TEXTINCLUDE 3 resource.
-//
-
-/////////////////////////////////////////////////////////////////////////////
-#endif    // not APSTUDIO_INVOKED
diff --git a/tools/gyp/test/win/rc-build/subdir/include.h b/tools/gyp/test/win/rc-build/subdir/include.h
deleted file mode 100644
index f15c48b..0000000
--- a/tools/gyp/test/win/rc-build/subdir/include.h
+++ /dev/null
@@ -1 +0,0 @@
-// Just exists to make sure it can be included.
diff --git a/tools/gyp/test/win/rc-build/targetver.h b/tools/gyp/test/win/rc-build/targetver.h
deleted file mode 100644
index f583181..0000000
--- a/tools/gyp/test/win/rc-build/targetver.h
+++ /dev/null
@@ -1,24 +0,0 @@
-#pragma once
-
-// The following macros define the minimum required platform.  The minimum required platform
-// is the earliest version of Windows, Internet Explorer etc. that has the necessary features to run
-// your application.  The macros work by enabling all features available on platform versions up to and
-// including the version specified.
-
-// Modify the following defines if you have to target a platform prior to the ones specified below.
-// Refer to MSDN for the latest info on corresponding values for different platforms.
-#ifndef WINVER                          // Specifies that the minimum required platform is Windows Vista.
-#define WINVER 0x0600           // Change this to the appropriate value to target other versions of Windows.
-#endif
-
-#ifndef _WIN32_WINNT            // Specifies that the minimum required platform is Windows Vista.
-#define _WIN32_WINNT 0x0600     // Change this to the appropriate value to target other versions of Windows.
-#endif
-
-#ifndef _WIN32_WINDOWS          // Specifies that the minimum required platform is Windows 98.
-#define _WIN32_WINDOWS 0x0410 // Change this to the appropriate value to target Windows Me or later.
-#endif
-
-#ifndef _WIN32_IE                       // Specifies that the minimum required platform is Internet Explorer 7.0.
-#define _WIN32_IE 0x0700        // Change this to the appropriate value to target other versions of IE.
-#endif
diff --git a/tools/gyp/test/win/shard/hello.cc b/tools/gyp/test/win/shard/hello.cc
deleted file mode 100644
index a9dce62..0000000
--- a/tools/gyp/test/win/shard/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/shard/hello1.cc b/tools/gyp/test/win/shard/hello1.cc
deleted file mode 100644
index 0eccf28..0000000
--- a/tools/gyp/test/win/shard/hello1.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int f1() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/shard/hello2.cc b/tools/gyp/test/win/shard/hello2.cc
deleted file mode 100644
index 23fcb54..0000000
--- a/tools/gyp/test/win/shard/hello2.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int f2() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/shard/hello3.cc b/tools/gyp/test/win/shard/hello3.cc
deleted file mode 100644
index a72e2ef..0000000
--- a/tools/gyp/test/win/shard/hello3.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int f3() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/shard/hello4.cc b/tools/gyp/test/win/shard/hello4.cc
deleted file mode 100644
index a94df19..0000000
--- a/tools/gyp/test/win/shard/hello4.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int f4() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/shard/shard.gyp b/tools/gyp/test/win/shard/shard.gyp
deleted file mode 100644
index eac45fc..0000000
--- a/tools/gyp/test/win/shard/shard.gyp
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'shard',
-      'type': 'static_library',
-      'msvs_shard': 4,
-      'sources': [
-        'hello1.cc',
-        'hello2.cc',
-        'hello3.cc',
-        'hello4.cc',
-      ],
-      'product_dir': '<(PRODUCT_DIR)',
-    },
-    {
-      'target_name': 'refs_to_shard',
-      'type': 'executable',
-      'dependencies': [
-        # Make sure references are correctly updated.
-        'shard',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/shard/shard_ref.gyp b/tools/gyp/test/win/shard/shard_ref.gyp
deleted file mode 100644
index 3ec8d76..0000000
--- a/tools/gyp/test/win/shard/shard_ref.gyp
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'refs_to_shard_external_lib',
-      'type': 'static_library',
-      'dependencies': [
-        # Make sure references in other files are updated correctly.
-        'shard.gyp:shard',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-    },
-    {
-      'target_name': 'refs_to_shard_external_exe',
-      'type': 'executable',
-      'dependencies': [
-        # Make sure references in other files are updated correctly.
-        'shard.gyp:shard',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-    },
-    {
-      'target_name': 'refs_to_shard_external_dll',
-      'type': 'shared_library',
-      'dependencies': [
-        # Make sure references in other files are updated correctly.
-        'shard.gyp:shard',
-      ],
-      'sources': [
-        'hello.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/system-include/bar/header.h b/tools/gyp/test/win/system-include/bar/header.h
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/system-include/bar/header.h
+++ /dev/null
diff --git a/tools/gyp/test/win/system-include/common/commonheader.h b/tools/gyp/test/win/system-include/common/commonheader.h
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/system-include/common/commonheader.h
+++ /dev/null
diff --git a/tools/gyp/test/win/system-include/foo/header.h b/tools/gyp/test/win/system-include/foo/header.h
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/system-include/foo/header.h
+++ /dev/null
diff --git a/tools/gyp/test/win/system-include/main.cc b/tools/gyp/test/win/system-include/main.cc
deleted file mode 100644
index b04ea8a..0000000
--- a/tools/gyp/test/win/system-include/main.cc
+++ /dev/null
@@ -1,4 +0,0 @@
-#include <commonheader.h>
-#include <header.h>
-
-int main() {}
diff --git a/tools/gyp/test/win/system-include/test.gyp b/tools/gyp/test/win/system-include/test.gyp
deleted file mode 100644
index 07f2636..0000000
--- a/tools/gyp/test/win/system-include/test.gyp
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-  'target_defaults': {
-    'msvs_settings': {
-      'VCCLCompilerTool': {
-        'WarningLevel': '4',
-        'WarnAsError': 'true',
-      },
-    },
-    'msvs_system_include_dirs': [
-      '$(ProjectName)',  # Different for each target
-      'common',  # Same for all targets
-    ],
-  },
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'executable',
-      'sources': [ 'main.cc', ],
-    },
-    {
-      'target_name': 'bar',
-      'type': 'executable',
-      'sources': [ 'main.cc', ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/win/uldi/a.cc b/tools/gyp/test/win/uldi/a.cc
deleted file mode 100644
index 0fe05d5..0000000
--- a/tools/gyp/test/win/uldi/a.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int some_function() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/uldi/b.cc b/tools/gyp/test/win/uldi/b.cc
deleted file mode 100644
index 0fe05d5..0000000
--- a/tools/gyp/test/win/uldi/b.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int some_function() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/uldi/dll.cc b/tools/gyp/test/win/uldi/dll.cc
deleted file mode 100644
index 93a6c19..0000000
--- a/tools/gyp/test/win/uldi/dll.cc
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-__declspec(dllexport) void SomeFunction() {
-}
diff --git a/tools/gyp/test/win/uldi/exe.cc b/tools/gyp/test/win/uldi/exe.cc
deleted file mode 100644
index b3039ac..0000000
--- a/tools/gyp/test/win/uldi/exe.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/uldi/main.cc b/tools/gyp/test/win/uldi/main.cc
deleted file mode 100644
index 81b46d8..0000000
--- a/tools/gyp/test/win/uldi/main.cc
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-extern int some_function();
-
-int main() {
-  some_function();
-  return 0;
-}
diff --git a/tools/gyp/test/win/uldi/uldi-depending-on-module.gyp b/tools/gyp/test/win/uldi/uldi-depending-on-module.gyp
deleted file mode 100644
index 3e34de8..0000000
--- a/tools/gyp/test/win/uldi/uldi-depending-on-module.gyp
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'an_exe',
-      'type': 'executable',
-      'sources': ['exe.cc'],
-      'dependencies': [
-        'a_dll',
-      ],
-    },
-    {
-      'target_name': 'a_dll',
-      'type': 'shared_library',
-      'sources': ['dll.cc'],
-      'dependencies': [
-        'a_lib',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'UseLibraryDependencyInputs': 'true'
-        },
-      },
-    },
-    {
-      'target_name': 'a_lib',
-      'type': 'static_library',
-      'dependencies': [
-        'a_module',
-      ],
-      'sources': ['a.cc'],
-    },
-    {
-      'target_name': 'a_module',
-      'type': 'loadable_module',
-      'sources': ['a.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/uldi/uldi.gyp b/tools/gyp/test/win/uldi/uldi.gyp
deleted file mode 100644
index c32f5e0..0000000
--- a/tools/gyp/test/win/uldi/uldi.gyp
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'lib1',
-      'type': 'static_library',
-      'sources': ['a.cc'],
-    },
-    {
-      'target_name': 'final_uldi',
-      'type': 'executable',
-      'dependencies': [
-        'lib1',
-        'lib2',
-      ],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'UseLibraryDependencyInputs': 'true'
-        },
-      },
-      'sources': ['main.cc'],
-    },
-    {
-      'target_name': 'final_no_uldi',
-      'type': 'executable',
-      'dependencies': [
-        'lib1',
-        'lib2',
-      ],
-      'sources': ['main.cc'],
-    },
-    {
-      'target_name': 'lib2',
-      'type': 'static_library',
-      # b.cc has the same named function as a.cc, but don't use the same name
-      # so that the .obj will have a different name. If the obj file has the
-      # same name, the linker will discard the obj file, invalidating the
-      # test.
-      'sources': ['b.cc'],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/as.py b/tools/gyp/test/win/vs-macros/as.py
deleted file mode 100644
index e0bc3ae..0000000
--- a/tools/gyp/test/win/vs-macros/as.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from optparse import OptionParser
-
-parser = OptionParser()
-parser.add_option('-a', dest='platform')
-parser.add_option('-o', dest='output')
-parser.add_option('-p', dest='path')
-(options, args) = parser.parse_args()
-
-f = open(options.output, 'w')
-print >>f, 'options', options
-print >>f, 'args', args
-f.close()
diff --git a/tools/gyp/test/win/vs-macros/containing-gyp.gyp b/tools/gyp/test/win/vs-macros/containing-gyp.gyp
deleted file mode 100644
index c07b639..0000000
--- a/tools/gyp/test/win/vs-macros/containing-gyp.gyp
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_expansions',
-      'msvs_cygwin_shell': 0,
-      'type': 'none',
-      'rules': [
-        {
-          'rule_name': 'assembler (gnu-compatible)',
-          'msvs_cygwin_shell': 0,
-          'msvs_quote_cmd': 0,
-          'extension': 'S',
-          'inputs': [
-            'as.py',
-          ],
-          'outputs': [
-            '$(IntDir)/$(InputName).obj',
-          ],
-          'action':
-            ['python',
-              'as.py',
-              '-a', '$(PlatformName)',
-              '-o', '$(IntDir)/$(InputName).obj',
-              '-p', '<(DEPTH)',
-              '$(InputPath)'],
-          'message': 'Building assembly language file $(InputPath)',
-          'process_outputs_as_sources': 1,
-        },
-      ],
-      'sources': [
-        'input.S',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/do_stuff.py b/tools/gyp/test/win/vs-macros/do_stuff.py
deleted file mode 100644
index 4669d31..0000000
--- a/tools/gyp/test/win/vs-macros/do_stuff.py
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-
-input = open(sys.argv[1], "r").read()
-open(sys.argv[2], "w").write(input + "Modified.")
diff --git a/tools/gyp/test/win/vs-macros/hello.cc b/tools/gyp/test/win/vs-macros/hello.cc
deleted file mode 100644
index 1711567..0000000
--- a/tools/gyp/test/win/vs-macros/hello.cc
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2012 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/win/vs-macros/input-output-macros.gyp b/tools/gyp/test/win/vs-macros/input-output-macros.gyp
deleted file mode 100644
index b4520f8..0000000
--- a/tools/gyp/test/win/vs-macros/input-output-macros.gyp
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_expansions',
-      'msvs_cygwin_shell': 0,
-      'type': 'none',
-      'rules': [
-        {
-          'rule_name': 'generate_file',
-          'extension': 'blah',
-          'inputs': [
-            'do_stuff.py',
-          ],
-          'outputs': [
-            '$(OutDir)\\<(RULE_INPUT_NAME).something',
-          ],
-          'action': ['python',
-                     'do_stuff.py',
-                     '<(RULE_INPUT_PATH)',
-                     '$(OutDir)\\<(RULE_INPUT_NAME).something',],
-        },
-      ],
-      'sources': [
-        'stuff.blah',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/input.S b/tools/gyp/test/win/vs-macros/input.S
deleted file mode 100644
index e69de29..0000000
--- a/tools/gyp/test/win/vs-macros/input.S
+++ /dev/null
diff --git a/tools/gyp/test/win/vs-macros/projectname.gyp b/tools/gyp/test/win/vs-macros/projectname.gyp
deleted file mode 100644
index 625a177..0000000
--- a/tools/gyp/test/win/vs-macros/projectname.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_expansions',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\$(ProjectName)_plus_something.exe',
-        },
-      },
-    },
-    {
-      'target_name': 'test_with_product_name',
-      'product_name': 'prod_name',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(OutDir)\\$(ProjectName)_plus_something.exe',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/stuff.blah b/tools/gyp/test/win/vs-macros/stuff.blah
deleted file mode 100644
index d438b4a..0000000
--- a/tools/gyp/test/win/vs-macros/stuff.blah
+++ /dev/null
@@ -1 +0,0 @@
-Random data file.
diff --git a/tools/gyp/test/win/vs-macros/targetext.gyp b/tools/gyp/test/win/vs-macros/targetext.gyp
deleted file mode 100644
index 11f580e..0000000
--- a/tools/gyp/test/win/vs-macros/targetext.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_targetext_executable',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\executable$(TargetExt)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetext_loadable_module',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\loadable_module$(TargetExt)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetext_shared_library',
-      'type': 'shared_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\shared_library$(TargetExt)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetext_static_library',
-      'type': 'static_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'OutputFile': '$(TargetDir)\\static_library$(TargetExt)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetext_product_extension',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'product_extension': 'library',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\product_extension$(TargetExt)',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/targetfilename.gyp b/tools/gyp/test/win/vs-macros/targetfilename.gyp
deleted file mode 100644
index 8287320..0000000
--- a/tools/gyp/test/win/vs-macros/targetfilename.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_targetfilename_executable',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetFileName)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetfilename_loadable_module',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetFileName)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetfilename_shared_library',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetFileName)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetfilename_static_library',
-      'type': 'static_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetFileName)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetfilename_product_extension',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'product_extension': 'foo',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetFileName)',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/targetname.gyp b/tools/gyp/test/win/vs-macros/targetname.gyp
deleted file mode 100644
index a53d3c0..0000000
--- a/tools/gyp/test/win/vs-macros/targetname.gyp
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_targetname',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something1.exe',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetname_with_prefix',
-      'product_prefix': 'prod_prefix',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something2.exe',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetname_with_prodname',
-      'product_name': 'prod_name',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something3.exe',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetname_with_prodname_with_prefix',
-      'product_name': 'prod_name',
-      'product_prefix': 'prod_prefix',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetDir)\\$(TargetName)_plus_something4.exe',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/targetpath.gyp b/tools/gyp/test/win/vs-macros/targetpath.gyp
deleted file mode 100644
index a8699ff..0000000
--- a/tools/gyp/test/win/vs-macros/targetpath.gyp
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_targetpath_executable',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetPath)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetpath_loadable_module',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetPath)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetpath_shared_library',
-      'type': 'loadable_module',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetPath)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetpath_static_library',
-      'type': 'static_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCLibrarianTool': {
-          'OutputFile': '$(TargetPath)',
-        },
-      },
-    },
-    {
-      'target_name': 'test_targetpath_product_extension',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'product_extension': 'foo',
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'OutputFile': '$(TargetPath)',
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/vs-macros/test_exists.py b/tools/gyp/test/win/vs-macros/test_exists.py
deleted file mode 100644
index f5c90ad..0000000
--- a/tools/gyp/test/win/vs-macros/test_exists.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-if not os.path.exists(sys.argv[1]):
-  raise
-open(sys.argv[2], 'w').close()
diff --git a/tools/gyp/test/win/vs-macros/vcinstalldir.gyp b/tools/gyp/test/win/vs-macros/vcinstalldir.gyp
deleted file mode 100644
index 3763a4e..0000000
--- a/tools/gyp/test/win/vs-macros/vcinstalldir.gyp
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_slash_trailing',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'actions': [
-        {
-          'action_name': 'root',
-          'inputs': [],
-          'outputs': ['out1'],
-          'action': ['python', 'test_exists.py', '$(VCInstallDir)', 'out1']
-        },
-      ],
-    },
-    {
-      'target_name': 'test_slash_dir',
-      'type': 'none',
-      'msvs_cygwin_shell': '0',
-      'actions': [
-        {
-          'action_name': 'bin',
-          'inputs': [],
-          'outputs': ['out2'],
-          'action': ['python', 'test_exists.py', '$(VCInstallDir)bin', 'out2'],
-        },
-        {
-          'action_name': 'compiler',
-          'inputs': [],
-          'outputs': ['out3'],
-          'action': [
-              'python', 'test_exists.py', '$(VCInstallDir)bin\\cl.exe', 'out3'],
-        },
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/win-tool/copies_readonly_files.gyp b/tools/gyp/test/win/win-tool/copies_readonly_files.gyp
deleted file mode 100644
index 3cd7e69..0000000
--- a/tools/gyp/test/win/win-tool/copies_readonly_files.gyp
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/dest',
-          'files': [
-            'read-only-file',
-          ],
-        },
-      ],
-    },  # target: foo
-
-    {
-      'target_name': 'bar',
-      'type': 'none',
-      'copies': [
-        {
-          'destination': '<(PRODUCT_DIR)/dest',
-          'files': [
-            'subdir/',
-          ],
-        },
-      ],
-    },  # target: bar
-  ],
-}
diff --git a/tools/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py b/tools/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
deleted file mode 100644
index 951b952..0000000
--- a/tools/gyp/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Make sure overwriting read-only files works as expected (via win-tool).
-"""
-
-import TestGyp
-
-import filecmp
-import os
-import stat
-import sys
-
-if sys.platform == 'win32':
-  test = TestGyp.TestGyp(formats=['ninja'])
-
-  # First, create the source files.
-  os.makedirs('subdir')
-  read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
-  for f in read_only_files:
-    test.write(f, 'source_contents')
-    test.chmod(f, stat.S_IREAD)
-    if os.access(f, os.W_OK):
-      test.fail_test()
-
-  # Second, create the read-only destination files. Note that we are creating
-  # them where the ninja and win-tool will try to copy them to, in order to test
-  # that copies overwrite the files.
-  os.makedirs(test.built_file_path('dest/subdir'))
-  for f in read_only_files:
-    f = os.path.join('dest', f)
-    test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
-    test.chmod(test.built_file_path(f), stat.S_IREAD)
-    # Ensure not writable.
-    if os.access(test.built_file_path(f), os.W_OK):
-      test.fail_test()
-
-  test.run_gyp('copies_readonly_files.gyp')
-  test.build('copies_readonly_files.gyp')
-
-  # Check the destination files were overwritten by ninja.
-  for f in read_only_files:
-    f = os.path.join('dest', f)
-    test.must_contain(test.built_file_path(f), 'source_contents')
-
-  # This will fail if the files are not the same mode or contents.
-  for f in read_only_files:
-    if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
-      test.fail_test()
-
-  test.pass_test()
diff --git a/tools/gyp/test/win/winrt-app-type-revision/dllmain.cc b/tools/gyp/test/win/winrt-app-type-revision/dllmain.cc
deleted file mode 100644
index dedd83c..0000000
--- a/tools/gyp/test/win/winrt-app-type-revision/dllmain.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-#include <wrl.h>
-#include <wrl/wrappers/corewrappers.h>
-#include <windows.graphics.display.h>
-
-using namespace Microsoft::WRL;
-using namespace Microsoft::WRL::Wrappers;
-using namespace ABI::Windows::Foundation;
-using namespace ABI::Windows::Graphics::Display;
-
-bool TryToUseSomeWinRT() {
-  ComPtr<IDisplayPropertiesStatics> dp;
-  HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
-  HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
-  if (SUCCEEDED(hr)) {
-    float dpi = 96.0f;
-    if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
-      return true;
-    }
-  }
-  return false;
-}
-
-BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
-  return TRUE;
-}
diff --git a/tools/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp b/tools/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp
deleted file mode 100644
index 5f37b5a..0000000
--- a/tools/gyp/test/win/winrt-app-type-revision/winrt-app-type-revison.gyp
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'enable_winrt_81_revision_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '8.1'
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_82_revision_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '8.2'
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_invalid_revision_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '999'
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-      'msvs_settings': {
-        'VCLinkerTool': {
-          'AdditionalDependencies': [
-            '%(AdditionalDependencies)',
-          ],
-        },
-      },
-    },
-  ]
-}
diff --git a/tools/gyp/test/win/winrt-target-platform-version/dllmain.cc b/tools/gyp/test/win/winrt-target-platform-version/dllmain.cc
deleted file mode 100644
index d71460c..0000000
--- a/tools/gyp/test/win/winrt-target-platform-version/dllmain.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2015 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <windows.h>
-#include <wrl.h>
-#include <wrl/wrappers/corewrappers.h>
-#include <windows.graphics.display.h>
-
-using namespace Microsoft::WRL;
-using namespace Microsoft::WRL::Wrappers;
-using namespace ABI::Windows::Foundation;
-using namespace ABI::Windows::Graphics::Display;
-
-bool TryToUseSomeWinRT() {
-  ComPtr<IDisplayPropertiesStatics> dp;
-  HStringReference s(RuntimeClass_Windows_Graphics_Display_DisplayProperties);
-  HRESULT hr = GetActivationFactory(s.Get(), dp.GetAddressOf());
-  if (SUCCEEDED(hr)) {
-    float dpi = 96.0f;
-    if (SUCCEEDED(dp->get_LogicalDpi(&dpi))) {
-      return true;
-    }
-  }
-  return false;
-}
-
-BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
-  return TRUE;
-}
diff --git a/tools/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp b/tools/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp
deleted file mode 100644
index dbcfac6..0000000
--- a/tools/gyp/test/win/winrt-target-platform-version/winrt-target-platform-version.gyp
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright (c) 2015 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'enable_winrt_10_platversion_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '10.0',
-      'msvs_target_platform_version':'10.0.10240.0',
-      'msvs_target_platform_minversion':'10.0.10240.0'
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_10_platversion_nominver_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '10.0',
-      'msvs_target_platform_version':'10.0.10240.0',
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_9_platversion_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '10.0',
-      'msvs_target_platform_version':'9.0.0.0',
-      'msvs_target_platform_minversion':'9.0.0.0'
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-    {
-      'target_name': 'enable_winrt_missing_platversion_dll',
-      'type': 'shared_library',
-      'msvs_enable_winrt': 1,
-      'msvs_application_type_revision': '10.0',
-      'sources': [
-        'dllmain.cc',
-      ],
-    },
-  ]
-}
diff --git a/tools/gyp/test/xcode-ninja/list_excluded/gyptest-all.py b/tools/gyp/test/xcode-ninja/list_excluded/gyptest-all.py
deleted file mode 100644
index 2d6378a..0000000
--- a/tools/gyp/test/xcode-ninja/list_excluded/gyptest-all.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that excluded files are listed in sources_for_indexing.xcodeproj by
-default, and that the generator flag xcode_ninja_list_excluded_files can be
-used to override the default behavior.
-"""
-
-import os
-import TestGyp
-
-
-test = TestGyp.TestGyp()
-
-if test.format != 'xcode-ninja':
-  test.skip_test()
-
-
-# With the generator flag not set.
-test.run_gyp('hello_exclude.gyp')
-test.must_contain(
-  'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
-
-
-# With the generator flag set to 0.
-try:
-  os.environ['GYP_GENERATOR_FLAGS'] = 'xcode_ninja_list_excluded_files=0'
-  test.run_gyp('hello_exclude.gyp')
-finally:
-  del os.environ['GYP_GENERATOR_FLAGS']
-test.must_not_contain(
-  'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
-
-
-# With the generator flag explicitly set to 1.
-try:
-  os.environ['GYP_GENERATOR_FLAGS'] = 'xcode_ninja_list_excluded_files=1'
-  test.run_gyp('hello_exclude.gyp')
-finally:
-  del os.environ['GYP_GENERATOR_FLAGS']
-test.must_contain(
-  'sources_for_indexing.xcodeproj/project.pbxproj', 'hello_excluded.cpp')
-
-
-test.pass_test()
diff --git a/tools/gyp/test/xcode-ninja/list_excluded/hello.cpp b/tools/gyp/test/xcode-ninja/list_excluded/hello.cpp
deleted file mode 100644
index cd409da..0000000
--- a/tools/gyp/test/xcode-ninja/list_excluded/hello.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 0;
-}
diff --git a/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp b/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
deleted file mode 100644
index f5f0e8e..0000000
--- a/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'hello',
-      'type': 'executable',
-      'sources': [
-        'hello.cpp',
-        'hello_excluded.cpp',
-      ],
-      'sources!': [
-        'hello_excluded.cpp',
-      ],
-    },
-  ],
-}
diff --git a/tools/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp b/tools/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp
deleted file mode 100644
index 2115529..0000000
--- a/tools/gyp/test/xcode-ninja/list_excluded/hello_excluded.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-int main() {
-  return 42;
-}
diff --git a/tools/gyp/tools/README b/tools/gyp/tools/README
deleted file mode 100644
index 712e4ef..0000000
--- a/tools/gyp/tools/README
+++ /dev/null
@@ -1,15 +0,0 @@
-pretty_vcproj:
-  Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
-
-  They key/value pair are used to resolve vsprops name.
-
-  For example, if I want to diff the base.vcproj project:
-
-  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt
-  pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
-
-  And you can use your favorite diff tool to see the changes.
-
-  Note: In the case of base.vcproj, the original vcproj is one level up the generated one.
-        I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
-        before you perform the diff.
\ No newline at end of file
diff --git a/tools/gyp/tools/Xcode/README b/tools/gyp/tools/Xcode/README
deleted file mode 100644
index 2492a2c..0000000
--- a/tools/gyp/tools/Xcode/README
+++ /dev/null
@@ -1,5 +0,0 @@
-Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in
-
-~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-
-and restart Xcode.
\ No newline at end of file
diff --git a/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec
deleted file mode 100644
index 85e2e26..0000000
--- a/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
-	gyp.pbfilespec
-	GYP source file spec for Xcode 3
-
-	There is not much documentation available regarding the format
-	of .pbfilespec files. As a starting point, see for instance the
-	outdated documentation at:
-	http://maxao.free.fr/xcode-plugin-interface/specifications.html
-	and the files in:
-	/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
-	Place this file in directory:
-	~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
-	{
-		Identifier = sourcecode.gyp;
-		BasedOn = sourcecode;
-		Name = "GYP Files";
-		Extensions = ("gyp", "gypi");
-		MIMETypes = ("text/gyp");
-		Language = "xcode.lang.gyp";
-		IsTextFile = YES;
-		IsSourceFile = YES;
-	}
-)
diff --git a/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec b/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec
deleted file mode 100644
index 3b3506d..0000000
--- a/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
-	Copyright (c) 2011 Google Inc. All rights reserved.
-	Use of this source code is governed by a BSD-style license that can be
-	found in the LICENSE file.
-	
-	gyp.xclangspec
-	GYP language specification for Xcode 3
-
-	There is not much documentation available regarding the format
-	of .xclangspec files. As a starting point, see for instance the
-	outdated documentation at:
-	http://maxao.free.fr/xcode-plugin-interface/specifications.html
-	and the files in:
-	/Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
-	Place this file in directory:
-	~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
-
-    {
-        Identifier = "xcode.lang.gyp.keyword";
-        Syntax = {
-            Words = (
-                "and",
-                "or",
-                "<!",
-                "<",
-             );
-            Type = "xcode.syntax.keyword";
-        };
-    },
-
-    {
-        Identifier = "xcode.lang.gyp.target.declarator";
-        Syntax = {
-        	Words = (
-        		"'target_name'",
-        	);
-            Type = "xcode.syntax.identifier.type";
-        };
-    },
-
-	{
-		Identifier = "xcode.lang.gyp.string.singlequote";
-		Syntax = {
-			IncludeRules = (
-				"xcode.lang.string",
-				"xcode.lang.gyp.keyword",
-				"xcode.lang.number",
-			);
-			Start = "'";
-			End = "'";
-		};
-	},
-	
-	{
-		Identifier = "xcode.lang.gyp.comma";
-		Syntax = {
-			Words = ( ",", );
-			
-		};
-	},
-
-	{
-		Identifier = "xcode.lang.gyp";
-		Description = "GYP Coloring";
-		BasedOn = "xcode.lang.simpleColoring";
-		IncludeInMenu = YES;
-		Name = "GYP";
-		Syntax = {
-			Tokenizer = "xcode.lang.gyp.lexer.toplevel";
-			IncludeRules = (
-				"xcode.lang.gyp.dictionary",
-			);
-			Type = "xcode.syntax.plain";
-		};
-	},
-
-	// The following rule returns tokens to the other rules
-	{
-		Identifier = "xcode.lang.gyp.lexer";
-		Syntax = {
-			IncludeRules = (
-				"xcode.lang.gyp.comment",
-				"xcode.lang.string",
-				'xcode.lang.gyp.targetname.declarator',
-				"xcode.lang.gyp.string.singlequote",
-				"xcode.lang.number",
-				"xcode.lang.gyp.comma",
-			);
-		};
-	},
-
-	{
-		Identifier = "xcode.lang.gyp.lexer.toplevel";
-		Syntax = {
-			IncludeRules = (
-				"xcode.lang.gyp.comment",
-			);
-		};
-	},
-
-	{
-        Identifier = "xcode.lang.gyp.assignment";
-        Syntax = {
-            Tokenizer = "xcode.lang.gyp.lexer";
-            Rules = (
-            	"xcode.lang.gyp.assignment.lhs",
-            	":",
-                "xcode.lang.gyp.assignment.rhs",
-            );
-        };
-       
-    },
-    
-    {
-        Identifier = "xcode.lang.gyp.target.declaration";
-        Syntax = {
-            Tokenizer = "xcode.lang.gyp.lexer";
-            Rules = (
-                "xcode.lang.gyp.target.declarator",
-                ":",
-                "xcode.lang.gyp.target.name",
-            );
-        };
-   },
-   
-   {
-        Identifier = "xcode.lang.gyp.target.name";
-        Syntax = {
-            Tokenizer = "xcode.lang.gyp.lexer";
-            Rules = (
-                "xcode.lang.gyp.string.singlequote",
-            );
-        	Type = "xcode.syntax.definition.function";
-        };
-    },
-    
-	{
-        Identifier = "xcode.lang.gyp.assignment.lhs";
-        Syntax = {
-            Tokenizer = "xcode.lang.gyp.lexer";
-            Rules = (
-            	"xcode.lang.gyp.string.singlequote",
-            );
-         	Type = "xcode.syntax.identifier.type";
-        };
-    },
-    
-    {
-        Identifier = "xcode.lang.gyp.assignment.rhs";
-        Syntax = {
-        	Tokenizer = "xcode.lang.gyp.lexer";
-            Rules = (
-            	"xcode.lang.gyp.string.singlequote?",
-                "xcode.lang.gyp.array?",
-				"xcode.lang.gyp.dictionary?",
-				"xcode.lang.number?",
-            );
-        };
-    },
-
-	{
-		Identifier = "xcode.lang.gyp.dictionary";
-		Syntax = {
-			Tokenizer = "xcode.lang.gyp.lexer";
-			Start = "{";
-			End = "}";
-			Foldable = YES;
-			Recursive = YES;
-			IncludeRules = (
-				"xcode.lang.gyp.target.declaration",
-				"xcode.lang.gyp.assignment",
-			);
-		};
-	},
-
-	{
-		Identifier = "xcode.lang.gyp.array";
-		Syntax = {
-			Tokenizer = "xcode.lang.gyp.lexer";
-			Start = "[";
-			End = "]";
-			Foldable = YES;
-			Recursive = YES;
-			IncludeRules = (
-				"xcode.lang.gyp.array",
-				"xcode.lang.gyp.dictionary",
-				"xcode.lang.gyp.string.singlequote",
-			);
-		};
-	},
-
-    {
-        Identifier = "xcode.lang.gyp.todo.mark";
-        Syntax = {
-            StartChars = "T";
-            Match = (
-                "^\(TODO\(.*\):[ \t]+.*\)$",       // include "TODO: " in the markers list
-            );
-            // This is the order of captures. All of the match strings above need the same order.
-            CaptureTypes = (
-                "xcode.syntax.mark"
-            );
-            Type = "xcode.syntax.comment";
-        };
-    },
-
-	{
-		Identifier = "xcode.lang.gyp.comment";
-		BasedOn = "xcode.lang.comment"; // for text macros
-		Syntax = {
-			Start = "#";
-			End = "\n";
-			IncludeRules = (
-				"xcode.lang.url",
-				"xcode.lang.url.mail",
-				"xcode.lang.comment.mark",
-				"xcode.lang.gyp.todo.mark",
-			);
-			Type = "xcode.syntax.comment";
-		};
-	},
-)
diff --git a/tools/gyp/tools/emacs/README b/tools/gyp/tools/emacs/README
deleted file mode 100644
index eeef39f..0000000
--- a/tools/gyp/tools/emacs/README
+++ /dev/null
@@ -1,12 +0,0 @@
-How to install gyp-mode for emacs:
-
-Add the following to your ~/.emacs (replace ... with the path to your gyp
-checkout).
-
-(setq load-path (cons ".../tools/emacs" load-path))
-(require 'gyp)
-
-Restart emacs (or eval-region the added lines) and you should be all set.
-
-Please note that ert is required for running the tests, which is included in
-Emacs 24, or available separately from https://github.com/ohler/ert
diff --git a/tools/gyp/tools/emacs/gyp-tests.el b/tools/gyp/tools/emacs/gyp-tests.el
deleted file mode 100644
index 11b8497..0000000
--- a/tools/gyp/tools/emacs/gyp-tests.el
+++ /dev/null
@@ -1,63 +0,0 @@
-;;; gyp-tests.el - unit tests for gyp-mode.
-
-;; Copyright (c) 2012 Google Inc. All rights reserved.
-;; Use of this source code is governed by a BSD-style license that can be
-;; found in the LICENSE file.
-
-;; The recommended way to run these tests is to run them from the command-line,
-;; with the run-unit-tests.sh script.
-
-(require 'cl)
-(require 'ert)
-(require 'gyp)
-
-(defconst samples (directory-files "testdata" t ".gyp$")
-  "List of golden samples to check")
-
-(defun fontify (filename)
-  (with-temp-buffer
-    (insert-file-contents-literally filename)
-    (gyp-mode)
-    (font-lock-fontify-buffer)
-    (buffer-string)))
-
-(defun read-golden-sample (filename)
-  (with-temp-buffer
-    (insert-file-contents-literally (concat filename ".fontified"))
-    (read (current-buffer))))
-
-(defun equivalent-face (face)
-  "For the purposes of face comparison, we're not interested in the
-   differences between certain faces. For example, the difference between
-   font-lock-comment-delimiter and font-lock-comment-face."
-  (case face
-    ((font-lock-comment-delimiter-face) font-lock-comment-face)
-    (t face)))
-
-(defun text-face-properties (s)
-  "Extract the text properties from s"
-  (let ((result (list t)))
-    (dotimes (i (length s))
-      (setq result (cons (equivalent-face (get-text-property i 'face s))
-                         result)))
-    (nreverse result)))
-
-(ert-deftest test-golden-samples ()
-  "Check that fontification produces the same results as the golden samples"
-  (dolist (sample samples)
-    (let ((golden (read-golden-sample sample))
-          (fontified (fontify sample)))
-      (should (equal golden fontified))
-      (should (equal (text-face-properties golden)
-                     (text-face-properties fontified))))))
-
-(defun create-golden-sample (filename)
-  "Create a golden sample by fontifying filename and writing out the printable
-   representation of the fontified buffer (with text properties) to the
-   FILENAME.fontified"
-  (with-temp-file (concat filename ".fontified")
-    (print (fontify filename) (current-buffer))))
-
-(defun create-golden-samples ()
-  "Recreate the golden samples"
-  (dolist (sample samples) (create-golden-sample sample)))
diff --git a/tools/gyp/tools/emacs/gyp.el b/tools/gyp/tools/emacs/gyp.el
deleted file mode 100644
index b98b155..0000000
--- a/tools/gyp/tools/emacs/gyp.el
+++ /dev/null
@@ -1,275 +0,0 @@
-;;; gyp.el - font-lock-mode support for gyp files.
-
-;; Copyright (c) 2012 Google Inc. All rights reserved.
-;; Use of this source code is governed by a BSD-style license that can be
-;; found in the LICENSE file.
-
-;; Put this somewhere in your load-path and
-;; (require 'gyp)
-
-(require 'python)
-(require 'cl)
-
-(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
-  (error (concat "python-mode must be loaded from python.el (bundled with "
-                 "recent emacsen), not from the older and less maintained "
-                 "python-mode.el")))
-
-(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
-                                                 activate)
-  "De-indent closing parens, braces, and brackets in gyp-mode."
-  (when (and (eq major-mode 'gyp-mode)
-             (string-match "^ *[])}][],)}]* *$"
-                           (buffer-substring-no-properties
-                            (line-beginning-position) (line-end-position))))
-    (setf (first python-indent-levels)
-          (- (first python-indent-levels) python-continuation-offset))))
-
-(defadvice python-indent-guess-indent-offset (around
-                                              gyp-indent-guess-indent-offset
-                                              activate)
-  "Guess correct indent offset in gyp-mode."
-  (or (and (not (eq major-mode 'gyp-mode))
-           ad-do-it)
-      (save-excursion
-        (save-restriction
-          (widen)
-          (goto-char (point-min))
-          ;; Find first line ending with an opening brace that is not a comment.
-          (or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
-                   (forward-line)
-                   (/= (current-indentation) 0)
-                   (set (make-local-variable 'python-indent-offset)
-                        (current-indentation))
-                   (set (make-local-variable 'python-continuation-offset)
-                        (current-indentation)))
-              (message "Can't guess gyp indent offset, using default: %s"
-                       python-continuation-offset))))))
-
-(define-derived-mode gyp-mode python-mode "Gyp"
-  "Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
-  ;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
-  ;; with greater positions at the top of the stack. PARSE-STATE
-  ;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
-  ;; with most nested section symbol at the front of the list.
-  (set (make-local-variable 'gyp-parse-history) '((1 . (list))))
-  (gyp-add-font-lock-keywords))
-
-(defun gyp-set-indentation ()
-  "Hook function to configure python indentation to suit gyp mode."
-  (set (make-local-variable 'python-indent-offset) 2)
-  (set (make-local-variable 'python-continuation-offset) 2)
-  (set (make-local-variable 'python-indent-guess-indent-offset) t)
-  (python-indent-guess-indent-offset))
-
-(add-hook 'gyp-mode-hook 'gyp-set-indentation)
-
-(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
-(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
-(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
-
-;;; Font-lock support
-
-(defconst gyp-dependencies-regexp
-  (regexp-opt (list "dependencies" "export_dependent_settings"))
-  "Regular expression to introduce 'dependencies' section")
-
-(defconst gyp-sources-regexp
-  (regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
-                    "libraries" "outputs" "sources"))
-  "Regular expression to introduce 'sources' sections")
-
-(defconst gyp-conditions-regexp
-  (regexp-opt (list "conditions" "target_conditions"))
-  "Regular expression to introduce conditions sections")
-
-(defconst gyp-variables-regexp
-  "^variables"
-  "Regular expression to introduce variables sections")
-
-(defconst gyp-defines-regexp
-  "^defines"
-  "Regular expression to introduce 'defines' sections")
-
-(defconst gyp-targets-regexp
-  "^targets"
-  "Regular expression to introduce 'targets' sections")
-
-(defun gyp-section-name (section)
-  "Map the sections we are interested in from SECTION to symbol.
-
-   SECTION is a string from the buffer that introduces a section.  The result is
-   a symbol representing the kind of section.
-
-   This allows us to treat (for the purposes of font-lock) several different
-   section names as the same kind of section. For example, a 'sources section
-   can be introduced by the 'sources', 'inputs', 'outputs' keyword.
-
-   'other is the default section kind when a more specific match is not made."
-  (cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
-        ((string-match-p gyp-sources-regexp section) 'sources)
-        ((string-match-p gyp-variables-regexp section) 'variables)
-        ((string-match-p gyp-conditions-regexp section) 'conditions)
-        ((string-match-p gyp-targets-regexp section) 'targets)
-        ((string-match-p gyp-defines-regexp section) 'defines)
-        (t 'other)))
-
-(defun gyp-invalidate-parse-states-after (target-point)
-  "Erase any parse information after target-point."
-  (while (> (caar gyp-parse-history) target-point)
-    (setq gyp-parse-history (cdr gyp-parse-history))))
-
-(defun gyp-parse-point ()
-  "The point of the last parse state added by gyp-parse-to."
-  (caar gyp-parse-history))
-
-(defun gyp-parse-sections ()
-  "A list of section symbols holding at the last parse state point."
-  (cdar gyp-parse-history))
-
-(defun gyp-inside-dictionary-p ()
-  "Predicate returning true if the parser is inside a dictionary."
-  (not (eq (cadar gyp-parse-history) 'list)))
-
-(defun gyp-add-parse-history (point sections)
-  "Add parse state SECTIONS to the parse history at POINT so that parsing can be
-   resumed instantly."
-  (while (>= (caar gyp-parse-history) point)
-    (setq gyp-parse-history (cdr gyp-parse-history)))
-  (setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
-
-(defun gyp-parse-to (target-point)
-  "Parses from (point) to TARGET-POINT adding the parse state information to
-   gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a
-   string literal has been parsed. Returns nil if no further parsing can be
-   done, otherwise returns the position of the start of a parsed string, leaving
-   the point at the end of the string."
-  (let ((parsing t)
-        string-start)
-    (while parsing
-      (setq string-start nil)
-      ;; Parse up to a character that starts a sexp, or if the nesting
-      ;; level decreases.
-      (let ((state (parse-partial-sexp (gyp-parse-point)
-                                       target-point
-                                       -1
-                                       t))
-            (sections (gyp-parse-sections)))
-        (if (= (nth 0 state) -1)
-            (setq sections (cdr sections)) ; pop out a level
-          (cond ((looking-at-p "['\"]") ; a string
-                 (setq string-start (point))
-                 (goto-char (scan-sexps (point) 1))
-                 (if (gyp-inside-dictionary-p)
-                     ;; Look for sections inside a dictionary
-                     (let ((section (gyp-section-name
-                                     (buffer-substring-no-properties
-                                      (+ 1 string-start)
-                                      (- (point) 1)))))
-                       (setq sections (cons section (cdr sections)))))
-                 ;; Stop after the string so it can be fontified.
-                 (setq target-point (point)))
-                ((looking-at-p "{")
-                 ;; Inside a dictionary. Increase nesting.
-                 (forward-char 1)
-                 (setq sections (cons 'unknown sections)))
-                ((looking-at-p "\\[")
-                 ;; Inside a list. Increase nesting
-                 (forward-char 1)
-                 (setq sections (cons 'list sections)))
-                ((not (eobp))
-                 ;; other
-                 (forward-char 1))))
-        (gyp-add-parse-history (point) sections)
-        (setq parsing (< (point) target-point))))
-    string-start))
-
-(defun gyp-section-at-point ()
-  "Transform the last parse state, which is a list of nested sections and return
-   the section symbol that should be used to determine font-lock information for
-   the string. Can return nil indicating the string should not have any attached
-   section."
-  (let ((sections (gyp-parse-sections)))
-    (cond
-     ((eq (car sections) 'conditions)
-      ;; conditions can occur in a variables section, but we still want to
-      ;; highlight it as a keyword.
-      nil)
-     ((and (eq (car sections) 'list)
-           (eq (cadr sections) 'list))
-      ;; conditions and sources can have items in [[ ]]
-      (caddr sections))
-     (t (cadr sections)))))
-
-(defun gyp-section-match (limit)
-  "Parse from (point) to LIMIT returning by means of match data what was
-   matched. The group of the match indicates what style font-lock should apply.
-   See also `gyp-add-font-lock-keywords'."
-  (gyp-invalidate-parse-states-after (point))
-  (let ((group nil)
-        (string-start t))
-    (while (and (< (point) limit)
-                (not group)
-                string-start)
-      (setq string-start (gyp-parse-to limit))
-      (if string-start
-          (setq group (case (gyp-section-at-point)
-                        ('dependencies 1)
-                        ('variables 2)
-                        ('conditions 2)
-                        ('sources 3)
-                        ('defines 4)
-                        (nil nil)))))
-    (if group
-        (progn
-          ;; Set the match data to indicate to the font-lock mechanism the
-          ;; highlighting to be performed.
-          (set-match-data (append (list string-start (point))
-                                  (make-list (* (1- group) 2) nil)
-                                  (list (1+ string-start) (1- (point)))))
-          t))))
-
-;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
-;;; canonical list of keywords.
-(defun gyp-add-font-lock-keywords ()
-  "Add gyp-mode keywords to font-lock mechanism."
-  ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
-  ;; so that we can do the font-locking in a single font-lock pass.
-  (font-lock-add-keywords
-   nil
-   (list
-    ;; Top-level keywords
-    (list (concat "['\"]\\("
-              (regexp-opt (list "action" "action_name" "actions" "cflags"
-                                "cflags_cc" "conditions" "configurations"
-                                "copies" "defines" "dependencies" "destination"
-                                "direct_dependent_settings"
-                                "export_dependent_settings" "extension" "files"
-                                "include_dirs" "includes" "inputs" "ldflags" "libraries"
-                                "link_settings" "mac_bundle" "message"
-                                "msvs_external_rule" "outputs" "product_name"
-                                "process_outputs_as_sources" "rules" "rule_name"
-                                "sources" "suppress_wildcard"
-                                "target_conditions" "target_defaults"
-                                "target_defines" "target_name" "toolsets"
-                                "targets" "type" "variables" "xcode_settings"))
-              "[!/+=]?\\)") 1 'font-lock-keyword-face t)
-    ;; Type of target
-    (list (concat "['\"]\\("
-              (regexp-opt (list "loadable_module" "static_library"
-                                "shared_library" "executable" "none"))
-              "\\)") 1 'font-lock-type-face t)
-    (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
-          'font-lock-function-name-face t)
-    (list 'gyp-section-match
-          (list 1 'font-lock-function-name-face t t) ; dependencies
-          (list 2 'font-lock-variable-name-face t t) ; variables, conditions
-          (list 3 'font-lock-constant-face t t) ; sources
-          (list 4 'font-lock-preprocessor-face t t)) ; preprocessor
-    ;; Variable expansion
-    (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
-    ;; Command expansion
-    (list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
-    )))
-
-(provide 'gyp)
diff --git a/tools/gyp/tools/emacs/run-unit-tests.sh b/tools/gyp/tools/emacs/run-unit-tests.sh
deleted file mode 100755
index 6e62b9b..0000000
--- a/tools/gyp/tools/emacs/run-unit-tests.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-emacs --no-site-file --no-init-file --batch \
-      --load ert.el --load gyp.el --load gyp-tests.el \
-      -f ert-run-tests-batch-and-exit
diff --git a/tools/gyp/tools/emacs/testdata/media.gyp b/tools/gyp/tools/emacs/testdata/media.gyp
deleted file mode 100644
index 29300fe..0000000
--- a/tools/gyp/tools/emacs/testdata/media.gyp
+++ /dev/null
@@ -1,1105 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'chromium_code': 1,
-    # Override to dynamically link the PulseAudio library.
-    'use_pulseaudio%': 0,
-    # Override to dynamically link the cras (ChromeOS audio) library.
-    'use_cras%': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'media',
-      'type': '<(component)',
-      'dependencies': [
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
-        '../build/temp_gyp/googleurl.gyp:googleurl',
-        '../crypto/crypto.gyp:crypto',
-        '../third_party/openmax/openmax.gyp:il',
-        '../ui/ui.gyp:ui',
-      ],
-      'defines': [
-        'MEDIA_IMPLEMENTATION',
-      ],
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'audio/android/audio_manager_android.cc',
-        'audio/android/audio_manager_android.h',
-        'audio/android/audio_track_output_android.cc',
-        'audio/android/audio_track_output_android.h',
-        'audio/android/opensles_input.cc',
-        'audio/android/opensles_input.h',
-        'audio/android/opensles_output.cc',
-        'audio/android/opensles_output.h',
-        'audio/async_socket_io_handler.h',
-        'audio/async_socket_io_handler_posix.cc',
-        'audio/async_socket_io_handler_win.cc',
-        'audio/audio_buffers_state.cc',
-        'audio/audio_buffers_state.h',
-        'audio/audio_io.h',
-        'audio/audio_input_controller.cc',
-        'audio/audio_input_controller.h',
-        'audio/audio_input_stream_impl.cc',
-        'audio/audio_input_stream_impl.h',
-        'audio/audio_device_name.cc',
-        'audio/audio_device_name.h',
-        'audio/audio_manager.cc',
-        'audio/audio_manager.h',
-        'audio/audio_manager_base.cc',
-        'audio/audio_manager_base.h',
-        'audio/audio_output_controller.cc',
-        'audio/audio_output_controller.h',
-        'audio/audio_output_dispatcher.cc',
-        'audio/audio_output_dispatcher.h',
-        'audio/audio_output_dispatcher_impl.cc',
-        'audio/audio_output_dispatcher_impl.h',
-        'audio/audio_output_mixer.cc',
-        'audio/audio_output_mixer.h',
-        'audio/audio_output_proxy.cc',
-        'audio/audio_output_proxy.h',
-        'audio/audio_parameters.cc',
-        'audio/audio_parameters.h',
-        'audio/audio_util.cc',
-        'audio/audio_util.h',
-        'audio/cross_process_notification.cc',
-        'audio/cross_process_notification.h',
-        'audio/cross_process_notification_win.cc',
-        'audio/cross_process_notification_posix.cc',
-        'audio/fake_audio_input_stream.cc',
-        'audio/fake_audio_input_stream.h',
-        'audio/fake_audio_output_stream.cc',
-        'audio/fake_audio_output_stream.h',
-        'audio/linux/audio_manager_linux.cc',
-        'audio/linux/audio_manager_linux.h',
-        'audio/linux/alsa_input.cc',
-        'audio/linux/alsa_input.h',
-        'audio/linux/alsa_output.cc',
-        'audio/linux/alsa_output.h',
-        'audio/linux/alsa_util.cc',
-        'audio/linux/alsa_util.h',
-        'audio/linux/alsa_wrapper.cc',
-        'audio/linux/alsa_wrapper.h',
-        'audio/linux/cras_output.cc',
-        'audio/linux/cras_output.h',
-        'audio/openbsd/audio_manager_openbsd.cc',
-        'audio/openbsd/audio_manager_openbsd.h',
-        'audio/mac/audio_input_mac.cc',
-        'audio/mac/audio_input_mac.h',
-        'audio/mac/audio_low_latency_input_mac.cc',
-        'audio/mac/audio_low_latency_input_mac.h',
-        'audio/mac/audio_low_latency_output_mac.cc',
-        'audio/mac/audio_low_latency_output_mac.h',
-        'audio/mac/audio_manager_mac.cc',
-        'audio/mac/audio_manager_mac.h',
-        'audio/mac/audio_output_mac.cc',
-        'audio/mac/audio_output_mac.h',
-        'audio/null_audio_sink.cc',
-        'audio/null_audio_sink.h',
-        'audio/pulse/pulse_output.cc',
-        'audio/pulse/pulse_output.h',
-        'audio/sample_rates.cc',
-        'audio/sample_rates.h',
-        'audio/simple_sources.cc',
-        'audio/simple_sources.h',
-        'audio/win/audio_low_latency_input_win.cc',
-        'audio/win/audio_low_latency_input_win.h',
-        'audio/win/audio_low_latency_output_win.cc',
-        'audio/win/audio_low_latency_output_win.h',
-        'audio/win/audio_manager_win.cc',
-        'audio/win/audio_manager_win.h',
-        'audio/win/avrt_wrapper_win.cc',
-        'audio/win/avrt_wrapper_win.h',
-        'audio/win/device_enumeration_win.cc',
-        'audio/win/device_enumeration_win.h',
-        'audio/win/wavein_input_win.cc',
-        'audio/win/wavein_input_win.h',
-        'audio/win/waveout_output_win.cc',
-        'audio/win/waveout_output_win.h',
-        'base/android/media_jni_registrar.cc',
-        'base/android/media_jni_registrar.h',
-        'base/audio_decoder.cc',
-        'base/audio_decoder.h',
-        'base/audio_decoder_config.cc',
-        'base/audio_decoder_config.h',
-        'base/audio_renderer.h',
-        'base/audio_renderer_mixer.cc',
-        'base/audio_renderer_mixer.h',
-        'base/audio_renderer_mixer_input.cc',
-        'base/audio_renderer_mixer_input.h',
-        'base/bitstream_buffer.h',
-        'base/buffers.cc',
-        'base/buffers.h',
-        'base/byte_queue.cc',
-        'base/byte_queue.h',
-        'base/channel_layout.cc',
-        'base/channel_layout.h',
-        'base/clock.cc',
-        'base/clock.h',
-        'base/composite_filter.cc',
-        'base/composite_filter.h',
-        'base/data_buffer.cc',
-        'base/data_buffer.h',
-        'base/data_source.cc',
-        'base/data_source.h',
-        'base/decoder_buffer.cc',
-        'base/decoder_buffer.h',
-        'base/decrypt_config.cc',
-        'base/decrypt_config.h',
-        'base/decryptor.h',
-        'base/decryptor_client.h',
-        'base/demuxer.cc',
-        'base/demuxer.h',
-        'base/demuxer_stream.cc',
-        'base/demuxer_stream.h',
-        'base/djb2.cc',
-        'base/djb2.h',
-        'base/filter_collection.cc',
-        'base/filter_collection.h',
-        'base/filter_host.h',
-        'base/filters.cc',
-        'base/filters.h',
-        'base/h264_bitstream_converter.cc',
-        'base/h264_bitstream_converter.h',
-        'base/media.h',
-        'base/media_android.cc',
-        'base/media_export.h',
-        'base/media_log.cc',
-        'base/media_log.h',
-        'base/media_log_event.h',
-        'base/media_posix.cc',
-        'base/media_switches.cc',
-        'base/media_switches.h',
-        'base/media_win.cc',
-        'base/message_loop_factory.cc',
-        'base/message_loop_factory.h',
-        'base/pipeline.cc',
-        'base/pipeline.h',
-        'base/pipeline_status.cc',
-        'base/pipeline_status.h',
-        'base/ranges.cc',
-        'base/ranges.h',
-        'base/seekable_buffer.cc',
-        'base/seekable_buffer.h',
-        'base/state_matrix.cc',
-        'base/state_matrix.h',
-        'base/stream_parser.cc',
-        'base/stream_parser.h',
-        'base/stream_parser_buffer.cc',
-        'base/stream_parser_buffer.h',
-        'base/video_decoder.cc',
-        'base/video_decoder.h',
-        'base/video_decoder_config.cc',
-        'base/video_decoder_config.h',
-        'base/video_frame.cc',
-        'base/video_frame.h',
-        'base/video_renderer.h',
-        'base/video_util.cc',
-        'base/video_util.h',
-        'crypto/aes_decryptor.cc',
-        'crypto/aes_decryptor.h',
-        'ffmpeg/ffmpeg_common.cc',
-        'ffmpeg/ffmpeg_common.h',
-        'ffmpeg/file_protocol.cc',
-        'ffmpeg/file_protocol.h',
-        'filters/audio_file_reader.cc',
-        'filters/audio_file_reader.h',
-        'filters/audio_renderer_algorithm.cc',
-        'filters/audio_renderer_algorithm.h',
-        'filters/audio_renderer_impl.cc',
-        'filters/audio_renderer_impl.h',
-        'filters/bitstream_converter.cc',
-        'filters/bitstream_converter.h',
-        'filters/chunk_demuxer.cc',
-        'filters/chunk_demuxer.h',
-        'filters/chunk_demuxer_client.h',
-        'filters/dummy_demuxer.cc',
-        'filters/dummy_demuxer.h',
-        'filters/ffmpeg_audio_decoder.cc',
-        'filters/ffmpeg_audio_decoder.h',
-        'filters/ffmpeg_demuxer.cc',
-        'filters/ffmpeg_demuxer.h',
-        'filters/ffmpeg_h264_bitstream_converter.cc',
-        'filters/ffmpeg_h264_bitstream_converter.h',
-        'filters/ffmpeg_glue.cc',
-        'filters/ffmpeg_glue.h',
-        'filters/ffmpeg_video_decoder.cc',
-        'filters/ffmpeg_video_decoder.h',
-        'filters/file_data_source.cc',
-        'filters/file_data_source.h',
-        'filters/gpu_video_decoder.cc',
-        'filters/gpu_video_decoder.h',
-        'filters/in_memory_url_protocol.cc',
-        'filters/in_memory_url_protocol.h',
-        'filters/source_buffer_stream.cc',
-        'filters/source_buffer_stream.h',
-        'filters/video_frame_generator.cc',
-        'filters/video_frame_generator.h',
-        'filters/video_renderer_base.cc',
-        'filters/video_renderer_base.h',
-        'video/capture/fake_video_capture_device.cc',
-        'video/capture/fake_video_capture_device.h',
-        'video/capture/linux/video_capture_device_linux.cc',
-        'video/capture/linux/video_capture_device_linux.h',
-        'video/capture/mac/video_capture_device_mac.h',
-        'video/capture/mac/video_capture_device_mac.mm',
-        'video/capture/mac/video_capture_device_qtkit_mac.h',
-        'video/capture/mac/video_capture_device_qtkit_mac.mm',
-        'video/capture/video_capture.h',
-        'video/capture/video_capture_device.h',
-        'video/capture/video_capture_device_dummy.cc',
-        'video/capture/video_capture_device_dummy.h',
-        'video/capture/video_capture_proxy.cc',
-        'video/capture/video_capture_proxy.h',
-        'video/capture/video_capture_types.h',
-        'video/capture/win/filter_base_win.cc',
-        'video/capture/win/filter_base_win.h',
-        'video/capture/win/pin_base_win.cc',
-        'video/capture/win/pin_base_win.h',
-        'video/capture/win/sink_filter_observer_win.h',
-        'video/capture/win/sink_filter_win.cc',
-        'video/capture/win/sink_filter_win.h',
-        'video/capture/win/sink_input_pin_win.cc',
-        'video/capture/win/sink_input_pin_win.h',
-        'video/capture/win/video_capture_device_win.cc',
-        'video/capture/win/video_capture_device_win.h',
-        'video/picture.cc',
-        'video/picture.h',
-        'video/video_decode_accelerator.cc',
-        'video/video_decode_accelerator.h',
-        'webm/webm_constants.h',
-        'webm/webm_cluster_parser.cc',
-        'webm/webm_cluster_parser.h',
-        'webm/webm_content_encodings.cc',
-        'webm/webm_content_encodings.h',
-        'webm/webm_content_encodings_client.cc',
-        'webm/webm_content_encodings_client.h',
-        'webm/webm_info_parser.cc',
-        'webm/webm_info_parser.h',
-        'webm/webm_parser.cc',
-        'webm/webm_parser.h',
-        'webm/webm_stream_parser.cc',
-        'webm/webm_stream_parser.h',
-        'webm/webm_tracks_parser.cc',
-        'webm/webm_tracks_parser.h',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '..',
-        ],
-      },
-      'conditions': [
-        # Android doesn't use ffmpeg, so make the dependency conditional
-        # and exclude the sources which depend on ffmpeg.
-        ['OS != "android"', {
-          'dependencies': [
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-        }],
-        ['OS == "android"', {
-          'sources!': [
-            'base/media_posix.cc',
-            'ffmpeg/ffmpeg_common.cc',
-            'ffmpeg/ffmpeg_common.h',
-            'ffmpeg/file_protocol.cc',
-            'ffmpeg/file_protocol.h',
-            'filters/audio_file_reader.cc',
-            'filters/audio_file_reader.h',
-            'filters/bitstream_converter.cc',
-            'filters/bitstream_converter.h',
-            'filters/chunk_demuxer.cc',
-            'filters/chunk_demuxer.h',
-            'filters/chunk_demuxer_client.h',
-            'filters/ffmpeg_audio_decoder.cc',
-            'filters/ffmpeg_audio_decoder.h',
-            'filters/ffmpeg_demuxer.cc',
-            'filters/ffmpeg_demuxer.h',
-            'filters/ffmpeg_h264_bitstream_converter.cc',
-            'filters/ffmpeg_h264_bitstream_converter.h',
-            'filters/ffmpeg_glue.cc',
-            'filters/ffmpeg_glue.h',
-            'filters/ffmpeg_video_decoder.cc',
-            'filters/ffmpeg_video_decoder.h',
-            'filters/gpu_video_decoder.cc',
-            'filters/gpu_video_decoder.h',
-            'webm/webm_cluster_parser.cc',
-            'webm/webm_cluster_parser.h',
-            'webm/webm_stream_parser.cc',
-            'webm/webm_stream_parser.h',
-          ],
-        }],
-        # The below 'android' condition were added temporarily and should be
-        # removed in downstream, because there is no Java environment setup in
-        # upstream yet.
-        ['OS == "android"', {
-          'sources!':[
-            'audio/android/audio_track_output_android.cc',
-          ],
-          'sources':[
-            'audio/android/audio_track_output_stub_android.cc',
-          ],
-          'link_settings': {
-            'libraries': [
-              '-lOpenSLES',
-            ],
-          },
-        }],
-        ['OS=="linux" or OS=="freebsd" or OS=="solaris"', {
-          'link_settings': {
-            'libraries': [
-              '-lasound',
-            ],
-          },
-        }],
-        ['OS=="openbsd"', {
-          'sources/': [ ['exclude', '/alsa_' ],
-                        ['exclude', '/audio_manager_linux' ] ],
-          'link_settings': {
-            'libraries': [
-            ],
-          },
-        }],
-        ['OS!="openbsd"', {
-          'sources!': [
-            'audio/openbsd/audio_manager_openbsd.cc',
-            'audio/openbsd/audio_manager_openbsd.h',
-          ],
-        }],
-        ['OS=="linux"', {
-          'variables': {
-            'conditions': [
-              ['sysroot!=""', {
-                'pkg-config': '../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
-              }, {
-                'pkg-config': 'pkg-config'
-              }],
-            ],
-          },
-          'conditions': [
-            ['use_cras == 1', {
-              'cflags': [
-                '<!@(<(pkg-config) --cflags libcras)',
-              ],
-              'link_settings': {
-                'libraries': [
-                  '<!@(<(pkg-config) --libs libcras)',
-                ],
-              },
-              'defines': [
-                'USE_CRAS',
-              ],
-            }, {  # else: use_cras == 0
-              'sources!': [
-                'audio/linux/cras_output.cc',
-                'audio/linux/cras_output.h',
-              ],
-            }],
-          ],
-        }],
-        ['os_posix == 1', {
-          'conditions': [
-            ['use_pulseaudio == 1', {
-              'cflags': [
-                '<!@(pkg-config --cflags libpulse)',
-              ],
-              'link_settings': {
-                'libraries': [
-                  '<!@(pkg-config --libs-only-l libpulse)',
-                ],
-              },
-              'defines': [
-                'USE_PULSEAUDIO',
-              ],
-            }, {  # else: use_pulseaudio == 0
-              'sources!': [
-                'audio/pulse/pulse_output.cc',
-                'audio/pulse/pulse_output.h',
-              ],
-            }],
-          ],
-        }],
-        ['os_posix == 1 and OS != "android"', {
-          # Video capture isn't supported in Android yet.
-          'sources!': [
-            'video/capture/video_capture_device_dummy.cc',
-            'video/capture/video_capture_device_dummy.h',
-          ],
-        }],
-        ['OS=="mac"', {
-          'link_settings': {
-            'libraries': [
-              '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
-              '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
-              '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
-              '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
-              '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
-            ],
-          },
-        }],
-        ['OS=="win"', {
-          'sources!': [
-            'audio/pulse/pulse_output.cc',
-            'audio/pulse/pulse_output.h',
-            'video/capture/video_capture_device_dummy.cc',
-            'video/capture/video_capture_device_dummy.h',
-          ],
-        }],
-        ['proprietary_codecs==1 or branding=="Chrome"', {
-          'sources': [
-            'mp4/avc.cc',
-            'mp4/avc.h',
-            'mp4/box_definitions.cc',
-            'mp4/box_definitions.h',
-            'mp4/box_reader.cc',
-            'mp4/box_reader.h',
-            'mp4/cenc.cc',
-            'mp4/cenc.h',
-            'mp4/mp4_stream_parser.cc',
-            'mp4/mp4_stream_parser.h',
-            'mp4/offset_byte_queue.cc',
-            'mp4/offset_byte_queue.h',
-            'mp4/track_run_iterator.cc',
-            'mp4/track_run_iterator.h',
-          ],
-        }],
-      ],
-    },
-    {
-      'target_name': 'yuv_convert',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'conditions': [
-        ['order_profiling != 0', {
-          'target_conditions' : [
-            ['_toolset=="target"', {
-              'cflags!': [ '-finstrument-functions' ],
-            }],
-          ],
-        }],
-        [ 'target_arch == "ia32" or target_arch == "x64"', {
-          'dependencies': [
-            'yuv_convert_simd_x86',
-          ],
-        }],
-        [ 'target_arch == "arm"', {
-          'dependencies': [
-            'yuv_convert_simd_arm',
-          ],
-        }],
-      ],
-      'sources': [
-        'base/yuv_convert.cc',
-        'base/yuv_convert.h',
-      ],
-    },
-    {
-      'target_name': 'yuv_convert_simd_x86',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'base/simd/convert_rgb_to_yuv_c.cc',
-        'base/simd/convert_rgb_to_yuv_sse2.cc',
-        'base/simd/convert_rgb_to_yuv_ssse3.asm',
-        'base/simd/convert_rgb_to_yuv_ssse3.cc',
-        'base/simd/convert_rgb_to_yuv_ssse3.inc',
-        'base/simd/convert_yuv_to_rgb_c.cc',
-        'base/simd/convert_yuv_to_rgb_x86.cc',
-        'base/simd/convert_yuv_to_rgb_mmx.asm',
-        'base/simd/convert_yuv_to_rgb_mmx.inc',
-        'base/simd/convert_yuv_to_rgb_sse.asm',
-        'base/simd/filter_yuv.h',
-        'base/simd/filter_yuv_c.cc',
-        'base/simd/filter_yuv_mmx.cc',
-        'base/simd/filter_yuv_sse2.cc',
-        'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
-        'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
-        'base/simd/linear_scale_yuv_to_rgb_sse.asm',
-        'base/simd/scale_yuv_to_rgb_mmx.asm',
-        'base/simd/scale_yuv_to_rgb_mmx.inc',
-        'base/simd/scale_yuv_to_rgb_sse.asm',
-        'base/simd/yuv_to_rgb_table.cc',
-        'base/simd/yuv_to_rgb_table.h',
-      ],
-      'conditions': [
-        ['order_profiling != 0', {
-          'target_conditions' : [
-            ['_toolset=="target"', {
-              'cflags!': [ '-finstrument-functions' ],
-            }],
-          ],
-        }],
-        [ 'target_arch == "x64"', {
-          # Source files optimized for X64 systems.
-          'sources': [
-            'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
-            'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
-          ],
-        }],
-        [ 'os_posix == 1 and OS != "mac" and OS != "android"', {
-          'cflags': [
-            '-msse2',
-          ],
-        }],
-        [ 'OS == "mac"', {
-          'configurations': {
-            'Debug': {
-              'xcode_settings': {
-                # gcc on the mac builds horribly unoptimized sse code in debug
-                # mode. Since this is rarely going to be debugged, run with full
-                # optimizations in Debug as well as Release.
-                'GCC_OPTIMIZATION_LEVEL': '3',  # -O3
-               },
-             },
-          },
-        }],
-        [ 'OS=="win"', {
-          'variables': {
-            'yasm_flags': [
-              '-DWIN32',
-              '-DMSVC',
-              '-DCHROMIUM',
-              '-Isimd',
-            ],
-          },
-        }],
-        [ 'OS=="mac"', {
-          'variables': {
-            'yasm_flags': [
-              '-DPREFIX',
-              '-DMACHO',
-              '-DCHROMIUM',
-              '-Isimd',
-            ],
-          },
-        }],
-        [ 'os_posix==1 and OS!="mac"', {
-          'variables': {
-            'conditions': [
-              [ 'target_arch=="ia32"', {
-                'yasm_flags': [
-                  '-DX86_32',
-                  '-DELF',
-                  '-DCHROMIUM',
-                  '-Isimd',
-                ],
-              }, {
-                'yasm_flags': [
-                  '-DARCH_X86_64',
-                  '-DELF',
-                  '-DPIC',
-                  '-DCHROMIUM',
-                  '-Isimd',
-                ],
-              }],
-            ],
-          },
-        }],
-      ],
-      'variables': {
-        'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
-      },
-      'msvs_2010_disable_uldi_when_referenced': 1,
-      'includes': [
-        '../third_party/yasm/yasm_compile.gypi',
-      ],
-    },
-    {
-      'target_name': 'yuv_convert_simd_arm',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'base/simd/convert_rgb_to_yuv_c.cc',
-        'base/simd/convert_rgb_to_yuv.h',
-        'base/simd/convert_yuv_to_rgb_c.cc',
-        'base/simd/convert_yuv_to_rgb.h',
-        'base/simd/filter_yuv.h',
-        'base/simd/filter_yuv_c.cc',
-        'base/simd/yuv_to_rgb_table.cc',
-        'base/simd/yuv_to_rgb_table.h',
-      ],
-    },
-    {
-      'target_name': 'media_unittests',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        'media_test_support',
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../base/base.gyp:base_i18n',
-        '../base/base.gyp:test_support_base',
-        '../testing/gmock.gyp:gmock',
-        '../testing/gtest.gyp:gtest',
-        '../ui/ui.gyp:ui',
-      ],
-      'sources': [
-        'audio/async_socket_io_handler_unittest.cc',
-        'audio/audio_input_controller_unittest.cc',
-        'audio/audio_input_device_unittest.cc',
-        'audio/audio_input_unittest.cc',
-        'audio/audio_input_volume_unittest.cc',
-        'audio/audio_low_latency_input_output_unittest.cc',
-        'audio/audio_output_controller_unittest.cc',
-        'audio/audio_output_proxy_unittest.cc',
-        'audio/audio_parameters_unittest.cc',
-        'audio/audio_util_unittest.cc',
-        'audio/cross_process_notification_unittest.cc',
-        'audio/linux/alsa_output_unittest.cc',
-        'audio/mac/audio_low_latency_input_mac_unittest.cc',
-        'audio/mac/audio_output_mac_unittest.cc',
-        'audio/simple_sources_unittest.cc',
-        'audio/win/audio_low_latency_input_win_unittest.cc',
-        'audio/win/audio_low_latency_output_win_unittest.cc',
-        'audio/win/audio_output_win_unittest.cc',
-        'base/audio_renderer_mixer_unittest.cc',
-        'base/audio_renderer_mixer_input_unittest.cc',
-        'base/buffers_unittest.cc',
-        'base/clock_unittest.cc',
-        'base/composite_filter_unittest.cc',
-        'base/data_buffer_unittest.cc',
-        'base/decoder_buffer_unittest.cc',
-        'base/djb2_unittest.cc',
-        'base/fake_audio_render_callback.cc',
-        'base/fake_audio_render_callback.h',
-        'base/filter_collection_unittest.cc',
-        'base/h264_bitstream_converter_unittest.cc',
-        'base/pipeline_unittest.cc',
-        'base/ranges_unittest.cc',
-        'base/run_all_unittests.cc',
-        'base/seekable_buffer_unittest.cc',
-        'base/state_matrix_unittest.cc',
-        'base/test_data_util.cc',
-        'base/test_data_util.h',
-        'base/video_frame_unittest.cc',
-        'base/video_util_unittest.cc',
-        'base/yuv_convert_unittest.cc',
-        'crypto/aes_decryptor_unittest.cc',
-        'ffmpeg/ffmpeg_common_unittest.cc',
-        'filters/audio_renderer_algorithm_unittest.cc',
-        'filters/audio_renderer_impl_unittest.cc',
-        'filters/bitstream_converter_unittest.cc',
-        'filters/chunk_demuxer_unittest.cc',
-        'filters/ffmpeg_audio_decoder_unittest.cc',
-        'filters/ffmpeg_decoder_unittest.h',
-        'filters/ffmpeg_demuxer_unittest.cc',
-        'filters/ffmpeg_glue_unittest.cc',
-        'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
-        'filters/ffmpeg_video_decoder_unittest.cc',
-        'filters/file_data_source_unittest.cc',
-        'filters/pipeline_integration_test.cc',
-        'filters/pipeline_integration_test_base.cc',
-        'filters/source_buffer_stream_unittest.cc',
-        'filters/video_renderer_base_unittest.cc',
-        'video/capture/video_capture_device_unittest.cc',
-        'webm/cluster_builder.cc',
-        'webm/cluster_builder.h',
-        'webm/webm_cluster_parser_unittest.cc',
-        'webm/webm_content_encodings_client_unittest.cc',
-        'webm/webm_parser_unittest.cc',
-      ],
-      'conditions': [
-        ['os_posix==1 and OS!="mac"', {
-          'conditions': [
-            ['linux_use_tcmalloc==1', {
-              'dependencies': [
-                '../base/allocator/allocator.gyp:allocator',
-              ],
-            }],
-          ],
-        }],
-        ['OS != "android"', {
-          'dependencies': [
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-        }],
-        ['OS == "android"', {
-          'sources!': [
-            'audio/audio_input_volume_unittest.cc',
-            'base/test_data_util.cc',
-            'base/test_data_util.h',
-            'ffmpeg/ffmpeg_common_unittest.cc',
-            'filters/ffmpeg_audio_decoder_unittest.cc',
-            'filters/bitstream_converter_unittest.cc',
-            'filters/chunk_demuxer_unittest.cc',
-            'filters/ffmpeg_demuxer_unittest.cc',
-            'filters/ffmpeg_glue_unittest.cc',
-            'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
-            'filters/ffmpeg_video_decoder_unittest.cc',
-            'filters/pipeline_integration_test.cc',
-            'filters/pipeline_integration_test_base.cc',
-            'mp4/mp4_stream_parser_unittest.cc',
-            'webm/webm_cluster_parser_unittest.cc',
-          ],
-        }],
-        ['OS == "linux"', {
-          'conditions': [
-            ['use_cras == 1', {
-              'sources': [
-                'audio/linux/cras_output_unittest.cc',
-              ],
-              'defines': [
-                'USE_CRAS',
-              ],
-            }],
-          ],
-        }],
-        [ 'target_arch=="ia32" or target_arch=="x64"', {
-          'sources': [
-            'base/simd/convert_rgb_to_yuv_unittest.cc',
-          ],
-        }],
-        ['proprietary_codecs==1 or branding=="Chrome"', {
-          'sources': [
-            'mp4/avc_unittest.cc',
-            'mp4/box_reader_unittest.cc',
-            'mp4/mp4_stream_parser_unittest.cc',
-            'mp4/offset_byte_queue_unittest.cc',
-          ],
-        }],
-      ],
-    },
-    {
-      'target_name': 'media_test_support',
-      'type': 'static_library',
-      'dependencies': [
-        'media',
-        '../base/base.gyp:base',
-        '../testing/gmock.gyp:gmock',
-        '../testing/gtest.gyp:gtest',
-      ],
-      'sources': [
-        'audio/test_audio_input_controller_factory.cc',
-        'audio/test_audio_input_controller_factory.h',
-        'base/mock_callback.cc',
-        'base/mock_callback.h',
-        'base/mock_data_source_host.cc',
-        'base/mock_data_source_host.h',
-        'base/mock_demuxer_host.cc',
-        'base/mock_demuxer_host.h',
-        'base/mock_filter_host.cc',
-        'base/mock_filter_host.h',
-        'base/mock_filters.cc',
-        'base/mock_filters.h',
-      ],
-    },
-    {
-      'target_name': 'scaler_bench',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../skia/skia.gyp:skia',
-      ],
-      'sources': [
-        'tools/scaler_bench/scaler_bench.cc',
-      ],
-    },
-    {
-      'target_name': 'qt_faststart',
-      'type': 'executable',
-      'sources': [
-        'tools/qt_faststart/qt_faststart.c'
-      ],
-    },
-    {
-      'target_name': 'seek_tester',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        '../base/base.gyp:base',
-      ],
-      'sources': [
-        'tools/seek_tester/seek_tester.cc',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS=="win"', {
-      'targets': [
-        {
-          'target_name': 'player_wtl',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
-            '../ui/ui.gyp:ui',
-          ],
-          'include_dirs': [
-            '<(DEPTH)/third_party/wtl/include',
-          ],
-          'sources': [
-            'tools/player_wtl/list.h',
-            'tools/player_wtl/mainfrm.h',
-            'tools/player_wtl/movie.cc',
-            'tools/player_wtl/movie.h',
-            'tools/player_wtl/player_wtl.cc',
-            'tools/player_wtl/player_wtl.rc',
-            'tools/player_wtl/props.h',
-            'tools/player_wtl/seek.h',
-            'tools/player_wtl/resource.h',
-            'tools/player_wtl/view.h',
-          ],
-          'msvs_settings': {
-            'VCLinkerTool': {
-              'SubSystem': '2',         # Set /SUBSYSTEM:WINDOWS
-            },
-          },
-          'defines': [
-            '_CRT_SECURE_NO_WARNINGS=1',
-          ],
-        },
-      ],
-    }],
-    ['OS == "win" or toolkit_uses_gtk == 1', {
-      'targets': [
-        {
-          'target_name': 'shader_bench',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'sources': [
-            'tools/shader_bench/shader_bench.cc',
-            'tools/shader_bench/cpu_color_painter.cc',
-            'tools/shader_bench/cpu_color_painter.h',
-            'tools/shader_bench/gpu_color_painter.cc',
-            'tools/shader_bench/gpu_color_painter.h',
-            'tools/shader_bench/gpu_painter.cc',
-            'tools/shader_bench/gpu_painter.h',
-            'tools/shader_bench/painter.cc',
-            'tools/shader_bench/painter.h',
-            'tools/shader_bench/window.cc',
-            'tools/shader_bench/window.h',
-          ],
-          'conditions': [
-            ['toolkit_uses_gtk == 1', {
-              'dependencies': [
-                '../build/linux/system.gyp:gtk',
-              ],
-              'sources': [
-                'tools/shader_bench/window_linux.cc',
-              ],
-            }],
-            ['OS=="win"', {
-              'dependencies': [
-                '../third_party/angle/src/build_angle.gyp:libEGL',
-                '../third_party/angle/src/build_angle.gyp:libGLESv2',
-              ],
-              'sources': [
-                'tools/shader_bench/window_win.cc',
-              ],
-            }],
-          ],
-        },
-      ],
-    }],
-    ['OS == "linux" and target_arch != "arm"', {
-      'targets': [
-        {
-          'target_name': 'tile_render_bench',
-          'type': 'executable',
-          'dependencies': [
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'libraries': [
-            '-lGL',
-            '-ldl',
-          ],
-          'sources': [
-            'tools/tile_render_bench/tile_render_bench.cc',
-          ],
-        },
-      ],
-    }],
-    ['os_posix == 1 and OS != "mac" and OS != "android"', {
-      'targets': [
-        {
-          'target_name': 'player_x11',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'link_settings': {
-            'libraries': [
-              '-ldl',
-              '-lX11',
-              '-lXrender',
-              '-lXext',
-            ],
-          },
-          'sources': [
-            'tools/player_x11/data_source_logger.cc',
-            'tools/player_x11/data_source_logger.h',
-            'tools/player_x11/gl_video_renderer.cc',
-            'tools/player_x11/gl_video_renderer.h',
-            'tools/player_x11/player_x11.cc',
-            'tools/player_x11/x11_video_renderer.cc',
-            'tools/player_x11/x11_video_renderer.h',
-          ],
-        },
-      ],
-    }],
-    ['OS == "android"', {
-      'targets': [
-        {
-          'target_name': 'player_android',
-          'type': 'static_library',
-          'sources': [
-            'base/android/media_player_bridge.cc',
-            'base/android/media_player_bridge.h',
-          ],
-          'dependencies': [
-            '../base/base.gyp:base',
-          ],
-          'include_dirs': [
-            '<(SHARED_INTERMEDIATE_DIR)/media',
-          ],
-          'actions': [
-            {
-              'action_name': 'generate-jni-headers',
-              'inputs': [
-                '../base/android/jni_generator/jni_generator.py',
-                'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
-              ],
-              'outputs': [
-                '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
-              ],
-              'action': [
-                'python',
-                '<(DEPTH)/base/android/jni_generator/jni_generator.py',
-                '-o',
-                '<@(_inputs)',
-                '<@(_outputs)',
-              ],
-            },
-          ],
-        },
-        {
-          'target_name': 'media_java',
-          'type': 'none',
-          'dependencies': [ '../base/base.gyp:base_java' ],
-          'variables': {
-            'package_name': 'media',
-            'java_in_dir': 'base/android/java',
-          },
-          'includes': [ '../build/java.gypi' ],
-        },
-
-      ],
-    }, { # OS != "android"'
-      # Android does not use ffmpeg, so disable the targets which require it.
-      'targets': [
-        {
-          'target_name': 'ffmpeg_unittests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'media_test_support',
-            '../base/base.gyp:base',
-            '../base/base.gyp:base_i18n',
-            '../base/base.gyp:test_support_base',
-            '../base/base.gyp:test_support_perf',
-            '../testing/gtest.gyp:gtest',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'ffmpeg/ffmpeg_unittest.cc',
-          ],
-          'conditions': [
-            ['toolkit_uses_gtk == 1', {
-              'dependencies': [
-                # Needed for the following #include chain:
-                #   base/run_all_unittests.cc
-                #   ../base/test_suite.h
-                #   gtk/gtk.h
-                '../build/linux/system.gyp:gtk',
-              ],
-              'conditions': [
-                ['linux_use_tcmalloc==1', {
-                  'dependencies': [
-                    '../base/allocator/allocator.gyp:allocator',
-                  ],
-                }],
-              ],
-            }],
-          ],
-        },
-        {
-          'target_name': 'ffmpeg_regression_tests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'media_test_support',
-            '../base/base.gyp:test_support_base',
-            '../testing/gmock.gyp:gmock',
-            '../testing/gtest.gyp:gtest',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'base/test_data_util.cc',
-            'base/run_all_unittests.cc',
-            'ffmpeg/ffmpeg_regression_tests.cc',
-            'filters/pipeline_integration_test_base.cc',
-          ],
-          'conditions': [
-            ['os_posix==1 and OS!="mac"', {
-              'conditions': [
-                ['linux_use_tcmalloc==1', {
-                  'dependencies': [
-                    '../base/allocator/allocator.gyp:allocator',
-                  ],
-                }],
-              ],
-            }],
-          ],
-        },
-        {
-          'target_name': 'ffmpeg_tests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            '../base/base.gyp:base',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'test/ffmpeg_tests/ffmpeg_tests.cc',
-          ],
-        },
-        {
-          'target_name': 'media_bench',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            '../base/base.gyp:base',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'tools/media_bench/media_bench.cc',
-          ],
-        },
-      ],
-    }]
-  ],
-}
diff --git a/tools/gyp/tools/emacs/testdata/media.gyp.fontified b/tools/gyp/tools/emacs/testdata/media.gyp.fontified
deleted file mode 100644
index 962b7b2..0000000
--- a/tools/gyp/tools/emacs/testdata/media.gyp.fontified
+++ /dev/null
@@ -1,1107 +0,0 @@
-
-#("# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'chromium_code': 1,
-    # Override to dynamically link the PulseAudio library.
-    'use_pulseaudio%': 0,
-    # Override to dynamically link the cras (ChromeOS audio) library.
-    'use_cras%': 0,
-  },
-  'targets': [
-    {
-      'target_name': 'media',
-      'type': '<(component)',
-      'dependencies': [
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
-        '../build/temp_gyp/googleurl.gyp:googleurl',
-        '../crypto/crypto.gyp:crypto',
-        '../third_party/openmax/openmax.gyp:il',
-        '../ui/ui.gyp:ui',
-      ],
-      'defines': [
-        'MEDIA_IMPLEMENTATION',
-      ],
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'audio/android/audio_manager_android.cc',
-        'audio/android/audio_manager_android.h',
-        'audio/android/audio_track_output_android.cc',
-        'audio/android/audio_track_output_android.h',
-        'audio/android/opensles_input.cc',
-        'audio/android/opensles_input.h',
-        'audio/android/opensles_output.cc',
-        'audio/android/opensles_output.h',
-        'audio/async_socket_io_handler.h',
-        'audio/async_socket_io_handler_posix.cc',
-        'audio/async_socket_io_handler_win.cc',
-        'audio/audio_buffers_state.cc',
-        'audio/audio_buffers_state.h',
-        'audio/audio_io.h',
-        'audio/audio_input_controller.cc',
-        'audio/audio_input_controller.h',
-        'audio/audio_input_stream_impl.cc',
-        'audio/audio_input_stream_impl.h',
-        'audio/audio_device_name.cc',
-        'audio/audio_device_name.h',
-        'audio/audio_manager.cc',
-        'audio/audio_manager.h',
-        'audio/audio_manager_base.cc',
-        'audio/audio_manager_base.h',
-        'audio/audio_output_controller.cc',
-        'audio/audio_output_controller.h',
-        'audio/audio_output_dispatcher.cc',
-        'audio/audio_output_dispatcher.h',
-        'audio/audio_output_dispatcher_impl.cc',
-        'audio/audio_output_dispatcher_impl.h',
-        'audio/audio_output_mixer.cc',
-        'audio/audio_output_mixer.h',
-        'audio/audio_output_proxy.cc',
-        'audio/audio_output_proxy.h',
-        'audio/audio_parameters.cc',
-        'audio/audio_parameters.h',
-        'audio/audio_util.cc',
-        'audio/audio_util.h',
-        'audio/cross_process_notification.cc',
-        'audio/cross_process_notification.h',
-        'audio/cross_process_notification_win.cc',
-        'audio/cross_process_notification_posix.cc',
-        'audio/fake_audio_input_stream.cc',
-        'audio/fake_audio_input_stream.h',
-        'audio/fake_audio_output_stream.cc',
-        'audio/fake_audio_output_stream.h',
-        'audio/linux/audio_manager_linux.cc',
-        'audio/linux/audio_manager_linux.h',
-        'audio/linux/alsa_input.cc',
-        'audio/linux/alsa_input.h',
-        'audio/linux/alsa_output.cc',
-        'audio/linux/alsa_output.h',
-        'audio/linux/alsa_util.cc',
-        'audio/linux/alsa_util.h',
-        'audio/linux/alsa_wrapper.cc',
-        'audio/linux/alsa_wrapper.h',
-        'audio/linux/cras_output.cc',
-        'audio/linux/cras_output.h',
-        'audio/openbsd/audio_manager_openbsd.cc',
-        'audio/openbsd/audio_manager_openbsd.h',
-        'audio/mac/audio_input_mac.cc',
-        'audio/mac/audio_input_mac.h',
-        'audio/mac/audio_low_latency_input_mac.cc',
-        'audio/mac/audio_low_latency_input_mac.h',
-        'audio/mac/audio_low_latency_output_mac.cc',
-        'audio/mac/audio_low_latency_output_mac.h',
-        'audio/mac/audio_manager_mac.cc',
-        'audio/mac/audio_manager_mac.h',
-        'audio/mac/audio_output_mac.cc',
-        'audio/mac/audio_output_mac.h',
-        'audio/null_audio_sink.cc',
-        'audio/null_audio_sink.h',
-        'audio/pulse/pulse_output.cc',
-        'audio/pulse/pulse_output.h',
-        'audio/sample_rates.cc',
-        'audio/sample_rates.h',
-        'audio/simple_sources.cc',
-        'audio/simple_sources.h',
-        'audio/win/audio_low_latency_input_win.cc',
-        'audio/win/audio_low_latency_input_win.h',
-        'audio/win/audio_low_latency_output_win.cc',
-        'audio/win/audio_low_latency_output_win.h',
-        'audio/win/audio_manager_win.cc',
-        'audio/win/audio_manager_win.h',
-        'audio/win/avrt_wrapper_win.cc',
-        'audio/win/avrt_wrapper_win.h',
-        'audio/win/device_enumeration_win.cc',
-        'audio/win/device_enumeration_win.h',
-        'audio/win/wavein_input_win.cc',
-        'audio/win/wavein_input_win.h',
-        'audio/win/waveout_output_win.cc',
-        'audio/win/waveout_output_win.h',
-        'base/android/media_jni_registrar.cc',
-        'base/android/media_jni_registrar.h',
-        'base/audio_decoder.cc',
-        'base/audio_decoder.h',
-        'base/audio_decoder_config.cc',
-        'base/audio_decoder_config.h',
-        'base/audio_renderer.h',
-        'base/audio_renderer_mixer.cc',
-        'base/audio_renderer_mixer.h',
-        'base/audio_renderer_mixer_input.cc',
-        'base/audio_renderer_mixer_input.h',
-        'base/bitstream_buffer.h',
-        'base/buffers.cc',
-        'base/buffers.h',
-        'base/byte_queue.cc',
-        'base/byte_queue.h',
-        'base/channel_layout.cc',
-        'base/channel_layout.h',
-        'base/clock.cc',
-        'base/clock.h',
-        'base/composite_filter.cc',
-        'base/composite_filter.h',
-        'base/data_buffer.cc',
-        'base/data_buffer.h',
-        'base/data_source.cc',
-        'base/data_source.h',
-        'base/decoder_buffer.cc',
-        'base/decoder_buffer.h',
-        'base/decrypt_config.cc',
-        'base/decrypt_config.h',
-        'base/decryptor.h',
-        'base/decryptor_client.h',
-        'base/demuxer.cc',
-        'base/demuxer.h',
-        'base/demuxer_stream.cc',
-        'base/demuxer_stream.h',
-        'base/djb2.cc',
-        'base/djb2.h',
-        'base/filter_collection.cc',
-        'base/filter_collection.h',
-        'base/filter_host.h',
-        'base/filters.cc',
-        'base/filters.h',
-        'base/h264_bitstream_converter.cc',
-        'base/h264_bitstream_converter.h',
-        'base/media.h',
-        'base/media_android.cc',
-        'base/media_export.h',
-        'base/media_log.cc',
-        'base/media_log.h',
-        'base/media_log_event.h',
-        'base/media_posix.cc',
-        'base/media_switches.cc',
-        'base/media_switches.h',
-        'base/media_win.cc',
-        'base/message_loop_factory.cc',
-        'base/message_loop_factory.h',
-        'base/pipeline.cc',
-        'base/pipeline.h',
-        'base/pipeline_status.cc',
-        'base/pipeline_status.h',
-        'base/ranges.cc',
-        'base/ranges.h',
-        'base/seekable_buffer.cc',
-        'base/seekable_buffer.h',
-        'base/state_matrix.cc',
-        'base/state_matrix.h',
-        'base/stream_parser.cc',
-        'base/stream_parser.h',
-        'base/stream_parser_buffer.cc',
-        'base/stream_parser_buffer.h',
-        'base/video_decoder.cc',
-        'base/video_decoder.h',
-        'base/video_decoder_config.cc',
-        'base/video_decoder_config.h',
-        'base/video_frame.cc',
-        'base/video_frame.h',
-        'base/video_renderer.h',
-        'base/video_util.cc',
-        'base/video_util.h',
-        'crypto/aes_decryptor.cc',
-        'crypto/aes_decryptor.h',
-        'ffmpeg/ffmpeg_common.cc',
-        'ffmpeg/ffmpeg_common.h',
-        'ffmpeg/file_protocol.cc',
-        'ffmpeg/file_protocol.h',
-        'filters/audio_file_reader.cc',
-        'filters/audio_file_reader.h',
-        'filters/audio_renderer_algorithm.cc',
-        'filters/audio_renderer_algorithm.h',
-        'filters/audio_renderer_impl.cc',
-        'filters/audio_renderer_impl.h',
-        'filters/bitstream_converter.cc',
-        'filters/bitstream_converter.h',
-        'filters/chunk_demuxer.cc',
-        'filters/chunk_demuxer.h',
-        'filters/chunk_demuxer_client.h',
-        'filters/dummy_demuxer.cc',
-        'filters/dummy_demuxer.h',
-        'filters/ffmpeg_audio_decoder.cc',
-        'filters/ffmpeg_audio_decoder.h',
-        'filters/ffmpeg_demuxer.cc',
-        'filters/ffmpeg_demuxer.h',
-        'filters/ffmpeg_h264_bitstream_converter.cc',
-        'filters/ffmpeg_h264_bitstream_converter.h',
-        'filters/ffmpeg_glue.cc',
-        'filters/ffmpeg_glue.h',
-        'filters/ffmpeg_video_decoder.cc',
-        'filters/ffmpeg_video_decoder.h',
-        'filters/file_data_source.cc',
-        'filters/file_data_source.h',
-        'filters/gpu_video_decoder.cc',
-        'filters/gpu_video_decoder.h',
-        'filters/in_memory_url_protocol.cc',
-        'filters/in_memory_url_protocol.h',
-        'filters/source_buffer_stream.cc',
-        'filters/source_buffer_stream.h',
-        'filters/video_frame_generator.cc',
-        'filters/video_frame_generator.h',
-        'filters/video_renderer_base.cc',
-        'filters/video_renderer_base.h',
-        'video/capture/fake_video_capture_device.cc',
-        'video/capture/fake_video_capture_device.h',
-        'video/capture/linux/video_capture_device_linux.cc',
-        'video/capture/linux/video_capture_device_linux.h',
-        'video/capture/mac/video_capture_device_mac.h',
-        'video/capture/mac/video_capture_device_mac.mm',
-        'video/capture/mac/video_capture_device_qtkit_mac.h',
-        'video/capture/mac/video_capture_device_qtkit_mac.mm',
-        'video/capture/video_capture.h',
-        'video/capture/video_capture_device.h',
-        'video/capture/video_capture_device_dummy.cc',
-        'video/capture/video_capture_device_dummy.h',
-        'video/capture/video_capture_proxy.cc',
-        'video/capture/video_capture_proxy.h',
-        'video/capture/video_capture_types.h',
-        'video/capture/win/filter_base_win.cc',
-        'video/capture/win/filter_base_win.h',
-        'video/capture/win/pin_base_win.cc',
-        'video/capture/win/pin_base_win.h',
-        'video/capture/win/sink_filter_observer_win.h',
-        'video/capture/win/sink_filter_win.cc',
-        'video/capture/win/sink_filter_win.h',
-        'video/capture/win/sink_input_pin_win.cc',
-        'video/capture/win/sink_input_pin_win.h',
-        'video/capture/win/video_capture_device_win.cc',
-        'video/capture/win/video_capture_device_win.h',
-        'video/picture.cc',
-        'video/picture.h',
-        'video/video_decode_accelerator.cc',
-        'video/video_decode_accelerator.h',
-        'webm/webm_constants.h',
-        'webm/webm_cluster_parser.cc',
-        'webm/webm_cluster_parser.h',
-        'webm/webm_content_encodings.cc',
-        'webm/webm_content_encodings.h',
-        'webm/webm_content_encodings_client.cc',
-        'webm/webm_content_encodings_client.h',
-        'webm/webm_info_parser.cc',
-        'webm/webm_info_parser.h',
-        'webm/webm_parser.cc',
-        'webm/webm_parser.h',
-        'webm/webm_stream_parser.cc',
-        'webm/webm_stream_parser.h',
-        'webm/webm_tracks_parser.cc',
-        'webm/webm_tracks_parser.h',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '..',
-        ],
-      },
-      'conditions': [
-        # Android doesn't use ffmpeg, so make the dependency conditional
-        # and exclude the sources which depend on ffmpeg.
-        ['OS != \"android\"', {
-          'dependencies': [
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-        }],
-        ['OS == \"android\"', {
-          'sources!': [
-            'base/media_posix.cc',
-            'ffmpeg/ffmpeg_common.cc',
-            'ffmpeg/ffmpeg_common.h',
-            'ffmpeg/file_protocol.cc',
-            'ffmpeg/file_protocol.h',
-            'filters/audio_file_reader.cc',
-            'filters/audio_file_reader.h',
-            'filters/bitstream_converter.cc',
-            'filters/bitstream_converter.h',
-            'filters/chunk_demuxer.cc',
-            'filters/chunk_demuxer.h',
-            'filters/chunk_demuxer_client.h',
-            'filters/ffmpeg_audio_decoder.cc',
-            'filters/ffmpeg_audio_decoder.h',
-            'filters/ffmpeg_demuxer.cc',
-            'filters/ffmpeg_demuxer.h',
-            'filters/ffmpeg_h264_bitstream_converter.cc',
-            'filters/ffmpeg_h264_bitstream_converter.h',
-            'filters/ffmpeg_glue.cc',
-            'filters/ffmpeg_glue.h',
-            'filters/ffmpeg_video_decoder.cc',
-            'filters/ffmpeg_video_decoder.h',
-            'filters/gpu_video_decoder.cc',
-            'filters/gpu_video_decoder.h',
-            'webm/webm_cluster_parser.cc',
-            'webm/webm_cluster_parser.h',
-            'webm/webm_stream_parser.cc',
-            'webm/webm_stream_parser.h',
-          ],
-        }],
-        # The below 'android' condition were added temporarily and should be
-        # removed in downstream, because there is no Java environment setup in
-        # upstream yet.
-        ['OS == \"android\"', {
-          'sources!':[
-            'audio/android/audio_track_output_android.cc',
-          ],
-          'sources':[
-            'audio/android/audio_track_output_stub_android.cc',
-          ],
-          'link_settings': {
-            'libraries': [
-              '-lOpenSLES',
-            ],
-          },
-        }],
-        ['OS==\"linux\" or OS==\"freebsd\" or OS==\"solaris\"', {
-          'link_settings': {
-            'libraries': [
-              '-lasound',
-            ],
-          },
-        }],
-        ['OS==\"openbsd\"', {
-          'sources/': [ ['exclude', '/alsa_' ],
-                        ['exclude', '/audio_manager_linux' ] ],
-          'link_settings': {
-            'libraries': [
-            ],
-          },
-        }],
-        ['OS!=\"openbsd\"', {
-          'sources!': [
-            'audio/openbsd/audio_manager_openbsd.cc',
-            'audio/openbsd/audio_manager_openbsd.h',
-          ],
-        }],
-        ['OS==\"linux\"', {
-          'variables': {
-            'conditions': [
-              ['sysroot!=\"\"', {
-                'pkg-config': '../build/linux/pkg-config-wrapper \"<(sysroot)\" \"<(target_arch)\"',
-              }, {
-                'pkg-config': 'pkg-config'
-              }],
-            ],
-          },
-          'conditions': [
-            ['use_cras == 1', {
-              'cflags': [
-                '<!@(<(pkg-config) --cflags libcras)',
-              ],
-              'link_settings': {
-                'libraries': [
-                  '<!@(<(pkg-config) --libs libcras)',
-                ],
-              },
-              'defines': [
-                'USE_CRAS',
-              ],
-            }, {  # else: use_cras == 0
-              'sources!': [
-                'audio/linux/cras_output.cc',
-                'audio/linux/cras_output.h',
-              ],
-            }],
-          ],
-        }],
-        ['os_posix == 1', {
-          'conditions': [
-            ['use_pulseaudio == 1', {
-              'cflags': [
-                '<!@(pkg-config --cflags libpulse)',
-              ],
-              'link_settings': {
-                'libraries': [
-                  '<!@(pkg-config --libs-only-l libpulse)',
-                ],
-              },
-              'defines': [
-                'USE_PULSEAUDIO',
-              ],
-            }, {  # else: use_pulseaudio == 0
-              'sources!': [
-                'audio/pulse/pulse_output.cc',
-                'audio/pulse/pulse_output.h',
-              ],
-            }],
-          ],
-        }],
-        ['os_posix == 1 and OS != \"android\"', {
-          # Video capture isn't supported in Android yet.
-          'sources!': [
-            'video/capture/video_capture_device_dummy.cc',
-            'video/capture/video_capture_device_dummy.h',
-          ],
-        }],
-        ['OS==\"mac\"', {
-          'link_settings': {
-            'libraries': [
-              '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
-              '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
-              '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
-              '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
-              '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
-            ],
-          },
-        }],
-        ['OS==\"win\"', {
-          'sources!': [
-            'audio/pulse/pulse_output.cc',
-            'audio/pulse/pulse_output.h',
-            'video/capture/video_capture_device_dummy.cc',
-            'video/capture/video_capture_device_dummy.h',
-          ],
-        }],
-        ['proprietary_codecs==1 or branding==\"Chrome\"', {
-          'sources': [
-            'mp4/avc.cc',
-            'mp4/avc.h',
-            'mp4/box_definitions.cc',
-            'mp4/box_definitions.h',
-            'mp4/box_reader.cc',
-            'mp4/box_reader.h',
-            'mp4/cenc.cc',
-            'mp4/cenc.h',
-            'mp4/mp4_stream_parser.cc',
-            'mp4/mp4_stream_parser.h',
-            'mp4/offset_byte_queue.cc',
-            'mp4/offset_byte_queue.h',
-            'mp4/track_run_iterator.cc',
-            'mp4/track_run_iterator.h',
-          ],
-        }],
-      ],
-    },
-    {
-      'target_name': 'yuv_convert',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'conditions': [
-        ['order_profiling != 0', {
-          'target_conditions' : [
-            ['_toolset==\"target\"', {
-              'cflags!': [ '-finstrument-functions' ],
-            }],
-          ],
-        }],
-        [ 'target_arch == \"ia32\" or target_arch == \"x64\"', {
-          'dependencies': [
-            'yuv_convert_simd_x86',
-          ],
-        }],
-        [ 'target_arch == \"arm\"', {
-          'dependencies': [
-            'yuv_convert_simd_arm',
-          ],
-        }],
-      ],
-      'sources': [
-        'base/yuv_convert.cc',
-        'base/yuv_convert.h',
-      ],
-    },
-    {
-      'target_name': 'yuv_convert_simd_x86',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'base/simd/convert_rgb_to_yuv_c.cc',
-        'base/simd/convert_rgb_to_yuv_sse2.cc',
-        'base/simd/convert_rgb_to_yuv_ssse3.asm',
-        'base/simd/convert_rgb_to_yuv_ssse3.cc',
-        'base/simd/convert_rgb_to_yuv_ssse3.inc',
-        'base/simd/convert_yuv_to_rgb_c.cc',
-        'base/simd/convert_yuv_to_rgb_x86.cc',
-        'base/simd/convert_yuv_to_rgb_mmx.asm',
-        'base/simd/convert_yuv_to_rgb_mmx.inc',
-        'base/simd/convert_yuv_to_rgb_sse.asm',
-        'base/simd/filter_yuv.h',
-        'base/simd/filter_yuv_c.cc',
-        'base/simd/filter_yuv_mmx.cc',
-        'base/simd/filter_yuv_sse2.cc',
-        'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
-        'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
-        'base/simd/linear_scale_yuv_to_rgb_sse.asm',
-        'base/simd/scale_yuv_to_rgb_mmx.asm',
-        'base/simd/scale_yuv_to_rgb_mmx.inc',
-        'base/simd/scale_yuv_to_rgb_sse.asm',
-        'base/simd/yuv_to_rgb_table.cc',
-        'base/simd/yuv_to_rgb_table.h',
-      ],
-      'conditions': [
-        ['order_profiling != 0', {
-          'target_conditions' : [
-            ['_toolset==\"target\"', {
-              'cflags!': [ '-finstrument-functions' ],
-            }],
-          ],
-        }],
-        [ 'target_arch == \"x64\"', {
-          # Source files optimized for X64 systems.
-          'sources': [
-            'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
-            'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
-          ],
-        }],
-        [ 'os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
-          'cflags': [
-            '-msse2',
-          ],
-        }],
-        [ 'OS == \"mac\"', {
-          'configurations': {
-            'Debug': {
-              'xcode_settings': {
-                # gcc on the mac builds horribly unoptimized sse code in debug
-                # mode. Since this is rarely going to be debugged, run with full
-                # optimizations in Debug as well as Release.
-                'GCC_OPTIMIZATION_LEVEL': '3',  # -O3
-               },
-             },
-          },
-        }],
-        [ 'OS==\"win\"', {
-          'variables': {
-            'yasm_flags': [
-              '-DWIN32',
-              '-DMSVC',
-              '-DCHROMIUM',
-              '-Isimd',
-            ],
-          },
-        }],
-        [ 'OS==\"mac\"', {
-          'variables': {
-            'yasm_flags': [
-              '-DPREFIX',
-              '-DMACHO',
-              '-DCHROMIUM',
-              '-Isimd',
-            ],
-          },
-        }],
-        [ 'os_posix==1 and OS!=\"mac\"', {
-          'variables': {
-            'conditions': [
-              [ 'target_arch==\"ia32\"', {
-                'yasm_flags': [
-                  '-DX86_32',
-                  '-DELF',
-                  '-DCHROMIUM',
-                  '-Isimd',
-                ],
-              }, {
-                'yasm_flags': [
-                  '-DARCH_X86_64',
-                  '-DELF',
-                  '-DPIC',
-                  '-DCHROMIUM',
-                  '-Isimd',
-                ],
-              }],
-            ],
-          },
-        }],
-      ],
-      'variables': {
-        'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
-      },
-      'msvs_2010_disable_uldi_when_referenced': 1,
-      'includes': [
-        '../third_party/yasm/yasm_compile.gypi',
-      ],
-    },
-    {
-      'target_name': 'yuv_convert_simd_arm',
-      'type': 'static_library',
-      'include_dirs': [
-        '..',
-      ],
-      'sources': [
-        'base/simd/convert_rgb_to_yuv_c.cc',
-        'base/simd/convert_rgb_to_yuv.h',
-        'base/simd/convert_yuv_to_rgb_c.cc',
-        'base/simd/convert_yuv_to_rgb.h',
-        'base/simd/filter_yuv.h',
-        'base/simd/filter_yuv_c.cc',
-        'base/simd/yuv_to_rgb_table.cc',
-        'base/simd/yuv_to_rgb_table.h',
-      ],
-    },
-    {
-      'target_name': 'media_unittests',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        'media_test_support',
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../base/base.gyp:base_i18n',
-        '../base/base.gyp:test_support_base',
-        '../testing/gmock.gyp:gmock',
-        '../testing/gtest.gyp:gtest',
-        '../ui/ui.gyp:ui',
-      ],
-      'sources': [
-        'audio/async_socket_io_handler_unittest.cc',
-        'audio/audio_input_controller_unittest.cc',
-        'audio/audio_input_device_unittest.cc',
-        'audio/audio_input_unittest.cc',
-        'audio/audio_input_volume_unittest.cc',
-        'audio/audio_low_latency_input_output_unittest.cc',
-        'audio/audio_output_controller_unittest.cc',
-        'audio/audio_output_proxy_unittest.cc',
-        'audio/audio_parameters_unittest.cc',
-        'audio/audio_util_unittest.cc',
-        'audio/cross_process_notification_unittest.cc',
-        'audio/linux/alsa_output_unittest.cc',
-        'audio/mac/audio_low_latency_input_mac_unittest.cc',
-        'audio/mac/audio_output_mac_unittest.cc',
-        'audio/simple_sources_unittest.cc',
-        'audio/win/audio_low_latency_input_win_unittest.cc',
-        'audio/win/audio_low_latency_output_win_unittest.cc',
-        'audio/win/audio_output_win_unittest.cc',
-        'base/audio_renderer_mixer_unittest.cc',
-        'base/audio_renderer_mixer_input_unittest.cc',
-        'base/buffers_unittest.cc',
-        'base/clock_unittest.cc',
-        'base/composite_filter_unittest.cc',
-        'base/data_buffer_unittest.cc',
-        'base/decoder_buffer_unittest.cc',
-        'base/djb2_unittest.cc',
-        'base/fake_audio_render_callback.cc',
-        'base/fake_audio_render_callback.h',
-        'base/filter_collection_unittest.cc',
-        'base/h264_bitstream_converter_unittest.cc',
-        'base/pipeline_unittest.cc',
-        'base/ranges_unittest.cc',
-        'base/run_all_unittests.cc',
-        'base/seekable_buffer_unittest.cc',
-        'base/state_matrix_unittest.cc',
-        'base/test_data_util.cc',
-        'base/test_data_util.h',
-        'base/video_frame_unittest.cc',
-        'base/video_util_unittest.cc',
-        'base/yuv_convert_unittest.cc',
-        'crypto/aes_decryptor_unittest.cc',
-        'ffmpeg/ffmpeg_common_unittest.cc',
-        'filters/audio_renderer_algorithm_unittest.cc',
-        'filters/audio_renderer_impl_unittest.cc',
-        'filters/bitstream_converter_unittest.cc',
-        'filters/chunk_demuxer_unittest.cc',
-        'filters/ffmpeg_audio_decoder_unittest.cc',
-        'filters/ffmpeg_decoder_unittest.h',
-        'filters/ffmpeg_demuxer_unittest.cc',
-        'filters/ffmpeg_glue_unittest.cc',
-        'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
-        'filters/ffmpeg_video_decoder_unittest.cc',
-        'filters/file_data_source_unittest.cc',
-        'filters/pipeline_integration_test.cc',
-        'filters/pipeline_integration_test_base.cc',
-        'filters/source_buffer_stream_unittest.cc',
-        'filters/video_renderer_base_unittest.cc',
-        'video/capture/video_capture_device_unittest.cc',
-        'webm/cluster_builder.cc',
-        'webm/cluster_builder.h',
-        'webm/webm_cluster_parser_unittest.cc',
-        'webm/webm_content_encodings_client_unittest.cc',
-        'webm/webm_parser_unittest.cc',
-      ],
-      'conditions': [
-        ['os_posix==1 and OS!=\"mac\"', {
-          'conditions': [
-            ['linux_use_tcmalloc==1', {
-              'dependencies': [
-                '../base/allocator/allocator.gyp:allocator',
-              ],
-            }],
-          ],
-        }],
-        ['OS != \"android\"', {
-          'dependencies': [
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-        }],
-        ['OS == \"android\"', {
-          'sources!': [
-            'audio/audio_input_volume_unittest.cc',
-            'base/test_data_util.cc',
-            'base/test_data_util.h',
-            'ffmpeg/ffmpeg_common_unittest.cc',
-            'filters/ffmpeg_audio_decoder_unittest.cc',
-            'filters/bitstream_converter_unittest.cc',
-            'filters/chunk_demuxer_unittest.cc',
-            'filters/ffmpeg_demuxer_unittest.cc',
-            'filters/ffmpeg_glue_unittest.cc',
-            'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
-            'filters/ffmpeg_video_decoder_unittest.cc',
-            'filters/pipeline_integration_test.cc',
-            'filters/pipeline_integration_test_base.cc',
-            'mp4/mp4_stream_parser_unittest.cc',
-            'webm/webm_cluster_parser_unittest.cc',
-          ],
-        }],
-        ['OS == \"linux\"', {
-          'conditions': [
-            ['use_cras == 1', {
-              'sources': [
-                'audio/linux/cras_output_unittest.cc',
-              ],
-              'defines': [
-                'USE_CRAS',
-              ],
-            }],
-          ],
-        }],
-        [ 'target_arch==\"ia32\" or target_arch==\"x64\"', {
-          'sources': [
-            'base/simd/convert_rgb_to_yuv_unittest.cc',
-          ],
-        }],
-        ['proprietary_codecs==1 or branding==\"Chrome\"', {
-          'sources': [
-            'mp4/avc_unittest.cc',
-            'mp4/box_reader_unittest.cc',
-            'mp4/mp4_stream_parser_unittest.cc',
-            'mp4/offset_byte_queue_unittest.cc',
-          ],
-        }],
-      ],
-    },
-    {
-      'target_name': 'media_test_support',
-      'type': 'static_library',
-      'dependencies': [
-        'media',
-        '../base/base.gyp:base',
-        '../testing/gmock.gyp:gmock',
-        '../testing/gtest.gyp:gtest',
-      ],
-      'sources': [
-        'audio/test_audio_input_controller_factory.cc',
-        'audio/test_audio_input_controller_factory.h',
-        'base/mock_callback.cc',
-        'base/mock_callback.h',
-        'base/mock_data_source_host.cc',
-        'base/mock_data_source_host.h',
-        'base/mock_demuxer_host.cc',
-        'base/mock_demuxer_host.h',
-        'base/mock_filter_host.cc',
-        'base/mock_filter_host.h',
-        'base/mock_filters.cc',
-        'base/mock_filters.h',
-      ],
-    },
-    {
-      'target_name': 'scaler_bench',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        'yuv_convert',
-        '../base/base.gyp:base',
-        '../skia/skia.gyp:skia',
-      ],
-      'sources': [
-        'tools/scaler_bench/scaler_bench.cc',
-      ],
-    },
-    {
-      'target_name': 'qt_faststart',
-      'type': 'executable',
-      'sources': [
-        'tools/qt_faststart/qt_faststart.c'
-      ],
-    },
-    {
-      'target_name': 'seek_tester',
-      'type': 'executable',
-      'dependencies': [
-        'media',
-        '../base/base.gyp:base',
-      ],
-      'sources': [
-        'tools/seek_tester/seek_tester.cc',
-      ],
-    },
-  ],
-  'conditions': [
-    ['OS==\"win\"', {
-      'targets': [
-        {
-          'target_name': 'player_wtl',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
-            '../ui/ui.gyp:ui',
-          ],
-          'include_dirs': [
-            '<(DEPTH)/third_party/wtl/include',
-          ],
-          'sources': [
-            'tools/player_wtl/list.h',
-            'tools/player_wtl/mainfrm.h',
-            'tools/player_wtl/movie.cc',
-            'tools/player_wtl/movie.h',
-            'tools/player_wtl/player_wtl.cc',
-            'tools/player_wtl/player_wtl.rc',
-            'tools/player_wtl/props.h',
-            'tools/player_wtl/seek.h',
-            'tools/player_wtl/resource.h',
-            'tools/player_wtl/view.h',
-          ],
-          'msvs_settings': {
-            'VCLinkerTool': {
-              'SubSystem': '2',         # Set /SUBSYSTEM:WINDOWS
-            },
-          },
-          'defines': [
-            '_CRT_SECURE_NO_WARNINGS=1',
-          ],
-        },
-      ],
-    }],
-    ['OS == \"win\" or toolkit_uses_gtk == 1', {
-      'targets': [
-        {
-          'target_name': 'shader_bench',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'sources': [
-            'tools/shader_bench/shader_bench.cc',
-            'tools/shader_bench/cpu_color_painter.cc',
-            'tools/shader_bench/cpu_color_painter.h',
-            'tools/shader_bench/gpu_color_painter.cc',
-            'tools/shader_bench/gpu_color_painter.h',
-            'tools/shader_bench/gpu_painter.cc',
-            'tools/shader_bench/gpu_painter.h',
-            'tools/shader_bench/painter.cc',
-            'tools/shader_bench/painter.h',
-            'tools/shader_bench/window.cc',
-            'tools/shader_bench/window.h',
-          ],
-          'conditions': [
-            ['toolkit_uses_gtk == 1', {
-              'dependencies': [
-                '../build/linux/system.gyp:gtk',
-              ],
-              'sources': [
-                'tools/shader_bench/window_linux.cc',
-              ],
-            }],
-            ['OS==\"win\"', {
-              'dependencies': [
-                '../third_party/angle/src/build_angle.gyp:libEGL',
-                '../third_party/angle/src/build_angle.gyp:libGLESv2',
-              ],
-              'sources': [
-                'tools/shader_bench/window_win.cc',
-              ],
-            }],
-          ],
-        },
-      ],
-    }],
-    ['OS == \"linux\" and target_arch != \"arm\"', {
-      'targets': [
-        {
-          'target_name': 'tile_render_bench',
-          'type': 'executable',
-          'dependencies': [
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'libraries': [
-            '-lGL',
-            '-ldl',
-          ],
-          'sources': [
-            'tools/tile_render_bench/tile_render_bench.cc',
-          ],
-        },
-      ],
-    }],
-    ['os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
-      'targets': [
-        {
-          'target_name': 'player_x11',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'yuv_convert',
-            '../base/base.gyp:base',
-            '../ui/gl/gl.gyp:gl',
-          ],
-          'link_settings': {
-            'libraries': [
-              '-ldl',
-              '-lX11',
-              '-lXrender',
-              '-lXext',
-            ],
-          },
-          'sources': [
-            'tools/player_x11/data_source_logger.cc',
-            'tools/player_x11/data_source_logger.h',
-            'tools/player_x11/gl_video_renderer.cc',
-            'tools/player_x11/gl_video_renderer.h',
-            'tools/player_x11/player_x11.cc',
-            'tools/player_x11/x11_video_renderer.cc',
-            'tools/player_x11/x11_video_renderer.h',
-          ],
-        },
-      ],
-    }],
-    ['OS == \"android\"', {
-      'targets': [
-        {
-          'target_name': 'player_android',
-          'type': 'static_library',
-          'sources': [
-            'base/android/media_player_bridge.cc',
-            'base/android/media_player_bridge.h',
-          ],
-          'dependencies': [
-            '../base/base.gyp:base',
-          ],
-          'include_dirs': [
-            '<(SHARED_INTERMEDIATE_DIR)/media',
-          ],
-          'actions': [
-            {
-              'action_name': 'generate-jni-headers',
-              'inputs': [
-                '../base/android/jni_generator/jni_generator.py',
-                'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
-              ],
-              'outputs': [
-                '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
-              ],
-              'action': [
-                'python',
-                '<(DEPTH)/base/android/jni_generator/jni_generator.py',
-                '-o',
-                '<@(_inputs)',
-                '<@(_outputs)',
-              ],
-            },
-          ],
-        },
-        {
-          'target_name': 'media_java',
-          'type': 'none',
-          'dependencies': [ '../base/base.gyp:base_java' ],
-          'variables': {
-            'package_name': 'media',
-            'java_in_dir': 'base/android/java',
-          },
-          'includes': [ '../build/java.gypi' ],
-        },
-
-      ],
-    }, { # OS != \"android\"'
-      # Android does not use ffmpeg, so disable the targets which require it.
-      'targets': [
-        {
-          'target_name': 'ffmpeg_unittests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'media_test_support',
-            '../base/base.gyp:base',
-            '../base/base.gyp:base_i18n',
-            '../base/base.gyp:test_support_base',
-            '../base/base.gyp:test_support_perf',
-            '../testing/gtest.gyp:gtest',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'ffmpeg/ffmpeg_unittest.cc',
-          ],
-          'conditions': [
-            ['toolkit_uses_gtk == 1', {
-              'dependencies': [
-                # Needed for the following #include chain:
-                #   base/run_all_unittests.cc
-                #   ../base/test_suite.h
-                #   gtk/gtk.h
-                '../build/linux/system.gyp:gtk',
-              ],
-              'conditions': [
-                ['linux_use_tcmalloc==1', {
-                  'dependencies': [
-                    '../base/allocator/allocator.gyp:allocator',
-                  ],
-                }],
-              ],
-            }],
-          ],
-        },
-        {
-          'target_name': 'ffmpeg_regression_tests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            'media_test_support',
-            '../base/base.gyp:test_support_base',
-            '../testing/gmock.gyp:gmock',
-            '../testing/gtest.gyp:gtest',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'base/test_data_util.cc',
-            'base/run_all_unittests.cc',
-            'ffmpeg/ffmpeg_regression_tests.cc',
-            'filters/pipeline_integration_test_base.cc',
-          ],
-          'conditions': [
-            ['os_posix==1 and OS!=\"mac\"', {
-              'conditions': [
-                ['linux_use_tcmalloc==1', {
-                  'dependencies': [
-                    '../base/allocator/allocator.gyp:allocator',
-                  ],
-                }],
-              ],
-            }],
-          ],
-        },
-        {
-          'target_name': 'ffmpeg_tests',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            '../base/base.gyp:base',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'test/ffmpeg_tests/ffmpeg_tests.cc',
-          ],
-        },
-        {
-          'target_name': 'media_bench',
-          'type': 'executable',
-          'dependencies': [
-            'media',
-            '../base/base.gyp:base',
-            '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
-          ],
-          'sources': [
-            'tools/media_bench/media_bench.cc',
-          ],
-        },
-      ],
-    }]
-  ],
-}
-" 0 64 (face font-lock-comment-face) 64 137 (face font-lock-comment-face) 137 166 (face font-lock-comment-face) 166 171 nil 171 172 (face font-lock-string-face) 172 181 (face font-lock-keyword-face) 181 182 (face font-lock-string-face) 182 190 nil 190 191 (face font-lock-string-face) 191 204 (face font-lock-variable-name-face) 204 205 (face font-lock-string-face) 205 214 nil 214 269 (face font-lock-comment-face) 269 273 nil 273 274 (face font-lock-string-face) 274 289 (face font-lock-variable-name-face) 289 290 (face font-lock-string-face) 290 299 nil 299 365 (face font-lock-comment-face) 365 369 nil 369 370 (face font-lock-string-face) 370 379 (face font-lock-variable-name-face) 379 380 (face font-lock-string-face) 380 392 nil 392 393 (face font-lock-string-face) 393 400 (face font-lock-keyword-face) 400 401 (face font-lock-string-face) 401 417 nil 417 418 (face font-lock-string-face) 418 429 (face font-lock-keyword-face) 429 430 (face font-lock-string-face) 430 432 nil 432 433 (face font-lock-string-face) 433 438 (face font-lock-function-name-face) 438 439 (face font-lock-string-face) 439 447 nil 447 448 (face font-lock-string-face) 448 452 (face font-lock-keyword-face) 452 453 (face font-lock-string-face) 453 455 nil 455 458 (face font-lock-string-face) 458 467 (face font-lock-variable-name-face) 467 469 (face font-lock-string-face) 469 477 nil 477 478 (face font-lock-string-face) 478 490 (face font-lock-keyword-face) 490 491 (face font-lock-string-face) 491 503 nil 503 504 (face font-lock-string-face) 504 515 (face font-lock-function-name-face) 515 516 (face font-lock-string-face) 516 526 nil 526 527 (face font-lock-string-face) 527 548 (face font-lock-function-name-face) 548 549 (face font-lock-string-face) 549 559 nil 559 560 (face font-lock-string-face) 560 643 (face font-lock-function-name-face) 643 644 (face font-lock-string-face) 644 654 nil 654 655 (face font-lock-string-face) 655 696 (face font-lock-function-name-face) 696 697 (face font-lock-string-face) 697 707 nil 707 708 (face font-lock-string-face) 708 735 (face font-lock-function-name-face) 735 736 (face font-lock-string-face) 736 746 nil 746 747 (face font-lock-string-face) 747 784 (face font-lock-function-name-face) 784 785 (face font-lock-string-face) 785 795 nil 795 796 (face font-lock-string-face) 796 811 (face font-lock-function-name-face) 811 812 (face font-lock-string-face) 812 829 nil 829 830 (face font-lock-string-face) 830 837 (face font-lock-keyword-face) 837 838 (face font-lock-string-face) 838 850 nil 850 851 (face font-lock-string-face) 851 871 (face font-lock-preprocessor-face) 871 872 (face font-lock-string-face) 872 889 nil 889 890 (face font-lock-string-face) 890 902 (face font-lock-keyword-face) 902 903 (face font-lock-string-face) 903 915 nil 915 916 (face font-lock-string-face) 916 918 (face font-lock-constant-face) 918 919 (face font-lock-string-face) 919 936 nil 936 937 (face font-lock-string-face) 937 944 (face font-lock-keyword-face) 944 945 (face font-lock-string-face) 945 957 nil 957 958 (face font-lock-string-face) 958 996 (face font-lock-constant-face) 996 997 (face font-lock-string-face) 997 1007 nil 1007 1008 (face font-lock-string-face) 1008 1045 (face font-lock-constant-face) 1045 1046 (face font-lock-string-face) 1046 1056 nil 1056 1057 (face font-lock-string-face) 1057 1100 (face font-lock-constant-face) 1100 1101 (face font-lock-string-face) 1101 1111 nil 1111 1112 (face font-lock-string-face) 1112 1154 (face font-lock-constant-face) 1154 1155 (face font-lock-string-face) 1155 1165 
nil 1165 1166 (face font-lock-string-face) 1166 1197 (face font-lock-constant-face) 1197 1198 (face font-lock-string-face) 1198 1208 nil 1208 1209 (face font-lock-string-face) 1209 1239 (face font-lock-constant-face) 1239 1240 (face font-lock-string-face) 1240 1250 nil 1250 1251 (face font-lock-string-face) 1251 1283 (face font-lock-constant-face) 1283 1284 (face font-lock-string-face) 1284 1294 nil 1294 1295 (face font-lock-string-face) 1295 1326 (face font-lock-constant-face) 1326 1327 (face font-lock-string-face) 1327 1337 nil 1337 1338 (face font-lock-string-face) 1338 1369 (face font-lock-constant-face) 1369 1370 (face font-lock-string-face) 1370 1380 nil 1380 1381 (face font-lock-string-face) 1381 1419 (face font-lock-constant-face) 1419 1420 (face font-lock-string-face) 1420 1430 nil 1430 1431 (face font-lock-string-face) 1431 1467 (face font-lock-constant-face) 1467 1468 (face font-lock-string-face) 1468 1478 nil 1478 1479 (face font-lock-string-face) 1479 1507 (face font-lock-constant-face) 1507 1508 (face font-lock-string-face) 1508 1518 nil 1518 1519 (face font-lock-string-face) 1519 1546 (face font-lock-constant-face) 1546 1547 (face font-lock-string-face) 1547 1557 nil 1557 1558 (face font-lock-string-face) 1558 1574 (face font-lock-constant-face) 1574 1575 (face font-lock-string-face) 1575 1585 nil 1585 1586 (face font-lock-string-face) 1586 1617 (face font-lock-constant-face) 1617 1618 (face font-lock-string-face) 1618 1628 nil 1628 1629 (face font-lock-string-face) 1629 1659 (face font-lock-constant-face) 1659 1660 (face font-lock-string-face) 1660 1670 nil 1670 1671 (face font-lock-string-face) 1671 1703 (face font-lock-constant-face) 1703 1704 (face font-lock-string-face) 1704 1714 nil 1714 1715 (face font-lock-string-face) 1715 1746 (face font-lock-constant-face) 1746 1747 (face font-lock-string-face) 1747 1757 nil 1757 1758 (face font-lock-string-face) 1758 1784 (face font-lock-constant-face) 1784 1785 (face font-lock-string-face) 1785 1795 nil 1795 1796 (face font-lock-string-face) 1796 1821 (face font-lock-constant-face) 1821 1822 (face font-lock-string-face) 1822 1832 nil 1832 1833 (face font-lock-string-face) 1833 1855 (face font-lock-constant-face) 1855 1856 (face font-lock-string-face) 1856 1866 nil 1866 1867 (face font-lock-string-face) 1867 1888 (face font-lock-constant-face) 1888 1889 (face font-lock-string-face) 1889 1899 nil 1899 1900 (face font-lock-string-face) 1900 1927 (face font-lock-constant-face) 1927 1928 (face font-lock-string-face) 1928 1938 nil 1938 1939 (face font-lock-string-face) 1939 1965 (face font-lock-constant-face) 1965 1966 (face font-lock-string-face) 1966 1976 nil 1976 1977 (face font-lock-string-face) 1977 2009 (face font-lock-constant-face) 2009 2010 (face font-lock-string-face) 2010 2020 nil 2020 2021 (face font-lock-string-face) 2021 2052 (face font-lock-constant-face) 2052 2053 (face font-lock-string-face) 2053 2063 nil 2063 2064 (face font-lock-string-face) 2064 2096 (face font-lock-constant-face) 2096 2097 (face font-lock-string-face) 2097 2107 nil 2107 2108 (face font-lock-string-face) 2108 2139 (face font-lock-constant-face) 2139 2140 (face font-lock-string-face) 2140 2150 nil 2150 2151 (face font-lock-string-face) 2151 2188 (face font-lock-constant-face) 2188 2189 (face font-lock-string-face) 2189 2199 nil 2199 2200 (face font-lock-string-face) 2200 2236 (face font-lock-constant-face) 2236 2237 (face font-lock-string-face) 2237 2247 nil 2247 2248 (face font-lock-string-face) 2248 2275 (face font-lock-constant-face) 2275 2276 
(face font-lock-string-face) 2276 2286 nil 2286 2287 (face font-lock-string-face) 2287 2313 (face font-lock-constant-face) 2313 2314 (face font-lock-string-face) 2314 2324 nil 2324 2325 (face font-lock-string-face) 2325 2352 (face font-lock-constant-face) 2352 2353 (face font-lock-string-face) 2353 2363 nil 2363 2364 (face font-lock-string-face) 2364 2390 (face font-lock-constant-face) 2390 2391 (face font-lock-string-face) 2391 2401 nil 2401 2402 (face font-lock-string-face) 2402 2427 (face font-lock-constant-face) 2427 2428 (face font-lock-string-face) 2428 2438 nil 2438 2439 (face font-lock-string-face) 2439 2463 (face font-lock-constant-face) 2463 2464 (face font-lock-string-face) 2464 2474 nil 2474 2475 (face font-lock-string-face) 2475 2494 (face font-lock-constant-face) 2494 2495 (face font-lock-string-face) 2495 2505 nil 2505 2506 (face font-lock-string-face) 2506 2524 (face font-lock-constant-face) 2524 2525 (face font-lock-string-face) 2525 2535 nil 2535 2536 (face font-lock-string-face) 2536 2571 (face font-lock-constant-face) 2571 2572 (face font-lock-string-face) 2572 2582 nil 2582 2583 (face font-lock-string-face) 2583 2617 (face font-lock-constant-face) 2617 2618 (face font-lock-string-face) 2618 2628 nil 2628 2629 (face font-lock-string-face) 2629 2668 (face font-lock-constant-face) 2668 2669 (face font-lock-string-face) 2669 2679 nil 2679 2680 (face font-lock-string-face) 2680 2721 (face font-lock-constant-face) 2721 2722 (face font-lock-string-face) 2722 2732 nil 2732 2733 (face font-lock-string-face) 2733 2765 (face font-lock-constant-face) 2765 2766 (face font-lock-string-face) 2766 2776 nil 2776 2777 (face font-lock-string-face) 2777 2808 (face font-lock-constant-face) 2808 2809 (face font-lock-string-face) 2809 2819 nil 2819 2820 (face font-lock-string-face) 2820 2853 (face font-lock-constant-face) 2853 2854 (face font-lock-string-face) 2854 2864 nil 2864 2865 (face font-lock-string-face) 2865 2897 (face font-lock-constant-face) 2897 2898 (face font-lock-string-face) 2898 2908 nil 2908 2909 (face font-lock-string-face) 2909 2943 (face font-lock-constant-face) 2943 2944 (face font-lock-string-face) 2944 2954 nil 2954 2955 (face font-lock-string-face) 2955 2988 (face font-lock-constant-face) 2988 2989 (face font-lock-string-face) 2989 2999 nil 2999 3000 (face font-lock-string-face) 3000 3025 (face font-lock-constant-face) 3025 3026 (face font-lock-string-face) 3026 3036 nil 3036 3037 (face font-lock-string-face) 3037 3061 (face font-lock-constant-face) 3061 3062 (face font-lock-string-face) 3062 3072 nil 3072 3073 (face font-lock-string-face) 3073 3099 (face font-lock-constant-face) 3099 3100 (face font-lock-string-face) 3100 3110 nil 3110 3111 (face font-lock-string-face) 3111 3136 (face font-lock-constant-face) 3136 3137 (face font-lock-string-face) 3137 3147 nil 3147 3148 (face font-lock-string-face) 3148 3172 (face font-lock-constant-face) 3172 3173 (face font-lock-string-face) 3173 3183 nil 3183 3184 (face font-lock-string-face) 3184 3207 (face font-lock-constant-face) 3207 3208 (face font-lock-string-face) 3208 3218 nil 3218 3219 (face font-lock-string-face) 3219 3246 (face font-lock-constant-face) 3246 3247 (face font-lock-string-face) 3247 3257 nil 3257 3258 (face font-lock-string-face) 3258 3284 (face font-lock-constant-face) 3284 3285 (face font-lock-string-face) 3285 3295 nil 3295 3296 (face font-lock-string-face) 3296 3322 (face font-lock-constant-face) 3322 3323 (face font-lock-string-face) 3323 3333 nil 3333 3334 (face font-lock-string-face) 3334 3359 
(face font-lock-constant-face) 3359 3360 (face font-lock-string-face) 3360 3370 nil 3370 3371 (face font-lock-string-face) 3371 3409 (face font-lock-constant-face) 3409 3410 (face font-lock-string-face) 3410 3420 nil 3420 3421 (face font-lock-string-face) 3421 3458 (face font-lock-constant-face) 3458 3459 (face font-lock-string-face) 3459 3469 nil 3469 3470 (face font-lock-string-face) 3470 3498 (face font-lock-constant-face) 3498 3499 (face font-lock-string-face) 3499 3509 nil 3509 3510 (face font-lock-string-face) 3510 3537 (face font-lock-constant-face) 3537 3538 (face font-lock-string-face) 3538 3548 nil 3548 3549 (face font-lock-string-face) 3549 3589 (face font-lock-constant-face) 3589 3590 (face font-lock-string-face) 3590 3600 nil 3600 3601 (face font-lock-string-face) 3601 3640 (face font-lock-constant-face) 3640 3641 (face font-lock-string-face) 3641 3651 nil 3651 3652 (face font-lock-string-face) 3652 3693 (face font-lock-constant-face) 3693 3694 (face font-lock-string-face) 3694 3704 nil 3704 3705 (face font-lock-string-face) 3705 3745 (face font-lock-constant-face) 3745 3746 (face font-lock-string-face) 3746 3756 nil 3756 3757 (face font-lock-string-face) 3757 3787 (face font-lock-constant-face) 3787 3788 (face font-lock-string-face) 3788 3798 nil 3798 3799 (face font-lock-string-face) 3799 3828 (face font-lock-constant-face) 3828 3829 (face font-lock-string-face) 3829 3839 nil 3839 3840 (face font-lock-string-face) 3840 3869 (face font-lock-constant-face) 3869 3870 (face font-lock-string-face) 3870 3880 nil 3880 3881 (face font-lock-string-face) 3881 3909 (face font-lock-constant-face) 3909 3910 (face font-lock-string-face) 3910 3920 nil 3920 3921 (face font-lock-string-face) 3921 3945 (face font-lock-constant-face) 3945 3946 (face font-lock-string-face) 3946 3956 nil 3956 3957 (face font-lock-string-face) 3957 3980 (face font-lock-constant-face) 3980 3981 (face font-lock-string-face) 3981 3991 nil 3991 3992 (face font-lock-string-face) 3992 4019 (face font-lock-constant-face) 4019 4020 (face font-lock-string-face) 4020 4030 nil 4030 4031 (face font-lock-string-face) 4031 4057 (face font-lock-constant-face) 4057 4058 (face font-lock-string-face) 4058 4068 nil 4068 4069 (face font-lock-string-face) 4069 4090 (face font-lock-constant-face) 4090 4091 (face font-lock-string-face) 4091 4101 nil 4101 4102 (face font-lock-string-face) 4102 4122 (face font-lock-constant-face) 4122 4123 (face font-lock-string-face) 4123 4133 nil 4133 4134 (face font-lock-string-face) 4134 4157 (face font-lock-constant-face) 4157 4158 (face font-lock-string-face) 4158 4168 nil 4168 4169 (face font-lock-string-face) 4169 4191 (face font-lock-constant-face) 4191 4192 (face font-lock-string-face) 4192 4202 nil 4202 4203 (face font-lock-string-face) 4203 4243 (face font-lock-constant-face) 4243 4244 (face font-lock-string-face) 4244 4254 nil 4254 4255 (face font-lock-string-face) 4255 4294 (face font-lock-constant-face) 4294 4295 (face font-lock-string-face) 4295 4305 nil 4305 4306 (face font-lock-string-face) 4306 4347 (face font-lock-constant-face) 4347 4348 (face font-lock-string-face) 4348 4358 nil 4358 4359 (face font-lock-string-face) 4359 4399 (face font-lock-constant-face) 4399 4400 (face font-lock-string-face) 4400 4410 nil 4410 4411 (face font-lock-string-face) 4411 4441 (face font-lock-constant-face) 4441 4442 (face font-lock-string-face) 4442 4452 nil 4452 4453 (face font-lock-string-face) 4453 4482 (face font-lock-constant-face) 4482 4483 (face font-lock-string-face) 4483 4493 nil 4493 4494 
(face font-lock-string-face) 4494 4523 (face font-lock-constant-face) 4523 4524 (face font-lock-string-face) 4524 4534 nil 4534 4535 (face font-lock-string-face) 4535 4563 (face font-lock-constant-face) 4563 4564 (face font-lock-string-face) 4564 4574 nil 4574 4575 (face font-lock-string-face) 4575 4610 (face font-lock-constant-face) 4610 4611 (face font-lock-string-face) 4611 4621 nil 4621 4622 (face font-lock-string-face) 4622 4656 (face font-lock-constant-face) 4656 4657 (face font-lock-string-face) 4657 4667 nil 4667 4668 (face font-lock-string-face) 4668 4697 (face font-lock-constant-face) 4697 4698 (face font-lock-string-face) 4698 4708 nil 4708 4709 (face font-lock-string-face) 4709 4737 (face font-lock-constant-face) 4737 4738 (face font-lock-string-face) 4738 4748 nil 4748 4749 (face font-lock-string-face) 4749 4780 (face font-lock-constant-face) 4780 4781 (face font-lock-string-face) 4781 4791 nil 4791 4792 (face font-lock-string-face) 4792 4822 (face font-lock-constant-face) 4822 4823 (face font-lock-string-face) 4823 4833 nil 4833 4834 (face font-lock-string-face) 4834 4869 (face font-lock-constant-face) 4869 4870 (face font-lock-string-face) 4870 4880 nil 4880 4881 (face font-lock-string-face) 4881 4915 (face font-lock-constant-face) 4915 4916 (face font-lock-string-face) 4916 4926 nil 4926 4927 (face font-lock-string-face) 4927 4948 (face font-lock-constant-face) 4948 4949 (face font-lock-string-face) 4949 4959 nil 4959 4960 (face font-lock-string-face) 4960 4980 (face font-lock-constant-face) 4980 4981 (face font-lock-string-face) 4981 4991 nil 4991 4992 (face font-lock-string-face) 4992 5020 (face font-lock-constant-face) 5020 5021 (face font-lock-string-face) 5021 5031 nil 5031 5032 (face font-lock-string-face) 5032 5059 (face font-lock-constant-face) 5059 5060 (face font-lock-string-face) 5060 5070 nil 5070 5071 (face font-lock-string-face) 5071 5092 (face font-lock-constant-face) 5092 5093 (face font-lock-string-face) 5093 5103 nil 5103 5104 (face font-lock-string-face) 5104 5132 (face font-lock-constant-face) 5132 5133 (face font-lock-string-face) 5133 5143 nil 5143 5144 (face font-lock-string-face) 5144 5171 (face font-lock-constant-face) 5171 5172 (face font-lock-string-face) 5172 5182 nil 5182 5183 (face font-lock-string-face) 5183 5217 (face font-lock-constant-face) 5217 5218 (face font-lock-string-face) 5218 5228 nil 5228 5229 (face font-lock-string-face) 5229 5262 (face font-lock-constant-face) 5262 5263 (face font-lock-string-face) 5263 5273 nil 5273 5274 (face font-lock-string-face) 5274 5297 (face font-lock-constant-face) 5297 5298 (face font-lock-string-face) 5298 5308 nil 5308 5309 (face font-lock-string-face) 5309 5324 (face font-lock-constant-face) 5324 5325 (face font-lock-string-face) 5325 5335 nil 5335 5336 (face font-lock-string-face) 5336 5350 (face font-lock-constant-face) 5350 5351 (face font-lock-string-face) 5351 5361 nil 5361 5362 (face font-lock-string-face) 5362 5380 (face font-lock-constant-face) 5380 5381 (face font-lock-string-face) 5381 5391 nil 5391 5392 (face font-lock-string-face) 5392 5409 (face font-lock-constant-face) 5409 5410 (face font-lock-string-face) 5410 5420 nil 5420 5421 (face font-lock-string-face) 5421 5443 (face font-lock-constant-face) 5443 5444 (face font-lock-string-face) 5444 5454 nil 5454 5455 (face font-lock-string-face) 5455 5476 (face font-lock-constant-face) 5476 5477 (face font-lock-string-face) 5477 5487 nil 5487 5488 (face font-lock-string-face) 5488 5501 (face font-lock-constant-face) 5501 5502 (face 
font-lock-string-face) 5502 5512 nil 5512 5513 (face font-lock-string-face) 5513 5525 (face font-lock-constant-face) 5525 5526 (face font-lock-string-face) 5526 5536 nil 5536 5537 (face font-lock-string-face) 5537 5561 (face font-lock-constant-face) 5561 5562 (face font-lock-string-face) 5562 5572 nil 5572 5573 (face font-lock-string-face) 5573 5596 (face font-lock-constant-face) 5596 5597 (face font-lock-string-face) 5597 5607 nil 5607 5608 (face font-lock-string-face) 5608 5627 (face font-lock-constant-face) 5627 5628 (face font-lock-string-face) 5628 5638 nil 5638 5639 (face font-lock-string-face) 5639 5657 (face font-lock-constant-face) 5657 5658 (face font-lock-string-face) 5658 5668 nil 5668 5669 (face font-lock-string-face) 5669 5688 (face font-lock-constant-face) 5688 5689 (face font-lock-string-face) 5689 5699 nil 5699 5700 (face font-lock-string-face) 5700 5718 (face font-lock-constant-face) 5718 5719 (face font-lock-string-face) 5719 5729 nil 5729 5730 (face font-lock-string-face) 5730 5752 (face font-lock-constant-face) 5752 5753 (face font-lock-string-face) 5753 5763 nil 5763 5764 (face font-lock-string-face) 5764 5785 (face font-lock-constant-face) 5785 5786 (face font-lock-string-face) 5786 5796 nil 5796 5797 (face font-lock-string-face) 5797 5819 (face font-lock-constant-face) 5819 5820 (face font-lock-string-face) 5820 5830 nil 5830 5831 (face font-lock-string-face) 5831 5852 (face font-lock-constant-face) 5852 5853 (face font-lock-string-face) 5853 5863 nil 5863 5864 (face font-lock-string-face) 5864 5880 (face font-lock-constant-face) 5880 5881 (face font-lock-string-face) 5881 5891 nil 5891 5892 (face font-lock-string-face) 5892 5915 (face font-lock-constant-face) 5915 5916 (face font-lock-string-face) 5916 5926 nil 5926 5927 (face font-lock-string-face) 5927 5942 (face font-lock-constant-face) 5942 5943 (face font-lock-string-face) 5943 5953 nil 5953 5954 (face font-lock-string-face) 5954 5968 (face font-lock-constant-face) 5968 5969 (face font-lock-string-face) 5969 5979 nil 5979 5980 (face font-lock-string-face) 5980 6002 (face font-lock-constant-face) 6002 6003 (face font-lock-string-face) 6003 6013 nil 6013 6014 (face font-lock-string-face) 6014 6035 (face font-lock-constant-face) 6035 6036 (face font-lock-string-face) 6036 6046 nil 6046 6047 (face font-lock-string-face) 6047 6059 (face font-lock-constant-face) 6059 6060 (face font-lock-string-face) 6060 6070 nil 6070 6071 (face font-lock-string-face) 6071 6082 (face font-lock-constant-face) 6082 6083 (face font-lock-string-face) 6083 6093 nil 6093 6094 (face font-lock-string-face) 6094 6119 (face font-lock-constant-face) 6119 6120 (face font-lock-string-face) 6120 6130 nil 6130 6131 (face font-lock-string-face) 6131 6155 (face font-lock-constant-face) 6155 6156 (face font-lock-string-face) 6156 6166 nil 6166 6167 (face font-lock-string-face) 6167 6185 (face font-lock-constant-face) 6185 6186 (face font-lock-string-face) 6186 6196 nil 6196 6197 (face font-lock-string-face) 6197 6212 (face font-lock-constant-face) 6212 6213 (face font-lock-string-face) 6213 6223 nil 6223 6224 (face font-lock-string-face) 6224 6238 (face font-lock-constant-face) 6238 6239 (face font-lock-string-face) 6239 6249 nil 6249 6250 (face font-lock-string-face) 6250 6282 (face font-lock-constant-face) 6282 6283 (face font-lock-string-face) 6283 6293 nil 6293 6294 (face font-lock-string-face) 6294 6325 (face font-lock-constant-face) 6325 6326 (face font-lock-string-face) 6326 6336 nil 6336 6337 (face font-lock-string-face) 6337 6349 (face 
font-lock-constant-face) 6349 6350 (face font-lock-string-face) 6350 6360 nil 6360 6361 (face font-lock-string-face) 6361 6382 (face font-lock-constant-face) 6382 6383 (face font-lock-string-face) 6383 6393 nil 6393 6394 (face font-lock-string-face) 6394 6413 (face font-lock-constant-face) 6413 6414 (face font-lock-string-face) 6414 6424 nil 6424 6425 (face font-lock-string-face) 6425 6442 (face font-lock-constant-face) 6442 6443 (face font-lock-string-face) 6443 6453 nil 6453 6454 (face font-lock-string-face) 6454 6470 (face font-lock-constant-face) 6470 6471 (face font-lock-string-face) 6471 6481 nil 6481 6482 (face font-lock-string-face) 6482 6504 (face font-lock-constant-face) 6504 6505 (face font-lock-string-face) 6505 6515 nil 6515 6516 (face font-lock-string-face) 6516 6535 (face font-lock-constant-face) 6535 6536 (face font-lock-string-face) 6536 6546 nil 6546 6547 (face font-lock-string-face) 6547 6569 (face font-lock-constant-face) 6569 6570 (face font-lock-string-face) 6570 6580 nil 6580 6581 (face font-lock-string-face) 6581 6602 (face font-lock-constant-face) 6602 6603 (face font-lock-string-face) 6603 6613 nil 6613 6614 (face font-lock-string-face) 6614 6631 (face font-lock-constant-face) 6631 6632 (face font-lock-string-face) 6632 6642 nil 6642 6643 (face font-lock-string-face) 6643 6671 (face font-lock-constant-face) 6671 6672 (face font-lock-string-face) 6672 6682 nil 6682 6683 (face font-lock-string-face) 6683 6710 (face font-lock-constant-face) 6710 6711 (face font-lock-string-face) 6711 6721 nil 6721 6722 (face font-lock-string-face) 6722 6738 (face font-lock-constant-face) 6738 6739 (face font-lock-string-face) 6739 6749 nil 6749 6750 (face font-lock-string-face) 6750 6765 (face font-lock-constant-face) 6765 6766 (face font-lock-string-face) 6766 6776 nil 6776 6777 (face font-lock-string-face) 6777 6800 (face font-lock-constant-face) 6800 6801 (face font-lock-string-face) 6801 6811 nil 6811 6812 (face font-lock-string-face) 6812 6834 (face font-lock-constant-face) 6834 6835 (face font-lock-string-face) 6835 6845 nil 6845 6846 (face font-lock-string-face) 6846 6860 (face font-lock-constant-face) 6860 6861 (face font-lock-string-face) 6861 6871 nil 6871 6872 (face font-lock-string-face) 6872 6885 (face font-lock-constant-face) 6885 6886 (face font-lock-string-face) 6886 6896 nil 6896 6897 (face font-lock-string-face) 6897 6920 (face font-lock-constant-face) 6920 6921 (face font-lock-string-face) 6921 6931 nil 6931 6932 (face font-lock-string-face) 6932 6954 (face font-lock-constant-face) 6954 6955 (face font-lock-string-face) 6955 6965 nil 6965 6966 (face font-lock-string-face) 6966 6986 (face font-lock-constant-face) 6986 6987 (face font-lock-string-face) 6987 6997 nil 6997 6998 (face font-lock-string-face) 6998 7017 (face font-lock-constant-face) 7017 7018 (face font-lock-string-face) 7018 7028 nil 7028 7029 (face font-lock-string-face) 7029 7050 (face font-lock-constant-face) 7050 7051 (face font-lock-string-face) 7051 7061 nil 7061 7062 (face font-lock-string-face) 7062 7082 (face font-lock-constant-face) 7082 7083 (face font-lock-string-face) 7083 7093 nil 7093 7094 (face font-lock-string-face) 7094 7122 (face font-lock-constant-face) 7122 7123 (face font-lock-string-face) 7123 7133 nil 7133 7134 (face font-lock-string-face) 7134 7161 (face font-lock-constant-face) 7161 7162 (face font-lock-string-face) 7162 7172 nil 7172 7173 (face font-lock-string-face) 7173 7194 (face font-lock-constant-face) 7194 7195 (face font-lock-string-face) 7195 7205 nil 7205 7206 (face 
font-lock-string-face) 7206 7226 (face font-lock-constant-face) 7226 7227 (face font-lock-string-face) 7227 7237 nil 7237 7238 (face font-lock-string-face) 7238 7266 (face font-lock-constant-face) 7266 7267 (face font-lock-string-face) 7267 7277 nil 7277 7278 (face font-lock-string-face) 7278 7305 (face font-lock-constant-face) 7305 7306 (face font-lock-string-face) 7306 7316 nil 7316 7317 (face font-lock-string-face) 7317 7336 (face font-lock-constant-face) 7336 7337 (face font-lock-string-face) 7337 7347 nil 7347 7348 (face font-lock-string-face) 7348 7366 (face font-lock-constant-face) 7366 7367 (face font-lock-string-face) 7367 7377 nil 7377 7378 (face font-lock-string-face) 7378 7399 (face font-lock-constant-face) 7399 7400 (face font-lock-string-face) 7400 7410 nil 7410 7411 (face font-lock-string-face) 7411 7429 (face font-lock-constant-face) 7429 7430 (face font-lock-string-face) 7430 7440 nil 7440 7441 (face font-lock-string-face) 7441 7458 (face font-lock-constant-face) 7458 7459 (face font-lock-string-face) 7459 7469 nil 7469 7470 (face font-lock-string-face) 7470 7493 (face font-lock-constant-face) 7493 7494 (face font-lock-string-face) 7494 7504 nil 7504 7505 (face font-lock-string-face) 7505 7527 (face font-lock-constant-face) 7527 7528 (face font-lock-string-face) 7528 7538 nil 7538 7539 (face font-lock-string-face) 7539 7562 (face font-lock-constant-face) 7562 7563 (face font-lock-string-face) 7563 7573 nil 7573 7574 (face font-lock-string-face) 7574 7596 (face font-lock-constant-face) 7596 7597 (face font-lock-string-face) 7597 7607 nil 7607 7608 (face font-lock-string-face) 7608 7631 (face font-lock-constant-face) 7631 7632 (face font-lock-string-face) 7632 7642 nil 7642 7643 (face font-lock-string-face) 7643 7665 (face font-lock-constant-face) 7665 7666 (face font-lock-string-face) 7666 7676 nil 7676 7677 (face font-lock-string-face) 7677 7705 (face font-lock-constant-face) 7705 7706 (face font-lock-string-face) 7706 7716 nil 7716 7717 (face font-lock-string-face) 7717 7744 (face font-lock-constant-face) 7744 7745 (face font-lock-string-face) 7745 7755 nil 7755 7756 (face font-lock-string-face) 7756 7791 (face font-lock-constant-face) 7791 7792 (face font-lock-string-face) 7792 7802 nil 7802 7803 (face font-lock-string-face) 7803 7837 (face font-lock-constant-face) 7837 7838 (face font-lock-string-face) 7838 7848 nil 7848 7849 (face font-lock-string-face) 7849 7879 (face font-lock-constant-face) 7879 7880 (face font-lock-string-face) 7880 7890 nil 7890 7891 (face font-lock-string-face) 7891 7920 (face font-lock-constant-face) 7920 7921 (face font-lock-string-face) 7921 7931 nil 7931 7932 (face font-lock-string-face) 7932 7962 (face font-lock-constant-face) 7962 7963 (face font-lock-string-face) 7963 7973 nil 7973 7974 (face font-lock-string-face) 7974 8003 (face font-lock-constant-face) 8003 8004 (face font-lock-string-face) 8004 8014 nil 8014 8015 (face font-lock-string-face) 8015 8039 (face font-lock-constant-face) 8039 8040 (face font-lock-string-face) 8040 8050 nil 8050 8051 (face font-lock-string-face) 8051 8074 (face font-lock-constant-face) 8074 8075 (face font-lock-string-face) 8075 8085 nil 8085 8086 (face font-lock-string-face) 8086 8116 (face font-lock-constant-face) 8116 8117 (face font-lock-string-face) 8117 8127 nil 8127 8128 (face font-lock-string-face) 8128 8152 (face font-lock-constant-face) 8152 8153 (face font-lock-string-face) 8153 8163 nil 8163 8164 (face font-lock-string-face) 8164 8187 (face font-lock-constant-face) 8187 8188 (face 
font-lock-string-face) 8188 8198 nil 8198 8199 (face font-lock-string-face) 8199 8230 (face font-lock-constant-face) 8230 8231 (face font-lock-string-face) 8231 8241 nil 8241 8242 (face font-lock-string-face) 8242 8272 (face font-lock-constant-face) 8272 8273 (face font-lock-string-face) 8273 8283 nil 8283 8284 (face font-lock-string-face) 8284 8309 (face font-lock-constant-face) 8309 8310 (face font-lock-string-face) 8310 8320 nil 8320 8321 (face font-lock-string-face) 8321 8345 (face font-lock-constant-face) 8345 8346 (face font-lock-string-face) 8346 8356 nil 8356 8357 (face font-lock-string-face) 8357 8399 (face font-lock-constant-face) 8399 8400 (face font-lock-string-face) 8400 8410 nil 8410 8411 (face font-lock-string-face) 8411 8452 (face font-lock-constant-face) 8452 8453 (face font-lock-string-face) 8453 8463 nil 8463 8464 (face font-lock-string-face) 8464 8486 (face font-lock-constant-face) 8486 8487 (face font-lock-string-face) 8487 8497 nil 8497 8498 (face font-lock-string-face) 8498 8519 (face font-lock-constant-face) 8519 8520 (face font-lock-string-face) 8520 8530 nil 8530 8531 (face font-lock-string-face) 8531 8562 (face font-lock-constant-face) 8562 8563 (face font-lock-string-face) 8563 8573 nil 8573 8574 (face font-lock-string-face) 8574 8604 (face font-lock-constant-face) 8604 8605 (face font-lock-string-face) 8605 8615 nil 8615 8616 (face font-lock-string-face) 8616 8643 (face font-lock-constant-face) 8643 8644 (face font-lock-string-face) 8644 8654 nil 8654 8655 (face font-lock-string-face) 8655 8681 (face font-lock-constant-face) 8681 8682 (face font-lock-string-face) 8682 8692 nil 8692 8693 (face font-lock-string-face) 8693 8721 (face font-lock-constant-face) 8721 8722 (face font-lock-string-face) 8722 8732 nil 8732 8733 (face font-lock-string-face) 8733 8760 (face font-lock-constant-face) 8760 8761 (face font-lock-string-face) 8761 8771 nil 8771 8772 (face font-lock-string-face) 8772 8805 (face font-lock-constant-face) 8805 8806 (face font-lock-string-face) 8806 8816 nil 8816 8817 (face font-lock-string-face) 8817 8849 (face font-lock-constant-face) 8849 8850 (face font-lock-string-face) 8850 8860 nil 8860 8861 (face font-lock-string-face) 8861 8892 (face font-lock-constant-face) 8892 8893 (face font-lock-string-face) 8893 8903 nil 8903 8904 (face font-lock-string-face) 8904 8934 (face font-lock-constant-face) 8934 8935 (face font-lock-string-face) 8935 8945 nil 8945 8946 (face font-lock-string-face) 8946 8978 (face font-lock-constant-face) 8978 8979 (face font-lock-string-face) 8979 8989 nil 8989 8990 (face font-lock-string-face) 8990 9021 (face font-lock-constant-face) 9021 9022 (face font-lock-string-face) 9022 9032 nil 9032 9033 (face font-lock-string-face) 9033 9063 (face font-lock-constant-face) 9063 9064 (face font-lock-string-face) 9064 9074 nil 9074 9075 (face font-lock-string-face) 9075 9104 (face font-lock-constant-face) 9104 9105 (face font-lock-string-face) 9105 9115 nil 9115 9116 (face font-lock-string-face) 9116 9158 (face font-lock-constant-face) 9158 9159 (face font-lock-string-face) 9159 9169 nil 9169 9170 (face font-lock-string-face) 9170 9211 (face font-lock-constant-face) 9211 9212 (face font-lock-string-face) 9212 9222 nil 9222 9223 (face font-lock-string-face) 9223 9272 (face font-lock-constant-face) 9272 9273 (face font-lock-string-face) 9273 9283 nil 9283 9284 (face font-lock-string-face) 9284 9332 (face font-lock-constant-face) 9332 9333 (face font-lock-string-face) 9333 9343 nil 9343 9344 (face font-lock-string-face) 9344 9388 (face 
font-lock-constant-face) 9388 9389 (face font-lock-string-face) 9389 9399 nil 9399 9400 (face font-lock-string-face) 9400 9445 (face font-lock-constant-face) 9445 9446 (face font-lock-string-face) 9446 9456 nil 9456 9457 (face font-lock-string-face) 9457 9507 (face font-lock-constant-face) 9507 9508 (face font-lock-string-face) 9508 9518 nil 9518 9519 (face font-lock-string-face) 9519 9570 (face font-lock-constant-face) 9570 9571 (face font-lock-string-face) 9571 9581 nil 9581 9582 (face font-lock-string-face) 9582 9611 (face font-lock-constant-face) 9611 9612 (face font-lock-string-face) 9612 9622 nil 9622 9623 (face font-lock-string-face) 9623 9659 (face font-lock-constant-face) 9659 9660 (face font-lock-string-face) 9660 9670 nil 9670 9671 (face font-lock-string-face) 9671 9714 (face font-lock-constant-face) 9714 9715 (face font-lock-string-face) 9715 9725 nil 9725 9726 (face font-lock-string-face) 9726 9768 (face font-lock-constant-face) 9768 9769 (face font-lock-string-face) 9769 9779 nil 9779 9780 (face font-lock-string-face) 9780 9816 (face font-lock-constant-face) 9816 9817 (face font-lock-string-face) 9817 9827 nil 9827 9828 (face font-lock-string-face) 9828 9863 (face font-lock-constant-face) 9863 9864 (face font-lock-string-face) 9864 9874 nil 9874 9875 (face font-lock-string-face) 9875 9910 (face font-lock-constant-face) 9910 9911 (face font-lock-string-face) 9911 9921 nil 9921 9922 (face font-lock-string-face) 9922 9958 (face font-lock-constant-face) 9958 9959 (face font-lock-string-face) 9959 9969 nil 9969 9970 (face font-lock-string-face) 9970 10005 (face font-lock-constant-face) 10005 10006 (face font-lock-string-face) 10006 10016 nil 10016 10017 (face font-lock-string-face) 10017 10050 (face font-lock-constant-face) 10050 10051 (face font-lock-string-face) 10051 10061 nil 10061 10062 (face font-lock-string-face) 10062 10094 (face font-lock-constant-face) 10094 10095 (face font-lock-string-face) 10095 10105 nil 10105 10106 (face font-lock-string-face) 10106 10150 (face font-lock-constant-face) 10150 10151 (face font-lock-string-face) 10151 10161 nil 10161 10162 (face font-lock-string-face) 10162 10198 (face font-lock-constant-face) 10198 10199 (face font-lock-string-face) 10199 10209 nil 10209 10210 (face font-lock-string-face) 10210 10245 (face font-lock-constant-face) 10245 10246 (face font-lock-string-face) 10246 10256 nil 10256 10257 (face font-lock-string-face) 10257 10296 (face font-lock-constant-face) 10296 10297 (face font-lock-string-face) 10297 10307 nil 10307 10308 (face font-lock-string-face) 10308 10346 (face font-lock-constant-face) 10346 10347 (face font-lock-string-face) 10347 10357 nil 10357 10358 (face font-lock-string-face) 10358 10403 (face font-lock-constant-face) 10403 10404 (face font-lock-string-face) 10404 10414 nil 10414 10415 (face font-lock-string-face) 10415 10459 (face font-lock-constant-face) 10459 10460 (face font-lock-string-face) 10460 10470 nil 10470 10471 (face font-lock-string-face) 10471 10487 (face font-lock-constant-face) 10487 10488 (face font-lock-string-face) 10488 10498 nil 10498 10499 (face font-lock-string-face) 10499 10514 (face font-lock-constant-face) 10514 10515 (face font-lock-string-face) 10515 10525 nil 10525 10526 (face font-lock-string-face) 10526 10559 (face font-lock-constant-face) 10559 10560 (face font-lock-string-face) 10560 10570 nil 10570 10571 (face font-lock-string-face) 10571 10603 (face font-lock-constant-face) 10603 10604 (face font-lock-string-face) 10604 10614 nil 10614 10615 (face font-lock-string-face) 
10615 10636 (face font-lock-constant-face) 10636 10637 (face font-lock-string-face) 10637 10647 nil 10647 10648 (face font-lock-string-face) 10648 10675 (face font-lock-constant-face) 10675 10676 (face font-lock-string-face) 10676 10686 nil 10686 10687 (face font-lock-string-face) 10687 10713 (face font-lock-constant-face) 10713 10714 (face font-lock-string-face) 10714 10724 nil 10724 10725 (face font-lock-string-face) 10725 10755 (face font-lock-constant-face) 10755 10756 (face font-lock-string-face) 10756 10766 nil 10766 10767 (face font-lock-string-face) 10767 10796 (face font-lock-constant-face) 10796 10797 (face font-lock-string-face) 10797 10807 nil 10807 10808 (face font-lock-string-face) 10808 10845 (face font-lock-constant-face) 10845 10846 (face font-lock-string-face) 10846 10856 nil 10856 10857 (face font-lock-string-face) 10857 10893 (face font-lock-constant-face) 10893 10894 (face font-lock-string-face) 10894 10904 nil 10904 10905 (face font-lock-string-face) 10905 10929 (face font-lock-constant-face) 10929 10930 (face font-lock-string-face) 10930 10940 nil 10940 10941 (face font-lock-string-face) 10941 10964 (face font-lock-constant-face) 10964 10965 (face font-lock-string-face) 10965 10975 nil 10975 10976 (face font-lock-string-face) 10976 10995 (face font-lock-constant-face) 10995 10996 (face font-lock-string-face) 10996 11006 nil 11006 11007 (face font-lock-string-face) 11007 11025 (face font-lock-constant-face) 11025 11026 (face font-lock-string-face) 11026 11036 nil 11036 11037 (face font-lock-string-face) 11037 11063 (face font-lock-constant-face) 11063 11064 (face font-lock-string-face) 11064 11074 nil 11074 11075 (face font-lock-string-face) 11075 11100 (face font-lock-constant-face) 11100 11101 (face font-lock-string-face) 11101 11111 nil 11111 11112 (face font-lock-string-face) 11112 11138 (face font-lock-constant-face) 11138 11139 (face font-lock-string-face) 11139 11149 nil 11149 11150 (face font-lock-string-face) 11150 11175 (face font-lock-constant-face) 11175 11176 (face font-lock-string-face) 11176 11193 nil 11193 11194 (face font-lock-string-face) 11194 11219 (face font-lock-keyword-face) 11219 11220 (face font-lock-string-face) 11220 11232 nil 11232 11233 (face font-lock-string-face) 11233 11245 (face font-lock-keyword-face) 11245 11246 (face font-lock-string-face) 11246 11260 nil 11260 11261 (face font-lock-string-face) 11261 11263 (face font-lock-constant-face) 11263 11264 (face font-lock-string-face) 11264 11292 nil 11292 11293 (face font-lock-string-face) 11293 11303 (face font-lock-keyword-face) 11303 11304 (face font-lock-string-face) 11304 11316 nil 11316 11381 (face font-lock-comment-face) 11381 11389 nil 11389 11439 (face font-lock-comment-face) 11439 11448 nil 11448 11449 (face font-lock-string-face) 11449 11464 (face font-lock-variable-name-face) 11464 11465 (face font-lock-string-face) 11465 11479 nil 11479 11480 (face font-lock-string-face) 11480 11492 (face font-lock-keyword-face) 11492 11493 (face font-lock-string-face) 11493 11509 nil 11509 11510 (face font-lock-string-face) 11510 11549 (face font-lock-function-name-face) 11549 11550 (face font-lock-string-face) 11550 11586 nil 11586 11587 (face font-lock-string-face) 11587 11602 (face font-lock-variable-name-face) 11602 11603 (face font-lock-string-face) 11603 11617 nil 11617 11618 (face font-lock-string-face) 11618 11626 (face font-lock-keyword-face) 11626 11627 (face font-lock-string-face) 11627 11643 nil 11643 11644 (face font-lock-string-face) 11644 11663 (face font-lock-constant-face) 
11663 11664 (face font-lock-string-face) 11664 11678 nil 11678 11679 (face font-lock-string-face) 11679 11702 (face font-lock-constant-face) 11702 11703 (face font-lock-string-face) 11703 11717 nil 11717 11718 (face font-lock-string-face) 11718 11740 (face font-lock-constant-face) 11740 11741 (face font-lock-string-face) 11741 11755 nil 11755 11756 (face font-lock-string-face) 11756 11779 (face font-lock-constant-face) 11779 11780 (face font-lock-string-face) 11780 11794 nil 11794 11795 (face font-lock-string-face) 11795 11817 (face font-lock-constant-face) 11817 11818 (face font-lock-string-face) 11818 11832 nil 11832 11833 (face font-lock-string-face) 11833 11861 (face font-lock-constant-face) 11861 11862 (face font-lock-string-face) 11862 11876 nil 11876 11877 (face font-lock-string-face) 11877 11904 (face font-lock-constant-face) 11904 11905 (face font-lock-string-face) 11905 11919 nil 11919 11920 (face font-lock-string-face) 11920 11950 (face font-lock-constant-face) 11950 11951 (face font-lock-string-face) 11951 11965 nil 11965 11966 (face font-lock-string-face) 11966 11995 (face font-lock-constant-face) 11995 11996 (face font-lock-string-face) 11996 12010 nil 12010 12011 (face font-lock-string-face) 12011 12035 (face font-lock-constant-face) 12035 12036 (face font-lock-string-face) 12036 12050 nil 12050 12051 (face font-lock-string-face) 12051 12074 (face font-lock-constant-face) 12074 12075 (face font-lock-string-face) 12075 12089 nil 12089 12090 (face font-lock-string-face) 12090 12120 (face font-lock-constant-face) 12120 12121 (face font-lock-string-face) 12121 12135 nil 12135 12136 (face font-lock-string-face) 12136 12167 (face font-lock-constant-face) 12167 12168 (face font-lock-string-face) 12168 12182 nil 12182 12183 (face font-lock-string-face) 12183 12213 (face font-lock-constant-face) 12213 12214 (face font-lock-string-face) 12214 12228 nil 12228 12229 (face font-lock-string-face) 12229 12254 (face font-lock-constant-face) 12254 12255 (face font-lock-string-face) 12255 12269 nil 12269 12270 (face font-lock-string-face) 12270 12294 (face font-lock-constant-face) 12294 12295 (face font-lock-string-face) 12295 12309 nil 12309 12310 (face font-lock-string-face) 12310 12352 (face font-lock-constant-face) 12352 12353 (face font-lock-string-face) 12353 12367 nil 12367 12368 (face font-lock-string-face) 12368 12409 (face font-lock-constant-face) 12409 12410 (face font-lock-string-face) 12410 12424 nil 12424 12425 (face font-lock-string-face) 12425 12447 (face font-lock-constant-face) 12447 12448 (face font-lock-string-face) 12448 12462 nil 12462 12463 (face font-lock-string-face) 12463 12484 (face font-lock-constant-face) 12484 12485 (face font-lock-string-face) 12485 12499 nil 12499 12500 (face font-lock-string-face) 12500 12531 (face font-lock-constant-face) 12531 12532 (face font-lock-string-face) 12532 12546 nil 12546 12547 (face font-lock-string-face) 12547 12577 (face font-lock-constant-face) 12577 12578 (face font-lock-string-face) 12578 12592 nil 12592 12593 (face font-lock-string-face) 12593 12621 (face font-lock-constant-face) 12621 12622 (face font-lock-string-face) 12622 12636 nil 12636 12637 (face font-lock-string-face) 12637 12664 (face font-lock-constant-face) 12664 12665 (face font-lock-string-face) 12665 12679 nil 12679 12680 (face font-lock-string-face) 12680 12707 (face font-lock-constant-face) 12707 12708 (face font-lock-string-face) 12708 12722 nil 12722 12723 (face font-lock-string-face) 12723 12749 (face font-lock-constant-face) 12749 12750 (face 
font-lock-string-face) 12750 12764 nil 12764 12765 (face font-lock-string-face) 12765 12791 (face font-lock-constant-face) 12791 12792 (face font-lock-string-face) 12792 12806 nil 12806 12807 (face font-lock-string-face) 12807 12832 (face font-lock-constant-face) 12832 12833 (face font-lock-string-face) 12833 12868 nil 12868 12937 (face font-lock-comment-face) 12937 12945 nil 12945 13016 (face font-lock-comment-face) 13016 13024 nil 13024 13040 (face font-lock-comment-face) 13040 13049 nil 13049 13050 (face font-lock-string-face) 13050 13065 (face font-lock-variable-name-face) 13065 13066 (face font-lock-string-face) 13066 13080 nil 13080 13081 (face font-lock-string-face) 13081 13089 (face font-lock-keyword-face) 13089 13090 (face font-lock-string-face) 13090 13105 nil 13105 13106 (face font-lock-string-face) 13106 13149 (face font-lock-constant-face) 13149 13150 (face font-lock-string-face) 13150 13175 nil 13175 13176 (face font-lock-string-face) 13176 13183 (face font-lock-keyword-face) 13183 13184 (face font-lock-string-face) 13184 13199 nil 13199 13200 (face font-lock-string-face) 13200 13248 (face font-lock-constant-face) 13248 13249 (face font-lock-string-face) 13249 13274 nil 13274 13275 (face font-lock-string-face) 13275 13288 (face font-lock-keyword-face) 13288 13289 (face font-lock-string-face) 13289 13305 nil 13305 13306 (face font-lock-string-face) 13306 13315 (face font-lock-keyword-face) 13315 13316 (face font-lock-string-face) 13316 13334 nil 13334 13335 (face font-lock-string-face) 13335 13345 (face font-lock-constant-face) 13345 13346 (face font-lock-string-face) 13346 13397 nil 13397 13398 (face font-lock-string-face) 13398 13443 (face font-lock-variable-name-face) 13443 13444 (face font-lock-string-face) 13444 13458 nil 13458 13459 (face font-lock-string-face) 13459 13472 (face font-lock-keyword-face) 13472 13473 (face font-lock-string-face) 13473 13489 nil 13489 13490 (face font-lock-string-face) 13490 13499 (face font-lock-keyword-face) 13499 13500 (face font-lock-string-face) 13500 13518 nil 13518 13519 (face font-lock-string-face) 13519 13527 (face font-lock-constant-face) 13527 13528 (face font-lock-string-face) 13528 13579 nil 13579 13580 (face font-lock-string-face) 13580 13593 (face font-lock-variable-name-face) 13593 13594 (face font-lock-string-face) 13594 13608 nil 13608 13609 (face font-lock-string-face) 13609 13617 (face font-lock-keyword-face) 13617 13618 (face font-lock-string-face) 13618 13623 nil 13623 13624 (face font-lock-string-face) 13624 13631 (face font-lock-constant-face) 13631 13632 (face font-lock-string-face) 13632 13634 nil 13634 13635 (face font-lock-string-face) 13635 13641 (face font-lock-constant-face) 13641 13642 (face font-lock-string-face) 13642 13671 nil 13671 13672 (face font-lock-string-face) 13672 13679 (face font-lock-constant-face) 13679 13680 (face font-lock-string-face) 13680 13682 nil 13682 13683 (face font-lock-string-face) 13683 13703 (face font-lock-constant-face) 13703 13704 (face font-lock-string-face) 13704 13720 nil 13720 13721 (face font-lock-string-face) 13721 13734 (face font-lock-keyword-face) 13734 13735 (face font-lock-string-face) 13735 13751 nil 13751 13752 (face font-lock-string-face) 13752 13761 (face font-lock-keyword-face) 13761 13762 (face font-lock-string-face) 13762 13815 nil 13815 13816 (face font-lock-string-face) 13816 13829 (face font-lock-variable-name-face) 13829 13830 (face font-lock-string-face) 13830 13844 nil 13844 13845 (face font-lock-string-face) 13845 13853 (face font-lock-keyword-face) 
13853 13854 (face font-lock-string-face) 13854 13870 nil 13870 13871 (face font-lock-string-face) 13871 13909 (face font-lock-constant-face) 13909 13910 (face font-lock-string-face) 13910 13924 nil 13924 13925 (face font-lock-string-face) 13925 13962 (face font-lock-constant-face) 13962 13963 (face font-lock-string-face) 13963 13999 nil 13999 14000 (face font-lock-string-face) 14000 14011 (face font-lock-variable-name-face) 14011 14012 (face font-lock-string-face) 14012 14026 nil 14026 14027 (face font-lock-string-face) 14027 14036 (face font-lock-keyword-face) 14036 14037 (face font-lock-string-face) 14037 14053 nil 14053 14054 (face font-lock-string-face) 14054 14064 (face font-lock-keyword-face) 14064 14065 (face font-lock-string-face) 14065 14084 nil 14084 14085 (face font-lock-string-face) 14085 14096 (face font-lock-variable-name-face) 14096 14097 (face font-lock-string-face) 14097 14117 nil 14117 14129 (face font-lock-string-face) 14129 14131 nil 14131 14169 (face font-lock-string-face) 14169 14176 (face font-lock-variable-name-face) 14176 14182 (face font-lock-string-face) 14182 14193 (face font-lock-variable-name-face) 14193 14196 (face font-lock-string-face) 14196 14233 nil 14233 14245 (face font-lock-string-face) 14245 14247 nil 14247 14259 (face font-lock-string-face) 14259 14316 nil 14316 14317 (face font-lock-string-face) 14317 14327 (face font-lock-keyword-face) 14327 14328 (face font-lock-string-face) 14328 14345 nil 14345 14346 (face font-lock-string-face) 14346 14359 (face font-lock-variable-name-face) 14359 14360 (face font-lock-string-face) 14360 14378 nil 14378 14379 (face font-lock-string-face) 14379 14385 (face font-lock-keyword-face) 14385 14386 (face font-lock-string-face) 14386 14406 nil 14406 14411 (face font-lock-string-face) 14411 14413 (face font-lock-variable-name-face) 14413 14423 (face font-lock-variable-name-face) 14423 14443 (face font-lock-string-face) 14443 14476 nil 14476 14477 (face font-lock-string-face) 14477 14490 (face font-lock-keyword-face) 14490 14491 (face font-lock-string-face) 14491 14511 nil 14511 14512 (face font-lock-string-face) 14512 14521 (face font-lock-keyword-face) 14521 14522 (face font-lock-string-face) 14522 14544 nil 14544 14545 (face font-lock-string-face) 14545 14549 (face font-lock-constant-face) 14549 14551 (face font-lock-variable-name-face) 14551 14561 (face font-lock-variable-name-face) 14561 14578 (face font-lock-constant-face) 14578 14579 (face font-lock-string-face) 14579 14631 nil 14631 14632 (face font-lock-string-face) 14632 14639 (face font-lock-keyword-face) 14639 14640 (face font-lock-string-face) 14640 14660 nil 14660 14661 (face font-lock-string-face) 14661 14669 (face font-lock-preprocessor-face) 14669 14670 (face font-lock-string-face) 14670 14707 nil 14707 14729 (face font-lock-comment-face) 14729 14743 nil 14743 14744 (face font-lock-string-face) 14744 14752 (face font-lock-keyword-face) 14752 14753 (face font-lock-string-face) 14753 14773 nil 14773 14774 (face font-lock-string-face) 14774 14800 (face font-lock-constant-face) 14800 14801 (face font-lock-string-face) 14801 14819 nil 14819 14820 (face font-lock-string-face) 14820 14845 (face font-lock-constant-face) 14845 14846 (face font-lock-string-face) 14846 14915 nil 14915 14916 (face font-lock-string-face) 14916 14929 (face font-lock-variable-name-face) 14929 14930 (face font-lock-string-face) 14930 14944 nil 14944 14945 (face font-lock-string-face) 14945 14955 (face font-lock-keyword-face) 14955 14956 (face font-lock-string-face) 14956 14973 nil 14973 
14974 (face font-lock-string-face) 14974 14993 (face font-lock-variable-name-face) 14993 14994 (face font-lock-string-face) 14994 15012 nil 15012 15013 (face font-lock-string-face) 15013 15019 (face font-lock-keyword-face) 15019 15020 (face font-lock-string-face) 15020 15040 nil 15040 15075 (face font-lock-string-face) 15075 15108 nil 15108 15109 (face font-lock-string-face) 15109 15122 (face font-lock-keyword-face) 15122 15123 (face font-lock-string-face) 15123 15143 nil 15143 15144 (face font-lock-string-face) 15144 15153 (face font-lock-keyword-face) 15153 15154 (face font-lock-string-face) 15154 15176 nil 15176 15177 (face font-lock-string-face) 15177 15215 (face font-lock-constant-face) 15215 15216 (face font-lock-string-face) 15216 15268 nil 15268 15269 (face font-lock-string-face) 15269 15276 (face font-lock-keyword-face) 15276 15277 (face font-lock-string-face) 15277 15297 nil 15297 15298 (face font-lock-string-face) 15298 15312 (face font-lock-preprocessor-face) 15312 15313 (face font-lock-string-face) 15313 15350 nil 15350 15378 (face font-lock-comment-face) 15378 15392 nil 15392 15393 (face font-lock-string-face) 15393 15401 (face font-lock-keyword-face) 15401 15402 (face font-lock-string-face) 15402 15422 nil 15422 15423 (face font-lock-string-face) 15423 15450 (face font-lock-constant-face) 15450 15451 (face font-lock-string-face) 15451 15469 nil 15469 15470 (face font-lock-string-face) 15470 15496 (face font-lock-constant-face) 15496 15497 (face font-lock-string-face) 15497 15566 nil 15566 15567 (face font-lock-string-face) 15567 15600 (face font-lock-variable-name-face) 15600 15601 (face font-lock-string-face) 15601 15615 nil 15615 15663 (face font-lock-comment-face) 15663 15673 nil 15673 15674 (face font-lock-string-face) 15674 15682 (face font-lock-keyword-face) 15682 15683 (face font-lock-string-face) 15683 15699 nil 15699 15700 (face font-lock-string-face) 15700 15743 (face font-lock-constant-face) 15743 15744 (face font-lock-string-face) 15744 15758 nil 15758 15759 (face font-lock-string-face) 15759 15801 (face font-lock-constant-face) 15801 15802 (face font-lock-string-face) 15802 15838 nil 15838 15839 (face font-lock-string-face) 15839 15848 (face font-lock-variable-name-face) 15848 15849 (face font-lock-string-face) 15849 15863 nil 15863 15864 (face font-lock-string-face) 15864 15877 (face font-lock-keyword-face) 15877 15878 (face font-lock-string-face) 15878 15894 nil 15894 15895 (face font-lock-string-face) 15895 15904 (face font-lock-keyword-face) 15904 15905 (face font-lock-string-face) 15905 15923 nil 15923 15924 (face font-lock-string-face) 15924 15980 (face font-lock-constant-face) 15980 15981 (face font-lock-string-face) 15981 15997 nil 15997 15998 (face font-lock-string-face) 15998 16057 (face font-lock-constant-face) 16057 16058 (face font-lock-string-face) 16058 16074 nil 16074 16075 (face font-lock-string-face) 16075 16131 (face font-lock-constant-face) 16131 16132 (face font-lock-string-face) 16132 16148 nil 16148 16149 (face font-lock-string-face) 16149 16205 (face font-lock-constant-face) 16205 16206 (face font-lock-string-face) 16206 16222 nil 16222 16223 (face font-lock-string-face) 16223 16275 (face font-lock-constant-face) 16275 16276 (face font-lock-string-face) 16276 16327 nil 16327 16328 (face font-lock-string-face) 16328 16337 (face font-lock-variable-name-face) 16337 16338 (face font-lock-string-face) 16338 16352 nil 16352 16353 (face font-lock-string-face) 16353 16361 (face font-lock-keyword-face) 16361 16362 (face font-lock-string-face) 
16362 16378 nil 16378 16379 (face font-lock-string-face) 16379 16406 (face font-lock-constant-face) 16406 16407 (face font-lock-string-face) 16407 16421 nil 16421 16422 (face font-lock-string-face) 16422 16448 (face font-lock-constant-face) 16448 16449 (face font-lock-string-face) 16449 16463 nil 16463 16464 (face font-lock-string-face) 16464 16507 (face font-lock-constant-face) 16507 16508 (face font-lock-string-face) 16508 16522 nil 16522 16523 (face font-lock-string-face) 16523 16565 (face font-lock-constant-face) 16565 16566 (face font-lock-string-face) 16566 16602 nil 16602 16603 (face font-lock-string-face) 16603 16646 (face font-lock-variable-name-face) 16646 16647 (face font-lock-string-face) 16647 16661 nil 16661 16662 (face font-lock-string-face) 16662 16669 (face font-lock-keyword-face) 16669 16670 (face font-lock-string-face) 16670 16686 nil 16686 16687 (face font-lock-string-face) 16687 16697 (face font-lock-constant-face) 16697 16698 (face font-lock-string-face) 16698 16712 nil 16712 16713 (face font-lock-string-face) 16713 16722 (face font-lock-constant-face) 16722 16723 (face font-lock-string-face) 16723 16737 nil 16737 16738 (face font-lock-string-face) 16738 16760 (face font-lock-constant-face) 16760 16761 (face font-lock-string-face) 16761 16775 nil 16775 16776 (face font-lock-string-face) 16776 16797 (face font-lock-constant-face) 16797 16798 (face font-lock-string-face) 16798 16812 nil 16812 16813 (face font-lock-string-face) 16813 16830 (face font-lock-constant-face) 16830 16831 (face font-lock-string-face) 16831 16845 nil 16845 16846 (face font-lock-string-face) 16846 16862 (face font-lock-constant-face) 16862 16863 (face font-lock-string-face) 16863 16877 nil 16877 16878 (face font-lock-string-face) 16878 16889 (face font-lock-constant-face) 16889 16890 (face font-lock-string-face) 16890 16904 nil 16904 16905 (face font-lock-string-face) 16905 16915 (face font-lock-constant-face) 16915 16916 (face font-lock-string-face) 16916 16930 nil 16930 16931 (face font-lock-string-face) 16931 16955 (face font-lock-constant-face) 16955 16956 (face font-lock-string-face) 16956 16970 nil 16970 16971 (face font-lock-string-face) 16971 16994 (face font-lock-constant-face) 16994 16995 (face font-lock-string-face) 16995 17009 nil 17009 17010 (face font-lock-string-face) 17010 17034 (face font-lock-constant-face) 17034 17035 (face font-lock-string-face) 17035 17049 nil 17049 17050 (face font-lock-string-face) 17050 17073 (face font-lock-constant-face) 17073 17074 (face font-lock-string-face) 17074 17088 nil 17088 17089 (face font-lock-string-face) 17089 17114 (face font-lock-constant-face) 17114 17115 (face font-lock-string-face) 17115 17129 nil 17129 17130 (face font-lock-string-face) 17130 17154 (face font-lock-constant-face) 17154 17155 (face font-lock-string-face) 17155 17210 nil 17210 17211 (face font-lock-string-face) 17211 17222 (face font-lock-keyword-face) 17222 17223 (face font-lock-string-face) 17223 17225 nil 17225 17226 (face font-lock-string-face) 17226 17237 (face font-lock-function-name-face) 17237 17238 (face font-lock-string-face) 17238 17246 nil 17246 17247 (face font-lock-string-face) 17247 17251 (face font-lock-keyword-face) 17251 17252 (face font-lock-string-face) 17252 17254 nil 17254 17255 (face font-lock-string-face) 17255 17269 (face font-lock-type-face) 17269 17270 (face font-lock-string-face) 17270 17278 nil 17278 17279 (face font-lock-string-face) 17279 17291 (face font-lock-keyword-face) 17291 17292 (face font-lock-string-face) 17292 17304 nil 17304 17305 
(face font-lock-string-face) 17305 17307 (face font-lock-constant-face) 17307 17308 (face font-lock-string-face) 17308 17325 nil 17325 17326 (face font-lock-string-face) 17326 17336 (face font-lock-keyword-face) 17336 17337 (face font-lock-string-face) 17337 17350 nil 17350 17351 (face font-lock-string-face) 17351 17371 (face font-lock-variable-name-face) 17371 17372 (face font-lock-string-face) 17372 17386 nil 17386 17387 (face font-lock-string-face) 17387 17404 (face font-lock-keyword-face) 17404 17405 (face font-lock-string-face) 17405 17423 nil 17423 17424 (face font-lock-string-face) 17424 17442 (face font-lock-variable-name-face) 17442 17443 (face font-lock-string-face) 17443 17461 nil 17461 17462 (face font-lock-string-face) 17462 17469 (face font-lock-keyword-face) 17469 17470 (face font-lock-string-face) 17470 17474 nil 17474 17498 (face font-lock-string-face) 17498 17553 nil 17553 17554 (face font-lock-string-face) 17554 17599 (face font-lock-variable-name-face) 17599 17600 (face font-lock-string-face) 17600 17614 nil 17614 17615 (face font-lock-string-face) 17615 17627 (face font-lock-keyword-face) 17627 17628 (face font-lock-string-face) 17628 17644 nil 17644 17645 (face font-lock-string-face) 17645 17665 (face font-lock-function-name-face) 17665 17666 (face font-lock-string-face) 17666 17703 nil 17703 17704 (face font-lock-string-face) 17704 17724 (face font-lock-variable-name-face) 17724 17725 (face font-lock-string-face) 17725 17739 nil 17739 17740 (face font-lock-string-face) 17740 17752 (face font-lock-keyword-face) 17752 17753 (face font-lock-string-face) 17753 17769 nil 17769 17770 (face font-lock-string-face) 17770 17790 (face font-lock-function-name-face) 17790 17791 (face font-lock-string-face) 17791 17833 nil 17833 17834 (face font-lock-string-face) 17834 17841 (face font-lock-keyword-face) 17841 17842 (face font-lock-string-face) 17842 17854 nil 17854 17855 (face font-lock-string-face) 17855 17874 (face font-lock-constant-face) 17874 17875 (face font-lock-string-face) 17875 17885 nil 17885 17886 (face font-lock-string-face) 17886 17904 (face font-lock-constant-face) 17904 17905 (face font-lock-string-face) 17905 17935 nil 17935 17936 (face font-lock-string-face) 17936 17947 (face font-lock-keyword-face) 17947 17948 (face font-lock-string-face) 17948 17950 nil 17950 17951 (face font-lock-string-face) 17951 17971 (face font-lock-function-name-face) 17971 17972 (face font-lock-string-face) 17972 17980 nil 17980 17981 (face font-lock-string-face) 17981 17985 (face font-lock-keyword-face) 17985 17986 (face font-lock-string-face) 17986 17988 nil 17988 17989 (face font-lock-string-face) 17989 18003 (face font-lock-type-face) 18003 18004 (face font-lock-string-face) 18004 18012 nil 18012 18013 (face font-lock-string-face) 18013 18025 (face font-lock-keyword-face) 18025 18026 (face font-lock-string-face) 18026 18038 nil 18038 18039 (face font-lock-string-face) 18039 18041 (face font-lock-constant-face) 18041 18042 (face font-lock-string-face) 18042 18059 nil 18059 18060 (face font-lock-string-face) 18060 18067 (face font-lock-keyword-face) 18067 18068 (face font-lock-string-face) 18068 18080 nil 18080 18081 (face font-lock-string-face) 18081 18114 (face font-lock-constant-face) 18114 18115 (face font-lock-string-face) 18115 18125 nil 18125 18126 (face font-lock-string-face) 18126 18162 (face font-lock-constant-face) 18162 18163 (face font-lock-string-face) 18163 18173 nil 18173 18174 (face font-lock-string-face) 18174 18212 (face font-lock-constant-face) 18212 18213 (face 
font-lock-string-face) 18213 18223 nil 18223 18224 (face font-lock-string-face) 18224 18261 (face font-lock-constant-face) 18261 18262 (face font-lock-string-face) 18262 18272 nil 18272 18273 (face font-lock-string-face) 18273 18311 (face font-lock-constant-face) 18311 18312 (face font-lock-string-face) 18312 18322 nil 18322 18323 (face font-lock-string-face) 18323 18356 (face font-lock-constant-face) 18356 18357 (face font-lock-string-face) 18357 18367 nil 18367 18368 (face font-lock-string-face) 18368 18403 (face font-lock-constant-face) 18403 18404 (face font-lock-string-face) 18404 18414 nil 18414 18415 (face font-lock-string-face) 18415 18451 (face font-lock-constant-face) 18451 18452 (face font-lock-string-face) 18452 18462 nil 18462 18463 (face font-lock-string-face) 18463 18499 (face font-lock-constant-face) 18499 18500 (face font-lock-string-face) 18500 18510 nil 18510 18511 (face font-lock-string-face) 18511 18547 (face font-lock-constant-face) 18547 18548 (face font-lock-string-face) 18548 18558 nil 18558 18559 (face font-lock-string-face) 18559 18581 (face font-lock-constant-face) 18581 18582 (face font-lock-string-face) 18582 18592 nil 18592 18593 (face font-lock-string-face) 18593 18618 (face font-lock-constant-face) 18618 18619 (face font-lock-string-face) 18619 18629 nil 18629 18630 (face font-lock-string-face) 18630 18657 (face font-lock-constant-face) 18657 18658 (face font-lock-string-face) 18658 18668 nil 18668 18669 (face font-lock-string-face) 18669 18697 (face font-lock-constant-face) 18697 18698 (face font-lock-string-face) 18698 18708 nil 18708 18709 (face font-lock-string-face) 18709 18750 (face font-lock-constant-face) 18750 18751 (face font-lock-string-face) 18751 18761 nil 18761 18762 (face font-lock-string-face) 18762 18803 (face font-lock-constant-face) 18803 18804 (face font-lock-string-face) 18804 18814 nil 18814 18815 (face font-lock-string-face) 18815 18856 (face font-lock-constant-face) 18856 18857 (face font-lock-string-face) 18857 18867 nil 18867 18868 (face font-lock-string-face) 18868 18902 (face font-lock-constant-face) 18902 18903 (face font-lock-string-face) 18903 18913 nil 18913 18914 (face font-lock-string-face) 18914 18948 (face font-lock-constant-face) 18948 18949 (face font-lock-string-face) 18949 18959 nil 18959 18960 (face font-lock-string-face) 18960 18994 (face font-lock-constant-face) 18994 18995 (face font-lock-string-face) 18995 19005 nil 19005 19006 (face font-lock-string-face) 19006 19035 (face font-lock-constant-face) 19035 19036 (face font-lock-string-face) 19036 19046 nil 19046 19047 (face font-lock-string-face) 19047 19075 (face font-lock-constant-face) 19075 19076 (face font-lock-string-face) 19076 19093 nil 19093 19094 (face font-lock-string-face) 19094 19104 (face font-lock-keyword-face) 19104 19105 (face font-lock-string-face) 19105 19118 nil 19118 19119 (face font-lock-string-face) 19119 19139 (face font-lock-variable-name-face) 19139 19140 (face font-lock-string-face) 19140 19154 nil 19154 19155 (face font-lock-string-face) 19155 19172 (face font-lock-keyword-face) 19172 19173 (face font-lock-string-face) 19173 19191 nil 19191 19192 (face font-lock-string-face) 19192 19210 (face font-lock-variable-name-face) 19210 19211 (face font-lock-string-face) 19211 19229 nil 19229 19230 (face font-lock-string-face) 19230 19237 (face font-lock-keyword-face) 19237 19238 (face font-lock-string-face) 19238 19242 nil 19242 19266 (face font-lock-string-face) 19266 19321 nil 19321 19322 (face font-lock-string-face) 19322 19342 (face 
font-lock-variable-name-face) 19342 19343 (face font-lock-string-face) 19343 19357 nil 19357 19399 (face font-lock-comment-face) 19399 19409 nil 19409 19410 (face font-lock-string-face) 19410 19417 (face font-lock-keyword-face) 19417 19418 (face font-lock-string-face) 19418 19434 nil 19434 19435 (face font-lock-string-face) 19435 19480 (face font-lock-constant-face) 19480 19481 (face font-lock-string-face) 19481 19495 nil 19495 19496 (face font-lock-string-face) 19496 19535 (face font-lock-constant-face) 19535 19536 (face font-lock-string-face) 19536 19573 nil 19573 19574 (face font-lock-string-face) 19574 19623 (face font-lock-variable-name-face) 19623 19624 (face font-lock-string-face) 19624 19638 nil 19638 19639 (face font-lock-string-face) 19639 19645 (face font-lock-keyword-face) 19645 19646 (face font-lock-string-face) 19646 19662 nil 19662 19670 (face font-lock-string-face) 19670 19707 nil 19707 19708 (face font-lock-string-face) 19708 19719 (face font-lock-variable-name-face) 19719 19720 (face font-lock-string-face) 19720 19734 nil 19734 19735 (face font-lock-string-face) 19735 19749 (face font-lock-keyword-face) 19749 19750 (face font-lock-string-face) 19750 19766 nil 19766 19773 (face font-lock-string-face) 19773 19791 nil 19791 19792 (face font-lock-string-face) 19792 19806 (face font-lock-keyword-face) 19806 19807 (face font-lock-string-face) 19807 19827 nil 19827 19890 (face font-lock-comment-face) 19890 19906 nil 19906 19971 (face font-lock-comment-face) 19971 19987 nil 19987 20032 (face font-lock-comment-face) 20032 20048 nil 20048 20072 (face font-lock-string-face) 20072 20074 nil 20074 20077 (face font-lock-string-face) 20077 20080 nil 20080 20086 (face font-lock-comment-face) 20086 20155 nil 20155 20156 (face font-lock-string-face) 20156 20165 (face font-lock-variable-name-face) 20165 20166 (face font-lock-string-face) 20166 20180 nil 20180 20181 (face font-lock-string-face) 20181 20190 (face font-lock-keyword-face) 20190 20191 (face font-lock-string-face) 20191 20207 nil 20207 20208 (face font-lock-string-face) 20208 20218 (face font-lock-variable-name-face) 20218 20219 (face font-lock-string-face) 20219 20237 nil 20237 20246 (face font-lock-string-face) 20246 20262 nil 20262 20270 (face font-lock-string-face) 20270 20286 nil 20286 20298 (face font-lock-string-face) 20298 20314 nil 20314 20322 (face font-lock-string-face) 20322 20374 nil 20374 20375 (face font-lock-string-face) 20375 20384 (face font-lock-variable-name-face) 20384 20385 (face font-lock-string-face) 20385 20399 nil 20399 20400 (face font-lock-string-face) 20400 20409 (face font-lock-keyword-face) 20409 20410 (face font-lock-string-face) 20410 20426 nil 20426 20427 (face font-lock-string-face) 20427 20437 (face font-lock-variable-name-face) 20437 20438 (face font-lock-string-face) 20438 20456 nil 20456 20466 (face font-lock-string-face) 20466 20482 nil 20482 20491 (face font-lock-string-face) 20491 20507 nil 20507 20519 (face font-lock-string-face) 20519 20535 nil 20535 20543 (face font-lock-string-face) 20543 20595 nil 20595 20596 (face font-lock-string-face) 20596 20621 (face font-lock-variable-name-face) 20621 20622 (face font-lock-string-face) 20622 20636 nil 20636 20637 (face font-lock-string-face) 20637 20646 (face font-lock-keyword-face) 20646 20647 (face font-lock-string-face) 20647 20663 nil 20663 20664 (face font-lock-string-face) 20664 20674 (face font-lock-keyword-face) 20674 20675 (face font-lock-string-face) 20675 20695 nil 20695 20696 (face font-lock-string-face) 20696 20715 (face 
font-lock-variable-name-face) 20715 20716 (face font-lock-string-face) 20716 20736 nil 20736 20748 (face font-lock-string-face) 20748 20770 nil 20770 20780 (face font-lock-string-face) 20780 20800 nil 20800 20807 (face font-lock-string-face) 20807 20827 nil 20827 20839 (face font-lock-string-face) 20839 20859 nil 20859 20867 (face font-lock-string-face) 20867 20923 nil 20923 20935 (face font-lock-string-face) 20935 20957 nil 20957 20972 (face font-lock-string-face) 20972 20992 nil 20992 20999 (face font-lock-string-face) 20999 21019 nil 21019 21026 (face font-lock-string-face) 21026 21046 nil 21046 21058 (face font-lock-string-face) 21058 21078 nil 21078 21086 (face font-lock-string-face) 21086 21180 nil 21180 21181 (face font-lock-string-face) 21181 21190 (face font-lock-keyword-face) 21190 21191 (face font-lock-string-face) 21191 21203 nil 21203 21204 (face font-lock-string-face) 21204 21220 (face font-lock-variable-name-face) 21220 21221 (face font-lock-string-face) 21221 21223 nil 21223 21224 (face font-lock-string-face) 21224 21256 (face font-lock-variable-name-face) 21256 21257 (face font-lock-string-face) 21257 21274 nil 21274 21314 (face font-lock-string-face) 21314 21325 nil 21325 21326 (face font-lock-string-face) 21326 21334 (face font-lock-keyword-face) 21334 21335 (face font-lock-string-face) 21335 21347 nil 21347 21348 (face font-lock-string-face) 21348 21385 (face font-lock-constant-face) 21385 21386 (face font-lock-string-face) 21386 21416 nil 21416 21417 (face font-lock-string-face) 21417 21428 (face font-lock-keyword-face) 21428 21429 (face font-lock-string-face) 21429 21431 nil 21431 21432 (face font-lock-string-face) 21432 21452 (face font-lock-function-name-face) 21452 21453 (face font-lock-string-face) 21453 21461 nil 21461 21462 (face font-lock-string-face) 21462 21466 (face font-lock-keyword-face) 21466 21467 (face font-lock-string-face) 21467 21469 nil 21469 21470 (face font-lock-string-face) 21470 21484 (face font-lock-type-face) 21484 21485 (face font-lock-string-face) 21485 21493 nil 21493 21494 (face font-lock-string-face) 21494 21506 (face font-lock-keyword-face) 21506 21507 (face font-lock-string-face) 21507 21519 nil 21519 21520 (face font-lock-string-face) 21520 21522 (face font-lock-constant-face) 21522 21523 (face font-lock-string-face) 21523 21540 nil 21540 21541 (face font-lock-string-face) 21541 21548 (face font-lock-keyword-face) 21548 21549 (face font-lock-string-face) 21549 21561 nil 21561 21562 (face font-lock-string-face) 21562 21595 (face font-lock-constant-face) 21595 21596 (face font-lock-string-face) 21596 21606 nil 21606 21607 (face font-lock-string-face) 21607 21637 (face font-lock-constant-face) 21637 21638 (face font-lock-string-face) 21638 21648 nil 21648 21649 (face font-lock-string-face) 21649 21682 (face font-lock-constant-face) 21682 21683 (face font-lock-string-face) 21683 21693 nil 21693 21694 (face font-lock-string-face) 21694 21724 (face font-lock-constant-face) 21724 21725 (face font-lock-string-face) 21725 21735 nil 21735 21736 (face font-lock-string-face) 21736 21758 (face font-lock-constant-face) 21758 21759 (face font-lock-string-face) 21759 21769 nil 21769 21770 (face font-lock-string-face) 21770 21795 (face font-lock-constant-face) 21795 21796 (face font-lock-string-face) 21796 21806 nil 21806 21807 (face font-lock-string-face) 21807 21836 (face font-lock-constant-face) 21836 21837 (face font-lock-string-face) 21837 21847 nil 21847 21848 (face font-lock-string-face) 21848 21876 (face font-lock-constant-face) 21876 21877 
(face font-lock-string-face) 21877 21907 nil 21907 21908 (face font-lock-string-face) 21908 21919 (face font-lock-keyword-face) 21919 21920 (face font-lock-string-face) 21920 21922 nil 21922 21923 (face font-lock-string-face) 21923 21938 (face font-lock-function-name-face) 21938 21939 (face font-lock-string-face) 21939 21947 nil 21947 21948 (face font-lock-string-face) 21948 21952 (face font-lock-keyword-face) 21952 21953 (face font-lock-string-face) 21953 21955 nil 21955 21956 (face font-lock-string-face) 21956 21966 (face font-lock-type-face) 21966 21967 (face font-lock-string-face) 21967 21975 nil 21975 21976 (face font-lock-string-face) 21976 21988 (face font-lock-keyword-face) 21988 21989 (face font-lock-string-face) 21989 22001 nil 22001 22002 (face font-lock-string-face) 22002 22007 (face font-lock-function-name-face) 22007 22008 (face font-lock-string-face) 22008 22018 nil 22018 22019 (face font-lock-string-face) 22019 22037 (face font-lock-function-name-face) 22037 22038 (face font-lock-string-face) 22038 22048 nil 22048 22049 (face font-lock-string-face) 22049 22060 (face font-lock-function-name-face) 22060 22061 (face font-lock-string-face) 22061 22071 nil 22071 22072 (face font-lock-string-face) 22072 22093 (face font-lock-function-name-face) 22093 22094 (face font-lock-string-face) 22094 22104 nil 22104 22105 (face font-lock-string-face) 22105 22131 (face font-lock-function-name-face) 22131 22132 (face font-lock-string-face) 22132 22142 nil 22142 22143 (face font-lock-string-face) 22143 22177 (face font-lock-function-name-face) 22177 22178 (face font-lock-string-face) 22178 22188 nil 22188 22189 (face font-lock-string-face) 22189 22215 (face font-lock-function-name-face) 22215 22216 (face font-lock-string-face) 22216 22226 nil 22226 22227 (face font-lock-string-face) 22227 22253 (face font-lock-function-name-face) 22253 22254 (face font-lock-string-face) 22254 22264 nil 22264 22265 (face font-lock-string-face) 22265 22280 (face font-lock-function-name-face) 22280 22281 (face font-lock-string-face) 22281 22298 nil 22298 22299 (face font-lock-string-face) 22299 22306 (face font-lock-keyword-face) 22306 22307 (face font-lock-string-face) 22307 22319 nil 22319 22320 (face font-lock-string-face) 22320 22361 (face font-lock-constant-face) 22361 22362 (face font-lock-string-face) 22362 22372 nil 22372 22373 (face font-lock-string-face) 22373 22413 (face font-lock-constant-face) 22413 22414 (face font-lock-string-face) 22414 22424 nil 22424 22425 (face font-lock-string-face) 22425 22461 (face font-lock-constant-face) 22461 22462 (face font-lock-string-face) 22462 22472 nil 22472 22473 (face font-lock-string-face) 22473 22502 (face font-lock-constant-face) 22502 22503 (face font-lock-string-face) 22503 22513 nil 22513 22514 (face font-lock-string-face) 22514 22550 (face font-lock-constant-face) 22550 22551 (face font-lock-string-face) 22551 22561 nil 22561 22562 (face font-lock-string-face) 22562 22610 (face font-lock-constant-face) 22610 22611 (face font-lock-string-face) 22611 22621 nil 22621 22622 (face font-lock-string-face) 22622 22663 (face font-lock-constant-face) 22663 22664 (face font-lock-string-face) 22664 22674 nil 22674 22675 (face font-lock-string-face) 22675 22711 (face font-lock-constant-face) 22711 22712 (face font-lock-string-face) 22712 22722 nil 22722 22723 (face font-lock-string-face) 22723 22757 (face font-lock-constant-face) 22757 22758 (face font-lock-string-face) 22758 22768 nil 22768 22769 (face font-lock-string-face) 22769 22797 (face font-lock-constant-face) 
22797 22798 (face font-lock-string-face) 22798 22808 nil 22808 22809 (face font-lock-string-face) 22809 22853 (face font-lock-constant-face) 22853 22854 (face font-lock-string-face) 22854 22864 nil 22864 22865 (face font-lock-string-face) 22865 22900 (face font-lock-constant-face) 22900 22901 (face font-lock-string-face) 22901 22911 nil 22911 22912 (face font-lock-string-face) 22912 22961 (face font-lock-constant-face) 22961 22962 (face font-lock-string-face) 22962 22972 nil 22972 22973 (face font-lock-string-face) 22973 23011 (face font-lock-constant-face) 23011 23012 (face font-lock-string-face) 23012 23022 nil 23022 23023 (face font-lock-string-face) 23023 23055 (face font-lock-constant-face) 23055 23056 (face font-lock-string-face) 23056 23066 nil 23066 23067 (face font-lock-string-face) 23067 23116 (face font-lock-constant-face) 23116 23117 (face font-lock-string-face) 23117 23127 nil 23127 23128 (face font-lock-string-face) 23128 23178 (face font-lock-constant-face) 23178 23179 (face font-lock-string-face) 23179 23189 nil 23189 23190 (face font-lock-string-face) 23190 23228 (face font-lock-constant-face) 23228 23229 (face font-lock-string-face) 23229 23239 nil 23239 23240 (face font-lock-string-face) 23240 23277 (face font-lock-constant-face) 23277 23278 (face font-lock-string-face) 23278 23288 nil 23288 23289 (face font-lock-string-face) 23289 23332 (face font-lock-constant-face) 23332 23333 (face font-lock-string-face) 23333 23343 nil 23343 23344 (face font-lock-string-face) 23344 23368 (face font-lock-constant-face) 23368 23369 (face font-lock-string-face) 23369 23379 nil 23379 23380 (face font-lock-string-face) 23380 23402 (face font-lock-constant-face) 23402 23403 (face font-lock-string-face) 23403 23413 nil 23413 23414 (face font-lock-string-face) 23414 23447 (face font-lock-constant-face) 23447 23448 (face font-lock-string-face) 23448 23458 nil 23458 23459 (face font-lock-string-face) 23459 23487 (face font-lock-constant-face) 23487 23488 (face font-lock-string-face) 23488 23498 nil 23498 23499 (face font-lock-string-face) 23499 23530 (face font-lock-constant-face) 23530 23531 (face font-lock-string-face) 23531 23541 nil 23541 23542 (face font-lock-string-face) 23542 23563 (face font-lock-constant-face) 23563 23564 (face font-lock-string-face) 23564 23574 nil 23574 23575 (face font-lock-string-face) 23575 23609 (face font-lock-constant-face) 23609 23610 (face font-lock-string-face) 23610 23620 nil 23620 23621 (face font-lock-string-face) 23621 23654 (face font-lock-constant-face) 23654 23655 (face font-lock-string-face) 23655 23665 nil 23665 23666 (face font-lock-string-face) 23666 23700 (face font-lock-constant-face) 23700 23701 (face font-lock-string-face) 23701 23711 nil 23711 23712 (face font-lock-string-face) 23712 23753 (face font-lock-constant-face) 23753 23754 (face font-lock-string-face) 23754 23764 nil 23764 23765 (face font-lock-string-face) 23765 23790 (face font-lock-constant-face) 23790 23791 (face font-lock-string-face) 23791 23801 nil 23801 23802 (face font-lock-string-face) 23802 23825 (face font-lock-constant-face) 23825 23826 (face font-lock-string-face) 23826 23836 nil 23836 23837 (face font-lock-string-face) 23837 23862 (face font-lock-constant-face) 23862 23863 (face font-lock-string-face) 23863 23873 nil 23873 23874 (face font-lock-string-face) 23874 23906 (face font-lock-constant-face) 23906 23907 (face font-lock-string-face) 23907 23917 nil 23917 23918 (face font-lock-string-face) 23918 23947 (face font-lock-constant-face) 23947 23948 (face 
font-lock-string-face) 23948 23958 nil 23958 23959 (face font-lock-string-face) 23959 23981 (face font-lock-constant-face) 23981 23982 (face font-lock-string-face) 23982 23992 nil 23992 23993 (face font-lock-string-face) 23993 24014 (face font-lock-constant-face) 24014 24015 (face font-lock-string-face) 24015 24025 nil 24025 24026 (face font-lock-string-face) 24026 24054 (face font-lock-constant-face) 24054 24055 (face font-lock-string-face) 24055 24065 nil 24065 24066 (face font-lock-string-face) 24066 24093 (face font-lock-constant-face) 24093 24094 (face font-lock-string-face) 24094 24104 nil 24104 24105 (face font-lock-string-face) 24105 24133 (face font-lock-constant-face) 24133 24134 (face font-lock-string-face) 24134 24144 nil 24144 24145 (face font-lock-string-face) 24145 24177 (face font-lock-constant-face) 24177 24178 (face font-lock-string-face) 24178 24188 nil 24188 24189 (face font-lock-string-face) 24189 24221 (face font-lock-constant-face) 24221 24222 (face font-lock-string-face) 24222 24232 nil 24232 24233 (face font-lock-string-face) 24233 24277 (face font-lock-constant-face) 24277 24278 (face font-lock-string-face) 24278 24288 nil 24288 24289 (face font-lock-string-face) 24289 24328 (face font-lock-constant-face) 24328 24329 (face font-lock-string-face) 24329 24339 nil 24339 24340 (face font-lock-string-face) 24340 24379 (face font-lock-constant-face) 24379 24380 (face font-lock-string-face) 24380 24390 nil 24390 24391 (face font-lock-string-face) 24391 24424 (face font-lock-constant-face) 24424 24425 (face font-lock-string-face) 24425 24435 nil 24435 24436 (face font-lock-string-face) 24436 24476 (face font-lock-constant-face) 24476 24477 (face font-lock-string-face) 24477 24487 nil 24487 24488 (face font-lock-string-face) 24488 24521 (face font-lock-constant-face) 24521 24522 (face font-lock-string-face) 24522 24532 nil 24532 24533 (face font-lock-string-face) 24533 24567 (face font-lock-constant-face) 24567 24568 (face font-lock-string-face) 24568 24578 nil 24578 24579 (face font-lock-string-face) 24579 24610 (face font-lock-constant-face) 24610 24611 (face font-lock-string-face) 24611 24621 nil 24621 24622 (face font-lock-string-face) 24622 24673 (face font-lock-constant-face) 24673 24674 (face font-lock-string-face) 24674 24684 nil 24684 24685 (face font-lock-string-face) 24685 24725 (face font-lock-constant-face) 24725 24726 (face font-lock-string-face) 24726 24736 nil 24736 24737 (face font-lock-string-face) 24737 24773 (face font-lock-constant-face) 24773 24774 (face font-lock-string-face) 24774 24784 nil 24784 24785 (face font-lock-string-face) 24785 24821 (face font-lock-constant-face) 24821 24822 (face font-lock-string-face) 24822 24832 nil 24832 24833 (face font-lock-string-face) 24833 24874 (face font-lock-constant-face) 24874 24875 (face font-lock-string-face) 24875 24885 nil 24885 24886 (face font-lock-string-face) 24886 24926 (face font-lock-constant-face) 24926 24927 (face font-lock-string-face) 24927 24937 nil 24937 24938 (face font-lock-string-face) 24938 24977 (face font-lock-constant-face) 24977 24978 (face font-lock-string-face) 24978 24988 nil 24988 24989 (face font-lock-string-face) 24989 25035 (face font-lock-constant-face) 25035 25036 (face font-lock-string-face) 25036 25046 nil 25046 25047 (face font-lock-string-face) 25047 25070 (face font-lock-constant-face) 25070 25071 (face font-lock-string-face) 25071 25081 nil 25081 25082 (face font-lock-string-face) 25082 25104 (face font-lock-constant-face) 25104 25105 (face font-lock-string-face) 25105 
25115 nil 25115 25116 (face font-lock-string-face) 25116 25152 (face font-lock-constant-face) 25152 25153 (face font-lock-string-face) 25153 25163 nil 25163 25164 (face font-lock-string-face) 25164 25210 (face font-lock-constant-face) 25210 25211 (face font-lock-string-face) 25211 25221 nil 25221 25222 (face font-lock-string-face) 25222 25250 (face font-lock-constant-face) 25250 25251 (face font-lock-string-face) 25251 25268 nil 25268 25269 (face font-lock-string-face) 25269 25279 (face font-lock-keyword-face) 25279 25280 (face font-lock-string-face) 25280 25293 nil 25293 25294 (face font-lock-string-face) 25294 25319 (face font-lock-variable-name-face) 25319 25320 (face font-lock-string-face) 25320 25334 nil 25334 25335 (face font-lock-string-face) 25335 25345 (face font-lock-keyword-face) 25345 25346 (face font-lock-string-face) 25346 25363 nil 25363 25364 (face font-lock-string-face) 25364 25385 (face font-lock-variable-name-face) 25385 25386 (face font-lock-string-face) 25386 25404 nil 25404 25405 (face font-lock-string-face) 25405 25417 (face font-lock-keyword-face) 25417 25418 (face font-lock-string-face) 25418 25438 nil 25438 25439 (face font-lock-string-face) 25439 25480 (face font-lock-function-name-face) 25480 25481 (face font-lock-string-face) 25481 25550 nil 25550 25551 (face font-lock-string-face) 25551 25566 (face font-lock-variable-name-face) 25566 25567 (face font-lock-string-face) 25567 25581 nil 25581 25582 (face font-lock-string-face) 25582 25594 (face font-lock-keyword-face) 25594 25595 (face font-lock-string-face) 25595 25611 nil 25611 25612 (face font-lock-string-face) 25612 25651 (face font-lock-function-name-face) 25651 25652 (face font-lock-string-face) 25652 25688 nil 25688 25689 (face font-lock-string-face) 25689 25704 (face font-lock-variable-name-face) 25704 25705 (face font-lock-string-face) 25705 25719 nil 25719 25720 (face font-lock-string-face) 25720 25728 (face font-lock-keyword-face) 25728 25729 (face font-lock-string-face) 25729 25745 nil 25745 25746 (face font-lock-string-face) 25746 25782 (face font-lock-constant-face) 25782 25783 (face font-lock-string-face) 25783 25797 nil 25797 25798 (face font-lock-string-face) 25798 25820 (face font-lock-constant-face) 25820 25821 (face font-lock-string-face) 25821 25835 nil 25835 25836 (face font-lock-string-face) 25836 25857 (face font-lock-constant-face) 25857 25858 (face font-lock-string-face) 25858 25872 nil 25872 25873 (face font-lock-string-face) 25873 25905 (face font-lock-constant-face) 25905 25906 (face font-lock-string-face) 25906 25920 nil 25920 25921 (face font-lock-string-face) 25921 25961 (face font-lock-constant-face) 25961 25962 (face font-lock-string-face) 25962 25976 nil 25976 25977 (face font-lock-string-face) 25977 26016 (face font-lock-constant-face) 26016 26017 (face font-lock-string-face) 26017 26031 nil 26031 26032 (face font-lock-string-face) 26032 26065 (face font-lock-constant-face) 26065 26066 (face font-lock-string-face) 26066 26080 nil 26080 26081 (face font-lock-string-face) 26081 26115 (face font-lock-constant-face) 26115 26116 (face font-lock-string-face) 26116 26130 nil 26130 26131 (face font-lock-string-face) 26131 26162 (face font-lock-constant-face) 26162 26163 (face font-lock-string-face) 26163 26177 nil 26177 26178 (face font-lock-string-face) 26178 26229 (face font-lock-constant-face) 26229 26230 (face font-lock-string-face) 26230 26244 nil 26244 26245 (face font-lock-string-face) 26245 26285 (face font-lock-constant-face) 26285 26286 (face font-lock-string-face) 26286 
26300 nil 26300 26301 (face font-lock-string-face) 26301 26337 (face font-lock-constant-face) 26337 26338 (face font-lock-string-face) 26338 26352 nil 26352 26353 (face font-lock-string-face) 26353 26394 (face font-lock-constant-face) 26394 26395 (face font-lock-string-face) 26395 26409 nil 26409 26410 (face font-lock-string-face) 26410 26443 (face font-lock-constant-face) 26443 26444 (face font-lock-string-face) 26444 26458 nil 26458 26459 (face font-lock-string-face) 26459 26495 (face font-lock-constant-face) 26495 26496 (face font-lock-string-face) 26496 26532 nil 26532 26533 (face font-lock-string-face) 26533 26546 (face font-lock-variable-name-face) 26546 26547 (face font-lock-string-face) 26547 26561 nil 26561 26562 (face font-lock-string-face) 26562 26572 (face font-lock-keyword-face) 26572 26573 (face font-lock-string-face) 26573 26590 nil 26590 26591 (face font-lock-string-face) 26591 26604 (face font-lock-variable-name-face) 26604 26605 (face font-lock-string-face) 26605 26623 nil 26623 26624 (face font-lock-string-face) 26624 26631 (face font-lock-keyword-face) 26631 26632 (face font-lock-string-face) 26632 26652 nil 26652 26653 (face font-lock-string-face) 26653 26688 (face font-lock-constant-face) 26688 26689 (face font-lock-string-face) 26689 26722 nil 26722 26723 (face font-lock-string-face) 26723 26730 (face font-lock-keyword-face) 26730 26731 (face font-lock-string-face) 26731 26751 nil 26751 26752 (face font-lock-string-face) 26752 26760 (face font-lock-preprocessor-face) 26760 26761 (face font-lock-string-face) 26761 26831 nil 26831 26832 (face font-lock-string-face) 26832 26873 (face font-lock-variable-name-face) 26873 26874 (face font-lock-string-face) 26874 26888 nil 26888 26889 (face font-lock-string-face) 26889 26896 (face font-lock-keyword-face) 26896 26897 (face font-lock-string-face) 26897 26913 nil 26913 26914 (face font-lock-string-face) 26914 26954 (face font-lock-constant-face) 26954 26955 (face font-lock-string-face) 26955 26991 nil 26991 26992 (face font-lock-string-face) 26992 27035 (face font-lock-variable-name-face) 27035 27036 (face font-lock-string-face) 27036 27050 nil 27050 27051 (face font-lock-string-face) 27051 27058 (face font-lock-keyword-face) 27058 27059 (face font-lock-string-face) 27059 27075 nil 27075 27076 (face font-lock-string-face) 27076 27095 (face font-lock-constant-face) 27095 27096 (face font-lock-string-face) 27096 27110 nil 27110 27111 (face font-lock-string-face) 27111 27137 (face font-lock-constant-face) 27137 27138 (face font-lock-string-face) 27138 27152 nil 27152 27153 (face font-lock-string-face) 27153 27186 (face font-lock-constant-face) 27186 27187 (face font-lock-string-face) 27187 27201 nil 27201 27202 (face font-lock-string-face) 27202 27235 (face font-lock-constant-face) 27235 27236 (face font-lock-string-face) 27236 27291 nil 27291 27292 (face font-lock-string-face) 27292 27303 (face font-lock-keyword-face) 27303 27304 (face font-lock-string-face) 27304 27306 nil 27306 27307 (face font-lock-string-face) 27307 27325 (face font-lock-function-name-face) 27325 27326 (face font-lock-string-face) 27326 27334 nil 27334 27335 (face font-lock-string-face) 27335 27339 (face font-lock-keyword-face) 27339 27340 (face font-lock-string-face) 27340 27342 nil 27342 27343 (face font-lock-string-face) 27343 27357 (face font-lock-type-face) 27357 27358 (face font-lock-string-face) 27358 27366 nil 27366 27367 (face font-lock-string-face) 27367 27379 (face font-lock-keyword-face) 27379 27380 (face font-lock-string-face) 27380 27392 nil 
27392 27393 (face font-lock-string-face) 27393 27398 (face font-lock-function-name-face) 27398 27399 (face font-lock-string-face) 27399 27409 nil 27409 27410 (face font-lock-string-face) 27410 27431 (face font-lock-function-name-face) 27431 27432 (face font-lock-string-face) 27432 27442 nil 27442 27443 (face font-lock-string-face) 27443 27469 (face font-lock-function-name-face) 27469 27470 (face font-lock-string-face) 27470 27480 nil 27480 27481 (face font-lock-string-face) 27481 27507 (face font-lock-function-name-face) 27507 27508 (face font-lock-string-face) 27508 27525 nil 27525 27526 (face font-lock-string-face) 27526 27533 (face font-lock-keyword-face) 27533 27534 (face font-lock-string-face) 27534 27546 nil 27546 27547 (face font-lock-string-face) 27547 27591 (face font-lock-constant-face) 27591 27592 (face font-lock-string-face) 27592 27602 nil 27602 27603 (face font-lock-string-face) 27603 27646 (face font-lock-constant-face) 27646 27647 (face font-lock-string-face) 27647 27657 nil 27657 27658 (face font-lock-string-face) 27658 27679 (face font-lock-constant-face) 27679 27680 (face font-lock-string-face) 27680 27690 nil 27690 27691 (face font-lock-string-face) 27691 27711 (face font-lock-constant-face) 27711 27712 (face font-lock-string-face) 27712 27722 nil 27722 27723 (face font-lock-string-face) 27723 27752 (face font-lock-constant-face) 27752 27753 (face font-lock-string-face) 27753 27763 nil 27763 27764 (face font-lock-string-face) 27764 27792 (face font-lock-constant-face) 27792 27793 (face font-lock-string-face) 27793 27803 nil 27803 27804 (face font-lock-string-face) 27804 27829 (face font-lock-constant-face) 27829 27830 (face font-lock-string-face) 27830 27840 nil 27840 27841 (face font-lock-string-face) 27841 27865 (face font-lock-constant-face) 27865 27866 (face font-lock-string-face) 27866 27876 nil 27876 27877 (face font-lock-string-face) 27877 27901 (face font-lock-constant-face) 27901 27902 (face font-lock-string-face) 27902 27912 nil 27912 27913 (face font-lock-string-face) 27913 27936 (face font-lock-constant-face) 27936 27937 (face font-lock-string-face) 27937 27947 nil 27947 27948 (face font-lock-string-face) 27948 27968 (face font-lock-constant-face) 27968 27969 (face font-lock-string-face) 27969 27979 nil 27979 27980 (face font-lock-string-face) 27980 27999 (face font-lock-constant-face) 27999 28000 (face font-lock-string-face) 28000 28030 nil 28030 28031 (face font-lock-string-face) 28031 28042 (face font-lock-keyword-face) 28042 28043 (face font-lock-string-face) 28043 28045 nil 28045 28046 (face font-lock-string-face) 28046 28058 (face font-lock-function-name-face) 28058 28059 (face font-lock-string-face) 28059 28067 nil 28067 28068 (face font-lock-string-face) 28068 28072 (face font-lock-keyword-face) 28072 28073 (face font-lock-string-face) 28073 28075 nil 28075 28076 (face font-lock-string-face) 28076 28086 (face font-lock-type-face) 28086 28087 (face font-lock-string-face) 28087 28095 nil 28095 28096 (face font-lock-string-face) 28096 28108 (face font-lock-keyword-face) 28108 28109 (face font-lock-string-face) 28109 28121 nil 28121 28122 (face font-lock-string-face) 28122 28127 (face font-lock-function-name-face) 28127 28128 (face font-lock-string-face) 28128 28138 nil 28138 28139 (face font-lock-string-face) 28139 28150 (face font-lock-function-name-face) 28150 28151 (face font-lock-string-face) 28151 28161 nil 28161 28162 (face font-lock-string-face) 28162 28183 (face font-lock-function-name-face) 28183 28184 (face font-lock-string-face) 28184 28194 
diff --git a/tools/gyp/tools/graphviz.py b/tools/gyp/tools/graphviz.py
deleted file mode 100755
index 326ae22..0000000
--- a/tools/gyp/tools/graphviz.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Using the JSON dumped by the dump-dependency-json generator,
-generate input suitable for graphviz to render a dependency graph of
-targets."""
-
-import collections
-import json
-import sys
-
-
-def ParseTarget(target):
-  target, _, suffix = target.partition('#')
-  filename, _, target = target.partition(':')
-  return filename, target, suffix
-
-
-def LoadEdges(filename, targets):
-  """Load the edges map from the dump file, and filter it to only
-  show targets in |targets| and their dependencies."""
-
-  file = open('dump.json')
-  edges = json.load(file)
-  file.close()
-
-  # Copy out only the edges we're interested in from the full edge list.
-  target_edges = {}
-  to_visit = targets[:]
-  while to_visit:
-    src = to_visit.pop()
-    if src in target_edges:
-      continue
-    target_edges[src] = edges[src]
-    to_visit.extend(edges[src])
-
-  return target_edges
-
-
-def WriteGraph(edges):
-  """Print a graphviz graph to stdout.
-  |edges| is a map of target to a list of other targets it depends on."""
-
-  # Bucket targets by file.
-  files = collections.defaultdict(list)
-  for src, dst in edges.items():
-    build_file, target_name, toolset = ParseTarget(src)
-    files[build_file].append(src)
-
-  print 'digraph D {'
-  print '  fontsize=8'  # Used by subgraphs.
-  print '  node [fontsize=8]'
-
-  # Output nodes by file.  We must first write out each node within
-  # its file grouping before writing out any edges that may refer
-  # to those nodes.
-  for filename, targets in files.items():
-    if len(targets) == 1:
-      # If there's only one node for this file, simplify
-      # the display by making it a box without an internal node.
-      target = targets[0]
-      build_file, target_name, toolset = ParseTarget(target)
-      print '  "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
-                                                     target_name)
-    else:
-      # Group multiple nodes together in a subgraph.
-      print '  subgraph "cluster_%s" {' % filename
-      print '    label = "%s"' % filename
-      for target in targets:
-        build_file, target_name, toolset = ParseTarget(target)
-        print '    "%s" [label="%s"]' % (target, target_name)
-      print '  }'
-
-  # Now that we've placed all the nodes within subgraphs, output all
-  # the edges between nodes.
-  for src, dsts in edges.items():
-    for dst in dsts:
-      print '  "%s" -> "%s"' % (src, dst)
-
-  print '}'
-
-
-def main():
-  if len(sys.argv) < 2:
-    print >>sys.stderr, __doc__
-    print >>sys.stderr
-    print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
-    return 1
-
-  edges = LoadEdges('dump.json', sys.argv[1:])
-
-  WriteGraph(edges)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
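The script removed above read the dump.json written by GYP's dump-dependency-json generator and printed a DOT graph on stdout, which could then be piped into Graphviz. For readers who still need that workflow, a minimal stand-alone Python 3 sketch of the same idea follows; it is an illustration only (the dump.json location and the Graphviz invocation are assumptions, not part of the diff above).

    #!/usr/bin/env python3
    """Sketch: turn a GYP dependency dump into a DOT graph (hypothetical helper)."""
    import json
    import sys


    def main(targets):
        # dump.json is assumed to map each target to the targets it depends on,
        # as produced by GYP's dump-dependency-json generator.
        with open('dump.json') as f:
            edges = json.load(f)

        # Keep only the requested targets and everything reachable from them.
        keep = {}
        to_visit = list(targets)
        while to_visit:
            src = to_visit.pop()
            if src in keep:
                continue
            keep[src] = edges.get(src, [])
            to_visit.extend(keep[src])

        print('digraph D {')
        for src, dsts in keep.items():
            for dst in dsts:
                print('  "%s" -> "%s"' % (src, dst))
        print('}')
        return 0


    if __name__ == '__main__':
        sys.exit(main(sys.argv[1:]))

If Graphviz is installed, the output can be rendered with, for example, `dot -Tpng`.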
diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py
deleted file mode 100755
index d5736bb..0000000
--- a/tools/gyp/tools/pretty_gyp.py
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Pretty-prints the contents of a GYP file."""
-
-import sys
-import re
-
-
-# Regex to remove comments when we're counting braces.
-COMMENT_RE = re.compile(r'\s*#.*')
-
-# Regex to remove quoted strings when we're counting braces.
-# It takes into account quoted quotes, and makes sure that the quotes match.
-# NOTE: It does not handle quotes that span more than one line, or
-# cases where an escaped quote is preceded by an escaped backslash.
-QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
-QUOTE_RE = re.compile(QUOTE_RE_STR)
-
-
-def comment_replace(matchobj):
-  return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
-
-
-def mask_comments(input):
-  """Mask the quoted strings so we skip braces inside quoted strings."""
-  search_re = re.compile(r'(.*?)(#)(.*)')
-  return [search_re.sub(comment_replace, line) for line in input]
-
-
-def quote_replace(matchobj):
-  return "%s%s%s%s" % (matchobj.group(1),
-                       matchobj.group(2),
-                       'x'*len(matchobj.group(3)),
-                       matchobj.group(2))
-
-
-def mask_quotes(input):
-  """Mask the quoted strings so we skip braces inside quoted strings."""
-  search_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
-  return [search_re.sub(quote_replace, line) for line in input]
-
-
-def do_split(input, masked_input, search_re):
-  output = []
-  mask_output = []
-  for (line, masked_line) in zip(input, masked_input):
-    m = search_re.match(masked_line)
-    while m:
-      split = len(m.group(1))
-      line = line[:split] + r'\n' + line[split:]
-      masked_line = masked_line[:split] + r'\n' + masked_line[split:]
-      m = search_re.match(masked_line)
-    output.extend(line.split(r'\n'))
-    mask_output.extend(masked_line.split(r'\n'))
-  return (output, mask_output)
-
-
-def split_double_braces(input):
-  """Masks out the quotes and comments, and then splits appropriate
-  lines (lines that match the double_*_brace re's above) before
-  indenting them below.
-
-  These are used to split lines which have multiple braces on them, so
-  that the indentation looks prettier when all laid out (e.g. closing
-  braces make a nice diagonal line).
-  """
-  double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
-  double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
-
-  masked_input = mask_quotes(input)
-  masked_input = mask_comments(masked_input)
-
-  (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
-  (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
-
-  return output
-
-
-def count_braces(line):
-  """keeps track of the number of braces on a given line and returns the result.
-
-  It starts at zero and subtracts for closed braces, and adds for open braces.
-  """
-  open_braces = ['[', '(', '{']
-  close_braces = [']', ')', '}']
-  closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
-  cnt = 0
-  stripline = COMMENT_RE.sub(r'', line)
-  stripline = QUOTE_RE.sub(r"''", stripline)
-  for char in stripline:
-    for brace in open_braces:
-      if char == brace:
-        cnt += 1
-    for brace in close_braces:
-      if char == brace:
-        cnt -= 1
-
-  after = False
-  if cnt > 0:
-    after = True
-
-  # This catches the special case of a closing brace having something
-  # other than just whitespace ahead of it -- we don't want to
-  # unindent that until after this line is printed so it stays with
-  # the previous indentation level.
-  if cnt < 0 and closing_prefix_re.match(stripline):
-    after = True
-  return (cnt, after)
-
-
-def prettyprint_input(lines):
-  """Does the main work of indenting the input based on the brace counts."""
-  indent = 0
-  basic_offset = 2
-  last_line = ""
-  for line in lines:
-    line = line.strip('\r\n\t ')  # Otherwise doesn't strip \r on Unix.
-    if len(line) > 0:
-      brace_diff = 0
-      if not COMMENT_RE.match(line):
-        (brace_diff, after) = count_braces(line)
-      if brace_diff != 0:
-        if after:
-          print " " * (basic_offset * indent) + line
-          indent += brace_diff
-        else:
-          indent += brace_diff
-          print " " * (basic_offset * indent) + line
-      else:
-        print " " * (basic_offset * indent) + line
-    else:
-      print ""
-    last_line = line
-
-
-def main():
-  if len(sys.argv) > 1:
-    data = open(sys.argv[1]).read().splitlines()
-  else:
-    data = sys.stdin.read().splitlines()
-  # Split up the double braces.
-  lines = split_double_braces(data)
-
-  # Indent and print the output.
-  prettyprint_input(lines)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
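The heart of the file removed above is the brace-counting pass: after masking quotes and comments, count opening and closing brackets on each line and indent by the running balance. A self-contained sketch of just that idea, for illustration only (it deliberately skips the quote and comment masking that the real script performed):

    def indent_by_braces(lines, offset=2):
        """Indent each stripped line by the running open-bracket balance."""
        out, depth = [], 0
        for line in lines:
            line = line.strip()
            diff = (sum(line.count(c) for c in '([{')
                    - sum(line.count(c) for c in ')]}'))
            # Dedent lines that close brackets before printing them, so that
            # closers line up with the construct they close.
            level = depth if diff >= 0 else depth + diff
            out.append(' ' * (offset * max(level, 0)) + line)
            depth += diff
        return out


    print('\n'.join(indent_by_braces(['{', "'targets': [", '],', '}'])))

Running the sketch prints the four input lines with the middle two indented one level, which is the effect the removed prettifier applied to whole GYP files.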
diff --git a/tools/gyp/tools/pretty_sln.py b/tools/gyp/tools/pretty_sln.py
deleted file mode 100755
index ca8cf4a..0000000
--- a/tools/gyp/tools/pretty_sln.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Prints the information in a sln file in a diffable way.
-
-   It first outputs each project in alphabetical order with its
-   dependencies.
-
-   Then it outputs a possible build order.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import re
-import sys
-import pretty_vcproj
-
-def BuildProject(project, built, projects, deps):
-  # If all dependencies are done, we can build it; otherwise we try to build the
-  # dependency.
-  # This is not infinite-recursion proof.
-  for dep in deps[project]:
-    if dep not in built:
-      BuildProject(dep, built, projects, deps)
-  print project
-  built.append(project)
-
-def ParseSolution(solution_file):
-  # All projects, their clsid and paths.
-  projects = dict()
-
-  # A list of dependencies associated with a project.
-  dependencies = dict()
-
-  # Regular expressions that match the SLN format.
-  # The first line of a project definition.
-  begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
-                             r'}"\) = "(.*)", "(.*)", "(.*)"$')
-  # The last line of a project definition.
-  end_project = re.compile('^EndProject$')
-  # The first line of a dependency list.
-  begin_dep = re.compile(
-      r'ProjectSection\(ProjectDependencies\) = postProject$')
-  # The last line of a dependency list.
-  end_dep = re.compile('EndProjectSection$')
-  # A line describing a dependency.
-  dep_line = re.compile(' *({.*}) = ({.*})$')
-
-  in_deps = False
-  solution = open(solution_file)
-  for line in solution:
-    results = begin_project.search(line)
-    if results:
-      # Hack to remove icu because the diff is too different.
-      if results.group(1).find('icu') != -1:
-        continue
-      # We remove "_gyp" from the names because it helps to diff them.
-      current_project = results.group(1).replace('_gyp', '')
-      projects[current_project] = [results.group(2).replace('_gyp', ''),
-                                   results.group(3),
-                                   results.group(2)]
-      dependencies[current_project] = []
-      continue
-
-    results = end_project.search(line)
-    if results:
-      current_project = None
-      continue
-
-    results = begin_dep.search(line)
-    if results:
-      in_deps = True
-      continue
-
-    results = end_dep.search(line)
-    if results:
-      in_deps = False
-      continue
-
-    results = dep_line.search(line)
-    if results and in_deps and current_project:
-      dependencies[current_project].append(results.group(1))
-      continue
-
-  # Change all dependencies clsid to name instead.
-  for project in dependencies:
-    # For each dependency in this project
-    new_dep_array = []
-    for dep in dependencies[project]:
-      # Look for the project name matching this clsid.
-      for project_info in projects:
-        if projects[project_info][1] == dep:
-          new_dep_array.append(project_info)
-    dependencies[project] = sorted(new_dep_array)
-
-  return (projects, dependencies)
-
-def PrintDependencies(projects, deps):
-  print "---------------------------------------"
-  print "Dependencies for all projects"
-  print "---------------------------------------"
-  print "--                                   --"
-
-  for (project, dep_list) in sorted(deps.items()):
-    print "Project : %s" % project
-    print "Path : %s" % projects[project][0]
-    if dep_list:
-      for dep in dep_list:
-        print "  - %s" % dep
-    print ""
-
-  print "--                                   --"
-
-def PrintBuildOrder(projects, deps):
-  print "---------------------------------------"
-  print "Build order                            "
-  print "---------------------------------------"
-  print "--                                   --"
-
-  built = []
-  for (project, _) in sorted(deps.items()):
-    if project not in built:
-      BuildProject(project, built, projects, deps)
-
-  print "--                                   --"
-
-def PrintVCProj(projects):
-
-  for project in projects:
-    print "-------------------------------------"
-    print "-------------------------------------"
-    print project
-    print project
-    print project
-    print "-------------------------------------"
-    print "-------------------------------------"
-
-    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
-                                                projects[project][2]))
-
-    pretty = pretty_vcproj
-    argv = [ '',
-             project_path,
-             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
-           ]
-    argv.extend(sys.argv[3:])
-    pretty.main(argv)
-
-def main():
-  # Check that we have at least 1 parameter.
-  if len(sys.argv) < 2:
-    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
-    return 1
-
-  (projects, deps) = ParseSolution(sys.argv[1])
-  PrintDependencies(projects, deps)
-  PrintBuildOrder(projects, deps)
-
-  if '--recursive' in sys.argv:
-    PrintVCProj(projects)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
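The begin_project pattern in the file removed above keys on the fixed GUID that Visual Studio uses to tag project entries in a .sln file. A quick, self-contained check of that pattern; the project name and the trailing project GUID below are made up for illustration:

    import re

    # Same pattern as in the removed script; the fixed GUID is the project-type id.
    begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                               r'}"\) = "(.*)", "(.*)", "(.*)"$')

    line = ('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = '
            '"v8_base", "v8_base.vcproj", "{00000000-0000-0000-0000-000000000001}"')
    match = begin_project.search(line)
    print(match.groups() if match else 'no match')
    # -> ('v8_base', 'v8_base.vcproj', '{00000000-0000-0000-0000-000000000001}')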
diff --git a/tools/gyp/tools/pretty_vcproj.py b/tools/gyp/tools/pretty_vcproj.py
deleted file mode 100755
index 6099bd7..0000000
--- a/tools/gyp/tools/pretty_vcproj.py
+++ /dev/null
@@ -1,329 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Make the format of a vcproj really pretty.
-
-   This script normalizes and sorts an xml. It also fetches all the properties
-   inside linked vsprops and includes them explicitly in the vcproj.
-
-   It outputs the resulting xml to stdout.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import sys
-
-from xml.dom.minidom import parse
-from xml.dom.minidom import Node
-
-REPLACEMENTS = dict()
-ARGUMENTS = None
-
-
-class CmpTuple(object):
-  """Compare function between 2 tuple."""
-  def __call__(self, x, y):
-    return cmp(x[0], y[0])
-
-
-class CmpNode(object):
-  """Compare function between 2 xml nodes."""
-
-  def __call__(self, x, y):
-    def get_string(node):
-      node_string = "node"
-      node_string += node.nodeName
-      if node.nodeValue:
-        node_string += node.nodeValue
-
-      if node.attributes:
-        # We first sort by name, if present.
-        node_string += node.getAttribute("Name")
-
-        all_nodes = []
-        for (name, value) in node.attributes.items():
-          all_nodes.append((name, value))
-
-        all_nodes.sort(CmpTuple())
-        for (name, value) in all_nodes:
-          node_string += name
-          node_string += value
-
-      return node_string
-
-    return cmp(get_string(x), get_string(y))
-
-
-def PrettyPrintNode(node, indent=0):
-  if node.nodeType == Node.TEXT_NODE:
-    if node.data.strip():
-      print '%s%s' % (' '*indent, node.data.strip())
-    return
-
-  if node.childNodes:
-    node.normalize()
-  # Get the number of attributes
-  attr_count = 0
-  if node.attributes:
-    attr_count = node.attributes.length
-
-  # Print the main tag
-  if attr_count == 0:
-    print '%s<%s>' % (' '*indent, node.nodeName)
-  else:
-    print '%s<%s' % (' '*indent, node.nodeName)
-
-    all_attributes = []
-    for (name, value) in node.attributes.items():
-      all_attributes.append((name, value))
-      all_attributes.sort(CmpTuple())
-    for (name, value) in all_attributes:
-      print '%s  %s="%s"' % (' '*indent, name, value)
-    print '%s>' % (' '*indent)
-  if node.nodeValue:
-    print '%s  %s' % (' '*indent, node.nodeValue)
-
-  for sub_node in node.childNodes:
-    PrettyPrintNode(sub_node, indent=indent+2)
-  print '%s</%s>' % (' '*indent, node.nodeName)
-
-
-def FlattenFilter(node):
-  """Returns a list of all the node and sub nodes."""
-  node_list = []
-
-  if (node.attributes and
-      node.getAttribute('Name') == '_excluded_files'):
-    # We don't add the "_excluded_files" filter.
-    return []
-
-  for current in node.childNodes:
-    if current.nodeName == 'Filter':
-      node_list.extend(FlattenFilter(current))
-    else:
-      node_list.append(current)
-
-  return node_list
-
-
-def FixFilenames(filenames, current_directory):
-  new_list = []
-  for filename in filenames:
-    if filename:
-      for key in REPLACEMENTS:
-        filename = filename.replace(key, REPLACEMENTS[key])
-      os.chdir(current_directory)
-      filename = filename.strip('"\' ')
-      if filename.startswith('$'):
-        new_list.append(filename)
-      else:
-        new_list.append(os.path.abspath(filename))
-  return new_list
-
-
-def AbsoluteNode(node):
-  """Makes all the properties we know about in this node absolute."""
-  if node.attributes:
-    for (name, value) in node.attributes.items():
-      if name in ['InheritedPropertySheets', 'RelativePath',
-                  'AdditionalIncludeDirectories',
-                  'IntermediateDirectory', 'OutputDirectory',
-                  'AdditionalLibraryDirectories']:
-        # We want to fix up these paths
-        path_list = value.split(';')
-        new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
-        node.setAttribute(name, ';'.join(new_list))
-      if not value:
-        node.removeAttribute(name)
-
-
-def CleanupVcproj(node):
-  """For each sub node, we call recursively this function."""
-  for sub_node in node.childNodes:
-    AbsoluteNode(sub_node)
-    CleanupVcproj(sub_node)
-
-  # Normalize the node, and remove all extraneous whitespace.
-  for sub_node in node.childNodes:
-    if sub_node.nodeType == Node.TEXT_NODE:
-      sub_node.data = sub_node.data.replace("\r", "")
-      sub_node.data = sub_node.data.replace("\n", "")
-      sub_node.data = sub_node.data.rstrip()
-
-  # Sort all the semicolon separated attribute values, and also
-  # remove the dups.
-  if node.attributes:
-    for (name, value) in node.attributes.items():
-      sorted_list = sorted(value.split(';'))
-      unique_list = []
-      for i in sorted_list:
-        if not unique_list.count(i):
-          unique_list.append(i)
-      node.setAttribute(name, ';'.join(unique_list))
-      if not value:
-        node.removeAttribute(name)
-
-  if node.childNodes:
-    node.normalize()
-
-  # For each node, take a copy, and remove it from the list.
-  node_array = []
-  while node.childNodes and node.childNodes[0]:
-    # Take a copy of the node and remove it from the list.
-    current = node.childNodes[0]
-    node.removeChild(current)
-
-    # If the child is a filter, we want to append all its children
-    # to this same list.
-    if current.nodeName == 'Filter':
-      node_array.extend(FlattenFilter(current))
-    else:
-      node_array.append(current)
-
-
-  # Sort the list.
-  node_array.sort(CmpNode())
-
-  # Insert the nodes in the correct order.
-  for new_node in node_array:
-    # But don't append empty tool node.
-    if new_node.nodeName == 'Tool':
-      if new_node.attributes and new_node.attributes.length == 1:
-        # This one was empty.
-        continue
-    if new_node.nodeName == 'UserMacro':
-      continue
-    node.appendChild(new_node)
-
-
-def GetConfiguationNodes(vcproj):
-  #TODO(nsylvain): Find a better way to navigate the xml.
-  nodes = []
-  for node in vcproj.childNodes:
-    if node.nodeName == "Configurations":
-      for sub_node in node.childNodes:
-        if sub_node.nodeName == "Configuration":
-          nodes.append(sub_node)
-
-  return nodes
-
-
-def GetChildrenVsprops(filename):
-  dom = parse(filename)
-  if dom.documentElement.attributes:
-    vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
-    return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
-  return []
-
-def SeekToNode(node1, child2):
-  # A text node does not have properties.
-  if child2.nodeType == Node.TEXT_NODE:
-    return None
-
-  # Get the name of the current node.
-  current_name = child2.getAttribute("Name")
-  if not current_name:
-    # There is no name. We don't know how to merge.
-    return None
-
-  # Look through all the nodes to find a match.
-  for sub_node in node1.childNodes:
-    if sub_node.nodeName == child2.nodeName:
-      name = sub_node.getAttribute("Name")
-      if name == current_name:
-        return sub_node
-
-  # No match. We give up.
-  return None
-
-
-def MergeAttributes(node1, node2):
-  # No attributes to merge?
-  if not node2.attributes:
-    return
-
-  for (name, value2) in node2.attributes.items():
-    # Don't merge the 'Name' attribute.
-    if name == 'Name':
-      continue
-    value1 = node1.getAttribute(name)
-    if value1:
-      # The attribute exists in the main node. If it's equal, we leave it
-      # untouched, otherwise we concatenate it.
-      if value1 != value2:
-        node1.setAttribute(name, ';'.join([value1, value2]))
-    else:
-      # The attribute does not exist in the main node. We append this one.
-      node1.setAttribute(name, value2)
-
-    # If the attribute was a property sheet attribute, we remove it, since
-    # it is useless.
-    if name == 'InheritedPropertySheets':
-      node1.removeAttribute(name)
-
-
-def MergeProperties(node1, node2):
-  MergeAttributes(node1, node2)
-  for child2 in node2.childNodes:
-    child1 = SeekToNode(node1, child2)
-    if child1:
-      MergeProperties(child1, child2)
-    else:
-      node1.appendChild(child2.cloneNode(True))
-
-
-def main(argv):
-  """Main function of this vcproj prettifier."""
-  global ARGUMENTS
-  ARGUMENTS = argv
-
-  # Check that we have at least 1 parameter.
-  if len(argv) < 2:
-    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
-           '[key2=value2]' % argv[0])
-    return 1
-
-  # Parse the keys
-  for i in range(2, len(argv)):
-    (key, value) = argv[i].split('=')
-    REPLACEMENTS[key] = value
-
-  # Open the vcproj and parse the xml.
-  dom = parse(argv[1])
-
-  # First thing we need to do is find the Configuration Node and merge them
-  # with the vsprops they include.
-  for configuration_node in GetConfiguationNodes(dom.documentElement):
-    # Get the property sheets associated with this configuration.
-    vsprops = configuration_node.getAttribute('InheritedPropertySheets')
-
-    # Fix the filenames to be absolute.
-    vsprops_list = FixFilenames(vsprops.strip().split(';'),
-                                os.path.dirname(argv[1]))
-
-    # Extend the list of vsprops with all vsprops contained in the current
-    # vsprops.
-    for current_vsprops in vsprops_list:
-      vsprops_list.extend(GetChildrenVsprops(current_vsprops))
-
-    # Now that we have all the vsprops, we need to merge them.
-    for current_vsprops in vsprops_list:
-      MergeProperties(configuration_node,
-                      parse(current_vsprops).documentElement)
-
-  # Now that everything is merged, we need to cleanup the xml.
-  CleanupVcproj(dom.documentElement)
-
-  # Finally, we use the pretty xml function to print the vcproj back to the
-  # user.
-  #print dom.toprettyxml(newl="\n")
-  PrettyPrintNode(dom.documentElement)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
diff --git a/tools/mb/OWNERS b/tools/mb/OWNERS
new file mode 100644
index 0000000..de5efcb
--- /dev/null
+++ b/tools/mb/OWNERS
@@ -0,0 +1,3 @@
+brettw@chromium.org
+dpranke@chromium.org
+machenbach@chromium.org
diff --git a/tools/mb/PRESUBMIT.py b/tools/mb/PRESUBMIT.py
new file mode 100644
index 0000000..6f5307c
--- /dev/null
+++ b/tools/mb/PRESUBMIT.py
@@ -0,0 +1,41 @@
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _CommonChecks(input_api, output_api):
+  results = []
+
+  # Run Pylint over the files in the directory.
+  pylint_checks = input_api.canned_checks.GetPylint(input_api, output_api)
+  results.extend(input_api.RunTests(pylint_checks))
+
+  # Run the MB unittests.
+  results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api, output_api, '.', [ r'^.+_unittest\.py$']))
+
+  # Validate the format of the mb_config.pyl file.
+  cmd = [input_api.python_executable, 'mb.py', 'validate']
+  kwargs = {'cwd': input_api.PresubmitLocalPath()}
+  results.extend(input_api.RunTests([
+      input_api.Command(name='mb_validate',
+                        cmd=cmd, kwargs=kwargs,
+                        message=output_api.PresubmitError)]))
+
+  results.extend(
+      input_api.canned_checks.CheckLongLines(
+          input_api,
+          output_api,
+          maxlen=80,
+          source_file_filter=lambda x: 'mb_config.pyl' in x.LocalPath()))
+
+  return results
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _CommonChecks(input_api, output_api)
diff --git a/tools/mb/README.md b/tools/mb/README.md
new file mode 100644
index 0000000..4e73a8e
--- /dev/null
+++ b/tools/mb/README.md
@@ -0,0 +1,22 @@
+# MB - The Meta-Build wrapper
+
+MB is a simple wrapper intended to provide a uniform interface to either
+GYP or GN, such that users and bots can call one script and not need to
+worry about whether a given bot is meant to use GN or GYP.
+
+It supports two main functions:
+
+1. "gen" - the main `gyp_chromium` / `gn gen` invocation that generates the
+   Ninja files needed for the build.
+
+2. "analyze" - the step that takes a list of modified files and a list of
+   desired targets and reports which targets will need to be rebuilt.
+
+We also use MB as a forcing function to collect all of the different 
+build configurations that we actually support for Chromium builds into
+one place, in `//tools/mb/mb_config.pyl`.
+
+For more information, see:
+
+* [The User Guide](docs/user_guide.md)
+* [The Design Spec](docs/design_spec.md)
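The README points at `//tools/mb/mb_config.pyl` as the single collection point for build configurations, so it may help to sketch the shape of the data that file holds. The entry below is a made-up illustration in .pyl (Python literal) syntax; the master, builder, config, and mixin names are hypothetical and are not taken from the actual file:

    {
      # Maps master -> builder -> named config.
      'masters': {
        'client.v8': {
          'V8 Linux64 - example builder': 'gn_release_x64',
        },
      },
      # A config is just a named list of mixins.
      'configs': {
        'gn_release_x64': ['gn', 'release', 'x64'],
      },
      # Mixins carry the actual GN args (or GYP defines) and the build type.
      'mixins': {
        'gn': {'type': 'gn'},
        'release': {'gn_args': 'is_debug=false'},
        'x64': {'gn_args': 'target_cpu="x64"'},
      },
    }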
diff --git a/tools/mb/docs/README.md b/tools/mb/docs/README.md
new file mode 100644
index 0000000..f29007d
--- /dev/null
+++ b/tools/mb/docs/README.md
@@ -0,0 +1,4 @@
+# The MB (Meta-Build wrapper) documentation
+
+* The [User Guide](user_guide.md)
+* The [Design Spec](design_spec.md)
diff --git a/tools/mb/docs/design_spec.md b/tools/mb/docs/design_spec.md
new file mode 100644
index 0000000..33fda80
--- /dev/null
+++ b/tools/mb/docs/design_spec.md
@@ -0,0 +1,426 @@
+# The MB (Meta-Build wrapper) design spec
+
+[TOC]
+
+## Intro
+
+MB is intended to address two major aspects of the GYP -> GN transition
+for Chromium:
+
+1. "bot toggling" - make it so that we can easily flip a given bot
+   back and forth between GN and GYP.
+
+2. "bot configuration" - provide a single source of truth for all of
+   the different configurations (os/arch/`gyp_define` combinations) of
+   Chromium that are supported.
+
+MB must handle at least the `gen` and `analyze` steps on the bots, i.e.,
+we need to wrap both the `gyp_chromium` invocation to generate the
+Ninja files, and the `analyze` step that takes a list of modified files
+and a list of targets to build and returns which targets are affected by
+the files.
+
+For more information on how to actually use MB, see
+[the user guide](user_guide.md).
+
+## Design
+
+MB is intended to be as simple as possible, and to defer as much work as
+possible to GN or GYP. It should live as a very simple Python wrapper
+that offers little in the way of surprises.
+
+### Command line
+
+It is structured as a single binary that supports a list of subcommands:
+
+* `mb gen -c linux_rel_bot //out/Release`
+* `mb analyze -m tryserver.chromium.linux -b linux_rel /tmp/input.json /tmp/output.json`
+
+### Configurations
+
+`mb` will first look for a bot config file in a set of different locations
+(initially just in //ios/build/bots). Bot config files are JSON files that
+contain keys for 'GYP_DEFINES' (a list of strings that will be joined together
+with spaces and passed to GYP, or a dict that will be similarly converted),
+'gn_args' (a list of strings that will be joined together), and an
+'mb_type' field that says whether to use GN or GYP. Bot config files
+require the full list of settings to be given explicitly.
+
+If no matching bot config file is found, `mb` looks in the
+`//tools/mb/mb_config.pyl` config file to determine whether to use GYP or GN
+for a particular build directory, and what set of flags (`GYP_DEFINES` or `gn
+args`) to use.
+
+A config can either be specified directly (useful for testing) or by specifying
+the master name and builder name (useful on the bots so that they do not need
+to specify a config directly and can be hidden from the details).
+
+See the [user guide](user_guide.md#mb_config.pyl) for details.
+
+### Handling the analyze step
+
+The interface to `mb analyze` is described in the
+[user\_guide](user_guide.md#mb_analyze).
+
+The way analyze works can be subtle and complicated (see below).
+
+Since the interface basically mirrors the way the "analyze" step on the bots
+invokes `gyp_chromium` today, when the config is found to be a gyp config,
+the arguments are passed straight through.
+
+It implements the equivalent functionality in GN by calling `gn refs
+[list of files] --type=executable --all --as=output` and filtering the
+output to match the list of targets.
+
+## Analyze
+
+The goal of the `analyze` step is to speed up the cycle time of the try servers
+by only building and running the tests affected by the files in a patch, rather
+than everything that might be out of date. Doing this ends up being tricky.
+
+We start with the following requirements and observations:
+
+* In an ideal (un-resource-constrained) world, we would build and test
+  everything that a patch affected on every patch. This does not
+  necessarily mean that we would build 'all' on every patch (see below).
+
+* In the real world, however, we do not have an infinite number of machines,
+  and try jobs are not infinitely fast, so we need to balance the desire
+  to get maximum test coverage against the desire to have reasonable cycle
+  times, given the number of machines we have.
+
+* Also, since we run most try jobs against tip-of-tree Chromium, by
+  the time one job completes on the bot, new patches have probably landed,
+  rendering the build out of date.
+
+* This means that the next try job may have to do a build that is out of
+  date due to a combination of files affected by a given patch, and files
+  affected for unrelated reasons. We want to rebuild and test only the
+  targets affected by the patch, so that we don't blame or punish the
+  patch author for unrelated changes.
+
+So:
+
+1. We need a way to indicate which changed files we care about and which
+   we don't (the affected files of a patch).
+
+2. We need to know which tests we might potentially want to run, and how
+   those are mapped onto build targets. For some kinds of tests (like
+   GTest-based tests), the mapping is 1:1 - if you want to run base_unittests,
+   you need to build base_unittests. For others (like the telemetry and
+   layout tests), you might need to build several executables in order to
+   run the tests, and that mapping might best be captured by a *meta*
+   target (a GN group or a GYP 'none' target like `webkit_tests`) that
+   depends on the right list of files. Because the GN and GYP files know
+   nothing about test steps, we have to have some way of mapping back
+   and forth between test steps and build targets. That mapping
+   is *not* currently available to MB (or GN or GYP), and so we have to return
+   enough information to make it possible for the caller to do the mapping.
+
+3. We might also want to know when test targets are affected by data files
+   that aren't compiled (python scripts, or the layout tests themselves).
+   There's no good way to do this in GYP, but GN supports this.
+
+4. We also want to ensure that particular targets still compile even if they
+   are not actually tested; consider testing the installers themselves, or
+   targets that don't yet have good test coverage. We might want to use meta
+   targets for this purpose as well.
+
+5. However, for some meta targets, we don't necessarily want to rebuild the
+   meta target itself, perhaps just the dependencies of the meta target that
+   are affected by the patch. For example, a meta target like `blink_tests`
+   might depend on ten different test binaries. If a patch only affects one
+   of them (say `wtf_unittests`), you don't want to build `blink_tests`,
+   because that might actually also build the other nine targets. In other
+   words, some meta targets are *prunable*.
+
+6. As noted above, in the ideal case we actually have enough resources and
+   things are fast enough that we can afford to build everything affected by a
+   patch, but listing every possible target explicitly would be painful. The
+   GYP and GN Ninja generators provide an 'all' target that captures (nearly,
+   see [crbug.com/503241](crbug.com/503241)) everything, but unfortunately
+   neither GN nor GYP actually represents 'all' as a meta target in the build
+   graph, so we will need to write code to handle that specially.
+
+7. In some cases, we will not be able to correctly analyze the build graph to
+   determine the impact of a patch, and need to bail out (e.g., if you change a
+   build file itself, it may not be easy to tell how that affects the graph).
+   In that case we should simply build and run everything.
+
+The interaction between 2) and 5) means that we need to treat meta targets
+two different ways, and so we need to know which targets should be
+pruned in the sense of 5) and which targets should be returned unchanged
+so that we can map them back to the appropriate tests.
+
+So, we need three things as input:
+
+* `files`: the list of files in the patch
+* `test_targets`: the list of ninja targets which, if affected by a patch,
+  should be reported back so that we can map them back to the appropriate
+  tests to run. Any meta targets in this list should *not* be pruned.
+* `additional_compile_targets`: the list of ninja targets we wish to compile
+  *in addition to* the list in `test_targets`. Any meta targets
+  present in this list should be pruned (we don't need to return the
+  meta targets because they aren't mapped back to tests, and we don't want
+  to build them because we might build too much).
+
+We can then return two lists as output:
+
+* `compile_targets`, which is a list of pruned targets to be
+  passed to Ninja to build. It is acceptable to replace a list of
+  pruned targets by a meta target if it turns out that all of the
+  dependencies of the target are affected by the patch (i.e.,
+  all ten binaries that blink_tests depends on), but doing so is
+  not required.
+* `test_targets`, which is a list of unpruned targets to be mapped
+  back to determine which tests to run.
+
+There may be substantial overlap between the two lists, but there is
+no guarantee that one is a subset of the other and the two cannot be
+used interchangeably or merged together without losing information and
+causing the wrong thing to happen.
+
+The implementation is responsible for recognizing 'all' as a magic string
+and mapping it onto the list of all root nodes in the build graph.
+
+There may be files listed in the input that don't actually exist in the build
+graph: this could be either the result of an error (the file should be in the
+build graph, but isn't), or perfectly fine (the file doesn't affect the build
+graph at all). We can't tell these two apart, so we should ignore missing
+files.
+
+There may be targets listed in the input that don't exist in the build
+graph; unlike missing files, this can only indicate a configuration error,
+and so we should return which targets are missing so the caller can
+treat this as an error, if so desired.
+
+Any of the three inputs may be an empty list:
+
+* It normally doesn't make sense to call analyze at all if no files
+  were modified, but in rare cases we can hit a race where we try to
+  test a patch after it has already been committed, in which case
+  the list of modified files is empty. We should return 'no dependency'
+  in that case.
+
+* Passing an empty list for one or the other of test_targets and
+  additional_compile_targets is perfectly sensible: in the former case,
+  it can indicate that you don't want to run any tests, and in the latter,
+  it can indicate that you don't want to build anything else in
+  addition to the test targets.
+
+* It doesn't make sense to call analyze if you don't want to compile
+  anything at all, so passing [] for both test_targets and 
+  additional_compile_targets should probably return an error.
+
+In the output case, an empty list indicates that there was nothing to
+build, or that there were no affected test targets as appropriate.
+
+Note that passing no arguments to Ninja is equivalent to passing
+`all` to Ninja (at least given how GN and GYP work); however, we
+don't want to take advantage of this in most cases because we don't
+actually want to build every out of date target, only the targets
+potentially affected by the files. One could try to indicate
+to analyze that we wanted to use no arguments instead of an empty
+list, but using the existing fields for this seems fragile and/or
+confusing, and adding a new field for this seems unwarranted at this time.
+
+There is an "error" field in case something goes wrong (like the
+empty file list case, above, or an internal error in MB/GYP/GN). The
+analyze code should also return an error code to the shell if appropriate
+to indicate that the command failed.
+
+In the case where build files themselves are modified and analyze may
+not be able to determine a correct answer (point 7 above, where we return
+"Found dependency (all)"), we should also return the `test_targets` unmodified
+and return the union of `test_targets` and `additional_compile_targets` for
+`compile_targets`, to avoid confusion.
+
+### Examples
+
+Continuing the example given above, suppose we have the following build
+graph:
+
+* `blink_tests` is a meta target that depends on `webkit_unit_tests`,
+  `wtf_unittests`, and `webkit_tests` and represents all of the targets
+  needed to fully test Blink. Each of those is a separate test step.
+* `webkit_tests` is also a meta target; it depends on `content_shell`
+  and `image_diff`.
+* `base_unittests` is a separate test binary.
+* `wtf_unittests` depends on `Assertions.cpp` and `AssertionsTest.cpp`.
+* `webkit_unit_tests` depends on `WebNode.cpp` and `WebNodeTest.cpp`.
+* `content_shell` depends on `WebNode.cpp` and `Assertions.cpp`.
+* `base_unittests` depends on `logging.cc` and `logging_unittest.cc`.
+
+#### Example 1
+
+We wish to run 'wtf_unittests' and 'webkit_tests' on a bot, but not
+compile any additional targets.
+
+If a patch touches WebNode.cpp, then analyze gets as input:
+
+    {
+      "files": ["WebNode.cpp"],
+      "test_targets": ["wtf_unittests", "webkit_tests"],
+      "additional_compile_targets": []
+    }
+
+and should return as output:
+
+    {
+      "status": "Found dependency",
+      "compile_targets": ["webkit_unit_tests"],
+      "test_targets": ["webkit_tests"]
+    }
+
+Note how `webkit_tests` was pruned in compile_targets but not in test_targets.
+
+#### Example 2
+
+Using the same patch as Example 1, assume we wish to run only `wtf_unittests`,
+but additionally build everything needed to test Blink (`blink_tests`):
+
+We pass as input:
+
+    {
+      "files": ["WebNode.cpp"],
+      "test_targets": ["wtf_unittests"],
+      "additional_compile_targets": ["blink_tests"]
+    }
+
+And should get as output:
+
+    {
+      "status": "Found dependency",
+      "compile_targets": ["webkit_unit_tests"],
+      "test_targets": []
+    }
+
+Here `blink_tests` was pruned in the output compile_targets, and test_targets
+was empty: `blink_tests` was not listed in the input test_targets, and the
+one test target that was listed (`wtf_unittests`) is not affected by the patch.
+
+#### Example 3
+
+Build everything, but do not run any tests.
+
+Input:
+
+    {
+      "files": ["WebNode.cpp"],
+      "test_targets": [],
+      "additional_compile_targets": ["all"]
+    }
+
+Output:
+
+    {
+      "status": "Found dependency",
+      "compile_targets": ["webkit_unit_tests", "content_shell"],
+      "test_targets": []
+    }
+
+#### Example 4
+
+Same as Example 2, but a build file was modified instead of a source file.
+
+Input:
+
+    {
+      "files": ["BUILD.gn"],
+      "test_targets": ["wtf_unittests"],
+      "additional_compile_targets": ["blink_tests"]
+    }
+
+Output:
+
+    {
+      "status": "Found dependency (all)",
+      "compile_targets": ["webkit_unit_tests", "wtf_unittests"],
+      "test_targets": ["wtf_unittests"]
+    }
+
+test_targets was returned unchanged, compile_targets was pruned.
+
+## Random Requirements and Rationale
+
+This section is a collection of semi-organized notes on why MB is the way
+it is ...
+
+### in-tree or out-of-tree
+
+The first issue is whether or not this should exist as a script in
+Chromium at all; an alternative would be to simply change the bot
+configurations to know whether to use GYP or GN, and which flags to
+pass.
+
+That would certainly work, but experience over the past two years
+suggests a few things:
+
+  * we should push as much logic as we can into the source repositories
+    so that they can be versioned and changed atomically with changes to
+    the product code; having to coordinate changes between src/ and
+    build/ is at best annoying and can lead to weird errors.
+  * the infra team would really like to move to providing
+    product-independent services (i.e., not have to do one thing for
+    Chromium, another for NaCl, a third for V8, etc.).
+  * we found that during the SVN->GIT migration the ability to flip bot
+    configurations between the two via changes to a file in chromium
+    was very useful.
+
+All of this suggests that the interface between bots and Chromium should
+be a simple one, hiding as much of the chromium logic as possible.
+
+### Why not have MB be smarter about de-duping flags?
+
+This just adds complexity to the MB implementation, and duplicates logic
+that GYP and GN already have to support anyway; in particular, it might
+require MB to know how to parse GYP and GN values. The belief is that
+if MB does *not* do this, it will lead to fewer surprises.
+
+It will not be hard to change this if need be.
+
+### Integration w/ gclient runhooks
+
+On the bots, we will disable `gyp_chromium` as part of runhooks (using
+`GYP_CHROMIUM_NO_ACTION=1`), so that mb shows up as a separate step.
+
+At the moment, we expect most developers to either continue to use
+`gyp_chromium` in runhooks or to disable it as above if they have no
+use for GYP at all. We may revisit how this works once we encourage more
+people to use GN full-time (i.e., we might take `gyp_chromium` out of
+runhooks altogether).
+
+### Config per flag set or config per (os/arch/flag set)?
+
+Currently, mb_config.pyl does not specify the host_os, target_os, host_cpu, or
+target_cpu values for every config that Chromium runs on; it only specifies
+them when the values need to be explicitly set on the command line.
+
+Instead, we have one config per unique combination of flags only.
+
+In other words, rather than having `linux_rel_bot`, `win_rel_bot`, and
+`mac_rel_bot`, we just have `rel_bot`.
+
+This design allows us to determine easily all of the different sets
+of flags that we need to support, but *not* which flags are used on which
+host/target combinations.
+
+It may be that we should really track the latter. Doing so is just a
+config file change, however.
+
+### Non-goals
+
+* MB is not intended to replace direct invocation of GN or GYP for
+  complicated build scenarios (aka ChromeOS), where multiple flags need
+  to be set to user-defined paths for specific toolchains (e.g., where
+  ChromeOS needs to specify specific board types and compilers).
+
+* MB is not intended at this time to be something developers use frequently,
+  or to add a lot of features to. We hope to be able to get rid of it once
+  the GYP->GN migration is done, and so we should not add things for
+  developers that can't easily be added to GN itself.
+
+* MB is not intended to replace the
+  [CR tool](https://code.google.com/p/chromium/wiki/CRUserManual). It is
+  only intended to replace the gyp\_chromium part of `'gclient runhooks'`,
+  and it is not really meant as a developer-facing tool.
diff --git a/tools/mb/docs/user_guide.md b/tools/mb/docs/user_guide.md
new file mode 100644
index 0000000..9817553
--- /dev/null
+++ b/tools/mb/docs/user_guide.md
@@ -0,0 +1,297 @@
+# The MB (Meta-Build wrapper) user guide
+
+[TOC]
+
+## Introduction
+
+`mb` is a simple Python wrapper around the GYP and GN meta-build tools to
+be used as part of the GYP->GN migration.
+
+It is intended to be used by bots to make it easier to manage the configuration
+each bot builds (i.e., the configurations can be changed from chromium
+commits), and to consolidate the list of all of the various configurations
+that Chromium is built in.
+
+Ideally this tool will no longer be needed after the migration is complete.
+
+For more discussion of MB, see also [the design spec](design_spec.md).
+
+## MB subcommands
+
+### `mb analyze`
+
+`mb analyze` is responsible for determining what targets are affected by
+a list of files (e.g., the list of files in a patch on a trybot):
+
+```
+mb analyze -c chromium_linux_rel //out/Release input.json output.json
+```
+
+Either the `-c/--config` flag or the `-m/--master` and `-b/--builder` flags
+must be specified so that `mb` can figure out which config to use.
+
+The first positional argument must be a GN-style "source-absolute" path
+to the build directory.
+
+The second positional argument is a (normal) path to a JSON file containing
+a single object with the following fields:
+
+  * `files`: an array of the modified filenames to check (as paths relative to
+    the checkout root).
+  * `test_targets`: an array of (ninja) build targets that are needed to run
+    tests we wish to run. An empty array will be treated as if there are
+    no tests that will be run.
+  * `additional_compile_targets`: an array of (ninja) build targets that
+    reflect the stuff we might want to build *in addition to* the list
+    passed in `test_targets`. Targets in this list will be treated 
+    specially, in the following way: if a given target is a "meta"
+    (GN: group, GYP: none) target like 'blink_tests' or
+    'chromium_builder_tests', or even the ninja-specific 'all' target, 
+    then only the *dependencies* of the target that are affected by
+    the modified files will be rebuilt (not the target itself, which
+    might also cause unaffected dependencies to be rebuilt). An empty
+    list will be treated as if there are no additional targets to build.
+    Empty lists for both `test_targets` and `additional_compile_targets`
+    would cause no work to be done, so will result in an error.
+  * `targets`: a legacy field that resembles a union of `compile_targets`
+    and `test_targets`. Support for this field will be removed once the
+    bots have been updated to use compile_targets and test_targets instead.
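+
+For example, a minimal input file (with illustrative file and target names)
+might look like:
+
+```
+{
+  "files": ["base/logging.cc"],
+  "test_targets": ["base_unittests"],
+  "additional_compile_targets": ["all"]
+}
+```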
+
+The third positional argument is a (normal) path to where mb will write
+the result, also as a JSON object. This object may contain the following
+fields:
+
+  * `error`: this should only be present if something failed.
+  * `compile_targets`: the list of ninja targets that should be passed
+    directly to the corresponding ninja / compile.py invocation. This
+    list may contain entries that are *not* listed in the input (see
+    the description of `additional_compile_targets` above and
+    [the design spec](design_spec.md) for how this works).
+  * `invalid_targets`: a list of any targets that were passed in
+    either of the input lists that weren't actually found in the graph.
+  * `test_targets`: the subset of the input `test_targets` that are
+    potentially out of date, indicating that the matching test steps
+    should be re-run.
+  * `targets`: a legacy field that indicates the subset of the input `targets`
+    that depend on the input `files`.
+  * `build_targets`: a legacy field that indicates the minimal subset of
+    targets needed to build all of `targets` that were affected.
+  * `status`: a field containing one of three strings:
+
+    * `"Found dependency"` (build the `compile_targets`)
+    * `"No dependency"` (i.e., no build needed)
+    * `"Found dependency (all)"` (`test_targets` is returned as-is;
+       `compile_targets` should contain the union of `test_targets` and
+       `additional_compile_targets`. In this case the targets do not
+       need to be pruned).
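+
+For example, the output for an input like the one above (again, the names are
+purely illustrative) might look like:
+
+```
+{
+  "status": "Found dependency",
+  "compile_targets": ["base_unittests"],
+  "test_targets": ["base_unittests"]
+}
+```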
+
+See [the design spec](design_spec.md) for more details and examples; the
+differences can be subtle.  We won't even go into how the `targets` and
+`build_targets` differ from each other or from `compile_targets` and
+`test_targets`.
+
+The `-b/--builder`, `-c/--config`, `-f/--config-file`, `-m/--master`,
+`-q/--quiet`, and `-v/--verbose` flags work as documented for `mb gen`.
+
+### `mb audit`
+
+`mb audit` is used to track the progress of the GYP->GN migration. You can
+use it to check a single master, or all the masters we care about. See
+`mb help audit` for more details (most people are not expected to care about
+this).
+
+### `mb gen`
+
+`mb gen` is responsible for generating the Ninja files by invoking either GYP
+or GN as appropriate. It takes arguments to specify a build config and
+a directory, then runs GYP or GN as appropriate:
+
+```
+% mb gen -m tryserver.chromium.linux -b linux_rel //out/Release
+% mb gen -c linux_rel_trybot //out/Release
+```
+
+Either the `-c/--config` flag or the `-m/--master` and `-b/--builder` flags
+must be specified so that `mb` can figure out which config to use. The
+`--phase` flag must also be used with builders that have multiple
+build/compile steps (and only with those builders).
+
+By default, MB will look for a bot config file under `//ios/build/bots` (see
+[the design spec](design_spec.md) for details of how the bot config files
+work). If no matching one is found, MB will then look in
+`//tools/mb/mb_config.pyl` to look up the config information, but you can
+specify a custom config file using the `-f/--config-file` flag.
+
+The path must be a GN-style "source-absolute" path (as above).
+
+You can pass the `-n/--dryrun` flag to mb gen to see what will happen without
+actually writing anything.
+
+You can pass the `-q/--quiet` flag to get mb to be silent unless there is an
+error, and pass the `-v/--verbose` flag to get mb to log all of the files
+that are read and written, and all the commands that are run.
+
+If the build config will use the Goma distributed-build system, you can pass
+the path to your Goma client in the `-g/--goma-dir` flag, and it will be
+incorporated into the appropriate flags for GYP or GN as needed.
+
+If gen ends up using GYP, the path must have a valid GYP configuration as the
+last component of the path (i.e., specify `//out/Release_x64`, not `//out`).
+The gyp script defaults to `//build/gyp_chromium`, but can be overridden with
+the `--gyp-script` flag, e.g. `--gyp-script=gypfiles/gyp_v8`.
+
+### `mb help`
+
+Produces help output on the other subcommands.
+
+### `mb lookup`
+
+Prints what command will be run by `mb gen` (like `mb gen -n` but does
+not require you to specify a path).
+
+The `-b/--builder`, `-c/--config`, `-f/--config-file`, `-m/--master`,
+`--phase`, `-q/--quiet`, and `-v/--verbose` flags work as documented for
+`mb gen`.
+
+### `mb validate`
+
+Does internal checking to make sure the config file is syntactically
+valid and that all of the entries are used properly. It does not validate
+that the flags make sense, or that the builder names are legal or
+comprehensive, but it does complain about configs and mixins that aren't
+used.
+
+The `-f/--config-file` and `-q/--quiet` flags work as documented for
+`mb gen`.
+
+This is mostly useful as a presubmit check and for verifying changes to
+the config file.
+
+## Isolates and Swarming
+
+`mb gen` is also responsible for generating the `.isolate` and
+`.isolated.gen.json` files needed to run test executables through swarming
+in a GN build (in a GYP build, this is done as part of the compile step).
+
+If you wish to generate the isolate files, pass `mb gen` the
+`--swarming-targets-file` command line argument; that arg should be a path
+to a file containing a list of ninja build targets to compute the runtime
+dependencies for (on Windows, use the ninja target name, not the file, so
+`base_unittests`, not `base_unittests.exe`).
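+
+The file itself is just a plain list of ninja target names, one per line,
+e.g. (target names are illustrative):
+
+```
+base_unittests
+net_unittests
+```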
+
+MB will take this file, translate each build target to the matching GN
+label (e.g., `base_unittests` -> `//base:base_unittests`), write that list
+to a file called `runtime_deps` in the build directory, and pass that to
+`gn gen $BUILD ... --runtime-deps-list-file=$BUILD/runtime_deps`.
+
+Once GN has computed the lists of runtime dependencies, MB will then
+look up the command line for each target (currently this is hard-coded
+in [mb.py](https://code.google.com/p/chromium/codesearch?q=mb.py#chromium/src/tools/mb/mb.py&q=mb.py%20GetIsolateCommand&sq=package:chromium&type=cs)), and write out the
+matching `.isolate` and `.isolated.gen.json` files.
+
+## The `mb_config.pyl` config file
+
+The `mb_config.pyl` config file is intended to enumerate all of the
+supported build configurations for Chromium. Generally speaking, you
+should never need to (or want to) build a configuration that isn't
+listed here, and so by using the configs in this file you can avoid
+having to juggle long lists of GYP_DEFINES and gn args by hand.
+
+`mb_config.pyl` is structured as a file containing a single PYthon Literal
+expression: a dictionary with three main keys, `masters`, `configs` and
+`mixins`.
+
+The `masters` key contains a nested series of dicts containing mappings
+of master -> builder -> config. This allows us to isolate the buildbot
+recipes from the actual details of the configs. The config should either
+be a single string value representing a key in the `configs` dictionary,
+or a list of strings, each of which is a key in the `configs` dictionary;
+the latter case is for builders that do multiple compiles with different
+arguments in a single build, and must *only* be used for such builders
+(where a --phase argument must be supplied in each lookup or gen call).
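+
+For example (the master, builder, and config names here are illustrative),
+the `masters` section might look like:
+
+```
+'masters': {
+  'client.v8': {
+    'V8 Linux64 - builder': 'gn_release_x64',
+    'V8 Linux64 - multi-phase': ['gn_release_x64', 'gn_debug_x64'],
+  },
+},
+```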
+
+The `configs` key points to a dictionary of named build configurations.
+
+There should be a key in this dict for every supported configuration
+of Chromium, meaning every configuration we have a bot for, and every
+configuration commonly used by developers but that we may not have a bot
+for.
+
+The value of each key is a list of "mixins" that will define what that
+build_config does. Each item in the list must be an entry in the dictionary
+value of the `mixins` key.
+
+Each mixin value is itself a dictionary that contains one or more of the
+following keys:
+
+  * `gyp_crosscompile`: a boolean; if true, GYP_CROSSCOMPILE=1 is set in
+    the environment and passed to GYP.
+  * `gyp_defines`: a string containing a list of GYP_DEFINES.
+  * `gn_args`: a string containing a list of values passed to gn --args.
+  * `mixins`: a list of other mixins that should be included.
+  * `type`: a string with either the value `gyp` or `gn`;
+    setting this indicates which meta-build tool to use.
+
+When `mb gen` or `mb analyze` executes, it takes a config name, looks it
+up in the 'configs' dict, and then does a left-to-right expansion of the
+mixins; gyp_defines and gn_args values are concatenated, and the type values
+override each other.
+
+For example, if you had:
+
+```
+{
+  'configs': {
+    'linux_release_trybot': ['gyp_release', 'trybot'],
+    'gn_shared_debug': ['gn', 'debug', 'shared'],
+  },
+  'mixins': {
+    'bot': {
+      'gyp_defines': 'use_goma=1 dcheck_always_on=0',
+      'gn_args': 'use_goma=true dcheck_always_on=false',
+    },
+    'debug': {
+      'gn_args': 'is_debug=true',
+    },
+    'gn': {'type': 'gn'},
+    'gyp_release': {
+      'mixins': ['bot', 'release'],
+      'type': 'gyp',
+    },
+    'release': {
+      'gn_args': 'is_debug=false',
+    },
+    'shared': {
+      'gn_args': 'is_component_build=true',
+      'gyp_defines': 'component=shared_library',
+    },
+    'trybot': {
+      'gyp_defines': 'dcheck_always_on=1',
+      'gn_args': 'dcheck_always_on=true',
+    }
+  }
+}
+```
+
+and you ran `mb gen -c linux_release_trybot //out/Release`, it would
+translate into a call to `gyp_chromium -G Release` with `GYP_DEFINES` set to
+`"use_goma=true dcheck_always_on=false dcheck_always_on=true"`.
+
+(From that you can see that mb is intentionally dumb and does not
+attempt to de-dup the flags; it lets gyp do that.)
+
+## Debugging MB
+
+By design, MB should be simple enough that very little can go wrong.
+
+The most obvious issue is that you might see different commands being
+run than you expect; running `'mb -v'` will print what it's doing and
+run the commands; `'mb -n'` will print what it will do but *not* run
+the commands.
+
+If you hit weirder things than that, add some print statements to the
+python script, send a question to gn-dev@chromium.org, or
+[file a bug](https://crbug.com/new) with the label
+'mb' and cc: dpranke@chromium.org.
+
+
diff --git a/tools/mb/mb b/tools/mb/mb
new file mode 100755
index 0000000..d3a0cdf
--- /dev/null
+++ b/tools/mb/mb
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/mb.py" "$@"
diff --git a/tools/mb/mb.bat b/tools/mb/mb.bat
new file mode 100755
index 0000000..a82770e
--- /dev/null
+++ b/tools/mb/mb.bat
@@ -0,0 +1,6 @@
+@echo off
+setlocal
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+set PYTHONDONTWRITEBYTECODE=1
+call python "%~dp0mb.py" %*
diff --git a/tools/mb/mb.py b/tools/mb/mb.py
new file mode 100755
index 0000000..536dc00
--- /dev/null
+++ b/tools/mb/mb.py
@@ -0,0 +1,1500 @@
+#!/usr/bin/env python
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""MB - the Meta-Build wrapper around GYP and GN
+
+MB is a wrapper script for GYP and GN that can be used to generate build files
+for sets of canned configurations and analyze them.
+"""
+
+from __future__ import print_function
+
+import argparse
+import ast
+import errno
+import json
+import os
+import pipes
+import pprint
+import re
+import shutil
+import sys
+import subprocess
+import tempfile
+import traceback
+import urllib2
+
+from collections import OrderedDict
+
+CHROMIUM_SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
+    os.path.abspath(__file__))))
+sys.path = [os.path.join(CHROMIUM_SRC_DIR, 'build')] + sys.path
+
+import gn_helpers
+
+
+def main(args):
+  mbw = MetaBuildWrapper()
+  return mbw.Main(args)
+
+
+class MetaBuildWrapper(object):
+  def __init__(self):
+    self.chromium_src_dir = CHROMIUM_SRC_DIR
+    self.default_config = os.path.join(self.chromium_src_dir, 'infra', 'mb',
+                                       'mb_config.pyl')
+    self.executable = sys.executable
+    self.platform = sys.platform
+    self.sep = os.sep
+    self.args = argparse.Namespace()
+    self.configs = {}
+    self.masters = {}
+    self.mixins = {}
+
+  def Main(self, args):
+    self.ParseArgs(args)
+    try:
+      ret = self.args.func()
+      if ret:
+        self.DumpInputFiles()
+      return ret
+    except KeyboardInterrupt:
+      self.Print('interrupted, exiting', stream=sys.stderr)
+      return 130
+    except Exception:
+      self.DumpInputFiles()
+      s = traceback.format_exc()
+      for l in s.splitlines():
+        self.Print(l)
+      return 1
+
+  def ParseArgs(self, argv):
+    def AddCommonOptions(subp):
+      subp.add_argument('-b', '--builder',
+                        help='builder name to look up config from')
+      subp.add_argument('-m', '--master',
+                        help='master name to look up config from')
+      subp.add_argument('-c', '--config',
+                        help='configuration to analyze')
+      subp.add_argument('--phase', type=int,
+                        help=('build phase for a given build '
+                              '(int in [1, 2, ...))'))
+      subp.add_argument('-f', '--config-file', metavar='PATH',
+                        default=self.default_config,
+                        help='path to config file '
+                            '(default is //infra/mb/mb_config.pyl)')
+      subp.add_argument('-g', '--goma-dir',
+                        help='path to goma directory')
+      subp.add_argument('--gyp-script', metavar='PATH',
+                        default=self.PathJoin('build', 'gyp_chromium'),
+                        help='path to gyp script relative to project root '
+                             '(default is %(default)s)')
+      subp.add_argument('--android-version-code',
+                        help='Sets GN arg android_default_version_code and '
+                             'GYP_DEFINE app_manifest_version_code')
+      subp.add_argument('--android-version-name',
+                        help='Sets GN arg android_default_version_name and '
+                             'GYP_DEFINE app_manifest_version_name')
+      subp.add_argument('-n', '--dryrun', action='store_true',
+                        help='Do a dry run (i.e., do nothing, just print '
+                             'the commands that will run)')
+      subp.add_argument('-v', '--verbose', action='store_true',
+                        help='verbose logging')
+
+    parser = argparse.ArgumentParser(prog='mb')
+    subps = parser.add_subparsers()
+
+    subp = subps.add_parser('analyze',
+                            help='analyze whether changes to a set of files '
+                                 'will cause a set of binaries to be rebuilt.')
+    AddCommonOptions(subp)
+    subp.add_argument('path', nargs=1,
+                      help='path build was generated into.')
+    subp.add_argument('input_path', nargs=1,
+                      help='path to a file containing the input arguments '
+                           'as a JSON object.')
+    subp.add_argument('output_path', nargs=1,
+                      help='path to a file containing the output arguments '
+                           'as a JSON object.')
+    subp.set_defaults(func=self.CmdAnalyze)
+
+    subp = subps.add_parser('gen',
+                            help='generate a new set of build files')
+    AddCommonOptions(subp)
+    subp.add_argument('--swarming-targets-file',
+                      help='save runtime dependencies for targets listed '
+                           'in file.')
+    subp.add_argument('path', nargs=1,
+                      help='path to generate build into')
+    subp.set_defaults(func=self.CmdGen)
+
+    subp = subps.add_parser('isolate',
+                            help='generate the .isolate files for a given '
+                                 'binary')
+    AddCommonOptions(subp)
+    subp.add_argument('path', nargs=1,
+                      help='path build was generated into')
+    subp.add_argument('target', nargs=1,
+                      help='ninja target to generate the isolate for')
+    subp.set_defaults(func=self.CmdIsolate)
+
+    subp = subps.add_parser('lookup',
+                            help='look up the command for a given config or '
+                                 'builder')
+    AddCommonOptions(subp)
+    subp.set_defaults(func=self.CmdLookup)
+
+    subp = subps.add_parser(
+        'run',
+        help='build and run the isolated version of a '
+             'binary',
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    subp.description = (
+        'Build, isolate, and run the given binary with the command line\n'
+        'listed in the isolate. You may pass extra arguments after the\n'
+        'target; use "--" if the extra arguments need to include switches.\n'
+        '\n'
+        'Examples:\n'
+        '\n'
+        '  % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n'
+        '    //out/Default content_browsertests\n'
+        '\n'
+        '  % tools/mb/mb.py run out/Default content_browsertests\n'
+        '\n'
+        '  % tools/mb/mb.py run out/Default content_browsertests -- \\\n'
+        '    --test-launcher-retry-limit=0'
+        '\n'
+    )
+
+    AddCommonOptions(subp)
+    subp.add_argument('-j', '--jobs', dest='jobs', type=int,
+                      help='Number of jobs to pass to ninja')
+    subp.add_argument('--no-build', dest='build', default=True,
+                      action='store_false',
+                      help='Do not build, just isolate and run')
+    subp.add_argument('path', nargs=1,
+                      help=('path to generate build into (or use).'
+                            ' This can be either a regular path or a '
+                            'GN-style source-relative path like '
+                            '//out/Default.'))
+    subp.add_argument('target', nargs=1,
+                      help='ninja target to build and run')
+    subp.add_argument('extra_args', nargs='*',
+                      help=('extra args to pass to the isolate to run. Use '
+                            '"--" as the first arg if you need to pass '
+                            'switches'))
+    subp.set_defaults(func=self.CmdRun)
+
+    subp = subps.add_parser('validate',
+                            help='validate the config file')
+    subp.add_argument('-f', '--config-file', metavar='PATH',
+                      default=self.default_config,
+                      help='path to config file '
+                          '(default is //infra/mb/mb_config.pyl)')
+    subp.set_defaults(func=self.CmdValidate)
+
+    subp = subps.add_parser('audit',
+                            help='Audit the config file to track progress')
+    subp.add_argument('-f', '--config-file', metavar='PATH',
+                      default=self.default_config,
+                      help='path to config file '
+                          '(default is //infra/mb/mb_config.pyl)')
+    subp.add_argument('-i', '--internal', action='store_true',
+                      help='check internal masters also')
+    subp.add_argument('-m', '--master', action='append',
+                      help='master to audit (default is all non-internal '
+                           'masters in file)')
+    subp.add_argument('-u', '--url-template', action='store',
+                      default='https://build.chromium.org/p/'
+                              '{master}/json/builders',
+                      help='URL scheme for JSON APIs to buildbot '
+                           '(default: %(default)s) ')
+    subp.add_argument('-c', '--check-compile', action='store_true',
+                      help='check whether tbd and master-only bots actually'
+                           ' do compiles')
+    subp.set_defaults(func=self.CmdAudit)
+
+    subp = subps.add_parser('help',
+                            help='Get help on a subcommand.')
+    subp.add_argument(nargs='?', action='store', dest='subcommand',
+                      help='The command to get help for.')
+    subp.set_defaults(func=self.CmdHelp)
+
+    self.args = parser.parse_args(argv)
+
+  def DumpInputFiles(self):
+
+    def DumpContentsOfFilePassedTo(arg_name, path):
+      if path and self.Exists(path):
+        self.Print("\n# To recreate the file passed to %s:" % arg_name)
+        self.Print("%% cat > %s <<EOF)" % path)
+        contents = self.ReadFile(path)
+        self.Print(contents)
+        self.Print("EOF\n%\n")
+
+    if getattr(self.args, 'input_path', None):
+      DumpContentsOfFilePassedTo(
+          'argv[0] (input_path)', self.args.input_path[0])
+    if getattr(self.args, 'swarming_targets_file', None):
+      DumpContentsOfFilePassedTo(
+          '--swarming-targets-file', self.args.swarming_targets_file)
+
+  def CmdAnalyze(self):
+    vals = self.Lookup()
+    self.ClobberIfNeeded(vals)
+    if vals['type'] == 'gn':
+      return self.RunGNAnalyze(vals)
+    else:
+      return self.RunGYPAnalyze(vals)
+
+  def CmdGen(self):
+    vals = self.Lookup()
+    self.ClobberIfNeeded(vals)
+    if vals['type'] == 'gn':
+      return self.RunGNGen(vals)
+    else:
+      return self.RunGYPGen(vals)
+
+  def CmdHelp(self):
+    if self.args.subcommand:
+      self.ParseArgs([self.args.subcommand, '--help'])
+    else:
+      self.ParseArgs(['--help'])
+
+  def CmdIsolate(self):
+    vals = self.GetConfig()
+    if not vals:
+      return 1
+
+    if vals['type'] == 'gn':
+      return self.RunGNIsolate(vals)
+    else:
+      return self.Build('%s_run' % self.args.target[0])
+
+  def CmdLookup(self):
+    vals = self.Lookup()
+    if vals['type'] == 'gn':
+      cmd = self.GNCmd('gen', '_path_')
+      gn_args = self.GNArgs(vals)
+      self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args)
+      env = None
+    else:
+      cmd, env = self.GYPCmd('_path_', vals)
+
+    self.PrintCmd(cmd, env)
+    return 0
+
+  def CmdRun(self):
+    vals = self.GetConfig()
+    if not vals:
+      return 1
+
+    build_dir = self.args.path[0]
+    target = self.args.target[0]
+
+    if vals['type'] == 'gn':
+      if self.args.build:
+        ret = self.Build(target)
+        if ret:
+          return ret
+      ret = self.RunGNIsolate(vals)
+      if ret:
+        return ret
+    else:
+      ret = self.Build('%s_run' % target)
+      if ret:
+        return ret
+
+    cmd = [
+        self.executable,
+        self.PathJoin('tools', 'swarming_client', 'isolate.py'),
+        'run',
+        '-s',
+        self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
+    ]
+    if self.args.extra_args:
+        cmd += ['--'] + self.args.extra_args
+
+    ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False)
+
+    return ret
+
+  def CmdValidate(self, print_ok=True):
+    errs = []
+
+    # Read the file to make sure it parses.
+    self.ReadConfigFile()
+
+    # Build a list of all of the configs referenced by builders.
+    all_configs = {}
+    for master in self.masters:
+      for config in self.masters[master].values():
+        if isinstance(config, list):
+          for c in config:
+            all_configs[c] = master
+        else:
+          all_configs[config] = master
+
+    # Check that every referenced args file or config actually exists.
+    for config, loc in all_configs.items():
+      if config.startswith('//'):
+        if not self.Exists(self.ToAbsPath(config)):
+          errs.append('Unknown args file "%s" referenced from "%s".' %
+                      (config, loc))
+      elif not config in self.configs:
+        errs.append('Unknown config "%s" referenced from "%s".' %
+                    (config, loc))
+
+    # Check that every actual config is actually referenced.
+    for config in self.configs:
+      if not config in all_configs:
+        errs.append('Unused config "%s".' % config)
+
+    # Figure out the whole list of mixins, and check that every mixin
+    # listed by a config or another mixin actually exists.
+    referenced_mixins = set()
+    for config, mixins in self.configs.items():
+      for mixin in mixins:
+        if not mixin in self.mixins:
+          errs.append('Unknown mixin "%s" referenced by config "%s".' %
+                      (mixin, config))
+        referenced_mixins.add(mixin)
+
+    for mixin in self.mixins:
+      for sub_mixin in self.mixins[mixin].get('mixins', []):
+        if not sub_mixin in self.mixins:
+          errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
+                      (sub_mixin, mixin))
+        referenced_mixins.add(sub_mixin)
+
+    # Check that every mixin defined is actually referenced somewhere.
+    for mixin in self.mixins:
+      if not mixin in referenced_mixins:
+        errs.append('Unreferenced mixin "%s".' % mixin)
+
+    if errs:
+      raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
+                    '\n  ' + '\n  '.join(errs))
+
+    if print_ok:
+      self.Print('mb config file %s looks ok.' % self.args.config_file)
+    return 0
+
+  def CmdAudit(self):
+    """Track the progress of the GYP->GN migration on the bots."""
+
+    # First, make sure the config file is okay, but don't print anything
+    # if it is (it will throw an error if it isn't).
+    self.CmdValidate(print_ok=False)
+
+    stats = OrderedDict()
+    STAT_MASTER_ONLY = 'Master only'
+    STAT_CONFIG_ONLY = 'Config only'
+    STAT_TBD = 'Still TBD'
+    STAT_GYP = 'Still GYP'
+    STAT_DONE = 'Done (on GN)'
+    stats[STAT_MASTER_ONLY] = 0
+    stats[STAT_CONFIG_ONLY] = 0
+    stats[STAT_TBD] = 0
+    stats[STAT_GYP] = 0
+    stats[STAT_DONE] = 0
+
+    def PrintBuilders(heading, builders, notes):
+      stats.setdefault(heading, 0)
+      stats[heading] += len(builders)
+      if builders:
+        self.Print('  %s:' % heading)
+        for builder in sorted(builders):
+          self.Print('    %s%s' % (builder, notes[builder]))
+
+    self.ReadConfigFile()
+
+    masters = self.args.master or self.masters
+    for master in sorted(masters):
+      url = self.args.url_template.replace('{master}', master)
+
+      self.Print('Auditing %s' % master)
+
+      MASTERS_TO_SKIP = (
+        'client.skia',
+        'client.v8.fyi',
+        'tryserver.v8',
+      )
+      if master in MASTERS_TO_SKIP:
+        # Skip these bots because converting them is the responsibility of
+        # those teams and out of scope for the Chromium migration to GN.
+        self.Print('  Skipped (out of scope)')
+        self.Print('')
+        continue
+
+      INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous',
+                          'internal.client.kitchensync')
+      if master in INTERNAL_MASTERS and not self.args.internal:
+        # Skip these because the servers aren't accessible by default ...
+        self.Print('  Skipped (internal)')
+        self.Print('')
+        continue
+
+      try:
+        # Fetch the /builders contents from the buildbot master. The
+        # keys of the dict are the builder names themselves.
+        json_contents = self.Fetch(url)
+        d = json.loads(json_contents)
+      except Exception as e:
+        self.Print(str(e))
+        return 1
+
+      config_builders = set(self.masters[master])
+      master_builders = set(d.keys())
+      both = master_builders & config_builders
+      master_only = master_builders - config_builders
+      config_only = config_builders - master_builders
+      tbd = set()
+      gyp = set()
+      done = set()
+      notes = {builder: '' for builder in config_builders | master_builders}
+
+      for builder in both:
+        config = self.masters[master][builder]
+        if config == 'tbd':
+          tbd.add(builder)
+        elif isinstance(config, list):
+          vals = self.FlattenConfig(config[0])
+          if vals['type'] == 'gyp':
+            gyp.add(builder)
+          else:
+            done.add(builder)
+        elif config.startswith('//'):
+          done.add(builder)
+        else:
+          vals = self.FlattenConfig(config)
+          if vals['type'] == 'gyp':
+            gyp.add(builder)
+          else:
+            done.add(builder)
+
+      if self.args.check_compile and (tbd or master_only):
+        either = tbd | master_only
+        for builder in either:
+          notes[builder] = ' (' + self.CheckCompile(master, builder) +')'
+
+      if master_only or config_only or tbd or gyp:
+        PrintBuilders(STAT_MASTER_ONLY, master_only, notes)
+        PrintBuilders(STAT_CONFIG_ONLY, config_only, notes)
+        PrintBuilders(STAT_TBD, tbd, notes)
+        PrintBuilders(STAT_GYP, gyp, notes)
+      else:
+        self.Print('  All GN!')
+
+      stats[STAT_DONE] += len(done)
+
+      self.Print('')
+
+    fmt = '{:<27} {:>4}'
+    self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values()))))
+    self.Print(fmt.format('-' * 27, '----'))
+    for stat, count in stats.items():
+      self.Print(fmt.format(stat, str(count)))
+
+    return 0
+
+  def GetConfig(self):
+    build_dir = self.args.path[0]
+
+    vals = {}
+    if self.args.builder or self.args.master or self.args.config:
+      vals = self.Lookup()
+      if vals['type'] == 'gn':
+        # Re-run gn gen in order to ensure the config is consistent with the
+        # build dir.
+        self.RunGNGen(vals)
+      return vals
+
+    mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type')
+    if not self.Exists(mb_type_path):
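+      # No mb_type file: if a toolchain.ninja already exists, assume this is
+      # a GN build dir; otherwise report an error below.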
+      toolchain_path = self.PathJoin(self.ToAbsPath(build_dir),
+                                     'toolchain.ninja')
+      if not self.Exists(toolchain_path):
+        self.Print('Must either specify a path to an existing GN build dir '
+                   'or pass in a -m/-b pair or a -c flag to specify the '
+                   'configuration')
+        return {}
+      else:
+        mb_type = 'gn'
+    else:
+      mb_type = self.ReadFile(mb_type_path).strip()
+
+    if mb_type == 'gn':
+      vals = self.GNValsFromDir(build_dir)
+    else:
+      vals = {}
+    vals['type'] = mb_type
+
+    return vals
+
+  def GNValsFromDir(self, build_dir):
+    args_contents = ""
+    gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn')
+    if self.Exists(gn_args_path):
+      args_contents = self.ReadFile(gn_args_path)
+    gn_args = []
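+    # Each line of args.gn has the form "name = value"; rebuild it as a
+    # single "name=value" string.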
+    for l in args_contents.splitlines():
+      fields = l.split(' ')
+      name = fields[0]
+      val = ' '.join(fields[2:])
+      gn_args.append('%s=%s' % (name, val))
+
+    return {
+      'gn_args': ' '.join(gn_args),
+      'type': 'gn',
+    }
+
+  def Lookup(self):
+    vals = self.ReadBotConfig()
+    if not vals:
+      self.ReadConfigFile()
+      config = self.ConfigFromArgs()
+      if config.startswith('//'):
+        if not self.Exists(self.ToAbsPath(config)):
+          raise MBErr('args file "%s" not found' % config)
+        vals = {
+          'args_file': config,
+          'cros_passthrough': False,
+          'gn_args': '',
+          'gyp_crosscompile': False,
+          'gyp_defines': '',
+          'type': 'gn',
+        }
+      else:
+        if not config in self.configs:
+          raise MBErr('Config "%s" not found in %s' %
+                      (config, self.args.config_file))
+        vals = self.FlattenConfig(config)
+
+    # Do some basic sanity checking on the config so that we
+    # don't have to do this in every caller.
+    assert 'type' in vals, 'No meta-build type specified in the config'
+    assert vals['type'] in ('gn', 'gyp'), (
+        'Unknown meta-build type "%s"' % vals['type'])
+
+    return vals
+
+  def ReadBotConfig(self):
+    if not self.args.master or not self.args.builder:
+      return {}
+    path = self.PathJoin(self.chromium_src_dir, 'ios', 'build', 'bots',
+                         self.args.master, self.args.builder + '.json')
+    if not self.Exists(path):
+      return {}
+
+    contents = json.loads(self.ReadFile(path))
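+    # GYP_DEFINES may be either a dict of names to values or a list of
+    # 'name=value' strings.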
+    gyp_vals = contents.get('GYP_DEFINES', {})
+    if isinstance(gyp_vals, dict):
+      gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items())
+    else:
+      gyp_defines = ' '.join(gyp_vals)
+    gn_args = ' '.join(contents.get('gn_args', []))
+
+    return {
+        'args_file': '',
+        'cros_passthrough': False,
+        'gn_args': gn_args,
+        'gyp_crosscompile': False,
+        'gyp_defines': gyp_defines,
+        'type': contents.get('mb_type', ''),
+    }
+
+  def ReadConfigFile(self):
+    if not self.Exists(self.args.config_file):
+      raise MBErr('config file not found at %s' % self.args.config_file)
+
+    try:
+      contents = ast.literal_eval(self.ReadFile(self.args.config_file))
+    except SyntaxError as e:
+      raise MBErr('Failed to parse config file "%s": %s' %
+                 (self.args.config_file, e))
+
+    self.configs = contents['configs']
+    self.masters = contents['masters']
+    self.mixins = contents['mixins']
+
+  def ConfigFromArgs(self):
+    if self.args.config:
+      if self.args.master or self.args.builder:
+        raise MBErr('Can not specify both -c/--config and -m/--master or '
+                    '-b/--builder')
+
+      return self.args.config
+
+    if not self.args.master or not self.args.builder:
+      raise MBErr('Must specify either -c/--config or '
+                  '(-m/--master and -b/--builder)')
+
+    if not self.args.master in self.masters:
+      raise MBErr('Master name "%s" not found in "%s"' %
+                  (self.args.master, self.args.config_file))
+
+    if not self.args.builder in self.masters[self.args.master]:
+      raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
+                  (self.args.builder, self.args.master, self.args.config_file))
+
+    config = self.masters[self.args.master][self.args.builder]
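+    # A list config means the builder does multiple compiles; the --phase
+    # flag selects which entry to use.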
+    if isinstance(config, list):
+      if self.args.phase is None:
+        raise MBErr('Must specify a build --phase for %s on %s' %
+                    (self.args.builder, self.args.master))
+      phase = int(self.args.phase)
+      if phase < 1 or phase > len(config):
+        raise MBErr('Phase %d out of bounds for %s on %s' %
+                    (phase, self.args.builder, self.args.master))
+      return config[phase-1]
+
+    if self.args.phase is not None:
+      raise MBErr('Must not specify a build --phase for %s on %s' %
+                  (self.args.builder, self.args.master))
+    return config
+
+  def FlattenConfig(self, config):
+    mixins = self.configs[config]
+    vals = {
+      'args_file': '',
+      'cros_passthrough': False,
+      'gn_args': [],
+      'gyp_defines': '',
+      'gyp_crosscompile': False,
+      'type': None,
+    }
+
+    visited = []
+    self.FlattenMixins(mixins, vals, visited)
+    return vals
+
+  def FlattenMixins(self, mixins, vals, visited):
+    for m in mixins:
+      if m not in self.mixins:
+        raise MBErr('Unknown mixin "%s"' % m)
+
+      visited.append(m)
+
+      mixin_vals = self.mixins[m]
+
+      if 'cros_passthrough' in mixin_vals:
+        vals['cros_passthrough'] = mixin_vals['cros_passthrough']
+      if 'gn_args' in mixin_vals:
+        if vals['gn_args']:
+          vals['gn_args'] += ' ' + mixin_vals['gn_args']
+        else:
+          vals['gn_args'] = mixin_vals['gn_args']
+      if 'gyp_crosscompile' in mixin_vals:
+        vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
+      if 'gyp_defines' in mixin_vals:
+        if vals['gyp_defines']:
+          vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
+        else:
+          vals['gyp_defines'] = mixin_vals['gyp_defines']
+      if 'type' in mixin_vals:
+        vals['type'] = mixin_vals['type']
+
+      if 'mixins' in mixin_vals:
+        self.FlattenMixins(mixin_vals['mixins'], vals, visited)
+    return vals
+
+  def ClobberIfNeeded(self, vals):
+    path = self.args.path[0]
+    build_dir = self.ToAbsPath(path)
+    mb_type_path = self.PathJoin(build_dir, 'mb_type')
+    needs_clobber = False
+    new_mb_type = vals['type']
+    if self.Exists(build_dir):
+      if self.Exists(mb_type_path):
+        old_mb_type = self.ReadFile(mb_type_path)
+        if old_mb_type != new_mb_type:
+          self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
+                     (old_mb_type, new_mb_type, path))
+          needs_clobber = True
+      else:
+        # There is no 'mb_type' file in the build directory, so this probably
+        # means that the prior build(s) were not done through mb, and we
+        # have no idea if this was a GYP build or a GN build. Clobber it
+        # to be safe.
+        self.Print("%s/mb_type missing, clobbering to be safe" % path)
+        needs_clobber = True
+
+    if self.args.dryrun:
+      return
+
+    if needs_clobber:
+      self.RemoveDirectory(build_dir)
+
+    self.MaybeMakeDirectory(build_dir)
+    self.WriteFile(mb_type_path, new_mb_type)
+
+  def RunGNGen(self, vals):
+    build_dir = self.args.path[0]
+
+    cmd = self.GNCmd('gen', build_dir, '--check')
+    gn_args = self.GNArgs(vals)
+
+    # Since GN hasn't run yet, the build directory may not even exist.
+    self.MaybeMakeDirectory(self.ToAbsPath(build_dir))
+
+    gn_args_path = self.ToAbsPath(build_dir, 'args.gn')
+    self.WriteFile(gn_args_path, gn_args, force_verbose=True)
+
+    swarming_targets = []
+    if getattr(self.args, 'swarming_targets_file', None):
+      # We need GN to generate the list of runtime dependencies for
+      # the compile targets listed (one per line) in the file so
+      # we can run them via swarming. We use ninja_to_gn.pyl to convert
+      # the compile targets to the matching GN labels.
+      path = self.args.swarming_targets_file
+      if not self.Exists(path):
+        self.WriteFailureAndRaise('"%s" does not exist' % path,
+                                  output_path=None)
+      contents = self.ReadFile(path)
+      swarming_targets = set(contents.splitlines())
+      gn_isolate_map = ast.literal_eval(self.ReadFile(self.PathJoin(
+          self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
+      gn_labels = []
+      err = ''
+      for target in swarming_targets:
+        target_name = self.GNTargetName(target)
+        if not target_name in gn_isolate_map:
+          err += ('test target "%s" not found\n' % target_name)
+        elif gn_isolate_map[target_name]['type'] == 'unknown':
+          err += ('test target "%s" type is unknown\n' % target_name)
+        else:
+          gn_labels.append(gn_isolate_map[target_name]['label'])
+
+      if err:
+          raise MBErr('Error: Failed to match swarming targets to %s:\n%s' %
+                      ('//testing/buildbot/gn_isolate_map.pyl', err))
+
+      gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps')
+      self.WriteFile(gn_runtime_deps_path, '\n'.join(gn_labels) + '\n')
+      cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)
+
+    ret, _, _ = self.Run(cmd)
+    if ret:
+        # If `gn gen` failed, we should exit early rather than trying to
+        # generate isolates. Run() will have already logged any error output.
+        self.Print('GN gen failed: %d' % ret)
+        return ret
+
+    android = 'target_os="android"' in vals['gn_args']
+    for target in swarming_targets:
+      if android:
+        # Android targets may be either android_apk or executable. The former
+        # will result in runtime_deps associated with the stamp file, while the
+        # latter will result in runtime_deps associated with the executable.
+        target_name = self.GNTargetName(target)
+        label = gn_isolate_map[target_name]['label']
+        runtime_deps_targets = [
+            target_name + '.runtime_deps',
+            'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
+      elif gn_isolate_map[target]['type'] == 'gpu_browser_test':
+        if self.platform == 'win32':
+          runtime_deps_targets = ['browser_tests.exe.runtime_deps']
+        else:
+          runtime_deps_targets = ['browser_tests.runtime_deps']
+      elif (gn_isolate_map[target]['type'] == 'script' or
+            gn_isolate_map[target].get('label_type') == 'group'):
+        # For script targets, the build target is usually a group,
+        # for which gn generates the runtime_deps next to the stamp file
+        # for the label, which lives under the obj/ directory.
+        label = gn_isolate_map[target]['label']
+        runtime_deps_targets = [
+            'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
+      elif self.platform == 'win32':
+        runtime_deps_targets = [target + '.exe.runtime_deps']
+      else:
+        runtime_deps_targets = [target + '.runtime_deps']
+
+      for r in runtime_deps_targets:
+        runtime_deps_path = self.ToAbsPath(build_dir, r)
+        if self.Exists(runtime_deps_path):
+          break
+      else:
+        raise MBErr('did not generate any of %s' %
+                    ', '.join(runtime_deps_targets))
+
+      command, extra_files = self.GetIsolateCommand(target, vals,
+                                                    gn_isolate_map)
+
+      runtime_deps = self.ReadFile(runtime_deps_path).splitlines()
+
+      self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
+                             extra_files)
+
+    return 0
+
+  def RunGNIsolate(self, vals):
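+    # Regenerates the .isolate and .isolated.gen.json files for a single
+    # target from 'gn desc <dir> <label> runtime_deps', then sanity-checks
+    # them with swarming's isolate.py.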
+    gn_isolate_map = ast.literal_eval(self.ReadFile(self.PathJoin(
+        self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
+
+    build_dir = self.args.path[0]
+    target = self.args.target[0]
+    target_name = self.GNTargetName(target)
+    command, extra_files = self.GetIsolateCommand(target, vals, gn_isolate_map)
+
+    label = gn_isolate_map[target_name]['label']
+    cmd = self.GNCmd('desc', build_dir, label, 'runtime_deps')
+    ret, out, _ = self.Call(cmd)
+    if ret:
+      if out:
+        self.Print(out)
+      return ret
+
+    runtime_deps = out.splitlines()
+
+    self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
+                           extra_files)
+
+    ret, _, _ = self.Run([
+        self.executable,
+        self.PathJoin('tools', 'swarming_client', 'isolate.py'),
+        'check',
+        '-i',
+        self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
+        '-s',
+        self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))],
+        buffer_output=False)
+
+    return ret
+
+  def WriteIsolateFiles(self, build_dir, command, target, runtime_deps,
+                        extra_files):
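+    # Writes <target>.isolate and <target>.isolated.gen.json into the build
+    # directory so the target can be archived and run via swarming.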
+    isolate_path = self.ToAbsPath(build_dir, target + '.isolate')
+    self.WriteFile(isolate_path,
+      pprint.pformat({
+        'variables': {
+          'command': command,
+          'files': sorted(runtime_deps + extra_files),
+        }
+      }) + '\n')
+
+    self.WriteJSON(
+      {
+        'args': [
+          '--isolated',
+          self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
+          '--isolate',
+          self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
+        ],
+        'dir': self.chromium_src_dir,
+        'version': 1,
+      },
+      isolate_path + 'd.gen.json',
+    )
+
+  def GNCmd(self, subcommand, path, *args):
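+    # Builds a command line for the prebuilt gn binary checked in under
+    # //buildtools/<platform>/.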
+    if self.platform == 'linux2':
+      subdir, exe = 'linux64', 'gn'
+    elif self.platform == 'darwin':
+      subdir, exe = 'mac', 'gn'
+    else:
+      subdir, exe = 'win', 'gn.exe'
+
+    gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, exe)
+
+    return [gn_path, subcommand, path] + list(args)
+
+  def GNArgs(self, vals):
+    if vals['cros_passthrough']:
+      if not 'GN_ARGS' in os.environ:
+        raise MBErr('MB is expecting GN_ARGS to be in the environment')
+      gn_args = os.environ['GN_ARGS']
+      if not re.search('target_os.*=.*"chromeos"', gn_args):
+        raise MBErr('GN_ARGS is missing target_os = "chromeos": (GN_ARGS=%s)' %
+                    gn_args)
+    else:
+      gn_args = vals['gn_args']
+
+    if self.args.goma_dir:
+      gn_args += ' goma_dir="%s"' % self.args.goma_dir
+
+    android_version_code = self.args.android_version_code
+    if android_version_code:
+      gn_args += ' android_default_version_code="%s"' % android_version_code
+
+    android_version_name = self.args.android_version_name
+    if android_version_name:
+      gn_args += ' android_default_version_name="%s"' % android_version_name
+
+    # Canonicalize the arg string into a sorted, newline-separated list
+    # of key-value pairs, and de-dup the keys if need be so that only
+    # the last instance of each arg is listed.
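+    # For example (illustrative), 'is_debug=false use_goma=true' becomes:
+    #   is_debug = false
+    #   use_goma = true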
+    gn_args = gn_helpers.ToGNString(gn_helpers.FromGNArgs(gn_args))
+
+    args_file = vals.get('args_file', None)
+    if args_file:
+      gn_args = ('import("%s")\n' % vals['args_file']) + gn_args
+    return gn_args
+
+  def RunGYPGen(self, vals):
+    path = self.args.path[0]
+
+    output_dir = self.ParseGYPConfigPath(path)
+    cmd, env = self.GYPCmd(output_dir, vals)
+    ret, _, _ = self.Run(cmd, env=env)
+    return ret
+
+  def RunGYPAnalyze(self, vals):
+    output_dir = self.ParseGYPConfigPath(self.args.path[0])
+    if self.args.verbose:
+      inp = self.ReadInputJSON(['files', 'test_targets',
+                                'additional_compile_targets'])
+      self.Print()
+      self.Print('analyze input:')
+      self.PrintJSON(inp)
+      self.Print()
+
+    cmd, env = self.GYPCmd(output_dir, vals)
+    cmd.extend(['-f', 'analyzer',
+                '-G', 'config_path=%s' % self.args.input_path[0],
+                '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
+    ret, _, _ = self.Run(cmd, env=env)
+    if not ret and self.args.verbose:
+      outp = json.loads(self.ReadFile(self.args.output_path[0]))
+      self.Print()
+      self.Print('analyze output:')
+      self.PrintJSON(outp)
+      self.Print()
+
+    return ret
+
+  def GetIsolateCommand(self, target, vals, gn_isolate_map):
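+    # Returns (cmdline, extra_files): the command used to launch the target
+    # from inside the build directory, plus any extra files the isolate needs.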
+    android = 'target_os="android"' in vals['gn_args']
+
+    # This needs to mirror the settings in //build/config/ui.gni:
+    # use_x11 = is_linux && !use_ozone.
+    use_x11 = (self.platform == 'linux2' and
+               not android and
+               not 'use_ozone=true' in vals['gn_args'])
+
+    asan = 'is_asan=true' in vals['gn_args']
+    msan = 'is_msan=true' in vals['gn_args']
+    tsan = 'is_tsan=true' in vals['gn_args']
+
+    target_name = self.GNTargetName(target)
+    test_type = gn_isolate_map[target_name]['type']
+
+    executable = gn_isolate_map[target_name].get('executable', target_name)
+    executable_suffix = '.exe' if self.platform == 'win32' else ''
+
+    cmdline = []
+    extra_files = []
+
+    if android and test_type != "script":
+      logdog_command = [
+          '--logdog-bin-cmd', './../../bin/logdog_butler',
+          '--project', 'chromium',
+          '--service-account-json',
+          '/creds/service_accounts/service-account-luci-logdog-publisher.json',
+          '--prefix', 'android/swarming/logcats/${SWARMING_TASK_ID}',
+          '--source', '${ISOLATED_OUTDIR}/logcats',
+          '--name', 'unified_logcats',
+      ]
+      test_cmdline = [
+          self.PathJoin('bin', 'run_%s' % target_name),
+          '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats',
+          '--target-devices-file', '${SWARMING_BOT_FILE}',
+          '-v'
+      ]
+      cmdline = (['./../../build/android/test_wrapper/logdog_wrapper.py']
+                 + logdog_command + test_cmdline)
+    elif use_x11 and test_type == 'windowed_test_launcher':
+      extra_files = [
+          'xdisplaycheck',
+          '../../testing/test_env.py',
+          '../../testing/xvfb.py',
+      ]
+      cmdline = [
+        '../../testing/xvfb.py',
+        '.',
+        './' + str(executable) + executable_suffix,
+        '--brave-new-test-launcher',
+        '--test-launcher-bot-mode',
+        '--asan=%d' % asan,
+        '--msan=%d' % msan,
+        '--tsan=%d' % tsan,
+      ]
+    elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
+      extra_files = [
+          '../../testing/test_env.py'
+      ]
+      cmdline = [
+          '../../testing/test_env.py',
+          './' + str(executable) + executable_suffix,
+          '--brave-new-test-launcher',
+          '--test-launcher-bot-mode',
+          '--asan=%d' % asan,
+          '--msan=%d' % msan,
+          '--tsan=%d' % tsan,
+      ]
+    elif test_type == 'gpu_browser_test':
+      extra_files = [
+          '../../testing/test_env.py'
+      ]
+      gtest_filter = gn_isolate_map[target]['gtest_filter']
+      cmdline = [
+          '../../testing/test_env.py',
+          './browser_tests' + executable_suffix,
+          '--test-launcher-bot-mode',
+          '--enable-gpu',
+          '--test-launcher-jobs=1',
+          '--gtest_filter=%s' % gtest_filter,
+      ]
+    elif test_type == 'script':
+      extra_files = [
+          '../../testing/test_env.py'
+      ]
+      cmdline = [
+          '../../testing/test_env.py',
+          '../../' + self.ToSrcRelPath(gn_isolate_map[target]['script'])
+      ]
+    elif test_type == 'raw':
+      extra_files = []
+      cmdline = [
+          './' + str(target) + executable_suffix,
+      ]
+
+    else:
+      self.WriteFailureAndRaise('No command line for %s found (test type %s).'
+                                % (target, test_type), output_path=None)
+
+    cmdline += gn_isolate_map[target_name].get('args', [])
+
+    return cmdline, extra_files
+
+  def ToAbsPath(self, build_path, *comps):
+    return self.PathJoin(self.chromium_src_dir,
+                         self.ToSrcRelPath(build_path),
+                         *comps)
+
+  def ToSrcRelPath(self, path):
+    """Returns a relative path from the top of the repo."""
+    if path.startswith('//'):
+      return path[2:].replace('/', self.sep)
+    return self.RelPath(path, self.chromium_src_dir)
+
+  def ParseGYPConfigPath(self, path):
+    rpath = self.ToSrcRelPath(path)
+    output_dir, _, _ = rpath.rpartition(self.sep)
+    return output_dir
+
+  def GYPCmd(self, output_dir, vals):
+    if vals['cros_passthrough']:
+      if not 'GYP_DEFINES' in os.environ:
+        raise MBErr('MB is expecting GYP_DEFINES to be in the environment')
+      gyp_defines = os.environ['GYP_DEFINES']
+      if not 'chromeos=1' in gyp_defines:
+        raise MBErr('GYP_DEFINES is missing chromeos=1: (GYP_DEFINES=%s)' %
+                    gyp_defines)
+    else:
+      gyp_defines = vals['gyp_defines']
+
+    goma_dir = self.args.goma_dir
+
+    # GYP uses shlex.split() to split the gyp defines into separate arguments,
+    # so we can support backslashes and spaces in arguments by quoting
+    # them, even on Windows, where this normally wouldn't work.
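+    # For example (illustrative), a goma_dir of c:\goma is passed as
+    # gomadir='c:\goma'.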
+    if goma_dir and ('\\' in goma_dir or ' ' in goma_dir):
+      goma_dir = "'%s'" % goma_dir
+
+    if goma_dir:
+      gyp_defines += ' gomadir=%s' % goma_dir
+
+    android_version_code = self.args.android_version_code
+    if android_version_code:
+      gyp_defines += ' app_manifest_version_code=%s' % android_version_code
+
+    android_version_name = self.args.android_version_name
+    if android_version_name:
+      gyp_defines += ' app_manifest_version_name=%s' % android_version_name
+
+    cmd = [
+        self.executable,
+        self.args.gyp_script,
+        '-G',
+        'output_dir=' + output_dir,
+    ]
+
+    # Start from a copy of the ambient environment and explicitly set the
+    # GYP variables we need to control to known values.
+    env = os.environ.copy()
+
+    # This is a terrible hack to work around the fact that
+    # //tools/clang/scripts/update.py is invoked by GYP and GN but
+    # currently relies on an environment variable to figure out
+    # what revision to embed in the command line #defines.
+    # For GN, we've made this work via a gn arg that will cause update.py
+    # to get an additional command line arg, but getting that to work
+    # via GYP_DEFINES has proven difficult, so we rewrite the GYP_DEFINES
+    # to get rid of the arg and add the old var in, instead.
+    # See crbug.com/582737 for more on this. This can hopefully all
+    # go away once GYP itself goes away.
+    m = re.search('llvm_force_head_revision=1\s*', gyp_defines)
+    if m:
+      env['LLVM_FORCE_HEAD_REVISION'] = '1'
+      gyp_defines = gyp_defines.replace(m.group(0), '')
+
+    # This is another terrible hack to work around the fact that
+    # GYP sets the link concurrency to use via the GYP_LINK_CONCURRENCY
+    # environment variable, and not via a proper GYP_DEFINE. See
+    # crbug.com/611491 for more on this.
+    m = re.search('gyp_link_concurrency=(\d+)(\s*)', gyp_defines)
+    if m:
+      env['GYP_LINK_CONCURRENCY'] = m.group(1)
+      gyp_defines = gyp_defines.replace(m.group(0), '')
+
+    env['GYP_GENERATORS'] = 'ninja'
+    if 'GYP_CHROMIUM_NO_ACTION' in env:
+      del env['GYP_CHROMIUM_NO_ACTION']
+    if 'GYP_CROSSCOMPILE' in env:
+      del env['GYP_CROSSCOMPILE']
+    env['GYP_DEFINES'] = gyp_defines
+    if vals['gyp_crosscompile']:
+      env['GYP_CROSSCOMPILE'] = '1'
+    return cmd, env
+
+  def RunGNAnalyze(self, vals):
+    # analyze runs before 'gn gen' now, so we need to run gn gen
+    # in order to ensure that we have a build directory.
+    ret = self.RunGNGen(vals)
+    if ret:
+      return ret
+
+    inp = self.ReadInputJSON(['files', 'test_targets',
+                              'additional_compile_targets'])
+    if self.args.verbose:
+      self.Print()
+      self.Print('analyze input:')
+      self.PrintJSON(inp)
+      self.Print()
+
+    # TODO(crbug.com/555273) - currently GN treats targets and
+    # additional_compile_targets identically since we can't tell the
+    # difference between a target that is a group in GN and one that isn't.
+    # We should eventually fix this and treat the two types differently.
+    targets = (set(inp['test_targets']) |
+               set(inp['additional_compile_targets']))
+
+    output_path = self.args.output_path[0]
+
+    # Bail out early if a GN file was modified, since 'gn refs' won't know
+    # what to do about it. Also, bail out early if 'all' was asked for,
+    # since we can't deal with it yet.
+    if (any(f.endswith('.gn') or f.endswith('.gni') for f in inp['files']) or
+        'all' in targets):
+      self.WriteJSON({
+            'status': 'Found dependency (all)',
+            'compile_targets': sorted(targets),
+            'test_targets': sorted(targets & set(inp['test_targets'])),
+          }, output_path)
+      return 0
+
+    # This shouldn't normally happen, but could due to unusual race conditions,
+    # like a try job that gets scheduled before a patch lands but runs after
+    # the patch has landed.
+    if not inp['files']:
+      self.Print('Warning: No files modified in patch, bailing out early.')
+      self.WriteJSON({
+            'status': 'No dependency',
+            'compile_targets': [],
+            'test_targets': [],
+          }, output_path)
+      return 0
+
+    ret = 0
+    response_file = self.TempFile()
+    response_file.write('\n'.join(inp['files']) + '\n')
+    response_file.close()
+
+    matching_targets = set()
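+    # Two passes over 'gn refs': the first matches the modified files to
+    # targets by output name (--as=output), the second matches them by GN
+    # label so short names like 'chrome_public_apk' are also caught.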
+    try:
+      cmd = self.GNCmd('refs',
+                       self.args.path[0],
+                       '@%s' % response_file.name,
+                       '--all',
+                       '--as=output')
+      ret, out, _ = self.Run(cmd, force_verbose=False)
+      if ret and not 'The input matches no targets' in out:
+        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
+                                  output_path)
+      build_dir = self.ToSrcRelPath(self.args.path[0]) + self.sep
+      for output in out.splitlines():
+        build_output = output.replace(build_dir, '')
+        if build_output in targets:
+          matching_targets.add(build_output)
+
+      cmd = self.GNCmd('refs',
+                       self.args.path[0],
+                       '@%s' % response_file.name,
+                       '--all')
+      ret, out, _ = self.Run(cmd, force_verbose=False)
+      if ret and not 'The input matches no targets' in out:
+        self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
+                                  output_path)
+      for label in out.splitlines():
+        build_target = label[2:]
+        # We want to accept 'chrome/android:chrome_public_apk' and
+        # just 'chrome_public_apk'. This may result in too many targets
+        # getting built, but we can adjust that later if need be.
+        for input_target in targets:
+          if (input_target == build_target or
+              build_target.endswith(':' + input_target)):
+            matching_targets.add(input_target)
+    finally:
+      self.RemoveFile(response_file.name)
+
+    if matching_targets:
+      self.WriteJSON({
+            'status': 'Found dependency',
+            'compile_targets': sorted(matching_targets),
+            'test_targets': sorted(matching_targets &
+                                   set(inp['test_targets'])),
+          }, output_path)
+    else:
+      self.WriteJSON({
+          'status': 'No dependency',
+          'compile_targets': [],
+          'test_targets': [],
+      }, output_path)
+
+    if self.args.verbose:
+      outp = json.loads(self.ReadFile(output_path))
+      self.Print()
+      self.Print('analyze output:')
+      self.PrintJSON(outp)
+      self.Print()
+
+    return 0
+
+  def ReadInputJSON(self, required_keys):
+    path = self.args.input_path[0]
+    output_path = self.args.output_path[0]
+    if not self.Exists(path):
+      self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)
+
+    try:
+      inp = json.loads(self.ReadFile(path))
+    except Exception as e:
+      self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
+                                (path, e), output_path)
+
+    for k in required_keys:
+      if not k in inp:
+        self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
+                                  output_path)
+
+    return inp
+
+  def WriteFailureAndRaise(self, msg, output_path):
+    if output_path:
+      self.WriteJSON({'error': msg}, output_path, force_verbose=True)
+    raise MBErr(msg)
+
+  def WriteJSON(self, obj, path, force_verbose=False):
+    try:
+      self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n',
+                     force_verbose=force_verbose)
+    except Exception as e:
+      raise MBErr('Error %s writing to the output path "%s"' %
+                 (e, path))
+
+  def CheckCompile(self, master, builder):
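+    # Fetches recent builds for the builder from the buildbot JSON endpoint
+    # and reports whether its most recent successful build ran a compile or
+    # analyze step.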
+    url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1'
+    url = urllib2.quote(url_template.format(master=master, builder=builder),
+                        safe=':/()?=')
+    try:
+      builds = json.loads(self.Fetch(url))
+    except Exception as e:
+      return str(e)
+    successes = sorted(
+        [int(x) for x in builds.keys() if "text" in builds[x] and
+          cmp(builds[x]["text"][:2], ["build", "successful"]) == 0],
+        reverse=True)
+    if not successes:
+      return "no successful builds"
+    build = builds[str(successes[0])]
+    step_names = set([step["name"] for step in build["steps"]])
+    compile_indicators = set(["compile", "compile (with patch)", "analyze"])
+    if compile_indicators & step_names:
+      return "compiles"
+    return "does not compile"
+
+  def PrintCmd(self, cmd, env):
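+    # Prints the command (and the relevant GYP environment variables) in a
+    # form that can be copied and pasted into a shell, or into cmd.exe on
+    # Windows.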
+    if self.platform == 'win32':
+      env_prefix = 'set '
+      env_quoter = QuoteForSet
+      shell_quoter = QuoteForCmd
+    else:
+      env_prefix = ''
+      env_quoter = pipes.quote
+      shell_quoter = pipes.quote
+
+    def print_env(var):
+      if env and var in env:
+        self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
+
+    print_env('GYP_CROSSCOMPILE')
+    print_env('GYP_DEFINES')
+    print_env('GYP_LINK_CONCURRENCY')
+    print_env('LLVM_FORCE_HEAD_REVISION')
+
+    if cmd[0] == self.executable:
+      cmd = ['python'] + cmd[1:]
+    self.Print(*[shell_quoter(arg) for arg in cmd])
+
+  def PrintJSON(self, obj):
+    self.Print(json.dumps(obj, indent=2, sort_keys=True))
+
+  def GNTargetName(self, target):
+    return target
+
+  def Build(self, target):
+    build_dir = self.ToSrcRelPath(self.args.path[0])
+    ninja_cmd = ['ninja', '-C', build_dir]
+    if self.args.jobs:
+      ninja_cmd.extend(['-j', '%d' % self.args.jobs])
+    ninja_cmd.append(target)
+    ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False)
+    return ret
+
+  def Run(self, cmd, env=None, force_verbose=True, buffer_output=True):
+    # This function largely exists so it can be overridden for testing.
+    if self.args.dryrun or self.args.verbose or force_verbose:
+      self.PrintCmd(cmd, env)
+    if self.args.dryrun:
+      return 0, '', ''
+
+    ret, out, err = self.Call(cmd, env=env, buffer_output=buffer_output)
+    if self.args.verbose or force_verbose:
+      if ret:
+        self.Print('  -> returned %d' % ret)
+      if out:
+        self.Print(out, end='')
+      if err:
+        self.Print(err, end='', file=sys.stderr)
+    return ret, out, err
+
+  def Call(self, cmd, env=None, buffer_output=True):
+    if buffer_output:
+      p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
+                           stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                           env=env)
+      out, err = p.communicate()
+    else:
+      p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
+                           env=env)
+      p.wait()
+      out = err = ''
+    return p.returncode, out, err
+
+  def ExpandUser(self, path):
+    # This function largely exists so it can be overridden for testing.
+    return os.path.expanduser(path)
+
+  def Exists(self, path):
+    # This function largely exists so it can be overridden for testing.
+    return os.path.exists(path)
+
+  def Fetch(self, url):
+    # This function largely exists so it can be overridden for testing.
+    f = urllib2.urlopen(url)
+    contents = f.read()
+    f.close()
+    return contents
+
+  def MaybeMakeDirectory(self, path):
+    try:
+      os.makedirs(path)
+    except OSError as e:
+      if e.errno != errno.EEXIST:
+        raise
+
+  def PathJoin(self, *comps):
+    # This function largely exists so it can be overridden for testing.
+    return os.path.join(*comps)
+
+  def Print(self, *args, **kwargs):
+    # This function largely exists so it can be overridden for testing.
+    print(*args, **kwargs)
+    if kwargs.get('file', sys.stdout) == sys.stdout:
+      sys.stdout.flush()
+
+  def ReadFile(self, path):
+    # This function largely exists so it can be overridden for testing.
+    with open(path) as fp:
+      return fp.read()
+
+  def RelPath(self, path, start='.'):
+    # This function largely exists so it can be overridden for testing.
+    return os.path.relpath(path, start)
+
+  def RemoveFile(self, path):
+    # This function largely exists so it can be overridden for testing.
+    os.remove(path)
+
+  def RemoveDirectory(self, abs_path):
+    if self.platform == 'win32':
+      # In other places in chromium, we often have to retry this command
+      # because we're worried about other processes still holding on to
+      # file handles, but when MB is invoked, it will be early enough in the
+      # build that there should be no other processes to interfere. We
+      # can change this if need be.
+      self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
+    else:
+      shutil.rmtree(abs_path, ignore_errors=True)
+
+  def TempFile(self, mode='w'):
+    # This function largely exists so it can be overridden for testing.
+    return tempfile.NamedTemporaryFile(mode=mode, delete=False)
+
+  def WriteFile(self, path, contents, force_verbose=False):
+    # This function largely exists so it can be overridden for testing.
+    if self.args.dryrun or self.args.verbose or force_verbose:
+      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
+    with open(path, 'w') as fp:
+      return fp.write(contents)
+
+
+class MBErr(Exception):
+  pass
+
+
+# See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful
+# details of this next section, which handles escaping command lines
+# so that they can be copied and pasted into a cmd window.
+UNSAFE_FOR_SET = set('^<>&|')
+UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%'))
+ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"'))
+
+
+def QuoteForSet(arg):
+  if any(a in UNSAFE_FOR_SET for a in arg):
+    arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg)
+  return arg
+
+
+def QuoteForCmd(arg):
+  # First, escape the arg so that CommandLineToArgvW will parse it properly.
+  # From //tools/gyp/pylib/gyp/msvs_emulation.py:23.
+  if arg == '' or ' ' in arg or '"' in arg:
+    quote_re = re.compile(r'(\\*)"')
+    arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg))
+
+  # Then check to see if the arg contains any metacharacters other than
+  # double quotes; if it does, quote everything (including the double
+  # quotes) for safety.
+  if any(a in UNSAFE_FOR_CMD for a in arg):
+    arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg)
+  return arg
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/tools/mb/mb_unittest.py b/tools/mb/mb_unittest.py
new file mode 100755
index 0000000..ac58c02
--- /dev/null
+++ b/tools/mb/mb_unittest.py
@@ -0,0 +1,572 @@
+#!/usr/bin/python
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for mb.py."""
+
+import json
+import StringIO
+import os
+import sys
+import unittest
+
+import mb
+
+
+class FakeMBW(mb.MetaBuildWrapper):
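+  # Test double that overrides all filesystem, subprocess, and output access
+  # so the tests run hermetically against the in-memory self.files dict.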
+  def __init__(self, win32=False):
+    super(FakeMBW, self).__init__()
+
+    # Override vars for test portability.
+    if win32:
+      self.chromium_src_dir = 'c:\\fake_src'
+      self.default_config = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
+      self.platform = 'win32'
+      self.executable = 'c:\\python\\python.exe'
+      self.sep = '\\'
+    else:
+      self.chromium_src_dir = '/fake_src'
+      self.default_config = '/fake_src/tools/mb/mb_config.pyl'
+      self.executable = '/usr/bin/python'
+      self.platform = 'linux2'
+      self.sep = '/'
+
+    self.files = {}
+    self.calls = []
+    self.cmds = []
+    self.cross_compile = None
+    self.out = ''
+    self.err = ''
+    self.rmdirs = []
+
+  def ExpandUser(self, path):
+    return '$HOME/%s' % path
+
+  def Exists(self, path):
+    return self.files.get(path) is not None
+
+  def MaybeMakeDirectory(self, path):
+    self.files[path] = True
+
+  def PathJoin(self, *comps):
+    return self.sep.join(comps)
+
+  def ReadFile(self, path):
+    return self.files[path]
+
+  def WriteFile(self, path, contents, force_verbose=False):
+    if self.args.dryrun or self.args.verbose or force_verbose:
+      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
+    self.files[path] = contents
+
+  def Call(self, cmd, env=None, buffer_output=True):
+    if env:
+      self.cross_compile = env.get('GYP_CROSSCOMPILE')
+    self.calls.append(cmd)
+    if self.cmds:
+      return self.cmds.pop(0)
+    return 0, '', ''
+
+  def Print(self, *args, **kwargs):
+    sep = kwargs.get('sep', ' ')
+    end = kwargs.get('end', '\n')
+    f = kwargs.get('file', sys.stdout)
+    if f == sys.stderr:
+      self.err += sep.join(args) + end
+    else:
+      self.out += sep.join(args) + end
+
+  def TempFile(self, mode='w'):
+    return FakeFile(self.files)
+
+  def RemoveFile(self, path):
+    del self.files[path]
+
+  def RemoveDirectory(self, path):
+    self.rmdirs.append(path)
+    files_to_delete = [f for f in self.files if f.startswith(path)]
+    for f in files_to_delete:
+      self.files[f] = None
+
+
+class FakeFile(object):
+  def __init__(self, files):
+    self.name = '/tmp/file'
+    self.buf = ''
+    self.files = files
+
+  def write(self, contents):
+    self.buf += contents
+
+  def close(self):
+    self.files[self.name] = self.buf
+
+
+TEST_CONFIG = """\
+{
+  'masters': {
+    'chromium': {},
+    'fake_master': {
+      'fake_builder': 'gyp_rel_bot',
+      'fake_gn_builder': 'gn_rel_bot',
+      'fake_gyp_crosscompile_builder': 'gyp_crosscompile',
+      'fake_gn_debug_builder': 'gn_debug_goma',
+      'fake_gyp_builder': 'gyp_debug',
+      'fake_gn_args_bot': '//build/args/bots/fake_master/fake_gn_args_bot.gn',
+      'fake_multi_phase': ['gn_phase_1', 'gn_phase_2'],
+    },
+  },
+  'configs': {
+    'gyp_rel_bot': ['gyp', 'rel', 'goma'],
+    'gn_debug_goma': ['gn', 'debug', 'goma'],
+    'gyp_debug': ['gyp', 'debug', 'fake_feature1'],
+    'gn_rel_bot': ['gn', 'rel', 'goma'],
+    'gyp_crosscompile': ['gyp', 'crosscompile'],
+    'gn_phase_1': ['gn', 'phase_1'],
+    'gn_phase_2': ['gn', 'phase_2'],
+  },
+  'mixins': {
+    'crosscompile': {
+      'gyp_crosscompile': True,
+    },
+    'fake_feature1': {
+      'gn_args': 'enable_doom_melon=true',
+      'gyp_defines': 'doom_melon=1',
+    },
+    'gyp': {'type': 'gyp'},
+    'gn': {'type': 'gn'},
+    'goma': {
+      'gn_args': 'use_goma=true',
+      'gyp_defines': 'goma=1',
+    },
+    'phase_1': {
+      'gn_args': 'phase=1',
+      'gyp_args': 'phase=1',
+    },
+    'phase_2': {
+      'gn_args': 'phase=2',
+      'gyp_args': 'phase=2',
+    },
+    'rel': {
+      'gn_args': 'is_debug=false',
+    },
+    'debug': {
+      'gn_args': 'is_debug=true',
+    },
+  },
+}
+"""
+
+
+TEST_BAD_CONFIG = """\
+{
+  'configs': {
+    'gn_rel_bot_1': ['gn', 'rel', 'chrome_with_codecs'],
+    'gn_rel_bot_2': ['gn', 'rel', 'bad_nested_config'],
+  },
+  'masters': {
+    'chromium': {
+      'a': 'gn_rel_bot_1',
+      'b': 'gn_rel_bot_2',
+    },
+  },
+  'mixins': {
+    'gn': {'type': 'gn'},
+    'chrome_with_codecs': {
+      'gn_args': 'proprietary_codecs=true',
+    },
+    'bad_nested_config': {
+      'mixins': ['chrome_with_codecs'],
+    },
+    'rel': {
+      'gn_args': 'is_debug=false',
+    },
+  },
+}
+"""
+
+
+GYP_HACKS_CONFIG = """\
+{
+  'masters': {
+    'chromium': {},
+    'fake_master': {
+      'fake_builder': 'fake_config',
+    },
+  },
+  'configs': {
+    'fake_config': ['fake_mixin'],
+  },
+  'mixins': {
+    'fake_mixin': {
+      'type': 'gyp',
+      'gn_args': '',
+      'gyp_defines':
+         ('foo=bar llvm_force_head_revision=1 '
+          'gyp_link_concurrency=1 baz=1'),
+    },
+  },
+}
+"""
+
+
+class UnitTest(unittest.TestCase):
+  def fake_mbw(self, files=None, win32=False):
+    mbw = FakeMBW(win32=win32)
+    mbw.files.setdefault(mbw.default_config, TEST_CONFIG)
+    mbw.files.setdefault(
+        mbw.ToAbsPath('//build/args/bots/fake_master/fake_gn_args_bot.gn'),
+        'is_debug = false\n')
+    if files:
+      for path, contents in files.items():
+        mbw.files[path] = contents
+    return mbw
+
+  def check(self, args, mbw=None, files=None, out=None, err=None, ret=None):
+    if not mbw:
+      mbw = self.fake_mbw(files)
+
+    actual_ret = mbw.Main(args)
+
+    self.assertEqual(actual_ret, ret)
+    if out is not None:
+      self.assertEqual(mbw.out, out)
+    if err is not None:
+      self.assertEqual(mbw.err, err)
+    return mbw
+
+  def test_clobber(self):
+    files = {
+      '/fake_src/out/Debug': None,
+      '/fake_src/out/Debug/mb_type': None,
+    }
+    mbw = self.fake_mbw(files)
+
+    # The first time we run this, the build dir doesn't exist, so no clobber.
+    self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
+    self.assertEqual(mbw.rmdirs, [])
+    self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
+
+    # The second time we run this, the build dir exists and matches, so no
+    # clobber.
+    self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
+    self.assertEqual(mbw.rmdirs, [])
+    self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
+
+    # Now we switch build types; this should result in a clobber.
+    self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
+    self.assertEqual(mbw.rmdirs, ['/fake_src/out/Debug'])
+    self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
+
+    # Now we delete mb_type; this checks the case where the build dir
+    # exists but wasn't populated by mb; this should also result in a clobber.
+    del mbw.files['/fake_src/out/Debug/mb_type']
+    self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
+    self.assertEqual(mbw.rmdirs,
+                     ['/fake_src/out/Debug', '/fake_src/out/Debug'])
+    self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
+
+  def test_gn_analyze(self):
+    files = {'/tmp/in.json': """{\
+               "files": ["foo/foo_unittest.cc"],
+               "test_targets": ["foo_unittests", "bar_unittests"],
+               "additional_compile_targets": []
+             }"""}
+
+    mbw = self.fake_mbw(files)
+    mbw.Call = lambda cmd, env=None, buffer_output=True: (
+        0, 'out/Default/foo_unittests\n', '')
+
+    self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+                '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+    out = json.loads(mbw.files['/tmp/out.json'])
+    self.assertEqual(out, {
+      'status': 'Found dependency',
+      'compile_targets': ['foo_unittests'],
+      'test_targets': ['foo_unittests']
+    })
+
+  def test_gn_analyze_fails(self):
+    files = {'/tmp/in.json': """{\
+               "files": ["foo/foo_unittest.cc"],
+               "test_targets": ["foo_unittests", "bar_unittests"],
+               "additional_compile_targets": []
+             }"""}
+
+    mbw = self.fake_mbw(files)
+    mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+
+    self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+                '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=1)
+
+  def test_gn_analyze_all(self):
+    files = {'/tmp/in.json': """{\
+               "files": ["foo/foo_unittest.cc"],
+               "test_targets": ["bar_unittests"],
+               "additional_compile_targets": ["all"]
+             }"""}
+    mbw = self.fake_mbw(files)
+    mbw.Call = lambda cmd, env=None, buffer_output=True: (
+        0, 'out/Default/foo_unittests\n', '')
+    self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+                '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+    out = json.loads(mbw.files['/tmp/out.json'])
+    self.assertEqual(out, {
+      'status': 'Found dependency (all)',
+      'compile_targets': ['all', 'bar_unittests'],
+      'test_targets': ['bar_unittests'],
+    })
+
+  def test_gn_analyze_missing_file(self):
+    files = {'/tmp/in.json': """{\
+               "files": ["foo/foo_unittest.cc"],
+               "test_targets": ["bar_unittests"],
+               "additional_compile_targets": []
+             }"""}
+    mbw = self.fake_mbw(files)
+    mbw.cmds = [
+        (0, '', ''),
+        (1, 'The input matches no targets, configs, or files\n', ''),
+        (1, 'The input matches no targets, configs, or files\n', ''),
+    ]
+
+    self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+                '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+    out = json.loads(mbw.files['/tmp/out.json'])
+    self.assertEqual(out, {
+      'status': 'No dependency',
+      'compile_targets': [],
+      'test_targets': [],
+    })
+
+  def test_gn_gen(self):
+    mbw = self.fake_mbw()
+    self.check(['gen', '-c', 'gn_debug_goma', '//out/Default', '-g', '/goma'],
+               mbw=mbw, ret=0)
+    self.assertMultiLineEqual(mbw.files['/fake_src/out/Default/args.gn'],
+                              ('goma_dir = "/goma"\n'
+                               'is_debug = true\n'
+                               'use_goma = true\n'))
+
+    # Make sure we log both what is written to args.gn and the command line.
+    self.assertIn('Writing """', mbw.out)
+    self.assertIn('/fake_src/buildtools/linux64/gn gen //out/Default --check',
+                  mbw.out)
+
+    mbw = self.fake_mbw(win32=True)
+    self.check(['gen', '-c', 'gn_debug_goma', '-g', 'c:\\goma', '//out/Debug'],
+               mbw=mbw, ret=0)
+    self.assertMultiLineEqual(mbw.files['c:\\fake_src\\out\\Debug\\args.gn'],
+                              ('goma_dir = "c:\\\\goma"\n'
+                               'is_debug = true\n'
+                               'use_goma = true\n'))
+    self.assertIn('c:\\fake_src\\buildtools\\win\\gn.exe gen //out/Debug '
+                  '--check\n', mbw.out)
+
+    mbw = self.fake_mbw()
+    self.check(['gen', '-m', 'fake_master', '-b', 'fake_gn_args_bot',
+                '//out/Debug'],
+               mbw=mbw, ret=0)
+    self.assertEqual(
+        mbw.files['/fake_src/out/Debug/args.gn'],
+        'import("//build/args/bots/fake_master/fake_gn_args_bot.gn")\n')
+
+
+  def test_gn_gen_fails(self):
+    mbw = self.fake_mbw()
+    mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+    self.check(['gen', '-c', 'gn_debug_goma', '//out/Default'], mbw=mbw, ret=1)
+
+  def test_gn_gen_swarming(self):
+    files = {
+      '/tmp/swarming_targets': 'base_unittests\n',
+      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+          "{'base_unittests': {"
+          "  'label': '//base:base_unittests',"
+          "  'type': 'raw',"
+          "  'args': [],"
+          "}}\n"
+      ),
+      '/fake_src/out/Default/base_unittests.runtime_deps': (
+          "base_unittests\n"
+      ),
+    }
+    mbw = self.fake_mbw(files)
+    self.check(['gen',
+                '-c', 'gn_debug_goma',
+                '--swarming-targets-file', '/tmp/swarming_targets',
+                '//out/Default'], mbw=mbw, ret=0)
+    self.assertIn('/fake_src/out/Default/base_unittests.isolate',
+                  mbw.files)
+    self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json',
+                  mbw.files)
+
+  def test_gn_isolate(self):
+    files = {
+      '/fake_src/out/Default/toolchain.ninja': "",
+      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+          "{'base_unittests': {"
+          "  'label': '//base:base_unittests',"
+          "  'type': 'raw',"
+          "  'args': [],"
+          "}}\n"
+      ),
+      '/fake_src/out/Default/base_unittests.runtime_deps': (
+          "base_unittests\n"
+      ),
+    }
+    self.check(['isolate', '-c', 'gn_debug_goma', '//out/Default',
+                'base_unittests'], files=files, ret=0)
+
+    # test running isolate on an existing build_dir
+    files['/fake_src/out/Default/args.gn'] = 'is_debug = True\n'
+    self.check(['isolate', '//out/Default', 'base_unittests'],
+               files=files, ret=0)
+
+    files['/fake_src/out/Default/mb_type'] = 'gn\n'
+    self.check(['isolate', '//out/Default', 'base_unittests'],
+               files=files, ret=0)
+
+  def test_gn_run(self):
+    files = {
+      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+          "{'base_unittests': {"
+          "  'label': '//base:base_unittests',"
+          "  'type': 'raw',"
+          "  'args': [],"
+          "}}\n"
+      ),
+      '/fake_src/out/Default/base_unittests.runtime_deps': (
+          "base_unittests\n"
+      ),
+    }
+    self.check(['run', '-c', 'gn_debug_goma', '//out/Default',
+                'base_unittests'], files=files, ret=0)
+
+  def test_gn_lookup(self):
+    self.check(['lookup', '-c', 'gn_debug_goma'], ret=0)
+
+  def test_gn_lookup_goma_dir_expansion(self):
+    self.check(['lookup', '-c', 'gn_rel_bot', '-g', '/foo'], ret=0,
+               out=('\n'
+                    'Writing """\\\n'
+                    'goma_dir = "/foo"\n'
+                    'is_debug = false\n'
+                    'use_goma = true\n'
+                    '""" to _path_/args.gn.\n\n'
+                    '/fake_src/buildtools/linux64/gn gen _path_\n'))
+
+  def test_gyp_analyze(self):
+    mbw = self.check(['analyze', '-c', 'gyp_rel_bot', '//out/Release',
+                      '/tmp/in.json', '/tmp/out.json'], ret=0)
+    self.assertIn('analyzer', mbw.calls[0])
+
+  def test_gyp_crosscompile(self):
+    mbw = self.fake_mbw()
+    self.check(['gen', '-c', 'gyp_crosscompile', '//out/Release'],
+               mbw=mbw, ret=0)
+    self.assertTrue(mbw.cross_compile)
+
+  def test_gyp_gen(self):
+    self.check(['gen', '-c', 'gyp_rel_bot', '-g', '/goma', '//out/Release'],
+               ret=0,
+               out=("GYP_DEFINES='goma=1 gomadir=/goma'\n"
+                    "python build/gyp_chromium -G output_dir=out\n"))
+
+    mbw = self.fake_mbw(win32=True)
+    self.check(['gen', '-c', 'gyp_rel_bot', '-g', 'c:\\goma', '//out/Release'],
+               mbw=mbw, ret=0,
+               out=("set GYP_DEFINES=goma=1 gomadir='c:\\goma'\n"
+                    "python build\\gyp_chromium -G output_dir=out\n"))
+
+  def test_gyp_gen_fails(self):
+    mbw = self.fake_mbw()
+    mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+    self.check(['gen', '-c', 'gyp_rel_bot', '//out/Release'], mbw=mbw, ret=1)
+
+  def test_gyp_lookup_goma_dir_expansion(self):
+    self.check(['lookup', '-c', 'gyp_rel_bot', '-g', '/foo'], ret=0,
+               out=("GYP_DEFINES='goma=1 gomadir=/foo'\n"
+                    "python build/gyp_chromium -G output_dir=_path_\n"))
+
+  def test_help(self):
+    orig_stdout = sys.stdout
+    try:
+      sys.stdout = StringIO.StringIO()
+      self.assertRaises(SystemExit, self.check, ['-h'])
+      self.assertRaises(SystemExit, self.check, ['help'])
+      self.assertRaises(SystemExit, self.check, ['help', 'gen'])
+    finally:
+      sys.stdout = orig_stdout
+
+  def test_multiple_phases(self):
+    # Check that not passing a --phase to a multi-phase builder fails.
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_multi_phase'],
+                     ret=1)
+    self.assertIn('Must specify a build --phase', mbw.out)
+
+    # Check that passing a --phase to a single-phase builder fails.
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_gn_builder',
+                      '--phase', '1'],
+                     ret=1)
+    self.assertIn('Must not specify a build --phase', mbw.out)
+
+    # Check different ranges; 0 and 3 are out of bounds, 1 and 2 should work.
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_multi_phase',
+                      '--phase', '0'], ret=1)
+    self.assertIn('Phase 0 out of bounds', mbw.out)
+
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_multi_phase',
+                      '--phase', '1'], ret=0)
+    self.assertIn('phase = 1', mbw.out)
+
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_multi_phase',
+                      '--phase', '2'], ret=0)
+    self.assertIn('phase = 2', mbw.out)
+
+    mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_multi_phase',
+                      '--phase', '3'], ret=1)
+    self.assertIn('Phase 3 out of bounds', mbw.out)
+
+  def test_validate(self):
+    mbw = self.fake_mbw()
+    self.check(['validate'], mbw=mbw, ret=0)
+
+  def test_bad_validate(self):
+    mbw = self.fake_mbw()
+    mbw.files[mbw.default_config] = TEST_BAD_CONFIG
+    self.check(['validate'], mbw=mbw, ret=1)
+
+  def test_gyp_env_hacks(self):
+    mbw = self.fake_mbw()
+    mbw.files[mbw.default_config] = GYP_HACKS_CONFIG
+    self.check(['lookup', '-c', 'fake_config'], mbw=mbw,
+               ret=0,
+               out=("GYP_DEFINES='foo=bar baz=1'\n"
+                    "GYP_LINK_CONCURRENCY=1\n"
+                    "LLVM_FORCE_HEAD_REVISION=1\n"
+                    "python build/gyp_chromium -G output_dir=_path_\n"))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/oom_dump/README b/tools/oom_dump/README
deleted file mode 100644
index 1d840b9..0000000
--- a/tools/oom_dump/README
+++ /dev/null
@@ -1,33 +0,0 @@
-oom_dump extracts useful information from Google Chrome OOM minidumps.
-
-To build one needs a google-breakpad checkout
-(http://code.google.com/p/google-breakpad/).
-
-First, one needs to build and install breakpad itself. For instructions
-check google-breakpad, but currently it's as easy as:
-
-  ./configure
-  make
-  sudo make install
-
-(the catch: breakpad installs .so into /usr/local/lib, so you might
-need some additional tweaking to make it discoverable, for example,
-put a soft link into /usr/lib directory).
-
-Next step is to build v8.  Note: you should build x64 version of v8,
-if you're on 64-bit platform, otherwise you would get a link error when
-building oom_dump.  Also, if you are testing against an older version of chrome
-you should build the corresponding version of V8 to make sure that the type-id 
-enum have the correct values.
-
-The last step is to build oom_dump itself.  The following command should work:
-
-  cd <v8 working copy>/tools/oom_dump
-  scons BREAKPAD_DIR=<path to google-breakpad working copy>
-
-(Additionally you can control v8 working copy dir, but the default should work.)
-
-If everything goes fine, oom_dump <path to minidump> should print
-some useful information about the OOM crash.
-
-Note: currently only 32-bit Windows minidumps are supported.
diff --git a/tools/oom_dump/SConstruct b/tools/oom_dump/SConstruct
deleted file mode 100644
index f228c89..0000000
--- a/tools/oom_dump/SConstruct
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2010 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-#       notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-#       copyright notice, this list of conditions and the following
-#       disclaimer in the documentation and/or other materials provided
-#       with the distribution.
-#     * Neither the name of Google Inc. nor the names of its
-#       contributors may be used to endorse or promote products derived
-#       from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-vars = Variables('custom.py')
-vars.Add(PathVariable('BREAKPAD_DIR',
-                      'Path to checkout of google-breakpad project',
-                      '~/google-breakpad',
-                      PathVariable.PathIsDir))
-vars.Add(PathVariable('V8_DIR',
-                      'Path to checkout of v8 project',
-                      '../..',
-                      PathVariable.PathIsDir))
-
-env = Environment(variables = vars,
-                  CPPPATH = ['${BREAKPAD_DIR}/src', '${V8_DIR}/src'],
-                  LIBPATH = ['/usr/local/lib', '${V8_DIR}'])
-
-env.Program('oom_dump.cc', LIBS = ['breakpad', 'v8', 'pthread'])
diff --git a/tools/oom_dump/oom_dump.cc b/tools/oom_dump/oom_dump.cc
deleted file mode 100644
index 581e191..0000000
--- a/tools/oom_dump/oom_dump.cc
+++ /dev/null
@@ -1,283 +0,0 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include <stdio.h>
-#include <stdlib.h>
-
-#include <algorithm>
-
-#include <google_breakpad/processor/minidump.h>
-
-#include <v8.h>
-
-namespace {
-
-using google_breakpad::Minidump;
-using google_breakpad::MinidumpContext;
-using google_breakpad::MinidumpThread;
-using google_breakpad::MinidumpThreadList;
-using google_breakpad::MinidumpException;
-using google_breakpad::MinidumpMemoryRegion;
-
-const char* InstanceTypeToString(int type) {
-  static char const* names[v8::internal::LAST_TYPE] = {0};
-  if (names[v8::internal::STRING_TYPE] == NULL) {
-    using namespace v8::internal;
-#define SET(type) names[type] = #type;
-    INSTANCE_TYPE_LIST(SET)
-#undef SET
-  }
-  return names[type];
-}
-
-
-u_int32_t ReadPointedValue(MinidumpMemoryRegion* region,
-                           u_int64_t base,
-                           int offset) {
-  u_int32_t ptr = 0;
-  CHECK(region->GetMemoryAtAddress(base + 4 * offset, &ptr));
-  u_int32_t value = 0;
-  CHECK(region->GetMemoryAtAddress(ptr, &value));
-  return value;
-}
-
-
-void ReadArray(MinidumpMemoryRegion* region,
-               u_int64_t array_ptr,
-               int size,
-               int* output) {
-  for (int i = 0; i < size; i++) {
-    u_int32_t value;
-    CHECK(region->GetMemoryAtAddress(array_ptr + 4 * i, &value));
-    output[i] = value;
-  }
-}
-
-
-u_int32_t ReadArrayFrom(MinidumpMemoryRegion* region,
-                        u_int64_t base,
-                        int offset,
-                        int size,
-                        int* output) {
-  u_int32_t ptr = 0;
-  CHECK(region->GetMemoryAtAddress(base + 4 * offset, &ptr));
-  ReadArray(region, ptr, size, output);
-}
-
-
-double toM(int size) {
-  return size / (1024. * 1024.);
-}
-
-
-class IndirectSorter {
- public:
-  explicit IndirectSorter(int* a) : a_(a) { }
-
-  bool operator() (int i0, int i1) {
-    return a_[i0] > a_[i1];
-  }
-
- private:
-  int* a_;
-};
-
-
-void DumpHeapStats(const char *minidump_file) {
-  Minidump minidump(minidump_file);
-  CHECK(minidump.Read());
-
-  MinidumpException *exception = minidump.GetException();
-  CHECK(exception);
-
-  MinidumpContext* crash_context = exception->GetContext();
-  CHECK(crash_context);
-
-  u_int32_t exception_thread_id = 0;
-  CHECK(exception->GetThreadID(&exception_thread_id));
-
-  MinidumpThreadList* thread_list = minidump.GetThreadList();
-  CHECK(thread_list);
-
-  MinidumpThread* exception_thread =
-      thread_list->GetThreadByID(exception_thread_id);
-  CHECK(exception_thread);
-
-  // Currently only 32-bit Windows minidumps are supported.
-  CHECK_EQ(MD_CONTEXT_X86, crash_context->GetContextCPU());
-
-  const MDRawContextX86* contextX86 = crash_context->GetContextX86();
-  CHECK(contextX86);
-
-  const u_int32_t esp = contextX86->esp;
-
-  MinidumpMemoryRegion* memory_region = exception_thread->GetMemory();
-  CHECK(memory_region);
-
-  const u_int64_t last = memory_region->GetBase() + memory_region->GetSize();
-
-  u_int64_t heap_stats_addr = 0;
-  for (u_int64_t addr = esp; addr < last; addr += 4) {
-    u_int32_t value = 0;
-    CHECK(memory_region->GetMemoryAtAddress(addr, &value));
-    if (value >= esp && value < last) {
-      u_int32_t value2 = 0;
-      CHECK(memory_region->GetMemoryAtAddress(value, &value2));
-      if (value2 == v8::internal::HeapStats::kStartMarker) {
-        heap_stats_addr = addr;
-        break;
-      }
-    }
-  }
-  CHECK(heap_stats_addr);
-
-  // Read heap stats.
-
-#define READ_FIELD(offset) \
-  ReadPointedValue(memory_region, heap_stats_addr, offset)
-
-  CHECK(READ_FIELD(0) == v8::internal::HeapStats::kStartMarker);
-  CHECK(READ_FIELD(24) == v8::internal::HeapStats::kEndMarker);
-
-  const int new_space_size = READ_FIELD(1);
-  const int new_space_capacity = READ_FIELD(2);
-  const int old_space_size = READ_FIELD(3);
-  const int old_space_capacity = READ_FIELD(4);
-  const int code_space_size = READ_FIELD(5);
-  const int code_space_capacity = READ_FIELD(6);
-  const int map_space_size = READ_FIELD(7);
-  const int map_space_capacity = READ_FIELD(8);
-  const int cell_space_size = READ_FIELD(9);
-  const int cell_space_capacity = READ_FIELD(10);
-  const int lo_space_size = READ_FIELD(11);
-  const int global_handle_count = READ_FIELD(12);
-  const int weak_global_handle_count = READ_FIELD(13);
-  const int pending_global_handle_count = READ_FIELD(14);
-  const int near_death_global_handle_count = READ_FIELD(15);
-  const int destroyed_global_handle_count = READ_FIELD(16);
-  const int memory_allocator_size = READ_FIELD(17);
-  const int memory_allocator_capacity = READ_FIELD(18);
-  const int os_error = READ_FIELD(19);
-#undef READ_FIELD
-
-  int objects_per_type[v8::internal::LAST_TYPE + 1] = {0};
-  ReadArrayFrom(memory_region, heap_stats_addr, 21,
-                v8::internal::LAST_TYPE + 1, objects_per_type);
-
-  int size_per_type[v8::internal::LAST_TYPE + 1] = {0};
-  ReadArrayFrom(memory_region, heap_stats_addr, 22, v8::internal::LAST_TYPE + 1,
-                size_per_type);
-
-  int js_global_objects =
-      objects_per_type[v8::internal::JS_GLOBAL_OBJECT_TYPE];
-  int js_builtins_objects =
-      objects_per_type[v8::internal::JS_BUILTINS_OBJECT_TYPE];
-  int js_global_proxies =
-      objects_per_type[v8::internal::JS_GLOBAL_PROXY_TYPE];
-
-  int indices[v8::internal::LAST_TYPE + 1];
-  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
-    indices[i] = i;
-  }
-
-  std::stable_sort(indices, indices + sizeof(indices)/sizeof(indices[0]),
-                  IndirectSorter(size_per_type));
-
-  int total_size = 0;
-  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
-    total_size += size_per_type[i];
-  }
-
-  // Print heap stats.
-
-  printf("exception thread ID: %" PRIu32 " (%#" PRIx32 ")\n",
-         exception_thread_id, exception_thread_id);
-  printf("heap stats address: %#" PRIx64 "\n", heap_stats_addr);
-#define PRINT_INT_STAT(stat) \
-    printf("\t%-25s\t% 10d\n", #stat ":", stat);
-#define PRINT_MB_STAT(stat) \
-    printf("\t%-25s\t% 10.3f MB\n", #stat ":", toM(stat));
-  PRINT_MB_STAT(new_space_size);
-  PRINT_MB_STAT(new_space_capacity);
-  PRINT_MB_STAT(old_space_size);
-  PRINT_MB_STAT(old_space_capacity);
-  PRINT_MB_STAT(code_space_size);
-  PRINT_MB_STAT(code_space_capacity);
-  PRINT_MB_STAT(map_space_size);
-  PRINT_MB_STAT(map_space_capacity);
-  PRINT_MB_STAT(cell_space_size);
-  PRINT_MB_STAT(cell_space_capacity);
-  PRINT_MB_STAT(lo_space_size);
-  PRINT_INT_STAT(global_handle_count);
-  PRINT_INT_STAT(weak_global_handle_count);
-  PRINT_INT_STAT(pending_global_handle_count);
-  PRINT_INT_STAT(near_death_global_handle_count);
-  PRINT_INT_STAT(destroyed_global_handle_count);
-  PRINT_MB_STAT(memory_allocator_size);
-  PRINT_MB_STAT(memory_allocator_capacity);
-  PRINT_INT_STAT(os_error);
-#undef PRINT_INT_STAT
-#undef PRINT_MB_STAT
-
-  printf("\n");
-
-  printf(
-      "\tJS_GLOBAL_OBJECT_TYPE/JS_BUILTINS_OBJECT_TYPE/JS_GLOBAL_PROXY_TYPE: "
-      "%d/%d/%d\n\n",
-      js_global_objects, js_builtins_objects, js_global_proxies);
-
-  int running_size = 0;
-  for (int i = 0; i <= v8::internal::LAST_TYPE; i++) {
-    int type = indices[i];
-    const char* name = InstanceTypeToString(type);
-    if (name == NULL) {
-      // Unknown instance type.  Check that there are no objects of that type.
-      CHECK_EQ(0, objects_per_type[type]);
-      CHECK_EQ(0, size_per_type[type]);
-      continue;
-    }
-    int size = size_per_type[type];
-    running_size += size;
-    printf("\t%-37s% 9d% 11.3f MB% 10.3f%%% 10.3f%%\n",
-           name, objects_per_type[type], toM(size),
-           100. * size / total_size, 100. * running_size / total_size);
-  }
-  printf("\t%-37s% 9d% 11.3f MB% 10.3f%%% 10.3f%%\n",
-         "total", 0, toM(total_size), 100., 100.);
-}
-
-}  // namespace
-
-int main(int argc, char **argv) {
-  if (argc != 2) {
-    fprintf(stderr, "usage: %s <minidump>\n", argv[0]);
-    return 1;
-  }
-
-  DumpHeapStats(argv[1]);
-
-  return 0;
-}
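The removed tool locates the HeapStats block by scanning the crashed thread's stack: every 32-bit word from esp upward is treated as a candidate pointer, and a hit is a word that points back into the stack region at a location holding kStartMarker. A minimal Python sketch of that scan over a raw memory buffer, with a placeholder marker value (the real constant is v8::internal::HeapStats::kStartMarker):

import struct

K_START_MARKER = 0xDECADE00  # placeholder value, not the real V8 marker

def find_heap_stats_addr(stack_bytes, base, esp):
    # Scan 32-bit little-endian words from esp to the end of the stack region
    # (bytes `stack_bytes` mapped at address `base`). A candidate qualifies when
    # it points back into the region and the pointed-to word is the start marker.
    last = base + len(stack_bytes)
    def read32(addr):
        return struct.unpack_from('<I', stack_bytes, addr - base)[0]
    for addr in range(esp, last - 3, 4):
        value = read32(addr)
        if esp <= value <= last - 4 and read32(value) == K_START_MARKER:
            return addr
    return None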
diff --git a/tools/presubmit.py b/tools/presubmit.py
index d503538..3be9caf 100755
--- a/tools/presubmit.py
+++ b/tools/presubmit.py
@@ -203,7 +203,7 @@
 
   def GetPathsToSearch(self):
     return ['src', 'include', 'samples', join('test', 'cctest'),
-            join('test', 'unittests')]
+            join('test', 'unittests'), join('test', 'inspector')]
 
   def GetCpplintScript(self, prio_path):
     for path in [prio_path] + os.environ["PATH"].split(os.pathsep):
@@ -295,13 +295,21 @@
 
   IGNORE_COPYRIGHTS = ['box2d.js',
                        'cpplint.py',
+                       'check_injected_script_source.py',
                        'copy.js',
                        'corrections.js',
                        'crypto.js',
                        'daemon.py',
+                       'debugger-script.js',
                        'earley-boyer.js',
                        'fannkuch.js',
                        'fasta.js',
+                       'generate_protocol_externs.py',
+                       'injected-script.cc',
+                       'injected-script.h',
+                       'injected-script-source.js',
+                       'java-script-call-frame.cc',
+                       'java-script-call-frame.h',
                        'jsmin.py',
                        'libraries.cc',
                        'libraries-empty.cc',
@@ -311,10 +319,19 @@
                        'primes.js',
                        'raytrace.js',
                        'regexp-pcre.js',
+                       'rjsmin.py',
+                       'script-breakpoint.h',
                        'sqlite.js',
                        'sqlite-change-heap.js',
                        'sqlite-pointer-masking.js',
                        'sqlite-safe-heap.js',
+                       'v8-debugger-script.h',
+                       'v8-function-call.cc',
+                       'v8-function-call.h',
+                       'v8-inspector-impl.cc',
+                       'v8-inspector-impl.h',
+                       'v8-runtime-agent-impl.cc',
+                       'v8-runtime-agent-impl.h',
                        'gnuplot-4.6.3-emscripten.js',
                        'zlib.js']
   IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js', 'html-comments.js']
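The IGNORE_COPYRIGHTS additions above exempt the new inspector sources from the copyright-header check by file name. A minimal sketch of how such a basename-based exemption is typically consumed (illustrative only, not the actual presubmit.py logic):

import os

IGNORE_COPYRIGHTS = ['injected-script.cc', 'v8-inspector-impl.cc']  # excerpt of the list above

def needs_copyright_check(path, ignore=IGNORE_COPYRIGHTS):
    # Files whose basename appears on the ignore list are skipped entirely.
    return os.path.basename(path) not in ignore

assert not needs_copyright_check('src/inspector/injected-script.cc')
assert needs_copyright_check('src/inspector/string-util.cc')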
diff --git a/tools/run-perf.sh b/tools/run-perf.sh
index 03123fd..8375093 100755
--- a/tools/run-perf.sh
+++ b/tools/run-perf.sh
@@ -45,9 +45,14 @@
   echo 0 | sudo tee $KERNEL_MAP_CONFIG_FILE
 fi
 
+# Extract the command being perfed, so that we can prepend arguments to the
+# arguments that the user supplied.
+COMMAND=$1
+shift 1
+
 echo "Running..."
 perf record -R \
   -e $EVENT_TYPE \
   -c $SAMPLE_EVERY_N_CYCLES \
   --call-graph $CALL_GRAPH_METHOD \
-  -i $@ --perf_basic_prof
+  -i "$COMMAND" --perf_basic_prof "$@"
diff --git a/tools/run-tests.py b/tools/run-tests.py
index de16463..f248dff 100755
--- a/tools/run-tests.py
+++ b/tools/run-tests.py
@@ -34,7 +34,7 @@
 import multiprocessing
 import optparse
 import os
-from os.path import join
+from os.path import getmtime, isdir, join
 import platform
 import random
 import shlex
@@ -55,6 +55,8 @@
 # Base dir of the v8 checkout to be used as cwd.
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
+DEFAULT_OUT_GN = "out.gn"
+
 ARCH_GUESS = utils.DefaultArch()
 
 # Map of test name synonyms to lists of test suites. Should be ordered by
@@ -102,6 +104,7 @@
   "ignition",
   "stress",
   "turbofan_opt",
+  "asm_wasm",
 ]
 
 EXHAUSTIVE_VARIANTS = VARIANTS + MORE_VARIANTS
@@ -294,6 +297,8 @@
                     " \"%s\"" % ",".join(EXHAUSTIVE_VARIANTS))
   result.add_option("--outdir", help="Base directory with compile output",
                     default="out")
+  result.add_option("--gn", help="Scan out.gn for the last built configuration",
+                    default=False, action="store_true")
   result.add_option("--predictable",
                     help="Compare output of several reruns of each test",
                     default=False, action="store_true")
@@ -427,6 +432,21 @@
   # First try to auto-detect configurations based on the build if GN was
   # used. This can't be overridden by cmd-line arguments.
   options.auto_detect = False
+  if options.gn:
+    gn_out_dir = os.path.join(BASE_DIR, DEFAULT_OUT_GN)
+    latest_timestamp = -1
+    latest_config = None
+    for gn_config in os.listdir(gn_out_dir):
+      gn_config_dir = os.path.join(gn_out_dir, gn_config)
+      if not isdir(gn_config_dir):
+        continue
+      if os.path.getmtime(gn_config_dir) > latest_timestamp:
+        latest_timestamp = os.path.getmtime(gn_config_dir)
+        latest_config = gn_config
+    if latest_config:
+      print(">>> Latest GN build found is %s" % latest_config)
+      options.outdir = os.path.join(DEFAULT_OUT_GN, latest_config)
+
   build_config_path = os.path.join(
       BASE_DIR, options.outdir, "v8_build_config.json")
   if os.path.exists(build_config_path):
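With --gn, the runner walks the subdirectories of out.gn and picks the most recently modified one as the build configuration to test. The same selection logic as a standalone sketch (directory names such as x64.release are hypothetical examples):

import os

def latest_gn_config(gn_out_dir):
    # Return the name of the most recently modified config directory under
    # out.gn, or None if no configuration has been built yet.
    latest_timestamp, latest_config = -1, None
    for name in os.listdir(gn_out_dir):
        path = os.path.join(gn_out_dir, name)
        if not os.path.isdir(path):
            continue
        mtime = os.path.getmtime(path)
        if mtime > latest_timestamp:
            latest_timestamp, latest_config = mtime, name
    return latest_config

# e.g. latest_gn_config('out.gn') -> 'x64.release' if that config was built last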
diff --git a/tools/swarming_client/LICENSE b/tools/swarming_client/LICENSE
deleted file mode 100644
index 95f08e1..0000000
--- a/tools/swarming_client/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2013 The LUCI Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/tools/swarming_client/PRESUBMIT.py b/tools/swarming_client/PRESUBMIT.py
deleted file mode 100644
index 86b3797..0000000
--- a/tools/swarming_client/PRESUBMIT.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Top-level presubmit script for swarm_client.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
-details on the presubmit API built into gcl.
-"""
-
-def CommonChecks(input_api, output_api):
-  import sys
-  def join(*args):
-    return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)
-
-  output = []
-  sys_path_backup = sys.path
-  try:
-    sys.path = [
-      input_api.PresubmitLocalPath(),
-      join('tests'),
-      join('third_party'),
-    ] + sys.path
-    output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
-  finally:
-    sys.path = sys_path_backup
-
-  # These tests are touching the live infrastructure. It's a pain if your IP
-  # is not whitelisted so do not run them for now. They should use a local fake
-  # web service instead.
-  blacklist = [
-    r'.*isolateserver_smoke_test\.py$',
-    r'.*isolateserver_load_test\.py$',
-    r'.*swarming_smoke_test\.py$',
-  ]
-  if not input_api.is_committing:
-    # Remove all slow tests, e.g. the ones that take >1s to complete.
-    blacklist.extend([
-      r'.*isolate_smoke_test\.py$',
-      r'.*trace_inputs_smoke_test\.py$',
-      r'.*url_open_timeout_test\.py$',
-    ])
-
-  unit_tests = input_api.canned_checks.GetUnitTestsRecursively(
-      input_api, output_api,
-      input_api.os_path.join(input_api.PresubmitLocalPath()),
-      whitelist=[r'.+_test\.py$'],
-      blacklist=blacklist)
-  output.extend(input_api.RunTests(unit_tests))
-  return output
-
-
-def CheckChangeOnUpload(input_api, output_api):
-  return CommonChecks(input_api, output_api)
-
-
-def CheckChangeOnCommit(input_api, output_api):
-  return CommonChecks(input_api, output_api)
diff --git a/tools/swarming_client/README.md b/tools/swarming_client/README.md
deleted file mode 100644
index cb6d5c0..0000000
--- a/tools/swarming_client/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# LUCI Python Client
-
-This is the Python Client code for [LUCI][1]. It's part of the [main python
-repo][2], and is also mirrored into a standalone [client-py repo][3].
-
-## License
-
-This project is licensed under Apache v2.0 license. See LICENSE for details.
-
-
-[1]: https://github.com/luci
-[2]: https://github.com/luci/luci-py
-[3]: https://github.com/luci/client-py
diff --git a/tools/swarming_client/README.py b/tools/swarming_client/README.py
deleted file mode 100755
index c14ec5c..0000000
--- a/tools/swarming_client/README.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-###
-# Run me to generate the documentation!
-###
-
-# Line too long (NN/80)
-# pylint: disable=C0301
-
-"""Test tracing and isolation infrastructure.
-
-A few scripts have strict dependency rules:
-- The pure tracing scripts (trace_*.py) do not know about isolate
-  infrastructure.
-"""
-
-import os
-import sys
-
-
-def main():
-  for i in sorted(os.listdir(os.path.dirname(os.path.abspath(__file__)))):
-    if not i.endswith('.py') or i == 'PRESUBMIT.py':
-      continue
-    module = __import__(i[:-3])
-    if hasattr(module, '__doc__'):
-      print module.__name__
-      print ''.join('  %s\n' % i for i in module.__doc__.splitlines())
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/WATCHLISTS b/tools/swarming_client/WATCHLISTS
deleted file mode 100644
index 03ac757..0000000
--- a/tools/swarming_client/WATCHLISTS
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# Watchlist Rules
-# Refer: http://dev.chromium.org/developers/contributing-code/watchlists
-
-{
-
-  'WATCHLIST_DEFINITIONS': {
-    'all': {
-      'filepath': '.+',
-    },
-  },
-
-  'WATCHLISTS': {
-    'all': [
-      'csharp+cc@chromium.org',
-      'vadimsh+cc@chromium.org',
-    ],
-  },
-
-}
diff --git a/tools/swarming_client/artool b/tools/swarming_client/artool
deleted file mode 100755
index 5beb3f0..0000000
--- a/tools/swarming_client/artool
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-exec python -m libs.arfile.cli $@
diff --git a/tools/swarming_client/auth.py b/tools/swarming_client/auth.py
deleted file mode 100755
index 4514fbf..0000000
--- a/tools/swarming_client/auth.py
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Client tool to perform various authentication related tasks."""
-
-__version__ = '0.4'
-
-import logging
-import optparse
-import sys
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from third_party.depot_tools import subcommand
-
-from utils import logging_utils
-from utils import on_error
-from utils import net
-from utils import oauth
-from utils import subprocess42
-from utils import tools
-
-
-class AuthServiceError(Exception):
-  """Unexpected response from authentication service."""
-
-
-class AuthService(object):
-  """Represents remote Authentication service."""
-
-  def __init__(self, url):
-    self._service = net.get_http_service(url)
-
-  def login(self, allow_user_interaction):
-    """Refreshes cached access token or creates a new one."""
-    return self._service.login(allow_user_interaction)
-
-  def logout(self):
-    """Purges cached access token."""
-    return self._service.logout()
-
-  def get_current_identity(self):
-    """Returns identity associated with currently used credentials.
-
-    Identity is a string:
-      user:<email> - if using OAuth or cookie based authentication.
-      bot:<id> - if using HMAC based authentication.
-      anonymous:anonymous - if not authenticated.
-    """
-    identity = self._service.json_request('/auth/api/v1/accounts/self')
-    if not identity:
-      raise AuthServiceError('Failed to fetch identity')
-    return identity['identity']
-
-
-def add_auth_options(parser):
-  """Adds command line options related to authentication."""
-  oauth.add_oauth_options(parser)
-
-
-def process_auth_options(parser, options):
-  """Configures process-wide authentication parameters based on |options|."""
-  try:
-    net.set_oauth_config(oauth.extract_oauth_config_from_options(options))
-  except ValueError as exc:
-    parser.error(str(exc))
-
-
-def normalize_host_url(url):
-  """Makes sure URL starts with http:// or https://."""
-  url = url.lower().rstrip('/')
-  if url.startswith('https://'):
-    return url
-  if url.startswith('http://'):
-    allowed = ('http://localhost:', 'http://127.0.0.1:', 'http://::1:')
-    if not url.startswith(allowed):
-      raise ValueError(
-          'URL must start with https:// or be on localhost with port number')
-    return url
-  return 'https://' + url
-
-
-def ensure_logged_in(server_url):
-  """Checks that user is logged in, asking to do it if not.
-
-  Raises:
-    ValueError if the server_url is not acceptable.
-  """
-  # It's just a waste of time on a headless bot (it can't do interactive login).
-  if tools.is_headless() or net.get_oauth_config().disabled:
-    return None
-  server_url = normalize_host_url(server_url)
-  service = AuthService(server_url)
-  try:
-    service.login(False)
-  except IOError:
-    raise ValueError('Failed to contact %s' % server_url)
-  try:
-    identity = service.get_current_identity()
-  except AuthServiceError:
-    raise ValueError('Failed to fetch identity from %s' % server_url)
-  if identity == 'anonymous:anonymous':
-    raise ValueError(
-        'Please login to %s: \n'
-        '  python auth.py login --service=%s' % (server_url, server_url))
-  email = identity.split(':')[1]
-  logging.info('Logged in to %s: %s', server_url, email)
-  return email
-
-
-@subcommand.usage('[options]')
-def CMDlogin(parser, args):
-  """Runs interactive login flow and stores auth token/cookie on disk."""
-  (options, args) = parser.parse_args(args)
-  process_auth_options(parser, options)
-  service = AuthService(options.service)
-  if service.login(True):
-    print 'Logged in as \'%s\'.' % service.get_current_identity()
-    return 0
-  else:
-    print 'Login failed or canceled.'
-    return 1
-
-
-@subcommand.usage('[options]')
-def CMDlogout(parser, args):
-  """Purges cached auth token/cookie."""
-  (options, args) = parser.parse_args(args)
-  process_auth_options(parser, options)
-  service = AuthService(options.service)
-  service.logout()
-  return 0
-
-
-@subcommand.usage('[options]')
-def CMDcheck(parser, args):
-  """Shows identity associated with currently cached auth token/cookie."""
-  (options, args) = parser.parse_args(args)
-  process_auth_options(parser, options)
-  service = AuthService(options.service)
-  service.login(False)
-  print service.get_current_identity()
-  return 0
-
-
-class OptionParserAuth(logging_utils.OptionParserWithLogging):
-  def __init__(self, **kwargs):
-    logging_utils.OptionParserWithLogging.__init__(
-        self, prog='auth.py', **kwargs)
-    self.server_group = optparse.OptionGroup(self, 'Server')
-    self.server_group.add_option(
-        '-S', '--service',
-        metavar='URL', default='',
-        help='Service to use')
-    self.add_option_group(self.server_group)
-    add_auth_options(self)
-
-  def parse_args(self, *args, **kwargs):
-    options, args = logging_utils.OptionParserWithLogging.parse_args(
-        self, *args, **kwargs)
-    if not options.service:
-      self.error('--service is required.')
-    try:
-      options.service = normalize_host_url(options.service)
-    except ValueError as exc:
-      self.error(str(exc))
-    on_error.report_on_exception_exit(options.service)
-    return options, args
-
-
-def main(args):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  return dispatcher.execute(OptionParserAuth(version=__version__), args)
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main(sys.argv[1:]))
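normalize_host_url above lower-cases the URL, strips a trailing slash, prepends https:// when no scheme is given, and only accepts plain http:// for localhost-style hosts with an explicit port. Illustrative expectations, assuming the module is importable as auth (as it was before this removal):

import auth  # the tools/swarming_client/auth.py shown above

assert auth.normalize_host_url('Example.com/') == 'https://example.com'
assert auth.normalize_host_url('https://example.com') == 'https://example.com'
assert auth.normalize_host_url('http://localhost:8080') == 'http://localhost:8080'
try:
    auth.normalize_host_url('http://example.com')  # plain http:// off localhost
except ValueError:
    pass  # rejected: must be https:// or a localhost URL with a port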
diff --git a/tools/swarming_client/cipd.py b/tools/swarming_client/cipd.py
deleted file mode 100644
index 85166bd..0000000
--- a/tools/swarming_client/cipd.py
+++ /dev/null
@@ -1,443 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Fetches CIPD client and installs packages."""
-
-__version__ = '0.4'
-
-import collections
-import contextlib
-import hashlib
-import json
-import logging
-import optparse
-import os
-import platform
-import sys
-import tempfile
-import time
-import urllib
-
-from utils import file_path
-from utils import fs
-from utils import net
-from utils import subprocess42
-from utils import tools
-import isolated_format
-import isolateserver
-
-
-# .exe on Windows.
-EXECUTABLE_SUFFIX = '.exe' if sys.platform == 'win32' else ''
-
-
-class Error(Exception):
-  """Raised on CIPD errors."""
-
-
-def add_cipd_options(parser):
-  group = optparse.OptionGroup(parser, 'CIPD')
-  group.add_option(
-      '--cipd-server',
-      help='URL of the CIPD server. Only relevant with --cipd-package.')
-  group.add_option(
-      '--cipd-client-package',
-      help='Package name of CIPD client with optional parameters described in '
-           '--cipd-package help. '
-           'Only relevant with --cipd-package. '
-           'Default: "%default"',
-      default='infra/tools/cipd/${platform}')
-  group.add_option(
-      '--cipd-client-version',
-      help='Version of CIPD client. '
-           'Only relevant with --cipd-package. '
-           'Default: "%default"',
-      default='latest')
-  group.add_option(
-      '--cipd-package',
-      dest='cipd_packages',
-      help='A CIPD package to install. '
-           'Format is "<path>:<package_name>:<version>". '
-           '"path" is installation directory relative to run_dir, '
-           'defaults to ".". '
-           '"package_name" may have ${platform} and/or ${os_ver} parameters. '
-           '${platform} will be expanded to "<os>-<architecture>" and '
-           '${os_ver} will be expanded to OS version name. '
-           'The option can be specified multiple times.',
-      action='append',
-      default=[])
-  group.add_option(
-      '--cipd-cache',
-      help='CIPD cache directory, separate from isolate cache. '
-           'Only relevant with --cipd-package. '
-           'Default: "%default".',
-      default='')
-  parser.add_option_group(group)
-
-
-def validate_cipd_options(parser, options):
-  """Calls parser.error on first found error among cipd options."""
-  if not options.cipd_packages:
-    return
-
-  for pkg in options.cipd_packages:
-    parts = pkg.split(':', 2)
-    if len(parts) != 3:
-      parser.error('invalid package "%s": must have at least 2 colons' % pkg)
-    _path, name, version = parts
-    if not name:
-      parser.error('invalid package "%s": package name is not specified' % pkg)
-    if not version:
-      parser.error('invalid package "%s": version is not specified' % pkg)
-
-  if not options.cipd_server:
-    parser.error('--cipd-package requires non-empty --cipd-server')
-
-  if not options.cipd_client_package:
-    parser.error(
-        '--cipd-package requires non-empty --cipd-client-package')
-  if not options.cipd_client_version:
-    parser.error(
-        '--cipd-package requires non-empty --cipd-client-version')
-
-
-class CipdClient(object):
-  """Installs packages."""
-
-  def __init__(self, binary_path, package_name, instance_id, service_url):
-    """Initializes CipdClient.
-
-    Args:
-      binary_path (str): path to the CIPD client binary.
-      package_name (str): the CIPD package name for the client itself.
-      instance_id (str): the CIPD instance_id for the client itself.
-      service_url (str): if not None, URL of the CIPD backend that overrides
-        the default one.
-    """
-    self.binary_path = binary_path
-    self.package_name = package_name
-    self.instance_id = instance_id
-    self.service_url = service_url
-
-  def ensure(
-      self, site_root, packages, cache_dir=None, tmp_dir=None, timeout=None):
-    """Ensures that packages installed in |site_root| equals |packages| set.
-
-    Blocking call.
-
-    Args:
-      site_root (str): where to install packages.
-      packages: list of (package_template, version) tuples.
-      cache_dir (str): if set, cache dir for cipd binary own cache.
-        Typically contains packages and tags.
-      tmp_dir (str): if not None, dir for temp files.
-      timeout (int): if not None, timeout in seconds for this function to run.
-
-    Returns:
-      Pinned packages in the form of [(package_name, package_id)], which
-      correspond 1:1 with the input packages argument.
-
-    Raises:
-      Error if could not install packages or timed out.
-    """
-    timeoutfn = tools.sliding_timeout(timeout)
-    logging.info('Installing packages %r into %s', packages, site_root)
-
-    list_file_handle, list_file_path = tempfile.mkstemp(
-        dir=tmp_dir, prefix=u'cipd-ensure-list-', suffix='.txt')
-    json_out_file_handle, json_file_path = tempfile.mkstemp(
-      dir=tmp_dir, prefix=u'cipd-ensure-result-', suffix='.json')
-    os.close(json_out_file_handle)
-
-    try:
-      try:
-        for pkg, version in packages:
-          pkg = render_package_name_template(pkg)
-          os.write(list_file_handle, '%s %s\n' % (pkg, version))
-      finally:
-        os.close(list_file_handle)
-
-      cmd = [
-        self.binary_path, 'ensure',
-        '-root', site_root,
-        '-list', list_file_path,
-        '-verbose',  # this is safe because cipd-ensure does not print a lot
-        '-json-output', json_file_path,
-      ]
-      if cache_dir:
-        cmd += ['-cache-dir', cache_dir]
-      if self.service_url:
-        cmd += ['-service-url', self.service_url]
-
-      logging.debug('Running %r', cmd)
-      process = subprocess42.Popen(
-          cmd, stdout=subprocess42.PIPE, stderr=subprocess42.PIPE)
-      output = []
-      for pipe_name, line in process.yield_any_line(timeout=0.1):
-        to = timeoutfn()
-        if to is not None and to <= 0:
-          raise Error(
-              'Could not install packages; took more than %d seconds' % timeout)
-        if not pipe_name:
-          # stdout or stderr was closed, but yield_any_line still may have
-          # something to yield.
-          continue
-        output.append(line)
-        if pipe_name == 'stderr':
-          logging.debug('cipd client: %s', line)
-        else:
-          logging.info('cipd client: %s', line)
-
-      exit_code = process.wait(timeout=timeoutfn())
-      if exit_code != 0:
-        raise Error(
-            'Could not install packages; exit code %d\noutput:%s' % (
-            exit_code, '\n'.join(output)))
-      with open(json_file_path) as jfile:
-        result_json = json.load(jfile)
-      return [(x['package'], x['instance_id']) for x in result_json['result']]
-    finally:
-      fs.remove(list_file_path)
-      fs.remove(json_file_path)
-
-
-def get_platform():
-  """Returns ${platform} parameter value.
-
-  Borrowed from
-  https://chromium.googlesource.com/infra/infra/+/aaf9586/build/build.py#204
-  """
-  # linux, mac or windows.
-  platform_variant = {
-    'darwin': 'mac',
-    'linux2': 'linux',
-    'win32': 'windows',
-  }.get(sys.platform)
-  if not platform_variant:
-    raise Error('Unknown OS: %s' % sys.platform)
-
-  # amd64, 386, etc.
-  machine = platform.machine().lower()
-  platform_arch = {
-    'amd64': 'amd64',
-    'i386': '386',
-    'i686': '386',
-    'x86': '386',
-    'x86_64': 'amd64',
-  }.get(machine)
-  if not platform_arch:
-    if machine.startswith('arm'):
-      platform_arch = 'armv6l'
-    else:
-      platform_arch = 'amd64' if sys.maxsize > 2**32 else '386'
-  return '%s-%s' % (platform_variant, platform_arch)
-
-
-def get_os_ver():
-  """Returns ${os_ver} parameter value.
-
-  Examples: 'ubuntu14_04' or 'mac10_9' or 'win6_1'.
-
-  Borrowed from
-  https://chromium.googlesource.com/infra/infra/+/aaf9586/build/build.py#204
-  """
-  if sys.platform == 'darwin':
-    # platform.mac_ver()[0] is '10.9.5'.
-    dist = platform.mac_ver()[0].split('.')
-    return 'mac%s_%s' % (dist[0], dist[1])
-
-  if sys.platform == 'linux2':
-    # platform.linux_distribution() is ('Ubuntu', '14.04', ...).
-    dist = platform.linux_distribution()
-    return '%s%s' % (dist[0].lower(), dist[1].replace('.', '_'))
-
-  if sys.platform == 'win32':
-    # platform.version() is '6.1.7601'.
-    dist = platform.version().split('.')
-    return 'win%s_%s' % (dist[0], dist[1])
-  raise Error('Unknown OS: %s' % sys.platform)
-
-
-def render_package_name_template(template):
-  """Expands template variables in a CIPD package name template."""
-  return (template
-      .lower()  # Package names are always lower case
-      .replace('${platform}', get_platform())
-      .replace('${os_ver}', get_os_ver()))
-
-
-def _check_response(res, fmt, *args):
-  """Raises Error if response is bad."""
-  if not res:
-    raise Error('%s: no response' % (fmt % args))
-
-  if res.get('status') != 'SUCCESS':
-    raise Error('%s: %s' % (
-        fmt % args,
-        res.get('error_message') or 'status is %s' % res.get('status')))
-
-
-def resolve_version(cipd_server, package_name, version, timeout=None):
-  """Resolves a package instance version (e.g. a tag) to an instance id."""
-  url = '%s/_ah/api/repo/v1/instance/resolve?%s' % (
-      cipd_server,
-      urllib.urlencode({
-        'package_name': package_name,
-        'version': version,
-      }))
-  res = net.url_read_json(url, timeout=timeout)
-  _check_response(res, 'Could not resolve version %s:%s', package_name, version)
-  instance_id = res.get('instance_id')
-  if not instance_id:
-    raise Error('Invalid resolveVersion response: no instance id')
-  return instance_id
-
-
-def get_client_fetch_url(service_url, package_name, instance_id, timeout=None):
-  """Returns a fetch URL of CIPD client binary contents.
-
-  Raises:
-    Error if cannot retrieve fetch URL.
-  """
-  # Fetch the URL of the binary from CIPD backend.
-  package_name = render_package_name_template(package_name)
-  url = '%s/_ah/api/repo/v1/client?%s' % (service_url, urllib.urlencode({
-    'package_name': package_name,
-    'instance_id': instance_id,
-  }))
-  res = net.url_read_json(url, timeout=timeout)
-  _check_response(
-      res, 'Could not fetch CIPD client %s:%s', package_name, instance_id)
-  fetch_url = res.get('client_binary', {}).get('fetch_url')
-  if not fetch_url:
-    raise Error('Invalid fetchClientBinary response: no fetch_url')
-  return fetch_url
-
-
-def _fetch_cipd_client(disk_cache, instance_id, fetch_url, timeoutfn):
-  """Fetches cipd binary to |disk_cache|.
-
-  Retries requests with exponential back-off.
-
-  Raises:
-    Error if could not fetch content.
-  """
-  sleep_time = 1
-  for attempt in xrange(5):
-    if attempt > 0:
-      if timeoutfn() is not None and timeoutfn() < sleep_time:
-        raise Error('Could not fetch CIPD client: timeout')
-      logging.warning('Will retry to fetch CIPD client in %ds', sleep_time)
-      time.sleep(sleep_time)
-      sleep_time *= 2
-
-    try:
-      res = net.url_open(fetch_url, timeout=timeoutfn())
-      if res:
-        disk_cache.write(instance_id, res.iter_content(64 * 1024))
-        return
-    except net.TimeoutError as ex:
-      raise Error('Could not fetch CIPD client: %s' % ex)
-    except net.NetError as ex:
-      logging.warning(
-          'Could not fetch CIPD client on attempt #%d: %s', attempt + 1, ex)
-
-  raise Error('Could not fetch CIPD client after 5 retries')
-
-
-@contextlib.contextmanager
-def get_client(
-      service_url, package_name, version, cache_dir, timeout=None):
-  """Returns a context manager that yields a CipdClient. A blocking call.
-
-  Args:
-      service_url (str): URL of the CIPD backend.
-      package_name (str): package name template of the CIPD client.
-      version (str): version of CIPD client package.
-      cache_dir: directory to store instance cache, version cache
-        and a hardlink to the client binary.
-      timeout (int): if not None, timeout in seconds for this function.
-
-  Yields:
-    CipdClient.
-
-  Raises:
-    Error if CIPD client version cannot be resolved or client cannot be fetched.
-  """
-  timeoutfn = tools.sliding_timeout(timeout)
-
-  package_name = render_package_name_template(package_name)
-
-  # Resolve version to instance id.
-  # Is it an instance id already? They look like HEX SHA1.
-  if isolated_format.is_valid_hash(version, hashlib.sha1):
-    instance_id = version
-  elif ':' in version: # it's an immutable tag
-    # version_cache is {version_digest -> instance id} mapping.
-    # It does not take a lot of disk space.
-    version_cache = isolateserver.DiskCache(
-        unicode(os.path.join(cache_dir, 'versions')),
-        isolateserver.CachePolicies(0, 0, 300),
-        hashlib.sha1)
-    with version_cache:
-      version_cache.cleanup()
-      # Convert |version| to a string that may be used as a filename in disk
-      # cache by hashing it.
-      version_digest = hashlib.sha1(version).hexdigest()
-      try:
-        with version_cache.getfileobj(version_digest) as f:
-          instance_id = f.read()
-      except isolateserver.CacheMiss:
-        instance_id = resolve_version(
-            service_url, package_name, version, timeout=timeoutfn())
-        version_cache.write(version_digest, instance_id)
-  else: # it's a ref
-    instance_id = resolve_version(
-        service_url, package_name, version, timeout=timeoutfn())
-
-  # instance_cache is {instance_id -> client binary} mapping.
-  # It is bounded by 5 client versions.
-  instance_cache = isolateserver.DiskCache(
-      unicode(os.path.join(cache_dir, 'clients')),
-      isolateserver.CachePolicies(0, 0, 5),
-      hashlib.sha1)
-  with instance_cache:
-    instance_cache.cleanup()
-    if instance_id not in instance_cache:
-      logging.info('Fetching CIPD client %s:%s', package_name, instance_id)
-      fetch_url = get_client_fetch_url(
-          service_url, package_name, instance_id, timeout=timeoutfn())
-      _fetch_cipd_client(instance_cache, instance_id, fetch_url, timeoutfn)
-
-    # A single host can run multiple swarming bots, but ATM they do not share
-    # same root bot directory. Thus, it is safe to use the same name for the
-    # binary.
-    binary_path = unicode(os.path.join(cache_dir, 'cipd' + EXECUTABLE_SUFFIX))
-    if fs.isfile(binary_path):
-      file_path.remove(binary_path)
-
-    with instance_cache.getfileobj(instance_id) as f:
-      isolateserver.putfile(f, binary_path, 0511)  # -r-x--x--x
-
-    yield CipdClient(binary_path, package_name=package_name,
-                     instance_id=instance_id, service_url=service_url)
-
-
-def parse_package_args(packages):
-  """Parses --cipd-package arguments.
-
-  Assumes |packages| were validated by validate_cipd_options.
-
-  Returns:
-    A list of [(path, package_name, version), ...]
-  """
-  result = []
-  for pkg in packages:
-    path, name, version = pkg.split(':', 2)
-    if not name:
-      raise Error('Invalid package "%s": package name is not specified' % pkg)
-    if not version:
-      raise Error('Invalid package "%s": version is not specified' % pkg)
-    result.append((path, name, version))
-  return result
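render_package_name_template above lower-cases the template and substitutes ${platform} and ${os_ver} with the values computed by get_platform() and get_os_ver(). An illustrative expansion of the default client package name, assuming a 64-bit Ubuntu 14.04 host (the actual values depend on the machine running the client):

# On a 64-bit Ubuntu 14.04 host:
#   get_platform() -> 'linux-amd64'
#   get_os_ver()   -> 'ubuntu14_04'
template = 'infra/tools/cipd/${platform}'  # the --cipd-client-package default
expanded = (template.lower()
            .replace('${platform}', 'linux-amd64')
            .replace('${os_ver}', 'ubuntu14_04'))
assert expanded == 'infra/tools/cipd/linux-amd64'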
diff --git a/tools/swarming_client/example/1_isolate_server.py b/tools/swarming_client/example/1_isolate_server.py
deleted file mode 100755
index ca53e2f..0000000
--- a/tools/swarming_client/example/1_isolate_server.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs hello_world.py, through hello_world.isolated, locally in a temporary
-directory.
-
-The files are archived and fetched from the remote Isolate Server.
-"""
-
-import hashlib
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-# Pylint can't find common.py that's in the same directory as this file.
-# pylint: disable=F0401
-import common
-
-
-def main():
-  options = common.parse_args(use_isolate_server=True, use_swarming=False)
-  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
-  try:
-    # All the files are put in a temporary directory. This is optional and
-    # simply done so the current directory doesn't have the following files
-    # created:
-    # - hello_world.isolated
-    # - hello_world.isolated.state
-    # - cache/
-    cachedir = os.path.join(tempdir, 'cache')
-    isolateddir = os.path.join(tempdir, 'isolated')
-    isolated = os.path.join(isolateddir, 'hello_world.isolated')
-
-    os.mkdir(isolateddir)
-
-    common.note('Archiving to %s' % options.isolate_server)
-    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
-    common.run(
-        [
-          'isolate.py',
-          'archive',
-          '--isolate', os.path.join('payload', 'hello_world.isolate'),
-          '--isolated', isolated,
-          '--isolate-server', options.isolate_server,
-          '--config-variable', 'OS', 'Yours',
-        ], options.verbose)
-
-    common.note(
-        'Downloading from %s and running in a temporary directory' %
-        options.isolate_server)
-    with open(isolated, 'rb') as f:
-      isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
-    common.run(
-        [
-          'run_isolated.py',
-          '--cache', cachedir,
-          '--isolate-server', options.isolate_server,
-          '--isolated', isolated_sha1,
-          '--no-log',
-        ], options.verbose)
-    return 0
-  except subprocess.CalledProcessError as e:
-    return e.returncode
-  finally:
-    shutil.rmtree(tempdir)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/example/2_swarming_run.py b/tools/swarming_client/example/2_swarming_run.py
deleted file mode 100755
index 64f7ccc..0000000
--- a/tools/swarming_client/example/2_swarming_run.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs hello_world.py, through hello_world.isolate, remotely on a Swarming
-bot.
-
-It first 'compiles' hello_world.isolate into hello_world.isolated, then requests
-via swarming.py to archive, run and collect results for this task.
-
-It generates example_result.json as a task summary.
-"""
-
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-# Pylint can't find common.py that's in the same directory as this file.
-# pylint: disable=F0401
-import common
-
-
-def main():
-  options = common.parse_args(use_isolate_server=True, use_swarming=True)
-  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
-  try:
-    isolated, _ = common.isolate(
-        tempdir, options.isolate_server, options.swarming_os, options.verbose)
-    common.note(
-        'Running the job remotely. This:\n'
-        ' - archives to %s\n'
-        ' - runs and collects results via %s' %
-        (options.isolate_server, options.swarming))
-    cmd = [
-      'swarming.py',
-      'run',
-      '--swarming', options.swarming,
-      '--isolate-server', options.isolate_server,
-      '--dimension', 'os', options.swarming_os,
-      '--dimension', 'pool', 'default',
-      '--task-name', options.task_name,
-      '--task-summary-json', 'example_result.json',
-      '--decorate',
-      isolated,
-    ]
-    if options.idempotent:
-      cmd.append('--idempotent')
-    if options.priority is not None:
-      cmd.extend(('--priority', str(options.priority)))
-    common.run(cmd, options.verbose)
-    with open('example_result.json', 'rb') as f:
-      print('example_result.json content:')
-      print(f.read())
-    return 0
-  except subprocess.CalledProcessError as e:
-    return e.returncode
-  finally:
-    shutil.rmtree(tempdir)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/example/3_swarming_trigger_collect.py b/tools/swarming_client/example/3_swarming_trigger_collect.py
deleted file mode 100755
index df0852c..0000000
--- a/tools/swarming_client/example/3_swarming_trigger_collect.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs hello_world.py, through hello_world.isolate, remotely on a Swarming
-slave.
-
-It compiles and archives via 'isolate.py archive', then discard the local files.
-After, it triggers and finally collects the results.
-
-Creates 2 shards and instructs the script to produce a file in the output
-directory.
-"""
-
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-# Pylint can't find common.py that's in the same directory as this file.
-# pylint: disable=F0401
-import common
-
-
-def main():
-  options = common.parse_args(use_isolate_server=True, use_swarming=True)
-  try:
-    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
-    try:
-      _, hashval = common.isolate(
-          tempdir, options.isolate_server, options.swarming_os, options.verbose)
-
-      json_file = os.path.join(tempdir, 'task.json')
-      common.note('Running on %s' % options.swarming)
-      cmd = [
-        'swarming.py',
-        'trigger',
-        '--swarming', options.swarming,
-        '--isolate-server', options.isolate_server,
-        '--dimension', 'os', options.swarming_os,
-        '--dimension', 'pool', 'default',
-        '--task-name', options.task_name,
-        '--dump-json', json_file,
-        '--isolated', hashval,
-        '--shards', '2',
-      ]
-      if options.idempotent:
-        cmd.append('--idempotent')
-      if options.priority is not None:
-        cmd.extend(('--priority', str(options.priority)))
-      cmd.extend(('--', '${ISOLATED_OUTDIR}'))
-      common.run(cmd, options.verbose)
-
-      common.note('Getting results from %s' % options.swarming)
-      common.run(
-          [
-            'swarming.py',
-            'collect',
-            '--swarming', options.swarming,
-            '--json', json_file,
-            '--task-output-dir', 'example_result',
-          ], options.verbose)
-      for root, _, files in os.walk('example_result'):
-        for name in files:
-          p = os.path.join(root, name)
-          with open(p, 'rb') as f:
-            print('%s content:' % p)
-            print(f.read())
-      return 0
-    finally:
-      shutil.rmtree(tempdir)
-  except subprocess.CalledProcessError as e:
-    return e.returncode
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/example/4_swarming_run_manual_upload.py b/tools/swarming_client/example/4_swarming_run_manual_upload.py
deleted file mode 100755
index 6758ae5..0000000
--- a/tools/swarming_client/example/4_swarming_run_manual_upload.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs hello_world.py through a manually crafted hello_world.isolated, remotely
-on a Swarming slave.
-
-No .isolate file is involved at all.
-
-It creates hello_world.isolated and archives it via 'isolateserver.py archive',
-then triggers and finally collects the results.
-
-It never creates a local file.
-"""
-
-import json
-import os
-import subprocess
-import sys
-import tempfile
-
-# Pylint can't find common.py that's in the same directory as this file.
-# pylint: disable=F0401
-import common
-
-
-def main():
-  options = common.parse_args(use_isolate_server=True, use_swarming=True)
-  try:
-    common.note(
-        'Archiving directory \'payload\' to %s' % options.isolate_server)
-    payload_isolated_sha1 = common.capture(
-        [
-          'isolateserver.py',
-          'archive',
-          '--isolate-server', options.isolate_server,
-          'payload',
-        ]).split()[0]
-
-    common.note(
-        'Archiving custom .isolated file to %s' % options.isolate_server)
-    handle, isolated = tempfile.mkstemp(
-        prefix=u'hello_world', suffix=u'.isolated')
-    os.close(handle)
-    try:
-      data = {
-        'algo': 'sha-1',
-        'command': ['python', 'hello_world.py', 'Custom'],
-        'includes': [payload_isolated_sha1],
-        'version': '1.0',
-      }
-      with open(isolated, 'wb') as f:
-        json.dump(data, f, sort_keys=True, separators=(',',':'))
-      isolated_sha1 = common.capture(
-          [
-            'isolateserver.py',
-            'archive',
-            '--isolate-server', options.isolate_server,
-            isolated,
-          ]).split()[0]
-    finally:
-      common.note('Deleting temporary file, it is not necessary anymore.')
-      os.remove(isolated)
-
-    # Now trigger as usual. You could look at run_example_swarming_involved for
-    # the involved way, but use the short way here.
-
-    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
-    cmd = [
-      'swarming.py',
-      'run',
-      '--swarming', options.swarming,
-      '--isolate-server', options.isolate_server,
-      '--dimension', 'os', options.swarming_os,
-      '--dimension', 'pool', 'default',
-      '--task-name', options.task_name,
-      isolated_sha1,
-    ]
-    if options.idempotent:
-      cmd.append('--idempotent')
-    if options.priority is not None:
-      cmd.extend(('--priority', str(options.priority)))
-    common.run(cmd, options.verbose)
-    return 0
-  except subprocess.CalledProcessError as e:
-    return e.returncode
-
-
-if __name__ == '__main__':
-  sys.exit(main())
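The hand-written .isolated above is just a small JSON document; serialized with the same sort_keys/separators arguments as the json.dump call in the script, and with a hypothetical payload hash, it looks like this:

import json

data = {
    'algo': 'sha-1',
    'command': ['python', 'hello_world.py', 'Custom'],
    'includes': ['deadbeef' * 5],  # hypothetical 40-char SHA-1 of the payload .isolated
    'version': '1.0',
}
print(json.dumps(data, sort_keys=True, separators=(',', ':')))
# {"algo":"sha-1","command":["python","hello_world.py","Custom"],"includes":["deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"],"version":"1.0"}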
diff --git a/tools/swarming_client/example/README.md b/tools/swarming_client/example/README.md
deleted file mode 100644
index eb282c8..0000000
--- a/tools/swarming_client/example/README.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Swarming examples
-
-This directory contains examples of how to use this toolset to run tasks remotely.
-
-You should look at the code and try these in the following order:
-
-*   1_isolate_server.py
-*   2_swarming_run.py
-*   3_swarming_trigger_collect.py
-*   4_swarming_run_manual_upload.py
-
-Look at the docstring of each script to see what they do.
diff --git a/tools/swarming_client/example/common.py b/tools/swarming_client/example/common.py
deleted file mode 100644
index 07c0bfc..0000000
--- a/tools/swarming_client/example/common.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import datetime
-import getpass
-import hashlib
-import optparse
-import os
-import subprocess
-import sys
-
-
-ROOT_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-sys.path.append(os.path.join(ROOT_DIR, '..', 'third_party'))
-
-import colorama
-
-
-CHROMIUM_SWARMING_OSES = {
-    'darwin': 'Mac',
-    'cygwin': 'Windows',
-    'linux2': 'Ubuntu',
-    'win32': 'Windows',
-}
-
-
-def parse_args(use_isolate_server, use_swarming):
-  """Process arguments for the example scripts."""
-  os.chdir(ROOT_DIR)
-  colorama.init()
-
-  parser = optparse.OptionParser(description=sys.modules['__main__'].__doc__)
-  if use_isolate_server:
-    parser.add_option(
-        '-I', '--isolate-server',
-        metavar='URL', default=os.environ.get('ISOLATE_SERVER', ''),
-        help='Isolate server to use')
-  if use_swarming:
-    task_name = '%s-%s-hello_world' % (
-      getpass.getuser(),
-      datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S'))
-    parser.add_option(
-        '--idempotent', action='store_true',
-        help='Tells Swarming to reuse a previous task result if possible')
-    parser.add_option(
-        '-S', '--swarming',
-        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-        help='Swarming server to use')
-    parser.add_option(
-        '-o', '--os', default=sys.platform,
-        help='Swarming slave OS to request. Should be one of the valid '
-             'sys.platform values like darwin, linux2 or win32, default: '
-             '%default.')
-    parser.add_option(
-        '-t', '--task-name', default=task_name,
-        help='Swarming task name, default is based on time: %default')
-  parser.add_option('-v', '--verbose', action='count', default=0)
-  parser.add_option(
-      '--priority', metavar='INT', type='int', help='Priority to use')
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unsupported argument %s' % args)
-  if use_isolate_server and not options.isolate_server:
-    parser.error('--isolate-server is required.')
-  if use_swarming:
-    if not options.swarming:
-      parser.error('--swarming is required.')
-    options.swarming_os = CHROMIUM_SWARMING_OSES[options.os]
-    del options.os
-
-  return options
-
-
-def note(text):
-  """Prints a formatted note."""
-  print(
-      colorama.Fore.YELLOW + colorama.Style.BRIGHT + '\n-> ' + text +
-      colorama.Fore.RESET)
-
-
-def run(cmd, verbose):
-  """Prints the command it runs then run it."""
-  cmd = cmd[:]
-  cmd.extend(['--verbose'] * verbose)
-  print(
-      'Running: %s%s%s' %
-      (colorama.Fore.GREEN, ' '.join(cmd), colorama.Fore.RESET))
-  cmd = [sys.executable, os.path.join('..', cmd[0])] + cmd[1:]
-  if sys.platform != 'win32':
-    cmd = ['time', '-p'] + cmd
-  subprocess.check_call(cmd)
-
-
-def capture(cmd):
-  """Prints the command it runs then return stdout."""
-  print(
-      'Running: %s%s%s' %
-      (colorama.Fore.GREEN, ' '.join(cmd), colorama.Fore.RESET))
-  cmd = [sys.executable, os.path.join('..', cmd[0])] + cmd[1:]
-  return subprocess.check_output(cmd)
-
-
-def isolate(tempdir, isolate_server, swarming_os, verbose):
-  """Archives the payload."""
-  # All the files are put in a temporary directory. This is optional and
-  # simply done so the current directory doesn't have the following files
-  # created:
-  # - hello_world.isolated
-  # - hello_world.isolated.state
-  isolated = os.path.join(tempdir, 'hello_world.isolated')
-  note('Archiving to %s' % isolate_server)
-  run(
-      [
-        'isolate.py',
-        'archive',
-        '--isolate', os.path.join('payload', 'hello_world.isolate'),
-        '--isolated', isolated,
-        '--isolate-server', isolate_server,
-        '--config-variable', 'OS', swarming_os,
-      ], verbose)
-  with open(isolated, 'rb') as f:
-    hashval = hashlib.sha1(f.read()).hexdigest()
-  return isolated, hashval
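-# A usage sketch of isolate() above (the server URL below is hypothetical):
-#   isolated_path, sha1 = isolate(
-#       tempdir, 'https://isolate.example.com', 'Ubuntu', 0)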
diff --git a/tools/swarming_client/example/payload/hello_world.isolate b/tools/swarming_client/example/payload/hello_world.isolate
deleted file mode 100644
index d6e5373..0000000
--- a/tools/swarming_client/example/payload/hello_world.isolate
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-{
-  'variables': {
-    'files': [
-      'hello_world.py',
-    ],
-  },
-  #
-  'conditions': [
-    ['OS=="Ubuntu"', {
-      'variables': {
-        'command': [
-          'python',
-          'hello_world.py',
-          'Ubuntu',
-        ],
-      },
-    }],
-    ['OS=="Mac"', {
-      'variables': {
-        'command': [
-          'python',
-          'hello_world.py',
-          'OSX',
-        ],
-      },
-    }],
-    ['OS=="Yours"', {
-      'variables': {
-        'command': [
-          'python',
-          'hello_world.py',
-          'your OS is the best',
-        ],
-      },
-    }],
-    ['OS=="Windows"', {
-      'variables': {
-        'command': [
-          'python',
-          'hello_world.py',
-          'Windows',
-        ],
-      },
-    }],
-  ],
-}
diff --git a/tools/swarming_client/example/payload/hello_world.py b/tools/swarming_client/example/payload/hello_world.py
deleted file mode 100755
index a57ba5d..0000000
--- a/tools/swarming_client/example/payload/hello_world.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""This script is meant to be run on a Swarming slave."""
-
-import os
-import sys
-
-
-def main():
-  print('Hello world: ' + sys.argv[1])
-  if len(sys.argv) == 3:
-    # Write a file in ${ISOLATED_OUTDIR}.
-    with open(os.path.join(sys.argv[2], 'happiness.txt'), 'wb') as f:
-      f.write(
-          'is where you look %d/%d' % (
-            int(os.environ['GTEST_SHARD_INDEX']),
-            int(os.environ['GTEST_TOTAL_SHARDS'])))
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/isolate.py b/tools/swarming_client/isolate.py
deleted file mode 100755
index 0024cf9..0000000
--- a/tools/swarming_client/isolate.py
+++ /dev/null
@@ -1,1226 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Front end tool to operate on .isolate files.
-
-This includes creating, merging or compiling them to generate a .isolated file.
-
-See more information at
-  https://code.google.com/p/swarming/wiki/IsolateDesign
-  https://code.google.com/p/swarming/wiki/IsolateUserGuide
-"""
-# Run ./isolate.py --help for more detailed information.
-
-__version__ = '0.4.4'
-
-import datetime
-import itertools
-import logging
-import optparse
-import os
-import re
-import subprocess
-import sys
-
-import auth
-import isolate_format
-import isolated_format
-import isolateserver
-import run_isolated
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from third_party.depot_tools import subcommand
-
-from utils import logging_utils
-from utils import file_path
-from utils import fs
-from utils import subprocess42
-from utils import tools
-
-
-# Exit code of 'archive' and 'batcharchive' if the command fails due to an error
-# in *.isolate file (format error, or some referenced files are missing, etc.)
-EXIT_CODE_ISOLATE_ERROR = 1
-
-
-# Exit code of 'archive' and 'batcharchive' if the command fails due to
-# a network or server issue. It is an infrastructure failure.
-EXIT_CODE_UPLOAD_ERROR = 101
-
-
-# Supported version of *.isolated.gen.json files consumed by CMDbatcharchive.
-ISOLATED_GEN_JSON_VERSION = 1
-
-
-class ExecutionError(Exception):
-  """A generic error occurred."""
-  def __str__(self):
-    return self.args[0]
-
-
-### Path handling code.
-
-
-def recreate_tree(outdir, indir, infiles, action, as_hash):
-  """Creates a new tree with only the input files in it.
-
-  Arguments:
-    outdir:    Output directory to create the files in.
-    indir:     Root directory the infiles are based in.
-    infiles:   dict of files to map from |indir| to |outdir|.
-    action:    One of accepted action of file_path.link_file().
-    as_hash:   Output filename is the hash instead of relfile.
-  """
-  logging.info(
-      'recreate_tree(outdir=%s, indir=%s, files=%d, action=%s, as_hash=%s)' %
-      (outdir, indir, len(infiles), action, as_hash))
-
-  assert os.path.isabs(outdir) and outdir == os.path.normpath(outdir), outdir
-  if not os.path.isdir(outdir):
-    logging.info('Creating %s' % outdir)
-    fs.makedirs(outdir)
-
-  for relfile, metadata in infiles.iteritems():
-    infile = os.path.join(indir, relfile)
-    if as_hash:
-      # Do the hashtable specific checks.
-      if 'l' in metadata:
-        # Skip links when storing a hashtable.
-        continue
-      outfile = os.path.join(outdir, metadata['h'])
-      if os.path.isfile(outfile):
-        # Just do a quick check that the file size matches. No need to stat()
-        # the input file again; grab the value from the dict.
-        if not 's' in metadata:
-          raise isolated_format.MappingError(
-              'Misconfigured item %s: %s' % (relfile, metadata))
-        if metadata['s'] == fs.stat(outfile).st_size:
-          continue
-        else:
-          logging.warn('Overwriting %s' % metadata['h'])
-          fs.remove(outfile)
-    else:
-      outfile = os.path.join(outdir, relfile)
-      file_path.ensure_tree(os.path.dirname(outfile))
-
-    if 'l' in metadata:
-      pointed = metadata['l']
-      logging.debug('Symlink: %s -> %s' % (outfile, pointed))
-      # symlink doesn't exist on Windows.
-      fs.symlink(pointed, outfile)  # pylint: disable=E1101
-    else:
-      file_path.link_file(outfile, infile, action)
-
-
-### Variable stuff.
-
-
-def _normalize_path_variable(cwd, relative_base_dir, key, value):
-  """Normalizes a path variable into a relative directory.
-  """
-  # Variables could contain / or \ on windows. Always normalize to
-  # os.path.sep.
-  x = os.path.join(cwd, value.strip().replace('/', os.path.sep))
-  normalized = file_path.get_native_path_case(os.path.normpath(x))
-  if not os.path.isdir(normalized):
-    raise ExecutionError('%s=%s is not a directory' % (key, normalized))
-
-  # All variables are relative to the .isolate file.
-  normalized = os.path.relpath(normalized, relative_base_dir)
-  logging.debug(
-      'Translated variable %s from %s to %s', key, value, normalized)
-  return normalized
-
-
-def normalize_path_variables(cwd, path_variables, relative_base_dir):
-  """Processes path variables as a special case and returns a copy of the dict.
-
-  For each 'path' variable: first normalizes it based on |cwd|, verifies it
-  exists, then sets it as relative to relative_base_dir.
-  """
-  logging.info(
-      'normalize_path_variables(%s, %s, %s)', cwd, path_variables,
-      relative_base_dir)
-  assert isinstance(cwd, unicode), cwd
-  assert isinstance(relative_base_dir, unicode), relative_base_dir
-  relative_base_dir = file_path.get_native_path_case(relative_base_dir)
-  return dict(
-      (k, _normalize_path_variable(cwd, relative_base_dir, k, v))
-      for k, v in path_variables.iteritems())
-
-
-### Internal state files.
-
-
-def isolatedfile_to_state(filename):
-  """For a '.isolate' file, returns the path to the saved '.state' file."""
-  return filename + '.state'
-
-
-def chromium_save_isolated(isolated, data, path_variables, algo):
-  """Writes one or many .isolated files.
-
-  This slightly increases the cold cache cost but greatly reduces the warm cache
-  cost by splitting low-churn files off the master .isolated file. It also
-  reduces overall isolateserver memcache consumption.
-  """
-  slaves = []
-
-  def extract_into_included_isolated(prefix):
-    new_slave = {
-      'algo': data['algo'],
-      'files': {},
-      'version': data['version'],
-    }
-    for f in data['files'].keys():
-      if f.startswith(prefix):
-        new_slave['files'][f] = data['files'].pop(f)
-    if new_slave['files']:
-      slaves.append(new_slave)
-
-  # Split test/data/ in its own .isolated file.
-  extract_into_included_isolated(os.path.join('test', 'data', ''))
-
-  # Split everything out of PRODUCT_DIR in its own .isolated file.
-  if path_variables.get('PRODUCT_DIR'):
-    extract_into_included_isolated(path_variables['PRODUCT_DIR'])
-
-  files = []
-  for index, f in enumerate(slaves):
-    slavepath = isolated[:-len('.isolated')] + '.%d.isolated' % index
-    tools.write_json(slavepath, f, True)
-    data.setdefault('includes', []).append(
-        isolated_format.hash_file(slavepath, algo))
-    files.append(os.path.basename(slavepath))
-
-  files.extend(isolated_format.save_isolated(isolated, data))
-  return files
-
-
-class Flattenable(object):
-  """Represents data that can be represented as a json file."""
-  MEMBERS = ()
-
-  def flatten(self):
-    """Returns a json-serializable version of itself.
-
-    Skips None entries.
-    """
-    items = ((member, getattr(self, member)) for member in self.MEMBERS)
-    return dict((member, value) for member, value in items if value is not None)
-
-  @classmethod
-  def load(cls, data, *args, **kwargs):
-    """Loads a flattened version."""
-    data = data.copy()
-    out = cls(*args, **kwargs)
-    for member in out.MEMBERS:
-      if member in data:
-        # Access to a protected member XXX of a client class
-        # pylint: disable=W0212
-        out._load_member(member, data.pop(member))
-    if data:
-      raise ValueError(
-          'Found unexpected entry %s while constructing an object %s' %
-            (data, cls.__name__), data, cls.__name__)
-    return out
-
-  def _load_member(self, member, value):
-    """Loads a member into self."""
-    setattr(self, member, value)
-
-  @classmethod
-  def load_file(cls, filename, *args, **kwargs):
-    """Loads the data from a file or return an empty instance."""
-    try:
-      out = cls.load(tools.read_json(filename), *args, **kwargs)
-      logging.debug('Loaded %s(%s)', cls.__name__, filename)
-    except (IOError, ValueError) as e:
-      # On failure, loads the default instance.
-      out = cls(*args, **kwargs)
-      logging.warn('Failed to load %s: %s', filename, e)
-    return out
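-  # A minimal usage sketch for a Flattenable subclass (the class name and
-  # member below are hypothetical, for illustration only):
-  #
-  #   class MyState(Flattenable):
-  #     MEMBERS = ('foo',)
-  #     def __init__(self):
-  #       super(MyState, self).__init__()
-  #       self.foo = None
-  #
-  #   obj = MyState.load({'foo': 42})  # _load_member() sets obj.foo = 42.
-  #   obj.flatten()                    # -> {'foo': 42}; None members are skipped.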
-
-
-class SavedState(Flattenable):
-  """Describes the content of a .state file.
-
-  This file caches the items calculated by this script and is used to increase
-  the performance of the script. This file is not loaded by run_isolated.py.
-  This file can always be safely removed.
-
-  It is important to note that the 'files' dict keys are using native OS path
-  separator instead of '/' used in .isolate file.
-  """
-  MEMBERS = (
-    # Value of sys.platform so that the file is rejected if loaded from a
-    # different OS. While this should never happen in practice, users are ...
-    # "creative".
-    'OS',
-    # Algorithm used to generate the hash. The only supported value at the
-    # time of writing is 'sha-1'.
-    'algo',
-    # List of included .isolated files. Used to support/remember 'slave'
-    # .isolated files. Relative path to isolated_basedir.
-    'child_isolated_files',
-    # Cache of the processed command. This value is saved because .isolated
-    # files are never loaded by isolate.py so it's the only way to load the
-    # command safely.
-    'command',
-    # GYP variables that are used to generate conditions. The most frequent
-    # example is 'OS'.
-    'config_variables',
-    # GYP variables that will be replaced in 'command' and paths but will not be
-    # considered a relative directory.
-    'extra_variables',
-    # Cache of the files found so the next run can skip hash calculation.
-    'files',
-    # Path of the original .isolate file. Relative path to isolated_basedir.
-    'isolate_file',
-    # GYP variables used to generate the .isolated files paths based on path
-    # variables. Frequent examples are DEPTH and PRODUCT_DIR.
-    'path_variables',
-    # If the generated directory tree should be read-only. Defaults to 1.
-    'read_only',
-    # Relative cwd to use to start the command.
-    'relative_cwd',
-    # Root directory the files are mapped from.
-    'root_dir',
-    # Version of the saved state file format. Any breaking change must update
-    # the value.
-    'version',
-  )
-
-  # Bump this version whenever the saved state changes. It is also keyed on the
-  # .isolated file version so any change in the generator will invalidate .state
-  # files.
-  EXPECTED_VERSION = isolated_format.ISOLATED_FILE_VERSION + '.2'
-
-  def __init__(self, isolated_basedir):
-    """Creates an empty SavedState.
-
-    Arguments:
-      isolated_basedir: the directory where the .isolated and .isolated.state
-          files are saved.
-    """
-    super(SavedState, self).__init__()
-    assert os.path.isabs(isolated_basedir), isolated_basedir
-    assert os.path.isdir(isolated_basedir), isolated_basedir
-    self.isolated_basedir = isolated_basedir
-
-    # The default algorithm used.
-    self.OS = sys.platform
-    self.algo = isolated_format.SUPPORTED_ALGOS['sha-1']
-    self.child_isolated_files = []
-    self.command = []
-    self.config_variables = {}
-    self.extra_variables = {}
-    self.files = {}
-    self.isolate_file = None
-    self.path_variables = {}
-    # Defaults to 1 when compiling to .isolated.
-    self.read_only = None
-    self.relative_cwd = None
-    self.root_dir = None
-    self.version = self.EXPECTED_VERSION
-
-  def update_config(self, config_variables):
-    """Updates the saved state with only config variables."""
-    self.config_variables.update(config_variables)
-
-  def update(self, isolate_file, path_variables, extra_variables):
-    """Updates the saved state with new data to keep GYP variables and internal
-    reference to the original .isolate file.
-    """
-    assert os.path.isabs(isolate_file)
-    # Convert back to a relative path. On Windows, if the isolate and
-    # isolated files are on different drives, isolate_file will stay an absolute
-    # path.
-    isolate_file = file_path.safe_relpath(isolate_file, self.isolated_basedir)
-
-    # The same .isolate file should always be used to generate the .isolated and
-    # .isolated.state.
-    assert isolate_file == self.isolate_file or not self.isolate_file, (
-        isolate_file, self.isolate_file)
-    self.extra_variables.update(extra_variables)
-    self.isolate_file = isolate_file
-    self.path_variables.update(path_variables)
-
-  def update_isolated(self, command, infiles, read_only, relative_cwd):
-    """Updates the saved state with data necessary to generate a .isolated file.
-
-    The new files in |infiles| are added to the self.files dict, but their
-    hashes are not calculated here.
-    """
-    self.command = command
-    # Add new files.
-    for f in infiles:
-      self.files.setdefault(f, {})
-    # Prune extraneous files that are not a dependency anymore.
-    for f in set(self.files).difference(set(infiles)):
-      del self.files[f]
-    if read_only is not None:
-      self.read_only = read_only
-    self.relative_cwd = relative_cwd
-
-  def to_isolated(self):
-    """Creates a .isolated dictionary out of the saved state.
-
-    https://code.google.com/p/swarming/wiki/IsolatedDesign
-    """
-    def strip(data):
-      """Returns a 'files' entry with only the whitelisted keys."""
-      return dict((k, data[k]) for k in ('h', 'l', 'm', 's') if k in data)
-
-    out = {
-      'algo': isolated_format.SUPPORTED_ALGOS_REVERSE[self.algo],
-      'files': dict(
-          (filepath, strip(data)) for filepath, data in self.files.iteritems()),
-      # The version of the .state file is different than the one of the
-      # .isolated file.
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    if self.command:
-      out['command'] = self.command
-    out['read_only'] = self.read_only if self.read_only is not None else 1
-    if self.relative_cwd:
-      out['relative_cwd'] = self.relative_cwd
-    return out
-
-  @property
-  def isolate_filepath(self):
-    """Returns the absolute path of self.isolate_file."""
-    return os.path.normpath(
-        os.path.join(self.isolated_basedir, self.isolate_file))
-
-  # Arguments number differs from overridden method
-  @classmethod
-  def load(cls, data, isolated_basedir):  # pylint: disable=W0221
-    """Special case loading to disallow different OS.
-
-    It is not possible to load a .isolated.state file from a different OS; the
-    file is saved in an OS-specific format.
-    """
-    out = super(SavedState, cls).load(data, isolated_basedir)
-    if data.get('OS') != sys.platform:
-      raise isolated_format.IsolatedError('Unexpected OS %s', data.get('OS'))
-
-    # Converts human readable form back into the proper class type.
-    algo = data.get('algo')
-    if not algo in isolated_format.SUPPORTED_ALGOS:
-      raise isolated_format.IsolatedError('Unknown algo \'%s\'' % out.algo)
-    out.algo = isolated_format.SUPPORTED_ALGOS[algo]
-
-    # Refuse to load a non-exact version, even with a minor difference. This is
-    # unlike isolateserver.load_isolated(), because .isolated.state could have
-    # changed significantly even between minor versions.
-    if out.version != cls.EXPECTED_VERSION:
-      raise isolated_format.IsolatedError(
-          'Unsupported version \'%s\'' % out.version)
-
-    # The .isolate file must be valid. If it is not present anymore, zap the
-    # value as if it was not noted, so .isolate_file can safely be overridden
-    # later.
-    if out.isolate_file and not fs.isfile(out.isolate_filepath):
-      out.isolate_file = None
-    if out.isolate_file:
-      # It could be absolute on Windows if the drive containing the .isolate and
-      # the drive containing the .isolated files differ, e.g. the .isolate is on
-      # C:\\ and the .isolated is on D:\\.
-      assert not os.path.isabs(out.isolate_file) or sys.platform == 'win32'
-      assert fs.isfile(out.isolate_filepath), out.isolate_filepath
-    return out
-
-  def flatten(self):
-    """Makes sure 'algo' is in human readable form."""
-    out = super(SavedState, self).flatten()
-    out['algo'] = isolated_format.SUPPORTED_ALGOS_REVERSE[out['algo']]
-    return out
-
-  def __str__(self):
-    def dict_to_str(d):
-      return ''.join('\n    %s=%s' % (k, d[k]) for k in sorted(d))
-
-    out = '%s(\n' % self.__class__.__name__
-    out += '  command: %s\n' % self.command
-    out += '  files: %d\n' % len(self.files)
-    out += '  isolate_file: %s\n' % self.isolate_file
-    out += '  read_only: %s\n' % self.read_only
-    out += '  relative_cwd: %s\n' % self.relative_cwd
-    out += '  child_isolated_files: %s\n' % self.child_isolated_files
-    out += '  path_variables: %s\n' % dict_to_str(self.path_variables)
-    out += '  config_variables: %s\n' % dict_to_str(self.config_variables)
-    out += '  extra_variables: %s\n' % dict_to_str(self.extra_variables)
-    return out
-
-
-class CompleteState(object):
-  """Contains all the state to run the task at hand."""
-  def __init__(self, isolated_filepath, saved_state):
-    super(CompleteState, self).__init__()
-    assert isolated_filepath is None or os.path.isabs(isolated_filepath)
-    self.isolated_filepath = isolated_filepath
-    # Contains the data to ease developer's use-case but that is not strictly
-    # necessary.
-    self.saved_state = saved_state
-
-  @classmethod
-  def load_files(cls, isolated_filepath):
-    """Loads state from disk."""
-    assert os.path.isabs(isolated_filepath), isolated_filepath
-    isolated_basedir = os.path.dirname(isolated_filepath)
-    return cls(
-        isolated_filepath,
-        SavedState.load_file(
-            isolatedfile_to_state(isolated_filepath), isolated_basedir))
-
-  def load_isolate(
-      self, cwd, isolate_file, path_variables, config_variables,
-      extra_variables, blacklist, ignore_broken_items):
-    """Updates self.isolated and self.saved_state with information loaded from a
-    .isolate file.
-
-    Processes the loaded data and deduces root_dir and relative_cwd.
-    """
-    # Make sure to not depend on os.getcwd().
-    assert os.path.isabs(isolate_file), isolate_file
-    isolate_file = file_path.get_native_path_case(isolate_file)
-    logging.info(
-        'CompleteState.load_isolate(%s, %s, %s, %s, %s, %s)',
-        cwd, isolate_file, path_variables, config_variables, extra_variables,
-        ignore_broken_items)
-
-    # Config variables are not affected by the paths and must be used to
-    # retrieve the paths, so update them first.
-    self.saved_state.update_config(config_variables)
-
-    with fs.open(isolate_file, 'r') as f:
-      # At that point, variables are not replaced yet in command and infiles.
-      # infiles may contain directory entries and is in posix style.
-      command, infiles, read_only, isolate_cmd_dir = (
-          isolate_format.load_isolate_for_config(
-              os.path.dirname(isolate_file), f.read(),
-              self.saved_state.config_variables))
-
-    # Processes the variables with the newly found relative root. Note that 'cwd'
-    # is used when path variables are used.
-    path_variables = normalize_path_variables(
-        cwd, path_variables, isolate_cmd_dir)
-    # Update the rest of the saved state.
-    self.saved_state.update(isolate_file, path_variables, extra_variables)
-
-    total_variables = self.saved_state.path_variables.copy()
-    total_variables.update(self.saved_state.config_variables)
-    total_variables.update(self.saved_state.extra_variables)
-    command = [
-        isolate_format.eval_variables(i, total_variables) for i in command
-    ]
-
-    total_variables = self.saved_state.path_variables.copy()
-    total_variables.update(self.saved_state.extra_variables)
-    infiles = [
-        isolate_format.eval_variables(f, total_variables) for f in infiles
-    ]
-    # root_dir is automatically determined by the deepest root accessed with the
-    # form '../../foo/bar'. Note that path variables must be taken into account
-    # too; add them as if they were input files.
-    self.saved_state.root_dir = isolate_format.determine_root_dir(
-        isolate_cmd_dir, infiles + self.saved_state.path_variables.values())
-    # The relative directory is automatically determined by the relative path
-    # between root_dir and the directory containing the .isolate file,
-    # isolate_base_dir.
-    relative_cwd = os.path.relpath(isolate_cmd_dir, self.saved_state.root_dir)
-    # Now that we know where the root is, check that the path_variables point
-    # inside it.
-    for k, v in self.saved_state.path_variables.iteritems():
-      dest = os.path.join(isolate_cmd_dir, relative_cwd, v)
-      if not file_path.path_starts_with(self.saved_state.root_dir, dest):
-        raise isolated_format.MappingError(
-            'Path variable %s=%r points outside the inferred root directory '
-            '%s; %s'
-            % (k, v, self.saved_state.root_dir, dest))
-    # Normalize the files relative to self.saved_state.root_dir. It is important to
-    # keep the trailing os.path.sep at that step.
-    infiles = [
-      file_path.relpath(
-          file_path.normpath(os.path.join(isolate_cmd_dir, f)),
-          self.saved_state.root_dir)
-      for f in infiles
-    ]
-    follow_symlinks = sys.platform != 'win32'
-    # Expand the directories by listing each file inside. Up to now, trailing
-    # os.path.sep must be kept.
-    infiles = isolated_format.expand_directories_and_symlinks(
-        self.saved_state.root_dir,
-        infiles,
-        tools.gen_blacklist(blacklist),
-        follow_symlinks,
-        ignore_broken_items)
-
-    # Finally, update the new data to be able to generate the foo.isolated file,
-    # the file that is used by run_isolated.py.
-    self.saved_state.update_isolated(command, infiles, read_only, relative_cwd)
-    logging.debug(self)
-
-  def files_to_metadata(self, subdir):
-    """Updates self.saved_state.files with the files' mode and hash.
-
-    If |subdir| is specified, filters to a subdirectory. The resulting .isolated
-    file is tainted.
-
-    See isolated_format.file_to_metadata() for more information.
-    """
-    for infile in sorted(self.saved_state.files):
-      if subdir and not infile.startswith(subdir):
-        self.saved_state.files.pop(infile)
-      else:
-        filepath = os.path.join(self.root_dir, infile)
-        self.saved_state.files[infile] = isolated_format.file_to_metadata(
-            filepath,
-            self.saved_state.files[infile],
-            self.saved_state.read_only,
-            self.saved_state.algo)
-
-  def save_files(self):
-    """Saves self.saved_state and creates a .isolated file."""
-    logging.debug('Dumping to %s' % self.isolated_filepath)
-    self.saved_state.child_isolated_files = chromium_save_isolated(
-        self.isolated_filepath,
-        self.saved_state.to_isolated(),
-        self.saved_state.path_variables,
-        self.saved_state.algo)
-    total_bytes = sum(
-        i.get('s', 0) for i in self.saved_state.files.itervalues())
-    if total_bytes:
-      # TODO(maruel): Stats are missing the .isolated files.
-      logging.debug('Total size: %d bytes' % total_bytes)
-    saved_state_file = isolatedfile_to_state(self.isolated_filepath)
-    logging.debug('Dumping to %s' % saved_state_file)
-    tools.write_json(saved_state_file, self.saved_state.flatten(), True)
-
-  @property
-  def root_dir(self):
-    return self.saved_state.root_dir
-
-  def __str__(self):
-    def indent(data, indent_length):
-      """Indents text."""
-      spacing = ' ' * indent_length
-      return ''.join(spacing + l for l in str(data).splitlines(True))
-
-    out = '%s(\n' % self.__class__.__name__
-    out += '  root_dir: %s\n' % self.root_dir
-    out += '  saved_state: %s)' % indent(self.saved_state, 2)
-    return out
-
-
-def load_complete_state(options, cwd, subdir, skip_update):
-  """Loads a CompleteState.
-
-  This includes data from .isolate and .isolated.state files. Never reads the
-  .isolated file.
-
-  Arguments:
-    options: Options instance generated with process_isolate_options. For either
-             options.isolate and options.isolated, if the value is set, it is an
-             absolute path.
-    cwd: base directory to be used when loading the .isolate file.
-    subdir: optional argument to only process file in the subdirectory, relative
-            to CompleteState.root_dir.
-    skip_update: Skip trying to load the .isolate file and processing the
-                 dependencies. It is useful when not needed, like when tracing.
-  """
-  assert not options.isolate or os.path.isabs(options.isolate)
-  assert not options.isolated or os.path.isabs(options.isolated)
-  cwd = file_path.get_native_path_case(unicode(cwd))
-  if options.isolated:
-    # Load the previous state if it was present. Namely, "foo.isolated.state".
-    # Note: this call doesn't load the .isolate file.
-    complete_state = CompleteState.load_files(options.isolated)
-  else:
-    # Constructs a dummy object that cannot be saved. Useful for temporary
-    # commands like 'run'. There is no directory containing a .isolated file so
-    # specify the current working directory as a valid directory.
-    complete_state = CompleteState(None, SavedState(os.getcwd()))
-
-  if not options.isolate:
-    if not complete_state.saved_state.isolate_file:
-      if not skip_update:
-        raise ExecutionError('A .isolate file is required.')
-      isolate = None
-    else:
-      isolate = complete_state.saved_state.isolate_filepath
-  else:
-    isolate = options.isolate
-    if complete_state.saved_state.isolate_file:
-      rel_isolate = file_path.safe_relpath(
-          options.isolate, complete_state.saved_state.isolated_basedir)
-      if rel_isolate != complete_state.saved_state.isolate_file:
-        # This happens if the .isolate file was moved for example. In this case,
-        # discard the saved state.
-        logging.warning(
-            '--isolated %s != %s as saved in %s. Discarding saved state',
-            rel_isolate,
-            complete_state.saved_state.isolate_file,
-            isolatedfile_to_state(options.isolated))
-        complete_state = CompleteState(
-            options.isolated,
-            SavedState(complete_state.saved_state.isolated_basedir))
-
-  if not skip_update:
-    # Then load the .isolate and expands directories.
-    complete_state.load_isolate(
-        cwd, isolate, options.path_variables, options.config_variables,
-        options.extra_variables, options.blacklist, options.ignore_broken_items)
-
-  # Regenerate complete_state.saved_state.files.
-  if subdir:
-    subdir = unicode(subdir)
-    # This is tricky here. If it is a path, take it from the root_dir. If
-    # it is a variable, it must be keyed from the directory containing the
-    # .isolate file. So translate all variables first.
-    translated_path_variables = dict(
-        (k,
-          os.path.normpath(os.path.join(complete_state.saved_state.relative_cwd,
-            v)))
-        for k, v in complete_state.saved_state.path_variables.iteritems())
-    subdir = isolate_format.eval_variables(subdir, translated_path_variables)
-    subdir = subdir.replace('/', os.path.sep)
-
-  if not skip_update:
-    complete_state.files_to_metadata(subdir)
-  return complete_state
-
-
-def create_isolate_tree(outdir, root_dir, files, relative_cwd, read_only):
-  """Creates a isolated tree usable for test execution.
-
-  Returns the current working directory where the isolated command should be
-  started in.
-  """
-  # Forcibly copy when the tree has to be read only. Otherwise the inode is
-  # modified, and this causes real problems because the user's source tree
-  # becomes read only. On the other hand, the cost of doing a file copy is huge.
-  if read_only not in (0, None):
-    action = file_path.COPY
-  else:
-    action = file_path.HARDLINK_WITH_FALLBACK
-
-  recreate_tree(
-      outdir=outdir,
-      indir=root_dir,
-      infiles=files,
-      action=action,
-      as_hash=False)
-  cwd = os.path.normpath(os.path.join(outdir, relative_cwd))
-
-  # cwd may not exist when no files are mapped from the directory containing the
-  # .isolate file. But the directory must exist to be the current working
-  # directory.
-  file_path.ensure_tree(cwd)
-
-  run_isolated.change_tree_read_only(outdir, read_only)
-  return cwd
-
-
-@tools.profile
-def prepare_for_archival(options, cwd):
-  """Loads the isolated file and create 'infiles' for archival."""
-  complete_state = load_complete_state(
-      options, cwd, options.subdir, False)
-  # Make sure that complete_state isn't modified until save_files() is
-  # called, because any changes made to it here will propagate to the files
-  # created (which is probably not intended).
-  complete_state.save_files()
-
-  infiles = complete_state.saved_state.files
-  # Add all the .isolated files.
-  isolated_hash = []
-  isolated_files = [
-    options.isolated,
-  ] + complete_state.saved_state.child_isolated_files
-  for item in isolated_files:
-    item_path = os.path.join(
-        os.path.dirname(complete_state.isolated_filepath), item)
-    # Do not use isolated_format.hash_file() here because the file is
-    # likely smallish (under 500kb) and its file size is needed.
-    with fs.open(item_path, 'rb') as f:
-      content = f.read()
-    isolated_hash.append(
-        complete_state.saved_state.algo(content).hexdigest())
-    isolated_metadata = {
-      'h': isolated_hash[-1],
-      's': len(content),
-      'priority': '0'
-    }
-    infiles[item_path] = isolated_metadata
-  return complete_state, infiles, isolated_hash
-
-
-def isolate_and_archive(trees, isolate_server, namespace):
-  """Isolates and uploads a bunch of isolated trees.
-
-  Args:
-    trees: list of pairs (Options, working directory) that describe what tree
-        to isolate. Options are processed by 'process_isolate_options'.
-    isolate_server: URL of Isolate Server to upload to.
-    namespace: namespace to upload to.
-
-  Returns a dict {target name -> isolate hash or None}, where target name is
-  the name of a *.isolated file without the extension (e.g. 'base_unittests').
-
-  Has multiple failure modes:
-    * If the upload fails due to a server or network error, returns None.
-    * If some *.isolate file is incorrect (but the rest of them are fine and were
-      successfully uploaded), returns a dict where the value of the entry
-      corresponding to invalid *.isolate file is None.
-  """
-  if not trees:
-    return {}
-
-  # Helper generator to avoid materializing the full (huge) list of files until
-  # the very end (in upload_tree).
-  def emit_files(root_dir, files):
-    for path, meta in files.iteritems():
-      yield (os.path.join(root_dir, path), meta)
-
-  # Process all *.isolate files; it involves parsing, file system traversal and
-  # hashing. The result is a list of generators that produce files to upload
-  # and the mapping {target name -> hash of *.isolated file} to return from
-  # this function.
-  files_generators = []
-  isolated_hashes = {}
-  with tools.Profiler('Isolate'):
-    for opts, cwd in trees:
-      target_name = os.path.splitext(os.path.basename(opts.isolated))[0]
-      try:
-        complete_state, files, isolated_hash = prepare_for_archival(opts, cwd)
-        files_generators.append(emit_files(complete_state.root_dir, files))
-        isolated_hashes[target_name] = isolated_hash[0]
-        print('%s  %s' % (isolated_hash[0], target_name))
-      except Exception:
-        logging.exception('Exception when isolating %s', target_name)
-        isolated_hashes[target_name] = None
-
-  # All bad? Nothing to upload.
-  if all(v is None for v in isolated_hashes.itervalues()):
-    return isolated_hashes
-
-  # Now upload all necessary files at once.
-  with tools.Profiler('Upload'):
-    try:
-      isolateserver.upload_tree(
-          base_url=isolate_server,
-          infiles=itertools.chain(*files_generators),
-          namespace=namespace)
-    except Exception:
-      logging.exception('Exception while uploading files')
-      return None
-
-  return isolated_hashes
-
-
-def parse_archive_command_line(args, cwd):
-  """Given list of arguments for 'archive' command returns parsed options.
-
-  Used by CMDbatcharchive to parse options passed via JSON. See also CMDarchive.
-  """
-  parser = optparse.OptionParser()
-  add_isolate_options(parser)
-  add_subdir_option(parser)
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
-  process_isolate_options(parser, options, cwd)
-  return options
-
-
-### Commands.
-
-
-def CMDarchive(parser, args):
-  """Creates a .isolated file and uploads the tree to an isolate server.
-
-  All the files listed in the .isolated file are put in the isolate server
-  cache via isolateserver.py.
-  """
-  add_isolate_options(parser)
-  add_subdir_option(parser)
-  isolateserver.add_isolate_server_options(parser)
-  auth.add_auth_options(parser)
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
-  process_isolate_options(parser, options)
-  auth.process_auth_options(parser, options)
-  isolateserver.process_isolate_server_options(parser, options, True, True)
-  result = isolate_and_archive(
-      [(options, unicode(os.getcwd()))],
-      options.isolate_server,
-      options.namespace)
-  if result is None:
-    return EXIT_CODE_UPLOAD_ERROR
-  assert len(result) == 1, result
-  if result.values()[0] is None:
-    return EXIT_CODE_ISOLATE_ERROR
-  return 0
-
-
-@subcommand.usage('-- GEN_JSON_1 GEN_JSON_2 ...')
-def CMDbatcharchive(parser, args):
-  """Archives multiple isolated trees at once.
-
-  Using a single command instead of multiple sequential invocations cuts
-  redundant work when isolated trees share common files (e.g. file hashes are
-  checked only once, their presence on the server is checked only once, and
-  so on).
-
-  Takes a list of paths to *.isolated.gen.json files that describe what trees to
-  isolate. The format of these files is:
-  {
-    "version": 1,
-    "dir": <absolute path to a directory all other paths are relative to>,
-    "args": [list of command line arguments for single 'archive' command]
-  }
-  """
-  isolateserver.add_isolate_server_options(parser)
-  isolateserver.add_archive_options(parser)
-  auth.add_auth_options(parser)
-  parser.add_option(
-      '--dump-json',
-      metavar='FILE',
-      help='Write isolated hashes of archived trees to this file as JSON')
-  options, args = parser.parse_args(args)
-  auth.process_auth_options(parser, options)
-  isolateserver.process_isolate_server_options(parser, options, True, True)
-
-  # Validate all incoming options, prepare what needs to be archived as a list
-  # of tuples (archival options, working directory).
-  work_units = []
-  for gen_json_path in args:
-    # Validate JSON format of a *.isolated.gen.json file.
-    try:
-      data = tools.read_json(gen_json_path)
-    except IOError as e:
-      parser.error('Failed to open %s: %s' % (gen_json_path, e))
-    if data.get('version') != ISOLATED_GEN_JSON_VERSION:
-      parser.error('Invalid version in %s' % gen_json_path)
-    cwd = data.get('dir')
-    if not isinstance(cwd, unicode) or not fs.isdir(cwd):
-      parser.error('Invalid dir in %s' % gen_json_path)
-    args = data.get('args')
-    if (not isinstance(args, list) or
-        not all(isinstance(x, unicode) for x in args)):
-      parser.error('Invalid args in %s' % gen_json_path)
-    # Convert command line (embedded in JSON) to Options object.
-    work_units.append((parse_archive_command_line(args, cwd), cwd))
-
-  # Perform the archival, all at once.
-  isolated_hashes = isolate_and_archive(
-      work_units, options.isolate_server, options.namespace)
-
-  # TODO(vadimsh): isolate_and_archive returns None on upload failure; there's
-  # currently no way to figure out which *.isolated files from a batch were
-  # successfully uploaded, so consider them all failed (and emit an empty dict
-  # as the JSON result).
-  if options.dump_json:
-    tools.write_json(options.dump_json, isolated_hashes or {}, False)
-
-  if isolated_hashes is None:
-    return EXIT_CODE_UPLOAD_ERROR
-
-  # isolated_hashes[x] is None if 'x.isolate' contains an error.
-  if not all(isolated_hashes.itervalues()):
-    return EXIT_CODE_ISOLATE_ERROR
-
-  return 0
-
-
-def CMDcheck(parser, args):
-  """Checks that all the inputs are present and generates .isolated."""
-  add_isolate_options(parser)
-  add_subdir_option(parser)
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
-  process_isolate_options(parser, options)
-
-  complete_state = load_complete_state(
-      options, os.getcwd(), options.subdir, False)
-
-  # Nothing is done specifically. Just store the result and state.
-  complete_state.save_files()
-  return 0
-
-
-def CMDremap(parser, args):
-  """Creates a directory with all the dependencies mapped into it.
-
-  Useful to test manually why a test is failing. The target executable is not
-  run.
-  """
-  add_isolate_options(parser)
-  add_outdir_options(parser)
-  add_skip_refresh_option(parser)
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported argument: %s' % args)
-  cwd = os.getcwd()
-  process_isolate_options(parser, options, cwd, require_isolated=False)
-  process_outdir_options(parser, options, cwd)
-  complete_state = load_complete_state(options, cwd, None, options.skip_refresh)
-
-  file_path.ensure_tree(options.outdir)
-  print('Remapping into %s' % options.outdir)
-  if fs.listdir(options.outdir):
-    raise ExecutionError('Can\'t remap in a non-empty directory')
-
-  create_isolate_tree(
-      options.outdir, complete_state.root_dir, complete_state.saved_state.files,
-      complete_state.saved_state.relative_cwd,
-      complete_state.saved_state.read_only)
-  if complete_state.isolated_filepath:
-    complete_state.save_files()
-  return 0
-
-
-@subcommand.usage('-- [extra arguments]')
-def CMDrun(parser, args):
-  """Runs the test executable in an isolated (temporary) directory.
-
-  All the dependencies are mapped into the temporary directory and the
-  directory is cleaned up after the target exits.
-
-  Argument processing stops at -- and these arguments are appended to the
-  command line of the target to run. For example, use:
-    isolate.py run --isolated foo.isolated -- --gtest_filter=Foo.Bar
-  """
-  add_isolate_options(parser)
-  add_skip_refresh_option(parser)
-  options, args = parser.parse_args(args)
-  process_isolate_options(parser, options, require_isolated=False)
-  complete_state = load_complete_state(
-      options, os.getcwd(), None, options.skip_refresh)
-  cmd = complete_state.saved_state.command + args
-  if not cmd:
-    raise ExecutionError('No command to run.')
-  cmd = tools.fix_python_path(cmd)
-
-  outdir = run_isolated.make_temp_dir(
-      u'isolate-%s' % datetime.date.today(),
-      os.path.dirname(complete_state.root_dir))
-  try:
-    # TODO(maruel): Use run_isolated.run_tha_test().
-    cwd = create_isolate_tree(
-        outdir, complete_state.root_dir, complete_state.saved_state.files,
-        complete_state.saved_state.relative_cwd,
-        complete_state.saved_state.read_only)
-    file_path.ensure_command_has_abs_path(cmd, cwd)
-    logging.info('Running %s, cwd=%s' % (cmd, cwd))
-    try:
-      result = subprocess.call(cmd, cwd=cwd)
-    except OSError:
-      sys.stderr.write(
-          'Failed to execute the command; the executable is missing, maybe you\n'
-          'forgot to map it in the .isolate file?\n  %s\n  in %s\n' %
-          (' '.join(cmd), cwd))
-      result = 1
-  finally:
-    file_path.rmtree(outdir)
-
-  if complete_state.isolated_filepath:
-    complete_state.save_files()
-  return result
-
-
-def _process_variable_arg(option, opt, _value, parser):
-  """Called by OptionParser to process a --<foo>-variable argument."""
-  if not parser.rargs:
-    raise optparse.OptionValueError(
-        'Please use %s FOO=BAR or %s FOO BAR' % (opt, opt))
-  k = parser.rargs.pop(0)
-  variables = getattr(parser.values, option.dest)
-  if '=' in k:
-    k, v = k.split('=', 1)
-  else:
-    if not parser.rargs:
-      raise optparse.OptionValueError(
-          'Please use %s FOO=BAR or %s FOO BAR' % (opt, opt))
-    v = parser.rargs.pop(0)
-  if not re.match('^' + isolate_format.VALID_VARIABLE + '$', k):
-    raise optparse.OptionValueError(
-        'Variable \'%s\' doesn\'t respect format \'%s\'' %
-        (k, isolate_format.VALID_VARIABLE))
-  variables.append((k, v.decode('utf-8')))
-
-
-def add_variable_option(parser):
-  """Adds --isolated and --<foo>-variable to an OptionParser."""
-  parser.add_option(
-      '-s', '--isolated',
-      metavar='FILE',
-      help='.isolated file to generate or read')
-  # Keep for compatibility. TODO(maruel): Remove once not used anymore.
-  parser.add_option(
-      '-r', '--result',
-      dest='isolated',
-      help=optparse.SUPPRESS_HELP)
-  is_win = sys.platform in ('win32', 'cygwin')
-  # There are really 4 kinds of variables:
-  # - path variables, like DEPTH or PRODUCT_DIR that should be
-  #   replaced opportunistically when tracing tests.
-  # - extraneous things like EXECUTABLE_SUFFIX.
-  # - configuration variables that are to be used in deducing the matrix to
-  #   reduce.
-  # - unrelated variables that are used as command flags for example.
-  parser.add_option(
-      '--config-variable',
-      action='callback',
-      callback=_process_variable_arg,
-      default=[],
-      dest='config_variables',
-      metavar='FOO BAR',
-      help='Config variables are used to determine which conditions should be '
-           'matched when loading a .isolate file, default: %default. '
-            'All 3 kinds of variables are persistent across calls; they are '
-            'saved inside <.isolated>.state')
-  parser.add_option(
-      '--path-variable',
-      action='callback',
-      callback=_process_variable_arg,
-      default=[],
-      dest='path_variables',
-      metavar='FOO BAR',
-      help='Path variables are used to replace file paths when loading a '
-           '.isolate file, default: %default')
-  parser.add_option(
-      '--extra-variable',
-      action='callback',
-      callback=_process_variable_arg,
-      default=[('EXECUTABLE_SUFFIX', '.exe' if is_win else '')],
-      dest='extra_variables',
-      metavar='FOO BAR',
-      help='Extraneous variables are replaced on the \'command\' entry and on '
-           'paths in the .isolate file but are not considered relative paths.')
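-# An illustrative invocation combining the three variable flags; the file names
-# and values below are hypothetical (both FOO=BAR and FOO BAR forms work):
-#
-#   isolate.py check --isolate foo.isolate --isolated foo.isolated \
-#       --config-variable OS=linux --path-variable DEPTH ../.. \
-#       --extra-variable version=1.0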
-
-
-def add_isolate_options(parser):
-  """Adds --isolate, --isolated, --out and --<foo>-variable options."""
-  isolateserver.add_archive_options(parser)
-  group = optparse.OptionGroup(parser, 'Common options')
-  group.add_option(
-      '-i', '--isolate',
-      metavar='FILE',
-      help='.isolate file to load the dependency data from')
-  add_variable_option(group)
-  group.add_option(
-      '--ignore_broken_items', action='store_true',
-      default=bool(os.environ.get('ISOLATE_IGNORE_BROKEN_ITEMS')),
-      help='Indicates that invalid entries in the isolated file should '
-           'only be logged and not stop processing. Defaults to True if '
-           'the env var ISOLATE_IGNORE_BROKEN_ITEMS is set')
-  parser.add_option_group(group)
-
-
-def add_subdir_option(parser):
-  parser.add_option(
-      '--subdir',
-      help='Filters to a subdirectory. Its behavior changes depending on whether it '
-           'is a relative path as a string or as a path variable. Path '
-           'variables are always keyed from the directory containing the '
-           '.isolate file. Anything else is keyed on the root directory.')
-
-
-def add_skip_refresh_option(parser):
-  parser.add_option(
-      '--skip-refresh', action='store_true',
-      help='Skip reading .isolate file and do not refresh the hash of '
-           'dependencies')
-
-
-def add_outdir_options(parser):
-  """Adds --outdir, which is orthogonal to --isolate-server.
-
-  Note: On upload, separate commands are used between 'archive' and 'hashtable'.
-  On 'download', the same command can download from either an isolate server or
-  a file system.
-  """
-  parser.add_option(
-      '-o', '--outdir', metavar='DIR',
-      help='Directory used to recreate the tree.')
-
-
-def process_outdir_options(parser, options, cwd):
-  if not options.outdir:
-    parser.error('--outdir is required.')
-  if file_path.is_url(options.outdir):
-    parser.error('Can\'t use a URL for --outdir.')
-  options.outdir = unicode(options.outdir).replace('/', os.path.sep)
-  # outdir doesn't need native path case since tracing is never done from there.
-  options.outdir = os.path.abspath(
-      os.path.normpath(os.path.join(cwd, options.outdir)))
-  # In theory, we'd create the directory outdir right away. Defer doing it in
-  # case there are errors in the command line.
-
-
-def process_isolate_options(parser, options, cwd=None, require_isolated=True):
-  """Handles options added with 'add_isolate_options'.
-
-  Mutates |options| in place by normalizing the path to the isolate file, the
-  values of variables, etc.
-  """
-  cwd = file_path.get_native_path_case(unicode(cwd or os.getcwd()))
-
-  # Parse --isolated option.
-  if options.isolated:
-    options.isolated = os.path.abspath(
-        os.path.join(cwd, unicode(options.isolated).replace('/', os.path.sep)))
-  if require_isolated and not options.isolated:
-    parser.error('--isolated is required.')
-  if options.isolated and not options.isolated.endswith('.isolated'):
-    parser.error('--isolated value must end with \'.isolated\'')
-
-  # Processes all the --<foo>-variable flags.
-  def try_make_int(s):
-    """Converts a value to int if possible, converts to unicode otherwise."""
-    try:
-      return int(s)
-    except ValueError:
-      return s.decode('utf-8')
-  options.config_variables = dict(
-      (k, try_make_int(v)) for k, v in options.config_variables)
-  options.path_variables = dict(options.path_variables)
-  options.extra_variables = dict(options.extra_variables)
-
-  # Normalize the path in --isolate.
-  if options.isolate:
-    # TODO(maruel): Work with non-ASCII.
-    # The path must be in native path case for tracing purposes.
-    options.isolate = unicode(options.isolate).replace('/', os.path.sep)
-    options.isolate = os.path.abspath(os.path.join(cwd, options.isolate))
-    options.isolate = file_path.get_native_path_case(options.isolate)
-
-
-def main(argv):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  parser = logging_utils.OptionParserWithLogging(
-        version=__version__, verbose=int(os.environ.get('ISOLATE_DEBUG', 0)))
-  try:
-    return dispatcher.execute(parser, argv)
-  except isolated_format.MappingError as e:
-    print >> sys.stderr, 'Failed to find an input file: %s' % e
-    return 1
-  except ExecutionError as e:
-    print >> sys.stderr, 'Execution failure: %s' % e
-    return 1
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/swarming_client/isolate_format.py b/tools/swarming_client/isolate_format.py
deleted file mode 100644
index 4d26982..0000000
--- a/tools/swarming_client/isolate_format.py
+++ /dev/null
@@ -1,663 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Contains logic to parse .isolate files.
-
-This module doesn't touch the file system. It's the job of the client code to do
-I/O on behalf of this module.
-
-See more information at
-  https://code.google.com/p/swarming/wiki/IsolateDesign
-  https://code.google.com/p/swarming/wiki/IsolateUserGuide
-"""
-
-import ast
-import itertools
-import logging
-import os
-import posixpath
-import re
-import sys
-
-from utils import fs
-
-
-# Valid variable name.
-VALID_VARIABLE = '[A-Za-z_][A-Za-z_0-9]*'
-
-
-class IsolateError(ValueError):
-  """Generic failure to load a .isolate file."""
-  pass
-
-
-def determine_root_dir(relative_root, infiles):
-  """For a list of infiles, determines the deepest root directory that is
-  referenced indirectly.
-
-  All arguments must be using os.path.sep.
-  """
-  # The trick used to determine the root directory is to look at how far back
-  # up each input path reaches.
-  deepest_root = relative_root
-  for i in infiles:
-    x = relative_root
-    while i.startswith('..' + os.path.sep):
-      i = i[3:]
-      assert not i.startswith(os.path.sep)
-      x = os.path.dirname(x)
-    if deepest_root.startswith(x):
-      deepest_root = x
-  logging.info(
-      'determine_root_dir(%s, %d files) -> %s',
-      relative_root, len(infiles), deepest_root)
-  return deepest_root
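-# A small worked example with POSIX-style, hypothetical paths:
-#   determine_root_dir('/a/b/c', ['../../foo/bar', 'baz'])
-# walks two levels up for '../../foo/bar', so the returned root is '/a'.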
-
-
-def replace_variable(part, variables):
-  m = re.match(r'<\((' + VALID_VARIABLE + ')\)', part)
-  if m:
-    if m.group(1) not in variables:
-      raise IsolateError(
-        'Variable "%s" was not found in %s.\nDid you forget to specify '
-        '--path-variable?' % (m.group(1), variables))
-    return str(variables[m.group(1)])
-  return part
-
-
-def eval_variables(item, variables):
-  """Replaces the .isolate variables in a string item.
-
-  Note that the .isolate format is a subset of the .gyp dialect.
-  """
-  return ''.join(
-      replace_variable(p, variables)
-      for p in re.split(r'(<\(' + VALID_VARIABLE + '\))', item))
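-# A quick illustration with hypothetical values:
-#   eval_variables('<(PRODUCT_DIR)/data/foo.txt', {'PRODUCT_DIR': 'out'})
-#   -> 'out/data/foo.txt'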
-
-
-def pretty_print(variables, stdout):
-  """Outputs a .isolate file from the decoded variables.
-
-  The .isolate format is GYP compatible.
-
-  Similar to pprint.pprint() but with NIH syndrome.
-  """
-  # Order the dictionary keys by these keys in priority.
-  ORDER = ('variables', 'condition', 'command', 'files', 'read_only')
-
-  def sorting_key(x):
-    """Gives priority to 'most important' keys before the others."""
-    if x in ORDER:
-      return str(ORDER.index(x))
-    return x
-
-  def loop_list(indent, items):
-    for item in items:
-      if isinstance(item, basestring):
-        stdout.write('%s\'%s\',\n' % (indent, item))
-      elif isinstance(item, dict):
-        stdout.write('%s{\n' % indent)
-        loop_dict(indent + '  ', item)
-        stdout.write('%s},\n' % indent)
-      elif isinstance(item, list):
-        # A list inside a list will write the first item embedded.
-        stdout.write('%s[' % indent)
-        for index, i in enumerate(item):
-          if isinstance(i, basestring):
-            stdout.write(
-                '\'%s\', ' % i.replace('\\', '\\\\').replace('\'', '\\\''))
-          elif isinstance(i, dict):
-            stdout.write('{\n')
-            loop_dict(indent + '  ', i)
-            if index != len(item) - 1:
-              x = ', '
-            else:
-              x = ''
-            stdout.write('%s}%s' % (indent, x))
-          else:
-            assert False
-        stdout.write('],\n')
-      else:
-        assert False
-
-  def loop_dict(indent, items):
-    for key in sorted(items, key=sorting_key):
-      item = items[key]
-      stdout.write("%s'%s': " % (indent, key))
-      if isinstance(item, dict):
-        stdout.write('{\n')
-        loop_dict(indent + '  ', item)
-        stdout.write(indent + '},\n')
-      elif isinstance(item, list):
-        stdout.write('[\n')
-        loop_list(indent + '  ', item)
-        stdout.write(indent + '],\n')
-      elif isinstance(item, basestring):
-        stdout.write(
-            '\'%s\',\n' % item.replace('\\', '\\\\').replace('\'', '\\\''))
-      elif isinstance(item, (int, bool)) or item is None:
-        stdout.write('%s,\n' % item)
-      else:
-        assert False, item
-
-  stdout.write('{\n')
-  loop_dict('  ', variables)
-  stdout.write('}\n')
-
-
-def print_all(comment, data, stream):
-  """Prints a complete .isolate file and its top-level file comment into a
-  stream.
-  """
-  if comment:
-    stream.write(comment)
-  pretty_print(data, stream)
-
-
-def extract_comment(content):
-  """Extracts file level comment."""
-  out = []
-  for line in content.splitlines(True):
-    if line.startswith('#'):
-      out.append(line)
-    else:
-      break
-  return ''.join(out)
-
-
-def eval_content(content):
-  """Evaluates a python file and return the value defined in it.
-
-  Used in practice for .isolate files.
-  """
-  globs = {'__builtins__': None}
-  locs = {}
-  try:
-    value = eval(content, globs, locs)
-  except TypeError as e:
-    e.args = list(e.args) + [content]
-    raise
-  assert locs == {}, locs
-  assert globs == {'__builtins__': None}, globs
-  return value
-
-
-def match_configs(expr, config_variables, all_configs):
-  """Returns the list of values from |values| that match the condition |expr|.
-
-  Arguments:
-    expr: string that is evaluatable with eval(). It is a GYP condition.
-    config_variables: list of the name of the variables.
-    all_configs: list of the list of possible values.
-
-  If a variable is not referenced at all, it is marked as unbounded (free) with
-  a value set to None.
-  """
-  # It is more than just eval'ing the variable, it needs to be double checked to
-  # see if the variable is referenced at all. If not, the variable is free
-  # (unbounded).
-  # TODO(maruel): Use the intelligent way by inspecting expr instead of doing
-  # trial and error to figure out which variable is bound.
-  combinations = []
-  for bound_variables in itertools.product(
-      (True, False), repeat=len(config_variables)):
-    # Add the combination of variables bound.
-    combinations.append(
-        (
-          [c for c, b in zip(config_variables, bound_variables) if b],
-          set(
-            tuple(v if b else None for v, b in zip(line, bound_variables))
-            for line in all_configs)
-        ))
-
-  out = []
-  for variables, configs in combinations:
-    # Strip variables and see if expr can still be evaluated.
-    for values in configs:
-      globs = {'__builtins__': None}
-      globs.update(zip(variables, (v for v in values if v is not None)))
-      try:
-        assertion = eval(expr, globs, {})
-      except NameError:
-        continue
-      if not isinstance(assertion, bool):
-        raise IsolateError('Invalid condition')
-      if assertion:
-        out.append(values)
-  return out
-
-
-def verify_variables(variables):
-  """Verifies the |variables| dictionary is in the expected format."""
-  VALID_VARIABLES = [
-    'command',
-    'files',
-    'read_only',
-  ]
-  assert isinstance(variables, dict), variables
-  assert set(VALID_VARIABLES).issuperset(set(variables)), variables.keys()
-  for name, value in variables.iteritems():
-    if name == 'read_only':
-      assert value in (0, 1, 2, None), value
-    else:
-      assert isinstance(value, list), value
-      assert all(isinstance(i, basestring) for i in value), value
-
-
-def verify_ast(expr, variables_and_values):
-  """Verifies that |expr| is of the form
-  expr ::= expr ( "or" | "and" ) expr
-         | identifier "==" ( string | int )
-  Also collects the variable identifiers and string/int values in the dict
-  |variables_and_values|, in the form {'var': set([val1, val2, ...]), ...}.
-  """
-  assert isinstance(expr, (ast.BoolOp, ast.Compare))
-  if isinstance(expr, ast.BoolOp):
-    assert isinstance(expr.op, (ast.And, ast.Or))
-    for subexpr in expr.values:
-      verify_ast(subexpr, variables_and_values)
-  else:
-    assert isinstance(expr.left.ctx, ast.Load)
-    assert len(expr.ops) == 1
-    assert isinstance(expr.ops[0], ast.Eq)
-    var_values = variables_and_values.setdefault(expr.left.id, set())
-    rhs = expr.comparators[0]
-    assert isinstance(rhs, (ast.Str, ast.Num))
-    var_values.add(rhs.n if isinstance(rhs, ast.Num) else rhs.s)
-
-
-def verify_condition(condition, variables_and_values):
-  """Verifies the |condition| dictionary is in the expected format.
-  See verify_ast() for the meaning of |variables_and_values|.
-  """
-  VALID_INSIDE_CONDITION = ['variables']
-  assert isinstance(condition, list), condition
-  assert len(condition) == 2, condition
-  expr, then = condition
-
-  test_ast = compile(expr, '<condition>', 'eval', ast.PyCF_ONLY_AST)
-  verify_ast(test_ast.body, variables_and_values)
-
-  assert isinstance(then, dict), then
-  assert set(VALID_INSIDE_CONDITION).issuperset(set(then)), then.keys()
-  if not 'variables' in then:
-    raise IsolateError('Missing \'variables\' in condition %s' % condition)
-  verify_variables(then['variables'])
-
-
-def verify_root(value, variables_and_values):
-  """Verifies that |value| is the parsed form of a valid .isolate file.
-
-  See verify_ast() for the meaning of |variables_and_values|.
-  """
-  VALID_ROOTS = ['includes', 'conditions', 'variables']
-  assert isinstance(value, dict), value
-  assert set(VALID_ROOTS).issuperset(set(value)), value.keys()
-
-  includes = value.get('includes', [])
-  assert isinstance(includes, list), includes
-  for include in includes:
-    assert isinstance(include, basestring), include
-
-  conditions = value.get('conditions', [])
-  assert isinstance(conditions, list), conditions
-  for condition in conditions:
-    verify_condition(condition, variables_and_values)
-
-  variables = value.get('variables', {})
-  verify_variables(variables)
-
-
-def get_folders(values_dict):
-  """Returns a dict of all the folders in the given value_dict."""
-  return dict(
-    (item, configs) for (item, configs) in values_dict.iteritems()
-    if item.endswith('/')
-  )
-
-
-class ConfigSettings(object):
-  """Represents the dependency variables for a single build configuration.
-
-  The structure is immutable.
-
-  .command and .isolate_dir describe how to run the command. .isolate_dir uses
-      the OS' native path separator. It must be an absolute path; it is the
-      directory from which to start the command.
-  .files is the list of dependencies. The items use '/' as a path separator.
-  .read_only describes how to map the files.
-  """
-  def __init__(self, values, isolate_dir):
-    verify_variables(values)
-    if isolate_dir is None:
-      # It must be an empty object if isolate_dir is None.
-      assert values == {}, values
-    else:
-      # Otherwise, the path must be absolute.
-      assert os.path.isabs(isolate_dir), isolate_dir
-
-    self.files = sorted(values.get('files', []))
-    self.command = values.get('command', [])[:]
-    self.isolate_dir = isolate_dir
-    self.read_only = values.get('read_only')
-
-  def union(self, rhs):
-    """Merges two config settings together into a new instance.
-
-    If either object is the empty object, no new instance is created; self or
-    rhs is returned instead.
-
-    self has priority over rhs for .command. Use the same .isolate_dir as the
-    one having a .command.
-
-    Dependencies listed in rhs are path adjusted ONLY if they don't start with
-    a path variable, e.g. the characters '<('.
-    """
-    # When an object has .isolate_dir == None, it means it is the empty object.
-    if rhs.isolate_dir is None:
-      return self
-    if self.isolate_dir is None:
-      return rhs
-
-    if sys.platform == 'win32':
-      assert self.isolate_dir[0].lower() == rhs.isolate_dir[0].lower()
-
-    # Takes the difference between the two isolate_dir. Note that while
-    # isolate_dir is in native path case, all other references are in posix.
-    l_rel_cwd, r_rel_cwd = self.isolate_dir, rhs.isolate_dir
-    if self.command or rhs.command:
-      use_rhs = bool(not self.command and rhs.command)
-    else:
-      # If self doesn't define any file, use rhs.
-      use_rhs = not bool(self.files)
-    if use_rhs:
-      # Rebase files in rhs.
-      l_rel_cwd, r_rel_cwd = r_rel_cwd, l_rel_cwd
-
-    rebase_path = os.path.relpath(r_rel_cwd, l_rel_cwd).replace(
-        os.path.sep, '/')
-    def rebase_item(f):
-      if f.startswith('<(') or rebase_path == '.':
-        return f
-      return posixpath.join(rebase_path, f)
-
-    def map_both(l, r):
-      """Rebase items in either lhs or rhs, as needed."""
-      if use_rhs:
-        l, r = r, l
-      return sorted(l + map(rebase_item, r))
-
-    var = {
-      'command': self.command or rhs.command,
-      'files': map_both(self.files, rhs.files),
-      'read_only': rhs.read_only if self.read_only is None else self.read_only,
-    }
-    return ConfigSettings(var, l_rel_cwd)
-
-  def flatten(self):
-    """Converts the object into a dict."""
-    out = {}
-    if self.command:
-      out['command'] = self.command
-    if self.files:
-      out['files'] = self.files
-    if self.read_only is not None:
-      out['read_only'] = self.read_only
-    # TODO(maruel): Probably better to not output it if command is None?
-    if self.isolate_dir is not None:
-      out['isolate_dir'] = self.isolate_dir
-    return out
-
-  def __str__(self):
-    """Returns a short representation useful for debugging."""
-    files = ''.join('\n    ' + f for f in self.files)
-    return 'ConfigSettings(%s, %s, %s, %s)' % (
-        self.command,
-        self.isolate_dir,
-        self.read_only,
-        files or '[]')
-
-
-def _safe_index(l, k):
-  try:
-    return l.index(k)
-  except ValueError:
-    return None
-
-
-def _get_map_keys(dest_keys, in_keys):
-  """Returns a tuple of the indexes of each item in in_keys found in dest_keys.
-
-  For example, if in_keys is ('A', 'C') and dest_keys is ('A', 'B', 'C'), the
-  return value will be (0, None, 1).
-  """
-  return tuple(_safe_index(in_keys, k) for k in dest_keys)
-
-
-def _map_keys(mapping, items):
-  """Returns a tuple with items placed at mapping index.
-
-  For example, if mapping is (1, None, 0) and items is ('a', 'b'), it will
-  return ('b', None, 'a').
-  """
-  return tuple(items[i] if i != None else None for i in mapping)
-
-
-class Configs(object):
-  """Represents a processed .isolate file.
-
-  Stores the file in a processed way, split by configuration.
-
-  At this point, we don't know all the possibilities. So we keep a partial view
-  of what we have so far.
-
-  This class doesn't hold isolate_dir, since it is dependent on the final
-  configuration selected. It is implicitly dependent on which .isolate defines
-  the 'command' that will take effect.
-  """
-  def __init__(self, file_comment, config_variables):
-    self.file_comment = file_comment
-    # Contains the names of the config variables seen while processing
-    # .isolate file(s). The order is important since the same order is used for
-    # keys in self._by_config.
-    assert isinstance(config_variables, tuple)
-    assert all(isinstance(c, basestring) for c in config_variables), (
-        config_variables)
-    config_variables = tuple(config_variables)
-    assert tuple(sorted(config_variables)) == config_variables, config_variables
-    self._config_variables = config_variables
-    # The keys of _by_config are tuples of values for each of the items in
-    # self._config_variables. A None item in the list of the key means the value
-    # is unbounded.
-    self._by_config = {}
-
-  @property
-  def config_variables(self):
-    return self._config_variables
-
-  def get_config(self, config):
-    """Returns all configs that matches this config as a single ConfigSettings.
-
-    Returns an empty ConfigSettings if none apply.
-    """
-    # TODO(maruel): Fix ordering based on the bounded values. The keys are not
-    # necessarily sorted in the way that makes sense, they are alphabetically
-    # sorted. It is important because the left-most takes precedence.
-    out = ConfigSettings({}, None)
-    for k, v in sorted(self._by_config.iteritems()):
-      if all(i == j or j is None for i, j in zip(config, k)):
-        out = out.union(v)
-    return out
-
-  def set_config(self, key, value):
-    """Sets the ConfigSettings for this key.
-
-    The key is a tuple of bounded or unbounded variables. The global variable
-    is the key where all values are unbounded, e.g.:
-      (None,) * len(self._config_variables)
-    """
-    assert key not in self._by_config, (key, self._by_config.keys())
-    assert isinstance(key, tuple)
-    assert len(key) == len(self._config_variables), (
-        key, self._config_variables)
-    assert isinstance(value, ConfigSettings)
-    self._by_config[key] = value
-
-  def union(self, rhs):
-    """Returns a new Configs instance, the union of variables from self and rhs.
-
-    Uses self.file_comment if available, otherwise rhs.file_comment.
-    It keeps config_variables sorted in the output.
-    """
-    # Merge the keys of config_variables for each Configs instance. All the new
-    # variables will become unbounded. This requires realigning the keys.
-    config_variables = tuple(sorted(
-        set(self.config_variables) | set(rhs.config_variables)))
-    out = Configs(self.file_comment or rhs.file_comment, config_variables)
-    mapping_lhs = _get_map_keys(out.config_variables, self.config_variables)
-    mapping_rhs = _get_map_keys(out.config_variables, rhs.config_variables)
-    lhs_config = dict(
-        (_map_keys(mapping_lhs, k), v) for k, v in self._by_config.iteritems())
-    # pylint: disable=W0212
-    rhs_config = dict(
-        (_map_keys(mapping_rhs, k), v) for k, v in rhs._by_config.iteritems())
-
-    for key in set(lhs_config) | set(rhs_config):
-      l = lhs_config.get(key)
-      r = rhs_config.get(key)
-      out.set_config(key, l.union(r) if (l and r) else (l or r))
-    return out
-
-  def flatten(self):
-    """Returns a flat dictionary representation of the configuration.
-    """
-    return dict((k, v.flatten()) for k, v in self._by_config.iteritems())
-
-  def __str__(self):
-    return 'Configs(%s,%s)' % (
-      self._config_variables,
-      ''.join('\n  %s' % str(f) for f in self._by_config))
-
-
-def load_included_isolate(isolate_dir, isolate_path):
-  if os.path.isabs(isolate_path):
-    raise IsolateError(
-        'Failed to load configuration; absolute include path \'%s\'' %
-        isolate_path)
-  included_isolate = os.path.normpath(os.path.join(isolate_dir, isolate_path))
-  if sys.platform == 'win32':
-    if included_isolate[0].lower() != isolate_dir[0].lower():
-      raise IsolateError(
-          'Can\'t reference a .isolate file from another drive')
-  with fs.open(included_isolate, 'r') as f:
-    return load_isolate_as_config(
-        os.path.dirname(included_isolate),
-        eval_content(f.read()),
-        None)
-
-
-def load_isolate_as_config(isolate_dir, value, file_comment):
-  """Parses one .isolate file and returns a Configs() instance.
-
-  Arguments:
-    isolate_dir: only used to load relative includes so it doesn't depend on
-                 cwd.
-    value: is the loaded dictionary that was defined in the gyp file.
-    file_comment: comments found at the top of the file so it can be preserved.
-
-  The expected format is strict; anything diverging from the format below will
-  throw an assert:
-  {
-    'includes': [
-      'foo.isolate',
-    ],
-    'conditions': [
-      ['OS=="vms" and foo=42', {
-        'variables': {
-          'command': [
-            ...
-          ],
-          'files': [
-            ...
-          ],
-          'read_only': 0,
-        },
-      }],
-      ...
-    ],
-    'variables': {
-      ...
-    },
-  }
-  """
-  assert os.path.isabs(isolate_dir), isolate_dir
-  if any(len(cond) == 3 for cond in value.get('conditions', [])):
-    raise IsolateError('Using \'else\' is not supported anymore.')
-  variables_and_values = {}
-  verify_root(value, variables_and_values)
-  if variables_and_values:
-    config_variables, config_values = zip(
-        *sorted(variables_and_values.iteritems()))
-    all_configs = list(itertools.product(*config_values))
-  else:
-    config_variables = ()
-    all_configs = []
-
-  isolate = Configs(file_comment, config_variables)
-
-  # Add global variables. The global variables are on the empty tuple key.
-  isolate.set_config(
-      (None,) * len(config_variables),
-      ConfigSettings(value.get('variables', {}), isolate_dir))
-
-  # Add configuration-specific variables.
-  for expr, then in value.get('conditions', []):
-    configs = match_configs(expr, config_variables, all_configs)
-    new = Configs(None, config_variables)
-    for config in configs:
-      new.set_config(config, ConfigSettings(then['variables'], isolate_dir))
-    isolate = isolate.union(new)
-
-  # If the .isolate contains command, ignore any command in child .isolate.
-  root_has_command = any(c.command for c in isolate._by_config.itervalues())
-
-  # Load the includes. Process them in reverse so the last one takes precedence.
-  for include in reversed(value.get('includes', [])):
-    included = load_included_isolate(isolate_dir, include)
-    if root_has_command:
-      # Strip any command in the imported isolate. This is because the chosen
-      # command is not related to the one in the top-most .isolate, since the
-      # configuration is flattened.
-      for c in included._by_config.itervalues():
-        c.command = []
-    isolate = isolate.union(included)
-
-  return isolate
-
-
-def load_isolate_for_config(isolate_dir, content, config_variables):
-  """Loads the .isolate file and returns the information unprocessed but
-  filtered for the specific OS.
-
-  Returns:
-    tuple of command, dependencies, read_only flag, isolate_dir.
-    The dependencies are fixed to use os.path.sep.
-  """
-  # Load the .isolate file, process its conditions, retrieve the command and
-  # dependencies.
-  isolate = load_isolate_as_config(isolate_dir, eval_content(content), None)
-  try:
-    config_name = tuple(
-        config_variables[var] for var in isolate.config_variables)
-  except KeyError:
-    raise IsolateError(
-        'These configuration variables were missing from the command line: %s' %
-        ', '.join(
-            sorted(set(isolate.config_variables) - set(config_variables))))
-
-  # A configuration is to be created with all the combinations of free
-  # variables.
-  config = isolate.get_config(config_name)
-  dependencies = [f.replace('/', os.path.sep) for f in config.files]
-  return config.command, dependencies, config.read_only, config.isolate_dir
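The module deleted above implements the .isolate condition/variable model described
in its docstrings. A minimal usage sketch, assuming the module is importable as
isolate_format and using made-up file names and config values:

  import os
  import isolate_format

  # Parsed .isolate content in the strict format documented in
  # load_isolate_as_config(): one unconditional file plus one Linux-only file.
  value = {
    'conditions': [
      ['OS=="linux"', {
        'variables': {'files': ['data/linux_only.txt']},
      }],
    ],
    'variables': {
      'command': ['run_tests.py'],
      'files': ['data/common.txt'],
    },
  }

  configs = isolate_format.load_isolate_as_config(os.path.abspath('.'), value, None)
  settings = configs.get_config(('linux',))
  # settings.command == ['run_tests.py']
  # settings.files == ['data/common.txt', 'data/linux_only.txt']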
diff --git a/tools/swarming_client/isolated_format.py b/tools/swarming_client/isolated_format.py
deleted file mode 100644
index c828f74..0000000
--- a/tools/swarming_client/isolated_format.py
+++ /dev/null
@@ -1,576 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Understands .isolated files and can do local operations on them."""
-
-import hashlib
-import json
-import logging
-import os
-import re
-import stat
-import sys
-
-from utils import file_path
-from utils import fs
-from utils import tools
-
-
-# Version stored and expected in .isolated files.
-ISOLATED_FILE_VERSION = '1.5'
-
-
-# Chunk size to use when doing disk I/O.
-DISK_FILE_CHUNK = 1024 * 1024
-
-
-# Sadly, hashlib uses 'sha1' instead of the standard 'sha-1' so explicitly
-# specify the names here.
-SUPPORTED_ALGOS = {
-  'md5': hashlib.md5,
-  'sha-1': hashlib.sha1,
-  'sha-512': hashlib.sha512,
-}
-
-
-# Used for serialization.
-SUPPORTED_ALGOS_REVERSE = dict((v, k) for k, v in SUPPORTED_ALGOS.iteritems())
-
-SUPPORTED_FILE_TYPES = ['basic', 'ar']
-
-
-class IsolatedError(ValueError):
-  """Generic failure to load a .isolated file."""
-  pass
-
-
-class MappingError(OSError):
-  """Failed to recreate the tree."""
-  pass
-
-
-def is_valid_hash(value, algo):
-  """Returns if the value is a valid hash for the corresponding algorithm."""
-  size = 2 * algo().digest_size
-  return bool(re.match(r'^[a-fA-F0-9]{%d}$' % size, value))
-
-
-def get_hash_algo(_namespace):
-  """Return hash algorithm class to use when uploading to given |namespace|."""
-  # TODO(vadimsh): Implement this at some point.
-  return hashlib.sha1
-
-
-def is_namespace_with_compression(namespace):
-  """Returns True if given |namespace| stores compressed objects."""
-  return namespace.endswith(('-gzip', '-deflate'))
-
-
-def hash_file(filepath, algo):
-  """Calculates the hash of a file without reading it all in memory at once.
-
-  |algo| should be one of the hashlib hashing algorithms.
-  """
-  digest = algo()
-  with fs.open(filepath, 'rb') as f:
-    while True:
-      chunk = f.read(DISK_FILE_CHUNK)
-      if not chunk:
-        break
-      digest.update(chunk)
-  return digest.hexdigest()
-
-
-class IsolatedFile(object):
-  """Represents a single parsed .isolated file."""
-
-  def __init__(self, obj_hash, algo):
-    """|obj_hash| is really the sha-1 of the file."""
-    self.obj_hash = obj_hash
-    self.algo = algo
-
-    # Raw data.
-    self.data = {}
-    # An IsolatedFile instance, one per entry in the 'includes' list.
-    self.children = []
-
-    # Set once the .isolated file is loaded.
-    self._is_loaded = False
-
-  def __repr__(self):
-    return 'IsolatedFile(%s, loaded: %s)' % (self.obj_hash, self._is_loaded)
-
-  def load(self, content):
-    """Verifies the .isolated file is valid and loads this object with the json
-    data.
-    """
-    logging.debug('IsolatedFile.load(%s)' % self.obj_hash)
-    assert not self._is_loaded
-    self.data = load_isolated(content, self.algo)
-    self.children = [
-        IsolatedFile(i, self.algo) for i in self.data.get('includes', [])
-    ]
-    self._is_loaded = True
-
-  @property
-  def is_loaded(self):
-    """Returns True if 'load' was already called."""
-    return self._is_loaded
-
-
-def walk_includes(isolated):
-  """Walks IsolatedFile include graph and yields IsolatedFile objects.
-
-  Visits root node first, then recursively all children, left to right.
-  Not yet loaded nodes are considered childless.
-  """
-  yield isolated
-  for child in isolated.children:
-    for x in walk_includes(child):
-      yield x
-
-
-@tools.profile
-def expand_symlinks(indir, relfile):
-  """Follows symlinks in |relfile|, but treating symlinks that point outside the
-  build tree as if they were ordinary directories/files. Returns the final
-  symlink-free target and a list of paths to symlinks encountered in the
-  process.
-
-  The rule about symlinks outside the build tree is for the benefit of the
-  Chromium OS ebuild, which symlinks the output directory to an unrelated path
-  in the chroot.
-
-  Fails when a directory loop is detected, although in theory we could support
-  that case.
-  """
-  is_directory = relfile.endswith(os.path.sep)
-  done = indir
-  todo = relfile.strip(os.path.sep)
-  symlinks = []
-
-  while todo:
-    pre_symlink, symlink, post_symlink = file_path.split_at_symlink(done, todo)
-    if not symlink:
-      todo = file_path.fix_native_path_case(done, todo)
-      done = os.path.join(done, todo)
-      break
-    symlink_path = os.path.join(done, pre_symlink, symlink)
-    post_symlink = post_symlink.lstrip(os.path.sep)
-    # readlink doesn't exist on Windows.
-    # pylint: disable=E1101
-    target = os.path.normpath(os.path.join(done, pre_symlink))
-    symlink_target = os.readlink(symlink_path)
-    if os.path.isabs(symlink_target):
-      # Absolute paths are considered normal directories. The use case is
-      # generally someone who puts the output directory on a separate drive.
-      target = symlink_target
-    else:
-      # The symlink itself could be using the wrong path case.
-      target = file_path.fix_native_path_case(target, symlink_target)
-
-    if not os.path.exists(target):
-      raise MappingError(
-          'Symlink target doesn\'t exist: %s -> %s' % (symlink_path, target))
-    target = file_path.get_native_path_case(target)
-    if not file_path.path_starts_with(indir, target):
-      done = symlink_path
-      todo = post_symlink
-      continue
-    if file_path.path_starts_with(target, symlink_path):
-      raise MappingError(
-          'Can\'t map recursive symlink reference %s -> %s' %
-          (symlink_path, target))
-    logging.info('Found symlink: %s -> %s', symlink_path, target)
-    symlinks.append(os.path.relpath(symlink_path, indir))
-    # Treat the common prefix of the old and new paths as done, and start
-    # scanning again.
-    target = target.split(os.path.sep)
-    symlink_path = symlink_path.split(os.path.sep)
-    prefix_length = 0
-    for target_piece, symlink_path_piece in zip(target, symlink_path):
-      if target_piece == symlink_path_piece:
-        prefix_length += 1
-      else:
-        break
-    done = os.path.sep.join(target[:prefix_length])
-    todo = os.path.join(
-        os.path.sep.join(target[prefix_length:]), post_symlink)
-
-  relfile = os.path.relpath(done, indir)
-  relfile = relfile.rstrip(os.path.sep) + is_directory * os.path.sep
-  return relfile, symlinks
-
-
-@tools.profile
-def expand_directory_and_symlink(indir, relfile, blacklist, follow_symlinks):
-  """Expands a single input. It can result in multiple outputs.
-
-  This function is recursive when relfile is a directory.
-
-  Note: this code doesn't properly handle recursive symlink like one created
-  with:
-    ln -s .. foo
-  """
-  if os.path.isabs(relfile):
-    raise MappingError('Can\'t map absolute path %s' % relfile)
-
-  infile = file_path.normpath(os.path.join(indir, relfile))
-  if not infile.startswith(indir):
-    raise MappingError('Can\'t map file %s outside %s' % (infile, indir))
-
-  filepath = os.path.join(indir, relfile)
-  native_filepath = file_path.get_native_path_case(filepath)
-  if filepath != native_filepath:
-    # Special case './'.
-    if filepath != native_filepath + '.' + os.path.sep:
-      # While it'd be nice to enforce path casing on Windows, it's impractical.
-      # Also give up enforcing strict path case on OSX. Really, it's that sad.
-      # The case where it happens is very specific and hard to reproduce:
-      # get_native_path_case(
-      #    u'Foo.framework/Versions/A/Resources/Something.nib') will return
-      # u'Foo.framework/Versions/A/resources/Something.nib', e.g. lowercase 'r'.
-      #
-      # Note that this is really something deep in OSX because running
-      # ls Foo.framework/Versions/A
-      # will print out 'Resources', while file_path.get_native_path_case()
-      # returns a lower case 'r'.
-      #
-      # So *something* is happening under the hood resulting in the command 'ls'
-      # and Carbon.File.FSPathMakeRef('path').FSRefMakePath() to disagree.  We
-      # have no idea why.
-      if sys.platform not in ('darwin', 'win32'):
-        raise MappingError(
-            'File path doesn\'t equal native file path\n%s != %s' %
-            (filepath, native_filepath))
-
-  symlinks = []
-  if follow_symlinks:
-    try:
-      relfile, symlinks = expand_symlinks(indir, relfile)
-    except OSError:
-      # The file doesn't exist, it will throw below.
-      pass
-
-  if relfile.endswith(os.path.sep):
-    if not os.path.isdir(infile):
-      raise MappingError(
-          '%s is not a directory but ends with "%s"' % (infile, os.path.sep))
-
-    # Special case './'.
-    if relfile.startswith('.' + os.path.sep):
-      relfile = relfile[2:]
-    outfiles = symlinks
-    try:
-      for filename in fs.listdir(infile):
-        inner_relfile = os.path.join(relfile, filename)
-        if blacklist and blacklist(inner_relfile):
-          continue
-        if os.path.isdir(os.path.join(indir, inner_relfile)):
-          inner_relfile += os.path.sep
-        outfiles.extend(
-            expand_directory_and_symlink(indir, inner_relfile, blacklist,
-                                         follow_symlinks))
-      return outfiles
-    except OSError as e:
-      raise MappingError(
-          'Unable to iterate over directory %s.\n%s' % (infile, e))
-  else:
-    # Always add individual files even if they were blacklisted.
-    if os.path.isdir(infile):
-      raise MappingError(
-          'Input directory %s must have a trailing slash' % infile)
-
-    if not os.path.isfile(infile):
-      raise MappingError('Input file %s doesn\'t exist' % infile)
-
-    return symlinks + [relfile]
-
-
-def expand_directories_and_symlinks(
-    indir, infiles, blacklist, follow_symlinks, ignore_broken_items):
-  """Expands the directories and the symlinks, applies the blacklist and
-  verifies files exist.
-
-  Files are specified in os native path separator.
-  """
-  outfiles = []
-  for relfile in infiles:
-    try:
-      outfiles.extend(
-          expand_directory_and_symlink(
-              indir, relfile, blacklist, follow_symlinks))
-    except MappingError as e:
-      if not ignore_broken_items:
-        raise
-      logging.info('warning: %s', e)
-  return outfiles
-
-
-@tools.profile
-def file_to_metadata(filepath, prevdict, read_only, algo):
-  """Processes an input file, a dependency, and return meta data about it.
-
-  Behaviors:
-  - Retrieves the file mode, file size, file timestamp, file link
-    destination if it is a file link and calcultate the SHA-1 of the file's
-    content if the path points to a file and not a symlink.
-
-  Arguments:
-    filepath: File to act on.
-    prevdict: the previous dictionary. It is used to retrieve the cached sha-1
-              to skip recalculating the hash. Optional.
-    read_only: If 1 or 2, the file mode is manipulated. In practice, only save
-               one of 4 modes: 0755 (rwx), 0644 (rw), 0555 (rx), 0444 (r). On
-               windows, mode is not set since all files are 'executable' by
-               default.
-    algo:      Hashing algorithm used.
-
-  Returns:
-    The necessary dict to create an entry in the 'files' section of an .isolated
-    file.
-  """
-  # TODO(maruel): None is not a valid value.
-  assert read_only in (None, 0, 1, 2), read_only
-  out = {}
-  # Always check the file stat and check if it is a link. The timestamp is used
-  # to know if the file's content/symlink destination should be looked into.
-  # E.g. only reuse from prevdict if the timestamp hasn't changed.
-  # There is the risk of the file's timestamp being reset to its last value
-  # manually while its content changed. We don't protect against that use case.
-  try:
-    filestats = os.lstat(filepath)
-  except OSError:
-    # The file is not present.
-    raise MappingError('%s is missing' % filepath)
-  is_link = stat.S_ISLNK(filestats.st_mode)
-
-  if sys.platform != 'win32':
-    # Ignore file mode on Windows since it's not really useful there.
-    filemode = stat.S_IMODE(filestats.st_mode)
-    # Remove write access for group and all access to 'others'.
-    filemode &= ~(stat.S_IWGRP | stat.S_IRWXO)
-    if read_only:
-      filemode &= ~stat.S_IWUSR
-    if filemode & (stat.S_IXUSR|stat.S_IRGRP) == (stat.S_IXUSR|stat.S_IRGRP):
-      # Only keep x group bit if both x user bit and group read bit are set.
-      filemode |= stat.S_IXGRP
-    else:
-      filemode &= ~stat.S_IXGRP
-    if not is_link:
-      out['m'] = filemode
-
-  # Used to skip recalculating the hash or link destination. Use the most recent
-  # update time.
-  out['t'] = int(round(filestats.st_mtime))
-
-  if not is_link:
-    out['s'] = filestats.st_size
-    # If the timestamp wasn't updated and the file size is still the same, carry
-    # on the sha-1.
-    if (prevdict.get('t') == out['t'] and
-        prevdict.get('s') == out['s']):
-      # Reuse the previous hash if available.
-      out['h'] = prevdict.get('h')
-    if not out.get('h'):
-      out['h'] = hash_file(filepath, algo)
-  else:
-    # If the timestamp wasn't updated, carry on the link destination.
-    if prevdict.get('t') == out['t']:
-      # Reuse the previous link destination if available.
-      out['l'] = prevdict.get('l')
-    if out.get('l') is None:
-      # The link could be in an incorrect path case. In practice, this only
-      # happens on OSX on case-insensitive HFS.
-      # TODO(maruel): It'd be better if it was only done once, in
-      # expand_directory_and_symlink(), so it would not be necessary to do again
-      # here.
-      symlink_value = os.readlink(filepath)  # pylint: disable=E1101
-      filedir = file_path.get_native_path_case(os.path.dirname(filepath))
-      native_dest = file_path.fix_native_path_case(filedir, symlink_value)
-      out['l'] = os.path.relpath(native_dest, filedir)
-  return out
-
-
-def save_isolated(isolated, data):
-  """Writes one or multiple .isolated files.
-
-  Note: this reference implementation does not create child .isolated files, so
-  it always returns an empty list.
-
-  Returns the list of child isolated files that are included by |isolated|.
-  """
-  # Make sure the data is valid .isolated data by 'reloading' it.
-  algo = SUPPORTED_ALGOS[data['algo']]
-  load_isolated(json.dumps(data), algo)
-  tools.write_json(isolated, data, True)
-  return []
-
-
-def split_path(path):
-  """Splits a path and return a list with each element."""
-  out = []
-  while path:
-    path, rest = os.path.split(path)
-    if rest:
-      out.append(rest)
-  return out
-
-
-def load_isolated(content, algo):
-  """Verifies the .isolated file is valid and loads this object with the json
-  data.
-
-  Arguments:
-  - content: raw serialized content to load.
-  - algo: hashlib algorithm class. Used to confirm the algorithm matches the
-          algorithm used on the Isolate Server.
-  """
-  try:
-    data = json.loads(content)
-  except ValueError:
-    raise IsolatedError('Failed to parse: %s...' % content[:100])
-
-  if not isinstance(data, dict):
-    raise IsolatedError('Expected dict, got %r' % data)
-
-  # Check 'version' first, since it could modify the parsing after.
-  value = data.get('version', '1.0')
-  if not isinstance(value, basestring):
-    raise IsolatedError('Expected string, got %r' % value)
-  try:
-    version = tuple(map(int, value.split('.')))
-  except ValueError:
-    raise IsolatedError('Expected valid version, got %r' % value)
-
-  expected_version = tuple(map(int, ISOLATED_FILE_VERSION.split('.')))
-  # Major version must match.
-  if version[0] != expected_version[0]:
-    raise IsolatedError(
-        'Expected compatible \'%s\' version, got %r' %
-        (ISOLATED_FILE_VERSION, value))
-
-  if algo is None:
-    # TODO(maruel): Remove the default around Jan 2014.
-    # Default to the algorithm used in the .isolated file itself, falling back
-    # to 'sha-1' if unspecified.
-    algo = SUPPORTED_ALGOS_REVERSE[data.get('algo', 'sha-1')]
-
-  for key, value in data.iteritems():
-    if key == 'algo':
-      if not isinstance(value, basestring):
-        raise IsolatedError('Expected string, got %r' % value)
-      if value not in SUPPORTED_ALGOS:
-        raise IsolatedError(
-            'Expected one of \'%s\', got %r' %
-            (', '.join(sorted(SUPPORTED_ALGOS)), value))
-      if value != SUPPORTED_ALGOS_REVERSE[algo]:
-        raise IsolatedError(
-            'Expected \'%s\', got %r' % (SUPPORTED_ALGOS_REVERSE[algo], value))
-
-    elif key == 'command':
-      if not isinstance(value, list):
-        raise IsolatedError('Expected list, got %r' % value)
-      if not value:
-        raise IsolatedError('Expected non-empty command')
-      for subvalue in value:
-        if not isinstance(subvalue, basestring):
-          raise IsolatedError('Expected string, got %r' % subvalue)
-
-    elif key == 'files':
-      if not isinstance(value, dict):
-        raise IsolatedError('Expected dict, got %r' % value)
-      for subkey, subvalue in value.iteritems():
-        if not isinstance(subkey, basestring):
-          raise IsolatedError('Expected string, got %r' % subkey)
-        if os.path.isabs(subkey) or subkey.startswith('\\\\'):
-          # Disallow '\\\\': it could be a UNC path on Windows, but disallow
-          # this everywhere.
-          raise IsolatedError('File path can\'t be absolute: %r' % subkey)
-        if subkey.endswith(('/', '\\')):
-          raise IsolatedError(
-              'File path can\'t end with \'%s\': %r' % (subkey[-1], subkey))
-        if '..' in split_path(subkey):
-          raise IsolatedError('File path can\'t reference parent: %r' % subkey)
-        if not isinstance(subvalue, dict):
-          raise IsolatedError('Expected dict, got %r' % subvalue)
-        for subsubkey, subsubvalue in subvalue.iteritems():
-          if subsubkey == 'l':
-            if not isinstance(subsubvalue, basestring):
-              raise IsolatedError('Expected string, got %r' % subsubvalue)
-          elif subsubkey == 'm':
-            if not isinstance(subsubvalue, int):
-              raise IsolatedError('Expected int, got %r' % subsubvalue)
-          elif subsubkey == 'h':
-            if not is_valid_hash(subsubvalue, algo):
-              raise IsolatedError('Expected sha-1, got %r' % subsubvalue)
-          elif subsubkey == 's':
-            if not isinstance(subsubvalue, (int, long)):
-              raise IsolatedError('Expected int or long, got %r' % subsubvalue)
-          elif subsubkey == 't':
-            if subsubvalue not in SUPPORTED_FILE_TYPES:
-              raise IsolatedError('Expected one of \'%s\', got %r' % (
-                  ', '.join(sorted(SUPPORTED_FILE_TYPES)), subsubvalue))
-          else:
-            raise IsolatedError('Unknown subsubkey %s' % subsubkey)
-        if bool('h' in subvalue) == bool('l' in subvalue):
-          raise IsolatedError(
-              'Need only one of \'h\' (sha-1) or \'l\' (link), got: %r' %
-              subvalue)
-        if bool('h' in subvalue) != bool('s' in subvalue):
-          raise IsolatedError(
-              'Both \'h\' (sha-1) and \'s\' (size) should be set, got: %r' %
-              subvalue)
-        if bool('s' in subvalue) == bool('l' in subvalue):
-          raise IsolatedError(
-              'Need only one of \'s\' (size) or \'l\' (link), got: %r' %
-              subvalue)
-        if bool('l' in subvalue) and bool('m' in subvalue):
-          raise IsolatedError(
-              'Cannot use \'m\' (mode) and \'l\' (link), got: %r' %
-              subvalue)
-
-    elif key == 'includes':
-      if not isinstance(value, list):
-        raise IsolatedError('Expected list, got %r' % value)
-      if not value:
-        raise IsolatedError('Expected non-empty includes list')
-      for subvalue in value:
-        if not is_valid_hash(subvalue, algo):
-          raise IsolatedError('Expected sha-1, got %r' % subvalue)
-
-    elif key == 'os':
-      if version >= (1, 4):
-        raise IsolatedError('Key \'os\' is not allowed starting version 1.4')
-
-    elif key == 'read_only':
-      if not value in (0, 1, 2):
-        raise IsolatedError('Expected 0, 1 or 2, got %r' % value)
-
-    elif key == 'relative_cwd':
-      if not isinstance(value, basestring):
-        raise IsolatedError('Expected string, got %r' % value)
-
-    elif key == 'version':
-      # Already checked above.
-      pass
-
-    else:
-      raise IsolatedError('Unknown key %r' % key)
-
-  # Automatically fix os.path.sep if necessary. While .isolated files are always
-  # in the native path format, someone could want to download an .isolated
-  # tree from another OS.
-  wrong_path_sep = '/' if os.path.sep == '\\' else '\\'
-  if 'files' in data:
-    data['files'] = dict(
-        (k.replace(wrong_path_sep, os.path.sep), v)
-        for k, v in data['files'].iteritems())
-    for v in data['files'].itervalues():
-      if 'l' in v:
-        v['l'] = v['l'].replace(wrong_path_sep, os.path.sep)
-  if 'relative_cwd' in data:
-    data['relative_cwd'] = data['relative_cwd'].replace(
-        wrong_path_sep, os.path.sep)
-  return data
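The module deleted above validates .isolated documents; the shape it accepts is
easiest to see from a small example. A minimal sketch, assuming the module is
importable as isolated_format, with a made-up file entry whose hash is just the
SHA-1 of a sample string:

  import hashlib
  import json
  import isolated_format

  doc = {
    'algo': 'sha-1',
    'version': isolated_format.ISOLATED_FILE_VERSION,
    'files': {
      'data/common.txt': {
        'h': hashlib.sha1('hello').hexdigest(),  # content digest
        's': 5,                                  # size in bytes
        'm': 0644,                               # POSIX mode; omitted on Windows
      },
    },
    'relative_cwd': '.',
  }
  # Passes load_isolated() validation and is returned as a plain dict.
  parsed = isolated_format.load_isolated(json.dumps(doc), hashlib.sha1)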
diff --git a/tools/swarming_client/isolateserver.py b/tools/swarming_client/isolateserver.py
deleted file mode 100755
index 8a8bed6..0000000
--- a/tools/swarming_client/isolateserver.py
+++ /dev/null
@@ -1,2440 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Archives a set of files or directories to an Isolate Server."""
-
-__version__ = '0.6.0'
-
-import base64
-import errno
-import functools
-import io
-import logging
-import optparse
-import os
-import re
-import signal
-import stat
-import sys
-import tempfile
-import threading
-import time
-import types
-import zlib
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from third_party.depot_tools import subcommand
-
-from libs import arfile
-from utils import file_path
-from utils import fs
-from utils import logging_utils
-from utils import lru
-from utils import net
-from utils import on_error
-from utils import subprocess42
-from utils import threading_utils
-from utils import tools
-
-import auth
-import isolated_format
-
-
-# Version of isolate protocol passed to the server in /handshake request.
-ISOLATE_PROTOCOL_VERSION = '1.0'
-
-
-# The file size to be used when we don't know the correct file size,
-# generally used for .isolated files.
-UNKNOWN_FILE_SIZE = None
-
-
-# Maximum expected delay (in seconds) between successive file fetches or uploads
-# in Storage. If it takes longer than that, a deadlock might be happening
-# and all stack frames for all threads are dumped to log.
-DEADLOCK_TIMEOUT = 5 * 60
-
-
-# The number of files to check the isolate server per /pre-upload query.
-# All files are sorted by likelihood of a change in the file content
-# (currently file size is used to estimate this: the larger the file, the more
-# likely it has changed). Then the first ITEMS_PER_CONTAINS_QUERIES[0] files
-# are taken and sent to '/pre-upload', then the next
-# ITEMS_PER_CONTAINS_QUERIES[1], and so on. The numbers here are a trade-off;
-# the more per request, the lower the effect of HTTP round trip latency and
-# TCP-level chattiness. On the other hand, larger values cause longer lookups,
-# increasing the initial latency to start uploading, which is especially an
-# issue for large files. This value is optimized for the "few thousand files to
-# look up with a minimal number of large files missing" case.
-ITEMS_PER_CONTAINS_QUERIES = (20, 20, 50, 50, 50, 100)
-
-
-# A list of already compressed extension types that should not receive any
-# compression before being uploaded.
-ALREADY_COMPRESSED_TYPES = [
-    '7z', 'avi', 'cur', 'gif', 'h264', 'jar', 'jpeg', 'jpg', 'mp4', 'pdf',
-    'png', 'wav', 'zip',
-]
-
-
-# Chunk size to use when reading from network stream.
-NET_IO_FILE_CHUNK = 16 * 1024
-
-
-# Read timeout in seconds for downloads from isolate storage. If there's no
-# response from the server within this timeout, the whole download will be
-# aborted.
-DOWNLOAD_READ_TIMEOUT = 60
-
-
-# The delay (in seconds) to wait between logging statements when retrieving
-# the required files. This is intended to let the user (or buildbot) know that
-# the program is still running.
-DELAY_BETWEEN_UPDATES_IN_SECS = 30
-
-
-DEFAULT_BLACKLIST = (
-  # Temporary vim or python files.
-  r'^.+\.(?:pyc|swp)$',
-  # .git or .svn directory.
-  r'^(?:.+' + re.escape(os.path.sep) + r'|)\.(?:git|svn)$',
-)
-
-
-# A class to use to communicate with the server by default. Can be changed by
-# 'set_storage_api_class'. Default is IsolateServer.
-_storage_api_cls = None
-
-
-class Error(Exception):
-  """Generic runtime error."""
-  pass
-
-
-class Aborted(Error):
-  """Operation aborted."""
-  pass
-
-
-class AlreadyExists(Error):
-  """File already exists."""
-
-
-def file_read(path, chunk_size=isolated_format.DISK_FILE_CHUNK, offset=0):
-  """Yields file content in chunks of |chunk_size| starting from |offset|."""
-  with fs.open(path, 'rb') as f:
-    if offset:
-      f.seek(offset)
-    while True:
-      data = f.read(chunk_size)
-      if not data:
-        break
-      yield data
-
-
-def file_write(path, content_generator):
-  """Writes file content as generated by content_generator.
-
-  Creates the intermediary directory as needed.
-
-  Returns the number of bytes written.
-
-  Meant to be mocked out in unit tests.
-  """
-  file_path.ensure_tree(os.path.dirname(path))
-  total = 0
-  with fs.open(path, 'wb') as f:
-    for d in content_generator:
-      total += len(d)
-      f.write(d)
-  return total
-
-
-def fileobj_path(fileobj):
-  """Return file system path for file like object or None.
-
-  The returned path is guaranteed to exist and can be passed to file system
-  operations like copy.
-  """
-  name = getattr(fileobj, 'name', None)
-  if name is None:
-    return
-
-  # If the file like object was created by code outside our control (such as
-  # the standard library) using something like open("test.txt"), name will end
-  # up being a str. We want all our paths to be unicode objects, so we
-  # decode it.
-  if not isinstance(name, unicode):
-    name = name.decode(sys.getfilesystemencoding())
-
-  if fs.exists(name):
-    return name
-
-
-# TODO(tansell): Replace fileobj_copy with shutil.copyfileobj once proper file
-# wrappers have been created.
-def fileobj_copy(
-    dstfileobj, srcfileobj, size=-1,
-    chunk_size=isolated_format.DISK_FILE_CHUNK):
-  """Copy data from srcfileobj to dstfileobj.
-
-  Providing size means exactly that amount of data will be copied (if there
-  isn't enough data, an IOError exception is thrown). Otherwise all data until
-  the EOF marker will be copied.
-  """
-  if size == -1 and hasattr(srcfileobj, 'tell'):
-    if srcfileobj.tell() != 0:
-      raise IOError('partial file but not using size')
-
-  written = 0
-  while written != size:
-    readsize = chunk_size
-    if size > 0:
-      readsize = min(readsize, size-written)
-    data = srcfileobj.read(readsize)
-    if not data:
-      if size == -1:
-        break
-      raise IOError('partial file, got %s, wanted %s' % (written, size))
-    dstfileobj.write(data)
-    written += len(data)
-
-
-def putfile(srcfileobj, dstpath, file_mode=None, size=-1, use_symlink=False):
-  """Put srcfileobj at the given dstpath with given mode.
-
-  The function aims to do this as efficiently as possible while still allowing
-  any possible file like object to be given.
-
-  Creating a tree of hardlinks has a few drawbacks:
-  - tmpfs cannot be used for the scratch space. The tree has to be on the same
-    partition as the cache.
-  - involves a write to the inode, which advances ctime, causing a metadata
-    writeback (causing disk seeking).
-  - cache ctime cannot be used to detect modifications / corruption.
-  - Some file systems (NTFS) have a 64k limit on the number of hardlinks per
-    partition. This is why the function automatically falls back to copying the
-    file content.
-  - /proc/sys/fs/protected_hardlinks causes an additional check to ensure that
-    all hardlinks have the same owner.
-  - Anecdotal reports indicate ext2 can be faulty under a high rate of hardlink
-    creation.
-
-  Creating a tree of symlinks has a few drawbacks:
-  - Tasks running the equivalent of os.path.realpath() will get the naked path
-    and may fail.
-  - Windows:
-    - Symlinks are reparse points:
-      https://msdn.microsoft.com/library/windows/desktop/aa365460.aspx
-      https://msdn.microsoft.com/library/windows/desktop/aa363940.aspx
-    - Symbolic links are Win32 paths, not NT paths.
-      https://googleprojectzero.blogspot.com/2016/02/the-definitive-guide-on-win32-to-nt.html
-    - Symbolic links are supported on Windows 7 and later only.
-    - SeCreateSymbolicLinkPrivilege is needed, which is not present by
-      default.
-    - SeCreateSymbolicLinkPrivilege is *stripped off* by UAC when a restricted
-      RID is present in the token;
-      https://msdn.microsoft.com/en-us/library/bb530410.aspx
-  """
-  srcpath = fileobj_path(srcfileobj)
-  if srcpath and size == -1:
-    readonly = file_mode is None or (
-        file_mode & (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH))
-
-    if readonly:
-      # If the file is read only we can link the file
-      if use_symlink:
-        link_mode = file_path.SYMLINK_WITH_FALLBACK
-      else:
-        link_mode = file_path.HARDLINK_WITH_FALLBACK
-    else:
-      # If not read only, we must copy the file
-      link_mode = file_path.COPY
-
-    file_path.link_file(dstpath, srcpath, link_mode)
-  else:
-    # Need to write out the file
-    with fs.open(dstpath, 'wb') as dstfileobj:
-      fileobj_copy(dstfileobj, srcfileobj, size)
-
-  assert fs.exists(dstpath)
-
-  # file_mode of 0 is actually valid, so need explicit check.
-  if file_mode is not None:
-    fs.chmod(dstpath, file_mode)
-
-
-def zip_compress(content_generator, level=7):
-  """Reads chunks from |content_generator| and yields zip compressed chunks."""
-  compressor = zlib.compressobj(level)
-  for chunk in content_generator:
-    compressed = compressor.compress(chunk)
-    if compressed:
-      yield compressed
-  tail = compressor.flush(zlib.Z_FINISH)
-  if tail:
-    yield tail
-
-
-def zip_decompress(
-    content_generator, chunk_size=isolated_format.DISK_FILE_CHUNK):
-  """Reads zipped data from |content_generator| and yields decompressed data.
-
-  Decompresses data in small chunks (no larger than |chunk_size|) so that a
-  zip bomb doesn't cause zlib to preallocate a huge amount of memory.
-
-  Raises IOError if data is corrupted or incomplete.
-  """
-  decompressor = zlib.decompressobj()
-  compressed_size = 0
-  try:
-    for chunk in content_generator:
-      compressed_size += len(chunk)
-      data = decompressor.decompress(chunk, chunk_size)
-      if data:
-        yield data
-      while decompressor.unconsumed_tail:
-        data = decompressor.decompress(decompressor.unconsumed_tail, chunk_size)
-        if data:
-          yield data
-    tail = decompressor.flush()
-    if tail:
-      yield tail
-  except zlib.error as e:
-    raise IOError(
-        'Corrupted zip stream (read %d bytes) - %s' % (compressed_size, e))
-  # Ensure all data was read and decompressed.
-  if decompressor.unused_data or decompressor.unconsumed_tail:
-    raise IOError('Not all data was decompressed')
-
-
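# A quick round-trip sketch of the two zip helpers above (illustrative only;
# the sample chunks are made up):
#
#   chunks = ['hello ', 'isolate ', 'server']
#   compressed = list(zip_compress(chunks, level=7))
#   assert ''.join(zip_decompress(compressed)) == ''.join(chunks)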
-def get_zip_compression_level(filename):
-  """Given a filename calculates the ideal zip compression level to use."""
-  file_ext = os.path.splitext(filename)[1].lower()
-  # TODO(csharp): Profile to find what compression level works best.
-  return 0 if file_ext in ALREADY_COMPRESSED_TYPES else 7
-
-
-def create_directories(base_directory, files):
-  """Creates the directory structure needed by the given list of files."""
-  logging.debug('create_directories(%s, %d)', base_directory, len(files))
-  # Creates the tree of directories to create.
-  directories = set(os.path.dirname(f) for f in files)
-  for item in list(directories):
-    while item:
-      directories.add(item)
-      item = os.path.dirname(item)
-  for d in sorted(directories):
-    if d:
-      fs.mkdir(os.path.join(base_directory, d))
-
-
-def create_symlinks(base_directory, files):
-  """Creates any symlinks needed by the given set of files."""
-  for filepath, properties in files:
-    if 'l' not in properties:
-      continue
-    if sys.platform == 'win32':
-      # TODO(maruel): Create symlink via the win32 api.
-      logging.warning('Ignoring symlink %s', filepath)
-      continue
-    outfile = os.path.join(base_directory, filepath)
-    try:
-      os.symlink(properties['l'], outfile)  # pylint: disable=E1101
-    except OSError as e:
-      if e.errno == errno.EEXIST:
-        raise AlreadyExists('File %s already exists.' % outfile)
-      raise
-
-
-def is_valid_file(path, size):
-  """Determines if the given files appears valid.
-
-  Currently it just checks the file's size.
-  """
-  if size == UNKNOWN_FILE_SIZE:
-    return fs.isfile(path)
-  actual_size = fs.stat(path).st_size
-  if size != actual_size:
-    logging.warning(
-        'Found invalid item %s; %d != %d',
-        os.path.basename(path), actual_size, size)
-    return False
-  return True
-
-
-class Item(object):
-  """An item to push to Storage.
-
-  Its digest and size may be provided in advance, if known. Otherwise they will
-  be derived from content(). If digest is provided, it MUST correspond to
-  the hash algorithm used by Storage.
-
-  When used with Storage, Item starts its life in a main thread, travels
-  to 'contains' thread, then to 'push' thread and then finally back to
-  the main thread. It is never used concurrently from multiple threads.
-  """
-
-  def __init__(self, digest=None, size=None, high_priority=False):
-    self.digest = digest
-    self.size = size
-    self.high_priority = high_priority
-    self.compression_level = 6
-
-  def content(self):
-    """Iterable with content of this item as byte string (str) chunks."""
-    raise NotImplementedError()
-
-  def prepare(self, hash_algo):
-    """Ensures self.digest and self.size are set.
-
-    Uses content() as a source of data to calculate them. Does nothing if digest
-    and size are already known.
-
-    Arguments:
-      hash_algo: hash algorithm to use to calculate digest.
-    """
-    if self.digest is None or self.size is None:
-      digest = hash_algo()
-      total = 0
-      for chunk in self.content():
-        digest.update(chunk)
-        total += len(chunk)
-      self.digest = digest.hexdigest()
-      self.size = total
-
-
-class FileItem(Item):
-  """A file to push to Storage.
-
-  Its digest and size may be provided in advance, if known. Otherwise they will
-  be derived from the file content.
-  """
-
-  def __init__(self, path, digest=None, size=None, high_priority=False):
-    super(FileItem, self).__init__(
-        digest,
-        size if size is not None else fs.stat(path).st_size,
-        high_priority)
-    self.path = path
-    self.compression_level = get_zip_compression_level(path)
-
-  def content(self):
-    return file_read(self.path)
-
-
-class BufferItem(Item):
-  """A byte buffer to push to Storage."""
-
-  def __init__(self, buf, high_priority=False):
-    super(BufferItem, self).__init__(None, len(buf), high_priority)
-    self.buffer = buf
-
-  def content(self):
-    return [self.buffer]
-
-
-class Storage(object):
-  """Efficiently downloads or uploads large set of files via StorageApi.
-
-  Implements compression support, parallel 'contains' checks, parallel uploads
-  and more.
-
-  Works only within a single namespace (and thus the hashing algorithm and
-  compression scheme are fixed).
-
-  Spawns multiple internal threads. Thread safe, but not fork safe. Modifies
-  signal handlers table to handle Ctrl+C.
-  """
-
-  def __init__(self, storage_api):
-    self._storage_api = storage_api
-    self._use_zip = isolated_format.is_namespace_with_compression(
-        storage_api.namespace)
-    self._hash_algo = isolated_format.get_hash_algo(storage_api.namespace)
-    self._cpu_thread_pool = None
-    self._net_thread_pool = None
-    self._aborted = False
-    self._prev_sig_handlers = {}
-
-  @property
-  def hash_algo(self):
-    """Hashing algorithm used to name files in storage based on their content.
-
-    Defined by |namespace|. See also isolated_format.get_hash_algo().
-    """
-    return self._hash_algo
-
-  @property
-  def location(self):
-    """URL of the backing store that this class is using."""
-    return self._storage_api.location
-
-  @property
-  def namespace(self):
-    """Isolate namespace used by this storage.
-
-    Indirectly defines hashing scheme and compression method used.
-    """
-    return self._storage_api.namespace
-
-  @property
-  def cpu_thread_pool(self):
-    """ThreadPool for CPU-bound tasks like zipping."""
-    if self._cpu_thread_pool is None:
-      threads = max(threading_utils.num_processors(), 2)
-      if sys.maxsize <= 2L**32:
-        # On a 32-bit userland, do not try to use more than 16 threads.
-        threads = min(threads, 16)
-      self._cpu_thread_pool = threading_utils.ThreadPool(2, threads, 0, 'zip')
-    return self._cpu_thread_pool
-
-  @property
-  def net_thread_pool(self):
-    """AutoRetryThreadPool for IO-bound tasks, retries IOError."""
-    if self._net_thread_pool is None:
-      self._net_thread_pool = threading_utils.IOAutoRetryThreadPool()
-    return self._net_thread_pool
-
-  def close(self):
-    """Waits for all pending tasks to finish."""
-    logging.info('Waiting for all threads to die...')
-    if self._cpu_thread_pool:
-      self._cpu_thread_pool.join()
-      self._cpu_thread_pool.close()
-      self._cpu_thread_pool = None
-    if self._net_thread_pool:
-      self._net_thread_pool.join()
-      self._net_thread_pool.close()
-      self._net_thread_pool = None
-    logging.info('Done.')
-
-  def abort(self):
-    """Cancels any pending or future operations."""
-    # This is not strictly threadsafe, but in the worst case the logging message
-    # will be printed twice. Not a big deal. In other places it is assumed that
-    # unprotected reads and writes to _aborted are serializable (this is true
-    # for Python) and thus no locking is used.
-    if not self._aborted:
-      logging.warning('Aborting... It can take a while.')
-      self._aborted = True
-
-  def __enter__(self):
-    """Context manager interface."""
-    assert not self._prev_sig_handlers, self._prev_sig_handlers
-    for s in (signal.SIGINT, signal.SIGTERM):
-      self._prev_sig_handlers[s] = signal.signal(s, lambda *_args: self.abort())
-    return self
-
-  def __exit__(self, _exc_type, _exc_value, _traceback):
-    """Context manager interface."""
-    self.close()
-    while self._prev_sig_handlers:
-      s, h = self._prev_sig_handlers.popitem()
-      signal.signal(s, h)
-    return False
-
-  def upload_items(self, items):
-    """Uploads a bunch of items to the isolate server.
-
-    It figures out what items are missing from the server and uploads only them.
-
-    Arguments:
-      items: list of Item instances that represents data to upload.
-
-    Returns:
-      List of items that were uploaded. All other items are already there.
-    """
-    logging.info('upload_items(items=%d)', len(items))
-
-    # Ensure all digests are calculated.
-    for item in items:
-      item.prepare(self._hash_algo)
-
-    # For each digest keep only the first Item that matches it. All other items
-    # are just indistinguishable copies from the point of view of the isolate
-    # server (it doesn't care about paths at all, only content and digests).
-    seen = {}
-    duplicates = 0
-    for item in items:
-      if seen.setdefault(item.digest, item) is not item:
-        duplicates += 1
-    items = seen.values()
-    if duplicates:
-      logging.info('Skipped %d files with duplicated content', duplicates)
-
-    # Enqueue all upload tasks.
-    missing = set()
-    uploaded = []
-    channel = threading_utils.TaskChannel()
-    for missing_item, push_state in self.get_missing_items(items):
-      missing.add(missing_item)
-      self.async_push(channel, missing_item, push_state)
-
-    # No need to spawn deadlock detector thread if there's nothing to upload.
-    if missing:
-      with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector:
-        # Wait for all started uploads to finish.
-        while len(uploaded) != len(missing):
-          detector.ping()
-          item = channel.pull()
-          uploaded.append(item)
-          logging.debug(
-              'Uploaded %d / %d: %s', len(uploaded), len(missing), item.digest)
-    logging.info('All files are uploaded')
-
-    # Print stats.
-    total = len(items)
-    total_size = sum(f.size for f in items)
-    logging.info(
-        'Total:      %6d, %9.1fkb',
-        total,
-        total_size / 1024.)
-    cache_hit = set(items) - missing
-    cache_hit_size = sum(f.size for f in cache_hit)
-    logging.info(
-        'cache hit:  %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
-        len(cache_hit),
-        cache_hit_size / 1024.,
-        len(cache_hit) * 100. / total,
-        cache_hit_size * 100. / total_size if total_size else 0)
-    cache_miss = missing
-    cache_miss_size = sum(f.size for f in cache_miss)
-    logging.info(
-        'cache miss: %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
-        len(cache_miss),
-        cache_miss_size / 1024.,
-        len(cache_miss) * 100. / total,
-        cache_miss_size * 100. / total_size if total_size else 0)
-
-    return uploaded
-
-  def async_push(self, channel, item, push_state):
-    """Starts asynchronous push to the server in a parallel thread.
-
-    Can be used only after |item| was checked for presence on the server with a
-    'get_missing_items' call. 'get_missing_items' returns a |push_state| object
-    that contains storage specific information describing how to upload
-    the item (for example in case of cloud storage, it is signed upload URLs).
-
-    Arguments:
-      channel: TaskChannel that receives back |item| when upload ends.
-      item: item to upload as instance of Item class.
-      push_state: push state returned by 'get_missing_items' call for |item|.
-
-    Returns:
-      None, but |channel| later receives back |item| when upload ends.
-    """
-    # Thread pool task priority.
-    priority = (
-        threading_utils.PRIORITY_HIGH if item.high_priority
-        else threading_utils.PRIORITY_MED)
-
-    def push(content):
-      """Pushes an Item and returns it to |channel|."""
-      if self._aborted:
-        raise Aborted()
-      item.prepare(self._hash_algo)
-      self._storage_api.push(item, push_state, content)
-      return item
-
-    # If zipping is not required, just start a push task.
-    if not self._use_zip:
-      self.net_thread_pool.add_task_with_channel(
-          channel, priority, push, item.content())
-      return
-
-    # If zipping is enabled, zip in a separate thread.
-    def zip_and_push():
-      # TODO(vadimsh): Implement streaming uploads. Until that is done, assemble
-      # the content right here. It will block until the whole file is zipped.
-      try:
-        if self._aborted:
-          raise Aborted()
-        stream = zip_compress(item.content(), item.compression_level)
-        data = ''.join(stream)
-      except Exception as exc:
-        logging.error('Failed to zip \'%s\': %s', item, exc)
-        channel.send_exception()
-        return
-      self.net_thread_pool.add_task_with_channel(
-          channel, priority, push, [data])
-    self.cpu_thread_pool.add_task(priority, zip_and_push)
-
-  def push(self, item, push_state):
-    """Synchronously pushes a single item to the server.
-
-    If you need to push many items at once, consider using 'upload_items' or
-    'async_push' with instance of TaskChannel.
-
-    Arguments:
-      item: item to upload as instance of Item class.
-      push_state: push state returned by 'get_missing_items' call for |item|.
-
-    Returns:
-      Pushed item (same object as |item|).
-    """
-    channel = threading_utils.TaskChannel()
-    with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT):
-      self.async_push(channel, item, push_state)
-      pushed = channel.pull()
-      assert pushed is item
-    return item
-
-  def async_fetch(self, channel, priority, digest, size, sink):
-    """Starts asynchronous fetch from the server in a parallel thread.
-
-    Arguments:
-      channel: TaskChannel that receives back |digest| when download ends.
-      priority: thread pool task priority for the fetch.
-      digest: hex digest of an item to download.
-      size: expected size of the item (after decompression).
-      sink: function that will be called as sink(generator).
-    """
-    def fetch():
-      try:
-        # Prepare reading pipeline.
-        stream = self._storage_api.fetch(digest)
-        if self._use_zip:
-          stream = zip_decompress(stream, isolated_format.DISK_FILE_CHUNK)
-        # Run |stream| through verifier that will assert its size.
-        verifier = FetchStreamVerifier(stream, size)
-        # Verified stream goes to |sink|.
-        sink(verifier.run())
-      except Exception as err:
-        logging.error('Failed to fetch %s: %s', digest, err)
-        raise
-      return digest
-
-    # Don't bother with zip_thread_pool for decompression. Decompression is
-    # really fast and most probably IO bound anyway.
-    self.net_thread_pool.add_task_with_channel(channel, priority, fetch)
-
-  def get_missing_items(self, items):
-    """Yields items that are missing from the server.
-
-    Issues multiple parallel queries via StorageApi's 'contains' method.
-
-    Arguments:
-      items: a list of Item objects to check.
-
-    Yields:
-      For each missing item it yields a pair (item, push_state), where:
-        * item - Item object that is missing (one of |items|).
-        * push_state - opaque object that contains storage specific information
-            describing how to upload the item (for example in case of cloud
-            storage, it is signed upload URLs). It can later be passed to
-            'async_push'.
-    """
-    channel = threading_utils.TaskChannel()
-    pending = 0
-
-    # Ensure all digests are calculated.
-    for item in items:
-      item.prepare(self._hash_algo)
-
-    def contains(batch):
-      if self._aborted:
-        raise Aborted()
-      return self._storage_api.contains(batch)
-
-    # Enqueue all requests.
-    for batch in batch_items_for_check(items):
-      self.net_thread_pool.add_task_with_channel(
-          channel, threading_utils.PRIORITY_HIGH, contains, batch)
-      pending += 1
-
-    # Yield results as they come in.
-    for _ in xrange(pending):
-      for missing_item, push_state in channel.pull().iteritems():
-        yield missing_item, push_state
-
-
-def batch_items_for_check(items):
-  """Splits list of items to check for existence on the server into batches.
-
-  Each batch corresponds to a single 'exists?' query to the server via a call
-  to StorageApi's 'contains' method.
-
-  Arguments:
-    items: a list of Item objects.
-
-  Yields:
-    Batches of items to query for existence in a single operation,
-    each batch is a list of Item objects.
-  """
-  batch_count = 0
-  batch_size_limit = ITEMS_PER_CONTAINS_QUERIES[0]
-  next_queries = []
-  for item in sorted(items, key=lambda x: x.size, reverse=True):
-    next_queries.append(item)
-    if len(next_queries) == batch_size_limit:
-      yield next_queries
-      next_queries = []
-      batch_count += 1
-      batch_size_limit = ITEMS_PER_CONTAINS_QUERIES[
-          min(batch_count, len(ITEMS_PER_CONTAINS_QUERIES) - 1)]
-  if next_queries:
-    yield next_queries
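A rough sketch of the resulting batching, under the assumption that
ITEMS_PER_CONTAINS_QUERIES is (20, 20, 50, 50, 50, 100) (the constant is
defined elsewhere in this file and may differ):

  class _SizedItem(object):
    def __init__(self, size):
      self.size = size

  batches = list(batch_items_for_check([_SizedItem(n) for n in xrange(130)]))
  # Largest items come first and the batch size ramps up, so with the assumed
  # constant this yields batches of 20, 20, 50 and 40 items.
  assert [len(b) for b in batches] == [20, 20, 50, 40]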
-
-
-class FetchQueue(object):
-  """Fetches items from Storage and places them into LocalCache.
-
-  It manages multiple concurrent fetch operations. Acts as a bridge between
-  Storage and LocalCache so that Storage and LocalCache don't depend on each
-  other at all.
-  """
-
-  def __init__(self, storage, cache):
-    self.storage = storage
-    self.cache = cache
-    self._channel = threading_utils.TaskChannel()
-    self._pending = set()
-    self._accessed = set()
-    self._fetched = cache.cached_set()
-
-  def add(
-      self,
-      digest,
-      size=UNKNOWN_FILE_SIZE,
-      priority=threading_utils.PRIORITY_MED):
-    """Starts asynchronous fetch of item |digest|."""
-    # Fetching it now?
-    if digest in self._pending:
-      return
-
-    # Mark this file as in use, verify_all_cached will later ensure it is still
-    # in cache.
-    self._accessed.add(digest)
-
-    # Already fetched? Notify cache to update item's LRU position.
-    if digest in self._fetched:
-      # 'touch' returns True if item is in cache and not corrupted.
-      if self.cache.touch(digest, size):
-        return
-      # Item is corrupted, remove it from cache and fetch it again.
-      self._fetched.remove(digest)
-      self.cache.evict(digest)
-
-    # TODO(maruel): On every new item, it should look at the free disk space,
-    # the current cache size and the size of the new item:
-    # - Trim the cache as more entries are listed when free disk space is low,
-    #   otherwise if the amount of data downloaded during the run > free disk
-    #   space, it'll crash.
-    # - Make sure there's enough free disk space to fit all dependencies of
-    #   this run! If not, abort early.
-
-    # Start fetching.
-    self._pending.add(digest)
-    self.storage.async_fetch(
-        self._channel, priority, digest, size,
-        functools.partial(self.cache.write, digest))
-
-  def wait(self, digests):
-    """Starts a loop that waits for at least one of |digests| to be retrieved.
-
-    Returns the first digest retrieved.
-    """
-    # Flush any already fetched items.
-    for digest in digests:
-      if digest in self._fetched:
-        return digest
-
-    # Ensure all requested items are being fetched now.
-    assert all(digest in self._pending for digest in digests), (
-        digests, self._pending)
-
-    # Wait for some requested item to finish fetching.
-    while self._pending:
-      digest = self._channel.pull()
-      self._pending.remove(digest)
-      self._fetched.add(digest)
-      if digest in digests:
-        return digest
-
-    # Should never reach this point due to assert above.
-    raise RuntimeError('Impossible state')
-
-  def inject_local_file(self, path, algo):
-    """Adds local file to the cache as if it was fetched from storage."""
-    with fs.open(path, 'rb') as f:
-      data = f.read()
-    digest = algo(data).hexdigest()
-    self.cache.write(digest, [data])
-    self._fetched.add(digest)
-    return digest
-
-  @property
-  def pending_count(self):
-    """Returns number of items to be fetched."""
-    return len(self._pending)
-
-  def verify_all_cached(self):
-    """True if all accessed items are in cache."""
-    return self._accessed.issubset(self.cache.cached_set())
-
-
-class FetchStreamVerifier(object):
-  """Verifies that fetched file is valid before passing it to the LocalCache."""
-
-  def __init__(self, stream, expected_size):
-    assert stream is not None
-    self.stream = stream
-    self.expected_size = expected_size
-    self.current_size = 0
-
-  def run(self):
-    """Generator that yields same items as |stream|.
-
-    Verifies |stream| is complete before yielding a last chunk to consumer.
-
-    Also wraps IOError produced by consumer into MappingError exceptions since
-    otherwise Storage will retry fetch on unrelated local cache errors.
-    """
-    # Read one chunk ahead, keep it in |stored|.
-    # That way a complete stream can be verified before pushing last chunk
-    # to consumer.
-    stored = None
-    for chunk in self.stream:
-      assert chunk is not None
-      if stored is not None:
-        self._inspect_chunk(stored, is_last=False)
-        try:
-          yield stored
-        except IOError as exc:
-          raise isolated_format.MappingError(
-              'Failed to store an item in cache: %s' % exc)
-      stored = chunk
-    if stored is not None:
-      self._inspect_chunk(stored, is_last=True)
-      try:
-        yield stored
-      except IOError as exc:
-        raise isolated_format.MappingError(
-            'Failed to store an item in cache: %s' % exc)
-
-  def _inspect_chunk(self, chunk, is_last):
-    """Called for each fetched chunk before passing it to consumer."""
-    self.current_size += len(chunk)
-    if (is_last and
-        (self.expected_size != UNKNOWN_FILE_SIZE) and
-        (self.expected_size != self.current_size)):
-      raise IOError('Incorrect file size: expected %d, got %d' % (
-          self.expected_size, self.current_size))
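A small sketch of the expected behaviour (the chunks below are arbitrary):

  chunks = ['abc', 'def']
  assert ''.join(FetchStreamVerifier(iter(chunks), 6).run()) == 'abcdef'
  # With an expected size of 5, pulling the last chunk would raise IOError.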
-
-
-class StorageApi(object):
-  """Interface for classes that implement low-level storage operations.
-
-  StorageApi is oblivious of the compression and hashing scheme used. These
-  details are handled in the higher-level Storage class.
-
-  Clients should generally not use StorageApi directly. Storage class is
-  preferred since it implements compression and upload optimizations.
-  """
-
-  @property
-  def location(self):
-    """URL of the backing store that this class is using."""
-    raise NotImplementedError()
-
-  @property
-  def namespace(self):
-    """Isolate namespace used by this storage.
-
-    Indirectly defines hashing scheme and compression method used.
-    """
-    raise NotImplementedError()
-
-  def fetch(self, digest, offset=0):
-    """Fetches an object and yields its content.
-
-    Arguments:
-      digest: hash digest of item to download.
-      offset: offset (in bytes) from the start of the file to resume fetch from.
-
-    Yields:
-      Chunks of downloaded item (as str objects).
-    """
-    raise NotImplementedError()
-
-  def push(self, item, push_state, content=None):
-    """Uploads an |item| with content generated by |content| generator.
-
-    |item| MUST go through 'contains' call to get |push_state| before it can
-    be pushed to the storage.
-
-    To be clear, here is one possible usage:
-      all_items = [... all items to push as Item subclasses ...]
-      for missing_item, push_state in storage_api.contains(all_items).items():
-        storage_api.push(missing_item, push_state)
-
-    When pushing to a namespace with compression, the data that should be pushed
-    and the data provided by the item are not the same. In that case |content| is
-    not None and it yields chunks of compressed data (using item.content() as
-    a source of original uncompressed data). This is implemented by Storage
-    class.
-
-    Arguments:
-      item: Item object that holds information about an item being pushed.
-      push_state: push state object as returned by 'contains' call.
-      content: a generator that yields chunks to push, item.content() if None.
-
-    Returns:
-      None.
-    """
-    raise NotImplementedError()
-
-  def contains(self, items):
-    """Checks for |items| on the server, prepares missing ones for upload.
-
-    Arguments:
-      items: list of Item objects to check for presence.
-
-    Returns:
-      A dict of missing Item -> opaque push state object to be passed to 'push'.
-      See doc string for 'push'.
-    """
-    raise NotImplementedError()
-
-
-class _IsolateServerPushState(object):
-  """Per-item state passed from IsolateServer.contains to IsolateServer.push.
-
-  Note this needs to be a global class to support pickling.
-  """
-
-  def __init__(self, preupload_status, size):
-    self.preupload_status = preupload_status
-    gs_upload_url = preupload_status.get('gs_upload_url') or None
-    if gs_upload_url:
-      self.upload_url = gs_upload_url
-      self.finalize_url = 'api/isolateservice/v1/finalize_gs_upload'
-    else:
-      self.upload_url = 'api/isolateservice/v1/store_inline'
-      self.finalize_url = None
-    self.uploaded = False
-    self.finalized = False
-    self.size = size
-
-
-class IsolateServer(StorageApi):
-  """StorageApi implementation that downloads and uploads to Isolate Server.
-
-  It uploads and downloads directly from Google Storage whenever appropriate.
-  Works only within single namespace.
-  """
-
-  def __init__(self, base_url, namespace):
-    super(IsolateServer, self).__init__()
-    assert file_path.is_url(base_url), base_url
-    self._base_url = base_url.rstrip('/')
-    self._namespace = namespace
-    self._namespace_dict = {
-        'compression': 'flate' if namespace.endswith(
-            ('-gzip', '-flate')) else '',
-        'digest_hash': 'sha-1',
-        'namespace': namespace,
-    }
-    self._lock = threading.Lock()
-    self._server_caps = None
-    self._memory_use = 0
-
-  @property
-  def _server_capabilities(self):
-    """Gets server details.
-
-    Returns:
-      Server capabilities dictionary as returned by /server_details endpoint.
-    """
-    # TODO(maruel): Make this request much earlier asynchronously while the
-    # files are being enumerated.
-
-    # TODO(vadimsh): Put |namespace| in the URL so that server can apply
-    # namespace-level ACLs to this call.
-
-    with self._lock:
-      if self._server_caps is None:
-        self._server_caps = net.url_read_json(
-            url='%s/api/isolateservice/v1/server_details' % self._base_url,
-            data={})
-      return self._server_caps
-
-  @property
-  def location(self):
-    return self._base_url
-
-  @property
-  def namespace(self):
-    return self._namespace
-
-  def fetch(self, digest, offset=0):
-    assert offset >= 0
-    source_url = '%s/api/isolateservice/v1/retrieve' % (
-        self._base_url)
-    logging.debug('download_file(%s, %d)', source_url, offset)
-    response = self.do_fetch(source_url, digest, offset)
-
-    if not response:
-      raise IOError(
-          'Attempted to fetch from %s; no data exist: %s / %s.' % (
-            source_url, self._namespace, digest))
-
-    # for DB uploads
-    content = response.get('content')
-    if content is not None:
-      yield base64.b64decode(content)
-      return
-
-    # for GS entities
-    connection = net.url_open(response['url'])
-    if not connection:
-      raise IOError('Failed to download %s / %s' % (self._namespace, digest))
-
-    # If |offset|, verify server respects it by checking Content-Range.
-    if offset:
-      content_range = connection.get_header('Content-Range')
-      if not content_range:
-        raise IOError('Missing Content-Range header')
-
-      # 'Content-Range' format is 'bytes <offset>-<last_byte_index>/<size>'.
-      # According to a spec, <size> can be '*' meaning "Total size of the file
-      # is not known in advance".
-      try:
-        match = re.match(r'bytes (\d+)-(\d+)/(\d+|\*)', content_range)
-        if not match:
-          raise ValueError()
-        content_offset = int(match.group(1))
-        last_byte_index = int(match.group(2))
-        size = None if match.group(3) == '*' else int(match.group(3))
-      except ValueError:
-        raise IOError('Invalid Content-Range header: %s' % content_range)
-
-      # Ensure returned offset equals requested one.
-      if offset != content_offset:
-        raise IOError('Expecting offset %d, got %d (Content-Range is %s)' % (
-            offset, content_offset, content_range))
-
-      # Ensure entire tail of the file is returned.
-      if size is not None and last_byte_index + 1 != size:
-        raise IOError('Incomplete response. Content-Range: %s' % content_range)
-
-    for data in connection.iter_content(NET_IO_FILE_CHUNK):
-      yield data
-
-  def push(self, item, push_state, content=None):
-    assert isinstance(item, Item)
-    assert item.digest is not None
-    assert item.size is not None
-    assert isinstance(push_state, _IsolateServerPushState)
-    assert not push_state.finalized
-
-    # Default to item.content().
-    content = item.content() if content is None else content
-    logging.info('Push state size: %d', push_state.size)
-    if isinstance(content, (basestring, list)):
-      # Memory is already used, too late.
-      with self._lock:
-        self._memory_use += push_state.size
-    else:
-      # TODO(vadimsh): Do not read from |content| generator when retrying push.
-      # If |content| is indeed a generator, it cannot be rewound back to the
-      # beginning of the stream. A retry will find it exhausted. A possible
-      # solution is to wrap |content| generator with some sort of caching
-      # restartable generator. It should be done alongside streaming support
-      # implementation.
-      #
-      # In theory, we should keep the generator, so that it is not serialized in
-      # memory. Sadly net.HttpService.request() requires the body to be
-      # serialized.
-      assert isinstance(content, types.GeneratorType), repr(content)
-      slept = False
-      # HACK HACK HACK. Please forgive me for my sins but OMG, it works!
-      # One byte less than 512mb. This is to cope with incompressible content.
-      max_size = int(sys.maxsize * 0.25)
-      while True:
-        with self._lock:
-          # This is due to 32-bit Python when uploading very large files. The
-          # problem is that it's comparing uncompressed sizes, while we care
-          # about compressed sizes since it's what is serialized in memory.
-          # The first check assumes large files are compressible and that by
-          # throttling to one upload at a time, we can survive. Otherwise, kaboom.
-          memory_use = self._memory_use
-          if ((push_state.size >= max_size and not memory_use) or
-              (memory_use + push_state.size <= max_size)):
-            self._memory_use += push_state.size
-            memory_use = self._memory_use
-            break
-        time.sleep(0.1)
-        slept = True
-      if slept:
-        logging.info('Unblocked: %d %d', memory_use, push_state.size)
-
-    try:
-      # This push operation may be a retry after a failed finalization call
-      # below; no need to reupload contents in that case.
-      if not push_state.uploaded:
-        # PUT file to |upload_url|.
-        success = self.do_push(push_state, content)
-        if not success:
-          raise IOError('Failed to upload file with hash %s to URL %s' % (
-              item.digest, push_state.upload_url))
-        push_state.uploaded = True
-      else:
-        logging.info(
-            'A file %s already uploaded, retrying finalization only',
-            item.digest)
-
-      # Optionally notify the server that it's done.
-      if push_state.finalize_url:
-        # TODO(vadimsh): Calculate MD5 or CRC32C sum while uploading a file and
-        # send it to isolated server. That way isolate server can verify that
-        # the data safely reached Google Storage (GS provides MD5 and CRC32C of
-        # stored files).
-        # TODO(maruel): Fix the server to accept properly data={} so
-        # url_read_json() can be used.
-        response = net.url_read_json(
-            url='%s/%s' % (self._base_url, push_state.finalize_url),
-            data={
-                'upload_ticket': push_state.preupload_status['upload_ticket'],
-            })
-        if not response or not response['ok']:
-          raise IOError('Failed to finalize file with hash %s.' % item.digest)
-      push_state.finalized = True
-    finally:
-      with self._lock:
-        self._memory_use -= push_state.size
-
-  def contains(self, items):
-    # Ensure all items were initialized with 'prepare' call. Storage does that.
-    assert all(i.digest is not None and i.size is not None for i in items)
-
-    # Request body is a json encoded list of dicts.
-    body = {
-        'items': [
-          {
-            'digest': item.digest,
-            'is_isolated': bool(item.high_priority),
-            'size': item.size,
-          } for item in items
-        ],
-        'namespace': self._namespace_dict,
-    }
-
-    query_url = '%s/api/isolateservice/v1/preupload' % self._base_url
-
-    # Response body is a list of push_urls (or null if file is already present).
-    response = None
-    try:
-      response = net.url_read_json(url=query_url, data=body)
-      if response is None:
-        raise isolated_format.MappingError(
-            'Failed to execute preupload query')
-    except ValueError as err:
-      raise isolated_format.MappingError(
-          'Invalid response from server: %s, body is %s' % (err, response))
-
-    # Pick Items that are missing, attach _PushState to them.
-    missing_items = {}
-    for preupload_status in response.get('items', []):
-      assert 'upload_ticket' in preupload_status, (
-          preupload_status, '/preupload did not generate an upload ticket')
-      index = int(preupload_status['index'])
-      missing_items[items[index]] = _IsolateServerPushState(
-          preupload_status, items[index].size)
-    logging.info('Queried %d files, %d cache hit',
-        len(items), len(items) - len(missing_items))
-    return missing_items
-
-  def do_fetch(self, url, digest, offset):
-    """Fetches isolated data from the URL.
-
-    Used only for fetching files, not for API calls. Can be overridden in
-    subclasses.
-
-    Args:
-      url: URL to fetch the data from, can possibly return http redirect.
-      offset: byte offset inside the file to start fetching from.
-
-    Returns:
-      net.HttpResponse compatible object, with 'read' and 'get_header' calls.
-    """
-    assert isinstance(offset, int)
-    data = {
-        'digest': digest.encode('utf-8'),
-        'namespace': self._namespace_dict,
-        'offset': offset,
-    }
-    # TODO(maruel): url + '?' + urllib.urlencode(data) once a HTTP GET endpoint
-    # is added.
-    return net.url_read_json(
-        url=url,
-        data=data,
-        read_timeout=DOWNLOAD_READ_TIMEOUT)
-
-  def do_push(self, push_state, content):
-    """Uploads isolated file to the URL.
-
-    Used only for storing files, not for API calls. Can be overridden in
-    subclasses.
-
-    Args:
-      push_state: an _IsolateServerPushState instance, as returned by contains().
-      content: an iterable that yields 'str' chunks.
-    """
-    # A cheesy way to avoid a memcpy of a (possibly huge) file, until streaming
-    # upload support is implemented.
-    if isinstance(content, list) and len(content) == 1:
-      content = content[0]
-    else:
-      content = ''.join(content)
-
-    # DB upload
-    if not push_state.finalize_url:
-      url = '%s/%s' % (self._base_url, push_state.upload_url)
-      content = base64.b64encode(content)
-      data = {
-          'upload_ticket': push_state.preupload_status['upload_ticket'],
-          'content': content,
-      }
-      response = net.url_read_json(url=url, data=data)
-      return response is not None and response['ok']
-
-    # upload to GS
-    url = push_state.upload_url
-    response = net.url_read(
-        content_type='application/octet-stream',
-        data=content,
-        method='PUT',
-        headers={'Cache-Control': 'public, max-age=31536000'},
-        url=url)
-    return response is not None
-
-
-class CacheMiss(Exception):
-  """Raised when an item is not in cache."""
-
-  def __init__(self, digest):
-    self.digest = digest
-    super(CacheMiss, self).__init__(
-        'Item with digest %r is not found in cache' % digest)
-
-
-class LocalCache(object):
-  """Local cache that stores objects fetched via Storage.
-
-  It can be accessed concurrently from multiple threads, so it should protect
-  its internal state with some lock.
-  """
-  cache_dir = None
-
-  def __init__(self):
-    self._lock = threading_utils.LockWithAssert()
-    # Profiling values.
-    self._added = []
-    self._initial_number_items = 0
-    self._initial_size = 0
-    self._evicted = []
-    self._used = []
-
-  def __contains__(self, digest):
-    raise NotImplementedError()
-
-  def __enter__(self):
-    """Context manager interface."""
-    return self
-
-  def __exit__(self, _exc_type, _exec_value, _traceback):
-    """Context manager interface."""
-    return False
-
-  @property
-  def added(self):
-    return self._added[:]
-
-  @property
-  def evicted(self):
-    return self._evicted[:]
-
-  @property
-  def used(self):
-    return self._used[:]
-
-  @property
-  def initial_number_items(self):
-    return self._initial_number_items
-
-  @property
-  def initial_size(self):
-    return self._initial_size
-
-  def cached_set(self):
-    """Returns a set of all cached digests (always a new object)."""
-    raise NotImplementedError()
-
-  def cleanup(self):
-    """Deletes any corrupted item from the cache and trims it if necessary."""
-    raise NotImplementedError()
-
-  def touch(self, digest, size):
-    """Ensures item is not corrupted and updates its LRU position.
-
-    Arguments:
-      digest: hash digest of item to check.
-      size: expected size of this item.
-
-    Returns:
-      True if item is in cache and not corrupted.
-    """
-    raise NotImplementedError()
-
-  def evict(self, digest):
-    """Removes item from cache if it's there."""
-    raise NotImplementedError()
-
-  def getfileobj(self, digest):
-    """Returns a readable file like object.
-
-    If file exists on the file system it will have a .name attribute with an
-    absolute path to the file.
-    """
-    raise NotImplementedError()
-
-  def write(self, digest, content):
-    """Reads data from |content| generator and stores it in cache.
-
-    Returns digest to simplify chaining.
-    """
-    raise NotImplementedError()
-
-
-class MemoryCache(LocalCache):
-  """LocalCache implementation that stores everything in memory."""
-
-  def __init__(self, file_mode_mask=0500):
-    """Args:
-      file_mode_mask: bit mask to AND the file mode with. The default value
-          makes all mapped files read-only.
-    """
-    super(MemoryCache, self).__init__()
-    self._file_mode_mask = file_mode_mask
-    self._contents = {}
-
-  def __contains__(self, digest):
-    with self._lock:
-      return digest in self._contents
-
-  def cached_set(self):
-    with self._lock:
-      return set(self._contents)
-
-  def cleanup(self):
-    pass
-
-  def touch(self, digest, size):
-    with self._lock:
-      return digest in self._contents
-
-  def evict(self, digest):
-    with self._lock:
-      v = self._contents.pop(digest, None)
-      if v is not None:
-        # self._evicted is a list of sizes (see LocalCache.__init__), not a set.
-        self._evicted.append(len(v))
-
-  def getfileobj(self, digest):
-    with self._lock:
-      try:
-        d = self._contents[digest]
-      except KeyError:
-        raise CacheMiss(digest)
-      self._used.append(len(d))
-    return io.BytesIO(d)
-
-  def write(self, digest, content):
-    # Assemble whole stream before taking the lock.
-    data = ''.join(content)
-    with self._lock:
-      self._contents[digest] = data
-      self._added.append(len(data))
-    return digest
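A minimal round-trip sketch; the sha-1 digest below is merely a plausible key,
since MemoryCache does not verify digests against content:

  import hashlib

  cache = MemoryCache()
  digest = cache.write(hashlib.sha1('payload').hexdigest(), ['payload'])
  assert digest in cache
  with cache.getfileobj(digest) as f:
    assert f.read() == 'payload'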
-
-
-class CachePolicies(object):
-  def __init__(self, max_cache_size, min_free_space, max_items):
-    """
-    Arguments:
-    - max_cache_size: Trim if the cache gets larger than this value. If 0, the
-                      cache is effectively a leak.
-    - min_free_space: Trim if disk free space becomes lower than this value. If
-                      0, it unconditionally fills the disk.
-    - max_items: Maximum number of items to keep in the cache. If 0, do not
-                 enforce a limit.
-    """
-    self.max_cache_size = max_cache_size
-    self.min_free_space = min_free_space
-    self.max_items = max_items
-
-
-class DiskCache(LocalCache):
-  """Stateful LRU cache in a flat hash table in a directory.
-
-  Saves its state as json file.
-  """
-  STATE_FILE = u'state.json'
-
-  def __init__(self, cache_dir, policies, hash_algo):
-    """
-    Arguments:
-      cache_dir: directory where to place the cache.
-      policies: cache retention policies.
-      hash_algo: hashing algorithm used.
-    """
-    # All protected methods (starting with '_') except _path should be called
-    # with self._lock held.
-    super(DiskCache, self).__init__()
-    self.cache_dir = cache_dir
-    self.policies = policies
-    self.hash_algo = hash_algo
-    self.state_file = os.path.join(cache_dir, self.STATE_FILE)
-    # Items in a LRU lookup dict(digest: size).
-    self._lru = lru.LRUDict()
-    # Current cached free disk space. It is updated by self._trim().
-    self._free_disk = 0
-    # The first item in the LRU cache that must not be evicted during this run
-    # since it was referenced. All items more recent than _protected in the LRU
-    # cache are also inherently protected. It could be a set() of all items
-    # referenced but this increases memory usage without a use case.
-    self._protected = None
-    # Cleanup operations done by self._load(), if any.
-    self._operations = []
-    with tools.Profiler('Setup'):
-      with self._lock:
-        # self._load() calls self._trim() which initializes self._free_disk.
-        self._load()
-
-  def __contains__(self, digest):
-    with self._lock:
-      return digest in self._lru
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, _exc_type, _exec_value, _traceback):
-    with tools.Profiler('CleanupTrimming'):
-      with self._lock:
-        self._trim()
-
-        logging.info(
-            '%5d (%8dkb) added',
-            len(self._added), sum(self._added) / 1024)
-        logging.info(
-            '%5d (%8dkb) current',
-            len(self._lru),
-            sum(self._lru.itervalues()) / 1024)
-        logging.info(
-            '%5d (%8dkb) evicted',
-            len(self._evicted), sum(self._evicted) / 1024)
-        logging.info(
-            '       %8dkb free',
-            self._free_disk / 1024)
-    return False
-
-  def cached_set(self):
-    with self._lock:
-      return self._lru.keys_set()
-
-  def cleanup(self):
-    """Cleans up the cache directory.
-
-    Ensures there are no unknown files in cache_dir.
-    Ensures the read-only bits are set correctly.
-
-    At this point, the cache has already been loaded and trimmed to respect
-    cache policies.
-    """
-    fs.chmod(self.cache_dir, 0700)
-    # Ensure that all files listed in the state still exist and add new ones.
-    previous = self._lru.keys_set()
-    # It'd be faster if there were a readdir() function.
-    for filename in fs.listdir(self.cache_dir):
-      if filename == self.STATE_FILE:
-        fs.chmod(os.path.join(self.cache_dir, filename), 0600)
-        continue
-      if filename in previous:
-        fs.chmod(os.path.join(self.cache_dir, filename), 0400)
-        previous.remove(filename)
-        continue
-
-      # An untracked file. Delete it.
-      logging.warning('Removing unknown file %s from cache', filename)
-      p = self._path(filename)
-      if fs.isdir(p):
-        try:
-          file_path.rmtree(p)
-        except OSError:
-          pass
-      else:
-        file_path.try_remove(p)
-      continue
-
-    if previous:
-      # Filter out entries that were not found.
-      logging.warning('Removed %d lost files', len(previous))
-      for filename in previous:
-        self._lru.pop(filename)
-
-    # What remains to be done is to hash every single item to
-    # detect corruption, then save to ensure state.json is up to date.
-    # Sadly, on a 50 GiB cache with 100 MiB/s I/O, this is still over 8 minutes.
-    # TODO(maruel): Let's revisit once directory metadata is stored in
-    # state.json so only the files that had been mapped since the last cleanup()
-    # call are manually verified.
-    #
-    #with self._lock:
-    #  for digest in self._lru:
-    #    if not isolated_format.is_valid_hash(
-    #        self._path(digest), self.hash_algo):
-    #      self.evict(digest)
-    #      logging.info('Deleted corrupted item: %s', digest)
-
-  def touch(self, digest, size):
-    """Verifies an actual file is valid.
-
-    Note that it doesn't compute the hash, so the file could still be corrupted
-    if its size didn't change.
-
-    TODO(maruel): More stringent verification while keeping the check fast.
-    """
-    # Do the check outside the lock.
-    if not is_valid_file(self._path(digest), size):
-      return False
-
-    # Update its LRU position.
-    with self._lock:
-      if digest not in self._lru:
-        return False
-      self._lru.touch(digest)
-      self._protected = self._protected or digest
-    return True
-
-  def evict(self, digest):
-    with self._lock:
-      # Do not check for 'digest == self._protected' since it could be because
-      # the object is corrupted.
-      self._lru.pop(digest)
-      self._delete_file(digest, UNKNOWN_FILE_SIZE)
-
-  def getfileobj(self, digest):
-    try:
-      f = fs.open(self._path(digest), 'rb')
-      with self._lock:
-        self._used.append(self._lru[digest])
-      return f
-    except IOError:
-      raise CacheMiss(digest)
-
-  def write(self, digest, content):
-    assert content is not None
-    with self._lock:
-      self._protected = self._protected or digest
-    path = self._path(digest)
-    # A stale broken file may remain. It is possible for the file to have the
-    # write access bit removed, which would cause the file_write() call to fail
-    # to open it in write mode. Take no chances here.
-    file_path.try_remove(path)
-    try:
-      size = file_write(path, content)
-    except:
-      # There are two possible places where an exception can occur:
-      #   1) Inside |content| generator in case of network or unzipping errors.
-      #   2) Inside file_write itself in case of disk IO errors.
-      # In any case, delete the incomplete file and propagate the exception to
-      # the caller; it will be logged there.
-      file_path.try_remove(path)
-      raise
-    # Make the file read-only in the cache. This has a few side-effects since
-    # the file node is modified, so every directory entry to this file becomes
-    # read-only. It's fine here because it is a new file.
-    file_path.set_read_only(path, True)
-    with self._lock:
-      self._add(digest, size)
-    return digest
-
-  def _load(self):
-    """Loads state of the cache from json file.
-
-    If cache_dir does not exist on disk, it is created.
-    """
-    self._lock.assert_locked()
-
-    if not fs.isfile(self.state_file):
-      if not os.path.isdir(self.cache_dir):
-        fs.makedirs(self.cache_dir)
-    else:
-      # Load state of the cache.
-      try:
-        self._lru = lru.LRUDict.load(self.state_file)
-      except ValueError as err:
-        logging.error('Failed to load cache state: %s' % (err,))
-        # Don't want to keep broken state file.
-        file_path.try_remove(self.state_file)
-    self._trim()
-    # We want the initial cache size after trimming, i.e. what is readily
-    # available.
-    self._initial_number_items = len(self._lru)
-    self._initial_size = sum(self._lru.itervalues())
-    if self._evicted:
-      logging.info(
-          'Trimming evicted items with the following sizes: %s',
-          sorted(self._evicted))
-
-  def _save(self):
-    """Saves the LRU ordering."""
-    self._lock.assert_locked()
-    if sys.platform != 'win32':
-      d = os.path.dirname(self.state_file)
-      if fs.isdir(d):
-        # Necessary otherwise the file can't be created.
-        file_path.set_read_only(d, False)
-    if fs.isfile(self.state_file):
-      file_path.set_read_only(self.state_file, False)
-    self._lru.save(self.state_file)
-
-  def _trim(self):
-    """Trims anything we don't know, make sure enough free space exists."""
-    self._lock.assert_locked()
-
-    # Ensure maximum cache size.
-    if self.policies.max_cache_size:
-      total_size = sum(self._lru.itervalues())
-      while total_size > self.policies.max_cache_size:
-        total_size -= self._remove_lru_file(True)
-
-    # Ensure maximum number of items in the cache.
-    if self.policies.max_items and len(self._lru) > self.policies.max_items:
-      for _ in xrange(len(self._lru) - self.policies.max_items):
-        self._remove_lru_file(True)
-
-    # Ensure enough free space.
-    self._free_disk = file_path.get_free_space(self.cache_dir)
-    trimmed_due_to_space = 0
-    while (
-        self.policies.min_free_space and
-        self._lru and
-        self._free_disk < self.policies.min_free_space):
-      trimmed_due_to_space += 1
-      self._remove_lru_file(True)
-
-    if trimmed_due_to_space:
-      total_usage = sum(self._lru.itervalues())
-      usage_percent = 0.
-      if total_usage:
-        usage_percent = 100. * float(total_usage) / self.policies.max_cache_size
-
-      logging.warning(
-          'Trimmed %s file(s) due to not enough free disk space: %.1fkb free,'
-          ' %.1fkb cache (%.1f%% of its maximum capacity of %.1fkb)',
-          trimmed_due_to_space,
-          self._free_disk / 1024.,
-          total_usage / 1024.,
-          usage_percent,
-          self.policies.max_cache_size / 1024.)
-    self._save()
-
-  def _path(self, digest):
-    """Returns the path to one item."""
-    return os.path.join(self.cache_dir, digest)
-
-  def _remove_lru_file(self, allow_protected):
-    """Removes the lastest recently used file and returns its size."""
-    self._lock.assert_locked()
-    try:
-      digest, size = self._lru.get_oldest()
-      if not allow_protected and digest == self._protected:
-        raise Error('Not enough space to map the whole isolated tree')
-    except KeyError:
-      raise Error('Nothing to remove')
-    digest, size = self._lru.pop_oldest()
-    logging.debug("Removing LRU file %s", digest)
-    self._delete_file(digest, size)
-    return size
-
-  def _add(self, digest, size=UNKNOWN_FILE_SIZE):
-    """Adds an item into LRU cache marking it as a newest one."""
-    self._lock.assert_locked()
-    if size == UNKNOWN_FILE_SIZE:
-      size = fs.stat(self._path(digest)).st_size
-    self._added.append(size)
-    self._lru.add(digest, size)
-    self._free_disk -= size
-    # Do a quicker version of self._trim(). It only enforces free disk space,
-    # not cache size limits. It doesn't actually look at real free disk space,
-    # only uses its cache values. self._trim() will be called later to enforce
-    # real trimming but doing this quick version here makes it possible to map
-    # an isolated that is larger than the current amount of free disk space when
-    # the cache size is already large.
-    while (
-        self.policies.min_free_space and
-        self._lru and
-        self._free_disk < self.policies.min_free_space):
-      self._remove_lru_file(False)
-
-  def _delete_file(self, digest, size=UNKNOWN_FILE_SIZE):
-    """Deletes cache file from the file system."""
-    self._lock.assert_locked()
-    try:
-      if size == UNKNOWN_FILE_SIZE:
-        size = fs.stat(self._path(digest)).st_size
-      file_path.try_remove(self._path(digest))
-      self._evicted.append(size)
-      self._free_disk += size
-    except OSError as e:
-      logging.error('Error attempting to delete a file %s:\n%s' % (digest, e))
-
-
-class IsolatedBundle(object):
-  """Fetched and parsed .isolated file with all dependencies."""
-
-  def __init__(self):
-    self.command = []
-    self.files = {}
-    self.read_only = None
-    self.relative_cwd = None
-    # The main .isolated file, a IsolatedFile instance.
-    self.root = None
-
-  def fetch(self, fetch_queue, root_isolated_hash, algo):
-    """Fetches the .isolated and all the included .isolated.
-
-    It enables support for "included" .isolated files. They are processed in
-    strict order but fetched asynchronously from the cache. This is important so
-    that a file in an included .isolated file that is overridden by an embedding
-    .isolated file is not fetched needlessly. The includes are fetched in one
-    pass and the files are fetched as soon as all the ones on the left side
-    of the tree have been fetched.
-
-    The prioritization is very important here for nested .isolated files.
-    'includes' have the highest priority and the algorithm is optimized for both
-    deep and wide trees. A deep one is a long link of .isolated files referenced
-    one at a time by one item in 'includes'. A wide one has a large number of
-    'includes' in a single .isolated file. 'left' is defined as an included
-    .isolated file earlier in the 'includes' list. So the order of the elements
-    in 'includes' is important.
-
-    As a side effect this method starts asynchronous fetch of all data files
-    by adding them to |fetch_queue|. It doesn't wait for data files to finish
-    fetching though.
-    """
-    self.root = isolated_format.IsolatedFile(root_isolated_hash, algo)
-
-    # Isolated files being retrieved now: hash -> IsolatedFile instance.
-    pending = {}
-    # Set of hashes of already retrieved items to refuse recursive includes.
-    seen = set()
-    # Set of IsolatedFiles whose data files have already been fetched.
-    processed = set()
-
-    def retrieve_async(isolated_file):
-      h = isolated_file.obj_hash
-      if h in seen:
-        raise isolated_format.IsolatedError(
-            'IsolatedFile %s is retrieved recursively' % h)
-      assert h not in pending
-      seen.add(h)
-      pending[h] = isolated_file
-      fetch_queue.add(h, priority=threading_utils.PRIORITY_HIGH)
-
-    # Start fetching root *.isolated file (single file, not the whole bundle).
-    retrieve_async(self.root)
-
-    while pending:
-      # Wait until some *.isolated file is fetched, parse it.
-      item_hash = fetch_queue.wait(pending)
-      item = pending.pop(item_hash)
-      with fetch_queue.cache.getfileobj(item_hash) as f:
-        item.load(f.read())
-
-      # Start fetching included *.isolated files.
-      for new_child in item.children:
-        retrieve_async(new_child)
-
-      # Always fetch *.isolated files in traversal order, waiting if necessary
-      # until the next to-be-processed node loads. "Waiting" is done by yielding
-      # back to the outer loop, which waits until some *.isolated is loaded.
-      for node in isolated_format.walk_includes(self.root):
-        if node not in processed:
-          # Not visited, and not yet loaded -> wait for it to load.
-          if not node.is_loaded:
-            break
-          # Not visited and loaded -> process it and continue the traversal.
-          self._start_fetching_files(node, fetch_queue)
-          processed.add(node)
-
-    # By now all *.isolated files, and only those, should have been processed.
-    all_isolateds = set(isolated_format.walk_includes(self.root))
-    assert all_isolateds == processed, (all_isolateds, processed)
-
-    # Extract 'command' and other bundle properties.
-    for node in isolated_format.walk_includes(self.root):
-      self._update_self(node)
-    self.relative_cwd = self.relative_cwd or ''
-
-  def _start_fetching_files(self, isolated, fetch_queue):
-    """Starts fetching files from |isolated| that are not yet being fetched.
-
-    Modifies self.files.
-    """
-    logging.debug('fetch_files(%s)', isolated.obj_hash)
-    for filepath, properties in isolated.data.get('files', {}).iteritems():
-      # The root isolated has priority over the files being mapped. In particular,
-      # overridden files must not be fetched.
-      if filepath not in self.files:
-        self.files[filepath] = properties
-
-        # Make sure that if the isolated is read-only, the mode doesn't have
-        # write bits.
-        if 'm' in properties and self.read_only:
-          properties['m'] &= ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
-
-        # Preemptively request hashed files.
-        if 'h' in properties:
-          logging.debug('fetching %s', filepath)
-          fetch_queue.add(
-              properties['h'], properties['s'], threading_utils.PRIORITY_MED)
-
-  def _update_self(self, node):
-    """Extracts bundle global parameters from loaded *.isolated file.
-
-    Will be called with each loaded *.isolated file in order of traversal of
-    isolated include graph (see isolated_format.walk_includes).
-    """
-    # Grabs properties.
-    if not self.command and node.data.get('command'):
-      # Ensure paths are correctly separated on Windows.
-      self.command = node.data['command']
-      if self.command:
-        self.command[0] = self.command[0].replace('/', os.path.sep)
-        self.command = tools.fix_python_path(self.command)
-    if self.read_only is None and node.data.get('read_only') is not None:
-      self.read_only = node.data['read_only']
-    if (self.relative_cwd is None and
-        node.data.get('relative_cwd') is not None):
-      self.relative_cwd = node.data['relative_cwd']
-
-
-def set_storage_api_class(cls):
-  """Replaces StorageApi implementation used by default."""
-  global _storage_api_cls
-  assert _storage_api_cls is None
-  assert issubclass(cls, StorageApi)
-  _storage_api_cls = cls
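A hypothetical sketch of plugging in a custom backend via this hook; the toy
class below is illustrative only and implements just enough of StorageApi for
the issubclass() check and the basic fetch/push/contains flow:

  class _InMemoryStorageApi(StorageApi):
    """Keeps pushed items in a plain dict."""
    def __init__(self, url, namespace):
      self._url, self._namespace, self._store = url, namespace, {}
    @property
    def location(self):
      return self._url
    @property
    def namespace(self):
      return self._namespace
    def fetch(self, digest, offset=0):
      yield self._store[digest][offset:]
    def push(self, item, push_state, content=None):
      self._store[item.digest] = ''.join(content or item.content())
    def contains(self, items):
      return dict((i, None) for i in items if i.digest not in self._store)

  set_storage_api_class(_InMemoryStorageApi)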
-
-
-def get_storage_api(url, namespace):
-  """Returns an object that implements low-level StorageApi interface.
-
-  It is used by Storage to work with a single isolate |namespace|. It should
-  rarely be used directly by clients; see 'get_storage' for a better
-  alternative.
-
-  Arguments:
-    url: URL of isolate service to use shared cloud based storage.
-    namespace: isolate namespace to operate in, also defines hashing and
-        compression scheme used, i.e. namespace names that end with '-gzip'
-        store compressed data.
-
-  Returns:
-    Instance of StorageApi subclass.
-  """
-  cls = _storage_api_cls or IsolateServer
-  return cls(url, namespace)
-
-
-def get_storage(url, namespace):
-  """Returns Storage class that can upload and download from |namespace|.
-
-  Arguments:
-    url: URL of isolate service to use shared cloud based storage.
-    namespace: isolate namespace to operate in, also defines hashing and
-        compression scheme used, i.e. namespace names that end with '-gzip'
-        store compressed data.
-
-  Returns:
-    Instance of Storage.
-  """
-  return Storage(get_storage_api(url, namespace))
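A short usage sketch (the server URL is a placeholder; per the docstring above,
a '-gzip' namespace implies compressed storage):

  with get_storage('https://isolate.example.com', 'default-gzip') as storage:
    uploaded = storage.upload_items([BufferItem('some data')])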
-
-
-def upload_tree(base_url, infiles, namespace):
-  """Uploads the given tree to the given url.
-
-  Arguments:
-    base_url:  The url of the isolate server to upload to.
-    infiles:   iterable of pairs (absolute path, metadata dict) of files.
-    namespace: The namespace to use on the server.
-  """
-  # Convert |infiles| into a list of FileItem objects, skipping duplicates.
-  # Filter out symlinks, since they are not represented by items on the
-  # isolate server side.
-  items = []
-  seen = set()
-  skipped = 0
-  for filepath, metadata in infiles:
-    assert isinstance(filepath, unicode), filepath
-    if 'l' not in metadata and filepath not in seen:
-      seen.add(filepath)
-      item = FileItem(
-          path=filepath,
-          digest=metadata['h'],
-          size=metadata['s'],
-          high_priority=metadata.get('priority') == '0')
-      items.append(item)
-    else:
-      skipped += 1
-
-  logging.info('Skipped %d duplicated entries', skipped)
-  with get_storage(base_url, namespace) as storage:
-    return storage.upload_items(items)
-
-
-def fetch_isolated(isolated_hash, storage, cache, outdir, use_symlinks):
-  """Aggressively downloads the .isolated file(s), then download all the files.
-
-  Arguments:
-    isolated_hash: hash of the root *.isolated file.
-    storage: Storage class that communicates with isolate storage.
-    cache: LocalCache class that knows how to store and map files locally.
-    outdir: Output directory to map file tree to.
-    use_symlinks: Use symlinks instead of hardlinks when True.
-
-  Returns:
-    IsolatedBundle object that holds details about loaded *.isolated file.
-  """
-  logging.debug(
-      'fetch_isolated(%s, %s, %s, %s, %s)',
-      isolated_hash, storage, cache, outdir, use_symlinks)
-  # Hash algorithm to use, defined by namespace |storage| is using.
-  algo = storage.hash_algo
-  with cache:
-    fetch_queue = FetchQueue(storage, cache)
-    bundle = IsolatedBundle()
-
-    with tools.Profiler('GetIsolateds'):
-      # Optionally support local files by manually adding them to cache.
-      if not isolated_format.is_valid_hash(isolated_hash, algo):
-        logging.debug('%s is not a valid hash, assuming a file', isolated_hash)
-        path = unicode(os.path.abspath(isolated_hash))
-        try:
-          isolated_hash = fetch_queue.inject_local_file(path, algo)
-        except IOError:
-          raise isolated_format.MappingError(
-              '%s doesn\'t seem to be a valid file. Did you intend to pass a '
-              'valid hash?' % isolated_hash)
-
-      # Load all *.isolated and start loading rest of the files.
-      bundle.fetch(fetch_queue, isolated_hash, algo)
-
-    with tools.Profiler('GetRest'):
-      # Create file system hierarchy.
-      file_path.ensure_tree(outdir)
-      create_directories(outdir, bundle.files)
-      create_symlinks(outdir, bundle.files.iteritems())
-
-      # Ensure working directory exists.
-      cwd = os.path.normpath(os.path.join(outdir, bundle.relative_cwd))
-      file_path.ensure_tree(cwd)
-
-      # Multimap: digest -> list of pairs (path, props).
-      remaining = {}
-      for filepath, props in bundle.files.iteritems():
-        if 'h' in props:
-          remaining.setdefault(props['h'], []).append((filepath, props))
-
-      # Now block on the remaining files to be downloaded and mapped.
-      logging.info('Retrieving remaining files (%d of them)...',
-          fetch_queue.pending_count)
-      last_update = time.time()
-      with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector:
-        while remaining:
-          detector.ping()
-
-          # Wait for any item to finish fetching to cache.
-          digest = fetch_queue.wait(remaining)
-
-          # Create the files in the destination using item in cache as the
-          # source.
-          for filepath, props in remaining.pop(digest):
-            fullpath = os.path.join(outdir, filepath)
-
-            with cache.getfileobj(digest) as srcfileobj:
-              filetype = props.get('t', 'basic')
-
-              if filetype == 'basic':
-                file_mode = props.get('m')
-                if file_mode:
-                  # Ignore all bits apart from the user permission bits.
-                  file_mode &= 0700
-                putfile(
-                    srcfileobj, fullpath, file_mode,
-                    use_symlink=use_symlinks)
-
-              elif filetype == 'ar':
-                basedir = os.path.dirname(fullpath)
-                extractor = arfile.ArFileReader(srcfileobj, fullparse=False)
-                for ai, ifd in extractor:
-                  fp = os.path.normpath(os.path.join(basedir, ai.name))
-                  file_path.ensure_tree(os.path.dirname(fp))
-                  putfile(ifd, fp, 0700, ai.size)
-
-              else:
-                raise isolated_format.IsolatedError(
-                      'Unknown file type %r', filetype)
-
-          # Report progress.
-          duration = time.time() - last_update
-          if duration > DELAY_BETWEEN_UPDATES_IN_SECS:
-            msg = '%d files remaining...' % len(remaining)
-            print msg
-            logging.info(msg)
-            last_update = time.time()
-
-  # The cache may evict items we just tried to fetch; that is a fatal error.
-  if not fetch_queue.verify_all_cached():
-    raise isolated_format.MappingError(
-        'Cache is too small to hold all requested files')
-  return bundle
-
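
A minimal sketch (not part of the original file) of driving fetch_isolated() above directly, mirroring what CMDdownload does further below; the server URL, hash and output directory are placeholders, and the in-memory cache plus the 'default-gzip' namespace are assumptions:

def download_tree(server_url, root_hash, outdir):
  # MemoryCache keeps fetched blobs in RAM, so this sketch is only suitable
  # for small isolated trees.
  cache = MemoryCache()
  with get_storage(server_url, 'default-gzip') as storage:
    bundle = fetch_isolated(
        isolated_hash=root_hash,
        storage=storage,
        cache=cache,
        outdir=outdir,
        use_symlinks=False)
  # bundle.command / bundle.relative_cwd describe how to run the mapped tree.
  return bundle
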
-
-def directory_to_metadata(root, algo, blacklist):
-  """Returns the FileItem list and .isolated metadata for a directory."""
-  root = file_path.get_native_path_case(root)
-  paths = isolated_format.expand_directory_and_symlink(
-      root, '.' + os.path.sep, blacklist, sys.platform != 'win32')
-  metadata = {
-    relpath: isolated_format.file_to_metadata(
-        os.path.join(root, relpath), {}, 0, algo)
-    for relpath in paths
-  }
-  for v in metadata.itervalues():
-    v.pop('t')
-  items = [
-      FileItem(
-          path=os.path.join(root, relpath),
-          digest=meta['h'],
-          size=meta['s'],
-          high_priority=relpath.endswith('.isolated'))
-      for relpath, meta in metadata.iteritems() if 'h' in meta
-  ]
-  return items, metadata
-
-
-def archive_files_to_storage(storage, files, blacklist):
-  """Stores every entries and returns the relevant data.
-
-  Arguments:
-    storage: a Storage object that communicates with the remote object store.
-    files: list of file paths to upload. If a directory is specified, a
-           .isolated file is created and its hash is returned.
-    blacklist: function that returns True if a file should be omitted.
-
-  Returns:
-    tuple(list(tuple(hash, path)), list(FileItem cold), list(FileItem hot)).
-    The first file in the first item is always the isolated file.
-  """
-  assert all(isinstance(i, unicode) for i in files), files
-  if len(files) != len(set(map(os.path.abspath, files))):
-    raise Error('Duplicate entries found.')
-
-  # List of tuple(hash, path).
-  results = []
-  # The temporary directory is only created as needed.
-  tempdir = None
-  try:
-    # TODO(maruel): Yield the files to a worker thread.
-    items_to_upload = []
-    for f in files:
-      try:
-        filepath = os.path.abspath(f)
-        if fs.isdir(filepath):
-          # Uploading a whole directory.
-          items, metadata = directory_to_metadata(
-              filepath, storage.hash_algo, blacklist)
-
-          # Create the .isolated file.
-          if not tempdir:
-            tempdir = tempfile.mkdtemp(prefix=u'isolateserver')
-          handle, isolated = tempfile.mkstemp(dir=tempdir, suffix=u'.isolated')
-          os.close(handle)
-          data = {
-              'algo':
-                  isolated_format.SUPPORTED_ALGOS_REVERSE[storage.hash_algo],
-              'files': metadata,
-              'version': isolated_format.ISOLATED_FILE_VERSION,
-          }
-          isolated_format.save_isolated(isolated, data)
-          h = isolated_format.hash_file(isolated, storage.hash_algo)
-          items_to_upload.extend(items)
-          items_to_upload.append(
-              FileItem(
-                  path=isolated,
-                  digest=h,
-                  size=fs.stat(isolated).st_size,
-                  high_priority=True))
-          results.append((h, f))
-
-        elif fs.isfile(filepath):
-          h = isolated_format.hash_file(filepath, storage.hash_algo)
-          items_to_upload.append(
-            FileItem(
-                path=filepath,
-                digest=h,
-                size=fs.stat(filepath).st_size,
-                high_priority=f.endswith('.isolated')))
-          results.append((h, f))
-        else:
-          raise Error('%s is neither a file nor a directory.' % f)
-      except OSError:
-        raise Error('Failed to process %s.' % f)
-    uploaded = storage.upload_items(items_to_upload)
-    cold = [i for i in items_to_upload if i in uploaded]
-    hot = [i for i in items_to_upload if i not in uploaded]
-    return results, cold, hot
-  finally:
-    if tempdir and fs.isdir(tempdir):
-      file_path.rmtree(tempdir)
-
-
-def archive(out, namespace, files, blacklist):
-  if files == ['-']:
-    files = sys.stdin.readlines()
-
-  if not files:
-    raise Error('Nothing to upload')
-
-  files = [f.decode('utf-8') for f in files]
-  blacklist = tools.gen_blacklist(blacklist)
-  with get_storage(out, namespace) as storage:
-    # Ignore stats.
-    results = archive_files_to_storage(storage, files, blacklist)[0]
-  print('\n'.join('%s %s' % (r[0], r[1]) for r in results))
-
-
-@subcommand.usage('<file1..fileN> or - to read from stdin')
-def CMDarchive(parser, args):
-  """Archives data to the server.
-
-  If a directory is specified, a .isolated file is created and the whole
-  directory is uploaded. Then this .isolated file can be included in another
-  one to run commands.
-
-  The command outputs each processed file along with its content hash. For
-  directories, the .isolated file generated for the directory is listed as the
-  directory entry itself.
-  """
-  add_isolate_server_options(parser)
-  add_archive_options(parser)
-  options, files = parser.parse_args(args)
-  process_isolate_server_options(parser, options, True, True)
-  try:
-    archive(options.isolate_server, options.namespace, files, options.blacklist)
-  except Error as e:
-    parser.error(e.args[0])
-  return 0
-
-
-def CMDdownload(parser, args):
-  """Download data from the server.
-
-  It can either download individual files or a complete tree from a .isolated
-  file.
-  """
-  add_isolate_server_options(parser)
-  parser.add_option(
-      '-s', '--isolated', metavar='HASH',
-      help='hash of an isolated file, .isolated file content is discarded, use '
-           '--file if you need it')
-  parser.add_option(
-      '-f', '--file', metavar='HASH DEST', default=[], action='append', nargs=2,
-      help='hash and destination of a file, can be used multiple times')
-  parser.add_option(
-      '-t', '--target', metavar='DIR', default='download',
-      help='destination directory')
-  parser.add_option(
-      '--use-symlinks', action='store_true',
-      help='Use symlinks instead of hardlinks')
-  add_cache_options(parser)
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('Unsupported arguments: %s' % args)
-
-  process_isolate_server_options(parser, options, True, True)
-  if bool(options.isolated) == bool(options.file):
-    parser.error('Use one of --isolated or --file, and only one.')
-  if not options.cache and options.use_symlinks:
-    parser.error('--use-symlinks requires the use of a cache with --cache')
-
-  cache = process_cache_options(options)
-  cache.cleanup()
-  options.target = unicode(os.path.abspath(options.target))
-  if options.isolated:
-    if (fs.isfile(options.target) or
-        (fs.isdir(options.target) and fs.listdir(options.target))):
-      parser.error(
-          '--target \'%s\' exists, please use another target' % options.target)
-  with get_storage(options.isolate_server, options.namespace) as storage:
-    # Fetching individual files.
-    if options.file:
-      # TODO(maruel): Enable cache in this case too.
-      channel = threading_utils.TaskChannel()
-      pending = {}
-      for digest, dest in options.file:
-        pending[digest] = dest
-        storage.async_fetch(
-            channel,
-            threading_utils.PRIORITY_MED,
-            digest,
-            UNKNOWN_FILE_SIZE,
-            functools.partial(file_write, os.path.join(options.target, dest)))
-      while pending:
-        fetched = channel.pull()
-        dest = pending.pop(fetched)
-        logging.info('%s: %s', fetched, dest)
-
-    # Fetching whole isolated tree.
-    if options.isolated:
-      with cache:
-        bundle = fetch_isolated(
-            isolated_hash=options.isolated,
-            storage=storage,
-            cache=cache,
-            outdir=options.target,
-            use_symlinks=options.use_symlinks)
-      if bundle.command:
-        rel = os.path.join(options.target, bundle.relative_cwd)
-        print('To run this test, please run from the directory %s:' % rel)
-        print('  ' + ' '.join(bundle.command))
-
-  return 0
-
-
-def add_archive_options(parser):
-  parser.add_option(
-      '--blacklist',
-      action='append', default=list(DEFAULT_BLACKLIST),
-      help='List of regexp to use as blacklist filter when uploading '
-           'directories')
-
-
-def add_isolate_server_options(parser):
-  """Adds --isolate-server and --namespace options to parser."""
-  parser.add_option(
-      '-I', '--isolate-server',
-      metavar='URL', default=os.environ.get('ISOLATE_SERVER', ''),
-      help='URL of the Isolate Server to use. Defaults to the environment '
-           'variable ISOLATE_SERVER if set. No need to specify https://, this '
-           'is assumed.')
-  parser.add_option(
-      '--namespace', default='default-gzip',
-      help='The namespace to use on the Isolate Server, default: %default')
-
-
-def process_isolate_server_options(
-    parser, options, set_exception_handler, required):
-  """Processes the --isolate-server option.
-
-  Returns the identity as determined by the server.
-  """
-  if not options.isolate_server:
-    if required:
-      parser.error('--isolate-server is required.')
-    return
-
-  try:
-    options.isolate_server = net.fix_url(options.isolate_server)
-  except ValueError as e:
-    parser.error('--isolate-server %s' % e)
-  if set_exception_handler:
-    on_error.report_on_exception_exit(options.isolate_server)
-  try:
-    return auth.ensure_logged_in(options.isolate_server)
-  except ValueError as e:
-    parser.error(str(e))
-
-
-def add_cache_options(parser):
-  cache_group = optparse.OptionGroup(parser, 'Cache management')
-  cache_group.add_option(
-      '--cache', metavar='DIR',
-      help='Directory to keep a local cache of the files. Accelerates download '
-           'by reusing already downloaded files. Default=%default')
-  cache_group.add_option(
-      '--max-cache-size',
-      type='int',
-      metavar='NNN',
-      default=50*1024*1024*1024,
-      help='Trim if the cache gets larger than this value, default=%default')
-  cache_group.add_option(
-      '--min-free-space',
-      type='int',
-      metavar='NNN',
-      default=2*1024*1024*1024,
-      help='Trim if disk free space becomes lower than this value, '
-           'default=%default')
-  cache_group.add_option(
-      '--max-items',
-      type='int',
-      metavar='NNN',
-      default=100000,
-      help='Trim if more than this number of items are in the cache, '
-           'default=%default')
-  parser.add_option_group(cache_group)
-
-
-def process_cache_options(options):
-  if options.cache:
-    policies = CachePolicies(
-        options.max_cache_size, options.min_free_space, options.max_items)
-
-    # |options.cache| path may not exist until DiskCache() instance is created.
-    return DiskCache(
-        unicode(os.path.abspath(options.cache)),
-        policies,
-        isolated_format.get_hash_algo(options.namespace))
-  else:
-    return MemoryCache()
-
-
-class OptionParserIsolateServer(logging_utils.OptionParserWithLogging):
-  def __init__(self, **kwargs):
-    logging_utils.OptionParserWithLogging.__init__(
-        self,
-        version=__version__,
-        prog=os.path.basename(sys.modules[__name__].__file__),
-        **kwargs)
-    auth.add_auth_options(self)
-
-  def parse_args(self, *args, **kwargs):
-    options, args = logging_utils.OptionParserWithLogging.parse_args(
-        self, *args, **kwargs)
-    auth.process_auth_options(self, options)
-    return options, args
-
-
-def main(args):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  return dispatcher.execute(OptionParserIsolateServer(), args)
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  file_path.enable_symlink()
-  sys.exit(main(sys.argv[1:]))
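
For context on the upload path deleted above, a minimal sketch (not from the original tree) of archiving a single directory programmatically with archive_files_to_storage(); the server URL and directory are placeholders and the pass-everything blacklist is an assumption:

def upload_directory(server_url, directory):
  blacklist = lambda _relpath: False  # omit nothing
  with get_storage(server_url, 'default-gzip') as storage:
    results, _cold, _hot = archive_files_to_storage(
        storage, [unicode(directory)], blacklist)
  # Per the archive_files_to_storage() docstring, the first result entry is
  # the generated .isolated file: (hash, path).
  return results[0][0]
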
diff --git a/tools/swarming_client/libs/__init__.py b/tools/swarming_client/libs/__init__.py
deleted file mode 100644
index 5c8814d..0000000
--- a/tools/swarming_client/libs/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
diff --git a/tools/swarming_client/libs/arfile/__init__.py b/tools/swarming_client/libs/arfile/__init__.py
deleted file mode 100644
index dc813a8..0000000
--- a/tools/swarming_client/libs/arfile/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# pylint: disable=wildcard-import,relative-import,redefined-builtin
-from arfile import *
-
-__all__ = [
-  'AR_FORMAT_BSD',
-  'AR_FORMAT_SIMPLE',
-  'AR_FORMAT_SYSV',
-  'ArFileReader',
-  'ArFileWriter',
-  'ArInfo',
-  'is_arfile',
-  'open',
-]
diff --git a/tools/swarming_client/libs/arfile/arfile.py b/tools/swarming_client/libs/arfile/arfile.py
deleted file mode 100644
index a387946..0000000
--- a/tools/swarming_client/libs/arfile/arfile.py
+++ /dev/null
@@ -1,353 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import collections
-import doctest
-import os
-import shutil
-import stat
-import struct
-
-AR_MAGIC_START = '!<arch>\n'
-AR_MAGIC_BIT = '\x60\n'
-AR_PADDING = '\n'
-
-AR_FORMAT_SIMPLE = ('Simple Format',)
-AR_FORMAT_BSD = ('4.4BSD Format',)
-AR_FORMAT_SYSV = ('System V / GNU Format',)
-
-AR_DEFAULT_MTIME = 1447140471
-AR_DEFAULT_UID = 1000
-AR_DEFAULT_GID = 1000
-AR_DEFAULT_MODE = 0100640 # 100640 -- Octal
-
-_ArInfoStruct = struct.Struct('16s 12s 6s 6s 8s 10s 2s')
-
-_ArInfoBase = collections.namedtuple('ArInfo', [
-    'format', 'name', 'size', 'mtime', 'uid', 'gid', 'mode'])
-
-class ArInfo(_ArInfoBase):
-  """A ArInfo object represents one member in an ArFile.
-
-  It does *not* contain the file's data.
-  """
-
-  @staticmethod
-  def _format(path, arformat):
-    u"""
-    Allow forcing the format to a given type
-    >>> assert ArInfo._format('a', None) == AR_FORMAT_SIMPLE
-    >>> assert ArInfo._format(u'\u2603', None) == AR_FORMAT_SIMPLE
-    >>> assert ArInfo._format('a', AR_FORMAT_BSD) == AR_FORMAT_BSD
-
-    Certain file paths require the BSD format
-    >>> assert ArInfo._format('f f', None) == AR_FORMAT_BSD
-    >>> assert ArInfo._format('123456789abcdef..', None) == AR_FORMAT_BSD
-
-    >>> ArInfo._format('123456789abcdef..', AR_FORMAT_SIMPLE)
-    Traceback (most recent call last):
-        ...
-    IOError: File name too long for format!
-
-    >>> ArInfo._format('f f', AR_FORMAT_SIMPLE)
-    Traceback (most recent call last):
-        ...
-    IOError: File name contains forbidden character for format!
-    """
-    if isinstance(path, unicode):
-      path = path.encode('utf-8')
-
-    if path.startswith('#1/'):
-      if not arformat:
-        arformat = AR_FORMAT_BSD
-      elif arformat is AR_FORMAT_SIMPLE:
-        raise IOError('File name starts with special prefix for format!')
-
-    if len(path) >= 16:
-      if arformat is None:
-        arformat = AR_FORMAT_BSD
-      elif arformat is AR_FORMAT_SIMPLE:
-        raise IOError('File name too long for format!')
-
-    if ' ' in path:
-      if not arformat:
-        arformat = AR_FORMAT_BSD
-      elif arformat is AR_FORMAT_SIMPLE:
-        raise IOError('File name contains forbidden character for format!')
-
-    if arformat is None:
-      arformat = AR_FORMAT_SIMPLE
-
-    return arformat
-
-  @property
-  def needspadding(self):
-    """
-    >>> ArInfo(AR_FORMAT_SIMPLE, '', 10, 0, 0, 0, 0).needspadding
-    False
-    >>> ArInfo(AR_FORMAT_SIMPLE, '', 11, 0, 0, 0, 0).needspadding
-    True
-    >>> ArInfo(AR_FORMAT_BSD, 'a', 10, 0, 0, 0, 0).needspadding
-    True
-    >>> ArInfo(AR_FORMAT_BSD, 'ab', 10, 0, 0, 0, 0).needspadding
-    False
-    >>> ArInfo(AR_FORMAT_BSD, 'ab', 11, 0, 0, 0, 0).needspadding
-    True
-    >>> ArInfo(AR_FORMAT_BSD, 'ab', 12, 0, 0, 0, 0).needspadding
-    False
-    """
-    return self.datasize % 2 != 0
-
-  @property
-  def datasize(self):
-    """
-    >>> ArInfo(AR_FORMAT_SIMPLE, '', 1, 0, 0, 0, 0).datasize
-    1
-    >>> ArInfo(AR_FORMAT_SIMPLE, '', 10, 0, 0, 0, 0).datasize
-    10
-    >>> ArInfo(AR_FORMAT_BSD, '', 1, 0, 0, 0, 0).datasize
-    1
-    >>> ArInfo(AR_FORMAT_BSD, 'a', 1, 0, 0, 0, 0).datasize
-    2
-    >>> ArInfo(AR_FORMAT_BSD, '', 10, 0, 0, 0, 0).datasize
-    10
-    >>> ArInfo(AR_FORMAT_BSD, 'abc', 10, 0, 0, 0, 0).datasize
-    13
-    """
-    if self.format is AR_FORMAT_SIMPLE:
-      return self.size
-    elif self.format is AR_FORMAT_BSD:
-      return len(self.name)+self.size
-    assert False, 'Unknown format %r' % self.format
-
-  @classmethod
-  def fromfileobj(cls, fileobj, fullparse=True):
-    """Create and return a ArInfo object from fileobj.
-
-    Raises IOError if the buffer is invalid.
-    """
-    buf = fileobj.read(_ArInfoStruct.size)
-    if not buf:
-      return None
-
-    if len(buf) < _ArInfoStruct.size:
-      raise IOError(
-          'not enough data for header, got %r, needed %r' % (
-              len(buf), _ArInfoStruct.size))
-
-    name, mtime, uid, gid, mode, datasize, magic = _ArInfoStruct.unpack(buf)
-
-    datasize = int(datasize)
-    if fullparse:
-      mtime = int(mtime)
-      uid = int(uid)
-      gid = int(gid)
-      mode = int(mode, 8)
-
-    if name.startswith('#1/'):
-      arformat = AR_FORMAT_BSD
-
-      try:
-        filenamesize = int(name[3:])
-      except ValueError:
-        raise IOError('invalid file name length: %r' % name[3:])
-
-      filename = fileobj.read(filenamesize)
-      if len(filename) != filenamesize:
-        raise IOError(
-            'not enough data for filename, got %r, needed %r' % (
-                len(filename), filenamesize))
-
-      filesize = datasize - filenamesize
-
-    elif name.startswith('/'):
-      arformat = AR_FORMAT_SYSV
-      raise SystemError('%s format is not supported.' % arformat)
-
-    else:
-      arformat = AR_FORMAT_SIMPLE
-      filename = name.strip()
-      filesize = datasize
-
-    if magic != AR_MAGIC_BIT:
-      raise IOError('file magic invalid, got %r, needed %r' % (
-          magic, AR_MAGIC_BIT))
-
-    return cls(
-        arformat, filename.decode('utf-8'), filesize, mtime, uid, gid, mode)
-
-  @classmethod
-  def frompath(cls, path, arformat=None, cwd=None):
-    """Return an ArInfo object from a file path for information."""
-    fp = path
-    if cwd:
-      fp = os.path.join(cwd, path)
-    st = os.stat(fp)
-
-    if not stat.S_ISREG(st.st_mode):
-      raise IOError('Only works on regular files.')
-
-    return cls(
-        cls._format(path, arformat), path,
-        st.st_size, st.st_mtime, st.st_uid, st.st_gid, st.st_mode)
-
-  @classmethod
-  def fromdefault(cls, path, size, arformat=None):
-    """Return an ArInfo object using name and size (with defaults elsewhere).
-
-    Only a file's name and size are needed to create the ArInfo; all of the
-    modification time, user, group and mode information will be set to default
-    values. This means that you don't need to perform an expensive stat of the
-    file.
-
-    >>> ai = ArInfo.fromdefault('abc123', 10)
-    >>> ai.name
-    'abc123'
-    >>> ai.size
-    10
-    >>> assert ai.mtime == AR_DEFAULT_MTIME
-    >>> assert ai.uid == AR_DEFAULT_UID
-    >>> assert ai.gid == AR_DEFAULT_GID
-    >>> assert ai.mode == AR_DEFAULT_MODE
-    """
-    return cls(
-        cls._format(path, arformat), path, size,
-        AR_DEFAULT_MTIME, AR_DEFAULT_UID, AR_DEFAULT_GID, AR_DEFAULT_MODE)
-
-  def tofileobj(self, fileobj):
-    """Write an ArInfo object to file like object."""
-    # File name, 16 bytes
-    name = self.name.encode('utf-8')
-    if self.format is AR_FORMAT_SIMPLE:
-      assert len(name) < 16
-      fileobj.write('%-16s' % name)
-      datasize = self.size
-    elif self.format is AR_FORMAT_BSD:
-      fileobj.write('#1/%-13s' % str(len(name)))
-      datasize = self.size + len(name)
-
-    # Modtime, 12 bytes
-    fileobj.write('%-12i' % self.mtime)
-    # Owner ID, 6 bytes
-    fileobj.write('%-6i' % self.uid)
-    # Group ID, 6 bytes
-    fileobj.write('%-6i' % self.gid)
-    # File mode, 8 bytes
-    fileobj.write('%-8o' % self.mode)
-    # File size, 10 bytes
-    fileobj.write('%-10s' % datasize)
-    # File magic, 2 bytes
-    fileobj.write(AR_MAGIC_BIT)
-
-    # Filename - BSD variant
-    if self.format is AR_FORMAT_BSD:
-      fileobj.write(name)
-
-
-class ArFileReader(object):
-  """Read an ar archive from the given input buffer."""
-
-  def __init__(self, fileobj, fullparse=True):
-    self.fullparse = fullparse
-    self.fileobj = fileobj
-
-    magic = self.fileobj.read(len(AR_MAGIC_START))
-    if magic != AR_MAGIC_START:
-      raise IOError(
-          'Not an ar file, invalid magic, got %r, wanted %r.' % (
-              magic, AR_MAGIC_START))
-
-  def __iter__(self):
-    while True:
-      if self.fileobj.closed:
-        raise IOError('Tried to read after the file closed.')
-      ai = ArInfo.fromfileobj(self.fileobj, self.fullparse)
-      if not ai:
-        return
-
-      start = self.fileobj.tell()
-      yield ai, self.fileobj
-      end = self.fileobj.tell()
-
-      read = end - start
-      # If the reader didn't touch the input buffer, seek past the file.
-      if not read:
-        self.fileobj.seek(ai.size, os.SEEK_CUR)
-      elif read != ai.size:
-        raise IOError(
-            'Wrong amount of data read from fileobj! got %i, wanted %i' % (
-                read, ai.size))
-
-      if ai.needspadding:
-        padding = self.fileobj.read(len(AR_PADDING))
-        if padding != AR_PADDING:
-          raise IOError(
-              'incorrect padding, got %r, wanted %r' % (
-                  padding, AR_PADDING))
-
-  def close(self):
-    """Close the archive.
-
-    Will close the input buffer.
-    """
-    self.fileobj.close()
-
-
-class ArFileWriter(object):
-  """Write an ar archive from the given output buffer."""
-
-  def __init__(self, fileobj):
-    self.fileobj = fileobj
-    self.fileobj.write(AR_MAGIC_START)
-
-  def addfile(self, arinfo, fileobj=None):
-    if not fileobj and arinfo.size:
-      raise ValueError('Need to supply fileobj if file is non-zero in size.')
-
-    arinfo.tofileobj(self.fileobj)
-    if fileobj:
-      shutil.copyfileobj(fileobj, self.fileobj, arinfo.size)
-
-    if arinfo.needspadding:
-      self.fileobj.write(AR_PADDING)
-
-  def flush(self):
-    """Flush the output buffer."""
-    self.fileobj.flush()
-
-  def close(self):
-    """Close the archive.
-
-    Will close the output buffer."""
-    self.fileobj.close()
-
-
-def is_arfile(name):
-  with file(name, 'rb') as f:
-    return f.read(len(AR_MAGIC_START)) == AR_MAGIC_START
-
-
-# pylint: disable=redefined-builtin
-def open(name=None, mode='r', fileobj=None):
-  if name is None and fileobj is None:
-    raise ValueError('Nothing to open!')
-
-  if name is not None:
-    if fileobj is not None:
-      raise ValueError('Provided both a file name and file object!')
-    fileobj = file(name, mode+'b')
-
-  if 'b' not in fileobj.mode:
-    raise ValueError('File object not open in binary mode.')
-
-  if mode == 'rb':
-    return ArFileReader(fileobj)
-  elif mode == 'wb':
-    return ArFileWriter(fileobj)
-
-  raise ValueError('Unknown file mode.')
-
-
-if __name__ == '__main__':
-  doctest.testmod()
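
To make the ar format concrete, a minimal round-trip sketch (not part of the original file) using the writer and reader deleted above; it assumes the swarming_client root is importable and uses throwaway names and contents:

import io

from libs.arfile import arfile

buf = io.BytesIO()
writer = arfile.ArFileWriter(buf)
writer.addfile(arfile.ArInfo.fromdefault('hello.txt', 5), io.BytesIO('hello'))
writer.flush()  # close() would also close the underlying BytesIO

buf.seek(0)
for info, fd in arfile.ArFileReader(buf):
  print info.name, fd.read(info.size)  # -> hello.txt hello
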
diff --git a/tools/swarming_client/libs/arfile/arfile_test.py b/tools/swarming_client/libs/arfile/arfile_test.py
deleted file mode 100755
index 08290d7..0000000
--- a/tools/swarming_client/libs/arfile/arfile_test.py
+++ /dev/null
@@ -1,603 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# pylint: disable=relative-import
-
-import doctest
-import io
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import unittest
-
-import arfile
-import cli
-
-
-ARFILE_DIR = os.path.dirname(os.path.abspath(__file__))
-sys.path.insert(0, ARFILE_DIR)
-
-
-if not hasattr(subprocess, 'DEVNULL'):
-  subprocess.DEVNULL = file(os.devnull, 'wb')
-
-
-def filesystem_supports_unicode():
-  try:
-    u'\u2603'.encode(sys.getfilesystemencoding())
-    return True
-  except UnicodeEncodeError:
-    return False
-
-
-class ClosesSaveIOBytes(io.BytesIO):
-
-  def close(self):
-    _value = self.getvalue()
-    self.getvalue = lambda: _value
-    io.BytesIO.close(self)
-
-
-AR_TEST_SIMPLE1 = (
-    # ar file header
-    '!<arch>\n'
-    # File 1
-    # ----------------------
-    # (16 bytes) simple file
-    'filename1       '
-    # (12 bytes) modification time
-    '123         '
-    # (6 bytes) user id
-    '1000  '
-    # (6 bytes) group id
-    '1000  '
-    # (8 bytes) file mode
-    '100640  '
-    # (10 bytes) data size
-    '6         '
-    # (2 bytes) file magic
-    '\x60\n'
-    # File data
-    'abc123'
-    # Finished
-    '')
-
-AR_TEST_SIMPLE_UTF = (
-    # ar file header
-    '!<arch>\n'
-    # File 1
-    # ----------------------
-    # (16 bytes) simple file
-    '\xe2\x98\x83             '
-    # (12 bytes) modification time
-    '123         '
-    # (6 bytes) user id
-    '1000  '
-    # (6 bytes) group id
-    '1000  '
-    # (8 bytes) file mode
-    '100640  '
-    # (10 bytes) data size
-    '4         '
-    # (2 bytes) file magic
-    '\x60\n'
-    # (4 bytes) File data
-    '\xf0\x9f\x92\xa9'
-    # Finished
-    '')
-
-AR_TEST_BSD1 = (
-    # ar file header
-    '!<arch>\n'
-    # File 1
-    # ----------------------
-    # (16 bytes) BSD style filename length
-    '#1/9            '
-    # (12 bytes) modification time
-    '1234        '
-    # (6 bytes) user id
-    '1001  '
-    # (6 bytes) group id
-    '1001  '
-    # (8 bytes) file mode
-    '100644  '
-    # (10 bytes) data size
-    '15        '
-    # (2 bytes) file magic
-    '\x60\n'
-    # BSD style filename
-    'filename1'
-    # File data
-    'abc123'
-    # Padding
-    '\n'
-    # Finished
-    '')
-
-AR_TEST_BSD2 = (
-    # ar file header
-    '!<arch>\n'
-
-    # File 1
-    # ----------------------
-    # (16 bytes) filename len
-    '#1/5            '
-    # (12 bytes) mtime
-    '1447140471  '
-    # (6 bytes) owner id
-    '1000  '
-    # (6 bytes) group id
-    '1000  '
-    # (8 bytes) file mode
-    '100640  '
-    # (10 bytes) Data size
-    '13        '
-    # (2 bytes) File magic
-    '\x60\n'
-    # (5 bytes) File name
-    'file1'
-    # (8 bytes) File data
-    'contents'
-    # (1 byte) Padding
-    '\n'
-
-    # File 2
-    # ----------------------
-    # (16 bytes) filename len
-    '#1/7            '
-    # (12 bytes) mtime
-    '1447140471  '
-    # (6 bytes) owner id
-    '1000  '
-    # (6 bytes) group id
-    '1000  '
-    # (8 bytes) file mode
-    '100640  '
-    # (10 bytes) Data size
-    '10        '
-    # (2 bytes) File magic
-    '\x60\n'
-    # (7 bytes) File name
-    'fileabc'
-    # (3 bytes) File data
-    '123'
-    # (0 byte) No padding
-    ''
-
-    # File 3
-    # ----------------------
-    # (16 bytes) filename len
-    '#1/10           '
-    # (12 bytes) mtime
-    '1447140471  '
-    # (6 bytes) owner id
-    '1000  '
-    # (6 bytes) group id
-    '1000  '
-    # (8 bytes) file mode
-    '100640  '
-    # (10 bytes) Data size
-    '16        '
-    # (2 bytes) File magic
-    '\x60\n'
-    # (10 bytes) File name
-    'dir1/file1'
-    # (6 bytes) File data
-    '123abc'
-    # (0 byte) No padding
-    ''
-
-    # Finished
-    '')
-
-AR_TEST_BSD_UTF = (
-    # ar file header
-    '!<arch>\n'
-    # File 1
-    # ----------------------
-    # (16 bytes) BSD style filename length
-    '#1/3            '
-    # (12 bytes) modification time
-    '1234        '
-    # (6 bytes) user id
-    '1001  '
-    # (6 bytes) group id
-    '1001  '
-    # (8 bytes) file mode
-    '100644  '
-    # (10 bytes) data size
-    '7         '
-    # (2 bytes) file magic
-    '\x60\n'
-    # (3 bytes) BSD style filename
-    '\xe2\x98\x83'
-    # (4 bytes) File data
-    '\xf0\x9f\x92\xa9'
-    # Padding
-    '\n'
-    # Finished
-    '')
-
-
-class TestArFileReader(unittest.TestCase):
-
-  def testSimple1(self):
-    fileobj = io.BytesIO(AR_TEST_SIMPLE1)
-
-    afri = iter(arfile.ArFileReader(fileobj))
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_SIMPLE, ai.format)
-    self.assertEqual('filename1', ai.name)
-    self.assertEqual(6, ai.size)
-    self.assertEqual(123, ai.mtime)
-    self.assertEqual(1000, ai.uid)
-    self.assertEqual(1000, ai.gid)
-    self.assertEqual('0100640', oct(ai.mode))
-    self.assertEqual('abc123', af.read(ai.size))
-
-  def testSimpleUTF(self):
-    fileobj = io.BytesIO(AR_TEST_SIMPLE_UTF)
-
-    afri = iter(arfile.ArFileReader(fileobj))
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_SIMPLE, ai.format)
-    self.assertEqual(u'\u2603', ai.name)
-    self.assertEqual(4, ai.size)
-    self.assertEqual(123, ai.mtime)
-    self.assertEqual(1000, ai.uid)
-    self.assertEqual(1000, ai.gid)
-    self.assertEqual('0100640', oct(ai.mode))
-    self.assertEqual(u'\U0001f4a9', af.read(ai.size).decode('utf-8'))
-
-  def testBSD1(self):
-    fileobj = io.BytesIO(AR_TEST_BSD1)
-
-    afri = iter(arfile.ArFileReader(fileobj))
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_BSD, ai.format)
-    self.assertEqual('filename1', ai.name)
-    self.assertEqual(6, ai.size)
-    self.assertEqual(1234, ai.mtime)
-    self.assertEqual(1001, ai.uid)
-    self.assertEqual(1001, ai.gid)
-    self.assertEqual('0100644', oct(ai.mode))
-    self.assertEqual('abc123', af.read(ai.size))
-
-  def testBSD2(self):
-    fileobj = io.BytesIO(AR_TEST_BSD2)
-
-    afri = iter(arfile.ArFileReader(fileobj))
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_BSD, ai.format)
-    self.assertEqual('file1', ai.name)
-    self.assertEqual(8, ai.size)
-    self.assertEqual(1447140471, ai.mtime)
-    self.assertEqual(1000, ai.uid)
-    self.assertEqual(1000, ai.gid)
-    self.assertEqual('0100640', oct(ai.mode))
-    self.assertEqual('contents', af.read(ai.size))
-
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_BSD, ai.format)
-    self.assertEqual('fileabc', ai.name)
-    self.assertEqual(3, ai.size)
-    self.assertEqual(1447140471, ai.mtime)
-    self.assertEqual(1000, ai.uid)
-    self.assertEqual(1000, ai.gid)
-    self.assertEqual('0100640', oct(ai.mode))
-    self.assertEqual('123', af.read(ai.size))
-
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_BSD, ai.format)
-    self.assertEqual('dir1/file1', ai.name)
-    self.assertEqual(6, ai.size)
-    self.assertEqual(1447140471, ai.mtime)
-    self.assertEqual(1000, ai.uid)
-    self.assertEqual(1000, ai.gid)
-    self.assertEqual('0100640', oct(ai.mode))
-    self.assertEqual('123abc', af.read(ai.size))
-
-  def testBSDUTF(self):
-    fileobj = io.BytesIO(AR_TEST_BSD_UTF)
-
-    afri = iter(arfile.ArFileReader(fileobj))
-    ai, af = afri.next()
-    self.assertIs(arfile.AR_FORMAT_BSD, ai.format)
-    self.assertEqual(u'\u2603', ai.name)
-    self.assertEqual(4, ai.size)
-    self.assertEqual(1234, ai.mtime)
-    self.assertEqual(1001, ai.uid)
-    self.assertEqual(1001, ai.gid)
-    self.assertEqual('0100644', oct(ai.mode))
-    self.assertEqual(u'\U0001f4a9', af.read(ai.size).decode('utf-8'))
-
-
-class TestArFileWriter(unittest.TestCase):
-
-  def testSimple1(self):
-    fileobj = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(fileobj)
-    ai = arfile.ArInfo(
-        arfile.AR_FORMAT_SIMPLE, 'filename1', 6, 123, 1000, 1000, 0100640)
-    afw.addfile(ai, io.BytesIO('abc123'))
-    afw.close()
-
-    self.assertMultiLineEqual(AR_TEST_SIMPLE1, fileobj.getvalue())
-
-  def testSimpleUTF(self):
-    fileobj = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(fileobj)
-    ai = arfile.ArInfo(
-        arfile.AR_FORMAT_SIMPLE, u'\u2603', 4, 123, 1000, 1000, 0100640)
-    afw.addfile(ai, io.BytesIO(u'\U0001f4a9'.encode('utf-8')))
-    afw.close()
-
-    self.assertMultiLineEqual(AR_TEST_SIMPLE_UTF, fileobj.getvalue())
-
-  def testBSD1(self):
-    fileobj = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(fileobj)
-    ai = arfile.ArInfo(
-        arfile.AR_FORMAT_BSD, 'filename1', 6, 1234, 1001, 1001, 0100644)
-    afw.addfile(ai, io.BytesIO('abc123'))
-    afw.close()
-
-    self.assertMultiLineEqual(AR_TEST_BSD1, fileobj.getvalue())
-
-  def testBSD2(self):
-    fileobj = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(fileobj)
-    afw.addfile(
-        arfile.ArInfo.fromdefault(
-            'file1', 8, arformat=arfile.AR_FORMAT_BSD),
-        io.BytesIO('contents'))
-    afw.addfile(
-        arfile.ArInfo.fromdefault(
-            'fileabc', 3, arformat=arfile.AR_FORMAT_BSD),
-        io.BytesIO('123'))
-    afw.addfile(
-        arfile.ArInfo.fromdefault(
-            'dir1/file1', 6, arformat=arfile.AR_FORMAT_BSD),
-        io.BytesIO('123abc'))
-    afw.close()
-
-    self.assertMultiLineEqual(AR_TEST_BSD2, fileobj.getvalue())
-
-  def testBSDUTF(self):
-    fileobj = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(fileobj)
-    ai = arfile.ArInfo(
-        arfile.AR_FORMAT_BSD, u'\u2603', 4, 1234, 1001, 1001, 0100644)
-    afw.addfile(ai, io.BytesIO(u'\U0001f4a9'.encode('utf-8')))
-    afw.close()
-
-    self.assertMultiLineEqual(AR_TEST_BSD_UTF, fileobj.getvalue())
-
-
-class BaseTestSuite(object):
-
-  def testSimple1(self):
-    self.assertWorking(
-        (
-            arfile.ArInfo(
-                arfile.AR_FORMAT_SIMPLE, 'filename1',
-                6, 123, 1000, 1000, 0100640),
-            'abc123'))
-
-  def testSimpleUTF(self):
-    self.assertWorking(
-        (
-            arfile.ArInfo(
-                arfile.AR_FORMAT_SIMPLE, u'\u2603',
-                4, 123, 1000, 1000, 0100640),
-            u'\U0001f4a9'.encode('utf-8')))
-
-  def testBSD1(self):
-    self.assertWorking(
-        (
-            arfile.ArInfo(
-                arfile.AR_FORMAT_BSD, 'filename1',
-                6, 123, 1000, 1000, 0100640),
-            'abc123'))
-
-  def testBSD2(self):
-    self.assertWorking(
-        (
-            arfile.ArInfo.fromdefault(
-                'file1', 8, arformat=arfile.AR_FORMAT_BSD),
-            'contents'),
-        (
-            arfile.ArInfo.fromdefault(
-                'fileabc', 3, arformat=arfile.AR_FORMAT_BSD),
-            '123'),
-        (
-            arfile.ArInfo.fromdefault(
-                'dir1/file1', 6, arformat=arfile.AR_FORMAT_BSD),
-            '123abc'))
-
-  def testBSDUTF(self):
-    self.assertWorking(
-        (
-            arfile.ArInfo(
-                arfile.AR_FORMAT_BSD, u'\u2603',
-                4, 123, 1000, 1000, 0100640),
-            u'\U0001f4a9'.encode('utf-8')))
-
-  def testMixed(self):
-    self.assertWorking(
-        (arfile.ArInfo.fromdefault('file1', 0), ''),
-        (arfile.ArInfo.fromdefault('f f', 1), 'a'),
-        (arfile.ArInfo.fromdefault('123456789abcedefa', 1), 'a'))
-
-
-class TestArRoundTrip(BaseTestSuite, unittest.TestCase):
-
-  def assertWorking(self, *initems):
-    outfile = ClosesSaveIOBytes()
-
-    afw = arfile.ArFileWriter(outfile)
-    for ai, data in initems:
-      assert ai.size == len(data)
-      afw.addfile(ai, io.BytesIO(data))
-    afw.close()
-
-    infile = io.BytesIO(outfile.getvalue())
-    afr = arfile.ArFileReader(infile)
-
-    outitems = []
-    for ai, fd in afr:
-      data = fd.read(ai.size)
-      outitems.append((ai, data))
-
-    self.assertSequenceEqual(initems, outitems)
-
-
-def system_has_ar():
-  retcode = subprocess.call(
-      'ar', stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
-  return retcode == 1
-
-
-@unittest.skipIf(not system_has_ar(), 'no ar binary found.')
-class TestArExternal(BaseTestSuite, unittest.TestCase):
-
-  def assertWorking(self, *initems):
-    tf = tempfile.NamedTemporaryFile(mode='wb')
-    afw = arfile.ArFileWriter(tf)
-
-    files = []
-    for ai, data in initems:
-      files.append(ai.name)
-      assert ai.size == len(data)
-      afw.addfile(ai, io.BytesIO(data))
-    afw.flush()
-
-    output = subprocess.check_output(['ar', 't', tf.name])
-    self.assertMultiLineEqual('\n'.join(files), output.decode('utf-8').strip())
-    tf.close()
-
-
-class TestCLI(unittest.TestCase):
-
-  def runCLI(self, args):
-    orig_stdout = sys.stdout
-    orig_stderr = sys.stderr
-    try:
-      sys.stdout = io.StringIO()
-      sys.stderr = io.StringIO()
-      cli.main('artool', args)
-      return sys.stdout.getvalue(), sys.stderr.getvalue()
-    finally:
-      sys.stdout = orig_stdout
-      sys.stderr = orig_stderr
-
-  def assertCLI(self, *initems, **kw):
-    extra_args = kw.get('extra_args', [])
-
-    indir = None
-    ardir = None
-    outdir = None
-    try:
-      indir = tempfile.mkdtemp().decode(sys.getfilesystemencoding())
-      ardir = tempfile.mkdtemp().decode(sys.getfilesystemencoding())
-      outdir = tempfile.mkdtemp().decode(sys.getfilesystemencoding())
-
-      arp = os.path.join(ardir, 'out.ar')
-      assert not os.path.exists(arp)
-
-      # Write out a directory tree
-      files = []
-      for fp, contents in initems:
-        fn = os.path.join(indir, fp)
-        dn = os.path.dirname(fn)
-        if not os.path.exists(dn):
-          os.makedirs(dn)
-
-        with file(fn, 'wb') as f:
-          f.write(contents)
-
-        files.append(fp)
-
-      files.sort()
-      fileslist = '\n'.join(files)
-
-      # Create an archive from a directory
-      self.runCLI(['create', '--filename', arp, indir] + extra_args)
-      self.assertTrue(
-          os.path.exists(arp), '%s file should exist' % arp)
-
-      # List the archive contents
-      output, _ = self.runCLI(['list', '--filename', arp])
-      filesoutput = '\n'.join(sorted(output[:-1].split('\n')))
-      self.assertMultiLineEqual(fileslist, filesoutput)
-
-      # Extract the archive
-      os.chdir(outdir)
-      self.runCLI(['extract', '--filename', arp] + extra_args)
-
-      # Walk the directory tree and collect the extracted output
-      outitems = []
-      for root, _, files in os.walk(outdir):
-        for fn in files:
-          fp = os.path.join(root, fn)
-          outitems.append([fp[len(outdir)+1:], file(fp, 'rb').read()])
-
-      # Check the two are equal
-      self.assertSequenceEqual(sorted(initems), sorted(outitems))
-
-    finally:
-      if indir:
-        shutil.rmtree(indir, ignore_errors=True)
-      if ardir:
-        shutil.rmtree(ardir, ignore_errors=True)
-      if outdir:
-        shutil.rmtree(outdir, ignore_errors=True)
-
-  def testSimple1(self):
-    self.assertCLI(['file1', 'contents1'])
-
-  def testFullStat(self):
-    self.assertCLI(
-        ['file1', 'contents1'],
-        extra_args=['--dont-use-defaults'])
-
-  def testMultiple(self):
-    self.assertCLI(
-        ['file1', 'contents1'],
-        ['dir1/file2', 'contents2'],
-        ['dir2/dir3/file3', 'contents3'],
-        ['file4', 'contents4'],
-        )
-
-  def testUnicodeContents(self):
-    self.assertCLI(['file1', u'\u2603'.encode('utf-8')])
-
-  def testFilenameSpaces(self):
-    self.assertCLI(
-        ['f f1', 'contents1'],
-        ['d d1/file2', 'contents2'],
-        ['d d1/f f3', 'contents3'],
-        ['file4', 'contents4'],
-        )
-
-  def testBigFile(self):
-    self.assertCLI(['bigfile', 'data'*1024*1024*10])
-
-  @unittest.skipIf(
-      not filesystem_supports_unicode(), 'no unicode file support')
-  def testUnicode(self):
-    self.assertCLI([u'\u2603', u'\U0001f4a9'.encode('utf-8')])
-
-
-if __name__ == '__main__':
-  doctest.testmod(arfile)
-  unittest.main()
diff --git a/tools/swarming_client/libs/arfile/cli.py b/tools/swarming_client/libs/arfile/cli.py
deleted file mode 100644
index f6c01bb..0000000
--- a/tools/swarming_client/libs/arfile/cli.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Command line tool for creating and extracting ar files."""
-
-from __future__ import print_function
-
-import argparse
-import io
-import os
-import shutil
-import stat
-import sys
-import time
-
-# pylint: disable=relative-import
-import arfile
-
-
-class ProgressReporter(object):
-  def __init__(self, every):
-    self.every = int(every)
-    self.start = time.time()
-    self.filecount = 0
-    self.lastreport = 0
-
-  def inc(self):
-    self.filecount += 1
-    if (self.filecount - self.lastreport) >= self.every:
-      self.report()
-
-  def report(self):
-    if self.every:
-      t = time.time()-self.start
-      print(u'Took %f for %i files == %f files/second' % (
-          t, self.filecount, self.filecount/t), file=sys.stderr)
-    self.lastreport = self.filecount
-
-  def __del__(self):
-    self.report()
-
-
-def create_cmd(
-      filename, dirs, progress, read_ahead, verbose, dont_use_defaults):
-  afw = arfile.ArFileWriter(filename)
-  try:
-    for path in dirs:
-      for dirpath, child_dirs, filenames in os.walk(path):
-        # In-place sort the child_dirs so we walk in lexicographical order
-        child_dirs.sort()
-        filenames.sort()
-        for fn in filenames:
-          fp = os.path.join(dirpath, fn)
-
-          if verbose:
-            print(fp, file=sys.stderr)
-
-          progress.inc()
-
-          with open(fp, 'rb') as f:
-            if dont_use_defaults:
-              afw.addfile(
-                  arfile.ArInfo.frompath(fp[len(path)+1:], cwd=path),
-                  f)
-              continue
-
-            # If a file is small, it is cheaper to just read the file rather
-            # than doing a stat
-            data = f.read(read_ahead)
-            if len(data) < read_ahead:
-              afw.addfile(arfile.ArInfo.fromdefault(
-                fp[len(path)+1:], len(data)), io.BytesIO(data))
-            else:
-              size = os.stat(fp).st_size
-              f.seek(0)
-              afw.addfile(arfile.ArInfo.fromdefault(
-                fp[len(path)+1:], size), f)
-  finally:
-    afw.close()
-
-
-def list_cmd(filename, progress):
-  afr = arfile.ArFileReader(filename, fullparse=False)
-  for ai, _ in afr:
-    print(ai.name)
-    progress.inc()
-
-
-def extract_cmd(
-      filename, progress, verbose, dont_use_defaults, blocksize=1024*64):
-  afr = arfile.ArFileReader(filename, fullparse=dont_use_defaults)
-  for ai, ifd in afr:
-    assert not ai.name.startswith('/')
-    if verbose:
-      print(ai.name, file=sys.stderr)
-
-    try:
-      os.makedirs(os.path.dirname(ai.name))
-    except OSError:
-      pass
-
-    with open(ai.name, 'wb') as ofd:
-      written = 0
-      while written < ai.size:
-        readsize = min(blocksize, ai.size-written)
-        ofd.write(ifd.read(readsize))
-        written += readsize
-
-    progress.inc()
-
-
-def main(name, args):
-  parser = argparse.ArgumentParser(
-    prog=name,
-    description=sys.modules[__name__].__doc__)
-  subparsers = parser.add_subparsers(
-    dest='mode', help='sub-command help')
-
-  # Create command
-  parser_create = subparsers.add_parser(
-    'create', help='Create a new ar file')
-  parser_create.add_argument(
-    '-r', '--read-ahead',
-    type=int, default=1024*64,
-    help='Amount of data to read-ahead before doing a stat.')
-  parser_create.add_argument(
-    '-f', '--filename',
-    type=argparse.FileType('wb'), default=sys.stdout,
-    help='ar file to use')
-  parser_create.add_argument(
-    'dirs', nargs='+', help='Directory or file to add to the ar file')
-
-  # List command
-  parser_list = subparsers.add_parser('list', help='List files in an ar file')
-
-  # Extract command
-  parser_extract = subparsers.add_parser(
-    'extract', help='Extract an existing ar file to current directory')
-
-  # Add to output commands
-  for p in parser_list, parser_extract:
-    p.add_argument(
-      '-f', '--filename',
-      type=argparse.FileType('rb'), default=sys.stdin,
-      help='ar file to use')
-
-  for p in parser_create, parser_extract:
-    p.add_argument(
-      '--dont-use-defaults',
-      action='store_true', default=False,
-      help='Don\'t use default value for file information.')
-
-    p.add_argument(
-      '-v', '--verbose',
-      action='store_true',
-      help='Output file names to stderr while running.')
-
-  # Add to all commands
-  for p in parser_create, parser_list, parser_extract:
-    p.add_argument(
-      '-p', '--progress',
-      type=ProgressReporter, default='10000',
-      help='Output progress information every N files.')
-
-  args = parser.parse_args(args)
-  mode = getattr(sys.modules[__name__], args.mode + '_cmd')
-  del args.mode
-  return mode(**args.__dict__)
-
-
-if __name__ == '__main__':
-  sys.exit(main('artool', (a.decode('utf-8') for a in sys.argv[1:])))
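
A hypothetical driver for the command line tool deleted above, matching the create/list/extract sub-commands defined in main(); 'out.ar' and 'some_dir' are placeholders, and 'some_dir' must already exist:

from libs.arfile import cli

cli.main('artool', ['create', '--filename', 'out.ar', 'some_dir'])
cli.main('artool', ['list', '--filename', 'out.ar'])
cli.main('artool', ['extract', '--filename', 'out.ar'])  # extracts into cwd
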
diff --git a/tools/swarming_client/libs/logdog/__init__.py b/tools/swarming_client/libs/logdog/__init__.py
deleted file mode 100644
index 5c8814d..0000000
--- a/tools/swarming_client/libs/logdog/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
diff --git a/tools/swarming_client/libs/logdog/bootstrap.py b/tools/swarming_client/libs/logdog/bootstrap.py
deleted file mode 100644
index 3344e5d..0000000
--- a/tools/swarming_client/libs/logdog/bootstrap.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import collections
-import os
-
-from libs.logdog import stream, streamname
-
-
-class NotBootstrappedError(RuntimeError):
-  """Raised when the current environment is missing Butler bootstrap variables.
-  """
-
-
-_ButlerBootstrapBase = collections.namedtuple('_ButlerBootstrapBase',
-    ('project', 'prefix', 'streamserver_uri'))
-
-
-class ButlerBootstrap(_ButlerBootstrapBase):
-  """Loads LogDog Butler bootstrap parameters from the environment.
-
-  LogDog Butler adds variables describing the LogDog stream parameters to the
-  environment when it bootstraps an application. This class probes the
-  environment and identifies those parameters.
-  """
-
-  _ENV_PROJECT = 'LOGDOG_STREAM_PROJECT'
-  _ENV_PREFIX = 'LOGDOG_STREAM_PREFIX'
-  _ENV_STREAM_SERVER_PATH = 'LOGDOG_STREAM_SERVER_PATH'
-
-  @classmethod
-  def probe(cls, env=None):
-    """Returns (ButlerBootstrap): The probed bootstrap environment.
-
-    Args:
-      env (dict): The environment to probe. If None, `os.environ` will be used.
-
-    Raises:
-      NotBootstrappedError if the current environment is not bootstrapped.
-    """
-    if env is None:
-      env = os.environ
-    project = env.get(cls._ENV_PROJECT)
-    prefix = env.get(cls._ENV_PREFIX)
-
-    if not project:
-      raise NotBootstrappedError('Missing project [%s]' % (cls._ENV_PROJECT,))
-
-    if not prefix:
-      raise NotBootstrappedError('Missing prefix [%s]' % (cls._ENV_PREFIX,))
-    try:
-      streamname.validate_stream_name(prefix)
-    except ValueError as e:
-      raise NotBootstrappedError('Prefix (%s) is invalid: %s' % (prefix, e))
-
-    return cls(project=project, prefix=prefix,
-               streamserver_uri=env.get(cls._ENV_STREAM_SERVER_PATH))
-
-  def stream_client(self):
-    """Returns: (StreamClient) stream client for the bootstrap streamserver URI.
-
-    If the Butler accepts external stream connections, it will export a
-    streamserver URI in the environment. This will create a StreamClient
-    instance to operate on the streamserver if one is defined.
-
-    Raises:
-      ValueError: If no streamserver URI is present in the environment.
-    """
-    if not self.streamserver_uri:
-      raise ValueError('No streamserver in bootstrap environment.')
-    return stream.create(self.streamserver_uri)
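
A minimal sketch (not from the original file) of probing a hand-built bootstrap environment with ButlerBootstrap above; all values are placeholders, and the prefix is assumed to satisfy streamname.validate_stream_name():

from libs.logdog import bootstrap

env = {
    'LOGDOG_STREAM_PROJECT': 'chromium',
    'LOGDOG_STREAM_PREFIX': 'swarming/run_id',
    'LOGDOG_STREAM_SERVER_PATH': 'unix:/tmp/butler.sock',
}
bs = bootstrap.ButlerBootstrap.probe(env)
print bs.project, bs.prefix, bs.streamserver_uri
# bs.stream_client() would then build a StreamClient for the streamserver URI.
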
diff --git a/tools/swarming_client/libs/logdog/stream.py b/tools/swarming_client/libs/logdog/stream.py
deleted file mode 100644
index 3d60a6c..0000000
--- a/tools/swarming_client/libs/logdog/stream.py
+++ /dev/null
@@ -1,455 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import collections
-import contextlib
-import json
-import os
-import socket
-import sys
-import threading
-import types
-
-from libs.logdog import streamname, varint
-
-
-_StreamParamsBase = collections.namedtuple('_StreamParamsBase',
-    ('name', 'type', 'content_type', 'tags', 'tee', 'binary_file_extension'))
-
-
-# Magic number at the beginning of a Butler stream
-#
-# See "ProtocolFrameHeaderMagic" in:
-# <luci-go>/logdog/client/butlerlib/streamproto
-BUTLER_MAGIC = 'BTLR1\x1e'
-
-
-class StreamParams(_StreamParamsBase):
-  """Defines the set of parameters to apply to a new stream."""
-
-  # A text content stream.
-  TEXT = 'text'
-  # A binary content stream.
-  BINARY = 'binary'
-  # A datagram content stream.
-  DATAGRAM = 'datagram'
-
-  # Tee parameter to tee this stream through the Butler's STDOUT.
-  TEE_STDOUT = 'stdout'
-  # Tee parameter to tee this stream through the Butler's STDERR.
-  TEE_STDERR = 'stderr'
-
-  @classmethod
-  def make(cls, **kwargs):
-    """Returns (StreamParams): A new StreamParams instance with supplied values.
-
-    Any parameter that isn't supplied will be set to None.
-
-    Args:
-      kwargs (dict): Named parameters to apply.
-    """
-    return cls(**{f: kwargs.get(f) for f in cls._fields})
-
-  def validate(self):
-    """Raises (ValueError): if the parameters are not valid."""
-    streamname.validate_stream_name(self.name)
-
-    if self.type not in (self.TEXT, self.BINARY, self.DATAGRAM):
-      raise ValueError('Invalid type (%s)' % (self.type,))
-
-    if self.tags is not None:
-      if not isinstance(self.tags, collections.Mapping):
-        raise ValueError('Invalid tags type (%s)' % (self.tags,))
-      for k, v in self.tags.iteritems():
-        streamname.validate_tag(k, v)
-
-    if self.tee not in (None, self.TEE_STDOUT, self.TEE_STDERR):
-      raise ValueError('Invalid tee type (%s)' % (self.tee,))
-
-    if not isinstance(self.binary_file_extension,
-        (types.NoneType, types.StringTypes)):
-      raise ValueError('Invalid binary file extension type (%s)' % (
-          self.binary_file_extension,))
-
-  def to_json(self):
-    """Returns (str): The JSON representation of the StreamParams.
-
-    Converts stream parameters to JSON for Butler consumption.
-
-    Raises:
-      ValueError: if these parameters are not valid.
-    """
-    self.validate()
-
-    obj = {
-        'name': self.name,
-        'type': self.type,
-    }
-
-    def maybe_add(key, value):
-      if value is not None:
-        obj[key] = value
-    maybe_add('contentType', self.content_type)
-    maybe_add('tags', self.tags)
-    maybe_add('tee', self.tee)
-    maybe_add('binaryFileExtension', self.binary_file_extension)
-
-    # Note that "dumps' will dump UTF-8 by default, which is what Butler wants.
-    return json.dumps(obj, sort_keys=True, ensure_ascii=True, indent=None)
-
-
-class StreamProtocolRegistry(object):
-  """Registry of streamserver URI protocols and their client classes.
-  """
-
-  def __init__(self):
-    self._registry = {}
-
-  def register_protocol(self, protocol, client_cls):
-    assert issubclass(client_cls, StreamClient)
-    if self._registry.get(protocol) is not None:
-      raise KeyError('Duplicate protocol registered.')
-    self._registry[protocol] = client_cls
-
-  def create(self, uri):
-    uri = uri.split(':', 1)
-    if len(uri) != 2:
-      raise ValueError('Invalid stream server URI [%s]' % (uri,))
-    protocol, value = uri
-
-    client_cls = self._registry.get(protocol)
-    if not client_cls:
-      raise ValueError('Unknown stream client protocol (%s)' % (protocol,))
-    return client_cls._create(value)
-
-# Default (global) registry.
-_default_registry = StreamProtocolRegistry()
-
-
-def create(uri):
-  """Returns (StreamClient): A stream client for the specified URI.
-
-  This uses the default StreamProtocolRegistry to instantiate a StreamClient
-  for the specified URI.
-
-  Args:
-    uri: The streamserver URI.
-
-  Raises:
-    ValueError if the supplied URI references an invalid or improperly
-        configured streamserver.
-  """
-  return _default_registry.create(uri)
-
-
-class StreamClient(object):
-  """Abstract base class for a streamserver client.
-  """
-
-  class _DatagramStream(object):
-    """Wraps a stream object to write length-prefixed datagrams."""
-
-    def __init__(self, fd):
-      self._fd = fd
-
-    def send(self, data):
-      varint.write_uvarint(self._fd, len(data))
-      self._fd.write(data)
-
-    def close(self):
-      return self._fd.close()
-
-  def __init__(self):
-    self._name_lock = threading.Lock()
-    self._names = set()
-
-  def _register_new_stream(self, name):
-    """Registers a new stream name.
-
-    The Butler will internally reject any duplicate stream names. However,
-    there isn't really any feedback when this happens other than a closed
-    stream client. This is a client-side check to provide a more user-friendly
-    experience in the event that a user attempts to register a duplicate
-    stream name.
-
-    Note that this is imperfect, as something else could register stream names
-    with the same Butler instance and this library has no means of tracking
-    that. This is a best-effort check, not a reliable one.
-
-    Args:
-      name (str): The name of the stream.
-
-    Raises:
-      ValueError if the stream name has already been registered.
-    """
-    with self._name_lock:
-      if name in self._names:
-        raise ValueError("Duplicate stream name [%s]" % (name,))
-      self._names.add(name)
-
-  @classmethod
-  def _create(cls, value):
-    """Returns (StreamClient): A new stream client connection.
-
-    Validates the streamserver parameters and creates a new StreamClient
-    instance that connects to them.
-
-    Implementing classes must override this.
-    """
-    raise NotImplementedError()
-
-  def _connect_raw(self):
-    """Returns (file): A new file-like stream.
-
-    Creates a new raw connection to the streamserver. This connection MUST NOT
-    have any data written to it, beyond any initialization it needs, before it
-    is returned.
-
-    The file-like object must implement `write` and `close`.
-
-    Implementing classes must override this.
-    """
-    raise NotImplementedError()
-
-  def new_connection(self, params):
-    """Returns (file): A new configured stream.
-
-    The returned object implements (minimally) `write` and `close`.
-
-    Creates a new LogDog stream with the specified parameters.
-
-    Args:
-      params (StreamParams): The parameters to use with the new connection.
-
-    Raises:
-      ValueError if the stream name has already been used, or if the parameters
-      are not valid.
-    """
-    self._register_new_stream(params.name)
-    params_json = params.to_json()
-
-    fd = self._connect_raw()
-    fd.write(BUTLER_MAGIC)
-    varint.write_uvarint(fd, len(params_json))
-    fd.write(params_json)
-    return fd
-
-  @contextlib.contextmanager
-  def text(self, name, **kwargs):
-    """Context manager to create, use, and teardown a TEXT stream.
-
-    This context manager creates a new butler TEXT stream with the specified
-    parameters, yields it, and closes it on teardown.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      kwargs (dict): Log stream parameters. These may be any keyword arguments
-          accepted by `open_text`.
-
-    Returns (file): A file-like object to a Butler UTF-8 text stream supporting
-        `write`.
-    """
-    fd = None
-    try:
-      fd = self.open_text(name, **kwargs)
-      yield fd
-    finally:
-      if fd is not None:
-        fd.close()
-
-  def open_text(self, name, content_type=None, tags=None, tee=None,
-                binary_file_extension=None):
-    """Returns (file): A file-like object for a single text stream.
-
-    This creates a new butler TEXT stream with the specified parameters.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      content_type (str): The optional content type of the stream. If None, a
-          default content type will be chosen by the Butler.
-      tags (dict): An optional dictionary of key/value LogDog stream tags.
-      tee (str): Describes how stream data should be tee'd through the Butler.
-          One of StreamParams' TEE arguments.
-      binary_file_extension (str): A custom binary file extension. If not
-          provided, a default extension may be chosen or the binary stream may
-          not be emitted.
-
-    Returns (file): A file-like object to a Butler text stream. This object can
-        have UTF-8 text content written to it with its `write` method, and must
-        be closed when finished using its `close` method.
-    """
-    params = StreamParams.make(
-        name=name,
-        type=StreamParams.TEXT,
-        content_type=content_type,
-        tags=tags,
-        tee=tee,
-        binary_file_extension=binary_file_extension)
-    return self.new_connection(params)
-
-  @contextlib.contextmanager
-  def binary(self, name, **kwargs):
-    """Context manager to create, use, and teardown a BINARY stream.
-
-    This context manager creates a new butler BINARY stream with the specified
-    parameters, yields it, and closes it on teardown.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      kwargs (dict): Log stream parameters. These may be any keyword arguments
-          accepted by `open_binary`.
-
-    Returns (file): A file-like object to a Butler binary stream supporting
-        `write`.
-    """
-    fd = None
-    try:
-      fd = self.open_binary(name, **kwargs)
-      yield fd
-    finally:
-      if fd is not None:
-        fd.close()
-
-  def open_binary(self, name, content_type=None, tags=None, tee=None,
-                binary_file_extension=None):
-    """Returns (file): A file-like object for a single binary stream.
-
-    This creates a new butler BINARY stream with the specified parameters.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      content_type (str): The optional content type of the stream. If None, a
-          default content type will be chosen by the Butler.
-      tags (dict): An optional dictionary of key/value LogDog stream tags.
-      tee (str): Describes how stream data should be tee'd through the Butler.
-          One of StreamParams' TEE arguments.
-      binary_file_extension (str): A custom binary file extension. If not
-          provided, a default extension may be chosen or the binary stream may
-          not be emitted.
-
-    Returns (file): A file-like object to a Butler binary stream. This object
-        can have binary content written to it with its `write` method, and must
-        be closed when finished using its `close` method.
-    """
-    params = StreamParams.make(
-        name=name,
-        type=StreamParams.BINARY,
-        content_type=content_type,
-        tags=tags,
-        tee=tee,
-        binary_file_extension=binary_file_extension)
-    return self.new_connection(params)
-
-  @contextlib.contextmanager
-  def datagram(self, name, **kwargs):
-    """Context manager to create, use, and teardown a DATAGRAM stream.
-
-    This context manager creates a new butler DATAGRAM stream with the
-    specified parameters, yields it, and closes it on teardown.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      kwargs (dict): Log stream parameters. These may be any keyword arguments
-          accepted by `open_datagram`.
-
-    Returns (_DatagramStream): A datagram stream object. Datagrams can be
-        written to it using its `send` method.
-    """
-    fd = None
-    try:
-      fd = self.open_datagram(name, **kwargs)
-      yield fd
-    finally:
-      if fd is not None:
-        fd.close()
-
-  def open_datagram(self, name, content_type=None, tags=None, tee=None,
-                    binary_file_extension=None):
-    """Creates a new butler DATAGRAM stream with the specified parameters.
-
-    Args:
-      name (str): the LogDog name of the stream.
-      content_type (str): The optional content type of the stream. If None, a
-          default content type will be chosen by the Butler.
-      tags (dict): An optional dictionary of key/value LogDog stream tags.
-      tee (str): Describes how stream data should be tee'd through the Butler.
-          One of StreamParams' TEE arguments.
-      binary_file_extension (str): A custom binary file extension. If not
-          provided, a default extension may be chosen or the binary stream may
-          not be emitted.
-
-    Returns (_DatagramStream): A datagram stream object. Datagrams can be
-        written to it using its `send` method. This object must be closed when
-        finished by using its `close` method.
-    """
-    params = StreamParams.make(
-        name=name,
-        type=StreamParams.DATAGRAM,
-        content_type=content_type,
-        tags=tags,
-        tee=tee,
-        binary_file_extension=binary_file_extension)
-    return self._DatagramStream(self.new_connection(params))
-
-
-class _NamedPipeStreamClient(StreamClient):
-  """A StreamClient implementation that connects to a Windows named pipe.
-  """
-
-  def __init__(self, name):
-    r"""Initializes a new Windows named pipe stream client.
-
-    Args:
-      name (str): The name of the Windows named pipe to use (e.g., "\\.\name")
-    """
-    super(_NamedPipeStreamClient, self).__init__()
-    self._name = name
-
-  @classmethod
-  def _create(cls, value):
-    return cls(value)
-
-  def _connect_raw(self):
-    return open(self._name, 'wb')
-
-_default_registry.register_protocol('net.pipe', _NamedPipeStreamClient)
-
-
-class _UnixDomainSocketStreamClient(StreamClient):
-  """A StreamClient implementation that uses a UNIX domain socket.
-  """
-
-  class SocketFile(object):
-    """A write-only file-like object that writes to a UNIX socket."""
-
-    def __init__(self, fd):
-      self._fd = fd
-
-    def write(self, data):
-      self._fd.send(data)
-
-    def close(self):
-      self._fd.close()
-
-
-  def __init__(self, path):
-    """Initializes a new UNIX domain socket stream client.
-
-    Args:
-      path (str): The path to the named UNIX domain socket.
-    """
-    super(_UnixDomainSocketStreamClient, self).__init__()
-    self._path = path
-
-  @classmethod
-  def _create(cls, value):
-    if not os.path.exists(value):
-      raise ValueError('UNIX domain socket [%s] does not exist.' % (value,))
-    return cls(value)
-
-  def _connect_raw(self):
-    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-    sock.connect(self._path)
-    return self.SocketFile(sock)
-
-_default_registry.register_protocol('unix', _UnixDomainSocketStreamClient)
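
Editorial note on the deleted stream.py above: clients are created from URIs of
the form "protocol:value" (the protocols registered here are "net.pipe" and
"unix"), and every new stream starts with a handshake of BUTLER_MAGIC, a
uvarint-encoded length, and the JSON-serialized StreamParams; datagram streams
additionally length-prefix every send(). A minimal standalone sketch of that
framing, with a placeholder MAGIC value standing in for the real BUTLER_MAGIC
constant and a local uvarint encoder mirroring varint.write_uvarint:

    import json

    MAGIC = b'BTLR-MAGIC'  # placeholder; the real value is stream.BUTLER_MAGIC

    def encode_uvarint(value):
        # 7 bits per byte, continuation bit set on every byte except the last.
        out = bytearray()
        while True:
            byte = value & 0x7F
            value >>= 7
            out.append(byte | (0x80 if value else 0))
            if not value:
                return bytes(out)

    def handshake_bytes(params_dict):
        # Written once per stream: magic + uvarint(len(json)) + json.
        params_json = json.dumps(params_dict, sort_keys=True).encode('utf-8')
        return MAGIC + encode_uvarint(len(params_json)) + params_json

    def frame_datagram(payload):
        # Each datagram is uvarint(len(payload)) + payload.
        return encode_uvarint(len(payload)) + payload

    print(handshake_bytes({'name': 'mystream', 'type': 'text'}))
    print(frame_datagram(b'dg1'))  # '\x03dg1'
    print(frame_datagram(b''))     # '\x00'
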
diff --git a/tools/swarming_client/libs/logdog/streamname.py b/tools/swarming_client/libs/logdog/streamname.py
deleted file mode 100644
index 8aaffb8..0000000
--- a/tools/swarming_client/libs/logdog/streamname.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import re
-import string
-import types
-
-_ALNUM_CHARS = string.ascii_letters + string.digits
-_SEGMENT_RE_BASE = r'[a-zA-Z0-9][a-zA-Z0-9:_\-.]*'
-_STREAM_NAME_RE = re.compile('^(' + _SEGMENT_RE_BASE + ')(/' +
-                             _SEGMENT_RE_BASE + ')*$')
-_MAX_STREAM_NAME_LENGTH = 4096
-
-_MAX_TAG_KEY_LENGTH = 64
-_MAX_TAG_VALUE_LENGTH = 4096
-
-
-def validate_stream_name(v, maxlen=None):
-  """Verifies that a given stream name is valid.
-
-  Args:
-    v (str): The stream name string.
-    maxlen (int): The maximum allowed length. If None, defaults to
-        _MAX_STREAM_NAME_LENGTH.
-
-  Raises:
-    ValueError if the stream name is invalid.
-  """
-  maxlen = maxlen or _MAX_STREAM_NAME_LENGTH
-  if len(v) > maxlen:
-    raise ValueError('Maximum length exceeded (%d > %d)' % (len(v), maxlen))
-  if _STREAM_NAME_RE.match(v) is None:
-    raise ValueError('Invalid stream name')
-
-
-def validate_tag(key, value):
-  """Verifies that a given tag key/value is valid.
-
-  Args:
-    key (str): The tag key.
-    value (str): The tag value.
-
-  Raises:
-    ValueError if the tag is not valid.
-  """
-  validate_stream_name(key, maxlen=_MAX_TAG_KEY_LENGTH)
-  validate_stream_name(value, maxlen=_MAX_TAG_VALUE_LENGTH)
-
-
-def normalize(v, prefix=None):
-  """Given a string, "v", mutate it into a valid stream name.
-
-  This operates by replacing invalid stream name characters with underscores (_)
-  when encountered.
-
-  A special case is when "v" begins with an invalid character. In this case, we
-  will prepend the "prefix", if one is supplied.
-
-  See _STREAM_NAME_RE for a description of a valid stream name.
-
-  Raises:
-    ValueError: If normalization could not be successfully performed.
-  """
-  if len(v) == 0:
-    if not prefix:
-      raise ValueError('Cannot normalize empty name with no prefix.')
-    v = prefix
-  else:
-    out = []
-    for i, ch in enumerate(v):
-      if i == 0 and not _is_valid_stream_char(ch, first=True):
-        # The first letter is special, and must be alphanumeric.
-        # If we have a prefix, prepend that to the resulting string.
-        if prefix is None:
-          raise ValueError('Name has invalid beginning, and no prefix was '
-                           'provided.')
-        out.append(prefix)
-
-      if not _is_valid_stream_char(ch):
-        ch = '_'
-      out.append(ch)
-    v = ''.join(out)
-
-  # Validate the resulting string.
-  validate_stream_name(v)
-  return v
-
-
-def _is_valid_stream_char(ch, first=False):
-  """Returns (bool): True if a character is alphanumeric.
-
-  The first character must be alphanumeric, matching [a-zA-Z0-9].
-  Additional characters must either be alphanumeric or one of: (: _ - . /).
-
-  Args:
-    ch (str): the character to evaluate.
-    first (bool): if true, apply special first-character constraints.
-  """
-  # Alphanumeric check.
-  if ch in _ALNUM_CHARS:
-    return True
-  if first:
-    # The first character must be alphanumeric.
-    return False
-
-  # Check additional middle-name characters:
-  return ch in ':_-./'
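
Editorial note on the deleted streamname.py above: a few concrete input/output
pairs may make the normalize() rules easier to follow. These expectations
mirror streamname_test.py further down in this diff and assume the
swarming_client root is on sys.path, as the tests arrange:

    from libs.logdog import streamname

    # The prefix is only used when the name is empty or starts with an invalid
    # character; every other invalid character becomes '_'.
    assert streamname.normalize('', prefix='PFX') == 'PFX'
    assert (streamname.normalize('_invalid_start_char', prefix='PFX')
            == 'PFX_invalid_start_char')
    assert (streamname.normalize('some stream (with stuff)', prefix='PFX')
            == 'some_stream__with_stuff_')
    assert (streamname.normalize('valid_stream_name.1:2-3', prefix='PFX')
            == 'valid_stream_name.1:2-3')
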
diff --git a/tools/swarming_client/libs/logdog/tests/bootstrap_test.py b/tools/swarming_client/libs/logdog/tests/bootstrap_test.py
deleted file mode 100755
index 0d5f968..0000000
--- a/tools/swarming_client/libs/logdog/tests/bootstrap_test.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.abspath(os.path.join(
-    __file__.decode(sys.getfilesystemencoding()),
-    os.pardir, os.pardir, os.pardir)))
-sys.path.insert(0, ROOT_DIR)
-
-from libs.logdog import bootstrap
-
-
-class BootstrapTestCase(unittest.TestCase):
-
-  def setUp(self):
-    self.env = {
-        bootstrap.ButlerBootstrap._ENV_PROJECT: 'test-project',
-        bootstrap.ButlerBootstrap._ENV_PREFIX: 'foo/bar',
-        bootstrap.ButlerBootstrap._ENV_STREAM_SERVER_PATH: 'fake:path',
-    }
-
-  def testProbeSucceeds(self):
-    bs = bootstrap.ButlerBootstrap.probe(self.env)
-    self.assertEqual(bs, bootstrap.ButlerBootstrap(
-      project='test-project',
-      prefix='foo/bar',
-      streamserver_uri='fake:path'))
-
-  def testProbeNoBootstrapRaisesError(self):
-    self.assertRaises(bootstrap.NotBootstrappedError,
-        bootstrap.ButlerBootstrap.probe, env={})
-
-  def testProbeMissingProjectRaisesError(self):
-    self.env.pop(bootstrap.ButlerBootstrap._ENV_PROJECT)
-    self.assertRaises(bootstrap.NotBootstrappedError,
-        bootstrap.ButlerBootstrap.probe, env=self.env)
-
-  def testProbeMissingPrefixRaisesError(self):
-    self.env.pop(bootstrap.ButlerBootstrap._ENV_PREFIX)
-    self.assertRaises(bootstrap.NotBootstrappedError,
-        bootstrap.ButlerBootstrap.probe, env=self.env)
-
-  def testProbeInvalidPrefixRaisesError(self):
-    self.env[bootstrap.ButlerBootstrap._ENV_PREFIX] = '!!! not valid !!!'
-    self.assertRaises(bootstrap.NotBootstrappedError,
-        bootstrap.ButlerBootstrap.probe, env=self.env)
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/swarming_client/libs/logdog/tests/stream_test.py b/tools/swarming_client/libs/logdog/tests/stream_test.py
deleted file mode 100755
index 98cbbda..0000000
--- a/tools/swarming_client/libs/logdog/tests/stream_test.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import json
-import os
-import sys
-import unittest
-import StringIO
-
-ROOT_DIR = os.path.dirname(os.path.abspath(os.path.join(
-    __file__.decode(sys.getfilesystemencoding()),
-    os.pardir, os.pardir, os.pardir)))
-sys.path.insert(0, ROOT_DIR)
-
-from libs.logdog import stream, varint
-
-
-class StreamParamsTestCase(unittest.TestCase):
-
-  def setUp(self):
-    self.params = stream.StreamParams(
-        'name',
-        type=stream.StreamParams.TEXT,
-        content_type='content-type',
-        tags={
-            'foo': 'bar',
-            'baz': 'qux',
-        },
-        tee=stream.StreamParams.TEE_STDOUT,
-        binary_file_extension='ext')
-
-  def testParamsToJson(self):
-    self.assertEqual(self.params.to_json(),
-        ('{"binaryFileExtension": "ext", "contentType": "content-type", '
-         '"name": "name", "tags": {"baz": "qux", "foo": "bar"}, '
-         '"tee": "stdout", "type": "text"}'))
-
-  def testParamsToJsonWithEmpties(self):
-    params = self.params._replace(
-        content_type=None,
-        tags=None,
-        tee=None,
-        binary_file_extension=None,
-    )
-    self.assertEqual(params.to_json(), '{"name": "name", "type": "text"}')
-
-  def testParamsWithInvalidTypeRaisesValueError(self):
-    params = self.params._replace(type=None)
-    self.assertRaises(ValueError, params.to_json)
-
-  def testParamsWithInvalidTeeTypeRaisesValueError(self):
-    params = self.params._replace(tee='somewhere')
-    self.assertRaises(ValueError, params.to_json)
-
-  def testParamsWithInvalidTagRaisesValueError(self):
-    params = self.params._replace(tags='foo')
-    self.assertRaises(ValueError, params.to_json)
-
-    params = self.params._replace(tags={'!!! invalid tag key !!!': 'bar'})
-    self.assertRaises(ValueError, params.to_json)
-
-
-class StreamClientTestCase(unittest.TestCase):
-
-  class _TestStreamClientConnection(object):
-
-    def __init__(self):
-      self.buffer = StringIO.StringIO()
-      self.closed = False
-
-    def _assert_not_closed(self):
-      if self.closed:
-        raise Exception('Connection is closed.')
-
-    def write(self, v):
-      self._assert_not_closed()
-      self.buffer.write(v)
-
-    def close(self):
-      self._assert_not_closed()
-      self.closed = True
-
-    def interpret(self):
-      data = StringIO.StringIO(self.buffer.getvalue())
-      magic = data.read(len(stream.BUTLER_MAGIC))
-      if magic != stream.BUTLER_MAGIC:
-        raise ValueError('Invalid magic value ([%s] != [%s])' % (
-            magic, stream.BUTLER_MAGIC))
-      length, _ = varint.read_uvarint(data)
-      header = data.read(length)
-      return json.loads(header), data.read()
-
-  class _TestStreamClient(stream.StreamClient):
-    def __init__(self, value):
-      super(StreamClientTestCase._TestStreamClient, self).__init__()
-      self.value = value
-      self.last_conn = None
-
-    @classmethod
-    def _create(cls, value):
-      return cls(value)
-
-    def _connect_raw(self):
-      conn = StreamClientTestCase._TestStreamClientConnection()
-      self.last_conn = conn
-      return conn
-
-  def setUp(self):
-    self._registry = stream.StreamProtocolRegistry()
-    self._registry.register_protocol('test', self._TestStreamClient)
-
-  @staticmethod
-  def _split_datagrams(value):
-    sio = StringIO.StringIO(value)
-    while sio.pos < sio.len:
-      size_prefix, _ = varint.read_uvarint(sio)
-      data = sio.read(size_prefix)
-      if len(data) != size_prefix:
-        raise ValueError('Expected %d bytes, but only got %d' % (
-            size_prefix, len(data)))
-      yield data
-
-  def testClientInstantiation(self):
-    client = self._registry.create('test:value')
-    self.assertIsInstance(client, self._TestStreamClient)
-    self.assertEqual(client.value, 'value')
-
-  def testTextStream(self):
-    client = self._registry.create('test:value')
-    with client.text('mystream') as fd:
-      fd.write('text\nstream\nlines')
-
-    conn = client.last_conn
-    self.assertTrue(conn.closed)
-
-    header, data = conn.interpret()
-    self.assertEqual(header, {'name': 'mystream', 'type': 'text'})
-    self.assertEqual(data, 'text\nstream\nlines')
-
-  def testTextStreamWithParams(self):
-    client = self._registry.create('test:value')
-    with client.text('mystream', content_type='foo/bar',
-                     tee=stream.StreamParams.TEE_STDOUT,
-                     tags={'foo': 'bar', 'baz': 'qux'}) as fd:
-      fd.write('text!')
-
-    conn = client.last_conn
-    self.assertTrue(conn.closed)
-
-    header, data = conn.interpret()
-    self.assertEqual(header, {
-        'name': 'mystream',
-        'type': 'text',
-        'contentType': 'foo/bar',
-        'tee': 'stdout',
-        'tags': {'foo': 'bar', 'baz': 'qux'},
-    })
-    self.assertEqual(data, 'text!')
-
-  def testBinaryStream(self):
-    client = self._registry.create('test:value')
-    with client.binary('mystream') as fd:
-      fd.write('\x60\x0d\xd0\x65')
-
-    conn = client.last_conn
-    self.assertTrue(conn.closed)
-
-    header, data = conn.interpret()
-    self.assertEqual(header, {'name': 'mystream', 'type': 'binary'})
-    self.assertEqual(data, '\x60\x0d\xd0\x65')
-
-  def testDatagramStream(self):
-    client = self._registry.create('test:value')
-    with client.datagram('mystream') as fd:
-      fd.send('datagram0')
-      fd.send('dg1')
-      fd.send('')
-      fd.send('dg3')
-
-    conn = client.last_conn
-    self.assertTrue(conn.closed)
-
-    header, data = conn.interpret()
-    self.assertEqual(header, {'name': 'mystream', 'type': 'datagram'})
-    self.assertEqual(list(self._split_datagrams(data)),
-        ['datagram0', 'dg1', '', 'dg3'])
-
-  def testCreatingDuplicateStreamNameRaisesValueError(self):
-    client = self._registry.create('test:value')
-    with client.text('mystream') as fd:
-      fd.write('Using a text stream.')
-
-    with self.assertRaises(ValueError):
-      with client.text('mystream') as fd:
-        fd.write('Should not work.')
-
-    conn = client.last_conn
-    self.assertTrue(conn.closed)
-
-    header, data = conn.interpret()
-    self.assertEqual(header, {'name': 'mystream', 'type': 'text'})
-    self.assertEqual(data, 'Using a text stream.')
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/swarming_client/libs/logdog/tests/streamname_test.py b/tools/swarming_client/libs/logdog/tests/streamname_test.py
deleted file mode 100755
index c84292a..0000000
--- a/tools/swarming_client/libs/logdog/tests/streamname_test.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import sys
-import unittest
-import StringIO
-
-ROOT_DIR = os.path.dirname(os.path.abspath(os.path.join(
-    __file__.decode(sys.getfilesystemencoding()),
-    os.pardir, os.pardir, os.pardir)))
-sys.path.insert(0, ROOT_DIR)
-
-from libs.logdog import streamname
-
-
-class StreamNameTestCase(unittest.TestCase):
-
-  def testInvalidStreamNamesRaiseValueError(self):
-    for name in (
-        '',
-        'a' * (streamname._MAX_STREAM_NAME_LENGTH+1),
-        ' s p a c e s ',
-        '-hyphen',
-        'stream/path/+/not/name',
-    ):
-      with self.assertRaises(ValueError):
-        streamname.validate_stream_name(name)
-
-  def testValidStreamNamesDoNotRaise(self):
-    for name in (
-        'a',
-        'a' * (streamname._MAX_STREAM_NAME_LENGTH),
-        'foo/bar',
-        'f123/four/five-_.:',
-    ):
-      raised = False
-      try:
-        streamname.validate_stream_name(name)
-      except ValueError:
-        raised = True
-      self.assertFalse(raised, "Stream name '%s' raised ValueError" % (name,))
-
-  def testNormalize(self):
-    for name, normalized in (
-        ('', 'PFX'),
-        ('_invalid_start_char', 'PFX_invalid_start_char'),
-        ('valid_stream_name.1:2-3', 'valid_stream_name.1:2-3'),
-        ('some stream (with stuff)', 'some_stream__with_stuff_'),
-        ('_invalid/st!ream/name entry', 'PFX_invalid/st_ream/name_entry'),
-        ('     ', 'PFX_____'),
-    ):
-      self.assertEqual(streamname.normalize(name, prefix='PFX'), normalized)
-
-    # Assert that an empty stream name with no prefix will raise a ValueError.
-    self.assertRaises(ValueError, streamname.normalize, '')
-
-    # Assert that a stream name with an invalid starting character and no prefix
-    # will raise a ValueError.
-    self.assertRaises(ValueError, streamname.normalize, '_invalid_start_char')
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/swarming_client/libs/logdog/tests/varint_test.py b/tools/swarming_client/libs/logdog/tests/varint_test.py
deleted file mode 100755
index 0dbbf8d..0000000
--- a/tools/swarming_client/libs/logdog/tests/varint_test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import itertools
-import os
-import sys
-import unittest
-import StringIO
-
-ROOT_DIR = os.path.dirname(os.path.abspath(os.path.join(
-    __file__.decode(sys.getfilesystemencoding()),
-    os.pardir, os.pardir, os.pardir)))
-sys.path.insert(0, ROOT_DIR)
-
-from libs.logdog import varint
-
-
-class VarintTestCase(unittest.TestCase):
-
-  def testVarintEncodingRaw(self):
-    for base, exp in (
-        (0, b'\x00'),
-        (1, b'\x01'),
-        (0x7F, b'\x7f'),
-        (0x80, b'\x80\x01'),
-        (0x81, b'\x81\x01'),
-        (0x18080, b'\x80\x81\x06'),
-    ):
-      sio = StringIO.StringIO()
-      count = varint.write_uvarint(sio, base)
-      act = sio.getvalue()
-
-      self.assertEqual(act, exp,
-          "Encoding for %d (%r) doesn't match expected (%r)" % (base, act, exp))
-      self.assertEqual(count, len(act),
-          "Length of %d (%d) doesn't match encoded length (%d)" % (
-              base, len(act), count))
-
-  def testVarintEncodeDecode(self):
-    seed = (b'\x00', b'\x01', b'\x55', b'\x7F', b'\x80', b'\x81', b'\xff')
-    for perm in itertools.permutations(seed):
-      perm = ''.join(perm)
-
-      while len(perm) > 0:
-        exp = int(perm.encode('hex'), 16)
-
-        sio = StringIO.StringIO()
-        count = varint.write_uvarint(sio, exp)
-        sio.seek(0)
-        act, count = varint.read_uvarint(sio)
-
-        self.assertEqual(act, exp,
-            "Decoded %r (%d) doesn't match expected (%d)" % (
-                sio.getvalue().encode('hex'), act, exp))
-        self.assertEqual(count, len(sio.getvalue()),
-            "Decoded length (%d) doesn't match expected (%d)" % (
-                count, len(sio.getvalue())))
-
-        if not perm:
-          break
-        perm = perm[1:]
-
-
-if __name__ == '__main__':
-  unittest.main()
diff --git a/tools/swarming_client/libs/logdog/varint.py b/tools/swarming_client/libs/logdog/varint.py
deleted file mode 100644
index 7bf3cca..0000000
--- a/tools/swarming_client/libs/logdog/varint.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import sys
-
-
-def write_uvarint(w, val):
-  """Writes a varint value to the supplied file-like object.
-
-  Args:
-    w (object): A file-like object to write to. Must implement write.
-    val (number): The value to write. Must be >= 0.
-
-  Returns (int): The number of bytes that were written.
-
-  Raises:
-    ValueError if 'val' is < 0.
-  """
-  if val < 0:
-    raise ValueError('Cannot encode negative value, %d' % (val,))
-
-  count = 0
-  while val > 0 or count == 0:
-    byte = (val & 0b01111111)
-    val >>= 7
-    if val > 0:
-      byte |= 0b10000000
-
-    w.write(chr(byte))
-    count += 1
-  return count
-
-
-def read_uvarint(r):
-  """Reads a uvarint from a stream.
-
-  This is targeted towards testing, and will not be used in production code.
-
-  Args:
-    r (object): A file-like object to read from. Must implement read.
-
-  Returns: (value, count)
-    value (int): The decoded varint number.
-    count (int): The number of bytes that were read from 'r'.
-
-  Raises:
-    ValueError if the encoded varint is not terminated.
-  """
-  count = 0
-  result = 0
-  while True:
-    byte = r.read(1)
-    if len(byte) == 0:
-      raise ValueError('UVarint was not terminated')
-
-    byte = ord(byte)
-    result |= ((byte & 0b01111111) << (7 * count))
-    count += 1
-    if byte & 0b10000000 == 0:
-      break
-  return result, count
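
Editorial note on the deleted varint.py above: a worked round trip may make the
wire format more concrete. This sketch is Python 2 (StringIO, str-as-bytes),
matching the rest of this package, and assumes the swarming_client root is on
sys.path:

    import StringIO

    from libs.logdog import varint

    # 300 = 0b100101100: low 7 bits 0b0101100 -> 0x2c, OR continuation bit
    # -> 0xac; remaining bits 0b10 -> 0x02. So 300 encodes as '\xac\x02'.
    buf = StringIO.StringIO()
    written = varint.write_uvarint(buf, 300)
    assert buf.getvalue() == '\xac\x02' and written == 2

    buf.seek(0)
    value, read = varint.read_uvarint(buf)
    assert (value, read) == (300, 2)
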
diff --git a/tools/swarming_client/run_isolated.py b/tools/swarming_client/run_isolated.py
deleted file mode 100755
index 6c69c21..0000000
--- a/tools/swarming_client/run_isolated.py
+++ /dev/null
@@ -1,831 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs a command with optional isolated input/output.
-
-Despite the name "run_isolated", it can run a generic non-isolated command
-specified as args.
-
-If input isolated hash is provided, fetches it, creates a tree of hard links,
-appends args to the command in the fetched isolated and runs it.
-To improve performance, keeps a local cache.
-The local cache can safely be deleted.
-
-Any ${EXECUTABLE_SUFFIX} on the command line will be replaced with ".exe" string
-on Windows and "" on other platforms.
-
-Any ${ISOLATED_OUTDIR} on the command line will be replaced by the location of a
-temporary directory upon execution of the command specified in the .isolated
-file. All content written to this directory will be uploaded upon termination
-and the .isolated file describing this directory will be printed to stdout.
-
-Any ${SWARMING_BOT_FILE} on the command line will be replaced by the value of
-the --bot-file parameter. This file is used by a swarming bot to communicate
-state of the host to tasks. It is written to by the swarming bot's
-on_before_task() hook in the swarming server's custom bot_config.py.
-"""
-
-__version__ = '0.8.5'
-
-import base64
-import collections
-import logging
-import optparse
-import os
-import sys
-import tempfile
-import time
-
-from third_party.depot_tools import fix_encoding
-
-from utils import file_path
-from utils import fs
-from utils import large
-from utils import logging_utils
-from utils import on_error
-from utils import subprocess42
-from utils import tools
-from utils import zip_package
-
-import auth
-import cipd
-import isolateserver
-
-
-# Absolute path to this file (can be None if running from zip on Mac).
-THIS_FILE_PATH = os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())) if __file__ else None
-
-# Directory that contains this file (might be inside zip package).
-BASE_DIR = os.path.dirname(THIS_FILE_PATH) if __file__.decode(
-    sys.getfilesystemencoding()) else None
-
-# Directory that contains currently running script file.
-if zip_package.get_main_script_path():
-  MAIN_DIR = os.path.dirname(
-      os.path.abspath(zip_package.get_main_script_path()))
-else:
-  # This happens when 'import run_isolated' is executed at the python
-  # interactive prompt, in that case __file__ is undefined.
-  MAIN_DIR = None
-
-
-# Magic variables that can be found in the isolate task command line.
-ISOLATED_OUTDIR_PARAMETER = '${ISOLATED_OUTDIR}'
-EXECUTABLE_SUFFIX_PARAMETER = '${EXECUTABLE_SUFFIX}'
-SWARMING_BOT_FILE_PARAMETER = '${SWARMING_BOT_FILE}'
-
-
-# The name of the log file to use.
-RUN_ISOLATED_LOG_FILE = 'run_isolated.log'
-
-
-# The name of the log to use for the run_test_cases.py command
-RUN_TEST_CASES_LOG = 'run_test_cases.log'
-
-
-# Use short names for temporary directories. This is driven by Windows, which
-# imposes a relatively short maximum path length of 260 characters, often
-# referred to as MAX_PATH. It is relatively easy to create files with longer
-# paths. A use case is recursive dependency trees, like npm packages.
-#
-# It is recommended to start the script with a `root_dir` as short as
-# possible.
-# - ir stands for isolated_run
-# - io stands for isolated_out
-# - it stands for isolated_tmp
-ISOLATED_RUN_DIR = u'ir'
-ISOLATED_OUT_DIR = u'io'
-ISOLATED_TMP_DIR = u'it'
-
-
-def get_as_zip_package(executable=True):
-  """Returns ZipPackage with this module and all its dependencies.
-
-  If |executable| is True, will store run_isolated.py as __main__.py so that the
-  zip package is directly executable by python.
-  """
-  # Building a zip package when running from another zip package is
-  # unsupported and probably unneeded.
-  assert not zip_package.is_zipped_module(sys.modules[__name__])
-  assert THIS_FILE_PATH
-  assert BASE_DIR
-  package = zip_package.ZipPackage(root=BASE_DIR)
-  package.add_python_file(THIS_FILE_PATH, '__main__.py' if executable else None)
-  package.add_python_file(os.path.join(BASE_DIR, 'isolated_format.py'))
-  package.add_python_file(os.path.join(BASE_DIR, 'isolateserver.py'))
-  package.add_python_file(os.path.join(BASE_DIR, 'auth.py'))
-  package.add_python_file(os.path.join(BASE_DIR, 'cipd.py'))
-  package.add_directory(os.path.join(BASE_DIR, 'libs'))
-  package.add_directory(os.path.join(BASE_DIR, 'third_party'))
-  package.add_directory(os.path.join(BASE_DIR, 'utils'))
-  return package
-
-
-def make_temp_dir(prefix, root_dir):
-  """Returns a new unique temporary directory."""
-  return unicode(tempfile.mkdtemp(prefix=prefix, dir=root_dir))
-
-
-def change_tree_read_only(rootdir, read_only):
-  """Changes the tree read-only bits according to the read_only specification.
-
-  The flag can be 0, 1 or 2, which affects whether files can be modified,
-  created or deleted.
-  """
-  if read_only == 2:
-    # Files and directories (except on Windows) are marked read only. This
-    # inhibits modifying, creating or deleting files in the test directory,
-    # except on Windows where creating and deleting files is still possible.
-    file_path.make_tree_read_only(rootdir)
-  elif read_only == 1:
-    # Files are marked read only but not the directories. This inhibits
-    # modifying files but creating or deleting files is still possible.
-    file_path.make_tree_files_read_only(rootdir)
-  elif read_only in (0, None):
-    # Anything can be modified.
-    # TODO(maruel): This is currently dangerous as long as DiskCache.touch()
-    # is not yet changed to verify the hash of the content of the files it is
-    # looking at, so that if a test modifies an input file, the file must be
-    # deleted.
-    file_path.make_tree_writeable(rootdir)
-  else:
-    raise ValueError(
-        'change_tree_read_only(%s, %s): Unknown flag %s' %
-        (rootdir, read_only, read_only))
-
-
-def process_command(command, out_dir, bot_file):
-  """Replaces variables in a command line.
-
-  Raises:
-    ValueError if a parameter is requested in |command| but its value is not
-      provided.
-  """
-  def fix(arg):
-    arg = arg.replace(EXECUTABLE_SUFFIX_PARAMETER, cipd.EXECUTABLE_SUFFIX)
-    replace_slash = False
-    if ISOLATED_OUTDIR_PARAMETER in arg:
-      if not out_dir:
-        raise ValueError(
-            'output directory is requested in command, but not provided; '
-            'please specify one')
-      arg = arg.replace(ISOLATED_OUTDIR_PARAMETER, out_dir)
-      replace_slash = True
-    if SWARMING_BOT_FILE_PARAMETER in arg:
-      if bot_file:
-        arg = arg.replace(SWARMING_BOT_FILE_PARAMETER, bot_file)
-        replace_slash = True
-      else:
-        logging.warning('SWARMING_BOT_FILE_PARAMETER found in command, but no '
-                        'bot_file specified. Leaving parameter unchanged.')
-    if replace_slash:
-      # Replace slashes only if parameters are present
-      # because of arguments like '${ISOLATED_OUTDIR}/foo/bar'
-      arg = arg.replace('/', os.sep)
-    return arg
-
-  return [fix(arg) for arg in command]
-
-
-def run_command(command, cwd, tmp_dir, hard_timeout, grace_period):
-  """Runs the command.
-
-  Returns:
-    tuple(process exit code, bool if had a hard timeout)
-  """
-  logging.info('run_command(%s, %s)' % (command, cwd))
-
-  env = os.environ.copy()
-  if sys.platform == 'darwin':
-    env['TMPDIR'] = tmp_dir.encode(sys.getfilesystemencoding())
-  elif sys.platform == 'win32':
-    env['TEMP'] = tmp_dir.encode(sys.getfilesystemencoding())
-  else:
-    env['TMP'] = tmp_dir.encode(sys.getfilesystemencoding())
-  exit_code = None
-  had_hard_timeout = False
-  with tools.Profiler('RunTest'):
-    proc = None
-    had_signal = []
-    try:
-      # TODO(maruel): This code is imperfect. It doesn't handle signals well
-      # during the download phase, and there are short windows where things can
-      # go wrong.
-      def handler(signum, _frame):
-        if proc and not had_signal:
-          logging.info('Received signal %d', signum)
-          had_signal.append(True)
-          raise subprocess42.TimeoutExpired(command, None)
-
-      proc = subprocess42.Popen(command, cwd=cwd, env=env, detached=True)
-      with subprocess42.set_signal_handler(subprocess42.STOP_SIGNALS, handler):
-        try:
-          exit_code = proc.wait(hard_timeout or None)
-        except subprocess42.TimeoutExpired:
-          if not had_signal:
-            logging.warning('Hard timeout')
-            had_hard_timeout = True
-          logging.warning('Sending SIGTERM')
-          proc.terminate()
-
-      # Ignore signals in grace period. Forcibly give the grace period to the
-      # child process.
-      if exit_code is None:
-        ignore = lambda *_: None
-        with subprocess42.set_signal_handler(subprocess42.STOP_SIGNALS, ignore):
-          try:
-            exit_code = proc.wait(grace_period or None)
-          except subprocess42.TimeoutExpired:
-            # Now kill for real. The user can distinguish between the
-            # following states:
-            # - signal received but the process exited within the grace period:
-            #   had_hard_timeout will be set but the process exit code will be
-            #   the one the script provided.
-            # - the process exited late: the exit code will be -9 on posix.
-            logging.warning('Grace exhausted; sending SIGKILL')
-            proc.kill()
-      logging.info('Waiting for process exit')
-      exit_code = proc.wait()
-    except OSError:
-      # This is not considered to be an internal error. The executable simply
-      # does not exist.
-      sys.stderr.write(
-          '<The executable does not exist or a dependent library is missing>\n'
-          '<Check for missing .so/.dll in the .isolate or GN file>\n'
-          '<Command: %s>\n' % command)
-      if os.environ.get('SWARMING_TASK_ID'):
-        # Give an additional hint when running as a swarming task.
-        sys.stderr.write(
-            '<See the task\'s page for commands to help diagnose this issue '
-            'by reproducing the task locally>\n')
-      exit_code = 1
-  logging.info(
-      'Command finished with exit code %d (%s)',
-      exit_code, hex(0xffffffff & exit_code))
-  return exit_code, had_hard_timeout
-
-
-def fetch_and_map(isolated_hash, storage, cache, outdir, use_symlinks):
-  """Fetches an isolated tree, create the tree and returns (bundle, stats)."""
-  start = time.time()
-  bundle = isolateserver.fetch_isolated(
-      isolated_hash=isolated_hash,
-      storage=storage,
-      cache=cache,
-      outdir=outdir,
-      use_symlinks=use_symlinks)
-  return bundle, {
-    'duration': time.time() - start,
-    'initial_number_items': cache.initial_number_items,
-    'initial_size': cache.initial_size,
-    'items_cold': base64.b64encode(large.pack(sorted(cache.added))),
-    'items_hot': base64.b64encode(
-        large.pack(sorted(set(cache.used) - set(cache.added)))),
-  }
-
-
-def delete_and_upload(storage, out_dir, leak_temp_dir):
-  """Deletes the temporary run directory and uploads results back.
-
-  Returns:
-    tuple(outputs_ref, success, stats)
-    - outputs_ref: a dict referring to the results archived back to the isolated
-          server, if applicable.
-    - success: False if something occurred that means that the task must
-          forcibly be considered a failure, e.g. zombie processes were left
-          behind.
-    - stats: uploading stats.
-  """
-
-  # Upload out_dir and generate a .isolated file out of this directory. It is
-  # only done if files were written in the directory.
-  outputs_ref = None
-  cold = []
-  hot = []
-  start = time.time()
-
-  if fs.isdir(out_dir) and fs.listdir(out_dir):
-    with tools.Profiler('ArchiveOutput'):
-      try:
-        results, f_cold, f_hot = isolateserver.archive_files_to_storage(
-            storage, [out_dir], None)
-        outputs_ref = {
-          'isolated': results[0][0],
-          'isolatedserver': storage.location,
-          'namespace': storage.namespace,
-        }
-        cold = sorted(i.size for i in f_cold)
-        hot = sorted(i.size for i in f_hot)
-      except isolateserver.Aborted:
-        # This happens when a SIGTERM signal was received while uploading data.
-        # There are two causes:
-        # - The task was too slow and was about to be killed anyway due to
-        #   exceeding the hard timeout.
-        # - The amount of data uploaded back is very large and took too much
-        #   time to archive.
-        sys.stderr.write('Received SIGTERM while uploading')
-        # Re-raise, so it will be treated as an internal failure.
-        raise
-
-  success = False
-  try:
-    if (not leak_temp_dir and fs.isdir(out_dir) and
-        not file_path.rmtree(out_dir)):
-      logging.error('Had difficulties removing out_dir %s', out_dir)
-    else:
-      success = True
-  except OSError as e:
-    # When this happens, it means there's a process error.
-    logging.exception('Had difficulties removing out_dir %s: %s', out_dir, e)
-  stats = {
-    'duration': time.time() - start,
-    'items_cold': base64.b64encode(large.pack(cold)),
-    'items_hot': base64.b64encode(large.pack(hot)),
-  }
-  return outputs_ref, success, stats
-
-
-def map_and_run(
-    command, isolated_hash, storage, cache, leak_temp_dir, root_dir,
-    hard_timeout, grace_period, bot_file, extra_args, install_packages_fn,
-    use_symlinks):
-  """Runs a command with optional isolated input/output.
-
-  See run_tha_test for argument documentation.
-
-  Returns metadata about the result.
-  """
-  assert bool(command) ^ bool(isolated_hash)
-  result = {
-    'duration': None,
-    'exit_code': None,
-    'had_hard_timeout': False,
-    'internal_failure': None,
-    'stats': {
-    # 'isolated': {
-    #    'cipd': {
-    #      'duration': 0.,
-    #      'get_client_duration': 0.,
-    #    },
-    #    'download': {
-    #      'duration': 0.,
-    #      'initial_number_items': 0,
-    #      'initial_size': 0,
-    #      'items_cold': '<large.pack()>',
-    #      'items_hot': '<large.pack()>',
-    #    },
-    #    'upload': {
-    #      'duration': 0.,
-    #      'items_cold': '<large.pack()>',
-    #      'items_hot': '<large.pack()>',
-    #    },
-    #  },
-    },
-    # 'cipd_pins': {
-    #   'packages': [
-    #     {'package_name': ..., 'version': ..., 'path': ...},
-    #     ...
-    #   ],
-    #  'client_package': {'package_name': ..., 'version': ...},
-    # },
-    'outputs_ref': None,
-    'version': 5,
-  }
-
-  if root_dir:
-    file_path.ensure_tree(root_dir, 0700)
-  else:
-    root_dir = os.path.dirname(cache.cache_dir) if cache.cache_dir else None
-  # See comment for these constants.
-  run_dir = make_temp_dir(ISOLATED_RUN_DIR, root_dir)
-  # storage should normally be set, but don't crash if it is not. This can
-  # happen as a Swarming task can run without an isolate server.
-  out_dir = make_temp_dir(ISOLATED_OUT_DIR, root_dir) if storage else None
-  tmp_dir = make_temp_dir(ISOLATED_TMP_DIR, root_dir)
-  cwd = run_dir
-
-  try:
-    cipd_info = install_packages_fn(run_dir)
-    if cipd_info:
-      result['stats']['cipd'] = cipd_info['stats']
-      result['cipd_pins'] = cipd_info['cipd_pins']
-
-    if isolated_hash:
-      isolated_stats = result['stats'].setdefault('isolated', {})
-      bundle, isolated_stats['download'] = fetch_and_map(
-          isolated_hash=isolated_hash,
-          storage=storage,
-          cache=cache,
-          outdir=run_dir,
-          use_symlinks=use_symlinks)
-      if not bundle.command:
-        # Handle this as a task failure, not an internal failure.
-        sys.stderr.write(
-            '<The .isolated doesn\'t declare any command to run!>\n'
-            '<Check your .isolate for missing \'command\' variable>\n')
-        if os.environ.get('SWARMING_TASK_ID'):
-          # Give an additional hint when running as a swarming task.
-          sys.stderr.write('<This occurs at the \'isolate\' step>\n')
-        result['exit_code'] = 1
-        return result
-
-      change_tree_read_only(run_dir, bundle.read_only)
-      cwd = os.path.normpath(os.path.join(cwd, bundle.relative_cwd))
-      command = bundle.command + extra_args
-
-    command = tools.fix_python_path(command)
-    command = process_command(command, out_dir, bot_file)
-    file_path.ensure_command_has_abs_path(command, cwd)
-
-    sys.stdout.flush()
-    start = time.time()
-    try:
-      result['exit_code'], result['had_hard_timeout'] = run_command(
-          command, cwd, tmp_dir, hard_timeout, grace_period)
-    finally:
-      result['duration'] = max(time.time() - start, 0)
-  except Exception as e:
-    # An internal error occurred. Report accordingly so the swarming task will
-    # be retried automatically.
-    logging.exception('internal failure: %s', e)
-    result['internal_failure'] = str(e)
-    on_error.report(None)
-  finally:
-    try:
-      if leak_temp_dir:
-        logging.warning(
-            'Deliberately leaking %s for later examination', run_dir)
-      else:
-        # On Windows the rmtree(run_dir) call below has a synchronization
-        # effect: it finishes only when all task child processes terminate
-        # (since a running process locks its *.exe file). Examine out_dir only
-        # after that call completes (since child processes may write to out_dir
-        # too and we need to wait for them to finish).
-        if fs.isdir(run_dir):
-          try:
-            success = file_path.rmtree(run_dir)
-          except OSError as e:
-            logging.error('Failure with %s', e)
-            success = False
-          if not success:
-            print >> sys.stderr, (
-                'Failed to delete the run directory, forcibly failing\n'
-                'the task because of it. No zombie process can outlive a\n'
-                'successful task run and still be marked as successful.\n'
-                'Fix your stuff.')
-            if result['exit_code'] == 0:
-              result['exit_code'] = 1
-        if fs.isdir(tmp_dir):
-          try:
-            success = file_path.rmtree(tmp_dir)
-          except OSError as e:
-            logging.error('Failure with %s', e)
-            success = False
-          if not success:
-            print >> sys.stderr, (
-                'Failed to delete the temporary directory, forcibly failing\n'
-                'the task because of it. No zombie process can outlive a\n'
-                'successful task run and still be marked as successful.\n'
-                'Fix your stuff.')
-            if result['exit_code'] == 0:
-              result['exit_code'] = 1
-
-      # This deletes out_dir if leak_temp_dir is not set.
-      if out_dir:
-        isolated_stats = result['stats'].setdefault('isolated', {})
-        result['outputs_ref'], success, isolated_stats['upload'] = (
-            delete_and_upload(storage, out_dir, leak_temp_dir))
-      if not success and result['exit_code'] == 0:
-        result['exit_code'] = 1
-    except Exception as e:
-      # Swallow any exception in the main finally clause.
-      if out_dir:
-        logging.exception('Leaking out_dir %s: %s', out_dir, e)
-      result['internal_failure'] = str(e)
-  return result
-
-
-def run_tha_test(
-    command, isolated_hash, storage, cache, leak_temp_dir, result_json,
-    root_dir, hard_timeout, grace_period, bot_file, extra_args,
-    install_packages_fn, use_symlinks):
-  """Runs an executable and records execution metadata.
-
-  Either command or isolated_hash must be specified.
-
-  If isolated_hash is specified, downloads the dependencies in the cache,
-  hardlinks them into a temporary directory and runs the command specified in
-  the .isolated.
-
-  A temporary directory is created to hold the output files. The content inside
-  this directory will be uploaded back to |storage| packaged as a .isolated
-  file.
-
-  Arguments:
-    command: the command to run, a list of strings. Mutually exclusive with
-             isolated_hash.
-    isolated_hash: the SHA-1 of the .isolated file that must be retrieved to
-                   recreate the tree of files to run the target executable.
-                   The command specified in the .isolated is executed.
-                   Mutually exclusive with command argument.
-    storage: an isolateserver.Storage object to retrieve remote objects. This
-             object has a reference to an isolateserver.StorageApi, which does
-             the actual I/O.
-    cache: an isolateserver.LocalCache to keep from retrieving the same objects
-           constantly by caching the objects retrieved. Can be on-disk or
-           in-memory.
-    leak_temp_dir: if true, the temporary directory will be deliberately leaked
-                   for later examination.
-    result_json: file path to dump result metadata into. If set, the process
-                 exit code is always 0 unless an internal error occurred.
-    root_dir: path to the directory to use to create the temporary directory. If
-              not specified, a random temporary directory is created.
-    hard_timeout: kills the process if it lasts more than this amount of
-                  seconds.
-    grace_period: number of seconds to wait between SIGTERM and SIGKILL.
-    extra_args: optional arguments to add to the command stated in the .isolated
-                file. Ignored if isolated_hash is empty.
-    install_packages_fn: function (dir) => {"stats": cipd_stats, "pins":
-                         cipd_pins}. Installs packages.
-    use_symlinks: create tree with symlinks instead of hardlinks.
-
-  Returns:
-    Process exit code that should be used.
-  """
-  assert bool(command) ^ bool(isolated_hash)
-  extra_args = extra_args or []
-
-  if any(ISOLATED_OUTDIR_PARAMETER in a for a in (command or extra_args)):
-    assert storage is not None, 'storage is None although outdir is specified'
-
-  if result_json:
-    # Write a json output file right away in case we get killed.
-    result = {
-      'exit_code': None,
-      'had_hard_timeout': False,
-      'internal_failure': 'Was terminated before completion',
-      'outputs_ref': None,
-      'version': 5,
-    }
-    tools.write_json(result_json, result, dense=True)
-
-  # run_isolated exit code. Depends on whether result_json is used or not.
-  result = map_and_run(
-      command, isolated_hash, storage, cache, leak_temp_dir, root_dir,
-      hard_timeout, grace_period, bot_file, extra_args, install_packages_fn,
-      use_symlinks)
-  logging.info('Result:\n%s', tools.format_json(result, dense=True))
-
-  if result_json:
-    # We've seen tests delete the 'work' directory when quitting, causing an
-    # exception here. Try to recreate the directory if necessary.
-    file_path.ensure_tree(os.path.dirname(result_json))
-    tools.write_json(result_json, result, dense=True)
-    # Only return 1 if there was an internal error.
-    return int(bool(result['internal_failure']))
-
-  # Marshal into old-style inline output.
-  if result['outputs_ref']:
-    data = {
-      'hash': result['outputs_ref']['isolated'],
-      'namespace': result['outputs_ref']['namespace'],
-      'storage': result['outputs_ref']['isolatedserver'],
-    }
-    sys.stdout.flush()
-    print(
-        '[run_isolated_out_hack]%s[/run_isolated_out_hack]' %
-        tools.format_json(data, dense=True))
-    sys.stdout.flush()
-  return result['exit_code'] or int(bool(result['internal_failure']))
-
-
-def install_packages(
-    run_dir, packages, service_url, client_package_name,
-    client_version, cache_dir=None, timeout=None):
-  """Installs packages. Returns stats, cipd client info and pins.
-
-  pins and the cipd client info are in the form of:
-    [
-      {
-        "path": path, "package_name": package_name, "version": version,
-      },
-      ...
-    ]
-  (the cipd client info is a single dictionary instead of a list)
-
-  such that they correspond 1:1 to all input package arguments from the command
-  line. These dictionaries make their way all the way back to swarming, where
-  they become the arguments of CipdPackage.
-
-  Args:
-    run_dir (str): root of installation.
-    packages: packages to install, list [(path, package_name, version), ...]
-    service_url (str): CIPD server url, e.g.
-      "https://chrome-infra-packages.appspot.com."
-    client_package_name (str): CIPD package name of CIPD client.
-    client_version (str): Version of CIPD client.
-    cache_dir (str): where to keep cache of cipd clients, packages and tags.
-    timeout: max duration in seconds that this function can take.
-  """
-  assert cache_dir
-  if not packages:
-    return None
-
-  timeoutfn = tools.sliding_timeout(timeout)
-  start = time.time()
-  cache_dir = os.path.abspath(cache_dir)
-
-  run_dir = os.path.abspath(run_dir)
-
-  package_pins = [None]*len(packages)
-  def insert_pin(path, name, version, idx):
-    path = path.replace(os.path.sep, '/')
-    package_pins[idx] = {
-      'package_name': name,
-      'path': path,
-      'version': version,
-    }
-
-  get_client_start = time.time()
-  client_manager = cipd.get_client(
-      service_url, client_package_name, client_version, cache_dir,
-      timeout=timeoutfn())
-
-  by_path = collections.defaultdict(list)
-  for i, (path, name, version) in enumerate(packages):
-    path = path.replace('/', os.path.sep)
-    by_path[path].append((name, version, i))
-
-  with client_manager as client:
-    client_package = {
-      'package_name': client.package_name,
-      'version': client.instance_id,
-    }
-    get_client_duration = time.time() - get_client_start
-    for path, pkgs in sorted(by_path.iteritems()):
-      site_root = os.path.abspath(os.path.join(run_dir, path))
-      if not site_root.startswith(run_dir):
-        raise cipd.Error('Invalid CIPD package path "%s"' % path)
-
-      # Do not clean site_root before installation because it may contain other
-      # site roots.
-      file_path.ensure_tree(site_root, 0770)
-      pins = client.ensure(
-          site_root, [(name, vers) for name, vers, _ in pkgs],
-          cache_dir=os.path.join(cache_dir, 'cipd_internal'),
-          timeout=timeoutfn())
-      for i, pin in enumerate(pins):
-        insert_pin(path, pin[0], pin[1], pkgs[i][2])
-      file_path.make_tree_files_read_only(site_root)
-
-  total_duration = time.time() - start
-  logging.info(
-      'Installing CIPD client and packages took %d seconds', total_duration)
-
-  assert None not in package_pins
-
-  return {
-    'stats': {
-      'duration': total_duration,
-      'get_client_duration': get_client_duration,
-    },
-    'cipd_pins': {
-      'client_package': client_package,
-      'packages': package_pins,
-    }
-  }
-
-
-def create_option_parser():
-  parser = logging_utils.OptionParserWithLogging(
-      usage='%prog <options> [command to run or extra args]',
-      version=__version__,
-      log_file=RUN_ISOLATED_LOG_FILE)
-  parser.add_option(
-      '--clean', action='store_true',
-      help='Cleans the cache, trimming it as necessary and removing corrupted '
-           'items, and returns without executing anything; use with -v to know '
-           'what was done')
-  parser.add_option(
-      '--no-clean', action='store_true',
-      help='Do not clean the cache automatically on startup. This is meant for '
-           'bots where a separate execution with --clean was done earlier so '
-           'doing it again is redundant')
-  parser.add_option(
-      '--use-symlinks', action='store_true',
-      help='Use symlinks instead of hardlinks')
-  parser.add_option(
-      '--json',
-      help='dump output metadata to json file. When used, run_isolated returns '
-           'non-zero only on internal failure')
-  parser.add_option(
-      '--hard-timeout', type='float', help='Enforce hard timeout in execution')
-  parser.add_option(
-      '--grace-period', type='float',
-      help='Grace period between SIGTERM and SIGKILL')
-  parser.add_option(
-      '--bot-file',
-      help='Path to a file describing the state of the host. The content is '
-           'defined by on_before_task() in bot_config.')
-  data_group = optparse.OptionGroup(parser, 'Data source')
-  data_group.add_option(
-      '-s', '--isolated',
-      help='Hash of the .isolated to grab from the isolate server.')
-  isolateserver.add_isolate_server_options(data_group)
-  parser.add_option_group(data_group)
-
-  isolateserver.add_cache_options(parser)
-
-  cipd.add_cipd_options(parser)
-
-  debug_group = optparse.OptionGroup(parser, 'Debugging')
-  debug_group.add_option(
-      '--leak-temp-dir',
-      action='store_true',
-      help='Deliberately leak isolate\'s temp dir for later examination. '
-           'Default: %default')
-  debug_group.add_option(
-      '--root-dir', help='Use a directory instead of a random one')
-  parser.add_option_group(debug_group)
-
-  auth.add_auth_options(parser)
-
-  parser.set_defaults(cache='cache', cipd_cache='cipd_cache')
-  return parser
-
-
-def main(args):
-  parser = create_option_parser()
-  options, args = parser.parse_args(args)
-
-  cache = isolateserver.process_cache_options(options)
-  if options.clean:
-    if options.isolated:
-      parser.error('Can\'t use --isolated with --clean.')
-    if options.isolate_server:
-      parser.error('Can\'t use --isolate-server with --clean.')
-    if options.json:
-      parser.error('Can\'t use --json with --clean.')
-    cache.cleanup()
-    return 0
-  if not options.no_clean:
-    cache.cleanup()
-
-  if not options.isolated and not args:
-    parser.error('--isolated or command to run is required.')
-
-  auth.process_auth_options(parser, options)
-
-  isolateserver.process_isolate_server_options(
-    parser, options, True, False)
-  if not options.isolate_server:
-    if options.isolated:
-      parser.error('--isolated requires --isolate-server')
-    if ISOLATED_OUTDIR_PARAMETER in args:
-      parser.error(
-        '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER)
-
-  if options.root_dir:
-    options.root_dir = unicode(os.path.abspath(options.root_dir))
-  if options.json:
-    options.json = unicode(os.path.abspath(options.json))
-
-  cipd.validate_cipd_options(parser, options)
-
-  install_packages_fn = lambda run_dir: install_packages(
-      run_dir, cipd.parse_package_args(options.cipd_packages),
-      options.cipd_server, options.cipd_client_package,
-      options.cipd_client_version, cache_dir=options.cipd_cache)
-
-  try:
-    command = [] if options.isolated else args
-    if options.isolate_server:
-      storage = isolateserver.get_storage(
-          options.isolate_server, options.namespace)
-      with storage:
-        # Hashing schemes used by |storage| and |cache| MUST match.
-        assert storage.hash_algo == cache.hash_algo
-        return run_tha_test(
-            command, options.isolated, storage, cache, options.leak_temp_dir,
-            options.json, options.root_dir, options.hard_timeout,
-            options.grace_period, options.bot_file, args, install_packages_fn,
-            options.use_symlinks)
-    return run_tha_test(
-        command, options.isolated, None, cache, options.leak_temp_dir,
-        options.json, options.root_dir, options.hard_timeout,
-        options.grace_period, options.bot_file, args, install_packages_fn,
-        options.use_symlinks)
-  except cipd.Error as ex:
-    print >> sys.stderr, ex.message
-    return 1
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  # Ensure that we are always running with the correct encoding.
-  fix_encoding.fix_encoding()
-  file_path.enable_symlink()
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/swarming_client/swarming.py b/tools/swarming_client/swarming.py
deleted file mode 100755
index a2987a1..0000000
--- a/tools/swarming_client/swarming.py
+++ /dev/null
@@ -1,1606 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Client tool to trigger tasks or retrieve results from a Swarming server."""
-
-__version__ = '0.8.6'
-
-import collections
-import datetime
-import json
-import logging
-import optparse
-import os
-import subprocess
-import sys
-import tempfile
-import threading
-import time
-import urllib
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from third_party.depot_tools import subcommand
-
-from utils import file_path
-from utils import fs
-from utils import logging_utils
-from third_party.chromium import natsort
-from utils import net
-from utils import on_error
-from utils import subprocess42
-from utils import threading_utils
-from utils import tools
-
-import auth
-import isolated_format
-import isolateserver
-import run_isolated
-
-
-ROOT_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-
-
-class Failure(Exception):
-  """Generic failure."""
-  pass
-
-
-### Isolated file handling.
-
-
-def isolated_to_hash(arg, algo):
-  """Archives a .isolated file if needed.
-
-  Returns the file hash to trigger and a bool specifying if it was a file (True)
-  or a hash (False).
-  """
-  if arg.endswith('.isolated'):
-    arg = unicode(os.path.abspath(arg))
-    file_hash = isolated_format.hash_file(arg, algo)
-    if not file_hash:
-      on_error.report('Archival failure %s' % arg)
-      return None, True
-    return file_hash, True
-  elif isolated_format.is_valid_hash(arg, algo):
-    return arg, False
-  else:
-    on_error.report('Invalid hash %s' % arg)
-    return None, False
-
-
-def isolated_handle_options(options, args):
-  """Handles '--isolated <isolated>', '<isolated>' and '-- <args...>' arguments.
-
-  Returns:
-    tuple(command, inputs_ref).
-  """
-  isolated_cmd_args = []
-  is_file = False
-  if not options.isolated:
-    if '--' in args:
-      index = args.index('--')
-      isolated_cmd_args = args[index+1:]
-      args = args[:index]
-    else:
-      # optparse eats '--' sometimes.
-      isolated_cmd_args = args[1:]
-      args = args[:1]
-    if len(args) != 1:
-      raise ValueError(
-          'Use --isolated, --raw-cmd or \'--\' to pass arguments to the called '
-          'process.')
-    # Old code. To be removed eventually.
-    options.isolated, is_file = isolated_to_hash(
-        args[0], isolated_format.get_hash_algo(options.namespace))
-    if not options.isolated:
-      raise ValueError('Invalid argument %s' % args[0])
-  elif args:
-    if '--' in args:
-      index = args.index('--')
-      isolated_cmd_args = args[index+1:]
-      if index != 0:
-        raise ValueError('Unexpected arguments.')
-    else:
-      # optparse eats '--' sometimes.
-      isolated_cmd_args = args
-
-  # If a file name was passed, use its base name instead of the isolated hash.
-  # Otherwise, use user name as an approximation of a task name.
-  if not options.task_name:
-    if is_file:
-      key = os.path.splitext(os.path.basename(args[0]))[0]
-    else:
-      key = options.user
-    options.task_name = u'%s/%s/%s' % (
-        key,
-        '_'.join(
-            '%s=%s' % (k, v)
-            for k, v in sorted(options.dimensions.iteritems())),
-        options.isolated)
-
-  inputs_ref = FilesRef(
-      isolated=options.isolated,
-      isolatedserver=options.isolate_server,
-      namespace=options.namespace)
-  return isolated_cmd_args, inputs_ref
-
-
-### Triggering.
-
-
-# See ../appengine/swarming/swarming_rpcs.py.
-CipdPackage = collections.namedtuple(
-    'CipdPackage',
-    [
-      'package_name',
-      'path',
-      'version',
-    ])
-
-
-# See ../appengine/swarming/swarming_rpcs.py.
-CipdInput = collections.namedtuple(
-    'CipdInput',
-    [
-      'client_package',
-      'packages',
-      'server',
-    ])
-
-
-# See ../appengine/swarming/swarming_rpcs.py.
-FilesRef = collections.namedtuple(
-    'FilesRef',
-    [
-      'isolated',
-      'isolatedserver',
-      'namespace',
-    ])
-
-
-# See ../appengine/swarming/swarming_rpcs.py.
-TaskProperties = collections.namedtuple(
-    'TaskProperties',
-    [
-      'cipd_input',
-      'command',
-      'dimensions',
-      'env',
-      'execution_timeout_secs',
-      'extra_args',
-      'grace_period_secs',
-      'idempotent',
-      'inputs_ref',
-      'io_timeout_secs',
-    ])
-
-
-# See ../appengine/swarming/swarming_rpcs.py.
-NewTaskRequest = collections.namedtuple(
-    'NewTaskRequest',
-    [
-      'expiration_secs',
-      'name',
-      'parent_task_id',
-      'priority',
-      'properties',
-      'tags',
-      'user',
-    ])
-
-
-def namedtuple_to_dict(value):
-  """Recursively converts a namedtuple to a dict."""
-  out = dict(value._asdict())
-  for k, v in out.iteritems():
-    if hasattr(v, '_asdict'):
-      out[k] = namedtuple_to_dict(v)
-    elif isinstance(v, (list, tuple)):
-      l = []
-      for elem in v:
-        if hasattr(elem, '_asdict'):
-          l.append(namedtuple_to_dict(elem))
-        else:
-          l.append(elem)
-      out[k] = l
-  return out
-
-
-def task_request_to_raw_request(task_request):
-  """Returns the json-compatible dict expected by the server for new request.
-
-  This is for the v1 client Swarming API.
-  """
-  out = namedtuple_to_dict(task_request)
-  # Maps are not supported until protobuf v3.
-  out['properties']['dimensions'] = [
-    {'key': k, 'value': v}
-    for k, v in out['properties']['dimensions'].iteritems()
-  ]
-  out['properties']['dimensions'].sort(key=lambda x: x['key'])
-  out['properties']['env'] = [
-    {'key': k, 'value': v}
-    for k, v in out['properties']['env'].iteritems()
-  ]
-  out['properties']['env'].sort(key=lambda x: x['key'])
-  return out
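
For illustration only (hypothetical values), the key/value flattening done by
task_request_to_raw_request above, which is needed because maps are not
supported until protobuf v3:

  dimensions = {'pool': 'Chrome', 'os': 'Linux'}
  flattened = sorted(
      ({'key': k, 'value': v} for k, v in dimensions.items()),
      key=lambda x: x['key'])
  # flattened == [{'key': 'os', 'value': 'Linux'},
  #               {'key': 'pool', 'value': 'Chrome'}]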
-
-
-def swarming_trigger(swarming, raw_request):
-  """Triggers a request on the Swarming server and returns the json data.
-
-  It's the low-level function.
-
-  Returns:
-    {
-      'request': {
-        'created_ts': u'2010-01-02 03:04:05',
-        'name': ..
-      },
-      'task_id': '12300',
-    }
-  """
-  logging.info('Triggering: %s', raw_request['name'])
-
-  result = net.url_read_json(
-      swarming + '/api/swarming/v1/tasks/new', data=raw_request)
-  if not result:
-    on_error.report('Failed to trigger task %s' % raw_request['name'])
-    return None
-  if result.get('error'):
-    # The reply is an error.
-    msg = 'Failed to trigger task %s' % raw_request['name']
-    if result['error'].get('errors'):
-      for err in result['error']['errors']:
-        if err.get('message'):
-          msg += '\nMessage: %s' % err['message']
-        if err.get('debugInfo'):
-          msg += '\nDebug info:\n%s' % err['debugInfo']
-    elif result['error'].get('message'):
-      msg += '\nMessage: %s' % result['error']['message']
-
-    on_error.report(msg)
-    return None
-  return result
-
-
-def setup_googletest(env, shards, index):
-  """Sets googletest specific environment variables."""
-  if shards > 1:
-    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
-    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
-    env = env[:]
-    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
-    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
-  return env
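
For illustration only, what setup_googletest above adds for a sharded run,
assuming 3 shards and shard index 1:

  env = setup_googletest([], 3, 1)
  # env == [{'key': 'GTEST_SHARD_INDEX', 'value': '1'},
  #         {'key': 'GTEST_TOTAL_SHARDS', 'value': '3'}]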
-
-
-def trigger_task_shards(swarming, task_request, shards):
-  """Triggers one or many subtasks of a sharded task.
-
-  Returns:
-    Dict with task details, returned to caller as part of --dump-json output.
-    None in case of failure.
-  """
-  def convert(index):
-    req = task_request_to_raw_request(task_request)
-    if shards > 1:
-      req['properties']['env'] = setup_googletest(
-          req['properties']['env'], shards, index)
-      req['name'] += ':%s:%s' % (index, shards)
-    return req
-
-  requests = [convert(index) for index in xrange(shards)]
-  tasks = {}
-  priority_warning = False
-  for index, request in enumerate(requests):
-    task = swarming_trigger(swarming, request)
-    if not task:
-      break
-    logging.info('Request result: %s', task)
-    if (not priority_warning and
-        task['request']['priority'] != task_request.priority):
-      priority_warning = True
-      print >> sys.stderr, (
-          'Priority was reset to %s' % task['request']['priority'])
-    tasks[request['name']] = {
-      'shard_index': index,
-      'task_id': task['task_id'],
-      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
-    }
-
-  # Some shards weren't triggered. Abort everything.
-  if len(tasks) != len(requests):
-    if tasks:
-      print >> sys.stderr, 'Only %d shard(s) out of %d were triggered' % (
-          len(tasks), len(requests))
-      for task_dict in tasks.itervalues():
-        abort_task(swarming, task_dict['task_id'])
-    return None
-
-  return tasks
-
-
-### Collection.
-
-
-# How often to print status updates to stdout in 'collect'.
-STATUS_UPDATE_INTERVAL = 15 * 60.
-
-
-class State(object):
-  """States in which a task can be.
-
-  WARNING: Copy-pasted from appengine/swarming/server/task_result.py. These
-  values are part of the API so if they change, the API changed.
-
-  It's in fact an enum. Values should be in decreasing order of importance.
-  """
-  RUNNING = 0x10
-  PENDING = 0x20
-  EXPIRED = 0x30
-  TIMED_OUT = 0x40
-  BOT_DIED = 0x50
-  CANCELED = 0x60
-  COMPLETED = 0x70
-
-  STATES = (
-      'RUNNING', 'PENDING', 'EXPIRED', 'TIMED_OUT', 'BOT_DIED', 'CANCELED',
-      'COMPLETED')
-  STATES_RUNNING = ('RUNNING', 'PENDING')
-  STATES_NOT_RUNNING = (
-      'EXPIRED', 'TIMED_OUT', 'BOT_DIED', 'CANCELED', 'COMPLETED')
-  STATES_DONE = ('TIMED_OUT', 'COMPLETED')
-  STATES_ABANDONED = ('EXPIRED', 'BOT_DIED', 'CANCELED')
-
-  _NAMES = {
-    RUNNING: 'Running',
-    PENDING: 'Pending',
-    EXPIRED: 'Expired',
-    TIMED_OUT: 'Execution timed out',
-    BOT_DIED: 'Bot died',
-    CANCELED: 'User canceled',
-    COMPLETED: 'Completed',
-  }
-
-  _ENUMS = {
-    'RUNNING': RUNNING,
-    'PENDING': PENDING,
-    'EXPIRED': EXPIRED,
-    'TIMED_OUT': TIMED_OUT,
-    'BOT_DIED': BOT_DIED,
-    'CANCELED': CANCELED,
-    'COMPLETED': COMPLETED,
-  }
-
-  @classmethod
-  def to_string(cls, state):
-    """Returns a user-readable string representing a State."""
-    if state not in cls._NAMES:
-      raise ValueError('Invalid state %s' % state)
-    return cls._NAMES[state]
-
-  @classmethod
-  def from_enum(cls, state):
-    """Returns int value based on the string."""
-    if state not in cls._ENUMS:
-      raise ValueError('Invalid state %s' % state)
-    return cls._ENUMS[state]
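
For illustration only, how the State helpers above map between the API's
string states and their integer values:

  State.from_enum('TIMED_OUT')     # == 0x40
  State.to_string(State.BOT_DIED)  # == 'Bot died'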
-
-
-class TaskOutputCollector(object):
-  """Assembles task execution summary (for --task-summary-json output).
-
-  Optionally fetches task outputs from isolate server to local disk (used when
-  --task-output-dir is passed).
-
-  This object is shared among multiple threads running the 'retrieve_results'
-  function; in particular, they call the 'process_shard_result' method in
-  parallel.
-  """
-
-  def __init__(self, task_output_dir, shard_count):
-    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.
-
-    Args:
-      task_output_dir: (optional) local directory to put fetched files to.
-      shard_count: expected number of task shards.
-    """
-    self.task_output_dir = (
-        unicode(os.path.abspath(task_output_dir))
-        if task_output_dir else task_output_dir)
-    self.shard_count = shard_count
-
-    self._lock = threading.Lock()
-    self._per_shard_results = {}
-    self._storage = None
-
-    if self.task_output_dir:
-      file_path.ensure_tree(self.task_output_dir)
-
-  def process_shard_result(self, shard_index, result):
-    """Stores results of a single task shard, fetches output files if necessary.
-
-    Modifies |result| in place.
-
-    shard_index is 0-based.
-
-    Called concurrently from multiple threads.
-    """
-    # Sanity check index is in expected range.
-    assert isinstance(shard_index, int)
-    if shard_index < 0 or shard_index >= self.shard_count:
-      logging.warning(
-          'Shard index %d is outside of expected range: [0; %d]',
-          shard_index, self.shard_count - 1)
-      return
-
-    if result.get('outputs_ref'):
-      ref = result['outputs_ref']
-      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
-          ref['isolatedserver'],
-          urllib.urlencode(
-              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))
-
-    # Store result dict of that shard, ignore results we've already seen.
-    with self._lock:
-      if shard_index in self._per_shard_results:
-        logging.warning('Ignoring duplicate shard index %d', shard_index)
-        return
-      self._per_shard_results[shard_index] = result
-
-    # Fetch output files if necessary.
-    if self.task_output_dir and result.get('outputs_ref'):
-      storage = self._get_storage(
-          result['outputs_ref']['isolatedserver'],
-          result['outputs_ref']['namespace'])
-      if storage:
-        # Output files are supposed to be small and they are not reused across
-        # tasks. So use MemoryCache for them instead of on-disk cache. Make
-        # files writable, so that calling script can delete them.
-        isolateserver.fetch_isolated(
-            result['outputs_ref']['isolated'],
-            storage,
-            isolateserver.MemoryCache(file_mode_mask=0700),
-            os.path.join(self.task_output_dir, str(shard_index)),
-            False)
-
-  def finalize(self):
-    """Assembles and returns task summary JSON, shutdowns underlying Storage."""
-    with self._lock:
-      # Write an array of shard results with None for missing shards.
-      summary = {
-        'shards': [
-          self._per_shard_results.get(i) for i in xrange(self.shard_count)
-        ],
-      }
-      # Write summary.json to task_output_dir as well.
-      if self.task_output_dir:
-        tools.write_json(
-            os.path.join(self.task_output_dir, u'summary.json'),
-            summary,
-            False)
-      if self._storage:
-        self._storage.close()
-        self._storage = None
-      return summary
-
-  def _get_storage(self, isolate_server, namespace):
-    """Returns isolateserver.Storage to use to fetch files."""
-    assert self.task_output_dir
-    with self._lock:
-      if not self._storage:
-        self._storage = isolateserver.get_storage(isolate_server, namespace)
-      else:
-        # Shards must all use exact same isolate server and namespace.
-        if self._storage.location != isolate_server:
-          logging.error(
-              'Task shards are using multiple isolate servers: %s and %s',
-              self._storage.location, isolate_server)
-          return None
-        if self._storage.namespace != namespace:
-          logging.error(
-              'Task shards are using multiple namespaces: %s and %s',
-              self._storage.namespace, namespace)
-          return None
-      return self._storage
-
-
-def now():
-  """Exists so it can be mocked easily."""
-  return time.time()
-
-
-def parse_time(value):
-  """Converts serialized time from the API to datetime.datetime."""
-  # When microseconds are 0, the '.123456' suffix is elided. This means the
-  # serialized format is not consistent, which confuses the hell out of python.
-  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
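
For illustration only, the two serialized forms handled by parse_time above,
with hypothetical timestamps:

  parse_time('2010-01-02T03:04:05.123456')  # microseconds present
  parse_time('2010-01-02T03:04:05')         # '.123456' suffix elided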
-
-
-def retrieve_results(
-    base_url, shard_index, task_id, timeout, should_stop, output_collector,
-    include_perf):
-  """Retrieves results for a single task ID.
-
-  Returns:
-    <result dict> on success.
-    None on failure.
-  """
-  assert timeout is None or isinstance(timeout, float), timeout
-  result_url = '%s/api/swarming/v1/task/%s/result' % (base_url, task_id)
-  if include_perf:
-    result_url += '?include_performance_stats=true'
-  output_url = '%s/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
-  started = now()
-  deadline = started + timeout if timeout else None
-  attempt = 0
-
-  while not should_stop.is_set():
-    attempt += 1
-
-    # Waiting for too long -> give up.
-    current_time = now()
-    if deadline and current_time >= deadline:
-      logging.error('retrieve_results(%s) timed out on attempt %d',
-          base_url, attempt)
-      return None
-
-    # Do not spin too fast, but spin faster at the beginning.
-    # Start with a 1 sec delay and add another second of delay for each 30 sec
-    # of waiting, until hitting the 15 sec ceiling.
-    if attempt > 1:
-      max_delay = min(15, 1 + (current_time - started) / 30.0)
-      delay = min(max_delay, deadline - current_time) if deadline else max_delay
-      if delay > 0:
-        logging.debug('Waiting %.1f sec before retrying', delay)
-        should_stop.wait(delay)
-        if should_stop.is_set():
-          return None
-
-    # Disable internal retries in net.url_read_json, since we are doing retries
-    # ourselves.
-    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
-    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
-    # request on GAE v2.
-    result = net.url_read_json(result_url, retry_50x=False)
-    if not result:
-      continue
-
-    if result.get('error'):
-      # An error occurred.
-      if result['error'].get('errors'):
-        for err in result['error']['errors']:
-          logging.warning(
-              'Error while reading task: %s; %s',
-              err.get('message'), err.get('debugInfo'))
-      elif result['error'].get('message'):
-        logging.warning(
-            'Error while reading task: %s', result['error']['message'])
-      continue
-
-    if result['state'] in State.STATES_NOT_RUNNING:
-      # TODO(maruel): Not always fetch stdout?
-      out = net.url_read_json(output_url)
-      result['output'] = out.get('output') if out else out
-      # Record the result, try to fetch attached output files (if any).
-      if output_collector:
-        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
-        output_collector.process_shard_result(shard_index, result)
-      if result.get('internal_failure'):
-        logging.error('Internal error!')
-      elif result['state'] == 'BOT_DIED':
-        logging.error('Bot died!')
-      return result
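
For illustration only, a standalone restatement of the polling backoff used in
retrieve_results above (start at 1 sec, add a second per 30 sec of waiting,
cap at 15 sec and never sleep past the deadline); poll_delay is a hypothetical
name, not part of this module:

  def poll_delay(started, current_time, deadline=None):
    max_delay = min(15, 1 + (current_time - started) / 30.0)
    return min(max_delay, deadline - current_time) if deadline else max_delay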
-
-
-def convert_to_old_format(result):
-  """Converts the task result data from Endpoints API format to old API format
-  for compatibility.
-
-  This goes into the file generated as --task-summary-json.
-  """
-  # Sets default.
-  result.setdefault('abandoned_ts', None)
-  result.setdefault('bot_id', None)
-  result.setdefault('bot_version', None)
-  result.setdefault('children_task_ids', [])
-  result.setdefault('completed_ts', None)
-  result.setdefault('cost_saved_usd', None)
-  result.setdefault('costs_usd', None)
-  result.setdefault('deduped_from', None)
-  result.setdefault('name', None)
-  result.setdefault('outputs_ref', None)
-  result.setdefault('properties_hash', None)
-  result.setdefault('server_versions', None)
-  result.setdefault('started_ts', None)
-  result.setdefault('tags', None)
-  result.setdefault('user', None)
-
-  # Conversion back to the old API.
-  duration = result.pop('duration', None)
-  result['durations'] = [duration] if duration else []
-  exit_code = result.pop('exit_code', None)
-  result['exit_codes'] = [int(exit_code)] if exit_code else []
-  result['id'] = result.pop('task_id')
-  result['isolated_out'] = result.get('outputs_ref', None)
-  output = result.pop('output', None)
-  result['outputs'] = [output] if output else []
-  # properties_hash
-  # server_version
-  # Endpoints returns 'state' as a string. For compatibility with old code,
-  # convert it to an int.
-  result['state'] = State.from_enum(result['state'])
-  result['try_number'] = (
-      int(result['try_number']) if result.get('try_number') else None)
-  if 'bot_dimensions' in result:
-    result['bot_dimensions'] = {
-      i['key']: i.get('value', []) for i in result['bot_dimensions']
-    }
-  else:
-    result['bot_dimensions'] = None
-
-
-def yield_results(
-    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
-    output_collector, include_perf):
-  """Yields swarming task results from the swarming server as (index, result).
-
-  Duplicate shards are ignored. Shards are yielded in order of completion.
-  Timed out shards are NOT yielded at all. Caller can compare number of yielded
-  shards with len(task_keys) to verify all shards completed.
-
-  max_threads is optional and is used to limit the number of parallel fetches
-  done. Since in general the number of task_keys is in the range <=10, it's
-  normally not worth limiting the number of threads. Mostly used for testing
-  purposes.
-
-  output_collector is an optional instance of TaskOutputCollector that will be
-  used to fetch files produced by a task from isolate server to the local disk.
-
-  Yields:
-    (index, result). In particular, 'result' is defined as the
-    GetRunnerResults() function in services/swarming/server/test_runner.py.
-  """
-  number_threads = (
-      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
-  should_stop = threading.Event()
-  results_channel = threading_utils.TaskChannel()
-
-  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
-    try:
-      # Adds a task to the thread pool to call 'retrieve_results' and return
-      # the results together with shard_index that produced them (as a tuple).
-      def enqueue_retrieve_results(shard_index, task_id):
-        task_fn = lambda *args: (shard_index, retrieve_results(*args))
-        pool.add_task(
-            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
-            task_id, timeout, should_stop, output_collector, include_perf)
-
-      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
-      for shard_index, task_id in enumerate(task_ids):
-        enqueue_retrieve_results(shard_index, task_id)
-
-      # Wait for all of them to finish.
-      shards_remaining = range(len(task_ids))
-      active_task_count = len(task_ids)
-      while active_task_count:
-        shard_index, result = None, None
-        try:
-          shard_index, result = results_channel.pull(
-              timeout=STATUS_UPDATE_INTERVAL)
-        except threading_utils.TaskChannel.Timeout:
-          if print_status_updates:
-            print(
-                'Waiting for results from the following shards: %s' %
-                ', '.join(map(str, shards_remaining)))
-            sys.stdout.flush()
-          continue
-        except Exception:
-          logging.exception('Unexpected exception in retrieve_results')
-
-        # A call to 'retrieve_results' finished (successfully or not).
-        active_task_count -= 1
-        if not result:
-          logging.error('Failed to retrieve the results for a swarming key')
-          continue
-
-        # Yield back results to the caller.
-        assert shard_index in shards_remaining
-        shards_remaining.remove(shard_index)
-        yield shard_index, result
-
-    finally:
-      # Done or aborted with Ctrl+C, kill the remaining threads.
-      should_stop.set()
-
-
-def decorate_shard_output(swarming, shard_index, metadata):
-  """Returns wrapped output for swarming task shard."""
-  if metadata.get('started_ts') and not metadata.get('deduped_from'):
-    pending = '%.1fs' % (
-        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
-        ).total_seconds()
-  else:
-    pending = 'N/A'
-
-  if metadata.get('duration') is not None:
-    duration = '%.1fs' % metadata['duration']
-  else:
-    duration = 'N/A'
-
-  if metadata.get('exit_code') is not None:
-    # Integers are encoded as strings to not lose precision.
-    exit_code = '%s' % metadata['exit_code']
-  else:
-    exit_code = 'N/A'
-
-  bot_id = metadata.get('bot_id') or 'N/A'
-
-  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
-  tag_header = 'Shard %d  %s' % (shard_index, url)
-  tag_footer = (
-      'End of shard %d  Pending: %s  Duration: %s  Bot: %s  Exit: %s' % (
-      shard_index, pending, duration, bot_id, exit_code))
-
-  tag_len = max(len(tag_header), len(tag_footer))
-  dash_pad = '+-%s-+\n' % ('-' * tag_len)
-  tag_header = '| %s |\n' % tag_header.ljust(tag_len)
-  tag_footer = '| %s |\n' % tag_footer.ljust(tag_len)
-
-  header = dash_pad + tag_header + dash_pad
-  footer = dash_pad + tag_footer + dash_pad[:-1]
-  output = (metadata.get('output') or '').rstrip() + '\n'
-  return header + output + footer
-
-
-def collect(
-    swarming, task_ids, timeout, decorate, print_status_updates,
-    task_summary_json, task_output_dir, include_perf):
-  """Retrieves results of a Swarming task.
-
-  Returns:
-    process exit code that should be returned to the user.
-  """
-  # Collect summary JSON and output files (if task_output_dir is not None).
-  output_collector = TaskOutputCollector(task_output_dir, len(task_ids))
-
-  seen_shards = set()
-  exit_code = None
-  total_duration = 0
-  try:
-    for index, metadata in yield_results(
-        swarming, task_ids, timeout, None, print_status_updates,
-        output_collector, include_perf):
-      seen_shards.add(index)
-
-      # Default to failure if there was no process that even started.
-      shard_exit_code = metadata.get('exit_code')
-      if shard_exit_code:
-        # It's encoded as a string, so bool('0') is True.
-        shard_exit_code = int(shard_exit_code)
-      if shard_exit_code or exit_code is None:
-        exit_code = shard_exit_code
-      total_duration += metadata.get('duration', 0)
-
-      if decorate:
-        print(decorate_shard_output(swarming, index, metadata))
-        if len(seen_shards) < len(task_ids):
-          print('')
-      else:
-        print('%s: %s %s' % (
-            metadata.get('bot_id', 'N/A'),
-            metadata['task_id'],
-            shard_exit_code))
-        if metadata['output']:
-          output = metadata['output'].rstrip()
-          if output:
-            print(''.join('  %s\n' % l for l in output.splitlines()))
-  finally:
-    summary = output_collector.finalize()
-    if task_summary_json:
-      # TODO(maruel): Make this optional.
-      for i in summary['shards']:
-        if i:
-          convert_to_old_format(i)
-      tools.write_json(task_summary_json, summary, False)
-
-  if decorate and total_duration:
-    print('Total duration: %.1fs' % total_duration)
-
-  if len(seen_shards) != len(task_ids):
-    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
-    print >> sys.stderr, ('Results from some shards are missing: %s' %
-        ', '.join(map(str, missing_shards)))
-    return 1
-
-  return exit_code if exit_code is not None else 1
-
-
-### API management.
-
-
-class APIError(Exception):
-  pass
-
-
-def endpoints_api_discovery_apis(host):
-  """Uses Cloud Endpoints' API Discovery Service to returns metadata about all
-  the APIs exposed by a host.
-
-  https://developers.google.com/discovery/v1/reference/apis/list
-  """
-  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
-  # Endpoints version is turned down.
-  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
-  if data is None:
-    raise APIError('Failed to discover APIs on %s' % host)
-  out = {}
-  for api in data['items']:
-    if api['id'] == 'discovery:v1':
-      continue
-    # URL is of the following form:
-    # url = host + (
-    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version']))
-    api_data = net.url_read_json(api['discoveryRestUrl'])
-    if api_data is None:
-      raise APIError('Failed to discover %s on %s' % (api['id'], host))
-    out[api['id']] = api_data
-  return out
-
-
-### Commands.
-
-
-def abort_task(_swarming, _manifest):
-  """Given a task manifest that was triggered, aborts its execution."""
-  # TODO(vadimsh): Not supported by the server yet.
-
-
-def add_filter_options(parser):
-  parser.filter_group = optparse.OptionGroup(parser, 'Filtering slaves')
-  parser.filter_group.add_option(
-      '-d', '--dimension', default=[], action='append', nargs=2,
-      dest='dimensions', metavar='FOO bar',
-      help='dimension to filter on')
-  parser.add_option_group(parser.filter_group)
-
-
-def add_sharding_options(parser):
-  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
-  parser.sharding_group.add_option(
-      '--shards', type='int', default=1,
-      help='Number of shards to trigger and collect.')
-  parser.add_option_group(parser.sharding_group)
-
-
-def add_trigger_options(parser):
-  """Adds all options to trigger a task on Swarming."""
-  isolateserver.add_isolate_server_options(parser)
-  add_filter_options(parser)
-
-  parser.task_group = optparse.OptionGroup(parser, 'Task properties')
-  parser.task_group.add_option(
-      '-s', '--isolated',
-      help='Hash of the .isolated to grab from the isolate server')
-  parser.task_group.add_option(
-      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
-      help='Environment variables to set')
-  parser.task_group.add_option(
-      '--priority', type='int', default=100,
-      help='The lower the value, the more important the task is')
-  parser.task_group.add_option(
-      '-T', '--task-name',
-      help='Display name of the task. Defaults to '
-           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
-           'isolated file is provided; if a hash is provided, it defaults to '
-           '<user>/<dimensions>/<isolated hash>/<timestamp>')
-  parser.task_group.add_option(
-      '--tags', action='append', default=[],
-      help='Tags to assign to the task.')
-  parser.task_group.add_option(
-      '--user', default='',
-      help='User associated with the task. Defaults to authenticated user on '
-           'the server.')
-  parser.task_group.add_option(
-      '--idempotent', action='store_true', default=False,
-      help='When set, the server will actively try to find a previous task '
-           'with the same parameters and return its result instead if possible')
-  parser.task_group.add_option(
-      '--expiration', type='int', default=6*60*60,
-      help='Seconds to allow the task to be pending for a bot to run before '
-           'this task request expires.')
-  parser.task_group.add_option(
-      '--deadline', type='int', dest='expiration',
-      help=optparse.SUPPRESS_HELP)
-  parser.task_group.add_option(
-      '--hard-timeout', type='int', default=60*60,
-      help='Seconds to allow the task to complete.')
-  parser.task_group.add_option(
-      '--io-timeout', type='int', default=20*60,
-      help='Seconds to allow the task to be silent.')
-  parser.task_group.add_option(
-      '--raw-cmd', action='store_true', default=False,
-      help='When set, the command after -- is used as-is without run_isolated. '
-           'In this case, no .isolated file is expected.')
-  parser.task_group.add_option(
-      '--cipd-package', action='append', default=[],
-      help='CIPD packages to install on the Swarming bot.  Uses the format: '
-           'path:package_name:version')
-  parser.add_option_group(parser.task_group)
-
-
-def process_trigger_options(parser, options, args):
-  """Processes trigger options and uploads files to isolate server if necessary.
-  """
-  options.dimensions = dict(options.dimensions)
-  options.env = dict(options.env)
-
-  if not options.dimensions:
-    parser.error('Please specify at least one --dimension')
-  if options.raw_cmd:
-    if not args:
-      parser.error(
-          'Arguments with --raw-cmd should be passed after -- as command '
-          'delimiter.')
-    if options.isolate_server:
-      parser.error('Can\'t use both --raw-cmd and --isolate-server.')
-
-    command = args
-    if not options.task_name:
-      options.task_name = u'%s/%s' % (
-          options.user,
-          '_'.join(
-            '%s=%s' % (k, v)
-            for k, v in sorted(options.dimensions.iteritems())))
-    inputs_ref = None
-  else:
-    isolateserver.process_isolate_server_options(parser, options, False, True)
-    try:
-      command, inputs_ref = isolated_handle_options(options, args)
-    except ValueError as e:
-      parser.error(str(e))
-
-  cipd_packages = []
-  for p in options.cipd_package:
-    split = p.split(':', 2)
-    if len(split) != 3:
-      parser.error('CIPD packages must take the form: path:package:version')
-    cipd_packages.append(CipdPackage(
-        package_name=split[1],
-        path=split[0],
-        version=split[2]))
-  cipd_input = None
-  if cipd_packages:
-    cipd_input = CipdInput(
-        client_package=None,
-        packages=cipd_packages,
-        server=None)
-
-  # If inputs_ref.isolated is used, command is actually extra_args.
-  # Otherwise it's an actual command to run.
-  isolated_input = inputs_ref and inputs_ref.isolated
-  properties = TaskProperties(
-      cipd_input=cipd_input,
-      command=None if isolated_input else command,
-      dimensions=options.dimensions,
-      env=options.env,
-      execution_timeout_secs=options.hard_timeout,
-      extra_args=command if isolated_input else None,
-      grace_period_secs=30,
-      idempotent=options.idempotent,
-      inputs_ref=inputs_ref,
-      io_timeout_secs=options.io_timeout)
-  if not all(len(t.split(':', 1)) == 2 for t in options.tags):
-    parser.error('--tags must be in the format key:value')
-  return NewTaskRequest(
-      expiration_secs=options.expiration,
-      name=options.task_name,
-      parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
-      priority=options.priority,
-      properties=properties,
-      tags=options.tags,
-      user=options.user)
-
-
-def add_collect_options(parser):
-  parser.server_group.add_option(
-      '-t', '--timeout', type='float',
-      help='Timeout to wait for result, set to 0 for no timeout; defaults to '
-           'no wait')
-  parser.group_logging.add_option(
-      '--decorate', action='store_true', help='Decorate output')
-  parser.group_logging.add_option(
-      '--print-status-updates', action='store_true',
-      help='Print periodic status updates')
-  parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
-  parser.task_output_group.add_option(
-      '--task-summary-json',
-      metavar='FILE',
-      help='Dump a summary of task results to this file as json. It contains '
-           'only shard statuses as known to the server directly. Any output '
-           'files emitted by the task can be collected by using '
-           '--task-output-dir')
-  parser.task_output_group.add_option(
-      '--task-output-dir',
-      metavar='DIR',
-      help='Directory to put task results into. When the task finishes, this '
-           'directory contains a per-shard directory with output files produced '
-           'by shards: <task-output-dir>/<zero-based-shard-index>/.')
-  parser.task_output_group.add_option(
-      '--perf', action='store_true', default=False,
-      help='Includes performance statistics')
-  parser.add_option_group(parser.task_output_group)
-
-
-@subcommand.usage('bots...')
-def CMDbot_delete(parser, args):
-  """Forcibly deletes bots from the Swarming server."""
-  parser.add_option(
-      '-f', '--force', action='store_true',
-      help='Do not prompt for confirmation')
-  options, args = parser.parse_args(args)
-  if not args:
-    parser.error('Please specify bots to delete')
-
-  bots = sorted(args)
-  if not options.force:
-    print('Delete the following bots?')
-    for bot in bots:
-      print('  %s' % bot)
-    if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
-      print('Goodbye.')
-      return 1
-
-  result = 0
-  for bot in bots:
-    url = '%s/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
-    if net.url_read_json(url, data={}, method='POST') is None:
-      print('Deleting %s failed. Probably already gone' % bot)
-      result = 1
-  return result
-
-
-def CMDbots(parser, args):
-  """Returns information about the bots connected to the Swarming server."""
-  add_filter_options(parser)
-  parser.filter_group.add_option(
-      '--dead-only', action='store_true',
-      help='Only print dead bots, useful to reap them and reimage broken bots')
-  parser.filter_group.add_option(
-      '-k', '--keep-dead', action='store_true',
-      help='Do not filter out dead bots')
-  parser.filter_group.add_option(
-      '-b', '--bare', action='store_true',
-      help='Do not print out dimensions')
-  options, args = parser.parse_args(args)
-
-  if options.keep_dead and options.dead_only:
-    parser.error('Use only one of --keep-dead and --dead-only')
-
-  bots = []
-  cursor = None
-  limit = 250
-  # Iterate via cursors.
-  base_url = (
-      options.swarming + '/api/swarming/v1/bots/list?limit=%d' % limit)
-  while True:
-    url = base_url
-    if cursor:
-      url += '&cursor=%s' % urllib.quote(cursor)
-    data = net.url_read_json(url)
-    if data is None:
-      print >> sys.stderr, 'Failed to access %s' % options.swarming
-      return 1
-    bots.extend(data['items'])
-    cursor = data.get('cursor')
-    if not cursor:
-      break
-
-  for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
-    if options.dead_only:
-      if not bot.get('is_dead'):
-        continue
-    elif not options.keep_dead and bot.get('is_dead'):
-      continue
-
-    # If the user requested to filter on dimensions, ensure the bot has all the
-    # dimensions requested.
-    dimensions = {i['key']: i.get('value') for i in bot['dimensions']}
-    for key, value in options.dimensions:
-      if key not in dimensions:
-        break
-      # A bot can have multiple values for a key, for example,
-      # {'os': ['Windows', 'Windows-6.1']}, so that --dimension os=Windows will
-      # be accepted.
-      if isinstance(dimensions[key], list):
-        if value not in dimensions[key]:
-          break
-      else:
-        if value != dimensions[key]:
-          break
-    else:
-      print bot['bot_id']
-      if not options.bare:
-        print '  %s' % json.dumps(dimensions, sort_keys=True)
-        if bot.get('task_id'):
-          print '  task: %s' % bot['task_id']
-  return 0
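
For illustration only, the cursor-following pattern that CMDbots above (and
CMDquery below) implements, written as a generic sketch; iter_pages is a
hypothetical helper, and base_url is assumed to already carry query
parameters:

  def iter_pages(base_url):
    cursor = None
    while True:
      url = base_url + ('&cursor=%s' % urllib.quote(cursor) if cursor else '')
      data = net.url_read_json(url)
      if data is None:
        return
      for item in data.get('items', []):
        yield item
      cursor = data.get('cursor')
      if not cursor:
        return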
-
-
-@subcommand.usage('task_id')
-def CMDcancel(parser, args):
-  """Cancels a task."""
-  options, args = parser.parse_args(args)
-  if not args:
-    parser.error('Please specify the task to cancel')
-  for task_id in args:
-    url = '%s/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
-    if net.url_read_json(url, data={'task_id': task_id}, method='POST') is None:
-      print('Canceling %s failed. Probably already gone' % task_id)
-      return 1
-  return 0
-
-
-@subcommand.usage('--json file | task_id...')
-def CMDcollect(parser, args):
-  """Retrieves results of one or multiple Swarming task by its ID.
-
-  The result can be in multiple part if the execution was sharded. It can
-  potentially have retries.
-  """
-  add_collect_options(parser)
-  parser.add_option(
-      '-j', '--json',
-      help='Load the task ids from .json as saved by trigger --dump-json')
-  options, args = parser.parse_args(args)
-  if not args and not options.json:
-    parser.error('Must specify at least one task id or --json.')
-  if args and options.json:
-    parser.error('Only use one of task id or --json.')
-
-  if options.json:
-    options.json = unicode(os.path.abspath(options.json))
-    try:
-      with fs.open(options.json, 'rb') as f:
-        data = json.load(f)
-    except (IOError, ValueError):
-      parser.error('Failed to open %s' % options.json)
-    try:
-      tasks = sorted(
-          data['tasks'].itervalues(), key=lambda x: x['shard_index'])
-      args = [t['task_id'] for t in tasks]
-    except (KeyError, TypeError):
-      parser.error('Failed to process %s' % options.json)
-    if options.timeout is None:
-      options.timeout = (
-          data['request']['properties']['execution_timeout_secs'] +
-          data['request']['expiration_secs'] + 10.)
-  else:
-    valid = frozenset('0123456789abcdef')
-    if any(not valid.issuperset(task_id) for task_id in args):
-      parser.error('Task ids are 0-9a-f.')
-
-  try:
-    return collect(
-        options.swarming,
-        args,
-        options.timeout,
-        options.decorate,
-        options.print_status_updates,
-        options.task_summary_json,
-        options.task_output_dir,
-        options.perf)
-  except Failure:
-    on_error.report(None)
-    return 1
-
-
-@subcommand.usage('[filename]')
-def CMDput_bootstrap(parser, args):
-  """Uploads a new version of bootstrap.py."""
-  options, args = parser.parse_args(args)
-  if len(args) != 1:
-    parser.error('Must specify file to upload')
-  url = options.swarming + '/api/swarming/v1/server/put_bootstrap'
-  path = unicode(os.path.abspath(args[0]))
-  with fs.open(path, 'rb') as f:
-    content = f.read().decode('utf-8')
-  data = net.url_read_json(url, data={'content': content})
-  print data
-  return 0
-
-
-@subcommand.usage('[filename]')
-def CMDput_bot_config(parser, args):
-  """Uploads a new version of bot_config.py."""
-  options, args = parser.parse_args(args)
-  if len(args) != 1:
-    parser.error('Must specify file to upload')
-  url = options.swarming + '/api/swarming/v1/server/put_bot_config'
-  path = unicode(os.path.abspath(args[0]))
-  with fs.open(path, 'rb') as f:
-    content = f.read().decode('utf-8')
-  data = net.url_read_json(url, data={'content': content})
-  print data
-  return 0
-
-
-@subcommand.usage('[method name]')
-def CMDquery(parser, args):
-  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
-  gather the list of API methods from the server.
-
-  Examples:
-    Listing all bots:
-      swarming.py query -S server-url.com bots/list
-
-    Listing last 10 tasks on a specific bot named 'swarm1':
-      swarming.py query -S server-url.com --limit 10 bot/swarm1/tasks
-
-    Listing last 10 tasks with tags os:Ubuntu-12.04 and pool:Chrome. Note that
-    quoting is important!:
-      swarming.py query -S server-url.com --limit 10 \\
-          'tasks/list?tags=os:Ubuntu-12.04&tags=pool:Chrome'
-  """
-  CHUNK_SIZE = 250
-
-  parser.add_option(
-      '-L', '--limit', type='int', default=200,
-      help='Limit to enforce on limitless items (like number of tasks); '
-           'default=%default')
-  parser.add_option(
-      '--json', help='Path to JSON output file (otherwise prints to stdout)')
-  parser.add_option(
-      '--progress', action='store_true',
-      help='Prints a dot at each request to show progress')
-  options, args = parser.parse_args(args)
-  if len(args) != 1:
-    parser.error(
-        'Must specify only method name and optionally query args properly '
-        'escaped.')
-  base_url = options.swarming + '/api/swarming/v1/' + args[0]
-  url = base_url
-  if options.limit:
-    # Best guess; change if this does not work out.
-    merge_char = '&' if '?' in url else '?'
-    url += '%slimit=%d' % (merge_char, min(CHUNK_SIZE, options.limit))
-  data = net.url_read_json(url)
-  if data is None:
-    # TODO(maruel): Do basic diagnostic.
-    print >> sys.stderr, 'Failed to access %s' % url
-    return 1
-
-  # Some endpoints support cursors. Automatically follow them whenever a
-  # 'cursor' item is present in the reply.
-  while (
-      data.get('cursor') and
-      (not options.limit or len(data['items']) < options.limit)):
-    merge_char = '&' if '?' in base_url else '?'
-    url = base_url + '%scursor=%s' % (merge_char, urllib.quote(data['cursor']))
-    if options.limit:
-      url += '&limit=%d' % min(CHUNK_SIZE, options.limit - len(data['items']))
-    if options.progress:
-      sys.stdout.write('.')
-      sys.stdout.flush()
-    new = net.url_read_json(url)
-    if new is None:
-      if options.progress:
-        print('')
-      print >> sys.stderr, 'Failed to access %s' % options.swarming
-      return 1
-    data['items'].extend(new.get('items', []))
-    data['cursor'] = new.get('cursor')
-
-  if options.progress:
-    print('')
-  if options.limit and len(data.get('items', [])) > options.limit:
-    data['items'] = data['items'][:options.limit]
-  data.pop('cursor', None)
-
-  if options.json:
-    options.json = unicode(os.path.abspath(options.json))
-    tools.write_json(options.json, data, True)
-  else:
-    try:
-      tools.write_json(sys.stdout, data, False)
-      sys.stdout.write('\n')
-    except IOError:
-      pass
-  return 0
-
-
-def CMDquery_list(parser, args):
-  """Returns list of all the Swarming APIs that can be used with command
-  'query'.
-  """
-  parser.add_option(
-      '--json', help='Path to JSON output file (otherwise prints to stdout)')
-  options, args = parser.parse_args(args)
-  if args:
-    parser.error('No argument allowed.')
-
-  try:
-    apis = endpoints_api_discovery_apis(options.swarming)
-  except APIError as e:
-    parser.error(str(e))
-  if options.json:
-    options.json = unicode(os.path.abspath(options.json))
-    with fs.open(options.json, 'wb') as f:
-      json.dump(apis, f)
-  else:
-    help_url = (
-      'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
-      options.swarming)
-    for api_id, api in sorted(apis.iteritems()):
-      print api_id
-      print '  ' + api['description']
-      for resource_name, resource in sorted(api['resources'].iteritems()):
-        print ''
-        for method_name, method in sorted(resource['methods'].iteritems()):
-          # Only list the GET ones.
-          if method['httpMethod'] != 'GET':
-            continue
-          print '- %s.%s: %s' % (
-              resource_name, method_name, method['path'])
-          print '  ' + method['description']
-          print '  %s%s%s' % (help_url, api['servicePath'], method['id'])
-  return 0
-
-
-@subcommand.usage('(hash|isolated) [-- extra_args]')
-def CMDrun(parser, args):
-  """Triggers a task and wait for the results.
-
-  Basically, does everything to run a command remotely.
-  """
-  add_trigger_options(parser)
-  add_collect_options(parser)
-  add_sharding_options(parser)
-  options, args = parser.parse_args(args)
-  task_request = process_trigger_options(parser, options, args)
-  try:
-    tasks = trigger_task_shards(
-        options.swarming, task_request, options.shards)
-  except Failure as e:
-    on_error.report(
-        'Failed to trigger %s(%s): %s' %
-        (options.task_name, args[0], e.args[0]))
-    return 1
-  if not tasks:
-    on_error.report('Failed to trigger the task.')
-    return 1
-  print('Triggered task: %s' % options.task_name)
-  task_ids = [
-    t['task_id']
-    for t in sorted(tasks.itervalues(), key=lambda x: x['shard_index'])
-  ]
-  if options.timeout is None:
-    options.timeout = (
-        task_request.properties.execution_timeout_secs +
-        task_request.expiration_secs + 10.)
-  try:
-    return collect(
-        options.swarming,
-        task_ids,
-        options.timeout,
-        options.decorate,
-        options.print_status_updates,
-        options.task_summary_json,
-        options.task_output_dir,
-        options.perf)
-  except Failure:
-    on_error.report(None)
-    return 1
-
-
-@subcommand.usage('task_id -- <extra_args>')
-def CMDreproduce(parser, args):
-  """Runs a task locally that was triggered on the server.
-
-  This runs locally the same commands that were run on the bot. The data
-  downloaded will be in a subdirectory named 'work' of the current working
-  directory.
-
-  You can pass further additional arguments to the target command by passing
-  them after --.
-  """
-  parser.add_option(
-      '--output-dir', metavar='DIR', default='out',
-      help='Directory the results will be stored into')
-  options, args = parser.parse_args(args)
-  extra_args = []
-  if not args:
-    parser.error('Must specify exactly one task id.')
-  if len(args) > 1:
-    if args[1] == '--':
-      if len(args) > 2:
-        extra_args = args[2:]
-    else:
-      extra_args = args[1:]
-
-  url = options.swarming + '/api/swarming/v1/task/%s/request' % args[0]
-  request = net.url_read_json(url)
-  if not request:
-    print >> sys.stderr, 'Failed to retrieve request data for the task'
-    return 1
-
-  workdir = unicode(os.path.abspath('work'))
-  if fs.isdir(workdir):
-    parser.error('Please delete the directory \'work\' first')
-  fs.mkdir(workdir)
-
-  properties = request['properties']
-  env = None
-  if properties.get('env'):
-    env = os.environ.copy()
-    logging.info('env: %r', properties['env'])
-    for i in properties['env']:
-      key = i['key'].encode('utf-8')
-      if not i['value']:
-        env.pop(key, None)
-      else:
-        env[key] = i['value'].encode('utf-8')
-
-  if (properties.get('inputs_ref') or {}).get('isolated'):
-    # Create the tree.
-    with isolateserver.get_storage(
-          properties['inputs_ref']['isolatedserver'],
-          properties['inputs_ref']['namespace']) as storage:
-      bundle = isolateserver.fetch_isolated(
-          properties['inputs_ref']['isolated'],
-          storage,
-          isolateserver.MemoryCache(file_mode_mask=0700),
-          workdir,
-          False)
-      command = bundle.command
-      if bundle.relative_cwd:
-        workdir = os.path.join(workdir, bundle.relative_cwd)
-      command.extend(properties.get('extra_args') or [])
-    # https://github.com/luci/luci-py/blob/master/appengine/swarming/doc/Magic-Values.md
-    new_command = run_isolated.process_command(
-        command, options.output_dir, None)
-    if not options.output_dir and new_command != command:
-      parser.error('The task has outputs, you must use --output-dir')
-    command = new_command
-  else:
-    command = properties['command']
-  try:
-    return subprocess.call(command + extra_args, env=env, cwd=workdir)
-  except OSError as e:
-    print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
-    print >> sys.stderr, str(e)
-    return 1
-
-
-@subcommand.usage('bot_id')
-def CMDterminate(parser, args):
-  """Tells a bot to gracefully shut itself down as soon as it can.
-
-  This is done by completing whatever task is currently running, then exiting
-  the bot process.
-  """
-  parser.add_option(
-      '--wait', action='store_true', help='Wait for the bot to terminate')
-  options, args = parser.parse_args(args)
-  if len(args) != 1:
-    parser.error('Please provide the bot id')
-  url = options.swarming + '/api/swarming/v1/bot/%s/terminate' % args[0]
-  request = net.url_read_json(url, data={})
-  if not request:
-    print >> sys.stderr, 'Failed to ask for termination'
-    return 1
-  if options.wait:
-    return collect(
-        options.swarming, [request['task_id']], 0., False, False, None, None,
-        False)
-  return 0
-
-
-@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
-def CMDtrigger(parser, args):
-  """Triggers a Swarming task.
-
-  Accepts either the hash (sha1) of a .isolated file already uploaded or the
-  path to an .isolated file to archive.
-
-  If an .isolated file is specified instead of a hash, it is first archived.
-
-  Passes all extra arguments provided after '--' as additional command line
-  arguments for an isolated command specified in *.isolate file.
-  """
-  add_trigger_options(parser)
-  add_sharding_options(parser)
-  parser.add_option(
-      '--dump-json',
-      metavar='FILE',
-      help='Dump details about the triggered task(s) to this file as json')
-  options, args = parser.parse_args(args)
-  task_request = process_trigger_options(parser, options, args)
-  try:
-    tasks = trigger_task_shards(
-        options.swarming, task_request, options.shards)
-    if tasks:
-      print('Triggered task: %s' % options.task_name)
-      tasks_sorted = sorted(
-          tasks.itervalues(), key=lambda x: x['shard_index'])
-      if options.dump_json:
-        data = {
-          'base_task_name': options.task_name,
-          'tasks': tasks,
-          'request': task_request_to_raw_request(task_request),
-        }
-        tools.write_json(unicode(options.dump_json), data, True)
-        print('To collect results, use:')
-        print('  swarming.py collect -S %s --json %s' %
-            (options.swarming, options.dump_json))
-      else:
-        print('To collect results, use:')
-        print('  swarming.py collect -S %s %s' %
-            (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
-      print('Or visit:')
-      for t in tasks_sorted:
-        print('  ' + t['view_url'])
-    return int(not tasks)
-  except Failure:
-    on_error.report(None)
-    return 1
-
-
-class OptionParserSwarming(logging_utils.OptionParserWithLogging):
-  def __init__(self, **kwargs):
-    logging_utils.OptionParserWithLogging.__init__(
-        self, prog='swarming.py', **kwargs)
-    self.server_group = optparse.OptionGroup(self, 'Server')
-    self.server_group.add_option(
-        '-S', '--swarming',
-        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-        help='Swarming server to use')
-    self.add_option_group(self.server_group)
-    auth.add_auth_options(self)
-
-  def parse_args(self, *args, **kwargs):
-    options, args = logging_utils.OptionParserWithLogging.parse_args(
-        self, *args, **kwargs)
-    auth.process_auth_options(self, options)
-    user = self._process_swarming(options)
-    if hasattr(options, 'user') and not options.user:
-      options.user = user
-    return options, args
-
-  def _process_swarming(self, options):
-    """Processes the --swarming option and aborts if not specified.
-
-    Returns the identity as determined by the server.
-    """
-    if not options.swarming:
-      self.error('--swarming is required.')
-    try:
-      options.swarming = net.fix_url(options.swarming)
-    except ValueError as e:
-      self.error('--swarming %s' % e)
-    on_error.report_on_exception_exit(options.swarming)
-    try:
-      user = auth.ensure_logged_in(options.swarming)
-    except ValueError as e:
-      self.error(str(e))
-    return user
-
-
-def main(args):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  return dispatcher.execute(OptionParserSwarming(version=__version__), args)
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/swarming_client/tests/cipdserver_mock.py b/tools/swarming_client/tests/cipdserver_mock.py
deleted file mode 100644
index 97f6e8f..0000000
--- a/tools/swarming_client/tests/cipdserver_mock.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-
-import httpserver_mock
-
-
-class CipdServerHandler(httpserver_mock.MockHandler):
-  """An extremely minimal implementation of the cipd server API v1.0."""
-
-  ### Mocked HTTP Methods
-
-  def do_GET(self):
-    logging.info('GET %s', self.path)
-    if self.path in ('/on/load', '/on/quit'):
-      self._octet_stream('')
-    elif self.path == '/auth/api/v1/server/oauth_config':
-      self._json({
-        'client_id': 'c',
-        'client_not_so_secret': 's',
-        'primary_url': self.server.url})
-    elif self.path.startswith('/_ah/api/repo/v1/instance/resolve?'):
-      self._json({
-        'status': 'SUCCESS',
-        'instance_id': 'a' * 40,
-      })
-    elif self.path.startswith('/_ah/api/repo/v1/client?'):
-      self._json({
-        'status': 'SUCCESS',
-        'client_binary': {
-          'fetch_url': self.server.url + '/fake_google_storage/cipd_client',
-        },
-      })
-    elif self.path == '/fake_google_storage/cipd_client':
-      # The content is not actually used because run_isolated_test.py
-      # mocks popen.
-      self._octet_stream('#!/usr/sh\n')
-    else:
-      raise NotImplementedError(self.path)
-
-
-class MockCipdServer(httpserver_mock.MockServer):
-  _HANDLER_CLS = CipdServerHandler
diff --git a/tools/swarming_client/tests/file_path_test.py b/tools/swarming_client/tests/file_path_test.py
deleted file mode 100755
index e73e494..0000000
--- a/tools/swarming_client/tests/file_path_test.py
+++ /dev/null
@@ -1,420 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import getpass
-import logging
-import os
-import tempfile
-import unittest
-import StringIO
-import subprocess
-import sys
-import time
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(BASE_DIR)
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-FILE_PATH = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
-
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-import test_utils
-from utils import file_path
-from utils import fs
-
-
-def write_content(filepath, content):
-  with fs.open(filepath, 'wb') as f:
-    f.write(content)
-
-
-class FilePathTest(auto_stub.TestCase):
-  def setUp(self):
-    super(FilePathTest, self).setUp()
-    self._tempdir = None
-
-  def tearDown(self):
-    try:
-      if self._tempdir:
-        for dirpath, dirnames, filenames in fs.walk(
-            self._tempdir, topdown=True):
-          for filename in filenames:
-            file_path.set_read_only(os.path.join(dirpath, filename), False)
-          for dirname in dirnames:
-            file_path.set_read_only(os.path.join(dirpath, dirname), False)
-        file_path.rmtree(self._tempdir)
-    finally:
-      super(FilePathTest, self).tearDown()
-
-  @property
-  def tempdir(self):
-    if not self._tempdir:
-      self._tempdir = tempfile.mkdtemp(prefix=u'run_isolated_test')
-    return self._tempdir
-
-  def test_atomic_replace_new_file(self):
-    path = os.path.join(self.tempdir, 'new_file')
-    file_path.atomic_replace(path, 'blah')
-    with open(path, 'rb') as f:
-      self.assertEqual('blah', f.read())
-    self.assertEqual([u'new_file'], os.listdir(self.tempdir))
-
-  def test_atomic_replace_existing_file(self):
-    path = os.path.join(self.tempdir, 'existing_file')
-    with open(path, 'wb') as f:
-      f.write('existing body')
-    file_path.atomic_replace(path, 'new body')
-    with open(path, 'rb') as f:
-      self.assertEqual('new body', f.read())
-    self.assertEqual([u'existing_file'], os.listdir(self.tempdir))
-
-  def assertFileMode(self, filepath, mode, umask=None):
-    umask = test_utils.umask() if umask is None else umask
-    actual = fs.stat(filepath).st_mode
-    expected = mode & ~umask
-    self.assertEqual(
-        expected,
-        actual,
-        (filepath, oct(expected), oct(actual), oct(umask)))
-
-  def assertMaskedFileMode(self, filepath, mode):
-    """It's usually when the file was first marked read only."""
-    self.assertFileMode(filepath, mode, 0 if sys.platform == 'win32' else 077)
-
-  def test_native_case_end_with_os_path_sep(self):
-    # Make sure the trailing os.path.sep is kept.
-    path = file_path.get_native_path_case(ROOT_DIR) + os.path.sep
-    self.assertEqual(file_path.get_native_path_case(path), path)
-
-  def test_native_case_end_with_dot_os_path_sep(self):
-    path = file_path.get_native_path_case(ROOT_DIR + os.path.sep)
-    self.assertEqual(
-        file_path.get_native_path_case(path + '.' + os.path.sep),
-        path)
-
-  def test_native_case_non_existing(self):
-    # Make sure it doesn't throw on non-existing files.
-    non_existing = 'trace_input_test_this_file_should_not_exist'
-    path = os.path.expanduser('~/' + non_existing)
-    self.assertFalse(os.path.exists(path))
-    path = file_path.get_native_path_case(ROOT_DIR) + os.path.sep
-    self.assertEqual(file_path.get_native_path_case(path), path)
-
-  def test_delete_wd_rf(self):
-    # Confirms that a RO file in a RW directory can be deleted on non-Windows.
-    dir_foo = os.path.join(self.tempdir, 'foo')
-    file_bar = os.path.join(dir_foo, 'bar')
-    fs.mkdir(dir_foo, 0777)
-    write_content(file_bar, 'bar')
-    file_path.set_read_only(dir_foo, False)
-    file_path.set_read_only(file_bar, True)
-    self.assertFileMode(dir_foo, 040777)
-    self.assertMaskedFileMode(file_bar, 0100444)
-    if sys.platform == 'win32':
-      # On Windows, a read-only file can't be deleted.
-      with self.assertRaises(OSError):
-        fs.remove(file_bar)
-    else:
-      fs.remove(file_bar)
-
-  def test_delete_rd_wf(self):
-    # Confirms that a RW file in a RO directory can be deleted on Windows only.
-    dir_foo = os.path.join(self.tempdir, 'foo')
-    file_bar = os.path.join(dir_foo, 'bar')
-    fs.mkdir(dir_foo, 0777)
-    write_content(file_bar, 'bar')
-    file_path.set_read_only(dir_foo, True)
-    file_path.set_read_only(file_bar, False)
-    self.assertMaskedFileMode(dir_foo, 040555)
-    self.assertFileMode(file_bar, 0100666)
-    if sys.platform == 'win32':
-      # A read-only directory has a convoluted meaning on Windows: it means
-      # that the directory is "personalized". This is used as a signal by
-      # Windows Explorer to tell it to look into the directory for desktop.ini.
-      # See http://support.microsoft.com/kb/326549 for more details.
-      # As such, it is important not to try to set the read-only bit on
-      # directories on Windows since it has no effect other than triggering
-      # Windows Explorer to look for desktop.ini, which is unnecessary.
-      fs.remove(file_bar)
-    else:
-      with self.assertRaises(OSError):
-        fs.remove(file_bar)
-
-  def test_delete_rd_rf(self):
-    # Confirms that a RO file in a RO directory can't be deleted.
-    dir_foo = os.path.join(self.tempdir, 'foo')
-    file_bar = os.path.join(dir_foo, 'bar')
-    fs.mkdir(dir_foo, 0777)
-    write_content(file_bar, 'bar')
-    file_path.set_read_only(dir_foo, True)
-    file_path.set_read_only(file_bar, True)
-    self.assertMaskedFileMode(dir_foo, 040555)
-    self.assertMaskedFileMode(file_bar, 0100444)
-    with self.assertRaises(OSError):
-      # It fails for different reason depending on the OS. See the test cases
-      # above.
-      fs.remove(file_bar)
-
-  def test_hard_link_mode(self):
-    # Creates a hard link, see if the file mode changed on the node or the
-    # directory entry.
-    dir_foo = os.path.join(self.tempdir, 'foo')
-    file_bar = os.path.join(dir_foo, 'bar')
-    file_link = os.path.join(dir_foo, 'link')
-    fs.mkdir(dir_foo, 0777)
-    write_content(file_bar, 'bar')
-    file_path.hardlink(file_bar, file_link)
-    self.assertFileMode(file_bar, 0100666)
-    self.assertFileMode(file_link, 0100666)
-    file_path.set_read_only(file_bar, True)
-    self.assertMaskedFileMode(file_bar, 0100444)
-    self.assertMaskedFileMode(file_link, 0100444)
-    # This is bad news for Windows; on Windows, the file must be writable to
-    # be deleted, but it is the file node that is modified. This means that
-    # every hard link must be reset to read-only after deleting one of the
-    # hard link directory entries.
-
-  def test_rmtree_unicode(self):
-    subdir = os.path.join(self.tempdir, 'hi')
-    fs.mkdir(subdir)
-    filepath = os.path.join(
-        subdir, u'\u0627\u0644\u0635\u064A\u0646\u064A\u0629')
-    with fs.open(filepath, 'wb') as f:
-      f.write('hi')
-    # In particular, it fails when the input argument is a str.
-    file_path.rmtree(str(subdir))
-
-  if sys.platform == 'darwin':
-    def test_native_case_symlink_wrong_case(self):
-      base_dir = file_path.get_native_path_case(BASE_DIR)
-      trace_inputs_dir = os.path.join(base_dir, 'trace_inputs')
-      actual = file_path.get_native_path_case(trace_inputs_dir)
-      self.assertEqual(trace_inputs_dir, actual)
-
-      # Make sure the symlink is not resolved.
-      data = os.path.join(trace_inputs_dir, 'Files2')
-      actual = file_path.get_native_path_case(data)
-      self.assertEqual(
-          os.path.join(trace_inputs_dir, 'files2'), actual)
-
-      data = os.path.join(trace_inputs_dir, 'Files2', '')
-      actual = file_path.get_native_path_case(data)
-      self.assertEqual(
-          os.path.join(trace_inputs_dir, 'files2', ''), actual)
-
-      data = os.path.join(trace_inputs_dir, 'Files2', 'Child1.py')
-      actual = file_path.get_native_path_case(data)
-      # TODO(maruel): Should be child1.py.
-      self.assertEqual(
-          os.path.join(trace_inputs_dir, 'files2', 'Child1.py'), actual)
-
-  if sys.platform in ('darwin', 'win32'):
-    def test_native_case_not_sensitive(self):
-      # The home directory is almost guaranteed to have mixed upper/lower case
-      # letters on both Windows and OSX.
-      # This test also ensures that the output is independent of the input
-      # string case.
-      path = os.path.expanduser(u'~')
-      self.assertTrue(os.path.isdir(path))
-      path = path.replace('/', os.path.sep)
-      if sys.platform == 'win32':
-        # Make sure the drive letter is upper case for consistency.
-        path = path[0].upper() + path[1:]
-      # This test assumes the variable is in the native path case on disk, this
-      # should be the case. Verify this assumption:
-      self.assertEqual(path, file_path.get_native_path_case(path))
-      self.assertEqual(
-          file_path.get_native_path_case(path.lower()),
-          file_path.get_native_path_case(path.upper()))
-
-    def test_native_case_not_sensitive_non_existent(self):
-      # This test also ensures that the output is independent of the input
-      # string case.
-      non_existing = os.path.join(
-          'trace_input_test_this_dir_should_not_exist', 'really not', '')
-      path = os.path.expanduser(os.path.join(u'~', non_existing))
-      path = path.replace('/', os.path.sep)
-      self.assertFalse(fs.exists(path))
-      lower = file_path.get_native_path_case(path.lower())
-      upper = file_path.get_native_path_case(path.upper())
-      # Make sure non-existing element is not modified:
-      self.assertTrue(lower.endswith(non_existing.lower()))
-      self.assertTrue(upper.endswith(non_existing.upper()))
-      self.assertEqual(lower[:-len(non_existing)], upper[:-len(non_existing)])
-
-  if sys.platform == 'win32':
-    def test_native_case_alternate_datastream(self):
-      # Create the file manually, since tempfile doesn't support ADS.
-      tempdir = unicode(tempfile.mkdtemp(prefix=u'trace_inputs'))
-      try:
-        tempdir = file_path.get_native_path_case(tempdir)
-        basename = 'foo.txt'
-        filename = basename + ':Zone.Identifier'
-        filepath = os.path.join(tempdir, filename)
-        open(filepath, 'w').close()
-        self.assertEqual(filepath, file_path.get_native_path_case(filepath))
-        data_suffix = ':$DATA'
-        self.assertEqual(
-            filepath + data_suffix,
-            file_path.get_native_path_case(filepath + data_suffix))
-
-        open(filepath + '$DATA', 'w').close()
-        self.assertEqual(
-            filepath + data_suffix,
-            file_path.get_native_path_case(filepath + data_suffix))
-        # Ensure the ADS weren't created as separate files. You love NTFS,
-        # don't you?
-        self.assertEqual([basename], fs.listdir(tempdir))
-      finally:
-        file_path.rmtree(tempdir)
-
-    def test_rmtree_win(self):
-      # Mock our sleep for faster test case execution.
-      sleeps = []
-      self.mock(time, 'sleep', sleeps.append)
-      self.mock(sys, 'stderr', StringIO.StringIO())
-
-      # Open a child process, so the file is locked.
-      subdir = os.path.join(self.tempdir, 'to_be_deleted')
-      fs.mkdir(subdir)
-      script = 'import time; open(\'a\', \'w\'); time.sleep(60)'
-      proc = subprocess.Popen([sys.executable, '-c', script], cwd=subdir)
-      try:
-        # Wait until the file exists.
-        while not fs.isfile(os.path.join(subdir, 'a')):
-          self.assertEqual(None, proc.poll())
-        file_path.rmtree(subdir)
-        self.assertEqual([2, 4, 2], sleeps)
-        # sys.stderr.getvalue() would return a fair amount of output but it is
-        # not completely deterministic so we're not testing it here.
-      finally:
-        proc.wait()
-
-    def test_filter_processes_dir_win(self):
-      python_dir = os.path.dirname(sys.executable)
-      processes = file_path.filter_processes_dir_win(
-          file_path.enum_processes_win(), python_dir)
-      self.assertTrue(processes)
-      proc_names = [proc.ExecutablePath for proc in processes]
-      # Try to find at least one python process.
-      self.assertTrue(
-          any(proc == sys.executable for proc in proc_names), proc_names)
-
-    def test_filter_processes_tree_win(self):
-      # Create a grand-child.
-      script = (
-        'import subprocess,sys;'
-        'proc = subprocess.Popen('
-          '[sys.executable, \'-u\', \'-c\', \'import time; print(1); '
-          'time.sleep(60)\'], stdout=subprocess.PIPE); '
-        # Signal grand child is ready.
-        'print(proc.stdout.read(1)); '
-        # Wait for parent to have completed the test.
-        'sys.stdin.read(1); '
-        'proc.kill()'
-      )
-      proc = subprocess.Popen(
-          [sys.executable, '-u', '-c', script],
-          stdin=subprocess.PIPE,
-          stdout=subprocess.PIPE)
-      try:
-        proc.stdout.read(1)
-        processes = file_path.filter_processes_tree_win(
-            file_path.enum_processes_win())
-        self.assertEqual(3, len(processes), processes)
-        proc.stdin.write('a')
-        proc.wait()
-      except Exception:
-        proc.kill()
-      finally:
-        proc.wait()
-
-  if sys.platform != 'win32':
-    def test_symlink(self):
-      # This test will fail if the checkout is in a symlink.
-      actual = file_path.split_at_symlink(None, ROOT_DIR)
-      expected = (ROOT_DIR, None, None)
-      self.assertEqual(expected, actual)
-
-      actual = file_path.split_at_symlink(
-          None, os.path.join(BASE_DIR, 'trace_inputs'))
-      expected = (
-          os.path.join(BASE_DIR, 'trace_inputs'), None, None)
-      self.assertEqual(expected, actual)
-
-      actual = file_path.split_at_symlink(
-          None, os.path.join(BASE_DIR, 'trace_inputs', 'files2'))
-      expected = (
-          os.path.join(BASE_DIR, 'trace_inputs'), 'files2', '')
-      self.assertEqual(expected, actual)
-
-      actual = file_path.split_at_symlink(
-          ROOT_DIR, os.path.join('tests', 'trace_inputs', 'files2'))
-      expected = (
-          os.path.join('tests', 'trace_inputs'), 'files2', '')
-      self.assertEqual(expected, actual)
-      actual = file_path.split_at_symlink(
-          ROOT_DIR, os.path.join('tests', 'trace_inputs', 'files2', 'bar'))
-      expected = (
-          os.path.join('tests', 'trace_inputs'), 'files2', '/bar')
-      self.assertEqual(expected, actual)
-
-    def test_native_case_symlink_right_case(self):
-      actual = file_path.get_native_path_case(
-          os.path.join(BASE_DIR, 'trace_inputs'))
-      self.assertEqual('trace_inputs', os.path.basename(actual))
-
-      # Make sure the symlink is not resolved.
-      actual = file_path.get_native_path_case(
-          os.path.join(BASE_DIR, 'trace_inputs', 'files2'))
-      self.assertEqual('files2', os.path.basename(actual))
-
-  else:
-    def test_undeleteable_chmod(self):
-      # Create a file and a directory with no permissions, then try to delete it.
-      dirpath = os.path.join(self.tempdir, 'd')
-      filepath = os.path.join(dirpath, 'f')
-      os.mkdir(dirpath)
-      with open(filepath, 'w') as f:
-        f.write('hi')
-      os.chmod(filepath, 0)
-      os.chmod(dirpath, 0)
-      file_path.rmtree(dirpath)
-
-    def test_undeleteable_owner(self):
-      # Create a file and a directory with an empty ACL. Then try to delete it.
-      dirpath = os.path.join(self.tempdir, 'd')
-      filepath = os.path.join(dirpath, 'f')
-      os.mkdir(dirpath)
-      with open(filepath, 'w') as f:
-        f.write('hi')
-      import win32security
-      user, _domain, _type = win32security.LookupAccountName(
-          '', getpass.getuser())
-      sd = win32security.SECURITY_DESCRIPTOR()
-      sd.Initialize()
-      sd.SetSecurityDescriptorOwner(user, False)
-      # Create an empty DACL, which removes all rights.
-      dacl = win32security.ACL()
-      dacl.Initialize()
-      sd.SetSecurityDescriptorDacl(1, dacl, 0)
-      win32security.SetFileSecurity(
-          fs.extend(filepath), win32security.DACL_SECURITY_INFORMATION, sd)
-      win32security.SetFileSecurity(
-          fs.extend(dirpath), win32security.DACL_SECURITY_INFORMATION, sd)
-      file_path.rmtree(dirpath)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  unittest.main()
diff --git a/tools/swarming_client/tests/httpserver_mock.py b/tools/swarming_client/tests/httpserver_mock.py
deleted file mode 100644
index d4d2cc5..0000000
--- a/tools/swarming_client/tests/httpserver_mock.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import BaseHTTPServer
-import json
-import logging
-import threading
-import urllib2
-
-
-class MockHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  def _json(self, data):
-    """Sends a JSON response."""
-    self.send_response(200)
-    self.send_header('Content-type', 'application/json')
-    self.end_headers()
-    json.dump(data, self.wfile)
-
-  def _octet_stream(self, data):
-    """Sends a binary response."""
-    self.send_response(200)
-    self.send_header('Content-type', 'application/octet-stream')
-    self.end_headers()
-    self.wfile.write(data)
-
-  def _read_body(self):
-    """Reads the request body."""
-    return self.rfile.read(int(self.headers['Content-Length']))
-
-  def _drop_body(self):
-    """Reads the request body."""
-    size = int(self.headers['Content-Length'])
-    while size:
-      chunk = min(4096, size)
-      self.rfile.read(chunk)
-      size -= chunk
-
-  def log_message(self, fmt, *args):
-    logging.info(
-        '%s - - [%s] %s', self.address_string(), self.log_date_time_string(),
-        fmt % args)
-
-
-class MockServer(object):
-  _HANDLER_CLS = None
-
-  def __init__(self):
-    self._closed = False
-    self._server = BaseHTTPServer.HTTPServer(
-        ('127.0.0.1', 0), self._HANDLER_CLS)
-    self._server.url = self.url = 'http://localhost:%d' % (
-      self._server.server_port)
-    self._thread = threading.Thread(target=self._run, name='httpd')
-    self._thread.daemon = True
-    self._thread.start()
-    logging.info('%s', self.url)
-
-  def close(self):
-    self.close_start()
-    self.close_end()
-
-  def close_start(self):
-    assert not self._closed
-    self._closed = True
-    urllib2.urlopen(self.url + '/on/quit')
-
-  def close_end(self):
-    assert self._closed
-    self._thread.join()
-
-  def _run(self):
-    while not self._closed:
-      self._server.handle_request()
diff --git a/tools/swarming_client/tests/isolate_format_test.py b/tools/swarming_client/tests/isolate_format_test.py
deleted file mode 100755
index eaf5440..0000000
--- a/tools/swarming_client/tests/isolate_format_test.py
+++ /dev/null
@@ -1,1005 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import cStringIO
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-import isolate_format
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-from utils import file_path
-
-
-# Access to a protected member XXX of a client class
-# pylint: disable=W0212
-
-
-FAKE_DIR = (
-    u'z:\\path\\to\\non_existing'
-    if sys.platform == 'win32' else u'/path/to/non_existing')
-
-
-class IsolateFormatTest(auto_stub.TestCase):
-  def test_unknown_key(self):
-    try:
-      isolate_format.verify_variables({'foo': [],})
-      self.fail()
-    except AssertionError:
-      pass
-
-  def test_unknown_var(self):
-    try:
-      isolate_format.verify_condition({'variables': {'foo': [],}}, {})
-      self.fail()
-    except AssertionError:
-      pass
-
-  def test_eval_content(self):
-    try:
-      # Intrinsics are not available.
-      isolate_format.eval_content('map(str, [1, 2])')
-      self.fail()
-    except NameError:
-      pass
-
-  def test_load_isolate_as_config_empty(self):
-    expected = {
-      (): {
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    self.assertEqual(
-        expected,
-        isolate_format.load_isolate_as_config(FAKE_DIR, {}, None).flatten())
-
-  def test_load_isolate_as_config(self):
-    value = {
-      'conditions': [
-        ['OS=="amiga" or OS=="atari" or OS=="coleco" or OS=="dendy"', {
-          'variables': {
-            'files': ['a', 'b', 'touched'],
-          },
-        }],
-        ['OS=="atari"', {
-          'variables': {
-            'files': ['c', 'd', 'touched_a', 'x'],
-            'command': ['echo', 'Hello World'],
-            'read_only': 2,
-          },
-        }],
-        ['OS=="amiga" or OS=="coleco" or OS=="dendy"', {
-          'variables': {
-            'files': ['e', 'f', 'touched_e', 'x'],
-            'command': ['echo', 'You should get an Atari'],
-          },
-        }],
-        ['OS=="amiga"', {
-          'variables': {
-            'files': ['g'],
-            'read_only': 1,
-          },
-        }],
-        ['OS=="amiga" or OS=="atari" or OS=="dendy"', {
-          'variables': {
-            'files': ['h'],
-          },
-        }],
-      ],
-    }
-    expected = {
-      (None,): {
-        'isolate_dir': FAKE_DIR,
-      },
-      ('amiga',): {
-        'files': ['a', 'b', 'e', 'f', 'g', 'h', 'touched', 'touched_e', 'x'],
-        'command': ['echo', 'You should get an Atari'],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 1,
-      },
-      ('atari',): {
-        'files': ['a', 'b', 'c', 'd', 'h', 'touched', 'touched_a', 'x'],
-        'command': ['echo', 'Hello World'],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 2,
-      },
-      ('coleco',): {
-        'files': ['a', 'b', 'e', 'f', 'touched', 'touched_e', 'x'],
-        'command': ['echo', 'You should get an Atari'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('dendy',): {
-        'files': ['a', 'b', 'e', 'f', 'h', 'touched', 'touched_e', 'x'],
-        'command': ['echo', 'You should get an Atari'],
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    self.assertEqual(
-        expected, isolate_format.load_isolate_as_config(
-            FAKE_DIR, value, None).flatten())
-
-  def test_load_isolate_as_config_duplicate_command(self):
-    value = {
-      'variables': {
-        'command': ['rm', '-rf', '/'],
-      },
-      'conditions': [
-        ['OS=="atari"', {
-          'variables': {
-            'command': ['echo', 'Hello World'],
-          },
-        }],
-      ],
-    }
-    try:
-      isolate_format.load_isolate_as_config(FAKE_DIR, value, None)
-      self.fail()
-    except AssertionError:
-      pass
-
-  def test_load_isolate_as_config_no_variable(self):
-    value = {
-      'variables': {
-        'command': ['echo', 'You should get an Atari'],
-        'files': ['a', 'b', 'touched'],
-        'read_only': 1,
-      },
-    }
-    # The key is the empty tuple, since there is no variable to bind to.
-    expected = {
-      (): {
-        'command': ['echo', 'You should get an Atari'],
-        'files': ['a', 'b', 'touched'],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 1,
-      },
-    }
-    self.assertEqual(
-        expected, isolate_format.load_isolate_as_config(
-            FAKE_DIR, value, None).flatten())
-
-  def test_merge_two_empty(self):
-    # Flat stays flat. Pylint is confused about union() return type.
-    # pylint: disable=E1103
-    actual = isolate_format.Configs(None, ()).union(
-        isolate_format.load_isolate_as_config(FAKE_DIR, {}, None)).union(
-            isolate_format.load_isolate_as_config(FAKE_DIR, {}, None))
-    expected = {
-      (): {
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    self.assertEqual(expected, actual.flatten())
-
-  def test_load_two_conditions(self):
-    linux = {
-      'conditions': [
-        ['OS=="linux"', {
-          'variables': {
-            'files': [
-              'file_linux',
-              'file_common',
-            ],
-          },
-        }],
-      ],
-    }
-    mac = {
-      'conditions': [
-        ['OS=="mac"', {
-          'variables': {
-            'files': [
-              'file_mac',
-              'file_common',
-            ],
-          },
-        }],
-      ],
-    }
-    expected = {
-      (None,): {
-        'isolate_dir': FAKE_DIR,
-      },
-      ('linux',): {
-        'files': ['file_common', 'file_linux'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('mac',): {
-        'files': ['file_common', 'file_mac'],
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    # Pylint is confused about union() return type.
-    # pylint: disable=E1103
-    configs = isolate_format.Configs(None, ()).union(
-        isolate_format.load_isolate_as_config(FAKE_DIR, linux, None)).union(
-            isolate_format.load_isolate_as_config(FAKE_DIR, mac, None)
-        ).flatten()
-    self.assertEqual(expected, configs)
-
-  def test_load_three_conditions(self):
-    linux = {
-      'conditions': [
-        ['OS=="linux" and chromeos==1', {
-          'variables': {
-            'files': [
-              'file_linux',
-              'file_common',
-            ],
-          },
-        }],
-      ],
-    }
-    mac = {
-      'conditions': [
-        ['OS=="mac" and chromeos==0', {
-          'variables': {
-            'files': [
-              'file_mac',
-              'file_common',
-            ],
-          },
-        }],
-      ],
-    }
-    win = {
-      'conditions': [
-        ['OS=="win" and chromeos==0', {
-          'variables': {
-            'files': [
-              'file_win',
-              'file_common',
-            ],
-          },
-        }],
-      ],
-    }
-    expected = {
-      (None, None): {
-        'isolate_dir': FAKE_DIR,
-      },
-      ('linux', 1): {
-        'files': ['file_common', 'file_linux'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('mac', 0): {
-        'files': ['file_common', 'file_mac'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('win', 0): {
-        'files': ['file_common', 'file_win'],
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    # Pylint is confused about union() return type.
-    # pylint: disable=E1103
-    configs = isolate_format.Configs(None, ()).union(
-        isolate_format.load_isolate_as_config(FAKE_DIR, linux, None)).union(
-            isolate_format.load_isolate_as_config(FAKE_DIR, mac, None)).union(
-                isolate_format.load_isolate_as_config(FAKE_DIR, win, None))
-    self.assertEqual(expected, configs.flatten())
-
-  def test_safe_index(self):
-    self.assertEqual(1, isolate_format._safe_index(('a', 'b'), 'b'))
-    self.assertEqual(None, isolate_format._safe_index(('a', 'b'), 'c'))
-
-  def test_get_map_keys(self):
-    self.assertEqual(
-        (0, None, 1), isolate_format._get_map_keys(('a', 'b', 'c'), ('a', 'c')))
-
-  def test_map_keys(self):
-    self.assertEqual(
-        ('a', None, 'c'),
-        isolate_format._map_keys((0, None, 1), ('a', 'c')))
-
-  def test_load_multi_variables(self):
-    # Load an .isolate with different condition on different variables.
-    data = {
-      'conditions': [
-        ['OS=="abc"', {
-          'variables': {
-            'command': ['bar'],
-          },
-        }],
-        ['CHROMEOS=="1"', {
-          'variables': {
-            'command': ['foo'],
-          },
-        }],
-      ],
-    }
-    configs = isolate_format.load_isolate_as_config(FAKE_DIR, data, None)
-    self.assertEqual(('CHROMEOS', 'OS'), configs.config_variables)
-    flatten = dict((k, v.flatten()) for k, v in configs._by_config.iteritems())
-    expected = {
-      (None, None): {
-        'isolate_dir': FAKE_DIR,
-      },
-      (None, 'abc'): {
-        'command': ['bar'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('1', None): {
-        'command': ['foo'],
-        'isolate_dir': FAKE_DIR,
-      },
-      # TODO(maruel): It is a conflict.
-      ('1', 'abc'): {
-        'command': ['bar'],
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    self.assertEqual(expected, flatten)
-
-  def test_union_multi_variables(self):
-    data1 = {
-      'conditions': [
-        ['OS=="abc"', {
-          'variables': {
-            'command': ['bar'],
-          },
-        }],
-      ],
-    }
-    data2 = {
-      'conditions': [
-        ['CHROMEOS=="1"', {
-          'variables': {
-            'command': ['foo'],
-          },
-        }],
-      ],
-    }
-    configs1 = isolate_format.load_isolate_as_config(FAKE_DIR, data1, None)
-    configs2 = isolate_format.load_isolate_as_config(FAKE_DIR, data2, None)
-    configs = configs1.union(configs2)
-    self.assertEqual(('CHROMEOS', 'OS'), configs.config_variables)
-    flatten = dict((k, v.flatten()) for k, v in configs._by_config.iteritems())
-    expected = {
-      (None, None): {
-        'isolate_dir': FAKE_DIR,
-      },
-      (None, 'abc'): {
-        'command': ['bar'],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('1', None): {
-        'command': ['foo'],
-        'isolate_dir': FAKE_DIR,
-      },
-    }
-    self.assertEqual(expected, flatten)
-
-  def test_ConfigSettings_union(self):
-    lhs_values = {}
-    rhs_values = {'files': ['data/', 'test/data/']}
-    lhs = isolate_format.ConfigSettings(lhs_values, '/src/net/third_party/nss')
-    rhs = isolate_format.ConfigSettings(rhs_values, '/src/base')
-    out = lhs.union(rhs)
-    expected = {
-      'files': ['data/', 'test/data/'],
-      'isolate_dir': '/src/base',
-    }
-    self.assertEqual(expected, out.flatten())
-
-  def test_configs_comment(self):
-    # Pylint is confused with isolate_format.union() return type.
-    # pylint: disable=E1103
-    configs = isolate_format.load_isolate_as_config(
-            FAKE_DIR, {}, '# Yo dawg!\n# Chill out.\n').union(
-        isolate_format.load_isolate_as_config(FAKE_DIR, {}, None))
-    self.assertEqual('# Yo dawg!\n# Chill out.\n', configs.file_comment)
-
-    configs = isolate_format.load_isolate_as_config(FAKE_DIR, {}, None).union(
-        isolate_format.load_isolate_as_config(
-            FAKE_DIR, {}, '# Yo dawg!\n# Chill out.\n'))
-    self.assertEqual('# Yo dawg!\n# Chill out.\n', configs.file_comment)
-
-    # Only keep the first one.
-    configs = isolate_format.load_isolate_as_config(
-        FAKE_DIR, {}, '# Yo dawg!\n').union(
-            isolate_format.load_isolate_as_config(
-                FAKE_DIR, {}, '# Chill out.\n'))
-    self.assertEqual('# Yo dawg!\n', configs.file_comment)
-
-  def test_extract_comment(self):
-    self.assertEqual(
-        '# Foo\n# Bar\n', isolate_format.extract_comment('# Foo\n# Bar\n{}'))
-    self.assertEqual('', isolate_format.extract_comment('{}'))
-
-  def _test_pretty_print_impl(self, value, expected):
-    actual = cStringIO.StringIO()
-    isolate_format.pretty_print(value, actual)
-    self.assertEqual(expected.splitlines(), actual.getvalue().splitlines())
-
-  def test_pretty_print_empty(self):
-    self._test_pretty_print_impl({}, '{\n}\n')
-
-  def test_pretty_print_mid_size(self):
-    value = {
-      'variables': {
-        'files': [
-          'file1',
-          'file2',
-        ],
-      },
-      'conditions': [
-        ['OS==\"foo\"', {
-          'variables': {
-            'files': [
-              'dir1/',
-              'dir2/',
-              'file3',
-              'file4',
-            ],
-            'command': ['python', '-c', 'print "H\\i\'"'],
-            'read_only': 2,
-          },
-        }],
-        ['OS==\"bar\"', {
-          'variables': {},
-        }],
-      ],
-    }
-    isolate_format.verify_root(value, {})
-    # This is an .isolate format.
-    expected = (
-        "{\n"
-        "  'variables': {\n"
-        "    'files': [\n"
-        "      'file1',\n"
-        "      'file2',\n"
-        "    ],\n"
-        "  },\n"
-        "  'conditions': [\n"
-        "    ['OS==\"foo\"', {\n"
-        "      'variables': {\n"
-        "        'command': [\n"
-        "          'python',\n"
-        "          '-c',\n"
-        "          'print \"H\\i\'\"',\n"
-        "        ],\n"
-        "        'files': [\n"
-        "          'dir1/',\n"
-        "          'dir2/',\n"
-        "          'file3',\n"
-        "          'file4',\n"
-        "        ],\n"
-        "        'read_only': 2,\n"
-        "      },\n"
-        "    }],\n"
-        "    ['OS==\"bar\"', {\n"
-        "      'variables': {\n"
-        "      },\n"
-        "    }],\n"
-        "  ],\n"
-        "}\n")
-    self._test_pretty_print_impl(value, expected)
-
-  def test_convert_old_to_new_else(self):
-    isolate_with_else_clauses = {
-      'conditions': [
-        ['OS=="mac"', {
-          'variables': {'foo': 'bar'},
-        }, {
-          'variables': {'x': 'y'},
-        }],
-      ],
-    }
-    with self.assertRaises(isolate_format.IsolateError):
-      isolate_format.load_isolate_as_config(
-          FAKE_DIR, isolate_with_else_clauses, None)
-
-  def test_match_configs(self):
-    expectations = [
-        (
-          ('OS=="win"', ('OS',), [('win',), ('mac',), ('linux',)]),
-          [('win',)],
-        ),
-        (
-          (
-            '(foo==1 or foo==2) and bar=="b"',
-            ['foo', 'bar'],
-            [(1, 'a'), (1, 'b'), (2, 'a'), (2, 'b')],
-          ),
-          [(1, 'b'), (2, 'b')],
-        ),
-        (
-          (
-            'bar=="b"',
-            ['foo', 'bar'],
-            [(1, 'a'), (1, 'b'), (2, 'a'), (2, 'b')],
-          ),
-          # TODO(maruel): When a free variable match is found, it should not
-          # list all the bounded values in addition. The problem arises when
-          # two different bound variables are each tested singly in two
-          # different conditions.
-          [(1, 'b'), (2, 'b'), (None, 'b')],
-        ),
-        (
-          (
-            'foo==1 or bar=="b"',
-            ['foo', 'bar'],
-            [(1, 'a'), (1, 'b'), (2, 'a'), (2, 'b')],
-          ),
-          # TODO(maruel): (None, 'b') would match.
-          # It is hard in this case to realize that each of the variables 'foo'
-          # and 'bar' can be unbounded in a specific case.
-          [(1, 'a'), (1, 'b'), (2, 'b'), (1, None)],
-        ),
-    ]
-    for data, expected in expectations:
-      self.assertEqual(expected, isolate_format.match_configs(*data))
-
-  def test_load_with_globals(self):
-    values = {
-      'variables': {
-        'files': [
-          'file_common',
-        ],
-      },
-      'conditions': [
-        ['OS=="linux"', {
-          'variables': {
-            'files': [
-              'file_linux',
-            ],
-            'read_only': 1,
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              'file_non_linux',
-            ],
-            'read_only': 0,
-          },
-        }],
-      ],
-    }
-    expected = {
-      (None,): {
-        'files': [
-          'file_common',
-        ],
-        'isolate_dir': FAKE_DIR,
-      },
-      ('linux',): {
-        'files': [
-          'file_linux',
-        ],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 1,
-      },
-      ('mac',): {
-        'files': [
-          'file_non_linux',
-        ],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 0,
-      },
-      ('win',): {
-        'files': [
-          'file_non_linux',
-        ],
-        'isolate_dir': FAKE_DIR,
-        'read_only': 0,
-      },
-    }
-    actual = isolate_format.load_isolate_as_config(FAKE_DIR, values, None)
-    self.assertEqual(expected, actual.flatten())
-
-  def test_and_or_bug(self):
-    a = {
-      'conditions': [
-        ['use_x11==0', {
-          'variables': {
-            'command': ['foo', 'x11=0'],
-          },
-        }],
-        ['OS=="linux" and chromeos==0', {
-          'variables': {
-            'command': ['foo', 'linux'],
-            },
-          }],
-        ],
-      }
-
-    def load_included_isolate(isolate_dir, _isolate_path):
-      return isolate_format.load_isolate_as_config(isolate_dir, a, None)
-    self.mock(isolate_format, 'load_included_isolate', load_included_isolate)
-
-    b = {
-      'conditions': [
-        ['use_x11==1', {
-          'variables': {
-            'command': ['foo', 'x11=1'],
-          },
-        }],
-      ],
-      'includes': [
-        'a',
-      ],
-    }
-    variables = {'use_x11': 1, 'OS': 'linux', 'chromeos': 0}
-    config = isolate_format.load_isolate_for_config('/', str(b), variables)
-    self.assertEqual((['foo', 'x11=1'], [], None, '/'), config)
-    variables = {'use_x11': 0, 'OS': 'linux', 'chromeos': 0}
-    config = isolate_format.load_isolate_for_config('/', str(b), variables)
-    self.assertEqual(([], [], None, '/'), config)
-
-
-class IsolateFormatTmpDirTest(unittest.TestCase):
-  def setUp(self):
-    super(IsolateFormatTmpDirTest, self).setUp()
-    self.tempdir = tempfile.mkdtemp(prefix=u'isolate_')
-
-  def tearDown(self):
-    try:
-      file_path.rmtree(self.tempdir)
-    finally:
-      super(IsolateFormatTmpDirTest, self).tearDown()
-
-  def test_load_with_includes(self):
-    included_isolate = {
-      'variables': {
-        'files': [
-          'file_common',
-        ],
-      },
-      'conditions': [
-        ['OS=="linux"', {
-          'variables': {
-            'files': [
-              'file_linux',
-            ],
-            'read_only': 1,
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              'file_non_linux',
-            ],
-            'read_only': 0,
-          },
-        }],
-      ],
-    }
-    with open(os.path.join(self.tempdir, 'included.isolate'), 'wb') as f:
-      isolate_format.pretty_print(included_isolate, f)
-    values = {
-      'includes': ['included.isolate'],
-      'variables': {
-        'files': [
-          'file_less_common',
-        ],
-      },
-      'conditions': [
-        ['OS=="mac"', {
-          'variables': {
-            'files': [
-              'file_mac',
-            ],
-            'read_only': 2,
-          },
-        }],
-      ],
-    }
-    actual = isolate_format.load_isolate_as_config(self.tempdir, values, None)
-
-    expected = {
-      (None,): {
-        'files': [
-          'file_common',
-          'file_less_common',
-        ],
-        'isolate_dir': self.tempdir,
-      },
-      ('linux',): {
-        'files': [
-          'file_linux',
-        ],
-        'isolate_dir': self.tempdir,
-        'read_only': 1,
-      },
-      ('mac',): {
-        'files': [
-          'file_mac',
-          'file_non_linux',
-        ],
-        'isolate_dir': self.tempdir,
-        'read_only': 2,
-      },
-      ('win',): {
-        'files': [
-          'file_non_linux',
-        ],
-        'isolate_dir': self.tempdir,
-        'read_only': 0,
-      },
-    }
-    self.assertEqual(expected, actual.flatten())
-
-  def test_load_with_includes_with_commands(self):
-    # This one is messy. Check that isolate_dir is the expected value. To
-    # achieve this, put the .isolate files into subdirectories.
-    dir_1 = os.path.join(self.tempdir, '1')
-    dir_3 = os.path.join(self.tempdir, '3')
-    dir_3_2 = os.path.join(self.tempdir, '3', '2')
-    os.mkdir(dir_1)
-    os.mkdir(dir_3)
-    os.mkdir(dir_3_2)
-
-    isolate1 = {
-      'conditions': [
-        ['OS=="amiga" or OS=="win"', {
-          'variables': {
-            'command': [
-              'foo', 'amiga_or_win',
-            ],
-          },
-        }],
-        ['OS=="linux"', {
-          'variables': {
-            'command': [
-              'foo', 'linux',
-            ],
-            'files': [
-              'file_linux',
-            ],
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              'file_non_linux',
-            ],
-          },
-        }],
-      ],
-    }
-    isolate2 = {
-      'conditions': [
-        ['OS=="linux" or OS=="mac"', {
-          'variables': {
-            'command': [
-              'foo', 'linux_or_mac',
-            ],
-            'files': [
-              'other/file',
-            ],
-          },
-        }],
-      ],
-    }
-    # Do not define command in isolate3, otherwise commands in the other
-    # included .isolate files will be ignored.
-    isolate3 = {
-      'includes': [
-        '../1/isolate1.isolate',
-        '2/isolate2.isolate',
-      ],
-      'conditions': [
-        ['OS=="amiga"', {
-          'variables': {
-            'files': [
-              'file_amiga',
-            ],
-          },
-        }],
-        ['OS=="mac"', {
-          'variables': {
-            'files': [
-              'file_mac',
-            ],
-          },
-        }],
-      ],
-    }
-    # No need to write isolate3.
-    with open(os.path.join(dir_1, 'isolate1.isolate'), 'wb') as f:
-      isolate_format.pretty_print(isolate1, f)
-    with open(os.path.join(dir_3_2, 'isolate2.isolate'), 'wb') as f:
-      isolate_format.pretty_print(isolate2, f)
-
-    # The 'isolate_dir' values are important; they are what will be used when
-    # defining the final isolate_dir to use to run the command in the
-    # .isolated file.
-    actual = isolate_format.load_isolate_as_config(dir_3, isolate3, None)
-    expected = {
-      (None,): {
-        # TODO(maruel): See TODO in ConfigSettings.flatten().
-        # TODO(maruel): If kept, in this case dir_3 should be selected.
-        'isolate_dir': dir_1,
-      },
-      ('amiga',): {
-        'command': ['foo', 'amiga_or_win'],
-        'files': [
-          # Note that the file was rebased from isolate1. This is important:
-          # isolate1 represents the canonical root path because it is the one
-          # that defined the command.
-          '../3/file_amiga',
-        ],
-        'isolate_dir': dir_1,
-      },
-      ('linux',): {
-        # Last included takes precedence. *command comes from isolate2*, so
-        # it becomes the canonical root, and references to files from isolate1
-        # are via '../../1'.
-        'command': ['foo', 'linux_or_mac'],
-        'files': [
-          '../../1/file_linux',
-          'other/file',
-        ],
-        'isolate_dir': dir_3_2,
-      },
-      ('mac',): {
-        'command': ['foo', 'linux_or_mac'],
-        'files': [
-          '../../1/file_non_linux',
-          '../file_mac',
-          'other/file',
-        ],
-        'isolate_dir': dir_3_2,
-      },
-      ('win',): {
-        # command comes from isolate1.
-        'command': ['foo', 'amiga_or_win'],
-        'files': [
-          # While this may be surprising, this is because the command was
-          # defined in isolate1, not isolate3.
-          'file_non_linux',
-        ],
-        'isolate_dir': dir_1,
-      },
-    }
-    self.assertEqual(expected, actual.flatten())
-
-  def test_load_with_includes_with_commands_and_variables(self):
-    # This one is the pinnacle of fun. Check that isolate_dir is the expected
-    # value. To achieve this, put the .isolate files into subdirectories.
-    dir_1 = os.path.join(self.tempdir, '1')
-    dir_3 = os.path.join(self.tempdir, '3')
-    dir_3_2 = os.path.join(self.tempdir, '3', '2')
-    os.mkdir(dir_1)
-    os.mkdir(dir_3)
-    os.mkdir(dir_3_2)
-
-    isolate1 = {
-      'conditions': [
-        ['OS=="amiga" or OS=="win"', {
-          'variables': {
-            'command': [
-              'foo', 'amiga_or_win', '<(PATH)',
-            ],
-          },
-        }],
-        ['OS=="linux"', {
-          'variables': {
-            'command': [
-              'foo', 'linux', '<(PATH)',
-            ],
-            'files': [
-              '<(PATH)/file_linux',
-            ],
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              '<(PATH)/file_non_linux',
-            ],
-          },
-        }],
-      ],
-    }
-    isolate2 = {
-      'conditions': [
-        ['OS=="linux" or OS=="mac"', {
-          'variables': {
-            'command': [
-              'foo', 'linux_or_mac', '<(PATH)',
-            ],
-            'files': [
-              '<(PATH)/other/file',
-            ],
-          },
-        }],
-      ],
-    }
-    # Do not define command in isolate3, otherwise commands in the other
-    # included .isolate files will be ignored.
-    isolate3 = {
-      'includes': [
-        '../1/isolate1.isolate',
-        '2/isolate2.isolate',
-      ],
-      'conditions': [
-        ['OS=="amiga"', {
-          'variables': {
-            'files': [
-              '<(PATH)/file_amiga',
-            ],
-          },
-        }],
-        ['OS=="mac"', {
-          'variables': {
-            'files': [
-              '<(PATH)/file_mac',
-            ],
-          },
-        }],
-      ],
-    }
-    # No need to write isolate3.
-    with open(os.path.join(dir_1, 'isolate1.isolate'), 'wb') as f:
-      isolate_format.pretty_print(isolate1, f)
-    with open(os.path.join(dir_3_2, 'isolate2.isolate'), 'wb') as f:
-      isolate_format.pretty_print(isolate2, f)
-
-    # The 'isolate_dir' values are important; they are what will be used when
-    # defining the final isolate_dir to use to run the command in the
-    # .isolated file.
-    actual = isolate_format.load_isolate_as_config(dir_3, isolate3, None)
-    expected = {
-      (None,): {
-        'isolate_dir': dir_1,
-      },
-      ('amiga',): {
-        'command': ['foo', 'amiga_or_win', '<(PATH)'],
-        'files': [
-          '<(PATH)/file_amiga',
-        ],
-        'isolate_dir': dir_1,
-      },
-      ('linux',): {
-        # Last included takes precedence. *command comes from isolate2*, so
-        # it becomes the canonical root, and references to files from isolate1
-        # are via '../../1'.
-        'command': ['foo', 'linux_or_mac', '<(PATH)'],
-        'files': [
-          '<(PATH)/file_linux',
-          '<(PATH)/other/file',
-        ],
-        'isolate_dir': dir_3_2,
-      },
-      ('mac',): {
-        'command': ['foo', 'linux_or_mac', '<(PATH)'],
-        'files': [
-          '<(PATH)/file_mac',
-          '<(PATH)/file_non_linux',
-          '<(PATH)/other/file',
-        ],
-        'isolate_dir': dir_3_2,
-      },
-      ('win',): {
-        # command comes from isolate1.
-        'command': ['foo', 'amiga_or_win', '<(PATH)'],
-        'files': [
-          '<(PATH)/file_non_linux',
-        ],
-        'isolate_dir': dir_1,
-      },
-    }
-    self.assertEqual(expected, actual.flatten())
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR,
-      format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s')
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  unittest.main()
diff --git a/tools/swarming_client/tests/isolate_smoke_test.py b/tools/swarming_client/tests/isolate_smoke_test.py
deleted file mode 100755
index ecbb297..0000000
--- a/tools/swarming_client/tests/isolate_smoke_test.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import cStringIO
-import hashlib
-import json
-import logging
-import os
-import re
-import stat
-import subprocess
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-import isolate
-import isolated_format
-from depot_tools import fix_encoding
-from utils import file_path
-
-import test_utils
-
-
-ALGO = hashlib.sha1
-HASH_NULL = ALGO().hexdigest()
-
-
-# These are per test case, not per mode.
-RELATIVE_CWD = {
-  'all_items_invalid': '.',
-  'fail': '.',
-  'missing_trailing_slash': '.',
-  'no_run': '.',
-  'non_existent': '.',
-  'split': '.',
-  'symlink_full': '.',
-  'symlink_partial': '.',
-  'symlink_outside_build_root': '.',
-  'touch_only': '.',
-  'touch_root': os.path.join('tests', 'isolate'),
-  'with_flag': '.',
-}
-
-DEPENDENCIES = {
-  'all_items_invalid': (
-    {
-      'tests/isolate/all_items_invalid.isolate':
-        """{
-          'variables': {
-            'command': ['python', 'empty.py'],
-            'files': [
-              # A single valid file so the command is valid and exits without
-              # an error.
-              'empty.py',
-              # File doesn't exist.
-              'A_file_that_does_not_exist',
-              # Directory missing trailing slash.
-              'files1',
-              # File doesn't exist.
-              'A_file_that_does_not_exist_either',
-            ],
-          },
-        }""",
-      'tests/isolate/empty.py': 'import sys; sys.exit(0)',
-    },
-    ['empty.py'],
-  ),
-  'fail': (
-    {
-      'tests/isolate/fail.isolate':
-        """{
-        'conditions': [
-          ['(OS=="linux" and chromeos==1) or '
-           '((OS=="mac" or OS=="win") and chromeos==0)', {
-            'variables': {
-              'command': ['python', 'fail.py'],
-              'files': ['fail.py'],
-            },
-          }],
-        ],
-      }""",
-      'tests/isolate/fail.py': 'import sys\nprint(\'Failing\')\nsys.exit(1)',
-    },
-    ['fail.py'],
-  ),
-  'missing_trailing_slash': (
-    {
-      # Directory missing trailing slash.
-      'tests/isolate/missing_trailing_slash.isolate':
-        "{'variables': {'files': ['files1']}}",
-      'tests/isolate/files1/foo': 'bar',
-    },
-    [],
-  ),
-  'no_run': (
-    {
-      'tests/isolate/files1/subdir/42.txt':
-          'the answer to life the universe and everything\n',
-      'tests/isolate/files1/test_file1.txt': 'Foo\n',
-      'tests/isolate/files1/test_file2.txt': 'Bar\n',
-      'tests/isolate/no_run.isolate':
-        """{
-            # Includes itself.
-          'variables': {'files': ['no_run.isolate', 'files1/']},
-        }""",
-    },
-    [
-      'no_run.isolate',
-      os.path.join('files1', 'subdir', '42.txt'),
-      os.path.join('files1', 'test_file1.txt'),
-      os.path.join('files1', 'test_file2.txt'),
-    ],
-  ),
-  'non_existent': (
-    {
-      'tests/isolate/non_existent.isolate':
-        "{'variables': {'files': ['A_file_that_do_not_exist']}}",
-    },
-    [],
-  ),
-  'split': (
-    {
-      'tests/isolate/files1/subdir/42.txt':
-          'the answer to life the universe and everything',
-      'tests/isolate/split.isolate':
-        """{
-          'variables': {
-            'command': ['python', 'split.py'],
-            'files': [
-              '<(DEPTH)/split.py',
-              '<(PRODUCT_DIR)/subdir/42.txt',
-              'test/data/foo.txt',
-            ],
-          },
-        }""",
-      'tests/isolate/split.py': "import sys; sys.exit(1)",
-      'tests/isolate/test/data/foo.txt': 'Split',
-    },
-    [
-      os.path.join('files1', 'subdir', '42.txt'),
-      os.path.join('test', 'data', 'foo.txt'),
-      'split.py',
-    ],
-  ),
-  'symlink_full': (
-    {
-      'tests/isolate/files1/subdir/42.txt':
-          'the answer to life the universe and everything\n',
-      'tests/isolate/files1/test_file1.txt': 'Foo\n',
-      'tests/isolate/files1/test_file2.txt': 'Bar\n',
-      'tests/isolate/files2': test_utils.SymLink('files1'),
-      'tests/isolate/symlink_full.isolate':
-        """{
-          'conditions': [
-            ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-             'chromeos==0)', {
-              'variables': {
-                'command': ['python', 'symlink_full.py'],
-                'files': ['files2/', 'symlink_full.py'],
-              },
-            }],
-          ],
-        }""",
-      'tests/isolate/symlink_full.py':
-        """if __name__ == '__main__':
-        import os, sys
-        print('symlink: touches files2/')
-        assert len(sys.argv) == 1
-        expected = {
-          os.path.join('subdir', '42.txt'):
-              'the answer to life the universe and everything\\n',
-          'test_file1.txt': 'Foo\\n',
-          'test_file2.txt': 'Bar\\n',
-        }
-        root = 'files2'
-        actual = {}
-        for relroot, dirnames, filenames in os.walk(root):
-          for filename in filenames:
-            fullpath = os.path.join(relroot, filename)
-            actual[fullpath[len(root)+1:]] = open(fullpath, 'rb').read()
-          if '.svn' in dirnames:
-            dirnames.remove('.svn')
-        if actual != expected:
-          print('Failure')
-          print(actual)
-          print(expected)
-          sys.exit(1)
-        """,
-    },
-    [
-      os.path.join('files1', 'subdir', '42.txt'),
-      os.path.join('files1', 'test_file1.txt'),
-      os.path.join('files1', 'test_file2.txt'),
-      # files2 is a symlink to files1.
-      'files2',
-      'symlink_full.py',
-    ],
-  ),
-  'symlink_partial': (
-    {
-      'tests/isolate/files1/subdir/42.txt':
-          'the answer to life the universe and everything\n',
-      'tests/isolate/files1/test_file1.txt': 'Foo\n',
-      'tests/isolate/files1/test_file2.txt': 'Bar\n',
-      'tests/isolate/files2': test_utils.SymLink('files1'),
-      'tests/isolate/symlink_partial.isolate':
-        """{
-          'conditions': [
-            ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-             'chromeos==0)', {
-              'variables': {
-                'command': ['python', 'symlink_partial.py'],
-                'files': ['files2/test_file2.txt', 'symlink_partial.py'],
-              },
-            }],
-          ],
-        }""",
-      'tests/isolate/symlink_partial.py':
-        """if __name__ == '__main__':
-        import os, sys
-        print('symlink: touches files2/test_file2.txt')
-        assert len(sys.argv) == 1
-        with open(os.path.join('files2', 'test_file2.txt'), 'rb') as f:
-          if 'Bar\\n' != f.read():
-            print('Failed')
-            sys.exit(1)
-        """,
-    },
-    [
-      os.path.join('files1', 'test_file2.txt'),
-      # files2 is a symlink to files1.
-      'files2',
-      'symlink_partial.py',
-    ],
-  ),
-  'symlink_outside_build_root': (
-    {
-      'tests/directory_outside_build_root/test_file3.txt': 'asdf\n',
-      'tests/isolate/link_outside_build_root':
-          test_utils.SymLink('../directory_outside_build_root'),
-      'tests/isolate/symlink_outside_build_root.isolate':
-        """{
-          'conditions': [
-            ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-             'chromeos==0)', {
-              'variables': {
-                'command': ['python', 'symlink_outside_build_root.py'],
-                'files': [
-                  'link_outside_build_root/',
-                  'symlink_outside_build_root.py',
-                ],
-              },
-            }],
-          ],
-        }""",
-      'tests/isolate/symlink_outside_build_root.py':
-        """if __name__ == '__main__':
-        import os, sys
-        print('symlink: touches link_outside_build_root/')
-        assert len(sys.argv) == 1
-        p = os.path.join('link_outside_build_root', 'test_file3.txt')
-        with open(p, 'rb') as f:
-          if 'asdf\\n' != f.read():
-            print('Failed')
-            sys.exit(1)
-        """,
-    },
-    [
-      os.path.join('link_outside_build_root', 'test_file3.txt'),
-      'symlink_outside_build_root.py',
-    ],
-  ),
-  'touch_only': (
-    {
-    },
-    [
-      'touch_only.py',
-      os.path.join('files1', 'test_file1.txt'),
-    ],
-  ),
-  'touch_root': (
-    {
-      'tests/isolate/touch_root.isolate':
-        """{
-          'conditions': [
-            ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-             'chromeos==0)', {
-              'variables': {
-                'command': ['python', 'touch_root.py'],
-                'files': ['../../at_root', 'touch_root.py'],
-              },
-            }],
-          ],
-        }""",
-      'tests/isolate/touch_root.py':
-        """if __name__ == '__main__':
-        import os, sys
-        print('child_touch_root: Verify the relative directories')
-        root_dir = os.path.dirname(os.path.abspath(
-            __file__.decode(sys.getfilesystemencoding())))
-        parent_dir, base = os.path.split(root_dir)
-        parent_dir, base2 = os.path.split(parent_dir)
-        if base != 'isolate' or base2 != 'tests':
-          print('Invalid root dir %s' % root_dir)
-          sys.exit(4)
-        content = open(os.path.join(parent_dir, 'at_root'), 'r').read()
-        sys.exit(int(content != 'foo'))""",
-      'at_root': 'foo',
-    },
-    [
-      os.path.join('tests', 'isolate', 'touch_root.py'),
-      'at_root',
-    ],
-  ),
-  'with_flag': (
-    {
-      'tests/isolate/files1/subdir/42.txt':
-          'the answer to life the universe and everything\n',
-      'tests/isolate/files1/test_file1.txt': 'Foo\n',
-      'tests/isolate/files1/test_file2.txt': 'Bar\n',
-      'tests/isolate/with_flag.isolate':
-        """{
-        'conditions': [
-          ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-           'chromeos==0)', {
-            'variables': {
-              'command': ['python', 'with_flag.py', '<(FLAG)'],
-              'files': ['files1/', 'with_flag.py'],
-            },
-          }],
-        ],
-      }""",
-      'tests/isolate/with_flag.py':
-        """if __name__ == '__main__':
-        import os, sys
-        print('with_flag: Verify the test data files were mapped properly')
-        assert len(sys.argv) == 2
-        mode = sys.argv[1]
-        assert mode in ('run', 'trace')
-        expected = {
-          os.path.join('subdir', '42.txt'):
-              'the answer to life the universe and everything\\n',
-          'test_file1.txt': 'Foo\\n',
-          'test_file2.txt': 'Bar\\n',
-        }
-        root = 'files1'
-        actual = {}
-        for relroot, dirnames, filenames in os.walk(root):
-          for filename in filenames:
-            fullpath = os.path.join(relroot, filename)
-            actual[fullpath[len(root)+1:]] = open(fullpath, 'r').read()
-          if mode == 'trace' and '.svn' in dirnames:
-            dirnames.remove('.svn')
-        if actual != expected:
-          print('Failure')
-          print(actual)
-          print(expected)
-          sys.exit(1)
-        root_dir = os.path.dirname(os.path.abspath(
-            __file__.decode(sys.getfilesystemencoding())))
-        parent_dir, base = os.path.split(root_dir)
-        if mode == 'trace':
-          # Verify the parent directory.
-          parent_dir, base2 = os.path.split(parent_dir)
-          if base != 'isolate' or base2 != 'tests':
-            print('mode trace: Invalid root dir %s' % root_dir)
-            sys.exit(4)
-        else:
-          # Verify that we are not inside a checkout.
-          if base == 'tests':
-            print('mode run: Invalid root dir %s' % root_dir)
-            sys.exit(5)
-        """,
-    },
-    [
-      'with_flag.py',
-      os.path.join('files1', 'subdir', '42.txt'),
-      os.path.join('files1', 'test_file1.txt'),
-      os.path.join('files1', 'test_file2.txt'),
-    ],
-  ),
-}
-
-
-SIMPLE_ISOLATE = {
-  'simple.isolate':
-    """{
-      'variables': {
-        'command': ['python', 'simple.py'],
-        'files': ['simple.py'],
-      },
-    }""",
-  'simple.py':
-    """if __name__ == '__main__':
-    import os, sys
-    actual = set(os.listdir('.'))
-    expected = set(['simple.py'])
-    if expected != actual:
-      print('Unexpected files: %s' % ', '.join(sorted(actual- expected)))
-      sys.exit(1)
-    print('Simply works.')
-    """,
-}
-
-
-class CalledProcessError(subprocess.CalledProcessError):
-  """Adds stderr data."""
-  def __init__(self, returncode, cmd, output, stderr, cwd):
-    super(CalledProcessError, self).__init__(returncode, cmd, output)
-    self.stderr = stderr
-    self.cwd = cwd
-
-  def __str__(self):
-    return super(CalledProcessError, self).__str__() + (
-        '\n'
-        'cwd=%s\n%s\n%s\n%s') % (
-            self.cwd,
-            self.output,
-            self.stderr,
-            ' '.join(self.cmd))
-
-
-def list_files_tree(directory):
-  """Returns the list of all the files in a tree."""
-  actual = []
-  for root, dirnames, filenames in os.walk(directory):
-    actual.extend(os.path.join(root, f)[len(directory)+1:] for f in filenames)
-    for dirname in dirnames:
-      full = os.path.join(root, dirname)
-      # Manually include symlinks.
-      if os.path.islink(full):
-        actual.append(full[len(directory)+1:])
-  return sorted(actual)
-
-
-def _isolate_dict_to_string(values):
-  buf = cStringIO.StringIO()
-  isolate.isolate_format.pretty_print(values, buf)
-  return buf.getvalue()
-
-
-def _wrap_in_condition(variables):
-  """Wraps a variables dict inside the current OS condition.
-
-  Returns the equivalent string.
-  """
-  return _isolate_dict_to_string(
-      {
-        'conditions': [
-          ['OS=="mac" and chromeos==0', {
-            'variables': variables
-          }],
-        ],
-      })
-
-
-def _fix_file_mode(filename, read_only):
-  """4 modes are supported, 0700 (rwx), 0600 (rw), 0500 (rx), 0400 (r)."""
-  min_mode = 0400
-  if not read_only:
-    min_mode |= 0200
-  return (min_mode | 0100) if filename.endswith('.py') else min_mode
-
-
-class Isolate(unittest.TestCase):
-  def test_help_modes(self):
-    # Check coherency in the help and implemented modes.
-    p = subprocess.Popen(
-        [sys.executable, os.path.join(ROOT_DIR, 'isolate.py'), '--help'],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        cwd=ROOT_DIR)
-    out = p.communicate()[0].splitlines()
-    self.assertEqual(0, p.returncode)
-    out = out[out.index('Commands are:') + 1:]
-    out = out[:out.index('')]
-    regexp = '^  (?:\x1b\\[\\d\\dm|)(\\w+)\s*(:?\x1b\\[\\d\\dm|) .+'
-    modes = [re.match(regexp, l) for l in out]
-    modes = [m.group(1) for m in modes if m]
-    EXPECTED_MODES = (
-        'archive',
-        'batcharchive',
-        'check',
-        'help',
-        'remap',
-        'run',
-    )
-    # If a new command is added it should at least has a bare test.
-    self.assertEqual(sorted(EXPECTED_MODES), sorted(modes))
-
-
-class IsolateTempdirBase(unittest.TestCase):
-  def setUp(self):
-    super(IsolateTempdirBase, self).setUp()
-    self.tempdir = file_path.get_native_path_case(
-        unicode(tempfile.mkdtemp(prefix=u'isolate_smoke_')))
-    self.isolated = os.path.join(self.tempdir, 'isolate_smoke_test.isolated')
-    self.isolate_dir = os.path.join(self.tempdir, 'isolate')
-
-  def tearDown(self):
-    try:
-      logging.debug(self.tempdir)
-      file_path.rmtree(self.tempdir)
-    finally:
-      super(IsolateTempdirBase, self).tearDown()
-
-  def make_tree(self, case=None):
-    case = case or self.case()
-    if not case:
-      return
-    test_utils.make_tree(self.isolate_dir, DEPENDENCIES[case][0])
-
-  def _gen_files(self, read_only, empty_file, with_time):
-    """Returns a dict of files like calling isolate.files_to_metadata() on each
-    file.
-
-    Arguments:
-    - read_only: Mark all the 'm' modes without the writeable bit.
-    - empty_file: Add a specific empty file (size 0).
-    - with_time: Include 't' timestamps. For saved state .state files.
-    """
-    root_dir = self.isolate_dir
-    if RELATIVE_CWD[self.case()] == '.':
-      root_dir = os.path.join(root_dir, 'tests', 'isolate')
-
-    files = {unicode(f): {} for f in DEPENDENCIES[self.case()][1]}
-    for relfile, v in files.iteritems():
-      filepath = os.path.join(root_dir, relfile)
-      filestats = os.lstat(filepath)
-      is_link = stat.S_ISLNK(filestats.st_mode)
-      if not is_link:
-        v[u's'] = int(filestats.st_size)
-        if sys.platform != 'win32':
-          v[u'm'] = _fix_file_mode(relfile, read_only)
-      if with_time:
-        # Used to skip recalculating the hash. Use the most recent update
-        # time.
-        v[u't'] = int(round(filestats.st_mtime))
-      if is_link:
-        v[u'l'] = os.readlink(filepath)  # pylint: disable=E1101
-      else:
-        # Upgrade the value to unicode so diffing the structure in case of
-        # test failure is easier, since the basestring type must match,
-        # str!=unicode.
-        v[u'h'] = unicode(isolated_format.hash_file(filepath, ALGO))
-
-    if empty_file:
-      item = files[empty_file]
-      item['h'] = unicode(HASH_NULL)
-      if sys.platform != 'win32':
-        item['m'] = 0400
-      item['s'] = 0
-      if with_time:
-        item.pop('t', None)
-    return files
-
-  def _expected_isolated(self, args, read_only, empty_file):
-    """Verifies self.isolated contains the expected data."""
-    expected = {
-      u'algo': u'sha-1',
-      u'files': self._gen_files(read_only, empty_file, False),
-      u'read_only': 1,
-      u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
-      u'version': unicode(isolated_format.ISOLATED_FILE_VERSION),
-    }
-    if read_only is not None:
-      expected[u'read_only'] = read_only
-    if args:
-      expected[u'command'] = [u'python'] + [unicode(x) for x in args]
-    with open(self.isolated, 'r') as f:
-      self.assertEqual(expected, json.load(f))
-
-  def _expected_saved_state(
-      self, args, read_only, empty_file, extra_vars, root_dir):
-    expected = {
-      u'OS': unicode(sys.platform),
-      u'algo': u'sha-1',
-      u'child_isolated_files': [],
-      u'command': [],
-      u'config_variables': {
-        u'OS': u'mac',
-        u'chromeos': 0,
-      },
-      u'extra_variables': {
-        u'EXECUTABLE_SUFFIX': u'.exe' if sys.platform == 'win32' else u'',
-      },
-      u'files': self._gen_files(read_only, empty_file, True),
-      u'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(unicode(self.filename())),
-          unicode(os.path.dirname(self.isolated))),
-      u'path_variables': {},
-      u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
-      u'root_dir': unicode(root_dir or os.path.dirname(self.filename())),
-      u'version': unicode(isolate.SavedState.EXPECTED_VERSION),
-    }
-    if args:
-      expected[u'command'] = [u'python'] + [unicode(x) for x in args]
-    expected['extra_variables'].update(extra_vars or {})
-    with open(self.saved_state(), 'r') as f:
-      self.assertEqual(expected, json.load(f))
-
-  def _expect_results(
-      self, args, read_only, extra_vars, empty_file, root_dir=None):
-    self._expected_isolated(args, read_only, empty_file)
-    self._expected_saved_state(
-        args, read_only, empty_file, extra_vars, root_dir)
-    # Also verifies run_isolated.py will be able to read it.
-    with open(self.isolated, 'rb') as f:
-      isolated_format.load_isolated(f.read(), ALGO)
-
-  def _expect_no_result(self):
-    self.assertFalse(os.path.exists(self.isolated))
-
-  def _get_cmd(self, mode):
-    return [
-      sys.executable, os.path.join(ROOT_DIR, 'isolate.py'),
-      mode,
-      '--isolated', self.isolated,
-      '--isolate', self.filename(),
-      '--config-variable', 'OS', 'mac',
-      '--config-variable', 'chromeos', '0',
-    ]
-
-  def _execute(self, mode, case, args, cwd=ROOT_DIR):
-    """Executes isolate.py."""
-    self.assertEqual(
-        case,
-        self.case() + '.isolate',
-        'Rename the test case to test_%s()' % case)
-    cmd = self._get_cmd(mode)
-    cmd.extend(args)
-
-    env = os.environ.copy()
-    if 'ISOLATE_DEBUG' in env:
-      del env['ISOLATE_DEBUG']
-    logging.debug(cmd)
-    p = subprocess.Popen(
-        cmd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        cwd=cwd,
-        env=env,
-        universal_newlines=True)
-    out, err = p.communicate()
-    if p.returncode:
-      raise CalledProcessError(p.returncode, cmd, out, err, cwd)
-
-    # Do not check on Windows since a lot of spew is generated there.
-    if sys.platform != 'win32':
-      self.assertTrue(err in (None, ''), err)
-    return out
-
-  def case(self):
-    """Returns the filename corresponding to this test case."""
-    test_id = self.id().split('.')
-    return re.match('^test_([a-z_]+)$', test_id[2]).group(1)
-
-  def filename(self):
-    """Returns the filename corresponding to this test case."""
-    filename = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', self.case() + '.isolate')
-    self.assertTrue(os.path.isfile(filename), filename)
-    return filename
-
-  def saved_state(self):
-    return isolate.isolatedfile_to_state(self.isolated)
-
-  def _test_all_items_invalid(self, mode):
-    out = self._execute(
-        mode, 'all_items_invalid.isolate', ['--ignore_broken_item'])
-    self._expect_results(['empty.py'], None, None, None)
-
-    return out or ''
-
-  def _test_missing_trailing_slash(self, mode):
-    try:
-      self._execute(mode, 'missing_trailing_slash.isolate', [])
-      self.fail()
-    except subprocess.CalledProcessError as e:
-      self.assertEqual('', e.output)
-      out = e.stderr
-    self._expect_no_result()
-    root = file_path.get_native_path_case(unicode(self.isolate_dir))
-    expected = (
-      'Input directory %s must have a trailing slash' %
-          os.path.join(root, 'tests', 'isolate', 'files1')
-    )
-    self.assertIn(expected, out)
-
-  def _test_non_existent(self, mode):
-    try:
-      self._execute(mode, 'non_existent.isolate', [])
-      self.fail()
-    except subprocess.CalledProcessError as e:
-      self.assertEqual('', e.output)
-      out = e.stderr
-    self._expect_no_result()
-    root = file_path.get_native_path_case(unicode(self.isolate_dir))
-    expected = (
-      'Input file %s doesn\'t exist' %
-          os.path.join(root, 'tests', 'isolate', 'A_file_that_do_not_exist')
-    )
-    self.assertIn(expected, out)
-
-
-class IsolateOutdir(IsolateTempdirBase):
-  def setUp(self):
-    super(IsolateOutdir, self).setUp()
-    # The tests assume the current directory is the file's directory.
-    os.mkdir(self.isolate_dir, 0700)
-    self.old_cwd = os.getcwd()
-    os.chdir(self.isolate_dir)
-    self.outdir = os.path.join(self.tempdir, 'isolated')
-
-  def tearDown(self):
-    os.chdir(self.old_cwd)
-    super(IsolateOutdir, self).tearDown()
-
-  def _expect_no_tree(self):
-    # No outdir was created.
-    self.assertFalse(os.path.exists(self.outdir))
-
-  def _result_tree(self):
-    return list_files_tree(self.outdir)
-
-  def _expected_tree(self):
-    """Verifies the files written in the temporary directory."""
-    self.assertEqual(
-        sorted(f for f in DEPENDENCIES[self.case()][1]), self._result_tree())
-
-  def _get_cmd(self, mode):
-    """Adds --outdir for the commands supporting it."""
-    cmd = super(IsolateOutdir, self)._get_cmd(mode)
-    cmd.extend(('--outdir', self.outdir))
-    return cmd
-
-  def _test_missing_trailing_slash(self, mode):
-    super(IsolateOutdir, self)._test_missing_trailing_slash(mode)
-    self._expect_no_tree()
-
-  def _test_non_existent(self, mode):
-    super(IsolateOutdir, self)._test_non_existent(mode)
-    self._expect_no_tree()
-
-
-class Isolate_check(IsolateTempdirBase):
-  def setUp(self):
-    super(Isolate_check, self).setUp()
-    self.make_tree()
-
-  def test_fail(self):
-    self._execute('check', 'fail.isolate', [])
-    self._expect_results(['fail.py'], None, None, None)
-
-  def test_missing_trailing_slash(self):
-    self._test_missing_trailing_slash('check')
-
-  def test_non_existent(self):
-    self._test_non_existent('check')
-
-  def test_all_items_invalid(self):
-    out = self._test_all_items_invalid('check')
-    self.assertEqual('', out)
-
-  def test_no_run(self):
-    self._execute('check', 'no_run.isolate', [])
-    self._expect_results([], None, None, None)
-
-  # TODO(csharp): Disabled until crbug.com/150823 is fixed.
-  def do_not_test_touch_only(self):
-    self._execute(
-        'check', 'touch_only.isolate', ['--extra-variable', 'FLAG', 'gyp'])
-    empty = os.path.join('files1', 'test_file1.txt')
-    self._expected_isolated(['touch_only.py', 'gyp'], None, empty)
-
-  def test_touch_root(self):
-    self._execute('check', 'touch_root.isolate', [])
-    self._expect_results(['touch_root.py'], None, None, None, self.isolate_dir)
-
-  def test_with_flag(self):
-    self._execute(
-        'check', 'with_flag.isolate', ['--extra-variable', 'FLAG', 'gyp'])
-    self._expect_results(
-        ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'}, None)
-
-  if sys.platform != 'win32':
-    def test_symlink_full(self):
-      self._execute('check', 'symlink_full.isolate', [])
-      self._expect_results(['symlink_full.py'], None, None, None)
-
-    def test_symlink_partial(self):
-      self._execute('check', 'symlink_partial.isolate', [])
-      self._expect_results(['symlink_partial.py'], None, None, None)
-
-    def test_symlink_outside_build_root(self):
-      self._execute('check', 'symlink_outside_build_root.isolate', [])
-      self._expect_results(['symlink_outside_build_root.py'], None, None, None)
-
-
-class Isolate_remap(IsolateOutdir):
-  def setUp(self):
-    super(Isolate_remap, self).setUp()
-    self.make_tree()
-
-  def test_fail(self):
-    self._execute('remap', 'fail.isolate', [])
-    self._expected_tree()
-    self._expect_results(['fail.py'], None, None, None)
-
-  def test_missing_trailing_slash(self):
-    self._test_missing_trailing_slash('remap')
-
-  def test_non_existent(self):
-    self._test_non_existent('remap')
-
-  def test_all_items_invalid(self):
-    out = self._test_all_items_invalid('remap')
-    self.assertTrue(out.startswith('Remapping'))
-    self._expected_tree()
-
-  def test_no_run(self):
-    self._execute('remap', 'no_run.isolate', [])
-    self._expected_tree()
-    self._expect_results([], None, None, None)
-
-  # TODO(csharp): Disabled until crbug.com/150823 is fixed.
-  def do_not_test_touch_only(self):
-    self._execute(
-        'remap', 'touch_only.isolate', ['--extra-variable', 'FLAG', 'gyp'])
-    self._expected_tree()
-    empty = os.path.join('files1', 'test_file1.txt')
-    self._expect_results(
-        ['touch_only.py', 'gyp'], None, {u'FLAG': u'gyp'}, empty)
-
-  def test_touch_root(self):
-    self._execute('remap', 'touch_root.isolate', [])
-    self._expected_tree()
-    self._expect_results(['touch_root.py'], None, None, None, self.isolate_dir)
-
-  def test_with_flag(self):
-    self._execute(
-        'remap', 'with_flag.isolate', ['--extra-variable', 'FLAG', 'gyp'])
-    self._expected_tree()
-    self._expect_results(
-        ['with_flag.py', 'gyp'], None, {u'FLAG': u'gyp'}, None)
-
-  if sys.platform != 'win32':
-    def test_symlink_full(self):
-      self._execute('remap', 'symlink_full.isolate', [])
-      self._expected_tree()
-      self._expect_results(['symlink_full.py'], None, None, None)
-
-    def test_symlink_partial(self):
-      self._execute('remap', 'symlink_partial.isolate', [])
-      self._expected_tree()
-      self._expect_results(['symlink_partial.py'], None, None, None)
-
-    def test_symlink_outside_build_root(self):
-      self._execute('remap', 'symlink_outside_build_root.isolate', [])
-      self._expected_tree()
-      self._expect_results(['symlink_outside_build_root.py'], None, None, None)
-
-
-class Isolate_run(IsolateTempdirBase):
-  def setUp(self):
-    super(Isolate_run, self).setUp()
-    self.make_tree()
-
-  def test_fail(self):
-    try:
-      self._execute('run', 'fail.isolate', [])
-      self.fail()
-    except subprocess.CalledProcessError:
-      pass
-    self._expect_results(['fail.py'], None, None, None)
-
-  def test_missing_trailing_slash(self):
-    self._test_missing_trailing_slash('run')
-
-  def test_non_existent(self):
-    self._test_non_existent('run')
-
-  def test_all_items_invalid(self):
-    out = self._test_all_items_invalid('run')
-    self.assertEqual('', out)
-
-  def test_no_run(self):
-    try:
-      self._execute('run', 'no_run.isolate', [])
-      self.fail()
-    except subprocess.CalledProcessError:
-      pass
-    self._expect_no_result()
-
-  # TODO(csharp): Disabled until crbug.com/150823 is fixed.
-  def do_not_test_touch_only(self):
-    self._execute(
-        'run', 'touch_only.isolate', ['--extra-variable', 'FLAG', 'run'])
-    empty = os.path.join('files1', 'test_file1.txt')
-    self._expect_results(
-        ['touch_only.py', 'run'], None, {u'FLAG': u'run'}, empty)
-
-  def test_touch_root(self):
-    self._execute('run', 'touch_root.isolate', [])
-    self._expect_results(['touch_root.py'], None, None, None, self.isolate_dir)
-
-  def test_with_flag(self):
-    self._execute(
-        'run', 'with_flag.isolate', ['--extra-variable', 'FLAG', 'run'])
-    self._expect_results(
-        ['with_flag.py', 'run'], None, {u'FLAG': u'run'}, None)
-
-  if sys.platform != 'win32':
-    def test_symlink_full(self):
-      self._execute('run', 'symlink_full.isolate', [])
-      self._expect_results(['symlink_full.py'], None, None, None)
-
-    def test_symlink_partial(self):
-      self._execute('run', 'symlink_partial.isolate', [])
-      self._expect_results(['symlink_partial.py'], None, None, None)
-
-    def test_symlink_outside_build_root(self):
-      self._execute('run', 'symlink_outside_build_root.isolate', [])
-      self._expect_results(['symlink_outside_build_root.py'], None, None, None)
-
-
-class IsolateNoOutdir(IsolateTempdirBase):
-  # Test without the --outdir flag.
-  # So all the files are first copied in the tempdir and the test is run from
-  # there.
-  def setUp(self):
-    super(IsolateNoOutdir, self).setUp()
-    self.make_tree('touch_root')
-
-  def _execute(self, mode, args):  # pylint: disable=W0221
-    """Executes isolate.py."""
-    cmd = [
-      sys.executable, os.path.join(ROOT_DIR, 'isolate.py'),
-      mode,
-      '--isolated', self.isolated,
-      '--config-variable', 'OS', 'mac',
-      '--config-variable', 'chromeos', '0',
-    ]
-    cmd.extend(args)
-
-    env = os.environ.copy()
-    if 'ISOLATE_DEBUG' in env:
-      del env['ISOLATE_DEBUG']
-    logging.debug(cmd)
-    cwd = self.tempdir
-    p = subprocess.Popen(
-        cmd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        cwd=cwd,
-        env=env,
-        universal_newlines=True)
-    out, err = p.communicate()
-    if p.returncode:
-      raise CalledProcessError(p.returncode, cmd, out, err, cwd)
-    return out
-
-  def mode(self):
-    """Returns the execution mode corresponding to this test case."""
-    test_id = self.id().split('.')
-    self.assertEqual(3, len(test_id))
-    self.assertEqual('__main__', test_id[0])
-    return re.match('^test_([a-z]+)$', test_id[2]).group(1)
-
-  def filename(self):
-    """Returns the filename corresponding to this test case."""
-    filename = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'touch_root.isolate')
-    self.assertTrue(os.path.isfile(filename), filename)
-    return filename
-
-  def test_check(self):
-    self._execute('check', ['--isolate', self.filename()])
-    files = sorted([
-      'isolate_smoke_test.isolated',
-      'isolate_smoke_test.isolated.state',
-      os.path.join('isolate', 'tests', 'isolate', 'touch_root.isolate'),
-      os.path.join('isolate', 'tests', 'isolate', 'touch_root.py'),
-      os.path.join('isolate', 'at_root'),
-    ])
-    self.assertEqual(files, list_files_tree(self.tempdir))
-
-  def test_remap(self):
-    with self.assertRaises(CalledProcessError):
-      self._execute('remap', ['--isolate', self.filename()])
-
-  def test_run(self):
-    self._execute('run', ['--isolate', self.filename()])
-    files = sorted([
-      'isolate_smoke_test.isolated',
-      'isolate_smoke_test.isolated.state',
-      os.path.join('isolate', 'tests', 'isolate', 'touch_root.isolate'),
-      os.path.join('isolate', 'tests', 'isolate', 'touch_root.py'),
-      os.path.join('isolate', 'at_root'),
-    ])
-    self.assertEqual(files, list_files_tree(self.tempdir))
-
-
-class IsolateOther(IsolateTempdirBase):
-  def test_run_mixed(self):
-    # Test when a user mapped from a directory and then replay from another
-    # directory. This is a very rare corner case.
-    indir = os.path.join(self.tempdir, 'input')
-    test_utils.make_tree(indir, SIMPLE_ISOLATE)
-    proc = subprocess.Popen(
-        [
-          sys.executable, 'isolate.py',
-          'check',
-          '-i', os.path.join(indir, 'simple.isolate'),
-          '-s', os.path.join(indir, 'simple.isolated'),
-          '--config-variable', 'OS', 'mac',
-        ],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        cwd=ROOT_DIR)
-    stdout = proc.communicate()[0]
-    self.assertEqual('', stdout)
-    self.assertEqual(0, proc.returncode)
-    expected = [
-      'simple.isolate', 'simple.isolated', 'simple.isolated.state', 'simple.py',
-    ]
-    self.assertEqual(expected, sorted(os.listdir(indir)))
-
-    # Remove the original directory.
-    indir2 = indir + '2'
-    os.rename(indir, indir2)
-
-    # simple.isolated.state is required; it contains the variables.
-    proc = subprocess.Popen(
-        [
-          sys.executable, 'isolate.py', 'run',
-          '-s', os.path.join(indir2, 'simple.isolated'),
-          '--skip-refresh',
-        ],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        cwd=ROOT_DIR,
-        universal_newlines=True)
-    stdout = proc.communicate()[0]
-    self.assertEqual(1, proc.returncode)
-    self.assertTrue('simple.py is missing' in stdout)
-
-  def test_empty_and_renamed(self):
-    a_isolate = os.path.join(self.tempdir, 'a.isolate')
-    with open(a_isolate, 'wb') as f:
-      f.write('{}')
-
-    cmd = [
-        sys.executable, 'isolate.py', 'check',
-        '-s', os.path.join(self.tempdir, 'out.isolated'),
-    ]
-    subprocess.check_call(cmd + ['-i', a_isolate], cwd=ROOT_DIR)
-
-    # Move the .isolate file aside and rerun the command with the new source but
-    # same destination.
-    b_isolate = os.path.join(self.tempdir, 'b.isolate')
-    os.rename(a_isolate, b_isolate)
-    subprocess.check_call(cmd + ['-i', b_isolate], cwd=ROOT_DIR)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  test_utils.main()
diff --git a/tools/swarming_client/tests/isolate_test.py b/tools/swarming_client/tests/isolate_test.py
deleted file mode 100755
index eaffdf3..0000000
--- a/tools/swarming_client/tests/isolate_test.py
+++ /dev/null
@@ -1,1628 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import cStringIO
-import hashlib
-import json
-import logging
-import optparse
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-import auth
-import isolate
-import isolate_format
-import isolated_format
-import isolateserver
-from utils import file_path
-from utils import logging_utils
-from utils import tools
-import test_utils
-
-ALGO = hashlib.sha1
-
-
-NO_RUN_ISOLATE = {
-  'tests/isolate/files1/subdir/42.txt':
-      'the answer to life the universe and everything\n',
-  'tests/isolate/files1/test_file1.txt': 'Foo\n',
-  'tests/isolate/files1/test_file2.txt': 'Bar\n',
-  'tests/isolate/no_run.isolate':
-    """{
-        # Includes itself.
-      'variables': {'files': ['no_run.isolate', 'files1/']},
-    }""",
-}
-
-SPLIT_ISOLATE = {
-  'tests/isolate/files1/subdir/42.txt':
-      'the answer to life the universe and everything',
-  'tests/isolate/split.isolate':
-    """{
-      'variables': {
-        'command': ['python', 'split.py'],
-        'files': [
-          '<(DEPTH)/split.py',
-          '<(PRODUCT_DIR)/subdir/42.txt',
-          'test/data/foo.txt',
-        ],
-      },
-    }""",
-  'tests/isolate/split.py': "import sys; sys.exit(1)",
-  'tests/isolate/test/data/foo.txt': 'Split',
-}
-
-
-TOUCH_ROOT_ISOLATE = {
-  'tests/isolate/touch_root.isolate':
-    """{
-      'conditions': [
-        ['(OS=="linux" and chromeos==1) or ((OS=="mac" or OS=="win") and '
-         'chromeos==0)', {
-          'variables': {
-            'command': ['python', 'touch_root.py'],
-            'files': ['../../at_root', 'touch_root.py'],
-          },
-        }],
-      ],
-    }""",
-  'tests/isolate/touch_root.py':
-    "def main():\n"
-    "  import os, sys\n"
-    "  print('child_touch_root: Verify the relative directories')\n"
-    "  root_dir = os.path.dirname(os.path.abspath(__file__))\n"
-    "  parent_dir, base = os.path.split(root_dir)\n"
-    "  parent_dir, base2 = os.path.split(parent_dir)\n"
-    "  if base != 'isolate' or base2 != 'tests':\n"
-    "    print 'Invalid root dir %s' % root_dir\n"
-    "    return 4\n"
-    "  content = open(os.path.join(parent_dir, 'at_root'), 'r').read()\n"
-    "  return int(content != 'foo')\n"
-    "\n"
-    "if __name__ == '__main__':\n"
-    "  sys.exit(main())\n",
-  'at_root': 'foo',
-}
-
-
-class IsolateBase(auto_stub.TestCase):
-  def setUp(self):
-    super(IsolateBase, self).setUp()
-    self.mock(auth, 'ensure_logged_in', lambda _: None)
-    self.old_cwd = os.getcwd()
-    self.cwd = file_path.get_native_path_case(
-        unicode(tempfile.mkdtemp(prefix=u'isolate_')))
-    # Everything should work even from another directory.
-    os.chdir(self.cwd)
-    self.mock(
-        logging_utils.OptionParserWithLogging, 'logger_root',
-        logging.Logger('unittest'))
-
-  def tearDown(self):
-    try:
-      os.chdir(self.old_cwd)
-      file_path.rmtree(self.cwd)
-    finally:
-      super(IsolateBase, self).tearDown()
-
-
-class IsolateTest(IsolateBase):
-  def test_savedstate_load_minimal(self):
-    # The file referenced by 'isolate_file' must exist even if its content is
-    # not read.
-    open(os.path.join(self.cwd, 'fake.isolate'), 'wb').close()
-    values = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'isolate_file': 'fake.isolate',
-    }
-    expected = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'config_variables': {},
-      'command': [],
-      'extra_variables': {},
-      'files': {},
-      'isolate_file': 'fake.isolate',
-      'path_variables': {},
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    saved_state = isolate.SavedState.load(values, self.cwd)
-    self.assertEqual(expected, saved_state.flatten())
-
-  def test_savedstate_load(self):
-    # The file referenced by 'isolate_file' must exist even if its content is
-    # not read.
-    open(os.path.join(self.cwd, 'fake.isolate'), 'wb').close()
-    values = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'config_variables': {},
-      'extra_variables': {
-        'foo': 42,
-      },
-      'isolate_file': 'fake.isolate',
-    }
-    expected = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': [],
-      'config_variables': {},
-      'extra_variables': {
-        'foo': 42,
-      },
-      'files': {},
-      'isolate_file': 'fake.isolate',
-      'path_variables': {},
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    saved_state = isolate.SavedState.load(values, self.cwd)
-    self.assertEqual(expected, saved_state.flatten())
-
-  def test_variable_arg(self):
-    parser = optparse.OptionParser()
-    isolate.add_isolate_options(parser)
-    options, args = parser.parse_args(
-        ['--config-variable', 'Foo', 'bar',
-          '--path-variable', 'Baz=sub=string',
-          '--extra-variable', 'biz', 'b uz=a'])
-    isolate.process_isolate_options(parser, options, require_isolated=False)
-
-    expected_path = {
-      'Baz': 'sub=string',
-    }
-    expected_config = {
-      'Foo': 'bar',
-    }
-    expected_extra = {
-      'biz': 'b uz=a',
-      'EXECUTABLE_SUFFIX': '.exe' if sys.platform == 'win32' else '',
-    }
-    self.assertEqual(expected_path, options.path_variables)
-    self.assertEqual(expected_config, options.config_variables)
-    self.assertEqual(expected_extra, options.extra_variables)
-    self.assertEqual([], args)
-
-  def test_variable_arg_fail(self):
-    parser = optparse.OptionParser()
-    isolate.add_isolate_options(parser)
-    self.mock(sys, 'stderr', cStringIO.StringIO())
-    with self.assertRaises(SystemExit):
-      parser.parse_args(['--config-variable', 'Foo'])
-
-  def test_blacklist_default(self):
-    ok = [
-      '.git2',
-      '.pyc',
-      '.swp',
-      'allo.git',
-      'foo',
-    ]
-    blocked = [
-      '.git',
-      os.path.join('foo', '.git'),
-      'foo.pyc',
-      'bar.swp',
-    ]
-    blacklist = tools.gen_blacklist(isolateserver.DEFAULT_BLACKLIST)
-    for i in ok:
-      self.assertFalse(blacklist(i), i)
-    for i in blocked:
-      self.assertTrue(blacklist(i), i)
-
-  def test_blacklist_custom(self):
-    ok = [
-      '.run_test_cases',
-      'testserver.log2',
-    ]
-    blocked = [
-      'foo.run_test_cases',
-      'testserver.log',
-      os.path.join('foo', 'testserver.log'),
-    ]
-    blacklist = tools.gen_blacklist([r'^.+\.run_test_cases$', r'^.+\.log$'])
-    for i in ok:
-      self.assertFalse(blacklist(i), i)
-    for i in blocked:
-      self.assertTrue(blacklist(i), i)
-
-  def test_read_only(self):
-    isolate_file = os.path.join(self.cwd, 'fake.isolate')
-    isolate_content = {
-      'variables': {
-        'read_only': 0,
-      },
-    }
-    tools.write_json(isolate_file, isolate_content, False)
-    expected = {
-      'algo': 'sha-1',
-      'files': {},
-      'read_only': 0,
-      'relative_cwd': '.',
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    complete_state = isolate.CompleteState(None, isolate.SavedState(self.cwd))
-    complete_state.load_isolate(
-        unicode(self.cwd), unicode(isolate_file), {}, {}, {}, None, False)
-    self.assertEqual(expected, complete_state.saved_state.to_isolated())
-
-
-class IsolateLoad(IsolateBase):
-  def setUp(self):
-    super(IsolateLoad, self).setUp()
-    self.directory = tempfile.mkdtemp(prefix=u'isolate_')
-    self.isolate_dir = os.path.join(self.directory, u'isolate')
-    self.isolated_dir = os.path.join(self.directory, u'isolated')
-    os.mkdir(self.isolated_dir, 0700)
-
-  def tearDown(self):
-    try:
-      file_path.rmtree(self.directory)
-    finally:
-      super(IsolateLoad, self).tearDown()
-
-  def _get_option(self, *isolatepath):
-    isolate_file = os.path.join(self.isolate_dir, *isolatepath)
-    class Options(object):
-      isolated = os.path.join(self.isolated_dir, 'foo.isolated')
-      outdir = os.path.join(self.directory, 'outdir')
-      isolate = isolate_file
-      blacklist = list(isolateserver.DEFAULT_BLACKLIST)
-      path_variables = {}
-      config_variables = {
-        'OS': 'linux',
-        'chromeos': 1,
-      }
-      extra_variables = {'foo': 'bar'}
-      ignore_broken_items = False
-    return Options()
-
-  def _cleanup_isolated(self, expected_isolated):
-    """Modifies isolated to remove the non-deterministic parts."""
-    if sys.platform == 'win32':
-      # 'm' are not saved in windows.
-      for values in expected_isolated['files'].itervalues():
-        self.assertTrue(values.pop('m'))
-
-  def _cleanup_saved_state(self, actual_saved_state):
-    for item in actual_saved_state['files'].itervalues():
-      self.assertTrue(item.pop('t'))
-
-  def make_tree(self, contents):
-    test_utils.make_tree(self.isolate_dir, contents)
-
-  def size(self, *args):
-    return os.stat(os.path.join(self.isolate_dir, *args)).st_size
-
-  def hash_file(self, *args):
-    p = os.path.join(*args)
-    if not os.path.isabs(p):
-      p = os.path.join(self.isolate_dir, p)
-    return isolated_format.hash_file(p, ALGO)
-
-  def test_load_stale_isolated(self):
-    # Data to be loaded in the .isolated file. Do not create a .state file.
-    self.make_tree(TOUCH_ROOT_ISOLATE)
-    input_data = {
-      'command': ['python'],
-      'files': {
-        'foo': {
-          "m": 0640,
-          "h": "invalid",
-          "s": 538,
-          "t": 1335146921,
-        },
-        os.path.join('tests', 'isolate', 'touch_root.py'): {
-          "m": 0750,
-          "h": "invalid",
-          "s": 538,
-          "t": 1335146921,
-        },
-      },
-    }
-    options = self._get_option('tests', 'isolate', 'touch_root.isolate')
-    tools.write_json(options.isolated, input_data, False)
-
-    # A CompleteState object contains two parts:
-    # - Result instance stored in complete_state.isolated, corresponding to the
-    #   .isolated file, is what is read by run_test_from_archive.py.
-    # - SavedState instance stored in compelte_state.saved_state,
-    #   corresponding to the .state file, which is simply to aid the developer
-    #   when re-running the same command multiple times and contain
-    #   discardable information.
-    complete_state = isolate.load_complete_state(options, self.cwd, None, False)
-    actual_isolated = complete_state.saved_state.to_isolated()
-    actual_saved_state = complete_state.saved_state.flatten()
-
-    expected_isolated = {
-      'algo': 'sha-1',
-      'command': ['python', 'touch_root.py'],
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-        u'at_root': {
-          'm': 0600,
-          'h': self.hash_file('at_root'),
-          's': self.size('at_root'),
-        },
-      },
-      'read_only': 1,
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    self._cleanup_isolated(expected_isolated)
-    self.assertEqual(expected_isolated, actual_isolated)
-
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'touch_root.isolate')
-    expected_saved_state = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': ['python', 'touch_root.py'],
-      'config_variables': {
-        'OS': 'linux',
-        'chromeos': options.config_variables['chromeos'],
-      },
-      'extra_variables': {
-        'foo': 'bar',
-      },
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-        u'at_root': {
-          'm': 0600,
-          'h': self.hash_file('at_root'),
-          's': self.size('at_root'),
-        },
-      },
-      'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          os.path.dirname(options.isolated)),
-      'path_variables': {},
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'root_dir': file_path.get_native_path_case(self.isolate_dir),
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-
-  def test_subdir(self):
-    # The resulting .isolated file will be missing ../../at_root. It is
-    # because this file is outside the --subdir parameter.
-    self.make_tree(TOUCH_ROOT_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'touch_root.isolate')
-    complete_state = isolate.load_complete_state(
-        options, self.cwd, os.path.join('tests', 'isolate'), False)
-    actual_isolated = complete_state.saved_state.to_isolated()
-    actual_saved_state = complete_state.saved_state.flatten()
-
-    expected_isolated =  {
-      'algo': 'sha-1',
-      'command': ['python', 'touch_root.py'],
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'read_only': 1,
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    self._cleanup_isolated(expected_isolated)
-    self.assertEqual(expected_isolated, actual_isolated)
-
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'touch_root.isolate')
-    expected_saved_state = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': ['python', 'touch_root.py'],
-      'config_variables': {
-        'OS': 'linux',
-        'chromeos': 1,
-      },
-      'extra_variables': {
-        'foo': 'bar',
-      },
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          os.path.dirname(options.isolated)),
-      'path_variables': {},
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'root_dir': file_path.get_native_path_case(self.isolate_dir),
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-
-  def test_subdir_variable(self):
-    # the resulting .isolated file will be missing ../../at_root. it is
-    # because this file is outside the --subdir parameter.
-    self.make_tree(TOUCH_ROOT_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'touch_root.isolate')
-    # Path variables are keyed on the directory containing the .isolate file.
-    options.path_variables['TEST_ISOLATE'] = '.'
-    # Note that options.isolated is in self.directory, which is a temporary
-    # directory.
-    complete_state = isolate.load_complete_state(
-        options, os.path.join(self.isolate_dir, 'tests', 'isolate'),
-        '<(TEST_ISOLATE)', False)
-    actual_isolated = complete_state.saved_state.to_isolated()
-    actual_saved_state = complete_state.saved_state.flatten()
-
-    expected_isolated =  {
-      'algo': 'sha-1',
-      'command': ['python', 'touch_root.py'],
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'read_only': 1,
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    self._cleanup_isolated(expected_isolated)
-    self.assertEqual(expected_isolated, actual_isolated)
-
-    # It is important to note:
-    # - the root directory is self.isolate_dir.
-    # - relative_cwd is tests/isolate.
-    # - TEST_ISOLATE is based of relative_cwd, so it represents tests/isolate.
-    # - anything outside TEST_ISOLATE was not included in the 'files' section.
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'touch_root.isolate')
-    expected_saved_state = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': ['python', 'touch_root.py'],
-      'config_variables': {
-        'OS': 'linux',
-        'chromeos': 1,
-      },
-      'extra_variables': {
-        'foo': 'bar',
-      },
-      'files': {
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          os.path.dirname(options.isolated)),
-      'path_variables': {
-        'TEST_ISOLATE': '.',
-      },
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'root_dir': file_path.get_native_path_case(self.isolate_dir),
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-
-  def test_variable_not_exist(self):
-    self.make_tree(TOUCH_ROOT_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'touch_root.isolate')
-    options.path_variables['PRODUCT_DIR'] = os.path.join(u'tests', u'isolate')
-    native_cwd = file_path.get_native_path_case(unicode(self.cwd))
-    try:
-      isolate.load_complete_state(options, self.cwd, None, False)
-      self.fail()
-    except isolate.ExecutionError, e:
-      self.assertEqual(
-          'PRODUCT_DIR=%s is not a directory' %
-            os.path.join(native_cwd, 'tests', 'isolate'),
-          e.args[0])
-
-  def test_variable(self):
-    self.make_tree(TOUCH_ROOT_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'touch_root.isolate')
-    options.path_variables['PRODUCT_DIR'] = os.path.join('tests', 'isolate')
-    complete_state = isolate.load_complete_state(
-        options, self.isolate_dir, None, False)
-    actual_isolated = complete_state.saved_state.to_isolated()
-    actual_saved_state = complete_state.saved_state.flatten()
-
-    expected_isolated =  {
-      'algo': 'sha-1',
-      'command': ['python', 'touch_root.py'],
-      'files': {
-        u'at_root': {
-          'm': 0600,
-          'h': self.hash_file('at_root'),
-          's': self.size('at_root'),
-        },
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'read_only': 1,
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    self._cleanup_isolated(expected_isolated)
-    self.assertEqual(expected_isolated, actual_isolated)
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'touch_root.isolate')
-    expected_saved_state = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': ['python', 'touch_root.py'],
-      'config_variables': {
-        'OS': 'linux',
-        'chromeos': 1,
-      },
-      'extra_variables': {
-        'foo': 'bar',
-      },
-      'files': {
-        u'at_root': {
-          'm': 0600,
-          'h': self.hash_file('at_root'),
-          's': self.size('at_root'),
-        },
-        os.path.join(u'tests', 'isolate', 'touch_root.py'): {
-          'm': 0700,
-          'h': self.hash_file('tests', 'isolate', 'touch_root.py'),
-          's': self.size('tests', 'isolate', 'touch_root.py'),
-        },
-      },
-      'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          os.path.dirname(options.isolated)),
-      'path_variables': {
-        'PRODUCT_DIR': '.',
-      },
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'root_dir': file_path.get_native_path_case(self.isolate_dir),
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-    self.assertEqual([], os.listdir(self.isolated_dir))
-
-  def test_root_dir_because_of_variable(self):
-    # Ensures that load_isolate() works even when path variables have deep root
-    # dirs. The end result is similar to touch_root.isolate, except that
-    # no_run.isolate doesn't reference '..' at all.
-    #
-    # A real world example would be PRODUCT_DIR=../../out/Release but nothing in
-    # this directory is mapped.
-    #
-    # Imagine base/base_unittests.isolate would not map anything in
-    # PRODUCT_DIR. In that case, the automatically determined root dir is
-    # src/base, since nothing outside this directory is mapped.
-    self.make_tree(NO_RUN_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'no_run.isolate')
-    # Any directory outside <self.isolate_dir>/tests/isolate.
-    options.path_variables['PRODUCT_DIR'] = 'third_party'
-    os.mkdir(os.path.join(self.isolate_dir, 'third_party'), 0700)
-    complete_state = isolate.load_complete_state(
-        options, self.isolate_dir, None, False)
-    actual_isolated = complete_state.saved_state.to_isolated()
-    actual_saved_state = complete_state.saved_state.flatten()
-
-    expected_isolated = {
-      'algo': 'sha-1',
-      'files': {
-        os.path.join(u'tests', 'isolate', 'files1', 'subdir', '42.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'files1', 'test_file1.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'test_file1.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'test_file1.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'files1', 'test_file2.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'test_file2.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'test_file2.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'no_run.isolate'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'no_run.isolate'),
-          's': self.size('tests', 'isolate', 'no_run.isolate'),
-        },
-      },
-      'read_only': 1,
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    self._cleanup_isolated(expected_isolated)
-    self.assertEqual(expected_isolated, actual_isolated)
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'no_run.isolate')
-    expected_saved_state = {
-      'OS': sys.platform,
-      'algo': 'sha-1',
-      'child_isolated_files': [],
-      'command': [],
-      'config_variables': {
-        'OS': 'linux',
-        'chromeos': 1,
-      },
-      'extra_variables': {
-        'foo': 'bar',
-      },
-      'files': {
-        os.path.join(u'tests', 'isolate', 'files1', 'subdir', '42.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'files1', 'test_file1.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'test_file1.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'test_file1.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'files1', 'test_file2.txt'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'files1', 'test_file2.txt'),
-          's': self.size('tests', 'isolate', 'files1', 'test_file2.txt'),
-        },
-        os.path.join(u'tests', 'isolate', 'no_run.isolate'): {
-          'm': 0600,
-          'h': self.hash_file('tests', 'isolate', 'no_run.isolate'),
-          's': self.size('tests', 'isolate', 'no_run.isolate'),
-        },
-      },
-      'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          os.path.dirname(options.isolated)),
-      'path_variables': {
-        'PRODUCT_DIR': os.path.join(u'..', '..', 'third_party'),
-      },
-      'relative_cwd': os.path.join(u'tests', 'isolate'),
-      'root_dir': file_path.get_native_path_case(self.isolate_dir),
-      'version': isolate.SavedState.EXPECTED_VERSION,
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-    self.assertEqual([], os.listdir(self.isolated_dir))
-
-  def test_chromium_split(self):
-    # Create an .isolate file and a tree of random stuff.
-    self.make_tree(SPLIT_ISOLATE)
-    options = self._get_option('tests', 'isolate', 'split.isolate')
-    options.path_variables = {
-      'DEPTH': '.',
-      'PRODUCT_DIR': os.path.join('files1'),
-    }
-    options.config_variables = {
-      'OS': 'linux',
-    }
-    complete_state = isolate.load_complete_state(
-        options, os.path.join(self.isolate_dir, 'tests', 'isolate'), None,
-        False)
-    # By saving the files, it forces splitting the data up.
-    complete_state.save_files()
-
-    actual_isolated_master = tools.read_json(
-        os.path.join(self.isolated_dir, 'foo.isolated'))
-    expected_isolated_master = {
-      u'algo': u'sha-1',
-      u'command': [u'python', u'split.py'],
-      u'files': {
-        u'split.py': {
-          u'm': 0700,
-          u'h': unicode(self.hash_file('tests', 'isolate', 'split.py')),
-          u's': self.size('tests', 'isolate', 'split.py'),
-        },
-      },
-      u'includes': [
-        unicode(self.hash_file(self.isolated_dir, 'foo.0.isolated')),
-        unicode(self.hash_file(self.isolated_dir, 'foo.1.isolated')),
-      ],
-      u'read_only': 1,
-      u'relative_cwd': u'.',
-      u'version': unicode(isolated_format.ISOLATED_FILE_VERSION),
-    }
-    self._cleanup_isolated(expected_isolated_master)
-    self.assertEqual(expected_isolated_master, actual_isolated_master)
-
-    actual_isolated_0 = tools.read_json(
-        os.path.join(self.isolated_dir, 'foo.0.isolated'))
-    expected_isolated_0 = {
-      u'algo': u'sha-1',
-      u'files': {
-        os.path.join(u'test', 'data', 'foo.txt'): {
-          u'm': 0600,
-          u'h': unicode(
-              self.hash_file('tests', 'isolate', 'test', 'data', 'foo.txt')),
-          u's': self.size('tests', 'isolate', 'test', 'data', 'foo.txt'),
-        },
-      },
-      u'version': unicode(isolated_format.ISOLATED_FILE_VERSION),
-    }
-    self._cleanup_isolated(expected_isolated_0)
-    self.assertEqual(expected_isolated_0, actual_isolated_0)
-
-    actual_isolated_1 = tools.read_json(
-        os.path.join(self.isolated_dir, 'foo.1.isolated'))
-    expected_isolated_1 = {
-      u'algo': u'sha-1',
-      u'files': {
-        os.path.join(u'files1', 'subdir', '42.txt'): {
-          u'm': 0600,
-          u'h': unicode(
-              self.hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt')),
-          u's': self.size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-        },
-      },
-      u'version': unicode(isolated_format.ISOLATED_FILE_VERSION),
-    }
-    self._cleanup_isolated(expected_isolated_1)
-    self.assertEqual(expected_isolated_1, actual_isolated_1)
-
-    actual_saved_state = tools.read_json(
-        isolate.isolatedfile_to_state(options.isolated))
-    isolated_base = unicode(os.path.basename(options.isolated))
-    isolate_file = os.path.join(
-        self.isolate_dir, 'tests', 'isolate', 'split.isolate')
-    expected_saved_state = {
-      u'OS': unicode(sys.platform),
-      u'algo': u'sha-1',
-      u'child_isolated_files': [
-        isolated_base[:-len('.isolated')] + '.0.isolated',
-        isolated_base[:-len('.isolated')] + '.1.isolated',
-      ],
-      u'command': [u'python', u'split.py'],
-      u'config_variables': {
-        u'OS': u'linux',
-      },
-      u'extra_variables': {
-        u'foo': u'bar',
-      },
-      u'files': {
-        os.path.join(u'files1', 'subdir', '42.txt'): {
-          u'm': 0600,
-          u'h': unicode(
-              self.hash_file('tests', 'isolate', 'files1', 'subdir', '42.txt')),
-          u's': self.size('tests', 'isolate', 'files1', 'subdir', '42.txt'),
-        },
-        u'split.py': {
-          u'm': 0700,
-          u'h': unicode(self.hash_file('tests', 'isolate', 'split.py')),
-          u's': self.size('tests', 'isolate', 'split.py'),
-        },
-        os.path.join(u'test', 'data', 'foo.txt'): {
-          u'm': 0600,
-          u'h': unicode(
-              self.hash_file('tests', 'isolate', 'test', 'data', 'foo.txt')),
-          u's': self.size('tests', 'isolate', 'test', 'data', 'foo.txt'),
-        },
-      },
-      u'isolate_file': file_path.safe_relpath(
-          file_path.get_native_path_case(isolate_file),
-          unicode(os.path.dirname(options.isolated))),
-      u'path_variables': {
-        u'DEPTH': u'.',
-        u'PRODUCT_DIR': u'files1',
-      },
-      u'relative_cwd': u'.',
-      u'root_dir': file_path.get_native_path_case(
-          os.path.dirname(isolate_file)),
-      u'version': unicode(isolate.SavedState.EXPECTED_VERSION),
-    }
-    self._cleanup_isolated(expected_saved_state)
-    self._cleanup_saved_state(actual_saved_state)
-    self.assertEqual(expected_saved_state, actual_saved_state)
-    self.assertEqual(
-        [
-          'foo.0.isolated', 'foo.1.isolated',
-          'foo.isolated', 'foo.isolated.state',
-        ],
-        sorted(os.listdir(self.isolated_dir)))
-
-  def test_load_isolate_include_command(self):
-    # Ensure that using a .isolate that includes another one in a different
-    # directory will lead to the proper relative directory. See
-    # test_load_with_includes_with_commands in isolate_format_test.py as
-    # reference.
-
-    # Exactly the same thing as in isolate_format_test.py
-    isolate1 = {
-      'conditions': [
-        ['OS=="amiga" or OS=="win"', {
-          'variables': {
-            'command': [
-              'foo', 'amiga_or_win',
-            ],
-          },
-        }],
-        ['OS=="linux"', {
-          'variables': {
-            'command': [
-              'foo', 'linux',
-            ],
-            'files': [
-              'file_linux',
-            ],
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              'file_non_linux',
-            ],
-          },
-        }],
-      ],
-    }
-    isolate2 = {
-      'conditions': [
-        ['OS=="linux" or OS=="mac"', {
-          'variables': {
-            'command': [
-              'foo', 'linux_or_mac',
-            ],
-            'files': [
-              'other/file',
-            ],
-          },
-        }],
-      ],
-    }
-    # Do not define command in isolate3, otherwise commands in the other
-    # included .isolated will be ignored.
-    isolate3 = {
-      'includes': [
-        '../1/isolate1.isolate',
-        '2/isolate2.isolate',
-      ],
-      'conditions': [
-        ['OS=="amiga"', {
-          'variables': {
-            'files': [
-              'file_amiga',
-            ],
-          },
-        }],
-        ['OS=="mac"', {
-          'variables': {
-            'files': [
-              'file_mac',
-            ],
-          },
-        }],
-      ],
-    }
-
-    def test_with_os(
-        config_os, files_to_create, expected_files, command, relative_cwd):
-      """Creates a tree of files in a subdirectory for testing and test this
-      set of conditions.
-      """
-      directory = os.path.join(unicode(self.directory), config_os)
-      os.mkdir(directory)
-      isolate_dir = os.path.join(directory, u'isolate')
-      isolate_dir_1 = os.path.join(isolate_dir, u'1')
-      isolate_dir_3 = os.path.join(isolate_dir, u'3')
-      isolate_dir_3_2 = os.path.join(isolate_dir_3, u'2')
-      isolated_dir = os.path.join(directory, u'isolated')
-      os.mkdir(isolated_dir)
-      os.mkdir(isolate_dir)
-      os.mkdir(isolate_dir_1)
-      os.mkdir(isolate_dir_3)
-      os.mkdir(isolate_dir_3_2)
-      isolated = os.path.join(isolated_dir, u'foo.isolated')
-
-      with open(os.path.join(isolate_dir_1, 'isolate1.isolate'), 'wb') as f:
-        isolate_format.pretty_print(isolate1, f)
-      with open(os.path.join(isolate_dir_3_2, 'isolate2.isolate'), 'wb') as f:
-        isolate_format.pretty_print(isolate2, f)
-      root_isolate = os.path.join(isolate_dir_3, 'isolate3.isolate')
-      with open(root_isolate, 'wb') as f:
-        isolate_format.pretty_print(isolate3, f)
-
-      # Make all the touched files.
-      mapping = {1: isolate_dir_1, 2: isolate_dir_3_2, 3: isolate_dir_3}
-      for k, v in files_to_create.iteritems():
-        f = os.path.join(mapping[k], v)
-        base = os.path.dirname(f)
-        if not os.path.isdir(base):
-          os.mkdir(base)
-        open(f, 'wb').close()
-
-      c = isolate.CompleteState(isolated, isolate.SavedState(isolated_dir))
-      config = {
-        'OS': config_os,
-      }
-      c.load_isolate(
-          unicode(self.cwd), root_isolate, {}, config, {}, None, False)
-      # Note that load_isolate() doesn't retrieve the meta data about each file.
-      expected = {
-        'algo': 'sha-1',
-        'command': command,
-        'files': {
-          unicode(f.replace('/', os.path.sep)):{} for f in expected_files
-        },
-        'read_only': 1,
-        'relative_cwd': relative_cwd.replace('/', os.path.sep),
-        'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-      self.assertEqual(expected, c.saved_state.to_isolated())
-
-    # root is .../isolate/.
-    test_with_os(
-        'amiga',
-        {
-          3: 'file_amiga',
-        },
-        (
-          u'3/file_amiga',
-        ),
-        ['foo', 'amiga_or_win'],
-        '1')
-    # root is .../isolate/.
-    test_with_os(
-        'linux',
-        {
-          1: 'file_linux',
-          2: 'other/file',
-        },
-        (
-          u'1/file_linux',
-          u'3/2/other/file',
-        ),
-        ['foo', 'linux_or_mac'],
-        '3/2')
-    # root is .../isolate/.
-    test_with_os(
-        'mac',
-        {
-          1: 'file_non_linux',
-          2: 'other/file',
-          3: 'file_mac',
-        },
-        (
-          u'1/file_non_linux',
-          u'3/2/other/file',
-          u'3/file_mac',
-        ),
-        ['foo', 'linux_or_mac'],
-        '3/2')
-    # root is .../isolate/1/.
-    test_with_os(
-        'win',
-        {
-          1: 'file_non_linux',
-        },
-        (
-          u'file_non_linux',
-        ),
-        ['foo', 'amiga_or_win'],
-        '.')
-
-  def test_load_isolate_include_command_and_variables(self):
-    # Ensure that using a .isolate that includes another one in a different
-    # directory will lead to the proper relative directory when using variables.
-    # See test_load_with_includes_with_commands_and_variables in
-    # isolate_format_test.py as reference.
-    #
-    # With path variables, 'cwd' is used instead of the path to the .isolate
-    # file. So the files have to be set towards the cwd accordingly. While this
-    # may seem surprising, this makes the whole thing work in the first place.
-
-    # Almost exactly the same thing as in isolate_format_test.py plus the EXTRA
-    # for better testing with variable replacement.
-    isolate1 = {
-      'conditions': [
-        ['OS=="amiga" or OS=="win"', {
-          'variables': {
-            'command': [
-              'foo', 'amiga_or_win', '<(PATH)', '<(EXTRA)',
-            ],
-          },
-        }],
-        ['OS=="linux"', {
-          'variables': {
-            'command': [
-              'foo', 'linux', '<(PATH)', '<(EXTRA)',
-            ],
-            'files': [
-              '<(PATH)/file_linux',
-            ],
-          },
-        }],
-        ['OS=="mac" or OS=="win"', {
-          'variables': {
-            'files': [
-              '<(PATH)/file_non_linux',
-            ],
-          },
-        }],
-      ],
-    }
-    isolate2 = {
-      'conditions': [
-        ['OS=="linux" or OS=="mac"', {
-          'variables': {
-            'command': [
-              'foo', 'linux_or_mac', '<(PATH)', '<(EXTRA)',
-            ],
-            'files': [
-              '<(PATH)/other/file',
-            ],
-          },
-        }],
-      ],
-    }
-    isolate3 = {
-      'includes': [
-        '../1/isolate1.isolate',
-        '2/isolate2.isolate',
-      ],
-      'conditions': [
-        ['OS=="amiga"', {
-          'variables': {
-            'files': [
-              '<(PATH)/file_amiga',
-            ],
-          },
-        }],
-        ['OS=="mac"', {
-          'variables': {
-            'command': [
-              'foo', 'mac', '<(PATH)', '<(EXTRA)',
-            ],
-            'files': [
-              '<(PATH)/file_mac',
-            ],
-          },
-        }],
-      ],
-    }
-
-    def test_with_os(config_os, expected_files, command, relative_cwd):
-      """Creates a tree of files in a subdirectory for testing and test this
-      set of conditions.
-      """
-      directory = os.path.join(unicode(self.directory), config_os)
-      os.mkdir(directory)
-      cwd = os.path.join(unicode(self.cwd), config_os)
-      os.mkdir(cwd)
-      isolate_dir = os.path.join(directory, u'isolate')
-      isolate_dir_1 = os.path.join(isolate_dir, u'1')
-      isolate_dir_3 = os.path.join(isolate_dir, u'3')
-      isolate_dir_3_2 = os.path.join(isolate_dir_3, u'2')
-      isolated_dir = os.path.join(directory, u'isolated')
-      os.mkdir(isolated_dir)
-      os.mkdir(isolate_dir)
-      os.mkdir(isolate_dir_1)
-      os.mkdir(isolate_dir_3)
-      os.mkdir(isolate_dir_3_2)
-      isolated = os.path.join(isolated_dir, u'foo.isolated')
-
-      with open(os.path.join(isolate_dir_1, 'isolate1.isolate'), 'wb') as f:
-        isolate_format.pretty_print(isolate1, f)
-      with open(os.path.join(isolate_dir_3_2, 'isolate2.isolate'), 'wb') as f:
-        isolate_format.pretty_print(isolate2, f)
-      root_isolate = os.path.join(isolate_dir_3, 'isolate3.isolate')
-      with open(root_isolate, 'wb') as f:
-        isolate_format.pretty_print(isolate3, f)
-
-      # Make all the touched files.
-      path_dir = os.path.join(cwd, 'path')
-      os.mkdir(path_dir)
-      for v in expected_files:
-        f = os.path.join(path_dir, v)
-        base = os.path.dirname(f)
-        if not os.path.isdir(base):
-          os.makedirs(base)
-        logging.warn(f)
-        open(f, 'wb').close()
-
-      c = isolate.CompleteState(isolated, isolate.SavedState(isolated_dir))
-      config = {
-        'OS': config_os,
-      }
-      paths = {
-        'PATH': 'path/',
-      }
-      extra = {
-        'EXTRA': 'indeed',
-      }
-      c.load_isolate(
-          unicode(cwd), root_isolate, paths, config, extra, None, False)
-      # Note that load_isolate() doesn't retrieve the meta data about each file.
-      expected = {
-        'algo': 'sha-1',
-        'command': command,
-        'files': {
-          unicode(os.path.join(cwd_name, config_os, 'path', f)): {}
-          for f in expected_files
-        },
-        'read_only': 1,
-        'relative_cwd': relative_cwd,
-        'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-      if not command:
-        expected.pop('command')
-      self.assertEqual(expected, c.saved_state.to_isolated())
-
-    cwd_name = os.path.basename(self.cwd)
-    dir_name = os.path.basename(self.directory)
-    test_with_os(
-        'amiga',
-        (
-          'file_amiga',
-        ),
-        [],
-        os.path.join(dir_name, u'amiga', 'isolate', '3'))
-    test_with_os(
-        'linux',
-        (
-          u'file_linux',
-          os.path.join(u'other', 'file'),
-        ),
-        [],
-        os.path.join(dir_name, u'linux', 'isolate', '3', '2'))
-    test_with_os(
-        'mac',
-        (
-          'file_non_linux',
-          os.path.join(u'other', 'file'),
-          'file_mac',
-        ),
-        [
-          'foo',
-          'mac',
-          os.path.join(u'..', '..', '..', '..', cwd_name, 'mac', 'path'),
-          'indeed',
-        ],
-        os.path.join(dir_name, u'mac', 'isolate', '3'))
-    test_with_os(
-        'win',
-        (
-          'file_non_linux',
-        ),
-        [],
-        os.path.join(dir_name, u'win', 'isolate', '1'))
-
-
-class IsolateCommand(IsolateBase):
-  def load_complete_state(self, *_):
-    """Creates a minimalist CompleteState instance without an .isolated
-    reference.
-    """
-    out = isolate.CompleteState(None, isolate.SavedState(self.cwd))
-    out.saved_state.isolate_file = u'blah.isolate'
-    out.saved_state.relative_cwd = u''
-    out.saved_state.root_dir = ROOT_DIR
-    return out
-
-  def test_CMDarchive(self):
-    actual = []
-
-    def mocked_upload_tree(base_url, infiles, namespace):
-      # |infiles| may be a generator of pair, materialize it into a list.
-      actual.append({
-        'base_url': base_url,
-        'infiles': dict(infiles),
-        'namespace': namespace,
-      })
-    self.mock(isolateserver, 'upload_tree', mocked_upload_tree)
-
-    def join(*path):
-      return os.path.join(self.cwd, *path)
-
-    isolate_file = join('x.isolate')
-    isolated_file = join('x.isolated')
-    with open(isolate_file, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'files\': [\'foo\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-    with open(join('foo'), 'wb') as f:
-      f.write('fooo')
-
-    self.mock(sys, 'stdout', cStringIO.StringIO())
-    cmd = [
-        '-i', isolate_file,
-        '-s', isolated_file,
-        '--isolate-server', 'http://localhost:1',
-        '--config-variable', 'OS', 'dendy',
-    ]
-    self.assertEqual(0, isolate.CMDarchive(optparse.OptionParser(), cmd))
-    expected = [
-        {
-          'base_url': 'http://localhost:1',
-          'infiles': {
-            join(isolated_file): {
-              'priority': '0',
-            },
-            join('foo'): {
-              'h': '520d41b29f891bbaccf31d9fcfa72e82ea20fcf0',
-              's': 4,
-            },
-          },
-          'namespace': 'default-gzip',
-        },
-    ]
-    # These always change.
-    actual[0]['infiles'][join(isolated_file)].pop('h')
-    actual[0]['infiles'][join(isolated_file)].pop('s')
-    # 'm' is not set on Windows.
-    actual[0]['infiles'][join('foo')].pop('m', None)
-    actual[0]['infiles'][join('foo')].pop('t')
-    self.assertEqual(expected, actual)
-
-  def test_CMDbatcharchive(self):
-    # Same as test_CMDarchive but via code path that parses *.gen.json files.
-    actual = []
-
-    def mocked_upload_tree(base_url, infiles, namespace):
-      # |infiles| may be a generator of pair, materialize it into a list.
-      actual.append({
-        'base_url': base_url,
-        'infiles': dict(infiles),
-        'namespace': namespace,
-      })
-    self.mock(isolateserver, 'upload_tree', mocked_upload_tree)
-
-    def join(*path):
-      return os.path.join(self.cwd, *path)
-
-    # First isolate: x.isolate.
-    isolate_file_x = join('x.isolate')
-    isolated_file_x = join('x.isolated')
-    with open(isolate_file_x, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'files\': [\'foo\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-    with open(join('foo'), 'wb') as f:
-      f.write('fooo')
-    with open(join('x.isolated.gen.json'), 'wb') as f:
-      json.dump({
-        'args': [
-          '-i', isolate_file_x,
-          '-s', isolated_file_x,
-          '--config-variable', 'OS', 'dendy',
-        ],
-        'dir': self.cwd,
-        'version': 1,
-      }, f)
-
-    # Second isolate: y.isolate.
-    isolate_file_y = join('y.isolate')
-    isolated_file_y = join('y.isolated')
-    with open(isolate_file_y, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'files\': [\'bar\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-    with open(join('bar'), 'wb') as f:
-      f.write('barr')
-    with open(join('y.isolated.gen.json'), 'wb') as f:
-      json.dump({
-        'args': [
-          '-i', isolate_file_y,
-          '-s', isolated_file_y,
-          '--config-variable', 'OS', 'dendy',
-        ],
-        'dir': self.cwd,
-        'version': 1,
-      }, f)
-
-    self.mock(sys, 'stdout', cStringIO.StringIO())
-    cmd = [
-      '--isolate-server', 'http://localhost:1',
-      '--dump-json', 'json_output.json',
-      join('x.isolated.gen.json'),
-      join('y.isolated.gen.json'),
-    ]
-    self.assertEqual(
-        0,
-        isolate.CMDbatcharchive(logging_utils.OptionParserWithLogging(), cmd))
-    expected = [
-        {
-          'base_url': 'http://localhost:1',
-          'infiles': {
-            join(isolated_file_x): {
-              'priority': '0',
-            },
-            join('foo'): {
-              'h': '520d41b29f891bbaccf31d9fcfa72e82ea20fcf0',
-              's': 4,
-            },
-            join(isolated_file_y): {
-              'priority': '0',
-            },
-            join('bar'): {
-              'h': 'e918b3a3f9597e3cfdc62ce20ecf5756191cb3ec',
-              's': 4,
-            },
-          },
-          'namespace': 'default-gzip',
-        },
-    ]
-    # These always change.
-    actual[0]['infiles'][join(isolated_file_x)].pop('h')
-    actual[0]['infiles'][join(isolated_file_x)].pop('s')
-    actual[0]['infiles'][join('foo')].pop('m', None)
-    actual[0]['infiles'][join('foo')].pop('t')
-    actual[0]['infiles'][join(isolated_file_y)].pop('h')
-    actual[0]['infiles'][join(isolated_file_y)].pop('s')
-    actual[0]['infiles'][join('bar')].pop('m', None)
-    actual[0]['infiles'][join('bar')].pop('t')
-    self.assertEqual(expected, actual)
-
-    expected_json = {
-      'x': isolated_format.hash_file(
-          os.path.join(self.cwd, 'x.isolated'), ALGO),
-      'y': isolated_format.hash_file(
-          os.path.join(self.cwd, 'y.isolated'), ALGO),
-    }
-    self.assertEqual(expected_json, tools.read_json('json_output.json'))
-
-  def test_CMDcheck_empty(self):
-    isolate_file = os.path.join(self.cwd, 'x.isolate')
-    isolated_file = os.path.join(self.cwd, 'x.isolated')
-    with open(isolate_file, 'wb') as f:
-      f.write('# Foo\n{\n}')
-
-    self.mock(sys, 'stdout', cStringIO.StringIO())
-    cmd = ['-i', isolate_file, '-s', isolated_file]
-    isolate.CMDcheck(optparse.OptionParser(), cmd)
-
-  def test_CMDcheck_stale_version(self):
-    isolate_file = os.path.join(self.cwd, 'x.isolate')
-    isolated_file = os.path.join(self.cwd, 'x.isolated')
-    with open(isolate_file, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'command\': [\'foo\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-
-    self.mock(sys, 'stdout', cStringIO.StringIO())
-    cmd = [
-        '-i', isolate_file,
-        '-s', isolated_file,
-        '--config-variable', 'OS=dendy',
-    ]
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-
-    with open(isolate_file, 'rb') as f:
-      actual = f.read()
-    expected = (
-        '# Foo\n{  \'conditions\':[    [\'OS=="dendy"\', {      '
-        '\'variables\': {        \'command\': [\'foo\'],      },    }],  ],}')
-    self.assertEqual(expected, actual)
-
-    with open(isolated_file, 'rb') as f:
-      actual_isolated = f.read()
-    expected_isolated = (
-        '{"algo":"sha-1","command":["foo"],"files":{},'
-        '"read_only":1,"relative_cwd":".","version":"%s"}'
-    ) % isolated_format.ISOLATED_FILE_VERSION
-    self.assertEqual(expected_isolated, actual_isolated)
-    isolated_data = json.loads(actual_isolated)
-
-    with open(isolated_file + '.state', 'rb') as f:
-      actual_isolated_state = f.read()
-    expected_isolated_state = (
-        '{"OS":"%s","algo":"sha-1","child_isolated_files":[],"command":["foo"],'
-        '"config_variables":{"OS":"dendy"},'
-        '"extra_variables":{"EXECUTABLE_SUFFIX":"%s"},"files":{},'
-        '"isolate_file":"x.isolate","path_variables":{},'
-        '"relative_cwd":".","root_dir":%s,"version":"%s"}'
-    ) % (
-      sys.platform,
-      '.exe' if sys.platform=='win32' else '',
-      json.dumps(self.cwd),
-      isolate.SavedState.EXPECTED_VERSION)
-    self.assertEqual(expected_isolated_state, actual_isolated_state)
-    isolated_state_data = json.loads(actual_isolated_state)
-
-    # Now edit the .isolated.state file to break the version number and make
-    # sure it doesn't crash.
-    with open(isolated_file + '.state', 'wb') as f:
-      isolated_state_data['version'] = '100.42'
-      json.dump(isolated_state_data, f)
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-
-    # Now edit the .isolated file to break the version number and make
-    # sure it doesn't crash.
-    with open(isolated_file, 'wb') as f:
-      isolated_data['version'] = '100.42'
-      json.dump(isolated_data, f)
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-
-    # Make sure the files were regenerated.
-    with open(isolated_file, 'rb') as f:
-      actual_isolated = f.read()
-    self.assertEqual(expected_isolated, actual_isolated)
-    with open(isolated_file + '.state', 'rb') as f:
-      actual_isolated_state = f.read()
-    self.assertEqual(expected_isolated_state, actual_isolated_state)
-
-  def test_CMDcheck_new_variables(self):
-    # Test bug #61.
-    isolate_file = os.path.join(self.cwd, 'x.isolate')
-    isolated_file = os.path.join(self.cwd, 'x.isolated')
-    cmd = [
-        '-i', isolate_file,
-        '-s', isolated_file,
-        '--config-variable', 'OS=dendy',
-    ]
-    with open(isolate_file, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'command\': [\'foo\'],'
-          '        \'files\': [\'foo\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-    with open(os.path.join(self.cwd, 'foo'), 'wb') as f:
-      f.write('yeah')
-
-    self.mock(sys, 'stdout', cStringIO.StringIO())
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-
-    # Now add a new config variable.
-    with open(isolate_file, 'wb') as f:
-      f.write(
-          '# Foo\n'
-          '{'
-          '  \'conditions\':['
-          '    [\'OS=="dendy"\', {'
-          '      \'variables\': {'
-          '        \'command\': [\'foo\'],'
-          '        \'files\': [\'foo\'],'
-          '      },'
-          '    }],'
-          '    [\'foo=="baz"\', {'
-          '      \'variables\': {'
-          '        \'files\': [\'bar\'],'
-          '      },'
-          '    }],'
-          '  ],'
-          '}')
-    with open(os.path.join(self.cwd, 'bar'), 'wb') as f:
-      f.write('yeah right!')
-
-    # The configuration is OS=dendy and foo=bar. So it should load both
-    # configurations.
-    self.assertEqual(
-        0,
-        isolate.CMDcheck(
-            optparse.OptionParser(), cmd + ['--config-variable', 'foo=bar']))
-
-  def test_CMDcheck_isolate_copied(self):
-    # Note that moving the .isolate file is a different code path, this is about
-    # copying the .isolate file to a new place and specifying the new location
-    # on a subsequent execution.
-    x_isolate_file = os.path.join(self.cwd, 'x.isolate')
-    isolated_file = os.path.join(self.cwd, 'x.isolated')
-    cmd = ['-i', x_isolate_file, '-s', isolated_file]
-    with open(x_isolate_file, 'wb') as f:
-      f.write('{}')
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-    self.assertTrue(os.path.isfile(isolated_file + '.state'))
-    with open(isolated_file + '.state', 'rb') as f:
-      self.assertEqual(json.load(f)['isolate_file'], 'x.isolate')
-
-    # Move the .isolate file.
-    y_isolate_file = os.path.join(self.cwd, 'Y.isolate')
-    shutil.copyfile(x_isolate_file, y_isolate_file)
-    cmd = ['-i', y_isolate_file, '-s', isolated_file]
-    self.assertEqual(0, isolate.CMDcheck(optparse.OptionParser(), cmd))
-    with open(isolated_file + '.state', 'rb') as f:
-      self.assertEqual(json.load(f)['isolate_file'], 'Y.isolate')
-
-  def test_CMDrun_extra_args(self):
-    cmd = [
-      'run',
-      '--isolate', 'blah.isolate',
-      '--', 'extra_args',
-    ]
-    self.mock(isolate, 'load_complete_state', self.load_complete_state)
-    self.mock(subprocess, 'call', lambda *_, **_kwargs: 0)
-    self.assertEqual(0, isolate.CMDrun(optparse.OptionParser(), cmd))
-
-  def test_CMDrun_no_isolated(self):
-    isolate_file = os.path.join(self.cwd, 'x.isolate')
-    with open(isolate_file, 'wb') as f:
-      f.write('{"variables": {"command": ["python", "-c", "print(\'hi\')"]} }')
-
-    def expect_call(cmd, cwd):
-      self.assertEqual([sys.executable, '-c', "print('hi')", 'run'], cmd)
-      self.assertTrue(os.path.isdir(cwd))
-      return 0
-    self.mock(subprocess, 'call', expect_call)
-
-    cmd = ['run', '--isolate', isolate_file]
-    self.assertEqual(0, isolate.CMDrun(optparse.OptionParser(), cmd))
-
-
-def clear_env_vars():
-  for e in ('ISOLATE_DEBUG', 'ISOLATE_SERVER'):
-    os.environ.pop(e, None)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  clear_env_vars()
-  test_utils.main()
diff --git a/tools/swarming_client/tests/isolated_format_test.py b/tools/swarming_client/tests/isolated_format_test.py
deleted file mode 100755
index ed8319d..0000000
--- a/tools/swarming_client/tests/isolated_format_test.py
+++ /dev/null
@@ -1,258 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import hashlib
-import json
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-# net_utils adjusts sys.path.
-import net_utils
-
-import isolated_format
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-from utils import file_path
-from utils import tools
-
-import isolateserver_mock
-
-
-ALGO = hashlib.sha1
-
-
-class TestCase(net_utils.TestCase):
-  def test_get_hash_algo(self):
-    # Tests here assume ALGO is used for default namespaces, check this
-    # assumption.
-    self.assertIs(isolated_format.get_hash_algo('default'), ALGO)
-    self.assertIs(isolated_format.get_hash_algo('default-gzip'), ALGO)
-
-
-class SymlinkTest(unittest.TestCase):
-  def setUp(self):
-    super(SymlinkTest, self).setUp()
-    self.old_cwd = os.getcwd()
-    self.cwd = tempfile.mkdtemp(prefix=u'isolate_')
-    # Everything should work even from another directory.
-    os.chdir(self.cwd)
-
-  def tearDown(self):
-    try:
-      os.chdir(self.old_cwd)
-      file_path.rmtree(self.cwd)
-    finally:
-      super(SymlinkTest, self).tearDown()
-
-  if sys.platform == 'darwin':
-    def test_expand_symlinks_path_case(self):
-      # Ensures that the resulting path case is fixed on case insensitive file
-      # system.
-      os.symlink('dest', os.path.join(self.cwd, 'link'))
-      os.mkdir(os.path.join(self.cwd, 'Dest'))
-      open(os.path.join(self.cwd, 'Dest', 'file.txt'), 'w').close()
-
-      result = isolated_format.expand_symlinks(unicode(self.cwd), 'link')
-      self.assertEqual((u'Dest', [u'link']), result)
-      result = isolated_format.expand_symlinks(
-          unicode(self.cwd), 'link/File.txt')
-      self.assertEqual((u'Dest/file.txt', [u'link']), result)
-
-    def test_expand_directories_and_symlinks_path_case(self):
-      # Ensures that the resulting path case is fixed on case insensitive file
-      # system. A superset of test_expand_symlinks_path_case.
-      # Create *all* the paths with the wrong path case.
-      basedir = os.path.join(self.cwd, 'baseDir')
-      os.mkdir(basedir.lower())
-      subdir = os.path.join(basedir, 'subDir')
-      os.mkdir(subdir.lower())
-      open(os.path.join(subdir, 'Foo.txt'), 'w').close()
-      os.symlink('subDir', os.path.join(basedir, 'linkdir'))
-      actual = isolated_format.expand_directories_and_symlinks(
-          unicode(self.cwd), [u'baseDir/'], lambda _: None, True, False)
-      expected = [
-        u'basedir/linkdir',
-        u'basedir/subdir/Foo.txt',
-        u'basedir/subdir/Foo.txt',
-      ]
-      self.assertEqual(expected, actual)
-
-    def test_file_to_metadata_path_case_simple(self):
-      # Ensure the symlink dest is saved in the right path case.
-      subdir = os.path.join(self.cwd, 'subdir')
-      os.mkdir(subdir)
-      linkdir = os.path.join(self.cwd, 'linkdir')
-      os.symlink('subDir', linkdir)
-      actual = isolated_format.file_to_metadata(
-          unicode(linkdir.upper()), {}, True, ALGO)
-      expected = {'l': u'subdir', 't': int(os.stat(linkdir).st_mtime)}
-      self.assertEqual(expected, actual)
-
-    def test_file_to_metadata_path_case_complex(self):
-      # Ensure the symlink dest is saved in the right path case. This includes 2
-      # layers of symlinks.
-      basedir = os.path.join(self.cwd, 'basebir')
-      os.mkdir(basedir)
-
-      linkeddir2 = os.path.join(self.cwd, 'linkeddir2')
-      os.mkdir(linkeddir2)
-
-      linkeddir1 = os.path.join(basedir, 'linkeddir1')
-      os.symlink('../linkedDir2', linkeddir1)
-
-      subsymlinkdir = os.path.join(basedir, 'symlinkdir')
-      os.symlink('linkedDir1', subsymlinkdir)
-
-      actual = isolated_format.file_to_metadata(
-          unicode(subsymlinkdir.upper()), {}, True, ALGO)
-      expected = {
-        'l': u'linkeddir1', 't': int(os.stat(subsymlinkdir).st_mtime),
-      }
-      self.assertEqual(expected, actual)
-
-      actual = isolated_format.file_to_metadata(
-          unicode(linkeddir1.upper()), {}, True, ALGO)
-      expected = {
-        'l': u'../linkeddir2', 't': int(os.stat(linkeddir1).st_mtime),
-      }
-      self.assertEqual(expected, actual)
-
-  if sys.platform != 'win32':
-    def test_symlink_input_absolute_path(self):
-      # A symlink is outside of the checkout, it should be treated as a normal
-      # directory.
-      # .../src
-      # .../src/out -> .../tmp/foo
-      # .../tmp
-      # .../tmp/foo
-      src = os.path.join(self.cwd, u'src')
-      src_out = os.path.join(src, 'out')
-      tmp = os.path.join(self.cwd, 'tmp')
-      tmp_foo = os.path.join(tmp, 'foo')
-      os.mkdir(src)
-      os.mkdir(tmp)
-      os.mkdir(tmp_foo)
-      # The problem was that it's an absolute path, so it must be considered a
-      # normal directory.
-      os.symlink(tmp, src_out)
-      open(os.path.join(tmp_foo, 'bar.txt'), 'w').close()
-      actual = isolated_format.expand_symlinks(src, u'out/foo/bar.txt')
-      self.assertEqual((u'out/foo/bar.txt', []), actual)
-
-
-class TestIsolated(auto_stub.TestCase):
-  def test_load_isolated_empty(self):
-    m = isolated_format.load_isolated('{}', isolateserver_mock.ALGO)
-    self.assertEqual({}, m)
-
-  def test_load_isolated_good(self):
-    data = {
-      u'command': [u'foo', u'bar'],
-      u'files': {
-        u'a': {
-          u'l': u'somewhere',
-        },
-        u'b': {
-          u'm': 123,
-          u'h': u'0123456789abcdef0123456789abcdef01234567',
-          u's': 3,
-        }
-      },
-      u'includes': [u'0123456789abcdef0123456789abcdef01234567'],
-      u'read_only': 1,
-      u'relative_cwd': u'somewhere_else',
-      u'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    m = isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO)
-    self.assertEqual(data, m)
-
-  def test_load_isolated_bad(self):
-    data = {
-      u'files': {
-        u'a': {
-          u'l': u'somewhere',
-          u'h': u'0123456789abcdef0123456789abcdef01234567'
-        }
-      },
-      u'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    with self.assertRaises(isolated_format.IsolatedError):
-      isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO)
-
-  def test_load_isolated_bad_abs(self):
-    for i in ('/a', 'a/..', 'a/', '\\\\a'):
-      data = {
-        u'files': {i: {u'l': u'somewhere'}},
-        u'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-      with self.assertRaises(isolated_format.IsolatedError):
-        isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO)
-
-  def test_load_isolated_os_only(self):
-    # Tolerate 'os' on older version.
-    data = {
-      u'os': 'HP/UX',
-      u'version': '1.3',
-    }
-    m = isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO)
-    self.assertEqual(data, m)
-
-  def test_load_isolated_os_only_bad(self):
-    data = {
-      u'os': 'HP/UX',
-      u'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    with self.assertRaises(isolated_format.IsolatedError):
-      isolated_format.load_isolated(json.dumps(data), isolateserver_mock.ALGO)
-
-  def test_load_isolated_path(self):
-    # Automatically convert the path case.
-    wrong_path_sep = u'\\' if os.path.sep == '/' else u'/'
-    def gen_data(path_sep):
-      return {
-        u'command': [u'foo', u'bar'],
-        u'files': {
-          path_sep.join(('a', 'b')): {
-            u'l': path_sep.join(('..', 'somewhere')),
-          },
-        },
-        u'relative_cwd': path_sep.join(('somewhere', 'else')),
-        u'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-
-    data = gen_data(wrong_path_sep)
-    actual = isolated_format.load_isolated(
-        json.dumps(data), isolateserver_mock.ALGO)
-    expected = gen_data(os.path.sep)
-    self.assertEqual(expected, actual)
-
-  def test_save_isolated_good_long_size(self):
-    calls = []
-    self.mock(tools, 'write_json', lambda *x: calls.append(x))
-    data = {
-      u'algo': 'sha-1',
-      u'files': {
-        u'b': {
-          u'm': 123,
-          u'h': u'0123456789abcdef0123456789abcdef01234567',
-          u's': 2181582786L,
-        }
-      },
-    }
-    m = isolated_format.save_isolated('foo', data)
-    self.assertEqual([], m)
-    self.assertEqual([('foo', data, True)], calls)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  logging.basicConfig(
-      level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))
-  unittest.main()
diff --git a/tools/swarming_client/tests/isolateserver_mock.py b/tools/swarming_client/tests/isolateserver_mock.py
deleted file mode 100644
index a88707e..0000000
--- a/tools/swarming_client/tests/isolateserver_mock.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import base64
-import hashlib
-import json
-import logging
-import re
-import zlib
-
-import httpserver_mock
-
-ALGO = hashlib.sha1
-
-
-def hash_content(content):
-  return ALGO(content).hexdigest()
-
-
-class FakeSigner(object):
-
-  @classmethod
-  def generate(cls, message, embedded):
-    return '%s_<<<%s>>>' % (repr(message), json.dumps(embedded))
-
-  @classmethod
-  def validate(cls, ticket, message):
-    a = re.match(r'^' + repr(message) + r'_<<<(.*)>>>$', ticket, re.DOTALL)
-    if not a:
-      raise ValueError('Message %s cannot validate ticket %s' % (
-          repr(message), ticket))
-    return json.loads(a.groups()[0])
-
-
-class IsolateServerHandler(httpserver_mock.MockHandler):
-  """An extremely minimal implementation of the isolate server API v1.0."""
-
-  def _should_push_to_gs(self, isolated, size):
-    max_memcache = 500 * 1024
-    min_direct_gs = 501
-    if isolated and size <= max_memcache:
-      return False
-    return size >= min_direct_gs
-
-  def _generate_signed_url(self, digest, namespace='default'):
-    return '%s/FAKE_GCS/%s/%s' % (self.server.url, namespace, digest)
-
-  def _generate_ticket(self, entry_dict):
-    embedded = dict(
-        entry_dict,
-        **{
-            'c': 'flate',
-            'h': 'SHA-1',
-        })
-    message = ['datastore', 'gs'][
-        self._should_push_to_gs(embedded['i'], embedded['s'])]
-    return FakeSigner.generate(message, embedded)
-
-  def _storage_helper(self, body, gs=False):
-    request = json.loads(body)
-    message = ['datastore', 'gs'][gs]
-    content = request['content'] if not gs else None
-    embedded = FakeSigner.validate(request['upload_ticket'], message)
-    namespace = embedded['n']
-    if namespace not in self.server.contents:
-      self.server.contents[namespace] = {}
-    self.server.contents[namespace][embedded['d']] = content
-    self._json({'ok': True})
-
-  ### Mocked HTTP Methods
-
-  def do_GET(self):
-    logging.info('GET %s', self.path)
-    if self.path in ('/on/load', '/on/quit'):
-      self._octet_stream('')
-    elif self.path == '/auth/api/v1/server/oauth_config':
-      self._json({
-          'client_id': 'c',
-          'client_not_so_secret': 's',
-          'primary_url': self.server.url})
-    elif self.path == '/auth/api/v1/accounts/self':
-      self._json({'identity': 'user:joe', 'xsrf_token': 'foo'})
-    else:
-      raise NotImplementedError(self.path)
-
-  def do_POST(self):
-    logging.info('POST %s', self.path)
-    body = self._read_body()
-    if self.path.startswith('/api/isolateservice/v1/preupload'):
-      response = {'items': []}
-      def append_entry(entry, index, li):
-        """Converts a {'h', 's', 'i'} to ["<upload url>", "<finalize url>"] or
-        None.
-        """
-        if entry['d'] not in self.server.contents.get(entry['n'], {}):
-          status = {
-              'digest': entry['d'],
-              'index': str(index),
-              'upload_ticket': self._generate_ticket(entry),
-          }
-          if self._should_push_to_gs(entry['i'], entry['s']):
-            status['gs_upload_url'] = self._generate_signed_url(entry['d'])
-          li.append(status)
-        # Don't use finalize url for the mock.
-
-      request = json.loads(body)
-      namespace = request['namespace']['namespace']
-      for index, i in enumerate(request['items']):
-        append_entry({
-            'd': i['digest'],
-            'i': i['is_isolated'],
-            'n': namespace,
-            's': i['size'],
-        }, index, response['items'])
-      logging.info('Returning %s' % response)
-      self._json(response)
-    elif self.path.startswith('/api/isolateservice/v1/store_inline'):
-      self._storage_helper(body)
-    elif self.path.startswith('/api/isolateservice/v1/finalize_gs_upload'):
-      self._storage_helper(body, True)
-    elif self.path.startswith('/api/isolateservice/v1/retrieve'):
-      request = json.loads(body)
-      namespace = request['namespace']['namespace']
-      data = self.server.contents[namespace].get(request['digest'])
-      if data is None:
-        logging.error(
-            'Failed to retrieve %s / %s', namespace, request['digest'])
-      self._json({'content': data})
-    elif self.path.startswith('/api/isolateservice/v1/server_details'):
-      self._json({'server_version': 'such a good version'})
-    else:
-      raise NotImplementedError(self.path)
-
-  def do_PUT(self):
-    if self.server.discard_content:
-      body = '<skipped>'
-      self._drop_body()
-    else:
-      body = self._read_body()
-    if self.path.startswith('/FAKE_GCS/'):
-      namespace, h = self.path[len('/FAKE_GCS/'):].split('/', 1)
-      self.server.contents.setdefault(namespace, {})[h] = body
-      self._octet_stream('')
-    else:
-      raise NotImplementedError(self.path)
-
-
-class MockIsolateServer(httpserver_mock.MockServer):
-  _HANDLER_CLS = IsolateServerHandler
-
-  def __init__(self):
-    super(MockIsolateServer, self).__init__()
-    self._server.contents = {}
-    self._server.discard_content = False
-
-  def discard_content(self):
-    """Stops saving content in memory. Used to test large files."""
-    self._server.discard_content = True
-
-  @property
-  def contents(self):
-    return self._server.contents
-
-  def add_content_compressed(self, namespace, content):
-    assert not self._server.discard_content
-    h = hash_content(content)
-    logging.info('add_content_compressed(%s, %s)', namespace, h)
-    self._server.contents.setdefault(namespace, {})[h] = base64.b64encode(
-        zlib.compress(content))
-    return h
-
-  def add_content(self, namespace, content):
-    assert not self._server.discard_content
-    h = hash_content(content)
-    logging.info('add_content(%s, %s)', namespace, h)
-    self._server.contents.setdefault(namespace, {})[h] = base64.b64encode(
-        content)
-    return h
diff --git a/tools/swarming_client/tests/isolateserver_smoke_test.py b/tools/swarming_client/tests/isolateserver_smoke_test.py
deleted file mode 100755
index 45c2396..0000000
--- a/tools/swarming_client/tests/isolateserver_smoke_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import hashlib
-import logging
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-import isolated_format
-import test_utils
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-
-# Ensure that the testing machine has access to this server.
-ISOLATE_SERVER = 'https://isolateserver.appspot.com/'
-
-
-CONTENTS = {
-  'empty_file.txt': '',
-  'small_file.txt': 'small file\n',
-  # TODO(maruel): symlinks.
-}
-
-
-class IsolateServerArchiveSmokeTest(unittest.TestCase):
-  def setUp(self):
-    super(IsolateServerArchiveSmokeTest, self).setUp()
-    # The namespace must end in '-gzip' since all files are now compressed
-    # before being uploaded.
-    # TODO(maruel): This should not be leaked to the client. It's a
-    # transport/storage detail.
-    self.namespace = ('temporary' + str(long(time.time())).split('.', 1)[0]
-                      + '-gzip')
-    self.tempdir = tempfile.mkdtemp(prefix=u'isolateserver')
-    self.rootdir = os.path.join(self.tempdir, 'rootdir')
-    self.test_data = os.path.join(self.tempdir, 'test_data')
-    test_utils.make_tree(self.test_data, CONTENTS)
-
-  def tearDown(self):
-    try:
-      file_path.rmtree(self.tempdir)
-    finally:
-      super(IsolateServerArchiveSmokeTest, self).tearDown()
-
-  def _run(self, args):
-    """Runs isolateserver.py."""
-    cmd = [
-        sys.executable,
-        os.path.join(ROOT_DIR, 'isolateserver.py'),
-    ]
-    cmd.extend(args)
-    cmd.extend(
-        [
-          '--isolate-server', ISOLATE_SERVER,
-          '--namespace', self.namespace
-        ])
-    if '-v' in sys.argv:
-      cmd.append('--verbose')
-      subprocess.check_call(cmd)
-    else:
-      subprocess.check_output(cmd)
-
-  def _archive_given_files(self, files):
-    """Given a list of files, call isolateserver.py with them. Then
-    verify they are all on the server."""
-    files = [os.path.join(self.test_data, filename) for filename in files]
-    self._run(['archive'] + files)
-    self._download_given_files(files)
-
-  def _download_given_files(self, files):
-    """Tries to download the files from the server."""
-    args = ['download', '--target', self.rootdir]
-    file_hashes = [isolated_format.hash_file(f, hashlib.sha1) for f in files]
-    for f in file_hashes:
-      args.extend(['--file', f, f])
-    self._run(args)
-    # Assert the files are present.
-    actual = [
-        isolated_format.hash_file(os.path.join(self.rootdir, f), hashlib.sha1)
-        for f in os.listdir(self.rootdir)
-    ]
-    self.assertEqual(sorted(file_hashes), sorted(actual))
-
-  def test_archive_empty_file(self):
-    self._archive_given_files(['empty_file.txt'])
-
-  def test_archive_small_file(self):
-    self._archive_given_files(['small_file.txt'])
-
-  def test_archive_huge_file(self):
-    # Create a file over 2gbs.
-    name = '2.1gb.7z'
-    with open(os.path.join(self.test_data, name), 'wb') as f:
-      # Write 2.1gb.
-      data = os.urandom(1024)
-      for _ in xrange(2150 * 1024):
-        f.write(data)
-    self._archive_given_files([name])
-
-  if sys.maxsize == (2**31) - 1:
-    def test_archive_multiple_huge_file(self):
-      # Create multiple files over 2.5gb. This test exists to stress the virtual
-      # address space on 32 bits systems
-      files = []
-      for i in xrange(5):
-        name = '512mb_%d.7z' % i
-        files.append(name)
-        with open(os.path.join(self.test_data, name), 'wb') as f:
-          # Write 512mb.
-          data = os.urandom(1024)
-          for _ in xrange(512 * 1024):
-            f.write(data)
-      self._archive_given_files(files)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  if len(sys.argv) > 1 and sys.argv[1].startswith('http'):
-    ISOLATE_SERVER = sys.argv.pop(1).rstrip('/') + '/'
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/isolateserver_test.py b/tools/swarming_client/tests/isolateserver_test.py
deleted file mode 100755
index 550e161..0000000
--- a/tools/swarming_client/tests/isolateserver_test.py
+++ /dev/null
@@ -1,1369 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# pylint: disable=W0212,W0223,W0231,W0613
-
-import base64
-import collections
-import hashlib
-import json
-import logging
-import io
-import os
-import StringIO
-import sys
-import tempfile
-import unittest
-import urllib
-import zlib
-
-# net_utils adjusts sys.path.
-import net_utils
-
-import auth
-import isolated_format
-import isolateserver
-import test_utils
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-from utils import file_path
-from utils import fs
-from utils import logging_utils
-from utils import threading_utils
-
-import isolateserver_mock
-
-
-CONTENTS = {
-  'empty_file.txt': '',
-  'small_file.txt': 'small file\n',
-  # TODO(maruel): symlinks.
-}
-
-
-class TestCase(net_utils.TestCase):
-  """Mocks out url_open() calls and sys.stdout/stderr."""
-  _tempdir = None
-
-  def setUp(self):
-    super(TestCase, self).setUp()
-    self.mock(auth, 'ensure_logged_in', lambda _: None)
-    self.mock(sys, 'stdout', StringIO.StringIO())
-    self.mock(sys, 'stderr', StringIO.StringIO())
-    self.old_cwd = os.getcwd()
-
-  def tearDown(self):
-    try:
-      os.chdir(self.old_cwd)
-      if self._tempdir:
-        file_path.rmtree(self._tempdir)
-      if not self.has_failed():
-        self.checkOutput('', '')
-    finally:
-      super(TestCase, self).tearDown()
-
-  @property
-  def tempdir(self):
-    if not self._tempdir:
-      self._tempdir = tempfile.mkdtemp(prefix=u'isolateserver')
-    return self._tempdir
-
-  def make_tree(self, contents):
-    test_utils.make_tree(self.tempdir, contents)
-
-  def checkOutput(self, expected_out, expected_err):
-    try:
-      self.assertEqual(expected_err, sys.stderr.getvalue())
-      self.assertEqual(expected_out, sys.stdout.getvalue())
-    finally:
-      # Prevent double-fail.
-      self.mock(sys, 'stdout', StringIO.StringIO())
-      self.mock(sys, 'stderr', StringIO.StringIO())
-
-
-class TestZipCompression(TestCase):
-  """Test zip_compress and zip_decompress generators."""
-
-  def test_compress_and_decompress(self):
-    """Test data === decompress(compress(data))."""
-    original = [str(x) for x in xrange(0, 1000)]
-    processed = isolateserver.zip_decompress(
-        isolateserver.zip_compress(original))
-    self.assertEqual(''.join(original), ''.join(processed))
-
-  def test_zip_bomb(self):
-    """Verify zip_decompress always returns small chunks."""
-    original = '\x00' * 100000
-    bomb = ''.join(isolateserver.zip_compress(original))
-    decompressed = []
-    chunk_size = 1000
-    for chunk in isolateserver.zip_decompress([bomb], chunk_size):
-      self.assertLessEqual(len(chunk), chunk_size)
-      decompressed.append(chunk)
-    self.assertEqual(original, ''.join(decompressed))
-
-  def test_bad_zip_file(self):
-    """Verify decompressing broken file raises IOError."""
-    with self.assertRaises(IOError):
-      ''.join(isolateserver.zip_decompress(['Im not a zip file']))
-
-
-class FakeItem(isolateserver.Item):
-  def __init__(self, data, high_priority=False):
-    super(FakeItem, self).__init__(
-      isolateserver_mock.hash_content(data), len(data), high_priority)
-    self.data = data
-
-  def content(self):
-    return [self.data]
-
-  @property
-  def zipped(self):
-    return zlib.compress(self.data, self.compression_level)
-
-
-class MockedStorageApi(isolateserver.StorageApi):
-  def __init__(
-      self, missing_hashes, push_side_effect=None, namespace='default'):
-    self.missing_hashes = missing_hashes
-    self.push_side_effect = push_side_effect
-    self.push_calls = []
-    self.contains_calls = []
-    self._namespace = namespace
-
-  @property
-  def namespace(self):
-    return self._namespace
-
-  def push(self, item, push_state, content=None):
-    content = ''.join(item.content() if content is None else content)
-    self.push_calls.append((item, push_state, content))
-    if self.push_side_effect:
-      self.push_side_effect()
-
-  def contains(self, items):
-    self.contains_calls.append(items)
-    missing = {}
-    for item in items:
-      if item.digest in self.missing_hashes:
-        missing[item] = self.missing_hashes[item.digest]
-    return missing
-
-
-class UtilsTest(TestCase):
-  """Tests for helper methods in isolateserver file."""
-
-  def assertFile(self, path, contents):
-    self.assertTrue(fs.exists(path), 'File %s doesn\'t exist!' % path)
-    self.assertMultiLineEqual(contents, fs.open(path, 'rb').read())
-
-  def test_file_read(self):
-    # TODO(maruel): Write test for file_read generator (or remove it).
-    pass
-
-  def test_file_write(self):
-    # TODO(maruel): Write test for file_write generator (or remove it).
-    pass
-
-  def test_fileobj_path(self):
-    # No path on in-memory objects
-    self.assertIs(None, isolateserver.fileobj_path(io.BytesIO('hello')))
-
-    # Path on opened files
-    thisfile = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
-    f = fs.open(thisfile)
-    result = isolateserver.fileobj_path(f)
-    self.assertIsInstance(result, unicode)
-    self.assertSequenceEqual(result, thisfile)
-
-    # Path on temporary files
-    tf = tempfile.NamedTemporaryFile()
-    result = isolateserver.fileobj_path(tf)
-    self.assertIsInstance(result, unicode)
-    self.assertSequenceEqual(result, tf.name)
-
-    # No path on files which are no longer on the file system
-    tf = tempfile.NamedTemporaryFile(delete=False)
-    fs.unlink(tf.name.decode(sys.getfilesystemencoding()))
-    self.assertIs(None, isolateserver.fileobj_path(tf))
-
-  def test_fileobj_copy_simple(self):
-    inobj = io.BytesIO('hello')
-    outobj = io.BytesIO()
-
-    isolateserver.fileobj_copy(outobj, inobj)
-    self.assertEqual('hello', outobj.getvalue())
-
-  def test_fileobj_copy_partial(self):
-    inobj = io.BytesIO('adatab')
-    outobj = io.BytesIO()
-    inobj.read(1)
-
-    isolateserver.fileobj_copy(outobj, inobj, size=4)
-    self.assertEqual('data', outobj.getvalue())
-
-  def test_fileobj_copy_partial_file_no_size(self):
-    with self.assertRaises(IOError):
-      inobj = io.BytesIO('hello')
-      outobj = io.BytesIO()
-
-      inobj.read(1)
-      isolateserver.fileobj_copy(outobj, inobj)
-
-  def test_fileobj_copy_size_but_file_short(self):
-    with self.assertRaises(IOError):
-      inobj = io.BytesIO('hello')
-      outobj = io.BytesIO()
-
-      isolateserver.fileobj_copy(outobj, inobj, size=10)
-
-  def test_putfile(self):
-    tmpoutdir = None
-    tmpindir = None
-
-    try:
-      tmpindir = tempfile.mkdtemp(prefix='isolateserver_test')
-      infile = os.path.join(tmpindir, u'in')
-      with fs.open(infile, 'wb') as f:
-        f.write('data')
-
-      tmpoutdir = tempfile.mkdtemp(prefix='isolateserver_test')
-
-      # Copy as fileobj
-      fo = os.path.join(tmpoutdir, u'fo')
-      isolateserver.putfile(io.BytesIO('data'), fo)
-      self.assertEqual(True, fs.exists(fo))
-      self.assertEqual(False, fs.islink(fo))
-      self.assertFile(fo, 'data')
-
-      # Copy with partial fileobj
-      pfo = os.path.join(tmpoutdir, u'pfo')
-      fobj = io.BytesIO('adatab')
-      fobj.read(1)  # Read the 'a'
-      isolateserver.putfile(fobj, pfo, size=4)
-      self.assertEqual(True, fs.exists(pfo))
-      self.assertEqual(False, fs.islink(pfo))
-      self.assertEqual('b', fobj.read())
-      self.assertFile(pfo, 'data')
-
-      # Copy as not readonly
-      cp = os.path.join(tmpoutdir, u'cp')
-      with fs.open(infile, 'rb') as f:
-        isolateserver.putfile(f, cp, file_mode=0755)
-      self.assertEqual(True, fs.exists(cp))
-      self.assertEqual(False, fs.islink(cp))
-      self.assertFile(cp, 'data')
-
-      # Use hardlink
-      hl = os.path.join(tmpoutdir, u'hl')
-      with fs.open(infile, 'rb') as f:
-        isolateserver.putfile(f, hl, use_symlink=False)
-      self.assertEqual(True, fs.exists(hl))
-      self.assertEqual(False, fs.islink(hl))
-      self.assertFile(hl, 'data')
-
-      # Use symlink
-      sl = os.path.join(tmpoutdir, u'sl')
-      with fs.open(infile, 'rb') as f:
-        isolateserver.putfile(f, sl, use_symlink=True)
-      self.assertEqual(True, fs.exists(sl))
-      self.assertEqual(True, fs.islink(sl))
-      self.assertEqual('data', fs.open(sl, 'rb').read())
-      self.assertFile(sl, 'data')
-
-    finally:
-      if tmpindir:
-        file_path.rmtree(tmpindir)
-      if tmpoutdir:
-        file_path.rmtree(tmpoutdir)
-
-
-class StorageTest(TestCase):
-  """Tests for Storage methods."""
-
-  def assertEqualIgnoringOrder(self, a, b):
-    """Asserts that containers |a| and |b| contain same items."""
-    self.assertEqual(len(a), len(b))
-    self.assertEqual(set(a), set(b))
-
-  def get_push_state(self, storage, item):
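-    """Returns the push state that get_missing_items() reports for |item|."""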
-    missing = list(storage.get_missing_items([item]))
-    self.assertEqual(1, len(missing))
-    self.assertEqual(item, missing[0][0])
-    return missing[0][1]
-
-  def test_batch_items_for_check(self):
-    items = [
-      isolateserver.Item('foo', 12),
-      isolateserver.Item('blow', 0),
-      isolateserver.Item('bizz', 1222),
-      isolateserver.Item('buzz', 1223),
-    ]
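-    # Expect a single batch with all items, sorted from largest to smallest.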
-    expected = [
-      [items[3], items[2], items[0], items[1]],
-    ]
-    batches = list(isolateserver.batch_items_for_check(items))
-    self.assertEqual(batches, expected)
-
-  def test_get_missing_items(self):
-    items = [
-      isolateserver.Item('foo', 12),
-      isolateserver.Item('blow', 0),
-      isolateserver.Item('bizz', 1222),
-      isolateserver.Item('buzz', 1223),
-    ]
-    missing = {
-      items[2]: 123,
-      items[3]: 456,
-    }
-
-    storage_api = MockedStorageApi(
-        {item.digest: push_state for item, push_state in missing.iteritems()})
-    storage = isolateserver.Storage(storage_api)
-
-    # 'get_missing_items' is a generator yielding pairs; materialize its
-    # result in a dict.
-    result = dict(storage.get_missing_items(items))
-    self.assertEqual(missing, result)
-
-  def test_async_push(self):
-    for use_zip in (False, True):
-      item = FakeItem('1234567')
-      storage_api = MockedStorageApi(
-          {item.digest: 'push_state'},
-          namespace='default-gzip' if use_zip else 'default')
-      storage = isolateserver.Storage(storage_api)
-      channel = threading_utils.TaskChannel()
-      storage.async_push(channel, item, self.get_push_state(storage, item))
-      # Wait for push to finish.
-      pushed_item = channel.pull()
-      self.assertEqual(item, pushed_item)
-      # StorageApi.push was called with correct arguments.
-      self.assertEqual(
-          [(item, 'push_state', item.zipped if use_zip else item.data)],
-          storage_api.push_calls)
-
-  def test_async_push_generator_errors(self):
-    class FakeException(Exception):
-      pass
-
-    def faulty_generator():
-      yield 'Hi!'
-      raise FakeException('fake exception')
-
-    for use_zip in (False, True):
-      item = FakeItem('')
-      self.mock(item, 'content', faulty_generator)
-      storage_api = MockedStorageApi(
-          {item.digest: 'push_state'},
-          namespace='default-gzip' if use_zip else 'default')
-      storage = isolateserver.Storage(storage_api)
-      channel = threading_utils.TaskChannel()
-      storage.async_push(channel, item, self.get_push_state(storage, item))
-      with self.assertRaises(FakeException):
-        channel.pull()
-      # StorageApi's push should never complete when data can not be read.
-      self.assertEqual(0, len(storage_api.push_calls))
-
-  def test_async_push_upload_errors(self):
-    chunk = 'data_chunk'
-
-    def _generator():
-      yield chunk
-
-    def push_side_effect():
-      raise IOError('Nope')
-
-    # TODO(vadimsh): Retrying push when fetching data from a generator is
-    # broken now (it reuses the same generator instance when retrying).
-    content_sources = (
-        # generator(),
-        lambda: [chunk],
-    )
-
-    for use_zip in (False, True):
-      for source in content_sources:
-        item = FakeItem(chunk)
-        self.mock(item, 'content', source)
-        storage_api = MockedStorageApi(
-            {item.digest: 'push_state'},
-            push_side_effect,
-            namespace='default-gzip' if use_zip else 'default')
-        storage = isolateserver.Storage(storage_api)
-        channel = threading_utils.TaskChannel()
-        storage.async_push(channel, item, self.get_push_state(storage, item))
-        with self.assertRaises(IOError):
-          channel.pull()
-        # First initial attempt + all retries.
-        attempts = 1 + storage.net_thread_pool.RETRIES
-        # Single push attempt call arguments.
-        expected_push = (
-            item, 'push_state', item.zipped if use_zip else item.data)
-        # Ensure all pushes are attempted.
-        self.assertEqual(
-            [expected_push] * attempts, storage_api.push_calls)
-
-  def test_upload_tree(self):
-    files = {
-      u'/a': {
-        's': 100,
-        'h': 'hash_a',
-      },
-      u'/some/dir/b': {
-        's': 200,
-        'h': 'hash_b',
-      },
-      u'/another/dir/c': {
-        's': 300,
-        'h': 'hash_c',
-      },
-      u'/a_copy': {
-        's': 100,
-        'h': 'hash_a',
-      },
-    }
-    files_data = {k: 'x' * files[k]['s'] for k in files}
-    all_hashes = set(f['h'] for f in files.itervalues())
-    missing_hashes = {'hash_a': 'push a', 'hash_b': 'push b'}
-
-    # Files read by mocked_file_read.
-    read_calls = []
-
-    def mocked_file_read(filepath, chunk_size=0, offset=0):
-      self.assertIn(filepath, files_data)
-      read_calls.append(filepath)
-      return files_data[filepath]
-    self.mock(isolateserver, 'file_read', mocked_file_read)
-
-    storage_api = MockedStorageApi(missing_hashes)
-    storage = isolateserver.Storage(storage_api)
-    def mock_get_storage(base_url, namespace):
-      self.assertEqual('base_url', base_url)
-      self.assertEqual('some-namespace', namespace)
-      return storage
-    self.mock(isolateserver, 'get_storage', mock_get_storage)
-
-    isolateserver.upload_tree('base_url', files.iteritems(), 'some-namespace')
-
-    # Was reading only missing files.
-    self.assertEqualIgnoringOrder(
-        missing_hashes,
-        [files[path]['h'] for path in read_calls])
-    # 'contains' checked for existence of all files.
-    self.assertEqualIgnoringOrder(
-        all_hashes,
-        [i.digest for i in sum(storage_api.contains_calls, [])])
-    # Pushed only missing files.
-    self.assertEqualIgnoringOrder(
-        missing_hashes,
-        [call[0].digest for call in storage_api.push_calls])
-    # Pushing with correct data, size and push state.
-    for pushed_item, push_state, pushed_content in storage_api.push_calls:
-      filenames = [
-          name for name, metadata in files.iteritems()
-          if metadata['h'] == pushed_item.digest
-      ]
-      # If there are multiple files that map to the same hash, upload_tree
-      # chooses the first one.
-      filename = filenames[0]
-      self.assertEqual(filename, pushed_item.path)
-      self.assertEqual(files_data[filename], pushed_content)
-      self.assertEqual(missing_hashes[pushed_item.digest], push_state)
-
-
-class IsolateServerStorageApiTest(TestCase):
-  @staticmethod
-  def mock_fetch_request(server, namespace, item, data=None, offset=0):
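-    """Returns an expected (url, kwargs, response) tuple for a retrieve request."""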
-    compression = 'flate' if namespace.endswith(('-gzip', '-flate')) else ''
-    if data is None:
-      response = {'url': server + '/some/gs/url/%s/%s' % (namespace, item)}
-    else:
-      response = {'content': base64.b64encode(data[offset:])}
-    return (
-      server + '/api/isolateservice/v1/retrieve',
-      {
-          'data': {
-              'digest': item,
-              'namespace': {
-                  'compression': compression,
-                  'digest_hash': 'sha-1',
-                  'namespace': namespace,
-              },
-              'offset': offset,
-          },
-          'read_timeout': 60,
-      },
-      response,
-    )
-
-  @staticmethod
-  def mock_server_details_request(server):
-    return (
-        server + '/api/isolateservice/v1/server_details',
-        {'data': {}},
-        {'server_version': 'such a good version'}
-    )
-
-  @staticmethod
-  def mock_gs_request(server, namespace, item, data=None, offset=0,
-                      request_headers=None, response_headers=None):
-    response = data
-    return (
-        server + '/some/gs/url/%s/%s' % (namespace, item),
-        {},
-        response,
-        response_headers,
-    )
-
-  @staticmethod
-  def mock_contains_request(
-      server, namespace, request, response, compression=''):
-    url = server + '/api/isolateservice/v1/preupload'
-    digest_collection = dict(request, namespace={
-        'compression': compression,
-        'digest_hash': 'sha-1',
-        'namespace': namespace,
-    })
-    return (url, {'data': digest_collection}, response)
-
-  @staticmethod
-  def mock_upload_request(server, content, ticket, response=None):
-    url = server + '/api/isolateservice/v1/store_inline'
-    request = {'content': content, 'upload_ticket': ticket}
-    return (url, {'data': request}, response)
-
-  def test_server_capabilities_success(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    self.expected_requests([self.mock_server_details_request(server)])
-    storage = isolateserver.IsolateServer(server, namespace)
-    caps = storage._server_capabilities
-    self.assertEqual({'server_version': 'such a good version'}, caps)
-
-  def test_fetch_success(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = isolateserver_mock.hash_content(data)
-    self.expected_requests(
-        [self.mock_fetch_request(server, namespace, item, data)])
-    storage = isolateserver.IsolateServer(server, namespace)
-    fetched = ''.join(storage.fetch(item))
-    self.assertEqual(data, fetched)
-
-  def test_fetch_failure(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    item = isolateserver_mock.hash_content('something')
-    self.expected_requests(
-        [self.mock_fetch_request(server, namespace, item)[:-1] + (None,)])
-    storage = isolateserver.IsolateServer(server, namespace)
-    with self.assertRaises(IOError):
-      _ = ''.join(storage.fetch(item))
-
-  def test_fetch_offset_success(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = isolateserver_mock.hash_content(data)
-    offset = 200
-    size = len(data)
-
-    good_content_range_headers = [
-      'bytes %d-%d/%d' % (offset, size - 1, size),
-      'bytes %d-%d/*' % (offset, size - 1),
-    ]
-
-    for _content_range_header in good_content_range_headers:
-      self.expected_requests([self.mock_fetch_request(
-          server, namespace, item, data, offset=offset)])
-      storage = isolateserver.IsolateServer(server, namespace)
-      fetched = ''.join(storage.fetch(item, offset))
-      self.assertEqual(data[offset:], fetched)
-
-  def test_fetch_offset_bad_header(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = isolateserver_mock.hash_content(data)
-    offset = 200
-    size = len(data)
-
-    bad_content_range_headers = [
-      # Missing header.
-      None,
-      '',
-      # Bad format.
-      'not bytes %d-%d/%d' % (offset, size - 1, size),
-      'bytes %d-%d' % (offset, size - 1),
-      # Bad offset.
-      'bytes %d-%d/%d' % (offset - 1, size - 1, size),
-      # Incomplete chunk.
-      'bytes %d-%d/%d' % (offset, offset + 10, size),
-    ]
-
-    for content_range_header in bad_content_range_headers:
-      self.expected_requests([
-          self.mock_fetch_request(
-              server, namespace, item, offset=offset),
-          self.mock_gs_request(
-              server, namespace, item, data, offset=offset,
-              request_headers={'Range': 'bytes=%d-' % offset},
-              response_headers={'Content-Range': content_range_header}),
-      ])
-      storage = isolateserver.IsolateServer(server, namespace)
-      with self.assertRaises(IOError):
-        _ = ''.join(storage.fetch(item, offset))
-
-  def test_push_success(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = FakeItem(data)
-    contains_request = {'items': [
-        {'digest': item.digest, 'size': item.size, 'is_isolated': 0}]}
-    contains_response = {'items': [{'index': 0, 'upload_ticket': 'ticket!'}]}
-    requests = [
-      self.mock_contains_request(
-          server, namespace, contains_request, contains_response),
-      self.mock_upload_request(
-          server,
-          base64.b64encode(data),
-          contains_response['items'][0]['upload_ticket'],
-          {'ok': True},
-      ),
-    ]
-    self.expected_requests(requests)
-    storage = isolateserver.IsolateServer(server, namespace)
-    missing = storage.contains([item])
-    self.assertEqual([item], missing.keys())
-    push_state = missing[item]
-    storage.push(item, push_state, [data])
-    self.assertTrue(push_state.uploaded)
-    self.assertTrue(push_state.finalized)
-
-  def test_push_failure_upload(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = FakeItem(data)
-    contains_request = {'items': [
-        {'digest': item.digest, 'size': item.size, 'is_isolated': 0}]}
-    contains_response = {'items': [{'index': 0, 'upload_ticket': 'ticket!'}]}
-    requests = [
-      self.mock_contains_request(
-          server, namespace, contains_request, contains_response),
-      self.mock_upload_request(
-          server,
-          base64.b64encode(data),
-          contains_response['items'][0]['upload_ticket'],
-      ),
-    ]
-    self.expected_requests(requests)
-    storage = isolateserver.IsolateServer(server, namespace)
-    missing = storage.contains([item])
-    self.assertEqual([item], missing.keys())
-    push_state = missing[item]
-    with self.assertRaises(IOError):
-      storage.push(item, push_state, [data])
-    self.assertFalse(push_state.uploaded)
-    self.assertFalse(push_state.finalized)
-
-  def test_push_failure_finalize(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    data = ''.join(str(x) for x in xrange(1000))
-    item = FakeItem(data)
-    contains_request = {'items': [
-        {'digest': item.digest, 'size': item.size, 'is_isolated': 0}]}
-    contains_response = {'items': [
-        {'index': 0,
-         'gs_upload_url': server + '/FAKE_GCS/whatevs/1234',
-         'upload_ticket': 'ticket!'}]}
-    requests = [
-      self.mock_contains_request(
-          server, namespace, contains_request, contains_response),
-      (
-        server + '/FAKE_GCS/whatevs/1234',
-        {
-          'data': data,
-          'content_type': 'application/octet-stream',
-          'method': 'PUT',
-          'headers': {'Cache-Control': 'public, max-age=31536000'},
-        },
-        '',
-        None,
-      ),
-      (
-        server + '/api/isolateservice/v1/finalize_gs_upload',
-        {'data': {'upload_ticket': 'ticket!'}},
-        None,
-      ),
-    ]
-    self.expected_requests(requests)
-    storage = isolateserver.IsolateServer(server, namespace)
-    missing = storage.contains([item])
-    self.assertEqual([item], missing.keys())
-    push_state = missing[item]
-    with self.assertRaises(IOError):
-      storage.push(item, push_state, [data])
-    self.assertTrue(push_state.uploaded)
-    self.assertFalse(push_state.finalized)
-
-  def test_contains_success(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    files = [
-      FakeItem('1', high_priority=True),
-      FakeItem('2' * 100),
-      FakeItem('3' * 200),
-    ]
-    request = {'items': [
-        {'digest': f.digest, 'is_isolated': not i, 'size': f.size}
-        for i, f in enumerate(files)]}
-    response = {
-        'items': [
-            {'index': str(i), 'upload_ticket': 'ticket_%d' % i}
-            for i in xrange(3)],
-    }
-    missing = [
-        files[0],
-        files[1],
-        files[2],
-    ]
-    self._requests = [
-      self.mock_contains_request(server, namespace, request, response),
-    ]
-    storage = isolateserver.IsolateServer(server, namespace)
-    result = storage.contains(files)
-    self.assertEqual(set(missing), set(result.keys()))
-    for i, (_item, push_state) in enumerate(result.iteritems()):
-      self.assertEqual(
-          push_state.upload_url, 'api/isolateservice/v1/store_inline')
-      self.assertEqual(push_state.finalize_url, None)
-
-  def test_contains_network_failure(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    self.expected_requests([self.mock_contains_request(
-        server, namespace, {'items': []}, None)])
-    storage = isolateserver.IsolateServer(server, namespace)
-    with self.assertRaises(isolated_format.MappingError):
-      storage.contains([])
-
-  def test_contains_format_failure(self):
-    server = 'http://example.com'
-    namespace = 'default'
-    self.expected_requests([self.mock_contains_request(
-        server, namespace, {'items': []}, None)])
-    storage = isolateserver.IsolateServer(server, namespace)
-    with self.assertRaises(isolated_format.MappingError):
-      storage.contains([])
-
-
-class IsolateServerStorageSmokeTest(unittest.TestCase):
-  """Tests public API of Storage class using file system as a store."""
-
-  def setUp(self):
-    super(IsolateServerStorageSmokeTest, self).setUp()
-    self.tempdir = tempfile.mkdtemp(prefix=u'isolateserver')
-    self.server = isolateserver_mock.MockIsolateServer()
-
-  def tearDown(self):
-    try:
-      self.server.close_start()
-      file_path.rmtree(self.tempdir)
-      self.server.close_end()
-    finally:
-      super(IsolateServerStorageSmokeTest, self).tearDown()
-
-  def run_synchronous_push_test(self, namespace):
-    storage = isolateserver.get_storage(self.server.url, namespace)
-
-    # Items to upload.
-    items = [isolateserver.BufferItem('item %d' % i) for i in xrange(10)]
-
-    # Storage is empty, all items are missing.
-    missing = dict(storage.get_missing_items(items))
-    self.assertEqual(set(items), set(missing))
-
-    # Push, one by one.
-    for item, push_state in missing.iteritems():
-      storage.push(item, push_state)
-
-    # All items are there now.
-    self.assertFalse(dict(storage.get_missing_items(items)))
-
-  def test_synchronous_push(self):
-    self.run_synchronous_push_test('default')
-
-  def test_synchronous_push_gzip(self):
-    self.run_synchronous_push_test('default-gzip')
-
-  def run_upload_items_test(self, namespace):
-    storage = isolateserver.get_storage(self.server.url, namespace)
-
-    # Items to upload.
-    items = [isolateserver.BufferItem('item %d' % i) for i in xrange(10)]
-
-    # Do it.
-    uploaded = storage.upload_items(items)
-    self.assertEqual(set(items), set(uploaded))
-
-    # All items are there now.
-    self.assertFalse(dict(storage.get_missing_items(items)))
-
-    # Now ensure upload_items skips existing items.
-    more = [isolateserver.BufferItem('more item %d' % i) for i in xrange(10)]
-
-    # Uploaded only |more|.
-    uploaded = storage.upload_items(items + more)
-    self.assertEqual(set(more), set(uploaded))
-
-  def test_upload_items(self):
-    self.run_upload_items_test('default')
-
-  def test_upload_items_gzip(self):
-    self.run_upload_items_test('default-gzip')
-
-  def run_push_and_fetch_test(self, namespace):
-    storage = isolateserver.get_storage(self.server.url, namespace)
-
-    # Upload items.
-    items = [isolateserver.BufferItem('item %d' % i) for i in xrange(10)]
-    uploaded = storage.upload_items(items)
-    self.assertEqual(set(items), set(uploaded))
-
-    # Fetch them all back into local memory cache.
-    cache = isolateserver.MemoryCache()
-    queue = isolateserver.FetchQueue(storage, cache)
-
-    # Start fetching.
-    pending = set()
-    for item in items:
-      pending.add(item.digest)
-      queue.add(item.digest)
-
-    # Wait for fetch to complete.
-    while pending:
-      fetched = queue.wait(pending)
-      pending.discard(fetched)
-
-    # Ensure fetched same data as was pushed.
-    actual = []
-    for i in items:
-      with cache.getfileobj(i.digest) as f:
-        actual.append(f.read())
-
-    self.assertEqual([i.buffer for i in items], actual)
-
-  def test_push_and_fetch(self):
-    self.run_push_and_fetch_test('default')
-
-  def test_push_and_fetch_gzip(self):
-    self.run_push_and_fetch_test('default-gzip')
-
-  if sys.maxsize == (2**31) - 1:
-    def test_archive_multiple_huge_file(self):
-      self.server.discard_content()
-      # Create multiple files totalling 2.5 GiB. This test exists to stress
-      # the virtual address space on 32-bit systems. Use real files since the
-      # data wouldn't fit in memory by definition.
-      # Sadly, this makes the test very slow, so it's only run on 32-bit
-      # platforms, since it's known to work on 64-bit platforms anyway.
-      #
-      # It's a fairly slow test, well over 15 seconds.
-      files = {}
-      size = 512 * 1024 * 1024
-      for i in xrange(5):
-        name = '512mb_%d.%s' % (i, isolateserver.ALREADY_COMPRESSED_TYPES[0])
-        p = os.path.join(self.tempdir, name)
-        with open(p, 'wb') as f:
-          # Write 512mb.
-          h = hashlib.sha1()
-          data = os.urandom(1024)
-          for _ in xrange(size / 1024):
-            f.write(data)
-            h.update(data)
-          os.chmod(p, 0600)
-          files[p] = {
-            'h': h.hexdigest(),
-            'm': 0600,
-            's': size,
-          }
-          if sys.platform == 'win32':
-            files[p].pop('m')
-
-      # upload_tree() is a thin wrapper around Storage.
-      isolateserver.upload_tree(self.server.url, files.items(), 'testing')
-      expected = {'testing': {f['h']: '<skipped>' for f in files.itervalues()}}
-      self.assertEqual(expected, self.server.contents)
-
-
-class IsolateServerDownloadTest(TestCase):
-
-  def _url_read_json(self, url, **kwargs):
-    """Current _url_read_json mock doesn't respect identical URLs."""
-    logging.warn('url_read_json(%s, %s)', url[:500], str(kwargs)[:500])
-    with self._lock:
-      if not self._requests:
-        return None
-      if not self._flagged_requests:
-        self._flagged_requests = [0 for _element in self._requests]
-      # Ignore 'stream' argument, it's not important for these tests.
-      kwargs.pop('stream', None)
-      for i, (new_url, expected_kwargs, result) in enumerate(self._requests):
-        if new_url == url and expected_kwargs == kwargs:
-          self._flagged_requests[i] = 1
-          return result
-    self.fail('Unknown request %s' % url)
-
-  def setUp(self):
-    super(IsolateServerDownloadTest, self).setUp()
-    self._flagged_requests = []
-    self.mock(logging_utils, 'prepare_logging', lambda *_: None)
-    self.mock(logging_utils, 'set_console_level', lambda *_: None)
-
-  def tearDown(self):
-    if all(self._flagged_requests):
-      self._requests = []
-    super(IsolateServerDownloadTest, self).tearDown()
-
-  def test_download_two_files(self):
-    # Test downloading two files.
-    actual = {}
-    def out(key, generator):
-      actual[key] = ''.join(generator)
-    self.mock(isolateserver, 'file_write', out)
-    server = 'http://example.com'
-    requests = [
-      (
-        server + '/api/isolateservice/v1/retrieve',
-        {
-            'data': {
-                'digest': h.encode('utf-8'),
-                'namespace': {
-                    'namespace': 'default-gzip',
-                    'digest_hash': 'sha-1',
-                    'compression': 'flate',
-                },
-                'offset': 0,
-            },
-            'read_timeout': 60,
-        },
-        {'content': base64.b64encode(zlib.compress(v))},
-      ) for h, v in [('sha-1', 'Coucou'), ('sha-2', 'Bye Bye')]
-    ]
-    self.expected_requests(requests)
-    cmd = [
-      'download',
-      '--isolate-server', server,
-      '--target', net_utils.ROOT_DIR,
-      '--file', 'sha-1', 'path/to/a',
-      '--file', 'sha-2', 'path/to/b',
-    ]
-    self.assertEqual(0, isolateserver.main(cmd))
-    expected = {
-      os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou',
-      os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye',
-    }
-    self.assertEqual(expected, actual)
-
-  def test_download_isolated_simple(self):
-    # Test downloading an isolated tree.
-    actual = {}
-    def putfile_mock(
-        srcfileobj, dstpath, file_mode=None, size=-1, use_symlink=False):
-      actual[dstpath] = srcfileobj.read()
-    self.mock(isolateserver, 'putfile', putfile_mock)
-    self.mock(os, 'makedirs', lambda _: None)
-    server = 'http://example.com'
-    files = {
-      os.path.join('a', 'foo'): 'Content',
-      'b': 'More content',
-    }
-    isolated = {
-      'command': ['Absurb', 'command'],
-      'relative_cwd': 'a',
-      'files': dict(
-          (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)})
-          for k, v in files.iteritems()),
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
-    isolated_hash = isolateserver_mock.hash_content(isolated_data)
-    requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()]
-    requests.append((isolated_hash, isolated_data))
-    requests = [
-      (
-        server + '/api/isolateservice/v1/retrieve',
-        {
-            'data': {
-                'digest': h.encode('utf-8'),
-                'namespace': {
-                    'namespace': 'default-gzip',
-                    'digest_hash': 'sha-1',
-                    'compression': 'flate',
-                },
-                'offset': 0,
-            },
-            'read_timeout': 60,
-        },
-        {'content': base64.b64encode(zlib.compress(v))},
-      ) for h, v in requests
-    ]
-    cmd = [
-      'download',
-      '--isolate-server', server,
-      '--target', self.tempdir,
-      '--isolated', isolated_hash,
-    ]
-    self.expected_requests(requests)
-    self.assertEqual(0, isolateserver.main(cmd))
-    expected = dict(
-        (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
-    self.assertEqual(expected, actual)
-    expected_stdout = (
-        'To run this test please run from the directory %s:\n  Absurb command\n'
-        % os.path.join(self.tempdir, 'a'))
-    self.checkOutput(expected_stdout, '')
-
-  def test_download_isolated_archive(self):
-    # Test downloading an isolated tree.
-    actual = {}
-    def putfile_mock(
-        srcfileobj, dstpath, file_mode=None, size=-1, use_symlink=False):
-      actual[dstpath] = srcfileobj.read(size)
-    self.mock(isolateserver, 'putfile', putfile_mock)
-    self.mock(os, 'makedirs', lambda _: None)
-    server = 'http://example.com'
-
-    files = {
-      os.path.join('a', 'foo'): 'Content',
-      'b': 'More content',
-      'c': 'Even more content!',
-    }
-
-    archive = (
-      # ar file header
-      '!<arch>\n'
-      # File 1 -------------------------
-      # (16 bytes) filename len
-      '#1/5            '
-      # file metadata
-      '1447140471  1000  1000  100640  '
-      # (10 bytes) Data size
-      '12        '
-      # (2 bytes) File magic
-      '\x60\n'
-      # (5 bytes) File name
-      'a/foo'
-      # (7 bytes) File data
-      'Content'
-      # File 2 -------------------------
-      # (16 bytes) filename
-      'b               '
-      # file metadata
-      '1447140471  1000  1000  100640  '
-      # (10 bytes) Data size
-      '12        '
-      # (2 bytes) File magic
-      '\x60\n'
-      # (12 bytes) File data
-      'More content'
-      '')
-
-    isolated = {
-      'command': ['Absurb', 'command'],
-      'relative_cwd': 'a',
-      'files': {
-        'archive1': {
-          'h': isolateserver_mock.hash_content(archive),
-          's': len(archive),
-          't': 'ar',
-        },
-        'c': {
-          'h': isolateserver_mock.hash_content(files['c']),
-          's': len(files['c']),
-        },
-      },
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
-    isolated_hash = isolateserver_mock.hash_content(isolated_data)
-    requests = [
-      (isolated['files']['archive1']['h'], archive),
-      (isolated['files']['c']['h'], files['c']),
-    ]
-    requests.append((isolated_hash, isolated_data))
-    requests = [
-      (
-        server + '/api/isolateservice/v1/retrieve',
-        {
-            'data': {
-                'digest': h.encode('utf-8'),
-                'namespace': {
-                    'namespace': 'default-gzip',
-                    'digest_hash': 'sha-1',
-                    'compression': 'flate',
-                },
-                'offset': 0,
-            },
-            'read_timeout': 60,
-        },
-        {'content': base64.b64encode(zlib.compress(v))},
-      ) for h, v in requests
-    ]
-    cmd = [
-      'download',
-      '--isolate-server', server,
-      '--target', self.tempdir,
-      '--isolated', isolated_hash,
-    ]
-    self.expected_requests(requests)
-    self.assertEqual(0, isolateserver.main(cmd))
-    expected = dict(
-        (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
-    self.assertEqual(expected, actual)
-    expected_stdout = (
-        'To run this test please run from the directory %s:\n  Absurb command\n'
-        % os.path.join(self.tempdir, 'a'))
-    self.checkOutput(expected_stdout, '')
-
-
-def get_storage(_isolate_server, namespace):
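-  """Returns a fake Storage; upload_items() reports only items[1] as new."""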
-  class StorageFake(object):
-    def __enter__(self, *_):
-      return self
-
-    def __exit__(self, *_):
-      pass
-
-    @property
-    def hash_algo(self):  # pylint: disable=R0201
-      return isolated_format.get_hash_algo(namespace)
-
-    @staticmethod
-    def upload_items(items):
-      # Always returns the second item as not present.
-      return [items[1]]
-  return StorageFake()
-
-
-class TestArchive(TestCase):
-  @staticmethod
-  def get_isolateserver_prog():
-    """Returns 'isolateserver.py' or 'isolateserver.pyc'."""
-    return os.path.basename(sys.modules[isolateserver.__name__].__file__)
-
-  def test_archive_no_server(self):
-    with self.assertRaises(SystemExit):
-      isolateserver.main(['archive', '.'])
-    prog = self.get_isolateserver_prog()
-    self.checkOutput(
-        '',
-        'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
-        'from stdin\n\n'
-        '%(prog)s: error: --isolate-server is required.\n' % {'prog': prog})
-
-  def test_archive_duplicates(self):
-    with self.assertRaises(SystemExit):
-      isolateserver.main(
-          [
-            'archive', '--isolate-server', 'https://localhost:1',
-            # Effective dupes.
-            '.', os.getcwd(),
-          ])
-    prog = self.get_isolateserver_prog()
-    self.checkOutput(
-        '',
-        'Usage: %(prog)s archive [options] <file1..fileN> or - to read '
-        'from stdin\n\n'
-        '%(prog)s: error: Duplicate entries found.\n' % {'prog': prog})
-
-  def test_archive_files(self):
-    self.mock(isolateserver, 'get_storage', get_storage)
-    self.make_tree(CONTENTS)
-    f = ['empty_file.txt', 'small_file.txt']
-    os.chdir(self.tempdir)
-    isolateserver.main(
-        ['archive', '--isolate-server', 'https://localhost:1'] + f)
-    self.checkOutput(
-        'da39a3ee5e6b4b0d3255bfef95601890afd80709 empty_file.txt\n'
-        '0491bd1da8087ad10fcdd7c9634e308804b72158 small_file.txt\n',
-        '')
-
-  def help_test_archive(self, cmd_line_prefix):
-    self.mock(isolateserver, 'get_storage', get_storage)
-    self.make_tree(CONTENTS)
-    isolateserver.main(cmd_line_prefix + [self.tempdir])
-    # If you modify isolated_format.ISOLATED_FILE_VERSION, you'll have to update
-    # the hash below. Sorry about that but this ensures the .isolated format is
-    # stable.
-    isolated = {
-      'algo': 'sha-1',
-      'files': {},
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    for k, v in CONTENTS.iteritems():
-      isolated['files'][k] = {
-        'h': isolateserver_mock.hash_content(v),
-        's': len(v),
-      }
-      if sys.platform != 'win32':
-        isolated['files'][k]['m'] = 0600
-    isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
-    isolated_hash = isolateserver_mock.hash_content(isolated_data)
-    self.checkOutput(
-        '%s %s\n' % (isolated_hash, self.tempdir),
-        '')
-
-  def test_archive_directory(self):
-    self.help_test_archive(['archive', '-I', 'https://localhost:1'])
-
-  def test_archive_directory_envvar(self):
-    with test_utils.EnvVars({'ISOLATE_SERVER': 'https://localhost:1'}):
-      self.help_test_archive(['archive'])
-
-
-class DiskCacheTest(TestCase):
-  def setUp(self):
-    super(DiskCacheTest, self).setUp()
-    # If this fails on Windows, please rerun this test as an elevated user
-    # with administrator access rights.
-    self.assertEqual(True, file_path.enable_symlink())
-
-    self._algo = isolated_format.get_hash_algo('default-gzip')
-    self._free_disk = 1000
-    # Max: 100 bytes, 2 items
-    # Min free disk: 1000 bytes.
-    self._policies = isolateserver.CachePolicies(100, 1000, 2)
-    def get_free_space(p):
-      self.assertEqual(p, self.tempdir)
-      return self._free_disk
-    self.mock(file_path, 'get_free_space', get_free_space)
-    # TODO(maruel): Test the following.
-    #cache.touch()
-
-  def get_cache(self):
-    return isolateserver.DiskCache(self.tempdir, self._policies, self._algo)
-
-  def to_hash(self, content):
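-    """Returns the (digest, content) pair for |content|."""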
-    return self._algo(content).hexdigest(), content
-
-  def test_read_evict(self):
-    self._free_disk = 1100
-    h_a = self.to_hash('a')[0]
-    with self.get_cache() as cache:
-      cache.write(h_a, 'a')
-      with cache.getfileobj(h_a) as f:
-        self.assertEqual('a', f.read())
-
-    with self.get_cache() as cache:
-      cache.evict(h_a)
-      with self.assertRaises(isolateserver.CacheMiss):
-        cache.getfileobj(h_a)
-
-  def test_policies_free_disk(self):
-    with self.assertRaises(isolateserver.Error):
-      self.get_cache().write(*self.to_hash('a'))
-
-  def test_policies_fit(self):
-    self._free_disk = 1100
-    self.get_cache().write(*self.to_hash('a'*100))
-
-  def test_policies_too_much(self):
-    # Cache (size and # items) is not enforced while adding items but free disk
-    # is.
-    self._free_disk = 1004
-    cache = self.get_cache()
-    for i in ('a', 'b', 'c', 'd'):
-      cache.write(*self.to_hash(i))
-    # Mapping more content than the amount of free disk required.
-    with self.assertRaises(isolateserver.Error):
-      cache.write(*self.to_hash('e'))
-
-  def test_cleanup(self):
-    # Inject an item without a state.json. It will be deleted on cleanup.
-    h_a = self.to_hash('a')[0]
-    isolateserver.file_write(os.path.join(self.tempdir, h_a), 'a')
-    cache = self.get_cache()
-    self.assertEqual([], sorted(cache._lru._items.iteritems()))
-    self.assertEqual(
-        sorted([h_a, u'state.json']), sorted(os.listdir(self.tempdir)))
-    cache.cleanup()
-    self.assertEqual([u'state.json'], os.listdir(self.tempdir))
-
-  def test_policies_active_trimming(self):
-    # Start with a larger cache, add many objects.
-    # Reload the cache with smaller policies, the cache should be trimmed on
-    # load.
-    h_a = self.to_hash('a')[0]
-    h_b = self.to_hash('b')[0]
-    h_c = self.to_hash('c')[0]
-    h_large, large = self.to_hash('b' * 99)
-
-    # Max policies is 100 bytes, 2 items, 1000 bytes free space.
-    self._free_disk = 1101
-    with self.get_cache() as cache:
-      cache.write(h_a, 'a')
-      cache.write(h_large, large)
-      # Cache (size and # items) is not enforced while adding items. The
-      # rationale is that a task may request more data than the size of the
-      # cache policies. As long as there is free space, this is fine.
-      cache.write(h_b, 'b')
-      expected = sorted(((h_a, 1), (h_large, len(large)), (h_b, 1)))
-      self.assertEqual(expected, sorted(cache._lru._items.iteritems()))
-      self.assertEqual(h_a, cache._protected)
-      self.assertEqual(1000, cache._free_disk)
-      self.assertEqual(0, cache.initial_number_items)
-      self.assertEqual(0, cache.initial_size)
-      # Free disk is enforced, because otherwise we assume the task wouldn't
-      # be able to start. In this case, it throws an exception since all items
-      # are protected. The item is added since it's detected after the fact.
-      with self.assertRaises(isolateserver.Error):
-        cache.write(h_c, 'c')
-
-    # At this point, after the implicit trim in __exit__(), h_a and h_large were
-    # evicted.
-    self.assertEqual(
-        sorted([h_b, h_c, u'state.json']), sorted(os.listdir(self.tempdir)))
-
-    # Allow 3 items and 101 bytes so h_large is kept.
-    self._policies = isolateserver.CachePolicies(101, 1000, 3)
-    with self.get_cache() as cache:
-      cache.write(h_large, large)
-      self.assertEqual(2, cache.initial_number_items)
-      self.assertEqual(2, cache.initial_size)
-
-    self.assertEqual(
-        sorted([h_b, h_c, h_large, u'state.json']),
-        sorted(os.listdir(self.tempdir)))
-
-    # Assert that trimming is done in constructor too.
-    self._policies = isolateserver.CachePolicies(100, 1000, 2)
-    with self.get_cache() as cache:
-      expected = collections.OrderedDict([(h_c, 1), (h_large, len(large))])
-      self.assertEqual(expected, cache._lru._items)
-      self.assertEqual(None, cache._protected)
-      self.assertEqual(1101, cache._free_disk)
-      self.assertEqual(2, cache.initial_number_items)
-      self.assertEqual(100, cache.initial_size)
-
-
-def clear_env_vars():
-  for e in ('ISOLATE_DEBUG', 'ISOLATE_SERVER'):
-    os.environ.pop(e, None)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  logging.basicConfig(
-      level=(logging.DEBUG if '-v' in sys.argv else logging.CRITICAL))
-  clear_env_vars()
-  unittest.main()
diff --git a/tools/swarming_client/tests/large_test.py b/tools/swarming_client/tests/large_test.py
deleted file mode 100755
index 3e85d45..0000000
--- a/tools/swarming_client/tests/large_test.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-import os
-import random
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from utils import large
-
-
-class LargeTest(unittest.TestCase):
-  def test_1m_1(self):
-    array = range(1000000)
-    data = large.pack(array)
-    self.assertGreater(1000, len(data))
-    self.assertEqual(array, large.unpack(data))
-
-  def test_1m_1000(self):
-    array = [i*1000 for i in xrange(1000000)]
-    data = large.pack(array)
-    self.assertGreater(2000, len(data))
-    self.assertEqual(array, large.unpack(data))
-
-  def test_1m_pseudo(self):
-    # Compresses a pseudo-random sequence. Still compresses very well.
-    random.seed(0)
-    array = sorted(random.randint(0, 1000000) for _ in xrange(1000000))
-    data = large.pack(array)
-    self.assertGreater(302000, len(data))
-    self.assertEqual(array, large.unpack(data))
-
-  def test_empty(self):
-    self.assertEqual('', large.pack([]))
-    self.assertEqual([], large.unpack(''))
-
-
-if __name__ == '__main__':
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/logging_utils_test.py b/tools/swarming_client/tests/logging_utils_test.py
deleted file mode 100755
index 1232f5e..0000000
--- a/tools/swarming_client/tests/logging_utils_test.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-import os
-import subprocess
-import sys
-import tempfile
-import unittest
-import re
-
-THIS_FILE = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
-sys.path.insert(0, os.path.dirname(os.path.dirname(THIS_FILE)))
-
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import logging_utils
-
-
-# PID YYYY-MM-DD HH:MM:SS.MMM
-_LOG_HEADER = r'^%d \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d' % os.getpid()
-_LOG_HEADER_PID = r'^\d+ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d'
-
-
-_PHASE = 'LOGGING_UTILS_TESTS_PHASE'
-
-
-def call(phase, cwd):
-  """Calls itself back."""
-  env = os.environ.copy()
-  env[_PHASE] = phase
-  return subprocess.call([sys.executable, '-u', THIS_FILE], env=env, cwd=cwd)
-
-
-class Test(unittest.TestCase):
-  def setUp(self):
-    super(Test, self).setUp()
-    self.tmp = tempfile.mkdtemp(prefix='logging_utils')
-
-  def tearDown(self):
-    try:
-      file_path.rmtree(self.tmp)
-    finally:
-      super(Test, self).tearDown()
-
-  def test_capture(self):
-    root = logging.RootLogger(logging.DEBUG)
-    with logging_utils.CaptureLogs('foo', root) as log:
-      root.debug('foo')
-      result = log.read()
-    expected = _LOG_HEADER + ': DEBUG foo\n$'
-    if sys.platform == 'win32':
-      expected = expected.replace('\n', '\r\n')
-    self.assertTrue(re.match(expected, result), (expected, result))
-
-  def test_prepare_logging(self):
-    root = logging.RootLogger(logging.DEBUG)
-    filepath = os.path.join(self.tmp, 'test.log')
-    logging_utils.prepare_logging(filepath, root)
-    root.debug('foo')
-    with open(filepath, 'rb') as f:
-      result = f.read()
-    # It'd be nice to figure out a way to ensure it's properly in UTC but it's
-    # tricky to do reliably.
-    expected = _LOG_HEADER + ' D: foo\n$'
-    self.assertTrue(re.match(expected, result), (expected, result))
-
-  def test_rotating(self):
-    # Create a rotating log. Create a subprocess then delete the file. Make sure
-    # nothing blows up.
-    # Everything is done in a child process because the called functions mutate
-    # the global state.
-    self.assertEqual(0, call('test_rotating_phase_1', cwd=self.tmp))
-    self.assertEqual({'shared.1.log'}, set(os.listdir(self.tmp)))
-    with open(os.path.join(self.tmp, 'shared.1.log'), 'rb') as f:
-      lines = f.read().splitlines()
-    expected = [
-      r' I: Parent1',
-      r' I: Child1',
-      r' I: Child2',
-      r' I: Parent2',
-    ]
-    for e, l in zip(expected, lines):
-      ex = _LOG_HEADER_PID + e + '$'
-      self.assertTrue(re.match(ex, l), (ex, l))
-    self.assertEqual(len(expected), len(lines))
-
-
-def test_rotating_phase_1():
-  logging_utils.prepare_logging('shared.log')
-  logging.info('Parent1')
-  r = call('test_rotating_phase_2', None)
-  logging.info('Parent2')
-  return r
-
-
-def test_rotating_phase_2():
-  # Simulate rotating the log.
-  logging_utils.prepare_logging('shared.log')
-  logging.info('Child1')
-  os.rename('shared.log', 'shared.1.log')
-  logging.info('Child2')
-  return 0
-
-
-def main():
-  phase = os.environ.get(_PHASE)
-  if phase:
-    return getattr(sys.modules[__name__], phase)()
-  verbose = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if verbose else logging.ERROR)
-  unittest.main()
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/lru_test.py b/tools/swarming_client/tests/lru_test.py
deleted file mode 100755
index 795c110..0000000
--- a/tools/swarming_client/tests/lru_test.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import json
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from utils import lru
-
-
-class LRUDictTest(unittest.TestCase):
-  @staticmethod
-  def prepare_lru_dict(keys):
-    """Returns new LRUDict with given |keys| added one by one."""
-    lru_dict = lru.LRUDict()
-    for key in keys:
-      lru_dict.add(key, None)
-    return lru_dict
-
-  def assert_order(self, lru_dict, expected_keys):
-    """Asserts order of keys in |lru_dict| is |expected_keys|.
-
-    expected_keys[0] is the expected oldest key, expected_keys[-1] the newest.
-
-    Destroys |lru_dict| state in the process.
-    """
-    # Check keys iteration works.
-    self.assertEqual(lru_dict.keys_set(), set(expected_keys))
-
-    # Check pop_oldest returns keys in expected order.
-    actual_keys = []
-    while lru_dict:
-      oldest_key, _ = lru_dict.pop_oldest()
-      actual_keys.append(oldest_key)
-    self.assertEqual(actual_keys, expected_keys)
-
-  def assert_same_data(self, lru_dict, regular_dict):
-    """Asserts that given |lru_dict| contains same data as |regular_dict|."""
-    self.assertEqual(lru_dict.keys_set(), set(regular_dict.keys()))
-    self.assertEqual(set(lru_dict.itervalues()), set(regular_dict.values()))
-
-    for k, v in regular_dict.items():
-      self.assertEqual(lru_dict.get(k), v)
-
-  def test_basic_dict_funcs(self):
-    lru_dict = lru.LRUDict()
-
-    # Add a bunch.
-    data = {1: 'one', 2: 'two', 3: 'three'}
-    for k, v in data.items():
-      lru_dict.add(k, v)
-    # Check it's there.
-    self.assert_same_data(lru_dict, data)
-
-    # Replace value.
-    lru_dict.add(1, 'one!!!')
-    data[1] = 'one!!!'
-    self.assert_same_data(lru_dict, data)
-
-    # Check pop works.
-    self.assertEqual(lru_dict.pop(2), 'two')
-    data.pop(2)
-    self.assert_same_data(lru_dict, data)
-
-    # Pop missing key.
-    with self.assertRaises(KeyError):
-      lru_dict.pop(2)
-
-    # Touch has no effect on set of keys and values.
-    lru_dict.touch(1)
-    self.assert_same_data(lru_dict, data)
-
-    # Touch fails on missing key.
-    with self.assertRaises(KeyError):
-      lru_dict.touch(22)
-
-  def test_magic_methods(self):
-    # Check __nonzero__, __len__ and __contains__ for empty dict.
-    lru_dict = lru.LRUDict()
-    self.assertFalse(lru_dict)
-    self.assertEqual(len(lru_dict), 0)
-    self.assertFalse(1 in lru_dict)
-
-    # Dict with one item.
-    lru_dict.add(1, 'one')
-    self.assertTrue(lru_dict)
-    self.assertEqual(len(lru_dict), 1)
-    self.assertTrue(1 in lru_dict)
-    self.assertFalse(2 in lru_dict)
-
-  def test_order(self):
-    data = [1, 2, 3]
-
-    # Edge cases.
-    self.assert_order(self.prepare_lru_dict([]), [])
-    self.assert_order(self.prepare_lru_dict([1]), [1])
-
-    # No touches.
-    self.assert_order(self.prepare_lru_dict(data), data)
-
-    # Touching the newest item is a no-op.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.touch(3)
-    self.assert_order(lru_dict, data)
-
-    # Touch to move to newest.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.touch(2)
-    self.assert_order(lru_dict, [1, 3, 2])
-
-    # Pop newest.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.pop(1)
-    self.assert_order(lru_dict, [2, 3])
-
-    # Pop in the middle.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.pop(2)
-    self.assert_order(lru_dict, [1, 3])
-
-    # Pop oldest.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.pop(3)
-    self.assert_order(lru_dict, [1, 2])
-
-    # Add oldest.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.batch_insert_oldest([(4, 4), (5, 5)])
-    self.assert_order(lru_dict, [4, 5] + data)
-
-    # Add newest.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.add(4, 4)
-    self.assert_order(lru_dict, data + [4])
-
-  def test_load_save(self):
-    def save_and_load(lru_dict):
-      handle, tmp_name = tempfile.mkstemp(prefix=u'lru_test')
-      os.close(handle)
-      try:
-        lru_dict.save(tmp_name)
-        return lru.LRUDict.load(tmp_name)
-      finally:
-        try:
-          os.unlink(tmp_name)
-        except OSError:
-          pass
-
-    data = [1, 2, 3]
-
-    # Edge case.
-    empty = save_and_load(lru.LRUDict())
-    self.assertFalse(empty)
-
-    # Normal flow.
-    lru_dict = save_and_load(self.prepare_lru_dict(data))
-    self.assert_order(lru_dict, data)
-
-    # After touches.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.touch(2)
-    lru_dict = save_and_load(lru_dict)
-    self.assert_order(lru_dict, [1, 3, 2])
-
-    # After pop.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.pop(2)
-    lru_dict = save_and_load(lru_dict)
-    self.assert_order(lru_dict, [1, 3])
-
-    # After add.
-    lru_dict = self.prepare_lru_dict(data)
-    lru_dict.add(4, 4)
-    lru_dict.batch_insert_oldest([(5, 5), (6, 6)])
-    lru_dict = save_and_load(lru_dict)
-    self.assert_order(lru_dict, [5, 6] + data + [4])
-
-  def test_corrupted_state_file(self):
-    def load_from_state(state_text):
-      handle, tmp_name = tempfile.mkstemp(prefix=u'lru_test')
-      os.close(handle)
-      try:
-        with open(tmp_name, 'w') as f:
-          f.write(state_text)
-        return lru.LRUDict.load(tmp_name)
-      finally:
-        os.unlink(tmp_name)
-
-    # Loads correct state just fine.
-    self.assertIsNotNone(load_from_state(json.dumps([
-        ['key1', 'value1'],
-        ['key2', 'value2'],
-    ])))
-
-    # Not valid JSON.
-    with self.assertRaises(ValueError):
-      load_from_state('garbage, not a state')
-
-    # Not a list.
-    with self.assertRaises(ValueError):
-      load_from_state('{}')
-
-    # Not a list of pairs.
-    with self.assertRaises(ValueError):
-      load_from_state(json.dumps([
-          ['key', 'value', 'and whats this?'],
-      ]))
-
-    # Duplicate keys.
-    with self.assertRaises(ValueError):
-      load_from_state(json.dumps([
-          ['key', 'value'],
-          ['key', 'another_value'],
-      ]))
-
-
-if __name__ == '__main__':
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/net_test.py b/tools/swarming_client/tests/net_test.py
deleted file mode 100755
index 1fcbbf8..0000000
--- a/tools/swarming_client/tests/net_test.py
+++ /dev/null
@@ -1,384 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# pylint: disable=R0201,W0613
-
-import StringIO
-import __builtin__
-import contextlib
-import logging
-import math
-import os
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-from depot_tools import auto_stub
-from utils import net
-import net_utils
-
-
-class RetryLoopMockedTest(auto_stub.TestCase):
-  """Base class for test cases that mock retry loop."""
-
-  def setUp(self):
-    super(RetryLoopMockedTest, self).setUp()
-    self._retry_attempts_cls = net.RetryAttempt
-    self.mock(net, 'sleep_before_retry', self.mocked_sleep_before_retry)
-    self.mock(net, 'current_time', self.mocked_current_time)
-    self.mock(net, 'RetryAttempt', self.mocked_retry_attempt)
-    self.sleeps = []
-    self.attempts = []
-
-  def mocked_sleep_before_retry(self, attempt, max_wait):
-    self.sleeps.append((attempt, max_wait))
-
-  def mocked_current_time(self):
-    # One attempt is one virtual second.
-    return float(len(self.attempts))
-
-  def mocked_retry_attempt(self, *args, **kwargs):
-    attempt = self._retry_attempts_cls(*args, **kwargs)
-    self.attempts.append(attempt)
-    return attempt
-
-  def assertAttempts(self, attempts, max_timeout):
-    """Asserts that retry loop executed given number of |attempts|."""
-    expected = [(i, max_timeout - i) for i in xrange(attempts)]
-    actual = [(x.attempt, x.remaining) for x in self.attempts]
-    self.assertEqual(expected, actual)
-
-  def assertSleeps(self, sleeps):
-    """Asserts that retry loop slept given number of times."""
-    self.assertEqual(sleeps, len(self.sleeps))
-
-
-class RetryLoopTest(RetryLoopMockedTest):
-  """Test for retry_loop implementation."""
-
-  def test_sleep_before_retry(self):
-    # Verifies bounds. Because it's using a pseudo-random number generator and
-    # not a real random source, it's basically guaranteed to never return the
-    # same value twice consecutively.
-    a = net.calculate_sleep_before_retry(0, 0)
-    b = net.calculate_sleep_before_retry(0, 0)
-    self.assertTrue(a >= math.pow(1.5, -1), a)
-    self.assertTrue(b >= math.pow(1.5, -1), b)
-    self.assertTrue(a < 1.5 + math.pow(1.5, -1), a)
-    self.assertTrue(b < 1.5 + math.pow(1.5, -1), b)
-    self.assertNotEqual(a, b)
-
-
-class HttpServiceTest(RetryLoopMockedTest):
-  """Tests for HttpService class."""
-
-  @staticmethod
-  def mocked_http_service(
-      url='http://example.com',
-      perform_request=None,
-      authorize=None,
-      login=None):
-
-    class MockedAuthenticator(net.Authenticator):
-      def authorize(self, request):
-        return authorize(request) if authorize else None
-      def login(self, allow_user_interaction):
-        return login(allow_user_interaction) if login else False
-
-    class MockedRequestEngine(object):
-      def perform_request(self, request):
-        return perform_request(request) if perform_request else None
-      @classmethod
-      def timeout_exception_classes(cls):
-        return ()
-      @classmethod
-      def parse_request_exception(cls, exc):
-        return None, None
-
-    return net.HttpService(
-        url,
-        authenticator=MockedAuthenticator(),
-        engine=MockedRequestEngine())
-
-  def test_request_GET_success(self):
-    service_url = 'http://example.com'
-    request_url = '/some_request'
-    response = 'True'
-
-    def mock_perform_request(request):
-      self.assertTrue(
-          request.get_full_url().startswith(service_url + request_url))
-      return net_utils.make_fake_response(
-          response, request.get_full_url())
-
-    service = self.mocked_http_service(url=service_url,
-        perform_request=mock_perform_request)
-    self.assertEqual(service.request(request_url).read(), response)
-    self.assertAttempts(1, net.URL_OPEN_TIMEOUT)
-
-  def test_request_POST_success(self):
-    service_url = 'http://example.com'
-    request_url = '/some_request'
-    response = 'True'
-
-    def mock_perform_request(request):
-      self.assertTrue(
-          request.get_full_url().startswith(service_url + request_url))
-      self.assertEqual('', request.body)
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    service = self.mocked_http_service(url=service_url,
-        perform_request=mock_perform_request)
-    self.assertEqual(service.request(request_url, data={}).read(), response)
-    self.assertAttempts(1, net.URL_OPEN_TIMEOUT)
-
-  def test_request_PUT_success(self):
-    service_url = 'http://example.com'
-    request_url = '/some_request'
-    request_body = 'data_body'
-    response_body = 'True'
-    content_type = 'application/octet-stream'
-
-    def mock_perform_request(request):
-      self.assertTrue(
-          request.get_full_url().startswith(service_url + request_url))
-      self.assertEqual(request_body, request.body)
-      self.assertEqual(request.method, 'PUT')
-      self.assertEqual(request.headers['Content-Type'], content_type)
-      return net_utils.make_fake_response(response_body, request.get_full_url())
-
-    service = self.mocked_http_service(url=service_url,
-        perform_request=mock_perform_request)
-    response = service.request(request_url,
-        data=request_body, content_type=content_type, method='PUT')
-    self.assertEqual(response.read(), response_body)
-    self.assertAttempts(1, net.URL_OPEN_TIMEOUT)
-
-  def test_request_success_after_failure(self):
-    response = 'True'
-    attempts = []
-
-    def mock_perform_request(request):
-      attempts.append(request)
-      if len(attempts) == 1:
-        raise net.ConnectionError()
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/', data={}).read(), response)
-    self.assertAttempts(2, net.URL_OPEN_TIMEOUT)
-
-  def test_request_failure_max_attempts_default(self):
-    def mock_perform_request(_request):
-      raise net.ConnectionError()
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/'), None)
-    self.assertAttempts(net.URL_OPEN_MAX_ATTEMPTS, net.URL_OPEN_TIMEOUT)
-
-  def test_request_failure_max_attempts(self):
-    def mock_perform_request(_request):
-      raise net.ConnectionError()
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/', max_attempts=23), None)
-    self.assertAttempts(23, net.URL_OPEN_TIMEOUT)
-
-  def test_request_failure_timeout(self):
-    def mock_perform_request(_request):
-      raise net.ConnectionError()
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/', max_attempts=10000), None)
-    self.assertAttempts(int(net.URL_OPEN_TIMEOUT) + 1, net.URL_OPEN_TIMEOUT)
-
-  def test_request_failure_timeout_default(self):
-    def mock_perform_request(_request):
-      raise net.ConnectionError()
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/', timeout=10.), None)
-    self.assertAttempts(11, 10.0)
-
-  def test_request_HTTP_error_no_retry(self):
-    count = []
-    def mock_perform_request(request):
-      count.append(request)
-      raise net.HttpError(400, 'text/plain', None)
-
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertEqual(service.request('/', data={}), None)
-    self.assertEqual(1, len(count))
-    self.assertAttempts(1, net.URL_OPEN_TIMEOUT)
-
-  def test_request_HTTP_error_retry_404(self):
-    response = 'data'
-    attempts = []
-
-    def mock_perform_request(request):
-      attempts.append(request)
-      if len(attempts) == 1:
-        raise net.HttpError(404, 'text/plain', None)
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    result = service.request('/', data={}, retry_404=True)
-    self.assertEqual(result.read(), response)
-    self.assertAttempts(2, net.URL_OPEN_TIMEOUT)
-
-  def test_request_HTTP_error_retry_404_endpoints(self):
-    response = 'data'
-    attempts = []
-
-    def mock_perform_request(request):
-      attempts.append(request)
-      if len(attempts) == 1:
-        raise net.HttpError(404, 'application/text; charset=ASCII', None)
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    result = service.request('/_ah/api/foo/v1/bar')
-    self.assertEqual(result.read(), response)
-    self.assertAttempts(2, net.URL_OPEN_TIMEOUT)
-
-  def test_request_HTTP_error_with_retry(self):
-    response = 'response'
-    attempts = []
-
-    def mock_perform_request(request):
-      attempts.append(request)
-      if len(attempts) == 1:
-        raise net.HttpError(500, 'text/plain', None)
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    service = self.mocked_http_service(perform_request=mock_perform_request)
-    self.assertTrue(service.request('/', data={}).read(), response)
-    self.assertAttempts(2, net.URL_OPEN_TIMEOUT)
-
-  def test_auth_success(self):
-    calls = []
-    response = 'response'
-
-    def mock_perform_request(request):
-      calls.append('request')
-      if 'login' not in calls:
-        raise net.HttpError(403, 'text/plain', None)
-      return net_utils.make_fake_response(response, request.get_full_url())
-
-    def mock_authorize(request):
-      calls.append('authorize')
-
-    def mock_login(allow_user_interaction):
-      self.assertFalse(allow_user_interaction)
-      calls.append('login')
-      return True
-
-    service = self.mocked_http_service(
-        perform_request=mock_perform_request,
-        authorize=mock_authorize,
-        login=mock_login)
-    self.assertEqual(service.request('/').read(), response)
-    self.assertEqual(
-        ['authorize', 'request', 'login', 'authorize', 'request'], calls)
-    self.assertAttempts(2, net.URL_OPEN_TIMEOUT)
-    self.assertSleeps(0)
-
-  def test_auth_failure(self):
-    count = []
-
-    def mock_perform_request(_request):
-      raise net.HttpError(403, 'text/plain', None)
-
-    def mock_login(allow_user_interaction):
-      self.assertFalse(allow_user_interaction)
-      count.append(1)
-      return False
-
-    service = self.mocked_http_service(perform_request=mock_perform_request,
-        login=mock_login)
-    self.assertEqual(service.request('/'), None)
-    self.assertEqual(len(count), 1)
-    self.assertAttempts(1, net.URL_OPEN_TIMEOUT)
-
-  def test_url_read(self):
-    # Successfully reads the data.
-    self.mock(net, 'url_open',
-        lambda url, **_kwargs: net_utils.make_fake_response('111', url))
-    self.assertEqual(net.url_read('https://fake_url.com/test'), '111')
-
-    # Respects url_open connection errors.
-    self.mock(net, 'url_open', lambda _url, **_kwargs: None)
-    self.assertIsNone(net.url_read('https://fake_url.com/test'))
-
-    # Respects read timeout errors.
-    def timeouting_http_response(url):
-      def read_mock(_size=None):
-        raise net.TimeoutError()
-      response = net_utils.make_fake_response('', url)
-      self.mock(response, 'read', read_mock)
-      return response
-
-    self.mock(net, 'url_open',
-        lambda url, **_kwargs: timeouting_http_response(url))
-    self.assertIsNone(net.url_read('https://fake_url.com/test'))
-
-  def test_url_retrieve(self):
-    # Successfully reads the data.
-    @contextlib.contextmanager
-    def fake_open(_filepath, _mode):
-      yield StringIO.StringIO()
-
-    self.mock(__builtin__, 'open', fake_open)
-    self.mock(net, 'url_open',
-        lambda url, **_kwargs: net_utils.make_fake_response('111', url))
-    self.assertEqual(
-        True, net.url_retrieve('filepath', 'https://localhost/test'))
-
-    # Respects url_open connection errors.
-    self.mock(net, 'url_open', lambda _url, **_kwargs: None)
-    self.assertEqual(
-        False, net.url_retrieve('filepath', 'https://localhost/test'))
-
-    # Respects read timeout errors.
-    def timeouting_http_response(url):
-      def iter_content_mock(_size=None):
-        raise net.TimeoutError()
-      response = net_utils.make_fake_response('', url)
-      self.mock(response, 'iter_content', iter_content_mock)
-      return response
-
-    removed = []
-    self.mock(os, 'remove', removed.append)
-    self.mock(net, 'url_open',
-        lambda url, **_kwargs: timeouting_http_response(url))
-    self.assertEqual(
-        False, net.url_retrieve('filepath', 'https://localhost/test'))
-    self.assertEqual(['filepath'], removed)
-
-
-class TestNetFunctions(auto_stub.TestCase):
-  def test_fix_url(self):
-    data = [
-      ('http://foo.com/', 'http://foo.com'),
-      ('https://foo.com/', 'https://foo.com'),
-      ('https://foo.com', 'https://foo.com'),
-      ('https://foo.com/a', 'https://foo.com/a'),
-      ('https://foo.com/a/', 'https://foo.com/a'),
-      ('https://foo.com:8080/a/', 'https://foo.com:8080/a'),
-      ('foo.com', 'https://foo.com'),
-      ('foo.com:8080', 'https://foo.com:8080'),
-      ('foo.com/', 'https://foo.com'),
-      ('foo.com/a/', 'https://foo.com/a'),
-    ]
-    for value, expected in data:
-      self.assertEqual(expected, net.fix_url(value))
-
-
-if __name__ == '__main__':
-  logging.basicConfig(
-      level=(logging.DEBUG if '-v' in sys.argv else logging.FATAL))
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  unittest.main()
diff --git a/tools/swarming_client/tests/net_utils.py b/tools/swarming_client/tests/net_utils.py
deleted file mode 100644
index a111984..0000000
--- a/tools/swarming_client/tests/net_utils.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-import os
-import sys
-import threading
-
-TEST_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(TEST_DIR)
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-from depot_tools import auto_stub
-from utils import net
-
-
-def make_fake_response(content, url, headers=None):
-  """Returns HttpResponse with predefined content, useful in tests."""
-  headers = dict(headers or {})
-  headers['Content-Length'] = len(content)
-  class _Fake(object):
-    def __init__(self):
-      self.content = content
-    def iter_content(self, chunk_size):
-      c = self.content
-      while c:
-        yield c[:chunk_size]
-        c = c[chunk_size:]
-    def read(self):
-      return self.content
-  return net.HttpResponse(_Fake(), url, headers)
-
-
-class TestCase(auto_stub.TestCase):
-  """Mocks out url_open() calls."""
-  def setUp(self):
-    super(TestCase, self).setUp()
-    self.mock(net, 'url_open', self._url_open)
-    self.mock(net, 'url_read_json', self._url_read_json)
-    self.mock(net, 'sleep_before_retry', lambda *_: None)
-    self._lock = threading.Lock()
-    self._requests = []
-
-  def tearDown(self):
-    try:
-      if not self.has_failed():
-        self.assertEqual([], self._requests)
-    finally:
-      super(TestCase, self).tearDown()
-
-  def expected_requests(self, requests):
-    """Registers the expected requests along their reponses.
-
-    Arguments:
-      requests: list of tuple(url, kwargs, response, headers) for normal requests
-          and tuple(url, kwargs, response) for json requests. kwargs can be a
-          callable. In that case, it's called with the actual kwargs. It's
-          useful when the kwargs values are not deterministic.
-    """
-    requests = requests[:]
-    for request in requests:
-      self.assertEqual(tuple, request.__class__)
-      # 3 = json request (url_read_json).
-      # 4 = normal request (url_open).
-      self.assertIn(len(request), (3, 4))
-
-    with self._lock:
-      self.assertEqual([], self._requests)
-      self._requests = requests
-
-  def _url_open(self, url, **kwargs):
-    logging.warn('url_open(%s, %s)', url[:500], str(kwargs)[:500])
-    with self._lock:
-      if not self._requests:
-        return None
-      # Ignore 'stream' argument, it's not important for these tests.
-      kwargs.pop('stream', None)
-      for i, n in enumerate(self._requests):
-        if n[0] == url:
-          data = self._requests.pop(i)
-          if len(data) != 4:
-            self.fail('Expected normal request, got json data; %s' % url)
-          _, expected_kwargs, result, headers = data
-          if callable(expected_kwargs):
-            expected_kwargs(kwargs)
-          else:
-            self.assertEqual(expected_kwargs, kwargs)
-          if result is not None:
-            return make_fake_response(result, url, headers)
-          return None
-    self.fail('Unknown request %s' % url)
-
-  def _url_read_json(self, url, **kwargs):
-    logging.warn('url_read_json(%s, %s)', url[:500], str(kwargs)[:500])
-    with self._lock:
-      if not self._requests:
-        return None
-      # Ignore 'stream' argument, it's not important for these tests.
-      kwargs.pop('stream', None)
-      for i, n in enumerate(self._requests):
-        if n[0] == url:
-          data = self._requests.pop(i)
-          if len(data) != 3:
-            self.fail('Expected json request, got normal data; %s' % url)
-          _, expected_kwargs, result = data
-          if callable(expected_kwargs):
-            expected_kwargs(kwargs)
-          else:
-            self.assertEqual(expected_kwargs, kwargs)
-          if result is not None:
-            return result
-          return None
-    self.fail('Unknown request %s %s' % (url, kwargs))
diff --git a/tools/swarming_client/tests/on_error_test.py b/tools/swarming_client/tests/on_error_test.py
deleted file mode 100755
index d52a13a..0000000
--- a/tools/swarming_client/tests/on_error_test.py
+++ /dev/null
@@ -1,425 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import BaseHTTPServer
-import atexit
-import cgi
-import getpass
-import json
-import logging
-import os
-import platform
-import re
-import socket
-import ssl
-import subprocess
-import sys
-import threading
-import unittest
-import urllib
-import urlparse
-
-TESTS_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(TESTS_DIR)
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-
-from utils import on_error
-
-
-PEM = os.path.join(TESTS_DIR, 'self_signed.pem')
-
-
-# Access to a protected member XXX of a client class - pylint: disable=W0212
-
-
-def _serialize_env():
-  return dict(
-      (unicode(k), unicode(v.encode('ascii', 'replace')))
-      for k, v in os.environ.iteritems())
-
-
-class HttpsServer(BaseHTTPServer.HTTPServer):
-  def __init__(self, addr, cls, hostname, pem):
-    BaseHTTPServer.HTTPServer.__init__(self, addr, cls)
-    self.hostname = hostname
-    self.pem = pem
-    self.socket = ssl.wrap_socket(
-        self.socket,
-        server_side=True,
-        certfile=self.pem)
-    self.keep_running = True
-    self.requests = []
-    self._thread = None
-
-  @property
-  def url(self):
-    return 'https://%s:%d' % (self.hostname, self.server_address[1])
-
-  def start(self):
-    assert not self._thread
-
-    def _server_loop():
-      while self.keep_running:
-        self.handle_request()
-
-    self._thread = threading.Thread(name='http', target=_server_loop)
-    self._thread.daemon = True
-    self._thread.start()
-
-    while True:
-      # Ensures it is up.
-      try:
-        urllib.urlopen(self.url + '/_warmup').read()
-      except IOError:
-        continue
-      return
-
-  def stop(self):
-    self.keep_running = False
-    urllib.urlopen(self.url + '/_quit').read()
-    self._thread.join()
-    self._thread = None
-
-  def register_call(self, request):
-    if request.path not in ('/_quit', '/_warmup'):
-      self.requests.append((request.path, request.parse_POST()))
-
-
-class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
-  def log_message(self, fmt, *args):
-    logging.debug(
-        '%s - - [%s] %s',
-        self.address_string(), self.log_date_time_string(), fmt % args)
-
-  def parse_POST(self):
-    ctype, pdict = cgi.parse_header(self.headers['Content-Type'])
-    if ctype == 'multipart/form-data':
-      return cgi.parse_multipart(self.rfile, pdict)
-    if ctype == 'application/x-www-form-urlencoded':
-      length = int(self.headers['Content-Length'])
-      return urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
-    if ctype in ('application/json', 'application/json; charset=utf-8'):
-      length = int(self.headers['Content-Length'])
-      return json.loads(self.rfile.read(length))
-    assert False, ctype
-
-  def do_GET(self):
-    self.server.register_call(self)
-    self.send_response(200)
-    self.send_header('Content-type', 'text/plain')
-    self.end_headers()
-    self.wfile.write('Rock on')
-
-  def do_POST(self):
-    self.server.register_call(self)
-    self.send_response(200)
-    self.send_header('Content-type', 'application/json; charset=utf-8')
-    self.end_headers()
-    data = {
-      'id': '1234',
-      'url': 'https://localhost/error/1234',
-    }
-    self.wfile.write(json.dumps(data))
-
-
-def start_server():
-  """Starts an HTTPS web server and returns the port bound."""
-  # A premade passwordless self-signed certificate. It works because urllib
-  # doesn't verify the certificate validity.
-  httpd = HttpsServer(('127.0.0.1', 0), Handler, 'localhost', pem=PEM)
-  httpd.start()
-  return httpd
-
-
-class OnErrorBase(auto_stub.TestCase):
-  HOSTNAME = socket.getfqdn()
-
-  def setUp(self):
-    super(OnErrorBase, self).setUp()
-    os.chdir(TESTS_DIR)
-    self._atexit = []
-    self.mock(atexit, 'register', self._atexit.append)
-    self.mock(on_error, '_ENABLED_DOMAINS', (self.HOSTNAME,))
-    self.mock(on_error, '_HOSTNAME', None)
-    self.mock(on_error, '_SERVER', None)
-    self.mock(on_error, '_is_in_test', lambda: False)
-
-
-class OnErrorTest(OnErrorBase):
-  def test_report(self):
-    url = 'https://localhost/'
-    on_error.report_on_exception_exit(url)
-    self.assertEqual([on_error._check_for_exception_on_exit], self._atexit)
-    self.assertEqual('https://localhost', on_error._SERVER.urlhost)
-    self.assertEqual(self.HOSTNAME, on_error._HOSTNAME)
-    with self.assertRaises(ValueError):
-      on_error.report_on_exception_exit(url)
-
-  def test_no_http(self):
-    # http:// urls are denied.
-    url = 'http://localhost/'
-    self.assertIs(False, on_error.report_on_exception_exit(url))
-    self.assertEqual([], self._atexit)
-
-
-class OnErrorServerTest(OnErrorBase):
-  def call(self, url, arg, returncode):
-    cmd = [sys.executable, 'on_error_test.py', 'run_shell_out', url, arg]
-    proc = subprocess.Popen(
-        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=os.environ,
-        universal_newlines=True)
-    out = proc.communicate()[0]
-    logging.debug('\n%s', out)
-    self.assertEqual(returncode, proc.returncode)
-    return out
-
-  def one_request(self, httpd):
-    self.assertEqual(1, len(httpd.requests))
-    resource, params = httpd.requests[0]
-    self.assertEqual('/ereporter2/api/v1/on_error', resource)
-    self.assertEqual(['r', 'v'], params.keys())
-    self.assertEqual('1', params['v'])
-    return params['r']
-
-  def test_shell_out_hacked(self):
-    # Rerun itself, report an error, ensure the error was reported.
-    httpd = start_server()
-    out = self.call(httpd.url, 'hacked', 0)
-    self.assertEqual([], httpd.requests)
-    self.assertEqual('', out)
-    httpd.stop()
-
-  def test_shell_out_report(self):
-    # Rerun itself, report an error manually, ensure the error was reported.
-    httpd = start_server()
-    out = self.call(httpd.url, 'report', 0)
-    expected = (
-        'Sending the report ... done.\n'
-        'Report URL: https://localhost/error/1234\n'
-        'Oh dang\n')
-    self.assertEqual(expected, out)
-
-    actual = self.one_request(httpd)
-    self.assertGreater(actual.pop('duration'), 0.000001)
-    expected = {
-      u'args': [
-        u'on_error_test.py', u'run_shell_out', unicode(httpd.url), u'report',
-      ],
-      u'category': u'report',
-      u'cwd': unicode(os.getcwd()),
-      u'env': _serialize_env(),
-      u'hostname': unicode(socket.getfqdn()),
-      u'message': u'Oh dang',
-      u'os': unicode(sys.platform),
-      u'python_version': unicode(platform.python_version()),
-      u'source': u'on_error_test.py',
-      u'user': unicode(getpass.getuser()),
-      # The version was added dynamically for testing purposes.
-      u'version': u'123',
-    }
-    self.assertEqual(expected, actual)
-    httpd.stop()
-
-  def test_shell_out_exception(self):
-    # Rerun itself, report an exception manually, ensure the error was reported.
-    httpd = start_server()
-    out = self.call(httpd.url, 'exception', 0)
-    expected = (
-        'Sending the crash report ... done.\n'
-        'Report URL: https://localhost/error/1234\n'
-        'Really\nYou are not my type\n')
-    self.assertEqual(expected, out)
-
-    actual = self.one_request(httpd)
-    self.assertGreater(actual.pop('duration'), 0.000001)
-    # Remove numbers so editing the code doesn't invalidate the expectation.
-    actual['stack'] = re.sub(r' \d+', ' 0', actual['stack'])
-    expected = {
-      u'args': [
-        u'on_error_test.py', u'run_shell_out', unicode(httpd.url), u'exception',
-      ],
-      u'cwd': unicode(os.getcwd()),
-      u'category': u'exception',
-      u'env': _serialize_env(),
-      u'exception_type': u'TypeError',
-      u'hostname': unicode(socket.getfqdn()),
-      u'message': u'Really\nYou are not my type',
-      u'os': unicode(sys.platform),
-      u'python_version': unicode(platform.python_version()),
-      u'source': u'on_error_test.py',
-      u'stack':
-        u'File "on_error_test.py", line 0, in run_shell_out\n'
-        u'  raise TypeError(\'You are not my type\')',
-      u'user': unicode(getpass.getuser()),
-    }
-    self.assertEqual(expected, actual)
-    httpd.stop()
-
-  def test_shell_out_exception_no_msg(self):
-    # Rerun itself, report an exception manually, ensure the error was reported.
-    httpd = start_server()
-    out = self.call(httpd.url, 'exception_no_msg', 0)
-    expected = (
-        'Sending the crash report ... done.\n'
-        'Report URL: https://localhost/error/1234\n'
-        'You are not my type #2\n')
-    self.assertEqual(expected, out)
-
-    actual = self.one_request(httpd)
-    self.assertGreater(actual.pop('duration'), 0.000001)
-    # Remove numbers so editing the code doesn't invalidate the expectation.
-    actual['stack'] = re.sub(r' \d+', ' 0', actual['stack'])
-    expected = {
-      u'args': [
-        u'on_error_test.py', u'run_shell_out', unicode(httpd.url),
-        u'exception_no_msg',
-      ],
-      u'category': u'exception',
-      u'cwd': unicode(os.getcwd()),
-      u'env': _serialize_env(),
-      u'exception_type': u'TypeError',
-      u'hostname': unicode(socket.getfqdn()),
-      u'message': u'You are not my type #2',
-      u'os': unicode(sys.platform),
-      u'python_version': unicode(platform.python_version()),
-      u'source': u'on_error_test.py',
-      u'stack':
-        u'File "on_error_test.py", line 0, in run_shell_out\n'
-        u'  raise TypeError(\'You are not my type #2\')',
-      u'user': unicode(getpass.getuser()),
-    }
-    self.assertEqual(expected, actual)
-    httpd.stop()
-
-  def test_shell_out_crash(self):
-    # Rerun itself, report an error with a crash, ensure the error was reported.
-    httpd = start_server()
-    out = self.call(httpd.url, 'crash', 1)
-    expected = (
-        'Traceback (most recent call last):\n'
-        '  File "on_error_test.py", line 0, in <module>\n'
-        '    sys.exit(run_shell_out(sys.argv[2], sys.argv[3]))\n'
-        '  File "on_error_test.py", line 0, in run_shell_out\n'
-        '    raise ValueError(\'Oops\')\n'
-        'ValueError: Oops\n'
-        'Sending the crash report ... done.\n'
-        'Report URL: https://localhost/error/1234\n'
-        'Process exited due to exception\n'
-        'Oops\n')
-    # Remove numbers so editing the code doesn't invalidate the expectation.
-    self.assertEqual(expected, re.sub(r' \d+', ' 0', out))
-
-    actual = self.one_request(httpd)
-    # Remove numbers so editing the code doesn't invalidate the expectation.
-    actual['stack'] = re.sub(r' \d+', ' 0', actual['stack'])
-    self.assertGreater(actual.pop('duration'), 0.000001)
-    expected = {
-      u'args': [
-        u'on_error_test.py', u'run_shell_out', unicode(httpd.url), u'crash',
-      ],
-      u'category': u'exception',
-      u'cwd': unicode(os.getcwd()),
-      u'env': _serialize_env(),
-      u'exception_type': u'ValueError',
-      u'hostname': unicode(socket.getfqdn()),
-      u'message': u'Process exited due to exception\nOops',
-      u'os': unicode(sys.platform),
-      u'python_version': unicode(platform.python_version()),
-      u'source': u'on_error_test.py',
-      # The stack trace is stripped of the heading and absolute paths.
-      u'stack':
-        u'File "on_error_test.py", line 0, in <module>\n'
-        u'  sys.exit(run_shell_out(sys.argv[2], sys.argv[3]))\n'
-        u'File "on_error_test.py", line 0, in run_shell_out\n'
-        u'  raise ValueError(\'Oops\')',
-      u'user': unicode(getpass.getuser()),
-    }
-    self.assertEqual(expected, actual)
-    httpd.stop()
-
-  def test_shell_out_crash_server_down(self):
-    # Rerun itself, report an error, ensure the error was reported.
-    out = self.call('https://localhost:1', 'crash', 1)
-    expected = (
-        'Traceback (most recent call last):\n'
-        '  File "on_error_test.py", line 0, in <module>\n'
-        '    sys.exit(run_shell_out(sys.argv[2], sys.argv[3]))\n'
-        '  File "on_error_test.py", line 0, in run_shell_out\n'
-        '    raise ValueError(\'Oops\')\n'
-        'ValueError: Oops\n'
-        'Sending the crash report ... failed!\n'
-        'Process exited due to exception\n'
-        'Oops\n')
-    # Remove numbers so editing the code doesn't invalidate the expectation.
-    self.assertEqual(expected, re.sub(r' \d+', ' 0', out))
-
-
-def run_shell_out(url, mode):
-  # Enable 'report_on_exception_exit' even though main file is *_test.py.
-  on_error._is_in_test = lambda: False
-
-  # Hack it out so registering works.
-  on_error._ENABLED_DOMAINS = (socket.getfqdn(),)
-
-  # Don't try to authenticate into localhost.
-  on_error.net.OAuthAuthenticator = lambda *_: None
-
-  if not on_error.report_on_exception_exit(url):
-    print 'Failure to register the handler'
-    return 1
-
-  # Hack out certificate verification because we are using a self-signed
-  # certificate here. In practice, the SSL certificate is signed to guard
-  # against MITM attacks.
-  on_error._SERVER.engine.session.verify = False
-
-  if mode == 'crash':
-    # Sadly, net is a bit overly verbose, which breaks
-    # test_shell_out_crash_server_down.
-    logging.error = lambda *_, **_kwargs: None
-    logging.warning = lambda *_, **_kwargs: None
-    raise ValueError('Oops')
-
-  if mode == 'report':
-    # Generate a manual report without an exception frame. Also set the version
-    # value.
-    setattr(sys.modules['__main__'], '__version__', '123')
-    on_error.report('Oh dang')
-
-  if mode == 'exception':
-    # Report from inside an exception frame.
-    try:
-      raise TypeError('You are not my type')
-    except TypeError:
-      on_error.report('Really')
-
-  if mode == 'exception_no_msg':
-    # Report from inside an exception frame.
-    try:
-      raise TypeError('You are not my type #2')
-    except TypeError:
-      on_error.report(None)
-  return 0
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-
-  # Ignore _DISABLE_ENVVAR if set.
-  os.environ.pop(on_error._DISABLE_ENVVAR, None)
-
-  if len(sys.argv) == 4 and sys.argv[1] == 'run_shell_out':
-    sys.exit(run_shell_out(sys.argv[2], sys.argv[3]))
-
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/run_isolated_smoke_test.py b/tools/swarming_client/tests/run_isolated_smoke_test.py
deleted file mode 100755
index a74355b..0000000
--- a/tools/swarming_client/tests/run_isolated_smoke_test.py
+++ /dev/null
@@ -1,467 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import json
-import logging
-import os
-import subprocess
-import sys
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-import isolated_format
-import run_isolated
-from depot_tools import fix_encoding
-from utils import file_path
-
-import isolateserver_mock
-import test_utils
-
-
-CONTENTS = {
-  'check_files.py': """if True:
-      import os, sys
-      ROOT_DIR = os.path.dirname(os.path.abspath(
-          __file__.decode(sys.getfilesystemencoding())))
-      expected = [
-        'check_files.py', 'file1.txt', 'file1_copy.txt', 'file2.txt',
-        'repeated_files.py',
-      ]
-      actual = sorted(os.listdir(ROOT_DIR))
-      if expected != actual:
-        print >> sys.stderr, 'Expected list doesn\\'t match:'
-        print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual))
-        sys.exit(1)
-      # Check that file2.txt is in reality file3.txt.
-      with open(os.path.join(ROOT_DIR, 'file2.txt'), 'rb') as f:
-        if f.read() != 'File3\\n':
-          print >> sys.stderr, 'file2.txt should be file3.txt in reality'
-          sys.exit(2)
-      print('Success')""",
-  'file1.txt': 'File1\n',
-  'file2.txt': 'File2.txt\n',
-  'file3.txt': 'File3\n',
-  'repeated_files.py': """if True:
-      import os, sys
-      expected = ['file1.txt', 'file1_copy.txt', 'repeated_files.py']
-      actual = sorted(os.listdir(os.path.dirname(os.path.abspath(
-          __file__.decode(sys.getfilesystemencoding())))))
-      if expected != actual:
-        print >> sys.stderr, 'Expected list doesn\\'t match:'
-        print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual))
-        sys.exit(1)
-      print('Success')""",
-  'max_path.py': """if True:
-      import os, sys
-      prefix = u'\\\\\\\\?\\\\' if sys.platform == 'win32' else u''
-      path = os.path.join(os.getcwd().decode(
-          sys.getfilesystemencoding()), 'a' * 200, 'b' * 200)
-      with open(prefix + path, 'rb') as f:
-        actual = f.read()
-        if actual != 'File1\\n':
-          print >> sys.stderr, 'Unexpected content: %s' % actual
-          sys.exit(1)
-      print('Success')""",
-  'archive': (
-      '!<arch>\n'
-      '#1/5            '
-      '1447140471  1000  1000  100640  '
-      '12        '
-      '\x60\n'
-      'a/foo'
-      'Content'
-      'b               '
-      '1447140471  1000  1000  100640  '
-      '12        '
-      '\x60\n'
-      'More content'),
-  'archive_files.py': """if True:
-      import os, sys
-      ROOT_DIR = os.path.dirname(os.path.abspath(
-          __file__.decode(sys.getfilesystemencoding())))
-      expected = ['a', 'archive_files.py', 'b']
-      actual = sorted(os.listdir(ROOT_DIR))
-      if expected != actual:
-        print >> sys.stderr, 'Expected list doesn\\'t match:'
-        print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual))
-        sys.exit(1)
-      expected = ['foo']
-      actual = sorted(os.listdir(os.path.join(ROOT_DIR, 'a')))
-      if expected != actual:
-        print >> sys.stderr, 'Expected list doesn\\'t match:'
-        print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual))
-        sys.exit(2)
-      # Check that a/foo has right contents.
-      with open(os.path.join(ROOT_DIR, 'a/foo'), 'rb') as f:
-        d = f.read()
-        if d != 'Content':
-          print >> sys.stderr, 'a/foo contained %r' % d
-          sys.exit(3)
-      # Check that b has right contents.
-      with open(os.path.join(ROOT_DIR, 'b'), 'rb') as f:
-        d = f.read()
-        if d != 'More content':
-          print >> sys.stderr, 'b contained %r' % d
-          sys.exit(4)
-      print('Success')""",
-}
-
-
-def file_meta(filename):
-  return {
-    'h': isolateserver_mock.hash_content(CONTENTS[filename]),
-    's': len(CONTENTS[filename]),
-  }
-
-
-CONTENTS['download.isolated'] = json.dumps(
-    {
-      'command': ['python', 'repeated_files.py'],
-      'files': {
-        'file1.txt': file_meta('file1.txt'),
-        'file1_symlink.txt': {'l': 'files1.txt'},
-        'new_folder/file1.txt': file_meta('file1.txt'),
-        'repeated_files.py': file_meta('repeated_files.py'),
-      },
-    })
-
-
-CONTENTS['file_with_size.isolated'] = json.dumps(
-    {
-      'command': [ 'python', '-V' ],
-      'files': {'file1.txt': file_meta('file1.txt')},
-      'read_only': 1,
-    })
-
-
-CONTENTS['manifest1.isolated'] = json.dumps(
-    {'files': {'file1.txt': file_meta('file1.txt')}})
-
-
-CONTENTS['manifest2.isolated'] = json.dumps(
-    {
-      'files': {'file2.txt': file_meta('file2.txt')},
-      'includes': [
-        isolateserver_mock.hash_content(CONTENTS['manifest1.isolated']),
-      ],
-    })
-
-
-CONTENTS['archive.isolated'] = json.dumps(
-    {
-      'command': ['python', 'archive_files.py'],
-      'files': {
-        'archive': {
-          'h': isolateserver_mock.hash_content(CONTENTS['archive']),
-          's': len(CONTENTS['archive']),
-          't': 'ar',
-        },
-        'archive_files.py': file_meta('archive_files.py'),
-      },
-    })
-
-
-CONTENTS['max_path.isolated'] = json.dumps(
-    {
-      'command': ['python', 'max_path.py'],
-      'files': {
-        'a' * 200 + '/' + 'b' * 200: file_meta('file1.txt'),
-        'max_path.py': file_meta('max_path.py'),
-      },
-    })
-
-
-CONTENTS['repeated_files.isolated'] = json.dumps(
-    {
-      'command': ['python', 'repeated_files.py'],
-      'files': {
-        'file1.txt': file_meta('file1.txt'),
-        'file1_copy.txt': file_meta('file1.txt'),
-        'repeated_files.py': file_meta('repeated_files.py'),
-      },
-    })
-
-
-CONTENTS['check_files.isolated'] = json.dumps(
-    {
-      'command': ['python', 'check_files.py'],
-      'files': {
-        'check_files.py': file_meta('check_files.py'),
-        # Mapping another file.
-        'file2.txt': file_meta('file3.txt'),
-      },
-      'includes': [
-        isolateserver_mock.hash_content(CONTENTS[i])
-        for i in ('manifest2.isolated', 'repeated_files.isolated')
-      ]
-    })
-
-
-def list_files_tree(directory):
-  """Returns the list of all the files in a tree."""
-  actual = []
-  for root, _dirs, files in os.walk(directory):
-    actual.extend(os.path.join(root, f)[len(directory)+1:] for f in files)
-  return sorted(actual)
-
-
-def read_content(filepath):
-  with open(filepath, 'rb') as f:
-    return f.read()
-
-
-def write_content(filepath, content):
-  with open(filepath, 'wb') as f:
-    f.write(content)
-
-
-def tree_modes(root):
-  """Returns the dict of files in a directory with their filemode.
-
-  Includes |root| as '.'.
-  """
-  out = {}
-  offset = len(root.rstrip('/\\')) + 1
-  out['.'] = oct(os.stat(root).st_mode)
-  for dirpath, dirnames, filenames in os.walk(root):
-    for filename in filenames:
-      p = os.path.join(dirpath, filename)
-      out[p[offset:]] = oct(os.stat(p).st_mode)
-    for dirname in dirnames:
-      p = os.path.join(dirpath, dirname)
-      out[p[offset:]] = oct(os.stat(p).st_mode)
-  return out
-
-
-class RunIsolatedTest(unittest.TestCase):
-  def setUp(self):
-    super(RunIsolatedTest, self).setUp()
-    self.tempdir = run_isolated.make_temp_dir(
-        u'run_isolated_smoke_test', ROOT_DIR)
-    logging.debug(self.tempdir)
-    # run_isolated.zip executable package.
-    self.run_isolated_zip = os.path.join(self.tempdir, 'run_isolated.zip')
-    run_isolated.get_as_zip_package().zip_into_file(
-        self.run_isolated_zip, compress=False)
-    # The run_isolated local cache.
-    self.cache = os.path.join(self.tempdir, 'cache')
-    self.server = isolateserver_mock.MockIsolateServer()
-
-  def tearDown(self):
-    try:
-      self.server.close_start()
-      file_path.rmtree(self.tempdir)
-      self.server.close_end()
-    finally:
-      super(RunIsolatedTest, self).tearDown()
-
-  def _run(self, args):
-    cmd = [sys.executable, self.run_isolated_zip]
-    cmd.extend(args)
-    pipe = subprocess.PIPE
-    logging.debug(' '.join(cmd))
-    proc = subprocess.Popen(
-        cmd,
-        stdout=pipe,
-        stderr=pipe,
-        universal_newlines=True,
-        cwd=self.tempdir)
-    out, err = proc.communicate()
-    return out, err, proc.returncode
-
-  def _store_isolated(self, data):
-    """Stores an isolated file and returns its hash."""
-    return self.server.add_content('default', json.dumps(data, sort_keys=True))
-
-  def _store(self, filename):
-    """Stores a test data file in the table and returns its hash."""
-    return self.server.add_content('default', CONTENTS[filename])
-
-  def _cmd_args(self, hash_value):
-    """Generates the standard arguments used with |hash_value| as the hash.
-
-    Returns a list of the required arguments.
-    """
-    return [
-      '--isolated', hash_value,
-      '--cache', self.cache,
-      '--isolate-server', self.server.url,
-      '--namespace', 'default',
-    ]
-
-  def assertTreeModes(self, root, expected):
-    """Compares the file modes of everything in |root| with |expected|.
-
-    Arguments:
-      root: directory whose tree to list.
-      expected: dict(relpath: (linux_mode, mac_mode, win_mode)) where each mode
-                is the expected file mode on this OS. For practical purposes,
-                linux is "anything but OSX or Windows". The modes should be
-                ints.
-    """
-    actual = tree_modes(root)
-    if sys.platform == 'win32':
-      index = 2
-    elif sys.platform == 'darwin':
-      index = 1
-    else:
-      index = 0
-    expected_mangled = dict((k, oct(v[index])) for k, v in expected.iteritems())
-    self.assertEqual(expected_mangled, actual)
-
-  def test_normal(self):
-    # Loads the .isolated from the store as a hash.
-    # Loads an isolated file with the same content (same SHA-1), listed under two
-    # different names, and ensures both are created.
-    isolated_hash = self._store('repeated_files.isolated')
-    expected = [
-      'state.json',
-      isolated_hash,
-      self._store('file1.txt'),
-      self._store('repeated_files.py'),
-    ]
-
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual('', err)
-    self.assertEqual('Success\n', out, out)
-    self.assertEqual(0, returncode)
-    actual = list_files_tree(self.cache)
-    self.assertEqual(sorted(set(expected)), actual)
-
-  def test_max_path(self):
-    # Make sure we can map and delete a tree that has paths longer than
-    # MAX_PATH.
-    isolated_hash = self._store('max_path.isolated')
-    expected = [
-      'state.json',
-      isolated_hash,
-      self._store('file1.txt'),
-      self._store('max_path.py'),
-    ]
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual('', err)
-    self.assertEqual('Success\n', out, out)
-    self.assertEqual(0, returncode)
-    actual = list_files_tree(self.cache)
-    self.assertEqual(sorted(set(expected)), actual)
-
-  def test_fail_empty_isolated(self):
-    isolated_hash = self._store_isolated({})
-    expected = ['state.json', isolated_hash]
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual('', out)
-    self.assertIn(
-        '<The .isolated doesn\'t declare any command to run!>\n'
-        '<Check your .isolate for missing \'command\' variable>\n',
-        err)
-    self.assertEqual(1, returncode)
-    actual = list_files_tree(self.cache)
-    self.assertEqual(sorted(expected), actual)
-
-  def test_includes(self):
-    # Loads an .isolated that includes another one.
-
-    # References manifest2.isolated and repeated_files.isolated. Maps file3.txt
-    # as file2.txt.
-    isolated_hash = self._store('check_files.isolated')
-    expected = [
-      'state.json',
-      isolated_hash,
-      self._store('check_files.py'),
-      self._store('file1.txt'),
-      self._store('file3.txt'),
-      # Maps file1.txt.
-      self._store('manifest1.isolated'),
-      # References manifest1.isolated. Maps file2.txt but it is overridden.
-      self._store('manifest2.isolated'),
-      self._store('repeated_files.py'),
-      self._store('repeated_files.isolated'),
-    ]
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual('', err)
-    self.assertEqual('Success\n', out)
-    self.assertEqual(0, returncode)
-    actual = list_files_tree(self.cache)
-    self.assertEqual(sorted(expected), actual)
-
-  def test_archive(self):
-    # Loads an .isolated that includes an ar archive.
-    isolated_hash = self._store('archive.isolated')
-    expected = [
-      'state.json',
-      isolated_hash,
-      self._store('archive'),
-      self._store('archive_files.py'),
-    ]
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual('', err)
-    self.assertEqual('Success\n', out)
-    self.assertEqual(0, returncode)
-    actual = list_files_tree(self.cache)
-    self.assertEqual(sorted(expected), actual)
-
-  def _test_corruption_common(self, new_content):
-    isolated_hash = self._store('file_with_size.isolated')
-    file1_hash = self._store('file1.txt')
-
-    # Run the test once to generate the cache.
-    _out, _err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual(0, returncode)
-    expected = {
-      '.': (040700, 040700, 040777),
-      'state.json': (0100600, 0100600, 0100666),
-      # The reason for 0100666 on Windows is that the file node had to be
-      # modified to delete the hardlinked node. The read only bit is reset on
-      # load.
-      file1_hash: (0100400, 0100400, 0100666),
-      isolated_hash: (0100400, 0100400, 0100444),
-    }
-    self.assertTreeModes(self.cache, expected)
-
-    # Modify one of the files in the cache to be invalid.
-    cached_file_path = os.path.join(self.cache, file1_hash)
-    previous_mode = os.stat(cached_file_path).st_mode
-    os.chmod(cached_file_path, 0600)
-    write_content(cached_file_path, new_content)
-    os.chmod(cached_file_path, previous_mode)
-    logging.info('Modified %s', cached_file_path)
-    # Ensure that the cache has an invalid file.
-    self.assertNotEqual(CONTENTS['file1.txt'], read_content(cached_file_path))
-
-    # Rerun the test and make sure the cache contains the right file afterwards.
-    out, err, returncode = self._run(self._cmd_args(isolated_hash))
-    self.assertEqual(0, returncode, (out, err, returncode))
-    expected = {
-      '.': (040700, 040700, 040777),
-      u'state.json': (0100600, 0100600, 0100666),
-      unicode(file1_hash): (0100400, 0100400, 0100666),
-      unicode(isolated_hash): (0100400, 0100400, 0100444),
-    }
-    self.assertTreeModes(self.cache, expected)
-    return cached_file_path
-
-  def test_corrupted_cache_entry_different_size(self):
-    # Test that an entry with an invalid file size properly gets removed and
-    # fetched again. This test case also checks file modes.
-    cached_file_path = self._test_corruption_common(
-        CONTENTS['file1.txt'] + ' now invalid size')
-    self.assertEqual(CONTENTS['file1.txt'], read_content(cached_file_path))
-
-  def test_corrupted_cache_entry_same_size(self):
-    # Test that an entry with invalid file content but the same size is NOT
-    # detected properly.
-    cached_file_path = self._test_corruption_common(
-        CONTENTS['file1.txt'][:-1] + ' ')
-    # TODO(maruel): This corruption is NOT detected.
-    # This needs to be fixed.
-    self.assertNotEqual(CONTENTS['file1.txt'], read_content(cached_file_path))
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  test_utils.main()
diff --git a/tools/swarming_client/tests/run_isolated_test.py b/tools/swarming_client/tests/run_isolated_test.py
deleted file mode 100755
index 6907594..0000000
--- a/tools/swarming_client/tests/run_isolated_test.py
+++ /dev/null
@@ -1,658 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# pylint: disable=R0201
-
-import StringIO
-import base64
-import functools
-import json
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-import cipd
-import isolated_format
-import isolateserver
-import run_isolated
-from depot_tools import auto_stub
-from depot_tools import fix_encoding
-from utils import file_path
-from utils import fs
-from utils import large
-from utils import logging_utils
-from utils import on_error
-from utils import subprocess42
-from utils import tools
-
-import isolateserver_mock
-import cipdserver_mock
-
-
-def write_content(filepath, content):
-  with open(filepath, 'wb') as f:
-    f.write(content)
-
-
-def json_dumps(data):
-  return json.dumps(data, sort_keys=True, separators=(',', ':'))
-
-
-class StorageFake(object):
-  def __init__(self, files):
-    self._files = files.copy()
-    self.namespace = 'default-gzip'
-    self.location = 'http://localhost:1'
-
-  def __enter__(self, *_):
-    return self
-
-  def __exit__(self, *_):
-    pass
-
-  @property
-  def hash_algo(self):
-    return isolateserver_mock.ALGO
-
-  def async_fetch(self, channel, _priority, digest, _size, sink):
-    sink([self._files[digest]])
-    channel.send_result(digest)
-
-  def upload_items(self, items_to_upload):
-    # Return all except the first one.
-    return items_to_upload[1:]
-
-
-class RunIsolatedTestBase(auto_stub.TestCase):
-  def setUp(self):
-    super(RunIsolatedTestBase, self).setUp()
-    self.tempdir = tempfile.mkdtemp(prefix=u'run_isolated_test')
-    logging.debug(self.tempdir)
-    self.mock(run_isolated, 'make_temp_dir', self.fake_make_temp_dir)
-    self.mock(run_isolated.auth, 'ensure_logged_in', lambda _: None)
-    self.mock(
-        logging_utils.OptionParserWithLogging, 'logger_root',
-        logging.Logger('unittest'))
-
-    self.cipd_server = cipdserver_mock.MockCipdServer()
-
-  def tearDown(self):
-    file_path.rmtree(self.tempdir)
-    self.cipd_server.close()
-    super(RunIsolatedTestBase, self).tearDown()
-
-  @property
-  def run_test_temp_dir(self):
-    """Where to map all files in run_isolated.run_tha_test."""
-    return os.path.join(self.tempdir, run_isolated.ISOLATED_RUN_DIR)
-
-  def fake_make_temp_dir(self, prefix, _root_dir):
-    """Predictably returns directory for run_tha_test (one per test case)."""
-    self.assertIn(
-        prefix,
-        (run_isolated.ISOLATED_OUT_DIR, run_isolated.ISOLATED_RUN_DIR,
-          run_isolated.ISOLATED_TMP_DIR, 'cipd_site_root'))
-    temp_dir = os.path.join(self.tempdir, prefix)
-    self.assertFalse(os.path.isdir(temp_dir))
-    os.makedirs(temp_dir)
-    return temp_dir
-
-  def temp_join(self, *args):
-    """Shortcut for joining path with self.run_test_temp_dir."""
-    return os.path.join(self.run_test_temp_dir, *args)
-
-
-class RunIsolatedTest(RunIsolatedTestBase):
-  def setUp(self):
-    super(RunIsolatedTest, self).setUp()
-    # List of func(args, **kwargs) -> retcode.
-    # If the func returns None, it's skipped. The first function to return
-    # non-None is taken as the retcode for the mocked Popen call.
-    self.popen_mocks = []
-    self.popen_calls = []
-    # pylint: disable=no-self-argument
-    class Popen(object):
-      def __init__(self2, args, **kwargs):
-        kwargs.pop('cwd', None)
-        kwargs.pop('env', None)
-        self2.returncode = None
-        self2.args = args
-        self2.kwargs = kwargs
-        self.popen_calls.append((args, kwargs))
-
-      def yield_any_line(self, timeout=None):  # pylint: disable=unused-argument
-        return ()
-
-      def wait(self2, timeout=None):  # pylint: disable=unused-argument
-        self2.returncode = 0
-        for mock_fn in self.popen_mocks:
-          ret = mock_fn(self2.args, **self2.kwargs)
-          if ret is not None:
-            self2.returncode = ret
-            break
-        return self2.returncode
-
-      def kill(self):
-        pass
-
-    self.mock(subprocess42, 'Popen', Popen)
-
-  def test_main(self):
-    self.mock(tools, 'disable_buffering', lambda: None)
-    isolated = json_dumps(
-        {
-          'command': ['foo.exe', 'cmd with space'],
-        })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    def get_storage(_isolate_server, _namespace):
-      return StorageFake({isolated_hash:isolated})
-    self.mock(isolateserver, 'get_storage', get_storage)
-
-    cmd = [
-        '--no-log',
-        '--isolated', isolated_hash,
-        '--cache', self.tempdir,
-        '--isolate-server', 'https://localhost',
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(0, ret)
-    self.assertEqual(
-        [([self.temp_join(u'foo.exe'), u'cmd with space'], {'detached': True})],
-        self.popen_calls)
-
-  def test_main_args(self):
-    self.mock(tools, 'disable_buffering', lambda: None)
-    isolated = json_dumps({'command': ['foo.exe', 'cmd w/ space']})
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    def get_storage(_isolate_server, _namespace):
-      return StorageFake({isolated_hash:isolated})
-    self.mock(isolateserver, 'get_storage', get_storage)
-
-    cmd = [
-        '--use-symlinks',
-        '--no-log',
-        '--isolated', isolated_hash,
-        '--cache', self.tempdir,
-        '--isolate-server', 'https://localhost',
-        '--',
-        '--extraargs',
-        'bar',
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(0, ret)
-    self.assertEqual(
-        [
-          ([self.temp_join(u'foo.exe'), u'cmd w/ space', '--extraargs', 'bar'],
-            {'detached': True}),
-          ],
-        self.popen_calls)
-
-  def _run_tha_test(self, isolated_hash=None, files=None, command=None):
-    files = files or {}
-    make_tree_call = []
-    def add(i, _):
-      make_tree_call.append(i)
-    for i in ('make_tree_read_only', 'make_tree_files_read_only',
-              'make_tree_deleteable', 'make_tree_writeable'):
-      self.mock(file_path, i, functools.partial(add, i))
-
-    ret = run_isolated.run_tha_test(
-        command,
-        isolated_hash,
-        StorageFake(files),
-        isolateserver.MemoryCache(),
-        False,
-        None,
-        None,
-        None,
-        None,
-        None,
-        None,
-        lambda run_dir: None,
-        False)
-    self.assertEqual(0, ret)
-    return make_tree_call
-
-  def test_run_tha_test_naked(self):
-    isolated = json_dumps({'command': ['invalid', 'command']})
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    make_tree_call = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(
-        [
-          'make_tree_writeable', 'make_tree_deleteable', 'make_tree_deleteable',
-          'make_tree_deleteable',
-        ],
-        make_tree_call)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'invalid'), u'command'], {'detached': True})],
-        self.popen_calls)
-
-  def test_run_tha_test_naked_read_only_0(self):
-    isolated = json_dumps(
-        {
-          'command': ['invalid', 'command'],
-          'read_only': 0,
-        })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    make_tree_call = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(
-        [
-          'make_tree_writeable', 'make_tree_deleteable', 'make_tree_deleteable',
-          'make_tree_deleteable',
-        ],
-        make_tree_call)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'invalid'), u'command'], {'detached': True})],
-        self.popen_calls)
-
-  def test_run_tha_test_naked_read_only_1(self):
-    isolated = json_dumps(
-        {
-          'command': ['invalid', 'command'],
-          'read_only': 1,
-        })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    make_tree_call = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(
-        [
-          'make_tree_files_read_only', 'make_tree_deleteable',
-          'make_tree_deleteable', 'make_tree_deleteable',
-        ],
-        make_tree_call)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'invalid'), u'command'], {'detached': True})],
-        self.popen_calls)
-
-  def test_run_tha_test_naked_read_only_2(self):
-    isolated = json_dumps(
-        {
-          'command': ['invalid', 'command'],
-          'read_only': 2,
-        })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    make_tree_call = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(
-        [
-          'make_tree_read_only', 'make_tree_deleteable', 'make_tree_deleteable',
-          'make_tree_deleteable',
-        ],
-        make_tree_call)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'invalid'), u'command'], {'detached': True})],
-        self.popen_calls)
-
-  def mock_popen_with_oserr(self):
-    def r(self, args, **kwargs):
-      old_init(self, args, **kwargs)
-      raise OSError('Unknown')
-    old_init = self.mock(subprocess42.Popen, '__init__', r)
-
-  def test_main_naked(self):
-    self.mock_popen_with_oserr()
-    self.mock(on_error, 'report', lambda _: None)
-    # The most naked .isolated file that can exist.
-    self.mock(tools, 'disable_buffering', lambda: None)
-    isolated = json_dumps({'command': ['invalid', 'command']})
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    def get_storage(_isolate_server, _namespace):
-      return StorageFake({isolated_hash:isolated})
-    self.mock(isolateserver, 'get_storage', get_storage)
-
-    cmd = [
-        '--no-log',
-        '--isolated', isolated_hash,
-        '--cache', self.tempdir,
-        '--isolate-server', 'https://localhost',
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(1, ret)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'invalid'), u'command'], {'detached': True})],
-        self.popen_calls)
-
-  def test_main_naked_without_isolated(self):
-    self.mock_popen_with_oserr()
-    cmd = [
-      '--no-log',
-      '--cache', self.tempdir,
-      '/bin/echo',
-      'hello',
-      'world',
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(1, ret)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([u'/bin/echo', u'hello', u'world'], {'detached': True})],
-        self.popen_calls)
-
-  def test_main_naked_with_packages(self):
-    pin_idx_ref = [0]
-    pins = [
-      [
-        ('infra/data/x', 'badc0fee'*5),
-        ('infra/data/y', 'cafebabe'*5),
-      ],
-      [
-        ('infra/tools/echo/linux-amd64', 'deadbeef'*5),
-      ],
-    ]
-
-    def fake_ensure(args, **_kwargs):
-      if (args[0].endswith('/cipd') and
-          args[1] == 'ensure'
-          and '-json-output' in args):
-        idx = args.index('-json-output')
-        with open(args[idx+1], 'w') as json_out:
-          json.dump({
-            'result': [
-              {'package': pkg, 'instance_id': ver}
-              for pkg, ver in pins[pin_idx_ref[0]]
-            ],
-          }, json_out)
-        pin_idx_ref[0] += 1
-        return 0
-
-    self.popen_mocks.append(fake_ensure)
-    cipd_cache = os.path.join(self.tempdir, 'cipd_cache')
-    cmd = [
-      '--no-log',
-      '--cache', os.path.join(self.tempdir, 'cache'),
-      '--cipd-client-version', 'git:wowza',
-      '--cipd-package', 'bin:infra/tools/echo/${platform}:latest',
-      '--cipd-package', '.:infra/data/x:latest',
-      '--cipd-package', '.:infra/data/y:canary',
-      '--cipd-server', self.cipd_server.url,
-      '--cipd-cache', cipd_cache,
-      'bin/echo${EXECUTABLE_SUFFIX}',
-      'hello',
-      'world',
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(0, ret)
-
-    self.assertEqual(3, len(self.popen_calls))
-
-    # Test cipd-ensure command for installing packages.
-    for cipd_ensure_cmd, _ in self.popen_calls[0:2]:
-      self.assertEqual(cipd_ensure_cmd[:2], [
-        os.path.join(cipd_cache, 'cipd' + cipd.EXECUTABLE_SUFFIX),
-        'ensure',
-      ])
-      cache_dir_index = cipd_ensure_cmd.index('-cache-dir')
-      self.assertEqual(
-          cipd_ensure_cmd[cache_dir_index+1],
-          os.path.join(cipd_cache, 'cipd_internal'))
-
-    # Test cipd client cache. `git:wowza` was a tag and so is cacheable.
-    self.assertEqual(len(os.listdir(os.path.join(cipd_cache, 'versions'))), 2)
-    version_file = unicode(os.path.join(
-        cipd_cache, 'versions', '633d2aa4119cc66803f1600f9c4d85ce0e0581b5'))
-    self.assertTrue(fs.isfile(version_file))
-    with open(version_file) as f:
-      self.assertEqual(f.read(), 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')
-
-    client_binary_file = unicode(os.path.join(
-        cipd_cache, 'clients', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'))
-    self.assertTrue(fs.isfile(client_binary_file))
-
-    # Test echo call.
-    echo_cmd, _ = self.popen_calls[2]
-    self.assertTrue(echo_cmd[0].endswith(
-        os.path.sep + 'bin' + os.path.sep + 'echo' + cipd.EXECUTABLE_SUFFIX),
-        echo_cmd[0])
-    self.assertEqual(echo_cmd[1:], ['hello', 'world'])
-
-  def test_modified_cwd(self):
-    isolated = json_dumps({
-        'command': ['../out/some.exe', 'arg'],
-        'relative_cwd': 'some',
-    })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    _ = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(1, len(self.popen_calls))
-    self.assertEqual(
-        [([self.temp_join(u'out', u'some.exe'), 'arg'], {'detached': True})],
-        self.popen_calls)
-
-  def test_python_cmd(self):
-    isolated = json_dumps({
-        'command': ['../out/cmd.py', 'arg'],
-        'relative_cwd': 'some',
-    })
-    isolated_hash = isolateserver_mock.hash_content(isolated)
-    files = {isolated_hash:isolated}
-    _ = self._run_tha_test(isolated_hash, files)
-    self.assertEqual(1, len(self.popen_calls))
-    # Injects sys.executable.
-    self.assertEqual(
-        [
-          ([sys.executable, os.path.join(u'..', 'out', 'cmd.py'), u'arg'],
-            {'detached': True}),
-        ],
-        self.popen_calls)
-
-  def test_run_tha_test_non_isolated(self):
-    _ = self._run_tha_test(command=['/bin/echo', 'hello', 'world'])
-    self.assertEqual(
-        [([u'/bin/echo', u'hello', u'world'], {'detached': True})],
-        self.popen_calls)
-
-
-class RunIsolatedTestRun(RunIsolatedTestBase):
-  def test_output(self):
-    # Starts a full isolate server mock and has run_tha_test() upload results
-    # back after the task completes.
-    server = isolateserver_mock.MockIsolateServer()
-    try:
-      script = (
-        'import sys\n'
-        'open(sys.argv[1], "w").write("bar")\n')
-      script_hash = isolateserver_mock.hash_content(script)
-      isolated = {
-        'algo': 'sha-1',
-        'command': ['cmd.py', '${ISOLATED_OUTDIR}/foo'],
-        'files': {
-          'cmd.py': {
-            'h': script_hash,
-            'm': 0700,
-            's': len(script),
-          },
-        },
-        'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-      if sys.platform == 'win32':
-        isolated['files']['cmd.py'].pop('m')
-      isolated_data = json_dumps(isolated)
-      isolated_hash = isolateserver_mock.hash_content(isolated_data)
-      server.add_content('default-store', script)
-      server.add_content('default-store', isolated_data)
-      store = isolateserver.get_storage(server.url, 'default-store')
-
-      self.mock(sys, 'stdout', StringIO.StringIO())
-      ret = run_isolated.run_tha_test(
-          None,
-          isolated_hash,
-          store,
-          isolateserver.MemoryCache(),
-          False,
-          None,
-          None,
-          None,
-          None,
-          None,
-          None,
-          lambda run_dir: None,
-          False)
-      self.assertEqual(0, ret)
-
-      # It uploaded back. Assert the store has a new item containing foo.
-      hashes = {isolated_hash, script_hash}
-      output_hash = isolateserver_mock.hash_content('bar')
-      hashes.add(output_hash)
-      isolated =  {
-        'algo': 'sha-1',
-        'files': {
-          'foo': {
-            'h': output_hash,
-            # TODO(maruel): Handle umask.
-            'm': 0640,
-            's': 3,
-          },
-        },
-        'version': isolated_format.ISOLATED_FILE_VERSION,
-      }
-      if sys.platform == 'win32':
-        isolated['files']['foo'].pop('m')
-      uploaded = json_dumps(isolated)
-      uploaded_hash = isolateserver_mock.hash_content(uploaded)
-      hashes.add(uploaded_hash)
-      self.assertEqual(hashes, set(server.contents['default-store']))
-
-      expected = ''.join([
-        '[run_isolated_out_hack]',
-        '{"hash":"%s","namespace":"default-store","storage":%s}' % (
-            uploaded_hash, json.dumps(server.url)),
-        '[/run_isolated_out_hack]'
-      ]) + '\n'
-      self.assertEqual(expected, sys.stdout.getvalue())
-    finally:
-      server.close()
-
-
-class RunIsolatedJsonTest(RunIsolatedTestBase):
-  # Similar to RunIsolatedTest but adds the hacks to process ISOLATED_OUTDIR to
-  # generate a json result file.
-  def setUp(self):
-    super(RunIsolatedJsonTest, self).setUp()
-    self.popen_calls = []
-
-    # pylint: disable=no-self-argument
-    class Popen(object):
-      def __init__(self2, args, **kwargs):
-        kwargs.pop('cwd', None)
-        kwargs.pop('env', None)
-        self.popen_calls.append((args, kwargs))
-        # Assume ${ISOLATED_OUTDIR} is the last argument, for testing purposes.
-        self2._path = args[-1]
-        self2.returncode = None
-
-      def wait(self, timeout=None):  # pylint: disable=unused-argument
-        self.returncode = 0
-        with open(self._path, 'wb') as f:
-          f.write('generated data\n')
-        return self.returncode
-
-      def kill(self):
-        pass
-
-    self.mock(subprocess42, 'Popen', Popen)
-
-  def test_main_json(self):
-    # Instruct the Popen mock to write a file in ISOLATED_OUTDIR so it will be
-    # archived back on termination.
-    self.mock(tools, 'disable_buffering', lambda: None)
-    sub_cmd = [
-      self.temp_join(u'foo.exe'), u'cmd with space',
-      '${ISOLATED_OUTDIR}/out.txt',
-    ]
-    isolated_in_json = json_dumps({'command': sub_cmd})
-    isolated_in_hash = isolateserver_mock.hash_content(isolated_in_json)
-    def get_storage(_isolate_server, _namespace):
-      return StorageFake({isolated_in_hash:isolated_in_json})
-    self.mock(isolateserver, 'get_storage', get_storage)
-
-    out = os.path.join(self.tempdir, 'res.json')
-    cmd = [
-        '--no-log',
-        '--isolated', isolated_in_hash,
-        '--cache', self.tempdir,
-        '--isolate-server', 'https://localhost:1',
-        '--json', out,
-    ]
-    ret = run_isolated.main(cmd)
-    self.assertEqual(0, ret)
-    # Replace ${ISOLATED_OUTDIR} with the temporary directory.
-    sub_cmd[2] = self.popen_calls[0][0][2]
-    self.assertNotIn('ISOLATED_OUTDIR', sub_cmd[2])
-    self.assertEqual([(sub_cmd, {'detached': True})], self.popen_calls)
-    isolated_out = {
-      'algo': 'sha-1',
-      'files': {
-        'out.txt': {
-          'h': isolateserver_mock.hash_content('generated data\n'),
-          's': 15,
-          'm': 0640,
-        },
-      },
-      'version': isolated_format.ISOLATED_FILE_VERSION,
-    }
-    if sys.platform == 'win32':
-      del isolated_out['files']['out.txt']['m']
-    isolated_out_json = json_dumps(isolated_out)
-    isolated_out_hash = isolateserver_mock.hash_content(isolated_out_json)
-    expected = {
-      u'exit_code': 0,
-      u'had_hard_timeout': False,
-      u'internal_failure': None,
-      u'outputs_ref': {
-        u'isolated': unicode(isolated_out_hash),
-        u'isolatedserver': u'http://localhost:1',
-        u'namespace': u'default-gzip',
-      },
-      u'stats': {
-        u'isolated': {
-          u'download': {
-            u'initial_number_items': 0,
-            u'initial_size': 0,
-            u'items_cold': [len(isolated_in_json)],
-            u'items_hot': [],
-          },
-          u'upload': {
-            u'items_cold': [len(isolated_out_json)],
-            u'items_hot': [15],
-          },
-        },
-      },
-      u'version': 5,
-    }
-    actual = tools.read_json(out)
-    # duration can be exactly 0 due to low timer resolution, especially but not
-    # exclusively on Windows.
-    self.assertLessEqual(0, actual.pop(u'duration'))
-    actual_isolated_stats = actual[u'stats'][u'isolated']
-    self.assertLessEqual(0, actual_isolated_stats[u'download'].pop(u'duration'))
-    self.assertLessEqual(0, actual_isolated_stats[u'upload'].pop(u'duration'))
-    for i in (u'download', u'upload'):
-      for j in (u'items_cold', u'items_hot'):
-        actual_isolated_stats[i][j] = large.unpack(
-            base64.b64decode(actual_isolated_stats[i][j]))
-    self.assertEqual(expected, actual)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/self_signed.pem b/tools/swarming_client/tests/self_signed.pem
deleted file mode 100644
index d0a9f5d..0000000
--- a/tools/swarming_client/tests/self_signed.pem
+++ /dev/null
@@ -1,36 +0,0 @@
-This file was generated with:
-  openssl req -new -x509 -keyout self_signed.pem -out self_signed.pem \
-    -days 1 -nodes -subj /C=US/ST=Denial/L=Springfield/O=Dis/CN=localhost
-
------BEGIN PRIVATE KEY-----
-MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAL/KaQ+4sg8YlhG/
-Ndk0HjikVtaP3RV9Burwp4/DmGC6gJFeuPqhexLu+UopFPsMQCHiPytWFfB7kRqu
-lNxC6klKxPFQNBGfwnojBazh+L7f+q8sgzETcfpfWAE1wTAMi/metm10R1tdNxu5
-+PEOY8oUABnH72M0WtErICrN3SvnAgMBAAECgYBw9H8n+Tk1Vt+bmCfYwq9B4Ngc
-CiLFcxtN52poa2QlI/Jwq376bXyUzBYaLVPj/3UN/7gAh/Tn636sXkHh768Xcg6S
-qsKrDJYGPuSpCHcN62IjAdBoDjegvIH2PWNcT+8YIzCdGPEWxwkgp3npf/xRZO3t
-VXbZcgDOqmR1WcCDYQJBAOk/PBRgg64+TI6SA6VTTmGlQQ1f3GSABpBS8WVWkSEh
-/A7760XSV5/FrYo1r0BGL3XU7hCtwcEL1aBkvBsJjR8CQQDSf+gHkSNgvFYVvTc0
-PFa65qpU3WmEuZVVu9xj6XZLND2QGFD1T31uhHDtrE1fco6GdJcnELeBHZsmpAjm
-60A5AkA+iuMsP5jvjuruS77IhOb+jdY8pNiu5hlcd7Ec0DKIJNm2ltxGU5mFUPcw
-mSaN4IP3X4oaVM0gW3ED2h8KTOxXAkAkru+jqfJtYETsQu5E5yMCLUKPyYoi/Ch/
-KV7t9niMAI2d9+7b4T9trBz1/mn5cUBPRDA3OZ2RHbS1Fi8K9wJZAkBegGBKq1vt
-p8yRupW0w7jKT0pB8wNXcYunrk5UPRgz7eVU6fbgQRTQC6uAITIM0NIkFrA+dKPk
-erKhF1yuPWDt
------END PRIVATE KEY-----
------BEGIN CERTIFICATE-----
-MIICejCCAeOgAwIBAgIJAKzLwCBl4CHXMA0GCSqGSIb3DQEBBQUAMFYxCzAJBgNV
-BAYTAlVTMQ8wDQYDVQQIDAZEZW5pYWwxFDASBgNVBAcMC1NwcmluZ2ZpZWxkMQww
-CgYDVQQKDANEaXMxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0xNDA3MDExMzQwNDFa
-Fw0xNDA3MDIxMzQwNDFaMFYxCzAJBgNVBAYTAlVTMQ8wDQYDVQQIDAZEZW5pYWwx
-FDASBgNVBAcMC1NwcmluZ2ZpZWxkMQwwCgYDVQQKDANEaXMxEjAQBgNVBAMMCWxv
-Y2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAv8ppD7iyDxiWEb81
-2TQeOKRW1o/dFX0G6vCnj8OYYLqAkV64+qF7Eu75SikU+wxAIeI/K1YV8HuRGq6U
-3ELqSUrE8VA0EZ/CeiMFrOH4vt/6ryyDMRNx+l9YATXBMAyL+Z62bXRHW103G7n4
-8Q5jyhQAGcfvYzRa0SsgKs3dK+cCAwEAAaNQME4wHQYDVR0OBBYEFKqYKJxPj8Rj
-ejaEHfWRPPkB8zvrMB8GA1UdIwQYMBaAFKqYKJxPj8RjejaEHfWRPPkB8zvrMAwG
-A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAkZQnEOb7cIio60llQmhizzlX
-hT/wYRiiSHz/eb1BnmhQjXlWQE/ulyTMwgxDs8UIuMth3uviUK8SrI9nxmmGEXq6
-5w18MeDc1FibM+0LHnKYTqjYADADC99BK9fjRoxmjUhYsky2JK+JAi0XG26qjTSM
-1rIe1CdJ/iNcDzEJ4wc=
------END CERTIFICATE-----
diff --git a/tools/swarming_client/tests/subprocess42_test.py b/tools/swarming_client/tests/subprocess42_test.py
deleted file mode 100755
index 7f3d802..0000000
--- a/tools/swarming_client/tests/subprocess42_test.py
+++ /dev/null
@@ -1,728 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import itertools
-import logging
-import os
-import sys
-import tempfile
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-sys.path.insert(0, ROOT_DIR)
-
-from utils import subprocess42
-
-
-# Disable pre-set unbuffered output so it does not interfere with the testing
-# being done here. Otherwise everything would run unbuffered, which is fine but
-# not what we specifically want to test here.
-ENV = os.environ.copy()
-ENV.pop('PYTHONUNBUFFERED', None)
-
-
-SCRIPT_OUT = (
-  'import signal, sys, time;\n'
-  'l = [];\n'
-  'def handler(signum, _):\n'
-  '  l.append(signum);\n'
-  '  sys.stdout.write(\'got signal %%d\\n\' %% signum);\n'
-  '  sys.stdout.flush();\n'
-  'signal.signal(%s, handler);\n'
-  'sys.stdout.write(\'hi\\n\');\n'
-  'sys.stdout.flush();\n'
-  'while not l:\n'
-  '  try:\n'
-  '    time.sleep(0.01);\n'
-  '  except IOError:\n'
-  '    sys.stdout.write(\'ioerror\\n\');\n'
-  '    sys.stdout.flush();\n'
-  'sys.stdout.write(\'bye\\n\');\n'
-  'sys.stdout.flush();\n') % (
-    'signal.SIGBREAK' if sys.platform == 'win32' else 'signal.SIGTERM')
-
-
-SCRIPT_ERR = (
-  'import signal, sys, time;\n'
-  'l = [];\n'
-  'def handler(signum, _):\n'
-  '  l.append(signum);\n'
-  '  sys.stderr.write(\'got signal %%d\\n\' %% signum);\n'
-  '  sys.stderr.flush();\n'
-  'signal.signal(%s, handler);\n'
-  'sys.stderr.write(\'hi\\n\');\n'
-  'sys.stderr.flush();\n'
-  'while not l:\n'
-  '  try:\n'
-  '    time.sleep(0.01);\n'
-  '  except IOError:\n'
-  '    sys.stderr.write(\'ioerror\\n\');\n'
-  '    sys.stderr.flush();\n'
-  'sys.stderr.write(\'bye\\n\');\n'
-  'sys.stderr.flush();\n') % (
-    'signal.SIGBREAK' if sys.platform == 'win32' else 'signal.SIGTERM')
-
-
-OUTPUT_SCRIPT = r"""
-import re
-import sys
-import time
-
-def main():
-  try:
-    for command in sys.argv[1:]:
-      if re.match(r'^[0-9\.]+$', command):
-        time.sleep(float(command))
-        continue
-
-      if command.startswith('out_'):
-        pipe = sys.stdout
-      elif command.startswith('err_'):
-        pipe = sys.stderr
-      else:
-        return 1
-
-      command = command[4:]
-      if command == 'print':
-        pipe.write('printing')
-      elif command == 'sleeping':
-        pipe.write('Sleeping.\n')
-      elif command == 'slept':
-        pipe.write('Slept.\n')
-      elif command == 'lf':
-        pipe.write('\n')
-      elif command == 'flush':
-        pipe.flush()
-      else:
-        return 1
-    return 0
-  except OSError:
-    return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
-"""
-
-
-def to_native_eol(string):
-  if string is None:
-    return string
-  if sys.platform == 'win32':
-    return string.replace('\n', '\r\n')
-  return string
-
-
-def get_output_sleep_proc(flush, unbuffered, sleep_duration):
-  """Returns process with universal_newlines=True that prints to stdout before
-  after a sleep.
-
-  It also optionally sys.stdout.flush() before the sleep and optionally enable
-  unbuffered output in python.
-  """
-  command = [
-    'import sys,time',
-    'print(\'A\')',
-  ]
-  if flush:
-    # Sadly, this doesn't work otherwise in some combinations.
-    command.append('sys.stdout.flush()')
-  command.extend((
-    'time.sleep(%s)' % sleep_duration,
-    'print(\'B\')',
-  ))
-  cmd = [sys.executable, '-c', ';'.join(command)]
-  if unbuffered:
-    cmd.append('-u')
-  return subprocess42.Popen(
-      cmd, env=ENV, stdout=subprocess42.PIPE, universal_newlines=True)
-
-
-def get_output_sleep_proc_err(sleep_duration):
-  """Returns process with universal_newlines=True that prints to stderr before
-  and after a sleep.
-  """
-  command = [
-    'import sys,time',
-    'sys.stderr.write(\'A\\n\')',
-  ]
-  command.extend((
-    'time.sleep(%s)' % sleep_duration,
-    'sys.stderr.write(\'B\\n\')',
-  ))
-  cmd = [sys.executable, '-c', ';'.join(command)]
-  return subprocess42.Popen(
-      cmd, env=ENV, stderr=subprocess42.PIPE, universal_newlines=True)
-
-
-class Subprocess42Test(unittest.TestCase):
-  def setUp(self):
-    self._output_script = None
-    super(Subprocess42Test, self).setUp()
-
-  def tearDown(self):
-    try:
-      if self._output_script:
-        os.remove(self._output_script)
-    finally:
-      super(Subprocess42Test, self).tearDown()
-
-  @property
-  def output_script(self):
-    if not self._output_script:
-      handle, self._output_script = tempfile.mkstemp(
-          prefix='subprocess42', suffix='.py')
-      os.write(handle, OUTPUT_SCRIPT)
-      os.close(handle)
-    return self._output_script
-
-  def test_communicate_timeout(self):
-    timedout = 1 if sys.platform == 'win32' else -9
-    # Format is:
-    # ( (cmd, stderr_pipe, timeout), (stdout, stderr, returncode) ), ...
-    # See OUTPUT_SCRIPT for the meaning of the commands.
-    test_data = [
-      # 0 means no timeout, like None.
-      (
-        (['out_sleeping', '0.001', 'out_slept', 'err_print'], None, 0),
-        ('Sleeping.\nSlept.\n', None, 0),
-      ),
-      (
-        (['err_print'], subprocess42.STDOUT, 0),
-        ('printing', None, 0),
-      ),
-      (
-        (['err_print'], subprocess42.PIPE, 0),
-        ('', 'printing', 0),
-      ),
-
-      # On a loaded system, this can be tight.
-      (
-        (['out_sleeping', 'out_flush', '60', 'out_slept'], None, 0.5),
-        ('Sleeping.\n', None, timedout),
-      ),
-      (
-        (
-          # Note that err_flush is necessary on Windows but not on the other
-          # OSes. This means the likelihood of missing stderr output from a
-          # killed child process on Windows is much higher than on other OSes.
-          [
-            'out_sleeping', 'out_flush', 'err_print', 'err_flush', '60',
-            'out_slept',
-          ],
-          subprocess42.PIPE,
-          0.5),
-        ('Sleeping.\n', 'printing', timedout),
-      ),
-
-      (
-        (['out_sleeping', '0.001', 'out_slept'], None, 60),
-        ('Sleeping.\nSlept.\n', None, 0),
-      ),
-    ]
-    for i, ((args, errpipe, timeout), expected) in enumerate(test_data):
-      proc = subprocess42.Popen(
-          [sys.executable, self.output_script] + args,
-          env=ENV,
-          stdout=subprocess42.PIPE,
-          stderr=errpipe)
-      try:
-        stdout, stderr = proc.communicate(timeout=timeout)
-        code = proc.returncode
-      except subprocess42.TimeoutExpired as e:
-        stdout = e.output
-        stderr = e.stderr
-        self.assertTrue(proc.kill())
-        code = proc.wait()
-      finally:
-        duration = proc.duration()
-      expected_duration = 0.0001 if not timeout or timeout == 60 else timeout
-      self.assertTrue(duration >= expected_duration, (i, expected_duration))
-      self.assertEqual(
-          (i, stdout, stderr, code),
-          (i,
-            to_native_eol(expected[0]),
-            to_native_eol(expected[1]),
-            expected[2]))
-
-      # Try again with universal_newlines=True.
-      proc = subprocess42.Popen(
-          [sys.executable, self.output_script] + args,
-          env=ENV,
-          stdout=subprocess42.PIPE,
-          stderr=errpipe,
-          universal_newlines=True)
-      try:
-        stdout, stderr = proc.communicate(timeout=timeout)
-        code = proc.returncode
-      except subprocess42.TimeoutExpired as e:
-        stdout = e.output
-        stderr = e.stderr
-        self.assertTrue(proc.kill())
-        code = proc.wait()
-      finally:
-        duration = proc.duration()
-      self.assertTrue(duration >= expected_duration, (i, expected_duration))
-      self.assertEqual(
-          (i, stdout, stderr, code),
-          (i,) + expected)
-
-  def test_communicate_input(self):
-    cmd = [
-      sys.executable, '-u', '-c',
-      'import sys; sys.stdout.write(sys.stdin.read(5))',
-    ]
-    proc = subprocess42.Popen(
-        cmd, stdin=subprocess42.PIPE, stdout=subprocess42.PIPE)
-    out, err = proc.communicate(input='12345')
-    self.assertEqual('12345', out)
-    self.assertEqual(None, err)
-
-  def test_communicate_input_timeout(self):
-    cmd = [sys.executable, '-u', '-c', 'import time; time.sleep(60)']
-    proc = subprocess42.Popen(cmd, stdin=subprocess42.PIPE)
-    try:
-      proc.communicate(input='12345', timeout=0.5)
-      self.fail()
-    except subprocess42.TimeoutExpired as e:
-      self.assertEqual(None, e.output)
-      self.assertEqual(None, e.stderr)
-      self.assertTrue(proc.kill())
-      proc.wait()
-      self.assertLessEqual(0.5, proc.duration())
-
-  def test_communicate_input_stdout_timeout(self):
-    cmd = [
-      sys.executable, '-u', '-c',
-      'import sys, time; sys.stdout.write(sys.stdin.read(5)); time.sleep(60)',
-    ]
-    proc = subprocess42.Popen(
-        cmd, stdin=subprocess42.PIPE, stdout=subprocess42.PIPE)
-    try:
-      proc.communicate(input='12345', timeout=0.5)
-      self.fail()
-    except subprocess42.TimeoutExpired as e:
-      self.assertEqual('12345', e.output)
-      self.assertEqual(None, e.stderr)
-      self.assertTrue(proc.kill())
-      proc.wait()
-      self.assertLessEqual(0.5, proc.duration())
-
-  def test_communicate_timeout_no_pipe(self):
-    # In this case, it's effectively a wait() call.
-    cmd = [sys.executable, '-u', '-c', 'import time; time.sleep(60)']
-    proc = subprocess42.Popen(cmd)
-    try:
-      proc.communicate(timeout=0.5)
-      self.fail()
-    except subprocess42.TimeoutExpired as e:
-      self.assertEqual(None, e.output)
-      self.assertEqual(None, e.stderr)
-      self.assertTrue(proc.kill())
-      proc.wait()
-      self.assertLessEqual(0.5, proc.duration())
-
-  def test_call(self):
-    cmd = [sys.executable, '-u', '-c', 'import sys; sys.exit(0)']
-    self.assertEqual(0, subprocess42.call(cmd))
-
-    cmd = [sys.executable, '-u', '-c', 'import sys; sys.exit(1)']
-    self.assertEqual(1, subprocess42.call(cmd))
-
-  def test_check_call(self):
-    cmd = [sys.executable, '-u', '-c', 'import sys; sys.exit(0)']
-    self.assertEqual(0, subprocess42.check_call(cmd))
-
-    cmd = [sys.executable, '-u', '-c', 'import sys; sys.exit(1)']
-    try:
-      self.assertEqual(1, subprocess42.check_call(cmd))
-      self.fail()
-    except subprocess42.CalledProcessError as e:
-      self.assertEqual(None, e.output)
-
-  def test_check_output(self):
-    cmd = [sys.executable, '-u', '-c', 'print(\'.\')']
-    self.assertEqual(
-        '.\n',
-        subprocess42.check_output(cmd, universal_newlines=True))
-
-    cmd = [sys.executable, '-u', '-c', 'import sys; print(\'.\'); sys.exit(1)']
-    try:
-      subprocess42.check_output(cmd, universal_newlines=True)
-      self.fail()
-    except subprocess42.CalledProcessError as e:
-      self.assertEqual('.\n', e.output)
-
-  def test_recv_any(self):
-    # Test all pipe direction and output scenarios.
-    combinations = [
-      {
-        'cmd': ['out_print', 'err_print'],
-        'stdout': None,
-        'stderr': None,
-        'expected': {},
-      },
-      {
-        'cmd': ['out_print', 'err_print'],
-        'stdout': None,
-        'stderr': subprocess42.STDOUT,
-        'expected': {},
-      },
-
-      {
-        'cmd': ['out_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.PIPE,
-        'expected': {'stdout': 'printing'},
-      },
-      {
-        'cmd': ['out_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': None,
-        'expected': {'stdout': 'printing'},
-      },
-      {
-        'cmd': ['out_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.STDOUT,
-        'expected': {'stdout': 'printing'},
-      },
-
-      {
-        'cmd': ['err_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.PIPE,
-        'expected': {'stderr': 'printing'},
-      },
-      {
-        'cmd': ['err_print'],
-        'stdout': None,
-        'stderr': subprocess42.PIPE,
-        'expected': {'stderr': 'printing'},
-      },
-      {
-        'cmd': ['err_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.STDOUT,
-        'expected': {'stdout': 'printing'},
-      },
-
-      {
-        'cmd': ['out_print', 'err_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.PIPE,
-        'expected': {'stderr': 'printing', 'stdout': 'printing'},
-      },
-      {
-        'cmd': ['out_print', 'err_print'],
-        'stdout': subprocess42.PIPE,
-        'stderr': subprocess42.STDOUT,
-        'expected': {'stdout': 'printingprinting'},
-      },
-    ]
-    for i, testcase in enumerate(combinations):
-      cmd = [sys.executable, self.output_script] + testcase['cmd']
-      p = subprocess42.Popen(
-          cmd, env=ENV, stdout=testcase['stdout'], stderr=testcase['stderr'])
-      actual = {}
-      while p.poll() is None:
-        pipe, data = p.recv_any()
-        if data:
-          actual.setdefault(pipe, '')
-          actual[pipe] += data
-
-      # The process exited, read any remaining data in the pipes.
-      while True:
-        pipe, data = p.recv_any()
-        if pipe is None:
-          break
-        actual.setdefault(pipe, '')
-        actual[pipe] += data
-      self.assertEqual(
-          testcase['expected'],
-          actual,
-          (i, testcase['cmd'], testcase['expected'], actual))
-      self.assertEqual((None, None), p.recv_any())
-      self.assertEqual(0, p.returncode)
-
-  def test_recv_any_different_buffering(self):
-    # Specifically test all buffering scenarios.
-    for flush, unbuffered in itertools.product([True, False], [True, False]):
-      actual = ''
-      proc = get_output_sleep_proc(flush, unbuffered, 0.5)
-      while True:
-        p, data = proc.recv_any()
-        if not p:
-          break
-        self.assertEqual('stdout', p)
-        self.assertTrue(data, (p, data))
-        actual += data
-
-      self.assertEqual('A\nB\n', actual)
-      # Contrary to yield_any() or recv_any(0), wait() needs to be used here.
-      proc.wait()
-      self.assertEqual(0, proc.returncode)
-
-  def test_recv_any_timeout_0(self):
-    # recv_any() is expected to time out and return None with no data pending
-    # at least once, due to the sleep of 'duration' and the use of timeout=0.
-    for flush, unbuffered in itertools.product([True, False], [True, False]):
-      for duration in (0.05, 0.1, 0.5, 2):
-        try:
-          actual = ''
-          proc = get_output_sleep_proc(flush, unbuffered, duration)
-          try:
-            got_none = False
-            while True:
-              p, data = proc.recv_any(timeout=0)
-              if not p:
-                if proc.poll() is None:
-                  got_none = True
-                  continue
-                break
-              self.assertEqual('stdout', p)
-              self.assertTrue(data, (p, data))
-              actual += data
-
-            self.assertEqual('A\nB\n', actual)
-            self.assertEqual(0, proc.returncode)
-            self.assertEqual(True, got_none)
-            break
-          finally:
-            proc.kill()
-            proc.wait()
-        except AssertionError:
-          if duration != 2:
-            print('Sleeping rocks. Trying slower.')
-            continue
-          raise
-
-  def test_yield_any_no_timeout(self):
-    for duration in (0.05, 0.1, 0.5, 2):
-      try:
-        proc = get_output_sleep_proc(True, True, duration)
-        try:
-          expected = [
-            'A\n',
-            'B\n',
-          ]
-          for p, data in proc.yield_any():
-            self.assertEqual('stdout', p)
-            self.assertEqual(expected.pop(0), data)
-          self.assertEqual(0, proc.returncode)
-          self.assertEqual([], expected)
-          break
-        finally:
-          proc.kill()
-          proc.wait()
-      except AssertionError:
-        if duration != 2:
-          print('Sleeping rocks. Trying slower.')
-          continue
-        raise
-
-  def test_yield_any_timeout_0(self):
-    # yield_any() is expected to time out and yield None with no data pending
-    # at least once, due to the sleep of 'duration' and the use of timeout=0.
-    for duration in (0.05, 0.1, 0.5, 2):
-      try:
-        proc = get_output_sleep_proc(True, True, duration)
-        try:
-          expected = [
-            'A\n',
-            'B\n',
-          ]
-          got_none = False
-          for p, data in proc.yield_any(timeout=0):
-            if not p:
-              got_none = True
-              continue
-            self.assertEqual('stdout', p)
-            self.assertEqual(expected.pop(0), data)
-          self.assertEqual(0, proc.returncode)
-          self.assertEqual([], expected)
-          self.assertEqual(True, got_none)
-          break
-        finally:
-          proc.kill()
-          proc.wait()
-      except AssertionError:
-        if duration != 2:
-          print('Sleeping rocks. Trying slower.')
-          continue
-        raise
-
-  def test_yield_any_timeout_0_called(self):
-    # yield_any() is expected to time out and yield None with no data pending
-    # at least once, due to the sleep of 'duration' and the use of timeout=0.
-    for duration in (0.05, 0.1, 0.5, 2):
-      try:
-        proc = get_output_sleep_proc(True, True, duration)
-        try:
-          expected = [
-            'A\n',
-            'B\n',
-          ]
-          got_none = False
-          called = []
-          def timeout():
-            called.append(0)
-            return 0
-          for p, data in proc.yield_any(timeout=timeout):
-            if not p:
-              got_none = True
-              continue
-            self.assertEqual('stdout', p)
-            self.assertEqual(expected.pop(0), data)
-          self.assertEqual(0, proc.returncode)
-          self.assertEqual([], expected)
-          self.assertEqual(True, got_none)
-          self.assertTrue(called)
-          break
-        finally:
-          proc.kill()
-          proc.wait()
-      except AssertionError:
-        if duration != 2:
-          print('Sleeping rocks. Trying slower.')
-          continue
-        raise
-
-  def test_yield_any_returncode(self):
-    proc = subprocess42.Popen(
-        [sys.executable, '-c', 'import sys;sys.stdout.write("yo");sys.exit(1)'],
-        stdout=subprocess42.PIPE)
-    for p, d in proc.yield_any():
-      self.assertEqual('stdout', p)
-      self.assertEqual('yo', d)
-    # There was a bug where the second call to wait() would overwrite
-    # proc.returncode with 0 when timeout is not None.
-    self.assertEqual(1, proc.wait())
-    self.assertEqual(1, proc.wait(timeout=0))
-    self.assertEqual(1, proc.poll())
-    self.assertEqual(1, proc.returncode)
-    # On Windows, the clock resolution is 15ms so Popen.duration() will likely
-    # be 0.
-    self.assertLessEqual(0, proc.duration())
-
-  def _wait_for_hi(self, proc, err):
-    actual = ''
-    while True:
-      if err:
-        data = proc.recv_err(timeout=5)
-      else:
-        data = proc.recv_out(timeout=5)
-      if not data:
-        self.fail('%r' % actual)
-      self.assertTrue(data)
-      actual += data
-      if actual in ('hi\n', 'hi\r\n'):
-        break
-
-  def _proc(self, err, **kwargs):
-    # Do not use the -u flag here; we want to test when output is buffered by
-    # default. See the reference above about PYTHONUNBUFFERED.
-    # That's why the two scripts use .flush(). Sadly, the flush() call is
-    # needed on Windows even for sys.stderr (!)
-    cmd = [sys.executable, '-c', SCRIPT_ERR if err else SCRIPT_OUT]
-    # TODO(maruel): Make universal_newlines=True work and not hang.
-    if err:
-      kwargs['stderr'] = subprocess42.PIPE
-    else:
-      kwargs['stdout'] = subprocess42.PIPE
-    return subprocess42.Popen(cmd, **kwargs)
-
-  def test_detached(self):
-    self._test_detached(False)
-    self._test_detached(True)
-
-  def _test_detached(self, err):
-    is_win = (sys.platform == 'win32')
-    key = 'stderr' if err else 'stdout'
-    proc = self._proc(err, detached=True)
-    try:
-      self._wait_for_hi(proc, err)
-      proc.terminate()
-      if is_win:
-        # What happens on Windows is that the process is immediately killed
-        # after handling SIGBREAK.
-        self.assertEqual(0, proc.wait())
-        # Windows...
-        self.assertIn(
-            proc.recv_any(),
-            (
-              (key, 'got signal 21\r\nioerror\r\nbye\r\n'),
-              (key, 'got signal 21\nioerror\nbye\n'),
-              (key, 'got signal 21\r\nbye\r\n'),
-              (key, 'got signal 21\nbye\n'),
-            ))
-      else:
-        self.assertEqual(0, proc.wait())
-        self.assertEqual((key, 'got signal 15\nbye\n'), proc.recv_any())
-    finally:
-      # In case the test fails.
-      proc.kill()
-      proc.wait()
-
-  def test_attached(self):
-    self._test_attached(False)
-    self._test_attached(True)
-
-  def _test_attached(self, err):
-    is_win = (sys.platform == 'win32')
-    key = 'stderr' if err else 'stdout'
-    proc = self._proc(err, detached=False)
-    try:
-      self._wait_for_hi(proc, err)
-      proc.terminate()
-      if is_win:
-        # If attached, it's hard killed.
-        self.assertEqual(1, proc.wait())
-        self.assertEqual((None, None), proc.recv_any())
-      else:
-        self.assertEqual(0, proc.wait())
-        self.assertEqual((key, 'got signal 15\nbye\n'), proc.recv_any())
-    finally:
-      # In case the test fails.
-      proc.kill()
-      proc.wait()
-
-  def test_split(self):
-    data = [
-      ('stdout', 'o1\no2\no3\n'),
-      ('stderr', 'e1\ne2\ne3\n'),
-      ('stdout', '\n\n'),
-      ('stdout', '\n'),
-      ('stdout', 'o4\no5'),
-      ('stdout', '_sameline\npart1 of one line '),
-      ('stderr', 'err inserted between two parts of stdout\n'),
-      ('stdout', 'part2 of one line\n'),
-      ('stdout', 'incomplete last stdout'),
-      ('stderr', 'incomplete last stderr'),
-    ]
-    self.assertEquals(list(subprocess42.split(data)), [
-      ('stdout', 'o1'),
-      ('stdout', 'o2'),
-      ('stdout', 'o3'),
-      ('stderr', 'e1'),
-      ('stderr', 'e2'),
-      ('stderr', 'e3'),
-      ('stdout', ''),
-      ('stdout', ''),
-      ('stdout', ''),
-      ('stdout', 'o4'),
-      ('stdout', 'o5_sameline'),
-      ('stderr', 'err inserted between two parts of stdout'),
-      ('stdout', 'part1 of one line part2 of one line'),
-      ('stderr', 'incomplete last stderr'),
-      ('stdout', 'incomplete last stdout'),
-    ])
-
-if __name__ == '__main__':
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/swarming_smoke_test.py b/tools/swarming_client/tests/swarming_smoke_test.py
deleted file mode 100755
index 8d9616e..0000000
--- a/tools/swarming_client/tests/swarming_smoke_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-import os
-import subprocess
-import sys
-import unittest
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-
-ISOLATE_SERVER = 'https://isolateserver.appspot.com/'
-SWARMING_SERVER = 'https://chromium-swarm.appspot.com/'
-
-
-class TestSwarm(unittest.TestCase):
-  def test_example(self):
-    # pylint: disable=W0101
-    # A user should be able to trigger a swarm job and collect the results.
-    cmd = [
-      sys.executable,
-      '3_swarming_trigger_collect.py',
-      '--isolate-server', ISOLATE_SERVER,
-      '--swarming', SWARMING_SERVER,
-    ]
-    if '-v' in sys.argv:
-      cmd.append('--verbose')
-    p = subprocess.Popen(
-        cmd,
-        cwd=os.path.join(BASE_DIR, '..', 'example'),
-        stdin=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT)
-    out = p.communicate()[0]
-    logging.debug(out)
-    self.assertEqual(0, p.returncode, out)
-
-
-if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  unittest.main()
diff --git a/tools/swarming_client/tests/swarming_test.py b/tools/swarming_client/tests/swarming_test.py
deleted file mode 100755
index 06089ab..0000000
--- a/tools/swarming_client/tests/swarming_test.py
+++ /dev/null
@@ -1,1529 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import datetime
-import hashlib
-import json
-import logging
-import os
-import re
-import StringIO
-import subprocess
-import sys
-import tempfile
-import threading
-import time
-import unittest
-
-# net_utils adjusts sys.path.
-import net_utils
-
-from depot_tools import auto_stub
-
-import auth
-import isolateserver
-import swarming
-import test_utils
-
-from depot_tools import fix_encoding
-from utils import file_path
-from utils import logging_utils
-from utils import tools
-
-import httpserver_mock
-import isolateserver_mock
-
-
-FILE_HASH = u'1' * 40
-TEST_NAME = u'unit_tests'
-
-
-OUTPUT = 'Ran stuff\n'
-
-SHARD_OUTPUT_1 = 'Shard 1 of 3.'
-SHARD_OUTPUT_2 = 'Shard 2 of 3.'
-SHARD_OUTPUT_3 = 'Shard 3 of 3.'
-
-
-def gen_yielded_data(index, **kwargs):
-  """Returns an entry as it would be yielded by yield_results()."""
-  return index, gen_result_response(**kwargs)
-
-
-def get_results(keys, output_collector=None):
-  """Simplifies the call to yield_results().
-
-  The timeout is hard-coded to 10 seconds.
-  """
-  return list(
-      swarming.yield_results(
-          'https://host:9001', keys, 10., None, True, output_collector, False))
-
-
-def collect(url, task_ids):
-  """Simplifies the call to swarming.collect()."""
-  return swarming.collect(
-    swarming=url,
-    task_ids=task_ids,
-    timeout=10,
-    decorate=True,
-    print_status_updates=True,
-    task_summary_json=None,
-    task_output_dir=None,
-    include_perf=False)
-
-
-def main(args):
-  """Bypasses swarming.main()'s exception handling.
-
-  It gets in the way when debugging test failures.
-  """
-  dispatcher = swarming.subcommand.CommandDispatcher('swarming')
-  return dispatcher.execute(swarming.OptionParserSwarming(), args)
-
-
-def gen_request_data(properties=None, **kwargs):
-  out = {
-    'expiration_secs': 3600,
-    'name': 'unit_tests',
-    'parent_task_id': '',
-    'priority': 101,
-    'properties': {
-      'cipd_input': None,
-      'command': None,
-      'dimensions': [
-        {'key': 'foo', 'value': 'bar'},
-        {'key': 'os', 'value': 'Mac'},
-      ],
-      'env': [],
-      'execution_timeout_secs': 60,
-      'extra_args': ['--some-arg', '123'],
-      'grace_period_secs': 30,
-      'idempotent': False,
-      'inputs_ref': None,
-      'io_timeout_secs': 60,
-    },
-    'tags': ['tag:a', 'tag:b'],
-    'user': 'joe@localhost',
-  }
-  out.update(kwargs)
-  out['properties'].update(properties or {})
-  return out
-
-
-def gen_request_response(request, **kwargs):
-  # As seen in services/swarming/handlers_api.py.
-  out = {
-    'request': request.copy(),
-    'task_id': '12300',
-  }
-  out.update(kwargs)
-  return out
-
-
-def gen_result_response(**kwargs):
-  out = {
-    u'bot_id': u'swarm6',
-    u'completed_ts': u'2014-09-24T13:49:16.012345',
-    u'created_ts': u'2014-09-24T13:49:03.012345',
-    u'duration': 0.9636809825897217,
-    u'exit_code': 0,
-    u'failure': False,
-    u'internal_failure': False,
-    u'modified_ts': u'2014-09-24T13:49:17.012345',
-    u'name': u'heartbeat-canary-2014-09-24_13:49:01-os=Linux',
-    u'server_versions': [u'1'],
-    u'started_ts': u'2014-09-24T13:49:09.012345',
-    u'state': 'COMPLETED',
-    u'tags': [u'cpu:x86', u'priority:100', u'user:joe@localhost'],
-    u'task_id': u'10100',
-    u'try_number': 1,
-    u'user': u'joe@localhost',
-  }
-  out.update(kwargs)
-  return out
-
-
-# Silence pylint 'Access to a protected member _Event of a client class'.
-class NonBlockingEvent(threading._Event):  # pylint: disable=W0212
-  """Just like threading.Event, but a class and ignores timeout in 'wait'.
-
-  Intended to be used as a mock for threading.Event in tests.
-  """
-
-  def wait(self, timeout=None):
-    return super(NonBlockingEvent, self).wait(0)
-
-
-class SwarmingServerHandler(httpserver_mock.MockHandler):
-  """An extremely minimal implementation of the swarming server API v1.0."""
-
-  def do_GET(self):
-    logging.info('S GET %s', self.path)
-    if self.path in ('/on/load', '/on/quit'):
-      self._octet_stream('')
-    elif self.path == '/auth/api/v1/server/oauth_config':
-      self._json({
-          'client_id': 'c',
-          'client_not_so_secret': 's',
-          'primary_url': self.server.url})
-    elif self.path == '/auth/api/v1/accounts/self':
-      self._json({'identity': 'user:joe', 'xsrf_token': 'foo'})
-    else:
-      m = re.match(r'/api/swarming/v1/task/(\d+)/request', self.path)
-      if m:
-        logging.info('%s', m.group(1))
-        self._json(self.server.tasks[int(m.group(1))])
-      else:
-        self._json({'a': 'b'})
-        # raise NotImplementedError(self.path)
-
-  def do_POST(self):
-    logging.info('POST %s', self.path)
-    raise NotImplementedError(self.path)
-
-
-class MockSwarmingServer(httpserver_mock.MockServer):
-  _HANDLER_CLS = SwarmingServerHandler
-
-  def __init__(self):
-    super(MockSwarmingServer, self).__init__()
-    self._server.tasks = {}
-
-
-class Common(object):
-  def setUp(self):
-    self._tempdir = None
-    self.mock(auth, 'ensure_logged_in', lambda _: None)
-    self.mock(sys, 'stdout', StringIO.StringIO())
-    self.mock(sys, 'stderr', StringIO.StringIO())
-    self.mock(logging_utils, 'prepare_logging', lambda *args: None)
-    self.mock(logging_utils, 'set_console_level', lambda *args: None)
-
-  def tearDown(self):
-    if self._tempdir:
-      file_path.rmtree(self._tempdir)
-    if not self.has_failed():
-      self._check_output('', '')
-
-  @property
-  def tempdir(self):
-    """Creates the directory on first reference."""
-    if not self._tempdir:
-      self._tempdir = tempfile.mkdtemp(prefix=u'swarming_test')
-    return self._tempdir
-
-  def _check_output(self, out, err):
-    self.assertEqual(
-        out.splitlines(True), sys.stdout.getvalue().splitlines(True))
-    self.assertEqual(
-        err.splitlines(True), sys.stderr.getvalue().splitlines(True))
-
-    # Flush their content by mocking them again.
-    self.mock(sys, 'stdout', StringIO.StringIO())
-    self.mock(sys, 'stderr', StringIO.StringIO())
-
-
-class NetTestCase(net_utils.TestCase, Common):
-  """Base class that defines the url_open mock."""
-  def setUp(self):
-    net_utils.TestCase.setUp(self)
-    Common.setUp(self)
-    self.mock(time, 'sleep', lambda _: None)
-    self.mock(subprocess, 'call', lambda *_: self.fail())
-    self.mock(threading, 'Event', NonBlockingEvent)
-
-
-class TestIsolated(auto_stub.TestCase, Common):
-  """Test functions with isolated_ prefix."""
-  def setUp(self):
-    auto_stub.TestCase.setUp(self)
-    Common.setUp(self)
-    self._isolate = isolateserver_mock.MockIsolateServer()
-    self._swarming = MockSwarmingServer()
-
-  def tearDown(self):
-    try:
-      self._isolate.close_start()
-      self._swarming.close_start()
-      self._isolate.close_end()
-      self._swarming.close_end()
-    finally:
-      Common.tearDown(self)
-      auto_stub.TestCase.tearDown(self)
-
-  def test_reproduce_isolated(self):
-    old_cwd = os.getcwd()
-    try:
-      os.chdir(self.tempdir)
-
-      def call(cmd, env, cwd):
-        self.assertEqual([sys.executable, u'main.py', u'foo', '--bar'], cmd)
-        self.assertEqual(None, env)
-        self.assertEqual(unicode(os.path.abspath('work')), cwd)
-        return 0
-
-      self.mock(subprocess, 'call', call)
-
-      main_hash = self._isolate.add_content_compressed(
-          'default-gzip', 'not executed')
-      isolated = {
-        'files': {
-          'main.py': {
-            'h': main_hash,
-            's': 12,
-            'm': 0700,
-          },
-        },
-        'command': ['python', 'main.py'],
-      }
-      isolated_hash = self._isolate.add_content_compressed(
-          'default-gzip', json.dumps(isolated))
-      self._swarming._server.tasks[123] = {
-        'properties': {
-          'inputs_ref': {
-            'isolatedserver': self._isolate.url,
-            'namespace': 'default-gzip',
-            'isolated': isolated_hash,
-          },
-          'extra_args': ['foo'],
-        },
-      }
-      ret = main(
-          [
-            'reproduce', '--swarming', self._swarming.url, '123', '--',
-            '--bar',
-          ])
-      self._check_output('', '')
-      self.assertEqual(0, ret)
-    finally:
-      os.chdir(old_cwd)
-
-
-class TestSwarmingTrigger(NetTestCase):
-  def test_trigger_task_shards_2_shards(self):
-    task_request = swarming.NewTaskRequest(
-        expiration_secs=60*60,
-        name=TEST_NAME,
-        parent_task_id=None,
-        priority=101,
-        properties=swarming.TaskProperties(
-            cipd_input=None,
-            command=['a', 'b'],
-            dimensions={'foo': 'bar', 'os': 'Mac'},
-            env={},
-            execution_timeout_secs=60,
-            extra_args=[],
-            grace_period_secs=30,
-            idempotent=False,
-            inputs_ref=None,
-            io_timeout_secs=60),
-        tags=['tag:a', 'tag:b'],
-        user='joe@localhost')
-
-    request_1 = swarming.task_request_to_raw_request(task_request)
-    request_1['name'] = u'unit_tests:0:2'
-    request_1['properties']['env'] = [
-      {'key': 'GTEST_SHARD_INDEX', 'value': '0'},
-      {'key': 'GTEST_TOTAL_SHARDS', 'value': '2'},
-    ]
-    result_1 = gen_request_response(request_1)
-
-    request_2 = swarming.task_request_to_raw_request(task_request)
-    request_2['name'] = u'unit_tests:1:2'
-    request_2['properties']['env'] = [
-      {'key': 'GTEST_SHARD_INDEX', 'value': '1'},
-      {'key': 'GTEST_TOTAL_SHARDS', 'value': '2'},
-    ]
-    result_2 = gen_request_response(request_2, task_id='12400')
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request_1},
-            result_1,
-          ),
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request_2},
-            result_2,
-          ),
-        ])
-
-    tasks = swarming.trigger_task_shards(
-        swarming='https://localhost:1',
-        task_request=task_request,
-        shards=2)
-    expected = {
-      u'unit_tests:0:2': {
-        'shard_index': 0,
-        'task_id': '12300',
-        'view_url': 'https://localhost:1/user/task/12300',
-      },
-      u'unit_tests:1:2': {
-        'shard_index': 1,
-        'task_id': '12400',
-        'view_url': 'https://localhost:1/user/task/12400',
-      },
-    }
-    self.assertEqual(expected, tasks)
-
-  def test_trigger_task_shards_priority_override(self):
-    task_request = swarming.NewTaskRequest(
-        expiration_secs=60*60,
-        name=TEST_NAME,
-        parent_task_id='123',
-        priority=101,
-        properties=swarming.TaskProperties(
-            cipd_input=None,
-            command=['a', 'b'],
-            dimensions={'foo': 'bar', 'os': 'Mac'},
-            env={},
-            execution_timeout_secs=60,
-            extra_args=[],
-            grace_period_secs=30,
-            idempotent=False,
-            inputs_ref=None,
-            io_timeout_secs=60),
-        tags=['tag:a', 'tag:b'],
-        user='joe@localhost')
-
-    request = swarming.task_request_to_raw_request(task_request)
-    self.assertEqual('123', request['parent_task_id'])
-
-    result = gen_request_response(request)
-    result['request']['priority'] = 200
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-
-    os.environ['SWARMING_TASK_ID'] = '123'
-    try:
-      tasks = swarming.trigger_task_shards(
-          swarming='https://localhost:1',
-          shards=1,
-          task_request=task_request)
-    finally:
-      os.environ.pop('SWARMING_TASK_ID')
-    expected = {
-      u'unit_tests': {
-        'shard_index': 0,
-        'task_id': '12300',
-        'view_url': 'https://localhost:1/user/task/12300',
-      }
-    }
-    self.assertEqual(expected, tasks)
-    self._check_output('', 'Priority was reset to 200\n')
-
-  def test_trigger_cipd_package(self):
-    task_request = swarming.NewTaskRequest(
-        expiration_secs=60*60,
-        name=TEST_NAME,
-        parent_task_id='123',
-        priority=101,
-        properties=swarming.TaskProperties(
-            cipd_input=swarming.CipdInput(
-                client_package=None,
-                packages=[
-                    swarming.CipdPackage(
-                        package_name='mypackage',
-                        path='path/to/package',
-                        version='abc123')],
-                server=None),
-            command=['a', 'b'],
-            dimensions={'foo': 'bar', 'os': 'Mac'},
-            env={},
-            execution_timeout_secs=60,
-            extra_args=[],
-            grace_period_secs=30,
-            idempotent=False,
-            inputs_ref=None,
-            io_timeout_secs=60),
-        tags=['tag:a', 'tag:b'],
-        user='joe@localhost')
-
-    request = swarming.task_request_to_raw_request(task_request)
-    expected = {
-      'client_package': None,
-      'packages': [{
-          'package_name': 'mypackage',
-          'path': 'path/to/package',
-          'version': 'abc123',
-      }],
-      'server': None
-    }
-    self.assertEqual(expected, request['properties']['cipd_input'])
-
-    result = gen_request_response(request)
-    result['request']['priority'] = 200
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-
-    os.environ['SWARMING_TASK_ID'] = '123'
-    try:
-      tasks = swarming.trigger_task_shards(
-          swarming='https://localhost:1',
-          shards=1,
-          task_request=task_request)
-    finally:
-      os.environ.pop('SWARMING_TASK_ID')
-    expected = {
-      u'unit_tests': {
-        'shard_index': 0,
-        'task_id': '12300',
-        'view_url': 'https://localhost:1/user/task/12300',
-      }
-    }
-    self.assertEqual(expected, tasks)
-    self._check_output('', 'Priority was reset to 200\n')
-
-
-class TestSwarmingCollection(NetTestCase):
-  def test_success(self):
-    self.expected_requests(
-        [
-          (
-            'https://host:9001/api/swarming/v1/task/10100/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10100/stdout',
-            {},
-            {'output': OUTPUT},
-          ),
-        ])
-    expected = [gen_yielded_data(0, output=OUTPUT)]
-    self.assertEqual(expected, get_results(['10100']))
-
-  def test_failure(self):
-    self.expected_requests(
-        [
-          (
-            'https://host:9001/api/swarming/v1/task/10100/result',
-            {'retry_50x': False},
-            gen_result_response(exit_code=1),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10100/stdout',
-            {},
-            {'output': OUTPUT},
-          ),
-        ])
-    expected = [gen_yielded_data(0, output=OUTPUT, exit_code=1)]
-    self.assertEqual(expected, get_results(['10100']))
-
-  def test_no_ids(self):
-    actual = get_results([])
-    self.assertEqual([], actual)
-
-  def test_url_errors(self):
-    self.mock(logging, 'error', lambda *_, **__: None)
-    # NOTE: get_results() hardcodes timeout=10.
-    now = {}
-    lock = threading.Lock()
-    def get_now():
-      t = threading.current_thread()
-      with lock:
-        return now.setdefault(t, range(10)).pop(0)
-    self.mock(swarming.net, 'sleep_before_retry', lambda _x, _y: None)
-    self.mock(swarming, 'now', get_now)
-    # The actual number of requests here depends on 'now' progressing to 10
-    # seconds. now() is called once per loop iteration, and the loop makes 9
-    # iterations.
-    self.expected_requests(
-        9 * [
-          (
-            'https://host:9001/api/swarming/v1/task/10100/result',
-            {'retry_50x': False},
-            None,
-          )
-        ])
-    actual = get_results(['10100'])
-    self.assertEqual([], actual)
-    self.assertTrue(all(not v for v in now.itervalues()), now)
-
-  def test_many_shards(self):
-    self.expected_requests(
-        [
-          (
-            'https://host:9001/api/swarming/v1/task/10100/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10100/stdout',
-            {},
-            {'output': SHARD_OUTPUT_1},
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10200/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10200/stdout',
-            {},
-            {'output': SHARD_OUTPUT_2},
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10300/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10300/stdout',
-            {},
-            {'output': SHARD_OUTPUT_3},
-          ),
-        ])
-    expected = [
-      gen_yielded_data(0, output=SHARD_OUTPUT_1),
-      gen_yielded_data(1, output=SHARD_OUTPUT_2),
-      gen_yielded_data(2, output=SHARD_OUTPUT_3),
-    ]
-    actual = get_results(['10100', '10200', '10300'])
-    self.assertEqual(expected, sorted(actual))
-
-  def test_output_collector_called(self):
-    # Three shards, one failed. All results are passed to output collector.
-    self.expected_requests(
-        [
-          (
-            'https://host:9001/api/swarming/v1/task/10100/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10100/stdout',
-            {},
-            {'output': SHARD_OUTPUT_1},
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10200/result',
-            {'retry_50x': False},
-            gen_result_response(),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10200/stdout',
-            {},
-            {'output': SHARD_OUTPUT_2},
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10300/result',
-            {'retry_50x': False},
-            gen_result_response(exit_code=1),
-          ),
-          (
-            'https://host:9001/api/swarming/v1/task/10300/stdout',
-            {},
-            {'output': SHARD_OUTPUT_3},
-          ),
-        ])
-
-    class FakeOutputCollector(object):
-      def __init__(self):
-        self.results = []
-        self._lock = threading.Lock()
-
-      def process_shard_result(self, index, result):
-        with self._lock:
-          self.results.append((index, result))
-
-    output_collector = FakeOutputCollector()
-    get_results(['10100', '10200', '10300'], output_collector)
-
-    expected = [
-      gen_yielded_data(0, output=SHARD_OUTPUT_1),
-      gen_yielded_data(1, output=SHARD_OUTPUT_2),
-      gen_yielded_data(2, output=SHARD_OUTPUT_3, exit_code=1),
-    ]
-    self.assertEqual(sorted(expected), sorted(output_collector.results))
-
-  def test_collect_nothing(self):
-    self.mock(swarming, 'yield_results', lambda *_: [])
-    self.assertEqual(1, collect('https://localhost:1', ['10100', '10200']))
-    self._check_output('', 'Results from some shards are missing: 0, 1\n')
-
-  def test_collect_success(self):
-    data = gen_result_response(output='Foo')
-    self.mock(swarming, 'yield_results', lambda *_: [(0, data)])
-    self.assertEqual(0, collect('https://localhost:1', ['10100']))
-    expected = u'\n'.join((
-      '+---------------------------------------------------------------------+',
-      '| Shard 0  https://localhost:1/user/task/10100                        |',
-      '+---------------------------------------------------------------------+',
-      'Foo',
-      '+---------------------------------------------------------------------+',
-      '| End of shard 0  Pending: 6.0s  Duration: 1.0s  Bot: swarm6  Exit: 0 |',
-      '+---------------------------------------------------------------------+',
-      'Total duration: 1.0s',
-      ''))
-    self._check_output(expected, '')
-
-  def test_collect_fail(self):
-    data = gen_result_response(output='Foo', exit_code=-9)
-    data['output'] = 'Foo'
-    self.mock(swarming, 'yield_results', lambda *_: [(0, data)])
-    self.assertEqual(-9, collect('https://localhost:1', ['10100']))
-    expected = u'\n'.join((
-      '+----------------------------------------------------------------------'
-        '+',
-      '| Shard 0  https://localhost:1/user/task/10100                         '
-        '|',
-      '+----------------------------------------------------------------------'
-        '+',
-      'Foo',
-      '+----------------------------------------------------------------------'
-        '+',
-      '| End of shard 0  Pending: 6.0s  Duration: 1.0s  Bot: swarm6  Exit: -9 '
-        '|',
-      '+----------------------------------------------------------------------'
-        '+',
-      'Total duration: 1.0s',
-      ''))
-    self._check_output(expected, '')
-
-  def test_collect_one_missing(self):
-    data = gen_result_response(output='Foo')
-    data['output'] = 'Foo'
-    self.mock(swarming, 'yield_results', lambda *_: [(0, data)])
-    self.assertEqual(1, collect('https://localhost:1', ['10100', '10200']))
-    expected = u'\n'.join((
-      '+---------------------------------------------------------------------+',
-      '| Shard 0  https://localhost:1/user/task/10100                        |',
-      '+---------------------------------------------------------------------+',
-      'Foo',
-      '+---------------------------------------------------------------------+',
-      '| End of shard 0  Pending: 6.0s  Duration: 1.0s  Bot: swarm6  Exit: 0 |',
-      '+---------------------------------------------------------------------+',
-      '',
-      'Total duration: 1.0s',
-      ''))
-    self._check_output(expected, 'Results from some shards are missing: 1\n')
-
-  def test_collect_multi(self):
-    actual_calls = []
-    def fetch_isolated(isolated_hash, storage, cache, outdir, use_symlinks):
-      self.assertIs(storage.__class__, isolateserver.Storage)
-      self.assertIs(cache.__class__, isolateserver.MemoryCache)
-      # Ensure storage is pointing to required location.
-      self.assertEqual('https://localhost:2', storage.location)
-      self.assertEqual('default', storage.namespace)
-      self.assertEqual(False, use_symlinks)
-      actual_calls.append((isolated_hash, outdir))
-    self.mock(isolateserver, 'fetch_isolated', fetch_isolated)
-
-    collector = swarming.TaskOutputCollector(self.tempdir, 2)
-    for index in xrange(2):
-      collector.process_shard_result(
-          index,
-          gen_result_response(
-              outputs_ref={
-                'isolated': str(index) * 40,
-                'isolatedserver': 'https://localhost:2',
-                'namespace': 'default',
-              }))
-    summary = collector.finalize()
-
-    expected_calls = [
-      ('0'*40, os.path.join(self.tempdir, '0')),
-      ('1'*40, os.path.join(self.tempdir, '1')),
-    ]
-    self.assertEqual(expected_calls, actual_calls)
-
-    # Ensure collected summary is correct.
-    outputs_refs = [
-      {
-        'isolated': '0'*40,
-        'isolatedserver': 'https://localhost:2',
-        'namespace': 'default',
-        'view_url':
-            'https://localhost:2/browse?namespace=default&hash=' + '0'*40,
-      },
-      {
-        'isolated': '1'*40,
-        'isolatedserver': 'https://localhost:2',
-        'namespace': 'default',
-        'view_url':
-            'https://localhost:2/browse?namespace=default&hash=' + '1'*40,
-      },
-    ]
-    expected = {
-      'shards': [gen_result_response(outputs_ref=o) for o in outputs_refs],
-    }
-    self.assertEqual(expected, summary)
-
-    # Ensure summary dumped to a file is correct as well.
-    with open(os.path.join(self.tempdir, 'summary.json'), 'r') as f:
-      summary_dump = json.load(f)
-    self.assertEqual(expected, summary_dump)
-
-  def test_ensures_same_server(self):
-    self.mock(logging, 'error', lambda *_: None)
-    # Two shard results, attempt to use different servers.
-    actual_calls = []
-    self.mock(
-        isolateserver, 'fetch_isolated',
-        lambda *args: actual_calls.append(args))
-    data = [
-      gen_result_response(
-        outputs_ref={
-          'isolatedserver': 'https://server1',
-          'namespace': 'namespace',
-          'isolated':'hash1',
-        }),
-      gen_result_response(
-        outputs_ref={
-          'isolatedserver': 'https://server2',
-          'namespace': 'namespace',
-          'isolated':'hash1',
-        }),
-    ]
-
-    # Feed them to collector.
-    collector = swarming.TaskOutputCollector(self.tempdir, 2)
-    for index, result in enumerate(data):
-      collector.process_shard_result(index, result)
-    collector.finalize()
-
-    # Only first fetch is made, second one is ignored.
-    self.assertEqual(1, len(actual_calls))
-    isolated_hash, storage, _, outdir, _ = actual_calls[0]
-    self.assertEqual(
-        ('hash1', os.path.join(self.tempdir, '0')),
-        (isolated_hash, outdir))
-    self.assertEqual('https://server1', storage.location)
-
-
-class TestMain(NetTestCase):
-  # Tests calling main().
-  def test_bot_delete(self):
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/bot/foo/delete',
-            {'method': 'POST', 'data': {}},
-            {},
-          ),
-        ])
-    ret = main(
-        ['bot_delete', '--swarming', 'https://localhost:1', 'foo', '--force'])
-    self._check_output('', '')
-    self.assertEqual(0, ret)
-
-  def test_run_raw_cmd(self):
-    # Minimalist use.
-    request = {
-      'expiration_secs': 21600,
-      'name': u'None/foo=bar',
-      'parent_task_id': '',
-      'priority': 100,
-      'properties': {
-        'cipd_input': None,
-        'command': ['python', '-c', 'print(\'hi\')'],
-        'dimensions': [
-          {'key': 'foo', 'value': 'bar'},
-        ],
-        'env': [],
-        'execution_timeout_secs': 3600,
-        'extra_args': None,
-        'grace_period_secs': 30,
-        'idempotent': False,
-        'inputs_ref': None,
-        'io_timeout_secs': 1200,
-      },
-      'tags': [],
-      'user': None,
-    }
-    result = gen_request_response(request)
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-    ret = main([
-        'trigger',
-        '--swarming', 'https://localhost:1',
-        '--dimension', 'foo', 'bar',
-        '--raw-cmd',
-        '--',
-        'python',
-        '-c',
-        'print(\'hi\')',
-      ])
-    actual = sys.stdout.getvalue()
-    self.assertEqual(0, ret, (actual, sys.stderr.getvalue()))
-    self._check_output(
-        'Triggered task: None/foo=bar\n'
-        'To collect results, use:\n'
-        '  swarming.py collect -S https://localhost:1 12300\n'
-        'Or visit:\n'
-        '  https://localhost:1/user/task/12300\n',
-        '')
-
-  def test_run_isolated_hash(self):
-    # pylint: disable=unused-argument
-    self.mock(swarming, 'now', lambda: 123456)
-
-    request = gen_request_data(
-        properties={
-          'command': None,
-          'inputs_ref': {
-            'isolated': u'1111111111111111111111111111111111111111',
-            'isolatedserver': 'https://localhost:2',
-            'namespace': 'default-gzip',
-          },
-        })
-    result = gen_request_response(request)
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-    ret = main([
-        'trigger',
-        '--swarming', 'https://localhost:1',
-        '--isolate-server', 'https://localhost:2',
-        '--shards', '1',
-        '--priority', '101',
-        '--dimension', 'foo', 'bar',
-        '--dimension', 'os', 'Mac',
-        '--expiration', '3600',
-        '--user', 'joe@localhost',
-        '--tags', 'tag:a',
-        '--tags', 'tag:b',
-        '--hard-timeout', '60',
-        '--io-timeout', '60',
-        '--task-name', 'unit_tests',
-        '--isolated', FILE_HASH,
-        '--',
-        '--some-arg',
-        '123',
-      ])
-    actual = sys.stdout.getvalue()
-    self.assertEqual(0, ret, (actual, sys.stderr.getvalue()))
-    self._check_output(
-        'Triggered task: unit_tests\n'
-        'To collect results, use:\n'
-        '  swarming.py collect -S https://localhost:1 12300\n'
-        'Or visit:\n'
-        '  https://localhost:1/user/task/12300\n',
-        '')
-
-  def test_run_isolated_upload_and_json(self):
-    # pylint: disable=unused-argument
-    write_json_calls = []
-    self.mock(tools, 'write_json', lambda *args: write_json_calls.append(args))
-    subprocess_calls = []
-    self.mock(subprocess, 'call', lambda *c: subprocess_calls.append(c))
-    self.mock(swarming, 'now', lambda: 123456)
-
-    isolated = os.path.join(self.tempdir, 'zaz.isolated')
-    content = '{}'
-    with open(isolated, 'wb') as f:
-      f.write(content)
-
-    isolated_hash = isolateserver_mock.hash_content(content)
-    request = gen_request_data(
-        properties={
-          'command': None,
-          'idempotent': True,
-          'inputs_ref': {
-            'isolated': isolated_hash,
-            'isolatedserver': 'https://localhost:2',
-            'namespace': 'default-gzip',
-          },
-        })
-    result = gen_request_response(request)
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-    ret = main([
-        'trigger',
-        '--swarming', 'https://localhost:1',
-        '--isolate-server', 'https://localhost:2',
-        '--shards', '1',
-        '--priority', '101',
-        '--dimension', 'foo', 'bar',
-        '--dimension', 'os', 'Mac',
-        '--expiration', '3600',
-        '--user', 'joe@localhost',
-        '--tags', 'tag:a',
-        '--tags', 'tag:b',
-        '--hard-timeout', '60',
-        '--io-timeout', '60',
-        '--idempotent',
-        '--task-name', 'unit_tests',
-        '--dump-json', 'foo.json',
-        isolated,
-        '--',
-        '--some-arg',
-        '123',
-      ])
-    actual = sys.stdout.getvalue()
-    self.assertEqual(0, ret, (actual, sys.stderr.getvalue()))
-    self.assertEqual([], subprocess_calls)
-    self._check_output(
-        'Triggered task: unit_tests\n'
-        'To collect results, use:\n'
-        '  swarming.py collect -S https://localhost:1 --json foo.json\n'
-        'Or visit:\n'
-        '  https://localhost:1/user/task/12300\n',
-        '')
-    expected = [
-      (
-        u'foo.json',
-        {
-          'base_task_name': 'unit_tests',
-          'tasks': {
-            'unit_tests': {
-              'shard_index': 0,
-              'task_id': '12300',
-              'view_url': 'https://localhost:1/user/task/12300',
-            }
-          },
-          'request': {
-            'expiration_secs': 3600,
-            'name': 'unit_tests',
-            'parent_task_id': '',
-            'priority': 101,
-            'properties': {
-              'cipd_input': None,
-              'command': None,
-              'dimensions': [
-                {'key': 'foo', 'value': 'bar'},
-                {'key': 'os', 'value': 'Mac'},
-              ],
-              'env': [],
-              'execution_timeout_secs': 60,
-              'extra_args': ['--some-arg', '123'],
-              'grace_period_secs': 30,
-              'idempotent': True,
-              'inputs_ref': {
-                'isolated': isolated_hash,
-                'isolatedserver': 'https://localhost:2',
-                'namespace': 'default-gzip',
-                },
-              'io_timeout_secs': 60,
-            },
-            'tags': ['tag:a', 'tag:b'],
-            'user': 'joe@localhost',
-          },
-        },
-        True,
-      ),
-    ]
-    self.assertEqual(expected, write_json_calls)
-
-  def test_trigger_cipd(self):
-    self.mock(swarming, 'now', lambda: 123456)
-
-    request = gen_request_data(
-        properties={
-          'cipd_input': {
-            'client_package': None,
-            'packages': [{
-              'package_name': 'super/awesome/pkg',
-              'path': 'path/to/pkg',
-              'version': 'version:42',
-            }],
-            'server': None,
-          },
-          'command': None,
-          'inputs_ref': {
-            'isolated': u'1111111111111111111111111111111111111111',
-            'isolatedserver': 'https://localhost:2',
-            'namespace': 'default-gzip',
-          },
-        })
-    result = gen_request_response(request)
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/tasks/new',
-            {'data': request},
-            result,
-          ),
-        ])
-    ret = main([
-        'trigger',
-        '--swarming', 'https://localhost:1',
-        '--isolate-server', 'https://localhost:2',
-        '--shards', '1',
-        '--priority', '101',
-        '--dimension', 'foo', 'bar',
-        '--dimension', 'os', 'Mac',
-        '--expiration', '3600',
-        '--user', 'joe@localhost',
-        '--tags', 'tag:a',
-        '--tags', 'tag:b',
-        '--hard-timeout', '60',
-        '--io-timeout', '60',
-        '--task-name', 'unit_tests',
-        '--isolated', FILE_HASH,
-        '--cipd-package', 'path/to/pkg:super/awesome/pkg:version:42',
-        '--',
-        '--some-arg',
-        '123',
-      ])
-    actual = sys.stdout.getvalue()
-    self.assertEqual(0, ret, (actual, sys.stderr.getvalue()))
-    self._check_output(
-        'Triggered task: unit_tests\n'
-        'To collect results, use:\n'
-        '  swarming.py collect -S https://localhost:1 12300\n'
-        'Or visit:\n'
-        '  https://localhost:1/user/task/12300\n',
-        '')
-
-  def test_trigger_no_request(self):
-    with self.assertRaises(SystemExit):
-      main([
-            'trigger', '--swarming', 'https://host',
-            '--isolate-server', 'https://host', '-T', 'foo',
-            '-d', 'os', 'amgia',
-          ])
-    self._check_output(
-        '',
-        'Usage: swarming.py trigger [options] (hash|isolated) '
-          '[-- extra_args|raw command]\n'
-        '\n'
-        'swarming.py: error: Use --isolated, --raw-cmd or \'--\' to pass '
-          'arguments to the called process.\n')
-
-  def test_trigger_no_env_vars(self):
-    with self.assertRaises(SystemExit):
-      main(['trigger'])
-    self._check_output(
-        '',
-        'Usage: swarming.py trigger [options] (hash|isolated) '
-          '[-- extra_args|raw command]'
-        '\n\n'
-        'swarming.py: error: --swarming is required.'
-        '\n')
-
-  def test_trigger_no_swarming_env_var(self):
-    with self.assertRaises(SystemExit):
-      with test_utils.EnvVars({'ISOLATE_SERVER': 'https://host'}):
-        main(['trigger', '-T' 'foo', 'foo.isolated'])
-    self._check_output(
-        '',
-        'Usage: swarming.py trigger [options] (hash|isolated) '
-          '[-- extra_args|raw command]'
-        '\n\n'
-        'swarming.py: error: --swarming is required.'
-        '\n')
-
-  def test_trigger_no_isolate_server(self):
-    with self.assertRaises(SystemExit):
-      with test_utils.EnvVars({'SWARMING_SERVER': 'https://host'}):
-        main(['trigger', 'foo.isolated', '-d', 'os', 'amiga'])
-    self._check_output(
-        '',
-        'Usage: swarming.py trigger [options] (hash|isolated) '
-          '[-- extra_args|raw command]'
-        '\n\n'
-        'swarming.py: error: --isolate-server is required.'
-        '\n')
-
-  def test_trigger_no_dimension(self):
-    with self.assertRaises(SystemExit):
-      main([
-            'trigger', '--swarming', 'https://host', '--raw-cmd', '--', 'foo',
-          ])
-    self._check_output(
-        '',
-        'Usage: swarming.py trigger [options] (hash|isolated) '
-          '[-- extra_args|raw command]'
-        '\n\n'
-        'swarming.py: error: Please at least specify one --dimension\n')
-
-  def test_collect_default_json(self):
-    j = os.path.join(self.tempdir, 'foo.json')
-    data = {
-      'base_task_name': 'unit_tests',
-      'tasks': {
-        'unit_tests': {
-          'shard_index': 0,
-          'task_id': '12300',
-          'view_url': 'https://localhost:1/user/task/12300',
-        }
-      },
-      'request': {
-        'expiration_secs': 3600,
-        'name': 'unit_tests',
-        'parent_task_id': '',
-        'priority': 101,
-        'properties': {
-          'command': None,
-          'dimensions': [
-            {'key': 'foo', 'value': 'bar'},
-            {'key': 'os', 'value': 'Mac'},
-          ],
-          'env': [],
-          'execution_timeout_secs': 60,
-          'extra_args': ['--some-arg', '123'],
-          'grace_period_secs': 30,
-          'idempotent': True,
-          'inputs_ref': {
-            'isolated': '1'*40,
-            'isolatedserver': 'https://localhost:2',
-            'namespace': 'default-gzip',
-            },
-          'io_timeout_secs': 60,
-        },
-        'tags': ['tag:a', 'tag:b'],
-        'user': 'joe@localhost',
-      },
-    }
-    with open(j, 'wb') as f:
-      json.dump(data, f)
-    def stub_collect(
-        swarming_server, task_ids, timeout, decorate, print_status_updates,
-        task_summary_json, task_output_dir, include_perf):
-      self.assertEqual('https://host', swarming_server)
-      self.assertEqual([u'12300'], task_ids)
-      # It is automatically calculated from hard timeout + expiration + 10.
-      self.assertEqual(3670., timeout)
-      self.assertEqual(True, decorate)
-      self.assertEqual(True, print_status_updates)
-      self.assertEqual('/a', task_summary_json)
-      self.assertEqual('/b', task_output_dir)
-      self.assertEqual(False, include_perf)
-      print('Fake output')
-    self.mock(swarming, 'collect', stub_collect)
-    main(
-        ['collect', '--swarming', 'https://host', '--json', j, '--decorate',
-          '--print-status-updates', '--task-summary-json', '/a',
-          '--task-output-dir', '/b'])
-    self._check_output('Fake output\n', '')
-
-  def test_query_base(self):
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/bot/botid/tasks?limit=200',
-            {},
-            {'yo': 'dawg'},
-          ),
-        ])
-    ret = main(
-        [
-          'query', '--swarming', 'https://localhost:1', 'bot/botid/tasks',
-        ])
-    self._check_output('{\n  "yo": "dawg"\n}\n', '')
-    self.assertEqual(0, ret)
-
-  def test_query_cursor(self):
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/bot/botid/tasks?'
-                'foo=bar&limit=2',
-            {},
-            {
-              'cursor': '%',
-              'extra': False,
-              'items': ['A'],
-            },
-          ),
-          (
-            'https://localhost:1/api/swarming/v1/bot/botid/tasks?'
-                'foo=bar&cursor=%25&limit=1',
-            {},
-            {
-              'cursor': None,
-              'items': ['B'],
-              'ignored': True,
-            },
-          ),
-        ])
-    ret = main(
-        [
-          'query', '--swarming', 'https://localhost:1',
-          'bot/botid/tasks?foo=bar',
-          '--limit', '2',
-        ])
-    expected = (
-        '{\n'
-        '  "extra": false, \n'
-        '  "items": [\n'
-        '    "A", \n'
-        '    "B"\n'
-        '  ]\n'
-        '}\n')
-    self._check_output(expected, '')
-    self.assertEqual(0, ret)
-
-  def test_reproduce(self):
-    old_cwd = os.getcwd()
-    try:
-      os.chdir(self.tempdir)
-
-      def call(cmd, env, cwd):
-        self.assertEqual(['foo', '--bar'], cmd)
-        expected = os.environ.copy()
-        expected['aa'] = 'bb'
-        self.assertEqual(expected, env)
-        self.assertEqual(unicode(os.path.abspath('work')), cwd)
-        return 0
-
-      self.mock(subprocess, 'call', call)
-
-      self.expected_requests(
-          [
-            (
-              'https://localhost:1/api/swarming/v1/task/123/request',
-              {},
-              {
-                'properties': {
-                  'command': ['foo'],
-                  'env': [
-                    {'key': 'aa', 'value': 'bb'},
-                  ],
-                },
-              },
-            ),
-          ])
-      ret = main(
-          [
-            'reproduce', '--swarming', 'https://localhost:1', '123', '--',
-            '--bar',
-          ])
-      self._check_output('', '')
-      self.assertEqual(0, ret)
-    finally:
-      os.chdir(old_cwd)
-
-
-class TestCommandBot(NetTestCase):
-  # Specialized test fixture for command 'bot'.
-  def setUp(self):
-    super(TestCommandBot, self).setUp()
-    # Expected requests are always the same, independent of the test case.
-    self.expected_requests(
-        [
-          (
-            'https://localhost:1/api/swarming/v1/bots/list?limit=250',
-            {},
-            self.mock_swarming_api_v1_bots_page_1(),
-          ),
-          (
-            'https://localhost:1/api/swarming/v1/bots/list?limit=250&'
-              'cursor=opaque_cursor',
-            {},
-            self.mock_swarming_api_v1_bots_page_2(),
-          ),
-        ])
-
-  @staticmethod
-  def mock_swarming_api_v1_bots_page_1():
-    """Returns fake /api/swarming/v1/bots/list data."""
-    # Sample data retrieved from actual server.
-    now = unicode(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
-    return {
-      u'items': [
-        {
-          u'bot_id': u'swarm3',
-          u'created_ts': now,
-          u'dimensions': [
-            {u'key': u'cores', u'value': [u'4']},
-            {u'key': u'cpu', u'value': [u'x86', u'x86-64']},
-            {u'key': u'gpu', u'value': [u'15ad', u'15ad:0405']},
-            {u'key': u'id', u'value': [u'swarm3']},
-            {u'key': u'os', u'value': [u'Mac', u'Mac-10.9']},
-          ],
-          u'external_ip': u'1.1.1.3',
-          u'hostname': u'swarm3.example.com',
-          u'internal_ip': u'192.168.0.3',
-          u'is_dead': False,
-          u'last_seen_ts': now,
-          u'quarantined': False,
-          u'task_id': u'148569b73a89501',
-          u'task_name': u'browser_tests',
-          u'version': u'56918a2ea28a6f51751ad14cc086f118b8727905',
-        },
-        {
-          u'bot_id': u'swarm1',
-          u'created_ts': now,
-          u'dimensions': [
-            {u'key': u'cores', u'value': [u'8']},
-            {u'key': u'cpu', u'value': [u'x86', u'x86-64']},
-            {u'key': u'gpu', u'value': []},
-            {u'key': u'id', u'value': [u'swarm1']},
-            {u'key': u'os', u'value': [u'Linux', u'Linux-12.04']},
-          ],
-          u'external_ip': u'1.1.1.1',
-          u'hostname': u'swarm1.example.com',
-          u'internal_ip': u'192.168.0.1',
-          u'is_dead': True,
-          u'last_seen_ts': 'A long time ago',
-          u'quarantined': False,
-          u'task_id': u'',
-          u'task_name': None,
-          u'version': u'56918a2ea28a6f51751ad14cc086f118b8727905',
-        },
-        {
-          u'bot_id': u'swarm2',
-          u'created_ts': now,
-          u'dimensions': [
-            {u'key': u'cores', u'value': [u'8']},
-            {u'key': u'cpu', u'value': [u'x86', u'x86-64']},
-            {u'key': u'gpu', u'value': [
-              u'15ad',
-              u'15ad:0405',
-              u'VMware Virtual SVGA 3D Graphics Adapter',
-            ]},
-            {u'key': u'id', u'value': [u'swarm2']},
-            {u'key': u'os', u'value': [u'Windows', u'Windows-6.1']},
-          ],
-          u'external_ip': u'1.1.1.2',
-          u'hostname': u'swarm2.example.com',
-          u'internal_ip': u'192.168.0.2',
-          u'is_dead': False,
-          u'last_seen_ts': now,
-          u'quarantined': False,
-          u'task_id': u'',
-          u'task_name': None,
-          u'version': u'56918a2ea28a6f51751ad14cc086f118b8727905',
-        },
-      ],
-      u'cursor': u'opaque_cursor',
-      u'death_timeout': 1800.0,
-      u'limit': 4,
-      u'now': unicode(now),
-    }
-
-  @staticmethod
-  def mock_swarming_api_v1_bots_page_2():
-    """Returns fake /api/swarming/v1/bots/list data."""
-    # Sample data retrieved from actual server.
-    now = unicode(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))
-    return {
-      u'items': [
-        {
-          u'bot_id': u'swarm4',
-          u'created_ts': now,
-          u'dimensions': [
-            {u'key': u'cores', u'value': [u'8']},
-            {u'key': u'cpu', u'value': [u'x86', u'x86-64']},
-            {u'key': u'gpu', u'value': []},
-            {u'key': u'id', u'value': [u'swarm4']},
-            {u'key': u'os', u'value': [u'Linux', u'Linux-12.04']},
-          ],
-          u'external_ip': u'1.1.1.4',
-          u'hostname': u'swarm4.example.com',
-          u'internal_ip': u'192.168.0.4',
-          u'is_dead': False,
-          u'last_seen_ts': now,
-          u'quarantined': False,
-          u'task_id': u'14856971a64c601',
-          u'task_name': u'base_unittests',
-          u'version': u'56918a2ea28a6f51751ad14cc086f118b8727905',
-        }
-      ],
-      u'cursor': None,
-      u'death_timeout': 1800.0,
-      u'limit': 4,
-      u'now': unicode(now),
-    }
-
-  def test_bots(self):
-    ret = main(['bots', '--swarming', 'https://localhost:1'])
-    expected = (
-        u'swarm2\n'
-        u'  {"cores": ["8"], "cpu": ["x86", "x86-64"], "gpu": '
-          '["15ad", "15ad:0405", "VMware Virtual SVGA 3D Graphics Adapter"], '
-          '"id": ["swarm2"], "os": ["Windows", "Windows-6.1"]}\n'
-        'swarm3\n'
-        '  {"cores": ["4"], "cpu": ["x86", "x86-64"], "gpu": ["15ad", '
-          '"15ad:0405"], "id": ["swarm3"], "os": ["Mac", "Mac-10.9"]}\n'
-        u'  task: 148569b73a89501\n'
-        u'swarm4\n'
-        u'  {"cores": ["8"], "cpu": ["x86", "x86-64"], "gpu": [], '
-          '"id": ["swarm4"], "os": ["Linux", "Linux-12.04"]}\n'
-        u'  task: 14856971a64c601\n')
-    self._check_output(expected, '')
-    self.assertEqual(0, ret)
-
-  def test_bots_bare(self):
-    ret = main(['bots', '--swarming', 'https://localhost:1', '--bare'])
-    self._check_output("swarm2\nswarm3\nswarm4\n", '')
-    self.assertEqual(0, ret)
-
-  def test_bots_filter(self):
-    ret = main(
-        [
-          'bots', '--swarming', 'https://localhost:1',
-          '--dimension', 'os', 'Windows',
-        ])
-    expected = (
-        u'swarm2\n  {"cores": ["8"], "cpu": ["x86", "x86-64"], '
-          '"gpu": ["15ad", "15ad:0405", "VMware Virtual SVGA 3D Graphics '
-          'Adapter"], "id": ["swarm2"], '
-          '"os": ["Windows", "Windows-6.1"]}\n')
-    self._check_output(expected, '')
-    self.assertEqual(0, ret)
-
-  def test_bots_filter_keep_dead(self):
-    ret = main(
-        [
-          'bots', '--swarming', 'https://localhost:1',
-          '--dimension', 'os', 'Linux', '--keep-dead',
-        ])
-    expected = (
-        u'swarm1\n  {"cores": ["8"], "cpu": ["x86", "x86-64"], "gpu": [], '
-          '"id": ["swarm1"], "os": ["Linux", "Linux-12.04"]}\n'
-        u'swarm4\n'
-        u'  {"cores": ["8"], "cpu": ["x86", "x86-64"], "gpu": [], '
-          '"id": ["swarm4"], "os": ["Linux", "Linux-12.04"]}\n'
-        u'  task: 14856971a64c601\n')
-    self._check_output(expected, '')
-    self.assertEqual(0, ret)
-
-  def test_bots_filter_dead_only(self):
-    ret = main(
-        [
-          'bots', '--swarming', 'https://localhost:1',
-          '--dimension', 'os', 'Linux', '--dead-only',
-        ])
-    expected = (
-        u'swarm1\n  {"cores": ["8"], "cpu": ["x86", "x86-64"], "gpu": [], '
-          '"id": ["swarm1"], "os": ["Linux", "Linux-12.04"]}\n')
-    self._check_output(expected, '')
-    self.assertEqual(0, ret)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.CRITICAL)
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  for e in ('ISOLATE_SERVER', 'SWARMING_TASK_ID', 'SWARMING_SERVER'):
-    os.environ.pop(e, None)
-  unittest.main()
diff --git a/tools/swarming_client/tests/test_utils.py b/tools/swarming_client/tests/test_utils.py
deleted file mode 100644
index 69578d3..0000000
--- a/tools/swarming_client/tests/test_utils.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import logging
-import os
-import sys
-import unittest
-
-
-_UMASK = None
-
-
-class EnvVars(object):
-  """Context manager for environment variables.
-
-  Given a dict in the constructor, it sets each environment variable named by a
-  key to the corresponding value. Exiting the context restores all of those
-  variables to the values they had before entering the context.
-  """
-  def __init__(self, var_map):
-    self.var_map = var_map
-    self._backup = None
-
-  def __enter__(self):
-    self._backup = os.environ
-    os.environ = os.environ.copy()
-    os.environ.update(self.var_map)
-
-  def __exit__(self, exc_type, exc_value, traceback):
-    os.environ = self._backup
-
-
-class SymLink(str):
-  """Used as a marker to create a symlink instead of a file."""
-
-
-def umask():
-  """Returns current process umask without modifying it."""
-  global _UMASK
-  if _UMASK is None:
-    _UMASK = os.umask(0777)
-    os.umask(_UMASK)
-  return _UMASK
-
-
-def make_tree(out, contents):
-  for relpath, content in sorted(contents.iteritems()):
-    filepath = os.path.join(out, relpath.replace('/', os.path.sep))
-    dirpath = os.path.dirname(filepath)
-    if not os.path.isdir(dirpath):
-      os.makedirs(dirpath, 0700)
-    if isinstance(content, SymLink):
-      os.symlink(content, filepath)
-    else:
-      mode = 0700 if relpath.endswith('.py') else 0600
-      flags = os.O_WRONLY | os.O_CREAT
-      if sys.platform == 'win32':
-        flags |= os.O_BINARY
-      with os.fdopen(os.open(filepath, flags, mode), 'wb') as f:
-        f.write(content)
-
-
-def main():
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR,
-      format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s')
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  # Use an unusual umask.
-  os.umask(0070)
-  unittest.main()
diff --git a/tools/swarming_client/tests/threading_utils_test.py b/tools/swarming_client/tests/threading_utils_test.py
deleted file mode 100755
index b913a09..0000000
--- a/tools/swarming_client/tests/threading_utils_test.py
+++ /dev/null
@@ -1,617 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# Lambda may not be necessary.
-# pylint: disable=W0108
-
-import functools
-import logging
-import os
-import signal
-import sys
-import threading
-import time
-import traceback
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from utils import threading_utils
-
-
-def timeout(max_running_time):
-  """Test method decorator that fails the test if it executes longer
-  than |max_running_time| seconds.
-
-  It exists to terminate tests in case of deadlocks. There's a high chance that
-  process is broken after such timeout (due to hanging deadlocked threads that
-  can own some shared resources). But failing early (maybe not in a cleanest
-  way) due to timeout is generally better than hanging indefinitely.
-
-  |max_running_time| should be an order of magnitude (or even two) larger than
-  the expected run time of the test to compensate for a slow machine, high CPU
-  utilization by other processes, etc.
-
-  Cannot be nested.
-
-  No-op on Windows (since win32 doesn't support signal.setitimer).
-  """
-  if sys.platform == 'win32':
-    return lambda method: method
-
-  def decorator(method):
-    @functools.wraps(method)
-    def wrapper(self, *args, **kwargs):
-      signal.signal(signal.SIGALRM, lambda *_args: self.fail('Timeout'))
-      signal.setitimer(signal.ITIMER_REAL, max_running_time)
-      try:
-        return method(self, *args, **kwargs)
-      finally:
-        signal.signal(signal.SIGALRM, signal.SIG_DFL)
-        signal.setitimer(signal.ITIMER_REAL, 0)
-    return wrapper
-
-  return decorator
-
-
-class ThreadPoolTest(unittest.TestCase):
-  MIN_THREADS = 0
-  MAX_THREADS = 32
-
-  # Append custom assert messages to default ones (works with python >= 2.7).
-  longMessage = True
-
-  @staticmethod
-  def sleep_task(duration=0.01):
-    """Returns function that sleeps |duration| sec and returns its argument."""
-    def task(arg):
-      time.sleep(duration)
-      return arg
-    return task
-
-  def retrying_sleep_task(self, duration=0.01):
-    """Returns function that adds sleep_task to the thread pool."""
-    def task(arg):
-      self.thread_pool.add_task(0, self.sleep_task(duration), arg)
-    return task
-
-  @staticmethod
-  def none_task():
-    """Returns function that returns None."""
-    return lambda _arg: None
-
-  def setUp(self):
-    super(ThreadPoolTest, self).setUp()
-    self.thread_pool = threading_utils.ThreadPool(
-        self.MIN_THREADS, self.MAX_THREADS, 0)
-
-  @timeout(10)
-  def tearDown(self):
-    super(ThreadPoolTest, self).tearDown()
-    self.thread_pool.close()
-
-  def get_results_via_join(self, _expected):
-    return self.thread_pool.join()
-
-  def get_results_via_get_one_result(self, expected):
-    return [self.thread_pool.get_one_result() for _ in expected]
-
-  def get_results_via_iter_results(self, _expected):
-    return list(self.thread_pool.iter_results())
-
-  def run_results_test(self, task, results_getter, args=None, expected=None):
-    """Template function for tests checking that pool returns all results.
-
-    Will add multiple instances of |task| to the thread pool, then call
-    |results_getter| to get back all results and compare them to expected ones.
-    """
-    args = range(0, 100) if args is None else args
-    expected = args if expected is None else expected
-    msg = 'Using \'%s\' to get results.' % (results_getter.__name__,)
-
-    for i in args:
-      self.thread_pool.add_task(0, task, i)
-    results = results_getter(expected)
-
-    # Check that we got all results back (exact same set, no duplicates).
-    self.assertEqual(set(expected), set(results), msg)
-    self.assertEqual(len(expected), len(results), msg)
-
-    # Queue is empty, result request should fail.
-    with self.assertRaises(threading_utils.ThreadPoolEmpty):
-      self.thread_pool.get_one_result()
-
-  @timeout(10)
-  def test_get_one_result_ok(self):
-    self.thread_pool.add_task(0, lambda: 'OK')
-    self.assertEqual(self.thread_pool.get_one_result(), 'OK')
-
-  @timeout(10)
-  def test_get_one_result_fail(self):
-    # No tasks added -> get_one_result raises an exception.
-    with self.assertRaises(threading_utils.ThreadPoolEmpty):
-      self.thread_pool.get_one_result()
-
-  @timeout(30)
-  def test_join(self):
-    self.run_results_test(self.sleep_task(),
-                          self.get_results_via_join)
-
-  @timeout(30)
-  def test_get_one_result(self):
-    self.run_results_test(self.sleep_task(),
-                          self.get_results_via_get_one_result)
-
-  @timeout(30)
-  def test_iter_results(self):
-    self.run_results_test(self.sleep_task(),
-                          self.get_results_via_iter_results)
-
-  @timeout(30)
-  def test_retry_and_join(self):
-    self.run_results_test(self.retrying_sleep_task(),
-                          self.get_results_via_join)
-
-  @timeout(30)
-  def test_retry_and_get_one_result(self):
-    self.run_results_test(self.retrying_sleep_task(),
-                          self.get_results_via_get_one_result)
-
-  @timeout(30)
-  def test_retry_and_iter_results(self):
-    self.run_results_test(self.retrying_sleep_task(),
-                          self.get_results_via_iter_results)
-
-  @timeout(30)
-  def test_none_task_and_join(self):
-    self.run_results_test(self.none_task(),
-                          self.get_results_via_join,
-                          expected=[])
-
-  @timeout(30)
-  def test_none_task_and_get_one_result(self):
-    self.thread_pool.add_task(0, self.none_task(), 0)
-    with self.assertRaises(threading_utils.ThreadPoolEmpty):
-      self.thread_pool.get_one_result()
-
-  @timeout(30)
-  def test_none_task_and_and_iter_results(self):
-    self.run_results_test(self.none_task(),
-                          self.get_results_via_iter_results,
-                          expected=[])
-
-  @timeout(30)
-  def test_generator_task(self):
-    MULTIPLIER = 1000
-    COUNT = 10
-
-    # Generator that yields [i * MULTIPLIER, i * MULTIPLIER + COUNT).
-    def generator_task(i):
-      for j in xrange(COUNT):
-        time.sleep(0.001)
-        yield i * MULTIPLIER + j
-
-    # Arguments for tasks and expected results.
-    args = range(0, 10)
-    expected = [i * MULTIPLIER + j for i in args for j in xrange(COUNT)]
-
-    # Test all possible ways to pull results from the thread pool.
-    getters = (self.get_results_via_join,
-               self.get_results_via_iter_results,
-               self.get_results_via_get_one_result,)
-    for results_getter in getters:
-      self.run_results_test(generator_task, results_getter, args, expected)
-
-  @timeout(30)
-  def test_concurrent_iter_results(self):
-    def poller_proc(result):
-      result.extend(self.thread_pool.iter_results())
-
-    args = range(0, 100)
-    for i in args:
-      self.thread_pool.add_task(0, self.sleep_task(), i)
-
-    # Start a bunch of threads, all calling iter_results in parallel.
-    pollers = []
-    for _ in xrange(0, 4):
-      result = []
-      poller = threading.Thread(target=poller_proc, args=(result,))
-      poller.start()
-      pollers.append((poller, result))
-
-    # Collects results from all polling threads.
-    all_results = []
-    for poller, results in pollers:
-      poller.join()
-      all_results.extend(results)
-
-    # Check that we got all results back (exact same set, no duplicates).
-    self.assertEqual(set(args), set(all_results))
-    self.assertEqual(len(args), len(all_results))
-
-  @timeout(10)
-  def test_adding_tasks_after_close(self):
-    pool = threading_utils.ThreadPool(1, 1, 0)
-    pool.add_task(0, lambda: None)
-    pool.close()
-    with self.assertRaises(threading_utils.ThreadPoolClosed):
-      pool.add_task(0, lambda: None)
-
-  @timeout(10)
-  def test_double_close(self):
-    pool = threading_utils.ThreadPool(1, 1, 0)
-    pool.close()
-    with self.assertRaises(threading_utils.ThreadPoolClosed):
-      pool.close()
-
-  def test_priority(self):
-    # Verifies that a lower priority is run first.
-    with threading_utils.ThreadPool(1, 1, 0) as pool:
-      lock = threading.Lock()
-
-      def wait_and_return(x):
-        with lock:
-          return x
-
-      def return_x(x):
-        return x
-
-      with lock:
-        pool.add_task(0, wait_and_return, 'a')
-        pool.add_task(2, return_x, 'b')
-        pool.add_task(1, return_x, 'c')
-
-      actual = pool.join()
-    self.assertEqual(['a', 'c', 'b'], actual)
-
-  @timeout(30)
-  def test_abort(self):
-    # Trigger a ridiculous amount of tasks, and abort the remaining.
-    with threading_utils.ThreadPool(2, 2, 0) as pool:
-      # Allow 10 tasks to run initially.
-      sem = threading.Semaphore(10)
-
-      def grab_and_return(x):
-        sem.acquire()
-        return x
-
-      for i in range(100):
-        pool.add_task(0, grab_and_return, i)
-
-      # Running at 11 would hang.
-      results = [pool.get_one_result() for _ in xrange(10)]
-      # At that point, there are 10 completed tasks, 2 tasks hanging and 88
-      # pending.
-      self.assertEqual(88, pool.abort())
-      # Calling .join() before these 2 .release() would hang.
-      sem.release()
-      sem.release()
-      results.extend(pool.join())
-    # The results *may* be out of order. Even if the calls are processed
-    # strictly in FIFO mode, a thread may preempt another one when returning the
-    # values.
-    self.assertEqual(range(12), sorted(results))
-
-
-class AutoRetryThreadPoolTest(unittest.TestCase):
-  def test_bad_class(self):
-    exceptions = [AutoRetryThreadPoolTest]
-    with self.assertRaises(AssertionError):
-      threading_utils.AutoRetryThreadPool(exceptions, 1, 0, 1, 0)
-
-  def test_no_exception(self):
-    with self.assertRaises(AssertionError):
-      threading_utils.AutoRetryThreadPool([], 1, 0, 1, 0)
-
-  def test_bad_retry(self):
-    exceptions = [IOError]
-    with self.assertRaises(AssertionError):
-      threading_utils.AutoRetryThreadPool(exceptions, 256, 0, 1, 0)
-
-  def test_bad_priority(self):
-    exceptions = [IOError]
-    with threading_utils.AutoRetryThreadPool(exceptions, 1, 1, 1, 0) as pool:
-      pool.add_task(0, lambda x: x, 0)
-      pool.add_task(256, lambda x: x, 0)
-      pool.add_task(512, lambda x: x, 0)
-      with self.assertRaises(AssertionError):
-        pool.add_task(1, lambda x: x, 0)
-      with self.assertRaises(AssertionError):
-        pool.add_task(255, lambda x: x, 0)
-
-  def test_priority(self):
-    # Verifies that a lower priority is run first.
-    exceptions = [IOError]
-    with threading_utils.AutoRetryThreadPool(exceptions, 1, 1, 1, 0) as pool:
-      lock = threading.Lock()
-
-      def wait_and_return(x):
-        with lock:
-          return x
-
-      def return_x(x):
-        return x
-
-      with lock:
-        pool.add_task(threading_utils.PRIORITY_HIGH, wait_and_return, 'a')
-        pool.add_task(threading_utils.PRIORITY_LOW, return_x, 'b')
-        pool.add_task(threading_utils.PRIORITY_MED, return_x, 'c')
-
-      actual = pool.join()
-    self.assertEqual(['a', 'c', 'b'], actual)
-
-  def test_retry_inherited(self):
-    # Exception class inheritance works.
-    class CustomException(IOError):
-      pass
-    ran = []
-    def throw(to_throw, x):
-      ran.append(x)
-      if to_throw:
-        raise to_throw.pop(0)
-      return x
-    with threading_utils.AutoRetryThreadPool([IOError], 1, 1, 1, 0) as pool:
-      pool.add_task(
-          threading_utils.PRIORITY_MED, throw, [CustomException('a')], 'yay')
-      actual = pool.join()
-    self.assertEqual(['yay'], actual)
-    self.assertEqual(['yay', 'yay'], ran)
-
-  def test_retry_2_times(self):
-    exceptions = [IOError, OSError]
-    to_throw = [OSError('a'), IOError('b')]
-    def throw(x):
-      if to_throw:
-        raise to_throw.pop(0)
-      return x
-    with threading_utils.AutoRetryThreadPool(exceptions, 2, 1, 1, 0) as pool:
-      pool.add_task(threading_utils.PRIORITY_MED, throw, 'yay')
-      actual = pool.join()
-    self.assertEqual(['yay'], actual)
-
-  def test_retry_too_many_times(self):
-    exceptions = [IOError, OSError]
-    to_throw = [OSError('a'), IOError('b')]
-    def throw(x):
-      if to_throw:
-        raise to_throw.pop(0)
-      return x
-    with threading_utils.AutoRetryThreadPool(exceptions, 1, 1, 1, 0) as pool:
-      pool.add_task(threading_utils.PRIORITY_MED, throw, 'yay')
-      with self.assertRaises(IOError):
-        pool.join()
-
-  def test_retry_mutation_1(self):
-    # This is to warn that mutable arguments WILL be mutated.
-    def throw(to_throw, x):
-      if to_throw:
-        raise to_throw.pop(0)
-      return x
-    exceptions = [IOError, OSError]
-    with threading_utils.AutoRetryThreadPool(exceptions, 1, 1, 1, 0) as pool:
-      pool.add_task(
-          threading_utils.PRIORITY_MED,
-          throw,
-          [OSError('a'), IOError('b')],
-          'yay')
-      with self.assertRaises(IOError):
-        pool.join()
-
-  def test_retry_mutation_2(self):
-    # This is to warn that mutable arguments WILL be mutated.
-    def throw(to_throw, x):
-      if to_throw:
-        raise to_throw.pop(0)
-      return x
-    exceptions = [IOError, OSError]
-    with threading_utils.AutoRetryThreadPool(exceptions, 2, 1, 1, 0) as pool:
-      pool.add_task(
-          threading_utils.PRIORITY_MED,
-          throw,
-          [OSError('a'), IOError('b')],
-          'yay')
-      actual = pool.join()
-    self.assertEqual(['yay'], actual)
-
-  def test_retry_interleaved(self):
-    # Verifies that retries are interleaved. This is important: we don't want a
-    # retried task to monopolize the whole pool during retries.
-    exceptions = [IOError, OSError]
-    lock = threading.Lock()
-    ran = []
-    with threading_utils.AutoRetryThreadPool(exceptions, 2, 1, 1, 0) as pool:
-      def lock_and_throw(to_throw, x):
-        with lock:
-          ran.append(x)
-          if to_throw:
-            raise to_throw.pop(0)
-          return x
-      with lock:
-        pool.add_task(
-            threading_utils.PRIORITY_MED,
-            lock_and_throw,
-            [OSError('a'), IOError('b')],
-            'A')
-        pool.add_task(
-            threading_utils.PRIORITY_MED,
-            lock_and_throw,
-            [OSError('a'), IOError('b')],
-            'B')
-
-      actual = pool.join()
-    self.assertEqual(['A', 'B'], actual)
-    # Retries are properly interleaved:
-    self.assertEqual(['A', 'B', 'A', 'B', 'A', 'B'], ran)
-
-  def test_add_task_with_channel_success(self):
-    with threading_utils.AutoRetryThreadPool([OSError], 2, 1, 1, 0) as pool:
-      channel = threading_utils.TaskChannel()
-      pool.add_task_with_channel(channel, 0, lambda: 0)
-      self.assertEqual(0, channel.pull())
-
-  def test_add_task_with_channel_fatal_error(self):
-    with threading_utils.AutoRetryThreadPool([OSError], 2, 1, 1, 0) as pool:
-      channel = threading_utils.TaskChannel()
-      def throw(exc):
-        raise exc
-      pool.add_task_with_channel(channel, 0, throw, ValueError())
-      with self.assertRaises(ValueError):
-        channel.pull()
-
-  def test_add_task_with_channel_retryable_error(self):
-    with threading_utils.AutoRetryThreadPool([OSError], 2, 1, 1, 0) as pool:
-      channel = threading_utils.TaskChannel()
-      def throw(exc):
-        raise exc
-      pool.add_task_with_channel(channel, 0, throw, OSError())
-      with self.assertRaises(OSError):
-        channel.pull()
-
-  def test_add_task_with_channel_captures_stack_trace(self):
-    with threading_utils.AutoRetryThreadPool([OSError], 2, 1, 1, 0) as pool:
-      channel = threading_utils.TaskChannel()
-      def throw(exc):
-        def function_with_some_unusual_name():
-          raise exc
-        function_with_some_unusual_name()
-      pool.add_task_with_channel(channel, 0, throw, OSError())
-      exc_traceback = ''
-      try:
-        channel.pull()
-      except OSError:
-        exc_traceback = traceback.format_exc()
-      self.assertIn('function_with_some_unusual_name', exc_traceback)
-
-  def test_max_value(self):
-    self.assertEqual(16, threading_utils.IOAutoRetryThreadPool.MAX_WORKERS)
-
-
-class FakeProgress(object):
-  @staticmethod
-  def print_update():
-    pass
-
-
-class WorkerPoolTest(unittest.TestCase):
-  def test_normal(self):
-    mapper = lambda value: -value
-    progress = FakeProgress()
-    with threading_utils.ThreadPoolWithProgress(progress, 8, 8, 0) as pool:
-      for i in range(32):
-        pool.add_task(0, mapper, i)
-      results = pool.join()
-    self.assertEqual(range(-31, 1), sorted(results))
-
-  def test_exception(self):
-    class FearsomeException(Exception):
-      pass
-    def mapper(value):
-      raise FearsomeException(value)
-    task_added = False
-    try:
-      progress = FakeProgress()
-      with threading_utils.ThreadPoolWithProgress(progress, 8, 8, 0) as pool:
-        pool.add_task(0, mapper, 0)
-        task_added = True
-        pool.join()
-        self.fail()
-    except FearsomeException:
-      self.assertEqual(True, task_added)
-
-
-class TaskChannelTest(unittest.TestCase):
-  def test_passes_simple_value(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      tp.add_task(0, lambda: channel.send_result(0))
-      self.assertEqual(0, channel.pull())
-
-  def test_passes_exception_value(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      tp.add_task(0, lambda: channel.send_result(Exception()))
-      self.assertTrue(isinstance(channel.pull(), Exception))
-
-  def test_wrap_task_passes_simple_value(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      tp.add_task(0, channel.wrap_task(lambda: 0))
-      self.assertEqual(0, channel.pull())
-
-  def test_wrap_task_passes_exception_value(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      tp.add_task(0, channel.wrap_task(lambda: Exception()))
-      self.assertTrue(isinstance(channel.pull(), Exception))
-
-  def test_send_exception_raises_exception(self):
-    class CustomError(Exception):
-      pass
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      exc_info = (CustomError, CustomError(), None)
-      tp.add_task(0, lambda: channel.send_exception(exc_info))
-      with self.assertRaises(CustomError):
-        channel.pull()
-
-  def test_wrap_task_raises_exception(self):
-    class CustomError(Exception):
-      pass
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      def task_func():
-        raise CustomError()
-      tp.add_task(0, channel.wrap_task(task_func))
-      with self.assertRaises(CustomError):
-        channel.pull()
-
-  def test_wrap_task_exception_captures_stack_trace(self):
-    class CustomError(Exception):
-      pass
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      def task_func():
-        def function_with_some_unusual_name():
-          raise CustomError()
-        function_with_some_unusual_name()
-      tp.add_task(0, channel.wrap_task(task_func))
-      exc_traceback = ''
-      try:
-        channel.pull()
-      except CustomError:
-        exc_traceback = traceback.format_exc()
-      self.assertIn('function_with_some_unusual_name', exc_traceback)
-
-  def test_pull_timeout(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      def task_func():
-        # This test ultimately relies on the condition variable primitive
-        # provided by pthreads. There's no easy way to mock time for it.
-        # Increase this duration if the test is flaky.
-        time.sleep(0.2)
-        return 123
-      tp.add_task(0, channel.wrap_task(task_func))
-      with self.assertRaises(threading_utils.TaskChannel.Timeout):
-        channel.pull(timeout=0.001)
-      self.assertEqual(123, channel.pull())
-
-  def test_timeout_exception_from_task(self):
-    with threading_utils.ThreadPool(1, 1, 0) as tp:
-      channel = threading_utils.TaskChannel()
-      def task_func():
-        raise threading_utils.TaskChannel.Timeout()
-      tp.add_task(0, channel.wrap_task(task_func))
-      # 'Timeout' raised by task gets transformed into 'RuntimeError'.
-      with self.assertRaises(RuntimeError):
-        channel.pull()
-
-
-if __name__ == '__main__':
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/trace_inputs/child1.py b/tools/swarming_client/tests/trace_inputs/child1.py
deleted file mode 100755
index feb00c2..0000000
--- a/tools/swarming_client/tests/trace_inputs/child1.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import subprocess
-import sys
-
-
-def child():
-  """When the gyp argument is not specified, the command is started from
-  --root-dir directory.
-  """
-  print 'child from %s' % os.getcwd()
-  # Force file opening with a non-normalized path.
-  open(os.path.join('tests', '..', 'trace_inputs.py'), 'rb').close()
-  open(os.path.join(
-      'tests', '..', 'tests', 'trace_inputs_smoke_test.py'), 'rb').close()
-  # Do not wait for the child to exit.
-  # Use relative directory.
-  subprocess.Popen(
-      ['python', 'child2.py'], cwd=os.path.join('tests', 'trace_inputs'))
-  return 0
-
-
-def child_gyp():
-  """When the gyp argument is specified, the command is started from --cwd
-  directory.
-  """
-  print 'child_gyp from %s' % os.getcwd()
-  # Force file opening.
-  open(os.path.join('..', 'trace_inputs.py'), 'rb').close()
-  open(os.path.join('..', 'tests', 'trace_inputs_smoke_test.py'), 'rb').close()
-  # Do not wait for the child to exit.
-  # Use relative directory.
-  subprocess.Popen(['python', 'child2.py'], cwd='trace_inputs')
-  return 0
-
-
-def main():
-  if sys.argv[1] == '--child':
-    return child()
-  if sys.argv[1] == '--child-gyp':
-    return child_gyp()
-  return 1
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/trace_inputs/child2.py b/tools/swarming_client/tests/trace_inputs/child2.py
deleted file mode 100755
index 3896808..0000000
--- a/tools/swarming_client/tests/trace_inputs/child2.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import sys
-import time
-
-
-def main():
-  print 'child2'
-  # Introduce a race condition with the parent so the parent may have a chance
-  # to exit before the child. The outcome is random.
-  time.sleep(.01)
-
-  if sys.platform in ('darwin', 'win32'):
-    # Check for case-insensitive file system. This happens on Windows and OSX.
-    # The log should still list test_file.txt.
-    open('Test_File.txt', 'rb').close()
-  else:
-    open('test_file.txt', 'rb').close()
-
-  expected = {
-    'bar': 'Foo\n',
-    'foo': 'Bar\n',
-  }
-
-  root = 'files1'
-  actual = dict(
-      (filename, open(os.path.join(root, filename), 'rb').read())
-      for filename in (os.listdir(root))
-      if (filename != 'do_not_care.txt' and
-          os.path.isfile(os.path.join(root, filename))))
-
-  if actual != expected:
-    print 'Failure'
-    print actual
-    print expected
-    return 1
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/trace_inputs/files1/bar b/tools/swarming_client/tests/trace_inputs/files1/bar
deleted file mode 100644
index bc56c4d..0000000
--- a/tools/swarming_client/tests/trace_inputs/files1/bar
+++ /dev/null
@@ -1 +0,0 @@
-Foo
diff --git a/tools/swarming_client/tests/trace_inputs/files1/do_not_care.txt b/tools/swarming_client/tests/trace_inputs/files1/do_not_care.txt
deleted file mode 100644
index 9a10460..0000000
--- a/tools/swarming_client/tests/trace_inputs/files1/do_not_care.txt
+++ /dev/null
@@ -1 +0,0 @@
-This file is ignored.
diff --git a/tools/swarming_client/tests/trace_inputs/files1/foo b/tools/swarming_client/tests/trace_inputs/files1/foo
deleted file mode 100644
index ebd7525..0000000
--- a/tools/swarming_client/tests/trace_inputs/files1/foo
+++ /dev/null
@@ -1 +0,0 @@
-Bar
diff --git a/tools/swarming_client/tests/trace_inputs/files2 b/tools/swarming_client/tests/trace_inputs/files2
deleted file mode 120000
index 49a73ae..0000000
--- a/tools/swarming_client/tests/trace_inputs/files2
+++ /dev/null
@@ -1 +0,0 @@
-files1
\ No newline at end of file
diff --git a/tools/swarming_client/tests/trace_inputs/ignored.txt b/tools/swarming_client/tests/trace_inputs/ignored.txt
deleted file mode 100644
index ca599e4..0000000
--- a/tools/swarming_client/tests/trace_inputs/ignored.txt
+++ /dev/null
@@ -1 +0,0 @@
-This file is not read.
diff --git a/tools/swarming_client/tests/trace_inputs/symlink.py b/tools/swarming_client/tests/trace_inputs/symlink.py
deleted file mode 100755
index c8ae028..0000000
--- a/tools/swarming_client/tests/trace_inputs/symlink.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import sys
-
-
-def main():
-  print 'symlink: touches files2/'
-  assert len(sys.argv) == 1
-
-  expected = {
-    'bar': 'Foo\n',
-    'foo': 'Bar\n',
-  }
-
-  if not os.path.basename(os.getcwd()) == 'tests':
-    print 'Start this script from inside "tests"'
-    return 1
-
-  root = os.path.join('trace_inputs', 'files2')
-  actual = dict(
-      (filename, open(os.path.join(root, filename), 'rb').read())
-      for filename in (os.listdir(root)) if filename != '.svn')
-
-  if actual != expected:
-    print 'Failure'
-    print actual
-    print expected
-    return 2
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/trace_inputs/test_file.txt b/tools/swarming_client/tests/trace_inputs/test_file.txt
deleted file mode 100644
index bc56c4d..0000000
--- a/tools/swarming_client/tests/trace_inputs/test_file.txt
+++ /dev/null
@@ -1 +0,0 @@
-Foo
diff --git a/tools/swarming_client/tests/trace_inputs/touch_only.py b/tools/swarming_client/tests/trace_inputs/touch_only.py
deleted file mode 100755
index fdd9b78..0000000
--- a/tools/swarming_client/tests/trace_inputs/touch_only.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Uses different APIs to touch a file."""
-
-import os
-import sys
-
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-
-
-def main():
-  print 'Only look if a file exists but do not open it.'
-  assert len(sys.argv) == 2
-  path = os.path.join(BASE_DIR, 'test_file.txt')
-  command = sys.argv[1]
-  if command == 'access':
-    return not os.access(path, os.R_OK)
-  elif command == 'isfile':
-    return not os.path.isfile(path)
-  elif command == 'stat':
-    return not os.stat(path).st_size
-  return 1
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/trace_inputs/tricky_filename.py b/tools/swarming_client/tests/trace_inputs/tricky_filename.py
deleted file mode 100755
index 39f187a..0000000
--- a/tools/swarming_client/tests/trace_inputs/tricky_filename.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import sys
-
-
-def main():
-  """Creates a file name with whitespaces and comma in it."""
-  # We could test ?><:*|"' and chr(1 to 32) on linux.
-  # We could test ?<>*|"' on OSX.
-  # On Windows, skip the Chinese characters for now as the log parsing code is
-  # using the current code page to generate the log.
-  if sys.platform == 'win32':
-    filename = u'foo, bar,  ~p#o,,ué^t%t .txt'
-  else:
-    filename = u'foo, bar,  ~p#o,,ué^t%t 和平.txt'
-  with open(filename, 'w') as f:
-    f.write('Bingo!')
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tests/trace_inputs_smoke_test.py b/tools/swarming_client/tests/trace_inputs_smoke_test.py
deleted file mode 100755
index e5ff632..0000000
--- a/tools/swarming_client/tests/trace_inputs_smoke_test.py
+++ /dev/null
@@ -1,737 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import functools
-import json
-import logging
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import unicodedata
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-import trace_inputs
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import threading_utils
-
-
-FILENAME = os.path.basename(__file__.decode(sys.getfilesystemencoding()))
-REL_DATA = os.path.join(u'tests', 'trace_inputs')
-VERBOSE = False
-
-# TODO(maruel): Have the kernel tracer on Windows differentiate between file
-# read and file write.
-MODE_R = trace_inputs.Results.File.READ if sys.platform != 'win32' else None
-MODE_W = trace_inputs.Results.File.WRITE if sys.platform != 'win32' else None
-MODE_T = trace_inputs.Results.File.TOUCHED
-
-
-def check_can_trace(fn):
-  """Function decorator that skips test that need to be able trace."""
-  @functools.wraps(fn)
-  def hook(self, *args, **kwargs):
-    if not trace_inputs.can_trace():
-      self.fail('Please rerun this test with admin privileges.')
-    return fn(self, *args, **kwargs)
-  return hook
-
-
-class CalledProcessError(subprocess.CalledProcessError):
-  """Makes 2.6 version act like 2.7"""
-  def __init__(self, returncode, cmd, output, cwd):
-    super(CalledProcessError, self).__init__(returncode, cmd)
-    self.output = output
-    self.cwd = cwd
-
-  def __str__(self):
-    return super(CalledProcessError, self).__str__() + (
-        '\n'
-        'cwd=%s\n%s') % (self.cwd, self.output)
-
-
-class TraceInputsBase(unittest.TestCase):
-  def setUp(self):
-    self.tempdir = None
-    self.trace_inputs_path = os.path.join(ROOT_DIR, 'trace_inputs.py')
-
-    # Wraps up all the differences between OSes here.
-    # - Windows doesn't track initial_cwd.
-    # - OSX replaces /usr/bin/python with /usr/bin/python2.7.
-    self.cwd = os.path.join(ROOT_DIR, u'tests')
-    self.initial_cwd = unicode(self.cwd)
-    self.expected_cwd = unicode(ROOT_DIR)
-    if sys.platform == 'win32':
-      # Not supported on Windows.
-      self.initial_cwd = None
-      self.expected_cwd = None
-
-    # There are 3 kinds of references to python: self.executable,
-    # self.real_executable and self.naked_executable. Which one applies depends
-    # on how python was started.
-    self.executable = sys.executable
-    if sys.platform == 'darwin':
-      # /usr/bin/python is a thunk executable that decides which version of
-      # python gets executed.
-      suffix = '.'.join(map(str, sys.version_info[0:2]))
-      if os.access(self.executable + suffix, os.X_OK):
-        # So it'll look like /usr/bin/python2.7
-        self.executable += suffix
-
-    self.real_executable = file_path.get_native_path_case(
-        unicode(self.executable))
-    self.tempdir = file_path.get_native_path_case(
-        unicode(tempfile.mkdtemp(prefix=u'trace_smoke_test')))
-    self.log = os.path.join(self.tempdir, 'log')
-
-    # self.naked_executable will only be naked on Windows.
-    self.naked_executable = unicode(sys.executable)
-    if sys.platform == 'win32':
-      self.naked_executable = os.path.basename(sys.executable)
-
-  def tearDown(self):
-    if self.tempdir:
-      if VERBOSE:
-        print 'Leaking: %s' % self.tempdir
-      else:
-        file_path.rmtree(self.tempdir)
-
-  @staticmethod
-  def get_child_command(from_data):
-    """Returns command to run the child1.py."""
-    cmd = [sys.executable]
-    if from_data:
-      # When the gyp argument is specified, the command is started from the
-      # --cwd directory. In this case, 'tests'.
-      cmd.extend([os.path.join('trace_inputs', 'child1.py'), '--child-gyp'])
-    else:
-      # When the gyp argument is not specified, the command is started from
-      # the --root-dir directory.
-      cmd.extend([os.path.join(REL_DATA, 'child1.py'), '--child'])
-    return cmd
-
-  @staticmethod
-  def _size(*args):
-    return os.stat(os.path.join(ROOT_DIR, *args)).st_size
-
-
-class TraceInputs(TraceInputsBase):
-  def _execute(self, mode, command, cwd):
-    cmd = [
-      sys.executable,
-      self.trace_inputs_path,
-      mode,
-      '--log', self.log,
-    ]
-    if VERBOSE:
-      cmd.extend(['-v'] * 3)
-    cmd.extend(command)
-    logging.info('Command: %s' % ' '.join(cmd))
-    p = subprocess.Popen(
-        cmd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        cwd=cwd,
-        universal_newlines=True)
-    out, err = p.communicate()
-    if VERBOSE:
-      print err
-    if p.returncode:
-      raise CalledProcessError(p.returncode, cmd, out + err, cwd)
-    return out or ''
-
-  def _trace(self, from_data):
-    if from_data:
-      cwd = os.path.join(ROOT_DIR, 'tests')
-    else:
-      cwd = ROOT_DIR
-    return self._execute('trace', self.get_child_command(from_data), cwd=cwd)
-
-  @check_can_trace
-  def test_trace(self):
-    expected = '\n'.join((
-      'Total: 7',
-      'Non existent: 0',
-      'Interesting: 7 reduced to 6',
-      '  tests/trace_inputs/child1.py'.replace('/', os.path.sep),
-      '  tests/trace_inputs/child2.py'.replace('/', os.path.sep),
-      '  tests/trace_inputs/files1/'.replace('/', os.path.sep),
-      '  tests/trace_inputs/test_file.txt'.replace('/', os.path.sep),
-      ('  tests/%s' % FILENAME).replace('/', os.path.sep),
-      '  trace_inputs.py',
-    )) + '\n'
-    trace_expected = '\n'.join((
-      'child from %s' % ROOT_DIR,
-      'child2',
-    )) + '\n'
-    trace_actual = self._trace(False)
-    actual = self._execute(
-        'read',
-        [
-          '--root-dir', ROOT_DIR,
-          '--trace-blacklist', '.+\\.pyc',
-          '--trace-blacklist', '.*\\.svn',
-          '--trace-blacklist', '.*do_not_care\\.txt',
-        ],
-        cwd=unicode(ROOT_DIR))
-    self.assertEqual(expected, actual)
-    self.assertEqual(trace_expected, trace_actual)
-
-  @check_can_trace
-  def test_trace_json(self):
-    expected = {
-      u'root': {
-        u'children': [
-          {
-            u'children': [],
-            u'command': [u'python', u'child2.py'],
-            u'executable': self.naked_executable,
-            u'files': [
-              {
-                'mode': MODE_R,
-                u'path': os.path.join(REL_DATA, 'child2.py'),
-                u'size': self._size(REL_DATA, 'child2.py'),
-              },
-              {
-                'mode': MODE_R,
-                u'path': os.path.join(REL_DATA, 'files1', 'bar'),
-                u'size': self._size(REL_DATA, 'files1', 'bar'),
-              },
-              {
-                'mode': MODE_R,
-                u'path': os.path.join(REL_DATA, 'files1', 'foo'),
-                u'size': self._size(REL_DATA, 'files1', 'foo'),
-              },
-              {
-                'mode': MODE_R,
-                u'path': os.path.join(REL_DATA, 'test_file.txt'),
-                u'size': self._size(REL_DATA, 'test_file.txt'),
-              },
-            ],
-            u'initial_cwd': self.initial_cwd,
-            #u'pid': 123,
-          },
-        ],
-        u'command': [
-          unicode(self.executable),
-          os.path.join(u'trace_inputs', 'child1.py'),
-          u'--child-gyp',
-        ],
-        u'executable': self.real_executable,
-        u'files': [
-          {
-            u'mode': MODE_R,
-            u'path': os.path.join(REL_DATA, 'child1.py'),
-            u'size': self._size(REL_DATA, 'child1.py'),
-          },
-          {
-            u'mode': MODE_R,
-            u'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
-            u'size': self._size('tests', 'trace_inputs_smoke_test.py'),
-          },
-          {
-            u'mode': MODE_R,
-            u'path': u'trace_inputs.py',
-            u'size': self._size('trace_inputs.py'),
-          },
-        ],
-        u'initial_cwd': self.initial_cwd,
-        #u'pid': 123,
-      },
-    }
-    trace_expected = '\n'.join((
-      'child_gyp from %s' % os.path.join(ROOT_DIR, 'tests'),
-      'child2',
-    )) + '\n'
-    trace_actual = self._trace(True)
-    actual_text = self._execute(
-        'read',
-        [
-          '--root-dir', ROOT_DIR,
-          '--trace-blacklist', '.+\\.pyc',
-          '--trace-blacklist', '.*\\.svn',
-          '--trace-blacklist', '.*do_not_care\\.txt',
-          '--json',
-        ],
-        cwd=unicode(ROOT_DIR))
-    actual_json = json.loads(actual_text)
-    self.assertEqual(list, actual_json.__class__)
-    self.assertEqual(1, len(actual_json))
-    actual_json = actual_json[0]
-    # Removes the pids.
-    self.assertTrue(actual_json['root'].pop('pid'))
-    self.assertTrue(actual_json['root']['children'][0].pop('pid'))
-    self.assertEqual(expected, actual_json)
-    self.assertEqual(trace_expected, trace_actual)
-
-
-class TraceInputsImport(TraceInputsBase):
-
-  # Similar to TraceInputs test fixture except that it calls the function
-  # directly, so the Results instance can be inspected.
-  # Roughly, make sure the API is stable.
-  def _execute_trace(self, command):
-    # Similar to what trace_test_cases.py does.
-    api = trace_inputs.get_api()
-    _, _ = trace_inputs.trace(self.log, command, self.cwd, api, True)
-    # TODO(maruel): Check
-    #self.assertEqual(0, returncode)
-    #self.assertEqual('', output)
-    def blacklist(f):
-      return f.endswith(('.pyc', '.svn', 'do_not_care.txt'))
-    data = api.parse_log(self.log, blacklist, None)
-    self.assertEqual(1, len(data))
-    if 'exception' in data[0]:
-      raise data[0]['exception'][0], \
-          data[0]['exception'][1], \
-          data[0]['exception'][2]
-
-    return data[0]['results'].strip_root(unicode(ROOT_DIR))
-
-  def _gen_dict_wrong_path(self):
-    """Returns the expected flattened Results when child1.py is called with the
-    wrong relative path.
-    """
-    return {
-      'root': {
-        'children': [],
-        'command': [
-          self.executable,
-          os.path.join(REL_DATA, 'child1.py'),
-          '--child',
-        ],
-        'executable': self.real_executable,
-        'files': [],
-        'initial_cwd': self.initial_cwd,
-      },
-    }
-
-  def _gen_dict_full(self):
-    """Returns the expected flattened Results when child1.py is called with
-    --child.
-    """
-    return {
-      'root': {
-        'children': [
-          {
-            'children': [],
-            'command': ['python', 'child2.py'],
-            'executable': self.naked_executable,
-            'files': [
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'child2.py'),
-                'size': self._size(REL_DATA, 'child2.py'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'files1', 'bar'),
-                'size': self._size(REL_DATA, 'files1', 'bar'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'files1', 'foo'),
-                'size': self._size(REL_DATA, 'files1', 'foo'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'test_file.txt'),
-                'size': self._size(REL_DATA, 'test_file.txt'),
-              },
-            ],
-            'initial_cwd': self.expected_cwd,
-          },
-        ],
-        'command': [
-          self.executable,
-          os.path.join(REL_DATA, 'child1.py'),
-          '--child',
-        ],
-        'executable': self.real_executable,
-        'files': [
-          {
-            'mode': MODE_R,
-            'path': os.path.join(REL_DATA, 'child1.py'),
-            'size': self._size(REL_DATA, 'child1.py'),
-          },
-          {
-            'mode': MODE_R,
-            'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
-            'size': self._size('tests', 'trace_inputs_smoke_test.py'),
-          },
-          {
-            'mode': MODE_R,
-            'path': u'trace_inputs.py',
-            'size': self._size('trace_inputs.py'),
-          },
-        ],
-        'initial_cwd': self.expected_cwd,
-      },
-    }
-
-  def _gen_dict_full_gyp(self):
-    """Returns the expected flattened results when child1.py is called with
-    --child-gyp.
-    """
-    return {
-      'root': {
-        'children': [
-          {
-            'children': [],
-            'command': [u'python', u'child2.py'],
-            'executable': self.naked_executable,
-            'files': [
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'child2.py'),
-                'size': self._size(REL_DATA, 'child2.py'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'files1', 'bar'),
-                'size': self._size(REL_DATA, 'files1', 'bar'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'files1', 'foo'),
-                'size': self._size(REL_DATA, 'files1', 'foo'),
-              },
-              {
-                'mode': MODE_R,
-                'path': os.path.join(REL_DATA, 'test_file.txt'),
-                'size': self._size(REL_DATA, 'test_file.txt'),
-              },
-            ],
-            'initial_cwd': self.initial_cwd,
-          },
-        ],
-        'command': [
-          self.executable,
-          os.path.join('trace_inputs', 'child1.py'),
-          '--child-gyp',
-        ],
-        'executable': self.real_executable,
-        'files': [
-          {
-            'mode': MODE_R,
-            'path': os.path.join(REL_DATA, 'child1.py'),
-            'size': self._size(REL_DATA, 'child1.py'),
-          },
-          {
-            'mode': MODE_R,
-            'path': os.path.join(u'tests', u'trace_inputs_smoke_test.py'),
-            'size': self._size('tests', 'trace_inputs_smoke_test.py'),
-          },
-          {
-            'mode': MODE_R,
-            'path': u'trace_inputs.py',
-            'size': self._size('trace_inputs.py'),
-          },
-        ],
-        'initial_cwd': self.initial_cwd,
-      },
-    }
-
-  @check_can_trace
-  def test_trace_wrong_path(self):
-    # Deliberately start the trace from the wrong path: start it from the
-    # directory 'tests' so that 'tests/tests/trace_inputs/child1.py' is not
-    # accessible and the child2.py process is not started.
-    results = self._execute_trace(self.get_child_command(False))
-    expected = self._gen_dict_wrong_path()
-    actual = results.flatten()
-    self.assertTrue(actual['root'].pop('pid'))
-    self.assertEqual(expected, actual)
-
-  @check_can_trace
-  def test_trace(self):
-    expected = self._gen_dict_full_gyp()
-    results = self._execute_trace(self.get_child_command(True))
-    actual = results.flatten()
-    self.assertTrue(actual['root'].pop('pid'))
-    self.assertTrue(actual['root']['children'][0].pop('pid'))
-    self.assertEqual(expected, actual)
-    files = [
-      u'tests/trace_inputs/child1.py'.replace('/', os.path.sep),
-      u'tests/trace_inputs/child2.py'.replace('/', os.path.sep),
-      u'tests/trace_inputs/files1/'.replace('/', os.path.sep),
-      u'tests/trace_inputs/test_file.txt'.replace('/', os.path.sep),
-      u'tests/trace_inputs_smoke_test.py'.replace('/', os.path.sep),
-      u'trace_inputs.py',
-    ]
-    def blacklist(f):
-      return f.endswith(('.pyc', 'do_not_care.txt', '.git', '.svn'))
-    simplified = trace_inputs.extract_directories(
-        file_path.get_native_path_case(unicode(ROOT_DIR)),
-        results.files,
-        blacklist)
-    self.assertEqual(files, [f.path for f in simplified])
-
-  @check_can_trace
-  def test_trace_multiple(self):
-    # Starts parallel threads and traces parallel child processes simultaneously.
-    # Some are started from the 'tests' directory, others from this script's
-    # directory. One trace fails. Verify everything still goes on.
-    parallel = 8
-
-    def trace(tracer, cmd, cwd, tracename):
-      resultcode, output = tracer.trace(cmd, cwd, tracename, True)
-      return (tracename, resultcode, output)
-
-    with threading_utils.ThreadPool(parallel, parallel, 0) as pool:
-      api = trace_inputs.get_api()
-      with api.get_tracer(self.log) as tracer:
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace1')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(True), self.cwd, 'trace2')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace3')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(True), self.cwd, 'trace4')
-        # Have this one fail since it's started from the wrong directory.
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(False), self.cwd, 'trace5')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(True), self.cwd, 'trace6')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(False), ROOT_DIR, 'trace7')
-        pool.add_task(
-            0, trace, tracer, self.get_child_command(True), self.cwd, 'trace8')
-        trace_results = pool.join()
-    def blacklist(f):
-      return f.endswith(('.pyc', 'do_not_care.txt', '.git', '.svn'))
-    actual_results = api.parse_log(self.log, blacklist, None)
-    self.assertEqual(8, len(trace_results))
-    self.assertEqual(8, len(actual_results))
-
-    # Convert to dict keyed on the trace name, simpler to verify.
-    trace_results = dict((i[0], i[1:]) for i in trace_results)
-    actual_results = dict((x.pop('trace'), x) for x in actual_results)
-    self.assertEqual(sorted(trace_results), sorted(actual_results))
-
-    # It'd be nice to start different kinds of processes.
-    expected_results = [
-      self._gen_dict_full(),
-      self._gen_dict_full_gyp(),
-      self._gen_dict_full(),
-      self._gen_dict_full_gyp(),
-      self._gen_dict_wrong_path(),
-      self._gen_dict_full_gyp(),
-      self._gen_dict_full(),
-      self._gen_dict_full_gyp(),
-    ]
-    self.assertEqual(len(expected_results), len(trace_results))
-
-    # See the comment above about the trace that fails because it's started from
-    # the wrong directory.
-    busted = 4
-    for index, key in enumerate(sorted(actual_results)):
-      self.assertEqual('trace%d' % (index + 1), key)
-      self.assertEqual(2, len(trace_results[key]))
-      # returncode
-      self.assertEqual(0 if index != busted else 2, trace_results[key][0])
-      # output
-      self.assertEqual(actual_results[key]['output'], trace_results[key][1])
-
-      self.assertEqual(['output', 'results'], sorted(actual_results[key]))
-      results = actual_results[key]['results']
-      results = results.strip_root(unicode(ROOT_DIR))
-      actual = results.flatten()
-      self.assertTrue(actual['root'].pop('pid'))
-      if index != busted:
-        self.assertTrue(actual['root']['children'][0].pop('pid'))
-      self.assertEqual(expected_results[index], actual)
-
-  if sys.platform != 'win32':
-    def test_trace_symlink(self):
-      expected = {
-        'root': {
-          'children': [],
-          'command': [
-            self.executable,
-            os.path.join('trace_inputs', 'symlink.py'),
-          ],
-          'executable': self.real_executable,
-          'files': [
-            {
-              'mode': MODE_R,
-              'path': os.path.join(REL_DATA, 'files2', 'bar'),
-              'size': self._size(REL_DATA, 'files2', 'bar'),
-            },
-            {
-              'mode': MODE_R,
-              'path': os.path.join(REL_DATA, 'files2', 'foo'),
-              'size': self._size(REL_DATA, 'files2', 'foo'),
-            },
-            {
-              'mode': MODE_R,
-              'path': os.path.join(REL_DATA, 'symlink.py'),
-              'size': self._size(REL_DATA, 'symlink.py'),
-            },
-          ],
-          'initial_cwd': self.initial_cwd,
-        },
-      }
-      cmd = [sys.executable, os.path.join('trace_inputs', 'symlink.py')]
-      results = self._execute_trace(cmd)
-      actual = results.flatten()
-      self.assertTrue(actual['root'].pop('pid'))
-      self.assertEqual(expected, actual)
-      files = [
-        # In particular, the symlink is *not* resolved.
-        u'tests/trace_inputs/files2/'.replace('/', os.path.sep),
-        u'tests/trace_inputs/symlink.py'.replace('/', os.path.sep),
-      ]
-      def blacklist(f):
-        return f.endswith(('.pyc', '.svn', 'do_not_care.txt'))
-      simplified = trace_inputs.extract_directories(
-          unicode(ROOT_DIR), results.files, blacklist)
-      self.assertEqual(files, [f.path for f in simplified])
-
-  @check_can_trace
-  def test_trace_quoted(self):
-    results = self._execute_trace([sys.executable, '-c', 'print("hi")'])
-    expected = {
-      'root': {
-        'children': [],
-        'command': [
-          self.executable,
-          '-c',
-          'print("hi")',
-        ],
-        'executable': self.real_executable,
-        'files': [],
-        'initial_cwd': self.initial_cwd,
-      },
-    }
-    actual = results.flatten()
-    self.assertTrue(actual['root'].pop('pid'))
-    self.assertEqual(expected, actual)
-
-  @check_can_trace
-  def _touch_expected(self, command):
-    # Looks for files that were touched but not opened, using different APIs.
-    results = self._execute_trace(
-      [sys.executable, os.path.join('trace_inputs', 'touch_only.py'), command])
-    expected = {
-      'root': {
-        'children': [],
-        'command': [
-          self.executable,
-          os.path.join('trace_inputs', 'touch_only.py'),
-          command,
-        ],
-        'executable': self.real_executable,
-        'files': [
-          {
-            'mode': MODE_T,
-            'path': os.path.join(REL_DATA, 'test_file.txt'),
-            'size': self._size(REL_DATA, 'test_file.txt'),
-          },
-          {
-            'mode': MODE_R,
-            'path': os.path.join(REL_DATA, 'touch_only.py'),
-            'size': self._size(REL_DATA, 'touch_only.py'),
-          },
-        ],
-        'initial_cwd': self.initial_cwd,
-      },
-    }
-    if sys.platform != 'linux2':
-      # TODO(maruel): Remove once properly implemented.
-      expected['root']['files'].pop(0)
-
-    actual = results.flatten()
-    self.assertTrue(actual['root'].pop('pid'))
-    self.assertEqual(expected, actual)
-
-  def test_trace_touch_only_access(self):
-    self._touch_expected('access')
-
-  def test_trace_touch_only_isfile(self):
-    self._touch_expected('isfile')
-
-  def test_trace_touch_only_stat(self):
-    self._touch_expected('stat')
-
-  @check_can_trace
-  def test_trace_tricky_filename(self):
-    # TODO(maruel): On Windows, it's using the current code page so some
-    # characters can't be represented. As a nice North American, hard-code the
-    # string to something representable in code page 1252. The exact code page
-    # depends on the user's system.
-    if sys.platform == 'win32':
-      filename = u'foo, bar,  ~p#o,,ué^t%t .txt'
-    else:
-      filename = u'foo, bar,  ~p#o,,ué^t%t 和平.txt'
-
-    exe = os.path.join(self.tempdir, 'tricky_filename.py')
-    shutil.copyfile(
-        os.path.join(self.cwd, 'trace_inputs', 'tricky_filename.py'), exe)
-    expected = {
-      'root': {
-        'children': [],
-        'command': [
-          self.executable,
-          exe,
-        ],
-        'executable': self.real_executable,
-        'files': [
-          {
-            'mode': MODE_W,
-            'path':  filename,
-            'size': long(len('Bingo!')),
-          },
-          {
-            'mode': MODE_R,
-            'path': u'tricky_filename.py',
-            'size': self._size(REL_DATA, 'tricky_filename.py'),
-          },
-        ],
-        'initial_cwd': self.tempdir if sys.platform != 'win32' else None,
-      },
-    }
-
-    api = trace_inputs.get_api()
-    returncode, output = trace_inputs.trace(
-        self.log, [exe], self.tempdir, api, True)
-    self.assertEqual('', output)
-    self.assertEqual(0, returncode)
-    data = api.parse_log(self.log, lambda _: False, None)
-    self.assertEqual(1, len(data))
-    if 'exception' in data[0]:
-      raise data[0]['exception'][0], \
-          data[0]['exception'][1], \
-          data[0]['exception'][2]
-    actual = data[0]['results'].strip_root(self.tempdir).flatten()
-    self.assertTrue(actual['root'].pop('pid'))
-    self.assertEqual(expected, actual)
-    trace_inputs.get_api().clean_trace(self.log)
-    files = sorted(
-        unicodedata.normalize('NFC', i)
-        for i in os.listdir(unicode(self.tempdir)))
-    self.assertEqual([filename, 'tricky_filename.py'], files)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  if VERBOSE:
-    unittest.TestCase.maxDiff = None
-  # Necessary for the dtrace logger to work around execve() hook. See
-  # trace_inputs.py for more details.
-  os.environ['TRACE_INPUTS_DTRACE_ENABLE_EXECVE'] = '1'
-  print >> sys.stderr, 'Test are currently disabled'
-  sys.exit(0)
-  #unittest.main()
diff --git a/tools/swarming_client/tests/trace_inputs_test.py b/tools/swarming_client/tests/trace_inputs_test.py
deleted file mode 100755
index 49a4b4a..0000000
--- a/tools/swarming_client/tests/trace_inputs_test.py
+++ /dev/null
@@ -1,691 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import StringIO
-import logging
-import os
-import sys
-import unittest
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(BASE_DIR)
-sys.path.insert(0, ROOT_DIR)
-
-FILE_PATH = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
-
-import trace_inputs
-
-# Access to a protected member _FOO of a client class
-# pylint: disable=W0212
-
-
-def join_norm(*args):
-  """Joins and normalizes path in a single step."""
-  return unicode(os.path.normpath(os.path.join(*args)))
-
-
-class TraceInputs(unittest.TestCase):
-  def test_process_quoted_arguments(self):
-    test_cases = (
-      ('"foo"', ['foo']),
-      ('"foo", "bar"', ['foo', 'bar']),
-      ('"foo"..., "bar"', ['foo', 'bar']),
-      ('"foo", "bar"...', ['foo', 'bar']),
-      (
-        '"/browser_tests", "--type=use,comma"',
-        ['/browser_tests', '--type=use,comma']
-      ),
-      (
-        '"/browser_tests", "--ignored=\\" --type=renderer \\""',
-        ['/browser_tests', '--ignored=" --type=renderer "']
-      ),
-      (
-        '"/Release+Asserts/bin/clang", "-cc1", ...',
-        ['/Release+Asserts/bin/clang', '-cc1'],
-      ),
-    )
-    for actual, expected in test_cases:
-      self.assertEqual(
-          expected, trace_inputs.strace_process_quoted_arguments(actual))
-
-  def test_process_escaped_arguments(self):
-    test_cases = (
-      ('foo\\0', ['foo']),
-      ('foo\\001bar\\0', ['foo', 'bar']),
-      ('\\"foo\\"\\0', ['"foo"']),
-    )
-    for actual, expected in test_cases:
-      self.assertEqual(
-          expected,
-          trace_inputs.Dtrace.Context.process_escaped_arguments(actual))
-
-  def test_variable_abs(self):
-    value = trace_inputs.Results.File(
-        None, u'/foo/bar', None, None, trace_inputs.Results.File.READ)
-    actual = value.replace_variables({'$FOO': '/foo'})
-    self.assertEqual('$FOO/bar', actual.path)
-    self.assertEqual('$FOO/bar', actual.full_path)
-    self.assertEqual(True, actual.tainted)
-
-  def test_variable_rel(self):
-    value = trace_inputs.Results.File(
-        u'/usr', u'foo/bar', None, None, trace_inputs.Results.File.READ)
-    actual = value.replace_variables({'$FOO': 'foo'})
-    self.assertEqual('$FOO/bar', actual.path)
-    self.assertEqual(os.path.join('/usr', '$FOO/bar'), actual.full_path)
-    self.assertEqual(True, actual.tainted)
-
-  def test_strace_filename(self):
-    filename = u'foo, bar,  ~p#o,,ué^t%t.txt'
-    data = 'foo, bar,  ~p#o,,u\\303\\251^t%t.txt'
-    self.assertEqual(filename, trace_inputs.Strace.load_filename(data))
-
-  def test_CsvReader(self):
-    test_cases = {
-      u'   Next is empty, ,  {00000000-0000}':
-        [u'Next is empty', u'', u'{00000000-0000}'],
-
-      u'   Foo, , "\\\\NT AUTHORITY\\SYSTEM", "Idle", ""':
-        [u'Foo', u'', u'\\\\NT AUTHORITY\\SYSTEM', u'Idle', u''],
-
-      u'   Foo,  ""Who the hell thought delimiters are great as escape too""':
-        [u'Foo', u'"Who the hell thought delimiters are great as escape too"'],
-
-      (
-        u'  "remoting.exe", ""C:\\Program Files\\remoting.exe" '
-        u'--host="C:\\ProgramData\\host.json""'
-      ):
-        [
-          u'remoting.exe',
-          u'"C:\\Program Files\\remoting.exe" '
-          u'--host="C:\\ProgramData\\host.json"'
-        ],
-
-      u'"MONSTRE", "", 0x0': [u'MONSTRE', u'', u'0x0'],
-
-      # To whoever wrote this code at Microsoft: You did it wrong.
-      u'"cmd.exe", ""C:\\\\Winz\\\\cmd.exe" /k ""C:\\\\MSVS\\\\vc.bat"" x86"':
-        [u'cmd.exe', u'"C:\\\\Winz\\\\cmd.exe" /k "C:\\\\MSVS\\\\vc.bat" x86'],
-    }
-    for data, expected in test_cases.iteritems():
-      csv = trace_inputs.LogmanTrace.Tracer.CsvReader(StringIO.StringIO(data))
-      actual = [i for i in csv]
-      self.assertEqual(1, len(actual))
-      self.assertEqual(expected, actual[0])
-
-
-if sys.platform != 'win32':
-  class StraceInputs(unittest.TestCase):
-    # Represents the root process pid (an arbitrary number).
-    _ROOT_PID = 27
-    _CHILD_PID = 24
-    _GRAND_CHILD_PID = 70
-
-    @staticmethod
-    def _load_context(lines, initial_cwd):
-      context = trace_inputs.Strace.Context(lambda _: False, None, initial_cwd)
-      for line in lines:
-        context.on_line(*line)
-      done = any(p._done for p in context._process_lookup.itervalues())
-      return context.to_results().flatten(), done
-
-    def assertContext(self, lines, initial_cwd, expected, expected_done):
-      actual, actual_done = self._load_context(lines, initial_cwd)
-      self.assertEqual(expected, actual)
-      # If actual_done is True, this means the log was cut off abruptly.
-      self.assertEqual(expected_done, actual_done)
-
-    def _test_lines(self, lines, initial_cwd, files, command=None):
-      filepath = join_norm(initial_cwd, '../out/unittests')
-      command = command or ['../out/unittests']
-      expected = {
-        'root': {
-          'children': [],
-          'command': command,
-          'executable': filepath,
-          'files': files,
-          'initial_cwd': initial_cwd,
-          'pid': self._ROOT_PID,
-        }
-      }
-      if not files:
-        expected['root']['command'] = None
-        expected['root']['executable'] = None
-      self.assertContext(lines, initial_cwd, expected, False)
-
-    def test_execve(self):
-      lines = [
-        (self._ROOT_PID,
-          'execve("/home/foo_bar_user/out/unittests", '
-            '["/home/foo_bar_user/out/unittests", '
-            '"--random-flag"], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-          'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND, 0666) = 8'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-        {
-          'mode': trace_inputs.Results.File.WRITE,
-          'path': u'/home/foo_bar_user/src/out/unittests.log',
-          'size': -1,
-        },
-      ]
-      command = [
-        '/home/foo_bar_user/out/unittests', '--random-flag',
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', files, command)
-
-    def test_empty(self):
-      try:
-        self._load_context([], None)
-        self.fail()
-      except trace_inputs.TracingFailure, e:
-        expected = (
-          'Found internal inconsitency in process lifetime detection '
-          'while finding the root process',
-          None,
-          None,
-          None,
-          None,
-          [])
-        self.assertEqual(expected, e.args)
-
-    def test_chmod(self):
-      lines = [
-          (self._ROOT_PID, 'chmod("temp/file", 0100644) = 0'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [
-            {
-              'mode': trace_inputs.Results.File.WRITE,
-              'path': u'/home/foo_bar_user/src/temp/file',
-              'size': -1,
-            },
-          ],
-          'initial_cwd': u'/home/foo_bar_user/src',
-          'pid': self._ROOT_PID,
-        }
-      }
-      self.assertContext(lines, u'/home/foo_bar_user/src', expected, False)
-
-    def test_close(self):
-      lines = [
-        (self._ROOT_PID, 'close(7)                          = 0'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_clone(self):
-      # Grand-child with relative directory.
-      lines = [
-        (self._ROOT_PID,
-          'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
-            '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
-            self._CHILD_PID),
-        (self._CHILD_PID,
-          'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
-            '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
-            self._GRAND_CHILD_PID),
-        (self._GRAND_CHILD_PID,
-          'open("%s", O_RDONLY)       = 76' % os.path.basename(
-              FILE_PATH.encode('utf-8'))),
-      ]
-      size = os.stat(FILE_PATH).st_size
-      expected = {
-        'root': {
-          'children': [
-            {
-              'children': [
-                {
-                  'children': [],
-                  'command': None,
-                  'executable': None,
-                  'files': [
-                    {
-                      'mode': trace_inputs.Results.File.READ,
-                      'path': FILE_PATH,
-                      'size': size,
-                    },
-                  ],
-                  'initial_cwd': BASE_DIR,
-                  'pid': self._GRAND_CHILD_PID,
-                },
-              ],
-              'command': None,
-              'executable': None,
-              'files': [],
-              'initial_cwd': BASE_DIR,
-              'pid': self._CHILD_PID,
-            },
-          ],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': BASE_DIR,
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, BASE_DIR, expected, False)
-
-    def test_clone_chdir(self):
-      # Grand-child with relative directory.
-      lines = [
-        (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-          'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
-            '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
-            self._CHILD_PID),
-        (self._CHILD_PID,
-          'chdir("/home_foo_bar_user/path1") = 0'),
-        (self._CHILD_PID,
-          'clone(child_stack=0, flags=CLONE_CHILD_CLEARTID'
-            '|CLONE_CHILD_SETTID|SIGCHLD, child_tidptr=0x7f5350f829d0) = %d' %
-            self._GRAND_CHILD_PID),
-        (self._GRAND_CHILD_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID, 'chdir("/home_foo_bar_user/path2") = 0'),
-        (self._GRAND_CHILD_PID,
-          'open("random.txt", O_RDONLY)       = 76'),
-      ]
-      expected = {
-        'root': {
-          'children': [
-            {
-              'children': [
-                {
-                  'children': [],
-                  'command': ['../out/unittests'],
-                  'executable': '/home_foo_bar_user/out/unittests',
-                  'files': [
-                    {
-                      'mode': trace_inputs.Results.File.READ,
-                      'path': u'/home_foo_bar_user/out/unittests',
-                      'size': -1,
-                    },
-                    {
-                      'mode': trace_inputs.Results.File.READ,
-                      'path': u'/home_foo_bar_user/path1/random.txt',
-                      'size': -1,
-                    },
-                  ],
-                  'initial_cwd': u'/home_foo_bar_user/path1',
-                  'pid': self._GRAND_CHILD_PID,
-                },
-              ],
-              # clone does not carry over the command and executable, so it is
-              # clear whether an execve() call was done or not.
-              'command': None,
-              'executable': None,
-              # This is important: since no execve() call was done, it didn't
-              # touch the executable file.
-              'files': [],
-              'initial_cwd': unicode(ROOT_DIR),
-              'pid': self._CHILD_PID,
-            },
-          ],
-          'command': ['../out/unittests'],
-          'executable': join_norm(ROOT_DIR, '../out/unittests'),
-          'files': [
-            {
-              'mode': trace_inputs.Results.File.READ,
-              'path': join_norm(ROOT_DIR, '../out/unittests'),
-              'size': -1,
-            },
-          ],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, False)
-
-    def test_faccess(self):
-      lines = [
-        (self._ROOT_PID,
-         'faccessat(AT_FDCWD, "/home_foo_bar_user/file", W_OK) = 0'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [
-            {
-              'mode': trace_inputs.Results.File.TOUCHED,
-              'path': u'/home_foo_bar_user/file',
-              'size': -1,
-            },
-          ],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, False)
-
-    def test_futex_died(self):
-      # That's a pretty bad fork, copy-pasted from a real log.
-      lines = [
-        (self._ROOT_PID, 'close(9)                                = 0'),
-        (self._ROOT_PID, 'futex( <unfinished ... exit status 0>'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, True)
-
-    def test_futex_missing_in_action(self):
-      # That's how futex() calls roll.
-      lines = [
-        (self._ROOT_PID,
-          'clone(child_stack=0x7fae9f4bed70, flags=CLONE_VM|CLONE_FS|'
-          'CLONE_FILES|CLONE_SIGHAND|CLONE_THREAD|CLONE_SYSVSEM|CLONE_SETTLS|'
-          'CLONE_PARENT_SETTID|CLONE_CHILD_CLEARTID, '
-          'parent_tidptr=0x7fae9f4bf9d0, tls=0x7fae9f4bf700, '
-          'child_tidptr=0x7fae9f4bf9d0) = 3862'),
-        (self._ROOT_PID,
-          'futex(0x1407670, FUTEX_WAIT_PRIVATE, 2, {0, 0}) = -1 EAGAIN '
-          '(Resource temporarily unavailable)'),
-        (self._ROOT_PID, 'futex(0x1407670, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'close(9)                                = 0'),
-        (self._ROOT_PID, 'futex('),
-      ]
-      expected = {
-        'root': {
-          'children': [
-            {
-              'children': [],
-              'command': None,
-              'executable': None,
-              'files': [],
-              'initial_cwd': unicode(ROOT_DIR),
-              'pid': 3862,
-            },
-          ],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, True)
-
-    def test_futex_missing_in_partial_action(self):
-      # That's how futex() calls roll even more.
-      lines = [
-        (self._ROOT_PID,
-          'futex(0x7fff25718b14, FUTEX_CMP_REQUEUE_PRIVATE, 1, 2147483647, '
-          '0x7fff25718ae8, 2) = 1'),
-        (self._ROOT_PID, 'futex(0x7fff25718ae8, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'futex(0x697263c, FUTEX_WAIT_PRIVATE, 1, NULL) = 0'),
-        (self._ROOT_PID, 'futex(0x6972610, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'futex(0x697263c, FUTEX_WAIT_PRIVATE, 3, NULL) = 0'),
-        (self._ROOT_PID, 'futex(0x6972610, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'futex(0x697263c, FUTEX_WAIT_PRIVATE, 5, NULL) = 0'),
-        (self._ROOT_PID, 'futex(0x6972610, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'futex(0x697263c, FUTEX_WAIT_PRIVATE, 7, NULL) = 0'),
-        (self._ROOT_PID, 'futex(0x6972610, FUTEX_WAKE_PRIVATE, 1) = 0'),
-        (self._ROOT_PID, 'futex(0x7f0c17780634, '
-          'FUTEX_WAIT_BITSET_PRIVATE|FUTEX_CLOCK_REALTIME, 1, '
-          '{1351180745, 913067000}, ffffffff'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, True)
-
-    def test_futex_missing_in_partial_action_with_no_process(self):
-      # That's how futex() calls roll even more (again).
-      lines = [
-          (self._ROOT_PID, 'syscall_317(0, 0, 0, 0, 0, 0) = 0'),
-          (self._ROOT_PID, 'futex(0x7134840, FUTEX_WAIT_PRIVATE, 2, '
-           'NULL <ptrace(SYSCALL):No such process>'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, False)
-
-    def test_getcwd(self):
-      lines = [
-          (self._ROOT_PID, 'getcwd(0x7fffff0e13f0, 4096) = 52'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_lstat(self):
-      lines = [
-          (self._ROOT_PID, 'lstat(0x169a210, {...}) = 0'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_open(self):
-      lines = [
-        (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-          'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND, 0666) = 8'),
-        (self._ROOT_PID, 'open(0x7f68d954bb10, O_RDONLY|O_CLOEXEC) = 3'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-        {
-          'mode': trace_inputs.Results.File.WRITE,
-          'path': u'/home/foo_bar_user/src/out/unittests.log',
-          'size': -1,
-        },
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', files)
-
-    def test_open_resumed(self):
-      lines = [
-        (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-          'open("out/unittests.log", O_WRONLY|O_CREAT|O_APPEND '
-            '<unfinished ...>'),
-        (self._ROOT_PID, '<... open resumed> )              = 3'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-        {
-          'mode': trace_inputs.Results.File.WRITE,
-          'path': u'/home/foo_bar_user/src/out/unittests.log',
-          'size': -1,
-        },
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', files)
-
-    def test_openat(self):
-      lines = [
-        (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-         'openat(AT_FDCWD, "/home/foo_bar_user/file", O_RDONLY) = 0'),
-        (self._ROOT_PID,
-         'openat(AT_FDCWD, 0xa23f60, '
-           'O_RDONLY|O_NONBLOCK|O_DIRECTORY|O_CLOEXEC) = 4'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/file',
-          'size': -1,
-        },
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-      ]
-
-      self._test_lines(lines, u'/home/foo_bar_user/src', files)
-
-    def test_openat_died(self):
-      lines = [
-        # It's fine as long as there is nothing after.
-        (self._ROOT_PID,
-          'openat(AT_FDCWD, "/tmp/random_dir/Plugins", '
-          'O_RDONLY|O_NONBLOCK|O_DIRECTORY|O_CLOEXEC'),
-      ]
-      expected = {
-        'root': {
-          'children': [],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, True)
-
-    def test_readlink(self):
-      lines = [
-          (self._ROOT_PID, 'readlink(0x941e60, 0x7fff7a632d60, 4096) = 9'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_stat(self):
-      lines = [
-          (self._ROOT_PID,
-            'stat(0x941e60, {st_mode=S_IFREG|0644, st_size=25769, ...}) = 0'),
-          (self._ROOT_PID, 'stat(0x941e60, {...}) = 0'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_rmdir(self):
-      lines = [
-          (self._ROOT_PID, 'rmdir("directory/to/delete") = 0'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_setxattr(self):
-      lines = [
-          (self._ROOT_PID,
-           'setxattr("file.exe", "attribute", "value", 0, 0) = 0'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_sig_unexpected(self):
-      lines = [
-        (self._ROOT_PID, 'exit_group(0)                     = ?'),
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', [])
-
-    def test_stray(self):
-      lines = [
-        (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-        (self._ROOT_PID,
-          ')                                       = ? <unavailable>'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', files)
-
-    def test_truncate(self):
-      lines = [
-          (self._ROOT_PID,
-          'execve("../out/unittests", '
-            '["../out/unittests"...], [/* 44 vars */])         = 0'),
-          (self._ROOT_PID,
-           'truncate("file.exe", 0) = 0'),
-      ]
-      files = [
-        {
-          'mode': trace_inputs.Results.File.READ,
-          'path': u'/home/foo_bar_user/out/unittests',
-          'size': -1,
-        },
-        {
-          'mode': trace_inputs.Results.File.WRITE,
-          'path': u'/home/foo_bar_user/src/file.exe',
-          'size': -1,
-        },
-      ]
-      self._test_lines(lines, u'/home/foo_bar_user/src', files)
-
-    def test_vfork(self):
-      # vfork is the only function traced that doesn't take parameters.
-      lines = [
-          (self._ROOT_PID, 'vfork() = %d' % self._CHILD_PID),
-      ]
-      expected = {
-        'root': {
-          'children': [
-            {
-              'children': [],
-              'command': None,
-              'executable': None,
-              'files': [],
-              'initial_cwd': unicode(ROOT_DIR),
-              'pid': self._CHILD_PID,
-            },
-          ],
-          'command': None,
-          'executable': None,
-          'files': [],
-          'initial_cwd': unicode(ROOT_DIR),
-          'pid': self._ROOT_PID,
-        },
-      }
-      self.assertContext(lines, ROOT_DIR, expected, False)
-
-
-if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  if '-v' in sys.argv:
-    unittest.TestCase.maxDiff = None
-  unittest.main()
diff --git a/tools/swarming_client/tests/url_open_timeout_test.py b/tools/swarming_client/tests/url_open_timeout_test.py
deleted file mode 100755
index 162cf65..0000000
--- a/tools/swarming_client/tests/url_open_timeout_test.py
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import BaseHTTPServer
-import logging
-import os
-import re
-import SocketServer
-import sys
-import threading
-import time
-import unittest
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-
-from depot_tools import auto_stub
-from utils import net
-
-
-class SleepingServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
-  """Multithreaded server that serves requests that block at various stages."""
-
-  # Lingering keep-alive HTTP connections keep (not very smart) HTTPServer
-  # threads alive as well. Convert them to daemon threads so that they don't
-  # block process exit.
-  daemon_threads = True
-
-  def __init__(self):
-    BaseHTTPServer.HTTPServer.__init__(self, ('127.0.0.1', 0), SleepingHandler)
-    self.dying = False
-    self.dying_cv = threading.Condition()
-    self.serving_thread = None
-
-  def handle_error(self, _request, _client_address):
-    # Mute "error: [Errno 32] Broken pipe" errors.
-    pass
-
-  def start(self):
-    self.serving_thread = threading.Thread(target=self.serve_forever,
-                                           kwargs={'poll_interval': 0.05})
-    self.serving_thread.start()
-
-  def stop(self):
-    with self.dying_cv:
-      self.dying = True
-      self.dying_cv.notifyAll()
-    self.shutdown()
-
-  @property
-  def url(self):
-    return 'http://%s:%d' % self.socket.getsockname()
-
-  def sleep(self, timeout):
-    deadline = time.time() + timeout
-    with self.dying_cv:
-      while not self.dying and time.time() < deadline:
-        self.dying_cv.wait(deadline - time.time())
-
-
-class SleepingHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  protocol_version = 'HTTP/1.1'
-
-  path_re = re.compile(r'/(.*)/([\.\d]*)(\?.*)?')
-
-  first_line = 'FIRST LINE\n'
-  second_line = 'SECOND LINE\n'
-  full_response = first_line + second_line
-
-  modes = {
-    'sleep_before_response': ['SLEEP', 'HEADERS', 'FIRST', 'SECOND'],
-    'sleep_after_headers': ['HEADERS', 'SLEEP', 'FIRST', 'SECOND'],
-    'sleep_during_response': ['HEADERS', 'FIRST', 'SLEEP', 'SECOND'],
-    'sleep_after_response': ['HEADERS', 'FIRST', 'SECOND', 'SLEEP'],
-  }
-
-  def send_headers(self):
-    self.send_response(200)
-    self.send_header('Content-Length', len(self.full_response))
-    self.end_headers()
-
-  def log_message(self, _format, *_args):
-    # Mute "GET /sleep_before_response/0.000000 HTTP/1.1" 200 -" messages.
-    pass
-
-  def do_GET(self):
-    # Split request string like '/sleep/0.1?param=1' into ('sleep', 0.1) pair.
-    match = self.path_re.match(self.path)
-    if not match:
-      self.send_error(404)
-      return
-    mode, timeout, _ = match.groups()
-    # Ensure timeout is float.
-    try:
-      timeout = float(timeout)
-    except ValueError:
-      self.send_error(400)
-      return
-    # Ensure mode is known.
-    if mode not in self.modes:
-      self.send_error(404)
-      return
-    # Mapping mode's action -> function to call.
-    actions = {
-      'SLEEP': lambda: self.server.sleep(timeout),
-      'HEADERS': self.send_headers,
-      'FIRST': lambda: self.wfile.write(self.first_line),
-      'SECOND': lambda: self.wfile.write(self.second_line),
-    }
-    # Execute all actions defined by the mode.
-    for action in self.modes[mode]:
-      actions[action]()
-
-
-class UrlOpenTimeoutTest(auto_stub.TestCase):
-  def setUp(self):
-    super(UrlOpenTimeoutTest, self).setUp()
-    self.mock(net, 'OAuthAuthenticator', lambda *_: None)
-    self.server = SleepingServer()
-    self.server.start()
-
-  def tearDown(self):
-    self.server.stop()
-    self.server = None
-    super(UrlOpenTimeoutTest, self).tearDown()
-
-  def call(self, mode, sleep_duration, **kwargs):
-    url = self.server.url + '/%s/%f' % (mode, sleep_duration)
-    kwargs['max_attempts'] = 2
-    return net.url_open(url, **kwargs)
-
-  def test_urlopen_success(self):
-    # Server doesn't block.
-    for mode in SleepingHandler.modes:
-      self.assertEqual(self.call(mode, 0, read_timeout=0.1).read(),
-                       SleepingHandler.full_response)
-    # Server does block, but url_open called without read timeout.
-    for mode in SleepingHandler.modes:
-      self.assertEqual(self.call(mode, 0.25, read_timeout=None).read(),
-                       SleepingHandler.full_response)
-
-  def test_urlopen_retry(self):
-    # This should trigger retry logic and eventually return None.
-    self.mock(net, 'sleep_before_retry', lambda *_: None)
-    stream = self.call('sleep_before_response', 0.25, read_timeout=0.1)
-    self.assertIsNone(stream)
-
-  def test_urlopen_keeping_connection(self):
-    # Sleeping after request is sent -> it's just connection keep alive.
-    stream = self.call('sleep_after_response', 0.25, read_timeout=0.1)
-    self.assertEqual(stream.read(), SleepingHandler.full_response)
-
-  def test_urlopen_timeout_early_stream(self):
-    # Timeouts while reading from the stream.
-    stream = self.call('sleep_after_headers', 0.25, read_timeout=0.1)
-    self.assertTrue(stream)
-    gen = stream.iter_content(len(SleepingHandler.first_line))
-    with self.assertRaises(net.TimeoutError):
-      gen.next()
-
-  def test_urlopen_timeout_mid_stream(self):
-    # Timeouts while reading from the stream.
-    stream = self.call('sleep_during_response', 0.25, read_timeout=0.1)
-    self.assertTrue(stream)
-    gen = stream.iter_content(len(SleepingHandler.first_line))
-    gen.next()
-    with self.assertRaises(net.TimeoutError):
-      gen.next()
-
-
-if __name__ == '__main__':
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/tests/zip_package_test.py b/tools/swarming_client/tests/zip_package_test.py
deleted file mode 100755
index 8e6ce9c..0000000
--- a/tools/swarming_client/tests/zip_package_test.py
+++ /dev/null
@@ -1,292 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import cStringIO as StringIO
-import logging
-import os
-import subprocess
-import sys
-import tempfile
-import unittest
-import zipfile
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import zip_package
-
-
-def check_output(*args, **kwargs):
-  return subprocess.check_output(*args, stderr=subprocess.STDOUT, **kwargs)
-
-
-class ZipPackageTest(unittest.TestCase):
-  def setUp(self):
-    super(ZipPackageTest, self).setUp()
-    self.temp_dir = tempfile.mkdtemp(prefix=u'zip_package_test')
-
-  def tearDown(self):
-    try:
-      file_path.rmtree(self.temp_dir)
-    finally:
-      super(ZipPackageTest, self).tearDown()
-
-  def stage_files(self, files):
-    """Populates temp directory with given files specified as a list or dict."""
-    if not isinstance(files, dict):
-      files = dict((p, '') for p in files)
-    for path, content in files.iteritems():
-      abs_path = os.path.join(self.temp_dir, path.replace('/', os.sep))
-      dir_path = os.path.dirname(abs_path)
-      if not os.path.exists(dir_path):
-        os.makedirs(dir_path)
-      with open(abs_path, 'wb') as f:
-        f.write(content)
-
-  @staticmethod
-  def read_zip(stream):
-    """Given some stream with zip data, reads and decompresses it into dict."""
-    zip_file = zipfile.ZipFile(stream, 'r')
-    try:
-      return dict((i.filename, zip_file.read(i)) for i in zip_file.infolist())
-    finally:
-      zip_file.close()
-
-  def test_require_absolute_root(self):
-    # Absolute path is ok.
-    zip_package.ZipPackage(self.temp_dir)
-    # Relative path is not ok.
-    with self.assertRaises(AssertionError):
-      zip_package.ZipPackage('.')
-
-  def test_require_absolute_file_paths(self):
-    # Add some files to temp_dir.
-    self.stage_files([
-      'a.txt',
-      'b.py',
-      'c/c.txt',
-    ])
-
-    # Item to add -> method used to add it.
-    cases = [
-      ('a.txt', zip_package.ZipPackage.add_file),
-      ('b.py', zip_package.ZipPackage.add_python_file),
-      ('c', zip_package.ZipPackage.add_directory),
-    ]
-
-    for path, method in cases:
-      pkg = zip_package.ZipPackage(self.temp_dir)
-      # Absolute path is ok.
-      method(pkg, os.path.join(self.temp_dir, path))
-      # Relative path is not ok.
-      with self.assertRaises(AssertionError):
-        method(pkg, path)
-
-  def test_added_files_are_under_root(self):
-    # Add some files to temp_dir.
-    self.stage_files([
-      'a.txt',
-      'p.py',
-      'pkg/1.txt',
-      'some_dir/2.txt',
-    ])
-
-    # Adding using |archive_path| should work.
-    pkg = zip_package.ZipPackage(os.path.join(self.temp_dir, 'pkg'))
-    pkg.add_file(os.path.join(self.temp_dir, 'a.txt'), '_a.txt')
-    pkg.add_python_file(os.path.join(self.temp_dir, 'p.py'), '_p.py')
-    pkg.add_directory(os.path.join(self.temp_dir, 'pkg'), '_pkg')
-
-    # Adding without |archive_path| should fail.
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_file(os.path.join(self.temp_dir, 'a.txt'))
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_python_file(os.path.join(self.temp_dir, 'p.py'))
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_directory(os.path.join(self.temp_dir, 'a.txt'))
-
-  def test_adding_missing_files(self):
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_file(os.path.join(self.temp_dir, 'im_not_here'))
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_python_file(os.path.join(self.temp_dir, 'im_not_here.py'))
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_directory(os.path.join(self.temp_dir, 'im_not_here_dir'))
-
-  def test_adding_dir_as_file(self):
-    # Create 'dir'.
-    self.stage_files(['dir/keep'])
-    # Try to add it as file, not a directory.
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_file(os.path.join(self.temp_dir, 'dir'))
-    # Adding as directory works.
-    pkg.add_directory(os.path.join(self.temp_dir, 'dir'))
-
-  def test_adding_non_python_as_python(self):
-    self.stage_files(['file.sh'])
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_python_file(os.path.join(self.temp_dir, 'file.sh'))
-
-  def test_adding_py_instead_of_pyc(self):
-    self.stage_files([
-      'file.py',
-      'file.pyo',
-      'file.pyc',
-    ])
-    for alternative in ('file.pyc', 'file.pyo'):
-      pkg = zip_package.ZipPackage(self.temp_dir)
-      pkg.add_python_file(os.path.join(self.temp_dir, alternative))
-      self.assertIn('file.py', pkg.files)
-
-  def test_adding_same_file_twice(self):
-    self.stage_files(['file'])
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_file(os.path.join(self.temp_dir, 'file'))
-    with self.assertRaises(zip_package.ZipPackageError):
-      pkg.add_file(os.path.join(self.temp_dir, 'file'))
-
-  def test_add_directory(self):
-    should_add = [
-      'script.py',
-      'a/1.txt',
-      'a/2.txt',
-      'a/b/3.txt',
-      'a/script.py',
-    ]
-    should_ignore = [
-      'script.pyc',
-      'a/script.pyo',
-      '.git/stuff',
-      '.svn/stuff',
-      'a/.svn/stuff',
-      'a/b/.svn/stuff',
-    ]
-    # Add a whole set and verify only files from |should_add| were added.
-    self.stage_files(should_add + should_ignore)
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_directory(self.temp_dir)
-    self.assertEqual(set(pkg.files), set(should_add))
-
-  def test_archive_path_is_respected(self):
-    self.stage_files(['a', 'b.py', 'dir/c'])
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_file(os.path.join(self.temp_dir, 'a'), 'd1/a')
-    pkg.add_python_file(os.path.join(self.temp_dir, 'b.py'), 'd2/b.py')
-    pkg.add_directory(os.path.join(self.temp_dir, 'dir'), 'd3')
-    self.assertEqual(set(pkg.files), set(['d1/a', 'd2/b.py', 'd3/c']))
-
-  def test_add_buffer(self):
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_buffer('buf1', '')
-    self.assertEqual(pkg.files, ['buf1'])
-    # No unicode.
-    with self.assertRaises(AssertionError):
-      pkg.add_buffer('buf2', u'unicode')
-
-  def test_zipping(self):
-    data = {'a': '123', 'b/c': '456'}
-    self.stage_files(data)
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_directory(self.temp_dir)
-    # Test zip_into_buffer produces readable zip with same content.
-    for compress in (True, False):
-      buf = pkg.zip_into_buffer(compress=compress)
-      self.assertEqual(data, self.read_zip(StringIO.StringIO(buf)))
-    # Test zip_into_file produces readable zip with same content.
-    for compress in (True, False):
-      path = os.path.join(self.temp_dir, 'pkg.zip')
-      pkg.zip_into_file(path, compress=compress)
-      with open(path, 'rb') as f:
-        self.assertEqual(data, self.read_zip(f))
-
-  def test_repeatable_content(self):
-    content = []
-    for _ in range(2):
-      # Build temp dir content from scratch.
-      assert not os.listdir(self.temp_dir)
-      self.stage_files({'a': '123', 'b': '456', 'c': '789'})
-      # Zip it.
-      pkg = zip_package.ZipPackage(self.temp_dir)
-      pkg.add_directory(self.temp_dir)
-      content.append(pkg.zip_into_buffer())
-      # Clear everything.
-      for name in os.listdir(self.temp_dir):
-        os.remove(os.path.join(self.temp_dir, name))
-    # Contents of both runs should match exactly.
-    self.assertEqual(content[0], content[1])
-
-  def test_running_from_zip(self):
-    # Test assumes that it runs from a normal checkout, not a zip.
-    self.assertFalse(zip_package.is_zipped_module(sys.modules[__name__]))
-    self.assertIsNone(zip_package.get_module_zip_archive(sys.modules[__name__]))
-    self.assertTrue(os.path.abspath(
-        zip_package.get_main_script_path()).startswith(ROOT_DIR))
-
-    # Build executable zip that calls same functions.
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
-    pkg.add_buffer('__main__.py', '\n'.join([
-      'import sys',
-      '',
-      'from utils import zip_package',
-      '',
-      'print zip_package.is_zipped_module(sys.modules[__name__])',
-      'print zip_package.get_module_zip_archive(sys.modules[__name__])',
-      'print zip_package.get_main_script_path()',
-    ]))
-    zip_file = os.path.join(self.temp_dir, 'out.zip')
-    pkg.zip_into_file(zip_file)
-
-    # Run the zip, validate results.
-    actual = check_output([sys.executable, zip_file]).strip().splitlines()
-    self.assertEqual(['True', zip_file, zip_file], actual)
-
-  def test_extract_resource(self):
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
-    pkg.add_buffer('cert.pem', 'Certificate\n')
-    pkg.add_buffer('__main__.py', '\n'.join([
-      'import sys',
-      'from utils import zip_package',
-      'print zip_package.extract_resource(sys.modules[__name__], \'cert.pem\')',
-    ]))
-    zip_file = os.path.join(self.temp_dir, 'out.zip')
-    pkg.zip_into_file(zip_file)
-    actual = check_output([sys.executable, zip_file]).strip()
-    self.assertEqual(tempfile.gettempdir(), os.path.dirname(actual))
-    basename = os.path.basename(actual)
-    self.assertTrue(basename.startswith('.zip_pkg-'), actual)
-    self.assertTrue(basename.endswith('-cert.pem'), actual)
-
-  def test_extract_resource_temp_dir(self):
-    pkg = zip_package.ZipPackage(self.temp_dir)
-    pkg.add_directory(os.path.join(ROOT_DIR, 'utils'), 'utils')
-    pkg.add_buffer('cert.pem', 'Certificate\n')
-    pkg.add_buffer('__main__.py', '\n'.join([
-      'import sys',
-      'from utils import zip_package',
-      'print zip_package.extract_resource(',
-      '  sys.modules[__name__], \'cert.pem\', %r)' % self.temp_dir,
-    ]))
-    zip_file = os.path.join(self.temp_dir, 'out.zip')
-    pkg.zip_into_file(zip_file)
-    actual = check_output([sys.executable, zip_file]).strip()
-    expected = os.path.join(
-        self.temp_dir,
-        '321690737f78d081937f88c3fd0e625dd48ae07d-cert.pem')
-    self.assertEqual(expected, actual)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  VERBOSE = '-v' in sys.argv
-  logging.basicConfig(level=logging.DEBUG if VERBOSE else logging.ERROR)
-  unittest.main()
diff --git a/tools/swarming_client/third_party/README.txt b/tools/swarming_client/third_party/README.txt
deleted file mode 100644
index c8b1bb3..0000000
--- a/tools/swarming_client/third_party/README.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-Packages in this directory are installed / copied from other
-repositories. When vendoring a package to be compatible with
-infra.git, please use the glyco tool:
-
-  https://chromium.googlesource.com/infra/infra/+/HEAD/glyco/README.md
-
-For example, installing google-api-python-client:
-
-Look up the revision used in
-  https://chromium.googlesource.com/infra/infra/+/master/bootstrap/deps.pyl
-At the time of writing, it's d83246e69b22f084d1ae92da5897572a4a4eb03d.
-
-cd <scratch dir>
-git clone https://chromium.googlesource.com/external/github.com/google/google-api-python-client
-cd google-api-python-client
-git checkout d83246e69b22f084d1ae92da5897572a4a4eb03d
-glyco pack . -o <wheelhouse dir>
-
-cd <luci-py checkout>/client/third_party
-glyco install -i . <wheelhouse dir>/google_api_python_client-1.4.2-*
-
-# Local modification - document it in README.swarming
-rm -rf apiclient
diff --git a/tools/swarming_client/third_party/__init__.py b/tools/swarming_client/third_party/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/chromium/LICENSE b/tools/swarming_client/third_party/chromium/LICENSE
deleted file mode 100644
index 972bb2e..0000000
--- a/tools/swarming_client/third_party/chromium/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//    * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//    * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//    * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/tools/swarming_client/third_party/chromium/README.swarming b/tools/swarming_client/third_party/chromium/README.swarming
deleted file mode 100644
index e40b4f3..0000000
--- a/tools/swarming_client/third_party/chromium/README.swarming
+++ /dev/null
@@ -1,11 +0,0 @@
-Name: natsort.py
-URL: Was part of the Chromium Commit Queue code that was deleted on 2014-02-05.
-Author: Marc-Antoine Ruel
-Revision: N/A
-License: BSD
-
-Description:
-Native (human) sorting.
-
-Local Modifications:
-None.
diff --git a/tools/swarming_client/third_party/chromium/__init__.py b/tools/swarming_client/third_party/chromium/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/chromium/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/chromium/natsort.py b/tools/swarming_client/third_party/chromium/natsort.py
deleted file mode 100644
index 76e90da..0000000
--- a/tools/swarming_client/third_party/chromium/natsort.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Intelligent natural sort implementation."""
-
-import re
-
-
-def natcmp(a, b):
-  """Natural string comparison, case sensitive."""
-  try_int = lambda s: int(s) if s.isdigit() else s
-  def natsort_key(s):
-    if not isinstance(s, basestring):
-      # Since re.findall() generates a list out of a string, return a list here
-      # to balance the comparison done in cmp().
-      return [s]
-    return map(try_int, re.findall(r'(\d+|\D+)', s))
-  return cmp(natsort_key(a), natsort_key(b))
-
-
-def try_lower(x):
-  """Opportunistically lower() a string if it is a string."""
-  return x.lower() if hasattr(x, 'lower') else x
-
-
-def naticasecmp(a, b):
-  """Natural string comparison, ignores case."""
-  return natcmp(try_lower(a), try_lower(b))
-
-
-def natsort(seq, cmp=natcmp, *args, **kwargs):  # pylint: disable=W0622
-  """In-place natural string sort.
-  >>> a = ['3A2', '3a1']
-  >>> natsort(a, key=try_lower)
-  >>> a
-  ['3a1', '3A2']
-  >>> a = ['3a2', '3A1']
-  >>> natsort(a, key=try_lower)
-  >>> a
-  ['3A1', '3a2']
-  >>> a = ['3A2', '3a1']
-  >>> natsort(a, cmp=naticasecmp)
-  >>> a
-  ['3a1', '3A2']
-  >>> a = ['3a2', '3A1']
-  >>> natsort(a, cmp=naticasecmp)
-  >>> a
-  ['3A1', '3a2']
-  """
-  seq.sort(cmp=cmp, *args, **kwargs)
-
-
-def natsorted(seq, cmp=natcmp, *args, **kwargs):  # pylint: disable=W0622
-  """Returns a copy of seq, sorted by natural string sort.
-
-  >>> natsorted(i for i in [4, '3a', '2', 1])
-  [1, '2', '3a', 4]
-  >>> natsorted(['a4', 'a30'])
-  ['a4', 'a30']
-  >>> natsorted(['3A2', '3a1'], key=try_lower)
-  ['3a1', '3A2']
-  >>> natsorted(['3a2', '3A1'], key=try_lower)
-  ['3A1', '3a2']
-  >>> natsorted(['3A2', '3a1'], cmp=naticasecmp)
-  ['3a1', '3A2']
-  >>> natsorted(['3a2', '3A1'], cmp=naticasecmp)
-  ['3A1', '3a2']
-  >>> natsorted(['3A2', '3a1'])
-  ['3A2', '3a1']
-  >>> natsorted(['3a2', '3A1'])
-  ['3A1', '3a2']
-  """
-  return sorted(seq, cmp=cmp, *args, **kwargs)
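The cmp-based API above is Python 2 only. As a rough editorial sketch (not part of this module), the same natural ordering is usually expressed with a key function on Python 3:

    import re

    def natsort_key(s):
      # Split 'a30' into ['a', 30] so runs of digits compare numerically.
      if not isinstance(s, str):
        return [s]
      return [int(p) if p.isdigit() else p for p in re.findall(r'(\d+|\D+)', s)]

    assert sorted(['a30', 'a4'], key=natsort_key) == ['a4', 'a30']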
diff --git a/tools/swarming_client/third_party/chromium/natsort_test.py b/tools/swarming_client/third_party/chromium/natsort_test.py
deleted file mode 100755
index 6d35ae2..0000000
--- a/tools/swarming_client/third_party/chromium/natsort_test.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for natsort.py."""
-
-import doctest
-import os
-import sys
-
-import natsort
-
-
-if __name__ == '__main__':
-  doctest.testmod(natsort)
diff --git a/tools/swarming_client/third_party/colorama/LICENSE.txt b/tools/swarming_client/third_party/colorama/LICENSE.txt
deleted file mode 100644
index 5f56779..0000000
--- a/tools/swarming_client/third_party/colorama/LICENSE.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright (c) 2010 Jonathan Hartley
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-* Redistributions of source code must retain the above copyright notice, this
-  list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright notice,
-  this list of conditions and the following disclaimer in the documentation
-  and/or other materials provided with the distribution.
-
-* Neither the name of the copyright holders, nor those of its contributors
-  may be used to endorse or promote products derived from this software without
-  specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
diff --git a/tools/swarming_client/third_party/colorama/README.swarming b/tools/swarming_client/third_party/colorama/README.swarming
deleted file mode 100644
index 2605ac9..0000000
--- a/tools/swarming_client/third_party/colorama/README.swarming
+++ /dev/null
@@ -1,13 +0,0 @@
-Name: colorama
-URL: http://code.google.com/p/colorama
-Version: 5a3100113a3a (0.2.7)
-Revision: 5a3100113a3a
-
-Description:
-Provides a simple cross-platform API to print colored terminal text from Python
-applications.
-
-Additional changes:
-- Kept colorama/ but removed colorama/tests/.
-- Copied LICENSE.txt and README.txt.
-- Converted all the files to LF EOL style.
diff --git a/tools/swarming_client/third_party/colorama/README.txt b/tools/swarming_client/third_party/colorama/README.txt
deleted file mode 100644
index 8910ba5..0000000
--- a/tools/swarming_client/third_party/colorama/README.txt
+++ /dev/null
@@ -1,304 +0,0 @@
-Download and docs:
-    http://pypi.python.org/pypi/colorama
-Development:
-    http://code.google.com/p/colorama
-Discussion group:
-     https://groups.google.com/forum/#!forum/python-colorama
-
-Description
-===========
-
-Makes ANSI escape character sequences for producing colored terminal text and
-cursor positioning work under MS Windows.
-
-ANSI escape character sequences have long been used to produce colored terminal
-text and cursor positioning on Unix and Macs. Colorama makes this work on
-Windows, too, by wrapping stdout, stripping ANSI sequences it finds (which
-otherwise show up as gobbledygook in your output), and converting them into the
-appropriate win32 calls to modify the state of the terminal. On other platforms,
-Colorama does nothing.
-
-Colorama also provides some shortcuts to help generate ANSI sequences
-but works fine in conjunction with any other ANSI sequence generation library,
-such as Termcolor (http://pypi.python.org/pypi/termcolor.)
-
-This has the upshot of providing a simple cross-platform API for printing
-colored terminal text from Python, and has the happy side-effect that existing
-applications or libraries which use ANSI sequences to produce colored output on
-Linux or Macs can now also work on Windows, simply by calling
-``colorama.init()``.
-
-An alternative approach is to install 'ansi.sys' on Windows machines, which
-provides the same behaviour for all applications running in terminals. Colorama
-is intended for situations where that isn't easy (e.g. maybe your app doesn't
-have an installer.)
-
-Demo scripts in the source code repository print some colored text using
-ANSI sequences. Compare their output under Gnome-terminal's built-in ANSI
-handling, versus on Windows Command-Prompt using Colorama:
-
-.. image:: http://colorama.googlecode.com/hg/screenshots/ubuntu-demo.png
-    :width: 661
-    :height: 357
-    :alt: ANSI sequences on Ubuntu under gnome-terminal.
-
-.. image:: http://colorama.googlecode.com/hg/screenshots/windows-demo.png
-    :width: 668
-    :height: 325
-    :alt: Same ANSI sequences on Windows, using Colorama.
-
-These screengrabs show that Colorama on Windows does not support ANSI 'dim
-text': it looks the same as 'normal text'.
-
-
-License
-=======
-
-Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-
-
-Dependencies
-============
-
-None, other than Python. Tested on Python 2.5.5, 2.6.5, 2.7, 3.1.2, and 3.2
-
-Usage
-=====
-
-Initialisation
---------------
-
-Applications should initialise Colorama using::
-
-    from colorama import init
-    init()
-
-If you are on Windows, the call to ``init()`` will start filtering ANSI escape
-sequences out of any text sent to stdout or stderr, and will replace them with
-equivalent Win32 calls.
-
-Calling ``init()`` has no effect on other platforms (unless you request other
-optional functionality, see keyword args below.) The intention is that
-applications can call ``init()`` unconditionally on all platforms, after which
-ANSI output should just work.
-
-To stop using colorama before your program exits, simply call ``deinit()``.
-This will restore stdout and stderr to their original values, so that Colorama
-is disabled. To start using Colorama again, call ``reinit()``, which wraps
-stdout and stderr again, but is cheaper to call than doing ``init()`` all over
-again.
-
-
-Colored Output
---------------
-
-Cross-platform printing of colored text can then be done using Colorama's
-constant shorthand for ANSI escape sequences::
-
-    from colorama import Fore, Back, Style
-    print(Fore.RED + 'some red text')
-    print(Back.GREEN + 'and with a green background')
-    print(Style.DIM + 'and in dim text')
-    print(Fore.RESET + Back.RESET + Style.RESET_ALL)
-    print('back to normal now')
-
-or simply by manually printing ANSI sequences from your own code::
-
-    print('\033[31m' + 'some red text')
-    print('\033[39m')  # and reset to default color
-
-or Colorama can be used happily in conjunction with existing ANSI libraries
-such as Termcolor::
-
-    from colorama import init
-    from termcolor import colored
-
-    # use Colorama to make Termcolor work on Windows too
-    init()
-
-    # then use Termcolor for all colored text output
-    print(colored('Hello, World!', 'green', 'on_red'))
-
-Available formatting constants are::
-
-    Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
-    Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
-    Style: DIM, NORMAL, BRIGHT, RESET_ALL
-
-Style.RESET_ALL resets foreground, background and brightness. Colorama will
-perform this reset automatically on program exit.
-
-
-Cursor Positioning
-------------------
-
-ANSI codes to reposition the cursor are supported. See demos/demo06.py for
-an example of how to generate them.
-
-
-Init Keyword Args
------------------
-
-``init()`` accepts some kwargs to override default behaviour.
-
-init(autoreset=False):
-    If you find yourself repeatedly sending reset sequences to turn off color
-    changes at the end of every print, then ``init(autoreset=True)`` will
-    automate that::
-
-        from colorama import init
-        init(autoreset=True)
-        print(Fore.RED + 'some red text')
-        print('automatically back to default color again')
-
-init(strip=None):
-    Pass ``True`` or ``False`` to override whether ansi codes should be
-    stripped from the output. The default behaviour is to strip if on Windows.
-
-init(convert=None):
-    Pass ``True`` or ``False`` to override whether to convert ansi codes in the
-    output into win32 calls. The default behaviour is to convert if on Windows
-    and output is to a tty (terminal).
-
-init(wrap=True):
-    On Windows, colorama works by replacing ``sys.stdout`` and ``sys.stderr``
-    with proxy objects, which override the .write() method to do their work. If
-    this wrapping causes you problems, then this can be disabled by passing
-    ``init(wrap=False)``. The default behaviour is to wrap if autoreset or
-    strip or convert are True.
-
-    When wrapping is disabled, colored printing on non-Windows platforms will
-    continue to work as normal. To do cross-platform colored output, you can
-    use Colorama's ``AnsiToWin32`` proxy directly::
-
-        import sys
-        from colorama import init, AnsiToWin32
-        init(wrap=False)
-        stream = AnsiToWin32(sys.stderr).stream
-
-        # Python 2
-        print >>stream, Fore.BLUE + 'blue text on stderr'
-
-        # Python 3
-        print(Fore.BLUE + 'blue text on stderr', file=stream)
-
-
-Status & Known Problems
-=======================
-
-I've personally only tested it on WinXP (CMD, Console2), Ubuntu
-(gnome-terminal, xterm), and OSX.
-
-Some presumably valid ANSI sequences aren't recognised (see details below)
-but to my knowledge nobody has yet complained about this. Puzzling.
-
-See outstanding issues and wishlist at:
-http://code.google.com/p/colorama/issues/list
-
-If anything doesn't work for you, or doesn't do what you expected or hoped for,
-I'd love to hear about it on that issues list, would be delighted by patches,
-and would be happy to grant commit access to anyone who submits a working patch
-or two.
-
-
-Recognised ANSI Sequences
-=========================
-
-ANSI sequences generally take the form:
-
-    ESC [ <param> ; <param> ... <command>
-
-Where <param> is an integer, and <command> is a single letter. Zero or more
-params are passed to a <command>. If no params are passed, it is generally
-synonymous with passing a single zero. No spaces exist in the sequence; they
-have just been inserted here to make it easier to read.
-
-The only ANSI sequences that colorama converts into win32 calls are::
-
-    ESC [ 0 m       # reset all (colors and brightness)
-    ESC [ 1 m       # bright
-    ESC [ 2 m       # dim (looks same as normal brightness)
-    ESC [ 22 m      # normal brightness
-
-    # FOREGROUND:
-    ESC [ 30 m      # black
-    ESC [ 31 m      # red
-    ESC [ 32 m      # green
-    ESC [ 33 m      # yellow
-    ESC [ 34 m      # blue
-    ESC [ 35 m      # magenta
-    ESC [ 36 m      # cyan
-    ESC [ 37 m      # white
-    ESC [ 39 m      # reset
-
-    # BACKGROUND
-    ESC [ 40 m      # black
-    ESC [ 41 m      # red
-    ESC [ 42 m      # green
-    ESC [ 43 m      # yellow
-    ESC [ 44 m      # blue
-    ESC [ 45 m      # magenta
-    ESC [ 46 m      # cyan
-    ESC [ 47 m      # white
-    ESC [ 49 m      # reset
-
-    # cursor positioning
-    ESC [ y;x H     # position cursor at x across, y down
-
-    # clear the screen
-    ESC [ mode J    # clear the screen. Only mode 2 (clear entire screen)
-                    # is supported. It should be easy to add other modes,
-                    # let me know if that would be useful.
-
-Multiple numeric params to the 'm' command can be combined into a single
-sequence, eg::
-
-    ESC [ 36 ; 45 ; 1 m     # bright cyan text on magenta background
-
-All other ANSI sequences of the form ``ESC [ <param> ; <param> ... <command>``
-are silently stripped from the output on Windows.
-
-Any other form of ANSI sequence, such as single-character codes or alternative
-initial characters, are not recognised nor stripped. It would be cool to add
-them though. Let me know if it would be useful for you, via the issues on
-google code.
-
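As a rough editorial sketch (not colorama's exact implementation), sequences of this form can be matched in Python with a regular expression along these lines::

    import re

    # Capture the parameter list and the single-letter command of each sequence.
    ANSI_PATTERN = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])')

    matches = ANSI_PATTERN.findall('\033[36;45;1mbright cyan on magenta\033[0m')
    assert matches == [('36;45;1', 'm'), ('0', 'm')]
    params = [int(p) for p in matches[0][0].split(';') if p]   # [36, 45, 1]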
-
-Development
-===========
-
-Help and fixes welcome! Ask Jonathan for commit rights, you'll get them.
-
-Running tests requires:
-
-- Michael Foord's 'mock' module to be installed.
-- Tests are written using the 2010-era updates to 'unittest', and must be run
-  either with Python 2.7 or greater, or with Michael Foord's 'unittest2'
-  module installed.
-
-unittest2 test discovery doesn't work for colorama, so I use 'nose'::
-
-    nosetests -s
-
-The -s is required because 'nosetests' otherwise applies a proxy of its own to
-stdout, which confuses the unit tests.
-
-
-Contact
-=======
-
-Created by Jonathan Hartley, tartley@tartley.com
-
-
-Thanks
-======
-| Ben Hoyt, for a magnificent fix under 64-bit Windows.
-| Jesse@EmptySquare for submitting a fix for examples in the README.
-| User 'jamessp', an observant documentation fix for cursor positioning.
-| User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7 fix.
-| Julien Stuyck, for wisely suggesting Python3 compatible updates to README.
-| Daniel Griffith for multiple fabulous patches.
-| Oscar Lesta for valuable fix to stop ANSI chars being sent to non-tty output.
-| Roger Binns, for many suggestions, valuable feedback, & bug reports.
-| Tim Golden for thought and much appreciated feedback on the initial idea.
-
diff --git a/tools/swarming_client/third_party/colorama/__init__.py b/tools/swarming_client/third_party/colorama/__init__.py
deleted file mode 100644
index 2d127fa..0000000
--- a/tools/swarming_client/third_party/colorama/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-from .initialise import init, deinit, reinit
-from .ansi import Fore, Back, Style
-from .ansitowin32 import AnsiToWin32
-
-VERSION = '0.2.7'
-
diff --git a/tools/swarming_client/third_party/colorama/ansi.py b/tools/swarming_client/third_party/colorama/ansi.py
deleted file mode 100644
index 5dfe374..0000000
--- a/tools/swarming_client/third_party/colorama/ansi.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-'''
-This module generates ANSI character codes for printing colors to terminals.
-See: http://en.wikipedia.org/wiki/ANSI_escape_code
-'''
-
-CSI = '\033['
-
-def code_to_chars(code):
-    return CSI + str(code) + 'm'
-
-class AnsiCodes(object):
-    def __init__(self, codes):
-        for name in dir(codes):
-            if not name.startswith('_'):
-                value = getattr(codes, name)
-                setattr(self, name, code_to_chars(value))
-
-class AnsiFore:
-    BLACK   = 30
-    RED     = 31
-    GREEN   = 32
-    YELLOW  = 33
-    BLUE    = 34
-    MAGENTA = 35
-    CYAN    = 36
-    WHITE   = 37
-    RESET   = 39
-
-class AnsiBack:
-    BLACK   = 40
-    RED     = 41
-    GREEN   = 42
-    YELLOW  = 43
-    BLUE    = 44
-    MAGENTA = 45
-    CYAN    = 46
-    WHITE   = 47
-    RESET   = 49
-
-class AnsiStyle:
-    BRIGHT    = 1
-    DIM       = 2
-    NORMAL    = 22
-    RESET_ALL = 0
-
-Fore = AnsiCodes( AnsiFore )
-Back = AnsiCodes( AnsiBack )
-Style = AnsiCodes( AnsiStyle )
-
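A quick editorial illustration of what the classes above produce (the asserts assume the module is imported as-is; they are not part of the original file):

    assert code_to_chars(31) == '\033[31m'
    assert Fore.RED == '\033[31m'        # AnsiCodes maps AnsiFore.RED (31) to a CSI string
    assert Style.RESET_ALL == '\033[0m'  # AnsiStyle.RESET_ALL (0) resets everything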
diff --git a/tools/swarming_client/third_party/colorama/ansitowin32.py b/tools/swarming_client/third_party/colorama/ansitowin32.py
deleted file mode 100644
index ea0a6c1..0000000
--- a/tools/swarming_client/third_party/colorama/ansitowin32.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-import re
-import sys
-
-from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
-from .winterm import WinTerm, WinColor, WinStyle
-from .win32 import windll
-
-
-if windll is not None:
-    winterm = WinTerm()
-
-
-def is_a_tty(stream):
-    return hasattr(stream, 'isatty') and stream.isatty()
-
-
-class StreamWrapper(object):
-    '''
-    Wraps a stream (such as stdout), acting as a transparent proxy for all
-    attribute access apart from method 'write()', which is delegated to our
-    Converter instance.
-    '''
-    def __init__(self, wrapped, converter):
-        # double-underscore everything to prevent clashes with names of
-        # attributes on the wrapped stream object.
-        self.__wrapped = wrapped
-        self.__convertor = converter
-
-    def __getattr__(self, name):
-        return getattr(self.__wrapped, name)
-
-    def write(self, text):
-        self.__convertor.write(text)
-
-
-class AnsiToWin32(object):
-    '''
-    Implements a 'write()' method which, on Windows, will strip ANSI character
-    sequences from the text, and if outputting to a tty, will convert them into
-    win32 function calls.
-    '''
-    ANSI_RE = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
-
-    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
-        # The wrapped stream (normally sys.stdout or sys.stderr)
-        self.wrapped = wrapped
-
-        # should we reset colors to defaults after every .write()
-        self.autoreset = autoreset
-
-        # create the proxy wrapping our output stream
-        self.stream = StreamWrapper(wrapped, self)
-
-        on_windows = sys.platform.startswith('win')
-
-        # should we strip ANSI sequences from our output?
-        if strip is None:
-            strip = on_windows
-        self.strip = strip
-
-        # should we convert ANSI sequences into win32 calls?
-        if convert is None:
-            convert = on_windows and is_a_tty(wrapped)
-        self.convert = convert
-
-        # dict of ansi codes to win32 functions and parameters
-        self.win32_calls = self.get_win32_calls()
-
-        # are we wrapping stderr?
-        self.on_stderr = self.wrapped is sys.stderr
-
-
-    def should_wrap(self):
-        '''
-        True if this class is actually needed. If false, then the output
-        stream will not be affected, nor will win32 calls be issued, so
-        wrapping stdout is not actually required. This will generally be
-        False on non-Windows platforms, unless optional functionality like
-        autoreset has been requested using kwargs to init()
-        '''
-        return self.convert or self.strip or self.autoreset
-
-
-    def get_win32_calls(self):
-        if self.convert and winterm:
-            return {
-                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
-                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
-                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
-                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
-                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
-                AnsiFore.RED: (winterm.fore, WinColor.RED),
-                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
-                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
-                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
-                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
-                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
-                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
-                AnsiFore.RESET: (winterm.fore, ),
-                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
-                AnsiBack.RED: (winterm.back, WinColor.RED),
-                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
-                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
-                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
-                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
-                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
-                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
-                AnsiBack.RESET: (winterm.back, ),
-            }
-
-
-    def write(self, text):
-        if self.strip or self.convert:
-            self.write_and_convert(text)
-        else:
-            self.wrapped.write(text)
-            self.wrapped.flush()
-        if self.autoreset:
-            self.reset_all()
-
-
-    def reset_all(self):
-        if self.convert:
-            self.call_win32('m', (0,))
-        elif is_a_tty(self.wrapped):
-            self.wrapped.write(Style.RESET_ALL)
-
-
-    def write_and_convert(self, text):
-        '''
-        Write the given text to our wrapped stream, stripping any ANSI
-        sequences from the text, and optionally converting them into win32
-        calls.
-        '''
-        cursor = 0
-        for match in self.ANSI_RE.finditer(text):
-            start, end = match.span()
-            self.write_plain_text(text, cursor, start)
-            self.convert_ansi(*match.groups())
-            cursor = end
-        self.write_plain_text(text, cursor, len(text))
-
-
-    def write_plain_text(self, text, start, end):
-        if start < end:
-            self.wrapped.write(text[start:end])
-            self.wrapped.flush()
-
-
-    def convert_ansi(self, paramstring, command):
-        if self.convert:
-            params = self.extract_params(paramstring)
-            self.call_win32(command, params)
-
-
-    def extract_params(self, paramstring):
-        def split(paramstring):
-            for p in paramstring.split(';'):
-                if p != '':
-                    yield int(p)
-        return tuple(split(paramstring))
-
-
-    def call_win32(self, command, params):
-        if params == []:
-            params = [0]
-        if command == 'm':
-            for param in params:
-                if param in self.win32_calls:
-                    func_args = self.win32_calls[param]
-                    func = func_args[0]
-                    args = func_args[1:]
-                    kwargs = dict(on_stderr=self.on_stderr)
-                    func(*args, **kwargs)
-        elif command in ('H', 'f'): # set cursor position
-            func = winterm.set_cursor_position
-            func(params, on_stderr=self.on_stderr)
-        elif command in ('J'):
-            func = winterm.erase_data
-            func(params, on_stderr=self.on_stderr)
-        elif command == 'A':
-            if params == () or params == None:
-                num_rows = 1
-            else:
-                num_rows = params[0]
-            func = winterm.cursor_up
-            func(num_rows, on_stderr=self.on_stderr)
-
diff --git a/tools/swarming_client/third_party/colorama/initialise.py b/tools/swarming_client/third_party/colorama/initialise.py
deleted file mode 100644
index cba3676..0000000
--- a/tools/swarming_client/third_party/colorama/initialise.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-import atexit
-import sys
-
-from .ansitowin32 import AnsiToWin32
-
-
-orig_stdout = sys.stdout
-orig_stderr = sys.stderr
-
-wrapped_stdout = sys.stdout
-wrapped_stderr = sys.stderr
-
-atexit_done = False
-
-
-def reset_all():
-    AnsiToWin32(orig_stdout).reset_all()
-
-
-def init(autoreset=False, convert=None, strip=None, wrap=True):
-
-    if not wrap and any([autoreset, convert, strip]):
-        raise ValueError('wrap=False conflicts with any other arg=True')
-
-    global wrapped_stdout, wrapped_stderr
-    sys.stdout = wrapped_stdout = \
-        wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
-    sys.stderr = wrapped_stderr = \
-        wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
-
-    global atexit_done
-    if not atexit_done:
-        atexit.register(reset_all)
-        atexit_done = True
-
-
-def deinit():
-    sys.stdout = orig_stdout
-    sys.stderr = orig_stderr
-
-
-def reinit():
-    sys.stdout = wrapped_stdout
-    sys.stderr = wrapped_stderr
-
-
-def wrap_stream(stream, convert, strip, autoreset, wrap):
-    if wrap:
-        wrapper = AnsiToWin32(stream,
-            convert=convert, strip=strip, autoreset=autoreset)
-        if wrapper.should_wrap():
-            stream = wrapper.stream
-    return stream
-
-
diff --git a/tools/swarming_client/third_party/colorama/win32.py b/tools/swarming_client/third_party/colorama/win32.py
deleted file mode 100644
index f4024f9..0000000
--- a/tools/swarming_client/third_party/colorama/win32.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-
-# from winbase.h
-STDOUT = -11
-STDERR = -12
-
-try:
-    from ctypes import windll
-    from ctypes import wintypes
-except ImportError:
-    windll = None
-    SetConsoleTextAttribute = lambda *_: None
-else:
-    from ctypes import (
-        byref, Structure, c_char, c_short, c_uint32, c_ushort, POINTER
-    )
-
-    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
-        """struct in wincon.h."""
-        _fields_ = [
-            ("dwSize", wintypes._COORD),
-            ("dwCursorPosition", wintypes._COORD),
-            ("wAttributes", wintypes.WORD),
-            ("srWindow", wintypes.SMALL_RECT),
-            ("dwMaximumWindowSize", wintypes._COORD),
-        ]
-        def __str__(self):
-            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
-                self.dwSize.Y, self.dwSize.X
-                , self.dwCursorPosition.Y, self.dwCursorPosition.X
-                , self.wAttributes
-                , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
-                , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
-            )
-
-    _GetStdHandle = windll.kernel32.GetStdHandle
-    _GetStdHandle.argtypes = [
-        wintypes.DWORD,
-    ]
-    _GetStdHandle.restype = wintypes.HANDLE
-
-    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
-    _GetConsoleScreenBufferInfo.argtypes = [
-        wintypes.HANDLE,
-        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
-    ]
-    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
-
-    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
-    _SetConsoleTextAttribute.argtypes = [
-        wintypes.HANDLE,
-        wintypes.WORD,
-    ]
-    _SetConsoleTextAttribute.restype = wintypes.BOOL
-
-    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
-    _SetConsoleCursorPosition.argtypes = [
-        wintypes.HANDLE,
-        wintypes._COORD,
-    ]
-    _SetConsoleCursorPosition.restype = wintypes.BOOL
-
-    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
-    _FillConsoleOutputCharacterA.argtypes = [
-        wintypes.HANDLE,
-        c_char,
-        wintypes.DWORD,
-        wintypes._COORD,
-        POINTER(wintypes.DWORD),
-    ]
-    _FillConsoleOutputCharacterA.restype = wintypes.BOOL
-
-    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
-    _FillConsoleOutputAttribute.argtypes = [
-        wintypes.HANDLE,
-        wintypes.WORD,
-        wintypes.DWORD,
-        wintypes._COORD,
-        POINTER(wintypes.DWORD),
-    ]
-    _FillConsoleOutputAttribute.restype = wintypes.BOOL
-
-    handles = {
-        STDOUT: _GetStdHandle(STDOUT),
-        STDERR: _GetStdHandle(STDERR),
-    }
-
-    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
-        handle = handles[stream_id]
-        csbi = CONSOLE_SCREEN_BUFFER_INFO()
-        success = _GetConsoleScreenBufferInfo(
-            handle, byref(csbi))
-        return csbi
-
-    def SetConsoleTextAttribute(stream_id, attrs):
-        handle = handles[stream_id]
-        return _SetConsoleTextAttribute(handle, attrs)
-
-    def SetConsoleCursorPosition(stream_id, position):
-        position = wintypes._COORD(*position)
-        # If the position is out of range, do nothing.
-        if position.Y <= 0 or position.X <= 0:
-            return
-        # Adjust for Windows' SetConsoleCursorPosition:
-        #    1. being 0-based, while ANSI is 1-based.
-        #    2. expecting (x,y), while ANSI uses (y,x).
-        adjusted_position = wintypes._COORD(position.Y - 1, position.X - 1)
-        # Adjust for viewport's scroll position
-        sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
-        adjusted_position.Y += sr.Top
-        adjusted_position.X += sr.Left
-        # Resume normal processing
-        handle = handles[stream_id]
-        return _SetConsoleCursorPosition(handle, adjusted_position)
-
-    def FillConsoleOutputCharacter(stream_id, char, length, start):
-        handle = handles[stream_id]
-        char = c_char(char)
-        length = wintypes.DWORD(length)
-        num_written = wintypes.DWORD(0)
-        # Note that this is hard-coded for ANSI (vs wide) bytes.
-        success = _FillConsoleOutputCharacterA(
-            handle, char, length, start, byref(num_written))
-        return num_written.value
-
-    def FillConsoleOutputAttribute(stream_id, attr, length, start):
-        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
-        handle = handles[stream_id]
-        attribute = wintypes.WORD(attr)
-        length = wintypes.DWORD(length)
-        num_written = wintypes.DWORD(0)
-        # Note that this is hard-coded for ANSI (vs wide) bytes.
-        return _FillConsoleOutputAttribute(
-            handle, attribute, length, start, byref(num_written))
diff --git a/tools/swarming_client/third_party/colorama/winterm.py b/tools/swarming_client/third_party/colorama/winterm.py
deleted file mode 100644
index 2708811..0000000
--- a/tools/swarming_client/third_party/colorama/winterm.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-from . import win32
-
-
-# from wincon.h
-class WinColor(object):
-    BLACK   = 0
-    BLUE    = 1
-    GREEN   = 2
-    CYAN    = 3
-    RED     = 4
-    MAGENTA = 5
-    YELLOW  = 6
-    GREY    = 7
-
-# from wincon.h
-class WinStyle(object):
-    NORMAL = 0x00 # dim text, dim background
-    BRIGHT = 0x08 # bright text, dim background
-
-
-class WinTerm(object):
-
-    def __init__(self):
-        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
-        self.set_attrs(self._default)
-        self._default_fore = self._fore
-        self._default_back = self._back
-        self._default_style = self._style
-
-    def get_attrs(self):
-        return self._fore + self._back * 16 + self._style
-
-    def set_attrs(self, value):
-        self._fore = value & 7
-        self._back = (value >> 4) & 7
-        self._style = value & WinStyle.BRIGHT
-
-    def reset_all(self, on_stderr=None):
-        self.set_attrs(self._default)
-        self.set_console(attrs=self._default)
-
-    def fore(self, fore=None, on_stderr=False):
-        if fore is None:
-            fore = self._default_fore
-        self._fore = fore
-        self.set_console(on_stderr=on_stderr)
-
-    def back(self, back=None, on_stderr=False):
-        if back is None:
-            back = self._default_back
-        self._back = back
-        self.set_console(on_stderr=on_stderr)
-
-    def style(self, style=None, on_stderr=False):
-        if style is None:
-            style = self._default_style
-        self._style = style
-        self.set_console(on_stderr=on_stderr)
-
-    def set_console(self, attrs=None, on_stderr=False):
-        if attrs is None:
-            attrs = self.get_attrs()
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        win32.SetConsoleTextAttribute(handle, attrs)
-
-    def get_position(self, handle):
-        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
-        # Windows coordinates are 0-based, while win32.SetConsoleCursorPosition
-        # expects 1-based, so shift both axes by one.
-        position.X += 1
-        position.Y += 1
-        return position
-    
-    def set_cursor_position(self, position=None, on_stderr=False):
-        if position is None:
-            #I'm not currently tracking the position, so there is no default.
-            #position = self.get_position()
-            return
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        win32.SetConsoleCursorPosition(handle, position)
-
-    def cursor_up(self, num_rows=0, on_stderr=False):
-        if num_rows == 0:
-            return
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        position = self.get_position(handle)
-        adjusted_position = (position.Y - num_rows, position.X)
-        self.set_cursor_position(adjusted_position, on_stderr)
-
-    def erase_data(self, mode=0, on_stderr=False):
-        # 0 (or None) should clear from the cursor to the end of the screen.
-        # 1 should clear from the cursor to the beginning of the screen.
-        # 2 should clear the entire screen. (And maybe move cursor to (1,1)?)
-        #
-        # At the moment, I only support mode 2. From looking at the API, it
-        #    should be possible to calculate a different number of bytes to clear,
-        #    and to do so relative to the cursor position.
-        if mode[0] not in (2,):
-            return
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        # here's where we'll home the cursor
-        coord_screen = win32.COORD(0,0)
-        csbi = win32.GetConsoleScreenBufferInfo(handle)
-        # get the number of character cells in the current buffer
-        dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
-        # fill the entire screen with blanks
-        win32.FillConsoleOutputCharacter(handle, ' ', dw_con_size, coord_screen)
-        # now set the buffer's attributes accordingly
-        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen );
-        # put the cursor at (0, 0)
-        win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
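A small editorial sketch of the attribute packing used by get_attrs()/set_attrs() above (the example byte is an assumption, not taken from the module):

    value = 0x1E                                   # a wincon attribute byte
    fore, back, style = value & 7, (value >> 4) & 7, value & 0x08
    assert (fore, back, style) == (6, 1, 0x08)     # YELLOW fore, BLUE back, BRIGHT
    assert fore + back * 16 + style == value       # get_attrs() re-packs the same byte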
diff --git a/tools/swarming_client/third_party/depot_tools/README.swarming b/tools/swarming_client/third_party/depot_tools/README.swarming
deleted file mode 100644
index 98fbc77..0000000
--- a/tools/swarming_client/third_party/depot_tools/README.swarming
+++ /dev/null
@@ -1,11 +0,0 @@
-Name: Chromium depot_tools
-Short Name: depot_tools
-URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git
-Revision: 9f7fd121125278b61c89d159fe35d739aa95a9e0
-License: BSD
-
-Description:
-N/A
-
-Local Modifications:
-Kept only a small subset of the files.
diff --git a/tools/swarming_client/third_party/depot_tools/__init__.py b/tools/swarming_client/third_party/depot_tools/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/depot_tools/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/depot_tools/auto_stub.py b/tools/swarming_client/third_party/depot_tools/auto_stub.py
deleted file mode 100644
index c5a3495..0000000
--- a/tools/swarming_client/third_party/depot_tools/auto_stub.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__version__ = '1.0'
-
-import collections
-import inspect
-import unittest
-
-
-class AutoStubMixIn(object):
-  """Automatically restores stubbed functions on unit test tearDown.
-
-  It's an extremely lightweight mocking class that doesn't require bookkeeping.
-  """
-  _saved = None
-
-  def mock(self, obj, member, mock):
-    self._saved = self._saved or collections.OrderedDict()
-    old_value = self._saved.setdefault(
-        obj, collections.OrderedDict()).setdefault(member, getattr(obj, member))
-    setattr(obj, member, mock)
-    return old_value
-
-  def tearDown(self):
-    """Restore all the mocked members."""
-    if self._saved:
-      for obj, items in self._saved.iteritems():
-        for member, previous_value in items.iteritems():
-          setattr(obj, member, previous_value)
-
-
-class SimpleMock(object):
-  """Really simple manual class mock."""
-  def __init__(self, unit_test):
-    """Do not call __init__ if you want to use the global call list to detect
-    ordering across different instances.
-    """
-    self.calls = []
-    self.unit_test = unit_test
-    self.assertEqual = unit_test.assertEqual
-
-  def pop_calls(self):
-    """Returns the list of calls up to date.
-
-    Good to do self.assertEqual(expected, mock.pop_calls()).
-    """
-    calls = self.calls
-    self.calls = []
-    return calls
-
-  def check_calls(self, expected):
-    self.assertEqual(expected, self.pop_calls())
-
-  def _register_call(self, *args, **kwargs):
-    """Registers the name of the caller function."""
-    caller_name = kwargs.pop('caller_name', None) or inspect.stack()[1][3]
-    str_args = ', '.join(repr(arg) for arg in args)
-    str_kwargs = ', '.join('%s=%r' % (k, v) for k, v in kwargs.iteritems())
-    self.calls.append('%s(%s)' % (
-        caller_name, ', '.join(filter(None, [str_args, str_kwargs]))))
-
-
-class TestCase(unittest.TestCase, AutoStubMixIn):
-  """Adds self.mock() and self.has_failed() to a TestCase."""
-  def tearDown(self):
-    AutoStubMixIn.tearDown(self)
-    unittest.TestCase.tearDown(self)
-
-  def has_failed(self):
-    """Returns True if the test has failed."""
-    return not self._resultForDoCleanups.wasSuccessful()
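The deleted auto_stub.py backs swarming_client's own tests: AutoStubMixIn records every stubbed attribute and restores it in tearDown(). A minimal sketch of how such a TestCase might be used (Python 2 era, matching the iteritems() calls above); the os.getcwd target and the import path are illustrative assumptions, not part of the deleted code:

import os
import unittest

from depot_tools import auto_stub  # assumed to be importable, as in swarming_client


class ExampleTest(auto_stub.TestCase):
  def test_stubbed_cwd(self):
    # mock() saves the original attribute; AutoStubMixIn.tearDown() restores it.
    self.mock(os, 'getcwd', lambda: '/fake/dir')
    self.assertEqual('/fake/dir', os.getcwd())


if __name__ == '__main__':
  unittest.main()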
diff --git a/tools/swarming_client/third_party/depot_tools/fix_encoding.py b/tools/swarming_client/third_party/depot_tools/fix_encoding.py
deleted file mode 100644
index 5da9135..0000000
--- a/tools/swarming_client/third_party/depot_tools/fix_encoding.py
+++ /dev/null
@@ -1,371 +0,0 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Collection of functions and classes to fix various encoding problems on
-multiple platforms with python.
-"""
-
-import codecs
-import locale
-import os
-import sys
-
-
-# Prevents initializing multiple times.
-_SYS_ARGV_PROCESSED = False
-
-
-def complain(message):
-  """If any exception occurs in this file, we'll probably try to print it
-  on stderr, which makes for frustrating debugging if stderr is directed
-  to our wrapper. So be paranoid about catching errors and reporting them
-  to sys.__stderr__, so that the user has a higher chance to see them.
-  """
-  print >> sys.__stderr__, (
-      isinstance(message, str) and message or repr(message))
-
-
-def fix_default_encoding():
-  """Forces utf8 solidly on all platforms.
-
-  By default python execution environment is lazy and defaults to ascii
-  encoding.
-
-  http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/
-  """
-  if sys.getdefaultencoding() == 'utf-8':
-    return False
-
-  # Regenerate setdefaultencoding.
-  reload(sys)
-  # Module 'sys' has no 'setdefaultencoding' member
-  # pylint: disable=E1101
-  sys.setdefaultencoding('utf-8')
-  for attr in dir(locale):
-    if attr[0:3] != 'LC_':
-      continue
-    aref = getattr(locale, attr)
-    try:
-      locale.setlocale(aref, '')
-    except locale.Error:
-      continue
-    try:
-      lang = locale.getlocale(aref)[0]
-    except (TypeError, ValueError):
-      continue
-    if lang:
-      try:
-        locale.setlocale(aref, (lang, 'UTF-8'))
-      except locale.Error:
-        os.environ[attr] = lang + '.UTF-8'
-  try:
-    locale.setlocale(locale.LC_ALL, '')
-  except locale.Error:
-    pass
-  return True
-
-
-###############################
-# Windows specific
-
-
-def fix_win_sys_argv(encoding):
-  """Converts sys.argv to 'encoding' encoded string.
-
-  utf-8 is recommended.
-
-  Works around <http://bugs.python.org/issue2128>.
-  """
-  global _SYS_ARGV_PROCESSED
-  if _SYS_ARGV_PROCESSED:
-    return False
-
-  # These types are available on linux but not Mac.
-  # pylint: disable=E0611,F0401
-  from ctypes import byref, c_int, POINTER, windll, WINFUNCTYPE
-  from ctypes.wintypes import LPCWSTR, LPWSTR
-
-  # <http://msdn.microsoft.com/en-us/library/ms683156.aspx>
-  GetCommandLineW = WINFUNCTYPE(LPWSTR)(('GetCommandLineW', windll.kernel32))
-  # <http://msdn.microsoft.com/en-us/library/bb776391.aspx>
-  CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
-      ('CommandLineToArgvW', windll.shell32))
-
-  argc = c_int(0)
-  argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
-  argv = [
-      argv_unicode[i].encode(encoding, 'replace')
-      for i in xrange(0, argc.value)]
-
-  if not hasattr(sys, 'frozen'):
-    # If this is an executable produced by py2exe or bbfreeze, then it
-    # will have been invoked directly. Otherwise, unicode_argv[0] is the
-    # Python interpreter, so skip that.
-    argv = argv[1:]
-
-    # Also skip option arguments to the Python interpreter.
-    while len(argv) > 0:
-      arg = argv[0]
-      if not arg.startswith(u'-') or arg == u'-':
-        break
-      argv = argv[1:]
-      if arg == u'-m':
-        # sys.argv[0] should really be the absolute path of the
-        # module source, but never mind.
-        break
-      if arg == u'-c':
-        argv[0] = u'-c'
-        break
-  sys.argv = argv
-  _SYS_ARGV_PROCESSED = True
-  return True
-
-
-def fix_win_codec():
-  """Works around <http://bugs.python.org/issue6058>."""
-  # <http://msdn.microsoft.com/en-us/library/dd317756.aspx>
-  try:
-    codecs.lookup('cp65001')
-    return False
-  except LookupError:
-    codecs.register(
-        lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
-    return True
-
-
-class WinUnicodeOutputBase(object):
-  """Base class to adapt sys.stdout or sys.stderr to behave correctly on
-  Windows.
-
-  Setting encoding to utf-8 is recommended.
-  """
-  def __init__(self, fileno, name, encoding):
-    # Corresponding file handle.
-    self._fileno = fileno
-    self.encoding = encoding
-    self.name = name
-
-    self.closed = False
-    self.softspace = False
-    self.mode = 'w'
-
-  @staticmethod
-  def isatty():
-    return False
-
-  def close(self):
-    # Don't really close the handle, that would only cause problems.
-    self.closed = True
-
-  def fileno(self):
-    return self._fileno
-
-  def flush(self):
-    raise NotImplementedError()
-
-  def write(self, text):
-    raise NotImplementedError()
-
-  def writelines(self, lines):
-    try:
-      for line in lines:
-        self.write(line)
-    except Exception, e:
-      complain('%s.writelines: %r' % (self.name, e))
-      raise
-
-
-class WinUnicodeConsoleOutput(WinUnicodeOutputBase):
-  """Output adapter to a Windows Console.
-
-  Understands how to use the win32 console API.
-  """
-  def __init__(self, console_handle, fileno, stream_name, encoding):
-    super(WinUnicodeConsoleOutput, self).__init__(
-        fileno, '<Unicode console %s>' % stream_name, encoding)
-    # Handle to use for WriteConsoleW
-    self._console_handle = console_handle
-
-    # Loads the necessary function.
-    # These types are available on linux but not Mac.
-    # pylint: disable=E0611,F0401
-    from ctypes import byref, GetLastError, POINTER, windll, WINFUNCTYPE
-    from ctypes.wintypes import BOOL, DWORD, HANDLE, LPWSTR
-    from ctypes.wintypes import LPVOID  # pylint: disable=E0611
-
-    self._DWORD = DWORD
-    self._byref = byref
-
-    # <http://msdn.microsoft.com/en-us/library/ms687401.aspx>
-    self._WriteConsoleW = WINFUNCTYPE(
-        BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID)(
-            ('WriteConsoleW', windll.kernel32))
-    self._GetLastError = GetLastError
-
-  def flush(self):
-    # No need to flush the console since it's immediate.
-    pass
-
-  def write(self, text):
-    try:
-      if not isinstance(text, unicode):
-        # Convert to unicode.
-        text = str(text).decode(self.encoding, 'replace')
-      remaining = len(text)
-      while remaining > 0:
-        n = self._DWORD(0)
-        # There is a shorter-than-documented limitation on the length of the
-        # string passed to WriteConsoleW. See
-        # <http://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
-        retval = self._WriteConsoleW(
-            self._console_handle, text,
-            min(remaining, 10000),
-            self._byref(n), None)
-        if retval == 0 or n.value == 0:
-          raise IOError(
-              'WriteConsoleW returned %r, n.value = %r, last error = %r' % (
-                retval, n.value, self._GetLastError()))
-        remaining -= n.value
-        if not remaining:
-          break
-        text = text[int(n.value):]
-    except Exception, e:
-      complain('%s.write: %r' % (self.name, e))
-      raise
-
-
-class WinUnicodeOutput(WinUnicodeOutputBase):
-  """Output adaptor to a file output on Windows.
-
-  If the standard FileWrite function is used, it will be encoded in the current
-  code page. WriteConsoleW() permits writing any character.
-  """
-  def __init__(self, stream, fileno, encoding):
-    super(WinUnicodeOutput, self).__init__(
-        fileno, '<Unicode redirected %s>' % stream.name, encoding)
-    # Output stream
-    self._stream = stream
-
-    # Flush right now.
-    self.flush()
-
-  def flush(self):
-    try:
-      self._stream.flush()
-    except Exception, e:
-      complain('%s.flush: %r from %r' % (self.name, e, self._stream))
-      raise
-
-  def write(self, text):
-    try:
-      if isinstance(text, unicode):
-        # Replace characters that cannot be printed instead of failing.
-        text = text.encode(self.encoding, 'replace')
-      self._stream.write(text)
-    except Exception, e:
-      complain('%s.write: %r' % (self.name, e))
-      raise
-
-
-def win_handle_is_a_console(handle):
-  """Returns True if a Windows file handle is a handle to a console."""
-  # These types are available on linux but not Mac.
-  # pylint: disable=E0611,F0401
-  from ctypes import byref, POINTER, windll, WINFUNCTYPE
-  from ctypes.wintypes import BOOL, DWORD, HANDLE
-
-  FILE_TYPE_CHAR   = 0x0002
-  FILE_TYPE_REMOTE = 0x8000
-  INVALID_HANDLE_VALUE = DWORD(-1).value
-
-  # <http://msdn.microsoft.com/en-us/library/ms683167.aspx>
-  GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(
-      ('GetConsoleMode', windll.kernel32))
-  # <http://msdn.microsoft.com/en-us/library/aa364960.aspx>
-  GetFileType = WINFUNCTYPE(DWORD, DWORD)(('GetFileType', windll.kernel32))
-
-  # GetStdHandle returns INVALID_HANDLE_VALUE, NULL, or a valid handle.
-  if handle == INVALID_HANDLE_VALUE or handle is None:
-    return False
-  return (
-      (GetFileType(handle) & ~FILE_TYPE_REMOTE) == FILE_TYPE_CHAR and
-       GetConsoleMode(handle, byref(DWORD())))
-
-
-def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding):
-  """Returns a unicode-compatible stream.
-
-  This function will return a direct-Console writing object only if:
-  - the file number is the expected console file number
-  - the handle is the expected file handle
-  - the 'real' handle is in fact a handle to a console.
-  """
-  old_fileno = getattr(stream, 'fileno', lambda: None)()
-  if old_fileno == excepted_fileno:
-    # These types are available on linux but not Mac.
-    # pylint: disable=E0611,F0401
-    from ctypes import windll, WINFUNCTYPE
-    from ctypes.wintypes import DWORD, HANDLE
-
-    # <http://msdn.microsoft.com/en-us/library/ms683231.aspx>
-    GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(('GetStdHandle', windll.kernel32))
-
-    real_output_handle = GetStdHandle(DWORD(output_handle))
-    if win_handle_is_a_console(real_output_handle):
-      # It's a console.
-      return WinUnicodeConsoleOutput(
-          real_output_handle, old_fileno, stream.name, encoding)
-
-  # It's something else. Create an auto-encoding stream.
-  return WinUnicodeOutput(stream, old_fileno, encoding)
-
-
-def fix_win_console(encoding):
-  """Makes Unicode console output work independently of the current code page.
-
-  This also fixes <http://bugs.python.org/issue1602>.
-  Credit to Michael Kaplan
-  <http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx> and
-  TZOmegaTZIOY
-  <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
-  """
-  if (isinstance(sys.stdout, WinUnicodeOutputBase) or
-      isinstance(sys.stderr, WinUnicodeOutputBase)):
-    return False
-
-  try:
-    # SetConsoleCP and SetConsoleOutputCP could be used to change the code page
-    # but it's not really useful since the code here is using WriteConsoleW().
-    # Also, changing the code page is 'permanent' to the console and needs to be
-    # reverted manually.
-    # In practice one needs to set the console font to a TTF font to be able to
-    # see all the characters but it failed for me in practice. In any case, it
-    # won't throw any exception when printing, which is the important part.
-    # -11 and -12 are defined in stdio.h
-    sys.stdout = win_get_unicode_stream(sys.stdout, 1, -11, encoding)
-    sys.stderr = win_get_unicode_stream(sys.stderr, 2, -12, encoding)
-    # TODO(maruel): Do sys.stdin with ReadConsoleW(). Albeit the limitation is
-    # "It doesn't appear to be possible to read Unicode characters in UTF-8
-    # mode" and this appears to be a limitation of cmd.exe.
-  except Exception, e:
-    complain('exception %r while fixing up sys.stdout and sys.stderr' % e)
-  return True
-
-
-def fix_encoding():
-  """Fixes various encoding problems on all platforms.
-
-  Should be called at the very beginning of the process.
-  """
-  ret = True
-  if sys.platform == 'win32':
-    ret &= fix_win_codec()
-
-  ret &= fix_default_encoding()
-
-  if sys.platform == 'win32':
-    encoding = sys.getdefaultencoding()
-    ret &= fix_win_sys_argv(encoding)
-    ret &= fix_win_console(encoding)
-  return ret
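fix_encoding() above is designed to be called once, at the top of main(), before anything is printed. A minimal sketch under that assumption (the import path mirrors swarming_client's layout and is itself an assumption):

import sys

from depot_tools import fix_encoding  # assumed import path


def main():
  # Registers the cp65001 codec, forces utf-8 defaults and, on Windows,
  # replaces sys.stdout/sys.stderr with console-aware writers.
  fix_encoding.fix_encoding()
  print u'safe to print non-ASCII now: \u20ac'
  return 0


if __name__ == '__main__':
  sys.exit(main())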
diff --git a/tools/swarming_client/third_party/depot_tools/subcommand.py b/tools/swarming_client/third_party/depot_tools/subcommand.py
deleted file mode 100644
index 0201c90..0000000
--- a/tools/swarming_client/third_party/depot_tools/subcommand.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Manages subcommands in a script.
-
-Each subcommand should look like this:
-  @usage('[pet name]')
-  def CMDpet(parser, args):
-    '''Prints a pet.
-
-    Many people like pets. This command prints a pet for your pleasure.
-    '''
-    parser.add_option('--color', help='color of your pet')
-    options, args = parser.parse_args(args)
-    if len(args) != 1:
-      parser.error('A pet name is required')
-    pet = args[0]
-    if options.color:
-      print('Nice %s %s' % (options.color, pet))
-    else:
-      print('Nice %s' % pet)
-    return 0
-
-Explanation:
-  - usage decorator alters the 'usage: %prog' line in the command's help.
-  - docstring is used to both short help line and long help line.
-  - parser can be augmented with arguments.
-  - return the exit code.
-  - Every function in the specified module with a name starting with 'CMD' will
-    be a subcommand.
-  - The module's docstring will be used in the default 'help' page.
-  - If a command has no docstring, it will not be listed in the 'help' page.
-    Useful to keep compatibility commands around or aliases.
-  - If a command is an alias to another one, it won't be documented. E.g.:
-      CMDoldname = CMDnewcmd
-    will result in oldname not being documented but supported and redirecting to
-    newcmd. Make it a real function that calls the old function if you want it
-    to be documented.
-  - CMDfoo_bar will be command 'foo-bar'.
-"""
-
-import difflib
-import sys
-import textwrap
-
-
-def usage(more):
-  """Adds a 'usage_more' property to a CMD function."""
-  def hook(fn):
-    fn.usage_more = more
-    return fn
-  return hook
-
-
-def epilog(text):
-  """Adds an 'epilog' property to a CMD function.
-
-  It will be shown in the epilog. Usually useful for examples.
-  """
-  def hook(fn):
-    fn.epilog = text
-    return fn
-  return hook
-
-
-def CMDhelp(parser, args):
-  """Prints list of commands or help for a specific command."""
-  # This is the default help implementation. It can be disabled or overridden if
-  # wanted.
-  if not any(i in ('-h', '--help') for i in args):
-    args = args + ['--help']
-  _, args = parser.parse_args(args)
-  # Never gets there.
-  assert False
-
-
-def _get_color_module():
-  """Returns the colorama module if available.
-
-  If so, assumes colors are supported and return the module handle.
-  """
-  return sys.modules.get('colorama') or sys.modules.get('third_party.colorama')
-
-
-def _function_to_name(name):
-  """Returns the name of a CMD function."""
-  return name[3:].replace('_', '-')
-
-
-class CommandDispatcher(object):
-  def __init__(self, module):
-    """module is the name of the main python module where to look for commands.
-
-    The python builtin variable __name__ MUST be used for |module|. If the
-    script is executed in the form 'python script.py', __name__ == '__main__'
-    and sys.modules['script'] doesn't exist. On the other hand if it is unit
-    tested, __main__ will be the unit test's module so it has to reference
-    itself with 'script'. __name__ always matches the right value.
-    """
-    self.module = sys.modules[module]
-
-  def enumerate_commands(self):
-    """Returns a dict of command and their handling function.
-
-    The commands must be in the '__main__' modules. To import a command from a
-    submodule, use:
-      from mysubcommand import CMDfoo
-
-    Automatically adds 'help' if not already defined.
-
-    Normalizes '_' in the commands to '-'.
-
-    A command can be effectively disabled by defining a global variable to None,
-    e.g.:
-      CMDhelp = None
-    """
-    cmds = dict(
-        (_function_to_name(name), getattr(self.module, name))
-        for name in dir(self.module) if name.startswith('CMD'))
-    cmds.setdefault('help', CMDhelp)
-    return cmds
-
-  def find_nearest_command(self, name_asked):
-    """Retrieves the function to handle a command as supplied by the user.
-
-    It automatically tries to guess the _intended command_ by handling typos
-    and/or incomplete names.
-    """
-    commands = self.enumerate_commands()
-    if name_asked in commands:
-      return commands[name_asked]
-
-    # An exact match was not found. Try to be smart and look if there's
-    # something similar.
-    commands_with_prefix = [c for c in commands if c.startswith(name_asked)]
-    if len(commands_with_prefix) == 1:
-      return commands[commands_with_prefix[0]]
-
-    # A #closeenough approximation of levenshtein distance.
-    def close_enough(a, b):
-      return difflib.SequenceMatcher(a=a, b=b).ratio()
-
-    hamming_commands = sorted(
-        ((close_enough(c, name_asked), c) for c in commands),
-        reverse=True)
-    if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
-      # Too ambiguous.
-      return
-
-    if hamming_commands[0][0] < 0.8:
-      # Not similar enough. Don't be a fool and run a random command.
-      return
-
-    return commands[hamming_commands[0][1]]
-
-  def _gen_commands_list(self):
-    """Generates the short list of supported commands."""
-    commands = self.enumerate_commands()
-    docs = sorted(
-        (cmd_name, self._create_command_summary(cmd_name, handler))
-        for cmd_name, handler in commands.iteritems())
-    # Skip commands without a docstring.
-    docs = [i for i in docs if i[1]]
-    # Then calculate maximum length for alignment:
-    length = max(len(c) for c in commands)
-
-    # Look if color is supported.
-    colors = _get_color_module()
-    green = reset = ''
-    if colors:
-      green = colors.Fore.GREEN
-      reset = colors.Fore.RESET
-    return (
-        'Commands are:\n' +
-        ''.join(
-            '  %s%-*s%s %s\n' % (green, length, cmd_name, reset, doc)
-            for cmd_name, doc in docs))
-
-  def _add_command_usage(self, parser, command):
-    """Modifies an OptionParser object with the function's documentation."""
-    cmd_name = _function_to_name(command.__name__)
-    if cmd_name == 'help':
-      cmd_name = '<command>'
-      # Use the module's docstring as the description for the 'help' command if
-      # available.
-      parser.description = (self.module.__doc__ or '').rstrip()
-      if parser.description:
-        parser.description += '\n\n'
-      parser.description += self._gen_commands_list()
-      # Do not touch epilog.
-    else:
-      # Use the command's docstring if available. For commands, unlike module
-      # docstring, realign.
-      lines = (command.__doc__ or '').rstrip().splitlines()
-      if lines[:1]:
-        rest = textwrap.dedent('\n'.join(lines[1:]))
-        parser.description = '\n'.join((lines[0], rest))
-      else:
-        parser.description = lines[0] if lines else ''
-      if parser.description:
-        parser.description += '\n'
-      parser.epilog = getattr(command, 'epilog', None)
-      if parser.epilog:
-        parser.epilog = '\n' + parser.epilog.strip() + '\n'
-
-    more = getattr(command, 'usage_more', '')
-    extra = '' if not more else ' ' + more
-    parser.set_usage('usage: %%prog %s [options]%s' % (cmd_name, extra))
-
-  @staticmethod
-  def _create_command_summary(cmd_name, command):
-    """Creates a oneliner summary from the command's docstring."""
-    if cmd_name != _function_to_name(command.__name__):
-      # Skip aliases. For example using at module level:
-      # CMDfoo = CMDbar
-      return ''
-    doc = command.__doc__ or ''
-    line = doc.split('\n', 1)[0].rstrip('.')
-    if not line:
-      return line
-    return (line[0].lower() + line[1:]).strip()
-
-  def execute(self, parser, args):
-    """Dispatches execution to the right command.
-
-    Falls back to 'help' if not disabled.
-    """
-    # Unconditionally disable format_description() and format_epilog().
-    # Technically, a formatter should be used but it's not worth (yet) the
-    # trouble.
-    parser.format_description = lambda _: parser.description or ''
-    parser.format_epilog = lambda _: parser.epilog or ''
-
-    if args:
-      if args[0] in ('-h', '--help') and len(args) > 1:
-        # Reverse the argument order so 'tool --help cmd' is rewritten to
-        # 'tool cmd --help'.
-        args = [args[1], args[0]] + args[2:]
-      command = self.find_nearest_command(args[0])
-      if command:
-        if command.__name__ == 'CMDhelp' and len(args) > 1:
-          # Reverse the argument order so 'tool help cmd' is rewritten to
-          # 'tool cmd --help'. Do it here since we want 'tool hel cmd' to work
-          # too.
-          args = [args[1], '--help'] + args[2:]
-          command = self.find_nearest_command(args[0]) or command
-
-        # "fix" the usage and the description now that we know the subcommand.
-        self._add_command_usage(parser, command)
-        return command(parser, args[1:])
-
-    cmdhelp = self.enumerate_commands().get('help')
-    if cmdhelp:
-      # Not a known command. Default to help.
-      self._add_command_usage(parser, cmdhelp)
-      return cmdhelp(parser, args)
-
-    # Nothing can be done.
-    return 2
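Putting the docstring at the top of subcommand.py into practice, a minimal sketch of a tool wired through CommandDispatcher; the CMDgreet command, its option and the import path are illustrative, not part of the deleted code:

#!/usr/bin/env python
"""Example tool built on subcommand."""

import optparse
import sys

from depot_tools import subcommand  # assumed import path


def CMDgreet(parser, args):
  """Greets someone by name."""
  parser.add_option('--shout', action='store_true', help='print in upper case')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('A name is required')
  msg = 'Hello, %s!' % args[0]
  print(msg.upper() if options.shout else msg)
  return 0


def main(argv):
  # __name__ lets the dispatcher enumerate the CMD* functions in this module;
  # 'help' is added automatically.
  dispatcher = subcommand.CommandDispatcher(__name__)
  return dispatcher.execute(optparse.OptionParser(), argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))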
diff --git a/tools/swarming_client/third_party/google/README.swarming b/tools/swarming_client/third_party/google/README.swarming
deleted file mode 100644
index 1d4613c..0000000
--- a/tools/swarming_client/third_party/google/README.swarming
+++ /dev/null
@@ -1,41 +0,0 @@
-Name: Protocol Buffers
-Short Name: protobuf
-Version: 3.0.0
-Revision: e8ae137c96444ea313485ed1118c5e43b2099cf1
-Home-page: https://github.com/google/protobuf
-License: New BSD License
-
-Description:
-Protocol Buffers are Google's data interchange format.
-
-Local Modifications:
-- Installed using glyco (see ../README.txt for more info)
-  - run 'python setup.py build' before 'glyco pack ...', to build *_pb.py files.
-- Removed protobuf/internal/*_test.py
-- Added README.swarming
-- Modified __init__.py files as follows:
-
-    diff --git a/__init__.py b/__init__.py
-    index e69de29..de40ea7 100644
-    --- a/__init__.py
-    +++ b/__init__.py
-    @@ -0,0 +1 @@
-    +try:
-    +  __import__('pkg_resources').declare_namespace(__name__)
-    +except ImportError:
-    +  __path__ = __import__('pkgutil').extend_path(__path__, __name__)
-
-    diff --git a/protobuf/__init__.py b/protobuf/__init__.py
-    index 6210a40..4a74785 100644
-    --- a/protobuf/__init__.py
-    +++ b/protobuf/__init__.py
-    @@ -31,9 +31,3 @@
-     # Copyright 2007 Google Inc. All Rights Reserved.
-
-     __version__ = '3.0.0'
-    -
-    -if __name__ != '__main__':
-    -  try:
-    -    __import__('pkg_resources').declare_namespace(__name__)
-    -  except ImportError:
-    -    __path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tools/swarming_client/third_party/google/__init__.py b/tools/swarming_client/third_party/google/__init__.py
deleted file mode 100644
index d2d2a43..0000000
--- a/tools/swarming_client/third_party/google/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
-    __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
-    __path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tools/swarming_client/third_party/google/protobuf/__init__.py b/tools/swarming_client/third_party/google/protobuf/__init__.py
deleted file mode 100644
index 4a74785..0000000
--- a/tools/swarming_client/third_party/google/protobuf/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Copyright 2007 Google Inc. All Rights Reserved.
-
-__version__ = '3.0.0'
diff --git a/tools/swarming_client/third_party/google/protobuf/any_pb2.py b/tools/swarming_client/third_party/google/protobuf/any_pb2.py
deleted file mode 100644
index f219c9d..0000000
--- a/tools/swarming_client/third_party/google/protobuf/any_pb2.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/any.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/any.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42r\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z%github.com/golang/protobuf/ptypes/any\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_ANY = _descriptor.Descriptor(
-  name='Any',
-  full_name='google.protobuf.Any',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='type_url', full_name='google.protobuf.Any.type_url', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.Any.value', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=46,
-  serialized_end=84,
-)
-
-DESCRIPTOR.message_types_by_name['Any'] = _ANY
-
-Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), dict(
-  DESCRIPTOR = _ANY,
-  __module__ = 'google.protobuf.any_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Any)
-  ))
-_sym_db.RegisterMessage(Any)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010AnyProtoP\001Z%github.com/golang/protobuf/ptypes/any\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
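The generated Any message above is just a pair of fields: a type_url string and the packed message bytes in value. A minimal round-trip sketch setting the fields by hand (the type_url and payload bytes are made up for illustration):

from google.protobuf import any_pb2

any_msg = any_pb2.Any()
any_msg.type_url = 'type.googleapis.com/google.protobuf.Mixin'  # illustrative
any_msg.value = b'\n\x05mixin'  # pre-serialized payload bytes
wire = any_msg.SerializeToString()

decoded = any_pb2.Any()
decoded.ParseFromString(wire)
assert decoded.type_url == any_msg.type_url
assert decoded.value == any_msg.value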
diff --git a/tools/swarming_client/third_party/google/protobuf/any_test_pb2.py b/tools/swarming_client/third_party/google/protobuf/any_test_pb2.py
deleted file mode 100644
index 64f90e8..0000000
--- a/tools/swarming_client/third_party/google/protobuf/any_test_pb2.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/any_test.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/any_test.proto',
-  package='protobuf_unittest',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1egoogle/protobuf/any_test.proto\x12\x11protobuf_unittest\x1a\x19google/protobuf/any.proto\"y\n\x07TestAny\x12\x13\n\x0bint32_value\x18\x01 \x01(\x05\x12\'\n\tany_value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x30\n\x12repeated_any_value\x18\x03 \x03(\x0b\x32\x14.google.protobuf.Anyb\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_TESTANY = _descriptor.Descriptor(
-  name='TestAny',
-  full_name='protobuf_unittest.TestAny',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='int32_value', full_name='protobuf_unittest.TestAny.int32_value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='any_value', full_name='protobuf_unittest.TestAny.any_value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_any_value', full_name='protobuf_unittest.TestAny.repeated_any_value', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=80,
-  serialized_end=201,
-)
-
-_TESTANY.fields_by_name['any_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_TESTANY.fields_by_name['repeated_any_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
-
-TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict(
-  DESCRIPTOR = _TESTANY,
-  __module__ = 'google.protobuf.any_test_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAny)
-  ))
-_sym_db.RegisterMessage(TestAny)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/api_pb2.py b/tools/swarming_client/third_party/google/protobuf/api_pb2.py
deleted file mode 100644
index 5a0bc1b..0000000
--- a/tools/swarming_client/third_party/google/protobuf/api_pb2.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/api.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
-from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/api.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBK\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,google_dot_protobuf_dot_type__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_API = _descriptor.Descriptor(
-  name='Api',
-  full_name='google.protobuf.Api',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Api.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='methods', full_name='google.protobuf.Api.methods', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.Api.options', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='version', full_name='google.protobuf.Api.version', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_context', full_name='google.protobuf.Api.source_context', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='mixins', full_name='google.protobuf.Api.mixins', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='syntax', full_name='google.protobuf.Api.syntax', index=6,
-      number=7, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=370,
-)
-
-
-_METHOD = _descriptor.Descriptor(
-  name='Method',
-  full_name='google.protobuf.Method',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Method.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='request_type_url', full_name='google.protobuf.Method.request_type_url', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='request_streaming', full_name='google.protobuf.Method.request_streaming', index=2,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='response_type_url', full_name='google.protobuf.Method.response_type_url', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='response_streaming', full_name='google.protobuf.Method.response_streaming', index=4,
-      number=5, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.Method.options', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='syntax', full_name='google.protobuf.Method.syntax', index=6,
-      number=7, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=373,
-  serialized_end=586,
-)
-
-
-_MIXIN = _descriptor.Descriptor(
-  name='Mixin',
-  full_name='google.protobuf.Mixin',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Mixin.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='root', full_name='google.protobuf.Mixin.root', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=588,
-  serialized_end=623,
-)
-
-_API.fields_by_name['methods'].message_type = _METHOD
-_API.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
-_API.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT
-_API.fields_by_name['mixins'].message_type = _MIXIN
-_API.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
-_METHOD.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
-_METHOD.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
-DESCRIPTOR.message_types_by_name['Api'] = _API
-DESCRIPTOR.message_types_by_name['Method'] = _METHOD
-DESCRIPTOR.message_types_by_name['Mixin'] = _MIXIN
-
-Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), dict(
-  DESCRIPTOR = _API,
-  __module__ = 'google.protobuf.api_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Api)
-  ))
-_sym_db.RegisterMessage(Api)
-
-Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), dict(
-  DESCRIPTOR = _METHOD,
-  __module__ = 'google.protobuf.api_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Method)
-  ))
-_sym_db.RegisterMessage(Method)
-
-Mixin = _reflection.GeneratedProtocolMessageType('Mixin', (_message.Message,), dict(
-  DESCRIPTOR = _MIXIN,
-  __module__ = 'google.protobuf.api_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Mixin)
-  ))
-_sym_db.RegisterMessage(Mixin)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010ApiProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
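api_pb2 describes services in the same reflective style: an Api holds Methods, each pointing at request and response types by type URL. A small sketch of building one in memory (all names are illustrative):

from google.protobuf import api_pb2

api = api_pb2.Api(name='example.v1.Greeter')
method = api.methods.add()
method.name = 'SayHello'
method.request_type_url = 'type.googleapis.com/example.v1.HelloRequest'
method.response_type_url = 'type.googleapis.com/example.v1.HelloReply'
assert len(api.methods) == 1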
diff --git a/tools/swarming_client/third_party/google/protobuf/compiler/__init__.py b/tools/swarming_client/third_party/google/protobuf/compiler/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/google/protobuf/compiler/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/google/protobuf/compiler/plugin_pb2.py b/tools/swarming_client/third_party/google/protobuf/compiler/plugin_pb2.py
deleted file mode 100644
index e01b7a7..0000000
--- a/tools/swarming_client/third_party/google/protobuf/compiler/plugin_pb2.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/compiler/plugin.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/compiler/plugin.proto',
-  package='google.protobuf.compiler',
-  syntax='proto2',
-  serialized_pb=_b('\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB7\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ\tplugin_go')
-  ,
-  dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_CODEGENERATORREQUEST = _descriptor.Descriptor(
-  name='CodeGeneratorRequest',
-  full_name='google.protobuf.compiler.CodeGeneratorRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
-      number=1, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
-      number=15, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=101,
-  serialized_end=226,
-)
-
-
-_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
-  name='File',
-  full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
-      number=15, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=337,
-  serialized_end=399,
-)
-
-_CODEGENERATORRESPONSE = _descriptor.Descriptor(
-  name='CodeGeneratorResponse',
-  full_name='google.protobuf.compiler.CodeGeneratorResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1,
-      number=15, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_CODEGENERATORRESPONSE_FILE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=229,
-  serialized_end=399,
-)
-
-_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEDESCRIPTORPROTO
-_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE
-_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
-DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
-DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
-
-CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), dict(
-  DESCRIPTOR = _CODEGENERATORREQUEST,
-  __module__ = 'google.protobuf.compiler.plugin_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
-  ))
-_sym_db.RegisterMessage(CodeGeneratorRequest)
-
-CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), dict(
-
-  File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict(
-    DESCRIPTOR = _CODEGENERATORRESPONSE_FILE,
-    __module__ = 'google.protobuf.compiler.plugin_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
-    ))
-  ,
-  DESCRIPTOR = _CODEGENERATORRESPONSE,
-  __module__ = 'google.protobuf.compiler.plugin_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
-  ))
-_sym_db.RegisterMessage(CodeGeneratorResponse)
-_sym_db.RegisterMessage(CodeGeneratorResponse.File)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.protobuf.compilerB\014PluginProtosZ\tplugin_go'))
-# @@protoc_insertion_point(module_scope)
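plugin_pb2 defines the protoc plugin protocol: the compiler writes a serialized CodeGeneratorRequest to the plugin's stdin and expects a serialized CodeGeneratorResponse on stdout. A minimal plugin sketch under that protocol (the emitted file name and content are placeholders):

import sys

from google.protobuf.compiler import plugin_pb2


def main():
  # protoc sends the request on stdin as serialized bytes.
  request = plugin_pb2.CodeGeneratorRequest()
  request.ParseFromString(sys.stdin.read())

  response = plugin_pb2.CodeGeneratorResponse()
  for name in request.file_to_generate:
    generated = response.file.add()
    generated.name = name + '.listing.txt'
    generated.content = 'generated from %s\n' % name

  # The response goes back to protoc on stdout, also as serialized bytes.
  sys.stdout.write(response.SerializeToString())
  return 0


if __name__ == '__main__':
  sys.exit(main())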
diff --git a/tools/swarming_client/third_party/google/protobuf/descriptor.py b/tools/swarming_client/third_party/google/protobuf/descriptor.py
deleted file mode 100644
index 873af30..0000000
--- a/tools/swarming_client/third_party/google/protobuf/descriptor.py
+++ /dev/null
@@ -1,993 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Descriptors essentially contain exactly the information found in a .proto
-file, in types that make this information accessible in Python.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import six
-
-from google.protobuf.internal import api_implementation
-
-_USE_C_DESCRIPTORS = False
-if api_implementation.Type() == 'cpp':
-  # Used by MakeDescriptor in cpp mode
-  import os
-  import uuid
-  from google.protobuf.pyext import _message
-  _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)
-
-
-class Error(Exception):
-  """Base error for this module."""
-
-
-class TypeTransformationError(Error):
-  """Error transforming between python proto type and corresponding C++ type."""
-
-
-if _USE_C_DESCRIPTORS:
-  # This metaclass allows to override the behavior of code like
-  #     isinstance(my_descriptor, FieldDescriptor)
-  # and make it return True when the descriptor is an instance of the extension
-  # type written in C++.
-  class DescriptorMetaclass(type):
-    def __instancecheck__(cls, obj):
-      if super(DescriptorMetaclass, cls).__instancecheck__(obj):
-        return True
-      if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
-        return True
-      return False
-else:
-  # The standard metaclass; nothing changes.
-  DescriptorMetaclass = type
-
-
-class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
-
-  """Descriptors base class.
-
-  This class is the base of all descriptor classes. It provides common options
-  related functionality.
-
-  Attributes:
-    has_options:  True if the descriptor has non-default options.  Usually it
-        is not necessary to read this -- just call GetOptions() which will
-        happily return the default instance.  However, it's sometimes useful
-        for efficiency, and also useful inside the protobuf implementation to
-        avoid some bootstrapping issues.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    # The class, or tuple of classes, that are considered as "virtual
-    # subclasses" of this descriptor class.
-    _C_DESCRIPTOR_CLASS = ()
-
-  def __init__(self, options, options_class_name):
-    """Initialize the descriptor given its options message and the name of the
-    class of the options message. The name of the class is required in case
-    the options message is None and has to be created.
-    """
-    self._options = options
-    self._options_class_name = options_class_name
-
-    # Does this descriptor have non-default options?
-    self.has_options = options is not None
-
-  def _SetOptions(self, options, options_class_name):
-    """Sets the descriptor's options
-
-    This function is used in generated proto2 files to update descriptor
-    options. It must not be used outside proto2.
-    """
-    self._options = options
-    self._options_class_name = options_class_name
-
-    # Does this descriptor have non-default options?
-    self.has_options = options is not None
-
-  def GetOptions(self):
-    """Retrieves descriptor options.
-
-    This method returns the options set or creates the default options for the
-    descriptor.
-    """
-    if self._options:
-      return self._options
-    from google.protobuf import descriptor_pb2
-    try:
-      options_class = getattr(descriptor_pb2, self._options_class_name)
-    except AttributeError:
-      raise RuntimeError('Unknown options class name %s!' %
-                         (self._options_class_name))
-    self._options = options_class()
-    return self._options
-
-
-class _NestedDescriptorBase(DescriptorBase):
-  """Common class for descriptors that can be nested."""
-
-  def __init__(self, options, options_class_name, name, full_name,
-               file, containing_type, serialized_start=None,
-               serialized_end=None):
-    """Constructor.
-
-    Args:
-      options: Protocol message options or None
-        to use default message options.
-      options_class_name: (str) The class name of the above options.
-
-      name: (str) Name of this protocol message type.
-      full_name: (str) Fully-qualified name of this protocol message type,
-        which will include protocol "package" name and the name of any
-        enclosing types.
-      file: (FileDescriptor) Reference to file info.
-      containing_type: if provided, this is a nested descriptor, with this
-        descriptor as parent, otherwise None.
-      serialized_start: The start index (inclusive) in block in the
-        file.serialized_pb that describes this descriptor.
-      serialized_end: The end index (exclusive) in block in the
-        file.serialized_pb that describes this descriptor.
-    """
-    super(_NestedDescriptorBase, self).__init__(
-        options, options_class_name)
-
-    self.name = name
-    # TODO(falk): Add function to calculate full_name instead of having it in
-    #             memory?
-    self.full_name = full_name
-    self.file = file
-    self.containing_type = containing_type
-
-    self._serialized_start = serialized_start
-    self._serialized_end = serialized_end
-
-  def GetTopLevelContainingType(self):
-    """Returns the root if this is a nested type, or itself if its the root."""
-    desc = self
-    while desc.containing_type is not None:
-      desc = desc.containing_type
-    return desc
-
-  def CopyToProto(self, proto):
-    """Copies this to the matching proto in descriptor_pb2.
-
-    Args:
-      proto: An empty proto instance from descriptor_pb2.
-
-    Raises:
-      Error: If self couldn't be serialized, due to too few constructor arguments.
-    """
-    if (self.file is not None and
-        self._serialized_start is not None and
-        self._serialized_end is not None):
-      proto.ParseFromString(self.file.serialized_pb[
-          self._serialized_start:self._serialized_end])
-    else:
-      raise Error('Descriptor does not contain serialization.')
-
-
-class Descriptor(_NestedDescriptorBase):
-
-  """Descriptor for a protocol message type.
-
-  A Descriptor instance has the following attributes:
-
-    name: (str) Name of this protocol message type.
-    full_name: (str) Fully-qualified name of this protocol message type,
-      which will include protocol "package" name and the name of any
-      enclosing types.
-
-    containing_type: (Descriptor) Reference to the descriptor of the
-      type containing us, or None if this is top-level.
-
-    fields: (list of FieldDescriptors) Field descriptors for all
-      fields in this type.
-    fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
-      objects as in |fields|, but indexed by "number" attribute in each
-      FieldDescriptor.
-    fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
-      objects as in |fields|, but indexed by "name" attribute in each
-      FieldDescriptor.
-    fields_by_camelcase_name: (dict str -> FieldDescriptor) Same
-      FieldDescriptor objects as in |fields|, but indexed by
-      "camelcase_name" attribute in each FieldDescriptor.
-
-    nested_types: (list of Descriptors) Descriptor references
-      for all protocol message types nested within this one.
-    nested_types_by_name: (dict str -> Descriptor) Same Descriptor
-      objects as in |nested_types|, but indexed by "name" attribute
-      in each Descriptor.
-
-    enum_types: (list of EnumDescriptors) EnumDescriptor references
-      for all enums contained within this type.
-    enum_types_by_name: (dict str -> EnumDescriptor) Same EnumDescriptor
-      objects as in |enum_types|, but indexed by "name" attribute
-      in each EnumDescriptor.
-    enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
-      from enum value name to EnumValueDescriptor for that value.
-
-    extensions: (list of FieldDescriptor) All extensions defined directly
-      within this message type (NOT within a nested type).
-    extensions_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
-      objects as |extensions|, but indexed by "name" attribute of each
-      FieldDescriptor.
-
-    is_extendable:  Does this type define any extension ranges?
-
-    oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
-      in this message.
-    oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
-      but indexed by "name" attribute.
-
-    file: (FileDescriptor) Reference to file descriptor.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.Descriptor
-
-    def __new__(cls, name, full_name, filename, containing_type, fields,
-                nested_types, enum_types, extensions, options=None,
-                is_extendable=True, extension_ranges=None, oneofs=None,
-                file=None, serialized_start=None, serialized_end=None,  # pylint: disable=redefined-builtin
-                syntax=None):
-      _message.Message._CheckCalledFromGeneratedFile()
-      return _message.default_pool.FindMessageTypeByName(full_name)
-
-  # NOTE(tmarek): The file argument redefining a builtin is nothing we can
-  # fix right now since we don't know how many clients already rely on the
-  # name of the argument.
-  def __init__(self, name, full_name, filename, containing_type, fields,
-               nested_types, enum_types, extensions, options=None,
-               is_extendable=True, extension_ranges=None, oneofs=None,
-               file=None, serialized_start=None, serialized_end=None,  # pylint: disable=redefined-builtin
-               syntax=None):
-    """Arguments to __init__() are as described in the description
-    of Descriptor fields above.
-
-    Note that filename is an obsolete argument that is no longer used.
-    Please use file.name to access this as an attribute.
-    """
-    super(Descriptor, self).__init__(
-        options, 'MessageOptions', name, full_name, file,
-        containing_type, serialized_start=serialized_start,
-        serialized_end=serialized_end)
-
-    # We have fields in addition to fields_by_name and fields_by_number,
-    # so that:
-    #   1. Clients can index fields by "order in which they're listed."
-    #   2. Clients can easily iterate over all fields with the terse
-    #      syntax: for f in descriptor.fields: ...
-    self.fields = fields
-    for field in self.fields:
-      field.containing_type = self
-    self.fields_by_number = dict((f.number, f) for f in fields)
-    self.fields_by_name = dict((f.name, f) for f in fields)
-    self._fields_by_camelcase_name = None
-
-    self.nested_types = nested_types
-    for nested_type in nested_types:
-      nested_type.containing_type = self
-    self.nested_types_by_name = dict((t.name, t) for t in nested_types)
-
-    self.enum_types = enum_types
-    for enum_type in self.enum_types:
-      enum_type.containing_type = self
-    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
-    self.enum_values_by_name = dict(
-        (v.name, v) for t in enum_types for v in t.values)
-
-    self.extensions = extensions
-    for extension in self.extensions:
-      extension.extension_scope = self
-    self.extensions_by_name = dict((f.name, f) for f in extensions)
-    self.is_extendable = is_extendable
-    self.extension_ranges = extension_ranges
-    self.oneofs = oneofs if oneofs is not None else []
-    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
-    for oneof in self.oneofs:
-      oneof.containing_type = self
-    self.syntax = syntax or "proto2"
-
-  @property
-  def fields_by_camelcase_name(self):
-    if self._fields_by_camelcase_name is None:
-      self._fields_by_camelcase_name = dict(
-          (f.camelcase_name, f) for f in self.fields)
-    return self._fields_by_camelcase_name
-
-  def EnumValueName(self, enum, value):
-    """Returns the string name of an enum value.
-
-    This is just a small helper method to simplify a common operation.
-
-    Args:
-      enum: string name of the Enum.
-      value: int, value of the enum.
-
-    Returns:
-      string name of the enum value.
-
-    Raises:
-      KeyError if either the Enum doesn't exist or the value is not a valid
-        value for the enum.
-    """
-    return self.enum_types_by_name[enum].values_by_number[value].name
-
-  def CopyToProto(self, proto):
-    """Copies this to a descriptor_pb2.DescriptorProto.
-
-    Args:
-      proto: An empty descriptor_pb2.DescriptorProto.
-    """
-    # This function is overridden to give a better doc comment.
-    super(Descriptor, self).CopyToProto(proto)
-
-
-# TODO(robinson): We should have aggressive checking here,
-# for example:
-#   * If you specify a repeated field, you should not be allowed
-#     to specify a default value.
-#   * [Other examples here as needed].
-#
-# TODO(robinson): for this and other *Descriptor classes, we
-# might also want to lock things down aggressively (e.g.,
-# prevent clients from setting the attributes).  Having
-# stronger invariants here in general will reduce the number
-# of runtime checks we must do in reflection.py...
-class FieldDescriptor(DescriptorBase):
-
-  """Descriptor for a single field in a .proto file.
-
-  A FieldDescriptor instance has the following attributes:
-
-    name: (str) Name of this field, exactly as it appears in .proto.
-    full_name: (str) Name of this field, including containing scope.  This is
-      particularly relevant for extensions.
-    camelcase_name: (str) Camelcase name of this field.
-    index: (int) Dense, 0-indexed index giving the order that this
-      field textually appears within its message in the .proto file.
-    number: (int) Tag number declared for this field in the .proto file.
-
-    type: (One of the TYPE_* constants below) Declared type.
-    cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
-      represent this field.
-
-    label: (One of the LABEL_* constants below) Tells whether this
-      field is optional, required, or repeated.
-    has_default_value: (bool) True if this field has a default value defined,
-      otherwise false.
-    default_value: (Varies) Default value of this field.  Only
-      meaningful for non-repeated scalar fields.  Repeated fields
-      should always set this to [], and non-repeated composite
-      fields should always set this to None.
-
-    containing_type: (Descriptor) Descriptor of the protocol message
-      type that contains this field.  Set by the Descriptor constructor
-      if we're passed into one.
-      Somewhat confusingly, for extension fields, this is the
-      descriptor of the EXTENDED message, not the descriptor
-      of the message containing this field.  (See is_extension and
-      extension_scope below).
-    message_type: (Descriptor) If a composite field, a descriptor
-      of the message type contained in this field.  Otherwise, this is None.
-    enum_type: (EnumDescriptor) If this field contains an enum, a
-      descriptor of that enum.  Otherwise, this is None.
-
-    is_extension: True iff this describes an extension field.
-    extension_scope: (Descriptor) Only meaningful if is_extension is True.
-      Gives the message that immediately contains this extension field.
-      Will be None iff we're a top-level (file-level) extension field.
-
-    options: (descriptor_pb2.FieldOptions) Protocol message field options or
-      None to use default field options.
-
-    containing_oneof: (OneofDescriptor) If the field is a member of a oneof
-      union, contains its descriptor. Otherwise, None.
-  """
-
-  # Must be consistent with C++ FieldDescriptor::Type enum in
-  # descriptor.h.
-  #
-  # TODO(robinson): Find a way to eliminate this repetition.
-  TYPE_DOUBLE         = 1
-  TYPE_FLOAT          = 2
-  TYPE_INT64          = 3
-  TYPE_UINT64         = 4
-  TYPE_INT32          = 5
-  TYPE_FIXED64        = 6
-  TYPE_FIXED32        = 7
-  TYPE_BOOL           = 8
-  TYPE_STRING         = 9
-  TYPE_GROUP          = 10
-  TYPE_MESSAGE        = 11
-  TYPE_BYTES          = 12
-  TYPE_UINT32         = 13
-  TYPE_ENUM           = 14
-  TYPE_SFIXED32       = 15
-  TYPE_SFIXED64       = 16
-  TYPE_SINT32         = 17
-  TYPE_SINT64         = 18
-  MAX_TYPE            = 18
-
-  # Must be consistent with C++ FieldDescriptor::CppType enum in
-  # descriptor.h.
-  #
-  # TODO(robinson): Find a way to eliminate this repetition.
-  CPPTYPE_INT32       = 1
-  CPPTYPE_INT64       = 2
-  CPPTYPE_UINT32      = 3
-  CPPTYPE_UINT64      = 4
-  CPPTYPE_DOUBLE      = 5
-  CPPTYPE_FLOAT       = 6
-  CPPTYPE_BOOL        = 7
-  CPPTYPE_ENUM        = 8
-  CPPTYPE_STRING      = 9
-  CPPTYPE_MESSAGE     = 10
-  MAX_CPPTYPE         = 10
-
-  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
-      TYPE_DOUBLE: CPPTYPE_DOUBLE,
-      TYPE_FLOAT: CPPTYPE_FLOAT,
-      TYPE_ENUM: CPPTYPE_ENUM,
-      TYPE_INT64: CPPTYPE_INT64,
-      TYPE_SINT64: CPPTYPE_INT64,
-      TYPE_SFIXED64: CPPTYPE_INT64,
-      TYPE_UINT64: CPPTYPE_UINT64,
-      TYPE_FIXED64: CPPTYPE_UINT64,
-      TYPE_INT32: CPPTYPE_INT32,
-      TYPE_SFIXED32: CPPTYPE_INT32,
-      TYPE_SINT32: CPPTYPE_INT32,
-      TYPE_UINT32: CPPTYPE_UINT32,
-      TYPE_FIXED32: CPPTYPE_UINT32,
-      TYPE_BYTES: CPPTYPE_STRING,
-      TYPE_STRING: CPPTYPE_STRING,
-      TYPE_BOOL: CPPTYPE_BOOL,
-      TYPE_MESSAGE: CPPTYPE_MESSAGE,
-      TYPE_GROUP: CPPTYPE_MESSAGE
-      }
-
-  # Must be consistent with C++ FieldDescriptor::Label enum in
-  # descriptor.h.
-  #
-  # TODO(robinson): Find a way to eliminate this repetition.
-  LABEL_OPTIONAL      = 1
-  LABEL_REQUIRED      = 2
-  LABEL_REPEATED      = 3
-  MAX_LABEL           = 3
-
-  # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
-  # and kLastReservedNumber in descriptor.h
-  MAX_FIELD_NUMBER = (1 << 29) - 1
-  FIRST_RESERVED_FIELD_NUMBER = 19000
-  LAST_RESERVED_FIELD_NUMBER = 19999
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.FieldDescriptor
-
-    def __new__(cls, name, full_name, index, number, type, cpp_type, label,
-                default_value, message_type, enum_type, containing_type,
-                is_extension, extension_scope, options=None,
-                has_default_value=True, containing_oneof=None):
-      _message.Message._CheckCalledFromGeneratedFile()
-      if is_extension:
-        return _message.default_pool.FindExtensionByName(full_name)
-      else:
-        return _message.default_pool.FindFieldByName(full_name)
-
-  def __init__(self, name, full_name, index, number, type, cpp_type, label,
-               default_value, message_type, enum_type, containing_type,
-               is_extension, extension_scope, options=None,
-               has_default_value=True, containing_oneof=None):
-    """The arguments are as described in the description of FieldDescriptor
-    attributes above.
-
-    Note that containing_type may be None, and may be set later if necessary
-    (to deal with circular references between message types, for example).
-    Likewise for extension_scope.
-    """
-    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
-    self.name = name
-    self.full_name = full_name
-    self._camelcase_name = None
-    self.index = index
-    self.number = number
-    self.type = type
-    self.cpp_type = cpp_type
-    self.label = label
-    self.has_default_value = has_default_value
-    self.default_value = default_value
-    self.containing_type = containing_type
-    self.message_type = message_type
-    self.enum_type = enum_type
-    self.is_extension = is_extension
-    self.extension_scope = extension_scope
-    self.containing_oneof = containing_oneof
-    if api_implementation.Type() == 'cpp':
-      if is_extension:
-        self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
-      else:
-        self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
-    else:
-      self._cdescriptor = None
-
-  @property
-  def camelcase_name(self):
-    if self._camelcase_name is None:
-      self._camelcase_name = _ToCamelCase(self.name)
-    return self._camelcase_name
-
-  @staticmethod
-  def ProtoTypeToCppProtoType(proto_type):
-    """Converts from a Python proto type to a C++ Proto Type.
-
-    The Python ProtocolBuffer classes specify both the 'Python' datatype and the
-    'C++' datatype - and they're not the same. This helper method
-    translates from one to the other.
-
-    Args:
-      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
-    Returns:
-      descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
-    Raises:
-      TypeTransformationError: when the Python proto type isn't known.
-    """
-    try:
-      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
-    except KeyError:
-      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
-
-
-class EnumDescriptor(_NestedDescriptorBase):
-
-  """Descriptor for an enum defined in a .proto file.
-
-  An EnumDescriptor instance has the following attributes:
-
-    name: (str) Name of the enum type.
-    full_name: (str) Full name of the type, including package name
-      and any enclosing type(s).
-
-    values: (list of EnumValueDescriptors) List of the values
-      in this enum.
-    values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
-      but indexed by the "name" field of each EnumValueDescriptor.
-    values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
-      but indexed by the "number" field of each EnumValueDescriptor.
-    containing_type: (Descriptor) Descriptor of the immediate containing
-      type of this enum, or None if this is an enum defined at the
-      top level in a .proto file.  Set by Descriptor's constructor
-      if we're passed into one.
-    file: (FileDescriptor) Reference to file descriptor.
-    options: (descriptor_pb2.EnumOptions) Enum options message or
-      None to use default enum options.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
-
-    def __new__(cls, name, full_name, filename, values,
-                containing_type=None, options=None, file=None,
-                serialized_start=None, serialized_end=None):
-      _message.Message._CheckCalledFromGeneratedFile()
-      return _message.default_pool.FindEnumTypeByName(full_name)
-
-  def __init__(self, name, full_name, filename, values,
-               containing_type=None, options=None, file=None,
-               serialized_start=None, serialized_end=None):
-    """Arguments are as described in the attribute description above.
-
-    Note that filename is an obsolete argument that is no longer used.
-    Please use file.name to access this as an attribute.
-    """
-    super(EnumDescriptor, self).__init__(
-        options, 'EnumOptions', name, full_name, file,
-        containing_type, serialized_start=serialized_start,
-        serialized_end=serialized_end)
-
-    self.values = values
-    for value in self.values:
-      value.type = self
-    self.values_by_name = dict((v.name, v) for v in values)
-    self.values_by_number = dict((v.number, v) for v in values)
-
-  def CopyToProto(self, proto):
-    """Copies this to a descriptor_pb2.EnumDescriptorProto.
-
-    Args:
-      proto: An empty descriptor_pb2.EnumDescriptorProto.
-    """
-    # This function is overridden to give a better doc comment.
-    super(EnumDescriptor, self).CopyToProto(proto)
-
-
-class EnumValueDescriptor(DescriptorBase):
-
-  """Descriptor for a single value within an enum.
-
-    name: (str) Name of this value.
-    index: (int) Dense, 0-indexed index giving the order that this
-      value appears textually within its enum in the .proto file.
-    number: (int) Actual number assigned to this enum value.
-    type: (EnumDescriptor) EnumDescriptor to which this value
-      belongs.  Set by EnumDescriptor's constructor if we're
-      passed into one.
-    options: (descriptor_pb2.EnumValueOptions) Enum value options message or
-      None to use default enum value options.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
-
-    def __new__(cls, name, index, number, type=None, options=None):
-      _message.Message._CheckCalledFromGeneratedFile()
-      # There is no way we can build a complete EnumValueDescriptor with the
-      # given parameters (the name of the Enum is not known, for example).
-      # Fortunately generated files just pass it to the EnumDescriptor()
-      # constructor, which will ignore it, so returning None is good enough.
-      return None
-
-  def __init__(self, name, index, number, type=None, options=None):
-    """Arguments are as described in the attribute description above."""
-    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
-    self.name = name
-    self.index = index
-    self.number = number
-    self.type = type
-
-
-class OneofDescriptor(DescriptorBase):
-  """Descriptor for a oneof field.
-
-    name: (str) Name of the oneof field.
-    full_name: (str) Full name of the oneof field, including package name.
-    index: (int) 0-based index giving the order of the oneof field inside
-      its containing type.
-    containing_type: (Descriptor) Descriptor of the protocol message
-      type that contains this field.  Set by the Descriptor constructor
-      if we're passed into one.
-    fields: (list of FieldDescriptor) The list of field descriptors this
-      oneof can contain.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
-
-    def __new__(
-        cls, name, full_name, index, containing_type, fields, options=None):
-      _message.Message._CheckCalledFromGeneratedFile()
-      return _message.default_pool.FindOneofByName(full_name)
-
-  def __init__(
-      self, name, full_name, index, containing_type, fields, options=None):
-    """Arguments are as described in the attribute description above."""
-    super(OneofDescriptor, self).__init__(options, 'OneofOptions')
-    self.name = name
-    self.full_name = full_name
-    self.index = index
-    self.containing_type = containing_type
-    self.fields = fields
-
-
-class ServiceDescriptor(_NestedDescriptorBase):
-
-  """Descriptor for a service.
-
-    name: (str) Name of the service.
-    full_name: (str) Full name of the service, including package name.
-    index: (int) 0-indexed index giving the order that this service's
-      definition appears within the .proto file.
-    methods: (list of MethodDescriptor) List of methods provided by this
-      service.
-    methods_by_name: (dict str -> MethodDescriptor) Same MethodDescriptor
-      objects as in |methods|, but indexed by "name" attribute in each
-      MethodDescriptor.
-    options: (descriptor_pb2.ServiceOptions) Service options message or
-      None to use default service options.
-    file: (FileDescriptor) Reference to file info.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
-
-    def __new__(cls, name, full_name, index, methods, options=None, file=None,  # pylint: disable=redefined-builtin
-                serialized_start=None, serialized_end=None):
-      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
-      return _message.default_pool.FindServiceByName(full_name)
-
-  def __init__(self, name, full_name, index, methods, options=None, file=None,
-               serialized_start=None, serialized_end=None):
-    super(ServiceDescriptor, self).__init__(
-        options, 'ServiceOptions', name, full_name, file,
-        None, serialized_start=serialized_start,
-        serialized_end=serialized_end)
-    self.index = index
-    self.methods = methods
-    self.methods_by_name = dict((m.name, m) for m in methods)
-    # Set the containing service for each method in this service.
-    for method in self.methods:
-      method.containing_service = self
-
-  def FindMethodByName(self, name):
-    """Searches for the specified method, and returns its descriptor."""
-    return self.methods_by_name.get(name, None)
-
-  def CopyToProto(self, proto):
-    """Copies this to a descriptor_pb2.ServiceDescriptorProto.
-
-    Args:
-      proto: An empty descriptor_pb2.ServiceDescriptorProto.
-    """
-    # This function is overridden to give a better doc comment.
-    super(ServiceDescriptor, self).CopyToProto(proto)
-
-
-class MethodDescriptor(DescriptorBase):
-
-  """Descriptor for a method in a service.
-
-  name: (str) Name of the method within the service.
-  full_name: (str) Full name of method.
-  index: (int) 0-indexed index of the method inside the service.
-  containing_service: (ServiceDescriptor) The service that contains this
-    method.
-  input_type: The descriptor of the message that this method accepts.
-  output_type: The descriptor of the message that this method returns.
-  options: (descriptor_pb2.MethodOptions) Method options message or
-    None to use default method options.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.MethodDescriptor
-
-    def __new__(cls, name, full_name, index, containing_service,
-                input_type, output_type, options=None):
-      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
-      return _message.default_pool.FindMethodByName(full_name)
-
-  def __init__(self, name, full_name, index, containing_service,
-               input_type, output_type, options=None):
-    """The arguments are as described in the description of MethodDescriptor
-    attributes above.
-
-    Note that containing_service may be None, and may be set later if necessary.
-    """
-    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
-    self.name = name
-    self.full_name = full_name
-    self.index = index
-    self.containing_service = containing_service
-    self.input_type = input_type
-    self.output_type = output_type
-
-
-class FileDescriptor(DescriptorBase):
-  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
-
-  Note that enum_types_by_name, extensions_by_name, and dependencies
-  fields are only set by the message_factory module, and not by the
-  generated proto code.
-
-  name: name of file, relative to root of source tree.
-  package: name of the package.
-  syntax: string indicating syntax of the file (can be "proto2" or "proto3")
-  serialized_pb: (str) Byte string of serialized
-    descriptor_pb2.FileDescriptorProto.
-  dependencies: List of other FileDescriptors this FileDescriptor depends on.
-  public_dependencies: A list of FileDescriptors, subset of the dependencies
-    above, which were declared as "public".
-  message_types_by_name: Dict of message names and their descriptors.
-  enum_types_by_name: Dict of enum names and their descriptors.
-  extensions_by_name: Dict of extension names and their descriptors.
-  services_by_name: Dict of service names and their descriptors.
-  pool: the DescriptorPool this descriptor belongs to.  When not passed to the
-    constructor, the global default pool is used.
-  """
-
-  if _USE_C_DESCRIPTORS:
-    _C_DESCRIPTOR_CLASS = _message.FileDescriptor
-
-    def __new__(cls, name, package, options=None, serialized_pb=None,
-                dependencies=None, public_dependencies=None,
-                syntax=None, pool=None):
-      # FileDescriptor() is called from various places, not only from generated
-      # files, to register dynamic proto files and messages.
-      if serialized_pb:
-        # TODO(amauryfa): use the pool passed as argument. This will work only
-        # for C++-implemented DescriptorPools.
-        return _message.default_pool.AddSerializedFile(serialized_pb)
-      else:
-        return super(FileDescriptor, cls).__new__(cls)
-
-  def __init__(self, name, package, options=None, serialized_pb=None,
-               dependencies=None, public_dependencies=None,
-               syntax=None, pool=None):
-    """Constructor."""
-    super(FileDescriptor, self).__init__(options, 'FileOptions')
-
-    if pool is None:
-      from google.protobuf import descriptor_pool
-      pool = descriptor_pool.Default()
-    self.pool = pool
-    self.message_types_by_name = {}
-    self.name = name
-    self.package = package
-    self.syntax = syntax or "proto2"
-    self.serialized_pb = serialized_pb
-
-    self.enum_types_by_name = {}
-    self.extensions_by_name = {}
-    self.services_by_name = {}
-    self.dependencies = (dependencies or [])
-    self.public_dependencies = (public_dependencies or [])
-
-    if (api_implementation.Type() == 'cpp' and
-        self.serialized_pb is not None):
-      _message.default_pool.AddSerializedFile(self.serialized_pb)
-
-  def CopyToProto(self, proto):
-    """Copies this to a descriptor_pb2.FileDescriptorProto.
-
-    Args:
-      proto: An empty descriptor_pb2.FileDescriptorProto.
-    """
-    proto.ParseFromString(self.serialized_pb)
-
-
-def _ParseOptions(message, string):
-  """Parses serialized options.
-
-  This helper function is used to parse serialized options in generated
-  proto2 files. It must not be used outside proto2.
-  """
-  message.ParseFromString(string)
-  return message
-
-
-def _ToCamelCase(name):
-  """Converts name to camel-case and returns it."""
-  capitalize_next = False
-  result = []
-
-  for c in name:
-    if c == '_':
-      if result:
-        capitalize_next = True
-    elif capitalize_next:
-      result.append(c.upper())
-      capitalize_next = False
-    else:
-      result += c
-
-  # Lower-case the first letter.
-  if result and result[0].isupper():
-    result[0] = result[0].lower()
-  return ''.join(result)
-
-
-def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
-                   syntax=None):
-  """Make a protobuf Descriptor given a DescriptorProto protobuf.
-
-  Handles nested descriptors. Note that this is limited to the scope of defining
-  a message inside of another message. Composite fields can currently only be
-  resolved if the message is defined in the same scope as the field.
-
-  Args:
-    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
-    package: Optional package name for the new message Descriptor (string).
-    build_file_if_cpp: Update the C++ descriptor pool if the API matches.
-                       Set to False on recursion, so no duplicates are created.
-    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
-            proto3 field presence semantics.
-  Returns:
-    A Descriptor for protobuf messages.
-  """
-  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
-    # The C++ implementation requires all descriptors to be backed by the same
-    # definition in the C++ descriptor pool. To do this, we build a
-    # FileDescriptorProto with the same definition as this descriptor and build
-    # it into the pool.
-    from google.protobuf import descriptor_pb2
-    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
-    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
-
-    # Generate a random name for this proto file to prevent conflicts with any
-    # imported ones. We need to specify a file name so the descriptor pool
-    # accepts our FileDescriptorProto, but it is not important what that file
-    # name is actually set to.
-    proto_name = str(uuid.uuid4())
-
-    if package:
-      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
-                                                proto_name + '.proto')
-      file_descriptor_proto.package = package
-    else:
-      file_descriptor_proto.name = proto_name + '.proto'
-
-    _message.default_pool.Add(file_descriptor_proto)
-    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
-
-    if _USE_C_DESCRIPTORS:
-      return result.message_types_by_name[desc_proto.name]
-
-  full_message_name = [desc_proto.name]
-  if package: full_message_name.insert(0, package)
-
-  # Create Descriptors for enum types
-  enum_types = {}
-  for enum_proto in desc_proto.enum_type:
-    full_name = '.'.join(full_message_name + [enum_proto.name])
-    enum_desc = EnumDescriptor(
-      enum_proto.name, full_name, None, [
-          EnumValueDescriptor(enum_val.name, ii, enum_val.number)
-          for ii, enum_val in enumerate(enum_proto.value)])
-    enum_types[full_name] = enum_desc
-
-  # Create Descriptors for nested types
-  nested_types = {}
-  for nested_proto in desc_proto.nested_type:
-    full_name = '.'.join(full_message_name + [nested_proto.name])
-    # Nested types are just those defined inside of the message, not all types
-    # used by fields in the message, so no loops are possible here.
-    nested_desc = MakeDescriptor(nested_proto,
-                                 package='.'.join(full_message_name),
-                                 build_file_if_cpp=False,
-                                 syntax=syntax)
-    nested_types[full_name] = nested_desc
-
-  fields = []
-  for field_proto in desc_proto.field:
-    full_name = '.'.join(full_message_name + [field_proto.name])
-    enum_desc = None
-    nested_desc = None
-    if field_proto.HasField('type_name'):
-      type_name = field_proto.type_name
-      full_type_name = '.'.join(full_message_name +
-                                [type_name[type_name.rfind('.')+1:]])
-      if full_type_name in nested_types:
-        nested_desc = nested_types[full_type_name]
-      elif full_type_name in enum_types:
-        enum_desc = enum_types[full_type_name]
-      # Else type_name references a non-local type, which isn't implemented
-    field = FieldDescriptor(
-        field_proto.name, full_name, field_proto.number - 1,
-        field_proto.number, field_proto.type,
-        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
-        field_proto.label, None, nested_desc, enum_desc, None, False, None,
-        options=field_proto.options, has_default_value=False)
-    fields.append(field)
-
-  desc_name = '.'.join(full_message_name)
-  return Descriptor(desc_proto.name, desc_name, None, None, fields,
-                    list(nested_types.values()), list(enum_types.values()), [],
-                    options=desc_proto.options)
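For reference, MakeDescriptor() in the descriptor.py module deleted above builds a message Descriptor straight from a descriptor_pb2.DescriptorProto. A minimal usage sketch, assuming the pure-Python protobuf implementation and a made-up message named Foo (illustrative only, not part of this patch):

# Illustrative only: build a Descriptor for a hypothetical message "Foo"
# with one optional int32 field, using the MakeDescriptor() helper above.
from google.protobuf import descriptor
from google.protobuf import descriptor_pb2

msg_proto = descriptor_pb2.DescriptorProto()
msg_proto.name = 'Foo'                     # made-up message name
field = msg_proto.field.add()
field.name = 'id'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

foo_descriptor = descriptor.MakeDescriptor(msg_proto, package='example')
print(foo_descriptor.full_name)                 # example.Foo
print([f.name for f in foo_descriptor.fields])  # ['id']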
diff --git a/tools/swarming_client/third_party/google/protobuf/descriptor_database.py b/tools/swarming_client/third_party/google/protobuf/descriptor_database.py
deleted file mode 100644
index 1333f99..0000000
--- a/tools/swarming_client/third_party/google/protobuf/descriptor_database.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides a container for DescriptorProtos."""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-
-class Error(Exception):
-  pass
-
-
-class DescriptorDatabaseConflictingDefinitionError(Error):
-  """Raised when a proto is added with the same name & different descriptor."""
-
-
-class DescriptorDatabase(object):
-  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""
-
-  def __init__(self):
-    self._file_desc_protos_by_file = {}
-    self._file_desc_protos_by_symbol = {}
-
-  def Add(self, file_desc_proto):
-    """Adds the FileDescriptorProto and its types to this database.
-
-    Args:
-      file_desc_proto: The FileDescriptorProto to add.
-    Raises:
-      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
-        add a proto with the same name but a different definition than an
-        existing proto in the database.
-    """
-    proto_name = file_desc_proto.name
-    if proto_name not in self._file_desc_protos_by_file:
-      self._file_desc_protos_by_file[proto_name] = file_desc_proto
-    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
-      raise DescriptorDatabaseConflictingDefinitionError(
-          '%s already added, but with different descriptor.' % proto_name)
-
-    # Add the top-level Message, Enum and Extension descriptors to the index.
-    package = file_desc_proto.package
-    for message in file_desc_proto.message_type:
-      self._file_desc_protos_by_symbol.update(
-          (name, file_desc_proto) for name in _ExtractSymbols(message, package))
-    for enum in file_desc_proto.enum_type:
-      self._file_desc_protos_by_symbol[
-          '.'.join((package, enum.name))] = file_desc_proto
-    for extension in file_desc_proto.extension:
-      self._file_desc_protos_by_symbol[
-          '.'.join((package, extension.name))] = file_desc_proto
-
-  def FindFileByName(self, name):
-    """Finds the file descriptor proto by file name.
-
-    Typically the file name is a relative path ending in a .proto file. The
-    proto with the given name must have been added to this database using the
-    Add method, or else an error will be raised.
-
-    Args:
-      name: The file name to find.
-
-    Returns:
-      The file descriptor proto matching the name.
-
-    Raises:
-      KeyError if no file by the given name was added.
-    """
-
-    return self._file_desc_protos_by_file[name]
-
-  def FindFileContainingSymbol(self, symbol):
-    """Finds the file descriptor proto containing the specified symbol.
-
-    The symbol should be a fully qualified name including the file descriptor's
-    package and any containing messages. Some examples:
-
-    'some.package.name.Message'
-    'some.package.name.Message.NestedEnum'
-
-    The file descriptor proto containing the specified symbol must have been
-    added to this database using the Add method, or else an error will be
-    raised.
-
-    Args:
-      symbol: The fully qualified symbol name.
-
-    Returns:
-      The file descriptor proto containing the symbol.
-
-    Raises:
-      KeyError if no file contains the specified symbol.
-    """
-
-    return self._file_desc_protos_by_symbol[symbol]
-
-
-def _ExtractSymbols(desc_proto, package):
-  """Pulls out all the symbols from a descriptor proto.
-
-  Args:
-    desc_proto: The proto to extract symbols from.
-    package: The package containing the descriptor type.
-
-  Yields:
-    The fully qualified name found in the descriptor.
-  """
-
-  message_name = '.'.join((package, desc_proto.name))
-  yield message_name
-  for nested_type in desc_proto.nested_type:
-    for symbol in _ExtractSymbols(nested_type, message_name):
-      yield symbol
-  for enum_type in desc_proto.enum_type:
-    yield '.'.join((message_name, enum_type.name))
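For reference, the DescriptorDatabase class in the descriptor_database.py module deleted above is a simple in-memory index from file names and fully qualified symbol names to FileDescriptorProtos. A minimal sketch of its use, with made-up file and message names (illustrative only, not part of this patch):

# Illustrative only: index a FileDescriptorProto and look it up again,
# using the DescriptorDatabase class removed above.
from google.protobuf import descriptor_database
from google.protobuf import descriptor_pb2

db = descriptor_database.DescriptorDatabase()

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/foo.proto'       # made-up file name
file_proto.package = 'example'
file_proto.message_type.add().name = 'Foo'  # made-up message name

db.Add(file_proto)

# Both lookups return the same FileDescriptorProto object that was added.
assert db.FindFileByName('example/foo.proto') is file_proto
assert db.FindFileContainingSymbol('example.Foo') is file_proto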
diff --git a/tools/swarming_client/third_party/google/protobuf/descriptor_pb2.py b/tools/swarming_client/third_party/google/protobuf/descriptor_pb2.py
deleted file mode 100644
index 3d2964a..0000000
--- a/tools/swarming_client/third_party/google/protobuf/descriptor_pb2.py
+++ /dev/null
@@ -1,1845 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/descriptor.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/descriptor.proto',
-  package='google.protobuf',
-  syntax='proto2',
-  serialized_pb=_b('\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xf0\x04\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xbc\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\x87\x05\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x98\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42[\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z\ndescriptor\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
-  name='Type',
-  full_name='google.protobuf.FieldDescriptorProto.Type',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_DOUBLE', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FLOAT', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_INT64', index=2, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_UINT64', index=3, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_INT32', index=4, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FIXED64', index=5, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FIXED32', index=6, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_BOOL', index=7, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_STRING', index=8, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_GROUP', index=9, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_MESSAGE', index=10, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_BYTES', index=11, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_UINT32', index=12, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_ENUM', index=13, number=14,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SFIXED32', index=14, number=15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SFIXED64', index=15, number=16,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SINT32', index=16, number=17,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SINT64', index=17, number=18,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1553,
-  serialized_end=1863,
-)
-_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
-
-_FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
-  name='Label',
-  full_name='google.protobuf.FieldDescriptorProto.Label',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='LABEL_OPTIONAL', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LABEL_REQUIRED', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LABEL_REPEATED', index=2, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1865,
-  serialized_end=1932,
-)
-_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
-
-_FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
-  name='OptimizeMode',
-  full_name='google.protobuf.FileOptions.OptimizeMode',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='SPEED', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CODE_SIZE', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LITE_RUNTIME', index=2, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3189,
-  serialized_end=3247,
-)
-_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
-
-_FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
-  name='CType',
-  full_name='google.protobuf.FieldOptions.CType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='STRING', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CORD', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='STRING_PIECE', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3795,
-  serialized_end=3842,
-)
-_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
-
-_FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
-  name='JSType',
-  full_name='google.protobuf.FieldOptions.JSType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='JS_NORMAL', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='JS_STRING', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='JS_NUMBER', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3844,
-  serialized_end=3897,
-)
-_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)
-
-
-_FILEDESCRIPTORSET = _descriptor.Descriptor(
-  name='FileDescriptorSet',
-  full_name='google.protobuf.FileDescriptorSet',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=53,
-  serialized_end=124,
-)
-
-
-_FILEDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='FileDescriptorProto',
-  full_name='google.protobuf.FileDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
-      number=3, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
-      number=10, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
-      number=11, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
-      number=7, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
-      number=12, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=127,
-  serialized_end=602,
-)
-
-
-_DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
-  name='ExtensionRange',
-  full_name='google.protobuf.DescriptorProto.ExtensionRange',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1140,
-  serialized_end=1184,
-)
-
-_DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor(
-  name='ReservedRange',
-  full_name='google.protobuf.DescriptorProto.ReservedRange',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1186,
-  serialized_end=1229,
-)
-
-_DESCRIPTORPROTO = _descriptor.Descriptor(
-  name='DescriptorProto',
-  full_name='google.protobuf.DescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
-      number=8, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
-      number=7, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8,
-      number=9, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9,
-      number=10, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=605,
-  serialized_end=1229,
-)
-
-
-_FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='FieldDescriptorProto',
-  full_name='google.protobuf.FieldDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
-      number=4, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
-      number=7, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
-      number=9, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8,
-      number=10, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _FIELDDESCRIPTORPROTO_TYPE,
-    _FIELDDESCRIPTORPROTO_LABEL,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1232,
-  serialized_end=1932,
-)
-
-
-_ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='OneofDescriptorProto',
-  full_name='google.protobuf.OneofDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1934,
-  serialized_end=2018,
-)
-
-
-_ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='EnumDescriptorProto',
-  full_name='google.protobuf.EnumDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2021,
-  serialized_end=2161,
-)
-
-
-_ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='EnumValueDescriptorProto',
-  full_name='google.protobuf.EnumValueDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2163,
-  serialized_end=2271,
-)
-
-
-_SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='ServiceDescriptorProto',
-  full_name='google.protobuf.ServiceDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2274,
-  serialized_end=2418,
-)
-
-
-_METHODDESCRIPTORPROTO = _descriptor.Descriptor(
-  name='MethodDescriptorProto',
-  full_name='google.protobuf.MethodDescriptorProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
-      number=5, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
-      number=6, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2421,
-  serialized_end=2614,
-)
-
-
-_FILEOPTIONS = _descriptor.Descriptor(
-  name='FileOptions',
-  full_name='google.protobuf.FileOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
-      number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
-      number=10, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
-      number=20, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
-      number=27, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
-      number=9, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
-      number=11, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
-      number=16, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
-      number=17, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
-      number=18, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10,
-      number=23, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11,
-      number=31, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=12,
-      number=36, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=13,
-      number=37, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=14,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _FILEOPTIONS_OPTIMIZEMODE,
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=2617,
-  serialized_end=3264,
-)
-
-
-_MESSAGEOPTIONS = _descriptor.Descriptor(
-  name='MessageOptions',
-  full_name='google.protobuf.MessageOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
-      number=7, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=3267,
-  serialized_end=3497,
-)
-
-
-_FIELDOPTIONS = _descriptor.Descriptor(
-  name='FieldOptions',
-  full_name='google.protobuf.FieldOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3,
-      number=5, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=4,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='weak', full_name='google.protobuf.FieldOptions.weak', index=5,
-      number=10, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=6,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _FIELDOPTIONS_CTYPE,
-    _FIELDOPTIONS_JSTYPE,
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=3500,
-  serialized_end=3908,
-)
-
-
-_ONEOFOPTIONS = _descriptor.Descriptor(
-  name='OneofOptions',
-  full_name='google.protobuf.OneofOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=3910,
-  serialized_end=4004,
-)
-
-
-_ENUMOPTIONS = _descriptor.Descriptor(
-  name='EnumOptions',
-  full_name='google.protobuf.EnumOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
-      number=3, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4007,
-  serialized_end=4148,
-)
-
-
-_ENUMVALUEOPTIONS = _descriptor.Descriptor(
-  name='EnumValueOptions',
-  full_name='google.protobuf.EnumValueOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4150,
-  serialized_end=4275,
-)
-
-
-_SERVICEOPTIONS = _descriptor.Descriptor(
-  name='ServiceOptions',
-  full_name='google.protobuf.ServiceOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
-      number=33, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4277,
-  serialized_end=4400,
-)
-
-
-_METHODOPTIONS = _descriptor.Descriptor(
-  name='MethodOptions',
-  full_name='google.protobuf.MethodOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
-      number=33, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1,
-      number=999, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4402,
-  serialized_end=4524,
-)
-
-
-_UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
-  name='NamePart',
-  full_name='google.protobuf.UninterpretedOption.NamePart',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
-      number=1, type=9, cpp_type=9, label=2,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
-      number=2, type=8, cpp_type=7, label=2,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4762,
-  serialized_end=4813,
-)
-
-_UNINTERPRETEDOPTION = _descriptor.Descriptor(
-  name='UninterpretedOption',
-  full_name='google.protobuf.UninterpretedOption',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
-      number=6, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
-      number=7, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
-      number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4527,
-  serialized_end=4813,
-)
-
-
-_SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
-  name='Location',
-  full_name='google.protobuf.SourceCodeInfo.Location',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
-      number=2, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4,
-      number=6, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4895,
-  serialized_end=5029,
-)
-
-_SOURCECODEINFO = _descriptor.Descriptor(
-  name='SourceCodeInfo',
-  full_name='google.protobuf.SourceCodeInfo',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_SOURCECODEINFO_LOCATION, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4816,
-  serialized_end=5029,
-)
-
-
-_GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor(
-  name='Annotation',
-  full_name='google.protobuf.GeneratedCodeInfo.Annotation',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5120,
-  serialized_end=5199,
-)
-
-_GENERATEDCODEINFO = _descriptor.Descriptor(
-  name='GeneratedCodeInfo',
-  full_name='google.protobuf.GeneratedCodeInfo',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_GENERATEDCODEINFO_ANNOTATION, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5032,
-  serialized_end=5199,
-)
-
-_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
-_FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
-_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
-_DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
-_DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
-_DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE
-_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
-_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
-_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
-_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
-_ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS
-_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
-_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
-_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
-_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
-_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
-_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
-_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
-_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
-_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
-_FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE
-_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
-_FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS
-_ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
-_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
-_SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
-_GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO
-_GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION
-DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
-DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
-DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
-DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
-DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
-DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS
-DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
-DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
-DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
-DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
-DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
-DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
-DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO
-
-FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict(
-  DESCRIPTOR = _FILEDESCRIPTORSET,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
-  ))
-_sym_db.RegisterMessage(FileDescriptorSet)
-
-FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _FILEDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
-  ))
-_sym_db.RegisterMessage(FileDescriptorProto)
-
-DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict(
-
-  ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict(
-    DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE,
-    __module__ = 'google.protobuf.descriptor_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
-    ))
-  ,
-
-  ReservedRange = _reflection.GeneratedProtocolMessageType('ReservedRange', (_message.Message,), dict(
-    DESCRIPTOR = _DESCRIPTORPROTO_RESERVEDRANGE,
-    __module__ = 'google.protobuf.descriptor_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ReservedRange)
-    ))
-  ,
-  DESCRIPTOR = _DESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto)
-  ))
-_sym_db.RegisterMessage(DescriptorProto)
-_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
-_sym_db.RegisterMessage(DescriptorProto.ReservedRange)
-
-FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _FIELDDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto)
-  ))
-_sym_db.RegisterMessage(FieldDescriptorProto)
-
-OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _ONEOFDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto)
-  ))
-_sym_db.RegisterMessage(OneofDescriptorProto)
-
-EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _ENUMDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto)
-  ))
-_sym_db.RegisterMessage(EnumDescriptorProto)
-
-EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto)
-  ))
-_sym_db.RegisterMessage(EnumValueDescriptorProto)
-
-ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _SERVICEDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto)
-  ))
-_sym_db.RegisterMessage(ServiceDescriptorProto)
-
-MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict(
-  DESCRIPTOR = _METHODDESCRIPTORPROTO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto)
-  ))
-_sym_db.RegisterMessage(MethodDescriptorProto)
-
-FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict(
-  DESCRIPTOR = _FILEOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions)
-  ))
-_sym_db.RegisterMessage(FileOptions)
-
-MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict(
-  DESCRIPTOR = _MESSAGEOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions)
-  ))
-_sym_db.RegisterMessage(MessageOptions)
-
-FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict(
-  DESCRIPTOR = _FIELDOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions)
-  ))
-_sym_db.RegisterMessage(FieldOptions)
-
-OneofOptions = _reflection.GeneratedProtocolMessageType('OneofOptions', (_message.Message,), dict(
-  DESCRIPTOR = _ONEOFOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.OneofOptions)
-  ))
-_sym_db.RegisterMessage(OneofOptions)
-
-EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict(
-  DESCRIPTOR = _ENUMOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions)
-  ))
-_sym_db.RegisterMessage(EnumOptions)
-
-EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict(
-  DESCRIPTOR = _ENUMVALUEOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions)
-  ))
-_sym_db.RegisterMessage(EnumValueOptions)
-
-ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict(
-  DESCRIPTOR = _SERVICEOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions)
-  ))
-_sym_db.RegisterMessage(ServiceOptions)
-
-MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict(
-  DESCRIPTOR = _METHODOPTIONS,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions)
-  ))
-_sym_db.RegisterMessage(MethodOptions)
-
-UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict(
-
-  NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict(
-    DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART,
-    __module__ = 'google.protobuf.descriptor_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart)
-    ))
-  ,
-  DESCRIPTOR = _UNINTERPRETEDOPTION,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
-  ))
-_sym_db.RegisterMessage(UninterpretedOption)
-_sym_db.RegisterMessage(UninterpretedOption.NamePart)
-
-SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict(
-
-  Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict(
-    DESCRIPTOR = _SOURCECODEINFO_LOCATION,
-    __module__ = 'google.protobuf.descriptor_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location)
-    ))
-  ,
-  DESCRIPTOR = _SOURCECODEINFO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo)
-  ))
-_sym_db.RegisterMessage(SourceCodeInfo)
-_sym_db.RegisterMessage(SourceCodeInfo.Location)
-
-GeneratedCodeInfo = _reflection.GeneratedProtocolMessageType('GeneratedCodeInfo', (_message.Message,), dict(
-
-  Annotation = _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), dict(
-    DESCRIPTOR = _GENERATEDCODEINFO_ANNOTATION,
-    __module__ = 'google.protobuf.descriptor_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo.Annotation)
-    ))
-  ,
-  DESCRIPTOR = _GENERATEDCODEINFO,
-  __module__ = 'google.protobuf.descriptor_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo)
-  ))
-_sym_db.RegisterMessage(GeneratedCodeInfo)
-_sym_db.RegisterMessage(GeneratedCodeInfo.Annotation)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/descriptor_pool.py b/tools/swarming_client/third_party/google/protobuf/descriptor_pool.py
deleted file mode 100644
index 5c055ab..0000000
--- a/tools/swarming_client/third_party/google/protobuf/descriptor_pool.py
+++ /dev/null
@@ -1,814 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides DescriptorPool to use as a container for proto2 descriptors.
-
-The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
-a collection of protocol buffer descriptors for use when dynamically creating
-message types at runtime.
-
-For most applications protocol buffers should be used via modules generated by
-the protocol buffer compiler tool. This should only be used when the type of
-protocol buffers used in an application or library cannot be predetermined.
-
-Below is a straightforward example on how to use this class:
-
-  pool = DescriptorPool()
-  file_descriptor_protos = [ ... ]
-  for file_descriptor_proto in file_descriptor_protos:
-    pool.Add(file_descriptor_proto)
-  my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
-
-The message descriptor can be used in conjunction with the message_factory
-module in order to create a protocol buffer class that can be encoded and
-decoded.
-
-If you want to get a Python class for the specified proto, use the
-helper functions inside google.protobuf.message_factory
-directly instead of this class.
-"""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-from google.protobuf import descriptor
-from google.protobuf import descriptor_database
-from google.protobuf import text_encoding
-
-
-_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS
-
-
-def _NormalizeFullyQualifiedName(name):
-  """Remove leading period from fully-qualified type name.
-
-  Due to b/13860351 in descriptor_database.py, types in the root namespace are
-  generated with a leading period. This function removes that prefix.
-
-  Args:
-    name: A str, the fully-qualified symbol name.
-
-  Returns:
-    A str, the normalized fully-qualified symbol name.
-  """
-  return name.lstrip('.')
-
-
-class DescriptorPool(object):
-  """A collection of protobufs dynamically constructed by descriptor protos."""
-
-  if _USE_C_DESCRIPTORS:
-
-    def __new__(cls, descriptor_db=None):
-      # pylint: disable=protected-access
-      return descriptor._message.DescriptorPool(descriptor_db)
-
-  def __init__(self, descriptor_db=None):
-    """Initializes a Pool of proto buffs.
-
-    The descriptor_db argument to the constructor is provided to allow
-    specialized file descriptor proto lookup code to be triggered on demand. An
-    example would be an implementation which will read and compile a file
-    specified in a call to FindFileByName() and not require the call to Add()
-    at all. Results from this database will be cached internally here as well.
-
-    Args:
-      descriptor_db: A secondary source of file descriptors.
-    """
-
-    self._internal_db = descriptor_database.DescriptorDatabase()
-    self._descriptor_db = descriptor_db
-    self._descriptors = {}
-    self._enum_descriptors = {}
-    self._file_descriptors = {}
-
-  def Add(self, file_desc_proto):
-    """Adds the FileDescriptorProto and its types to this pool.
-
-    Args:
-      file_desc_proto: The FileDescriptorProto to add.
-    """
-
-    self._internal_db.Add(file_desc_proto)
-
-  def AddSerializedFile(self, serialized_file_desc_proto):
-    """Adds the FileDescriptorProto and its types to this pool.
-
-    Args:
-      serialized_file_desc_proto: A bytes string, serialization of the
-        FileDescriptorProto to add.
-    """
-
-    # pylint: disable=g-import-not-at-top
-    from google.protobuf import descriptor_pb2
-    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
-        serialized_file_desc_proto)
-    self.Add(file_desc_proto)
-
-  def AddDescriptor(self, desc):
-    """Adds a Descriptor to the pool, non-recursively.
-
-    If the Descriptor contains nested messages or enums, the caller must
-    explicitly register them. This method also registers the FileDescriptor
-    associated with the message.
-
-    Args:
-      desc: A Descriptor.
-    """
-    if not isinstance(desc, descriptor.Descriptor):
-      raise TypeError('Expected instance of descriptor.Descriptor.')
-
-    self._descriptors[desc.full_name] = desc
-    self.AddFileDescriptor(desc.file)
-
-  def AddEnumDescriptor(self, enum_desc):
-    """Adds an EnumDescriptor to the pool.
-
-    This method also registers the FileDescriptor associated with the message.
-
-    Args:
-      enum_desc: An EnumDescriptor.
-    """
-
-    if not isinstance(enum_desc, descriptor.EnumDescriptor):
-      raise TypeError('Expected instance of descriptor.EnumDescriptor.')
-
-    self._enum_descriptors[enum_desc.full_name] = enum_desc
-    self.AddFileDescriptor(enum_desc.file)
-
-  def AddFileDescriptor(self, file_desc):
-    """Adds a FileDescriptor to the pool, non-recursively.
-
-    If the FileDescriptor contains messages or enums, the caller must explicitly
-    register them.
-
-    Args:
-      file_desc: A FileDescriptor.
-    """
-
-    if not isinstance(file_desc, descriptor.FileDescriptor):
-      raise TypeError('Expected instance of descriptor.FileDescriptor.')
-    self._file_descriptors[file_desc.name] = file_desc
-
-  def FindFileByName(self, file_name):
-    """Gets a FileDescriptor by file name.
-
-    Args:
-      file_name: The path to the file to get a descriptor for.
-
-    Returns:
-      A FileDescriptor for the named file.
-
-    Raises:
-      KeyError: if the file can not be found in the pool.
-    """
-
-    try:
-      return self._file_descriptors[file_name]
-    except KeyError:
-      pass
-
-    try:
-      file_proto = self._internal_db.FindFileByName(file_name)
-    except KeyError as error:
-      if self._descriptor_db:
-        file_proto = self._descriptor_db.FindFileByName(file_name)
-      else:
-        raise error
-    if not file_proto:
-      raise KeyError('Cannot find a file named %s' % file_name)
-    return self._ConvertFileProtoToFileDescriptor(file_proto)
-
-  def FindFileContainingSymbol(self, symbol):
-    """Gets the FileDescriptor for the file containing the specified symbol.
-
-    Args:
-      symbol: The name of the symbol to search for.
-
-    Returns:
-      A FileDescriptor that contains the specified symbol.
-
-    Raises:
-      KeyError: if the file can not be found in the pool.
-    """
-
-    symbol = _NormalizeFullyQualifiedName(symbol)
-    try:
-      return self._descriptors[symbol].file
-    except KeyError:
-      pass
-
-    try:
-      return self._enum_descriptors[symbol].file
-    except KeyError:
-      pass
-
-    try:
-      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
-    except KeyError as error:
-      if self._descriptor_db:
-        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
-      else:
-        raise error
-    if not file_proto:
-      raise KeyError('Cannot find a file containing %s' % symbol)
-    return self._ConvertFileProtoToFileDescriptor(file_proto)
-
-  def FindMessageTypeByName(self, full_name):
-    """Loads the named descriptor from the pool.
-
-    Args:
-      full_name: The full name of the descriptor to load.
-
-    Returns:
-      The descriptor for the named type.
-    """
-
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    if full_name not in self._descriptors:
-      self.FindFileContainingSymbol(full_name)
-    return self._descriptors[full_name]
-
-  def FindEnumTypeByName(self, full_name):
-    """Loads the named enum descriptor from the pool.
-
-    Args:
-      full_name: The full name of the enum descriptor to load.
-
-    Returns:
-      The enum descriptor for the named type.
-    """
-
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    if full_name not in self._enum_descriptors:
-      self.FindFileContainingSymbol(full_name)
-    return self._enum_descriptors[full_name]
-
-  def FindFieldByName(self, full_name):
-    """Loads the named field descriptor from the pool.
-
-    Args:
-      full_name: The full name of the field descriptor to load.
-
-    Returns:
-      The field descriptor for the named field.
-    """
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    message_name, _, field_name = full_name.rpartition('.')
-    message_descriptor = self.FindMessageTypeByName(message_name)
-    return message_descriptor.fields_by_name[field_name]
-
-  def FindExtensionByName(self, full_name):
-    """Loads the named extension descriptor from the pool.
-
-    Args:
-      full_name: The full name of the extension descriptor to load.
-
-    Returns:
-      A FieldDescriptor, describing the named extension.
-    """
-    full_name = _NormalizeFullyQualifiedName(full_name)
-    message_name, _, extension_name = full_name.rpartition('.')
-    try:
-      # Most extensions are nested inside a message.
-      scope = self.FindMessageTypeByName(message_name)
-    except KeyError:
-      # Some extensions are defined at file scope.
-      scope = self.FindFileContainingSymbol(full_name)
-    return scope.extensions_by_name[extension_name]
-
-  def _ConvertFileProtoToFileDescriptor(self, file_proto):
-    """Creates a FileDescriptor from a proto or returns a cached copy.
-
-    This method also has the side effect of loading all the symbols found in
-    the file into the appropriate dictionaries in the pool.
-
-    Args:
-      file_proto: The proto to convert.
-
-    Returns:
-      A FileDescriptor matching the passed in proto.
-    """
-
-    if file_proto.name not in self._file_descriptors:
-      built_deps = list(self._GetDeps(file_proto.dependency))
-      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
-      public_deps = [direct_deps[i] for i in file_proto.public_dependency]
-
-      file_descriptor = descriptor.FileDescriptor(
-          pool=self,
-          name=file_proto.name,
-          package=file_proto.package,
-          syntax=file_proto.syntax,
-          options=file_proto.options,
-          serialized_pb=file_proto.SerializeToString(),
-          dependencies=direct_deps,
-          public_dependencies=public_deps)
-      if _USE_C_DESCRIPTORS:
-        # When using C++ descriptors, all objects defined in the file were added
-        # to the C++ database when the FileDescriptor was built above.
-        # Just add them to this descriptor pool.
-        def _AddMessageDescriptor(message_desc):
-          self._descriptors[message_desc.full_name] = message_desc
-          for nested in message_desc.nested_types:
-            _AddMessageDescriptor(nested)
-          for enum_type in message_desc.enum_types:
-            _AddEnumDescriptor(enum_type)
-        def _AddEnumDescriptor(enum_desc):
-          self._enum_descriptors[enum_desc.full_name] = enum_desc
-        for message_type in file_descriptor.message_types_by_name.values():
-          _AddMessageDescriptor(message_type)
-        for enum_type in file_descriptor.enum_types_by_name.values():
-          _AddEnumDescriptor(enum_type)
-      else:
-        scope = {}
-
-        # This loop extracts all the message and enum types from all the
-        # dependencies of the file_proto. This is necessary to create the
-        # scope of available message types when defining the passed in
-        # file proto.
-        for dependency in built_deps:
-          scope.update(self._ExtractSymbols(
-              dependency.message_types_by_name.values()))
-          scope.update((_PrefixWithDot(enum.full_name), enum)
-                       for enum in dependency.enum_types_by_name.values())
-
-        for message_type in file_proto.message_type:
-          message_desc = self._ConvertMessageDescriptor(
-              message_type, file_proto.package, file_descriptor, scope,
-              file_proto.syntax)
-          file_descriptor.message_types_by_name[message_desc.name] = (
-              message_desc)
-
-        for enum_type in file_proto.enum_type:
-          file_descriptor.enum_types_by_name[enum_type.name] = (
-              self._ConvertEnumDescriptor(enum_type, file_proto.package,
-                                          file_descriptor, None, scope))
-
-        for index, extension_proto in enumerate(file_proto.extension):
-          extension_desc = self._MakeFieldDescriptor(
-              extension_proto, file_proto.package, index, is_extension=True)
-          extension_desc.containing_type = self._GetTypeFromScope(
-              file_descriptor.package, extension_proto.extendee, scope)
-          self._SetFieldType(extension_proto, extension_desc,
-                            file_descriptor.package, scope)
-          file_descriptor.extensions_by_name[extension_desc.name] = (
-              extension_desc)
-
-        for desc_proto in file_proto.message_type:
-          self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
-
-        if file_proto.package:
-          desc_proto_prefix = _PrefixWithDot(file_proto.package)
-        else:
-          desc_proto_prefix = ''
-
-        for desc_proto in file_proto.message_type:
-          desc = self._GetTypeFromScope(
-              desc_proto_prefix, desc_proto.name, scope)
-          file_descriptor.message_types_by_name[desc_proto.name] = desc
-
-        for index, service_proto in enumerate(file_proto.service):
-          file_descriptor.services_by_name[service_proto.name] = (
-              self._MakeServiceDescriptor(service_proto, index, scope,
-                                          file_proto.package, file_descriptor))
-
-      self.Add(file_proto)
-      self._file_descriptors[file_proto.name] = file_descriptor
-
-    return self._file_descriptors[file_proto.name]
-
-  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
-                                scope=None, syntax=None):
-    """Adds the proto to the pool in the specified package.
-
-    Args:
-      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
-      package: The package the proto should be located in.
-      file_desc: The file containing this message.
-      scope: Dict mapping short and full symbols to message and enum types.
-
-    Returns:
-      The added descriptor.
-    """
-
-    if package:
-      desc_name = '.'.join((package, desc_proto.name))
-    else:
-      desc_name = desc_proto.name
-
-    if file_desc is None:
-      file_name = None
-    else:
-      file_name = file_desc.name
-
-    if scope is None:
-      scope = {}
-
-    nested = [
-        self._ConvertMessageDescriptor(
-            nested, desc_name, file_desc, scope, syntax)
-        for nested in desc_proto.nested_type]
-    enums = [
-        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
-        for enum in desc_proto.enum_type]
-    fields = [self._MakeFieldDescriptor(field, desc_name, index)
-              for index, field in enumerate(desc_proto.field)]
-    extensions = [
-        self._MakeFieldDescriptor(extension, desc_name, index,
-                                  is_extension=True)
-        for index, extension in enumerate(desc_proto.extension)]
-    oneofs = [
-        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
-                                   index, None, [], desc.options)
-        for index, desc in enumerate(desc_proto.oneof_decl)]
-    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
-    if extension_ranges:
-      is_extendable = True
-    else:
-      is_extendable = False
-    desc = descriptor.Descriptor(
-        name=desc_proto.name,
-        full_name=desc_name,
-        filename=file_name,
-        containing_type=None,
-        fields=fields,
-        oneofs=oneofs,
-        nested_types=nested,
-        enum_types=enums,
-        extensions=extensions,
-        options=desc_proto.options,
-        is_extendable=is_extendable,
-        extension_ranges=extension_ranges,
-        file=file_desc,
-        serialized_start=None,
-        serialized_end=None,
-        syntax=syntax)
-    for nested in desc.nested_types:
-      nested.containing_type = desc
-    for enum in desc.enum_types:
-      enum.containing_type = desc
-    for field_index, field_desc in enumerate(desc_proto.field):
-      if field_desc.HasField('oneof_index'):
-        oneof_index = field_desc.oneof_index
-        oneofs[oneof_index].fields.append(fields[field_index])
-        fields[field_index].containing_oneof = oneofs[oneof_index]
-
-    scope[_PrefixWithDot(desc_name)] = desc
-    self._descriptors[desc_name] = desc
-    return desc
-
-  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
-                             containing_type=None, scope=None):
-    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
-
-    Args:
-      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
-      package: Optional package name for the new message EnumDescriptor.
-      file_desc: The file containing the enum descriptor.
-      containing_type: The type containing this enum.
-      scope: Scope containing available types.
-
-    Returns:
-      The added descriptor
-    """
-
-    if package:
-      enum_name = '.'.join((package, enum_proto.name))
-    else:
-      enum_name = enum_proto.name
-
-    if file_desc is None:
-      file_name = None
-    else:
-      file_name = file_desc.name
-
-    values = [self._MakeEnumValueDescriptor(value, index)
-              for index, value in enumerate(enum_proto.value)]
-    desc = descriptor.EnumDescriptor(name=enum_proto.name,
-                                     full_name=enum_name,
-                                     filename=file_name,
-                                     file=file_desc,
-                                     values=values,
-                                     containing_type=containing_type,
-                                     options=enum_proto.options)
-    scope['.%s' % enum_name] = desc
-    self._enum_descriptors[enum_name] = desc
-    return desc
-
-  def _MakeFieldDescriptor(self, field_proto, message_name, index,
-                           is_extension=False):
-    """Creates a field descriptor from a FieldDescriptorProto.
-
-    For message and enum type fields, this method will do a look up
-    in the pool for the appropriate descriptor for that type. If it
-    is unavailable, it will fall back to the _source function to
-    create it. If this type is still unavailable, construction will
-    fail.
-
-    Args:
-      field_proto: The proto describing the field.
-      message_name: The name of the containing message.
-      index: Index of the field
-      is_extension: Indication that this field is for an extension.
-
-    Returns:
-      An initialized FieldDescriptor object
-    """
-
-    if message_name:
-      full_name = '.'.join((message_name, field_proto.name))
-    else:
-      full_name = field_proto.name
-
-    return descriptor.FieldDescriptor(
-        name=field_proto.name,
-        full_name=full_name,
-        index=index,
-        number=field_proto.number,
-        type=field_proto.type,
-        cpp_type=None,
-        message_type=None,
-        enum_type=None,
-        containing_type=None,
-        label=field_proto.label,
-        has_default_value=False,
-        default_value=None,
-        is_extension=is_extension,
-        extension_scope=None,
-        options=field_proto.options)
-
-  def _SetAllFieldTypes(self, package, desc_proto, scope):
-    """Sets all the descriptor's fields's types.
-
-    This method also sets the containing types on any extensions.
-
-    Args:
-      package: The current package of desc_proto.
-      desc_proto: The message descriptor to update.
-      scope: Enclosing scope of available types.
-    """
-
-    package = _PrefixWithDot(package)
-
-    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
-
-    if package == '.':
-      nested_package = _PrefixWithDot(desc_proto.name)
-    else:
-      nested_package = '.'.join([package, desc_proto.name])
-
-    for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
-      self._SetFieldType(field_proto, field_desc, nested_package, scope)
-
-    for extension_proto, extension_desc in (
-        zip(desc_proto.extension, main_desc.extensions)):
-      extension_desc.containing_type = self._GetTypeFromScope(
-          nested_package, extension_proto.extendee, scope)
-      self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
-
-    for nested_type in desc_proto.nested_type:
-      self._SetAllFieldTypes(nested_package, nested_type, scope)
-
-  def _SetFieldType(self, field_proto, field_desc, package, scope):
-    """Sets the field's type, cpp_type, message_type and enum_type.
-
-    Args:
-      field_proto: Data about the field in proto format.
-      field_desc: The descriptor to modiy.
-      package: The package the field's container is in.
-      scope: Enclosing scope of available types.
-    """
-    if field_proto.type_name:
-      desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
-    else:
-      desc = None
-
-    if not field_proto.HasField('type'):
-      if isinstance(desc, descriptor.Descriptor):
-        field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
-      else:
-        field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
-
-    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
-        field_proto.type)
-
-    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
-        or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
-      field_desc.message_type = desc
-
-    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
-      field_desc.enum_type = desc
-
-    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-      field_desc.has_default_value = False
-      field_desc.default_value = []
-    elif field_proto.HasField('default_value'):
-      field_desc.has_default_value = True
-      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
-          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
-        field_desc.default_value = float(field_proto.default_value)
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
-        field_desc.default_value = field_proto.default_value
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
-        field_desc.default_value = field_proto.default_value.lower() == 'true'
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
-        field_desc.default_value = field_desc.enum_type.values_by_name[
-            field_proto.default_value].number
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
-        field_desc.default_value = text_encoding.CUnescape(
-            field_proto.default_value)
-      else:
-        # All other types are of the "int" type.
-        field_desc.default_value = int(field_proto.default_value)
-    else:
-      field_desc.has_default_value = False
-      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
-          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
-        field_desc.default_value = 0.0
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
-        field_desc.default_value = u''
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
-        field_desc.default_value = False
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
-        field_desc.default_value = field_desc.enum_type.values[0].number
-      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
-        field_desc.default_value = b''
-      else:
-        # All other types are of the "int" type.
-        field_desc.default_value = 0
-
-    field_desc.type = field_proto.type
-
-  def _MakeEnumValueDescriptor(self, value_proto, index):
-    """Creates a enum value descriptor object from a enum value proto.
-
-    Args:
-      value_proto: The proto describing the enum value.
-      index: The index of the enum value.
-
-    Returns:
-      An initialized EnumValueDescriptor object.
-    """
-
-    return descriptor.EnumValueDescriptor(
-        name=value_proto.name,
-        index=index,
-        number=value_proto.number,
-        options=value_proto.options,
-        type=None)
-
-  def _MakeServiceDescriptor(self, service_proto, service_index, scope,
-                             package, file_desc):
-    """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
-
-    Args:
-      service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
-      service_index: The index of the service in the File.
-      scope: Dict mapping short and full symbols to message and enum types.
-      package: Optional package name for the new message EnumDescriptor.
-      file_desc: The file containing the service descriptor.
-
-    Returns:
-      The added descriptor.
-    """
-
-    if package:
-      service_name = '.'.join((package, service_proto.name))
-    else:
-      service_name = service_proto.name
-
-    methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
-                                          scope, index)
-               for index, method_proto in enumerate(service_proto.method)]
-    desc = descriptor.ServiceDescriptor(name=service_proto.name,
-                                        full_name=service_name,
-                                        index=service_index,
-                                        methods=methods,
-                                        options=service_proto.options,
-                                        file=file_desc)
-    return desc
-
-  def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
-                            index):
-    """Creates a method descriptor from a MethodDescriptorProto.
-
-    Args:
-      method_proto: The proto describing the method.
-      service_name: The name of the containing service.
-      package: Optional package name to look up for types.
-      scope: Scope containing available types.
-      index: Index of the method in the service.
-
-    Returns:
-      An initialized MethodDescriptor object.
-    """
-    full_name = '.'.join((service_name, method_proto.name))
-    input_type = self._GetTypeFromScope(
-        package, method_proto.input_type, scope)
-    output_type = self._GetTypeFromScope(
-        package, method_proto.output_type, scope)
-    return descriptor.MethodDescriptor(name=method_proto.name,
-                                       full_name=full_name,
-                                       index=index,
-                                       containing_service=None,
-                                       input_type=input_type,
-                                       output_type=output_type,
-                                       options=method_proto.options)
-
-  def _ExtractSymbols(self, descriptors):
-    """Pulls out all the symbols from descriptor protos.
-
-    Args:
-      descriptors: The messages to extract descriptors from.
-    Yields:
-      A two element tuple of the type name and descriptor object.
-    """
-
-    for desc in descriptors:
-      yield (_PrefixWithDot(desc.full_name), desc)
-      for symbol in self._ExtractSymbols(desc.nested_types):
-        yield symbol
-      for enum in desc.enum_types:
-        yield (_PrefixWithDot(enum.full_name), enum)
-
-  def _GetDeps(self, dependencies):
-    """Recursively finds dependencies for file protos.
-
-    Args:
-      dependencies: The names of the files being depended on.
-
-    Yields:
-      Each direct and indirect dependency.
-    """
-
-    for dependency in dependencies:
-      dep_desc = self.FindFileByName(dependency)
-      yield dep_desc
-      for parent_dep in dep_desc.dependencies:
-        yield parent_dep
-
-  def _GetTypeFromScope(self, package, type_name, scope):
-    """Finds a given type name in the current scope.
-
-    Args:
-      package: The package the proto should be located in.
-      type_name: The name of the type to be found in the scope.
-      scope: Dict mapping short and full symbols to message and enum types.
-
-    Returns:
-      The descriptor for the requested type.
-    """
-    if type_name not in scope:
-      components = _PrefixWithDot(package).split('.')
-      while components:
-        possible_match = '.'.join(components + [type_name])
-        if possible_match in scope:
-          type_name = possible_match
-          break
-        else:
-          components.pop(-1)
-    return scope[type_name]
-
-
-def _PrefixWithDot(name):
-  return name if name.startswith('.') else '.%s' % name
-
-
-if _USE_C_DESCRIPTORS:
-  # TODO(amauryfa): This pool could be constructed from Python code, when we
-  # support a flag like 'use_cpp_generated_pool=True'.
-  # pylint: disable=protected-access
-  _DEFAULT = descriptor._message.default_pool
-else:
-  _DEFAULT = DescriptorPool()
-
-
-def Default():
-  return _DEFAULT
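The deleted descriptor_pool.py above documents its intended use in the module docstring: Add() each FileDescriptorProto to a pool, then resolve types with FindMessageTypeByName() and build classes via message_factory. The following is a minimal sketch of that flow, not part of the patch; it assumes the standalone protobuf distribution of this era (MessageFactory.GetPrototype), and the demo.proto / demo.Ping names are hypothetical.

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message_factory

pool = descriptor_pool.DescriptorPool()

# Hypothetical file proto describing a single message demo.Ping with one string field.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'demo.proto'
file_proto.package = 'demo'
msg = file_proto.message_type.add()
msg.name = 'Ping'
field = msg.field.add()
field.name = 'payload'
field.number = 1
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

pool.Add(file_proto)                                 # register with the pool's internal DescriptorDatabase
ping_desc = pool.FindMessageTypeByName('demo.Ping')  # lazily converts the file proto into Descriptors

# message_factory turns the descriptor into a concrete, usable message class.
Ping = message_factory.MessageFactory(pool).GetPrototype(ping_desc)
print(Ping(payload='hello'))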
diff --git a/tools/swarming_client/third_party/google/protobuf/duration_pb2.py b/tools/swarming_client/third_party/google/protobuf/duration_pb2.py
deleted file mode 100644
index f28b667..0000000
--- a/tools/swarming_client/third_party/google/protobuf/duration_pb2.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/duration.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/duration.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42|\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z*github.com/golang/protobuf/ptypes/duration\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_DURATION = _descriptor.Descriptor(
-  name='Duration',
-  full_name='google.protobuf.Duration',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='seconds', full_name='google.protobuf.Duration.seconds', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nanos', full_name='google.protobuf.Duration.nanos', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=51,
-  serialized_end=93,
-)
-
-DESCRIPTOR.message_types_by_name['Duration'] = _DURATION
-
-Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict(
-  DESCRIPTOR = _DURATION,
-  __module__ = 'google.protobuf.duration_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Duration)
-  ))
-_sym_db.RegisterMessage(Duration)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rDurationProtoP\001Z*github.com/golang/protobuf/ptypes/duration\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/empty_pb2.py b/tools/swarming_client/third_party/google/protobuf/empty_pb2.py
deleted file mode 100644
index fae44a9..0000000
--- a/tools/swarming_client/third_party/google/protobuf/empty_pb2.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/empty.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/empty.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyBy\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z\'github.com/golang/protobuf/ptypes/empty\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_EMPTY = _descriptor.Descriptor(
-  name='Empty',
-  full_name='google.protobuf.Empty',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=48,
-  serialized_end=55,
-)
-
-DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
-
-Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), dict(
-  DESCRIPTOR = _EMPTY,
-  __module__ = 'google.protobuf.empty_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Empty)
-  ))
-_sym_db.RegisterMessage(Empty)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\nEmptyProtoP\001Z\'github.com/golang/protobuf/ptypes/empty\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/field_mask_pb2.py b/tools/swarming_client/third_party/google/protobuf/field_mask_pb2.py
deleted file mode 100644
index bfda7fc..0000000
--- a/tools/swarming_client/third_party/google/protobuf/field_mask_pb2.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/field_mask.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/field_mask.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tBQ\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_FIELDMASK = _descriptor.Descriptor(
-  name='FieldMask',
-  full_name='google.protobuf.FieldMask',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='paths', full_name='google.protobuf.FieldMask.paths', index=0,
-      number=1, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=53,
-  serialized_end=79,
-)
-
-DESCRIPTOR.message_types_by_name['FieldMask'] = _FIELDMASK
-
-FieldMask = _reflection.GeneratedProtocolMessageType('FieldMask', (_message.Message,), dict(
-  DESCRIPTOR = _FIELDMASK,
-  __module__ = 'google.protobuf.field_mask_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FieldMask)
-  ))
-_sym_db.RegisterMessage(FieldMask)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\016FieldMaskProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/__init__.py b/tools/swarming_client/third_party/google/protobuf/internal/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/_parameterized.py b/tools/swarming_client/third_party/google/protobuf/internal/_parameterized.py
deleted file mode 100644
index 23a78f0..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/_parameterized.py
+++ /dev/null
@@ -1,443 +0,0 @@
-#! /usr/bin/env python
-#
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Adds support for parameterized tests to Python's unittest TestCase class.
-
-A parameterized test is a method in a test case that is invoked with different
-argument tuples.
-
-A simple example:
-
-  class AdditionExample(parameterized.ParameterizedTestCase):
-    @parameterized.Parameters(
-       (1, 2, 3),
-       (4, 5, 9),
-       (1, 1, 3))
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-
-Each invocation is a separate test case and properly isolated just
-like a normal test method, with its own setUp/tearDown cycle. In the
-example above, there are three separate testcases, one of which will
-fail due to an assertion error (1 + 1 != 3).
-
-Parameters for invididual test cases can be tuples (with positional parameters)
-or dictionaries (with named parameters):
-
-  class AdditionExample(parameterized.ParameterizedTestCase):
-    @parameterized.Parameters(
-       {'op1': 1, 'op2': 2, 'result': 3},
-       {'op1': 4, 'op2': 5, 'result': 9},
-    )
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-If a parameterized test fails, the error message will show the
-original test name (which is modified internally) and the arguments
-for the specific invocation, which are part of the string returned by
-the shortDescription() method on test cases.
-
-The id method of the test, used internally by the unittest framework,
-is also modified to show the arguments. To make sure that test names
-stay the same across several invocations, object representations like
-
-  >>> class Foo(object):
-  ...  pass
-  >>> repr(Foo())
-  '<__main__.Foo object at 0x23d8610>'
-
-are turned into '<__main__.Foo>'. For even more descriptive names,
-especially in test logs, you can use the NamedParameters decorator. In
-this case, only tuples are supported, and the first parameters has to
-be a string (or an object that returns an apt name when converted via
-str()):
-
-  class NamedExample(parameterized.ParameterizedTestCase):
-    @parameterized.NamedParameters(
-       ('Normal', 'aa', 'aaa', True),
-       ('EmptyPrefix', '', 'abc', True),
-       ('BothEmpty', '', '', True))
-    def testStartsWith(self, prefix, string, result):
-      self.assertEqual(result, strings.startswith(prefix))
-
-Named tests also have the benefit that they can be run individually
-from the command line:
-
-  $ testmodule.py NamedExample.testStartsWithNormal
-  .
-  --------------------------------------------------------------------
-  Ran 1 test in 0.000s
-
-  OK
-
-Parameterized Classes
-=====================
-If invocation arguments are shared across test methods in a single
-ParameterizedTestCase class, instead of decorating all test methods
-individually, the class itself can be decorated:
-
-  @parameterized.Parameters(
-    (1, 2, 3)
-    (4, 5, 9))
-  class ArithmeticTest(parameterized.ParameterizedTestCase):
-    def testAdd(self, arg1, arg2, result):
-      self.assertEqual(arg1 + arg2, result)
-
-    def testSubtract(self, arg2, arg2, result):
-      self.assertEqual(result - arg1, arg2)
-
-Inputs from Iterables
-=====================
-If parameters should be shared across several test cases, or are dynamically
-created from other sources, a single non-tuple iterable can be passed into
-the decorator. This iterable will be used to obtain the test cases:
-
-  class AdditionExample(parameterized.ParameterizedTestCase):
-    @parameterized.Parameters(
-      c.op1, c.op2, c.result for c in testcases
-    )
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-
-Single-Argument Test Methods
-============================
-If a test method takes only one argument, the single argument does not need to
-be wrapped into a tuple:
-
-  class NegativeNumberExample(parameterized.ParameterizedTestCase):
-    @parameterized.Parameters(
-       -1, -3, -4, -5
-    )
-    def testIsNegative(self, arg):
-      self.assertTrue(IsNegative(arg))
-"""
-
-__author__ = 'tmarek@google.com (Torsten Marek)'
-
-import collections
-import functools
-import re
-import types
-try:
-  import unittest2 as unittest
-except ImportError:
-  import unittest
-import uuid
-
-import six
-
-ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
-_SEPARATOR = uuid.uuid1().hex
-_FIRST_ARG = object()
-_ARGUMENT_REPR = object()
-
-
-def _CleanRepr(obj):
-  return ADDR_RE.sub(r'<\1>', repr(obj))
-
-
-# Helper function formerly from the unittest module, removed from it in
-# Python 2.7.
-def _StrClass(cls):
-  return '%s.%s' % (cls.__module__, cls.__name__)
-
-
-def _NonStringIterable(obj):
-  return (isinstance(obj, collections.Iterable) and not
-          isinstance(obj, six.string_types))
-
-
-def _FormatParameterList(testcase_params):
-  if isinstance(testcase_params, collections.Mapping):
-    return ', '.join('%s=%s' % (argname, _CleanRepr(value))
-                     for argname, value in testcase_params.items())
-  elif _NonStringIterable(testcase_params):
-    return ', '.join(map(_CleanRepr, testcase_params))
-  else:
-    return _FormatParameterList((testcase_params,))
-
-
-class _ParameterizedTestIter(object):
-  """Callable and iterable class for producing new test cases."""
-
-  def __init__(self, test_method, testcases, naming_type):
-    """Returns concrete test functions for a test and a list of parameters.
-
-    The naming_type is used to determine the name of the concrete
-    functions as reported by the unittest framework. If naming_type is
-    _FIRST_ARG, the testcases must be tuples, and the first element must
-    have a string representation that is a valid Python identifier.
-
-    Args:
-      test_method: The decorated test method.
-      testcases: (list of tuple/dict) A list of parameter
-                 tuples/dicts for individual test invocations.
-      naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
-    """
-    self._test_method = test_method
-    self.testcases = testcases
-    self._naming_type = naming_type
-
-  def __call__(self, *args, **kwargs):
-    raise RuntimeError('You appear to be running a parameterized test case '
-                       'without having inherited from parameterized.'
-                       'ParameterizedTestCase. This is bad because none of '
-                       'your test cases are actually being run.')
-
-  def __iter__(self):
-    test_method = self._test_method
-    naming_type = self._naming_type
-
-    def MakeBoundParamTest(testcase_params):
-      @functools.wraps(test_method)
-      def BoundParamTest(self):
-        if isinstance(testcase_params, collections.Mapping):
-          test_method(self, **testcase_params)
-        elif _NonStringIterable(testcase_params):
-          test_method(self, *testcase_params)
-        else:
-          test_method(self, testcase_params)
-
-      if naming_type is _FIRST_ARG:
-        # Signal the metaclass that the name of the test function is unique
-        # and descriptive.
-        BoundParamTest.__x_use_name__ = True
-        BoundParamTest.__name__ += str(testcase_params[0])
-        testcase_params = testcase_params[1:]
-      elif naming_type is _ARGUMENT_REPR:
-        # __x_extra_id__ is used to pass naming information to the __new__
-        # method of TestGeneratorMetaclass.
-        # The metaclass will make sure to create a unique, but nondescriptive
-        # name for this test.
-        BoundParamTest.__x_extra_id__ = '(%s)' % (
-            _FormatParameterList(testcase_params),)
-      else:
-        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
-
-      BoundParamTest.__doc__ = '%s(%s)' % (
-          BoundParamTest.__name__, _FormatParameterList(testcase_params))
-      if test_method.__doc__:
-        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
-      return BoundParamTest
-    return (MakeBoundParamTest(c) for c in self.testcases)
-
-
-def _IsSingletonList(testcases):
-  """True iff testcases contains only a single non-tuple element."""
-  return len(testcases) == 1 and not isinstance(testcases[0], tuple)
-
-
-def _ModifyClass(class_object, testcases, naming_type):
-  assert not getattr(class_object, '_id_suffix', None), (
-      'Cannot add parameters to %s,'
-      ' which already has parameterized methods.' % (class_object,))
-  class_object._id_suffix = id_suffix = {}
-  # We change the size of __dict__ while we iterate over it, 
-  # which Python 3.x will complain about, so use copy().
-  for name, obj in class_object.__dict__.copy().items():
-    if (name.startswith(unittest.TestLoader.testMethodPrefix)
-        and isinstance(obj, types.FunctionType)):
-      delattr(class_object, name)
-      methods = {}
-      _UpdateClassDictForParamTestCase(
-          methods, id_suffix, name,
-          _ParameterizedTestIter(obj, testcases, naming_type))
-      for name, meth in methods.items():
-        setattr(class_object, name, meth)
-
-
-def _ParameterDecorator(naming_type, testcases):
-  """Implementation of the parameterization decorators.
-
-  Args:
-    naming_type: The naming type.
-    testcases: Testcase parameters.
-
-  Returns:
-    A function for modifying the decorated object.
-  """
-  def _Apply(obj):
-    if isinstance(obj, type):
-      _ModifyClass(
-          obj,
-          list(testcases) if not isinstance(testcases, collections.Sequence)
-          else testcases,
-          naming_type)
-      return obj
-    else:
-      return _ParameterizedTestIter(obj, testcases, naming_type)
-
-  if _IsSingletonList(testcases):
-    assert _NonStringIterable(testcases[0]), (
-        'Single parameter argument must be a non-string iterable')
-    testcases = testcases[0]
-
-  return _Apply
-
-
-def Parameters(*testcases):
-  """A decorator for creating parameterized tests.
-
-  See the module docstring for a usage example.
-  Args:
-    *testcases: Parameters for the decorated method, either a single
-                iterable, or a list of tuples/dicts/objects (for tests
-                with only one argument).
-
-  Returns:
-     A test generator to be handled by TestGeneratorMetaclass.
-  """
-  return _ParameterDecorator(_ARGUMENT_REPR, testcases)
-
-
-def NamedParameters(*testcases):
-  """A decorator for creating parameterized tests.
-
-  See the module docstring for a usage example. The first element of
-  each parameter tuple should be a string and will be appended to the
-  name of the test method.
-
-  Args:
-    *testcases: Parameters for the decorated method, either a single
-                iterable, or a list of tuples.
-
-  Returns:
-     A test generator to be handled by TestGeneratorMetaclass.
-  """
-  return _ParameterDecorator(_FIRST_ARG, testcases)
-
-
-class TestGeneratorMetaclass(type):
-  """Metaclass for test cases with test generators.
-
-  A test generator is an iterable in a testcase that produces callables. These
-  callables must be single-argument methods. These methods are injected into
-  the class namespace and the original iterable is removed. If the name of the
-  iterable conforms to the test pattern, the injected methods will be picked
-  up as tests by the unittest framework.
-
-  In general, it is supposed to be used in conjunction with the
-  Parameters decorator.
-  """
-
-  def __new__(mcs, class_name, bases, dct):
-    dct['_id_suffix'] = id_suffix = {}
-    for name, obj in dct.items():
-      if (name.startswith(unittest.TestLoader.testMethodPrefix) and
-          _NonStringIterable(obj)):
-        iterator = iter(obj)
-        dct.pop(name)
-        _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
-
-    return type.__new__(mcs, class_name, bases, dct)
-
-
-def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
-  """Adds individual test cases to a dictionary.
-
-  Args:
-    dct: The target dictionary.
-    id_suffix: The dictionary for mapping names to test IDs.
-    name: The original name of the test case.
-    iterator: The iterator generating the individual test cases.
-  """
-  for idx, func in enumerate(iterator):
-    assert callable(func), 'Test generators must yield callables, got %r' % (
-        func,)
-    if getattr(func, '__x_use_name__', False):
-      new_name = func.__name__
-    else:
-      new_name = '%s%s%d' % (name, _SEPARATOR, idx)
-    assert new_name not in dct, (
-        'Name of parameterized test case "%s" not unique' % (new_name,))
-    dct[new_name] = func
-    id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
-
-
-class ParameterizedTestCase(unittest.TestCase):
-  """Base class for test cases using the Parameters decorator."""
-  __metaclass__ = TestGeneratorMetaclass
-
-  def _OriginalName(self):
-    return self._testMethodName.split(_SEPARATOR)[0]
-
-  def __str__(self):
-    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
-
-  def id(self):  # pylint: disable=invalid-name
-    """Returns the descriptive ID of the test.
-
-    This is used internally by the unittesting framework to get a name
-    for the test to be used in reports.
-
-    Returns:
-      The test id.
-    """
-    return '%s.%s%s' % (_StrClass(self.__class__),
-                        self._OriginalName(),
-                        self._id_suffix.get(self._testMethodName, ''))
-
-
-def CoopParameterizedTestCase(other_base_class):
-  """Returns a new base class with a cooperative metaclass base.
-
-  This enables the ParameterizedTestCase to be used in combination
-  with other base classes that have custom metaclasses, such as
-  mox.MoxTestBase.
-
-  Only works with metaclasses that do not override type.__new__.
-
-  Example:
-
-    import google3
-    import mox
-
-    from google3.testing.pybase import parameterized
-
-    class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)):
-      ...
-
-  Args:
-    other_base_class: (class) A test case base class.
-
-  Returns:
-    A new class object.
-  """
-  metaclass = type(
-      'CoopMetaclass',
-      (other_base_class.__metaclass__,
-       TestGeneratorMetaclass), {})
-  return metaclass(
-      'CoopParameterizedTestCase',
-      (other_base_class, ParameterizedTestCase), {})
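A minimal end-to-end sketch of how the pieces above fit together, written in
the style of the module docstring and against the Python 2 unittest flow this
module targets (IsNegative is a hypothetical helper, and the `parameterized`
import name is assumed, matching the docstring examples):

  import unittest
  import parameterized

  def IsNegative(x):
    return x < 0

  class NegativeNumberExample(parameterized.ParameterizedTestCase):

    @parameterized.Parameters(-1, -3, -4)
    def testIsNegative(self, arg):
      # Generated test names are derived from the repr of the arguments.
      self.assertTrue(IsNegative(arg))

    @parameterized.NamedParameters(('Zero', 0), ('Positive', 7))
    def testIsNotNegative(self, arg):
      # The first tuple element ('Zero', 'Positive') is appended to the
      # method name; the remaining elements become the test arguments.
      self.assertFalse(IsNegative(arg))

  if __name__ == '__main__':
    unittest.main()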
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/any_test_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/any_test_pb2.py
deleted file mode 100644
index ded08b6..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/any_test_pb2.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/any_test.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/any_test.proto',
-  package='google.protobuf.internal',
-  syntax='proto3',
-  serialized_pb=_b('\n\'google/protobuf/internal/any_test.proto\x12\x18google.protobuf.internal\x1a\x19google/protobuf/any.proto\"A\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tint_value\x18\x02 \x01(\x05\x62\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_TESTANY = _descriptor.Descriptor(
-  name='TestAny',
-  full_name='google.protobuf.internal.TestAny',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.internal.TestAny.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int_value', full_name='google.protobuf.internal.TestAny.int_value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=96,
-  serialized_end=161,
-)
-
-_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
-
-TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict(
-  DESCRIPTOR = _TESTANY,
-  __module__ = 'google.protobuf.internal.any_test_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny)
-  ))
-_sym_db.RegisterMessage(TestAny)
-
-
-# @@protoc_insertion_point(module_scope)
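For context, a hedged sketch of the Any-wrapping pattern the generated TestAny
message above exists to exercise, using only well-known types that ship with
the protobuf runtime (wrappers_pb2.Int32Value stands in for an arbitrary inner
message):

  from google.protobuf import any_pb2
  from google.protobuf import wrappers_pb2

  inner = wrappers_pb2.Int32Value(value=42)
  holder = any_pb2.Any()
  holder.Pack(inner)                     # records a type URL plus the payload

  unpacked = wrappers_pb2.Int32Value()
  assert holder.Unpack(unpacked)         # True when the type URL matches
  assert unpacked.value == 42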
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/api_implementation.py b/tools/swarming_client/third_party/google/protobuf/internal/api_implementation.py
deleted file mode 100644
index 460a4a6..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/api_implementation.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Determine which implementation of the protobuf API is used in this process.
-"""
-
-import os
-import warnings
-import sys
-
-try:
-  # pylint: disable=g-import-not-at-top
-  from google.protobuf.internal import _api_implementation
-  # The compile-time constants in the _api_implementation module can be used to
-  # switch to a certain implementation of the Python API at build time.
-  _api_version = _api_implementation.api_version
-  _proto_extension_modules_exist_in_build = True
-except ImportError:
-  _api_version = -1  # Unspecified by compiler flags.
-  _proto_extension_modules_exist_in_build = False
-
-if _api_version == 1:
-  raise ValueError('api_version=1 is no longer supported.')
-if _api_version < 0:  # Still unspecified?
-  try:
-    # The presence of this module in a build allows the proto implementation to
-    # be upgraded merely via build deps rather than a compiler flag or the
-    # runtime environment variable.
-    # pylint: disable=g-import-not-at-top
-    from google.protobuf import _use_fast_cpp_protos
-    # Work around a known issue in the classic bootstrap .par import hook.
-    if not _use_fast_cpp_protos:
-      raise ImportError('_use_fast_cpp_protos import succeeded but was None')
-    del _use_fast_cpp_protos
-    _api_version = 2
-  except ImportError:
-    if _proto_extension_modules_exist_in_build:
-      if sys.version_info[0] >= 3:  # Python 3 defaults to C++ impl v2.
-        _api_version = 2
-      # TODO(b/17427486): Make Python 2 default to C++ impl v2.
-
-_default_implementation_type = (
-    'python' if _api_version <= 0 else 'cpp')
-
-# This environment variable can be used to switch to a certain implementation
-# of the Python API, overriding the compile-time constants in the
-# _api_implementation module. Right now only 'python' and 'cpp' are valid
-# values. Any other value will be ignored.
-_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
-                                 _default_implementation_type)
-
-if _implementation_type != 'python':
-  _implementation_type = 'cpp'
-
-if 'PyPy' in sys.version and _implementation_type == 'cpp':
-  warnings.warn('PyPy does not work yet with cpp protocol buffers. '
-                'Falling back to the python implementation.')
-  _implementation_type = 'python'
-
-# This environment variable can be used to switch between the two
-# 'cpp' implementations, overriding the compile-time constants in the
-# _api_implementation module. Right now only '2' is supported. Any other
-# value will cause an error to be raised.
-_implementation_version_str = os.getenv(
-    'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', '2')
-
-if _implementation_version_str != '2':
-  raise ValueError(
-      'unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: "' +
-      _implementation_version_str + '" (supported versions: 2)'
-      )
-
-_implementation_version = int(_implementation_version_str)
-
-
-# Usage of this function is discouraged. Clients shouldn't care which
-# implementation of the API is in use. Note that there is no guarantee
-# that differences between APIs will be maintained.
-# Please don't use this function if possible.
-def Type():
-  return _implementation_type
-
-
-# See comment on 'Type' above.
-def Version():
-  return _implementation_version
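A short, hedged sketch of how client code typically consults this module; the
print statements are illustrative only, and the module itself discourages
branching on the backend unless unavoidable:

  from google.protobuf.internal import api_implementation

  if api_implementation.Type() == 'cpp':
    print('C++ accelerated protobuf backend, version %d'
          % api_implementation.Version())
  else:
    print('pure-Python protobuf backend')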
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/containers.py b/tools/swarming_client/third_party/google/protobuf/internal/containers.py
deleted file mode 100644
index ce46d08..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/containers.py
+++ /dev/null
@@ -1,615 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains container classes to represent different protocol buffer types.
-
-This file defines container classes which represent categories of protocol
-buffer field types which need extra maintenance. Currently these categories
-are:
-  - Repeated scalar fields - These are all repeated fields which aren't
-    composite (e.g. they are of simple types like int32, string, etc).
-  - Repeated composite fields - Repeated fields which are composite. This
-    includes groups and nested messages.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-import collections
-import sys
-
-if sys.version_info[0] < 3:
-  # We would use collections.MutableMapping all the time, but in Python 2 it
-  # doesn't define __slots__.  This causes two significant problems:
-  #
-  # 1. we can't disallow arbitrary attribute assignment, even if our derived
-  #    classes *do* define __slots__.
-  #
-  # 2. we can't safely derive a C type from it without __slots__ defined (the
-  #    interpreter expects to find a dict at tp_dictoffset, which we can't
-  #    robustly provide).  And we don't want an instance dict anyway.
-  #
-  # So this is the Python 2.7 definition of Mapping/MutableMapping functions
-  # verbatim, except that:
-  # 1. We declare __slots__.
-  # 2. We don't declare this as a virtual base class.  The classes defined
-  #    in collections are the interesting base classes, not us.
-  #
-  # Note: deriving from object is critical.  It is the only thing that makes
-  # this a true type, allowing us to derive from it in C++ cleanly and making
-  # __slots__ properly disallow arbitrary element assignment.
-
-  class Mapping(object):
-    __slots__ = ()
-
-    def get(self, key, default=None):
-      try:
-        return self[key]
-      except KeyError:
-        return default
-
-    def __contains__(self, key):
-      try:
-        self[key]
-      except KeyError:
-        return False
-      else:
-        return True
-
-    def iterkeys(self):
-      return iter(self)
-
-    def itervalues(self):
-      for key in self:
-        yield self[key]
-
-    def iteritems(self):
-      for key in self:
-        yield (key, self[key])
-
-    def keys(self):
-      return list(self)
-
-    def items(self):
-      return [(key, self[key]) for key in self]
-
-    def values(self):
-      return [self[key] for key in self]
-
-    # Mappings are not hashable by default, but subclasses can change this
-    __hash__ = None
-
-    def __eq__(self, other):
-      if not isinstance(other, collections.Mapping):
-        return NotImplemented
-      return dict(self.items()) == dict(other.items())
-
-    def __ne__(self, other):
-      return not (self == other)
-
-  class MutableMapping(Mapping):
-    __slots__ = ()
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-      try:
-        value = self[key]
-      except KeyError:
-        if default is self.__marker:
-          raise
-        return default
-      else:
-        del self[key]
-        return value
-
-    def popitem(self):
-      try:
-        key = next(iter(self))
-      except StopIteration:
-        raise KeyError
-      value = self[key]
-      del self[key]
-      return key, value
-
-    def clear(self):
-      try:
-        while True:
-          self.popitem()
-      except KeyError:
-        pass
-
-    def update(*args, **kwds):
-      if len(args) > 2:
-        raise TypeError("update() takes at most 2 positional "
-                        "arguments ({} given)".format(len(args)))
-      elif not args:
-        raise TypeError("update() takes at least 1 argument (0 given)")
-      self = args[0]
-      other = args[1] if len(args) >= 2 else ()
-
-      if isinstance(other, Mapping):
-        for key in other:
-          self[key] = other[key]
-      elif hasattr(other, "keys"):
-        for key in other.keys():
-          self[key] = other[key]
-      else:
-        for key, value in other:
-          self[key] = value
-      for key, value in kwds.items():
-        self[key] = value
-
-    def setdefault(self, key, default=None):
-      try:
-        return self[key]
-      except KeyError:
-        self[key] = default
-      return default
-
-  collections.Mapping.register(Mapping)
-  collections.MutableMapping.register(MutableMapping)
-
-else:
-  # In Python 3 we can just use MutableMapping directly, because it defines
-  # __slots__.
-  MutableMapping = collections.MutableMapping
-
-
-class BaseContainer(object):
-
-  """Base container class."""
-
-  # Minimizes memory usage and disallows assignment to other attributes.
-  __slots__ = ['_message_listener', '_values']
-
-  def __init__(self, message_listener):
-    """
-    Args:
-      message_listener: A MessageListener implementation.
-        The RepeatedScalarFieldContainer will call this object's
-        Modified() method when it is modified.
-    """
-    self._message_listener = message_listener
-    self._values = []
-
-  def __getitem__(self, key):
-    """Retrieves item by the specified key."""
-    return self._values[key]
-
-  def __len__(self):
-    """Returns the number of elements in the container."""
-    return len(self._values)
-
-  def __ne__(self, other):
-    """Checks if another instance isn't equal to this one."""
-    # The concrete classes should define __eq__.
-    return not self == other
-
-  def __hash__(self):
-    raise TypeError('unhashable object')
-
-  def __repr__(self):
-    return repr(self._values)
-
-  def sort(self, *args, **kwargs):
-    # Continue to support the old sort_function keyword argument.
-    # This is expected to be a rare occurrence, so use LBYL to avoid
-    # the overhead of actually catching KeyError.
-    if 'sort_function' in kwargs:
-      kwargs['cmp'] = kwargs.pop('sort_function')
-    self._values.sort(*args, **kwargs)
-
-
-class RepeatedScalarFieldContainer(BaseContainer):
-
-  """Simple, type-checked, list-like container for holding repeated scalars."""
-
-  # Disallows assignment to other attributes.
-  __slots__ = ['_type_checker']
-
-  def __init__(self, message_listener, type_checker):
-    """
-    Args:
-      message_listener: A MessageListener implementation.
-        The RepeatedScalarFieldContainer will call this object's
-        Modified() method when it is modified.
-      type_checker: A type_checkers.ValueChecker instance to run on elements
-        inserted into this container.
-    """
-    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
-    self._type_checker = type_checker
-
-  def append(self, value):
-    """Appends an item to the list. Similar to list.append()."""
-    self._values.append(self._type_checker.CheckValue(value))
-    if not self._message_listener.dirty:
-      self._message_listener.Modified()
-
-  def insert(self, key, value):
-    """Inserts the item at the specified position. Similar to list.insert()."""
-    self._values.insert(key, self._type_checker.CheckValue(value))
-    if not self._message_listener.dirty:
-      self._message_listener.Modified()
-
-  def extend(self, elem_seq):
-    """Extends by appending the given iterable. Similar to list.extend()."""
-
-    if elem_seq is None:
-      return
-    try:
-      elem_seq_iter = iter(elem_seq)
-    except TypeError:
-      if not elem_seq:
-        # silently ignore falsy inputs :-/.
-        # TODO(ptucker): Deprecate this behavior. b/18413862
-        return
-      raise
-
-    new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
-    if new_values:
-      self._values.extend(new_values)
-      self._message_listener.Modified()
-
-  def MergeFrom(self, other):
-    """Appends the contents of another repeated field of the same type to this
-    one. We do not check the types of the individual fields.
-    """
-    self._values.extend(other._values)
-    self._message_listener.Modified()
-
-  def remove(self, elem):
-    """Removes an item from the list. Similar to list.remove()."""
-    self._values.remove(elem)
-    self._message_listener.Modified()
-
-  def pop(self, key=-1):
-    """Removes and returns an item at a given index. Similar to list.pop()."""
-    value = self._values[key]
-    self.__delitem__(key)
-    return value
-
-  def __setitem__(self, key, value):
-    """Sets the item on the specified position."""
-    if isinstance(key, slice):  # PY3
-      if key.step is not None:
-        raise ValueError('Extended slices not supported')
-      self.__setslice__(key.start, key.stop, value)
-    else:
-      self._values[key] = self._type_checker.CheckValue(value)
-      self._message_listener.Modified()
-
-  def __getslice__(self, start, stop):
-    """Retrieves the subset of items from between the specified indices."""
-    return self._values[start:stop]
-
-  def __setslice__(self, start, stop, values):
-    """Sets the subset of items from between the specified indices."""
-    new_values = []
-    for value in values:
-      new_values.append(self._type_checker.CheckValue(value))
-    self._values[start:stop] = new_values
-    self._message_listener.Modified()
-
-  def __delitem__(self, key):
-    """Deletes the item at the specified position."""
-    del self._values[key]
-    self._message_listener.Modified()
-
-  def __delslice__(self, start, stop):
-    """Deletes the subset of items from between the specified indices."""
-    del self._values[start:stop]
-    self._message_listener.Modified()
-
-  def __eq__(self, other):
-    """Compares the current instance with another one."""
-    if self is other:
-      return True
-    # Special case for the same type which should be common and fast.
-    if isinstance(other, self.__class__):
-      return other._values == self._values
-    # We are presumably comparing against some other sequence type.
-    return other == self._values
-
-collections.MutableSequence.register(BaseContainer)
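A hedged illustration of the type-checked, listener-notifying behaviour
described above, using throwaway stand-ins for the message listener and value
checker (these are not real protobuf types; real containers receive them from
the generated message machinery):

  class _FakeListener(object):
    dirty = False
    def Modified(self):
      self.dirty = True

  class _IntChecker(object):
    def CheckValue(self, value):
      if not isinstance(value, int):
        raise TypeError('%r has type %s, but expected int'
                        % (value, type(value)))
      return value

  repeated = RepeatedScalarFieldContainer(_FakeListener(), _IntChecker())
  repeated.extend([1, 2, 3])        # type-checks each element, marks dirty
  assert repeated[:] == [1, 2, 3]   # slicing returns a plain list copy
  try:
    repeated.append('not an int')   # rejected by the value checker
  except TypeError:
    pass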
-
-
-class RepeatedCompositeFieldContainer(BaseContainer):
-
-  """Simple, list-like container for holding repeated composite fields."""
-
-  # Disallows assignment to other attributes.
-  __slots__ = ['_message_descriptor']
-
-  def __init__(self, message_listener, message_descriptor):
-    """
-    Note that we pass in a descriptor instead of the generated class directly,
-    since at the time we construct a _RepeatedCompositeFieldContainer we
-    haven't yet necessarily initialized the type that will be contained in the
-    container.
-
-    Args:
-      message_listener: A MessageListener implementation.
-        The RepeatedCompositeFieldContainer will call this object's
-        Modified() method when it is modified.
-      message_descriptor: A Descriptor instance describing the protocol type
-        that should be present in this container.  We'll use the
-        _concrete_class field of this descriptor when the client calls add().
-    """
-    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
-    self._message_descriptor = message_descriptor
-
-  def add(self, **kwargs):
-    """Adds a new element at the end of the list and returns it. Keyword
-    arguments may be used to initialize the element.
-    """
-    new_element = self._message_descriptor._concrete_class(**kwargs)
-    new_element._SetListener(self._message_listener)
-    self._values.append(new_element)
-    if not self._message_listener.dirty:
-      self._message_listener.Modified()
-    return new_element
-
-  def extend(self, elem_seq):
-    """Extends by appending the given sequence of elements of the same type
-    as this one, copying each individual message.
-    """
-    message_class = self._message_descriptor._concrete_class
-    listener = self._message_listener
-    values = self._values
-    for message in elem_seq:
-      new_element = message_class()
-      new_element._SetListener(listener)
-      new_element.MergeFrom(message)
-      values.append(new_element)
-    listener.Modified()
-
-  def MergeFrom(self, other):
-    """Appends the contents of another repeated field of the same type to this
-    one, copying each individual message.
-    """
-    self.extend(other._values)
-
-  def remove(self, elem):
-    """Removes an item from the list. Similar to list.remove()."""
-    self._values.remove(elem)
-    self._message_listener.Modified()
-
-  def pop(self, key=-1):
-    """Removes and returns an item at a given index. Similar to list.pop()."""
-    value = self._values[key]
-    self.__delitem__(key)
-    return value
-
-  def __getslice__(self, start, stop):
-    """Retrieves the subset of items from between the specified indices."""
-    return self._values[start:stop]
-
-  def __delitem__(self, key):
-    """Deletes the item at the specified position."""
-    del self._values[key]
-    self._message_listener.Modified()
-
-  def __delslice__(self, start, stop):
-    """Deletes the subset of items from between the specified indices."""
-    del self._values[start:stop]
-    self._message_listener.Modified()
-
-  def __eq__(self, other):
-    """Compares the current instance with another one."""
-    if self is other:
-      return True
-    if not isinstance(other, self.__class__):
-      raise TypeError('Can only compare repeated composite fields against '
-                      'other repeated composite fields.')
-    return self._values == other._values
-
-
-class ScalarMap(MutableMapping):
-
-  """Simple, type-checked, dict-like container for holding repeated scalars."""
-
-  # Disallows assignment to other attributes.
-  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener']
-
-  def __init__(self, message_listener, key_checker, value_checker):
-    """
-    Args:
-      message_listener: A MessageListener implementation.
-        The ScalarMap will call this object's Modified() method when it
-        is modified.
-      key_checker: A type_checkers.ValueChecker instance to run on keys
-        inserted into this container.
-      value_checker: A type_checkers.ValueChecker instance to run on values
-        inserted into this container.
-    """
-    self._message_listener = message_listener
-    self._key_checker = key_checker
-    self._value_checker = value_checker
-    self._values = {}
-
-  def __getitem__(self, key):
-    try:
-      return self._values[key]
-    except KeyError:
-      key = self._key_checker.CheckValue(key)
-      val = self._value_checker.DefaultValue()
-      self._values[key] = val
-      return val
-
-  def __contains__(self, item):
-    # We check the key's type to match the strong-typing flavor of the API.
-    # Also this makes it easier to match the behavior of the C++ implementation.
-    self._key_checker.CheckValue(item)
-    return item in self._values
-
-  # We need to override this explicitly, because our defaultdict-like behavior
-  # will make the default implementation (from our base class) always insert
-  # the key.
-  def get(self, key, default=None):
-    if key in self:
-      return self[key]
-    else:
-      return default
-
-  def __setitem__(self, key, value):
-    checked_key = self._key_checker.CheckValue(key)
-    checked_value = self._value_checker.CheckValue(value)
-    self._values[checked_key] = checked_value
-    self._message_listener.Modified()
-
-  def __delitem__(self, key):
-    del self._values[key]
-    self._message_listener.Modified()
-
-  def __len__(self):
-    return len(self._values)
-
-  def __iter__(self):
-    return iter(self._values)
-
-  def __repr__(self):
-    return repr(self._values)
-
-  def MergeFrom(self, other):
-    self._values.update(other._values)
-    self._message_listener.Modified()
-
-  def InvalidateIterators(self):
-    # It appears that the only way to reliably invalidate iterators to
-    # self._values is to ensure that its size changes.
-    original = self._values
-    self._values = original.copy()
-    original[None] = None
-
-  # This is defined in the abstract base, but we can do it much more cheaply.
-  def clear(self):
-    self._values.clear()
-    self._message_listener.Modified()
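A hedged sketch of the defaultdict-like contract noted above -- get() never
inserts, while indexing does -- again with throwaway listener/checker
stand-ins rather than real protobuf type checkers:

  class _FakeListener(object):
    def Modified(self):
      pass

  class _IntChecker(object):
    def CheckValue(self, value):
      if not isinstance(value, int):
        raise TypeError('int expected, got %r' % (value,))
      return value
    def DefaultValue(self):
      return 0

  scalar_map = ScalarMap(_FakeListener(), _IntChecker(), _IntChecker())
  assert scalar_map.get(5) is None   # get() leaves the map untouched
  assert scalar_map[5] == 0          # __getitem__ inserts the default value
  assert 5 in scalar_map and len(scalar_map) == 1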
-
-
-class MessageMap(MutableMapping):
-
-  """Simple, type-checked, dict-like container for with submessage values."""
-
-  # Disallows assignment to other attributes.
-  __slots__ = ['_key_checker', '_values', '_message_listener',
-               '_message_descriptor']
-
-  def __init__(self, message_listener, message_descriptor, key_checker):
-    """
-    Args:
-      message_listener: A MessageListener implementation.
-        The MessageMap will call this object's Modified() method when it
-        is modified.
-      message_descriptor: A Descriptor instance describing the protocol type
-        of the values held in this container.  We'll use its _concrete_class
-        field when the client reads a key that is not yet present.
-      key_checker: A type_checkers.ValueChecker instance to run on keys
-        inserted into this container.
-    """
-    self._message_listener = message_listener
-    self._message_descriptor = message_descriptor
-    self._key_checker = key_checker
-    self._values = {}
-
-  def __getitem__(self, key):
-    try:
-      return self._values[key]
-    except KeyError:
-      key = self._key_checker.CheckValue(key)
-      new_element = self._message_descriptor._concrete_class()
-      new_element._SetListener(self._message_listener)
-      self._values[key] = new_element
-      self._message_listener.Modified()
-
-      return new_element
-
-  def get_or_create(self, key):
-    """get_or_create() is an alias for getitem (ie. map[key]).
-
-    Args:
-      key: The key to get or create in the map.
-
-    This is useful in cases where you want to be explicit that the call is
-    mutating the map.  This can avoid lint errors for statements like this
-    that otherwise would appear to be pointless statements:
-
-      msg.my_map[key]
-    """
-    return self[key]
-
-  # We need to override this explicitly, because our defaultdict-like behavior
-  # will make the default implementation (from our base class) always insert
-  # the key.
-  def get(self, key, default=None):
-    if key in self:
-      return self[key]
-    else:
-      return default
-
-  def __contains__(self, item):
-    return item in self._values
-
-  def __setitem__(self, key, value):
-    raise ValueError('May not set values directly, call my_map[key].foo = 5')
-
-  def __delitem__(self, key):
-    del self._values[key]
-    self._message_listener.Modified()
-
-  def __len__(self):
-    return len(self._values)
-
-  def __iter__(self):
-    return iter(self._values)
-
-  def __repr__(self):
-    return repr(self._values)
-
-  def MergeFrom(self, other):
-    for key in other:
-      # According to documentation: "When parsing from the wire or when merging,
-      # if there are duplicate map keys the last key seen is used".
-      if key in self:
-        del self[key]
-      self[key].CopyFrom(other[key])
-    # self._message_listener.Modified() not required here, because
-    # mutations to submessages already propagate.
-
-  def InvalidateIterators(self):
-    # It appears that the only way to reliably invalidate iterators to
-    # self._values is to ensure that its size changes.
-    original = self._values
-    self._values = original.copy()
-    original[None] = None
-
-  # This is defined in the abstract base, but we can do it much more cheaply.
-  def clear(self):
-    self._values.clear()
-    self._message_listener.Modified()
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/decoder.py b/tools/swarming_client/third_party/google/protobuf/internal/decoder.py
deleted file mode 100644
index 31869e4..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/decoder.py
+++ /dev/null
@@ -1,854 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Code for decoding protocol buffer primitives.
-
-This code is very similar to encoder.py -- read the docs for that module first.
-
-A "decoder" is a function with the signature:
-  Decode(buffer, pos, end, message, field_dict)
-The arguments are:
-  buffer:     The string containing the encoded message.
-  pos:        The current position in the string.
-  end:        The position in the string where the current message ends.  May be
-              less than len(buffer) if we're reading a sub-message.
-  message:    The message object into which we're parsing.
-  field_dict: message._fields (avoids a hashtable lookup).
-The decoder reads the field and stores it into field_dict, returning the new
-buffer position.  A decoder for a repeated field may proactively decode all of
-the elements of that field, if they appear consecutively.
-
-Note that decoders may throw any of the following:
-  IndexError:  Indicates a truncated message.
-  struct.error:  Unpacking of a fixed-width field failed.
-  message.DecodeError:  Other errors.
-
-Decoders are expected to raise an exception if they are called with pos > end.
-This allows callers to be lax about bounds checking:  it's fine to read past
-"end" as long as you are sure that someone else will notice and throw an
-exception later on.
-
-Something up the call stack is expected to catch IndexError and struct.error
-and convert them to message.DecodeError.
-
-Decoders are constructed using decoder constructors with the signature:
-  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
-The arguments are:
-  field_number:  The field number of the field we want to decode.
-  is_repeated:   Is the field a repeated field? (bool)
-  is_packed:     Is the field a packed field? (bool)
-  key:           The key to use when looking up the field within field_dict.
-                 (This is actually the FieldDescriptor but nothing in this
-                 file should depend on that.)
-  new_default:   A function which takes a message object as a parameter and
-                 returns a new instance of the default value for this field.
-                 (This is called for repeated fields and sub-messages, when an
-                 instance does not already exist.)
-
-As with encoders, we define a decoder constructor for every type of field.
-Then, for every field of every message class we construct an actual decoder.
-That decoder goes into a dict indexed by tag, so when we decode a message
-we repeatedly read a tag, look up the corresponding decoder, and invoke it.
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import struct
-
-import six
-
-if six.PY3:
-  long = int
-
-from google.protobuf.internal import encoder
-from google.protobuf.internal import wire_format
-from google.protobuf import message
-
-
-# This will overflow and thus become IEEE-754 "infinity".  We would use
-# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
-_POS_INF = 1e10000
-_NEG_INF = -_POS_INF
-_NAN = _POS_INF * 0
-
-
-# This is not for optimization, but rather to avoid conflicts with local
-# variables named "message".
-_DecodeError = message.DecodeError
-
-
-def _VarintDecoder(mask, result_type):
-  """Return an encoder for a basic varint value (does not include tag).
-
-  Decoded values will be bitwise-anded with the given mask before being
-  returned, e.g. to limit them to 32 bits.  The returned decoder does not
-  take the usual "end" parameter -- the caller is expected to do bounds checking
-  after the fact (often the caller can defer such checking until later).  The
-  decoder returns a (value, new_pos) pair.
-  """
-
-  def DecodeVarint(buffer, pos):
-    result = 0
-    shift = 0
-    while 1:
-      b = six.indexbytes(buffer, pos)
-      result |= ((b & 0x7f) << shift)
-      pos += 1
-      if not (b & 0x80):
-        result &= mask
-        result = result_type(result)
-        return (result, pos)
-      shift += 7
-      if shift >= 64:
-        raise _DecodeError('Too many bytes when decoding varint.')
-  return DecodeVarint
-
-
-def _SignedVarintDecoder(mask, result_type):
-  """Like _VarintDecoder() but decodes signed values."""
-
-  def DecodeVarint(buffer, pos):
-    result = 0
-    shift = 0
-    while 1:
-      b = six.indexbytes(buffer, pos)
-      result |= ((b & 0x7f) << shift)
-      pos += 1
-      if not (b & 0x80):
-        if result > 0x7fffffffffffffff:
-          result -= (1 << 64)
-          result |= ~mask
-        else:
-          result &= mask
-        result = result_type(result)
-        return (result, pos)
-      shift += 7
-      if shift >= 64:
-        raise _DecodeError('Too many bytes when decoding varint.')
-  return DecodeVarint
-
-# We force 32-bit values to int and 64-bit values to long to make
-# alternate implementations where the distinction is more significant
-# (e.g. the C++ implementation) simpler.
-
-_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
-_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long)
-
-# Use these versions for values which must be limited to 32 bits.
-_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
-_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int)
-
-
-def ReadTag(buffer, pos):
-  """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.
-
-  We return the raw bytes of the tag rather than decoding them.  The raw
-  bytes can then be used to look up the proper decoder.  This effectively allows
-  us to trade some work that would be done in pure-python (decoding a varint)
-  for work that is done in C (searching for a byte string in a hash table).
-  In a low-level language it would be much cheaper to decode the varint and
-  use that, but not in Python.
-  """
-
-  start = pos
-  while six.indexbytes(buffer, pos) & 0x80:
-    pos += 1
-  pos += 1
-  return (buffer[start:pos], pos)
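A hedged worked example of the helpers above, using the classic wire-format
values from the protobuf encoding docs (field number 1, wire type 0, value
150, i.e. the bytes 08 96 01):

  tag_bytes, pos = ReadTag(b'\x08\x96\x01', 0)
  assert tag_bytes == b'\x08' and pos == 1

  value, pos = _DecodeVarint(b'\x08\x96\x01', pos)
  assert value == 150 and pos == 3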
-
-
-# --------------------------------------------------------------------
-
-
-def _SimpleDecoder(wire_type, decode_value):
-  """Return a constructor for a decoder for fields of a particular type.
-
-  Args:
-      wire_type:  The field's wire type.
-      decode_value:  A function which decodes an individual value, e.g.
-        _DecodeVarint()
-  """
-
-  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
-    if is_packed:
-      local_DecodeVarint = _DecodeVarint
-      def DecodePackedField(buffer, pos, end, message, field_dict):
-        value = field_dict.get(key)
-        if value is None:
-          value = field_dict.setdefault(key, new_default(message))
-        (endpoint, pos) = local_DecodeVarint(buffer, pos)
-        endpoint += pos
-        if endpoint > end:
-          raise _DecodeError('Truncated message.')
-        while pos < endpoint:
-          (element, pos) = decode_value(buffer, pos)
-          value.append(element)
-        if pos > endpoint:
-          del value[-1]   # Discard corrupt value.
-          raise _DecodeError('Packed element was truncated.')
-        return pos
-      return DecodePackedField
-    elif is_repeated:
-      tag_bytes = encoder.TagBytes(field_number, wire_type)
-      tag_len = len(tag_bytes)
-      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-        value = field_dict.get(key)
-        if value is None:
-          value = field_dict.setdefault(key, new_default(message))
-        while 1:
-          (element, new_pos) = decode_value(buffer, pos)
-          value.append(element)
-          # Predict that the next tag is another copy of the same repeated
-          # field.
-          pos = new_pos + tag_len
-          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
-            # Prediction failed.  Return.
-            if new_pos > end:
-              raise _DecodeError('Truncated message.')
-            return new_pos
-      return DecodeRepeatedField
-    else:
-      def DecodeField(buffer, pos, end, message, field_dict):
-        (field_dict[key], pos) = decode_value(buffer, pos)
-        if pos > end:
-          del field_dict[key]  # Discard corrupt value.
-          raise _DecodeError('Truncated message.')
-        return pos
-      return DecodeField
-
-  return SpecificDecoder
-
-
-def _ModifiedDecoder(wire_type, decode_value, modify_value):
-  """Like SimpleDecoder but additionally invokes modify_value on every value
-  before storing it.  Usually modify_value is ZigZagDecode.
-  """
-
-  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
-  # not enough to make a significant difference.
-
-  def InnerDecode(buffer, pos):
-    (result, new_pos) = decode_value(buffer, pos)
-    return (modify_value(result), new_pos)
-  return _SimpleDecoder(wire_type, InnerDecode)
-
-
-def _StructPackDecoder(wire_type, format):
-  """Return a constructor for a decoder for a fixed-width field.
-
-  Args:
-      wire_type:  The field's wire type.
-      format:  The format string to pass to struct.unpack().
-  """
-
-  value_size = struct.calcsize(format)
-  local_unpack = struct.unpack
-
-  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
-  # not enough to make a significant difference.
-
-  # Note that we expect someone up-stack to catch struct.error and convert
-  # it to _DecodeError -- this way we don't have to set up exception-
-  # handling blocks every time we parse one value.
-
-  def InnerDecode(buffer, pos):
-    new_pos = pos + value_size
-    result = local_unpack(format, buffer[pos:new_pos])[0]
-    return (result, new_pos)
-  return _SimpleDecoder(wire_type, InnerDecode)
-
-
-def _FloatDecoder():
-  """Returns a decoder for a float field.
-
-  This code works around a bug in struct.unpack for non-finite 32-bit
-  floating-point values.
-  """
-
-  local_unpack = struct.unpack
-
-  def InnerDecode(buffer, pos):
-    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
-    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
-    new_pos = pos + 4
-    float_bytes = buffer[pos:new_pos]
-
-    # If this value has all its exponent bits set, then it's non-finite.
-    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
-    # To avoid that, we parse it specially.
-    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
-      # If at least one significand bit is set...
-      if float_bytes[0:3] != b'\x00\x00\x80':
-        return (_NAN, new_pos)
-      # If sign bit is set...
-      if float_bytes[3:4] == b'\xFF':
-        return (_NEG_INF, new_pos)
-      return (_POS_INF, new_pos)
-
-    # Note that we expect someone up-stack to catch struct.error and convert
-    # it to _DecodeError -- this way we don't have to set up exception-
-    # handling blocks every time we parse one value.
-    result = local_unpack('<f', float_bytes)[0]
-    return (result, new_pos)
-  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
-
-
-def _DoubleDecoder():
-  """Returns a decoder for a double field.
-
-  This code works around a bug in struct.unpack for not-a-number.
-  """
-
-  local_unpack = struct.unpack
-
-  def InnerDecode(buffer, pos):
-    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
-    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
-    new_pos = pos + 8
-    double_bytes = buffer[pos:new_pos]
-
-    # If this value has all its exponent bits set and at least one significand
-    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
-    # as inf or -inf.  To avoid that, we treat it specially.
-    if ((double_bytes[7:8] in b'\x7F\xFF')
-        and (double_bytes[6:7] >= b'\xF0')
-        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
-      return (_NAN, new_pos)
-
-    # Note that we expect someone up-stack to catch struct.error and convert
-    # it to _DecodeError -- this way we don't have to set up exception-
-    # handling blocks every time we parse one value.
-    result = local_unpack('<d', double_bytes)[0]
-    return (result, new_pos)
-  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
-
-
-def EnumDecoder(field_number, is_repeated, is_packed, key, new_default):
-  enum_type = key.enum_type
-  if is_packed:
-    local_DecodeVarint = _DecodeVarint
-    def DecodePackedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      (endpoint, pos) = local_DecodeVarint(buffer, pos)
-      endpoint += pos
-      if endpoint > end:
-        raise _DecodeError('Truncated message.')
-      while pos < endpoint:
-        value_start_pos = pos
-        (element, pos) = _DecodeSignedVarint32(buffer, pos)
-        if element in enum_type.values_by_number:
-          value.append(element)
-        else:
-          if not message._unknown_fields:
-            message._unknown_fields = []
-          tag_bytes = encoder.TagBytes(field_number,
-                                       wire_format.WIRETYPE_VARINT)
-          message._unknown_fields.append(
-              (tag_bytes, buffer[value_start_pos:pos]))
-      if pos > endpoint:
-        if element in enum_type.values_by_number:
-          del value[-1]   # Discard corrupt value.
-        else:
-          del message._unknown_fields[-1]
-        raise _DecodeError('Packed element was truncated.')
-      return pos
-    return DecodePackedField
-  elif is_repeated:
-    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
-    tag_len = len(tag_bytes)
-    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      while 1:
-        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
-        if element in enum_type.values_by_number:
-          value.append(element)
-        else:
-          if not message._unknown_fields:
-            message._unknown_fields = []
-          message._unknown_fields.append(
-              (tag_bytes, buffer[pos:new_pos]))
-        # Predict that the next tag is another copy of the same repeated
-        # field.
-        pos = new_pos + tag_len
-        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
-          # Prediction failed.  Return.
-          if new_pos > end:
-            raise _DecodeError('Truncated message.')
-          return new_pos
-    return DecodeRepeatedField
-  else:
-    def DecodeField(buffer, pos, end, message, field_dict):
-      value_start_pos = pos
-      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
-      if pos > end:
-        raise _DecodeError('Truncated message.')
-      if enum_value in enum_type.values_by_number:
-        field_dict[key] = enum_value
-      else:
-        if not message._unknown_fields:
-          message._unknown_fields = []
-        tag_bytes = encoder.TagBytes(field_number,
-                                     wire_format.WIRETYPE_VARINT)
-        message._unknown_fields.append(
-          (tag_bytes, buffer[value_start_pos:pos]))
-      return pos
-    return DecodeField
-
-
-# --------------------------------------------------------------------
-
-
-Int32Decoder = _SimpleDecoder(
-    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
-
-Int64Decoder = _SimpleDecoder(
-    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
-
-UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
-UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
-
-SInt32Decoder = _ModifiedDecoder(
-    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
-SInt64Decoder = _ModifiedDecoder(
-    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
-
-# Note that Python conveniently guarantees that when using the '<' prefix on
-# formats, they will also have the same size across all platforms (as opposed
-# to without the prefix, where their sizes depend on the C compiler's basic
-# type sizes).
-Fixed32Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
-Fixed64Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
-SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
-SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
-FloatDecoder = _FloatDecoder()
-DoubleDecoder = _DoubleDecoder()
-
-BoolDecoder = _ModifiedDecoder(
-    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
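A hedged sketch of the decoder contract from the module docstring, exercised
through one of the concrete constructors above.  Per that docstring the key is
opaque to this module, so a plain string and a do-nothing message object are
enough for illustration (both are stand-ins, not real descriptor machinery):

  class _FakeMessage(object):
    pass

  decode = Int32Decoder(field_number=1, is_repeated=False, is_packed=False,
                        key='my_field', new_default=lambda msg: 0)
  field_dict = {}
  buf = b'\x96\x01'                       # varint encoding of 150
  new_pos = decode(buf, 0, len(buf), _FakeMessage(), field_dict)
  assert field_dict == {'my_field': 150} and new_pos == 2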
-
-
-def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
-  """Returns a decoder for a string field."""
-
-  local_DecodeVarint = _DecodeVarint
-  local_unicode = six.text_type
-
-  def _ConvertToUnicode(byte_str):
-    try:
-      return local_unicode(byte_str, 'utf-8')
-    except UnicodeDecodeError as e:
-      # add more information to the error message and re-raise it.
-      e.reason = '%s in field: %s' % (e, key.full_name)
-      raise
-
-  assert not is_packed
-  if is_repeated:
-    tag_bytes = encoder.TagBytes(field_number,
-                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
-    tag_len = len(tag_bytes)
-    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      while 1:
-        (size, pos) = local_DecodeVarint(buffer, pos)
-        new_pos = pos + size
-        if new_pos > end:
-          raise _DecodeError('Truncated string.')
-        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
-        # Predict that the next tag is another copy of the same repeated field.
-        pos = new_pos + tag_len
-        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-          # Prediction failed.  Return.
-          return new_pos
-    return DecodeRepeatedField
-  else:
-    def DecodeField(buffer, pos, end, message, field_dict):
-      (size, pos) = local_DecodeVarint(buffer, pos)
-      new_pos = pos + size
-      if new_pos > end:
-        raise _DecodeError('Truncated string.')
-      field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
-      return new_pos
-    return DecodeField
-
-
-def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
-  """Returns a decoder for a bytes field."""
-
-  local_DecodeVarint = _DecodeVarint
-
-  assert not is_packed
-  if is_repeated:
-    tag_bytes = encoder.TagBytes(field_number,
-                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
-    tag_len = len(tag_bytes)
-    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      while 1:
-        (size, pos) = local_DecodeVarint(buffer, pos)
-        new_pos = pos + size
-        if new_pos > end:
-          raise _DecodeError('Truncated string.')
-        value.append(buffer[pos:new_pos])
-        # Predict that the next tag is another copy of the same repeated field.
-        pos = new_pos + tag_len
-        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-          # Prediction failed.  Return.
-          return new_pos
-    return DecodeRepeatedField
-  else:
-    def DecodeField(buffer, pos, end, message, field_dict):
-      (size, pos) = local_DecodeVarint(buffer, pos)
-      new_pos = pos + size
-      if new_pos > end:
-        raise _DecodeError('Truncated string.')
-      field_dict[key] = buffer[pos:new_pos]
-      return new_pos
-    return DecodeField
-
-
-def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
-  """Returns a decoder for a group field."""
-
-  end_tag_bytes = encoder.TagBytes(field_number,
-                                   wire_format.WIRETYPE_END_GROUP)
-  end_tag_len = len(end_tag_bytes)
-
-  assert not is_packed
-  if is_repeated:
-    tag_bytes = encoder.TagBytes(field_number,
-                                 wire_format.WIRETYPE_START_GROUP)
-    tag_len = len(tag_bytes)
-    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      while 1:
-        value = field_dict.get(key)
-        if value is None:
-          value = field_dict.setdefault(key, new_default(message))
-        # Read sub-message.
-        pos = value.add()._InternalParse(buffer, pos, end)
-        # Read end tag.
-        new_pos = pos+end_tag_len
-        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
-          raise _DecodeError('Missing group end tag.')
-        # Predict that the next tag is another copy of the same repeated field.
-        pos = new_pos + tag_len
-        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-          # Prediction failed.  Return.
-          return new_pos
-    return DecodeRepeatedField
-  else:
-    def DecodeField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      # Read sub-message.
-      pos = value._InternalParse(buffer, pos, end)
-      # Read end tag.
-      new_pos = pos+end_tag_len
-      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
-        raise _DecodeError('Missing group end tag.')
-      return new_pos
-    return DecodeField
-
-
-def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
-  """Returns a decoder for a message field."""
-
-  local_DecodeVarint = _DecodeVarint
-
-  assert not is_packed
-  if is_repeated:
-    tag_bytes = encoder.TagBytes(field_number,
-                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
-    tag_len = len(tag_bytes)
-    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      while 1:
-        # Read length.
-        (size, pos) = local_DecodeVarint(buffer, pos)
-        new_pos = pos + size
-        if new_pos > end:
-          raise _DecodeError('Truncated message.')
-        # Read sub-message.
-        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
-          # The only reason _InternalParse would return early is if it
-          # encountered an end-group tag.
-          raise _DecodeError('Unexpected end-group tag.')
-        # Predict that the next tag is another copy of the same repeated field.
-        pos = new_pos + tag_len
-        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-          # Prediction failed.  Return.
-          return new_pos
-    return DecodeRepeatedField
-  else:
-    def DecodeField(buffer, pos, end, message, field_dict):
-      value = field_dict.get(key)
-      if value is None:
-        value = field_dict.setdefault(key, new_default(message))
-      # Read length.
-      (size, pos) = local_DecodeVarint(buffer, pos)
-      new_pos = pos + size
-      if new_pos > end:
-        raise _DecodeError('Truncated message.')
-      # Read sub-message.
-      if value._InternalParse(buffer, pos, new_pos) != new_pos:
-        # The only reason _InternalParse would return early is if it encountered
-        # an end-group tag.
-        raise _DecodeError('Unexpected end-group tag.')
-      return new_pos
-    return DecodeField
-
-
-# --------------------------------------------------------------------
-
-MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
-
-def MessageSetItemDecoder(extensions_by_number):
-  """Returns a decoder for a MessageSet item.
-
-  The parameter is the _extensions_by_number map for the message class.
-
-  The message set message looks like this:
-    message MessageSet {
-      repeated group Item = 1 {
-        required int32 type_id = 2;
-        required string message = 3;
-      }
-    }
-  """
-
-  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
-  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
-  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
-
-  local_ReadTag = ReadTag
-  local_DecodeVarint = _DecodeVarint
-  local_SkipField = SkipField
-
-  def DecodeItem(buffer, pos, end, message, field_dict):
-    message_set_item_start = pos
-    type_id = -1
-    message_start = -1
-    message_end = -1
-
-    # Technically, type_id and message can appear in any order, so we need
-    # a little loop here.
-    while 1:
-      (tag_bytes, pos) = local_ReadTag(buffer, pos)
-      if tag_bytes == type_id_tag_bytes:
-        (type_id, pos) = local_DecodeVarint(buffer, pos)
-      elif tag_bytes == message_tag_bytes:
-        (size, message_start) = local_DecodeVarint(buffer, pos)
-        pos = message_end = message_start + size
-      elif tag_bytes == item_end_tag_bytes:
-        break
-      else:
-        pos = SkipField(buffer, pos, end, tag_bytes)
-        if pos == -1:
-          raise _DecodeError('Missing group end tag.')
-
-    if pos > end:
-      raise _DecodeError('Truncated message.')
-
-    if type_id == -1:
-      raise _DecodeError('MessageSet item missing type_id.')
-    if message_start == -1:
-      raise _DecodeError('MessageSet item missing message.')
-
-    extension = extensions_by_number.get(type_id)
-    if extension is not None:
-      value = field_dict.get(extension)
-      if value is None:
-        value = field_dict.setdefault(
-            extension, extension.message_type._concrete_class())
-      if value._InternalParse(buffer, message_start,message_end) != message_end:
-        # The only reason _InternalParse would return early is if it encountered
-        # an end-group tag.
-        raise _DecodeError('Unexpected end-group tag.')
-    else:
-      if not message._unknown_fields:
-        message._unknown_fields = []
-      message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
-                                      buffer[message_set_item_start:pos]))
-
-    return pos
-
-  return DecodeItem
-
-# --------------------------------------------------------------------
-
-def MapDecoder(field_descriptor, new_default, is_message_map):
-  """Returns a decoder for a map field."""
-
-  key = field_descriptor
-  tag_bytes = encoder.TagBytes(field_descriptor.number,
-                               wire_format.WIRETYPE_LENGTH_DELIMITED)
-  tag_len = len(tag_bytes)
-  local_DecodeVarint = _DecodeVarint
-  # Can't read _concrete_class yet; might not be initialized.
-  message_type = field_descriptor.message_type
-
-  def DecodeMap(buffer, pos, end, message, field_dict):
-    submsg = message_type._concrete_class()
-    value = field_dict.get(key)
-    if value is None:
-      value = field_dict.setdefault(key, new_default(message))
-    while 1:
-      # Read length.
-      (size, pos) = local_DecodeVarint(buffer, pos)
-      new_pos = pos + size
-      if new_pos > end:
-        raise _DecodeError('Truncated message.')
-      # Read sub-message.
-      submsg.Clear()
-      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
-        # The only reason _InternalParse would return early is if it
-        # encountered an end-group tag.
-        raise _DecodeError('Unexpected end-group tag.')
-
-      if is_message_map:
-        value[submsg.key].MergeFrom(submsg.value)
-      else:
-        value[submsg.key] = submsg.value
-
-      # Predict that the next tag is another copy of the same repeated field.
-      pos = new_pos + tag_len
-      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-        # Prediction failed.  Return.
-        return new_pos
-
-  return DecodeMap
-
-# --------------------------------------------------------------------
-# Optimization is not as heavy here because calls to SkipField() are rare,
-# except for handling end-group tags.
-
-def _SkipVarint(buffer, pos, end):
-  """Skip a varint value.  Returns the new position."""
-  # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
-  # With this code, ord(b'') raises TypeError.  Both are handled in
-  # python_message.py to generate a 'Truncated message' error.
-  while ord(buffer[pos:pos+1]) & 0x80:
-    pos += 1
-  pos += 1
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-def _SkipFixed64(buffer, pos, end):
-  """Skip a fixed64 value.  Returns the new position."""
-
-  pos += 8
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-def _SkipLengthDelimited(buffer, pos, end):
-  """Skip a length-delimited value.  Returns the new position."""
-
-  (size, pos) = _DecodeVarint(buffer, pos)
-  pos += size
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-def _SkipGroup(buffer, pos, end):
-  """Skip sub-group.  Returns the new position."""
-
-  while 1:
-    (tag_bytes, pos) = ReadTag(buffer, pos)
-    new_pos = SkipField(buffer, pos, end, tag_bytes)
-    if new_pos == -1:
-      return pos
-    pos = new_pos
-
-def _EndGroup(buffer, pos, end):
-  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
-
-  return -1
-
-def _SkipFixed32(buffer, pos, end):
-  """Skip a fixed32 value.  Returns the new position."""
-
-  pos += 4
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-def _RaiseInvalidWireType(buffer, pos, end):
-  """Skip function for unknown wire types.  Raises an exception."""
-
-  raise _DecodeError('Tag had invalid wire type.')
-
-def _FieldSkipper():
-  """Constructs the SkipField function."""
-
-  WIRETYPE_TO_SKIPPER = [
-      _SkipVarint,
-      _SkipFixed64,
-      _SkipLengthDelimited,
-      _SkipGroup,
-      _EndGroup,
-      _SkipFixed32,
-      _RaiseInvalidWireType,
-      _RaiseInvalidWireType,
-      ]
-
-  wiretype_mask = wire_format.TAG_TYPE_MASK
-
-  def SkipField(buffer, pos, end, tag_bytes):
-    """Skips a field with the specified tag.
-
-    |pos| should point to the byte immediately after the tag.
-
-    Returns:
-        The new position (after the tag value), or -1 if the tag is an end-group
-        tag (in which case the calling loop should break).
-    """
-
-    # The wire type is always in the first byte since varints are little-endian.
-    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
-    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)
-
-  return SkipField
-
-SkipField = _FieldSkipper()
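
The decoder machinery removed above leans on two wire-format primitives: base-128 varints (used for every integer field, tag, and length prefix) and the ZigZag mapping applied by SInt32Decoder/SInt64Decoder. A minimal standalone sketch of both, assuming Python 3 bytes indexing; the helper names decode_varint, zigzag_decode, and wire_type are illustrative and are not functions of the deleted module:

# Illustrative sketch only, not part of the patched tree.
def decode_varint(buf, pos):
  # Each byte carries 7 payload bits, least-significant group first;
  # the high bit marks continuation.  Returns (value, new_pos).
  result = 0
  shift = 0
  while True:
    b = buf[pos]
    result |= (b & 0x7F) << shift
    pos += 1
    if not (b & 0x80):
      return result, pos
    shift += 7

def zigzag_decode(value):
  # Map a ZigZag-encoded unsigned varint back to a signed integer,
  # mirroring what wire_format.ZigZagDecode does for sint fields.
  return (value >> 1) ^ -(value & 1)

def wire_type(tag):
  # The low three bits of a decoded tag select the wire type
  # (0 varint, 1 fixed64, 2 length-delimited, 3 start-group, ...),
  # the same mask SkipField applies via wire_format.TAG_TYPE_MASK.
  return tag & 0x7

# Example: 300 encodes as b'\xac\x02'; the sint value -3 encodes as varint 5;
# tag 11 is field number 1 with the start-group wire type.
assert decode_varint(b'\xac\x02', 0) == (300, 2)
assert zigzag_decode(5) == -3
assert wire_type(11) == 3
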
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test1_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test1_pb2.py
deleted file mode 100644
index f093d83..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test1_pb2.py
+++ /dev/null
@@ -1,474 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/descriptor_pool_test1.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/descriptor_pool_test1.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n4google/protobuf/internal/descriptor_pool_test1.proto\x12\x1fgoogle.protobuf.python.internal\"\xfb\x05\n\x13\x44\x65scriptorPoolTest1\x12Z\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum:\x04\x42\x45TA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage\x1a\xfd\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum:\x04ZETA\x12\x1a\n\x0cnested_field\x18\x02 \x01(\t:\x04\x62\x65ta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum:\x03\x45TA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05theta\" \n\nNestedEnum\x12\x07\n\x03\x45TA\x10\x07\x12\t\n\x05THETA\x10\x08\"#\n\nNestedEnum\x12\x0b\n\x07\x45PSILON\x10\x05\x12\x08\n\x04ZETA\x10\x06\"!\n\nNestedEnum\x12\t\n\x05\x41LPHA\x10\x01\x12\x08\n\x04\x42\x45TA\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xf1\x05\n\x13\x44\x65scriptorPoolTest2\x12[\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum:\x05GAMMA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage\x1a\xfc\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum:\x04IOTA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05\x64\x65lta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12x\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum:\x02MU\x12\x1c\n\x0cnested_field\x18\x02 \x01(\t:\x06lambda\" \n\nNestedEnum\x12\n\n\x06LAMBDA\x10\x0b\x12\x06\n\x02MU\x10\x0c\"!\n\nNestedEnum\x12\x08\n\x04IOTA\x10\t\x12\t\n\x05KAPPA\x10\n\"\"\n\nNestedEnum\x12\t\n\x05GAMMA\x10\x03\x12\t\n\x05\x44\x45LTA\x10\x04')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ETA', index=0, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='THETA', index=1, number=8,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=738,
-  serialized_end=770,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='EPSILON', index=0, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ZETA', index=1, number=6,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=772,
-  serialized_end=807,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST1_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ALPHA', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BETA', index=1, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=809,
-  serialized_end=842,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='LAMBDA', index=0, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MU', index=1, number=12,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1506,
-  serialized_end=1538,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='IOTA', index=0, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='KAPPA', index=1, number=10,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1540,
-  serialized_end=1573,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST2_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='GAMMA', index=0, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DELTA', index=1, number=4,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1575,
-  serialized_end=1609,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDENUM)
-
-
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
-  name='DeepNestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=7,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("theta").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=565,
-  serialized_end=770,
-)
-
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=6,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("beta").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.deep_nested_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=298,
-  serialized_end=807,
-)
-
-_DESCRIPTORPOOLTEST1 = _descriptor.Descriptor(
-  name='DescriptorPoolTest1',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest1',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=2,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_message', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST1_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=90,
-  serialized_end=853,
-)
-
-
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
-  name='DeepNestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=12,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("lambda").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1333,
-  serialized_end=1538,
-)
-
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=9,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("delta").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.deep_nested_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1065,
-  serialized_end=1573,
-)
-
-_DESCRIPTORPOOLTEST2 = _descriptor.Descriptor(
-  name='DescriptorPoolTest2',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=3,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_message', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST2_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=856,
-  serialized_end=1609,
-)
-
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1
-_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST1.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDENUM
-_DESCRIPTORPOOLTEST1.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST1_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST2
-_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST2.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDENUM
-_DESCRIPTORPOOLTEST2.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST2_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2
-DESCRIPTOR.message_types_by_name['DescriptorPoolTest1'] = _DESCRIPTORPOOLTEST1
-DESCRIPTOR.message_types_by_name['DescriptorPoolTest2'] = _DESCRIPTORPOOLTEST2
-
-DescriptorPoolTest1 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest1', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-
-    DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict(
-      DESCRIPTOR = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
-      __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-      # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage)
-      ))
-    ,
-    DESCRIPTOR = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _DESCRIPTORPOOLTEST1,
-  __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1)
-  ))
-_sym_db.RegisterMessage(DescriptorPoolTest1)
-_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage)
-_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage.DeepNestedMessage)
-
-DescriptorPoolTest2 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest2', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-
-    DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict(
-      DESCRIPTOR = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
-      __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-      # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage)
-      ))
-    ,
-    DESCRIPTOR = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _DESCRIPTORPOOLTEST2,
-  __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2)
-  ))
-_sym_db.RegisterMessage(DescriptorPoolTest2)
-_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage)
-_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage.DeepNestedMessage)
-
-
-# @@protoc_insertion_point(module_scope)
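
The generated module removed above wires EnumDescriptor and Descriptor objects together and then materializes message classes through _reflection.GeneratedProtocolMessageType. A hedged usage sketch of what those classes expose, assuming the module were still importable under its original path and that the defaults encoded in its descriptors (BETA for nested_enum, "beta" for NestedMessage.nested_field) apply:

# Hypothetical usage sketch; the import path and the asserted defaults are
# assumptions read off the descriptors above, not behavior this patch adds.
from google.protobuf.internal import descriptor_pool_test1_pb2 as test1_pb2

msg = test1_pb2.DescriptorPoolTest1()
# Unset proto2 fields report their descriptor defaults.
assert msg.nested_enum == test1_pb2.DescriptorPoolTest1.BETA   # enum default 2
assert msg.nested_message.nested_field == u'beta'              # string default

# Round-trip through the wire format with the standard message API.
data = msg.SerializeToString()
clone = test1_pb2.DescriptorPoolTest1()
clone.ParseFromString(data)
assert clone == msg
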
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test2_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test2_pb2.py
deleted file mode 100644
index eee46a5..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/descriptor_pool_test2_pb2.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/descriptor_pool_test2.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf.internal import descriptor_pool_test1_pb2 as google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2
-from google.protobuf.internal import more_messages_pb2 as google_dot_protobuf_dot_internal_dot_more__messages__pb2
-
-from google.protobuf.internal.more_messages_pb2 import *
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/descriptor_pool_test2.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n4google/protobuf/internal/descriptor_pool_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a\x34google/protobuf/internal/descriptor_pool_test1.proto\x1a,google/protobuf/internal/more_messages.proto\"\xef\x06\n\x13\x44\x65scriptorPoolTest3\x12X\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum:\x02XI\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage\x1a\xf7\x03\n\rNestedMessage\x12\x66\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum:\x02PI\x12\x18\n\x0cnested_field\x18\x02 \x01(\t:\x02nu\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum:\x03RHO\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05sigma\" \n\nNestedEnum\x12\x07\n\x03RHO\x10\x11\x12\t\n\x05SIGMA\x10\x12\"!\n\nNestedEnum\x12\x0b\n\x07OMICRON\x10\x0f\x12\x06\n\x02PI\x10\x10\"\x1c\n\nNestedEnum\x12\x06\n\x02NU\x10\r\x12\x06\n\x02XI\x10\x0e\x32\x89\x01\n\x14\x64\x65scriptor_pool_test\x12\x34.google.protobuf.python.internal.DescriptorPoolTest1\x18\xe9\x07 \x01(\x0b\x32\x34.google.protobuf.python.internal.DescriptorPoolTest3P\x01')
-  ,
-  dependencies=[google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DESCRIPTOR,google_dot_protobuf_dot_internal_dot_more__messages__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='RHO', index=0, number=17,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SIGMA', index=1, number=18,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=832,
-  serialized_end=864,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='OMICRON', index=0, number=15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PI', index=1, number=16,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=866,
-  serialized_end=899,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM)
-
-_DESCRIPTORPOOLTEST3_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NU', index=0, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='XI', index=1, number=14,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=901,
-  serialized_end=929,
-)
-_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDENUM)
-
-
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
-  name='DeepNestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=17,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("sigma").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=659,
-  serialized_end=864,
-)
-
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=16,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_field', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("nu").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.deep_nested_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=396,
-  serialized_end=899,
-)
-
-_DESCRIPTORPOOLTEST3 = _descriptor.Descriptor(
-  name='DescriptorPoolTest3',
-  full_name='google.protobuf.python.internal.DescriptorPoolTest3',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=14,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_message', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='descriptor_pool_test', full_name='google.protobuf.python.internal.DescriptorPoolTest3.descriptor_pool_test', index=0,
-      number=1001, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE, ],
-  enum_types=[
-    _DESCRIPTORPOOLTEST3_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=190,
-  serialized_end=1069,
-)
-
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3
-_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST3.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDENUM
-_DESCRIPTORPOOLTEST3.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
-_DESCRIPTORPOOLTEST3_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3
-DESCRIPTOR.message_types_by_name['DescriptorPoolTest3'] = _DESCRIPTORPOOLTEST3
-
-DescriptorPoolTest3 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest3', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-
-    DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict(
-      DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
-      __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
-      # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
-      ))
-    ,
-    DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _DESCRIPTORPOOLTEST3,
-  __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3)
-  ))
-_sym_db.RegisterMessage(DescriptorPoolTest3)
-_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage)
-_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
-
-_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'].message_type = _DESCRIPTORPOOLTEST3
-google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DescriptorPoolTest1.RegisterExtension(_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'])
-
-# @@protoc_insertion_point(module_scope)
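
The encoder.py file deleted next builds per-field "sizers" that precompute byte counts before anything is written; the arithmetic they hinge on is the varint length of a value plus the varint length of the field's tag. A minimal sketch of that arithmetic, assuming standard protobuf wire rules; varint_size and tag_size are hypothetical stand-ins for the module's _VarintSize and _TagSize:

# Illustrative sketch only, not part of the patched tree.
def varint_size(value):
  # Bytes needed to encode a non-negative integer as a base-128 varint:
  # one byte per 7 bits of payload, minimum one byte.
  size = 1
  while value > 0x7F:
    value >>= 7
    size += 1
  return size

def tag_size(field_number):
  # A tag is (field_number << 3 | wire_type) encoded as a varint, so
  # field numbers 1-15 cost one byte and 16-2047 cost two.
  return varint_size(field_number << 3)

# Example: a length-delimited field number 1 holding b'hi' costs
# tag (1 byte) + length varint (1 byte) + payload (2 bytes) = 4 bytes.
payload = b'hi'
assert tag_size(1) + varint_size(len(payload)) + len(payload) == 4
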
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/encoder.py b/tools/swarming_client/third_party/google/protobuf/internal/encoder.py
deleted file mode 100644
index 48ef2df..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/encoder.py
+++ /dev/null
@@ -1,823 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Code for encoding protocol message primitives.
-
-Contains the logic for encoding every logical protocol field type
-into one of the 5 physical wire types.
-
-This code is designed to push the Python interpreter's performance to the
-limits.
-
-The basic idea is that at startup time, for every field (i.e. every
-FieldDescriptor) we construct two functions:  a "sizer" and an "encoder".  The
-sizer takes a value of this field's type and computes its byte size.  The
-encoder takes a writer function and a value.  It encodes the value into byte
-strings and invokes the writer function to write those strings.  Typically the
-writer function is the write() method of a BytesIO.
-
-We try to do as much work as possible when constructing the writer and the
-sizer rather than when calling them.  In particular:
-* We copy any needed global functions to local variables, so that we do not need
-  to do costly global table lookups at runtime.
-* Similarly, we try to do any attribute lookups at startup time if possible.
-* Every field's tag is encoded to bytes at startup, since it can't change at
-  runtime.
-* Whatever component of the field size we can compute at startup, we do.
-* We *avoid* sharing code if doing so would make the code slower and not sharing
-  does not burden us too much.  For example, encoders for repeated fields do
-  not just call the encoders for singular fields in a loop because this would
-  add an extra function call overhead for every loop iteration; instead, we
-  manually inline the single-value encoder into the loop.
-* If a Python function lacks a return statement, Python actually generates
-  instructions to pop the result of the last statement off the stack, push
-  None onto the stack, and then return that.  If we really don't care what
-  value is returned, then we can save two instructions by returning the
-  result of the last statement.  It looks funny but it helps.
-* We assume that type and bounds checking has happened at a higher level.
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import struct
-
-import six
-
-from google.protobuf.internal import wire_format
-
-
-# This will overflow and thus become IEEE-754 "infinity".  We would use
-# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
-_POS_INF = 1e10000
-_NEG_INF = -_POS_INF
-
-
-def _VarintSize(value):
-  """Compute the size of a varint value."""
-  if value <= 0x7f: return 1
-  if value <= 0x3fff: return 2
-  if value <= 0x1fffff: return 3
-  if value <= 0xfffffff: return 4
-  if value <= 0x7ffffffff: return 5
-  if value <= 0x3ffffffffff: return 6
-  if value <= 0x1ffffffffffff: return 7
-  if value <= 0xffffffffffffff: return 8
-  if value <= 0x7fffffffffffffff: return 9
-  return 10
-
-
-def _SignedVarintSize(value):
-  """Compute the size of a signed varint value."""
-  if value < 0: return 10
-  if value <= 0x7f: return 1
-  if value <= 0x3fff: return 2
-  if value <= 0x1fffff: return 3
-  if value <= 0xfffffff: return 4
-  if value <= 0x7ffffffff: return 5
-  if value <= 0x3ffffffffff: return 6
-  if value <= 0x1ffffffffffff: return 7
-  if value <= 0xffffffffffffff: return 8
-  if value <= 0x7fffffffffffffff: return 9
-  return 10
-
-
-def _TagSize(field_number):
-  """Returns the number of bytes required to serialize a tag with this field
-  number."""
-  # Just pass in type 0, since the type won't affect the tag+type size.
-  return _VarintSize(wire_format.PackTag(field_number, 0))
-
-
-# --------------------------------------------------------------------
-# In this section we define some generic sizers.  Each of these functions
-# takes parameters specific to a particular field type, e.g. int32 or fixed64.
-# It returns another function which in turn takes parameters specific to a
-# particular field, e.g. the field number and whether it is repeated or packed.
-# Look at the next section to see how these are used.
-
-
-def _SimpleSizer(compute_value_size):
-  """A sizer which uses the function compute_value_size to compute the size of
-  each value.  Typically compute_value_size is _VarintSize."""
-
-  def SpecificSizer(field_number, is_repeated, is_packed):
-    tag_size = _TagSize(field_number)
-    if is_packed:
-      local_VarintSize = _VarintSize
-      def PackedFieldSize(value):
-        result = 0
-        for element in value:
-          result += compute_value_size(element)
-        return result + local_VarintSize(result) + tag_size
-      return PackedFieldSize
-    elif is_repeated:
-      def RepeatedFieldSize(value):
-        result = tag_size * len(value)
-        for element in value:
-          result += compute_value_size(element)
-        return result
-      return RepeatedFieldSize
-    else:
-      def FieldSize(value):
-        return tag_size + compute_value_size(value)
-      return FieldSize
-
-  return SpecificSizer
-
-
-def _ModifiedSizer(compute_value_size, modify_value):
-  """Like SimpleSizer, but modify_value is invoked on each value before it is
-  passed to compute_value_size.  modify_value is typically ZigZagEncode."""
-
-  def SpecificSizer(field_number, is_repeated, is_packed):
-    tag_size = _TagSize(field_number)
-    if is_packed:
-      local_VarintSize = _VarintSize
-      def PackedFieldSize(value):
-        result = 0
-        for element in value:
-          result += compute_value_size(modify_value(element))
-        return result + local_VarintSize(result) + tag_size
-      return PackedFieldSize
-    elif is_repeated:
-      def RepeatedFieldSize(value):
-        result = tag_size * len(value)
-        for element in value:
-          result += compute_value_size(modify_value(element))
-        return result
-      return RepeatedFieldSize
-    else:
-      def FieldSize(value):
-        return tag_size + compute_value_size(modify_value(value))
-      return FieldSize
-
-  return SpecificSizer
-
-
-def _FixedSizer(value_size):
-  """Like _SimpleSizer except for a fixed-size field.  The input is the size
-  of one value."""
-
-  def SpecificSizer(field_number, is_repeated, is_packed):
-    tag_size = _TagSize(field_number)
-    if is_packed:
-      local_VarintSize = _VarintSize
-      def PackedFieldSize(value):
-        result = len(value) * value_size
-        return result + local_VarintSize(result) + tag_size
-      return PackedFieldSize
-    elif is_repeated:
-      element_size = value_size + tag_size
-      def RepeatedFieldSize(value):
-        return len(value) * element_size
-      return RepeatedFieldSize
-    else:
-      field_size = value_size + tag_size
-      def FieldSize(value):
-        return field_size
-      return FieldSize
-
-  return SpecificSizer
-
-
-# ====================================================================
-# Here we declare a sizer constructor for each field type.  Each "sizer
-# constructor" is a function that takes (field_number, is_repeated, is_packed)
-# as parameters and returns a sizer, which in turn takes a field value as
-# a parameter and returns its encoded size.
-
-
-Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
-
-UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
-
-SInt32Sizer = SInt64Sizer = _ModifiedSizer(
-    _SignedVarintSize, wire_format.ZigZagEncode)
-
-Fixed32Sizer = SFixed32Sizer = FloatSizer  = _FixedSizer(4)
-Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
-
-BoolSizer = _FixedSizer(1)
-
-
-def StringSizer(field_number, is_repeated, is_packed):
-  """Returns a sizer for a string field."""
-
-  tag_size = _TagSize(field_number)
-  local_VarintSize = _VarintSize
-  local_len = len
-  assert not is_packed
-  if is_repeated:
-    def RepeatedFieldSize(value):
-      result = tag_size * len(value)
-      for element in value:
-        l = local_len(element.encode('utf-8'))
-        result += local_VarintSize(l) + l
-      return result
-    return RepeatedFieldSize
-  else:
-    def FieldSize(value):
-      l = local_len(value.encode('utf-8'))
-      return tag_size + local_VarintSize(l) + l
-    return FieldSize
-
-
-def BytesSizer(field_number, is_repeated, is_packed):
-  """Returns a sizer for a bytes field."""
-
-  tag_size = _TagSize(field_number)
-  local_VarintSize = _VarintSize
-  local_len = len
-  assert not is_packed
-  if is_repeated:
-    def RepeatedFieldSize(value):
-      result = tag_size * len(value)
-      for element in value:
-        l = local_len(element)
-        result += local_VarintSize(l) + l
-      return result
-    return RepeatedFieldSize
-  else:
-    def FieldSize(value):
-      l = local_len(value)
-      return tag_size + local_VarintSize(l) + l
-    return FieldSize
-
-
-def GroupSizer(field_number, is_repeated, is_packed):
-  """Returns a sizer for a group field."""
-
-  tag_size = _TagSize(field_number) * 2
-  assert not is_packed
-  if is_repeated:
-    def RepeatedFieldSize(value):
-      result = tag_size * len(value)
-      for element in value:
-        result += element.ByteSize()
-      return result
-    return RepeatedFieldSize
-  else:
-    def FieldSize(value):
-      return tag_size + value.ByteSize()
-    return FieldSize
-
-
-def MessageSizer(field_number, is_repeated, is_packed):
-  """Returns a sizer for a message field."""
-
-  tag_size = _TagSize(field_number)
-  local_VarintSize = _VarintSize
-  assert not is_packed
-  if is_repeated:
-    def RepeatedFieldSize(value):
-      result = tag_size * len(value)
-      for element in value:
-        l = element.ByteSize()
-        result += local_VarintSize(l) + l
-      return result
-    return RepeatedFieldSize
-  else:
-    def FieldSize(value):
-      l = value.ByteSize()
-      return tag_size + local_VarintSize(l) + l
-    return FieldSize
-
-
-# --------------------------------------------------------------------
-# MessageSet is special: it needs custom logic to compute its size properly.
-
-
-def MessageSetItemSizer(field_number):
-  """Returns a sizer for extensions of MessageSet.
-
-  The message set message looks like this:
-    message MessageSet {
-      repeated group Item = 1 {
-        required int32 type_id = 2;
-        required string message = 3;
-      }
-    }
-  """
-  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
-                 _TagSize(3))
-  local_VarintSize = _VarintSize
-
-  def FieldSize(value):
-    l = value.ByteSize()
-    return static_size + local_VarintSize(l) + l
-
-  return FieldSize
-
-
-# --------------------------------------------------------------------
-# Map is special: it needs custom logic to compute its size properly.
-
-
-def MapSizer(field_descriptor):
-  """Returns a sizer for a map field."""
-
-  # Can't look at field_descriptor.message_type._concrete_class because it may
-  # not have been initialized yet.
-  message_type = field_descriptor.message_type
-  message_sizer = MessageSizer(field_descriptor.number, False, False)
-
-  def FieldSize(map_value):
-    total = 0
-    for key in map_value:
-      value = map_value[key]
-      # It's wasteful to create the messages and throw them away one second
-      # later since we'll do the same for the actual encode.  But there's not an
-      # obvious way to avoid this within the current design without tons of code
-      # duplication.
-      entry_msg = message_type._concrete_class(key=key, value=value)
-      total += message_sizer(entry_msg)
-    return total
-
-  return FieldSize
-
-# ====================================================================
-# Encoders!
-
-
-def _VarintEncoder():
-  """Return an encoder for a basic varint value (does not include tag)."""
-
-  def EncodeVarint(write, value):
-    bits = value & 0x7f
-    value >>= 7
-    while value:
-      write(six.int2byte(0x80|bits))
-      bits = value & 0x7f
-      value >>= 7
-    return write(six.int2byte(bits))
-
-  return EncodeVarint
-
-
-def _SignedVarintEncoder():
-  """Return an encoder for a basic signed varint value (does not include
-  tag)."""
-
-  def EncodeSignedVarint(write, value):
-    if value < 0:
-      value += (1 << 64)
-    bits = value & 0x7f
-    value >>= 7
-    while value:
-      write(six.int2byte(0x80|bits))
-      bits = value & 0x7f
-      value >>= 7
-    return write(six.int2byte(bits))
-
-  return EncodeSignedVarint
-
-
-_EncodeVarint = _VarintEncoder()
-_EncodeSignedVarint = _SignedVarintEncoder()
-
-
-def _VarintBytes(value):
-  """Encode the given integer as a varint and return the bytes.  This is only
-  called at startup time so it doesn't need to be fast."""
-
-  pieces = []
-  _EncodeVarint(pieces.append, value)
-  return b"".join(pieces)
-
-
-def TagBytes(field_number, wire_type):
-  """Encode the given tag and return the bytes.  Only called at startup."""
-
-  return _VarintBytes(wire_format.PackTag(field_number, wire_type))
-
-# --------------------------------------------------------------------
-# As with sizers (see above), we have a number of common encoder
-# implementations.
-
-
-def _SimpleEncoder(wire_type, encode_value, compute_value_size):
-  """Return a constructor for an encoder for fields of a particular type.
-
-  Args:
-      wire_type:  The field's wire type, for encoding tags.
-      encode_value:  A function which encodes an individual value, e.g.
-        _EncodeVarint().
-      compute_value_size:  A function which computes the size of an individual
-        value, e.g. _VarintSize().
-  """
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value):
-        write(tag_bytes)
-        size = 0
-        for element in value:
-          size += compute_value_size(element)
-        local_EncodeVarint(write, size)
-        for element in value:
-          encode_value(write, element)
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value):
-        for element in value:
-          write(tag_bytes)
-          encode_value(write, element)
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value):
-        write(tag_bytes)
-        return encode_value(write, value)
-      return EncodeField
-
-  return SpecificEncoder
-
-
-def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
-  """Like SimpleEncoder but additionally invokes modify_value on every value
-  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value):
-        write(tag_bytes)
-        size = 0
-        for element in value:
-          size += compute_value_size(modify_value(element))
-        local_EncodeVarint(write, size)
-        for element in value:
-          encode_value(write, modify_value(element))
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value):
-        for element in value:
-          write(tag_bytes)
-          encode_value(write, modify_value(element))
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value):
-        write(tag_bytes)
-        return encode_value(write, modify_value(value))
-      return EncodeField
-
-  return SpecificEncoder
-
-
-def _StructPackEncoder(wire_type, format):
-  """Return a constructor for an encoder for a fixed-width field.
-
-  Args:
-      wire_type:  The field's wire type, for encoding tags.
-      format:  The format string to pass to struct.pack().
-  """
-
-  value_size = struct.calcsize(format)
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    local_struct_pack = struct.pack
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value):
-        write(tag_bytes)
-        local_EncodeVarint(write, len(value) * value_size)
-        for element in value:
-          write(local_struct_pack(format, element))
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value):
-        for element in value:
-          write(tag_bytes)
-          write(local_struct_pack(format, element))
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value):
-        write(tag_bytes)
-        return write(local_struct_pack(format, value))
-      return EncodeField
-
-  return SpecificEncoder
-
-
-def _FloatingPointEncoder(wire_type, format):
-  """Return a constructor for an encoder for float fields.
-
-  This is like _StructPackEncoder, but catches errors that may be due to
-  passing non-finite floating-point values to struct.pack, and makes a
-  second attempt to encode those values.
-
-  Args:
-      wire_type:  The field's wire type, for encoding tags.
-      format:  The format string to pass to struct.pack().
-  """
-
-  value_size = struct.calcsize(format)
-  if value_size == 4:
-    def EncodeNonFiniteOrRaise(write, value):
-      # Remember that the serialized form uses little-endian byte order.
-      if value == _POS_INF:
-        write(b'\x00\x00\x80\x7F')
-      elif value == _NEG_INF:
-        write(b'\x00\x00\x80\xFF')
-      elif value != value:           # NaN
-        write(b'\x00\x00\xC0\x7F')
-      else:
-        raise
-  elif value_size == 8:
-    def EncodeNonFiniteOrRaise(write, value):
-      if value == _POS_INF:
-        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
-      elif value == _NEG_INF:
-        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
-      elif value != value:                         # NaN
-        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
-      else:
-        raise
-  else:
-    raise ValueError('Can\'t encode floating-point values that are '
-                     '%d bytes long (only 4 or 8)' % value_size)
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    local_struct_pack = struct.pack
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value):
-        write(tag_bytes)
-        local_EncodeVarint(write, len(value) * value_size)
-        for element in value:
-          # This try/except block is going to be faster than any code that
-          # we could write to check whether element is finite.
-          try:
-            write(local_struct_pack(format, element))
-          except SystemError:
-            EncodeNonFiniteOrRaise(write, element)
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value):
-        for element in value:
-          write(tag_bytes)
-          try:
-            write(local_struct_pack(format, element))
-          except SystemError:
-            EncodeNonFiniteOrRaise(write, element)
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value):
-        write(tag_bytes)
-        try:
-          write(local_struct_pack(format, value))
-        except SystemError:
-          EncodeNonFiniteOrRaise(write, value)
-      return EncodeField
-
-  return SpecificEncoder
-
-
-# ====================================================================
-# Here we declare an encoder constructor for each field type.  These work
-# very similarly to sizer constructors, described earlier.
-
-
-Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
-
-UInt32Encoder = UInt64Encoder = _SimpleEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
-
-SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
-    wire_format.ZigZagEncode)
-
-# Note that Python conveniently guarantees that when using the '<' prefix on
-# formats, they will also have the same size across all platforms (as opposed
-# to without the prefix, where their sizes depend on the C compiler's basic
-# type sizes).
-Fixed32Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
-Fixed64Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
-SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
-SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
-FloatEncoder    = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
-DoubleEncoder   = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
-
-
-def BoolEncoder(field_number, is_repeated, is_packed):
-  """Returns an encoder for a boolean field."""
-
-  false_byte = b'\x00'
-  true_byte = b'\x01'
-  if is_packed:
-    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-    local_EncodeVarint = _EncodeVarint
-    def EncodePackedField(write, value):
-      write(tag_bytes)
-      local_EncodeVarint(write, len(value))
-      for element in value:
-        if element:
-          write(true_byte)
-        else:
-          write(false_byte)
-    return EncodePackedField
-  elif is_repeated:
-    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
-    def EncodeRepeatedField(write, value):
-      for element in value:
-        write(tag_bytes)
-        if element:
-          write(true_byte)
-        else:
-          write(false_byte)
-    return EncodeRepeatedField
-  else:
-    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
-    def EncodeField(write, value):
-      write(tag_bytes)
-      if value:
-        return write(true_byte)
-      return write(false_byte)
-    return EncodeField
-
-
-def StringEncoder(field_number, is_repeated, is_packed):
-  """Returns an encoder for a string field."""
-
-  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-  local_EncodeVarint = _EncodeVarint
-  local_len = len
-  assert not is_packed
-  if is_repeated:
-    def EncodeRepeatedField(write, value):
-      for element in value:
-        encoded = element.encode('utf-8')
-        write(tag)
-        local_EncodeVarint(write, local_len(encoded))
-        write(encoded)
-    return EncodeRepeatedField
-  else:
-    def EncodeField(write, value):
-      encoded = value.encode('utf-8')
-      write(tag)
-      local_EncodeVarint(write, local_len(encoded))
-      return write(encoded)
-    return EncodeField
-
-
-def BytesEncoder(field_number, is_repeated, is_packed):
-  """Returns an encoder for a bytes field."""
-
-  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-  local_EncodeVarint = _EncodeVarint
-  local_len = len
-  assert not is_packed
-  if is_repeated:
-    def EncodeRepeatedField(write, value):
-      for element in value:
-        write(tag)
-        local_EncodeVarint(write, local_len(element))
-        write(element)
-    return EncodeRepeatedField
-  else:
-    def EncodeField(write, value):
-      write(tag)
-      local_EncodeVarint(write, local_len(value))
-      return write(value)
-    return EncodeField
-
-
-def GroupEncoder(field_number, is_repeated, is_packed):
-  """Returns an encoder for a group field."""
-
-  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
-  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
-  assert not is_packed
-  if is_repeated:
-    def EncodeRepeatedField(write, value):
-      for element in value:
-        write(start_tag)
-        element._InternalSerialize(write)
-        write(end_tag)
-    return EncodeRepeatedField
-  else:
-    def EncodeField(write, value):
-      write(start_tag)
-      value._InternalSerialize(write)
-      return write(end_tag)
-    return EncodeField
-
-
-def MessageEncoder(field_number, is_repeated, is_packed):
-  """Returns an encoder for a message field."""
-
-  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-  local_EncodeVarint = _EncodeVarint
-  assert not is_packed
-  if is_repeated:
-    def EncodeRepeatedField(write, value):
-      for element in value:
-        write(tag)
-        local_EncodeVarint(write, element.ByteSize())
-        element._InternalSerialize(write)
-    return EncodeRepeatedField
-  else:
-    def EncodeField(write, value):
-      write(tag)
-      local_EncodeVarint(write, value.ByteSize())
-      return value._InternalSerialize(write)
-    return EncodeField
-
-
-# --------------------------------------------------------------------
-# As before, MessageSet is special.
-
-
-def MessageSetItemEncoder(field_number):
-  """Encoder for extensions of MessageSet.
-
-  The message set message looks like this:
-    message MessageSet {
-      repeated group Item = 1 {
-        required int32 type_id = 2;
-        required string message = 3;
-      }
-    }
-  """
-  start_bytes = b"".join([
-      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
-      TagBytes(2, wire_format.WIRETYPE_VARINT),
-      _VarintBytes(field_number),
-      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
-  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
-  local_EncodeVarint = _EncodeVarint
-
-  def EncodeField(write, value):
-    write(start_bytes)
-    local_EncodeVarint(write, value.ByteSize())
-    value._InternalSerialize(write)
-    return write(end_bytes)
-
-  return EncodeField
-
-
-# --------------------------------------------------------------------
-# As before, Map is special.
-
-
-def MapEncoder(field_descriptor):
-  """Encoder for extensions of MessageSet.
-
-  Maps always have a wire format like this:
-    message MapEntry {
-      key_type key = 1;
-      value_type value = 2;
-    }
-    repeated MapEntry map = N;
-  """
-  # Can't look at field_descriptor.message_type._concrete_class because it may
-  # not have been initialized yet.
-  message_type = field_descriptor.message_type
-  encode_message = MessageEncoder(field_descriptor.number, False, False)
-
-  def EncodeField(write, value):
-    for key in value:
-      entry_msg = message_type._concrete_class(key=key, value=value[key])
-      encode_message(write, entry_msg)
-
-  return EncodeField
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/enum_type_wrapper.py b/tools/swarming_client/third_party/google/protobuf/internal/enum_type_wrapper.py
deleted file mode 100644
index 1cffe35..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/enum_type_wrapper.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""A simple wrapper around enum types to expose utility functions.
-
-Instances are created as properties with the same name as the enum they wrap
-on proto classes.  For usage, see:
-  reflection_test.py
-"""
-
-__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
-
-
-class EnumTypeWrapper(object):
-  """A utility for finding the names of enum values."""
-
-  DESCRIPTOR = None
-
-  def __init__(self, enum_type):
-    """Inits EnumTypeWrapper with an EnumDescriptor."""
-    self._enum_type = enum_type
-    self.DESCRIPTOR = enum_type
-
-  def Name(self, number):
-    """Returns a string containing the name of an enum value."""
-    if number in self._enum_type.values_by_number:
-      return self._enum_type.values_by_number[number].name
-    raise ValueError('Enum %s has no name defined for value %d' % (
-        self._enum_type.name, number))
-
-  def Value(self, name):
-    """Returns the value coresponding to the given enum name."""
-    if name in self._enum_type.values_by_name:
-      return self._enum_type.values_by_name[name].number
-    raise ValueError('Enum %s has no value defined for name %s' % (
-        self._enum_type.name, name))
-
-  def keys(self):
-    """Return a list of the string names in the enum.
-
-    These are returned in the order they were defined in the .proto file.
-    """
-
-    return [value_descriptor.name
-            for value_descriptor in self._enum_type.values]
-
-  def values(self):
-    """Return a list of the integer values in the enum.
-
-    These are returned in the order they were defined in the .proto file.
-    """
-
-    return [value_descriptor.number
-            for value_descriptor in self._enum_type.values]
-
-  def items(self):
-    """Return a list of the (name, value) pairs of the enum.
-
-    These are returned in the order they were defined in the .proto file.
-    """
-    return [(value_descriptor.name, value_descriptor.number)
-            for value_descriptor in self._enum_type.values]
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/factory_test1_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/factory_test1_pb2.py
deleted file mode 100644
index baa0ae4..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/factory_test1_pb2.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/factory_test1.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/factory_test1.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n,google/protobuf/internal/factory_test1.proto\x12\x1fgoogle.protobuf.python.internal\"\xd5\x03\n\x0f\x46\x61\x63tory1Message\x12\x45\n\x0e\x66\x61\x63tory_1_enum\x18\x01 \x01(\x0e\x32-.google.protobuf.python.internal.Factory1Enum\x12\x62\n\x15nested_factory_1_enum\x18\x02 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory1Message.NestedFactory1Enum\x12h\n\x18nested_factory_1_message\x18\x03 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory1Message.NestedFactory1Message\x12\x14\n\x0cscalar_value\x18\x04 \x01(\x05\x12\x12\n\nlist_value\x18\x05 \x03(\t\x1a&\n\x15NestedFactory1Message\x12\r\n\x05value\x18\x01 \x01(\t\"P\n\x12NestedFactory1Enum\x12\x1c\n\x18NESTED_FACTORY_1_VALUE_0\x10\x00\x12\x1c\n\x18NESTED_FACTORY_1_VALUE_1\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02*<\n\x0c\x46\x61\x63tory1Enum\x12\x15\n\x11\x46\x41\x43TORY_1_VALUE_0\x10\x00\x12\x15\n\x11\x46\x41\x43TORY_1_VALUE_1\x10\x01')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_FACTORY1ENUM = _descriptor.EnumDescriptor(
-  name='Factory1Enum',
-  full_name='google.protobuf.python.internal.Factory1Enum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FACTORY_1_VALUE_0', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FACTORY_1_VALUE_1', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=553,
-  serialized_end=613,
-)
-_sym_db.RegisterEnumDescriptor(_FACTORY1ENUM)
-
-Factory1Enum = enum_type_wrapper.EnumTypeWrapper(_FACTORY1ENUM)
-FACTORY_1_VALUE_0 = 0
-FACTORY_1_VALUE_1 = 1
-
-
-_FACTORY1MESSAGE_NESTEDFACTORY1ENUM = _descriptor.EnumDescriptor(
-  name='NestedFactory1Enum',
-  full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Enum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_FACTORY_1_VALUE_0', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_FACTORY_1_VALUE_1', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=460,
-  serialized_end=540,
-)
-_sym_db.RegisterEnumDescriptor(_FACTORY1MESSAGE_NESTEDFACTORY1ENUM)
-
-
-_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE = _descriptor.Descriptor(
-  name='NestedFactory1Message',
-  full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Message.value', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=420,
-  serialized_end=458,
-)
-
-_FACTORY1MESSAGE = _descriptor.Descriptor(
-  name='Factory1Message',
-  full_name='google.protobuf.python.internal.Factory1Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='factory_1_enum', full_name='google.protobuf.python.internal.Factory1Message.factory_1_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_1_enum', full_name='google.protobuf.python.internal.Factory1Message.nested_factory_1_enum', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_1_message', full_name='google.protobuf.python.internal.Factory1Message.nested_factory_1_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='scalar_value', full_name='google.protobuf.python.internal.Factory1Message.scalar_value', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='list_value', full_name='google.protobuf.python.internal.Factory1Message.list_value', index=4,
-      number=5, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE, ],
-  enum_types=[
-    _FACTORY1MESSAGE_NESTEDFACTORY1ENUM,
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=82,
-  serialized_end=551,
-)
-
-_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE.containing_type = _FACTORY1MESSAGE
-_FACTORY1MESSAGE.fields_by_name['factory_1_enum'].enum_type = _FACTORY1ENUM
-_FACTORY1MESSAGE.fields_by_name['nested_factory_1_enum'].enum_type = _FACTORY1MESSAGE_NESTEDFACTORY1ENUM
-_FACTORY1MESSAGE.fields_by_name['nested_factory_1_message'].message_type = _FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE
-_FACTORY1MESSAGE_NESTEDFACTORY1ENUM.containing_type = _FACTORY1MESSAGE
-DESCRIPTOR.message_types_by_name['Factory1Message'] = _FACTORY1MESSAGE
-DESCRIPTOR.enum_types_by_name['Factory1Enum'] = _FACTORY1ENUM
-
-Factory1Message = _reflection.GeneratedProtocolMessageType('Factory1Message', (_message.Message,), dict(
-
-  NestedFactory1Message = _reflection.GeneratedProtocolMessageType('NestedFactory1Message', (_message.Message,), dict(
-    DESCRIPTOR = _FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE,
-    __module__ = 'google.protobuf.internal.factory_test1_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1Message.NestedFactory1Message)
-    ))
-  ,
-  DESCRIPTOR = _FACTORY1MESSAGE,
-  __module__ = 'google.protobuf.internal.factory_test1_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1Message)
-  ))
-_sym_db.RegisterMessage(Factory1Message)
-_sym_db.RegisterMessage(Factory1Message.NestedFactory1Message)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/factory_test2_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/factory_test2_pb2.py
deleted file mode 100644
index 64808ae..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/factory_test2_pb2.py
+++ /dev/null
@@ -1,477 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/factory_test2.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf.internal import factory_test1_pb2 as google_dot_protobuf_dot_internal_dot_factory__test1__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/factory_test2.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n,google/protobuf/internal/factory_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a,google/protobuf/internal/factory_test1.proto\"\xd8\x0b\n\x0f\x46\x61\x63tory2Message\x12\x11\n\tmandatory\x18\x01 \x02(\x05\x12\x45\n\x0e\x66\x61\x63tory_2_enum\x18\x02 \x01(\x0e\x32-.google.protobuf.python.internal.Factory2Enum\x12\x62\n\x15nested_factory_2_enum\x18\x03 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory2Message.NestedFactory2Enum\x12h\n\x18nested_factory_2_message\x18\x04 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory2Message.NestedFactory2Message\x12K\n\x11\x66\x61\x63tory_1_message\x18\x05 \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory1Message\x12\x45\n\x0e\x66\x61\x63tory_1_enum\x18\x06 \x01(\x0e\x32-.google.protobuf.python.internal.Factory1Enum\x12\x62\n\x15nested_factory_1_enum\x18\x07 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory1Message.NestedFactory1Enum\x12h\n\x18nested_factory_1_message\x18\x08 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory1Message.NestedFactory1Message\x12J\n\x10\x63ircular_message\x18\t \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory2Message\x12\x14\n\x0cscalar_value\x18\n \x01(\t\x12\x12\n\nlist_value\x18\x0b \x03(\t\x12I\n\x07grouped\x18\x0c \x03(\n28.google.protobuf.python.internal.Factory2Message.Grouped\x12:\n\x04loop\x18\x0f \x01(\x0b\x32,.google.protobuf.python.internal.LoopMessage\x12\x1e\n\x10int_with_default\x18\x10 \x01(\x05:\x04\x31\x37\x37\x36\x12!\n\x13\x64ouble_with_default\x18\x11 \x01(\x01:\x04\x39.99\x12(\n\x13string_with_default\x18\x12 \x01(\t:\x0bhello world\x12 \n\x11\x62ool_with_default\x18\x13 \x01(\x08:\x05\x66\x61lse\x12[\n\x11\x65num_with_default\x18\x14 \x01(\x0e\x32-.google.protobuf.python.internal.Factory2Enum:\x11\x46\x41\x43TORY_2_VALUE_1\x12&\n\x12\x62ytes_with_default\x18\x15 \x01(\x0c:\na\\373\\000c\x12\x13\n\toneof_int\x18\x16 \x01(\x05H\x00\x12\x16\n\x0coneof_string\x18\x17 \x01(\tH\x00\x1a&\n\x15NestedFactory2Message\x12\r\n\x05value\x18\x01 \x01(\t\x1a)\n\x07Grouped\x12\x0e\n\x06part_1\x18\r \x01(\t\x12\x0e\n\x06part_2\x18\x0e \x01(\t\"P\n\x12NestedFactory2Enum\x12\x1c\n\x18NESTED_FACTORY_2_VALUE_0\x10\x00\x12\x1c\n\x18NESTED_FACTORY_2_VALUE_1\x10\x01\x32I\n\x0eone_more_field\x12\x30.google.protobuf.python.internal.Factory1Message\x18\xe9\x07 \x01(\tB\r\n\x0boneof_field\"M\n\x0bLoopMessage\x12>\n\x04loop\x18\x01 \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory2Message\"D\n\x19MessageWithNestedEnumOnly\"\'\n\nNestedEnum\x12\x19\n\x15NESTED_MESSAGE_ENUM_0\x10\x00*<\n\x0c\x46\x61\x63tory2Enum\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_0\x10\x00\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_1\x10\x01:H\n\ranother_field\x12\x30.google.protobuf.python.internal.Factory1Message\x18\xea\x07 \x01(\t')
-  ,
-  dependencies=[google_dot_protobuf_dot_internal_dot_factory__test1__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_FACTORY2ENUM = _descriptor.EnumDescriptor(
-  name='Factory2Enum',
-  full_name='google.protobuf.python.internal.Factory2Enum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FACTORY_2_VALUE_0', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FACTORY_2_VALUE_1', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1775,
-  serialized_end=1835,
-)
-_sym_db.RegisterEnumDescriptor(_FACTORY2ENUM)
-
-Factory2Enum = enum_type_wrapper.EnumTypeWrapper(_FACTORY2ENUM)
-FACTORY_2_VALUE_0 = 0
-FACTORY_2_VALUE_1 = 1
-
-ANOTHER_FIELD_FIELD_NUMBER = 1002
-another_field = _descriptor.FieldDescriptor(
-  name='another_field', full_name='google.protobuf.python.internal.another_field', index=0,
-  number=1002, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-_FACTORY2MESSAGE_NESTEDFACTORY2ENUM = _descriptor.EnumDescriptor(
-  name='NestedFactory2Enum',
-  full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Enum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_FACTORY_2_VALUE_0', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_FACTORY_2_VALUE_1', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1454,
-  serialized_end=1534,
-)
-_sym_db.RegisterEnumDescriptor(_FACTORY2MESSAGE_NESTEDFACTORY2ENUM)
-
-_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_MESSAGE_ENUM_0', index=0, number=0,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1734,
-  serialized_end=1773,
-)
-_sym_db.RegisterEnumDescriptor(_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM)
-
-
-_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE = _descriptor.Descriptor(
-  name='NestedFactory2Message',
-  full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message.value', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1371,
-  serialized_end=1409,
-)
-
-_FACTORY2MESSAGE_GROUPED = _descriptor.Descriptor(
-  name='Grouped',
-  full_name='google.protobuf.python.internal.Factory2Message.Grouped',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='part_1', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_1', index=0,
-      number=13, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='part_2', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_2', index=1,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1411,
-  serialized_end=1452,
-)
-
-_FACTORY2MESSAGE = _descriptor.Descriptor(
-  name='Factory2Message',
-  full_name='google.protobuf.python.internal.Factory2Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='mandatory', full_name='google.protobuf.python.internal.Factory2Message.mandatory', index=0,
-      number=1, type=5, cpp_type=1, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_2_enum', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_enum', index=2,
-      number=3, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_2_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_message', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.factory_1_message', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_1_enum', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_enum', index=6,
-      number=7, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_message', index=7,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='circular_message', full_name='google.protobuf.python.internal.Factory2Message.circular_message', index=8,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='scalar_value', full_name='google.protobuf.python.internal.Factory2Message.scalar_value', index=9,
-      number=10, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='list_value', full_name='google.protobuf.python.internal.Factory2Message.list_value', index=10,
-      number=11, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='grouped', full_name='google.protobuf.python.internal.Factory2Message.grouped', index=11,
-      number=12, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='loop', full_name='google.protobuf.python.internal.Factory2Message.loop', index=12,
-      number=15, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int_with_default', full_name='google.protobuf.python.internal.Factory2Message.int_with_default', index=13,
-      number=16, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=1776,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='double_with_default', full_name='google.protobuf.python.internal.Factory2Message.double_with_default', index=14,
-      number=17, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=float(9.99),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_with_default', full_name='google.protobuf.python.internal.Factory2Message.string_with_default', index=15,
-      number=18, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("hello world").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bool_with_default', full_name='google.protobuf.python.internal.Factory2Message.bool_with_default', index=16,
-      number=19, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enum_with_default', full_name='google.protobuf.python.internal.Factory2Message.enum_with_default', index=17,
-      number=20, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bytes_with_default', full_name='google.protobuf.python.internal.Factory2Message.bytes_with_default', index=18,
-      number=21, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("a\373\000c"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_int', full_name='google.protobuf.python.internal.Factory2Message.oneof_int', index=19,
-      number=22, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_string', full_name='google.protobuf.python.internal.Factory2Message.oneof_string', index=20,
-      number=23, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='one_more_field', full_name='google.protobuf.python.internal.Factory2Message.one_more_field', index=0,
-      number=1001, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE, _FACTORY2MESSAGE_GROUPED, ],
-  enum_types=[
-    _FACTORY2MESSAGE_NESTEDFACTORY2ENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='oneof_field', full_name='google.protobuf.python.internal.Factory2Message.oneof_field',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=128,
-  serialized_end=1624,
-)
-
-
-_LOOPMESSAGE = _descriptor.Descriptor(
-  name='LoopMessage',
-  full_name='google.protobuf.python.internal.LoopMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='loop', full_name='google.protobuf.python.internal.LoopMessage.loop', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1626,
-  serialized_end=1703,
-)
-
-
-_MESSAGEWITHNESTEDENUMONLY = _descriptor.Descriptor(
-  name='MessageWithNestedEnumOnly',
-  full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _MESSAGEWITHNESTEDENUMONLY_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1705,
-  serialized_end=1773,
-)
-
-_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE.containing_type = _FACTORY2MESSAGE
-_FACTORY2MESSAGE_GROUPED.containing_type = _FACTORY2MESSAGE
-_FACTORY2MESSAGE.fields_by_name['factory_2_enum'].enum_type = _FACTORY2ENUM
-_FACTORY2MESSAGE.fields_by_name['nested_factory_2_enum'].enum_type = _FACTORY2MESSAGE_NESTEDFACTORY2ENUM
-_FACTORY2MESSAGE.fields_by_name['nested_factory_2_message'].message_type = _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE
-_FACTORY2MESSAGE.fields_by_name['factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE
-_FACTORY2MESSAGE.fields_by_name['factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1ENUM
-_FACTORY2MESSAGE.fields_by_name['nested_factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1ENUM
-_FACTORY2MESSAGE.fields_by_name['nested_factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE
-_FACTORY2MESSAGE.fields_by_name['circular_message'].message_type = _FACTORY2MESSAGE
-_FACTORY2MESSAGE.fields_by_name['grouped'].message_type = _FACTORY2MESSAGE_GROUPED
-_FACTORY2MESSAGE.fields_by_name['loop'].message_type = _LOOPMESSAGE
-_FACTORY2MESSAGE.fields_by_name['enum_with_default'].enum_type = _FACTORY2ENUM
-_FACTORY2MESSAGE_NESTEDFACTORY2ENUM.containing_type = _FACTORY2MESSAGE
-_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append(
-  _FACTORY2MESSAGE.fields_by_name['oneof_int'])
-_FACTORY2MESSAGE.fields_by_name['oneof_int'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field']
-_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append(
-  _FACTORY2MESSAGE.fields_by_name['oneof_string'])
-_FACTORY2MESSAGE.fields_by_name['oneof_string'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field']
-_LOOPMESSAGE.fields_by_name['loop'].message_type = _FACTORY2MESSAGE
-_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM.containing_type = _MESSAGEWITHNESTEDENUMONLY
-DESCRIPTOR.message_types_by_name['Factory2Message'] = _FACTORY2MESSAGE
-DESCRIPTOR.message_types_by_name['LoopMessage'] = _LOOPMESSAGE
-DESCRIPTOR.message_types_by_name['MessageWithNestedEnumOnly'] = _MESSAGEWITHNESTEDENUMONLY
-DESCRIPTOR.enum_types_by_name['Factory2Enum'] = _FACTORY2ENUM
-DESCRIPTOR.extensions_by_name['another_field'] = another_field
-
-Factory2Message = _reflection.GeneratedProtocolMessageType('Factory2Message', (_message.Message,), dict(
-
-  NestedFactory2Message = _reflection.GeneratedProtocolMessageType('NestedFactory2Message', (_message.Message,), dict(
-    DESCRIPTOR = _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE,
-    __module__ = 'google.protobuf.internal.factory_test2_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.NestedFactory2Message)
-    ))
-  ,
-
-  Grouped = _reflection.GeneratedProtocolMessageType('Grouped', (_message.Message,), dict(
-    DESCRIPTOR = _FACTORY2MESSAGE_GROUPED,
-    __module__ = 'google.protobuf.internal.factory_test2_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.Grouped)
-    ))
-  ,
-  DESCRIPTOR = _FACTORY2MESSAGE,
-  __module__ = 'google.protobuf.internal.factory_test2_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message)
-  ))
-_sym_db.RegisterMessage(Factory2Message)
-_sym_db.RegisterMessage(Factory2Message.NestedFactory2Message)
-_sym_db.RegisterMessage(Factory2Message.Grouped)
-
-LoopMessage = _reflection.GeneratedProtocolMessageType('LoopMessage', (_message.Message,), dict(
-  DESCRIPTOR = _LOOPMESSAGE,
-  __module__ = 'google.protobuf.internal.factory_test2_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.LoopMessage)
-  ))
-_sym_db.RegisterMessage(LoopMessage)
-
-MessageWithNestedEnumOnly = _reflection.GeneratedProtocolMessageType('MessageWithNestedEnumOnly', (_message.Message,), dict(
-  DESCRIPTOR = _MESSAGEWITHNESTEDENUMONLY,
-  __module__ = 'google.protobuf.internal.factory_test2_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.MessageWithNestedEnumOnly)
-  ))
-_sym_db.RegisterMessage(MessageWithNestedEnumOnly)
-
-google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(another_field)
-google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(_FACTORY2MESSAGE.extensions_by_name['one_more_field'])
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/file_options_test_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/file_options_test_pb2.py
deleted file mode 100644
index 315df4b..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/file_options_test_pb2.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/file_options_test.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/file_options_test.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n0google/protobuf/internal/file_options_test.proto\x12\x1fgoogle.protobuf.python.internal\x1a google/protobuf/descriptor.proto\"\x1e\n\nFooOptions\x12\x10\n\x08\x66oo_name\x18\x01 \x01(\t:a\n\x0b\x66oo_options\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\xb6\x39 \x01(\x0b\x32+.google.protobuf.python.internal.FooOptions')
-  ,
-  dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-FOO_OPTIONS_FIELD_NUMBER = 120436268
-foo_options = _descriptor.FieldDescriptor(
-  name='foo_options', full_name='google.protobuf.python.internal.foo_options', index=0,
-  number=120436268, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_FOOOPTIONS = _descriptor.Descriptor(
-  name='FooOptions',
-  full_name='google.protobuf.python.internal.FooOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo_name', full_name='google.protobuf.python.internal.FooOptions.foo_name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=119,
-  serialized_end=149,
-)
-
-DESCRIPTOR.message_types_by_name['FooOptions'] = _FOOOPTIONS
-DESCRIPTOR.extensions_by_name['foo_options'] = foo_options
-
-FooOptions = _reflection.GeneratedProtocolMessageType('FooOptions', (_message.Message,), dict(
-  DESCRIPTOR = _FOOOPTIONS,
-  __module__ = 'google.protobuf.internal.file_options_test_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.FooOptions)
-  ))
-_sym_db.RegisterMessage(FooOptions)
-
-foo_options.message_type = _FOOOPTIONS
-google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(foo_options)
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/__init__.py b/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/__init__.py
deleted file mode 100644
index 5121dd0..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Sample module importing a nested proto from itself."""
-
-from google.protobuf.internal.import_test_package import outer_pb2 as myproto
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/inner_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/inner_pb2.py
deleted file mode 100644
index e45c21b..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/inner_pb2.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/import_test_package/inner.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/import_test_package/inner.proto',
-  package='google.protobuf.python.internal.import_test_package',
-  syntax='proto2',
-  serialized_pb=_b('\n8google/protobuf/internal/import_test_package/inner.proto\x12\x33google.protobuf.python.internal.import_test_package\"\x1a\n\x05Inner\x12\x11\n\x05value\x18\x01 \x01(\x05:\x02\x35\x37')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_INNER = _descriptor.Descriptor(
-  name='Inner',
-  full_name='google.protobuf.python.internal.import_test_package.Inner',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.python.internal.import_test_package.Inner.value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=57,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=139,
-)
-
-DESCRIPTOR.message_types_by_name['Inner'] = _INNER
-
-Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict(
-  DESCRIPTOR = _INNER,
-  __module__ = 'google.protobuf.internal.import_test_package.inner_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Inner)
-  ))
-_sym_db.RegisterMessage(Inner)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/outer_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/outer_pb2.py
deleted file mode 100644
index 15f72f4..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/import_test_package/outer_pb2.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/import_test_package/outer.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf.internal.import_test_package import inner_pb2 as google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/import_test_package/outer.proto',
-  package='google.protobuf.python.internal.import_test_package',
-  syntax='proto2',
-  serialized_pb=_b('\n8google/protobuf/internal/import_test_package/outer.proto\x12\x33google.protobuf.python.internal.import_test_package\x1a\x38google/protobuf/internal/import_test_package/inner.proto\"R\n\x05Outer\x12I\n\x05inner\x18\x01 \x01(\x0b\x32:.google.protobuf.python.internal.import_test_package.Inner')
-  ,
-  dependencies=[google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_OUTER = _descriptor.Descriptor(
-  name='Outer',
-  full_name='google.protobuf.python.internal.import_test_package.Outer',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='inner', full_name='google.protobuf.python.internal.import_test_package.Outer.inner', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=171,
-  serialized_end=253,
-)
-
-_OUTER.fields_by_name['inner'].message_type = google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2._INNER
-DESCRIPTOR.message_types_by_name['Outer'] = _OUTER
-
-Outer = _reflection.GeneratedProtocolMessageType('Outer', (_message.Message,), dict(
-  DESCRIPTOR = _OUTER,
-  __module__ = 'google.protobuf.internal.import_test_package.outer_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Outer)
-  ))
-_sym_db.RegisterMessage(Outer)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/message_listener.py b/tools/swarming_client/third_party/google/protobuf/internal/message_listener.py
deleted file mode 100644
index 0fc255a..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/message_listener.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Defines a listener interface for observing certain
-state transitions on Message objects.
-
-Also defines a null implementation of this interface.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-
-class MessageListener(object):
-
-  """Listens for modifications made to a message.  Meant to be registered via
-  Message._SetListener().
-
-  Attributes:
-    dirty:  If True, then calling Modified() would be a no-op.  This can be
-            used to avoid these calls entirely in the common case.
-  """
-
-  def Modified(self):
-    """Called every time the message is modified in such a way that the parent
-    message may need to be updated.  This currently means either:
-    (a) The message was modified for the first time, so the parent message
-        should henceforth mark the message as present.
-    (b) The message's cached byte size became dirty -- i.e. the message was
-        modified for the first time after a previous call to ByteSize().
-        Therefore the parent should also mark its byte size as dirty.
-    Note that (a) implies (b), since new objects start out with a client cached
-    size (zero).  However, we document (a) explicitly because it is important.
-
-    Modified() will *only* be called in response to one of these two events --
-    not every time the sub-message is modified.
-
-    Note that if the listener's |dirty| attribute is true, then calling
-    Modified at the moment would be a no-op, so it can be skipped.  Performance-
-    sensitive callers should check this attribute directly before calling since
-    it will be true most of the time.
-    """
-
-    raise NotImplementedError
-
-
-class NullMessageListener(object):
-
-  """No-op MessageListener implementation."""
-
-  def Modified(self):
-    pass
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/message_set_extensions_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/message_set_extensions_pb2.py
deleted file mode 100644
index 4c9b475..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/message_set_extensions_pb2.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/message_set_extensions.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/message_set_extensions.proto',
-  package='google.protobuf.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-MESSAGE_SET_EXTENSION3_FIELD_NUMBER = 98418655
-message_set_extension3 = _descriptor.FieldDescriptor(
-  name='message_set_extension3', full_name='google.protobuf.internal.message_set_extension3', index=0,
-  number=98418655, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_TESTMESSAGESET = _descriptor.Descriptor(
-  name='TestMessageSet',
-  full_name='google.protobuf.internal.TestMessageSet',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')),
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(4, 2147483647), ],
-  oneofs=[
-  ],
-  serialized_start=83,
-  serialized_end=113,
-)
-
-
-_TESTMESSAGESETEXTENSION1 = _descriptor.Descriptor(
-  name='TestMessageSetExtension1',
-  full_name='google.protobuf.internal.TestMessageSetExtension1',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='i', full_name='google.protobuf.internal.TestMessageSetExtension1.i', index=0,
-      number=15, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension1.message_set_extension', index=0,
-      number=98418603, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=116,
-  serialized_end=281,
-)
-
-
-_TESTMESSAGESETEXTENSION2 = _descriptor.Descriptor(
-  name='TestMessageSetExtension2',
-  full_name='google.protobuf.internal.TestMessageSetExtension2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='str', full_name='google.protobuf.internal.TestMessageSetExtension2.str', index=0,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension2.message_set_extension', index=0,
-      number=98418634, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=284,
-  serialized_end=451,
-)
-
-
-_TESTMESSAGESETEXTENSION3 = _descriptor.Descriptor(
-  name='TestMessageSetExtension3',
-  full_name='google.protobuf.internal.TestMessageSetExtension3',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='text', full_name='google.protobuf.internal.TestMessageSetExtension3.text', index=0,
-      number=35, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=453,
-  serialized_end=493,
-)
-
-DESCRIPTOR.message_types_by_name['TestMessageSet'] = _TESTMESSAGESET
-DESCRIPTOR.message_types_by_name['TestMessageSetExtension1'] = _TESTMESSAGESETEXTENSION1
-DESCRIPTOR.message_types_by_name['TestMessageSetExtension2'] = _TESTMESSAGESETEXTENSION2
-DESCRIPTOR.message_types_by_name['TestMessageSetExtension3'] = _TESTMESSAGESETEXTENSION3
-DESCRIPTOR.extensions_by_name['message_set_extension3'] = message_set_extension3
-
-TestMessageSet = _reflection.GeneratedProtocolMessageType('TestMessageSet', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESET,
-  __module__ = 'google.protobuf.internal.message_set_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSet)
-  ))
-_sym_db.RegisterMessage(TestMessageSet)
-
-TestMessageSetExtension1 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension1', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETEXTENSION1,
-  __module__ = 'google.protobuf.internal.message_set_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension1)
-  ))
-_sym_db.RegisterMessage(TestMessageSetExtension1)
-
-TestMessageSetExtension2 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension2', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETEXTENSION2,
-  __module__ = 'google.protobuf.internal.message_set_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension2)
-  ))
-_sym_db.RegisterMessage(TestMessageSetExtension2)
-
-TestMessageSetExtension3 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension3', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETEXTENSION3,
-  __module__ = 'google.protobuf.internal.message_set_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension3)
-  ))
-_sym_db.RegisterMessage(TestMessageSetExtension3)
-
-message_set_extension3.message_type = _TESTMESSAGESETEXTENSION3
-TestMessageSet.RegisterExtension(message_set_extension3)
-_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION1
-TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'])
-_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION2
-TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'])
-
-_TESTMESSAGESET.has_options = True
-_TESTMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/missing_enum_values_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/missing_enum_values_pb2.py
deleted file mode 100644
index 4767f03..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/missing_enum_values_pb2.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/missing_enum_values.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/missing_enum_values.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_TESTENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.TestEnumValues.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ZERO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ONE', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=378,
-  serialized_end=409,
-)
-_sym_db.RegisterEnumDescriptor(_TESTENUMVALUES_NESTEDENUM)
-
-_TESTMISSINGENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.TestMissingEnumValues.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='TWO', index=0, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=730,
-  serialized_end=751,
-)
-_sym_db.RegisterEnumDescriptor(_TESTMISSINGENUMVALUES_NESTEDENUM)
-
-
-_TESTENUMVALUES = _descriptor.Descriptor(
-  name='TestEnumValues',
-  full_name='google.protobuf.python.internal.TestEnumValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.optional_nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.repeated_nested_enum', index=1,
-      number=2, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='packed_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.packed_nested_enum', index=2,
-      number=3, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _TESTENUMVALUES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=88,
-  serialized_end=409,
-)
-
-
-_TESTMISSINGENUMVALUES = _descriptor.Descriptor(
-  name='TestMissingEnumValues',
-  full_name='google.protobuf.python.internal.TestMissingEnumValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.optional_nested_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=2,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.repeated_nested_enum', index=1,
-      number=2, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='packed_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.packed_nested_enum', index=2,
-      number=3, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _TESTMISSINGENUMVALUES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=412,
-  serialized_end=751,
-)
-
-
-_JUSTSTRING = _descriptor.Descriptor(
-  name='JustString',
-  full_name='google.protobuf.python.internal.JustString',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='dummy', full_name='google.protobuf.python.internal.JustString.dummy', index=0,
-      number=1, type=9, cpp_type=9, label=2,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=753,
-  serialized_end=780,
-)
-
-_TESTENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
-_TESTENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
-_TESTENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
-_TESTENUMVALUES_NESTEDENUM.containing_type = _TESTENUMVALUES
-_TESTMISSINGENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
-_TESTMISSINGENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
-_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
-_TESTMISSINGENUMVALUES_NESTEDENUM.containing_type = _TESTMISSINGENUMVALUES
-DESCRIPTOR.message_types_by_name['TestEnumValues'] = _TESTENUMVALUES
-DESCRIPTOR.message_types_by_name['TestMissingEnumValues'] = _TESTMISSINGENUMVALUES
-DESCRIPTOR.message_types_by_name['JustString'] = _JUSTSTRING
-
-TestEnumValues = _reflection.GeneratedProtocolMessageType('TestEnumValues', (_message.Message,), dict(
-  DESCRIPTOR = _TESTENUMVALUES,
-  __module__ = 'google.protobuf.internal.missing_enum_values_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestEnumValues)
-  ))
-_sym_db.RegisterMessage(TestEnumValues)
-
-TestMissingEnumValues = _reflection.GeneratedProtocolMessageType('TestMissingEnumValues', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMISSINGENUMVALUES,
-  __module__ = 'google.protobuf.internal.missing_enum_values_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestMissingEnumValues)
-  ))
-_sym_db.RegisterMessage(TestMissingEnumValues)
-
-JustString = _reflection.GeneratedProtocolMessageType('JustString', (_message.Message,), dict(
-  DESCRIPTOR = _JUSTSTRING,
-  __module__ = 'google.protobuf.internal.missing_enum_values_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.JustString)
-  ))
-_sym_db.RegisterMessage(JustString)
-
-
-_TESTENUMVALUES.fields_by_name['packed_nested_enum'].has_options = True
-_TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum'].has_options = True
-_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_dynamic_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_dynamic_pb2.py
deleted file mode 100644
index e147800..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_dynamic_pb2.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/more_extensions_dynamic.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/more_extensions_dynamic.proto',
-  package='google.protobuf.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType')
-  ,
-  dependencies=[google_dot_protobuf_dot_internal_dot_more__extensions__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-DYNAMIC_INT32_EXTENSION_FIELD_NUMBER = 100
-dynamic_int32_extension = _descriptor.FieldDescriptor(
-  name='dynamic_int32_extension', full_name='google.protobuf.internal.dynamic_int32_extension', index=0,
-  number=100, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DYNAMIC_MESSAGE_EXTENSION_FIELD_NUMBER = 101
-dynamic_message_extension = _descriptor.FieldDescriptor(
-  name='dynamic_message_extension', full_name='google.protobuf.internal.dynamic_message_extension', index=1,
-  number=101, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_DYNAMICMESSAGETYPE = _descriptor.Descriptor(
-  name='DynamicMessageType',
-  full_name='google.protobuf.internal.DynamicMessageType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='google.protobuf.internal.DynamicMessageType.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=132,
-  serialized_end=163,
-)
-
-DESCRIPTOR.message_types_by_name['DynamicMessageType'] = _DYNAMICMESSAGETYPE
-DESCRIPTOR.extensions_by_name['dynamic_int32_extension'] = dynamic_int32_extension
-DESCRIPTOR.extensions_by_name['dynamic_message_extension'] = dynamic_message_extension
-
-DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), dict(
-  DESCRIPTOR = _DYNAMICMESSAGETYPE,
-  __module__ = 'google.protobuf.internal.more_extensions_dynamic_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.DynamicMessageType)
-  ))
-_sym_db.RegisterMessage(DynamicMessageType)
-
-google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension)
-dynamic_message_extension.message_type = _DYNAMICMESSAGETYPE
-google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension)
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_pb2.py
deleted file mode 100644
index c2a03aa..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/more_extensions_pb2.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/more_extensions.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/more_extensions.proto',
-  package='google.protobuf.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"P\n\x0fTopLevelMessage\x12=\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessage\"\x1b\n\x0f\x45xtendedMessage*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-OPTIONAL_INT_EXTENSION_FIELD_NUMBER = 1
-optional_int_extension = _descriptor.FieldDescriptor(
-  name='optional_int_extension', full_name='google.protobuf.internal.optional_int_extension', index=0,
-  number=1, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_MESSAGE_EXTENSION_FIELD_NUMBER = 2
-optional_message_extension = _descriptor.FieldDescriptor(
-  name='optional_message_extension', full_name='google.protobuf.internal.optional_message_extension', index=1,
-  number=2, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_INT_EXTENSION_FIELD_NUMBER = 3
-repeated_int_extension = _descriptor.FieldDescriptor(
-  name='repeated_int_extension', full_name='google.protobuf.internal.repeated_int_extension', index=2,
-  number=3, type=5, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_MESSAGE_EXTENSION_FIELD_NUMBER = 4
-repeated_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_message_extension', full_name='google.protobuf.internal.repeated_message_extension', index=3,
-  number=4, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_TOPLEVELMESSAGE = _descriptor.Descriptor(
-  name='TopLevelMessage',
-  full_name='google.protobuf.internal.TopLevelMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='submessage', full_name='google.protobuf.internal.TopLevelMessage.submessage', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=76,
-  serialized_end=156,
-)
-
-
-_EXTENDEDMESSAGE = _descriptor.Descriptor(
-  name='ExtendedMessage',
-  full_name='google.protobuf.internal.ExtendedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=158,
-  serialized_end=185,
-)
-
-
-_FOREIGNMESSAGE = _descriptor.Descriptor(
-  name='ForeignMessage',
-  full_name='google.protobuf.internal.ForeignMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foreign_message_int', full_name='google.protobuf.internal.ForeignMessage.foreign_message_int', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=187,
-  serialized_end=232,
-)
-
-_TOPLEVELMESSAGE.fields_by_name['submessage'].message_type = _EXTENDEDMESSAGE
-DESCRIPTOR.message_types_by_name['TopLevelMessage'] = _TOPLEVELMESSAGE
-DESCRIPTOR.message_types_by_name['ExtendedMessage'] = _EXTENDEDMESSAGE
-DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
-DESCRIPTOR.extensions_by_name['optional_int_extension'] = optional_int_extension
-DESCRIPTOR.extensions_by_name['optional_message_extension'] = optional_message_extension
-DESCRIPTOR.extensions_by_name['repeated_int_extension'] = repeated_int_extension
-DESCRIPTOR.extensions_by_name['repeated_message_extension'] = repeated_message_extension
-
-TopLevelMessage = _reflection.GeneratedProtocolMessageType('TopLevelMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TOPLEVELMESSAGE,
-  __module__ = 'google.protobuf.internal.more_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TopLevelMessage)
-  ))
-_sym_db.RegisterMessage(TopLevelMessage)
-
-ExtendedMessage = _reflection.GeneratedProtocolMessageType('ExtendedMessage', (_message.Message,), dict(
-  DESCRIPTOR = _EXTENDEDMESSAGE,
-  __module__ = 'google.protobuf.internal.more_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.ExtendedMessage)
-  ))
-_sym_db.RegisterMessage(ExtendedMessage)
-
-ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOREIGNMESSAGE,
-  __module__ = 'google.protobuf.internal.more_extensions_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.ForeignMessage)
-  ))
-_sym_db.RegisterMessage(ForeignMessage)
-
-ExtendedMessage.RegisterExtension(optional_int_extension)
-optional_message_extension.message_type = _FOREIGNMESSAGE
-ExtendedMessage.RegisterExtension(optional_message_extension)
-ExtendedMessage.RegisterExtension(repeated_int_extension)
-repeated_message_extension.message_type = _FOREIGNMESSAGE
-ExtendedMessage.RegisterExtension(repeated_message_extension)
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/more_messages_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/more_messages_pb2.py
deleted file mode 100644
index 4ad9c9e..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/more_messages_pb2.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/more_messages.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/more_messages.proto',
-  package='google.protobuf.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-OPTIONAL_UINT64_FIELD_NUMBER = 4
-optional_uint64 = _descriptor.FieldDescriptor(
-  name='optional_uint64', full_name='google.protobuf.internal.optional_uint64', index=0,
-  number=4, type=4, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_INT64_FIELD_NUMBER = 2
-optional_int64 = _descriptor.FieldDescriptor(
-  name='optional_int64', full_name='google.protobuf.internal.optional_int64', index=1,
-  number=2, type=3, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_OUTOFORDERFIELDS = _descriptor.Descriptor(
-  name='OutOfOrderFields',
-  full_name='google.protobuf.internal.OutOfOrderFields',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_sint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_sint32', index=0,
-      number=5, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_uint32', index=1,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='google.protobuf.internal.OutOfOrderFields.optional_int32', index=2,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(4, 5), (2, 3), ],
-  oneofs=[
-  ],
-  serialized_start=74,
-  serialized_end=178,
-)
-
-DESCRIPTOR.message_types_by_name['OutOfOrderFields'] = _OUTOFORDERFIELDS
-DESCRIPTOR.extensions_by_name['optional_uint64'] = optional_uint64
-DESCRIPTOR.extensions_by_name['optional_int64'] = optional_int64
-
-OutOfOrderFields = _reflection.GeneratedProtocolMessageType('OutOfOrderFields', (_message.Message,), dict(
-  DESCRIPTOR = _OUTOFORDERFIELDS,
-  __module__ = 'google.protobuf.internal.more_messages_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.internal.OutOfOrderFields)
-  ))
-_sym_db.RegisterMessage(OutOfOrderFields)
-
-OutOfOrderFields.RegisterExtension(optional_uint64)
-OutOfOrderFields.RegisterExtension(optional_int64)
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/packed_field_test_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/packed_field_test_pb2.py
deleted file mode 100644
index 77b6384..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/packed_field_test_pb2.py
+++ /dev/null
@@ -1,348 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/packed_field_test.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/packed_field_test.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto3',
-  serialized_pb=_b('\n0google/protobuf/internal/packed_field_test.proto\x12\x1fgoogle.protobuf.python.internal\"\xdb\x03\n\x0fTestPackedTypes\x12\x16\n\x0erepeated_int32\x18\x01 \x03(\x05\x12\x16\n\x0erepeated_int64\x18\x02 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18\x03 \x03(\r\x12\x17\n\x0frepeated_uint64\x18\x04 \x03(\x04\x12\x17\n\x0frepeated_sint32\x18\x05 \x03(\x11\x12\x17\n\x0frepeated_sint64\x18\x06 \x03(\x12\x12\x18\n\x10repeated_fixed32\x18\x07 \x03(\x07\x12\x18\n\x10repeated_fixed64\x18\x08 \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\t \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18\n \x03(\x10\x12\x16\n\x0erepeated_float\x18\x0b \x03(\x02\x12\x17\n\x0frepeated_double\x18\x0c \x03(\x01\x12\x15\n\rrepeated_bool\x18\r \x03(\x08\x12Y\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnum\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x12\x07\n\x03\x42\x41Z\x10\x02\"\xec\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0erepeated_int32\x18\x01 \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0erepeated_int64\x18\x02 \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0frepeated_uint32\x18\x03 \x03(\rB\x02\x10\x00\x12\x1b\n\x0frepeated_uint64\x18\x04 \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint32\x18\x05 \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint64\x18\x06 \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed32\x18\x07 \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed64\x18\x08 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed32\x18\t \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed64\x18\n \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0erepeated_float\x18\x0b \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0frepeated_double\x18\x0c \x03(\x01\x42\x02\x10\x00\x12\x19\n\rrepeated_bool\x18\r \x03(\x08\x42\x02\x10\x00\x12]\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnumB\x02\x10\x00\x62\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_TESTPACKEDTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='google.protobuf.python.internal.TestPackedTypes.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=522,
-  serialized_end=561,
-)
-_sym_db.RegisterEnumDescriptor(_TESTPACKEDTYPES_NESTEDENUM)
-
-
-_TESTPACKEDTYPES = _descriptor.Descriptor(
-  name='TestPackedTypes',
-  full_name='google.protobuf.python.internal.TestPackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int32', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int64', index=1,
-      number=2, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_float', index=10,
-      number=11, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_double', index=11,
-      number=12, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_bool', index=12,
-      number=13, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_nested_enum', index=13,
-      number=14, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _TESTPACKEDTYPES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=86,
-  serialized_end=561,
-)
-
-
-_TESTUNPACKEDTYPES = _descriptor.Descriptor(
-  name='TestUnpackedTypes',
-  full_name='google.protobuf.python.internal.TestUnpackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int32', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int64', index=1,
-      number=2, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_float', index=10,
-      number=11, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_double', index=11,
-      number=12, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_bool', index=12,
-      number=13, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_nested_enum', index=13,
-      number=14, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=564,
-  serialized_end=1056,
-)
-
-_TESTPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM
-_TESTPACKEDTYPES_NESTEDENUM.containing_type = _TESTPACKEDTYPES
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM
-DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES
-DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES
-
-TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTPACKEDTYPES,
-  __module__ = 'google.protobuf.internal.packed_field_test_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestPackedTypes)
-  ))
-_sym_db.RegisterMessage(TestPackedTypes)
-
-TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTUNPACKEDTYPES,
-  __module__ = 'google.protobuf.internal.packed_field_test_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestUnpackedTypes)
-  ))
-_sym_db.RegisterMessage(TestUnpackedTypes)
-
-
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_float'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_double'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_bool'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/python_message.py b/tools/swarming_client/third_party/google/protobuf/internal/python_message.py
deleted file mode 100644
index c0d0ad4..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/python_message.py
+++ /dev/null
@@ -1,1550 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This code is meant to work on Python 2.4 and above only.
-#
-# TODO(robinson): Helpers for verbose, common checks like seeing if a
-# descriptor's cpp_type is CPPTYPE_MESSAGE.
-
-"""Contains a metaclass and helper functions used to create
-protocol message classes from Descriptor objects at runtime.
-
-Recall that a metaclass is the "type" of a class.
-(A class is to a metaclass what an instance is to a class.)
-
-In this case, we use the GeneratedProtocolMessageType metaclass
-to inject all the useful functionality into the classes
-output by the protocol compiler at compile-time.
-
-The upshot of all this is that the real implementation
-details for ALL pure-Python protocol buffers are *here in
-this file*.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-from io import BytesIO
-import sys
-import struct
-import weakref
-
-import six
-try:
-  import six.moves.copyreg as copyreg
-except ImportError:
-  # On some platforms, for example gMac, we run native Python because there is
-  # nothing like hermetic Python. This means less control over the system and
-  # the six.moves package may be missing (is missing on 20150321 on gMac). Be
-  # extra conservative and try to load the old replacement if it fails.
-  import copy_reg as copyreg
-
-# We use "as" to avoid name collisions with variables.
-from google.protobuf.internal import containers
-from google.protobuf.internal import decoder
-from google.protobuf.internal import encoder
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf.internal import message_listener as message_listener_mod
-from google.protobuf.internal import type_checkers
-from google.protobuf.internal import well_known_types
-from google.protobuf.internal import wire_format
-from google.protobuf import descriptor as descriptor_mod
-from google.protobuf import message as message_mod
-from google.protobuf import text_format
-
-_FieldDescriptor = descriptor_mod.FieldDescriptor
-_AnyFullTypeName = 'google.protobuf.Any'
-
-
-class GeneratedProtocolMessageType(type):
-
-  """Metaclass for protocol message classes created at runtime from Descriptors.
-
-  We add implementations for all methods described in the Message class.  We
-  also create properties to allow getting/setting all fields in the protocol
-  message.  Finally, we create slots to prevent users from accidentally
-  "setting" nonexistent fields in the protocol message, which then wouldn't get
-  serialized / deserialized properly.
-
-  The protocol compiler currently uses this metaclass to create protocol
-  message classes at runtime.  Clients can also manually create their own
-  classes at runtime, as in this example:
-
-  mydescriptor = Descriptor(.....)
-  factory = symbol_database.Default()
-  factory.pool.AddDescriptor(mydescriptor)
-  MyProtoClass = factory.GetPrototype(mydescriptor)
-  myproto_instance = MyProtoClass()
-  myproto_instance.foo_field = 23
-  ...
-  """
-
-  # Must be consistent with the protocol-compiler code in
-  # proto2/compiler/internal/generator.*.
-  _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
-  def __new__(cls, name, bases, dictionary):
-    """Custom allocation for runtime-generated class types.
-
-    We override __new__ because this is apparently the only place
-    where we can meaningfully set __slots__ on the class we're creating(?).
-    (The interplay between metaclasses and slots is not very well-documented).
-
-    Args:
-      name: Name of the class (ignored, but required by the
-        metaclass protocol).
-      bases: Base classes of the class we're constructing.
-        (Should be message.Message).  We ignore this field, but
-        it's required by the metaclass protocol
-      dictionary: The class dictionary of the class we're
-        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
-        a Descriptor object describing this protocol message
-        type.
-
-    Returns:
-      Newly-allocated class.
-    """
-    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
-    if descriptor.full_name in well_known_types.WKTBASES:
-      bases += (well_known_types.WKTBASES[descriptor.full_name],)
-    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
-    _AddSlots(descriptor, dictionary)
-
-    superclass = super(GeneratedProtocolMessageType, cls)
-    new_class = superclass.__new__(cls, name, bases, dictionary)
-    return new_class
-
-  def __init__(cls, name, bases, dictionary):
-    """Here we perform the majority of our work on the class.
-    We add enum getters, an __init__ method, implementations
-    of all Message methods, and properties for all fields
-    in the protocol type.
-
-    Args:
-      name: Name of the class (ignored, but required by the
-        metaclass protocol).
-      bases: Base classes of the class we're constructing.
-        (Should be message.Message).  We ignore this field, but
-        it's required by the metaclass protocol
-      dictionary: The class dictionary of the class we're
-        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
-        a Descriptor object describing this protocol message
-        type.
-    """
-    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
-    cls._decoders_by_tag = {}
-    cls._extensions_by_name = {}
-    cls._extensions_by_number = {}
-    if (descriptor.has_options and
-        descriptor.GetOptions().message_set_wire_format):
-      cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
-          decoder.MessageSetItemDecoder(cls._extensions_by_number), None)
-
-    # Attach stuff to each FieldDescriptor for quick lookup later on.
-    for field in descriptor.fields:
-      _AttachFieldHelpers(cls, field)
-
-    descriptor._concrete_class = cls  # pylint: disable=protected-access
-    _AddEnumValues(descriptor, cls)
-    _AddInitMethod(descriptor, cls)
-    _AddPropertiesForFields(descriptor, cls)
-    _AddPropertiesForExtensions(descriptor, cls)
-    _AddStaticMethods(cls)
-    _AddMessageMethods(descriptor, cls)
-    _AddPrivateHelperMethods(descriptor, cls)
-    copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
-
-    superclass = super(GeneratedProtocolMessageType, cls)
-    superclass.__init__(name, bases, dictionary)
-
-
-# Stateless helpers for GeneratedProtocolMessageType below.
-# Outside clients should not access these directly.
-#
-# I opted not to make any of these methods on the metaclass, to make it more
-# clear that I'm not really using any state there and to keep clients from
-# thinking that they have direct access to these construction helpers.
-
-
-def _PropertyName(proto_field_name):
-  """Returns the name of the public property attribute which
-  clients can use to get and (in some cases) set the value
-  of a protocol message field.
-
-  Args:
-    proto_field_name: The protocol message field name, exactly
-      as it appears (or would appear) in a .proto file.
-  """
-  # TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
-  # nnorwitz makes my day by writing:
-  # """
-  # FYI.  See the keyword module in the stdlib. This could be as simple as:
-  #
-  # if keyword.iskeyword(proto_field_name):
-  #   return proto_field_name + "_"
-  # return proto_field_name
-  # """
-  # Kenton says:  The above is a BAD IDEA.  People rely on being able to use
-  #   getattr() and setattr() to reflectively manipulate field values.  If we
-  #   rename the properties, then every such user has to also make sure to apply
-  #   the same transformation.  Note that currently if you name a field "yield",
-  #   you can still access it just fine using getattr/setattr -- it's not even
-  #   that cumbersome to do so.
-  # TODO(kenton):  Remove this method entirely if/when everyone agrees with my
-  #   position.
-  return proto_field_name
-
-
-def _VerifyExtensionHandle(message, extension_handle):
-  """Verify that the given extension handle is valid."""
-
-  if not isinstance(extension_handle, _FieldDescriptor):
-    raise KeyError('HasExtension() expects an extension handle, got: %s' %
-                   extension_handle)
-
-  if not extension_handle.is_extension:
-    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
-
-  if not extension_handle.containing_type:
-    raise KeyError('"%s" is missing a containing_type.'
-                   % extension_handle.full_name)
-
-  if extension_handle.containing_type is not message.DESCRIPTOR:
-    raise KeyError('Extension "%s" extends message type "%s", but this '
-                   'message is of type "%s".' %
-                   (extension_handle.full_name,
-                    extension_handle.containing_type.full_name,
-                    message.DESCRIPTOR.full_name))
-
-
-def _AddSlots(message_descriptor, dictionary):
-  """Adds a __slots__ entry to dictionary, containing the names of all valid
-  attributes for this message type.
-
-  Args:
-    message_descriptor: A Descriptor instance describing this message type.
-    dictionary: Class dictionary to which we'll add a '__slots__' entry.
-  """
-  dictionary['__slots__'] = ['_cached_byte_size',
-                             '_cached_byte_size_dirty',
-                             '_fields',
-                             '_unknown_fields',
-                             '_is_present_in_parent',
-                             '_listener',
-                             '_listener_for_children',
-                             '__weakref__',
-                             '_oneofs']
-
-
-def _IsMessageSetExtension(field):
-  return (field.is_extension and
-          field.containing_type.has_options and
-          field.containing_type.GetOptions().message_set_wire_format and
-          field.type == _FieldDescriptor.TYPE_MESSAGE and
-          field.label == _FieldDescriptor.LABEL_OPTIONAL)
-
-
-def _IsMapField(field):
-  return (field.type == _FieldDescriptor.TYPE_MESSAGE and
-          field.message_type.has_options and
-          field.message_type.GetOptions().map_entry)
-
-
-def _IsMessageMapField(field):
-  value_type = field.message_type.fields_by_name["value"]
-  return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
-
-
-def _AttachFieldHelpers(cls, field_descriptor):
-  is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
-  is_packable = (is_repeated and
-                 wire_format.IsTypePackable(field_descriptor.type))
-  if not is_packable:
-    is_packed = False
-  elif field_descriptor.containing_type.syntax == "proto2":
-    is_packed = (field_descriptor.has_options and
-                field_descriptor.GetOptions().packed)
-  else:
-    has_packed_false = (field_descriptor.has_options and
-                        field_descriptor.GetOptions().HasField("packed") and
-                        field_descriptor.GetOptions().packed == False)
-    is_packed = not has_packed_false
-  is_map_entry = _IsMapField(field_descriptor)
-
-  if is_map_entry:
-    field_encoder = encoder.MapEncoder(field_descriptor)
-    sizer = encoder.MapSizer(field_descriptor)
-  elif _IsMessageSetExtension(field_descriptor):
-    field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
-    sizer = encoder.MessageSetItemSizer(field_descriptor.number)
-  else:
-    field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
-        field_descriptor.number, is_repeated, is_packed)
-    sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
-        field_descriptor.number, is_repeated, is_packed)
-
-  field_descriptor._encoder = field_encoder
-  field_descriptor._sizer = sizer
-  field_descriptor._default_constructor = _DefaultValueConstructorForField(
-      field_descriptor)
-
-  def AddDecoder(wiretype, is_packed):
-    tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
-    decode_type = field_descriptor.type
-    if (decode_type == _FieldDescriptor.TYPE_ENUM and
-        type_checkers.SupportsOpenEnums(field_descriptor)):
-      decode_type = _FieldDescriptor.TYPE_INT32
-
-    oneof_descriptor = None
-    if field_descriptor.containing_oneof is not None:
-      oneof_descriptor = field_descriptor
-
-    if is_map_entry:
-      is_message_map = _IsMessageMapField(field_descriptor)
-
-      field_decoder = decoder.MapDecoder(
-          field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
-          is_message_map)
-    else:
-      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
-              field_descriptor.number, is_repeated, is_packed,
-              field_descriptor, field_descriptor._default_constructor)
-
-    cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor)
-
-  AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
-             False)
-
-  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
-    # To support wire compatibility of adding packed = true, add a decoder for
-    # packed values regardless of the field's options.
-    AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
-
-
-def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
-  extension_dict = descriptor.extensions_by_name
-  for extension_name, extension_field in extension_dict.items():
-    assert extension_name not in dictionary
-    dictionary[extension_name] = extension_field
-
-
-def _AddEnumValues(descriptor, cls):
-  """Sets class-level attributes for all enum fields defined in this message.
-
-  Also exports a class-level object that can name enum values.
-
-  Args:
-    descriptor: Descriptor object for this message type.
-    cls: Class we're constructing for this message type.
-  """
-  for enum_type in descriptor.enum_types:
-    setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
-    for enum_value in enum_type.values:
-      setattr(cls, enum_value.name, enum_value.number)
-
-
-def _GetInitializeDefaultForMap(field):
-  if field.label != _FieldDescriptor.LABEL_REPEATED:
-    raise ValueError('map_entry set on non-repeated field %s' % (
-        field.name))
-  fields_by_name = field.message_type.fields_by_name
-  key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
-
-  value_field = fields_by_name['value']
-  if _IsMessageMapField(field):
-    def MakeMessageMapDefault(message):
-      return containers.MessageMap(
-          message._listener_for_children, value_field.message_type, key_checker)
-    return MakeMessageMapDefault
-  else:
-    value_checker = type_checkers.GetTypeChecker(value_field)
-    def MakePrimitiveMapDefault(message):
-      return containers.ScalarMap(
-          message._listener_for_children, key_checker, value_checker)
-    return MakePrimitiveMapDefault
-
-def _DefaultValueConstructorForField(field):
-  """Returns a function which returns a default value for a field.
-
-  Args:
-    field: FieldDescriptor object for this field.
-
-  The returned function has one argument:
-    message: Message instance containing this field, or a weakref proxy
-      of same.
-
-  That function in turn returns a default value for this field.  The default
-    value may refer back to |message| via a weak reference.
-  """
-
-  if _IsMapField(field):
-    return _GetInitializeDefaultForMap(field)
-
-  if field.label == _FieldDescriptor.LABEL_REPEATED:
-    if field.has_default_value and field.default_value != []:
-      raise ValueError('Repeated field default value not empty list: %s' % (
-          field.default_value))
-    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-      # We can't look at _concrete_class yet since it might not have
-      # been set.  (Depends on order in which we initialize the classes).
-      message_type = field.message_type
-      def MakeRepeatedMessageDefault(message):
-        return containers.RepeatedCompositeFieldContainer(
-            message._listener_for_children, field.message_type)
-      return MakeRepeatedMessageDefault
-    else:
-      type_checker = type_checkers.GetTypeChecker(field)
-      def MakeRepeatedScalarDefault(message):
-        return containers.RepeatedScalarFieldContainer(
-            message._listener_for_children, type_checker)
-      return MakeRepeatedScalarDefault
-
-  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-    # _concrete_class may not yet be initialized.
-    message_type = field.message_type
-    def MakeSubMessageDefault(message):
-      result = message_type._concrete_class()
-      result._SetListener(
-          _OneofListener(message, field)
-          if field.containing_oneof is not None
-          else message._listener_for_children)
-      return result
-    return MakeSubMessageDefault
-
-  def MakeScalarDefault(message):
-    # TODO(protobuf-team): This may be broken since there may not be
-    # default_value.  Combine with has_default_value somehow.
-    return field.default_value
-  return MakeScalarDefault
-
-
-def _ReraiseTypeErrorWithFieldName(message_name, field_name):
-  """Re-raise the currently-handled TypeError with the field name added."""
-  exc = sys.exc_info()[1]
-  if len(exc.args) == 1 and type(exc) is TypeError:
-    # simple TypeError; add field name to exception message
-    exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
-
-  # re-raise possibly-amended exception with original traceback:
-  six.reraise(type(exc), exc, sys.exc_info()[2])
-
-
-def _AddInitMethod(message_descriptor, cls):
-  """Adds an __init__ method to cls."""
-
-  def _GetIntegerEnumValue(enum_type, value):
-    """Convert a string or integer enum value to an integer.
-
-    If the value is a string, it is converted to the enum value in
-    enum_type with the same name.  If the value is not a string, it's
-    returned as-is.  (No conversion or bounds-checking is done.)
-    """
-    if isinstance(value, six.string_types):
-      try:
-        return enum_type.values_by_name[value].number
-      except KeyError:
-        raise ValueError('Enum type %s: unknown label "%s"' % (
-            enum_type.full_name, value))
-    return value
-
-  def init(self, **kwargs):
-    self._cached_byte_size = 0
-    self._cached_byte_size_dirty = len(kwargs) > 0
-    self._fields = {}
-    # Contains a mapping from oneof field descriptors to the descriptor
-    # of the currently set field in that oneof field.
-    self._oneofs = {}
-
-    # _unknown_fields is () when empty for efficiency, and will be turned into
-    # a list if fields are added.
-    self._unknown_fields = ()
-    self._is_present_in_parent = False
-    self._listener = message_listener_mod.NullMessageListener()
-    self._listener_for_children = _Listener(self)
-    for field_name, field_value in kwargs.items():
-      field = _GetFieldByName(message_descriptor, field_name)
-      if field is None:
-        raise TypeError("%s() got an unexpected keyword argument '%s'" %
-                        (message_descriptor.name, field_name))
-      if field_value is None:
-        # field=None is the same as no field at all.
-        continue
-      if field.label == _FieldDescriptor.LABEL_REPEATED:
-        copy = field._default_constructor(self)
-        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
-          if _IsMapField(field):
-            if _IsMessageMapField(field):
-              for key in field_value:
-                copy[key].MergeFrom(field_value[key])
-            else:
-              copy.update(field_value)
-          else:
-            for val in field_value:
-              if isinstance(val, dict):
-                copy.add(**val)
-              else:
-                copy.add().MergeFrom(val)
-        else:  # Scalar
-          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
-            field_value = [_GetIntegerEnumValue(field.enum_type, val)
-                           for val in field_value]
-          copy.extend(field_value)
-        self._fields[field] = copy
-      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-        copy = field._default_constructor(self)
-        new_val = field_value
-        if isinstance(field_value, dict):
-          new_val = field.message_type._concrete_class(**field_value)
-        try:
-          copy.MergeFrom(new_val)
-        except TypeError:
-          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
-        self._fields[field] = copy
-      else:
-        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
-          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
-        try:
-          setattr(self, field_name, field_value)
-        except TypeError:
-          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
-
-  init.__module__ = None
-  init.__doc__ = None
-  cls.__init__ = init
-
-
-def _GetFieldByName(message_descriptor, field_name):
-  """Returns a field descriptor by field name.
-
-  Args:
-    message_descriptor: A Descriptor describing all fields in message.
-    field_name: The name of the field to retrieve.
-  Returns:
-    The field descriptor associated with the field name.
-  """
-  try:
-    return message_descriptor.fields_by_name[field_name]
-  except KeyError:
-    raise ValueError('Protocol message %s has no "%s" field.' %
-                     (message_descriptor.name, field_name))
-
-
-def _AddPropertiesForFields(descriptor, cls):
-  """Adds properties for all fields in this protocol message type."""
-  for field in descriptor.fields:
-    _AddPropertiesForField(field, cls)
-
-  if descriptor.is_extendable:
-    # _ExtensionDict is just an adaptor with no state so we allocate a new one
-    # every time it is accessed.
-    cls.Extensions = property(lambda self: _ExtensionDict(self))
-
-
-def _AddPropertiesForField(field, cls):
-  """Adds a public property for a protocol message field.
-  Clients can use this property to get and (in the case
-  of non-repeated scalar fields) directly set the value
-  of a protocol message field.
-
-  Args:
-    field: A FieldDescriptor for this field.
-    cls: The class we're constructing.
-  """
-  # Catch it if we add other types that we should
-  # handle specially here.
-  assert _FieldDescriptor.MAX_CPPTYPE == 10
-
-  constant_name = field.name.upper() + "_FIELD_NUMBER"
-  setattr(cls, constant_name, field.number)
-
-  if field.label == _FieldDescriptor.LABEL_REPEATED:
-    _AddPropertiesForRepeatedField(field, cls)
-  elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-    _AddPropertiesForNonRepeatedCompositeField(field, cls)
-  else:
-    _AddPropertiesForNonRepeatedScalarField(field, cls)
-
-
-def _AddPropertiesForRepeatedField(field, cls):
-  """Adds a public property for a "repeated" protocol message field.  Clients
-  can use this property to get the value of the field, which will be either a
-  _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see
-  below).
-
-  Note that when clients add values to these containers, we perform
-  type-checking in the case of repeated scalar fields, and we also set any
-  necessary "has" bits as a side-effect.
-
-  Args:
-    field: A FieldDescriptor for this field.
-    cls: The class we're constructing.
-  """
-  proto_field_name = field.name
-  property_name = _PropertyName(proto_field_name)
-
-  def getter(self):
-    field_value = self._fields.get(field)
-    if field_value is None:
-      # Construct a new object to represent this field.
-      field_value = field._default_constructor(self)
-
-      # Atomically check if another thread has preempted us and, if not, swap
-      # in the new object we just created.  If someone has preempted us, we
-      # take that object and discard ours.
-      # WARNING:  We are relying on setdefault() being atomic.  This is true
-      #   in CPython but we haven't investigated others.  This warning appears
-      #   in several other locations in this file.
-      field_value = self._fields.setdefault(field, field_value)
-    return field_value
-  getter.__module__ = None
-  getter.__doc__ = 'Getter for %s.' % proto_field_name
-
-  # We define a setter just so we can throw an exception with a more
-  # helpful error message.
-  def setter(self, new_value):
-    raise AttributeError('Assignment not allowed to repeated field '
-                         '"%s" in protocol message object.' % proto_field_name)
-
-  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
-  setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForNonRepeatedScalarField(field, cls):
-  """Adds a public property for a nonrepeated, scalar protocol message field.
-  Clients can use this property to get and directly set the value of the field.
-  Note that when the client sets the value of a field by using this property,
-  all necessary "has" bits are set as a side-effect, and we also perform
-  type-checking.
-
-  Args:
-    field: A FieldDescriptor for this field.
-    cls: The class we're constructing.
-  """
-  proto_field_name = field.name
-  property_name = _PropertyName(proto_field_name)
-  type_checker = type_checkers.GetTypeChecker(field)
-  default_value = field.default_value
-  valid_values = set()
-  is_proto3 = field.containing_type.syntax == "proto3"
-
-  def getter(self):
-    # TODO(protobuf-team): This may be broken since there may not be
-    # default_value.  Combine with has_default_value somehow.
-    return self._fields.get(field, default_value)
-  getter.__module__ = None
-  getter.__doc__ = 'Getter for %s.' % proto_field_name
-
-  clear_when_set_to_default = is_proto3 and not field.containing_oneof
-
-  def field_setter(self, new_value):
-    # pylint: disable=protected-access
-    # Testing the value for truthiness captures all of the proto3 defaults
-    # (0, 0.0, enum 0, and False).
-    new_value = type_checker.CheckValue(new_value)
-    if clear_when_set_to_default and not new_value:
-      self._fields.pop(field, None)
-    else:
-      self._fields[field] = new_value
-    # Check _cached_byte_size_dirty inline to improve performance, since scalar
-    # setters are called frequently.
-    if not self._cached_byte_size_dirty:
-      self._Modified()
-
-  if field.containing_oneof:
-    def setter(self, new_value):
-      field_setter(self, new_value)
-      self._UpdateOneofState(field)
-  else:
-    setter = field_setter
-
-  setter.__module__ = None
-  setter.__doc__ = 'Setter for %s.' % proto_field_name
-
-  # Add a property to encapsulate the getter/setter.
-  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
-  setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForNonRepeatedCompositeField(field, cls):
-  """Adds a public property for a nonrepeated, composite protocol message field.
-  A composite field is a "group" or "message" field.
-
-  Clients can use this property to get the value of the field, but cannot
-  assign to the property directly.
-
-  Args:
-    field: A FieldDescriptor for this field.
-    cls: The class we're constructing.
-  """
-  # TODO(robinson): Remove duplication with similar method
-  # for non-repeated scalars.
-  proto_field_name = field.name
-  property_name = _PropertyName(proto_field_name)
-
-  def getter(self):
-    field_value = self._fields.get(field)
-    if field_value is None:
-      # Construct a new object to represent this field.
-      field_value = field._default_constructor(self)
-
-      # Atomically check if another thread has preempted us and, if not, swap
-      # in the new object we just created.  If someone has preempted us, we
-      # take that object and discard ours.
-      # WARNING:  We are relying on setdefault() being atomic.  This is true
-      #   in CPython but we haven't investigated others.  This warning appears
-      #   in several other locations in this file.
-      field_value = self._fields.setdefault(field, field_value)
-    return field_value
-  getter.__module__ = None
-  getter.__doc__ = 'Getter for %s.' % proto_field_name
-
-  # We define a setter just so we can throw an exception with a more
-  # helpful error message.
-  def setter(self, new_value):
-    raise AttributeError('Assignment not allowed to composite field '
-                         '"%s" in protocol message object.' % proto_field_name)
-
-  # Add a property to encapsulate the getter.
-  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
-  setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForExtensions(descriptor, cls):
-  """Adds properties for all fields in this protocol message type."""
-  extension_dict = descriptor.extensions_by_name
-  for extension_name, extension_field in extension_dict.items():
-    constant_name = extension_name.upper() + "_FIELD_NUMBER"
-    setattr(cls, constant_name, extension_field.number)
-
-
-def _AddStaticMethods(cls):
-  # TODO(robinson): This probably needs to be thread-safe(?)
-  def RegisterExtension(extension_handle):
-    extension_handle.containing_type = cls.DESCRIPTOR
-    _AttachFieldHelpers(cls, extension_handle)
-
-    # Try to insert our extension, failing if an extension with the same number
-    # already exists.
-    actual_handle = cls._extensions_by_number.setdefault(
-        extension_handle.number, extension_handle)
-    if actual_handle is not extension_handle:
-      raise AssertionError(
-          'Extensions "%s" and "%s" both try to extend message type "%s" with '
-          'field number %d.' %
-          (extension_handle.full_name, actual_handle.full_name,
-           cls.DESCRIPTOR.full_name, extension_handle.number))
-
-    cls._extensions_by_name[extension_handle.full_name] = extension_handle
-
-    handle = extension_handle  # avoid line wrapping
-    if _IsMessageSetExtension(handle):
-      # MessageSet extension.  Also register under type name.
-      cls._extensions_by_name[
-          extension_handle.message_type.full_name] = extension_handle
-
-  cls.RegisterExtension = staticmethod(RegisterExtension)
-
-  def FromString(s):
-    message = cls()
-    message.MergeFromString(s)
-    return message
-  cls.FromString = staticmethod(FromString)
-
-
-def _IsPresent(item):
-  """Given a (FieldDescriptor, value) tuple from _fields, return true if the
-  value should be included in the list returned by ListFields()."""
-
-  if item[0].label == _FieldDescriptor.LABEL_REPEATED:
-    return bool(item[1])
-  elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-    return item[1]._is_present_in_parent
-  else:
-    return True
-
-
-def _AddListFieldsMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  def ListFields(self):
-    all_fields = [item for item in self._fields.items() if _IsPresent(item)]
-    all_fields.sort(key = lambda item: item[0].number)
-    return all_fields
-
-  cls.ListFields = ListFields
-
-_Proto3HasError = 'Protocol message has no non-repeated submessage field "%s"'
-_Proto2HasError = 'Protocol message has no non-repeated field "%s"'
-
-def _AddHasFieldMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  is_proto3 = (message_descriptor.syntax == "proto3")
-  error_msg = _Proto3HasError if is_proto3 else _Proto2HasError
-
-  hassable_fields = {}
-  for field in message_descriptor.fields:
-    if field.label == _FieldDescriptor.LABEL_REPEATED:
-      continue
-    # For proto3, only submessages and fields inside a oneof have presence.
-    if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and
-        not field.containing_oneof):
-      continue
-    hassable_fields[field.name] = field
-
-  if not is_proto3:
-    # Fields inside oneofs are never repeated (enforced by the compiler).
-    for oneof in message_descriptor.oneofs:
-      hassable_fields[oneof.name] = oneof
-
-  def HasField(self, field_name):
-    try:
-      field = hassable_fields[field_name]
-    except KeyError:
-      raise ValueError(error_msg % field_name)
-
-    if isinstance(field, descriptor_mod.OneofDescriptor):
-      try:
-        return HasField(self, self._oneofs[field].name)
-      except KeyError:
-        return False
-    else:
-      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-        value = self._fields.get(field)
-        return value is not None and value._is_present_in_parent
-      else:
-        return field in self._fields
-
-  cls.HasField = HasField
-
-
-def _AddClearFieldMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-  def ClearField(self, field_name):
-    try:
-      field = message_descriptor.fields_by_name[field_name]
-    except KeyError:
-      try:
-        field = message_descriptor.oneofs_by_name[field_name]
-        if field in self._oneofs:
-          field = self._oneofs[field]
-        else:
-          return
-      except KeyError:
-        raise ValueError('Protocol message %s() has no "%s" field.' %
-                         (message_descriptor.name, field_name))
-
-    if field in self._fields:
-      # To match the C++ implementation, we need to invalidate iterators
-      # for map fields when ClearField() happens.
-      if hasattr(self._fields[field], 'InvalidateIterators'):
-        self._fields[field].InvalidateIterators()
-
-      # Note:  If the field is a sub-message, its listener will still point
-      #   at us.  That's fine, because the worst that can happen is that it
-      #   will call _Modified() and invalidate our byte size.  Big deal.
-      del self._fields[field]
-
-      if self._oneofs.get(field.containing_oneof, None) is field:
-        del self._oneofs[field.containing_oneof]
-
-    # Always call _Modified() -- even if nothing was changed, this is
-    # a mutating method, and thus calling it should cause the field to become
-    # present in the parent message.
-    self._Modified()
-
-  cls.ClearField = ClearField
-
-
-def _AddClearExtensionMethod(cls):
-  """Helper for _AddMessageMethods()."""
-  def ClearExtension(self, extension_handle):
-    _VerifyExtensionHandle(self, extension_handle)
-
-    # Similar to ClearField(), above.
-    if extension_handle in self._fields:
-      del self._fields[extension_handle]
-    self._Modified()
-  cls.ClearExtension = ClearExtension
-
-
-def _AddHasExtensionMethod(cls):
-  """Helper for _AddMessageMethods()."""
-  def HasExtension(self, extension_handle):
-    _VerifyExtensionHandle(self, extension_handle)
-    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
-      raise KeyError('"%s" is repeated.' % extension_handle.full_name)
-
-    if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-      value = self._fields.get(extension_handle)
-      return value is not None and value._is_present_in_parent
-    else:
-      return extension_handle in self._fields
-  cls.HasExtension = HasExtension
-
-def _InternalUnpackAny(msg):
-  """Unpacks Any message and returns the unpacked message.
-
-  This internal method is different from the public Any Unpack method, which
-  takes the target message as an argument. _InternalUnpackAny does not have the
-  target message type and needs to find the message type in the descriptor pool.
-
-  Args:
-    msg: An Any message to be unpacked.
-
-  Returns:
-    The unpacked message.
-  """
-  # TODO(amauryfa): Don't use the factory of generated messages.
-  # To make Any work with custom factories, use the message factory of the
-  # parent message.
-  # pylint: disable=g-import-not-at-top
-  from google.protobuf import symbol_database
-  factory = symbol_database.Default()
-
-  type_url = msg.type_url
-
-  if not type_url:
-    return None
-
-  # TODO(haberman): For now we just strip the hostname.  Better logic will be
-  # required.
-  type_name = type_url.split('/')[-1]
-  descriptor = factory.pool.FindMessageTypeByName(type_name)
-
-  if descriptor is None:
-    return None
-
-  message_class = factory.GetPrototype(descriptor)
-  message = message_class()
-
-  message.ParseFromString(msg.value)
-  return message
-
-
-def _AddEqualsMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-  def __eq__(self, other):
-    if (not isinstance(other, message_mod.Message) or
-        other.DESCRIPTOR != self.DESCRIPTOR):
-      return False
-
-    if self is other:
-      return True
-
-    if self.DESCRIPTOR.full_name == _AnyFullTypeName:
-      any_a = _InternalUnpackAny(self)
-      any_b = _InternalUnpackAny(other)
-      if any_a and any_b:
-        return any_a == any_b
-
-    if not self.ListFields() == other.ListFields():
-      return False
-
-    # Sort unknown fields because their order shouldn't affect equality test.
-    unknown_fields = list(self._unknown_fields)
-    unknown_fields.sort()
-    other_unknown_fields = list(other._unknown_fields)
-    other_unknown_fields.sort()
-
-    return unknown_fields == other_unknown_fields
-
-  cls.__eq__ = __eq__
-
-
-def _AddStrMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-  def __str__(self):
-    return text_format.MessageToString(self)
-  cls.__str__ = __str__
-
-
-def _AddReprMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-  def __repr__(self):
-    return text_format.MessageToString(self)
-  cls.__repr__ = __repr__
-
-
-def _AddUnicodeMethod(unused_message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  def __unicode__(self):
-    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
-  cls.__unicode__ = __unicode__
-
-
-def _BytesForNonRepeatedElement(value, field_number, field_type):
-  """Returns the number of bytes needed to serialize a non-repeated element.
-  The returned byte count includes space for tag information and any
-  other additional space associated with serializing value.
-
-  Args:
-    value: Value we're serializing.
-    field_number: Field number of this value.  (Since the field number
-      is stored as part of a varint-encoded tag, this has an impact
-      on the total bytes required to serialize the value).
-    field_type: The type of the field.  One of the TYPE_* constants
-      within FieldDescriptor.
-  """
-  try:
-    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
-    return fn(field_number, value)
-  except KeyError:
-    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
-
-
-def _AddByteSizeMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  def ByteSize(self):
-    if not self._cached_byte_size_dirty:
-      return self._cached_byte_size
-
-    size = 0
-    for field_descriptor, field_value in self.ListFields():
-      size += field_descriptor._sizer(field_value)
-
-    for tag_bytes, value_bytes in self._unknown_fields:
-      size += len(tag_bytes) + len(value_bytes)
-
-    self._cached_byte_size = size
-    self._cached_byte_size_dirty = False
-    self._listener_for_children.dirty = False
-    return size
-
-  cls.ByteSize = ByteSize
-
-
-def _AddSerializeToStringMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  def SerializeToString(self):
-    # Check if the message has all of its required fields set.
-    errors = []
-    if not self.IsInitialized():
-      raise message_mod.EncodeError(
-          'Message %s is missing required fields: %s' % (
-          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
-    return self.SerializePartialToString()
-  cls.SerializeToString = SerializeToString
-
-
-def _AddSerializePartialToStringMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-
-  def SerializePartialToString(self):
-    out = BytesIO()
-    self._InternalSerialize(out.write)
-    return out.getvalue()
-  cls.SerializePartialToString = SerializePartialToString
-
-  def InternalSerialize(self, write_bytes):
-    for field_descriptor, field_value in self.ListFields():
-      field_descriptor._encoder(write_bytes, field_value)
-    for tag_bytes, value_bytes in self._unknown_fields:
-      write_bytes(tag_bytes)
-      write_bytes(value_bytes)
-  cls._InternalSerialize = InternalSerialize
-
-
-def _AddMergeFromStringMethod(message_descriptor, cls):
-  """Helper for _AddMessageMethods()."""
-  def MergeFromString(self, serialized):
-    length = len(serialized)
-    try:
-      if self._InternalParse(serialized, 0, length) != length:
-        # The only reason _InternalParse would return early is if it
-        # encountered an end-group tag.
-        raise message_mod.DecodeError('Unexpected end-group tag.')
-    except (IndexError, TypeError):
-      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
-      raise message_mod.DecodeError('Truncated message.')
-    except struct.error as e:
-      raise message_mod.DecodeError(e)
-    return length   # Return this for legacy reasons.
-  cls.MergeFromString = MergeFromString
-
-  local_ReadTag = decoder.ReadTag
-  local_SkipField = decoder.SkipField
-  decoders_by_tag = cls._decoders_by_tag
-  is_proto3 = message_descriptor.syntax == "proto3"
-
-  def InternalParse(self, buffer, pos, end):
-    self._Modified()
-    field_dict = self._fields
-    unknown_field_list = self._unknown_fields
-    while pos != end:
-      (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
-      field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None))
-      if field_decoder is None:
-        value_start_pos = new_pos
-        new_pos = local_SkipField(buffer, new_pos, end, tag_bytes)
-        if new_pos == -1:
-          return pos
-        if not is_proto3:
-          if not unknown_field_list:
-            unknown_field_list = self._unknown_fields = []
-          unknown_field_list.append(
-              (tag_bytes, buffer[value_start_pos:new_pos]))
-        pos = new_pos
-      else:
-        pos = field_decoder(buffer, new_pos, end, self, field_dict)
-        if field_desc:
-          self._UpdateOneofState(field_desc)
-    return pos
-  cls._InternalParse = InternalParse
-
-
-def _AddIsInitializedMethod(message_descriptor, cls):
-  """Adds the IsInitialized and FindInitializationError methods to the
-  protocol message class."""
-
-  required_fields = [field for field in message_descriptor.fields
-                           if field.label == _FieldDescriptor.LABEL_REQUIRED]
-
-  def IsInitialized(self, errors=None):
-    """Checks if all required fields of a message are set.
-
-    Args:
-      errors:  A list which, if provided, will be populated with the field
-               paths of all missing required fields.
-
-    Returns:
-      True iff the specified message has all required fields set.
-    """
-
-    # Performance is critical so we avoid HasField() and ListFields().
-
-    for field in required_fields:
-      if (field not in self._fields or
-          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
-           not self._fields[field]._is_present_in_parent)):
-        if errors is not None:
-          errors.extend(self.FindInitializationErrors())
-        return False
-
-    for field, value in list(self._fields.items()):  # dict can change size!
-      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-        if field.label == _FieldDescriptor.LABEL_REPEATED:
-          if (field.message_type.has_options and
-              field.message_type.GetOptions().map_entry):
-            continue
-          for element in value:
-            if not element.IsInitialized():
-              if errors is not None:
-                errors.extend(self.FindInitializationErrors())
-              return False
-        elif value._is_present_in_parent and not value.IsInitialized():
-          if errors is not None:
-            errors.extend(self.FindInitializationErrors())
-          return False
-
-    return True
-
-  cls.IsInitialized = IsInitialized
-
-  def FindInitializationErrors(self):
-    """Finds required fields which are not initialized.
-
-    Returns:
-      A list of strings.  Each string is a path to an uninitialized field from
-      the top-level message, e.g. "foo.bar[5].baz".
-    """
-
-    errors = []  # simplify things
-
-    for field in required_fields:
-      if not self.HasField(field.name):
-        errors.append(field.name)
-
-    for field, value in self.ListFields():
-      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-        if field.is_extension:
-          name = "(%s)" % field.full_name
-        else:
-          name = field.name
-
-        if _IsMapField(field):
-          if _IsMessageMapField(field):
-            for key in value:
-              element = value[key]
-              prefix = "%s[%s]." % (name, key)
-              sub_errors = element.FindInitializationErrors()
-              errors += [prefix + error for error in sub_errors]
-          else:
-            # ScalarMaps can't have any initialization errors.
-            pass
-        elif field.label == _FieldDescriptor.LABEL_REPEATED:
-          for i in range(len(value)):
-            element = value[i]
-            prefix = "%s[%d]." % (name, i)
-            sub_errors = element.FindInitializationErrors()
-            errors += [prefix + error for error in sub_errors]
-        else:
-          prefix = name + "."
-          sub_errors = value.FindInitializationErrors()
-          errors += [prefix + error for error in sub_errors]
-
-    return errors
-
-  cls.FindInitializationErrors = FindInitializationErrors
-
-
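
For reference, the two methods above behave roughly as follows on a hypothetical proto2 message Example with a required string field 'name' and a repeated Example field 'children':

    msg = Example()          # hypothetical proto2 message
    msg.children.add()       # nested element also has its required 'name' unset

    msg.IsInitialized()             # False
    msg.FindInitializationErrors()  # ['name', 'children[0].name']
    msg.SerializeToString()         # raises EncodeError listing the same paths
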
-def _AddMergeFromMethod(cls):
-  LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
-  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
-
-  def MergeFrom(self, msg):
-    if not isinstance(msg, cls):
-      raise TypeError(
-          "Parameter to MergeFrom() must be instance of same class: "
-          "expected %s got %s." % (cls.__name__, type(msg).__name__))
-
-    assert msg is not self
-    self._Modified()
-
-    fields = self._fields
-
-    for field, value in msg._fields.items():
-      if field.label == LABEL_REPEATED:
-        field_value = fields.get(field)
-        if field_value is None:
-          # Construct a new object to represent this field.
-          field_value = field._default_constructor(self)
-          fields[field] = field_value
-        field_value.MergeFrom(value)
-      elif field.cpp_type == CPPTYPE_MESSAGE:
-        if value._is_present_in_parent:
-          field_value = fields.get(field)
-          if field_value is None:
-            # Construct a new object to represent this field.
-            field_value = field._default_constructor(self)
-            fields[field] = field_value
-          field_value.MergeFrom(value)
-      else:
-        self._fields[field] = value
-        if field.containing_oneof:
-          self._UpdateOneofState(field)
-
-    if msg._unknown_fields:
-      if not self._unknown_fields:
-        self._unknown_fields = []
-      self._unknown_fields.extend(msg._unknown_fields)
-
-  cls.MergeFrom = MergeFrom
-
-
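
A quick illustration of the merge rules implemented above (repeated fields concatenate, submessages merge recursively, singular scalars are overwritten), again with a hypothetical message Example:

    dst = Example(count=1)
    dst.tags.extend(['a'])

    src = Example(count=2)
    src.tags.extend(['b'])

    dst.MergeFrom(src)
    # dst.count == 2                  (scalar taken from src)
    # list(dst.tags) == ['a', 'b']    (repeated field concatenated)
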
-def _AddWhichOneofMethod(message_descriptor, cls):
-  def WhichOneof(self, oneof_name):
-    """Returns the name of the currently set field inside a oneof, or None."""
-    try:
-      field = message_descriptor.oneofs_by_name[oneof_name]
-    except KeyError:
-      raise ValueError(
-          'Protocol message has no oneof "%s" field.' % oneof_name)
-
-    nested_field = self._oneofs.get(field, None)
-    if nested_field is not None and self.HasField(nested_field.name):
-      return nested_field.name
-    else:
-      return None
-
-  cls.WhichOneof = WhichOneof
-
-
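
And for WhichOneof, a short sketch assuming the hypothetical Example declares a oneof named 'kind' containing a uint32 member 'kind_id':

    msg = Example()
    msg.WhichOneof('kind')     # None: nothing in the oneof is set yet
    msg.kind_id = 7
    msg.WhichOneof('kind')     # 'kind_id'
    msg.WhichOneof('missing')  # raises ValueError: no such oneof
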
-def _Clear(self):
-  # Clear fields.
-  self._fields = {}
-  self._unknown_fields = ()
-  self._oneofs = {}
-  self._Modified()
-
-
-def _DiscardUnknownFields(self):
-  self._unknown_fields = []
-  for field, value in self.ListFields():
-    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-      if field.label == _FieldDescriptor.LABEL_REPEATED:
-        for sub_message in value:
-          sub_message.DiscardUnknownFields()
-      else:
-        value.DiscardUnknownFields()
-
-
-def _SetListener(self, listener):
-  if listener is None:
-    self._listener = message_listener_mod.NullMessageListener()
-  else:
-    self._listener = listener
-
-
-def _AddMessageMethods(message_descriptor, cls):
-  """Adds implementations of all Message methods to cls."""
-  _AddListFieldsMethod(message_descriptor, cls)
-  _AddHasFieldMethod(message_descriptor, cls)
-  _AddClearFieldMethod(message_descriptor, cls)
-  if message_descriptor.is_extendable:
-    _AddClearExtensionMethod(cls)
-    _AddHasExtensionMethod(cls)
-  _AddEqualsMethod(message_descriptor, cls)
-  _AddStrMethod(message_descriptor, cls)
-  _AddReprMethod(message_descriptor, cls)
-  _AddUnicodeMethod(message_descriptor, cls)
-  _AddByteSizeMethod(message_descriptor, cls)
-  _AddSerializeToStringMethod(message_descriptor, cls)
-  _AddSerializePartialToStringMethod(message_descriptor, cls)
-  _AddMergeFromStringMethod(message_descriptor, cls)
-  _AddIsInitializedMethod(message_descriptor, cls)
-  _AddMergeFromMethod(cls)
-  _AddWhichOneofMethod(message_descriptor, cls)
-  # Adds methods which do not depend on cls.
-  cls.Clear = _Clear
-  cls.DiscardUnknownFields = _DiscardUnknownFields
-  cls._SetListener = _SetListener
-
-
-def _AddPrivateHelperMethods(message_descriptor, cls):
-  """Adds implementation of private helper methods to cls."""
-
-  def Modified(self):
-    """Sets the _cached_byte_size_dirty bit to true,
-    and propagates this to our listener iff this was a state change.
-    """
-
-    # Note:  Some callers check _cached_byte_size_dirty before calling
-    #   _Modified() as an extra optimization.  So, if this method is ever
-    #   changed such that it does stuff even when _cached_byte_size_dirty is
-    #   already true, the callers need to be updated.
-    if not self._cached_byte_size_dirty:
-      self._cached_byte_size_dirty = True
-      self._listener_for_children.dirty = True
-      self._is_present_in_parent = True
-      self._listener.Modified()
-
-  def _UpdateOneofState(self, field):
-    """Sets field as the active field in its containing oneof.
-
-    Will also delete currently active field in the oneof, if it is different
-    from the argument. Does not mark the message as modified.
-    """
-    other_field = self._oneofs.setdefault(field.containing_oneof, field)
-    if other_field is not field:
-      del self._fields[other_field]
-      self._oneofs[field.containing_oneof] = field
-
-  cls._Modified = Modified
-  cls.SetInParent = Modified
-  cls._UpdateOneofState = _UpdateOneofState
-
-
-class _Listener(object):
-
-  """MessageListener implementation that a parent message registers with its
-  child message.
-
-  In order to support semantics like:
-
-    foo.bar.baz.qux = 23
-    assert foo.HasField('bar')
-
-  ...child objects must have back references to their parents.
-  This helper class is at the heart of this support.
-  """
-
-  def __init__(self, parent_message):
-    """Args:
-      parent_message: The message whose _Modified() method we should call when
-        we receive Modified() messages.
-    """
-    # This listener establishes a back reference from a child (contained) object
-    # to its parent (containing) object.  We make this a weak reference to avoid
-    # creating cyclic garbage when the client finishes with the 'parent' object
-    # in the tree.
-    if isinstance(parent_message, weakref.ProxyType):
-      self._parent_message_weakref = parent_message
-    else:
-      self._parent_message_weakref = weakref.proxy(parent_message)
-
-    # As an optimization, we also indicate directly on the listener whether
-    # or not the parent message is dirty.  This way we can avoid traversing
-    # up the tree in the common case.
-    self.dirty = False
-
-  def Modified(self):
-    if self.dirty:
-      return
-    try:
-      # Propagate the signal to our parents iff this is the first field set.
-      self._parent_message_weakref._Modified()
-    except ReferenceError:
-      # We can get here if a client has kept a reference to a child object,
-      # and is now setting a field on it, but the child's parent has been
-      # garbage-collected.  This is not an error.
-      pass
-
-
-class _OneofListener(_Listener):
-  """Special listener implementation for setting composite oneof fields."""
-
-  def __init__(self, parent_message, field):
-    """Args:
-      parent_message: The message whose _Modified() method we should call when
-        we receive Modified() messages.
-      field: The descriptor of the field being set in the parent message.
-    """
-    super(_OneofListener, self).__init__(parent_message)
-    self._field = field
-
-  def Modified(self):
-    """Also updates the state of the containing oneof in the parent message."""
-    try:
-      self._parent_message_weakref._UpdateOneofState(self._field)
-      super(_OneofListener, self).Modified()
-    except ReferenceError:
-      pass
-
-
-# TODO(robinson): Move elsewhere?  This file is getting pretty ridiculous...
-# TODO(robinson): Unify error handling of "unknown extension" crap.
-# TODO(robinson): Support iteritems()-style iteration over all
-# extensions with the "has" bits turned on?
-class _ExtensionDict(object):
-
-  """Dict-like container for supporting an indexable "Extensions"
-  field on proto instances.
-
-  Note that in all cases we expect extension handles to be
-  FieldDescriptors.
-  """
-
-  def __init__(self, extended_message):
-    """extended_message: Message instance for which we are the Extensions dict.
-    """
-
-    self._extended_message = extended_message
-
-  def __getitem__(self, extension_handle):
-    """Returns the current value of the given extension handle."""
-
-    _VerifyExtensionHandle(self._extended_message, extension_handle)
-
-    result = self._extended_message._fields.get(extension_handle)
-    if result is not None:
-      return result
-
-    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
-      result = extension_handle._default_constructor(self._extended_message)
-    elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
-      result = extension_handle.message_type._concrete_class()
-      try:
-        result._SetListener(self._extended_message._listener_for_children)
-      except ReferenceError:
-        pass
-    else:
-      # Singular scalar -- just return the default without inserting into the
-      # dict.
-      return extension_handle.default_value
-
-    # Atomically check if another thread has preempted us and, if not, swap
-    # in the new object we just created.  If someone has preempted us, we
-    # take that object and discard ours.
-    # WARNING:  We are relying on setdefault() being atomic.  This is true
-    #   in CPython but we haven't investigated others.  This warning appears
-    #   in several other locations in this file.
-    result = self._extended_message._fields.setdefault(
-        extension_handle, result)
-
-    return result
-
-  def __eq__(self, other):
-    if not isinstance(other, self.__class__):
-      return False
-
-    my_fields = self._extended_message.ListFields()
-    other_fields = other._extended_message.ListFields()
-
-    # Get rid of non-extension fields.
-    my_fields    = [ field for field in my_fields    if field.is_extension ]
-    other_fields = [ field for field in other_fields if field.is_extension ]
-
-    return my_fields == other_fields
-
-  def __ne__(self, other):
-    return not self == other
-
-  def __hash__(self):
-    raise TypeError('unhashable object')
-
-  # Note that this is only meaningful for non-repeated, scalar extension
-  # fields.  Note also that we may have to call _Modified() when we do
-  # successfully set a field this way, to set any necessary "has" bits in the
-  # ancestors of the extended message.
-  def __setitem__(self, extension_handle, value):
-    """If extension_handle specifies a non-repeated, scalar extension
-    field, sets the value of that field.
-    """
-
-    _VerifyExtensionHandle(self._extended_message, extension_handle)
-
-    if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
-        extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
-      raise TypeError(
-          'Cannot assign to extension "%s" because it is a repeated or '
-          'composite type.' % extension_handle.full_name)
-
-    # It's slightly wasteful to lookup the type checker each time,
-    # but we expect this to be a vanishingly uncommon case anyway.
-    type_checker = type_checkers.GetTypeChecker(extension_handle)
-    # pylint: disable=protected-access
-    self._extended_message._fields[extension_handle] = (
-        type_checker.CheckValue(value))
-    self._extended_message._Modified()
-
-  def _FindExtensionByName(self, name):
-    """Tries to find a known extension with the specified name.
-
-    Args:
-      name: Extension full name.
-
-    Returns:
-      Extension field descriptor.
-    """
-    return self._extended_message._extensions_by_name.get(name, None)
-
-  def _FindExtensionByNumber(self, number):
-    """Tries to find a known extension with the field number.
-
-    Args:
-      number: Extension field number.
-
-    Returns:
-      Extension field descriptor.
-    """
-    return self._extended_message._extensions_by_number.get(number, None)
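
Note on the class above: _ExtensionDict is what backs the Extensions attribute of generated proto2 messages. A hedged usage sketch with hypothetical names (a Container message and extensions my_int_ext / my_msg_ext declared in the same generated module):

    import container_pb2                                # hypothetical generated module

    msg = container_pb2.Container()
    msg.Extensions[container_pb2.my_int_ext] = 42       # scalar: plain item assignment
    msg.Extensions[container_pb2.my_msg_ext].value = 7  # composite: mutate in place;
                                                        # direct assignment raises TypeError
    print(msg.Extensions[container_pb2.my_int_ext])     # 42; unset scalars return defaults
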
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/test_bad_identifiers_pb2.py b/tools/swarming_client/third_party/google/protobuf/internal/test_bad_identifiers_pb2.py
deleted file mode 100644
index 306b576..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/test_bad_identifiers_pb2.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/internal/test_bad_identifiers.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import service as _service
-from google.protobuf import service_reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/internal/test_bad_identifiers.proto',
-  package='protobuf_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n3google/protobuf/internal/test_bad_identifiers.proto\x12\x11protobuf_unittest\"\x1e\n\x12TestBadIdentifiers*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x10\n\x0e\x41notherMessage2\x10\n\x0e\x41notherService:;\n\x07message\x12%.protobuf_unittest.TestBadIdentifiers\x18\x64 \x01(\t:\x03\x66oo:>\n\ndescriptor\x12%.protobuf_unittest.TestBadIdentifiers\x18\x65 \x01(\t:\x03\x62\x61r:>\n\nreflection\x12%.protobuf_unittest.TestBadIdentifiers\x18\x66 \x01(\t:\x03\x62\x61z:;\n\x07service\x12%.protobuf_unittest.TestBadIdentifiers\x18g \x01(\t:\x03quxB\x03\x90\x01\x01')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-MESSAGE_FIELD_NUMBER = 100
-message = _descriptor.FieldDescriptor(
-  name='message', full_name='protobuf_unittest.message', index=0,
-  number=100, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("foo").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DESCRIPTOR_FIELD_NUMBER = 101
-descriptor = _descriptor.FieldDescriptor(
-  name='descriptor', full_name='protobuf_unittest.descriptor', index=1,
-  number=101, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("bar").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REFLECTION_FIELD_NUMBER = 102
-reflection = _descriptor.FieldDescriptor(
-  name='reflection', full_name='protobuf_unittest.reflection', index=2,
-  number=102, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("baz").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SERVICE_FIELD_NUMBER = 103
-service = _descriptor.FieldDescriptor(
-  name='service', full_name='protobuf_unittest.service', index=3,
-  number=103, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("qux").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_TESTBADIDENTIFIERS = _descriptor.Descriptor(
-  name='TestBadIdentifiers',
-  full_name='protobuf_unittest.TestBadIdentifiers',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(100, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=74,
-  serialized_end=104,
-)
-
-
-_ANOTHERMESSAGE = _descriptor.Descriptor(
-  name='AnotherMessage',
-  full_name='protobuf_unittest.AnotherMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=106,
-  serialized_end=122,
-)
-
-DESCRIPTOR.message_types_by_name['TestBadIdentifiers'] = _TESTBADIDENTIFIERS
-DESCRIPTOR.message_types_by_name['AnotherMessage'] = _ANOTHERMESSAGE
-DESCRIPTOR.extensions_by_name['message'] = message
-DESCRIPTOR.extensions_by_name['descriptor'] = descriptor
-DESCRIPTOR.extensions_by_name['reflection'] = reflection
-DESCRIPTOR.extensions_by_name['service'] = service
-
-TestBadIdentifiers = _reflection.GeneratedProtocolMessageType('TestBadIdentifiers', (_message.Message,), dict(
-  DESCRIPTOR = _TESTBADIDENTIFIERS,
-  __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestBadIdentifiers)
-  ))
-_sym_db.RegisterMessage(TestBadIdentifiers)
-
-AnotherMessage = _reflection.GeneratedProtocolMessageType('AnotherMessage', (_message.Message,), dict(
-  DESCRIPTOR = _ANOTHERMESSAGE,
-  __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.AnotherMessage)
-  ))
-_sym_db.RegisterMessage(AnotherMessage)
-
-TestBadIdentifiers.RegisterExtension(message)
-TestBadIdentifiers.RegisterExtension(descriptor)
-TestBadIdentifiers.RegisterExtension(reflection)
-TestBadIdentifiers.RegisterExtension(service)
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\220\001\001'))
-
-_ANOTHERSERVICE = _descriptor.ServiceDescriptor(
-  name='AnotherService',
-  full_name='protobuf_unittest.AnotherService',
-  file=DESCRIPTOR,
-  index=0,
-  options=None,
-  serialized_start=124,
-  serialized_end=140,
-  methods=[
-])
-
-AnotherService = service_reflection.GeneratedServiceType('AnotherService', (_service.Service,), dict(
-  DESCRIPTOR = _ANOTHERSERVICE,
-  __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
-  ))
-
-AnotherService_Stub = service_reflection.GeneratedServiceStubType('AnotherService_Stub', (AnotherService,), dict(
-  DESCRIPTOR = _ANOTHERSERVICE,
-  __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
-  ))
-
-
-# @@protoc_insertion_point(module_scope)
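
The generated module above exists to check that extensions whose names collide with the protobuf runtime's own imports (message, descriptor, reflection, service) still resolve correctly. Roughly what its companion test relies on:

    from google.protobuf.internal import test_bad_identifiers_pb2 as pb2

    msg = pb2.TestBadIdentifiers()
    msg.Extensions[pb2.message]      # 'foo' (extension default, despite the name clash)
    msg.Extensions[pb2.descriptor]   # 'bar'
    msg.Extensions[pb2.reflection]   # 'baz'
    msg.Extensions[pb2.service]      # 'qux'
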
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/test_util.py b/tools/swarming_client/third_party/google/protobuf/internal/test_util.py
deleted file mode 100644
index 2c80559..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/test_util.py
+++ /dev/null
@@ -1,696 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Utilities for Python proto2 tests.
-
-This is intentionally modeled on C++ code in
-//google/protobuf/test_util.*.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import os.path
-
-import sys
-
-from google.protobuf import unittest_import_pb2
-from google.protobuf import unittest_pb2
-from google.protobuf import descriptor_pb2
-
-# Tests whether the given TestAllTypes message is proto2 or not.
-# This is used to gate several fields/features that only exist
-# for the proto2 version of the message.
-def IsProto2(message):
-  return message.DESCRIPTOR.syntax == "proto2"
-
-def SetAllNonLazyFields(message):
-  """Sets every non-lazy field in the message to a unique value.
-
-  Args:
-    message: A TestAllTypes instance.
-  """
-
-  #
-  # Optional fields.
-  #
-
-  message.optional_int32    = 101
-  message.optional_int64    = 102
-  message.optional_uint32   = 103
-  message.optional_uint64   = 104
-  message.optional_sint32   = 105
-  message.optional_sint64   = 106
-  message.optional_fixed32  = 107
-  message.optional_fixed64  = 108
-  message.optional_sfixed32 = 109
-  message.optional_sfixed64 = 110
-  message.optional_float    = 111
-  message.optional_double   = 112
-  message.optional_bool     = True
-  message.optional_string   = u'115'
-  message.optional_bytes    = b'116'
-
-  if IsProto2(message):
-    message.optionalgroup.a = 117
-  message.optional_nested_message.bb = 118
-  message.optional_foreign_message.c = 119
-  message.optional_import_message.d = 120
-  message.optional_public_import_message.e = 126
-
-  message.optional_nested_enum = unittest_pb2.TestAllTypes.BAZ
-  message.optional_foreign_enum = unittest_pb2.FOREIGN_BAZ
-  if IsProto2(message):
-    message.optional_import_enum = unittest_import_pb2.IMPORT_BAZ
-
-  message.optional_string_piece = u'124'
-  message.optional_cord = u'125'
-
-  #
-  # Repeated fields.
-  #
-
-  message.repeated_int32.append(201)
-  message.repeated_int64.append(202)
-  message.repeated_uint32.append(203)
-  message.repeated_uint64.append(204)
-  message.repeated_sint32.append(205)
-  message.repeated_sint64.append(206)
-  message.repeated_fixed32.append(207)
-  message.repeated_fixed64.append(208)
-  message.repeated_sfixed32.append(209)
-  message.repeated_sfixed64.append(210)
-  message.repeated_float.append(211)
-  message.repeated_double.append(212)
-  message.repeated_bool.append(True)
-  message.repeated_string.append(u'215')
-  message.repeated_bytes.append(b'216')
-
-  if IsProto2(message):
-    message.repeatedgroup.add().a = 217
-  message.repeated_nested_message.add().bb = 218
-  message.repeated_foreign_message.add().c = 219
-  message.repeated_import_message.add().d = 220
-  message.repeated_lazy_message.add().bb = 227
-
-  message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR)
-  message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAR)
-  if IsProto2(message):
-    message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAR)
-
-  message.repeated_string_piece.append(u'224')
-  message.repeated_cord.append(u'225')
-
-  # Add a second one of each field.
-  message.repeated_int32.append(301)
-  message.repeated_int64.append(302)
-  message.repeated_uint32.append(303)
-  message.repeated_uint64.append(304)
-  message.repeated_sint32.append(305)
-  message.repeated_sint64.append(306)
-  message.repeated_fixed32.append(307)
-  message.repeated_fixed64.append(308)
-  message.repeated_sfixed32.append(309)
-  message.repeated_sfixed64.append(310)
-  message.repeated_float.append(311)
-  message.repeated_double.append(312)
-  message.repeated_bool.append(False)
-  message.repeated_string.append(u'315')
-  message.repeated_bytes.append(b'316')
-
-  if IsProto2(message):
-    message.repeatedgroup.add().a = 317
-  message.repeated_nested_message.add().bb = 318
-  message.repeated_foreign_message.add().c = 319
-  message.repeated_import_message.add().d = 320
-  message.repeated_lazy_message.add().bb = 327
-
-  message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAZ)
-  message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAZ)
-  if IsProto2(message):
-    message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAZ)
-
-  message.repeated_string_piece.append(u'324')
-  message.repeated_cord.append(u'325')
-
-  #
-  # Fields that have defaults.
-  #
-
-  if IsProto2(message):
-    message.default_int32 = 401
-    message.default_int64 = 402
-    message.default_uint32 = 403
-    message.default_uint64 = 404
-    message.default_sint32 = 405
-    message.default_sint64 = 406
-    message.default_fixed32 = 407
-    message.default_fixed64 = 408
-    message.default_sfixed32 = 409
-    message.default_sfixed64 = 410
-    message.default_float = 411
-    message.default_double = 412
-    message.default_bool = False
-    message.default_string = '415'
-    message.default_bytes = b'416'
-
-    message.default_nested_enum = unittest_pb2.TestAllTypes.FOO
-    message.default_foreign_enum = unittest_pb2.FOREIGN_FOO
-    message.default_import_enum = unittest_import_pb2.IMPORT_FOO
-
-    message.default_string_piece = '424'
-    message.default_cord = '425'
-
-  message.oneof_uint32 = 601
-  message.oneof_nested_message.bb = 602
-  message.oneof_string = '603'
-  message.oneof_bytes = b'604'
-
-
-def SetAllFields(message):
-  SetAllNonLazyFields(message)
-  message.optional_lazy_message.bb = 127
-
-
-def SetAllExtensions(message):
-  """Sets every extension in the message to a unique value.
-
-  Args:
-    message: A unittest_pb2.TestAllExtensions instance.
-  """
-
-  extensions = message.Extensions
-  pb2 = unittest_pb2
-  import_pb2 = unittest_import_pb2
-
-  #
-  # Optional fields.
-  #
-
-  extensions[pb2.optional_int32_extension] = 101
-  extensions[pb2.optional_int64_extension] = 102
-  extensions[pb2.optional_uint32_extension] = 103
-  extensions[pb2.optional_uint64_extension] = 104
-  extensions[pb2.optional_sint32_extension] = 105
-  extensions[pb2.optional_sint64_extension] = 106
-  extensions[pb2.optional_fixed32_extension] = 107
-  extensions[pb2.optional_fixed64_extension] = 108
-  extensions[pb2.optional_sfixed32_extension] = 109
-  extensions[pb2.optional_sfixed64_extension] = 110
-  extensions[pb2.optional_float_extension] = 111
-  extensions[pb2.optional_double_extension] = 112
-  extensions[pb2.optional_bool_extension] = True
-  extensions[pb2.optional_string_extension] = u'115'
-  extensions[pb2.optional_bytes_extension] = b'116'
-
-  extensions[pb2.optionalgroup_extension].a = 117
-  extensions[pb2.optional_nested_message_extension].bb = 118
-  extensions[pb2.optional_foreign_message_extension].c = 119
-  extensions[pb2.optional_import_message_extension].d = 120
-  extensions[pb2.optional_public_import_message_extension].e = 126
-  extensions[pb2.optional_lazy_message_extension].bb = 127
-
-  extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
-  extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
-  extensions[pb2.optional_foreign_enum_extension] = pb2.FOREIGN_BAZ
-  extensions[pb2.optional_import_enum_extension] = import_pb2.IMPORT_BAZ
-
-  extensions[pb2.optional_string_piece_extension] = u'124'
-  extensions[pb2.optional_cord_extension] = u'125'
-
-  #
-  # Repeated fields.
-  #
-
-  extensions[pb2.repeated_int32_extension].append(201)
-  extensions[pb2.repeated_int64_extension].append(202)
-  extensions[pb2.repeated_uint32_extension].append(203)
-  extensions[pb2.repeated_uint64_extension].append(204)
-  extensions[pb2.repeated_sint32_extension].append(205)
-  extensions[pb2.repeated_sint64_extension].append(206)
-  extensions[pb2.repeated_fixed32_extension].append(207)
-  extensions[pb2.repeated_fixed64_extension].append(208)
-  extensions[pb2.repeated_sfixed32_extension].append(209)
-  extensions[pb2.repeated_sfixed64_extension].append(210)
-  extensions[pb2.repeated_float_extension].append(211)
-  extensions[pb2.repeated_double_extension].append(212)
-  extensions[pb2.repeated_bool_extension].append(True)
-  extensions[pb2.repeated_string_extension].append(u'215')
-  extensions[pb2.repeated_bytes_extension].append(b'216')
-
-  extensions[pb2.repeatedgroup_extension].add().a = 217
-  extensions[pb2.repeated_nested_message_extension].add().bb = 218
-  extensions[pb2.repeated_foreign_message_extension].add().c = 219
-  extensions[pb2.repeated_import_message_extension].add().d = 220
-  extensions[pb2.repeated_lazy_message_extension].add().bb = 227
-
-  extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAR)
-  extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAR)
-  extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAR)
-
-  extensions[pb2.repeated_string_piece_extension].append(u'224')
-  extensions[pb2.repeated_cord_extension].append(u'225')
-
-  # Append a second one of each field.
-  extensions[pb2.repeated_int32_extension].append(301)
-  extensions[pb2.repeated_int64_extension].append(302)
-  extensions[pb2.repeated_uint32_extension].append(303)
-  extensions[pb2.repeated_uint64_extension].append(304)
-  extensions[pb2.repeated_sint32_extension].append(305)
-  extensions[pb2.repeated_sint64_extension].append(306)
-  extensions[pb2.repeated_fixed32_extension].append(307)
-  extensions[pb2.repeated_fixed64_extension].append(308)
-  extensions[pb2.repeated_sfixed32_extension].append(309)
-  extensions[pb2.repeated_sfixed64_extension].append(310)
-  extensions[pb2.repeated_float_extension].append(311)
-  extensions[pb2.repeated_double_extension].append(312)
-  extensions[pb2.repeated_bool_extension].append(False)
-  extensions[pb2.repeated_string_extension].append(u'315')
-  extensions[pb2.repeated_bytes_extension].append(b'316')
-
-  extensions[pb2.repeatedgroup_extension].add().a = 317
-  extensions[pb2.repeated_nested_message_extension].add().bb = 318
-  extensions[pb2.repeated_foreign_message_extension].add().c = 319
-  extensions[pb2.repeated_import_message_extension].add().d = 320
-  extensions[pb2.repeated_lazy_message_extension].add().bb = 327
-
-  extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAZ)
-  extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAZ)
-  extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAZ)
-
-  extensions[pb2.repeated_string_piece_extension].append(u'324')
-  extensions[pb2.repeated_cord_extension].append(u'325')
-
-  #
-  # Fields with defaults.
-  #
-
-  extensions[pb2.default_int32_extension] = 401
-  extensions[pb2.default_int64_extension] = 402
-  extensions[pb2.default_uint32_extension] = 403
-  extensions[pb2.default_uint64_extension] = 404
-  extensions[pb2.default_sint32_extension] = 405
-  extensions[pb2.default_sint64_extension] = 406
-  extensions[pb2.default_fixed32_extension] = 407
-  extensions[pb2.default_fixed64_extension] = 408
-  extensions[pb2.default_sfixed32_extension] = 409
-  extensions[pb2.default_sfixed64_extension] = 410
-  extensions[pb2.default_float_extension] = 411
-  extensions[pb2.default_double_extension] = 412
-  extensions[pb2.default_bool_extension] = False
-  extensions[pb2.default_string_extension] = u'415'
-  extensions[pb2.default_bytes_extension] = b'416'
-
-  extensions[pb2.default_nested_enum_extension] = pb2.TestAllTypes.FOO
-  extensions[pb2.default_foreign_enum_extension] = pb2.FOREIGN_FOO
-  extensions[pb2.default_import_enum_extension] = import_pb2.IMPORT_FOO
-
-  extensions[pb2.default_string_piece_extension] = u'424'
-  extensions[pb2.default_cord_extension] = '425'
-
-  extensions[pb2.oneof_uint32_extension] = 601
-  extensions[pb2.oneof_nested_message_extension].bb = 602
-  extensions[pb2.oneof_string_extension] = u'603'
-  extensions[pb2.oneof_bytes_extension] = b'604'
-
-
-def SetAllFieldsAndExtensions(message):
-  """Sets every field and extension in the message to a unique value.
-
-  Args:
-    message: A unittest_pb2.TestAllExtensions message.
-  """
-  message.my_int = 1
-  message.my_string = 'foo'
-  message.my_float = 1.0
-  message.Extensions[unittest_pb2.my_extension_int] = 23
-  message.Extensions[unittest_pb2.my_extension_string] = 'bar'
-
-
-def ExpectAllFieldsAndExtensionsInOrder(serialized):
-  """Ensures that serialized is the serialization we expect for a message
-  filled with SetAllFieldsAndExtensions().  (Specifically, ensures that the
-  serialization is in canonical, tag-number order).
-  """
-  my_extension_int = unittest_pb2.my_extension_int
-  my_extension_string = unittest_pb2.my_extension_string
-  expected_strings = []
-  message = unittest_pb2.TestFieldOrderings()
-  message.my_int = 1  # Field 1.
-  expected_strings.append(message.SerializeToString())
-  message.Clear()
-  message.Extensions[my_extension_int] = 23  # Field 5.
-  expected_strings.append(message.SerializeToString())
-  message.Clear()
-  message.my_string = 'foo'  # Field 11.
-  expected_strings.append(message.SerializeToString())
-  message.Clear()
-  message.Extensions[my_extension_string] = 'bar'  # Field 50.
-  expected_strings.append(message.SerializeToString())
-  message.Clear()
-  message.my_float = 1.0
-  expected_strings.append(message.SerializeToString())
-  message.Clear()
-  expected = b''.join(expected_strings)
-
-  if expected != serialized:
-    raise ValueError('Expected %r, found %r' % (expected, serialized))
-
-
-def ExpectAllFieldsSet(test_case, message):
-  """Check all fields for correct values have after Set*Fields() is called."""
-  test_case.assertTrue(message.HasField('optional_int32'))
-  test_case.assertTrue(message.HasField('optional_int64'))
-  test_case.assertTrue(message.HasField('optional_uint32'))
-  test_case.assertTrue(message.HasField('optional_uint64'))
-  test_case.assertTrue(message.HasField('optional_sint32'))
-  test_case.assertTrue(message.HasField('optional_sint64'))
-  test_case.assertTrue(message.HasField('optional_fixed32'))
-  test_case.assertTrue(message.HasField('optional_fixed64'))
-  test_case.assertTrue(message.HasField('optional_sfixed32'))
-  test_case.assertTrue(message.HasField('optional_sfixed64'))
-  test_case.assertTrue(message.HasField('optional_float'))
-  test_case.assertTrue(message.HasField('optional_double'))
-  test_case.assertTrue(message.HasField('optional_bool'))
-  test_case.assertTrue(message.HasField('optional_string'))
-  test_case.assertTrue(message.HasField('optional_bytes'))
-
-  if IsProto2(message):
-    test_case.assertTrue(message.HasField('optionalgroup'))
-  test_case.assertTrue(message.HasField('optional_nested_message'))
-  test_case.assertTrue(message.HasField('optional_foreign_message'))
-  test_case.assertTrue(message.HasField('optional_import_message'))
-
-  test_case.assertTrue(message.optionalgroup.HasField('a'))
-  test_case.assertTrue(message.optional_nested_message.HasField('bb'))
-  test_case.assertTrue(message.optional_foreign_message.HasField('c'))
-  test_case.assertTrue(message.optional_import_message.HasField('d'))
-
-  test_case.assertTrue(message.HasField('optional_nested_enum'))
-  test_case.assertTrue(message.HasField('optional_foreign_enum'))
-  if IsProto2(message):
-    test_case.assertTrue(message.HasField('optional_import_enum'))
-
-  test_case.assertTrue(message.HasField('optional_string_piece'))
-  test_case.assertTrue(message.HasField('optional_cord'))
-
-  test_case.assertEqual(101, message.optional_int32)
-  test_case.assertEqual(102, message.optional_int64)
-  test_case.assertEqual(103, message.optional_uint32)
-  test_case.assertEqual(104, message.optional_uint64)
-  test_case.assertEqual(105, message.optional_sint32)
-  test_case.assertEqual(106, message.optional_sint64)
-  test_case.assertEqual(107, message.optional_fixed32)
-  test_case.assertEqual(108, message.optional_fixed64)
-  test_case.assertEqual(109, message.optional_sfixed32)
-  test_case.assertEqual(110, message.optional_sfixed64)
-  test_case.assertEqual(111, message.optional_float)
-  test_case.assertEqual(112, message.optional_double)
-  test_case.assertEqual(True, message.optional_bool)
-  test_case.assertEqual('115', message.optional_string)
-  test_case.assertEqual(b'116', message.optional_bytes)
-
-  if IsProto2(message):
-    test_case.assertEqual(117, message.optionalgroup.a)
-  test_case.assertEqual(118, message.optional_nested_message.bb)
-  test_case.assertEqual(119, message.optional_foreign_message.c)
-  test_case.assertEqual(120, message.optional_import_message.d)
-  test_case.assertEqual(126, message.optional_public_import_message.e)
-  test_case.assertEqual(127, message.optional_lazy_message.bb)
-
-  test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
-                        message.optional_nested_enum)
-  test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
-                        message.optional_foreign_enum)
-  if IsProto2(message):
-    test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
-                          message.optional_import_enum)
-
-  # -----------------------------------------------------------------
-
-  test_case.assertEqual(2, len(message.repeated_int32))
-  test_case.assertEqual(2, len(message.repeated_int64))
-  test_case.assertEqual(2, len(message.repeated_uint32))
-  test_case.assertEqual(2, len(message.repeated_uint64))
-  test_case.assertEqual(2, len(message.repeated_sint32))
-  test_case.assertEqual(2, len(message.repeated_sint64))
-  test_case.assertEqual(2, len(message.repeated_fixed32))
-  test_case.assertEqual(2, len(message.repeated_fixed64))
-  test_case.assertEqual(2, len(message.repeated_sfixed32))
-  test_case.assertEqual(2, len(message.repeated_sfixed64))
-  test_case.assertEqual(2, len(message.repeated_float))
-  test_case.assertEqual(2, len(message.repeated_double))
-  test_case.assertEqual(2, len(message.repeated_bool))
-  test_case.assertEqual(2, len(message.repeated_string))
-  test_case.assertEqual(2, len(message.repeated_bytes))
-
-  if IsProto2(message):
-    test_case.assertEqual(2, len(message.repeatedgroup))
-  test_case.assertEqual(2, len(message.repeated_nested_message))
-  test_case.assertEqual(2, len(message.repeated_foreign_message))
-  test_case.assertEqual(2, len(message.repeated_import_message))
-  test_case.assertEqual(2, len(message.repeated_nested_enum))
-  test_case.assertEqual(2, len(message.repeated_foreign_enum))
-  if IsProto2(message):
-    test_case.assertEqual(2, len(message.repeated_import_enum))
-
-  test_case.assertEqual(2, len(message.repeated_string_piece))
-  test_case.assertEqual(2, len(message.repeated_cord))
-
-  test_case.assertEqual(201, message.repeated_int32[0])
-  test_case.assertEqual(202, message.repeated_int64[0])
-  test_case.assertEqual(203, message.repeated_uint32[0])
-  test_case.assertEqual(204, message.repeated_uint64[0])
-  test_case.assertEqual(205, message.repeated_sint32[0])
-  test_case.assertEqual(206, message.repeated_sint64[0])
-  test_case.assertEqual(207, message.repeated_fixed32[0])
-  test_case.assertEqual(208, message.repeated_fixed64[0])
-  test_case.assertEqual(209, message.repeated_sfixed32[0])
-  test_case.assertEqual(210, message.repeated_sfixed64[0])
-  test_case.assertEqual(211, message.repeated_float[0])
-  test_case.assertEqual(212, message.repeated_double[0])
-  test_case.assertEqual(True, message.repeated_bool[0])
-  test_case.assertEqual('215', message.repeated_string[0])
-  test_case.assertEqual(b'216', message.repeated_bytes[0])
-
-  if IsProto2(message):
-    test_case.assertEqual(217, message.repeatedgroup[0].a)
-  test_case.assertEqual(218, message.repeated_nested_message[0].bb)
-  test_case.assertEqual(219, message.repeated_foreign_message[0].c)
-  test_case.assertEqual(220, message.repeated_import_message[0].d)
-  test_case.assertEqual(227, message.repeated_lazy_message[0].bb)
-
-  test_case.assertEqual(unittest_pb2.TestAllTypes.BAR,
-                        message.repeated_nested_enum[0])
-  test_case.assertEqual(unittest_pb2.FOREIGN_BAR,
-                        message.repeated_foreign_enum[0])
-  if IsProto2(message):
-    test_case.assertEqual(unittest_import_pb2.IMPORT_BAR,
-                          message.repeated_import_enum[0])
-
-  test_case.assertEqual(301, message.repeated_int32[1])
-  test_case.assertEqual(302, message.repeated_int64[1])
-  test_case.assertEqual(303, message.repeated_uint32[1])
-  test_case.assertEqual(304, message.repeated_uint64[1])
-  test_case.assertEqual(305, message.repeated_sint32[1])
-  test_case.assertEqual(306, message.repeated_sint64[1])
-  test_case.assertEqual(307, message.repeated_fixed32[1])
-  test_case.assertEqual(308, message.repeated_fixed64[1])
-  test_case.assertEqual(309, message.repeated_sfixed32[1])
-  test_case.assertEqual(310, message.repeated_sfixed64[1])
-  test_case.assertEqual(311, message.repeated_float[1])
-  test_case.assertEqual(312, message.repeated_double[1])
-  test_case.assertEqual(False, message.repeated_bool[1])
-  test_case.assertEqual('315', message.repeated_string[1])
-  test_case.assertEqual(b'316', message.repeated_bytes[1])
-
-  if IsProto2(message):
-    test_case.assertEqual(317, message.repeatedgroup[1].a)
-  test_case.assertEqual(318, message.repeated_nested_message[1].bb)
-  test_case.assertEqual(319, message.repeated_foreign_message[1].c)
-  test_case.assertEqual(320, message.repeated_import_message[1].d)
-  test_case.assertEqual(327, message.repeated_lazy_message[1].bb)
-
-  test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
-                        message.repeated_nested_enum[1])
-  test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
-                        message.repeated_foreign_enum[1])
-  if IsProto2(message):
-    test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
-                          message.repeated_import_enum[1])
-
-  # -----------------------------------------------------------------
-
-  if IsProto2(message):
-    test_case.assertTrue(message.HasField('default_int32'))
-    test_case.assertTrue(message.HasField('default_int64'))
-    test_case.assertTrue(message.HasField('default_uint32'))
-    test_case.assertTrue(message.HasField('default_uint64'))
-    test_case.assertTrue(message.HasField('default_sint32'))
-    test_case.assertTrue(message.HasField('default_sint64'))
-    test_case.assertTrue(message.HasField('default_fixed32'))
-    test_case.assertTrue(message.HasField('default_fixed64'))
-    test_case.assertTrue(message.HasField('default_sfixed32'))
-    test_case.assertTrue(message.HasField('default_sfixed64'))
-    test_case.assertTrue(message.HasField('default_float'))
-    test_case.assertTrue(message.HasField('default_double'))
-    test_case.assertTrue(message.HasField('default_bool'))
-    test_case.assertTrue(message.HasField('default_string'))
-    test_case.assertTrue(message.HasField('default_bytes'))
-
-    test_case.assertTrue(message.HasField('default_nested_enum'))
-    test_case.assertTrue(message.HasField('default_foreign_enum'))
-    test_case.assertTrue(message.HasField('default_import_enum'))
-
-    test_case.assertEqual(401, message.default_int32)
-    test_case.assertEqual(402, message.default_int64)
-    test_case.assertEqual(403, message.default_uint32)
-    test_case.assertEqual(404, message.default_uint64)
-    test_case.assertEqual(405, message.default_sint32)
-    test_case.assertEqual(406, message.default_sint64)
-    test_case.assertEqual(407, message.default_fixed32)
-    test_case.assertEqual(408, message.default_fixed64)
-    test_case.assertEqual(409, message.default_sfixed32)
-    test_case.assertEqual(410, message.default_sfixed64)
-    test_case.assertEqual(411, message.default_float)
-    test_case.assertEqual(412, message.default_double)
-    test_case.assertEqual(False, message.default_bool)
-    test_case.assertEqual('415', message.default_string)
-    test_case.assertEqual(b'416', message.default_bytes)
-
-    test_case.assertEqual(unittest_pb2.TestAllTypes.FOO,
-                          message.default_nested_enum)
-    test_case.assertEqual(unittest_pb2.FOREIGN_FOO,
-                          message.default_foreign_enum)
-    test_case.assertEqual(unittest_import_pb2.IMPORT_FOO,
-                          message.default_import_enum)
-
-
-def GoldenFile(filename):
-  """Finds the given golden file and returns a file object representing it."""
-
-  # Search up the directory tree looking for the C++ protobuf source code.
-  path = '.'
-  while os.path.exists(path):
-    if os.path.exists(os.path.join(path, 'src/google/protobuf')):
-      # Found it.  Load the golden file from the testdata directory.
-      full_path = os.path.join(path, 'src/google/protobuf/testdata', filename)
-      return open(full_path, 'rb')
-    path = os.path.join(path, '..')
-
-  # Search internally.
-  path = '.'
-  full_path = os.path.join(path, 'third_party/py/google/protobuf/testdata',
-                           filename)
-  if os.path.exists(full_path):
-    # Found it.  Load the golden file from the testdata directory.
-    return open(full_path, 'rb')
-
-  raise RuntimeError(
-      'Could not find golden files.  This test must be run from within the '
-      'protobuf source package so that it can read test data files from the '
-      'C++ source tree.')
-
-
-def GoldenFileData(filename):
-  """Finds the given golden file and returns its contents."""
-  with GoldenFile(filename) as f:
-    return f.read()
-
-
-def SetAllPackedFields(message):
-  """Sets every field in the message to a unique value.
-
-  Args:
-    message: A TestPackedTypes instance.
-  """
-  message.packed_int32.extend([601, 701])
-  message.packed_int64.extend([602, 702])
-  message.packed_uint32.extend([603, 703])
-  message.packed_uint64.extend([604, 704])
-  message.packed_sint32.extend([605, 705])
-  message.packed_sint64.extend([606, 706])
-  message.packed_fixed32.extend([607, 707])
-  message.packed_fixed64.extend([608, 708])
-  message.packed_sfixed32.extend([609, 709])
-  message.packed_sfixed64.extend([610, 710])
-  message.packed_float.extend([611.0, 711.0])
-  message.packed_double.extend([612.0, 712.0])
-  message.packed_bool.extend([True, False])
-  message.packed_enum.extend([unittest_pb2.FOREIGN_BAR,
-                              unittest_pb2.FOREIGN_BAZ])
-
-
-def SetAllPackedExtensions(message):
-  """Sets every extension in the message to a unique value.
-
-  Args:
-    message: A unittest_pb2.TestPackedExtensions instance.
-  """
-  extensions = message.Extensions
-  pb2 = unittest_pb2
-
-  extensions[pb2.packed_int32_extension].extend([601, 701])
-  extensions[pb2.packed_int64_extension].extend([602, 702])
-  extensions[pb2.packed_uint32_extension].extend([603, 703])
-  extensions[pb2.packed_uint64_extension].extend([604, 704])
-  extensions[pb2.packed_sint32_extension].extend([605, 705])
-  extensions[pb2.packed_sint64_extension].extend([606, 706])
-  extensions[pb2.packed_fixed32_extension].extend([607, 707])
-  extensions[pb2.packed_fixed64_extension].extend([608, 708])
-  extensions[pb2.packed_sfixed32_extension].extend([609, 709])
-  extensions[pb2.packed_sfixed64_extension].extend([610, 710])
-  extensions[pb2.packed_float_extension].extend([611.0, 711.0])
-  extensions[pb2.packed_double_extension].extend([612.0, 712.0])
-  extensions[pb2.packed_bool_extension].extend([True, False])
-  extensions[pb2.packed_enum_extension].extend([unittest_pb2.FOREIGN_BAR,
-                                                unittest_pb2.FOREIGN_BAZ])
-
-
-def SetAllUnpackedFields(message):
-  """Sets every field in the message to a unique value.
-
-  Args:
-    message: A unittest_pb2.TestUnpackedTypes instance.
-  """
-  message.unpacked_int32.extend([601, 701])
-  message.unpacked_int64.extend([602, 702])
-  message.unpacked_uint32.extend([603, 703])
-  message.unpacked_uint64.extend([604, 704])
-  message.unpacked_sint32.extend([605, 705])
-  message.unpacked_sint64.extend([606, 706])
-  message.unpacked_fixed32.extend([607, 707])
-  message.unpacked_fixed64.extend([608, 708])
-  message.unpacked_sfixed32.extend([609, 709])
-  message.unpacked_sfixed64.extend([610, 710])
-  message.unpacked_float.extend([611.0, 711.0])
-  message.unpacked_double.extend([612.0, 712.0])
-  message.unpacked_bool.extend([True, False])
-  message.unpacked_enum.extend([unittest_pb2.FOREIGN_BAR,
-                                unittest_pb2.FOREIGN_BAZ])
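
For context, the helpers in this deleted test_util.py are normally used in pairs inside the protobuf unit tests, roughly like this (assuming the generated unittest_pb2 test protos are importable):

    import unittest

    from google.protobuf import unittest_pb2
    from google.protobuf.internal import test_util


    class AllFieldsTest(unittest.TestCase):

      def testRoundTrip(self):
        message = unittest_pb2.TestAllTypes()
        test_util.SetAllFields(message)           # give every field a unique value
        test_util.ExpectAllFieldsSet(self, message)

        copy = unittest_pb2.TestAllTypes()
        copy.MergeFromString(message.SerializeToString())
        test_util.ExpectAllFieldsSet(self, copy)  # values survive a round trip
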
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/type_checkers.py b/tools/swarming_client/third_party/google/protobuf/internal/type_checkers.py
deleted file mode 100644
index 1be3ad9..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/type_checkers.py
+++ /dev/null
@@ -1,352 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides type checking routines.
-
-This module defines type checking utilities in the forms of dictionaries:
-
-VALUE_CHECKERS: A dictionary of field types and a value validation object.
-TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
-  function.
-TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
-  function.
-FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
-  corresponding wire types.
-TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
-  function.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import six
-
-if six.PY3:
-  long = int
-
-from google.protobuf.internal import api_implementation
-from google.protobuf.internal import decoder
-from google.protobuf.internal import encoder
-from google.protobuf.internal import wire_format
-from google.protobuf import descriptor
-
-_FieldDescriptor = descriptor.FieldDescriptor
-
-def SupportsOpenEnums(field_descriptor):
-  return field_descriptor.containing_type.syntax == "proto3"
-
-def GetTypeChecker(field):
-  """Returns a type checker for a message field of the specified types.
-
-  Args:
-    field: FieldDescriptor object for this field.
-
-  Returns:
-    An instance of TypeChecker which can be used to verify the types
-    of values assigned to a field of the specified type.
-  """
-  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
-      field.type == _FieldDescriptor.TYPE_STRING):
-    return UnicodeValueChecker()
-  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
-    if SupportsOpenEnums(field):
-      # When open enums are supported, any int32 can be assigned.
-      return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
-    else:
-      return EnumValueChecker(field.enum_type)
-  return _VALUE_CHECKERS[field.cpp_type]
-
-
-# None of the typecheckers below make any attempt to guard against people
-# subclassing builtin types and doing weird things.  We're not trying to
-# protect against malicious clients here, just people accidentally shooting
-# themselves in the foot in obvious ways.
-
-class TypeChecker(object):
-
-  """Type checker used to catch type errors as early as possible
-  when the client is setting scalar fields in protocol messages.
-  """
-
-  def __init__(self, *acceptable_types):
-    self._acceptable_types = acceptable_types
-
-  def CheckValue(self, proposed_value):
-    """Type check the provided value and return it.
-
-    The returned value might have been normalized to another type.
-    """
-    if not isinstance(proposed_value, self._acceptable_types):
-      message = ('%.1024r has type %s, but expected one of: %s' %
-                 (proposed_value, type(proposed_value), self._acceptable_types))
-      raise TypeError(message)
-    return proposed_value
-
-
-class TypeCheckerWithDefault(TypeChecker):
-
-  def __init__(self, default_value, *acceptable_types):
-    TypeChecker.__init__(self, acceptable_types)
-    self._default_value = default_value
-
-  def DefaultValue(self):
-    return self._default_value
-
-
-# IntValueChecker and its subclasses perform integer type-checks
-# and bounds-checks.
-class IntValueChecker(object):
-
-  """Checker used for integer fields.  Performs type-check and range check."""
-
-  def CheckValue(self, proposed_value):
-    if not isinstance(proposed_value, six.integer_types):
-      message = ('%.1024r has type %s, but expected one of: %s' %
-                 (proposed_value, type(proposed_value), six.integer_types))
-      raise TypeError(message)
-    if not self._MIN <= proposed_value <= self._MAX:
-      raise ValueError('Value out of range: %d' % proposed_value)
-    # We force 32-bit values to int and 64-bit values to long to make
-    # alternate implementations where the distinction is more significant
-    # (e.g. the C++ implementation) simpler.
-    proposed_value = self._TYPE(proposed_value)
-    return proposed_value
-
-  def DefaultValue(self):
-    return 0
-
-
-class EnumValueChecker(object):
-
-  """Checker used for enum fields.  Performs type-check and range check."""
-
-  def __init__(self, enum_type):
-    self._enum_type = enum_type
-
-  def CheckValue(self, proposed_value):
-    if not isinstance(proposed_value, six.integer_types):
-      message = ('%.1024r has type %s, but expected one of: %s' %
-                 (proposed_value, type(proposed_value), six.integer_types))
-      raise TypeError(message)
-    if proposed_value not in self._enum_type.values_by_number:
-      raise ValueError('Unknown enum value: %d' % proposed_value)
-    return proposed_value
-
-  def DefaultValue(self):
-    return self._enum_type.values[0].number
-
-
-class UnicodeValueChecker(object):
-
-  """Checker used for string fields.
-
-  Always returns a unicode value, even if the input is of type str.
-  """
-
-  def CheckValue(self, proposed_value):
-    if not isinstance(proposed_value, (bytes, six.text_type)):
-      message = ('%.1024r has type %s, but expected one of: %s' %
-                 (proposed_value, type(proposed_value), (bytes, six.text_type)))
-      raise TypeError(message)
-
-    # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
-    if isinstance(proposed_value, bytes):
-      try:
-        proposed_value = proposed_value.decode('utf-8')
-      except UnicodeDecodeError:
-        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
-                         'encoding. Non-UTF-8 strings must be converted to '
-                         'unicode objects before being added.' %
-                         (proposed_value))
-    return proposed_value
-
-  def DefaultValue(self):
-    return u""
-
-
-class Int32ValueChecker(IntValueChecker):
-  # We're sure to use ints instead of longs here since comparison may be more
-  # efficient.
-  _MIN = -2147483648
-  _MAX = 2147483647
-  _TYPE = int
-
-
-class Uint32ValueChecker(IntValueChecker):
-  _MIN = 0
-  _MAX = (1 << 32) - 1
-  _TYPE = int
-
-
-class Int64ValueChecker(IntValueChecker):
-  _MIN = -(1 << 63)
-  _MAX = (1 << 63) - 1
-  _TYPE = long
-
-
-class Uint64ValueChecker(IntValueChecker):
-  _MIN = 0
-  _MAX = (1 << 64) - 1
-  _TYPE = long
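The bounds used by the four concrete checkers above are plain powers of two; a quick standalone check of the int32 case (check_int32 is an illustrative helper, not the module's API):

    INT32_MIN, INT32_MAX = -(1 << 31), (1 << 31) - 1
    assert (INT32_MIN, INT32_MAX) == (-2147483648, 2147483647)

    def check_int32(value):
      if not isinstance(value, int):
        raise TypeError('expected an integer, got %r' % (value,))
      if not INT32_MIN <= value <= INT32_MAX:
        raise ValueError('Value out of range: %d' % value)
      return int(value)   # 32-bit values are forced to int, as above

    print(check_int32(INT32_MAX))     # 2147483647
    try:
      check_int32(INT32_MAX + 1)      # one past the top of the range
    except ValueError as e:
      print(e)                        # Value out of range: 2147483648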
-
-
-# Type-checkers for all scalar CPPTYPEs.
-_VALUE_CHECKERS = {
-    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
-    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
-    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
-    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
-    _FieldDescriptor.CPPTYPE_DOUBLE: TypeCheckerWithDefault(
-        0.0, float, int, long),
-    _FieldDescriptor.CPPTYPE_FLOAT: TypeCheckerWithDefault(
-        0.0, float, int, long),
-    _FieldDescriptor.CPPTYPE_BOOL: TypeCheckerWithDefault(
-        False, bool, int),
-    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
-    }
-
-
-# Map from field type to a function F, such that F(field_num, value)
-# gives the total byte size for a value of the given type.  This
-# byte size includes tag information and any other additional space
-# associated with serializing "value".
-TYPE_TO_BYTE_SIZE_FN = {
-    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
-    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
-    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
-    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
-    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
-    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
-    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
-    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
-    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
-    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
-    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
-    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
-    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
-    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
-    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
-    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
-    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
-    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
-    }
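For illustration, what a TYPE_TO_BYTE_SIZE_FN entry computes for a varint-encoded field: the size of the tag plus the size of the value (a standalone sketch, not the encoder module's code):

    def varint_size(value):
      # Bytes in the base-128 varint encoding of a non-negative integer.
      size = 1
      while value > 0x7f:
        value >>= 7
        size += 1
      return size

    def uint64_byte_size(field_number, value):
      tag = (field_number << 3) | 0      # wire type 0 = varint
      return varint_size(tag) + varint_size(value)

    print(uint64_byte_size(1, 127))      # 2: 1-byte tag + 1-byte value
    print(uint64_byte_size(16, 300))     # 4: 2-byte tag + 2-byte value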
-
-
-# Maps from field types to encoder constructors.
-TYPE_TO_ENCODER = {
-    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
-    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
-    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
-    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
-    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
-    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
-    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
-    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
-    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
-    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
-    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
-    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
-    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
-    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
-    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
-    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
-    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
-    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
-    }
-
-
-# Maps from field types to sizer constructors.
-TYPE_TO_SIZER = {
-    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
-    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
-    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
-    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
-    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
-    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
-    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
-    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
-    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
-    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
-    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
-    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
-    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
-    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
-    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
-    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
-    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
-    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
-    }
-
-
-# Maps from field type to a decoder constructor.
-TYPE_TO_DECODER = {
-    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
-    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
-    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
-    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
-    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
-    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
-    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
-    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
-    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
-    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
-    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
-    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
-    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
-    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
-    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
-    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
-    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
-    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
-    }
-
-# Maps from field type to expected wiretype.
-FIELD_TYPE_TO_WIRE_TYPE = {
-    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
-    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
-    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
-    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
-    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_STRING:
-      wire_format.WIRETYPE_LENGTH_DELIMITED,
-    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
-    _FieldDescriptor.TYPE_MESSAGE:
-      wire_format.WIRETYPE_LENGTH_DELIMITED,
-    _FieldDescriptor.TYPE_BYTES:
-      wire_format.WIRETYPE_LENGTH_DELIMITED,
-    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
-    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
-    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
-    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
-    }
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/well_known_types.py b/tools/swarming_client/third_party/google/protobuf/internal/well_known_types.py
deleted file mode 100644
index 7c5dffd..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/well_known_types.py
+++ /dev/null
@@ -1,724 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains well known classes.
-
-This file defines well known classes which need extra maintenance, including:
-  - Any
-  - Duration
-  - FieldMask
-  - Struct
-  - Timestamp
-"""
-
-__author__ = 'jieluo@google.com (Jie Luo)'
-
-from datetime import datetime
-from datetime import timedelta
-import six
-
-from google.protobuf.descriptor import FieldDescriptor
-
-_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
-_NANOS_PER_SECOND = 1000000000
-_NANOS_PER_MILLISECOND = 1000000
-_NANOS_PER_MICROSECOND = 1000
-_MILLIS_PER_SECOND = 1000
-_MICROS_PER_SECOND = 1000000
-_SECONDS_PER_DAY = 24 * 3600
-
-
-class Error(Exception):
-  """Top-level module error."""
-
-
-class ParseError(Error):
-  """Thrown in case of parsing error."""
-
-
-class Any(object):
-  """Class for Any Message type."""
-
-  def Pack(self, msg, type_url_prefix='type.googleapis.com/'):
-    """Packs the specified message into current Any message."""
-    if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
-      self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
-    else:
-      self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
-    self.value = msg.SerializeToString()
-
-  def Unpack(self, msg):
-    """Unpacks the current Any message into specified message."""
-    descriptor = msg.DESCRIPTOR
-    if not self.Is(descriptor):
-      return False
-    msg.ParseFromString(self.value)
-    return True
-
-  def TypeName(self):
-    """Returns the protobuf type name of the inner message."""
-    # Only last part is to be used: b/25630112
-    return self.type_url.split('/')[-1]
-
-  def Is(self, descriptor):
-    """Checks if this Any represents the given protobuf type."""
-    return self.TypeName() == descriptor.full_name
-
-
-class Timestamp(object):
-  """Class for Timestamp message type."""
-
-  def ToJsonString(self):
-    """Converts Timestamp to RFC 3339 date string format.
-
-    Returns:
-      A string converted from timestamp. The string is always Z-normalized
-      and uses 3, 6 or 9 fractional digits as required to represent the
-      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
-    """
-    nanos = self.nanos % _NANOS_PER_SECOND
-    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
-    seconds = total_sec % _SECONDS_PER_DAY
-    days = (total_sec - seconds) // _SECONDS_PER_DAY
-    dt = datetime(1970, 1, 1) + timedelta(days, seconds)
-
-    result = dt.isoformat()
-    if (nanos % 1e9) == 0:
-      # If there are 0 fractional digits, the fractional
-      # point '.' should be omitted when serializing.
-      return result + 'Z'
-    if (nanos % 1e6) == 0:
-      # Serialize 3 fractional digits.
-      return result + '.%03dZ' % (nanos / 1e6)
-    if (nanos % 1e3) == 0:
-      # Serialize 6 fractional digits.
-      return result + '.%06dZ' % (nanos / 1e3)
-    # Serialize 9 fractional digits.
-    return result + '.%09dZ' % nanos
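A usage sketch of the formatting rules above, via the generated Timestamp message (requires the protobuf runtime, which attaches these methods through WKTBASES at the end of this module; the printed values are the expected output per the code above):

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp(seconds=63108020, nanos=21000000)
    print(ts.ToJsonString())   # 1972-01-01T10:00:20.021Z (3 fractional digits)
    ts.nanos = 0
    print(ts.ToJsonString())   # 1972-01-01T10:00:20Z (fractional point omitted)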
-
-  def FromJsonString(self, value):
-    """Parse a RFC 3339 date string format to Timestamp.
-
-    Args:
-      value: A date string. Any fractional digits (or none) and any offset are
-          accepted as long as they fit into nanosecond precision.
-          Example of accepted format: '1972-01-01T10:00:20.021-05:00'
-
-    Raises:
-      ParseError: On parsing problems.
-    """
-    timezone_offset = value.find('Z')
-    if timezone_offset == -1:
-      timezone_offset = value.find('+')
-    if timezone_offset == -1:
-      timezone_offset = value.rfind('-')
-    if timezone_offset == -1:
-      raise ParseError(
-          'Failed to parse timestamp: missing valid timezone offset.')
-    time_value = value[0:timezone_offset]
-    # Parse datetime and nanos.
-    point_position = time_value.find('.')
-    if point_position == -1:
-      second_value = time_value
-      nano_value = ''
-    else:
-      second_value = time_value[:point_position]
-      nano_value = time_value[point_position + 1:]
-    date_object = datetime.strptime(second_value, _TIMESTAMPFOMAT)
-    td = date_object - datetime(1970, 1, 1)
-    seconds = td.seconds + td.days * _SECONDS_PER_DAY
-    if len(nano_value) > 9:
-      raise ParseError(
-          'Failed to parse Timestamp: nanos {0} more than '
-          '9 fractional digits.'.format(nano_value))
-    if nano_value:
-      nanos = round(float('0.' + nano_value) * 1e9)
-    else:
-      nanos = 0
-    # Parse timezone offsets.
-    if value[timezone_offset] == 'Z':
-      if len(value) != timezone_offset + 1:
-        raise ParseError('Failed to parse timestamp: invalid trailing'
-                         ' data {0}.'.format(value))
-    else:
-      timezone = value[timezone_offset:]
-      pos = timezone.find(':')
-      if pos == -1:
-        raise ParseError(
-            'Invalid timezone offset value: {0}.'.format(timezone))
-      if timezone[0] == '+':
-        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
-      else:
-        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
-    # Set seconds and nanos
-    self.seconds = int(seconds)
-    self.nanos = int(nanos)
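And the parsing direction, showing how an explicit offset is folded into UTC seconds (usage sketch, requires the protobuf runtime; outputs are the expected results per the code above):

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp()
    ts.FromJsonString('1972-01-01T10:00:20.021-05:00')
    print(ts.seconds, ts.nanos)   # 63126020 21000000
    print(ts.ToJsonString())      # 1972-01-01T15:00:20.021Z (normalized to UTC)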
-
-  def GetCurrentTime(self):
-    """Get the current UTC into Timestamp."""
-    self.FromDatetime(datetime.utcnow())
-
-  def ToNanoseconds(self):
-    """Converts Timestamp to nanoseconds since epoch."""
-    return self.seconds * _NANOS_PER_SECOND + self.nanos
-
-  def ToMicroseconds(self):
-    """Converts Timestamp to microseconds since epoch."""
-    return (self.seconds * _MICROS_PER_SECOND +
-            self.nanos // _NANOS_PER_MICROSECOND)
-
-  def ToMilliseconds(self):
-    """Converts Timestamp to milliseconds since epoch."""
-    return (self.seconds * _MILLIS_PER_SECOND +
-            self.nanos // _NANOS_PER_MILLISECOND)
-
-  def ToSeconds(self):
-    """Converts Timestamp to seconds since epoch."""
-    return self.seconds
-
-  def FromNanoseconds(self, nanos):
-    """Converts nanoseconds since epoch to Timestamp."""
-    self.seconds = nanos // _NANOS_PER_SECOND
-    self.nanos = nanos % _NANOS_PER_SECOND
-
-  def FromMicroseconds(self, micros):
-    """Converts microseconds since epoch to Timestamp."""
-    self.seconds = micros // _MICROS_PER_SECOND
-    self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
-
-  def FromMilliseconds(self, millis):
-    """Converts milliseconds since epoch to Timestamp."""
-    self.seconds = millis // _MILLIS_PER_SECOND
-    self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
-
-  def FromSeconds(self, seconds):
-    """Converts seconds since epoch to Timestamp."""
-    self.seconds = seconds
-    self.nanos = 0
-
-  def ToDatetime(self):
-    """Converts Timestamp to datetime."""
-    return datetime.utcfromtimestamp(
-        self.seconds + self.nanos / float(_NANOS_PER_SECOND))
-
-  def FromDatetime(self, dt):
-    """Converts datetime to Timestamp."""
-    td = dt - datetime(1970, 1, 1)
-    self.seconds = td.seconds + td.days * _SECONDS_PER_DAY
-    self.nanos = td.microseconds * _NANOS_PER_MICROSECOND
-
-
-class Duration(object):
-  """Class for Duration message type."""
-
-  def ToJsonString(self):
-    """Converts Duration to string format.
-
-    Returns:
-      A string converted from self. The string format will contain
-      3, 6, or 9 fractional digits depending on the precision required to
-      represent the exact Duration value. For example: "1s", "1.010s",
-      "1.000000100s", "-3.100s"
-    """
-    if self.seconds < 0 or self.nanos < 0:
-      result = '-'
-      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
-      nanos = (0 - self.nanos) % 1e9
-    else:
-      result = ''
-      seconds = self.seconds + int(self.nanos // 1e9)
-      nanos = self.nanos % 1e9
-    result += '%d' % seconds
-    if (nanos % 1e9) == 0:
-      # If there are 0 fractional digits, the fractional
-      # point '.' should be omitted when serializing.
-      return result + 's'
-    if (nanos % 1e6) == 0:
-      # Serialize 3 fractional digits.
-      return result + '.%03ds' % (nanos / 1e6)
-    if (nanos % 1e3) == 0:
-      # Serialize 6 fractional digits.
-      return result + '.%06ds' % (nanos / 1e3)
-    # Serialize 9 fractional digits.
-    return result + '.%09ds' % nanos
-
-  def FromJsonString(self, value):
-    """Converts a string to Duration.
-
-    Args:
-      value: A string to be converted. The string must end with 's'. Any
-          fractional digits (or none) are accepted as long as they fit into
-          precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s".
-
-    Raises:
-      ParseError: On parsing problems.
-    """
-    if len(value) < 1 or value[-1] != 's':
-      raise ParseError(
-          'Duration must end with letter "s": {0}.'.format(value))
-    try:
-      pos = value.find('.')
-      if pos == -1:
-        self.seconds = int(value[:-1])
-        self.nanos = 0
-      else:
-        self.seconds = int(value[:pos])
-        if value[0] == '-':
-          self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
-        else:
-          self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
-    except ValueError:
-      raise ParseError(
-          'Couldn\'t parse duration: {0}.'.format(value))
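A round-trip usage sketch for the Duration JSON format described above (requires the protobuf runtime; comments show the expected output per the code above):

    from google.protobuf import duration_pb2

    d = duration_pb2.Duration()
    d.FromJsonString('-3.100s')
    print(d.seconds, d.nanos)   # -3 -100000000 (nanos carry the sign too)
    print(d.ToJsonString())     # -3.100s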
-
-  def ToNanoseconds(self):
-    """Converts a Duration to nanoseconds."""
-    return self.seconds * _NANOS_PER_SECOND + self.nanos
-
-  def ToMicroseconds(self):
-    """Converts a Duration to microseconds."""
-    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
-    return self.seconds * _MICROS_PER_SECOND + micros
-
-  def ToMilliseconds(self):
-    """Converts a Duration to milliseconds."""
-    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
-    return self.seconds * _MILLIS_PER_SECOND + millis
-
-  def ToSeconds(self):
-    """Converts a Duration to seconds."""
-    return self.seconds
-
-  def FromNanoseconds(self, nanos):
-    """Converts nanoseconds to Duration."""
-    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
-                            nanos % _NANOS_PER_SECOND)
-
-  def FromMicroseconds(self, micros):
-    """Converts microseconds to Duration."""
-    self._NormalizeDuration(
-        micros // _MICROS_PER_SECOND,
-        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
-
-  def FromMilliseconds(self, millis):
-    """Converts milliseconds to Duration."""
-    self._NormalizeDuration(
-        millis // _MILLIS_PER_SECOND,
-        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
-
-  def FromSeconds(self, seconds):
-    """Converts seconds to Duration."""
-    self.seconds = seconds
-    self.nanos = 0
-
-  def ToTimedelta(self):
-    """Converts Duration to timedelta."""
-    return timedelta(
-        seconds=self.seconds, microseconds=_RoundTowardZero(
-            self.nanos, _NANOS_PER_MICROSECOND))
-
-  def FromTimedelta(self, td):
-    """Convertd timedelta to Duration."""
-    self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
-                            td.microseconds * _NANOS_PER_MICROSECOND)
-
-  def _NormalizeDuration(self, seconds, nanos):
-    """Set Duration by seconds and nonas."""
-    # Force nanos to be negative if the duration is negative.
-    if seconds < 0 and nanos > 0:
-      seconds += 1
-      nanos -= _NANOS_PER_SECOND
-    self.seconds = seconds
-    self.nanos = nanos
-
-
-def _RoundTowardZero(value, divider):
-  """Truncates the remainder part after division."""
-  # For some languages, the sign of the remainder is implementation
-  # dependent if any of the operands is negative. Here we enforce
-  # "rounded toward zero" semantics. For example, for (-5) / 2 an
-  # implementation may give -3 as the result with the remainder being
-  # 1. This function ensures we always return -2 (closer to zero).
-  result = value // divider
-  remainder = value % divider
-  if result < 0 and remainder > 0:
-    return result + 1
-  else:
-    return result
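The difference this helper papers over, spelled out in a standalone sketch: Python's floor division rounds toward negative infinity, while the conversions above need truncation toward zero.

    def round_toward_zero(value, divider):
      result, remainder = value // divider, value % divider
      if result < 0 and remainder > 0:
        return result + 1
      return result

    print(-5 // 2)                    # -3 (floor division)
    print(round_toward_zero(-5, 2))   # -2 (truncated toward zero)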
-
-
-class FieldMask(object):
-  """Class for FieldMask message type."""
-
-  def ToJsonString(self):
-    """Converts FieldMask to string according to proto3 JSON spec."""
-    return ','.join(self.paths)
-
-  def FromJsonString(self, value):
-    """Converts string to FieldMask according to proto3 JSON spec."""
-    self.Clear()
-    for path in value.split(','):
-      self.paths.append(path)
-
-  def IsValidForDescriptor(self, message_descriptor):
-    """Checks whether the FieldMask is valid for Message Descriptor."""
-    for path in self.paths:
-      if not _IsValidPath(message_descriptor, path):
-        return False
-    return True
-
-  def AllFieldsFromDescriptor(self, message_descriptor):
-    """Gets all direct fields of Message Descriptor to FieldMask."""
-    self.Clear()
-    for field in message_descriptor.fields:
-      self.paths.append(field.name)
-
-  def CanonicalFormFromMask(self, mask):
-    """Converts a FieldMask to the canonical form.
-
-    Removes paths that are covered by another path. For example,
-    "foo.bar" is covered by "foo" and will be removed if "foo"
-    is also in the FieldMask. Then sorts all paths in alphabetical order.
-
-    Args:
-      mask: The original FieldMask to be converted.
-    """
-    tree = _FieldMaskTree(mask)
-    tree.ToFieldMask(self)
-
-  def Union(self, mask1, mask2):
-    """Merges mask1 and mask2 into this FieldMask."""
-    _CheckFieldMaskMessage(mask1)
-    _CheckFieldMaskMessage(mask2)
-    tree = _FieldMaskTree(mask1)
-    tree.MergeFromFieldMask(mask2)
-    tree.ToFieldMask(self)
-
-  def Intersect(self, mask1, mask2):
-    """Intersects mask1 and mask2 into this FieldMask."""
-    _CheckFieldMaskMessage(mask1)
-    _CheckFieldMaskMessage(mask2)
-    tree = _FieldMaskTree(mask1)
-    intersection = _FieldMaskTree()
-    for path in mask2.paths:
-      tree.IntersectPath(path, intersection)
-    intersection.ToFieldMask(self)
-
-  def MergeMessage(
-      self, source, destination,
-      replace_message_field=False, replace_repeated_field=False):
-    """Merges fields specified in FieldMask from source to destination.
-
-    Args:
-      source: Source message.
-      destination: The destination message to be merged into.
-      replace_message_field: Replace message field if True. Merge message
-          field if False.
-      replace_repeated_field: Replace repeated field if True. Append
-          elements of repeated field if False.
-    """
-    tree = _FieldMaskTree(self)
-    tree.MergeMessage(
-        source, destination, replace_message_field, replace_repeated_field)
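A usage sketch of the canonicalization described above, via the generated FieldMask message (requires the protobuf runtime; the comment shows the expected output per the code above):

    from google.protobuf import field_mask_pb2

    mask = field_mask_pb2.FieldMask()
    mask.FromJsonString('foo.bar,foo,bar.baz')
    canonical = field_mask_pb2.FieldMask()
    canonical.CanonicalFormFromMask(mask)
    print(canonical.ToJsonString())   # bar.baz,foo ("foo.bar" is covered by "foo")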
-
-
-def _IsValidPath(message_descriptor, path):
-  """Checks whether the path is valid for Message Descriptor."""
-  parts = path.split('.')
-  last = parts.pop()
-  for name in parts:
-    field = message_descriptor.fields_by_name[name]
-    if (field is None or
-        field.label == FieldDescriptor.LABEL_REPEATED or
-        field.type != FieldDescriptor.TYPE_MESSAGE):
-      return False
-    message_descriptor = field.message_type
-  return last in message_descriptor.fields_by_name
-
-
-def _CheckFieldMaskMessage(message):
-  """Raises ValueError if message is not a FieldMask."""
-  message_descriptor = message.DESCRIPTOR
-  if (message_descriptor.name != 'FieldMask' or
-      message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
-    raise ValueError('Message {0} is not a FieldMask.'.format(
-        message_descriptor.full_name))
-
-
-class _FieldMaskTree(object):
-  """Represents a FieldMask in a tree structure.
-
-  For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
-  the FieldMaskTree will be:
-      [_root] -+- foo -+- bar
-               |       |
-               |       +- baz
-               |
-               +- bar --- baz
-  In the tree, each leaf node represents a field path.
-  """
-
-  def __init__(self, field_mask=None):
-    """Initializes the tree by FieldMask."""
-    self._root = {}
-    if field_mask:
-      self.MergeFromFieldMask(field_mask)
-
-  def MergeFromFieldMask(self, field_mask):
-    """Merges a FieldMask to the tree."""
-    for path in field_mask.paths:
-      self.AddPath(path)
-
-  def AddPath(self, path):
-    """Adds a field path into the tree.
-
-    If the field path to add is a sub-path of an existing field path
-    in the tree (i.e., a leaf node), it means the tree already matches
-    the given path so nothing will be added to the tree. If the path
-    matches an existing non-leaf node in the tree, that non-leaf node
-    will be turned into a leaf node with all its children removed because
-    the path matches all the node's children. Otherwise, a new path will
-    be added.
-
-    Args:
-      path: The field path to add.
-    """
-    node = self._root
-    for name in path.split('.'):
-      if name not in node:
-        node[name] = {}
-      elif not node[name]:
-        # Pre-existing empty node implies we already have this entire tree.
-        return
-      node = node[name]
-    # Remove any sub-trees we might have had.
-    node.clear()
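The nested-dict trie that AddPath() maintains, as a standalone sketch (add_path mirrors the method above; an empty dict marks a leaf, i.e. a complete path):

    root = {}

    def add_path(node, path):
      for name in path.split('.'):
        if name not in node:
          node[name] = {}
        elif not node[name]:
          return          # an existing leaf already covers this path
        node = node[name]
      node.clear()        # this path now covers any previously added children

    add_path(root, 'foo.bar')
    add_path(root, 'foo')       # collapses the 'foo' subtree into a leaf
    add_path(root, 'bar.baz')
    print(root)                 # {'foo': {}, 'bar': {'baz': {}}}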
-
-  def ToFieldMask(self, field_mask):
-    """Converts the tree to a FieldMask."""
-    field_mask.Clear()
-    _AddFieldPaths(self._root, '', field_mask)
-
-  def IntersectPath(self, path, intersection):
-    """Calculates the intersection part of a field path with this tree.
-
-    Args:
-      path: The field path to calculate the intersection for.
-      intersection: The out tree to record the intersection part.
-    """
-    node = self._root
-    for name in path.split('.'):
-      if name not in node:
-        return
-      elif not node[name]:
-        intersection.AddPath(path)
-        return
-      node = node[name]
-    intersection.AddLeafNodes(path, node)
-
-  def AddLeafNodes(self, prefix, node):
-    """Adds leaf nodes begin with prefix to this tree."""
-    if not node:
-      self.AddPath(prefix)
-    for name in node:
-      child_path = prefix + '.' + name
-      self.AddLeafNodes(child_path, node[name])
-
-  def MergeMessage(
-      self, source, destination,
-      replace_message, replace_repeated):
-    """Merge all fields specified by this tree from source to destination."""
-    _MergeMessage(
-        self._root, source, destination, replace_message, replace_repeated)
-
-
-def _StrConvert(value):
-  """Converts value to str if it is not."""
-  # This file is imported by the C extension, and some methods like ClearField
-  # require a string for the field name. Python 2 and 3 have different text
-  # types and may use unicode.
-  if not isinstance(value, str):
-    return value.encode('utf-8')
-  return value
-
-
-def _MergeMessage(
-    node, source, destination, replace_message, replace_repeated):
-  """Merge all fields specified by a sub-tree from source to destination."""
-  source_descriptor = source.DESCRIPTOR
-  for name in node:
-    child = node[name]
-    field = source_descriptor.fields_by_name[name]
-    if field is None:
-      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
-          name, source_descriptor.full_name))
-    if child:
-      # Sub-paths are only allowed for singular message fields.
-      if (field.label == FieldDescriptor.LABEL_REPEATED or
-          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
-        raise ValueError('Error: Field {0} in message {1} is not a singular '
-                         'message field and cannot have sub-fields.'.format(
-                             name, source_descriptor.full_name))
-      _MergeMessage(
-          child, getattr(source, name), getattr(destination, name),
-          replace_message, replace_repeated)
-      continue
-    if field.label == FieldDescriptor.LABEL_REPEATED:
-      if replace_repeated:
-        destination.ClearField(_StrConvert(name))
-      repeated_source = getattr(source, name)
-      repeated_destination = getattr(destination, name)
-      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
-        for item in repeated_source:
-          repeated_destination.add().MergeFrom(item)
-      else:
-        repeated_destination.extend(repeated_source)
-    else:
-      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
-        if replace_message:
-          destination.ClearField(_StrConvert(name))
-        if source.HasField(name):
-          getattr(destination, name).MergeFrom(getattr(source, name))
-      else:
-        setattr(destination, name, getattr(source, name))
-
-
-def _AddFieldPaths(node, prefix, field_mask):
-  """Adds the field paths descended from node to field_mask."""
-  if not node:
-    field_mask.paths.append(prefix)
-    return
-  for name in sorted(node):
-    if prefix:
-      child_path = prefix + '.' + name
-    else:
-      child_path = name
-    _AddFieldPaths(node[name], child_path, field_mask)
-
-
-_INT_OR_FLOAT = six.integer_types + (float,)
-
-
-def _SetStructValue(struct_value, value):
-  if value is None:
-    struct_value.null_value = 0
-  elif isinstance(value, bool):
-    # Note: this check must come before the number check because in Python
-    # True and False are also considered numbers.
-    struct_value.bool_value = value
-  elif isinstance(value, six.string_types):
-    struct_value.string_value = value
-  elif isinstance(value, _INT_OR_FLOAT):
-    struct_value.number_value = value
-  else:
-    raise ValueError('Unexpected type')
-
-
-def _GetStructValue(struct_value):
-  which = struct_value.WhichOneof('kind')
-  if which == 'struct_value':
-    return struct_value.struct_value
-  elif which == 'null_value':
-    return None
-  elif which == 'number_value':
-    return struct_value.number_value
-  elif which == 'string_value':
-    return struct_value.string_value
-  elif which == 'bool_value':
-    return struct_value.bool_value
-  elif which == 'list_value':
-    return struct_value.list_value
-  elif which is None:
-    raise ValueError('Value not set')
-
-
-class Struct(object):
-  """Class for Struct message type."""
-
-  __slots__ = []
-
-  def __getitem__(self, key):
-    return _GetStructValue(self.fields[key])
-
-  def __setitem__(self, key, value):
-    _SetStructValue(self.fields[key], value)
-
-  def get_or_create_list(self, key):
-    """Returns a list for this key, creating if it didn't exist already."""
-    return self.fields[key].list_value
-
-  def get_or_create_struct(self, key):
-    """Returns a struct for this key, creating if it didn't exist already."""
-    return self.fields[key].struct_value
-
-  # TODO(haberman): allow constructing/merging from dict.
-
-
-class ListValue(object):
-  """Class for ListValue message type."""
-
-  def __len__(self):
-    return len(self.values)
-
-  def append(self, value):
-    _SetStructValue(self.values.add(), value)
-
-  def extend(self, elem_seq):
-    for value in elem_seq:
-      self.append(value)
-
-  def __getitem__(self, index):
-    """Retrieves item by the specified index."""
-    return _GetStructValue(self.values.__getitem__(index))
-
-  def __setitem__(self, index, value):
-    _SetStructValue(self.values.__getitem__(index), value)
-
-  def items(self):
-    for i in range(len(self)):
-      yield self[i]
-
-  def add_struct(self):
-    """Appends and returns a struct value as the next value in the list."""
-    return self.values.add().struct_value
-
-  def add_list(self):
-    """Appends and returns a list value as the next value in the list."""
-    return self.values.add().list_value
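A usage sketch of the Struct and ListValue helpers above, via the generated struct_pb2 messages (requires the protobuf runtime; note that numbers come back as floats because Value only carries number_value):

    from google.protobuf import struct_pb2

    s = struct_pb2.Struct()
    s['name'] = 'v8'
    s['answer'] = 42
    tags = s.get_or_create_list('tags')
    tags.extend(['js', 'wasm'])
    print(s['answer'])          # 42.0
    print(list(tags.items()))   # ['js', 'wasm']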
-
-
-WKTBASES = {
-    'google.protobuf.Any': Any,
-    'google.protobuf.Duration': Duration,
-    'google.protobuf.FieldMask': FieldMask,
-    'google.protobuf.ListValue': ListValue,
-    'google.protobuf.Struct': Struct,
-    'google.protobuf.Timestamp': Timestamp,
-}
diff --git a/tools/swarming_client/third_party/google/protobuf/internal/wire_format.py b/tools/swarming_client/third_party/google/protobuf/internal/wire_format.py
deleted file mode 100644
index 883f525..0000000
--- a/tools/swarming_client/third_party/google/protobuf/internal/wire_format.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Constants and static functions to support protocol buffer wire format."""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import struct
-from google.protobuf import descriptor
-from google.protobuf import message
-
-
-TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
-TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
-
-# These numbers identify the wire type of a protocol buffer value.
-# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
-# tag-and-type to store one of these WIRETYPE_* constants.
-# These values must match WireType enum in google/protobuf/wire_format.h.
-WIRETYPE_VARINT = 0
-WIRETYPE_FIXED64 = 1
-WIRETYPE_LENGTH_DELIMITED = 2
-WIRETYPE_START_GROUP = 3
-WIRETYPE_END_GROUP = 4
-WIRETYPE_FIXED32 = 5
-_WIRETYPE_MAX = 5
-
-
-# Bounds for various integer types.
-INT32_MAX = int((1 << 31) - 1)
-INT32_MIN = int(-(1 << 31))
-UINT32_MAX = (1 << 32) - 1
-
-INT64_MAX = (1 << 63) - 1
-INT64_MIN = -(1 << 63)
-UINT64_MAX = (1 << 64) - 1
-
-# "struct" format strings that will encode/decode the specified formats.
-FORMAT_UINT32_LITTLE_ENDIAN = '<I'
-FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
-FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
-FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
-
-
-# We'll have to provide alternate implementations of AppendLittleEndian*() on
-# any architectures where these checks fail.
-if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
-  raise AssertionError('Format "I" is not a 32-bit number.')
-if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
-  raise AssertionError('Format "Q" is not a 64-bit number.')
-
-
-def PackTag(field_number, wire_type):
-  """Returns an unsigned 32-bit integer that encodes the field number and
-  wire type information in standard protocol message wire format.
-
-  Args:
-    field_number: Expected to be an integer in the range [1, 1 << 29)
-    wire_type: One of the WIRETYPE_* constants.
-  """
-  if not 0 <= wire_type <= _WIRETYPE_MAX:
-    raise message.EncodeError('Unknown wire type: %d' % wire_type)
-  return (field_number << TAG_TYPE_BITS) | wire_type
-
-
-def UnpackTag(tag):
-  """The inverse of PackTag().  Given an unsigned 32-bit number,
-  returns a (field_number, wire_type) tuple.
-  """
-  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
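Tag packing in miniature, as a standalone sketch of the bit layout described above (pack_tag/unpack_tag are illustrative names):

    TAG_TYPE_BITS = 3

    def pack_tag(field_number, wire_type):
      return (field_number << TAG_TYPE_BITS) | wire_type

    def unpack_tag(tag):
      return tag >> TAG_TYPE_BITS, tag & ((1 << TAG_TYPE_BITS) - 1)

    print(pack_tag(2, 0))    # 16: field 2, wire type 0 (varint)
    print(unpack_tag(16))    # (2, 0)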
-
-
-def ZigZagEncode(value):
-  """ZigZag Transform:  Encodes signed integers so that they can be
-  effectively used with varint encoding.  See wire_format.h for
-  more details.
-  """
-  if value >= 0:
-    return value << 1
-  return (value << 1) ^ (~0)
-
-
-def ZigZagDecode(value):
-  """Inverse of ZigZagEncode()."""
-  if not value & 0x1:
-    return value >> 1
-  return (value >> 1) ^ (~0)
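ZigZag in action: small negative values map to small unsigned values, so their varint encodings stay short (a standalone copy of the transform above):

    def zigzag_encode(value):
      if value >= 0:
        return value << 1
      return (value << 1) ^ (~0)

    def zigzag_decode(value):
      if not value & 0x1:
        return value >> 1
      return (value >> 1) ^ (~0)

    for v in (0, -1, 1, -2, 2):
      print(v, '->', zigzag_encode(v))    # 0->0, -1->1, 1->2, -2->3, 2->4
    print(zigzag_decode(zigzag_encode(-123456789)))   # -123456789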
-
-
-
-# The *ByteSize() functions below return the number of bytes required to
-# serialize "field number + type" information and then serialize the value.
-
-
-def Int32ByteSize(field_number, int32):
-  return Int64ByteSize(field_number, int32)
-
-
-def Int32ByteSizeNoTag(int32):
-  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
-
-
-def Int64ByteSize(field_number, int64):
-  # Have to convert to uint before calling UInt64ByteSize().
-  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
-
-
-def UInt32ByteSize(field_number, uint32):
-  return UInt64ByteSize(field_number, uint32)
-
-
-def UInt64ByteSize(field_number, uint64):
-  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
-
-
-def SInt32ByteSize(field_number, int32):
-  return UInt32ByteSize(field_number, ZigZagEncode(int32))
-
-
-def SInt64ByteSize(field_number, int64):
-  return UInt64ByteSize(field_number, ZigZagEncode(int64))
-
-
-def Fixed32ByteSize(field_number, fixed32):
-  return TagByteSize(field_number) + 4
-
-
-def Fixed64ByteSize(field_number, fixed64):
-  return TagByteSize(field_number) + 8
-
-
-def SFixed32ByteSize(field_number, sfixed32):
-  return TagByteSize(field_number) + 4
-
-
-def SFixed64ByteSize(field_number, sfixed64):
-  return TagByteSize(field_number) + 8
-
-
-def FloatByteSize(field_number, flt):
-  return TagByteSize(field_number) + 4
-
-
-def DoubleByteSize(field_number, double):
-  return TagByteSize(field_number) + 8
-
-
-def BoolByteSize(field_number, b):
-  return TagByteSize(field_number) + 1
-
-
-def EnumByteSize(field_number, enum):
-  return UInt32ByteSize(field_number, enum)
-
-
-def StringByteSize(field_number, string):
-  return BytesByteSize(field_number, string.encode('utf-8'))
-
-
-def BytesByteSize(field_number, b):
-  return (TagByteSize(field_number)
-          + _VarUInt64ByteSizeNoTag(len(b))
-          + len(b))
-
-
-def GroupByteSize(field_number, message):
-  return (2 * TagByteSize(field_number)  # START and END group.
-          + message.ByteSize())
-
-
-def MessageByteSize(field_number, message):
-  return (TagByteSize(field_number)
-          + _VarUInt64ByteSizeNoTag(message.ByteSize())
-          + message.ByteSize())
-
-
-def MessageSetItemByteSize(field_number, msg):
-  # First compute the sizes of the tags.
-  # There are 2 tags for the beginning and ending of the repeated group, that
-  # is field number 1, one with field number 2 (type_id) and one with field
-  # number 3 (message).
-  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
-
-  # Add the number of bytes for type_id.
-  total_size += _VarUInt64ByteSizeNoTag(field_number)
-
-  message_size = msg.ByteSize()
-
-  # The number of bytes for encoding the length of the message.
-  total_size += _VarUInt64ByteSizeNoTag(message_size)
-
-  # The size of the message.
-  total_size += message_size
-  return total_size
-
-
-def TagByteSize(field_number):
-  """Returns the bytes required to serialize a tag with this field number."""
-  # Just pass in type 0, since the type won't affect the tag+type size.
-  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
-
-
-# Private helper function for the *ByteSize() functions above.
-
-def _VarUInt64ByteSizeNoTag(uint64):
-  """Returns the number of bytes required to serialize a single varint
-  using boundary value comparisons. (unrolled loop optimization -WPierce)
-  uint64 must be unsigned.
-  """
-  if uint64 <= 0x7f: return 1
-  if uint64 <= 0x3fff: return 2
-  if uint64 <= 0x1fffff: return 3
-  if uint64 <= 0xfffffff: return 4
-  if uint64 <= 0x7ffffffff: return 5
-  if uint64 <= 0x3ffffffffff: return 6
-  if uint64 <= 0x1ffffffffffff: return 7
-  if uint64 <= 0xffffffffffffff: return 8
-  if uint64 <= 0x7fffffffffffffff: return 9
-  if uint64 > UINT64_MAX:
-    raise message.EncodeError('Value out of range: %d' % uint64)
-  return 10
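The boundary values in the unrolled comparisons above are just 2**(7*n) - 1; a loop-based standalone check (varint_byte_size is an illustrative helper):

    def varint_byte_size(uint64):
      size = 1
      while uint64 > 0x7f:
        uint64 >>= 7
        size += 1
      return size

    print(varint_byte_size(0x7f), varint_byte_size(0x80))              # 1 2
    print(varint_byte_size((1 << 63) - 1), varint_byte_size(1 << 63))  # 9 10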
-
-
-NON_PACKABLE_TYPES = (
-  descriptor.FieldDescriptor.TYPE_STRING,
-  descriptor.FieldDescriptor.TYPE_GROUP,
-  descriptor.FieldDescriptor.TYPE_MESSAGE,
-  descriptor.FieldDescriptor.TYPE_BYTES
-)
-
-
-def IsTypePackable(field_type):
-  """Return true iff packable = true is valid for fields of this type.
-
-  Args:
-    field_type: a FieldDescriptor::Type value.
-
-  Returns:
-    True iff fields of this type are packable.
-  """
-  return field_type not in NON_PACKABLE_TYPES
diff --git a/tools/swarming_client/third_party/google/protobuf/json_format.py b/tools/swarming_client/third_party/google/protobuf/json_format.py
deleted file mode 100644
index bb6a199..0000000
--- a/tools/swarming_client/third_party/google/protobuf/json_format.py
+++ /dev/null
@@ -1,664 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains routines for printing protocol messages in JSON format.
-
-Simple usage example:
-
-  # Create a proto object and serialize it to a json format string.
-  message = my_proto_pb2.MyMessage(foo='bar')
-  json_string = json_format.MessageToJson(message)
-
-  # Parse a json format string to proto object.
-  message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
-"""
-
-__author__ = 'jieluo@google.com (Jie Luo)'
-
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict  #PY26
-import base64
-import json
-import math
-import re
-import six
-import sys
-
-from operator import methodcaller
-from google.protobuf import descriptor
-from google.protobuf import symbol_database
-
-_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
-_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
-                        descriptor.FieldDescriptor.CPPTYPE_UINT32,
-                        descriptor.FieldDescriptor.CPPTYPE_INT64,
-                        descriptor.FieldDescriptor.CPPTYPE_UINT64])
-_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
-                          descriptor.FieldDescriptor.CPPTYPE_UINT64])
-_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
-                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
-_INFINITY = 'Infinity'
-_NEG_INFINITY = '-Infinity'
-_NAN = 'NaN'
-
-_UNPAIRED_SURROGATE_PATTERN = re.compile(six.u(
-    r'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
-))
-
-class Error(Exception):
-  """Top-level module error for json_format."""
-
-
-class SerializeToJsonError(Error):
-  """Thrown if serialization to JSON fails."""
-
-
-class ParseError(Error):
-  """Thrown in case of parsing error."""
-
-
-def MessageToJson(message, including_default_value_fields=False):
-  """Converts protobuf message to JSON format.
-
-  Args:
-    message: The protocol buffers message instance to serialize.
-    including_default_value_fields: If True, singular primitive fields,
-        repeated fields, and map fields will always be serialized.  If
-        False, only serialize non-empty fields.  Singular message fields
-        and oneof fields are not affected by this option.
-
-  Returns:
-    A string containing the JSON formatted protocol buffer message.
-  """
-  printer = _Printer(including_default_value_fields)
-  return printer.ToJsonString(message)
-
-
-def _IsMapEntry(field):
-  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
-          field.message_type.has_options and
-          field.message_type.GetOptions().map_entry)
-
-
-class _Printer(object):
-  """JSON format printer for protocol message."""
-
-  def __init__(self,
-               including_default_value_fields=False):
-    self.including_default_value_fields = including_default_value_fields
-
-  def ToJsonString(self, message):
-    js = self._MessageToJsonObject(message)
-    return json.dumps(js, indent=2)
-
-  def _MessageToJsonObject(self, message):
-    """Converts message to an object according to Proto3 JSON Specification."""
-    message_descriptor = message.DESCRIPTOR
-    full_name = message_descriptor.full_name
-    if _IsWrapperMessage(message_descriptor):
-      return self._WrapperMessageToJsonObject(message)
-    if full_name in _WKTJSONMETHODS:
-      return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
-    js = {}
-    return self._RegularMessageToJsonObject(message, js)
-
-  def _RegularMessageToJsonObject(self, message, js):
-    """Converts normal message according to Proto3 JSON Specification."""
-    fields = message.ListFields()
-
-    try:
-      for field, value in fields:
-        name = field.camelcase_name
-        if _IsMapEntry(field):
-          # Convert a map field.
-          v_field = field.message_type.fields_by_name['value']
-          js_map = {}
-          for key in value:
-            if isinstance(key, bool):
-              if key:
-                recorded_key = 'true'
-              else:
-                recorded_key = 'false'
-            else:
-              recorded_key = key
-            js_map[recorded_key] = self._FieldToJsonObject(
-                v_field, value[key])
-          js[name] = js_map
-        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-          # Convert a repeated field.
-          js[name] = [self._FieldToJsonObject(field, k)
-                      for k in value]
-        else:
-          js[name] = self._FieldToJsonObject(field, value)
-
-      # Serialize default value if including_default_value_fields is True.
-      if self.including_default_value_fields:
-        message_descriptor = message.DESCRIPTOR
-        for field in message_descriptor.fields:
-          # Singular message fields and oneof fields will not be affected.
-          if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
-               field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
-              field.containing_oneof):
-            continue
-          name = field.camelcase_name
-          if name in js:
-            # Skip the field which has been serialized already.
-            continue
-          if _IsMapEntry(field):
-            js[name] = {}
-          elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-            js[name] = []
-          else:
-            js[name] = self._FieldToJsonObject(field, field.default_value)
-
-    except ValueError as e:
-      raise SerializeToJsonError(
-          'Failed to serialize {0} field: {1}.'.format(field.name, e))
-
-    return js
-
-  def _FieldToJsonObject(self, field, value):
-    """Converts field value according to Proto3 JSON Specification."""
-    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-      return self._MessageToJsonObject(value)
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
-      enum_value = field.enum_type.values_by_number.get(value, None)
-      if enum_value is not None:
-        return enum_value.name
-      else:
-        raise SerializeToJsonError('Enum field contains an integer value '
-                                   'which cannot be mapped to an enum value.')
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
-      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
-        # Use base64 Data encoding for bytes
-        return base64.b64encode(value).decode('utf-8')
-      else:
-        return value
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
-      return bool(value)
-    elif field.cpp_type in _INT64_TYPES:
-      return str(value)
-    elif field.cpp_type in _FLOAT_TYPES:
-      if math.isinf(value):
-        if value < 0.0:
-          return _NEG_INFINITY
-        else:
-          return _INFINITY
-      if math.isnan(value):
-        return _NAN
-    return value
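Two of the representation rules above, demonstrated with wrapper messages (requires the protobuf runtime; comments show the expected output per the code above): 64-bit integers become JSON strings, and bytes are base64-encoded.

    from google.protobuf import json_format
    from google.protobuf import wrappers_pb2

    print(json_format.MessageToJson(wrappers_pb2.Int64Value(value=1)))
    # "1"     (int64 serialized as a JSON string)
    print(json_format.MessageToJson(wrappers_pb2.BytesValue(value=b'\x00\x01')))
    # "AAE="  (bytes serialized as base64)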
-
-  def _AnyMessageToJsonObject(self, message):
-    """Converts Any message according to Proto3 JSON Specification."""
-    if not message.ListFields():
-      return {}
-    # Must print @type first, use OrderedDict instead of {}
-    js = OrderedDict()
-    type_url = message.type_url
-    js['@type'] = type_url
-    sub_message = _CreateMessageFromTypeUrl(type_url)
-    sub_message.ParseFromString(message.value)
-    message_descriptor = sub_message.DESCRIPTOR
-    full_name = message_descriptor.full_name
-    if _IsWrapperMessage(message_descriptor):
-      js['value'] = self._WrapperMessageToJsonObject(sub_message)
-      return js
-    if full_name in _WKTJSONMETHODS:
-      js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0],
-                                 sub_message)(self)
-      return js
-    return self._RegularMessageToJsonObject(sub_message, js)
-
-  def _GenericMessageToJsonObject(self, message):
-    """Converts message according to Proto3 JSON Specification."""
-    # Duration, Timestamp and FieldMask have ToJsonString method to do the
-    # convert. Users can also call the method directly.
-    return message.ToJsonString()
-
-  def _ValueMessageToJsonObject(self, message):
-    """Converts Value message according to Proto3 JSON Specification."""
-    which = message.WhichOneof('kind')
-    # If the Value message is not set, treat it as null_value when serializing
-    # to JSON. The parsed-back result will differ from the original message.
-    if which is None or which == 'null_value':
-      return None
-    if which == 'list_value':
-      return self._ListValueMessageToJsonObject(message.list_value)
-    if which == 'struct_value':
-      value = message.struct_value
-    else:
-      value = getattr(message, which)
-    oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
-    return self._FieldToJsonObject(oneof_descriptor, value)
-
-  def _ListValueMessageToJsonObject(self, message):
-    """Converts ListValue message according to Proto3 JSON Specification."""
-    return [self._ValueMessageToJsonObject(value)
-            for value in message.values]
-
-  def _StructMessageToJsonObject(self, message):
-    """Converts Struct message according to Proto3 JSON Specification."""
-    fields = message.fields
-    ret = {}
-    for key in fields:
-      ret[key] = self._ValueMessageToJsonObject(fields[key])
-    return ret
-
-  def _WrapperMessageToJsonObject(self, message):
-    return self._FieldToJsonObject(
-        message.DESCRIPTOR.fields_by_name['value'], message.value)
-
-
-def _IsWrapperMessage(message_descriptor):
-  return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
-
-
-def _DuplicateChecker(js):
-  result = {}
-  for name, value in js:
-    if name in result:
-      raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
-    result[name] = value
-  return result
-
-
-def _CreateMessageFromTypeUrl(type_url):
-  # TODO(jieluo): Should add a way that users can register the type resolver
-  # instead of the default one.
-  db = symbol_database.Default()
-  type_name = type_url.split('/')[-1]
-  try:
-    message_descriptor = db.pool.FindMessageTypeByName(type_name)
-  except KeyError:
-    raise TypeError(
-        'Can not find message descriptor by type_url: {0}.'.format(type_url))
-  message_class = db.GetPrototype(message_descriptor)
-  return message_class()
-
-
-def Parse(text, message, ignore_unknown_fields=False):
-  """Parses a JSON representation of a protocol message into a message.
-
-  Args:
-    text: Message JSON representation.
-    message: A protocol buffer message to merge into.
-    ignore_unknown_fields: If True, do not raise errors for unknown fields.
-
-  Returns:
-    The same message passed as argument.
-
-  Raises:
-    ParseError: On JSON parsing problems.
-  """
-  if not isinstance(text, six.text_type): text = text.decode('utf-8')
-  try:
-    if sys.version_info < (2, 7):
-      # object_pair_hook is not supported before python2.7
-      js = json.loads(text)
-    else:
-      js = json.loads(text, object_pairs_hook=_DuplicateChecker)
-  except ValueError as e:
-    raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
-  parser = _Parser(ignore_unknown_fields)
-  parser.ConvertMessage(js, message)
-  return message
-
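For reference, a minimal usage sketch of the Parse() entry point removed above; it assumes the bundled google.protobuf package and uses the Struct well-known type purely for illustration (not part of the deleted file):

# Hedged sketch: exercises json_format.Parse on a well-known type.
# struct_pb2.Struct is chosen only because it needs no custom .proto.
from google.protobuf import json_format
from google.protobuf import struct_pb2

msg = struct_pb2.Struct()
json_format.Parse('{"name": "d8", "optimized": true}', msg)
assert msg.fields['name'].string_value == 'd8'
assert msg.fields['optimized'].bool_value is True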
-
-_INT_OR_FLOAT = six.integer_types + (float,)
-
-
-class _Parser(object):
-  """JSON format parser for protocol message."""
-
-  def __init__(self,
-               ignore_unknown_fields):
-    self.ignore_unknown_fields = ignore_unknown_fields
-
-  def ConvertMessage(self, value, message):
-    """Convert a JSON object into a message.
-
-    Args:
-      value: A JSON object.
-      message: A WKT or regular protocol message to record the data.
-
-    Raises:
-      ParseError: In case of conversion problems.
-    """
-    message_descriptor = message.DESCRIPTOR
-    full_name = message_descriptor.full_name
-    if _IsWrapperMessage(message_descriptor):
-      self._ConvertWrapperMessage(value, message)
-    elif full_name in _WKTJSONMETHODS:
-      methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self)
-    else:
-      self._ConvertFieldValuePair(value, message)
-
-  def _ConvertFieldValuePair(self, js, message):
-    """Convert field value pairs into regular message.
-
-    Args:
-      js: A JSON object to convert the field value pairs.
-      message: A regular protocol message to record the data.
-
-    Raises:
-      ParseError: In case of problems converting.
-    """
-    names = []
-    message_descriptor = message.DESCRIPTOR
-    for name in js:
-      try:
-        field = message_descriptor.fields_by_camelcase_name.get(name, None)
-        if not field:
-          if self.ignore_unknown_fields:
-            continue
-          raise ParseError(
-              'Message type "{0}" has no field named "{1}".'.format(
-                  message_descriptor.full_name, name))
-        if name in names:
-          raise ParseError('Message type "{0}" should not have multiple '
-                           '"{1}" fields.'.format(
-                               message.DESCRIPTOR.full_name, name))
-        names.append(name)
-        # Check no other oneof field is parsed.
-        if field.containing_oneof is not None:
-          oneof_name = field.containing_oneof.name
-          if oneof_name in names:
-            raise ParseError('Message type "{0}" should not have multiple '
-                             '"{1}" oneof fields.'.format(
-                                 message.DESCRIPTOR.full_name, oneof_name))
-          names.append(oneof_name)
-
-        value = js[name]
-        if value is None:
-          message.ClearField(field.name)
-          continue
-
-        # Parse field value.
-        if _IsMapEntry(field):
-          message.ClearField(field.name)
-          self._ConvertMapFieldValue(value, message, field)
-        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-          message.ClearField(field.name)
-          if not isinstance(value, list):
-            raise ParseError('repeated field {0} must be in [] which is '
-                             '{1}.'.format(name, value))
-          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-            # Repeated message field.
-            for item in value:
-              sub_message = getattr(message, field.name).add()
-              # None is a null_value in Value.
-              if (item is None and
-                  sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
-                raise ParseError('null is not allowed to be used as an element'
-                                 ' in a repeated field.')
-              self.ConvertMessage(item, sub_message)
-          else:
-            # Repeated scalar field.
-            for item in value:
-              if item is None:
-                raise ParseError('null is not allowed to be used as an element'
-                                 ' in a repeated field.')
-              getattr(message, field.name).append(
-                  _ConvertScalarFieldValue(item, field))
-        elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-          sub_message = getattr(message, field.name)
-          self.ConvertMessage(value, sub_message)
-        else:
-          setattr(message, field.name, _ConvertScalarFieldValue(value, field))
-      except ParseError as e:
-        if field and field.containing_oneof is None:
-          raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
-        else:
-          raise ParseError(str(e))
-      except ValueError as e:
-        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
-      except TypeError as e:
-        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
-
-  def _ConvertAnyMessage(self, value, message):
-    """Convert a JSON representation into Any message."""
-    if isinstance(value, dict) and not value:
-      return
-    try:
-      type_url = value['@type']
-    except KeyError:
-      raise ParseError('@type is missing when parsing any message.')
-
-    sub_message = _CreateMessageFromTypeUrl(type_url)
-    message_descriptor = sub_message.DESCRIPTOR
-    full_name = message_descriptor.full_name
-    if _IsWrapperMessage(message_descriptor):
-      self._ConvertWrapperMessage(value['value'], sub_message)
-    elif full_name in _WKTJSONMETHODS:
-      methodcaller(
-          _WKTJSONMETHODS[full_name][1], value['value'], sub_message)(self)
-    else:
-      del value['@type']
-      self._ConvertFieldValuePair(value, sub_message)
-    # Sets Any message
-    message.value = sub_message.SerializeToString()
-    message.type_url = type_url
-
-  def _ConvertGenericMessage(self, value, message):
-    """Convert a JSON representation into message with FromJsonString."""
-    # Durantion, Timestamp, FieldMask have FromJsonString method to do the
-    # convert. Users can also call the method directly.
-    message.FromJsonString(value)
-
-  def _ConvertValueMessage(self, value, message):
-    """Convert a JSON representation into Value message."""
-    if isinstance(value, dict):
-      self._ConvertStructMessage(value, message.struct_value)
-    elif isinstance(value, list):
-      self._ConvertListValueMessage(value, message.list_value)
-    elif value is None:
-      message.null_value = 0
-    elif isinstance(value, bool):
-      message.bool_value = value
-    elif isinstance(value, six.string_types):
-      message.string_value = value
-    elif isinstance(value, _INT_OR_FLOAT):
-      message.number_value = value
-    else:
-      raise ParseError('Unexpected type for Value message.')
-
-  def _ConvertListValueMessage(self, value, message):
-    """Convert a JSON representation into ListValue message."""
-    if not isinstance(value, list):
-      raise ParseError(
-          'ListValue must be in [] which is {0}.'.format(value))
-    message.ClearField('values')
-    for item in value:
-      self._ConvertValueMessage(item, message.values.add())
-
-  def _ConvertStructMessage(self, value, message):
-    """Convert a JSON representation into Struct message."""
-    if not isinstance(value, dict):
-      raise ParseError(
-          'Struct must be in a dict which is {0}.'.format(value))
-    for key in value:
-      self._ConvertValueMessage(value[key], message.fields[key])
-    return
-
-  def _ConvertWrapperMessage(self, value, message):
-    """Convert a JSON representation into Wrapper message."""
-    field = message.DESCRIPTOR.fields_by_name['value']
-    setattr(message, 'value', _ConvertScalarFieldValue(value, field))
-
-  def _ConvertMapFieldValue(self, value, message, field):
-    """Convert map field value for a message map field.
-
-    Args:
-      value: A JSON object containing the map field value to convert.
-      message: A protocol message to record the converted data.
-      field: The descriptor of the map field to be converted.
-
-    Raises:
-      ParseError: In case of conversion problems.
-    """
-    if not isinstance(value, dict):
-      raise ParseError(
-          'Map field {0} must be in a dict which is {1}.'.format(
-              field.name, value))
-    key_field = field.message_type.fields_by_name['key']
-    value_field = field.message_type.fields_by_name['value']
-    for key in value:
-      key_value = _ConvertScalarFieldValue(key, key_field, True)
-      if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-        self.ConvertMessage(value[key], getattr(
-            message, field.name)[key_value])
-      else:
-        getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
-            value[key], value_field)
-
-
-def _ConvertScalarFieldValue(value, field, require_str=False):
-  """Convert a single scalar field value.
-
-  Args:
-    value: A scalar value to convert.
-    field: The descriptor of the field to convert.
-    require_str: If True, the field value must be a str.
-
-  Returns:
-    The converted scalar field value.
-
-  Raises:
-    ParseError: In case of conversion problems.
-  """
-  if field.cpp_type in _INT_TYPES:
-    return _ConvertInteger(value)
-  elif field.cpp_type in _FLOAT_TYPES:
-    return _ConvertFloat(value)
-  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
-    return _ConvertBool(value, require_str)
-  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
-    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
-      return base64.b64decode(value)
-    else:
-      # Checking for unpaired surrogates appears to be unreliable,
-      # depending on the specific Python version, so we check manually.
-      if _UNPAIRED_SURROGATE_PATTERN.search(value):
-        raise ParseError('Unpaired surrogate')
-      return value
-  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
-    # Convert an enum value.
-    enum_value = field.enum_type.values_by_name.get(value, None)
-    if enum_value is None:
-      raise ParseError(
-          'Enum value must be a string literal with double quotes. '
-          'Type "{0}" has no value named {1}.'.format(
-              field.enum_type.full_name, value))
-    return enum_value.number
-
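As a concrete illustration of the bytes branch above, together with _ConvertWrapperMessage earlier in this file: wrapper messages parse from a bare JSON scalar, and bytes fields are base64-decoded. A hedged sketch assuming the bundled wrappers_pb2 module:

# BytesValue is a wrapper type, so Parse() accepts the raw JSON string and
# routes it through _ConvertWrapperMessage -> _ConvertScalarFieldValue,
# where the bytes field is base64-decoded.
from google.protobuf import json_format
from google.protobuf import wrappers_pb2

wrapper = wrappers_pb2.BytesValue()
json_format.Parse('"aGVsbG8="', wrapper)
assert wrapper.value == b'hello'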
-
-def _ConvertInteger(value):
-  """Convert an integer.
-
-  Args:
-    value: A scalar value to convert.
-
-  Returns:
-    The integer value.
-
-  Raises:
-    ParseError: If an integer couldn't be consumed.
-  """
-  if isinstance(value, float):
-    raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
-
-  if isinstance(value, six.text_type) and value.find(' ') != -1:
-    raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))
-
-  return int(value)
-
-
-def _ConvertFloat(value):
-  """Convert an floating point number."""
-  if value == 'nan':
-    raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
-  try:
-    # Assume Python compatible syntax.
-    return float(value)
-  except ValueError:
-    # Check alternative spellings.
-    if value == _NEG_INFINITY:
-      return float('-inf')
-    elif value == _INFINITY:
-      return float('inf')
-    elif value == _NAN:
-      return float('nan')
-    else:
-      raise ParseError('Couldn\'t parse float: {0}.'.format(value))
-
-
-def _ConvertBool(value, require_str):
-  """Convert a boolean value.
-
-  Args:
-    value: A scalar value to convert.
-    require_str: If True, value must be a str.
-
-  Returns:
-    The bool parsed.
-
-  Raises:
-    ParseError: If a boolean value couldn't be consumed.
-  """
-  if require_str:
-    if value == 'true':
-      return True
-    elif value == 'false':
-      return False
-    else:
-      raise ParseError('Expected "true" or "false", not {0}.'.format(value))
-
-  if not isinstance(value, bool):
-    raise ParseError('Expected true or false without quotes.')
-  return value
-
-_WKTJSONMETHODS = {
-    'google.protobuf.Any': ['_AnyMessageToJsonObject',
-                            '_ConvertAnyMessage'],
-    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
-                                 '_ConvertGenericMessage'],
-    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
-                                  '_ConvertGenericMessage'],
-    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
-                                  '_ConvertListValueMessage'],
-    'google.protobuf.Struct': ['_StructMessageToJsonObject',
-                               '_ConvertStructMessage'],
-    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
-                                  '_ConvertGenericMessage'],
-    'google.protobuf.Value': ['_ValueMessageToJsonObject',
-                              '_ConvertValueMessage']
-}
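The _WKTJSONMETHODS table closing the deleted file maps each well-known type's full name to a [serialize-method, parse-method] name pair, which the printer and parser invoke via operator.methodcaller. A standalone sketch of that dispatch pattern, using a hypothetical Handler class rather than the protobuf classes themselves:

from operator import methodcaller

# Hypothetical dispatch table in the same [to_json, from_json] shape as
# _WKTJSONMETHODS: full type name -> method names to invoke on a handler.
_METHODS = {'example.Duration': ['_ToJson', '_FromJson']}

class Handler(object):
  def _ToJson(self, value):
    return value['raw']          # stand-in for message.ToJsonString()

  def _FromJson(self, text, out):
    out['raw'] = text            # stand-in for message.FromJsonString(text)

handler = Handler()
out = {}
# methodcaller(name, *args) builds a callable that invokes handler.<name>(*args).
methodcaller(_METHODS['example.Duration'][1], '3s', out)(handler)
assert out == {'raw': '3s'}
assert methodcaller(_METHODS['example.Duration'][0], out)(handler) == '3s'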
diff --git a/tools/swarming_client/third_party/google/protobuf/map_unittest_pb2.py b/tools/swarming_client/third_party/google/protobuf/map_unittest_pb2.py
deleted file mode 100644
index a9384f7..0000000
--- a/tools/swarming_client/third_party/google/protobuf/map_unittest_pb2.py
+++ /dev/null
@@ -1,2801 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/map_unittest.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2
-from google.protobuf import unittest_no_arena_pb2 as google_dot_protobuf_dot_unittest__no__arena__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/map_unittest.proto',
-  package='protobuf_unittest',
-  syntax='proto3',
-  serialized_pb=_b('\n\"google/protobuf/map_unittest.proto\x12\x11protobuf_unittest\x1a\x1egoogle/protobuf/unittest.proto\x1a\'google/protobuf/unittest_no_arena.proto\"\xd6\x13\n\x07TestMap\x12\x46\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32Int32Entry\x12\x46\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt64Int64Entry\x12J\n\x11map_uint32_uint32\x18\x03 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint32Uint32Entry\x12J\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint64Uint64Entry\x12J\n\x11map_sint32_sint32\x18\x05 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint32Sint32Entry\x12J\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint64Sint64Entry\x12N\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed32Fixed32Entry\x12N\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed64Fixed64Entry\x12R\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry\x12R\n\x15map_sfixed64_sfixed64\x18\n \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry\x12\x46\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32FloatEntry\x12H\n\x10map_int32_double\x18\x0c \x03(\x0b\x32..protobuf_unittest.TestMap.MapInt32DoubleEntry\x12\x42\n\rmap_bool_bool\x18\r \x03(\x0b\x32+.protobuf_unittest.TestMap.MapBoolBoolEntry\x12J\n\x11map_string_string\x18\x0e \x03(\x0b\x32/.protobuf_unittest.TestMap.MapStringStringEntry\x12\x46\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32BytesEntry\x12\x44\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32,.protobuf_unittest.TestMap.MapInt32EnumEntry\x12Y\n\x19map_int32_foreign_message\x18\x11 \x03(\x0b\x32\x36.protobuf_unittest.TestMap.MapInt32ForeignMessageEntry\x12[\n\x1amap_string_foreign_message\x18\x12 \x03(\x0b\x32\x37.protobuf_unittest.TestMap.MapStringForeignMessageEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 
\x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 \x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1a\x61\n\x1cMapStringForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\"A\n\x11TestMapSubmessage\x12,\n\x08test_map\x18\x01 \x01(\x0b\x32\x1a.protobuf_unittest.TestMap\"\xbc\x01\n\x0eTestMessageMap\x12Q\n\x11map_int32_message\x18\x01 \x03(\x0b\x32\x36.protobuf_unittest.TestMessageMap.MapInt32MessageEntry\x1aW\n\x14MapInt32MessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes:\x02\x38\x01\"\xe3\x01\n\x0fTestSameTypeMap\x12:\n\x04map1\x18\x01 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map1Entry\x12:\n\x04map2\x18\x02 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map2Entry\x1a+\n\tMap1Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a+\n\tMap2Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xb6\x01\n\x16TestRequiredMessageMap\x12J\n\tmap_field\x18\x01 \x03(\x0b\x32\x37.protobuf_unittest.TestRequiredMessageMap.MapFieldEntry\x1aP\n\rMapFieldEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired:\x02\x38\x01\"\xd2\x14\n\x0cTestArenaMap\x12K\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32Int32Entry\x12K\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt64Int64Entry\x12O\n\x11map_uint32_uint32\x18\x03 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint32Uint32Entry\x12O\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint64Uint64Entry\x12O\n\x11map_sint32_sint32\x18\x05 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint32Sint32Entry\x12O\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint64Sint64Entry\x12S\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry\x12S\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry\x12W\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry\x12W\n\x15map_sfixed64_sfixed64\x18\n \x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry\x12K\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32FloatEntry\x12M\n\x10map_int32_double\x18\x0c \x03(\x0b\x32\x33.protobuf_unittest.TestArenaMap.MapInt32DoubleEntry\x12G\n\rmap_bool_bool\x18\r \x03(\x0b\x32\x30.protobuf_unittest.TestArenaMap.MapBoolBoolEntry\x12O\n\x11map_string_string\x18\x0e \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapStringStringEntry\x12K\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32BytesEntry\x12I\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32\x31.protobuf_unittest.TestArenaMap.MapInt32EnumEntry\x12^\n\x19map_int32_foreign_message\x18\x11 
\x03(\x0b\x32;.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry\x12n\n\"map_int32_foreign_message_no_arena\x18\x12 \x03(\x0b\x32\x42.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 \x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1ap\n\"MapInt32ForeignMessageNoArenaEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage:\x02\x38\x01\"\xe4\x01\n\x1fMessageContainingEnumCalledType\x12J\n\x04type\x18\x01 \x03(\x0b\x32<.protobuf_unittest.MessageContainingEnumCalledType.TypeEntry\x1a_\n\tTypeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.protobuf_unittest.MessageContainingEnumCalledType:\x02\x38\x01\"\x14\n\x04Type\x12\x0c\n\x08TYPE_FOO\x10\x00\"\x9d\x01\n\x1fMessageContainingMapCalledEntry\x12L\n\x05\x65ntry\x18\x01 \x03(\x0b\x32=.protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry\x1a,\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xad\x01\n\x17TestRecursiveMapMessage\x12<\n\x01\x61\x18\x01 \x03(\x0b\x32\x31.protobuf_unittest.TestRecursiveMapMessage.AEntry\x1aT\n\x06\x41\x45ntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest.TestRecursiveMapMessage:\x02\x38\x01*?\n\x07MapEnum\x12\x10\n\x0cMAP_ENUM_FOO\x10\x00\x12\x10\n\x0cMAP_ENUM_BAR\x10\x01\x12\x10\n\x0cMAP_ENUM_BAZ\x10\x02\x42\x03\xf8\x01\x01\x62\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__no__arena__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_MAPENUM = _descriptor.EnumDescriptor(
-  name='MapEnum',
-  full_name='protobuf_unittest.MapEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='MAP_ENUM_FOO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MAP_ENUM_BAR', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MAP_ENUM_BAZ', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=6536,
-  serialized_end=6599,
-)
-_sym_db.RegisterEnumDescriptor(_MAPENUM)
-
-MapEnum = enum_type_wrapper.EnumTypeWrapper(_MAPENUM)
-MAP_ENUM_FOO = 0
-MAP_ENUM_BAR = 1
-MAP_ENUM_BAZ = 2
-
-
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPE = _descriptor.EnumDescriptor(
-  name='Type',
-  full_name='protobuf_unittest.MessageContainingEnumCalledType.Type',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FOO', index=0, number=0,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=6178,
-  serialized_end=6198,
-)
-_sym_db.RegisterEnumDescriptor(_MESSAGECONTAININGENUMCALLEDTYPE_TYPE)
-
-
-_TESTMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor(
-  name='MapInt32Int32Entry',
-  full_name='protobuf_unittest.TestMap.MapInt32Int32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1534,
-  serialized_end=1586,
-)
-
-_TESTMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor(
-  name='MapInt64Int64Entry',
-  full_name='protobuf_unittest.TestMap.MapInt64Int64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.key', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.value', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1588,
-  serialized_end=1640,
-)
-
-_TESTMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
-  name='MapUint32Uint32Entry',
-  full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.key', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.value', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1642,
-  serialized_end=1696,
-)
-
-_TESTMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
-  name='MapUint64Uint64Entry',
-  full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.key', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.value', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1698,
-  serialized_end=1752,
-)
-
-_TESTMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
-  name='MapSint32Sint32Entry',
-  full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.key', index=0,
-      number=1, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.value', index=1,
-      number=2, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1754,
-  serialized_end=1808,
-)
-
-_TESTMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
-  name='MapSint64Sint64Entry',
-  full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.key', index=0,
-      number=1, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.value', index=1,
-      number=2, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1810,
-  serialized_end=1864,
-)
-
-_TESTMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
-  name='MapFixed32Fixed32Entry',
-  full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.key', index=0,
-      number=1, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.value', index=1,
-      number=2, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1866,
-  serialized_end=1922,
-)
-
-_TESTMAP_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
-  name='MapFixed64Fixed64Entry',
-  full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.key', index=0,
-      number=1, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.value', index=1,
-      number=2, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1924,
-  serialized_end=1980,
-)
-
-_TESTMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
-  name='MapSfixed32Sfixed32Entry',
-  full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.key', index=0,
-      number=1, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.value', index=1,
-      number=2, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1982,
-  serialized_end=2040,
-)
-
-_TESTMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
-  name='MapSfixed64Sfixed64Entry',
-  full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.key', index=0,
-      number=1, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.value', index=1,
-      number=2, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2042,
-  serialized_end=2100,
-)
-
-_TESTMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor(
-  name='MapInt32FloatEntry',
-  full_name='protobuf_unittest.TestMap.MapInt32FloatEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.value', index=1,
-      number=2, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2102,
-  serialized_end=2154,
-)
-
-_TESTMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
-  name='MapInt32DoubleEntry',
-  full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.value', index=1,
-      number=2, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2156,
-  serialized_end=2209,
-)
-
-_TESTMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
-  name='MapBoolBoolEntry',
-  full_name='protobuf_unittest.TestMap.MapBoolBoolEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.key', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.value', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2211,
-  serialized_end=2261,
-)
-
-_TESTMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
-  name='MapStringStringEntry',
-  full_name='protobuf_unittest.TestMap.MapStringStringEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapStringStringEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapStringStringEntry.value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2263,
-  serialized_end=2317,
-)
-
-_TESTMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor(
-  name='MapInt32BytesEntry',
-  full_name='protobuf_unittest.TestMap.MapInt32BytesEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.value', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2319,
-  serialized_end=2371,
-)
-
-_TESTMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor(
-  name='MapInt32EnumEntry',
-  full_name='protobuf_unittest.TestMap.MapInt32EnumEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.value', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2373,
-  serialized_end=2452,
-)
-
-_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor(
-  name='MapInt32ForeignMessageEntry',
-  full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2454,
-  serialized_end=2550,
-)
-
-_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY = _descriptor.Descriptor(
-  name='MapStringForeignMessageEntry',
-  full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2552,
-  serialized_end=2649,
-)
-
-_TESTMAP = _descriptor.Descriptor(
-  name='TestMap',
-  full_name='protobuf_unittest.TestMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map_int32_int32', full_name='protobuf_unittest.TestMap.map_int32_int32', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int64_int64', full_name='protobuf_unittest.TestMap.map_int64_int64', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_uint32_uint32', full_name='protobuf_unittest.TestMap.map_uint32_uint32', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_uint64_uint64', full_name='protobuf_unittest.TestMap.map_uint64_uint64', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sint32_sint32', full_name='protobuf_unittest.TestMap.map_sint32_sint32', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sint64_sint64', full_name='protobuf_unittest.TestMap.map_sint64_sint64', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_fixed32_fixed32', full_name='protobuf_unittest.TestMap.map_fixed32_fixed32', index=6,
-      number=7, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_fixed64_fixed64', full_name='protobuf_unittest.TestMap.map_fixed64_fixed64', index=7,
-      number=8, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestMap.map_sfixed32_sfixed32', index=8,
-      number=9, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestMap.map_sfixed64_sfixed64', index=9,
-      number=10, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_float', full_name='protobuf_unittest.TestMap.map_int32_float', index=10,
-      number=11, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_double', full_name='protobuf_unittest.TestMap.map_int32_double', index=11,
-      number=12, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_bool_bool', full_name='protobuf_unittest.TestMap.map_bool_bool', index=12,
-      number=13, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_string_string', full_name='protobuf_unittest.TestMap.map_string_string', index=13,
-      number=14, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_bytes', full_name='protobuf_unittest.TestMap.map_int32_bytes', index=14,
-      number=15, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_enum', full_name='protobuf_unittest.TestMap.map_int32_enum', index=15,
-      number=16, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_foreign_message', full_name='protobuf_unittest.TestMap.map_int32_foreign_message', index=16,
-      number=17, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_string_foreign_message', full_name='protobuf_unittest.TestMap.map_string_foreign_message', index=17,
-      number=18, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTMAP_MAPINT32INT32ENTRY, _TESTMAP_MAPINT64INT64ENTRY, _TESTMAP_MAPUINT32UINT32ENTRY, _TESTMAP_MAPUINT64UINT64ENTRY, _TESTMAP_MAPSINT32SINT32ENTRY, _TESTMAP_MAPSINT64SINT64ENTRY, _TESTMAP_MAPFIXED32FIXED32ENTRY, _TESTMAP_MAPFIXED64FIXED64ENTRY, _TESTMAP_MAPSFIXED32SFIXED32ENTRY, _TESTMAP_MAPSFIXED64SFIXED64ENTRY, _TESTMAP_MAPINT32FLOATENTRY, _TESTMAP_MAPINT32DOUBLEENTRY, _TESTMAP_MAPBOOLBOOLENTRY, _TESTMAP_MAPSTRINGSTRINGENTRY, _TESTMAP_MAPINT32BYTESENTRY, _TESTMAP_MAPINT32ENUMENTRY, _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=131,
-  serialized_end=2649,
-)
-
-
-_TESTMAPSUBMESSAGE = _descriptor.Descriptor(
-  name='TestMapSubmessage',
-  full_name='protobuf_unittest.TestMapSubmessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='test_map', full_name='protobuf_unittest.TestMapSubmessage.test_map', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2651,
-  serialized_end=2716,
-)
-
-
-_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY = _descriptor.Descriptor(
-  name='MapInt32MessageEntry',
-  full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2820,
-  serialized_end=2907,
-)
-
-_TESTMESSAGEMAP = _descriptor.Descriptor(
-  name='TestMessageMap',
-  full_name='protobuf_unittest.TestMessageMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map_int32_message', full_name='protobuf_unittest.TestMessageMap.map_int32_message', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2719,
-  serialized_end=2907,
-)
-
-
-_TESTSAMETYPEMAP_MAP1ENTRY = _descriptor.Descriptor(
-  name='Map1Entry',
-  full_name='protobuf_unittest.TestSameTypeMap.Map1Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3049,
-  serialized_end=3092,
-)
-
-_TESTSAMETYPEMAP_MAP2ENTRY = _descriptor.Descriptor(
-  name='Map2Entry',
-  full_name='protobuf_unittest.TestSameTypeMap.Map2Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3094,
-  serialized_end=3137,
-)
-
-_TESTSAMETYPEMAP = _descriptor.Descriptor(
-  name='TestSameTypeMap',
-  full_name='protobuf_unittest.TestSameTypeMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map1', full_name='protobuf_unittest.TestSameTypeMap.map1', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map2', full_name='protobuf_unittest.TestSameTypeMap.map2', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTSAMETYPEMAP_MAP1ENTRY, _TESTSAMETYPEMAP_MAP2ENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2910,
-  serialized_end=3137,
-)
-
-
-_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY = _descriptor.Descriptor(
-  name='MapFieldEntry',
-  full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3242,
-  serialized_end=3322,
-)
-
-_TESTREQUIREDMESSAGEMAP = _descriptor.Descriptor(
-  name='TestRequiredMessageMap',
-  full_name='protobuf_unittest.TestRequiredMessageMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map_field', full_name='protobuf_unittest.TestRequiredMessageMap.map_field', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3140,
-  serialized_end=3322,
-)
-
-
-_TESTARENAMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor(
-  name='MapInt32Int32Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1534,
-  serialized_end=1586,
-)
-
-_TESTARENAMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor(
-  name='MapInt64Int64Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.key', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.value', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1588,
-  serialized_end=1640,
-)
-
-_TESTARENAMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
-  name='MapUint32Uint32Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.key', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.value', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1642,
-  serialized_end=1696,
-)
-
-_TESTARENAMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
-  name='MapUint64Uint64Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.key', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.value', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1698,
-  serialized_end=1752,
-)
-
-_TESTARENAMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
-  name='MapSint32Sint32Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.key', index=0,
-      number=1, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.value', index=1,
-      number=2, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1754,
-  serialized_end=1808,
-)
-
-_TESTARENAMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
-  name='MapSint64Sint64Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.key', index=0,
-      number=1, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.value', index=1,
-      number=2, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1810,
-  serialized_end=1864,
-)
-
-_TESTARENAMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
-  name='MapFixed32Fixed32Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.key', index=0,
-      number=1, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.value', index=1,
-      number=2, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1866,
-  serialized_end=1922,
-)
-
-_TESTARENAMAP_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
-  name='MapFixed64Fixed64Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.key', index=0,
-      number=1, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.value', index=1,
-      number=2, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1924,
-  serialized_end=1980,
-)
-
-_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
-  name='MapSfixed32Sfixed32Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.key', index=0,
-      number=1, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.value', index=1,
-      number=2, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1982,
-  serialized_end=2040,
-)
-
-_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
-  name='MapSfixed64Sfixed64Entry',
-  full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.key', index=0,
-      number=1, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.value', index=1,
-      number=2, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2042,
-  serialized_end=2100,
-)
-
-_TESTARENAMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor(
-  name='MapInt32FloatEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.value', index=1,
-      number=2, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2102,
-  serialized_end=2154,
-)
-
-_TESTARENAMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
-  name='MapInt32DoubleEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.value', index=1,
-      number=2, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2156,
-  serialized_end=2209,
-)
-
-_TESTARENAMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
-  name='MapBoolBoolEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.key', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.value', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2211,
-  serialized_end=2261,
-)
-
-_TESTARENAMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
-  name='MapStringStringEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2263,
-  serialized_end=2317,
-)
-
-_TESTARENAMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor(
-  name='MapInt32BytesEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.value', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2319,
-  serialized_end=2371,
-)
-
-_TESTARENAMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor(
-  name='MapInt32EnumEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.value', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2373,
-  serialized_end=2452,
-)
-
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor(
-  name='MapInt32ForeignMessageEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2454,
-  serialized_end=2550,
-)
-
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY = _descriptor.Descriptor(
-  name='MapInt32ForeignMessageNoArenaEntry',
-  full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5855,
-  serialized_end=5967,
-)
-
-_TESTARENAMAP = _descriptor.Descriptor(
-  name='TestArenaMap',
-  full_name='protobuf_unittest.TestArenaMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='map_int32_int32', full_name='protobuf_unittest.TestArenaMap.map_int32_int32', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int64_int64', full_name='protobuf_unittest.TestArenaMap.map_int64_int64', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_uint32_uint32', full_name='protobuf_unittest.TestArenaMap.map_uint32_uint32', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_uint64_uint64', full_name='protobuf_unittest.TestArenaMap.map_uint64_uint64', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sint32_sint32', full_name='protobuf_unittest.TestArenaMap.map_sint32_sint32', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sint64_sint64', full_name='protobuf_unittest.TestArenaMap.map_sint64_sint64', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_fixed32_fixed32', full_name='protobuf_unittest.TestArenaMap.map_fixed32_fixed32', index=6,
-      number=7, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_fixed64_fixed64', full_name='protobuf_unittest.TestArenaMap.map_fixed64_fixed64', index=7,
-      number=8, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestArenaMap.map_sfixed32_sfixed32', index=8,
-      number=9, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestArenaMap.map_sfixed64_sfixed64', index=9,
-      number=10, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_float', full_name='protobuf_unittest.TestArenaMap.map_int32_float', index=10,
-      number=11, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_double', full_name='protobuf_unittest.TestArenaMap.map_int32_double', index=11,
-      number=12, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_bool_bool', full_name='protobuf_unittest.TestArenaMap.map_bool_bool', index=12,
-      number=13, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_string_string', full_name='protobuf_unittest.TestArenaMap.map_string_string', index=13,
-      number=14, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_bytes', full_name='protobuf_unittest.TestArenaMap.map_int32_bytes', index=14,
-      number=15, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_enum', full_name='protobuf_unittest.TestArenaMap.map_int32_enum', index=15,
-      number=16, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_foreign_message', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message', index=16,
-      number=17, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_int32_foreign_message_no_arena', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message_no_arena', index=17,
-      number=18, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTARENAMAP_MAPINT32INT32ENTRY, _TESTARENAMAP_MAPINT64INT64ENTRY, _TESTARENAMAP_MAPUINT32UINT32ENTRY, _TESTARENAMAP_MAPUINT64UINT64ENTRY, _TESTARENAMAP_MAPSINT32SINT32ENTRY, _TESTARENAMAP_MAPSINT64SINT64ENTRY, _TESTARENAMAP_MAPFIXED32FIXED32ENTRY, _TESTARENAMAP_MAPFIXED64FIXED64ENTRY, _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY, _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY, _TESTARENAMAP_MAPINT32FLOATENTRY, _TESTARENAMAP_MAPINT32DOUBLEENTRY, _TESTARENAMAP_MAPBOOLBOOLENTRY, _TESTARENAMAP_MAPSTRINGSTRINGENTRY, _TESTARENAMAP_MAPINT32BYTESENTRY, _TESTARENAMAP_MAPINT32ENUMENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3325,
-  serialized_end=5967,
-)
-
-
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY = _descriptor.Descriptor(
-  name='TypeEntry',
-  full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6081,
-  serialized_end=6176,
-)
-
-_MESSAGECONTAININGENUMCALLEDTYPE = _descriptor.Descriptor(
-  name='MessageContainingEnumCalledType',
-  full_name='protobuf_unittest.MessageContainingEnumCalledType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='type', full_name='protobuf_unittest.MessageContainingEnumCalledType.type', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY, ],
-  enum_types=[
-    _MESSAGECONTAININGENUMCALLEDTYPE_TYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5970,
-  serialized_end=6198,
-)
-
-
-_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY = _descriptor.Descriptor(
-  name='EntryEntry',
-  full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6314,
-  serialized_end=6358,
-)
-
-_MESSAGECONTAININGMAPCALLEDENTRY = _descriptor.Descriptor(
-  name='MessageContainingMapCalledEntry',
-  full_name='protobuf_unittest.MessageContainingMapCalledEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='entry', full_name='protobuf_unittest.MessageContainingMapCalledEntry.entry', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6201,
-  serialized_end=6358,
-)
-
-
-_TESTRECURSIVEMAPMESSAGE_AENTRY = _descriptor.Descriptor(
-  name='AEntry',
-  full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6450,
-  serialized_end=6534,
-)
-
-_TESTRECURSIVEMAPMESSAGE = _descriptor.Descriptor(
-  name='TestRecursiveMapMessage',
-  full_name='protobuf_unittest.TestRecursiveMapMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestRecursiveMapMessage.a', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTRECURSIVEMAPMESSAGE_AENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6361,
-  serialized_end=6534,
-)
-
-_TESTMAP_MAPINT32INT32ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT64INT64ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT32FLOATENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPBOOLBOOLENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT32BYTESENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM
-_TESTMAP_MAPINT32ENUMENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
-_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTMAP
-_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
-_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.containing_type = _TESTMAP
-_TESTMAP.fields_by_name['map_int32_int32'].message_type = _TESTMAP_MAPINT32INT32ENTRY
-_TESTMAP.fields_by_name['map_int64_int64'].message_type = _TESTMAP_MAPINT64INT64ENTRY
-_TESTMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTMAP_MAPUINT32UINT32ENTRY
-_TESTMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTMAP_MAPUINT64UINT64ENTRY
-_TESTMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTMAP_MAPSINT32SINT32ENTRY
-_TESTMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTMAP_MAPSINT64SINT64ENTRY
-_TESTMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTMAP_MAPFIXED32FIXED32ENTRY
-_TESTMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTMAP_MAPFIXED64FIXED64ENTRY
-_TESTMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTMAP_MAPSFIXED32SFIXED32ENTRY
-_TESTMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTMAP_MAPSFIXED64SFIXED64ENTRY
-_TESTMAP.fields_by_name['map_int32_float'].message_type = _TESTMAP_MAPINT32FLOATENTRY
-_TESTMAP.fields_by_name['map_int32_double'].message_type = _TESTMAP_MAPINT32DOUBLEENTRY
-_TESTMAP.fields_by_name['map_bool_bool'].message_type = _TESTMAP_MAPBOOLBOOLENTRY
-_TESTMAP.fields_by_name['map_string_string'].message_type = _TESTMAP_MAPSTRINGSTRINGENTRY
-_TESTMAP.fields_by_name['map_int32_bytes'].message_type = _TESTMAP_MAPINT32BYTESENTRY
-_TESTMAP.fields_by_name['map_int32_enum'].message_type = _TESTMAP_MAPINT32ENUMENTRY
-_TESTMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY
-_TESTMAP.fields_by_name['map_string_foreign_message'].message_type = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY
-_TESTMAPSUBMESSAGE.fields_by_name['test_map'].message_type = _TESTMAP
-_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTALLTYPES
-_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.containing_type = _TESTMESSAGEMAP
-_TESTMESSAGEMAP.fields_by_name['map_int32_message'].message_type = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY
-_TESTSAMETYPEMAP_MAP1ENTRY.containing_type = _TESTSAMETYPEMAP
-_TESTSAMETYPEMAP_MAP2ENTRY.containing_type = _TESTSAMETYPEMAP
-_TESTSAMETYPEMAP.fields_by_name['map1'].message_type = _TESTSAMETYPEMAP_MAP1ENTRY
-_TESTSAMETYPEMAP.fields_by_name['map2'].message_type = _TESTSAMETYPEMAP_MAP2ENTRY
-_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTREQUIRED
-_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.containing_type = _TESTREQUIREDMESSAGEMAP
-_TESTREQUIREDMESSAGEMAP.fields_by_name['map_field'].message_type = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY
-_TESTARENAMAP_MAPINT32INT32ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT64INT64ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32FLOATENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPBOOLBOOLENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32BYTESENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM
-_TESTARENAMAP_MAPINT32ENUMENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__no__arena__pb2._FOREIGNMESSAGE
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.containing_type = _TESTARENAMAP
-_TESTARENAMAP.fields_by_name['map_int32_int32'].message_type = _TESTARENAMAP_MAPINT32INT32ENTRY
-_TESTARENAMAP.fields_by_name['map_int64_int64'].message_type = _TESTARENAMAP_MAPINT64INT64ENTRY
-_TESTARENAMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTARENAMAP_MAPUINT32UINT32ENTRY
-_TESTARENAMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTARENAMAP_MAPUINT64UINT64ENTRY
-_TESTARENAMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTARENAMAP_MAPSINT32SINT32ENTRY
-_TESTARENAMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTARENAMAP_MAPSINT64SINT64ENTRY
-_TESTARENAMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY
-_TESTARENAMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY
-_TESTARENAMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY
-_TESTARENAMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY
-_TESTARENAMAP.fields_by_name['map_int32_float'].message_type = _TESTARENAMAP_MAPINT32FLOATENTRY
-_TESTARENAMAP.fields_by_name['map_int32_double'].message_type = _TESTARENAMAP_MAPINT32DOUBLEENTRY
-_TESTARENAMAP.fields_by_name['map_bool_bool'].message_type = _TESTARENAMAP_MAPBOOLBOOLENTRY
-_TESTARENAMAP.fields_by_name['map_string_string'].message_type = _TESTARENAMAP_MAPSTRINGSTRINGENTRY
-_TESTARENAMAP.fields_by_name['map_int32_bytes'].message_type = _TESTARENAMAP_MAPINT32BYTESENTRY
-_TESTARENAMAP.fields_by_name['map_int32_enum'].message_type = _TESTARENAMAP_MAPINT32ENUMENTRY
-_TESTARENAMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY
-_TESTARENAMAP.fields_by_name['map_int32_foreign_message_no_arena'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.fields_by_name['value'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE
-_MESSAGECONTAININGENUMCALLEDTYPE.fields_by_name['type'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPE.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE
-_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.containing_type = _MESSAGECONTAININGMAPCALLEDENTRY
-_MESSAGECONTAININGMAPCALLEDENTRY.fields_by_name['entry'].message_type = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY
-_TESTRECURSIVEMAPMESSAGE_AENTRY.fields_by_name['value'].message_type = _TESTRECURSIVEMAPMESSAGE
-_TESTRECURSIVEMAPMESSAGE_AENTRY.containing_type = _TESTRECURSIVEMAPMESSAGE
-_TESTRECURSIVEMAPMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMAPMESSAGE_AENTRY
-DESCRIPTOR.message_types_by_name['TestMap'] = _TESTMAP
-DESCRIPTOR.message_types_by_name['TestMapSubmessage'] = _TESTMAPSUBMESSAGE
-DESCRIPTOR.message_types_by_name['TestMessageMap'] = _TESTMESSAGEMAP
-DESCRIPTOR.message_types_by_name['TestSameTypeMap'] = _TESTSAMETYPEMAP
-DESCRIPTOR.message_types_by_name['TestRequiredMessageMap'] = _TESTREQUIREDMESSAGEMAP
-DESCRIPTOR.message_types_by_name['TestArenaMap'] = _TESTARENAMAP
-DESCRIPTOR.message_types_by_name['MessageContainingEnumCalledType'] = _MESSAGECONTAININGENUMCALLEDTYPE
-DESCRIPTOR.message_types_by_name['MessageContainingMapCalledEntry'] = _MESSAGECONTAININGMAPCALLEDENTRY
-DESCRIPTOR.message_types_by_name['TestRecursiveMapMessage'] = _TESTRECURSIVEMAPMESSAGE
-DESCRIPTOR.enum_types_by_name['MapEnum'] = _MAPENUM
-
-TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), dict(
-
-  MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32INT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32Int32Entry)
-    ))
-  ,
-
-  MapInt64Int64Entry = _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT64INT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt64Int64Entry)
-    ))
-  ,
-
-  MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPUINT32UINT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint32Uint32Entry)
-    ))
-  ,
-
-  MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPUINT64UINT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint64Uint64Entry)
-    ))
-  ,
-
-  MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSINT32SINT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint32Sint32Entry)
-    ))
-  ,
-
-  MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSINT64SINT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint64Sint64Entry)
-    ))
-  ,
-
-  MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPFIXED32FIXED32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed32Fixed32Entry)
-    ))
-  ,
-
-  MapFixed64Fixed64Entry = _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPFIXED64FIXED64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed64Fixed64Entry)
-    ))
-  ,
-
-  MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSFIXED32SFIXED32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry)
-    ))
-  ,
-
-  MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSFIXED64SFIXED64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry)
-    ))
-  ,
-
-  MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32FLOATENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32FloatEntry)
-    ))
-  ,
-
-  MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32DOUBLEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32DoubleEntry)
-    ))
-  ,
-
-  MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPBOOLBOOLENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapBoolBoolEntry)
-    ))
-  ,
-
-  MapStringStringEntry = _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSTRINGSTRINGENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringStringEntry)
-    ))
-  ,
-
-  MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32BYTESENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32BytesEntry)
-    ))
-  ,
-
-  MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32ENUMENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32EnumEntry)
-    ))
-  ,
-
-  MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32ForeignMessageEntry)
-    ))
-  ,
-
-  MapStringForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapStringForeignMessageEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringForeignMessageEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTMAP,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap)
-  ))
-_sym_db.RegisterMessage(TestMap)
-_sym_db.RegisterMessage(TestMap.MapInt32Int32Entry)
-_sym_db.RegisterMessage(TestMap.MapInt64Int64Entry)
-_sym_db.RegisterMessage(TestMap.MapUint32Uint32Entry)
-_sym_db.RegisterMessage(TestMap.MapUint64Uint64Entry)
-_sym_db.RegisterMessage(TestMap.MapSint32Sint32Entry)
-_sym_db.RegisterMessage(TestMap.MapSint64Sint64Entry)
-_sym_db.RegisterMessage(TestMap.MapFixed32Fixed32Entry)
-_sym_db.RegisterMessage(TestMap.MapFixed64Fixed64Entry)
-_sym_db.RegisterMessage(TestMap.MapSfixed32Sfixed32Entry)
-_sym_db.RegisterMessage(TestMap.MapSfixed64Sfixed64Entry)
-_sym_db.RegisterMessage(TestMap.MapInt32FloatEntry)
-_sym_db.RegisterMessage(TestMap.MapInt32DoubleEntry)
-_sym_db.RegisterMessage(TestMap.MapBoolBoolEntry)
-_sym_db.RegisterMessage(TestMap.MapStringStringEntry)
-_sym_db.RegisterMessage(TestMap.MapInt32BytesEntry)
-_sym_db.RegisterMessage(TestMap.MapInt32EnumEntry)
-_sym_db.RegisterMessage(TestMap.MapInt32ForeignMessageEntry)
-_sym_db.RegisterMessage(TestMap.MapStringForeignMessageEntry)
-
-TestMapSubmessage = _reflection.GeneratedProtocolMessageType('TestMapSubmessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMAPSUBMESSAGE,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMapSubmessage)
-  ))
-_sym_db.RegisterMessage(TestMapSubmessage)
-
-TestMessageMap = _reflection.GeneratedProtocolMessageType('TestMessageMap', (_message.Message,), dict(
-
-  MapInt32MessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32MessageEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap.MapInt32MessageEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTMESSAGEMAP,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap)
-  ))
-_sym_db.RegisterMessage(TestMessageMap)
-_sym_db.RegisterMessage(TestMessageMap.MapInt32MessageEntry)
-
-TestSameTypeMap = _reflection.GeneratedProtocolMessageType('TestSameTypeMap', (_message.Message,), dict(
-
-  Map1Entry = _reflection.GeneratedProtocolMessageType('Map1Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTSAMETYPEMAP_MAP1ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map1Entry)
-    ))
-  ,
-
-  Map2Entry = _reflection.GeneratedProtocolMessageType('Map2Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTSAMETYPEMAP_MAP2ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map2Entry)
-    ))
-  ,
-  DESCRIPTOR = _TESTSAMETYPEMAP,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap)
-  ))
-_sym_db.RegisterMessage(TestSameTypeMap)
-_sym_db.RegisterMessage(TestSameTypeMap.Map1Entry)
-_sym_db.RegisterMessage(TestSameTypeMap.Map2Entry)
-
-TestRequiredMessageMap = _reflection.GeneratedProtocolMessageType('TestRequiredMessageMap', (_message.Message,), dict(
-
-  MapFieldEntry = _reflection.GeneratedProtocolMessageType('MapFieldEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap.MapFieldEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTREQUIREDMESSAGEMAP,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap)
-  ))
-_sym_db.RegisterMessage(TestRequiredMessageMap)
-_sym_db.RegisterMessage(TestRequiredMessageMap.MapFieldEntry)
-
-TestArenaMap = _reflection.GeneratedProtocolMessageType('TestArenaMap', (_message.Message,), dict(
-
-  MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32INT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32Int32Entry)
-    ))
-  ,
-
-  MapInt64Int64Entry = _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT64INT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt64Int64Entry)
-    ))
-  ,
-
-  MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPUINT32UINT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint32Uint32Entry)
-    ))
-  ,
-
-  MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPUINT64UINT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint64Uint64Entry)
-    ))
-  ,
-
-  MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPSINT32SINT32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint32Sint32Entry)
-    ))
-  ,
-
-  MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPSINT64SINT64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint64Sint64Entry)
-    ))
-  ,
-
-  MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry)
-    ))
-  ,
-
-  MapFixed64Fixed64Entry = _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry)
-    ))
-  ,
-
-  MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry)
-    ))
-  ,
-
-  MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry)
-    ))
-  ,
-
-  MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32FLOATENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32FloatEntry)
-    ))
-  ,
-
-  MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32DOUBLEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32DoubleEntry)
-    ))
-  ,
-
-  MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPBOOLBOOLENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapBoolBoolEntry)
-    ))
-  ,
-
-  MapStringStringEntry = _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPSTRINGSTRINGENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapStringStringEntry)
-    ))
-  ,
-
-  MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32BYTESENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32BytesEntry)
-    ))
-  ,
-
-  MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32ENUMENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32EnumEntry)
-    ))
-  ,
-
-  MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry)
-    ))
-  ,
-
-  MapInt32ForeignMessageNoArenaEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageNoArenaEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTARENAMAP,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap)
-  ))
-_sym_db.RegisterMessage(TestArenaMap)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32Int32Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt64Int64Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapUint32Uint32Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapUint64Uint64Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapSint32Sint32Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapSint64Sint64Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapFixed32Fixed32Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapFixed64Fixed64Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapSfixed32Sfixed32Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapSfixed64Sfixed64Entry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32FloatEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32DoubleEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapBoolBoolEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapStringStringEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32BytesEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32EnumEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageEntry)
-_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageNoArenaEntry)
-
-MessageContainingEnumCalledType = _reflection.GeneratedProtocolMessageType('MessageContainingEnumCalledType', (_message.Message,), dict(
-
-  TypeEntry = _reflection.GeneratedProtocolMessageType('TypeEntry', (_message.Message,), dict(
-    DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType.TypeEntry)
-    ))
-  ,
-  DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType)
-  ))
-_sym_db.RegisterMessage(MessageContainingEnumCalledType)
-_sym_db.RegisterMessage(MessageContainingEnumCalledType.TypeEntry)
-
-MessageContainingMapCalledEntry = _reflection.GeneratedProtocolMessageType('MessageContainingMapCalledEntry', (_message.Message,), dict(
-
-  EntryEntry = _reflection.GeneratedProtocolMessageType('EntryEntry', (_message.Message,), dict(
-    DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry)
-    ))
-  ,
-  DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry)
-  ))
-_sym_db.RegisterMessage(MessageContainingMapCalledEntry)
-_sym_db.RegisterMessage(MessageContainingMapCalledEntry.EntryEntry)
-
-TestRecursiveMapMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMapMessage', (_message.Message,), dict(
-
-  AEntry = _reflection.GeneratedProtocolMessageType('AEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE_AENTRY,
-    __module__ = 'google.protobuf.map_unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage.AEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE,
-  __module__ = 'google.protobuf.map_unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage)
-  ))
-_sym_db.RegisterMessage(TestRecursiveMapMessage)
-_sym_db.RegisterMessage(TestRecursiveMapMessage.AEntry)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
-_TESTMAP_MAPINT32INT32ENTRY.has_options = True
-_TESTMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT64INT64ENTRY.has_options = True
-_TESTMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPUINT32UINT32ENTRY.has_options = True
-_TESTMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPUINT64UINT64ENTRY.has_options = True
-_TESTMAP_MAPUINT64UINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSINT32SINT32ENTRY.has_options = True
-_TESTMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSINT64SINT64ENTRY.has_options = True
-_TESTMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPFIXED32FIXED32ENTRY.has_options = True
-_TESTMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPFIXED64FIXED64ENTRY.has_options = True
-_TESTMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True
-_TESTMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True
-_TESTMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT32FLOATENTRY.has_options = True
-_TESTMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT32DOUBLEENTRY.has_options = True
-_TESTMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPBOOLBOOLENTRY.has_options = True
-_TESTMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSTRINGSTRINGENTRY.has_options = True
-_TESTMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT32BYTESENTRY.has_options = True
-_TESTMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT32ENUMENTRY.has_options = True
-_TESTMAP_MAPINT32ENUMENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True
-_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.has_options = True
-_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.has_options = True
-_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTSAMETYPEMAP_MAP1ENTRY.has_options = True
-_TESTSAMETYPEMAP_MAP1ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTSAMETYPEMAP_MAP2ENTRY.has_options = True
-_TESTSAMETYPEMAP_MAP2ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.has_options = True
-_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32INT32ENTRY.has_options = True
-_TESTARENAMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT64INT64ENTRY.has_options = True
-_TESTARENAMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPUINT32UINT32ENTRY.has_options = True
-_TESTARENAMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPUINT64UINT64ENTRY.has_options = True
-_TESTARENAMAP_MAPUINT64UINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPSINT32SINT32ENTRY.has_options = True
-_TESTARENAMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPSINT64SINT64ENTRY.has_options = True
-_TESTARENAMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.has_options = True
-_TESTARENAMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.has_options = True
-_TESTARENAMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True
-_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True
-_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32FLOATENTRY.has_options = True
-_TESTARENAMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32DOUBLEENTRY.has_options = True
-_TESTARENAMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPBOOLBOOLENTRY.has_options = True
-_TESTARENAMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPSTRINGSTRINGENTRY.has_options = True
-_TESTARENAMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32BYTESENTRY.has_options = True
-_TESTARENAMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32ENUMENTRY.has_options = True
-_TESTARENAMAP_MAPINT32ENUMENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.has_options = True
-_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.has_options = True
-_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.has_options = True
-_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTRECURSIVEMAPMESSAGE_AENTRY.has_options = True
-_TESTRECURSIVEMAPMESSAGE_AENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/message.py b/tools/swarming_client/third_party/google/protobuf/message.py
deleted file mode 100644
index 606f735..0000000
--- a/tools/swarming_client/third_party/google/protobuf/message.py
+++ /dev/null
@@ -1,295 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# TODO(robinson): We should just make these methods all "pure-virtual" and move
-# all implementation out, into reflection.py for now.
-
-
-"""Contains an abstract base class for protocol messages."""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-class Error(Exception): pass
-class DecodeError(Error): pass
-class EncodeError(Error): pass
-
-
-class Message(object):
-
-  """Abstract base class for protocol messages.
-
-  Protocol message classes are almost always generated by the protocol
-  compiler.  These generated types subclass Message and implement the methods
-  shown below.
-
-  TODO(robinson): Link to an HTML document here.
-
-  TODO(robinson): Document that instances of this class will also
-  have an Extensions attribute with __getitem__ and __setitem__.
-  Again, not sure how to best convey this.
-
-  TODO(robinson): Document that the class must also have a static
-    RegisterExtension(extension_field) method.
-    Not sure how to best express at this point.
-  """
-
-  # TODO(robinson): Document these fields and methods.
-
-  __slots__ = []
-
-  DESCRIPTOR = None
-
-  def __deepcopy__(self, memo=None):
-    clone = type(self)()
-    clone.MergeFrom(self)
-    return clone
-
-  def __eq__(self, other_msg):
-    """Recursively compares two messages by value and structure."""
-    raise NotImplementedError
-
-  def __ne__(self, other_msg):
-    # Can't just say self != other_msg, since that would infinitely recurse. :)
-    return not self == other_msg
-
-  def __hash__(self):
-    raise TypeError('unhashable object')
-
-  def __str__(self):
-    """Outputs a human-readable representation of the message."""
-    raise NotImplementedError
-
-  def __unicode__(self):
-    """Outputs a human-readable representation of the message."""
-    raise NotImplementedError
-
-  def MergeFrom(self, other_msg):
-    """Merges the contents of the specified message into current message.
-
-    This method merges the contents of the specified message into the current
-    message. Singular fields that are set in the specified message overwrite
-    the corresponding fields in the current message. Repeated fields are
-    appended. Singular sub-messages and groups are recursively merged.
-
-    Args:
-      other_msg: Message to merge into the current message.
-    """
-    raise NotImplementedError
-
-  def CopyFrom(self, other_msg):
-    """Copies the content of the specified message into the current message.
-
-    The method clears the current message and then merges the specified
-    message using MergeFrom.
-
-    Args:
-      other_msg: Message to copy into the current one.
-    """
-    if self is other_msg:
-      return
-    self.Clear()
-    self.MergeFrom(other_msg)
-
-  def Clear(self):
-    """Clears all data that was set in the message."""
-    raise NotImplementedError
-
-  def SetInParent(self):
-    """Mark this as present in the parent.
-
-    This normally happens automatically when you assign a field of a
-    sub-message, but sometimes you want to make the sub-message
-    present while keeping it empty.  If you find yourself using this,
-    you may want to reconsider your design."""
-    raise NotImplementedError
-
-  def IsInitialized(self):
-    """Checks if the message is initialized.
-
-    Returns:
-      The method returns True if the message is initialized (i.e. all of its
-      required fields are set).
-    """
-    raise NotImplementedError
-
-  # TODO(robinson): MergeFromString() should probably return None and be
-  # implemented in terms of a helper that returns the # of bytes read.  Our
-  # deserialization routines would use the helper when recursively
-  # deserializing, but the end user would almost always just want the no-return
-  # MergeFromString().
-
-  def MergeFromString(self, serialized):
-    """Merges serialized protocol buffer data into this message.
-
-    When we find a field in |serialized| that is already present
-    in this message:
-      - If it's a "repeated" field, we append to the end of our list.
-      - Else, if it's a scalar, we overwrite our field.
-      - Else, (it's a nonrepeated composite), we recursively merge
-        into the existing composite.
-
-    TODO(robinson): Document handling of unknown fields.
-
-    Args:
-      serialized: Any object that allows us to call buffer(serialized)
-        to access a string of bytes using the buffer interface.
-
-    TODO(robinson): When we switch to a helper, this will return None.
-
-    Returns:
-      The number of bytes read from |serialized|.
-      For non-group messages, this will always be len(serialized),
-      but for messages which are actually groups, this will
-      generally be less than len(serialized), since we must
-      stop when we reach an END_GROUP tag.  Note that if
-      we *do* stop because of an END_GROUP tag, the number
-      of bytes returned does not include the bytes
-      for the END_GROUP tag information.
-    """
-    raise NotImplementedError
-
-  def ParseFromString(self, serialized):
-    """Parse serialized protocol buffer data into this message.
-
-    Like MergeFromString(), except we clear the object first and
-    do not return the value that MergeFromString returns.
-    """
-    self.Clear()
-    self.MergeFromString(serialized)
-
-  def SerializeToString(self):
-    """Serializes the protocol message to a binary string.
-
-    Returns:
-      A binary string representation of the message if all of the required
-      fields in the message are set (i.e. the message is initialized).
-
-    Raises:
-      message.EncodeError if the message isn't initialized.
-    """
-    raise NotImplementedError
-
-  def SerializePartialToString(self):
-    """Serializes the protocol message to a binary string.
-
-    This method is similar to SerializeToString but doesn't check if the
-    message is initialized.
-
-    Returns:
-      A string representation of the partial message.
-    """
-    raise NotImplementedError
-
-  # TODO(robinson): Decide whether we like these better
-  # than auto-generated has_foo() and clear_foo() methods
-  # on the instances themselves.  This way is less consistent
-  # with C++, but it makes reflection-type access easier and
-  # reduces the number of magically autogenerated things.
-  #
-  # TODO(robinson): Be sure to document (and test) exactly
-  # which field names are accepted here.  Are we case-sensitive?
-  # What do we do with fields that share names with Python keywords
-  # like 'lambda' and 'yield'?
-  #
-  # nnorwitz says:
-  # """
-  # Typically (in python), an underscore is appended to names that are
-  # keywords. So they would become lambda_ or yield_.
-  # """
-  def ListFields(self):
-    """Returns a list of (FieldDescriptor, value) tuples for all
-    fields in the message which are not empty.  A singular field is non-empty
-    if HasField() would return true, and a repeated field is non-empty if
-    it contains at least one element.  The fields are ordered by field
-    number"""
-    raise NotImplementedError
-
-  def HasField(self, field_name):
-    """Checks if a certain field is set for the message, or if any field inside
-    a oneof group is set.  Note that if the field_name is not defined in the
-    message descriptor, ValueError will be raised."""
-    raise NotImplementedError
-
-  def ClearField(self, field_name):
-    """Clears the contents of a given field, or the field set inside a oneof
-    group.  If the name neither refers to a defined field or oneof group,
-    ValueError is raised."""
-    raise NotImplementedError
-
-  def WhichOneof(self, oneof_group):
-    """Returns the name of the field that is set inside a oneof group, or
-    None if no field is set.  If no group with the given name exists, ValueError
-    will be raised."""
-    raise NotImplementedError
-
-  def HasExtension(self, extension_handle):
-    raise NotImplementedError
-
-  def ClearExtension(self, extension_handle):
-    raise NotImplementedError
-
-  def DiscardUnknownFields(self):
-    raise NotImplementedError
-
-  def ByteSize(self):
-    """Returns the serialized size of this message.
-    Recursively calls ByteSize() on all contained messages.
-    """
-    raise NotImplementedError
-
-  def _SetListener(self, message_listener):
-    """Internal method used by the protocol message implementation.
-    Clients should not call this directly.
-
-    Sets a listener that this message will call on certain state transitions.
-
-    The purpose of this method is to register back-edges from children to
-    parents at runtime, for the purpose of setting "has" bits and
-    byte-size-dirty bits in the parent and ancestor objects whenever a child or
-    descendant object is modified.
-
-    If the client wants to disconnect this Message from the object tree, she
-    explicitly sets callback to None.
-
-    If message_listener is None, unregisters any existing listener.  Otherwise,
-    message_listener must implement the MessageListener interface in
-    internal/message_listener.py, and we discard any listener registered
-    via a previous _SetListener() call.
-    """
-    raise NotImplementedError
-
-  def __getstate__(self):
-    """Support the pickle protocol."""
-    return dict(serialized=self.SerializePartialToString())
-
-  def __setstate__(self, state):
-    """Support the pickle protocol."""
-    self.__init__()
-    self.ParseFromString(state['serialized'])
diff --git a/tools/swarming_client/third_party/google/protobuf/message_factory.py b/tools/swarming_client/third_party/google/protobuf/message_factory.py
deleted file mode 100644
index 1b059d1..0000000
--- a/tools/swarming_client/third_party/google/protobuf/message_factory.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides a factory class for generating dynamic messages.
-
-The easiest way to use this class is if you have access to the FileDescriptor
-protos containing the messages you want to create you can just do the following:
-
-message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
-my_proto_instance = message_classes['some.proto.package.MessageName']()
-"""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-from google.protobuf import descriptor_pool
-from google.protobuf import message
-from google.protobuf import reflection
-
-
-class MessageFactory(object):
-  """Factory for creating Proto2 messages from descriptors in a pool."""
-
-  def __init__(self, pool=None):
-    """Initializes a new factory."""
-    self.pool = pool or descriptor_pool.DescriptorPool()
-
-    # local cache of all classes built from protobuf descriptors
-    self._classes = {}
-
-  def GetPrototype(self, descriptor):
-    """Builds a proto2 message class based on the passed in descriptor.
-
-    Passing a descriptor with a fully qualified name matching a previous
-    invocation will cause the same class to be returned.
-
-    Args:
-      descriptor: The descriptor to build from.
-
-    Returns:
-      A class describing the passed in descriptor.
-    """
-    if descriptor.full_name not in self._classes:
-      descriptor_name = descriptor.name
-      if str is bytes:  # PY2
-        descriptor_name = descriptor.name.encode('ascii', 'ignore')
-      result_class = reflection.GeneratedProtocolMessageType(
-          descriptor_name,
-          (message.Message,),
-          {'DESCRIPTOR': descriptor, '__module__': None})
-          # If module not set, it wrongly points to the reflection.py module.
-      self._classes[descriptor.full_name] = result_class
-      for field in descriptor.fields:
-        if field.message_type:
-          self.GetPrototype(field.message_type)
-      for extension in result_class.DESCRIPTOR.extensions:
-        if extension.containing_type.full_name not in self._classes:
-          self.GetPrototype(extension.containing_type)
-        extended_class = self._classes[extension.containing_type.full_name]
-        extended_class.RegisterExtension(extension)
-    return self._classes[descriptor.full_name]
-
-  def GetMessages(self, files):
-    """Gets all the messages from a specified file.
-
-    This will find and resolve dependencies, failing if the descriptor
-    pool cannot satisfy them.
-
-    Args:
-      files: The file names to extract messages from.
-
-    Returns:
-      A dictionary mapping proto names to the message classes. This will include
-      any dependent messages as well as any messages defined in the same file as
-      a specified message.
-    """
-    result = {}
-    for file_name in files:
-      file_desc = self.pool.FindFileByName(file_name)
-      for name, msg in file_desc.message_types_by_name.items():
-        if file_desc.package:
-          full_name = '.'.join([file_desc.package, name])
-        else:
-          full_name = msg.name
-        result[full_name] = self.GetPrototype(
-            self.pool.FindMessageTypeByName(full_name))
-
-      # While the extension FieldDescriptors are created by the descriptor pool,
-      # the python classes created in the factory need them to be registered
-      # explicitly, which is done below.
-      #
-      # The call to RegisterExtension will specifically check if the
-      # extension was already registered on the object and either
-      # ignore the registration if the original was the same, or raise
-      # an error if they were different.
-
-      for name, extension in file_desc.extensions_by_name.items():
-        if extension.containing_type.full_name not in self._classes:
-          self.GetPrototype(extension.containing_type)
-        extended_class = self._classes[extension.containing_type.full_name]
-        extended_class.RegisterExtension(extension)
-    return result
-
-
-_FACTORY = MessageFactory()
-
-
-def GetMessages(file_protos):
-  """Builds a dictionary of all the messages available in a set of files.
-
-  Args:
-    file_protos: A sequence of file protos to build messages out of.
-
-  Returns:
-    A dictionary mapping proto names to the message classes. This will include
-    any dependent messages as well as any messages defined in the same file as
-    a specified message.
-  """
-  for file_proto in file_protos:
-    _FACTORY.pool.Add(file_proto)
-  return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
diff --git a/tools/swarming_client/third_party/google/protobuf/proto_builder.py b/tools/swarming_client/third_party/google/protobuf/proto_builder.py
deleted file mode 100644
index 736caed..0000000
--- a/tools/swarming_client/third_party/google/protobuf/proto_builder.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Dynamic Protobuf class creator."""
-
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict  #PY26
-import hashlib
-import os
-
-from google.protobuf import descriptor_pb2
-from google.protobuf import message_factory
-
-
-def _GetMessageFromFactory(factory, full_name):
-  """Get a proto class from the MessageFactory by name.
-
-  Args:
-    factory: a MessageFactory instance.
-    full_name: str, the fully qualified name of the proto type.
-  Returns:
-    A class, for the type identified by full_name.
-  Raises:
-    KeyError, if the proto is not found in the factory's descriptor pool.
-  """
-  proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
-  proto_cls = factory.GetPrototype(proto_descriptor)
-  return proto_cls
-
-
-def MakeSimpleProtoClass(fields, full_name=None, pool=None):
-  """Create a Protobuf class whose fields are basic types.
-
-  Note: this doesn't validate field names!
-
-  Args:
-    fields: dict of {name: field_type} mappings for each field in the proto. If
-        this is an OrderedDict the order will be maintained, otherwise the
-        fields will be sorted by name.
-    full_name: optional str, the fully-qualified name of the proto type.
-    pool: optional DescriptorPool instance.
-  Returns:
-    a class, the new protobuf class with a FileDescriptor.
-  """
-  factory = message_factory.MessageFactory(pool=pool)
-
-  if full_name is not None:
-    try:
-      proto_cls = _GetMessageFromFactory(factory, full_name)
-      return proto_cls
-    except KeyError:
-      # The factory's DescriptorPool doesn't know about this class yet.
-      pass
-
-  # Get a list of (name, field_type) tuples from the fields dict. If fields was
-  # an OrderedDict we keep the order, but otherwise we sort the field to ensure
-  # consistent ordering.
-  field_items = fields.items()
-  if not isinstance(fields, OrderedDict):
-    field_items = sorted(field_items)
-
-  # Use a consistent file name that is unlikely to conflict with any imported
-  # proto files.
-  fields_hash = hashlib.sha1()
-  for f_name, f_type in field_items:
-    fields_hash.update(f_name.encode('utf-8'))
-    fields_hash.update(str(f_type).encode('utf-8'))
-  proto_file_name = fields_hash.hexdigest() + '.proto'
-
-  # If the proto is anonymous, use the same hash to name it.
-  if full_name is None:
-    full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
-                 fields_hash.hexdigest())
-    try:
-      proto_cls = _GetMessageFromFactory(factory, full_name)
-      return proto_cls
-    except KeyError:
-      # The factory's DescriptorPool doesn't know about this class yet.
-      pass
-
-  # This is the first time we see this proto: add a new descriptor to the pool.
-  factory.pool.Add(
-      _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
-  return _GetMessageFromFactory(factory, full_name)
-
-
-def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
-  """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
-  package, name = full_name.rsplit('.', 1)
-  file_proto = descriptor_pb2.FileDescriptorProto()
-  file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
-  file_proto.package = package
-  desc_proto = file_proto.message_type.add()
-  desc_proto.name = name
-  for f_number, (f_name, f_type) in enumerate(field_items, 1):
-    field_proto = desc_proto.field.add()
-    field_proto.name = f_name
-    field_proto.number = f_number
-    field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
-    field_proto.type = f_type
-  return file_proto
diff --git a/tools/swarming_client/third_party/google/protobuf/pyext/__init__.py b/tools/swarming_client/third_party/google/protobuf/pyext/__init__.py
deleted file mode 100644
index 5585614..0000000
--- a/tools/swarming_client/third_party/google/protobuf/pyext/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
-  __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
-  __path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/tools/swarming_client/third_party/google/protobuf/pyext/cpp_message.py b/tools/swarming_client/third_party/google/protobuf/pyext/cpp_message.py
deleted file mode 100644
index fc8eb32..0000000
--- a/tools/swarming_client/third_party/google/protobuf/pyext/cpp_message.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Protocol message implementation hooks for C++ implementation.
-
-Contains helper functions used to create protocol message classes from
-Descriptor objects at runtime backed by the protocol buffer C++ API.
-"""
-
-__author__ = 'tibell@google.com (Johan Tibell)'
-
-from google.protobuf.pyext import _message
-
-
-class GeneratedProtocolMessageType(_message.MessageMeta):
-
-  """Metaclass for protocol message classes created at runtime from Descriptors.
-
-  The protocol compiler currently uses this metaclass to create protocol
-  message classes at runtime.  Clients can also manually create their own
-  classes at runtime, as in this example:
-
-  mydescriptor = Descriptor(.....)
-  factory = symbol_database.Default()
-  factory.pool.AddDescriptor(mydescriptor)
-  MyProtoClass = factory.GetPrototype(mydescriptor)
-  myproto_instance = MyProtoClass()
-  myproto.foo_field = 23
-  ...
-
-  The above example will not work for nested types. If you wish to include them,
-  use reflection.MakeClass() instead of manually instantiating the class in
-  order to create the appropriate class structure.
-  """
-
-  # Must be consistent with the protocol-compiler code in
-  # proto2/compiler/internal/generator.*.
-  _DESCRIPTOR_KEY = 'DESCRIPTOR'
diff --git a/tools/swarming_client/third_party/google/protobuf/pyext/python_pb2.py b/tools/swarming_client/third_party/google/protobuf/pyext/python_pb2.py
deleted file mode 100644
index 3750682..0000000
--- a/tools/swarming_client/third_party/google/protobuf/pyext/python_pb2.py
+++ /dev/null
@@ -1,234 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/pyext/python.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/pyext/python.proto',
-  package='google.protobuf.python.internal',
-  syntax='proto2',
-  serialized_pb=_b('\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 1
-optional_nested_message_extension = _descriptor.FieldDescriptor(
-  name='optional_nested_message_extension', full_name='google.protobuf.python.internal.optional_nested_message_extension', index=0,
-  number=1, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 2
-repeated_nested_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_nested_message_extension', full_name='google.protobuf.python.internal.repeated_nested_message_extension', index=1,
-  number=2, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.bb', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cc', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.cc', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=300,
-  serialized_end=388,
-)
-
-_TESTALLTYPES = _descriptor.Descriptor(
-  name='TestAllTypes',
-  full_name='google.protobuf.python.internal.TestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.repeated_nested_message', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.optional_nested_message', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='google.protobuf.python.internal.TestAllTypes.optional_int32', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTALLTYPES_NESTEDMESSAGE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=72,
-  serialized_end=388,
-)
-
-
-_FOREIGNMESSAGE = _descriptor.Descriptor(
-  name='ForeignMessage',
-  full_name='google.protobuf.python.internal.ForeignMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='c', full_name='google.protobuf.python.internal.ForeignMessage.c', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='d', full_name='google.protobuf.python.internal.ForeignMessage.d', index=1,
-      number=2, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=390,
-  serialized_end=428,
-)
-
-
-_TESTALLEXTENSIONS = _descriptor.Descriptor(
-  name='TestAllExtensions',
-  full_name='google.protobuf.python.internal.TestAllExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=430,
-  serialized_end=459,
-)
-
-_TESTALLTYPES_NESTEDMESSAGE.fields_by_name['cc'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES
-_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES
-DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
-DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS
-DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension
-DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension
-
-TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.pyext.python_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTALLTYPES,
-  __module__ = 'google.protobuf.pyext.python_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes)
-  ))
-_sym_db.RegisterMessage(TestAllTypes)
-_sym_db.RegisterMessage(TestAllTypes.NestedMessage)
-
-ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOREIGNMESSAGE,
-  __module__ = 'google.protobuf.pyext.python_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.ForeignMessage)
-  ))
-_sym_db.RegisterMessage(ForeignMessage)
-
-TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTALLEXTENSIONS,
-  __module__ = 'google.protobuf.pyext.python_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllExtensions)
-  ))
-_sym_db.RegisterMessage(TestAllExtensions)
-
-optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(optional_nested_message_extension)
-repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(repeated_nested_message_extension)
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/reflection.py b/tools/swarming_client/third_party/google/protobuf/reflection.py
deleted file mode 100644
index 51c8332..0000000
--- a/tools/swarming_client/third_party/google/protobuf/reflection.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This code is meant to work on Python 2.4 and above only.
-
-"""Contains a metaclass and helper functions used to create
-protocol message classes from Descriptor objects at runtime.
-
-Recall that a metaclass is the "type" of a class.
-(A class is to a metaclass what an instance is to a class.)
-
-In this case, we use the GeneratedProtocolMessageType metaclass
-to inject all the useful functionality into the classes
-output by the protocol compiler at compile-time.
-
-The upshot of all this is that the real implementation
-details for ALL pure-Python protocol buffers are *here in
-this file*.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-
-from google.protobuf.internal import api_implementation
-from google.protobuf import message
-
-
-if api_implementation.Type() == 'cpp':
-  from google.protobuf.pyext import cpp_message as message_impl
-else:
-  from google.protobuf.internal import python_message as message_impl
-
-# The type of all Message classes.
-# Part of the public interface, but normally only used by message factories.
-GeneratedProtocolMessageType = message_impl.GeneratedProtocolMessageType
-
-
-def ParseMessage(descriptor, byte_str):
-  """Generate a new Message instance from this Descriptor and a byte string.
-
-  Args:
-    descriptor: Protobuf Descriptor object
-    byte_str: Serialized protocol buffer byte string
-
-  Returns:
-    Newly created protobuf Message object.
-  """
-  result_class = MakeClass(descriptor)
-  new_msg = result_class()
-  new_msg.ParseFromString(byte_str)
-  return new_msg
-
-
-def MakeClass(descriptor):
-  """Construct a class object for a protobuf described by descriptor.
-
-  Composite descriptors are handled by defining the new class as a member of the
-  parent class, recursing as deep as necessary.
-  This is the dynamic equivalent to:
-
-  class Parent(message.Message):
-    __metaclass__ = GeneratedProtocolMessageType
-    DESCRIPTOR = descriptor
-    class Child(message.Message):
-      __metaclass__ = GeneratedProtocolMessageType
-      DESCRIPTOR = descriptor.nested_types[0]
-
-  Sample usage:
-    file_descriptor = descriptor_pb2.FileDescriptorProto()
-    file_descriptor.ParseFromString(proto2_string)
-    msg_descriptor = descriptor.MakeDescriptor(file_descriptor.message_type[0])
-    msg_class = reflection.MakeClass(msg_descriptor)
-    msg = msg_class()
-
-  Args:
-    descriptor: A descriptor.Descriptor object describing the protobuf.
-  Returns:
-    The Message class object described by the descriptor.
-  """
-  attributes = {}
-  for name, nested_type in descriptor.nested_types_by_name.items():
-    attributes[name] = MakeClass(nested_type)
-
-  attributes[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor
-
-  return GeneratedProtocolMessageType(str(descriptor.name), (message.Message,),
-                                      attributes)
diff --git a/tools/swarming_client/third_party/google/protobuf/service.py b/tools/swarming_client/third_party/google/protobuf/service.py
deleted file mode 100644
index 9e00de7..0000000
--- a/tools/swarming_client/third_party/google/protobuf/service.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""DEPRECATED:  Declares the RPC service interfaces.
-
-This module declares the abstract interfaces underlying proto2 RPC
-services.  These are intended to be independent of any particular RPC
-implementation, so that proto2 services can be used on top of a variety
-of implementations.  Starting with version 2.3.0, RPC implementations should
-not try to build on these, but should instead provide code generator plugins
-which generate code specific to the particular RPC implementation.  This way
-the generated code can be more appropriate for the implementation in use
-and can avoid unnecessary layers of indirection.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-
-class RpcException(Exception):
-  """Exception raised on failed blocking RPC method call."""
-  pass
-
-
-class Service(object):
-
-  """Abstract base interface for protocol-buffer-based RPC services.
-
-  Services themselves are abstract classes (implemented either by servers or as
-  stubs), but they subclass this base interface. The methods of this
-  interface can be used to call the methods of the service without knowing
-  its exact type at compile time (analogous to the Message interface).
-  """
-
-  def GetDescriptor():
-    """Retrieves this service's descriptor."""
-    raise NotImplementedError
-
-  def CallMethod(self, method_descriptor, rpc_controller,
-                 request, done):
-    """Calls a method of the service specified by method_descriptor.
-
-    If "done" is None then the call is blocking and the response
-    message will be returned directly.  Otherwise the call is asynchronous
-    and "done" will later be called with the response value.
-
-    In the blocking case, RpcException will be raised on error.
-
-    Preconditions:
-    * method_descriptor.service == GetDescriptor
-    * request is of the exact same class as returned by
-      GetRequestClass(method).
-    * After the call has started, the request must not be modified.
-    * "rpc_controller" is of the correct type for the RPC implementation being
-      used by this Service.  For stubs, the "correct type" depends on the
-      RpcChannel which the stub is using.
-
-    Postconditions:
-    * "done" will be called when the method is complete.  This may be
-      before CallMethod() returns or it may be at some point in the future.
-    * If the RPC failed, the response value passed to "done" will be None.
-      Further details about the failure can be found by querying the
-      RpcController.
-    """
-    raise NotImplementedError
-
-  def GetRequestClass(self, method_descriptor):
-    """Returns the class of the request message for the specified method.
-
-    CallMethod() requires that the request is of a particular subclass of
-    Message. GetRequestClass() gets the default instance of this required
-    type.
-
-    Example:
-      method = service.GetDescriptor().FindMethodByName("Foo")
-      request = stub.GetRequestClass(method)()
-      request.ParseFromString(input)
-      service.CallMethod(method, request, callback)
-    """
-    raise NotImplementedError
-
-  def GetResponseClass(self, method_descriptor):
-    """Returns the class of the response message for the specified method.
-
-    This method isn't really needed, as the RpcChannel's CallMethod constructs
-    the response protocol message. It's provided anyway in case it is useful
-    for the caller to know the response type in advance.
-    """
-    raise NotImplementedError
-
-
-class RpcController(object):
-
-  """An RpcController mediates a single method call.
-
-  The primary purpose of the controller is to provide a way to manipulate
-  settings specific to the RPC implementation and to find out about RPC-level
-  errors. The methods provided by the RpcController interface are intended
-  to be a "least common denominator" set of features which we expect all
-  implementations to support.  Specific implementations may provide more
-  advanced features (e.g. deadline propagation).
-  """
-
-  # Client-side methods below
-
-  def Reset(self):
-    """Resets the RpcController to its initial state.
-
-    After the RpcController has been reset, it may be reused in
-    a new call. Must not be called while an RPC is in progress.
-    """
-    raise NotImplementedError
-
-  def Failed(self):
-    """Returns true if the call failed.
-
-    After a call has finished, returns true if the call failed.  The possible
-    reasons for failure depend on the RPC implementation.  Failed() must not
-    be called before a call has finished.  If Failed() returns true, the
-    contents of the response message are undefined.
-    """
-    raise NotImplementedError
-
-  def ErrorText(self):
-    """If Failed is true, returns a human-readable description of the error."""
-    raise NotImplementedError
-
-  def StartCancel(self):
-    """Initiate cancellation.
-
-    Advises the RPC system that the caller desires that the RPC call be
-    canceled.  The RPC system may cancel it immediately, may wait awhile and
-    then cancel it, or may not even cancel the call at all.  If the call is
-    canceled, the "done" callback will still be called and the RpcController
-    will indicate that the call failed at that time.
-    """
-    raise NotImplementedError
-
-  # Server-side methods below
-
-  def SetFailed(self, reason):
-    """Sets a failure reason.
-
-    Causes Failed() to return true on the client side.  "reason" will be
-    incorporated into the message returned by ErrorText().  If you find
-    you need to return machine-readable information about failures, you
-    should incorporate it into your response protocol buffer and should
-    NOT call SetFailed().
-    """
-    raise NotImplementedError
-
-  def IsCanceled(self):
-    """Checks if the client cancelled the RPC.
-
-    If true, indicates that the client canceled the RPC, so the server may
-    as well give up on replying to it.  The server should still call the
-    final "done" callback.
-    """
-    raise NotImplementedError
-
-  def NotifyOnCancel(self, callback):
-    """Sets a callback to invoke on cancel.
-
-    Asks that the given callback be called when the RPC is canceled.  The
-    callback will always be called exactly once.  If the RPC completes without
-    being canceled, the callback will be called after completion.  If the RPC
-    has already been canceled when NotifyOnCancel() is called, the callback
-    will be called immediately.
-
-    NotifyOnCancel() must be called no more than once per request.
-    """
-    raise NotImplementedError
-
-
-class RpcChannel(object):
-
-  """Abstract interface for an RPC channel.
-
-  An RpcChannel represents a communication line to a service which can be used
-  to call that service's methods.  The service may be running on another
-  machine. Normally, you should not use an RpcChannel directly, but instead
-  construct a stub {@link Service} wrapping it.
-
-  Example:
-    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
-    RpcController controller = rpcImpl.Controller()
-    MyService service = MyService_Stub(channel)
-    service.MyMethod(controller, request, callback)
-  """
-
-  def CallMethod(self, method_descriptor, rpc_controller,
-                 request, response_class, done):
-    """Calls the method identified by the descriptor.
-
-    Call the given method of the remote service.  The signature of this
-    procedure looks the same as Service.CallMethod(), but the requirements
-    are less strict in one important way:  the request object doesn't have to
-    be of any specific class as long as its descriptor is method.input_type.
-    """
-    raise NotImplementedError
diff --git a/tools/swarming_client/third_party/google/protobuf/service_reflection.py b/tools/swarming_client/third_party/google/protobuf/service_reflection.py
deleted file mode 100644
index 1c3636a..0000000
--- a/tools/swarming_client/third_party/google/protobuf/service_reflection.py
+++ /dev/null
@@ -1,284 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains metaclasses used to create protocol service and service stub
-classes from ServiceDescriptor objects at runtime.
-
-The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
-inject all useful functionality into the classes output by the protocol
-compiler at compile-time.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-
-class GeneratedServiceType(type):
-
-  """Metaclass for service classes created at runtime from ServiceDescriptors.
-
-  Implementations for all methods described in the Service class are added here
-  by this class. We also create properties to allow getting/setting all fields
-  in the protocol message.
-
-  The protocol compiler currently uses this metaclass to create protocol service
-  classes at runtime. Clients can also manually create their own classes at
-  runtime, as in this example:
-
-  mydescriptor = ServiceDescriptor(.....)
-  class MyProtoService(service.Service):
-    __metaclass__ = GeneratedServiceType
-    DESCRIPTOR = mydescriptor
-  myservice_instance = MyProtoService()
-  ...
-  """
-
-  _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
-  def __init__(cls, name, bases, dictionary):
-    """Creates a message service class.
-
-    Args:
-      name: Name of the class (ignored, but required by the metaclass
-        protocol).
-      bases: Base classes of the class being constructed.
-      dictionary: The class dictionary of the class being constructed.
-        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
-        describing this protocol service type.
-    """
-    # Don't do anything if this class doesn't have a descriptor. This happens
-    # when a service class is subclassed.
-    if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
-      return
-    descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
-    service_builder = _ServiceBuilder(descriptor)
-    service_builder.BuildService(cls)
-
-
-class GeneratedServiceStubType(GeneratedServiceType):
-
-  """Metaclass for service stubs created at runtime from ServiceDescriptors.
-
-  This class has similar responsibilities as GeneratedServiceType, except that
-  it creates the service stub classes.
-  """
-
-  _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
-  def __init__(cls, name, bases, dictionary):
-    """Creates a message service stub class.
-
-    Args:
-      name: Name of the class (ignored here).
-      bases: Base classes of the class being constructed.
-      dictionary: The class dictionary of the class being constructed.
-        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
-        describing this protocol service type.
-    """
-    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
-    # Don't do anything if this class doesn't have a descriptor. This happens
-    # when a service stub is subclassed.
-    if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
-      return
-    descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
-    service_stub_builder = _ServiceStubBuilder(descriptor)
-    service_stub_builder.BuildServiceStub(cls)
-
-
-class _ServiceBuilder(object):
-
-  """This class constructs a protocol service class using a service descriptor.
-
-  Given a service descriptor, this class constructs a class that represents
-  the specified service descriptor. One service builder instance constructs
-  exactly one service class. That means all instances of that class share the
-  same builder.
-  """
-
-  def __init__(self, service_descriptor):
-    """Initializes an instance of the service class builder.
-
-    Args:
-      service_descriptor: ServiceDescriptor to use when constructing the
-        service class.
-    """
-    self.descriptor = service_descriptor
-
-  def BuildService(self, cls):
-    """Constructs the service class.
-
-    Args:
-      cls: The class that will be constructed.
-    """
-
-    # CallMethod needs to operate with an instance of the Service class. This
-    # internal wrapper function exists only to be able to pass the service
-    # instance to the method that does the real CallMethod work.
-    def _WrapCallMethod(srvc, method_descriptor,
-                        rpc_controller, request, callback):
-      return self._CallMethod(srvc, method_descriptor,
-                       rpc_controller, request, callback)
-    self.cls = cls
-    cls.CallMethod = _WrapCallMethod
-    cls.GetDescriptor = staticmethod(lambda: self.descriptor)
-    cls.GetDescriptor.__doc__ = "Returns the service descriptor."
-    cls.GetRequestClass = self._GetRequestClass
-    cls.GetResponseClass = self._GetResponseClass
-    for method in self.descriptor.methods:
-      setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
-
-  def _CallMethod(self, srvc, method_descriptor,
-                  rpc_controller, request, callback):
-    """Calls the method described by a given method descriptor.
-
-    Args:
-      srvc: Instance of the service for which this method is called.
-      method_descriptor: Descriptor that represent the method to call.
-      rpc_controller: RPC controller to use for this method's execution.
-      request: Request protocol message.
-      callback: A callback to invoke after the method has completed.
-    """
-    if method_descriptor.containing_service != self.descriptor:
-      raise RuntimeError(
-          'CallMethod() given method descriptor for wrong service type.')
-    method = getattr(srvc, method_descriptor.name)
-    return method(rpc_controller, request, callback)
-
-  def _GetRequestClass(self, method_descriptor):
-    """Returns the class of the request protocol message.
-
-    Args:
-      method_descriptor: Descriptor of the method for which to return the
-        request protocol message class.
-
-    Returns:
-      A class that represents the input protocol message of the specified
-      method.
-    """
-    if method_descriptor.containing_service != self.descriptor:
-      raise RuntimeError(
-          'GetRequestClass() given method descriptor for wrong service type.')
-    return method_descriptor.input_type._concrete_class
-
-  def _GetResponseClass(self, method_descriptor):
-    """Returns the class of the response protocol message.
-
-    Args:
-      method_descriptor: Descriptor of the method for which to return the
-        response protocol message class.
-
-    Returns:
-      A class that represents the output protocol message of the specified
-      method.
-    """
-    if method_descriptor.containing_service != self.descriptor:
-      raise RuntimeError(
-          'GetResponseClass() given method descriptor for wrong service type.')
-    return method_descriptor.output_type._concrete_class
-
-  def _GenerateNonImplementedMethod(self, method):
-    """Generates and returns a method that can be set for a service methods.
-
-    Args:
-      method: Descriptor of the service method for which a method is to be
-        generated.
-
-    Returns:
-      A method that can be added to the service class.
-    """
-    return lambda inst, rpc_controller, request, callback: (
-        self._NonImplementedMethod(method.name, rpc_controller, callback))
-
-  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
-    """The body of all methods in the generated service class.
-
-    Args:
-      method_name: Name of the method being executed.
-      rpc_controller: RPC controller used to execute this method.
-      callback: A callback which will be invoked when the method finishes.
-    """
-    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
-    callback(None)
-
-
-class _ServiceStubBuilder(object):
-
-  """Constructs a protocol service stub class using a service descriptor.
-
-  Given a service descriptor, this class constructs a suitable stub class.
-  A stub is just a type-safe wrapper around an RpcChannel which emulates a
-  local implementation of the service.
-
-  One service stub builder instance constructs exactly one class. It means all
-  instances of that class share the same service stub builder.
-  """
-
-  def __init__(self, service_descriptor):
-    """Initializes an instance of the service stub class builder.
-
-    Args:
-      service_descriptor: ServiceDescriptor to use when constructing the
-        stub class.
-    """
-    self.descriptor = service_descriptor
-
-  def BuildServiceStub(self, cls):
-    """Constructs the stub class.
-
-    Args:
-      cls: The class that will be constructed.
-    """
-
-    def _ServiceStubInit(stub, rpc_channel):
-      stub.rpc_channel = rpc_channel
-    self.cls = cls
-    cls.__init__ = _ServiceStubInit
-    for method in self.descriptor.methods:
-      setattr(cls, method.name, self._GenerateStubMethod(method))
-
-  def _GenerateStubMethod(self, method):
-    return (lambda inst, rpc_controller, request, callback=None:
-        self._StubMethod(inst, method, rpc_controller, request, callback))
-
-  def _StubMethod(self, stub, method_descriptor,
-                  rpc_controller, request, callback):
-    """The body of all service methods in the generated stub class.
-
-    Args:
-      stub: Stub instance.
-      method_descriptor: Descriptor of the invoked method.
-      rpc_controller: Rpc controller to execute the method.
-      request: Request protocol message.
-      callback: A callback to execute when the method finishes.
-    Returns:
-      Response message (in case of blocking call).
-    """
-    return stub.rpc_channel.CallMethod(
-        method_descriptor, rpc_controller, request,
-        method_descriptor.output_type._concrete_class, callback)
diff --git a/tools/swarming_client/third_party/google/protobuf/source_context_pb2.py b/tools/swarming_client/third_party/google/protobuf/source_context_pb2.py
deleted file mode 100644
index 6527294..0000000
--- a/tools/swarming_client/third_party/google/protobuf/source_context_pb2.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/source_context.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/source_context.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tBU\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_SOURCECONTEXT = _descriptor.Descriptor(
-  name='SourceContext',
-  full_name='google.protobuf.SourceContext',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='file_name', full_name='google.protobuf.SourceContext.file_name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=57,
-  serialized_end=91,
-)
-
-DESCRIPTOR.message_types_by_name['SourceContext'] = _SOURCECONTEXT
-
-SourceContext = _reflection.GeneratedProtocolMessageType('SourceContext', (_message.Message,), dict(
-  DESCRIPTOR = _SOURCECONTEXT,
-  __module__ = 'google.protobuf.source_context_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.SourceContext)
-  ))
-_sym_db.RegisterMessage(SourceContext)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\022SourceContextProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/struct_pb2.py b/tools/swarming_client/third_party/google/protobuf/struct_pb2.py
deleted file mode 100644
index b38975a..0000000
--- a/tools/swarming_client/third_party/google/protobuf/struct_pb2.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/struct.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/struct.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x81\x01\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z1github.com/golang/protobuf/ptypes/struct;structpb\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_NULLVALUE = _descriptor.EnumDescriptor(
-  name='NullValue',
-  full_name='google.protobuf.NullValue',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NULL_VALUE', index=0, number=0,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=474,
-  serialized_end=501,
-)
-_sym_db.RegisterEnumDescriptor(_NULLVALUE)
-
-NullValue = enum_type_wrapper.EnumTypeWrapper(_NULLVALUE)
-NULL_VALUE = 0
-
-
-
-_STRUCT_FIELDSENTRY = _descriptor.Descriptor(
-  name='FieldsEntry',
-  full_name='google.protobuf.Struct.FieldsEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='google.protobuf.Struct.FieldsEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.Struct.FieldsEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=182,
-)
-
-_STRUCT = _descriptor.Descriptor(
-  name='Struct',
-  full_name='google.protobuf.Struct',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='fields', full_name='google.protobuf.Struct.fields', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_STRUCT_FIELDSENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=50,
-  serialized_end=182,
-)
-
-
-_VALUE = _descriptor.Descriptor(
-  name='Value',
-  full_name='google.protobuf.Value',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='null_value', full_name='google.protobuf.Value.null_value', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='number_value', full_name='google.protobuf.Value.number_value', index=1,
-      number=2, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='google.protobuf.Value.string_value', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bool_value', full_name='google.protobuf.Value.bool_value', index=3,
-      number=4, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='struct_value', full_name='google.protobuf.Value.struct_value', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='list_value', full_name='google.protobuf.Value.list_value', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='kind', full_name='google.protobuf.Value.kind',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=185,
-  serialized_end=419,
-)
-
-
-_LISTVALUE = _descriptor.Descriptor(
-  name='ListValue',
-  full_name='google.protobuf.ListValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='values', full_name='google.protobuf.ListValue.values', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=421,
-  serialized_end=472,
-)
-
-_STRUCT_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE
-_STRUCT_FIELDSENTRY.containing_type = _STRUCT
-_STRUCT.fields_by_name['fields'].message_type = _STRUCT_FIELDSENTRY
-_VALUE.fields_by_name['null_value'].enum_type = _NULLVALUE
-_VALUE.fields_by_name['struct_value'].message_type = _STRUCT
-_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['null_value'])
-_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['number_value'])
-_VALUE.fields_by_name['number_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['string_value'])
-_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['bool_value'])
-_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['struct_value'])
-_VALUE.fields_by_name['struct_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_VALUE.oneofs_by_name['kind'].fields.append(
-  _VALUE.fields_by_name['list_value'])
-_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
-_LISTVALUE.fields_by_name['values'].message_type = _VALUE
-DESCRIPTOR.message_types_by_name['Struct'] = _STRUCT
-DESCRIPTOR.message_types_by_name['Value'] = _VALUE
-DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE
-DESCRIPTOR.enum_types_by_name['NullValue'] = _NULLVALUE
-
-Struct = _reflection.GeneratedProtocolMessageType('Struct', (_message.Message,), dict(
-
-  FieldsEntry = _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), dict(
-    DESCRIPTOR = _STRUCT_FIELDSENTRY,
-    __module__ = 'google.protobuf.struct_pb2'
-    # @@protoc_insertion_point(class_scope:google.protobuf.Struct.FieldsEntry)
-    ))
-  ,
-  DESCRIPTOR = _STRUCT,
-  __module__ = 'google.protobuf.struct_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Struct)
-  ))
-_sym_db.RegisterMessage(Struct)
-_sym_db.RegisterMessage(Struct.FieldsEntry)
-
-Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
-  DESCRIPTOR = _VALUE,
-  __module__ = 'google.protobuf.struct_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Value)
-  ))
-_sym_db.RegisterMessage(Value)
-
-ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict(
-  DESCRIPTOR = _LISTVALUE,
-  __module__ = 'google.protobuf.struct_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.ListValue)
-  ))
-_sym_db.RegisterMessage(ListValue)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\013StructProtoP\001Z1github.com/golang/protobuf/ptypes/struct;structpb\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-_STRUCT_FIELDSENTRY.has_options = True
-_STRUCT_FIELDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/symbol_database.py b/tools/swarming_client/third_party/google/protobuf/symbol_database.py
deleted file mode 100644
index aa466ab..0000000
--- a/tools/swarming_client/third_party/google/protobuf/symbol_database.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""A database of Python protocol buffer generated symbols.
-
-SymbolDatabase is the MessageFactory for messages generated at compile time,
-and makes it easy to create new instances of a registered type, given only the
-type's protocol buffer symbol name.
-
-Example usage:
-
-  db = symbol_database.SymbolDatabase()
-
-  # Register symbols of interest, from one or multiple files.
-  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
-  db.RegisterMessage(my_proto_pb2.MyMessage)
-  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
-
-  # The database can be used as a MessageFactory, to generate types based on
-  # their name:
-  types = db.GetMessages(['my_proto.proto'])
-  my_message_instance = types['MyMessage']()
-
-  # The database's underlying descriptor pool can be queried, so it's not
-  # necessary to know a type's filename to be able to generate it:
-  filename = db.pool.FindFileContainingSymbol('MyMessage')
-  my_message_instance = db.GetMessages([filename])['MyMessage']()
-
-  # This functionality is also provided directly via a convenience method:
-  my_message_instance = db.GetSymbol('MyMessage')()
-"""
-
-
-from google.protobuf import descriptor_pool
-from google.protobuf import message_factory
-
-
-class SymbolDatabase(message_factory.MessageFactory):
-  """A database of Python generated symbols."""
-
-  def RegisterMessage(self, message):
-    """Registers the given message type in the local database.
-
-    Calls to GetSymbol() and GetMessages() will return messages registered here.
-
-    Args:
-      message: a message.Message, to be registered.
-
-    Returns:
-      The provided message.
-    """
-
-    desc = message.DESCRIPTOR
-    self._classes[desc.full_name] = message
-    self.pool.AddDescriptor(desc)
-    return message
-
-  def RegisterEnumDescriptor(self, enum_descriptor):
-    """Registers the given enum descriptor in the local database.
-
-    Args:
-      enum_descriptor: a descriptor.EnumDescriptor.
-
-    Returns:
-      The provided descriptor.
-    """
-    self.pool.AddEnumDescriptor(enum_descriptor)
-    return enum_descriptor
-
-  def RegisterFileDescriptor(self, file_descriptor):
-    """Registers the given file descriptor in the local database.
-
-    Args:
-      file_descriptor: a descriptor.FileDescriptor.
-
-    Returns:
-      The provided descriptor.
-    """
-    self.pool.AddFileDescriptor(file_descriptor)
-
-  def GetSymbol(self, symbol):
-    """Tries to find a symbol in the local database.
-
-    Currently, this method only returns message.Message instances; however, it
-    may be extended in the future to support other symbol types.
-
-    Args:
-      symbol: A str, a protocol buffer symbol.
-
-    Returns:
-      A Python class corresponding to the symbol.
-
-    Raises:
-      KeyError: if the symbol could not be found.
-    """
-
-    return self._classes[symbol]
-
-  def GetMessages(self, files):
-    # TODO(amauryfa): Fix the differences with MessageFactory.
-    """Gets all registered messages from a specified file.
-
-    Only messages already created and registered will be returned (this is the
-    case for imported _pb2 modules). Unlike MessageFactory, this version also
-    returns nested messages.
-
-    Args:
-      files: The file names to extract messages from.
-
-    Returns:
-      A dictionary mapping proto names to the message classes.
-
-    Raises:
-      KeyError: if a file could not be found.
-    """
-
-    def _GetAllMessageNames(desc):
-      """Walk a message Descriptor and recursively yields all message names."""
-      yield desc.full_name
-      for msg_desc in desc.nested_types:
-        for full_name in _GetAllMessageNames(msg_desc):
-          yield full_name
-
-    result = {}
-    for file_name in files:
-      file_desc = self.pool.FindFileByName(file_name)
-      for msg_desc in file_desc.message_types_by_name.values():
-        for full_name in _GetAllMessageNames(msg_desc):
-          try:
-            result[full_name] = self._classes[full_name]
-          except KeyError:
-            # This descriptor has no registered class, skip it.
-            pass
-    return result
-
-
-_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
-
-
-def Default():
-  """Returns the default SymbolDatabase."""
-  return _DEFAULT
diff --git a/tools/swarming_client/third_party/google/protobuf/text_encoding.py b/tools/swarming_client/third_party/google/protobuf/text_encoding.py
deleted file mode 100644
index 9899563..0000000
--- a/tools/swarming_client/third_party/google/protobuf/text_encoding.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Encoding related utilities."""
-import re
-
-import six
-
-# Lookup table for utf8
-_cescape_utf8_to_str = [chr(i) for i in range(0, 256)]
-_cescape_utf8_to_str[9] = r'\t'  # optional escape
-_cescape_utf8_to_str[10] = r'\n'  # optional escape
-_cescape_utf8_to_str[13] = r'\r'  # optional escape
-_cescape_utf8_to_str[39] = r"\'"  # optional escape
-
-_cescape_utf8_to_str[34] = r'\"'  # necessary escape
-_cescape_utf8_to_str[92] = r'\\'  # necessary escape
-
-# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
-_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
-                        [chr(i) for i in range(32, 127)] +
-                        [r'\%03o' % i for i in range(127, 256)])
-_cescape_byte_to_str[9] = r'\t'  # optional escape
-_cescape_byte_to_str[10] = r'\n'  # optional escape
-_cescape_byte_to_str[13] = r'\r'  # optional escape
-_cescape_byte_to_str[39] = r"\'"  # optional escape
-
-_cescape_byte_to_str[34] = r'\"'  # necessary escape
-_cescape_byte_to_str[92] = r'\\'  # necessary escape
-
-
-def CEscape(text, as_utf8):
-  """Escape a bytes string for use in an ascii protocol buffer.
-
-  text.encode('string_escape') does not seem to satisfy our needs as it
-  encodes unprintable characters using two-digit hex escapes whereas our
-  C++ unescaping function allows hex escapes to be any length.  So,
-  "\0011".encode('string_escape') ends up being "\\x011", which will be
-  decoded in C++ as a single-character string with char code 0x11.
-
-  Args:
-    text: A byte string to be escaped
-    as_utf8: Specifies if result should be returned in UTF-8 encoding
-  Returns:
-    Escaped string
-  """
-  # PY3 hack: make Ord work for str and bytes:
-  # //platforms/networking/data uses unicode here, hence basestring.
-  Ord = ord if isinstance(text, six.string_types) else lambda x: x
-  if as_utf8:
-    return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text)
-  return ''.join(_cescape_byte_to_str[Ord(c)] for c in text)
-
-
-_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
-_cescape_highbit_to_str = ([chr(i) for i in range(0, 127)] +
-                           [r'\%03o' % i for i in range(127, 256)])
-
-
-def CUnescape(text):
-  """Unescape a text string with C-style escape sequences to UTF-8 bytes."""
-
-  def ReplaceHex(m):
-    # Only replace the match if the number of leading back slashes is odd. i.e.
-    # the slash itself is not escaped.
-    if len(m.group(1)) & 1:
-      return m.group(1) + 'x0' + m.group(2)
-    return m.group(0)
-
-  # This is required because the 'string_escape' encoding doesn't
-  # allow single-digit hex escapes (like '\xf').
-  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
-
-  if str is bytes:  # PY2
-    return result.decode('string_escape')
-  result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result)
-  return (result.encode('ascii')  # Make it bytes to allow decode.
-          .decode('unicode_escape')
-          # Make it bytes again to return the proper type.
-          .encode('raw_unicode_escape'))
diff --git a/tools/swarming_client/third_party/google/protobuf/text_format.py b/tools/swarming_client/third_party/google/protobuf/text_format.py
deleted file mode 100644
index 06b79d7..0000000
--- a/tools/swarming_client/third_party/google/protobuf/text_format.py
+++ /dev/null
@@ -1,1490 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc.  All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains routines for printing protocol messages in text format.
-
-Simple usage example:
-
-  # Create a proto object and serialize it to a text proto string.
-  message = my_proto_pb2.MyMessage(foo='bar')
-  text_proto = text_format.MessageToString(message)
-
-  # Parse a text proto string.
-  message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import io
-import re
-
-import six
-
-if six.PY3:
-  long = int  # pylint: disable=redefined-builtin,invalid-name
-
-# pylint: disable=g-import-not-at-top
-from google.protobuf.internal import type_checkers
-from google.protobuf import descriptor
-from google.protobuf import text_encoding
-
-__all__ = ['MessageToString', 'PrintMessage', 'PrintField', 'PrintFieldValue',
-           'Merge']
-
-_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
-                     type_checkers.Int32ValueChecker(),
-                     type_checkers.Uint64ValueChecker(),
-                     type_checkers.Int64ValueChecker())
-_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE)
-_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
-_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
-                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
-_QUOTES = frozenset(("'", '"'))
-_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
-
-
-class Error(Exception):
-  """Top-level module error for text_format."""
-
-
-class ParseError(Error):
-  """Thrown in case of text parsing or tokenizing error."""
-
-  def __init__(self, message=None, line=None, column=None):
-    if message is not None and line is not None:
-      loc = str(line)
-      if column is not None:
-        loc += ':{0}'.format(column)
-      message = '{0} : {1}'.format(loc, message)
-    if message is not None:
-      super(ParseError, self).__init__(message)
-    else:
-      super(ParseError, self).__init__()
-    self._line = line
-    self._column = column
-
-  def GetLine(self):
-    return self._line
-
-  def GetColumn(self):
-    return self._column
-
-
-class TextWriter(object):
-
-  def __init__(self, as_utf8):
-    if six.PY2:
-      self._writer = io.BytesIO()
-    else:
-      self._writer = io.StringIO()
-
-  def write(self, val):
-    if six.PY2:
-      if isinstance(val, six.text_type):
-        val = val.encode('utf-8')
-    return self._writer.write(val)
-
-  def close(self):
-    return self._writer.close()
-
-  def getvalue(self):
-    return self._writer.getvalue()
-
-
-def MessageToString(message,
-                    as_utf8=False,
-                    as_one_line=False,
-                    pointy_brackets=False,
-                    use_index_order=False,
-                    float_format=None,
-                    use_field_number=False,
-                    descriptor_pool=None,
-                    indent=0):
-  """Convert protobuf message to text format.
-
-  Floating point values can be formatted compactly with 15 digits of
-  precision (which is the most that IEEE 754 "double" can guarantee)
-  using float_format='.15g'. To ensure that converting to text and back to a
-  proto will result in an identical value, float_format='.17g' should be used.
-
-  Args:
-    message: The protocol buffers message.
-    as_utf8: Produce text output in UTF8 format.
-    as_one_line: Don't introduce newlines between fields.
-    pointy_brackets: If True, use angle brackets instead of curly braces for
-      nesting.
-    use_index_order: If True, print fields of a proto message using the order
-      defined in source code instead of the field number. By default, use the
-      field number order.
-    float_format: If set, use this to specify floating point number formatting
-      (per the "Format Specification Mini-Language"); otherwise, str() is used.
-    use_field_number: If True, print field numbers instead of names.
-    descriptor_pool: A DescriptorPool used to resolve Any types.
-    indent: The indent level, in terms of spaces, for pretty print.
-
-  Returns:
-    A string of the text formatted protocol buffer message.
-  """
-  out = TextWriter(as_utf8)
-  printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
-                     use_index_order, float_format, use_field_number,
-                     descriptor_pool)
-  printer.PrintMessage(message)
-  result = out.getvalue()
-  out.close()
-  if as_one_line:
-    return result.rstrip()
-  return result
-
-
-def _IsMapEntry(field):
-  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
-          field.message_type.has_options and
-          field.message_type.GetOptions().map_entry)
-
-
-def PrintMessage(message,
-                 out,
-                 indent=0,
-                 as_utf8=False,
-                 as_one_line=False,
-                 pointy_brackets=False,
-                 use_index_order=False,
-                 float_format=None,
-                 use_field_number=False,
-                 descriptor_pool=None):
-  printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
-                     use_index_order, float_format, use_field_number,
-                     descriptor_pool)
-  printer.PrintMessage(message)
-
-
-def PrintField(field,
-               value,
-               out,
-               indent=0,
-               as_utf8=False,
-               as_one_line=False,
-               pointy_brackets=False,
-               use_index_order=False,
-               float_format=None):
-  """Print a single field name/value pair."""
-  printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
-                     use_index_order, float_format)
-  printer.PrintField(field, value)
-
-
-def PrintFieldValue(field,
-                    value,
-                    out,
-                    indent=0,
-                    as_utf8=False,
-                    as_one_line=False,
-                    pointy_brackets=False,
-                    use_index_order=False,
-                    float_format=None):
-  """Print a single field value (not including name)."""
-  printer = _Printer(out, indent, as_utf8, as_one_line, pointy_brackets,
-                     use_index_order, float_format)
-  printer.PrintFieldValue(field, value)
-
-
-def _BuildMessageFromTypeName(type_name, descriptor_pool):
-  """Returns a protobuf message instance.
-
-  Args:
-    type_name: Fully-qualified protobuf message type name string.
-    descriptor_pool: DescriptorPool instance.
-
-  Returns:
-    A Message instance of type matching type_name, or None if a Descriptor
-    wasn't found matching type_name.
-  """
-  # pylint: disable=g-import-not-at-top
-  from google.protobuf import message_factory
-  factory = message_factory.MessageFactory(descriptor_pool)
-  try:
-    message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
-  except KeyError:
-    return None
-  message_type = factory.GetPrototype(message_descriptor)
-  return message_type()
-
-
-class _Printer(object):
-  """Text format printer for protocol message."""
-
-  def __init__(self,
-               out,
-               indent=0,
-               as_utf8=False,
-               as_one_line=False,
-               pointy_brackets=False,
-               use_index_order=False,
-               float_format=None,
-               use_field_number=False,
-               descriptor_pool=None):
-    """Initialize the Printer.
-
-    Floating point values can be formatted compactly with 15 digits of
-    precision (which is the most that IEEE 754 "double" can guarantee)
-    using float_format='.15g'. To ensure that converting to text and back to a
-    proto will result in an identical value, float_format='.17g' should be used.
-
-    Args:
-      out: To record the text format result.
-      indent: The indent level for pretty print.
-      as_utf8: Produce text output in UTF8 format.
-      as_one_line: Don't introduce newlines between fields.
-      pointy_brackets: If True, use angle brackets instead of curly braces for
-        nesting.
-      use_index_order: If True, print fields of a proto message using the order
-        defined in source code instead of the field number. By default, use the
-        field number order.
-      float_format: If set, use this to specify floating point number formatting
-        (per the "Format Specification Mini-Language"); otherwise, str() is
-        used.
-      use_field_number: If True, print field numbers instead of names.
-      descriptor_pool: A DescriptorPool used to resolve Any types.
-    """
-    self.out = out
-    self.indent = indent
-    self.as_utf8 = as_utf8
-    self.as_one_line = as_one_line
-    self.pointy_brackets = pointy_brackets
-    self.use_index_order = use_index_order
-    self.float_format = float_format
-    self.use_field_number = use_field_number
-    self.descriptor_pool = descriptor_pool
-
-  def _TryPrintAsAnyMessage(self, message):
-    """Serializes if message is a google.protobuf.Any field."""
-    packed_message = _BuildMessageFromTypeName(message.TypeName(),
-                                               self.descriptor_pool)
-    if packed_message:
-      packed_message.MergeFromString(message.value)
-      self.out.write('%s[%s]' % (self.indent * ' ', message.type_url))
-      self._PrintMessageFieldValue(packed_message)
-      self.out.write(' ' if self.as_one_line else '\n')
-      return True
-    else:
-      return False
-
-  def PrintMessage(self, message):
-    """Convert protobuf message to text format.
-
-    Args:
-      message: The protocol buffers message.
-    """
-    if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
-        self.descriptor_pool and self._TryPrintAsAnyMessage(message)):
-      return
-    fields = message.ListFields()
-    if self.use_index_order:
-      fields.sort(key=lambda x: x[0].index)
-    for field, value in fields:
-      if _IsMapEntry(field):
-        for key in sorted(value):
-          # This is slow for maps with submessage entries because it copies the
-          # entire tree.  Unfortunately this would take significant refactoring
-          # of this file to work around.
-          #
-          # TODO(haberman): refactor and optimize if this becomes an issue.
-          entry_submsg = field.message_type._concrete_class(key=key,
-                                                            value=value[key])
-          self.PrintField(field, entry_submsg)
-      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-        for element in value:
-          self.PrintField(field, element)
-      else:
-        self.PrintField(field, value)
-
-  def PrintField(self, field, value):
-    """Print a single field name/value pair."""
-    out = self.out
-    out.write(' ' * self.indent)
-    if self.use_field_number:
-      out.write(str(field.number))
-    else:
-      if field.is_extension:
-        out.write('[')
-        if (field.containing_type.GetOptions().message_set_wire_format and
-            field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
-            field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
-          out.write(field.message_type.full_name)
-        else:
-          out.write(field.full_name)
-        out.write(']')
-      elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
-        # For groups, use the capitalized name.
-        out.write(field.message_type.name)
-      else:
-        out.write(field.name)
-
-    if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-      # The colon is optional in this case, but our cross-language golden files
-      # don't include it.
-      out.write(': ')
-
-    self.PrintFieldValue(field, value)
-    if self.as_one_line:
-      out.write(' ')
-    else:
-      out.write('\n')
-
-  def _PrintMessageFieldValue(self, value):
-    if self.pointy_brackets:
-      openb = '<'
-      closeb = '>'
-    else:
-      openb = '{'
-      closeb = '}'
-
-    if self.as_one_line:
-      self.out.write(' %s ' % openb)
-      self.PrintMessage(value)
-      self.out.write(closeb)
-    else:
-      self.out.write(' %s\n' % openb)
-      self.indent += 2
-      self.PrintMessage(value)
-      self.indent -= 2
-      self.out.write(' ' * self.indent + closeb)
-
-  def PrintFieldValue(self, field, value):
-    """Print a single field value (not including name).
-
-    For repeated fields, the value should be a single element.
-
-    Args:
-      field: The descriptor of the field to be printed.
-      value: The value of the field.
-    """
-    out = self.out
-    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-      self._PrintMessageFieldValue(value)
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
-      enum_value = field.enum_type.values_by_number.get(value, None)
-      if enum_value is not None:
-        out.write(enum_value.name)
-      else:
-        out.write(str(value))
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
-      out.write('\"')
-      if isinstance(value, six.text_type):
-        out_value = value.encode('utf-8')
-      else:
-        out_value = value
-      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
-        # We need to escape non-UTF8 chars in TYPE_BYTES field.
-        out_as_utf8 = False
-      else:
-        out_as_utf8 = self.as_utf8
-      out.write(text_encoding.CEscape(out_value, out_as_utf8))
-      out.write('\"')
-    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
-      if value:
-        out.write('true')
-      else:
-        out.write('false')
-    elif field.cpp_type in _FLOAT_TYPES and self.float_format is not None:
-      out.write('{1:{0}}'.format(self.float_format, value))
-    else:
-      out.write(str(value))
-
-
-def Parse(text,
-          message,
-          allow_unknown_extension=False,
-          allow_field_number=False):
-  """Parses a text representation of a protocol message into a message.
-
-  Args:
-    text: Message text representation.
-    message: A protocol buffer message to merge into.
-    allow_unknown_extension: if True, skip over missing extensions and keep
-      parsing
-    allow_field_number: if True, both field number and field name are allowed.
-
-  Returns:
-    The same message passed as argument.
-
-  Raises:
-    ParseError: On text parsing problems.
-  """
-  if not isinstance(text, str):
-    text = text.decode('utf-8')
-  return ParseLines(
-      text.split('\n'), message, allow_unknown_extension, allow_field_number)
-
-
-def Merge(text,
-          message,
-          allow_unknown_extension=False,
-          allow_field_number=False,
-          descriptor_pool=None):
-  """Parses a text representation of a protocol message into a message.
-
-  Like Parse(), but allows repeated values for a non-repeated field, and uses
-  the last one.
-
-  Args:
-    text: Message text representation.
-    message: A protocol buffer message to merge into.
-    allow_unknown_extension: if True, skip over missing extensions and keep
-      parsing
-    allow_field_number: if True, both field number and field name are allowed.
-    descriptor_pool: A DescriptorPool used to resolve Any types.
-
-  Returns:
-    The same message passed as argument.
-
-  Raises:
-    ParseError: On text parsing problems.
-  """
-  return MergeLines(
-      text.split('\n'),
-      message,
-      allow_unknown_extension,
-      allow_field_number,
-      descriptor_pool=descriptor_pool)
-
-
-def ParseLines(lines,
-               message,
-               allow_unknown_extension=False,
-               allow_field_number=False):
-  """Parses a text representation of a protocol message into a message.
-
-  Args:
-    lines: An iterable of lines of a message's text representation.
-    message: A protocol buffer message to merge into.
-    allow_unknown_extension: if True, skip over missing extensions and keep
-      parsing
-    allow_field_number: if True, both field number and field name are allowed.
-
-  Returns:
-    The same message passed as argument.
-
-  Raises:
-    ParseError: On text parsing problems.
-  """
-  parser = _Parser(allow_unknown_extension, allow_field_number)
-  return parser.ParseLines(lines, message)
-
-
-def MergeLines(lines,
-               message,
-               allow_unknown_extension=False,
-               allow_field_number=False,
-               descriptor_pool=None):
-  """Parses a text representation of a protocol message into a message.
-
-  Args:
-    lines: An iterable of lines of a message's text representation.
-    message: A protocol buffer message to merge into.
-    allow_unknown_extension: if True, skip over missing extensions and keep
-      parsing
-    allow_field_number: if True, both field number and field name are allowed.
-
-  Returns:
-    The same message passed as argument.
-
-  Raises:
-    ParseError: On text parsing problems.
-  """
-  parser = _Parser(allow_unknown_extension,
-                   allow_field_number,
-                   descriptor_pool=descriptor_pool)
-  return parser.MergeLines(lines, message)
-
-
-class _Parser(object):
-  """Text format parser for protocol message."""
-
-  def __init__(self,
-               allow_unknown_extension=False,
-               allow_field_number=False,
-               descriptor_pool=None):
-    self.allow_unknown_extension = allow_unknown_extension
-    self.allow_field_number = allow_field_number
-    self.descriptor_pool = descriptor_pool
-
-  def ParseFromString(self, text, message):
-    """Parses a text representation of a protocol message into a message."""
-    if not isinstance(text, str):
-      text = text.decode('utf-8')
-    return self.ParseLines(text.split('\n'), message)
-
-  def ParseLines(self, lines, message):
-    """Parses a text representation of a protocol message into a message."""
-    self._allow_multiple_scalars = False
-    self._ParseOrMerge(lines, message)
-    return message
-
-  def MergeFromString(self, text, message):
-    """Merges a text representation of a protocol message into a message."""
-    return self._MergeLines(text.split('\n'), message)
-
-  def MergeLines(self, lines, message):
-    """Merges a text representation of a protocol message into a message."""
-    self._allow_multiple_scalars = True
-    self._ParseOrMerge(lines, message)
-    return message
-
-  def _ParseOrMerge(self, lines, message):
-    """Converts a text representation of a protocol message into a message.
-
-    Args:
-      lines: Lines of a message's text representation.
-      message: A protocol buffer message to merge into.
-
-    Raises:
-      ParseError: On text parsing problems.
-    """
-    tokenizer = Tokenizer(lines)
-    while not tokenizer.AtEnd():
-      self._MergeField(tokenizer, message)
-
-  def _MergeField(self, tokenizer, message):
-    """Merges a single protocol message field into a message.
-
-    Args:
-      tokenizer: A tokenizer to parse the field name and values.
-      message: A protocol message to record the data.
-
-    Raises:
-      ParseError: In case of text parsing problems.
-    """
-    message_descriptor = message.DESCRIPTOR
-    if (hasattr(message_descriptor, 'syntax') and
-        message_descriptor.syntax == 'proto3'):
-      # Proto3 doesn't represent presence so we can't test if multiple
-      # scalars have occurred.  We have to allow them.
-      self._allow_multiple_scalars = True
-    if tokenizer.TryConsume('['):
-      name = [tokenizer.ConsumeIdentifier()]
-      while tokenizer.TryConsume('.'):
-        name.append(tokenizer.ConsumeIdentifier())
-      name = '.'.join(name)
-
-      if not message_descriptor.is_extendable:
-        raise tokenizer.ParseErrorPreviousToken(
-            'Message type "%s" does not have extensions.' %
-            message_descriptor.full_name)
-      # pylint: disable=protected-access
-      field = message.Extensions._FindExtensionByName(name)
-      # pylint: enable=protected-access
-      if not field:
-        if self.allow_unknown_extension:
-          field = None
-        else:
-          raise tokenizer.ParseErrorPreviousToken(
-              'Extension "%s" not registered.' % name)
-      elif message_descriptor != field.containing_type:
-        raise tokenizer.ParseErrorPreviousToken(
-            'Extension "%s" does not extend message type "%s".' %
-            (name, message_descriptor.full_name))
-
-      tokenizer.Consume(']')
-
-    else:
-      name = tokenizer.ConsumeIdentifierOrNumber()
-      if self.allow_field_number and name.isdigit():
-        number = ParseInteger(name, True, True)
-        field = message_descriptor.fields_by_number.get(number, None)
-        if not field and message_descriptor.is_extendable:
-          field = message.Extensions._FindExtensionByNumber(number)
-      else:
-        field = message_descriptor.fields_by_name.get(name, None)
-
-        # Group names are expected to be capitalized as they appear in the
-        # .proto file, which actually matches their type names, not their field
-        # names.
-        if not field:
-          field = message_descriptor.fields_by_name.get(name.lower(), None)
-          if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
-            field = None
-
-        if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
-            field.message_type.name != name):
-          field = None
-
-      if not field:
-        raise tokenizer.ParseErrorPreviousToken(
-            'Message type "%s" has no field named "%s".' %
-            (message_descriptor.full_name, name))
-
-    if field:
-      if not self._allow_multiple_scalars and field.containing_oneof:
-        # Check if there's a different field set in this oneof.
-        # Note that we ignore the case if the same field was set before, and we
-        # apply _allow_multiple_scalars to non-scalar fields as well.
-        which_oneof = message.WhichOneof(field.containing_oneof.name)
-        if which_oneof is not None and which_oneof != field.name:
-          raise tokenizer.ParseErrorPreviousToken(
-              'Field "%s" is specified along with field "%s", another member '
-              'of oneof "%s" for message type "%s".' %
-              (field.name, which_oneof, field.containing_oneof.name,
-               message_descriptor.full_name))
-
-      if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-        tokenizer.TryConsume(':')
-        merger = self._MergeMessageField
-      else:
-        tokenizer.Consume(':')
-        merger = self._MergeScalarField
-
-      if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
-          tokenizer.TryConsume('[')):
-        # Short repeated format, e.g. "foo: [1, 2, 3]"
-        while True:
-          merger(tokenizer, message, field)
-          if tokenizer.TryConsume(']'):
-            break
-          tokenizer.Consume(',')
-
-      else:
-        merger(tokenizer, message, field)
-
-    else:  # Proto field is unknown.
-      assert self.allow_unknown_extension
-      _SkipFieldContents(tokenizer)
-
-    # For historical reasons, fields may optionally be separated by commas or
-    # semicolons.
-    if not tokenizer.TryConsume(','):
-      tokenizer.TryConsume(';')
-
-  def _ConsumeAnyTypeUrl(self, tokenizer):
-    """Consumes a google.protobuf.Any type URL and returns the type name."""
-    # Consume "type.googleapis.com/".
-    tokenizer.ConsumeIdentifier()
-    tokenizer.Consume('.')
-    tokenizer.ConsumeIdentifier()
-    tokenizer.Consume('.')
-    tokenizer.ConsumeIdentifier()
-    tokenizer.Consume('/')
-    # Consume the fully-qualified type name.
-    name = [tokenizer.ConsumeIdentifier()]
-    while tokenizer.TryConsume('.'):
-      name.append(tokenizer.ConsumeIdentifier())
-    return '.'.join(name)
-
-  def _MergeMessageField(self, tokenizer, message, field):
-    """Merges a single scalar field into a message.
-
-    Args:
-      tokenizer: A tokenizer to parse the field value.
-      message: The message of which field is a member.
-      field: The descriptor of the field to be merged.
-
-    Raises:
-      ParseError: In case of text parsing problems.
-    """
-    is_map_entry = _IsMapEntry(field)
-
-    if tokenizer.TryConsume('<'):
-      end_token = '>'
-    else:
-      tokenizer.Consume('{')
-      end_token = '}'
-
-    if (field.message_type.full_name == _ANY_FULL_TYPE_NAME and
-        tokenizer.TryConsume('[')):
-      packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
-      tokenizer.Consume(']')
-      tokenizer.TryConsume(':')
-      if tokenizer.TryConsume('<'):
-        expanded_any_end_token = '>'
-      else:
-        tokenizer.Consume('{')
-        expanded_any_end_token = '}'
-      if not self.descriptor_pool:
-        raise ParseError('Descriptor pool required to parse expanded Any field')
-      expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
-                                                           self.descriptor_pool)
-      if not expanded_any_sub_message:
-        raise ParseError('Type %s not found in descriptor pool' %
-                         packed_type_name)
-      while not tokenizer.TryConsume(expanded_any_end_token):
-        if tokenizer.AtEnd():
-          raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
-                                                  (expanded_any_end_token,))
-        self._MergeField(tokenizer, expanded_any_sub_message)
-      if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-        any_message = getattr(message, field.name).add()
-      else:
-        any_message = getattr(message, field.name)
-      any_message.Pack(expanded_any_sub_message)
-    elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-      if field.is_extension:
-        sub_message = message.Extensions[field].add()
-      elif is_map_entry:
-        # pylint: disable=protected-access
-        sub_message = field.message_type._concrete_class()
-      else:
-        sub_message = getattr(message, field.name).add()
-    else:
-      if field.is_extension:
-        sub_message = message.Extensions[field]
-      else:
-        sub_message = getattr(message, field.name)
-      sub_message.SetInParent()
-
-    while not tokenizer.TryConsume(end_token):
-      if tokenizer.AtEnd():
-        raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
-      self._MergeField(tokenizer, sub_message)
-
-    if is_map_entry:
-      value_cpptype = field.message_type.fields_by_name['value'].cpp_type
-      if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
-        value = getattr(message, field.name)[sub_message.key]
-        value.MergeFrom(sub_message.value)
-      else:
-        getattr(message, field.name)[sub_message.key] = sub_message.value
-
-  def _MergeScalarField(self, tokenizer, message, field):
-    """Merges a single scalar field into a message.
-
-    Args:
-      tokenizer: A tokenizer to parse the field value.
-      message: A protocol message to record the data.
-      field: The descriptor of the field to be merged.
-
-    Raises:
-      ParseError: In case of text parsing problems.
-      RuntimeError: On runtime errors.
-    """
-    _ = self.allow_unknown_extension
-    value = None
-
-    if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
-                      descriptor.FieldDescriptor.TYPE_SINT32,
-                      descriptor.FieldDescriptor.TYPE_SFIXED32):
-      value = _ConsumeInt32(tokenizer)
-    elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
-                        descriptor.FieldDescriptor.TYPE_SINT64,
-                        descriptor.FieldDescriptor.TYPE_SFIXED64):
-      value = _ConsumeInt64(tokenizer)
-    elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
-                        descriptor.FieldDescriptor.TYPE_FIXED32):
-      value = _ConsumeUint32(tokenizer)
-    elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
-                        descriptor.FieldDescriptor.TYPE_FIXED64):
-      value = _ConsumeUint64(tokenizer)
-    elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
-                        descriptor.FieldDescriptor.TYPE_DOUBLE):
-      value = tokenizer.ConsumeFloat()
-    elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
-      value = tokenizer.ConsumeBool()
-    elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
-      value = tokenizer.ConsumeString()
-    elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
-      value = tokenizer.ConsumeByteString()
-    elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
-      value = tokenizer.ConsumeEnum(field)
-    else:
-      raise RuntimeError('Unknown field type %d' % field.type)
-
-    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
-      if field.is_extension:
-        message.Extensions[field].append(value)
-      else:
-        getattr(message, field.name).append(value)
-    else:
-      if field.is_extension:
-        if not self._allow_multiple_scalars and message.HasExtension(field):
-          raise tokenizer.ParseErrorPreviousToken(
-              'Message type "%s" should not have multiple "%s" extensions.' %
-              (message.DESCRIPTOR.full_name, field.full_name))
-        else:
-          message.Extensions[field] = value
-      else:
-        if not self._allow_multiple_scalars and message.HasField(field.name):
-          raise tokenizer.ParseErrorPreviousToken(
-              'Message type "%s" should not have multiple "%s" fields.' %
-              (message.DESCRIPTOR.full_name, field.name))
-        else:
-          setattr(message, field.name, value)
-
-
-def _SkipFieldContents(tokenizer):
-  """Skips over contents (value or message) of a field.
-
-  Args:
-    tokenizer: A tokenizer to parse the field name and values.
-  """
-  # Try to guess the type of this field.
-  # If this field is not a message, there should be a ":" between the
-  # field name and the field value and also the field value should not
-  # start with "{" or "<" which indicates the beginning of a message body.
-  # If there is no ":" or there is a "{" or "<" after ":", this field has
-  # to be a message or the input is ill-formed.
-  if tokenizer.TryConsume(':') and not tokenizer.LookingAt(
-      '{') and not tokenizer.LookingAt('<'):
-    _SkipFieldValue(tokenizer)
-  else:
-    _SkipFieldMessage(tokenizer)
-
-
-def _SkipField(tokenizer):
-  """Skips over a complete field (name and value/message).
-
-  Args:
-    tokenizer: A tokenizer to parse the field name and values.
-  """
-  if tokenizer.TryConsume('['):
-    # Consume extension name.
-    tokenizer.ConsumeIdentifier()
-    while tokenizer.TryConsume('.'):
-      tokenizer.ConsumeIdentifier()
-    tokenizer.Consume(']')
-  else:
-    tokenizer.ConsumeIdentifier()
-
-  _SkipFieldContents(tokenizer)
-
-  # For historical reasons, fields may optionally be separated by commas or
-  # semicolons.
-  if not tokenizer.TryConsume(','):
-    tokenizer.TryConsume(';')
-
-
-def _SkipFieldMessage(tokenizer):
-  """Skips over a field message.
-
-  Args:
-    tokenizer: A tokenizer to parse the field name and values.
-  """
-
-  if tokenizer.TryConsume('<'):
-    delimiter = '>'
-  else:
-    tokenizer.Consume('{')
-    delimiter = '}'
-
-  while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
-    _SkipField(tokenizer)
-
-  tokenizer.Consume(delimiter)
-
-
-def _SkipFieldValue(tokenizer):
-  """Skips over a field value.
-
-  Args:
-    tokenizer: A tokenizer to parse the field name and values.
-
-  Raises:
-    ParseError: In case an invalid field value is found.
-  """
-  # String/bytes tokens can come in multiple adjacent string literals.
-  # If we can consume one, consume as many as we can.
-  if tokenizer.TryConsumeByteString():
-    while tokenizer.TryConsumeByteString():
-      pass
-    return
-
-  if (not tokenizer.TryConsumeIdentifier() and
-      not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and
-      not tokenizer.TryConsumeFloat()):
-    raise ParseError('Invalid field value: ' + tokenizer.token)
-
-
-class Tokenizer(object):
-  """Protocol buffer text representation tokenizer.
-
-  This class handles the lower level string parsing by splitting it into
-  meaningful tokens.
-
-  It was directly ported from the Java protocol buffer API.
-  """
-
-  _WHITESPACE = re.compile(r'\s+')
-  _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE)
-  _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE)
-  _TOKEN = re.compile('|'.join([
-      r'[a-zA-Z_][0-9a-zA-Z_+-]*',  # an identifier
-      r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*',  # a number
-  ] + [  # quoted str for each quote mark
-      r'{qt}([^{qt}\n\\]|\\.)*({qt}|\\?$)'.format(qt=mark) for mark in _QUOTES
-  ]))
-
-  _IDENTIFIER = re.compile(r'[^\d\W]\w*')
-  _IDENTIFIER_OR_NUMBER = re.compile(r'\w+')
-
-  def __init__(self, lines, skip_comments=True):
-    self._position = 0
-    self._line = -1
-    self._column = 0
-    self._token_start = None
-    self.token = ''
-    self._lines = iter(lines)
-    self._current_line = ''
-    self._previous_line = 0
-    self._previous_column = 0
-    self._more_lines = True
-    self._skip_comments = skip_comments
-    self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
-                                or self._WHITESPACE)
-    self._SkipWhitespace()
-    self.NextToken()
-
-  def LookingAt(self, token):
-    return self.token == token
-
-  def AtEnd(self):
-    """Checks the end of the text was reached.
-
-    Returns:
-      True iff the end was reached.
-    """
-    return not self.token
-
-  def _PopLine(self):
-    while len(self._current_line) <= self._column:
-      try:
-        self._current_line = next(self._lines)
-      except StopIteration:
-        self._current_line = ''
-        self._more_lines = False
-        return
-      else:
-        self._line += 1
-        self._column = 0
-
-  def _SkipWhitespace(self):
-    while True:
-      self._PopLine()
-      match = self._whitespace_pattern.match(self._current_line, self._column)
-      if not match:
-        break
-      length = len(match.group(0))
-      self._column += length
-
-  def TryConsume(self, token):
-    """Tries to consume a given piece of text.
-
-    Args:
-      token: Text to consume.
-
-    Returns:
-      True iff the text was consumed.
-    """
-    if self.token == token:
-      self.NextToken()
-      return True
-    return False
-
-  def Consume(self, token):
-    """Consumes a piece of text.
-
-    Args:
-      token: Text to consume.
-
-    Raises:
-      ParseError: If the text couldn't be consumed.
-    """
-    if not self.TryConsume(token):
-      raise self.ParseError('Expected "%s".' % token)
-
-  def ConsumeComment(self):
-    result = self.token
-    if not self._COMMENT.match(result):
-      raise self.ParseError('Expected comment.')
-    self.NextToken()
-    return result
-
-  def TryConsumeIdentifier(self):
-    try:
-      self.ConsumeIdentifier()
-      return True
-    except ParseError:
-      return False
-
-  def ConsumeIdentifier(self):
-    """Consumes protocol message field identifier.
-
-    Returns:
-      Identifier string.
-
-    Raises:
-      ParseError: If an identifier couldn't be consumed.
-    """
-    result = self.token
-    if not self._IDENTIFIER.match(result):
-      raise self.ParseError('Expected identifier.')
-    self.NextToken()
-    return result
-
-  def TryConsumeIdentifierOrNumber(self):
-    try:
-      self.ConsumeIdentifierOrNumber()
-      return True
-    except ParseError:
-      return False
-
-  def ConsumeIdentifierOrNumber(self):
-    """Consumes protocol message field identifier.
-
-    Returns:
-      Identifier string.
-
-    Raises:
-      ParseError: If an identifier couldn't be consumed.
-    """
-    result = self.token
-    if not self._IDENTIFIER_OR_NUMBER.match(result):
-      raise self.ParseError('Expected identifier or number.')
-    self.NextToken()
-    return result
-
-  def TryConsumeInteger(self):
-    try:
-      # Note: is_long only affects value type, not whether an error is raised.
-      self.ConsumeInteger()
-      return True
-    except ParseError:
-      return False
-
-  def ConsumeInteger(self, is_long=False):
-    """Consumes an integer number.
-
-    Args:
-      is_long: True if the value should be returned as a long integer.
-    Returns:
-      The integer parsed.
-
-    Raises:
-      ParseError: If an integer couldn't be consumed.
-    """
-    try:
-      result = _ParseAbstractInteger(self.token, is_long=is_long)
-    except ValueError as e:
-      raise self.ParseError(str(e))
-    self.NextToken()
-    return result
-
-  def TryConsumeFloat(self):
-    try:
-      self.ConsumeFloat()
-      return True
-    except ParseError:
-      return False
-
-  def ConsumeFloat(self):
-    """Consumes an floating point number.
-
-    Returns:
-      The number parsed.
-
-    Raises:
-      ParseError: If a floating point number couldn't be consumed.
-    """
-    try:
-      result = ParseFloat(self.token)
-    except ValueError as e:
-      raise self.ParseError(str(e))
-    self.NextToken()
-    return result
-
-  def ConsumeBool(self):
-    """Consumes a boolean value.
-
-    Returns:
-      The bool parsed.
-
-    Raises:
-      ParseError: If a boolean value couldn't be consumed.
-    """
-    try:
-      result = ParseBool(self.token)
-    except ValueError as e:
-      raise self.ParseError(str(e))
-    self.NextToken()
-    return result
-
-  def TryConsumeByteString(self):
-    try:
-      self.ConsumeByteString()
-      return True
-    except ParseError:
-      return False
-
-  def ConsumeString(self):
-    """Consumes a string value.
-
-    Returns:
-      The string parsed.
-
-    Raises:
-      ParseError: If a string value couldn't be consumed.
-    """
-    the_bytes = self.ConsumeByteString()
-    try:
-      return six.text_type(the_bytes, 'utf-8')
-    except UnicodeDecodeError as e:
-      raise self._StringParseError(e)
-
-  def ConsumeByteString(self):
-    """Consumes a byte array value.
-
-    Returns:
-      The array parsed (as a string).
-
-    Raises:
-      ParseError: If a byte array value couldn't be consumed.
-    """
-    the_list = [self._ConsumeSingleByteString()]
-    while self.token and self.token[0] in _QUOTES:
-      the_list.append(self._ConsumeSingleByteString())
-    return b''.join(the_list)
-
-  def _ConsumeSingleByteString(self):
-    """Consume one token of a string literal.
-
-    String literals (whether bytes or text) can come in multiple adjacent
-    tokens which are automatically concatenated, like in C or Python.  This
-    method only consumes one token.
-
-    Returns:
-      The token parsed.
-    Raises:
-      ParseError: When the wrong format data is found.
-    """
-    text = self.token
-    if len(text) < 1 or text[0] not in _QUOTES:
-      raise self.ParseError('Expected string but found: %r' % (text,))
-
-    if len(text) < 2 or text[-1] != text[0]:
-      raise self.ParseError('String missing ending quote: %r' % (text,))
-
-    try:
-      result = text_encoding.CUnescape(text[1:-1])
-    except ValueError as e:
-      raise self.ParseError(str(e))
-    self.NextToken()
-    return result
-
-  def ConsumeEnum(self, field):
-    try:
-      result = ParseEnum(field, self.token)
-    except ValueError as e:
-      raise self.ParseError(str(e))
-    self.NextToken()
-    return result
-
-  def ParseErrorPreviousToken(self, message):
-    """Creates and *returns* a ParseError for the previously read token.
-
-    Args:
-      message: A message to set for the exception.
-
-    Returns:
-      A ParseError instance.
-    """
-    return ParseError(message, self._previous_line + 1,
-                      self._previous_column + 1)
-
-  def ParseError(self, message):
-    """Creates and *returns* a ParseError for the current token."""
-    return ParseError(message, self._line + 1, self._column + 1)
-
-  def _StringParseError(self, e):
-    return self.ParseError('Couldn\'t parse string: ' + str(e))
-
-  def NextToken(self):
-    """Reads the next meaningful token."""
-    self._previous_line = self._line
-    self._previous_column = self._column
-
-    self._column += len(self.token)
-    self._SkipWhitespace()
-
-    if not self._more_lines:
-      self.token = ''
-      return
-
-    match = self._TOKEN.match(self._current_line, self._column)
-    if not match and not self._skip_comments:
-      match = self._COMMENT.match(self._current_line, self._column)
-    if match:
-      token = match.group(0)
-      self.token = token
-    else:
-      self.token = self._current_line[self._column]
-
-# Aliased so it can still be accessed by current visibility violators.
-# TODO(dbarnett): Migrate violators to textformat_tokenizer.
-_Tokenizer = Tokenizer  # pylint: disable=invalid-name
-
-
-def _ConsumeInt32(tokenizer):
-  """Consumes a signed 32bit integer number from tokenizer.
-
-  Args:
-    tokenizer: A tokenizer used to parse the number.
-
-  Returns:
-    The integer parsed.
-
-  Raises:
-    ParseError: If a signed 32bit integer couldn't be consumed.
-  """
-  return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
-
-
-def _ConsumeUint32(tokenizer):
-  """Consumes an unsigned 32bit integer number from tokenizer.
-
-  Args:
-    tokenizer: A tokenizer used to parse the number.
-
-  Returns:
-    The integer parsed.
-
-  Raises:
-    ParseError: If an unsigned 32bit integer couldn't be consumed.
-  """
-  return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
-
-
-def _TryConsumeInt64(tokenizer):
-  try:
-    _ConsumeInt64(tokenizer)
-    return True
-  except ParseError:
-    return False
-
-
-def _ConsumeInt64(tokenizer):
-  """Consumes a signed 32bit integer number from tokenizer.
-
-  Args:
-    tokenizer: A tokenizer used to parse the number.
-
-  Returns:
-    The integer parsed.
-
-  Raises:
-    ParseError: If a signed 64bit integer couldn't be consumed.
-  """
-  return _ConsumeInteger(tokenizer, is_signed=True, is_long=True)
-
-
-def _TryConsumeUint64(tokenizer):
-  try:
-    _ConsumeUint64(tokenizer)
-    return True
-  except ParseError:
-    return False
-
-
-def _ConsumeUint64(tokenizer):
-  """Consumes an unsigned 64bit integer number from tokenizer.
-
-  Args:
-    tokenizer: A tokenizer used to parse the number.
-
-  Returns:
-    The integer parsed.
-
-  Raises:
-    ParseError: If an unsigned 64bit integer couldn't be consumed.
-  """
-  return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
-
-
-def _TryConsumeInteger(tokenizer, is_signed=False, is_long=False):
-  try:
-    _ConsumeInteger(tokenizer, is_signed=is_signed, is_long=is_long)
-    return True
-  except ParseError:
-    return False
-
-
-def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
-  """Consumes an integer number from tokenizer.
-
-  Args:
-    tokenizer: A tokenizer used to parse the number.
-    is_signed: True if a signed integer must be parsed.
-    is_long: True if a long integer must be parsed.
-
-  Returns:
-    The integer parsed.
-
-  Raises:
-    ParseError: If an integer with given characteristics couldn't be consumed.
-  """
-  try:
-    result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long)
-  except ValueError as e:
-    raise tokenizer.ParseError(str(e))
-  tokenizer.NextToken()
-  return result
-
-
-def ParseInteger(text, is_signed=False, is_long=False):
-  """Parses an integer.
-
-  Args:
-    text: The text to parse.
-    is_signed: True if a signed integer must be parsed.
-    is_long: True if a long integer must be parsed.
-
-  Returns:
-    The integer value.
-
-  Raises:
-    ValueError: Thrown iff the text is not a valid integer.
-  """
-  # Do the actual parsing. Exception handling is propagated to caller.
-  result = _ParseAbstractInteger(text, is_long=is_long)
-
-  # Check if the integer is sane. Exceptions handled by callers.
-  checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
-  checker.CheckValue(result)
-  return result
-
-
-def _ParseAbstractInteger(text, is_long=False):
-  """Parses an integer without checking size/signedness.
-
-  Args:
-    text: The text to parse.
-    is_long: True if the value should be returned as a long integer.
-
-  Returns:
-    The integer value.
-
-  Raises:
-    ValueError: Thrown iff the text is not a valid integer.
-  """
-  # Do the actual parsing. Exception handling is propagated to caller.
-  try:
-    # We force 32-bit values to int and 64-bit values to long to make
-    # alternate implementations where the distinction is more significant
-    # (e.g. the C++ implementation) simpler.
-    if is_long:
-      return long(text, 0)
-    else:
-      return int(text, 0)
-  except ValueError:
-    raise ValueError('Couldn\'t parse integer: %s' % text)
-
-
-def ParseFloat(text):
-  """Parse a floating point number.
-
-  Args:
-    text: Text to parse.
-
-  Returns:
-    The number parsed.
-
-  Raises:
-    ValueError: If a floating point number couldn't be parsed.
-  """
-  try:
-    # Assume Python compatible syntax.
-    return float(text)
-  except ValueError:
-    # Check alternative spellings.
-    if _FLOAT_INFINITY.match(text):
-      if text[0] == '-':
-        return float('-inf')
-      else:
-        return float('inf')
-    elif _FLOAT_NAN.match(text):
-      return float('nan')
-    else:
-      # assume '1.0f' format
-      try:
-        return float(text.rstrip('f'))
-      except ValueError:
-        raise ValueError('Couldn\'t parse float: %s' % text)
-
-
-def ParseBool(text):
-  """Parse a boolean value.
-
-  Args:
-    text: Text to parse.
-
-  Returns:
-    The boolean value parsed.
-
-  Raises:
-    ValueError: If text is not a valid boolean.
-  """
-  if text in ('true', 't', '1'):
-    return True
-  elif text in ('false', 'f', '0'):
-    return False
-  else:
-    raise ValueError('Expected "true" or "false".')
-
-
-def ParseEnum(field, value):
-  """Parse an enum value.
-
-  The value can be specified by a number (the enum value), or by
-  a string literal (the enum name).
-
-  Args:
-    field: Enum field descriptor.
-    value: String value.
-
-  Returns:
-    Enum value number.
-
-  Raises:
-    ValueError: If the enum value could not be parsed.
-  """
-  enum_descriptor = field.enum_type
-  try:
-    number = int(value, 0)
-  except ValueError:
-    # Identifier.
-    enum_value = enum_descriptor.values_by_name.get(value, None)
-    if enum_value is None:
-      raise ValueError('Enum type "%s" has no value named %s.' %
-                       (enum_descriptor.full_name, value))
-  else:
-    # Numeric value.
-    enum_value = enum_descriptor.values_by_number.get(number, None)
-    if enum_value is None:
-      raise ValueError('Enum type "%s" has no value with number %d.' %
-                       (enum_descriptor.full_name, number))
-  return enum_value.number
diff --git a/tools/swarming_client/third_party/google/protobuf/timestamp_pb2.py b/tools/swarming_client/third_party/google/protobuf/timestamp_pb2.py
deleted file mode 100644
index bd61186..0000000
--- a/tools/swarming_client/third_party/google/protobuf/timestamp_pb2.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/timestamp.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/timestamp.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x81\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z+github.com/golang/protobuf/ptypes/timestamp\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_TIMESTAMP = _descriptor.Descriptor(
-  name='Timestamp',
-  full_name='google.protobuf.Timestamp',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='seconds', full_name='google.protobuf.Timestamp.seconds', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nanos', full_name='google.protobuf.Timestamp.nanos', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=52,
-  serialized_end=95,
-)
-
-DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
-
-Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict(
-  DESCRIPTOR = _TIMESTAMP,
-  __module__ = 'google.protobuf.timestamp_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Timestamp)
-  ))
-_sym_db.RegisterMessage(Timestamp)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\016TimestampProtoP\001Z+github.com/golang/protobuf/ptypes/timestamp\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/type_pb2.py b/tools/swarming_client/third_party/google/protobuf/type_pb2.py
deleted file mode 100644
index 737493f..0000000
--- a/tools/swarming_client/third_party/google/protobuf/type_pb2.py
+++ /dev/null
@@ -1,541 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/type.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/type.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42L\n\x13\x63om.google.protobufB\tTypeProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_SYNTAX = _descriptor.EnumDescriptor(
-  name='Syntax',
-  full_name='google.protobuf.Syntax',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='SYNTAX_PROTO2', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SYNTAX_PROTO3', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1413,
-  serialized_end=1459,
-)
-_sym_db.RegisterEnumDescriptor(_SYNTAX)
-
-Syntax = enum_type_wrapper.EnumTypeWrapper(_SYNTAX)
-SYNTAX_PROTO2 = 0
-SYNTAX_PROTO3 = 1
-
-
-_FIELD_KIND = _descriptor.EnumDescriptor(
-  name='Kind',
-  full_name='google.protobuf.Field.Kind',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_DOUBLE', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FLOAT', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_INT64', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_UINT64', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_INT32', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FIXED64', index=6, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_FIXED32', index=7, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_BOOL', index=8, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_STRING', index=9, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_GROUP', index=10, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_MESSAGE', index=11, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_BYTES', index=12, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_UINT32', index=13, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_ENUM', index=14, number=14,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SFIXED32', index=15, number=15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SFIXED64', index=16, number=16,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SINT32', index=17, number=17,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TYPE_SINT64', index=18, number=18,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=610,
-  serialized_end=938,
-)
-_sym_db.RegisterEnumDescriptor(_FIELD_KIND)
-
-_FIELD_CARDINALITY = _descriptor.EnumDescriptor(
-  name='Cardinality',
-  full_name='google.protobuf.Field.Cardinality',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='CARDINALITY_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CARDINALITY_OPTIONAL', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CARDINALITY_REQUIRED', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CARDINALITY_REPEATED', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=940,
-  serialized_end=1056,
-)
-_sym_db.RegisterEnumDescriptor(_FIELD_CARDINALITY)
-
-
-_TYPE = _descriptor.Descriptor(
-  name='Type',
-  full_name='google.protobuf.Type',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Type.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fields', full_name='google.protobuf.Type.fields', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneofs', full_name='google.protobuf.Type.oneofs', index=2,
-      number=3, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.Type.options', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_context', full_name='google.protobuf.Type.source_context', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='syntax', full_name='google.protobuf.Type.syntax', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=328,
-)
-
-
-_FIELD = _descriptor.Descriptor(
-  name='Field',
-  full_name='google.protobuf.Field',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='kind', full_name='google.protobuf.Field.kind', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cardinality', full_name='google.protobuf.Field.cardinality', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='number', full_name='google.protobuf.Field.number', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Field.name', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='type_url', full_name='google.protobuf.Field.type_url', index=4,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_index', full_name='google.protobuf.Field.oneof_index', index=5,
-      number=7, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='packed', full_name='google.protobuf.Field.packed', index=6,
-      number=8, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.Field.options', index=7,
-      number=9, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='json_name', full_name='google.protobuf.Field.json_name', index=8,
-      number=10, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_value', full_name='google.protobuf.Field.default_value', index=9,
-      number=11, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _FIELD_KIND,
-    _FIELD_CARDINALITY,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=331,
-  serialized_end=1056,
-)
-
-
-_ENUM = _descriptor.Descriptor(
-  name='Enum',
-  full_name='google.protobuf.Enum',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Enum.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enumvalue', full_name='google.protobuf.Enum.enumvalue', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.Enum.options', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_context', full_name='google.protobuf.Enum.source_context', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='syntax', full_name='google.protobuf.Enum.syntax', index=4,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1059,
-  serialized_end=1265,
-)
-
-
-_ENUMVALUE = _descriptor.Descriptor(
-  name='EnumValue',
-  full_name='google.protobuf.EnumValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.EnumValue.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='number', full_name='google.protobuf.EnumValue.number', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='options', full_name='google.protobuf.EnumValue.options', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1267,
-  serialized_end=1350,
-)
-
-
-_OPTION = _descriptor.Descriptor(
-  name='Option',
-  full_name='google.protobuf.Option',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='google.protobuf.Option.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.Option.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1352,
-  serialized_end=1411,
-)
-
-_TYPE.fields_by_name['fields'].message_type = _FIELD
-_TYPE.fields_by_name['options'].message_type = _OPTION
-_TYPE.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT
-_TYPE.fields_by_name['syntax'].enum_type = _SYNTAX
-_FIELD.fields_by_name['kind'].enum_type = _FIELD_KIND
-_FIELD.fields_by_name['cardinality'].enum_type = _FIELD_CARDINALITY
-_FIELD.fields_by_name['options'].message_type = _OPTION
-_FIELD_KIND.containing_type = _FIELD
-_FIELD_CARDINALITY.containing_type = _FIELD
-_ENUM.fields_by_name['enumvalue'].message_type = _ENUMVALUE
-_ENUM.fields_by_name['options'].message_type = _OPTION
-_ENUM.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT
-_ENUM.fields_by_name['syntax'].enum_type = _SYNTAX
-_ENUMVALUE.fields_by_name['options'].message_type = _OPTION
-_OPTION.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-DESCRIPTOR.message_types_by_name['Type'] = _TYPE
-DESCRIPTOR.message_types_by_name['Field'] = _FIELD
-DESCRIPTOR.message_types_by_name['Enum'] = _ENUM
-DESCRIPTOR.message_types_by_name['EnumValue'] = _ENUMVALUE
-DESCRIPTOR.message_types_by_name['Option'] = _OPTION
-DESCRIPTOR.enum_types_by_name['Syntax'] = _SYNTAX
-
-Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), dict(
-  DESCRIPTOR = _TYPE,
-  __module__ = 'google.protobuf.type_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Type)
-  ))
-_sym_db.RegisterMessage(Type)
-
-Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), dict(
-  DESCRIPTOR = _FIELD,
-  __module__ = 'google.protobuf.type_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Field)
-  ))
-_sym_db.RegisterMessage(Field)
-
-Enum = _reflection.GeneratedProtocolMessageType('Enum', (_message.Message,), dict(
-  DESCRIPTOR = _ENUM,
-  __module__ = 'google.protobuf.type_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Enum)
-  ))
-_sym_db.RegisterMessage(Enum)
-
-EnumValue = _reflection.GeneratedProtocolMessageType('EnumValue', (_message.Message,), dict(
-  DESCRIPTOR = _ENUMVALUE,
-  __module__ = 'google.protobuf.type_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.EnumValue)
-  ))
-_sym_db.RegisterMessage(EnumValue)
-
-Option = _reflection.GeneratedProtocolMessageType('Option', (_message.Message,), dict(
-  DESCRIPTOR = _OPTION,
-  __module__ = 'google.protobuf.type_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Option)
-  ))
-_sym_db.RegisterMessage(Option)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\tTypeProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_arena_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_arena_pb2.py
deleted file mode 100644
index f843190..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_arena_pb2.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_arena.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_no_arena_import_pb2 as google_dot_protobuf_dot_unittest__no__arena__import__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_arena.proto',
-  package='proto2_arena_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n$google/protobuf/unittest_arena.proto\x12\x15proto2_arena_unittest\x1a.google/protobuf/unittest_no_arena_import.proto\"\x1a\n\rNestedMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05\"\xb2\x01\n\x0c\x41renaMessage\x12\x45\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32$.proto2_arena_unittest.NestedMessage\x12[\n repeated_import_no_arena_message\x18\x02 \x03(\x0b\x32\x31.proto2_arena_unittest.ImportNoArenaNestedMessageB\x03\xf8\x01\x01')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__no__arena__import__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='proto2_arena_unittest.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='d', full_name='proto2_arena_unittest.NestedMessage.d', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=111,
-  serialized_end=137,
-)
-
-
-_ARENAMESSAGE = _descriptor.Descriptor(
-  name='ArenaMessage',
-  full_name='proto2_arena_unittest.ArenaMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_message', full_name='proto2_arena_unittest.ArenaMessage.repeated_nested_message', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_no_arena_message', full_name='proto2_arena_unittest.ArenaMessage.repeated_import_no_arena_message', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=140,
-  serialized_end=318,
-)
-
-_ARENAMESSAGE.fields_by_name['repeated_nested_message'].message_type = _NESTEDMESSAGE
-_ARENAMESSAGE.fields_by_name['repeated_import_no_arena_message'].message_type = google_dot_protobuf_dot_unittest__no__arena__import__pb2._IMPORTNOARENANESTEDMESSAGE
-DESCRIPTOR.message_types_by_name['NestedMessage'] = _NESTEDMESSAGE
-DESCRIPTOR.message_types_by_name['ArenaMessage'] = _ARENAMESSAGE
-
-NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-  DESCRIPTOR = _NESTEDMESSAGE,
-  __module__ = 'google.protobuf.unittest_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto2_arena_unittest.NestedMessage)
-  ))
-_sym_db.RegisterMessage(NestedMessage)
-
-ArenaMessage = _reflection.GeneratedProtocolMessageType('ArenaMessage', (_message.Message,), dict(
-  DESCRIPTOR = _ARENAMESSAGE,
-  __module__ = 'google.protobuf.unittest_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto2_arena_unittest.ArenaMessage)
-  ))
-_sym_db.RegisterMessage(ArenaMessage)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_custom_options_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_custom_options_pb2.py
deleted file mode 100644
index 4e39e1d..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_custom_options_pb2.py
+++ /dev/null
@@ -1,1862 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_custom_options.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import service as _service
-from google.protobuf import service_reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_custom_options.proto',
-  package='protobuf_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n-google/protobuf/unittest_custom_options.proto\x12\x11protobuf_unittest\x1a google/protobuf/descriptor.proto\"\xbf\x01\n\x1cTestMessageWithCustomOptions\x12\x1e\n\x06\x66ield1\x18\x01 \x01(\tB\x0e\x08\x01\xc1\xe0\xc3\x1d-\xe1u\n\x02\x00\x00\x00\x12\x15\n\x0boneof_field\x18\x02 \x01(\x05H\x00\";\n\x06\x41nEnum\x12\x0f\n\x0b\x41NENUM_VAL1\x10\x01\x12\x16\n\x0b\x41NENUM_VAL2\x10\x02\x1a\x05\xb0\x86\xfa\x05{\x1a\x08\xc5\xf6\xc9\x1d\xeb\xfc\xff\xff:\x10\x08\x00\xe0\xe9\xc2\x1d\xc8\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\x19\n\x07\x41nOneof\x12\x0e\xf8\xac\xc3\x1d\x9d\xff\xff\xff\xff\xff\xff\xff\xff\x01\"\x18\n\x16\x43ustomOptionFooRequest\"\x19\n\x17\x43ustomOptionFooResponse\"\x1e\n\x1c\x43ustomOptionFooClientMessage\"\x1e\n\x1c\x43ustomOptionFooServerMessage\"m\n\x1a\x44ummyMessageContainingEnum\"O\n\x0cTestEnumType\x12\x1a\n\x16TEST_OPTION_ENUM_TYPE1\x10\x16\x12#\n\x16TEST_OPTION_ENUM_TYPE2\x10\xe9\xff\xff\xff\xff\xff\xff\xff\xff\x01\"!\n\x1f\x44ummyMessageInvalidAsOptionType\"\x8a\x01\n\x1c\x43ustomOptionMinIntegerValues:j\xd0\xde\xb2\x1d\x00\xe8\xc6\xb2\x1d\x80\x80\x80\x80\xf8\xff\xff\xff\xff\x01\xb0\xbc\xb2\x1d\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01\x80\x93\xb2\x1d\x00\xf8\xf5\xb0\x1d\x00\x80\xc4\xb0\x1d\xff\xff\xff\xff\x0f\xf8\x97\xb0\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x9d\xf5\xaf\x1d\x00\x00\x00\x00\x91\xee\xaf\x1d\x00\x00\x00\x00\x00\x00\x00\x00\xad\x8d\xaf\x1d\x00\x00\x00\x80\x99\xd6\xa8\x1d\x00\x00\x00\x00\x00\x00\x00\x80\"\x91\x01\n\x1c\x43ustomOptionMaxIntegerValues:q\xd0\xde\xb2\x1d\x01\xe8\xc6\xb2\x1d\xff\xff\xff\xff\x07\xb0\xbc\xb2\x1d\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x80\x93\xb2\x1d\xff\xff\xff\xff\x0f\xf8\xf5\xb0\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x80\xc4\xb0\x1d\xfe\xff\xff\xff\x0f\xf8\x97\xb0\x1d\xfe\xff\xff\xff\xff\xff\xff\xff\xff\x01\x9d\xf5\xaf\x1d\xff\xff\xff\xff\x91\xee\xaf\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xad\x8d\xaf\x1d\xff\xff\xff\x7f\x99\xd6\xa8\x1d\xff\xff\xff\xff\xff\xff\xff\x7f\"n\n\x17\x43ustomOptionOtherValues:S\xe8\xc6\xb2\x1d\x9c\xff\xff\xff\xff\xff\xff\xff\xff\x01\xf5\xdf\xa3\x1d\xe7\x87\x45\x41\xe9\xdc\xa2\x1d\xfbY\x8c\x42\xca\xc0\xf3?\xaa\xdc\xa2\x1d\x0eHello, \"World\"\xb2\xd9\xa2\x1d\x0bHello\x00World\x88\xd9\xa2\x1d\xe9\xff\xff\xff\xff\xff\xff\xff\xff\x01\"4\n\x1cSettingRealsFromPositiveInts:\x14\xf5\xdf\xa3\x1d\x00\x00@A\xe9\xdc\xa2\x1d\x00\x00\x00\x00\x00@c@\"4\n\x1cSettingRealsFromNegativeInts:\x14\xf5\xdf\xa3\x1d\x00\x00@\xc1\xe9\xdc\xa2\x1d\x00\x00\x00\x00\x00@c\xc0\"U\n\x12\x43omplexOptionType1\x12\x0b\n\x03\x66oo\x18\x01 \x01(\x05\x12\x0c\n\x04\x66oo2\x18\x02 \x01(\x05\x12\x0c\n\x04\x66oo3\x18\x03 \x01(\x05\x12\x0c\n\x04\x66oo4\x18\x04 \x03(\x05*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x8b\x03\n\x12\x43omplexOptionType2\x12\x32\n\x03\x62\x61r\x18\x01 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1\x12\x0b\n\x03\x62\x61z\x18\x02 \x01(\x05\x12\x46\n\x04\x66red\x18\x03 \x01(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4\x12H\n\x06\x62\x61rney\x18\x04 \x03(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4\x1a\x97\x01\n\x12\x43omplexOptionType4\x12\r\n\x05waldo\x18\x01 \x01(\x05\x32r\n\x0c\x63omplex_opt4\x12\x1f.google.protobuf.MessageOptions\x18\x8a\xf5\xd1\x03 \x01(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x9c\x01\n\x12\x43omplexOptionType3\x12\x0b\n\x03qux\x18\x01 \x01(\x05\x12T\n\x12\x63omplexoptiontype5\x18\x02 \x01(\n28.protobuf_unittest.ComplexOptionType3.ComplexOptionType5\x1a#\n\x12\x43omplexOptionType5\x12\r\n\x05plugh\x18\x03 \x01(\x05\"\x1f\n\x0b\x43omplexOpt6\x12\x10\n\x05xyzzy\x18\xdf\xbf\xcf\x03 \x01(\x05\"\xf1\x01\n\x15VariousComplexOptions:\xd7\x01\xa2\xe2\x95\x1d\x02\x08*\xa2\xe2\x95\x1d\x06\xd8\x85\x9e\x1d\xc4\x02\xa2\xe2\x95\x1d\x08\x92\xf5\x9d\x1d\x03\x08\xec\x06\xa2\xe2\x95\x1d\x02 c\xa2\xe2\x95\x1d\x02 X\xaa\xfd\x90\x1d\x03\x10\xdb\x07\xaa\xfd\x90\x1d\x06\xf8\xe6\x97\x1d\x8e\x05\xaa\xfd\x90\x1d\x05\n\x03\x08\xe7\x05\xaa\xfd\x90\x1d\x08\n\x06\xd8\x85\x9e\x1d\xcf\x0f\xaa\xfd\x90\x1d\n\n\x08\x92\xf5\x9d\x1d\x03\x08\xd8\x0f\xaa\xfd\x90\x1d\x08\xc2\xac\x97\x1d\x03\x08\xe5\x05\xaa\xfd\x90\x1d\x0b\xc2\xac\x97\x1d\x06\xd8\x85\x9e\x1d\xce\x0f\xaa\xfd\x90\x1d\r\xc2\xac\x97\x1d\x08\x92\xf5\x9d\x1d\x03\x08\xc9\x10\xd2\xa8\x8f\x1d\x03\x08\xb3\x0f\xaa\xfd\x90\x1d\x05\x1a\x03\x08\xc1\x02\xaa\xfd\x90\x1d\x04\"\x02\x08\x65\xaa\xfd\x90\x1d\x05\"\x03\x08\xd4\x01\xfa\xde\x90\x1d\x02\x08\t\xfa\xde\x90\x1d\x04\x13\x18\x16\x14\xe3\xdc\xfc\x1c\xf8\xfd\xfb\x1c\x18\xe4\xdc\xfc\x1c\"#\n\x13\x41ggregateMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa0\x01\n\x1a\x41ggregateMessageSetElement\x12\t\n\x01s\x18\x01 \x01(\t2w\n\x15message_set_extension\x12&.protobuf_unittest.AggregateMessageSet\x18\xf6\xeb\xae\x07 \x01(\x0b\x32-.protobuf_unittest.AggregateMessageSetElement\"\xfd\x01\n\tAggregate\x12\t\n\x01i\x18\x01 \x01(\x05\x12\t\n\x01s\x18\x02 \x01(\t\x12)\n\x03sub\x18\x03 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate\x12*\n\x04\x66ile\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x34\n\x04mset\x18\x05 \x01(\x0b\x32&.protobuf_unittest.AggregateMessageSet2M\n\x06nested\x12\x1c.google.protobuf.FileOptions\x18\xa7\xd1\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate\"Y\n\x10\x41ggregateMessage\x12)\n\tfieldname\x18\x01 \x01(\x05\x42\x16\xf2\xa1\x87;\x11\x12\x0f\x46ieldAnnotation:\x1a\xc2\xd1\x86;\x15\x08\x65\x12\x11MessageAnnotation\"\xc9\x01\n\x10NestedOptionType\x1a;\n\rNestedMessage\x12\"\n\x0cnested_field\x18\x01 \x01(\x05\x42\x0c\xc1\xe0\xc3\x1d\xea\x03\x00\x00\x00\x00\x00\x00:\x06\xe0\xe9\xc2\x1d\xe9\x07\"5\n\nNestedEnum\x12\x1d\n\x11NESTED_ENUM_VALUE\x10\x01\x1a\x06\xb0\x86\xfa\x05\xec\x07\x1a\x08\xc5\xf6\xc9\x1d\xeb\x03\x00\x00\x32\x41\n\x10nested_extension\x12\x1c.google.protobuf.FileOptions\x18\xfd\xf8\xe2\x03 \x01(\x05\x42\x06\xc8\x8b\xca\x1d\xed\x07\"d\n\rOldOptionType\x12\x38\n\x05value\x18\x01 \x02(\x0e\x32).protobuf_unittest.OldOptionType.TestEnum\"\x19\n\x08TestEnum\x12\r\n\tOLD_VALUE\x10\x00\"s\n\rNewOptionType\x12\x38\n\x05value\x18\x01 \x02(\x0e\x32).protobuf_unittest.NewOptionType.TestEnum\"(\n\x08TestEnum\x12\r\n\tOLD_VALUE\x10\x00\x12\r\n\tNEW_VALUE\x10\x01\"-\n!TestMessageWithRequiredEnumOption:\x08\xfa\xe8\xfc\x94\x03\x02\x08\x00*6\n\nMethodOpt1\x12\x13\n\x0fMETHODOPT1_VAL1\x10\x01\x12\x13\n\x0fMETHODOPT1_VAL2\x10\x02*M\n\rAggregateEnum\x12%\n\x05VALUE\x10\x01\x1a\x1a\xca\xfc\x89;\x15\x12\x13\x45numValueAnnotation\x1a\x15\x92\x95\x88;\x10\x12\x0e\x45numAnnotation2\x8e\x01\n\x1cTestServiceWithCustomOptions\x12\x63\n\x03\x46oo\x12).protobuf_unittest.CustomOptionFooRequest\x1a*.protobuf_unittest.CustomOptionFooResponse\"\x05\xe0\xfa\x8c\x1e\x02\x1a\t\x90\xb2\x8b\x1e\xd3\xdb\x80\xcbI2\x99\x01\n\x10\x41ggregateService\x12k\n\x06Method\x12#.protobuf_unittest.AggregateMessage\x1a#.protobuf_unittest.AggregateMessage\"\x17\xca\xc8\x96;\x12\x12\x10MethodAnnotation\x1a\x18\xca\xfb\x8e;\x13\x12\x11ServiceAnnotation:2\n\tfile_opt1\x12\x1c.google.protobuf.FileOptions\x18\x8e\x9d\xd8\x03 \x01(\x04:8\n\x0cmessage_opt1\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xad\xd8\x03 \x01(\x05:4\n\nfield_opt1\x12\x1d.google.protobuf.FieldOptions\x18\x88\xbc\xd8\x03 \x01(\x06:8\n\nfield_opt2\x12\x1d.google.protobuf.FieldOptions\x18\xb9\xa1\xd9\x03 \x01(\x05:\x02\x34\x32:4\n\noneof_opt1\x12\x1d.google.protobuf.OneofOptions\x18\xcf\xb5\xd8\x03 \x01(\x05:2\n\tenum_opt1\x12\x1c.google.protobuf.EnumOptions\x18\xe8\x9e\xd9\x03 \x01(\x0f:<\n\x0f\x65num_value_opt1\x12!.google.protobuf.EnumValueOptions\x18\xe6\xa0_ \x01(\x05:8\n\x0cservice_opt1\x12\x1f.google.protobuf.ServiceOptions\x18\xa2\xb6\xe1\x03 \x01(\x12:U\n\x0bmethod_opt1\x12\x1e.google.protobuf.MethodOptions\x18\xac\xcf\xe1\x03 \x01(\x0e\x32\x1d.protobuf_unittest.MethodOpt1:4\n\x08\x62ool_opt\x12\x1f.google.protobuf.MessageOptions\x18\xea\xab\xd6\x03 \x01(\x08:5\n\tint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xed\xa8\xd6\x03 \x01(\x05:5\n\tint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc6\xa7\xd6\x03 \x01(\x03:6\n\nuint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xb0\xa2\xd6\x03 \x01(\r:6\n\nuint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xdf\x8e\xd6\x03 \x01(\x04:6\n\nsint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x88\xd6\x03 \x01(\x11:6\n\nsint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xff\x82\xd6\x03 \x01(\x12:7\n\x0b\x66ixed32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xd3\xfe\xd5\x03 \x01(\x07:7\n\x0b\x66ixed64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xe2\xfd\xd5\x03 \x01(\x06:8\n\x0csfixed32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xd5\xf1\xd5\x03 \x01(\x0f:8\n\x0csfixed64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xe3\x8a\xd5\x03 \x01(\x10:5\n\tfloat_opt\x12\x1f.google.protobuf.MessageOptions\x18\xfe\xbb\xd4\x03 \x01(\x02:6\n\ndouble_opt\x12\x1f.google.protobuf.MessageOptions\x18\xcd\xab\xd4\x03 \x01(\x01:6\n\nstring_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc5\xab\xd4\x03 \x01(\t:5\n\tbytes_opt\x12\x1f.google.protobuf.MessageOptions\x18\x96\xab\xd4\x03 \x01(\x0c:p\n\x08\x65num_opt\x12\x1f.google.protobuf.MessageOptions\x18\x91\xab\xd4\x03 \x01(\x0e\x32:.protobuf_unittest.DummyMessageContainingEnum.TestEnumType:p\n\x10message_type_opt\x12\x1f.google.protobuf.MessageOptions\x18\xaf\xf2\xd3\x03 \x01(\x0b\x32\x32.protobuf_unittest.DummyMessageInvalidAsOptionType:6\n\x04quux\x12%.protobuf_unittest.ComplexOptionType1\x18\xdb\xe0\xd3\x03 \x01(\x05:^\n\x05\x63orge\x12%.protobuf_unittest.ComplexOptionType1\x18\xd2\xde\xd3\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType3:8\n\x06grault\x12%.protobuf_unittest.ComplexOptionType2\x18\xef\xfc\xd2\x03 \x01(\x05:_\n\x06garply\x12%.protobuf_unittest.ComplexOptionType2\x18\xc8\xf5\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1:_\n\x0c\x63omplex_opt1\x12\x1f.google.protobuf.MessageOptions\x18\xa4\xdc\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1:_\n\x0c\x63omplex_opt2\x12\x1f.google.protobuf.MessageOptions\x18\xd5\x8f\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType2:_\n\x0c\x63omplex_opt3\x12\x1f.google.protobuf.MessageOptions\x18\xef\x8b\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType3:W\n\x0b\x63omplexopt6\x12\x1f.google.protobuf.MessageOptions\x18\xcc\xcb\xcf\x03 \x01(\n2\x1e.protobuf_unittest.ComplexOpt6:N\n\x07\x66ileopt\x12\x1c.google.protobuf.FileOptions\x18\xcf\xdd\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:P\n\x06msgopt\x12\x1f.google.protobuf.MessageOptions\x18\x98\xea\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:P\n\x08\x66ieldopt\x12\x1d.google.protobuf.FieldOptions\x18\x9e\xf4\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:N\n\x07\x65numopt\x12\x1c.google.protobuf.EnumOptions\x18\xd2\x82\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:V\n\nenumvalopt\x12!.google.protobuf.EnumValueOptions\x18\xc9\x9f\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:T\n\nserviceopt\x12\x1f.google.protobuf.ServiceOptions\x18\xb9\xef\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:R\n\tmethodopt\x12\x1e.google.protobuf.MethodOptions\x18\x89\xe9\xb2\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:_\n\x11required_enum_opt\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xcd\xcf\x32 \x01(\x0b\x32 .protobuf_unittest.OldOptionTypeB\x87\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf0\xe8\xc1\x1d\xea\xad\xc0\xe5$\xfa\xec\x85;p\x08\x64\x12\x0e\x46ileAnnotation\x1a\x16\x12\x14NestedFileAnnotation\"\x1e\xfa\xec\x85;\x19\x12\x17\x46ileExtensionAnnotation*$\x0b\x10\xf6\xeb\xae\x07\x1a\x1b\n\x19\x45mbeddedMessageSetElement\x0c')
-  ,
-  dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_METHODOPT1 = _descriptor.EnumDescriptor(
-  name='MethodOpt1',
-  full_name='protobuf_unittest.MethodOpt1',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='METHODOPT1_VAL1', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METHODOPT1_VAL2', index=1, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3006,
-  serialized_end=3060,
-)
-_sym_db.RegisterEnumDescriptor(_METHODOPT1)
-
-MethodOpt1 = enum_type_wrapper.EnumTypeWrapper(_METHODOPT1)
-_AGGREGATEENUM = _descriptor.EnumDescriptor(
-  name='AggregateEnum',
-  full_name='protobuf_unittest.AggregateEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='VALUE', index=0, number=1,
-      options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\312\374\211;\025\022\023EnumValueAnnotation')),
-      type=None),
-  ],
-  containing_type=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\222\225\210;\020\022\016EnumAnnotation')),
-  serialized_start=3062,
-  serialized_end=3139,
-)
-_sym_db.RegisterEnumDescriptor(_AGGREGATEENUM)
-
-AggregateEnum = enum_type_wrapper.EnumTypeWrapper(_AGGREGATEENUM)
-METHODOPT1_VAL1 = 1
-METHODOPT1_VAL2 = 2
-VALUE = 1
-
-FILE_OPT1_FIELD_NUMBER = 7736974
-file_opt1 = _descriptor.FieldDescriptor(
-  name='file_opt1', full_name='protobuf_unittest.file_opt1', index=0,
-  number=7736974, type=4, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-MESSAGE_OPT1_FIELD_NUMBER = 7739036
-message_opt1 = _descriptor.FieldDescriptor(
-  name='message_opt1', full_name='protobuf_unittest.message_opt1', index=1,
-  number=7739036, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FIELD_OPT1_FIELD_NUMBER = 7740936
-field_opt1 = _descriptor.FieldDescriptor(
-  name='field_opt1', full_name='protobuf_unittest.field_opt1', index=2,
-  number=7740936, type=6, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FIELD_OPT2_FIELD_NUMBER = 7753913
-field_opt2 = _descriptor.FieldDescriptor(
-  name='field_opt2', full_name='protobuf_unittest.field_opt2', index=3,
-  number=7753913, type=5, cpp_type=1, label=1,
-  has_default_value=True, default_value=42,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ONEOF_OPT1_FIELD_NUMBER = 7740111
-oneof_opt1 = _descriptor.FieldDescriptor(
-  name='oneof_opt1', full_name='protobuf_unittest.oneof_opt1', index=4,
-  number=7740111, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ENUM_OPT1_FIELD_NUMBER = 7753576
-enum_opt1 = _descriptor.FieldDescriptor(
-  name='enum_opt1', full_name='protobuf_unittest.enum_opt1', index=5,
-  number=7753576, type=15, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ENUM_VALUE_OPT1_FIELD_NUMBER = 1560678
-enum_value_opt1 = _descriptor.FieldDescriptor(
-  name='enum_value_opt1', full_name='protobuf_unittest.enum_value_opt1', index=6,
-  number=1560678, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SERVICE_OPT1_FIELD_NUMBER = 7887650
-service_opt1 = _descriptor.FieldDescriptor(
-  name='service_opt1', full_name='protobuf_unittest.service_opt1', index=7,
-  number=7887650, type=18, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-METHOD_OPT1_FIELD_NUMBER = 7890860
-method_opt1 = _descriptor.FieldDescriptor(
-  name='method_opt1', full_name='protobuf_unittest.method_opt1', index=8,
-  number=7890860, type=14, cpp_type=8, label=1,
-  has_default_value=False, default_value=1,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-BOOL_OPT_FIELD_NUMBER = 7706090
-bool_opt = _descriptor.FieldDescriptor(
-  name='bool_opt', full_name='protobuf_unittest.bool_opt', index=9,
-  number=7706090, type=8, cpp_type=7, label=1,
-  has_default_value=False, default_value=False,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-INT32_OPT_FIELD_NUMBER = 7705709
-int32_opt = _descriptor.FieldDescriptor(
-  name='int32_opt', full_name='protobuf_unittest.int32_opt', index=10,
-  number=7705709, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-INT64_OPT_FIELD_NUMBER = 7705542
-int64_opt = _descriptor.FieldDescriptor(
-  name='int64_opt', full_name='protobuf_unittest.int64_opt', index=11,
-  number=7705542, type=3, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-UINT32_OPT_FIELD_NUMBER = 7704880
-uint32_opt = _descriptor.FieldDescriptor(
-  name='uint32_opt', full_name='protobuf_unittest.uint32_opt', index=12,
-  number=7704880, type=13, cpp_type=3, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-UINT64_OPT_FIELD_NUMBER = 7702367
-uint64_opt = _descriptor.FieldDescriptor(
-  name='uint64_opt', full_name='protobuf_unittest.uint64_opt', index=13,
-  number=7702367, type=4, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SINT32_OPT_FIELD_NUMBER = 7701568
-sint32_opt = _descriptor.FieldDescriptor(
-  name='sint32_opt', full_name='protobuf_unittest.sint32_opt', index=14,
-  number=7701568, type=17, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SINT64_OPT_FIELD_NUMBER = 7700863
-sint64_opt = _descriptor.FieldDescriptor(
-  name='sint64_opt', full_name='protobuf_unittest.sint64_opt', index=15,
-  number=7700863, type=18, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FIXED32_OPT_FIELD_NUMBER = 7700307
-fixed32_opt = _descriptor.FieldDescriptor(
-  name='fixed32_opt', full_name='protobuf_unittest.fixed32_opt', index=16,
-  number=7700307, type=7, cpp_type=3, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FIXED64_OPT_FIELD_NUMBER = 7700194
-fixed64_opt = _descriptor.FieldDescriptor(
-  name='fixed64_opt', full_name='protobuf_unittest.fixed64_opt', index=17,
-  number=7700194, type=6, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SFIXED32_OPT_FIELD_NUMBER = 7698645
-sfixed32_opt = _descriptor.FieldDescriptor(
-  name='sfixed32_opt', full_name='protobuf_unittest.sfixed32_opt', index=18,
-  number=7698645, type=15, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SFIXED64_OPT_FIELD_NUMBER = 7685475
-sfixed64_opt = _descriptor.FieldDescriptor(
-  name='sfixed64_opt', full_name='protobuf_unittest.sfixed64_opt', index=19,
-  number=7685475, type=16, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FLOAT_OPT_FIELD_NUMBER = 7675390
-float_opt = _descriptor.FieldDescriptor(
-  name='float_opt', full_name='protobuf_unittest.float_opt', index=20,
-  number=7675390, type=2, cpp_type=6, label=1,
-  has_default_value=False, default_value=float(0),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DOUBLE_OPT_FIELD_NUMBER = 7673293
-double_opt = _descriptor.FieldDescriptor(
-  name='double_opt', full_name='protobuf_unittest.double_opt', index=21,
-  number=7673293, type=1, cpp_type=5, label=1,
-  has_default_value=False, default_value=float(0),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-STRING_OPT_FIELD_NUMBER = 7673285
-string_opt = _descriptor.FieldDescriptor(
-  name='string_opt', full_name='protobuf_unittest.string_opt', index=22,
-  number=7673285, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-BYTES_OPT_FIELD_NUMBER = 7673238
-bytes_opt = _descriptor.FieldDescriptor(
-  name='bytes_opt', full_name='protobuf_unittest.bytes_opt', index=23,
-  number=7673238, type=12, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b(""),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ENUM_OPT_FIELD_NUMBER = 7673233
-enum_opt = _descriptor.FieldDescriptor(
-  name='enum_opt', full_name='protobuf_unittest.enum_opt', index=24,
-  number=7673233, type=14, cpp_type=8, label=1,
-  has_default_value=False, default_value=22,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-MESSAGE_TYPE_OPT_FIELD_NUMBER = 7665967
-message_type_opt = _descriptor.FieldDescriptor(
-  name='message_type_opt', full_name='protobuf_unittest.message_type_opt', index=25,
-  number=7665967, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-QUUX_FIELD_NUMBER = 7663707
-quux = _descriptor.FieldDescriptor(
-  name='quux', full_name='protobuf_unittest.quux', index=26,
-  number=7663707, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-CORGE_FIELD_NUMBER = 7663442
-corge = _descriptor.FieldDescriptor(
-  name='corge', full_name='protobuf_unittest.corge', index=27,
-  number=7663442, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-GRAULT_FIELD_NUMBER = 7650927
-grault = _descriptor.FieldDescriptor(
-  name='grault', full_name='protobuf_unittest.grault', index=28,
-  number=7650927, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-GARPLY_FIELD_NUMBER = 7649992
-garply = _descriptor.FieldDescriptor(
-  name='garply', full_name='protobuf_unittest.garply', index=29,
-  number=7649992, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-COMPLEX_OPT1_FIELD_NUMBER = 7646756
-complex_opt1 = _descriptor.FieldDescriptor(
-  name='complex_opt1', full_name='protobuf_unittest.complex_opt1', index=30,
-  number=7646756, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-COMPLEX_OPT2_FIELD_NUMBER = 7636949
-complex_opt2 = _descriptor.FieldDescriptor(
-  name='complex_opt2', full_name='protobuf_unittest.complex_opt2', index=31,
-  number=7636949, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-COMPLEX_OPT3_FIELD_NUMBER = 7636463
-complex_opt3 = _descriptor.FieldDescriptor(
-  name='complex_opt3', full_name='protobuf_unittest.complex_opt3', index=32,
-  number=7636463, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-COMPLEXOPT6_FIELD_NUMBER = 7595468
-complexopt6 = _descriptor.FieldDescriptor(
-  name='complexopt6', full_name='protobuf_unittest.complexopt6', index=33,
-  number=7595468, type=10, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FILEOPT_FIELD_NUMBER = 15478479
-fileopt = _descriptor.FieldDescriptor(
-  name='fileopt', full_name='protobuf_unittest.fileopt', index=34,
-  number=15478479, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-MSGOPT_FIELD_NUMBER = 15480088
-msgopt = _descriptor.FieldDescriptor(
-  name='msgopt', full_name='protobuf_unittest.msgopt', index=35,
-  number=15480088, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-FIELDOPT_FIELD_NUMBER = 15481374
-fieldopt = _descriptor.FieldDescriptor(
-  name='fieldopt', full_name='protobuf_unittest.fieldopt', index=36,
-  number=15481374, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ENUMOPT_FIELD_NUMBER = 15483218
-enumopt = _descriptor.FieldDescriptor(
-  name='enumopt', full_name='protobuf_unittest.enumopt', index=37,
-  number=15483218, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ENUMVALOPT_FIELD_NUMBER = 15486921
-enumvalopt = _descriptor.FieldDescriptor(
-  name='enumvalopt', full_name='protobuf_unittest.enumvalopt', index=38,
-  number=15486921, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-SERVICEOPT_FIELD_NUMBER = 15497145
-serviceopt = _descriptor.FieldDescriptor(
-  name='serviceopt', full_name='protobuf_unittest.serviceopt', index=39,
-  number=15497145, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-METHODOPT_FIELD_NUMBER = 15512713
-methodopt = _descriptor.FieldDescriptor(
-  name='methodopt', full_name='protobuf_unittest.methodopt', index=40,
-  number=15512713, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REQUIRED_ENUM_OPT_FIELD_NUMBER = 106161807
-required_enum_opt = _descriptor.FieldDescriptor(
-  name='required_enum_opt', full_name='protobuf_unittest.required_enum_opt', index=41,
-  number=106161807, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM = _descriptor.EnumDescriptor(
-  name='AnEnum',
-  full_name='protobuf_unittest.TestMessageWithCustomOptions.AnEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ANENUM_VAL1', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ANENUM_VAL2', index=1, number=2,
-      options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005{')),
-      type=None),
-  ],
-  containing_type=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\374\377\377')),
-  serialized_start=190,
-  serialized_end=249,
-)
-_sym_db.RegisterEnumDescriptor(_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM)
-
-_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE = _descriptor.EnumDescriptor(
-  name='TestEnumType',
-  full_name='protobuf_unittest.DummyMessageContainingEnum.TestEnumType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='TEST_OPTION_ENUM_TYPE1', index=0, number=22,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TEST_OPTION_ENUM_TYPE2', index=1, number=-23,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=443,
-  serialized_end=522,
-)
-_sym_db.RegisterEnumDescriptor(_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE)
-
-_NESTEDOPTIONTYPE_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='protobuf_unittest.NestedOptionType.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='NESTED_ENUM_VALUE', index=0, number=1,
-      options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005\354\007')),
-      type=None),
-  ],
-  containing_type=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\003\000\000')),
-  serialized_start=2618,
-  serialized_end=2671,
-)
-_sym_db.RegisterEnumDescriptor(_NESTEDOPTIONTYPE_NESTEDENUM)
-
-_OLDOPTIONTYPE_TESTENUM = _descriptor.EnumDescriptor(
-  name='TestEnum',
-  full_name='protobuf_unittest.OldOptionType.TestEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='OLD_VALUE', index=0, number=0,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2815,
-  serialized_end=2840,
-)
-_sym_db.RegisterEnumDescriptor(_OLDOPTIONTYPE_TESTENUM)
-
-_NEWOPTIONTYPE_TESTENUM = _descriptor.EnumDescriptor(
-  name='TestEnum',
-  full_name='protobuf_unittest.NewOptionType.TestEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='OLD_VALUE', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEW_VALUE', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2917,
-  serialized_end=2957,
-)
-_sym_db.RegisterEnumDescriptor(_NEWOPTIONTYPE_TESTENUM)
-
-
-_TESTMESSAGEWITHCUSTOMOPTIONS = _descriptor.Descriptor(
-  name='TestMessageWithCustomOptions',
-  full_name='protobuf_unittest.TestMessageWithCustomOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='field1', full_name='protobuf_unittest.TestMessageWithCustomOptions.field1', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001\301\340\303\035-\341u\n\002\000\000\000'))),
-    _descriptor.FieldDescriptor(
-      name='oneof_field', full_name='protobuf_unittest.TestMessageWithCustomOptions.oneof_field', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM,
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\000\340\351\302\035\310\377\377\377\377\377\377\377\377\001')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='AnOneof', full_name='protobuf_unittest.TestMessageWithCustomOptions.AnOneof',
-      index=0, containing_type=None, fields=[], options=_descriptor._ParseOptions(descriptor_pb2.OneofOptions(), _b('\370\254\303\035\235\377\377\377\377\377\377\377\377\001'))),
-  ],
-  serialized_start=103,
-  serialized_end=294,
-)
-
-
-_CUSTOMOPTIONFOOREQUEST = _descriptor.Descriptor(
-  name='CustomOptionFooRequest',
-  full_name='protobuf_unittest.CustomOptionFooRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=296,
-  serialized_end=320,
-)
-
-
-_CUSTOMOPTIONFOORESPONSE = _descriptor.Descriptor(
-  name='CustomOptionFooResponse',
-  full_name='protobuf_unittest.CustomOptionFooResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=322,
-  serialized_end=347,
-)
-
-
-_CUSTOMOPTIONFOOCLIENTMESSAGE = _descriptor.Descriptor(
-  name='CustomOptionFooClientMessage',
-  full_name='protobuf_unittest.CustomOptionFooClientMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=349,
-  serialized_end=379,
-)
-
-
-_CUSTOMOPTIONFOOSERVERMESSAGE = _descriptor.Descriptor(
-  name='CustomOptionFooServerMessage',
-  full_name='protobuf_unittest.CustomOptionFooServerMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=381,
-  serialized_end=411,
-)
-
-
-_DUMMYMESSAGECONTAININGENUM = _descriptor.Descriptor(
-  name='DummyMessageContainingEnum',
-  full_name='protobuf_unittest.DummyMessageContainingEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=413,
-  serialized_end=522,
-)
-
-
-_DUMMYMESSAGEINVALIDASOPTIONTYPE = _descriptor.Descriptor(
-  name='DummyMessageInvalidAsOptionType',
-  full_name='protobuf_unittest.DummyMessageInvalidAsOptionType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=524,
-  serialized_end=557,
-)
-
-
-_CUSTOMOPTIONMININTEGERVALUES = _descriptor.Descriptor(
-  name='CustomOptionMinIntegerValues',
-  full_name='protobuf_unittest.CustomOptionMinIntegerValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\000\350\306\262\035\200\200\200\200\370\377\377\377\377\001\260\274\262\035\200\200\200\200\200\200\200\200\200\001\200\223\262\035\000\370\365\260\035\000\200\304\260\035\377\377\377\377\017\370\227\260\035\377\377\377\377\377\377\377\377\377\001\235\365\257\035\000\000\000\000\221\356\257\035\000\000\000\000\000\000\000\000\255\215\257\035\000\000\000\200\231\326\250\035\000\000\000\000\000\000\000\200')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=560,
-  serialized_end=698,
-)
-
-
-_CUSTOMOPTIONMAXINTEGERVALUES = _descriptor.Descriptor(
-  name='CustomOptionMaxIntegerValues',
-  full_name='protobuf_unittest.CustomOptionMaxIntegerValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\001\350\306\262\035\377\377\377\377\007\260\274\262\035\377\377\377\377\377\377\377\377\177\200\223\262\035\377\377\377\377\017\370\365\260\035\377\377\377\377\377\377\377\377\377\001\200\304\260\035\376\377\377\377\017\370\227\260\035\376\377\377\377\377\377\377\377\377\001\235\365\257\035\377\377\377\377\221\356\257\035\377\377\377\377\377\377\377\377\255\215\257\035\377\377\377\177\231\326\250\035\377\377\377\377\377\377\377\177')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=701,
-  serialized_end=846,
-)
-
-
-_CUSTOMOPTIONOTHERVALUES = _descriptor.Descriptor(
-  name='CustomOptionOtherValues',
-  full_name='protobuf_unittest.CustomOptionOtherValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\350\306\262\035\234\377\377\377\377\377\377\377\377\001\365\337\243\035\347\207EA\351\334\242\035\373Y\214B\312\300\363?\252\334\242\035\016Hello, \"World\"\262\331\242\035\013Hello\000World\210\331\242\035\351\377\377\377\377\377\377\377\377\001')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=848,
-  serialized_end=958,
-)
-
-
-_SETTINGREALSFROMPOSITIVEINTS = _descriptor.Descriptor(
-  name='SettingRealsFromPositiveInts',
-  full_name='protobuf_unittest.SettingRealsFromPositiveInts',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@A\351\334\242\035\000\000\000\000\000@c@')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=960,
-  serialized_end=1012,
-)
-
-
-_SETTINGREALSFROMNEGATIVEINTS = _descriptor.Descriptor(
-  name='SettingRealsFromNegativeInts',
-  full_name='protobuf_unittest.SettingRealsFromNegativeInts',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@\301\351\334\242\035\000\000\000\000\000@c\300')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1014,
-  serialized_end=1066,
-)
-
-
-_COMPLEXOPTIONTYPE1 = _descriptor.Descriptor(
-  name='ComplexOptionType1',
-  full_name='protobuf_unittest.ComplexOptionType1',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo', full_name='protobuf_unittest.ComplexOptionType1.foo', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo2', full_name='protobuf_unittest.ComplexOptionType1.foo2', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo3', full_name='protobuf_unittest.ComplexOptionType1.foo3', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo4', full_name='protobuf_unittest.ComplexOptionType1.foo4', index=3,
-      number=4, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(100, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=1068,
-  serialized_end=1153,
-)
-
-
-_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4 = _descriptor.Descriptor(
-  name='ComplexOptionType4',
-  full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='waldo', full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4.waldo', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='complex_opt4', full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4.complex_opt4', index=0,
-      number=7633546, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1390,
-  serialized_end=1541,
-)
-
-_COMPLEXOPTIONTYPE2 = _descriptor.Descriptor(
-  name='ComplexOptionType2',
-  full_name='protobuf_unittest.ComplexOptionType2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bar', full_name='protobuf_unittest.ComplexOptionType2.bar', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='baz', full_name='protobuf_unittest.ComplexOptionType2.baz', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fred', full_name='protobuf_unittest.ComplexOptionType2.fred', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='barney', full_name='protobuf_unittest.ComplexOptionType2.barney', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(100, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=1156,
-  serialized_end=1551,
-)
-
-
-_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5 = _descriptor.Descriptor(
-  name='ComplexOptionType5',
-  full_name='protobuf_unittest.ComplexOptionType3.ComplexOptionType5',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='plugh', full_name='protobuf_unittest.ComplexOptionType3.ComplexOptionType5.plugh', index=0,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1675,
-  serialized_end=1710,
-)
-
-_COMPLEXOPTIONTYPE3 = _descriptor.Descriptor(
-  name='ComplexOptionType3',
-  full_name='protobuf_unittest.ComplexOptionType3',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='qux', full_name='protobuf_unittest.ComplexOptionType3.qux', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='complexoptiontype5', full_name='protobuf_unittest.ComplexOptionType3.complexoptiontype5', index=1,
-      number=2, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1554,
-  serialized_end=1710,
-)
-
-
-_COMPLEXOPT6 = _descriptor.Descriptor(
-  name='ComplexOpt6',
-  full_name='protobuf_unittest.ComplexOpt6',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='xyzzy', full_name='protobuf_unittest.ComplexOpt6.xyzzy', index=0,
-      number=7593951, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1712,
-  serialized_end=1743,
-)
-
-
-_VARIOUSCOMPLEXOPTIONS = _descriptor.Descriptor(
-  name='VariousComplexOptions',
-  full_name='protobuf_unittest.VariousComplexOptions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\242\342\225\035\002\010*\242\342\225\035\006\330\205\236\035\304\002\242\342\225\035\010\222\365\235\035\003\010\354\006\242\342\225\035\002 c\242\342\225\035\002 X\252\375\220\035\003\020\333\007\252\375\220\035\006\370\346\227\035\216\005\252\375\220\035\005\n\003\010\347\005\252\375\220\035\010\n\006\330\205\236\035\317\017\252\375\220\035\n\n\010\222\365\235\035\003\010\330\017\252\375\220\035\010\302\254\227\035\003\010\345\005\252\375\220\035\013\302\254\227\035\006\330\205\236\035\316\017\252\375\220\035\r\302\254\227\035\010\222\365\235\035\003\010\311\020\322\250\217\035\003\010\263\017\252\375\220\035\005\032\003\010\301\002\252\375\220\035\004\"\002\010e\252\375\220\035\005\"\003\010\324\001\372\336\220\035\002\010\t\372\336\220\035\004\023\030\026\024\343\334\374\034\370\375\373\034\030\344\334\374\034')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1746,
-  serialized_end=1987,
-)
-
-
-_AGGREGATEMESSAGESET = _descriptor.Descriptor(
-  name='AggregateMessageSet',
-  full_name='protobuf_unittest.AggregateMessageSet',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')),
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(4, 2147483647), ],
-  oneofs=[
-  ],
-  serialized_start=1989,
-  serialized_end=2024,
-)
-
-
-_AGGREGATEMESSAGESETELEMENT = _descriptor.Descriptor(
-  name='AggregateMessageSetElement',
-  full_name='protobuf_unittest.AggregateMessageSetElement',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='s', full_name='protobuf_unittest.AggregateMessageSetElement.s', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='message_set_extension', full_name='protobuf_unittest.AggregateMessageSetElement.message_set_extension', index=0,
-      number=15447542, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2027,
-  serialized_end=2187,
-)
-
-
-_AGGREGATE = _descriptor.Descriptor(
-  name='Aggregate',
-  full_name='protobuf_unittest.Aggregate',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='i', full_name='protobuf_unittest.Aggregate.i', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='s', full_name='protobuf_unittest.Aggregate.s', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='sub', full_name='protobuf_unittest.Aggregate.sub', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='file', full_name='protobuf_unittest.Aggregate.file', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='mset', full_name='protobuf_unittest.Aggregate.mset', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='nested', full_name='protobuf_unittest.Aggregate.nested', index=0,
-      number=15476903, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2190,
-  serialized_end=2443,
-)
-
-
-_AGGREGATEMESSAGE = _descriptor.Descriptor(
-  name='AggregateMessage',
-  full_name='protobuf_unittest.AggregateMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='fieldname', full_name='protobuf_unittest.AggregateMessage.fieldname', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\362\241\207;\021\022\017FieldAnnotation'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\302\321\206;\025\010e\022\021MessageAnnotation')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2445,
-  serialized_end=2534,
-)
-
-
-_NESTEDOPTIONTYPE_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.NestedOptionType.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nested_field', full_name='protobuf_unittest.NestedOptionType.NestedMessage.nested_field', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\301\340\303\035\352\003\000\000\000\000\000\000'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\351\302\035\351\007')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2557,
-  serialized_end=2616,
-)
-
-_NESTEDOPTIONTYPE = _descriptor.Descriptor(
-  name='NestedOptionType',
-  full_name='protobuf_unittest.NestedOptionType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='nested_extension', full_name='protobuf_unittest.NestedOptionType.nested_extension', index=0,
-      number=7912573, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\213\312\035\355\007'))),
-  ],
-  nested_types=[_NESTEDOPTIONTYPE_NESTEDMESSAGE, ],
-  enum_types=[
-    _NESTEDOPTIONTYPE_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2537,
-  serialized_end=2738,
-)
-
-
-_OLDOPTIONTYPE = _descriptor.Descriptor(
-  name='OldOptionType',
-  full_name='protobuf_unittest.OldOptionType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.OldOptionType.value', index=0,
-      number=1, type=14, cpp_type=8, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _OLDOPTIONTYPE_TESTENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2740,
-  serialized_end=2840,
-)
-
-
-_NEWOPTIONTYPE = _descriptor.Descriptor(
-  name='NewOptionType',
-  full_name='protobuf_unittest.NewOptionType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='protobuf_unittest.NewOptionType.value', index=0,
-      number=1, type=14, cpp_type=8, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _NEWOPTIONTYPE_TESTENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2842,
-  serialized_end=2957,
-)
-
-
-_TESTMESSAGEWITHREQUIREDENUMOPTION = _descriptor.Descriptor(
-  name='TestMessageWithRequiredEnumOption',
-  full_name='protobuf_unittest.TestMessageWithRequiredEnumOption',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\372\350\374\224\003\002\010\000')),
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2959,
-  serialized_end=3004,
-)
-
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.containing_type = _TESTMESSAGEWITHCUSTOMOPTIONS
-_TESTMESSAGEWITHCUSTOMOPTIONS.oneofs_by_name['AnOneof'].fields.append(
-  _TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['oneof_field'])
-_TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['oneof_field'].containing_oneof = _TESTMESSAGEWITHCUSTOMOPTIONS.oneofs_by_name['AnOneof']
-_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE.containing_type = _DUMMYMESSAGECONTAININGENUM
-_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.containing_type = _COMPLEXOPTIONTYPE2
-_COMPLEXOPTIONTYPE2.fields_by_name['bar'].message_type = _COMPLEXOPTIONTYPE1
-_COMPLEXOPTIONTYPE2.fields_by_name['fred'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4
-_COMPLEXOPTIONTYPE2.fields_by_name['barney'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4
-_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5.containing_type = _COMPLEXOPTIONTYPE3
-_COMPLEXOPTIONTYPE3.fields_by_name['complexoptiontype5'].message_type = _COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5
-_AGGREGATE.fields_by_name['sub'].message_type = _AGGREGATE
-_AGGREGATE.fields_by_name['file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEOPTIONS
-_AGGREGATE.fields_by_name['mset'].message_type = _AGGREGATEMESSAGESET
-_NESTEDOPTIONTYPE_NESTEDMESSAGE.containing_type = _NESTEDOPTIONTYPE
-_NESTEDOPTIONTYPE_NESTEDENUM.containing_type = _NESTEDOPTIONTYPE
-_OLDOPTIONTYPE.fields_by_name['value'].enum_type = _OLDOPTIONTYPE_TESTENUM
-_OLDOPTIONTYPE_TESTENUM.containing_type = _OLDOPTIONTYPE
-_NEWOPTIONTYPE.fields_by_name['value'].enum_type = _NEWOPTIONTYPE_TESTENUM
-_NEWOPTIONTYPE_TESTENUM.containing_type = _NEWOPTIONTYPE
-DESCRIPTOR.message_types_by_name['TestMessageWithCustomOptions'] = _TESTMESSAGEWITHCUSTOMOPTIONS
-DESCRIPTOR.message_types_by_name['CustomOptionFooRequest'] = _CUSTOMOPTIONFOOREQUEST
-DESCRIPTOR.message_types_by_name['CustomOptionFooResponse'] = _CUSTOMOPTIONFOORESPONSE
-DESCRIPTOR.message_types_by_name['CustomOptionFooClientMessage'] = _CUSTOMOPTIONFOOCLIENTMESSAGE
-DESCRIPTOR.message_types_by_name['CustomOptionFooServerMessage'] = _CUSTOMOPTIONFOOSERVERMESSAGE
-DESCRIPTOR.message_types_by_name['DummyMessageContainingEnum'] = _DUMMYMESSAGECONTAININGENUM
-DESCRIPTOR.message_types_by_name['DummyMessageInvalidAsOptionType'] = _DUMMYMESSAGEINVALIDASOPTIONTYPE
-DESCRIPTOR.message_types_by_name['CustomOptionMinIntegerValues'] = _CUSTOMOPTIONMININTEGERVALUES
-DESCRIPTOR.message_types_by_name['CustomOptionMaxIntegerValues'] = _CUSTOMOPTIONMAXINTEGERVALUES
-DESCRIPTOR.message_types_by_name['CustomOptionOtherValues'] = _CUSTOMOPTIONOTHERVALUES
-DESCRIPTOR.message_types_by_name['SettingRealsFromPositiveInts'] = _SETTINGREALSFROMPOSITIVEINTS
-DESCRIPTOR.message_types_by_name['SettingRealsFromNegativeInts'] = _SETTINGREALSFROMNEGATIVEINTS
-DESCRIPTOR.message_types_by_name['ComplexOptionType1'] = _COMPLEXOPTIONTYPE1
-DESCRIPTOR.message_types_by_name['ComplexOptionType2'] = _COMPLEXOPTIONTYPE2
-DESCRIPTOR.message_types_by_name['ComplexOptionType3'] = _COMPLEXOPTIONTYPE3
-DESCRIPTOR.message_types_by_name['ComplexOpt6'] = _COMPLEXOPT6
-DESCRIPTOR.message_types_by_name['VariousComplexOptions'] = _VARIOUSCOMPLEXOPTIONS
-DESCRIPTOR.message_types_by_name['AggregateMessageSet'] = _AGGREGATEMESSAGESET
-DESCRIPTOR.message_types_by_name['AggregateMessageSetElement'] = _AGGREGATEMESSAGESETELEMENT
-DESCRIPTOR.message_types_by_name['Aggregate'] = _AGGREGATE
-DESCRIPTOR.message_types_by_name['AggregateMessage'] = _AGGREGATEMESSAGE
-DESCRIPTOR.message_types_by_name['NestedOptionType'] = _NESTEDOPTIONTYPE
-DESCRIPTOR.message_types_by_name['OldOptionType'] = _OLDOPTIONTYPE
-DESCRIPTOR.message_types_by_name['NewOptionType'] = _NEWOPTIONTYPE
-DESCRIPTOR.message_types_by_name['TestMessageWithRequiredEnumOption'] = _TESTMESSAGEWITHREQUIREDENUMOPTION
-DESCRIPTOR.enum_types_by_name['MethodOpt1'] = _METHODOPT1
-DESCRIPTOR.enum_types_by_name['AggregateEnum'] = _AGGREGATEENUM
-DESCRIPTOR.extensions_by_name['file_opt1'] = file_opt1
-DESCRIPTOR.extensions_by_name['message_opt1'] = message_opt1
-DESCRIPTOR.extensions_by_name['field_opt1'] = field_opt1
-DESCRIPTOR.extensions_by_name['field_opt2'] = field_opt2
-DESCRIPTOR.extensions_by_name['oneof_opt1'] = oneof_opt1
-DESCRIPTOR.extensions_by_name['enum_opt1'] = enum_opt1
-DESCRIPTOR.extensions_by_name['enum_value_opt1'] = enum_value_opt1
-DESCRIPTOR.extensions_by_name['service_opt1'] = service_opt1
-DESCRIPTOR.extensions_by_name['method_opt1'] = method_opt1
-DESCRIPTOR.extensions_by_name['bool_opt'] = bool_opt
-DESCRIPTOR.extensions_by_name['int32_opt'] = int32_opt
-DESCRIPTOR.extensions_by_name['int64_opt'] = int64_opt
-DESCRIPTOR.extensions_by_name['uint32_opt'] = uint32_opt
-DESCRIPTOR.extensions_by_name['uint64_opt'] = uint64_opt
-DESCRIPTOR.extensions_by_name['sint32_opt'] = sint32_opt
-DESCRIPTOR.extensions_by_name['sint64_opt'] = sint64_opt
-DESCRIPTOR.extensions_by_name['fixed32_opt'] = fixed32_opt
-DESCRIPTOR.extensions_by_name['fixed64_opt'] = fixed64_opt
-DESCRIPTOR.extensions_by_name['sfixed32_opt'] = sfixed32_opt
-DESCRIPTOR.extensions_by_name['sfixed64_opt'] = sfixed64_opt
-DESCRIPTOR.extensions_by_name['float_opt'] = float_opt
-DESCRIPTOR.extensions_by_name['double_opt'] = double_opt
-DESCRIPTOR.extensions_by_name['string_opt'] = string_opt
-DESCRIPTOR.extensions_by_name['bytes_opt'] = bytes_opt
-DESCRIPTOR.extensions_by_name['enum_opt'] = enum_opt
-DESCRIPTOR.extensions_by_name['message_type_opt'] = message_type_opt
-DESCRIPTOR.extensions_by_name['quux'] = quux
-DESCRIPTOR.extensions_by_name['corge'] = corge
-DESCRIPTOR.extensions_by_name['grault'] = grault
-DESCRIPTOR.extensions_by_name['garply'] = garply
-DESCRIPTOR.extensions_by_name['complex_opt1'] = complex_opt1
-DESCRIPTOR.extensions_by_name['complex_opt2'] = complex_opt2
-DESCRIPTOR.extensions_by_name['complex_opt3'] = complex_opt3
-DESCRIPTOR.extensions_by_name['complexopt6'] = complexopt6
-DESCRIPTOR.extensions_by_name['fileopt'] = fileopt
-DESCRIPTOR.extensions_by_name['msgopt'] = msgopt
-DESCRIPTOR.extensions_by_name['fieldopt'] = fieldopt
-DESCRIPTOR.extensions_by_name['enumopt'] = enumopt
-DESCRIPTOR.extensions_by_name['enumvalopt'] = enumvalopt
-DESCRIPTOR.extensions_by_name['serviceopt'] = serviceopt
-DESCRIPTOR.extensions_by_name['methodopt'] = methodopt
-DESCRIPTOR.extensions_by_name['required_enum_opt'] = required_enum_opt
-
-TestMessageWithCustomOptions = _reflection.GeneratedProtocolMessageType('TestMessageWithCustomOptions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGEWITHCUSTOMOPTIONS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageWithCustomOptions)
-  ))
-_sym_db.RegisterMessage(TestMessageWithCustomOptions)
-
-CustomOptionFooRequest = _reflection.GeneratedProtocolMessageType('CustomOptionFooRequest', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONFOOREQUEST,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooRequest)
-  ))
-_sym_db.RegisterMessage(CustomOptionFooRequest)
-
-CustomOptionFooResponse = _reflection.GeneratedProtocolMessageType('CustomOptionFooResponse', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONFOORESPONSE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooResponse)
-  ))
-_sym_db.RegisterMessage(CustomOptionFooResponse)
-
-CustomOptionFooClientMessage = _reflection.GeneratedProtocolMessageType('CustomOptionFooClientMessage', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONFOOCLIENTMESSAGE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooClientMessage)
-  ))
-_sym_db.RegisterMessage(CustomOptionFooClientMessage)
-
-CustomOptionFooServerMessage = _reflection.GeneratedProtocolMessageType('CustomOptionFooServerMessage', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONFOOSERVERMESSAGE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooServerMessage)
-  ))
-_sym_db.RegisterMessage(CustomOptionFooServerMessage)
-
-DummyMessageContainingEnum = _reflection.GeneratedProtocolMessageType('DummyMessageContainingEnum', (_message.Message,), dict(
-  DESCRIPTOR = _DUMMYMESSAGECONTAININGENUM,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.DummyMessageContainingEnum)
-  ))
-_sym_db.RegisterMessage(DummyMessageContainingEnum)
-
-DummyMessageInvalidAsOptionType = _reflection.GeneratedProtocolMessageType('DummyMessageInvalidAsOptionType', (_message.Message,), dict(
-  DESCRIPTOR = _DUMMYMESSAGEINVALIDASOPTIONTYPE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.DummyMessageInvalidAsOptionType)
-  ))
-_sym_db.RegisterMessage(DummyMessageInvalidAsOptionType)
-
-CustomOptionMinIntegerValues = _reflection.GeneratedProtocolMessageType('CustomOptionMinIntegerValues', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONMININTEGERVALUES,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionMinIntegerValues)
-  ))
-_sym_db.RegisterMessage(CustomOptionMinIntegerValues)
-
-CustomOptionMaxIntegerValues = _reflection.GeneratedProtocolMessageType('CustomOptionMaxIntegerValues', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONMAXINTEGERVALUES,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionMaxIntegerValues)
-  ))
-_sym_db.RegisterMessage(CustomOptionMaxIntegerValues)
-
-CustomOptionOtherValues = _reflection.GeneratedProtocolMessageType('CustomOptionOtherValues', (_message.Message,), dict(
-  DESCRIPTOR = _CUSTOMOPTIONOTHERVALUES,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionOtherValues)
-  ))
-_sym_db.RegisterMessage(CustomOptionOtherValues)
-
-SettingRealsFromPositiveInts = _reflection.GeneratedProtocolMessageType('SettingRealsFromPositiveInts', (_message.Message,), dict(
-  DESCRIPTOR = _SETTINGREALSFROMPOSITIVEINTS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.SettingRealsFromPositiveInts)
-  ))
-_sym_db.RegisterMessage(SettingRealsFromPositiveInts)
-
-SettingRealsFromNegativeInts = _reflection.GeneratedProtocolMessageType('SettingRealsFromNegativeInts', (_message.Message,), dict(
-  DESCRIPTOR = _SETTINGREALSFROMNEGATIVEINTS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.SettingRealsFromNegativeInts)
-  ))
-_sym_db.RegisterMessage(SettingRealsFromNegativeInts)
-
-ComplexOptionType1 = _reflection.GeneratedProtocolMessageType('ComplexOptionType1', (_message.Message,), dict(
-  DESCRIPTOR = _COMPLEXOPTIONTYPE1,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType1)
-  ))
-_sym_db.RegisterMessage(ComplexOptionType1)
-
-ComplexOptionType2 = _reflection.GeneratedProtocolMessageType('ComplexOptionType2', (_message.Message,), dict(
-
-  ComplexOptionType4 = _reflection.GeneratedProtocolMessageType('ComplexOptionType4', (_message.Message,), dict(
-    DESCRIPTOR = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4,
-    __module__ = 'google.protobuf.unittest_custom_options_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType2.ComplexOptionType4)
-    ))
-  ,
-  DESCRIPTOR = _COMPLEXOPTIONTYPE2,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType2)
-  ))
-_sym_db.RegisterMessage(ComplexOptionType2)
-_sym_db.RegisterMessage(ComplexOptionType2.ComplexOptionType4)
-
-ComplexOptionType3 = _reflection.GeneratedProtocolMessageType('ComplexOptionType3', (_message.Message,), dict(
-
-  ComplexOptionType5 = _reflection.GeneratedProtocolMessageType('ComplexOptionType5', (_message.Message,), dict(
-    DESCRIPTOR = _COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5,
-    __module__ = 'google.protobuf.unittest_custom_options_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType3.ComplexOptionType5)
-    ))
-  ,
-  DESCRIPTOR = _COMPLEXOPTIONTYPE3,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType3)
-  ))
-_sym_db.RegisterMessage(ComplexOptionType3)
-_sym_db.RegisterMessage(ComplexOptionType3.ComplexOptionType5)
-
-ComplexOpt6 = _reflection.GeneratedProtocolMessageType('ComplexOpt6', (_message.Message,), dict(
-  DESCRIPTOR = _COMPLEXOPT6,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOpt6)
-  ))
-_sym_db.RegisterMessage(ComplexOpt6)
-
-VariousComplexOptions = _reflection.GeneratedProtocolMessageType('VariousComplexOptions', (_message.Message,), dict(
-  DESCRIPTOR = _VARIOUSCOMPLEXOPTIONS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.VariousComplexOptions)
-  ))
-_sym_db.RegisterMessage(VariousComplexOptions)
-
-AggregateMessageSet = _reflection.GeneratedProtocolMessageType('AggregateMessageSet', (_message.Message,), dict(
-  DESCRIPTOR = _AGGREGATEMESSAGESET,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessageSet)
-  ))
-_sym_db.RegisterMessage(AggregateMessageSet)
-
-AggregateMessageSetElement = _reflection.GeneratedProtocolMessageType('AggregateMessageSetElement', (_message.Message,), dict(
-  DESCRIPTOR = _AGGREGATEMESSAGESETELEMENT,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessageSetElement)
-  ))
-_sym_db.RegisterMessage(AggregateMessageSetElement)
-
-Aggregate = _reflection.GeneratedProtocolMessageType('Aggregate', (_message.Message,), dict(
-  DESCRIPTOR = _AGGREGATE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.Aggregate)
-  ))
-_sym_db.RegisterMessage(Aggregate)
-
-AggregateMessage = _reflection.GeneratedProtocolMessageType('AggregateMessage', (_message.Message,), dict(
-  DESCRIPTOR = _AGGREGATEMESSAGE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessage)
-  ))
-_sym_db.RegisterMessage(AggregateMessage)
-
-NestedOptionType = _reflection.GeneratedProtocolMessageType('NestedOptionType', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _NESTEDOPTIONTYPE_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_custom_options_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedOptionType.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _NESTEDOPTIONTYPE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedOptionType)
-  ))
-_sym_db.RegisterMessage(NestedOptionType)
-_sym_db.RegisterMessage(NestedOptionType.NestedMessage)
-
-OldOptionType = _reflection.GeneratedProtocolMessageType('OldOptionType', (_message.Message,), dict(
-  DESCRIPTOR = _OLDOPTIONTYPE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.OldOptionType)
-  ))
-_sym_db.RegisterMessage(OldOptionType)
-
-NewOptionType = _reflection.GeneratedProtocolMessageType('NewOptionType', (_message.Message,), dict(
-  DESCRIPTOR = _NEWOPTIONTYPE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.NewOptionType)
-  ))
-_sym_db.RegisterMessage(NewOptionType)
-
-TestMessageWithRequiredEnumOption = _reflection.GeneratedProtocolMessageType('TestMessageWithRequiredEnumOption', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGEWITHREQUIREDENUMOPTION,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageWithRequiredEnumOption)
-  ))
-_sym_db.RegisterMessage(TestMessageWithRequiredEnumOption)
-
-google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(file_opt1)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(message_opt1)
-google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_opt1)
-google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_opt2)
-google_dot_protobuf_dot_descriptor__pb2.OneofOptions.RegisterExtension(oneof_opt1)
-google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_opt1)
-google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enum_value_opt1)
-google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(service_opt1)
-method_opt1.enum_type = _METHODOPT1
-google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(method_opt1)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(bool_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(int32_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(int64_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(uint32_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(uint64_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sint32_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sint64_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(fixed32_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(fixed64_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sfixed32_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sfixed64_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(float_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(double_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(string_opt)
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(bytes_opt)
-enum_opt.enum_type = _DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(enum_opt)
-message_type_opt.message_type = _DUMMYMESSAGEINVALIDASOPTIONTYPE
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(message_type_opt)
-ComplexOptionType1.RegisterExtension(quux)
-corge.message_type = _COMPLEXOPTIONTYPE3
-ComplexOptionType1.RegisterExtension(corge)
-ComplexOptionType2.RegisterExtension(grault)
-garply.message_type = _COMPLEXOPTIONTYPE1
-ComplexOptionType2.RegisterExtension(garply)
-complex_opt1.message_type = _COMPLEXOPTIONTYPE1
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt1)
-complex_opt2.message_type = _COMPLEXOPTIONTYPE2
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt2)
-complex_opt3.message_type = _COMPLEXOPTIONTYPE3
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt3)
-complexopt6.message_type = _COMPLEXOPT6
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complexopt6)
-fileopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(fileopt)
-msgopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(msgopt)
-fieldopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(fieldopt)
-enumopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumopt)
-enumvalopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalopt)
-serviceopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(serviceopt)
-methodopt.message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(methodopt)
-required_enum_opt.message_type = _OLDOPTIONTYPE
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(required_enum_opt)
-_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.extensions_by_name['complex_opt4'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4
-google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.extensions_by_name['complex_opt4'])
-_AGGREGATEMESSAGESETELEMENT.extensions_by_name['message_set_extension'].message_type = _AGGREGATEMESSAGESETELEMENT
-AggregateMessageSet.RegisterExtension(_AGGREGATEMESSAGESETELEMENT.extensions_by_name['message_set_extension'])
-_AGGREGATE.extensions_by_name['nested'].message_type = _AGGREGATE
-google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(_AGGREGATE.extensions_by_name['nested'])
-google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(_NESTEDOPTIONTYPE.extensions_by_name['nested_extension'])
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\200\001\001\210\001\001\220\001\001\360\350\301\035\352\255\300\345$\372\354\205;p\010d\022\016FileAnnotation\032\026\022\024NestedFileAnnotation\"\036\372\354\205;\031\022\027FileExtensionAnnotation*$\013\020\366\353\256\007\032\033\n\031EmbeddedMessageSetElement\014'))
-_AGGREGATEENUM.has_options = True
-_AGGREGATEENUM._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\222\225\210;\020\022\016EnumAnnotation'))
-_AGGREGATEENUM.values_by_name["VALUE"].has_options = True
-_AGGREGATEENUM.values_by_name["VALUE"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\312\374\211;\025\022\023EnumValueAnnotation'))
-_TESTMESSAGEWITHCUSTOMOPTIONS.oneofs_by_name['AnOneof'].has_options = True
-_TESTMESSAGEWITHCUSTOMOPTIONS.oneofs_by_name['AnOneof']._options = _descriptor._ParseOptions(descriptor_pb2.OneofOptions(), _b('\370\254\303\035\235\377\377\377\377\377\377\377\377\001'))
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.has_options = True
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\374\377\377'))
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.values_by_name["ANENUM_VAL2"].has_options = True
-_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.values_by_name["ANENUM_VAL2"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005{'))
-_TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['field1'].has_options = True
-_TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['field1']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001\301\340\303\035-\341u\n\002\000\000\000'))
-_TESTMESSAGEWITHCUSTOMOPTIONS.has_options = True
-_TESTMESSAGEWITHCUSTOMOPTIONS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\000\340\351\302\035\310\377\377\377\377\377\377\377\377\001'))
-_CUSTOMOPTIONMININTEGERVALUES.has_options = True
-_CUSTOMOPTIONMININTEGERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\000\350\306\262\035\200\200\200\200\370\377\377\377\377\001\260\274\262\035\200\200\200\200\200\200\200\200\200\001\200\223\262\035\000\370\365\260\035\000\200\304\260\035\377\377\377\377\017\370\227\260\035\377\377\377\377\377\377\377\377\377\001\235\365\257\035\000\000\000\000\221\356\257\035\000\000\000\000\000\000\000\000\255\215\257\035\000\000\000\200\231\326\250\035\000\000\000\000\000\000\000\200'))
-_CUSTOMOPTIONMAXINTEGERVALUES.has_options = True
-_CUSTOMOPTIONMAXINTEGERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\001\350\306\262\035\377\377\377\377\007\260\274\262\035\377\377\377\377\377\377\377\377\177\200\223\262\035\377\377\377\377\017\370\365\260\035\377\377\377\377\377\377\377\377\377\001\200\304\260\035\376\377\377\377\017\370\227\260\035\376\377\377\377\377\377\377\377\377\001\235\365\257\035\377\377\377\377\221\356\257\035\377\377\377\377\377\377\377\377\255\215\257\035\377\377\377\177\231\326\250\035\377\377\377\377\377\377\377\177'))
-_CUSTOMOPTIONOTHERVALUES.has_options = True
-_CUSTOMOPTIONOTHERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\350\306\262\035\234\377\377\377\377\377\377\377\377\001\365\337\243\035\347\207EA\351\334\242\035\373Y\214B\312\300\363?\252\334\242\035\016Hello, \"World\"\262\331\242\035\013Hello\000World\210\331\242\035\351\377\377\377\377\377\377\377\377\001'))
-_SETTINGREALSFROMPOSITIVEINTS.has_options = True
-_SETTINGREALSFROMPOSITIVEINTS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@A\351\334\242\035\000\000\000\000\000@c@'))
-_SETTINGREALSFROMNEGATIVEINTS.has_options = True
-_SETTINGREALSFROMNEGATIVEINTS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@\301\351\334\242\035\000\000\000\000\000@c\300'))
-_VARIOUSCOMPLEXOPTIONS.has_options = True
-_VARIOUSCOMPLEXOPTIONS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\242\342\225\035\002\010*\242\342\225\035\006\330\205\236\035\304\002\242\342\225\035\010\222\365\235\035\003\010\354\006\242\342\225\035\002 c\242\342\225\035\002 X\252\375\220\035\003\020\333\007\252\375\220\035\006\370\346\227\035\216\005\252\375\220\035\005\n\003\010\347\005\252\375\220\035\010\n\006\330\205\236\035\317\017\252\375\220\035\n\n\010\222\365\235\035\003\010\330\017\252\375\220\035\010\302\254\227\035\003\010\345\005\252\375\220\035\013\302\254\227\035\006\330\205\236\035\316\017\252\375\220\035\r\302\254\227\035\010\222\365\235\035\003\010\311\020\322\250\217\035\003\010\263\017\252\375\220\035\005\032\003\010\301\002\252\375\220\035\004\"\002\010e\252\375\220\035\005\"\003\010\324\001\372\336\220\035\002\010\t\372\336\220\035\004\023\030\026\024\343\334\374\034\370\375\373\034\030\344\334\374\034'))
-_AGGREGATEMESSAGESET.has_options = True
-_AGGREGATEMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001'))
-_AGGREGATEMESSAGE.fields_by_name['fieldname'].has_options = True
-_AGGREGATEMESSAGE.fields_by_name['fieldname']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\362\241\207;\021\022\017FieldAnnotation'))
-_AGGREGATEMESSAGE.has_options = True
-_AGGREGATEMESSAGE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\302\321\206;\025\010e\022\021MessageAnnotation'))
-_NESTEDOPTIONTYPE_NESTEDMESSAGE.fields_by_name['nested_field'].has_options = True
-_NESTEDOPTIONTYPE_NESTEDMESSAGE.fields_by_name['nested_field']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\301\340\303\035\352\003\000\000\000\000\000\000'))
-_NESTEDOPTIONTYPE_NESTEDMESSAGE.has_options = True
-_NESTEDOPTIONTYPE_NESTEDMESSAGE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\351\302\035\351\007'))
-_NESTEDOPTIONTYPE_NESTEDENUM.has_options = True
-_NESTEDOPTIONTYPE_NESTEDENUM._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\003\000\000'))
-_NESTEDOPTIONTYPE_NESTEDENUM.values_by_name["NESTED_ENUM_VALUE"].has_options = True
-_NESTEDOPTIONTYPE_NESTEDENUM.values_by_name["NESTED_ENUM_VALUE"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005\354\007'))
-_NESTEDOPTIONTYPE.extensions_by_name['nested_extension'].has_options = True
-_NESTEDOPTIONTYPE.extensions_by_name['nested_extension']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\213\312\035\355\007'))
-_TESTMESSAGEWITHREQUIREDENUMOPTION.has_options = True
-_TESTMESSAGEWITHREQUIREDENUMOPTION._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\372\350\374\224\003\002\010\000'))
-
-_TESTSERVICEWITHCUSTOMOPTIONS = _descriptor.ServiceDescriptor(
-  name='TestServiceWithCustomOptions',
-  full_name='protobuf_unittest.TestServiceWithCustomOptions',
-  file=DESCRIPTOR,
-  index=0,
-  options=_descriptor._ParseOptions(descriptor_pb2.ServiceOptions(), _b('\220\262\213\036\323\333\200\313I')),
-  serialized_start=3142,
-  serialized_end=3284,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='Foo',
-    full_name='protobuf_unittest.TestServiceWithCustomOptions.Foo',
-    index=0,
-    containing_service=None,
-    input_type=_CUSTOMOPTIONFOOREQUEST,
-    output_type=_CUSTOMOPTIONFOORESPONSE,
-    options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\340\372\214\036\002')),
-  ),
-])
-
-TestServiceWithCustomOptions = service_reflection.GeneratedServiceType('TestServiceWithCustomOptions', (_service.Service,), dict(
-  DESCRIPTOR = _TESTSERVICEWITHCUSTOMOPTIONS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  ))
-
-TestServiceWithCustomOptions_Stub = service_reflection.GeneratedServiceStubType('TestServiceWithCustomOptions_Stub', (TestServiceWithCustomOptions,), dict(
-  DESCRIPTOR = _TESTSERVICEWITHCUSTOMOPTIONS,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  ))
-
-
-
-_AGGREGATESERVICE = _descriptor.ServiceDescriptor(
-  name='AggregateService',
-  full_name='protobuf_unittest.AggregateService',
-  file=DESCRIPTOR,
-  index=1,
-  options=_descriptor._ParseOptions(descriptor_pb2.ServiceOptions(), _b('\312\373\216;\023\022\021ServiceAnnotation')),
-  serialized_start=3287,
-  serialized_end=3440,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='Method',
-    full_name='protobuf_unittest.AggregateService.Method',
-    index=0,
-    containing_service=None,
-    input_type=_AGGREGATEMESSAGE,
-    output_type=_AGGREGATEMESSAGE,
-    options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\312\310\226;\022\022\020MethodAnnotation')),
-  ),
-])
-
-AggregateService = service_reflection.GeneratedServiceType('AggregateService', (_service.Service,), dict(
-  DESCRIPTOR = _AGGREGATESERVICE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  ))
-
-AggregateService_Stub = service_reflection.GeneratedServiceStubType('AggregateService_Stub', (AggregateService,), dict(
-  DESCRIPTOR = _AGGREGATESERVICE,
-  __module__ = 'google.protobuf.unittest_custom_options_pb2'
-  ))
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_import_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_import_pb2.py
deleted file mode 100644
index 0e80b85..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_import_pb2.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_import.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_import_public_pb2 as google_dot_protobuf_dot_unittest__import__public__pb2
-
-from google.protobuf.unittest_import_public_pb2 import *
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_import.proto',
-  package='protobuf_unittest_import',
-  syntax='proto2',
-  serialized_pb=_b('\n%google/protobuf/unittest_import.proto\x12\x18protobuf_unittest_import\x1a,google/protobuf/unittest_import_public.proto\"\x1a\n\rImportMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05*<\n\nImportEnum\x12\x0e\n\nIMPORT_FOO\x10\x07\x12\x0e\n\nIMPORT_BAR\x10\x08\x12\x0e\n\nIMPORT_BAZ\x10\t*1\n\x10ImportEnumForMap\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x42\x1f\n\x18\x63om.google.protobuf.testH\x01\xf8\x01\x01P\x00')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__import__public__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_IMPORTENUM = _descriptor.EnumDescriptor(
-  name='ImportEnum',
-  full_name='protobuf_unittest_import.ImportEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='IMPORT_FOO', index=0, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='IMPORT_BAR', index=1, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='IMPORT_BAZ', index=2, number=9,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=141,
-  serialized_end=201,
-)
-_sym_db.RegisterEnumDescriptor(_IMPORTENUM)
-
-ImportEnum = enum_type_wrapper.EnumTypeWrapper(_IMPORTENUM)
-_IMPORTENUMFORMAP = _descriptor.EnumDescriptor(
-  name='ImportEnumForMap',
-  full_name='protobuf_unittest_import.ImportEnumForMap',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=203,
-  serialized_end=252,
-)
-_sym_db.RegisterEnumDescriptor(_IMPORTENUMFORMAP)
-
-ImportEnumForMap = enum_type_wrapper.EnumTypeWrapper(_IMPORTENUMFORMAP)
-IMPORT_FOO = 7
-IMPORT_BAR = 8
-IMPORT_BAZ = 9
-UNKNOWN = 0
-FOO = 1
-BAR = 2
-
-
-
-_IMPORTMESSAGE = _descriptor.Descriptor(
-  name='ImportMessage',
-  full_name='protobuf_unittest_import.ImportMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='d', full_name='protobuf_unittest_import.ImportMessage.d', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=113,
-  serialized_end=139,
-)
-
-DESCRIPTOR.message_types_by_name['ImportMessage'] = _IMPORTMESSAGE
-DESCRIPTOR.enum_types_by_name['ImportEnum'] = _IMPORTENUM
-DESCRIPTOR.enum_types_by_name['ImportEnumForMap'] = _IMPORTENUMFORMAP
-
-ImportMessage = _reflection.GeneratedProtocolMessageType('ImportMessage', (_message.Message,), dict(
-  DESCRIPTOR = _IMPORTMESSAGE,
-  __module__ = 'google.protobuf.unittest_import_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest_import.ImportMessage)
-  ))
-_sym_db.RegisterMessage(ImportMessage)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030com.google.protobuf.testH\001\370\001\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_import_public_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_import_public_pb2.py
deleted file mode 100644
index 8627385..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_import_public_pb2.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_import_public.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_import_public.proto',
-  package='protobuf_unittest_import',
-  syntax='proto2',
-  serialized_pb=_b('\n,google/protobuf/unittest_import_public.proto\x12\x18protobuf_unittest_import\" \n\x13PublicImportMessage\x12\t\n\x01\x65\x18\x01 \x01(\x05\x42\x1a\n\x18\x63om.google.protobuf.test')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_PUBLICIMPORTMESSAGE = _descriptor.Descriptor(
-  name='PublicImportMessage',
-  full_name='protobuf_unittest_import.PublicImportMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='e', full_name='protobuf_unittest_import.PublicImportMessage.e', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=74,
-  serialized_end=106,
-)
-
-DESCRIPTOR.message_types_by_name['PublicImportMessage'] = _PUBLICIMPORTMESSAGE
-
-PublicImportMessage = _reflection.GeneratedProtocolMessageType('PublicImportMessage', (_message.Message,), dict(
-  DESCRIPTOR = _PUBLICIMPORTMESSAGE,
-  __module__ = 'google.protobuf.unittest_import_public_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest_import.PublicImportMessage)
-  ))
-_sym_db.RegisterMessage(PublicImportMessage)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030com.google.protobuf.test'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_mset_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_mset_pb2.py
deleted file mode 100644
index c76e484..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_mset_pb2.py
+++ /dev/null
@@ -1,256 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_mset.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_mset_wire_format_pb2 as google_dot_protobuf_dot_unittest__mset__wire__format__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_mset.proto',
-  package='protobuf_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n#google/protobuf/unittest_mset.proto\x12\x11protobuf_unittest\x1a/google/protobuf/unittest_mset_wire_format.proto\"Z\n\x17TestMessageSetContainer\x12?\n\x0bmessage_set\x18\x01 \x01(\x0b\x32*.proto2_wireformat_unittest.TestMessageSet\"\x9f\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32x\n\x15message_set_extension\x12*.proto2_wireformat_unittest.TestMessageSet\x18\xb0\xa6^ \x01(\x0b\x32+.protobuf_unittest.TestMessageSetExtension1\"\xa1\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2x\n\x15message_set_extension\x12*.proto2_wireformat_unittest.TestMessageSet\x18\xf9\xbb^ \x01(\x0b\x32+.protobuf_unittest.TestMessageSetExtension2\"n\n\rRawMessageSet\x12\x33\n\x04item\x18\x01 \x03(\n2%.protobuf_unittest.RawMessageSet.Item\x1a(\n\x04Item\x12\x0f\n\x07type_id\x18\x02 \x02(\x05\x12\x0f\n\x07message\x18\x03 \x02(\x0c\x42\x05H\x01\xf8\x01\x01')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__mset__wire__format__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_TESTMESSAGESETCONTAINER = _descriptor.Descriptor(
-  name='TestMessageSetContainer',
-  full_name='protobuf_unittest.TestMessageSetContainer',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message_set', full_name='protobuf_unittest.TestMessageSetContainer.message_set', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=107,
-  serialized_end=197,
-)
-
-
-_TESTMESSAGESETEXTENSION1 = _descriptor.Descriptor(
-  name='TestMessageSetExtension1',
-  full_name='protobuf_unittest.TestMessageSetExtension1',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='i', full_name='protobuf_unittest.TestMessageSetExtension1.i', index=0,
-      number=15, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='message_set_extension', full_name='protobuf_unittest.TestMessageSetExtension1.message_set_extension', index=0,
-      number=1545008, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=200,
-  serialized_end=359,
-)
-
-
-_TESTMESSAGESETEXTENSION2 = _descriptor.Descriptor(
-  name='TestMessageSetExtension2',
-  full_name='protobuf_unittest.TestMessageSetExtension2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='str', full_name='protobuf_unittest.TestMessageSetExtension2.str', index=0,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='message_set_extension', full_name='protobuf_unittest.TestMessageSetExtension2.message_set_extension', index=0,
-      number=1547769, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=362,
-  serialized_end=523,
-)
-
-
-_RAWMESSAGESET_ITEM = _descriptor.Descriptor(
-  name='Item',
-  full_name='protobuf_unittest.RawMessageSet.Item',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='type_id', full_name='protobuf_unittest.RawMessageSet.Item.type_id', index=0,
-      number=2, type=5, cpp_type=1, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='message', full_name='protobuf_unittest.RawMessageSet.Item.message', index=1,
-      number=3, type=12, cpp_type=9, label=2,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=595,
-  serialized_end=635,
-)
-
-_RAWMESSAGESET = _descriptor.Descriptor(
-  name='RawMessageSet',
-  full_name='protobuf_unittest.RawMessageSet',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='item', full_name='protobuf_unittest.RawMessageSet.item', index=0,
-      number=1, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_RAWMESSAGESET_ITEM, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=525,
-  serialized_end=635,
-)
-
-_TESTMESSAGESETCONTAINER.fields_by_name['message_set'].message_type = google_dot_protobuf_dot_unittest__mset__wire__format__pb2._TESTMESSAGESET
-_RAWMESSAGESET_ITEM.containing_type = _RAWMESSAGESET
-_RAWMESSAGESET.fields_by_name['item'].message_type = _RAWMESSAGESET_ITEM
-DESCRIPTOR.message_types_by_name['TestMessageSetContainer'] = _TESTMESSAGESETCONTAINER
-DESCRIPTOR.message_types_by_name['TestMessageSetExtension1'] = _TESTMESSAGESETEXTENSION1
-DESCRIPTOR.message_types_by_name['TestMessageSetExtension2'] = _TESTMESSAGESETEXTENSION2
-DESCRIPTOR.message_types_by_name['RawMessageSet'] = _RAWMESSAGESET
-
-TestMessageSetContainer = _reflection.GeneratedProtocolMessageType('TestMessageSetContainer', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETCONTAINER,
-  __module__ = 'google.protobuf.unittest_mset_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetContainer)
-  ))
-_sym_db.RegisterMessage(TestMessageSetContainer)
-
-TestMessageSetExtension1 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension1', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETEXTENSION1,
-  __module__ = 'google.protobuf.unittest_mset_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetExtension1)
-  ))
-_sym_db.RegisterMessage(TestMessageSetExtension1)
-
-TestMessageSetExtension2 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension2', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETEXTENSION2,
-  __module__ = 'google.protobuf.unittest_mset_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetExtension2)
-  ))
-_sym_db.RegisterMessage(TestMessageSetExtension2)
-
-RawMessageSet = _reflection.GeneratedProtocolMessageType('RawMessageSet', (_message.Message,), dict(
-
-  Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), dict(
-    DESCRIPTOR = _RAWMESSAGESET_ITEM,
-    __module__ = 'google.protobuf.unittest_mset_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.RawMessageSet.Item)
-    ))
-  ,
-  DESCRIPTOR = _RAWMESSAGESET,
-  __module__ = 'google.protobuf.unittest_mset_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.RawMessageSet)
-  ))
-_sym_db.RegisterMessage(RawMessageSet)
-_sym_db.RegisterMessage(RawMessageSet.Item)
-
-_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION1
-google_dot_protobuf_dot_unittest__mset__wire__format__pb2.TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'])
-_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION2
-google_dot_protobuf_dot_unittest__mset__wire__format__pb2.TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'])
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001\370\001\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_mset_wire_format_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_mset_wire_format_pb2.py
deleted file mode 100644
index acab49c..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_mset_wire_format_pb2.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_mset_wire_format.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_mset_wire_format.proto',
-  package='proto2_wireformat_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n/google/protobuf/unittest_mset_wire_format.proto\x12\x1aproto2_wireformat_unittest\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"d\n!TestMessageSetWireFormatContainer\x12?\n\x0bmessage_set\x18\x01 \x01(\x0b\x32*.proto2_wireformat_unittest.TestMessageSetB)H\x01\xf8\x01\x01\xaa\x02!Google.ProtocolBuffers.TestProtos')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_TESTMESSAGESET = _descriptor.Descriptor(
-  name='TestMessageSet',
-  full_name='proto2_wireformat_unittest.TestMessageSet',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')),
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(4, 2147483647), ],
-  oneofs=[
-  ],
-  serialized_start=79,
-  serialized_end=109,
-)
-
-
-_TESTMESSAGESETWIREFORMATCONTAINER = _descriptor.Descriptor(
-  name='TestMessageSetWireFormatContainer',
-  full_name='proto2_wireformat_unittest.TestMessageSetWireFormatContainer',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message_set', full_name='proto2_wireformat_unittest.TestMessageSetWireFormatContainer.message_set', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=111,
-  serialized_end=211,
-)
-
-_TESTMESSAGESETWIREFORMATCONTAINER.fields_by_name['message_set'].message_type = _TESTMESSAGESET
-DESCRIPTOR.message_types_by_name['TestMessageSet'] = _TESTMESSAGESET
-DESCRIPTOR.message_types_by_name['TestMessageSetWireFormatContainer'] = _TESTMESSAGESETWIREFORMATCONTAINER
-
-TestMessageSet = _reflection.GeneratedProtocolMessageType('TestMessageSet', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESET,
-  __module__ = 'google.protobuf.unittest_mset_wire_format_pb2'
-  # @@protoc_insertion_point(class_scope:proto2_wireformat_unittest.TestMessageSet)
-  ))
-_sym_db.RegisterMessage(TestMessageSet)
-
-TestMessageSetWireFormatContainer = _reflection.GeneratedProtocolMessageType('TestMessageSetWireFormatContainer', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGESETWIREFORMATCONTAINER,
-  __module__ = 'google.protobuf.unittest_mset_wire_format_pb2'
-  # @@protoc_insertion_point(class_scope:proto2_wireformat_unittest.TestMessageSetWireFormatContainer)
-  ))
-_sym_db.RegisterMessage(TestMessageSetWireFormatContainer)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001\370\001\001\252\002!Google.ProtocolBuffers.TestProtos'))
-_TESTMESSAGESET.has_options = True
-_TESTMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_import_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_import_pb2.py
deleted file mode 100644
index fb3ddc7..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_import_pb2.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_no_arena_import.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_no_arena_import.proto',
-  package='proto2_arena_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n.google/protobuf/unittest_no_arena_import.proto\x12\x15proto2_arena_unittest\"\'\n\x1aImportNoArenaNestedMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_IMPORTNOARENANESTEDMESSAGE = _descriptor.Descriptor(
-  name='ImportNoArenaNestedMessage',
-  full_name='proto2_arena_unittest.ImportNoArenaNestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='d', full_name='proto2_arena_unittest.ImportNoArenaNestedMessage.d', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=73,
-  serialized_end=112,
-)
-
-DESCRIPTOR.message_types_by_name['ImportNoArenaNestedMessage'] = _IMPORTNOARENANESTEDMESSAGE
-
-ImportNoArenaNestedMessage = _reflection.GeneratedProtocolMessageType('ImportNoArenaNestedMessage', (_message.Message,), dict(
-  DESCRIPTOR = _IMPORTNOARENANESTEDMESSAGE,
-  __module__ = 'google.protobuf.unittest_no_arena_import_pb2'
-  # @@protoc_insertion_point(class_scope:proto2_arena_unittest.ImportNoArenaNestedMessage)
-  ))
-_sym_db.RegisterMessage(ImportNoArenaNestedMessage)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_pb2.py
deleted file mode 100644
index ba6523e..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_no_arena_pb2.py
+++ /dev/null
@@ -1,918 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_no_arena.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2
-google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2
-from google.protobuf import unittest_arena_pb2 as google_dot_protobuf_dot_unittest__arena__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_no_arena.proto',
-  package='protobuf_unittest_no_arena',
-  syntax='proto2',
-  serialized_pb=_b('\n\'google/protobuf/unittest_no_arena.proto\x12\x1aprotobuf_unittest_no_arena\x1a%google/protobuf/unittest_import.proto\x1a$google/protobuf/unittest_arena.proto\"\xd0\x1a\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12M\n\roptionalgroup\x18\x10 \x01(\n26.protobuf_unittest_no_arena.TestAllTypes.OptionalGroup\x12W\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessage\x12L\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12Q\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum\x12\x46\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum\x12\x42\n\x14optional_import_enum\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12T\n\x10optional_message\x18\x1b \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18  \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! \x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12M\n\rrepeatedgroup\x18. \x03(\n26.protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup\x12W\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessage\x12L\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12Q\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum\x12\x46\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum\x12\x42\n\x14repeated_import_enum\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12Y\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01\x12\x19\n\rdefault_int32\x18= \x01(\x05:\x02\x34\x31\x12\x19\n\rdefault_int64\x18> \x01(\x03:\x02\x34\x32\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint32\x18? \x01(\r:\x02\x34\x33\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint64\x18@ \x01(\x04:\x02\x34\x34\x12\x1b\n\x0e\x64\x65\x66\x61ult_sint32\x18\x41 \x01(\x11:\x03-45\x12\x1a\n\x0e\x64\x65\x66\x61ult_sint64\x18\x42 \x01(\x12:\x02\x34\x36\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed32\x18\x43 \x01(\x07:\x02\x34\x37\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed64\x18\x44 \x01(\x06:\x02\x34\x38\x12\x1c\n\x10\x64\x65\x66\x61ult_sfixed32\x18\x45 \x01(\x0f:\x02\x34\x39\x12\x1d\n\x10\x64\x65\x66\x61ult_sfixed64\x18\x46 \x01(\x10:\x03-50\x12\x1b\n\rdefault_float\x18G \x01(\x02:\x04\x35\x31.5\x12\x1d\n\x0e\x64\x65\x66\x61ult_double\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30\x12\x1a\n\x0c\x64\x65\x66\x61ult_bool\x18I \x01(\x08:\x04true\x12\x1d\n\x0e\x64\x65\x66\x61ult_string\x18J \x01(\t:\x05hello\x12\x1c\n\rdefault_bytes\x18K \x01(\x0c:\x05world\x12U\n\x13\x64\x65\x66\x61ult_nested_enum\x18Q \x01(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum:\x03\x42\x41R\x12R\n\x14\x64\x65\x66\x61ult_foreign_enum\x18R \x01(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum:\x0b\x46OREIGN_BAR\x12M\n\x13\x64\x65\x66\x61ult_import_enum\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR\x12%\n\x14\x64\x65\x66\x61ult_string_piece\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02\x12\x1d\n\x0c\x64\x65\x66\x61ult_cord\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12V\n\x14oneof_nested_message\x18p \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x12_\n\x19lazy_oneof_nested_message\x18s \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01H\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x61\x18/ \x01(\x05\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\x1b\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\"P\n\x12TestNoArenaMessage\x12:\n\rarena_message\x18\x01 \x01(\x0b\x32#.proto2_arena_unittest.ArenaMessage*@\n\x0b\x46oreignEnum\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06\x42%B\rUnittestProtoH\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf8\x01\x00\xa2\x02\x05NOARN')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__arena__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_FOREIGNENUM = _descriptor.EnumDescriptor(
-  name='ForeignEnum',
-  full_name='protobuf_unittest_no_arena.ForeignEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_FOO', index=0, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAR', index=1, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAZ', index=2, number=6,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3670,
-  serialized_end=3734,
-)
-_sym_db.RegisterEnumDescriptor(_FOREIGNENUM)
-
-ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM)
-FOREIGN_FOO = 4
-FOREIGN_BAR = 5
-FOREIGN_BAZ = 6
-
-
-_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='protobuf_unittest_no_arena.TestAllTypes.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=2, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEG', index=3, number=-1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3485,
-  serialized_end=3542,
-)
-_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM)
-
-
-_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest_no_arena.TestAllTypes.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='protobuf_unittest_no_arena.TestAllTypes.NestedMessage.bb', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3400,
-  serialized_end=3427,
-)
-
-_TESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor(
-  name='OptionalGroup',
-  full_name='protobuf_unittest_no_arena.TestAllTypes.OptionalGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest_no_arena.TestAllTypes.OptionalGroup.a', index=0,
-      number=17, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3429,
-  serialized_end=3455,
-)
-
-_TESTALLTYPES_REPEATEDGROUP = _descriptor.Descriptor(
-  name='RepeatedGroup',
-  full_name='protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup.a', index=0,
-      number=47, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3457,
-  serialized_end=3483,
-)
-
-_TESTALLTYPES = _descriptor.Descriptor(
-  name='TestAllTypes',
-  full_name='protobuf_unittest_no_arena.TestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_int32', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_int64', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_float', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_float', index=10,
-      number=11, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_double', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_double', index=11,
-      number=12, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_bool', index=12,
-      number=13, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_string', index=13,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_bytes', index=14,
-      number=15, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optionalgroup', full_name='protobuf_unittest_no_arena.TestAllTypes.optionalgroup', index=15,
-      number=16, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_nested_message', index=16,
-      number=18, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_foreign_message', index=17,
-      number=19, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_import_message', index=18,
-      number=20, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_nested_enum', index=19,
-      number=21, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_foreign_enum', index=20,
-      number=22, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=4,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_import_enum', index=21,
-      number=23, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=7,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_string_piece', index=22,
-      number=24, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='optional_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_cord', index=23,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='optional_public_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_public_import_message', index=24,
-      number=26, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_message', index=25,
-      number=27, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_int32', index=26,
-      number=31, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_int64', index=27,
-      number=32, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_uint32', index=28,
-      number=33, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_uint64', index=29,
-      number=34, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sint32', index=30,
-      number=35, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sint64', index=31,
-      number=36, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_fixed32', index=32,
-      number=37, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_fixed64', index=33,
-      number=38, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sfixed32', index=34,
-      number=39, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sfixed64', index=35,
-      number=40, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_float', index=36,
-      number=41, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_double', index=37,
-      number=42, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_bool', index=38,
-      number=43, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_string', index=39,
-      number=44, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_bytes', index=40,
-      number=45, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeatedgroup', full_name='protobuf_unittest_no_arena.TestAllTypes.repeatedgroup', index=41,
-      number=46, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_nested_message', index=42,
-      number=48, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_foreign_message', index=43,
-      number=49, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_import_message', index=44,
-      number=50, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_nested_enum', index=45,
-      number=51, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_foreign_enum', index=46,
-      number=52, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_import_enum', index=47,
-      number=53, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_string_piece', index=48,
-      number=54, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_cord', index=49,
-      number=55, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_lazy_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_lazy_message', index=50,
-      number=57, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='default_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_int32', index=51,
-      number=61, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=41,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_int64', index=52,
-      number=62, type=3, cpp_type=2, label=1,
-      has_default_value=True, default_value=42,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_uint32', index=53,
-      number=63, type=13, cpp_type=3, label=1,
-      has_default_value=True, default_value=43,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_uint64', index=54,
-      number=64, type=4, cpp_type=4, label=1,
-      has_default_value=True, default_value=44,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sint32', index=55,
-      number=65, type=17, cpp_type=1, label=1,
-      has_default_value=True, default_value=-45,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sint64', index=56,
-      number=66, type=18, cpp_type=2, label=1,
-      has_default_value=True, default_value=46,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_fixed32', index=57,
-      number=67, type=7, cpp_type=3, label=1,
-      has_default_value=True, default_value=47,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_fixed64', index=58,
-      number=68, type=6, cpp_type=4, label=1,
-      has_default_value=True, default_value=48,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sfixed32', index=59,
-      number=69, type=15, cpp_type=1, label=1,
-      has_default_value=True, default_value=49,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sfixed64', index=60,
-      number=70, type=16, cpp_type=2, label=1,
-      has_default_value=True, default_value=-50,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_float', full_name='protobuf_unittest_no_arena.TestAllTypes.default_float', index=61,
-      number=71, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(51.5),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_double', full_name='protobuf_unittest_no_arena.TestAllTypes.default_double', index=62,
-      number=72, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=float(52000),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.default_bool', index=63,
-      number=73, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=True,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_string', full_name='protobuf_unittest_no_arena.TestAllTypes.default_string', index=64,
-      number=74, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("hello").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.default_bytes', index=65,
-      number=75, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("world"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_nested_enum', index=66,
-      number=81, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=2,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_foreign_enum', index=67,
-      number=82, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=5,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_import_enum', index=68,
-      number=83, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=8,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.default_string_piece', index=69,
-      number=84, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("abc").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='default_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.default_cord', index=70,
-      number=85, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("123").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='oneof_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_uint32', index=71,
-      number=111, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_nested_message', index=72,
-      number=112, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_string', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_string', index=73,
-      number=113, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_bytes', index=74,
-      number=114, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='lazy_oneof_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.lazy_oneof_nested_message', index=75,
-      number=115, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTALLTYPES_NESTEDMESSAGE, _TESTALLTYPES_OPTIONALGROUP, _TESTALLTYPES_REPEATEDGROUP, ],
-  enum_types=[
-    _TESTALLTYPES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='oneof_field', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_field',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=149,
-  serialized_end=3557,
-)
-
-
-_FOREIGNMESSAGE = _descriptor.Descriptor(
-  name='ForeignMessage',
-  full_name='protobuf_unittest_no_arena.ForeignMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='c', full_name='protobuf_unittest_no_arena.ForeignMessage.c', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3559,
-  serialized_end=3586,
-)
-
-
-_TESTNOARENAMESSAGE = _descriptor.Descriptor(
-  name='TestNoArenaMessage',
-  full_name='protobuf_unittest_no_arena.TestNoArenaMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='arena_message', full_name='protobuf_unittest_no_arena.TestNoArenaMessage.arena_message', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3588,
-  serialized_end=3668,
-)
-
-_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES
-_TESTALLTYPES_OPTIONALGROUP.containing_type = _TESTALLTYPES
-_TESTALLTYPES_REPEATEDGROUP.containing_type = _TESTALLTYPES
-_TESTALLTYPES.fields_by_name['optionalgroup'].message_type = _TESTALLTYPES_OPTIONALGROUP
-_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['optional_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeatedgroup'].message_type = _TESTALLTYPES_REPEATEDGROUP
-_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['repeated_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['default_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['default_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['default_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_uint32'])
-_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_nested_message'])
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_string'])
-_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_bytes'])
-_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'])
-_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTNOARENAMESSAGE.fields_by_name['arena_message'].message_type = google_dot_protobuf_dot_unittest__arena__pb2._ARENAMESSAGE
-DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES
-DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
-DESCRIPTOR.message_types_by_name['TestNoArenaMessage'] = _TESTNOARENAMESSAGE
-DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM
-
-TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_no_arena_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.NestedMessage)
-    ))
-  ,
-
-  OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_OPTIONALGROUP,
-    __module__ = 'google.protobuf.unittest_no_arena_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.OptionalGroup)
-    ))
-  ,
-
-  RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_REPEATEDGROUP,
-    __module__ = 'google.protobuf.unittest_no_arena_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup)
-    ))
-  ,
-  DESCRIPTOR = _TESTALLTYPES,
-  __module__ = 'google.protobuf.unittest_no_arena_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes)
-  ))
-_sym_db.RegisterMessage(TestAllTypes)
-_sym_db.RegisterMessage(TestAllTypes.NestedMessage)
-_sym_db.RegisterMessage(TestAllTypes.OptionalGroup)
-_sym_db.RegisterMessage(TestAllTypes.RepeatedGroup)
-
-ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOREIGNMESSAGE,
-  __module__ = 'google.protobuf.unittest_no_arena_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.ForeignMessage)
-  ))
-_sym_db.RegisterMessage(ForeignMessage)
-
-TestNoArenaMessage = _reflection.GeneratedProtocolMessageType('TestNoArenaMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTNOARENAMESSAGE,
-  __module__ = 'google.protobuf.unittest_no_arena_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestNoArenaMessage)
-  ))
-_sym_db.RegisterMessage(TestNoArenaMessage)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('B\rUnittestProtoH\001\200\001\001\210\001\001\220\001\001\370\001\000\242\002\005NOARN'))
-_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['optional_message'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTALLTYPES.fields_by_name['default_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['default_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['default_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['default_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].has_options = True
-_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_no_generic_services_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_no_generic_services_pb2.py
deleted file mode 100644
index d40ba3b..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_no_generic_services_pb2.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_no_generic_services.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_no_generic_services.proto',
-  package='google.protobuf.no_generic_services_test',
-  syntax='proto2',
-  serialized_pb=_b('\n2google/protobuf/unittest_no_generic_services.proto\x12(google.protobuf.no_generic_services_test\"#\n\x0bTestMessage\x12\t\n\x01\x61\x18\x01 \x01(\x05*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02*\x13\n\x08TestEnum\x12\x07\n\x03\x46OO\x10\x01\x32\x82\x01\n\x0bTestService\x12s\n\x03\x46oo\x12\x35.google.protobuf.no_generic_services_test.TestMessage\x1a\x35.google.protobuf.no_generic_services_test.TestMessage:N\n\x0etest_extension\x12\x35.google.protobuf.no_generic_services_test.TestMessage\x18\xe8\x07 \x01(\x05')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_TESTENUM = _descriptor.EnumDescriptor(
-  name='TestEnum',
-  full_name='google.protobuf.no_generic_services_test.TestEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=133,
-  serialized_end=152,
-)
-_sym_db.RegisterEnumDescriptor(_TESTENUM)
-
-TestEnum = enum_type_wrapper.EnumTypeWrapper(_TESTENUM)
-FOO = 1
-
-TEST_EXTENSION_FIELD_NUMBER = 1000
-test_extension = _descriptor.FieldDescriptor(
-  name='test_extension', full_name='google.protobuf.no_generic_services_test.test_extension', index=0,
-  number=1000, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-
-
-_TESTMESSAGE = _descriptor.Descriptor(
-  name='TestMessage',
-  full_name='google.protobuf.no_generic_services_test.TestMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='google.protobuf.no_generic_services_test.TestMessage.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=96,
-  serialized_end=131,
-)
-
-DESCRIPTOR.message_types_by_name['TestMessage'] = _TESTMESSAGE
-DESCRIPTOR.enum_types_by_name['TestEnum'] = _TESTENUM
-DESCRIPTOR.extensions_by_name['test_extension'] = test_extension
-
-TestMessage = _reflection.GeneratedProtocolMessageType('TestMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGE,
-  __module__ = 'google.protobuf.unittest_no_generic_services_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.no_generic_services_test.TestMessage)
-  ))
-_sym_db.RegisterMessage(TestMessage)
-
-TestMessage.RegisterExtension(test_extension)
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_pb2.py
deleted file mode 100644
index d82d56f..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_pb2.py
+++ /dev/null
@@ -1,6099 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import service as _service
-from google.protobuf import service_reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2
-google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest.proto',
-  package='protobuf_unittest',
-  syntax='proto2',
-  serialized_pb=_b('\n\x1egoogle/protobuf/unittest.proto\x12\x11protobuf_unittest\x1a%google/protobuf/unittest_import.proto\"\xed\x18\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12\x44\n\roptionalgroup\x18\x10 \x01(\n2-.protobuf_unittest.TestAllTypes.OptionalGroup\x12N\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14optional_import_enum\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12P\n\x15optional_lazy_message\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18  \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! \x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12\x44\n\rrepeatedgroup\x18. \x03(\n2-.protobuf_unittest.TestAllTypes.RepeatedGroup\x12N\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14repeated_import_enum\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12P\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x19\n\rdefault_int32\x18= \x01(\x05:\x02\x34\x31\x12\x19\n\rdefault_int64\x18> \x01(\x03:\x02\x34\x32\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint32\x18? 
\x01(\r:\x02\x34\x33\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint64\x18@ \x01(\x04:\x02\x34\x34\x12\x1b\n\x0e\x64\x65\x66\x61ult_sint32\x18\x41 \x01(\x11:\x03-45\x12\x1a\n\x0e\x64\x65\x66\x61ult_sint64\x18\x42 \x01(\x12:\x02\x34\x36\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed32\x18\x43 \x01(\x07:\x02\x34\x37\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed64\x18\x44 \x01(\x06:\x02\x34\x38\x12\x1c\n\x10\x64\x65\x66\x61ult_sfixed32\x18\x45 \x01(\x0f:\x02\x34\x39\x12\x1d\n\x10\x64\x65\x66\x61ult_sfixed64\x18\x46 \x01(\x10:\x03-50\x12\x1b\n\rdefault_float\x18G \x01(\x02:\x04\x35\x31.5\x12\x1d\n\x0e\x64\x65\x66\x61ult_double\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30\x12\x1a\n\x0c\x64\x65\x66\x61ult_bool\x18I \x01(\x08:\x04true\x12\x1d\n\x0e\x64\x65\x66\x61ult_string\x18J \x01(\t:\x05hello\x12\x1c\n\rdefault_bytes\x18K \x01(\x0c:\x05world\x12L\n\x13\x64\x65\x66\x61ult_nested_enum\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R\x12I\n\x14\x64\x65\x66\x61ult_foreign_enum\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR\x12M\n\x13\x64\x65\x66\x61ult_import_enum\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR\x12%\n\x14\x64\x65\x66\x61ult_string_piece\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02\x12\x1d\n\x0c\x64\x65\x66\x61ult_cord\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12M\n\x14oneof_nested_message\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x61\x18/ \x01(\x05\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\xbb\x01\n\x12NestedTestAllTypes\x12\x34\n\x05\x63hild\x18\x01 \x01(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\x12\x30\n\x07payload\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12=\n\x0erepeated_child\x18\x03 \x03(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\"4\n\x14TestDeprecatedFields\x12\x1c\n\x10\x64\x65precated_int32\x18\x01 \x01(\x05\x42\x02\x18\x01\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x01(\x05\"0\n\x12TestReservedFieldsJ\x04\x08\x02\x10\x03J\x04\x08\x0f\x10\x10J\x04\x08\t\x10\x0cR\x03\x62\x61rR\x03\x62\x61z\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"$\n\x17OptionalGroup_extension\x12\t\n\x01\x61\x18\x11 \x01(\x05\"$\n\x17RepeatedGroup_extension\x12\t\n\x01\x61\x18/ \x01(\x05\"\x98\x01\n\x13TestNestedExtension29\n\x04test\x12$.protobuf_unittest.TestAllExtensions\x18\xea\x07 \x01(\t:\x04test2F\n\x17nested_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\xeb\x07 \x01(\t\"\xd5\x05\n\x0cTestRequired\x12\t\n\x01\x61\x18\x01 \x02(\x05\x12\x0e\n\x06\x64ummy2\x18\x02 \x01(\x05\x12\t\n\x01\x62\x18\x03 \x02(\x05\x12\x0e\n\x06\x64ummy4\x18\x04 \x01(\x05\x12\x0e\n\x06\x64ummy5\x18\x05 \x01(\x05\x12\x0e\n\x06\x64ummy6\x18\x06 \x01(\x05\x12\x0e\n\x06\x64ummy7\x18\x07 \x01(\x05\x12\x0e\n\x06\x64ummy8\x18\x08 \x01(\x05\x12\x0e\n\x06\x64ummy9\x18\t \x01(\x05\x12\x0f\n\x07\x64ummy10\x18\n \x01(\x05\x12\x0f\n\x07\x64ummy11\x18\x0b \x01(\x05\x12\x0f\n\x07\x64ummy12\x18\x0c \x01(\x05\x12\x0f\n\x07\x64ummy13\x18\r \x01(\x05\x12\x0f\n\x07\x64ummy14\x18\x0e \x01(\x05\x12\x0f\n\x07\x64ummy15\x18\x0f 
\x01(\x05\x12\x0f\n\x07\x64ummy16\x18\x10 \x01(\x05\x12\x0f\n\x07\x64ummy17\x18\x11 \x01(\x05\x12\x0f\n\x07\x64ummy18\x18\x12 \x01(\x05\x12\x0f\n\x07\x64ummy19\x18\x13 \x01(\x05\x12\x0f\n\x07\x64ummy20\x18\x14 \x01(\x05\x12\x0f\n\x07\x64ummy21\x18\x15 \x01(\x05\x12\x0f\n\x07\x64ummy22\x18\x16 \x01(\x05\x12\x0f\n\x07\x64ummy23\x18\x17 \x01(\x05\x12\x0f\n\x07\x64ummy24\x18\x18 \x01(\x05\x12\x0f\n\x07\x64ummy25\x18\x19 \x01(\x05\x12\x0f\n\x07\x64ummy26\x18\x1a \x01(\x05\x12\x0f\n\x07\x64ummy27\x18\x1b \x01(\x05\x12\x0f\n\x07\x64ummy28\x18\x1c \x01(\x05\x12\x0f\n\x07\x64ummy29\x18\x1d \x01(\x05\x12\x0f\n\x07\x64ummy30\x18\x1e \x01(\x05\x12\x0f\n\x07\x64ummy31\x18\x1f \x01(\x05\x12\x0f\n\x07\x64ummy32\x18  \x01(\x05\x12\t\n\x01\x63\x18! \x02(\x05\x32V\n\x06single\x12$.protobuf_unittest.TestAllExtensions\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired2U\n\x05multi\x12$.protobuf_unittest.TestAllExtensions\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\"\x9a\x01\n\x13TestRequiredForeign\x12\x39\n\x10optional_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\x39\n\x10repeated_message\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\r\n\x05\x64ummy\x18\x03 \x01(\x05\"Z\n\x11TestForeignNested\x12\x45\n\x0e\x66oreign_nested\x18\x01 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\"\x12\n\x10TestEmptyMessage\"*\n\x1eTestEmptyMessageWithExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"7\n\x1bTestMultipleExtensionRanges*\x04\x08*\x10+*\x06\x08\xaf \x10\x94!*\n\x08\x80\x80\x04\x10\x80\x80\x80\x80\x02\"4\n\x18TestReallyLargeTagNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x02\x62\x62\x18\xff\xff\xff\x7f \x01(\x05\"U\n\x14TestRecursiveMessage\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestRecursiveMessage\x12\t\n\x01i\x18\x02 \x01(\x05\"K\n\x14TestMutualRecursionA\x12\x33\n\x02\x62\x62\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionB\"b\n\x14TestMutualRecursionB\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionA\x12\x16\n\x0eoptional_int32\x18\x02 \x01(\x05\"\xb3\x01\n\x12TestDupFieldNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\x36\n\x03\x66oo\x18\x02 \x01(\n2).protobuf_unittest.TestDupFieldNumber.Foo\x12\x36\n\x03\x62\x61r\x18\x03 \x01(\n2).protobuf_unittest.TestDupFieldNumber.Bar\x1a\x10\n\x03\x46oo\x12\t\n\x01\x61\x18\x01 \x01(\x05\x1a\x10\n\x03\x42\x61r\x12\t\n\x01\x61\x18\x01 \x01(\x05\"L\n\x10TestEagerMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x00\"K\n\x0fTestLazyMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x01\"\x80\x02\n\x18TestNestedMessageHasBits\x12Z\n\x17optional_nested_message\x18\x01 \x01(\x0b\x32\x39.protobuf_unittest.TestNestedMessageHasBits.NestedMessage\x1a\x87\x01\n\rNestedMessage\x12$\n\x1cnestedmessage_repeated_int32\x18\x01 \x03(\x05\x12P\n%nestedmessage_repeated_foreignmessage\x18\x02 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\"\xe5\x03\n\x17TestCamelCaseFieldNames\x12\x16\n\x0ePrimitiveField\x18\x01 \x01(\x05\x12\x13\n\x0bStringField\x18\x02 \x01(\t\x12\x31\n\tEnumField\x18\x03 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x37\n\x0cMessageField\x18\x04 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12\x1c\n\x10StringPieceField\x18\x05 \x01(\tB\x02\x08\x02\x12\x15\n\tCordField\x18\x06 \x01(\tB\x02\x08\x01\x12\x1e\n\x16RepeatedPrimitiveField\x18\x07 \x03(\x05\x12\x1b\n\x13RepeatedStringField\x18\x08 
\x03(\t\x12\x39\n\x11RepeatedEnumField\x18\t \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12?\n\x14RepeatedMessageField\x18\n \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12$\n\x18RepeatedStringPieceField\x18\x0b \x03(\tB\x02\x08\x02\x12\x1d\n\x11RepeatedCordField\x18\x0c \x03(\tB\x02\x08\x01\"\xd5\x01\n\x12TestFieldOrderings\x12\x11\n\tmy_string\x18\x0b \x01(\t\x12\x0e\n\x06my_int\x18\x01 \x01(\x03\x12\x10\n\x08my_float\x18\x65 \x01(\x02\x12U\n\x17optional_nested_message\x18\xc8\x01 \x01(\x0b\x32\x33.protobuf_unittest.TestFieldOrderings.NestedMessage\x1a\'\n\rNestedMessage\x12\n\n\x02oo\x18\x02 \x01(\x03\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05*\x04\x08\x02\x10\x0b*\x04\x08\x0c\x10\x65\"\xb6\x07\n\x18TestExtremeDefaultValues\x12?\n\rescaped_bytes\x18\x01 \x01(\x0c:(\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\\"\\376\x12 \n\x0clarge_uint32\x18\x02 \x01(\r:\n4294967295\x12*\n\x0clarge_uint64\x18\x03 \x01(\x04:\x14\x31\x38\x34\x34\x36\x37\x34\x34\x30\x37\x33\x37\x30\x39\x35\x35\x31\x36\x31\x35\x12 \n\x0bsmall_int32\x18\x04 \x01(\x05:\x0b-2147483647\x12)\n\x0bsmall_int64\x18\x05 \x01(\x03:\x14-9223372036854775807\x12\'\n\x12really_small_int32\x18\x15 \x01(\x05:\x0b-2147483648\x12\x30\n\x12really_small_int64\x18\x16 \x01(\x03:\x14-9223372036854775808\x12\x18\n\x0butf8_string\x18\x06 \x01(\t:\x03\xe1\x88\xb4\x12\x15\n\nzero_float\x18\x07 \x01(\x02:\x01\x30\x12\x14\n\tone_float\x18\x08 \x01(\x02:\x01\x31\x12\x18\n\x0bsmall_float\x18\t \x01(\x02:\x03\x31.5\x12\x1e\n\x12negative_one_float\x18\n \x01(\x02:\x02-1\x12\x1c\n\x0enegative_float\x18\x0b \x01(\x02:\x04-1.5\x12\x1a\n\x0blarge_float\x18\x0c \x01(\x02:\x05\x32\x65+08\x12$\n\x14small_negative_float\x18\r \x01(\x02:\x06-8e-28\x12\x17\n\ninf_double\x18\x0e \x01(\x01:\x03inf\x12\x1c\n\x0eneg_inf_double\x18\x0f \x01(\x01:\x04-inf\x12\x17\n\nnan_double\x18\x10 \x01(\x01:\x03nan\x12\x16\n\tinf_float\x18\x11 \x01(\x02:\x03inf\x12\x1b\n\rneg_inf_float\x18\x12 \x01(\x02:\x04-inf\x12\x16\n\tnan_float\x18\x13 \x01(\x02:\x03nan\x12+\n\x0c\x63pp_trigraph\x18\x14 \x01(\t:\x15? ? ?? ?? ??? 
??/ ??-\x12 \n\x10string_with_zero\x18\x17 \x01(\t:\x06hel\x00lo\x12\"\n\x0f\x62ytes_with_zero\x18\x18 \x01(\x0c:\twor\\000ld\x12(\n\x16string_piece_with_zero\x18\x19 \x01(\t:\x04\x61\x62\x00\x63\x42\x02\x08\x02\x12 \n\x0e\x63ord_with_zero\x18\x1a \x01(\t:\x04\x31\x32\x00\x33\x42\x02\x08\x01\x12&\n\x12replacement_string\x18\x1b \x01(\t:\n${unknown}\"K\n\x11SparseEnumMessage\x12\x36\n\x0bsparse_enum\x18\x01 \x01(\x0e\x32!.protobuf_unittest.TestSparseEnum\"\x19\n\tOneString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\t\"\x1a\n\nMoreString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\t\"\x18\n\x08OneBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x19\n\tMoreBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\x0c\"\x1c\n\x0cInt32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x05\"\x1d\n\rUint32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\r\"\x1c\n\x0cInt64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x03\"\x1d\n\rUint64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x04\"\x1b\n\x0b\x42oolMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x08\"\xd0\x01\n\tTestOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x36\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesH\x00\x12\x39\n\x08\x66oogroup\x18\x04 \x01(\n2%.protobuf_unittest.TestOneof.FooGroupH\x00\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\tB\x05\n\x03\x66oo\"\xe7\x01\n\x1cTestOneofBackwardsCompatible\x12\x0f\n\x07\x66oo_int\x18\x01 \x01(\x05\x12\x12\n\nfoo_string\x18\x02 \x01(\t\x12\x34\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12J\n\x08\x66oogroup\x18\x04 \x01(\n28.protobuf_unittest.TestOneofBackwardsCompatible.FooGroup\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\t\"\x9e\x06\n\nTestOneof2\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x16\n\x08\x66oo_cord\x18\x03 \x01(\tB\x02\x08\x01H\x00\x12\x1e\n\x10\x66oo_string_piece\x18\x04 \x01(\tB\x02\x08\x02H\x00\x12\x13\n\tfoo_bytes\x18\x05 \x01(\x0cH\x00\x12<\n\x08\x66oo_enum\x18\x06 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnumH\x00\x12\x42\n\x0b\x66oo_message\x18\x07 \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageH\x00\x12:\n\x08\x66oogroup\x18\x08 \x01(\n2&.protobuf_unittest.TestOneof2.FooGroupH\x00\x12K\n\x10\x66oo_lazy_message\x18\x0b \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageB\x02(\x01H\x00\x12\x14\n\x07\x62\x61r_int\x18\x0c \x01(\x05:\x01\x35H\x01\x12\x1c\n\nbar_string\x18\r \x01(\t:\x06STRINGH\x01\x12\x1c\n\x08\x62\x61r_cord\x18\x0e \x01(\t:\x04\x43ORDB\x02\x08\x01H\x01\x12&\n\x10\x62\x61r_string_piece\x18\x0f \x01(\t:\x06SPIECEB\x02\x08\x02H\x01\x12\x1a\n\tbar_bytes\x18\x10 \x01(\x0c:\x05\x42YTESH\x01\x12\x41\n\x08\x62\x61r_enum\x18\x11 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnum:\x03\x42\x41RH\x01\x12\x0f\n\x07\x62\x61z_int\x18\x12 \x01(\x05\x12\x17\n\nbaz_string\x18\x13 \x01(\t:\x03\x42\x41Z\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\t \x01(\x05\x12\t\n\x01\x62\x18\n \x01(\t\x1a\x33\n\rNestedMessage\x12\x0f\n\x07qux_int\x18\x01 \x01(\x03\x12\x11\n\tcorge_int\x18\x02 \x03(\x05\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x42\x05\n\x03\x66ooB\x05\n\x03\x62\x61r\"\xb8\x01\n\x11TestRequiredOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12I\n\x0b\x66oo_message\x18\x03 
\x01(\x0b\x32\x32.protobuf_unittest.TestRequiredOneof.NestedMessageH\x00\x1a(\n\rNestedMessage\x12\x17\n\x0frequired_double\x18\x01 \x02(\x01\x42\x05\n\x03\x66oo\"\xaa\x03\n\x0fTestPackedTypes\x12\x18\n\x0cpacked_int32\x18Z \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18[ \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18\\ \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18] \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18` \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x01\x12\x37\n\x0bpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01\"\xc8\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0eunpacked_int32\x18Z \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_int64\x18[ \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0funpacked_uint32\x18\\ \x03(\rB\x02\x10\x00\x12\x1b\n\x0funpacked_uint64\x18] \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed32\x18` \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0funpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x00\x12\x19\n\runpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x00\x12\x39\n\runpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00\" \n\x14TestPackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\"\n\x16TestUnpackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\x99\x04\n\x15TestDynamicExtensions\x12\x19\n\x10scalar_extension\x18\xd0\x0f \x01(\x07\x12\x37\n\x0e\x65num_extension\x18\xd1\x0f \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12Y\n\x16\x64ynamic_enum_extension\x18\xd2\x0f \x01(\x0e\x32\x38.protobuf_unittest.TestDynamicExtensions.DynamicEnumType\x12=\n\x11message_extension\x18\xd3\x0f \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12_\n\x19\x64ynamic_message_extension\x18\xd4\x0f \x01(\x0b\x32;.protobuf_unittest.TestDynamicExtensions.DynamicMessageType\x12\x1b\n\x12repeated_extension\x18\xd5\x0f \x03(\t\x12\x1d\n\x10packed_extension\x18\xd6\x0f \x03(\x11\x42\x02\x10\x01\x1a,\n\x12\x44ynamicMessageType\x12\x16\n\rdynamic_field\x18\xb4\x10 \x01(\x05\"G\n\x0f\x44ynamicEnumType\x12\x10\n\x0b\x44YNAMIC_FOO\x10\x98\x11\x12\x10\n\x0b\x44YNAMIC_BAR\x10\x99\x11\x12\x10\n\x0b\x44YNAMIC_BAZ\x10\x9a\x11\"\xc0\x01\n#TestRepeatedScalarDifferentTagSizes\x12\x18\n\x10repeated_fixed32\x18\x0c \x03(\x07\x12\x16\n\x0erepeated_int32\x18\r \x03(\x05\x12\x19\n\x10repeated_fixed64\x18\xfe\x0f \x03(\x06\x12\x17\n\x0erepeated_int64\x18\xff\x0f \x03(\x03\x12\x18\n\x0erepeated_float\x18\xfe\xff\x0f \x03(\x02\x12\x19\n\x0frepeated_uint64\x18\xff\xff\x0f \x03(\x04\"\xf7\t\n\x10TestParsingMerge\x12;\n\x12required_all_types\x18\x01 \x02(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12optional_all_types\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12repeated_all_types\x18\x03 
\x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12H\n\roptionalgroup\x18\n \x01(\n21.protobuf_unittest.TestParsingMerge.OptionalGroup\x12H\n\rrepeatedgroup\x18\x14 \x03(\n21.protobuf_unittest.TestParsingMerge.RepeatedGroup\x1a\xaa\x04\n\x17RepeatedFieldsGenerator\x12/\n\x06\x66ield1\x18\x01 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield2\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield3\x18\x03 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12R\n\x06group1\x18\n \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1\x12R\n\x06group2\x18\x14 \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2\x12.\n\x04\x65xt1\x18\xe8\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12.\n\x04\x65xt2\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group1\x12/\n\x06\x66ield1\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group2\x12/\n\x06\x66ield1\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rOptionalGroup\x12\x41\n\x18optional_group_all_types\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rRepeatedGroup\x12\x41\n\x18repeated_group_all_types\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\x32[\n\x0coptional_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes2[\n\x0crepeated_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\"D\n\x1bTestCommentInjectionMessage\x12%\n\x01\x61\x18\x01 \x01(\t:\x1a*/ <- Neither should this.\"\x0c\n\nFooRequest\"\r\n\x0b\x46ooResponse\"\x12\n\x10\x46ooClientMessage\"\x12\n\x10\x46ooServerMessage\"\x0c\n\nBarRequest\"\r\n\x0b\x42\x61rResponse*@\n\x0b\x46oreignEnum\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06*K\n\x14TestEnumWithDupValue\x12\x08\n\x04\x46OO1\x10\x01\x12\x08\n\x04\x42\x41R1\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x08\n\x04\x46OO2\x10\x01\x12\x08\n\x04\x42\x41R2\x10\x02\x1a\x02\x10\x01*\x89\x01\n\x0eTestSparseEnum\x12\x0c\n\x08SPARSE_A\x10{\x12\x0e\n\x08SPARSE_B\x10\xa6\xe7\x03\x12\x0f\n\x08SPARSE_C\x10\xb2\xb1\x80\x06\x12\x15\n\x08SPARSE_D\x10\xf1\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x15\n\x08SPARSE_E\x10\xb4\xde\xfc\xff\xff\xff\xff\xff\xff\x01\x12\x0c\n\x08SPARSE_F\x10\x00\x12\x0c\n\x08SPARSE_G\x10\x02\x32\x99\x01\n\x0bTestService\x12\x44\n\x03\x46oo\x12\x1d.protobuf_unittest.FooRequest\x1a\x1e.protobuf_unittest.FooResponse\x12\x44\n\x03\x42\x61r\x12\x1d.protobuf_unittest.BarRequest\x1a\x1e.protobuf_unittest.BarResponse:F\n\x18optional_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x01 \x01(\x05:F\n\x18optional_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x02 \x01(\x03:G\n\x19optional_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x03 \x01(\r:G\n\x19optional_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x04 \x01(\x04:G\n\x19optional_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x05 \x01(\x11:G\n\x19optional_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x06 \x01(\x12:H\n\x1aoptional_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x07 \x01(\x07:H\n\x1aoptional_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x08 \x01(\x06:I\n\x1boptional_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\t 
\x01(\x0f:I\n\x1boptional_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\n \x01(\x10:F\n\x18optional_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0b \x01(\x02:G\n\x19optional_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0c \x01(\x01:E\n\x17optional_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18\r \x01(\x08:G\n\x19optional_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0e \x01(\t:F\n\x18optional_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0f \x01(\x0c:q\n\x17optionalgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x10 \x01(\n2*.protobuf_unittest.OptionalGroup_extension:~\n!optional_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"optional_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!optional_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1eoptional_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x15 \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1foptional_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1eoptional_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1foptional_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x18 \x01(\tB\x02\x08\x02:I\n\x17optional_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x19 \x01(\tB\x02\x08\x01:\x85\x01\n(optional_public_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage:\x80\x01\n\x1foptional_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:F\n\x18repeated_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1f \x03(\x05:F\n\x18repeated_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18  \x03(\x03:G\n\x19repeated_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18! \x03(\r:G\n\x19repeated_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\" \x03(\x04:G\n\x19repeated_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18# \x03(\x11:G\n\x19repeated_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18$ \x03(\x12:H\n\x1arepeated_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18% \x03(\x07:H\n\x1arepeated_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18& \x03(\x06:I\n\x1brepeated_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\' \x03(\x0f:I\n\x1brepeated_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18( \x03(\x10:F\n\x18repeated_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18) \x03(\x02:G\n\x19repeated_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18* \x03(\x01:E\n\x17repeated_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18+ \x03(\x08:G\n\x19repeated_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18, \x03(\t:F\n\x18repeated_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18- \x03(\x0c:q\n\x17repeatedgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18. 
\x03(\n2*.protobuf_unittest.RepeatedGroup_extension:~\n!repeated_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"repeated_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!repeated_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1erepeated_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1frepeated_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1erepeated_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1frepeated_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x36 \x03(\tB\x02\x08\x02:I\n\x17repeated_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x37 \x03(\tB\x02\x08\x01:\x80\x01\n\x1frepeated_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:I\n\x17\x64\x65\x66\x61ult_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18= \x01(\x05:\x02\x34\x31:I\n\x17\x64\x65\x66\x61ult_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18> \x01(\x03:\x02\x34\x32:J\n\x18\x64\x65\x66\x61ult_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18? \x01(\r:\x02\x34\x33:J\n\x18\x64\x65\x66\x61ult_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18@ \x01(\x04:\x02\x34\x34:K\n\x18\x64\x65\x66\x61ult_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x41 \x01(\x11:\x03-45:J\n\x18\x64\x65\x66\x61ult_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x42 \x01(\x12:\x02\x34\x36:K\n\x19\x64\x65\x66\x61ult_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x43 \x01(\x07:\x02\x34\x37:K\n\x19\x64\x65\x66\x61ult_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x44 \x01(\x06:\x02\x34\x38:L\n\x1a\x64\x65\x66\x61ult_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x45 \x01(\x0f:\x02\x34\x39:M\n\x1a\x64\x65\x66\x61ult_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x46 \x01(\x10:\x03-50:K\n\x17\x64\x65\x66\x61ult_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18G \x01(\x02:\x04\x35\x31.5:M\n\x18\x64\x65\x66\x61ult_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30:J\n\x16\x64\x65\x66\x61ult_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18I \x01(\x08:\x04true:M\n\x18\x64\x65\x66\x61ult_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18J \x01(\t:\x05hello:L\n\x17\x64\x65\x66\x61ult_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18K \x01(\x0c:\x05world:|\n\x1d\x64\x65\x66\x61ult_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R:y\n\x1e\x64\x65\x66\x61ult_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR:}\n\x1d\x64\x65\x66\x61ult_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18S 
\x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR:U\n\x1e\x64\x65\x66\x61ult_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02:M\n\x16\x64\x65\x66\x61ult_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01:D\n\x16oneof_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18o \x01(\r:{\n\x1eoneof_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:D\n\x16oneof_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18q \x01(\t:C\n\x15oneof_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18r \x01(\x0c:B\n\x13my_extension_string\x12%.protobuf_unittest.TestFieldOrderings\x18\x32 \x01(\t:?\n\x10my_extension_int\x12%.protobuf_unittest.TestFieldOrderings\x18\x05 \x01(\x05:K\n\x16packed_int32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18Z \x03(\x05\x42\x02\x10\x01:K\n\x16packed_int64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18[ \x03(\x03\x42\x02\x10\x01:L\n\x17packed_uint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\\ \x03(\rB\x02\x10\x01:L\n\x17packed_uint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18] \x03(\x04\x42\x02\x10\x01:L\n\x17packed_sint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18^ \x03(\x11\x42\x02\x10\x01:L\n\x17packed_sint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18_ \x03(\x12\x42\x02\x10\x01:M\n\x18packed_fixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18` \x03(\x07\x42\x02\x10\x01:M\n\x18packed_fixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x01:N\n\x19packed_sfixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x01:N\n\x19packed_sfixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x63 \x03(\x10\x42\x02\x10\x01:K\n\x16packed_float_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x64 \x03(\x02\x42\x02\x10\x01:L\n\x17packed_double_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x01:J\n\x15packed_bool_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x01:j\n\x15packed_enum_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01:O\n\x18unpacked_int32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18Z \x03(\x05\x42\x02\x10\x00:O\n\x18unpacked_int64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18[ \x03(\x03\x42\x02\x10\x00:P\n\x19unpacked_uint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\\ \x03(\rB\x02\x10\x00:P\n\x19unpacked_uint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18] \x03(\x04\x42\x02\x10\x00:P\n\x19unpacked_sint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18^ \x03(\x11\x42\x02\x10\x00:P\n\x19unpacked_sint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18_ \x03(\x12\x42\x02\x10\x00:Q\n\x1aunpacked_fixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18` \x03(\x07\x42\x02\x10\x00:Q\n\x1aunpacked_fixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x00:R\n\x1bunpacked_sfixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x00:R\n\x1bunpacked_sfixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x63 
\x03(\x10\x42\x02\x10\x00:O\n\x18unpacked_float_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x64 \x03(\x02\x42\x02\x10\x00:P\n\x19unpacked_double_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x00:N\n\x17unpacked_bool_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x00:n\n\x17unpacked_enum_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00\x42\x1d\x42\rUnittestProtoH\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf8\x01\x01')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_FOREIGNENUM = _descriptor.EnumDescriptor(
-  name='ForeignEnum',
-  full_name='protobuf_unittest.ForeignEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_FOO', index=0, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAR', index=1, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAZ', index=2, number=6,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=12445,
-  serialized_end=12509,
-)
-_sym_db.RegisterEnumDescriptor(_FOREIGNENUM)
-
-ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM)
-_TESTENUMWITHDUPVALUE = _descriptor.EnumDescriptor(
-  name='TestEnumWithDupValue',
-  full_name='protobuf_unittest.TestEnumWithDupValue',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO1', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR1', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=2, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOO2', index=3, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR2', index=4, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\020\001')),
-  serialized_start=12511,
-  serialized_end=12586,
-)
-_sym_db.RegisterEnumDescriptor(_TESTENUMWITHDUPVALUE)
-
-TestEnumWithDupValue = enum_type_wrapper.EnumTypeWrapper(_TESTENUMWITHDUPVALUE)
-_TESTSPARSEENUM = _descriptor.EnumDescriptor(
-  name='TestSparseEnum',
-  full_name='protobuf_unittest.TestSparseEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_A', index=0, number=123,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_B', index=1, number=62374,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_C', index=2, number=12589234,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_D', index=3, number=-15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_E', index=4, number=-53452,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_F', index=5, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SPARSE_G', index=6, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=12589,
-  serialized_end=12726,
-)
-_sym_db.RegisterEnumDescriptor(_TESTSPARSEENUM)
-
-TestSparseEnum = enum_type_wrapper.EnumTypeWrapper(_TESTSPARSEENUM)
-FOREIGN_FOO = 4
-FOREIGN_BAR = 5
-FOREIGN_BAZ = 6
-FOO1 = 1
-BAR1 = 2
-BAZ = 3
-FOO2 = 1
-BAR2 = 2
-SPARSE_A = 123
-SPARSE_B = 62374
-SPARSE_C = 12589234
-SPARSE_D = -15
-SPARSE_E = -53452
-SPARSE_F = 0
-SPARSE_G = 2
-
-OPTIONAL_INT32_EXTENSION_FIELD_NUMBER = 1
-optional_int32_extension = _descriptor.FieldDescriptor(
-  name='optional_int32_extension', full_name='protobuf_unittest.optional_int32_extension', index=0,
-  number=1, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_INT64_EXTENSION_FIELD_NUMBER = 2
-optional_int64_extension = _descriptor.FieldDescriptor(
-  name='optional_int64_extension', full_name='protobuf_unittest.optional_int64_extension', index=1,
-  number=2, type=3, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_UINT32_EXTENSION_FIELD_NUMBER = 3
-optional_uint32_extension = _descriptor.FieldDescriptor(
-  name='optional_uint32_extension', full_name='protobuf_unittest.optional_uint32_extension', index=2,
-  number=3, type=13, cpp_type=3, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_UINT64_EXTENSION_FIELD_NUMBER = 4
-optional_uint64_extension = _descriptor.FieldDescriptor(
-  name='optional_uint64_extension', full_name='protobuf_unittest.optional_uint64_extension', index=3,
-  number=4, type=4, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_SINT32_EXTENSION_FIELD_NUMBER = 5
-optional_sint32_extension = _descriptor.FieldDescriptor(
-  name='optional_sint32_extension', full_name='protobuf_unittest.optional_sint32_extension', index=4,
-  number=5, type=17, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_SINT64_EXTENSION_FIELD_NUMBER = 6
-optional_sint64_extension = _descriptor.FieldDescriptor(
-  name='optional_sint64_extension', full_name='protobuf_unittest.optional_sint64_extension', index=5,
-  number=6, type=18, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_FIXED32_EXTENSION_FIELD_NUMBER = 7
-optional_fixed32_extension = _descriptor.FieldDescriptor(
-  name='optional_fixed32_extension', full_name='protobuf_unittest.optional_fixed32_extension', index=6,
-  number=7, type=7, cpp_type=3, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_FIXED64_EXTENSION_FIELD_NUMBER = 8
-optional_fixed64_extension = _descriptor.FieldDescriptor(
-  name='optional_fixed64_extension', full_name='protobuf_unittest.optional_fixed64_extension', index=7,
-  number=8, type=6, cpp_type=4, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_SFIXED32_EXTENSION_FIELD_NUMBER = 9
-optional_sfixed32_extension = _descriptor.FieldDescriptor(
-  name='optional_sfixed32_extension', full_name='protobuf_unittest.optional_sfixed32_extension', index=8,
-  number=9, type=15, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_SFIXED64_EXTENSION_FIELD_NUMBER = 10
-optional_sfixed64_extension = _descriptor.FieldDescriptor(
-  name='optional_sfixed64_extension', full_name='protobuf_unittest.optional_sfixed64_extension', index=9,
-  number=10, type=16, cpp_type=2, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_FLOAT_EXTENSION_FIELD_NUMBER = 11
-optional_float_extension = _descriptor.FieldDescriptor(
-  name='optional_float_extension', full_name='protobuf_unittest.optional_float_extension', index=10,
-  number=11, type=2, cpp_type=6, label=1,
-  has_default_value=False, default_value=float(0),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_DOUBLE_EXTENSION_FIELD_NUMBER = 12
-optional_double_extension = _descriptor.FieldDescriptor(
-  name='optional_double_extension', full_name='protobuf_unittest.optional_double_extension', index=11,
-  number=12, type=1, cpp_type=5, label=1,
-  has_default_value=False, default_value=float(0),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_BOOL_EXTENSION_FIELD_NUMBER = 13
-optional_bool_extension = _descriptor.FieldDescriptor(
-  name='optional_bool_extension', full_name='protobuf_unittest.optional_bool_extension', index=12,
-  number=13, type=8, cpp_type=7, label=1,
-  has_default_value=False, default_value=False,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_STRING_EXTENSION_FIELD_NUMBER = 14
-optional_string_extension = _descriptor.FieldDescriptor(
-  name='optional_string_extension', full_name='protobuf_unittest.optional_string_extension', index=13,
-  number=14, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_BYTES_EXTENSION_FIELD_NUMBER = 15
-optional_bytes_extension = _descriptor.FieldDescriptor(
-  name='optional_bytes_extension', full_name='protobuf_unittest.optional_bytes_extension', index=14,
-  number=15, type=12, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b(""),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONALGROUP_EXTENSION_FIELD_NUMBER = 16
-optionalgroup_extension = _descriptor.FieldDescriptor(
-  name='optionalgroup_extension', full_name='protobuf_unittest.optionalgroup_extension', index=15,
-  number=16, type=10, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 18
-optional_nested_message_extension = _descriptor.FieldDescriptor(
-  name='optional_nested_message_extension', full_name='protobuf_unittest.optional_nested_message_extension', index=16,
-  number=18, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 19
-optional_foreign_message_extension = _descriptor.FieldDescriptor(
-  name='optional_foreign_message_extension', full_name='protobuf_unittest.optional_foreign_message_extension', index=17,
-  number=19, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 20
-optional_import_message_extension = _descriptor.FieldDescriptor(
-  name='optional_import_message_extension', full_name='protobuf_unittest.optional_import_message_extension', index=18,
-  number=20, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 21
-optional_nested_enum_extension = _descriptor.FieldDescriptor(
-  name='optional_nested_enum_extension', full_name='protobuf_unittest.optional_nested_enum_extension', index=19,
-  number=21, type=14, cpp_type=8, label=1,
-  has_default_value=False, default_value=1,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 22
-optional_foreign_enum_extension = _descriptor.FieldDescriptor(
-  name='optional_foreign_enum_extension', full_name='protobuf_unittest.optional_foreign_enum_extension', index=20,
-  number=22, type=14, cpp_type=8, label=1,
-  has_default_value=False, default_value=4,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 23
-optional_import_enum_extension = _descriptor.FieldDescriptor(
-  name='optional_import_enum_extension', full_name='protobuf_unittest.optional_import_enum_extension', index=21,
-  number=23, type=14, cpp_type=8, label=1,
-  has_default_value=False, default_value=7,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_STRING_PIECE_EXTENSION_FIELD_NUMBER = 24
-optional_string_piece_extension = _descriptor.FieldDescriptor(
-  name='optional_string_piece_extension', full_name='protobuf_unittest.optional_string_piece_extension', index=22,
-  number=24, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')))
-OPTIONAL_CORD_EXTENSION_FIELD_NUMBER = 25
-optional_cord_extension = _descriptor.FieldDescriptor(
-  name='optional_cord_extension', full_name='protobuf_unittest.optional_cord_extension', index=23,
-  number=25, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')))
-OPTIONAL_PUBLIC_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 26
-optional_public_import_message_extension = _descriptor.FieldDescriptor(
-  name='optional_public_import_message_extension', full_name='protobuf_unittest.optional_public_import_message_extension', index=24,
-  number=26, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-OPTIONAL_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 27
-optional_lazy_message_extension = _descriptor.FieldDescriptor(
-  name='optional_lazy_message_extension', full_name='protobuf_unittest.optional_lazy_message_extension', index=25,
-  number=27, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')))
-REPEATED_INT32_EXTENSION_FIELD_NUMBER = 31
-repeated_int32_extension = _descriptor.FieldDescriptor(
-  name='repeated_int32_extension', full_name='protobuf_unittest.repeated_int32_extension', index=26,
-  number=31, type=5, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_INT64_EXTENSION_FIELD_NUMBER = 32
-repeated_int64_extension = _descriptor.FieldDescriptor(
-  name='repeated_int64_extension', full_name='protobuf_unittest.repeated_int64_extension', index=27,
-  number=32, type=3, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_UINT32_EXTENSION_FIELD_NUMBER = 33
-repeated_uint32_extension = _descriptor.FieldDescriptor(
-  name='repeated_uint32_extension', full_name='protobuf_unittest.repeated_uint32_extension', index=28,
-  number=33, type=13, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_UINT64_EXTENSION_FIELD_NUMBER = 34
-repeated_uint64_extension = _descriptor.FieldDescriptor(
-  name='repeated_uint64_extension', full_name='protobuf_unittest.repeated_uint64_extension', index=29,
-  number=34, type=4, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_SINT32_EXTENSION_FIELD_NUMBER = 35
-repeated_sint32_extension = _descriptor.FieldDescriptor(
-  name='repeated_sint32_extension', full_name='protobuf_unittest.repeated_sint32_extension', index=30,
-  number=35, type=17, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_SINT64_EXTENSION_FIELD_NUMBER = 36
-repeated_sint64_extension = _descriptor.FieldDescriptor(
-  name='repeated_sint64_extension', full_name='protobuf_unittest.repeated_sint64_extension', index=31,
-  number=36, type=18, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_FIXED32_EXTENSION_FIELD_NUMBER = 37
-repeated_fixed32_extension = _descriptor.FieldDescriptor(
-  name='repeated_fixed32_extension', full_name='protobuf_unittest.repeated_fixed32_extension', index=32,
-  number=37, type=7, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_FIXED64_EXTENSION_FIELD_NUMBER = 38
-repeated_fixed64_extension = _descriptor.FieldDescriptor(
-  name='repeated_fixed64_extension', full_name='protobuf_unittest.repeated_fixed64_extension', index=33,
-  number=38, type=6, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_SFIXED32_EXTENSION_FIELD_NUMBER = 39
-repeated_sfixed32_extension = _descriptor.FieldDescriptor(
-  name='repeated_sfixed32_extension', full_name='protobuf_unittest.repeated_sfixed32_extension', index=34,
-  number=39, type=15, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_SFIXED64_EXTENSION_FIELD_NUMBER = 40
-repeated_sfixed64_extension = _descriptor.FieldDescriptor(
-  name='repeated_sfixed64_extension', full_name='protobuf_unittest.repeated_sfixed64_extension', index=35,
-  number=40, type=16, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_FLOAT_EXTENSION_FIELD_NUMBER = 41
-repeated_float_extension = _descriptor.FieldDescriptor(
-  name='repeated_float_extension', full_name='protobuf_unittest.repeated_float_extension', index=36,
-  number=41, type=2, cpp_type=6, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_DOUBLE_EXTENSION_FIELD_NUMBER = 42
-repeated_double_extension = _descriptor.FieldDescriptor(
-  name='repeated_double_extension', full_name='protobuf_unittest.repeated_double_extension', index=37,
-  number=42, type=1, cpp_type=5, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_BOOL_EXTENSION_FIELD_NUMBER = 43
-repeated_bool_extension = _descriptor.FieldDescriptor(
-  name='repeated_bool_extension', full_name='protobuf_unittest.repeated_bool_extension', index=38,
-  number=43, type=8, cpp_type=7, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_STRING_EXTENSION_FIELD_NUMBER = 44
-repeated_string_extension = _descriptor.FieldDescriptor(
-  name='repeated_string_extension', full_name='protobuf_unittest.repeated_string_extension', index=39,
-  number=44, type=9, cpp_type=9, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_BYTES_EXTENSION_FIELD_NUMBER = 45
-repeated_bytes_extension = _descriptor.FieldDescriptor(
-  name='repeated_bytes_extension', full_name='protobuf_unittest.repeated_bytes_extension', index=40,
-  number=45, type=12, cpp_type=9, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATEDGROUP_EXTENSION_FIELD_NUMBER = 46
-repeatedgroup_extension = _descriptor.FieldDescriptor(
-  name='repeatedgroup_extension', full_name='protobuf_unittest.repeatedgroup_extension', index=41,
-  number=46, type=10, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 48
-repeated_nested_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_nested_message_extension', full_name='protobuf_unittest.repeated_nested_message_extension', index=42,
-  number=48, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 49
-repeated_foreign_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_foreign_message_extension', full_name='protobuf_unittest.repeated_foreign_message_extension', index=43,
-  number=49, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 50
-repeated_import_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_import_message_extension', full_name='protobuf_unittest.repeated_import_message_extension', index=44,
-  number=50, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 51
-repeated_nested_enum_extension = _descriptor.FieldDescriptor(
-  name='repeated_nested_enum_extension', full_name='protobuf_unittest.repeated_nested_enum_extension', index=45,
-  number=51, type=14, cpp_type=8, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 52
-repeated_foreign_enum_extension = _descriptor.FieldDescriptor(
-  name='repeated_foreign_enum_extension', full_name='protobuf_unittest.repeated_foreign_enum_extension', index=46,
-  number=52, type=14, cpp_type=8, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 53
-repeated_import_enum_extension = _descriptor.FieldDescriptor(
-  name='repeated_import_enum_extension', full_name='protobuf_unittest.repeated_import_enum_extension', index=47,
-  number=53, type=14, cpp_type=8, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-REPEATED_STRING_PIECE_EXTENSION_FIELD_NUMBER = 54
-repeated_string_piece_extension = _descriptor.FieldDescriptor(
-  name='repeated_string_piece_extension', full_name='protobuf_unittest.repeated_string_piece_extension', index=48,
-  number=54, type=9, cpp_type=9, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')))
-REPEATED_CORD_EXTENSION_FIELD_NUMBER = 55
-repeated_cord_extension = _descriptor.FieldDescriptor(
-  name='repeated_cord_extension', full_name='protobuf_unittest.repeated_cord_extension', index=49,
-  number=55, type=9, cpp_type=9, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')))
-REPEATED_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 57
-repeated_lazy_message_extension = _descriptor.FieldDescriptor(
-  name='repeated_lazy_message_extension', full_name='protobuf_unittest.repeated_lazy_message_extension', index=50,
-  number=57, type=11, cpp_type=10, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')))
-DEFAULT_INT32_EXTENSION_FIELD_NUMBER = 61
-default_int32_extension = _descriptor.FieldDescriptor(
-  name='default_int32_extension', full_name='protobuf_unittest.default_int32_extension', index=51,
-  number=61, type=5, cpp_type=1, label=1,
-  has_default_value=True, default_value=41,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_INT64_EXTENSION_FIELD_NUMBER = 62
-default_int64_extension = _descriptor.FieldDescriptor(
-  name='default_int64_extension', full_name='protobuf_unittest.default_int64_extension', index=52,
-  number=62, type=3, cpp_type=2, label=1,
-  has_default_value=True, default_value=42,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_UINT32_EXTENSION_FIELD_NUMBER = 63
-default_uint32_extension = _descriptor.FieldDescriptor(
-  name='default_uint32_extension', full_name='protobuf_unittest.default_uint32_extension', index=53,
-  number=63, type=13, cpp_type=3, label=1,
-  has_default_value=True, default_value=43,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_UINT64_EXTENSION_FIELD_NUMBER = 64
-default_uint64_extension = _descriptor.FieldDescriptor(
-  name='default_uint64_extension', full_name='protobuf_unittest.default_uint64_extension', index=54,
-  number=64, type=4, cpp_type=4, label=1,
-  has_default_value=True, default_value=44,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_SINT32_EXTENSION_FIELD_NUMBER = 65
-default_sint32_extension = _descriptor.FieldDescriptor(
-  name='default_sint32_extension', full_name='protobuf_unittest.default_sint32_extension', index=55,
-  number=65, type=17, cpp_type=1, label=1,
-  has_default_value=True, default_value=-45,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_SINT64_EXTENSION_FIELD_NUMBER = 66
-default_sint64_extension = _descriptor.FieldDescriptor(
-  name='default_sint64_extension', full_name='protobuf_unittest.default_sint64_extension', index=56,
-  number=66, type=18, cpp_type=2, label=1,
-  has_default_value=True, default_value=46,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_FIXED32_EXTENSION_FIELD_NUMBER = 67
-default_fixed32_extension = _descriptor.FieldDescriptor(
-  name='default_fixed32_extension', full_name='protobuf_unittest.default_fixed32_extension', index=57,
-  number=67, type=7, cpp_type=3, label=1,
-  has_default_value=True, default_value=47,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_FIXED64_EXTENSION_FIELD_NUMBER = 68
-default_fixed64_extension = _descriptor.FieldDescriptor(
-  name='default_fixed64_extension', full_name='protobuf_unittest.default_fixed64_extension', index=58,
-  number=68, type=6, cpp_type=4, label=1,
-  has_default_value=True, default_value=48,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_SFIXED32_EXTENSION_FIELD_NUMBER = 69
-default_sfixed32_extension = _descriptor.FieldDescriptor(
-  name='default_sfixed32_extension', full_name='protobuf_unittest.default_sfixed32_extension', index=59,
-  number=69, type=15, cpp_type=1, label=1,
-  has_default_value=True, default_value=49,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_SFIXED64_EXTENSION_FIELD_NUMBER = 70
-default_sfixed64_extension = _descriptor.FieldDescriptor(
-  name='default_sfixed64_extension', full_name='protobuf_unittest.default_sfixed64_extension', index=60,
-  number=70, type=16, cpp_type=2, label=1,
-  has_default_value=True, default_value=-50,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_FLOAT_EXTENSION_FIELD_NUMBER = 71
-default_float_extension = _descriptor.FieldDescriptor(
-  name='default_float_extension', full_name='protobuf_unittest.default_float_extension', index=61,
-  number=71, type=2, cpp_type=6, label=1,
-  has_default_value=True, default_value=float(51.5),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_DOUBLE_EXTENSION_FIELD_NUMBER = 72
-default_double_extension = _descriptor.FieldDescriptor(
-  name='default_double_extension', full_name='protobuf_unittest.default_double_extension', index=62,
-  number=72, type=1, cpp_type=5, label=1,
-  has_default_value=True, default_value=float(52000),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_BOOL_EXTENSION_FIELD_NUMBER = 73
-default_bool_extension = _descriptor.FieldDescriptor(
-  name='default_bool_extension', full_name='protobuf_unittest.default_bool_extension', index=63,
-  number=73, type=8, cpp_type=7, label=1,
-  has_default_value=True, default_value=True,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_STRING_EXTENSION_FIELD_NUMBER = 74
-default_string_extension = _descriptor.FieldDescriptor(
-  name='default_string_extension', full_name='protobuf_unittest.default_string_extension', index=64,
-  number=74, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("hello").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_BYTES_EXTENSION_FIELD_NUMBER = 75
-default_bytes_extension = _descriptor.FieldDescriptor(
-  name='default_bytes_extension', full_name='protobuf_unittest.default_bytes_extension', index=65,
-  number=75, type=12, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("world"),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 81
-default_nested_enum_extension = _descriptor.FieldDescriptor(
-  name='default_nested_enum_extension', full_name='protobuf_unittest.default_nested_enum_extension', index=66,
-  number=81, type=14, cpp_type=8, label=1,
-  has_default_value=True, default_value=2,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 82
-default_foreign_enum_extension = _descriptor.FieldDescriptor(
-  name='default_foreign_enum_extension', full_name='protobuf_unittest.default_foreign_enum_extension', index=67,
-  number=82, type=14, cpp_type=8, label=1,
-  has_default_value=True, default_value=5,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 83
-default_import_enum_extension = _descriptor.FieldDescriptor(
-  name='default_import_enum_extension', full_name='protobuf_unittest.default_import_enum_extension', index=68,
-  number=83, type=14, cpp_type=8, label=1,
-  has_default_value=True, default_value=8,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-DEFAULT_STRING_PIECE_EXTENSION_FIELD_NUMBER = 84
-default_string_piece_extension = _descriptor.FieldDescriptor(
-  name='default_string_piece_extension', full_name='protobuf_unittest.default_string_piece_extension', index=69,
-  number=84, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("abc").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')))
-DEFAULT_CORD_EXTENSION_FIELD_NUMBER = 85
-default_cord_extension = _descriptor.FieldDescriptor(
-  name='default_cord_extension', full_name='protobuf_unittest.default_cord_extension', index=70,
-  number=85, type=9, cpp_type=9, label=1,
-  has_default_value=True, default_value=_b("123").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')))
-ONEOF_UINT32_EXTENSION_FIELD_NUMBER = 111
-oneof_uint32_extension = _descriptor.FieldDescriptor(
-  name='oneof_uint32_extension', full_name='protobuf_unittest.oneof_uint32_extension', index=71,
-  number=111, type=13, cpp_type=3, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ONEOF_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 112
-oneof_nested_message_extension = _descriptor.FieldDescriptor(
-  name='oneof_nested_message_extension', full_name='protobuf_unittest.oneof_nested_message_extension', index=72,
-  number=112, type=11, cpp_type=10, label=1,
-  has_default_value=False, default_value=None,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ONEOF_STRING_EXTENSION_FIELD_NUMBER = 113
-oneof_string_extension = _descriptor.FieldDescriptor(
-  name='oneof_string_extension', full_name='protobuf_unittest.oneof_string_extension', index=73,
-  number=113, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-ONEOF_BYTES_EXTENSION_FIELD_NUMBER = 114
-oneof_bytes_extension = _descriptor.FieldDescriptor(
-  name='oneof_bytes_extension', full_name='protobuf_unittest.oneof_bytes_extension', index=74,
-  number=114, type=12, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b(""),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-MY_EXTENSION_STRING_FIELD_NUMBER = 50
-my_extension_string = _descriptor.FieldDescriptor(
-  name='my_extension_string', full_name='protobuf_unittest.my_extension_string', index=75,
-  number=50, type=9, cpp_type=9, label=1,
-  has_default_value=False, default_value=_b("").decode('utf-8'),
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-MY_EXTENSION_INT_FIELD_NUMBER = 5
-my_extension_int = _descriptor.FieldDescriptor(
-  name='my_extension_int', full_name='protobuf_unittest.my_extension_int', index=76,
-  number=5, type=5, cpp_type=1, label=1,
-  has_default_value=False, default_value=0,
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=None)
-PACKED_INT32_EXTENSION_FIELD_NUMBER = 90
-packed_int32_extension = _descriptor.FieldDescriptor(
-  name='packed_int32_extension', full_name='protobuf_unittest.packed_int32_extension', index=77,
-  number=90, type=5, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_INT64_EXTENSION_FIELD_NUMBER = 91
-packed_int64_extension = _descriptor.FieldDescriptor(
-  name='packed_int64_extension', full_name='protobuf_unittest.packed_int64_extension', index=78,
-  number=91, type=3, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_UINT32_EXTENSION_FIELD_NUMBER = 92
-packed_uint32_extension = _descriptor.FieldDescriptor(
-  name='packed_uint32_extension', full_name='protobuf_unittest.packed_uint32_extension', index=79,
-  number=92, type=13, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_UINT64_EXTENSION_FIELD_NUMBER = 93
-packed_uint64_extension = _descriptor.FieldDescriptor(
-  name='packed_uint64_extension', full_name='protobuf_unittest.packed_uint64_extension', index=80,
-  number=93, type=4, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_SINT32_EXTENSION_FIELD_NUMBER = 94
-packed_sint32_extension = _descriptor.FieldDescriptor(
-  name='packed_sint32_extension', full_name='protobuf_unittest.packed_sint32_extension', index=81,
-  number=94, type=17, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_SINT64_EXTENSION_FIELD_NUMBER = 95
-packed_sint64_extension = _descriptor.FieldDescriptor(
-  name='packed_sint64_extension', full_name='protobuf_unittest.packed_sint64_extension', index=82,
-  number=95, type=18, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96
-packed_fixed32_extension = _descriptor.FieldDescriptor(
-  name='packed_fixed32_extension', full_name='protobuf_unittest.packed_fixed32_extension', index=83,
-  number=96, type=7, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97
-packed_fixed64_extension = _descriptor.FieldDescriptor(
-  name='packed_fixed64_extension', full_name='protobuf_unittest.packed_fixed64_extension', index=84,
-  number=97, type=6, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98
-packed_sfixed32_extension = _descriptor.FieldDescriptor(
-  name='packed_sfixed32_extension', full_name='protobuf_unittest.packed_sfixed32_extension', index=85,
-  number=98, type=15, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99
-packed_sfixed64_extension = _descriptor.FieldDescriptor(
-  name='packed_sfixed64_extension', full_name='protobuf_unittest.packed_sfixed64_extension', index=86,
-  number=99, type=16, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100
-packed_float_extension = _descriptor.FieldDescriptor(
-  name='packed_float_extension', full_name='protobuf_unittest.packed_float_extension', index=87,
-  number=100, type=2, cpp_type=6, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101
-packed_double_extension = _descriptor.FieldDescriptor(
-  name='packed_double_extension', full_name='protobuf_unittest.packed_double_extension', index=88,
-  number=101, type=1, cpp_type=5, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_BOOL_EXTENSION_FIELD_NUMBER = 102
-packed_bool_extension = _descriptor.FieldDescriptor(
-  name='packed_bool_extension', full_name='protobuf_unittest.packed_bool_extension', index=89,
-  number=102, type=8, cpp_type=7, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-PACKED_ENUM_EXTENSION_FIELD_NUMBER = 103
-packed_enum_extension = _descriptor.FieldDescriptor(
-  name='packed_enum_extension', full_name='protobuf_unittest.packed_enum_extension', index=90,
-  number=103, type=14, cpp_type=8, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')))
-UNPACKED_INT32_EXTENSION_FIELD_NUMBER = 90
-unpacked_int32_extension = _descriptor.FieldDescriptor(
-  name='unpacked_int32_extension', full_name='protobuf_unittest.unpacked_int32_extension', index=91,
-  number=90, type=5, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_INT64_EXTENSION_FIELD_NUMBER = 91
-unpacked_int64_extension = _descriptor.FieldDescriptor(
-  name='unpacked_int64_extension', full_name='protobuf_unittest.unpacked_int64_extension', index=92,
-  number=91, type=3, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_UINT32_EXTENSION_FIELD_NUMBER = 92
-unpacked_uint32_extension = _descriptor.FieldDescriptor(
-  name='unpacked_uint32_extension', full_name='protobuf_unittest.unpacked_uint32_extension', index=93,
-  number=92, type=13, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_UINT64_EXTENSION_FIELD_NUMBER = 93
-unpacked_uint64_extension = _descriptor.FieldDescriptor(
-  name='unpacked_uint64_extension', full_name='protobuf_unittest.unpacked_uint64_extension', index=94,
-  number=93, type=4, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_SINT32_EXTENSION_FIELD_NUMBER = 94
-unpacked_sint32_extension = _descriptor.FieldDescriptor(
-  name='unpacked_sint32_extension', full_name='protobuf_unittest.unpacked_sint32_extension', index=95,
-  number=94, type=17, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_SINT64_EXTENSION_FIELD_NUMBER = 95
-unpacked_sint64_extension = _descriptor.FieldDescriptor(
-  name='unpacked_sint64_extension', full_name='protobuf_unittest.unpacked_sint64_extension', index=96,
-  number=95, type=18, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96
-unpacked_fixed32_extension = _descriptor.FieldDescriptor(
-  name='unpacked_fixed32_extension', full_name='protobuf_unittest.unpacked_fixed32_extension', index=97,
-  number=96, type=7, cpp_type=3, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97
-unpacked_fixed64_extension = _descriptor.FieldDescriptor(
-  name='unpacked_fixed64_extension', full_name='protobuf_unittest.unpacked_fixed64_extension', index=98,
-  number=97, type=6, cpp_type=4, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98
-unpacked_sfixed32_extension = _descriptor.FieldDescriptor(
-  name='unpacked_sfixed32_extension', full_name='protobuf_unittest.unpacked_sfixed32_extension', index=99,
-  number=98, type=15, cpp_type=1, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99
-unpacked_sfixed64_extension = _descriptor.FieldDescriptor(
-  name='unpacked_sfixed64_extension', full_name='protobuf_unittest.unpacked_sfixed64_extension', index=100,
-  number=99, type=16, cpp_type=2, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100
-unpacked_float_extension = _descriptor.FieldDescriptor(
-  name='unpacked_float_extension', full_name='protobuf_unittest.unpacked_float_extension', index=101,
-  number=100, type=2, cpp_type=6, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101
-unpacked_double_extension = _descriptor.FieldDescriptor(
-  name='unpacked_double_extension', full_name='protobuf_unittest.unpacked_double_extension', index=102,
-  number=101, type=1, cpp_type=5, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_BOOL_EXTENSION_FIELD_NUMBER = 102
-unpacked_bool_extension = _descriptor.FieldDescriptor(
-  name='unpacked_bool_extension', full_name='protobuf_unittest.unpacked_bool_extension', index=103,
-  number=102, type=8, cpp_type=7, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-UNPACKED_ENUM_EXTENSION_FIELD_NUMBER = 103
-unpacked_enum_extension = _descriptor.FieldDescriptor(
-  name='unpacked_enum_extension', full_name='protobuf_unittest.unpacked_enum_extension', index=104,
-  number=103, type=14, cpp_type=8, label=3,
-  has_default_value=False, default_value=[],
-  message_type=None, enum_type=None, containing_type=None,
-  is_extension=True, extension_scope=None,
-  options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')))
-
-_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='protobuf_unittest.TestAllTypes.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=2, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEG', index=3, number=-1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3202,
-  serialized_end=3259,
-)
-_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM)
-
-_TESTONEOF2_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='protobuf_unittest.TestOneof2.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=2, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3202,
-  serialized_end=3241,
-)
-_sym_db.RegisterEnumDescriptor(_TESTONEOF2_NESTEDENUM)
-
-_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE = _descriptor.EnumDescriptor(
-  name='DynamicEnumType',
-  full_name='protobuf_unittest.TestDynamicExtensions.DynamicEnumType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='DYNAMIC_FOO', index=0, number=2200,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DYNAMIC_BAR', index=1, number=2201,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DYNAMIC_BAZ', index=2, number=2202,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=10735,
-  serialized_end=10806,
-)
-_sym_db.RegisterEnumDescriptor(_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE)
-
-
-_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.TestAllTypes.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='protobuf_unittest.TestAllTypes.NestedMessage.bb', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3117,
-  serialized_end=3144,
-)
-
-_TESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor(
-  name='OptionalGroup',
-  full_name='protobuf_unittest.TestAllTypes.OptionalGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestAllTypes.OptionalGroup.a', index=0,
-      number=17, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3146,
-  serialized_end=3172,
-)
-
-_TESTALLTYPES_REPEATEDGROUP = _descriptor.Descriptor(
-  name='RepeatedGroup',
-  full_name='protobuf_unittest.TestAllTypes.RepeatedGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestAllTypes.RepeatedGroup.a', index=0,
-      number=47, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3174,
-  serialized_end=3200,
-)
-
-_TESTALLTYPES = _descriptor.Descriptor(
-  name='TestAllTypes',
-  full_name='protobuf_unittest.TestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='protobuf_unittest.TestAllTypes.optional_int32', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int64', full_name='protobuf_unittest.TestAllTypes.optional_int64', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint32', full_name='protobuf_unittest.TestAllTypes.optional_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint64', full_name='protobuf_unittest.TestAllTypes.optional_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint32', full_name='protobuf_unittest.TestAllTypes.optional_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint64', full_name='protobuf_unittest.TestAllTypes.optional_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed32', full_name='protobuf_unittest.TestAllTypes.optional_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed64', full_name='protobuf_unittest.TestAllTypes.optional_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed32', full_name='protobuf_unittest.TestAllTypes.optional_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed64', full_name='protobuf_unittest.TestAllTypes.optional_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_float', full_name='protobuf_unittest.TestAllTypes.optional_float', index=10,
-      number=11, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_double', full_name='protobuf_unittest.TestAllTypes.optional_double', index=11,
-      number=12, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bool', full_name='protobuf_unittest.TestAllTypes.optional_bool', index=12,
-      number=13, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string', full_name='protobuf_unittest.TestAllTypes.optional_string', index=13,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bytes', full_name='protobuf_unittest.TestAllTypes.optional_bytes', index=14,
-      number=15, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optionalgroup', full_name='protobuf_unittest.TestAllTypes.optionalgroup', index=15,
-      number=16, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='protobuf_unittest.TestAllTypes.optional_nested_message', index=16,
-      number=18, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_message', full_name='protobuf_unittest.TestAllTypes.optional_foreign_message', index=17,
-      number=19, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_import_message', full_name='protobuf_unittest.TestAllTypes.optional_import_message', index=18,
-      number=20, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_enum', full_name='protobuf_unittest.TestAllTypes.optional_nested_enum', index=19,
-      number=21, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_enum', full_name='protobuf_unittest.TestAllTypes.optional_foreign_enum', index=20,
-      number=22, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=4,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_import_enum', full_name='protobuf_unittest.TestAllTypes.optional_import_enum', index=21,
-      number=23, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=7,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string_piece', full_name='protobuf_unittest.TestAllTypes.optional_string_piece', index=22,
-      number=24, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='optional_cord', full_name='protobuf_unittest.TestAllTypes.optional_cord', index=23,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='optional_public_import_message', full_name='protobuf_unittest.TestAllTypes.optional_public_import_message', index=24,
-      number=26, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_lazy_message', full_name='protobuf_unittest.TestAllTypes.optional_lazy_message', index=25,
-      number=27, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='protobuf_unittest.TestAllTypes.repeated_int32', index=26,
-      number=31, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='protobuf_unittest.TestAllTypes.repeated_int64', index=27,
-      number=32, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='protobuf_unittest.TestAllTypes.repeated_uint32', index=28,
-      number=33, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='protobuf_unittest.TestAllTypes.repeated_uint64', index=29,
-      number=34, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='protobuf_unittest.TestAllTypes.repeated_sint32', index=30,
-      number=35, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='protobuf_unittest.TestAllTypes.repeated_sint64', index=31,
-      number=36, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='protobuf_unittest.TestAllTypes.repeated_fixed32', index=32,
-      number=37, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='protobuf_unittest.TestAllTypes.repeated_fixed64', index=33,
-      number=38, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed32', index=34,
-      number=39, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed64', index=35,
-      number=40, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='protobuf_unittest.TestAllTypes.repeated_float', index=36,
-      number=41, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='protobuf_unittest.TestAllTypes.repeated_double', index=37,
-      number=42, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='protobuf_unittest.TestAllTypes.repeated_bool', index=38,
-      number=43, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string', full_name='protobuf_unittest.TestAllTypes.repeated_string', index=39,
-      number=44, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bytes', full_name='protobuf_unittest.TestAllTypes.repeated_bytes', index=40,
-      number=45, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeatedgroup', full_name='protobuf_unittest.TestAllTypes.repeatedgroup', index=41,
-      number=46, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_message', full_name='protobuf_unittest.TestAllTypes.repeated_nested_message', index=42,
-      number=48, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_message', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_message', index=43,
-      number=49, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_message', full_name='protobuf_unittest.TestAllTypes.repeated_import_message', index=44,
-      number=50, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='protobuf_unittest.TestAllTypes.repeated_nested_enum', index=45,
-      number=51, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_enum', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_enum', index=46,
-      number=52, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_enum', full_name='protobuf_unittest.TestAllTypes.repeated_import_enum', index=47,
-      number=53, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string_piece', full_name='protobuf_unittest.TestAllTypes.repeated_string_piece', index=48,
-      number=54, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_cord', full_name='protobuf_unittest.TestAllTypes.repeated_cord', index=49,
-      number=55, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_lazy_message', full_name='protobuf_unittest.TestAllTypes.repeated_lazy_message', index=50,
-      number=57, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='default_int32', full_name='protobuf_unittest.TestAllTypes.default_int32', index=51,
-      number=61, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=41,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_int64', full_name='protobuf_unittest.TestAllTypes.default_int64', index=52,
-      number=62, type=3, cpp_type=2, label=1,
-      has_default_value=True, default_value=42,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_uint32', full_name='protobuf_unittest.TestAllTypes.default_uint32', index=53,
-      number=63, type=13, cpp_type=3, label=1,
-      has_default_value=True, default_value=43,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_uint64', full_name='protobuf_unittest.TestAllTypes.default_uint64', index=54,
-      number=64, type=4, cpp_type=4, label=1,
-      has_default_value=True, default_value=44,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sint32', full_name='protobuf_unittest.TestAllTypes.default_sint32', index=55,
-      number=65, type=17, cpp_type=1, label=1,
-      has_default_value=True, default_value=-45,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sint64', full_name='protobuf_unittest.TestAllTypes.default_sint64', index=56,
-      number=66, type=18, cpp_type=2, label=1,
-      has_default_value=True, default_value=46,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_fixed32', full_name='protobuf_unittest.TestAllTypes.default_fixed32', index=57,
-      number=67, type=7, cpp_type=3, label=1,
-      has_default_value=True, default_value=47,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_fixed64', full_name='protobuf_unittest.TestAllTypes.default_fixed64', index=58,
-      number=68, type=6, cpp_type=4, label=1,
-      has_default_value=True, default_value=48,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sfixed32', full_name='protobuf_unittest.TestAllTypes.default_sfixed32', index=59,
-      number=69, type=15, cpp_type=1, label=1,
-      has_default_value=True, default_value=49,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_sfixed64', full_name='protobuf_unittest.TestAllTypes.default_sfixed64', index=60,
-      number=70, type=16, cpp_type=2, label=1,
-      has_default_value=True, default_value=-50,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_float', full_name='protobuf_unittest.TestAllTypes.default_float', index=61,
-      number=71, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(51.5),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_double', full_name='protobuf_unittest.TestAllTypes.default_double', index=62,
-      number=72, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=float(52000),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_bool', full_name='protobuf_unittest.TestAllTypes.default_bool', index=63,
-      number=73, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=True,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_string', full_name='protobuf_unittest.TestAllTypes.default_string', index=64,
-      number=74, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("hello").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_bytes', full_name='protobuf_unittest.TestAllTypes.default_bytes', index=65,
-      number=75, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("world"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_nested_enum', full_name='protobuf_unittest.TestAllTypes.default_nested_enum', index=66,
-      number=81, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=2,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_foreign_enum', full_name='protobuf_unittest.TestAllTypes.default_foreign_enum', index=67,
-      number=82, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=5,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_import_enum', full_name='protobuf_unittest.TestAllTypes.default_import_enum', index=68,
-      number=83, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=8,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='default_string_piece', full_name='protobuf_unittest.TestAllTypes.default_string_piece', index=69,
-      number=84, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("abc").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='default_cord', full_name='protobuf_unittest.TestAllTypes.default_cord', index=70,
-      number=85, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("123").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='oneof_uint32', full_name='protobuf_unittest.TestAllTypes.oneof_uint32', index=71,
-      number=111, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_nested_message', full_name='protobuf_unittest.TestAllTypes.oneof_nested_message', index=72,
-      number=112, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_string', full_name='protobuf_unittest.TestAllTypes.oneof_string', index=73,
-      number=113, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_bytes', full_name='protobuf_unittest.TestAllTypes.oneof_bytes', index=74,
-      number=114, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTALLTYPES_NESTEDMESSAGE, _TESTALLTYPES_OPTIONALGROUP, _TESTALLTYPES_REPEATEDGROUP, ],
-  enum_types=[
-    _TESTALLTYPES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='oneof_field', full_name='protobuf_unittest.TestAllTypes.oneof_field',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=93,
-  serialized_end=3274,
-)
-
-
-_NESTEDTESTALLTYPES = _descriptor.Descriptor(
-  name='NestedTestAllTypes',
-  full_name='protobuf_unittest.NestedTestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='child', full_name='protobuf_unittest.NestedTestAllTypes.child', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='payload', full_name='protobuf_unittest.NestedTestAllTypes.payload', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_child', full_name='protobuf_unittest.NestedTestAllTypes.repeated_child', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3277,
-  serialized_end=3464,
-)
-
-
-_TESTDEPRECATEDFIELDS = _descriptor.Descriptor(
-  name='TestDeprecatedFields',
-  full_name='protobuf_unittest.TestDeprecatedFields',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='deprecated_int32', full_name='protobuf_unittest.TestDeprecatedFields.deprecated_int32', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3466,
-  serialized_end=3518,
-)
-
-
-_FOREIGNMESSAGE = _descriptor.Descriptor(
-  name='ForeignMessage',
-  full_name='protobuf_unittest.ForeignMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='c', full_name='protobuf_unittest.ForeignMessage.c', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='d', full_name='protobuf_unittest.ForeignMessage.d', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3520,
-  serialized_end=3558,
-)
-
-
-_TESTRESERVEDFIELDS = _descriptor.Descriptor(
-  name='TestReservedFields',
-  full_name='protobuf_unittest.TestReservedFields',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3560,
-  serialized_end=3608,
-)
-
-
-_TESTALLEXTENSIONS = _descriptor.Descriptor(
-  name='TestAllExtensions',
-  full_name='protobuf_unittest.TestAllExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=3610,
-  serialized_end=3639,
-)
-
-
-_OPTIONALGROUP_EXTENSION = _descriptor.Descriptor(
-  name='OptionalGroup_extension',
-  full_name='protobuf_unittest.OptionalGroup_extension',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.OptionalGroup_extension.a', index=0,
-      number=17, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3641,
-  serialized_end=3677,
-)
-
-
-_REPEATEDGROUP_EXTENSION = _descriptor.Descriptor(
-  name='RepeatedGroup_extension',
-  full_name='protobuf_unittest.RepeatedGroup_extension',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.RepeatedGroup_extension.a', index=0,
-      number=47, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3679,
-  serialized_end=3715,
-)
-
-
-_TESTNESTEDEXTENSION = _descriptor.Descriptor(
-  name='TestNestedExtension',
-  full_name='protobuf_unittest.TestNestedExtension',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='test', full_name='protobuf_unittest.TestNestedExtension.test', index=0,
-      number=1002, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("test").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nested_string_extension', full_name='protobuf_unittest.TestNestedExtension.nested_string_extension', index=1,
-      number=1003, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3718,
-  serialized_end=3870,
-)
-
-
-_TESTREQUIRED = _descriptor.Descriptor(
-  name='TestRequired',
-  full_name='protobuf_unittest.TestRequired',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestRequired.a', index=0,
-      number=1, type=5, cpp_type=1, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy2', full_name='protobuf_unittest.TestRequired.dummy2', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='b', full_name='protobuf_unittest.TestRequired.b', index=2,
-      number=3, type=5, cpp_type=1, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy4', full_name='protobuf_unittest.TestRequired.dummy4', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy5', full_name='protobuf_unittest.TestRequired.dummy5', index=4,
-      number=5, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy6', full_name='protobuf_unittest.TestRequired.dummy6', index=5,
-      number=6, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy7', full_name='protobuf_unittest.TestRequired.dummy7', index=6,
-      number=7, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy8', full_name='protobuf_unittest.TestRequired.dummy8', index=7,
-      number=8, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy9', full_name='protobuf_unittest.TestRequired.dummy9', index=8,
-      number=9, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy10', full_name='protobuf_unittest.TestRequired.dummy10', index=9,
-      number=10, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy11', full_name='protobuf_unittest.TestRequired.dummy11', index=10,
-      number=11, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy12', full_name='protobuf_unittest.TestRequired.dummy12', index=11,
-      number=12, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy13', full_name='protobuf_unittest.TestRequired.dummy13', index=12,
-      number=13, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy14', full_name='protobuf_unittest.TestRequired.dummy14', index=13,
-      number=14, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy15', full_name='protobuf_unittest.TestRequired.dummy15', index=14,
-      number=15, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy16', full_name='protobuf_unittest.TestRequired.dummy16', index=15,
-      number=16, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy17', full_name='protobuf_unittest.TestRequired.dummy17', index=16,
-      number=17, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy18', full_name='protobuf_unittest.TestRequired.dummy18', index=17,
-      number=18, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy19', full_name='protobuf_unittest.TestRequired.dummy19', index=18,
-      number=19, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy20', full_name='protobuf_unittest.TestRequired.dummy20', index=19,
-      number=20, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy21', full_name='protobuf_unittest.TestRequired.dummy21', index=20,
-      number=21, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy22', full_name='protobuf_unittest.TestRequired.dummy22', index=21,
-      number=22, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy23', full_name='protobuf_unittest.TestRequired.dummy23', index=22,
-      number=23, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy24', full_name='protobuf_unittest.TestRequired.dummy24', index=23,
-      number=24, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy25', full_name='protobuf_unittest.TestRequired.dummy25', index=24,
-      number=25, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy26', full_name='protobuf_unittest.TestRequired.dummy26', index=25,
-      number=26, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy27', full_name='protobuf_unittest.TestRequired.dummy27', index=26,
-      number=27, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy28', full_name='protobuf_unittest.TestRequired.dummy28', index=27,
-      number=28, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy29', full_name='protobuf_unittest.TestRequired.dummy29', index=28,
-      number=29, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy30', full_name='protobuf_unittest.TestRequired.dummy30', index=29,
-      number=30, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy31', full_name='protobuf_unittest.TestRequired.dummy31', index=30,
-      number=31, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy32', full_name='protobuf_unittest.TestRequired.dummy32', index=31,
-      number=32, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='c', full_name='protobuf_unittest.TestRequired.c', index=32,
-      number=33, type=5, cpp_type=1, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='single', full_name='protobuf_unittest.TestRequired.single', index=0,
-      number=1000, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='multi', full_name='protobuf_unittest.TestRequired.multi', index=1,
-      number=1001, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3873,
-  serialized_end=4598,
-)
-
-
-_TESTREQUIREDFOREIGN = _descriptor.Descriptor(
-  name='TestRequiredForeign',
-  full_name='protobuf_unittest.TestRequiredForeign',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_message', full_name='protobuf_unittest.TestRequiredForeign.optional_message', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_message', full_name='protobuf_unittest.TestRequiredForeign.repeated_message', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dummy', full_name='protobuf_unittest.TestRequiredForeign.dummy', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4601,
-  serialized_end=4755,
-)
-
-
-_TESTFOREIGNNESTED = _descriptor.Descriptor(
-  name='TestForeignNested',
-  full_name='protobuf_unittest.TestForeignNested',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foreign_nested', full_name='protobuf_unittest.TestForeignNested.foreign_nested', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4757,
-  serialized_end=4847,
-)
-
-
-_TESTEMPTYMESSAGE = _descriptor.Descriptor(
-  name='TestEmptyMessage',
-  full_name='protobuf_unittest.TestEmptyMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4849,
-  serialized_end=4867,
-)
-
-
-_TESTEMPTYMESSAGEWITHEXTENSIONS = _descriptor.Descriptor(
-  name='TestEmptyMessageWithExtensions',
-  full_name='protobuf_unittest.TestEmptyMessageWithExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4869,
-  serialized_end=4911,
-)
-
-
-_TESTMULTIPLEEXTENSIONRANGES = _descriptor.Descriptor(
-  name='TestMultipleExtensionRanges',
-  full_name='protobuf_unittest.TestMultipleExtensionRanges',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(42, 43), (4143, 4244), (65536, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=4913,
-  serialized_end=4968,
-)
-
-
-_TESTREALLYLARGETAGNUMBER = _descriptor.Descriptor(
-  name='TestReallyLargeTagNumber',
-  full_name='protobuf_unittest.TestReallyLargeTagNumber',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestReallyLargeTagNumber.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='protobuf_unittest.TestReallyLargeTagNumber.bb', index=1,
-      number=268435455, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4970,
-  serialized_end=5022,
-)
-
-
-_TESTRECURSIVEMESSAGE = _descriptor.Descriptor(
-  name='TestRecursiveMessage',
-  full_name='protobuf_unittest.TestRecursiveMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestRecursiveMessage.a', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='i', full_name='protobuf_unittest.TestRecursiveMessage.i', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5024,
-  serialized_end=5109,
-)
-
-
-_TESTMUTUALRECURSIONA = _descriptor.Descriptor(
-  name='TestMutualRecursionA',
-  full_name='protobuf_unittest.TestMutualRecursionA',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='protobuf_unittest.TestMutualRecursionA.bb', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5111,
-  serialized_end=5186,
-)
-
-
-_TESTMUTUALRECURSIONB = _descriptor.Descriptor(
-  name='TestMutualRecursionB',
-  full_name='protobuf_unittest.TestMutualRecursionB',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestMutualRecursionB.a', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='protobuf_unittest.TestMutualRecursionB.optional_int32', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5188,
-  serialized_end=5286,
-)
-
-
-_TESTDUPFIELDNUMBER_FOO = _descriptor.Descriptor(
-  name='Foo',
-  full_name='protobuf_unittest.TestDupFieldNumber.Foo',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestDupFieldNumber.Foo.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5434,
-  serialized_end=5450,
-)
-
-_TESTDUPFIELDNUMBER_BAR = _descriptor.Descriptor(
-  name='Bar',
-  full_name='protobuf_unittest.TestDupFieldNumber.Bar',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestDupFieldNumber.Bar.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5452,
-  serialized_end=5468,
-)
-
-_TESTDUPFIELDNUMBER = _descriptor.Descriptor(
-  name='TestDupFieldNumber',
-  full_name='protobuf_unittest.TestDupFieldNumber',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestDupFieldNumber.a', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo', full_name='protobuf_unittest.TestDupFieldNumber.foo', index=1,
-      number=2, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bar', full_name='protobuf_unittest.TestDupFieldNumber.bar', index=2,
-      number=3, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTDUPFIELDNUMBER_FOO, _TESTDUPFIELDNUMBER_BAR, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5289,
-  serialized_end=5468,
-)
-
-
-_TESTEAGERMESSAGE = _descriptor.Descriptor(
-  name='TestEagerMessage',
-  full_name='protobuf_unittest.TestEagerMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='sub_message', full_name='protobuf_unittest.TestEagerMessage.sub_message', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\000'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5470,
-  serialized_end=5546,
-)
-
-
-_TESTLAZYMESSAGE = _descriptor.Descriptor(
-  name='TestLazyMessage',
-  full_name='protobuf_unittest.TestLazyMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='sub_message', full_name='protobuf_unittest.TestLazyMessage.sub_message', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5548,
-  serialized_end=5623,
-)
-
-
-_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='nestedmessage_repeated_int32', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_int32', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nestedmessage_repeated_foreignmessage', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_foreignmessage', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5747,
-  serialized_end=5882,
-)
-
-_TESTNESTEDMESSAGEHASBITS = _descriptor.Descriptor(
-  name='TestNestedMessageHasBits',
-  full_name='protobuf_unittest.TestNestedMessageHasBits',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='protobuf_unittest.TestNestedMessageHasBits.optional_nested_message', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5626,
-  serialized_end=5882,
-)
-
-
-_TESTCAMELCASEFIELDNAMES = _descriptor.Descriptor(
-  name='TestCamelCaseFieldNames',
-  full_name='protobuf_unittest.TestCamelCaseFieldNames',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='PrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.PrimitiveField', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='StringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringField', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='EnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.EnumField', index=2,
-      number=3, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=4,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='MessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.MessageField', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='StringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringPieceField', index=4,
-      number=5, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='CordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.CordField', index=5,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='RepeatedPrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedPrimitiveField', index=6,
-      number=7, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='RepeatedStringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringField', index=7,
-      number=8, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='RepeatedEnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedEnumField', index=8,
-      number=9, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='RepeatedMessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedMessageField', index=9,
-      number=10, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='RepeatedStringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringPieceField', index=10,
-      number=11, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='RepeatedCordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedCordField', index=11,
-      number=12, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=5885,
-  serialized_end=6370,
-)
-
-
-_TESTFIELDORDERINGS_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.TestFieldOrderings.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='oo', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.oo', index=0,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.bb', index=1,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6535,
-  serialized_end=6574,
-)
-
-_TESTFIELDORDERINGS = _descriptor.Descriptor(
-  name='TestFieldOrderings',
-  full_name='protobuf_unittest.TestFieldOrderings',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='my_string', full_name='protobuf_unittest.TestFieldOrderings.my_string', index=0,
-      number=11, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='my_int', full_name='protobuf_unittest.TestFieldOrderings.my_int', index=1,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='my_float', full_name='protobuf_unittest.TestFieldOrderings.my_float', index=2,
-      number=101, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='protobuf_unittest.TestFieldOrderings.optional_nested_message', index=3,
-      number=200, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTFIELDORDERINGS_NESTEDMESSAGE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(2, 11), (12, 101), ],
-  oneofs=[
-  ],
-  serialized_start=6373,
-  serialized_end=6586,
-)
-
-
-_TESTEXTREMEDEFAULTVALUES = _descriptor.Descriptor(
-  name='TestExtremeDefaultValues',
-  full_name='protobuf_unittest.TestExtremeDefaultValues',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='escaped_bytes', full_name='protobuf_unittest.TestExtremeDefaultValues.escaped_bytes', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("\000\001\007\010\014\n\r\t\013\\\'\"\376"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='large_uint32', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint32', index=1,
-      number=2, type=13, cpp_type=3, label=1,
-      has_default_value=True, default_value=4294967295,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='large_uint64', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint64', index=2,
-      number=3, type=4, cpp_type=4, label=1,
-      has_default_value=True, default_value=18446744073709551615,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int32', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=-2147483647,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int64', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=True, default_value=-9223372036854775807,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='really_small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int32', index=5,
-      number=21, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=-2147483648,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='really_small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int64', index=6,
-      number=22, type=3, cpp_type=2, label=1,
-      has_default_value=True, default_value=-9223372036854775808,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='utf8_string', full_name='protobuf_unittest.TestExtremeDefaultValues.utf8_string', index=7,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("\341\210\264").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='zero_float', full_name='protobuf_unittest.TestExtremeDefaultValues.zero_float', index=8,
-      number=7, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.one_float', index=9,
-      number=8, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(1),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='small_float', full_name='protobuf_unittest.TestExtremeDefaultValues.small_float', index=10,
-      number=9, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(1.5),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='negative_one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_one_float', index=11,
-      number=10, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(-1),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='negative_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_float', index=12,
-      number=11, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(-1.5),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='large_float', full_name='protobuf_unittest.TestExtremeDefaultValues.large_float', index=13,
-      number=12, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(2e+08),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='small_negative_float', full_name='protobuf_unittest.TestExtremeDefaultValues.small_negative_float', index=14,
-      number=13, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=float(-8e-28),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_double', index=15,
-      number=14, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=1e10000,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='neg_inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_double', index=16,
-      number=15, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=-1e10000,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nan_double', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_double', index=17,
-      number=16, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=(1e10000 * 0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_float', index=18,
-      number=17, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=1e10000,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='neg_inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_float', index=19,
-      number=18, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=-1e10000,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='nan_float', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_float', index=20,
-      number=19, type=2, cpp_type=6, label=1,
-      has_default_value=True, default_value=(1e10000 * 0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cpp_trigraph', full_name='protobuf_unittest.TestExtremeDefaultValues.cpp_trigraph', index=21,
-      number=20, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("? ? ?? ?? ??? ??/ ??-").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_with_zero', index=22,
-      number=23, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("hel\000lo").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bytes_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.bytes_with_zero', index=23,
-      number=24, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("wor\000ld"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_piece_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_piece_with_zero', index=24,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("ab\000c").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='cord_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.cord_with_zero', index=25,
-      number=26, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("12\0003").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='replacement_string', full_name='protobuf_unittest.TestExtremeDefaultValues.replacement_string', index=26,
-      number=27, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("${unknown}").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=6589,
-  serialized_end=7539,
-)
-
-
-_SPARSEENUMMESSAGE = _descriptor.Descriptor(
-  name='SparseEnumMessage',
-  full_name='protobuf_unittest.SparseEnumMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='sparse_enum', full_name='protobuf_unittest.SparseEnumMessage.sparse_enum', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=123,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7541,
-  serialized_end=7616,
-)
-
-
-_ONESTRING = _descriptor.Descriptor(
-  name='OneString',
-  full_name='protobuf_unittest.OneString',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.OneString.data', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7618,
-  serialized_end=7643,
-)
-
-
-_MORESTRING = _descriptor.Descriptor(
-  name='MoreString',
-  full_name='protobuf_unittest.MoreString',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.MoreString.data', index=0,
-      number=1, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7645,
-  serialized_end=7671,
-)
-
-
-_ONEBYTES = _descriptor.Descriptor(
-  name='OneBytes',
-  full_name='protobuf_unittest.OneBytes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.OneBytes.data', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7673,
-  serialized_end=7697,
-)
-
-
-_MOREBYTES = _descriptor.Descriptor(
-  name='MoreBytes',
-  full_name='protobuf_unittest.MoreBytes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.MoreBytes.data', index=0,
-      number=1, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7699,
-  serialized_end=7724,
-)
-
-
-_INT32MESSAGE = _descriptor.Descriptor(
-  name='Int32Message',
-  full_name='protobuf_unittest.Int32Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.Int32Message.data', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7726,
-  serialized_end=7754,
-)
-
-
-_UINT32MESSAGE = _descriptor.Descriptor(
-  name='Uint32Message',
-  full_name='protobuf_unittest.Uint32Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.Uint32Message.data', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7756,
-  serialized_end=7785,
-)
-
-
-_INT64MESSAGE = _descriptor.Descriptor(
-  name='Int64Message',
-  full_name='protobuf_unittest.Int64Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.Int64Message.data', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7787,
-  serialized_end=7815,
-)
-
-
-_UINT64MESSAGE = _descriptor.Descriptor(
-  name='Uint64Message',
-  full_name='protobuf_unittest.Uint64Message',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.Uint64Message.data', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7817,
-  serialized_end=7846,
-)
-
-
-_BOOLMESSAGE = _descriptor.Descriptor(
-  name='BoolMessage',
-  full_name='protobuf_unittest.BoolMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='protobuf_unittest.BoolMessage.data', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=7848,
-  serialized_end=7875,
-)
-
-
-_TESTONEOF_FOOGROUP = _descriptor.Descriptor(
-  name='FooGroup',
-  full_name='protobuf_unittest.TestOneof.FooGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestOneof.FooGroup.a', index=0,
-      number=5, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='b', full_name='protobuf_unittest.TestOneof.FooGroup.b', index=1,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=8047,
-  serialized_end=8079,
-)
-
-_TESTONEOF = _descriptor.Descriptor(
-  name='TestOneof',
-  full_name='protobuf_unittest.TestOneof',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo_int', full_name='protobuf_unittest.TestOneof.foo_int', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_string', full_name='protobuf_unittest.TestOneof.foo_string', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_message', full_name='protobuf_unittest.TestOneof.foo_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foogroup', full_name='protobuf_unittest.TestOneof.foogroup', index=3,
-      number=4, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTONEOF_FOOGROUP, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='foo', full_name='protobuf_unittest.TestOneof.foo',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=7878,
-  serialized_end=8086,
-)
-
-
-_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP = _descriptor.Descriptor(
-  name='FooGroup',
-  full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.a', index=0,
-      number=5, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='b', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.b', index=1,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=8047,
-  serialized_end=8079,
-)
-
-_TESTONEOFBACKWARDSCOMPATIBLE = _descriptor.Descriptor(
-  name='TestOneofBackwardsCompatible',
-  full_name='protobuf_unittest.TestOneofBackwardsCompatible',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo_int', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_int', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_string', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_string', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_message', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foogroup', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foogroup', index=3,
-      number=4, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=8089,
-  serialized_end=8320,
-)
-
-
-_TESTONEOF2_FOOGROUP = _descriptor.Descriptor(
-  name='FooGroup',
-  full_name='protobuf_unittest.TestOneof2.FooGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestOneof2.FooGroup.a', index=0,
-      number=9, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='b', full_name='protobuf_unittest.TestOneof2.FooGroup.b', index=1,
-      number=10, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=8981,
-  serialized_end=9013,
-)
-
-_TESTONEOF2_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.TestOneof2.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='qux_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.qux_int', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='corge_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.corge_int', index=1,
-      number=2, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=9015,
-  serialized_end=9066,
-)
-
-_TESTONEOF2 = _descriptor.Descriptor(
-  name='TestOneof2',
-  full_name='protobuf_unittest.TestOneof2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo_int', full_name='protobuf_unittest.TestOneof2.foo_int', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_string', full_name='protobuf_unittest.TestOneof2.foo_string', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_cord', full_name='protobuf_unittest.TestOneof2.foo_cord', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='foo_string_piece', full_name='protobuf_unittest.TestOneof2.foo_string_piece', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='foo_bytes', full_name='protobuf_unittest.TestOneof2.foo_bytes', index=4,
-      number=5, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_enum', full_name='protobuf_unittest.TestOneof2.foo_enum', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_message', full_name='protobuf_unittest.TestOneof2.foo_message', index=6,
-      number=7, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foogroup', full_name='protobuf_unittest.TestOneof2.foogroup', index=7,
-      number=8, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_lazy_message', full_name='protobuf_unittest.TestOneof2.foo_lazy_message', index=8,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='bar_int', full_name='protobuf_unittest.TestOneof2.bar_int', index=9,
-      number=12, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=5,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bar_string', full_name='protobuf_unittest.TestOneof2.bar_string', index=10,
-      number=13, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("STRING").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bar_cord', full_name='protobuf_unittest.TestOneof2.bar_cord', index=11,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("CORD").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='bar_string_piece', full_name='protobuf_unittest.TestOneof2.bar_string_piece', index=12,
-      number=15, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("SPIECE").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='bar_bytes', full_name='protobuf_unittest.TestOneof2.bar_bytes', index=13,
-      number=16, type=12, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("BYTES"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bar_enum', full_name='protobuf_unittest.TestOneof2.bar_enum', index=14,
-      number=17, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=2,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='baz_int', full_name='protobuf_unittest.TestOneof2.baz_int', index=15,
-      number=18, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='baz_string', full_name='protobuf_unittest.TestOneof2.baz_string', index=16,
-      number=19, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("BAZ").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTONEOF2_FOOGROUP, _TESTONEOF2_NESTEDMESSAGE, ],
-  enum_types=[
-    _TESTONEOF2_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='foo', full_name='protobuf_unittest.TestOneof2.foo',
-      index=0, containing_type=None, fields=[]),
-    _descriptor.OneofDescriptor(
-      name='bar', full_name='protobuf_unittest.TestOneof2.bar',
-      index=1, containing_type=None, fields=[]),
-  ],
-  serialized_start=8323,
-  serialized_end=9121,
-)
-
-
-_TESTREQUIREDONEOF_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='protobuf_unittest.TestRequiredOneof.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='required_double', full_name='protobuf_unittest.TestRequiredOneof.NestedMessage.required_double', index=0,
-      number=1, type=1, cpp_type=5, label=2,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=9261,
-  serialized_end=9301,
-)
-
-_TESTREQUIREDONEOF = _descriptor.Descriptor(
-  name='TestRequiredOneof',
-  full_name='protobuf_unittest.TestRequiredOneof',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='foo_int', full_name='protobuf_unittest.TestRequiredOneof.foo_int', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_string', full_name='protobuf_unittest.TestRequiredOneof.foo_string', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='foo_message', full_name='protobuf_unittest.TestRequiredOneof.foo_message', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTREQUIREDONEOF_NESTEDMESSAGE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='foo', full_name='protobuf_unittest.TestRequiredOneof.foo',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=9124,
-  serialized_end=9308,
-)
-
-
-_TESTPACKEDTYPES = _descriptor.Descriptor(
-  name='TestPackedTypes',
-  full_name='protobuf_unittest.TestPackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='packed_int32', full_name='protobuf_unittest.TestPackedTypes.packed_int32', index=0,
-      number=90, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_int64', full_name='protobuf_unittest.TestPackedTypes.packed_int64', index=1,
-      number=91, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_uint32', full_name='protobuf_unittest.TestPackedTypes.packed_uint32', index=2,
-      number=92, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_uint64', full_name='protobuf_unittest.TestPackedTypes.packed_uint64', index=3,
-      number=93, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sint32', full_name='protobuf_unittest.TestPackedTypes.packed_sint32', index=4,
-      number=94, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sint64', full_name='protobuf_unittest.TestPackedTypes.packed_sint64', index=5,
-      number=95, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_fixed32', full_name='protobuf_unittest.TestPackedTypes.packed_fixed32', index=6,
-      number=96, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_fixed64', full_name='protobuf_unittest.TestPackedTypes.packed_fixed64', index=7,
-      number=97, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sfixed32', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed32', index=8,
-      number=98, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sfixed64', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed64', index=9,
-      number=99, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_float', full_name='protobuf_unittest.TestPackedTypes.packed_float', index=10,
-      number=100, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_double', full_name='protobuf_unittest.TestPackedTypes.packed_double', index=11,
-      number=101, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_bool', full_name='protobuf_unittest.TestPackedTypes.packed_bool', index=12,
-      number=102, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_enum', full_name='protobuf_unittest.TestPackedTypes.packed_enum', index=13,
-      number=103, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=9311,
-  serialized_end=9737,
-)
-
-
-_TESTUNPACKEDTYPES = _descriptor.Descriptor(
-  name='TestUnpackedTypes',
-  full_name='protobuf_unittest.TestUnpackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='unpacked_int32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int32', index=0,
-      number=90, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_int64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int64', index=1,
-      number=91, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_uint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint32', index=2,
-      number=92, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_uint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint64', index=3,
-      number=93, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_sint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint32', index=4,
-      number=94, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_sint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint64', index=5,
-      number=95, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_fixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed32', index=6,
-      number=96, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_fixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed64', index=7,
-      number=97, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_sfixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed32', index=8,
-      number=98, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_sfixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed64', index=9,
-      number=99, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_float', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_float', index=10,
-      number=100, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_double', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_double', index=11,
-      number=101, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_bool', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_bool', index=12,
-      number=102, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='unpacked_enum', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_enum', index=13,
-      number=103, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=9740,
-  serialized_end=10196,
-)
-
-
-_TESTPACKEDEXTENSIONS = _descriptor.Descriptor(
-  name='TestPackedExtensions',
-  full_name='protobuf_unittest.TestPackedExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=10198,
-  serialized_end=10230,
-)
-
-
-_TESTUNPACKEDEXTENSIONS = _descriptor.Descriptor(
-  name='TestUnpackedExtensions',
-  full_name='protobuf_unittest.TestUnpackedExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=10232,
-  serialized_end=10266,
-)
-
-
-_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE = _descriptor.Descriptor(
-  name='DynamicMessageType',
-  full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='dynamic_field', full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType.dynamic_field', index=0,
-      number=2100, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=10689,
-  serialized_end=10733,
-)
-
-_TESTDYNAMICEXTENSIONS = _descriptor.Descriptor(
-  name='TestDynamicExtensions',
-  full_name='protobuf_unittest.TestDynamicExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='scalar_extension', full_name='protobuf_unittest.TestDynamicExtensions.scalar_extension', index=0,
-      number=2000, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.enum_extension', index=1,
-      number=2001, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=4,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dynamic_enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_enum_extension', index=2,
-      number=2002, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=2200,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='message_extension', full_name='protobuf_unittest.TestDynamicExtensions.message_extension', index=3,
-      number=2003, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dynamic_message_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_message_extension', index=4,
-      number=2004, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_extension', full_name='protobuf_unittest.TestDynamicExtensions.repeated_extension', index=5,
-      number=2005, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='packed_extension', full_name='protobuf_unittest.TestDynamicExtensions.packed_extension', index=6,
-      number=2006, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE, ],
-  enum_types=[
-    _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=10269,
-  serialized_end=10806,
-)
-
-
-_TESTREPEATEDSCALARDIFFERENTTAGSIZES = _descriptor.Descriptor(
-  name='TestRepeatedScalarDifferentTagSizes',
-  full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed32', index=0,
-      number=12, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int32', index=1,
-      number=13, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed64', index=2,
-      number=2046, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int64', index=3,
-      number=2047, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_float', index=4,
-      number=262142, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_uint64', index=5,
-      number=262143, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=10809,
-  serialized_end=11001,
-)
-
-
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1 = _descriptor.Descriptor(
-  name='Group1',
-  full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1.field1', index=0,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=11794,
-  serialized_end=11851,
-)
-
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2 = _descriptor.Descriptor(
-  name='Group2',
-  full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2.field1', index=0,
-      number=21, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=11853,
-  serialized_end=11910,
-)
-
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR = _descriptor.Descriptor(
-  name='RepeatedFieldsGenerator',
-  full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field1', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='field2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field2', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='field3', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field3', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='group1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group1', index=3,
-      number=10, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='group2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group2', index=4,
-      number=20, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='ext1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext1', index=5,
-      number=1000, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='ext2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext2', index=6,
-      number=1001, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1, _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=11356,
-  serialized_end=11910,
-)
-
-_TESTPARSINGMERGE_OPTIONALGROUP = _descriptor.Descriptor(
-  name='OptionalGroup',
-  full_name='protobuf_unittest.TestParsingMerge.OptionalGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_group_all_types', full_name='protobuf_unittest.TestParsingMerge.OptionalGroup.optional_group_all_types', index=0,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=11912,
-  serialized_end=11994,
-)
-
-_TESTPARSINGMERGE_REPEATEDGROUP = _descriptor.Descriptor(
-  name='RepeatedGroup',
-  full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_group_all_types', full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup.repeated_group_all_types', index=0,
-      number=21, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=11996,
-  serialized_end=12078,
-)
-
-_TESTPARSINGMERGE = _descriptor.Descriptor(
-  name='TestParsingMerge',
-  full_name='protobuf_unittest.TestParsingMerge',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='required_all_types', full_name='protobuf_unittest.TestParsingMerge.required_all_types', index=0,
-      number=1, type=11, cpp_type=10, label=2,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_all_types', full_name='protobuf_unittest.TestParsingMerge.optional_all_types', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_all_types', full_name='protobuf_unittest.TestParsingMerge.repeated_all_types', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optionalgroup', full_name='protobuf_unittest.TestParsingMerge.optionalgroup', index=3,
-      number=10, type=10, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeatedgroup', full_name='protobuf_unittest.TestParsingMerge.repeatedgroup', index=4,
-      number=20, type=10, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-    _descriptor.FieldDescriptor(
-      name='optional_ext', full_name='protobuf_unittest.TestParsingMerge.optional_ext', index=0,
-      number=1000, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_ext', full_name='protobuf_unittest.TestParsingMerge.repeated_ext', index=1,
-      number=1001, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=True, extension_scope=None,
-      options=None),
-  ],
-  nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR, _TESTPARSINGMERGE_OPTIONALGROUP, _TESTPARSINGMERGE_REPEATEDGROUP, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=True,
-  syntax='proto2',
-  extension_ranges=[(1000, 536870912), ],
-  oneofs=[
-  ],
-  serialized_start=11004,
-  serialized_end=12275,
-)
-
-
-_TESTCOMMENTINJECTIONMESSAGE = _descriptor.Descriptor(
-  name='TestCommentInjectionMessage',
-  full_name='protobuf_unittest.TestCommentInjectionMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='a', full_name='protobuf_unittest.TestCommentInjectionMessage.a', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=True, default_value=_b("*/ <- Neither should this.").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12277,
-  serialized_end=12345,
-)
-
-
-_FOOREQUEST = _descriptor.Descriptor(
-  name='FooRequest',
-  full_name='protobuf_unittest.FooRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12347,
-  serialized_end=12359,
-)
-
-
-_FOORESPONSE = _descriptor.Descriptor(
-  name='FooResponse',
-  full_name='protobuf_unittest.FooResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12361,
-  serialized_end=12374,
-)
-
-
-_FOOCLIENTMESSAGE = _descriptor.Descriptor(
-  name='FooClientMessage',
-  full_name='protobuf_unittest.FooClientMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12376,
-  serialized_end=12394,
-)
-
-
-_FOOSERVERMESSAGE = _descriptor.Descriptor(
-  name='FooServerMessage',
-  full_name='protobuf_unittest.FooServerMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12396,
-  serialized_end=12414,
-)
-
-
-_BARREQUEST = _descriptor.Descriptor(
-  name='BarRequest',
-  full_name='protobuf_unittest.BarRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12416,
-  serialized_end=12428,
-)
-
-
-_BARRESPONSE = _descriptor.Descriptor(
-  name='BarResponse',
-  full_name='protobuf_unittest.BarResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto2',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=12430,
-  serialized_end=12443,
-)
-
-_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES
-_TESTALLTYPES_OPTIONALGROUP.containing_type = _TESTALLTYPES
-_TESTALLTYPES_REPEATEDGROUP.containing_type = _TESTALLTYPES
-_TESTALLTYPES.fields_by_name['optionalgroup'].message_type = _TESTALLTYPES_OPTIONALGROUP
-_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['optional_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeatedgroup'].message_type = _TESTALLTYPES_REPEATEDGROUP
-_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['repeated_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['default_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['default_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['default_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_uint32'])
-_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_nested_message'])
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_string'])
-_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_bytes'])
-_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_NESTEDTESTALLTYPES.fields_by_name['child'].message_type = _NESTEDTESTALLTYPES
-_NESTEDTESTALLTYPES.fields_by_name['payload'].message_type = _TESTALLTYPES
-_NESTEDTESTALLTYPES.fields_by_name['repeated_child'].message_type = _NESTEDTESTALLTYPES
-_TESTREQUIREDFOREIGN.fields_by_name['optional_message'].message_type = _TESTREQUIRED
-_TESTREQUIREDFOREIGN.fields_by_name['repeated_message'].message_type = _TESTREQUIRED
-_TESTFOREIGNNESTED.fields_by_name['foreign_nested'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTRECURSIVEMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMESSAGE
-_TESTMUTUALRECURSIONA.fields_by_name['bb'].message_type = _TESTMUTUALRECURSIONB
-_TESTMUTUALRECURSIONB.fields_by_name['a'].message_type = _TESTMUTUALRECURSIONA
-_TESTDUPFIELDNUMBER_FOO.containing_type = _TESTDUPFIELDNUMBER
-_TESTDUPFIELDNUMBER_BAR.containing_type = _TESTDUPFIELDNUMBER
-_TESTDUPFIELDNUMBER.fields_by_name['foo'].message_type = _TESTDUPFIELDNUMBER_FOO
-_TESTDUPFIELDNUMBER.fields_by_name['bar'].message_type = _TESTDUPFIELDNUMBER_BAR
-_TESTEAGERMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES
-_TESTLAZYMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES
-_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.fields_by_name['nestedmessage_repeated_foreignmessage'].message_type = _FOREIGNMESSAGE
-_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.containing_type = _TESTNESTEDMESSAGEHASBITS
-_TESTNESTEDMESSAGEHASBITS.fields_by_name['optional_nested_message'].message_type = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE
-_TESTCAMELCASEFIELDNAMES.fields_by_name['EnumField'].enum_type = _FOREIGNENUM
-_TESTCAMELCASEFIELDNAMES.fields_by_name['MessageField'].message_type = _FOREIGNMESSAGE
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedEnumField'].enum_type = _FOREIGNENUM
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedMessageField'].message_type = _FOREIGNMESSAGE
-_TESTFIELDORDERINGS_NESTEDMESSAGE.containing_type = _TESTFIELDORDERINGS
-_TESTFIELDORDERINGS.fields_by_name['optional_nested_message'].message_type = _TESTFIELDORDERINGS_NESTEDMESSAGE
-_SPARSEENUMMESSAGE.fields_by_name['sparse_enum'].enum_type = _TESTSPARSEENUM
-_TESTONEOF_FOOGROUP.containing_type = _TESTONEOF
-_TESTONEOF.fields_by_name['foo_message'].message_type = _TESTALLTYPES
-_TESTONEOF.fields_by_name['foogroup'].message_type = _TESTONEOF_FOOGROUP
-_TESTONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF.fields_by_name['foo_int'])
-_TESTONEOF.fields_by_name['foo_int'].containing_oneof = _TESTONEOF.oneofs_by_name['foo']
-_TESTONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF.fields_by_name['foo_string'])
-_TESTONEOF.fields_by_name['foo_string'].containing_oneof = _TESTONEOF.oneofs_by_name['foo']
-_TESTONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF.fields_by_name['foo_message'])
-_TESTONEOF.fields_by_name['foo_message'].containing_oneof = _TESTONEOF.oneofs_by_name['foo']
-_TESTONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF.fields_by_name['foogroup'])
-_TESTONEOF.fields_by_name['foogroup'].containing_oneof = _TESTONEOF.oneofs_by_name['foo']
-_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP.containing_type = _TESTONEOFBACKWARDSCOMPATIBLE
-_TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foo_message'].message_type = _TESTALLTYPES
-_TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foogroup'].message_type = _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP
-_TESTONEOF2_FOOGROUP.containing_type = _TESTONEOF2
-_TESTONEOF2_NESTEDMESSAGE.containing_type = _TESTONEOF2
-_TESTONEOF2.fields_by_name['foo_enum'].enum_type = _TESTONEOF2_NESTEDENUM
-_TESTONEOF2.fields_by_name['foo_message'].message_type = _TESTONEOF2_NESTEDMESSAGE
-_TESTONEOF2.fields_by_name['foogroup'].message_type = _TESTONEOF2_FOOGROUP
-_TESTONEOF2.fields_by_name['foo_lazy_message'].message_type = _TESTONEOF2_NESTEDMESSAGE
-_TESTONEOF2.fields_by_name['bar_enum'].enum_type = _TESTONEOF2_NESTEDENUM
-_TESTONEOF2_NESTEDENUM.containing_type = _TESTONEOF2
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_int'])
-_TESTONEOF2.fields_by_name['foo_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_string'])
-_TESTONEOF2.fields_by_name['foo_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_cord'])
-_TESTONEOF2.fields_by_name['foo_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_string_piece'])
-_TESTONEOF2.fields_by_name['foo_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_bytes'])
-_TESTONEOF2.fields_by_name['foo_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_enum'])
-_TESTONEOF2.fields_by_name['foo_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_message'])
-_TESTONEOF2.fields_by_name['foo_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foogroup'])
-_TESTONEOF2.fields_by_name['foogroup'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['foo'].fields.append(
-  _TESTONEOF2.fields_by_name['foo_lazy_message'])
-_TESTONEOF2.fields_by_name['foo_lazy_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_int'])
-_TESTONEOF2.fields_by_name['bar_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_string'])
-_TESTONEOF2.fields_by_name['bar_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_cord'])
-_TESTONEOF2.fields_by_name['bar_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_string_piece'])
-_TESTONEOF2.fields_by_name['bar_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_bytes'])
-_TESTONEOF2.fields_by_name['bar_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTONEOF2.oneofs_by_name['bar'].fields.append(
-  _TESTONEOF2.fields_by_name['bar_enum'])
-_TESTONEOF2.fields_by_name['bar_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar']
-_TESTREQUIREDONEOF_NESTEDMESSAGE.containing_type = _TESTREQUIREDONEOF
-_TESTREQUIREDONEOF.fields_by_name['foo_message'].message_type = _TESTREQUIREDONEOF_NESTEDMESSAGE
-_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTREQUIREDONEOF.fields_by_name['foo_int'])
-_TESTREQUIREDONEOF.fields_by_name['foo_int'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo']
-_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTREQUIREDONEOF.fields_by_name['foo_string'])
-_TESTREQUIREDONEOF.fields_by_name['foo_string'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo']
-_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append(
-  _TESTREQUIREDONEOF.fields_by_name['foo_message'])
-_TESTREQUIREDONEOF.fields_by_name['foo_message'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo']
-_TESTPACKEDTYPES.fields_by_name['packed_enum'].enum_type = _FOREIGNENUM
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum'].enum_type = _FOREIGNENUM
-_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE.containing_type = _TESTDYNAMICEXTENSIONS
-_TESTDYNAMICEXTENSIONS.fields_by_name['enum_extension'].enum_type = _FOREIGNENUM
-_TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_enum_extension'].enum_type = _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE
-_TESTDYNAMICEXTENSIONS.fields_by_name['message_extension'].message_type = _FOREIGNMESSAGE
-_TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_message_extension'].message_type = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE
-_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE.containing_type = _TESTDYNAMICEXTENSIONS
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.fields_by_name['field1'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.fields_by_name['field1'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field1'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field2'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field3'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group1'].message_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group2'].message_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext1'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext2'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.containing_type = _TESTPARSINGMERGE
-_TESTPARSINGMERGE_OPTIONALGROUP.fields_by_name['optional_group_all_types'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_OPTIONALGROUP.containing_type = _TESTPARSINGMERGE
-_TESTPARSINGMERGE_REPEATEDGROUP.fields_by_name['repeated_group_all_types'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE_REPEATEDGROUP.containing_type = _TESTPARSINGMERGE
-_TESTPARSINGMERGE.fields_by_name['required_all_types'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE.fields_by_name['optional_all_types'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE.fields_by_name['repeated_all_types'].message_type = _TESTALLTYPES
-_TESTPARSINGMERGE.fields_by_name['optionalgroup'].message_type = _TESTPARSINGMERGE_OPTIONALGROUP
-_TESTPARSINGMERGE.fields_by_name['repeatedgroup'].message_type = _TESTPARSINGMERGE_REPEATEDGROUP
-DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES
-DESCRIPTOR.message_types_by_name['NestedTestAllTypes'] = _NESTEDTESTALLTYPES
-DESCRIPTOR.message_types_by_name['TestDeprecatedFields'] = _TESTDEPRECATEDFIELDS
-DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
-DESCRIPTOR.message_types_by_name['TestReservedFields'] = _TESTRESERVEDFIELDS
-DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS
-DESCRIPTOR.message_types_by_name['OptionalGroup_extension'] = _OPTIONALGROUP_EXTENSION
-DESCRIPTOR.message_types_by_name['RepeatedGroup_extension'] = _REPEATEDGROUP_EXTENSION
-DESCRIPTOR.message_types_by_name['TestNestedExtension'] = _TESTNESTEDEXTENSION
-DESCRIPTOR.message_types_by_name['TestRequired'] = _TESTREQUIRED
-DESCRIPTOR.message_types_by_name['TestRequiredForeign'] = _TESTREQUIREDFOREIGN
-DESCRIPTOR.message_types_by_name['TestForeignNested'] = _TESTFOREIGNNESTED
-DESCRIPTOR.message_types_by_name['TestEmptyMessage'] = _TESTEMPTYMESSAGE
-DESCRIPTOR.message_types_by_name['TestEmptyMessageWithExtensions'] = _TESTEMPTYMESSAGEWITHEXTENSIONS
-DESCRIPTOR.message_types_by_name['TestMultipleExtensionRanges'] = _TESTMULTIPLEEXTENSIONRANGES
-DESCRIPTOR.message_types_by_name['TestReallyLargeTagNumber'] = _TESTREALLYLARGETAGNUMBER
-DESCRIPTOR.message_types_by_name['TestRecursiveMessage'] = _TESTRECURSIVEMESSAGE
-DESCRIPTOR.message_types_by_name['TestMutualRecursionA'] = _TESTMUTUALRECURSIONA
-DESCRIPTOR.message_types_by_name['TestMutualRecursionB'] = _TESTMUTUALRECURSIONB
-DESCRIPTOR.message_types_by_name['TestDupFieldNumber'] = _TESTDUPFIELDNUMBER
-DESCRIPTOR.message_types_by_name['TestEagerMessage'] = _TESTEAGERMESSAGE
-DESCRIPTOR.message_types_by_name['TestLazyMessage'] = _TESTLAZYMESSAGE
-DESCRIPTOR.message_types_by_name['TestNestedMessageHasBits'] = _TESTNESTEDMESSAGEHASBITS
-DESCRIPTOR.message_types_by_name['TestCamelCaseFieldNames'] = _TESTCAMELCASEFIELDNAMES
-DESCRIPTOR.message_types_by_name['TestFieldOrderings'] = _TESTFIELDORDERINGS
-DESCRIPTOR.message_types_by_name['TestExtremeDefaultValues'] = _TESTEXTREMEDEFAULTVALUES
-DESCRIPTOR.message_types_by_name['SparseEnumMessage'] = _SPARSEENUMMESSAGE
-DESCRIPTOR.message_types_by_name['OneString'] = _ONESTRING
-DESCRIPTOR.message_types_by_name['MoreString'] = _MORESTRING
-DESCRIPTOR.message_types_by_name['OneBytes'] = _ONEBYTES
-DESCRIPTOR.message_types_by_name['MoreBytes'] = _MOREBYTES
-DESCRIPTOR.message_types_by_name['Int32Message'] = _INT32MESSAGE
-DESCRIPTOR.message_types_by_name['Uint32Message'] = _UINT32MESSAGE
-DESCRIPTOR.message_types_by_name['Int64Message'] = _INT64MESSAGE
-DESCRIPTOR.message_types_by_name['Uint64Message'] = _UINT64MESSAGE
-DESCRIPTOR.message_types_by_name['BoolMessage'] = _BOOLMESSAGE
-DESCRIPTOR.message_types_by_name['TestOneof'] = _TESTONEOF
-DESCRIPTOR.message_types_by_name['TestOneofBackwardsCompatible'] = _TESTONEOFBACKWARDSCOMPATIBLE
-DESCRIPTOR.message_types_by_name['TestOneof2'] = _TESTONEOF2
-DESCRIPTOR.message_types_by_name['TestRequiredOneof'] = _TESTREQUIREDONEOF
-DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES
-DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES
-DESCRIPTOR.message_types_by_name['TestPackedExtensions'] = _TESTPACKEDEXTENSIONS
-DESCRIPTOR.message_types_by_name['TestUnpackedExtensions'] = _TESTUNPACKEDEXTENSIONS
-DESCRIPTOR.message_types_by_name['TestDynamicExtensions'] = _TESTDYNAMICEXTENSIONS
-DESCRIPTOR.message_types_by_name['TestRepeatedScalarDifferentTagSizes'] = _TESTREPEATEDSCALARDIFFERENTTAGSIZES
-DESCRIPTOR.message_types_by_name['TestParsingMerge'] = _TESTPARSINGMERGE
-DESCRIPTOR.message_types_by_name['TestCommentInjectionMessage'] = _TESTCOMMENTINJECTIONMESSAGE
-DESCRIPTOR.message_types_by_name['FooRequest'] = _FOOREQUEST
-DESCRIPTOR.message_types_by_name['FooResponse'] = _FOORESPONSE
-DESCRIPTOR.message_types_by_name['FooClientMessage'] = _FOOCLIENTMESSAGE
-DESCRIPTOR.message_types_by_name['FooServerMessage'] = _FOOSERVERMESSAGE
-DESCRIPTOR.message_types_by_name['BarRequest'] = _BARREQUEST
-DESCRIPTOR.message_types_by_name['BarResponse'] = _BARRESPONSE
-DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM
-DESCRIPTOR.enum_types_by_name['TestEnumWithDupValue'] = _TESTENUMWITHDUPVALUE
-DESCRIPTOR.enum_types_by_name['TestSparseEnum'] = _TESTSPARSEENUM
-DESCRIPTOR.extensions_by_name['optional_int32_extension'] = optional_int32_extension
-DESCRIPTOR.extensions_by_name['optional_int64_extension'] = optional_int64_extension
-DESCRIPTOR.extensions_by_name['optional_uint32_extension'] = optional_uint32_extension
-DESCRIPTOR.extensions_by_name['optional_uint64_extension'] = optional_uint64_extension
-DESCRIPTOR.extensions_by_name['optional_sint32_extension'] = optional_sint32_extension
-DESCRIPTOR.extensions_by_name['optional_sint64_extension'] = optional_sint64_extension
-DESCRIPTOR.extensions_by_name['optional_fixed32_extension'] = optional_fixed32_extension
-DESCRIPTOR.extensions_by_name['optional_fixed64_extension'] = optional_fixed64_extension
-DESCRIPTOR.extensions_by_name['optional_sfixed32_extension'] = optional_sfixed32_extension
-DESCRIPTOR.extensions_by_name['optional_sfixed64_extension'] = optional_sfixed64_extension
-DESCRIPTOR.extensions_by_name['optional_float_extension'] = optional_float_extension
-DESCRIPTOR.extensions_by_name['optional_double_extension'] = optional_double_extension
-DESCRIPTOR.extensions_by_name['optional_bool_extension'] = optional_bool_extension
-DESCRIPTOR.extensions_by_name['optional_string_extension'] = optional_string_extension
-DESCRIPTOR.extensions_by_name['optional_bytes_extension'] = optional_bytes_extension
-DESCRIPTOR.extensions_by_name['optionalgroup_extension'] = optionalgroup_extension
-DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension
-DESCRIPTOR.extensions_by_name['optional_foreign_message_extension'] = optional_foreign_message_extension
-DESCRIPTOR.extensions_by_name['optional_import_message_extension'] = optional_import_message_extension
-DESCRIPTOR.extensions_by_name['optional_nested_enum_extension'] = optional_nested_enum_extension
-DESCRIPTOR.extensions_by_name['optional_foreign_enum_extension'] = optional_foreign_enum_extension
-DESCRIPTOR.extensions_by_name['optional_import_enum_extension'] = optional_import_enum_extension
-DESCRIPTOR.extensions_by_name['optional_string_piece_extension'] = optional_string_piece_extension
-DESCRIPTOR.extensions_by_name['optional_cord_extension'] = optional_cord_extension
-DESCRIPTOR.extensions_by_name['optional_public_import_message_extension'] = optional_public_import_message_extension
-DESCRIPTOR.extensions_by_name['optional_lazy_message_extension'] = optional_lazy_message_extension
-DESCRIPTOR.extensions_by_name['repeated_int32_extension'] = repeated_int32_extension
-DESCRIPTOR.extensions_by_name['repeated_int64_extension'] = repeated_int64_extension
-DESCRIPTOR.extensions_by_name['repeated_uint32_extension'] = repeated_uint32_extension
-DESCRIPTOR.extensions_by_name['repeated_uint64_extension'] = repeated_uint64_extension
-DESCRIPTOR.extensions_by_name['repeated_sint32_extension'] = repeated_sint32_extension
-DESCRIPTOR.extensions_by_name['repeated_sint64_extension'] = repeated_sint64_extension
-DESCRIPTOR.extensions_by_name['repeated_fixed32_extension'] = repeated_fixed32_extension
-DESCRIPTOR.extensions_by_name['repeated_fixed64_extension'] = repeated_fixed64_extension
-DESCRIPTOR.extensions_by_name['repeated_sfixed32_extension'] = repeated_sfixed32_extension
-DESCRIPTOR.extensions_by_name['repeated_sfixed64_extension'] = repeated_sfixed64_extension
-DESCRIPTOR.extensions_by_name['repeated_float_extension'] = repeated_float_extension
-DESCRIPTOR.extensions_by_name['repeated_double_extension'] = repeated_double_extension
-DESCRIPTOR.extensions_by_name['repeated_bool_extension'] = repeated_bool_extension
-DESCRIPTOR.extensions_by_name['repeated_string_extension'] = repeated_string_extension
-DESCRIPTOR.extensions_by_name['repeated_bytes_extension'] = repeated_bytes_extension
-DESCRIPTOR.extensions_by_name['repeatedgroup_extension'] = repeatedgroup_extension
-DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension
-DESCRIPTOR.extensions_by_name['repeated_foreign_message_extension'] = repeated_foreign_message_extension
-DESCRIPTOR.extensions_by_name['repeated_import_message_extension'] = repeated_import_message_extension
-DESCRIPTOR.extensions_by_name['repeated_nested_enum_extension'] = repeated_nested_enum_extension
-DESCRIPTOR.extensions_by_name['repeated_foreign_enum_extension'] = repeated_foreign_enum_extension
-DESCRIPTOR.extensions_by_name['repeated_import_enum_extension'] = repeated_import_enum_extension
-DESCRIPTOR.extensions_by_name['repeated_string_piece_extension'] = repeated_string_piece_extension
-DESCRIPTOR.extensions_by_name['repeated_cord_extension'] = repeated_cord_extension
-DESCRIPTOR.extensions_by_name['repeated_lazy_message_extension'] = repeated_lazy_message_extension
-DESCRIPTOR.extensions_by_name['default_int32_extension'] = default_int32_extension
-DESCRIPTOR.extensions_by_name['default_int64_extension'] = default_int64_extension
-DESCRIPTOR.extensions_by_name['default_uint32_extension'] = default_uint32_extension
-DESCRIPTOR.extensions_by_name['default_uint64_extension'] = default_uint64_extension
-DESCRIPTOR.extensions_by_name['default_sint32_extension'] = default_sint32_extension
-DESCRIPTOR.extensions_by_name['default_sint64_extension'] = default_sint64_extension
-DESCRIPTOR.extensions_by_name['default_fixed32_extension'] = default_fixed32_extension
-DESCRIPTOR.extensions_by_name['default_fixed64_extension'] = default_fixed64_extension
-DESCRIPTOR.extensions_by_name['default_sfixed32_extension'] = default_sfixed32_extension
-DESCRIPTOR.extensions_by_name['default_sfixed64_extension'] = default_sfixed64_extension
-DESCRIPTOR.extensions_by_name['default_float_extension'] = default_float_extension
-DESCRIPTOR.extensions_by_name['default_double_extension'] = default_double_extension
-DESCRIPTOR.extensions_by_name['default_bool_extension'] = default_bool_extension
-DESCRIPTOR.extensions_by_name['default_string_extension'] = default_string_extension
-DESCRIPTOR.extensions_by_name['default_bytes_extension'] = default_bytes_extension
-DESCRIPTOR.extensions_by_name['default_nested_enum_extension'] = default_nested_enum_extension
-DESCRIPTOR.extensions_by_name['default_foreign_enum_extension'] = default_foreign_enum_extension
-DESCRIPTOR.extensions_by_name['default_import_enum_extension'] = default_import_enum_extension
-DESCRIPTOR.extensions_by_name['default_string_piece_extension'] = default_string_piece_extension
-DESCRIPTOR.extensions_by_name['default_cord_extension'] = default_cord_extension
-DESCRIPTOR.extensions_by_name['oneof_uint32_extension'] = oneof_uint32_extension
-DESCRIPTOR.extensions_by_name['oneof_nested_message_extension'] = oneof_nested_message_extension
-DESCRIPTOR.extensions_by_name['oneof_string_extension'] = oneof_string_extension
-DESCRIPTOR.extensions_by_name['oneof_bytes_extension'] = oneof_bytes_extension
-DESCRIPTOR.extensions_by_name['my_extension_string'] = my_extension_string
-DESCRIPTOR.extensions_by_name['my_extension_int'] = my_extension_int
-DESCRIPTOR.extensions_by_name['packed_int32_extension'] = packed_int32_extension
-DESCRIPTOR.extensions_by_name['packed_int64_extension'] = packed_int64_extension
-DESCRIPTOR.extensions_by_name['packed_uint32_extension'] = packed_uint32_extension
-DESCRIPTOR.extensions_by_name['packed_uint64_extension'] = packed_uint64_extension
-DESCRIPTOR.extensions_by_name['packed_sint32_extension'] = packed_sint32_extension
-DESCRIPTOR.extensions_by_name['packed_sint64_extension'] = packed_sint64_extension
-DESCRIPTOR.extensions_by_name['packed_fixed32_extension'] = packed_fixed32_extension
-DESCRIPTOR.extensions_by_name['packed_fixed64_extension'] = packed_fixed64_extension
-DESCRIPTOR.extensions_by_name['packed_sfixed32_extension'] = packed_sfixed32_extension
-DESCRIPTOR.extensions_by_name['packed_sfixed64_extension'] = packed_sfixed64_extension
-DESCRIPTOR.extensions_by_name['packed_float_extension'] = packed_float_extension
-DESCRIPTOR.extensions_by_name['packed_double_extension'] = packed_double_extension
-DESCRIPTOR.extensions_by_name['packed_bool_extension'] = packed_bool_extension
-DESCRIPTOR.extensions_by_name['packed_enum_extension'] = packed_enum_extension
-DESCRIPTOR.extensions_by_name['unpacked_int32_extension'] = unpacked_int32_extension
-DESCRIPTOR.extensions_by_name['unpacked_int64_extension'] = unpacked_int64_extension
-DESCRIPTOR.extensions_by_name['unpacked_uint32_extension'] = unpacked_uint32_extension
-DESCRIPTOR.extensions_by_name['unpacked_uint64_extension'] = unpacked_uint64_extension
-DESCRIPTOR.extensions_by_name['unpacked_sint32_extension'] = unpacked_sint32_extension
-DESCRIPTOR.extensions_by_name['unpacked_sint64_extension'] = unpacked_sint64_extension
-DESCRIPTOR.extensions_by_name['unpacked_fixed32_extension'] = unpacked_fixed32_extension
-DESCRIPTOR.extensions_by_name['unpacked_fixed64_extension'] = unpacked_fixed64_extension
-DESCRIPTOR.extensions_by_name['unpacked_sfixed32_extension'] = unpacked_sfixed32_extension
-DESCRIPTOR.extensions_by_name['unpacked_sfixed64_extension'] = unpacked_sfixed64_extension
-DESCRIPTOR.extensions_by_name['unpacked_float_extension'] = unpacked_float_extension
-DESCRIPTOR.extensions_by_name['unpacked_double_extension'] = unpacked_double_extension
-DESCRIPTOR.extensions_by_name['unpacked_bool_extension'] = unpacked_bool_extension
-DESCRIPTOR.extensions_by_name['unpacked_enum_extension'] = unpacked_enum_extension
-
-TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.NestedMessage)
-    ))
-  ,
-
-  OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_OPTIONALGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.OptionalGroup)
-    ))
-  ,
-
-  RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_REPEATEDGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.RepeatedGroup)
-    ))
-  ,
-  DESCRIPTOR = _TESTALLTYPES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes)
-  ))
-_sym_db.RegisterMessage(TestAllTypes)
-_sym_db.RegisterMessage(TestAllTypes.NestedMessage)
-_sym_db.RegisterMessage(TestAllTypes.OptionalGroup)
-_sym_db.RegisterMessage(TestAllTypes.RepeatedGroup)
-
-NestedTestAllTypes = _reflection.GeneratedProtocolMessageType('NestedTestAllTypes', (_message.Message,), dict(
-  DESCRIPTOR = _NESTEDTESTALLTYPES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedTestAllTypes)
-  ))
-_sym_db.RegisterMessage(NestedTestAllTypes)
-
-TestDeprecatedFields = _reflection.GeneratedProtocolMessageType('TestDeprecatedFields', (_message.Message,), dict(
-  DESCRIPTOR = _TESTDEPRECATEDFIELDS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDeprecatedFields)
-  ))
-_sym_db.RegisterMessage(TestDeprecatedFields)
-
-ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOREIGNMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.ForeignMessage)
-  ))
-_sym_db.RegisterMessage(ForeignMessage)
-
-TestReservedFields = _reflection.GeneratedProtocolMessageType('TestReservedFields', (_message.Message,), dict(
-  DESCRIPTOR = _TESTRESERVEDFIELDS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReservedFields)
-  ))
-_sym_db.RegisterMessage(TestReservedFields)
-
-TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTALLEXTENSIONS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllExtensions)
-  ))
-_sym_db.RegisterMessage(TestAllExtensions)
-
-OptionalGroup_extension = _reflection.GeneratedProtocolMessageType('OptionalGroup_extension', (_message.Message,), dict(
-  DESCRIPTOR = _OPTIONALGROUP_EXTENSION,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.OptionalGroup_extension)
-  ))
-_sym_db.RegisterMessage(OptionalGroup_extension)
-
-RepeatedGroup_extension = _reflection.GeneratedProtocolMessageType('RepeatedGroup_extension', (_message.Message,), dict(
-  DESCRIPTOR = _REPEATEDGROUP_EXTENSION,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.RepeatedGroup_extension)
-  ))
-_sym_db.RegisterMessage(RepeatedGroup_extension)
-
-TestNestedExtension = _reflection.GeneratedProtocolMessageType('TestNestedExtension', (_message.Message,), dict(
-  DESCRIPTOR = _TESTNESTEDEXTENSION,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedExtension)
-  ))
-_sym_db.RegisterMessage(TestNestedExtension)
-
-TestRequired = _reflection.GeneratedProtocolMessageType('TestRequired', (_message.Message,), dict(
-  DESCRIPTOR = _TESTREQUIRED,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequired)
-  ))
-_sym_db.RegisterMessage(TestRequired)
-
-TestRequiredForeign = _reflection.GeneratedProtocolMessageType('TestRequiredForeign', (_message.Message,), dict(
-  DESCRIPTOR = _TESTREQUIREDFOREIGN,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredForeign)
-  ))
-_sym_db.RegisterMessage(TestRequiredForeign)
-
-TestForeignNested = _reflection.GeneratedProtocolMessageType('TestForeignNested', (_message.Message,), dict(
-  DESCRIPTOR = _TESTFOREIGNNESTED,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestForeignNested)
-  ))
-_sym_db.RegisterMessage(TestForeignNested)
-
-TestEmptyMessage = _reflection.GeneratedProtocolMessageType('TestEmptyMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEMPTYMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessage)
-  ))
-_sym_db.RegisterMessage(TestEmptyMessage)
-
-TestEmptyMessageWithExtensions = _reflection.GeneratedProtocolMessageType('TestEmptyMessageWithExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEMPTYMESSAGEWITHEXTENSIONS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessageWithExtensions)
-  ))
-_sym_db.RegisterMessage(TestEmptyMessageWithExtensions)
-
-TestMultipleExtensionRanges = _reflection.GeneratedProtocolMessageType('TestMultipleExtensionRanges', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMULTIPLEEXTENSIONRANGES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMultipleExtensionRanges)
-  ))
-_sym_db.RegisterMessage(TestMultipleExtensionRanges)
-
-TestReallyLargeTagNumber = _reflection.GeneratedProtocolMessageType('TestReallyLargeTagNumber', (_message.Message,), dict(
-  DESCRIPTOR = _TESTREALLYLARGETAGNUMBER,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReallyLargeTagNumber)
-  ))
-_sym_db.RegisterMessage(TestReallyLargeTagNumber)
-
-TestRecursiveMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTRECURSIVEMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMessage)
-  ))
-_sym_db.RegisterMessage(TestRecursiveMessage)
-
-TestMutualRecursionA = _reflection.GeneratedProtocolMessageType('TestMutualRecursionA', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMUTUALRECURSIONA,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionA)
-  ))
-_sym_db.RegisterMessage(TestMutualRecursionA)
-
-TestMutualRecursionB = _reflection.GeneratedProtocolMessageType('TestMutualRecursionB', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMUTUALRECURSIONB,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionB)
-  ))
-_sym_db.RegisterMessage(TestMutualRecursionB)
-
-TestDupFieldNumber = _reflection.GeneratedProtocolMessageType('TestDupFieldNumber', (_message.Message,), dict(
-
-  Foo = _reflection.GeneratedProtocolMessageType('Foo', (_message.Message,), dict(
-    DESCRIPTOR = _TESTDUPFIELDNUMBER_FOO,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Foo)
-    ))
-  ,
-
-  Bar = _reflection.GeneratedProtocolMessageType('Bar', (_message.Message,), dict(
-    DESCRIPTOR = _TESTDUPFIELDNUMBER_BAR,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Bar)
-    ))
-  ,
-  DESCRIPTOR = _TESTDUPFIELDNUMBER,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber)
-  ))
-_sym_db.RegisterMessage(TestDupFieldNumber)
-_sym_db.RegisterMessage(TestDupFieldNumber.Foo)
-_sym_db.RegisterMessage(TestDupFieldNumber.Bar)
-
-TestEagerMessage = _reflection.GeneratedProtocolMessageType('TestEagerMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEAGERMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEagerMessage)
-  ))
-_sym_db.RegisterMessage(TestEagerMessage)
-
-TestLazyMessage = _reflection.GeneratedProtocolMessageType('TestLazyMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTLAZYMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestLazyMessage)
-  ))
-_sym_db.RegisterMessage(TestLazyMessage)
-
-TestNestedMessageHasBits = _reflection.GeneratedProtocolMessageType('TestNestedMessageHasBits', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits)
-  ))
-_sym_db.RegisterMessage(TestNestedMessageHasBits)
-_sym_db.RegisterMessage(TestNestedMessageHasBits.NestedMessage)
-
-TestCamelCaseFieldNames = _reflection.GeneratedProtocolMessageType('TestCamelCaseFieldNames', (_message.Message,), dict(
-  DESCRIPTOR = _TESTCAMELCASEFIELDNAMES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCamelCaseFieldNames)
-  ))
-_sym_db.RegisterMessage(TestCamelCaseFieldNames)
-
-TestFieldOrderings = _reflection.GeneratedProtocolMessageType('TestFieldOrderings', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTFIELDORDERINGS_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTFIELDORDERINGS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings)
-  ))
-_sym_db.RegisterMessage(TestFieldOrderings)
-_sym_db.RegisterMessage(TestFieldOrderings.NestedMessage)
-
-TestExtremeDefaultValues = _reflection.GeneratedProtocolMessageType('TestExtremeDefaultValues', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEXTREMEDEFAULTVALUES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtremeDefaultValues)
-  ))
-_sym_db.RegisterMessage(TestExtremeDefaultValues)
-
-SparseEnumMessage = _reflection.GeneratedProtocolMessageType('SparseEnumMessage', (_message.Message,), dict(
-  DESCRIPTOR = _SPARSEENUMMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.SparseEnumMessage)
-  ))
-_sym_db.RegisterMessage(SparseEnumMessage)
-
-OneString = _reflection.GeneratedProtocolMessageType('OneString', (_message.Message,), dict(
-  DESCRIPTOR = _ONESTRING,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.OneString)
-  ))
-_sym_db.RegisterMessage(OneString)
-
-MoreString = _reflection.GeneratedProtocolMessageType('MoreString', (_message.Message,), dict(
-  DESCRIPTOR = _MORESTRING,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreString)
-  ))
-_sym_db.RegisterMessage(MoreString)
-
-OneBytes = _reflection.GeneratedProtocolMessageType('OneBytes', (_message.Message,), dict(
-  DESCRIPTOR = _ONEBYTES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.OneBytes)
-  ))
-_sym_db.RegisterMessage(OneBytes)
-
-MoreBytes = _reflection.GeneratedProtocolMessageType('MoreBytes', (_message.Message,), dict(
-  DESCRIPTOR = _MOREBYTES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreBytes)
-  ))
-_sym_db.RegisterMessage(MoreBytes)
-
-Int32Message = _reflection.GeneratedProtocolMessageType('Int32Message', (_message.Message,), dict(
-  DESCRIPTOR = _INT32MESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.Int32Message)
-  ))
-_sym_db.RegisterMessage(Int32Message)
-
-Uint32Message = _reflection.GeneratedProtocolMessageType('Uint32Message', (_message.Message,), dict(
-  DESCRIPTOR = _UINT32MESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint32Message)
-  ))
-_sym_db.RegisterMessage(Uint32Message)
-
-Int64Message = _reflection.GeneratedProtocolMessageType('Int64Message', (_message.Message,), dict(
-  DESCRIPTOR = _INT64MESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.Int64Message)
-  ))
-_sym_db.RegisterMessage(Int64Message)
-
-Uint64Message = _reflection.GeneratedProtocolMessageType('Uint64Message', (_message.Message,), dict(
-  DESCRIPTOR = _UINT64MESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint64Message)
-  ))
-_sym_db.RegisterMessage(Uint64Message)
-
-BoolMessage = _reflection.GeneratedProtocolMessageType('BoolMessage', (_message.Message,), dict(
-  DESCRIPTOR = _BOOLMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.BoolMessage)
-  ))
-_sym_db.RegisterMessage(BoolMessage)
-
-TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), dict(
-
-  FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTONEOF_FOOGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof.FooGroup)
-    ))
-  ,
-  DESCRIPTOR = _TESTONEOF,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof)
-  ))
-_sym_db.RegisterMessage(TestOneof)
-_sym_db.RegisterMessage(TestOneof.FooGroup)
-
-TestOneofBackwardsCompatible = _reflection.GeneratedProtocolMessageType('TestOneofBackwardsCompatible', (_message.Message,), dict(
-
-  FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible.FooGroup)
-    ))
-  ,
-  DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible)
-  ))
-_sym_db.RegisterMessage(TestOneofBackwardsCompatible)
-_sym_db.RegisterMessage(TestOneofBackwardsCompatible.FooGroup)
-
-TestOneof2 = _reflection.GeneratedProtocolMessageType('TestOneof2', (_message.Message,), dict(
-
-  FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTONEOF2_FOOGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.FooGroup)
-    ))
-  ,
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTONEOF2_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTONEOF2,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2)
-  ))
-_sym_db.RegisterMessage(TestOneof2)
-_sym_db.RegisterMessage(TestOneof2.FooGroup)
-_sym_db.RegisterMessage(TestOneof2.NestedMessage)
-
-TestRequiredOneof = _reflection.GeneratedProtocolMessageType('TestRequiredOneof', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTREQUIREDONEOF_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTREQUIREDONEOF,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof)
-  ))
-_sym_db.RegisterMessage(TestRequiredOneof)
-_sym_db.RegisterMessage(TestRequiredOneof.NestedMessage)
-
-TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTPACKEDTYPES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedTypes)
-  ))
-_sym_db.RegisterMessage(TestPackedTypes)
-
-TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTUNPACKEDTYPES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedTypes)
-  ))
-_sym_db.RegisterMessage(TestUnpackedTypes)
-
-TestPackedExtensions = _reflection.GeneratedProtocolMessageType('TestPackedExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTPACKEDEXTENSIONS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedExtensions)
-  ))
-_sym_db.RegisterMessage(TestPackedExtensions)
-
-TestUnpackedExtensions = _reflection.GeneratedProtocolMessageType('TestUnpackedExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTUNPACKEDEXTENSIONS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedExtensions)
-  ))
-_sym_db.RegisterMessage(TestUnpackedExtensions)
-
-TestDynamicExtensions = _reflection.GeneratedProtocolMessageType('TestDynamicExtensions', (_message.Message,), dict(
-
-  DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), dict(
-    DESCRIPTOR = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions.DynamicMessageType)
-    ))
-  ,
-  DESCRIPTOR = _TESTDYNAMICEXTENSIONS,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions)
-  ))
-_sym_db.RegisterMessage(TestDynamicExtensions)
-_sym_db.RegisterMessage(TestDynamicExtensions.DynamicMessageType)
-
-TestRepeatedScalarDifferentTagSizes = _reflection.GeneratedProtocolMessageType('TestRepeatedScalarDifferentTagSizes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTREPEATEDSCALARDIFFERENTTAGSIZES,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRepeatedScalarDifferentTagSizes)
-  ))
-_sym_db.RegisterMessage(TestRepeatedScalarDifferentTagSizes)
-
-TestParsingMerge = _reflection.GeneratedProtocolMessageType('TestParsingMerge', (_message.Message,), dict(
-
-  RepeatedFieldsGenerator = _reflection.GeneratedProtocolMessageType('RepeatedFieldsGenerator', (_message.Message,), dict(
-
-    Group1 = _reflection.GeneratedProtocolMessageType('Group1', (_message.Message,), dict(
-      DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1,
-      __module__ = 'google.protobuf.unittest_pb2'
-      # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1)
-      ))
-    ,
-
-    Group2 = _reflection.GeneratedProtocolMessageType('Group2', (_message.Message,), dict(
-      DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2,
-      __module__ = 'google.protobuf.unittest_pb2'
-      # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2)
-      ))
-    ,
-    DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator)
-    ))
-  ,
-
-  OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTPARSINGMERGE_OPTIONALGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.OptionalGroup)
-    ))
-  ,
-
-  RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict(
-    DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDGROUP,
-    __module__ = 'google.protobuf.unittest_pb2'
-    # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedGroup)
-    ))
-  ,
-  DESCRIPTOR = _TESTPARSINGMERGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge)
-  ))
-_sym_db.RegisterMessage(TestParsingMerge)
-_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator)
-_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group1)
-_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group2)
-_sym_db.RegisterMessage(TestParsingMerge.OptionalGroup)
-_sym_db.RegisterMessage(TestParsingMerge.RepeatedGroup)
-
-TestCommentInjectionMessage = _reflection.GeneratedProtocolMessageType('TestCommentInjectionMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTCOMMENTINJECTIONMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCommentInjectionMessage)
-  ))
-_sym_db.RegisterMessage(TestCommentInjectionMessage)
-
-FooRequest = _reflection.GeneratedProtocolMessageType('FooRequest', (_message.Message,), dict(
-  DESCRIPTOR = _FOOREQUEST,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.FooRequest)
-  ))
-_sym_db.RegisterMessage(FooRequest)
-
-FooResponse = _reflection.GeneratedProtocolMessageType('FooResponse', (_message.Message,), dict(
-  DESCRIPTOR = _FOORESPONSE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.FooResponse)
-  ))
-_sym_db.RegisterMessage(FooResponse)
-
-FooClientMessage = _reflection.GeneratedProtocolMessageType('FooClientMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOOCLIENTMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.FooClientMessage)
-  ))
-_sym_db.RegisterMessage(FooClientMessage)
-
-FooServerMessage = _reflection.GeneratedProtocolMessageType('FooServerMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOOSERVERMESSAGE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.FooServerMessage)
-  ))
-_sym_db.RegisterMessage(FooServerMessage)
-
-BarRequest = _reflection.GeneratedProtocolMessageType('BarRequest', (_message.Message,), dict(
-  DESCRIPTOR = _BARREQUEST,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.BarRequest)
-  ))
-_sym_db.RegisterMessage(BarRequest)
-
-BarResponse = _reflection.GeneratedProtocolMessageType('BarResponse', (_message.Message,), dict(
-  DESCRIPTOR = _BARRESPONSE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  # @@protoc_insertion_point(class_scope:protobuf_unittest.BarResponse)
-  ))
-_sym_db.RegisterMessage(BarResponse)
-
-TestAllExtensions.RegisterExtension(optional_int32_extension)
-TestAllExtensions.RegisterExtension(optional_int64_extension)
-TestAllExtensions.RegisterExtension(optional_uint32_extension)
-TestAllExtensions.RegisterExtension(optional_uint64_extension)
-TestAllExtensions.RegisterExtension(optional_sint32_extension)
-TestAllExtensions.RegisterExtension(optional_sint64_extension)
-TestAllExtensions.RegisterExtension(optional_fixed32_extension)
-TestAllExtensions.RegisterExtension(optional_fixed64_extension)
-TestAllExtensions.RegisterExtension(optional_sfixed32_extension)
-TestAllExtensions.RegisterExtension(optional_sfixed64_extension)
-TestAllExtensions.RegisterExtension(optional_float_extension)
-TestAllExtensions.RegisterExtension(optional_double_extension)
-TestAllExtensions.RegisterExtension(optional_bool_extension)
-TestAllExtensions.RegisterExtension(optional_string_extension)
-TestAllExtensions.RegisterExtension(optional_bytes_extension)
-optionalgroup_extension.message_type = _OPTIONALGROUP_EXTENSION
-TestAllExtensions.RegisterExtension(optionalgroup_extension)
-optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(optional_nested_message_extension)
-optional_foreign_message_extension.message_type = _FOREIGNMESSAGE
-TestAllExtensions.RegisterExtension(optional_foreign_message_extension)
-optional_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-TestAllExtensions.RegisterExtension(optional_import_message_extension)
-optional_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM
-TestAllExtensions.RegisterExtension(optional_nested_enum_extension)
-optional_foreign_enum_extension.enum_type = _FOREIGNENUM
-TestAllExtensions.RegisterExtension(optional_foreign_enum_extension)
-optional_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-TestAllExtensions.RegisterExtension(optional_import_enum_extension)
-TestAllExtensions.RegisterExtension(optional_string_piece_extension)
-TestAllExtensions.RegisterExtension(optional_cord_extension)
-optional_public_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE
-TestAllExtensions.RegisterExtension(optional_public_import_message_extension)
-optional_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(optional_lazy_message_extension)
-TestAllExtensions.RegisterExtension(repeated_int32_extension)
-TestAllExtensions.RegisterExtension(repeated_int64_extension)
-TestAllExtensions.RegisterExtension(repeated_uint32_extension)
-TestAllExtensions.RegisterExtension(repeated_uint64_extension)
-TestAllExtensions.RegisterExtension(repeated_sint32_extension)
-TestAllExtensions.RegisterExtension(repeated_sint64_extension)
-TestAllExtensions.RegisterExtension(repeated_fixed32_extension)
-TestAllExtensions.RegisterExtension(repeated_fixed64_extension)
-TestAllExtensions.RegisterExtension(repeated_sfixed32_extension)
-TestAllExtensions.RegisterExtension(repeated_sfixed64_extension)
-TestAllExtensions.RegisterExtension(repeated_float_extension)
-TestAllExtensions.RegisterExtension(repeated_double_extension)
-TestAllExtensions.RegisterExtension(repeated_bool_extension)
-TestAllExtensions.RegisterExtension(repeated_string_extension)
-TestAllExtensions.RegisterExtension(repeated_bytes_extension)
-repeatedgroup_extension.message_type = _REPEATEDGROUP_EXTENSION
-TestAllExtensions.RegisterExtension(repeatedgroup_extension)
-repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(repeated_nested_message_extension)
-repeated_foreign_message_extension.message_type = _FOREIGNMESSAGE
-TestAllExtensions.RegisterExtension(repeated_foreign_message_extension)
-repeated_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-TestAllExtensions.RegisterExtension(repeated_import_message_extension)
-repeated_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM
-TestAllExtensions.RegisterExtension(repeated_nested_enum_extension)
-repeated_foreign_enum_extension.enum_type = _FOREIGNENUM
-TestAllExtensions.RegisterExtension(repeated_foreign_enum_extension)
-repeated_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-TestAllExtensions.RegisterExtension(repeated_import_enum_extension)
-TestAllExtensions.RegisterExtension(repeated_string_piece_extension)
-TestAllExtensions.RegisterExtension(repeated_cord_extension)
-repeated_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(repeated_lazy_message_extension)
-TestAllExtensions.RegisterExtension(default_int32_extension)
-TestAllExtensions.RegisterExtension(default_int64_extension)
-TestAllExtensions.RegisterExtension(default_uint32_extension)
-TestAllExtensions.RegisterExtension(default_uint64_extension)
-TestAllExtensions.RegisterExtension(default_sint32_extension)
-TestAllExtensions.RegisterExtension(default_sint64_extension)
-TestAllExtensions.RegisterExtension(default_fixed32_extension)
-TestAllExtensions.RegisterExtension(default_fixed64_extension)
-TestAllExtensions.RegisterExtension(default_sfixed32_extension)
-TestAllExtensions.RegisterExtension(default_sfixed64_extension)
-TestAllExtensions.RegisterExtension(default_float_extension)
-TestAllExtensions.RegisterExtension(default_double_extension)
-TestAllExtensions.RegisterExtension(default_bool_extension)
-TestAllExtensions.RegisterExtension(default_string_extension)
-TestAllExtensions.RegisterExtension(default_bytes_extension)
-default_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM
-TestAllExtensions.RegisterExtension(default_nested_enum_extension)
-default_foreign_enum_extension.enum_type = _FOREIGNENUM
-TestAllExtensions.RegisterExtension(default_foreign_enum_extension)
-default_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM
-TestAllExtensions.RegisterExtension(default_import_enum_extension)
-TestAllExtensions.RegisterExtension(default_string_piece_extension)
-TestAllExtensions.RegisterExtension(default_cord_extension)
-TestAllExtensions.RegisterExtension(oneof_uint32_extension)
-oneof_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
-TestAllExtensions.RegisterExtension(oneof_nested_message_extension)
-TestAllExtensions.RegisterExtension(oneof_string_extension)
-TestAllExtensions.RegisterExtension(oneof_bytes_extension)
-TestFieldOrderings.RegisterExtension(my_extension_string)
-TestFieldOrderings.RegisterExtension(my_extension_int)
-TestPackedExtensions.RegisterExtension(packed_int32_extension)
-TestPackedExtensions.RegisterExtension(packed_int64_extension)
-TestPackedExtensions.RegisterExtension(packed_uint32_extension)
-TestPackedExtensions.RegisterExtension(packed_uint64_extension)
-TestPackedExtensions.RegisterExtension(packed_sint32_extension)
-TestPackedExtensions.RegisterExtension(packed_sint64_extension)
-TestPackedExtensions.RegisterExtension(packed_fixed32_extension)
-TestPackedExtensions.RegisterExtension(packed_fixed64_extension)
-TestPackedExtensions.RegisterExtension(packed_sfixed32_extension)
-TestPackedExtensions.RegisterExtension(packed_sfixed64_extension)
-TestPackedExtensions.RegisterExtension(packed_float_extension)
-TestPackedExtensions.RegisterExtension(packed_double_extension)
-TestPackedExtensions.RegisterExtension(packed_bool_extension)
-packed_enum_extension.enum_type = _FOREIGNENUM
-TestPackedExtensions.RegisterExtension(packed_enum_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_int32_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_int64_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_uint32_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_uint64_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_sint32_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_sint64_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_fixed32_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_fixed64_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_sfixed32_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_sfixed64_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_float_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_double_extension)
-TestUnpackedExtensions.RegisterExtension(unpacked_bool_extension)
-unpacked_enum_extension.enum_type = _FOREIGNENUM
-TestUnpackedExtensions.RegisterExtension(unpacked_enum_extension)
-TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['test'])
-TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['nested_string_extension'])
-_TESTREQUIRED.extensions_by_name['single'].message_type = _TESTREQUIRED
-TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['single'])
-_TESTREQUIRED.extensions_by_name['multi'].message_type = _TESTREQUIRED
-TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['multi'])
-_TESTPARSINGMERGE.extensions_by_name['optional_ext'].message_type = _TESTALLTYPES
-TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['optional_ext'])
-_TESTPARSINGMERGE.extensions_by_name['repeated_ext'].message_type = _TESTALLTYPES
-TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['repeated_ext'])
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('B\rUnittestProtoH\001\200\001\001\210\001\001\220\001\001\370\001\001'))
-_TESTENUMWITHDUPVALUE.has_options = True
-_TESTENUMWITHDUPVALUE._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\020\001'))
-optional_string_piece_extension.has_options = True
-optional_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-optional_cord_extension.has_options = True
-optional_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-optional_lazy_message_extension.has_options = True
-optional_lazy_message_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-repeated_string_piece_extension.has_options = True
-repeated_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-repeated_cord_extension.has_options = True
-repeated_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-repeated_lazy_message_extension.has_options = True
-repeated_lazy_message_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-default_string_piece_extension.has_options = True
-default_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-default_cord_extension.has_options = True
-default_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-packed_int32_extension.has_options = True
-packed_int32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_int64_extension.has_options = True
-packed_int64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_uint32_extension.has_options = True
-packed_uint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_uint64_extension.has_options = True
-packed_uint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_sint32_extension.has_options = True
-packed_sint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_sint64_extension.has_options = True
-packed_sint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_fixed32_extension.has_options = True
-packed_fixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_fixed64_extension.has_options = True
-packed_fixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_sfixed32_extension.has_options = True
-packed_sfixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_sfixed64_extension.has_options = True
-packed_sfixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_float_extension.has_options = True
-packed_float_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_double_extension.has_options = True
-packed_double_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_bool_extension.has_options = True
-packed_bool_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-packed_enum_extension.has_options = True
-packed_enum_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-unpacked_int32_extension.has_options = True
-unpacked_int32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_int64_extension.has_options = True
-unpacked_int64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_uint32_extension.has_options = True
-unpacked_uint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_uint64_extension.has_options = True
-unpacked_uint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_sint32_extension.has_options = True
-unpacked_sint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_sint64_extension.has_options = True
-unpacked_sint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_fixed32_extension.has_options = True
-unpacked_fixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_fixed64_extension.has_options = True
-unpacked_fixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_sfixed32_extension.has_options = True
-unpacked_sfixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_sfixed64_extension.has_options = True
-unpacked_sfixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_float_extension.has_options = True
-unpacked_float_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_double_extension.has_options = True
-unpacked_double_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_bool_extension.has_options = True
-unpacked_bool_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-unpacked_enum_extension.has_options = True
-unpacked_enum_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['optional_lazy_message'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTALLTYPES.fields_by_name['default_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['default_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['default_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['default_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32'].has_options = True
-_TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))
-_TESTEAGERMESSAGE.fields_by_name['sub_message'].has_options = True
-_TESTEAGERMESSAGE.fields_by_name['sub_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\000'))
-_TESTLAZYMESSAGE.fields_by_name['sub_message'].has_options = True
-_TESTLAZYMESSAGE.fields_by_name['sub_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTCAMELCASEFIELDNAMES.fields_by_name['StringPieceField'].has_options = True
-_TESTCAMELCASEFIELDNAMES.fields_by_name['StringPieceField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTCAMELCASEFIELDNAMES.fields_by_name['CordField'].has_options = True
-_TESTCAMELCASEFIELDNAMES.fields_by_name['CordField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedStringPieceField'].has_options = True
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedStringPieceField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedCordField'].has_options = True
-_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedCordField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTEXTREMEDEFAULTVALUES.fields_by_name['string_piece_with_zero'].has_options = True
-_TESTEXTREMEDEFAULTVALUES.fields_by_name['string_piece_with_zero']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTEXTREMEDEFAULTVALUES.fields_by_name['cord_with_zero'].has_options = True
-_TESTEXTREMEDEFAULTVALUES.fields_by_name['cord_with_zero']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTONEOF2.fields_by_name['foo_cord'].has_options = True
-_TESTONEOF2.fields_by_name['foo_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTONEOF2.fields_by_name['foo_string_piece'].has_options = True
-_TESTONEOF2.fields_by_name['foo_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTONEOF2.fields_by_name['foo_lazy_message'].has_options = True
-_TESTONEOF2.fields_by_name['foo_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTONEOF2.fields_by_name['bar_cord'].has_options = True
-_TESTONEOF2.fields_by_name['bar_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTONEOF2.fields_by_name['bar_string_piece'].has_options = True
-_TESTONEOF2.fields_by_name['bar_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTPACKEDTYPES.fields_by_name['packed_int32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_int64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_uint32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_uint64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sint32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sint64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_fixed32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_fixed64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_float'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_double'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_bool'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_enum'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_int32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_int64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_float'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_double'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_bool'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTDYNAMICEXTENSIONS.fields_by_name['packed_extension'].has_options = True
-_TESTDYNAMICEXTENSIONS.fields_by_name['packed_extension']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-
-_TESTSERVICE = _descriptor.ServiceDescriptor(
-  name='TestService',
-  full_name='protobuf_unittest.TestService',
-  file=DESCRIPTOR,
-  index=0,
-  options=None,
-  serialized_start=12729,
-  serialized_end=12882,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='Foo',
-    full_name='protobuf_unittest.TestService.Foo',
-    index=0,
-    containing_service=None,
-    input_type=_FOOREQUEST,
-    output_type=_FOORESPONSE,
-    options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='Bar',
-    full_name='protobuf_unittest.TestService.Bar',
-    index=1,
-    containing_service=None,
-    input_type=_BARREQUEST,
-    output_type=_BARRESPONSE,
-    options=None,
-  ),
-])
-
-TestService = service_reflection.GeneratedServiceType('TestService', (_service.Service,), dict(
-  DESCRIPTOR = _TESTSERVICE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  ))
-
-TestService_Stub = service_reflection.GeneratedServiceStubType('TestService_Stub', (TestService,), dict(
-  DESCRIPTOR = _TESTSERVICE,
-  __module__ = 'google.protobuf.unittest_pb2'
-  ))
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/unittest_proto3_arena_pb2.py b/tools/swarming_client/third_party/google/protobuf/unittest_proto3_arena_pb2.py
deleted file mode 100644
index ba2c829..0000000
--- a/tools/swarming_client/third_party/google/protobuf/unittest_proto3_arena_pb2.py
+++ /dev/null
@@ -1,1014 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/unittest_proto3_arena.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2
-google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/unittest_proto3_arena.proto',
-  package='proto3_arena_unittest',
-  syntax='proto3',
-  serialized_pb=_b('\n+google/protobuf/unittest_proto3_arena.proto\x12\x15proto3_arena_unittest\x1a%google/protobuf/unittest_import.proto\"\xf6\x10\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12R\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessage\x12G\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32%.proto3_arena_unittest.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12L\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnum\x12\x41\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\".proto3_arena_unittest.ForeignEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12T\n\x15optional_lazy_message\x18\x1b \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18  \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! \x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12R\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessage\x12G\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32%.proto3_arena_unittest.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12L\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnum\x12\x41\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\".proto3_arena_unittest.ForeignEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12T\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12Q\n\x14oneof_nested_message\x18p \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\"C\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\xae\x03\n\x0fTestPackedTypes\x12\x18\n\x0cpacked_int32\x18Z \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18[ \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18\\ \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18] \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18` \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x01\x12;\n\x0bpacked_enum\x18g \x03(\x0e\x32\".proto3_arena_unittest.ForeignEnumB\x02\x10\x01\"\xdf\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0erepeated_int32\x18\x01 \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0erepeated_int64\x18\x02 \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0frepeated_uint32\x18\x03 \x03(\rB\x02\x10\x00\x12\x1b\n\x0frepeated_uint64\x18\x04 \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint32\x18\x05 \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint64\x18\x06 \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed32\x18\x07 \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed64\x18\x08 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed32\x18\t \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed64\x18\n \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0erepeated_float\x18\x0b \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0frepeated_double\x18\x0c \x03(\x01\x42\x02\x10\x00\x12\x19\n\rrepeated_bool\x18\r \x03(\x08\x42\x02\x10\x00\x12P\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnumB\x02\x10\x00\"\x84\x01\n\x12NestedTestAllTypes\x12\x38\n\x05\x63hild\x18\x01 \x01(\x0b\x32).proto3_arena_unittest.NestedTestAllTypes\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.proto3_arena_unittest.TestAllTypes\"\x1b\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\"\x12\n\x10TestEmptyMessage*R\n\x0b\x46oreignEnum\x12\x10\n\x0c\x46OREIGN_ZERO\x10\x00\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06\x42\x03\xf8\x01\x01\x62\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_FOREIGNENUM = _descriptor.EnumDescriptor(
-  name='ForeignEnum',
-  full_name='proto3_arena_unittest.ForeignEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_ZERO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_FOO', index=1, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAR', index=2, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOREIGN_BAZ', index=3, number=6,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3377,
-  serialized_end=3459,
-)
-_sym_db.RegisterEnumDescriptor(_FOREIGNENUM)
-
-ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM)
-FOREIGN_ZERO = 0
-FOREIGN_FOO = 4
-FOREIGN_BAR = 5
-FOREIGN_BAZ = 6
-
-
-_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
-  name='NestedEnum',
-  full_name='proto3_arena_unittest.TestAllTypes.NestedEnum',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='ZERO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAZ', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEG', index=4, number=-1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2194,
-  serialized_end=2261,
-)
-_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM)
-
-
-_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
-  name='NestedMessage',
-  full_name='proto3_arena_unittest.TestAllTypes.NestedMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bb', full_name='proto3_arena_unittest.TestAllTypes.NestedMessage.bb', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2165,
-  serialized_end=2192,
-)
-
-_TESTALLTYPES = _descriptor.Descriptor(
-  name='TestAllTypes',
-  full_name='proto3_arena_unittest.TestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='optional_int32', full_name='proto3_arena_unittest.TestAllTypes.optional_int32', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_int64', full_name='proto3_arena_unittest.TestAllTypes.optional_int64', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint32', full_name='proto3_arena_unittest.TestAllTypes.optional_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_uint64', full_name='proto3_arena_unittest.TestAllTypes.optional_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint32', full_name='proto3_arena_unittest.TestAllTypes.optional_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sint64', full_name='proto3_arena_unittest.TestAllTypes.optional_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed32', full_name='proto3_arena_unittest.TestAllTypes.optional_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_fixed64', full_name='proto3_arena_unittest.TestAllTypes.optional_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed32', full_name='proto3_arena_unittest.TestAllTypes.optional_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_sfixed64', full_name='proto3_arena_unittest.TestAllTypes.optional_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_float', full_name='proto3_arena_unittest.TestAllTypes.optional_float', index=10,
-      number=11, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_double', full_name='proto3_arena_unittest.TestAllTypes.optional_double', index=11,
-      number=12, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bool', full_name='proto3_arena_unittest.TestAllTypes.optional_bool', index=12,
-      number=13, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string', full_name='proto3_arena_unittest.TestAllTypes.optional_string', index=13,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_bytes', full_name='proto3_arena_unittest.TestAllTypes.optional_bytes', index=14,
-      number=15, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_message', full_name='proto3_arena_unittest.TestAllTypes.optional_nested_message', index=15,
-      number=18, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_message', full_name='proto3_arena_unittest.TestAllTypes.optional_foreign_message', index=16,
-      number=19, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_import_message', full_name='proto3_arena_unittest.TestAllTypes.optional_import_message', index=17,
-      number=20, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_nested_enum', full_name='proto3_arena_unittest.TestAllTypes.optional_nested_enum', index=18,
-      number=21, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_foreign_enum', full_name='proto3_arena_unittest.TestAllTypes.optional_foreign_enum', index=19,
-      number=22, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_string_piece', full_name='proto3_arena_unittest.TestAllTypes.optional_string_piece', index=20,
-      number=24, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='optional_cord', full_name='proto3_arena_unittest.TestAllTypes.optional_cord', index=21,
-      number=25, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='optional_public_import_message', full_name='proto3_arena_unittest.TestAllTypes.optional_public_import_message', index=22,
-      number=26, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='optional_lazy_message', full_name='proto3_arena_unittest.TestAllTypes.optional_lazy_message', index=23,
-      number=27, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='proto3_arena_unittest.TestAllTypes.repeated_int32', index=24,
-      number=31, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='proto3_arena_unittest.TestAllTypes.repeated_int64', index=25,
-      number=32, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='proto3_arena_unittest.TestAllTypes.repeated_uint32', index=26,
-      number=33, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='proto3_arena_unittest.TestAllTypes.repeated_uint64', index=27,
-      number=34, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='proto3_arena_unittest.TestAllTypes.repeated_sint32', index=28,
-      number=35, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='proto3_arena_unittest.TestAllTypes.repeated_sint64', index=29,
-      number=36, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='proto3_arena_unittest.TestAllTypes.repeated_fixed32', index=30,
-      number=37, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='proto3_arena_unittest.TestAllTypes.repeated_fixed64', index=31,
-      number=38, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='proto3_arena_unittest.TestAllTypes.repeated_sfixed32', index=32,
-      number=39, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='proto3_arena_unittest.TestAllTypes.repeated_sfixed64', index=33,
-      number=40, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='proto3_arena_unittest.TestAllTypes.repeated_float', index=34,
-      number=41, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='proto3_arena_unittest.TestAllTypes.repeated_double', index=35,
-      number=42, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='proto3_arena_unittest.TestAllTypes.repeated_bool', index=36,
-      number=43, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string', full_name='proto3_arena_unittest.TestAllTypes.repeated_string', index=37,
-      number=44, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bytes', full_name='proto3_arena_unittest.TestAllTypes.repeated_bytes', index=38,
-      number=45, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_nested_message', index=39,
-      number=48, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_foreign_message', index=40,
-      number=49, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_import_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_import_message', index=41,
-      number=50, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='proto3_arena_unittest.TestAllTypes.repeated_nested_enum', index=42,
-      number=51, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_foreign_enum', full_name='proto3_arena_unittest.TestAllTypes.repeated_foreign_enum', index=43,
-      number=52, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string_piece', full_name='proto3_arena_unittest.TestAllTypes.repeated_string_piece', index=44,
-      number=54, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_cord', full_name='proto3_arena_unittest.TestAllTypes.repeated_cord', index=45,
-      number=55, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_lazy_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_lazy_message', index=46,
-      number=57, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))),
-    _descriptor.FieldDescriptor(
-      name='oneof_uint32', full_name='proto3_arena_unittest.TestAllTypes.oneof_uint32', index=47,
-      number=111, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_nested_message', full_name='proto3_arena_unittest.TestAllTypes.oneof_nested_message', index=48,
-      number=112, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_string', full_name='proto3_arena_unittest.TestAllTypes.oneof_string', index=49,
-      number=113, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_bytes', full_name='proto3_arena_unittest.TestAllTypes.oneof_bytes', index=50,
-      number=114, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTALLTYPES_NESTEDMESSAGE, ],
-  enum_types=[
-    _TESTALLTYPES_NESTEDENUM,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='oneof_field', full_name='proto3_arena_unittest.TestAllTypes.oneof_field',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=110,
-  serialized_end=2276,
-)
-
-
-_TESTPACKEDTYPES = _descriptor.Descriptor(
-  name='TestPackedTypes',
-  full_name='proto3_arena_unittest.TestPackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='packed_int32', full_name='proto3_arena_unittest.TestPackedTypes.packed_int32', index=0,
-      number=90, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_int64', full_name='proto3_arena_unittest.TestPackedTypes.packed_int64', index=1,
-      number=91, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_uint32', full_name='proto3_arena_unittest.TestPackedTypes.packed_uint32', index=2,
-      number=92, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_uint64', full_name='proto3_arena_unittest.TestPackedTypes.packed_uint64', index=3,
-      number=93, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sint32', full_name='proto3_arena_unittest.TestPackedTypes.packed_sint32', index=4,
-      number=94, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sint64', full_name='proto3_arena_unittest.TestPackedTypes.packed_sint64', index=5,
-      number=95, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_fixed32', full_name='proto3_arena_unittest.TestPackedTypes.packed_fixed32', index=6,
-      number=96, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_fixed64', full_name='proto3_arena_unittest.TestPackedTypes.packed_fixed64', index=7,
-      number=97, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sfixed32', full_name='proto3_arena_unittest.TestPackedTypes.packed_sfixed32', index=8,
-      number=98, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_sfixed64', full_name='proto3_arena_unittest.TestPackedTypes.packed_sfixed64', index=9,
-      number=99, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_float', full_name='proto3_arena_unittest.TestPackedTypes.packed_float', index=10,
-      number=100, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_double', full_name='proto3_arena_unittest.TestPackedTypes.packed_double', index=11,
-      number=101, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_bool', full_name='proto3_arena_unittest.TestPackedTypes.packed_bool', index=12,
-      number=102, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-    _descriptor.FieldDescriptor(
-      name='packed_enum', full_name='proto3_arena_unittest.TestPackedTypes.packed_enum', index=13,
-      number=103, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2279,
-  serialized_end=2709,
-)
-
-
-_TESTUNPACKEDTYPES = _descriptor.Descriptor(
-  name='TestUnpackedTypes',
-  full_name='proto3_arena_unittest.TestUnpackedTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='repeated_int32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_int32', index=0,
-      number=1, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_int64', index=1,
-      number=2, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_uint32', index=2,
-      number=3, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_uint64', index=3,
-      number=4, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sint32', index=4,
-      number=5, type=17, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sint64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sint64', index=5,
-      number=6, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_fixed32', index=6,
-      number=7, type=7, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_fixed64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_fixed64', index=7,
-      number=8, type=6, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sfixed32', index=8,
-      number=9, type=15, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_sfixed64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sfixed64', index=9,
-      number=10, type=16, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_float', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_float', index=10,
-      number=11, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_double', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_double', index=11,
-      number=12, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_bool', index=12,
-      number=13, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-    _descriptor.FieldDescriptor(
-      name='repeated_nested_enum', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_nested_enum', index=13,
-      number=14, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2712,
-  serialized_end=3191,
-)
-
-
-_NESTEDTESTALLTYPES = _descriptor.Descriptor(
-  name='NestedTestAllTypes',
-  full_name='proto3_arena_unittest.NestedTestAllTypes',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='child', full_name='proto3_arena_unittest.NestedTestAllTypes.child', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='payload', full_name='proto3_arena_unittest.NestedTestAllTypes.payload', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3194,
-  serialized_end=3326,
-)
-
-
-_FOREIGNMESSAGE = _descriptor.Descriptor(
-  name='ForeignMessage',
-  full_name='proto3_arena_unittest.ForeignMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='c', full_name='proto3_arena_unittest.ForeignMessage.c', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3328,
-  serialized_end=3355,
-)
-
-
-_TESTEMPTYMESSAGE = _descriptor.Descriptor(
-  name='TestEmptyMessage',
-  full_name='proto3_arena_unittest.TestEmptyMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3357,
-  serialized_end=3375,
-)
-
-_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES
-_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['optional_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE
-_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
-_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_uint32'])
-_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_nested_message'])
-_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_string'])
-_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append(
-  _TESTALLTYPES.fields_by_name['oneof_bytes'])
-_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field']
-_TESTPACKEDTYPES.fields_by_name['packed_enum'].enum_type = _FOREIGNENUM
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM
-_NESTEDTESTALLTYPES.fields_by_name['child'].message_type = _NESTEDTESTALLTYPES
-_NESTEDTESTALLTYPES.fields_by_name['payload'].message_type = _TESTALLTYPES
-DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES
-DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES
-DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES
-DESCRIPTOR.message_types_by_name['NestedTestAllTypes'] = _NESTEDTESTALLTYPES
-DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
-DESCRIPTOR.message_types_by_name['TestEmptyMessage'] = _TESTEMPTYMESSAGE
-DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM
-
-TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict(
-
-  NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
-    DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE,
-    __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-    # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestAllTypes.NestedMessage)
-    ))
-  ,
-  DESCRIPTOR = _TESTALLTYPES,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestAllTypes)
-  ))
-_sym_db.RegisterMessage(TestAllTypes)
-_sym_db.RegisterMessage(TestAllTypes.NestedMessage)
-
-TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTPACKEDTYPES,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestPackedTypes)
-  ))
-_sym_db.RegisterMessage(TestPackedTypes)
-
-TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict(
-  DESCRIPTOR = _TESTUNPACKEDTYPES,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestUnpackedTypes)
-  ))
-_sym_db.RegisterMessage(TestUnpackedTypes)
-
-NestedTestAllTypes = _reflection.GeneratedProtocolMessageType('NestedTestAllTypes', (_message.Message,), dict(
-  DESCRIPTOR = _NESTEDTESTALLTYPES,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.NestedTestAllTypes)
-  ))
-_sym_db.RegisterMessage(NestedTestAllTypes)
-
-ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict(
-  DESCRIPTOR = _FOREIGNMESSAGE,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.ForeignMessage)
-  ))
-_sym_db.RegisterMessage(ForeignMessage)
-
-TestEmptyMessage = _reflection.GeneratedProtocolMessageType('TestEmptyMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEMPTYMESSAGE,
-  __module__ = 'google.protobuf.unittest_proto3_arena_pb2'
-  # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestEmptyMessage)
-  ))
-_sym_db.RegisterMessage(TestEmptyMessage)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
-_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['optional_lazy_message'].has_options = True
-_TESTALLTYPES.fields_by_name['optional_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))
-_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))
-_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True
-_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_int32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_int64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_uint32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_uint64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sint32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sint64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_fixed32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_fixed64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed32'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed64'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_float'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_double'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_bool'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTPACKEDTYPES.fields_by_name['packed_enum'].has_options = True
-_TESTPACKEDTYPES.fields_by_name['packed_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_float'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_double'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_bool'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].has_options = True
-_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/util/__init__.py b/tools/swarming_client/third_party/google/protobuf/util/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/google/protobuf/util/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/google/protobuf/util/json_format_proto3_pb2.py b/tools/swarming_client/third_party/google/protobuf/util/json_format_proto3_pb2.py
deleted file mode 100644
index d7aaf29..0000000
--- a/tools/swarming_client/third_party/google/protobuf/util/json_format_proto3_pb2.py
+++ /dev/null
@@ -1,1852 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/util/json_format_proto3.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
-from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
-from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
-from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
-from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
-from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/util/json_format_proto3.proto',
-  package='proto3',
-  syntax='proto3',
-  serialized_pb=_b('\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x19google/protobuf/any.proto\x1a google/protobuf/field_mask.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\xd4\x01\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 \x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 \x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x62\x06proto3')
-  ,
-  dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__pb2.DESCRIPTOR,])
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-_ENUMTYPE = _descriptor.EnumDescriptor(
-  name='EnumType',
-  full_name='proto3.EnumType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FOO', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BAR', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=4533,
-  serialized_end=4561,
-)
-_sym_db.RegisterEnumDescriptor(_ENUMTYPE)
-
-EnumType = enum_type_wrapper.EnumTypeWrapper(_ENUMTYPE)
-FOO = 0
-BAR = 1
-
-
-
-_MESSAGETYPE = _descriptor.Descriptor(
-  name='MessageType',
-  full_name='proto3.MessageType',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.MessageType.value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=277,
-  serialized_end=305,
-)
-
-
-_TESTMESSAGE = _descriptor.Descriptor(
-  name='TestMessage',
-  full_name='proto3.TestMessage',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bool_value', full_name='proto3.TestMessage.bool_value', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int32_value', full_name='proto3.TestMessage.int32_value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int64_value', full_name='proto3.TestMessage.int64_value', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint32_value', full_name='proto3.TestMessage.uint32_value', index=3,
-      number=4, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint64_value', full_name='proto3.TestMessage.uint64_value', index=4,
-      number=5, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='float_value', full_name='proto3.TestMessage.float_value', index=5,
-      number=6, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='double_value', full_name='proto3.TestMessage.double_value', index=6,
-      number=7, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='proto3.TestMessage.string_value', index=7,
-      number=8, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bytes_value', full_name='proto3.TestMessage.bytes_value', index=8,
-      number=9, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='enum_value', full_name='proto3.TestMessage.enum_value', index=9,
-      number=10, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='message_value', full_name='proto3.TestMessage.message_value', index=10,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool_value', full_name='proto3.TestMessage.repeated_bool_value', index=11,
-      number=21, type=8, cpp_type=7, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32_value', full_name='proto3.TestMessage.repeated_int32_value', index=12,
-      number=22, type=5, cpp_type=1, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64_value', full_name='proto3.TestMessage.repeated_int64_value', index=13,
-      number=23, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32_value', full_name='proto3.TestMessage.repeated_uint32_value', index=14,
-      number=24, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64_value', full_name='proto3.TestMessage.repeated_uint64_value', index=15,
-      number=25, type=4, cpp_type=4, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float_value', full_name='proto3.TestMessage.repeated_float_value', index=16,
-      number=26, type=2, cpp_type=6, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double_value', full_name='proto3.TestMessage.repeated_double_value', index=17,
-      number=27, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string_value', full_name='proto3.TestMessage.repeated_string_value', index=18,
-      number=28, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bytes_value', full_name='proto3.TestMessage.repeated_bytes_value', index=19,
-      number=29, type=12, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_enum_value', full_name='proto3.TestMessage.repeated_enum_value', index=20,
-      number=30, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_message_value', full_name='proto3.TestMessage.repeated_message_value', index=21,
-      number=31, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=308,
-  serialized_end=968,
-)
-
-
-_TESTONEOF = _descriptor.Descriptor(
-  name='TestOneof',
-  full_name='proto3.TestOneof',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='oneof_int32_value', full_name='proto3.TestOneof.oneof_int32_value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_string_value', full_name='proto3.TestOneof.oneof_string_value', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_bytes_value', full_name='proto3.TestOneof.oneof_bytes_value', index=2,
-      number=3, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_enum_value', full_name='proto3.TestOneof.oneof_enum_value', index=3,
-      number=4, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='oneof_message_value', full_name='proto3.TestOneof.oneof_message_value', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-    _descriptor.OneofDescriptor(
-      name='oneof_value', full_name='proto3.TestOneof.oneof_value',
-      index=0, containing_type=None, fields=[]),
-  ],
-  serialized_start=971,
-  serialized_end=1183,
-)
-
-
-_TESTMAP_BOOLMAPENTRY = _descriptor.Descriptor(
-  name='BoolMapEntry',
-  full_name='proto3.TestMap.BoolMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.BoolMapEntry.key', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.BoolMapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1501,
-  serialized_end=1547,
-)
-
-_TESTMAP_INT32MAPENTRY = _descriptor.Descriptor(
-  name='Int32MapEntry',
-  full_name='proto3.TestMap.Int32MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.Int32MapEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.Int32MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1549,
-  serialized_end=1596,
-)
-
-_TESTMAP_INT64MAPENTRY = _descriptor.Descriptor(
-  name='Int64MapEntry',
-  full_name='proto3.TestMap.Int64MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.Int64MapEntry.key', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.Int64MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1598,
-  serialized_end=1645,
-)
-
-_TESTMAP_UINT32MAPENTRY = _descriptor.Descriptor(
-  name='Uint32MapEntry',
-  full_name='proto3.TestMap.Uint32MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.Uint32MapEntry.key', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.Uint32MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1647,
-  serialized_end=1695,
-)
-
-_TESTMAP_UINT64MAPENTRY = _descriptor.Descriptor(
-  name='Uint64MapEntry',
-  full_name='proto3.TestMap.Uint64MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.Uint64MapEntry.key', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.Uint64MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1697,
-  serialized_end=1745,
-)
-
-_TESTMAP_STRINGMAPENTRY = _descriptor.Descriptor(
-  name='StringMapEntry',
-  full_name='proto3.TestMap.StringMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestMap.StringMapEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestMap.StringMapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1747,
-  serialized_end=1795,
-)
-
-_TESTMAP = _descriptor.Descriptor(
-  name='TestMap',
-  full_name='proto3.TestMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bool_map', full_name='proto3.TestMap.bool_map', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int32_map', full_name='proto3.TestMap.int32_map', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int64_map', full_name='proto3.TestMap.int64_map', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint32_map', full_name='proto3.TestMap.uint32_map', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint64_map', full_name='proto3.TestMap.uint64_map', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_map', full_name='proto3.TestMap.string_map', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTMAP_BOOLMAPENTRY, _TESTMAP_INT32MAPENTRY, _TESTMAP_INT64MAPENTRY, _TESTMAP_UINT32MAPENTRY, _TESTMAP_UINT64MAPENTRY, _TESTMAP_STRINGMAPENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1186,
-  serialized_end=1795,
-)
-
-
-_TESTNESTEDMAP_BOOLMAPENTRY = _descriptor.Descriptor(
-  name='BoolMapEntry',
-  full_name='proto3.TestNestedMap.BoolMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.BoolMapEntry.key', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.BoolMapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1501,
-  serialized_end=1547,
-)
-
-_TESTNESTEDMAP_INT32MAPENTRY = _descriptor.Descriptor(
-  name='Int32MapEntry',
-  full_name='proto3.TestNestedMap.Int32MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.Int32MapEntry.key', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.Int32MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1549,
-  serialized_end=1596,
-)
-
-_TESTNESTEDMAP_INT64MAPENTRY = _descriptor.Descriptor(
-  name='Int64MapEntry',
-  full_name='proto3.TestNestedMap.Int64MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.Int64MapEntry.key', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.Int64MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1598,
-  serialized_end=1645,
-)
-
-_TESTNESTEDMAP_UINT32MAPENTRY = _descriptor.Descriptor(
-  name='Uint32MapEntry',
-  full_name='proto3.TestNestedMap.Uint32MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.Uint32MapEntry.key', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.Uint32MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1647,
-  serialized_end=1695,
-)
-
-_TESTNESTEDMAP_UINT64MAPENTRY = _descriptor.Descriptor(
-  name='Uint64MapEntry',
-  full_name='proto3.TestNestedMap.Uint64MapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.Uint64MapEntry.key', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.Uint64MapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1697,
-  serialized_end=1745,
-)
-
-_TESTNESTEDMAP_STRINGMAPENTRY = _descriptor.Descriptor(
-  name='StringMapEntry',
-  full_name='proto3.TestNestedMap.StringMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.StringMapEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.StringMapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1747,
-  serialized_end=1795,
-)
-
-_TESTNESTEDMAP_MAPMAPENTRY = _descriptor.Descriptor(
-  name='MapMapEntry',
-  full_name='proto3.TestNestedMap.MapMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestNestedMap.MapMapEntry.key', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestNestedMap.MapMapEntry.value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2503,
-  serialized_end=2571,
-)
-
-_TESTNESTEDMAP = _descriptor.Descriptor(
-  name='TestNestedMap',
-  full_name='proto3.TestNestedMap',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bool_map', full_name='proto3.TestNestedMap.bool_map', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int32_map', full_name='proto3.TestNestedMap.int32_map', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int64_map', full_name='proto3.TestNestedMap.int64_map', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint32_map', full_name='proto3.TestNestedMap.uint32_map', index=3,
-      number=4, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint64_map', full_name='proto3.TestNestedMap.uint64_map', index=4,
-      number=5, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_map', full_name='proto3.TestNestedMap.string_map', index=5,
-      number=6, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='map_map', full_name='proto3.TestNestedMap.map_map', index=6,
-      number=7, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTNESTEDMAP_BOOLMAPENTRY, _TESTNESTEDMAP_INT32MAPENTRY, _TESTNESTEDMAP_INT64MAPENTRY, _TESTNESTEDMAP_UINT32MAPENTRY, _TESTNESTEDMAP_UINT64MAPENTRY, _TESTNESTEDMAP_STRINGMAPENTRY, _TESTNESTEDMAP_MAPMAPENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1798,
-  serialized_end=2571,
-)
-
-
-_TESTWRAPPER = _descriptor.Descriptor(
-  name='TestWrapper',
-  full_name='proto3.TestWrapper',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bool_value', full_name='proto3.TestWrapper.bool_value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int32_value', full_name='proto3.TestWrapper.int32_value', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int64_value', full_name='proto3.TestWrapper.int64_value', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint32_value', full_name='proto3.TestWrapper.uint32_value', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uint64_value', full_name='proto3.TestWrapper.uint64_value', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='float_value', full_name='proto3.TestWrapper.float_value', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='double_value', full_name='proto3.TestWrapper.double_value', index=6,
-      number=7, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='proto3.TestWrapper.string_value', index=7,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bytes_value', full_name='proto3.TestWrapper.bytes_value', index=8,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bool_value', full_name='proto3.TestWrapper.repeated_bool_value', index=9,
-      number=11, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int32_value', full_name='proto3.TestWrapper.repeated_int32_value', index=10,
-      number=12, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_int64_value', full_name='proto3.TestWrapper.repeated_int64_value', index=11,
-      number=13, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint32_value', full_name='proto3.TestWrapper.repeated_uint32_value', index=12,
-      number=14, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_uint64_value', full_name='proto3.TestWrapper.repeated_uint64_value', index=13,
-      number=15, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_float_value', full_name='proto3.TestWrapper.repeated_float_value', index=14,
-      number=16, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_double_value', full_name='proto3.TestWrapper.repeated_double_value', index=15,
-      number=17, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_string_value', full_name='proto3.TestWrapper.repeated_string_value', index=16,
-      number=18, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_bytes_value', full_name='proto3.TestWrapper.repeated_bytes_value', index=17,
-      number=19, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=2574,
-  serialized_end=3580,
-)
-
-
-_TESTTIMESTAMP = _descriptor.Descriptor(
-  name='TestTimestamp',
-  full_name='proto3.TestTimestamp',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestTimestamp.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestTimestamp.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3582,
-  serialized_end=3692,
-)
-
-
-_TESTDURATION = _descriptor.Descriptor(
-  name='TestDuration',
-  full_name='proto3.TestDuration',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestDuration.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestDuration.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3694,
-  serialized_end=3801,
-)
-
-
-_TESTFIELDMASK = _descriptor.Descriptor(
-  name='TestFieldMask',
-  full_name='proto3.TestFieldMask',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestFieldMask.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3803,
-  serialized_end=3861,
-)
-
-
-_TESTSTRUCT = _descriptor.Descriptor(
-  name='TestStruct',
-  full_name='proto3.TestStruct',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestStruct.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestStruct.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3863,
-  serialized_end=3964,
-)
-
-
-_TESTANY = _descriptor.Descriptor(
-  name='TestAny',
-  full_name='proto3.TestAny',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestAny.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestAny.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=3966,
-  serialized_end=4058,
-)
-
-
-_TESTVALUE = _descriptor.Descriptor(
-  name='TestValue',
-  full_name='proto3.TestValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestValue.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestValue.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4060,
-  serialized_end=4158,
-)
-
-
-_TESTLISTVALUE = _descriptor.Descriptor(
-  name='TestListValue',
-  full_name='proto3.TestListValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestListValue.value', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='repeated_value', full_name='proto3.TestListValue.repeated_value', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4160,
-  serialized_end=4270,
-)
-
-
-_TESTBOOLVALUE_BOOLMAPENTRY = _descriptor.Descriptor(
-  name='BoolMapEntry',
-  full_name='proto3.TestBoolValue.BoolMapEntry',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='key', full_name='proto3.TestBoolValue.BoolMapEntry.key', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestBoolValue.BoolMapEntry.value', index=1,
-      number=2, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=1501,
-  serialized_end=1547,
-)
-
-_TESTBOOLVALUE = _descriptor.Descriptor(
-  name='TestBoolValue',
-  full_name='proto3.TestBoolValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='bool_value', full_name='proto3.TestBoolValue.bool_value', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bool_map', full_name='proto3.TestBoolValue.bool_map', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTBOOLVALUE_BOOLMAPENTRY, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4273,
-  serialized_end=4410,
-)
-
-
-_TESTCUSTOMJSONNAME = _descriptor.Descriptor(
-  name='TestCustomJsonName',
-  full_name='proto3.TestCustomJsonName',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='proto3.TestCustomJsonName.value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4412,
-  serialized_end=4455,
-)
-
-
-_TESTEXTENSIONS = _descriptor.Descriptor(
-  name='TestExtensions',
-  full_name='proto3.TestExtensions',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='extensions', full_name='proto3.TestExtensions.extensions', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=4457,
-  serialized_end=4531,
-)
-
-_TESTMESSAGE.fields_by_name['enum_value'].enum_type = _ENUMTYPE
-_TESTMESSAGE.fields_by_name['message_value'].message_type = _MESSAGETYPE
-_TESTMESSAGE.fields_by_name['repeated_enum_value'].enum_type = _ENUMTYPE
-_TESTMESSAGE.fields_by_name['repeated_message_value'].message_type = _MESSAGETYPE
-_TESTONEOF.fields_by_name['oneof_enum_value'].enum_type = _ENUMTYPE
-_TESTONEOF.fields_by_name['oneof_message_value'].message_type = _MESSAGETYPE
-_TESTONEOF.oneofs_by_name['oneof_value'].fields.append(
-  _TESTONEOF.fields_by_name['oneof_int32_value'])
-_TESTONEOF.fields_by_name['oneof_int32_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value']
-_TESTONEOF.oneofs_by_name['oneof_value'].fields.append(
-  _TESTONEOF.fields_by_name['oneof_string_value'])
-_TESTONEOF.fields_by_name['oneof_string_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value']
-_TESTONEOF.oneofs_by_name['oneof_value'].fields.append(
-  _TESTONEOF.fields_by_name['oneof_bytes_value'])
-_TESTONEOF.fields_by_name['oneof_bytes_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value']
-_TESTONEOF.oneofs_by_name['oneof_value'].fields.append(
-  _TESTONEOF.fields_by_name['oneof_enum_value'])
-_TESTONEOF.fields_by_name['oneof_enum_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value']
-_TESTONEOF.oneofs_by_name['oneof_value'].fields.append(
-  _TESTONEOF.fields_by_name['oneof_message_value'])
-_TESTONEOF.fields_by_name['oneof_message_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value']
-_TESTMAP_BOOLMAPENTRY.containing_type = _TESTMAP
-_TESTMAP_INT32MAPENTRY.containing_type = _TESTMAP
-_TESTMAP_INT64MAPENTRY.containing_type = _TESTMAP
-_TESTMAP_UINT32MAPENTRY.containing_type = _TESTMAP
-_TESTMAP_UINT64MAPENTRY.containing_type = _TESTMAP
-_TESTMAP_STRINGMAPENTRY.containing_type = _TESTMAP
-_TESTMAP.fields_by_name['bool_map'].message_type = _TESTMAP_BOOLMAPENTRY
-_TESTMAP.fields_by_name['int32_map'].message_type = _TESTMAP_INT32MAPENTRY
-_TESTMAP.fields_by_name['int64_map'].message_type = _TESTMAP_INT64MAPENTRY
-_TESTMAP.fields_by_name['uint32_map'].message_type = _TESTMAP_UINT32MAPENTRY
-_TESTMAP.fields_by_name['uint64_map'].message_type = _TESTMAP_UINT64MAPENTRY
-_TESTMAP.fields_by_name['string_map'].message_type = _TESTMAP_STRINGMAPENTRY
-_TESTNESTEDMAP_BOOLMAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_INT32MAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_INT64MAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_UINT32MAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_UINT64MAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_STRINGMAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_MAPMAPENTRY.fields_by_name['value'].message_type = _TESTNESTEDMAP
-_TESTNESTEDMAP_MAPMAPENTRY.containing_type = _TESTNESTEDMAP
-_TESTNESTEDMAP.fields_by_name['bool_map'].message_type = _TESTNESTEDMAP_BOOLMAPENTRY
-_TESTNESTEDMAP.fields_by_name['int32_map'].message_type = _TESTNESTEDMAP_INT32MAPENTRY
-_TESTNESTEDMAP.fields_by_name['int64_map'].message_type = _TESTNESTEDMAP_INT64MAPENTRY
-_TESTNESTEDMAP.fields_by_name['uint32_map'].message_type = _TESTNESTEDMAP_UINT32MAPENTRY
-_TESTNESTEDMAP.fields_by_name['uint64_map'].message_type = _TESTNESTEDMAP_UINT64MAPENTRY
-_TESTNESTEDMAP.fields_by_name['string_map'].message_type = _TESTNESTEDMAP_STRINGMAPENTRY
-_TESTNESTEDMAP.fields_by_name['map_map'].message_type = _TESTNESTEDMAP_MAPMAPENTRY
-_TESTWRAPPER.fields_by_name['bool_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
-_TESTWRAPPER.fields_by_name['int32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
-_TESTWRAPPER.fields_by_name['int64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
-_TESTWRAPPER.fields_by_name['uint32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE
-_TESTWRAPPER.fields_by_name['uint64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE
-_TESTWRAPPER.fields_by_name['float_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE
-_TESTWRAPPER.fields_by_name['double_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
-_TESTWRAPPER.fields_by_name['string_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
-_TESTWRAPPER.fields_by_name['bytes_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE
-_TESTWRAPPER.fields_by_name['repeated_bool_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
-_TESTWRAPPER.fields_by_name['repeated_int32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
-_TESTWRAPPER.fields_by_name['repeated_int64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
-_TESTWRAPPER.fields_by_name['repeated_uint32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE
-_TESTWRAPPER.fields_by_name['repeated_uint64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE
-_TESTWRAPPER.fields_by_name['repeated_float_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE
-_TESTWRAPPER.fields_by_name['repeated_double_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
-_TESTWRAPPER.fields_by_name['repeated_string_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
-_TESTWRAPPER.fields_by_name['repeated_bytes_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE
-_TESTTIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TESTTIMESTAMP.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
-_TESTDURATION.fields_by_name['value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
-_TESTDURATION.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
-_TESTFIELDMASK.fields_by_name['value'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
-_TESTSTRUCT.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
-_TESTSTRUCT.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
-_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_TESTANY.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
-_TESTVALUE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE
-_TESTVALUE.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE
-_TESTLISTVALUE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._LISTVALUE
-_TESTLISTVALUE.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._LISTVALUE
-_TESTBOOLVALUE_BOOLMAPENTRY.containing_type = _TESTBOOLVALUE
-_TESTBOOLVALUE.fields_by_name['bool_map'].message_type = _TESTBOOLVALUE_BOOLMAPENTRY
-_TESTEXTENSIONS.fields_by_name['extensions'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTALLEXTENSIONS
-DESCRIPTOR.message_types_by_name['MessageType'] = _MESSAGETYPE
-DESCRIPTOR.message_types_by_name['TestMessage'] = _TESTMESSAGE
-DESCRIPTOR.message_types_by_name['TestOneof'] = _TESTONEOF
-DESCRIPTOR.message_types_by_name['TestMap'] = _TESTMAP
-DESCRIPTOR.message_types_by_name['TestNestedMap'] = _TESTNESTEDMAP
-DESCRIPTOR.message_types_by_name['TestWrapper'] = _TESTWRAPPER
-DESCRIPTOR.message_types_by_name['TestTimestamp'] = _TESTTIMESTAMP
-DESCRIPTOR.message_types_by_name['TestDuration'] = _TESTDURATION
-DESCRIPTOR.message_types_by_name['TestFieldMask'] = _TESTFIELDMASK
-DESCRIPTOR.message_types_by_name['TestStruct'] = _TESTSTRUCT
-DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
-DESCRIPTOR.message_types_by_name['TestValue'] = _TESTVALUE
-DESCRIPTOR.message_types_by_name['TestListValue'] = _TESTLISTVALUE
-DESCRIPTOR.message_types_by_name['TestBoolValue'] = _TESTBOOLVALUE
-DESCRIPTOR.message_types_by_name['TestCustomJsonName'] = _TESTCUSTOMJSONNAME
-DESCRIPTOR.message_types_by_name['TestExtensions'] = _TESTEXTENSIONS
-DESCRIPTOR.enum_types_by_name['EnumType'] = _ENUMTYPE
-
-MessageType = _reflection.GeneratedProtocolMessageType('MessageType', (_message.Message,), dict(
-  DESCRIPTOR = _MESSAGETYPE,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.MessageType)
-  ))
-_sym_db.RegisterMessage(MessageType)
-
-TestMessage = _reflection.GeneratedProtocolMessageType('TestMessage', (_message.Message,), dict(
-  DESCRIPTOR = _TESTMESSAGE,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestMessage)
-  ))
-_sym_db.RegisterMessage(TestMessage)
-
-TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), dict(
-  DESCRIPTOR = _TESTONEOF,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestOneof)
-  ))
-_sym_db.RegisterMessage(TestOneof)
-
-TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), dict(
-
-  BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_BOOLMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.BoolMapEntry)
-    ))
-  ,
-
-  Int32MapEntry = _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_INT32MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.Int32MapEntry)
-    ))
-  ,
-
-  Int64MapEntry = _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_INT64MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.Int64MapEntry)
-    ))
-  ,
-
-  Uint32MapEntry = _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_UINT32MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint32MapEntry)
-    ))
-  ,
-
-  Uint64MapEntry = _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_UINT64MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint64MapEntry)
-    ))
-  ,
-
-  StringMapEntry = _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTMAP_STRINGMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestMap.StringMapEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTMAP,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestMap)
-  ))
-_sym_db.RegisterMessage(TestMap)
-_sym_db.RegisterMessage(TestMap.BoolMapEntry)
-_sym_db.RegisterMessage(TestMap.Int32MapEntry)
-_sym_db.RegisterMessage(TestMap.Int64MapEntry)
-_sym_db.RegisterMessage(TestMap.Uint32MapEntry)
-_sym_db.RegisterMessage(TestMap.Uint64MapEntry)
-_sym_db.RegisterMessage(TestMap.StringMapEntry)
-
-TestNestedMap = _reflection.GeneratedProtocolMessageType('TestNestedMap', (_message.Message,), dict(
-
-  BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_BOOLMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.BoolMapEntry)
-    ))
-  ,
-
-  Int32MapEntry = _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_INT32MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int32MapEntry)
-    ))
-  ,
-
-  Int64MapEntry = _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_INT64MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int64MapEntry)
-    ))
-  ,
-
-  Uint32MapEntry = _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_UINT32MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint32MapEntry)
-    ))
-  ,
-
-  Uint64MapEntry = _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_UINT64MAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint64MapEntry)
-    ))
-  ,
-
-  StringMapEntry = _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_STRINGMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.StringMapEntry)
-    ))
-  ,
-
-  MapMapEntry = _reflection.GeneratedProtocolMessageType('MapMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTNESTEDMAP_MAPMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.MapMapEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTNESTEDMAP,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestNestedMap)
-  ))
-_sym_db.RegisterMessage(TestNestedMap)
-_sym_db.RegisterMessage(TestNestedMap.BoolMapEntry)
-_sym_db.RegisterMessage(TestNestedMap.Int32MapEntry)
-_sym_db.RegisterMessage(TestNestedMap.Int64MapEntry)
-_sym_db.RegisterMessage(TestNestedMap.Uint32MapEntry)
-_sym_db.RegisterMessage(TestNestedMap.Uint64MapEntry)
-_sym_db.RegisterMessage(TestNestedMap.StringMapEntry)
-_sym_db.RegisterMessage(TestNestedMap.MapMapEntry)
-
-TestWrapper = _reflection.GeneratedProtocolMessageType('TestWrapper', (_message.Message,), dict(
-  DESCRIPTOR = _TESTWRAPPER,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestWrapper)
-  ))
-_sym_db.RegisterMessage(TestWrapper)
-
-TestTimestamp = _reflection.GeneratedProtocolMessageType('TestTimestamp', (_message.Message,), dict(
-  DESCRIPTOR = _TESTTIMESTAMP,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestTimestamp)
-  ))
-_sym_db.RegisterMessage(TestTimestamp)
-
-TestDuration = _reflection.GeneratedProtocolMessageType('TestDuration', (_message.Message,), dict(
-  DESCRIPTOR = _TESTDURATION,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestDuration)
-  ))
-_sym_db.RegisterMessage(TestDuration)
-
-TestFieldMask = _reflection.GeneratedProtocolMessageType('TestFieldMask', (_message.Message,), dict(
-  DESCRIPTOR = _TESTFIELDMASK,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestFieldMask)
-  ))
-_sym_db.RegisterMessage(TestFieldMask)
-
-TestStruct = _reflection.GeneratedProtocolMessageType('TestStruct', (_message.Message,), dict(
-  DESCRIPTOR = _TESTSTRUCT,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestStruct)
-  ))
-_sym_db.RegisterMessage(TestStruct)
-
-TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict(
-  DESCRIPTOR = _TESTANY,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestAny)
-  ))
-_sym_db.RegisterMessage(TestAny)
-
-TestValue = _reflection.GeneratedProtocolMessageType('TestValue', (_message.Message,), dict(
-  DESCRIPTOR = _TESTVALUE,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestValue)
-  ))
-_sym_db.RegisterMessage(TestValue)
-
-TestListValue = _reflection.GeneratedProtocolMessageType('TestListValue', (_message.Message,), dict(
-  DESCRIPTOR = _TESTLISTVALUE,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestListValue)
-  ))
-_sym_db.RegisterMessage(TestListValue)
-
-TestBoolValue = _reflection.GeneratedProtocolMessageType('TestBoolValue', (_message.Message,), dict(
-
-  BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict(
-    DESCRIPTOR = _TESTBOOLVALUE_BOOLMAPENTRY,
-    __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-    # @@protoc_insertion_point(class_scope:proto3.TestBoolValue.BoolMapEntry)
-    ))
-  ,
-  DESCRIPTOR = _TESTBOOLVALUE,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestBoolValue)
-  ))
-_sym_db.RegisterMessage(TestBoolValue)
-_sym_db.RegisterMessage(TestBoolValue.BoolMapEntry)
-
-TestCustomJsonName = _reflection.GeneratedProtocolMessageType('TestCustomJsonName', (_message.Message,), dict(
-  DESCRIPTOR = _TESTCUSTOMJSONNAME,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestCustomJsonName)
-  ))
-_sym_db.RegisterMessage(TestCustomJsonName)
-
-TestExtensions = _reflection.GeneratedProtocolMessageType('TestExtensions', (_message.Message,), dict(
-  DESCRIPTOR = _TESTEXTENSIONS,
-  __module__ = 'google.protobuf.util.json_format_proto3_pb2'
-  # @@protoc_insertion_point(class_scope:proto3.TestExtensions)
-  ))
-_sym_db.RegisterMessage(TestExtensions)
-
-
-_TESTMAP_BOOLMAPENTRY.has_options = True
-_TESTMAP_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_INT32MAPENTRY.has_options = True
-_TESTMAP_INT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_INT64MAPENTRY.has_options = True
-_TESTMAP_INT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_UINT32MAPENTRY.has_options = True
-_TESTMAP_UINT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_UINT64MAPENTRY.has_options = True
-_TESTMAP_UINT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTMAP_STRINGMAPENTRY.has_options = True
-_TESTMAP_STRINGMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_BOOLMAPENTRY.has_options = True
-_TESTNESTEDMAP_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_INT32MAPENTRY.has_options = True
-_TESTNESTEDMAP_INT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_INT64MAPENTRY.has_options = True
-_TESTNESTEDMAP_INT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_UINT32MAPENTRY.has_options = True
-_TESTNESTEDMAP_UINT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_UINT64MAPENTRY.has_options = True
-_TESTNESTEDMAP_UINT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_STRINGMAPENTRY.has_options = True
-_TESTNESTEDMAP_STRINGMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTNESTEDMAP_MAPMAPENTRY.has_options = True
-_TESTNESTEDMAP_MAPMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-_TESTBOOLVALUE_BOOLMAPENTRY.has_options = True
-_TESTBOOLVALUE_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/google/protobuf/wrappers_pb2.py b/tools/swarming_client/third_party/google/protobuf/wrappers_pb2.py
deleted file mode 100644
index 03de834..0000000
--- a/tools/swarming_client/third_party/google/protobuf/wrappers_pb2.py
+++ /dev/null
@@ -1,383 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: google/protobuf/wrappers.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='google/protobuf/wrappers.proto',
-  package='google.protobuf',
-  syntax='proto3',
-  serialized_pb=_b('\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x7f\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z*github.com/golang/protobuf/ptypes/wrappers\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-
-_DOUBLEVALUE = _descriptor.Descriptor(
-  name='DoubleValue',
-  full_name='google.protobuf.DoubleValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.DoubleValue.value', index=0,
-      number=1, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=51,
-  serialized_end=79,
-)
-
-
-_FLOATVALUE = _descriptor.Descriptor(
-  name='FloatValue',
-  full_name='google.protobuf.FloatValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.FloatValue.value', index=0,
-      number=1, type=2, cpp_type=6, label=1,
-      has_default_value=False, default_value=float(0),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=81,
-  serialized_end=108,
-)
-
-
-_INT64VALUE = _descriptor.Descriptor(
-  name='Int64Value',
-  full_name='google.protobuf.Int64Value',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.Int64Value.value', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=110,
-  serialized_end=137,
-)
-
-
-_UINT64VALUE = _descriptor.Descriptor(
-  name='UInt64Value',
-  full_name='google.protobuf.UInt64Value',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.UInt64Value.value', index=0,
-      number=1, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=139,
-  serialized_end=167,
-)
-
-
-_INT32VALUE = _descriptor.Descriptor(
-  name='Int32Value',
-  full_name='google.protobuf.Int32Value',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.Int32Value.value', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=169,
-  serialized_end=196,
-)
-
-
-_UINT32VALUE = _descriptor.Descriptor(
-  name='UInt32Value',
-  full_name='google.protobuf.UInt32Value',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.UInt32Value.value', index=0,
-      number=1, type=13, cpp_type=3, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=198,
-  serialized_end=226,
-)
-
-
-_BOOLVALUE = _descriptor.Descriptor(
-  name='BoolValue',
-  full_name='google.protobuf.BoolValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.BoolValue.value', index=0,
-      number=1, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=228,
-  serialized_end=254,
-)
-
-
-_STRINGVALUE = _descriptor.Descriptor(
-  name='StringValue',
-  full_name='google.protobuf.StringValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.StringValue.value', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=256,
-  serialized_end=284,
-)
-
-
-_BYTESVALUE = _descriptor.Descriptor(
-  name='BytesValue',
-  full_name='google.protobuf.BytesValue',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='value', full_name='google.protobuf.BytesValue.value', index=0,
-      number=1, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=286,
-  serialized_end=313,
-)
-
-DESCRIPTOR.message_types_by_name['DoubleValue'] = _DOUBLEVALUE
-DESCRIPTOR.message_types_by_name['FloatValue'] = _FLOATVALUE
-DESCRIPTOR.message_types_by_name['Int64Value'] = _INT64VALUE
-DESCRIPTOR.message_types_by_name['UInt64Value'] = _UINT64VALUE
-DESCRIPTOR.message_types_by_name['Int32Value'] = _INT32VALUE
-DESCRIPTOR.message_types_by_name['UInt32Value'] = _UINT32VALUE
-DESCRIPTOR.message_types_by_name['BoolValue'] = _BOOLVALUE
-DESCRIPTOR.message_types_by_name['StringValue'] = _STRINGVALUE
-DESCRIPTOR.message_types_by_name['BytesValue'] = _BYTESVALUE
-
-DoubleValue = _reflection.GeneratedProtocolMessageType('DoubleValue', (_message.Message,), dict(
-  DESCRIPTOR = _DOUBLEVALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.DoubleValue)
-  ))
-_sym_db.RegisterMessage(DoubleValue)
-
-FloatValue = _reflection.GeneratedProtocolMessageType('FloatValue', (_message.Message,), dict(
-  DESCRIPTOR = _FLOATVALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.FloatValue)
-  ))
-_sym_db.RegisterMessage(FloatValue)
-
-Int64Value = _reflection.GeneratedProtocolMessageType('Int64Value', (_message.Message,), dict(
-  DESCRIPTOR = _INT64VALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Int64Value)
-  ))
-_sym_db.RegisterMessage(Int64Value)
-
-UInt64Value = _reflection.GeneratedProtocolMessageType('UInt64Value', (_message.Message,), dict(
-  DESCRIPTOR = _UINT64VALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.UInt64Value)
-  ))
-_sym_db.RegisterMessage(UInt64Value)
-
-Int32Value = _reflection.GeneratedProtocolMessageType('Int32Value', (_message.Message,), dict(
-  DESCRIPTOR = _INT32VALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.Int32Value)
-  ))
-_sym_db.RegisterMessage(Int32Value)
-
-UInt32Value = _reflection.GeneratedProtocolMessageType('UInt32Value', (_message.Message,), dict(
-  DESCRIPTOR = _UINT32VALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.UInt32Value)
-  ))
-_sym_db.RegisterMessage(UInt32Value)
-
-BoolValue = _reflection.GeneratedProtocolMessageType('BoolValue', (_message.Message,), dict(
-  DESCRIPTOR = _BOOLVALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.BoolValue)
-  ))
-_sym_db.RegisterMessage(BoolValue)
-
-StringValue = _reflection.GeneratedProtocolMessageType('StringValue', (_message.Message,), dict(
-  DESCRIPTOR = _STRINGVALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.StringValue)
-  ))
-_sym_db.RegisterMessage(StringValue)
-
-BytesValue = _reflection.GeneratedProtocolMessageType('BytesValue', (_message.Message,), dict(
-  DESCRIPTOR = _BYTESVALUE,
-  __module__ = 'google.protobuf.wrappers_pb2'
-  # @@protoc_insertion_point(class_scope:google.protobuf.BytesValue)
-  ))
-_sym_db.RegisterMessage(BytesValue)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rWrappersProtoP\001Z*github.com/golang/protobuf/ptypes/wrappers\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/googleapiclient/README.swarming b/tools/swarming_client/third_party/googleapiclient/README.swarming
deleted file mode 100644
index 01775b0..0000000
--- a/tools/swarming_client/third_party/googleapiclient/README.swarming
+++ /dev/null
@@ -1,14 +0,0 @@
-Name: google-api-python-client
-Short Name: googleapiclient
-Version: 1.4.2
-Revision: 821:d83246e69b22
-Home-page: https://chromium.googlesource.com/external/github.com/google/google-api-python-client
-License: Apache 2.0
-Description:
-The Google API Client for Python is a client library for
-accessing the Plus, Moderator, and many other Google APIs.
-
-Local Modifications:
-- Installed using glyco (see ../README.txt for more info)
-- Removed apiclient
-- Added README.swarming
diff --git a/tools/swarming_client/third_party/googleapiclient/__init__.py b/tools/swarming_client/third_party/googleapiclient/__init__.py
deleted file mode 100644
index ceeae8d..0000000
--- a/tools/swarming_client/third_party/googleapiclient/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-__version__ = "1.4.2"
diff --git a/tools/swarming_client/third_party/googleapiclient/channel.py b/tools/swarming_client/third_party/googleapiclient/channel.py
deleted file mode 100644
index 702186b..0000000
--- a/tools/swarming_client/third_party/googleapiclient/channel.py
+++ /dev/null
@@ -1,287 +0,0 @@
-"""Channel notifications support.
-
-Classes and functions to support channel subscriptions and notifications
-on those channels.
-
-Notes:
-  - This code is based on experimental APIs and is subject to change.
-  - Notification does not deduplicate notification ids; that is up to
-    the receiver.
-  - Storing the Channel between calls is up to the caller.
-
-
-Example setting up a channel:
-
-  # Create a new channel that gets notifications via webhook.
-  channel = new_webhook_channel("https://example.com/my_web_hook")
-
-  # Store the channel, keyed by 'channel.id'. Store it before calling the
-  # watch method because notifications may start arriving before the watch
-  # method returns.
-  ...
-
-  resp = service.objects().watchAll(
-    bucket="some_bucket_id", body=channel.body()).execute()
-  channel.update(resp)
-
-  # Store the channel, keyed by 'channel.id'. Store it after being updated
-  # since the resource_id value will now be correct, and that's needed to
-  # stop a subscription.
-  ...
-
-
-An example Webhook implementation using webapp2. Note that webapp2 puts
-headers in a case insensitive dictionary, as headers aren't guaranteed to
-always be upper case.
-
-  id = self.request.headers[X_GOOG_CHANNEL_ID]
-
-  # Retrieve the channel by id.
-  channel = ...
-
-  # Parse notification from the headers, including validating the id.
-  n = notification_from_headers(channel, self.request.headers)
-
-  # Do app specific stuff with the notification here.
-  if n.resource_state == 'sync':
-    # Code to handle sync state.
-  elif n.resource_state == 'exists':
-    # Code to handle the exists state.
-  elif n.resource_state == 'not_exists':
-    # Code to handle the not exists state.
-
-
-Example of unsubscribing.
-
-  service.channels().stop(channel.body())
-"""
-from __future__ import absolute_import
-
-import datetime
-import uuid
-
-from googleapiclient import errors
-from oauth2client import util
-import six
-
-
-# The unix time epoch starts at midnight 1970.
-EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-# Map the names of the parameters in the JSON channel description to
-# the parameter names we use in the Channel class.
-CHANNEL_PARAMS = {
-    'address': 'address',
-    'id': 'id',
-    'expiration': 'expiration',
-    'params': 'params',
-    'resourceId': 'resource_id',
-    'resourceUri': 'resource_uri',
-    'type': 'type',
-    'token': 'token',
-    }
-
-X_GOOG_CHANNEL_ID     = 'X-GOOG-CHANNEL-ID'
-X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
-X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
-X_GOOG_RESOURCE_URI   = 'X-GOOG-RESOURCE-URI'
-X_GOOG_RESOURCE_ID    = 'X-GOOG-RESOURCE-ID'
-
-
-def _upper_header_keys(headers):
-  new_headers = {}
-  for k, v in six.iteritems(headers):
-    new_headers[k.upper()] = v
-  return new_headers
-
-
-class Notification(object):
-  """A Notification from a Channel.
-
-  Notifications are not usually constructed directly, but are returned
-  from functions like notification_from_headers().
-
-  Attributes:
-    message_number: int, The unique id number of this notification.
-    state: str, The state of the resource being monitored.
-    uri: str, The address of the resource being monitored.
-    resource_id: str, The unique identifier of the version of the resource at
-      this event.
-  """
-  @util.positional(5)
-  def __init__(self, message_number, state, resource_uri, resource_id):
-    """Notification constructor.
-
-    Args:
-      message_number: int, The unique id number of this notification.
-      state: str, The state of the resource being monitored. Can be one
-        of "exists", "not_exists", or "sync".
-      resource_uri: str, The address of the resource being monitored.
-      resource_id: str, The identifier of the watched resource.
-    """
-    self.message_number = message_number
-    self.state = state
-    self.resource_uri = resource_uri
-    self.resource_id = resource_id
-
-
-class Channel(object):
-  """A Channel for notifications.
-
-  Usually not constructed directly, instead it is returned from helper
-  functions like new_webhook_channel().
-
-  Attributes:
-    type: str, The type of delivery mechanism used by this channel. For
-      example, 'web_hook'.
-    id: str, A UUID for the channel.
-    token: str, An arbitrary string associated with the channel that
-      is delivered to the target address with each event delivered
-      over this channel.
-    address: str, The address of the receiving entity where events are
-      delivered. Specific to the channel type.
-    expiration: int, The time, in milliseconds from the epoch, when this
-      channel will expire.
-    params: dict, A dictionary of string to string, with additional parameters
-      controlling delivery channel behavior.
-    resource_id: str, An opaque id that identifies the resource that is
-      being watched. Stable across different API versions.
-    resource_uri: str, The canonicalized ID of the watched resource.
-  """
-
-  @util.positional(5)
-  def __init__(self, type, id, token, address, expiration=None,
-               params=None, resource_id="", resource_uri=""):
-    """Create a new Channel.
-
-    In user code, this Channel constructor will not typically be called
-    manually since there are functions for creating channels for each specific
-    type with a more customized set of arguments to pass.
-
-    Args:
-      type: str, The type of delivery mechanism used by this channel. For
-        example, 'web_hook'.
-      id: str, A UUID for the channel.
-      token: str, An arbitrary string associated with the channel that
-        is delivered to the target address with each event delivered
-        over this channel.
-      address: str,  The address of the receiving entity where events are
-        delivered. Specific to the channel type.
-      expiration: int, The time, in milliseconds from the epoch, when this
-        channel will expire.
-      params: dict, A dictionary of string to string, with additional parameters
-        controlling delivery channel behavior.
-      resource_id: str, An opaque id that identifies the resource that is
-        being watched. Stable across different API versions.
-      resource_uri: str, The canonicalized ID of the watched resource.
-    """
-    self.type = type
-    self.id = id
-    self.token = token
-    self.address = address
-    self.expiration = expiration
-    self.params = params
-    self.resource_id = resource_id
-    self.resource_uri = resource_uri
-
-  def body(self):
-    """Build a body from the Channel.
-
-    Constructs a dictionary that's appropriate for passing into watch()
-    methods as the value of the body argument.
-
-    Returns:
-      A dictionary representation of the channel.
-    """
-    result = {
-        'id': self.id,
-        'token': self.token,
-        'type': self.type,
-        'address': self.address
-        }
-    if self.params:
-      result['params'] = self.params
-    if self.resource_id:
-      result['resourceId'] = self.resource_id
-    if self.resource_uri:
-      result['resourceUri'] = self.resource_uri
-    if self.expiration:
-      result['expiration'] = self.expiration
-
-    return result
-
-  def update(self, resp):
-    """Update a channel with information from the response of watch().
-
-    When a request is sent to watch() a resource, the response returned
-    from the watch() request is a dictionary with updated channel information,
-    such as the resource_id, which is needed when stopping a subscription.
-
-    Args:
-      resp: dict, The response from a watch() method.
-    """
-    for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
-      value = resp.get(json_name)
-      if value is not None:
-        setattr(self, param_name, value)
-
-
-def notification_from_headers(channel, headers):
-  """Parse a notification from the webhook request headers, validate
-    the notification, and return a Notification object.
-
-  Args:
-    channel: Channel, The channel that the notification is associated with.
-    headers: dict, A dictionary like object that contains the request headers
-      from the webhook HTTP request.
-
-  Returns:
-    A Notification object.
-
-  Raises:
-    errors.InvalidNotificationError if the notification is invalid.
-    ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
-  """
-  headers = _upper_header_keys(headers)
-  channel_id = headers[X_GOOG_CHANNEL_ID]
-  if channel.id != channel_id:
-    raise errors.InvalidNotificationError(
-        'Channel id mismatch: %s != %s' % (channel.id, channel_id))
-  else:
-    message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
-    state = headers[X_GOOG_RESOURCE_STATE]
-    resource_uri = headers[X_GOOG_RESOURCE_URI]
-    resource_id = headers[X_GOOG_RESOURCE_ID]
-    return Notification(message_number, state, resource_uri, resource_id)
-
-
-@util.positional(2)
-def new_webhook_channel(url, token=None, expiration=None, params=None):
-    """Create a new webhook Channel.
-
-    Args:
-      url: str, URL to post notifications to.
-      token: str, An arbitrary string associated with the channel that
-        is delivered to the target address with each notification delivered
-        over this channel.
-      expiration: datetime.datetime, A time in the future when the channel
-        should expire. Can also be None if the subscription should use the
-        default expiration. Note that different services may have different
-        limits on how long a subscription lasts. Check the response from the
-        watch() method to see the value the service has set for an expiration
-        time.
-      params: dict, Extra parameters to pass on channel creation. Currently
-        not used for webhook channels.
-    """
-    expiration_ms = 0
-    if expiration:
-      delta = expiration - EPOCH
-      expiration_ms = delta.microseconds/1000 + (
-          delta.seconds + delta.days*24*3600)*1000
-      if expiration_ms < 0:
-        expiration_ms = 0
-
-    return Channel('web_hook', str(uuid.uuid4()),
-                   token, url, expiration=expiration_ms,
-                   params=params)
-
diff --git a/tools/swarming_client/third_party/googleapiclient/discovery.py b/tools/swarming_client/third_party/googleapiclient/discovery.py
deleted file mode 100644
index cee5628..0000000
--- a/tools/swarming_client/third_party/googleapiclient/discovery.py
+++ /dev/null
@@ -1,1088 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Client for discovery based APIs.
-
-A client library for Google's discovery based APIs.
-"""
-from __future__ import absolute_import
-import six
-from six.moves import zip
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = [
-    'build',
-    'build_from_document',
-    'fix_method_name',
-    'key2param',
-    ]
-
-from six import BytesIO
-from six.moves import http_client
-from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
-  urlunparse, parse_qsl
-
-# Standard library imports
-import copy
-try:
-  from email.generator import BytesGenerator
-except ImportError:
-  from email.generator import Generator as BytesGenerator
-from email.mime.multipart import MIMEMultipart
-from email.mime.nonmultipart import MIMENonMultipart
-import json
-import keyword
-import logging
-import mimetypes
-import os
-import re
-
-# Third-party imports
-import httplib2
-import uritemplate
-
-# Local imports
-from googleapiclient import mimeparse
-from googleapiclient.errors import HttpError
-from googleapiclient.errors import InvalidJsonError
-from googleapiclient.errors import MediaUploadSizeError
-from googleapiclient.errors import UnacceptableMimeTypeError
-from googleapiclient.errors import UnknownApiNameOrVersion
-from googleapiclient.errors import UnknownFileType
-from googleapiclient.http import BatchHttpRequest
-from googleapiclient.http import HttpRequest
-from googleapiclient.http import MediaFileUpload
-from googleapiclient.http import MediaUpload
-from googleapiclient.model import JsonModel
-from googleapiclient.model import MediaModel
-from googleapiclient.model import RawModel
-from googleapiclient.schema import Schemas
-from oauth2client.client import GoogleCredentials
-from oauth2client.util import _add_query_parameter
-from oauth2client.util import positional
-
-
-# The client library requires a version of httplib2 that supports RETRIES.
-httplib2.RETRIES = 1
-
-logger = logging.getLogger(__name__)
-
-URITEMPLATE = re.compile('{[^}]*}')
-VARNAME = re.compile('[a-zA-Z0-9_-]+')
-DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
-                 '{api}/{apiVersion}/rest')
-DEFAULT_METHOD_DOC = 'A description of how to use this function'
-HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
-_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
-BODY_PARAMETER_DEFAULT_VALUE = {
-    'description': 'The request body.',
-    'type': 'object',
-    'required': True,
-}
-MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
-    'description': ('The filename of the media request body, or an instance '
-                    'of a MediaUpload object.'),
-    'type': 'string',
-    'required': False,
-}
-
-# Parameters accepted by the stack, but not visible via discovery.
-# TODO(dhermes): Remove 'userip' in 'v2'.
-STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
-STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
-
-# Library-specific reserved words beyond Python keywords.
-RESERVED_WORDS = frozenset(['body'])
-
-# patch _write_lines to avoid munging '\r' into '\n'
-# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
-class _BytesGenerator(BytesGenerator):
-  _write_lines = BytesGenerator.write
-
-def fix_method_name(name):
-  """Fix method names to avoid reserved word conflicts.
-
-  Args:
-    name: string, method name.
-
-  Returns:
-    The name with a '_' prefixed if the name is a reserved word.
-  """
-  if keyword.iskeyword(name) or name in RESERVED_WORDS:
-    return name + '_'
-  else:
-    return name
-
-
-def key2param(key):
-  """Converts key names into parameter names.
-
-  For example, converting "max-results" -> "max_results"
-
-  Args:
-    key: string, the method key name.
-
-  Returns:
-    A safe method name based on the key name.
-  """
-  result = []
-  key = list(key)
-  if not key[0].isalpha():
-    result.append('x')
-  for c in key:
-    if c.isalnum():
-      result.append(c)
-    else:
-      result.append('_')
-
-  return ''.join(result)
-
-
-@positional(2)
-def build(serviceName,
-          version,
-          http=None,
-          discoveryServiceUrl=DISCOVERY_URI,
-          developerKey=None,
-          model=None,
-          requestBuilder=HttpRequest,
-          credentials=None,
-          cache_discovery=True,
-          cache=None):
-  """Construct a Resource for interacting with an API.
-
-  Construct a Resource object for interacting with an API. The serviceName and
-  version are the names from the Discovery service.
-
-  Args:
-    serviceName: string, name of the service.
-    version: string, the version of the service.
-    http: httplib2.Http, An instance of httplib2.Http or something that acts
-      like it that HTTP requests will be made through.
-    discoveryServiceUrl: string, a URI Template that points to the location of
-      the discovery service. It should have two parameters {api} and
-      {apiVersion} that when filled in produce an absolute URI to the discovery
-      document for that service.
-    developerKey: string, key obtained from
-      https://code.google.com/apis/console.
-    model: googleapiclient.Model, converts to and from the wire format.
-    requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
-      request.
-    credentials: oauth2client.Credentials, credentials to be used for
-      authentication.
-    cache_discovery: Boolean, whether or not to cache the discovery doc.
-    cache: googleapiclient.discovery_cache.base.CacheBase, an optional
-      cache object for the discovery documents.
-
-  Returns:
-    A Resource object with methods for interacting with the service.
-  """
-  params = {
-      'api': serviceName,
-      'apiVersion': version
-      }
-
-  if http is None:
-    http = httplib2.Http()
-
-  requested_url = uritemplate.expand(discoveryServiceUrl, params)
-
-  try:
-    content = _retrieve_discovery_doc(requested_url, http, cache_discovery,
-                                      cache)
-  except HttpError as e:
-    if e.resp.status == http_client.NOT_FOUND:
-      raise UnknownApiNameOrVersion("name: %s  version: %s" % (serviceName,
-                                                               version))
-    else:
-      raise e
-
-  return build_from_document(content, base=discoveryServiceUrl, http=http,
-      developerKey=developerKey, model=model, requestBuilder=requestBuilder,
-      credentials=credentials)
-
-
-def _retrieve_discovery_doc(url, http, cache_discovery, cache=None):
-  """Retrieves the discovery_doc from cache or the internet.
-
-  Args:
-    url: string, the URL of the discovery document.
-    http: httplib2.Http, An instance of httplib2.Http or something that acts
-      like it through which HTTP requests will be made.
-    cache_discovery: Boolean, whether or not to cache the discovery doc.
-    cache: googleapiclient.discovery_cache.base.Cache, an optional cache
-      object for the discovery documents.
-
-  Returns:
-    A unicode string representation of the discovery document.
-  """
-  if cache_discovery:
-    from . import discovery_cache
-    from .discovery_cache import base
-    if cache is None:
-      cache = discovery_cache.autodetect()
-    if cache:
-      content = cache.get(url)
-      if content:
-        return content
-
-  actual_url = url
-  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
-  # variable that contains the network address of the client sending the
-  # request. If it exists then add that to the request for the discovery
-  # document to avoid exceeding the quota on discovery requests.
-  if 'REMOTE_ADDR' in os.environ:
-    actual_url = _add_query_parameter(url, 'userIp', os.environ['REMOTE_ADDR'])
-  logger.info('URL being requested: GET %s', actual_url)
-
-  resp, content = http.request(actual_url)
-
-  if resp.status >= 400:
-    raise HttpError(resp, content, uri=actual_url)
-
-  try:
-    content = content.decode('utf-8')
-  except AttributeError:
-    pass
-
-  try:
-    service = json.loads(content)
-  except ValueError as e:
-    logger.error('Failed to parse as JSON: ' + content)
-    raise InvalidJsonError()
-  if cache_discovery and cache:
-    cache.set(url, content)
-  return content
-
-
-@positional(1)
-def build_from_document(
-    service,
-    base=None,
-    future=None,
-    http=None,
-    developerKey=None,
-    model=None,
-    requestBuilder=HttpRequest,
-    credentials=None):
-  """Create a Resource for interacting with an API.
-
-  Same as `build()`, but constructs the Resource object from a discovery
-  document that it is given, as opposed to retrieving one over HTTP.
-
-  Args:
-    service: string or object, the JSON discovery document describing the API.
-      The value passed in may either be the JSON string or the deserialized
-      JSON.
-    base: string, base URI for all HTTP requests, usually the discovery URI.
-      This parameter is no longer used as rootUrl and servicePath are included
-      within the discovery document. (deprecated)
-    future: string, discovery document with future capabilities (deprecated).
-    http: httplib2.Http, An instance of httplib2.Http or something that acts
-      like it that HTTP requests will be made through.
-    developerKey: string, Key for controlling API usage, generated
-      from the API Console.
-    model: Model class instance that serializes and de-serializes requests and
-      responses.
-    requestBuilder: Takes an http request and packages it up to be executed.
-    credentials: object, credentials to be used for authentication.
-
-  Returns:
-    A Resource object with methods for interacting with the service.
-  """
-
-  if http is None:
-    http = httplib2.Http()
-
-  # future is no longer used.
-  future = {}
-
-  if isinstance(service, six.string_types):
-    service = json.loads(service)
-  base = urljoin(service['rootUrl'], service['servicePath'])
-  schema = Schemas(service)
-
-  if credentials:
-    # If credentials were passed in, we could have two cases:
-    # 1. the scopes were specified, in which case the given credentials
-    #    are used for authorizing the http;
-    # 2. the scopes were not provided (meaning the Application Default
-    #    Credentials are to be used). In this case, the Application Default
-    #    Credentials are built and used instead of the original credentials.
-    #    If there are no scopes found (meaning the given service requires no
-    #    authentication), there is no authorization of the http.
-    if (isinstance(credentials, GoogleCredentials) and
-        credentials.create_scoped_required()):
-      scopes = service.get('auth', {}).get('oauth2', {}).get('scopes', {})
-      if scopes:
-        credentials = credentials.create_scoped(list(scopes.keys()))
-      else:
-        # No need to authorize the http object
-        # if the service does not require authentication.
-        credentials = None
-
-    if credentials:
-      http = credentials.authorize(http)
-
-  if model is None:
-    features = service.get('features', [])
-    model = JsonModel('dataWrapper' in features)
-  return Resource(http=http, baseUrl=base, model=model,
-                  developerKey=developerKey, requestBuilder=requestBuilder,
-                  resourceDesc=service, rootDesc=service, schema=schema)
-
-
-def _cast(value, schema_type):
-  """Convert value to a string based on JSON Schema type.
-
-  See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
-  JSON Schema.
-
-  Args:
-    value: any, the value to convert
-    schema_type: string, the type that value should be interpreted as
-
-  Returns:
-    A string representation of 'value' based on the schema_type.
-  """
-  if schema_type == 'string':
-    if type(value) == type('') or type(value) == type(u''):
-      return value
-    else:
-      return str(value)
-  elif schema_type == 'integer':
-    return str(int(value))
-  elif schema_type == 'number':
-    return str(float(value))
-  elif schema_type == 'boolean':
-    return str(bool(value)).lower()
-  else:
-    if type(value) == type('') or type(value) == type(u''):
-      return value
-    else:
-      return str(value)
-
-
-def _media_size_to_long(maxSize):
-  """Convert a string media size, such as 10GB or 3TB into an integer.
-
-  Args:
-    maxSize: string, size as a string, such as 2MB or 7GB.
-
-  Returns:
-    The size as an integer value.
-  """
-  if len(maxSize) < 2:
-    return 0
-  units = maxSize[-2:].upper()
-  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
-  if bit_shift is not None:
-    return int(maxSize[:-2]) << bit_shift
-  else:
-    return int(maxSize)
-
-
-def _media_path_url_from_info(root_desc, path_url):
-  """Creates an absolute media path URL.
-
-  Constructed using the API root URI and service path from the discovery
-  document and the relative path for the API method.
-
-  Args:
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    path_url: String; the relative URL for the API method. Relative to the API
-        root, which is specified in the discovery document.
-
-  Returns:
-    String; the absolute URI for media upload for the API method.
-  """
-  return '%(root)supload/%(service_path)s%(path)s' % {
-      'root': root_desc['rootUrl'],
-      'service_path': root_desc['servicePath'],
-      'path': path_url,
-  }
-
-
-def _fix_up_parameters(method_desc, root_desc, http_method):
-  """Updates parameters of an API method with values specific to this library.
-
-  Specifically, adds whatever global parameters are specified by the API to the
-  parameters for the individual method. Also adds parameters which don't
-  appear in the discovery document, but are available to all discovery based
-  APIs (these are listed in STACK_QUERY_PARAMETERS).
-
-  SIDE EFFECTS: This updates the parameters dictionary object in the method
-  description.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    http_method: String; the HTTP method used to call the API method described
-        in method_desc.
-
-  Returns:
-    The updated Dictionary stored in the 'parameters' key of the method
-        description dictionary.
-  """
-  parameters = method_desc.setdefault('parameters', {})
-
-  # Add in the parameters common to all methods.
-  for name, description in six.iteritems(root_desc.get('parameters', {})):
-    parameters[name] = description
-
-  # Add in undocumented query parameters.
-  for name in STACK_QUERY_PARAMETERS:
-    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
-
-  # Add 'body' (our own reserved word) to parameters if the method supports
-  # a request payload.
-  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
-    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
-    body.update(method_desc['request'])
-    parameters['body'] = body
-
-  return parameters
-
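A small sketch of the effect of _fix_up_parameters, assuming this module's constants (STACK_QUERY_PARAMETERS, BODY_PARAMETER_DEFAULT_VALUE, HTTP_PAYLOAD_METHODS) are available as defined earlier; the method and root descriptions are made up for illustration:

method_desc = {'httpMethod': 'POST', 'request': {'$ref': 'File'}}
root_desc = {'parameters': {'alt': {'type': 'string', 'location': 'query'}}}

params = _fix_up_parameters(method_desc, root_desc, 'POST')
assert 'alt' in params                       # global parameter copied in
assert 'body' in params                      # payload method with a 'request' section
assert params['body']['$ref'] == 'File'
assert params is method_desc['parameters']   # updated in place (side effect)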
-
-def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
-  """Updates parameters of API by adding 'media_body' if supported by method.
-
-  SIDE EFFECTS: If the method supports media upload and has a required body,
-  sets body to be optional (required=False) instead. Also, if there is a
-  'mediaUpload' in the method description, adds 'media_upload' key to
-  parameters.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-    path_url: String; the relative URL for the API method. Relative to the API
-        root, which is specified in the discovery document.
-    parameters: A dictionary describing method parameters for method described
-        in method_desc.
-
-  Returns:
-    Triple (accept, max_size, media_path_url) where:
-      - accept is a list of strings representing what content types are
-        accepted for media upload. Defaults to empty list if not in the
-        discovery document.
-      - max_size is a long representing the max size in bytes allowed for a
-        media upload. Defaults to 0L if not in the discovery document.
-      - media_path_url is a String; the absolute URI for media upload for the
-        API method. Constructed using the API root URI and service path from
-        the discovery document and the relative path for the API method. If
-        media upload is not supported, this is None.
-  """
-  media_upload = method_desc.get('mediaUpload', {})
-  accept = media_upload.get('accept', [])
-  max_size = _media_size_to_long(media_upload.get('maxSize', ''))
-  media_path_url = None
-
-  if media_upload:
-    media_path_url = _media_path_url_from_info(root_desc, path_url)
-    parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
-    if 'body' in parameters:
-      parameters['body']['required'] = False
-
-  return accept, max_size, media_path_url
-
-
-def _fix_up_method_description(method_desc, root_desc):
-  """Updates a method description in a discovery document.
-
-  SIDE EFFECTS: Changes the parameters dictionary in the method description with
-  extra parameters which are used locally.
-
-  Args:
-    method_desc: Dictionary with metadata describing an API method. Value comes
-        from the dictionary of methods stored in the 'methods' key in the
-        deserialized discovery document.
-    root_desc: Dictionary; the entire original deserialized discovery document.
-
-  Returns:
-    Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
-    where:
-      - path_url is a String; the relative URL for the API method. Relative to
-        the API root, which is specified in the discovery document.
-      - http_method is a String; the HTTP method used to call the API method
-        described in the method description.
-      - method_id is a String; the name of the RPC method associated with the
-        API method, and is in the method description in the 'id' key.
-      - accept is a list of strings representing what content types are
-        accepted for media upload. Defaults to empty list if not in the
-        discovery document.
-      - max_size is a long representing the max size in bytes allowed for a
-        media upload. Defaults to 0L if not in the discovery document.
-      - media_path_url is a String; the absolute URI for media upload for the
-        API method. Constructed using the API root URI and service path from
-        the discovery document and the relative path for the API method. If
-        media upload is not supported, this is None.
-  """
-  path_url = method_desc['path']
-  http_method = method_desc['httpMethod']
-  method_id = method_desc['id']
-
-  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
-  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
-  # 'parameters' key and needs to know if there is a 'body' parameter because it
-  # also sets a 'media_body' parameter.
-  accept, max_size, media_path_url = _fix_up_media_upload(
-      method_desc, root_desc, path_url, parameters)
-
-  return path_url, http_method, method_id, accept, max_size, media_path_url
-
-
-def _urljoin(base, url):
-  """Custom urljoin replacement supporting : before / in url."""
-  # In general, it's unsafe to simply join base and url. However, for
-  # the case of discovery documents, we know:
-  #  * base will never contain params, query, or fragment
-  #  * url will never contain a scheme or net_loc.
-  # In general, this means we can safely join on /; we just need to
-  # ensure we end up with precisely one / joining base and url. The
-  # exception here is the case of media uploads, where url will be an
-  # absolute url.
-  if url.startswith('http://') or url.startswith('https://'):
-    return urljoin(base, url)
-  new_base = base if base.endswith('/') else base + '/'
-  new_url = url[1:] if url.startswith('/') else url
-  return new_base + new_url
-
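The joining rule above always produces exactly one '/' between base and url, and defers to the standard urljoin only for absolute (media upload) URLs; for example:

assert _urljoin('https://www.googleapis.com/drive/v2/', '/files') == \
    'https://www.googleapis.com/drive/v2/files'
assert _urljoin('https://www.googleapis.com/drive/v2', 'files') == \
    'https://www.googleapis.com/drive/v2/files'
# Absolute media-upload URLs pass straight through urljoin().
assert _urljoin('https://www.googleapis.com/drive/v2/',
                'https://www.googleapis.com/upload/drive/v2/files') == \
    'https://www.googleapis.com/upload/drive/v2/files'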
-
-# TODO(dhermes): Convert this class to ResourceMethod and make it callable
-class ResourceMethodParameters(object):
-  """Represents the parameters associated with a method.
-
-  Attributes:
-    argmap: Map from method parameter name (string) to query parameter name
-        (string).
-    required_params: List of required parameters (represented by parameter
-        name as string).
-    repeated_params: List of repeated parameters (represented by parameter
-        name as string).
-    pattern_params: Map from method parameter name (string) to regular
-        expression (as a string). If the pattern is set for a parameter, the
-        value for that parameter must match the regular expression.
-    query_params: List of parameters (represented by parameter name as string)
-        that will be used in the query string.
-    path_params: Set of parameters (represented by parameter name as string)
-        that will be used in the base URL path.
-    param_types: Map from method parameter name (string) to parameter type. Type
-        can be any valid JSON schema type; valid values are 'any', 'array',
-        'boolean', 'integer', 'number', 'object', or 'string'. Reference:
-        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
-    enum_params: Map from method parameter name (string) to list of strings,
-       where each list of strings is the list of acceptable enum values.
-  """
-
-  def __init__(self, method_desc):
-    """Constructor for ResourceMethodParameters.
-
-    Sets default values and defers to set_parameters to populate.
-
-    Args:
-      method_desc: Dictionary with metadata describing an API method. Value
-          comes from the dictionary of methods stored in the 'methods' key in
-          the deserialized discovery document.
-    """
-    self.argmap = {}
-    self.required_params = []
-    self.repeated_params = []
-    self.pattern_params = {}
-    self.query_params = []
-    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
-    #                parsing is gotten rid of.
-    self.path_params = set()
-    self.param_types = {}
-    self.enum_params = {}
-
-    self.set_parameters(method_desc)
-
-  def set_parameters(self, method_desc):
-    """Populates maps and lists based on method description.
-
-    Iterates through each parameter for the method and parses the values from
-    the parameter dictionary.
-
-    Args:
-      method_desc: Dictionary with metadata describing an API method. Value
-          comes from the dictionary of methods stored in the 'methods' key in
-          the deserialized discovery document.
-    """
-    for arg, desc in six.iteritems(method_desc.get('parameters', {})):
-      param = key2param(arg)
-      self.argmap[param] = arg
-
-      if desc.get('pattern'):
-        self.pattern_params[param] = desc['pattern']
-      if desc.get('enum'):
-        self.enum_params[param] = desc['enum']
-      if desc.get('required'):
-        self.required_params.append(param)
-      if desc.get('repeated'):
-        self.repeated_params.append(param)
-      if desc.get('location') == 'query':
-        self.query_params.append(param)
-      if desc.get('location') == 'path':
-        self.path_params.add(param)
-      self.param_types[param] = desc.get('type', 'string')
-
-    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
-    #                should have all path parameters already marked with
-    #                'location: path'.
-    for match in URITEMPLATE.finditer(method_desc['path']):
-      for namematch in VARNAME.finditer(match.group(0)):
-        name = key2param(namematch.group(0))
-        self.path_params.add(name)
-        if name in self.query_params:
-          self.query_params.remove(name)
-
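A short sketch of how a method description is digested into these attribute maps, assuming this module's key2param, URITEMPLATE and VARNAME helpers are available; the method description is made up for illustration:

method_desc = {
    'path': 'files/{fileId}',
    'parameters': {
        'fileId': {'type': 'string', 'required': True, 'location': 'path'},
        'maxResults': {'type': 'integer', 'location': 'query'},
    },
}

params = ResourceMethodParameters(method_desc)
assert params.required_params == ['fileId']
assert 'fileId' in params.path_params
assert 'maxResults' in params.query_params
assert params.param_types['maxResults'] == 'integer'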
-
-def createMethod(methodName, methodDesc, rootDesc, schema):
-  """Creates a method for attaching to a Resource.
-
-  Args:
-    methodName: string, name of the method to use.
-    methodDesc: object, fragment of deserialized discovery document that
-      describes the method.
-    rootDesc: object, the entire deserialized discovery document.
-    schema: object, mapping of schema names to schema descriptions.
-  """
-  methodName = fix_method_name(methodName)
-  (pathUrl, httpMethod, methodId, accept,
-   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)
-
-  parameters = ResourceMethodParameters(methodDesc)
-
-  def method(self, **kwargs):
-    # Don't bother with a docstring; it will be overwritten by createMethod.
-
-    for name in six.iterkeys(kwargs):
-      if name not in parameters.argmap:
-        raise TypeError('Got an unexpected keyword argument "%s"' % name)
-
-    # Remove args that have a value of None.
-    keys = list(kwargs.keys())
-    for name in keys:
-      if kwargs[name] is None:
-        del kwargs[name]
-
-    for name in parameters.required_params:
-      if name not in kwargs:
-        raise TypeError('Missing required parameter "%s"' % name)
-
-    for name, regex in six.iteritems(parameters.pattern_params):
-      if name in kwargs:
-        if isinstance(kwargs[name], six.string_types):
-          pvalues = [kwargs[name]]
-        else:
-          pvalues = kwargs[name]
-        for pvalue in pvalues:
-          if re.match(regex, pvalue) is None:
-            raise TypeError(
-                'Parameter "%s" value "%s" does not match the pattern "%s"' %
-                (name, pvalue, regex))
-
-    for name, enums in six.iteritems(parameters.enum_params):
-      if name in kwargs:
-        # We need to handle the case of a repeated enum
-        # name differently, since we want to handle both
-        # arg='value' and arg=['value1', 'value2']
-        if (name in parameters.repeated_params and
-            not isinstance(kwargs[name], six.string_types)):
-          values = kwargs[name]
-        else:
-          values = [kwargs[name]]
-        for value in values:
-          if value not in enums:
-            raise TypeError(
-                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
-                (name, value, str(enums)))
-
-    actual_query_params = {}
-    actual_path_params = {}
-    for key, value in six.iteritems(kwargs):
-      to_type = parameters.param_types.get(key, 'string')
-      # For repeated parameters we cast each member of the list.
-      if key in parameters.repeated_params and type(value) == type([]):
-        cast_value = [_cast(x, to_type) for x in value]
-      else:
-        cast_value = _cast(value, to_type)
-      if key in parameters.query_params:
-        actual_query_params[parameters.argmap[key]] = cast_value
-      if key in parameters.path_params:
-        actual_path_params[parameters.argmap[key]] = cast_value
-    body_value = kwargs.get('body', None)
-    media_filename = kwargs.get('media_body', None)
-
-    if self._developerKey:
-      actual_query_params['key'] = self._developerKey
-
-    model = self._model
-    if methodName.endswith('_media'):
-      model = MediaModel()
-    elif 'response' not in methodDesc:
-      model = RawModel()
-
-    headers = {}
-    headers, params, query, body = model.request(headers,
-        actual_path_params, actual_query_params, body_value)
-
-    expanded_url = uritemplate.expand(pathUrl, params)
-    url = _urljoin(self._baseUrl, expanded_url + query)
-
-    resumable = None
-    multipart_boundary = ''
-
-    if media_filename:
-      # Ensure we end up with a valid MediaUpload object.
-      if isinstance(media_filename, six.string_types):
-        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
-        if media_mime_type is None:
-          raise UnknownFileType(media_filename)
-        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
-          raise UnacceptableMimeTypeError(media_mime_type)
-        media_upload = MediaFileUpload(media_filename,
-                                       mimetype=media_mime_type)
-      elif isinstance(media_filename, MediaUpload):
-        media_upload = media_filename
-      else:
-        raise TypeError('media_filename must be str or MediaUpload.')
-
-      # Check the maxSize
-      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
-        raise MediaUploadSizeError("Media larger than: %s" % maxSize)
-
-      # Use the media path uri for media uploads
-      expanded_url = uritemplate.expand(mediaPathUrl, params)
-      url = _urljoin(self._baseUrl, expanded_url + query)
-      if media_upload.resumable():
-        url = _add_query_parameter(url, 'uploadType', 'resumable')
-
-      if media_upload.resumable():
-        # This is all we need to do for resumable, if the body exists it gets
-        # sent in the first request, otherwise an empty body is sent.
-        resumable = media_upload
-      else:
-        # A non-resumable upload
-        if body is None:
-          # This is a simple media upload
-          headers['content-type'] = media_upload.mimetype()
-          body = media_upload.getbytes(0, media_upload.size())
-          url = _add_query_parameter(url, 'uploadType', 'media')
-        else:
-          # This is a multipart/related upload.
-          msgRoot = MIMEMultipart('related')
-          # msgRoot should not write out its own headers
-          setattr(msgRoot, '_write_headers', lambda self: None)
-
-          # attach the body as one part
-          msg = MIMENonMultipart(*headers['content-type'].split('/'))
-          msg.set_payload(body)
-          msgRoot.attach(msg)
-
-          # attach the media as the second part
-          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
-          msg['Content-Transfer-Encoding'] = 'binary'
-
-          payload = media_upload.getbytes(0, media_upload.size())
-          msg.set_payload(payload)
-          msgRoot.attach(msg)
-          # encode the body: note that we can't use `as_string`, because
-          # it plays games with `From ` lines.
-          fp = BytesIO()
-          g = _BytesGenerator(fp, mangle_from_=False)
-          g.flatten(msgRoot, unixfrom=False)
-          body = fp.getvalue()
-
-          multipart_boundary = msgRoot.get_boundary()
-          headers['content-type'] = ('multipart/related; '
-                                     'boundary="%s"') % multipart_boundary
-          url = _add_query_parameter(url, 'uploadType', 'multipart')
-
-    logger.info('URL being requested: %s %s' % (httpMethod, url))
-    return self._requestBuilder(self._http,
-                                model.response,
-                                url,
-                                method=httpMethod,
-                                body=body,
-                                headers=headers,
-                                methodId=methodId,
-                                resumable=resumable)
-
-  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
-  if len(parameters.argmap) > 0:
-    docs.append('Args:\n')
-
-  # Skip undocumented params and params common to all methods.
-  skip_parameters = list(rootDesc.get('parameters', {}).keys())
-  skip_parameters.extend(STACK_QUERY_PARAMETERS)
-
-  all_args = list(parameters.argmap.keys())
-  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
-
-  # Move body to the front of the line.
-  if 'body' in all_args:
-    args_ordered.append('body')
-
-  for name in all_args:
-    if name not in args_ordered:
-      args_ordered.append(name)
-
-  for arg in args_ordered:
-    if arg in skip_parameters:
-      continue
-
-    repeated = ''
-    if arg in parameters.repeated_params:
-      repeated = ' (repeated)'
-    required = ''
-    if arg in parameters.required_params:
-      required = ' (required)'
-    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
-    paramdoc = paramdesc.get('description', 'A parameter')
-    if '$ref' in paramdesc:
-      docs.append(
-          ('  %s: object, %s%s%s\n    The object takes the'
-          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
-            schema.prettyPrintByName(paramdesc['$ref'])))
-    else:
-      paramtype = paramdesc.get('type', 'string')
-      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
-                                          repeated))
-    enum = paramdesc.get('enum', [])
-    enumDesc = paramdesc.get('enumDescriptions', [])
-    if enum and enumDesc:
-      docs.append('    Allowed values\n')
-      for (name, desc) in zip(enum, enumDesc):
-        docs.append('      %s - %s\n' % (name, desc))
-  if 'response' in methodDesc:
-    if methodName.endswith('_media'):
-      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
-    else:
-      docs.append('\nReturns:\n  An object of the form:\n\n    ')
-      docs.append(schema.prettyPrintSchema(methodDesc['response']))
-
-  setattr(method, '__doc__', ''.join(docs))
-  return (methodName, method)
-
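The closure above is what sits behind user-facing calls; the media branch in particular is exercised by requests like the following sketch, where the Drive-style service, file name, and fields are all hypothetical:

from googleapiclient.http import MediaFileUpload

# 'service' is a hypothetical Resource built from a Drive-style discovery doc.
media = MediaFileUpload('cat.png', mimetype='image/png', resumable=True)
request = service.files().insert(body={'title': 'cat.png'}, media_body=media)
response = request.execute()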
-
-def createNextMethod(methodName):
-  """Creates any _next methods for attaching to a Resource.
-
-  The _next methods allow for easy iteration through list() responses.
-
-  Args:
-    methodName: string, name of the method to use.
-  """
-  methodName = fix_method_name(methodName)
-
-  def methodNext(self, previous_request, previous_response):
-    """Retrieves the next page of results.
-
-Args:
-  previous_request: The request for the previous page. (required)
-  previous_response: The response from the request for the previous page. (required)
-
-Returns:
-  A request object that you can call 'execute()' on to request the next
-  page. Returns None if there are no more items in the collection.
-    """
-    # Retrieve nextPageToken from previous_response
-    # Use as pageToken in previous_request to create new request.
-
-    if 'nextPageToken' not in previous_response or not previous_response['nextPageToken']:
-      return None
-
-    request = copy.copy(previous_request)
-
-    pageToken = previous_response['nextPageToken']
-    parsed = list(urlparse(request.uri))
-    q = parse_qsl(parsed[4])
-
-    # Find and remove old 'pageToken' value from URI
-    newq = [(key, value) for (key, value) in q if key != 'pageToken']
-    newq.append(('pageToken', pageToken))
-    parsed[4] = urlencode(newq)
-    uri = urlunparse(parsed)
-
-    request.uri = uri
-
-    logger.info('URL being requested: %s %s' % (methodName, uri))
-
-    return request
-
-  return (methodName, methodNext)
-
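The generated <method>_next variants support the usual pagination loop; a sketch, again with a hypothetical Drive-style service:

request = service.files().list(maxResults=100)
while request is not None:
  response = request.execute()
  for item in response.get('items', []):
    print(item['id'])
  request = service.files().list_next(request, response)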
-
-class Resource(object):
-  """A class for interacting with a resource."""
-
-  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
-               resourceDesc, rootDesc, schema):
-    """Build a Resource from the API description.
-
-    Args:
-      http: httplib2.Http, Object to make http requests with.
-      baseUrl: string, base URL for the API. All requests are relative to this
-          URI.
-      model: googleapiclient.Model, converts to and from the wire format.
-      requestBuilder: class or callable that instantiates an
-          googleapiclient.HttpRequest object.
-      developerKey: string, key obtained from
-          https://code.google.com/apis/console
-      resourceDesc: object, section of deserialized discovery document that
-          describes a resource. Note that the top level discovery document
-          is considered a resource.
-      rootDesc: object, the entire deserialized discovery document.
-      schema: object, mapping of schema names to schema descriptions.
-    """
-    self._dynamic_attrs = []
-
-    self._http = http
-    self._baseUrl = baseUrl
-    self._model = model
-    self._developerKey = developerKey
-    self._requestBuilder = requestBuilder
-    self._resourceDesc = resourceDesc
-    self._rootDesc = rootDesc
-    self._schema = schema
-
-    self._set_service_methods()
-
-  def _set_dynamic_attr(self, attr_name, value):
-    """Sets an instance attribute and tracks it in a list of dynamic attributes.
-
-    Args:
-      attr_name: string; The name of the attribute to be set
-      value: The value being set on the object and tracked in the dynamic cache.
-    """
-    self._dynamic_attrs.append(attr_name)
-    self.__dict__[attr_name] = value
-
-  def __getstate__(self):
-    """Trim the state down to something that can be pickled.
-
-    Uses the fact that the instance variable _dynamic_attrs holds attrs that
-    will be wiped and restored on pickle serialization.
-    """
-    state_dict = copy.copy(self.__dict__)
-    for dynamic_attr in self._dynamic_attrs:
-      del state_dict[dynamic_attr]
-    del state_dict['_dynamic_attrs']
-    return state_dict
-
-  def __setstate__(self, state):
-    """Reconstitute the state of the object from being pickled.
-
-    Uses the fact that the instance variable _dynamic_attrs holds attrs that
-    will be wiped and restored on pickle serialization.
-    """
-    self.__dict__.update(state)
-    self._dynamic_attrs = []
-    self._set_service_methods()
-
-  def _set_service_methods(self):
-    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
-    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
-    self._add_next_methods(self._resourceDesc, self._schema)
-
-  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
-    # If this is the root Resource, add a new_batch_http_request() method.
-    if resourceDesc == rootDesc:
-      batch_uri = '%s%s' % (
-        rootDesc['rootUrl'], rootDesc.get('batchPath', 'batch'))
-      def new_batch_http_request(callback=None):
-        """Create a BatchHttpRequest object based on the discovery document.
-
-        Args:
-          callback: callable, A callback to be called for each response, of the
-            form callback(id, response, exception). The first parameter is the
-            request id, and the second is the deserialized response object. The
-            third is an apiclient.errors.HttpError exception object if an HTTP
-            error occurred while processing the request, or None if no error
-            occurred.
-
-        Returns:
-          A BatchHttpRequest object based on the discovery document.
-        """
-        return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
-      self._set_dynamic_attr('new_batch_http_request', new_batch_http_request)
-
-    # Add basic methods to Resource
-    if 'methods' in resourceDesc:
-      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
-        fixedMethodName, method = createMethod(
-            methodName, methodDesc, rootDesc, schema)
-        self._set_dynamic_attr(fixedMethodName,
-                               method.__get__(self, self.__class__))
-        # Add in _media methods. The functionality of the attached method will
-        # change when it sees that the method name ends in _media.
-        if methodDesc.get('supportsMediaDownload', False):
-          fixedMethodName, method = createMethod(
-              methodName + '_media', methodDesc, rootDesc, schema)
-          self._set_dynamic_attr(fixedMethodName,
-                                 method.__get__(self, self.__class__))
-
-  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
-    # Add in nested resources
-    if 'resources' in resourceDesc:
-
-      def createResourceMethod(methodName, methodDesc):
-        """Create a method on the Resource to access a nested Resource.
-
-        Args:
-          methodName: string, name of the method to use.
-          methodDesc: object, fragment of deserialized discovery document that
-            describes the method.
-        """
-        methodName = fix_method_name(methodName)
-
-        def methodResource(self):
-          return Resource(http=self._http, baseUrl=self._baseUrl,
-                          model=self._model, developerKey=self._developerKey,
-                          requestBuilder=self._requestBuilder,
-                          resourceDesc=methodDesc, rootDesc=rootDesc,
-                          schema=schema)
-
-        setattr(methodResource, '__doc__', 'A collection resource.')
-        setattr(methodResource, '__is_resource__', True)
-
-        return (methodName, methodResource)
-
-      for methodName, methodDesc in six.iteritems(resourceDesc['resources']):
-        fixedMethodName, method = createResourceMethod(methodName, methodDesc)
-        self._set_dynamic_attr(fixedMethodName,
-                               method.__get__(self, self.__class__))
-
-  def _add_next_methods(self, resourceDesc, schema):
-    # Add _next() methods
-    # Look for response bodies in schema that contain nextPageToken, and methods
-    # that take a pageToken parameter.
-    if 'methods' in resourceDesc:
-      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
-        if 'response' in methodDesc:
-          responseSchema = methodDesc['response']
-          if '$ref' in responseSchema:
-            responseSchema = schema.get(responseSchema['$ref'])
-          hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
-                                                                   {})
-          hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
-          if hasNextPageToken and hasPageToken:
-            fixedMethodName, method = createNextMethod(methodName + '_next')
-            self._set_dynamic_attr(fixedMethodName,
-                                   method.__get__(self, self.__class__))
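For reference, the new_batch_http_request attribute installed on the root Resource above is typically used like this (a sketch; the service, file ids, and callback are hypothetical):

def on_response(request_id, response, exception):
  if exception is not None:
    print('request %s failed: %s' % (request_id, exception))
  else:
    print('request %s ok' % request_id)

batch = service.new_batch_http_request(callback=on_response)
batch.add(service.files().get(fileId='abc'), request_id='1')
batch.add(service.files().get(fileId='def'), request_id='2')
batch.execute()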
diff --git a/tools/swarming_client/third_party/googleapiclient/discovery_cache/__init__.py b/tools/swarming_client/third_party/googleapiclient/discovery_cache/__init__.py
deleted file mode 100644
index c56fd65..0000000
--- a/tools/swarming_client/third_party/googleapiclient/discovery_cache/__init__.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Caching utility for the discovery document."""
-
-from __future__ import absolute_import
-
-import logging
-import datetime
-
-DISCOVERY_DOC_MAX_AGE = 60 * 60 * 24  # 1 day
-
-
-def autodetect():
-  """Detects an appropriate cache module and returns it.
-
-  Returns:
-    googleapiclient.discovery_cache.base.Cache, a cache object which
-    is auto detected, or None if no cache object is available.
-  """
-  try:
-    from google.appengine.api import memcache
-    from . import appengine_memcache
-    return appengine_memcache.cache
-  except Exception:
-    try:
-      from . import file_cache
-      return file_cache.cache
-    except Exception as e:
-      logging.warning(e, exc_info=True)
-      return None
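A minimal sketch of using the autodetected cache directly, as it was vendored here: outside App Engine this resolves to the file cache below (provided oauth2client's LockedFile is importable), otherwise None is returned. The URL and content are illustrative.

from googleapiclient.discovery_cache import autodetect

cache = autodetect()
if cache is not None:
  url = 'https://www.googleapis.com/discovery/v1/apis/drive/v2/rest'
  cache.set(url, '{"kind": "discovery#restDescription"}')
  assert cache.get(url) is not None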
diff --git a/tools/swarming_client/third_party/googleapiclient/discovery_cache/appengine_memcache.py b/tools/swarming_client/third_party/googleapiclient/discovery_cache/appengine_memcache.py
deleted file mode 100644
index a521fc3..0000000
--- a/tools/swarming_client/third_party/googleapiclient/discovery_cache/appengine_memcache.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""App Engine memcache based cache for the discovery document."""
-
-import logging
-
-# This is only an optional dependency because we only import this
-# module when google.appengine.api.memcache is available.
-from google.appengine.api import memcache
-
-from . import base
-from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
-
-NAMESPACE = 'google-api-client'
-
-
-class Cache(base.Cache):
-  """A cache with app engine memcache API."""
-
-  def __init__(self, max_age):
-    """Constructor.
-
-    Args:
-      max_age: Cache expiration in seconds.
-    """
-    self._max_age = max_age
-
-  def get(self, url):
-    try:
-      return memcache.get(url, namespace=NAMESPACE)
-    except Exception as e:
-      logging.warning(e, exc_info=True)
-
-  def set(self, url, content):
-    try:
-      memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
-    except Exception as e:
-      logging.warning(e, exc_info=True)
-
-cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/tools/swarming_client/third_party/googleapiclient/discovery_cache/base.py b/tools/swarming_client/third_party/googleapiclient/discovery_cache/base.py
deleted file mode 100644
index 00e466d..0000000
--- a/tools/swarming_client/third_party/googleapiclient/discovery_cache/base.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""An abstract class for caching the discovery document."""
-
-import abc
-
-
-class Cache(object):
-  """A base abstract cache class."""
-  __metaclass__ = abc.ABCMeta
-
-  @abc.abstractmethod
-  def get(self, url):
-    """Gets the content from the memcache with a given key.
-
-    Args:
-      url: string, the key for the cache.
-
-    Returns:
-      object, the value in the cache for the given key, or None if the key is
-      not in the cache.
-    """
-    raise NotImplementedError()
-
-  @abc.abstractmethod
-  def set(self, url, content):
-    """Sets the given key and content in the cache.
-
-    Args:
-      url: string, the key for the cache.
-      content: string, the discovery document.
-    """
-    raise NotImplementedError()
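Any concrete cache only has to implement these two methods; a minimal in-memory sketch (not part of this package, for illustration only):

class DictCache(Cache):
  """A trivial Cache backed by a plain dict, with no expiration."""

  def __init__(self):
    self._store = {}

  def get(self, url):
    return self._store.get(url)

  def set(self, url, content):
    self._store[url] = content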
diff --git a/tools/swarming_client/third_party/googleapiclient/discovery_cache/file_cache.py b/tools/swarming_client/third_party/googleapiclient/discovery_cache/file_cache.py
deleted file mode 100644
index ce540f0..0000000
--- a/tools/swarming_client/third_party/googleapiclient/discovery_cache/file_cache.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""File based cache for the discovery document.
-
-The cache is stored in a single file so that multiple processes can
-share the same cache. It locks the file whenever the file is
-accessed. When the cache content is corrupted, it is reinitialized
-with an empty cache.
-"""
-
-from __future__ import division
-
-import datetime
-import json
-import logging
-import os
-import tempfile
-import threading
-
-from oauth2client.locked_file import LockedFile
-
-from . import base
-from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
-
-logger = logging.getLogger(__name__)
-
-FILENAME = 'google-api-python-client-discovery-doc.cache'
-EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-
-def _to_timestamp(date):
-  try:
-    return (date - EPOCH).total_seconds()
-  except AttributeError:
-    # The following is the equivalent of total_seconds() in Python 2.6.
-    # See also: https://docs.python.org/2/library/datetime.html
-    delta = date - EPOCH
-    return ((delta.microseconds + (delta.seconds + delta.days * 24 * 3600)
-             * 10**6) / 10**6)
-
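The Python 2.6 fallback above is arithmetically identical to timedelta.total_seconds(); a quick check:

import datetime

delta = datetime.timedelta(days=1, seconds=30, microseconds=500000)
manual = (delta.microseconds +
          (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6
assert abs(manual - delta.total_seconds()) < 1e-9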
-
-def _read_or_initialize_cache(f):
-  f.file_handle().seek(0)
-  try:
-    cache = json.load(f.file_handle())
-  except Exception:
-    # Either the file is being opened for the first time or the cache is
-    # corrupted, so initialize the file with an empty dict.
-    cache = {}
-    f.file_handle().truncate(0)
-    f.file_handle().seek(0)
-    json.dump(cache, f.file_handle())
-  return cache
-
-
-class Cache(base.Cache):
-  """A file based cache for the discovery documents."""
-
-  def __init__(self, max_age):
-    """Constructor.
-
-    Args:
-      max_age: Cache expiration in seconds.
-    """
-    self._max_age = max_age
-    self._file = os.path.join(tempfile.gettempdir(), FILENAME)
-    f = LockedFile(self._file, 'a+', 'r')
-    try:
-      f.open_and_lock()
-      if f.is_locked():
-        _read_or_initialize_cache(f)
-      # If we cannot obtain the lock, another process or thread must
-      # have initialized the file.
-    except Exception as e:
-      logging.warning(e, exc_info=True)
-    finally:
-      f.unlock_and_close()
-
-  def get(self, url):
-    f = LockedFile(self._file, 'r+', 'r')
-    try:
-      f.open_and_lock()
-      if f.is_locked():
-        cache = _read_or_initialize_cache(f)
-        if url in cache:
-          content, t = cache.get(url, (None, 0))
-          if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
-            return content
-        return None
-      else:
-        logger.debug('Could not obtain a lock for the cache file.')
-        return None
-    except Exception as e:
-      logger.warning(e, exc_info=True)
-    finally:
-      f.unlock_and_close()
-
-  def set(self, url, content):
-    f = LockedFile(self._file, 'r+', 'r')
-    try:
-      f.open_and_lock()
-      if f.is_locked():
-        cache = _read_or_initialize_cache(f)
-        cache[url] = (content, _to_timestamp(datetime.datetime.now()))
-        # Remove stale cache.
-        for k, (_, timestamp) in list(cache.items()):
-          if _to_timestamp(datetime.datetime.now()) >= timestamp + self._max_age:
-            del cache[k]
-        f.file_handle().truncate(0)
-        f.file_handle().seek(0)
-        json.dump(cache, f.file_handle())
-      else:
-        logger.debug('Could not obtain a lock for the cache file.')
-    except Exception as e:
-      logger.warning(e, exc_info=True)
-    finally:
-      f.unlock_and_close()
-
-
-cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/tools/swarming_client/third_party/googleapiclient/errors.py b/tools/swarming_client/third_party/googleapiclient/errors.py
deleted file mode 100644
index 3d44de7..0000000
--- a/tools/swarming_client/third_party/googleapiclient/errors.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Errors for the library.
-
-All exceptions defined by the library
-should be defined in this file.
-"""
-from __future__ import absolute_import
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import json
-
-from oauth2client import util
-
-
-class Error(Exception):
-  """Base error for this module."""
-  pass
-
-
-class HttpError(Error):
-  """HTTP data was invalid or unexpected."""
-
-  @util.positional(3)
-  def __init__(self, resp, content, uri=None):
-    self.resp = resp
-    if not isinstance(content, bytes):
-        raise TypeError("HTTP content should be bytes")
-    self.content = content
-    self.uri = uri
-
-  def _get_reason(self):
-    """Calculate the reason for the error from the response content."""
-    reason = self.resp.reason
-    try:
-      data = json.loads(self.content.decode('utf-8'))
-      reason = data['error']['message']
-    except (ValueError, KeyError):
-      pass
-    if reason is None:
-      reason = ''
-    return reason
-
-  def __repr__(self):
-    if self.uri:
-      return '<HttpError %s when requesting %s returned "%s">' % (
-          self.resp.status, self.uri, self._get_reason().strip())
-    else:
-      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
-
-  __str__ = __repr__
-
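Callers usually consume HttpError along these lines (a sketch; the service and file id are hypothetical):

from googleapiclient.errors import HttpError

try:
  service.files().get(fileId='no-such-file').execute()
except HttpError as err:
  # __repr__/__str__ above include the status, URI and parsed reason.
  print('API call failed (%s): %s' % (err.resp.status, err))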
-
-class InvalidJsonError(Error):
-  """The JSON returned could not be parsed."""
-  pass
-
-
-class UnknownFileType(Error):
-  """File type unknown or unexpected."""
-  pass
-
-
-class UnknownLinkType(Error):
-  """Link type unknown or unexpected."""
-  pass
-
-
-class UnknownApiNameOrVersion(Error):
-  """No API with that name and version exists."""
-  pass
-
-
-class UnacceptableMimeTypeError(Error):
-  """That is an unacceptable mimetype for this operation."""
-  pass
-
-
-class MediaUploadSizeError(Error):
-  """Media is larger than the method can accept."""
-  pass
-
-
-class ResumableUploadError(HttpError):
-  """Error occured during resumable upload."""
-  pass
-
-
-class InvalidChunkSizeError(Error):
-  """The given chunksize is not valid."""
-  pass
-
-class InvalidNotificationError(Error):
-  """The channel Notification is invalid."""
-  pass
-
-class BatchError(HttpError):
-  """Error occured during batch operations."""
-
-  @util.positional(2)
-  def __init__(self, reason, resp=None, content=None):
-    self.resp = resp
-    self.content = content
-    self.reason = reason
-
-  def __repr__(self):
-    return '<BatchError %s "%s">' % (self.resp.status, self.reason)
-
-  __str__ = __repr__
-
-
-class UnexpectedMethodError(Error):
-  """Exception raised by RequestMockBuilder on unexpected calls."""
-
-  @util.positional(1)
-  def __init__(self, methodId=None):
-    """Constructor for an UnexpectedMethodError."""
-    super(UnexpectedMethodError, self).__init__(
-        'Received unexpected call %s' % methodId)
-
-
-class UnexpectedBodyError(Error):
-  """Exception raised by RequestMockBuilder on unexpected bodies."""
-
-  def __init__(self, expected, provided):
-    """Constructor for an UnexpectedMethodError."""
-    super(UnexpectedBodyError, self).__init__(
-        'Expected: [%s] - Provided: [%s]' % (expected, provided))
diff --git a/tools/swarming_client/third_party/googleapiclient/http.py b/tools/swarming_client/third_party/googleapiclient/http.py
deleted file mode 100644
index 2245e8d..0000000
--- a/tools/swarming_client/third_party/googleapiclient/http.py
+++ /dev/null
@@ -1,1644 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes to encapsulate a single HTTP request.
-
-The classes implement a command pattern, with every
-object supporting an execute() method that does the
-actual HTTP request.
-"""
-from __future__ import absolute_import
-import six
-from six.moves import range
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-from six import BytesIO, StringIO
-from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
-
-import base64
-import copy
-import gzip
-import httplib2
-import json
-import logging
-import mimetypes
-import os
-import random
-import ssl
-import sys
-import time
-import uuid
-
-from email.generator import Generator
-from email.mime.multipart import MIMEMultipart
-from email.mime.nonmultipart import MIMENonMultipart
-from email.parser import FeedParser
-
-from googleapiclient import mimeparse
-from googleapiclient.errors import BatchError
-from googleapiclient.errors import HttpError
-from googleapiclient.errors import InvalidChunkSizeError
-from googleapiclient.errors import ResumableUploadError
-from googleapiclient.errors import UnexpectedBodyError
-from googleapiclient.errors import UnexpectedMethodError
-from googleapiclient.model import JsonModel
-from oauth2client import util
-
-
-DEFAULT_CHUNK_SIZE = 512*1024
-
-MAX_URI_LENGTH = 2048
-
-
-def _retry_request(http, num_retries, req_type, sleep, rand, uri, method, *args,
-                   **kwargs):
-  """Retries an HTTP request multiple times while handling errors.
-
-  If the request still fails after all retries, the last error is either
-  returned as the return value (for HTTP 5xx errors) or raised (for
-  ssl.SSLError).
-
-  Args:
-    http: Http object to be used to execute request.
-    num_retries: Maximum number of retries.
-    req_type: Type of the request (used for logging retries).
-    sleep, rand: Functions to sleep for random time between retries.
-    uri: URI to be requested.
-    method: HTTP method to be used.
-    args, kwargs: Additional arguments passed to http.request.
-
-  Returns:
-    resp, content - Response from the http request (may be HTTP 5xx).
-  """
-  resp = None
-  for retry_num in range(num_retries + 1):
-    if retry_num > 0:
-      sleep(rand() * 2**retry_num)
-      logging.warning(
-          'Retry #%d for %s: %s %s%s' % (retry_num, req_type, method, uri,
-          ', following status: %d' % resp.status if resp else ''))
-
-    try:
-      resp, content = http.request(uri, method, *args, **kwargs)
-    except ssl.SSLError:
-      if retry_num == num_retries:
-        raise
-      else:
-        continue
-    if resp.status < 500:
-      break
-
-  return resp, content
-
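Each retry above sleeps rand() * 2**retry_num seconds, so the backoff ceiling doubles per attempt; for example with num_retries=5:

num_retries = 5
ceilings = [2 ** retry_num for retry_num in range(1, num_retries + 1)]
assert ceilings == [2, 4, 8, 16, 32]   # seconds, before scaling by rand()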
-
-class MediaUploadProgress(object):
-  """Status of a resumable upload."""
-
-  def __init__(self, resumable_progress, total_size):
-    """Constructor.
-
-    Args:
-      resumable_progress: int, bytes sent so far.
-      total_size: int, total bytes in complete upload, or None if the total
-        upload size isn't known ahead of time.
-    """
-    self.resumable_progress = resumable_progress
-    self.total_size = total_size
-
-  def progress(self):
-    """Percent of upload completed, as a float.
-
-    Returns:
-      the percentage complete as a float, returning 0.0 if the total size of
-      the upload is unknown.
-    """
-    if self.total_size is not None:
-      return float(self.resumable_progress) / float(self.total_size)
-    else:
-      return 0.0
-
-
-class MediaDownloadProgress(object):
-  """Status of a resumable download."""
-
-  def __init__(self, resumable_progress, total_size):
-    """Constructor.
-
-    Args:
-      resumable_progress: int, bytes received so far.
-      total_size: int, total bytes in complete download.
-    """
-    self.resumable_progress = resumable_progress
-    self.total_size = total_size
-
-  def progress(self):
-    """Percent of download completed, as a float.
-
-    Returns:
-      the percentage complete as a float, returning 0.0 if the total size of
-      the download is unknown.
-    """
-    if self.total_size is not None:
-      return float(self.resumable_progress) / float(self.total_size)
-    else:
-      return 0.0
-
-
-class MediaUpload(object):
-  """Describes a media object to upload.
-
-  Base class that defines the interface of MediaUpload subclasses.
-
-  Note that subclasses of MediaUpload may allow you to control the chunksize
-  when uploading a media object. It is important to keep the size of the chunk
-  as large as possible to keep the upload efficient. Other factors may influence
-  the size of the chunk you use, particularly if you are working in an
-  environment where individual HTTP requests may have a hardcoded time limit,
-  such as under certain classes of requests under Google App Engine.
-
-  Streams are io.Base compatible objects that support seek(). Some MediaUpload
-  subclasses support using streams directly to upload data. Support for
-  streaming may be indicated by a MediaUpload sub-class and if appropriate for a
-  platform that stream will be used for uploading the media object. The support
-  for streaming is indicated by has_stream() returning True. The stream() method
-  should return an io.Base object that supports seek(). On platforms where the
-  underlying httplib module supports streaming, for example Python 2.6 and
-  later, the stream will be passed into the http library which will result in
-  less memory being used and possibly faster uploads.
-
-  If you need to upload media that can't be uploaded using any of the existing
-  MediaUpload sub-class then you can sub-class MediaUpload for your particular
-  needs.
-  """
-
-  def chunksize(self):
-    """Chunk size for resumable uploads.
-
-    Returns:
-      Chunk size in bytes.
-    """
-    raise NotImplementedError()
-
-  def mimetype(self):
-    """Mime type of the body.
-
-    Returns:
-      Mime type.
-    """
-    return 'application/octet-stream'
-
-  def size(self):
-    """Size of upload.
-
-    Returns:
-      Size of the body, or None if the size is unknown.
-    """
-    return None
-
-  def resumable(self):
-    """Whether this upload is resumable.
-
-    Returns:
-      True if resumable upload or False.
-    """
-    return False
-
-  def getbytes(self, begin, length):
-    """Get bytes from the media.
-
-    Args:
-      begin: int, offset from beginning of file.
-      length: int, number of bytes to read, starting at begin.
-
-    Returns:
-      A string of bytes read. May be shorter than length if EOF was reached
-      first.
-    """
-    raise NotImplementedError()
-
-  def has_stream(self):
-    """Does the underlying upload support a streaming interface.
-
-    Streaming means it is an io.IOBase subclass that supports seek, i.e.
-    seekable() returns True.
-
-    Returns:
-      True if the call to stream() will return an instance of a seekable io.Base
-      subclass.
-    """
-    return False
-
-  def stream(self):
-    """A stream interface to the data being uploaded.
-
-    Returns:
-      The returned value is an io.IOBase subclass that supports seek, i.e.
-      seekable() returns True.
-    """
-    raise NotImplementedError()
-
-  @util.positional(1)
-  def _to_json(self, strip=None):
-    """Utility function for creating a JSON representation of a MediaUpload.
-
-    Args:
-      strip: array, An array of names of members to not include in the JSON.
-
-    Returns:
-       string, a JSON representation of this instance, suitable to pass to
-       from_json().
-    """
-    t = type(self)
-    d = copy.copy(self.__dict__)
-    if strip is not None:
-      for member in strip:
-        del d[member]
-    d['_class'] = t.__name__
-    d['_module'] = t.__module__
-    return json.dumps(d)
-
-  def to_json(self):
-    """Create a JSON representation of an instance of MediaUpload.
-
-    Returns:
-       string, a JSON representation of this instance, suitable to pass to
-       from_json().
-    """
-    return self._to_json()
-
-  @classmethod
-  def new_from_json(cls, s):
-    """Utility class method to instantiate a MediaUpload subclass from a JSON
-    representation produced by to_json().
-
-    Args:
-      s: string, JSON from to_json().
-
-    Returns:
-      An instance of the subclass of MediaUpload that was serialized with
-      to_json().
-    """
-    data = json.loads(s)
-    # Find and call the right classmethod from_json() to restore the object.
-    module = data['_module']
-    m = __import__(module, fromlist=module.split('.')[:-1])
-    kls = getattr(m, data['_class'])
-    from_json = getattr(kls, 'from_json')
-    return from_json(s)
-
-
-class MediaIoBaseUpload(MediaUpload):
-  """A MediaUpload for a io.Base objects.
-
-  Note that the Python file object is compatible with io.Base and can be used
-  with this class also.
-
-    fh = BytesIO('...Some data to upload...')
-    media = MediaIoBaseUpload(fh, mimetype='image/png',
-      chunksize=1024*1024, resumable=True)
-    farm.animals().insert(
-        id='cow',
-        name='cow.png',
-        media_body=media).execute()
-
-  Depending on the platform you are working on, you may pass -1 as the
-  chunksize, which indicates that the entire file should be uploaded in a single
-  request. If the underlying platform supports streams, such as Python 2.6 or
-  later, then this can be very efficient as it avoids multiple connections, and
-  also avoids loading the entire file into memory before sending it. Note that
-  Google App Engine has a 5MB limit on request size, so you should never set
-  your chunksize larger than 5MB, or to -1.
-  """
-
-  @util.positional(3)
-  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
-      resumable=False):
-    """Constructor.
-
-    Args:
-      fd: io.Base or file object, The source of the bytes to upload. MUST be
-        opened in blocking mode, do not use streams opened in non-blocking mode.
-        The given stream must be seekable, that is, it must be able to call
-        seek() on fd.
-      mimetype: string, Mime-type of the file.
-      chunksize: int, File will be uploaded in chunks of this many bytes. Only
-        used if resumable=True. Pass in a value of -1 if the file is to be
-        uploaded as a single chunk. Note that Google App Engine has a 5MB limit
-        on request size, so you should never set your chunksize larger than 5MB,
-        or to -1.
-      resumable: bool, True if this is a resumable upload. False means upload
-        in a single request.
-    """
-    super(MediaIoBaseUpload, self).__init__()
-    self._fd = fd
-    self._mimetype = mimetype
-    if not (chunksize == -1 or chunksize > 0):
-      raise InvalidChunkSizeError()
-    self._chunksize = chunksize
-    self._resumable = resumable
-
-    self._fd.seek(0, os.SEEK_END)
-    self._size = self._fd.tell()
-
-  def chunksize(self):
-    """Chunk size for resumable uploads.
-
-    Returns:
-      Chunk size in bytes.
-    """
-    return self._chunksize
-
-  def mimetype(self):
-    """Mime type of the body.
-
-    Returns:
-      Mime type.
-    """
-    return self._mimetype
-
-  def size(self):
-    """Size of upload.
-
-    Returns:
-      Size of the body, or None if the size is unknown.
-    """
-    return self._size
-
-  def resumable(self):
-    """Whether this upload is resumable.
-
-    Returns:
-      True if resumable upload or False.
-    """
-    return self._resumable
-
-  def getbytes(self, begin, length):
-    """Get bytes from the media.
-
-    Args:
-      begin: int, offset from beginning of file.
-      length: int, number of bytes to read, starting at begin.
-
-    Returns:
-      A string of bytes read. May be shorter than length if EOF was reached
-      first.
-    """
-    self._fd.seek(begin)
-    return self._fd.read(length)
-
-  def has_stream(self):
-    """Does the underlying upload support a streaming interface.
-
-    Streaming means it is an io.IOBase subclass that supports seek, i.e.
-    seekable() returns True.
-
-    Returns:
-      True if the call to stream() will return an instance of a seekable io.Base
-      subclass.
-    """
-    return True
-
-  def stream(self):
-    """A stream interface to the data being uploaded.
-
-    Returns:
-      The returned value is an io.IOBase subclass that supports seek, i.e.
-      seekable() returns True.
-    """
-    return self._fd
-
-  def to_json(self):
-    """This upload type is not serializable."""
-    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
-
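The docstring example above passes a str literal to BytesIO; on Python 3 the stream must hold bytes. A minimal sketch mirroring it, with the upload target left hypothetical as in the docstring:

import io
from googleapiclient.http import MediaIoBaseUpload

fh = io.BytesIO(b'...Some data to upload...')
media = MediaIoBaseUpload(fh, mimetype='image/png',
                          chunksize=1024 * 1024, resumable=True)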
-
-class MediaFileUpload(MediaIoBaseUpload):
-  """A MediaUpload for a file.
-
-  Construct a MediaFileUpload and pass as the media_body parameter of the
-  method. For example, if we had a service that allowed uploading images:
-
-
-    media = MediaFileUpload('cow.png', mimetype='image/png',
-      chunksize=1024*1024, resumable=True)
-    farm.animals().insert(
-        id='cow',
-        name='cow.png',
-        media_body=media).execute()
-
-  Depending on the platform you are working on, you may pass -1 as the
-  chunksize, which indicates that the entire file should be uploaded in a single
-  request. If the underlying platform supports streams, such as Python 2.6 or
-  later, then this can be very efficient as it avoids multiple connections, and
-  also avoids loading the entire file into memory before sending it. Note that
-  Google App Engine has a 5MB limit on request size, so you should never set
-  your chunksize larger than 5MB, or to -1.
-  """
-
-  @util.positional(2)
-  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
-               resumable=False):
-    """Constructor.
-
-    Args:
-      filename: string, Name of the file.
-      mimetype: string, Mime-type of the file. If None then a mime-type will be
-        guessed from the file extension.
-      chunksize: int, File will be uploaded in chunks of this many bytes. Only
-        used if resumable=True. Pass in a value of -1 if the file is to be
-        uploaded in a single chunk. Note that Google App Engine has a 5MB limit
-        on request size, so you should never set your chunksize larger than 5MB,
-        or to -1.
-      resumable: bool, True if this is a resumable upload. False means upload
-        in a single request.
-    """
-    self._filename = filename
-    fd = open(self._filename, 'rb')
-    if mimetype is None:
-      # No mimetype provided, make a guess.
-      mimetype, _ = mimetypes.guess_type(filename)
-      if mimetype is None:
-        # Guess failed, use octet-stream.
-        mimetype = 'application/octet-stream'
-    super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize,
-                                          resumable=resumable)
-
-  def to_json(self):
-    """Creating a JSON representation of an instance of MediaFileUpload.
-
-    Returns:
-       string, a JSON representation of this instance, suitable to pass to
-       from_json().
-    """
-    return self._to_json(strip=['_fd'])
-
-  @staticmethod
-  def from_json(s):
-    d = json.loads(s)
-    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
-                           chunksize=d['_chunksize'], resumable=d['_resumable'])
-
-
-class MediaInMemoryUpload(MediaIoBaseUpload):
-  """MediaUpload for a chunk of bytes.
-
-  DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
-  the stream.
-  """
-
-  @util.positional(2)
-  def __init__(self, body, mimetype='application/octet-stream',
-               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
-    """Create a new MediaInMemoryUpload.
-
-    DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
-    the stream.
-
-    Args:
-      body: string, Bytes of body content.
-      mimetype: string, Mime-type of the file or default of
-        'application/octet-stream'.
-      chunksize: int, File will be uploaded in chunks of this many bytes. Only
-        used if resumable=True.
-      resumable: bool, True if this is a resumable upload. False means upload
-        in a single request.
-    """
-    fd = BytesIO(body)
-    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
-                                              resumable=resumable)
-
-
-class MediaIoBaseDownload(object):
-  """"Download media resources.
-
-  Note that the Python file object is compatible with io.Base and can be used
-  with this class also.
-
-
-  Example:
-    request = farms.animals().get_media(id='cow')
-    fh = io.FileIO('cow.png', mode='wb')
-    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)
-
-    done = False
-    while done is False:
-      status, done = downloader.next_chunk()
-      if status:
-        print "Download %d%%." % int(status.progress() * 100)
-    print "Download Complete!"
-  """
-
-  @util.positional(3)
-  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
-    """Constructor.
-
-    Args:
-      fd: io.Base or file object, The stream in which to write the downloaded
-        bytes.
-      request: googleapiclient.http.HttpRequest, the media request to perform in
-        chunks.
-      chunksize: int, File will be downloaded in chunks of this many bytes.
-    """
-    self._fd = fd
-    self._request = request
-    self._uri = request.uri
-    self._chunksize = chunksize
-    self._progress = 0
-    self._total_size = None
-    self._done = False
-
-    # Stubs for testing.
-    self._sleep = time.sleep
-    self._rand = random.random
-
-  @util.positional(1)
-  def next_chunk(self, num_retries=0):
-    """Get the next chunk of the download.
-
-    Args:
-      num_retries: Integer, number of times to retry 500's with randomized
-            exponential backoff. If all retries fail, the raised HttpError
-            represents the last request. If zero (default), we attempt the
-            request only once.
-
-    Returns:
-      (status, done): (MediaDownloadStatus, boolean)
-         The value of 'done' will be True when the media has been fully
-         downloaded.
-
-    Raises:
-      googleapiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occurred.
-    """
-    headers = {
-        'range': 'bytes=%d-%d' % (
-            self._progress, self._progress + self._chunksize)
-        }
-    http = self._request.http
-
-    resp, content = _retry_request(
-        http, num_retries, 'media download', self._sleep, self._rand, self._uri,
-        'GET', headers=headers)
-
-    if resp.status in [200, 206]:
-      if 'content-location' in resp and resp['content-location'] != self._uri:
-        self._uri = resp['content-location']
-      self._progress += len(content)
-      self._fd.write(content)
-
-      if 'content-range' in resp:
-        content_range = resp['content-range']
-        length = content_range.rsplit('/', 1)[1]
-        self._total_size = int(length)
-      elif 'content-length' in resp:
-        self._total_size = int(resp['content-length'])
-
-      if self._progress == self._total_size:
-        self._done = True
-      return MediaDownloadProgress(self._progress, self._total_size), self._done
-    else:
-      raise HttpError(resp, content, uri=self._uri)
-
-
-class _StreamSlice(object):
-  """Truncated stream.
-
-  Takes a stream and presents a stream that is a slice of the original stream.
-  This is used when uploading media in chunks. In later versions of Python a
-  stream can be passed to httplib in place of the string of data to send. The
-  problem is that httplib just blindly reads to the end of the stream. This
-  wrapper presents a virtual stream that only reads to the end of the chunk.
-  """
-
-  def __init__(self, stream, begin, chunksize):
-    """Constructor.
-
-    Args:
-      stream: (io.Base, file object), the stream to wrap.
-      begin: int, the seek position the chunk begins at.
-      chunksize: int, the size of the chunk.
-    """
-    self._stream = stream
-    self._begin = begin
-    self._chunksize = chunksize
-    self._stream.seek(begin)
-
-  def read(self, n=-1):
-    """Read n bytes.
-
-    Args:
-      n, int, the number of bytes to read.
-
-    Returns:
-      A string of length 'n', or less if EOF is reached.
-    """
-    # The data left available to read sits in [cur, end)
-    cur = self._stream.tell()
-    end = self._begin + self._chunksize
-    if n == -1 or cur + n > end:
-      n = end - cur
-    return self._stream.read(n)
-
-
-class HttpRequest(object):
-  """Encapsulates a single HTTP request."""
-
-  @util.positional(4)
-  def __init__(self, http, postproc, uri,
-               method='GET',
-               body=None,
-               headers=None,
-               methodId=None,
-               resumable=None):
-    """Constructor for an HttpRequest.
-
-    Args:
-      http: httplib2.Http, the transport object to use to make a request
-      postproc: callable, called on the HTTP response and content to transform
-                it into a data object before returning, or raising an exception
-                on an error.
-      uri: string, the absolute URI to send the request to
-      method: string, the HTTP method to use
-      body: string, the request body of the HTTP request,
-      headers: dict, the HTTP request headers
-      methodId: string, a unique identifier for the API method being called.
-      resumable: MediaUpload, None if this is not a resumable request.
-    """
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers or {}
-    self.methodId = methodId
-    self.http = http
-    self.postproc = postproc
-    self.resumable = resumable
-    self.response_callbacks = []
-    self._in_error_state = False
-
-    # Pull the multipart boundary out of the content-type header.
-    major, minor, params = mimeparse.parse_mime_type(
-        self.headers.get('content-type', 'application/json'))
-
-    # The size of the non-media part of the request.
-    self.body_size = len(self.body or '')
-
-    # The resumable URI to send chunks to.
-    self.resumable_uri = None
-
-    # The bytes that have been uploaded.
-    self.resumable_progress = 0
-
-    # Stubs for testing.
-    self._rand = random.random
-    self._sleep = time.sleep
-
-  @util.positional(1)
-  def execute(self, http=None, num_retries=0):
-    """Execute the request.
-
-    Args:
-      http: httplib2.Http, an http object to be used in place of the
-            one the HttpRequest request object was constructed with.
-      num_retries: Integer, number of times to retry 500's with randomized
-            exponential backoff. If all retries fail, the raised HttpError
-            represents the last request. If zero (default), we attempt the
-            request only once.
-
-    Returns:
-      A deserialized object model of the response body as determined
-      by the postproc.
-
-    Raises:
-      googleapiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occurred.
-    """
-    if http is None:
-      http = self.http
-
-    if self.resumable:
-      body = None
-      while body is None:
-        _, body = self.next_chunk(http=http, num_retries=num_retries)
-      return body
-
-    # Non-resumable case.
-
-    if 'content-length' not in self.headers:
-      self.headers['content-length'] = str(self.body_size)
-    # If the request URI is too long then turn it into a POST request.
-    if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
-      self.method = 'POST'
-      self.headers['x-http-method-override'] = 'GET'
-      self.headers['content-type'] = 'application/x-www-form-urlencoded'
-      parsed = urlparse(self.uri)
-      self.uri = urlunparse(
-          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
-           None)
-          )
-      self.body = parsed.query
-      self.headers['content-length'] = str(len(self.body))
-
-    # Handle retries for server-side errors.
-    resp, content = _retry_request(
-          http, num_retries, 'request', self._sleep, self._rand, str(self.uri),
-          method=str(self.method), body=self.body, headers=self.headers)
-
-    for callback in self.response_callbacks:
-      callback(resp)
-    if resp.status >= 300:
-      raise HttpError(resp, content, uri=self.uri)
-    return self.postproc(resp, content)
-
-  @util.positional(2)
-  def add_response_callback(self, cb):
-    """add_response_headers_callback
-
-    Args:
-      cb: Callback to be called on receiving the response headers, of signature:
-
-      def cb(resp):
-        # Where resp is an instance of httplib2.Response
-    """
-    self.response_callbacks.append(cb)
-
-  @util.positional(1)
-  def next_chunk(self, http=None, num_retries=0):
-    """Execute the next step of a resumable upload.
-
-    Can only be used if the method being executed supports media uploads and
-    the MediaUpload object passed in was flagged as using resumable upload.
-
-    Example:
-
-      media = MediaFileUpload('cow.png', mimetype='image/png',
-                              chunksize=1000, resumable=True)
-      request = farm.animals().insert(
-          id='cow',
-          name='cow.png',
-          media_body=media)
-
-      response = None
-      while response is None:
-        status, response = request.next_chunk()
-        if status:
-          print "Upload %d%% complete." % int(status.progress() * 100)
-
-
-    Args:
-      http: httplib2.Http, an http object to be used in place of the
-            one the HttpRequest request object was constructed with.
-      num_retries: Integer, number of times to retry 500's with randomized
-            exponential backoff. If all retries fail, the raised HttpError
-            represents the last request. If zero (default), we attempt the
-            request only once.
-
-    Returns:
-      (status, body): (ResumableMediaStatus, object)
-         The body will be None until the resumable media is fully uploaded.
-
-    Raises:
-      googleapiclient.errors.HttpError if the response was not a 2xx.
-      httplib2.HttpLib2Error if a transport error has occurred.
-    """
-    if http is None:
-      http = self.http
-
-    if self.resumable.size() is None:
-      size = '*'
-    else:
-      size = str(self.resumable.size())
-
-    if self.resumable_uri is None:
-      start_headers = copy.copy(self.headers)
-      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
-      if size != '*':
-        start_headers['X-Upload-Content-Length'] = size
-      start_headers['content-length'] = str(self.body_size)
-
-      resp, content = _retry_request(
-          http, num_retries, 'resumable URI request', self._sleep, self._rand,
-          self.uri, method=self.method, body=self.body, headers=start_headers)
-
-      if resp.status == 200 and 'location' in resp:
-        self.resumable_uri = resp['location']
-      else:
-        raise ResumableUploadError(resp, content)
-    elif self._in_error_state:
-      # If we are in an error state then query the server for current state of
-      # the upload by sending an empty PUT and reading the 'range' header in
-      # the response.
-      headers = {
-          'Content-Range': 'bytes */%s' % size,
-          'content-length': '0'
-          }
-      resp, content = http.request(self.resumable_uri, 'PUT',
-                                   headers=headers)
-      status, body = self._process_response(resp, content)
-      if body:
-        # The upload was complete.
-        return (status, body)
-
-    if self.resumable.has_stream():
-      data = self.resumable.stream()
-      if self.resumable.chunksize() == -1:
-        data.seek(self.resumable_progress)
-        chunk_end = self.resumable.size() - self.resumable_progress - 1
-      else:
-        # Doing chunking with a stream, so wrap a slice of the stream.
-        data = _StreamSlice(data, self.resumable_progress,
-                            self.resumable.chunksize())
-        chunk_end = min(
-            self.resumable_progress + self.resumable.chunksize() - 1,
-            self.resumable.size() - 1)
-    else:
-      data = self.resumable.getbytes(
-          self.resumable_progress, self.resumable.chunksize())
-
-      # A short read implies that we are at EOF, so finish the upload.
-      if len(data) < self.resumable.chunksize():
-        size = str(self.resumable_progress + len(data))
-
-      chunk_end = self.resumable_progress + len(data) - 1
-
-    headers = {
-        'Content-Range': 'bytes %d-%d/%s' % (
-            self.resumable_progress, chunk_end, size),
-        # Must set the content-length header here because httplib can't
-        # calculate the size when working with _StreamSlice.
-        'Content-Length': str(chunk_end - self.resumable_progress + 1)
-        }
-
-    for retry_num in range(num_retries + 1):
-      if retry_num > 0:
-        self._sleep(self._rand() * 2**retry_num)
-        logging.warning(
-            'Retry #%d for media upload: %s %s, following status: %d'
-            % (retry_num, self.method, self.uri, resp.status))
-
-      try:
-        resp, content = http.request(self.resumable_uri, method='PUT',
-                                     body=data,
-                                     headers=headers)
-      except:
-        self._in_error_state = True
-        raise
-      if resp.status < 500:
-        break
-
-    return self._process_response(resp, content)
-
-  def _process_response(self, resp, content):
-    """Process the response from a single chunk upload.
-
-    Args:
-      resp: httplib2.Response, the response object.
-      content: string, the content of the response.
-
-    Returns:
-      (status, body): (ResumableMediaStatus, object)
-         The body will be None until the resumable media is fully uploaded.
-
-    Raises:
-      googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
-    """
-    if resp.status in [200, 201]:
-      self._in_error_state = False
-      return None, self.postproc(resp, content)
-    elif resp.status == 308:
-      self._in_error_state = False
-      # A "308 Resume Incomplete" indicates we are not done.
-      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
-      if 'location' in resp:
-        self.resumable_uri = resp['location']
-    else:
-      self._in_error_state = True
-      raise HttpError(resp, content, uri=self.uri)
-
-    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
-            None)
-
-  def to_json(self):
-    """Returns a JSON representation of the HttpRequest."""
-    d = copy.copy(self.__dict__)
-    if d['resumable'] is not None:
-      d['resumable'] = self.resumable.to_json()
-    del d['http']
-    del d['postproc']
-    del d['_sleep']
-    del d['_rand']
-
-    return json.dumps(d)
-
-  @staticmethod
-  def from_json(s, http, postproc):
-    """Returns an HttpRequest populated with info from a JSON object."""
-    d = json.loads(s)
-    if d['resumable'] is not None:
-      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
-    return HttpRequest(
-        http,
-        postproc,
-        uri=d['uri'],
-        method=d['method'],
-        body=d['body'],
-        headers=d['headers'],
-        methodId=d['methodId'],
-        resumable=d['resumable'])
-
-
-class BatchHttpRequest(object):
-  """Batches multiple HttpRequest objects into a single HTTP request.
-
-  Example:
-    from googleapiclient.http import BatchHttpRequest
-
-    def list_animals(request_id, response, exception):
-      \"\"\"Do something with the animals list response.\"\"\"
-      if exception is not None:
-        # Do something with the exception.
-        pass
-      else:
-        # Do something with the response.
-        pass
-
-    def list_farmers(request_id, response, exception):
-      \"\"\"Do something with the farmers list response.\"\"\"
-      if exception is not None:
-        # Do something with the exception.
-        pass
-      else:
-        # Do something with the response.
-        pass
-
-    service = build('farm', 'v2')
-
-    batch = BatchHttpRequest()
-
-    batch.add(service.animals().list(), list_animals)
-    batch.add(service.farmers().list(), list_farmers)
-    batch.execute(http=http)
-  """
-
-  @util.positional(1)
-  def __init__(self, callback=None, batch_uri=None):
-    """Constructor for a BatchHttpRequest.
-
-    Args:
-      callback: callable, A callback to be called for each response, of the
-        form callback(id, response, exception). The first parameter is the
-        request id, and the second is the deserialized response object. The
-        third is a googleapiclient.errors.HttpError exception object if an HTTP error
-        occurred while processing the request, or None if no error occurred.
-      batch_uri: string, URI to send batch requests to.
-    """
-    if batch_uri is None:
-      batch_uri = 'https://www.googleapis.com/batch'
-    self._batch_uri = batch_uri
-
-    # Global callback to be called for each individual response in the batch.
-    self._callback = callback
-
-    # A map from id to request.
-    self._requests = {}
-
-    # A map from id to callback.
-    self._callbacks = {}
-
-    # List of request ids, in the order in which they were added.
-    self._order = []
-
-    # The last auto generated id.
-    self._last_auto_id = 0
-
-    # Unique ID on which to base the Content-ID headers.
-    self._base_id = None
-
-    # A map from request id to (httplib2.Response, content) response pairs
-    self._responses = {}
-
-    # A map of id(Credentials) that have been refreshed.
-    self._refreshed_credentials = {}
-
-  def _refresh_and_apply_credentials(self, request, http):
-    """Refresh the credentials and apply to the request.
-
-    Args:
-      request: HttpRequest, the request.
-      http: httplib2.Http, the global http object for the batch.
-    """
-    # Refresh the credentials, but only once per refresh_token.
-    # If the request has no http of its own then refresh the http passed in
-    # via execute().
-    creds = None
-    if request.http is not None and hasattr(request.http.request,
-        'credentials'):
-      creds = request.http.request.credentials
-    elif http is not None and hasattr(http.request, 'credentials'):
-      creds = http.request.credentials
-    if creds is not None:
-      if id(creds) not in self._refreshed_credentials:
-        creds.refresh(http)
-        self._refreshed_credentials[id(creds)] = 1
-
-    # Only apply the credentials if we are using the http object passed in,
-    # otherwise apply() will get called during _serialize_request().
-    if request.http is None or not hasattr(request.http.request,
-        'credentials'):
-      creds.apply(request.headers)
-
-  def _id_to_header(self, id_):
-    """Convert an id to a Content-ID header value.
-
-    Args:
-      id_: string, identifier of individual request.
-
-    Returns:
-      A Content-ID header with the id_ encoded into it. A UUID is prepended to
-      the value because Content-ID headers are supposed to be universally
-      unique.
-    """
-    if self._base_id is None:
-      self._base_id = uuid.uuid4()
-
-    return '<%s+%s>' % (self._base_id, quote(id_))
-
-  def _header_to_id(self, header):
-    """Convert a Content-ID header value to an id.
-
-    Presumes the Content-ID header conforms to the format that _id_to_header()
-    returns.
-
-    Args:
-      header: string, Content-ID header value.
-
-    Returns:
-      The extracted id value.
-
-    Raises:
-      BatchError if the header is not in the expected format.
-    """
-    if header[0] != '<' or header[-1] != '>':
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    if '+' not in header:
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    base, id_ = header[1:-1].rsplit('+', 1)
-
-    return unquote(id_)
-
-  def _serialize_request(self, request):
-    """Convert an HttpRequest object into a string.
-
-    Args:
-      request: HttpRequest, the request to serialize.
-
-    Returns:
-      The request as a string in application/http format.
-    """
-    # Construct status line
-    parsed = urlparse(request.uri)
-    request_line = urlunparse(
-        ('', '', parsed.path, parsed.params, parsed.query, '')
-        )
-    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
-    major, minor = request.headers.get('content-type', 'application/json').split('/')
-    msg = MIMENonMultipart(major, minor)
-    headers = request.headers.copy()
-
-    if request.http is not None and hasattr(request.http.request,
-        'credentials'):
-      request.http.request.credentials.apply(headers)
-
-    # MIMENonMultipart adds its own Content-Type header.
-    if 'content-type' in headers:
-      del headers['content-type']
-
-    for key, value in six.iteritems(headers):
-      msg[key] = value
-    msg['Host'] = parsed.netloc
-    msg.set_unixfrom(None)
-
-    if request.body is not None:
-      msg.set_payload(request.body)
-      msg['content-length'] = str(len(request.body))
-
-    # Serialize the mime message.
-    fp = StringIO()
-    # maxheaderlen=0 means don't line wrap headers.
-    g = Generator(fp, maxheaderlen=0)
-    g.flatten(msg, unixfrom=False)
-    body = fp.getvalue()
-
-    return status_line + body
-
-  def _deserialize_response(self, payload):
-    """Convert string into httplib2 response and content.
-
-    Args:
-      payload: string, headers and body as a string.
-
-    Returns:
-      A pair (resp, content), such as would be returned from httplib2.request.
-    """
-    # Strip off the status line
-    status_line, payload = payload.split('\n', 1)
-    protocol, status, reason = status_line.split(' ', 2)
-
-    # Parse the rest of the response
-    parser = FeedParser()
-    parser.feed(payload)
-    msg = parser.close()
-    msg['status'] = status
-
-    # Create httplib2.Response from the parsed headers.
-    resp = httplib2.Response(msg)
-    resp.reason = reason
-    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
-
-    content = payload.split('\r\n\r\n', 1)[1]
-
-    return resp, content
-
-  def _new_id(self):
-    """Create a new id.
-
-    Auto incrementing number that avoids conflicts with ids already used.
-
-    Returns:
-       string, a new unique id.
-    """
-    self._last_auto_id += 1
-    while str(self._last_auto_id) in self._requests:
-      self._last_auto_id += 1
-    return str(self._last_auto_id)
-
-  @util.positional(2)
-  def add(self, request, callback=None, request_id=None):
-    """Add a new request.
-
-    Every callback added will be paired with a unique id, the request_id. That
-    unique id will be passed back to the callback when the response comes back
-    from the server. The default behavior is to have the library generate its
-    own unique id. If the caller passes in a request_id then they must ensure
-    uniqueness for each request_id; if a request_id is not unique an exception
-    is raised. Callers should either supply all request_ids or never supply a
-    request id, to avoid such an error.
-
-    Args:
-      request: HttpRequest, Request to add to the batch.
-      callback: callable, A callback to be called for this response, of the
-        form callback(id, response, exception). The first parameter is the
-        request id, and the second is the deserialized response object. The
-        third is a googleapiclient.errors.HttpError exception object if an HTTP error
-        occurred while processing the request, or None if no errors occurred.
-      request_id: string, A unique id for the request. The id will be passed to
-        the callback with the response.
-
-    Returns:
-      None
-
-    Raises:
-      BatchError if a media request is added to a batch.
-      KeyError if the request_id is not unique.
-    """
-    if request_id is None:
-      request_id = self._new_id()
-    if request.resumable is not None:
-      raise BatchError("Media requests cannot be used in a batch request.")
-    if request_id in self._requests:
-      raise KeyError("A request with this ID already exists: %s" % request_id)
-    self._requests[request_id] = request
-    self._callbacks[request_id] = callback
-    self._order.append(request_id)
-
-  def _execute(self, http, order, requests):
-    """Serialize batch request, send to server, process response.
-
-    Args:
-      http: httplib2.Http, an http object to be used to make the request with.
-      order: list, list of request ids in the order they were added to the
-        batch.
-      requests: list, list of request objects to send.
-
-    Raises:
-      httplib2.HttpLib2Error if a transport error has occurred.
-      googleapiclient.errors.BatchError if the response is the wrong format.
-    """
-    message = MIMEMultipart('mixed')
-    # Message should not write out its own headers.
-    setattr(message, '_write_headers', lambda self: None)
-
-    # Add all the individual requests.
-    for request_id in order:
-      request = requests[request_id]
-
-      msg = MIMENonMultipart('application', 'http')
-      msg['Content-Transfer-Encoding'] = 'binary'
-      msg['Content-ID'] = self._id_to_header(request_id)
-
-      body = self._serialize_request(request)
-      msg.set_payload(body)
-      message.attach(msg)
-
-    # encode the body: note that we can't use `as_string`, because
-    # it plays games with `From ` lines.
-    fp = StringIO()
-    g = Generator(fp, mangle_from_=False)
-    g.flatten(message, unixfrom=False)
-    body = fp.getvalue()
-
-    headers = {}
-    headers['content-type'] = ('multipart/mixed; '
-                               'boundary="%s"') % message.get_boundary()
-
-    resp, content = http.request(self._batch_uri, method='POST', body=body,
-                                 headers=headers)
-
-    if resp.status >= 300:
-      raise HttpError(resp, content, uri=self._batch_uri)
-
-    # Prepend with a content-type header so FeedParser can handle it.
-    header = 'content-type: %s\r\n\r\n' % resp['content-type']
-    # PY3's FeedParser only accepts unicode. So we should decode content
-    # here, and encode each payload again.
-    if six.PY3:
-      content = content.decode('utf-8')
-    for_parser = header + content
-
-    parser = FeedParser()
-    parser.feed(for_parser)
-    mime_response = parser.close()
-
-    if not mime_response.is_multipart():
-      raise BatchError("Response not in multipart/mixed format.", resp=resp,
-                       content=content)
-
-    for part in mime_response.get_payload():
-      request_id = self._header_to_id(part['Content-ID'])
-      response, content = self._deserialize_response(part.get_payload())
-      # We encode content here to emulate normal http response.
-      if isinstance(content, six.text_type):
-        content = content.encode('utf-8')
-      self._responses[request_id] = (response, content)
-
-  @util.positional(1)
-  def execute(self, http=None):
-    """Execute all the requests as a single batched HTTP request.
-
-    Args:
-      http: httplib2.Http, an http object to be used in place of the one the
-        HttpRequest request object was constructed with. If one isn't supplied
-        then use a http object from the requests in this batch.
-
-    Returns:
-      None
-
-    Raises:
-      httplib2.HttpLib2Error if a transport error has occurred.
-      googleapiclient.errors.BatchError if the response is the wrong format.
-    """
-    # If we have no requests return
-    if len(self._order) == 0:
-      return None
-
-    # If http is not supplied use the first valid one given in the requests.
-    if http is None:
-      for request_id in self._order:
-        request = self._requests[request_id]
-        if request is not None:
-          http = request.http
-          break
-
-    if http is None:
-      raise ValueError("Missing a valid http object.")
-
-    self._execute(http, self._order, self._requests)
-
-    # Loop over all the requests and check for 401s. For each 401 request the
-    # credentials should be refreshed and then sent again in a separate batch.
-    redo_requests = {}
-    redo_order = []
-
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
-      if resp['status'] == '401':
-        redo_order.append(request_id)
-        request = self._requests[request_id]
-        self._refresh_and_apply_credentials(request, http)
-        redo_requests[request_id] = request
-
-    if redo_requests:
-      self._execute(http, redo_order, redo_requests)
-
-    # Now process all callbacks that are erroring, and raise an exception for
-    # ones that return a non-2xx response? Or add extra parameter to callback
-    # that contains an HttpError?
-
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
-
-      request = self._requests[request_id]
-      callback = self._callbacks[request_id]
-
-      response = None
-      exception = None
-      try:
-        if resp.status >= 300:
-          raise HttpError(resp, content, uri=request.uri)
-        response = request.postproc(resp, content)
-      except HttpError as e:
-        exception = e
-
-      if callback is not None:
-        callback(request_id, response, exception)
-      if self._callback is not None:
-        self._callback(request_id, response, exception)
-
-
-class HttpRequestMock(object):
-  """Mock of HttpRequest.
-
-  Do not construct directly, instead use RequestMockBuilder.
-  """
-
-  def __init__(self, resp, content, postproc):
-    """Constructor for HttpRequestMock
-
-    Args:
-      resp: httplib2.Response, the response to emulate coming from the request
-      content: string, the response body
-      postproc: callable, the post processing function usually supplied by
-                the model class. See model.JsonModel.response() as an example.
-    """
-    self.resp = resp
-    self.content = content
-    self.postproc = postproc
-    if resp is None:
-      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
-    if 'reason' in self.resp:
-      self.resp.reason = self.resp['reason']
-
-  def execute(self, http=None):
-    """Execute the request.
-
-    Same behavior as HttpRequest.execute(), but the response is
-    mocked and not really from an HTTP request/response.
-    """
-    return self.postproc(self.resp, self.content)
-
-
-class RequestMockBuilder(object):
-  """A simple mock of HttpRequest
-
-    Pass in a dictionary to the constructor that maps request methodIds to
-    tuples of (httplib2.Response, content, opt_expected_body) that should be
-    returned when that method is called. None may also be passed in for the
-    httplib2.Response, in which case a 200 OK response will be generated.
-    If an opt_expected_body (str or dict) is provided, it will be compared to
-    the body and UnexpectedBodyError will be raised on inequality.
-
-    Example:
-      response = '{"data": {"id": "tag:google.c...'
-      requestBuilder = RequestMockBuilder(
-        {
-          'plus.activities.get': (None, response),
-        }
-      )
-      googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
-
-    Methods that you do not supply a response for will return a
-    200 OK with an empty string as the response content or raise an exception
-    if check_unexpected is set to True. The methodId is taken from the rpcName
-    in the discovery document.
-
-    For more details see the project wiki.
-  """
-
-  def __init__(self, responses, check_unexpected=False):
-    """Constructor for RequestMockBuilder
-
-    The constructed object should be a callable object
-    that can replace the class HttpResponse.
-
-    responses - A dictionary that maps methodIds into tuples
-                of (httplib2.Response, content). The methodId
-                comes from the 'rpcName' field in the discovery
-                document.
-    check_unexpected - A boolean setting whether or not UnexpectedMethodError
-                       should be raised on unsupplied method.
-    """
-    self.responses = responses
-    self.check_unexpected = check_unexpected
-
-  def __call__(self, http, postproc, uri, method='GET', body=None,
-               headers=None, methodId=None, resumable=None):
-    """Implements the callable interface that discovery.build() expects
-    of requestBuilder, which is to build an object compatible with
-    HttpRequest.execute(). See that method for the description of the
-    parameters and the expected response.
-    """
-    if methodId in self.responses:
-      response = self.responses[methodId]
-      resp, content = response[:2]
-      if len(response) > 2:
-        # Test the body against the supplied expected_body.
-        expected_body = response[2]
-        if bool(expected_body) != bool(body):
-          # Not expecting a body and provided one
-          # or expecting a body and not provided one.
-          raise UnexpectedBodyError(expected_body, body)
-        if isinstance(expected_body, str):
-          expected_body = json.loads(expected_body)
-        body = json.loads(body)
-        if body != expected_body:
-          raise UnexpectedBodyError(expected_body, body)
-      return HttpRequestMock(resp, content, postproc)
-    elif self.check_unexpected:
-      raise UnexpectedMethodError(methodId=methodId)
-    else:
-      model = JsonModel(False)
-      return HttpRequestMock(None, '{}', model.response)
-
-
-class HttpMock(object):
-  """Mock of httplib2.Http"""
-
-  def __init__(self, filename=None, headers=None):
-    """
-    Args:
-      filename: string, absolute filename to read response from
-      headers: dict, header to return with response
-    """
-    if headers is None:
-      headers = {'status': '200'}
-    if filename:
-      f = open(filename, 'rb')
-      self.data = f.read()
-      f.close()
-    else:
-      self.data = None
-    self.response_headers = headers
-    self.headers = None
-    self.uri = None
-    self.method = None
-    self.body = None
-    self.headers = None
-
-
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers
-    return httplib2.Response(self.response_headers), self.data
-
-
-class HttpMockSequence(object):
-  """Mock of httplib2.Http
-
-  Mocks a sequence of calls to request returning different responses for each
-  call. Create an instance initialized with the desired response headers
-  and content and then use as if an httplib2.Http instance.
-
-    http = HttpMockSequence([
-      ({'status': '401'}, ''),
-      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
-      ({'status': '200'}, 'echo_request_headers'),
-      ])
-    resp, content = http.request("http://examples.com")
-
-  There are special values you can pass in for content to trigger
-  behaviours that are helpful in testing.
-
-  'echo_request_headers' means return the request headers in the response body
-  'echo_request_headers_as_json' means return the request headers encoded as
-     JSON in the response body
-  'echo_request_body' means return the request body in the response body
-  'echo_request_uri' means return the request uri in the response body
-  """
-
-  def __init__(self, iterable):
-    """
-    Args:
-      iterable: iterable, a sequence of pairs of (headers, body)
-    """
-    self._iterable = iterable
-    self.follow_redirects = True
-
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    resp, content = self._iterable.pop(0)
-    if content == 'echo_request_headers':
-      content = headers
-    elif content == 'echo_request_headers_as_json':
-      content = json.dumps(headers)
-    elif content == 'echo_request_body':
-      if hasattr(body, 'read'):
-        content = body.read()
-      else:
-        content = body
-    elif content == 'echo_request_uri':
-      content = uri
-    if isinstance(content, six.text_type):
-      content = content.encode('utf-8')
-    return httplib2.Response(resp), content
-
-
-def set_user_agent(http, user_agent):
-  """Set the user-agent on every request.
-
-  Args:
-     http - An instance of httplib2.Http
-         or something that acts like it.
-     user_agent: string, the value for the user-agent header.
-
-  Returns:
-     A modified instance of http that was passed in.
-
-  Example:
-
-    h = httplib2.Http()
-    h = set_user_agent(h, "my-app-name/6.0")
-
-  Most of the time the user-agent will be set when doing auth; this is for the
-  rare cases where you are accessing an unauthenticated endpoint.
-  """
-  request_orig = http.request
-
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if 'user-agent' in headers:
-      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
-    else:
-      headers['user-agent'] = user_agent
-    resp, content = request_orig(uri, method, body, headers,
-                        redirections, connection_type)
-    return resp, content
-
-  http.request = new_request
-  return http
-
-
-def tunnel_patch(http):
-  """Tunnel PATCH requests over POST.
-  Args:
-     http - An instance of httplib2.Http
-         or something that acts like it.
-
-  Returns:
-     A modified instance of http that was passed in.
-
-  Example:
-
-    h = httplib2.Http()
-    h = tunnel_patch(h)
-
-  Useful if you are running on a platform that doesn't support PATCH.
-  Apply this last if you are using OAuth 1.0, as changing the method
-  will result in a different signature.
-  """
-  request_orig = http.request
-
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if method == 'PATCH':
-      if 'oauth_token' in headers.get('authorization', ''):
-        logging.warning(
-            'OAuth 1.0 request made with Credentials after tunnel_patch.')
-      headers['x-http-method-override'] = "PATCH"
-      method = 'POST'
-    resp, content = request_orig(uri, method, body, headers,
-                        redirections, connection_type)
-    return resp, content
-
-  http.request = new_request
-  return http
diff --git a/tools/swarming_client/third_party/googleapiclient/mimeparse.py b/tools/swarming_client/third_party/googleapiclient/mimeparse.py
deleted file mode 100644
index bc9ad09..0000000
--- a/tools/swarming_client/third_party/googleapiclient/mimeparse.py
+++ /dev/null
@@ -1,175 +0,0 @@
-# Copyright 2014 Joe Gregorio
-#
-# Licensed under the MIT License
-
-"""MIME-Type Parser
-
-This module provides basic functions for handling mime-types. It can handle
-matching mime-types against a list of media-ranges. See section 14.1 of the
-HTTP specification [RFC 2616] for a complete explanation.
-
-   http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
-
-Contents:
- - parse_mime_type():   Parses a mime-type into its component parts.
- - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
-                          quality parameter.
- - quality():           Determines the quality ('q') of a mime-type when
-                          compared against a list of media-ranges.
- - quality_parsed():    Just like quality() except the second parameter must be
-                          pre-parsed.
- - best_match():        Choose the mime-type with the highest quality ('q')
-                          from a list of candidates.
-"""
-from __future__ import absolute_import
-from functools import reduce
-import six
-
-__version__ = '0.1.3'
-__author__ = 'Joe Gregorio'
-__email__ = 'joe@bitworking.org'
-__license__ = 'MIT License'
-__credits__ = ''
-
-
-def parse_mime_type(mime_type):
-    """Parses a mime-type into its component parts.
-
-    Carves up a mime-type and returns a tuple of the (type, subtype, params)
-    where 'params' is a dictionary of all the parameters for the media range.
-    For example, the media range 'application/xhtml;q=0.5' would get parsed
-    into:
-
-       ('application', 'xhtml', {'q': '0.5'})
-       """
-    parts = mime_type.split(';')
-    params = dict([tuple([s.strip() for s in param.split('=', 1)])\
-            for param in parts[1:]
-                  ])
-    full_type = parts[0].strip()
-    # Java URLConnection class sends an Accept header that includes a
-    # single '*'. Turn it into a legal wildcard.
-    if full_type == '*':
-        full_type = '*/*'
-    (type, subtype) = full_type.split('/')
-
-    return (type.strip(), subtype.strip(), params)
-
-
-def parse_media_range(range):
-    """Parse a media-range into its component parts.
-
-    Carves up a media range and returns a tuple of the (type, subtype,
-    params) where 'params' is a dictionary of all the parameters for the media
-    range.  For example, the media range 'application/*;q=0.5' would get parsed
-    into:
-
-       ('application', '*', {'q': '0.5'})
-
-    In addition this function also guarantees that there is a value for 'q'
-    in the params dictionary, filling it in with a proper default if
-    necessary.
-    """
-    (type, subtype, params) = parse_mime_type(range)
-    if 'q' not in params or not params['q'] or \
-            not float(params['q']) or float(params['q']) > 1\
-            or float(params['q']) < 0:
-        params['q'] = '1'
-
-    return (type, subtype, params)
-
-
-def fitness_and_quality_parsed(mime_type, parsed_ranges):
-    """Find the best match for a mime-type amongst parsed media-ranges.
-
-    Find the best match for a given mime-type against a list of media_ranges
-    that have already been parsed by parse_media_range(). Returns a tuple of
-    the fitness value and the value of the 'q' quality parameter of the best
-    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
-    'parsed_ranges' must be a list of parsed media ranges.
-    """
-    best_fitness = -1
-    best_fit_q = 0
-    (target_type, target_subtype, target_params) =\
-            parse_media_range(mime_type)
-    for (type, subtype, params) in parsed_ranges:
-        type_match = (type == target_type or\
-                      type == '*' or\
-                      target_type == '*')
-        subtype_match = (subtype == target_subtype or\
-                         subtype == '*' or\
-                         target_subtype == '*')
-        if type_match and subtype_match:
-            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
-                    six.iteritems(target_params) if key != 'q' and \
-                    key in params and value == params[key]], 0)
-            fitness = (type == target_type) and 100 or 0
-            fitness += (subtype == target_subtype) and 10 or 0
-            fitness += param_matches
-            if fitness > best_fitness:
-                best_fitness = fitness
-                best_fit_q = params['q']
-
-    return best_fitness, float(best_fit_q)
-
-
-def quality_parsed(mime_type, parsed_ranges):
-    """Find the best match for a mime-type amongst parsed media-ranges.
-
-    Find the best match for a given mime-type against a list of media_ranges
-    that have already been parsed by parse_media_range(). Returns the 'q'
-    quality parameter of the best match, 0 if no match was found. This function
-    behaves the same as quality() except that 'parsed_ranges' must be a list of
-    parsed media ranges.
-    """
-
-    return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
-
-
-def quality(mime_type, ranges):
-    """Return the quality ('q') of a mime-type against a list of media-ranges.
-
-    Returns the quality 'q' of a mime-type when compared against the
-    media-ranges in ranges. For example:
-
-    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
-                  text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
-    0.7
-
-    """
-    parsed_ranges = [parse_media_range(r) for r in ranges.split(',')]
-
-    return quality_parsed(mime_type, parsed_ranges)
-
-
-def best_match(supported, header):
-    """Return mime-type with the highest quality ('q') from list of candidates.
-
-    Takes a list of supported mime-types and finds the best match for all the
-    media-ranges listed in header. The value of header must be a string that
-    conforms to the format of the HTTP Accept: header. The value of 'supported'
-    is a list of mime-types. The list of supported mime-types should be sorted
-    in order of increasing desirability, in case of a situation where there is
-    a tie.
-
-    >>> best_match(['application/xbel+xml', 'text/xml'],
-                   'text/*;q=0.5,*/*; q=0.1')
-    'text/xml'
-    """
-    split_header = _filter_blank(header.split(','))
-    parsed_header = [parse_media_range(r) for r in split_header]
-    weighted_matches = []
-    pos = 0
-    for mime_type in supported:
-        weighted_matches.append((fitness_and_quality_parsed(mime_type,
-                                 parsed_header), pos, mime_type))
-        pos += 1
-    weighted_matches.sort()
-
-    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
-
-
-def _filter_blank(i):
-    for s in i:
-        if s.strip():
-            yield s
diff --git a/tools/swarming_client/third_party/googleapiclient/model.py b/tools/swarming_client/third_party/googleapiclient/model.py
deleted file mode 100644
index e8afb63..0000000
--- a/tools/swarming_client/third_party/googleapiclient/model.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Model objects for requests and responses.
-
-Each API may support one or more serializations, such
-as JSON, Atom, etc. The model classes are responsible
-for converting between the wire format and the Python
-object representation.
-"""
-from __future__ import absolute_import
-import six
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import json
-import logging
-
-from six.moves.urllib.parse import urlencode
-
-from googleapiclient import __version__
-from googleapiclient.errors import HttpError
-
-
-dump_request_response = False
-
-
-def _abstract():
-  raise NotImplementedError('You need to override this function')
-
-
-class Model(object):
-  """Model base class.
-
-  All Model classes should implement this interface.
-  The Model serializes and de-serializes between a wire
-  format such as JSON and a Python object representation.
-  """
-
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
-
-    Args:
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query_params: dict, parameters that appear in the query
-      body_value: object, the request body as a Python object, which must be
-                  serializable.
-    Returns:
-      A tuple of (headers, path_params, query, body)
-
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query: string, query part of the request URI
-      body: string, the body serialized in the desired wire format.
-    """
-    _abstract()
-
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
-
-    Args:
-      resp: httplib2.Response, the HTTP response headers and status
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-
-    Raises:
-      googleapiclient.errors.HttpError if a non 2xx response is received.
-    """
-    _abstract()
-
-
-class BaseModel(Model):
-  """Base model class.
-
-  Subclasses should provide implementations for the "serialize" and
-  "deserialize" methods, as well as values for the following class attributes.
-
-  Attributes:
-    accept: The value to use for the HTTP Accept header.
-    content_type: The value to use for the HTTP Content-type header.
-    no_content_response: The value to return when deserializing a 204 "No
-        Content" response.
-    alt_param: The value to supply as the "alt" query parameter for requests.
-  """
-
-  accept = None
-  content_type = None
-  no_content_response = None
-  alt_param = None
-
-  def _log_request(self, headers, path_params, query, body):
-    """Logs debugging information about the request if requested."""
-    if dump_request_response:
-      logging.info('--request-start--')
-      logging.info('-headers-start-')
-      for h, v in six.iteritems(headers):
-        logging.info('%s: %s', h, v)
-      logging.info('-headers-end-')
-      logging.info('-path-parameters-start-')
-      for h, v in six.iteritems(path_params):
-        logging.info('%s: %s', h, v)
-      logging.info('-path-parameters-end-')
-      logging.info('body: %s', body)
-      logging.info('query: %s', query)
-      logging.info('--request-end--')
-
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
-
-    Args:
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query_params: dict, parameters that appear in the query
-      body_value: object, the request body as a Python object, which must be
-                  serializable by json.
-    Returns:
-      A tuple of (headers, path_params, query, body)
-
-      headers: dict, request headers
-      path_params: dict, parameters that appear in the request path
-      query: string, query part of the request URI
-      body: string, the body serialized as JSON
-    """
-    query = self._build_query(query_params)
-    headers['accept'] = self.accept
-    headers['accept-encoding'] = 'gzip, deflate'
-    if 'user-agent' in headers:
-      headers['user-agent'] += ' '
-    else:
-      headers['user-agent'] = ''
-    headers['user-agent'] += 'google-api-python-client/%s (gzip)' % __version__
-
-    if body_value is not None:
-      headers['content-type'] = self.content_type
-      body_value = self.serialize(body_value)
-    self._log_request(headers, path_params, query, body_value)
-    return (headers, path_params, query, body_value)
-
-  def _build_query(self, params):
-    """Builds a query string.
-
-    Args:
-      params: dict, the query parameters
-
-    Returns:
-      The query parameters properly encoded into an HTTP URI query string.
-    """
-    if self.alt_param is not None:
-      params.update({'alt': self.alt_param})
-    astuples = []
-    for key, value in six.iteritems(params):
-      if type(value) == type([]):
-        for x in value:
-          x = x.encode('utf-8')
-          astuples.append((key, x))
-      else:
-        if isinstance(value, six.text_type) and callable(value.encode):
-          value = value.encode('utf-8')
-        astuples.append((key, value))
-    return '?' + urlencode(astuples)
-
-  def _log_response(self, resp, content):
-    """Logs debugging information about the response if requested."""
-    if dump_request_response:
-      logging.info('--response-start--')
-      for h, v in six.iteritems(resp):
-        logging.info('%s: %s', h, v)
-      if content:
-        logging.info(content)
-      logging.info('--response-end--')
-
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
-
-    Args:
-      resp: httplib2.Response, the HTTP response headers and status
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-
-    Raises:
-      googleapiclient.errors.HttpError if a non 2xx response is received.
-    """
-    self._log_response(resp, content)
-    # Error handling is TBD, for example, do we retry
-    # for some operation/error combinations?
-    if resp.status < 300:
-      if resp.status == 204:
-        # A 204: No Content response should be treated differently
-        # to all the other success states
-        return self.no_content_response
-      return self.deserialize(content)
-    else:
-      logging.debug('Content from bad request was: %s' % content)
-      raise HttpError(resp, content)
-
-  def serialize(self, body_value):
-    """Perform the actual Python object serialization.
-
-    Args:
-      body_value: object, the request body as a Python object.
-
-    Returns:
-      string, the body in serialized form.
-    """
-    _abstract()
-
-  def deserialize(self, content):
-    """Perform the actual deserialization from response string to Python
-    object.
-
-    Args:
-      content: string, the body of the HTTP response
-
-    Returns:
-      The body de-serialized as a Python object.
-    """
-    _abstract()
-
-
-class JsonModel(BaseModel):
-  """Model class for JSON.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request and response bodies.
-  """
-  accept = 'application/json'
-  content_type = 'application/json'
-  alt_param = 'json'
-
-  def __init__(self, data_wrapper=False):
-    """Construct a JsonModel.
-
-    Args:
-      data_wrapper: boolean, wrap requests and responses in a data wrapper
-    """
-    self._data_wrapper = data_wrapper
-
-  def serialize(self, body_value):
-    if (isinstance(body_value, dict) and 'data' not in body_value and
-        self._data_wrapper):
-      body_value = {'data': body_value}
-    return json.dumps(body_value)
-
-  def deserialize(self, content):
-    try:
-        content = content.decode('utf-8')
-    except AttributeError:
-        pass
-    body = json.loads(content)
-    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
-      body = body['data']
-    return body
-
-  @property
-  def no_content_response(self):
-    return {}
-
-
-class RawModel(JsonModel):
-  """Model class for requests that don't return JSON.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request, and returns the raw bytes
-  of the response body.
-  """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = None
-
-  def deserialize(self, content):
-    return content
-
-  @property
-  def no_content_response(self):
-    return ''
-
-
-class MediaModel(JsonModel):
-  """Model class for requests that return Media.
-
-  Serializes and de-serializes between JSON and the Python
-  object representation of HTTP request, and returns the raw bytes
-  of the response body.
-  """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = 'media'
-
-  def deserialize(self, content):
-    return content
-
-  @property
-  def no_content_response(self):
-    return ''
-
-
-class ProtocolBufferModel(BaseModel):
-  """Model class for protocol buffers.
-
-  Serializes and de-serializes the binary protocol buffer sent in the HTTP
-  request and response bodies.
-  """
-  accept = 'application/x-protobuf'
-  content_type = 'application/x-protobuf'
-  alt_param = 'proto'
-
-  def __init__(self, protocol_buffer):
-    """Constructs a ProtocolBufferModel.
-
-    The serialized protocol buffer returned in an HTTP response will be
-    de-serialized using the given protocol buffer class.
-
-    Args:
-      protocol_buffer: The protocol buffer class used to de-serialize a
-      response from the API.
-    """
-    self._protocol_buffer = protocol_buffer
-
-  def serialize(self, body_value):
-    return body_value.SerializeToString()
-
-  def deserialize(self, content):
-    return self._protocol_buffer.FromString(content)
-
-  @property
-  def no_content_response(self):
-    return self._protocol_buffer()
-
-
-def makepatch(original, modified):
-  """Create a patch object.
-
-  Some methods support PATCH, an efficient way to send updates to a resource.
-  This method allows the easy construction of patch bodies by looking at the
-  differences between a resource before and after it was modified.
-
-  Args:
-    original: object, the original deserialized resource
-    modified: object, the modified deserialized resource
-  Returns:
-    An object that contains only the changes from original to modified, in a
-    form suitable to pass to a PATCH method.
-
-  Example usage:
-    item = service.activities().get(postid=postid, userid=userid).execute()
-    original = copy.deepcopy(item)
-    item['object']['content'] = 'This is updated.'
-    service.activities.patch(postid=postid, userid=userid,
-      body=makepatch(original, item)).execute()
-  """
-  patch = {}
-  for key, original_value in six.iteritems(original):
-    modified_value = modified.get(key, None)
-    if modified_value is None:
-      # Use None to signal that the element is deleted
-      patch[key] = None
-    elif original_value != modified_value:
-      if type(original_value) == type({}):
-        # Recursively descend objects
-        patch[key] = makepatch(original_value, modified_value)
-      else:
-        # In the case of simple types or arrays we just replace
-        patch[key] = modified_value
-    else:
-      # Don't add anything to patch if there's no change
-      pass
-  for key in modified:
-    if key not in original:
-      patch[key] = modified[key]
-
-  return patch
diff --git a/tools/swarming_client/third_party/googleapiclient/sample_tools.py b/tools/swarming_client/third_party/googleapiclient/sample_tools.py
deleted file mode 100644
index 2b4e7b4..0000000
--- a/tools/swarming_client/third_party/googleapiclient/sample_tools.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for making samples.
-
-Consolidates a lot of code commonly repeated in sample applications.
-"""
-from __future__ import absolute_import
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = ['init']
-
-
-import argparse
-import httplib2
-import os
-
-from googleapiclient import discovery
-from oauth2client import client
-from oauth2client import file
-from oauth2client import tools
-
-
-def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None):
-  """A common initialization routine for samples.
-
-  Many of the sample applications do the same initialization, which has now
-  been consolidated into this function. This function uses common idioms found
-  in almost all the samples, i.e. for an API with name 'apiname', the
-  credentials are stored in a file named apiname.dat, and the
-  client_secrets.json file is stored in the same directory as the application
-  main file.
-
-  Args:
-    argv: list of string, the command-line parameters of the application.
-    name: string, name of the API.
-    version: string, version of the API.
-    doc: string, description of the application. Usually set to __doc__.
-    filename: string, filename of the application. Usually set to __file__.
-    parents: list of argparse.ArgumentParser, additional command-line flags.
-    scope: string, The OAuth scope used.
-    discovery_filename: string, name of local discovery file (JSON). Use when discovery doc not available via URL.
-
-  Returns:
-    A tuple of (service, flags), where service is the service object and flags
-    is the parsed command-line flags.
-  """
-  if scope is None:
-    scope = 'https://www.googleapis.com/auth/' + name
-
-  # Parse command-line arguments.
-  parent_parsers = [tools.argparser]
-  parent_parsers.extend(parents)
-  parser = argparse.ArgumentParser(
-      description=doc,
-      formatter_class=argparse.RawDescriptionHelpFormatter,
-      parents=parent_parsers)
-  flags = parser.parse_args(argv[1:])
-
-  # Name of a file containing the OAuth 2.0 information for this
-  # application, including client_id and client_secret, which are found
-  # on the API Access tab on the Google APIs
-  # Console <http://code.google.com/apis/console>.
-  client_secrets = os.path.join(os.path.dirname(filename),
-                                'client_secrets.json')
-
-  # Set up a Flow object to be used if we need to authenticate.
-  flow = client.flow_from_clientsecrets(client_secrets,
-      scope=scope,
-      message=tools.message_if_missing(client_secrets))
-
-  # Prepare credentials, and authorize HTTP object with them.
-  # If the credentials don't exist or are invalid, run through the native client
-  # flow. The Storage object will ensure that if successful the good
-  # credentials will get written back to a file.
-  storage = file.Storage(name + '.dat')
-  credentials = storage.get()
-  if credentials is None or credentials.invalid:
-    credentials = tools.run_flow(flow, storage, flags)
-  http = credentials.authorize(http = httplib2.Http())
-
-  if discovery_filename is None:
-    # Construct a service object via the discovery service.
-    service = discovery.build(name, version, http=http)
-  else:
-    # Construct a service object using a local discovery document file.
-    with open(discovery_filename) as discovery_file:
-      service = discovery.build_from_document(
-          discovery_file.read(),
-          base='https://www.googleapis.com/',
-          http=http)
-  return (service, flags)
diff --git a/tools/swarming_client/third_party/googleapiclient/schema.py b/tools/swarming_client/third_party/googleapiclient/schema.py
deleted file mode 100644
index ecb3f8b..0000000
--- a/tools/swarming_client/third_party/googleapiclient/schema.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Schema processing for discovery based APIs
-
-Schemas holds an API's discovery schemas. It can return those schemas as
-deserialized JSON objects, or pretty print them as prototype objects that
-conform to the schema.
-
-For example, given the schema:
-
- schema = \"\"\"{
-   "Foo": {
-    "type": "object",
-    "properties": {
-     "etag": {
-      "type": "string",
-      "description": "ETag of the collection."
-     },
-     "kind": {
-      "type": "string",
-      "description": "Type of the collection ('calendar#acl').",
-      "default": "calendar#acl"
-     },
-     "nextPageToken": {
-      "type": "string",
-      "description": "Token used to access the next
-         page of this result. Omitted if no further results are available."
-     }
-    }
-   }
- }\"\"\"
-
- s = Schemas(schema)
- print s.prettyPrintByName('Foo')
-
- Produces the following output:
-
-  {
-   "nextPageToken": "A String", # Token used to access the
-       # next page of this result. Omitted if no further results are available.
-   "kind": "A String", # Type of the collection ('calendar#acl').
-   "etag": "A String", # ETag of the collection.
-  },
-
-The constructor takes a discovery document in which to look up named schema.
-"""
-from __future__ import absolute_import
-import six
-
-# TODO(jcgregorio) support format, enum, minimum, maximum
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-import copy
-
-from oauth2client import util
-
-
-class Schemas(object):
-  """Schemas for an API."""
-
-  def __init__(self, discovery):
-    """Constructor.
-
-    Args:
-      discovery: object, Deserialized discovery document from which we pull
-        out the named schema.
-    """
-    self.schemas = discovery.get('schemas', {})
-
-    # Cache of pretty printed schemas.
-    self.pretty = {}
-
-  @util.positional(2)
-  def _prettyPrintByName(self, name, seen=None, dent=0):
-    """Get pretty printed object prototype from the schema name.
-
-    Args:
-      name: string, Name of schema in the discovery document.
-      seen: list of string, Names of schema already seen. Used to handle
-        recursive definitions.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    if seen is None:
-      seen = []
-
-    if name in seen:
-      # Do not fall into an infinite loop over recursive definitions.
-      return '# Object with schema name: %s' % name
-    seen.append(name)
-
-    if name not in self.pretty:
-      self.pretty[name] = _SchemaToStruct(self.schemas[name],
-          seen, dent=dent).to_str(self._prettyPrintByName)
-
-    seen.pop()
-
-    return self.pretty[name]
-
-  def prettyPrintByName(self, name):
-    """Get pretty printed object prototype from the schema name.
-
-    Args:
-      name: string, Name of schema in the discovery document.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
-
-  @util.positional(2)
-  def _prettyPrintSchema(self, schema, seen=None, dent=0):
-    """Get pretty printed object prototype of schema.
-
-    Args:
-      schema: object, Parsed JSON schema.
-      seen: list of string, Names of schema already seen. Used to handle
-        recursive definitions.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    if seen is None:
-      seen = []
-
-    return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
-
-  def prettyPrintSchema(self, schema):
-    """Get pretty printed object prototype of schema.
-
-    Args:
-      schema: object, Parsed JSON schema.
-
-    Returns:
-      string, A string that contains a prototype object with
-        comments that conforms to the given schema.
-    """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintSchema(schema, dent=1)[:-2]
-
-  def get(self, name):
-    """Get deserialized JSON schema from the schema name.
-
-    Args:
-      name: string, Schema name.
-    """
-    return self.schemas[name]
-
-
-class _SchemaToStruct(object):
-  """Convert schema to a prototype object."""
-
-  @util.positional(3)
-  def __init__(self, schema, seen, dent=0):
-    """Constructor.
-
-    Args:
-      schema: object, Parsed JSON schema.
-      seen: list, List of names of schema already seen while parsing. Used to
-        handle recursive definitions.
-      dent: int, Initial indentation depth.
-    """
-    # The result of this parsing kept as list of strings.
-    self.value = []
-
-    # The final value of the parsing.
-    self.string = None
-
-    # The parsed JSON schema.
-    self.schema = schema
-
-    # Indentation level.
-    self.dent = dent
-
-    # Method that when called returns a prototype object for the schema with
-    # the given name.
-    self.from_cache = None
-
-    # List of names of schema already seen while parsing.
-    self.seen = seen
-
-  def emit(self, text):
-    """Add text as a line to the output.
-
-    Args:
-      text: string, Text to output.
-    """
-    self.value.extend(["  " * self.dent, text, '\n'])
-
-  def emitBegin(self, text):
-    """Add text to the output, but with no line terminator.
-
-    Args:
-      text: string, Text to output.
-      """
-    self.value.extend(["  " * self.dent, text])
-
-  def emitEnd(self, text, comment):
-    """Add text and comment to the output with line terminator.
-
-    Args:
-      text: string, Text to output.
-      comment: string, Python comment.
-    """
-    if comment:
-      divider = '\n' + '  ' * (self.dent + 2) + '# '
-      lines = comment.splitlines()
-      lines = [x.rstrip() for x in lines]
-      comment = divider.join(lines)
-      self.value.extend([text, ' # ', comment, '\n'])
-    else:
-      self.value.extend([text, '\n'])
-
-  def indent(self):
-    """Increase indentation level."""
-    self.dent += 1
-
-  def undent(self):
-    """Decrease indentation level."""
-    self.dent -= 1
-
-  def _to_str_impl(self, schema):
-    """Prototype object based on the schema, in Python code with comments.
-
-    Args:
-      schema: object, Parsed JSON schema file.
-
-    Returns:
-      Prototype object based on the schema, in Python code with comments.
-    """
-    stype = schema.get('type')
-    if stype == 'object':
-      self.emitEnd('{', schema.get('description', ''))
-      self.indent()
-      if 'properties' in schema:
-        for pname, pschema in six.iteritems(schema.get('properties', {})):
-          self.emitBegin('"%s": ' % pname)
-          self._to_str_impl(pschema)
-      elif 'additionalProperties' in schema:
-        self.emitBegin('"a_key": ')
-        self._to_str_impl(schema['additionalProperties'])
-      self.undent()
-      self.emit('},')
-    elif '$ref' in schema:
-      schemaName = schema['$ref']
-      description = schema.get('description', '')
-      s = self.from_cache(schemaName, seen=self.seen)
-      parts = s.splitlines()
-      self.emitEnd(parts[0], description)
-      for line in parts[1:]:
-        self.emit(line.rstrip())
-    elif stype == 'boolean':
-      value = schema.get('default', 'True or False')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'string':
-      value = schema.get('default', 'A String')
-      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
-    elif stype == 'integer':
-      value = schema.get('default', '42')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'number':
-      value = schema.get('default', '3.14')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'null':
-      self.emitEnd('None,', schema.get('description', ''))
-    elif stype == 'any':
-      self.emitEnd('"",', schema.get('description', ''))
-    elif stype == 'array':
-      self.emitEnd('[', schema.get('description'))
-      self.indent()
-      self.emitBegin('')
-      self._to_str_impl(schema['items'])
-      self.undent()
-      self.emit('],')
-    else:
-      self.emit('Unknown type! %s' % stype)
-      self.emitEnd('', '')
-
-    self.string = ''.join(self.value)
-    return self.string
-
-  def to_str(self, from_cache):
-    """Prototype object based on the schema, in Python code with comments.
-
-    Args:
-      from_cache: callable(name, seen), Callable that retrieves an object
-         prototype for a schema with the given name. Seen is a list of schema
-         names already seen as we recursively descend the schema definition.
-
-    Returns:
-      Prototype object based on the schema, in Python code with comments.
-      The lines of the code will all be properly indented.
-    """
-    self.from_cache = from_cache
-    return self._to_str_impl(self.schema)
diff --git a/tools/swarming_client/third_party/httplib2/MODIFICATIONS.diff b/tools/swarming_client/third_party/httplib2/MODIFICATIONS.diff
deleted file mode 100644
index 6a0788d..0000000
--- a/tools/swarming_client/third_party/httplib2/MODIFICATIONS.diff
+++ /dev/null
@@ -1,68 +0,0 @@
---- a/__init__.py
-+++ b/__init__.py
-@@ -1079,65 +1079,6 @@ SCHEME_TO_CONNECTION = {
-     'https': HTTPSConnectionWithTimeout
- }
- 
--# Use a different connection object for Google App Engine
--try:
--    try:
--        from google.appengine.api import apiproxy_stub_map
--        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
--            raise ImportError  # Bail out; we're not actually running on App Engine.
--        from google.appengine.api.urlfetch import fetch
--        from google.appengine.api.urlfetch import InvalidURLError
--    except (ImportError, AttributeError):
--        from google3.apphosting.api import apiproxy_stub_map
--        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
--            raise ImportError  # Bail out; we're not actually running on App Engine.
--        from google3.apphosting.api.urlfetch import fetch
--        from google3.apphosting.api.urlfetch import InvalidURLError
--
--    def _new_fixed_fetch(validate_certificate):
--        def fixed_fetch(url, payload=None, method="GET", headers={},
--                        allow_truncated=False, follow_redirects=True,
--                        deadline=None):
--            if deadline is None:
--                deadline = socket.getdefaulttimeout() or 5
--            return fetch(url, payload=payload, method=method, headers=headers,
--                         allow_truncated=allow_truncated,
--                         follow_redirects=follow_redirects, deadline=deadline,
--                         validate_certificate=validate_certificate)
--        return fixed_fetch
--
--    class AppEngineHttpConnection(httplib.HTTPConnection):
--        """Use httplib on App Engine, but compensate for its weirdness.
--
--        The parameters key_file, cert_file, proxy_info, ca_certs, and
--        disable_ssl_certificate_validation are all dropped on the ground.
--        """
--        def __init__(self, host, port=None, key_file=None, cert_file=None,
--                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
--                     disable_ssl_certificate_validation=False):
--            httplib.HTTPConnection.__init__(self, host, port=port,
--                                            strict=strict, timeout=timeout)
--
--    class AppEngineHttpsConnection(httplib.HTTPSConnection):
--        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
--        def __init__(self, host, port=None, key_file=None, cert_file=None,
--                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
--                     disable_ssl_certificate_validation=False):
--            httplib.HTTPSConnection.__init__(self, host, port=port,
--                                             key_file=key_file,
--                                             cert_file=cert_file, strict=strict,
--                                             timeout=timeout)
--            self._fetch = _new_fixed_fetch(
--                    not disable_ssl_certificate_validation)
--
--    # Update the connection classes to use the Google App Engine specific ones.
--    SCHEME_TO_CONNECTION = {
--        'http': AppEngineHttpConnection,
--        'https': AppEngineHttpsConnection
--    }
--except (ImportError, AttributeError):
--    pass
--
- 
- class Http(object):
-     """An HTTP client that handles:
diff --git a/tools/swarming_client/third_party/httplib2/README.swarming b/tools/swarming_client/third_party/httplib2/README.swarming
deleted file mode 100644
index 1f461b1..0000000
--- a/tools/swarming_client/third_party/httplib2/README.swarming
+++ /dev/null
@@ -1,18 +0,0 @@
-Name: httplib2
-Short Name: httplib2
-URL: https://github.com/jcgregorio/httplib2/archive/0.9.2.tar.gz
-Version: 0.9.2 (plus 4 commits)
-Revision: e7f6e622047107e701ee70e7ec586717d97b0cbb
-License: MIT License
-
-Description:
-A comprehensive HTTP client library in Python.
-
-Local Modifications:
-- python2/httplib2/ is kept.
-- python2/httplib2/test/ stripped.
-- Appengine related code is removed.
-- See MODIFICATIONS.diff.
-
-Notes:
-Required by oauth2client library.
diff --git a/tools/swarming_client/third_party/httplib2/__init__.py b/tools/swarming_client/third_party/httplib2/__init__.py
deleted file mode 100644
index b1f56e8..0000000
--- a/tools/swarming_client/third_party/httplib2/__init__.py
+++ /dev/null
@@ -1,1639 +0,0 @@
-from __future__ import generators
-"""
-httplib2
-
-A caching http interface that supports ETags and gzip
-to conserve bandwidth.
-
-Requires Python 2.3 or later
-
-Changelog:
-2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
-
-"""
-
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
-                    "James Antill",
-                    "Xavier Verges Farrero",
-                    "Jonathan Feinberg",
-                    "Blair Zajac",
-                    "Sam Ruby",
-                    "Louis Nyffenegger"]
-__license__ = "MIT"
-__version__ = "0.9.2"
-
-import re
-import sys
-import email
-import email.Utils
-import email.Message
-import email.FeedParser
-import StringIO
-import gzip
-import zlib
-import httplib
-import urlparse
-import urllib
-import base64
-import os
-import copy
-import calendar
-import time
-import random
-import errno
-try:
-    from hashlib import sha1 as _sha, md5 as _md5
-except ImportError:
-    # prior to Python 2.5, these were separate modules
-    import sha
-    import md5
-    _sha = sha.new
-    _md5 = md5.new
-import hmac
-from gettext import gettext as _
-import socket
-
-try:
-    from httplib2 import socks
-except ImportError:
-    try:
-        import socks
-    except (ImportError, AttributeError):
-        socks = None
-
-# Build the appropriate socket wrapper for ssl
-try:
-    import ssl # python 2.6
-    ssl_SSLError = ssl.SSLError
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if disable_validation:
-            cert_reqs = ssl.CERT_NONE
-        else:
-            cert_reqs = ssl.CERT_REQUIRED
-        # We should be specifying SSL version 3 or TLS v1, but the ssl module
-        # doesn't expose the necessary knobs. So we need to go with the default
-        # of SSLv23.
-        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
-                               cert_reqs=cert_reqs, ca_certs=ca_certs)
-except (AttributeError, ImportError):
-    ssl_SSLError = None
-    def _ssl_wrap_socket(sock, key_file, cert_file,
-                         disable_validation, ca_certs):
-        if not disable_validation:
-            raise CertificateValidationUnsupported(
-                    "SSL certificate validation is not supported without "
-                    "the ssl module installed. To avoid this error, install "
-                    "the ssl module, or explicity disable validation.")
-        ssl_sock = socket.ssl(sock, key_file, cert_file)
-        return httplib.FakeSocket(sock, ssl_sock)
-
-
-if sys.version_info >= (2,3):
-    from iri2uri import iri2uri
-else:
-    def iri2uri(uri):
-        return uri
-
-def has_timeout(timeout): # python 2.6
-    if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
-        return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
-    return (timeout is not None)
-
-__all__ = [
-    'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
-    'RedirectLimit', 'FailedToDecompressContent',
-    'UnimplementedDigestAuthOptionError',
-    'UnimplementedHmacDigestAuthOptionError',
-    'debuglevel', 'ProxiesUnavailableError']
-
-
-# The httplib debug level, set to a non-zero value to get debug output
-debuglevel = 0
-
-# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
-RETRIES = 2
-
-# Python 2.3 support
-if sys.version_info < (2,4):
-    def sorted(seq):
-        seq.sort()
-        return seq
-
-# Python 2.3 support
-def HTTPResponse__getheaders(self):
-    """Return list of (header, value) tuples."""
-    if self.msg is None:
-        raise httplib.ResponseNotReady()
-    return self.msg.items()
-
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
-    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
-
-# All exceptions raised here derive from HttpLib2Error
-class HttpLib2Error(Exception): pass
-
-# Some exceptions can be caught and optionally
-# be turned back into responses.
-class HttpLib2ErrorWithResponse(HttpLib2Error):
-    def __init__(self, desc, response, content):
-        self.response = response
-        self.content = content
-        HttpLib2Error.__init__(self, desc)
-
-class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
-class RedirectLimit(HttpLib2ErrorWithResponse): pass
-class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
-class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-
-class MalformedHeader(HttpLib2Error): pass
-class RelativeURIError(HttpLib2Error): pass
-class ServerNotFoundError(HttpLib2Error): pass
-class ProxiesUnavailableError(HttpLib2Error): pass
-class CertificateValidationUnsupported(HttpLib2Error): pass
-class SSLHandshakeError(HttpLib2Error): pass
-class NotSupportedOnThisPlatform(HttpLib2Error): pass
-class CertificateHostnameMismatch(SSLHandshakeError):
-    def __init__(self, desc, host, cert):
-        HttpLib2Error.__init__(self, desc)
-        self.host = host
-        self.cert = cert
-
-# Open Items:
-# -----------
-# Proxy support
-
-# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
-
-# Pluggable cache storage (supports storing the cache in
-#   flat files by default. We need a plug-in architecture
-#   that can support Berkeley DB and Squid)
-
-# == Known Issues ==
-# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
-# Does not handle Cache-Control: max-stale
-# Does not use Age: headers when calculating cache freshness.
-
-
-# The number of redirections to follow before giving up.
-# Note that only GET redirects are automatically followed.
-# Will also honor 301 requests by saving that info and never
-# requesting that URI again.
-DEFAULT_MAX_REDIRECTS = 5
-
-try:
-    # Users can optionally provide a module that tells us where the CA_CERTS
-    # are located.
-    import ca_certs_locater
-    CA_CERTS = ca_certs_locater.get()
-except ImportError:
-    # Default CA certificates file bundled with httplib2.
-    CA_CERTS = os.path.join(
-        os.path.dirname(os.path.abspath(
-            __file__.decode(sys.getfilesystemencoding()))), "cacerts.txt")
-
-# Which headers are hop-by-hop headers by default
-HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
-
-def _get_end2end_headers(response):
-    hopbyhop = list(HOP_BY_HOP)
-    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
-    return [header for header in response.keys() if header not in hopbyhop]
-
-URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
-
-def parse_uri(uri):
-    """Parses a URI using the regex given in Appendix B of RFC 3986.
-
-        (scheme, authority, path, query, fragment) = parse_uri(uri)
-    """
-    groups = URI.match(uri).groups()
-    return (groups[1], groups[3], groups[4], groups[6], groups[8])
-
-def urlnorm(uri):
-    (scheme, authority, path, query, fragment) = parse_uri(uri)
-    if not scheme or not authority:
-        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
-    authority = authority.lower()
-    scheme = scheme.lower()
-    if not path:
-        path = "/"
-    # Could do syntax based normalization of the URI before
-    # computing the digest. See Section 6.2.2 of Std 66.
-    request_uri = query and "?".join([path, query]) or path
-    scheme = scheme.lower()
-    defrag_uri = scheme + "://" + authority + request_uri
-    return scheme, authority, request_uri, defrag_uri
-
-
-# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
-re_url_scheme    = re.compile(r'^\w+://')
-re_slash         = re.compile(r'[?/:|]+')
-
-def safename(filename):
-    """Return a filename suitable for the cache.
-
-    Strips dangerous and common characters to create a filename we
-    can use to store the cache in.
-    """
-
-    try:
-        if re_url_scheme.match(filename):
-            if isinstance(filename,str):
-                filename = filename.decode('utf-8')
-                filename = filename.encode('idna')
-            else:
-                filename = filename.encode('idna')
-    except UnicodeError:
-        pass
-    if isinstance(filename,unicode):
-        filename=filename.encode('utf-8')
-    filemd5 = _md5(filename).hexdigest()
-    filename = re_url_scheme.sub("", filename)
-    filename = re_slash.sub(",", filename)
-
-    # limit length of filename
-    if len(filename)>200:
-        filename=filename[:200]
-    return ",".join((filename, filemd5))
-
-NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
-def _normalize_headers(headers):
-    return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip())  for (key, value) in headers.iteritems()])
-
-def _parse_cache_control(headers):
-    retval = {}
-    if headers.has_key('cache-control'):
-        parts =  headers['cache-control'].split(',')
-        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
-        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
-        retval = dict(parts_with_args + parts_wo_args)
-    return retval
-
-# Whether to use a strict mode to parse WWW-Authenticate headers
-# Might lead to bad results in case of ill-formed header value,
-# so disabled by default, falling back to relaxed parsing.
-# Set to true to turn on, useful for testing servers.
-USE_WWW_AUTH_STRICT_PARSING = 0
-
-# In regex below:
-#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
-#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
-# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
-#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
-WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
-WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
-UNQUOTE_PAIRS = re.compile(r'\\(.)')
-def _parse_www_authenticate(headers, headername='www-authenticate'):
-    """Returns a dictionary of dictionaries, one dict
-    per auth_scheme."""
-    retval = {}
-    if headers.has_key(headername):
-        try:
-
-            authenticate = headers[headername].strip()
-            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
-            while authenticate:
-                # Break off the scheme at the beginning of the line
-                if headername == 'authentication-info':
-                    (auth_scheme, the_rest) = ('digest', authenticate)
-                else:
-                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
-                # Now loop over all the key value pairs that come after the scheme,
-                # being careful not to roll into the next scheme
-                match = www_auth.search(the_rest)
-                auth_params = {}
-                while match:
-                    if match and len(match.groups()) == 3:
-                        (key, value, the_rest) = match.groups()
-                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
-                    match = www_auth.search(the_rest)
-                retval[auth_scheme.lower()] = auth_params
-                authenticate = the_rest.strip()
-
-        except ValueError:
-            raise MalformedHeader("WWW-Authenticate")
-    return retval
-
-
-def _entry_disposition(response_headers, request_headers):
-    """Determine freshness from the Date, Expires and Cache-Control headers.
-
-    We don't handle the following:
-
-    1. Cache-Control: max-stale
-    2. Age: headers are not used in the calculations.
-
-    Note that this algorithm is simpler than you might think
-    because we are operating as a private (non-shared) cache.
-    This lets us ignore 's-maxage'. We can also ignore
-    'proxy-invalidate' since we aren't a proxy.
-    We will never return a stale document as
-    fresh as a design decision, and thus the non-implementation
-    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
-    since we operate as if every server has sent 'must-revalidate'.
-    Since we are private we get to ignore both 'public' and
-    'private' parameters. We also ignore 'no-transform' since
-    we don't do any transformations.
-    The 'no-store' parameter is handled at a higher level.
-    So the only Cache-Control parameters we look at are:
-
-    no-cache
-    only-if-cached
-    max-age
-    min-fresh
-    """
-
-    retval = "STALE"
-    cc = _parse_cache_control(request_headers)
-    cc_response = _parse_cache_control(response_headers)
-
-    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
-        retval = "TRANSPARENT"
-        if 'cache-control' not in request_headers:
-            request_headers['cache-control'] = 'no-cache'
-    elif cc.has_key('no-cache'):
-        retval = "TRANSPARENT"
-    elif cc_response.has_key('no-cache'):
-        retval = "STALE"
-    elif cc.has_key('only-if-cached'):
-        retval = "FRESH"
-    elif response_headers.has_key('date'):
-        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
-        now = time.time()
-        current_age = max(0, now - date)
-        if cc_response.has_key('max-age'):
-            try:
-                freshness_lifetime = int(cc_response['max-age'])
-            except ValueError:
-                freshness_lifetime = 0
-        elif response_headers.has_key('expires'):
-            expires = email.Utils.parsedate_tz(response_headers['expires'])
-            if None == expires:
-                freshness_lifetime = 0
-            else:
-                freshness_lifetime = max(0, calendar.timegm(expires) - date)
-        else:
-            freshness_lifetime = 0
-        if cc.has_key('max-age'):
-            try:
-                freshness_lifetime = int(cc['max-age'])
-            except ValueError:
-                freshness_lifetime = 0
-        if cc.has_key('min-fresh'):
-            try:
-                min_fresh = int(cc['min-fresh'])
-            except ValueError:
-                min_fresh = 0
-            current_age += min_fresh
-        if freshness_lifetime > current_age:
-            retval = "FRESH"
-    return retval
-
-def _decompressContent(response, new_content):
-    content = new_content
-    try:
-        encoding = response.get('content-encoding', None)
-        if encoding in ['gzip', 'deflate']:
-            if encoding == 'gzip':
-                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
-            if encoding == 'deflate':
-                content = zlib.decompress(content)
-            response['content-length'] = str(len(content))
-            # Record the historical presence of the encoding in a way that won't interfere.
-            response['-content-encoding'] = response['content-encoding']
-            del response['content-encoding']
-    except IOError:
-        content = ""
-        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
-    return content
-
-def _updateCache(request_headers, response_headers, content, cache, cachekey):
-    if cachekey:
-        cc = _parse_cache_control(request_headers)
-        cc_response = _parse_cache_control(response_headers)
-        if cc.has_key('no-store') or cc_response.has_key('no-store'):
-            cache.delete(cachekey)
-        else:
-            info = email.Message.Message()
-            for key, value in response_headers.iteritems():
-                if key not in ['status','content-encoding','transfer-encoding']:
-                    info[key] = value
-
-            # Add annotations to the cache to indicate what headers
-            # are variant for this request.
-            vary = response_headers.get('vary', None)
-            if vary:
-                vary_headers = vary.lower().replace(' ', '').split(',')
-                for header in vary_headers:
-                    key = '-varied-%s' % header
-                    try:
-                        info[key] = request_headers[header]
-                    except KeyError:
-                        pass
-
-            status = response_headers.status
-            if status == 304:
-                status = 200
-
-            status_header = 'status: %d\r\n' % status
-
-            header_str = info.as_string()
-
-            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
-            text = "".join([status_header, header_str, content])
-
-            cache.set(cachekey, text)
-
-def _cnonce():
-    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
-    return dig[:16]
-
-def _wsse_username_token(cnonce, iso_now, password):
-    return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
-
-
-# For credentials we need two things, first
-# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
-# Then we also need a list of URIs that have already demanded authentication
-# That list is tricky since sub-URIs can take the same auth, or the
-# auth scheme may change as you descend the tree.
-# So we also need each Auth instance to be able to tell us
-# how close to the 'top' it is.
-
-class Authentication(object):
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        self.path = path
-        self.host = host
-        self.credentials = credentials
-        self.http = http
-
-    def depth(self, request_uri):
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        return request_uri[len(self.path):].count("/")
-
-    def inscope(self, host, request_uri):
-        # XXX Should we normalize the request_uri?
-        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
-        return (host == self.host) and path.startswith(self.path)
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header. Over-ride this in sub-classes."""
-        pass
-
-    def response(self, response, content):
-        """Gives us a chance to update with new nonces
-        or such returned from the last authorized response.
-        Over-ride this in sub-classes if necessary.
-
-        Return TRUE if the request is to be retried, for
-        example Digest may return stale=true.
-        """
-        return False
-
-
-
-class BasicAuthentication(Authentication):
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header."""
-        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
-
-
-class DigestAuthentication(Authentication):
-    """Only do qop='auth' and MD5, since that
-    is all Apache currently implements"""
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-        challenge = _parse_www_authenticate(response, 'www-authenticate')
-        self.challenge = challenge['digest']
-        qop = self.challenge.get('qop', 'auth')
-        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
-        if self.challenge['qop'] is None:
-            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
-        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
-        if self.challenge['algorithm'] != 'MD5':
-            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
-        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
-        self.challenge['nc'] = 1
-
-    def request(self, method, request_uri, headers, content, cnonce = None):
-        """Modify the request headers"""
-        H = lambda x: _md5(x).hexdigest()
-        KD = lambda s, d: H("%s:%s" % (s, d))
-        A2 = "".join([method, ":", request_uri])
-        self.challenge['cnonce'] = cnonce or _cnonce()
-        request_digest  = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
-                self.challenge['nonce'],
-                '%08x' % self.challenge['nc'],
-                self.challenge['cnonce'],
-                self.challenge['qop'], H(A2)))
-        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
-                self.credentials[0],
-                self.challenge['realm'],
-                self.challenge['nonce'],
-                request_uri,
-                self.challenge['algorithm'],
-                request_digest,
-                self.challenge['qop'],
-                self.challenge['nc'],
-                self.challenge['cnonce'])
-        if self.challenge.get('opaque'):
-            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
-        self.challenge['nc'] += 1
-
-    def response(self, response, content):
-        if not response.has_key('authentication-info'):
-            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
-            if 'true' == challenge.get('stale'):
-                self.challenge['nonce'] = challenge['nonce']
-                self.challenge['nc'] = 1
-                return True
-        else:
-            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
-
-            if updated_challenge.has_key('nextnonce'):
-                self.challenge['nonce'] = updated_challenge['nextnonce']
-                self.challenge['nc'] = 1
-        return False
-
-
-class HmacDigestAuthentication(Authentication):
-    """Adapted from Robert Sayre's code and DigestAuthentication above."""
-    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"
-
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-        challenge = _parse_www_authenticate(response, 'www-authenticate')
-        self.challenge = challenge['hmacdigest']
-        # TODO: self.challenge['domain']
-        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
-        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
-            self.challenge['reason'] = 'unauthorized'
-        self.challenge['salt'] = self.challenge.get('salt', '')
-        if not self.challenge.get('snonce'):
-            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
-        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
-        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
-            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
-        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
-        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
-            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
-        if self.challenge['algorithm'] == 'HMAC-MD5':
-            self.hashmod = _md5
-        else:
-            self.hashmod = _sha
-        if self.challenge['pw-algorithm'] == 'MD5':
-            self.pwhashmod = _md5
-        else:
-            self.pwhashmod = _sha
-        self.key = "".join([self.credentials[0], ":",
-                            self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
-                            ":", self.challenge['realm']])
-        self.key = self.pwhashmod.new(self.key).hexdigest().lower()
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers"""
-        keys = _get_end2end_headers(headers)
-        keylist = "".join(["%s " % k for k in keys])
-        headers_val = "".join([headers[k] for k in keys])
-        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
-        cnonce = _cnonce()
-        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
-        request_digest  = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
-        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
-                self.credentials[0],
-                self.challenge['realm'],
-                self.challenge['snonce'],
-                cnonce,
-                request_uri,
-                created,
-                request_digest,
-                keylist)
-
-    def response(self, response, content):
-        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
-        if challenge.get('reason') in ['integrity', 'stale']:
-            return True
-        return False
-
-
-class WsseAuthentication(Authentication):
-    """This is thinly tested and should not be relied upon.
-    At this time there isn't any third party server to test against.
-    Blogger and TypePad implemented this algorithm at one point
-    but Blogger has since switched to Basic over HTTPS and
-    TypePad has implemented it wrong, by never issuing a 401
-    challenge but instead requiring your client to telepathically know that
-    their endpoint is expecting WSSE profile="UsernameToken"."""
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header."""
-        headers['authorization'] = 'WSSE profile="UsernameToken"'
-        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
-        cnonce = _cnonce()
-        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
-        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
-                self.credentials[0],
-                password_digest,
-                cnonce,
-                iso_now)
-
-class GoogleLoginAuthentication(Authentication):
-    def __init__(self, credentials, host, request_uri, headers, response, content, http):
-        from urllib import urlencode
-        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
-        challenge = _parse_www_authenticate(response, 'www-authenticate')
-        service = challenge['googlelogin'].get('service', 'xapi')
-        # Blogger actually returns the service in the challenge
-        # For the rest we guess based on the URI
-        if service == 'xapi' and  request_uri.find("calendar") > 0:
-            service = "cl"
-        # No point in guessing Base or Spreadsheet
-        #elif request_uri.find("spreadsheets") > 0:
-        #    service = "wise"
-
-        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
-        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
-        lines = content.split('\n')
-        d = dict([tuple(line.split("=", 1)) for line in lines if line])
-        if resp.status == 403:
-            self.Auth = ""
-        else:
-            self.Auth = d['Auth']
-
-    def request(self, method, request_uri, headers, content):
-        """Modify the request headers to add the appropriate
-        Authorization header."""
-        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
-
-
-AUTH_SCHEME_CLASSES = {
-    "basic": BasicAuthentication,
-    "wsse": WsseAuthentication,
-    "digest": DigestAuthentication,
-    "hmacdigest": HmacDigestAuthentication,
-    "googlelogin": GoogleLoginAuthentication
-}
-
-AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
-
-class FileCache(object):
-    """Uses a local directory as a store for cached files.
-    Not really safe to use if multiple threads or processes are going to
-    be running on the same cache.
-    """
-    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
-        self.cache = cache
-        self.safe = safe
-        if not os.path.exists(cache):
-            os.makedirs(self.cache)
-
-    def get(self, key):
-        retval = None
-        cacheFullPath = os.path.join(self.cache, self.safe(key))
-        try:
-            f = file(cacheFullPath, "rb")
-            retval = f.read()
-            f.close()
-        except IOError:
-            pass
-        return retval
-
-    def set(self, key, value):
-        cacheFullPath = os.path.join(self.cache, self.safe(key))
-        f = file(cacheFullPath, "wb")
-        f.write(value)
-        f.close()
-
-    def delete(self, key):
-        cacheFullPath = os.path.join(self.cache, self.safe(key))
-        if os.path.exists(cacheFullPath):
-            os.remove(cacheFullPath)
-
-class Credentials(object):
-    def __init__(self):
-        self.credentials = []
-
-    def add(self, name, password, domain=""):
-        self.credentials.append((domain.lower(), name, password))
-
-    def clear(self):
-        self.credentials = []
-
-    def iter(self, domain):
-        for (cdomain, name, password) in self.credentials:
-            if cdomain == "" or domain == cdomain:
-                yield (name, password)
-
-class KeyCerts(Credentials):
-    """Identical to Credentials except that
-    name/password are mapped to key/cert."""
-    pass
-
-class AllHosts(object):
-    pass
-
-class ProxyInfo(object):
-    """Collect information required to use a proxy."""
-    bypass_hosts = ()
-
-    def __init__(self, proxy_type, proxy_host, proxy_port,
-                 proxy_rdns=True, proxy_user=None, proxy_pass=None):
-        """
-        Args:
-          proxy_type: The type of proxy server.  This must be set to one of
-          socks.PROXY_TYPE_XXX constants.  For example:
-
-            p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
-              proxy_host='localhost', proxy_port=8000)
-
-          proxy_host: The hostname or IP address of the proxy server.
-
-          proxy_port: The port that the proxy server is running on.
-
-          proxy_rdns: If True (default), DNS queries will not be performed
-          locally, and instead, handed to the proxy to resolve.  This is useful
-          if the network does not allow resolution of non-local names.  In
-          httplib2 0.9 and earlier, this defaulted to False.
-
-          proxy_user: The username used to authenticate with the proxy server.
-
-          proxy_pass: The password used to authenticate with the proxy server.
-        """
-        self.proxy_type = proxy_type
-        self.proxy_host = proxy_host
-        self.proxy_port = proxy_port
-        self.proxy_rdns = proxy_rdns
-        self.proxy_user = proxy_user
-        self.proxy_pass = proxy_pass
-
-    def astuple(self):
-        return (self.proxy_type, self.proxy_host, self.proxy_port,
-                self.proxy_rdns, self.proxy_user, self.proxy_pass)
-
-    def isgood(self):
-        return (self.proxy_host != None) and (self.proxy_port != None)
-
-    def applies_to(self, hostname):
-        return not self.bypass_host(hostname)
-
-    def bypass_host(self, hostname):
-        """Has this host been excluded from the proxy config"""
-        if self.bypass_hosts is AllHosts:
-            return True
-
-        bypass = False
-        for domain in self.bypass_hosts:
-            if hostname.endswith(domain):
-                bypass = True
-
-        return bypass
-
-
-def proxy_info_from_environment(method='http'):
-    """
-    Read proxy info from the environment variables.
-    """
-    if method not in ['http', 'https']:
-        return
-
-    env_var = method + '_proxy'
-    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
-    if not url:
-        return
-    pi = proxy_info_from_url(url, method)
-
-    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
-    bypass_hosts = []
-    if no_proxy:
-        bypass_hosts = no_proxy.split(',')
-    # special case, no_proxy=* means all hosts bypassed
-    if no_proxy == '*':
-        bypass_hosts = AllHosts
-
-    pi.bypass_hosts = bypass_hosts
-    return pi
-
-def proxy_info_from_url(url, method='http'):
-    """
-    Construct a ProxyInfo from a URL (such as http_proxy env var)
-    """
-    url = urlparse.urlparse(url)
-    username = None
-    password = None
-    port = None
-    if '@' in url[1]:
-        ident, host_port = url[1].split('@', 1)
-        if ':' in ident:
-            username, password = ident.split(':', 1)
-        else:
-            password = ident
-    else:
-        host_port = url[1]
-    if ':' in host_port:
-        host, port = host_port.split(':', 1)
-    else:
-        host = host_port
-
-    if port:
-        port = int(port)
-    else:
-        port = dict(https=443, http=80)[method]
-
-    proxy_type = 3 # socks.PROXY_TYPE_HTTP
-    return ProxyInfo(
-        proxy_type = proxy_type,
-        proxy_host = host,
-        proxy_port = port,
-        proxy_user = username or None,
-        proxy_pass = password or None,
-    )
-
-
-class HTTPConnectionWithTimeout(httplib.HTTPConnection):
-    """
-    HTTPConnection subclass that supports timeouts
-
-    All timeouts are in seconds. If None is passed for timeout then
-    Python's default timeout for sockets will be used. See for example
-    the docs of socket.setdefaulttimeout():
-    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
-    """
-
-    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
-        httplib.HTTPConnection.__init__(self, host, port, strict)
-        self.timeout = timeout
-        self.proxy_info = proxy_info
-
-    def connect(self):
-        """Connect to the host and port specified in __init__."""
-        # Mostly verbatim from httplib.py.
-        if self.proxy_info and socks is None:
-            raise ProxiesUnavailableError(
-                'Proxy support missing but proxy use was requested!')
-        msg = "getaddrinfo returns an empty list"
-        if self.proxy_info and self.proxy_info.isgood():
-            use_proxy = True
-            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
-
-            host = proxy_host
-            port = proxy_port
-        else:
-            use_proxy = False
-
-            host = self.host
-            port = self.port
-
-        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
-            af, socktype, proto, canonname, sa = res
-            try:
-                if use_proxy:
-                    self.sock = socks.socksocket(af, socktype, proto)
-                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
-                else:
-                    self.sock = socket.socket(af, socktype, proto)
-                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-                # Different from httplib: support timeouts.
-                if has_timeout(self.timeout):
-                    self.sock.settimeout(self.timeout)
-                    # End of difference from httplib.
-                if self.debuglevel > 0:
-                    print "connect: (%s, %s) ************" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-
-                self.sock.connect((self.host, self.port) + sa[2:])
-            except socket.error, msg:
-                if self.debuglevel > 0:
-                    print "connect fail: (%s, %s)" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-                if self.sock:
-                    self.sock.close()
-                self.sock = None
-                continue
-            break
-        if not self.sock:
-            raise socket.error, msg
-
-class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
-    """
-    This class allows communication via SSL.
-
-    All timeouts are in seconds. If None is passed for timeout then
-    Python's default timeout for sockets will be used. See for example
-    the docs of socket.setdefaulttimeout():
-    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
-    """
-    def __init__(self, host, port=None, key_file=None, cert_file=None,
-                 strict=None, timeout=None, proxy_info=None,
-                 ca_certs=None, disable_ssl_certificate_validation=False):
-        httplib.HTTPSConnection.__init__(self, host, port=port,
-                                         key_file=key_file,
-                                         cert_file=cert_file, strict=strict)
-        self.timeout = timeout
-        self.proxy_info = proxy_info
-        if ca_certs is None:
-            ca_certs = CA_CERTS
-        self.ca_certs = ca_certs
-        self.disable_ssl_certificate_validation = \
-                disable_ssl_certificate_validation
-
-    # The following two methods were adapted from https_wrapper.py, released
-    # with the Google Appengine SDK at
-    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
-    # under the following license:
-    #
-    # Copyright 2007 Google Inc.
-    #
-    # Licensed under the Apache License, Version 2.0 (the "License");
-    # you may not use this file except in compliance with the License.
-    # You may obtain a copy of the License at
-    #
-    #     http://www.apache.org/licenses/LICENSE-2.0
-    #
-    # Unless required by applicable law or agreed to in writing, software
-    # distributed under the License is distributed on an "AS IS" BASIS,
-    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    # See the License for the specific language governing permissions and
-    # limitations under the License.
-    #
-
-    def _GetValidHostsForCert(self, cert):
-        """Returns a list of valid host globs for an SSL certificate.
-
-        Args:
-          cert: A dictionary representing an SSL certificate.
-        Returns:
-          list: A list of valid host globs.
-        """
-        if 'subjectAltName' in cert:
-            return [x[1] for x in cert['subjectAltName']
-                    if x[0].lower() == 'dns']
-        else:
-            return [x[0][1] for x in cert['subject']
-                    if x[0][0].lower() == 'commonname']
-
-    def _ValidateCertificateHostname(self, cert, hostname):
-        """Validates that a given hostname is valid for an SSL certificate.
-
-        Args:
-          cert: A dictionary representing an SSL certificate.
-          hostname: The hostname to test.
-        Returns:
-          bool: Whether or not the hostname is valid for this certificate.
-        """
-        hosts = self._GetValidHostsForCert(cert)
-        for host in hosts:
-            host_re = host.replace('.', '\.').replace('*', '[^.]*')
-            if re.search('^%s$' % (host_re,), hostname, re.I):
-                return True
-        return False
-
-    def connect(self):
-        "Connect to a host on a given (SSL) port."
-
-        msg = "getaddrinfo returns an empty list"
-        if self.proxy_info and self.proxy_info.isgood():
-            use_proxy = True
-            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
-
-            host = proxy_host
-            port = proxy_port
-        else:
-            use_proxy = False
-
-            host = self.host
-            port = self.port
-
-        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
-        for family, socktype, proto, canonname, sockaddr in address_info:
-            try:
-                if use_proxy:
-                    sock = socks.socksocket(family, socktype, proto)
-
-                    sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
-                else:
-                    sock = socket.socket(family, socktype, proto)
-                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
-                if has_timeout(self.timeout):
-                    sock.settimeout(self.timeout)
-                sock.connect((self.host, self.port))
-                self.sock = _ssl_wrap_socket(
-                    sock, self.key_file, self.cert_file,
-                    self.disable_ssl_certificate_validation, self.ca_certs)
-                if self.debuglevel > 0:
-                    print "connect: (%s, %s)" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-                if not self.disable_ssl_certificate_validation:
-                    cert = self.sock.getpeercert()
-                    hostname = self.host.split(':')[0]
-                    if not self._ValidateCertificateHostname(cert, hostname):
-                        raise CertificateHostnameMismatch(
-                            'Server presented certificate that does not match '
-                            'host %s: %s' % (hostname, cert), hostname, cert)
-            except ssl_SSLError, e:
-                if sock:
-                    sock.close()
-                if self.sock:
-                    self.sock.close()
-                self.sock = None
-                # Unfortunately the ssl module doesn't seem to provide any way
-                # to get at more detailed error information, in particular
-                # whether the error is due to certificate validation or
-                # something else (such as SSL protocol mismatch).
-                if e.errno == ssl.SSL_ERROR_SSL:
-                    raise SSLHandshakeError(e)
-                else:
-                    raise
-            except (socket.timeout, socket.gaierror):
-                raise
-            except socket.error, msg:
-                if self.debuglevel > 0:
-                    print "connect fail: (%s, %s)" % (self.host, self.port)
-                    if use_proxy:
-                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
-                if self.sock:
-                    self.sock.close()
-                self.sock = None
-                continue
-            break
-        if not self.sock:
-            raise socket.error, msg
-
-SCHEME_TO_CONNECTION = {
-    'http': HTTPConnectionWithTimeout,
-    'https': HTTPSConnectionWithTimeout
-}
-
-
-class Http(object):
-    """An HTTP client that handles:
-
-    - all methods
-    - caching
-    - ETags
-    - compression,
-    - HTTPS
-    - Basic
-    - Digest
-    - WSSE
-
-    and more.
-    """
-    def __init__(self, cache=None, timeout=None,
-                 proxy_info=proxy_info_from_environment,
-                 ca_certs=None, disable_ssl_certificate_validation=False):
-        """If 'cache' is a string then it is used as a directory name for
-        a disk cache. Otherwise it must be an object that supports the
-        same interface as FileCache.
-
-        All timeouts are in seconds. If None is passed for timeout
-        then Python's default timeout for sockets will be used. See
-        for example the docs of socket.setdefaulttimeout():
-        http://docs.python.org/library/socket.html#socket.setdefaulttimeout
-
-        `proxy_info` may be:
-          - a callable that takes the http scheme ('http' or 'https') and
-            returns a ProxyInfo instance per request. By default, uses
-            proxy_info_from_environment.
-          - a ProxyInfo instance (static proxy config).
-          - None (proxy disabled).
-
-        ca_certs is the path of a file containing root CA certificates for SSL
-        server certificate validation.  By default, a CA cert file bundled with
-        httplib2 is used.
-
-        If disable_ssl_certificate_validation is true, SSL cert validation will
-        not be performed.
-        """
-        self.proxy_info = proxy_info
-        self.ca_certs = ca_certs
-        self.disable_ssl_certificate_validation = \
-                disable_ssl_certificate_validation
-
-        # Map domain name to an httplib connection
-        self.connections = {}
-        # The location of the cache, for now a directory
-        # where cached responses are held.
-        if cache and isinstance(cache, basestring):
-            self.cache = FileCache(cache)
-        else:
-            self.cache = cache
-
-        # Name/password
-        self.credentials = Credentials()
-
-        # Key/cert
-        self.certificates = KeyCerts()
-
-        # authorization objects
-        self.authorizations = []
-
-        # If set to False then no redirects are followed, even safe ones.
-        self.follow_redirects = True
-
-        # Which HTTP methods do we apply optimistic concurrency to, i.e.
-        # which methods get an "if-match:" etag header added to them.
-        self.optimistic_concurrency_methods = ["PUT", "PATCH"]
-
-        # If 'follow_redirects' is True, and this is set to True then
-        # all redirects are followed, including unsafe ones.
-        self.follow_all_redirects = False
-
-        self.ignore_etag = False
-
-        self.force_exception_to_status_code = False
-
-        self.timeout = timeout
-
-        # Keep Authorization: headers on a redirect.
-        self.forward_authorization_headers = False
-
-    def __getstate__(self):
-        state_dict = copy.copy(self.__dict__)
-        # In case request is augmented by some foreign object such as
-        # credentials which handle auth
-        if 'request' in state_dict:
-            del state_dict['request']
-        if 'connections' in state_dict:
-            del state_dict['connections']
-        return state_dict
-
-    def __setstate__(self, state):
-        self.__dict__.update(state)
-        self.connections = {}
-
-    def _auth_from_challenge(self, host, request_uri, headers, response, content):
-        """A generator that creates Authorization objects
-           that can be applied to requests.
-        """
-        challenges = _parse_www_authenticate(response, 'www-authenticate')
-        for cred in self.credentials.iter(host):
-            for scheme in AUTH_SCHEME_ORDER:
-                if challenges.has_key(scheme):
-                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
-
-    def add_credentials(self, name, password, domain=""):
-        """Add a name and password that will be used
-        any time a request requires authentication."""
-        self.credentials.add(name, password, domain)
-
-    def add_certificate(self, key, cert, domain):
-        """Add a key and cert that will be used
-        any time a request requires authentication."""
-        self.certificates.add(key, cert, domain)
-
-    def clear_credentials(self):
-        """Remove all the names and passwords
-        that are used for authentication"""
-        self.credentials.clear()
-        self.authorizations = []
-
-    def _conn_request(self, conn, request_uri, method, body, headers):
-        i = 0
-        seen_bad_status_line = False
-        while i < RETRIES:
-            i += 1
-            try:
-                if hasattr(conn, 'sock') and conn.sock is None:
-                    conn.connect()
-                conn.request(method, request_uri, body, headers)
-            except socket.timeout:
-                raise
-            except socket.gaierror:
-                conn.close()
-                raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
-            except ssl_SSLError:
-                conn.close()
-                raise
-            except socket.error, e:
-                err = 0
-                if hasattr(e, 'args'):
-                    err = getattr(e, 'args')[0]
-                else:
-                    err = e.errno
-                if err == errno.ECONNREFUSED: # Connection refused
-                    raise
-                if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
-                    continue  # retry on potentially transient socket errors
-            except httplib.HTTPException:
-                # Just because the server closed the connection doesn't apparently mean
-                # that the server didn't send a response.
-                if hasattr(conn, 'sock') and conn.sock is None:
-                    if i < RETRIES-1:
-                        conn.close()
-                        conn.connect()
-                        continue
-                    else:
-                        conn.close()
-                        raise
-                if i < RETRIES-1:
-                    conn.close()
-                    conn.connect()
-                    continue
-            try:
-                response = conn.getresponse()
-            except httplib.BadStatusLine:
-                # If we get a BadStatusLine on the first try then that means
-                # the connection just went stale, so retry regardless of the
-                # number of RETRIES set.
-                if not seen_bad_status_line and i == 1:
-                    i = 0
-                    seen_bad_status_line = True
-                    conn.close()
-                    conn.connect()
-                    continue
-                else:
-                    conn.close()
-                    raise
-            except (socket.error, httplib.HTTPException):
-                if i < RETRIES-1:
-                    conn.close()
-                    conn.connect()
-                    continue
-                else:
-                    conn.close()
-                    raise
-            else:
-                content = ""
-                if method == "HEAD":
-                    conn.close()
-                else:
-                    content = response.read()
-                response = Response(response)
-                if method != "HEAD":
-                    content = _decompressContent(response, content)
-            break
-        return (response, content)
-
-
-    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
-        """Do the actual request using the connection object
-        and also follow one level of redirects if necessary"""
-
-        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
-        auth = auths and sorted(auths)[0][1] or None
-        if auth:
-            auth.request(method, request_uri, headers, body)
-
-        (response, content) = self._conn_request(conn, request_uri, method, body, headers)
-
-        if auth:
-            if auth.response(response, body):
-                auth.request(method, request_uri, headers, body)
-                (response, content) = self._conn_request(conn, request_uri, method, body, headers )
-                response._stale_digest = 1
-
-        if response.status == 401:
-            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
-                authorization.request(method, request_uri, headers, body)
-                (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
-                if response.status != 401:
-                    self.authorizations.append(authorization)
-                    authorization.response(response, body)
-                    break
-
-        if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
-            if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
-                # Pick out the location header and basically start from the beginning
-                # remembering first to strip the ETag header and decrement our 'depth'
-                if redirections:
-                    if not response.has_key('location') and response.status != 300:
-                        raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
-                    # Fix-up relative redirects (which violate an RFC 2616 MUST)
-                    if response.has_key('location'):
-                        location = response['location']
-                        (scheme, authority, path, query, fragment) = parse_uri(location)
-                        if authority == None:
-                            response['location'] = urlparse.urljoin(absolute_uri, location)
-                    if response.status == 301 and method in ["GET", "HEAD"]:
-                        response['-x-permanent-redirect-url'] = response['location']
-                        if not response.has_key('content-location'):
-                            response['content-location'] = absolute_uri
-                        _updateCache(headers, response, content, self.cache, cachekey)
-                    if headers.has_key('if-none-match'):
-                        del headers['if-none-match']
-                    if headers.has_key('if-modified-since'):
-                        del headers['if-modified-since']
-                    if 'authorization' in headers and not self.forward_authorization_headers:
-                        del headers['authorization']
-                    if response.has_key('location'):
-                        location = response['location']
-                        old_response = copy.deepcopy(response)
-                        if not old_response.has_key('content-location'):
-                            old_response['content-location'] = absolute_uri
-                        redirect_method = method
-                        if response.status in [302, 303]:
-                            redirect_method = "GET"
-                            body = None
-                        (response, content) = self.request(
-                            location, method=redirect_method,
-                            body=body, headers=headers,
-                            redirections=redirections - 1)
-                        response.previous = old_response
-                else:
-                    raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
-            elif response.status in [200, 203] and method in ["GET", "HEAD"]:
-                # Don't cache 206's since we aren't going to handle byte range requests
-                if not response.has_key('content-location'):
-                    response['content-location'] = absolute_uri
-                _updateCache(headers, response, content, self.cache, cachekey)
-
-        return (response, content)
-
-    def _normalize_headers(self, headers):
-        return _normalize_headers(headers)
-
-# Need to catch and rebrand some exceptions
-# Then need to optionally turn all exceptions into status codes
-# including all socket.* and httplib.* exceptions.
-
-
-    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
-        """ Performs a single HTTP request.
-
-        The 'uri' is the URI of the HTTP resource and can begin with either
-        'http' or 'https'. The value of 'uri' must be an absolute URI.
-
-        The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
-        etc. There is no restriction on the methods allowed.
-
-        The 'body' is the entity body to be sent with the request. It is a
-        string object.
-
-        Any extra headers that are to be sent with the request should be
-        provided in the 'headers' dictionary.
-
-        The maximum number of redirects to follow before raising an
-        exception is 'redirections'. The default is 5.
-
-        The return value is a tuple of (response, content), the first
-        being an instance of the 'Response' class, the second being
-        a string that contains the response entity body.
-        """
-        try:
-            if headers is None:
-                headers = {}
-            else:
-                headers = self._normalize_headers(headers)
-
-            if not headers.has_key('user-agent'):
-                headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
-
-            uri = iri2uri(uri)
-
-            (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
-            domain_port = authority.split(":")[0:2]
-            if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
-                scheme = 'https'
-                authority = domain_port[0]
-
-            proxy_info = self._get_proxy_info(scheme, authority)
-
-            conn_key = scheme+":"+authority
-            if conn_key in self.connections:
-                conn = self.connections[conn_key]
-            else:
-                if not connection_type:
-                    connection_type = SCHEME_TO_CONNECTION[scheme]
-                certs = list(self.certificates.iter(authority))
-                if scheme == 'https':
-                    if certs:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, key_file=certs[0][0],
-                                cert_file=certs[0][1], timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                    else:
-                        conn = self.connections[conn_key] = connection_type(
-                                authority, timeout=self.timeout,
-                                proxy_info=proxy_info,
-                                ca_certs=self.ca_certs,
-                                disable_ssl_certificate_validation=
-                                        self.disable_ssl_certificate_validation)
-                else:
-                    conn = self.connections[conn_key] = connection_type(
-                            authority, timeout=self.timeout,
-                            proxy_info=proxy_info)
-                conn.set_debuglevel(debuglevel)
-
-            if 'range' not in headers and 'accept-encoding' not in headers:
-                headers['accept-encoding'] = 'gzip, deflate'
-
-            info = email.Message.Message()
-            cached_value = None
-            if self.cache:
-                cachekey = defrag_uri.encode('utf-8')
-                cached_value = self.cache.get(cachekey)
-                if cached_value:
-                    # info = email.message_from_string(cached_value)
-                    #
-                    # Need to replace the line above with the kludge below
-                    # to fix the non-existent bug not fixed in this
-                    # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
-                    try:
-                        info, content = cached_value.split('\r\n\r\n', 1)
-                        feedparser = email.FeedParser.FeedParser()
-                        feedparser.feed(info)
-                        info = feedparser.close()
-                        feedparser._parse = None
-                    except (IndexError, ValueError):
-                        self.cache.delete(cachekey)
-                        cachekey = None
-                        cached_value = None
-            else:
-                cachekey = None
-
-            if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
-                # http://www.w3.org/1999/04/Editing/
-                headers['if-match'] = info['etag']
-
-            if method not in ["GET", "HEAD"] and self.cache and cachekey:
-                # RFC 2616 Section 13.10
-                self.cache.delete(cachekey)
-
-            # Check the vary header in the cache to see if this request
-            # matches what varies in the cache.
-            if method in ['GET', 'HEAD'] and 'vary' in info:
-                vary = info['vary']
-                vary_headers = vary.lower().replace(' ', '').split(',')
-                for header in vary_headers:
-                    key = '-varied-%s' % header
-                    value = info[key]
-                    if headers.get(header, None) != value:
-                        cached_value = None
-                        break
-
-            if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
-                if info.has_key('-x-permanent-redirect-url'):
-                    # Should cached permanent redirects be counted in our redirection count? For now, yes.
-                    if redirections <= 0:
-                        raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
-                    (response, new_content) = self.request(
-                        info['-x-permanent-redirect-url'], method='GET',
-                        headers=headers, redirections=redirections - 1)
-                    response.previous = Response(info)
-                    response.previous.fromcache = True
-                else:
-                    # Determine our course of action:
-                    #   Is the cached entry fresh or stale?
-                    #   Has the client requested a non-cached response?
-                    #
-                    # There seems to be three possible answers:
-                    # 1. [FRESH] Return the cache entry w/o doing a GET
-                    # 2. [STALE] Do the GET (but add in cache validators if available)
-                    # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
-                    entry_disposition = _entry_disposition(info, headers)
-
-                    if entry_disposition == "FRESH":
-                        if not cached_value:
-                            info['status'] = '504'
-                            content = ""
-                        response = Response(info)
-                        if cached_value:
-                            response.fromcache = True
-                        return (response, content)
-
-                    if entry_disposition == "STALE":
-                        if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
-                            headers['if-none-match'] = info['etag']
-                        if info.has_key('last-modified') and not 'last-modified' in headers:
-                            headers['if-modified-since'] = info['last-modified']
-                    elif entry_disposition == "TRANSPARENT":
-                        pass
-
-                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-
-                if response.status == 304 and method == "GET":
-                    # Rewrite the cache entry with the new end-to-end headers
-                    # Take all headers that are in response
-                    # and overwrite their values in info.
-                    # unless they are hop-by-hop, or are listed in the connection header.
-
-                    for key in _get_end2end_headers(response):
-                        info[key] = response[key]
-                    merged_response = Response(info)
-                    if hasattr(response, "_stale_digest"):
-                        merged_response._stale_digest = response._stale_digest
-                    _updateCache(headers, merged_response, content, self.cache, cachekey)
-                    response = merged_response
-                    response.status = 200
-                    response.fromcache = True
-
-                elif response.status == 200:
-                    content = new_content
-                else:
-                    self.cache.delete(cachekey)
-                    content = new_content
-            else:
-                cc = _parse_cache_control(headers)
-                if cc.has_key('only-if-cached'):
-                    info['status'] = '504'
-                    response = Response(info)
-                    content = ""
-                else:
-                    (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
-        except Exception, e:
-            if self.force_exception_to_status_code:
-                if isinstance(e, HttpLib2ErrorWithResponse):
-                    response = e.response
-                    content = e.content
-                    response.status = 500
-                    response.reason = str(e)
-                elif isinstance(e, socket.timeout):
-                    content = "Request Timeout"
-                    response = Response({
-                        "content-type": "text/plain",
-                        "status": "408",
-                        "content-length": len(content)
-                    })
-                    response.reason = "Request Timeout"
-                else:
-                    content = str(e)
-                    response = Response({
-                        "content-type": "text/plain",
-                        "status": "400",
-                        "content-length": len(content)
-                    })
-                    response.reason = "Bad Request"
-            else:
-                raise
-
-
-        return (response, content)
-
-    def _get_proxy_info(self, scheme, authority):
-        """Return a ProxyInfo instance (or None) based on the scheme
-        and authority.
-        """
-        hostname, port = urllib.splitport(authority)
-        proxy_info = self.proxy_info
-        if callable(proxy_info):
-            proxy_info = proxy_info(scheme)
-
-        if (hasattr(proxy_info, 'applies_to')
-            and not proxy_info.applies_to(hostname)):
-            proxy_info = None
-        return proxy_info
-
-
-class Response(dict):
-    """An object more like email.Message than httplib.HTTPResponse."""
-
-    """Is this response from our local cache"""
-    fromcache = False
-
-    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
-    version = 11
-
-    "Status code returned by server. "
-    status = 200
-
-    """Reason phrase returned by server."""
-    reason = "Ok"
-
-    previous = None
-
-    def __init__(self, info):
-        # info is either an email.Message or
-        # an httplib.HTTPResponse object.
-        if isinstance(info, httplib.HTTPResponse):
-            for key, value in info.getheaders():
-                self[key.lower()] = value
-            self.status = info.status
-            self['status'] = str(self.status)
-            self.reason = info.reason
-            self.version = info.version
-        elif isinstance(info, email.Message.Message):
-            for key, value in info.items():
-                self[key.lower()] = value
-            self.status = int(self['status'])
-        else:
-            for key, value in info.iteritems():
-                self[key.lower()] = value
-            self.status = int(self.get('status', self.status))
-            self.reason = self.get('reason', self.reason)
-
-
-    def __getattr__(self, name):
-        if name == 'dict':
-            return self
-        else:
-            raise AttributeError, name
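For reference, a minimal usage sketch of the httplib2.Http client whose source appears above (illustrative only, written in the same Python 2 style as that code; the cache directory name, credentials, and URL are made-up examples):

    import httplib2

    # Passing a directory name makes Http use the directory-backed FileCache.
    h = httplib2.Http(".cache")
    # Credentials are tried when the server answers with a 401 challenge.
    h.add_credentials("user", "secret")
    # request() returns a (Response, body) tuple; Response is a dict-like object.
    response, content = h.request("http://example.com/", "GET")
    print response.status, response.fromcache, len(content)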
diff --git a/tools/swarming_client/third_party/httplib2/cacerts.txt b/tools/swarming_client/third_party/httplib2/cacerts.txt
deleted file mode 100644
index 8791824..0000000
--- a/tools/swarming_client/third_party/httplib2/cacerts.txt
+++ /dev/null
@@ -1,2139 +0,0 @@
-# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Label: "GTE CyberTrust Global Root"
-# Serial: 421
-# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
-# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
-# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
------BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Server CA"
-# Serial: 1
-# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
-# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
-# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
------BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Premium Server CA"
-# Serial: 1
-# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
-# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
-# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
------BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
------END CERTIFICATE-----
-
-# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
-# Subject: O=Equifax OU=Equifax Secure Certificate Authority
-# Label: "Equifax Secure CA"
-# Serial: 903804111
-# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
-# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
-# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
-# Serial: 167285380242319648451154478808036881606
-# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
-# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
-# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Label: "GlobalSign Root CA - R2"
-# Serial: 4835703278459682885658125
-# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
-# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
-# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Label: "ValiCert Class 1 VA"
-# Serial: 1
-# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
-# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
-# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Label: "ValiCert Class 2 VA"
-# Serial: 1
-# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
-# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
-# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Label: "RSA Root Certificate 1"
-# Serial: 1
-# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
-# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
-# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
-# Serial: 206684696279472310254277870180966723415
-# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
-# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
-# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
-# Serial: 314531972711909413743075096039378935511
-# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
-# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
-# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Secure Server CA"
-# Serial: 927650371
-# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
-# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
-# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946059622
-# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
-# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
-# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
------BEGIN CERTIFICATE-----
-MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
-MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
-LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
-YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
-A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
-K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
-sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
-MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
-XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
-HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
-4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
-vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
-CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
-WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
-oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
-h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
-f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
-B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
-vUxFnmG6v4SBkgPR0ml8xQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
-RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
-VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
-DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
-ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
-VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
-mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
-IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
-dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
-jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
-BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
-DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
-jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
-Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
-R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure Global eBusiness CA"
-# Serial: 1
-# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
-# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
-# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure eBusiness CA 1"
-# Serial: 4
-# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
-# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
-# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
-# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
-# Label: "Equifax Secure eBusiness CA 2"
-# Serial: 930140085
-# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
-# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
-# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
-dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
-NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
-VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
-vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
-BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
-IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
-NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
-y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
-0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
-E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Low-Value Services Root"
-# Serial: 1
-# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
-# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
-# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
------BEGIN CERTIFICATE-----
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
-QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
-VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
-CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
-tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
-dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
-PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
-+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
-BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
-MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
-ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
-IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
-7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
-43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
-eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
-pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
-WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Label: "AddTrust External Root"
-# Serial: 1
-# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
-# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
-# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
------BEGIN CERTIFICATE-----
-MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
-IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
-bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
-dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
-H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
-uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
-mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
-a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
-E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
-WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
-IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
-AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
-YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
-6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
-Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
-c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
-mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Public Services Root"
-# Serial: 1
-# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
-# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
-# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
-MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
-ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
-BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
-6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
-GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
-dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
-1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
-62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
-BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
-MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
-cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
-b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
-IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
-iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
-4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
-XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Qualified Certificates Root"
-# Serial: 1
-# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
-# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
-# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
------BEGIN CERTIFICATE-----
-MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
-MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
-EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
-BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
-xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
-87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
-2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
-WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
-0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
-A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
-pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
-ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
-hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
-hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
-dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
-P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
-iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
-xqE=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Label: "Entrust Root Certification Authority"
-# Serial: 1164660820
-# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
-# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
-# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
------BEGIN CERTIFICATE-----
-MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
-Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
-KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
-cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
-NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
-NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
-ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
-BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
-Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
-4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
-KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
-rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
-94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
-sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
-gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
-kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
-vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
-A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
-O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
-AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
-9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
-eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
-0vdXcDazv/wor3ElhVsT/h5/WrQ8
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Label: "GeoTrust Global CA"
-# Serial: 144470
-# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
-# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
-# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
------BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Global CA 2"
-# Serial: 1
-# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
-# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
-# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
------BEGIN CERTIFICATE-----
-MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
-IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
-R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
-PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
-Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
-TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
-5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
-S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
-2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
-FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
-EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
-EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
-/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
-A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
-abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
-I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
-4iIprn2DQKi6bA==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA"
-# Serial: 1
-# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
-# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
-# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
------BEGIN CERTIFICATE-----
-MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
-BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
-IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
-VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
-cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
-QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
-F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
-c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
-mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
-VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
-teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
-f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
-Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
-nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
-/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
-MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
-9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
-aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
-IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
-ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
-uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
-QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
-ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
-DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
-bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA 2"
-# Serial: 1
-# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
-# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
-# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
------BEGIN CERTIFICATE-----
-MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
-VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
-c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
-WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
-FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
-XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
-se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
-KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
-IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
-y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
-hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
-QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
-Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
-HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
-KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
-L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
-Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
-ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
-T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
-GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
-1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
-OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
-6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
-QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
------END CERTIFICATE-----
-
-# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
-# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
-# Label: "America Online Root Certification Authority 1"
-# Serial: 1
-# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
-# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
-# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
------BEGIN CERTIFICATE-----
-MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
-bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
-MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
-ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
-Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
-hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
-1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
-OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
-2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
-O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
-AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
-BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
-Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
-LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
-oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
-MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
-sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
------END CERTIFICATE-----
-
-# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
-# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
-# Label: "America Online Root Certification Authority 2"
-# Serial: 1
-# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
-# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
-# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
------BEGIN CERTIFICATE-----
-MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
-bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
-MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
-ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
-Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
-ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
-206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
-KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
-JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
-BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
-Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
-PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
-Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
-Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
-o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
-+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
-YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
-FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
-AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
-xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
-LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
-obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
-CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
-IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
-DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
-AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
-Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
-AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
-Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
-RY8mkaKO/qk=
------END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
-YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
-BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
-3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
-rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
-oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
-MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
-QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
-AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
-GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
-Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
-G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
-l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
-smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
-# Subject: CN=Secure Certificate Services O=Comodo CA Limited
-# Label: "Comodo Secure Services root"
-# Serial: 1
-# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
-# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
-# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
------BEGIN CERTIFICATE-----
-MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
-ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
-fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
-BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
-cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
-HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
-CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
-3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
-6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
-HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
-Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
-Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
-DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
-5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
-Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
-gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
-izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
------END CERTIFICATE-----
-
-# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
-# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
-# Label: "Comodo Trusted Services root"
-# Serial: 1
-# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
-# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
-# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
-
-# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN DATACorp SGC Root CA"
-# Serial: 91374294542884689855167577680241077609
-# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
-# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
-# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
------BEGIN CERTIFICATE-----
-MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
-kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
-IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
-EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
-VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
-dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
-E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
-D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
-4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
-o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
-MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
-LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
-BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
-AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
-Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
-j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
-KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
-2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
-mfnGV/TJVTl4uix5yaaIK/QI
------END CERTIFICATE-----
-
-# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN USERFirst Hardware Root CA"
-# Serial: 91374294542884704022267039221184531197
-# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
-# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
-# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
------BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root CA"
-# Serial: 17154717934120587862167794914071425081
-# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
-# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
-# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root CA"
-# Serial: 10944719598952040374951832963794454346
-# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
-# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
-# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
------BEGIN CERTIFICATE-----
-MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
-CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
-43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
-T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
-gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
-DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
-hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
-06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
-PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
-YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
-CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert High Assurance EV Root CA"
-# Serial: 3553400076410547919724730734378100087
-# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
-# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
-# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
-ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
-LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
-RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
-+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
-PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
-Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
-hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
-EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
-FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
-eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
-hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
-Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
-vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
-+OkuE6N36B9K
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Label: "GeoTrust Primary Certification Authority"
-# Serial: 32798226551256963324313806436981982369
-# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
-# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
-# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
------BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA"
-# Serial: 69529181992039203566298953787712940909
-# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
-# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
-# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
-# Serial: 33037644167568058970164719475676101450
-# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
-# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
-# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
-# Label: "COMODO Certification Authority"
-# Serial: 104350513648249232941998508985834464573
-# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
-# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
-# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
------BEGIN CERTIFICATE-----
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
-gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
-BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
-MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
-YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
-RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
-UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
-2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
-+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
-nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
-/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
-PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
-SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
-RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
-zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
-BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
-ZQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Label: "Network Solutions Certificate Authority"
-# Serial: 116697915152937497490437556386812487904
-# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
-# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
-# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
------BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Label: "COMODO ECC Certification Authority"
-# Serial: 41578283867086692638256921589707938090
-# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
-# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
-# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
------BEGIN CERTIFICATE-----
-MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
-MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
-ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
-T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
-biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
-FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
-cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
-BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
-fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
-GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
-# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
-# Label: "TC TrustCenter Class 2 CA II"
-# Serial: 941389028203453866782103406992443
-# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
-# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
-# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
------BEGIN CERTIFICATE-----
-MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
-BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
-Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
-OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
-SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
-VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
-tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
-uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
-XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
-8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
-5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
-kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
-dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
-Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
-JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
-Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
-GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
-ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
-au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
-hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
-dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
-# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
-# Label: "TC TrustCenter Class 3 CA II"
-# Serial: 1506523511417715638772220530020799
-# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
-# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
-# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
------BEGIN CERTIFICATE-----
-MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
-BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
-Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
-OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
-SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
-VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
-Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
-Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
-1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
-ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
-Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
-XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
-dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
-Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
-JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
-Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
-irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
-TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
-g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
-95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
-S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Label: "TC TrustCenter Universal CA I"
-# Serial: 601024842042189035295619584734726
-# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
-# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
-# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
-BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
-c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
-MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
-R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
-VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
-JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
-fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
-jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
-wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
-fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
-VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
-CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
-7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
-8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
-ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
-ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
-2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
------END CERTIFICATE-----
-
-# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Label: "Cybertrust Global Root"
-# Serial: 4835703278459682877484360
-# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
-# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
-# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
------BEGIN CERTIFICATE-----
-MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
-A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
-bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
-b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
-7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
-J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
-HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
-t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
-FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
-XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
-MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
-hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Label: "GlobalSign Root CA - R3"
-# Serial: 4835703278459759426209954
-# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
-# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
-# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
------BEGIN CERTIFICATE-----
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
-MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
-RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
-gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
-QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
-XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
-LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
-RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
-jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
-6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
-mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
-Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
-WD9f
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Label: "TC TrustCenter Universal CA III"
-# Serial: 2010889993983507346460533407902964
-# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
-# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
-# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
------BEGIN CERTIFICATE-----
-MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
-BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
-c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
-MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
-ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
-BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
-5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
-DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
-zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
-yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
-dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
-MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
-Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
-4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
-dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
-aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
-DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
-CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
-LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
------END CERTIFICATE-----
-
-# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Label: "Go Daddy Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
-# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
-# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
-# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
-# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
-ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
-MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
-b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
-aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
-Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
-nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
-HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
-Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
-dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
-HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
-CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
-sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
-4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
-8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
-pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
-mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Services Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
-# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
-# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
-ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
-MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
-VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
-ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
-dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
-OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
-8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
-Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
-hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
-6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
-AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
-bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
-ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
-qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
-iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
-0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
-sSi6
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
-# Subject: CN=AffirmTrust Commercial O=AffirmTrust
-# Label: "AffirmTrust Commercial"
-# Serial: 8608355977964138876
-# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
-# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
-# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
-Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
-ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
-MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
-yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
-VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
-nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
-XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
-vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
-Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
-N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
-nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Networking O=AffirmTrust
-# Subject: CN=AffirmTrust Networking O=AffirmTrust
-# Label: "AffirmTrust Networking"
-# Serial: 8957382827206547757
-# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
-# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
-# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
-YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
-kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
-QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
-6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
-yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
-QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
-tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
-QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
-Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
-olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
-x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium O=AffirmTrust
-# Subject: CN=AffirmTrust Premium O=AffirmTrust
-# Label: "AffirmTrust Premium"
-# Serial: 7893706540734352110
-# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
-# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
-# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
-dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
-A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
-qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
-JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
-+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
-s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
-HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
-70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
-V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
-qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
-5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
-C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
-OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
-FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
-KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
-Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
-8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
-MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
-0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
-u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
-YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
-GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
-RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
-KeC2uAloGRwYQw==
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Label: "AffirmTrust Premium ECC"
-# Serial: 8401224907861490260
-# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
-# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
-# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
------BEGIN CERTIFICATE-----
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
-VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
-cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
-BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
-VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
-0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
-ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
-A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
-aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
-flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 45
-# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
-# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
-# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
------BEGIN CERTIFICATE-----
-MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
-F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
-ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
-ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
-aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
-c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
-d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
-CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
-dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
-wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
-Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
-0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
-pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
-CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
-P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
-1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
-KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
-JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
-8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
-fyWl8kgAwKQB2j8=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Label: "StartCom Certification Authority G2"
-# Serial: 59
-# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
-# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
-# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
------BEGIN CERTIFICATE-----
-MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
-OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
-A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
-JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
-vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
-D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
-Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
-RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
-HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
-nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
-0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
-UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
-Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
-TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
-BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
-2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
-UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
-6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
-9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
-HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
-XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
-IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
-hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
-so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
------END CERTIFICATE-----
diff --git a/tools/swarming_client/third_party/httplib2/iri2uri.py b/tools/swarming_client/third_party/httplib2/iri2uri.py
deleted file mode 100644
index d88c91f..0000000
--- a/tools/swarming_client/third_party/httplib2/iri2uri.py
+++ /dev/null
@@ -1,110 +0,0 @@
-"""
-iri2uri
-
-Converts an IRI to a URI.
-
-"""
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = []
-__version__ = "1.0.0"
-__license__ = "MIT"
-__history__ = """
-"""
-
-import urlparse
-
-
-# Convert an IRI to a URI following the rules in RFC 3987
-#
-# The characters we need to encode and escape are defined in the spec:
-#
-# iprivate =  %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
-# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
-#         / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
-#         / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
-#         / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
-#         / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
-#         / %xD0000-DFFFD / %xE1000-EFFFD
-
-escape_range = [
-    (0xA0, 0xD7FF),
-    (0xE000, 0xF8FF),
-    (0xF900, 0xFDCF),
-    (0xFDF0, 0xFFEF),
-    (0x10000, 0x1FFFD),
-    (0x20000, 0x2FFFD),
-    (0x30000, 0x3FFFD),
-    (0x40000, 0x4FFFD),
-    (0x50000, 0x5FFFD),
-    (0x60000, 0x6FFFD),
-    (0x70000, 0x7FFFD),
-    (0x80000, 0x8FFFD),
-    (0x90000, 0x9FFFD),
-    (0xA0000, 0xAFFFD),
-    (0xB0000, 0xBFFFD),
-    (0xC0000, 0xCFFFD),
-    (0xD0000, 0xDFFFD),
-    (0xE1000, 0xEFFFD),
-    (0xF0000, 0xFFFFD),
-    (0x100000, 0x10FFFD),
-]
-
-def encode(c):
-    retval = c
-    i = ord(c)
-    for low, high in escape_range:
-        if i < low:
-            break
-        if i >= low and i <= high:
-            retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
-            break
-    return retval
-
-
-def iri2uri(uri):
-    """Convert an IRI to a URI. Note that IRIs must be
-    passed in as unicode strings. That is, do not utf-8 encode
-    the IRI before passing it into the function."""
-    if isinstance(uri ,unicode):
-        (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
-        authority = authority.encode('idna')
-        # For each character in 'ucschar' or 'iprivate'
-        #  1. encode as utf-8
-        #  2. then %-encode each octet of that utf-8
-        uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
-        uri = "".join([encode(c) for c in uri])
-    return uri
-
-if __name__ == "__main__":
-    import unittest
-
-    class Test(unittest.TestCase):
-
-        def test_uris(self):
-            """Test that URIs are invariant under the transformation."""
-            invariant = [
-                u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
-                u"http://www.ietf.org/rfc/rfc2396.txt",
-                u"ldap://[2001:db8::7]/c=GB?objectClass?one",
-                u"mailto:John.Doe@example.com",
-                u"news:comp.infosystems.www.servers.unix",
-                u"tel:+1-816-555-1212",
-                u"telnet://192.0.2.16:80/",
-                u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
-            for uri in invariant:
-                self.assertEqual(uri, iri2uri(uri))
-
-        def test_iri(self):
-            """ Test that the right type of escaping is done for each part of the URI."""
-            self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
-            self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
-            self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
-            self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
-            self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
-            self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
-
-    unittest.main()
-
-
diff --git a/tools/swarming_client/third_party/httplib2/socks.py b/tools/swarming_client/third_party/httplib2/socks.py
deleted file mode 100644
index 0991f4c..0000000
--- a/tools/swarming_client/third_party/httplib2/socks.py
+++ /dev/null
@@ -1,438 +0,0 @@
-"""SocksiPy - Python SOCKS module.
-Version 1.00
-
-Copyright 2006 Dan-Haim. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-3. Neither the name of Dan Haim nor the names of his contributors may be used
-   to endorse or promote products derived from this software without specific
-   prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
-OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
-
-
-This module provides a standard socket-like interface for Python
-for tunneling connections through SOCKS proxies.
-
-"""
-
-"""
-
-Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
-for use in PyLoris (http://pyloris.sourceforge.net/)
-
-Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
-mainly to merge bug fixes found in Sourceforge
-
-"""
-
-import base64
-import socket
-import struct
-import sys
-
-if getattr(socket, 'socket', None) is None:
-    raise ImportError('socket.socket missing, proxy support unusable')
-
-PROXY_TYPE_SOCKS4 = 1
-PROXY_TYPE_SOCKS5 = 2
-PROXY_TYPE_HTTP = 3
-PROXY_TYPE_HTTP_NO_TUNNEL = 4
-
-_defaultproxy = None
-_orgsocket = socket.socket
-
-class ProxyError(Exception): pass
-class GeneralProxyError(ProxyError): pass
-class Socks5AuthError(ProxyError): pass
-class Socks5Error(ProxyError): pass
-class Socks4Error(ProxyError): pass
-class HTTPError(ProxyError): pass
-
-_generalerrors = ("success",
-    "invalid data",
-    "not connected",
-    "not available",
-    "bad proxy type",
-    "bad input")
-
-_socks5errors = ("succeeded",
-    "general SOCKS server failure",
-    "connection not allowed by ruleset",
-    "Network unreachable",
-    "Host unreachable",
-    "Connection refused",
-    "TTL expired",
-    "Command not supported",
-    "Address type not supported",
-    "Unknown error")
-
-_socks5autherrors = ("succeeded",
-    "authentication is required",
-    "all offered authentication methods were rejected",
-    "unknown username or invalid password",
-    "unknown error")
-
-_socks4errors = ("request granted",
-    "request rejected or failed",
-    "request rejected because SOCKS server cannot connect to identd on the client",
-    "request rejected because the client program and identd report different user-ids",
-    "unknown error")
-
-def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
-    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
-    Sets a default proxy which all further socksocket objects will use,
-    unless explicitly changed.
-    """
-    global _defaultproxy
-    _defaultproxy = (proxytype, addr, port, rdns, username, password)
-
-def wrapmodule(module):
-    """wrapmodule(module)
-    Attempts to replace a module's socket library with a SOCKS socket. Must set
-    a default proxy using setdefaultproxy(...) first.
-    This will only work on modules that import socket directly into the namespace;
-    most of the Python Standard Library falls into this category.
-    """
-    if _defaultproxy != None:
-        module.socket.socket = socksocket
-    else:
-        raise GeneralProxyError((4, "no proxy specified"))
-
-class socksocket(socket.socket):
-    """socksocket([family[, type[, proto]]]) -> socket object
-    Open a SOCKS enabled socket. The parameters are the same as
-    those of the standard socket init. In order for SOCKS to work,
-    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
-    """
-
-    def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
-        _orgsocket.__init__(self, family, type, proto, _sock)
-        if _defaultproxy != None:
-            self.__proxy = _defaultproxy
-        else:
-            self.__proxy = (None, None, None, None, None, None)
-        self.__proxysockname = None
-        self.__proxypeername = None
-        self.__httptunnel = True
-
-    def __recvall(self, count):
-        """__recvall(count) -> data
-        Receive EXACTLY the number of bytes requested from the socket.
-        Blocks until the required number of bytes have been received.
-        """
-        data = self.recv(count)
-        while len(data) < count:
-            d = self.recv(count-len(data))
-            if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
-            data = data + d
-        return data
-
-    def sendall(self, content, *args):
-        """ override socket.socket.sendall method to rewrite the header
-        for non-tunneling proxies if needed
-        """
-        if not self.__httptunnel:
-            content = self.__rewriteproxy(content)
-        return super(socksocket, self).sendall(content, *args)
-
-    def __rewriteproxy(self, header):
-        """ rewrite HTTP request headers to support non-tunneling proxies
-        (i.e. those which do not support the CONNECT method).
-        This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
-        """
-        host, endpt = None, None
-        hdrs = header.split("\r\n")
-        for hdr in hdrs:
-            if hdr.lower().startswith("host:"):
-                host = hdr
-            elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
-                endpt = hdr
-        if host and endpt:
-            hdrs.remove(host)
-            hdrs.remove(endpt)
-            host = host.split(" ")[1]
-            endpt = endpt.split(" ")
-            if (self.__proxy[4] != None and self.__proxy[5] != None):
-                hdrs.insert(0, self.__getauthheader())
-            hdrs.insert(0, "Host: %s" % host)
-            hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
-        return "\r\n".join(hdrs)
-
-    def __getauthheader(self):
-        auth = self.__proxy[4] + ":" + self.__proxy[5]
-        return "Proxy-Authorization: Basic " + base64.b64encode(auth)
-
-    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
-        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
-        Sets the proxy to be used.
-        proxytype -    The type of the proxy to be used. Three types
-                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
-                PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
-        addr -        The address of the server (IP or DNS).
-        port -        The port of the server. Defaults to 1080 for SOCKS
-                servers and 8080 for HTTP proxy servers.
-        rdns -        Should DNS queries be performed on the remote side
-                (rather than the local side). The default is True.
-                Note: This has no effect with SOCKS4 servers.
-        username -    Username to authenticate with to the server.
-                The default is no authentication.
-        password -    Password to authenticate with to the server.
-                Only relevant when username is also provided.
-        """
-        self.__proxy = (proxytype, addr, port, rdns, username, password)
-
-    def __negotiatesocks5(self, destaddr, destport):
-        """__negotiatesocks5(self,destaddr,destport)
-        Negotiates a connection through a SOCKS5 server.
-        """
-        # First we'll send the authentication packages we support.
-        if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
-            # The username/password details were supplied to the
-            # setproxy method so we support the USERNAME/PASSWORD
-            # authentication (in addition to the standard none).
-            self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
-        else:
-            # No username/password were entered, therefore we
-            # only support connections with no authentication.
-            self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
-        # We'll receive the server's response to determine which
-        # method was selected
-        chosenauth = self.__recvall(2)
-        if chosenauth[0:1] != chr(0x05).encode():
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        # Check the chosen authentication method
-        if chosenauth[1:2] == chr(0x00).encode():
-            # No authentication is required
-            pass
-        elif chosenauth[1:2] == chr(0x02).encode():
-            # Okay, we need to perform a basic username/password
-            # authentication.
-            self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
-            authstat = self.__recvall(2)
-            if authstat[0:1] != chr(0x01).encode():
-                # Bad response
-                self.close()
-                raise GeneralProxyError((1, _generalerrors[1]))
-            if authstat[1:2] != chr(0x00).encode():
-                # Authentication failed
-                self.close()
-                raise Socks5AuthError((3, _socks5autherrors[3]))
-            # Authentication succeeded
-        else:
-            # Reaching here is always bad
-            self.close()
-            if chosenauth[1] == chr(0xFF).encode():
-                raise Socks5AuthError((2, _socks5autherrors[2]))
-            else:
-                raise GeneralProxyError((1, _generalerrors[1]))
-        # Now we can request the actual connection
-        req = struct.pack('BBB', 0x05, 0x01, 0x00)
-        # If the given destination address is an IP address, we'll
-        # use the IPv4 address request even if remote resolving was specified.
-        try:
-            ipaddr = socket.inet_aton(destaddr)
-            req = req + chr(0x01).encode() + ipaddr
-        except socket.error:
-            # Well it's not an IP number,  so it's probably a DNS name.
-            if self.__proxy[3]:
-                # Resolve remotely
-                ipaddr = None
-                req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
-            else:
-                # Resolve locally
-                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
-                req = req + chr(0x01).encode() + ipaddr
-        req = req + struct.pack(">H", destport)
-        self.sendall(req)
-        # Get the response
-        resp = self.__recvall(4)
-        if resp[0:1] != chr(0x05).encode():
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        elif resp[1:2] != chr(0x00).encode():
-            # Connection failed
-            self.close()
-            if ord(resp[1:2])<=8:
-                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
-            else:
-                raise Socks5Error((9, _socks5errors[9]))
-        # Get the bound address/port
-        elif resp[3:4] == chr(0x01).encode():
-            boundaddr = self.__recvall(4)
-        elif resp[3:4] == chr(0x03).encode():
-            resp = resp + self.recv(1)
-            boundaddr = self.__recvall(ord(resp[4:5]))
-        else:
-            self.close()
-            raise GeneralProxyError((1,_generalerrors[1]))
-        boundport = struct.unpack(">H", self.__recvall(2))[0]
-        self.__proxysockname = (boundaddr, boundport)
-        if ipaddr != None:
-            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
-        else:
-            self.__proxypeername = (destaddr, destport)
-
-    def getproxysockname(self):
-        """getsockname() -> address info
-        Returns the bound IP address and port number at the proxy.
-        """
-        return self.__proxysockname
-
-    def getproxypeername(self):
-        """getproxypeername() -> address info
-        Returns the IP and port number of the proxy.
-        """
-        return _orgsocket.getpeername(self)
-
-    def getpeername(self):
-        """getpeername() -> address info
-        Returns the IP address and port number of the destination
-        machine (note: getproxypeername returns the proxy)
-        """
-        return self.__proxypeername
-
-    def __negotiatesocks4(self,destaddr,destport):
-        """__negotiatesocks4(self,destaddr,destport)
-        Negotiates a connection through a SOCKS4 server.
-        """
-        # Check if the destination address provided is an IP address
-        rmtrslv = False
-        try:
-            ipaddr = socket.inet_aton(destaddr)
-        except socket.error:
-            # It's a DNS name. Check where it should be resolved.
-            if self.__proxy[3]:
-                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
-                rmtrslv = True
-            else:
-                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
-        # Construct the request packet
-        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
-        # The username parameter is considered userid for SOCKS4
-        if self.__proxy[4] != None:
-            req = req + self.__proxy[4]
-        req = req + chr(0x00).encode()
-        # DNS name if remote resolving is required
-        # NOTE: This is actually an extension to the SOCKS4 protocol
-        # called SOCKS4A and may not be supported in all cases.
-        if rmtrslv:
-            req = req + destaddr + chr(0x00).encode()
-        self.sendall(req)
-        # Get the response from the server
-        resp = self.__recvall(8)
-        if resp[0:1] != chr(0x00).encode():
-            # Bad data
-            self.close()
-            raise GeneralProxyError((1,_generalerrors[1]))
-        if resp[1:2] != chr(0x5A).encode():
-            # Server returned an error
-            self.close()
-            if ord(resp[1:2]) in (91, 92, 93):
-                self.close()
-                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
-            else:
-                raise Socks4Error((94, _socks4errors[4]))
-        # Get the bound address/port
-        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
-        if rmtrslv != None:
-            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
-        else:
-            self.__proxypeername = (destaddr, destport)
-
-    def __negotiatehttp(self, destaddr, destport):
-        """__negotiatehttp(self,destaddr,destport)
-        Negotiates a connection through an HTTP server.
-        """
-        # If we need to resolve locally, we do this now
-        if not self.__proxy[3]:
-            addr = socket.gethostbyname(destaddr)
-        else:
-            addr = destaddr
-        headers =  ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
-        headers += ["Host: ", destaddr, "\r\n"]
-        if (self.__proxy[4] != None and self.__proxy[5] != None):
-                headers += [self.__getauthheader(), "\r\n"]
-        headers.append("\r\n")
-        self.sendall("".join(headers).encode())
-        # We read the response until we get the string "\r\n\r\n"
-        resp = self.recv(1)
-        while resp.find("\r\n\r\n".encode()) == -1:
-            resp = resp + self.recv(1)
-        # We just need the first line to check if the connection
-        # was successful
-        statusline = resp.splitlines()[0].split(" ".encode(), 2)
-        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        try:
-            statuscode = int(statusline[1])
-        except ValueError:
-            self.close()
-            raise GeneralProxyError((1, _generalerrors[1]))
-        if statuscode != 200:
-            self.close()
-            raise HTTPError((statuscode, statusline[2]))
-        self.__proxysockname = ("0.0.0.0", 0)
-        self.__proxypeername = (addr, destport)
-
-    def connect(self, destpair):
-        """connect(self, despair)
-        Connects to the specified destination through a proxy.
-        destpar - A tuple of the IP/DNS address and the port number.
-        (identical to socket's connect).
-        To select the proxy server use setproxy().
-        """
-        # Do a minimal input check first
-        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
-            raise GeneralProxyError((5, _generalerrors[5]))
-        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 1080
-            _orgsocket.connect(self, (self.__proxy[1], portnum))
-            self.__negotiatesocks5(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 1080
-            _orgsocket.connect(self,(self.__proxy[1], portnum))
-            self.__negotiatesocks4(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_HTTP:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 8080
-            _orgsocket.connect(self,(self.__proxy[1], portnum))
-            self.__negotiatehttp(destpair[0], destpair[1])
-        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
-            if self.__proxy[2] != None:
-                portnum = self.__proxy[2]
-            else:
-                portnum = 8080
-            _orgsocket.connect(self,(self.__proxy[1],portnum))
-            if destpair[1] == 443:
-                self.__negotiatehttp(destpair[0],destpair[1])
-            else:
-                self.__httptunnel = False
-        elif self.__proxy[0] == None:
-            _orgsocket.connect(self, (destpair[0], destpair[1]))
-        else:
-            raise GeneralProxyError((4, _generalerrors[4]))
diff --git a/tools/swarming_client/third_party/infra_libs/LICENSE b/tools/swarming_client/third_party/infra_libs/LICENSE
deleted file mode 100644
index a32e00c..0000000
--- a/tools/swarming_client/third_party/infra_libs/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//    * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-//    * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-//    * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/tools/swarming_client/third_party/infra_libs/README.swarming b/tools/swarming_client/third_party/infra_libs/README.swarming
deleted file mode 100644
index 6dd0181..0000000
--- a/tools/swarming_client/third_party/infra_libs/README.swarming
+++ /dev/null
@@ -1,13 +0,0 @@
-Name: infra_libs
-Short Name: infra_libs
-URL: https://chromium.googlesource.com/infra/infra/+/master/packages/infra_libs/infra_libs/
-Version: 1.0.0
-Revision: 7e28e70c2fe992617957983af92ba827183cc9fd
-License: Chromium License
-
-Description:
-Common libraries used by Chrome Infrastructure.
-
-Local Modifications:
-- Removed all test/ directories.
-- Copied LICENSE from the root of infra.git.
diff --git a/tools/swarming_client/third_party/infra_libs/__init__.py b/tools/swarming_client/third_party/infra_libs/__init__.py
deleted file mode 100644
index 4033f42..0000000
--- a/tools/swarming_client/third_party/infra_libs/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from . import ts_mon  # Must be imported first so httplib2_utils can import it.
-
-from infra_libs.httplib2_utils import AuthError
-from infra_libs.httplib2_utils import get_authenticated_http
-from infra_libs.httplib2_utils import get_signed_jwt_assertion_credentials
-from infra_libs.httplib2_utils import RetriableHttp, InstrumentedHttp, HttpMock
-from infra_libs.httplib2_utils import SERVICE_ACCOUNTS_CREDS_ROOT
-from infra_libs.utils import read_json_as_utf8
-from infra_libs.utils import rmtree
-from infra_libs.utils import temporary_directory
-
-import sys
-
-if sys.platform == 'linux2':  # pragma: no cover
-  from . import _command_line_linux as command_line
-else:  # pragma: no cover
-  from . import _command_line_stub as command_line
diff --git a/tools/swarming_client/third_party/infra_libs/app.py b/tools/swarming_client/third_party/infra_libs/app.py
deleted file mode 100644
index 1aafe7c..0000000
--- a/tools/swarming_client/third_party/infra_libs/app.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import argparse
-import datetime
-import logging
-import os
-import sys
-
-import psutil
-
-from infra_libs import logs
-from infra_libs import ts_mon
-
-
-class BaseApplication(object):
-  """Encapsulates common boilerplate for setting up an application.
-
-  Subclasses must implement the main() method, and will usually also implement
-  add_argparse_options().
-
-  By default this will initialise logging and timeseries monitoring (ts_mon)
-  modules.
-
-  Minimal example::
-
-    from infra_libs import app
-
-    class MyApplication(app.BaseApplication):
-      def main(self, opts):
-        # Do stuff.
-
-    if __name__ == '__main__':
-      MyApplication().run()
-
-  Class variables (override these in your class definition):
-    PROG_NAME: The program name to display in the --help message.  Defaults to
-               sys.argv[0].  Passed to argparse.ArgumentParser.
-    DESCRIPTION: Text to display in the --help message.  Passed to
-                 argparse.ArgumentParser.
-    USES_STANDARD_LOGGING: Whether to configure the standard logging libraries.
-                           Defaults to True.
-    USES_TS_MON: Whether to configure timeseries monitoring.  Defaults to True.
-
-  Instance variables (use these in your application):
-    opts: The argparse.Namespace containing parsed commandline arguments.
-  """
-
-  PROG_NAME = None
-  DESCRIPTION = None
-  USES_STANDARD_LOGGING = True
-  USES_TS_MON = True
-
-  def __init__(self):
-    self.opts = None
-    self.parser = None
-
-  def add_argparse_options(self, parser):
-    """Register any arguments used by this application.
-
-    Override this method and call parser.add_argument().
-
-    Args:
-      parser: An argparse.ArgumentParser object.
-    """
-
-    if self.USES_STANDARD_LOGGING:
-      logs.add_argparse_options(parser)
-    if self.USES_TS_MON:
-      ts_mon.add_argparse_options(parser)
-
-  def process_argparse_options(self, options):
-    """Process any commandline arguments.
-
-    Args:
-      options: An argparse.Namespace object.
-    """
-
-    if self.USES_STANDARD_LOGGING:
-      logs.process_argparse_options(options)
-    if self.USES_TS_MON:
-      ts_mon.process_argparse_options(options)
-
-  def main(self, opts):
-    """Your application's main method.
-
-    Do the work of your application here.  When this method returns the
-    application will exit.
-
-    Args:
-      opts: An argparse.Namespace containing parsed commandline options.  This
-        is passed as an argument for convenience but is also accessible as an
-        instance variable (self.opts).
-
-    Return:
-      An integer exit status, or None to use an exit status of 0.
-    """
-    raise NotImplementedError
-
-  def run(self, args=None):
-    """Main application entry point."""
-
-    if args is None:  # pragma: no cover
-      args = sys.argv
-
-    # Add and parse commandline args.
-    self.parser = argparse.ArgumentParser(
-        description=self.DESCRIPTION,
-        prog=self.PROG_NAME or args[0],
-        formatter_class=argparse.RawTextHelpFormatter)
-
-    self.add_argparse_options(self.parser)
-    self.opts = self.parser.parse_args(args[1:])
-    self.process_argparse_options(self.opts)
-
-    # Print a startup log message.
-    logging.info('Process started at %s', datetime.datetime.utcfromtimestamp(
-        psutil.Process().create_time()).isoformat())
-    logging.info('Command line arguments:')
-    for index, arg in enumerate(sys.argv):
-      logging.info('argv[%d]: %s', index, arg)
-    logging.info('Process id %d', os.getpid())
-    logging.info('Current working directory %s', os.getcwd())
-
-    # Run the application's main function.
-    try:
-      status = self.main(self.opts)
-    except Exception:
-      logging.exception('Uncaught exception, exiting:')
-      if self.USES_TS_MON:
-        # Flushing ts_mon to try to report the exception.
-        ts_mon.flush()
-      status = 1
-
-    sys.exit(status)
diff --git a/tools/swarming_client/third_party/infra_libs/authentication.py b/tools/swarming_client/third_party/infra_libs/authentication.py
deleted file mode 100644
index 84b54ea..0000000
--- a/tools/swarming_client/third_party/infra_libs/authentication.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# TODO(dsansome): Change callers to using this directly, and remove this file.
-from infra_libs import get_signed_jwt_assertion_credentials
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/__init__.py b/tools/swarming_client/third_party/infra_libs/event_mon/__init__.py
deleted file mode 100644
index fa9e8f8..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-# pylint: disable=line-too-long
-from infra_libs.event_mon.checkouts import get_revinfo, parse_revinfo
-
-from infra_libs.event_mon.config import add_argparse_options
-from infra_libs.event_mon.config import close
-from infra_libs.event_mon.config import set_default_event, get_default_event
-from infra_libs.event_mon.config import process_argparse_options
-from infra_libs.event_mon.config import setup_monitoring
-
-from infra_libs.event_mon.monitoring import BUILD_EVENT_TYPES, BUILD_RESULTS
-from infra_libs.event_mon.monitoring import EVENT_TYPES, TIMESTAMP_KINDS
-from infra_libs.event_mon.monitoring import GOMA_ERROR_TYPES
-from infra_libs.event_mon.monitoring import Event
-from infra_libs.event_mon.monitoring import get_build_event
-from infra_libs.event_mon.monitoring import send_build_event
-from infra_libs.event_mon.monitoring import send_events
-from infra_libs.event_mon.monitoring import send_service_event
-
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import ChromeInfraEvent
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import BuildEvent
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import ServiceEvent
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import InfraEventSource
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import CodeVersion
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import CQEvent
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import MachineProviderEvent
-
-from infra_libs.event_mon.protos.log_request_lite_pb2 import LogRequestLite
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/checkouts.py b/tools/swarming_client/third_party/infra_libs/event_mon/checkouts.py
deleted file mode 100644
index 92532b8..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/checkouts.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Functions in this file rely on depot_tools being checked out as a sibling
-# of infra.git.
-
-import logging
-import os
-import re
-import subprocess
-
-
-BASE_DIR = os.path.dirname(
-  os.path.dirname(
-    os.path.dirname(
-      os.path.dirname(os.path.realpath(__file__)))))
-
-
-def parse_revinfo(revinfo):
-  """Parse the output of "gclient revinfo -a"
-
-  Args:
-    revinfo (str): string containing gclient stdout.
-
-  Returns:
-    revinfo_d (dict): <directory>: (URL, revision)
-  """
-  revision_expr = re.compile('(.*)@([^@]*)')
-
-  revinfo_d = {}
-  for line in revinfo.splitlines():
-    if ':' not in line:
-      continue
-
-    # TODO: this fails when the file name contains a colon.
-    path, line = line.split(':', 1)
-    if '@' in line:
-      url, revision = revision_expr.match(line).groups()
-      revision = revision.strip()
-    else:
-      # Split at the last @
-      url, revision = line.strip(), None
-
-    path = path.strip()
-    url = url.strip()
-    revinfo_d[path] = {'source_url': url, 'revision': revision}
-  return revinfo_d
-
-
-def get_revinfo(cwd=None):  # pragma: no cover
-  """Call gclient to get the list of all revisions actually checked out on disk.
-
-  gclient is expected to be under depot_tools/ sibling to infra/.
-  If gclient can't be found or fails to run, returns {}.
-
-  Args:
-    cwd (str): working directory where to run gclient. If None, use the
-      current working directory.
-  Returns:
-    revinfo (dict): keys are local paths, values are dicts with keys:
-      'source_url' or 'revision'. The latter can be a git SHA1 or an svn
-      revision.
-  """
-
-  cmd = [os.path.join(BASE_DIR, 'depot_tools', 'gclient'), 'revinfo', '-a']
-  logging.debug('Running: %s', ' '.join(cmd))
-  revinfo = ''
-  try:
-    revinfo = subprocess.check_output(cmd, cwd=cwd)
-  except (subprocess.CalledProcessError, OSError):
-    logging.exception('Command failed to run: %s', ' '.join(cmd))
-  return parse_revinfo(revinfo)
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/config.py b/tools/swarming_client/third_party/infra_libs/event_mon/config.py
deleted file mode 100644
index e915456..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/config.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import socket
-
-import infra_libs
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import (
-  ChromeInfraEvent, ServiceEvent)
-from infra_libs.event_mon import router as ev_router
-
-DEFAULT_SERVICE_ACCOUNT_CREDS = 'service-account-event-mon.json'
-RUNTYPES = set(('dry', 'test', 'prod', 'file'))
-
-# Remote endpoints
-ENDPOINTS = {
-  'test': 'https://jmt17.google.com/log',
-  'prod': 'https://play.googleapis.com/log',
-}
-
-# Instance of router._Router (singleton)
-_router = None
-
-# Cache some generally useful values / options
-_cache = {}
-
-
-def add_argparse_options(parser):
-  # The default values should make sense for local testing, not production.
-  group = parser.add_argument_group('Event monitoring (event_mon) '
-                                    'global options')
-  group.add_argument('--dry-run', default=False,
-                     action='store_true',
-                     help='When passed, just print what would happen, but '
-                     'do not do it.'
-                     )
-  group.add_argument('--event-mon-run-type', default='dry',
-                      choices=RUNTYPES,
-                      help='Determine how to send data. "dry" does not send\n'
-                      'anything. "test" sends to the test endpoint, \n'
-                      '"prod" to the actual production endpoint, and "file" \n'
-                      'writes to a file.')
-  group.add_argument('--event-mon-output-file', default='event_mon.output',
-                     help='File into which LogEventLite serialized protos are\n'
-                     'written when --event-mon-run-type is "file"')
-  group.add_argument('--event-mon-service-name',
-                      help='Service name to use in log events.')
-  group.add_argument('--event-mon-hostname',
-                      help='Hostname to use in log events.')
-  group.add_argument('--event-mon-appengine-name',
-                      help='App name to use in log events.')
-  group.add_argument('--event-mon-service-account-creds',
-                     default=DEFAULT_SERVICE_ACCOUNT_CREDS,
-                     metavar='JSON_FILE',
-                     help="Path to a json file containing a service account's"
-                     "\ncredentials. This is relative to the path specified\n"
-                     "in --event-mon-service-accounts-creds-root\n"
-                     "Defaults to '%(default)s'")
-  group.add_argument('--event-mon-service-accounts-creds-root',
-                     metavar='DIR',
-                     default=infra_libs.SERVICE_ACCOUNTS_CREDS_ROOT,
-                     help="Directory containing service accounts credentials.\n"
-                     "Defaults to %(default)s"
-                     )
-  group.add_argument('--event-mon-http-timeout', default=10, type=int,
-                     help='Timeout in seconds for HTTP requests to send events')
-  group.add_argument('--event-mon-http-retry-backoff', default=2., type=float,
-                     help='Time in seconds before retrying POSTing to the HTTP '
-                     'endpoint. Randomized exponential backoff is applied on '
-                     'subsequent retries.')
-
-
-def process_argparse_options(args):  # pragma: no cover
-  """Initializes event monitoring based on provided arguments.
-
-  Args:
-    args(argparse.Namespace): output of ArgumentParser.parse_args.
-  """
-  setup_monitoring(
-    run_type=args.event_mon_run_type,
-    hostname=args.event_mon_hostname,
-    service_name=args.event_mon_service_name,
-    appengine_name=args.event_mon_appengine_name,
-    service_account_creds=args.event_mon_service_account_creds,
-    service_accounts_creds_root=args.event_mon_service_accounts_creds_root,
-    output_file=args.event_mon_output_file,
-    dry_run=args.dry_run,
-    http_timeout=args.event_mon_http_timeout,
-    http_retry_backoff=args.event_mon_http_retry_backoff
-  )
-
-
-def setup_monitoring(run_type='dry',
-                     hostname=None,
-                     service_name=None,
-                     appengine_name=None,
-                     service_account_creds=None,
-                     service_accounts_creds_root=None,
-                     output_file=None,
-                     dry_run=False,
-                     http_timeout=10,
-                     http_retry_backoff=2.):
-  """Initializes event monitoring.
-
-  This function is mainly used to provide default global values which are
-  required for the module to work.
-
-  If you're implementing a command-line tool, use process_argparse_options
-  instead.
-
-  Args:
-    run_type (str): One of 'dry', 'test', or 'prod'. Respectively: do nothing,
-      hit the testing endpoint, or hit the production endpoint.
-
-    hostname (str): hostname as it should appear in the event. If not provided
-      a default value is computed.
-
-    service_name (str): logical name of the service that emits events. e.g.
-      "commit_queue".
-
-    appengine_name (str): name of the appengine app, if running on appengine.
-
-    service_account_creds (str): path to a json file containing a service
-      account's credentials obtained from a Google Cloud project. **Path is
-      relative to service_account_creds_root**, which is not the current path by
-      default. See infra_libs.authentication for details.
-
-    service_account_creds_root (str): path containing credentials files.
-
-    output_file (str): file where to write the output in run_type == 'file'
-      mode.
-
-    dry_run (bool): if True, the code has no side-effect, what would have been
-      done is printed instead.
-
-    http_timeout (int): timeout in seconds for HTTP requests to send events.
-
-    http_retry_backoff (float): time in seconds before retrying POSTing to the
-         HTTP endpoint. Randomized exponential backoff is applied on subsequent
-         retries.
-  """
-  global _router
-  logging.debug('event_mon: setting up monitoring.')
-
-  if not _router:
-    default_event = ChromeInfraEvent()
-
-    hostname = hostname or socket.getfqdn()
-    # hostname might be empty string or None on some systems, who knows.
-    if hostname:  # pragma: no branch
-      default_event.event_source.host_name = hostname
-    else:  # pragma: no cover
-      logging.warning('event_mon: unable to determine hostname.')
-
-    if service_name:
-      default_event.event_source.service_name = service_name
-    if appengine_name:
-      default_event.event_source.appengine_name = appengine_name
-
-    _cache['default_event'] = default_event
-    if run_type in ('prod', 'test'):
-      _cache['service_account_creds'] = service_account_creds
-      _cache['service_accounts_creds_root'] = service_accounts_creds_root
-    else:
-      _cache['service_account_creds'] = None
-      _cache['service_accounts_creds_root'] = None
-
-    if run_type not in RUNTYPES:
-      logging.error('Unknown run_type (%s). Setting to "dry"', run_type)
-      run_type = 'dry'
-
-    if run_type == 'dry':
-      # If we are running on AppEngine or devserver, use logging module.
-      server_software = os.environ.get('SERVER_SOFTWARE', '')
-      if (server_software.startswith('Google App Engine') or
-          server_software.startswith('Development')):
-        _router = ev_router._LoggingStreamRouter()
-      else:
-        _router = ev_router._TextStreamRouter()
-    elif run_type == 'file':
-      _router = ev_router._LocalFileRouter(output_file,
-                                           dry_run=dry_run)
-    else:
-      _router = ev_router._HttpRouter(_cache,
-                                      ENDPOINTS.get(run_type),
-                                      dry_run=dry_run,
-                                      timeout=http_timeout,
-                                      retry_backoff=http_retry_backoff)
-
-
-def get_default_event():
-  """Returns a copy of the default event."""
-
-  # We return a copy here to tell people not to modify the event directly.
-  ret = ChromeInfraEvent()
-  ret.CopyFrom(_cache['default_event'])
-  return ret
-
-
-def set_default_event(event):
-  """Change the default ChromeInfraEvent used to compute all events.
-
-  Args:
-    event (ChromeInfraEvent): default event
-  """
-  # Here we raise an exception because failing to set the default event
-  # could lead to invalid data in the database.
-  if not isinstance(event, ChromeInfraEvent):
-    msg = ('A ChromeInfraEvent is required as the default event. Got %s' %
-           type(event))
-    logging.error(msg)
-    raise TypeError(msg)
-
-  _cache['default_event'] = event
-
-
-def close():
-  """Reset the state.
-
-  Call this right before exiting the program.
-
-  Returns:
-    success (bool): False if an error occurred
-  """
-  global _router, _cache
-  _router = None
-  _cache = {}
-  return True
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/monitoring.py b/tools/swarming_client/third_party/infra_libs/event_mon/monitoring.py
deleted file mode 100644
index 5a16b0e..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/monitoring.py
+++ /dev/null
@@ -1,522 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from google.protobuf.message import DecodeError
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import (
-  ChromeInfraEvent, ServiceEvent, BuildEvent)
-from infra_libs.event_mon.protos.goma_stats_pb2 import GomaStats
-from infra_libs.event_mon.protos.log_request_lite_pb2 import LogRequestLite
-from infra_libs.event_mon import config, router
-
-
-# These constants are part of the API.
-EVENT_TYPES = ('START', 'STOP', 'UPDATE', 'CURRENT_VERSION', 'CRASH')
-BUILD_EVENT_TYPES = ('SCHEDULER', 'BUILD', 'STEP')
-BUILD_RESULTS = ('UNKNOWN', 'SUCCESS', 'FAILURE', 'INFRA_FAILURE',
-                 'WARNING', 'SKIPPED', 'RETRY')
-TIMESTAMP_KINDS = ('UNKNOWN', 'POINT', 'BEGIN', 'END')
-GOMA_ERROR_TYPES = ('GOMA_ERROR_OK', 'GOMA_ERROR_UNKNOWN', 'GOMA_ERROR_CRASHED',
-                    'GOMA_ERROR_LOG_FATAL')
-
-# Maximum size of stack trace sent in an event, in characters.
-STACK_TRACE_MAX_SIZE = 1000
-
-
-class Event(object):
-  """Wraps the event proto with the necessary boilerplate code."""
-
-  def __init__(self, timestamp_kind=None,
-               event_timestamp_ms=None, service_name=None):
-    """
-    Args:
-      timestamp_kind (string): 'POINT', 'START' or 'STOP'.
-      event_timestamp_ms (int or float): time of the event in milliseconds
-         from Unix epoch. Default: now.
-      service_name (string): name of the monitored service.
-    """
-    self._timestamp_ms = event_timestamp_ms
-    self._event =  _get_chrome_infra_event(
-        timestamp_kind, service_name=service_name)
-
-  @property
-  def is_null(self):
-    return self.proto is None
-
-  @staticmethod
-  def null():
-    """Create an "null" Event, without the proto.
-
-    Null event's send() method will fail (return False). This is useful for
-    returning a consistent object type from helper functions even in the
-    case of failure.
-    """
-    event = Event()
-    event._event = None
-    return event
-
-  @property
-  def proto(self):
-    return self._event
-
-  def log_event(self):
-    if self.is_null:
-      return None
-    return _get_log_event_lite(
-        self.proto, event_timestamp=self._timestamp_ms)
-
-  def send(self):
-    if self.proto is None:
-      return False
-    return config._router.push_event(self.log_event())
-
-
-def _get_chrome_infra_event(timestamp_kind, service_name=None):
-  """Compute a basic event.
-
-  Validates the inputs and returns a pre-filled ChromeInfraEvent or
-  None if any check failed.
-
-  The proto is filled using values provided in setup_monitoring() at
-  initialization time, and args.
-
-  Args:
-    timestamp_kind (string): any of ('POINT', 'BEGIN', 'END').
-
-  Returns:
-    event (chrome_infra_log_pb2.ChromeInfraEvent):
-  """
-  # Testing for None because we want an error message when timestamp_kind == ''.
-  if timestamp_kind is not None and timestamp_kind not in TIMESTAMP_KINDS:
-    logging.error('Invalid value for timestamp_kind: %s', timestamp_kind)
-    return None
-
-  # We must accept unicode here.
-  if service_name is not None and not isinstance(service_name, basestring):
-    logging.error('Invalid type for service_name: %s', type(service_name))
-    return None
-
-  event = ChromeInfraEvent()
-  event.CopyFrom(config._cache['default_event'])
-
-  if timestamp_kind:
-    event.timestamp_kind = ChromeInfraEvent.TimestampKind.Value(timestamp_kind)
-  if service_name:
-    event.event_source.service_name = service_name
-
-  return event
-
-
-def _get_log_event_lite(chrome_infra_event, event_timestamp=None):
-  """Wraps a ChromeInfraEvent into a LogEventLite.
-
-  Args:
-    event_timestamp (int or float): timestamp of when the event happened
-      as a number of milliseconds since the epoch. If None, the current time
-      is used.
-
-  Returns:
-    log_event (log_request_lite_pb2.LogRequestLite.LogEventLite):
-  """
-  if not isinstance(event_timestamp, (int, float, None.__class__ )):
-    logging.error('Invalid type for event_timestamp. Needs a number, got %s',
-                  type(event_timestamp))
-    return None
-
-  log_event = LogRequestLite.LogEventLite()
-  log_event.event_time_ms = int(event_timestamp or router.time_ms())
-  log_event.source_extension = chrome_infra_event.SerializeToString()
-  return log_event
-
-
-def _get_service_event(event_type,
-                       timestamp_kind=None,
-                       event_timestamp=None,
-                       code_version=None,
-                       stack_trace=None,
-                       service_name=None):
-  """Compute a ChromeInfraEvent filled with a ServiceEvent.
-  Arguments are identical to those in send_service_event(), please refer
-  to this docstring.
-
-  Returns:
-    event (Event): can be a "null" Event if there is a major processing issue.
-  """
-  if event_type not in EVENT_TYPES:
-    logging.error('Invalid value for event_type: %s', event_type)
-    return Event.null()
-
-  if timestamp_kind is None:
-    timestamp_kind = 'POINT'
-    if event_type == 'START':
-      timestamp_kind = 'BEGIN'
-    elif event_type == 'STOP':
-      timestamp_kind = 'END'
-    elif event_type == 'CRASH':
-      timestamp_kind = 'END'
-
-  event_wrapper = Event(timestamp_kind, event_timestamp, service_name)
-  if event_wrapper.is_null:
-    return event_wrapper
-
-  event = event_wrapper.proto
-
-  event.service_event.type = getattr(ServiceEvent, event_type)
-
-  if code_version is None:
-    code_version = ()
-  if not isinstance(code_version, (tuple, list)):
-    logging.error('Invalid type provided to code_version argument in '
-                  '_get_service_event. Please fix the calling code. '
-                  'Type provided: %s, expected list, tuple or None.',
-                  type(code_version))
-    code_version = ()
-
-  for version_d in code_version:
-    try:
-      if 'source_url' not in version_d:
-        logging.error('source_url missing in %s', version_d)
-        continue
-
-      version = event.service_event.code_version.add()
-      version.source_url = version_d['source_url']
-      if 'revision' in version_d:
-        # Rely on the url to switch between svn and git because an
-        # abbreviated sha1 can sometimes be confused with an int.
-        if version.source_url.startswith('svn://'):
-          version.svn_revision = int(version_d['revision'])
-        else:
-          version.git_hash = version_d['revision']
-
-      if 'version' in version_d:
-        version.version = version_d['version']
-      if 'dirty' in version_d:
-        version.dirty = version_d['dirty']
-
-    except TypeError:
-      logging.exception('Invalid type provided to code_version argument in '
-                        '_get_service_event. Please fix the calling code.')
-      continue
-
-  if isinstance(stack_trace, basestring):
-    if event_type != 'CRASH':
-      logging.error('stack_trace provided for an event different from CRASH.'
-                    ' Got: %s', event_type)
-    event.service_event.stack_trace = stack_trace[-STACK_TRACE_MAX_SIZE:]
-  else:
-    if stack_trace is not None:
-      logging.error('stack_trace should be a string, got %s',
-                    stack_trace.__class__.__name__)
-
-  return event_wrapper
-
-
-def send_service_event(event_type,
-                       timestamp_kind=None,
-                       event_timestamp=None,
-                       code_version=(),
-                       stack_trace=None):
-  """Send service event.
-
-  Args:
-    event_type (string): any name of enum ServiceEvent.ServiceEventType.
-      ('START', 'STOP', 'UPDATE', 'CURRENT_VERSION', 'CRASH')
-
-  Keyword Args:
-    timestamp_kind (string): any of ('POINT', 'BEGIN', 'END').
-
-    event_timestamp (int or float): timestamp of when the event happened
-      as a number of milliseconds since the epoch. If not provided, the
-      current time is used.
-
-    code_version (list/tuple of dict or None): required keys are
-        'source_url' -> full url to the repository
-        'revision' -> (string) git sha1 or svn revision number.
-      optional keys are
-        'dirty' -> boolean. True if the local source tree has local
-            modification.
-        'version' -> manually-set version number (like 'v2.6.0')
-
-    stack_trace (str): when event_type is 'CRASH', stack trace of the crash
-      as a string. String is truncated to 1000 characters (the last ones
-      are kept). Use traceback.format_exc() to get the stack trace from an
-      exception handler.
-
-  Returns:
-    success (bool): False if some error happened.
-  """
-  return _get_service_event(event_type=event_type,
-                            timestamp_kind=timestamp_kind,
-                            service_name=None,
-                            event_timestamp=event_timestamp,
-                            code_version=code_version,
-                            stack_trace=stack_trace).send()
-
-
-def get_build_event(event_type,
-                    hostname,
-                    build_name,
-                    build_number=None,
-                    build_scheduling_time=None,
-                    step_name=None,
-                    step_text=None,
-                    step_number=None,
-                    result=None,
-                    extra_result_code=None,
-                    timestamp_kind=None,
-                    event_timestamp=None,
-                    service_name=None,
-                    goma_stats=None,
-                    goma_error=None,
-                    goma_crash_report_id=None,
-                    patch_url=None,
-                    bbucket_id=None,
-                    category=None,
-                    head_revision_git_hash=None):
-  """Compute a ChromeInfraEvent filled with a BuildEvent.
-
-  Arguments are identical to those in send_build_event(), please refer
-  to this docstring.
-
-  Returns:
-    event (log_request_lite_pb2.LogRequestLite.LogEventLite): can be None
-      if there is a major processing issue.
-  """
-  if event_type not in BUILD_EVENT_TYPES:
-    logging.error('Invalid value for event_type: %s', event_type)
-    return Event.null()
-
-  event_wrapper = Event(timestamp_kind, event_timestamp,
-                        service_name=service_name)
-  if event_wrapper.is_null:
-    return event_wrapper
-
-  event = event_wrapper.proto
-  event.build_event.type = BuildEvent.BuildEventType.Value(event_type)
-
-  if hostname:
-    event.build_event.host_name = hostname
-  if not event.build_event.HasField('host_name'):
-    logging.error('hostname must be provided, got %s', hostname)
-
-  if build_name:
-    event.build_event.build_name = build_name
-  if not event.build_event.HasField('build_name'):
-    logging.error('build_name must be provided, got %s', build_name)
-
-  # 0 is a valid value for build_number
-  if build_number is not None:
-    event.build_event.build_number = build_number
-
-  # 0 is not a valid scheduling time
-  if build_scheduling_time:
-    event.build_event.build_scheduling_time_ms = build_scheduling_time
-
-  if event.build_event.HasField('build_number'):
-    if event_type == 'SCHEDULER':
-      logging.error('build_number should not be provided for a "SCHEDULER"'
-                    ' type, got %s (drop or use BUILD or STEP type)',
-                    build_number)
-
-    if not event.build_event.HasField('build_scheduling_time_ms'):
-      logging.error('build_number has been provided (%s), '
-                    'build_scheduling_time was not. '
-                    'Provide either both or none.',
-                    event.build_event.build_number)
-  else:  # no 'build_number' field
-    if event.build_event.HasField('build_scheduling_time_ms'):
-      logging.error('build_number has not been provided, '
-                    'build_scheduling_time was provided (%s). '
-                    'Both must be present or missing.',
-                    event.build_event.build_scheduling_time_ms)
-
-  if step_name:
-    event.build_event.step_name = str(step_name)
-  if step_text:
-    event.build_event.step_text = str(step_text)
-
-  if step_number is not None:
-    event.build_event.step_number = step_number
-  if patch_url is not None:
-    event.build_event.patch_url = patch_url
-  if bbucket_id is not None:
-    try:
-      event.build_event.bbucket_id = int(bbucket_id)
-    except (ValueError, TypeError):
-      pass
-
-  if category:
-    event.build_event.category = {
-      'cq': BuildEvent.CATEGORY_CQ,
-      'cq_experimental': BuildEvent.CATEGORY_CQ_EXPERIMENTAL,
-      'git_cl_try': BuildEvent.CATEGORY_GIT_CL_TRY,
-    }.get(category.lower(), BuildEvent.CATEGORY_UNKNOWN)
-
-  if head_revision_git_hash:
-    event.build_event.head_revision.git_hash = head_revision_git_hash
-
-  if event.build_event.step_name:
-    if event_type != 'STEP':
-      logging.error('step_name should be provided only for type "STEP", '
-                    'got %s', event_type)
-    if not event.build_event.HasField('step_number'):
-      logging.error('step_number was not provided, but got a value for '
-                    'step_name (%s). Provide either both or none',
-                    step_name)
-    if (not event.build_event.HasField('build_number')
-        and not event.build_event.HasField('build_scheduling_time_ms')):
-      logging.error('build information must be provided when step '
-                    'information is provided. Got neither build_number '
-                    'nor build_scheduling_time.')
-  else:
-    if event.build_event.HasField('step_number'):
-      logging.error('step_number has been provided (%s), '
-                    'step_name has not. '
-                    'Both must be present or missing.',
-                    event.build_event.step_number)
-
-  # TODO(pgervais) remove this.
-  # Hack to work around errors in the proto
-  mapping = {'WARNINGS': 'WARNING', 'EXCEPTION': 'INFRA_FAILURE'}
-  result = mapping.get(result, result)
-
-  if result is not None:  # we want an error message if result==''.
-    if result not in BUILD_RESULTS:
-      logging.error('Invalid value for result: %s', result)
-    else:
-      event.build_event.result = getattr(BuildEvent, result)
-
-      if event_type == 'SCHEDULER':
-        logging.error('A result was provided for a "SCHEDULER" event type '
-                      '(%s). This is only accepted for BUILD and STEP types.',
-                      result)
-
-  if isinstance(extra_result_code, basestring):
-    extra_result_code = (extra_result_code, )
-  if not isinstance(extra_result_code, (list, tuple)):
-    if extra_result_code is not None:
-      logging.error('extra_result_code must be a string or list of strings. '
-                    'Got %s' % type(extra_result_code))
-  else:
-    non_strings = []
-    extra_result_strings = []
-    for s in extra_result_code:
-      if not isinstance(s, basestring):
-        non_strings.append(s)
-      else:
-        extra_result_strings.append(s)
-
-    if non_strings:
-      logging.error('some values provided to extra_result_code are not strings:'
-                    ' %s' % str(non_strings))
-    for s in extra_result_strings:
-      event.build_event.extra_result_code.append(s)
-
-  if goma_stats:
-    if isinstance(goma_stats, GomaStats):
-      event.build_event.goma_stats.MergeFrom(goma_stats)
-    else:
-      logging.error('expected goma_stats to be an instance of GomaStats, '
-                    'got %s', type(goma_stats))
-  if goma_error:
-    if goma_stats:
-      logging.error('Only one of goma_error and goma_stats can be provided. '
-                    'Got %s and %s.', goma_error, goma_stats)
-    event.build_event.goma_error = BuildEvent.GomaErrorType.Value(goma_error)
-    if goma_crash_report_id:
-      event.build_event.goma_crash_report_id = goma_crash_report_id
-      if goma_error != 'GOMA_ERROR_CRASHED':
-        logging.error('A crash report id (%s) was provided for GomaErrorType '
-                      '(%s).  This is only accepted for GOMA_ERROR_CRASHED '
-                      'type.', goma_crash_report_id, goma_error)
-
-  return event_wrapper
-
-
-def send_build_event(event_type,
-                     hostname,
-                     build_name,
-                     build_number=None,
-                     build_scheduling_time=None,
-                     step_name=None,
-                     step_text=None,
-                     step_number=None,
-                     result=None,
-                     extra_result_code=None,
-                     timestamp_kind=None,
-                     event_timestamp=None,
-                     goma_stats=None,
-                     goma_error=None,
-                     goma_crash_report_id=None,
-                     patch_url=None,
-                     bbucket_id=None,
-                     category=None,
-                     head_revision_git_hash=None):
-  """Send a ChromeInfraEvent filled with a BuildEvent
-
-  Args:
-    event_type (string): any name of enum BuildEvent.BuildEventType.
-      (listed in infra_libs.event_mon.monitoring.BUILD_EVENT_TYPES)
-    hostname (string): fqdn of the machine that is running the build / step.
-      aka the bot name.
-    build_name (string): name of the builder.
-
-  Keyword args:
-    build_number (int): as the name says.
-    build_scheduling_time (int): timestamp telling when the build was
-      scheduled. This is required when build_number is provided to make it
-      possible to distinguish two builds with the same build number.
-    step_name (str): name of the step.
-    step_text (str): text of the step.
-    step_number (int): rank of the step in the build. This is mandatory
-      if step_name is provided, because step_name is not enough to tell the
-      order.
-    result (string): any name of enum BuildEvent.BuildResult.
-      (listed in infra_libs.event_mon.monitoring.BUILD_RESULTS)
-    extra_result_code (string or list of strings): arbitrary strings intended
-      to provide more fine-grained information about the result.
-    goma_stats (goma_stats_pb2.GomaStats): statistics output by the Goma proxy.
-    goma_error (string): goma error type defined as GomaErrorType.
-    goma_crash_report_id (string): id of goma crash report.
-    patch_url (string): URL of the patch that triggered the build.
-    bbucket_id (long): Buildbucket ID of the build.
-    category (string): Build category, e.g. cq or git_cl_try.
-    head_revision_git_hash (string): Revision fetched from the Git repository.
-
-  Returns:
-    success (bool): False if some error happened.
-  """
-  return get_build_event(event_type,
-                         hostname,
-                         build_name,
-                         build_number=build_number,
-                         build_scheduling_time=build_scheduling_time,
-                         step_name=step_name,
-                         step_text=step_text,
-                         step_number=step_number,
-                         result=result,
-                         extra_result_code=extra_result_code,
-                         timestamp_kind=timestamp_kind,
-                         event_timestamp=event_timestamp,
-                         goma_stats=goma_stats,
-                         goma_error=goma_error,
-                         goma_crash_report_id=goma_crash_report_id,
-                         patch_url=patch_url,
-                         bbucket_id=bbucket_id,
-                         category=category,
-                         head_revision_git_hash=head_revision_git_hash).send()
-
-
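A hedged usage sketch of the send_build_event helper defined above; the hostname, builder name, and numeric values are made up, and the import path is assumed from the docstring:

    from infra_libs.event_mon import monitoring  # assumed import path

    ok = monitoring.send_build_event(
        'STEP',
        'builder42-m1.example.com',           # fqdn of the bot (placeholder)
        'linux_rel',                          # builder name (placeholder)
        build_number=123,
        build_scheduling_time=1478000000000,  # ms since epoch; paired with build_number
        step_name='compile',
        step_number=4,
        result='SUCCESS')
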
-def send_events(events):
-  """Send several events at once to the endpoint.
-
-  Args:
-    events (iterable of Event): events to send
-
-  Return:
-    success (bool): True if data was successfully received by the endpoint.
-  """
-  return config._router.push_event(tuple(e.log_event() for e in events))
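
For comparison, a sketch of batching several events through get_build_event and send_events, as declared above; all field values are illustrative only:

    from infra_libs.event_mon import monitoring  # assumed import path

    events = [
        monitoring.get_build_event('BUILD', 'builder42-m1.example.com', 'linux_rel',
                                   build_number=7,
                                   build_scheduling_time=1478000000000),
        monitoring.get_build_event('STEP', 'builder42-m1.example.com', 'linux_rel',
                                   build_number=7,
                                   build_scheduling_time=1478000000000,
                                   step_name='compile', step_number=1,
                                   result='SUCCESS'),
    ]
    ok = monitoring.send_events(events)  # True if the endpoint accepted the batch
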
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/protos/README.md b/tools/swarming_client/third_party/infra_libs/event_mon/protos/README.md
deleted file mode 100644
index f4c8e98..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/protos/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Updating the *_pb2.py files: see go/chrome-infra-event-proto-doc
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/protos/__init__.py b/tools/swarming_client/third_party/infra_libs/event_mon/protos/__init__.py
deleted file mode 100644
index 50b23df..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/protos/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/protos/chrome_infra_log_pb2.py b/tools/swarming_client/third_party/infra_libs/event_mon/protos/chrome_infra_log_pb2.py
deleted file mode 100644
index 0f11732..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/protos/chrome_infra_log_pb2.py
+++ /dev/null
@@ -1,1796 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: chrome_infra_log.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-import goma_stats_pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='chrome_infra_log.proto',
-  package='crit_event.proto',
-  serialized_pb='\n\x16\x63hrome_infra_log.proto\x12\x10\x63rit_event.proto\x1a\x10goma_stats.proto\"\xcb\x10\n\x07\x43QEvent\x12\x30\n\x06\x61\x63tion\x18\x01 \x01(\x0e\x32 .crit_event.proto.CQEvent.Action\x12\x34\n\x08verifier\x18\x02 \x01(\x0e\x32\".crit_event.proto.CQEvent.Verifier\x12\x0f\n\x07\x63q_name\x18\x03 \x01(\t\x12\r\n\x05issue\x18\x04 \x01(\t\x12\x10\n\x08patchset\x18\x05 \x01(\t\x12\x1a\n\x12\x61ttempt_start_usec\x18\x06 \x01(\x03\x12\x34\n\x06status\x18\x07 \x01(\x0e\x32 .crit_event.proto.CQEvent.StatusB\x02\x18\x01\x12\x0c\n\x04\x64one\x18\x08 \x01(\x08\x12?\n\x0e\x66\x61ilure_reason\x18\t \x01(\x0b\x32\'.crit_event.proto.CQEvent.FailureReason\x12\x0f\n\x07\x64ry_run\x18\n \x01(\x08\x12\x1a\n\x12global_retry_quota\x18\x0b \x01(\x05\x12\x44\n\x11triggered_tryjobs\x18\x0c \x03(\x0b\x32).crit_event.proto.CQEvent.TriggeredTryJob\x12\r\n\x05\x66iles\x18\r \x03(\t\x12\x1b\n\x13\x63odereview_hostname\x18\x0e \x01(\t\x12\x17\n\x0f\x63onfig_revision\x18\x0f \x01(\t\x1a\xa9\x06\n\rFailureReason\x12\x34\n\x08verifier\x18\x01 \x01(\x0e\x32\".crit_event.proto.CQEvent.Verifier\x12\x43\n\tfail_type\x18\x02 \x01(\x0e\x32\x30.crit_event.proto.CQEvent.FailureReason.FailType\x12M\n\x0f\x66\x61iled_try_jobs\x18\x03 \x03(\x0b\x32\x34.crit_event.proto.CQEvent.FailureReason.FailedTryJob\x1a{\n\x0c\x46\x61iledTryJob\x12\x0e\n\x06master\x18\x01 \x01(\t\x12\x0f\n\x07\x62uilder\x18\x02 \x01(\t\x12\x10\n\x08\x62uild_id\x18\x03 \x01(\x03\x12\x38\n\tfail_type\x18\x05 \x01(\x0e\x32%.crit_event.proto.BuildEvent.FailType\"\xd0\x03\n\x08\x46\x61ilType\x12\x15\n\x11UNKNOWN_FAIL_TYPE\x10\x00\x12\x18\n\x14\x46\x41ILED_PRESUBMIT_BOT\x10\x01\x12\x1e\n\x1a\x46\x41ILED_PRESUBMIT_BOT_INFRA\x10\x11\x12\x0f\n\x0b\x46\x41ILED_JOBS\x10\x02\x12\x18\n\x14RETRY_QUOTA_EXCEEDED\x10\x03\x12\x0c\n\x08NOT_LGTM\x10\x04\x12\x10\n\x0cMISSING_LGTM\x10\x05\x12\x0e\n\nNO_SIGNCLA\x10\x06\x12\x1a\n\x16\x46\x41ILED_SIGNCLA_REQUEST\x10\x07\x12\x11\n\rMANUAL_CANCEL\x10\x08\x12\x10\n\x0c\x43OMMIT_FALSE\x10\t\x12\x13\n\x0fOPEN_DEPENDENCY\x10\n\x12\x15\n\x11INVALID_DELIMITER\x10\x0b\x12\x1f\n\x1b\x46\x41ILED_REMOTE_REF_PRESUBMIT\x10\x0c\x12\x1a\n\x16\x46\x41ILED_TO_TRIGGER_JOBS\x10\x12\x12\x1a\n\x16UNSUPPORTED_CQ_FEATURE\x10\x13\x12\x13\n\x0f\x46\x41ILED_CHECKOUT\x10\r\x12\x11\n\rFAILED_COMMIT\x10\x0e\x12\x10\n\x0c\x46\x41ILED_PATCH\x10\x0f\x12\x18\n\x14\x46\x41ILED_REQUEST_PATCH\x10\x10\x1a\x41\n\x0fTriggeredTryJob\x12\x0e\n\x06master\x18\x01 \x01(\t\x12\x0f\n\x07\x62uilder\x18\x02 \x01(\t\x12\r\n\x05tests\x18\x03 
\x03(\t\"\xc8\x03\n\x06\x41\x63tion\x12\x12\n\x0eUNKNOWN_ACTION\x10\x00\x12\x0c\n\x08\x43Q_START\x10\x01\x12\x0b\n\x07\x43Q_STOP\x10\x02\x12\x13\n\x0fPATCH_COMMITTED\x10\x03\x12\x14\n\x10PATCH_COMMITTING\x10\x04\x12\x10\n\x0cPATCH_FAILED\x10\x05\x12\x19\n\x15PATCH_READY_TO_COMMIT\x10\x06\x12\x0f\n\x0bPATCH_START\x10\x07\x12\x0e\n\nPATCH_STOP\x10\x08\x12\x13\n\x0fPATCH_THROTTLED\x10\t\x12\x15\n\x11PATCH_TREE_CLOSED\x10\n\x12\x1b\n\x17VERIFIER_CUSTOM_TRYBOTS\x10\x0b\x12\x12\n\x0eVERIFIER_ERROR\x10\x0c\x12\x11\n\rVERIFIER_FAIL\x10\r\x12\x18\n\x14VERIFIER_JOBS_UPDATE\x10\x0e\x12\x11\n\rVERIFIER_PASS\x10\x0f\x12\x12\n\x0eVERIFIER_RETRY\x10\x10\x12\x11\n\rVERIFIER_SKIP\x10\x11\x12\x12\n\x0eVERIFIER_START\x10\x12\x12\x14\n\x10VERIFIER_TIMEOUT\x10\x13\x12\x14\n\x10VERIFIER_TRIGGER\x10\x14\x12\x12\n\x0eVERIFIER_NOTRY\x10\x15\"\x7f\n\x08Verifier\x12\x14\n\x10UNKNOWN_VERIFIER\x10\x00\x12\x11\n\rREVIEWER_LGTM\x10\x01\x12\x0c\n\x08SIGN_CLA\x10\x02\x12\x0f\n\x0bTREE_STATUS\x10\x03\x12\x1e\n\x1aTRIGGER_EXPERIMENT_TRY_JOB\x10\x04\x12\x0b\n\x07TRY_JOB\x10\x05\"\x91\x01\n\x06Status\x12\x12\n\x0eUNKNOWN_STATUS\x10\x00\x12\t\n\x05START\x10\x01\x12\x08\n\x04STOP\x10\x02\x12\x13\n\x0fREADY_TO_COMMIT\x10\x03\x12\x0e\n\nCOMMITTING\x10\x04\x12\r\n\tCOMMITTED\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x12\r\n\tTHROTTLED\x10\x07\x12\x0f\n\x0bTREE_CLOSED\x10\x08\"y\n\x0b\x43odeVersion\x12\x12\n\nsource_url\x18\x01 \x01(\t\x12\r\n\x05\x64irty\x18\x02 \x01(\x08\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x10\n\x08git_hash\x18\x04 \x01(\t\x12\x14\n\x0csvn_revision\x18\x05 \x01(\x05\x12\x0e\n\x06\x62ranch\x18\x06 \x01(\t\"\x82\x02\n\x0cServiceEvent\x12\x46\n\x04type\x18\x01 \x01(\x0e\x32/.crit_event.proto.ServiceEvent.ServiceEventType:\x07UNKNOWN\x12\x33\n\x0c\x63ode_version\x18\x02 \x03(\x0b\x32\x1d.crit_event.proto.CodeVersion\x12\x13\n\x0bstack_trace\x18\x03 \x01(\t\"`\n\x10ServiceEventType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05START\x10\x01\x12\x08\n\x04STOP\x10\x02\x12\n\n\x06UPDATE\x10\x03\x12\x13\n\x0f\x43URRENT_VERSION\x10\x04\x12\t\n\x05\x43RASH\x10\x05\"\xd1\n\n\nBuildEvent\x12\x39\n\x04type\x18\x01 \x01(\x0e\x32+.crit_event.proto.BuildEvent.BuildEventType\x12\x11\n\thost_name\x18\x02 \x01(\t\x12\x12\n\nbuild_name\x18\x03 \x01(\t\x12\x14\n\x0c\x62uild_number\x18\x04 \x01(\x05\x12 \n\x18\x62uild_scheduling_time_ms\x18\x05 \x01(\x03\x12\x11\n\tstep_name\x18\x06 \x01(\t\x12\x11\n\tstep_text\x18\x13 \x01(\t\x12\x13\n\x0bstep_number\x18\x07 \x01(\x05\x12\x41\n\x06result\x18\x08 \x01(\x0e\x32(.crit_event.proto.BuildEvent.BuildResult:\x07UNKNOWN\x12\x19\n\x11\x65xtra_result_code\x18\n \x03(\t\x12,\n\ngoma_stats\x18\t \x01(\x0b\x32\x18.devtools_goma.GomaStats\x12M\n\ngoma_error\x18\x11 \x01(\x0e\x32*.crit_event.proto.BuildEvent.GomaErrorType:\rGOMA_ERROR_OK\x12\x1c\n\x14goma_crash_report_id\x18\x12 \x01(\t\x12\x0f\n\x07project\x18\x0b \x01(\t\x12\x34\n\rhead_revision\x18\x0c \x01(\x0b\x32\x1d.crit_event.proto.CodeVersion\x12/\n\x08revision\x18\r \x03(\x0b\x32\x1d.crit_event.proto.CodeVersion\x12\x11\n\tpatch_url\x18\x0e \x01(\t\x12\x10\n\x08\x62uildset\x18\x18 \x01(\t\x12\x37\n\x08\x63\x61tegory\x18\x14 \x01(\x0e\x32%.crit_event.proto.BuildEvent.Category\x12\x12\n\nbbucket_id\x18\x0f \x01(\x03\x12\x1a\n\x12\x62\x62ucket_user_agent\x18\x10 \x01(\t\x12\x38\n\tfail_type\x18\x15 \x01(\x0e\x32%.crit_event.proto.BuildEvent.FailType\x12\x12\n\ncq_project\x18\x16 \x01(\t\x12\x15\n\rcq_subproject\x18\x17 
\x01(\t\"4\n\x0e\x42uildEventType\x12\r\n\tSCHEDULER\x10\x00\x12\t\n\x05\x42UILD\x10\x01\x12\x08\n\x04STEP\x10\x02\"l\n\x0b\x42uildResult\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\x0b\n\x07\x46\x41ILURE\x10\x02\x12\x11\n\rINFRA_FAILURE\x10\x03\x12\x0b\n\x07WARNING\x10\x04\x12\x0b\n\x07SKIPPED\x10\x05\x12\t\n\x05RETRY\x10\x06\"l\n\rGomaErrorType\x12\x11\n\rGOMA_ERROR_OK\x10\x00\x12\x16\n\x12GOMA_ERROR_UNKNOWN\x10\x01\x12\x16\n\x12GOMA_ERROR_CRASHED\x10\x02\x12\x18\n\x14GOMA_ERROR_LOG_FATAL\x10\x03\"h\n\x08\x43\x61tegory\x12\x14\n\x10\x43\x41TEGORY_UNKNOWN\x10\x00\x12\x0f\n\x0b\x43\x41TEGORY_CQ\x10\x01\x12\x1c\n\x18\x43\x41TEGORY_CQ_EXPERIMENTAL\x10\x02\x12\x17\n\x13\x43\x41TEGORY_GIT_CL_TRY\x10\x03\"\x8d\x01\n\x08\x46\x61ilType\x12\x15\n\x11\x46\x41IL_TYPE_UNKNOWN\x10\x00\x12\x13\n\x0f\x46\x41IL_TYPE_INFRA\x10\x01\x12\x15\n\x11\x46\x41IL_TYPE_COMPILE\x10\x02\x12\x12\n\x0e\x46\x41IL_TYPE_TEST\x10\x03\x12\x15\n\x11\x46\x41IL_TYPE_INVALID\x10\x04\x12\x13\n\x0f\x46\x41IL_TYPE_PATCH\x10\x05\"S\n\x10InfraEventSource\x12\x11\n\thost_name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ppengine_name\x18\x02 \x01(\t\x12\x14\n\x0cservice_name\x18\x03 \x01(\t\"\x83\x05\n\x10TestResultsEvent\x12\x13\n\x0bmaster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x62uilder_name\x18\x02 \x01(\t\x12\x14\n\x0c\x62uild_number\x18\x03 \x01(\x05\x12\x11\n\ttest_type\x18\x04 \x01(\t\x12\x13\n\x0binterrupted\x18\x05 \x01(\x08\x12\x0f\n\x07version\x18\x06 \x01(\x05\x12\x18\n\x10usec_since_epoch\x18\x07 \x01(\x03\x12<\n\x05tests\x18\x08 \x03(\x0b\x32-.crit_event.proto.TestResultsEvent.TestResult\x1a\xb5\x01\n\nTestResult\x12\x11\n\ttest_name\x18\x01 \x01(\t\x12\x41\n\x06\x61\x63tual\x18\x02 \x03(\x0e\x32\x31.crit_event.proto.TestResultsEvent.TestResultType\x12\x43\n\x08\x65xpected\x18\x03 \x03(\x0e\x32\x31.crit_event.proto.TestResultsEvent.TestResultType\x12\x0c\n\x04\x62ugs\x18\x04 \x03(\t\"\xe4\x01\n\x0eTestResultType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04SKIP\x10\x01\x12\x08\n\x04PASS\x10\x02\x12\x08\n\x04\x46\x41IL\x10\x03\x12\t\n\x05\x43RASH\x10\x04\x12\x0b\n\x07TIMEOUT\x10\x05\x12\x0b\n\x07MISSING\x10\x06\x12\x08\n\x04LEAK\x10\x07\x12\x08\n\x04SLOW\x10\x08\x12\x08\n\x04TEXT\x10\t\x12\t\n\x05\x41UDIO\x10\n\x12\t\n\x05IMAGE\x10\x0b\x12\x0e\n\nIMAGE_TEXT\x10\x0c\x12\x0e\n\nREBASELINE\x10\r\x12\x13\n\x0fNEEDSREBASELINE\x10\x0e\x12\x19\n\x15NEEDSMANUALREBASELINE\x10\x0f\"\xef\x08\n\x14MachineProviderEvent\x12X\n\x11gce_backend_state\x18\x01 \x01(\x0e\x32=.crit_event.proto.MachineProviderEvent.GCEBackendMachineState\x12\x64\n\x1bgce_backend_deletion_reason\x18\x02 \x01(\x0e\x32?.crit_event.proto.MachineProviderEvent.GCEBackendDeletionReason\x12\x62\n\x16machine_provider_state\x18\x03 \x01(\x0e\x32\x42.crit_event.proto.MachineProviderEvent.MachineProviderMachineState\x12\x1b\n\x13pubsub_subscription\x18\x04 \x01(\t\x12\x1b\n\x13lease_expiration_ts\x18\x05 \x01(\x03\x12W\n\rmachine_state\x18\x06 \x01(\x0e\x32@.crit_event.proto.MachineProviderEvent.MachineProviderAgentState\x12\x17\n\x0fswarming_server\x18\x07 
\x01(\t\"\xea\x02\n\x16GCEBackendMachineState\x12\x0f\n\x0bGCE_UNKNOWN\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\r\n\tCATALOGED\x10\x02\x12\x19\n\x15SUBSCRIPTION_RECEIVED\x10\x03\x12\x1c\n\x18METADATA_UPDATE_PROPOSED\x10\x04\x12\x19\n\x15METADATA_UPDATE_READY\x10\x05\x12\x1d\n\x19METADATA_UPDATE_SCHEDULED\x10\x06\x12\x1a\n\x16METADATA_UPDATE_FAILED\x10\x07\x12\x1d\n\x19METADATA_UPDATE_SUCCEEDED\x10\x08\x12\x12\n\x0eLEASE_RECEIVED\x10\t\x12\r\n\tRECLAIMED\x10\n\x12\x15\n\x11\x44\x45LETION_PROPOSED\x10\x0b\x12\x16\n\x12\x44\x45LETION_SCHEDULED\x10\x0c\x12\x16\n\x12\x44\x45LETION_SUCCEEDED\x10\r\x12\x0b\n\x07\x44\x45LETED\x10\x0e\"8\n\x18GCEBackendDeletionReason\x12\x0f\n\x0bRECLAMATION\x10\x01\x12\x0b\n\x07\x44RAINED\x10\x02\"{\n\x1bMachineProviderMachineState\x12\x0e\n\nMP_UNKNOWN\x10\x00\x12\x0c\n\x08RECEIVED\x10\x01\x12\x0e\n\nSUBSCRIBED\x10\x02\x12\n\n\x06LEASED\x10\x03\x12\x11\n\rLEASE_EXPIRED\x10\x04\x12\x0f\n\x0bUNCATALOGED\x10\x05\"c\n\x19MachineProviderAgentState\x12\x11\n\rAGENT_UNKNOWN\x10\x00\x12\x0b\n\x07POLLING\x10\x01\x12\r\n\tALLOCATED\x10\x02\x12\x17\n\x13SWARMING_CONFIGURED\x10\x03\"\xab\x04\n\x10\x43hromeInfraEvent\x12Q\n\x0etimestamp_kind\x18\x01 \x01(\x0e\x32\x30.crit_event.proto.ChromeInfraEvent.TimestampKind:\x07UNKNOWN\x12\x10\n\x08trace_id\x18\x02 \x01(\t\x12\x0f\n\x07span_id\x18\x03 \x01(\t\x12\x11\n\tparent_id\x18\x04 \x01(\t\x12\x38\n\x0c\x65vent_source\x18\x05 \x01(\x0b\x32\".crit_event.proto.InfraEventSource\x12\x35\n\rservice_event\x18\x06 \x01(\x0b\x32\x1e.crit_event.proto.ServiceEvent\x12\x31\n\x0b\x62uild_event\x18\x08 \x01(\x0b\x32\x1c.crit_event.proto.BuildEvent\x12+\n\x08\x63q_event\x18\t \x01(\x0b\x32\x19.crit_event.proto.CQEvent\x12\x38\n\x0ctest_results\x18\n \x01(\x0b\x32\".crit_event.proto.TestResultsEvent\x12\x46\n\x16machine_provider_event\x18\x0b \x01(\x0b\x32&.crit_event.proto.MachineProviderEvent\";\n\rTimestampKind\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05POINT\x10\x01\x12\t\n\x05\x42\x45GIN\x10\x02\x12\x07\n\x03\x45ND\x10\x03')
-
-
-
-_CQEVENT_FAILUREREASON_FAILTYPE = _descriptor.EnumDescriptor(
-  name='FailType',
-  full_name='crit_event.proto.CQEvent.FailureReason.FailType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_FAIL_TYPE', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_PRESUBMIT_BOT', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_PRESUBMIT_BOT_INFRA', index=2, number=17,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_JOBS', index=3, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RETRY_QUOTA_EXCEEDED', index=4, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NOT_LGTM', index=5, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MISSING_LGTM', index=6, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NO_SIGNCLA', index=7, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_SIGNCLA_REQUEST', index=8, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MANUAL_CANCEL', index=9, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='COMMIT_FALSE', index=10, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='OPEN_DEPENDENCY', index=11, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='INVALID_DELIMITER', index=12, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_REMOTE_REF_PRESUBMIT', index=13, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_TO_TRIGGER_JOBS', index=14, number=18,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='UNSUPPORTED_CQ_FEATURE', index=15, number=19,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_CHECKOUT', index=16, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_COMMIT', index=17, number=14,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_PATCH', index=18, number=15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED_REQUEST_PATCH', index=19, number=16,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=919,
-  serialized_end=1383,
-)
-
-_CQEVENT_ACTION = _descriptor.EnumDescriptor(
-  name='Action',
-  full_name='crit_event.proto.CQEvent.Action',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_ACTION', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CQ_START', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CQ_STOP', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_COMMITTED', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_COMMITTING', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_FAILED', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_READY_TO_COMMIT', index=6, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_START', index=7, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_STOP', index=8, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_THROTTLED', index=9, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PATCH_TREE_CLOSED', index=10, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_CUSTOM_TRYBOTS', index=11, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_ERROR', index=12, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_FAIL', index=13, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_JOBS_UPDATE', index=14, number=14,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_PASS', index=15, number=15,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_RETRY', index=16, number=16,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_SKIP', index=17, number=17,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_START', index=18, number=18,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_TIMEOUT', index=19, number=19,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_TRIGGER', index=20, number=20,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='VERIFIER_NOTRY', index=21, number=21,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1453,
-  serialized_end=1909,
-)
-
-_CQEVENT_VERIFIER = _descriptor.EnumDescriptor(
-  name='Verifier',
-  full_name='crit_event.proto.CQEvent.Verifier',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_VERIFIER', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='REVIEWER_LGTM', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SIGN_CLA', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TREE_STATUS', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TRIGGER_EXPERIMENT_TRY_JOB', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TRY_JOB', index=5, number=5,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1911,
-  serialized_end=2038,
-)
-
-_CQEVENT_STATUS = _descriptor.EnumDescriptor(
-  name='Status',
-  full_name='crit_event.proto.CQEvent.Status',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_STATUS', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='START', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='STOP', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='READY_TO_COMMIT', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='COMMITTING', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='COMMITTED', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILED', index=6, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='THROTTLED', index=7, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TREE_CLOSED', index=8, number=8,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2041,
-  serialized_end=2186,
-)
-
-_SERVICEEVENT_SERVICEEVENTTYPE = _descriptor.EnumDescriptor(
-  name='ServiceEventType',
-  full_name='crit_event.proto.ServiceEvent.ServiceEventType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='START', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='STOP', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='UPDATE', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CURRENT_VERSION', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CRASH', index=5, number=5,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2474,
-  serialized_end=2570,
-)
-
-_BUILDEVENT_BUILDEVENTTYPE = _descriptor.EnumDescriptor(
-  name='BuildEventType',
-  full_name='crit_event.proto.BuildEvent.BuildEventType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='SCHEDULER', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BUILD', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='STEP', index=2, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3412,
-  serialized_end=3464,
-)
-
-_BUILDEVENT_BUILDRESULT = _descriptor.EnumDescriptor(
-  name='BuildResult',
-  full_name='crit_event.proto.BuildEvent.BuildResult',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SUCCESS', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAILURE', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='INFRA_FAILURE', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='WARNING', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SKIPPED', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RETRY', index=6, number=6,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3466,
-  serialized_end=3574,
-)
-
-_BUILDEVENT_GOMAERRORTYPE = _descriptor.EnumDescriptor(
-  name='GomaErrorType',
-  full_name='crit_event.proto.BuildEvent.GomaErrorType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='GOMA_ERROR_OK', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='GOMA_ERROR_UNKNOWN', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='GOMA_ERROR_CRASHED', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='GOMA_ERROR_LOG_FATAL', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3576,
-  serialized_end=3684,
-)
-
-_BUILDEVENT_CATEGORY = _descriptor.EnumDescriptor(
-  name='Category',
-  full_name='crit_event.proto.BuildEvent.Category',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='CATEGORY_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CATEGORY_CQ', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CATEGORY_CQ_EXPERIMENTAL', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CATEGORY_GIT_CL_TRY', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3686,
-  serialized_end=3790,
-)
-
-_BUILDEVENT_FAILTYPE = _descriptor.EnumDescriptor(
-  name='FailType',
-  full_name='crit_event.proto.BuildEvent.FailType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_INFRA', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_COMPILE', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_TEST', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_INVALID', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL_TYPE_PATCH', index=5, number=5,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=3793,
-  serialized_end=3934,
-)
-
-_TESTRESULTSEVENT_TESTRESULTTYPE = _descriptor.EnumDescriptor(
-  name='TestResultType',
-  full_name='crit_event.proto.TestResultsEvent.TestResultType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SKIP', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='PASS', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='FAIL', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CRASH', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TIMEOUT', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MISSING', index=6, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LEAK', index=7, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SLOW', index=8, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='TEXT', index=9, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='AUDIO', index=10, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='IMAGE', index=11, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='IMAGE_TEXT', index=12, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='REBASELINE', index=13, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEEDSREBASELINE', index=14, number=14,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NEEDSMANUALREBASELINE', index=15, number=15,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=4437,
-  serialized_end=4665,
-)
-
-_MACHINEPROVIDEREVENT_GCEBACKENDMACHINESTATE = _descriptor.EnumDescriptor(
-  name='GCEBackendMachineState',
-  full_name='crit_event.proto.MachineProviderEvent.GCEBackendMachineState',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='GCE_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CREATED', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CATALOGED', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SUBSCRIPTION_RECEIVED', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METADATA_UPDATE_PROPOSED', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METADATA_UPDATE_READY', index=5, number=5,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METADATA_UPDATE_SCHEDULED', index=6, number=6,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METADATA_UPDATE_FAILED', index=7, number=7,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='METADATA_UPDATE_SUCCEEDED', index=8, number=8,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LEASE_RECEIVED', index=9, number=9,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RECLAIMED', index=10, number=10,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DELETION_PROPOSED', index=11, number=11,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DELETION_SCHEDULED', index=12, number=12,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DELETION_SUCCEEDED', index=13, number=13,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DELETED', index=14, number=14,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=5157,
-  serialized_end=5519,
-)
-
-_MACHINEPROVIDEREVENT_GCEBACKENDDELETIONREASON = _descriptor.EnumDescriptor(
-  name='GCEBackendDeletionReason',
-  full_name='crit_event.proto.MachineProviderEvent.GCEBackendDeletionReason',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='RECLAMATION', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DRAINED', index=1, number=2,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=5521,
-  serialized_end=5577,
-)
-
-_MACHINEPROVIDEREVENT_MACHINEPROVIDERMACHINESTATE = _descriptor.EnumDescriptor(
-  name='MachineProviderMachineState',
-  full_name='crit_event.proto.MachineProviderEvent.MachineProviderMachineState',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='MP_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='RECEIVED', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SUBSCRIBED', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LEASED', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LEASE_EXPIRED', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='UNCATALOGED', index=5, number=5,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=5579,
-  serialized_end=5702,
-)
-
-_MACHINEPROVIDEREVENT_MACHINEPROVIDERAGENTSTATE = _descriptor.EnumDescriptor(
-  name='MachineProviderAgentState',
-  full_name='crit_event.proto.MachineProviderEvent.MachineProviderAgentState',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='AGENT_UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='POLLING', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='ALLOCATED', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SWARMING_CONFIGURED', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=5704,
-  serialized_end=5803,
-)
-
-_CHROMEINFRAEVENT_TIMESTAMPKIND = _descriptor.EnumDescriptor(
-  name='TimestampKind',
-  full_name='crit_event.proto.ChromeInfraEvent.TimestampKind',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='POINT', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BEGIN', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='END', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=6302,
-  serialized_end=6361,
-)
-
-
-_CQEVENT_FAILUREREASON_FAILEDTRYJOB = _descriptor.Descriptor(
-  name='FailedTryJob',
-  full_name='crit_event.proto.CQEvent.FailureReason.FailedTryJob',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='master', full_name='crit_event.proto.CQEvent.FailureReason.FailedTryJob.master', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='builder', full_name='crit_event.proto.CQEvent.FailureReason.FailedTryJob.builder', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_id', full_name='crit_event.proto.CQEvent.FailureReason.FailedTryJob.build_id', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fail_type', full_name='crit_event.proto.CQEvent.FailureReason.FailedTryJob.fail_type', index=3,
-      number=5, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=793,
-  serialized_end=916,
-)
-
-_CQEVENT_FAILUREREASON = _descriptor.Descriptor(
-  name='FailureReason',
-  full_name='crit_event.proto.CQEvent.FailureReason',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='verifier', full_name='crit_event.proto.CQEvent.FailureReason.verifier', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fail_type', full_name='crit_event.proto.CQEvent.FailureReason.fail_type', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='failed_try_jobs', full_name='crit_event.proto.CQEvent.FailureReason.failed_try_jobs', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_CQEVENT_FAILUREREASON_FAILEDTRYJOB, ],
-  enum_types=[
-    _CQEVENT_FAILUREREASON_FAILTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=574,
-  serialized_end=1383,
-)
-
-_CQEVENT_TRIGGEREDTRYJOB = _descriptor.Descriptor(
-  name='TriggeredTryJob',
-  full_name='crit_event.proto.CQEvent.TriggeredTryJob',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='master', full_name='crit_event.proto.CQEvent.TriggeredTryJob.master', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='builder', full_name='crit_event.proto.CQEvent.TriggeredTryJob.builder', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='tests', full_name='crit_event.proto.CQEvent.TriggeredTryJob.tests', index=2,
-      number=3, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1385,
-  serialized_end=1450,
-)
-
-_CQEVENT = _descriptor.Descriptor(
-  name='CQEvent',
-  full_name='crit_event.proto.CQEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='action', full_name='crit_event.proto.CQEvent.action', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='verifier', full_name='crit_event.proto.CQEvent.verifier', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cq_name', full_name='crit_event.proto.CQEvent.cq_name', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='issue', full_name='crit_event.proto.CQEvent.issue', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='patchset', full_name='crit_event.proto.CQEvent.patchset', index=4,
-      number=5, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='attempt_start_usec', full_name='crit_event.proto.CQEvent.attempt_start_usec', index=5,
-      number=6, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='status', full_name='crit_event.proto.CQEvent.status', index=6,
-      number=7, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')),
-    _descriptor.FieldDescriptor(
-      name='done', full_name='crit_event.proto.CQEvent.done', index=7,
-      number=8, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='failure_reason', full_name='crit_event.proto.CQEvent.failure_reason', index=8,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dry_run', full_name='crit_event.proto.CQEvent.dry_run', index=9,
-      number=10, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='global_retry_quota', full_name='crit_event.proto.CQEvent.global_retry_quota', index=10,
-      number=11, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='triggered_tryjobs', full_name='crit_event.proto.CQEvent.triggered_tryjobs', index=11,
-      number=12, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='files', full_name='crit_event.proto.CQEvent.files', index=12,
-      number=13, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='codereview_hostname', full_name='crit_event.proto.CQEvent.codereview_hostname', index=13,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='config_revision', full_name='crit_event.proto.CQEvent.config_revision', index=14,
-      number=15, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_CQEVENT_FAILUREREASON, _CQEVENT_TRIGGEREDTRYJOB, ],
-  enum_types=[
-    _CQEVENT_ACTION,
-    _CQEVENT_VERIFIER,
-    _CQEVENT_STATUS,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=63,
-  serialized_end=2186,
-)
-
-
-_CODEVERSION = _descriptor.Descriptor(
-  name='CodeVersion',
-  full_name='crit_event.proto.CodeVersion',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='source_url', full_name='crit_event.proto.CodeVersion.source_url', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='dirty', full_name='crit_event.proto.CodeVersion.dirty', index=1,
-      number=2, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='version', full_name='crit_event.proto.CodeVersion.version', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='git_hash', full_name='crit_event.proto.CodeVersion.git_hash', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='svn_revision', full_name='crit_event.proto.CodeVersion.svn_revision', index=4,
-      number=5, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='branch', full_name='crit_event.proto.CodeVersion.branch', index=5,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2188,
-  serialized_end=2309,
-)
-
-
-_SERVICEEVENT = _descriptor.Descriptor(
-  name='ServiceEvent',
-  full_name='crit_event.proto.ServiceEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='type', full_name='crit_event.proto.ServiceEvent.type', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='code_version', full_name='crit_event.proto.ServiceEvent.code_version', index=1,
-      number=2, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='stack_trace', full_name='crit_event.proto.ServiceEvent.stack_trace', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _SERVICEEVENT_SERVICEEVENTTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2312,
-  serialized_end=2570,
-)
-
-
-_BUILDEVENT = _descriptor.Descriptor(
-  name='BuildEvent',
-  full_name='crit_event.proto.BuildEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='type', full_name='crit_event.proto.BuildEvent.type', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='host_name', full_name='crit_event.proto.BuildEvent.host_name', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_name', full_name='crit_event.proto.BuildEvent.build_name', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_number', full_name='crit_event.proto.BuildEvent.build_number', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_scheduling_time_ms', full_name='crit_event.proto.BuildEvent.build_scheduling_time_ms', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='step_name', full_name='crit_event.proto.BuildEvent.step_name', index=5,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='step_text', full_name='crit_event.proto.BuildEvent.step_text', index=6,
-      number=19, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='step_number', full_name='crit_event.proto.BuildEvent.step_number', index=7,
-      number=7, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='result', full_name='crit_event.proto.BuildEvent.result', index=8,
-      number=8, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='extra_result_code', full_name='crit_event.proto.BuildEvent.extra_result_code', index=9,
-      number=10, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='goma_stats', full_name='crit_event.proto.BuildEvent.goma_stats', index=10,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='goma_error', full_name='crit_event.proto.BuildEvent.goma_error', index=11,
-      number=17, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='goma_crash_report_id', full_name='crit_event.proto.BuildEvent.goma_crash_report_id', index=12,
-      number=18, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='project', full_name='crit_event.proto.BuildEvent.project', index=13,
-      number=11, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='head_revision', full_name='crit_event.proto.BuildEvent.head_revision', index=14,
-      number=12, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='revision', full_name='crit_event.proto.BuildEvent.revision', index=15,
-      number=13, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='patch_url', full_name='crit_event.proto.BuildEvent.patch_url', index=16,
-      number=14, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='buildset', full_name='crit_event.proto.BuildEvent.buildset', index=17,
-      number=24, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='category', full_name='crit_event.proto.BuildEvent.category', index=18,
-      number=20, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bbucket_id', full_name='crit_event.proto.BuildEvent.bbucket_id', index=19,
-      number=15, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bbucket_user_agent', full_name='crit_event.proto.BuildEvent.bbucket_user_agent', index=20,
-      number=16, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fail_type', full_name='crit_event.proto.BuildEvent.fail_type', index=21,
-      number=21, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cq_project', full_name='crit_event.proto.BuildEvent.cq_project', index=22,
-      number=22, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cq_subproject', full_name='crit_event.proto.BuildEvent.cq_subproject', index=23,
-      number=23, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _BUILDEVENT_BUILDEVENTTYPE,
-    _BUILDEVENT_BUILDRESULT,
-    _BUILDEVENT_GOMAERRORTYPE,
-    _BUILDEVENT_CATEGORY,
-    _BUILDEVENT_FAILTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2573,
-  serialized_end=3934,
-)
-
-
-_INFRAEVENTSOURCE = _descriptor.Descriptor(
-  name='InfraEventSource',
-  full_name='crit_event.proto.InfraEventSource',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='host_name', full_name='crit_event.proto.InfraEventSource.host_name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='appengine_name', full_name='crit_event.proto.InfraEventSource.appengine_name', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='service_name', full_name='crit_event.proto.InfraEventSource.service_name', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=3936,
-  serialized_end=4019,
-)
-
-
-_TESTRESULTSEVENT_TESTRESULT = _descriptor.Descriptor(
-  name='TestResult',
-  full_name='crit_event.proto.TestResultsEvent.TestResult',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='test_name', full_name='crit_event.proto.TestResultsEvent.TestResult.test_name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='actual', full_name='crit_event.proto.TestResultsEvent.TestResult.actual', index=1,
-      number=2, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='expected', full_name='crit_event.proto.TestResultsEvent.TestResult.expected', index=2,
-      number=3, type=14, cpp_type=8, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bugs', full_name='crit_event.proto.TestResultsEvent.TestResult.bugs', index=3,
-      number=4, type=9, cpp_type=9, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=4253,
-  serialized_end=4434,
-)
-
-_TESTRESULTSEVENT = _descriptor.Descriptor(
-  name='TestResultsEvent',
-  full_name='crit_event.proto.TestResultsEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='master_name', full_name='crit_event.proto.TestResultsEvent.master_name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='builder_name', full_name='crit_event.proto.TestResultsEvent.builder_name', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_number', full_name='crit_event.proto.TestResultsEvent.build_number', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='test_type', full_name='crit_event.proto.TestResultsEvent.test_type', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='interrupted', full_name='crit_event.proto.TestResultsEvent.interrupted', index=4,
-      number=5, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='version', full_name='crit_event.proto.TestResultsEvent.version', index=5,
-      number=6, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='usec_since_epoch', full_name='crit_event.proto.TestResultsEvent.usec_since_epoch', index=6,
-      number=7, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='tests', full_name='crit_event.proto.TestResultsEvent.tests', index=7,
-      number=8, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_TESTRESULTSEVENT_TESTRESULT, ],
-  enum_types=[
-    _TESTRESULTSEVENT_TESTRESULTTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=4022,
-  serialized_end=4665,
-)
-
-
-_MACHINEPROVIDEREVENT = _descriptor.Descriptor(
-  name='MachineProviderEvent',
-  full_name='crit_event.proto.MachineProviderEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='gce_backend_state', full_name='crit_event.proto.MachineProviderEvent.gce_backend_state', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='gce_backend_deletion_reason', full_name='crit_event.proto.MachineProviderEvent.gce_backend_deletion_reason', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='machine_provider_state', full_name='crit_event.proto.MachineProviderEvent.machine_provider_state', index=2,
-      number=3, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='pubsub_subscription', full_name='crit_event.proto.MachineProviderEvent.pubsub_subscription', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='lease_expiration_ts', full_name='crit_event.proto.MachineProviderEvent.lease_expiration_ts', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='machine_state', full_name='crit_event.proto.MachineProviderEvent.machine_state', index=5,
-      number=6, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='swarming_server', full_name='crit_event.proto.MachineProviderEvent.swarming_server', index=6,
-      number=7, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _MACHINEPROVIDEREVENT_GCEBACKENDMACHINESTATE,
-    _MACHINEPROVIDEREVENT_GCEBACKENDDELETIONREASON,
-    _MACHINEPROVIDEREVENT_MACHINEPROVIDERMACHINESTATE,
-    _MACHINEPROVIDEREVENT_MACHINEPROVIDERAGENTSTATE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=4668,
-  serialized_end=5803,
-)
-
-
-_CHROMEINFRAEVENT = _descriptor.Descriptor(
-  name='ChromeInfraEvent',
-  full_name='crit_event.proto.ChromeInfraEvent',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='timestamp_kind', full_name='crit_event.proto.ChromeInfraEvent.timestamp_kind', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='trace_id', full_name='crit_event.proto.ChromeInfraEvent.trace_id', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='span_id', full_name='crit_event.proto.ChromeInfraEvent.span_id', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='parent_id', full_name='crit_event.proto.ChromeInfraEvent.parent_id', index=3,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='event_source', full_name='crit_event.proto.ChromeInfraEvent.event_source', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='service_event', full_name='crit_event.proto.ChromeInfraEvent.service_event', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='build_event', full_name='crit_event.proto.ChromeInfraEvent.build_event', index=6,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cq_event', full_name='crit_event.proto.ChromeInfraEvent.cq_event', index=7,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='test_results', full_name='crit_event.proto.ChromeInfraEvent.test_results', index=8,
-      number=10, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='machine_provider_event', full_name='crit_event.proto.ChromeInfraEvent.machine_provider_event', index=9,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _CHROMEINFRAEVENT_TIMESTAMPKIND,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=5806,
-  serialized_end=6361,
-)
-
-_CQEVENT_FAILUREREASON_FAILEDTRYJOB.fields_by_name['fail_type'].enum_type = _BUILDEVENT_FAILTYPE
-_CQEVENT_FAILUREREASON_FAILEDTRYJOB.containing_type = _CQEVENT_FAILUREREASON;
-_CQEVENT_FAILUREREASON.fields_by_name['verifier'].enum_type = _CQEVENT_VERIFIER
-_CQEVENT_FAILUREREASON.fields_by_name['fail_type'].enum_type = _CQEVENT_FAILUREREASON_FAILTYPE
-_CQEVENT_FAILUREREASON.fields_by_name['failed_try_jobs'].message_type = _CQEVENT_FAILUREREASON_FAILEDTRYJOB
-_CQEVENT_FAILUREREASON.containing_type = _CQEVENT;
-_CQEVENT_FAILUREREASON_FAILTYPE.containing_type = _CQEVENT_FAILUREREASON;
-_CQEVENT_TRIGGEREDTRYJOB.containing_type = _CQEVENT;
-_CQEVENT.fields_by_name['action'].enum_type = _CQEVENT_ACTION
-_CQEVENT.fields_by_name['verifier'].enum_type = _CQEVENT_VERIFIER
-_CQEVENT.fields_by_name['status'].enum_type = _CQEVENT_STATUS
-_CQEVENT.fields_by_name['failure_reason'].message_type = _CQEVENT_FAILUREREASON
-_CQEVENT.fields_by_name['triggered_tryjobs'].message_type = _CQEVENT_TRIGGEREDTRYJOB
-_CQEVENT_ACTION.containing_type = _CQEVENT;
-_CQEVENT_VERIFIER.containing_type = _CQEVENT;
-_CQEVENT_STATUS.containing_type = _CQEVENT;
-_SERVICEEVENT.fields_by_name['type'].enum_type = _SERVICEEVENT_SERVICEEVENTTYPE
-_SERVICEEVENT.fields_by_name['code_version'].message_type = _CODEVERSION
-_SERVICEEVENT_SERVICEEVENTTYPE.containing_type = _SERVICEEVENT;
-_BUILDEVENT.fields_by_name['type'].enum_type = _BUILDEVENT_BUILDEVENTTYPE
-_BUILDEVENT.fields_by_name['result'].enum_type = _BUILDEVENT_BUILDRESULT
-_BUILDEVENT.fields_by_name['goma_stats'].message_type = goma_stats_pb2._GOMASTATS
-_BUILDEVENT.fields_by_name['goma_error'].enum_type = _BUILDEVENT_GOMAERRORTYPE
-_BUILDEVENT.fields_by_name['head_revision'].message_type = _CODEVERSION
-_BUILDEVENT.fields_by_name['revision'].message_type = _CODEVERSION
-_BUILDEVENT.fields_by_name['category'].enum_type = _BUILDEVENT_CATEGORY
-_BUILDEVENT.fields_by_name['fail_type'].enum_type = _BUILDEVENT_FAILTYPE
-_BUILDEVENT_BUILDEVENTTYPE.containing_type = _BUILDEVENT;
-_BUILDEVENT_BUILDRESULT.containing_type = _BUILDEVENT;
-_BUILDEVENT_GOMAERRORTYPE.containing_type = _BUILDEVENT;
-_BUILDEVENT_CATEGORY.containing_type = _BUILDEVENT;
-_BUILDEVENT_FAILTYPE.containing_type = _BUILDEVENT;
-_TESTRESULTSEVENT_TESTRESULT.fields_by_name['actual'].enum_type = _TESTRESULTSEVENT_TESTRESULTTYPE
-_TESTRESULTSEVENT_TESTRESULT.fields_by_name['expected'].enum_type = _TESTRESULTSEVENT_TESTRESULTTYPE
-_TESTRESULTSEVENT_TESTRESULT.containing_type = _TESTRESULTSEVENT;
-_TESTRESULTSEVENT.fields_by_name['tests'].message_type = _TESTRESULTSEVENT_TESTRESULT
-_TESTRESULTSEVENT_TESTRESULTTYPE.containing_type = _TESTRESULTSEVENT;
-_MACHINEPROVIDEREVENT.fields_by_name['gce_backend_state'].enum_type = _MACHINEPROVIDEREVENT_GCEBACKENDMACHINESTATE
-_MACHINEPROVIDEREVENT.fields_by_name['gce_backend_deletion_reason'].enum_type = _MACHINEPROVIDEREVENT_GCEBACKENDDELETIONREASON
-_MACHINEPROVIDEREVENT.fields_by_name['machine_provider_state'].enum_type = _MACHINEPROVIDEREVENT_MACHINEPROVIDERMACHINESTATE
-_MACHINEPROVIDEREVENT.fields_by_name['machine_state'].enum_type = _MACHINEPROVIDEREVENT_MACHINEPROVIDERAGENTSTATE
-_MACHINEPROVIDEREVENT_GCEBACKENDMACHINESTATE.containing_type = _MACHINEPROVIDEREVENT;
-_MACHINEPROVIDEREVENT_GCEBACKENDDELETIONREASON.containing_type = _MACHINEPROVIDEREVENT;
-_MACHINEPROVIDEREVENT_MACHINEPROVIDERMACHINESTATE.containing_type = _MACHINEPROVIDEREVENT;
-_MACHINEPROVIDEREVENT_MACHINEPROVIDERAGENTSTATE.containing_type = _MACHINEPROVIDEREVENT;
-_CHROMEINFRAEVENT.fields_by_name['timestamp_kind'].enum_type = _CHROMEINFRAEVENT_TIMESTAMPKIND
-_CHROMEINFRAEVENT.fields_by_name['event_source'].message_type = _INFRAEVENTSOURCE
-_CHROMEINFRAEVENT.fields_by_name['service_event'].message_type = _SERVICEEVENT
-_CHROMEINFRAEVENT.fields_by_name['build_event'].message_type = _BUILDEVENT
-_CHROMEINFRAEVENT.fields_by_name['cq_event'].message_type = _CQEVENT
-_CHROMEINFRAEVENT.fields_by_name['test_results'].message_type = _TESTRESULTSEVENT
-_CHROMEINFRAEVENT.fields_by_name['machine_provider_event'].message_type = _MACHINEPROVIDEREVENT
-_CHROMEINFRAEVENT_TIMESTAMPKIND.containing_type = _CHROMEINFRAEVENT;
-DESCRIPTOR.message_types_by_name['CQEvent'] = _CQEVENT
-DESCRIPTOR.message_types_by_name['CodeVersion'] = _CODEVERSION
-DESCRIPTOR.message_types_by_name['ServiceEvent'] = _SERVICEEVENT
-DESCRIPTOR.message_types_by_name['BuildEvent'] = _BUILDEVENT
-DESCRIPTOR.message_types_by_name['InfraEventSource'] = _INFRAEVENTSOURCE
-DESCRIPTOR.message_types_by_name['TestResultsEvent'] = _TESTRESULTSEVENT
-DESCRIPTOR.message_types_by_name['MachineProviderEvent'] = _MACHINEPROVIDEREVENT
-DESCRIPTOR.message_types_by_name['ChromeInfraEvent'] = _CHROMEINFRAEVENT
-
-class CQEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-  class FailureReason(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-    class FailedTryJob(_message.Message):
-      __metaclass__ = _reflection.GeneratedProtocolMessageType
-      DESCRIPTOR = _CQEVENT_FAILUREREASON_FAILEDTRYJOB
-
-      # @@protoc_insertion_point(class_scope:crit_event.proto.CQEvent.FailureReason.FailedTryJob)
-    DESCRIPTOR = _CQEVENT_FAILUREREASON
-
-    # @@protoc_insertion_point(class_scope:crit_event.proto.CQEvent.FailureReason)
-
-  class TriggeredTryJob(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _CQEVENT_TRIGGEREDTRYJOB
-
-    # @@protoc_insertion_point(class_scope:crit_event.proto.CQEvent.TriggeredTryJob)
-  DESCRIPTOR = _CQEVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.CQEvent)
-
-class CodeVersion(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _CODEVERSION
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.CodeVersion)
-
-class ServiceEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _SERVICEEVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.ServiceEvent)
-
-class BuildEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _BUILDEVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.BuildEvent)
-
-class InfraEventSource(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _INFRAEVENTSOURCE
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.InfraEventSource)
-
-class TestResultsEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-  class TestResult(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _TESTRESULTSEVENT_TESTRESULT
-
-    # @@protoc_insertion_point(class_scope:crit_event.proto.TestResultsEvent.TestResult)
-  DESCRIPTOR = _TESTRESULTSEVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.TestResultsEvent)
-
-class MachineProviderEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _MACHINEPROVIDEREVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.MachineProviderEvent)
-
-class ChromeInfraEvent(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _CHROMEINFRAEVENT
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.ChromeInfraEvent)
-
-
-_CQEVENT.fields_by_name['status'].has_options = True
-_CQEVENT.fields_by_name['status']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/protos/goma_stats_pb2.py b/tools/swarming_client/third_party/infra_libs/event_mon/protos/goma_stats_pb2.py
deleted file mode 100644
index f66fee2..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/protos/goma_stats_pb2.py
+++ /dev/null
@@ -1,1380 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: goma_stats.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='goma_stats.proto',
-  package='devtools_goma',
-  serialized_pb='\n\x10goma_stats.proto\x12\rdevtools_goma\"\x93\x02\n\x0cRequestStats\x12\r\n\x05total\x18\x01 \x01(\x03\x12\x0f\n\x07success\x18\x02 \x01(\x03\x12\x0f\n\x07\x66\x61ilure\x18\x03 \x01(\x03\x12\x39\n\x0e\x63ompiler_proxy\x18\x04 \x01(\x0b\x32!.devtools_goma.CompilerProxyStats\x12\x37\n\rcompiler_info\x18\x05 \x01(\x0b\x32 .devtools_goma.CompilerInfoStats\x12-\n\x04goma\x18\x06 \x01(\x0b\x32\x1f.devtools_goma.GomaCompileStats\x12/\n\x05local\x18\x07 \x01(\x0b\x32 .devtools_goma.LocalCompileStats\"\"\n\x12\x43ompilerProxyStats\x12\x0c\n\x04\x66\x61il\x18\x01 \x01(\x03\"n\n\x11\x43ompilerInfoStats\x12\x0e\n\x06stores\x18\x01 \x01(\x03\x12\x12\n\nstore_dups\x18\x02 \x01(\x03\x12\x0c\n\x04miss\x18\x03 \x01(\x03\x12\x0c\n\x04\x66\x61il\x18\x04 \x01(\x03\x12\x19\n\x11loaded_size_bytes\x18\x05 \x01(\x03\"e\n\x10GomaCompileStats\x12\x10\n\x08\x66inished\x18\x01 \x01(\x03\x12\x11\n\tcache_hit\x18\x02 \x01(\x03\x12\x0f\n\x07\x61\x62orted\x18\x03 \x01(\x03\x12\r\n\x05retry\x18\x04 \x01(\x03\x12\x0c\n\x04\x66\x61il\x18\x05 \x01(\x03\"B\n\x11LocalCompileStats\x12\x0b\n\x03run\x18\x01 \x01(\x03\x12\x0e\n\x06killed\x18\x02 \x01(\x03\x12\x10\n\x08\x66inished\x18\x03 \x01(\x03\"@\n\tFileStats\x12\x11\n\trequested\x18\x01 \x01(\x03\x12\x10\n\x08uploaded\x18\x02 \x01(\x03\x12\x0e\n\x06missed\x18\x03 \x01(\x03\"K\n\x0bOutputStats\x12\r\n\x05\x66iles\x18\x01 \x01(\x03\x12\x0e\n\x06rename\x18\x02 \x01(\x03\x12\x0b\n\x03\x62uf\x18\x03 \x01(\x03\x12\x10\n\x08peak_req\x18\x04 \x01(\x03\" \n\x0bMemoryStats\x12\x11\n\tconsuming\x18\x01 \x01(\x03\"\x1b\n\tTimeStats\x12\x0e\n\x06uptime\x18\x01 \x01(\x03\"h\n\x15IncludeProcessorStats\x12\r\n\x05total\x18\x01 \x01(\x03\x12\x0f\n\x07skipped\x18\x02 \x01(\x03\x12\x17\n\x0ftotal_wait_time\x18\x03 \x01(\x03\x12\x16\n\x0etotal_run_time\x18\x04 \x01(\x03\"\xf3\x01\n\x11IncludeCacheStats\x12\x15\n\rtotal_entries\x18\x01 \x01(\x03\x12\x18\n\x10total_cache_size\x18\x02 \x01(\x03\x12\x0b\n\x03hit\x18\x03 \x01(\x03\x12\x0e\n\x06missed\x18\x04 \x01(\x03\x12\x0f\n\x07updated\x18\x05 \x01(\x03\x12\x0f\n\x07\x65victed\x18\x06 \x01(\x03\x12\x1b\n\x13original_total_size\x18\x07 \x01(\x03\x12\x19\n\x11original_max_size\x18\x08 \x01(\x03\x12\x1b\n\x13\x66iltered_total_size\x18\t \x01(\x03\x12\x19\n\x11\x66iltered_max_size\x18\n \x01(\x03\"\x99\x01\n\x0e\x44\x65psCacheStats\x12\x17\n\x0f\x64\x65ps_table_size\x18\x01 \x01(\x03\x12\x13\n\x0bmax_entries\x18\x02 \x01(\x03\x12\x15\n\rtotal_entries\x18\x03 \x01(\x03\x12\x14\n\x0cidtable_size\x18\x04 \x01(\x03\x12\x0b\n\x03hit\x18\x05 \x01(\x03\x12\x0f\n\x07updated\x18\x06 \x01(\x03\x12\x0e\n\x06missed\x18\x07 \x01(\x03\"Z\n\x14IncludeDirCacheStats\x12\x11\n\tinstances\x18\x01 \x01(\x03\x12\x0e\n\x06memory\x18\x02 \x01(\x03\x12\x0f\n\x07\x63reated\x18\x03 \x01(\x03\x12\x0e\n\x06reused\x18\x04 \x01(\x03\"\xb8\x02\n\x0cHttpRPCStats\x12\x18\n\x10ping_status_code\x18\x01 \x01(\x05\x12\x1f\n\x17ping_round_trip_time_ms\x18\n \x01(\x05\x12\r\n\x05query\x18\x02 \x01(\x03\x12\x0e\n\x06\x61\x63tive\x18\x03 \x01(\x03\x12\r\n\x05retry\x18\x04 \x01(\x03\x12\x0f\n\x07timeout\x18\x05 \x01(\x03\x12\r\n\x05\x65rror\x18\x06 \x01(\x03\x12\x15\n\rnetwork_error\x18\x07 \x01(\x03\x12\x19\n\x11network_recovered\x18\x08 \x01(\x03\x12;\n\x0bstatus_code\x18\t \x03(\x0b\x32&.devtools_goma.HttpRPCStats.HttpStatus\x1a\x30\n\nHttpStatus\x12\x13\n\x0bstatus_code\x18\x01 \x01(\x05\x12\r\n\x05\x63ount\x18\x02 \x01(\x03\"q\n\x0eGomaErrorStats\x12\x12\n\nuser_error\x18\x01 \x01(\x03\x12#\n\x17\x44\x45PRECATED_user_warning\x18\x02 \x01(\x03\x42\x02\x18\x01\x12\x11\n\tlog_error\x18\x03 \x01(\x03\x12\x13\n\x0blog_warning\x18\x04 \x01(\x03\"p\n\x11GomaMismatchStats\x12 \n\x18\x63ommand_version_mismatch\x18\x01 \x01(\x03\x12\x1c\n\x14\x62inary_hash_mismatch\x18\x02 \x01(\x03\x12\x1b\n\x13subprogram_mismatch\x18\x03 \x01(\x03\"\x88\x01\n\x11\x44istributionProto\x12\r\n\x05\x63ount\x18\x01 \x02(\x03\x12\x0b\n\x03sum\x18\x02 \x02(\x03\x12\x16\n\x0esum_of_squares\x18\x03 \x02(\x01\x12\x0b\n\x03min\x18\x04 \x01(\x03\x12\x0b\n\x03max\x18\x05 \x01(\x03\x12\x0f\n\x07logbase\x18\x06 \x01(\x01\x12\x14\n\x0c\x62ucket_value\x18\x07 \x03(\x03\"\x86\x01\n\x0eGomaHistograms\x12\x37\n\rrpc_call_time\x18\x01 \x01(\x0b\x32 .devtools_goma.DistributionProto\x12;\n\x11\x65xecutor_run_time\x18\x02 \x01(\x0b\x32 .devtools_goma.DistributionProto\"\xd2\x01\n\x0bMachineInfo\x12\x15\n\rgoma_revision\x18\x01 \x01(\t\x12-\n\x02os\x18\x02 \x01(\x0e\x32!.devtools_goma.MachineInfo.OSType\x12\r\n\x05ncpus\x18\x03 \x01(\x05\x12\x13\n\x0bmemory_size\x18\x04 \x01(\x03\x12%\n\x19\x44\x45PRECATED_cpu_capability\x18\x05 \x03(\rB\x02\x18\x01\"2\n\x06OSType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05LINUX\x10\x01\x12\x07\n\x03MAC\x10\x02\x12\x07\n\x03WIN\x10\x03\"\x83\x06\n\tGomaStats\x12\x32\n\rrequest_stats\x18\x01 \x01(\x0b\x32\x1b.devtools_goma.RequestStats\x12,\n\nfile_stats\x18\x02 \x01(\x0b\x32\x18.devtools_goma.FileStats\x12\x30\n\x0coutput_stats\x18\x03 \x01(\x0b\x32\x1a.devtools_goma.OutputStats\x12\x30\n\x0cmemory_stats\x18\x04 \x01(\x0b\x32\x1a.devtools_goma.MemoryStats\x12,\n\ntime_stats\x18\x05 \x01(\x0b\x32\x18.devtools_goma.TimeStats\x12\x45\n\x17include_processor_stats\x18\x06 \x01(\x0b\x32$.devtools_goma.IncludeProcessorStats\x12\x36\n\x0f\x64\x65pscache_stats\x18\x07 \x01(\x0b\x32\x1d.devtools_goma.DepsCacheStats\x12>\n\x11incdircache_stats\x18\x08 \x01(\x0b\x32#.devtools_goma.IncludeDirCacheStats\x12\x33\n\x0ehttp_rpc_stats\x18\t \x01(\x0b\x32\x1b.devtools_goma.HttpRPCStats\x12\x32\n\x0b\x65rror_stats\x18\x0c \x01(\x0b\x32\x1d.devtools_goma.GomaErrorStats\x12\x38\n\x0emismatch_stats\x18\r \x01(\x0b\x32 .devtools_goma.GomaMismatchStats\x12<\n\x12includecache_stats\x18\x0e \x01(\x0b\x32 .devtools_goma.IncludeCacheStats\x12\x30\n\thistogram\x18\n \x01(\x0b\x32\x1d.devtools_goma.GomaHistograms\x12\x30\n\x0cmachine_info\x18\x0b \x01(\x0b\x32\x1a.devtools_goma.MachineInfo')
-
-
-
-_MACHINEINFO_OSTYPE = _descriptor.EnumDescriptor(
-  name='OSType',
-  full_name='devtools_goma.MachineInfo.OSType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='LINUX', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MAC', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='WIN', index=3, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=2419,
-  serialized_end=2469,
-)
-
-
-_REQUESTSTATS = _descriptor.Descriptor(
-  name='RequestStats',
-  full_name='devtools_goma.RequestStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='total', full_name='devtools_goma.RequestStats.total', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='success', full_name='devtools_goma.RequestStats.success', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='failure', full_name='devtools_goma.RequestStats.failure', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='compiler_proxy', full_name='devtools_goma.RequestStats.compiler_proxy', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='compiler_info', full_name='devtools_goma.RequestStats.compiler_info', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='goma', full_name='devtools_goma.RequestStats.goma', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='local', full_name='devtools_goma.RequestStats.local', index=6,
-      number=7, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=36,
-  serialized_end=311,
-)
-
-
-_COMPILERPROXYSTATS = _descriptor.Descriptor(
-  name='CompilerProxyStats',
-  full_name='devtools_goma.CompilerProxyStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='fail', full_name='devtools_goma.CompilerProxyStats.fail', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=313,
-  serialized_end=347,
-)
-
-
-_COMPILERINFOSTATS = _descriptor.Descriptor(
-  name='CompilerInfoStats',
-  full_name='devtools_goma.CompilerInfoStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='stores', full_name='devtools_goma.CompilerInfoStats.stores', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='store_dups', full_name='devtools_goma.CompilerInfoStats.store_dups', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='miss', full_name='devtools_goma.CompilerInfoStats.miss', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fail', full_name='devtools_goma.CompilerInfoStats.fail', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='loaded_size_bytes', full_name='devtools_goma.CompilerInfoStats.loaded_size_bytes', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=349,
-  serialized_end=459,
-)
-
-
-_GOMACOMPILESTATS = _descriptor.Descriptor(
-  name='GomaCompileStats',
-  full_name='devtools_goma.GomaCompileStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='finished', full_name='devtools_goma.GomaCompileStats.finished', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cache_hit', full_name='devtools_goma.GomaCompileStats.cache_hit', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='aborted', full_name='devtools_goma.GomaCompileStats.aborted', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='retry', full_name='devtools_goma.GomaCompileStats.retry', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fail', full_name='devtools_goma.GomaCompileStats.fail', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=461,
-  serialized_end=562,
-)
-
-
-_LOCALCOMPILESTATS = _descriptor.Descriptor(
-  name='LocalCompileStats',
-  full_name='devtools_goma.LocalCompileStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='run', full_name='devtools_goma.LocalCompileStats.run', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='killed', full_name='devtools_goma.LocalCompileStats.killed', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='finished', full_name='devtools_goma.LocalCompileStats.finished', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=564,
-  serialized_end=630,
-)
-
-
-_FILESTATS = _descriptor.Descriptor(
-  name='FileStats',
-  full_name='devtools_goma.FileStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='requested', full_name='devtools_goma.FileStats.requested', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='uploaded', full_name='devtools_goma.FileStats.uploaded', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='missed', full_name='devtools_goma.FileStats.missed', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=632,
-  serialized_end=696,
-)
-
-
-_OUTPUTSTATS = _descriptor.Descriptor(
-  name='OutputStats',
-  full_name='devtools_goma.OutputStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='files', full_name='devtools_goma.OutputStats.files', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='rename', full_name='devtools_goma.OutputStats.rename', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='buf', full_name='devtools_goma.OutputStats.buf', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='peak_req', full_name='devtools_goma.OutputStats.peak_req', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=698,
-  serialized_end=773,
-)
-
-
-_MEMORYSTATS = _descriptor.Descriptor(
-  name='MemoryStats',
-  full_name='devtools_goma.MemoryStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='consuming', full_name='devtools_goma.MemoryStats.consuming', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=775,
-  serialized_end=807,
-)
-
-
-_TIMESTATS = _descriptor.Descriptor(
-  name='TimeStats',
-  full_name='devtools_goma.TimeStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='uptime', full_name='devtools_goma.TimeStats.uptime', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=809,
-  serialized_end=836,
-)
-
-
-_INCLUDEPROCESSORSTATS = _descriptor.Descriptor(
-  name='IncludeProcessorStats',
-  full_name='devtools_goma.IncludeProcessorStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='total', full_name='devtools_goma.IncludeProcessorStats.total', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='skipped', full_name='devtools_goma.IncludeProcessorStats.skipped', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='total_wait_time', full_name='devtools_goma.IncludeProcessorStats.total_wait_time', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='total_run_time', full_name='devtools_goma.IncludeProcessorStats.total_run_time', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=838,
-  serialized_end=942,
-)
-
-
-_INCLUDECACHESTATS = _descriptor.Descriptor(
-  name='IncludeCacheStats',
-  full_name='devtools_goma.IncludeCacheStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='total_entries', full_name='devtools_goma.IncludeCacheStats.total_entries', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='total_cache_size', full_name='devtools_goma.IncludeCacheStats.total_cache_size', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='hit', full_name='devtools_goma.IncludeCacheStats.hit', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='missed', full_name='devtools_goma.IncludeCacheStats.missed', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='updated', full_name='devtools_goma.IncludeCacheStats.updated', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='evicted', full_name='devtools_goma.IncludeCacheStats.evicted', index=5,
-      number=6, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='original_total_size', full_name='devtools_goma.IncludeCacheStats.original_total_size', index=6,
-      number=7, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='original_max_size', full_name='devtools_goma.IncludeCacheStats.original_max_size', index=7,
-      number=8, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='filtered_total_size', full_name='devtools_goma.IncludeCacheStats.filtered_total_size', index=8,
-      number=9, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='filtered_max_size', full_name='devtools_goma.IncludeCacheStats.filtered_max_size', index=9,
-      number=10, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=945,
-  serialized_end=1188,
-)
-
-
-_DEPSCACHESTATS = _descriptor.Descriptor(
-  name='DepsCacheStats',
-  full_name='devtools_goma.DepsCacheStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='deps_table_size', full_name='devtools_goma.DepsCacheStats.deps_table_size', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='max_entries', full_name='devtools_goma.DepsCacheStats.max_entries', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='total_entries', full_name='devtools_goma.DepsCacheStats.total_entries', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='idtable_size', full_name='devtools_goma.DepsCacheStats.idtable_size', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='hit', full_name='devtools_goma.DepsCacheStats.hit', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='updated', full_name='devtools_goma.DepsCacheStats.updated', index=5,
-      number=6, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='missed', full_name='devtools_goma.DepsCacheStats.missed', index=6,
-      number=7, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1191,
-  serialized_end=1344,
-)
-
-
-_INCLUDEDIRCACHESTATS = _descriptor.Descriptor(
-  name='IncludeDirCacheStats',
-  full_name='devtools_goma.IncludeDirCacheStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='instances', full_name='devtools_goma.IncludeDirCacheStats.instances', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='memory', full_name='devtools_goma.IncludeDirCacheStats.memory', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='created', full_name='devtools_goma.IncludeDirCacheStats.created', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='reused', full_name='devtools_goma.IncludeDirCacheStats.reused', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1346,
-  serialized_end=1436,
-)
-
-
-_HTTPRPCSTATS_HTTPSTATUS = _descriptor.Descriptor(
-  name='HttpStatus',
-  full_name='devtools_goma.HttpRPCStats.HttpStatus',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='status_code', full_name='devtools_goma.HttpRPCStats.HttpStatus.status_code', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='count', full_name='devtools_goma.HttpRPCStats.HttpStatus.count', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1703,
-  serialized_end=1751,
-)
-
-_HTTPRPCSTATS = _descriptor.Descriptor(
-  name='HttpRPCStats',
-  full_name='devtools_goma.HttpRPCStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='ping_status_code', full_name='devtools_goma.HttpRPCStats.ping_status_code', index=0,
-      number=1, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='ping_round_trip_time_ms', full_name='devtools_goma.HttpRPCStats.ping_round_trip_time_ms', index=1,
-      number=10, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='query', full_name='devtools_goma.HttpRPCStats.query', index=2,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='active', full_name='devtools_goma.HttpRPCStats.active', index=3,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='retry', full_name='devtools_goma.HttpRPCStats.retry', index=4,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='timeout', full_name='devtools_goma.HttpRPCStats.timeout', index=5,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='error', full_name='devtools_goma.HttpRPCStats.error', index=6,
-      number=6, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='network_error', full_name='devtools_goma.HttpRPCStats.network_error', index=7,
-      number=7, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='network_recovered', full_name='devtools_goma.HttpRPCStats.network_recovered', index=8,
-      number=8, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='status_code', full_name='devtools_goma.HttpRPCStats.status_code', index=9,
-      number=9, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_HTTPRPCSTATS_HTTPSTATUS, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1439,
-  serialized_end=1751,
-)
-
-
-_GOMAERRORSTATS = _descriptor.Descriptor(
-  name='GomaErrorStats',
-  full_name='devtools_goma.GomaErrorStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='user_error', full_name='devtools_goma.GomaErrorStats.user_error', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='DEPRECATED_user_warning', full_name='devtools_goma.GomaErrorStats.DEPRECATED_user_warning', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')),
-    _descriptor.FieldDescriptor(
-      name='log_error', full_name='devtools_goma.GomaErrorStats.log_error', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='log_warning', full_name='devtools_goma.GomaErrorStats.log_warning', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1753,
-  serialized_end=1866,
-)
-
-
-_GOMAMISMATCHSTATS = _descriptor.Descriptor(
-  name='GomaMismatchStats',
-  full_name='devtools_goma.GomaMismatchStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='command_version_mismatch', full_name='devtools_goma.GomaMismatchStats.command_version_mismatch', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='binary_hash_mismatch', full_name='devtools_goma.GomaMismatchStats.binary_hash_mismatch', index=1,
-      number=2, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='subprogram_mismatch', full_name='devtools_goma.GomaMismatchStats.subprogram_mismatch', index=2,
-      number=3, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1868,
-  serialized_end=1980,
-)
-
-
-_DISTRIBUTIONPROTO = _descriptor.Descriptor(
-  name='DistributionProto',
-  full_name='devtools_goma.DistributionProto',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='count', full_name='devtools_goma.DistributionProto.count', index=0,
-      number=1, type=3, cpp_type=2, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='sum', full_name='devtools_goma.DistributionProto.sum', index=1,
-      number=2, type=3, cpp_type=2, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='sum_of_squares', full_name='devtools_goma.DistributionProto.sum_of_squares', index=2,
-      number=3, type=1, cpp_type=5, label=2,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='min', full_name='devtools_goma.DistributionProto.min', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='max', full_name='devtools_goma.DistributionProto.max', index=4,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='logbase', full_name='devtools_goma.DistributionProto.logbase', index=5,
-      number=6, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bucket_value', full_name='devtools_goma.DistributionProto.bucket_value', index=6,
-      number=7, type=3, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=1983,
-  serialized_end=2119,
-)
-
-
-_GOMAHISTOGRAMS = _descriptor.Descriptor(
-  name='GomaHistograms',
-  full_name='devtools_goma.GomaHistograms',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='rpc_call_time', full_name='devtools_goma.GomaHistograms.rpc_call_time', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='executor_run_time', full_name='devtools_goma.GomaHistograms.executor_run_time', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2122,
-  serialized_end=2256,
-)
-
-
-_MACHINEINFO = _descriptor.Descriptor(
-  name='MachineInfo',
-  full_name='devtools_goma.MachineInfo',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='goma_revision', full_name='devtools_goma.MachineInfo.goma_revision', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='os', full_name='devtools_goma.MachineInfo.os', index=1,
-      number=2, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='ncpus', full_name='devtools_goma.MachineInfo.ncpus', index=2,
-      number=3, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='memory_size', full_name='devtools_goma.MachineInfo.memory_size', index=3,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='DEPRECATED_cpu_capability', full_name='devtools_goma.MachineInfo.DEPRECATED_cpu_capability', index=4,
-      number=5, type=13, cpp_type=3, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _MACHINEINFO_OSTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2259,
-  serialized_end=2469,
-)
-
-
-_GOMASTATS = _descriptor.Descriptor(
-  name='GomaStats',
-  full_name='devtools_goma.GomaStats',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='request_stats', full_name='devtools_goma.GomaStats.request_stats', index=0,
-      number=1, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='file_stats', full_name='devtools_goma.GomaStats.file_stats', index=1,
-      number=2, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='output_stats', full_name='devtools_goma.GomaStats.output_stats', index=2,
-      number=3, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='memory_stats', full_name='devtools_goma.GomaStats.memory_stats', index=3,
-      number=4, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='time_stats', full_name='devtools_goma.GomaStats.time_stats', index=4,
-      number=5, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='include_processor_stats', full_name='devtools_goma.GomaStats.include_processor_stats', index=5,
-      number=6, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='depscache_stats', full_name='devtools_goma.GomaStats.depscache_stats', index=6,
-      number=7, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='incdircache_stats', full_name='devtools_goma.GomaStats.incdircache_stats', index=7,
-      number=8, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='http_rpc_stats', full_name='devtools_goma.GomaStats.http_rpc_stats', index=8,
-      number=9, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='error_stats', full_name='devtools_goma.GomaStats.error_stats', index=9,
-      number=12, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='mismatch_stats', full_name='devtools_goma.GomaStats.mismatch_stats', index=10,
-      number=13, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='includecache_stats', full_name='devtools_goma.GomaStats.includecache_stats', index=11,
-      number=14, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='histogram', full_name='devtools_goma.GomaStats.histogram', index=12,
-      number=10, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='machine_info', full_name='devtools_goma.GomaStats.machine_info', index=13,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=2472,
-  serialized_end=3243,
-)
-
-_REQUESTSTATS.fields_by_name['compiler_proxy'].message_type = _COMPILERPROXYSTATS
-_REQUESTSTATS.fields_by_name['compiler_info'].message_type = _COMPILERINFOSTATS
-_REQUESTSTATS.fields_by_name['goma'].message_type = _GOMACOMPILESTATS
-_REQUESTSTATS.fields_by_name['local'].message_type = _LOCALCOMPILESTATS
-_HTTPRPCSTATS_HTTPSTATUS.containing_type = _HTTPRPCSTATS;
-_HTTPRPCSTATS.fields_by_name['status_code'].message_type = _HTTPRPCSTATS_HTTPSTATUS
-_GOMAHISTOGRAMS.fields_by_name['rpc_call_time'].message_type = _DISTRIBUTIONPROTO
-_GOMAHISTOGRAMS.fields_by_name['executor_run_time'].message_type = _DISTRIBUTIONPROTO
-_MACHINEINFO.fields_by_name['os'].enum_type = _MACHINEINFO_OSTYPE
-_MACHINEINFO_OSTYPE.containing_type = _MACHINEINFO;
-_GOMASTATS.fields_by_name['request_stats'].message_type = _REQUESTSTATS
-_GOMASTATS.fields_by_name['file_stats'].message_type = _FILESTATS
-_GOMASTATS.fields_by_name['output_stats'].message_type = _OUTPUTSTATS
-_GOMASTATS.fields_by_name['memory_stats'].message_type = _MEMORYSTATS
-_GOMASTATS.fields_by_name['time_stats'].message_type = _TIMESTATS
-_GOMASTATS.fields_by_name['include_processor_stats'].message_type = _INCLUDEPROCESSORSTATS
-_GOMASTATS.fields_by_name['depscache_stats'].message_type = _DEPSCACHESTATS
-_GOMASTATS.fields_by_name['incdircache_stats'].message_type = _INCLUDEDIRCACHESTATS
-_GOMASTATS.fields_by_name['http_rpc_stats'].message_type = _HTTPRPCSTATS
-_GOMASTATS.fields_by_name['error_stats'].message_type = _GOMAERRORSTATS
-_GOMASTATS.fields_by_name['mismatch_stats'].message_type = _GOMAMISMATCHSTATS
-_GOMASTATS.fields_by_name['includecache_stats'].message_type = _INCLUDECACHESTATS
-_GOMASTATS.fields_by_name['histogram'].message_type = _GOMAHISTOGRAMS
-_GOMASTATS.fields_by_name['machine_info'].message_type = _MACHINEINFO
-DESCRIPTOR.message_types_by_name['RequestStats'] = _REQUESTSTATS
-DESCRIPTOR.message_types_by_name['CompilerProxyStats'] = _COMPILERPROXYSTATS
-DESCRIPTOR.message_types_by_name['CompilerInfoStats'] = _COMPILERINFOSTATS
-DESCRIPTOR.message_types_by_name['GomaCompileStats'] = _GOMACOMPILESTATS
-DESCRIPTOR.message_types_by_name['LocalCompileStats'] = _LOCALCOMPILESTATS
-DESCRIPTOR.message_types_by_name['FileStats'] = _FILESTATS
-DESCRIPTOR.message_types_by_name['OutputStats'] = _OUTPUTSTATS
-DESCRIPTOR.message_types_by_name['MemoryStats'] = _MEMORYSTATS
-DESCRIPTOR.message_types_by_name['TimeStats'] = _TIMESTATS
-DESCRIPTOR.message_types_by_name['IncludeProcessorStats'] = _INCLUDEPROCESSORSTATS
-DESCRIPTOR.message_types_by_name['IncludeCacheStats'] = _INCLUDECACHESTATS
-DESCRIPTOR.message_types_by_name['DepsCacheStats'] = _DEPSCACHESTATS
-DESCRIPTOR.message_types_by_name['IncludeDirCacheStats'] = _INCLUDEDIRCACHESTATS
-DESCRIPTOR.message_types_by_name['HttpRPCStats'] = _HTTPRPCSTATS
-DESCRIPTOR.message_types_by_name['GomaErrorStats'] = _GOMAERRORSTATS
-DESCRIPTOR.message_types_by_name['GomaMismatchStats'] = _GOMAMISMATCHSTATS
-DESCRIPTOR.message_types_by_name['DistributionProto'] = _DISTRIBUTIONPROTO
-DESCRIPTOR.message_types_by_name['GomaHistograms'] = _GOMAHISTOGRAMS
-DESCRIPTOR.message_types_by_name['MachineInfo'] = _MACHINEINFO
-DESCRIPTOR.message_types_by_name['GomaStats'] = _GOMASTATS
-
-class RequestStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _REQUESTSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.RequestStats)
-
-class CompilerProxyStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _COMPILERPROXYSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.CompilerProxyStats)
-
-class CompilerInfoStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _COMPILERINFOSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.CompilerInfoStats)
-
-class GomaCompileStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _GOMACOMPILESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.GomaCompileStats)
-
-class LocalCompileStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _LOCALCOMPILESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.LocalCompileStats)
-
-class FileStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _FILESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.FileStats)
-
-class OutputStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _OUTPUTSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.OutputStats)
-
-class MemoryStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _MEMORYSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.MemoryStats)
-
-class TimeStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _TIMESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.TimeStats)
-
-class IncludeProcessorStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _INCLUDEPROCESSORSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.IncludeProcessorStats)
-
-class IncludeCacheStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _INCLUDECACHESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.IncludeCacheStats)
-
-class DepsCacheStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _DEPSCACHESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.DepsCacheStats)
-
-class IncludeDirCacheStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _INCLUDEDIRCACHESTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.IncludeDirCacheStats)
-
-class HttpRPCStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-  class HttpStatus(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _HTTPRPCSTATS_HTTPSTATUS
-
-    # @@protoc_insertion_point(class_scope:devtools_goma.HttpRPCStats.HttpStatus)
-  DESCRIPTOR = _HTTPRPCSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.HttpRPCStats)
-
-class GomaErrorStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _GOMAERRORSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.GomaErrorStats)
-
-class GomaMismatchStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _GOMAMISMATCHSTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.GomaMismatchStats)
-
-class DistributionProto(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _DISTRIBUTIONPROTO
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.DistributionProto)
-
-class GomaHistograms(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _GOMAHISTOGRAMS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.GomaHistograms)
-
-class MachineInfo(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _MACHINEINFO
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.MachineInfo)
-
-class GomaStats(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _GOMASTATS
-
-  # @@protoc_insertion_point(class_scope:devtools_goma.GomaStats)
-
-
-_GOMAERRORSTATS.fields_by_name['DEPRECATED_user_warning'].has_options = True
-_GOMAERRORSTATS.fields_by_name['DEPRECATED_user_warning']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')
-_MACHINEINFO.fields_by_name['DEPRECATED_cpu_capability'].has_options = True
-_MACHINEINFO.fields_by_name['DEPRECATED_cpu_capability']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), '\030\001')
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/protos/log_request_lite_pb2.py b/tools/swarming_client/third_party/infra_libs/event_mon/protos/log_request_lite_pb2.py
deleted file mode 100644
index 88eac66..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/protos/log_request_lite_pb2.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: log_request_lite.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='log_request_lite.proto',
-  package='crit_event.proto',
-  serialized_pb='\n\x16log_request_lite.proto\x12\x10\x63rit_event.proto\"\xf0\x01\n\x0eLogRequestLite\x12\x17\n\x0frequest_time_ms\x18\x04 \x01(\x03\x12\x17\n\x0flog_source_name\x18\x06 \x01(\t\x12@\n\tlog_event\x18\x03 \x03(\x0b\x32-.crit_event.proto.LogRequestLite.LogEventLite\x1aj\n\x0cLogEventLite\x12\x15\n\revent_time_ms\x18\x01 \x01(\x03\x12\x12\n\nevent_code\x18\x0b \x01(\x05\x12\x15\n\revent_flow_id\x18\x0c \x01(\x05\x12\x18\n\x10source_extension\x18\x06 \x01(\x0c')
-
-
-
-
-_LOGREQUESTLITE_LOGEVENTLITE = _descriptor.Descriptor(
-  name='LogEventLite',
-  full_name='crit_event.proto.LogRequestLite.LogEventLite',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='event_time_ms', full_name='crit_event.proto.LogRequestLite.LogEventLite.event_time_ms', index=0,
-      number=1, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='event_code', full_name='crit_event.proto.LogRequestLite.LogEventLite.event_code', index=1,
-      number=11, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='event_flow_id', full_name='crit_event.proto.LogRequestLite.LogEventLite.event_flow_id', index=2,
-      number=12, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='source_extension', full_name='crit_event.proto.LogRequestLite.LogEventLite.source_extension', index=3,
-      number=6, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value="",
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=179,
-  serialized_end=285,
-)
-
-_LOGREQUESTLITE = _descriptor.Descriptor(
-  name='LogRequestLite',
-  full_name='crit_event.proto.LogRequestLite',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='request_time_ms', full_name='crit_event.proto.LogRequestLite.request_time_ms', index=0,
-      number=4, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='log_source_name', full_name='crit_event.proto.LogRequestLite.log_source_name', index=1,
-      number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='log_event', full_name='crit_event.proto.LogRequestLite.log_event', index=2,
-      number=3, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[_LOGREQUESTLITE_LOGEVENTLITE, ],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=45,
-  serialized_end=285,
-)
-
-_LOGREQUESTLITE_LOGEVENTLITE.containing_type = _LOGREQUESTLITE;
-_LOGREQUESTLITE.fields_by_name['log_event'].message_type = _LOGREQUESTLITE_LOGEVENTLITE
-DESCRIPTOR.message_types_by_name['LogRequestLite'] = _LOGREQUESTLITE
-
-class LogRequestLite(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-
-  class LogEventLite(_message.Message):
-    __metaclass__ = _reflection.GeneratedProtocolMessageType
-    DESCRIPTOR = _LOGREQUESTLITE_LOGEVENTLITE
-
-    # @@protoc_insertion_point(class_scope:crit_event.proto.LogRequestLite.LogEventLite)
-  DESCRIPTOR = _LOGREQUESTLITE
-
-  # @@protoc_insertion_point(class_scope:crit_event.proto.LogRequestLite)
-
-
-# @@protoc_insertion_point(module_scope)
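
The module deleted above is the old protoc Python output: each message is a hand-built Descriptor plus a class whose __metaclass__ is GeneratedProtocolMessageType. A minimal sketch of how such a generated message is typically populated, assuming the module is still importable under its infra_libs path; field names come from the descriptors above, and the timestamp and event values are invented:

from infra_libs.event_mon.protos.log_request_lite_pb2 import LogRequestLite

# Roughly what push_event() in router.py (deleted below) assembles before sending.
request = LogRequestLite()
request.log_source_name = 'CHROME_INFRA'
request.request_time_ms = 1478000000000          # int64, milliseconds

event = request.log_event.add()                  # nested LogEventLite
event.event_time_ms = 1478000000000
event.event_code = 1
event.source_extension = 'serialized ChromeInfraEvent'  # bytes field

payload = request.SerializeToString()            # wire format suitable for POSTing
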
diff --git a/tools/swarming_client/third_party/infra_libs/event_mon/router.py b/tools/swarming_client/third_party/infra_libs/event_mon/router.py
deleted file mode 100644
index 437e118..0000000
--- a/tools/swarming_client/third_party/infra_libs/event_mon/router.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-import os
-import random
-import sys
-import time
-
-import httplib2
-
-import infra_libs
-from infra_libs.event_mon.protos.log_request_lite_pb2 import LogRequestLite
-from infra_libs.event_mon.protos.chrome_infra_log_pb2 import ChromeInfraEvent
-
-
-def time_ms():
-  """Return current timestamp in milliseconds."""
-  return int(1000 * time.time())
-
-
-def backoff_time(attempt, retry_backoff=2., max_delay=30.):
-  """Compute randomized exponential backoff time.
-
-  Args:
-    attempt (int): attempt number, starting at zero.
-
-  Keyword Args:
-    retry_backoff(float): backoff time on the first attempt.
-    max_delay(float): maximum returned value.
-  """
-  delay = retry_backoff * (2 ** attempt)
-  # Add +-25% of variation.
-  delay += delay * ((random.random() - 0.5) / 2.)
-  return min(delay, max_delay)
-
-
-class _Router(object):
-  """Route events to the right destination. Base class.
-
-  This object is meant to be a singleton, and is not part of the API.
-  Subclasses must implement _send_to_endpoint().
-
-  Usage:
-  router = _Router()
-  event = ChromeInfraEvent.LogEventLite(...)
-  ... fill in event ...
-  router.push_event(event)
-  """
-  def push_event(self, log_events):
-    """Enqueue event to push to the collection service.
-
-    This method offers no guarantee on return that the events have been pushed
-    externally, as some buffering can take place.
-
-    Args:
-      log_events (LogRequestLite.LogEventLite or list/tuple of): events.
-
-    Returns:
-      success (bool): False if an error happened. True means 'event accepted',
-        but NOT 'event successfully pushed to the remote'.
-    """
-    if isinstance(log_events, LogRequestLite.LogEventLite):
-      log_events = (log_events,)
-
-    if not isinstance(log_events, (list, tuple)):
-      logging.error('Invalid type for "event", should be LogEventLite or '
-                    'list of. Got %s' % str(type(log_events)))
-      return False
-
-    request_p = LogRequestLite()
-    request_p.log_source_name = 'CHROME_INFRA'
-    request_p.log_event.extend(log_events)  # copies the protobuf
-    # Sets the sending time here for safety, _send_to_endpoint should change it
-    # if needed.
-    request_p.request_time_ms = time_ms()
-    return self._send_to_endpoint(request_p)
-
-  def _send_to_endpoint(self, events):
-    """Send a protobuf to wherever it should be sent.
-
-    This method is called by push_event.
-    If some computation is required, make sure to update events.request_time_ms
-    right before sending.
-
-    Args:
-      events(LogRequestLite): protobuf to send.
-
-    Returns:
-      success(bool): whether POSTing/writing succeeded or not.
-    """
-    raise NotImplementedError('Please implement _send_to_endpoint().')
-
-
-class _LocalFileRouter(_Router):
-  def __init__(self, output_file, dry_run=False):
-    """Initialize the router.
-
-    Writes a serialized LogRequestLite protobuf in a local file. File is
-    created/truncated before writing (no append).
-
-    Args:
-      output_file(str): path to file where to write the protobuf.
-
-    Keyword Args:
-      dry_run(bool): if True, the file is not written.
-    """
-    _Router.__init__(self)
-    self.output_file = output_file
-    self._dry_run = dry_run
-
-  def _send_to_endpoint(self, events):
-    try:
-      if not os.path.isdir(os.path.dirname(self.output_file)):
-        logging.error('File cannot be written, parent directory does '
-                      'not exist: %s' % os.path.dirname(self.output_file))
-      if self._dry_run:
-        logging.info('Would have written in %s', self.output_file)
-      else:
-        with open(self.output_file, 'wb') as f:
-          f.write(events.SerializeToString())  # pragma: no branch
-    except Exception:
-      logging.exception('Failed to write in file: %s', self.output_file)
-      return False
-
-    return True
-
-
-class _TextStreamRouter(_Router):
-  def __init__(self, stream=sys.stdout):
-    """Initialize the router.
-
-    Args:
-      stream(File): where to write the output.
-    """
-    _Router.__init__(self)
-    self.stream = stream
-
-  def _send_to_endpoint(self, events):
-    # Prints individual events because it's what we're usually interested
-    # in that case.
-    infra_events = [str(ChromeInfraEvent.FromString(
-      ev.source_extension)) for ev in events.log_event]
-    try:
-      self.stream.write('%s\n' % '\n'.join(infra_events))
-    except Exception:
-      logging.exception('Unable to write to provided stream')
-      return False
-    return True
-
-
-class _LoggingStreamRouter(_Router):
-  def __init__(self, severity=logging.INFO):
-    """Initialize the router.
-
-    Args:
-      severity: severity of the messages for reporting events
-    """
-    _Router.__init__(self)
-    self.severity = severity
-
-  def _send_to_endpoint(self, events):
-    try:
-      for ev in events.log_event:
-        ev_str = str(ChromeInfraEvent.FromString(ev.source_extension))
-        logging.log(self.severity, 'Sending event_mon event:\n%s' % ev_str)
-    except Exception:
-      logging.exception('Unable to log the events')
-      return False
-    return True
-
-
-class _HttpRouter(_Router):
-  def __init__(self, cache, endpoint, timeout=10, try_num=3, retry_backoff=2.,
-               dry_run=False, _sleep_fn=time.sleep):
-    """Initialize the router.
-
-    Args:
-      cache(dict): This must be config._cache. Passed as a parameter to
-        avoid a circular import.
-      endpoint(str or None): None means 'dry run'. What would be sent is printed
-        on stdout. If endpoint starts with 'https://' data is POSTed there.
-        Otherwise it is interpreted as a file where to write serialized
-        LogEventLite protos.
-      try_num(int): max number of http requests sent to the endpoint.
-      retry_backoff(float): time in seconds before retrying posting to the
-         endpoint. Randomized exponential backoff is applied on subsequent
-         retries.
-      dry_run(boolean): if True, no http request is sent. Instead a message is
-         printed.
-      _sleep_fn (function): function to wait specified number of seconds. This
-        argument is provided for testing purposes.
-    """
-    HTTP_IDENTIFIER = 'event_mon'
-    _Router.__init__(self)
-    self.endpoint = endpoint
-    self.try_num = try_num
-    self.retry_backoff = retry_backoff
-    self._cache = cache
-    self._http = infra_libs.InstrumentedHttp(HTTP_IDENTIFIER, timeout=timeout)
-    self._dry_run = dry_run
-    self._sleep_fn = _sleep_fn
-
-    # TODO(pgervais) pass this as parameters instead.
-    if self._cache.get('service_account_creds'):
-      try:
-        logging.debug('Activating OAuth2 authentication.')
-        self._http = infra_libs.get_authenticated_http(
-          self._cache['service_account_creds'],
-          service_accounts_creds_root=
-              self._cache['service_accounts_creds_root'],
-          scope='https://www.googleapis.com/auth/cclog',
-          http_identifier=HTTP_IDENTIFIER,
-          timeout=timeout
-        )
-      except IOError:
-        logging.error('Unable to read credentials, requests will be '
-                      'unauthenticated. File: %s',
-                      self._cache.get('service_account_creds'))
-
-  def _send_to_endpoint(self, events):
-    """Send protobuf to endpoint
-
-    Args:
-      events(LogRequestLite): the protobuf to send.
-
-    Returns:
-      success(bool): whether POSTing/writing succeeded or not.
-    """
-    if not self.endpoint.startswith('https://'):
-      logging.error("Received invalid https endpoint: %s", self.endpoint)
-      return False
-
-    logging.debug('event_mon: POSTing events to %s', self.endpoint)
-
-    attempt = 0  # silencing pylint
-    for attempt in xrange(self.try_num):  # pragma: no branch
-      # (re)set this time at the very last moment
-      events.request_time_ms = time_ms()
-      response = None
-      try:
-        if self._dry_run:
-          logging.info('Http requests disabled. Not sending anything')
-        else:  # pragma: no cover
-          response, _ = self._http.request(
-            uri=self.endpoint,
-            method='POST',
-            headers={'Content-Type': 'application/octet-stream'},
-            body=events.SerializeToString()
-          )
-
-        if self._dry_run or response.status == 200:
-          logging.debug('Succeeded POSTing data after %d attempts', attempt + 1)
-          return True
-
-      except Exception:
-        logging.exception('exception when POSTing data')
-
-      if response:
-        logging.error('failed to POST data to %s Status: %d (attempt %d)',
-                      self.endpoint, response.status, attempt)
-
-      if attempt == 0:
-        logging.error('data: %s', str(events)[:2000])
-
-      self._sleep_fn(backoff_time(attempt, retry_backoff=self.retry_backoff))
-
-    logging.error('failed to POST data after %d attempts, giving up.',
-                  attempt+1)
-    return False
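
Two details of the deleted router are worth spelling out: backoff_time() grows the wait exponentially but adds +/-25% random jitter so retrying clients do not synchronize, and _HttpRouter sleeps for that amount between its try_num POST attempts. The following is a standalone restatement of the same computation with the default parameters, given only for reference:

import random

def backoff_time(attempt, retry_backoff=2., max_delay=30.):
    delay = retry_backoff * (2 ** attempt)           # 2s, 4s, 8s, 16s, ...
    delay += delay * ((random.random() - 0.5) / 2.)  # +/- 25% jitter
    return min(delay, max_delay)                     # never wait longer than max_delay

# With the defaults: attempt 0 waits 1.5-2.5s, attempt 1 waits 3-5s,
# attempt 2 waits 6-10s, and from attempt 4 onward the 30s cap kicks in.
for attempt in range(5):
    print(attempt, backoff_time(attempt))
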
diff --git a/tools/swarming_client/third_party/infra_libs/experiments.py b/tools/swarming_client/third_party/infra_libs/experiments.py
deleted file mode 100644
index 7a85037..0000000
--- a/tools/swarming_client/third_party/infra_libs/experiments.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tools for gradually enabling a feature on a deterministic set of machines.
-
-Add a flag to your program to control the percentage of machines that a new
-feature should be enabled on::
-
-    def add_argparse_options(self, parser):
-      parser.add_argument('--myfeature-percent', type=int, default=0)
-
-    def main(self, opts):
-      if experiments.is_active_for_host('myfeature', opts.myfeature_percent):
-        # do myfeature
-"""
-
-import hashlib
-import logging
-import socket
-import struct
-
-
-def _is_active(labels, percent):
-  h = hashlib.md5()
-  for label, value in sorted(labels.iteritems()):
-    h.update(label)
-    h.update(value)
-
-  # The first 8 bytes of the hash digest as an unsigned integer.
-  hash_num = struct.unpack_from('Q', h.digest())[0]
-
-  return (hash_num % 100) < percent
-
-
-def is_active_for_host(experiment_name, percent):
-  ret = _is_active({
-      'name': experiment_name,
-      'host': socket.getfqdn(),
-  }, percent)
-
-  if ret:
-    logging.info('Experiment "%s" is active', experiment_name)
-
-  return ret
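
The rollout logic deleted above is deterministic rather than random: _is_active() hashes the sorted label/value pairs (experiment name and fully-qualified host name) with MD5, reads the first 8 bytes of the digest as an unsigned integer, and enables the feature when that number modulo 100 falls below the requested percentage. A small sketch of the same bucketing, using an invented host name:

import hashlib
import struct

def rollout_bucket(experiment_name, host):
    # Same scheme as the deleted _is_active(): hash sorted label/value pairs,
    # then map the first 8 digest bytes to a stable bucket in 0..99.
    h = hashlib.md5()
    for label, value in sorted({'host': host, 'name': experiment_name}.items()):
        h.update(label.encode())
        h.update(value.encode())
    return struct.unpack_from('Q', h.digest())[0] % 100

# The bucket depends only on the names, so raising --myfeature-percent from
# 10 to 20 adds hosts without ever turning the feature off where it was on.
bucket = rollout_bucket('myfeature', 'build42.example.com')
print(bucket, 'enabled:', bucket < 10)
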
diff --git a/tools/swarming_client/third_party/infra_libs/httplib2_utils.py b/tools/swarming_client/third_party/infra_libs/httplib2_utils.py
deleted file mode 100644
index a37447f..0000000
--- a/tools/swarming_client/third_party/infra_libs/httplib2_utils.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import copy
-import json
-import logging
-import os
-import re
-import socket
-import sys
-import time
-
-import httplib2
-import oauth2client.client
-
-from googleapiclient import errors
-from infra_libs.ts_mon.common import http_metrics
-
-DEFAULT_SCOPES = ['email']
-
-# default timeout for http requests, in seconds
-DEFAULT_TIMEOUT = 30
-
-# This is part of the API.
-if sys.platform.startswith('win'): # pragma: no cover
-  SERVICE_ACCOUNTS_CREDS_ROOT = 'C:\\creds\\service_accounts'
-else:
-  SERVICE_ACCOUNTS_CREDS_ROOT = '/creds/service_accounts'
-
-
-class AuthError(Exception):
-  pass
-
-
-def load_service_account_credentials(credentials_filename,
-                                     service_accounts_creds_root=None):
-  """Loads and validate a credential JSON file.
-
-  Example of a well-formatted file:
-    {
-      "private_key_id": "4168d274cdc7a1eaef1c59f5b34bdf255",
-      "private_key": ("-----BEGIN PRIVATE KEY-----\nMIIhkiG9w0BAQEFAASCAmEwsd"
-                      "sdfsfFd\ngfxFChctlOdTNm2Wrr919Nx9q+sPV5ibyaQt5Dgn89fKV"
-                      "jftrO3AMDS3sMjaE4Ib\nZwJgy90wwBbMT7/YOzCgf5PZfivUe8KkB"
-                      -----END PRIVATE KEY-----\n",
-      "client_email": "234243-rjstu8hi95iglc8at3@developer.gserviceaccount.com",
-      "client_id": "234243-rjstu8hi95iglc8at3.apps.googleusercontent.com",
-      "type": "service_account"
-    }
-
-  Args:
-    credentials_filename (str): path to a .json file containing credentials
-      for a Cloud platform service account.
-
-  Keyword Args:
-    service_accounts_creds_root (str or None): location where all service
-      account credentials are stored. ``credentials_filename`` is relative
-      to this path. None means 'use default location'.
-
-  Raises:
-    AuthError: if the file content is invalid.
-  """
-  service_accounts_creds_root = (service_accounts_creds_root
-                                 or SERVICE_ACCOUNTS_CREDS_ROOT)
-
-  service_account_file = os.path.join(service_accounts_creds_root,
-                                      credentials_filename)
-  try:
-    with open(service_account_file, 'r') as f:
-      key = json.load(f)
-  except ValueError as e:
-    raise AuthError('Parsing of file as JSON failed (%s): %s',
-                    e, service_account_file)
-
-  if key.get('type') != 'service_account':
-    msg = ('Credentials type must be for a service_account, got %s.'
-           ' Check content of %s' % (key.get('type'), service_account_file))
-    logging.error(msg)
-    raise AuthError(msg)
-
-  if not key.get('client_email'):
-    msg = ('client_email field missing in credentials json file. '
-           ' Check content of %s' % service_account_file)
-    logging.error(msg)
-    raise AuthError(msg)
-
-  if not key.get('private_key'):
-    msg = ('private_key field missing in credentials json. '
-           ' Check content of %s' % service_account_file)
-    logging.error(msg)
-    raise AuthError(msg)
-
-  return key
-
-
-def get_signed_jwt_assertion_credentials(credentials_filename,
-                                         scope=None,
-                                         service_accounts_creds_root=None):
-  """Factory for SignedJwtAssertionCredentials
-
-  Reads and validates the json credential file.
-
-  Args:
-    credentials_filename (str): path to the service account key file.
-      See load_service_account_credentials() docstring for the file format.
-
-  Keyword Args:
-    scope (str|list of str): scope(s) of the credentials being
-      requested. Defaults to https://www.googleapis.com/auth/userinfo.email.
-    service_accounts_creds_root (str or None): location where all service
-      account credentials are stored. ``credentials_filename`` is relative
-      to this path. None means 'use default location'.
-  """
-  scope = scope or DEFAULT_SCOPES
-  if isinstance(scope, basestring):
-    scope = [scope]
-  assert all(isinstance(s, basestring) for s in scope)
-
-  key = load_service_account_credentials(
-    credentials_filename,
-    service_accounts_creds_root=service_accounts_creds_root)
-
-  return oauth2client.client.SignedJwtAssertionCredentials(
-    key['client_email'], key['private_key'], scope)
-
-
-def get_authenticated_http(credentials_filename,
-                           scope=None,
-                           service_accounts_creds_root=None,
-                           http_identifier=None,
-                           timeout=DEFAULT_TIMEOUT):
-  """Creates an httplib2.Http wrapped with a service account authenticator.
-
-  Args:
-    credentials_filename (str): relative path to the file containing
-      credentials in json format. Path is relative to the default
-      location where credentials are stored (platform-dependent).
-
-  Keyword Args:
-    scope (str|list of str): scope(s) of the credentials being
-      requested. Defaults to https://www.googleapis.com/auth/userinfo.email.
-    service_accounts_creds_root (str or None): location where all service
-      account credentials are stored. ``credentials_filename`` is relative
-      to this path. None means 'use default location'.
-    http_identifier (str): if provided, an instrumented http object is
-      returned, and this string is used to identify it to ts_mon.
-    timeout (int): timeout passed to httplib2.Http, in seconds.
-
-  Returns:
-    httplib2.Http authenticated with the master's service account.
-  """
-  creds = get_signed_jwt_assertion_credentials(
-    credentials_filename,
-    scope=scope,
-    service_accounts_creds_root=service_accounts_creds_root)
-
-  if http_identifier:
-    http = InstrumentedHttp(http_identifier, timeout=timeout)
-  else:
-    http = httplib2.Http(timeout=timeout)
-  return creds.authorize(http)
-
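-# Minimal usage sketch (illustrative only; 'creds.json' and the example URL are
-# placeholders, not part of this module):
-def _example_get_authenticated_http():  # pragma: no cover
-  # Reads creds.json under the default credentials root, builds an
-  # InstrumentedHttp reporting to ts_mon as 'example_api', and authorizes it.
-  http = get_authenticated_http('creds.json', http_identifier='example_api')
-  return http.request('https://example.com/api')
-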
-class RetriableHttp(object):
-  """A httplib2.Http object that retries on failure."""
-
-  def __init__(self, http, max_tries=5, backoff_time=1,
-               retrying_statuses_fn=None):
-    """
-    Args:
-      http: an httplib2.Http instance
-      max_tries: maximum number of attempts
-      backoff_time: number of seconds to sleep between retries
-      retrying_statuses_fn: a function that returns True if a given status
-                            code should be retried
-    """
-    self._http = http
-    self._max_tries = max_tries
-    self._backoff_time = backoff_time
-    self._retrying_statuses_fn = retrying_statuses_fn or \
-                                 set(range(500, 600)).__contains__
-
-  def request(self, uri, method='GET', body=None, *args, **kwargs):
-    for i in range(1, self._max_tries + 1):
-      try:
-        response, content = self._http.request(uri, method, body, *args,
-                                               **kwargs)
-
-        if self._retrying_statuses_fn(response.status):
-          logging.info('RetriableHttp: attempt %d received status %d, %s',
-                       i, response.status,
-                       'final attempt' if i == self._max_tries else \
-                       'will retry')
-        else:
-          break
-      except (ValueError, errors.Error,
-              socket.timeout, socket.error, socket.herror, socket.gaierror,
-              httplib2.HttpLib2Error) as error:
-        logging.info('RetriableHttp: attempt %d received exception: %s, %s',
-                     i, error, 'final attempt' if i == self._max_tries else \
-                     'will retry')
-        if i == self._max_tries:
-          raise
-      time.sleep(self._backoff_time)
-
-    return response, content
-
-  def __getattr__(self, name):
-    return getattr(self._http, name)
-
-  def __setattr__(self, name, value):
-    if name in ('request', '_http', '_max_tries', '_backoff_time',
-                '_retrying_statuses_fn'):
-      self.__dict__[name] = value
-    else:
-      setattr(self._http, name, value)
-
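-# Minimal usage sketch (illustrative only; the URL is a placeholder). Wrapping
-# a plain httplib2.Http in RetriableHttp retries 5xx responses up to five times
-# with a one-second backoff between attempts.
-def _example_retriable_request():  # pragma: no cover
-  http = RetriableHttp(httplib2.Http(timeout=DEFAULT_TIMEOUT))
-  return http.request('https://example.com/api', method='GET')
-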
-class InstrumentedHttp(httplib2.Http):
-  """A httplib2.Http object that reports ts_mon metrics about its requests."""
-
-  def __init__(self, name, time_fn=time.time, timeout=DEFAULT_TIMEOUT,
-               **kwargs):
-    """
-    Args:
-      name: An identifier for the HTTP requests made by this object.
-      time_fn: Function returning the current time in seconds. Use for testing
-        purposes only.
-    """
-
-    super(InstrumentedHttp, self).__init__(timeout=timeout, **kwargs)
-    self.fields = {'name': name, 'client': 'httplib2'}
-    self.time_fn = time_fn
-
-  def _update_metrics(self, status, start_time):
-    status_fields = {'status': status}
-    status_fields.update(self.fields)
-    http_metrics.response_status.increment(fields=status_fields)
-
-    duration_msec = (self.time_fn() - start_time) * 1000
-    http_metrics.durations.add(duration_msec, fields=self.fields)
-
-  def request(self, uri, method="GET", body=None, *args, **kwargs):
-    request_bytes = 0
-    if body is not None:
-      request_bytes = len(body)
-    http_metrics.request_bytes.add(request_bytes, fields=self.fields)
-
-    start_time = self.time_fn()
-    try:
-      response, content = super(InstrumentedHttp, self).request(
-          uri, method, body, *args, **kwargs)
-    except socket.timeout:
-      self._update_metrics(http_metrics.STATUS_TIMEOUT, start_time)
-      raise
-    except (socket.error, socket.herror, socket.gaierror):
-      self._update_metrics(http_metrics.STATUS_ERROR, start_time)
-      raise
-    except httplib2.HttpLib2Error:
-      self._update_metrics(http_metrics.STATUS_EXCEPTION, start_time)
-      raise
-    http_metrics.response_bytes.add(len(content), fields=self.fields)
-
-    self._update_metrics(response.status, start_time)
-
-    return response, content
-
-
-class HttpMock(object):
-  """Mock of httplib2.Http"""
-  HttpCall = collections.namedtuple('HttpCall', ('uri', 'method', 'body',
-                                                 'headers'))
-
-  def __init__(self, uris):
-    """
-    Args:
-      uris (list): list of (uri, headers, body) tuples. `uri` is a regexp for
-        matching the requested uri; (headers, body) gives the values returned
-        by the mock. URIs are tested in the order given in `uris`.
-        `headers` is a dict mapping header names to values. The 'status' key
-        is mandatory. `body` is a string.
-        Ex: [('.*', {'status': 200}, 'nicely done.')]
-    """
-    self._uris = []
-    self.requests_made = []
-
-    for value in uris:
-      if not isinstance(value, (list, tuple)) or len(value) != 3:
-        raise ValueError("'uris' must be a sequence of (uri, headers, body)")
-      uri, headers, body = value
-      compiled_uri = re.compile(uri)
-      if not isinstance(headers, dict):
-        raise TypeError("'headers' must be a dict")
-      if 'status' not in headers:
-        raise ValueError("'headers' must have 'status' as a key")
-
-      new_headers = copy.copy(headers)
-      new_headers['status'] = int(new_headers['status'])
-
-      if not isinstance(body, basestring):
-        raise TypeError("'body' must be a string, got %s" % type(body))
-      self._uris.append((compiled_uri, new_headers, body))
-
-  # pylint: disable=unused-argument
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    self.requests_made.append(self.HttpCall(uri, method, body, headers))
-    headers = None
-    body = None
-    for candidate in self._uris:
-      if candidate[0].match(uri):
-        _, headers, body = candidate
-        break
-    if not headers:
-      raise AssertionError("Unexpected request to %s" % uri)
-    return httplib2.Response(headers), body
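-
-
-# Minimal usage sketch (illustrative only; the regexp and body are
-# placeholders), showing how HttpMock substitutes for httplib2.Http in tests:
-def _example_http_mock():  # pragma: no cover
-  mock_http = HttpMock([(r'.*', {'status': 200}, 'nicely done.')])
-  response, content = mock_http.request('https://example.com/api')
-  assert response.status == 200 and content == 'nicely done.'
-  return mock_http.requests_made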
diff --git a/tools/swarming_client/third_party/infra_libs/infra_types/__init__.py b/tools/swarming_client/third_party/infra_libs/infra_types/__init__.py
deleted file mode 100644
index 00835ac..0000000
--- a/tools/swarming_client/third_party/infra_libs/infra_types/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from infra_libs.infra_types.infra_types import freeze
-from infra_libs.infra_types.infra_types import thaw
-from infra_libs.infra_types.infra_types import FrozenDict
diff --git a/tools/swarming_client/third_party/infra_libs/infra_types/infra_types.py b/tools/swarming_client/third_party/infra_libs/infra_types/infra_types.py
deleted file mode 100644
index faa20e0..0000000
--- a/tools/swarming_client/third_party/infra_libs/infra_types/infra_types.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import operator
-
-
-def freeze(obj):
-  """Takes a generic object ``obj``, and returns an immutable version of it.
-
-  Supported types:
-    * dict / OrderedDict -> FrozenDict
-    * list -> tuple
-    * set -> frozenset
-    * any object with a working __hash__ implementation (assumes that hashable
-      means immutable)
-
-  Will raise TypeError if you pass an object which is not hashable.
-  """
-  if isinstance(obj, dict):
-    return FrozenDict((freeze(k), freeze(v)) for k, v in obj.iteritems())
-  elif isinstance(obj, (list, tuple)):
-    return tuple(freeze(i) for i in obj)
-  elif isinstance(obj, set):
-    return frozenset(freeze(i) for i in obj)
-  else:
-    hash(obj)
-    return obj
-
-
-def thaw(obj):
-  """Takes an object from freeze() and returns a mutable copy of it."""
-  if isinstance(obj, FrozenDict):
-    return collections.OrderedDict(
-        (thaw(k), thaw(v)) for k, v in obj.iteritems())
-  elif isinstance(obj, tuple):
-    return list(thaw(i) for i in obj)
-  elif isinstance(obj, frozenset):
-    return set(thaw(i) for i in obj)
-  else:
-    return obj
-
-
-class FrozenDict(collections.Mapping):
-  """An immutable OrderedDict.
-
-  Modified From: http://stackoverflow.com/a/2704866
-  """
-  def __init__(self, *args, **kwargs):
-    self._d = collections.OrderedDict(*args, **kwargs)
-
-    # Calculate the hash immediately so that we know all the items are
-    # hashable too.
-    self._hash = reduce(operator.xor,
-                        (hash(i) for i in enumerate(self._d.iteritems())), 0)
-
-  def __eq__(self, other):
-    if not isinstance(other, collections.Mapping):
-      return NotImplemented
-    if self is other:
-      return True
-    if len(self) != len(other):
-      return False
-    for k, v in self.iteritems():
-      if k not in other or other[k] != v:
-        return False
-    return True
-
-  def __iter__(self):
-    return iter(self._d)
-
-  def __len__(self):
-    return len(self._d)
-
-  def __getitem__(self, key):
-    return self._d[key]
-
-  def __hash__(self):
-    return self._hash
-
-  def __repr__(self):
-    return 'FrozenDict(%r)' % (self._d.items(),)
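-
-
-# Minimal usage sketch (illustrative only): freeze() turns nested mutable
-# containers into hashable ones; thaw() converts them back.
-def _example_freeze_thaw():  # pragma: no cover
-  frozen = freeze({'a': [1, 2], 'b': {3}})
-  assert isinstance(frozen, FrozenDict) and frozen['a'] == (1, 2)
-  return thaw(frozen)  # OrderedDict([('a', [1, 2]), ('b', set([3]))])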
diff --git a/tools/swarming_client/third_party/infra_libs/instrumented_requests.py b/tools/swarming_client/third_party/infra_libs/instrumented_requests.py
deleted file mode 100644
index c04c203..0000000
--- a/tools/swarming_client/third_party/infra_libs/instrumented_requests.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Report ts_mon metrics for HTTP requests made by the `requests` library.
-
-This module provides get(), post(), etc. methods that wrap the corresponding
-methods in the requests module.  They take an additional first 'name' argument
-which is an identifier for the type of request being made.
-
-Example::
-
-  from infra_libs import instrumented_requests
-  r = instrumented_requests.get('myapi', 'https://example.com/api')
-
-Alternatively you can add the hook manually::
-
-  import requests
-  from infra_libs import instrumented_requests
-  r = requests.get(
-      'https://example.com/api',
-      hooks={'response': instrumented_requests.instrumentation_hook('myapi')})
-"""
-
-import functools
-
-import requests
-
-from infra_libs.ts_mon.common import http_metrics
-
-
-def instrumentation_hook(name):
-  """Returns a hook function that records ts_mon metrics about the request.
-
-  Usage::
-
-    r = requests.get(
-        'https://example.com/api',
-        hooks={'response': instrumented_requests.instrumentation_hook('myapi')})
-
-  Args:
-    name: An identifier for the HTTP requests made by this object.
-  """
-
-  def _content_length(headers):
-    if headers is None or 'content-length' not in headers:
-      return 0
-    return int(headers['content-length'])
-
-  def hook(response, *_args, **_kwargs):
-    request_bytes = _content_length(response.request.headers)
-    response_bytes = _content_length(response.headers)
-    duration_msec = response.elapsed.total_seconds() * 1000
-
-    fields = {'name': name, 'client': 'requests'}
-    http_metrics.request_bytes.add(request_bytes, fields=fields)
-    http_metrics.response_bytes.add(response_bytes, fields=fields)
-    http_metrics.durations.add(duration_msec, fields=fields)
-
-    _update_status(name, response.status_code)
-
-  return hook
-
-
-def _update_status(name, status):
-  fields = {'status': status, 'name': name, 'client': 'requests'}
-  http_metrics.response_status.increment(fields=fields)
-
-
-def _wrap(method, name, url, *args, **kwargs):
-  hooks = {'response': instrumentation_hook(name)}
-  if 'hooks' in kwargs:
-    hooks.update(kwargs['hooks'])
-  kwargs['hooks'] = hooks
-
-  try:
-    return getattr(requests, method)(url, *args, **kwargs)
-  except requests.exceptions.ReadTimeout:
-    _update_status(name, http_metrics.STATUS_TIMEOUT)
-    raise
-  except requests.exceptions.ConnectionError:
-    _update_status(name, http_metrics.STATUS_ERROR)
-    raise
-  except requests.exceptions.RequestException:
-    _update_status(name, http_metrics.STATUS_EXCEPTION)
-    raise
-
-
-request = functools.partial(_wrap, 'request')
-get = functools.partial(_wrap, 'get')
-head = functools.partial(_wrap, 'head')
-post = functools.partial(_wrap, 'post')
-patch = functools.partial(_wrap, 'patch')
-put = functools.partial(_wrap, 'put')
-delete = functools.partial(_wrap, 'delete')
-options = functools.partial(_wrap, 'options')
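-
-
-# Minimal usage sketch (illustrative only; the URL and payload are
-# placeholders). Each wrapper behaves like its requests counterpart, with a
-# ts_mon instrumentation hook added under the given name:
-def _example_instrumented_post():  # pragma: no cover
-  return post('example_api', 'https://example.com/api', data={'k': 'v'})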
diff --git a/tools/swarming_client/third_party/infra_libs/logs/README.md b/tools/swarming_client/third_party/infra_libs/logs/README.md
deleted file mode 100644
index 6cc211c..0000000
--- a/tools/swarming_client/third_party/infra_libs/logs/README.md
+++ /dev/null
@@ -1,59 +0,0 @@
-# Logging in infra.git
-
-## Features
-
-The `infra_libs.logs` package contains some code to simplify logging and
-make it consistent and easily configurable. Using it makes your code
-future-proof.
-
-Offered features:
-
-* the log level can be changed from the command line.
-* overly verbose modules can be blacklisted for easier debugging.
-* a consistent log format is enforced.
-
-A typical log line looks like:
-
-    [I2014-06-27T11:42:32.418716-07:00 7082 logs:71] this is the message
-
-The first letter gives the severity of the message, followed by an ISO 8601
-timestamp with timezone information, the process id, and the module name with
-the line number. After the closing square bracket comes the actual message.
-
-## Sample code
-
-This is the standard way to set up logging so as to take advantage of
-the goodness provided by `infra_libs.logs`.
-
-In top-level files (other example in
-[infra.services.sysmon.__main__](../../infra/services/sysmon/__main__.py)):
-
-```python
-import argparse
-import infra_libs.logs
-
-parser = argparse.ArgumentParser()
-infra_libs.logs.add_argparse_options(parser)
-
-options = parser.parse_args()
-infra_libs.logs.process_argparse_options(options)
-```
-
-Logging messages should be done this way (other example in
-`infra.libs.service_utils.outer_loop`):
-
-```python
-import logging
-LOGGER = logging.getLogger(__name__)
-
-LOGGER.info('great message')
-LOGGER.error('terrible error')
-```
-
-Using `logging.getLogger` is a good practice in general (not restricted to
-infra_libs.logs) because it enables module blacklisting and other per-module
-configuration. It should be done at import time. See also the official
-[logging HOWTO](https://docs.python.org/2/howto/logging.html).
-`infra_libs.logs` also formats the output of the root logger, but using
-this logger is not recommended.
diff --git a/tools/swarming_client/third_party/infra_libs/logs/__init__.py b/tools/swarming_client/third_party/infra_libs/logs/__init__.py
deleted file mode 100644
index d220875..0000000
--- a/tools/swarming_client/third_party/infra_libs/logs/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from infra_libs.logs.logs import add_argparse_options
-from infra_libs.logs.logs import add_handler
-from infra_libs.logs.logs import process_argparse_options
diff --git a/tools/swarming_client/third_party/infra_libs/logs/logs.py b/tools/swarming_client/third_party/infra_libs/logs/logs.py
deleted file mode 100644
index 2a32786..0000000
--- a/tools/swarming_client/third_party/infra_libs/logs/logs.py
+++ /dev/null
@@ -1,295 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utilities for logging.
-
-Example usage:
-
-.. code-block:: python
-
-    import argparse
-    import logging
-    import infra_libs.logs
-
-    parser = argparse.ArgumentParser()
-    infra_libs.logs.add_argparse_options(parser)
-
-    options = parser.parse_args()
-    infra_libs.logs.process_argparse_options(options)
-
-    LOGGER = logging.getLogger(__name__)
-    LOGGER.info('test message')
-
-The last line should print something like::
-
-  [I2014-06-27T11:42:32.418716-07:00 7082 logs:71] test message
-
-"""
-
-import datetime
-import getpass
-import inspect
-import logging
-import logging.handlers
-import os
-import re
-import socket
-import sys
-import tempfile
-import textwrap
-
-import pytz
-
-from infra_libs.ts_mon.common.metrics import CumulativeMetric
-
-log_metric = CumulativeMetric(
-  'proc/log_lines', description="Number of log lines, per severity level.")
-
-if sys.platform.startswith('win'):  # pragma: no cover
-  DEFAULT_LOG_DIRECTORIES = os.pathsep.join([
-      'E:\\chrome-infra-logs',
-      'C:\\chrome-infra-logs',
-  ])
-else:
-  DEFAULT_LOG_DIRECTORIES = '/var/log/chrome-infra'
-
-
-class InfraFilter(logging.Filter):  # pragma: no cover
-  """Adds fields used by the infra-specific formatter.
-
-  Fields added:
-
-  - 'iso8601': timestamp
-  - 'severity': one-letter indicator of log level (first letter of levelname).
-  - 'fullModuleName': name of the module including the package name.
-
-  Args:
-    timezone (str): timezone in which timestamps should be printed.
-    module_name_blacklist (str): do not print log lines from modules whose name
-      matches this regular expression.
-  """
-  def __init__(self, timezone, module_name_blacklist=None):
-    super(InfraFilter, self).__init__()
-    self.module_name_blacklist = None
-
-    if module_name_blacklist:
-      self.module_name_blacklist = re.compile(module_name_blacklist)
-
-    self.tz = pytz.timezone(timezone)
-
-  def filter(self, record):
-    dt = datetime.datetime.fromtimestamp(record.created, tz=pytz.utc)
-    record.iso8601 = self.tz.normalize(dt).isoformat()
-    record.severity = record.levelname[0]
-    log_metric.increment(fields={'level': record.severity})
-    record.fullModuleName = self._full_module_name() or record.module
-    if self.module_name_blacklist:
-      if self.module_name_blacklist.search(record.fullModuleName):
-        return False
-    return True
-
-  def _full_module_name(self):
-    frame = inspect.currentframe()
-    try:
-      while frame is not None:
-        try:
-          name = frame.f_globals['__name__']
-        except KeyError:
-          continue
-
-        if name not in (__name__, 'logging'):
-          return name
-        frame = frame.f_back
-    finally:
-      del frame
-
-    return None
-
-
-class InfraFormatter(logging.Formatter):  # pragma: no cover
-  """Formats log messages in a standard way.
-
-  This object processes fields added by :class:`InfraFilter`.
-  """
-  def __init__(self):
-    super(InfraFormatter, self).__init__(
-        '[%(severity)s%(iso8601)s %(process)d %(thread)d '
-        '%(fullModuleName)s:%(lineno)s] %(message)s')
-
-
-def add_handler(logger, handler=None, timezone='UTC',
-                level=logging.WARNING,
-                module_name_blacklist=None):  # pragma: no cover
-  """Configures and adds a handler to a logger the standard way for infra.
-
-  Args:
-    logger (logging.Logger): logger object obtained from `logging.getLogger`.
-
-  Keyword Args:
-    handler (logging.Handler): handler to add to the logger. Defaults to
-       logging.StreamHandler.
-    timezone (str): timezone to use for timestamps.
-    level (int): logging level: one of DEBUG, INFO, WARNING, ERROR, CRITICAL.
-    module_name_blacklist (str): do not print log lines from modules whose name
-      matches this regular expression.
-
-  Example usage::
-
-    import logging
-    import infra_libs.logs
-    logger = logging.getLogger('foo')
-    infra_libs.logs.add_handler(logger, timezone='US/Pacific')
-    logger.info('test message')
-
-  The last line should print something like::
-
-    [I2014-06-27T11:42:32.418716-07:00 7082 logs:71] test message
-
-  """
-  handler = handler or logging.StreamHandler()
-  handler.addFilter(InfraFilter(timezone,
-                                module_name_blacklist=module_name_blacklist))
-  handler.setFormatter(InfraFormatter())
-  handler.setLevel(level=level)
-  logger.addHandler(handler)
-
-  # Formatters only get messages that pass this filter: let everything through.
-  logger.setLevel(level=logging.DEBUG)
-
-
-def default_program_name():
-  # Use argv[0] as the program name, except when it's '__main__.py' which is the
-  # case when we were invoked by run.py.  In this case look at the main module's
-  # __package__ variable which is set by runpy.
-  ret = os.path.basename(sys.argv[0])
-  if ret == '__main__.py':
-    package = sys.modules['__main__'].__package__
-    if package is not None:
-      return package.split('.')[-1]
-  return ret
-
-
-def add_argparse_options(parser,
-                         default_level=logging.WARNING):  # pragma: no cover
-  """Adds logging related options to an argparse.ArgumentParser.
-
-  See also: :func:`process_argparse_options`
-  """
-
-  parser = parser.add_argument_group('Logging Options')
-  g = parser.add_mutually_exclusive_group()
-  g.set_defaults(log_level=default_level)
-  g.add_argument('--logs-quiet', '--quiet',
-                 action='store_const', const=logging.ERROR,
-                 dest='log_level', help='Make the output quieter (ERROR).')
-  g.add_argument('--logs-warning', '--warning',
-                 action='store_const', const=logging.WARNING,
-                 dest='log_level',
-                 help='Set the output to an average verbosity (WARNING).')
-  g.add_argument('--logs-verbose', '--verbose',
-                 action='store_const', const=logging.INFO,
-                 dest='log_level', help='Make the output louder (INFO).')
-  g.add_argument('--logs-debug', '--debug',
-                 action='store_const', const=logging.DEBUG,
-                 dest='log_level', help='Make the output really loud (DEBUG).')
-  parser.add_argument('--logs-black-list', metavar='REGEX',
-                      help='hide log lines emitted by modules whose name '
-                           'matches this regular expression.')
-  parser.add_argument(
-      '--logs-directory',
-      default=DEFAULT_LOG_DIRECTORIES,
-      help=textwrap.fill(
-          'directory into which to write logs (default: %%(default)s). If '
-          'this directory does not exist or is not writable, the temporary '
-          'directory (%s) will be used instead. If this is explicitly set '
-          'to the empty string, logs will not be written at all. May be set '
-          'to multiple directories separated by the "%s" character, in '
-          'which case the first one that exists and is writable is used.' % (
-                tempfile.gettempdir(), os.pathsep), width=56))
-  parser.add_argument(
-      '--logs-program-name',
-      default=default_program_name(),
-      help='the program name used to name the log files created in '
-           '--logs-directory (default: %(default)s).')
-  parser.add_argument(
-      '--logs-debug-file',
-      action='store_true',
-      help='by default only INFO, WARNING and ERROR log files are written to '
-           'disk.  This flag causes a DEBUG log to be written as well.')
-
-
-def process_argparse_options(options, logger=None):  # pragma: no cover
-  """Handles logging argparse options added in 'add_argparse_options'.
-
-  Configures 'logging' module.
-
-  Args:
-    options: return value of argparse.ArgumentParser.parse_args.
-    logger (logging.Logger): logger to apply the configuration to.
-
-  Example usage::
-
-    import argparse
-    import sys
-    import infra_libs.logs
-
-    parser = argparse.ArgumentParser()
-    infra_libs.logs.add_argparse_options(parser)
-
-    options = parser.parse_args(sys.path[1:])
-    infra_libs.logs.process_argparse_options(options)
-  """
-
-  if logger is None:
-    logger = logging.root
-
-  add_handler(logger, level=options.log_level,
-              module_name_blacklist=options.logs_black_list)
-
-  if options.logs_directory:
-    _add_file_handlers(options, logger)
-
-
-def _add_file_handlers(options, logger):  # pragma: no cover
-  # Test whether we can write to the log directory.  If not, write to a
-  # temporary directory instead.  One of the DEFAULT_LOG_DIRECTORIES is created
-  # on the real production machines by puppet, so /tmp should only be used when
-  # running locally on developers' workstations.
-  logs_directory = tempfile.gettempdir()
-  for directory in options.logs_directory.split(os.pathsep):
-    if not os.path.isdir(directory):
-      continue
-
-    try:
-      with tempfile.TemporaryFile(dir=directory):
-        pass
-    except OSError:
-      pass
-    else:
-      logs_directory = directory
-      break
-
-  # Log files are named with this pattern:
-  # <program>.<hostname>.<username>.log.<level>.YYYYMMDD-HHMMSS.<pid>
-  pattern = "%s.%s.%s.log.%%s.%s.%d" % (
-      options.logs_program_name,
-      socket.getfqdn().split('.')[0],
-      getpass.getuser(),
-      datetime.datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
-      os.getpid())
-
-  file_levels = [logging.INFO, logging.WARNING, logging.ERROR]
-  if options.logs_debug_file:
-    file_levels.append(logging.DEBUG)
-
-  for level in file_levels:
-    add_handler(
-        logger,
-        handler=logging.handlers.RotatingFileHandler(
-            filename=os.path.join(
-                logs_directory, pattern % logging.getLevelName(level)),
-            maxBytes=10 * 1024 * 1024,
-            backupCount=10,
-            delay=True),
-        level=level)
diff --git a/tools/swarming_client/third_party/infra_libs/memoize.py b/tools/swarming_client/third_party/infra_libs/memoize.py
deleted file mode 100644
index 3eed568..0000000
--- a/tools/swarming_client/third_party/infra_libs/memoize.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Collection of decorators to help optimize Python functions and classes
-by caching return values.
-
-Return values are (by default) keyed off of the function's parameters.
-Consequently, any parameters that are used in memoization must be hashable.
-
-This library offers two styles of memoization:
-1) Absolute memoization (memo) uses the full set of parameters against a
-   per-function memo dictionary. If this is used on an instance method, the
-   'self' parameter is included in the key.
-2) Instance memoization (memo_i) uses a per-instance memoization dictionary.
-   The dictionary is stored as a member ('_memo__dict') of the instance.
-   Consequently, the 'self' parameter is no longer needed/used in the
-   memoization key, removing the need to have the instance itself support being
-   hashed.
-
-Memoized function state can be cleared by calling the memoized function's
-'memo_clear' method.
-"""
-
-import inspect
-
-
-# Instance variable added to class instances that use memoization to
-# differentiate their memoization values.
-MEMO_INSTANCE_VARIABLE = '_memo__dict'
-
-
-class MemoizedFunction(object):
-  """Handles the memoization state of a given memoized function."""
-
-  # Memoization constant to represent 'un-memoized' (b/c 'None' is a valid
-  # result)
-  class _EMPTY(object):
-    pass
-  EMPTY = _EMPTY()
-
-  def __init__(self, func, ignore=None, memo_dict=None):
-    """
-    Args:
-      func: (function) The function to memoize
-      ignore: (container) The names of 'func' parameters to ignore when
-          generating its memo key. Only parameters that have no effect on the
-          output of the function should be included.
-      memo_dict: (dict) Optional explicit dictionary in which to store
-          memoized results.
-    """
-    self.func = func
-    self.ignore = frozenset(ignore or ())
-    self.memo_dict = memo_dict
-    self.im_self = None
-    self.im_class = None
-
-  def __repr__(self):
-    properties = [str(self.func)]
-    if self.im_self is not None:
-      properties.append('bound=%s' % (self.im_self,))
-    if len(self.ignore) > 0:
-      properties.append('ignore=%s' % (','.join(sorted(self.ignore))))
-    return '%s(%s)' % (type(self).__name__, ', '.join(properties))
-
-  def __get__(self, obj, klass=None):
-    # Make this callable class a bindable Descriptor
-    if klass is None:
-      klass = type(obj)
-    self.im_self = obj
-    self.im_class = klass
-    return self
-
-  def _get_call_args(self, args):
-    """Returns the call arguments, factoring in 'self' if this method is bound.
-    """
-    if self.im_self is not None:
-      return (self.im_self,) + args
-    return args
-
-  def _get_memo_dict(self):
-    """Returns: (dict) the memoization dictionary to store return values in."""
-    memo_dict = None
-    if self.im_self is not None:
-      # Is the instance dictionary defined?
-      memo_dict = getattr(self.im_self, MEMO_INSTANCE_VARIABLE, None)
-      if memo_dict is None:
-        memo_dict = {}
-        setattr(self.im_self, MEMO_INSTANCE_VARIABLE, memo_dict)
-      return memo_dict
-
-    # No instance dict; use our local 'memo_dict'.
-    if self.memo_dict is None:
-      self.memo_dict = {}
-    return self.memo_dict
-
-  def _key(self, args, kwargs):
-    """Returns: the memoization key for a set of function arguments.
-
-    The 'ignored' parameters are removed prior to generating the key.
-    """
-    if self.im_self is not None:
-      # We are bound to an instance; use None for args[0] ("self").
-      args = (None,) + tuple(args)
-
-    call_params = inspect.getcallargs(self.func, *args, **kwargs)
-    return tuple((k, v)
-                 for k, v in sorted(call_params.iteritems())
-                 if k not in self.ignore)
-
-  def __call__(self, *args, **kwargs):
-    """Retrieves the memoized function result.
-
-    If the memoized function has not been memoized, it will be invoked;
-    otherwise, the memoized value will be returned.
-
-    Args:
-      args, kwargs: Function parameters, used to build the memoization key and
-          passed to the function only if its result is not memoized yet.
-    Returns:
-      The memoized function's return value.
-    """
-
-    memo_dict = self._get_memo_dict()
-    memo_key = self._key(args, kwargs)
-    result = memo_dict.get(memo_key, self.EMPTY)
-    if result is self.EMPTY:
-      args = self._get_call_args(args)
-      result = memo_dict[memo_key] = self.func(*args, **kwargs)
-    return result
-
-  def memo_clear(self, *args, **kwargs):
-    """Clears memoization results for a given set of arguments.
-
-    If no arguments are supplied, this will clear all retained memoization for
-    this function.
-
-    If no memoized result is stored for the supplied parameters, this function
-    is a no-op.
-
-    Args:
-      args, kwargs: Memoization function parameters whose memoized value should
-          be cleared.
-    """
-    memo_dict = self._get_memo_dict()
-
-    if args or kwargs:
-      memo_key = self._key(args, kwargs)
-      memo_dict.pop(memo_key, None)
-    else:
-      memo_dict.clear()
-
-
-
-def memo(ignore=None, memo_dict=None):
-  """Generic function memoization decorator.
-
-  This memoizes a specific function using a function key.
-
-  The following example memoizes the function absolutely. It will be executed
-  once and, after that, will only return cached results.
-
-  @memo.memo(ignore=('print_debug_output',))
-  def my_method(print_debug_output=False):
-    # Perform complex calculation
-    if print_debug_output:
-      print 'The calculated result is: %r' % (result)
-    return result
-
-  The following example memoizes for unique values of 'param1' and 'param2',
-  but not 'print_debug_output' since it doesn't affect the function's result:
-
-  @memo.memo(ignore=('print_debug_output',))
-  def my_method(param1, param2, print_debug_output=False):
-    # Perform complex calculation using 'param1' and 'param2' to get 'result'.
-    if print_debug_output:
-      print 'The calculated result for (%r, %r) is: %r' % (
-          param1, param2, result)
-    return result
-
-  Args:
-    ignore: (list) The names of parameters to ignore when memoizing.
-    memo_dict: (dict) Optional explicit dictionary in which to store memoized
-        results.
-  """
-  def wrap(func):
-    return MemoizedFunction(
-        func,
-        ignore=ignore,
-        memo_dict=memo_dict,
-    )
-  return wrap
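-
-
-# Minimal usage sketch (illustrative only; the function is a placeholder):
-@memo()
-def _example_fib(n):  # pragma: no cover
-  # Each distinct value of 'n' is computed once and cached thereafter.
-  return n if n < 2 else _example_fib(n - 1) + _example_fib(n - 2)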
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/README.md b/tools/swarming_client/third_party/infra_libs/time_functions/README.md
deleted file mode 100644
index 4942e0c..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/README.md
+++ /dev/null
@@ -1,341 +0,0 @@
-# A README ON TIME
-
-[TOC]
-
-## ABOUT THIS DOCUMENT
-
-This is a document intended to persuade you, in the course of your computer
-doings, to use a particular representation of time called *stiptime*. Why a
-particular representation of time, and why so staunch about it? The main reason
-is that time is not handled well by libraries in any programming language, and
-misuse leads to subtle bugs. Like memory allocators, representations of time are
-not something application developers give much thought to. Unlike memory
-allocators, you cannot expect your time libraries to "just work." This leads to
-a wide class of bugs that are unintuitive, difficult to anticipate, and
-difficult to test for. stiptime has been designed to avoid many of these
-pitfalls. For a large number of time-related tasks, stiptime just works.
-
-If reading this document causes your head to spin, it has achieved its goal. Use
-stiptime and go on with your day.
-
-## STIPTIME VS BIZARRO TIME
-
-### stiptime
-
-stiptime is a contemporary terrestrial time format meant to reduce the number of
-time-related bugs in computer programs. It makes certain compromises to be
-easier to implement on most operating systems and programming languages circa
-2015-07-10T22:54:32.0Z.
-
-- stiptime is for absolute times: stiptime is meant to represent the absolute
-  (instead of relative) time an event happened. It is not intended for durations
-  or for local representations of time (it will not tell you where the sun is in
-  the sky).
-- stiptime is terrestrial: it is not suitable for astronomical calculations or
-  events happening on Mars. It does not account for [relativistic time
-  dilation](https://en.wikipedia.org/wiki/Barycentric_Dynamical_Time) while
-  traveling through the solar system. It is not suitable for comparing clocks
-  across astronomically vast distances.
-- stiptime is contemporary: it is not particularly useful for describing dates
-  in antiquity, such as anything before the invention of Greenwich Mean Time.
-- stiptime is based on UTC, and uses UTC's concept of leap seconds. Many OSes
-  and date libraries [handle leap
-  seconds](https://tools.ietf.org/html/rfc7164#page-3) in a way that makes
-  duration computations inaccurate across them. Unfortunately, continuous
-  timescales like [TAI](https://en.wikipedia.org/wiki/International_Atomic_Time)
-  are not easily available on modern OSes. In light of the difficulty of
-  obtaining TAI, stiptime compromises by being based on UTC and urges caution
-  when making duration computations.
-
-#### definition
-
-stiptime's format is UTC represented as follows:
-
-	YYYY-MM-DDThh:mm:ssZ
-
-where
-
-	YYYY: four digit year
-	MM: zero padded month
-	DD: zero padded day
-	hh: zero padded 24hr hour
-	mm: zero padded minute
-	ss: zero padded second, with a required fractional part
-
-You may notice that this is exactly the [ISO 8601 date
-format](http://www.w3.org/TR/NOTE-datetime) with Z used to represent UTC. That's
-because it is! Z is the [nautical
-timezone](https://en.wikipedia.org/wiki/Nautical_time) for UTC. Since 'Zulu' is
-the [NATO phonetic](https://en.wikipedia.org/wiki/NATO_phonetic_alphabet)
-representation of Z, UTC (and by extension stiptime) can also be referred to as
-Zulu time. Z is used instead of +00:00 as it unambiguously signals UTC and not
-"put in any arbitrary timezone here." It is also short and compact.
-
-#### examples of stiptime
-
-- 2015-06-30T18:50:50.0Z    *typical representation*
-- 2015-06-30T18:50:50.123Z    *fractional seconds*
-- 2015-06-30T23:59:60.0Z    *leap second*
-- 2015-06-30T23:59:60.123Z    *fractional leap second*
-
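-A minimal parse/format sketch in Python (illustrative only, not part of the
-stiptime definition; it assumes naive UTC datetimes and, like the standard
-library, cannot represent leap seconds):
-
-	import datetime
-
-	def parse_stiptime(s):
-	    # A ':60' leap second will raise ValueError here.
-	    fmt = '%Y-%m-%dT%H:%M:%S.%fZ' if '.' in s else '%Y-%m-%dT%H:%M:%SZ'
-	    return datetime.datetime.strptime(s, fmt)
-
-	def format_stiptime(dt):
-	    # Assumes dt is a naive datetime already expressed in UTC.
-	    s = dt.isoformat()
-	    return (s if '.' in s else s + '.0') + 'Z'
-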
-#### lesser stiptime
-
-Lesser stiptime is to be used only when necessary. Lesser stiptime is Unix time
-or POSIX time, "fractional seconds since the epoch, defined as
-1970-01-01T00:00:00Z. Seconds are corrected such that days are exactly 86400
-seconds long." The reason stiptime is preferred over lesser stiptime is due to
-that last correction. Since UTC occasionally contains days longer than 86400
-seconds, lesser stiptime cannot encode positive leap seconds unambiguously. The
-consequences of this will be described in a later section. stiptime is preferred
-over lesser stiptime, but lesser stiptime is definitely preferred over bizarro
-time.
-
-### bizarro time
-
-Bizarro time is any time format that is used for absolute time that isn't
-stiptime. Some of these may seem obvious and good, but a later section will show
-their pitfalls.
-
-#### examples of bizarro time
-
-- Tomorrow, 3pm
-- Tuesday 14, July 2015 4:38PM PST
-- 2015-06-30T06:50:50.0PMZ    *not 24hr time*
-- 2015/06/30 18:50:50.0Z    *incorrect separators*
-- 2015-06-30T18:50:50.0    *no Z at the end, unknown timezone*
-- 2015-06-30T18:50:50.0 PST    *PST instead of Z*
-- 2015-06-30T18:50:50.0
-  [America/Los_Angeles](https://en.wikipedia.org/wiki/America/Los_Angeles)
-- 2015-06-30T18:50:50.0+00:00    *using +00:00 instead of Z for UTC*
-- 2015-06-30T18:50:50.0-07:00    *not using UTC*
-- 2015-06-30T18:50:50Z    *no fractional seconds*
-
-## STUCK IN TIME JAIL (THE PITFALLS OF BIZARRO TIME)
-
-Using bizarro time will eventually lead to "fun" and subtle bugs in your
-programs. The inevitability of dealing with all of the contingencies of bizarro
-time (and the libraries that handle it) leads to a profound frustration and
-ennui — this is when you are stuck in *time jail*. The entrances to time jail
-are many, but can be broadly classified into implementation-induced time jail
-and timezone-induced time jail.
-
-### implementation-induced time jail
-
-Writing proper time-handling libraries is hard, which means that most time
-libraries have quirks, unexpected behavior or outright bugs. Some of these even
-occur at the operating system level. This section mostly describes the
-[datetime](https://docs.python.org/2/library/datetime.html) module included in
-the python standard library, but other quirks are documented as well.
-
-#### python 2.7 datetime
-
-- python 2.7 datetime lets you print a date that it cannot itself parse
-
-
-	>>> import datetime
-	>>> from dateutil import tz
-	>>> offset = tz.tzoffset(None, -7*60*60)
-	>>> dt = datetime.datetime(2015, 1, 1, 0, 0, 0, tzinfo=offset)
-	>>> a = dt.strftime('%Y-%m-%dT%H:%M:%S %z')
-	>>> a
-	'2015-01-01T00:00:00 -0700'
-	>>> datetime.datetime.strptime(a, '%Y-%m-%dT%H:%M:%S %z')
-	ValueError: 'z' is a bad directive in format '%Y-%m-%dT%H:%M:%S %z'
-
-
-- it lets you print a date that it can parse most of the time, except that one
-  time when you have zero microseconds and then it can't
-
-  See https://bugs.python.org/issue19475 for the problem, and see
-  [zulu.py](https://chromium.googlesource.com/infra/infra/+/master/infra_libs/time_functions/zulu.py)
-  for the 'solution.'
-    
-
-	>>> import datetime
-	>>> a = datetime.datetime(2015, 1, 1, 0, 0, 0, 0).isoformat()
-	>>> b = datetime.datetime(2015, 1, 1, 0, 0, 0, 123).isoformat()
-	>>> a
-	'2015-01-01T00:00:00'
-	>>> b
-	'2015-01-01T00:00:00.000123'
-	>>> datetime.datetime.strptime(a, '%Y-%m-%dT%H:%M:%S')
-	datetime.datetime(2015, 1, 1, 0, 0)
-	>>> datetime.datetime.strptime(b, '%Y-%m-%dT%H:%M:%S')
-	ValueError: unconverted data remains: .000123
-	# okay, let's try with .%f at the end
-	>>> datetime.datetime.strptime(b, '%Y-%m-%dT%H:%M:%S.%f')
-	datetime.datetime(2015, 1, 1, 0, 0, 0, 123)
-	>>> datetime.datetime.strptime(a, '%Y-%m-%dT%H:%M:%S.%f')
-	ValueError: time data '2015-01-01T00:00:00' does not match format '%Y-%m-%dT%H:%M:%S.%f'
-
-- it can't understand leap seconds
-
-
-	>>> import datetime
-	>>> datetime.datetime(2015, 6, 30, 23, 59, 60)
-	ValueError: second must be in 0..59
-
-- it can't tell you what timezone datetime.now() is
-
-
-	>>> import datetime
-	>>> datetime.datetime.now().tzinfo is None
-	True
-	>>> datetime.datetime.now().utcoffset() is None
-	True
-	>>> datetime.datetime.utcnow().tzinfo is None
-	True
-	>>> datetime.datetime.utcnow().utcoffset() is None
-	True
-	>>> datetime.datetime.now().isoformat()
-	'2015-07-15T15:36:23.591431'
-	>>> datetime.datetime.utcnow().isoformat()
-	'2015-07-15T22:36:28.431225'
-
-- it can't mix timezone aware datetimes with naive datetimes (why have naive
-  datetimes to begin with?)
-
-
-	>>> import datetime
-	>>> from dateutil import tz
-	>>> a = datetime.datetime(2015, 1, 1, 0, 0, 0, tzinfo=tz.tzoffset(None, -7*60*60))
-	>>> b = datetime.datetime(2015, 1, 1, 0, 0, 0)
-	>>> a == b
-	TypeError: can't compare offset-naive and offset-aware datetimes
-
-#### that's okay, I'll just use dateutil
-
-- [dateutil](http://labix.org/python-dateutil) is not part of the standard
-  library
-
-  You'll have to venv or wheel it wherever you go. A (non-leap second aware)
-  stiptime parser is a single line of python and requires nothing more than the
-  standard library. A stiptime formatter is 4 lines, and also requires nothing
-  more than the standard library.
-
-- dateutil can't understand leap seconds
-
-
-	>>> import dateutil.parser
-	>>> dateutil.parser.parse('2015-06-30T23:59:60')
-	ValueError: second must be in 0..59
-
-- parser.parse works great, except when it silently doesn't
-
-  (note, running this example will yield different results depending on your
-  local timezone. lol.)
-
-
-	>>> import dateutil.parser
-	>>> a = dateutil.parser.parse('2015-01-01T00:00:00 PST')
-	>>> b = dateutil.parser.parse('2015-01-01T00:00:00 PDT')
-	>>> c = dateutil.parser.parse('2015-01-01T00:00:00 EST')
-	>>> a.isoformat()
-	'2015-01-01T00:00:00-08:00'		# great!
-	>>> b.isoformat()
-	'2015-01-01T00:00:00-08:00'		# uh...
-	>>> c.isoformat()
-	'2015-01-01T00:00:00'					# uhhhhhhh
-	>>> c == a
-	TypeError: can't compare offset-naive and offset-aware datetimes
-
-#### python 2.7 time
-
-- time.time()'s definition is incorrect
-
-  According to [the python docs](https://docs.python.org/2/library/time.html),
-  time.time() "[returns] the time in seconds since the epoch as a floating point
-  number." Except it doesn't, as (at least on unix) days are corrected to be
-  86400 seconds long. Thus the true definition of time.time() should be "the
-  time in seconds since the epoch minus any UTC leap seconds added since the
-  epoch."
-
-### timezone-induced time jail
-
-These are a collection of gotchas that occur even if your timezone-handling
-libraries are perfect. They arise purely out of not using UTC for internal
-computations.
-
-- dates which represent the same moment in time can have different weekdays or
-  other attributes
-
-
-	>>> import dateutil.parser
-	>>> a = dateutil.parser.parse('2015-06-17T23:00:00 PDT')
-	>>> b = dateutil.parser.parse('2015-06-18T06:00:00 UTC')
-	>>> a == b
-	True
-	>>> a.weekday()
-	2
-	>>> b.weekday()
-	3
-
-- it's easy to write code thinking it's in one timezone when it's really in
-  another
-
-  - pop quiz: what timezone does the AppEngine datastore store times in?
-  - pop quiz: what months does daylight savings take effect in Australia? North
-    America?
-  - pop quiz: what timezone does buildbot write twistd.log in? What timezone
-    does it write http.log in?
-
-
-- you can have timezone un-aware code in a timezone that shifts (PST -> PDT).
-
-  Now you have graphs wrapping back on themselves, systems restarting repeatedly
-  for an hour, or silent data corruption. This is the classic timezone bug.
-
-- ambiguous encoding
-
-  If you're not careful, you can encode dates which refer to two instances in
-  time. The date '2015-11-01T01:30:00 Pacific' or '2015-11-01T01:30:00
-  America/Los_Angeles' refers to *two* distinct times:
-  '2015-11-01T01:30:00-0800' and '2015-11-01T01:30:00-0700'. Imagine an alarm
-  clock or cron job which triggers on that.
-
-- illegal dates that aren't obviously illegal
-
-  The opposite of ambiguous encoding: did you know that '2015-03-08T02:30:00
-  Pacific' doesn't exist? It jumped from 2015-03-08T01:59:59 immediately to
-  2015-03-08T03:00:00.
-
-- what does a configuration file look like where any timezone is allowed?
-
-  You've now required everyone to convert every timezone into every timezone,
-  instead of every timezone into one (UTC):
-
-
-	['2015-06-18T05:00:00+00:00',
-	 '2015-06-17T23:00:00-07:00',
-	 '2015-06-18T06:00:00-10:00',
-	]
-
-### leap second time jail
-
-Finally, there is a small jail associated with errors occurring due to leap
-seconds themselves. Unfortunately, stiptime is *not* immune to these.
-
-- ambiguous unix time
-
-
-	2015-06-30T23:59:59.0Z -> 1435708799.0
-	2015-06-30T23:59:60.0Z -> 1435708799.0
-
-- illegal unix time
-
-  This hasn't happened yet, but if a negative leap second ever occurs, there
-  will be a floating point time which 'never happened.'
-
-- calculating durations using start/stop times
-
-  Of course, calculating durations across leap seconds can cause slight errors,
-  mistriggers or even retriggers. Since they happen infrequently (and TAI is not
-  commonly available), stiptime has chosen to be susceptible.
-
-## CONCLUSION
-
-It is my hope that after reading this document, you will consider stiptime and,
-occasionally, lesser stiptime to be the proper way to represent time in stored
-formats. I firmly believe time should be displayed to humans in their local
-format — but only in ephemeral displays. A local absolute time should never
-touch a disk. Join me in using stiptime and get back some sanity in your life.
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/__init__.py b/tools/swarming_client/third_party/infra_libs/time_functions/__init__.py
deleted file mode 100644
index 50b23df..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/parser.py b/tools/swarming_client/third_party/infra_libs/time_functions/parser.py
deleted file mode 100644
index dacc7c3..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/parser.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to parse time/duration arguments."""
-
-
-import argparse
-import datetime
-import re
-
-
-_TIMEDELTA_CONV = {
-  'us': datetime.timedelta(microseconds=1),
-  'ms': datetime.timedelta(milliseconds=1),
-  's': datetime.timedelta(seconds=1),
-  'm': datetime.timedelta(minutes=1),
-  'h': datetime.timedelta(hours=1),
-  'd': datetime.timedelta(days=1),
-}
-_TIMEDELTA_RE = re.compile(r'(\d+)(\w+)')
-
-
-def parse_timedelta(v):
-  """Returns (datetime.timedelta) The parsed timedelta.
-
-  Args:
-    v (str): The time delta string, a comma-delimited set of <count><unit>
-        tokens comprising the timedelta (e.g., 10d,2h is 10 days, 2 hours).
-
-  Raises:
-    ValueError: If parsing failed.
-  """
-  result = datetime.timedelta()
-  for comp in v.split(','):
-    match = _TIMEDELTA_RE.match(comp)
-    if match is None:
-      raise ValueError('Invalid timedelta token (%s)' % (comp,))
-    count, unit = int(match.group(1)), match.group(2)
-    unit_value = _TIMEDELTA_CONV.get(unit)
-    if unit_value is None:
-      raise ValueError('Invalid timedelta token unit (%s)' % (unit,))
-    result += (unit_value * count)
-  return result
-
-
-def argparse_timedelta_type(v):
-  """Returns (datetime.timedelta) The parsed timedelta.
-
-  This is an argparse-compatible version of `parse_timedelta` that raises an
-  argparse.ArgumentTypeError on failure instead of a ValueError.
-
-  Raises:
-    argparse.ArgumentTypeError: If parsing failed.
-  """
-  try:
-    return parse_timedelta(v)
-  except ValueError as e:
-    raise argparse.ArgumentTypeError(e.message)
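-
-
-# Minimal usage sketch (illustrative only):
-def _example_parse_timedelta():  # pragma: no cover
-  # '10d,2h' means 10 days and 2 hours; '250ms' means 250 milliseconds.
-  assert parse_timedelta('10d,2h') == datetime.timedelta(days=10, hours=2)
-  assert parse_timedelta('250ms') == datetime.timedelta(milliseconds=250)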
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/testing.py b/tools/swarming_client/third_party/infra_libs/time_functions/testing.py
deleted file mode 100644
index 3eef8e3..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/testing.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Provides functions to mock current time in tests."""
-
-import datetime
-import functools
-import mock
-import pytz
-import tzlocal
-
-
-def mock_datetime_utc(*dec_args, **dec_kwargs):
-  """Overrides built-in datetime and date classes to always return a given time.
-
-  Args:
-    Same arguments as datetime.datetime accepts to mock UTC time.
-  
-  Example usage:
-    @mock_datetime_utc(2015, 10, 11, 20, 0, 0)
-    def my_test(self):
-      hour_ago_utc = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
-      self.assertEqual(hour_ago_utc, datetime.datetime(2015, 10, 11, 19, 0, 0))
-
-  Note that if you are using the now() and today() methods, you should also use
-  the mock_timezone decorator to have consistent test results across timezones:
-
-    @mock_timezone('US/Pacific')
-    @mock_datetime_utc(2015, 10, 11, 20, 0, 0)
-    def my_test(self):
-      local_dt = datetime.datetime.now()
-      self.assertEqual(local_dt, datetime.datetime(2015, 10, 11, 12, 0, 0))
-  """
-  # We record original values currently stored in the datetime.datetime and
-  # datetime.date here. Note that they are not necessarily vanilla Python types
-  # and can already be mock classes - this can happen if nested mocking is used.
-  original_datetime = datetime.datetime
-  original_date = datetime.date
-
-  # Our metaclass must be derived from the parent class metaclass, but if the
-  # parent class doesn't have one, we use 'type' type.
-  class MockDateTimeMeta(original_datetime.__dict__.get('__metaclass__', type)):
-    @classmethod
-    def __instancecheck__(cls, instance):
-      return isinstance(instance, original_datetime)
-
-  class _MockDateTime(original_datetime):
-    __metaclass__ = MockDateTimeMeta
-    mock_utcnow = original_datetime(*dec_args, **dec_kwargs)
-  
-    @classmethod
-    def utcnow(cls):
-      return cls.mock_utcnow
-  
-    @classmethod
-    def now(cls, tz=None):
-      if not tz:
-        tz = tzlocal.get_localzone()
-      tzaware_utcnow = pytz.utc.localize(cls.mock_utcnow)
-      return tz.normalize(tzaware_utcnow.astimezone(tz)).replace(tzinfo=None)
-  
-    @classmethod
-    def today(cls):
-      return cls.now().date()
-
-    @classmethod
-    def fromtimestamp(cls, timestamp, tz=None):
-      if not tz:
-        # TODO(sergiyb): This may fail for some unclear reason because pytz
-        # doesn't find normal timezones such as 'Europe/Berlin'. This seems to
-        # happen only in appengine/chromium_try_flakes tests, and not in tests
-        # for this module itself.
-        tz = tzlocal.get_localzone()
-      tzaware_dt = pytz.utc.localize(cls.utcfromtimestamp(timestamp))
-      return tz.normalize(tzaware_dt.astimezone(tz)).replace(tzinfo=None)
-  
-  # Our metaclass must be derived from the parent class metaclass, but if the
-  # parent class doesn't have one, we use 'type' type.
-  class MockDateMeta(original_date.__dict__.get('__metaclass__', type)):
-    @classmethod
-    def __instancecheck__(cls, instance):
-      return isinstance(instance, original_date)
-
-  class _MockDate(original_date):
-    __metaclass__ = MockDateMeta
-
-    @classmethod
-    def today(cls):
-      return _MockDateTime.today()
-
-    @classmethod
-    def fromtimestamp(cls, timestamp, tz=None):
-      return _MockDateTime.fromtimestamp(timestamp, tz).date()
-
-  def decorator(func):
-    @functools.wraps(func)
-    def wrapper(*args, **kwargs):
-      with mock.patch('datetime.datetime', _MockDateTime):
-        with mock.patch('datetime.date', _MockDate):
-          return func(*args, **kwargs)
-    return wrapper
-  return decorator
-
-
-def mock_timezone(tzname):
-  """Mocks tzlocal.get_localzone method to always return a given timezone.
-
-  This should be used in combination with mock_datetime_utc in order to achieve
-  consistent test results across timezones if datetime.now, datetime.today or
-  date.today functions are used.
-  
-  Args:
-    tzname: Name of the timezone to be used (as passed to pytz.timezone).
-  """
-  # TODO(sergiyb): Also mock other common libraries, e.g. time, pytz.reference.
-  def decorator(func):
-    @functools.wraps(func)
-    def wrapper(*args, **kwargs):
-      with mock.patch('tzlocal.get_localzone', lambda: pytz.timezone(tzname)):
-        return func(*args, **kwargs)
-    return wrapper
-  return decorator
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/timestamp.py b/tools/swarming_client/third_party/infra_libs/time_functions/timestamp.py
deleted file mode 100644
index 2bda0a1..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/timestamp.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Provides common timestamp functions."""
-
-import datetime
-import pytz
-
-
-def utctimestamp(dt):
-  """Converts a datetime object into a floating point timestamp since the epoch.
-
-  dt is the datetime to convert. If dt is a naive (non-tz-aware) object, it
-     will implicitly be treated as UTC.
-  """
-  epoch = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.UTC)
-  # This check is from http://stackoverflow.com/a/27596917/3984761.
-  if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
-    dt = dt.replace(tzinfo=pytz.UTC)
-  return (dt - epoch).total_seconds()
-
-
-def utcnow_ts():  # pragma: no cover
-  """Returns the floating point number of seconds since the UTC epoch."""
-  return utctimestamp(datetime.datetime.utcnow())
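-
-
-# Minimal usage sketch (illustrative only): naive datetimes are treated as UTC,
-# so one second past the epoch maps to 1.0.
-def _example_utctimestamp():  # pragma: no cover
-  return utctimestamp(datetime.datetime(1970, 1, 1, 0, 0, 1))  # -> 1.0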
diff --git a/tools/swarming_client/third_party/infra_libs/time_functions/zulu.py b/tools/swarming_client/third_party/infra_libs/time_functions/zulu.py
deleted file mode 100644
index 9429125..0000000
--- a/tools/swarming_client/third_party/infra_libs/time_functions/zulu.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Provides functions for parsing and outputting Zulu time."""
-
-import datetime
-import pytz
-
-from infra_libs.time_functions import timestamp
-
-
-def parse_zulu_time(string):
-  """Parses a Zulu time string, returning None if unparseable."""
-
-  # Ugh https://bugs.python.org/issue19475.
-  zulu_format = "%Y-%m-%dT%H:%M:%S"
-  if '.' in string:
-    zulu_format += ".%f"
-  zulu_format += "Z"
-  try:
-    return datetime.datetime.strptime(string, zulu_format)
-  except ValueError:
-    return None
-
-
-def parse_zulu_ts(string):
-  """Parses Zulu time and converts into a timestamp or None."""
-  zuluparse = parse_zulu_time(string)
-  if zuluparse is None:
-    return None
-  return timestamp.utctimestamp(zuluparse)
-
-
-def to_zulu_string(dt):
-  """Returns a Zulu time string from a datetime.
-
-  Assumes naive datetime objects are in UTC.
-  Ensures the output always has a floating-point number of seconds.
-  """
-
-  # Assume non-tz-aware datetimes are in UTC.
-  if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
-    dt = dt.replace(tzinfo=pytz.UTC)
-
-  # Convert datetime into UTC.
-  isodate = dt.astimezone(pytz.UTC).isoformat().split('+')[0]
-
-  # Add fractional seconds if not present.
-  if '.' not in isodate:
-    isodate += '.0'
-
-  return isodate + 'Z'
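For reference, a round trip through the two helpers above; the commented values are what this code produces:

    import datetime

    from infra_libs.time_functions import zulu

    dt = datetime.datetime(2016, 1, 1, 12, 30, 0)
    s = zulu.to_zulu_string(dt)       # '2016-01-01T12:30:00.0Z'
    assert zulu.parse_zulu_time(s) == dt
    assert zulu.parse_zulu_ts(s) == 1451651400.0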
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/__init__.py b/tools/swarming_client/third_party/infra_libs/ts_mon/__init__.py
deleted file mode 100644
index bda03e9..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/__init__.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from infra_libs.ts_mon.config import add_argparse_options
-from infra_libs.ts_mon.config import process_argparse_options
-
-from infra_libs.ts_mon.common.distribution import Distribution
-from infra_libs.ts_mon.common.distribution import FixedWidthBucketer
-from infra_libs.ts_mon.common.distribution import GeometricBucketer
-
-from infra_libs.ts_mon.common.errors import MonitoringError
-from infra_libs.ts_mon.common.errors import MonitoringDecreasingValueError
-from infra_libs.ts_mon.common.errors import MonitoringDuplicateRegistrationError
-from infra_libs.ts_mon.common.errors import MonitoringIncrementUnsetValueError
-from infra_libs.ts_mon.common.errors import MonitoringInvalidFieldTypeError
-from infra_libs.ts_mon.common.errors import MonitoringInvalidValueTypeError
-from infra_libs.ts_mon.common.errors import MonitoringTooManyFieldsError
-from infra_libs.ts_mon.common.errors import MonitoringNoConfiguredMonitorError
-from infra_libs.ts_mon.common.errors import MonitoringNoConfiguredTargetError
-
-from infra_libs.ts_mon.common.helpers import ScopedIncrementCounter
-
-from infra_libs.ts_mon.common.interface import close
-from infra_libs.ts_mon.common.interface import flush
-from infra_libs.ts_mon.common.interface import reset_for_unittest
-
-from infra_libs.ts_mon.common.metrics import BooleanMetric
-from infra_libs.ts_mon.common.metrics import CounterMetric
-from infra_libs.ts_mon.common.metrics import CumulativeDistributionMetric
-from infra_libs.ts_mon.common.metrics import CumulativeMetric
-from infra_libs.ts_mon.common.metrics import DistributionMetric
-from infra_libs.ts_mon.common.metrics import FloatMetric
-from infra_libs.ts_mon.common.metrics import GaugeMetric
-from infra_libs.ts_mon.common.metrics import NonCumulativeDistributionMetric
-from infra_libs.ts_mon.common.metrics import MetricsDataUnits
-from infra_libs.ts_mon.common.metrics import StringMetric
-
-from infra_libs.ts_mon.common.targets import TaskTarget
-from infra_libs.ts_mon.common.targets import DeviceTarget
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/__init__.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/__init__.py
deleted file mode 100644
index 50b23df..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/distribution.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/distribution.py
deleted file mode 100644
index 44c28c1..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/distribution.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import bisect
-import collections
-
-
-class Bucketer(object):
-  """Bucketing function for histograms recorded by the Distribution class."""
-
-  def __init__(self, width, growth_factor, num_finite_buckets):
-    """The bucket sizes are controlled by width and growth_factor, and the total
-    number of buckets is set by num_finite_buckets:
-
-    Args:
-      width: fixed size of each bucket.
-      growth_factor: if non-zero, the size of each bucket increases by another
-          multiplicative factor of this factor (see lower bound formula below).
-      num_finite_buckets: the number of finite buckets.  There are two
-          additional buckets - an underflow bucket with a lower bound of
-          -Infinity and an overflow bucket with an upper bound of +Infinity.
-
-    Specify a width for fixed-size buckets or specify a growth_factor for bucket
-    sizes that follow a geometric progression.  Specifying both is valid as
-    well::
-
-      lower bound of bucket i = width * i + growth_factor ^ (i - 1)
-    """
-
-    if num_finite_buckets < 0:
-      raise ValueError('num_finite_buckets must be >= 0 (was %d)' %
-          num_finite_buckets)
-
-    self.width = width
-    self.growth_factor = growth_factor
-    self.num_finite_buckets = num_finite_buckets
-    self.total_buckets = num_finite_buckets + 2
-    self.underflow_bucket = 0
-    self.overflow_bucket = self.total_buckets - 1
-
-    self._lower_bounds = list(self._generate_lower_bounds())
-
-  def _generate_lower_bounds(self):
-    yield float('-Inf')
-    yield 0
-
-    previous = 0
-    for i in xrange(self.num_finite_buckets):
-      lower_bound = self.width * (i + 1)
-      if self.growth_factor != 0:
-        lower_bound += self.growth_factor ** i
-
-      if lower_bound <= previous:
-        raise ValueError('bucket boundaries must be monotonically increasing')
-      yield lower_bound
-      previous = lower_bound
-
-  def bucket_for_value(self, value):
-    """Returns the index of the bucket that this value belongs to."""
-
-    # bisect.bisect_left is wrong because the buckets are of [lower, upper) form
-    return bisect.bisect(self._lower_bounds, value) - 1
-
-  def bucket_boundaries(self, bucket):
-    """Returns a tuple that is the [lower, upper) bounds of this bucket.
-
-    The lower bound of the first bucket is -Infinity, and the upper bound of the
-    last bucket is +Infinity.
-    """
-
-    if bucket < 0 or bucket >= self.total_buckets:
-      raise IndexError('bucket %d out of range' % bucket)
-    if bucket == self.total_buckets - 1:
-      return (self._lower_bounds[bucket], float('Inf'))
-    return (self._lower_bounds[bucket], self._lower_bounds[bucket + 1])
-
-  def all_bucket_boundaries(self):
-    """Generator that produces the [lower, upper) bounds of all buckets.
-
-    This is equivalent to calling::
-
-      (b.bucket_boundaries(i) for i in xrange(b.total_buckets))
-
-    but is more efficient.
-    """
-
-    lower = self._lower_bounds[0]
-    for i in xrange(1, self.total_buckets):
-      upper = self._lower_bounds[i]
-      yield (lower, upper)
-      lower = upper
-
-    yield (lower, float('Inf'))
-
-
-def FixedWidthBucketer(width, num_finite_buckets=100):
-  """Convenience function that returns a fixed width Bucketer."""
-  return Bucketer(width=width, growth_factor=0.0,
-      num_finite_buckets=num_finite_buckets)
-
-
-def GeometricBucketer(growth_factor=10**0.2, num_finite_buckets=100):
-  """Convenience function that returns a geometric progression Bucketer."""
-  return Bucketer(width=0, growth_factor=growth_factor,
-      num_finite_buckets=num_finite_buckets)
-
-
-class Distribution(object):
-  """Holds a histogram distribution.
-
-  Buckets are chosen for values by the provided Bucketer.
-  """
-
-  def __init__(self, bucketer):
-    self.bucketer = bucketer
-    self.sum = 0
-    self.count = 0
-    self.buckets = collections.defaultdict(int)
-
-  def add(self, value):
-    self.buckets[self.bucketer.bucket_for_value(value)] += 1
-    self.sum += value
-    self.count += 1
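A worked example of the bucketing rules described in the Bucketer docstring, using a small fixed-width bucketer (the commented values are what this code computes):

    from infra_libs.ts_mon.common import distribution

    b = distribution.FixedWidthBucketer(width=10, num_finite_buckets=5)
    # Lower bounds are [-Inf, 0, 10, 20, 30, 40, 50]: bucket 0 is the
    # underflow bucket and bucket 6 is the overflow bucket.
    assert b.bucket_for_value(-1) == b.underflow_bucket
    assert b.bucket_for_value(25) == 3
    assert b.bucket_boundaries(3) == (20, 30)
    assert b.bucket_for_value(1000) == b.overflow_bucket

    d = distribution.Distribution(b)
    for value in (3, 25, 25):
      d.add(value)
    assert d.count == 3 and d.sum == 53
    assert d.buckets == {1: 1, 3: 2}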
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/errors.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/errors.py
deleted file mode 100644
index 29f3cd1..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/errors.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes representing errors that can be raised by the monitoring library."""
-
-
-class MonitoringError(Exception):
-  """Base class for exceptions raised by this module."""
-
-
-class MonitoringDecreasingValueError(MonitoringError):
-  """Raised when setting a metric value that should increase but doesn't."""
-
-  def __init__(self, metric, old_value, new_value):
-    self.metric = metric
-    self.old_value = old_value
-    self.new_value = new_value
-
-  def __str__(self):
-    return ('Monotonically increasing metric "%s" was given value "%s", which '
-            'is not greater than or equal to "%s".' % (
-                self.metric, self.new_value, self.old_value))
-
-
-class MonitoringDuplicateRegistrationError(MonitoringError):
-  """Raised when trying to register a metric with the same name as another."""
-
-  def __init__(self, metric):
-    self.metric = metric
-
-  def __str__(self):
-    return 'Different metrics with the same name "%s" were both registered.' % (
-        self.metric)
-
-
-class MonitoringIncrementUnsetValueError(MonitoringError):
-  """Raised when trying to increment a metric which hasn't been set."""
-
-  def __init__(self, metric):
-    self.metric = metric
-
-  def __str__(self):
-    return 'Metric "%s" was incremented without first setting a value.' % (
-        self.metric)
-
-
-class MonitoringInvalidValueTypeError(MonitoringError):
-  """Raised when sending a metric value is not a valid type."""
-
-  def __init__(self, metric, value):
-    self.metric = metric
-    self.value = value
-
-  def __str__(self):
-    return 'Metric "%s" was given invalid value "%s" (%s).' % (
-        self.metric, self.value, type(self.value))
-
-
-class MonitoringInvalidFieldTypeError(MonitoringError):
-  """Raised when sending a metric with a field value of an invalid type."""
-
-  def __init__(self, metric, field, value):
-    self.metric = metric
-    self.field = field
-    self.value = value
-
-  def __str__(self):
-    return 'Metric "%s" was given field "%s" with invalid value "%s" (%s).' % (
-        self.metric, self.field, self.value, type(self.value))
-
-
-class MonitoringTooManyFieldsError(MonitoringError):
-  """Raised when sending a metric with more than 7 fields."""
-
-  def __init__(self, metric, fields):
-    self.metric = metric
-    self.fields = fields
-
-  def __str__(self):
-    return 'Metric "%s" was given too many (%d > 7) fields: %s.' % (
-        self.metric, len(self.fields), self.fields)
-
-
-class MonitoringNoConfiguredMonitorError(MonitoringError):
-  """Raised when sending a metric without configuring the global Monitor."""
-
-  def __init__(self, metric):
-    self.metric = metric
-
-  def __str__(self):
-    if self.metric is not None:
-      return 'Metric "%s" was sent before initializing the global Monitor.' % (
-          self.metric)
-    else:
-      return 'Metrics were sent before initializing the global Monitor.'
-
-
-class MonitoringNoConfiguredTargetError(MonitoringError):
-  """Raised when sending a metric with no global nor local Target."""
-
-  def __init__(self, metric):
-    self.metric = metric
-
-  def __str__(self):
-    return 'Metric "%s" was sent with no Target configured.' % (self.metric)
-
-
-class UnknownModificationTypeError(MonitoringError):
-  """Raised when using a Modification with an unknown type value."""
-
-  def __init__(self, mod_type):
-    self.mod_type = mod_type
-
-  def __str__(self):
-    return 'Unknown modification type "%s"' % self.mod_type
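For reference, how one of the exception classes above renders its message (the metric name is just an example):

    from infra_libs.ts_mon.common import errors

    err = errors.MonitoringDecreasingValueError('myapp/queue_size', 10, 7)
    # str(err):
    # Monotonically increasing metric "myapp/queue_size" was given value "7",
    # which is not greater than or equal to "10".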
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/helpers.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/helpers.py
deleted file mode 100644
index 80a3584..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/helpers.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Helper classes that make it easier to instrument code for monitoring."""
-
-
-class ScopedIncrementCounter(object):
-  """Increment a counter when the wrapped code exits.
-
-  The counter will be given a 'status' label whose value is set to 'success' or
-  'failure' depending on whether the wrapped code raised an exception.
-
-  Example:
-
-    mycounter = Counter('foo/stuff_done')
-    with ScopedIncrementCounter(mycounter):
-      DoStuff()
-
-  To set a custom status label and status value:
-
-    mycounter = Counter('foo/http_requests')
-    with ScopedIncrementCounter(mycounter, 'response_code') as sc:
-      status = MakeHttpRequest()
-      sc.set_status(status)  # This custom status now won't be overwritten if
-                             # the code later raises an exception.
-  """
-
-  def __init__(self, counter, label='status', success_value='success',
-               failure_value='failure'):
-    self.counter = counter
-    self.label = label
-    self.success_value = success_value
-    self.failure_value = failure_value
-    self.status = None
-
-  def set_failure(self):
-    self.set_status(self.failure_value)
-
-  def set_status(self, status):
-    self.status = status
-
-  def __enter__(self):
-    self.status = None
-    return self
-
-  def __exit__(self, exc_type, exc_value, traceback):
-    if self.status is None:
-      if exc_type is None:
-        self.status = self.success_value
-      else:
-        self.status = self.failure_value
-    self.counter.increment({self.label: self.status})
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/http_metrics.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/http_metrics.py
deleted file mode 100644
index 0df4fac..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/http_metrics.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from infra_libs.ts_mon.common import metrics
-
-
-# Extending HTTP status codes to client-side errors and timeouts.
-STATUS_OK = 200
-STATUS_ERROR = 901
-STATUS_TIMEOUT = 902
-STATUS_EXCEPTION = 909
-
-
-request_bytes = metrics.CumulativeDistributionMetric('http/request_bytes',
-    description='Bytes sent per http request (body only).')
-response_bytes = metrics.CumulativeDistributionMetric('http/response_bytes',
-    description='Bytes received per http request (content only).')
-durations = metrics.CumulativeDistributionMetric('http/durations',
-    description='Time elapsed between sending a request and getting a'
-                ' response (including parsing) in milliseconds.')
-response_status = metrics.CounterMetric('http/response_status',
-    description='Number of responses received by HTTP status code.')
-
-
-server_request_bytes = metrics.CumulativeDistributionMetric(
-    'http/server_request_bytes',
-    description='Bytes received per http request (body only).')
-server_response_bytes = metrics.CumulativeDistributionMetric(
-    'http/server_response_bytes',
-    description='Bytes sent per http request (content only).')
-server_durations = metrics.CumulativeDistributionMetric('http/server_durations',
-    description='Time elapsed between receiving a request and sending a'
-                ' response (including parsing) in milliseconds.')
-server_response_status = metrics.CounterMetric('http/server_response_status',
-    description='Number of responses sent by HTTP status code.')
-
-
-def update_http_server_metrics(endpoint_name, response_status_code, elapsed_ms,
-                               request_size=None, response_size=None,
-                               user_agent=None):
-  fields = {'status': response_status_code, 'name': endpoint_name,
-            'is_robot': False}
-  if user_agent is not None:
-    # We must not log user agents, but we can store whether or not the
-    # user agent string indicates that the requester was a Google bot.
-    fields['is_robot'] = (
-        'GoogleBot' in user_agent or
-        'GoogleSecurityScanner' in user_agent or
-        user_agent == 'B3M/prober')
-
-  server_durations.add(elapsed_ms, fields=fields)
-  server_response_status.increment(fields=fields)
-  if request_size is not None:
-    server_request_bytes.add(request_size, fields=fields)
-  if response_size is not None:
-    server_response_bytes.add(response_size, fields=fields)
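A sketch of how a server handler might feed these metrics; the wrapper, endpoint name, and request/response objects here are purely illustrative:

    import time

    from infra_libs.ts_mon.common import http_metrics

    def instrumented_handler(request, handler):   # illustrative wrapper
      start = time.time()
      response = handler(request)
      elapsed_ms = (time.time() - start) * 1000
      http_metrics.update_http_server_metrics(
          '/api/example',                          # illustrative endpoint name
          response.status_code,
          elapsed_ms,
          request_size=len(request.body),
          response_size=len(response.body),
          user_agent=request.headers.get('User-Agent'))
      return response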
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/interface.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/interface.py
deleted file mode 100644
index df3f502..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/interface.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes representing the monitoring interface for tasks or devices.
-
-Usage:
-  import argparse
-  from infra_libs import ts_mon
-
-  p = argparse.ArgumentParser()
-  ts_mon.add_argparse_options(p)
-  args = p.parse_args()  # Must contain info for Monitor (and optionally Target)
-  ts_mon.process_argparse_options(args)
-
-  # Will use the default Target set up via command line args:
-  m = ts_mon.BooleanMetric('/my/metric/name', fields={'foo': 1, 'bar': 'baz'})
-  m.set(True)
-
-  # Use a custom Target:
-  t = ts_mon.TaskTarget('service', 'job', 'region', 'host')  # or DeviceTarget
-  m2 = ts_mon.GaugeMetric('/my/metric/name2', fields={'asdf': 'qwer'}, target=t)
-  m2.set(5)
-
-Library usage:
-  from infra_libs.ts_mon import CounterMetric
-  # No need to set up Monitor or Target, assume calling code did that.
-  c = CounterMetric('/my/counter', fields={'source': 'mylibrary'})
-  c.set(0)
-  for x in range(100):
-    c.increment()
-"""
-
-import datetime
-import logging
-import random
-import threading
-import time
-
-from infra_libs.ts_mon.common import errors
-from infra_libs.ts_mon.common import metric_store
-from infra_libs.ts_mon.protos import metrics_pb2
-
-# The maximum number of MetricsData messages to include in each HTTP request.
-# MetricsCollections larger than this will be split into multiple requests.
-METRICS_DATA_LENGTH_LIMIT = 1000
-
-
-class State(object):
-  """Package-level state is stored here so that it is easily accessible.
-
-  Configuration is kept in this one object at the global level so that all
-  libraries in use by the same tool or service can take advantage of the
-  same configuration.
-  """
-
-  def __init__(self, store_ctor=None, target=None):
-    """Optional arguments are for unit tests."""
-    if store_ctor is None:  # pragma: no branch
-      store_ctor = metric_store.InProcessMetricStore
-    # The Monitor object that will be used to send all metrics.
-    self.global_monitor = None
-    # The Target object that will be paired with all metrics that don't supply
-    # their own.
-    self.target = target
-    # The flush mode being used to control when metrics are pushed.
-    self.flush_mode = None
-    # A predicate to determine if metrics should be sent.
-    self.flush_enabled_fn = lambda: True
-    # The background thread that flushes metrics every
-    # --ts-mon-flush-interval-secs seconds.  May be None if
-    # --ts-mon-flush != 'auto' or --ts-mon-flush-interval-secs == 0.
-    self.flush_thread = None
-    # All metrics created by this application.
-    self.metrics = {}
-    # The MetricStore object that holds the actual metric values.
-    self.store = store_ctor(self)
-    # Cached time of the last flush. Useful mostly in AppEngine apps.
-    self.last_flushed = datetime.datetime.utcfromtimestamp(0)
-    # Metric name prefix
-    self.metric_name_prefix = '/chrome/infra/'
-
-  def reset_for_unittest(self):
-    self.metrics = {}
-    self.last_flushed = datetime.datetime.utcfromtimestamp(0)
-    self.store.reset_for_unittest()
-
-state = State()
-
-
-def flush():
-  """Send all metrics that are registered in the application."""
-
-  if not state.flush_enabled_fn():
-    logging.debug('ts_mon: sending metrics is disabled.')
-    return
-
-  if not state.global_monitor or not state.target:
-    raise errors.MonitoringNoConfiguredMonitorError(None)
-
-  proto = metrics_pb2.MetricsCollection()
-
-  for target, metric, start_time, fields_values in state.store.get_all():
-    for fields, value in fields_values.iteritems():
-      if len(proto.data) >= METRICS_DATA_LENGTH_LIMIT:
-        state.global_monitor.send(proto)
-        del proto.data[:]
-
-      metric.serialize_to(proto, start_time, fields, value, target)
-
-  state.global_monitor.send(proto)
-  state.last_flushed = datetime.datetime.utcnow()
-
-
-def register(metric):
-  """Adds the metric to the list of metrics sent by flush().
-
-  This is called automatically by Metric's constructor.
-  """
-  # If someone is registering the same metric object twice, that's okay, but
-  # registering two different metric objects with the same metric name is not.
-  for m in state.metrics.values():
-    if metric == m:
-      state.metrics[metric.name] = metric
-      return
-  if metric.name in state.metrics:
-    raise errors.MonitoringDuplicateRegistrationError(metric.name)
-
-  state.metrics[metric.name] = metric
-
-
-def unregister(metric):
-  """Removes the metric from the list of metrics sent by flush()."""
-  del state.metrics[metric.name]
-
-
-def close():
-  """Stops any background threads and waits for them to exit."""
-  if state.flush_thread is not None:
-    state.flush_thread.stop()
-
-
-def reset_for_unittest(disable=False):
-  state.reset_for_unittest()
-  if disable:
-    state.flush_enabled_fn = lambda: False
-
-
-class _FlushThread(threading.Thread):
-  """Background thread that flushes metrics on an interval."""
-
-  def __init__(self, interval_secs, stop_event=None):
-    super(_FlushThread, self).__init__(name='ts_mon')
-
-    if stop_event is None:
-      stop_event = threading.Event()
-
-    self.daemon = True
-    self.interval_secs = interval_secs
-    self.stop_event = stop_event
-
-  def _flush_and_log_exceptions(self):
-    try:
-      flush()
-    except Exception:
-      logging.exception('Automatic monitoring flush failed.')
-
-  def run(self):
-    # Jitter the first interval so tasks started at the same time (say, by cron)
-    # on different machines don't all send metrics simultaneously.
-    next_timeout = random.uniform(self.interval_secs / 2.0, self.interval_secs)
-
-    while True:
-      if self.stop_event.wait(next_timeout):
-        self._flush_and_log_exceptions()
-        return
-
-      # Try to flush every N seconds exactly so rate calculations are more
-      # consistent.
-      start = time.time()
-      self._flush_and_log_exceptions()
-      flush_duration = time.time() - start
-      next_timeout = self.interval_secs - flush_duration
-
-      if next_timeout < 0:
-        logging.warning(
-            'Last monitoring flush took %f seconds (longer than '
-            '--ts-mon-flush-interval-secs = %f seconds)',
-            flush_duration, self.interval_secs)
-        next_timeout = 0
-
-  def stop(self):
-    """Stops the background thread and performs a final flush."""
-
-    self.stop_event.set()
-    self.join()
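The module docstring above shows the production setup; for unit tests, reset_for_unittest is the hook to use, roughly like this (the test class and metric name are illustrative):

    import unittest

    from infra_libs import ts_mon


    class MyMetricsTest(unittest.TestCase):

      def setUp(self):
        # Clear all metric values and disable sending between test cases.
        ts_mon.reset_for_unittest(disable=True)

      def test_counter(self):
        requests = ts_mon.CounterMetric('myapp/requests')
        requests.increment()
        self.assertEqual(1, requests.get())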
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/metric_store.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/metric_store.py
deleted file mode 100644
index 4d21fee..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/metric_store.py
+++ /dev/null
@@ -1,301 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import copy
-import itertools
-import threading
-import time
-
-from infra_libs.ts_mon.common import errors
-
-
-"""A light-weight representation of a set or an incr.
-
-Args:
-  name: The metric name.
-  fields: The normalized field tuple.
-  mod_type: Either 'set' or 'incr'.  Other values will raise
-            UnknownModificationTypeError when it's used.
-  args: (value, enforce_ge) for 'set' or (delta, modify_fn) for 'incr'.
-"""  # pylint: disable=pointless-string-statement
-Modification = collections.namedtuple(
-    'Modification', ['name', 'fields', 'mod_type', 'args'])
-
-
-def default_modify_fn(name):
-  def _modify_fn(value, delta):
-    if delta < 0:
-      raise errors.MonitoringDecreasingValueError(name, None, delta)
-    return value + delta
-  return _modify_fn
-
-
-class MetricStore(object):
-  """A place to store values for each metric.
-
-  Several methods take "a normalized field tuple".  This is a tuple of
-  (key, value) tuples sorted by key.  (The reason this is given as a tuple
-  instead of a dict is that tuples are hashable and can be used as dict keys;
-  dicts cannot.)
-
-  The MetricStore is also responsible for keeping the start_time of each metric.
-  This is what goes into the start_timestamp_us field in the MetricsData proto
-  for cumulative metrics and distributions, and helps Monarch identify when a
-  counter was reset.  This is the MetricStore's job because an implementation
-  might share counter values across multiple instances of a task (like on
-  Appengine), so the start time must be associated with that value so that it
-  can be reset for all tasks at once when the value is reset.
-
-  External metric stores (like those backed by memcache) may be cleared (either
-  wholly or partially) at any time.  When this happens the MetricStore *must*
-  generate a new start_time for all the affected metrics.
-
-  Metrics can specify their own explicit start time if they are mirroring the
-  value of some external counter that started counting at a known time.
-
-  Otherwise the MetricStore's time_fn (defaults to time.time()) is called the
-  first time a metric is set or incremented, or after it is cleared externally.
-  """
-
-  def __init__(self, state, time_fn=None):
-    self._state = state
-    self._time_fn = time_fn or time.time
-
-  def get(self, name, fields, target_fields, default=None):
-    """Fetches the current value for the metric.
-
-    Args:
-      name (string): the metric's name.
-      fields (tuple): a normalized field tuple.
-      target_fields (dict or None): target fields to override.
-      default: the value to return if the metric has no value of this set of
-          field values.
-    """
-    raise NotImplementedError
-
-  def get_all(self):
-    """Returns an iterator over all the metrics present in the store.
-
-    The iterator yields 4-tuples:
-      (target, metric, start_time, field_values)
-    """
-    raise NotImplementedError
-
-  def set(self, name, fields, target_fields, value, enforce_ge=False):
-    """Sets the metric's value.
-
-    Args:
-      name: the metric's name.
-      fields: a normalized field tuple.
-      target_fields (dict or None): target fields to override.
-      value: the new value for the metric.
-      enforce_ge: if this is True, raise an exception if the new value is
-          less than the old value.
-
-    Raises:
-      MonitoringDecreasingValueError: if enforce_ge is True and the new value is
-          smaller than the old value.
-    """
-    raise NotImplementedError
-
-  def incr(self, name, fields, target_fields, delta, modify_fn=None):
-    """Increments the metric's value.
-
-    Args:
-      name: the metric's name.
-      fields: a normalized field tuple.
-      target_fields (dict or None): target fields to override.
-      delta: how much to increment the value by.
-      modify_fn: this function is called with the original value and the delta
-          as its arguments and is expected to return the new value.  The
-          function must be idempotent as it may be called multiple times.
-    """
-    raise NotImplementedError
-
-  def modify_multi(self, modifications):
-    """Modifies multiple metrics in one go.
-
-    Args:
-      modifications: an iterable of Modification objects.
-    """
-    raise NotImplementedError
-
-  def reset_for_unittest(self, name=None):
-    """Clears the values metrics.  Useful in unittests.
-
-    Args:
-      name: the name of an individual metric to reset, or if None resets all
-        metrics.
-    """
-    raise NotImplementedError
-
-  def initialize_context(self):
-    """Opens a request-local context for deferring metric updates."""
-    pass  # pragma: no cover
-
-  def finalize_context(self):
-    """Closes a request-local context opened by initialize_context."""
-    pass  # pragma: no cover
-
-  def _start_time(self, name):
-    if name in self._state.metrics:
-      ret = self._state.metrics[name].start_time
-      if ret is not None:
-        return ret
-
-    return self._time_fn()
-
-  @staticmethod
-  def _normalize_target_fields(target_fields):
-    """Converts target fields into a hashable tuple.
-
-    Args:
-      target_fields (dict): target fields to override the default target.
-    """
-    if not target_fields:
-      target_fields = {}
-    return tuple(sorted(target_fields.iteritems()))
-
-
-class MetricFieldsValues(object):
-  def __init__(self):
-    # Map normalized fields to single metric values.
-    self._values = {}
-    self._thread_lock = threading.Lock()
-
-  def get_value(self, fields, default=None):
-    return self._values.get(fields, default)
-
-  def set_value(self, fields, value):
-    self._values[fields] = value
-
-  def iteritems(self):
-    # Make a copy of the metric values in case another thread (or this
-    # generator's consumer) modifies them while we're iterating.
-    with self._thread_lock:
-      values = copy.copy(self._values)
-    for fields, value in values.iteritems():
-      yield fields, value
-
-
-class TargetFieldsValues(object):
-  def __init__(self, store):
-    # Map normalized target fields to MetricFieldsValues.
-    self._values = collections.defaultdict(MetricFieldsValues)
-    self._store = store
-    self._thread_lock = threading.Lock()
-
-  def get_target_values(self, target_fields):
-    key = self._store._normalize_target_fields(target_fields)
-    return self._values[key]
-
-  def get_value(self, fields, target_fields, default=None):
-    return self.get_target_values(target_fields).get_value(
-        fields, default)
-
-  def set_value(self, fields, target_fields, value):
-    self.get_target_values(target_fields).set_value(fields, value)
-
-  def iter_targets(self):
-    # Make a copy of the values in case another thread (or this
-    # generator's consumer) modifies them while we're iterating.
-    with self._thread_lock:
-      values = copy.copy(self._values)
-    for target_fields, fields_values in values.iteritems():
-      target = copy.copy(self._store._state.target)
-      if target_fields:
-        target.update({k: v for k, v in target_fields})
-      yield target, fields_values
-
-
-class MetricValues(object):
-  def __init__(self, store, start_time):
-    self._start_time = start_time
-    self._values = TargetFieldsValues(store)
-
-  @property
-  def start_time(self):
-    return self._start_time
-
-  @property
-  def values(self):
-    return self._values
-
-  def get_value(self, fields, target_fields, default=None):
-    return self.values.get_value(fields, target_fields, default)
-
-  def set_value(self, fields, target_fields, value):
-    self.values.set_value(fields, target_fields, value)
-
-
-class InProcessMetricStore(MetricStore):
-  """A thread-safe metric store that keeps values in memory."""
-
-  def __init__(self, state, time_fn=None):
-    super(InProcessMetricStore, self).__init__(state, time_fn=time_fn)
-
-    self._values = {}
-    self._thread_lock = threading.Lock()
-
-  def _entry(self, name):
-    if name not in self._values:
-      self._reset(name)
-
-    return self._values[name]
-
-  def get(self, name, fields, target_fields, default=None):
-    return self._entry(name).get_value(fields, target_fields, default)
-
-  def iter_field_values(self, name):
-    return itertools.chain.from_iterable(
-        x.iteritems() for _, x in self._entry(name).values.iter_targets())
-
-  def get_all(self):
-    # Make a copy of the metric values in case another thread (or this
-    # generator's consumer) modifies them while we're iterating.
-    with self._thread_lock:
-      values = copy.copy(self._values)
-
-    for name, metric_values in values.iteritems():
-      if name not in self._state.metrics:
-        continue
-      start_time = metric_values.start_time
-      for target, fields_values in metric_values.values.iter_targets():
-        yield target, self._state.metrics[name], start_time, fields_values
-
-  def set(self, name, fields, target_fields, value, enforce_ge=False):
-    with self._thread_lock:
-      if enforce_ge:
-        old_value = self._entry(name).get_value(fields, target_fields, 0)
-        if value < old_value:
-          raise errors.MonitoringDecreasingValueError(name, old_value, value)
-
-      self._entry(name).set_value(fields, target_fields, value)
-
-  def incr(self, name, fields, target_fields, delta, modify_fn=None):
-    if delta < 0:
-      raise errors.MonitoringDecreasingValueError(name, None, delta)
-
-    if modify_fn is None:
-      modify_fn = default_modify_fn(name)
-
-    with self._thread_lock:
-      self._entry(name).set_value(fields, target_fields, modify_fn(
-          self.get(name, fields, target_fields, 0), delta))
-
-  def modify_multi(self, modifications):
-    # This is only used by DeferredMetricStore on top of MemcacheMetricStore,
-    # but could be implemented here if required in the future.
-    raise NotImplementedError
-
-  def reset_for_unittest(self, name=None):
-    if name is not None:
-      self._reset(name)
-    else:
-      for name in self._values.keys():
-        self._reset(name)
-
-  def _reset(self, name):
-    self._values[name] = MetricValues(self, self._start_time(name))
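To make the "normalized field tuple" convention used throughout this store concrete:

    # A dict of metric fields...
    fields = {'status': 200, 'method': 'GET'}
    # ...is normalized into a key-sorted tuple of (key, value) pairs, which is
    # hashable and therefore usable directly as a dict key by the store:
    normalized = tuple(sorted(fields.iteritems()))
    assert normalized == (('method', 'GET'), ('status', 200))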
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/metrics.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/metrics.py
deleted file mode 100644
index a9ee417..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/metrics.py
+++ /dev/null
@@ -1,493 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes representing individual metrics that can be sent."""
-
-import copy
-
-from infra_libs.ts_mon.protos import metrics_pb2
-
-from infra_libs.ts_mon.common import distribution
-from infra_libs.ts_mon.common import errors
-from infra_libs.ts_mon.common import interface
-
-
-MICROSECONDS_PER_SECOND = 1000000
-
-
-class Metric(object):
-  """Abstract base class for a metric.
-
-  A Metric is an attribute that may be monitored across many targets. Examples
-  include disk usage or the number of requests a server has received. A single
-  process may keep track of many metrics.
-
-  Note that Metric objects may be initialized at any time (for example, at the
-  top of a library), but cannot be sent until the underlying Monitor object
-  has been set up (usually by the top-level process parsing the command line).
-
-  A Metric can actually store multiple values that are identified by a set of
-  fields (which are themselves key-value pairs).  Fields can be passed to the
-  set() or increment() methods to modify a particular value, or passed to the
-  constructor in which case they will be used as the defaults for this Metric.
-
-  The unit of measurement for Metric data can be specified with MetricsDataUnits
-  when a Metric object is created:
-  e.g. MetricsDataUnits.SECONDS, MetricsDataUnits.BYTES, etc.
-  A full list of supported units can be found in the following protobuf file:
-  infra_libs/ts_mon/protos/metrics.proto
-
-  Do not directly instantiate an object of this class.
-  Use the concrete child classes instead:
-  * StringMetric for metrics with string values
-  * BooleanMetric for metrics with boolean values
-  * CounterMetric for metrics with monotonically increasing integer values
-  * GaugeMetric for metrics with arbitrarily varying integer values
-  * CumulativeMetric for metrics with monotonically increasing float values
-  * FloatMetric for metrics with arbitrarily varying float values
-
-  See http://go/inframon-doc for help designing and using your metrics.
-  """
-
-  def __init__(self, name, fields=None, description=None, units=None):
-    """Create an instance of a Metric.
-
-    Args:
-      name (str): the file-like name of this metric
-      fields (dict): a set of key-value pairs to be set as default metric fields
-      description (string): help string for the metric. Should be enough to
-                            know what the metric is about.
-      units (int): the unit used to measure data for the given
-                   metric. Please use the attributes of MetricsDataUnits to find
-                   valid integer values for this argument.
-    """
-    self._name = name.lstrip('/')
-    self._start_time = None
-    fields = fields or {}
-    if len(fields) > 7:
-      raise errors.MonitoringTooManyFieldsError(self._name, fields)
-    self._fields = fields
-    self._normalized_fields = self._normalize_fields(self._fields)
-    self._description = description
-    self._units = units
-
-    interface.register(self)
-
-  @property
-  def name(self):
-    return self._name
-
-  @property
-  def start_time(self):
-    return self._start_time
-
-  def is_cumulative(self):
-    raise NotImplementedError()
-
-  def __eq__(self, other):
-    return (self.name == other.name and
-            self._fields == other._fields and
-            type(self) == type(other))
-
-  def unregister(self):
-    interface.unregister(self)
-
-  def serialize_to(self, collection_pb, start_time, fields, value, target):
-    """Generate metrics_pb2.MetricsData messages for this metric.
-
-    Args:
-      collection_pb (metrics_pb2.MetricsCollection): protocol buffer into which
-        to add the current metric values.
-      start_time (float): timestamp in seconds since the UNIX epoch.
-      fields (tuple): normalized metric fields, as returned by
-        _normalize_fields.
-      value (see concrete class): the metric value to serialize.
-      target (Target): a Target to use.
-    """
-
-    metric_pb = collection_pb.data.add()
-    metric_pb.metric_name_prefix = interface.state.metric_name_prefix
-    metric_pb.name = self._name
-    if self._description is not None:
-      metric_pb.description = self._description
-    if self._units is not None:
-      metric_pb.units = self._units
-
-    self._populate_value(metric_pb, value, start_time)
-    self._populate_fields(metric_pb, fields)
-
-    target._populate_target_pb(metric_pb)
-
-  def _populate_fields(self, metric, fields):
-    """Fill in the fields attribute of a metric protocol buffer.
-
-    Args:
-      metric (metrics_pb2.MetricsData): a metrics protobuf to populate
-      fields (list of (key, value) tuples): normalized metric fields
-
-    Raises:
-      MonitoringInvalidFieldTypeError: if a field has a value of unknown type
-    """
-    for key, value in fields:
-      field = metric.fields.add()
-      field.name = key
-      if isinstance(value, basestring):
-        field.type = metrics_pb2.MetricsField.STRING
-        field.string_value = value
-      elif isinstance(value, bool):
-        field.type = metrics_pb2.MetricsField.BOOL
-        field.bool_value = value
-      elif isinstance(value, int):
-        field.type = metrics_pb2.MetricsField.INT
-        field.int_value = value
-      else:
-        raise errors.MonitoringInvalidFieldTypeError(self._name, key, value)
-
-  def _normalize_fields(self, fields):
-    """Merges the fields with the default fields and returns something hashable.
-
-    Args:
-      fields (dict): A dict of fields passed by the user, or None.
-
-    Returns:
-      A tuple of (key, value) tuples, ordered by key.  This whole tuple is used
-      as the key in the self._values dict to identify the cell for a value.
-
-    Raises:
-      MonitoringTooManyFieldsError: if there are more than seven metric fields
-    """
-    if fields is None:
-      return self._normalized_fields
-
-    all_fields = copy.copy(self._fields)
-    all_fields.update(fields)
-
-    if len(all_fields) > 7:
-      raise errors.MonitoringTooManyFieldsError(self._name, all_fields)
-
-    return tuple(sorted(all_fields.iteritems()))
-
-  def _populate_value(self, metric, value, start_time):
-    """Fill in the the data values of a metric protocol buffer.
-
-    Args:
-      metric (metrics_pb2.MetricsData): a metrics protobuf to populate
-      value (see concrete class): the value of the metric to be set
-      start_time (float): timestamp in seconds since the UNIX epoch.
-    """
-    raise NotImplementedError()
-
-  def set(self, value, fields=None, target_fields=None):
-    """Set a new value for this metric. Results in sending a new value.
-
-    The subclass should do appropriate type checking on value and then call
-    self._set_and_send_value.
-
-    Args:
-      value (see concrete class): the value of the metric to be set
-      fields (dict): additional metric fields to complement those on self
-      target_fields (dict): overwrite some of the default target fields
-    """
-    raise NotImplementedError()
-
-  def get(self, fields=None, target_fields=None):
-    """Returns the current value for this metric.
-
-    Subclasses should never use this to get a value, modify it and set it again.
-    Instead use _incr with a modify_fn.
-    """
-    return interface.state.store.get(
-        self.name, self._normalize_fields(fields), target_fields)
-
-  def get_all(self):
-    return interface.state.store.iter_field_values(self.name)
-
-  def reset(self):
-    """Clears the values of this metric.  Useful in unit tests.
-
-    It might be easier to call ts_mon.reset_for_unittest() in your setUp()
-    method instead of resetting every individual metric.
-    """
-
-    interface.state.store.reset_for_unittest(self.name)
-
-  def _set(self, fields, target_fields, value, enforce_ge=False):
-    interface.state.store.set(self.name, self._normalize_fields(fields),
-                              target_fields, value, enforce_ge=enforce_ge)
-
-  def _incr(self, fields, target_fields, delta, modify_fn=None):
-    interface.state.store.incr(self.name, self._normalize_fields(fields),
-                               target_fields, delta, modify_fn=modify_fn)
-
-
-class StringMetric(Metric):
-  """A metric whose value type is a string."""
-
-  def _populate_value(self, metric, value, start_time):
-    metric.string_value = value
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, basestring):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, value)
-
-  def is_cumulative(self):
-    return False
-
-
-class BooleanMetric(Metric):
-  """A metric whose value type is a boolean."""
-
-  def _populate_value(self, metric, value, start_time):
-    metric.boolean_value = value
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, bool):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, value)
-
-  def is_cumulative(self):
-    return False
-
-
-class NumericMetric(Metric):  # pylint: disable=abstract-method
-  """Abstract base class for numeric (int or float) metrics."""
-  # TODO(agable): Figure out if there's a way to send units with these metrics.
-
-  def increment(self, fields=None, target_fields=None):
-    self._incr(fields, target_fields, 1)
-
-  def increment_by(self, step, fields=None, target_fields=None):
-    self._incr(fields, target_fields, step)
-
-
-class CounterMetric(NumericMetric):
-  """A metric whose value type is a monotonically increasing integer."""
-
-  def __init__(self, name, fields=None, start_time=None, description=None,
-               units=None):
-    super(CounterMetric, self).__init__(
-        name, fields=fields, description=description, units=units)
-    self._start_time = start_time
-
-  def _populate_value(self, metric, value, start_time):
-    metric.counter = value
-    metric.start_timestamp_us = int(start_time * MICROSECONDS_PER_SECOND)
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, (int, long)):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, value, enforce_ge=True)
-
-  def increment_by(self, step, fields=None, target_fields=None):
-    if not isinstance(step, (int, long)):
-      raise errors.MonitoringInvalidValueTypeError(self._name, step)
-    self._incr(fields, target_fields, step)
-
-  def is_cumulative(self):
-    return True
-
-
-class GaugeMetric(NumericMetric):
-  """A metric whose value type is an integer."""
-
-  def _populate_value(self, metric, value, start_time):
-    metric.gauge = value
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, (int, long)):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, value)
-
-  def is_cumulative(self):
-    return False
-
-
-class CumulativeMetric(NumericMetric):
-  """A metric whose value type is a monotonically increasing float."""
-
-  def __init__(self, name, fields=None, start_time=None, description=None,
-               units=None):
-    super(CumulativeMetric, self).__init__(
-        name, fields=fields, description=description, units=units)
-    self._start_time = start_time
-
-  def _populate_value(self, metric, value, start_time):
-    metric.cumulative_double_value = value
-    metric.start_timestamp_us = int(start_time * MICROSECONDS_PER_SECOND)
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, (float, int)):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, float(value), enforce_ge=True)
-
-  def is_cumulative(self):
-    return True
-
-
-class FloatMetric(NumericMetric):
-  """A metric whose value type is a float."""
-
-  def _populate_value(self, metric, value, start_time):
-    metric.noncumulative_double_value = value
-
-  def set(self, value, fields=None, target_fields=None):
-    if not isinstance(value, (float, int)):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-    self._set(fields, target_fields, float(value))
-
-  def is_cumulative(self):
-    return False
-
-
-class DistributionMetric(Metric):
-  """A metric that holds a distribution of values.
-
-  By default buckets are chosen from a geometric progression, each bucket being
-  approximately 1.59 times bigger than the last.  In practice this is suitable
-  for many kinds of data, but you may want to provide a FixedWidthBucketer or
-  GeometricBucketer with different parameters."""
-
-  CANONICAL_SPEC_TYPES = {
-      2: metrics_pb2.PrecomputedDistribution.CANONICAL_POWERS_OF_2,
-      10**0.2: metrics_pb2.PrecomputedDistribution.CANONICAL_POWERS_OF_10_P_0_2,
-      10: metrics_pb2.PrecomputedDistribution.CANONICAL_POWERS_OF_10,
-  }
-
-  def __init__(self, name, is_cumulative=True, bucketer=None, fields=None,
-               start_time=None, description=None, units=None):
-    super(DistributionMetric, self).__init__(
-        name, fields=fields, description=description, units=units)
-    self._start_time = start_time
-
-    if bucketer is None:
-      bucketer = distribution.GeometricBucketer()
-
-    self._is_cumulative = is_cumulative
-    self.bucketer = bucketer
-
-  def _populate_value(self, metric, value, start_time):
-    pb = metric.distribution
-
-    pb.is_cumulative = self._is_cumulative
-    if self._is_cumulative:
-      metric.start_timestamp_us = int(start_time * MICROSECONDS_PER_SECOND)
-
-    # Copy the bucketer params.
-    if (value.bucketer.width == 0 and
-        value.bucketer.growth_factor in self.CANONICAL_SPEC_TYPES):
-      pb.spec_type = self.CANONICAL_SPEC_TYPES[value.bucketer.growth_factor]
-    else:
-      pb.spec_type = metrics_pb2.PrecomputedDistribution.CUSTOM_PARAMETERIZED
-      pb.width = value.bucketer.width
-      pb.growth_factor = value.bucketer.growth_factor
-      pb.num_buckets = value.bucketer.num_finite_buckets
-
-    # Copy the distribution bucket values.  Only include the finite buckets, not
-    # the overflow buckets on each end.
-    pb.bucket.extend(self._running_zero_generator(
-        value.buckets.get(i, 0) for i in
-        xrange(1, value.bucketer.total_buckets - 1)))
-
-    # Add the overflow buckets if present.
-    if value.bucketer.underflow_bucket in value.buckets:
-      pb.underflow = value.buckets[value.bucketer.underflow_bucket]
-    if value.bucketer.overflow_bucket in value.buckets:
-      pb.overflow = value.buckets[value.bucketer.overflow_bucket]
-
-    if value.count != 0:
-      pb.mean = float(value.sum) / value.count
-
-  @staticmethod
-  def _running_zero_generator(iterable):
-    """Compresses sequences of zeroes in the iterable into negative zero counts.
-
-    For example an input of [1, 0, 0, 0, 2] is converted to [1, -3, 2].
-    """
-
-    count = 0
-
-    for value in iterable:
-      if value == 0:
-        count += 1
-      else:
-        if count != 0:
-          yield -count
-          count = 0
-        yield value
-
-  def add(self, value, fields=None, target_fields=None):
-    def modify_fn(dist, value):
-      if dist == 0:
-        dist = distribution.Distribution(self.bucketer)
-      dist.add(value)
-      return dist
-
-    self._incr(fields, target_fields, value, modify_fn=modify_fn)
-
-  def set(self, value, fields=None, target_fields=None):
-    """Replaces the distribution with the given fields with another one.
-
-    This only makes sense on non-cumulative DistributionMetrics.
-
-    Args:
-      value: A infra_libs.ts_mon.Distribution.
-    """
-
-    if self._is_cumulative:
-      raise TypeError(
-          'Cannot set() a cumulative DistributionMetric (use add() instead)')
-
-    if not isinstance(value, distribution.Distribution):
-      raise errors.MonitoringInvalidValueTypeError(self._name, value)
-
-    self._set(fields, target_fields, value)
-
-  def is_cumulative(self):
-    raise NotImplementedError()  # Keep this class abstract.
-
-
-class CumulativeDistributionMetric(DistributionMetric):
-  """A DistributionMetric with is_cumulative set to True."""
-
-  def __init__(self, name, bucketer=None, fields=None,
-               description=None, units=None):
-    super(CumulativeDistributionMetric, self).__init__(
-        name,
-        is_cumulative=True,
-        bucketer=bucketer,
-        fields=fields,
-        description=description,
-        units=units)
-
-  def is_cumulative(self):
-    return True
-
-
-class NonCumulativeDistributionMetric(DistributionMetric):
-  """A DistributionMetric with is_cumulative set to False."""
-
-  def __init__(self, name, bucketer=None, fields=None,
-               description=None, units=None):
-    super(NonCumulativeDistributionMetric, self).__init__(
-        name,
-        is_cumulative=False,
-        bucketer=bucketer,
-        fields=fields,
-        description=description,
-        units=units)
-
-  def is_cumulative(self):
-    return False
-
-
-class MetaMetricsDataUnits(type):
-  """Metaclass to populate the enum values of metrics_pb2.MetricsData.Units."""
-  def __new__(mcs, name, bases, attrs):
-    attrs.update(metrics_pb2.MetricsData.Units.items())
-    return super(MetaMetricsDataUnits, mcs).__new__(mcs, name, bases, attrs)
-
-
-class MetricsDataUnits(object):
-  """An enumeration class for units of measurement for Metrics data.
-  See infra_libs/ts_mon/protos/metrics.proto for a full list of supported units.
-  """
-  __metaclass__ = MetaMetricsDataUnits
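For reference, how a cumulative distribution metric defined with these classes is typically created and fed (the metric name is illustrative; registration happens in the constructor, and nothing is sent until the global Monitor flushes):

    from infra_libs import ts_mon

    # Default GeometricBucketer: each bucket ~1.59x bigger than the last.
    durations = ts_mon.CumulativeDistributionMetric(
        'myapp/request_durations',            # illustrative name
        description='Request durations in seconds.',
        units=ts_mon.MetricsDataUnits.SECONDS)

    for seconds in (0.003, 0.018, 0.250):
      durations.add(seconds)

    # Cumulative distributions only support add(); set() raises TypeError.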
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/monitors.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/monitors.py
deleted file mode 100644
index e39cdfe..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/monitors.py
+++ /dev/null
@@ -1,229 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes representing the monitoring interface for tasks or devices."""
-
-
-import base64
-import httplib2
-import json
-import logging
-import socket
-import traceback
-
-from googleapiclient import discovery
-from googleapiclient import errors
-from infra_libs import httplib2_utils
-from infra_libs.ts_mon.common import http_metrics
-from infra_libs.ts_mon.common import pb_to_popo
-from infra_libs.ts_mon.protos import metrics_pb2
-from oauth2client import gce
-from oauth2client.client import GoogleCredentials
-from oauth2client.file import Storage
-
-# Special string that can be passed through as the credentials path to use the
-# default Appengine or GCE service account.
-APPENGINE_CREDENTIALS = ':appengine'
-GCE_CREDENTIALS = ':gce'
-
-
-class Monitor(object):
-  """Abstract base class encapsulating the ability to collect and send metrics.
-
-  This is a singleton class. There should only be one instance of a Monitor at
-  a time. It will be created and initialized by process_argparse_options. It
-  must exist in order for any metrics to be sent, although both Targets and
-  Metrics may be initialized before the underlying Monitor. If it does not exist
-  at the time that a Metric is sent, an exception will be raised.
-  """
-
-  _SCOPES = []
-
-  @staticmethod
-  def _wrap_proto(data):
-    """Normalize MetricsData, list(MetricsData), and MetricsCollection.
-
-    Args:
-      data: A MetricsData, a list of MetricsData, or a MetricsCollection.
-
-    Returns:
-      A MetricsCollection with the appropriate data attribute set.
-    """
-    if isinstance(data, metrics_pb2.MetricsCollection):
-      ret = data
-    elif isinstance(data, list):
-      ret = metrics_pb2.MetricsCollection(data=data)
-    else:
-      ret = metrics_pb2.MetricsCollection(data=[data])
-    return ret
-
-  def _load_credentials(self, credentials_file_path):
-    if credentials_file_path == GCE_CREDENTIALS:
-      return gce.AppAssertionCredentials(self._SCOPES)
-    if credentials_file_path == APPENGINE_CREDENTIALS:  # pragma: no cover
-      # This import doesn't work outside appengine, so delay it until it's used.
-      from oauth2client import appengine
-      from google.appengine.api import app_identity
-      logging.info('Initializing with service account %s',
-                   app_identity.get_service_account_name())
-      return appengine.AppAssertionCredentials(self._SCOPES)
-
-    with open(credentials_file_path, 'r') as credentials_file:
-      credentials_json = json.load(credentials_file)
-    if credentials_json.get('type', None):
-      credentials = GoogleCredentials.from_stream(credentials_file_path)
-      credentials = credentials.create_scoped(self._SCOPES)
-      return credentials
-    return Storage(credentials_file_path).get()
-
-  def send(self, metric_pb):
-    raise NotImplementedError()
-
-class HttpsMonitor(Monitor):
-
-  _SCOPES = [
-    'https://www.googleapis.com/auth/prodxmon'
-  ]
-
-  def __init__(self, endpoint, credentials_file_path, http=None):
-    self._endpoint = endpoint
-    credentials = self._load_credentials(credentials_file_path)
-    if http is None:
-      http = httplib2_utils.RetriableHttp(
-          httplib2_utils.InstrumentedHttp('acq-mon-api'))
-    self._http = credentials.authorize(http)
-
-  def encodeToJson(self, metric_pb):
-    return json.dumps({ 'resource': pb_to_popo.convert(metric_pb) })
-
-  def send(self, metric_pb):
-    body = self.encodeToJson(self._wrap_proto(metric_pb))
-
-    try:
-      resp, content = self._http.request(self._endpoint, method='POST',
-                                         body=body)
-      if resp.status != 200:
-        logging.warning('HttpsMonitor.send received status %d: %s', resp.status,
-                        content)
-    except (ValueError, errors.Error,
-            socket.timeout, socket.error, socket.herror, socket.gaierror,
-            httplib2.HttpLib2Error):
-      logging.warning('HttpsMonitor.send failed: %s\n',
-                      traceback.format_exc())
-
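
For reference, the body HttpsMonitor posts is just the wrapped collection converted to a plain dict under a 'resource' key. A minimal Python 2 sketch of what encodeToJson produces, assuming the infra_libs modules deleted in this change are importable and using invented field values:

    import json
    from infra_libs.ts_mon.common import pb_to_popo
    from infra_libs.ts_mon.protos import metrics_pb2

    # Mirror Monitor._wrap_proto: a single MetricsData becomes a one-element
    # MetricsCollection before serialization.
    data = metrics_pb2.MetricsData(name='example/metric')
    data.counter = 42
    collection = metrics_pb2.MetricsCollection(data=[data])

    body = json.dumps({'resource': pb_to_popo.convert(collection)})
    # body resembles: {"resource": {"data": [{"name": "example/metric",
    #                                         "counter": 42}]}}  (key order varies)
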
-
-class PubSubMonitor(Monitor):
-  """Class which publishes metrics to a Cloud Pub/Sub topic."""
-
-  _SCOPES = [
-      'https://www.googleapis.com/auth/pubsub',
-  ]
-
-  TIMEOUT = 10  # seconds
-
-  def _initialize(self):
-    creds = self._load_credentials(self._credsfile)
-    creds.authorize(self._http)
-    self._api = discovery.build('pubsub', 'v1', http=self._http)
-
-  def _update_init_metrics(self, status):
-    if not self._use_instrumented_http:
-      return
-    fields = {'name': 'acq-mon-api-pubsub',
-              'client': 'discovery',
-              'status': status}
-    http_metrics.response_status.increment(fields=fields)
-
-  def _check_initialize(self):
-    if self._api:
-      return True
-    try:
-      self._initialize()
-    except (ValueError, errors.Error,
-            socket.timeout, socket.error, socket.herror, socket.gaierror,
-            httplib2.HttpLib2Error, EnvironmentError):
-      # Log a warning, not error, to avoid false alarms in AppEngine apps.
-      logging.warning('PubSubMonitor._initialize failed:\n%s',
-                      traceback.format_exc())
-      self._api = None
-      self._update_init_metrics(http_metrics.STATUS_ERROR)
-      return False
-
-    self._update_init_metrics(http_metrics.STATUS_OK)
-    return True
-
-  def __init__(self, credsfile, project, topic, use_instrumented_http=True):
-    """Process monitoring related command line flags and initialize api.
-
-    Args:
-      credsfile (str): path to the credentials json file
-      project (str): the name of the Pub/Sub project to publish to.
-      topic (str): the name of the Pub/Sub topic to publish to.
-      use_instrumented_http (bool): whether to record monitoring metrics for
-          HTTP requests made to the pubsub API.
-    """
-    # Do not call self._check_initialize() in the constructor. This
-    # class is constructed during app initialization on AppEngine, and
-    # network calls are especially flaky during that time.
-    self._api = None
-    self._use_instrumented_http = use_instrumented_http
-    if use_instrumented_http:
-      self._http = httplib2_utils.InstrumentedHttp(
-          'acq-mon-api-pubsub', timeout=self.TIMEOUT)
-    else:
-      self._http = httplib2.Http(timeout=self.TIMEOUT)
-    self._credsfile = credsfile
-    self._topic = 'projects/%s/topics/%s' % (project, topic)
-
-  def send(self, metric_pb):
-    """Send a metric proto to the monitoring api.
-
-    Args:
-      metric_pb (MetricsData or MetricsCollection): the metric protobuf to send
-    """
-    if not self._check_initialize():
-      return
-    proto = self._wrap_proto(metric_pb)
-    logging.debug('ts_mon: sending %d metrics to PubSub', len(proto.data))
-    body = {
-        'messages': [
-          {'data': base64.b64encode(proto.SerializeToString())},
-        ],
-    }
-    # Occasionally, the client fails to receive a proper internal JSON
-    # from the server and raises ValueError trying to parse it.  Other
-    # times we may fail with a network error. This is not fatal, we'll
-    # resend metrics next time.
-    try:
-      self._api.projects().topics().publish(
-          topic=self._topic,
-          body=body).execute(num_retries=5)
-    except (ValueError, errors.Error,
-            socket.timeout, socket.error, socket.herror, socket.gaierror,
-            httplib2.HttpLib2Error):
-      # Log a warning, not error, to avoid false alarms in AppEngine apps.
-      logging.warning('PubSubMonitor.send failed:\n%s',
-                      traceback.format_exc())
-
-
-class DebugMonitor(Monitor):
-  """Class which writes metrics to logs or a local file for debugging."""
-  def __init__(self, filepath=None):
-    if filepath is None:
-      self._fh = None
-    else:
-      self._fh = open(filepath, 'a')
-
-  def send(self, metric_pb):
-    text = str(self._wrap_proto(metric_pb))
-    logging.info('Flushing monitoring metrics:\n%s', text)
-    if self._fh is not None:
-      self._fh.write(text + '\n\n')
-      self._fh.flush()
-
-
-class NullMonitor(Monitor):
-  """Class that doesn't send metrics anywhere."""
-  def send(self, metric_pb):
-    pass
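
As a rough usage sketch for the Monitor hierarchy removed above (Python 2, hypothetical metric name, not part of the original library): DebugMonitor needs no credentials, so it is the easiest way to see what would be sent, while NullMonitor is the no-op fallback that process_argparse_options installs by default.

    import logging
    from infra_libs.ts_mon.common import monitors
    from infra_libs.ts_mon.protos import metrics_pb2

    logging.basicConfig(level=logging.INFO)

    # Monitor._wrap_proto accepts a MetricsData, a list of them, or a
    # MetricsCollection, so any of those can be passed to send().
    metric = metrics_pb2.MetricsData(name='example/up')
    metric.boolean_value = True

    monitors.DebugMonitor().send(metric)   # logs the wrapped collection
    monitors.NullMonitor().send(metric)    # silently discards it
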
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/pb_to_popo.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/pb_to_popo.py
deleted file mode 100644
index 8266f9a..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/pb_to_popo.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import logging
-
-from google.protobuf.descriptor import FieldDescriptor as fd
-
-def convert(pb):
-  """Convert protobuf to plain-old-python-object"""
-  obj = {}
-  for field, value in pb.ListFields():
-    if field.label == fd.LABEL_REPEATED:
-      obj[field.name] = list(_get_json_func(field.type)(v) for v in value)
-    else:
-      obj[field.name] = _get_json_func(field.type)(value)
-  return obj
-
-def _get_json_func(field_type):
-  if field_type in _FD_TO_JSON:
-    return _FD_TO_JSON[field_type]
-  else: # pragma: no cover
-    logging.warning("pb_to_popo doesn't support converting %s", field_type)
-    return unicode
-
-_FD_TO_JSON = {
-  fd.TYPE_BOOL: bool,
-  fd.TYPE_DOUBLE: float,
-  fd.TYPE_ENUM: int,
-  fd.TYPE_FIXED32: float,
-  fd.TYPE_FIXED64: float,
-  fd.TYPE_FLOAT: float,
-  fd.TYPE_INT32: int,
-  fd.TYPE_INT64: long,
-  fd.TYPE_SFIXED32: float,
-  fd.TYPE_SFIXED64: float,
-  fd.TYPE_SINT32: int,
-  fd.TYPE_SINT64: long,
-  fd.TYPE_STRING: unicode,
-  fd.TYPE_UINT32: int,
-  fd.TYPE_UINT64: long,
-  fd.TYPE_MESSAGE: convert
-}
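
A short sketch of convert() in action (Python 2, invented values; assumes the metrics_pb2 module deleted later in this change): scalar fields go through _FD_TO_JSON, while repeated and message fields recurse.

    from infra_libs.ts_mon.common import pb_to_popo
    from infra_libs.ts_mon.protos import metrics_pb2

    pb = metrics_pb2.MetricsData(name='example/requests')
    pb.counter = 7
    field = pb.fields.add()
    field.name = 'status'
    field.int_value = 200

    popo = pb_to_popo.convert(pb)
    # popo == {'name': u'example/requests', 'counter': 7L,
    #          'fields': [{'name': u'status', 'int_value': 200L}]}
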
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/standard_metrics.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/standard_metrics.py
deleted file mode 100644
index bc42da8..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/standard_metrics.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Metrics common to all tasks and devices."""
-
-from infra_libs.ts_mon.common import metrics
-
-
-up = metrics.BooleanMetric(
-    'presence/up',
-    description="Set to True when the program is running, missing otherwise.")
-
-
-def init():
-  # TODO(dsansome): Add more metrics for git revision, cipd package version,
-  # uptime, etc.
-  up.set(True)
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/common/targets.py b/tools/swarming_client/third_party/infra_libs/ts_mon/common/targets.py
deleted file mode 100644
index 065f975..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/common/targets.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Classes representing the monitoring interface for tasks or devices."""
-
-
-class Target(object):
-  """Abstract base class for a monitoring target.
-
-  A Target is a "thing" that should be monitored, for example, a device or a
-  process. The majority of the time, a single process will have only a single
-  Target.
-
-  Do not directly instantiate an object of this class.
-  Use the concrete child classes instead:
-  * TaskTarget to monitor a job or tasks running in (potentially) many places;
-  * DeviceTarget to monitor a host machine that may be running a task.
-  """
-
-  def __init__(self):
-    # Subclasses should list the updatable target fields here.
-    self._fields = tuple()
-
-  def _populate_target_pb(self, metric):
-    """Populate the 'target' embedded message field of a metric protobuf."""
-    raise NotImplementedError()
-
-  def to_dict(self):
-    """Return target field values as a dictionary."""
-    return {field: getattr(self, field) for field in self._fields}
-
-  def update(self, target_fields):
-    """Update values of some target fields given as a dict."""
-    for field, value in target_fields.iteritems():
-      if field not in self._fields:
-        raise AttributeError('Bad target field: %s' % field)
-      # Make sure the attribute actually exists in the object.
-      getattr(self, field)
-      setattr(self, field, value)
-
-
-class DeviceTarget(Target):
-  """Monitoring interface class for monitoring specific hosts or devices."""
-
-  def __init__(self, region, role, network, hostname):
-    """Create a Target object exporting info about a specific device.
-
-    Args:
-      region (str): physical region in which the device is located.
-      role (str): role of the device.
-      network (str): virtual network on which the device is located.
-      hostname (str): name by which the device self-identifies.
-    """
-    super(DeviceTarget, self).__init__()
-    self.region = region
-    self.role = role
-    self.network = network
-    self.hostname = hostname
-    self.realm = 'ACQ_CHROME'
-    self.alertable = True
-    self._fields = ('region', 'role', 'network', 'hostname')
-
-  def _populate_target_pb(self, metric):
-    """Populate the 'network_device' embedded message of a metric protobuf.
-
-    Args:
-      metric (metrics_pb2.MetricsData): the metric proto to be populated.
-    """
-    # Note that this disregards the pop, asn, and vendor fields.
-    metric.network_device.metro = self.region
-    metric.network_device.role = self.role
-    metric.network_device.hostgroup = self.network
-    metric.network_device.hostname = self.hostname
-    metric.network_device.realm = self.realm
-    metric.network_device.alertable = self.alertable
-
-
-class TaskTarget(Target):
-  """Monitoring interface class for monitoring active jobs or processes."""
-
-  def __init__(self, service_name, job_name, region, hostname, task_num=0):
-    """Create a Target object exporting info about a specific task.
-
-    Args:
-      service_name (str): service of which this task is a part.
-      job_name (str): specific name of this task.
-      region (str): general region in which this task is running.
-      hostname (str): specific machine on which this task is running.
-      task_num (int): replication id of this task.
-    """
-    super(TaskTarget, self).__init__()
-    self.service_name = service_name
-    self.job_name = job_name
-    self.region = region
-    self.hostname = hostname
-    self.task_num = task_num
-    self._fields = ('service_name', 'job_name', 'region',
-                    'hostname', 'task_num')
-
-  def _populate_target_pb(self, metric):
-    """Populate the 'task' embedded message field of a metric protobuf.
-
-    Args:
-      metric (metrics_pb2.MetricsData): the metric proto to be populated.
-    """
-    metric.task.service_name = self.service_name
-    metric.task.job_name = self.job_name
-    metric.task.data_center = self.region
-    metric.task.host_name = self.hostname
-    metric.task.task_num = self.task_num
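
For illustration (Python 2, made-up values), a target is built once at startup and later patched through update(); _populate_target_pb is what the send path uses to stamp the target onto every metric.

    from infra_libs.ts_mon.common import targets
    from infra_libs.ts_mon.protos import metrics_pb2

    target = targets.TaskTarget('example-service', 'example-job',
                                'us-central1', 'build-host-42')
    target.update({'task_num': 3})    # only fields listed in _fields may change

    metric = metrics_pb2.MetricsData(name='example/metric')
    target._populate_target_pb(metric)
    # metric.task.job_name == 'example-job' and metric.task.task_num == 3;
    # target.to_dict() returns the same five fields as a plain dict.
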
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/config.proto b/tools/swarming_client/third_party/infra_libs/ts_mon/config.proto
deleted file mode 100644
index e7c1338..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/config.proto
+++ /dev/null
@@ -1,13 +0,0 @@
-syntax = "proto3";
-
-// ts_mon's config file in /etc/chrome-infra/ts-mon.json is a JSON-encoded
-// ConfigFile message.
-// Note: this .proto file isn't currently used to encode/decode the config file,
-// it's just here as a reference.
-message ConfigFile {
-  // URL to post monitoring metrics to.  file:// URLs are supported as well.
-  string endpoint = 1;
-
-  // Path to a pkcs8 json credential file.
-  string credentials = 2;
-}
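
For reference, a machine config matching this ConfigFile schema is a two-key JSON object; a hypothetical sketch that writes one (the endpoint URL and paths are invented):

    import json

    config = {
        'endpoint': 'https://example.googleapis.com/prodxmon/v1:insert',
        'credentials': '/etc/chrome-infra/ts-mon-credentials.json',
    }
    with open('/etc/chrome-infra/ts-mon.json', 'w') as fh:
        json.dump(config, fh, indent=2)
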
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/config.py b/tools/swarming_client/third_party/infra_libs/ts_mon/config.py
deleted file mode 100644
index b13e21d..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/config.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import json
-import logging
-import os
-import socket
-import sys
-import urlparse
-import re
-
-import requests
-
-from infra_libs.ts_mon.common import interface
-from infra_libs.ts_mon.common import metric_store
-from infra_libs.ts_mon.common import monitors
-from infra_libs.ts_mon.common import standard_metrics
-from infra_libs.ts_mon.common import targets
-
-
-def load_machine_config(filename):
-  if not os.path.exists(filename):
-    logging.info('Configuration file does not exist, ignoring: %s', filename)
-    return {}
-
-  try:
-    with open(filename) as fh:
-      return json.load(fh)
-  except Exception:
-    logging.error('Configuration file couldn\'t be read: %s', filename)
-    raise
-
-
-def _default_region(fqdn):
-  # Check if we're running in a GCE instance.
-  try:
-    r = requests.get(
-        'http://metadata.google.internal/computeMetadata/v1/instance/zone',
-        headers={'Metadata-Flavor': 'Google'},
-        timeout=1.0)
-  except requests.exceptions.RequestException:
-    pass
-  else:
-    if r.status_code == requests.codes.ok:
-      # The zone is the last slash-separated component.
-      return r.text.split('/')[-1]
-
-  try:
-    return fqdn.split('.')[1]  # [chrome|golo]
-  except IndexError:
-    return ''
-
-
-def _default_network(host):
-  try:
-    # Regular expression that matches the vast majority of our host names.
-    # Matches everything of the form 'masterN', 'masterNa', and 'foo-xN'.
-    return re.match(r'^([\w-]*?-[acm]|master)(\d+)a?$', host).group(2)  # N
-  except AttributeError:
-    return ''
-
-
-def add_argparse_options(parser):
-  """Add monitoring related flags to a process' argument parser.
-
-  Args:
-    parser (argparse.ArgumentParser): the parser for the main process.
-  """
-  if sys.platform == 'win32':  # pragma: no cover
-    default_config_file = 'C:\\chrome-infra\\ts-mon.json'
-  else:  # pragma: no cover
-    default_config_file = '/etc/chrome-infra/ts-mon.json'
-
-  parser = parser.add_argument_group('Timeseries Monitoring Options')
-  parser.add_argument(
-      '--ts-mon-config-file',
-      default=default_config_file,
-      help='path to a JSON config file that contains suitable values for '
-           '"endpoint" and "credentials" for this machine. This config file is '
-           'intended to be shared by all processes on the machine, as the '
-           'values depend on the machine\'s position in the network, IP '
-           'whitelisting and deployment of credentials. (default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-endpoint',
-      help='url (including file://, pubsub://project/topic, https://) to post '
-           'monitoring metrics to. If set, overrides the value in '
-           '--ts-mon-config-file')
-  parser.add_argument(
-      '--ts-mon-credentials',
-      help='path to a pkcs8 json credential file. If set, overrides the value '
-           'in --ts-mon-config-file')
-  parser.add_argument(
-      '--ts-mon-flush',
-      choices=('manual', 'auto'), default='auto',
-      help=('metric push behavior: manual (only send when flush() is called), '
-            'or auto (send automatically every --ts-mon-flush-interval-secs '
-            'seconds). (default: %(default)s)'))
-  parser.add_argument(
-      '--ts-mon-flush-interval-secs',
-      type=int,
-      default=60,
-      help=('automatically push metrics on this interval if '
-            '--ts-mon-flush=auto.'))
-  parser.add_argument(
-      '--ts-mon-autogen-hostname',
-      action="store_true",
-      help=('Indicate that the hostname is autogenerated. '
-            'This option must be set on autoscaled GCE VMs, Kubernetes pods, '
-            'or any other hosts with dynamically generated names.'))
-
-  parser.add_argument(
-      '--ts-mon-target-type',
-      choices=('device', 'task'),
-      default='device',
-      help='the type of target that is being monitored ("device" or "task").'
-           ' (default: %(default)s)')
-
-  fqdn = socket.getfqdn().lower()  # foo-[a|m]N.[chrome|golo].chromium.org
-  host = fqdn.split('.')[0]  # foo-[a|m]N
-  region = _default_region(fqdn)
-  network = _default_network(host)
-
-  parser.add_argument(
-      '--ts-mon-device-hostname',
-      default=host,
-      help='name of this device. (default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-device-region',
-      default=region,
-      help='name of the region this device lives in. (default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-device-role',
-      default='default',
-      help='Role of the device. (default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-device-network',
-      default=network,
-      help='name of the network this device is connected to. '
-           '(default: %(default)s)')
-
-  parser.add_argument(
-      '--ts-mon-task-service-name',
-      help='name of the service being monitored')
-  parser.add_argument(
-      '--ts-mon-task-job-name',
-      help='name of this job instance of the task')
-  parser.add_argument(
-      '--ts-mon-task-region',
-      default=region,
-      help='name of the region in which this task is running '
-           '(default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-task-hostname',
-      default=host,
-      help='name of the host on which this task is running '
-           '(default: %(default)s)')
-  parser.add_argument(
-      '--ts-mon-task-number', type=int, default=0,
-      help='number (e.g. for replication) of this instance of this task '
-           '(default: %(default)s)')
-
-  parser.add_argument(
-      '--ts-mon-metric-name-prefix',
-      default='/chrome/infra/',
-      help='metric name prefix for all metrics (default: %(default)s)')
-
-def process_argparse_options(args):
-  """Process command line arguments to initialize the global monitor.
-
-  Also initializes the default target.
-
-  Starts a background thread to automatically flush monitoring metrics if not
-  disabled by command line arguments.
-
-  Args:
-    args (argparse.Namespace): the result of parsing the command line arguments
-  """
-  # Parse the config file if it exists.
-  config = load_machine_config(args.ts_mon_config_file)
-  endpoint = config.get('endpoint', '')
-  credentials = config.get('credentials', '')
-  autogen_hostname = config.get('autogen_hostname', False)
-
-  # Command-line args override the values in the config file.
-  if args.ts_mon_endpoint is not None:
-    endpoint = args.ts_mon_endpoint
-  if args.ts_mon_credentials is not None:
-    credentials = args.ts_mon_credentials
-
-  if args.ts_mon_target_type == 'device':
-    hostname = args.ts_mon_device_hostname
-    if args.ts_mon_autogen_hostname or autogen_hostname:
-      hostname = 'autogen:' + hostname
-    interface.state.target = targets.DeviceTarget(
-        args.ts_mon_device_region,
-        args.ts_mon_device_role,
-        args.ts_mon_device_network,
-        hostname)
-  if args.ts_mon_target_type == 'task':
-    # Reimplement ArgumentParser.error, since we don't have access to the parser
-    if not args.ts_mon_task_service_name:
-      print >> sys.stderr, ('Argument --ts-mon-task-service-name must be '
-                            'provided when the target type is "task".')
-      sys.exit(2)
-    if not args.ts_mon_task_job_name:
-      print >> sys.stderr, ('Argument --ts-mon-task-job-name must be provided '
-                            'when the target type is "task".')
-      sys.exit(2)
-    hostname = args.ts_mon_task_hostname
-    if args.ts_mon_autogen_hostname or autogen_hostname:
-      hostname = 'autogen:' + hostname
-    interface.state.target = targets.TaskTarget(
-        args.ts_mon_task_service_name,
-        args.ts_mon_task_job_name,
-        args.ts_mon_task_region,
-        hostname,
-        args.ts_mon_task_number)
-
-  interface.state.metric_name_prefix = args.ts_mon_metric_name_prefix
-  interface.state.global_monitor = monitors.NullMonitor()
-
-  if endpoint.startswith('file://'):
-    interface.state.global_monitor = monitors.DebugMonitor(
-        endpoint[len('file://'):])
-  elif endpoint.startswith('pubsub://'):
-    if credentials:
-      url = urlparse.urlparse(endpoint)
-      project = url.netloc
-      topic = url.path.strip('/')
-      interface.state.global_monitor = monitors.PubSubMonitor(
-          credentials, project, topic, use_instrumented_http=True)
-    else:
-      logging.error('ts_mon monitoring is disabled because credentials are not '
-                    'available')
-  elif endpoint.startswith('https://'):
-    interface.state.global_monitor = monitors.HttpsMonitor(endpoint,
-                                                           credentials)
-  elif endpoint.lower() == 'none':
-    logging.info('ts_mon monitoring has been explicitly disabled')
-  else:
-    logging.error('ts_mon monitoring is disabled because the endpoint provided'
-                  ' is invalid or not supported: %s', endpoint)
-
-  interface.state.flush_mode = args.ts_mon_flush
-
-  if args.ts_mon_flush == 'auto':
-    interface.state.flush_thread = interface._FlushThread(
-        args.ts_mon_flush_interval_secs)
-    interface.state.flush_thread.start()
-
-  standard_metrics.init()
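
Tying the two entry points above together, a minimal sketch of how a tool could wire up ts_mon from its command line (Python 2, illustrative flag values; assumes the infra_libs.ts_mon package layout shown in these paths):

    import argparse
    from infra_libs.ts_mon import config

    parser = argparse.ArgumentParser(description='example tool')
    config.add_argparse_options(parser)
    args = parser.parse_args([
        '--ts-mon-endpoint', 'file:///tmp/ts-mon-debug.log',
        '--ts-mon-flush', 'manual',
    ])

    # Picks a DebugMonitor for the file:// endpoint, builds the default
    # DeviceTarget, and records standard_metrics.up.
    config.process_argparse_options(args)
    # With --ts-mon-flush=manual, nothing is pushed until flush() is called
    # (see the flag help above); 'auto' would start the background thread.
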
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/REAME.md b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/REAME.md
deleted file mode 100644
index c651c7d..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/REAME.md
+++ /dev/null
@@ -1,8 +0,0 @@
-Updating the *.proto files: see go/updating-tsmon-protos
-
-To generate the `*_pb2.py` files from the `*.proto` files:
-
-    cd infra_libs/ts_mon/protos
-    protoc --python_out=. *.proto
-
-protoc version tested: 2.6.0
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/__init__.py b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/__init__.py
deleted file mode 100644
index 50b23df..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto
deleted file mode 100644
index d4c0c28..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device.proto
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-syntax = "proto2";
-
-package ts_mon.proto;
-
-message NetworkDevice {
-  enum TypeId { MESSAGE_TYPE_ID = 34049749; };
-
-  optional bool alertable = 101;
-  optional string realm = 102;
-  optional string metro = 104;
-  optional string role = 105;
-  optional string hostname = 106;
-  optional string hostgroup = 108;
-}
-
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py
deleted file mode 100644
index eb8938f..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_network_device_pb2.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: acquisition_network_device.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='acquisition_network_device.proto',
-  package='ts_mon.proto',
-  serialized_pb='\n acquisition_network_device.proto\x12\x0cts_mon.proto\"\x95\x01\n\rNetworkDevice\x12\x11\n\talertable\x18\x65 \x01(\x08\x12\r\n\x05realm\x18\x66 \x01(\t\x12\r\n\x05metro\x18h \x01(\t\x12\x0c\n\x04role\x18i \x01(\t\x12\x10\n\x08hostname\x18j \x01(\t\x12\x11\n\thostgroup\x18l \x01(\t\" \n\x06TypeId\x12\x16\n\x0fMESSAGE_TYPE_ID\x10\xd5\x9d\x9e\x10')
-
-
-
-_NETWORKDEVICE_TYPEID = _descriptor.EnumDescriptor(
-  name='TypeId',
-  full_name='ts_mon.proto.NetworkDevice.TypeId',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='MESSAGE_TYPE_ID', index=0, number=34049749,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=168,
-  serialized_end=200,
-)
-
-
-_NETWORKDEVICE = _descriptor.Descriptor(
-  name='NetworkDevice',
-  full_name='ts_mon.proto.NetworkDevice',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='alertable', full_name='ts_mon.proto.NetworkDevice.alertable', index=0,
-      number=101, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='realm', full_name='ts_mon.proto.NetworkDevice.realm', index=1,
-      number=102, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='metro', full_name='ts_mon.proto.NetworkDevice.metro', index=2,
-      number=104, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='role', full_name='ts_mon.proto.NetworkDevice.role', index=3,
-      number=105, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='hostname', full_name='ts_mon.proto.NetworkDevice.hostname', index=4,
-      number=106, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='hostgroup', full_name='ts_mon.proto.NetworkDevice.hostgroup', index=5,
-      number=108, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _NETWORKDEVICE_TYPEID,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=51,
-  serialized_end=200,
-)
-
-_NETWORKDEVICE_TYPEID.containing_type = _NETWORKDEVICE;
-DESCRIPTOR.message_types_by_name['NetworkDevice'] = _NETWORKDEVICE
-
-class NetworkDevice(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _NETWORKDEVICE
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.NetworkDevice)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task.proto b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task.proto
deleted file mode 100644
index 8f20b9b..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task.proto
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-syntax = "proto2";
-
-package ts_mon.proto;
-
-message Task {
-
-  enum TypeId { MESSAGE_TYPE_ID = 34049749; };
-  optional string service_name = 20;
-  optional string job_name = 30;
-  optional string data_center = 40;
-  optional string host_name = 50;
-  optional int32 task_num = 60;
-}
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py
deleted file mode 100644
index cef9bd8..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/acquisition_task_pb2.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: acquisition_task.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='acquisition_task.proto',
-  package='ts_mon.proto',
-  serialized_pb='\n\x16\x61\x63quisition_task.proto\x12\x0cts_mon.proto\"\x8a\x01\n\x04Task\x12\x14\n\x0cservice_name\x18\x14 \x01(\t\x12\x10\n\x08job_name\x18\x1e \x01(\t\x12\x13\n\x0b\x64\x61ta_center\x18( \x01(\t\x12\x11\n\thost_name\x18\x32 \x01(\t\x12\x10\n\x08task_num\x18< \x01(\x05\" \n\x06TypeId\x12\x16\n\x0fMESSAGE_TYPE_ID\x10\xd5\x9d\x9e\x10')
-
-
-
-_TASK_TYPEID = _descriptor.EnumDescriptor(
-  name='TypeId',
-  full_name='ts_mon.proto.Task.TypeId',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='MESSAGE_TYPE_ID', index=0, number=34049749,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=147,
-  serialized_end=179,
-)
-
-
-_TASK = _descriptor.Descriptor(
-  name='Task',
-  full_name='ts_mon.proto.Task',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='service_name', full_name='ts_mon.proto.Task.service_name', index=0,
-      number=20, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='job_name', full_name='ts_mon.proto.Task.job_name', index=1,
-      number=30, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='data_center', full_name='ts_mon.proto.Task.data_center', index=2,
-      number=40, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='host_name', full_name='ts_mon.proto.Task.host_name', index=3,
-      number=50, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='task_num', full_name='ts_mon.proto.Task.task_num', index=4,
-      number=60, type=5, cpp_type=1, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _TASK_TYPEID,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=41,
-  serialized_end=179,
-)
-
-_TASK_TYPEID.containing_type = _TASK;
-DESCRIPTOR.message_types_by_name['Task'] = _TASK
-
-class Task(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _TASK
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.Task)
-
-
-# @@protoc_insertion_point(module_scope)
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics.proto b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics.proto
deleted file mode 100644
index 0e3861d..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics.proto
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-syntax = "proto2";
-
-package ts_mon.proto;
-
-import "acquisition_network_device.proto";
-import "acquisition_task.proto";
-
-message MetricsCollection {
-  repeated MetricsData data = 1;
-
-  optional uint64 start_timestamp_us = 2;
-  optional string collection_point_id = 3;
-}
-
-message MetricsField {
-  optional string name = 1;
-
-  optional FieldType type = 3 [default = STRING];
-  enum FieldType {
-    STRING = 1;
-    INT = 2;
-    BOOL = 3;
-  };
-
-  optional string string_value = 4;
-  optional int64 int_value = 5;
-  optional bool bool_value = 6;
-}
-
-message PrecomputedDistribution {
-  enum SpecType {
-
-    CANONICAL_POWERS_OF_2 = 1;
-    CANONICAL_POWERS_OF_10_P_0_2 = 2;
-    CANONICAL_POWERS_OF_10 = 3;
-    CUSTOM_PARAMETERIZED = 20;
-    CUSTOM_BOUNDED = 21;
-  }
-
-  optional SpecType spec_type = 1;
-  optional double width = 2 [default = 10.0];
-  optional double growth_factor = 3 [default = 0.0];
-  optional int32 num_buckets = 4 [default = 10];
-  repeated double lower_bounds = 5;
-  optional bool is_cumulative = 6 [default = false];
-  repeated sint64 bucket = 7;
-  optional sint64 underflow = 8;
-  optional sint64 overflow = 9;
-  optional double mean = 10;
-  optional double sum_of_squared_deviation = 11;
-}
-
-message MetricsData {
-  required string name = 1;
-  optional string metric_name_prefix = 2;
-
-  optional ts_mon.proto.NetworkDevice network_device = 11;
-  optional ts_mon.proto.Task task = 12;
-
-  repeated MetricsField fields = 20;
-
-  optional int64 counter = 30;
-  optional int64 gauge = 32;
-  optional double noncumulative_double_value = 34;
-  optional PrecomputedDistribution distribution = 35;
-  optional string string_value = 36;
-  optional bool boolean_value = 37;
-  optional double cumulative_double_value = 38;
-
-  optional uint64 start_timestamp_us = 40;
-
-  enum Units {
-    UNKNOWN_UNITS = 0;
-    SECONDS = 1;
-    MILLISECONDS = 2;
-    MICROSECONDS = 3;
-    NANOSECONDS = 4;
-
-    BITS = 21;
-    BYTES = 22;
-
-    /** 1000 bytes (not 1024). */
-    KILOBYTES = 31;
-    /** 1e6 (1,000,000) bytes. */
-    MEGABYTES = 32;
-    /** 1e9 (1,000,000,000) bytes. */
-    GIGABYTES = 33;
-
-    /** 1024 bytes. */
-    KIBIBYTES = 41;
-    /** 1024^2 (1,048,576) bytes. */
-    MEBIBYTES = 42;
-    /** 1024^3 (1,073,741,824) bytes. */
-    GIBIBYTES = 43;
-
-    /** Extended Units */
-    AMPS = 60;
-    MILLIAMPS = 61;
-    DEGREES_CELSIUS = 62;
-  }
-  optional Units units = 41;
-
-  optional string description = 43;
-}
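
To make the schema concrete, a small Python 2 sketch that fills in a MetricsData by hand with the generated module deleted below (all values invented):

    from infra_libs.ts_mon.protos import metrics_pb2

    data = metrics_pb2.MetricsData(name='example/latency')
    data.task.service_name = 'example-service'        # ts_mon.proto.Task target
    data.cumulative_double_value = 12.5
    data.units = metrics_pb2.MetricsData.MILLISECONDS
    data.start_timestamp_us = 1478000000 * 1000000

    f = data.fields.add()
    f.name = 'endpoint'
    f.string_value = '/compile'

    collection = metrics_pb2.MetricsCollection(data=[data])
    payload = collection.SerializeToString()   # what PubSubMonitor base64-encodes
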
diff --git a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics_pb2.py b/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics_pb2.py
deleted file mode 100644
index ac1d3f1..0000000
--- a/tools/swarming_client/third_party/infra_libs/ts_mon/protos/metrics_pb2.py
+++ /dev/null
@@ -1,523 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: metrics.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-
-import acquisition_network_device_pb2
-import acquisition_task_pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='metrics.proto',
-  package='ts_mon.proto',
-  serialized_pb='\n\rmetrics.proto\x12\x0cts_mon.proto\x1a acquisition_network_device.proto\x1a\x16\x61\x63quisition_task.proto\"u\n\x11MetricsCollection\x12\'\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x19.ts_mon.proto.MetricsData\x12\x1a\n\x12start_timestamp_us\x18\x02 \x01(\x04\x12\x1b\n\x13\x63ollection_point_id\x18\x03 \x01(\t\"\xc1\x01\n\x0cMetricsField\x12\x0c\n\x04name\x18\x01 \x01(\t\x12:\n\x04type\x18\x03 \x01(\x0e\x32$.ts_mon.proto.MetricsField.FieldType:\x06STRING\x12\x14\n\x0cstring_value\x18\x04 \x01(\t\x12\x11\n\tint_value\x18\x05 \x01(\x03\x12\x12\n\nbool_value\x18\x06 \x01(\x08\"*\n\tFieldType\x12\n\n\x06STRING\x10\x01\x12\x07\n\x03INT\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\"\xcf\x03\n\x17PrecomputedDistribution\x12\x41\n\tspec_type\x18\x01 \x01(\x0e\x32..ts_mon.proto.PrecomputedDistribution.SpecType\x12\x11\n\x05width\x18\x02 \x01(\x01:\x02\x31\x30\x12\x18\n\rgrowth_factor\x18\x03 \x01(\x01:\x01\x30\x12\x17\n\x0bnum_buckets\x18\x04 \x01(\x05:\x02\x31\x30\x12\x14\n\x0clower_bounds\x18\x05 \x03(\x01\x12\x1c\n\ris_cumulative\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x0e\n\x06\x62ucket\x18\x07 \x03(\x12\x12\x11\n\tunderflow\x18\x08 \x01(\x12\x12\x10\n\x08overflow\x18\t \x01(\x12\x12\x0c\n\x04mean\x18\n \x01(\x01\x12 \n\x18sum_of_squared_deviation\x18\x0b \x01(\x01\"\x91\x01\n\x08SpecType\x12\x19\n\x15\x43\x41NONICAL_POWERS_OF_2\x10\x01\x12 \n\x1c\x43\x41NONICAL_POWERS_OF_10_P_0_2\x10\x02\x12\x1a\n\x16\x43\x41NONICAL_POWERS_OF_10\x10\x03\x12\x18\n\x14\x43USTOM_PARAMETERIZED\x10\x14\x12\x12\n\x0e\x43USTOM_BOUNDED\x10\x15\"\xe6\x05\n\x0bMetricsData\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x1a\n\x12metric_name_prefix\x18\x02 \x01(\t\x12\x33\n\x0enetwork_device\x18\x0b \x01(\x0b\x32\x1b.ts_mon.proto.NetworkDevice\x12 \n\x04task\x18\x0c \x01(\x0b\x32\x12.ts_mon.proto.Task\x12*\n\x06\x66ields\x18\x14 \x03(\x0b\x32\x1a.ts_mon.proto.MetricsField\x12\x0f\n\x07\x63ounter\x18\x1e \x01(\x03\x12\r\n\x05gauge\x18  \x01(\x03\x12\"\n\x1anoncumulative_double_value\x18\" \x01(\x01\x12;\n\x0c\x64istribution\x18# \x01(\x0b\x32%.ts_mon.proto.PrecomputedDistribution\x12\x14\n\x0cstring_value\x18$ \x01(\t\x12\x15\n\rboolean_value\x18% \x01(\x08\x12\x1f\n\x17\x63umulative_double_value\x18& \x01(\x01\x12\x1a\n\x12start_timestamp_us\x18( \x01(\x04\x12.\n\x05units\x18) \x01(\x0e\x32\x1f.ts_mon.proto.MetricsData.Units\x12\x13\n\x0b\x64\x65scription\x18+ \x01(\t\"\xf9\x01\n\x05Units\x12\x11\n\rUNKNOWN_UNITS\x10\x00\x12\x0b\n\x07SECONDS\x10\x01\x12\x10\n\x0cMILLISECONDS\x10\x02\x12\x10\n\x0cMICROSECONDS\x10\x03\x12\x0f\n\x0bNANOSECONDS\x10\x04\x12\x08\n\x04\x42ITS\x10\x15\x12\t\n\x05\x42YTES\x10\x16\x12\r\n\tKILOBYTES\x10\x1f\x12\r\n\tMEGABYTES\x10 \x12\r\n\tGIGABYTES\x10!\x12\r\n\tKIBIBYTES\x10)\x12\r\n\tMEBIBYTES\x10*\x12\r\n\tGIBIBYTES\x10+\x12\x08\n\x04\x41MPS\x10<\x12\r\n\tMILLIAMPS\x10=\x12\x13\n\x0f\x44\x45GREES_CELSIUS\x10>')
-
-
-
-_METRICSFIELD_FIELDTYPE = _descriptor.EnumDescriptor(
-  name='FieldType',
-  full_name='ts_mon.proto.MetricsField.FieldType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='STRING', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='INT', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BOOL', index=2, number=3,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=360,
-  serialized_end=402,
-)
-
-_PRECOMPUTEDDISTRIBUTION_SPECTYPE = _descriptor.EnumDescriptor(
-  name='SpecType',
-  full_name='ts_mon.proto.PrecomputedDistribution.SpecType',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='CANONICAL_POWERS_OF_2', index=0, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CANONICAL_POWERS_OF_10_P_0_2', index=1, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CANONICAL_POWERS_OF_10', index=2, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CUSTOM_PARAMETERIZED', index=3, number=20,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='CUSTOM_BOUNDED', index=4, number=21,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=723,
-  serialized_end=868,
-)
-
-_METRICSDATA_UNITS = _descriptor.EnumDescriptor(
-  name='Units',
-  full_name='ts_mon.proto.MetricsData.Units',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='UNKNOWN_UNITS', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='SECONDS', index=1, number=1,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MILLISECONDS', index=2, number=2,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MICROSECONDS', index=3, number=3,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NANOSECONDS', index=4, number=4,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BITS', index=5, number=21,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='BYTES', index=6, number=22,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='KILOBYTES', index=7, number=31,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MEGABYTES', index=8, number=32,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='GIGABYTES', index=9, number=33,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='KIBIBYTES', index=10, number=41,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MEBIBYTES', index=11, number=42,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='GIBIBYTES', index=12, number=43,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='AMPS', index=13, number=60,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='MILLIAMPS', index=14, number=61,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='DEGREES_CELSIUS', index=15, number=62,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=1364,
-  serialized_end=1613,
-)
-
-
-_METRICSCOLLECTION = _descriptor.Descriptor(
-  name='MetricsCollection',
-  full_name='ts_mon.proto.MetricsCollection',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='data', full_name='ts_mon.proto.MetricsCollection.data', index=0,
-      number=1, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='start_timestamp_us', full_name='ts_mon.proto.MetricsCollection.start_timestamp_us', index=1,
-      number=2, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='collection_point_id', full_name='ts_mon.proto.MetricsCollection.collection_point_id', index=2,
-      number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=89,
-  serialized_end=206,
-)
-
-
-_METRICSFIELD = _descriptor.Descriptor(
-  name='MetricsField',
-  full_name='ts_mon.proto.MetricsField',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='ts_mon.proto.MetricsField.name', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='type', full_name='ts_mon.proto.MetricsField.type', index=1,
-      number=3, type=14, cpp_type=8, label=1,
-      has_default_value=True, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='ts_mon.proto.MetricsField.string_value', index=2,
-      number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='int_value', full_name='ts_mon.proto.MetricsField.int_value', index=3,
-      number=5, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bool_value', full_name='ts_mon.proto.MetricsField.bool_value', index=4,
-      number=6, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _METRICSFIELD_FIELDTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=209,
-  serialized_end=402,
-)
-
-
-_PRECOMPUTEDDISTRIBUTION = _descriptor.Descriptor(
-  name='PrecomputedDistribution',
-  full_name='ts_mon.proto.PrecomputedDistribution',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='spec_type', full_name='ts_mon.proto.PrecomputedDistribution.spec_type', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=1,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='width', full_name='ts_mon.proto.PrecomputedDistribution.width', index=1,
-      number=2, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=10,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='growth_factor', full_name='ts_mon.proto.PrecomputedDistribution.growth_factor', index=2,
-      number=3, type=1, cpp_type=5, label=1,
-      has_default_value=True, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='num_buckets', full_name='ts_mon.proto.PrecomputedDistribution.num_buckets', index=3,
-      number=4, type=5, cpp_type=1, label=1,
-      has_default_value=True, default_value=10,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='lower_bounds', full_name='ts_mon.proto.PrecomputedDistribution.lower_bounds', index=4,
-      number=5, type=1, cpp_type=5, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='is_cumulative', full_name='ts_mon.proto.PrecomputedDistribution.is_cumulative', index=5,
-      number=6, type=8, cpp_type=7, label=1,
-      has_default_value=True, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='bucket', full_name='ts_mon.proto.PrecomputedDistribution.bucket', index=6,
-      number=7, type=18, cpp_type=2, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='underflow', full_name='ts_mon.proto.PrecomputedDistribution.underflow', index=7,
-      number=8, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='overflow', full_name='ts_mon.proto.PrecomputedDistribution.overflow', index=8,
-      number=9, type=18, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='mean', full_name='ts_mon.proto.PrecomputedDistribution.mean', index=9,
-      number=10, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='sum_of_squared_deviation', full_name='ts_mon.proto.PrecomputedDistribution.sum_of_squared_deviation', index=10,
-      number=11, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _PRECOMPUTEDDISTRIBUTION_SPECTYPE,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=405,
-  serialized_end=868,
-)
-
-
-_METRICSDATA = _descriptor.Descriptor(
-  name='MetricsData',
-  full_name='ts_mon.proto.MetricsData',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='name', full_name='ts_mon.proto.MetricsData.name', index=0,
-      number=1, type=9, cpp_type=9, label=2,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='metric_name_prefix', full_name='ts_mon.proto.MetricsData.metric_name_prefix', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='network_device', full_name='ts_mon.proto.MetricsData.network_device', index=2,
-      number=11, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='task', full_name='ts_mon.proto.MetricsData.task', index=3,
-      number=12, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='fields', full_name='ts_mon.proto.MetricsData.fields', index=4,
-      number=20, type=11, cpp_type=10, label=3,
-      has_default_value=False, default_value=[],
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='counter', full_name='ts_mon.proto.MetricsData.counter', index=5,
-      number=30, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='gauge', full_name='ts_mon.proto.MetricsData.gauge', index=6,
-      number=32, type=3, cpp_type=2, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='noncumulative_double_value', full_name='ts_mon.proto.MetricsData.noncumulative_double_value', index=7,
-      number=34, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='distribution', full_name='ts_mon.proto.MetricsData.distribution', index=8,
-      number=35, type=11, cpp_type=10, label=1,
-      has_default_value=False, default_value=None,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='string_value', full_name='ts_mon.proto.MetricsData.string_value', index=9,
-      number=36, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='boolean_value', full_name='ts_mon.proto.MetricsData.boolean_value', index=10,
-      number=37, type=8, cpp_type=7, label=1,
-      has_default_value=False, default_value=False,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='cumulative_double_value', full_name='ts_mon.proto.MetricsData.cumulative_double_value', index=11,
-      number=38, type=1, cpp_type=5, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='start_timestamp_us', full_name='ts_mon.proto.MetricsData.start_timestamp_us', index=12,
-      number=40, type=4, cpp_type=4, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='units', full_name='ts_mon.proto.MetricsData.units', index=13,
-      number=41, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='description', full_name='ts_mon.proto.MetricsData.description', index=14,
-      number=43, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=unicode("", "utf-8"),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _METRICSDATA_UNITS,
-  ],
-  options=None,
-  is_extendable=False,
-  extension_ranges=[],
-  serialized_start=871,
-  serialized_end=1613,
-)
-
-_METRICSCOLLECTION.fields_by_name['data'].message_type = _METRICSDATA
-_METRICSFIELD.fields_by_name['type'].enum_type = _METRICSFIELD_FIELDTYPE
-_METRICSFIELD_FIELDTYPE.containing_type = _METRICSFIELD;
-_PRECOMPUTEDDISTRIBUTION.fields_by_name['spec_type'].enum_type = _PRECOMPUTEDDISTRIBUTION_SPECTYPE
-_PRECOMPUTEDDISTRIBUTION_SPECTYPE.containing_type = _PRECOMPUTEDDISTRIBUTION;
-_METRICSDATA.fields_by_name['network_device'].message_type = acquisition_network_device_pb2._NETWORKDEVICE
-_METRICSDATA.fields_by_name['task'].message_type = acquisition_task_pb2._TASK
-_METRICSDATA.fields_by_name['fields'].message_type = _METRICSFIELD
-_METRICSDATA.fields_by_name['distribution'].message_type = _PRECOMPUTEDDISTRIBUTION
-_METRICSDATA.fields_by_name['units'].enum_type = _METRICSDATA_UNITS
-_METRICSDATA_UNITS.containing_type = _METRICSDATA;
-DESCRIPTOR.message_types_by_name['MetricsCollection'] = _METRICSCOLLECTION
-DESCRIPTOR.message_types_by_name['MetricsField'] = _METRICSFIELD
-DESCRIPTOR.message_types_by_name['PrecomputedDistribution'] = _PRECOMPUTEDDISTRIBUTION
-DESCRIPTOR.message_types_by_name['MetricsData'] = _METRICSDATA
-
-class MetricsCollection(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _METRICSCOLLECTION
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsCollection)
-
-class MetricsField(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _METRICSFIELD
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsField)
-
-class PrecomputedDistribution(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _PRECOMPUTEDDISTRIBUTION
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.PrecomputedDistribution)
-
-class MetricsData(_message.Message):
-  __metaclass__ = _reflection.GeneratedProtocolMessageType
-  DESCRIPTOR = _METRICSDATA
-
-  # @@protoc_insertion_point(class_scope:ts_mon.proto.MetricsData)
-
-
-# @@protoc_insertion_point(module_scope)
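
For orientation, the generated classes above are used like any other protobuf messages. A minimal sketch, assuming the removed module is importable as `metrics_pb2` (its real import path lies outside this hunk) and using only fields visible in the descriptors:

```python
# Hypothetical usage of the generated ts_mon classes deleted above; the module
# name metrics_pb2 is an assumption, field names come from the descriptors.
import metrics_pb2

data = metrics_pb2.MetricsData()
data.counter = 42                              # int64 field, tag 30
data.start_timestamp_us = 1478000000 * 10**6   # uint64 field, tag 40
data.description = 'number of completed builds'

payload = data.SerializeToString()             # wire-format bytes
roundtrip = metrics_pb2.MetricsData()
roundtrip.ParseFromString(payload)
assert roundtrip.counter == 42
```
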
diff --git a/tools/swarming_client/third_party/infra_libs/utils.py b/tools/swarming_client/third_party/infra_libs/utils.py
deleted file mode 100644
index 8c49d3e..0000000
--- a/tools/swarming_client/third_party/infra_libs/utils.py
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Miscellaneous utility functions."""
-
-
-import contextlib
-import errno
-import json
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-
-
-def read_json_as_utf8(filename=None, text=None):
-  """Read and deserialize a json file or string.
-
-  This function is different from json.load and json.loads in that it
-  returns utf8-encoded strings for keys and values instead of unicode.
-
-  Args:
-    filename (str): path of a file to parse
-    text (str): json string to parse
-
-  ``filename`` and ``text`` are mutually exclusive. ValueError is raised if
-  both are provided.
-  """
-
-  if filename is not None and text is not None:
-    raise ValueError('Only one of "filename" and "text" can be provided at '
-                     'the same time')
-
-  if filename is None and text is None:
-    raise ValueError('One of "filename" and "text" must be provided')
-
-  def to_utf8(obj):
-    if isinstance(obj, dict):
-      return {to_utf8(key): to_utf8(value) for key, value in obj.iteritems()}
-    if isinstance(obj, list):
-      return [to_utf8(item) for item in obj]
-    if isinstance(obj, unicode):
-      return obj.encode('utf-8')
-    return obj
-
-  if filename:
-    with open(filename, 'rb') as f:
-      obj = json.load(f)
-  else:
-    obj = json.loads(text)
-
-  return to_utf8(obj)
-
-
-# TODO(hinoka): Add tests crbug.com/500781
-def rmtree(file_path):  # pragma: no cover
-  """Recursively removes a directory, even if it's marked read-only.
-
-  Remove the directory located at file_path, if it exists.
-
-  shutil.rmtree() doesn't work on Windows if any of the files or directories
-  are read-only, which svn repositories and some .svn files are.  We need to
-  be able to force the files to be writable (i.e., deletable) as we traverse
-  the tree.
-
-  Even with all this, Windows still sometimes fails to delete a file, citing
-  a permission error (maybe something to do with antivirus scans or disk
-  indexing).  The best suggestion any of the user forums had was to wait a
-  bit and try again, so we do that too.  It's hand-waving, but sometimes it
-  works. :/
-  """
-  if not os.path.exists(file_path):
-    return
-
-  if sys.platform == 'win32':
-    # Give up and use cmd.exe's rd command.
-    file_path = os.path.normcase(file_path)
-    for _ in xrange(3):
-      if not subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]):
-        break
-      time.sleep(3)
-    return
-
-  def remove_with_retry(rmfunc, path):
-    if os.path.islink(path):
-      return os.remove(path)
-    else:
-      return rmfunc(path)
-
-  def rmtree_on_error(function, _, excinfo):
-    """This works around a problem whereby python 2.x on Windows has no ability
-    to check for symbolic links.  os.path.islink always returns False.  But
-    shutil.rmtree will fail if invoked on a symbolic link whose target was
-    deleted before the link.  E.g., reproduce like this:
-    > mkdir test
-    > mkdir test\1
-    > mklink /D test\current test\1
-    > python -c "import infra_libs; infra_libs.rmtree('test')"
-    To avoid this issue, we pass this error-handling function to rmtree.  If
-    we see the exact sort of failure, we ignore it.  All other failures we re-
-    raise.
-    """
-
-    exception_type = excinfo[0]
-    exception_value = excinfo[1]
-    # If shutil.rmtree encounters a symbolic link on Windows, os.listdir will
-    # fail with a WindowsError exception with an ENOENT errno (i.e., file not
-    # found).  We'll ignore that error.  Note that WindowsError is not defined
-    # for non-Windows platforms, so we use OSError (of which it is a subclass)
-    # to avoid lint complaints about an undefined global on non-Windows
-    # platforms.
-    if (function is os.listdir) and issubclass(exception_type, OSError):
-      if exception_value.errno != errno.ENOENT:
-        raise
-    else:
-      raise
-
-  for root, dirs, files in os.walk(file_path, topdown=False):
-    # For POSIX:  making the directory writable guarantees removability.
-    # Windows will ignore the non-read-only bits in the chmod value.
-    os.chmod(root, 0770)
-    for name in files:
-      remove_with_retry(os.remove, os.path.join(root, name))
-    for name in dirs:
-      remove_with_retry(lambda p: shutil.rmtree(p, onerror=rmtree_on_error),
-                        os.path.join(root, name))
-
-  remove_with_retry(os.rmdir, file_path)
-
-
-# We're trying to be compatible with Python3 tempfile.TemporaryDirectory
-# context manager here. And they used 'dir' as a keyword argument.
-# pylint: disable=redefined-builtin
-@contextlib.contextmanager
-def temporary_directory(suffix="", prefix="tmp", dir=None,
-                        keep_directory=False):
-  """Create and return a temporary directory.  This has the same
-  behavior as mkdtemp but can be used as a context manager.  For
-  example:
-
-    with temporary_directory() as tmpdir:
-      ...
-
-  Upon exiting the context, the directory and everything contained
-  in it are removed.
-
-  Args:
-    suffix, prefix, dir: same arguments as for tempfile.mkdtemp.
-    keep_directory (bool): if True, do not delete the temporary directory
-      when exiting. Useful for debugging.
-
-  Returns:
-    tempdir (str): full path to the temporary directory.
-  """
-  tempdir = None  # Handle mkdtemp raising an exception
-  try:
-    tempdir = tempfile.mkdtemp(suffix, prefix, dir)
-    yield tempdir
-
-  finally:
-    if tempdir and not keep_directory:  # pragma: no branch
-      try:
-        # TODO(pgervais,496347) Make this work reliably on Windows.
-        shutil.rmtree(tempdir, ignore_errors=True)
-      except OSError as ex:  # pragma: no cover
-        print >> sys.stderr, (
-          "ERROR: {!r} while cleaning up {!r}".format(ex, tempdir))
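
A short sketch of how the removed helpers combine, assuming the module is importable as `infra_libs.utils` (inferred from the file location):

```python
# Sketch only: temporary_directory() cleans up on exit and read_json_as_utf8()
# returns utf-8 encoded str keys/values rather than unicode objects.
import json
import os

from infra_libs import utils  # assumed import path for the removed module

with utils.temporary_directory(prefix='demo-') as tmpdir:
    path = os.path.join(tmpdir, 'config.json')
    with open(path, 'w') as f:
        json.dump({u'name': u'value'}, f)
    config = utils.read_json_as_utf8(filename=path)
    assert config == {'name': 'value'}      # plain str keys/values, not unicode
assert not os.path.exists(tmpdir)           # removed when the context exits
```
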
diff --git a/tools/swarming_client/third_party/oauth2client/LICENSE b/tools/swarming_client/third_party/oauth2client/LICENSE
deleted file mode 100644
index b506d50..0000000
--- a/tools/swarming_client/third_party/oauth2client/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
- Copyright 2014 Google Inc.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-Dependent Modules
-=================
-
-This code has the following dependencies
-above and beyond the Python standard library:
-
-uritemplates - Apache License 2.0
-httplib2 - MIT License
diff --git a/tools/swarming_client/third_party/oauth2client/README.md b/tools/swarming_client/third_party/oauth2client/README.md
deleted file mode 100644
index 005aff5..0000000
--- a/tools/swarming_client/third_party/oauth2client/README.md
+++ /dev/null
@@ -1,29 +0,0 @@
-[![Build Status](https://travis-ci.org/google/oauth2client.svg?branch=master)](https://travis-ci.org/google/oauth2client)
-[![Coverage Status](https://coveralls.io/repos/google/oauth2client/badge.svg?branch=master&service=github)](https://coveralls.io/github/google/oauth2client?branch=master)
-[![Documentation Status](https://readthedocs.org/projects/oauth2client/badge/?version=latest)](http://oauth2client.readthedocs.org/)
-
-This is a client library for accessing resources protected by OAuth 2.0.
-
-Installation
-============
-
-To install, simply say
-
-```bash
-$ pip install --upgrade oauth2client
-```
-
-Contributing
-============
-
-Please see the [CONTRIBUTING page][1] for more information. In particular, we
-love pull requests -- but please make sure to sign the contributor license
-agreement.
-
-Supported Python Versions
-=========================
-
-We support Python 2.6, 2.7, 3.3+. More information [in the docs][2].
-
-[1]: https://github.com/google/oauth2client/blob/master/CONTRIBUTING.md
-[2]: http://oauth2client.readthedocs.org/#supported-python-versions
diff --git a/tools/swarming_client/third_party/oauth2client/README.swarming b/tools/swarming_client/third_party/oauth2client/README.swarming
deleted file mode 100644
index 9193d48..0000000
--- a/tools/swarming_client/third_party/oauth2client/README.swarming
+++ /dev/null
@@ -1,14 +0,0 @@
-Name: oauth2client
-Short Name: oauth2client
-URL: https://github.com/google/oauth2client/archive/v1.5.2.tar.gz
-Version: 1.5.2
-Revision: 73d9d55447de97dfe541395817a0c8241701f7d6
-License: Apache License, Version 2.0
-
-Description:
-The oauth2client is a client library for OAuth 2.0.
-
-Local Modifications:
-- Kept oauth2client/.
-- Removed: appengine.py devshell.py django_orm.py flask_util.py
-- Kept LICENSE and README.md.
diff --git a/tools/swarming_client/third_party/oauth2client/__init__.py b/tools/swarming_client/third_party/oauth2client/__init__.py
deleted file mode 100644
index f7c36c1..0000000
--- a/tools/swarming_client/third_party/oauth2client/__init__.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Client library for using OAuth2, especially with Google APIs."""
-
-__version__ = '1.5.2'
-
-GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
-GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code'
-GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
-GOOGLE_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
-GOOGLE_TOKEN_INFO_URI = 'https://www.googleapis.com/oauth2/v2/tokeninfo'
diff --git a/tools/swarming_client/third_party/oauth2client/_helpers.py b/tools/swarming_client/third_party/oauth2client/_helpers.py
deleted file mode 100644
index 39bfeb6..0000000
--- a/tools/swarming_client/third_party/oauth2client/_helpers.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Helper functions for commonly used utilities."""
-
-import base64
-import json
-import six
-
-
-def _parse_pem_key(raw_key_input):
-    """Identify and extract PEM keys.
-
-    Determines whether the given key is in the format of PEM key, and extracts
-    the relevant part of the key if it is.
-
-    Args:
-        raw_key_input: The contents of a private key file (either PEM or
-                       PKCS12).
-
-    Returns:
-        string, The actual key if the contents are from a PEM file, or
-        else None.
-    """
-    offset = raw_key_input.find(b'-----BEGIN ')
-    if offset != -1:
-        return raw_key_input[offset:]
-
-
-def _json_encode(data):
-    return json.dumps(data, separators=(',', ':'))
-
-
-def _to_bytes(value, encoding='ascii'):
-    """Converts a string value to bytes, if necessary.
-
-    Unfortunately, ``six.b`` is insufficient for this task since in
-    Python2 it does not modify ``unicode`` objects.
-
-    Args:
-        value: The string/bytes value to be converted.
-        encoding: The encoding to use to convert unicode to bytes. Defaults
-                  to "ascii", which will not allow any characters from ordinals
-                  larger than 127. Other useful values are "latin-1", which
-                  will only allow byte ordinals (up to 255), and "utf-8",
-                  which will encode any unicode that needs to be.
-
-    Returns:
-        The original value converted to bytes (if unicode) or as passed in
-        if it started out as bytes.
-
-    Raises:
-        ValueError if the value could not be converted to bytes.
-    """
-    result = (value.encode(encoding)
-              if isinstance(value, six.text_type) else value)
-    if isinstance(result, six.binary_type):
-        return result
-    else:
-        raise ValueError('%r could not be converted to bytes' % (value,))
-
-
-def _from_bytes(value):
-    """Converts bytes to a string value, if necessary.
-
-    Args:
-        value: The string/bytes value to be converted.
-
-    Returns:
-        The original value converted to unicode (if bytes) or as passed in
-        if it started out as unicode.
-
-    Raises:
-        ValueError if the value could not be converted to unicode.
-    """
-    result = (value.decode('utf-8')
-              if isinstance(value, six.binary_type) else value)
-    if isinstance(result, six.text_type):
-        return result
-    else:
-        raise ValueError('%r could not be converted to unicode' % (value,))
-
-
-def _urlsafe_b64encode(raw_bytes):
-    raw_bytes = _to_bytes(raw_bytes, encoding='utf-8')
-    return base64.urlsafe_b64encode(raw_bytes).rstrip(b'=')
-
-
-def _urlsafe_b64decode(b64string):
-    # Guard against unicode strings, which base64 can't handle.
-    b64string = _to_bytes(b64string)
-    padded = b64string + b'=' * (4 - len(b64string) % 4)
-    return base64.urlsafe_b64decode(padded)
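
The padding-stripping base64url helpers above round-trip as follows; a small sketch using only functions defined in the removed file:

```python
# _urlsafe_b64encode() utf-8 encodes unicode input and strips '=' padding;
# _urlsafe_b64decode() restores the padding before decoding.
from oauth2client._helpers import _from_bytes, _to_bytes
from oauth2client._helpers import _urlsafe_b64decode, _urlsafe_b64encode

token = _urlsafe_b64encode(u'{"alg":"RS256"}')
assert b'=' not in token
assert _urlsafe_b64decode(token) == b'{"alg":"RS256"}'

assert _to_bytes(u'abc') == b'abc'    # unicode -> bytes ("ascii" by default)
assert _from_bytes(b'abc') == u'abc'  # bytes -> unicode (utf-8)
```
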
diff --git a/tools/swarming_client/third_party/oauth2client/_openssl_crypt.py b/tools/swarming_client/third_party/oauth2client/_openssl_crypt.py
deleted file mode 100644
index d024cf3..0000000
--- a/tools/swarming_client/third_party/oauth2client/_openssl_crypt.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""OpenSSL Crypto-related routines for oauth2client."""
-
-import base64
-
-from OpenSSL import crypto
-
-from oauth2client._helpers import _parse_pem_key
-from oauth2client._helpers import _to_bytes
-
-
-class OpenSSLVerifier(object):
-    """Verifies the signature on a message."""
-
-    def __init__(self, pubkey):
-        """Constructor.
-
-        Args:
-            pubkey: OpenSSL.crypto.PKey, The public key to verify with.
-        """
-        self._pubkey = pubkey
-
-    def verify(self, message, signature):
-        """Verifies a message against a signature.
-
-        Args:
-            message: string or bytes, The message to verify. If string, will be
-                     encoded to bytes as utf-8.
-            signature: string or bytes, The signature on the message. If string,
-                       will be encoded to bytes as utf-8.
-
-        Returns:
-            True if message was signed by the private key associated with the
-            public key that this object was constructed with.
-        """
-        message = _to_bytes(message, encoding='utf-8')
-        signature = _to_bytes(signature, encoding='utf-8')
-        try:
-            crypto.verify(self._pubkey, signature, message, 'sha256')
-            return True
-        except crypto.Error:
-            return False
-
-    @staticmethod
-    def from_string(key_pem, is_x509_cert):
-        """Construct a Verified instance from a string.
-
-        Args:
-            key_pem: string, public key in PEM format.
-            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
-                          is expected to be an RSA key in PEM format.
-
-        Returns:
-            Verifier instance.
-
-        Raises:
-            OpenSSL.crypto.Error: if the key_pem can't be parsed.
-        """
-        if is_x509_cert:
-            pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
-        else:
-            pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
-        return OpenSSLVerifier(pubkey)
-
-
-class OpenSSLSigner(object):
-    """Signs messages with a private key."""
-
-    def __init__(self, pkey):
-        """Constructor.
-
-        Args:
-            pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
-        """
-        self._key = pkey
-
-    def sign(self, message):
-        """Signs a message.
-
-        Args:
-            message: bytes, Message to be signed.
-
-        Returns:
-            string, The signature of the message for the given key.
-        """
-        message = _to_bytes(message, encoding='utf-8')
-        return crypto.sign(self._key, message, 'sha256')
-
-    @staticmethod
-    def from_string(key, password=b'notasecret'):
-        """Construct a Signer instance from a string.
-
-        Args:
-            key: string, private key in PKCS12 or PEM format.
-            password: string, password for the private key file.
-
-        Returns:
-            Signer instance.
-
-        Raises:
-            OpenSSL.crypto.Error if the key can't be parsed.
-        """
-        parsed_pem_key = _parse_pem_key(key)
-        if parsed_pem_key:
-            pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
-        else:
-            password = _to_bytes(password, encoding='utf-8')
-            pkey = crypto.load_pkcs12(key, password).get_privatekey()
-        return OpenSSLSigner(pkey)
-
-
-def pkcs12_key_as_pem(private_key_text, private_key_password):
-    """Convert the contents of a PKCS12 key to PEM using OpenSSL.
-
-    Args:
-        private_key_text: String. Private key.
-        private_key_password: String. Password for PKCS12.
-
-    Returns:
-        String. PEM contents of ``private_key_text``.
-    """
-    decoded_body = base64.b64decode(private_key_text)
-    private_key_password = _to_bytes(private_key_password)
-
-    pkcs12 = crypto.load_pkcs12(decoded_body, private_key_password)
-    return crypto.dump_privatekey(crypto.FILETYPE_PEM,
-                                  pkcs12.get_privatekey())
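
A hypothetical sign/verify round trip with the removed classes; `key.pem` and `cert.pem` are placeholder files assumed to hold the private key and matching X509 certificate:

```python
# Requires pyOpenSSL. The key/cert paths are illustrative assumptions and are
# expected to belong to the same RSA key pair.
from oauth2client._openssl_crypt import OpenSSLSigner, OpenSSLVerifier

with open('key.pem', 'rb') as f:
    signer = OpenSSLSigner.from_string(f.read())
signature = signer.sign('payload to protect')          # RSA with SHA-256

with open('cert.pem', 'rb') as f:
    verifier = OpenSSLVerifier.from_string(f.read(), is_x509_cert=True)
assert verifier.verify('payload to protect', signature)
assert not verifier.verify('tampered payload', signature)
```
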
diff --git a/tools/swarming_client/third_party/oauth2client/_pycrypto_crypt.py b/tools/swarming_client/third_party/oauth2client/_pycrypto_crypt.py
deleted file mode 100644
index 7b277aa..0000000
--- a/tools/swarming_client/third_party/oauth2client/_pycrypto_crypt.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""pyCrypto Crypto-related routines for oauth2client."""
-
-from Crypto.PublicKey import RSA
-from Crypto.Hash import SHA256
-from Crypto.Signature import PKCS1_v1_5
-from Crypto.Util.asn1 import DerSequence
-
-from oauth2client._helpers import _parse_pem_key
-from oauth2client._helpers import _to_bytes
-from oauth2client._helpers import _urlsafe_b64decode
-
-
-class PyCryptoVerifier(object):
-    """Verifies the signature on a message."""
-
-    def __init__(self, pubkey):
-        """Constructor.
-
-        Args:
-            pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify
-            with.
-        """
-        self._pubkey = pubkey
-
-    def verify(self, message, signature):
-        """Verifies a message against a signature.
-
-        Args:
-            message: string or bytes, The message to verify. If string, will be
-                     encoded to bytes as utf-8.
-            signature: string or bytes, The signature on the message.
-
-        Returns:
-            True if message was signed by the private key associated with the
-            public key that this object was constructed with.
-        """
-        message = _to_bytes(message, encoding='utf-8')
-        return PKCS1_v1_5.new(self._pubkey).verify(
-            SHA256.new(message), signature)
-
-    @staticmethod
-    def from_string(key_pem, is_x509_cert):
-        """Construct a Verified instance from a string.
-
-        Args:
-            key_pem: string, public key in PEM format.
-            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
-                          is expected to be an RSA key in PEM format.
-
-        Returns:
-            Verifier instance.
-        """
-        if is_x509_cert:
-            key_pem = _to_bytes(key_pem)
-            pemLines = key_pem.replace(b' ', b'').split()
-            certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1]))
-            certSeq = DerSequence()
-            certSeq.decode(certDer)
-            tbsSeq = DerSequence()
-            tbsSeq.decode(certSeq[0])
-            pubkey = RSA.importKey(tbsSeq[6])
-        else:
-            pubkey = RSA.importKey(key_pem)
-        return PyCryptoVerifier(pubkey)
-
-
-class PyCryptoSigner(object):
-    """Signs messages with a private key."""
-
-    def __init__(self, pkey):
-        """Constructor.
-
-        Args:
-            pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with.
-        """
-        self._key = pkey
-
-    def sign(self, message):
-        """Signs a message.
-
-        Args:
-            message: string, Message to be signed.
-
-        Returns:
-            string, The signature of the message for the given key.
-        """
-        message = _to_bytes(message, encoding='utf-8')
-        return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))
-
-    @staticmethod
-    def from_string(key, password='notasecret'):
-        """Construct a Signer instance from a string.
-
-        Args:
-            key: string, private key in PEM format.
-            password: string, password for private key file. Unused for PEM
-                      files.
-
-        Returns:
-            Signer instance.
-
-        Raises:
-            NotImplementedError if the key isn't in PEM format.
-        """
-        parsed_pem_key = _parse_pem_key(key)
-        if parsed_pem_key:
-            pkey = RSA.importKey(parsed_pem_key)
-        else:
-            raise NotImplementedError(
-                'PKCS12 format is not supported by the PyCrypto library. '
-                'Try converting to a "PEM" '
-                '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > '
-                'privatekey.pem) '
-                'or using PyOpenSSL if native code is an option.')
-        return PyCryptoSigner(pkey)
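
The PyCrypto variant follows the same shape but, as its `from_string` docstring notes, only accepts PEM keys; another hypothetical sketch with placeholder paths:

```python
# Requires PyCrypto. key.pem / public_key.pem are illustrative assumptions;
# PKCS12 input would raise NotImplementedError here.
from oauth2client._pycrypto_crypt import PyCryptoSigner, PyCryptoVerifier

with open('key.pem', 'rb') as f:
    signer = PyCryptoSigner.from_string(f.read())
signature = signer.sign('payload to protect')

with open('public_key.pem', 'rb') as f:
    verifier = PyCryptoVerifier.from_string(f.read(), is_x509_cert=False)
assert verifier.verify('payload to protect', signature)
```
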
diff --git a/tools/swarming_client/third_party/oauth2client/client.py b/tools/swarming_client/third_party/oauth2client/client.py
deleted file mode 100644
index cd5959f..0000000
--- a/tools/swarming_client/third_party/oauth2client/client.py
+++ /dev/null
@@ -1,2242 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""An OAuth 2.0 client.
-
-Tools for interacting with OAuth 2.0 protected resources.
-"""
-
-import base64
-import collections
-import copy
-import datetime
-import json
-import logging
-import os
-import socket
-import sys
-import tempfile
-import time
-import shutil
-import six
-from six.moves import urllib
-
-import httplib2
-from oauth2client import GOOGLE_AUTH_URI
-from oauth2client import GOOGLE_DEVICE_URI
-from oauth2client import GOOGLE_REVOKE_URI
-from oauth2client import GOOGLE_TOKEN_URI
-from oauth2client import GOOGLE_TOKEN_INFO_URI
-from oauth2client._helpers import _from_bytes
-from oauth2client._helpers import _to_bytes
-from oauth2client._helpers import _urlsafe_b64decode
-from oauth2client import clientsecrets
-from oauth2client import util
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-HAS_OPENSSL = False
-HAS_CRYPTO = False
-try:
-    from oauth2client import crypt
-    HAS_CRYPTO = True
-    if crypt.OpenSSLVerifier is not None:
-        HAS_OPENSSL = True
-except ImportError:
-    pass
-
-
-logger = logging.getLogger(__name__)
-
-# Expiry is stored in RFC3339 UTC format
-EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
-
-# Which certs to use to validate id_tokens received.
-ID_TOKEN_VERIFICATION_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
-# This symbol previously had a typo in the name; we keep the old name
-# around for now, but will remove it in the future.
-ID_TOKEN_VERIFICATON_CERTS = ID_TOKEN_VERIFICATION_CERTS
-
-# Constant to use for the out of band OAuth 2.0 flow.
-OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob'
-
-# Google Data client libraries may need to set this to [401, 403].
-REFRESH_STATUS_CODES = [401]
-
-# The value representing user credentials.
-AUTHORIZED_USER = 'authorized_user'
-
-# The value representing service account credentials.
-SERVICE_ACCOUNT = 'service_account'
-
-# The environment variable pointing the file with local
-# Application Default Credentials.
-GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS'
-# The ~/.config subdirectory containing gcloud credentials. Intended
-# to be swapped out in tests.
-_CLOUDSDK_CONFIG_DIRECTORY = 'gcloud'
-# The environment variable name which can replace ~/.config if set.
-_CLOUDSDK_CONFIG_ENV_VAR = 'CLOUDSDK_CONFIG'
-
-# The error message we show users when we can't find the Application
-# Default Credentials.
-ADC_HELP_MSG = (
-    'The Application Default Credentials are not available. They are '
-    'available if running in Google Compute Engine. Otherwise, the '
-    'environment variable ' +
-    GOOGLE_APPLICATION_CREDENTIALS +
-    ' must be defined pointing to a file defining the credentials. See '
-    'https://developers.google.com/accounts/docs/'
-    'application-default-credentials for more information.')
-
-# The access token along with the seconds in which it expires.
-AccessTokenInfo = collections.namedtuple(
-    'AccessTokenInfo', ['access_token', 'expires_in'])
-
-DEFAULT_ENV_NAME = 'UNKNOWN'
-
-# If set to True, _get_environment skips the GCE check (_detect_gce_environment)
-NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False')
-
-_SERVER_SOFTWARE = 'SERVER_SOFTWARE'
-_GCE_METADATA_HOST = '169.254.169.254'
-_METADATA_FLAVOR_HEADER = 'Metadata-Flavor'
-_DESIRED_METADATA_FLAVOR = 'Google'
-
-
-class SETTINGS(object):
-    """Settings namespace for globally defined values."""
-    env_name = None
-
-
-class Error(Exception):
-    """Base error for this module."""
-
-
-class FlowExchangeError(Error):
-    """Error trying to exchange an authorization grant for an access token."""
-
-
-class AccessTokenRefreshError(Error):
-    """Error trying to refresh an expired access token."""
-
-
-class HttpAccessTokenRefreshError(AccessTokenRefreshError):
-    """Error (with HTTP status) trying to refresh an expired access token."""
-    def __init__(self, *args, **kwargs):
-        super(HttpAccessTokenRefreshError, self).__init__(*args)
-        self.status = kwargs.get('status')
-
-
-class TokenRevokeError(Error):
-    """Error trying to revoke a token."""
-
-
-class UnknownClientSecretsFlowError(Error):
-    """The client secrets file called for an unknown type of OAuth 2.0 flow."""
-
-
-class AccessTokenCredentialsError(Error):
-    """Having only the access_token means no refresh is possible."""
-
-
-class VerifyJwtTokenError(Error):
-    """Could not retrieve certificates for validation."""
-
-
-class NonAsciiHeaderError(Error):
-    """Header names and values must be ASCII strings."""
-
-
-class ApplicationDefaultCredentialsError(Error):
-    """Error retrieving the Application Default Credentials."""
-
-
-class OAuth2DeviceCodeError(Error):
-    """Error trying to retrieve a device code."""
-
-
-class CryptoUnavailableError(Error, NotImplementedError):
-    """Raised when a crypto library is required, but none is available."""
-
-
-def _abstract():
-    raise NotImplementedError('You need to override this function')
-
-
-class MemoryCache(object):
-    """httplib2 Cache implementation which only caches locally."""
-
-    def __init__(self):
-        self.cache = {}
-
-    def get(self, key):
-        return self.cache.get(key)
-
-    def set(self, key, value):
-        self.cache[key] = value
-
-    def delete(self, key):
-        self.cache.pop(key, None)
-
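
MemoryCache above satisfies the get/set/delete cache interface httplib2 expects, so it can back an Http instance without touching the filesystem; a minimal sketch:

```python
import httplib2

from oauth2client.client import MemoryCache  # the class defined just above

# Responses are cached in-process only; nothing is written to disk.
http = httplib2.Http(cache=MemoryCache())
```
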
-
-class Credentials(object):
-    """Base class for all Credentials objects.
-
-    Subclasses must define an authorize() method that applies the credentials
-    to an HTTP transport.
-
-    Subclasses must also specify a classmethod named 'from_json' that takes a
-    JSON string as input and returns an instantiated Credentials object.
-    """
-
-    NON_SERIALIZED_MEMBERS = ['store']
-
-    def authorize(self, http):
-        """Take an httplib2.Http instance (or equivalent) and authorizes it.
-
-        Authorizes it for the set of credentials, usually by replacing
-        http.request() with a method that adds in the appropriate headers and
-        then delegates to the original Http.request() method.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the refresh
-                  request.
-        """
-        _abstract()
-
-    def refresh(self, http):
-        """Forces a refresh of the access_token.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the refresh
-                  request.
-        """
-        _abstract()
-
-    def revoke(self, http):
-        """Revokes a refresh_token and makes the credentials void.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the revoke
-                  request.
-        """
-        _abstract()
-
-    def apply(self, headers):
-        """Add the authorization to the headers.
-
-        Args:
-            headers: dict, the headers to add the Authorization header to.
-        """
-        _abstract()
-
-    def _to_json(self, strip):
-        """Utility function that creates JSON repr. of a Credentials object.
-
-        Args:
-            strip: array, An array of names of members to not include in the
-                   JSON.
-
-        Returns:
-            string, a JSON representation of this instance, suitable to pass to
-            from_json().
-        """
-        t = type(self)
-        d = copy.copy(self.__dict__)
-        for member in strip:
-            if member in d:
-                del d[member]
-        if (d.get('token_expiry') and
-                isinstance(d['token_expiry'], datetime.datetime)):
-            d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
-        # Add in information we will need later to reconstitute this instance.
-        d['_class'] = t.__name__
-        d['_module'] = t.__module__
-        for key, val in d.items():
-            if isinstance(val, bytes):
-                d[key] = val.decode('utf-8')
-            if isinstance(val, set):
-                d[key] = list(val)
-        return json.dumps(d)
-
-    def to_json(self):
-        """Creating a JSON representation of an instance of Credentials.
-
-        Returns:
-            string, a JSON representation of this instance, suitable to pass to
-            from_json().
-        """
-        return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
-
-    @classmethod
-    def new_from_json(cls, s):
-        """Utility class method to instantiate a Credentials subclass from JSON.
-
-        Expects the JSON string to have been produced by to_json().
-
-        Args:
-            s: string or bytes, JSON from to_json().
-
-        Returns:
-            An instance of the subclass of Credentials that was serialized with
-            to_json().
-        """
-        json_string_as_unicode = _from_bytes(s)
-        data = json.loads(json_string_as_unicode)
-        # Find and call the right classmethod from_json() to restore
-        # the object.
-        module_name = data['_module']
-        try:
-            module_obj = __import__(module_name)
-        except ImportError:
-            # In case there's an object from the old package structure,
-            # update it
-            module_name = module_name.replace('.googleapiclient', '')
-            module_obj = __import__(module_name)
-
-        module_obj = __import__(module_name,
-                                fromlist=module_name.split('.')[:-1])
-        kls = getattr(module_obj, data['_class'])
-        from_json = getattr(kls, 'from_json')
-        return from_json(json_string_as_unicode)
-
-    @classmethod
-    def from_json(cls, unused_data):
-        """Instantiate a Credentials object from a JSON description of it.
-
-        The JSON should have been produced by calling .to_json() on the object.
-
-        Args:
-            unused_data: dict, A deserialized JSON object.
-
-        Returns:
-            An instance of a Credentials subclass.
-        """
-        return Credentials()
-
-
-class Flow(object):
-    """Base class for all Flow objects."""
-    pass
-
-
-class Storage(object):
-    """Base class for all Storage objects.
-
-    Store and retrieve a single credential. This class supports locking
-    such that multiple processes and threads can operate on a single
-    store.
-    """
-
-    def acquire_lock(self):
-        """Acquires any lock necessary to access this Storage.
-
-        This lock is not reentrant.
-        """
-        pass
-
-    def release_lock(self):
-        """Release the Storage lock.
-
-        Trying to release a lock that isn't held will result in a
-        RuntimeError.
-        """
-        pass
-
-    def locked_get(self):
-        """Retrieve credential.
-
-        The Storage lock must be held when this is called.
-
-        Returns:
-            oauth2client.client.Credentials
-        """
-        _abstract()
-
-    def locked_put(self, credentials):
-        """Write a credential.
-
-        The Storage lock must be held when this is called.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-        """
-        _abstract()
-
-    def locked_delete(self):
-        """Delete a credential.
-
-        The Storage lock must be held when this is called.
-        """
-        _abstract()
-
-    def get(self):
-        """Retrieve credential.
-
-        The Storage lock must *not* be held when this is called.
-
-        Returns:
-            oauth2client.client.Credentials
-        """
-        self.acquire_lock()
-        try:
-            return self.locked_get()
-        finally:
-            self.release_lock()
-
-    def put(self, credentials):
-        """Write a credential.
-
-        The Storage lock must *not* be held when this is called.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-        """
-        self.acquire_lock()
-        try:
-            self.locked_put(credentials)
-        finally:
-            self.release_lock()
-
-    def delete(self):
-        """Delete credential.
-
-        Frees any resources associated with storing the credential.
-        The Storage lock must *not* be held when this is called.
-
-        Returns:
-            None
-        """
-        self.acquire_lock()
-        try:
-            return self.locked_delete()
-        finally:
-            self.release_lock()
-
-
-def clean_headers(headers):
-    """Forces header keys and values to be strings, i.e not unicode.
-
-    The httplib module just concats the header keys and values in a way that
-    may make the message header a unicode string, which, if it then tries to
-    contatenate to a binary request body may result in a unicode decode error.
-
-    Args:
-        headers: dict, A dictionary of headers.
-
-    Returns:
-        The same dictionary but with all the keys converted to strings.
-    """
-    clean = {}
-    try:
-        for k, v in six.iteritems(headers):
-            if not isinstance(k, six.binary_type):
-                k = str(k)
-            if not isinstance(v, six.binary_type):
-                v = str(v)
-            clean[_to_bytes(k)] = _to_bytes(v)
-    except UnicodeEncodeError:
-        raise NonAsciiHeaderError(k, ': ', v)
-    return clean
-
-
-def _update_query_params(uri, params):
-    """Updates a URI with new query parameters.
-
-    Args:
-        uri: string, A valid URI, with potential existing query parameters.
-        params: dict, A dictionary of query parameters.
-
-    Returns:
-        The same URI but with the new query parameters added.
-    """
-    parts = urllib.parse.urlparse(uri)
-    query_params = dict(urllib.parse.parse_qsl(parts.query))
-    query_params.update(params)
-    new_parts = parts._replace(query=urllib.parse.urlencode(query_params))
-    return urllib.parse.urlunparse(new_parts)
-
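
_update_query_params() merges new parameters into an existing query string, as a quick illustration (result parameter order is not guaranteed):

```python
from oauth2client.client import _update_query_params  # helper defined just above

uri = _update_query_params('https://example.com/revoke?token=abc',
                           {'client_id': 'xyz'})
# e.g. 'https://example.com/revoke?token=abc&client_id=xyz' (order may vary)
```
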
-
-class OAuth2Credentials(Credentials):
-    """Credentials object for OAuth 2.0.
-
-    Credentials can be applied to an httplib2.Http object using the authorize()
-    method, which then adds the OAuth 2.0 access token to each request.
-
-    OAuth2Credentials objects may be safely pickled and unpickled.
-    """
-
-    @util.positional(8)
-    def __init__(self, access_token, client_id, client_secret, refresh_token,
-                 token_expiry, token_uri, user_agent, revoke_uri=None,
-                 id_token=None, token_response=None, scopes=None,
-                 token_info_uri=None):
-        """Create an instance of OAuth2Credentials.
-
-        This constructor is not usually called by the user, instead
-        OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
-
-        Args:
-            access_token: string, access token.
-            client_id: string, client identifier.
-            client_secret: string, client secret.
-            refresh_token: string, refresh token.
-            token_expiry: datetime, when the access_token expires.
-            token_uri: string, URI of token endpoint.
-            user_agent: string, The HTTP User-Agent to provide for this
-                        application.
-            revoke_uri: string, URI for revoke endpoint. Defaults to None; a
-                        token can't be revoked if this is None.
-            id_token: object, The identity of the resource owner.
-            token_response: dict, the decoded response to the token request.
-                            None if a token hasn't been requested yet. Stored
-                            because some providers (e.g. wordpress.com) include
-                            extra fields that clients may want.
-            scopes: list, authorized scopes for these credentials.
-            token_info_uri: string, the URI for the token info endpoint.
-                            Defaults to None; scopes cannot be refreshed if
-                            this is None.
-
-        Notes:
-            store: callable, A callable that when passed a Credential
-                   will store the credential back to where it came from.
-                   This is needed to store the latest access_token if it
-                   has expired and been refreshed.
-        """
-        self.access_token = access_token
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.refresh_token = refresh_token
-        self.store = None
-        self.token_expiry = token_expiry
-        self.token_uri = token_uri
-        self.user_agent = user_agent
-        self.revoke_uri = revoke_uri
-        self.id_token = id_token
-        self.token_response = token_response
-        self.scopes = set(util.string_to_scopes(scopes or []))
-        self.token_info_uri = token_info_uri
-
-        # True if the credentials have been revoked or expired and can't be
-        # refreshed.
-        self.invalid = False
-
-    def authorize(self, http):
-        """Authorize an httplib2.Http instance with these credentials.
-
-        The modified http.request method will add authentication headers to
-        each request and will refresh access_tokens when a 401 is received on a
-        request. In addition the http.request method has a credentials
-        property, http.request.credentials, which is the Credentials object
-        that authorized it.
-
-        Args:
-            http: An instance of ``httplib2.Http`` or something that acts
-                  like it.
-
-        Returns:
-            A modified instance of http that was passed in.
-
-        Example::
-
-            h = httplib2.Http()
-            h = credentials.authorize(h)
-
-        You can't create a new OAuth subclass of httplib2.Authentication
-        because it never gets passed the absolute URI, which is needed for
-        signing. So instead we have to overload 'request' with a closure
-        that adds in the Authorization header and then calls the original
-        version of 'request()'.
-        """
-        request_orig = http.request
-
-        # The closure that will replace 'httplib2.Http.request'.
-        def new_request(uri, method='GET', body=None, headers=None,
-                        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                        connection_type=None):
-            if not self.access_token:
-                logger.info('Attempting refresh to obtain '
-                            'initial access_token')
-                self._refresh(request_orig)
-
-            # Clone and modify the request headers to add the appropriate
-            # Authorization header.
-            if headers is None:
-                headers = {}
-            else:
-                headers = dict(headers)
-            self.apply(headers)
-
-            if self.user_agent is not None:
-                if 'user-agent' in headers:
-                    headers['user-agent'] = (self.user_agent + ' ' +
-                                             headers['user-agent'])
-                else:
-                    headers['user-agent'] = self.user_agent
-
-            body_stream_position = None
-            if all(getattr(body, stream_prop, None) for stream_prop in
-                   ('read', 'seek', 'tell')):
-                body_stream_position = body.tell()
-
-            resp, content = request_orig(uri, method, body,
-                                         clean_headers(headers),
-                                         redirections, connection_type)
-
-            # A stored token may expire between the time it is retrieved and
-            # the time the request is made, so we may need to try twice.
-            max_refresh_attempts = 2
-            for refresh_attempt in range(max_refresh_attempts):
-                if resp.status not in REFRESH_STATUS_CODES:
-                    break
-                logger.info('Refreshing due to a %s (attempt %s/%s)',
-                            resp.status, refresh_attempt + 1,
-                            max_refresh_attempts)
-                self._refresh(request_orig)
-                self.apply(headers)
-                if body_stream_position is not None:
-                    body.seek(body_stream_position)
-
-                resp, content = request_orig(uri, method, body,
-                                             clean_headers(headers),
-                                             redirections, connection_type)
-
-            return (resp, content)
-
-        # Replace the request method with our own closure.
-        http.request = new_request
-
-        # Set credentials as a property of the request method.
-        setattr(http.request, 'credentials', self)
-
-        return http
-
-    def refresh(self, http):
-        """Forces a refresh of the access_token.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the refresh
-                  request.
-        """
-        self._refresh(http.request)
-
-    def revoke(self, http):
-        """Revokes a refresh_token and makes the credentials void.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the revoke
-                  request.
-        """
-        self._revoke(http.request)
-
-    def apply(self, headers):
-        """Add the authorization to the headers.
-
-        Args:
-            headers: dict, the headers to add the Authorization header to.
-        """
-        headers['Authorization'] = 'Bearer ' + self.access_token
-
-    def has_scopes(self, scopes):
-        """Verify that the credentials are authorized for the given scopes.
-
-        Returns True if the credentials authorized scopes contain all of the
-        scopes given.
-
-        Args:
-            scopes: list or string, the scopes to check.
-
-        Notes:
-            There are cases where the credentials are unaware of which scopes
-            are authorized. Notably, credentials obtained and stored before
-            this code was added will not have scopes, and AccessTokenCredentials
-            do not have scopes. In both cases, you can use retrieve_scopes() to
-            obtain the canonical set of scopes.
-        """
-        scopes = util.string_to_scopes(scopes)
-        return set(scopes).issubset(self.scopes)
-
-    def retrieve_scopes(self, http):
-        """Retrieves the canonical list of scopes for this access token.
-
-        Gets the scopes from the OAuth2 provider.
-
-        Args:
-            http: httplib2.Http, an http object to be used to make the refresh
-                  request.
-
-        Returns:
-            A set of strings containing the canonical list of scopes.
-        """
-        self._retrieve_scopes(http.request)
-        return self.scopes
-
-    def to_json(self):
-        return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
-
-    @classmethod
-    def from_json(cls, s):
-        """Instantiate a Credentials object from a JSON description of it.
-
-        The JSON should have been produced by calling .to_json() on the object.
-
-        Args:
-            s: string or bytes, JSON from to_json().
-
-        Returns:
-            An instance of a Credentials subclass.
-        """
-        s = _from_bytes(s)
-        data = json.loads(s)
-        if (data.get('token_expiry') and
-                not isinstance(data['token_expiry'], datetime.datetime)):
-            try:
-                data['token_expiry'] = datetime.datetime.strptime(
-                    data['token_expiry'], EXPIRY_FORMAT)
-            except ValueError:
-                data['token_expiry'] = None
-        retval = cls(
-            data['access_token'],
-            data['client_id'],
-            data['client_secret'],
-            data['refresh_token'],
-            data['token_expiry'],
-            data['token_uri'],
-            data['user_agent'],
-            revoke_uri=data.get('revoke_uri', None),
-            id_token=data.get('id_token', None),
-            token_response=data.get('token_response', None),
-            scopes=data.get('scopes', None),
-            token_info_uri=data.get('token_info_uri', None))
-        retval.invalid = data['invalid']
-        return retval
-
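
to_json() and from_json() round-trip a credential through a plain JSON string; a small sketch assuming an already-constructed OAuth2Credentials instance named `creds`:

```python
# creds is assumed to be an already-constructed OAuth2Credentials instance;
# to_json() drops the non-serialized 'store' member before dumping.
blob = creds.to_json()
restored = OAuth2Credentials.from_json(blob)
assert restored.access_token == creds.access_token
assert restored.invalid == creds.invalid
```
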
-    @property
-    def access_token_expired(self):
-        """True if the credential is expired or invalid.
-
-        If the token_expiry isn't set, we assume the token doesn't expire.
-        """
-        if self.invalid:
-            return True
-
-        if not self.token_expiry:
-            return False
-
-        now = datetime.datetime.utcnow()
-        if now >= self.token_expiry:
-            logger.info('access_token is expired. Now: %s, token_expiry: %s',
-                        now, self.token_expiry)
-            return True
-        return False
-
-    def get_access_token(self, http=None):
-        """Return the access token and its expiration information.
-
-        If the token does not exist, get one.
-        If the token expired, refresh it.
-        """
-        if not self.access_token or self.access_token_expired:
-            if not http:
-                http = httplib2.Http()
-            self.refresh(http)
-        return AccessTokenInfo(access_token=self.access_token,
-                               expires_in=self._expires_in())
-
-    def set_store(self, store):
-        """Set the Storage for the credential.
-
-        Args:
-            store: Storage, an implementation of Storage object.
-                   This is needed to store the latest access_token if it
-                   has expired and been refreshed. This implementation uses
-                   locking to check for updates before updating the
-                   access_token.
-        """
-        self.store = store
-
-    def _expires_in(self):
-        """Return the number of seconds until this token expires.
-
-        If token_expiry is in the past, this method will return 0, meaning the
-        token has already expired.
-
-        If token_expiry is None, this method will return None. Note that
-        returning 0 in such a case would not be fair: the token may still be
-        valid; we just don't know anything about it.
-        """
-        if self.token_expiry:
-            now = datetime.datetime.utcnow()
-            if self.token_expiry > now:
-                time_delta = self.token_expiry - now
-                # TODO(orestica): return time_delta.total_seconds()
-                # once dropping support for Python 2.6
-                return time_delta.days * 86400 + time_delta.seconds
-            else:
-                return 0
-
-    def _updateFromCredential(self, other):
-        """Update this Credential from another instance."""
-        self.__dict__.update(other.__getstate__())
-
-    def __getstate__(self):
-        """Trim the state down to something that can be pickled."""
-        d = copy.copy(self.__dict__)
-        del d['store']
-        return d
-
-    def __setstate__(self, state):
-        """Reconstitute the state of the object from being pickled."""
-        self.__dict__.update(state)
-        self.store = None
-
-    def _generate_refresh_request_body(self):
-        """Generate the body that will be used in the refresh request."""
-        body = urllib.parse.urlencode({
-            'grant_type': 'refresh_token',
-            'client_id': self.client_id,
-            'client_secret': self.client_secret,
-            'refresh_token': self.refresh_token,
-        })
-        return body
-
-    def _generate_refresh_request_headers(self):
-        """Generate the headers that will be used in the refresh request."""
-        headers = {
-            'content-type': 'application/x-www-form-urlencoded',
-        }
-
-        if self.user_agent is not None:
-            headers['user-agent'] = self.user_agent
-
-        return headers
-
-    def _refresh(self, http_request):
-        """Refreshes the access_token.
-
-        This method first checks by reading the Storage object if available.
-        If a refresh is still needed, it holds the Storage lock until the
-        refresh is completed.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          refresh request.
-
-        Raises:
-            HttpAccessTokenRefreshError: When the refresh fails.
-        """
-        if not self.store:
-            self._do_refresh_request(http_request)
-        else:
-            self.store.acquire_lock()
-            try:
-                new_cred = self.store.locked_get()
-
-                if (new_cred and not new_cred.invalid and
-                        new_cred.access_token != self.access_token and
-                        not new_cred.access_token_expired):
-                    logger.info('Updated access_token read from Storage')
-                    self._updateFromCredential(new_cred)
-                else:
-                    self._do_refresh_request(http_request)
-            finally:
-                self.store.release_lock()
-
-    def _do_refresh_request(self, http_request):
-        """Refresh the access_token using the refresh_token.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          refresh request.
-
-        Raises:
-            HttpAccessTokenRefreshError: When the refresh fails.
-        """
-        body = self._generate_refresh_request_body()
-        headers = self._generate_refresh_request_headers()
-
-        logger.info('Refreshing access_token')
-        resp, content = http_request(
-            self.token_uri, method='POST', body=body, headers=headers)
-        content = _from_bytes(content)
-        if resp.status == 200:
-            d = json.loads(content)
-            self.token_response = d
-            self.access_token = d['access_token']
-            self.refresh_token = d.get('refresh_token', self.refresh_token)
-            if 'expires_in' in d:
-                self.token_expiry = datetime.timedelta(
-                    seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
-            else:
-                self.token_expiry = None
-            # On temporary refresh errors, the user does not actually have to
-            # re-authorize, so we unflag here.
-            self.invalid = False
-            if self.store:
-                self.store.locked_put(self)
-        else:
-            # An {'error':...} response body means the token is expired or
-            # revoked, so we flag the credentials as such.
-            logger.info('Failed to retrieve access token: %s', content)
-            error_msg = 'Invalid response %s.' % resp['status']
-            try:
-                d = json.loads(content)
-                if 'error' in d:
-                    error_msg = d['error']
-                    if 'error_description' in d:
-                        error_msg += ': ' + d['error_description']
-                    self.invalid = True
-                    if self.store:
-                        self.store.locked_put(self)
-            except (TypeError, ValueError):
-                pass
-            raise HttpAccessTokenRefreshError(error_msg, status=resp.status)
-
-    def _revoke(self, http_request):
-        """Revokes this credential and deletes the stored copy (if it exists).
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          revoke request.
-        """
-        self._do_revoke(http_request, self.refresh_token or self.access_token)
-
-    def _do_revoke(self, http_request, token):
-        """Revokes this credential and deletes the stored copy (if it exists).
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          revoke request.
-            token: A string used as the token to be revoked. Can be either an
-                   access_token or refresh_token.
-
-        Raises:
-            TokenRevokeError: If the revoke request does not return with a
-                              200 OK.
-        """
-        logger.info('Revoking token')
-        query_params = {'token': token}
-        token_revoke_uri = _update_query_params(self.revoke_uri, query_params)
-        resp, content = http_request(token_revoke_uri)
-        if resp.status == 200:
-            self.invalid = True
-        else:
-            error_msg = 'Invalid response %s.' % resp.status
-            try:
-                d = json.loads(_from_bytes(content))
-                if 'error' in d:
-                    error_msg = d['error']
-            except (TypeError, ValueError):
-                pass
-            raise TokenRevokeError(error_msg)
-
-        if self.store:
-            self.store.delete()
-
-    def _retrieve_scopes(self, http_request):
-        """Retrieves the list of authorized scopes from the OAuth2 provider.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          token info request.
-        """
-        self._do_retrieve_scopes(http_request, self.access_token)
-
-    def _do_retrieve_scopes(self, http_request, token):
-        """Retrieves the list of authorized scopes from the OAuth2 provider.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          token info request.
-            token: A string used as the token to identify the credentials to
-                   the provider.
-
-        Raises:
-            Error: When refresh fails, indicating that the access token is
-                   invalid.
-        """
-        logger.info('Refreshing scopes')
-        query_params = {'access_token': token, 'fields': 'scope'}
-        token_info_uri = _update_query_params(self.token_info_uri,
-                                              query_params)
-        resp, content = http_request(token_info_uri)
-        content = _from_bytes(content)
-        if resp.status == 200:
-            d = json.loads(content)
-            self.scopes = set(util.string_to_scopes(d.get('scope', '')))
-        else:
-            error_msg = 'Invalid response %s.' % (resp.status,)
-            try:
-                d = json.loads(content)
-                if 'error_description' in d:
-                    error_msg = d['error_description']
-            except (TypeError, ValueError):
-                pass
-            raise Error(error_msg)
-
-
-class AccessTokenCredentials(OAuth2Credentials):
-    """Credentials object for OAuth 2.0.
-
-    Credentials can be applied to an httplib2.Http object using the
-    authorize() method, which then signs each request from that object
-    with the OAuth 2.0 access token. This set of credentials is for the
-    use case where you have acquired an OAuth 2.0 access_token from
-    another place such as a JavaScript client or another web
-    application, and wish to use it from Python. Because only the
-    access_token is present it cannot be refreshed and will in time
-    expire.
-
-    AccessTokenCredentials objects may be safely pickled and unpickled.
-
-    Usage::
-
-        credentials = AccessTokenCredentials('<an access token>',
-            'my-user-agent/1.0')
-        http = httplib2.Http()
-        http = credentials.authorize(http)
-
-    Raises:
-        AccessTokenCredentialsError: raised when the access_token expires or
-                                     is revoked.
-    """
-
-    def __init__(self, access_token, user_agent, revoke_uri=None):
-        """Create an instance of OAuth2Credentials
-
-        This is one of the few types if Credentials that you should contrust,
-        Credentials objects are usually instantiated by a Flow.
-
-        Args:
-            access_token: string, access token.
-            user_agent: string, The HTTP User-Agent to provide for this
-                        application.
-            revoke_uri: string, URI for revoke endpoint. Defaults to None; a
-                        token can't be revoked if this is None.
-        """
-        super(AccessTokenCredentials, self).__init__(
-            access_token,
-            None,
-            None,
-            None,
-            None,
-            None,
-            user_agent,
-            revoke_uri=revoke_uri)
-
-    @classmethod
-    def from_json(cls, s):
-        data = json.loads(_from_bytes(s))
-        retval = AccessTokenCredentials(
-            data['access_token'],
-            data['user_agent'])
-        return retval
-
-    def _refresh(self, http_request):
-        raise AccessTokenCredentialsError(
-            'The access_token is expired or invalid and can\'t be refreshed.')
-
-    def _revoke(self, http_request):
-        """Revokes the access_token and deletes the store if available.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          revoke request.
-        """
-        self._do_revoke(http_request, self.access_token)
-
-
-def _detect_gce_environment():
-    """Determine if the current environment is Compute Engine.
-
-    Returns:
-        Boolean indicating whether or not the current environment is Google
-        Compute Engine.
-    """
-    # NOTE: The explicit ``timeout`` is a workaround. The underlying
-    #       issue is that resolving an unknown host on some networks will take
-    #       20-30 seconds; making this timeout short fixes the issue, but
-    #       could lead to false negatives in the event that we are on GCE, but
-    #       the metadata resolution was particularly slow. The latter case is
-    #       "unlikely".
-    connection = six.moves.http_client.HTTPConnection(
-        _GCE_METADATA_HOST, timeout=1)
-
-    try:
-        headers = {_METADATA_FLAVOR_HEADER: _DESIRED_METADATA_FLAVOR}
-        connection.request('GET', '/', headers=headers)
-        response = connection.getresponse()
-        if response.status == 200:
-            return (response.getheader(_METADATA_FLAVOR_HEADER) ==
-                    _DESIRED_METADATA_FLAVOR)
-    except socket.error:  # socket.timeout or socket.error(64, 'Host is down')
-        logger.info('Timeout attempting to reach GCE metadata service.')
-        return False
-    finally:
-        connection.close()
-
-
-def _in_gae_environment():
-    """Detects if the code is running in the App Engine environment.
-
-    Returns:
-        True if running in the GAE environment, False otherwise.
-    """
-    if SETTINGS.env_name is not None:
-        return SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL')
-
-    try:
-        import google.appengine  # noqa: unused import
-    except ImportError:
-        pass
-    else:
-        server_software = os.environ.get(_SERVER_SOFTWARE, '')
-        if server_software.startswith('Google App Engine/'):
-            SETTINGS.env_name = 'GAE_PRODUCTION'
-            return True
-        elif server_software.startswith('Development/'):
-            SETTINGS.env_name = 'GAE_LOCAL'
-            return True
-
-    return False
-
-
-def _in_gce_environment():
-    """Detect if the code is running in the Compute Engine environment.
-
-    Returns:
-        True if running in the GCE environment, False otherwise.
-    """
-    if SETTINGS.env_name is not None:
-        return SETTINGS.env_name == 'GCE_PRODUCTION'
-
-    if NO_GCE_CHECK != 'True' and _detect_gce_environment():
-        SETTINGS.env_name = 'GCE_PRODUCTION'
-        return True
-    return False
-
-
-class GoogleCredentials(OAuth2Credentials):
-    """Application Default Credentials for use in calling Google APIs.
-
-    The Application Default Credentials are constructed as a function of
-    the environment in which the code is running.
-    More details can be found on this page:
-    https://developers.google.com/accounts/docs/application-default-credentials
-
-    Here is an example of how to use the Application Default Credentials for a
-    service that requires authentication::
-
-        from googleapiclient.discovery import build
-        from oauth2client.client import GoogleCredentials
-
-        credentials = GoogleCredentials.get_application_default()
-        service = build('compute', 'v1', credentials=credentials)
-
-        PROJECT = 'bamboo-machine-422'
-        ZONE = 'us-central1-a'
-        request = service.instances().list(project=PROJECT, zone=ZONE)
-        response = request.execute()
-
-        print(response)
-    """
-
-    def __init__(self, access_token, client_id, client_secret, refresh_token,
-                 token_expiry, token_uri, user_agent,
-                 revoke_uri=GOOGLE_REVOKE_URI):
-        """Create an instance of GoogleCredentials.
-
-        This constructor is not usually called by the user, instead
-        GoogleCredentials objects are instantiated by
-        GoogleCredentials.from_stream() or
-        GoogleCredentials.get_application_default().
-
-        Args:
-            access_token: string, access token.
-            client_id: string, client identifier.
-            client_secret: string, client secret.
-            refresh_token: string, refresh token.
-            token_expiry: datetime, when the access_token expires.
-            token_uri: string, URI of token endpoint.
-            user_agent: string, The HTTP User-Agent to provide for this
-                        application.
-            revoke_uri: string, URI for revoke endpoint. Defaults to
-                        GOOGLE_REVOKE_URI; a token can't be revoked if this
-                        is None.
-        """
-        super(GoogleCredentials, self).__init__(
-            access_token, client_id, client_secret, refresh_token,
-            token_expiry, token_uri, user_agent, revoke_uri=revoke_uri)
-
-    def create_scoped_required(self):
-        """Whether this Credentials object is scopeless.
-
-        create_scoped(scopes) method needs to be called in order to create
-        a Credentials object for API calls.
-        """
-        return False
-
-    def create_scoped(self, scopes):
-        """Create a Credentials object for the given scopes.
-
-        The Credentials type is preserved.
-        """
-        return self
-
-    @property
-    def serialization_data(self):
-        """Get the fields and values identifying the current credentials."""
-        return {
-            'type': 'authorized_user',
-            'client_id': self.client_id,
-            'client_secret': self.client_secret,
-            'refresh_token': self.refresh_token
-        }
-
-    @staticmethod
-    def _implicit_credentials_from_gae():
-        """Attempts to get implicit credentials in Google App Engine env.
-
-        If the current environment is not detected as App Engine, returns None,
-        indicating no Google App Engine credentials can be detected from the
-        current environment.
-
-        Returns:
-            None, if not in GAE, else an appengine.AppAssertionCredentials
-            object.
-        """
-        if not _in_gae_environment():
-            return None
-
-        return _get_application_default_credential_GAE()
-
-    @staticmethod
-    def _implicit_credentials_from_gce():
-        """Attempts to get implicit credentials in Google Compute Engine env.
-
-        If the current environment is not detected as Compute Engine, returns
-        None, indicating no Google Compute Engine credentials can be detected
-        from the current environment.
-
-        Returns:
-            None, if not in GCE, else a gce.AppAssertionCredentials object.
-        """
-        if not _in_gce_environment():
-            return None
-
-        return _get_application_default_credential_GCE()
-
-    @staticmethod
-    def _implicit_credentials_from_files():
-        """Attempts to get implicit credentials from local credential files.
-
-        First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
-        is set with a filename and then falls back to a configuration file (the
-        "well known" file) associated with the 'gcloud' command line tool.
-
-        Returns:
-            Credentials object associated with the
-            GOOGLE_APPLICATION_CREDENTIALS file or the "well known" file if
-            either exists. If neither file is defined, returns None,
-            indicating no credentials from a file can be detected from the
-            current environment.
-        """
-        credentials_filename = _get_environment_variable_file()
-        if not credentials_filename:
-            credentials_filename = _get_well_known_file()
-            if os.path.isfile(credentials_filename):
-                extra_help = (' (produced automatically when running'
-                              ' "gcloud auth login" command)')
-            else:
-                credentials_filename = None
-        else:
-            extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS +
-                          ' environment variable)')
-
-        if not credentials_filename:
-            return
-
-        # If we can read the credentials from a file, we don't need to know
-        # what environment we are in.
-        SETTINGS.env_name = DEFAULT_ENV_NAME
-
-        try:
-            return _get_application_default_credential_from_file(
-                credentials_filename)
-        except (ApplicationDefaultCredentialsError, ValueError) as error:
-            _raise_exception_for_reading_json(credentials_filename,
-                                              extra_help, error)
-
-    @classmethod
-    def _get_implicit_credentials(cls):
-        """Gets credentials implicitly from the environment.
-
-        Checks environment in order of precedence:
-        - Google App Engine (production and testing)
-        - Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to
-          a file with stored credentials information.
-        - Stored "well known" file associated with `gcloud` command line tool.
-        - Google Compute Engine production environment.
-
-        Raises:
-            ApplicationDefaultCredentialsError: raised when the credentials
-                                                fail to be retrieved.
-        """
-        # Environ checks (in order).
-        environ_checkers = [
-            cls._implicit_credentials_from_gae,
-            cls._implicit_credentials_from_files,
-            cls._implicit_credentials_from_gce,
-        ]
-
-        for checker in environ_checkers:
-            credentials = checker()
-            if credentials is not None:
-                return credentials
-
-        # If no credentials, fail.
-        raise ApplicationDefaultCredentialsError(ADC_HELP_MSG)
-
-    @staticmethod
-    def get_application_default():
-        """Get the Application Default Credentials for the current environment.
-
-        Raises:
-            ApplicationDefaultCredentialsError: raised when the credentials
-                                                fail to be retrieved.
-        """
-        return GoogleCredentials._get_implicit_credentials()
-
-    @staticmethod
-    def from_stream(credential_filename):
-        """Create a Credentials object by reading information from a file.
-
-        It returns an object of type GoogleCredentials.
-
-        Args:
-            credential_filename: the path to the file from where the
-                                 credentials are to be read
-
-        Raises:
-            ApplicationDefaultCredentialsError: raised when the credentials
-                                                fail to be retrieved.
-        """
-        if credential_filename and os.path.isfile(credential_filename):
-            try:
-                return _get_application_default_credential_from_file(
-                    credential_filename)
-            except (ApplicationDefaultCredentialsError, ValueError) as error:
-                extra_help = (' (provided as parameter to the '
-                              'from_stream() method)')
-                _raise_exception_for_reading_json(credential_filename,
-                                                  extra_help,
-                                                  error)
-        else:
-            raise ApplicationDefaultCredentialsError(
-                'The parameter passed to the from_stream() '
-                'method should point to a file.')
-
-
-def _save_private_file(filename, json_contents):
-    """Saves a file with read-write permissions on for the owner.
-
-    Args:
-        filename: String. Absolute path to file.
-        json_contents: JSON serializable object to be saved.
-    """
-    temp_filename = tempfile.mktemp()
-    file_desc = os.open(temp_filename, os.O_WRONLY | os.O_CREAT, 0o600)
-    with os.fdopen(file_desc, 'w') as file_handle:
-        json.dump(json_contents, file_handle, sort_keys=True,
-                  indent=2, separators=(',', ': '))
-    shutil.move(temp_filename, filename)
-
-
-def save_to_well_known_file(credentials, well_known_file=None):
-    """Save the provided GoogleCredentials to the well known file.
-
-    Args:
-        credentials: the credentials to be saved to the well known file;
-                     it should be an instance of GoogleCredentials
-        well_known_file: the name of the file where the credentials are to be
-                         saved; this parameter is supposed to be used for
-                         testing only
-    """
-    # TODO(orestica): move this method to tools.py
-    # once the argparse import gets fixed (it is not present in Python 2.6)
-
-    if well_known_file is None:
-        well_known_file = _get_well_known_file()
-
-    config_dir = os.path.dirname(well_known_file)
-    if not os.path.isdir(config_dir):
-        raise OSError('Config directory does not exist: %s' % config_dir)
-
-    credentials_data = credentials.serialization_data
-    _save_private_file(well_known_file, credentials_data)
-
-
-def _get_environment_variable_file():
-    application_default_credential_filename = (
-      os.environ.get(GOOGLE_APPLICATION_CREDENTIALS,
-                     None))
-
-    if application_default_credential_filename:
-        if os.path.isfile(application_default_credential_filename):
-            return application_default_credential_filename
-        else:
-            raise ApplicationDefaultCredentialsError(
-                'File ' + application_default_credential_filename +
-                ' (pointed by ' +
-                GOOGLE_APPLICATION_CREDENTIALS +
-                ' environment variable) does not exist!')
-
-
-def _get_well_known_file():
-    """Get the well known file produced by command 'gcloud auth login'."""
-    # TODO(orestica): Revisit this method once gcloud provides a better way
-    # of pinpointing the exact location of the file.
-
-    WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json'
-
-    default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR)
-    if default_config_dir is None:
-        if os.name == 'nt':
-            try:
-                default_config_dir = os.path.join(os.environ['APPDATA'],
-                                                  _CLOUDSDK_CONFIG_DIRECTORY)
-            except KeyError:
-                # This should never happen unless someone is really
-                # messing with things.
-                drive = os.environ.get('SystemDrive', 'C:')
-                default_config_dir = os.path.join(drive, '\\',
-                                                  _CLOUDSDK_CONFIG_DIRECTORY)
-        else:
-            default_config_dir = os.path.join(os.path.expanduser('~'),
-                                              '.config',
-                                              _CLOUDSDK_CONFIG_DIRECTORY)
-
-    return os.path.join(default_config_dir, WELL_KNOWN_CREDENTIALS_FILE)
-
-
-def _get_application_default_credential_from_file(filename):
-    """Build the Application Default Credentials from file."""
-
-    from oauth2client import service_account
-
-    # read the credentials from the file
-    with open(filename) as file_obj:
-        client_credentials = json.load(file_obj)
-
-    credentials_type = client_credentials.get('type')
-    if credentials_type == AUTHORIZED_USER:
-        required_fields = set(['client_id', 'client_secret', 'refresh_token'])
-    elif credentials_type == SERVICE_ACCOUNT:
-        required_fields = set(['client_id', 'client_email', 'private_key_id',
-                               'private_key'])
-    else:
-        raise ApplicationDefaultCredentialsError(
-            "'type' field should be defined (and have one of the '" +
-            AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)")
-
-    missing_fields = required_fields.difference(client_credentials.keys())
-
-    if missing_fields:
-        _raise_exception_for_missing_fields(missing_fields)
-
-    if client_credentials['type'] == AUTHORIZED_USER:
-        return GoogleCredentials(
-            access_token=None,
-            client_id=client_credentials['client_id'],
-            client_secret=client_credentials['client_secret'],
-            refresh_token=client_credentials['refresh_token'],
-            token_expiry=None,
-            token_uri=GOOGLE_TOKEN_URI,
-            user_agent='Python client library')
-    else:  # client_credentials['type'] == SERVICE_ACCOUNT
-        return service_account._ServiceAccountCredentials(
-            service_account_id=client_credentials['client_id'],
-            service_account_email=client_credentials['client_email'],
-            private_key_id=client_credentials['private_key_id'],
-            private_key_pkcs8_text=client_credentials['private_key'],
-            scopes=[])
-
-
-def _raise_exception_for_missing_fields(missing_fields):
-    raise ApplicationDefaultCredentialsError(
-        'The following field(s) must be defined: ' + ', '.join(missing_fields))
-
-
-def _raise_exception_for_reading_json(credential_file,
-                                      extra_help,
-                                      error):
-    raise ApplicationDefaultCredentialsError(
-      'An error was encountered while reading json file: ' +
-      credential_file + extra_help + ': ' + str(error))
-
-
-def _get_application_default_credential_GAE():
-    from oauth2client.appengine import AppAssertionCredentials
-
-    return AppAssertionCredentials([])
-
-
-def _get_application_default_credential_GCE():
-    from oauth2client.gce import AppAssertionCredentials
-
-    return AppAssertionCredentials([])
-
-
-class AssertionCredentials(GoogleCredentials):
-    """Abstract Credentials object used for OAuth 2.0 assertion grants.
-
-    This credential does not require a flow to instantiate because it
-    represents a two legged flow, and therefore has all of the required
-    information to generate and refresh its own access tokens. It must
-    be subclassed to generate the appropriate assertion string.
-
-    AssertionCredentials objects may be safely pickled and unpickled.
-    """
-
-    @util.positional(2)
-    def __init__(self, assertion_type, user_agent=None,
-                 token_uri=GOOGLE_TOKEN_URI,
-                 revoke_uri=GOOGLE_REVOKE_URI,
-                 **unused_kwargs):
-        """Constructor for AssertionFlowCredentials.
-
-        Args:
-            assertion_type: string, assertion type that will be declared to the
-                            auth server
-            user_agent: string, The HTTP User-Agent to provide for this
-                        application.
-            token_uri: string, URI for token endpoint. For convenience defaults
-                       to Google's endpoints but any OAuth 2.0 provider can be
-                       used.
-            revoke_uri: string, URI for revoke endpoint.
-        """
-        super(AssertionCredentials, self).__init__(
-            None,
-            None,
-            None,
-            None,
-            None,
-            token_uri,
-            user_agent,
-            revoke_uri=revoke_uri)
-        self.assertion_type = assertion_type
-
-    def _generate_refresh_request_body(self):
-        assertion = self._generate_assertion()
-
-        body = urllib.parse.urlencode({
-            'assertion': assertion,
-            'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
-        })
-
-        return body
-
-    def _generate_assertion(self):
-        """Generate assertion string to be used in the access token request."""
-        _abstract()
-
-    def _revoke(self, http_request):
-        """Revokes the access_token and deletes the store if available.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make the
-                          revoke request.
-        """
-        self._do_revoke(http_request, self.access_token)
-
-
-def _RequireCryptoOrDie():
-    """Ensure we have a crypto library, or throw CryptoUnavailableError.
-
-    The oauth2client.crypt module requires either PyCrypto or PyOpenSSL
-    to be available in order to function, but these are optional
-    dependencies.
-    """
-    if not HAS_CRYPTO:
-        raise CryptoUnavailableError('No crypto library available')
-
-
-class SignedJwtAssertionCredentials(AssertionCredentials):
-    """Credentials object used for OAuth 2.0 Signed JWT assertion grants.
-
-    This credential does not require a flow to instantiate because it
-    represents a two legged flow, and therefore has all of the required
-    information to generate and refresh its own access tokens.
-
-    SignedJwtAssertionCredentials requires either PyOpenSSL, or PyCrypto
-    2.6 or later. For App Engine you may also consider using
-    AppAssertionCredentials.
-    """
-
-    MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
-
-    @util.positional(4)
-    def __init__(self,
-                 service_account_name,
-                 private_key,
-                 scope,
-                 private_key_password='notasecret',
-                 user_agent=None,
-                 token_uri=GOOGLE_TOKEN_URI,
-                 revoke_uri=GOOGLE_REVOKE_URI,
-                 **kwargs):
-        """Constructor for SignedJwtAssertionCredentials.
-
-        Args:
-            service_account_name: string, id for account, usually an email
-                                  address.
-            private_key: string or bytes, private key in PKCS12 or PEM format.
-            scope: string or iterable of strings, scope(s) of the credentials
-                   being requested.
-            private_key_password: string, password for private_key, unused if
-                                  private_key is in PEM format.
-            user_agent: string, HTTP User-Agent to provide for this
-                        application.
-            token_uri: string, URI for token endpoint. For convenience defaults
-                       to Google's endpoints but any OAuth 2.0 provider can be
-                       used.
-            revoke_uri: string, URI for revoke endpoint.
-            kwargs: kwargs, Additional parameters to add to the JWT token, for
-                    example sub=joe@example.org.
-
-        Raises:
-            CryptoUnavailableError: if no crypto library is available.
-        """
-        _RequireCryptoOrDie()
-        super(SignedJwtAssertionCredentials, self).__init__(
-            None,
-            user_agent=user_agent,
-            token_uri=token_uri,
-            revoke_uri=revoke_uri,
-        )
-
-        self.scope = util.scopes_to_string(scope)
-
-        # Keep base64 encoded so it can be stored in JSON.
-        self.private_key = base64.b64encode(_to_bytes(private_key))
-        self.private_key_password = private_key_password
-        self.service_account_name = service_account_name
-        self.kwargs = kwargs
-
-    @classmethod
-    def from_json(cls, s):
-        data = json.loads(_from_bytes(s))
-        retval = SignedJwtAssertionCredentials(
-            data['service_account_name'],
-            base64.b64decode(data['private_key']),
-            data['scope'],
-            private_key_password=data['private_key_password'],
-            user_agent=data['user_agent'],
-            token_uri=data['token_uri'],
-            **data['kwargs']
-        )
-        retval.invalid = data['invalid']
-        retval.access_token = data['access_token']
-        return retval
-
-    def _generate_assertion(self):
-        """Generate the assertion that will be used in the request."""
-        now = int(time.time())
-        payload = {
-            'aud': self.token_uri,
-            'scope': self.scope,
-            'iat': now,
-            'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
-            'iss': self.service_account_name
-        }
-        payload.update(self.kwargs)
-        logger.debug(str(payload))
-
-        private_key = base64.b64decode(self.private_key)
-        return crypt.make_signed_jwt(crypt.Signer.from_string(
-            private_key, self.private_key_password), payload)
-
-# Only used in verify_id_token(), which always calls the same URI
-# for the certs.
-_cached_http = httplib2.Http(MemoryCache())
-
-
-@util.positional(2)
-def verify_id_token(id_token, audience, http=None,
-                    cert_uri=ID_TOKEN_VERIFICATION_CERTS):
-    """Verifies a signed JWT id_token.
-
-    This function requires PyOpenSSL and because of that it does not work on
-    App Engine.
-
-    Args:
-        id_token: string, A Signed JWT.
-        audience: string, The audience 'aud' that the token should be for.
-        http: httplib2.Http, instance to use to make the HTTP request. Callers
-              should supply an instance that has caching enabled.
-        cert_uri: string, URI of the certificates in JSON format to
-                  verify the JWT against.
-
-    Returns:
-        The deserialized JSON in the JWT.
-
-    Raises:
-        oauth2client.crypt.AppIdentityError: if the JWT fails to verify.
-        CryptoUnavailableError: if no crypto library is available.
-    """
-    _RequireCryptoOrDie()
-    if http is None:
-        http = _cached_http
-
-    resp, content = http.request(cert_uri)
-    if resp.status == 200:
-        certs = json.loads(_from_bytes(content))
-        return crypt.verify_signed_jwt_with_certs(id_token, certs, audience)
-    else:
-        raise VerifyJwtTokenError('Status code: %d' % resp.status)
-
-
-def _extract_id_token(id_token):
-    """Extract the JSON payload from a JWT.
-
-    Does the extraction without checking the signature.
-
-    Args:
-        id_token: string or bytestring, OAuth 2.0 id_token.
-
-    Returns:
-        object, The deserialized JSON payload.
-    """
-    if type(id_token) == bytes:
-        segments = id_token.split(b'.')
-    else:
-        segments = id_token.split(u'.')
-
-    if len(segments) != 3:
-        raise VerifyJwtTokenError(
-            'Wrong number of segments in token: %s' % id_token)
-
-    return json.loads(_from_bytes(_urlsafe_b64decode(segments[1])))
-
-
-def _parse_exchange_token_response(content):
-    """Parses response of an exchange token request.
-
-    Most providers return JSON but some (e.g. Facebook) return a
-    url-encoded string.
-
-    Args:
-        content: The body of a response
-
-    Returns:
-        Content as a dictionary object. Note that the dict could be empty,
-        i.e. {}. That basically indicates a failure.
-    """
-    resp = {}
-    content = _from_bytes(content)
-    try:
-        resp = json.loads(content)
-    except Exception:
-        # different JSON libs raise different exceptions,
-        # so we just do a catch-all here
-        resp = dict(urllib.parse.parse_qsl(content))
-
-    # some providers respond with 'expires', others with 'expires_in'
-    if resp and 'expires' in resp:
-        resp['expires_in'] = resp.pop('expires')
-
-    return resp
-
-
-@util.positional(4)
-def credentials_from_code(client_id, client_secret, scope, code,
-                          redirect_uri='postmessage', http=None,
-                          user_agent=None, token_uri=GOOGLE_TOKEN_URI,
-                          auth_uri=GOOGLE_AUTH_URI,
-                          revoke_uri=GOOGLE_REVOKE_URI,
-                          device_uri=GOOGLE_DEVICE_URI,
-                          token_info_uri=GOOGLE_TOKEN_INFO_URI):
-    """Exchanges an authorization code for an OAuth2Credentials object.
-
-    Args:
-        client_id: string, client identifier.
-        client_secret: string, client secret.
-        scope: string or iterable of strings, scope(s) to request.
-        code: string, An authorization code, most likely passed down from
-              the client
-        redirect_uri: string, this is generally set to 'postmessage' to match
-                      the redirect_uri that the client specified
-        http: httplib2.Http, optional http instance to use to do the fetch
-        token_uri: string, URI for token endpoint. For convenience defaults
-                   to Google's endpoints but any OAuth 2.0 provider can be
-                   used.
-        auth_uri: string, URI for authorization endpoint. For convenience
-                  defaults to Google's endpoints but any OAuth 2.0 provider
-                  can be used.
-        revoke_uri: string, URI for revoke endpoint. For convenience
-                    defaults to Google's endpoints but any OAuth 2.0 provider
-                    can be used.
-        device_uri: string, URI for device authorization endpoint. For
-                    convenience defaults to Google's endpoints but any OAuth
-                    2.0 provider can be used.
-
-    Returns:
-        An OAuth2Credentials object.
-
-    Raises:
-        FlowExchangeError: if the authorization code cannot be exchanged for an
-                           access token
-    """
-    flow = OAuth2WebServerFlow(client_id, client_secret, scope,
-                               redirect_uri=redirect_uri,
-                               user_agent=user_agent, auth_uri=auth_uri,
-                               token_uri=token_uri, revoke_uri=revoke_uri,
-                               device_uri=device_uri,
-                               token_info_uri=token_info_uri)
-
-    credentials = flow.step2_exchange(code, http=http)
-    return credentials
-
-
-@util.positional(3)
-def credentials_from_clientsecrets_and_code(filename, scope, code,
-                                            message=None,
-                                            redirect_uri='postmessage',
-                                            http=None,
-                                            cache=None,
-                                            device_uri=None):
-    """Returns OAuth2Credentials from a clientsecrets file and an auth code.
-
-    Will create the right kind of Flow based on the contents of the
-    clientsecrets file or will raise InvalidClientSecretsError for unknown
-    types of Flows.
-
-    Args:
-        filename: string, File name of clientsecrets.
-        scope: string or iterable of strings, scope(s) to request.
-        code: string, An authorization code, most likely passed down from
-              the client
-        message: string, A friendly string to display to the user if the
-                 clientsecrets file is missing or invalid. If message is
-                 provided then sys.exit will be called in the case of an error.
-                 If message is not provided then
-                 clientsecrets.InvalidClientSecretsError will be raised.
-        redirect_uri: string, this is generally set to 'postmessage' to match
-                      the redirect_uri that the client specified
-        http: httplib2.Http, optional http instance to use to do the fetch
-        cache: An optional cache service client that implements get() and set()
-               methods. See clientsecrets.loadfile() for details.
-        device_uri: string, OAuth 2.0 device authorization endpoint
-
-    Returns:
-        An OAuth2Credentials object.
-
-    Raises:
-        FlowExchangeError: if the authorization code cannot be exchanged for an
-                           access token
-        UnknownClientSecretsFlowError: if the file describes an unknown kind
-                                       of Flow.
-        clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
-                                                 invalid.
-    """
-    flow = flow_from_clientsecrets(filename, scope, message=message,
-                                   cache=cache, redirect_uri=redirect_uri,
-                                   device_uri=device_uri)
-    credentials = flow.step2_exchange(code, http=http)
-    return credentials
-
-
-class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
-        'device_code', 'user_code', 'interval', 'verification_url',
-        'user_code_expiry'))):
-    """Intermediate information the OAuth2 for devices flow."""
-
-    @classmethod
-    def FromResponse(cls, response):
-        """Create a DeviceFlowInfo from a server response.
-
-        The response should be a dict containing entries as described here:
-
-        http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
-        """
-        # device_code, user_code, and verification_url are required.
-        kwargs = {
-            'device_code': response['device_code'],
-            'user_code': response['user_code'],
-        }
-        # The response may list the verification address as either
-        # verification_url or verification_uri, so we check for both.
-        verification_url = response.get(
-            'verification_url', response.get('verification_uri'))
-        if verification_url is None:
-            raise OAuth2DeviceCodeError(
-                'No verification_url provided in server response')
-        kwargs['verification_url'] = verification_url
-        # expires_in and interval are optional.
-        kwargs.update({
-            'interval': response.get('interval'),
-            'user_code_expiry': None,
-        })
-        if 'expires_in' in response:
-            kwargs['user_code_expiry'] = (
-                datetime.datetime.now() +
-                datetime.timedelta(seconds=int(response['expires_in'])))
-        return cls(**kwargs)
-
-
-class OAuth2WebServerFlow(Flow):
-    """Does the Web Server Flow for OAuth 2.0.
-
-    OAuth2WebServerFlow objects may be safely pickled and unpickled.
-    """
-
-    @util.positional(4)
-    def __init__(self, client_id,
-                 client_secret=None,
-                 scope=None,
-                 redirect_uri=None,
-                 user_agent=None,
-                 auth_uri=GOOGLE_AUTH_URI,
-                 token_uri=GOOGLE_TOKEN_URI,
-                 revoke_uri=GOOGLE_REVOKE_URI,
-                 login_hint=None,
-                 device_uri=GOOGLE_DEVICE_URI,
-                 token_info_uri=GOOGLE_TOKEN_INFO_URI,
-                 authorization_header=None,
-                 **kwargs):
-        """Constructor for OAuth2WebServerFlow.
-
-        The kwargs argument is used to set extra query parameters on the
-        auth_uri. For example, the access_type and approval_prompt
-        query parameters can be set via kwargs.
-
-        Args:
-            client_id: string, client identifier.
-            client_secret: string client secret.
-            scope: string or iterable of strings, scope(s) of the credentials
-                   being requested.
-            redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob'
-                          for a non-web-based application, or a URI that
-                          handles the callback from the authorization server.
-            user_agent: string, HTTP User-Agent to provide for this
-                        application.
-            auth_uri: string, URI for authorization endpoint. For convenience
-                      defaults to Google's endpoints but any OAuth 2.0 provider
-                      can be used.
-            token_uri: string, URI for token endpoint. For convenience
-                       defaults to Google's endpoints but any OAuth 2.0
-                       provider can be used.
-            revoke_uri: string, URI for revoke endpoint. For convenience
-                        defaults to Google's endpoints but any OAuth 2.0
-                        provider can be used.
-            login_hint: string, Either an email address or domain. Passing this
-                        hint will either pre-fill the email box on the sign-in
-                        form or select the proper multi-login session, thereby
-                        simplifying the login flow.
-            device_uri: string, URI for device authorization endpoint. For
-                        convenience defaults to Google's endpoints but any
-                        OAuth 2.0 provider can be used.
-            authorization_header: string, For use with OAuth 2.0 providers that
-                                  require a client to authenticate using a
-                                  header value instead of passing client_secret
-                                  in the POST body.
-            **kwargs: dict, Optional keyword arguments passed through as
-                      extra query parameters on the OAuth calls.
-        """
-        # scope is a required argument, but to preserve backwards-compatibility
-        # we don't want to rearrange the positional arguments
-        if scope is None:
-            raise TypeError("The value of scope must not be None")
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.scope = util.scopes_to_string(scope)
-        self.redirect_uri = redirect_uri
-        self.login_hint = login_hint
-        self.user_agent = user_agent
-        self.auth_uri = auth_uri
-        self.token_uri = token_uri
-        self.revoke_uri = revoke_uri
-        self.device_uri = device_uri
-        self.token_info_uri = token_info_uri
-        self.authorization_header = authorization_header
-        self.params = {
-            'access_type': 'offline',
-            'response_type': 'code',
-        }
-        self.params.update(kwargs)
-
-    @util.positional(1)
-    def step1_get_authorize_url(self, redirect_uri=None, state=None):
-        """Returns a URI to redirect to the provider.
-
-        Args:
-            redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob'
-                          for a non-web-based application, or a URI that
-                          handles the callback from the authorization server.
-                          This parameter is deprecated, please move to passing
-                          the redirect_uri in via the constructor.
-            state: string, Opaque state string which is passed through the
-                   OAuth2 flow and returned to the client as a query parameter
-                   in the callback.
-
-        Returns:
-            A URI as a string to redirect the user to begin the authorization
-            flow.
-        """
-        if redirect_uri is not None:
-            logger.warning((
-                'The redirect_uri parameter for '
-                'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. '
-                'Please move to passing the redirect_uri in via the '
-                'constructor.'))
-            self.redirect_uri = redirect_uri
-
-        if self.redirect_uri is None:
-            raise ValueError('The value of redirect_uri must not be None.')
-
-        query_params = {
-            'client_id': self.client_id,
-            'redirect_uri': self.redirect_uri,
-            'scope': self.scope,
-        }
-        if state is not None:
-            query_params['state'] = state
-        if self.login_hint is not None:
-            query_params['login_hint'] = self.login_hint
-        query_params.update(self.params)
-        return _update_query_params(self.auth_uri, query_params)
-
-    @util.positional(1)
-    def step1_get_device_and_user_codes(self, http=None):
-        """Returns a user code and the verification URL where to enter it
-
-        Returns:
-            A user code as a string for the user to authorize the application
-            A URL as a string where the user has to enter the code
-        """
-        if self.device_uri is None:
-            raise ValueError('The value of device_uri must not be None.')
-
-        body = urllib.parse.urlencode({
-            'client_id': self.client_id,
-            'scope': self.scope,
-        })
-        headers = {
-            'content-type': 'application/x-www-form-urlencoded',
-        }
-
-        if self.user_agent is not None:
-            headers['user-agent'] = self.user_agent
-
-        if http is None:
-            http = httplib2.Http()
-
-        resp, content = http.request(self.device_uri, method='POST', body=body,
-                                     headers=headers)
-        content = _from_bytes(content)
-        if resp.status == 200:
-            try:
-                flow_info = json.loads(content)
-            except ValueError as e:
-                raise OAuth2DeviceCodeError(
-                    'Could not parse server response as JSON: "%s", '
-                    'error: "%s"' % (content, e))
-            return DeviceFlowInfo.FromResponse(flow_info)
-        else:
-            error_msg = 'Invalid response %s.' % resp.status
-            try:
-                d = json.loads(content)
-                if 'error' in d:
-                    error_msg += ' Error: %s' % d['error']
-            except ValueError:
-                # Couldn't decode a JSON response, stick with the
-                # default message.
-                pass
-            raise OAuth2DeviceCodeError(error_msg)
-
-    @util.positional(2)
-    def step2_exchange(self, code=None, http=None, device_flow_info=None):
-        """Exchanges a code for OAuth2Credentials.
-
-        Args:
-            code: string, a dict-like object, or None. For a non-device
-                  flow, this is either the response code as a string, or a
-                  dictionary of query parameters to the redirect_uri. For a
-                  device flow, this should be None.
-            http: httplib2.Http, optional http instance to use when fetching
-                  credentials.
-            device_flow_info: DeviceFlowInfo, return value from step1 in the
-                              case of a device flow.
-
-        Returns:
-            An OAuth2Credentials object that can be used to authorize requests.
-
-        Raises:
-            FlowExchangeError: if a problem occurred exchanging the code for a
-                               refresh_token.
-            ValueError: if code and device_flow_info are both provided or both
-                        missing.
-        """
-        if code is None and device_flow_info is None:
-            raise ValueError('No code or device_flow_info provided.')
-        if code is not None and device_flow_info is not None:
-            raise ValueError('Cannot provide both code and device_flow_info.')
-
-        if code is None:
-            code = device_flow_info.device_code
-        elif not isinstance(code, six.string_types):
-            if 'code' not in code:
-                raise FlowExchangeError(code.get(
-                    'error', 'No code was supplied in the query parameters.'))
-            code = code['code']
-
-        post_data = {
-            'client_id': self.client_id,
-            'code': code,
-            'scope': self.scope,
-        }
-        if self.client_secret is not None:
-            post_data['client_secret'] = self.client_secret
-        if device_flow_info is not None:
-            post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0'
-        else:
-            post_data['grant_type'] = 'authorization_code'
-            post_data['redirect_uri'] = self.redirect_uri
-        body = urllib.parse.urlencode(post_data)
-        headers = {
-            'content-type': 'application/x-www-form-urlencoded',
-        }
-        if self.authorization_header is not None:
-            headers['Authorization'] = self.authorization_header
-        if self.user_agent is not None:
-            headers['user-agent'] = self.user_agent
-
-        if http is None:
-            http = httplib2.Http()
-
-        resp, content = http.request(self.token_uri, method='POST', body=body,
-                                     headers=headers)
-        d = _parse_exchange_token_response(content)
-        if resp.status == 200 and 'access_token' in d:
-            access_token = d['access_token']
-            refresh_token = d.get('refresh_token', None)
-            if not refresh_token:
-                logger.info(
-                    'Received token response with no refresh_token. Consider '
-                    "reauthenticating with approval_prompt='force'.")
-            token_expiry = None
-            if 'expires_in' in d:
-                token_expiry = (
-                    datetime.datetime.utcnow() +
-                    datetime.timedelta(seconds=int(d['expires_in'])))
-
-            extracted_id_token = None
-            if 'id_token' in d:
-                extracted_id_token = _extract_id_token(d['id_token'])
-
-            logger.info('Successfully retrieved access token')
-            return OAuth2Credentials(
-                access_token, self.client_id, self.client_secret,
-                refresh_token, token_expiry, self.token_uri, self.user_agent,
-                revoke_uri=self.revoke_uri, id_token=extracted_id_token,
-                token_response=d, scopes=self.scope,
-                token_info_uri=self.token_info_uri)
-        else:
-            logger.info('Failed to retrieve access token: %s', content)
-            if 'error' in d:
-                # you never know what those providers got to say
-                error_msg = (str(d['error']) +
-                             str(d.get('error_description', '')))
-            else:
-                error_msg = 'Invalid response: %s.' % str(resp.status)
-            raise FlowExchangeError(error_msg)
-
-
-@util.positional(2)
-def flow_from_clientsecrets(filename, scope, redirect_uri=None,
-                            message=None, cache=None, login_hint=None,
-                            device_uri=None):
-    """Create a Flow from a clientsecrets file.
-
-    Will create the right kind of Flow based on the contents of the
-    clientsecrets file or will raise InvalidClientSecretsError for unknown
-    types of Flows.
-
-    Args:
-        filename: string, File name of client secrets.
-        scope: string or iterable of strings, scope(s) to request.
-        redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
-                      a non-web-based application, or a URI that handles the
-                      callback from the authorization server.
-        message: string, A friendly string to display to the user if the
-                 clientsecrets file is missing or invalid. If message is
-                 provided then sys.exit will be called in the case of an error.
-                 If message is not provided then
-                 clientsecrets.InvalidClientSecretsError will be raised.
-        cache: An optional cache service client that implements get() and set()
-               methods. See clientsecrets.loadfile() for details.
-        login_hint: string, Either an email address or domain. Passing this
-                    hint will either pre-fill the email box on the sign-in form
-                    or select the proper multi-login session, thereby
-                    simplifying the login flow.
-        device_uri: string, URI for device authorization endpoint. For
-                    convenience defaults to Google's endpoints but any
-                    OAuth 2.0 provider can be used.
-
-    Returns:
-        A Flow object.
-
-    Raises:
-        UnknownClientSecretsFlowError: if the file describes an unknown kind of
-                                       Flow.
-        clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
-                                                 invalid.
-    """
-    try:
-        client_type, client_info = clientsecrets.loadfile(filename,
-                                                          cache=cache)
-        if client_type in (clientsecrets.TYPE_WEB,
-                           clientsecrets.TYPE_INSTALLED):
-            constructor_kwargs = {
-                'redirect_uri': redirect_uri,
-                'auth_uri': client_info['auth_uri'],
-                'token_uri': client_info['token_uri'],
-                'login_hint': login_hint,
-            }
-            revoke_uri = client_info.get('revoke_uri')
-            if revoke_uri is not None:
-                constructor_kwargs['revoke_uri'] = revoke_uri
-            if device_uri is not None:
-                constructor_kwargs['device_uri'] = device_uri
-            return OAuth2WebServerFlow(
-                client_info['client_id'], client_info['client_secret'],
-                scope, **constructor_kwargs)
-
-    except clientsecrets.InvalidClientSecretsError:
-        if message:
-            sys.exit(message)
-        else:
-            raise
-    else:
-        raise UnknownClientSecretsFlowError(
-            'This OAuth 2.0 flow is unsupported: %r' % client_type)
diff --git a/tools/swarming_client/third_party/oauth2client/clientsecrets.py b/tools/swarming_client/third_party/oauth2client/clientsecrets.py
deleted file mode 100644
index eba1fd9..0000000
--- a/tools/swarming_client/third_party/oauth2client/clientsecrets.py
+++ /dev/null
@@ -1,173 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for reading OAuth 2.0 client secret files.
-
-A client_secrets.json file contains all the information needed to interact with
-an OAuth 2.0 protected service.
-"""
-
-import json
-import six
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-# Properties that make a client_secrets.json file valid.
-TYPE_WEB = 'web'
-TYPE_INSTALLED = 'installed'
-
-VALID_CLIENT = {
-    TYPE_WEB: {
-        'required': [
-            'client_id',
-            'client_secret',
-            'redirect_uris',
-            'auth_uri',
-            'token_uri',
-        ],
-        'string': [
-            'client_id',
-            'client_secret',
-        ],
-    },
-    TYPE_INSTALLED: {
-        'required': [
-            'client_id',
-            'client_secret',
-            'redirect_uris',
-            'auth_uri',
-            'token_uri',
-        ],
-        'string': [
-            'client_id',
-            'client_secret',
-        ],
-    },
-}
-
-
-class Error(Exception):
-    """Base error for this module."""
-
-
-class InvalidClientSecretsError(Error):
-    """Format of ClientSecrets file is invalid."""
-
-
-def _validate_clientsecrets(clientsecrets_dict):
-    """Validate parsed client secrets from a file.
-
-    Args:
-        clientsecrets_dict: dict, a dictionary holding the client secrets.
-
-    Returns:
-        tuple, a string of the client type and the information parsed
-        from the file.
-    """
-    _INVALID_FILE_FORMAT_MSG = (
-        'Invalid file format. See '
-        'https://developers.google.com/api-client-library/'
-        'python/guide/aaa_client_secrets')
-
-    if clientsecrets_dict is None:
-        raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG)
-    try:
-        (client_type, client_info), = clientsecrets_dict.items()
-    except (ValueError, AttributeError):
-        raise InvalidClientSecretsError(
-            _INVALID_FILE_FORMAT_MSG + ' '
-            'Expected a JSON object with a single property for a "web" or '
-            '"installed" application')
-
-    if client_type not in VALID_CLIENT:
-        raise InvalidClientSecretsError(
-            'Unknown client type: %s.' % (client_type,))
-
-    for prop_name in VALID_CLIENT[client_type]['required']:
-        if prop_name not in client_info:
-            raise InvalidClientSecretsError(
-                'Missing property "%s" in a client type of "%s".' %
-                (prop_name, client_type))
-    for prop_name in VALID_CLIENT[client_type]['string']:
-        if client_info[prop_name].startswith('[['):
-            raise InvalidClientSecretsError(
-                'Property "%s" is not configured.' % prop_name)
-    return client_type, client_info
-
-
-def load(fp):
-    obj = json.load(fp)
-    return _validate_clientsecrets(obj)
-
-
-def loads(s):
-    obj = json.loads(s)
-    return _validate_clientsecrets(obj)
-
-
-def _loadfile(filename):
-    try:
-        with open(filename, 'r') as fp:
-            obj = json.load(fp)
-    except IOError:
-        raise InvalidClientSecretsError('File not found: "%s"' % filename)
-    return _validate_clientsecrets(obj)
-
-
-def loadfile(filename, cache=None):
-    """Loading of client_secrets JSON file, optionally backed by a cache.
-
-    Typical cache storage would be App Engine memcache service,
-    but you can pass in any other cache client that implements
-    these methods:
-
-    * ``get(key, namespace=ns)``
-    * ``set(key, value, namespace=ns)``
-
-    Usage::
-
-        # without caching
-        client_type, client_info = loadfile('secrets.json')
-        # using App Engine memcache service
-        from google.appengine.api import memcache
-        client_type, client_info = loadfile('secrets.json', cache=memcache)
-
-    Args:
-        filename: string, Path to a client_secrets.json file on a filesystem.
-        cache: An optional cache service client that implements get() and set()
-               methods. If not specified, the file is always loaded from
-               the filesystem.
-
-    Raises:
-        InvalidClientSecretsError: In case of a validation error or some
-                                   I/O failure. Can happen only on cache miss.
-
-    Returns:
-        (client_type, client_info) tuple, as _loadfile() normally would.
-        JSON contents are validated only during the first load. Cache hits are not
-        validated.
-    """
-    _SECRET_NAMESPACE = 'oauth2client:secrets#ns'
-
-    if not cache:
-        return _loadfile(filename)
-
-    obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
-    if obj is None:
-        client_type, client_info = _loadfile(filename)
-        obj = {client_type: client_info}
-        cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
-
-    return next(six.iteritems(obj))
diff --git a/tools/swarming_client/third_party/oauth2client/crypt.py b/tools/swarming_client/third_party/oauth2client/crypt.py
deleted file mode 100644
index c450c5c..0000000
--- a/tools/swarming_client/third_party/oauth2client/crypt.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Crypto-related routines for oauth2client."""
-
-import json
-import logging
-import time
-
-from oauth2client._helpers import _from_bytes
-from oauth2client._helpers import _json_encode
-from oauth2client._helpers import _to_bytes
-from oauth2client._helpers import _urlsafe_b64decode
-from oauth2client._helpers import _urlsafe_b64encode
-
-
-CLOCK_SKEW_SECS = 300  # 5 minutes in seconds
-AUTH_TOKEN_LIFETIME_SECS = 300  # 5 minutes in seconds
-MAX_TOKEN_LIFETIME_SECS = 86400  # 1 day in seconds
-
-logger = logging.getLogger(__name__)
-
-
-class AppIdentityError(Exception):
-    """Error to indicate crypto failure."""
-
-
-def _bad_pkcs12_key_as_pem(*args, **kwargs):
-    raise NotImplementedError('pkcs12_key_as_pem requires OpenSSL.')
-
-
-try:
-    from oauth2client._openssl_crypt import OpenSSLVerifier
-    from oauth2client._openssl_crypt import OpenSSLSigner
-    from oauth2client._openssl_crypt import pkcs12_key_as_pem
-except ImportError:  # pragma: NO COVER
-    OpenSSLVerifier = None
-    OpenSSLSigner = None
-    pkcs12_key_as_pem = _bad_pkcs12_key_as_pem
-
-try:
-    from oauth2client._pycrypto_crypt import PyCryptoVerifier
-    from oauth2client._pycrypto_crypt import PyCryptoSigner
-except ImportError:  # pragma: NO COVER
-    PyCryptoVerifier = None
-    PyCryptoSigner = None
-
-
-if OpenSSLSigner:
-    Signer = OpenSSLSigner
-    Verifier = OpenSSLVerifier
-elif PyCryptoSigner:  # pragma: NO COVER
-    Signer = PyCryptoSigner
-    Verifier = PyCryptoVerifier
-else:  # pragma: NO COVER
-    raise ImportError('No encryption library found. Please install either '
-                      'PyOpenSSL, or PyCrypto 2.6 or later')
-
-
-def make_signed_jwt(signer, payload):
-    """Make a signed JWT.
-
-    See http://self-issued.info/docs/draft-jones-json-web-token.html.
-
-    Args:
-        signer: crypt.Signer, Cryptographic signer.
-        payload: dict, Dictionary of data to convert to JSON and then sign.
-
-    Returns:
-        string, The JWT for the payload.
-    """
-    header = {'typ': 'JWT', 'alg': 'RS256'}
-
-    segments = [
-      _urlsafe_b64encode(_json_encode(header)),
-      _urlsafe_b64encode(_json_encode(payload)),
-    ]
-    signing_input = b'.'.join(segments)
-
-    signature = signer.sign(signing_input)
-    segments.append(_urlsafe_b64encode(signature))
-
-    logger.debug(str(segments))
-
-    return b'.'.join(segments)
-
-
-def _verify_signature(message, signature, certs):
-    """Verifies signed content using a list of certificates.
-
-    Args:
-        message: string or bytes, The message to verify.
-        signature: string or bytes, The signature on the message.
-        certs: iterable, certificates in PEM format.
-
-    Raises:
-        AppIdentityError: If none of the certificates can verify the message
-                          against the signature.
-    """
-    for pem in certs:
-        verifier = Verifier.from_string(pem, is_x509_cert=True)
-        if verifier.verify(message, signature):
-            return
-
-    # If we have not returned, no certificate confirms the signature.
-    raise AppIdentityError('Invalid token signature')
-
-
-def _check_audience(payload_dict, audience):
-    """Checks audience field from a JWT payload.
-
-    Does nothing if the passed in ``audience`` is null.
-
-    Args:
-        payload_dict: dict, A dictionary containing a JWT payload.
-        audience: string or NoneType, an audience to check for in
-                  the JWT payload.
-
-    Raises:
-        AppIdentityError: If there is no ``'aud'`` field in the payload
-                          dictionary but there is an ``audience`` to check.
-        AppIdentityError: If the ``'aud'`` field in the payload dictionary
-                          does not match the ``audience``.
-    """
-    if audience is None:
-        return
-
-    audience_in_payload = payload_dict.get('aud')
-    if audience_in_payload is None:
-        raise AppIdentityError('No aud field in token: %s' %
-                               (payload_dict,))
-    if audience_in_payload != audience:
-        raise AppIdentityError('Wrong recipient, %s != %s: %s' %
-                               (audience_in_payload, audience, payload_dict))
-
-
-def _verify_time_range(payload_dict):
-    """Verifies the issued at and expiration from a JWT payload.
-
-    Makes sure the current time (in UTC) falls between the issued at and
-    expiration for the JWT (with some skew allowed for via
-    ``CLOCK_SKEW_SECS``).
-
-    Args:
-        payload_dict: dict, A dictionary containing a JWT payload.
-
-    Raises:
-        AppIdentityError: If there is no ``'iat'`` field in the payload
-                          dictionary.
-        AppIdentityError: If there is no ``'exp'`` field in the payload
-                          dictionary.
-        AppIdentityError: If the JWT expiration is too far in the future (i.e.
-                          if the expiration would imply a token lifetime
-                          longer than what is allowed.)
-        AppIdentityError: If the token appears to have been issued in the
-                          future (up to clock skew).
-        AppIdentityError: If the token appears to have expired in the past
-                          (up to clock skew).
-    """
-    # Get the current time to use throughout.
-    now = int(time.time())
-
-    # Make sure issued at and expiration are in the payload.
-    issued_at = payload_dict.get('iat')
-    if issued_at is None:
-        raise AppIdentityError('No iat field in token: %s' % (payload_dict,))
-    expiration = payload_dict.get('exp')
-    if expiration is None:
-        raise AppIdentityError('No exp field in token: %s' % (payload_dict,))
-
-    # Make sure the expiration gives an acceptable token lifetime.
-    if expiration >= now + MAX_TOKEN_LIFETIME_SECS:
-        raise AppIdentityError('exp field too far in future: %s' %
-                               (payload_dict,))
-
-    # Make sure (up to clock skew) that the token wasn't issued in the future.
-    earliest = issued_at - CLOCK_SKEW_SECS
-    if now < earliest:
-        raise AppIdentityError('Token used too early, %d < %d: %s' %
-                               (now, earliest, payload_dict))
-    # Make sure (up to clock skew) that the token isn't already expired.
-    latest = expiration + CLOCK_SKEW_SECS
-    if now > latest:
-        raise AppIdentityError('Token used too late, %d > %d: %s' %
-                               (now, latest, payload_dict))
-
-
-def verify_signed_jwt_with_certs(jwt, certs, audience=None):
-    """Verify a JWT against public certs.
-
-    See http://self-issued.info/docs/draft-jones-json-web-token.html.
-
-    Args:
-        jwt: string, A JWT.
-        certs: dict, Dictionary whose values are public keys in PEM format.
-        audience: string, The audience, 'aud', that this JWT should contain. If
-                  None then the JWT's 'aud' parameter is not verified.
-
-    Returns:
-        dict, The deserialized JSON payload in the JWT.
-
-    Raises:
-        AppIdentityError: if any checks are failed.
-    """
-    jwt = _to_bytes(jwt)
-
-    if jwt.count(b'.') != 2:
-        raise AppIdentityError(
-            'Wrong number of segments in token: %s' % (jwt,))
-
-    header, payload, signature = jwt.split(b'.')
-    message_to_sign = header + b'.' + payload
-    signature = _urlsafe_b64decode(signature)
-
-    # Parse token.
-    payload_bytes = _urlsafe_b64decode(payload)
-    try:
-        payload_dict = json.loads(_from_bytes(payload_bytes))
-    except:
-        raise AppIdentityError('Can\'t parse token: %s' % (payload_bytes,))
-
-    # Verify that the signature matches the message.
-    _verify_signature(message_to_sign, signature, certs.values())
-
-    # Verify the issued at and created times in the payload.
-    _verify_time_range(payload_dict)
-
-    # Check audience.
-    _check_audience(payload_dict, audience)
-
-    return payload_dict
diff --git a/tools/swarming_client/third_party/oauth2client/file.py b/tools/swarming_client/third_party/oauth2client/file.py
deleted file mode 100644
index d0dd174..0000000
--- a/tools/swarming_client/third_party/oauth2client/file.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for OAuth.
-
-Utilities for making it easier to work with OAuth 2.0
-credentials.
-"""
-
-import os
-import threading
-
-from oauth2client.client import Credentials
-from oauth2client.client import Storage as BaseStorage
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-class CredentialsFileSymbolicLinkError(Exception):
-    """Credentials files must not be symbolic links."""
-
-
-class Storage(BaseStorage):
-    """Store and retrieve a single credential to and from a file."""
-
-    def __init__(self, filename):
-        self._filename = filename
-        self._lock = threading.Lock()
-
-    def _validate_file(self):
-        if os.path.islink(self._filename):
-            raise CredentialsFileSymbolicLinkError(
-                'File: %s is a symbolic link.' % self._filename)
-
-    def acquire_lock(self):
-        """Acquires any lock necessary to access this Storage.
-
-        This lock is not reentrant.
-        """
-        self._lock.acquire()
-
-    def release_lock(self):
-        """Release the Storage lock.
-
-        Trying to release a lock that isn't held will result in a
-        RuntimeError.
-        """
-        self._lock.release()
-
-    def locked_get(self):
-        """Retrieve Credential from file.
-
-        Returns:
-            oauth2client.client.Credentials
-
-        Raises:
-            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
-        """
-        credentials = None
-        self._validate_file()
-        try:
-            f = open(self._filename, 'rb')
-            content = f.read()
-            f.close()
-        except IOError:
-            return credentials
-
-        try:
-            credentials = Credentials.new_from_json(content)
-            credentials.set_store(self)
-        except ValueError:
-            pass
-
-        return credentials
-
-    def _create_file_if_needed(self):
-        """Create an empty file if necessary.
-
-        This method will not initialize the file. Instead it implements a
-        simple version of "touch" to ensure the file has been created.
-        """
-        if not os.path.exists(self._filename):
-            old_umask = os.umask(0o177)
-            try:
-                open(self._filename, 'a+b').close()
-            finally:
-                os.umask(old_umask)
-
-    def locked_put(self, credentials):
-        """Write Credentials to file.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-
-        Raises:
-            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
-        """
-        self._create_file_if_needed()
-        self._validate_file()
-        f = open(self._filename, 'w')
-        f.write(credentials.to_json())
-        f.close()
-
-    def locked_delete(self):
-        """Delete Credentials file.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-        """
-        os.unlink(self._filename)
diff --git a/tools/swarming_client/third_party/oauth2client/gce.py b/tools/swarming_client/third_party/oauth2client/gce.py
deleted file mode 100644
index 77b08f1..0000000
--- a/tools/swarming_client/third_party/oauth2client/gce.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for Google Compute Engine
-
-Utilities for making it easier to use OAuth 2.0 on Google Compute Engine.
-"""
-
-import json
-import logging
-from six.moves import urllib
-
-from oauth2client._helpers import _from_bytes
-from oauth2client import util
-from oauth2client.client import HttpAccessTokenRefreshError
-from oauth2client.client import AssertionCredentials
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-logger = logging.getLogger(__name__)
-
-# URI Template for the endpoint that returns access_tokens.
-META = ('http://metadata.google.internal/0.1/meta-data/service-accounts/'
-        'default/acquire{?scope}')
-
-
-class AppAssertionCredentials(AssertionCredentials):
-    """Credentials object for Compute Engine Assertion Grants
-
-    This object will allow a Compute Engine instance to identify itself to
-    Google and other OAuth 2.0 servers that can verify assertions. It can be
-    used for the purpose of accessing data stored under an account assigned to
-    the Compute Engine instance itself.
-
-    This credential does not require a flow to instantiate because it
-    represents a two legged flow, and therefore has all of the required
-    information to generate and refresh its own access tokens.
-    """
-
-    @util.positional(2)
-    def __init__(self, scope, **kwargs):
-        """Constructor for AppAssertionCredentials
-
-        Args:
-            scope: string or iterable of strings, scope(s) of the credentials
-                   being requested.
-        """
-        self.scope = util.scopes_to_string(scope)
-        self.kwargs = kwargs
-
-        # Assertion type is no longer used, but still in the
-        # parent class signature.
-        super(AppAssertionCredentials, self).__init__(None)
-
-    @classmethod
-    def from_json(cls, json_data):
-        data = json.loads(_from_bytes(json_data))
-        return AppAssertionCredentials(data['scope'])
-
-    def _refresh(self, http_request):
-        """Refreshes the access_token.
-
-        Skip all the storage hoops and just refresh using the API.
-
-        Args:
-            http_request: callable, a callable that matches the method
-                          signature of httplib2.Http.request, used to make
-                          the refresh request.
-
-        Raises:
-            HttpAccessTokenRefreshError: When the refresh fails.
-        """
-        query = '?scope=%s' % urllib.parse.quote(self.scope, '')
-        uri = META.replace('{?scope}', query)
-        response, content = http_request(uri)
-        content = _from_bytes(content)
-        if response.status == 200:
-            try:
-                d = json.loads(content)
-            except Exception as e:
-                raise HttpAccessTokenRefreshError(str(e),
-                                                  status=response.status)
-            self.access_token = d['accessToken']
-        else:
-            if response.status == 404:
-                content += (' This can occur if a VM was created'
-                            ' with no service account or scopes.')
-            raise HttpAccessTokenRefreshError(content, status=response.status)
-
-    @property
-    def serialization_data(self):
-        raise NotImplementedError(
-            'Cannot serialize credentials for GCE service accounts.')
-
-    def create_scoped_required(self):
-        return not self.scope
-
-    def create_scoped(self, scopes):
-        return AppAssertionCredentials(scopes, **self.kwargs)
diff --git a/tools/swarming_client/third_party/oauth2client/keyring_storage.py b/tools/swarming_client/third_party/oauth2client/keyring_storage.py
deleted file mode 100644
index 0a4c285..0000000
--- a/tools/swarming_client/third_party/oauth2client/keyring_storage.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A keyring based Storage.
-
-A Storage for Credentials that uses the keyring module.
-"""
-
-import threading
-
-import keyring
-
-from oauth2client.client import Credentials
-from oauth2client.client import Storage as BaseStorage
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-
-
-class Storage(BaseStorage):
-    """Store and retrieve a single credential to and from the keyring.
-
-    To use this module you must have the keyring module installed. See
-    <http://pypi.python.org/pypi/keyring/>. This is an optional module and is
-    not installed with oauth2client by default because it does not work on all
-    the platforms that oauth2client supports, such as Google App Engine.
-
-    The keyring module <http://pypi.python.org/pypi/keyring/> is a
-    cross-platform library for accessing the keyring capabilities of the local
-    system. The user will be prompted for their keyring password when this
-    module is used, and the manner in which the user is prompted will vary per
-    platform.
-
-    Usage::
-
-        from oauth2client.keyring_storage import Storage
-
-        s = Storage('name_of_application', 'user1')
-        credentials = s.get()
-
-    """
-
-    def __init__(self, service_name, user_name):
-        """Constructor.
-
-        Args:
-            service_name: string, The name of the service under which the
-                          credentials are stored.
-            user_name: string, The name of the user to store credentials for.
-        """
-        self._service_name = service_name
-        self._user_name = user_name
-        self._lock = threading.Lock()
-
-    def acquire_lock(self):
-        """Acquires any lock necessary to access this Storage.
-
-        This lock is not reentrant.
-        """
-        self._lock.acquire()
-
-    def release_lock(self):
-        """Release the Storage lock.
-
-        Trying to release a lock that isn't held will result in a
-        RuntimeError.
-        """
-        self._lock.release()
-
-    def locked_get(self):
-        """Retrieve Credential from file.
-
-        Returns:
-            oauth2client.client.Credentials
-        """
-        credentials = None
-        content = keyring.get_password(self._service_name, self._user_name)
-
-        if content is not None:
-            try:
-                credentials = Credentials.new_from_json(content)
-                credentials.set_store(self)
-            except ValueError:
-                pass
-
-        return credentials
-
-    def locked_put(self, credentials):
-        """Write Credentials to file.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-        """
-        keyring.set_password(self._service_name, self._user_name,
-                             credentials.to_json())
-
-    def locked_delete(self):
-        """Delete Credentials file.
-
-        Args:
-            credentials: Credentials, the credentials to store.
-        """
-        keyring.set_password(self._service_name, self._user_name, '')
diff --git a/tools/swarming_client/third_party/oauth2client/locked_file.py b/tools/swarming_client/third_party/oauth2client/locked_file.py
deleted file mode 100644
index 1028a7e..0000000
--- a/tools/swarming_client/third_party/oauth2client/locked_file.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Locked file interface that should work on Unix and Windows pythons.
-
-This module first tries to use fcntl locking to ensure serialized access
-to a file, then falls back on a lock file if that is unavailable.
-
-Usage::
-
-    f = LockedFile('filename', 'r+b', 'rb')
-    f.open_and_lock()
-    if f.is_locked():
-      print('Acquired filename with r+b mode')
-      f.file_handle().write('locked data')
-    else:
-      print('Acquired filename with rb mode')
-    f.unlock_and_close()
-
-"""
-
-from __future__ import print_function
-
-import errno
-import logging
-import os
-import time
-
-from oauth2client import util
-
-
-__author__ = 'cache@google.com (David T McWherter)'
-
-logger = logging.getLogger(__name__)
-
-
-class CredentialsFileSymbolicLinkError(Exception):
-    """Credentials files must not be symbolic links."""
-
-
-class AlreadyLockedException(Exception):
-    """Trying to lock a file that has already been locked by the LockedFile."""
-    pass
-
-
-def validate_file(filename):
-    if os.path.islink(filename):
-        raise CredentialsFileSymbolicLinkError(
-            'File: %s is a symbolic link.' % filename)
-
-
-class _Opener(object):
-    """Base class for different locking primitives."""
-
-    def __init__(self, filename, mode, fallback_mode):
-        """Create an Opener.
-
-        Args:
-            filename: string, The pathname of the file.
-            mode: string, The preferred mode to access the file with.
-            fallback_mode: string, The mode to use if locking fails.
-        """
-        self._locked = False
-        self._filename = filename
-        self._mode = mode
-        self._fallback_mode = fallback_mode
-        self._fh = None
-        self._lock_fd = None
-
-    def is_locked(self):
-        """Was the file locked."""
-        return self._locked
-
-    def file_handle(self):
-        """The file handle to the file. Valid only after opened."""
-        return self._fh
-
-    def filename(self):
-        """The filename that is being locked."""
-        return self._filename
-
-    def open_and_lock(self, timeout, delay):
-        """Open the file and lock it.
-
-        Args:
-            timeout: float, How long to try to lock for.
-            delay: float, How long to wait between retries.
-        """
-        pass
-
-    def unlock_and_close(self):
-        """Unlock and close the file."""
-        pass
-
-
-class _PosixOpener(_Opener):
-    """Lock files using Posix advisory lock files."""
-
-    def open_and_lock(self, timeout, delay):
-        """Open the file and lock it.
-
-        Tries to create a .lock file next to the file we're trying to open.
-
-        Args:
-            timeout: float, How long to try to lock for.
-            delay: float, How long to wait between retries.
-
-        Raises:
-            AlreadyLockedException: if the lock is already acquired.
-            IOError: if the open fails.
-            CredentialsFileSymbolicLinkError: if the file is a symbolic link.
-        """
-        if self._locked:
-            raise AlreadyLockedException('File %s is already locked' %
-                                         self._filename)
-        self._locked = False
-
-        validate_file(self._filename)
-        try:
-            self._fh = open(self._filename, self._mode)
-        except IOError as e:
-            # If we can't access with _mode, try _fallback_mode and don't lock.
-            if e.errno == errno.EACCES:
-                self._fh = open(self._filename, self._fallback_mode)
-                return
-
-        lock_filename = self._posix_lockfile(self._filename)
-        start_time = time.time()
-        while True:
-            try:
-                self._lock_fd = os.open(lock_filename,
-                                        os.O_CREAT | os.O_EXCL | os.O_RDWR)
-                self._locked = True
-                break
-
-            except OSError as e:
-                if e.errno != errno.EEXIST:
-                    raise
-                if (time.time() - start_time) >= timeout:
-                    logger.warn('Could not acquire lock %s in %s seconds',
-                                lock_filename, timeout)
-                    # Close the file and open in fallback_mode.
-                    if self._fh:
-                        self._fh.close()
-                    self._fh = open(self._filename, self._fallback_mode)
-                    return
-                time.sleep(delay)
-
-    def unlock_and_close(self):
-        """Unlock a file by removing the .lock file, and close the handle."""
-        if self._locked:
-            lock_filename = self._posix_lockfile(self._filename)
-            os.close(self._lock_fd)
-            os.unlink(lock_filename)
-            self._locked = False
-            self._lock_fd = None
-        if self._fh:
-            self._fh.close()
-
-    def _posix_lockfile(self, filename):
-        """The name of the lock file to use for posix locking."""
-        return '%s.lock' % filename
-
-
-try:
-    import fcntl
-
-    class _FcntlOpener(_Opener):
-        """Open, lock, and unlock a file using fcntl.lockf."""
-
-        def open_and_lock(self, timeout, delay):
-            """Open the file and lock it.
-
-            Args:
-                timeout: float, How long to try to lock for.
-                delay: float, How long to wait between retries
-
-            Raises:
-                AlreadyLockedException: if the lock is already acquired.
-                IOError: if the open fails.
-                CredentialsFileSymbolicLinkError: if the file is a symbolic
-                                                  link.
-            """
-            if self._locked:
-                raise AlreadyLockedException('File %s is already locked' %
-                                             self._filename)
-            start_time = time.time()
-
-            validate_file(self._filename)
-            try:
-                self._fh = open(self._filename, self._mode)
-            except IOError as e:
-                # If we can't access with _mode, try _fallback_mode and
-                # don't lock.
-                if e.errno in (errno.EPERM, errno.EACCES):
-                    self._fh = open(self._filename, self._fallback_mode)
-                    return
-
-            # We opened in _mode, try to lock the file.
-            while True:
-                try:
-                    fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX)
-                    self._locked = True
-                    return
-                except IOError as e:
-                    # If not retrying, then just pass on the error.
-                    if timeout == 0:
-                        raise
-                    if e.errno != errno.EACCES:
-                        raise
-                    # We could not acquire the lock. Try again.
-                    if (time.time() - start_time) >= timeout:
-                        logger.warn('Could not lock %s in %s seconds',
-                                    self._filename, timeout)
-                        if self._fh:
-                            self._fh.close()
-                        self._fh = open(self._filename, self._fallback_mode)
-                        return
-                    time.sleep(delay)
-
-        def unlock_and_close(self):
-            """Close and unlock the file using the fcntl.lockf primitive."""
-            if self._locked:
-                fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN)
-            self._locked = False
-            if self._fh:
-                self._fh.close()
-except ImportError:
-    _FcntlOpener = None
-
-
-try:
-    import pywintypes
-    import win32con
-    import win32file
-
-    class _Win32Opener(_Opener):
-        """Open, lock, and unlock a file using windows primitives."""
-
-        # Error #33:
-        #  'The process cannot access the file because another process'
-        FILE_IN_USE_ERROR = 33
-
-        # Error #158:
-        #  'The segment is already unlocked.'
-        FILE_ALREADY_UNLOCKED_ERROR = 158
-
-        def open_and_lock(self, timeout, delay):
-            """Open the file and lock it.
-
-            Args:
-                timeout: float, How long to try to lock for.
-                delay: float, How long to wait between retries
-
-            Raises:
-                AlreadyLockedException: if the lock is already acquired.
-                IOError: if the open fails.
-                CredentialsFileSymbolicLinkError: if the file is a symbolic
-                                                  link.
-            """
-            if self._locked:
-                raise AlreadyLockedException('File %s is already locked' %
-                                             self._filename)
-            start_time = time.time()
-
-            validate_file(self._filename)
-            try:
-                self._fh = open(self._filename, self._mode)
-            except IOError as e:
-                # If we can't access with _mode, try _fallback_mode
-                # and don't lock.
-                if e.errno == errno.EACCES:
-                    self._fh = open(self._filename, self._fallback_mode)
-                    return
-
-            # We opened in _mode, try to lock the file.
-            while True:
-                try:
-                    hfile = win32file._get_osfhandle(self._fh.fileno())
-                    win32file.LockFileEx(
-                        hfile,
-                        (win32con.LOCKFILE_FAIL_IMMEDIATELY |
-                         win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000,
-                        pywintypes.OVERLAPPED())
-                    self._locked = True
-                    return
-                except pywintypes.error as e:
-                    if timeout == 0:
-                        raise
-
-                    # If the error is not that the file is already
-                    # in use, raise.
-                    if e[0] != _Win32Opener.FILE_IN_USE_ERROR:
-                        raise
-
-                    # We could not acquire the lock. Try again.
-                    if (time.time() - start_time) >= timeout:
-                        logger.warn('Could not lock %s in %s seconds' % (
-                            self._filename, timeout))
-                        if self._fh:
-                            self._fh.close()
-                        self._fh = open(self._filename, self._fallback_mode)
-                        return
-                    time.sleep(delay)
-
-        def unlock_and_close(self):
-            """Close and unlock the file using the win32 primitive."""
-            if self._locked:
-                try:
-                    hfile = win32file._get_osfhandle(self._fh.fileno())
-                    win32file.UnlockFileEx(hfile, 0, -0x10000,
-                                           pywintypes.OVERLAPPED())
-                except pywintypes.error as e:
-                    if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR:
-                        raise
-            self._locked = False
-            if self._fh:
-                self._fh.close()
-except ImportError:
-    _Win32Opener = None
-
-
-class LockedFile(object):
-    """Represent a file that has exclusive access."""
-
-    @util.positional(4)
-    def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
-        """Construct a LockedFile.
-
-        Args:
-            filename: string, The path of the file to open.
-            mode: string, The mode to try to open the file with.
-            fallback_mode: string, The mode to use if locking fails.
-            use_native_locking: bool, Whether or not fcntl/win32 locking is
-                                used.
-        """
-        opener = None
-        if not opener and use_native_locking:
-            if _Win32Opener:
-                opener = _Win32Opener(filename, mode, fallback_mode)
-            if _FcntlOpener:
-                opener = _FcntlOpener(filename, mode, fallback_mode)
-
-        if not opener:
-            opener = _PosixOpener(filename, mode, fallback_mode)
-
-        self._opener = opener
-
-    def filename(self):
-        """Return the filename we were constructed with."""
-        return self._opener._filename
-
-    def file_handle(self):
-        """Return the file_handle to the opened file."""
-        return self._opener.file_handle()
-
-    def is_locked(self):
-        """Return whether we successfully locked the file."""
-        return self._opener.is_locked()
-
-    def open_and_lock(self, timeout=0, delay=0.05):
-        """Open the file, trying to lock it.
-
-        Args:
-            timeout: float, The number of seconds to try to acquire the lock.
-            delay: float, The number of seconds to wait between retry attempts.
-
-        Raises:
-            AlreadyLockedException: if the lock is already acquired.
-            IOError: if the open fails.
-        """
-        self._opener.open_and_lock(timeout, delay)
-
-    def unlock_and_close(self):
-        """Unlock and close a file."""
-        self._opener.unlock_and_close()
diff --git a/tools/swarming_client/third_party/oauth2client/multistore_file.py b/tools/swarming_client/third_party/oauth2client/multistore_file.py
deleted file mode 100644
index 5a12797..0000000
--- a/tools/swarming_client/third_party/oauth2client/multistore_file.py
+++ /dev/null
@@ -1,484 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Multi-credential file store with lock support.
-
-This module implements a JSON credential store where multiple
-credentials can be stored in one file. That file supports locking
-both in a single process and across processes.
-
-The credentials themselves are keyed off of:
-
-* client_id
-* user_agent
-* scope
-
-The format of the stored data is like so::
-
-    {
-      'file_version': 1,
-      'data': [
-          {
-              'key': {
-                  'clientId': '<client id>',
-                  'userAgent': '<user agent>',
-                  'scope': '<scope>'
-              },
-              'credential': {
-                  # JSON serialized Credentials.
-              }
-          }
-      ]
-    }
-
-"""
-
-import errno
-import json
-import logging
-import os
-import threading
-
-from oauth2client.client import Credentials
-from oauth2client.client import Storage as BaseStorage
-from oauth2client import util
-from oauth2client.locked_file import LockedFile
-
-
-__author__ = 'jbeda@google.com (Joe Beda)'
-
-logger = logging.getLogger(__name__)
-
-# A dict from 'filename'->_MultiStore instances
-_multistores = {}
-_multistores_lock = threading.Lock()
-
-
-class Error(Exception):
-    """Base error for this module."""
-
-
-class NewerCredentialStoreError(Error):
-    """The credential store is a newer version than supported."""
-
-
-@util.positional(4)
-def get_credential_storage(filename, client_id, user_agent, scope,
-                           warn_on_readonly=True):
-    """Get a Storage instance for a credential.
-
-    Args:
-        filename: The JSON file storing a set of credentials
-        client_id: The client_id for the credential
-        user_agent: The user agent for the credential
-        scope: string or iterable of strings, Scope(s) being requested
-        warn_on_readonly: if True, log a warning if the store is readonly
-
-    Returns:
-        An object derived from client.Storage for getting/setting the
-        credential.
-    """
-    # Recreate the legacy key with these specific parameters
-    key = {'clientId': client_id, 'userAgent': user_agent,
-           'scope': util.scopes_to_string(scope)}
-    return get_credential_storage_custom_key(
-      filename, key, warn_on_readonly=warn_on_readonly)
-
-
-@util.positional(2)
-def get_credential_storage_custom_string_key(filename, key_string,
-                                             warn_on_readonly=True):
-    """Get a Storage instance for a credential using a single string as a key.
-
-    Allows you to provide a string as a custom key that will be used for
-    credential storage and retrieval.
-
-    Args:
-        filename: The JSON file storing a set of credentials
-        key_string: A string to use as the key for storing this credential.
-        warn_on_readonly: if True, log a warning if the store is readonly
-
-    Returns:
-        An object derived from client.Storage for getting/setting the
-        credential.
-    """
-    # Create a key dictionary that can be used
-    key_dict = {'key': key_string}
-    return get_credential_storage_custom_key(
-      filename, key_dict, warn_on_readonly=warn_on_readonly)
-
-
-@util.positional(2)
-def get_credential_storage_custom_key(filename, key_dict,
-                                      warn_on_readonly=True):
-    """Get a Storage instance for a credential using a dictionary as a key.
-
-    Allows you to provide a dictionary as a custom key that will be used for
-    credential storage and retrieval.
-
-    Args:
-        filename: The JSON file storing a set of credentials
-        key_dict: A dictionary to use as the key for storing this credential.
-                  There is no ordering of the keys in the dictionary. Logically
-                  equivalent dictionaries will produce equivalent storage keys.
-        warn_on_readonly: if True, log a warning if the store is readonly
-
-    Returns:
-        An object derived from client.Storage for getting/setting the
-        credential.
-    """
-    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
-    key = util.dict_to_tuple_key(key_dict)
-    return multistore._get_storage(key)
-
-
-@util.positional(1)
-def get_all_credential_keys(filename, warn_on_readonly=True):
-    """Gets all the registered credential keys in the given Multistore.
-
-    Args:
-        filename: The JSON file storing a set of credentials
-        warn_on_readonly: if True, log a warning if the store is readonly
-
-    Returns:
-        A list of the credential keys present in the file.  They are returned
-        as dictionaries that can be passed into
-        get_credential_storage_custom_key to get the actual credentials.
-    """
-    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
-    multistore._lock()
-    try:
-        return multistore._get_all_credential_keys()
-    finally:
-        multistore._unlock()
-
-
-@util.positional(1)
-def _get_multistore(filename, warn_on_readonly=True):
-    """A helper method to initialize the multistore with proper locking.
-
-    Args:
-        filename: The JSON file storing a set of credentials
-        warn_on_readonly: if True, log a warning if the store is readonly
-
-    Returns:
-        A multistore object
-    """
-    filename = os.path.expanduser(filename)
-    _multistores_lock.acquire()
-    try:
-        multistore = _multistores.setdefault(
-            filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly))
-    finally:
-        _multistores_lock.release()
-    return multistore
-
-
-class _MultiStore(object):
-    """A file backed store for multiple credentials."""
-
-    @util.positional(2)
-    def __init__(self, filename, warn_on_readonly=True):
-        """Initialize the class.
-
-        This will create the file if necessary.
-        """
-        self._file = LockedFile(filename, 'r+', 'r')
-        self._thread_lock = threading.Lock()
-        self._read_only = False
-        self._warn_on_readonly = warn_on_readonly
-
-        self._create_file_if_needed()
-
-        # Cache of deserialized store. This is only valid after the
-        # _MultiStore is locked or _refresh_data_cache is called. This is
-        # of the form of:
-        #
-        # ((key, value), (key, value)...) -> OAuth2Credential
-        #
-        # If this is None, then the store hasn't been read yet.
-        self._data = None
-
-    class _Storage(BaseStorage):
-        """A Storage object that can read/write a single credential."""
-
-        def __init__(self, multistore, key):
-            self._multistore = multistore
-            self._key = key
-
-        def acquire_lock(self):
-            """Acquires any lock necessary to access this Storage.
-
-            This lock is not reentrant.
-            """
-            self._multistore._lock()
-
-        def release_lock(self):
-            """Release the Storage lock.
-
-            Trying to release a lock that isn't held will result in a
-            RuntimeError.
-            """
-            self._multistore._unlock()
-
-        def locked_get(self):
-            """Retrieve credential.
-
-            The Storage lock must be held when this is called.
-
-            Returns:
-                oauth2client.client.Credentials
-            """
-            credential = self._multistore._get_credential(self._key)
-            if credential:
-                credential.set_store(self)
-            return credential
-
-        def locked_put(self, credentials):
-            """Write a credential.
-
-            The Storage lock must be held when this is called.
-
-            Args:
-                credentials: Credentials, the credentials to store.
-            """
-            self._multistore._update_credential(self._key, credentials)
-
-        def locked_delete(self):
-            """Delete a credential.
-
-            The Storage lock must be held when this is called.
-
-            Args:
-                credentials: Credentials, the credentials to store.
-            """
-            self._multistore._delete_credential(self._key)
-
-    def _create_file_if_needed(self):
-        """Create an empty file if necessary.
-
-        This method will not initialize the file. Instead it implements a
-        simple version of "touch" to ensure the file has been created.
-        """
-        if not os.path.exists(self._file.filename()):
-            old_umask = os.umask(0o177)
-            try:
-                open(self._file.filename(), 'a+b').close()
-            finally:
-                os.umask(old_umask)
-
-    def _lock(self):
-        """Lock the entire multistore."""
-        self._thread_lock.acquire()
-        try:
-            self._file.open_and_lock()
-        except IOError as e:
-            if e.errno == errno.ENOSYS:
-                logger.warn('File system does not support locking the '
-                            'credentials file.')
-            elif e.errno == errno.ENOLCK:
-                logger.warn('File system is out of resources for writing the '
-                            'credentials file (is your disk full?).')
-            elif e.errno == errno.EDEADLK:
-                logger.warn('Lock contention on multistore file, opening '
-                            'in read-only mode.')
-            else:
-                raise
-        if not self._file.is_locked():
-            self._read_only = True
-            if self._warn_on_readonly:
-                logger.warn('The credentials file (%s) is not writable. '
-                            'Opening in read-only mode. Any refreshed '
-                            'credentials will only be '
-                            'valid for this run.', self._file.filename())
-        if os.path.getsize(self._file.filename()) == 0:
-            logger.debug('Initializing empty multistore file')
-            # The multistore is empty so write out an empty file.
-            self._data = {}
-            self._write()
-        elif not self._read_only or self._data is None:
-            # Only refresh the data if we are read/write or we haven't
-            # cached the data yet. If we are readonly, we assume it isn't
-            # changing out from under us and that we only have to read it
-            # once. This prevents us from whacking any new access keys that
-            # we have cached in memory but were unable to write out.
-            self._refresh_data_cache()
-
-    def _unlock(self):
-        """Release the lock on the multistore."""
-        self._file.unlock_and_close()
-        self._thread_lock.release()
-
-    def _locked_json_read(self):
-        """Get the raw content of the multistore file.
-
-        The multistore must be locked when this is called.
-
-        Returns:
-            The contents of the multistore decoded as JSON.
-        """
-        assert self._thread_lock.locked()
-        self._file.file_handle().seek(0)
-        return json.load(self._file.file_handle())
-
-    def _locked_json_write(self, data):
-        """Write a JSON serializable data structure to the multistore.
-
-        The multistore must be locked when this is called.
-
-        Args:
-            data: The data to be serialized and written.
-        """
-        assert self._thread_lock.locked()
-        if self._read_only:
-            return
-        self._file.file_handle().seek(0)
-        json.dump(data, self._file.file_handle(),
-                  sort_keys=True, indent=2, separators=(',', ': '))
-        self._file.file_handle().truncate()
-
-    def _refresh_data_cache(self):
-        """Refresh the contents of the multistore.
-
-        The multistore must be locked when this is called.
-
-        Raises:
-            NewerCredentialStoreError: Raised when a newer client has written
-            the store.
-        """
-        self._data = {}
-        try:
-            raw_data = self._locked_json_read()
-        except Exception:
-            logger.warn('Credential data store could not be loaded. '
-                        'Will ignore and overwrite.')
-            return
-
-        version = 0
-        try:
-            version = raw_data['file_version']
-        except Exception:
-            logger.warn('Missing version for credential data store. It may be '
-                        'corrupt or an old version. Overwriting.')
-        if version > 1:
-            raise NewerCredentialStoreError(
-                'Credential file has file_version of %d. '
-                'Only file_version of 1 is supported.' % version)
-
-        credentials = []
-        try:
-            credentials = raw_data['data']
-        except (TypeError, KeyError):
-            pass
-
-        for cred_entry in credentials:
-            try:
-                key, credential = self._decode_credential_from_json(cred_entry)
-                self._data[key] = credential
-            except:
-                # If something goes wrong loading a credential, just ignore it
-                logger.info('Error decoding credential, skipping',
-                            exc_info=True)
-
-    def _decode_credential_from_json(self, cred_entry):
-        """Load a credential from our JSON serialization.
-
-        Args:
-            cred_entry: A dict entry from the data member of our format
-
-        Returns:
-            (key, cred) where the key is the key tuple and the cred is the
-            OAuth2Credential object.
-        """
-        raw_key = cred_entry['key']
-        key = util.dict_to_tuple_key(raw_key)
-        credential = None
-        credential = Credentials.new_from_json(
-            json.dumps(cred_entry['credential']))
-        return (key, credential)
-
-    def _write(self):
-        """Write the cached data back out.
-
-        The multistore must be locked.
-        """
-        raw_data = {'file_version': 1}
-        raw_creds = []
-        raw_data['data'] = raw_creds
-        for (cred_key, cred) in self._data.items():
-            raw_key = dict(cred_key)
-            raw_cred = json.loads(cred.to_json())
-            raw_creds.append({'key': raw_key, 'credential': raw_cred})
-        self._locked_json_write(raw_data)
-
-    def _get_all_credential_keys(self):
-        """Gets all the registered credential keys in the multistore.
-
-        Returns:
-            A list of dictionaries corresponding to all the keys currently
-            registered
-        """
-        return [dict(key) for key in self._data.keys()]
-
-    def _get_credential(self, key):
-        """Get a credential from the multistore.
-
-        The multistore must be locked.
-
-        Args:
-            key: The key used to retrieve the credential
-
-        Returns:
-            The credential specified or None if not present
-        """
-        return self._data.get(key, None)
-
-    def _update_credential(self, key, cred):
-        """Update a credential and write the multistore.
-
-        This must be called when the multistore is locked.
-
-        Args:
-            key: The key used to retrieve the credential
-            cred: The OAuth2Credential to update/set
-        """
-        self._data[key] = cred
-        self._write()
-
-    def _delete_credential(self, key):
-        """Delete a credential and write the multistore.
-
-        This must be called when the multistore is locked.
-
-        Args:
-            key: The key used to retrieve the credential
-        """
-        try:
-            del self._data[key]
-        except KeyError:
-            pass
-        self._write()
-
-    def _get_storage(self, key):
-        """Get a Storage object to get/set a credential.
-
-        This Storage is a 'view' into the multistore.
-
-        Args:
-            key: The key used to retrieve the credential
-
-        Returns:
-            A Storage object that can be used to get/set this cred
-        """
-        return self._Storage(self, key)
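Note: the deleted multistore above serializes everything to one JSON document. _write() emits a top-level 'file_version' of 1 plus a 'data' list whose entries pair a key dict with the credential's own JSON form, and the read path above expects the same shape. A minimal sketch of that layout follows; the key fields and the credential payload are placeholders, not values the library mandates.

    # Illustrative sketch, not part of the diff: the JSON shape _write() produces.
    import json

    example_store = {
        'file_version': 1,
        'data': [
            {
                'key': {'clientId': 'example-client',
                        'scope': 'https://www.googleapis.com/auth/example'},
                # In practice this dict is whatever cred.to_json() produced.
                'credential': {'access_token': 'placeholder'},
            },
        ],
    }
    print(json.dumps(example_store, indent=2))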
diff --git a/tools/swarming_client/third_party/oauth2client/service_account.py b/tools/swarming_client/third_party/oauth2client/service_account.py
deleted file mode 100644
index 8d3dc65..0000000
--- a/tools/swarming_client/third_party/oauth2client/service_account.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""A service account credentials class.
-
-This credentials class is implemented on top of the rsa library.
-"""
-
-import base64
-import time
-
-from pyasn1.codec.ber import decoder
-from pyasn1_modules.rfc5208 import PrivateKeyInfo
-import rsa
-
-from oauth2client import GOOGLE_REVOKE_URI
-from oauth2client import GOOGLE_TOKEN_URI
-from oauth2client._helpers import _json_encode
-from oauth2client._helpers import _to_bytes
-from oauth2client._helpers import _urlsafe_b64encode
-from oauth2client import util
-from oauth2client.client import AssertionCredentials
-
-
-class _ServiceAccountCredentials(AssertionCredentials):
-    """Class representing a service account (signed JWT) credential."""
-
-    MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
-
-    def __init__(self, service_account_id, service_account_email,
-                 private_key_id, private_key_pkcs8_text, scopes,
-                 user_agent=None, token_uri=GOOGLE_TOKEN_URI,
-                 revoke_uri=GOOGLE_REVOKE_URI, **kwargs):
-
-        super(_ServiceAccountCredentials, self).__init__(
-            None, user_agent=user_agent, token_uri=token_uri,
-            revoke_uri=revoke_uri)
-
-        self._service_account_id = service_account_id
-        self._service_account_email = service_account_email
-        self._private_key_id = private_key_id
-        self._private_key = _get_private_key(private_key_pkcs8_text)
-        self._private_key_pkcs8_text = private_key_pkcs8_text
-        self._scopes = util.scopes_to_string(scopes)
-        self._user_agent = user_agent
-        self._token_uri = token_uri
-        self._revoke_uri = revoke_uri
-        self._kwargs = kwargs
-
-    def _generate_assertion(self):
-        """Generate the assertion that will be used in the request."""
-
-        header = {
-            'alg': 'RS256',
-            'typ': 'JWT',
-            'kid': self._private_key_id
-        }
-
-        now = int(time.time())
-        payload = {
-            'aud': self._token_uri,
-            'scope': self._scopes,
-            'iat': now,
-            'exp': now + _ServiceAccountCredentials.MAX_TOKEN_LIFETIME_SECS,
-            'iss': self._service_account_email
-        }
-        payload.update(self._kwargs)
-
-        first_segment = _urlsafe_b64encode(_json_encode(header))
-        second_segment = _urlsafe_b64encode(_json_encode(payload))
-        assertion_input = first_segment + b'.' + second_segment
-
-        # Sign the assertion.
-        rsa_bytes = rsa.pkcs1.sign(assertion_input, self._private_key,
-                                   'SHA-256')
-        signature = base64.urlsafe_b64encode(rsa_bytes).rstrip(b'=')
-
-        return assertion_input + b'.' + signature
-
-    def sign_blob(self, blob):
-        # Ensure that it is bytes
-        blob = _to_bytes(blob, encoding='utf-8')
-        return (self._private_key_id,
-                rsa.pkcs1.sign(blob, self._private_key, 'SHA-256'))
-
-    @property
-    def service_account_email(self):
-        return self._service_account_email
-
-    @property
-    def serialization_data(self):
-        return {
-            'type': 'service_account',
-            'client_id': self._service_account_id,
-            'client_email': self._service_account_email,
-            'private_key_id': self._private_key_id,
-            'private_key': self._private_key_pkcs8_text
-        }
-
-    def create_scoped_required(self):
-        return not self._scopes
-
-    def create_scoped(self, scopes):
-        return _ServiceAccountCredentials(self._service_account_id,
-                                          self._service_account_email,
-                                          self._private_key_id,
-                                          self._private_key_pkcs8_text,
-                                          scopes,
-                                          user_agent=self._user_agent,
-                                          token_uri=self._token_uri,
-                                          revoke_uri=self._revoke_uri,
-                                          **self._kwargs)
-
-
-def _get_private_key(private_key_pkcs8_text):
-    """Get an RSA private key object from a pkcs8 representation."""
-    private_key_pkcs8_text = _to_bytes(private_key_pkcs8_text)
-    der = rsa.pem.load_pem(private_key_pkcs8_text, 'PRIVATE KEY')
-    asn1_private_key, _ = decoder.decode(der, asn1Spec=PrivateKeyInfo())
-    return rsa.PrivateKey.load_pkcs1(
-        asn1_private_key.getComponentByName('privateKey').asOctets(),
-        format='DER')
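Note: the assertion that _generate_assertion() above signs is an RS256 JWT whose payload carries 'aud', 'scope', 'iat', 'exp' (capped by MAX_TOKEN_LIFETIME_SECS) and 'iss'. The sketch below rebuilds only the unsigned header.payload portion with placeholder values (the token URI, scope and e-mail are illustrative, not taken from this tree); the deleted class then appends '.' plus the base64url-encoded RSA-SHA256 signature.

    import base64
    import json
    import time

    def b64url(obj):
        # JSON-encode, then base64url-encode without padding, mirroring the
        # deleted _json_encode/_urlsafe_b64encode helpers.
        return base64.urlsafe_b64encode(json.dumps(obj).encode('utf-8')).rstrip(b'=')

    now = int(time.time())
    header = {'alg': 'RS256', 'typ': 'JWT', 'kid': 'example-key-id'}
    payload = {
        'aud': 'https://accounts.google.com/o/oauth2/token',  # placeholder token URI
        'scope': 'https://www.googleapis.com/auth/userinfo.email',
        'iat': now,
        'exp': now + 3600,  # MAX_TOKEN_LIFETIME_SECS
        'iss': 'robot@example.iam.gserviceaccount.com',
    }
    assertion_input = b64url(header) + b'.' + b64url(payload)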
diff --git a/tools/swarming_client/third_party/oauth2client/tools.py b/tools/swarming_client/third_party/oauth2client/tools.py
deleted file mode 100644
index 629866b..0000000
--- a/tools/swarming_client/third_party/oauth2client/tools.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Command-line tools for authenticating via OAuth 2.0
-
-Do the OAuth 2.0 Web Server dance for a command line application. Stores the
-generated credentials in a common file that is used by other example apps in
-the same directory.
-"""
-
-from __future__ import print_function
-
-import logging
-import socket
-import sys
-
-from six.moves import BaseHTTPServer
-from six.moves import urllib
-from six.moves import input
-
-from oauth2client import client
-from oauth2client import util
-
-
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = ['argparser', 'run_flow', 'message_if_missing']
-
-_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0
-
-To make this sample run you will need to populate the client_secrets.json file
-found at:
-
-   %s
-
-with information from the APIs Console <https://code.google.com/apis/console>.
-
-"""
-
-
-def _CreateArgumentParser():
-    try:
-        import argparse
-    except ImportError:
-        return None
-    parser = argparse.ArgumentParser(add_help=False)
-    parser.add_argument('--auth_host_name', default='localhost',
-                        help='Hostname when running a local web server.')
-    parser.add_argument('--noauth_local_webserver', action='store_true',
-                        default=False, help='Do not run a local web server.')
-    parser.add_argument('--auth_host_port', default=[8080, 8090], type=int,
-                        nargs='*', help='Port web server should listen on.')
-    parser.add_argument(
-        '--logging_level', default='ERROR',
-        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
-        help='Set the logging level of detail.')
-    return parser
-
-# argparser is an ArgumentParser that contains command-line options expected
-# by tools.run(). Pass it in as part of the 'parents' argument to your own
-# ArgumentParser.
-argparser = _CreateArgumentParser()
-
-
-class ClientRedirectServer(BaseHTTPServer.HTTPServer):
-    """A server to handle OAuth 2.0 redirects back to localhost.
-
-    Waits for a single request and parses the query parameters
-    into query_params and then stops serving.
-    """
-    query_params = {}
-
-
-class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    """A handler for OAuth 2.0 redirects back to localhost.
-
-    Waits for a single request and parses the query parameters
-    into the server's query_params and then stops serving.
-    """
-
-    def do_GET(self):
-        """Handle a GET request.
-
-        Parses the query parameters and prints a message
-        if the flow has completed. Note that we can't detect
-        if an error occurred.
-        """
-        self.send_response(200)
-        self.send_header("Content-type", "text/html")
-        self.end_headers()
-        query = self.path.split('?', 1)[-1]
-        query = dict(urllib.parse.parse_qsl(query))
-        self.server.query_params = query
-        self.wfile.write(
-            b"<html><head><title>Authentication Status</title></head>")
-        self.wfile.write(
-            b"<body><p>The authentication flow has completed.</p>")
-        self.wfile.write(b"</body></html>")
-
-    def log_message(self, format, *args):
-        """Do not log messages to stdout while running as cmd. line program."""
-
-
-@util.positional(3)
-def run_flow(flow, storage, flags, http=None):
-    """Core code for a command-line application.
-
-    The ``run()`` function is called from your application and runs
-    through all the steps to obtain credentials. It takes a ``Flow``
-    argument and attempts to open an authorization server page in the
-    user's default web browser. The server asks the user to grant your
-    application access to the user's data. If the user grants access,
-    the ``run()`` function returns new credentials. The new credentials
-    are also stored in the ``storage`` argument, which updates the file
-    associated with the ``Storage`` object.
-
-    It presumes it is run from a command-line application and supports the
-    following flags:
-
-        ``--auth_host_name`` (string, default: ``localhost``)
-           Host name to use when running a local web server to handle
-           redirects during OAuth authorization.
-
-        ``--auth_host_port`` (integer, default: ``[8080, 8090]``)
-           Port to use when running a local web server to handle redirects
-           during OAuth authorization. Repeat this option to specify a list
-           of values.
-
-        ``--[no]auth_local_webserver`` (boolean, default: ``True``)
-           Run a local web server to handle redirects during OAuth
-           authorization.
-
-    The tools module defines an ``ArgumentParser`` that already contains the
-    flag definitions that ``run()`` requires. You can pass that
-    ``ArgumentParser`` to your ``ArgumentParser`` constructor::
-
-        parser = argparse.ArgumentParser(
-            description=__doc__,
-            formatter_class=argparse.RawDescriptionHelpFormatter,
-            parents=[tools.argparser])
-        flags = parser.parse_args(argv)
-
-    Args:
-        flow: Flow, an OAuth 2.0 Flow to step through.
-        storage: Storage, a ``Storage`` to store the credential in.
-        flags: ``argparse.Namespace``, The command-line flags. This is the
-               object returned from calling ``parse_args()`` on
-               ``argparse.ArgumentParser`` as described above.
-        http: An instance of ``httplib2.Http.request`` or something that
-              acts like it.
-
-    Returns:
-        Credentials, the obtained credential.
-    """
-    logging.getLogger().setLevel(getattr(logging, flags.logging_level))
-    if not flags.noauth_local_webserver:
-        success = False
-        port_number = 0
-        for port in flags.auth_host_port:
-            port_number = port
-            try:
-                httpd = ClientRedirectServer((flags.auth_host_name, port),
-                                             ClientRedirectHandler)
-            except socket.error:
-                pass
-            else:
-                success = True
-                break
-        flags.noauth_local_webserver = not success
-        if not success:
-            print('Failed to start a local webserver listening '
-                  'on either port 8080')
-            print('or port 8090. Please check your firewall settings and locally')
-            print('running programs that may be blocking or using those ports.')
-            print()
-            print('Falling back to --noauth_local_webserver and continuing with')
-            print('authorization.')
-            print()
-
-    if not flags.noauth_local_webserver:
-        oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number)
-    else:
-        oauth_callback = client.OOB_CALLBACK_URN
-    flow.redirect_uri = oauth_callback
-    authorize_url = flow.step1_get_authorize_url()
-
-    if not flags.noauth_local_webserver:
-        import webbrowser
-        webbrowser.open(authorize_url, new=1, autoraise=True)
-        print('Your browser has been opened to visit:')
-        print()
-        print('    ' + authorize_url)
-        print()
-        print('If your browser is on a different machine then '
-              'exit and re-run this')
-        print('application with the command-line parameter ')
-        print()
-        print('  --noauth_local_webserver')
-        print()
-    else:
-        print('Go to the following link in your browser:')
-        print()
-        print('    ' + authorize_url)
-        print()
-
-    code = None
-    if not flags.noauth_local_webserver:
-        httpd.handle_request()
-        if 'error' in httpd.query_params:
-            sys.exit('Authentication request was rejected.')
-        if 'code' in httpd.query_params:
-            code = httpd.query_params['code']
-        else:
-            print('Failed to find "code" in the query parameters '
-                  'of the redirect.')
-            sys.exit('Try running with --noauth_local_webserver.')
-    else:
-        code = input('Enter verification code: ').strip()
-
-    try:
-        credential = flow.step2_exchange(code, http=http)
-    except client.FlowExchangeError as e:
-        sys.exit('Authentication has failed: %s' % e)
-
-    storage.put(credential)
-    credential.set_store(storage)
-    print('Authentication successful.')
-
-    return credential
-
-
-def message_if_missing(filename):
-    """Helpful message to display if the CLIENT_SECRETS file is missing."""
-    return _CLIENT_SECRETS_MESSAGE % filename
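Note: the run_flow() docstring above already spells out the expected call pattern; the sketch below puts it together end to end. flow_from_clientsecrets() and oauth2client.file.Storage come from sibling oauth2client modules rather than this file, and the file names and scope are placeholders.

    import argparse

    from oauth2client import client, file, tools

    parser = argparse.ArgumentParser(parents=[tools.argparser])
    flags = parser.parse_args([])  # or sys.argv[1:]
    flow = client.flow_from_clientsecrets(
        'client_secrets.json',
        scope='https://www.googleapis.com/auth/userinfo.email',
        message=tools.message_if_missing('client_secrets.json'))
    storage = file.Storage('credentials.dat')
    credentials = storage.get() or tools.run_flow(flow, storage, flags)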
diff --git a/tools/swarming_client/third_party/oauth2client/util.py b/tools/swarming_client/third_party/oauth2client/util.py
deleted file mode 100644
index 1150e2b..0000000
--- a/tools/swarming_client/third_party/oauth2client/util.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""Common utility library."""
-
-import functools
-import inspect
-import logging
-
-import six
-from six.moves import urllib
-
-
-__author__ = [
-    'rafek@google.com (Rafe Kaplan)',
-    'guido@google.com (Guido van Rossum)',
-]
-
-__all__ = [
-    'positional',
-    'POSITIONAL_WARNING',
-    'POSITIONAL_EXCEPTION',
-    'POSITIONAL_IGNORE',
-]
-
-logger = logging.getLogger(__name__)
-
-POSITIONAL_WARNING = 'WARNING'
-POSITIONAL_EXCEPTION = 'EXCEPTION'
-POSITIONAL_IGNORE = 'IGNORE'
-POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION,
-                            POSITIONAL_IGNORE])
-
-positional_parameters_enforcement = POSITIONAL_WARNING
-
-
-def positional(max_positional_args):
-    """A decorator to declare that only the first N arguments my be positional.
-
-    This decorator makes it easy to support Python 3 style keyword-only
-    parameters. For example, in Python 3 it is possible to write::
-
-        def fn(pos1, *, kwonly1=None, kwonly2=None):
-            ...
-
-    All named parameters after ``*`` must be a keyword::
-
-        fn(10, 'kw1', 'kw2')  # Raises exception.
-        fn(10, kwonly1='kw1')  # Ok.
-
-    Example
-    ^^^^^^^
-
-    To define a function like above, do::
-
-        @positional(1)
-        def fn(pos1, kwonly1=None, kwonly2=None):
-            ...
-
-    If no default value is provided to a keyword argument, it becomes a
-    required keyword argument::
-
-        @positional(0)
-        def fn(required_kw):
-            ...
-
-    This must be called with the keyword parameter::
-
-        fn()  # Raises exception.
-        fn(10)  # Raises exception.
-        fn(required_kw=10)  # Ok.
-
-    When defining instance or class methods always remember to account for
-    ``self`` and ``cls``::
-
-        class MyClass(object):
-
-            @positional(2)
-            def my_method(self, pos1, kwonly1=None):
-                ...
-
-            @classmethod
-            @positional(2)
-            def my_method(cls, pos1, kwonly1=None):
-                ...
-
-    The positional decorator behavior is controlled by
-    ``util.positional_parameters_enforcement``, which may be set to
-    ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
-    ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
-    nothing, respectively, if a declaration is violated.
-
-    Args:
-        max_positional_args: Maximum number of positional arguments. All
-                             parameters after this index must be
-                             keyword only.
-
-    Returns:
-        A decorator that prevents arguments after max_positional_args from
-        being used as positional parameters.
-
-    Raises:
-        TypeError: if a keyword-only argument is provided as a positional
-                   parameter, but only if
-                   util.positional_parameters_enforcement is set to
-                   POSITIONAL_EXCEPTION.
-    """
-
-    def positional_decorator(wrapped):
-        @functools.wraps(wrapped)
-        def positional_wrapper(*args, **kwargs):
-            if len(args) > max_positional_args:
-                plural_s = ''
-                if max_positional_args != 1:
-                    plural_s = 's'
-                message = ('%s() takes at most %d positional '
-                           'argument%s (%d given)' % (
-                               wrapped.__name__, max_positional_args,
-                               plural_s, len(args)))
-                if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
-                    raise TypeError(message)
-                elif positional_parameters_enforcement == POSITIONAL_WARNING:
-                    logger.warning(message)
-                else:  # IGNORE
-                    pass
-            return wrapped(*args, **kwargs)
-        return positional_wrapper
-
-    if isinstance(max_positional_args, six.integer_types):
-        return positional_decorator
-    else:
-        args, _, _, defaults = inspect.getargspec(max_positional_args)
-        return positional(len(args) - len(defaults))(max_positional_args)
-
-
-def scopes_to_string(scopes):
-    """Converts scope value to a string.
-
-    If scopes is a string then it is simply passed through. If scopes is an
-    iterable then a string is returned that is all the individual scopes
-    concatenated with spaces.
-
-    Args:
-        scopes: string or iterable of strings, the scopes.
-
-    Returns:
-        The scopes formatted as a single string.
-    """
-    if isinstance(scopes, six.string_types):
-        return scopes
-    else:
-        return ' '.join(scopes)
-
-
-def string_to_scopes(scopes):
-    """Converts stringifed scope value to a list.
-
-    If scopes is a list then it is simply passed through. If scopes is a
-    string then a list of each individual scope is returned.
-
-    Args:
-        scopes: a string or iterable of strings, the scopes.
-
-    Returns:
-        The scopes in a list.
-    """
-    if not scopes:
-        return []
-    if isinstance(scopes, six.string_types):
-        return scopes.split(' ')
-    else:
-        return scopes
-
-
-def dict_to_tuple_key(dictionary):
-    """Converts a dictionary to a tuple that can be used as an immutable key.
-
-    The resulting key is always sorted so that logically equivalent
-    dictionaries always produce an identical tuple for a key.
-
-    Args:
-        dictionary: the dictionary to use as the key.
-
-    Returns:
-        A tuple representing the dictionary in its naturally sorted ordering.
-    """
-    return tuple(sorted(dictionary.items()))
-
-
-def _add_query_parameter(url, name, value):
-    """Adds a query parameter to a url.
-
-    Replaces the current value if it already exists in the URL.
-
-    Args:
-        url: string, url to add the query parameter to.
-        name: string, query parameter name.
-        value: string, query parameter value.
-
-    Returns:
-        The updated URL. The URL is returned unchanged if value is None.
-    """
-    if value is None:
-        return url
-    else:
-        parsed = list(urllib.parse.urlparse(url))
-        q = dict(urllib.parse.parse_qsl(parsed[4]))
-        q[name] = value
-        parsed[4] = urllib.parse.urlencode(q)
-        return urllib.parse.urlunparse(parsed)
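Note: as a quick reference for the helpers above, a few illustrative round trips (expected results shown in comments; this is a sketch, not code from the tree):

    from oauth2client import util

    util.scopes_to_string(['a', 'b'])           # -> 'a b'
    util.string_to_scopes('a b')                # -> ['a', 'b']
    util.dict_to_tuple_key({'b': 2, 'a': 1})    # -> (('a', 1), ('b', 2))
    util._add_query_parameter('https://example.com/x?f=1', 'token', 'abc')
    #                                           -> 'https://example.com/x?f=1&token=abc'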
diff --git a/tools/swarming_client/third_party/oauth2client/xsrfutil.py b/tools/swarming_client/third_party/oauth2client/xsrfutil.py
deleted file mode 100644
index 10bbe3f..0000000
--- a/tools/swarming_client/third_party/oauth2client/xsrfutil.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#
-# Copyright 2014 the Melange authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helper methods for creating & verifying XSRF tokens."""
-
-import base64
-import binascii
-import hmac
-import time
-
-from oauth2client._helpers import _to_bytes
-from oauth2client import util
-
-__authors__ = [
-    '"Doug Coker" <dcoker@google.com>',
-    '"Joe Gregorio" <jcgregorio@google.com>',
-]
-
-# Delimiter character
-DELIMITER = b':'
-
-# 1 hour in seconds
-DEFAULT_TIMEOUT_SECS = 60 * 60
-
-
-@util.positional(2)
-def generate_token(key, user_id, action_id='', when=None):
-    """Generates a URL-safe token for the given user, action, time tuple.
-
-    Args:
-        key: secret key to use.
-        user_id: the user ID of the authenticated user.
-        action_id: a string identifier of the action they requested
-                   authorization for.
-        when: the time in seconds since the epoch at which the user was
-              authorized for this action. If not set the current time is used.
-
-    Returns:
-        A string XSRF protection token.
-    """
-    digester = hmac.new(_to_bytes(key, encoding='utf-8'))
-    digester.update(_to_bytes(str(user_id), encoding='utf-8'))
-    digester.update(DELIMITER)
-    digester.update(_to_bytes(action_id, encoding='utf-8'))
-    digester.update(DELIMITER)
-    when = _to_bytes(str(when or int(time.time())), encoding='utf-8')
-    digester.update(when)
-    digest = digester.digest()
-
-    token = base64.urlsafe_b64encode(digest + DELIMITER + when)
-    return token
-
-
-@util.positional(3)
-def validate_token(key, token, user_id, action_id="", current_time=None):
-    """Validates that the given token authorizes the user for the action.
-
-    Tokens are invalid if the time of issue is too old or if the token
-    does not match what generate_token outputs (i.e. the token was forged).
-
-    Args:
-        key: secret key to use.
-        token: a string of the token generated by generate_token.
-        user_id: the user ID of the authenticated user.
-        action_id: a string identifier of the action they requested
-                   authorization for.
-
-    Returns:
-        A boolean - True if the user is authorized for the action, False
-        otherwise.
-    """
-    if not token:
-        return False
-    try:
-        decoded = base64.urlsafe_b64decode(token)
-        token_time = int(decoded.split(DELIMITER)[-1])
-    except (TypeError, ValueError, binascii.Error):
-        return False
-    if current_time is None:
-        current_time = time.time()
-    # If the token is too old it's not valid.
-    if current_time - token_time > DEFAULT_TIMEOUT_SECS:
-        return False
-
-    # The given token should match the generated one with the same time.
-    expected_token = generate_token(key, user_id, action_id=action_id,
-                                    when=token_time)
-    if len(token) != len(expected_token):
-        return False
-
-    # Perform constant time comparison to avoid timing attacks
-    different = 0
-    for x, y in zip(bytearray(token), bytearray(expected_token)):
-        different |= x ^ y
-    return not different
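Note: a round trip through the two XSRF helpers above, assuming the Python 2 runtime this vendored copy targets (hmac.new() without a digestmod falls back to MD5 there). The secret key, user id and action id are placeholders.

    from oauth2client import xsrfutil

    token = xsrfutil.generate_token('server-secret', 42, action_id='delete-item')
    assert xsrfutil.validate_token('server-secret', token, 42, action_id='delete-item')
    # A different user id yields a different expected token, so validation fails.
    assert not xsrfutil.validate_token('server-secret', token, 43,
                                       action_id='delete-item')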
diff --git a/tools/swarming_client/third_party/pyasn1/LICENSE b/tools/swarming_client/third_party/pyasn1/LICENSE
deleted file mode 100644
index 53158c7..0000000
--- a/tools/swarming_client/third_party/pyasn1/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2005-2015, Ilya Etingof <ilya@glas.net>
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-  * Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer.
-
-  * Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE. 
diff --git a/tools/swarming_client/third_party/pyasn1/README.swarming b/tools/swarming_client/third_party/pyasn1/README.swarming
deleted file mode 100644
index ac1e6bf..0000000
--- a/tools/swarming_client/third_party/pyasn1/README.swarming
+++ /dev/null
@@ -1,8 +0,0 @@
-URL: http://sourceforge.net/projects/pyasn1/files/pyasn1/0.1.9/pyasn1-0.1.9.tar.gz/download
-Version: 0.1.9
-License: BSD
-Description: Implementation of ASN.1 types and codecs.
-Revision: http://pyasn1.cvs.sourceforge.net/viewvc/pyasn1/pyasn1/?pathrev=release_0_1_9_tag
-
-Local Modifications:
-- Only kept pyasn1/.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/__init__.py
deleted file mode 100644
index 5f09300..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import sys
-
-# http://www.python.org/dev/peps/pep-0396/
-__version__ = '0.1.9'
-
-if sys.version_info[:2] < (2, 4):
-    raise RuntimeError('PyASN1 requires Python 2.4 or later')
-
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/decoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/decoder.py
deleted file mode 100644
index 61bfbce..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/decoder.py
+++ /dev/null
@@ -1,841 +0,0 @@
-# BER decoder
-from pyasn1.type import tag, univ, char, useful, tagmap
-from pyasn1.codec.ber import eoo
-from pyasn1.compat.octets import oct2int, isOctetsType
-from pyasn1 import debug, error
-
-class AbstractDecoder:
-    protoComponent = None
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
-
-class AbstractSimpleDecoder(AbstractDecoder):
-    tagFormats = (tag.tagFormatSimple,)
-    def _createComponent(self, asn1Spec, tagSet, value=None):
-        if tagSet[0][1] not in self.tagFormats:
-            raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
-        if asn1Spec is None:
-            return self.protoComponent.clone(value, tagSet)
-        elif value is None:
-            return asn1Spec
-        else:
-            return asn1Spec.clone(value)
-        
-class AbstractConstructedDecoder(AbstractDecoder):
-    tagFormats = (tag.tagFormatConstructed,)
-    def _createComponent(self, asn1Spec, tagSet, value=None):
-        if tagSet[0][1] not in self.tagFormats:
-            raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
-        if asn1Spec is None:
-            return self.protoComponent.clone(tagSet)
-        else:
-            return asn1Spec.clone()
-                                
-class ExplicitTagDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.Any('')
-    tagFormats = (tag.tagFormatConstructed,)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        if substrateFun:
-            return substrateFun(
-                       self._createComponent(asn1Spec, tagSet, ''),
-                       substrate, length
-                   )
-        head, tail = substrate[:length], substrate[length:]
-        value, _ = decodeFun(head, asn1Spec, tagSet, length)
-        return value, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        if substrateFun:
-            return substrateFun(
-                       self._createComponent(asn1Spec, tagSet, ''),
-                       substrate, length
-                   )
-        value, substrate = decodeFun(substrate, asn1Spec, tagSet, length)
-        terminator, substrate = decodeFun(substrate, allowEoo=True)
-        if eoo.endOfOctets.isSameTypeWith(terminator) and \
-                terminator == eoo.endOfOctets:
-            return value, substrate
-        else:
-            raise error.PyAsn1Error('Missing end-of-octets terminator')
-
-explicitTagDecoder = ExplicitTagDecoder()
-
-class IntegerDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.Integer(0)
-    precomputedValues = {
-        '\x00':  0,
-        '\x01':  1,
-        '\x02':  2,
-        '\x03':  3,
-        '\x04':  4,
-        '\x05':  5,
-        '\x06':  6,
-        '\x07':  7,
-        '\x08':  8,
-        '\x09':  9,
-        '\xff': -1,
-        '\xfe': -2,
-        '\xfd': -3,
-        '\xfc': -4,
-        '\xfb': -5
-        }
-    
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
-                     state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if not head:
-            return self._createComponent(asn1Spec, tagSet, 0), tail
-        if head in self.precomputedValues:
-            value = self.precomputedValues[head]
-        else:
-            firstOctet = oct2int(head[0])
-            if firstOctet & 0x80:
-                value = -1
-            else:
-                value = 0
-            for octet in head:
-                value = value << 8 | oct2int(octet)
-        return self._createComponent(asn1Spec, tagSet, value), tail
-
-class BooleanDecoder(IntegerDecoder):
-    protoComponent = univ.Boolean(0)
-    def _createComponent(self, asn1Spec, tagSet, value=None):
-        return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0)
-
-class BitStringDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.BitString(())
-    tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
-                     state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if tagSet[0][1] == tag.tagFormatSimple:    # XXX what tag to check?
-            if not head:
-                raise error.PyAsn1Error('Empty substrate')
-            trailingBits = oct2int(head[0])
-            if trailingBits > 7:
-                raise error.PyAsn1Error(
-                    'Trailing bits overflow %s' % trailingBits
-                    )
-            head = head[1:]
-            lsb = p = 0; l = len(head)-1; b = []
-            while p <= l:
-                if p == l:
-                    lsb = trailingBits
-                j = 7                    
-                o = oct2int(head[p])
-                while j >= lsb:
-                    b.append((o>>j)&0x01)
-                    j = j - 1
-                p = p + 1
-            return self._createComponent(asn1Spec, tagSet, b), tail
-        r = self._createComponent(asn1Spec, tagSet, ())
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while head:
-            component, head = decodeFun(head, self.protoComponent)
-            r = r + component
-        return r, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        r = self._createComponent(asn1Spec, tagSet, '')
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while substrate:
-            component, substrate = decodeFun(substrate, self.protoComponent,
-                                             allowEoo=True)
-            if eoo.endOfOctets.isSameTypeWith(component) and \
-                    component == eoo.endOfOctets:
-                break
-            r = r + component
-        else:
-            raise error.SubstrateUnderrunError(
-                'No EOO seen before substrate ends'
-                )
-        return r, substrate
-
-class OctetStringDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.OctetString('')
-    tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
-                     state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if tagSet[0][1] == tag.tagFormatSimple:    # XXX what tag to check?
-            return self._createComponent(asn1Spec, tagSet, head), tail
-        r = self._createComponent(asn1Spec, tagSet, '')
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while head:
-            component, head = decodeFun(head, self.protoComponent)
-            r = r + component
-        return r, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        r = self._createComponent(asn1Spec, tagSet, '')
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while substrate:
-            component, substrate = decodeFun(substrate, self.protoComponent,
-                                             allowEoo=True)
-            if eoo.endOfOctets.isSameTypeWith(component) and \
-                    component == eoo.endOfOctets:
-                break
-            r = r + component
-        else:
-            raise error.SubstrateUnderrunError(
-                'No EOO seen before substrate ends'
-                )
-        return r, substrate
-
-class NullDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.Null('')
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        r = self._createComponent(asn1Spec, tagSet)
-        if head:
-            raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length)
-        return r, tail
-
-class ObjectIdentifierDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.ObjectIdentifier(())
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
-                     state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if not head:
-            raise error.PyAsn1Error('Empty substrate')
-
-        oid = ()
-        index = 0
-        substrateLen = len(head)
-        while index < substrateLen:
-            subId = oct2int(head[index])
-            index += 1
-            if subId < 128:
-                oid = oid + (subId,)
-            elif subId > 128:
-                # Construct subid from a number of octets
-                nextSubId = subId
-                subId = 0
-                while nextSubId >= 128:
-                    subId = (subId << 7) + (nextSubId & 0x7F)
-                    if index >= substrateLen:
-                        raise error.SubstrateUnderrunError(
-                            'Short substrate for sub-OID past %s' % (oid,)
-                        )
-                    nextSubId = oct2int(head[index])
-                    index += 1
-                oid = oid + ((subId << 7) + nextSubId,)
-            elif subId == 128:
-                # ASN.1 spec forbids leading zeros (0x80) in OID
-                # encoding, tolerating it opens a vulnerability. See
-                # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf
-                # page 7
-                raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
-       
-        # Decode two leading arcs
-        if 0 <= oid[0] <= 39:
-            oid = (0,) + oid
-        elif 40 <= oid[0] <= 79:
-            oid = (1, oid[0]-40) + oid[1:]
-        elif oid[0] >= 80:
-            oid = (2, oid[0]-80) + oid[1:]
-        else:
-            raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
-
-        return self._createComponent(asn1Spec, tagSet, oid), tail
-
-class RealDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.Real()
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if not head:
-            return self._createComponent(asn1Spec, tagSet, 0.0), tail
-        fo = oct2int(head[0]); head = head[1:]
-        if fo & 0x80:  # binary encoding
-            if not head:
-                raise error.PyAsn1Error("Incomplete floating-point value")
-            n = (fo & 0x03) + 1
-            if n == 4:
-                n = oct2int(head[0])
-                head = head[1:]
-            eo, head = head[:n], head[n:]
-            if not eo or not head:
-                raise error.PyAsn1Error('Real exponent screwed')
-            e = oct2int(eo[0]) & 0x80 and -1 or 0
-            while eo:         # exponent
-                e <<= 8
-                e |= oct2int(eo[0])
-                eo = eo[1:]
-            b = fo >> 4 & 0x03 # base bits
-            if b > 2:
-                raise error.PyAsn1Error('Illegal Real base')
-            if b == 1: # encbase = 8
-                e *= 3
-            elif b == 2: # encbase = 16
-                e *= 4
-            p = 0
-            while head:  # value
-                p <<= 8
-                p |= oct2int(head[0])
-                head = head[1:]
-            if fo & 0x40:    # sign bit
-                p = -p
-            sf = fo >> 2 & 0x03  # scale bits
-            p *= 2**sf
-            value = (p, 2, e)
-        elif fo & 0x40:  # infinite value
-            value = fo & 0x01 and '-inf' or 'inf'
-        elif fo & 0xc0 == 0:  # character encoding
-            if not head:
-                raise error.PyAsn1Error("Incomplete floating-point value")
-            try:
-                if fo & 0x3 == 0x1:  # NR1
-                    value = (int(head), 10, 0)
-                elif fo & 0x3 == 0x2:  # NR2
-                    value = float(head)
-                elif fo & 0x3 == 0x3:  # NR3
-                    value = float(head)
-                else:
-                    raise error.SubstrateUnderrunError(
-                        'Unknown NR (tag %s)' % fo
-                        )
-            except ValueError:
-                raise error.SubstrateUnderrunError(
-                    'Bad character Real syntax'
-                    )
-        else:
-            raise error.SubstrateUnderrunError(
-                'Unknown encoding (tag %s)' % fo
-                )
-        return self._createComponent(asn1Spec, tagSet, value), tail
-        
-class SequenceDecoder(AbstractConstructedDecoder):
-    protoComponent = univ.Sequence()
-    def _getComponentTagMap(self, r, idx):
-        try:
-            return r.getComponentTagMapNearPosition(idx)
-        except error.PyAsn1Error:
-            return
-
-    def _getComponentPositionByType(self, r, t, idx):
-        return r.getComponentPositionNearType(t, idx)
-    
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        r = self._createComponent(asn1Spec, tagSet)
-        idx = 0
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while head:
-            asn1Spec = self._getComponentTagMap(r, idx)
-            component, head = decodeFun(head, asn1Spec)
-            idx = self._getComponentPositionByType(
-                r, component.getEffectiveTagSet(), idx
-                )
-            r.setComponentByPosition(idx, component, asn1Spec is None)
-            idx = idx + 1
-        r.setDefaultComponents()
-        r.verifySizeSpec()
-        return r, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        r = self._createComponent(asn1Spec, tagSet)
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        idx = 0
-        while substrate:
-            asn1Spec = self._getComponentTagMap(r, idx)
-            component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
-            if eoo.endOfOctets.isSameTypeWith(component) and \
-                    component == eoo.endOfOctets:
-                break
-            idx = self._getComponentPositionByType(
-                r, component.getEffectiveTagSet(), idx
-                )            
-            r.setComponentByPosition(idx, component, asn1Spec is None)
-            idx = idx + 1                
-        else:
-            raise error.SubstrateUnderrunError(
-                'No EOO seen before substrate ends'
-                )
-        r.setDefaultComponents()
-        r.verifySizeSpec()
-        return r, substrate
-
-class SequenceOfDecoder(AbstractConstructedDecoder):
-    protoComponent = univ.SequenceOf()    
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        r = self._createComponent(asn1Spec, tagSet)
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        asn1Spec = r.getComponentType()
-        idx = 0
-        while head:
-            component, head = decodeFun(head, asn1Spec)
-            r.setComponentByPosition(idx, component, asn1Spec is None)
-            idx = idx + 1
-        r.verifySizeSpec()
-        return r, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        r = self._createComponent(asn1Spec, tagSet)
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        asn1Spec = r.getComponentType()
-        idx = 0
-        while substrate:
-            component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
-            if eoo.endOfOctets.isSameTypeWith(component) and \
-                    component == eoo.endOfOctets:
-                break
-            r.setComponentByPosition(idx, component, asn1Spec is None)
-            idx = idx + 1                
-        else:
-            raise error.SubstrateUnderrunError(
-                'No EOO seen before substrate ends'
-                )
-        r.verifySizeSpec()
-        return r, substrate
-
-class SetDecoder(SequenceDecoder):
-    protoComponent = univ.Set()
-    def _getComponentTagMap(self, r, idx):
-        return r.getComponentTagMap()
-
-    def _getComponentPositionByType(self, r, t, idx):
-        nextIdx = r.getComponentPositionByType(t)
-        if nextIdx is None:
-            return idx
-        else:
-            return nextIdx
-    
-class SetOfDecoder(SequenceOfDecoder):
-    protoComponent = univ.SetOf()
-    
-class ChoiceDecoder(AbstractConstructedDecoder):
-    protoComponent = univ.Choice()
-    tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        r = self._createComponent(asn1Spec, tagSet)
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        if r.getTagSet() == tagSet: # explicitly tagged Choice
-            component, head = decodeFun(
-                head, r.getComponentTagMap()
-                )
-        else:
-            component, head = decodeFun(
-                head, r.getComponentTagMap(), tagSet, length, state
-                )
-        if isinstance(component, univ.Choice):
-            effectiveTagSet = component.getEffectiveTagSet()
-        else:
-            effectiveTagSet = component.getTagSet()
-        r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
-        return r, tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        r = self._createComponent(asn1Spec, tagSet)
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        if r.getTagSet() == tagSet: # explicitly tagged Choice
-            component, substrate = decodeFun(substrate, r.getComponentTagMap())
-            # eat up EOO marker
-            eooMarker, substrate = decodeFun(substrate, allowEoo=True)
-            if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \
-                    eooMarker != eoo.endOfOctets:
-                raise error.PyAsn1Error('No EOO seen before substrate ends')
-        else:
-            component, substrate= decodeFun(
-                substrate, r.getComponentTagMap(), tagSet, length, state
-            )
-        if isinstance(component, univ.Choice):
-            effectiveTagSet = component.getEffectiveTagSet()
-        else:
-            effectiveTagSet = component.getTagSet()
-        r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None)
-        return r, substrate
-
-class AnyDecoder(AbstractSimpleDecoder):
-    protoComponent = univ.Any()
-    tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                     length, state, decodeFun, substrateFun):
-        if asn1Spec is None or \
-               asn1Spec is not None and tagSet != asn1Spec.getTagSet():
-            # untagged Any container, recover inner header substrate
-            length = length + len(fullSubstrate) - len(substrate)
-            substrate = fullSubstrate
-        if substrateFun:
-            return substrateFun(self._createComponent(asn1Spec, tagSet),
-                                substrate, length)
-        head, tail = substrate[:length], substrate[length:]
-        return self._createComponent(asn1Spec, tagSet, value=head), tail
-
-    def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
-                             length, state, decodeFun, substrateFun):
-        if asn1Spec is not None and tagSet == asn1Spec.getTagSet():
-            # tagged Any type -- consume header substrate
-            header = ''
-        else:
-            # untagged Any, recover header substrate
-            header = fullSubstrate[:-len(substrate)]
-
-        r = self._createComponent(asn1Spec, tagSet, header)
-
-        # Any components do not inherit initial tag
-        asn1Spec = self.protoComponent
-        
-        if substrateFun:
-            return substrateFun(r, substrate, length)
-        while substrate:
-            component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
-            if eoo.endOfOctets.isSameTypeWith(component) and \
-                    component == eoo.endOfOctets:
-                break
-            r = r + component
-        else:
-            raise error.SubstrateUnderrunError(
-                'No EOO seen before substrate ends'
-                )
-        return r, substrate
-
-# character string types
-class UTF8StringDecoder(OctetStringDecoder):
-    protoComponent = char.UTF8String()
-class NumericStringDecoder(OctetStringDecoder):
-    protoComponent = char.NumericString()
-class PrintableStringDecoder(OctetStringDecoder):
-    protoComponent = char.PrintableString()
-class TeletexStringDecoder(OctetStringDecoder):
-    protoComponent = char.TeletexString()
-class VideotexStringDecoder(OctetStringDecoder):
-    protoComponent = char.VideotexString()
-class IA5StringDecoder(OctetStringDecoder):
-    protoComponent = char.IA5String()
-class GraphicStringDecoder(OctetStringDecoder):
-    protoComponent = char.GraphicString()
-class VisibleStringDecoder(OctetStringDecoder):
-    protoComponent = char.VisibleString()
-class GeneralStringDecoder(OctetStringDecoder):
-    protoComponent = char.GeneralString()
-class UniversalStringDecoder(OctetStringDecoder):
-    protoComponent = char.UniversalString()
-class BMPStringDecoder(OctetStringDecoder):
-    protoComponent = char.BMPString()
-
-# "useful" types
-class ObjectDescriptorDecoder(OctetStringDecoder):
-    protoComponent = useful.ObjectDescriptor()
-class GeneralizedTimeDecoder(OctetStringDecoder):
-    protoComponent = useful.GeneralizedTime()
-class UTCTimeDecoder(OctetStringDecoder):
-    protoComponent = useful.UTCTime()
-
-tagMap = {
-    univ.Integer.tagSet: IntegerDecoder(),
-    univ.Boolean.tagSet: BooleanDecoder(),
-    univ.BitString.tagSet: BitStringDecoder(),
-    univ.OctetString.tagSet: OctetStringDecoder(),
-    univ.Null.tagSet: NullDecoder(),
-    univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(),
-    univ.Enumerated.tagSet: IntegerDecoder(),
-    univ.Real.tagSet: RealDecoder(),
-    univ.Sequence.tagSet: SequenceDecoder(),  # conflicts with SequenceOf
-    univ.Set.tagSet: SetDecoder(),            # conflicts with SetOf
-    univ.Choice.tagSet: ChoiceDecoder(),      # conflicts with Any
-    # character string types
-    char.UTF8String.tagSet: UTF8StringDecoder(),
-    char.NumericString.tagSet: NumericStringDecoder(),
-    char.PrintableString.tagSet: PrintableStringDecoder(),
-    char.TeletexString.tagSet: TeletexStringDecoder(),
-    char.VideotexString.tagSet: VideotexStringDecoder(),
-    char.IA5String.tagSet: IA5StringDecoder(),
-    char.GraphicString.tagSet: GraphicStringDecoder(),
-    char.VisibleString.tagSet: VisibleStringDecoder(),
-    char.GeneralString.tagSet: GeneralStringDecoder(),
-    char.UniversalString.tagSet: UniversalStringDecoder(),
-    char.BMPString.tagSet: BMPStringDecoder(),
-    # useful types
-    useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
-    useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
-    useful.UTCTime.tagSet: UTCTimeDecoder()
-}
-
-# Type-to-codec map for ambiguous ASN.1 types
-typeMap = {
-    univ.Set.typeId: SetDecoder(),
-    univ.SetOf.typeId: SetOfDecoder(),
-    univ.Sequence.typeId: SequenceDecoder(),
-    univ.SequenceOf.typeId: SequenceOfDecoder(),
-    univ.Choice.typeId: ChoiceDecoder(),
-    univ.Any.typeId: AnyDecoder()
-}
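Note: with tagMap and typeMap wired up as above, the module-level decode callable that the rest of this file builds from the Decoder class turns raw BER octets back into pyasn1 objects. A minimal illustration on a hand-encoded INTEGER (tag 0x02, length 0x01, value 0x0c); not code from the tree:

    from pyasn1.codec.ber import decoder

    value, rest = decoder.decode(b'\x02\x01\x0c')  # INTEGER, length 1, value 12
    assert int(value) == 12 and not rest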
-
-( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec,
-  stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue,
-  stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)]
-
-class Decoder:
-    defaultErrorState = stErrorCondition
-#    defaultErrorState = stDumpRawValue
-    defaultRawDecoder = AnyDecoder()
-    supportIndefLength = True
-    def __init__(self, tagMap, typeMap={}):
-        self.__tagMap = tagMap
-        self.__typeMap = typeMap
-        # Tag & TagSet objects caches
-        self.__tagCache = {}
-        self.__tagSetCache = {}
-        
-    def __call__(self, substrate, asn1Spec=None, tagSet=None,
-                 length=None, state=stDecodeTag, recursiveFlag=1,
-                 substrateFun=None, allowEoo=False):
-        if debug.logger & debug.flagDecoder:
-            debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
-        fullSubstrate = substrate
-        while state != stStop:
-            if state == stDecodeTag:
-                if not substrate:
-                    raise error.SubstrateUnderrunError(
-                        'Short octet stream on tag decoding'
-                        )
-                if not isOctetsType(substrate) and \
-                   not isinstance(substrate, univ.OctetString):
-                    raise error.PyAsn1Error('Bad octet stream type')
-                # Decode tag
-                firstOctet = substrate[0]
-                substrate = substrate[1:]
-                if firstOctet in self.__tagCache:
-                    lastTag = self.__tagCache[firstOctet]
-                else:
-                    t = oct2int(firstOctet)
-                    # Look for end-of-octets sentinel
-                    if t == 0:
-                        if substrate and oct2int(substrate[0]) == 0:
-                            if allowEoo and self.supportIndefLength:
-                                debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found')
-                                value, substrate = eoo.endOfOctets, substrate[1:]
-                                state = stStop
-                                continue
-                            else:
-                                raise error.PyAsn1Error('Unexpected end-of-contents sentinel')
-                        else:
-                            raise error.PyAsn1Error('Zero tag encountered')
-                    tagClass = t&0xC0
-                    tagFormat = t&0x20
-                    tagId = t&0x1F
-                    if tagId == 0x1F:
-                        tagId = 0
-                        while 1:
-                            if not substrate:
-                                raise error.SubstrateUnderrunError(
-                                    'Short octet stream on long tag decoding'
-                                    )
-                            t = oct2int(substrate[0])
-                            tagId = tagId << 7 | (t&0x7F)
-                            substrate = substrate[1:]
-                            if not t&0x80:
-                                break
-                    lastTag = tag.Tag(
-                        tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
-                    )
-                    if tagId < 31:
-                        # cache short tags
-                        self.__tagCache[firstOctet] = lastTag
-                if tagSet is None:
-                    if firstOctet in self.__tagSetCache:
-                        tagSet = self.__tagSetCache[firstOctet]
-                    else:
-                        # base tag not recovered
-                        tagSet = tag.TagSet((), lastTag)
-                        if firstOctet in self.__tagCache:
-                            self.__tagSetCache[firstOctet] = tagSet
-                else:
-                    tagSet = lastTag + tagSet
-                state = stDecodeLength
-                debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet)
-            if state == stDecodeLength:
-                # Decode length
-                if not substrate:
-                    raise error.SubstrateUnderrunError(
-                        'Short octet stream on length decoding'
-                    )
-                firstOctet  = oct2int(substrate[0])
-                if firstOctet == 128:
-                    size = 1
-                    length = -1
-                elif firstOctet < 128:
-                    length, size = firstOctet, 1
-                else:
-                    size = firstOctet & 0x7F
-                    # encoded in size bytes
-                    length = 0
-                    lengthString = substrate[1:size+1]
-                    # missing check on maximum size, which shouldn't be a
-                    # problem, we can handle more than is possible
-                    if len(lengthString) != size:
-                        raise error.SubstrateUnderrunError(
-                            '%s<%s at %s' %
-                            (size, len(lengthString), tagSet)
-                            )
-                    for char in lengthString:
-                        length = (length << 8) | oct2int(char)
-                    size = size + 1
-                substrate = substrate[size:]
-                if length != -1 and len(substrate) < length:
-                    raise error.SubstrateUnderrunError(
-                        '%d-octet short' % (length - len(substrate))
-                        )
-                if length == -1 and not self.supportIndefLength:
-                    raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
-                state = stGetValueDecoder
-                debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
-            if state == stGetValueDecoder:
-                if asn1Spec is None:
-                    state = stGetValueDecoderByTag
-                else:
-                    state = stGetValueDecoderByAsn1Spec
-            #
-            # There are two ways of creating subtypes in ASN.1, and they
-            # influence decoder operation:
-            # 1) The base type is used as-is, or no IMPLICIT tagging has
-            #    been applied when subtyping.
-            # 2) The subtype syntax drops base type information (by means
-            #    of IMPLICIT tagging).
-            # The first case allows for complete tag recovery from the
-            # substrate, while the second one requires the original ASN.1
-            # type spec for decoding.
-            #
-            # In either case a set of tags (tagSet) comes from the
-            # substrate in an incremental, tag-by-tag fashion (this is the
-            # case of EXPLICIT tagging, which is the most basic). The
-            # outermost tag comes first from the wire.
-            #
-            if state == stGetValueDecoderByTag:
-                if tagSet in self.__tagMap:
-                    concreteDecoder = self.__tagMap[tagSet]
-                else:
-                    concreteDecoder = None
-                if concreteDecoder:
-                    state = stDecodeValue
-                else:
-                    _k = tagSet[:1]
-                    if _k in self.__tagMap:
-                        concreteDecoder = self.__tagMap[_k]
-                    else:
-                        concreteDecoder = None
-                    if concreteDecoder:
-                        state = stDecodeValue
-                    else:
-                        state = stTryAsExplicitTag
-                if debug.logger and debug.logger & debug.flagDecoder:
-                    debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
-                    debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__)
-            if state == stGetValueDecoderByAsn1Spec:
-                if isinstance(asn1Spec, (dict, tagmap.TagMap)):
-                    if tagSet in asn1Spec:
-                        __chosenSpec = asn1Spec[tagSet]
-                    else:
-                        __chosenSpec = None
-                    if debug.logger and debug.logger & debug.flagDecoder:
-                        debug.logger('candidate ASN.1 spec is a map of:')
-                        for t, v in asn1Spec.getPosMap().items():
-                            debug.logger('  %s -> %s' % (t, v.__class__.__name__))
-                        if asn1Spec.getNegMap():
-                            debug.logger('but neither of: ')
-                            for t, v in asn1Spec.getNegMap().items():
-                                debug.logger('  %s -> %s' % (t, v.__class__.__name__))
-                        debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '<none>' or __chosenSpec.prettyPrintType(), tagSet))
-                else:
-                    __chosenSpec = asn1Spec
-                    debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
-                if __chosenSpec is not None and (
-                       tagSet == __chosenSpec.getTagSet() or \
-                       tagSet in __chosenSpec.getTagMap()
-                       ):
-                    # use base type for codec lookup to recover untagged types
-                    baseTagSet = __chosenSpec.baseTagSet
-                    if __chosenSpec.typeId is not None and \
-                           __chosenSpec.typeId in self.__typeMap:
-                        # ambiguous type
-                        concreteDecoder = self.__typeMap[__chosenSpec.typeId]
-                        debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,))
-                    elif baseTagSet in self.__tagMap:
-                        # base type or tagged subtype
-                        concreteDecoder = self.__tagMap[baseTagSet]
-                        debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,))
-                    else:
-                        concreteDecoder = None
-                    if concreteDecoder:
-                        asn1Spec = __chosenSpec
-                        state = stDecodeValue
-                    else:
-                        state = stTryAsExplicitTag
-                else:
-                    concreteDecoder = None
-                    state = stTryAsExplicitTag
-                if debug.logger and debug.logger & debug.flagDecoder:
-                    debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as explicit tag'))
-                    debug.scope.push(__chosenSpec is None and '?' or __chosenSpec.__class__.__name__)
-            if state == stTryAsExplicitTag:
-                if tagSet and \
-                       tagSet[0][1] == tag.tagFormatConstructed and \
-                       tagSet[0][0] != tag.tagClassUniversal:
-                    # Assume explicit tagging
-                    concreteDecoder = explicitTagDecoder
-                    state = stDecodeValue
-                else:                    
-                    concreteDecoder = None
-                    state = self.defaultErrorState
-                debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "<none>", state == stDecodeValue and 'value' or 'as failure'))
-            if state == stDumpRawValue:
-                concreteDecoder = self.defaultRawDecoder
-                debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__)
-                state = stDecodeValue
-            if state == stDecodeValue:
-                if recursiveFlag == 0 and not substrateFun: # legacy
-                    substrateFun = lambda a,b,c: (a,b[:c])
-                if length == -1:  # indef length
-                    value, substrate = concreteDecoder.indefLenValueDecoder(
-                        fullSubstrate, substrate, asn1Spec, tagSet, length,
-                        stGetValueDecoder, self, substrateFun
-                        )
-                else:
-                    value, substrate = concreteDecoder.valueDecoder(
-                        fullSubstrate, substrate, asn1Spec, tagSet, length,
-                        stGetValueDecoder, self, substrateFun
-                        )
-                state = stStop
-                debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '<none>'))
-            if state == stErrorCondition:
-                raise error.PyAsn1Error(
-                    '%s not in asn1Spec: %s' % (tagSet, asn1Spec)
-                )
-        if debug.logger and debug.logger & debug.flagDecoder:
-            debug.scope.pop()
-            debug.logger('decoder left scope %s, call completed' % debug.scope)
-        return value, substrate
-            
-decode = Decoder(tagMap, typeMap)
-
-# XXX
-# non-recursive decoding; return position rather than substrate
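Note (illustrative, not part of the patch): the removed pyasn1 BER decoder above is a state machine that first parses the identifier and length octets (stDecodeTag/stDecodeLength) and then dispatches on the recovered tagSet. A minimal, self-contained sketch of that header parsing follows; the helper name and structure are hypothetical.

def parse_ber_header(data):
    # Identifier octet: class (bits 8-7), constructed flag (bit 6), tag number (bits 5-1).
    first = data[0]
    tag_class, constructed, tag_number = first & 0xC0, bool(first & 0x20), first & 0x1F
    pos = 1
    if tag_number == 0x1F:  # high-tag-number form: base-128 digits with a continuation bit
        tag_number = 0
        while True:
            octet = data[pos]
            pos += 1
            tag_number = (tag_number << 7) | (octet & 0x7F)
            if not octet & 0x80:
                break
    length_octet = data[pos]
    pos += 1
    if length_octet == 0x80:      # indefinite form, terminated by end-of-octets (00 00)
        length = -1
    elif length_octet < 0x80:     # short definite form
        length = length_octet
    else:                         # long definite form: low 7 bits give the number of length octets
        count = length_octet & 0x7F
        length = int.from_bytes(data[pos:pos + count], 'big')
        pos += count
    return tag_class, constructed, tag_number, length, pos

# parse_ber_header(bytes([0x30, 0x03, 0x02, 0x01, 0x05]))
# -> (0x00, True, 0x10, 3, 2): a universal, constructed SEQUENCE of length 3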
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/encoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/encoder.py
deleted file mode 100644
index 0fb4ae7..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/encoder.py
+++ /dev/null
@@ -1,433 +0,0 @@
-# BER encoder
-from pyasn1.type import base, tag, univ, char, useful
-from pyasn1.codec.ber import eoo
-from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs
-from pyasn1 import debug, error
-
-class Error(Exception): pass
-
-class AbstractItemEncoder:
-    supportIndefLenMode = 1
-    def encodeTag(self, t, isConstructed):
-        tagClass, tagFormat, tagId = t.asTuple()  # this is a hotspot
-        v = tagClass | tagFormat
-        if isConstructed:
-            v = v|tag.tagFormatConstructed
-        if tagId < 31:
-            return int2oct(v|tagId)
-        else:
-            s = int2oct(tagId&0x7f)
-            tagId = tagId >> 7
-            while tagId:
-                s = int2oct(0x80|(tagId&0x7f)) + s
-                tagId = tagId >> 7
-            return int2oct(v|0x1F) + s
-
-    def encodeLength(self, length, defMode):
-        if not defMode and self.supportIndefLenMode:
-            return int2oct(0x80)
-        if length < 0x80:
-            return int2oct(length)
-        else:
-            substrate = null
-            while length:
-                substrate = int2oct(length&0xff) + substrate
-                length = length >> 8
-            substrateLen = len(substrate)
-            if substrateLen > 126:
-                raise Error('Length octets overflow (%d)' % substrateLen)
-            return int2oct(0x80 | substrateLen) + substrate
-
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        raise Error('Not implemented')
-
-    def _encodeEndOfOctets(self, encodeFun, defMode):
-        if defMode or not self.supportIndefLenMode:
-            return null
-        else:
-            return encodeFun(eoo.endOfOctets, defMode)
-        
-    def encode(self, encodeFun, value, defMode, maxChunkSize):
-        substrate, isConstructed = self.encodeValue(
-            encodeFun, value, defMode, maxChunkSize
-            )
-        tagSet = value.getTagSet()
-        if tagSet:
-            if not isConstructed:  # primitive form implies definite mode
-                defMode = 1
-            return self.encodeTag(
-                tagSet[-1], isConstructed
-                ) + self.encodeLength(
-                len(substrate), defMode
-                ) + substrate + self._encodeEndOfOctets(encodeFun, defMode)
-        else:
-            return substrate  # untagged value
-
-class EndOfOctetsEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        return null, 0
-
-class ExplicitlyTaggedItemEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        if isinstance(value, base.AbstractConstructedAsn1Item):
-            value = value.clone(tagSet=value.getTagSet()[:-1],
-                                cloneValueFlag=1)
-        else:
-            value = value.clone(tagSet=value.getTagSet()[:-1])
-        return encodeFun(value, defMode, maxChunkSize), 1
-
-explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder()
-
-class BooleanEncoder(AbstractItemEncoder):
-    supportIndefLenMode = 0
-    _true = ints2octs((1,))
-    _false = ints2octs((0,))
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        return value and self._true or self._false, 0
-
-class IntegerEncoder(AbstractItemEncoder):
-    supportIndefLenMode = 0
-    supportCompactZero = False
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        if value == 0:  # shortcut for zero value
-            if self.supportCompactZero:
-                # this seems to be a correct way for encoding zeros
-                return null, 0
-            else:
-                # this seems to be a widespread way for encoding zeros
-                return ints2octs((0,)), 0
-        octets = []
-        value = int(value) # to save on ops on asn1 type
-        while 1:
-            octets.insert(0, value & 0xff)
-            if value == 0 or value == -1:
-                break
-            value = value >> 8
-        if value == 0 and octets[0] & 0x80:
-            octets.insert(0, 0)
-        while len(octets) > 1 and \
-                  (octets[0] == 0 and octets[1] & 0x80 == 0 or \
-                   octets[0] == 0xff and octets[1] & 0x80 != 0):
-            del octets[0]
-        return ints2octs(octets), 0
-
-class BitStringEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        if not maxChunkSize or len(value) <= maxChunkSize*8:
-            out_len = (len(value) + 7) // 8
-            out_list = out_len * [0]
-            j = 7
-            i = -1
-            for val in value:
-                j += 1
-                if j == 8:
-                    i += 1
-                    j = 0
-                out_list[i] = out_list[i] | val << (7-j)
-            return int2oct(7-j) + ints2octs(out_list), 0
-        else:
-            pos = 0; substrate = null
-            while 1:
-                # count in octets
-                v = value.clone(value[pos*8:pos*8+maxChunkSize*8])
-                if not v:
-                    break
-                substrate = substrate + encodeFun(v, defMode, maxChunkSize)
-                pos = pos + maxChunkSize
-            return substrate, 1
-
-class OctetStringEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        if not maxChunkSize or len(value) <= maxChunkSize:
-            return value.asOctets(), 0
-        else:
-            pos = 0; substrate = null
-            while 1:
-                v = value.clone(value[pos:pos+maxChunkSize])
-                if not v:
-                    break
-                substrate = substrate + encodeFun(v, defMode, maxChunkSize)
-                pos = pos + maxChunkSize
-            return substrate, 1
-
-class NullEncoder(AbstractItemEncoder):
-    supportIndefLenMode = 0
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        return null, 0
-
-class ObjectIdentifierEncoder(AbstractItemEncoder):
-    supportIndefLenMode = 0
-    precomputedValues = {
-        (1, 3, 6, 1, 2): (43, 6, 1, 2),        
-        (1, 3, 6, 1, 4): (43, 6, 1, 4)
-    }
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):    
-        oid = value.asTuple()
-        if oid[:5] in self.precomputedValues:
-            octets = self.precomputedValues[oid[:5]]
-            oid = oid[5:]
-        else:
-            if len(oid) < 2:
-                raise error.PyAsn1Error('Short OID %s' % (value,))
-
-            octets = ()
-
-            # Build the first two arcs
-            if oid[0] == 0 and 0 <= oid[1] <= 39:
-                oid = (oid[1],) + oid[2:]
-            elif oid[0] == 1 and 0 <= oid[1] <= 39:
-                oid = (oid[1] + 40,) + oid[2:]
-            elif oid[0] == 2:
-                oid = (oid[1] + 80,) + oid[2:]
-            else:
-                raise error.PyAsn1Error(
-                    'Impossible initial arcs %s at %s' % (oid[:2], value)
-                    )
-
-        # Cycle through subIds
-        for subId in oid:
-            if subId > -1 and subId < 128:
-                # Optimize for the common case
-                octets = octets + (subId & 0x7f,)
-            elif subId < 0:
-                raise error.PyAsn1Error(
-                    'Negative OID arc %s at %s' % (subId, value)
-                )
-            else:
-                # Pack large Sub-Object IDs
-                res = (subId & 0x7f,)
-                subId = subId >> 7
-                while subId > 0:
-                    res = (0x80 | (subId & 0x7f),) + res
-                    subId = subId >> 7 
-                # Add packed Sub-Object ID to resulted Object ID
-                octets += res
-
-        return ints2octs(octets), 0
-
-class RealEncoder(AbstractItemEncoder):
-    supportIndefLenMode = 0
-    binEncBase = 2 # set to None to choose encoding base automatically 
-    def _dropFloatingPoint(self, m, encbase, e):
-        ms, es = 1, 1
-        if m < 0:
-            ms = -1  # mantissa sign
-        if e < 0:
-            es = -1  # exponent sign
-        m *= ms 
-        if encbase == 8:
-            m = m*2**(abs(e) % 3 * es)
-            e = abs(e) // 3 * es
-        elif encbase == 16:
-            m = m*2**(abs(e) % 4 * es)
-            e = abs(e) // 4 * es
-
-        while 1:
-            if int(m) != m:
-                m *= encbase
-                e -= 1
-                continue
-            break
-        return ms, int(m), encbase, e
-
-    def _chooseEncBase(self, value):
-        m, b, e = value
-        base = [2, 8, 16]
-        if value.binEncBase in base:
-            return self._dropFloatingPoint(m, value.binEncBase, e)
-        elif self.binEncBase in base:
-            return self._dropFloatingPoint(m, self.binEncBase, e)
-        # auto choosing base 2/8/16 
-        mantissa = [m, m, m]
-        exponenta = [e, e, e]
-        encbase = 2 
-        e = float('inf')
-        for i in range(3):
-            sign, mantissa[i], base[i], exponenta[i] = \
-                self._dropFloatingPoint(mantissa[i], base[i], exponenta[i])
-            if abs(exponenta[i]) < abs(e) or \
-               (abs(exponenta[i]) == abs(e) and mantissa[i] < m):
-                e = exponenta[i]
-                m = int(mantissa[i])
-                encbase = base[i]
-        return sign, m, encbase, e
-
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        if value.isPlusInfinity():
-            return int2oct(0x40), 0
-        if value.isMinusInfinity():
-            return int2oct(0x41), 0
-        m, b, e = value
-        if not m:
-            return null, 0
-        if b == 10:
-            return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0
-        elif b == 2:
-            fo = 0x80 # binary encoding
-            ms, m, encbase, e = self._chooseEncBase(value)
-            if ms < 0: # mantissa sign
-                fo = fo | 0x40 # sign bit
-            # exponent & mantissa normalization
-            if encbase == 2:
-                while m & 0x1 == 0:
-                    m >>= 1
-                    e += 1
-            elif encbase == 8:
-                while m & 0x7 == 0:
-                    m >>= 3
-                    e += 1
-                fo |= 0x10
-            else: # encbase = 16
-                while m & 0xf == 0:
-                    m >>= 4
-                    e += 1
-                fo |= 0x20
-            sf = 0 # scale factor
-            while m & 0x1 == 0:
-                m >>= 1
-                sf += 1
-            if sf > 3:
-                raise error.PyAsn1Error('Scale factor overflow') # bug if raised
-            fo |= sf << 2
-            eo = null
-            if e == 0 or e == -1:
-                eo = int2oct(e&0xff)
-            else: 
-                while e not in (0, -1):
-                    eo = int2oct(e&0xff) + eo
-                    e >>= 8
-                if e == 0 and eo and oct2int(eo[0]) & 0x80:
-                    eo = int2oct(0) + eo
-                if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
-                    eo = int2oct(0xff) + eo
-            n = len(eo)
-            if n > 0xff:
-                raise error.PyAsn1Error('Real exponent overflow')
-            if n == 1:
-                pass
-            elif n == 2:
-                fo |= 1
-            elif n == 3:
-                fo |= 2
-            else:
-                fo |= 3
-                eo = int2oct(n&0xff) + eo
-            po = null
-            while m:
-                po = int2oct(m&0xff) + po
-                m >>= 8
-            substrate = int2oct(fo) + eo + po
-            return substrate, 0
-        else:
-            raise error.PyAsn1Error('Prohibited Real base %s' % b)
-
-class SequenceEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        value.setDefaultComponents()
-        value.verifySizeSpec()
-        substrate = null; idx = len(value)
-        while idx > 0:
-            idx = idx - 1
-            if value[idx] is None:  # Optional component
-                continue
-            component = value.getDefaultComponentByPosition(idx)
-            if component is not None and component == value[idx]:
-                continue
-            substrate = encodeFun(
-                value[idx], defMode, maxChunkSize
-                ) + substrate
-        return substrate, 1
-
-class SequenceOfEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        value.verifySizeSpec()
-        substrate = null; idx = len(value)
-        while idx > 0:
-            idx = idx - 1
-            substrate = encodeFun(
-                value[idx], defMode, maxChunkSize
-                ) + substrate
-        return substrate, 1
-
-class ChoiceEncoder(AbstractItemEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        return encodeFun(value.getComponent(), defMode, maxChunkSize), 1
-
-class AnyEncoder(OctetStringEncoder):
-    def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
-        return value.asOctets(), defMode == 0
-
-tagMap = {
-    eoo.endOfOctets.tagSet: EndOfOctetsEncoder(),
-    univ.Boolean.tagSet: BooleanEncoder(),
-    univ.Integer.tagSet: IntegerEncoder(),
-    univ.BitString.tagSet: BitStringEncoder(),
-    univ.OctetString.tagSet: OctetStringEncoder(),
-    univ.Null.tagSet: NullEncoder(),
-    univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(),
-    univ.Enumerated.tagSet: IntegerEncoder(),
-    univ.Real.tagSet: RealEncoder(),
-    # Sequence & Set have same tags as SequenceOf & SetOf
-    univ.SequenceOf.tagSet: SequenceOfEncoder(),
-    univ.SetOf.tagSet: SequenceOfEncoder(),
-    univ.Choice.tagSet: ChoiceEncoder(),
-    # character string types
-    char.UTF8String.tagSet: OctetStringEncoder(),
-    char.NumericString.tagSet: OctetStringEncoder(),
-    char.PrintableString.tagSet: OctetStringEncoder(),
-    char.TeletexString.tagSet: OctetStringEncoder(),
-    char.VideotexString.tagSet: OctetStringEncoder(),
-    char.IA5String.tagSet: OctetStringEncoder(),
-    char.GraphicString.tagSet: OctetStringEncoder(),
-    char.VisibleString.tagSet: OctetStringEncoder(),
-    char.GeneralString.tagSet: OctetStringEncoder(),
-    char.UniversalString.tagSet: OctetStringEncoder(),
-    char.BMPString.tagSet: OctetStringEncoder(),
-    # useful types
-    useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
-    useful.GeneralizedTime.tagSet: OctetStringEncoder(),
-    useful.UTCTime.tagSet: OctetStringEncoder()        
-    }
-
-# Type-to-codec map for ambiguous ASN.1 types
-typeMap = {
-    univ.Set.typeId: SequenceEncoder(),
-    univ.SetOf.typeId: SequenceOfEncoder(),
-    univ.Sequence.typeId: SequenceEncoder(),
-    univ.SequenceOf.typeId: SequenceOfEncoder(),
-    univ.Choice.typeId: ChoiceEncoder(),
-    univ.Any.typeId: AnyEncoder()
-    }
-
-class Encoder:
-    supportIndefLength = True
-    def __init__(self, tagMap, typeMap={}):
-        self.__tagMap = tagMap
-        self.__typeMap = typeMap
-
-    def __call__(self, value, defMode=True, maxChunkSize=0):
-        if not defMode and not self.supportIndefLength:
-            raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
-        debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint()))
-        tagSet = value.getTagSet()
-        if len(tagSet) > 1:
-            concreteEncoder = explicitlyTaggedItemEncoder
-        else:
-            if value.typeId is not None and value.typeId in self.__typeMap:
-                concreteEncoder = self.__typeMap[value.typeId]
-            elif tagSet in self.__tagMap:
-                concreteEncoder = self.__tagMap[tagSet]
-            else:
-                tagSet = value.baseTagSet
-                if tagSet in self.__tagMap:
-                    concreteEncoder = self.__tagMap[tagSet]
-                else:
-                    raise Error('No encoder for %s' % (value,))
-        debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
-        substrate = concreteEncoder.encode(
-            self, value, defMode, maxChunkSize
-            )
-        debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate)))
-        return substrate
-
-encode = Encoder(tagMap, typeMap)
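Note (illustrative, not part of the patch): the removed module exposes a module-level encode/decode pair, so a typical round trip with this vendored pyasn1 looked roughly like the sketch below.

from pyasn1.type import univ
from pyasn1.codec.ber import decoder, encoder

substrate = encoder.encode(univ.Integer(12345))  # b'\x02\x02\x30\x39' (tag, length, value octets)
value, rest = decoder.decode(substrate)          # Integer(12345), leftover substrate b''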
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/eoo.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/eoo.py
deleted file mode 100644
index 379be19..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/ber/eoo.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from pyasn1.type import base, tag
-
-class EndOfOctets(base.AbstractSimpleAsn1Item):
-    defaultValue = 0
-    tagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00)
-        )
-endOfOctets = EndOfOctets()
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/decoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/decoder.py
deleted file mode 100644
index 1770cd8..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/decoder.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# CER decoder
-from pyasn1.type import univ
-from pyasn1.codec.ber import decoder
-from pyasn1.compat.octets import oct2int
-from pyasn1 import error
-
-class BooleanDecoder(decoder.AbstractSimpleDecoder):
-    protoComponent = univ.Boolean(0)
-    def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
-                     state, decodeFun, substrateFun):
-        head, tail = substrate[:length], substrate[length:]
-        if not head or length != 1:
-            raise error.PyAsn1Error('Not single-octet Boolean payload')
-        byte = oct2int(head[0])
-        # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
-        # BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1 
-        # in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
-        if byte == 0xff:
-            value = 1
-        elif byte == 0x00:
-            value = 0
-        else:
-            raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte)
-        return self._createComponent(asn1Spec, tagSet, value), tail
-
-tagMap = decoder.tagMap.copy()
-tagMap.update({
-    univ.Boolean.tagSet: BooleanDecoder()
-    })
-
-typeMap = decoder.typeMap
-
-class Decoder(decoder.Decoder): pass
-
-decode = Decoder(tagMap, decoder.typeMap)
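Note (illustrative, not part of the patch): as the comment in the removed CER BooleanDecoder states, BER accepts any non-zero Boolean payload while CER/DER require exactly 0xFF, so the stricter decoder rejects "loose" encodings.

from pyasn1.codec.cer import decoder as cer_decoder
from pyasn1.error import PyAsn1Error

loose_true = bytes([0x01, 0x01, 0x7F])  # BOOLEAN, length 1, content octet 0x7F
try:
    cer_decoder.decode(loose_true)
except PyAsn1Error as exc:
    print('rejected by CER:', exc)      # 'Unexpected Boolean payload: 127'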
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/encoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/encoder.py
deleted file mode 100644
index 61ce8a1..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/cer/encoder.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# CER encoder
-from pyasn1.type import univ
-from pyasn1.type import useful
-from pyasn1.codec.ber import encoder
-from pyasn1.compat.octets import int2oct, str2octs, null
-from pyasn1 import error
-
-class BooleanEncoder(encoder.IntegerEncoder):
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        if client == 0:
-            substrate = int2oct(0)
-        else:
-            substrate = int2oct(255)
-        return substrate, 0
-
-class BitStringEncoder(encoder.BitStringEncoder):
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        return encoder.BitStringEncoder.encodeValue(
-            self, encodeFun, client, defMode, 1000
-        )
-
-class OctetStringEncoder(encoder.OctetStringEncoder):
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        return encoder.OctetStringEncoder.encodeValue(
-            self, encodeFun, client, defMode, 1000
-        )
-
-class RealEncoder(encoder.RealEncoder):
-    def _chooseEncBase(self, value):
-        m, b, e = value
-        return self._dropFloatingPoint(m, b, e)
-
-# specialized GeneralStringEncoder here
-
-class GeneralizedTimeEncoder(OctetStringEncoder):
-    zchar = str2octs('Z')
-    pluschar = str2octs('+')
-    minuschar = str2octs('-')
-    zero = str2octs('0')
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        octets = client.asOctets()
-# This breaks too many existing data items
-#        if '.' not in octets:
-#            raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
-        if len(octets) < 15:
-            raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
-        if self.pluschar in octets or self.minuschar in octets:
-            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
-        if octets[-1] != self.zchar[0]:
-            raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
-        return encoder.OctetStringEncoder.encodeValue(
-            self, encodeFun, client, defMode, 1000
-        )
-
-class UTCTimeEncoder(encoder.OctetStringEncoder):
-    zchar = str2octs('Z')
-    pluschar = str2octs('+')
-    minuschar = str2octs('-')
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        octets = client.asOctets()
-        if self.pluschar in octets or self.minuschar in octets:
-            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
-        if octets and octets[-1] != self.zchar[0]:
-            client = client.clone(octets + self.zchar)
-        if len(client) != 13:
-            raise error.PyAsn1Error('Bad UTC time length: %r' % client)
-        return encoder.OctetStringEncoder.encodeValue(
-            self, encodeFun, client, defMode, 1000
-        )
-
-class SetOfEncoder(encoder.SequenceOfEncoder):
-    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
-        if isinstance(client, univ.SequenceAndSetBase):
-            client.setDefaultComponents()
-        client.verifySizeSpec()
-        substrate = null; idx = len(client)
-        # This is certainly a hack but how else do I distinguish SetOf
-        # from Set if they have the same tags&constraints?
-        if isinstance(client, univ.SequenceAndSetBase):
-            # Set
-            comps = []
-            while idx > 0:
-                idx = idx - 1
-                if client[idx] is None:  # Optional component
-                    continue
-                if client.getDefaultComponentByPosition(idx) == client[idx]:
-                    continue
-                comps.append(client[idx])
-            comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
-                                     x.getMinTagSet() or x.getTagSet())
-            for c in comps:
-                substrate += encodeFun(c, defMode, maxChunkSize)
-        else:
-            # SetOf
-            compSubs = []
-            while idx > 0:
-                idx = idx - 1
-                compSubs.append(
-                    encodeFun(client[idx], defMode, maxChunkSize)
-                    )
-            compSubs.sort()  # perhaps padding's not needed
-            substrate = null
-            for compSub in compSubs:
-                substrate += compSub
-        return substrate, 1
-
-tagMap = encoder.tagMap.copy()
-tagMap.update({
-    univ.Boolean.tagSet: BooleanEncoder(),
-    univ.BitString.tagSet: BitStringEncoder(),
-    univ.OctetString.tagSet: OctetStringEncoder(),
-    univ.Real.tagSet: RealEncoder(),
-    useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
-    useful.UTCTime.tagSet: UTCTimeEncoder(),
-    univ.SetOf().tagSet: SetOfEncoder()  # conflicts with Set
-})
-
-typeMap = encoder.typeMap.copy()
-typeMap.update({
-    univ.Set.typeId: SetOfEncoder(),
-    univ.SetOf.typeId: SetOfEncoder()
-})
-
-class Encoder(encoder.Encoder):
-    def __call__(self, client, defMode=False, maxChunkSize=0):
-        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
-
-encode = Encoder(tagMap, typeMap)
-
-# EncoderFactory queries class instance and builds a map of tags -> encoders
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/decoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/decoder.py
deleted file mode 100644
index ea58d6d..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/decoder.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# DER decoder
-from pyasn1.codec.cer import decoder
-
-tagMap = decoder.tagMap
-typeMap = decoder.typeMap
-class Decoder(decoder.Decoder):
-    supportIndefLength = False
-
-decode = Decoder(tagMap, typeMap)
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/encoder.py b/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/encoder.py
deleted file mode 100644
index 7f55eeb..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/codec/der/encoder.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# DER encoder
-from pyasn1.type import univ
-from pyasn1.codec.cer import encoder
-from pyasn1 import error
-
-class SetOfEncoder(encoder.SetOfEncoder):
-    def _cmpSetComponents(self, c1, c2):
-        tagSet1 = isinstance(c1, univ.Choice) and \
-                  c1.getEffectiveTagSet() or c1.getTagSet()
-        tagSet2 = isinstance(c2, univ.Choice) and \
-                  c2.getEffectiveTagSet() or c2.getTagSet()        
-        return cmp(tagSet1, tagSet2)
-
-tagMap = encoder.tagMap.copy()
-tagMap.update({
-    # Overload CER encoders with BER ones (a bit hackerish XXX)
-    univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
-    univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
-    # Set & SetOf have same tags
-    univ.SetOf().tagSet: SetOfEncoder()
-})
-
-typeMap = encoder.typeMap
-
-class Encoder(encoder.Encoder):
-    supportIndefLength = False
-    def __call__(self, client, defMode=True, maxChunkSize=0):
-        if not defMode:
-            raise error.PyAsn1Error('DER forbids indefinite length mode')
-        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
-
-encode = Encoder(tagMap, typeMap)
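Note (illustrative, not part of the patch): the removed DER encoder forces definite-length mode, so indefinite-length requests fail up front.

from pyasn1.type import univ
from pyasn1.codec.der import encoder as der_encoder

der_encoder.encode(univ.OctetString('abc'))                 # fine: definite lengths only
der_encoder.encode(univ.OctetString('abc'), defMode=False)  # raises PyAsn1Error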
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/compat/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/binary.py b/tools/swarming_client/third_party/pyasn1/pyasn1/compat/binary.py
deleted file mode 100644
index b38932a..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/binary.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from sys import version_info
-
-if version_info[0:2] < (2, 6):
-    def bin(x):
-        if x <= 1:
-            return '0b'+str(x)
-        else:
-            return bin(x>>1) + str(x&1)
-else:
-    bin = bin
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/octets.py b/tools/swarming_client/third_party/pyasn1/pyasn1/compat/octets.py
deleted file mode 100644
index e812737..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/compat/octets.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from sys import version_info
-
-if version_info[0] <= 2:
-    int2oct = chr
-    ints2octs = lambda s: ''.join([ int2oct(x) for x in s ])
-    null = ''
-    oct2int = ord
-    octs2ints = lambda s: [ oct2int(x) for x in s ]
-    str2octs = lambda x: x
-    octs2str = lambda x: x
-    isOctetsType = lambda s: isinstance(s, str)
-    isStringType = lambda s: isinstance(s, (str, unicode))
-else:
-    ints2octs = bytes
-    int2oct = lambda x: ints2octs((x,))
-    null = ints2octs()
-    oct2int = lambda x: x
-    octs2ints = lambda s: [ x for x in s ]
-    str2octs = lambda x: x.encode()
-    octs2str = lambda x: x.decode()
-    isOctetsType = lambda s: isinstance(s, bytes)
-    isStringType = lambda s: isinstance(s, str)
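Note (illustrative, not part of the patch): on Python 3 the removed compatibility shims resolve as shown below.

from pyasn1.compat.octets import int2oct, ints2octs, null, oct2int

ints2octs((0x02, 0x01, 0x05))  # b'\x02\x01\x05' (bytes on Python 3)
int2oct(0x30)                  # b'0'
oct2int(b'\x30'[0])            # 48 (identity on Python 3, ord() on Python 2)
null                           # b''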
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/debug.py b/tools/swarming_client/third_party/pyasn1/pyasn1/debug.py
deleted file mode 100644
index 9b69886..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/debug.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import time
-import logging
-from pyasn1.compat.octets import octs2ints
-from pyasn1 import error
-from pyasn1 import __version__
-
-flagNone     = 0x0000
-flagEncoder  = 0x0001
-flagDecoder  = 0x0002
-flagAll      = 0xffff
-
-flagMap = {
-    'encoder': flagEncoder,
-    'decoder': flagDecoder,
-    'all': flagAll
-    }
-
-class Printer:
-    def __init__(self, logger=None, handler=None, formatter=None):
-        if logger is None:
-            logger = logging.getLogger('pyasn1')
-        logger.setLevel(logging.DEBUG)
-        if handler is None:
-            handler = logging.StreamHandler()
-        if formatter is None:
-            formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
-        handler.setFormatter(formatter)
-        handler.setLevel(logging.DEBUG)
-        logger.addHandler(handler)
-        self.__logger = logger
-
-    def __call__(self, msg): self.__logger.debug(msg)
-    def __str__(self): return '<python built-in logging>'
-
-if hasattr(logging, 'NullHandler'):
-    NullHandler = logging.NullHandler
-else:
-    # Python 2.6 and older
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-
-class Debug:
-    defaultPrinter = None
-    def __init__(self, *flags, **options):
-        self._flags = flagNone
-        if options.get('printer') is not None:
-            self._printer = options.get('printer')
-        elif self.defaultPrinter is not None:
-            self._printer = self.defaultPrinter
-        if 'loggerName' in options: 
-            # route our logs to parent logger
-            self._printer = Printer(
-                logger=logging.getLogger(options['loggerName']),
-                handler=NullHandler()
-            )
-        else:
-            self._printer = Printer()
-        self('running pyasn1 version %s' % __version__)
-        for f in flags:
-            inverse = f and f[0] in ('!', '~')
-            if inverse:
-                f = f[1:]
-            try:
-                if inverse:
-                    self._flags &= ~flagMap[f]
-                else:
-                    self._flags |= flagMap[f]
-            except KeyError:
-                raise error.PyAsn1Error('bad debug flag %s' % f)
-  
-            self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))
-
-    def __str__(self):
-        return 'logger %s, flags %x' % (self._printer, self._flags)
-    
-    def __call__(self, msg):
-        self._printer(msg)
-
-    def __and__(self, flag):
-        return self._flags & flag
-
-    def __rand__(self, flag):
-        return flag & self._flags
-
-logger = 0
-
-def setLogger(l):
-    global logger
-    logger = l
-
-def hexdump(octets):
-    return ' '.join(
-            [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x) 
-              for n,x in zip(range(len(octets)), octs2ints(octets)) ]
-        )
-
-class Scope:
-    def __init__(self):
-        self._list = []
-
-    def __str__(self): return '.'.join(self._list)
-
-    def push(self, token):
-        self._list.append(token)
-
-    def pop(self):
-        return self._list.pop()
-
-scope = Scope()
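Note (illustrative, not part of the patch): the removed debug helper was switched on by installing a Debug instance as the module-level logger ('encoder', 'decoder' and 'all' are the flag names defined above).

from pyasn1 import debug

debug.setLogger(debug.Debug('all'))  # trace both codecs via the default logging handler
# ... encoder/decoder calls emit per-state trace lines here ...
debug.setLogger(0)                   # switch tracing back off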
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/error.py b/tools/swarming_client/third_party/pyasn1/pyasn1/error.py
deleted file mode 100644
index 716406f..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/error.py
+++ /dev/null
@@ -1,3 +0,0 @@
-class PyAsn1Error(Exception): pass
-class ValueConstraintError(PyAsn1Error): pass
-class SubstrateUnderrunError(PyAsn1Error): pass
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/__init__.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/__init__.py
deleted file mode 100644
index 8c3066b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# This file is necessary to make this directory a package.
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/base.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/base.py
deleted file mode 100644
index 72920a9..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/base.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Base classes for ASN.1 types
-import sys
-from pyasn1.type import constraint, tagmap, tag
-from pyasn1 import error
-
-class Asn1Item: pass
-
-class Asn1ItemBase(Asn1Item):
-    # Set of tags for this ASN.1 type
-    tagSet = tag.TagSet()
-    
-    # A list of constraint.Constraint instances for checking values
-    subtypeSpec = constraint.ConstraintsIntersection()
-
-    # Used for ambiguous ASN.1 types identification
-    typeId = None
-    
-    def __init__(self, tagSet=None, subtypeSpec=None):
-        if tagSet is None:
-            self._tagSet = self.tagSet
-        else:
-            self._tagSet = tagSet
-        if subtypeSpec is None:
-            self._subtypeSpec = self.subtypeSpec
-        else:
-            self._subtypeSpec = subtypeSpec
-
-    def _verifySubtypeSpec(self, value, idx=None):
-        try:
-            self._subtypeSpec(value, idx)
-        except error.PyAsn1Error:
-            c, i, t = sys.exc_info()
-            raise c('%s at %s' % (i, self.__class__.__name__))
-        
-    def getSubtypeSpec(self): return self._subtypeSpec
-    
-    def getTagSet(self): return self._tagSet
-    def getEffectiveTagSet(self): return self._tagSet  # used by untagged types
-    def getTagMap(self): return tagmap.TagMap({self._tagSet: self})
-    
-    def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
-        return self is other or \
-               (not matchTags or \
-                self._tagSet == other.getTagSet()) and \
-               (not matchConstraints or \
-                self._subtypeSpec==other.getSubtypeSpec())
-
-    def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
-        """Returns true if argument is a ASN1 subtype of ourselves"""
-        return (not matchTags or  \
-                self._tagSet.isSuperTagSetOf(other.getTagSet())) and \
-               (not matchConstraints or \
-                (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())))
-
-class NoValue:
-    def __getattr__(self, attr):
-        raise error.PyAsn1Error('No value for %s()' % attr)
-    def __getitem__(self, i):
-        raise error.PyAsn1Error('No value')
-    def __repr__(self): return '%s()' % self.__class__.__name__
-    
-noValue = NoValue()
-
-# Base class for "simple" ASN.1 objects. These are immutable.
-class AbstractSimpleAsn1Item(Asn1ItemBase):    
-    defaultValue = noValue
-    def __init__(self, value=None, tagSet=None, subtypeSpec=None):
-        Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
-        if value is None or value is noValue:
-            value = self.defaultValue
-        if value is None or value is noValue:
-            self.__hashedValue = value = noValue
-        else:
-            value = self.prettyIn(value)
-            self._verifySubtypeSpec(value)
-            self.__hashedValue = hash(value)
-        self._value = value
-        self._len = None
-        
-    def __repr__(self):
-        r = []
-        if self._value is not self.defaultValue:
-            r.append(self.prettyOut(self._value))
-        if self._tagSet is not self.tagSet:
-            r.append('tagSet=%r' % (self._tagSet,))
-        if self._subtypeSpec is not self.subtypeSpec:
-            r.append('subtypeSpec=%r' % (self._subtypeSpec,))
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
-
-    def __str__(self): return str(self._value)
-    def __eq__(self, other):
-        return self is other and True or self._value == other
-    def __ne__(self, other): return self._value != other
-    def __lt__(self, other): return self._value < other
-    def __le__(self, other): return self._value <= other
-    def __gt__(self, other): return self._value > other
-    def __ge__(self, other): return self._value >= other
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(self._value)
-    else:
-        def __bool__(self): return bool(self._value)
-    def __hash__(self):
-        return self.__hashedValue is noValue and hash(noValue) or self.__hashedValue
-
-    def hasValue(self):
-        return not isinstance(self._value, NoValue)
-
-    def clone(self, value=None, tagSet=None, subtypeSpec=None):
-        if value is None and tagSet is None and subtypeSpec is None:
-            return self
-        if value is None:
-            value = self._value
-        if tagSet is None:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        return self.__class__(value, tagSet, subtypeSpec)
-
-    def subtype(self, value=None, implicitTag=None, explicitTag=None,
-                subtypeSpec=None):
-        if value is None:
-            value = self._value
-        if implicitTag is not None:
-            tagSet = self._tagSet.tagImplicitly(implicitTag)
-        elif explicitTag is not None:
-            tagSet = self._tagSet.tagExplicitly(explicitTag)
-        else:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        else:
-            subtypeSpec = subtypeSpec + self._subtypeSpec
-        return self.__class__(value, tagSet, subtypeSpec)
-
-    def prettyIn(self, value): return value
-    def prettyOut(self, value): return str(value)
-
-    def prettyPrint(self, scope=0):
-        if self.hasValue():
-            return self.prettyOut(self._value)
-        else:
-            return '<no value>'
-
-    # XXX Compatibility stub
-    def prettyPrinter(self, scope=0): return self.prettyPrint(scope)
-    
-    def prettyPrintType(self, scope=0):
-        return '%s -> %s' % (self.getTagSet(), self.__class__.__name__)
-
-#
-# Constructed types:
-# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
-# * ASN.1 types and values are represented by Python class instances
-# * Value initialization is made for defaulted components only
-# * Primary method of component addressing is by-position. Data model for base
-#   type is Python sequence. Additional type-specific addressing methods
-#   may be implemented for particular types.
-# * SequenceOf and SetOf types do not implement any additional methods
-# * Sequence, Set and Choice types also implement by-identifier addressing
-# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
-# * Sequence and Set types may include optional and defaulted
-#   components
-# * Constructed types hold a reference to component types used for value
-#   verification and ordering.
-# * Component type is a scalar type for SequenceOf/SetOf types and a list
-#   of types for Sequence/Set/Choice.
-#
-
-class AbstractConstructedAsn1Item(Asn1ItemBase):
-    componentType = None
-    sizeSpec = constraint.ConstraintsIntersection()
-    def __init__(self, componentType=None, tagSet=None,
-                 subtypeSpec=None, sizeSpec=None):
-        Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
-        if componentType is None:
-            self._componentType = self.componentType
-        else:
-            self._componentType = componentType
-        if sizeSpec is None:
-            self._sizeSpec = self.sizeSpec
-        else:
-            self._sizeSpec = sizeSpec
-        self._componentValues = []
-        self._componentValuesSet = 0
-
-    def __repr__(self):
-        r = []
-        if self._componentType is not self.componentType:
-            r.append('componentType=%r' % (self._componentType,))
-        if self._tagSet is not self.tagSet:
-            r.append('tagSet=%r' % (self._tagSet,))
-        if self._subtypeSpec is not self.subtypeSpec:
-            r.append('subtypeSpec=%r' % (self._subtypeSpec,))
-        r = '%s(%s)' % (self.__class__.__name__, ', '.join(r))
-        if self._componentValues:
-            r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues])
-        return r
-
-    def __eq__(self, other):
-        return self is other and True or self._componentValues == other
-    def __ne__(self, other): return self._componentValues != other
-    def __lt__(self, other): return self._componentValues < other
-    def __le__(self, other): return self._componentValues <= other
-    def __gt__(self, other): return self._componentValues > other
-    def __ge__(self, other): return self._componentValues >= other
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(self._componentValues)
-    else:
-        def __bool__(self): return bool(self._componentValues)
-
-    def getComponentTagMap(self):
-        raise error.PyAsn1Error('Method not implemented')
-
-    def _cloneComponentValues(self, myClone, cloneValueFlag): pass
-
-    def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, 
-              cloneValueFlag=None):
-        if tagSet is None:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        if sizeSpec is None:
-            sizeSpec = self._sizeSpec
-        r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
-        if cloneValueFlag:
-            self._cloneComponentValues(r, cloneValueFlag)
-        return r
-
-    def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None,
-                sizeSpec=None, cloneValueFlag=None):
-        if implicitTag is not None:
-            tagSet = self._tagSet.tagImplicitly(implicitTag)
-        elif explicitTag is not None:
-            tagSet = self._tagSet.tagExplicitly(explicitTag)
-        else:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        else:
-            subtypeSpec = subtypeSpec + self._subtypeSpec
-        if sizeSpec is None:
-            sizeSpec = self._sizeSpec
-        else:
-            sizeSpec = sizeSpec + self._sizeSpec
-        r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec)
-        if cloneValueFlag:
-            self._cloneComponentValues(r, cloneValueFlag)
-        return r
-
-    def _verifyComponent(self, idx, value): pass
-
-    def verifySizeSpec(self): self._sizeSpec(self)
-
-    def getComponentByPosition(self, idx):
-        raise error.PyAsn1Error('Method not implemented')
-    def setComponentByPosition(self, idx, value, verifyConstraints=True):
-        raise error.PyAsn1Error('Method not implemented')
-
-    def setComponents(self, *args, **kwargs):
-        for idx in range(len(args)):
-            self[idx] = args[idx]
-        for k in kwargs:
-            self[k] = kwargs[k]
-        return self
-
-    def getComponentType(self): return self._componentType
-
-    def setDefaultComponents(self): pass
-
-    def __getitem__(self, idx): return self.getComponentByPosition(idx)
-    def __setitem__(self, idx, value): self.setComponentByPosition(idx, value)
-
-    def __len__(self): return len(self._componentValues)
-    
-    def clear(self):
-        self._componentValues = []
-        self._componentValuesSet = 0
-
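
The fragment above is the tail of the abstract constructed-type base class; its setComponents()/__setitem__()/clear() protocol is what the SetOf/SequenceOf types deleted further down (univ.py) build on. A brief illustrative sketch of that protocol, assuming the pyasn1 univ module removed later in this change (not part of the patch itself):

    from pyasn1.type import univ

    seq = univ.SequenceOf(componentType=univ.Integer())
    seq.setComponentByPosition(0, 1)   # plain ints are cloned into Integer()
    seq[1] = 2                         # __setitem__ delegates to the same call
    seq.setComponents(10, 20, 30)      # positional args fill indices 0..2
    assert len(seq) == 3 and int(seq[0]) == 10
    seq.clear()                        # drops all component values
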
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/char.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/char.py
deleted file mode 100644
index af49ab3..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/char.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# ASN.1 "character string" types
-from pyasn1.type import univ, tag
-
-class NumericString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
-        )
-
-class PrintableString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
-        )
-
-class TeletexString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
-        )
-
-class T61String(TeletexString): pass
-
-class VideotexString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
-        )
-
-class IA5String(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
-        )
-
-class GraphicString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
-        )
-
-class VisibleString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
-        )
-
-class ISO646String(VisibleString): pass
-
-class GeneralString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
-        )
-
-class UniversalString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
-        )
-    encoding = "utf-32-be"
-
-class BMPString(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
-        )
-    encoding = "utf-16-be"
-
-class UTF8String(univ.OctetString):
-    tagSet = univ.OctetString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
-        )
-    encoding = "utf-8"
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/constraint.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/constraint.py
deleted file mode 100644
index 6687393..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/constraint.py
+++ /dev/null
@@ -1,200 +0,0 @@
-#
-#   ASN.1 subtype constraints classes.
-#
-#   Constraints are relatively rare, but every ASN.1 object
-#   constantly checks whether it has any constraints and
-#   whether they are applicable to the object.
-#
-#   What we're going to do is define objects/functions that
-#   can be called unconditionally if they are present, and that
-#   are simply not present if there are no constraints.
-#
-#   Original concept and code by Mike C. Fletcher.
-#
-import sys
-from pyasn1.type import error
-
-class AbstractConstraint:
-    """Abstract base-class for constraint objects
-
-       Constraints should be stored in a simple sequence in the
-       namespace of their client Asn1Item sub-classes.
-    """
-    def __init__(self, *values):
-        self._valueMap = {}
-        self._setValues(values)
-        self.__hashedValues = None
-    def __call__(self, value, idx=None):
-        try:
-            self._testValue(value, idx)
-        except error.ValueConstraintError:
-            raise error.ValueConstraintError(
-               '%s failed at: \"%s\"' % (self, sys.exc_info()[1])
-            )
-    def __repr__(self):
-        return '%s(%s)' % (
-            self.__class__.__name__,
-            ', '.join([repr(x) for x in self._values])
-        )
-    def __eq__(self, other):
-        return self is other and True or self._values == other
-    def __ne__(self, other): return self._values != other
-    def __lt__(self, other): return self._values < other
-    def __le__(self, other): return self._values <= other
-    def __gt__(self, other): return self._values > other
-    def __ge__(self, other): return self._values >= other
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(self._values)
-    else:
-        def __bool__(self): return bool(self._values)
-
-    def __hash__(self):
-        if self.__hashedValues is None:
-            self.__hashedValues = hash((self.__class__.__name__, self._values))
-        return self.__hashedValues
-
-    def _setValues(self, values): self._values = values
-    def _testValue(self, value, idx):
-        raise error.ValueConstraintError(value)
-
-    # Constraints derivation logic
-    def getValueMap(self): return self._valueMap
-    def isSuperTypeOf(self, otherConstraint):
-        return self in otherConstraint.getValueMap() or \
-               otherConstraint is self or otherConstraint == self
-    def isSubTypeOf(self, otherConstraint):
-        return otherConstraint in self._valueMap or \
-               otherConstraint is self or otherConstraint == self
-
-class SingleValueConstraint(AbstractConstraint):
-    """Value must be part of defined values constraint"""
-    def _testValue(self, value, idx):
-        # XXX index vals for performance?
-        if value not in self._values:
-            raise error.ValueConstraintError(value)
-
-class ContainedSubtypeConstraint(AbstractConstraint):
-    """Value must satisfy all of defined set of constraints"""
-    def _testValue(self, value, idx):
-        for c in self._values:
-            c(value, idx)
-
-class ValueRangeConstraint(AbstractConstraint):
-    """Value must be within start and stop values (inclusive)"""
-    def _testValue(self, value, idx):
-        if value < self.start or value > self.stop:
-            raise error.ValueConstraintError(value)
-
-    def _setValues(self, values):
-        if len(values) != 2:
-            raise error.PyAsn1Error(
-                '%s: bad constraint values' % (self.__class__.__name__,)
-                )
-        self.start, self.stop = values
-        if self.start > self.stop:
-            raise error.PyAsn1Error(
-                '%s: screwed constraint values (start > stop): %s > %s' % (
-                    self.__class__.__name__,
-                    self.start, self.stop
-                )
-            )
-        AbstractConstraint._setValues(self, values)
-        
-class ValueSizeConstraint(ValueRangeConstraint):
-    """len(value) must be within start and stop values (inclusive)"""
-    def _testValue(self, value, idx):
-        l = len(value)
-        if l < self.start or l > self.stop:
-            raise error.ValueConstraintError(value)
-
-class PermittedAlphabetConstraint(SingleValueConstraint):
-    def _setValues(self, values):
-        self._values = ()
-        for v in values:
-            self._values = self._values + tuple(v)
-
-    def _testValue(self, value, idx):
-        for v in value:
-            if v not in self._values:
-                raise error.ValueConstraintError(value)
-
-# This is a bit kludgy, meaning two op modes within a single constraint
-class InnerTypeConstraint(AbstractConstraint):
-    """Value must satisfy type and presense constraints"""
-    def _testValue(self, value, idx):
-        if self.__singleTypeConstraint:
-            self.__singleTypeConstraint(value)
-        elif self.__multipleTypeConstraint:
-            if idx not in self.__multipleTypeConstraint:
-                raise error.ValueConstraintError(value)
-            constraint, status = self.__multipleTypeConstraint[idx]
-            if status == 'ABSENT':   # XXX presence is not checked!
-                raise error.ValueConstraintError(value)
-            constraint(value)
-
-    def _setValues(self, values):
-        self.__multipleTypeConstraint = {}
-        self.__singleTypeConstraint = None
-        for v in values:
-            if isinstance(v, tuple):
-                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
-            else:
-                self.__singleTypeConstraint = v
-        AbstractConstraint._setValues(self, values)
-
-# Boolean ops on constraints 
-
-class ConstraintsExclusion(AbstractConstraint):
-    """Value must not fit the single constraint"""
-    def _testValue(self, value, idx):
-        try:
-            self._values[0](value, idx)
-        except error.ValueConstraintError:
-            return
-        else:
-            raise error.ValueConstraintError(value)
-
-    def _setValues(self, values):
-        if len(values) != 1:
-            raise error.PyAsn1Error('Single constraint expected')
-        AbstractConstraint._setValues(self, values)
-
-class AbstractConstraintSet(AbstractConstraint):
-    """Value must not satisfy the single constraint"""
-    def __getitem__(self, idx): return self._values[idx]
-
-    def __add__(self, value): return self.__class__(self, value)
-    def __radd__(self, value): return self.__class__(self, value)
-
-    def __len__(self): return len(self._values)
-
-    # Constraints inclusion in sets
-    
-    def _setValues(self, values):
-        self._values = values
-        for v in values:
-            self._valueMap[v] = 1
-            self._valueMap.update(v.getValueMap())
-
-class ConstraintsIntersection(AbstractConstraintSet):
-    """Value must satisfy all constraints"""
-    def _testValue(self, value, idx):
-        for v in self._values:
-            v(value, idx)
-
-class ConstraintsUnion(AbstractConstraintSet):
-    """Value must satisfy at least one constraint"""
-    def _testValue(self, value, idx):
-        for v in self._values:
-            try:
-                v(value, idx)
-            except error.ValueConstraintError:
-                pass
-            else:
-                return
-        raise error.ValueConstraintError(
-            'all of %s failed for \"%s\"' % (self._values, value)
-            )
-
-# XXX
-# add tests for type check
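
For context on the constraint module deleted above: each constraint is a plain callable that silently accepts a value or raises error.ValueConstraintError, and the set classes (ConstraintsIntersection, ConstraintsUnion) derive super/subtype relations from the value maps they build. A minimal usage sketch against that public pyasn1 API (illustrative only, not taken from this tree):

    from pyasn1.type import constraint, error

    range_spec = constraint.ValueRangeConstraint(0, 255)
    spec = constraint.ConstraintsIntersection(
        range_spec,                                   # value must lie in 0..255
        constraint.SingleValueConstraint(0, 1, 255),  # ...and be one of these
    )

    spec(255)                        # satisfies both members, returns None
    try:
        spec(7)                      # in range, but not a permitted value
    except error.ValueConstraintError:
        pass                         # violations always surface as this exception

    # Set classes record their members in a value map, which drives the
    # isSuperTypeOf()/isSubTypeOf() checks used when subtyping ASN.1 types.
    assert range_spec.isSuperTypeOf(spec)
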
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/error.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/error.py
deleted file mode 100644
index 3e68484..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/error.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from pyasn1.error import PyAsn1Error
-
-class ValueConstraintError(PyAsn1Error): pass
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedtype.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedtype.py
deleted file mode 100644
index aca4282..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedtype.py
+++ /dev/null
@@ -1,149 +0,0 @@
-# NamedType specification for constructed types
-import sys
-from pyasn1.type import tagmap
-from pyasn1 import error
-
-class NamedType:
-    isOptional = 0
-    isDefaulted = 0
-    def __init__(self, name, t):
-        self.__name = name; self.__type = t
-    def __repr__(self): return '%s(%r, %r)' % (
-        self.__class__.__name__, self.__name, self.__type
-        )
-    def __eq__(self, other): return tuple(self) == tuple(other)
-    def __ne__(self, other): return tuple(self) != tuple(other)
-    def __lt__(self, other): return tuple(self) < tuple(other)
-    def __le__(self, other): return tuple(self) <= tuple(other)
-    def __gt__(self, other): return tuple(self) > tuple(other)
-    def __ge__(self, other): return tuple(self) >= tuple(other)
-    def __hash__(self): return hash(tuple(self))
- 
-    def getType(self): return self.__type
-    def getName(self): return self.__name
-    def __getitem__(self, idx):
-        if idx == 0: return self.__name
-        if idx == 1: return self.__type
-        raise IndexError()
-    
-class OptionalNamedType(NamedType):
-    isOptional = 1
-class DefaultedNamedType(NamedType):
-    isDefaulted = 1
-    
-class NamedTypes:
-    def __init__(self, *namedTypes):
-        self.__namedTypes = namedTypes
-        self.__namedTypesLen = len(self.__namedTypes)
-        self.__minTagSet = None
-        self.__tagToPosIdx = {}; self.__nameToPosIdx = {}
-        self.__tagMap = { False: None, True: None }
-        self.__ambigiousTypes = {}
-
-    def __repr__(self):
-        return '%s(%s)' % (
-            self.__class__.__name__,
-            ', '.join([ repr(x) for x in self.__namedTypes ])
-        )
-    def __eq__(self, other): return tuple(self) == tuple(other)
-    def __ne__(self, other): return tuple(self) != tuple(other)
-    def __lt__(self, other): return tuple(self) < tuple(other)
-    def __le__(self, other): return tuple(self) <= tuple(other)
-    def __gt__(self, other): return tuple(self) > tuple(other)
-    def __ge__(self, other): return tuple(self) >= tuple(other)
-    def __hash__(self): return hash(tuple(self))
-   
-    def __getitem__(self, idx): return self.__namedTypes[idx]
-
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(self.__namedTypesLen)
-    else:
-        def __bool__(self): return bool(self.__namedTypesLen)
-    def __len__(self): return self.__namedTypesLen
-   
-    def clone(self): return self.__class__(*self.__namedTypes)
-     
-    def getTypeByPosition(self, idx):
-        if idx < 0 or idx >= self.__namedTypesLen:
-            raise error.PyAsn1Error('Type position out of range')
-        else:
-            return self.__namedTypes[idx].getType()
-
-    def getPositionByType(self, tagSet):
-        if not self.__tagToPosIdx:
-            idx = self.__namedTypesLen
-            while idx > 0:
-                idx = idx - 1
-                tagMap = self.__namedTypes[idx].getType().getTagMap()
-                for t in tagMap.getPosMap():
-                    if t in self.__tagToPosIdx:
-                        raise error.PyAsn1Error('Duplicate type %s' % (t,))
-                    self.__tagToPosIdx[t] = idx
-        try:
-            return self.__tagToPosIdx[tagSet]
-        except KeyError:
-            raise error.PyAsn1Error('Type %s not found' % (tagSet,))
-        
-    def getNameByPosition(self, idx):
-        try:
-            return self.__namedTypes[idx].getName()
-        except IndexError:
-            raise error.PyAsn1Error('Type position out of range')
-    def getPositionByName(self, name):
-        if not self.__nameToPosIdx:
-            idx = self.__namedTypesLen
-            while idx > 0:
-                idx = idx - 1
-                n = self.__namedTypes[idx].getName()
-                if n in self.__nameToPosIdx:
-                    raise error.PyAsn1Error('Duplicate name %s' % (n,))
-                self.__nameToPosIdx[n] = idx
-        try:
-            return self.__nameToPosIdx[name]
-        except KeyError:
-            raise error.PyAsn1Error('Name %s not found' % (name,))
-
-    def __buildAmbigiousTagMap(self):
-        ambigiousTypes = ()
-        idx = self.__namedTypesLen
-        while idx > 0:
-            idx = idx - 1
-            t = self.__namedTypes[idx]
-            if t.isOptional or t.isDefaulted:
-                ambigiousTypes = (t, ) + ambigiousTypes
-            else:
-                ambigiousTypes = (t, )
-            self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes)
-        
-    def getTagMapNearPosition(self, idx):
-        if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
-        try:
-            return self.__ambigiousTypes[idx].getTagMap()
-        except KeyError:
-            raise error.PyAsn1Error('Type position out of range')
-
-    def getPositionNearType(self, tagSet, idx):
-        if not self.__ambigiousTypes: self.__buildAmbigiousTagMap()
-        try:
-            return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet)
-        except KeyError:
-            raise error.PyAsn1Error('Type position out of range')
-
-    def genMinTagSet(self):
-        if self.__minTagSet is None:
-            for t in self.__namedTypes:
-                __type = t.getType()
-                tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)()
-                if self.__minTagSet is None or tagSet < self.__minTagSet:
-                    self.__minTagSet = tagSet
-        return self.__minTagSet
-    
-    def getTagMap(self, uniq=False):
-        if self.__tagMap[uniq] is None:
-            tagMap = tagmap.TagMap()
-            for nt in self.__namedTypes:
-                tagMap = tagMap.clone(
-                    nt.getType(), nt.getType().getTagMap(), uniq
-                    )
-            self.__tagMap[uniq] = tagMap
-        return self.__tagMap[uniq]
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedval.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedval.py
deleted file mode 100644
index 676cb93..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/namedval.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# ASN.1 named integers
-from pyasn1 import error
-
-__all__ = [ 'NamedValues' ]
-
-class NamedValues:
-    def __init__(self, *namedValues):
-        self.nameToValIdx = {}; self.valToNameIdx = {}
-        self.namedValues = ()        
-        automaticVal = 1
-        for namedValue in namedValues:
-            if isinstance(namedValue, tuple):
-                name, val = namedValue
-            else:
-                name = namedValue
-                val = automaticVal
-            if name in self.nameToValIdx:
-                raise error.PyAsn1Error('Duplicate name %s' % (name,))
-            self.nameToValIdx[name] = val
-            if val in self.valToNameIdx:
-                raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
-            self.valToNameIdx[val] = name
-            self.namedValues = self.namedValues + ((name, val),)
-            automaticVal = automaticVal + 1
-
-    def __repr__(self):
-        return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
-
-    def __str__(self): return str(self.namedValues)
-
-    def __eq__(self, other): return tuple(self) == tuple(other)
-    def __ne__(self, other): return tuple(self) != tuple(other)
-    def __lt__(self, other): return tuple(self) < tuple(other)
-    def __le__(self, other): return tuple(self) <= tuple(other)
-    def __gt__(self, other): return tuple(self) > tuple(other)
-    def __ge__(self, other): return tuple(self) >= tuple(other)
-    def __hash__(self): return hash(tuple(self))
-    
-    def getName(self, value):
-        if value in self.valToNameIdx:
-            return self.valToNameIdx[value]
-
-    def getValue(self, name):
-        if name in self.nameToValIdx:
-            return self.nameToValIdx[name]
-    
-    def __getitem__(self, i): return self.namedValues[i]
-    def __len__(self): return len(self.namedValues)
-
-    def __add__(self, namedValues):
-        return self.__class__(*self.namedValues + namedValues)
-    def __radd__(self, namedValues):
-        return self.__class__(*namedValues + tuple(self))
-        
-    def clone(self, *namedValues):
-        return self.__class__(*tuple(self) + namedValues)
-
-# XXX clone/subtype?
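
The NamedValues class removed here backs the named-number support of Integer/BitString (see univ.py below): bare names are numbered automatically starting at 1, while (name, value) tuples set explicit values. A small illustrative sketch of that behaviour, not part of this change:

    from pyasn1.type import namedval

    flags = namedval.NamedValues('up', 'down', ('testing', 5))
    assert flags.getValue('up') == 1      # automatic numbering starts at 1
    assert flags.getValue('down') == 2
    assert flags.getName(5) == 'testing'  # explicit (name, value) pair

    # __add__ builds a new NamedValues; duplicate names or values raise PyAsn1Error.
    extended = flags + (('dormant', 6),)
    assert extended.getName(6) == 'dormant'
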
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/tag.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/tag.py
deleted file mode 100644
index 7471a9b..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/tag.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# ASN.1 types tags
-from operator import getitem
-from pyasn1 import error
-
-tagClassUniversal = 0x00
-tagClassApplication = 0x40
-tagClassContext = 0x80
-tagClassPrivate = 0xC0
-
-tagFormatSimple = 0x00
-tagFormatConstructed = 0x20
-
-tagCategoryImplicit = 0x01
-tagCategoryExplicit = 0x02
-tagCategoryUntagged = 0x04
-
-class Tag:
-    def __init__(self, tagClass, tagFormat, tagId):
-        if tagId < 0:
-            raise error.PyAsn1Error(
-                'Negative tag ID (%s) not allowed' % (tagId,)
-                )
-        self.__tag = (tagClass, tagFormat, tagId)
-        self.uniq = (tagClass, tagId)
-        self.__hashedUniqTag = hash(self.uniq)
-
-    def __str__(self):
-        return '[%s:%s:%s]' % self.__tag
-
-    def __repr__(self):
-        return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
-            (self.__class__.__name__,) + self.__tag
-            )
-    # This is really a hotspot -- expose public "uniq" attribute to save on
-    # function calls
-    def __eq__(self, other): return self.uniq == other.uniq
-    def __ne__(self, other): return self.uniq != other.uniq
-    def __lt__(self, other): return self.uniq < other.uniq
-    def __le__(self, other): return self.uniq <= other.uniq
-    def __gt__(self, other): return self.uniq > other.uniq
-    def __ge__(self, other): return self.uniq >= other.uniq
-    def __hash__(self): return self.__hashedUniqTag
-    def __getitem__(self, idx): return self.__tag[idx]
-    def __and__(self, otherTag):
-        (tagClass, tagFormat, tagId) = otherTag
-        return self.__class__(
-            self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId
-            )
-    def __or__(self, otherTag):
-        (tagClass, tagFormat, tagId) = otherTag
-        return self.__class__(
-            self.__tag[0]|tagClass,
-            self.__tag[1]|tagFormat,
-            self.__tag[2]|tagId
-            )
-    def asTuple(self): return self.__tag  # __getitem__() is slow
-    
-class TagSet:
-    def __init__(self, baseTag=(), *superTags):
-        self.__baseTag = baseTag
-        self.__superTags = superTags
-        self.__hashedSuperTags = hash(superTags)
-        _uniq = ()
-        for t in superTags:
-            _uniq = _uniq + t.uniq
-        self.uniq = _uniq
-        self.__lenOfSuperTags = len(superTags)
-
-    def __str__(self):
-        return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'
- 
-    def __repr__(self):
-        return '%s(%s)' % (
-            self.__class__.__name__,
-            '(), ' + ', '.join([repr(x) for x in self.__superTags])
-            )
-
-    def __add__(self, superTag):
-        return self.__class__(
-            self.__baseTag, *self.__superTags + (superTag,)
-            )
-    def __radd__(self, superTag):
-        return self.__class__(
-            self.__baseTag, *(superTag,) + self.__superTags
-            )
-
-    def tagExplicitly(self, superTag):
-        tagClass, tagFormat, tagId = superTag
-        if tagClass == tagClassUniversal:
-            raise error.PyAsn1Error(
-                'Can\'t tag with UNIVERSAL-class tag'
-                )
-        if tagFormat != tagFormatConstructed:
-            superTag = Tag(tagClass, tagFormatConstructed, tagId)
-        return self + superTag
-
-    def tagImplicitly(self, superTag):
-        tagClass, tagFormat, tagId = superTag
-        if self.__superTags:
-            superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
-        return self[:-1] + superTag
-
-    def getBaseTag(self): return self.__baseTag
-    def __getitem__(self, idx):
-        if isinstance(idx, slice):
-            return self.__class__(
-               self.__baseTag, *getitem(self.__superTags, idx)
-            )
-        return self.__superTags[idx]
-    def __eq__(self, other): return self.uniq == other.uniq
-    def __ne__(self, other): return self.uniq != other.uniq
-    def __lt__(self, other): return self.uniq < other.uniq
-    def __le__(self, other): return self.uniq <= other.uniq
-    def __gt__(self, other): return self.uniq > other.uniq
-    def __ge__(self, other): return self.uniq >= other.uniq
-    def __hash__(self): return self.__hashedSuperTags
-    def __len__(self): return self.__lenOfSuperTags
-    def isSuperTagSetOf(self, tagSet):
-        if len(tagSet) < self.__lenOfSuperTags:
-            return
-        idx = self.__lenOfSuperTags - 1
-        while idx >= 0:
-            if self.__superTags[idx] != tagSet[idx]:
-                return
-            idx = idx - 1
-        return 1
-    
-def initTagSet(tag): return TagSet(tag, tag)
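
The Tag/TagSet machinery deleted above is what the character-string and universal types earlier in this change use to retag themselves (e.g. univ.OctetString.tagSet.tagImplicitly(...)). Roughly, implicit tagging replaces the outermost tag while explicit tagging appends an extra constructed tag; a short illustrative sketch against that API (not part of the patch):

    from pyasn1.type import tag

    # [APPLICATION 1], primitive form, as both base and outermost tag
    base = tag.initTagSet(tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 1))

    implicit = base.tagImplicitly(tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
    assert len(implicit) == 1            # outermost tag replaced in place

    explicit = base.tagExplicitly(tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
    assert len(explicit) == 2            # extra constructed tag appended
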
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/tagmap.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/tagmap.py
deleted file mode 100644
index feb91ae..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/tagmap.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from pyasn1 import error
-
-class TagMap:
-    def __init__(self, posMap={}, negMap={}, defType=None):
-        self.__posMap = posMap.copy()
-        self.__negMap = negMap.copy()
-        self.__defType = defType
-        
-    def __contains__(self, tagSet):
-        return tagSet in self.__posMap or \
-               self.__defType is not None and tagSet not in self.__negMap
-
-    def __getitem__(self, tagSet):
-        if tagSet in self.__posMap:
-            return self.__posMap[tagSet]
-        elif tagSet in self.__negMap:
-            raise error.PyAsn1Error('Key in negative map')
-        elif self.__defType is not None:
-            return self.__defType
-        else:
-            raise KeyError()
-
-    def __repr__(self):
-        s = self.__class__.__name__ + '('
-        if self.__posMap:
-            s = s + 'posMap=%r, ' % (self.__posMap,)
-        if self.__negMap:
-            s = s + 'negMap=%r, ' % (self.__negMap,)
-        if self.__defType is not None:
-            s = s + 'defType=%r' % (self.__defType,)
-        return s + ')'
-
-    def __str__(self):
-        s = self.__class__.__name__ + ':\n'
-        if self.__posMap:
-            s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()])
-        if self.__negMap:
-            s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()])
-        if self.__defType is not None:
-            s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType()
-        return s
-
-    def clone(self, parentType, tagMap, uniq=False):
-        if self.__defType is not None and tagMap.getDef() is not None:
-            raise error.PyAsn1Error('Duplicate default value at %s' % (self,))
-        if tagMap.getDef() is not None:
-            defType = tagMap.getDef()
-        else:
-            defType = self.__defType
-            
-        posMap = self.__posMap.copy()
-        for k in tagMap.getPosMap():
-            if uniq and k in posMap:
-                raise error.PyAsn1Error('Duplicate positive key %s' % (k,))
-            posMap[k] = parentType
-
-        negMap = self.__negMap.copy()
-        negMap.update(tagMap.getNegMap())
-        
-        return self.__class__(
-            posMap, negMap, defType,
-            )
-
-    def getPosMap(self): return self.__posMap.copy()
-    def getNegMap(self): return self.__negMap.copy()
-    def getDef(self): return self.__defType
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/univ.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/univ.py
deleted file mode 100644
index 4ed640f..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/univ.py
+++ /dev/null
@@ -1,1156 +0,0 @@
-# ASN.1 "universal" data types
-import operator, sys, math
-from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
-from pyasn1.codec.ber import eoo
-from pyasn1.compat import octets
-from pyasn1 import error
-
-# "Simple" ASN.1 types (yet incomplete)
-
-class Integer(base.AbstractSimpleAsn1Item):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02)
-        )
-    namedValues = namedval.NamedValues()
-    def __init__(self, value=None, tagSet=None, subtypeSpec=None,
-                 namedValues=None):
-        if namedValues is None:
-            self.__namedValues = self.namedValues
-        else:
-            self.__namedValues = namedValues
-        base.AbstractSimpleAsn1Item.__init__(
-            self, value, tagSet, subtypeSpec
-            )
-
-    def __repr__(self):
-        if self.__namedValues is not self.namedValues:
-            return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues)
-        else:
-            return base.AbstractSimpleAsn1Item.__repr__(self)
-
-    def __and__(self, value): return self.clone(self._value & value)
-    def __rand__(self, value): return self.clone(value & self._value)
-    def __or__(self, value): return self.clone(self._value | value)
-    def __ror__(self, value): return self.clone(value | self._value)
-    def __xor__(self, value): return self.clone(self._value ^ value)
-    def __rxor__(self, value): return self.clone(value ^ self._value)
-    def __lshift__(self, value): return self.clone(self._value << value)
-    def __rshift__(self, value): return self.clone(self._value >> value)
-
-    def __add__(self, value): return self.clone(self._value + value)
-    def __radd__(self, value): return self.clone(value + self._value)
-    def __sub__(self, value): return self.clone(self._value - value)
-    def __rsub__(self, value): return self.clone(value - self._value)
-    def __mul__(self, value): return self.clone(self._value * value)
-    def __rmul__(self, value): return self.clone(value * self._value)
-    def __mod__(self, value): return self.clone(self._value % value)
-    def __rmod__(self, value): return self.clone(value % self._value)
-    def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo))
-    def __rpow__(self, value): return self.clone(pow(value, self._value))
-
-    if sys.version_info[0] <= 2:
-        def __div__(self, value):  return self.clone(self._value // value)
-        def __rdiv__(self, value):  return self.clone(value // self._value)
-    else:
-        def __truediv__(self, value):  return self.clone(self._value / value)
-        def __rtruediv__(self, value):  return self.clone(value / self._value)
-        def __divmod__(self, value):  return self.clone(self._value // value)
-        def __rdivmod__(self, value):  return self.clone(value // self._value)
-
-        __hash__ = base.AbstractSimpleAsn1Item.__hash__
-
-    def __int__(self): return int(self._value)
-    if sys.version_info[0] <= 2:
-        def __long__(self): return long(self._value)
-    def __float__(self): return float(self._value)    
-    def __abs__(self): return self.clone(abs(self._value))
-    def __index__(self): return int(self._value)
-    def __pos__(self): return self.clone(+self._value)
-    def __neg__(self): return self.clone(-self._value)
-    def __invert__(self): return self.clone(~self._value)
-    def __round__(self, n=0):
-        r = round(self._value, n)
-        if n:
-            return self.clone(r)
-        else:
-            return r
-    def __floor__(self): return math.floor(self._value)
-    def __ceil__(self): return math.ceil(self._value)
-    if sys.version_info[0:2] > (2, 5):
-        def __trunc__(self): return self.clone(math.trunc(self._value))
-
-    def __lt__(self, value): return self._value < value
-    def __le__(self, value): return self._value <= value
-    def __eq__(self, value): return self._value == value
-    def __ne__(self, value): return self._value != value
-    def __gt__(self, value): return self._value > value
-    def __ge__(self, value): return self._value >= value
-
-    def prettyIn(self, value):
-        if not isinstance(value, str):
-            try:
-                return int(value)
-            except:
-                raise error.PyAsn1Error(
-                    'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
-                    )
-        r = self.__namedValues.getValue(value)
-        if r is not None:
-            return r
-        try:
-            return int(value)
-        except:
-            raise error.PyAsn1Error(
-                'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
-                )
-
-    def prettyOut(self, value):
-        r = self.__namedValues.getName(value)
-        return r is None and str(value) or repr(r)
-
-    def getNamedValues(self): return self.__namedValues
-
-    def clone(self, value=None, tagSet=None, subtypeSpec=None,
-              namedValues=None):
-        if value is None and tagSet is None and subtypeSpec is None \
-               and namedValues is None:
-            return self
-        if value is None:
-            value = self._value
-        if tagSet is None:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        if namedValues is None:
-            namedValues = self.__namedValues
-        return self.__class__(value, tagSet, subtypeSpec, namedValues)
-
-    def subtype(self, value=None, implicitTag=None, explicitTag=None,
-                subtypeSpec=None, namedValues=None):
-        if value is None:
-            value = self._value
-        if implicitTag is not None:
-            tagSet = self._tagSet.tagImplicitly(implicitTag)
-        elif explicitTag is not None:
-            tagSet = self._tagSet.tagExplicitly(explicitTag)
-        else:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        else:
-            subtypeSpec = subtypeSpec + self._subtypeSpec
-        if namedValues is None:
-            namedValues = self.__namedValues
-        else:
-            namedValues = namedValues + self.__namedValues
-        return self.__class__(value, tagSet, subtypeSpec, namedValues)
-
-class Boolean(Integer):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01),
-        )
-    subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1)
-    namedValues = Integer.namedValues.clone(('False', 0), ('True', 1))
-
-class BitString(base.AbstractSimpleAsn1Item):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03)
-        )
-    namedValues = namedval.NamedValues()
-    def __init__(self, value=None, tagSet=None, subtypeSpec=None,
-                 namedValues=None):
-        if namedValues is None:
-            self.__namedValues = self.namedValues
-        else:
-            self.__namedValues = namedValues
-        base.AbstractSimpleAsn1Item.__init__(
-            self, value, tagSet, subtypeSpec
-            )
-
-    def clone(self, value=None, tagSet=None, subtypeSpec=None,
-              namedValues=None):
-        if value is None and tagSet is None and subtypeSpec is None \
-               and namedValues is None:
-            return self
-        if value is None:
-            value = self._value
-        if tagSet is None:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        if namedValues is None:
-            namedValues = self.__namedValues
-        return self.__class__(value, tagSet, subtypeSpec, namedValues)
-
-    def subtype(self, value=None, implicitTag=None, explicitTag=None,
-                subtypeSpec=None, namedValues=None):
-        if value is None:
-            value = self._value
-        if implicitTag is not None:
-            tagSet = self._tagSet.tagImplicitly(implicitTag)
-        elif explicitTag is not None:
-            tagSet = self._tagSet.tagExplicitly(explicitTag)
-        else:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        else:
-            subtypeSpec = subtypeSpec + self._subtypeSpec
-        if namedValues is None:
-            namedValues = self.__namedValues
-        else:
-            namedValues = namedValues + self.__namedValues
-        return self.__class__(value, tagSet, subtypeSpec, namedValues)
-
-    def __str__(self): return str(tuple(self))
-
-    # Immutable sequence object protocol
-
-    def __len__(self):
-        if self._len is None:
-            self._len = len(self._value)
-        return self._len
-    def __getitem__(self, i):
-        if isinstance(i, slice):
-            return self.clone(operator.getitem(self._value, i))
-        else:
-            return self._value[i]
-
-    def __add__(self, value): return self.clone(self._value + value)
-    def __radd__(self, value): return self.clone(value + self._value)
-    def __mul__(self, value): return self.clone(self._value * value)
-    def __rmul__(self, value): return self * value
-
-    def prettyIn(self, value):
-        r = []
-        if not value:
-            return ()
-        elif isinstance(value, str):
-            if value[0] == '\'':
-                if value[-2:] == '\'B':
-                    for v in value[1:-2]:
-                        if v == '0':
-                            r.append(0)
-                        elif v == '1':
-                            r.append(1)
-                        else:
-                            raise error.PyAsn1Error(
-                                'Non-binary BIT STRING initializer %s' % (v,)
-                                )
-                    return tuple(r)
-                elif value[-2:] == '\'H':
-                    for v in value[1:-2]:
-                        i = 4
-                        v = int(v, 16)
-                        while i:
-                            i = i - 1
-                            r.append((v>>i)&0x01)
-                    return tuple(r)
-                else:
-                    raise error.PyAsn1Error(
-                        'Bad BIT STRING value notation %s' % (value,)
-                        )                
-            else:
-                for i in value.split(','):
-                    j = self.__namedValues.getValue(i)
-                    if j is None:
-                        raise error.PyAsn1Error(
-                            'Unknown bit identifier \'%s\'' % (i,)
-                            )
-                    if j >= len(r):
-                        r.extend([0]*(j-len(r)+1))
-                    r[j] = 1
-                return tuple(r)
-        elif isinstance(value, (tuple, list)):
-            r = tuple(value)
-            for b in r:
-                if b and b != 1:
-                    raise error.PyAsn1Error(
-                        'Non-binary BitString initializer \'%s\'' % (r,)
-                        )
-            return r
-        elif isinstance(value, BitString):
-            return tuple(value)
-        else:
-            raise error.PyAsn1Error(
-                'Bad BitString initializer type \'%s\'' % (value,)
-                )
-
-    def prettyOut(self, value):
-        return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
-
-try:
-    all
-except NameError:  # Python 2.4
-    def all(iterable):
-        for element in iterable:
-            if not element:
-                return False
-        return True
-
-class OctetString(base.AbstractSimpleAsn1Item):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
-        )
-    defaultBinValue = defaultHexValue = base.noValue
-    encoding = 'us-ascii'
-    def __init__(self, value=None, tagSet=None, subtypeSpec=None,
-                 encoding=None, binValue=None, hexValue=None):
-        if encoding is None:
-            self._encoding = self.encoding
-        else:
-            self._encoding = encoding
-        if binValue is not None:
-            value = self.fromBinaryString(binValue)
-        if hexValue is not None:
-            value = self.fromHexString(hexValue)
-        if value is None or value is base.noValue:
-            value = self.defaultHexValue
-        if value is None or value is base.noValue:
-            value = self.defaultBinValue
-        self.__asNumbersCache = None
-        base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
-
-    def clone(self, value=None, tagSet=None, subtypeSpec=None,
-              encoding=None, binValue=None, hexValue=None):
-        if value is None and tagSet is None and subtypeSpec is None and \
-               encoding is None and binValue is None and hexValue is None:
-            return self
-        if value is None and binValue is None and hexValue is None:
-            value = self._value
-        if tagSet is None:
-            tagSet = self._tagSet
-        if subtypeSpec is None:
-            subtypeSpec = self._subtypeSpec
-        if encoding is None:
-            encoding = self._encoding
-        return self.__class__(
-            value, tagSet, subtypeSpec, encoding, binValue, hexValue
-            )
-   
-    if sys.version_info[0] <= 2:
-        def prettyIn(self, value):
-            if isinstance(value, str):
-                return value
-            elif isinstance(value, unicode):
-                try:
-                    return value.encode(self._encoding)
-                except (LookupError, UnicodeEncodeError):
-                    raise error.PyAsn1Error(
-                        'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
-                    )
-            elif isinstance(value, (tuple, list)):
-                try:
-                    return ''.join([ chr(x) for x in value ])
-                except ValueError:
-                    raise error.PyAsn1Error(
-                        'Bad OctetString initializer \'%s\'' % (value,)
-                    )                
-            else:
-                return str(value)
-    else:
-        def prettyIn(self, value):
-            if isinstance(value, bytes):
-                return value
-            elif isinstance(value, str):
-                try:
-                    return value.encode(self._encoding)
-                except UnicodeEncodeError:
-                    raise error.PyAsn1Error(
-                        'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
-                    )
-            elif isinstance(value, OctetString):
-                return value.asOctets()
-            elif isinstance(value, (tuple, list, map)):
-                try:
-                    return bytes(value)
-                except ValueError:
-                    raise error.PyAsn1Error(
-                        'Bad OctetString initializer \'%s\'' % (value,)
-                    )
-            else:
-                try:
-                    return str(value).encode(self._encoding)
-                except UnicodeEncodeError:
-                    raise error.PyAsn1Error(
-                        'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
-                    )
-                        
-
-    def fromBinaryString(self, value):
-        bitNo = 8; byte = 0; r = ()
-        for v in value:
-            if bitNo:
-                bitNo = bitNo - 1
-            else:
-                bitNo = 7
-                r = r + (byte,)
-                byte = 0
-            if v == '0':
-                v = 0
-            elif v == '1':
-                v = 1
-            else:
-                raise error.PyAsn1Error(
-                    'Non-binary OCTET STRING initializer %s' % (v,)
-                    )
-            byte = byte | (v << bitNo)
-        return octets.ints2octs(r + (byte,))
-        
-    def fromHexString(self, value):            
-        r = p = ()
-        for v in value:
-            if p:
-                r = r + (int(p+v, 16),)
-                p = ()
-            else:
-                p = v
-        if p:
-            r = r + (int(p+'0', 16),)
-        return octets.ints2octs(r)
-
-    def prettyOut(self, value):
-        if sys.version_info[0] <= 2:
-            numbers = tuple(( ord(x) for x in value ))
-        else:
-            numbers = tuple(value)
-        if all(x >= 32 and x <= 126 for x in numbers):
-            return str(value)
-        else:
-            return '0x' + ''.join(( '%.2x' % x for x in numbers ))
-
-    def __repr__(self):
-        r = []
-        doHex = False
-        if self._value is not self.defaultValue:
-            for x in self.asNumbers():
-                if x < 32 or x > 126:
-                    doHex = True
-                    break
-            if not doHex:
-                r.append('%r' % (self._value,))
-        if self._tagSet is not self.tagSet:
-            r.append('tagSet=%r' % (self._tagSet,))
-        if self._subtypeSpec is not self.subtypeSpec:
-            r.append('subtypeSpec=%r' % (self._subtypeSpec,))
-        if self.encoding is not self._encoding:
-            r.append('encoding=%r' % (self._encoding,))
-        if doHex:
-            r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ]))
-        return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
-                                
-    if sys.version_info[0] <= 2:
-        def __str__(self): return str(self._value)
-        def __unicode__(self):
-            return self._value.decode(self._encoding, 'ignore')
-        def asOctets(self): return self._value
-        def asNumbers(self):
-            if self.__asNumbersCache is None:
-                self.__asNumbersCache = tuple([ ord(x) for x in self._value ])
-            return self.__asNumbersCache
-    else:
-        def __str__(self): return self._value.decode(self._encoding, 'ignore')
-        def __bytes__(self): return self._value
-        def asOctets(self): return self._value
-        def asNumbers(self):
-            if self.__asNumbersCache is None:
-                self.__asNumbersCache = tuple(self._value)
-            return self.__asNumbersCache
- 
-    # Immutable sequence object protocol
-    
-    def __len__(self):
-        if self._len is None:
-            self._len = len(self._value)
-        return self._len
-    def __getitem__(self, i):
-        if isinstance(i, slice):
-            return self.clone(operator.getitem(self._value, i))
-        else:
-            return self._value[i]
-
-    def __add__(self, value): return self.clone(self._value + self.prettyIn(value))
-    def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
-    def __mul__(self, value): return self.clone(self._value * value)
-    def __rmul__(self, value): return self * value
-    def __int__(self): return int(self._value)
-    def __float__(self): return float(self._value)
-    
-class Null(OctetString):
-    defaultValue = ''.encode()  # This is tightly constrained
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05)
-        )
-    subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode())
-    
-if sys.version_info[0] <= 2:
-    intTypes = (int, long)
-else:
-    intTypes = (int,)
-
-numericTypes = intTypes + (float,)
-
-class ObjectIdentifier(base.AbstractSimpleAsn1Item):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06)
-        )
-    def __add__(self, other): return self.clone(self._value + other)
-    def __radd__(self, other): return self.clone(other + self._value)
-
-    def asTuple(self): return self._value
-    
-    # Sequence object protocol
-    
-    def __len__(self):
-        if self._len is None:
-            self._len = len(self._value)
-        return self._len
-    def __getitem__(self, i):
-        if isinstance(i, slice):
-            return self.clone(
-                operator.getitem(self._value, i)
-                )
-        else:
-            return self._value[i]
-
-    def __str__(self): return self.prettyPrint()
-    def __repr__(self):
-        return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
-
-    def index(self, suboid): return self._value.index(suboid)
-
-    def isPrefixOf(self, value):
-        """Returns true if argument OID resides deeper in the OID tree"""
-        l = len(self)
-        if l <= len(value):
-            if self._value[:l] == value[:l]:
-                return 1
-        return 0
-
-    def prettyIn(self, value):
-        """Dotted -> tuple of numerics OID converter"""
-        if isinstance(value, tuple):
-            pass
-        elif isinstance(value, ObjectIdentifier):
-            return tuple(value)        
-        elif octets.isStringType(value):
-            r = []
-            for element in [ x for x in value.split('.') if x != '' ]:
-                try:
-                    r.append(int(element, 0))
-                except ValueError:
-                    raise error.PyAsn1Error(
-                        'Malformed Object ID %s at %s: %s' %
-                        (str(value), self.__class__.__name__, sys.exc_info()[1])
-                        )
-            value = tuple(r)
-        else:
-            try:
-                value = tuple(value)
-            except TypeError:
-                raise error.PyAsn1Error(
-                        'Malformed Object ID %s at %s: %s' %
-                        (str(value), self.__class__.__name__,sys.exc_info()[1])
-                        )
-
-        for x in value:
-            if not isinstance(x, intTypes) or x < 0:
-                raise error.PyAsn1Error(
-                    'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__)
-                    )
-    
-        return value
-
-    def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
-    
-class Real(base.AbstractSimpleAsn1Item):
-    binEncBase = None # binEncBase = 16 is recommended for large numbers
-    try:
-        _plusInf = float('inf')
-        _minusInf = float('-inf')
-        _inf = (_plusInf, _minusInf)
-    except ValueError:
-        # Infinity support is platform and Python dependent
-        _plusInf = _minusInf = None
-        _inf = ()
-
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09)
-        )
-
-    def __normalizeBase10(self, value):
-        m, b, e = value
-        while m and m % 10 == 0:
-            m = m / 10
-            e = e + 1
-        return m, b, e
-
-    def prettyIn(self, value):
-        if isinstance(value, tuple) and len(value) == 3:
-            if not isinstance(value[0], numericTypes) or \
-                    not isinstance(value[1], intTypes) or \
-                    not isinstance(value[2], intTypes):
-                raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
-            if isinstance(value[0], float) and \
-                self._inf and value[0] in self._inf:
-                return value[0]
-            if value[1] not in (2, 10):
-                raise error.PyAsn1Error(
-                    'Prohibited base for Real value: %s' % (value[1],)
-                    )
-            if value[1] == 10:
-                value = self.__normalizeBase10(value)
-            return value
-        elif isinstance(value, intTypes):
-            return self.__normalizeBase10((value, 10, 0))
-        elif isinstance(value, (str, float)):
-            if isinstance(value, str):
-                try:
-                    value = float(value)
-                except ValueError:
-                    raise error.PyAsn1Error(
-                        'Bad real value syntax: %s' % (value,)
-                    )
-            if self._inf and value in self._inf:
-                return value
-            else:
-                e = 0
-                while int(value) != value:
-                    value = value * 10
-                    e = e - 1
-                return self.__normalizeBase10((int(value), 10, e))
-        elif isinstance(value, Real):
-            return tuple(value)
-        raise error.PyAsn1Error(
-            'Bad real value syntax: %s' % (value,)
-            )
-        
-    def prettyOut(self, value):
-        if value in self._inf:
-            return '\'%s\'' % value
-        else:
-            return str(value)
-
-    def prettyPrint(self, scope=0):
-        if self.isInfinity():
-            return self.prettyOut(self._value)
-        else:
-            return str(float(self))
-
-    def isPlusInfinity(self): return self._value == self._plusInf
-    def isMinusInfinity(self): return self._value == self._minusInf
-    def isInfinity(self): return self._value in self._inf
-    
-    def __str__(self): return str(float(self))
-    
-    def __add__(self, value): return self.clone(float(self) + value)
-    def __radd__(self, value): return self + value
-    def __mul__(self, value): return self.clone(float(self) * value)
-    def __rmul__(self, value): return self * value
-    def __sub__(self, value): return self.clone(float(self) - value)
-    def __rsub__(self, value): return self.clone(value - float(self))
-    def __mod__(self, value): return self.clone(float(self) % value)
-    def __rmod__(self, value): return self.clone(value % float(self))
-    def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo))
-    def __rpow__(self, value): return self.clone(pow(value, float(self)))
-
-    if sys.version_info[0] <= 2:
-        def __div__(self, value): return self.clone(float(self) / value)
-        def __rdiv__(self, value): return self.clone(value / float(self))
-    else:
-        def __truediv__(self, value): return self.clone(float(self) / value)
-        def __rtruediv__(self, value): return self.clone(value / float(self))
-        def __divmod__(self, value): return self.clone(float(self) // value)
-        def __rdivmod__(self, value): return self.clone(value // float(self))
-
-    def __int__(self): return int(float(self))
-    if sys.version_info[0] <= 2:
-        def __long__(self): return long(float(self))
-    def __float__(self):
-        if self._value in self._inf:
-            return self._value
-        else:
-            return float(
-                self._value[0] * pow(self._value[1], self._value[2])
-            )
-    def __abs__(self): return self.clone(abs(float(self)))
-    def __pos__(self): return self.clone(+float(self))
-    def __neg__(self): return self.clone(-float(self))
-    def __round__(self, n=0):
-        r = round(float(self), n)
-        if n:
-            return self.clone(r)
-        else:
-            return r
-    def __floor__(self): return self.clone(math.floor(float(self)))
-    def __ceil__(self): return self.clone(math.ceil(float(self)))
-    if sys.version_info[0:2] > (2, 5):
-        def __trunc__(self): return self.clone(math.trunc(float(self)))
-
-    def __lt__(self, value): return float(self) < value
-    def __le__(self, value): return float(self) <= value
-    def __eq__(self, value): return float(self) == value
-    def __ne__(self, value): return float(self) != value
-    def __gt__(self, value): return float(self) > value
-    def __ge__(self, value): return float(self) >= value
-
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(float(self))
-    else:
-        def __bool__(self): return bool(float(self))
-        __hash__ = base.AbstractSimpleAsn1Item.__hash__
-
-    def __getitem__(self, idx):
-        if self._value in self._inf:
-            raise error.PyAsn1Error('Invalid infinite value operation')
-        else:
-            return self._value[idx]
-    
-class Enumerated(Integer):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A)
-        )
-
-# "Structured" ASN.1 types
-
-class SetOf(base.AbstractConstructedAsn1Item):
-    componentType = None
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
-        )
-    typeId = 1
-    strictConstraints = False
-
-    def _cloneComponentValues(self, myClone, cloneValueFlag):
-        idx = 0; l = len(self._componentValues)
-        while idx < l:
-            c = self._componentValues[idx]
-            if c is not None:
-                if isinstance(c, base.AbstractConstructedAsn1Item):
-                    myClone.setComponentByPosition(
-                        idx, c.clone(cloneValueFlag=cloneValueFlag)
-                        )
-                else:
-                    myClone.setComponentByPosition(idx, c.clone())
-            idx = idx + 1
-        
-    def _verifyComponent(self, idx, value):
-        t = self._componentType
-        if t is None:
-            return
-        if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
-            raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
-        if self.strictConstraints and \
-                not t.isSuperTypeOf(value, matchTags=False):
-            raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
-
-    def getComponentByPosition(self, idx): return self._componentValues[idx]
-    def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
-        l = len(self._componentValues)
-        if idx >= l:
-            self._componentValues = self._componentValues + (idx-l+1)*[None]
-        if value is None:
-            if self._componentValues[idx] is None:
-                if self._componentType is None:
-                    raise error.PyAsn1Error('Component type not defined')
-                self._componentValues[idx] = self._componentType.clone()
-                self._componentValuesSet = self._componentValuesSet + 1
-            return self
-        elif not isinstance(value, base.Asn1Item):
-            if self._componentType is None:
-                raise error.PyAsn1Error('Component type not defined')
-            if isinstance(self._componentType, base.AbstractSimpleAsn1Item):
-                value = self._componentType.clone(value=value)
-            else:
-                raise error.PyAsn1Error('Instance value required')
-        if verifyConstraints:
-            if self._componentType is not None:
-                self._verifyComponent(idx, value)
-            self._verifySubtypeSpec(value, idx)            
-        if self._componentValues[idx] is None:
-            self._componentValuesSet = self._componentValuesSet + 1
-        self._componentValues[idx] = value
-        return self
-
-    def getComponentTagMap(self):
-        if self._componentType is not None:
-            return self._componentType.getTagMap()
-
-    def prettyPrint(self, scope=0):
-        scope = scope + 1
-        r = self.__class__.__name__ + ':\n'        
-        for idx in range(len(self._componentValues)):
-            r = r + ' '*scope
-            if self._componentValues[idx] is None:
-                r = r + '<empty>'
-            else:
-                r = r + self._componentValues[idx].prettyPrint(scope)
-        return r
-
-    def prettyPrintType(self, scope=0):
-        scope = scope + 1
-        r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
-        if self._componentType is not None:
-            r = r + ' '*scope
-            r = r + self._componentType.prettyPrintType(scope)
-        return r + '\n' + ' '*(scope-1) + '}'
-
-class SequenceOf(SetOf):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
-        )
-    typeId = 2
-
-class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
-    componentType = namedtype.NamedTypes()
-    strictConstraints = False
-    def __init__(self, componentType=None, tagSet=None,
-                 subtypeSpec=None, sizeSpec=None):
-        if componentType is None:
-            componentType = self.componentType
-        base.AbstractConstructedAsn1Item.__init__(
-            self, componentType.clone(), tagSet, subtypeSpec, sizeSpec
-        )
-        self._componentTypeLen = len(self._componentType)
-
-    def __getitem__(self, idx):
-        if isinstance(idx, str):
-            return self.getComponentByName(idx)
-        else:
-            return base.AbstractConstructedAsn1Item.__getitem__(self, idx)
-
-    def __setitem__(self, idx, value):
-        if isinstance(idx, str):
-            self.setComponentByName(idx, value)
-        else:
-            base.AbstractConstructedAsn1Item.__setitem__(self, idx, value)
-        
-    def _cloneComponentValues(self, myClone, cloneValueFlag):
-        idx = 0; l = len(self._componentValues)
-        while idx < l:
-            c = self._componentValues[idx]
-            if c is not None:
-                if isinstance(c, base.AbstractConstructedAsn1Item):
-                    myClone.setComponentByPosition(
-                        idx, c.clone(cloneValueFlag=cloneValueFlag)
-                        )
-                else:
-                    myClone.setComponentByPosition(idx, c.clone())
-            idx = idx + 1
-
-    def _verifyComponent(self, idx, value):
-        if idx >= self._componentTypeLen:
-            raise error.PyAsn1Error(
-                'Component type error out of range'
-                )
-        t = self._componentType[idx].getType()
-        if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
-            raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
-        if self.strictConstraints and \
-                not t.isSuperTypeOf(value, matchTags=False):
-            raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
-
-    def getComponentByName(self, name):
-        return self.getComponentByPosition(
-            self._componentType.getPositionByName(name)
-            )
-    def setComponentByName(self, name, value=None, verifyConstraints=True):
-        return self.setComponentByPosition(
-            self._componentType.getPositionByName(name),value,verifyConstraints
-        )
-
-    def getComponentByPosition(self, idx):
-        try:
-            return self._componentValues[idx]
-        except IndexError:
-            if idx < self._componentTypeLen:
-                return
-            raise
-    def setComponentByPosition(self, idx, value=None,
-                               verifyConstraints=True,
-                               exactTypes=False,
-                               matchTags=True,
-                               matchConstraints=True):
-        l = len(self._componentValues)
-        if idx >= l:
-            self._componentValues = self._componentValues + (idx-l+1)*[None]
-        if value is None:
-            if self._componentValues[idx] is None:
-                self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
-                self._componentValuesSet = self._componentValuesSet + 1
-            return self
-        elif not isinstance(value, base.Asn1Item):
-            t = self._componentType.getTypeByPosition(idx)
-            if isinstance(t, base.AbstractSimpleAsn1Item):
-                value = t.clone(value=value)
-            else:
-                raise error.PyAsn1Error('Instance value required')
-        if verifyConstraints:
-            if self._componentTypeLen:
-                self._verifyComponent(idx, value)
-            self._verifySubtypeSpec(value, idx)            
-        if self._componentValues[idx] is None:
-            self._componentValuesSet = self._componentValuesSet + 1
-        self._componentValues[idx] = value
-        return self
-
-    def getNameByPosition(self, idx):
-        if self._componentTypeLen:
-            return self._componentType.getNameByPosition(idx)
-
-    def getDefaultComponentByPosition(self, idx):
-        if self._componentTypeLen and self._componentType[idx].isDefaulted:
-            return self._componentType[idx].getType()
-
-    def getComponentType(self):
-        if self._componentTypeLen:
-            return self._componentType
-    
-    def setDefaultComponents(self):
-        if self._componentTypeLen == self._componentValuesSet:
-            return
-        idx = self._componentTypeLen
-        while idx:
-            idx = idx - 1
-            if self._componentType[idx].isDefaulted:
-                if self.getComponentByPosition(idx) is None:
-                    self.setComponentByPosition(idx)
-            elif not self._componentType[idx].isOptional:
-                if self.getComponentByPosition(idx) is None:
-                    raise error.PyAsn1Error(
-                        'Uninitialized component #%s at %r' % (idx, self)
-                        )
-
-    def prettyPrint(self, scope=0):
-        scope = scope + 1
-        r = self.__class__.__name__ + ':\n'
-        for idx in range(len(self._componentValues)):
-            if self._componentValues[idx] is not None:
-                r = r + ' '*scope
-                componentType = self.getComponentType()
-                if componentType is None:
-                    r = r + '<no-name>'
-                else:
-                    r = r + componentType.getNameByPosition(idx)
-                r = '%s=%s\n' % (
-                    r, self._componentValues[idx].prettyPrint(scope)
-                    )
-        return r
-
-    def prettyPrintType(self, scope=0):
-        scope = scope + 1
-        r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
-        for idx in range(len(self.componentType)):
-            r = r + ' '*scope
-            r = r + '"%s"' % self.componentType.getNameByPosition(idx)
-            r = '%s = %s\n' % (
-                r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope)
-            )
-        return r + '\n' + ' '*(scope-1) + '}'
-
-class Sequence(SequenceAndSetBase):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
-        )
-    typeId = 3
-
-    def getComponentTagMapNearPosition(self, idx):
-        if self._componentType:
-            return self._componentType.getTagMapNearPosition(idx)
-    
-    def getComponentPositionNearType(self, tagSet, idx):
-        if self._componentType:
-            return self._componentType.getPositionNearType(tagSet, idx)
-        else:
-            return idx
-    
-class Set(SequenceAndSetBase):
-    tagSet = baseTagSet = tag.initTagSet(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
-        )
-    typeId = 4
-
-    def getComponent(self, innerFlag=0): return self
-    
-    def getComponentByType(self, tagSet, innerFlag=0):
-        c = self.getComponentByPosition(
-            self._componentType.getPositionByType(tagSet)
-            )
-        if innerFlag and isinstance(c, Set):
-            # get inner component by inner tagSet
-            return c.getComponent(1)
-        else:
-            # get outer component by inner tagSet
-            return c
-        
-    def setComponentByType(self, tagSet, value=None, innerFlag=0,
-                           verifyConstraints=True):
-        idx = self._componentType.getPositionByType(tagSet)
-        t = self._componentType.getTypeByPosition(idx)
-        if innerFlag:  # set inner component by inner tagSet
-            if t.getTagSet():
-                return self.setComponentByPosition(
-                    idx, value, verifyConstraints
-                )
-            else:
-                t = self.setComponentByPosition(idx).getComponentByPosition(idx)
-                return t.setComponentByType(
-                    tagSet, value, innerFlag, verifyConstraints
-                )
-        else:  # set outer component by inner tagSet
-            return self.setComponentByPosition(
-                idx, value, verifyConstraints
-            )
-            
-    def getComponentTagMap(self):
-        if self._componentType:
-            return self._componentType.getTagMap(True)
-
-    def getComponentPositionByType(self, tagSet):
-        if self._componentType:
-            return self._componentType.getPositionByType(tagSet)
-
-class Choice(Set):
-    tagSet = baseTagSet = tag.TagSet()  # untagged
-    sizeSpec = constraint.ConstraintsIntersection(
-        constraint.ValueSizeConstraint(1, 1)
-        )
-    typeId = 5
-    _currentIdx = None
-
-    def __eq__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] == other
-        return NotImplemented
-    def __ne__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] != other
-        return NotImplemented
-    def __lt__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] < other
-        return NotImplemented
-    def __le__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] <= other
-        return NotImplemented
-    def __gt__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] > other
-        return NotImplemented
-    def __ge__(self, other):
-        if self._componentValues:
-            return self._componentValues[self._currentIdx] >= other
-        return NotImplemented
-    if sys.version_info[0] <= 2:
-        def __nonzero__(self): return bool(self._componentValues)
-    else:
-        def __bool__(self): return bool(self._componentValues)
-
-    def __len__(self): return self._currentIdx is not None and 1 or 0
-    
-    def verifySizeSpec(self):
-        if self._currentIdx is None:
-            raise error.PyAsn1Error('Component not chosen')
-        else:
-            self._sizeSpec(' ')
-
-    def _cloneComponentValues(self, myClone, cloneValueFlag):
-        try:
-            c = self.getComponent()
-        except error.PyAsn1Error:
-            pass
-        else:
-            if isinstance(c, Choice):
-                tagSet = c.getEffectiveTagSet()
-            else:
-                tagSet = c.getTagSet()
-            if isinstance(c, base.AbstractConstructedAsn1Item):
-                myClone.setComponentByType(
-                    tagSet, c.clone(cloneValueFlag=cloneValueFlag)
-                    )
-            else:
-                myClone.setComponentByType(tagSet, c.clone())
-
-    def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
-        l = len(self._componentValues)
-        if idx >= l:
-            self._componentValues = self._componentValues + (idx-l+1)*[None]
-        if self._currentIdx is not None:
-            self._componentValues[self._currentIdx] = None
-        if value is None:
-            if self._componentValues[idx] is None:
-                self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone()
-                self._componentValuesSet = 1
-                self._currentIdx = idx
-            return self
-        elif not isinstance(value, base.Asn1Item):
-            value = self._componentType.getTypeByPosition(idx).clone(
-                value=value
-                )
-        if verifyConstraints:
-            if self._componentTypeLen:
-                self._verifyComponent(idx, value)
-            self._verifySubtypeSpec(value, idx)            
-        self._componentValues[idx] = value
-        self._currentIdx = idx
-        self._componentValuesSet = 1
-        return self
-
-    def getMinTagSet(self):
-        if self._tagSet:
-            return self._tagSet
-        else:
-            return self._componentType.genMinTagSet()
-
-    def getEffectiveTagSet(self):
-        if self._tagSet:
-            return self._tagSet
-        else:
-            c = self.getComponent()
-            if isinstance(c, Choice):
-                return c.getEffectiveTagSet()
-            else:
-                return c.getTagSet()
-
-    def getTagMap(self):
-        if self._tagSet:
-            return Set.getTagMap(self)
-        else:
-            return Set.getComponentTagMap(self)
-
-    def getComponent(self, innerFlag=0):
-        if self._currentIdx is None:
-            raise error.PyAsn1Error('Component not chosen')
-        else:
-            c = self._componentValues[self._currentIdx]
-            if innerFlag and isinstance(c, Choice):
-                return c.getComponent(innerFlag)
-            else:
-                return c
-
-    def getName(self, innerFlag=0):
-        if self._currentIdx is None:
-            raise error.PyAsn1Error('Component not chosen')
-        else:
-            if innerFlag:
-                c = self._componentValues[self._currentIdx]
-                if isinstance(c, Choice):
-                    return c.getName(innerFlag)
-            return self._componentType.getNameByPosition(self._currentIdx)
-
-    def setDefaultComponents(self): pass
-
-class Any(OctetString):
-    tagSet = baseTagSet = tag.TagSet()  # untagged
-    typeId = 6
-
-    def getTagMap(self):
-        return tagmap.TagMap(
-            { self.getTagSet(): self },
-            { eoo.endOfOctets.getTagSet(): eoo.endOfOctets },
-            self
-            )
-
-# XXX
-# coercion rules?
diff --git a/tools/swarming_client/third_party/pyasn1/pyasn1/type/useful.py b/tools/swarming_client/third_party/pyasn1/pyasn1/type/useful.py
deleted file mode 100644
index 1766534..0000000
--- a/tools/swarming_client/third_party/pyasn1/pyasn1/type/useful.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# ASN.1 "useful" types
-from pyasn1.type import char, tag
-
-class ObjectDescriptor(char.GraphicString):
-    tagSet = char.GraphicString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
-        )
-
-class GeneralizedTime(char.VisibleString):
-    tagSet = char.VisibleString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
-        )
-
-class UTCTime(char.VisibleString):
-    tagSet = char.VisibleString.tagSet.tagImplicitly(
-        tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23)
-        )
diff --git a/tools/swarming_client/third_party/requests/LICENSE b/tools/swarming_client/third_party/requests/LICENSE
deleted file mode 100644
index a103fc9..0000000
--- a/tools/swarming_client/third_party/requests/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2015 Kenneth Reitz
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/tools/swarming_client/third_party/requests/MODIFICATIONS.diff b/tools/swarming_client/third_party/requests/MODIFICATIONS.diff
deleted file mode 100644
index 6fd0a00..0000000
--- a/tools/swarming_client/third_party/requests/MODIFICATIONS.diff
+++ /dev/null
@@ -1,56 +0,0 @@
---- a/compat.py
-+++ b/compat.py
-@@ -4,7 +4,7 @@
- pythoncompat
- """
- 
--from .packages import chardet
-+chardet = None
- 
- import sys
- 
---- a/models.py
-+++ b/models.py
-@@ -639,8 +639,8 @@ class Response(object):
- 
-     @property
-     def apparent_encoding(self):
--        """The apparent encoding, provided by the chardet library"""
--        return chardet.detect(self.content)['encoding']
-+        # TODO(maruel): charade is really large, so it is trimmed off.
-+        return 'utf-8'
- 
-     def iter_content(self, chunk_size=1, decode_unicode=False):
-         """Iterates over the response data.  When stream=True is set on the
---- a/packages/urllib3/__init__.py
-+++ b/packages/urllib3/__init__.py
-@@ -76,14 +76,14 @@ del NullHandler
- 
- 
- # SecurityWarning's always go off by default.
--warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
-+warnings.simplefilter('ignore', exceptions.SecurityWarning, append=True)
- # SubjectAltNameWarning's should go off once per host
- warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
- # InsecurePlatformWarning's don't vary between requests, so we keep it default.
--warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
-+warnings.simplefilter('ignore', exceptions.InsecurePlatformWarning,
-                       append=True)
- # SNIMissingWarnings should go off only once.
--warnings.simplefilter('default', exceptions.SNIMissingWarning)
-+warnings.simplefilter('ignore', exceptions.SNIMissingWarning)
- 
- 
- def disable_warnings(category=exceptions.HTTPWarning):
---- a/packages/__init__.py
-+++ b/packages/__init__.py
-@@ -28,9 +28,3 @@ try:
- except ImportError:
-     import urllib3
-     sys.modules['%s.urllib3' % __name__] = urllib3
--
--try:
--    from . import chardet
--except ImportError:
--    import chardet
--    sys.modules['%s.chardet' % __name__] = chardet
diff --git a/tools/swarming_client/third_party/requests/NOTICE b/tools/swarming_client/third_party/requests/NOTICE
deleted file mode 100644
index f583e47..0000000
--- a/tools/swarming_client/third_party/requests/NOTICE
+++ /dev/null
@@ -1,54 +0,0 @@
-Requests includes some vendorized python libraries to ease installation.
-
-Urllib3 License
-===============
-
-This is the MIT license: http://www.opensource.org/licenses/mit-license.php
-
-Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
-Modifications copyright 2012 Kenneth Reitz.
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-Chardet License
-===============
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation; either
-version 2.1 of the License, or (at your option) any later version.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-02110-1301  USA
-
-
-CA Bundle License
-=================
-
-This Source Code Form is subject to the terms of the Mozilla Public
-License, v. 2.0. If a copy of the MPL was not distributed with this
-file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/tools/swarming_client/third_party/requests/README.rst b/tools/swarming_client/third_party/requests/README.rst
deleted file mode 100644
index 99d30e7..0000000
--- a/tools/swarming_client/third_party/requests/README.rst
+++ /dev/null
@@ -1,86 +0,0 @@
-Requests: HTTP for Humans
-=========================
-
-.. image:: https://img.shields.io/pypi/v/requests.svg
-    :target: https://pypi.python.org/pypi/requests
-
-.. image:: https://img.shields.io/pypi/dm/requests.svg
-        :target: https://pypi.python.org/pypi/requests
-
-
-
-
-Requests is an Apache2 Licensed HTTP library, written in Python, for human
-beings.
-
-Most existing Python modules for sending HTTP requests are extremely
-verbose and cumbersome. Python's builtin urllib2 module provides most of
-the HTTP capabilities you should need, but the api is thoroughly broken.
-It requires an enormous amount of work (even method overrides) to
-perform the simplest of tasks.
-
-Things shouldn't be this way. Not in Python.
-
-.. code-block:: python
-
-    >>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
-    >>> r.status_code
-    204
-    >>> r.headers['content-type']
-    'application/json'
-    >>> r.text
-    ...
-
-See `the same code, without Requests <https://gist.github.com/973705>`_.
-
-Requests allows you to send HTTP/1.1 requests. You can add headers, form data,
-multipart files, and parameters with simple Python dictionaries, and access the
-response data in the same way. It's powered by httplib and `urllib3
-<https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
-hacks for you.
-
-
-Features
---------
-
-- International Domains and URLs
-- Keep-Alive & Connection Pooling
-- Sessions with Cookie Persistence
-- Browser-style SSL Verification
-- Basic/Digest Authentication
-- Elegant Key/Value Cookies
-- Automatic Decompression
-- Unicode Response Bodies
-- Multipart File Uploads
-- Connection Timeouts
-- Thread-safety
-- HTTP(S) proxy support
-
-
-Installation
-------------
-
-To install Requests, simply:
-
-.. code-block:: bash
-
-    $ pip install requests
-
-
-Documentation
--------------
-
-Documentation is available at http://docs.python-requests.org/.
-
-
-Contribute
-----------
-
-#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
-#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
-#. Write a test which shows that the bug was fixed or that the feature works as expected.
-#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
-
-.. _`the repository`: http://github.com/kennethreitz/requests
-.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
-.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
diff --git a/tools/swarming_client/third_party/requests/README.swarming b/tools/swarming_client/third_party/requests/README.swarming
deleted file mode 100644
index 7b95592..0000000
--- a/tools/swarming_client/third_party/requests/README.swarming
+++ /dev/null
@@ -1,15 +0,0 @@
-Name: requests
-Short Name: requests
-URL: https://github.com/kennethreitz/requests/archive/v2.9.1.tar.gz
-Version: 2.9.1
-Revision: 1108058626450b863d154bb74d669754b480caa4
-License: Apache License, Version 2.0
-
-Description:
-Python HTTP for Humans.
-
-Local Modifications:
-- Only kept requests/.
-- Removed requests/packages/chardet/.
-- Kept LICENSE, NOTICE and README.rst.
-- Applied MODIFICATIONS.diff
diff --git a/tools/swarming_client/third_party/requests/__init__.py b/tools/swarming_client/third_party/requests/__init__.py
deleted file mode 100644
index bd5b5b9..0000000
--- a/tools/swarming_client/third_party/requests/__init__.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#   __
-#  /__)  _  _     _   _ _/   _
-# / (   (- (/ (/ (- _)  /  _)
-#          /
-
-"""
-Requests HTTP library
-~~~~~~~~~~~~~~~~~~~~~
-
-Requests is an HTTP library, written in Python, for human beings. Basic GET
-usage:
-
-   >>> import requests
-   >>> r = requests.get('https://www.python.org')
-   >>> r.status_code
-   200
-   >>> 'Python is a programming language' in r.content
-   True
-
-... or POST:
-
-   >>> payload = dict(key1='value1', key2='value2')
-   >>> r = requests.post('http://httpbin.org/post', data=payload)
-   >>> print(r.text)
-   {
-     ...
-     "form": {
-       "key2": "value2",
-       "key1": "value1"
-     },
-     ...
-   }
-
-The other HTTP methods are supported - see `requests.api`. Full documentation
-is at <http://python-requests.org>.
-
-:copyright: (c) 2015 by Kenneth Reitz.
-:license: Apache 2.0, see LICENSE for more details.
-
-"""
-
-__title__ = 'requests'
-__version__ = '2.9.1'
-__build__ = 0x020901
-__author__ = 'Kenneth Reitz'
-__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2015 Kenneth Reitz'
-
-# Attempt to enable urllib3's SNI support, if possible
-try:
-    from .packages.urllib3.contrib import pyopenssl
-    pyopenssl.inject_into_urllib3()
-except ImportError:
-    pass
-
-from . import utils
-from .models import Request, Response, PreparedRequest
-from .api import request, get, head, post, patch, put, delete, options
-from .sessions import session, Session
-from .status_codes import codes
-from .exceptions import (
-    RequestException, Timeout, URLRequired,
-    TooManyRedirects, HTTPError, ConnectionError,
-    FileModeWarning,
-)
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-try:  # Python 2.7+
-    from logging import NullHandler
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-
-logging.getLogger(__name__).addHandler(NullHandler())
-
-import warnings
-
-# FileModeWarnings go off per the default.
-warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/tools/swarming_client/third_party/requests/adapters.py b/tools/swarming_client/third_party/requests/adapters.py
deleted file mode 100644
index 6266d5b..0000000
--- a/tools/swarming_client/third_party/requests/adapters.py
+++ /dev/null
@@ -1,453 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.adapters
-~~~~~~~~~~~~~~~~~
-
-This module contains the transport adapters that Requests uses to define
-and maintain connections.
-"""
-
-import os.path
-import socket
-
-from .models import Response
-from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
-from .packages.urllib3.response import HTTPResponse
-from .packages.urllib3.util import Timeout as TimeoutSauce
-from .packages.urllib3.util.retry import Retry
-from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
-                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
-                    select_proxy)
-from .structures import CaseInsensitiveDict
-from .packages.urllib3.exceptions import ClosedPoolError
-from .packages.urllib3.exceptions import ConnectTimeoutError
-from .packages.urllib3.exceptions import HTTPError as _HTTPError
-from .packages.urllib3.exceptions import MaxRetryError
-from .packages.urllib3.exceptions import NewConnectionError
-from .packages.urllib3.exceptions import ProxyError as _ProxyError
-from .packages.urllib3.exceptions import ProtocolError
-from .packages.urllib3.exceptions import ReadTimeoutError
-from .packages.urllib3.exceptions import SSLError as _SSLError
-from .packages.urllib3.exceptions import ResponseError
-from .cookies import extract_cookies_to_jar
-from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
-                         ProxyError, RetryError)
-from .auth import _basic_auth_str
-
-DEFAULT_POOLBLOCK = False
-DEFAULT_POOLSIZE = 10
-DEFAULT_RETRIES = 0
-DEFAULT_POOL_TIMEOUT = None
-
-
-class BaseAdapter(object):
-    """The Base Transport Adapter"""
-
-    def __init__(self):
-        super(BaseAdapter, self).__init__()
-
-    def send(self):
-        raise NotImplementedError
-
-    def close(self):
-        raise NotImplementedError
-
-
-class HTTPAdapter(BaseAdapter):
-    """The built-in HTTP Adapter for urllib3.
-
-    Provides a general-case interface for Requests sessions to contact HTTP and
-    HTTPS urls by implementing the Transport Adapter interface. This class will
-    usually be created by the :class:`Session <Session>` class under the
-    covers.
-
-    :param pool_connections: The number of urllib3 connection pools to cache.
-    :param pool_maxsize: The maximum number of connections to save in the pool.
-    :param int max_retries: The maximum number of retries each connection
-        should attempt. Note, this applies only to failed DNS lookups, socket
-        connections and connection timeouts, never to requests where data has
-        made it to the server. By default, Requests does not retry failed
-        connections. If you need granular control over the conditions under
-        which we retry a request, import urllib3's ``Retry`` class and pass
-        that instead.
-    :param pool_block: Whether the connection pool should block for connections.
-
-    Usage::
-
-      >>> import requests
-      >>> s = requests.Session()
-      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
-      >>> s.mount('http://', a)
-    """
-    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
-                 '_pool_block']
-
-    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
-                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
-                 pool_block=DEFAULT_POOLBLOCK):
-        if max_retries == DEFAULT_RETRIES:
-            self.max_retries = Retry(0, read=False)
-        else:
-            self.max_retries = Retry.from_int(max_retries)
-        self.config = {}
-        self.proxy_manager = {}
-
-        super(HTTPAdapter, self).__init__()
-
-        self._pool_connections = pool_connections
-        self._pool_maxsize = pool_maxsize
-        self._pool_block = pool_block
-
-        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
-
-    def __getstate__(self):
-        return dict((attr, getattr(self, attr, None)) for attr in
-                    self.__attrs__)
-
-    def __setstate__(self, state):
-        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
-        # self.poolmanager uses a lambda function, which isn't pickleable.
-        self.proxy_manager = {}
-        self.config = {}
-
-        for attr, value in state.items():
-            setattr(self, attr, value)
-
-        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
-                              block=self._pool_block)
-
-    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
-        """Initializes a urllib3 PoolManager.
-
-        This method should not be called from user code, and is only
-        exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param connections: The number of urllib3 connection pools to cache.
-        :param maxsize: The maximum number of connections to save in the pool.
-        :param block: Block when no free connections are available.
-        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
-        """
-        # save these values for pickling
-        self._pool_connections = connections
-        self._pool_maxsize = maxsize
-        self._pool_block = block
-
-        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
-                                       block=block, strict=True, **pool_kwargs)
-
-    def proxy_manager_for(self, proxy, **proxy_kwargs):
-        """Return urllib3 ProxyManager for the given proxy.
-
-        This method should not be called from user code, and is only
-        exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param proxy: The proxy to return a urllib3 ProxyManager for.
-        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
-        :returns: ProxyManager
-        """
-        if not proxy in self.proxy_manager:
-            proxy_headers = self.proxy_headers(proxy)
-            self.proxy_manager[proxy] = proxy_from_url(
-                proxy,
-                proxy_headers=proxy_headers,
-                num_pools=self._pool_connections,
-                maxsize=self._pool_maxsize,
-                block=self._pool_block,
-                **proxy_kwargs)
-
-        return self.proxy_manager[proxy]
-
-    def cert_verify(self, conn, url, verify, cert):
-        """Verify a SSL certificate. This method should not be called from user
-        code, and is only exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param conn: The urllib3 connection object associated with the cert.
-        :param url: The requested URL.
-        :param verify: Whether we should actually verify the certificate.
-        :param cert: The SSL certificate to verify.
-        """
-        if url.lower().startswith('https') and verify:
-
-            cert_loc = None
-
-            # Allow self-specified cert location.
-            if verify is not True:
-                cert_loc = verify
-
-            if not cert_loc:
-                cert_loc = DEFAULT_CA_BUNDLE_PATH
-
-            if not cert_loc:
-                raise Exception("Could not find a suitable SSL CA certificate bundle.")
-
-            conn.cert_reqs = 'CERT_REQUIRED'
-
-            if not os.path.isdir(cert_loc):
-                conn.ca_certs = cert_loc
-            else:
-                conn.ca_cert_dir = cert_loc
-        else:
-            conn.cert_reqs = 'CERT_NONE'
-            conn.ca_certs = None
-            conn.ca_cert_dir = None
-
-        if cert:
-            if not isinstance(cert, basestring):
-                conn.cert_file = cert[0]
-                conn.key_file = cert[1]
-            else:
-                conn.cert_file = cert
-
-    def build_response(self, req, resp):
-        """Builds a :class:`Response <requests.Response>` object from a urllib3
-        response. This should not be called from user code, and is only exposed
-        for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
-
-        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
-        :param resp: The urllib3 response object.
-        """
-        response = Response()
-
-        # Fallback to None if there's no status_code, for whatever reason.
-        response.status_code = getattr(resp, 'status', None)
-
-        # Make headers case-insensitive.
-        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
-
-        # Set encoding.
-        response.encoding = get_encoding_from_headers(response.headers)
-        response.raw = resp
-        response.reason = response.raw.reason
-
-        if isinstance(req.url, bytes):
-            response.url = req.url.decode('utf-8')
-        else:
-            response.url = req.url
-
-        # Add new cookies from the server.
-        extract_cookies_to_jar(response.cookies, req, resp)
-
-        # Give the Response some context.
-        response.request = req
-        response.connection = self
-
-        return response
-
-    def get_connection(self, url, proxies=None):
-        """Returns a urllib3 connection for the given URL. This should not be
-        called from user code, and is only exposed for use when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param url: The URL to connect to.
-        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
-        """
-        proxy = select_proxy(url, proxies)
-
-        if proxy:
-            proxy = prepend_scheme_if_needed(proxy, 'http')
-            proxy_manager = self.proxy_manager_for(proxy)
-            conn = proxy_manager.connection_from_url(url)
-        else:
-            # Only scheme should be lower case
-            parsed = urlparse(url)
-            url = parsed.geturl()
-            conn = self.poolmanager.connection_from_url(url)
-
-        return conn
-
-    def close(self):
-        """Disposes of any internal state.
-
-        Currently, this just closes the PoolManager, which closes pooled
-        connections.
-        """
-        self.poolmanager.clear()
-
-    def request_url(self, request, proxies):
-        """Obtain the url to use when making the final request.
-
-    If the message is being sent through an HTTP proxy, the full URL has to
-        be used. Otherwise, we should only use the path portion of the URL.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
-        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
-        """
-        proxy = select_proxy(request.url, proxies)
-        scheme = urlparse(request.url).scheme
-        if proxy and scheme != 'https':
-            url = urldefragauth(request.url)
-        else:
-            url = request.path_url
-
-        return url
-
-    def add_headers(self, request, **kwargs):
-        """Add any headers needed by the connection. As of v2.0 this does
-        nothing by default, but is left for overriding by users that subclass
-        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
-        :param kwargs: The keyword arguments from the call to send().
-        """
-        pass
-
-    def proxy_headers(self, proxy):
-        """Returns a dictionary of the headers to add to any request sent
-        through a proxy. This works with urllib3 magic to ensure that they are
-        correctly sent to the proxy, rather than in a tunnelled request if
-        CONNECT is being used.
-
-        This should not be called from user code, and is only exposed for use
-        when subclassing the
-        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
-
-    :param proxy: The URL of the proxy being used for this request.
-        """
-        headers = {}
-        username, password = get_auth_from_url(proxy)
-
-        if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username,
-                                                             password)
-
-        return headers
-
-    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
-        """Sends PreparedRequest object. Returns Response object.
-
-        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
-        :param stream: (optional) Whether to stream the request content.
-        :param timeout: (optional) How long to wait for the server to send
-            data before giving up, as a float, or a :ref:`(connect timeout,
-            read timeout) <timeouts>` tuple.
-        :type timeout: float or tuple
-        :param verify: (optional) Whether to verify SSL certificates.
-        :param cert: (optional) Any user-provided SSL certificate to be trusted.
-        :param proxies: (optional) The proxies dictionary to apply to the request.
-        """
-
-        conn = self.get_connection(request.url, proxies)
-
-        self.cert_verify(conn, request.url, verify, cert)
-        url = self.request_url(request, proxies)
-        self.add_headers(request)
-
-        chunked = not (request.body is None or 'Content-Length' in request.headers)
-
-        if isinstance(timeout, tuple):
-            try:
-                connect, read = timeout
-                timeout = TimeoutSauce(connect=connect, read=read)
-            except ValueError as e:
-                # this may raise a string formatting error.
-                err = ("Invalid timeout {0}. Pass a (connect, read) "
-                       "timeout tuple, or a single float to set "
-                       "both timeouts to the same value".format(timeout))
-                raise ValueError(err)
-        else:
-            timeout = TimeoutSauce(connect=timeout, read=timeout)
-
-        try:
-            if not chunked:
-                resp = conn.urlopen(
-                    method=request.method,
-                    url=url,
-                    body=request.body,
-                    headers=request.headers,
-                    redirect=False,
-                    assert_same_host=False,
-                    preload_content=False,
-                    decode_content=False,
-                    retries=self.max_retries,
-                    timeout=timeout
-                )
-
-            # Send the request.
-            else:
-                if hasattr(conn, 'proxy_pool'):
-                    conn = conn.proxy_pool
-
-                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
-
-                try:
-                    low_conn.putrequest(request.method,
-                                        url,
-                                        skip_accept_encoding=True)
-
-                    for header, value in request.headers.items():
-                        low_conn.putheader(header, value)
-
-                    low_conn.endheaders()
-
-                    for i in request.body:
-                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
-                        low_conn.send(b'\r\n')
-                        low_conn.send(i)
-                        low_conn.send(b'\r\n')
-                    low_conn.send(b'0\r\n\r\n')
-
-                    # Receive the response from the server
-                    try:
-                        # For Python 2.7+ versions, use buffering of HTTP
-                        # responses
-                        r = low_conn.getresponse(buffering=True)
-                    except TypeError:
-                        # For compatibility with Python 2.6 versions and back
-                        r = low_conn.getresponse()
-
-                    resp = HTTPResponse.from_httplib(
-                        r,
-                        pool=conn,
-                        connection=low_conn,
-                        preload_content=False,
-                        decode_content=False
-                    )
-                except:
-                    # If we hit any problems here, clean up the connection.
-                    # Then, reraise so that we can handle the actual exception.
-                    low_conn.close()
-                    raise
-
-        except (ProtocolError, socket.error) as err:
-            raise ConnectionError(err, request=request)
-
-        except MaxRetryError as e:
-            if isinstance(e.reason, ConnectTimeoutError):
-                # TODO: Remove this in 3.0.0: see #2811
-                if not isinstance(e.reason, NewConnectionError):
-                    raise ConnectTimeout(e, request=request)
-
-            if isinstance(e.reason, ResponseError):
-                raise RetryError(e, request=request)
-
-            raise ConnectionError(e, request=request)
-
-        except ClosedPoolError as e:
-            raise ConnectionError(e, request=request)
-
-        except _ProxyError as e:
-            raise ProxyError(e)
-
-        except (_SSLError, _HTTPError) as e:
-            if isinstance(e, _SSLError):
-                raise SSLError(e, request=request)
-            elif isinstance(e, ReadTimeoutError):
-                raise ReadTimeout(e, request=request)
-            else:
-                raise
-
-        return self.build_response(request, resp)
diff --git a/tools/swarming_client/third_party/requests/api.py b/tools/swarming_client/third_party/requests/api.py
deleted file mode 100644
index b21a1a4..0000000
--- a/tools/swarming_client/third_party/requests/api.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.api
-~~~~~~~~~~~~
-
-This module implements the Requests API.
-
-:copyright: (c) 2012 by Kenneth Reitz.
-:license: Apache2, see LICENSE for more details.
-
-"""
-
-from . import sessions
-
-
-def request(method, url, **kwargs):
-    """Constructs and sends a :class:`Request <Request>`.
-
-    :param method: method for the new :class:`Request` object.
-    :param url: URL for the new :class:`Request` object.
-    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param json: (optional) json data to send in the body of the :class:`Request`.
-    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
-    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
-    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
-    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
-    :param timeout: (optional) How long to wait for the server to send data
-        before giving up, as a float, or a :ref:`(connect timeout, read
-        timeout) <timeouts>` tuple.
-    :type timeout: float or tuple
-    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
-    :type allow_redirects: bool
-    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
-    :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
-    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
-    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.request('GET', 'http://httpbin.org/get')
-      <Response [200]>
-    """
-
-    # By using the 'with' statement we are sure the session is closed, thus we
-    # avoid leaving sockets open which can trigger a ResourceWarning in some
-    # cases, and look like a memory leak in others.
-    with sessions.Session() as session:
-        return session.request(method=method, url=url, **kwargs)
-
-
-def get(url, params=None, **kwargs):
-    """Sends a GET request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', True)
-    return request('get', url, params=params, **kwargs)
-
-
-def options(url, **kwargs):
-    """Sends a OPTIONS request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', True)
-    return request('options', url, **kwargs)
-
-
-def head(url, **kwargs):
-    """Sends a HEAD request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    kwargs.setdefault('allow_redirects', False)
-    return request('head', url, **kwargs)
-
-
-def post(url, data=None, json=None, **kwargs):
-    """Sends a POST request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param json: (optional) json data to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('post', url, data=data, json=json, **kwargs)
-
-
-def put(url, data=None, **kwargs):
-    """Sends a PUT request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('put', url, data=data, **kwargs)
-
-
-def patch(url, data=None, **kwargs):
-    """Sends a PATCH request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('patch', url,  data=data, **kwargs)
-
-
-def delete(url, **kwargs):
-    """Sends a DELETE request.
-
-    :param url: URL for the new :class:`Request` object.
-    :param \*\*kwargs: Optional arguments that ``request`` takes.
-    :return: :class:`Response <Response>` object
-    :rtype: requests.Response
-    """
-
-    return request('delete', url, **kwargs)
diff --git a/tools/swarming_client/third_party/requests/auth.py b/tools/swarming_client/third_party/requests/auth.py
deleted file mode 100644
index 2af55fb..0000000
--- a/tools/swarming_client/third_party/requests/auth.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.auth
-~~~~~~~~~~~~~
-
-This module contains the authentication handlers for Requests.
-"""
-
-import os
-import re
-import time
-import hashlib
-import threading
-
-from base64 import b64encode
-
-from .compat import urlparse, str
-from .cookies import extract_cookies_to_jar
-from .utils import parse_dict_header, to_native_string
-from .status_codes import codes
-
-CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
-CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
-
-
-def _basic_auth_str(username, password):
-    """Returns a Basic Auth string."""
-
-    authstr = 'Basic ' + to_native_string(
-        b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
-    )
-
-    return authstr
-
-
-class AuthBase(object):
-    """Base class that all auth implementations derive from"""
-
-    def __call__(self, r):
-        raise NotImplementedError('Auth hooks must be callable.')
-
-
-class HTTPBasicAuth(AuthBase):
-    """Attaches HTTP Basic Authentication to the given Request object."""
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
-
-    def __call__(self, r):
-        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
-        return r
-
-
-class HTTPProxyAuth(HTTPBasicAuth):
-    """Attaches HTTP Proxy Authentication to a given Request object."""
-    def __call__(self, r):
-        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
-        return r
-
-
-class HTTPDigestAuth(AuthBase):
-    """Attaches HTTP Digest Authentication to the given Request object."""
-    def __init__(self, username, password):
-        self.username = username
-        self.password = password
-        # Keep state in per-thread local storage
-        self._thread_local = threading.local()
-
-    def init_per_thread_state(self):
-        # Ensure state is initialized just once per-thread
-        if not hasattr(self._thread_local, 'init'):
-            self._thread_local.init = True
-            self._thread_local.last_nonce = ''
-            self._thread_local.nonce_count = 0
-            self._thread_local.chal = {}
-            self._thread_local.pos = None
-            self._thread_local.num_401_calls = None
-
-    def build_digest_header(self, method, url):
-
-        realm = self._thread_local.chal['realm']
-        nonce = self._thread_local.chal['nonce']
-        qop = self._thread_local.chal.get('qop')
-        algorithm = self._thread_local.chal.get('algorithm')
-        opaque = self._thread_local.chal.get('opaque')
-
-        if algorithm is None:
-            _algorithm = 'MD5'
-        else:
-            _algorithm = algorithm.upper()
-        # lambdas assume digest modules are imported at the top level
-        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
-            def md5_utf8(x):
-                if isinstance(x, str):
-                    x = x.encode('utf-8')
-                return hashlib.md5(x).hexdigest()
-            hash_utf8 = md5_utf8
-        elif _algorithm == 'SHA':
-            def sha_utf8(x):
-                if isinstance(x, str):
-                    x = x.encode('utf-8')
-                return hashlib.sha1(x).hexdigest()
-            hash_utf8 = sha_utf8
-
-        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
-
-        if hash_utf8 is None:
-            return None
-
-        # XXX not implemented yet
-        entdig = None
-        p_parsed = urlparse(url)
-        #: path is request-uri defined in RFC 2616 which should not be empty
-        path = p_parsed.path or "/"
-        if p_parsed.query:
-            path += '?' + p_parsed.query
-
-        A1 = '%s:%s:%s' % (self.username, realm, self.password)
-        A2 = '%s:%s' % (method, path)
-
-        HA1 = hash_utf8(A1)
-        HA2 = hash_utf8(A2)
-
-        if nonce == self._thread_local.last_nonce:
-            self._thread_local.nonce_count += 1
-        else:
-            self._thread_local.nonce_count = 1
-        ncvalue = '%08x' % self._thread_local.nonce_count
-        s = str(self._thread_local.nonce_count).encode('utf-8')
-        s += nonce.encode('utf-8')
-        s += time.ctime().encode('utf-8')
-        s += os.urandom(8)
-
-        cnonce = (hashlib.sha1(s).hexdigest()[:16])
-        if _algorithm == 'MD5-SESS':
-            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
-
-        if not qop:
-            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
-        elif qop == 'auth' or 'auth' in qop.split(','):
-            noncebit = "%s:%s:%s:%s:%s" % (
-                nonce, ncvalue, cnonce, 'auth', HA2
-                )
-            respdig = KD(HA1, noncebit)
-        else:
-            # XXX handle auth-int.
-            return None
-
-        self._thread_local.last_nonce = nonce
-
-        # XXX should the partial digests be encoded too?
-        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
-               'response="%s"' % (self.username, realm, nonce, path, respdig)
-        if opaque:
-            base += ', opaque="%s"' % opaque
-        if algorithm:
-            base += ', algorithm="%s"' % algorithm
-        if entdig:
-            base += ', digest="%s"' % entdig
-        if qop:
-            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
-
-        return 'Digest %s' % (base)
-
-    def handle_redirect(self, r, **kwargs):
-        """Reset num_401_calls counter on redirects."""
-        if r.is_redirect:
-            self._thread_local.num_401_calls = 1
-
-    def handle_401(self, r, **kwargs):
-        """Takes the given response and tries digest-auth, if needed."""
-
-        if self._thread_local.pos is not None:
-            # Rewind the file position indicator of the body to where
-            # it was to resend the request.
-            r.request.body.seek(self._thread_local.pos)
-        s_auth = r.headers.get('www-authenticate', '')
-
-        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:
-
-            self._thread_local.num_401_calls += 1
-            pat = re.compile(r'digest ', flags=re.IGNORECASE)
-            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
-
-            # Consume content and release the original connection
-            # to allow our new request to reuse the same one.
-            r.content
-            r.close()
-            prep = r.request.copy()
-            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
-            prep.prepare_cookies(prep._cookies)
-
-            prep.headers['Authorization'] = self.build_digest_header(
-                prep.method, prep.url)
-            _r = r.connection.send(prep, **kwargs)
-            _r.history.append(r)
-            _r.request = prep
-
-            return _r
-
-        self._thread_local.num_401_calls = 1
-        return r
-
-    def __call__(self, r):
-        # Initialize per-thread state, if needed
-        self.init_per_thread_state()
-        # If we have a saved nonce, skip the 401
-        if self._thread_local.last_nonce:
-            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
-        try:
-            self._thread_local.pos = r.body.tell()
-        except AttributeError:
-            # In the case of HTTPDigestAuth being reused and the body of
-            # the previous request was a file-like object, pos has the
-            # file position of the previous body. Ensure it's set to
-            # None.
-            self._thread_local.pos = None
-        r.register_hook('response', self.handle_401)
-        r.register_hook('response', self.handle_redirect)
-        self._thread_local.num_401_calls = 1
-
-        return r
diff --git a/tools/swarming_client/third_party/requests/cacert.pem b/tools/swarming_client/third_party/requests/cacert.pem
deleted file mode 100644
index 6a66daa..0000000
--- a/tools/swarming_client/third_party/requests/cacert.pem
+++ /dev/null
@@ -1,5616 +0,0 @@
-
-# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
-# Subject: O=Equifax OU=Equifax Secure Certificate Authority
-# Label: "Equifax Secure CA"
-# Serial: 903804111
-# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
-# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
-# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Label: "GlobalSign Root CA - R2"
-# Serial: 4835703278459682885658125
-# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
-# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
-# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
-# Serial: 206684696279472310254277870180966723415
-# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
-# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
-# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
-# Serial: 314531972711909413743075096039378935511
-# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
-# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
-# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946069240
-# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
-# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
-# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
-MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
-LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
-YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
-A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
-K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
-sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
-MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
-XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
-HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
-4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
-HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
-j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
-U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
-zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
-u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
-bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
-fF6adulZkMV8gzURZVE=
------END CERTIFICATE-----
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
-RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
-VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
-DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
-ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
-VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
-mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
-IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
-dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
-jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
-BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
-DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
-jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
-Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
-R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Low-Value Services Root"
-# Serial: 1
-# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
-# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
-# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
------BEGIN CERTIFICATE-----
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
-QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
-VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
-CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
-tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
-dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
-PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
-+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
-BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
-MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
-ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
-IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
-7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
-43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
-eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
-pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
-WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Label: "AddTrust External Root"
-# Serial: 1
-# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
-# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
-# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
------BEGIN CERTIFICATE-----
-MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
-IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
-bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
-dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
-H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
-uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
-mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
-a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
-E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
-WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
-IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
-AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
-YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
-6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
-Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
-c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
-mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Public Services Root"
-# Serial: 1
-# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
-# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
-# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
-MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
-ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
-BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
-6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
-GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
-dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
-1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
-62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
-BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
-MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
-cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
-b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
-IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
-iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
-4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
-XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Qualified Certificates Root"
-# Serial: 1
-# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
-# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
-# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
------BEGIN CERTIFICATE-----
-MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
-MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
-EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
-BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
-xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
-87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
-2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
-WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
-0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
-A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
-pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
-ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
-hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
-hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
-dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
-P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
-iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
-xqE=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Label: "Entrust Root Certification Authority"
-# Serial: 1164660820
-# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
-# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
-# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
------BEGIN CERTIFICATE-----
-MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
-Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
-KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
-cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
-NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
-NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
-ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
-BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
-Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
-4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
-KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
-rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
-94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
-sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
-gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
-kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
-vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
-A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
-O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
-AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
-9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
-eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
-0vdXcDazv/wor3ElhVsT/h5/WrQ8
------END CERTIFICATE-----
-
-# Issuer: O=RSA Security Inc OU=RSA Security 2048 V3
-# Subject: O=RSA Security Inc OU=RSA Security 2048 V3
-# Label: "RSA Security 2048 v3"
-# Serial: 13297492616345471454730593562152402946
-# MD5 Fingerprint: 77:0d:19:b1:21:fd:00:42:9c:3e:0c:a5:dd:0b:02:8e
-# SHA1 Fingerprint: 25:01:90:19:cf:fb:d9:99:1c:b7:68:25:74:8d:94:5f:30:93:95:42
-# SHA256 Fingerprint: af:8b:67:62:a1:e5:28:22:81:61:a9:5d:5c:55:9e:e2:66:27:8f:75:d7:9e:83:01:89:a5:03:50:6a:bd:6b:4c
------BEGIN CERTIFICATE-----
-MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
-MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
-dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
-BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
-MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
-eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
-/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
-wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
-AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
-PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
-AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
-MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
-HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
-Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
-f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
-rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
-6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
-7CAFYd4=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Label: "GeoTrust Global CA"
-# Serial: 144470
-# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
-# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
-# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
------BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Global CA 2"
-# Serial: 1
-# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
-# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
-# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
------BEGIN CERTIFICATE-----
-MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
-IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
-R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
-PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
-Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
-TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
-5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
-S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
-2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
-FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
-EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
-EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
-/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
-A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
-abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
-I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
-4iIprn2DQKi6bA==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA"
-# Serial: 1
-# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
-# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
-# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
------BEGIN CERTIFICATE-----
-MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
-BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
-IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
-VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
-cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
-QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
-F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
-c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
-mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
-VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
-teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
-f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
-Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
-nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
-/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
-MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
-9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
-aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
-IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
-ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
-uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
-QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
-ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
-DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
-bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA 2"
-# Serial: 1
-# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
-# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
-# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
------BEGIN CERTIFICATE-----
-MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
-VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
-c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
-WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
-FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
-XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
-se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
-KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
-IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
-y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
-hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
-QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
-Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
-HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
-KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
-L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
-Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
-ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
-T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
-GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
-1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
-OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
-6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
-QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
------END CERTIFICATE-----
-
-# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
-# Label: "Visa eCommerce Root"
-# Serial: 25952180776285836048024890241505565794
-# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
-# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
-# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
------BEGIN CERTIFICATE-----
-MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
-MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
-cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
-bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
-CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
-dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
-cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
-2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
-lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
-ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
-299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
-vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
-dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
-AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
-AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
-zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
-LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
-7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
-++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
-398znM/jra6O1I7mT1GvFpLgXPYHDw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum CA O=Unizeto Sp. z o.o.
-# Subject: CN=Certum CA O=Unizeto Sp. z o.o.
-# Label: "Certum Root CA"
-# Serial: 65568
-# MD5 Fingerprint: 2c:8f:9f:66:1d:18:90:b1:47:26:9d:8e:86:82:8c:a9
-# SHA1 Fingerprint: 62:52:dc:40:f7:11:43:a2:2f:de:9e:f7:34:8e:06:42:51:b1:81:18
-# SHA256 Fingerprint: d8:e0:fe:bc:1d:b2:e3:8d:00:94:0f:37:d2:7d:41:34:4d:99:3e:73:4b:99:d5:65:6d:97:78:d4:d8:14:36:24
------BEGIN CERTIFICATE-----
-MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
-MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
-QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
-jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
-ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
-ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
-Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
-AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
-HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
-uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
-TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
-xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
-CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
-O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
-6GAqm4VKQPNriiTsBhYscw==
------END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
-YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
-BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
-3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
-rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
-oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
-MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
-QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
-AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
-GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
-Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
-G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
-l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
-smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
-# Subject: CN=Secure Certificate Services O=Comodo CA Limited
-# Label: "Comodo Secure Services root"
-# Serial: 1
-# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
-# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
-# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
------BEGIN CERTIFICATE-----
-MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
-ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
-fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
-BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
-cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
-HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
-CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
-3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
-6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
-HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
-Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
-Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
-DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
-5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
-Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
-gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
-izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
------END CERTIFICATE-----
-
-# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
-# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
-# Label: "Comodo Trusted Services root"
-# Serial: 1
-# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
-# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
-# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Label: "QuoVadis Root CA"
-# Serial: 985026699
-# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
-# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
-# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
------BEGIN CERTIFICATE-----
-MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
-MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
-IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
-dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
-li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
-rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
-WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
-F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
-xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
-Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
-dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
-ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
-IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
-c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
-ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
-Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
-KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
-KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
-y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
-dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
-VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
-MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
-7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
-cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
-mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
-xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
-SnQ2+Q==
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2"
-# Serial: 1289
-# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
-# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
-# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
------BEGIN CERTIFICATE-----
-MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
-GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
-Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
-WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
-rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
-+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
-ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
-Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
-PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
-/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
-oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
-yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
-EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
-A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
-MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
-ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
-BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
-g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
-fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
-WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
-B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
-hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
-TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
-mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
-ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
-4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
-8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3"
-# Serial: 1478
-# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
-# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
-# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
------BEGIN CERTIFICATE-----
-MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
-V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
-4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
-H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
-8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
-vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
-mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
-btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
-T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
-WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
-c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
-4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
-VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
-CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
-aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
-aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
-dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
-czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
-A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
-Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
-7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
-d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
-+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
-4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
-t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
-DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
-k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
-zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
-Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
-mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
-4SVhM7JZG+Ju1zdXtg2pEto=
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
-# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
-# Label: "Security Communication Root CA"
-# Serial: 0
-# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
-# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
-# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
------BEGIN CERTIFICATE-----
-MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
-MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
-dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
-WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
-VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
-9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
-DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
-Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
-QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
-xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
-A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
-Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
-Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
-JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
-RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
------END CERTIFICATE-----
-
-# Issuer: CN=Sonera Class2 CA O=Sonera
-# Subject: CN=Sonera Class2 CA O=Sonera
-# Label: "Sonera Class 2 Root CA"
-# Serial: 29
-# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
-# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
-# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
------BEGIN CERTIFICATE-----
-MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
-MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
-MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
-BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
-Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
-5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
-3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
-vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
-8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
-DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
-MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
-zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
-3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
-FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
-Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
-ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA"
-# Serial: 10000010
-# MD5 Fingerprint: 60:84:7c:5a:ce:db:0c:d4:cb:a7:e9:fe:02:c6:a9:c0
-# SHA1 Fingerprint: 10:1d:fa:3f:d5:0b:cb:bb:9b:b5:60:0c:19:55:a4:1a:f4:73:3a:04
-# SHA256 Fingerprint: d4:1d:82:9e:8c:16:59:82:2a:f9:3f:ce:62:bf:fc:de:26:4f:c8:4e:8b:95:0c:5f:f2:75:d0:52:35:46:95:a3
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
-TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
-MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
-ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
-ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
-9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
-hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
-tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
-BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
-SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
-OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
-cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
-7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
-/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
-eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
-u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
-7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
-iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
------END CERTIFICATE-----
-
-# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN DATACorp SGC Root CA"
-# Serial: 91374294542884689855167577680241077609
-# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
-# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
-# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
------BEGIN CERTIFICATE-----
-MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
-kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
-IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
-EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
-VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
-dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
-E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
-D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
-4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
-o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
-MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
-LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
-BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
-AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
-Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
-j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
-KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
-2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
-mfnGV/TJVTl4uix5yaaIK/QI
------END CERTIFICATE-----
-
-# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN USERFirst Hardware Root CA"
-# Serial: 91374294542884704022267039221184531197
-# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
-# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
-# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
------BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Chambers of Commerce Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Chambers of Commerce Root"
-# Serial: 0
-# MD5 Fingerprint: b0:01:ee:14:d9:af:29:18:94:76:8e:f1:69:33:2a:84
-# SHA1 Fingerprint: 6e:3a:55:a4:19:0c:19:5c:93:84:3c:c0:db:72:2e:31:30:61:f0:b1
-# SHA256 Fingerprint: 0c:25:8a:12:a5:67:4a:ef:25:f2:8b:a7:dc:fa:ec:ee:a3:48:e5:41:e6:f5:cc:4e:e6:3b:71:b3:61:60:6a:c3
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
-b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
-MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
-ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
-IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
-AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
-unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
-BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
-7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
-0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
-roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
-A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
-aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
-26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
-BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
-EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
-BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
-aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
-AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
-p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
-1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
-XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
-eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
-tGWaIZDgqtCYvDi1czyL+Nw=
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Subject: CN=Global Chambersign Root O=AC Camerfirma SA CIF A82743287 OU=http://www.chambersign.org
-# Label: "Camerfirma Global Chambersign Root"
-# Serial: 0
-# MD5 Fingerprint: c5:e6:7b:bf:06:d0:4f:43:ed:c4:7a:65:8a:fb:6b:19
-# SHA1 Fingerprint: 33:9b:6b:14:50:24:9b:55:7a:01:87:72:84:d9:e0:2f:c3:d2:d8:e9
-# SHA256 Fingerprint: ef:3c:b4:17:fc:8e:bf:6f:97:87:6c:9e:4e:ce:39:de:1e:a5:fe:64:91:41:d1:02:8b:7d:11:c0:b2:29:8c:ed
------BEGIN CERTIFICATE-----
-MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
-MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
-ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
-YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
-MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
-NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
-A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
-A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
-Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
-QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
-eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
-B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
-z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
-AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
-ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
-TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
-MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
-VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
-VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
-bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
-AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
-bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
-ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
-VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
-ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
-AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Kozjegyzoi (Class A) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Notary (Class A) Root"
-# Serial: 259
-# MD5 Fingerprint: 86:38:6d:5e:49:63:6c:85:5c:db:6d:dc:94:b7:d0:f7
-# SHA1 Fingerprint: ac:ed:5f:65:53:fd:25:ce:01:5f:1f:7a:48:3b:6a:74:9f:61:78:c6
-# SHA256 Fingerprint: 7f:12:cd:5f:7e:5e:29:0e:c7:d8:51:79:d5:b7:2c:20:a5:be:75:08:ff:db:5b:f8:1a:b9:68:4a:7f:c9:f6:67
------BEGIN CERTIFICATE-----
-MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
-MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
-TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
-dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
-KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
-N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
-dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
-MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
-b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
-zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
-3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
-WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
-Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
-NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
-ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
-QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
-YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
-aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
-IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
-ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
-ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
-amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
-IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
-Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
-ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
-YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
-dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
-b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
-CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
-xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
-0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
-QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
-f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
-8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
-# Issuer: O=Government Root Certification Authority
-# Subject: O=Government Root Certification Authority
-# Label: "Taiwan GRCA"
-# Serial: 42023070807708724159991140556527066870
-# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
-# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
-# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
------BEGIN CERTIFICATE-----
-MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
-MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
-PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
-IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
-gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
-yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
-F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
-jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
-ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
-VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
-YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
-EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
-Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
-DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
-MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
-TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
-qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
-ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
-JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
-hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
-EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
-nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
-udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
-ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
-LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
-pYYsfPQS
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 1 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 1"
-# Serial: 122348795730808398873664200247279986742
-# MD5 Fingerprint: f8:38:7c:77:88:df:2c:16:68:2e:c2:e2:52:4b:b8:f9
-# SHA1 Fingerprint: 5f:3a:fc:0a:8b:64:f6:86:67:34:74:df:7e:a9:a2:fe:f9:fa:7a:51
-# SHA256 Fingerprint: 21:db:20:12:36:60:bb:2e:d4:18:20:5d:a1:1e:e7:a8:5a:65:e2:bc:6e:55:b5:af:7e:78:99:c8:a2:66:d9:2e
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
-m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
-FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
-TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
-EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
-kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
-HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
-vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
-19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
-L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
-bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
-JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
-BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
-K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
-ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
-Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
-sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
-3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
-ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
-mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
-b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
-rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
-hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
-zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
-MBr1mmz0DlP5OlvRHA==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root CA"
-# Serial: 17154717934120587862167794914071425081
-# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
-# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
-# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root CA"
-# Serial: 10944719598952040374951832963794454346
-# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
-# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
-# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
------BEGIN CERTIFICATE-----
-MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
-CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
-43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
-T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
-gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
-DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
-hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
-06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
-PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
-YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
-CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert High Assurance EV Root CA"
-# Serial: 3553400076410547919724730734378100087
-# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
-# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
-# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
-ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
-LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
-RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
-+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
-PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
-Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
-hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
-EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
-FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
-eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
-hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
-Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
-vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
-+OkuE6N36B9K
------END CERTIFICATE-----
-
-# Issuer: CN=Class 2 Primary CA O=Certplus
-# Subject: CN=Class 2 Primary CA O=Certplus
-# Label: "Certplus Class 2 Primary CA"
-# Serial: 177770208045934040241468760488327595043
-# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
-# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
-# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
------BEGIN CERTIFICATE-----
-MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
-PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
-MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
-IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
-ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
-VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
-kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
-EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
-H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
-HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
-DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
-QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
-Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
-AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
-yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
-FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
-ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
-kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
-l7+ijrRU
------END CERTIFICATE-----
-
-# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Label: "DST Root CA X3"
-# Serial: 91299735575339953335919266965803778155
-# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
-# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
-# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
------BEGIN CERTIFICATE-----
-MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
-MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
-DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
-PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
-Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
-rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
-OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
-xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
-7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
-aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
-SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
-ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
-AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
-R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
-JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
-Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
------END CERTIFICATE-----
-
-# Issuer: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Subject: CN=DST ACES CA X6 O=Digital Signature Trust OU=DST ACES
-# Label: "DST ACES CA X6"
-# Serial: 17771143917277623872238992636097467865
-# MD5 Fingerprint: 21:d8:4c:82:2b:99:09:33:a2:eb:14:24:8d:8e:5f:e8
-# SHA1 Fingerprint: 40:54:da:6f:1c:3f:40:74:ac:ed:0f:ec:cd:db:79:d1:53:fb:90:1d
-# SHA256 Fingerprint: 76:7c:95:5a:76:41:2c:89:af:68:8e:90:a1:c7:0f:55:6c:fd:6b:60:25:db:ea:10:41:6d:7e:b6:83:1f:8c:40
------BEGIN CERTIFICATE-----
-MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
-ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
-MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
-VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
-FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
-ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
-gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
-fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
-ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
-ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
-c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
-dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
-aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
-hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
-QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
-h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
-nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
-rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
-9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Kasım 2005
-# Label: "TURKTRUST Certificate Services Provider Root 2"
-# Serial: 1
-# MD5 Fingerprint: 37:a5:6e:d4:b1:25:84:97:b7:fd:56:15:7a:f9:a2:00
-# SHA1 Fingerprint: b4:35:d4:e1:11:9d:1c:66:90:a7:49:eb:b3:94:bd:63:7b:a7:82:b7
-# SHA256 Fingerprint: c4:70:cf:54:7e:23:02:b9:77:fb:29:dd:71:a8:9a:7b:6c:1f:60:77:7b:03:29:f5:60:17:f3:28:bf:4f:6b:e6
------BEGIN CERTIFICATE-----
-MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
-WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
-bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
-UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
-bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
-LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
-J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
-R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
-Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
-JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
-zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
-Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
-KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
-ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
-Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
-gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
-uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
-y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Label: "SwissSign Gold CA - G2"
-# Serial: 13492815561806991280
-# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
-# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
-# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
------BEGIN CERTIFICATE-----
-MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
-BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
-biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
-d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
-CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
-76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
-bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
-6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
-emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
-MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
-MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
-MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
-FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
-aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
-gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
-qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
-lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
-8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
-L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
-45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
-UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
-O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
-bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
-GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
-77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
-hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
-92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
-Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
-ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
-Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Label: "SwissSign Silver CA - G2"
-# Serial: 5700383053117599563
-# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
-# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
-# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
------BEGIN CERTIFICATE-----
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
-BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
-IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
-RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
-U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
-MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
-Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
-YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
-nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
-6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
-eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
-c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
-MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
-jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
-5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
-rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
-F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
-cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
-AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
-WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
-xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
-2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
-IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
-aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
-em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
-dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
-OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
-hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
-tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Label: "GeoTrust Primary Certification Authority"
-# Serial: 32798226551256963324313806436981982369
-# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
-# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
-# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
------BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA"
-# Serial: 69529181992039203566298953787712940909
-# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
-# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
-# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
-# Serial: 33037644167568058970164719475676101450
-# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
-# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
-# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
-# Subject: CN=SecureTrust CA O=SecureTrust Corporation
-# Label: "SecureTrust CA"
-# Serial: 17199774589125277788362757014266862032
-# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
-# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
-# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
------BEGIN CERTIFICATE-----
-MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
-MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
-cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
-Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
-0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
-wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
-7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
-8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
-BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
-NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
-3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
-D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
-CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
-3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Global CA O=SecureTrust Corporation
-# Subject: CN=Secure Global CA O=SecureTrust Corporation
-# Label: "Secure Global CA"
-# Serial: 9751836167731051554232119481456978597
-# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
-# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
-# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
------BEGIN CERTIFICATE-----
-MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
-MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
-Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
-iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
-/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
-jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
-HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
-sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
-gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
-KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
-AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
-URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
-H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
-I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
-iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
-f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
-# Label: "COMODO Certification Authority"
-# Serial: 104350513648249232941998508985834464573
-# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
-# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
-# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
------BEGIN CERTIFICATE-----
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
-gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
-BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
-MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
-YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
-RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
-UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
-2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
-+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
-nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
-/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
-PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
-SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
-RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
-zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
-BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
-ZQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Label: "Network Solutions Certificate Authority"
-# Serial: 116697915152937497490437556386812487904
-# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
-# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
-# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
------BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
------END CERTIFICATE-----
-
-# Issuer: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Subject: CN=WellsSecure Public Root Certificate Authority O=Wells Fargo WellsSecure OU=Wells Fargo Bank NA
-# Label: "WellsSecure Public Root Certificate Authority"
-# Serial: 1
-# MD5 Fingerprint: 15:ac:a5:c2:92:2d:79:bc:e8:7f:cb:67:ed:02:cf:36
-# SHA1 Fingerprint: e7:b4:f6:9d:61:ec:90:69:db:7e:90:a7:40:1a:3c:f4:7d:4f:e8:ee
-# SHA256 Fingerprint: a7:12:72:ae:aa:a3:cf:e8:72:7f:7f:b3:9f:0f:b3:d1:e5:42:6e:90:60:b0:6e:e6:f1:3e:9a:3c:58:33:cd:43
------BEGIN CERTIFICATE-----
-MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
-IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
-cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
-dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
-MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
-bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
-DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
-ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
-WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
-Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
-HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
-z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
-SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
-AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
-KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
-AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
-BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
-VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
-ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
-Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
-ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
-/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
-A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
-k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
-iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
-2G0xffX8oRAHh84vWdw+WNs=
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Label: "COMODO ECC Certification Authority"
-# Serial: 41578283867086692638256921589707938090
-# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
-# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
-# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
------BEGIN CERTIFICATE-----
-MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
-MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
-ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
-T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
-biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
-FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
-cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
-BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
-fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
-GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
------END CERTIFICATE-----
-
-# Issuer: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Subject: CN=IGC/A O=PM/SGDN OU=DCSSI
-# Label: "IGC/A"
-# Serial: 245102874772
-# MD5 Fingerprint: 0c:7f:dd:6a:f4:2a:b9:c8:9b:bd:20:7e:a9:db:5c:37
-# SHA1 Fingerprint: 60:d6:89:74:b5:c2:65:9e:8a:0f:c1:88:7c:88:d2:46:69:1b:18:2c
-# SHA256 Fingerprint: b9:be:a7:86:0a:96:2e:a3:61:1d:ab:97:ab:6d:a3:e2:1c:10:68:b9:7d:55:57:5e:d0:e1:12:79:c1:1c:89:32
------BEGIN CERTIFICATE-----
-MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
-AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
-TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
-9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
-MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
-BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
-MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
-LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
-s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
-xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
-u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
-F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
-Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
-PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
-HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
-NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
-AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
-L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
-YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
-Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
-NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
-0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication EV RootCA1
-# Label: "Security Communication EV RootCA1"
-# Serial: 0
-# MD5 Fingerprint: 22:2d:a6:01:ea:7c:0a:f7:f0:6c:56:43:3f:77:76:d3
-# SHA1 Fingerprint: fe:b8:c4:32:dc:f9:76:9a:ce:ae:3d:d8:90:8f:fd:28:86:65:64:7d
-# SHA256 Fingerprint: a2:2d:ba:68:1e:97:37:6e:2d:39:7d:72:8a:ae:3a:9b:62:96:b9:fd:ba:60:bc:2e:11:f6:47:f2:c6:75:fb:37
------BEGIN CERTIFICATE-----
-MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
-MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
-IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
-bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
-RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
-zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
-bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
-MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
-VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
-OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
-tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
-q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
-EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
-Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
-VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
------END CERTIFICATE-----
-
-# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Label: "OISTE WISeKey Global Root GA CA"
-# Serial: 86718877871133159090080555911823548314
-# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
-# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
-# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
------BEGIN CERTIFICATE-----
-MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
-ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
-aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
-ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
-NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
-A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
-VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
-SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
-VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
-w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
-mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
-4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
-4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
-EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
-SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
-ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
-vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
-hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
-Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
-/L7fCg0=
------END CERTIFICATE-----
-
-# Issuer: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Subject: CN=Microsec e-Szigno Root CA O=Microsec Ltd. OU=e-Szigno CA
-# Label: "Microsec e-Szigno Root CA"
-# Serial: 272122594155480254301341951808045322001
-# MD5 Fingerprint: f0:96:b6:2f:c5:10:d5:67:8e:83:25:32:e8:5e:2e:e5
-# SHA1 Fingerprint: 23:88:c9:d3:71:cc:9e:96:3d:ff:7d:3c:a7:ce:fc:d6:25:ec:19:0d
-# SHA256 Fingerprint: 32:7a:3d:76:1a:ba:de:a0:34:eb:99:84:06:27:5c:b1:a4:77:6e:fd:ae:2f:df:6d:01:68:ea:1c:4f:55:67:d0
------BEGIN CERTIFICATE-----
-MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
-cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
-b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
-ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
-NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
-TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
-Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
-uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
-LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
-vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
-Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
-62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
-AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
-LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
-BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
-AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
-MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
-ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
-AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
-AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
-ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
-AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
-AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
-bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
-Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
-PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
-Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
-EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
-w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
-cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
-HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
-VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
-BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
-b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
-8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
-ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
-7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
-86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
-hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
-MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
------END CERTIFICATE-----
-
-# Issuer: CN=Certigna O=Dhimyotis
-# Subject: CN=Certigna O=Dhimyotis
-# Label: "Certigna"
-# Serial: 18364802974209362175
-# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
-# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
-# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
------BEGIN CERTIFICATE-----
-MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
-BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
-DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
-BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
-gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
-zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
-130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
-JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
-ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
-AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
-AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
-bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
-fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
-HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
-t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
-WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
------END CERTIFICATE-----
-
-# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
-# Label: "Deutsche Telekom Root CA 2"
-# Serial: 38
-# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
-# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
-# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
------BEGIN CERTIFICATE-----
-MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
-MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
-IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
-IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
-RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
-U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
-IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
-ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
-QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
-rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
-NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
-QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
-txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
-BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
-tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
-IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
-6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
-xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
-Cm26OWMohpLzGITY+9HPBVZkVw==
------END CERTIFICATE-----
-
-# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Label: "Cybertrust Global Root"
-# Serial: 4835703278459682877484360
-# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
-# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
-# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
------BEGIN CERTIFICATE-----
-MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
-A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
-bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
-b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
-7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
-J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
-HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
-t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
-FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
-XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
-MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
-hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Label: "ePKI Root Certification Authority"
-# Serial: 28956088682735189655030529057352760477
-# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
-# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
-# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
------BEGIN CERTIFICATE-----
-MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
-MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
-ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
-IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
-SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
-SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
-ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
-DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
-TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
-fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
-sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
-WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
-nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
-dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
-NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
-AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
-MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
-ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
-uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
-PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
-JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
-gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
-j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
-5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
-o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
-/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
-W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
-hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Subject: CN=TÜBİTAK UEKAE Kök Sertifika Hizmet Sağlayıcısı - Sürüm 3 O=Türkiye Bilimsel ve Teknolojik Araştırma Kurumu - TÜBİTAK OU=Ulusal Elektronik ve Kriptoloji Araştırma Enstitüsü - UEKAE/Kamu Sertifikasyon Merkezi
-# Label: "T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3"
-# Serial: 17
-# MD5 Fingerprint: ed:41:f5:8c:50:c5:2b:9c:73:e6:ee:6c:eb:c2:a8:26
-# SHA1 Fingerprint: 1b:4b:39:61:26:27:6b:64:91:a2:68:6d:d7:02:43:21:2d:1f:1d:96
-# SHA256 Fingerprint: e4:c7:34:30:d7:a5:b5:09:25:df:43:37:0a:0d:21:6e:9a:79:b9:d6:db:83:73:a0:c6:9e:b1:cc:31:c7:c5:2a
------BEGIN CERTIFICATE-----
-MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
-MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
-bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
-VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
-YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
-dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
-ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
-Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
-aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
-QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
-xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
-aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
-IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
-gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
-O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
-fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
-lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
-hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
-AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
-NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
-wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
-7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
-gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
-oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
-yZyQ2uypQjyttgI=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 CA 1 O=Buypass AS-983163327
-# Label: "Buypass Class 2 CA 1"
-# Serial: 1
-# MD5 Fingerprint: b8:08:9a:f0:03:cc:1b:0d:c8:6c:0b:76:a1:75:64:23
-# SHA1 Fingerprint: a0:a1:ab:90:c9:fc:84:7b:3b:12:61:e8:97:7d:5f:d3:22:61:d3:cc
-# SHA256 Fingerprint: 0f:4e:9c:dd:26:4b:02:55:50:d1:70:80:63:40:21:4f:e9:44:34:c9:b0:2f:69:7e:c7:10:fc:5f:ea:fb:5e:38
------BEGIN CERTIFICATE-----
-MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
-Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
-MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
-VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
-ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
-l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
-HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
-5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
-WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
-AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
-gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
-DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
-BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
-h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
-LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
------END CERTIFICATE-----
-
-# Issuer: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Subject: CN=EBG Elektronik Sertifika Hizmet Sağlayıcısı O=EBG Bilişim Teknolojileri ve Hizmetleri A.Ş.
-# Label: "EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1"
-# Serial: 5525761995591021570
-# MD5 Fingerprint: 2c:20:26:9d:cb:1a:4a:00:85:b5:b7:5a:ae:c2:01:37
-# SHA1 Fingerprint: 8c:96:ba:eb:dd:2b:07:07:48:ee:30:32:66:a0:f3:98:6e:7c:ae:58
-# SHA256 Fingerprint: 35:ae:5b:dd:d8:f7:ae:63:5c:ff:ba:56:82:a8:f0:0b:95:f4:84:62:c7:10:8e:e9:a0:e5:29:2b:07:4a:af:b2
------BEGIN CERTIFICATE-----
-MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
-BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
-ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
-MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
-a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
-4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
-tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
-tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
-dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
-c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
-TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
-+kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
-Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
-OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
-fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
-l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
-/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
-FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
-8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
-6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
-TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
-wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
-Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
-xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
-DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
-Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
-hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
-7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
-QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
------END CERTIFICATE-----
-
-# Issuer: O=certSIGN OU=certSIGN ROOT CA
-# Subject: O=certSIGN OU=certSIGN ROOT CA
-# Label: "certSIGN ROOT CA"
-# Serial: 35210227249154
-# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
-# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
-# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
------BEGIN CERTIFICATE-----
-MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
-QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
-MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
-ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
-0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
-UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
-RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
-AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
-BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
-LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
-MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
-44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
-Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
-i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
-9u6wWk5JRFRYX0KD
------END CERTIFICATE-----
-
-# Issuer: CN=CNNIC ROOT O=CNNIC
-# Subject: CN=CNNIC ROOT O=CNNIC
-# Label: "CNNIC ROOT"
-# Serial: 1228079105
-# MD5 Fingerprint: 21:bc:82:ab:49:c4:13:3b:4b:b2:2b:5c:6b:90:9c:19
-# SHA1 Fingerprint: 8b:af:4c:9b:1d:f0:2a:92:f7:da:12:8e:b9:1b:ac:f4:98:60:4b:6f
-# SHA256 Fingerprint: e2:83:93:77:3d:a8:45:a6:79:f2:08:0c:c7:fb:44:a3:b7:a1:c3:79:2c:b7:eb:77:29:fd:cb:6a:8d:99:ae:a7
------BEGIN CERTIFICATE-----
-MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
-TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
-MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
-Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
-DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
-IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
-dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
-V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
-GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
-v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
-AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
-Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
-76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
-OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
-ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
-yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
-buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
-2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
------END CERTIFICATE-----
-
-# Issuer: O=Japanese Government OU=ApplicationCA
-# Subject: O=Japanese Government OU=ApplicationCA
-# Label: "ApplicationCA - Japanese Government"
-# Serial: 49
-# MD5 Fingerprint: 7e:23:4e:5b:a7:a5:b4:25:e9:00:07:74:11:62:ae:d6
-# SHA1 Fingerprint: 7f:8a:b0:cf:d0:51:87:6a:66:f3:36:0f:47:c8:8d:8c:d3:35:fc:74
-# SHA256 Fingerprint: 2d:47:43:7d:e1:79:51:21:5a:12:f3:c5:8e:51:c7:29:a5:80:26:ef:1f:cc:0a:5f:b3:d9:dc:01:2f:60:0d:19
------BEGIN CERTIFICATE-----
-MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
-MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
-b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
-AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
-aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
-j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
-f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
-IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
-FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
-QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
-/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
-k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
-MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
-seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
-hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
-eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
-DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
-B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
-rosot4LKGAfmt1t06SAZf7IbiVQ=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services)
-# Label: "NetLock Arany (Class Gold) Főtanúsítvány"
-# Serial: 80544274841616
-# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
-# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
-# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
-EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
-MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
-cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
-dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
-pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
-b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
-aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
-IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
-lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
-AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
-VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
-ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
-BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
-AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
-U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
-bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
-+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
-bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
-uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
-XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G2"
-# Serial: 10000012
-# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
-# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
-# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
------BEGIN CERTIFICATE-----
-MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
-DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
-qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
-uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
-Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
-pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
-5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
-UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
-GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
-5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
-6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
-eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
-B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
-L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
-SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
-CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
-5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
-IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
-gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
-+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
-vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
-bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
-N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
-Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
-ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig O=Disig a.s.
-# Subject: CN=CA Disig O=Disig a.s.
-# Label: "CA Disig"
-# Serial: 1
-# MD5 Fingerprint: 3f:45:96:39:e2:50:87:f7:bb:fe:98:0c:3c:20:98:e6
-# SHA1 Fingerprint: 2a:c8:d5:8b:57:ce:bf:2f:49:af:f2:fc:76:8f:51:14:62:90:7a:41
-# SHA256 Fingerprint: 92:bf:51:19:ab:ec:ca:d0:b1:33:2d:c4:e1:d0:5f:ba:75:b5:67:90:44:ee:0c:a2:6e:93:1f:74:4f:2f:33:cf
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
-MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
-AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
-CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
-YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
-Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
-mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
-XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
-S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
-FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
-AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
-ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
-ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
-Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
-DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
-yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
-EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
-CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
-EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
-PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
------END CERTIFICATE-----
-
-# Issuer: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Subject: CN=Juur-SK O=AS Sertifitseerimiskeskus
-# Label: "Juur-SK"
-# Serial: 999181308
-# MD5 Fingerprint: aa:8e:5d:d9:f8:db:0a:58:b7:8d:26:87:6c:82:35:55
-# SHA1 Fingerprint: 40:9d:4b:d9:17:b5:5c:27:b6:9b:64:cb:98:22:44:0d:cd:09:b8:89
-# SHA256 Fingerprint: ec:c3:e9:c3:40:75:03:be:e0:91:aa:95:2f:41:34:8f:f8:8b:aa:86:3b:22:64:be:fa:c8:07:90:15:74:e9:39
------BEGIN CERTIFICATE-----
-MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
-AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
-dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
-MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
-CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
-MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
-SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
-ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
-LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
-PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
-2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
-ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
-MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
-AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
-AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
-AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
-AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
-BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
-FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
-P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
-CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
-kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
-HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
-na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
-qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
-TbvGRNs2yyqcjg==
------END CERTIFICATE-----
-
-# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Label: "Hongkong Post Root CA 1"
-# Serial: 1000
-# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
-# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
-# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
------BEGIN CERTIFICATE-----
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
-FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
-Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
-b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
-jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
-PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
-ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
-nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
-q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
-MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
-mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
-7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
-oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
-EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
-fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
-AmvZWg==
------END CERTIFICATE-----
-
-# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Label: "SecureSign RootCA11"
-# Serial: 1
-# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
-# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
-# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
------BEGIN CERTIFICATE-----
-MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
-MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
-A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
-MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
-Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
-QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
-i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
-h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
-MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
-UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
-8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
-h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
-VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
-KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
-X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
-QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
-pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
-QSdJQO7e5iNEOdyhIta6A/I=
------END CERTIFICATE-----
-
-# Issuer: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Subject: CN=ACEDICOM Root O=EDICOM OU=PKI
-# Label: "ACEDICOM Root"
-# Serial: 7029493972724711941
-# MD5 Fingerprint: 42:81:a0:e2:1c:e3:55:10:de:55:89:42:65:96:22:e6
-# SHA1 Fingerprint: e0:b4:32:2e:b2:f6:a5:68:b6:54:53:84:48:18:4a:50:36:87:43:84
-# SHA256 Fingerprint: 03:95:0f:b4:9a:53:1f:3e:19:91:94:23:98:df:a9:e0:ea:32:d7:ba:1c:dd:9b:c8:5d:b5:7e:d9:40:0b:43:4a
------BEGIN CERTIFICATE-----
-MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
-AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
-CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
-MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
-RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
-AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
-09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
-XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
-Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
-t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
-X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
-MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
-fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
-2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
-K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
-ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
-BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
-MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
-RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
-bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
-fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
-gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
-I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
-5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
-ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
-MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
-o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
-zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
-GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
-r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
-Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
------END CERTIFICATE-----
-
-# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Label: "Microsec e-Szigno Root CA 2009"
-# Serial: 14014712776195784473
-# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
-# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
-# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
------BEGIN CERTIFICATE-----
-MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
-VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
-ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
-CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
-OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
-FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
-Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
-dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
-kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
-cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
-fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
-N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
-xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
-+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
-A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
-Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
-SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
-mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
-ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
-tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
-2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
-HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Label: "GlobalSign Root CA - R3"
-# Serial: 4835703278459759426209954
-# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
-# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
-# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
------BEGIN CERTIFICATE-----
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
-MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
-RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
-gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
-QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
-XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
-LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
-RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
-jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
-6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
-mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
-Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
-WD9f
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
-# Serial: 6047274297262753887
-# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
-# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
-# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
------BEGIN CERTIFICATE-----
-MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
-BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
-cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
-MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
-Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
-thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
-cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
-L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
-NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
-X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
-m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
-Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
-EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
-KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
-6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
-OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
-VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
-VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
-ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
-AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
-661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
-am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
-ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
-PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
-3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
-SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
-3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
-ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
-StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
-Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
-jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
------END CERTIFICATE-----
-
-# Issuer: CN=Izenpe.com O=IZENPE S.A.
-# Subject: CN=Izenpe.com O=IZENPE S.A.
-# Label: "Izenpe.com"
-# Serial: 917563065490389241595536686991402621
-# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
-# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
-# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
------BEGIN CERTIFICATE-----
-MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
-MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
-ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
-VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
-b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
-scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
-xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
-LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
-uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
-yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
-JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
-rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
-BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
-hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
-QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
-HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
-Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
-QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
-BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
-MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
-A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
-laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
-awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
-JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
-LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
-VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
-LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
-UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
-QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
-naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
-QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Label: "Chambers of Commerce Root - 2008"
-# Serial: 11806822484801597146
-# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
-# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
-# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
------BEGIN CERTIFICATE-----
-MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
-IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
-MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
-dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
-EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
-MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
-28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
-VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
-DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
-5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
-ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
-Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
-UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
-+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
-Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
-ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
-hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
-HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
-+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
-YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
-L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
-ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
-IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
-HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
-DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
-PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
-5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
-glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
-FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
-pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
-xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
-tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
-jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
-fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
-OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
-d0jQ
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Label: "Global Chambersign Root - 2008"
-# Serial: 14541511773111788494
-# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
-# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
-# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
------BEGIN CERTIFICATE-----
-MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
-aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
-MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
-cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
-A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
-BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
-KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
-G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
-zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
-ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
-HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
-Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
-yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
-beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
-6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
-wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
-zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
-BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
-ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
-ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
-cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
-YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
-CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
-KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
-hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
-UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
-X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
-fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
-a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
-Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
-SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
-AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
-M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
-v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
-09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
------END CERTIFICATE-----
-
-# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Label: "Go Daddy Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
-# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
-# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
-# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
-# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
-ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
-MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
-b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
-aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
-Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
-nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
-HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
-Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
-dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
-HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
-CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
-sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
-4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
-8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
-pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
-mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Services Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
-# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
-# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
-ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
-MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
-VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
-ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
-dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
-OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
-8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
-Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
-hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
-6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
-AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
-bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
-ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
-qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
-iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
-0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
-sSi6
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
-# Subject: CN=AffirmTrust Commercial O=AffirmTrust
-# Label: "AffirmTrust Commercial"
-# Serial: 8608355977964138876
-# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
-# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
-# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
-Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
-ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
-MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
-yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
-VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
-nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
-XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
-vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
-Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
-N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
-nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Networking O=AffirmTrust
-# Subject: CN=AffirmTrust Networking O=AffirmTrust
-# Label: "AffirmTrust Networking"
-# Serial: 8957382827206547757
-# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
-# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
-# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
-YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
-kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
-QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
-6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
-yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
-QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
-tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
-QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
-Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
-olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
-x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium O=AffirmTrust
-# Subject: CN=AffirmTrust Premium O=AffirmTrust
-# Label: "AffirmTrust Premium"
-# Serial: 7893706540734352110
-# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
-# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
-# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
-dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
-A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
-qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
-JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
-+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
-s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
-HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
-70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
-V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
-qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
-5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
-C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
-OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
-FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
-KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
-Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
-8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
-MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
-0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
-u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
-YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
-GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
-RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
-KeC2uAloGRwYQw==
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Label: "AffirmTrust Premium ECC"
-# Serial: 8401224907861490260
-# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
-# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
-# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
------BEGIN CERTIFICATE-----
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
-VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
-cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
-BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
-VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
-0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
-ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
-A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
-aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
-flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Label: "Certum Trusted Network CA"
-# Serial: 279744
-# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
-# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
-# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
------BEGIN CERTIFICATE-----
-MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
-MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
-ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
-cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
-WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
-Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
-IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
-UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
-TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
-BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
-kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
-AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
-HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
-sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
-I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
-J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
-VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
-03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
------END CERTIFICATE-----
-
-# Issuer: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Autorité Racine O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Autorité Racine"
-# Serial: 1
-# MD5 Fingerprint: 7f:30:78:8c:03:e3:ca:c9:0a:e2:c9:ea:1e:aa:55:1a
-# SHA1 Fingerprint: 2e:14:da:ec:28:f0:fa:1e:8e:38:9a:4e:ab:eb:26:c0:0a:d3:83:c3
-# SHA256 Fingerprint: fc:bf:e2:88:62:06:f7:2b:27:59:3c:8b:07:02:97:e1:2d:76:9e:d1:0e:d7:93:07:05:a8:09:8e:ff:c1:4d:17
------BEGIN CERTIFICATE-----
-MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
-BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
-Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
-cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
-aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
-ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
-F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
-8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
-rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
-/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
-7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
-28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
-lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
-nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
-0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
-5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
-WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
-jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
-KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
-ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
-OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
-619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
-2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
-o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
-nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
-5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
-pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
-dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
-BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
------END CERTIFICATE-----
-
-# Issuer: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Subject: CN=Root CA Generalitat Valenciana O=Generalitat Valenciana OU=PKIGVA
-# Label: "Root CA Generalitat Valenciana"
-# Serial: 994436456
-# MD5 Fingerprint: 2c:8c:17:5e:b1:54:ab:93:17:b5:36:5a:db:d1:c6:f2
-# SHA1 Fingerprint: a0:73:e5:c5:bd:43:61:0d:86:4c:21:13:0a:85:58:57:cc:9c:ea:46
-# SHA256 Fingerprint: 8c:4e:df:d0:43:48:f3:22:96:9e:7e:29:a4:cd:4d:ca:00:46:55:06:1c:16:e1:b0:76:42:2e:f3:42:ad:63:0e
------BEGIN CERTIFICATE-----
-MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
-UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
-R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
-MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
-A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
-JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
-WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
-SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
-u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
-A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
-Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
-MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
-aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
-IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
-cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
-YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
-bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
-bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
-aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
-aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
-ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
-YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
-ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
-LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
-Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
-eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
-CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
-A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
-Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
-lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
-b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
-9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
-ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
-IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
------END CERTIFICATE-----
-
-# Issuer: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Subject: CN=A-Trust-nQual-03 O=A-Trust Ges. f. Sicherheitssysteme im elektr. Datenverkehr GmbH OU=A-Trust-nQual-03
-# Label: "A-Trust-nQual-03"
-# Serial: 93214
-# MD5 Fingerprint: 49:63:ae:27:f4:d5:95:3d:d8:db:24:86:b8:9c:07:53
-# SHA1 Fingerprint: d3:c0:63:f2:19:ed:07:3e:34:ad:5d:75:0b:32:76:29:ff:d5:9a:f2
-# SHA256 Fingerprint: 79:3c:bf:45:59:b9:fd:e3:8a:b2:2d:f1:68:69:f6:98:81:ae:14:c4:b0:13:9a:c7:88:a7:8a:1a:fc:ca:02:fb
------BEGIN CERTIFICATE-----
-MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
-VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
-bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
-dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
-MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
-dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
-ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
-EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
-lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
-znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
-2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
-k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
-2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
-VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
-AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
-KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
-8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
-FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
-mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
-DNuxUCAKGkq6ahq97BvIxYSazQ==
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Root Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
-# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
-# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
-MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
-V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
-WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
-LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
-AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
-K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
-RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
-rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
-3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
-hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
-MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
-XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
-lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
-aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
-YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Label: "Security Communication RootCA2"
-# Serial: 0
-# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
-# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
-# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
-DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
-dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
-YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
-OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
-zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
-VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
-hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
-ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
-awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
-OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
-DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
-coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
-okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
-t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
-1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
-SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
------END CERTIFICATE-----
-
-# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Label: "Hellenic Academic and Research Institutions RootCA 2011"
-# Serial: 0
-# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
-# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
-# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
------BEGIN CERTIFICATE-----
-MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
-RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
-dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
-YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
-NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
-EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
-cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
-c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
-dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
-fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
-bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
-75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
-FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
-HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
-5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
-b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
-A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
-6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
-TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
-dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
-Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
-l7WdmplNsDz4SgCbZN2fOUvRJ9e4
------END CERTIFICATE-----
-
-# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Label: "Actalis Authentication Root CA"
-# Serial: 6271844772424770508
-# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
-# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
-# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
------BEGIN CERTIFICATE-----
-MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
-BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
-MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
-IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
-SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
-ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
-UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
-4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
-KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
-gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
-rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
-51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
-be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
-KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
-v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
-fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
-ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
-ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
-e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
-WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
-SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
-pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
-X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
-fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
-K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
-ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
-LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
-LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
------END CERTIFICATE-----
-
-# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
-# Subject: O=Trustis Limited OU=Trustis FPS Root CA
-# Label: "Trustis FPS Root CA"
-# Serial: 36053640375399034304724988975563710553
-# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
-# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
-# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
------BEGIN CERTIFICATE-----
-MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
-ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
-MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
-MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
-AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
-iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
-vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
-0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
-OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
-BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
-FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
-GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
-zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
-1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
-f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
-jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
-ZetX2fNXlrtIzYE=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 45
-# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
-# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
-# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
------BEGIN CERTIFICATE-----
-MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
-F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
-ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
-ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
-aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
-c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
-d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
-CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
-dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
-wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
-Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
-0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
-pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
-CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
-P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
-1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
-KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
-JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
-8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
-fyWl8kgAwKQB2j8=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Label: "StartCom Certification Authority G2"
-# Serial: 59
-# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
-# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
-# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
------BEGIN CERTIFICATE-----
-MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
-OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
-A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
-JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
-vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
-D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
-Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
-RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
-HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
-nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
-0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
-UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
-Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
-TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
-BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
-2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
-UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
-6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
-9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
-HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
-XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
-IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
-hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
-so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 2 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
-# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
-# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
-L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
-1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
-MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
-QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
-arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
-Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
-FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
-P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
-9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
-uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
-9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
-A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
-OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
-+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
-KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
-DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
-H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
-I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
-5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
-3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
-Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 3 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
-# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
-# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
-ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
-N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
-tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
-0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
-/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
-KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
-zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
-O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
-34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
-K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
-Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
-QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
-cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
-IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
-HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
-O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
-dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
-kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
-3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
-u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
-4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 3"
-# Serial: 1
-# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
-# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
-# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
-8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
-RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
-hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
-ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
-EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
-A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
-WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
-6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
-91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
-e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
-TpPDpFQUWw==
------END CERTIFICATE-----
-
-# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Label: "EE Certification Centre Root CA"
-# Serial: 112324828676200291871926431888494945866
-# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
-# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
-# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
------BEGIN CERTIFICATE-----
-MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
-MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
-czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
-CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
-MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
-ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
-b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
-euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
-bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
-MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
-1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
-VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
-zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
-BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
-BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
-v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
-E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
-uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
-iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
-GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş. (c) Aralık 2007
-# Label: "TURKTRUST Certificate Services Provider Root 2007"
-# Serial: 1
-# MD5 Fingerprint: 2b:70:20:56:86:82:a0:18:c8:07:53:12:28:70:21:72
-# SHA1 Fingerprint: f1:7f:6f:b6:31:dc:99:e3:a3:c8:7f:fe:1c:f1:81:10:88:d9:60:33
-# SHA256 Fingerprint: 97:8c:d9:66:f2:fa:a0:7b:a7:aa:95:00:d9:c0:2e:9d:77:f2:cd:ad:a6:ad:6b:a7:4a:f4:b9:1c:66:59:3c:50
------BEGIN CERTIFICATE-----
-MIIEPTCCAyWgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvzE/MD0GA1UEAww2VMOc
-UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
-c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV4wXAYDVQQKDFVUw5xS
-S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
-SGl6bWV0bGVyaSBBLsWeLiAoYykgQXJhbMSxayAyMDA3MB4XDTA3MTIyNTE4Mzcx
-OVoXDTE3MTIyMjE4MzcxOVowgb8xPzA9BgNVBAMMNlTDnFJLVFJVU1QgRWxla3Ry
-b25payBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTELMAkGA1UEBhMC
-VFIxDzANBgNVBAcMBkFua2FyYTFeMFwGA1UECgxVVMOcUktUUlVTVCBCaWxnaSDE
-sGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkgQS7F
-ni4gKGMpIEFyYWzEsWsgMjAwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAKu3PgqMyKVYFeaK7yc9SrToJdPNM8Ig3BnuiD9NYvDdE3ePYakqtdTyuTFY
-KTsvP2qcb3N2Je40IIDu6rfwxArNK4aUyeNgsURSsloptJGXg9i3phQvKUmi8wUG
-+7RP2qFsmmaf8EMJyupyj+sA1zU511YXRxcw9L6/P8JorzZAwan0qafoEGsIiveG
-HtyaKhUG9qPw9ODHFNRRf8+0222vR5YXm3dx2KdxnSQM9pQ/hTEST7ruToK4uT6P
-IzdezKKqdfcYbwnTrqdUKDT74eA7YH2gvnmJhsifLfkKS8RQouf9eRbHegsYz85M
-733WB2+Y8a+xwXrXgTW4qhe04MsCAwEAAaNCMEAwHQYDVR0OBBYEFCnFkKslrxHk
-Yb+j/4hhkeYO/pyBMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
-CSqGSIb3DQEBBQUAA4IBAQAQDdr4Ouwo0RSVgrESLFF6QSU2TJ/sPx+EnWVUXKgW
-AkD6bho3hO9ynYYKVZ1WKKxmLNA6VpM0ByWtCLCPyA8JWcqdmBzlVPi5RX9ql2+I
-aE1KBiY3iAIOtsbWcpnOa3faYjGkVh+uX4132l32iPwa2Z61gfAyuOOI0JzzaqC5
-mxRZNTZPz/OOXl0XrRWV2N2y1RVuAE6zS89mlOTgzbUF2mNXi+WzqtvALhyQRNsa
-XRik7r4EW5nVcV9VZWRi1aKbBFmGyGJ353yCRWo9F7/snXUMrqNvWtMvmDb08PUZ
-qxFdyKbjKlhqQgnDvZImZjINXQhVdP+MmNAKpoRq0Tl9
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 2009"
-# Serial: 623603
-# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
-# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
-# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
------BEGIN CERTIFICATE-----
-MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
-ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
-HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
-UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
-tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
-ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
-lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
-/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
-A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
-A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
-dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
-MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
-cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
-L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
-BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
-acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
-o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
-zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
-PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
-Johw1+qRzT65ysCQblrGXnRl11z+o+I=
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
-# Serial: 623604
-# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
-# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
-# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
-NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
-ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
-3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
-qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
-p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
-HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
-HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
-Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
-c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
-RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
-dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
-Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
-3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
-nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
-CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
-xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
-KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Raiz del Estado Venezolano O=Sistema Nacional de Certificacion Electronica OU=Superintendencia de Servicios de Certificacion Electronica
-# Subject: CN=PSCProcert O=Sistema Nacional de Certificacion Electronica OU=Proveedor de Certificados PROCERT
-# Label: "PSCProcert"
-# Serial: 11
-# MD5 Fingerprint: e6:24:e9:12:01:ae:0c:de:8e:85:c4:ce:a3:12:dd:ec
-# SHA1 Fingerprint: 70:c1:8d:74:b4:28:81:0a:e4:fd:a5:75:d7:01:9f:99:b0:3d:50:74
-# SHA256 Fingerprint: 3c:fc:3c:14:d1:f6:84:ff:17:e3:8c:43:ca:44:0c:00:b9:67:ec:93:3e:8b:fe:06:4c:a1:d7:2c:90:f2:ad:b0
------BEGIN CERTIFICATE-----
-MIIJhjCCB26gAwIBAgIBCzANBgkqhkiG9w0BAQsFADCCAR4xPjA8BgNVBAMTNUF1
-dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIFJhaXogZGVsIEVzdGFkbyBWZW5lem9s
-YW5vMQswCQYDVQQGEwJWRTEQMA4GA1UEBxMHQ2FyYWNhczEZMBcGA1UECBMQRGlz
-dHJpdG8gQ2FwaXRhbDE2MDQGA1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0
-aWZpY2FjaW9uIEVsZWN0cm9uaWNhMUMwQQYDVQQLEzpTdXBlcmludGVuZGVuY2lh
-IGRlIFNlcnZpY2lvcyBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9uaWNhMSUwIwYJ
-KoZIhvcNAQkBFhZhY3JhaXpAc3VzY2VydGUuZ29iLnZlMB4XDTEwMTIyODE2NTEw
-MFoXDTIwMTIyNTIzNTk1OVowgdExJjAkBgkqhkiG9w0BCQEWF2NvbnRhY3RvQHBy
-b2NlcnQubmV0LnZlMQ8wDQYDVQQHEwZDaGFjYW8xEDAOBgNVBAgTB01pcmFuZGEx
-KjAoBgNVBAsTIVByb3ZlZWRvciBkZSBDZXJ0aWZpY2Fkb3MgUFJPQ0VSVDE2MDQG
-A1UEChMtU2lzdGVtYSBOYWNpb25hbCBkZSBDZXJ0aWZpY2FjaW9uIEVsZWN0cm9u
-aWNhMQswCQYDVQQGEwJWRTETMBEGA1UEAxMKUFNDUHJvY2VydDCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBANW39KOUM6FGqVVhSQ2oh3NekS1wwQYalNo9
-7BVCwfWMrmoX8Yqt/ICV6oNEolt6Vc5Pp6XVurgfoCfAUFM+jbnADrgV3NZs+J74
-BCXfgI8Qhd19L3uA3VcAZCP4bsm+lU/hdezgfl6VzbHvvnpC2Mks0+saGiKLt38G
-ieU89RLAu9MLmV+QfI4tL3czkkohRqipCKzx9hEC2ZUWno0vluYC3XXCFCpa1sl9
-JcLB/KpnheLsvtF8PPqv1W7/U0HU9TI4seJfxPmOEO8GqQKJ/+MMbpfg353bIdD0
-PghpbNjU5Db4g7ayNo+c7zo3Fn2/omnXO1ty0K+qP1xmk6wKImG20qCZyFSTXai2
-0b1dCl53lKItwIKOvMoDKjSuc/HUtQy9vmebVOvh+qBa7Dh+PsHMosdEMXXqP+UH
-0quhJZb25uSgXTcYOWEAM11G1ADEtMo88aKjPvM6/2kwLkDd9p+cJsmWN63nOaK/
-6mnbVSKVUyqUtd+tFjiBdWbjxywbk5yqjKPK2Ww8F22c3HxT4CAnQzb5EuE8XL1m
-v6JpIzi4mWCZDlZTOpx+FIywBm/xhnaQr/2v/pDGj59/i5IjnOcVdo/Vi5QTcmn7
-K2FjiO/mpF7moxdqWEfLcU8UC17IAggmosvpr2uKGcfLFFb14dq12fy/czja+eev
-bqQ34gcnAgMBAAGjggMXMIIDEzASBgNVHRMBAf8ECDAGAQH/AgEBMDcGA1UdEgQw
-MC6CD3N1c2NlcnRlLmdvYi52ZaAbBgVghl4CAqASDBBSSUYtRy0yMDAwNDAzNi0w
-MB0GA1UdDgQWBBRBDxk4qpl/Qguk1yeYVKIXTC1RVDCCAVAGA1UdIwSCAUcwggFD
-gBStuyIdxuDSAaj9dlBSk+2YwU2u06GCASakggEiMIIBHjE+MDwGA1UEAxM1QXV0
-b3JpZGFkIGRlIENlcnRpZmljYWNpb24gUmFpeiBkZWwgRXN0YWRvIFZlbmV6b2xh
-bm8xCzAJBgNVBAYTAlZFMRAwDgYDVQQHEwdDYXJhY2FzMRkwFwYDVQQIExBEaXN0
-cml0byBDYXBpdGFsMTYwNAYDVQQKEy1TaXN0ZW1hIE5hY2lvbmFsIGRlIENlcnRp
-ZmljYWNpb24gRWxlY3Ryb25pY2ExQzBBBgNVBAsTOlN1cGVyaW50ZW5kZW5jaWEg
-ZGUgU2VydmljaW9zIGRlIENlcnRpZmljYWNpb24gRWxlY3Ryb25pY2ExJTAjBgkq
-hkiG9w0BCQEWFmFjcmFpekBzdXNjZXJ0ZS5nb2IudmWCAQowDgYDVR0PAQH/BAQD
-AgEGME0GA1UdEQRGMESCDnByb2NlcnQubmV0LnZloBUGBWCGXgIBoAwMClBTQy0w
-MDAwMDKgGwYFYIZeAgKgEgwQUklGLUotMzE2MzUzNzMtNzB2BgNVHR8EbzBtMEag
-RKBChkBodHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9sY3IvQ0VSVElGSUNBRE8t
-UkFJWi1TSEEzODRDUkxERVIuY3JsMCOgIaAfhh1sZGFwOi8vYWNyYWl6LnN1c2Nl
-cnRlLmdvYi52ZTA3BggrBgEFBQcBAQQrMCkwJwYIKwYBBQUHMAGGG2h0dHA6Ly9v
-Y3NwLnN1c2NlcnRlLmdvYi52ZTBBBgNVHSAEOjA4MDYGBmCGXgMBAjAsMCoGCCsG
-AQUFBwIBFh5odHRwOi8vd3d3LnN1c2NlcnRlLmdvYi52ZS9kcGMwDQYJKoZIhvcN
-AQELBQADggIBACtZ6yKZu4SqT96QxtGGcSOeSwORR3C7wJJg7ODU523G0+1ng3dS
-1fLld6c2suNUvtm7CpsR72H0xpkzmfWvADmNg7+mvTV+LFwxNG9s2/NkAZiqlCxB
-3RWGymspThbASfzXg0gTB1GEMVKIu4YXx2sviiCtxQuPcD4quxtxj7mkoP3Yldmv
-Wb8lK5jpY5MvYB7Eqvh39YtsL+1+LrVPQA3uvFd359m21D+VJzog1eWuq2w1n8Gh
-HVnchIHuTQfiSLaeS5UtQbHh6N5+LwUeaO6/u5BlOsju6rEYNxxik6SgMexxbJHm
-pHmJWhSnFFAFTKQAVzAswbVhltw+HoSvOULP5dAssSS830DD7X9jSr3hTxJkhpXz
-sOfIt+FTvZLm8wyWuevo5pLtp4EJFAv8lXrPj9Y0TzYS3F7RNHXGRoAvlQSMx4bE
-qCaJqD8Zm4G7UaRKhqsLEQ+xrmNTbSjq3TNWOByyrYDT13K9mmyZY+gAu0F2Bbdb
-mRiKw7gSXFbPVgx96OLP7bx0R/vu0xdOIk9W/1DzLuY5poLWccret9W6aAjtmcz9
-opLLabid+Qqkpj5PkygqYWwHJgD/ll9ohri4zspV4KuxPX+Y1zMOWj3YeMLEYC/H
-YvBhkdI4sPaeVdtAgAUSM84dkpvRabP/v/GSCmE1P93+hvS84Bpxs2Km
------END CERTIFICATE-----
-
-# Issuer: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Subject: CN=China Internet Network Information Center EV Certificates Root O=China Internet Network Information Center
-# Label: "China Internet Network Information Center EV Certificates Root"
-# Serial: 1218379777
-# MD5 Fingerprint: 55:5d:63:00:97:bd:6a:97:f5:67:ab:4b:fb:6e:63:15
-# SHA1 Fingerprint: 4f:99:aa:93:fb:2b:d1:37:26:a1:99:4a:ce:7f:f0:05:f2:93:5d:1e
-# SHA256 Fingerprint: 1c:01:c6:f4:db:b2:fe:fc:22:55:8b:2b:ca:32:56:3f:49:84:4a:cf:c3:2b:7b:e4:b0:ff:59:9f:9e:8c:7a:f7
------BEGIN CERTIFICATE-----
-MIID9zCCAt+gAwIBAgIESJ8AATANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMC
-Q04xMjAwBgNVBAoMKUNoaW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24g
-Q2VudGVyMUcwRQYDVQQDDD5DaGluYSBJbnRlcm5ldCBOZXR3b3JrIEluZm9ybWF0
-aW9uIENlbnRlciBFViBDZXJ0aWZpY2F0ZXMgUm9vdDAeFw0xMDA4MzEwNzExMjVa
-Fw0zMDA4MzEwNzExMjVaMIGKMQswCQYDVQQGEwJDTjEyMDAGA1UECgwpQ2hpbmEg
-SW50ZXJuZXQgTmV0d29yayBJbmZvcm1hdGlvbiBDZW50ZXIxRzBFBgNVBAMMPkNo
-aW5hIEludGVybmV0IE5ldHdvcmsgSW5mb3JtYXRpb24gQ2VudGVyIEVWIENlcnRp
-ZmljYXRlcyBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm35z
-7r07eKpkQ0H1UN+U8i6yjUqORlTSIRLIOTJCBumD1Z9S7eVnAztUwYyZmczpwA//
-DdmEEbK40ctb3B75aDFk4Zv6dOtouSCV98YPjUesWgbdYavi7NifFy2cyjw1l1Vx
-zUOFsUcW9SxTgHbP0wBkvUCZ3czY28Sf1hNfQYOL+Q2HklY0bBoQCxfVWhyXWIQ8
-hBouXJE0bhlffxdpxWXvayHG1VA6v2G5BY3vbzQ6sm8UY78WO5upKv23KzhmBsUs
-4qpnHkWnjQRmQvaPK++IIGmPMowUc9orhpFjIpryp9vOiYurXccUwVswah+xt54u
-gQEC7c+WXmPbqOY4twIDAQABo2MwYTAfBgNVHSMEGDAWgBR8cks5x8DbYqVPm6oY
-NJKiyoOCWTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
-FgQUfHJLOcfA22KlT5uqGDSSosqDglkwDQYJKoZIhvcNAQEFBQADggEBACrDx0M3
-j92tpLIM7twUbY8opJhJywyA6vPtI2Z1fcXTIWd50XPFtQO3WKwMVC/GVhMPMdoG
-52U7HW8228gd+f2ABsqjPWYWqJ1MFn3AlUa1UeTiH9fqBk1jjZaM7+czV0I664zB
-echNdn3e9rG3geCg+aF4RhcaVpjwTj2rHO3sOdwHSPdj/gauwqRcalsyiMXHM4Ws
-ZkJHwlgkmeHlPuV1LI5D1l08eB6olYIpUNHRFrrvwb562bTYzB5MRuF3sTGrvSrI
-zo9uoV1/A3U05K2JRVRevq4opbs/eHnrc7MKDf2+yfdWrPa37S+bISnHOLaVxATy
-wy39FCqQmbkHzJ8=
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root CA 2"
-# Serial: 40698052477090394928831521023204026294
-# MD5 Fingerprint: 5b:04:69:ec:a5:83:94:63:18:a7:86:d0:e4:f2:6e:19
-# SHA1 Fingerprint: 77:47:4f:c6:30:e4:0f:4c:47:64:3f:84:ba:b8:c6:95:4a:8a:41:ec
-# SHA256 Fingerprint: f0:9b:12:2c:71:14:f4:a0:9b:d4:ea:4f:4a:99:d5:58:b4:6e:4c:25:cd:81:14:0d:29:c0:56:13:91:4c:38:41
------BEGIN CERTIFICATE-----
-MIIF2TCCA8GgAwIBAgIQHp4o6Ejy5e/DfEoeWhhntjANBgkqhkiG9w0BAQsFADBk
-MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
-YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
-Q0EgMjAeFw0xMTA2MjQwODM4MTRaFw0zMTA2MjUwNzM4MTRaMGQxCzAJBgNVBAYT
-AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
-Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAyMIICIjAN
-BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlUJOhJ1R5tMJ6HJaI2nbeHCOFvEr
-jw0DzpPMLgAIe6szjPTpQOYXTKueuEcUMncy3SgM3hhLX3af+Dk7/E6J2HzFZ++r
-0rk0X2s682Q2zsKwzxNoysjL67XiPS4h3+os1OD5cJZM/2pYmLcX5BtS5X4HAB1f
-2uY+lQS3aYg5oUFgJWFLlTloYhyxCwWJwDaCFCE/rtuh/bxvHGCGtlOUSbkrRsVP
-ACu/obvLP+DHVxxX6NZp+MEkUp2IVd3Chy50I9AU/SpHWrumnf2U5NGKpV+GY3aF
-y6//SSj8gO1MedK75MDvAe5QQQg1I3ArqRa0jG6F6bYRzzHdUyYb3y1aSgJA/MTA
-tukxGggo5WDDH8SQjhBiYEQN7Aq+VRhxLKX0srwVYv8c474d2h5Xszx+zYIdkeNL
-6yxSNLCK/RJOlrDrcH+eOfdmQrGrrFLadkBXeyq96G4DsguAhYidDMfCd7Camlf0
-uPoTXGiTOmekl9AbmbeGMktg2M7v0Ax/lZ9vh0+Hio5fCHyqW/xavqGRn1V9TrAL
-acywlKinh/LTSlDcX3KwFnUey7QYYpqwpzmqm59m2I2mbJYV4+by+PGDYmy7Velh
-k6M99bFXi08jsJvllGov34zflVEpYKELKeRcVVi3qPyZ7iVNTA6z00yPhOgpD/0Q
-VAKFyPnlw4vP5w8CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
-FDASBgdghXQBUwIBBgdghXQBUwIBMBIGA1UdEwEB/wQIMAYBAf8CAQcwHQYDVR0O
-BBYEFE0mICKJS9PVpAqhb97iEoHF8TwuMB8GA1UdIwQYMBaAFE0mICKJS9PVpAqh
-b97iEoHF8TwuMA0GCSqGSIb3DQEBCwUAA4ICAQAyCrKkG8t9voJXiblqf/P0wS4R
-fbgZPnm3qKhyN2abGu2sEzsOv2LwnN+ee6FTSA5BesogpxcbtnjsQJHzQq0Qw1zv
-/2BZf82Fo4s9SBwlAjxnffUy6S8w5X2lejjQ82YqZh6NM4OKb3xuqFp1mrjX2lhI
-REeoTPpMSQpKwhI3qEAMw8jh0FcNlzKVxzqfl9NX+Ave5XLzo9v/tdhZsnPdTSpx
-srpJ9csc1fV5yJmz/MFMdOO0vSk3FQQoHt5FRnDsr7p4DooqzgB53MBfGWcsa0vv
-aGgLQ+OswWIJ76bdZWGgr4RVSJFSHMYlkSrQwSIjYVmvRRGFHQEkNI/Ps/8XciAT
-woCqISxxOQ7Qj1zB09GOInJGTB2Wrk9xseEFKZZZ9LuedT3PDTcNYtsmjGOpI99n
-Bjx8Oto0QuFmtEYE3saWmA9LSHokMnWRn6z3aOkquVVlzl1h0ydw2Df+n7mvoC5W
-t6NlUe07qxS/TFED6F+KBZvuim6c779o+sjaC+NCydAXFJy3SuCvkychVSa1ZC+N
-8f+mQAWFBVzKBxlcCxMoTFh/wqXvRdpg065lYZ1Tg3TCrvJcwhbtkj6EPnNgiLx2
-9CzP0H1907he0ZESEOnN3col49XtmS++dYFLJPlFRpTJKSFTnCZFqhMX5OfNeOI5
-wSsSnqaeG8XmDtkx2Q==
------END CERTIFICATE-----
-
-# Issuer: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Subject: CN=Swisscom Root EV CA 2 O=Swisscom OU=Digital Certificate Services
-# Label: "Swisscom Root EV CA 2"
-# Serial: 322973295377129385374608406479535262296
-# MD5 Fingerprint: 7b:30:34:9f:dd:0a:4b:6b:35:ca:31:51:28:5d:ae:ec
-# SHA1 Fingerprint: e7:a1:90:29:d3:d5:52:dc:0d:0f:c6:92:d3:ea:88:0d:15:2e:1a:6b
-# SHA256 Fingerprint: d9:5f:ea:3c:a4:ee:dc:e7:4c:d7:6e:75:fc:6d:1f:f6:2c:44:1f:0f:a8:bc:77:f0:34:b1:9e:5d:b2:58:01:5d
------BEGIN CERTIFICATE-----
-MIIF4DCCA8igAwIBAgIRAPL6ZOJ0Y9ON/RAdBB92ylgwDQYJKoZIhvcNAQELBQAw
-ZzELMAkGA1UEBhMCY2gxETAPBgNVBAoTCFN3aXNzY29tMSUwIwYDVQQLExxEaWdp
-dGFsIENlcnRpZmljYXRlIFNlcnZpY2VzMR4wHAYDVQQDExVTd2lzc2NvbSBSb290
-IEVWIENBIDIwHhcNMTEwNjI0MDk0NTA4WhcNMzEwNjI1MDg0NTA4WjBnMQswCQYD
-VQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0YWwgQ2Vy
-dGlmaWNhdGUgU2VydmljZXMxHjAcBgNVBAMTFVN3aXNzY29tIFJvb3QgRVYgQ0Eg
-MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMT3HS9X6lds93BdY7Bx
-UglgRCgzo3pOCvrY6myLURYaVa5UJsTMRQdBTxB5f3HSek4/OE6zAMaVylvNwSqD
-1ycfMQ4jFrclyxy0uYAyXhqdk/HoPGAsp15XGVhRXrwsVgu42O+LgrQ8uMIkqBPH
-oCE2G3pXKSinLr9xJZDzRINpUKTk4RtiGZQJo/PDvO/0vezbE53PnUgJUmfANykR
-HvvSEaeFGHR55E+FFOtSN+KxRdjMDUN/rhPSays/p8LiqG12W0OfvrSdsyaGOx9/
-5fLoZigWJdBLlzin5M8J0TbDC77aO0RYjb7xnglrPvMyxyuHxuxenPaHZa0zKcQv
-idm5y8kDnftslFGXEBuGCxobP/YCfnvUxVFkKJ3106yDgYjTdLRZncHrYTNaRdHL
-OdAGalNgHa/2+2m8atwBz735j9m9W8E6X47aD0upm50qKGsaCnw8qyIL5XctcfaC
-NYGu+HuB5ur+rPQam3Rc6I8k9l2dRsQs0h4rIWqDJ2dVSqTjyDKXZpBy2uPUZC5f
-46Fq9mDU5zXNysRojddxyNMkM3OxbPlq4SjbX8Y96L5V5jcb7STZDxmPX2MYWFCB
-UWVv8p9+agTnNCRxunZLWB4ZvRVgRaoMEkABnRDixzgHcgplwLa7JSnaFp6LNYth
-7eVxV4O1PHGf40+/fh6Bn0GXAgMBAAGjgYYwgYMwDgYDVR0PAQH/BAQDAgGGMB0G
-A1UdIQQWMBQwEgYHYIV0AVMCAgYHYIV0AVMCAjASBgNVHRMBAf8ECDAGAQH/AgED
-MB0GA1UdDgQWBBRF2aWBbj2ITY1x0kbBbkUe88SAnTAfBgNVHSMEGDAWgBRF2aWB
-bj2ITY1x0kbBbkUe88SAnTANBgkqhkiG9w0BAQsFAAOCAgEAlDpzBp9SSzBc1P6x
-XCX5145v9Ydkn+0UjrgEjihLj6p7jjm02Vj2e6E1CqGdivdj5eu9OYLU43otb98T
-PLr+flaYC/NUn81ETm484T4VvwYmneTwkLbUwp4wLh/vx3rEUMfqe9pQy3omywC0
-Wqu1kx+AiYQElY2NfwmTv9SoqORjbdlk5LgpWgi/UOGED1V7XwgiG/W9mR4U9s70
-WBCCswo9GcG/W6uqmdjyMb3lOGbcWAXH7WMaLgqXfIeTK7KK4/HsGOV1timH59yL
-Gn602MnTihdsfSlEvoqq9X46Lmgxk7lq2prg2+kupYTNHAq4Sgj5nPFhJpiTt3tm
-7JFe3VE/23MPrQRYCd0EApUKPtN236YQHoA96M2kZNEzx5LH4k5E4wnJTsJdhw4S
-nr8PyQUQ3nqjsTzyP6WqJ3mtMX0f/fwZacXduT98zca0wjAefm6S139hdlqP65VN
-vBFuIXxZN5nQBrz5Bm0yFqXZaajh3DyAHmBR3NdUIR7KYndP+tiPsys6DXhyyWhB
-WkdKwqPrGtcKqzwyVcgKEZzfdNbwQBUdyLmPtTbFr/giuMod89a2GQ+fYWVq6nTI
-fI/DT11lgh/ZDYnadXL77/FHZxOzyNEZiCcmmpl5fx7kLD977vHeTYuWl8PVP3wb
-I+2ksx0WckNLIOFZfsLorSa/ovc=
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R1 O=Disig a.s.
-# Subject: CN=CA Disig Root R1 O=Disig a.s.
-# Label: "CA Disig Root R1"
-# Serial: 14052245610670616104
-# MD5 Fingerprint: be:ec:11:93:9a:f5:69:21:bc:d7:c1:c0:67:89:cc:2a
-# SHA1 Fingerprint: 8e:1c:74:f8:a6:20:b9:e5:8a:f4:61:fa:ec:2b:47:56:51:1a:52:c6
-# SHA256 Fingerprint: f9:6f:23:f4:c3:e7:9c:07:7a:46:98:8d:5a:f5:90:06:76:a0:f0:39:cb:64:5d:d1:75:49:b2:16:c8:24:40:ce
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAMMDmu5QkG4oMA0GCSqGSIb3DQEBBQUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIxMB4XDTEyMDcxOTA5MDY1NloXDTQy
-MDcxOTA5MDY1NlowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjEw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCqw3j33Jijp1pedxiy3QRk
-D2P9m5YJgNXoqqXinCaUOuiZc4yd39ffg/N4T0Dhf9Kn0uXKE5Pn7cZ3Xza1lK/o
-OI7bm+V8u8yN63Vz4STN5qctGS7Y1oprFOsIYgrY3LMATcMjfF9DCCMyEtztDK3A
-fQ+lekLZWnDZv6fXARz2m6uOt0qGeKAeVjGu74IKgEH3G8muqzIm1Cxr7X1r5OJe
-IgpFy4QxTaz+29FHuvlglzmxZcfe+5nkCiKxLU3lSCZpq+Kq8/v8kiky6bM+TR8n
-oc2OuRf7JT7JbvN32g0S9l3HuzYQ1VTW8+DiR0jm3hTaYVKvJrT1cU/J19IG32PK
-/yHoWQbgCNWEFVP3Q+V8xaCJmGtzxmjOZd69fwX3se72V6FglcXM6pM6vpmumwKj
-rckWtc7dXpl4fho5frLABaTAgqWjR56M6ly2vGfb5ipN0gTco65F97yLnByn1tUD
-3AjLLhbKXEAz6GfDLuemROoRRRw1ZS0eRWEkG4IupZ0zXWX4Qfkuy5Q/H6MMMSRE
-7cderVC6xkGbrPAXZcD4XW9boAo0PO7X6oifmPmvTiT6l7Jkdtqr9O3jw2Dv1fkC
-yC2fg69naQanMVXVz0tv/wQFx1isXxYb5dKj6zHbHzMVTdDypVP1y+E9Tmgt2BLd
-qvLmTZtJ5cUoobqwWsagtQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUiQq0OJMa5qvum5EY+fU8PjXQ04IwDQYJKoZI
-hvcNAQEFBQADggIBADKL9p1Kyb4U5YysOMo6CdQbzoaz3evUuii+Eq5FLAR0rBNR
-xVgYZk2C2tXck8An4b58n1KeElb21Zyp9HWc+jcSjxyT7Ff+Bw+r1RL3D65hXlaA
-SfX8MPWbTx9BLxyE04nH4toCdu0Jz2zBuByDHBb6lM19oMgY0sidbvW9adRtPTXo
-HqJPYNcHKfyyo6SdbhWSVhlMCrDpfNIZTUJG7L399ldb3Zh+pE3McgODWF3vkzpB
-emOqfDqo9ayk0d2iLbYq/J8BjuIQscTK5GfbVSUZP/3oNn6z4eGBrxEWi1CXYBmC
-AMBrTXO40RMHPuq2MU/wQppt4hF05ZSsjYSVPCGvxdpHyN85YmLLW1AL14FABZyb
-7bq2ix4Eb5YgOe2kfSnbSM6C3NQCjR0EMVrHS/BsYVLXtFHCgWzN4funodKSds+x
-DzdYpPJScWc/DIh4gInByLUfkmO+p3qKViwaqKactV2zY9ATIKHrkWzQjX2v3wvk
-F7mGnjixlAxYjOBVqjtjbZqJYLhkKpLGN/R+Q0O3c+gB53+XD9fyexn9GtePyfqF
-a3qdnom2piiZk4hA9z7NUaPK6u95RyG1/jLix8NRb76AdPCkwzryT+lf3xkK8jsT
-Q6wxpLPn6/wY1gGp8yqPNg7rtLG8t0zJa7+h89n07eLw4+1knj0vllJPgFOL
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R2 O=Disig a.s.
-# Subject: CN=CA Disig Root R2 O=Disig a.s.
-# Label: "CA Disig Root R2"
-# Serial: 10572350602393338211
-# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
-# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
-# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
-MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
-NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
-PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
-x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
-QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
-yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
-QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
-H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
-QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
-i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
-nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
-rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
-hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
-tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
-GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
-lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
-+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
-TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
-nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
-gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
-G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
-L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
------END CERTIFICATE-----
-
-# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Label: "ACCVRAIZ1"
-# Serial: 6828503384748696800
-# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
-# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
-# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
------BEGIN CERTIFICATE-----
-MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
-AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
-CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
-BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
-VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
-qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
-HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
-G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
-lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
-IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
-0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
-k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
-4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
-m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
-cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
-uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
-KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
-ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
-AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
-VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
-VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
-CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
-cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
-QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
-7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
-cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
-QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
-czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
-aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
-aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
-DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
-BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
-D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
-JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
-AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
-vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
-tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
-7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
-I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
-h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
-d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
-pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Global Root CA"
-# Serial: 3262
-# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
-# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
-# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
------BEGIN CERTIFICATE-----
-MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
-EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
-NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
-B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
-10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
-0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
-MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
-zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
-46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
-yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
-laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
-oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
-BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
-qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
-4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
-1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
-LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
-H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
-RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
-nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
-15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
-6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
-nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
-wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
-aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
-KwbQBM0=
------END CERTIFICATE-----
-
-# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Label: "TeliaSonera Root CA v1"
-# Serial: 199041966741090107964904287217786801558
-# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
-# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
-# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
------BEGIN CERTIFICATE-----
-MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
-NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
-b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
-VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
-MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
-VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
-7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
-Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
-/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
-81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
-dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
-Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
-sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
-slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
-arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
-VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
-9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
-dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
-TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
-Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
-Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
-OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
-vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
-t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
-HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
-SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
------END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Label: "E-Tugra Certification Authority"
-# Serial: 7667447206703254355
-# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
-# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
-# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
-BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
-aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
-BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
-Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
-MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
-BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
-em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
-ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
-B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
-D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
-Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
-q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
-k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
-fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
-dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
-zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
-rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
-Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
-XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
-Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
-HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
-GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
-77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
-+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
-FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
-yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
-AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
-NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 2"
-# Serial: 1
-# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
-# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
-# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
-AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
-FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
-1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
-jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
-wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
-WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
-NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
-uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
-IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
-g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
-9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
-BSeOE6Fuwg==
------END CERTIFICATE-----
-
-# Issuer: CN=Atos TrustedRoot 2011 O=Atos
-# Subject: CN=Atos TrustedRoot 2011 O=Atos
-# Label: "Atos TrustedRoot 2011"
-# Serial: 6643877497813316402
-# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
-# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
-# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
-AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
-EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
-FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
-REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
-Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
-VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
-SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
-cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
-eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
-A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
-DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
-vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
-DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
-maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
-lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
-KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 1 G3"
-# Serial: 687049649626669250736271037606554624078720034195
-# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
-# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
-# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
-MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
-wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
-rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
-68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
-4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
-UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
-abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
-3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
-KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
-hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
-Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
-zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
-ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
-MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
-cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
-qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
-YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
-b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
-8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
-NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
-ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
-q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
-nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2 G3"
-# Serial: 390156079458959257446133169266079962026824725800
-# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
-# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
-# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
-MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
-qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
-n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
-c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
-O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
-o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
-IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
-IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
-8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
-vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
-7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
-cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
-ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
-AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
-roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
-lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
-+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
-csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
-dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
-KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
-HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
-WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3 G3"
-# Serial: 268090761170461462463995952157327242137089239581
-# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
-# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
-# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
-MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
-/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
-FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
-U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
-ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
-FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
-A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
-eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
-sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
-VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
-ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
-KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
-FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
-oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
-u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
-0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
-3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
-8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
-DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
-PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
-ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G2"
-# Serial: 15385348160840213938643033620894905419
-# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
-# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
-# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
------BEGIN CERTIFICATE-----
-MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
-n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
-biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
-EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
-bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
-YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
-AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
-BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
-QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
-0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
-lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
-B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
-ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
-IhNzbM8m9Yop5w==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G3"
-# Serial: 15459312981008553731928384953135426796
-# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
-# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
-# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
------BEGIN CERTIFICATE-----
-MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
-Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
-RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
-AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
-JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
-6pZjamVFkpUBtA==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G2"
-# Serial: 4293743540046975378534879503202253541
-# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
-# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
-# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
------BEGIN CERTIFICATE-----
-MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
-MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
-2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
-1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
-q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
-tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
-vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
-BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
-5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
-1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
-NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
-Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
-8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
-pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
-MrY=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G3"
-# Serial: 7089244469030293291760083333884364146
-# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
-# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
-# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
------BEGIN CERTIFICATE-----
-MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
-Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
-EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
-IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
-K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
-fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
-BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
-AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
-sycX
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Trusted Root G4"
-# Serial: 7451500558977370777930084869016614236
-# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
-# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
-# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
------BEGIN CERTIFICATE-----
-MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
-RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
-ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
-xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
-ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
-DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
-jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
-CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
-EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
-fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
-uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
-chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
-9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
-ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
-SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
-+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
-fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
-sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
-cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
-0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
-4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
-r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
-/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
-gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
------END CERTIFICATE-----
-
-# Issuer: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Subject: CN=Certification Authority of WoSign O=WoSign CA Limited
-# Label: "WoSign"
-# Serial: 125491772294754854453622855443212256657
-# MD5 Fingerprint: a1:f2:f9:b5:d2:c8:7a:74:b8:f3:05:f1:d7:e1:84:8d
-# SHA1 Fingerprint: b9:42:94:bf:91:ea:8f:b6:4b:e6:10:97:c7:fb:00:13:59:b6:76:cb
-# SHA256 Fingerprint: 4b:22:d5:a6:ae:c9:9f:3c:db:79:aa:5e:c0:68:38:47:9c:d5:ec:ba:71:64:f7:f2:2d:c1:d6:5f:63:d8:57:08
------BEGIN CERTIFICATE-----
-MIIFdjCCA16gAwIBAgIQXmjWEXGUY1BWAGjzPsnFkTANBgkqhkiG9w0BAQUFADBV
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxKjAoBgNV
-BAMTIUNlcnRpZmljYXRpb24gQXV0aG9yaXR5IG9mIFdvU2lnbjAeFw0wOTA4MDgw
-MTAwMDFaFw0zOTA4MDgwMTAwMDFaMFUxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFX
-b1NpZ24gQ0EgTGltaXRlZDEqMCgGA1UEAxMhQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgb2YgV29TaWduMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvcqN
-rLiRFVaXe2tcesLea9mhsMMQI/qnobLMMfo+2aYpbxY94Gv4uEBf2zmoAHqLoE1U
-fcIiePyOCbiohdfMlZdLdNiefvAA5A6JrkkoRBoQmTIPJYhTpA2zDxIIFgsDcScc
-f+Hb0v1naMQFXQoOXXDX2JegvFNBmpGN9J42Znp+VsGQX+axaCA2pIwkLCxHC1l2
-ZjC1vt7tj/id07sBMOby8w7gLJKA84X5KIq0VC6a7fd2/BVoFutKbOsuEo/Uz/4M
-x1wdC34FMr5esAkqQtXJTpCzWQ27en7N1QhatH/YHGkR+ScPewavVIMYe+HdVHpR
-aG53/Ma/UkpmRqGyZxq7o093oL5d//xWC0Nyd5DKnvnyOfUNqfTq1+ezEC8wQjch
-zDBwyYaYD8xYTYO7feUapTeNtqwylwA6Y3EkHp43xP901DfA4v6IRmAR3Qg/UDar
-uHqklWJqbrDKaiFaafPz+x1wOZXzp26mgYmhiMU7ccqjUu6Du/2gd/Tkb+dC221K
-mYo0SLwX3OSACCK28jHAPwQ+658geda4BmRkAjHXqc1S+4RFaQkAKtxVi8QGRkvA
-Sh0JWzko/amrzgD5LkhLJuYwTKVYyrREgk/nkR4zw7CT/xH8gdLKH3Ep3XZPkiWv
-HYG3Dy+MwwbMLyejSuQOmbp8HkUff6oZRZb9/D0CAwEAAaNCMEAwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFOFmzw7R8bNLtwYgFP6H
-EtX2/vs+MA0GCSqGSIb3DQEBBQUAA4ICAQCoy3JAsnbBfnv8rWTjMnvMPLZdRtP1
-LOJwXcgu2AZ9mNELIaCJWSQBnfmvCX0KI4I01fx8cpm5o9dU9OpScA7F9dY74ToJ
-MuYhOZO9sxXqT2r09Ys/L3yNWC7F4TmgPsc9SnOeQHrAK2GpZ8nzJLmzbVUsWh2e
-JXLOC62qx1ViC777Y7NhRCOjy+EaDveaBk3e1CNOIZZbOVtXHS9dCF4Jef98l7VN
-g64N1uajeeAz0JmWAjCnPv/So0M/BVoG6kQC2nz4SNAzqfkHx5Xh9T71XXG68pWp
-dIhhWeO/yloTunK0jF02h+mmxTwTv97QRCbut+wucPrXnbes5cVAWubXbHssw1ab
-R80LzvobtCHXt2a49CUwi1wNuepnsvRtrtWhnk/Yn+knArAdBtaP4/tIEp9/EaEQ
-PkxROpaw0RPxx9gmrjrKkcRpnd8BKWRRb2jaFOwIQZeQjdCygPLPwj2/kWjFgGce
-xGATVdVhmVd8upUPYUk6ynW8yQqTP2cOEvIo4jEbwFcW3wh8GcF+Dx+FHgo2fFt+
-J7x6v+Db9NpSvd4MVHAxkUOVyLzwPt0JfjBkUO1/AaQzZ01oT74V77D2AhGiGxMl
-OtzCWfHjXEa7ZywCRuoeSKbmW9m1vFGikpbbqsY3Iqb+zCB0oy2pLmvLwIIRIbWT
-ee5Ehr7XHuQe+w==
------END CERTIFICATE-----
-
-# Issuer: CN=CA 沃通根证书 O=WoSign CA Limited
-# Subject: CN=CA 沃通根证书 O=WoSign CA Limited
-# Label: "WoSign China"
-# Serial: 106921963437422998931660691310149453965
-# MD5 Fingerprint: 78:83:5b:52:16:76:c4:24:3b:83:78:e8:ac:da:9a:93
-# SHA1 Fingerprint: 16:32:47:8d:89:f9:21:3a:92:00:85:63:f5:a4:a7:d3:12:40:8a:d6
-# SHA256 Fingerprint: d6:f0:34:bd:94:aa:23:3f:02:97:ec:a4:24:5b:28:39:73:e4:47:aa:59:0f:31:0c:77:f4:8f:df:83:11:22:54
------BEGIN CERTIFICATE-----
-MIIFWDCCA0CgAwIBAgIQUHBrzdgT/BtOOzNy0hFIjTANBgkqhkiG9w0BAQsFADBG
-MQswCQYDVQQGEwJDTjEaMBgGA1UEChMRV29TaWduIENBIExpbWl0ZWQxGzAZBgNV
-BAMMEkNBIOayg+mAmuagueivgeS5pjAeFw0wOTA4MDgwMTAwMDFaFw0zOTA4MDgw
-MTAwMDFaMEYxCzAJBgNVBAYTAkNOMRowGAYDVQQKExFXb1NpZ24gQ0EgTGltaXRl
-ZDEbMBkGA1UEAwwSQ0Eg5rKD6YCa5qC56K+B5LmmMIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA0EkhHiX8h8EqwqzbdoYGTufQdDTc7WU1/FDWiD+k8H/r
-D195L4mx/bxjWDeTmzj4t1up+thxx7S8gJeNbEvxUNUqKaqoGXqW5pWOdO2XCld1
-9AXbbQs5uQF/qvbW2mzmBeCkTVL829B0txGMe41P/4eDrv8FAxNXUDf+jJZSEExf
-v5RxadmWPgxDT74wwJ85dE8GRV2j1lY5aAfMh09Qd5Nx2UQIsYo06Yms25tO4dnk
-UkWMLhQfkWsZHWgpLFbE4h4TV2TwYeO5Ed+w4VegG63XX9Gv2ystP9Bojg/qnw+L
-NVgbExz03jWhCl3W6t8Sb8D7aQdGctyB9gQjF+BNdeFyb7Ao65vh4YOhn0pdr8yb
-+gIgthhid5E7o9Vlrdx8kHccREGkSovrlXLp9glk3Kgtn3R46MGiCWOc76DbT52V
-qyBPt7D3h1ymoOQ3OMdc4zUPLK2jgKLsLl3Az+2LBcLmc272idX10kaO6m1jGx6K
-yX2m+Jzr5dVjhU1zZmkR/sgO9MHHZklTfuQZa/HpelmjbX7FF+Ynxu8b22/8DU0G
-AbQOXDBGVWCvOGU6yke6rCzMRh+yRpY/8+0mBe53oWprfi1tWFxK1I5nuPHa1UaK
-J/kR8slC/k7e3x9cxKSGhxYzoacXGKUN5AXlK8IrC6KVkLn9YDxOiT7nnO4fuwEC
-AwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFOBNv9ybQV0T6GTwp+kVpOGBwboxMA0GCSqGSIb3DQEBCwUAA4ICAQBqinA4
-WbbaixjIvirTthnVZil6Xc1bL3McJk6jfW+rtylNpumlEYOnOXOvEESS5iVdT2H6
-yAa+Tkvv/vMx/sZ8cApBWNromUuWyXi8mHwCKe0JgOYKOoICKuLJL8hWGSbueBwj
-/feTZU7n85iYr83d2Z5AiDEoOqsuC7CsDCT6eiaY8xJhEPRdF/d+4niXVOKM6Cm6
-jBAyvd0zaziGfjk9DgNyp115j0WKWa5bIW4xRtVZjc8VX90xJc/bYNaBRHIpAlf2
-ltTW/+op2znFuCyKGo3Oy+dCMYYFaA6eFN0AkLppRQjbbpCBhqcqBT/mhDn4t/lX
-X0ykeVoQDF7Va/81XwVRHmyjdanPUIPTfPRm94KNPQx96N97qA4bLJyuQHCH2u2n
-FoJavjVsIE4iYdm8UXrNemHcSxH5/mc0zy4EZmFcV5cjjPOGG0jfKq+nwf/Yjj4D
-u9gqsPoUJbJRa4ZDhS4HIxaAjUz7tGM7zMN07RujHv41D198HRaG9Q7DlfEvr10l
-O1Hm13ZBONFLAzkopR6RctR9q5czxNM+4Gm2KHmgCY0c0f9BckgG/Jou5yD5m6Le
-ie2uPAmvylezkolwQOQvT8Jwg0DXJCxr5wkf09XHwQj02w47HAcLQxGEIYbpgNR1
-2KvxAmLBsX5VYc8T1yaw15zLKYs4SgsOkI26oQ==
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Label: "COMODO RSA Certification Authority"
-# Serial: 101909084537582093308941363524873193117
-# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
-# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
-# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
------BEGIN CERTIFICATE-----
-MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
-hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
-BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
-Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
-6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
-pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
-9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
-/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
-Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
-+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
-qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
-SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
-u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
-Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
-crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
-FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
-/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
-wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
-4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
-2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
-FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
-CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
-boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
-jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
-S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
-QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
-0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
-NVOFBkpdn627G190
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Label: "USERTrust RSA Certification Authority"
-# Serial: 2645093764781058787591871645665788717
-# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
-# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
-# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
------BEGIN CERTIFICATE-----
-MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
-iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
-cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
-BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
-MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
-BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
-aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
-dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
-3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
-tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
-Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
-VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
-79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
-c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
-c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
-UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
-BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
-A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
-Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
-VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
-ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
-8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
-iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
-Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
-XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
-qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
-VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
-L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
-jjxDah2nGN59PRbxYvnKkKj9
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Label: "USERTrust ECC Certification Authority"
-# Serial: 123013823720199481456569720443997572134
-# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
-# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
-# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
------BEGIN CERTIFICATE-----
-MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
-MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
-eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
-JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
-Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
-VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
-I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
-o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
-A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
-zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
-RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Label: "GlobalSign ECC Root CA - R4"
-# Serial: 14367148294922964480859022125800977897474
-# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
-# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
-# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
------BEGIN CERTIFICATE-----
-MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
-FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
-uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
-ewv4n4Q=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Label: "GlobalSign ECC Root CA - R5"
-# Serial: 32785792099990507226680698011560947931244
-# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
-# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
-# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
------BEGIN CERTIFICATE-----
-MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
-hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
-KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
-515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
-xwy8p2Fp8fc74SrL+SvzZpA3
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G3"
-# Serial: 10003001
-# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
-# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
-# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
------BEGIN CERTIFICATE-----
-MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
-DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
-cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
-IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
-xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
-KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
-9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
-5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
-6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
-Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
-bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
-BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
-XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
-INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
-U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
-LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
-Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
-gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
-/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
-fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
-4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
-1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
-QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
-94B7IWcnMFk=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden EV Root CA"
-# Serial: 10000013
-# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
-# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
-# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
------BEGIN CERTIFICATE-----
-MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
-MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
-TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
-b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
-M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
-UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
-Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
-rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
-pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
-j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
-KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
-/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
-cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
-1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
-px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
-/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
-MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
-eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
-2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
-v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
-wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
-CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
-vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
-Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
-Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
-eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
-FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
-7uzXLg==
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Label: "IdenTrust Commercial Root CA 1"
-# Serial: 13298821034946342390520003877796839426
-# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
-# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
-# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
-VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
-JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
-3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
-+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
-S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
-bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
-T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
-vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
-Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
-dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
-c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
-l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
-iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
-ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
-6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
-LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
-nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
-+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
-W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
-AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
-l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
-4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
-mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
-7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Label: "IdenTrust Public Sector Root CA 1"
-# Serial: 13298821034946342390521976156843933698
-# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
-# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
-# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
------BEGIN CERTIFICATE-----
-MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
-VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
-MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
-ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
-RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
-bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
-/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
-3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
-EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
-9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
-GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
-2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
-WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
-W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
-AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
-DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
-TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
-lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
-mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
-WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
-+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
-tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
-GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
-8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G2"
-# Serial: 1246989352
-# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
-# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
-# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
------BEGIN CERTIFICATE-----
-MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
-cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
-IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
-dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
-NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
-dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
-dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
-aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
-AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
-RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
-cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
-wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
-U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
-jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
-BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
-BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
-jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
-Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
-1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
-nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
-VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - EC1"
-# Serial: 51543124481930649114116133369
-# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
-# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
-# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
------BEGIN CERTIFICATE-----
-MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
-A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
-d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
-dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
-RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
-MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
-VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
-L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
-Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
-ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
-A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
-ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
-Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
-R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
-hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
------END CERTIFICATE-----
-
-# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Label: "CFCA EV ROOT"
-# Serial: 407555286
-# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
-# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
-# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
------BEGIN CERTIFICATE-----
-MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
-TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
-MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
-aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
-T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
-sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
-TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
-/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
-7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
-EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
-hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
-a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
-aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
-TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
-PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
-cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
-tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
-BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
-ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
-ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
-jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
-ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
-xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
-Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
-5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
-/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
-AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
-5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H5"
-# Serial: 156233699172481
-# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
-# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
-# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
------BEGIN CERTIFICATE-----
-MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
-BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
-aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
-QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
-SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
-MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
-VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
-dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
-bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
-/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
-Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
-4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
-5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
-hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
-AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
-SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
-VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
-URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
-peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
-Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
-+qtB4Uu2NQvAmxU=
------END CERTIFICATE-----
-
-# Issuer: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Subject: CN=TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6 O=TÜRKTRUST Bilgi İletişim ve Bilişim Güvenliği Hizmetleri A.Ş.
-# Label: "TÜRKTRUST Elektronik Sertifika Hizmet Sağlayıcısı H6"
-# Serial: 138134509972618
-# MD5 Fingerprint: f8:c5:ee:2a:6b:be:95:8d:08:f7:25:4a:ea:71:3e:46
-# SHA1 Fingerprint: 8a:5c:8c:ee:a5:03:e6:05:56:ba:d8:1b:d4:f6:c9:b0:ed:e5:2f:e0
-# SHA256 Fingerprint: 8d:e7:86:55:e1:be:7f:78:47:80:0b:93:f6:94:d2:1d:36:8c:c0:6e:03:3e:7f:ab:04:bb:5e:b9:9d:a6:b7:00
------BEGIN CERTIFICATE-----
-MIIEJjCCAw6gAwIBAgIGfaHyZeyKMA0GCSqGSIb3DQEBCwUAMIGxMQswCQYDVQQG
-EwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYDVQQKDERUw5xSS1RSVVNUIEJpbGdp
-IMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBB
-LsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBI
-aXptZXQgU2HEn2xhecSxY8Sxc8SxIEg2MB4XDTEzMTIxODA5MDQxMFoXDTIzMTIx
-NjA5MDQxMFowgbExCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExTTBLBgNV
-BAoMRFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2
-ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMUIwQAYDVQQDDDlUw5xSS1RSVVNUIEVs
-ZWt0cm9uaWsgU2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLEgSDYwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCdsGjW6L0UlqMACprx9MfMkU1x
-eHe59yEmFXNRFpQJRwXiM/VomjX/3EsvMsew7eKC5W/a2uqsxgbPJQ1BgfbBOCK9
-+bGlprMBvD9QFyv26WZV1DOzXPhDIHiTVRZwGTLmiddk671IUP320EEDwnS3/faA
-z1vFq6TWlRKb55cTMgPp1KtDWxbtMyJkKbbSk60vbNg9tvYdDjTu0n2pVQ8g9P0p
-u5FbHH3GQjhtQiht1AH7zYiXSX6484P4tZgvsycLSF5W506jM7NE1qXyGJTtHB6p
-lVxiSvgNZ1GpryHV+DKdeboaX+UEVU0TRv/yz3THGmNtwx8XEsMeED5gCLMxAgMB
-AAGjQjBAMB0GA1UdDgQWBBTdVRcT9qzoSCHK77Wv0QAy7Z6MtTAOBgNVHQ8BAf8E
-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAb1gNl0Oq
-FlQ+v6nfkkU/hQu7VtMMUszIv3ZnXuaqs6fvuay0EBQNdH49ba3RfdCaqaXKGDsC
-QC4qnFAUi/5XfldcEQlLNkVS9z2sFP1E34uXI9TDwe7UU5X+LEr+DXCqu4svLcsy
-o4LyVN/Y8t3XSHLuSqMplsNEzm61kod2pLv0kmzOLBQJZo6NrRa1xxsJYTvjIKID
-gI6tflEATseWhvtDmHd9KMeP2Cpu54Rvl0EpABZeTeIT6lnAY2c6RPuY/ATTMHKm
-9ocJV612ph1jmv3XZch4gyt1O6VbuA1df74jrlZVlFjvH4GMKrLN5ptjnhi85WsG
-tAuYSyher4hYyw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
-# Label: "Certinomis - Root CA"
-# Serial: 1
-# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
-# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
-# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
------BEGIN CERTIFICATE-----
-MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
-MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
-BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
-MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
-FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
-Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
-fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
-LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
-WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
-TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
-5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
-CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
-wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
-wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
-m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
-F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
-WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
-2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
-AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
-0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
-F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
-g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
-qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
-h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
-ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
-btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
-Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
-8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
-gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
------END CERTIFICATE-----
-# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Secure Server CA"
-# Serial: 927650371
-# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
-# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
-# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Label: "ValiCert Class 2 VA"
-# Serial: 1
-# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
-# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
-# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Expressz (Class C) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Express (Class C) Root"
-# Serial: 104
-# MD5 Fingerprint: 4f:eb:f1:f0:70:c2:80:63:5d:58:9f:da:12:3c:a9:c4
-# SHA1 Fingerprint: e3:92:51:2f:0a:cf:f5:05:df:f6:de:06:7f:75:37:e1:65:ea:57:4b
-# SHA256 Fingerprint: 0b:5e:ed:4e:84:64:03:cf:55:e0:65:84:84:40:ed:2a:82:75:8b:f5:b9:aa:1f:25:3d:46:13:cf:a0:80:ff:3f
------BEGIN CERTIFICATE-----
-MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
-EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
-DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
-DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
-c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
-TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
-OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
-2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
-RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
-AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
-ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
-YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
-b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
-ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
-IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
-b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
-ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
-YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
-a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
-SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
-aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
-YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
-Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
-ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
-pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
-Fp1hBWeAyNDYpQcCNJgEjTME1A==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Subject: CN=NetLock Uzleti (Class B) Tanusitvanykiado O=NetLock Halozatbiztonsagi Kft. OU=Tanusitvanykiadok
-# Label: "NetLock Business (Class B) Root"
-# Serial: 105
-# MD5 Fingerprint: 39:16:aa:b9:6a:41:e1:14:69:df:9e:6c:3b:72:dc:b6
-# SHA1 Fingerprint: 87:9f:4b:ee:05:df:98:58:3b:e3:60:d6:33:e7:0d:3f:fe:98:71:af
-# SHA256 Fingerprint: 39:df:7b:68:2b:7b:93:8f:84:71:54:81:cc:de:8d:60:d8:f2:2e:c5:98:87:7d:0a:aa:c1:2b:59:18:2b:03:12
------BEGIN CERTIFICATE-----
-MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
-ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
-b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
-EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
-OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
-A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
-Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
-dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
-SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
-gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
-iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
-Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
-BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
-SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
-b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
-bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
-Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
-aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
-IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
-c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
-biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
-ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
-UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
-YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
-dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
-bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
-sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
-n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
-NitjrFgBazMpUIaD8QFI
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Label: "RSA Root Certificate 1"
-# Serial: 1
-# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
-# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
-# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Label: "ValiCert Class 1 VA"
-# Serial: 1
-# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
-# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
-# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure eBusiness CA 1"
-# Serial: 4
-# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
-# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
-# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure Global eBusiness CA"
-# Serial: 1
-# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
-# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
-# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Premium Server CA"
-# Serial: 1
-# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
-# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
-# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
------BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Server CA"
-# Serial: 1
-# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
-# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
-# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
------BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 149843929435818692848040365716851702463
-# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
-# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
-# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
-lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
-AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
-# Label: "Verisign Class 3 Public Primary Certification Authority"
-# Serial: 80507572722862485515306429940691309246
-# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
-# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
-# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
-BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
-I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
-CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
-2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
-2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
-# Serial: 167285380242319648451154478808036881606
-# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
-# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
-# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
------END CERTIFICATE-----
-
-# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Label: "GTE CyberTrust Global Root"
-# Serial: 421
-# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
-# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
-# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
------BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
------END CERTIFICATE-----
diff --git a/tools/swarming_client/third_party/requests/certs.py b/tools/swarming_client/third_party/requests/certs.py
deleted file mode 100644
index 07e6475..0000000
--- a/tools/swarming_client/third_party/requests/certs.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""
-certs.py
-~~~~~~~~
-
-This module returns the preferred default CA certificate bundle.
-
-If you are packaging Requests, e.g., for a Linux distribution or a managed
-environment, you can change the definition of where() to return a separately
-packaged CA bundle.
-"""
-import os.path
-
-try:
-    from certifi import where
-except ImportError:
-    def where():
-        """Return the preferred certificate bundle."""
-        # vendored bundle inside Requests
-        return os.path.join(os.path.dirname(__file__), 'cacert.pem')
-
-if __name__ == '__main__':
-    print(where())
diff --git a/tools/swarming_client/third_party/requests/compat.py b/tools/swarming_client/third_party/requests/compat.py
deleted file mode 100644
index b5e5d11..0000000
--- a/tools/swarming_client/third_party/requests/compat.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-pythoncompat
-"""
-
-chardet = None
-
-import sys
-
-# -------
-# Pythons
-# -------
-
-# Syntax sugar.
-_ver = sys.version_info
-
-#: Python 2.x?
-is_py2 = (_ver[0] == 2)
-
-#: Python 3.x?
-is_py3 = (_ver[0] == 3)
-
-try:
-    import simplejson as json
-except (ImportError, SyntaxError):
-    # simplejson does not support Python 3.2, it throws a SyntaxError
-    # because of u'...' Unicode literals.
-    import json
-
-# ---------
-# Specifics
-# ---------
-
-if is_py2:
-    from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
-    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
-    from urllib2 import parse_http_list
-    import cookielib
-    from Cookie import Morsel
-    from StringIO import StringIO
-    from .packages.urllib3.packages.ordered_dict import OrderedDict
-
-    builtin_str = str
-    bytes = str
-    str = unicode
-    basestring = basestring
-    numeric_types = (int, long, float)
-
-elif is_py3:
-    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
-    from urllib.request import parse_http_list, getproxies, proxy_bypass
-    from http import cookiejar as cookielib
-    from http.cookies import Morsel
-    from io import StringIO
-    from collections import OrderedDict
-
-    builtin_str = str
-    str = str
-    bytes = bytes
-    basestring = (str, bytes)
-    numeric_types = (int, float)
diff --git a/tools/swarming_client/third_party/requests/cookies.py b/tools/swarming_client/third_party/requests/cookies.py
deleted file mode 100644
index b85fd2b..0000000
--- a/tools/swarming_client/third_party/requests/cookies.py
+++ /dev/null
@@ -1,487 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-Compatibility code to be able to use `cookielib.CookieJar` with requests.
-
-requests.utils imports from here, so be careful with imports.
-"""
-
-import copy
-import time
-import calendar
-import collections
-from .compat import cookielib, urlparse, urlunparse, Morsel
-
-try:
-    import threading
-    # grr, pyflakes: this fixes "redefinition of unused 'threading'"
-    threading
-except ImportError:
-    import dummy_threading as threading
-
-
-class MockRequest(object):
-    """Wraps a `requests.Request` to mimic a `urllib2.Request`.
-
-    The code in `cookielib.CookieJar` expects this interface in order to correctly
-    manage cookie policies, i.e., determine whether a cookie can be set, given the
-    domains of the request and the cookie.
-
-    The original request object is read-only. The client is responsible for collecting
-    the new headers via `get_new_headers()` and interpreting them appropriately. You
-    probably want `get_cookie_header`, defined below.
-    """
-
-    def __init__(self, request):
-        self._r = request
-        self._new_headers = {}
-        self.type = urlparse(self._r.url).scheme
-
-    def get_type(self):
-        return self.type
-
-    def get_host(self):
-        return urlparse(self._r.url).netloc
-
-    def get_origin_req_host(self):
-        return self.get_host()
-
-    def get_full_url(self):
-        # Only return the response's URL if the user hadn't set the Host
-        # header
-        if not self._r.headers.get('Host'):
-            return self._r.url
-        # If they did set it, retrieve it and reconstruct the expected domain
-        host = self._r.headers['Host']
-        parsed = urlparse(self._r.url)
-        # Reconstruct the URL as we expect it
-        return urlunparse([
-            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
-            parsed.fragment
-        ])
-
-    def is_unverifiable(self):
-        return True
-
-    def has_header(self, name):
-        return name in self._r.headers or name in self._new_headers
-
-    def get_header(self, name, default=None):
-        return self._r.headers.get(name, self._new_headers.get(name, default))
-
-    def add_header(self, key, val):
-        """cookielib has no legitimate use for this method; add it back if you find one."""
-        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
-
-    def add_unredirected_header(self, name, value):
-        self._new_headers[name] = value
-
-    def get_new_headers(self):
-        return self._new_headers
-
-    @property
-    def unverifiable(self):
-        return self.is_unverifiable()
-
-    @property
-    def origin_req_host(self):
-        return self.get_origin_req_host()
-
-    @property
-    def host(self):
-        return self.get_host()
-
-
-class MockResponse(object):
-    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
-
-    ...what? Basically, expose the parsed HTTP headers from the server response
-    the way `cookielib` expects to see them.
-    """
-
-    def __init__(self, headers):
-        """Make a MockResponse for `cookielib` to read.
-
-        :param headers: a httplib.HTTPMessage or analogous carrying the headers
-        """
-        self._headers = headers
-
-    def info(self):
-        return self._headers
-
-    def getheaders(self, name):
-        return self._headers.getheaders(name)
-
-
-def extract_cookies_to_jar(jar, request, response):
-    """Extract the cookies from the response into a CookieJar.
-
-    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
-    :param request: our own requests.Request object
-    :param response: urllib3.HTTPResponse object
-    """
-    if not (hasattr(response, '_original_response') and
-            response._original_response):
-        return
-    # the _original_response field is the wrapped httplib.HTTPResponse object,
-    req = MockRequest(request)
-    # pull out the HTTPMessage with the headers and put it in the mock:
-    res = MockResponse(response._original_response.msg)
-    jar.extract_cookies(res, req)
-
-
-def get_cookie_header(jar, request):
-    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
-    r = MockRequest(request)
-    jar.add_cookie_header(r)
-    return r.get_new_headers().get('Cookie')
-
-
-def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
-    """Unsets a cookie by name, by default over all domains and paths.
-
-    Wraps CookieJar.clear(), is O(n).
-    """
-    clearables = []
-    for cookie in cookiejar:
-        if cookie.name != name:
-            continue
-        if domain is not None and domain != cookie.domain:
-            continue
-        if path is not None and path != cookie.path:
-            continue
-        clearables.append((cookie.domain, cookie.path, cookie.name))
-
-    for domain, path, name in clearables:
-        cookiejar.clear(domain, path, name)
-
-
-class CookieConflictError(RuntimeError):
-    """There are two cookies that meet the criteria specified in the cookie jar.
-    Use .get and .set and include domain and path args in order to be more specific."""
-
-
-class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
-    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
-    interface.
-
-    This is the CookieJar we create by default for requests and sessions that
-    don't specify one, since some clients may expect response.cookies and
-    session.cookies to support dict operations.
-
-    Requests does not use the dict interface internally; it's just for
-    compatibility with external client code. All requests code should work
-    out of the box with externally provided instances of ``CookieJar``, e.g.
-    ``LWPCookieJar`` and ``FileCookieJar``.
-
-    Unlike a regular CookieJar, this class is pickleable.
-
-    .. warning:: dictionary operations that are normally O(1) may be O(n).
-    """
-    def get(self, name, default=None, domain=None, path=None):
-        """Dict-like get() that also supports optional domain and path args in
-        order to resolve naming collisions from using one cookie jar over
-        multiple domains.
-
-        .. warning:: operation is O(n), not O(1)."""
-        try:
-            return self._find_no_duplicates(name, domain, path)
-        except KeyError:
-            return default
-
-    def set(self, name, value, **kwargs):
-        """Dict-like set() that also supports optional domain and path args in
-        order to resolve naming collisions from using one cookie jar over
-        multiple domains."""
-        # support client code that unsets cookies by assignment of a None value:
-        if value is None:
-            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
-            return
-
-        if isinstance(value, Morsel):
-            c = morsel_to_cookie(value)
-        else:
-            c = create_cookie(name, value, **kwargs)
-        self.set_cookie(c)
-        return c
-
-    def iterkeys(self):
-        """Dict-like iterkeys() that returns an iterator of names of cookies
-        from the jar. See itervalues() and iteritems()."""
-        for cookie in iter(self):
-            yield cookie.name
-
-    def keys(self):
-        """Dict-like keys() that returns a list of names of cookies from the
-        jar. See values() and items()."""
-        return list(self.iterkeys())
-
-    def itervalues(self):
-        """Dict-like itervalues() that returns an iterator of values of cookies
-        from the jar. See iterkeys() and iteritems()."""
-        for cookie in iter(self):
-            yield cookie.value
-
-    def values(self):
-        """Dict-like values() that returns a list of values of cookies from the
-        jar. See keys() and items()."""
-        return list(self.itervalues())
-
-    def iteritems(self):
-        """Dict-like iteritems() that returns an iterator of name-value tuples
-        from the jar. See iterkeys() and itervalues()."""
-        for cookie in iter(self):
-            yield cookie.name, cookie.value
-
-    def items(self):
-        """Dict-like items() that returns a list of name-value tuples from the
-        jar. See keys() and values(). Allows client-code to call
-        ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
-        pairs."""
-        return list(self.iteritems())
-
-    def list_domains(self):
-        """Utility method to list all the domains in the jar."""
-        domains = []
-        for cookie in iter(self):
-            if cookie.domain not in domains:
-                domains.append(cookie.domain)
-        return domains
-
-    def list_paths(self):
-        """Utility method to list all the paths in the jar."""
-        paths = []
-        for cookie in iter(self):
-            if cookie.path not in paths:
-                paths.append(cookie.path)
-        return paths
-
-    def multiple_domains(self):
-        """Returns True if there are multiple domains in the jar.
-        Returns False otherwise."""
-        domains = []
-        for cookie in iter(self):
-            if cookie.domain is not None and cookie.domain in domains:
-                return True
-            domains.append(cookie.domain)
-        return False  # there is only one domain in jar
-
-    def get_dict(self, domain=None, path=None):
-        """Takes as an argument an optional domain and path and returns a plain
-        old Python dict of name-value pairs of cookies that meet the
-        requirements."""
-        dictionary = {}
-        for cookie in iter(self):
-            if (domain is None or cookie.domain == domain) and (path is None
-                                                or cookie.path == path):
-                dictionary[cookie.name] = cookie.value
-        return dictionary
-
-    def __getitem__(self, name):
-        """Dict-like __getitem__() for compatibility with client code. Throws
-        exception if there are more than one cookie with name. In that case,
-        use the more explicit get() method instead.
-
-        .. warning:: operation is O(n), not O(1)."""
-
-        return self._find_no_duplicates(name)
-
-    def __setitem__(self, name, value):
-        """Dict-like __setitem__ for compatibility with client code. Throws
-        exception if there is already a cookie of that name in the jar. In that
-        case, use the more explicit set() method instead."""
-
-        self.set(name, value)
-
-    def __delitem__(self, name):
-        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
-        ``remove_cookie_by_name()``."""
-        remove_cookie_by_name(self, name)
-
-    def set_cookie(self, cookie, *args, **kwargs):
-        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
-            cookie.value = cookie.value.replace('\\"', '')
-        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
-
-    def update(self, other):
-        """Updates this jar with cookies from another CookieJar or dict-like"""
-        if isinstance(other, cookielib.CookieJar):
-            for cookie in other:
-                self.set_cookie(copy.copy(cookie))
-        else:
-            super(RequestsCookieJar, self).update(other)
-
-    def _find(self, name, domain=None, path=None):
-        """Requests uses this method internally to get cookie values. Takes as
-        args name and optional domain and path. Returns a cookie.value. If
-        there are conflicting cookies, _find arbitrarily chooses one. See
-        _find_no_duplicates if you want an exception thrown if there are
-        conflicting cookies."""
-        for cookie in iter(self):
-            if cookie.name == name:
-                if domain is None or cookie.domain == domain:
-                    if path is None or cookie.path == path:
-                        return cookie.value
-
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
-    def _find_no_duplicates(self, name, domain=None, path=None):
-        """Both ``__get_item__`` and ``get`` call this function: it's never
-        used elsewhere in Requests. Takes as args name and optional domain and
-        path. Returns a cookie.value. Throws KeyError if cookie is not found
-        and CookieConflictError if there are multiple cookies that match name
-        and optionally domain and path."""
-        toReturn = None
-        for cookie in iter(self):
-            if cookie.name == name:
-                if domain is None or cookie.domain == domain:
-                    if path is None or cookie.path == path:
-                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
-                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
-                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
-
-        if toReturn:
-            return toReturn
-        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
-
-    def __getstate__(self):
-        """Unlike a normal CookieJar, this class is pickleable."""
-        state = self.__dict__.copy()
-        # remove the unpickleable RLock object
-        state.pop('_cookies_lock')
-        return state
-
-    def __setstate__(self, state):
-        """Unlike a normal CookieJar, this class is pickleable."""
-        self.__dict__.update(state)
-        if '_cookies_lock' not in self.__dict__:
-            self._cookies_lock = threading.RLock()
-
-    def copy(self):
-        """Return a copy of this RequestsCookieJar."""
-        new_cj = RequestsCookieJar()
-        new_cj.update(self)
-        return new_cj
-
-
-def _copy_cookie_jar(jar):
-    if jar is None:
-        return None
-
-    if hasattr(jar, 'copy'):
-        # We're dealing with an instance of RequestsCookieJar
-        return jar.copy()
-    # We're dealing with a generic CookieJar instance
-    new_jar = copy.copy(jar)
-    new_jar.clear()
-    for cookie in jar:
-        new_jar.set_cookie(copy.copy(cookie))
-    return new_jar
-
-
-def create_cookie(name, value, **kwargs):
-    """Make a cookie from underspecified parameters.
-
-    By default, the pair of `name` and `value` will be set for the domain ''
-    and sent on every request (this is sometimes called a "supercookie").
-    """
-    result = dict(
-        version=0,
-        name=name,
-        value=value,
-        port=None,
-        domain='',
-        path='/',
-        secure=False,
-        expires=None,
-        discard=True,
-        comment=None,
-        comment_url=None,
-        rest={'HttpOnly': None},
-        rfc2109=False,)
-
-    badargs = set(kwargs) - set(result)
-    if badargs:
-        err = 'create_cookie() got unexpected keyword arguments: %s'
-        raise TypeError(err % list(badargs))
-
-    result.update(kwargs)
-    result['port_specified'] = bool(result['port'])
-    result['domain_specified'] = bool(result['domain'])
-    result['domain_initial_dot'] = result['domain'].startswith('.')
-    result['path_specified'] = bool(result['path'])
-
-    return cookielib.Cookie(**result)
-
-
-def morsel_to_cookie(morsel):
-    """Convert a Morsel object into a Cookie containing the one k/v pair."""
-
-    expires = None
-    if morsel['max-age']:
-        try:
-            expires = int(time.time() + int(morsel['max-age']))
-        except ValueError:
-            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
-    elif morsel['expires']:
-        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
-        expires = calendar.timegm(
-            time.strptime(morsel['expires'], time_template)
-        )
-    return create_cookie(
-        comment=morsel['comment'],
-        comment_url=bool(morsel['comment']),
-        discard=False,
-        domain=morsel['domain'],
-        expires=expires,
-        name=morsel.key,
-        path=morsel['path'],
-        port=None,
-        rest={'HttpOnly': morsel['httponly']},
-        rfc2109=False,
-        secure=bool(morsel['secure']),
-        value=morsel.value,
-        version=morsel['version'] or 0,
-    )
-
-
-def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
-    """Returns a CookieJar from a key/value dictionary.
-
-    :param cookie_dict: Dict of key/values to insert into CookieJar.
-    :param cookiejar: (optional) A cookiejar to add the cookies to.
-    :param overwrite: (optional) If False, will not replace cookies
-        already in the jar with new ones.
-    """
-    if cookiejar is None:
-        cookiejar = RequestsCookieJar()
-
-    if cookie_dict is not None:
-        names_from_jar = [cookie.name for cookie in cookiejar]
-        for name in cookie_dict:
-            if overwrite or (name not in names_from_jar):
-                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
-
-    return cookiejar
-
-
-def merge_cookies(cookiejar, cookies):
-    """Add cookies to cookiejar and returns a merged CookieJar.
-
-    :param cookiejar: CookieJar object to add the cookies to.
-    :param cookies: Dictionary or CookieJar object to be added.
-    """
-    if not isinstance(cookiejar, cookielib.CookieJar):
-        raise ValueError('You can only merge into CookieJar')
-
-    if isinstance(cookies, dict):
-        cookiejar = cookiejar_from_dict(
-            cookies, cookiejar=cookiejar, overwrite=False)
-    elif isinstance(cookies, cookielib.CookieJar):
-        try:
-            cookiejar.update(cookies)
-        except AttributeError:
-            for cookie_in_jar in cookies:
-                cookiejar.set_cookie(cookie_in_jar)
-
-    return cookiejar
diff --git a/tools/swarming_client/third_party/requests/exceptions.py b/tools/swarming_client/third_party/requests/exceptions.py
deleted file mode 100644
index ba0b910..0000000
--- a/tools/swarming_client/third_party/requests/exceptions.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.exceptions
-~~~~~~~~~~~~~~~~~~~
-
-This module contains the set of Requests' exceptions.
-
-"""
-from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
-
-
-class RequestException(IOError):
-    """There was an ambiguous exception that occurred while handling your
-    request."""
-
-    def __init__(self, *args, **kwargs):
-        """
-        Initialize RequestException with `request` and `response` objects.
-        """
-        response = kwargs.pop('response', None)
-        self.response = response
-        self.request = kwargs.pop('request', None)
-        if (response is not None and not self.request and
-                hasattr(response, 'request')):
-            self.request = self.response.request
-        super(RequestException, self).__init__(*args, **kwargs)
-
-
-class HTTPError(RequestException):
-    """An HTTP error occurred."""
-
-
-class ConnectionError(RequestException):
-    """A Connection error occurred."""
-
-
-class ProxyError(ConnectionError):
-    """A proxy error occurred."""
-
-
-class SSLError(ConnectionError):
-    """An SSL error occurred."""
-
-
-class Timeout(RequestException):
-    """The request timed out.
-
-    Catching this error will catch both
-    :exc:`~requests.exceptions.ConnectTimeout` and
-    :exc:`~requests.exceptions.ReadTimeout` errors.
-    """
-
-
-class ConnectTimeout(ConnectionError, Timeout):
-    """The request timed out while trying to connect to the remote server.
-
-    Requests that produced this error are safe to retry.
-    """
-
-
-class ReadTimeout(Timeout):
-    """The server did not send any data in the allotted amount of time."""
-
-
-class URLRequired(RequestException):
-    """A valid URL is required to make a request."""
-
-
-class TooManyRedirects(RequestException):
-    """Too many redirects."""
-
-
-class MissingSchema(RequestException, ValueError):
-    """The URL schema (e.g. http or https) is missing."""
-
-
-class InvalidSchema(RequestException, ValueError):
-    """See defaults.py for valid schemas."""
-
-
-class InvalidURL(RequestException, ValueError):
-    """ The URL provided was somehow invalid. """
-
-
-class ChunkedEncodingError(RequestException):
-    """The server declared chunked encoding but sent an invalid chunk."""
-
-
-class ContentDecodingError(RequestException, BaseHTTPError):
-    """Failed to decode response content"""
-
-
-class StreamConsumedError(RequestException, TypeError):
-    """The content for this response was already consumed"""
-
-
-class RetryError(RequestException):
-    """Custom retries logic failed"""
-
-
-# Warnings
-
-
-class RequestsWarning(Warning):
-    """Base warning for Requests."""
-    pass
-
-
-class FileModeWarning(RequestsWarning, DeprecationWarning):
-    """
-    A file was opened in text mode, but Requests determined its binary length.
-    """
-    pass
diff --git a/tools/swarming_client/third_party/requests/hooks.py b/tools/swarming_client/third_party/requests/hooks.py
deleted file mode 100644
index 9da9436..0000000
--- a/tools/swarming_client/third_party/requests/hooks.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.hooks
-~~~~~~~~~~~~~~
-
-This module provides the capabilities for the Requests hooks system.
-
-Available hooks:
-
-``response``:
-    The response generated from a Request.
-
-"""
-HOOKS = ['response']
-
-def default_hooks():
-    return dict((event, []) for event in HOOKS)
-
-# TODO: response is the only one
-
-
-def dispatch_hook(key, hooks, hook_data, **kwargs):
-    """Dispatches a hook dictionary on a given piece of data."""
-    hooks = hooks or dict()
-    hooks = hooks.get(key)
-    if hooks:
-        if hasattr(hooks, '__call__'):
-            hooks = [hooks]
-        for hook in hooks:
-            _hook_data = hook(hook_data, **kwargs)
-            if _hook_data is not None:
-                hook_data = _hook_data
-    return hook_data
diff --git a/tools/swarming_client/third_party/requests/models.py b/tools/swarming_client/third_party/requests/models.py
deleted file mode 100644
index 407cc61..0000000
--- a/tools/swarming_client/third_party/requests/models.py
+++ /dev/null
@@ -1,851 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.models
-~~~~~~~~~~~~~~~
-
-This module contains the primary objects that power Requests.
-"""
-
-import collections
-import datetime
-
-from io import BytesIO, UnsupportedOperation
-from .hooks import default_hooks
-from .structures import CaseInsensitiveDict
-
-from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
-from .packages.urllib3.fields import RequestField
-from .packages.urllib3.filepost import encode_multipart_formdata
-from .packages.urllib3.util import parse_url
-from .packages.urllib3.exceptions import (
-    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
-from .exceptions import (
-    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
-    ContentDecodingError, ConnectionError, StreamConsumedError)
-from .utils import (
-    guess_filename, get_auth_from_url, requote_uri,
-    stream_decode_response_unicode, to_key_val_list, parse_header_links,
-    iter_slices, guess_json_utf, super_len, to_native_string)
-from .compat import (
-    cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
-    is_py2, chardet, builtin_str, basestring)
-from .compat import json as complexjson
-from .status_codes import codes
-
-#: The set of HTTP status codes that indicate an automatically
-#: processable redirect.
-REDIRECT_STATI = (
-    codes.moved,              # 301
-    codes.found,              # 302
-    codes.other,              # 303
-    codes.temporary_redirect, # 307
-    codes.permanent_redirect, # 308
-)
-
-DEFAULT_REDIRECT_LIMIT = 30
-CONTENT_CHUNK_SIZE = 10 * 1024
-ITER_CHUNK_SIZE = 512
-
-
-class RequestEncodingMixin(object):
-    @property
-    def path_url(self):
-        """Build the path URL to use."""
-
-        url = []
-
-        p = urlsplit(self.url)
-
-        path = p.path
-        if not path:
-            path = '/'
-
-        url.append(path)
-
-        query = p.query
-        if query:
-            url.append('?')
-            url.append(query)
-
-        return ''.join(url)
-
-    @staticmethod
-    def _encode_params(data):
-        """Encode parameters in a piece of data.
-
-        Will successfully encode parameters when passed as a dict or a list of
-        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
-        if parameters are supplied as a dict.
-        """
-
-        if isinstance(data, (str, bytes)):
-            return data
-        elif hasattr(data, 'read'):
-            return data
-        elif hasattr(data, '__iter__'):
-            result = []
-            for k, vs in to_key_val_list(data):
-                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
-                    vs = [vs]
-                for v in vs:
-                    if v is not None:
-                        result.append(
-                            (k.encode('utf-8') if isinstance(k, str) else k,
-                             v.encode('utf-8') if isinstance(v, str) else v))
-            return urlencode(result, doseq=True)
-        else:
-            return data
-
-    @staticmethod
-    def _encode_files(files, data):
-        """Build the body for a multipart/form-data request.
-
-        Will successfully encode files when passed as a dict or a list of
-        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
-        if parameters are supplied as a dict.
-
-        """
-        if (not files):
-            raise ValueError("Files must be provided.")
-        elif isinstance(data, basestring):
-            raise ValueError("Data must not be a string.")
-
-        new_fields = []
-        fields = to_key_val_list(data or {})
-        files = to_key_val_list(files or {})
-
-        for field, val in fields:
-            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
-                val = [val]
-            for v in val:
-                if v is not None:
-                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
-                    if not isinstance(v, bytes):
-                        v = str(v)
-
-                    new_fields.append(
-                        (field.decode('utf-8') if isinstance(field, bytes) else field,
-                         v.encode('utf-8') if isinstance(v, str) else v))
-
-        for (k, v) in files:
-            # support for explicit filename
-            ft = None
-            fh = None
-            if isinstance(v, (tuple, list)):
-                if len(v) == 2:
-                    fn, fp = v
-                elif len(v) == 3:
-                    fn, fp, ft = v
-                else:
-                    fn, fp, ft, fh = v
-            else:
-                fn = guess_filename(v) or k
-                fp = v
-
-            if isinstance(fp, (str, bytes, bytearray)):
-                fdata = fp
-            else:
-                fdata = fp.read()
-
-            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
-            rf.make_multipart(content_type=ft)
-            new_fields.append(rf)
-
-        body, content_type = encode_multipart_formdata(new_fields)
-
-        return body, content_type
-
-
-class RequestHooksMixin(object):
-    def register_hook(self, event, hook):
-        """Properly register a hook."""
-
-        if event not in self.hooks:
-            raise ValueError('Unsupported event specified, with event name "%s"' % (event))
-
-        if isinstance(hook, collections.Callable):
-            self.hooks[event].append(hook)
-        elif hasattr(hook, '__iter__'):
-            self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
-
-    def deregister_hook(self, event, hook):
-        """Deregister a previously registered hook.
-        Returns True if the hook existed, False if not.
-        """
-
-        try:
-            self.hooks[event].remove(hook)
-            return True
-        except ValueError:
-            return False
-
-
-class Request(RequestHooksMixin):
-    """A user-created :class:`Request <Request>` object.
-
-    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
-
-    :param method: HTTP method to use.
-    :param url: URL to send.
-    :param headers: dictionary of headers to send.
-    :param files: dictionary of {filename: fileobject} files to multipart upload.
-    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
-    :param json: json for the body to attach to the request (if files or data is not specified).
-    :param params: dictionary of URL parameters to append to the URL.
-    :param auth: Auth handler or (user, pass) tuple.
-    :param cookies: dictionary or CookieJar of cookies to attach to this request.
-    :param hooks: dictionary of callback hooks, for internal usage.
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.Request('GET', 'http://httpbin.org/get')
-      >>> req.prepare()
-      <PreparedRequest [GET]>
-
-    """
-    def __init__(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
-
-        # Default empty dicts for dict params.
-        data = [] if data is None else data
-        files = [] if files is None else files
-        headers = {} if headers is None else headers
-        params = {} if params is None else params
-        hooks = {} if hooks is None else hooks
-
-        self.hooks = default_hooks()
-        for (k, v) in list(hooks.items()):
-            self.register_hook(event=k, hook=v)
-
-        self.method = method
-        self.url = url
-        self.headers = headers
-        self.files = files
-        self.data = data
-        self.json = json
-        self.params = params
-        self.auth = auth
-        self.cookies = cookies
-
-    def __repr__(self):
-        return '<Request [%s]>' % (self.method)
-
-    def prepare(self):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
-        p = PreparedRequest()
-        p.prepare(
-            method=self.method,
-            url=self.url,
-            headers=self.headers,
-            files=self.files,
-            data=self.data,
-            json=self.json,
-            params=self.params,
-            auth=self.auth,
-            cookies=self.cookies,
-            hooks=self.hooks,
-        )
-        return p
-
-
-class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
-    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
-    containing the exact bytes that will be sent to the server.
-
-    Generated from either a :class:`Request <Request>` object or manually.
-
-    Usage::
-
-      >>> import requests
-      >>> req = requests.Request('GET', 'http://httpbin.org/get')
-      >>> r = req.prepare()
-      <PreparedRequest [GET]>
-
-      >>> s = requests.Session()
-      >>> s.send(r)
-      <Response [200]>
-
-    """
-
-    def __init__(self):
-        #: HTTP verb to send to the server.
-        self.method = None
-        #: HTTP URL to send the request to.
-        self.url = None
-        #: dictionary of HTTP headers.
-        self.headers = None
-        # The `CookieJar` used to create the Cookie header will be stored here
-        # after prepare_cookies is called
-        self._cookies = None
-        #: request body to send to the server.
-        self.body = None
-        #: dictionary of callback hooks, for internal usage.
-        self.hooks = default_hooks()
-
-    def prepare(self, method=None, url=None, headers=None, files=None,
-        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
-        """Prepares the entire request with the given parameters."""
-
-        self.prepare_method(method)
-        self.prepare_url(url, params)
-        self.prepare_headers(headers)
-        self.prepare_cookies(cookies)
-        self.prepare_body(data, files, json)
-        self.prepare_auth(auth, url)
-
-        # Note that prepare_auth must be last to enable authentication schemes
-        # such as OAuth to work on a fully prepared request.
-
-        # This MUST go after prepare_auth. Authenticators could add a hook
-        self.prepare_hooks(hooks)
-
-    def __repr__(self):
-        return '<PreparedRequest [%s]>' % (self.method)
-
-    def copy(self):
-        p = PreparedRequest()
-        p.method = self.method
-        p.url = self.url
-        p.headers = self.headers.copy() if self.headers is not None else None
-        p._cookies = _copy_cookie_jar(self._cookies)
-        p.body = self.body
-        p.hooks = self.hooks
-        return p
-
-    def prepare_method(self, method):
-        """Prepares the given HTTP method."""
-        self.method = method
-        if self.method is not None:
-            self.method = to_native_string(self.method.upper())
-
-    def prepare_url(self, url, params):
-        """Prepares the given HTTP URL."""
-        #: Accept objects that have string representations.
-        #: We're unable to blindly call unicode/str functions
-        #: as this will include the bytestring indicator (b'')
-        #: on python 3.x.
-        #: https://github.com/kennethreitz/requests/pull/2238
-        if isinstance(url, bytes):
-            url = url.decode('utf8')
-        else:
-            url = unicode(url) if is_py2 else str(url)
-
-        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
-        # `data` etc to work around exceptions from `url_parse`, which
-        # handles RFC 3986 only.
-        if ':' in url and not url.lower().startswith('http'):
-            self.url = url
-            return
-
-        # Support for unicode domain names and paths.
-        try:
-            scheme, auth, host, port, path, query, fragment = parse_url(url)
-        except LocationParseError as e:
-            raise InvalidURL(*e.args)
-
-        if not scheme:
-            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
-            error = error.format(to_native_string(url, 'utf8'))
-
-            raise MissingSchema(error)
-
-        if not host:
-            raise InvalidURL("Invalid URL %r: No host supplied" % url)
-
-        # Only want to apply IDNA to the hostname
-        try:
-            host = host.encode('idna').decode('utf-8')
-        except UnicodeError:
-            raise InvalidURL('URL has an invalid label.')
-
-        # Carefully reconstruct the network location
-        netloc = auth or ''
-        if netloc:
-            netloc += '@'
-        netloc += host
-        if port:
-            netloc += ':' + str(port)
-
-        # Bare domains aren't valid URLs.
-        if not path:
-            path = '/'
-
-        if is_py2:
-            if isinstance(scheme, str):
-                scheme = scheme.encode('utf-8')
-            if isinstance(netloc, str):
-                netloc = netloc.encode('utf-8')
-            if isinstance(path, str):
-                path = path.encode('utf-8')
-            if isinstance(query, str):
-                query = query.encode('utf-8')
-            if isinstance(fragment, str):
-                fragment = fragment.encode('utf-8')
-
-        if isinstance(params, (str, bytes)):
-            params = to_native_string(params)
-
-        enc_params = self._encode_params(params)
-        if enc_params:
-            if query:
-                query = '%s&%s' % (query, enc_params)
-            else:
-                query = enc_params
-
-        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
-        self.url = url
-
-    def prepare_headers(self, headers):
-        """Prepares the given HTTP headers."""
-
-        if headers:
-            self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
-        else:
-            self.headers = CaseInsensitiveDict()
-
-    def prepare_body(self, data, files, json=None):
-        """Prepares the given HTTP body data."""
-
-        # Check if file, fo, generator, iterator.
-        # If not, run through normal process.
-
-        # Nottin' on you.
-        body = None
-        content_type = None
-        length = None
-
-        if not data and json is not None:
-            content_type = 'application/json'
-            body = complexjson.dumps(json)
-
-        is_stream = all([
-            hasattr(data, '__iter__'),
-            not isinstance(data, (basestring, list, tuple, dict))
-        ])
-
-        try:
-            length = super_len(data)
-        except (TypeError, AttributeError, UnsupportedOperation):
-            length = None
-
-        if is_stream:
-            body = data
-
-            if files:
-                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
-
-            if length:
-                self.headers['Content-Length'] = builtin_str(length)
-            else:
-                self.headers['Transfer-Encoding'] = 'chunked'
-        else:
-            # Multi-part file uploads.
-            if files:
-                (body, content_type) = self._encode_files(files, data)
-            else:
-                if data:
-                    body = self._encode_params(data)
-                    if isinstance(data, basestring) or hasattr(data, 'read'):
-                        content_type = None
-                    else:
-                        content_type = 'application/x-www-form-urlencoded'
-
-            self.prepare_content_length(body)
-
-            # Add content-type if it wasn't explicitly provided.
-            if content_type and ('content-type' not in self.headers):
-                self.headers['Content-Type'] = content_type
-
-        self.body = body
-
-    def prepare_content_length(self, body):
-        if hasattr(body, 'seek') and hasattr(body, 'tell'):
-            body.seek(0, 2)
-            self.headers['Content-Length'] = builtin_str(body.tell())
-            body.seek(0, 0)
-        elif body is not None:
-            l = super_len(body)
-            if l:
-                self.headers['Content-Length'] = builtin_str(l)
-        elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
-            self.headers['Content-Length'] = '0'
-
-    def prepare_auth(self, auth, url=''):
-        """Prepares the given HTTP auth data."""
-
-        # If no Auth is explicitly provided, extract it from the URL first.
-        if auth is None:
-            url_auth = get_auth_from_url(self.url)
-            auth = url_auth if any(url_auth) else None
-
-        if auth:
-            if isinstance(auth, tuple) and len(auth) == 2:
-                # special-case basic HTTP auth
-                auth = HTTPBasicAuth(*auth)
-
-            # Allow auth to make its changes.
-            r = auth(self)
-
-            # Update self to reflect the auth changes.
-            self.__dict__.update(r.__dict__)
-
-            # Recompute Content-Length
-            self.prepare_content_length(self.body)
-
-    def prepare_cookies(self, cookies):
-        """Prepares the given HTTP cookie data.
-
-        This function eventually generates a ``Cookie`` header from the
-        given cookies using cookielib. Due to cookielib's design, the header
-        will not be regenerated if it already exists, meaning this function
-        can only be called once for the life of the
-        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
-        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
-        header is removed beforehand."""
-
-        if isinstance(cookies, cookielib.CookieJar):
-            self._cookies = cookies
-        else:
-            self._cookies = cookiejar_from_dict(cookies)
-
-        cookie_header = get_cookie_header(self._cookies, self)
-        if cookie_header is not None:
-            self.headers['Cookie'] = cookie_header
-
-    def prepare_hooks(self, hooks):
-        """Prepares the given hooks."""
-        # hooks can be passed as None to the prepare method and to this
-        # method. To prevent iterating over None, simply use an empty list
-        # if hooks is False-y
-        hooks = hooks or []
-        for event in hooks:
-            self.register_hook(event, hooks[event])
-
-
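A minimal usage sketch of the hook machinery registered above, assuming upstream requests behaviour where 'response' is the only dispatched hook event; the endpoint is an example and needs network access.

import requests

def log_status(response, *args, **kwargs):
    # Hook callables receive the finished Response plus the transport kwargs.
    print('got', response.status_code, 'for', response.url)

requests.get('https://httpbin.org/get', hooks={'response': log_status})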
-class Response(object):
-    """The :class:`Response <Response>` object, which contains a
-    server's response to an HTTP request.
-    """
-
-    __attrs__ = [
-        '_content', 'status_code', 'headers', 'url', 'history',
-        'encoding', 'reason', 'cookies', 'elapsed', 'request'
-    ]
-
-    def __init__(self):
-        super(Response, self).__init__()
-
-        self._content = False
-        self._content_consumed = False
-
-        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
-        self.status_code = None
-
-        #: Case-insensitive Dictionary of Response Headers.
-        #: For example, ``headers['content-encoding']`` will return the
-        #: value of a ``'Content-Encoding'`` response header.
-        self.headers = CaseInsensitiveDict()
-
-        #: File-like object representation of response (for advanced usage).
-        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
-        # This requirement does not apply for use internally to Requests.
-        self.raw = None
-
-        #: Final URL location of Response.
-        self.url = None
-
-        #: Encoding to decode with when accessing r.text.
-        self.encoding = None
-
-        #: A list of :class:`Response <Response>` objects from
-        #: the history of the Request. Any redirect responses will end
-        #: up here. The list is sorted from the oldest to the most recent request.
-        self.history = []
-
-        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
-        self.reason = None
-
-        #: A CookieJar of Cookies the server sent back.
-        self.cookies = cookiejar_from_dict({})
-
-        #: The amount of time elapsed between sending the request
-        #: and the arrival of the response (as a timedelta).
-        #: This property specifically measures the time taken between sending
-        #: the first byte of the request and finishing parsing the headers. It
-        #: is therefore unaffected by consuming the response content or the
-        #: value of the ``stream`` keyword argument.
-        self.elapsed = datetime.timedelta(0)
-
-        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
-        #: is a response.
-        self.request = None
-
-    def __getstate__(self):
-        # Consume everything; accessing the content attribute makes
-        # sure the content has been fully read.
-        if not self._content_consumed:
-            self.content
-
-        return dict(
-            (attr, getattr(self, attr, None))
-            for attr in self.__attrs__
-        )
-
-    def __setstate__(self, state):
-        for name, value in state.items():
-            setattr(self, name, value)
-
-        # pickled objects do not have .raw
-        setattr(self, '_content_consumed', True)
-        setattr(self, 'raw', None)
-
-    def __repr__(self):
-        return '<Response [%s]>' % (self.status_code)
-
-    def __bool__(self):
-        """Returns true if :attr:`status_code` is 'OK'."""
-        return self.ok
-
-    def __nonzero__(self):
-        """Returns true if :attr:`status_code` is 'OK'."""
-        return self.ok
-
-    def __iter__(self):
-        """Allows you to use a response as an iterator."""
-        return self.iter_content(128)
-
-    @property
-    def ok(self):
-        try:
-            self.raise_for_status()
-        except HTTPError:
-            return False
-        return True
-
-    @property
-    def is_redirect(self):
-        """True if this Response is a well-formed HTTP redirect that could have
-        been processed automatically (by :meth:`Session.resolve_redirects`).
-        """
-        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
-
-    @property
-    def is_permanent_redirect(self):
-        """True if this Response one of the permanent versions of redirect"""
-        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
-
-    @property
-    def apparent_encoding(self):
-        # TODO(maruel): charade is really large, so it is trimmed off.
-        return 'utf-8'
-
-    def iter_content(self, chunk_size=1, decode_unicode=False):
-        """Iterates over the response data.  When stream=True is set on the
-        request, this avoids reading the content at once into memory for
-        large responses.  The chunk size is the number of bytes it should
-        read into memory.  This is not necessarily the length of each item
-        returned as decoding can take place.
-
-        If decode_unicode is True, content will be decoded using the best
-        available encoding based on the response.
-        """
-
-        def generate():
-            # Special case for urllib3.
-            if hasattr(self.raw, 'stream'):
-                try:
-                    for chunk in self.raw.stream(chunk_size, decode_content=True):
-                        yield chunk
-                except ProtocolError as e:
-                    raise ChunkedEncodingError(e)
-                except DecodeError as e:
-                    raise ContentDecodingError(e)
-                except ReadTimeoutError as e:
-                    raise ConnectionError(e)
-            else:
-                # Standard file-like object.
-                while True:
-                    chunk = self.raw.read(chunk_size)
-                    if not chunk:
-                        break
-                    yield chunk
-
-            self._content_consumed = True
-
-        if self._content_consumed and isinstance(self._content, bool):
-            raise StreamConsumedError()
-        # simulate reading small chunks of the content
-        reused_chunks = iter_slices(self._content, chunk_size)
-
-        stream_chunks = generate()
-
-        chunks = reused_chunks if self._content_consumed else stream_chunks
-
-        if decode_unicode:
-            chunks = stream_decode_response_unicode(chunks, self)
-
-        return chunks
-
-    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
-        """Iterates over the response data, one line at a time.  When
-        stream=True is set on the request, this avoids reading the
-        content at once into memory for large responses.
-
-        .. note:: This method is not reentrant safe.
-        """
-
-        pending = None
-
-        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
-
-            if pending is not None:
-                chunk = pending + chunk
-
-            if delimiter:
-                lines = chunk.split(delimiter)
-            else:
-                lines = chunk.splitlines()
-
-            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
-                pending = lines.pop()
-            else:
-                pending = None
-
-            for line in lines:
-                yield line
-
-        if pending is not None:
-            yield pending
-
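For reference, a short usage sketch of the streaming interface documented above, assuming upstream requests semantics; the httpbin.org endpoints are examples and require network access.

import requests

# stream=True defers downloading the body; iterate instead of loading it at once.
r = requests.get('https://httpbin.org/stream/3', stream=True)
for line in r.iter_lines(chunk_size=512):
    if line:  # skip keep-alive newlines
        print(line[:60])

# iter_content yields raw byte chunks of roughly chunk_size bytes each.
r = requests.get('https://httpbin.org/bytes/1024', stream=True)
total = sum(len(chunk) for chunk in r.iter_content(chunk_size=256))
print(total)  # 1024
r.close()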
-    @property
-    def content(self):
-        """Content of the response, in bytes."""
-
-        if self._content is False:
-            # Read the contents.
-            try:
-                if self._content_consumed:
-                    raise RuntimeError(
-                        'The content for this response was already consumed')
-
-                if self.status_code == 0:
-                    self._content = None
-                else:
-                    self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
-
-            except AttributeError:
-                self._content = None
-
-        self._content_consumed = True
-        # don't need to release the connection; that's been handled by urllib3
-        # since we exhausted the data.
-        return self._content
-
-    @property
-    def text(self):
-        """Content of the response, in unicode.
-
-        If Response.encoding is None, the encoding falls back to
-        :attr:`apparent_encoding`, which is hard-coded to UTF-8 in this
-        trimmed copy because chardet is not vendored.
-
-        The encoding of the response content is determined based solely on HTTP
-        headers, following RFC 2616 to the letter. If you can take advantage of
-        non-HTTP knowledge to make a better guess at the encoding, you should
-        set ``r.encoding`` appropriately before accessing this property.
-        """
-
-        # Try charset from content-type
-        content = None
-        encoding = self.encoding
-
-        if not self.content:
-            return str('')
-
-        # Fallback to auto-detected encoding.
-        if self.encoding is None:
-            encoding = self.apparent_encoding
-
-        # Decode unicode from given encoding.
-        try:
-            content = str(self.content, encoding, errors='replace')
-        except (LookupError, TypeError):
-            # A LookupError is raised if the encoding was not found which could
-            # indicate a misspelling or similar mistake.
-            #
-            # A TypeError can be raised if encoding is None
-            #
-            # So we try blindly encoding.
-            content = str(self.content, errors='replace')
-
-        return content
-
-    def json(self, **kwargs):
-        """Returns the json-encoded content of a response, if any.
-
-        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
-        """
-
-        if not self.encoding and len(self.content) > 3:
-            # No encoding set. JSON RFC 4627 section 3 states we should expect
-            # UTF-8, -16 or -32. Detect which one to use; if the detection or
-            # decoding fails, fall back to `self.text` (which decodes as UTF-8
-            # in this trimmed copy, since chardet is not vendored).
-            encoding = guess_json_utf(self.content)
-            if encoding is not None:
-                try:
-                    return complexjson.loads(
-                        self.content.decode(encoding), **kwargs
-                    )
-                except UnicodeDecodeError:
-                    # Wrong UTF codec detected; usually because it's not UTF-8
-                    # but some other 8-bit codec.  This is an RFC violation,
-                    # and the server didn't bother to tell us what codec *was*
-                    # used.
-                    pass
-        return complexjson.loads(self.text, **kwargs)
-
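The block above leans on guess_json_utf and RFC 4627 section 3, which says the UTF flavour of a JSON text can be inferred from the null-byte pattern of its first four octets. Below is a self-contained sketch of that detection; sniff_json_encoding is an illustrative name rather than requests' helper, and BOM handling is omitted.

import json

def sniff_json_encoding(data):
    # RFC 4627 section 3: the first two characters of JSON are ASCII, so the
    # placement of zero bytes in the first four octets reveals the encoding.
    sample = data[:4]
    if len(sample) >= 4:
        if sample[:3] == b'\x00\x00\x00':
            return 'utf-32-be'
        if sample[1:] == b'\x00\x00\x00':
            return 'utf-32-le'
    if len(sample) >= 2:
        if sample[0:1] == b'\x00' and sample[2:3] == b'\x00':
            return 'utf-16-be'
        if sample[1:2] == b'\x00' and sample[3:4] == b'\x00':
            return 'utf-16-le'
    return 'utf-8'

payload = '{"ok": true}'.encode('utf-16-le')
encoding = sniff_json_encoding(payload)
print(encoding)                              # utf-16-le
print(json.loads(payload.decode(encoding)))  # {'ok': True}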
-    @property
-    def links(self):
-        """Returns the parsed header links of the response, if any."""
-
-        header = self.headers.get('link')
-
-        # l = MultiDict()
-        l = {}
-
-        if header:
-            links = parse_header_links(header)
-
-            for link in links:
-                key = link.get('rel') or link.get('url')
-                l[key] = link
-
-        return l
-
-    def raise_for_status(self):
-        """Raises stored :class:`HTTPError`, if one occurred."""
-
-        http_error_msg = ''
-
-        if 400 <= self.status_code < 500:
-            http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
-
-        elif 500 <= self.status_code < 600:
-            http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
-
-        if http_error_msg:
-            raise HTTPError(http_error_msg, response=self)
-
-    def close(self):
-        """Releases the connection back to the pool. Once this method has been
-        called the underlying ``raw`` object must not be accessed again.
-
-        *Note: Should not normally need to be called explicitly.*
-        """
-        if not self._content_consumed:
-            return self.raw.close()
-
-        return self.raw.release_conn()
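A short usage sketch tying together ok, raise_for_status and is_redirect from the Response class above, assuming upstream requests behaviour; the httpbin.org endpoints are examples and need network access.

import requests
from requests.exceptions import HTTPError

r = requests.get('https://httpbin.org/status/404')
print(r.ok)  # False: raise_for_status() would raise for any 4xx/5xx status
try:
    r.raise_for_status()
except HTTPError as exc:
    print('request failed:', exc)

# With redirect following disabled, the 302 itself is returned.
r = requests.get('https://httpbin.org/redirect/1', allow_redirects=False)
print(r.is_redirect, r.headers.get('Location'))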
diff --git a/tools/swarming_client/third_party/requests/packages/README.rst b/tools/swarming_client/third_party/requests/packages/README.rst
deleted file mode 100644
index 83e0c62..0000000
--- a/tools/swarming_client/third_party/requests/packages/README.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-If you are planning to submit a pull request to requests with any changes in
-this library, do not go any further. These are independent libraries which we
-vendor into requests. Any changes necessary to these libraries must be made in
-them and submitted as separate pull requests to those libraries.
-
-urllib3 pull requests go here: https://github.com/shazow/urllib3
-
-chardet pull requests go here: https://github.com/chardet/chardet
-
-See https://github.com/kennethreitz/requests/pull/1812#issuecomment-30854316
-for the reasoning behind this.
diff --git a/tools/swarming_client/third_party/requests/packages/__init__.py b/tools/swarming_client/third_party/requests/packages/__init__.py
deleted file mode 100644
index 1ddb524..0000000
--- a/tools/swarming_client/third_party/requests/packages/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-'''
-Debian and other distributions "unbundle" requests' vendored dependencies, and
-rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
-The problem with this is that not only requests itself imports those
-dependencies, but third-party code outside of the distros' control too.
-
-In reaction to these problems, the distro maintainers replaced
-``requests.packages`` with a magical "stub module" that imports the correct
-modules. The implementations were varying in quality and all had severe
-problems. For example, a symlink (or hardlink) that links the correct modules
-into place introduces problems regarding object identity, since you now have
-two modules in `sys.modules` with the same API, but different identities::
-
-    requests.packages.urllib3 is not urllib3
-
-With version ``2.5.2``, requests started to maintain its own stub, so that
-distro-specific breakage would be reduced to a minimum, even though the whole
-issue is not requests' fault in the first place. See
-https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
-request.
-'''
-
-from __future__ import absolute_import
-import sys
-
-try:
-    from . import urllib3
-except ImportError:
-    import urllib3
-    sys.modules['%s.urllib3' % __name__] = urllib3
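The stub above works because registering a module under an aliased dotted name in sys.modules preserves object identity, unlike a symlinked or copied module. A minimal self-contained sketch of the same technique follows; mypkg and mypkg.packages are hypothetical names, and json merely stands in for the shared dependency.

import sys
import types
import json as real_json  # stand-in for the globally installed dependency

# Build a tiny package skeleton in memory so the dotted name has real parents.
pkg = types.ModuleType('mypkg')
subpkg = types.ModuleType('mypkg.packages')
pkg.packages = subpkg
sys.modules['mypkg'] = pkg
sys.modules['mypkg.packages'] = subpkg

# The aliasing trick: point the vendored dotted name at the real module so
# both spellings resolve to the *same* module object.
sys.modules['mypkg.packages.json'] = real_json
subpkg.json = real_json

import mypkg.packages.json as aliased
print(aliased is real_json)  # True: identity (and module state) is preserved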
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/__init__.py b/tools/swarming_client/third_party/requests/packages/urllib3/__init__.py
deleted file mode 100644
index b738898..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/__init__.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
-urllib3 - Thread-safe connection pooling and re-using.
-"""
-
-from __future__ import absolute_import
-import warnings
-
-from .connectionpool import (
-    HTTPConnectionPool,
-    HTTPSConnectionPool,
-    connection_from_url
-)
-
-from . import exceptions
-from .filepost import encode_multipart_formdata
-from .poolmanager import PoolManager, ProxyManager, proxy_from_url
-from .response import HTTPResponse
-from .util.request import make_headers
-from .util.url import get_host
-from .util.timeout import Timeout
-from .util.retry import Retry
-
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-try:  # Python 2.7+
-    from logging import NullHandler
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
-
-__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
-__license__ = 'MIT'
-__version__ = '1.13.1'
-
-__all__ = (
-    'HTTPConnectionPool',
-    'HTTPSConnectionPool',
-    'PoolManager',
-    'ProxyManager',
-    'HTTPResponse',
-    'Retry',
-    'Timeout',
-    'add_stderr_logger',
-    'connection_from_url',
-    'disable_warnings',
-    'encode_multipart_formdata',
-    'get_host',
-    'make_headers',
-    'proxy_from_url',
-)
-
-logging.getLogger(__name__).addHandler(NullHandler())
-
-
-def add_stderr_logger(level=logging.DEBUG):
-    """
-    Helper for quickly adding a StreamHandler to the logger. Useful for
-    debugging.
-
-    Returns the handler after adding it.
-    """
-    # This method needs to be in this __init__.py to get the __name__ correct
-    # even if urllib3 is vendored within another package.
-    logger = logging.getLogger(__name__)
-    handler = logging.StreamHandler()
-    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
-    logger.addHandler(handler)
-    logger.setLevel(level)
-    logger.debug('Added a stderr logging handler to logger: %s' % __name__)
-    return handler
-
-# ... Clean up.
-del NullHandler
-
-
-# SecurityWarning's are silenced by default in this vendored copy.
-warnings.simplefilter('ignore', exceptions.SecurityWarning, append=True)
-# SubjectAltNameWarning's should go off once per host.
-warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
-# InsecurePlatformWarning's don't vary between requests, so they are silenced too.
-warnings.simplefilter('ignore', exceptions.InsecurePlatformWarning,
-                      append=True)
-# SNIMissingWarning's are silenced as well.
-warnings.simplefilter('ignore', exceptions.SNIMissingWarning)
-
-
-def disable_warnings(category=exceptions.HTTPWarning):
-    """
-    Helper for quickly disabling all urllib3 warnings.
-    """
-    warnings.simplefilter('ignore', category)
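A brief usage sketch of the two helpers defined above, assuming the standalone urllib3 distribution rather than this vendored copy; httpbin.org is only an example endpoint and the request needs network access.

import urllib3

# Attach a DEBUG-level stderr handler while diagnosing connection behaviour.
handler = urllib3.add_stderr_logger()

pool = urllib3.PoolManager()
resp = pool.request('GET', 'https://httpbin.org/get')
print(resp.status)

# Silence one warning category globally, mirroring the simplefilter calls above.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)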
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/_collections.py b/tools/swarming_client/third_party/requests/packages/urllib3/_collections.py
deleted file mode 100644
index 67f3ce9..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/_collections.py
+++ /dev/null
@@ -1,324 +0,0 @@
-from __future__ import absolute_import
-from collections import Mapping, MutableMapping
-try:
-    from threading import RLock
-except ImportError:  # Platform-specific: No threads available
-    class RLock:
-        def __enter__(self):
-            pass
-
-        def __exit__(self, exc_type, exc_value, traceback):
-            pass
-
-
-try:  # Python 2.7+
-    from collections import OrderedDict
-except ImportError:
-    from .packages.ordered_dict import OrderedDict
-from .packages.six import iterkeys, itervalues, PY3
-
-
-__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
-
-
-_Null = object()
-
-
-class RecentlyUsedContainer(MutableMapping):
-    """
-    Provides a thread-safe dict-like container which maintains up to
-    ``maxsize`` keys while throwing away the least-recently-used keys beyond
-    ``maxsize``.
-
-    :param maxsize:
-        Maximum number of recent elements to retain.
-
-    :param dispose_func:
-        Every time an item is evicted from the container,
-        ``dispose_func(value)`` is called with the evicted value.
-    """
-
-    ContainerCls = OrderedDict
-
-    def __init__(self, maxsize=10, dispose_func=None):
-        self._maxsize = maxsize
-        self.dispose_func = dispose_func
-
-        self._container = self.ContainerCls()
-        self.lock = RLock()
-
-    def __getitem__(self, key):
-        # Re-insert the item, moving it to the end of the eviction line.
-        with self.lock:
-            item = self._container.pop(key)
-            self._container[key] = item
-            return item
-
-    def __setitem__(self, key, value):
-        evicted_value = _Null
-        with self.lock:
-            # Possibly evict the existing value of 'key'
-            evicted_value = self._container.get(key, _Null)
-            self._container[key] = value
-
-            # If we didn't evict an existing value, we might have to evict the
-            # least recently used item from the beginning of the container.
-            if len(self._container) > self._maxsize:
-                _key, evicted_value = self._container.popitem(last=False)
-
-        if self.dispose_func and evicted_value is not _Null:
-            self.dispose_func(evicted_value)
-
-    def __delitem__(self, key):
-        with self.lock:
-            value = self._container.pop(key)
-
-        if self.dispose_func:
-            self.dispose_func(value)
-
-    def __len__(self):
-        with self.lock:
-            return len(self._container)
-
-    def __iter__(self):
-        raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.')
-
-    def clear(self):
-        with self.lock:
-            # Copy pointers to all values, then wipe the mapping
-            values = list(itervalues(self._container))
-            self._container.clear()
-
-        if self.dispose_func:
-            for value in values:
-                self.dispose_func(value)
-
-    def keys(self):
-        with self.lock:
-            return list(iterkeys(self._container))
-
-
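A small usage sketch of the eviction behaviour described in the docstring above, assuming the standalone urllib3 1.x distribution (this vendored copy would be imported as requests.packages.urllib3._collections instead).

from urllib3._collections import RecentlyUsedContainer

# Three-slot LRU: inserting a fourth key evicts the least recently used entry
# and hands the evicted value to dispose_func.
evicted = []
cache = RecentlyUsedContainer(maxsize=3, dispose_func=evicted.append)

for name in ('a', 'b', 'c'):
    cache[name] = name.upper()

cache['a']        # touching 'a' makes 'b' the oldest entry
cache['d'] = 'D'  # evicts 'b'

print(sorted(cache.keys()))  # ['a', 'c', 'd']
print(evicted)               # ['B']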
-class HTTPHeaderDict(MutableMapping):
-    """
-    :param headers:
-        An iterable of field-value pairs. Must not contain multiple field names
-        when compared case-insensitively.
-
-    :param kwargs:
-        Additional field-value pairs to pass in to ``dict.update``.
-
-    A ``dict`` like container for storing HTTP Headers.
-
-    Field names are stored and compared case-insensitively in compliance with
-    RFC 7230. Iteration provides the first case-sensitive key seen for each
-    case-insensitive pair.
-
-    Using ``__setitem__`` syntax overwrites fields that compare equal
-    case-insensitively in order to maintain ``dict``'s API. To keep multiple
-    values for fields that compare equal, create a new ``HTTPHeaderDict``
-    and use ``.add`` in a loop instead.
-
-    If multiple fields that are equal case-insensitively are passed to the
-    constructor or ``.update``, the behavior is undefined and some will be
-    lost.
-
-    >>> headers = HTTPHeaderDict()
-    >>> headers.add('Set-Cookie', 'foo=bar')
-    >>> headers.add('set-cookie', 'baz=quxx')
-    >>> headers['content-length'] = '7'
-    >>> headers['SET-cookie']
-    'foo=bar, baz=quxx'
-    >>> headers['Content-Length']
-    '7'
-    """
-
-    def __init__(self, headers=None, **kwargs):
-        super(HTTPHeaderDict, self).__init__()
-        self._container = {}
-        if headers is not None:
-            if isinstance(headers, HTTPHeaderDict):
-                self._copy_from(headers)
-            else:
-                self.extend(headers)
-        if kwargs:
-            self.extend(kwargs)
-
-    def __setitem__(self, key, val):
-        self._container[key.lower()] = (key, val)
-        return self._container[key.lower()]
-
-    def __getitem__(self, key):
-        val = self._container[key.lower()]
-        return ', '.join(val[1:])
-
-    def __delitem__(self, key):
-        del self._container[key.lower()]
-
-    def __contains__(self, key):
-        return key.lower() in self._container
-
-    def __eq__(self, other):
-        if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
-            return False
-        if not isinstance(other, type(self)):
-            other = type(self)(other)
-        return (dict((k.lower(), v) for k, v in self.itermerged()) ==
-                dict((k.lower(), v) for k, v in other.itermerged()))
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    if not PY3:  # Python 2
-        iterkeys = MutableMapping.iterkeys
-        itervalues = MutableMapping.itervalues
-
-    __marker = object()
-
-    def __len__(self):
-        return len(self._container)
-
-    def __iter__(self):
-        # Only provide the originally cased names
-        for vals in self._container.values():
-            yield vals[0]
-
-    def pop(self, key, default=__marker):
-        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-          If key is not found, d is returned if given, otherwise KeyError is raised.
-        '''
-        # Using the MutableMapping function directly fails due to the private marker.
-        # Using ordinary dict.pop would expose the internal structures.
-        # So let's reinvent the wheel.
-        try:
-            value = self[key]
-        except KeyError:
-            if default is self.__marker:
-                raise
-            return default
-        else:
-            del self[key]
-            return value
-
-    def discard(self, key):
-        try:
-            del self[key]
-        except KeyError:
-            pass
-
-    def add(self, key, val):
-        """Adds a (name, value) pair, doesn't overwrite the value if it already
-        exists.
-
-        >>> headers = HTTPHeaderDict(foo='bar')
-        >>> headers.add('Foo', 'baz')
-        >>> headers['foo']
-        'bar, baz'
-        """
-        key_lower = key.lower()
-        new_vals = key, val
-        # Keep the common case aka no item present as fast as possible
-        vals = self._container.setdefault(key_lower, new_vals)
-        if new_vals is not vals:
-            # new_vals was not inserted, as there was a previous one
-            if isinstance(vals, list):
-                # If already several items got inserted, we have a list
-                vals.append(val)
-            else:
-                # vals should be a tuple then, i.e. only one item so far
-                # Need to convert the tuple to list for further extension
-                self._container[key_lower] = [vals[0], vals[1], val]
-
-    def extend(self, *args, **kwargs):
-        """Generic import function for any type of header-like object.
-        Adapted version of MutableMapping.update in order to insert items
-        with self.add instead of self.__setitem__
-        """
-        if len(args) > 1:
-            raise TypeError("extend() takes at most 1 positional "
-                            "arguments ({0} given)".format(len(args)))
-        other = args[0] if len(args) >= 1 else ()
-
-        if isinstance(other, HTTPHeaderDict):
-            for key, val in other.iteritems():
-                self.add(key, val)
-        elif isinstance(other, Mapping):
-            for key in other:
-                self.add(key, other[key])
-        elif hasattr(other, "keys"):
-            for key in other.keys():
-                self.add(key, other[key])
-        else:
-            for key, value in other:
-                self.add(key, value)
-
-        for key, value in kwargs.items():
-            self.add(key, value)
-
-    def getlist(self, key):
-        """Returns a list of all the values for the named field. Returns an
-        empty list if the key doesn't exist."""
-        try:
-            vals = self._container[key.lower()]
-        except KeyError:
-            return []
-        else:
-            if isinstance(vals, tuple):
-                return [vals[1]]
-            else:
-                return vals[1:]
-
-    # Backwards compatibility for httplib
-    getheaders = getlist
-    getallmatchingheaders = getlist
-    iget = getlist
-
-    def __repr__(self):
-        return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
-
-    def _copy_from(self, other):
-        for key in other:
-            val = other.getlist(key)
-            if isinstance(val, list):
-                # Don't need to convert tuples
-                val = list(val)
-            self._container[key.lower()] = [key] + val
-
-    def copy(self):
-        clone = type(self)()
-        clone._copy_from(self)
-        return clone
-
-    def iteritems(self):
-        """Iterate over all header lines, including duplicate ones."""
-        for key in self:
-            vals = self._container[key.lower()]
-            for val in vals[1:]:
-                yield vals[0], val
-
-    def itermerged(self):
-        """Iterate over all headers, merging duplicate ones together."""
-        for key in self:
-            val = self._container[key.lower()]
-            yield val[0], ', '.join(val[1:])
-
-    def items(self):
-        return list(self.iteritems())
-
-    @classmethod
-    def from_httplib(cls, message):  # Python 2
-        """Read headers from a Python 2 httplib message object."""
-        # python2.7 does not expose a proper API for exporting multiheaders
-        # efficiently. This function re-reads raw lines from the message
-        # object and extracts the multiheaders properly.
-        headers = []
-
-        for line in message.headers:
-            if line.startswith((' ', '\t')):
-                key, value = headers[-1]
-                headers[-1] = (key, value + '\r\n' + line.rstrip())
-                continue
-
-            key, value = line.split(':', 1)
-            headers.append((key, value.strip()))
-
-        return cls(headers)
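A short usage sketch of the add/extend/getlist behaviour documented above, assuming the standalone urllib3 distribution (the vendored path would be requests.packages.urllib3._collections).

from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict({'Accept': 'text/html'})
headers.add('Set-Cookie', 'a=1')
headers.add('set-cookie', 'b=2')      # case-insensitive match: appends to the field
headers.extend({'X-Trace': 'abc'})    # dict-like objects are merged via add()

print(headers['set-cookie'])          # 'a=1, b=2'  (merged view)
print(headers.getlist('Set-Cookie'))  # ['a=1', 'b=2']  (individual values)
print('ACCEPT' in headers)            # True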
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/connection.py b/tools/swarming_client/third_party/requests/packages/urllib3/connection.py
deleted file mode 100644
index 1e4cd41..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/connection.py
+++ /dev/null
@@ -1,288 +0,0 @@
-from __future__ import absolute_import
-import datetime
-import os
-import sys
-import socket
-from socket import error as SocketError, timeout as SocketTimeout
-import warnings
-from .packages import six
-
-try:  # Python 3
-    from http.client import HTTPConnection as _HTTPConnection
-    from http.client import HTTPException  # noqa: unused in this module
-except ImportError:
-    from httplib import HTTPConnection as _HTTPConnection
-    from httplib import HTTPException  # noqa: unused in this module
-
-try:  # Compiled with SSL?
-    import ssl
-    BaseSSLError = ssl.SSLError
-except (ImportError, AttributeError):  # Platform-specific: No SSL.
-    ssl = None
-
-    class BaseSSLError(BaseException):
-        pass
-
-
-try:  # Python 3:
-    # Not a no-op, we're adding this to the namespace so it can be imported.
-    ConnectionError = ConnectionError
-except NameError:  # Python 2:
-    class ConnectionError(Exception):
-        pass
-
-
-from .exceptions import (
-    NewConnectionError,
-    ConnectTimeoutError,
-    SubjectAltNameWarning,
-    SystemTimeWarning,
-)
-from .packages.ssl_match_hostname import match_hostname
-
-from .util.ssl_ import (
-    resolve_cert_reqs,
-    resolve_ssl_version,
-    ssl_wrap_socket,
-    assert_fingerprint,
-)
-
-
-from .util import connection
-
-port_by_scheme = {
-    'http': 80,
-    'https': 443,
-}
-
-RECENT_DATE = datetime.date(2014, 1, 1)
-
-
-class DummyConnection(object):
-    """Used to detect a failed ConnectionCls import."""
-    pass
-
-
-class HTTPConnection(_HTTPConnection, object):
-    """
-    Based on httplib.HTTPConnection but provides an extra constructor
-    backwards-compatibility layer between older and newer Pythons.
-
-    Additional keyword parameters are used to configure attributes of the connection.
-    Accepted parameters include:
-
-      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
-      - ``source_address``: Set the source address for the current connection.
-
-        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
-
-      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
-        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
-        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
-
-        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
-        you might pass::
-
-            HTTPConnection.default_socket_options + [
-                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
-            ]
-
-        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
-    """
-
-    default_port = port_by_scheme['http']
-
-    #: Disable Nagle's algorithm by default.
-    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
-    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
-
-    #: Whether this connection verifies the host's certificate.
-    is_verified = False
-
-    def __init__(self, *args, **kw):
-        if six.PY3:  # Python 3
-            kw.pop('strict', None)
-
-        # Pre-set source_address in case we have an older Python like 2.6.
-        self.source_address = kw.get('source_address')
-
-        if sys.version_info < (2, 7):  # Python 2.6
-            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
-            # not newer versions. We can still use it when creating a
-            # connection though, so we pop it *after* we have saved it as
-            # self.source_address.
-            kw.pop('source_address', None)
-
-        #: The socket options provided by the user. If no options are
-        #: provided, we use the default options.
-        self.socket_options = kw.pop('socket_options', self.default_socket_options)
-
-        # Superclass also sets self.source_address in Python 2.7+.
-        _HTTPConnection.__init__(self, *args, **kw)
-
-    def _new_conn(self):
-        """ Establish a socket connection and set nodelay settings on it.
-
-        :return: New socket connection.
-        """
-        extra_kw = {}
-        if self.source_address:
-            extra_kw['source_address'] = self.source_address
-
-        if self.socket_options:
-            extra_kw['socket_options'] = self.socket_options
-
-        try:
-            conn = connection.create_connection(
-                (self.host, self.port), self.timeout, **extra_kw)
-
-        except SocketTimeout as e:
-            raise ConnectTimeoutError(
-                self, "Connection to %s timed out. (connect timeout=%s)" %
-                (self.host, self.timeout))
-
-        except SocketError as e:
-            raise NewConnectionError(
-                self, "Failed to establish a new connection: %s" % e)
-
-        return conn
-
-    def _prepare_conn(self, conn):
-        self.sock = conn
-        # The _tunnel_host attribute was added in Python 2.6.3 (via
-        # http://hg.python.org/cpython/rev/0f57b30a152f), so Pythons 2.6.0-2.6.2
-        # do not have it.
-        if getattr(self, '_tunnel_host', None):
-            # TODO: Fix tunnel so it doesn't depend on self.sock state.
-            self._tunnel()
-            # Mark this connection as not reusable
-            self.auto_open = 0
-
-    def connect(self):
-        conn = self._new_conn()
-        self._prepare_conn(conn)
-
-
-class HTTPSConnection(HTTPConnection):
-    default_port = port_by_scheme['https']
-
-    def __init__(self, host, port=None, key_file=None, cert_file=None,
-                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
-
-        HTTPConnection.__init__(self, host, port, strict=strict,
-                                timeout=timeout, **kw)
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-
-        # Required property for Google AppEngine 1.9.0 which otherwise causes
-        # HTTPS requests to go out as HTTP. (See Issue #356)
-        self._protocol = 'https'
-
-    def connect(self):
-        conn = self._new_conn()
-        self._prepare_conn(conn)
-        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
-
-
-class VerifiedHTTPSConnection(HTTPSConnection):
-    """
-    Based on httplib.HTTPSConnection but wraps the socket with
-    SSL certification.
-    """
-    cert_reqs = None
-    ca_certs = None
-    ca_cert_dir = None
-    ssl_version = None
-    assert_fingerprint = None
-
-    def set_cert(self, key_file=None, cert_file=None,
-                 cert_reqs=None, ca_certs=None,
-                 assert_hostname=None, assert_fingerprint=None,
-                 ca_cert_dir=None):
-
-        if (ca_certs or ca_cert_dir) and cert_reqs is None:
-            cert_reqs = 'CERT_REQUIRED'
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-        self.cert_reqs = cert_reqs
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
-        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
-
-    def connect(self):
-        # Add certificate verification
-        conn = self._new_conn()
-
-        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
-        resolved_ssl_version = resolve_ssl_version(self.ssl_version)
-
-        hostname = self.host
-        if getattr(self, '_tunnel_host', None):
-            # _tunnel_host was added in Python 2.6.3
-            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
-
-            self.sock = conn
-            # Calls self._set_hostport(), so self.host is
-            # self._tunnel_host below.
-            self._tunnel()
-            # Mark this connection as not reusable
-            self.auto_open = 0
-
-            # Override the host with the one we're requesting data from.
-            hostname = self._tunnel_host
-
-        is_time_off = datetime.date.today() < RECENT_DATE
-        if is_time_off:
-            warnings.warn((
-                'System time is way off (before {0}). This will probably '
-                'lead to SSL verification errors').format(RECENT_DATE),
-                SystemTimeWarning
-            )
-
-        # Wrap socket using verification with the root certs in
-        # trusted_root_certs
-        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
-                                    cert_reqs=resolved_cert_reqs,
-                                    ca_certs=self.ca_certs,
-                                    ca_cert_dir=self.ca_cert_dir,
-                                    server_hostname=hostname,
-                                    ssl_version=resolved_ssl_version)
-
-        if self.assert_fingerprint:
-            assert_fingerprint(self.sock.getpeercert(binary_form=True),
-                               self.assert_fingerprint)
-        elif resolved_cert_reqs != ssl.CERT_NONE \
-                and self.assert_hostname is not False:
-            cert = self.sock.getpeercert()
-            if not cert.get('subjectAltName', ()):
-                warnings.warn((
-                    'Certificate for {0} has no `subjectAltName`, falling back to check for a '
-                    '`commonName` for now. This feature is being removed by major browsers and '
-                    'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
-                    'for details.)'.format(hostname)),
-                    SubjectAltNameWarning
-                )
-
-            # In case the hostname is an IPv6 address, strip the square
-            # brackets from it before using it to validate. This is because
-            # a certificate with an IPv6 address in it won't have square
-            # brackets around that address. Sadly, match_hostname won't do this
-            # for us: it expects the plain host part without any extra work
-            # that might have been done to make it palatable to httplib.
-            asserted_hostname = self.assert_hostname or hostname
-            asserted_hostname = asserted_hostname.strip('[]')
-            match_hostname(cert, asserted_hostname)
-
-        self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
-                            self.assert_fingerprint is not None)
-
-
-if ssl:
-    # Make a copy for testing.
-    UnverifiedHTTPSConnection = HTTPSConnection
-    HTTPSConnection = VerifiedHTTPSConnection
-else:
-    HTTPSConnection = DummyConnection
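The HTTPConnection docstring above suggests enabling TCP keep-alive on top of the default socket options; the sketch below shows how that might be wired into a pool, assuming the standalone urllib3 distribution (httpbin.org is an example endpoint and the request needs network access).

import socket
import urllib3
from urllib3.connection import HTTPConnection

# Keep the default TCP_NODELAY option and additionally enable TCP keep-alive.
keepalive_options = HTTPConnection.default_socket_options + [
    (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]

# Extra conn_kw such as socket_options are forwarded to every connection the pool creates.
pool = urllib3.HTTPConnectionPool('httpbin.org', port=80, maxsize=2,
                                  socket_options=keepalive_options)
resp = pool.request('GET', '/get')
print(resp.status)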
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/connectionpool.py b/tools/swarming_client/third_party/requests/packages/urllib3/connectionpool.py
deleted file mode 100644
index 995b416..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/connectionpool.py
+++ /dev/null
@@ -1,818 +0,0 @@
-from __future__ import absolute_import
-import errno
-import logging
-import sys
-import warnings
-
-from socket import error as SocketError, timeout as SocketTimeout
-import socket
-
-try:  # Python 3
-    from queue import LifoQueue, Empty, Full
-except ImportError:
-    from Queue import LifoQueue, Empty, Full
-    # Queue is imported for side effects on MS Windows
-    import Queue as _unused_module_Queue  # noqa: unused
-
-
-from .exceptions import (
-    ClosedPoolError,
-    ProtocolError,
-    EmptyPoolError,
-    HeaderParsingError,
-    HostChangedError,
-    LocationValueError,
-    MaxRetryError,
-    ProxyError,
-    ReadTimeoutError,
-    SSLError,
-    TimeoutError,
-    InsecureRequestWarning,
-    NewConnectionError,
-)
-from .packages.ssl_match_hostname import CertificateError
-from .packages import six
-from .connection import (
-    port_by_scheme,
-    DummyConnection,
-    HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
-    HTTPException, BaseSSLError,
-)
-from .request import RequestMethods
-from .response import HTTPResponse
-
-from .util.connection import is_connection_dropped
-from .util.response import assert_header_parsing
-from .util.retry import Retry
-from .util.timeout import Timeout
-from .util.url import get_host, Url
-
-
-xrange = six.moves.xrange
-
-log = logging.getLogger(__name__)
-
-_Default = object()
-
-
-# Pool objects
-class ConnectionPool(object):
-    """
-    Base class for all connection pools, such as
-    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
-    """
-
-    scheme = None
-    QueueCls = LifoQueue
-
-    def __init__(self, host, port=None):
-        if not host:
-            raise LocationValueError("No host specified.")
-
-        self.host = host
-        self.port = port
-
-    def __str__(self):
-        return '%s(host=%r, port=%r)' % (type(self).__name__,
-                                         self.host, self.port)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.close()
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def close(self):
-        """
-        Close all pooled connections and disable the pool.
-        """
-        pass
-
-
-# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
-_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
-
-
-class HTTPConnectionPool(ConnectionPool, RequestMethods):
-    """
-    Thread-safe connection pool for one host.
-
-    :param host:
-        Host used for this HTTP Connection (e.g. "localhost"), passed into
-        :class:`httplib.HTTPConnection`.
-
-    :param port:
-        Port used for this HTTP Connection (None is equivalent to 80), passed
-        into :class:`httplib.HTTPConnection`.
-
-    :param strict:
-        Causes BadStatusLine to be raised if the status line can't be parsed
-        as a valid HTTP/1.0 or 1.1 status line, passed into
-        :class:`httplib.HTTPConnection`.
-
-        .. note::
-           Only works in Python 2. This parameter is ignored in Python 3.
-
-    :param timeout:
-        Socket timeout in seconds for each individual connection. This can
-        be a float or integer, which sets the timeout for the HTTP request,
-        or an instance of :class:`urllib3.util.Timeout` which gives you more
-        fine-grained control over request timeouts. Once the constructor has
-        run, this is always a `urllib3.util.Timeout` object.
-
-    :param maxsize:
-        Number of connections to save that can be reused. More than 1 is useful
-        in multithreaded situations. If ``block`` is set to False, more
-        connections will be created but they will not be saved once they've
-        been used.
-
-    :param block:
-        If set to True, no more than ``maxsize`` connections will be used at
-        a time. When no free connections are available, the call will block
-        until a connection has been released. This is a useful side effect for
-        particular multithreaded situations where one does not want to use more
-        than maxsize connections per host to prevent flooding.
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-
-    :param retries:
-        Retry configuration to use by default with requests in this pool.
-
-    :param _proxy:
-        Parsed proxy URL; should not be used directly. Instead, see
-        :class:`urllib3.connectionpool.ProxyManager`.
-
-    :param _proxy_headers:
-        A dictionary of proxy headers; should not be used directly. Instead,
-        see :class:`urllib3.connectionpool.ProxyManager`.
-
-    :param \**conn_kw:
-        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
-        :class:`urllib3.connection.HTTPSConnection` instances.
-    """
-
-    scheme = 'http'
-    ConnectionCls = HTTPConnection
-
-    def __init__(self, host, port=None, strict=False,
-                 timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
-                 headers=None, retries=None,
-                 _proxy=None, _proxy_headers=None,
-                 **conn_kw):
-        ConnectionPool.__init__(self, host, port)
-        RequestMethods.__init__(self, headers)
-
-        self.strict = strict
-
-        if not isinstance(timeout, Timeout):
-            timeout = Timeout.from_float(timeout)
-
-        if retries is None:
-            retries = Retry.DEFAULT
-
-        self.timeout = timeout
-        self.retries = retries
-
-        self.pool = self.QueueCls(maxsize)
-        self.block = block
-
-        self.proxy = _proxy
-        self.proxy_headers = _proxy_headers or {}
-
-        # Fill the queue up so that doing get() on it will block properly
-        for _ in xrange(maxsize):
-            self.pool.put(None)
-
-        # These are mostly for testing and debugging purposes.
-        self.num_connections = 0
-        self.num_requests = 0
-        self.conn_kw = conn_kw
-
-        if self.proxy:
-            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
-            # We cannot know if the user has added default socket options, so we cannot replace the
-            # list.
-            self.conn_kw.setdefault('socket_options', [])
-
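A usage sketch of the constructor parameters documented above (maxsize, block, timeout, retries), assuming the standalone urllib3 distribution; the endpoint and the retry/timeout values are illustrative only.

from urllib3 import HTTPConnectionPool
from urllib3.util.retry import Retry
from urllib3.util.timeout import Timeout

# Keep up to four sockets to the host open; block=True caps concurrency at
# maxsize instead of opening extra throwaway connections.
pool = HTTPConnectionPool(
    'httpbin.org', port=80, maxsize=4, block=True,
    timeout=Timeout(connect=2.0, read=5.0),
    retries=Retry(total=3, backoff_factor=0.5),
)

resp = pool.request('GET', '/get', fields={'q': 'demo'})
print(resp.status, len(resp.data))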
-    def _new_conn(self):
-        """
-        Return a fresh :class:`HTTPConnection`.
-        """
-        self.num_connections += 1
-        log.info("Starting new HTTP connection (%d): %s" %
-                 (self.num_connections, self.host))
-
-        conn = self.ConnectionCls(host=self.host, port=self.port,
-                                  timeout=self.timeout.connect_timeout,
-                                  strict=self.strict, **self.conn_kw)
-        return conn
-
-    def _get_conn(self, timeout=None):
-        """
-        Get a connection. Will return a pooled connection if one is available.
-
-        If no connections are available and :prop:`.block` is ``False``, then a
-        fresh connection is returned.
-
-        :param timeout:
-            Seconds to wait before giving up and raising
-            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
-            :prop:`.block` is ``True``.
-        """
-        conn = None
-        try:
-            conn = self.pool.get(block=self.block, timeout=timeout)
-
-        except AttributeError:  # self.pool is None
-            raise ClosedPoolError(self, "Pool is closed.")
-
-        except Empty:
-            if self.block:
-                raise EmptyPoolError(self,
-                                     "Pool reached maximum size and no more "
-                                     "connections are allowed.")
-            pass  # Oh well, we'll create a new connection then
-
-        # If this is a persistent connection, check if it got disconnected
-        if conn and is_connection_dropped(conn):
-            log.info("Resetting dropped connection: %s" % self.host)
-            conn.close()
-            if getattr(conn, 'auto_open', 1) == 0:
-                # This is a proxied connection that has been mutated by
-                # httplib._tunnel() and cannot be reused (since it would
-                # attempt to bypass the proxy)
-                conn = None
-
-        return conn or self._new_conn()
-
-    def _put_conn(self, conn):
-        """
-        Put a connection back into the pool.
-
-        :param conn:
-            Connection object for the current host and port as returned by
-            :meth:`._new_conn` or :meth:`._get_conn`.
-
-        If the pool is already full, the connection is closed and discarded
-        because we exceeded maxsize. If connections are discarded frequently,
-        then maxsize should be increased.
-
-        If the pool is closed, then the connection will be closed and discarded.
-        """
-        try:
-            self.pool.put(conn, block=False)
-            return  # Everything is dandy, done.
-        except AttributeError:
-            # self.pool is None.
-            pass
-        except Full:
-            # This should never happen if self.block == True
-            log.warning(
-                "Connection pool is full, discarding connection: %s" %
-                self.host)
-
-        # Connection never got put back into the pool, close it.
-        if conn:
-            conn.close()
-
-    def _validate_conn(self, conn):
-        """
-        Called right before a request is made, after the socket is created.
-        """
-        pass
-
-    def _prepare_proxy(self, conn):
-        # Nothing to do for HTTP connections.
-        pass
-
-    def _get_timeout(self, timeout):
-        """ Helper that always returns a :class:`urllib3.util.Timeout` """
-        if timeout is _Default:
-            return self.timeout.clone()
-
-        if isinstance(timeout, Timeout):
-            return timeout.clone()
-        else:
-            # User passed us an int/float. This is for backwards compatibility,
-            # can be removed later
-            return Timeout.from_float(timeout)
-
-    def _raise_timeout(self, err, url, timeout_value):
-        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
-
-        if isinstance(err, SocketTimeout):
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-        # See the above comment about EAGAIN in Python 3. In Python 2 we have
-        # to specifically catch it and throw the timeout error
-        if hasattr(err, 'errno') and err.errno in _blocking_errnos:
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-        # Catch possible read timeouts thrown as SSL errors. If not the
-        # case, rethrow the original. We need to do this because of:
-        # http://bugs.python.org/issue10272
-        if 'timed out' in str(err) or 'did not complete (read)' in str(err):  # Python 2.6
-            raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
-
-    def _make_request(self, conn, method, url, timeout=_Default,
-                      **httplib_request_kw):
-        """
-        Perform a request on a given urllib connection object taken from our
-        pool.
-
-        :param conn:
-            a connection from one of our connection pools
-
-        :param timeout:
-            Socket timeout in seconds for the request. This can be a
-            float or integer, which will set the same timeout value for
-            the socket connect and the socket read, or an instance of
-            :class:`urllib3.util.Timeout`, which gives you more fine-grained
-            control over your timeouts.
-        """
-        self.num_requests += 1
-
-        timeout_obj = self._get_timeout(timeout)
-        timeout_obj.start_connect()
-        conn.timeout = timeout_obj.connect_timeout
-
-        # Trigger any extra validation we need to do.
-        try:
-            self._validate_conn(conn)
-        except (SocketTimeout, BaseSSLError) as e:
-            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
-            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
-            raise
-
-        # conn.request() calls httplib.*.request, not the method in
-        # urllib3.request. It also calls makefile (recv) on the socket.
-        conn.request(method, url, **httplib_request_kw)
-
-        # Reset the timeout for the recv() on the socket
-        read_timeout = timeout_obj.read_timeout
-
-        # App Engine doesn't have a sock attr
-        if getattr(conn, 'sock', None):
-            # In Python 3 socket.py will catch EAGAIN and return None when you
-            # try and read into the file pointer created by http.client, which
-            # instead raises a BadStatusLine exception. Instead of catching
-            # the exception and assuming all BadStatusLine exceptions are read
-            # timeouts, check for a zero timeout before making the request.
-            if read_timeout == 0:
-                raise ReadTimeoutError(
-                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-            if read_timeout is Timeout.DEFAULT_TIMEOUT:
-                conn.sock.settimeout(socket.getdefaulttimeout())
-            else:  # None or a value
-                conn.sock.settimeout(read_timeout)
-
-        # Receive the response from the server
-        try:
-            try:  # Python 2.7, use buffering of HTTP responses
-                httplib_response = conn.getresponse(buffering=True)
-            except TypeError:  # Python 2.6 and older
-                httplib_response = conn.getresponse()
-        except (SocketTimeout, BaseSSLError, SocketError) as e:
-            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
-            raise
-
-        # AppEngine doesn't have a version attr.
-        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
-        log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
-                                          httplib_response.status,
-                                          httplib_response.length))
-
-        try:
-            assert_header_parsing(httplib_response.msg)
-        except HeaderParsingError as hpe:  # Platform-specific: Python 3
-            log.warning(
-                'Failed to parse headers (url=%s): %s',
-                self._absolute_url(url), hpe, exc_info=True)
-
-        return httplib_response
-
-    def _absolute_url(self, path):
-        return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
-
-    def close(self):
-        """
-        Close all pooled connections and disable the pool.
-        """
-        # Disable access to the pool
-        old_pool, self.pool = self.pool, None
-
-        try:
-            while True:
-                conn = old_pool.get(block=False)
-                if conn:
-                    conn.close()
-
-        except Empty:
-            pass  # Done.
-
-    def is_same_host(self, url):
-        """
-        Check if the given ``url`` is a member of the same host as this
-        connection pool.
-        """
-        if url.startswith('/'):
-            return True
-
-        # TODO: Add optional support for socket.gethostbyname checking.
-        scheme, host, port = get_host(url)
-
-        # Use explicit default port for comparison when none is given
-        if self.port and not port:
-            port = port_by_scheme.get(scheme)
-        elif not self.port and port == port_by_scheme.get(scheme):
-            port = None
-
-        return (scheme, host, port) == (self.scheme, self.host, self.port)
-
-    def urlopen(self, method, url, body=None, headers=None, retries=None,
-                redirect=True, assert_same_host=True, timeout=_Default,
-                pool_timeout=None, release_conn=None, **response_kw):
-        """
-        Get a connection from the pool and perform an HTTP request. This is the
-        lowest level call for making a request, so you'll need to specify all
-        the raw details.
-
-        .. note::
-
-           More commonly, it's appropriate to use a convenience method provided
-           by :class:`.RequestMethods`, such as :meth:`request`.
-
-        .. note::
-
-           `release_conn` will only behave as expected if
-           `preload_content=False` because we want to make
-           `preload_content=False` the default behaviour someday soon without
-           breaking backwards compatibility.
-
-        :param method:
-            HTTP request method (such as GET, POST, PUT, etc.)
-
-        :param body:
-            Data to send in the request body (useful for creating
-            POST requests, see HTTPConnectionPool.post_url for
-            more convenience).
-
-        :param headers:
-            Dictionary of custom headers to send, such as User-Agent,
-            If-None-Match, etc. If None, pool headers are used. If provided,
-            these headers completely replace any pool-specific headers.
-
-        :param retries:
-            Configure the number of retries to allow before raising a
-            :class:`~urllib3.exceptions.MaxRetryError` exception.
-
-            Pass ``None`` to retry until you receive a response. Pass a
-            :class:`~urllib3.util.retry.Retry` object for fine-grained control
-            over different types of retries.
-            Pass an integer number to retry connection errors that many times,
-            but no other types of errors. Pass zero to never retry.
-
-            If ``False``, then retries are disabled and any exception is raised
-            immediately. Also, instead of raising a MaxRetryError on redirects,
-            the redirect response will be returned.
-
-        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
-
-        :param redirect:
-            If True, automatically handle redirects (status codes 301, 302,
-            303, 307, 308). Each redirect counts as a retry. Disabling retries
-            will disable redirect, too.
-
-        :param assert_same_host:
-            If ``True``, will make sure that the host of the pool requests is
-            consistent; otherwise a HostChangedError is raised. When ``False``,
-            you can use the pool on an HTTP proxy and request foreign hosts.
-
-        :param timeout:
-            If specified, overrides the default timeout for this one
-            request. It may be a float (in seconds) or an instance of
-            :class:`urllib3.util.Timeout`.
-
-        :param pool_timeout:
-            If set and the pool is set to block=True, then this method will
-            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
-            connection is available within the time period.
-
-        :param release_conn:
-            If False, then the urlopen call will not release the connection
-            back into the pool once a response is received (but will release if
-            you read the entire contents of the response such as when
-            `preload_content=True`). This is useful if you're not preloading
-            the response's content immediately. You will need to call
-            ``r.release_conn()`` on the response ``r`` to return the connection
-            back into the pool. If None, it takes the value of
-            ``response_kw.get('preload_content', True)``.
-
-        :param \**response_kw:
-            Additional parameters are passed to
-            :meth:`urllib3.response.HTTPResponse.from_httplib`
-        """
-        if headers is None:
-            headers = self.headers
-
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
-
-        if release_conn is None:
-            release_conn = response_kw.get('preload_content', True)
-
-        # Check host
-        if assert_same_host and not self.is_same_host(url):
-            raise HostChangedError(self, url, retries)
-
-        conn = None
-
-        # Merge the proxy headers. Only do this in HTTP. We have to copy the
-        # headers dict so we can safely change it without those changes being
-        # reflected in anyone else's copy.
-        if self.scheme == 'http':
-            headers = headers.copy()
-            headers.update(self.proxy_headers)
-
-        # Must keep the exception bound to a separate variable or else Python 3
-        # complains about UnboundLocalError.
-        err = None
-
-        try:
-            # Request a connection from the queue.
-            timeout_obj = self._get_timeout(timeout)
-            conn = self._get_conn(timeout=pool_timeout)
-
-            conn.timeout = timeout_obj.connect_timeout
-
-            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
-            if is_new_proxy_conn:
-                self._prepare_proxy(conn)
-
-            # Make the request on the httplib connection object.
-            httplib_response = self._make_request(conn, method, url,
-                                                  timeout=timeout_obj,
-                                                  body=body, headers=headers)
-
-            # If we're going to release the connection in ``finally:``, then
-            # the request doesn't need to know about the connection. Otherwise
-            # it will also try to release it and we'll have a double-release
-            # mess.
-            response_conn = not release_conn and conn
-
-            # Import httplib's response into our own wrapper object
-            response = HTTPResponse.from_httplib(httplib_response,
-                                                 pool=self,
-                                                 connection=response_conn,
-                                                 **response_kw)
-
-            # else:
-            #     The connection will be put back into the pool when
-            #     ``response.release_conn()`` is called (implicitly by
-            #     ``response.read()``)
-
-        except Empty:
-            # Timed out by queue.
-            raise EmptyPoolError(self, "No pool connections are available.")
-
-        except (BaseSSLError, CertificateError) as e:
-            # Close the connection. If a connection is reused on which there
-            # was a Certificate error, the next request will certainly raise
-            # another Certificate error.
-            conn = conn and conn.close()
-            release_conn = True
-            raise SSLError(e)
-
-        except SSLError:
-            # Treat SSLError separately from BaseSSLError to preserve
-            # traceback.
-            conn = conn and conn.close()
-            release_conn = True
-            raise
-
-        except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
-            # Discard the connection for these exceptions. It will be
-            # replaced during the next _get_conn() call.
-            conn = conn and conn.close()
-            release_conn = True
-
-            if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
-                e = ProxyError('Cannot connect to proxy.', e)
-            elif isinstance(e, (SocketError, HTTPException)):
-                e = ProtocolError('Connection aborted.', e)
-
-            retries = retries.increment(method, url, error=e, _pool=self,
-                                        _stacktrace=sys.exc_info()[2])
-            retries.sleep()
-
-            # Keep track of the error for the retry warning.
-            err = e
-
-        finally:
-            if release_conn:
-                # Put the connection back to be reused. If the connection is
-                # expired then it will be None, which will get replaced with a
-                # fresh connection during _get_conn.
-                self._put_conn(conn)
-
-        if not conn:
-            # Try again
-            log.warning("Retrying (%r) after connection "
-                        "broken by '%r': %s" % (retries, err, url))
-            return self.urlopen(method, url, body, headers, retries,
-                                redirect, assert_same_host,
-                                timeout=timeout, pool_timeout=pool_timeout,
-                                release_conn=release_conn, **response_kw)
-
-        # Handle redirect?
-        redirect_location = redirect and response.get_redirect_location()
-        if redirect_location:
-            if response.status == 303:
-                method = 'GET'
-
-            try:
-                retries = retries.increment(method, url, response=response, _pool=self)
-            except MaxRetryError:
-                if retries.raise_on_redirect:
-                    # Release the connection for this response, since we're not
-                    # returning it to be released manually.
-                    response.release_conn()
-                    raise
-                return response
-
-            log.info("Redirecting %s -> %s" % (url, redirect_location))
-            return self.urlopen(
-                method, redirect_location, body, headers,
-                retries=retries, redirect=redirect,
-                assert_same_host=assert_same_host,
-                timeout=timeout, pool_timeout=pool_timeout,
-                release_conn=release_conn, **response_kw)
-
-        # Check if we should retry the HTTP response.
-        if retries.is_forced_retry(method, status_code=response.status):
-            retries = retries.increment(method, url, response=response, _pool=self)
-            retries.sleep()
-            log.info("Forced retry: %s" % url)
-            return self.urlopen(
-                method, url, body, headers,
-                retries=retries, redirect=redirect,
-                assert_same_host=assert_same_host,
-                timeout=timeout, pool_timeout=pool_timeout,
-                release_conn=release_conn, **response_kw)
-
-        return response
-
-
-class HTTPSConnectionPool(HTTPConnectionPool):
-    """
-    Same as :class:`.HTTPConnectionPool`, but HTTPS.
-
-    When Python is compiled with the :mod:`ssl` module, then
-    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
-    instead of :class:`.HTTPSConnection`.
-
-    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
-    ``assert_hostname`` and ``host`` in this order to verify connections.
-    If ``assert_hostname`` is False, no verification is done.
-
-    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
-    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
-    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
-    the connection socket into an SSL socket.
-    """
-
-    scheme = 'https'
-    ConnectionCls = HTTPSConnection
-
-    def __init__(self, host, port=None,
-                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
-                 block=False, headers=None, retries=None,
-                 _proxy=None, _proxy_headers=None,
-                 key_file=None, cert_file=None, cert_reqs=None,
-                 ca_certs=None, ssl_version=None,
-                 assert_hostname=None, assert_fingerprint=None,
-                 ca_cert_dir=None, **conn_kw):
-
-        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
-                                    block, headers, retries, _proxy, _proxy_headers,
-                                    **conn_kw)
-
-        if ca_certs and cert_reqs is None:
-            cert_reqs = 'CERT_REQUIRED'
-
-        self.key_file = key_file
-        self.cert_file = cert_file
-        self.cert_reqs = cert_reqs
-        self.ca_certs = ca_certs
-        self.ca_cert_dir = ca_cert_dir
-        self.ssl_version = ssl_version
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-
-    def _prepare_conn(self, conn):
-        """
-        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
-        and establish the tunnel if proxy is used.
-        """
-
-        if isinstance(conn, VerifiedHTTPSConnection):
-            conn.set_cert(key_file=self.key_file,
-                          cert_file=self.cert_file,
-                          cert_reqs=self.cert_reqs,
-                          ca_certs=self.ca_certs,
-                          ca_cert_dir=self.ca_cert_dir,
-                          assert_hostname=self.assert_hostname,
-                          assert_fingerprint=self.assert_fingerprint)
-            conn.ssl_version = self.ssl_version
-
-        return conn
-
-    def _prepare_proxy(self, conn):
-        """
-        Establish tunnel connection early, because otherwise httplib
-        would improperly set Host: header to proxy's IP:port.
-        """
-        # Python 2.7+
-        try:
-            set_tunnel = conn.set_tunnel
-        except AttributeError:  # Platform-specific: Python 2.6
-            set_tunnel = conn._set_tunnel
-
-        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:   # Python 2.6.4 and older
-            set_tunnel(self.host, self.port)
-        else:
-            set_tunnel(self.host, self.port, self.proxy_headers)
-
-        conn.connect()
-
-    def _new_conn(self):
-        """
-        Return a fresh :class:`httplib.HTTPSConnection`.
-        """
-        self.num_connections += 1
-        log.info("Starting new HTTPS connection (%d): %s"
-                 % (self.num_connections, self.host))
-
-        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
-            raise SSLError("Can't connect to HTTPS URL because the SSL "
-                           "module is not available.")
-
-        actual_host = self.host
-        actual_port = self.port
-        if self.proxy is not None:
-            actual_host = self.proxy.host
-            actual_port = self.proxy.port
-
-        conn = self.ConnectionCls(host=actual_host, port=actual_port,
-                                  timeout=self.timeout.connect_timeout,
-                                  strict=self.strict, **self.conn_kw)
-
-        return self._prepare_conn(conn)
-
-    def _validate_conn(self, conn):
-        """
-        Called right before a request is made, after the socket is created.
-        """
-        super(HTTPSConnectionPool, self)._validate_conn(conn)
-
-        # Force connect early to allow us to validate the connection.
-        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
-            conn.connect()
-
-        if not conn.is_verified:
-            warnings.warn((
-                'Unverified HTTPS request is being made. '
-                'Adding certificate verification is strongly advised. See: '
-                'https://urllib3.readthedocs.org/en/latest/security.html'),
-                InsecureRequestWarning)
-
-
-def connection_from_url(url, **kw):
-    """
-    Given a url, return an :class:`.ConnectionPool` instance of its host.
-
-    This is a shortcut for not having to parse out the scheme, host, and port
-    of the url before creating an :class:`.ConnectionPool` instance.
-
-    :param url:
-        Absolute URL string that must include the scheme. Port is optional.
-
-    :param \**kw:
-        Passes additional parameters to the constructor of the appropriate
-        :class:`.ConnectionPool`. Useful for specifying things like
-        timeout, maxsize, headers, etc.
-
-    Example::
-
-        >>> conn = connection_from_url('http://google.com/')
-        >>> r = conn.request('GET', '/')
-    """
-    scheme, host, port = get_host(url)
-    if scheme == 'https':
-        return HTTPSConnectionPool(host, port=port, **kw)
-    else:
-        return HTTPConnectionPool(host, port=port, **kw)
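The connection-pool module deleted above documents its urlopen() contract at length. As a quick reference, here is a minimal, hedged usage sketch of that API; the host is just an illustrative endpoint, and any urllib3 1.x release with this module layout is assumed:

    from urllib3.connectionpool import connection_from_url
    from urllib3.util.retry import Retry

    # connection_from_url() picks HTTPConnectionPool or HTTPSConnectionPool by scheme.
    pool = connection_from_url('http://example.com/')

    # urlopen() is the lowest-level call: method, url, and an explicit retry/timeout policy.
    response = pool.urlopen(
        'GET', '/',
        retries=Retry(total=3, redirect=2),  # fine-grained control, per the docstring
        timeout=5.0,                         # float seconds or a urllib3.util.Timeout
    )
    print(response.status, len(response.data))

    pool.close()  # drain and disable the pool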
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/__init__.py b/tools/swarming_client/third_party/requests/packages/urllib3/contrib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/appengine.py b/tools/swarming_client/third_party/requests/packages/urllib3/contrib/appengine.py
deleted file mode 100644
index 884cdb2..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/appengine.py
+++ /dev/null
@@ -1,223 +0,0 @@
-from __future__ import absolute_import
-import logging
-import os
-import warnings
-
-from ..exceptions import (
-    HTTPError,
-    HTTPWarning,
-    MaxRetryError,
-    ProtocolError,
-    TimeoutError,
-    SSLError
-)
-
-from ..packages.six import BytesIO
-from ..request import RequestMethods
-from ..response import HTTPResponse
-from ..util.timeout import Timeout
-from ..util.retry import Retry
-
-try:
-    from google.appengine.api import urlfetch
-except ImportError:
-    urlfetch = None
-
-
-log = logging.getLogger(__name__)
-
-
-class AppEnginePlatformWarning(HTTPWarning):
-    pass
-
-
-class AppEnginePlatformError(HTTPError):
-    pass
-
-
-class AppEngineManager(RequestMethods):
-    """
-    Connection manager for Google App Engine sandbox applications.
-
-    This manager uses the URLFetch service directly instead of using the
-    emulated httplib, and is subject to URLFetch limitations as described in
-    the App Engine documentation here:
-
-        https://cloud.google.com/appengine/docs/python/urlfetch
-
-    Notably it will raise an AppEnginePlatformError if:
-        * URLFetch is not available.
-        * You attempt to use this on GAEv2 (Managed VMs), where full socket
-          support is available and URLFetch is unnecessary.
-        * A request body is larger than 10 megabytes.
-        * A response body is larger than 32 megabytes.
-        * You use an unsupported request method such as OPTIONS.
-
-    Beyond those cases, it will raise normal urllib3 errors.
-    """
-
-    def __init__(self, headers=None, retries=None, validate_certificate=True):
-        if not urlfetch:
-            raise AppEnginePlatformError(
-                "URLFetch is not available in this environment.")
-
-        if is_prod_appengine_mvms():
-            raise AppEnginePlatformError(
-                "Use normal urllib3.PoolManager instead of AppEngineManager"
-                "on Managed VMs, as using URLFetch is not necessary in "
-                "this environment.")
-
-        warnings.warn(
-            "urllib3 is using URLFetch on Google App Engine sandbox instead "
-            "of sockets. To use sockets directly instead of URLFetch see "
-            "https://urllib3.readthedocs.org/en/latest/contrib.html.",
-            AppEnginePlatformWarning)
-
-        RequestMethods.__init__(self, headers)
-        self.validate_certificate = validate_certificate
-
-        self.retries = retries or Retry.DEFAULT
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def urlopen(self, method, url, body=None, headers=None,
-                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
-                **response_kw):
-
-        retries = self._get_retries(retries, redirect)
-
-        try:
-            response = urlfetch.fetch(
-                url,
-                payload=body,
-                method=method,
-                headers=headers or {},
-                allow_truncated=False,
-                follow_redirects=(
-                    redirect and
-                    retries.redirect != 0 and
-                    retries.total),
-                deadline=self._get_absolute_timeout(timeout),
-                validate_certificate=self.validate_certificate,
-            )
-        except urlfetch.DeadlineExceededError as e:
-            raise TimeoutError(self, e)
-
-        except urlfetch.InvalidURLError as e:
-            if 'too large' in str(e):
-                raise AppEnginePlatformError(
-                    "URLFetch request too large, URLFetch only "
-                    "supports requests up to 10mb in size.", e)
-            raise ProtocolError(e)
-
-        except urlfetch.DownloadError as e:
-            if 'Too many redirects' in str(e):
-                raise MaxRetryError(self, url, reason=e)
-            raise ProtocolError(e)
-
-        except urlfetch.ResponseTooLargeError as e:
-            raise AppEnginePlatformError(
-                "URLFetch response too large, URLFetch only supports"
-                "responses up to 32mb in size.", e)
-
-        except urlfetch.SSLCertificateError as e:
-            raise SSLError(e)
-
-        except urlfetch.InvalidMethodError as e:
-            raise AppEnginePlatformError(
-                "URLFetch does not support method: %s" % method, e)
-
-        http_response = self._urlfetch_response_to_http_response(
-            response, **response_kw)
-
-        # Check for redirect response
-        if (http_response.get_redirect_location() and
-                retries.raise_on_redirect and redirect):
-            raise MaxRetryError(self, url, "too many redirects")
-
-        # Check if we should retry the HTTP response.
-        if retries.is_forced_retry(method, status_code=http_response.status):
-            retries = retries.increment(
-                method, url, response=http_response, _pool=self)
-            log.info("Forced retry: %s" % url)
-            retries.sleep()
-            return self.urlopen(
-                method, url,
-                body=body, headers=headers,
-                retries=retries, redirect=redirect,
-                timeout=timeout, **response_kw)
-
-        return http_response
-
-    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
-
-        if is_prod_appengine():
-            # Production GAE handles deflate encoding automatically, but does
-            # not remove the encoding header.
-            content_encoding = urlfetch_resp.headers.get('content-encoding')
-
-            if content_encoding == 'deflate':
-                del urlfetch_resp.headers['content-encoding']
-
-        return HTTPResponse(
-            # In order for decoding to work, we must present the content as
-            # a file-like object.
-            body=BytesIO(urlfetch_resp.content),
-            headers=urlfetch_resp.headers,
-            status=urlfetch_resp.status_code,
-            **response_kw
-        )
-
-    def _get_absolute_timeout(self, timeout):
-        if timeout is Timeout.DEFAULT_TIMEOUT:
-            return 5  # 5s is the default timeout for URLFetch.
-        if isinstance(timeout, Timeout):
-            if timeout.read is not timeout.connect:
-                warnings.warn(
-                    "URLFetch does not support granular timeout settings, "
-                    "reverting to total timeout.", AppEnginePlatformWarning)
-            return timeout.total
-        return timeout
-
-    def _get_retries(self, retries, redirect):
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(
-                retries, redirect=redirect, default=self.retries)
-
-        if retries.connect or retries.read or retries.redirect:
-            warnings.warn(
-                "URLFetch only supports total retries and does not "
-                "recognize connect, read, or redirect retry parameters.",
-                AppEnginePlatformWarning)
-
-        return retries
-
-
-def is_appengine():
-    return (is_local_appengine() or
-            is_prod_appengine() or
-            is_prod_appengine_mvms())
-
-
-def is_appengine_sandbox():
-    return is_appengine() and not is_prod_appengine_mvms()
-
-
-def is_local_appengine():
-    return ('APPENGINE_RUNTIME' in os.environ and
-            'Development/' in os.environ['SERVER_SOFTWARE'])
-
-
-def is_prod_appengine():
-    return ('APPENGINE_RUNTIME' in os.environ and
-            'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
-            not is_prod_appengine_mvms())
-
-
-def is_prod_appengine_mvms():
-    return os.environ.get('GAE_VM', False) == 'true'
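For context on the AppEngineManager removed above, here is a small hedged sketch of how it was intended to be used inside the first-generation App Engine sandbox (where only URLFetch is available); outside the sandbox the regular PoolManager applies, and the URL is a placeholder:

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    # URLFetch-backed manager inside the sandbox, socket-backed PoolManager elsewhere.
    http = AppEngineManager() if is_appengine_sandbox() else PoolManager()

    # Only total retries are honoured by URLFetch, as the _get_retries() warning notes.
    r = http.request('GET', 'http://example.com/', retries=2)
    print(r.status)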
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/ntlmpool.py b/tools/swarming_client/third_party/requests/packages/urllib3/contrib/ntlmpool.py
deleted file mode 100644
index c136a23..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/ntlmpool.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-NTLM authenticating pool, contributed by erikcederstran
-
-Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
-"""
-from __future__ import absolute_import
-
-try:
-    from http.client import HTTPSConnection
-except ImportError:
-    from httplib import HTTPSConnection
-from logging import getLogger
-from ntlm import ntlm
-
-from urllib3 import HTTPSConnectionPool
-
-
-log = getLogger(__name__)
-
-
-class NTLMConnectionPool(HTTPSConnectionPool):
-    """
-    Implements an NTLM authentication version of an urllib3 connection pool
-    """
-
-    scheme = 'https'
-
-    def __init__(self, user, pw, authurl, *args, **kwargs):
-        """
-        authurl is a random URL on the server that is protected by NTLM.
-        user is the Windows user, probably in the DOMAIN\\username format.
-        pw is the password for the user.
-        """
-        super(NTLMConnectionPool, self).__init__(*args, **kwargs)
-        self.authurl = authurl
-        self.rawuser = user
-        user_parts = user.split('\\', 1)
-        self.domain = user_parts[0].upper()
-        self.user = user_parts[1]
-        self.pw = pw
-
-    def _new_conn(self):
-        # Performs the NTLM handshake that secures the connection. The socket
-        # must be kept open while requests are performed.
-        self.num_connections += 1
-        log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
-                  (self.num_connections, self.host, self.authurl))
-
-        headers = {}
-        headers['Connection'] = 'Keep-Alive'
-        req_header = 'Authorization'
-        resp_header = 'www-authenticate'
-
-        conn = HTTPSConnection(host=self.host, port=self.port)
-
-        # Send negotiation message
-        headers[req_header] = (
-            'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
-        log.debug('Request headers: %s' % headers)
-        conn.request('GET', self.authurl, None, headers)
-        res = conn.getresponse()
-        reshdr = dict(res.getheaders())
-        log.debug('Response status: %s %s' % (res.status, res.reason))
-        log.debug('Response headers: %s' % reshdr)
-        log.debug('Response data: %s [...]' % res.read(100))
-
-        # Remove the reference to the socket, so that it can not be closed by
-        # the response object (we want to keep the socket open)
-        res.fp = None
-
-        # Server should respond with a challenge message
-        auth_header_values = reshdr[resp_header].split(', ')
-        auth_header_value = None
-        for s in auth_header_values:
-            if s[:5] == 'NTLM ':
-                auth_header_value = s[5:]
-        if auth_header_value is None:
-            raise Exception('Unexpected %s response header: %s' %
-                            (resp_header, reshdr[resp_header]))
-
-        # Send authentication message
-        ServerChallenge, NegotiateFlags = \
-            ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
-        auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
-                                                         self.user,
-                                                         self.domain,
-                                                         self.pw,
-                                                         NegotiateFlags)
-        headers[req_header] = 'NTLM %s' % auth_msg
-        log.debug('Request headers: %s' % headers)
-        conn.request('GET', self.authurl, None, headers)
-        res = conn.getresponse()
-        log.debug('Response status: %s %s' % (res.status, res.reason))
-        log.debug('Response headers: %s' % dict(res.getheaders()))
-        log.debug('Response data: %s [...]' % res.read()[:100])
-        if res.status != 200:
-            if res.status == 401:
-                raise Exception('Server rejected request: wrong '
-                                'username or password')
-            raise Exception('Wrong server response: %s %s' %
-                            (res.status, res.reason))
-
-        res.fp = None
-        log.debug('Connection established')
-        return conn
-
-    def urlopen(self, method, url, body=None, headers=None, retries=3,
-                redirect=True, assert_same_host=True):
-        if headers is None:
-            headers = {}
-        headers['Connection'] = 'Keep-Alive'
-        return super(NTLMConnectionPool, self).urlopen(method, url, body,
-                                                       headers, retries,
-                                                       redirect,
-                                                       assert_same_host)
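A brief, hedged sketch of the NTLMConnectionPool API deleted above; the host, credentials, and protected URL are placeholders, and the third-party ntlm package must be installed:

    from urllib3.contrib.ntlmpool import NTLMConnectionPool

    pool = NTLMConnectionPool(
        user=r'EXAMPLE\jdoe',        # DOMAIN\username, as the constructor docstring expects
        pw='secret',
        authurl='/ntlm-protected/',  # any URL on the server that is guarded by NTLM
        host='intranet.example.com',
        port=443,
    )
    r = pool.urlopen('GET', '/ntlm-protected/report')
    print(r.status)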
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/pyopenssl.py b/tools/swarming_client/third_party/requests/packages/urllib3/contrib/pyopenssl.py
deleted file mode 100644
index 5996153..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/contrib/pyopenssl.py
+++ /dev/null
@@ -1,310 +0,0 @@
-'''SSL with SNI_-support for Python 2. Follow these instructions if you would
-like to verify SSL certificates in Python 2. Note, the default libraries do
-*not* do certificate checking; you need to do additional work to validate
-certificates yourself.
-
-This needs the following packages installed:
-
-* pyOpenSSL (tested with 0.13)
-* ndg-httpsclient (tested with 0.3.2)
-* pyasn1 (tested with 0.1.6)
-
-You can install them with the following command:
-
-    pip install pyopenssl ndg-httpsclient pyasn1
-
-To activate certificate checking, call
-:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
-before you begin making HTTP requests. This can be done in a ``sitecustomize``
-module, or at any other time before your application begins using ``urllib3``,
-like this::
-
-    try:
-        import urllib3.contrib.pyopenssl
-        urllib3.contrib.pyopenssl.inject_into_urllib3()
-    except ImportError:
-        pass
-
-Now you can use :mod:`urllib3` as you normally would, and it will support SNI
-when the required modules are installed.
-
-Activating this module also has the positive side effect of disabling SSL/TLS
-compression in Python 2 (see `CRIME attack`_).
-
-If you want to configure the default list of supported cipher suites, you can
-set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
-
-Module Variables
-----------------
-
-:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
-
-.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
-.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
-
-'''
-from __future__ import absolute_import
-
-try:
-    from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
-    from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
-except SyntaxError as e:
-    raise ImportError(e)
-
-import OpenSSL.SSL
-from pyasn1.codec.der import decoder as der_decoder
-from pyasn1.type import univ, constraint
-from socket import _fileobject, timeout, error as SocketError
-import ssl
-import select
-
-from .. import connection
-from .. import util
-
-__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
-
-# SNI only *really* works if we can read the subjectAltName of certificates.
-HAS_SNI = SUBJ_ALT_NAME_SUPPORT
-
-# Map from urllib3 to PyOpenSSL compatible parameter-values.
-_openssl_versions = {
-    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
-    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
-}
-
-if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
-    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
-
-if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
-    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
-
-try:
-    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
-except AttributeError:
-    pass
-
-_openssl_verify = {
-    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
-    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
-    ssl.CERT_REQUIRED:
-        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
-}
-
-DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
-
-# OpenSSL will only write 16K at a time
-SSL_WRITE_BLOCKSIZE = 16384
-
-orig_util_HAS_SNI = util.HAS_SNI
-orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
-
-
-def inject_into_urllib3():
-    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
-
-    connection.ssl_wrap_socket = ssl_wrap_socket
-    util.HAS_SNI = HAS_SNI
-
-
-def extract_from_urllib3():
-    'Undo monkey-patching by :func:`inject_into_urllib3`.'
-
-    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
-    util.HAS_SNI = orig_util_HAS_SNI
-
-
-# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
-class SubjectAltName(BaseSubjectAltName):
-    '''ASN.1 implementation for subjectAltNames support'''
-
-    # There is no limit to how many SAN certificates a certificate may have,
-    #   however this needs to have some limit so we'll set an arbitrarily high
-    #   limit.
-    sizeSpec = univ.SequenceOf.sizeSpec + \
-        constraint.ValueSizeConstraint(1, 1024)
-
-
-# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
-def get_subj_alt_name(peer_cert):
-    # Search through extensions
-    dns_name = []
-    if not SUBJ_ALT_NAME_SUPPORT:
-        return dns_name
-
-    general_names = SubjectAltName()
-    for i in range(peer_cert.get_extension_count()):
-        ext = peer_cert.get_extension(i)
-        ext_name = ext.get_short_name()
-        if ext_name != 'subjectAltName':
-            continue
-
-        # PyOpenSSL returns extension data in ASN.1 encoded form
-        ext_dat = ext.get_data()
-        decoded_dat = der_decoder.decode(ext_dat,
-                                         asn1Spec=general_names)
-
-        for name in decoded_dat:
-            if not isinstance(name, SubjectAltName):
-                continue
-            for entry in range(len(name)):
-                component = name.getComponentByPosition(entry)
-                if component.getName() != 'dNSName':
-                    continue
-                dns_name.append(str(component.getComponent()))
-
-    return dns_name
-
-
-class WrappedSocket(object):
-    '''API-compatibility wrapper for Python OpenSSL's Connection-class.
-
-    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
-    collector of pypy.
-    '''
-
-    def __init__(self, connection, socket, suppress_ragged_eofs=True):
-        self.connection = connection
-        self.socket = socket
-        self.suppress_ragged_eofs = suppress_ragged_eofs
-        self._makefile_refs = 0
-
-    def fileno(self):
-        return self.socket.fileno()
-
-    def makefile(self, mode, bufsize=-1):
-        self._makefile_refs += 1
-        return _fileobject(self, mode, bufsize, close=True)
-
-    def recv(self, *args, **kwargs):
-        try:
-            data = self.connection.recv(*args, **kwargs)
-        except OpenSSL.SSL.SysCallError as e:
-            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
-                return b''
-            else:
-                raise SocketError(e)
-        except OpenSSL.SSL.ZeroReturnError as e:
-            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
-                return b''
-            else:
-                raise
-        except OpenSSL.SSL.WantReadError:
-            rd, wd, ed = select.select(
-                [self.socket], [], [], self.socket.gettimeout())
-            if not rd:
-                raise timeout('The read operation timed out')
-            else:
-                return self.recv(*args, **kwargs)
-        else:
-            return data
-
-    def settimeout(self, timeout):
-        return self.socket.settimeout(timeout)
-
-    def _send_until_done(self, data):
-        while True:
-            try:
-                return self.connection.send(data)
-            except OpenSSL.SSL.WantWriteError:
-                _, wlist, _ = select.select([], [self.socket], [],
-                                            self.socket.gettimeout())
-                if not wlist:
-                    raise timeout()
-                continue
-
-    def sendall(self, data):
-        total_sent = 0
-        while total_sent < len(data):
-            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
-            total_sent += sent
-
-    def shutdown(self):
-        # FIXME rethrow compatible exceptions should we ever use this
-        self.connection.shutdown()
-
-    def close(self):
-        if self._makefile_refs < 1:
-            try:
-                return self.connection.close()
-            except OpenSSL.SSL.Error:
-                return
-        else:
-            self._makefile_refs -= 1
-
-    def getpeercert(self, binary_form=False):
-        x509 = self.connection.get_peer_certificate()
-
-        if not x509:
-            return x509
-
-        if binary_form:
-            return OpenSSL.crypto.dump_certificate(
-                OpenSSL.crypto.FILETYPE_ASN1,
-                x509)
-
-        return {
-            'subject': (
-                (('commonName', x509.get_subject().CN),),
-            ),
-            'subjectAltName': [
-                ('DNS', value)
-                for value in get_subj_alt_name(x509)
-            ]
-        }
-
-    def _reuse(self):
-        self._makefile_refs += 1
-
-    def _drop(self):
-        if self._makefile_refs < 1:
-            self.close()
-        else:
-            self._makefile_refs -= 1
-
-
-def _verify_callback(cnx, x509, err_no, err_depth, return_code):
-    return err_no == 0
-
-
-def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
-                    ca_certs=None, server_hostname=None,
-                    ssl_version=None, ca_cert_dir=None):
-    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
-    if certfile:
-        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
-        ctx.use_certificate_file(certfile)
-    if keyfile:
-        ctx.use_privatekey_file(keyfile)
-    if cert_reqs != ssl.CERT_NONE:
-        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
-    if ca_certs or ca_cert_dir:
-        try:
-            ctx.load_verify_locations(ca_certs, ca_cert_dir)
-        except OpenSSL.SSL.Error as e:
-            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
-    else:
-        ctx.set_default_verify_paths()
-
-    # Disable TLS compression to mitigate CRIME attack (issue #309)
-    OP_NO_COMPRESSION = 0x20000
-    ctx.set_options(OP_NO_COMPRESSION)
-
-    # Set list of supported ciphersuites.
-    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)
-
-    cnx = OpenSSL.SSL.Connection(ctx, sock)
-    cnx.set_tlsext_host_name(server_hostname)
-    cnx.set_connect_state()
-    while True:
-        try:
-            cnx.do_handshake()
-        except OpenSSL.SSL.WantReadError:
-            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
-            if not rd:
-                raise timeout('select timed out')
-            continue
-        except OpenSSL.SSL.Error as e:
-            raise ssl.SSLError('bad handshake: %r' % e)
-        break
-
-    return WrappedSocket(cnx, sock)
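The module docstring above already shows the injection call; the hedged sketch below adds the matching teardown, assuming pyOpenSSL, ndg-httpsclient, and pyasn1 are installed under Python 2 as that docstring requires:

    import urllib3
    import urllib3.contrib.pyopenssl

    urllib3.contrib.pyopenssl.inject_into_urllib3()   # route ssl_wrap_socket through PyOpenSSL
    try:
        http = urllib3.PoolManager()
        r = http.request('GET', 'https://example.com/')
        print(r.status)
    finally:
        urllib3.contrib.pyopenssl.extract_from_urllib3()  # restore the stdlib ssl path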
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/exceptions.py b/tools/swarming_client/third_party/requests/packages/urllib3/exceptions.py
deleted file mode 100644
index 8e07eb6..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/exceptions.py
+++ /dev/null
@@ -1,201 +0,0 @@
-from __future__ import absolute_import
-# Base Exceptions
-
-
-class HTTPError(Exception):
-    "Base exception used by this module."
-    pass
-
-
-class HTTPWarning(Warning):
-    "Base warning used by this module."
-    pass
-
-
-class PoolError(HTTPError):
-    "Base exception for errors caused within a pool."
-    def __init__(self, pool, message):
-        self.pool = pool
-        HTTPError.__init__(self, "%s: %s" % (pool, message))
-
-    def __reduce__(self):
-        # For pickling purposes.
-        return self.__class__, (None, None)
-
-
-class RequestError(PoolError):
-    "Base exception for PoolErrors that have associated URLs."
-    def __init__(self, pool, url, message):
-        self.url = url
-        PoolError.__init__(self, pool, message)
-
-    def __reduce__(self):
-        # For pickling purposes.
-        return self.__class__, (None, self.url, None)
-
-
-class SSLError(HTTPError):
-    "Raised when SSL certificate fails in an HTTPS connection."
-    pass
-
-
-class ProxyError(HTTPError):
-    "Raised when the connection to a proxy fails."
-    pass
-
-
-class DecodeError(HTTPError):
-    "Raised when automatic decoding based on Content-Type fails."
-    pass
-
-
-class ProtocolError(HTTPError):
-    "Raised when something unexpected happens mid-request/response."
-    pass
-
-
-#: Renamed to ProtocolError but aliased for backwards compatibility.
-ConnectionError = ProtocolError
-
-
-# Leaf Exceptions
-
-class MaxRetryError(RequestError):
-    """Raised when the maximum number of retries is exceeded.
-
-    :param pool: The connection pool
-    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
-    :param string url: The requested Url
-    :param exceptions.Exception reason: The underlying error
-
-    """
-
-    def __init__(self, pool, url, reason=None):
-        self.reason = reason
-
-        message = "Max retries exceeded with url: %s (Caused by %r)" % (
-            url, reason)
-
-        RequestError.__init__(self, pool, url, message)
-
-
-class HostChangedError(RequestError):
-    "Raised when an existing pool gets a request for a foreign host."
-
-    def __init__(self, pool, url, retries=3):
-        message = "Tried to open a foreign host with url: %s" % url
-        RequestError.__init__(self, pool, url, message)
-        self.retries = retries
-
-
-class TimeoutStateError(HTTPError):
-    """ Raised when passing an invalid state to a timeout """
-    pass
-
-
-class TimeoutError(HTTPError):
-    """ Raised when a socket timeout error occurs.
-
-    Catching this error will catch both :exc:`ReadTimeoutErrors
-    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
-    """
-    pass
-
-
-class ReadTimeoutError(TimeoutError, RequestError):
-    "Raised when a socket timeout occurs while receiving data from a server"
-    pass
-
-
-# This timeout error does not have a URL attached and needs to inherit from the
-# base HTTPError
-class ConnectTimeoutError(TimeoutError):
-    "Raised when a socket timeout occurs while connecting to a server"
-    pass
-
-
-class NewConnectionError(ConnectTimeoutError, PoolError):
-    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
-    pass
-
-
-class EmptyPoolError(PoolError):
-    "Raised when a pool runs out of connections and no more are allowed."
-    pass
-
-
-class ClosedPoolError(PoolError):
-    "Raised when a request enters a pool after the pool has been closed."
-    pass
-
-
-class LocationValueError(ValueError, HTTPError):
-    "Raised when there is something wrong with a given URL input."
-    pass
-
-
-class LocationParseError(LocationValueError):
-    "Raised when get_host or similar fails to parse the URL input."
-
-    def __init__(self, location):
-        message = "Failed to parse: %s" % location
-        HTTPError.__init__(self, message)
-
-        self.location = location
-
-
-class ResponseError(HTTPError):
-    "Used as a container for an error reason supplied in a MaxRetryError."
-    GENERIC_ERROR = 'too many error responses'
-    SPECIFIC_ERROR = 'too many {status_code} error responses'
-
-
-class SecurityWarning(HTTPWarning):
-    "Warned when perfoming security reducing actions"
-    pass
-
-
-class SubjectAltNameWarning(SecurityWarning):
-    "Warned when connecting to a host with a certificate missing a SAN."
-    pass
-
-
-class InsecureRequestWarning(SecurityWarning):
-    "Warned when making an unverified HTTPS request."
-    pass
-
-
-class SystemTimeWarning(SecurityWarning):
-    "Warned when system time is suspected to be wrong"
-    pass
-
-
-class InsecurePlatformWarning(SecurityWarning):
-    "Warned when certain SSL configuration is not available on a platform."
-    pass
-
-
-class SNIMissingWarning(HTTPWarning):
-    "Warned when making a HTTPS request without SNI available."
-    pass
-
-
-class ResponseNotChunked(ProtocolError, ValueError):
-    "Response needs to be chunked in order to read it as chunks."
-    pass
-
-
-class ProxySchemeUnknown(AssertionError, ValueError):
-    "ProxyManager does not support the supplied scheme"
-    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
-
-    def __init__(self, scheme):
-        message = "Not supported proxy scheme %s" % scheme
-        super(ProxySchemeUnknown, self).__init__(message)
-
-
-class HeaderParsingError(HTTPError):
-    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
-    def __init__(self, defects, unparsed_data):
-        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
-        super(HeaderParsingError, self).__init__(message)
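To show how the exception hierarchy removed above is typically consumed, here is a small hedged sketch; the hostname is deliberately unresolvable:

    import urllib3
    from urllib3.exceptions import MaxRetryError, SSLError, TimeoutError

    http = urllib3.PoolManager()
    try:
        http.request('GET', 'http://no-such-host.invalid/', retries=1, timeout=2.0)
    except MaxRetryError as e:
        # MaxRetryError is a RequestError, so it carries both the URL and the cause.
        print('gave up on %s, caused by %r' % (e.url, e.reason))
    except (TimeoutError, SSLError) as e:
        print('transport failure: %r' % e)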
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/fields.py b/tools/swarming_client/third_party/requests/packages/urllib3/fields.py
deleted file mode 100644
index c7d4811..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/fields.py
+++ /dev/null
@@ -1,178 +0,0 @@
-from __future__ import absolute_import
-import email.utils
-import mimetypes
-
-from .packages import six
-
-
-def guess_content_type(filename, default='application/octet-stream'):
-    """
-    Guess the "Content-Type" of a file.
-
-    :param filename:
-        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
-    :param default:
-        If no "Content-Type" can be guessed, default to `default`.
-    """
-    if filename:
-        return mimetypes.guess_type(filename)[0] or default
-    return default
-
-
-def format_header_param(name, value):
-    """
-    Helper function to format and quote a single header parameter.
-
-    Particularly useful for header parameters which might contain
-    non-ASCII values, like file names. This follows RFC 2231, as
-    suggested by RFC 2388 Section 4.4.
-
-    :param name:
-        The name of the parameter, a string expected to be ASCII only.
-    :param value:
-        The value of the parameter, provided as a unicode string.
-    """
-    if not any(ch in value for ch in '"\\\r\n'):
-        result = '%s="%s"' % (name, value)
-        try:
-            result.encode('ascii')
-        except UnicodeEncodeError:
-            pass
-        else:
-            return result
-    if not six.PY3:  # Python 2:
-        value = value.encode('utf-8')
-    value = email.utils.encode_rfc2231(value, 'utf-8')
-    value = '%s*=%s' % (name, value)
-    return value
-
-
-class RequestField(object):
-    """
-    A data container for request body parameters.
-
-    :param name:
-        The name of this request field.
-    :param data:
-        The data/value body.
-    :param filename:
-        An optional filename of the request field.
-    :param headers:
-        An optional dict-like object of headers to initially use for the field.
-    """
-    def __init__(self, name, data, filename=None, headers=None):
-        self._name = name
-        self._filename = filename
-        self.data = data
-        self.headers = {}
-        if headers:
-            self.headers = dict(headers)
-
-    @classmethod
-    def from_tuples(cls, fieldname, value):
-        """
-        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
-
-        Supports constructing :class:`~urllib3.fields.RequestField` from
-        parameter of key/value strings AND key/filetuple. A filetuple is a
-        (filename, data, MIME type) tuple where the MIME type is optional.
-        For example::
-
-            'foo': 'bar',
-            'fakefile': ('foofile.txt', 'contents of foofile'),
-            'realfile': ('barfile.txt', open('realfile').read()),
-            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
-            'nonamefile': 'contents of nonamefile field',
-
-        Field names and filenames must be unicode.
-        """
-        if isinstance(value, tuple):
-            if len(value) == 3:
-                filename, data, content_type = value
-            else:
-                filename, data = value
-                content_type = guess_content_type(filename)
-        else:
-            filename = None
-            content_type = None
-            data = value
-
-        request_param = cls(fieldname, data, filename=filename)
-        request_param.make_multipart(content_type=content_type)
-
-        return request_param
-
-    def _render_part(self, name, value):
-        """
-        Overridable helper function to format a single header parameter.
-
-        :param name:
-            The name of the parameter, a string expected to be ASCII only.
-        :param value:
-            The value of the parameter, provided as a unicode string.
-        """
-        return format_header_param(name, value)
-
-    def _render_parts(self, header_parts):
-        """
-        Helper function to format and quote a single header.
-
-        Useful for single headers that are composed of multiple items. E.g.,
-        'Content-Disposition' fields.
-
-        :param header_parts:
-            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
-            as `k1="v1"; k2="v2"; ...`.
-        """
-        parts = []
-        iterable = header_parts
-        if isinstance(header_parts, dict):
-            iterable = header_parts.items()
-
-        for name, value in iterable:
-            if value:
-                parts.append(self._render_part(name, value))
-
-        return '; '.join(parts)
-
-    def render_headers(self):
-        """
-        Renders the headers for this request field.
-        """
-        lines = []
-
-        sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location']
-        for sort_key in sort_keys:
-            if self.headers.get(sort_key, False):
-                lines.append('%s: %s' % (sort_key, self.headers[sort_key]))
-
-        for header_name, header_value in self.headers.items():
-            if header_name not in sort_keys:
-                if header_value:
-                    lines.append('%s: %s' % (header_name, header_value))
-
-        lines.append('\r\n')
-        return '\r\n'.join(lines)
-
-    def make_multipart(self, content_disposition=None, content_type=None,
-                       content_location=None):
-        """
-        Makes this request field into a multipart request field.
-
-        This method sets the "Content-Disposition", "Content-Type" and
-        "Content-Location" headers on the request field.
-
-        :param content_type:
-            The 'Content-Type' of the request body.
-        :param content_location:
-            The 'Content-Location' of the request body.
-
-        """
-        self.headers['Content-Disposition'] = content_disposition or 'form-data'
-        self.headers['Content-Disposition'] += '; '.join([
-            '', self._render_parts(
-                (('name', self._name), ('filename', self._filename))
-            )
-        ])
-        self.headers['Content-Type'] = content_type
-        self.headers['Content-Location'] = content_location
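A short hedged sketch of the RequestField helpers deleted above, using made-up field names and file contents:

    from urllib3.fields import RequestField, guess_content_type

    # Old-style (filename, data, MIME type) tuple, as accepted by from_tuples().
    field = RequestField.from_tuples('avatar', ('photo.png', b'fake image bytes', 'image/png'))
    print(field.render_headers())        # Content-Disposition / Content-Type header block

    plain = RequestField('comment', u'hello')
    plain.make_multipart()               # defaults the disposition to form-data
    print(guess_content_type('notes.txt'))   # -> 'text/plain'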
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/filepost.py b/tools/swarming_client/third_party/requests/packages/urllib3/filepost.py
deleted file mode 100644
index 97a2843..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/filepost.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from __future__ import absolute_import
-import codecs
-
-from uuid import uuid4
-from io import BytesIO
-
-from .packages import six
-from .packages.six import b
-from .fields import RequestField
-
-writer = codecs.lookup('utf-8')[3]
-
-
-def choose_boundary():
-    """
-    Our embarrassingly-simple replacement for mimetools.choose_boundary.
-    """
-    return uuid4().hex
-
-
-def iter_field_objects(fields):
-    """
-    Iterate over fields.
-
-    Supports list of (k, v) tuples and dicts, and lists of
-    :class:`~urllib3.fields.RequestField`.
-
-    """
-    if isinstance(fields, dict):
-        i = six.iteritems(fields)
-    else:
-        i = iter(fields)
-
-    for field in i:
-        if isinstance(field, RequestField):
-            yield field
-        else:
-            yield RequestField.from_tuples(*field)
-
-
-def iter_fields(fields):
-    """
-    .. deprecated:: 1.6
-
-    Iterate over fields.
-
-    The addition of :class:`~urllib3.fields.RequestField` makes this function
-    obsolete. Instead, use :func:`iter_field_objects`, which returns
-    :class:`~urllib3.fields.RequestField` objects.
-
-    Supports list of (k, v) tuples and dicts.
-    """
-    if isinstance(fields, dict):
-        return ((k, v) for k, v in six.iteritems(fields))
-
-    return ((k, v) for k, v in fields)
-
-
-def encode_multipart_formdata(fields, boundary=None):
-    """
-    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
-
-    :param fields:
-        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
-
-    :param boundary:
-        If not specified, then a random boundary will be generated using
-        :func:`urllib3.filepost.choose_boundary`.
-    """
-    body = BytesIO()
-    if boundary is None:
-        boundary = choose_boundary()
-
-    for field in iter_field_objects(fields):
-        body.write(b('--%s\r\n' % (boundary)))
-
-        writer(body).write(field.render_headers())
-        data = field.data
-
-        if isinstance(data, int):
-            data = str(data)  # Backwards compatibility
-
-        if isinstance(data, six.text_type):
-            writer(body).write(data)
-        else:
-            body.write(data)
-
-        body.write(b'\r\n')
-
-    body.write(b('--%s--\r\n' % (boundary)))
-
-    content_type = str('multipart/form-data; boundary=%s' % boundary)
-
-    return body.getvalue(), content_type
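Finally, a minimal hedged sketch of encode_multipart_formdata() as removed above; the field names and file body are illustrative only:

    from urllib3.filepost import encode_multipart_formdata

    body, content_type = encode_multipart_formdata({
        'comment': 'hello world',
        'upload': ('notes.txt', 'first line\nsecond line\n', 'text/plain'),
    })
    print(content_type)          # multipart/form-data; boundary=<random hex>
    print(len(body), 'bytes')    # ready to be sent as a request body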
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/packages/__init__.py b/tools/swarming_client/third_party/requests/packages/urllib3/packages/__init__.py
deleted file mode 100644
index 170e974..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/packages/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from __future__ import absolute_import
-
-from . import ssl_match_hostname
-
-__all__ = ('ssl_match_hostname', )
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ordered_dict.py b/tools/swarming_client/third_party/requests/packages/urllib3/packages/ordered_dict.py
deleted file mode 100644
index 4479363..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ordered_dict.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-# Copyright 2009 Raymond Hettinger, released under the MIT License.
-# http://code.activestate.com/recipes/576693/
-try:
-    from thread import get_ident as _get_ident
-except ImportError:
-    from dummy_thread import get_ident as _get_ident
-
-try:
-    from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
-    pass
-
-
-class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as for regular dictionaries.
-
-    # The internal self.__map dictionary maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
-
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  Signature is the same as for
-        regular dictionaries, but keyword arguments are not recommended
-        because their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []                     # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link which goes at the end of the linked
-        # list, and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which is
-        # then removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, key = self.__map.pop(key)
-        link_prev[1] = link_next
-        link_next[0] = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        root = self.__root
-        curr = root[1]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[1]
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        root = self.__root
-        curr = root[0]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[0]
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        try:
-            for node in self.__map.itervalues():
-                del node[:]
-            root = self.__root
-            root[:] = [root, root, None]
-            self.__map.clear()
-        except AttributeError:
-            pass
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root[0]
-            link_prev = link[0]
-            link_prev[1] = root
-            root[0] = link_prev
-        else:
-            link = root[1]
-            link_next = link[1]
-            root[1] = link_next
-            link_next[0] = root
-        key = link[2]
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) items in od'
-        for k in self:
-            yield (k, self[k])
-
-    def update(*args, **kwds):
-        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
-
-        If E is a dict instance, does:           for k in E: od[k] = E[k]
-        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
-        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
-        In either case, this is followed by:     for k, v in F.items(): od[k] = v
-
-        '''
-        if len(args) > 2:
-            raise TypeError('update() takes at most 2 positional '
-                            'arguments (%d given)' % (len(args),))
-        elif not args:
-            raise TypeError('update() takes at least 1 argument (0 given)')
-        self = args[0]
-        # Make progressively weaker assumptions about "other"
-        other = ()
-        if len(args) == 2:
-            other = args[1]
-        if isinstance(other, dict):
-            for key in other:
-                self[key] = other[key]
-        elif hasattr(other, 'keys'):
-            for key in other.keys():
-                self[key] = other[key]
-        else:
-            for key, value in other:
-                self[key] = value
-        for key, value in kwds.items():
-            self[key] = value
-
-    __update = update  # let subclasses override update without breaking __init__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-        If key is not found, d is returned if given, otherwise KeyError is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
-
-    def __repr__(self, _repr_running={}):
-        'od.__repr__() <==> repr(od)'
-        call_key = id(self), _get_ident()
-        if call_key in _repr_running:
-            return '...'
-        _repr_running[call_key] = 1
-        try:
-            if not self:
-                return '%s()' % (self.__class__.__name__,)
-            return '%s(%r)' % (self.__class__.__name__, self.items())
-        finally:
-            del _repr_running[call_key]
-
-    def __reduce__(self):
-        'Return state information for pickling'
-        items = [[k, self[k]] for k in self]
-        inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def copy(self):
-        'od.copy() -> a shallow copy of od'
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
-        and values equal to v (which defaults to None).
-
-        '''
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-    def __eq__(self, other):
-        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-        while comparison to a regular mapping is order-insensitive.
-
-        '''
-        if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
-        return dict.__eq__(self, other)
-
-    def __ne__(self, other):
-        return not self == other
-
-    # -- the following methods are only used in Python 2.7 --
-
-    def viewkeys(self):
-        "od.viewkeys() -> a set-like object providing a view on od's keys"
-        return KeysView(self)
-
-    def viewvalues(self):
-        "od.viewvalues() -> an object providing a view on od's values"
-        return ValuesView(self)
-
-    def viewitems(self):
-        "od.viewitems() -> a set-like object providing a view on od's items"
-        return ItemsView(self)
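
The backport removed above tracks insertion order with a circular doubly linked list of [PREV, NEXT, KEY] nodes indexed through self.__map; on interpreters that no longer need the backport, the standard library collections.OrderedDict provides the same observable behaviour, sketched below with illustrative keys:

    from collections import OrderedDict

    od = OrderedDict()
    od['b'] = 2
    od['a'] = 1
    od['c'] = 3
    assert list(od.keys()) == ['b', 'a', 'c']    # insertion order is preserved
    k, v = od.popitem(last=False)                # FIFO pop from the front
    assert (k, v) == ('b', 2)
    # Comparison against another OrderedDict is order-sensitive:
    assert od == OrderedDict([('a', 1), ('c', 3)])
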
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/packages/six.py b/tools/swarming_client/third_party/requests/packages/urllib3/packages/six.py
deleted file mode 100644
index 27d8011..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/packages/six.py
+++ /dev/null
@@ -1,385 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-#Copyright (c) 2010-2011 Benjamin Peterson
-
-#Permission is hereby granted, free of charge, to any person obtaining a copy of
-#this software and associated documentation files (the "Software"), to deal in
-#the Software without restriction, including without limitation the rights to
-#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-#the Software, and to permit persons to whom the Software is furnished to do so,
-#subject to the following conditions:
-
-#The above copyright notice and this permission notice shall be included in all
-#copies or substantial portions of the Software.
-
-#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.2.0"  # Revision 41c74fef2ded
-
-
-# True if we are running on Python 3.
-PY3 = sys.version_info[0] == 3
-
-if PY3:
-    string_types = str,
-    integer_types = int,
-    class_types = type,
-    text_type = str
-    binary_type = bytes
-
-    MAXSIZE = sys.maxsize
-else:
-    string_types = basestring,
-    integer_types = (int, long)
-    class_types = (type, types.ClassType)
-    text_type = unicode
-    binary_type = str
-
-    if sys.platform.startswith("java"):
-        # Jython always uses 32 bits.
-        MAXSIZE = int((1 << 31) - 1)
-    else:
-        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
-        class X(object):
-            def __len__(self):
-                return 1 << 31
-        try:
-            len(X())
-        except OverflowError:
-            # 32-bit
-            MAXSIZE = int((1 << 31) - 1)
-        else:
-            # 64-bit
-            MAXSIZE = int((1 << 63) - 1)
-            del X
-
-
-def _add_doc(func, doc):
-    """Add documentation to a function."""
-    func.__doc__ = doc
-
-
-def _import_module(name):
-    """Import module, returning the module after the last dot."""
-    __import__(name)
-    return sys.modules[name]
-
-
-class _LazyDescr(object):
-
-    def __init__(self, name):
-        self.name = name
-
-    def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)
-        # This is a bit ugly, but it avoids running this again.
-        delattr(tp, self.name)
-        return result
-
-
-class MovedModule(_LazyDescr):
-
-    def __init__(self, name, old, new=None):
-        super(MovedModule, self).__init__(name)
-        if PY3:
-            if new is None:
-                new = name
-            self.mod = new
-        else:
-            self.mod = old
-
-    def _resolve(self):
-        return _import_module(self.mod)
-
-
-class MovedAttribute(_LazyDescr):
-
-    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
-        super(MovedAttribute, self).__init__(name)
-        if PY3:
-            if new_mod is None:
-                new_mod = name
-            self.mod = new_mod
-            if new_attr is None:
-                if old_attr is None:
-                    new_attr = name
-                else:
-                    new_attr = old_attr
-            self.attr = new_attr
-        else:
-            self.mod = old_mod
-            if old_attr is None:
-                old_attr = name
-            self.attr = old_attr
-
-    def _resolve(self):
-        module = _import_module(self.mod)
-        return getattr(module, self.attr)
-
-
-
-class _MovedItems(types.ModuleType):
-    """Lazy loading of moved objects"""
-
-
-_moved_attributes = [
-    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
-    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
-    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
-    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
-    MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
-    MovedAttribute("reduce", "__builtin__", "functools"),
-    MovedAttribute("StringIO", "StringIO", "io"),
-    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
-
-    MovedModule("builtins", "__builtin__"),
-    MovedModule("configparser", "ConfigParser"),
-    MovedModule("copyreg", "copy_reg"),
-    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
-    MovedModule("http_cookies", "Cookie", "http.cookies"),
-    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
-    MovedModule("html_parser", "HTMLParser", "html.parser"),
-    MovedModule("http_client", "httplib", "http.client"),
-    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
-    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
-    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
-    MovedModule("cPickle", "cPickle", "pickle"),
-    MovedModule("queue", "Queue"),
-    MovedModule("reprlib", "repr"),
-    MovedModule("socketserver", "SocketServer"),
-    MovedModule("tkinter", "Tkinter"),
-    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
-    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
-    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
-    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
-    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
-    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
-    MovedModule("tkinter_colorchooser", "tkColorChooser",
-                "tkinter.colorchooser"),
-    MovedModule("tkinter_commondialog", "tkCommonDialog",
-                "tkinter.commondialog"),
-    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
-    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
-    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
-                "tkinter.simpledialog"),
-    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
-    MovedModule("winreg", "_winreg"),
-]
-for attr in _moved_attributes:
-    setattr(_MovedItems, attr.name, attr)
-del attr
-
-moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
-
-
-def add_move(move):
-    """Add an item to six.moves."""
-    setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
-    """Remove item from six.moves."""
-    try:
-        delattr(_MovedItems, name)
-    except AttributeError:
-        try:
-            del moves.__dict__[name]
-        except KeyError:
-            raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
-    _meth_func = "__func__"
-    _meth_self = "__self__"
-
-    _func_code = "__code__"
-    _func_defaults = "__defaults__"
-
-    _iterkeys = "keys"
-    _itervalues = "values"
-    _iteritems = "items"
-else:
-    _meth_func = "im_func"
-    _meth_self = "im_self"
-
-    _func_code = "func_code"
-    _func_defaults = "func_defaults"
-
-    _iterkeys = "iterkeys"
-    _itervalues = "itervalues"
-    _iteritems = "iteritems"
-
-
-try:
-    advance_iterator = next
-except NameError:
-    def advance_iterator(it):
-        return it.next()
-next = advance_iterator
-
-
-if PY3:
-    def get_unbound_function(unbound):
-        return unbound
-
-    Iterator = object
-
-    def callable(obj):
-        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-else:
-    def get_unbound_function(unbound):
-        return unbound.im_func
-
-    class Iterator(object):
-
-        def next(self):
-            return type(self).__next__(self)
-
-    callable = callable
-_add_doc(get_unbound_function,
-         """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-
-
-def iterkeys(d):
-    """Return an iterator over the keys of a dictionary."""
-    return iter(getattr(d, _iterkeys)())
-
-def itervalues(d):
-    """Return an iterator over the values of a dictionary."""
-    return iter(getattr(d, _itervalues)())
-
-def iteritems(d):
-    """Return an iterator over the (key, value) pairs of a dictionary."""
-    return iter(getattr(d, _iteritems)())
-
-
-if PY3:
-    def b(s):
-        return s.encode("latin-1")
-    def u(s):
-        return s
-    if sys.version_info[1] <= 1:
-        def int2byte(i):
-            return bytes((i,))
-    else:
-        # This is about 2x faster than the implementation above on 3.2+
-        int2byte = operator.methodcaller("to_bytes", 1, "big")
-    import io
-    StringIO = io.StringIO
-    BytesIO = io.BytesIO
-else:
-    def b(s):
-        return s
-    def u(s):
-        return unicode(s, "unicode_escape")
-    int2byte = chr
-    import StringIO
-    StringIO = BytesIO = StringIO.StringIO
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-if PY3:
-    import builtins
-    exec_ = getattr(builtins, "exec")
-
-
-    def reraise(tp, value, tb=None):
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-
-
-    print_ = getattr(builtins, "print")
-    del builtins
-
-else:
-    def exec_(code, globs=None, locs=None):
-        """Execute code in a namespace."""
-        if globs is None:
-            frame = sys._getframe(1)
-            globs = frame.f_globals
-            if locs is None:
-                locs = frame.f_locals
-            del frame
-        elif locs is None:
-            locs = globs
-        exec("""exec code in globs, locs""")
-
-
-    exec_("""def reraise(tp, value, tb=None):
-    raise tp, value, tb
-""")
-
-
-    def print_(*args, **kwargs):
-        """The new-style print function."""
-        fp = kwargs.pop("file", sys.stdout)
-        if fp is None:
-            return
-        def write(data):
-            if not isinstance(data, basestring):
-                data = str(data)
-            fp.write(data)
-        want_unicode = False
-        sep = kwargs.pop("sep", None)
-        if sep is not None:
-            if isinstance(sep, unicode):
-                want_unicode = True
-            elif not isinstance(sep, str):
-                raise TypeError("sep must be None or a string")
-        end = kwargs.pop("end", None)
-        if end is not None:
-            if isinstance(end, unicode):
-                want_unicode = True
-            elif not isinstance(end, str):
-                raise TypeError("end must be None or a string")
-        if kwargs:
-            raise TypeError("invalid keyword arguments to print()")
-        if not want_unicode:
-            for arg in args:
-                if isinstance(arg, unicode):
-                    want_unicode = True
-                    break
-        if want_unicode:
-            newline = unicode("\n")
-            space = unicode(" ")
-        else:
-            newline = "\n"
-            space = " "
-        if sep is None:
-            sep = space
-        if end is None:
-            end = newline
-        for i, arg in enumerate(args):
-            if i:
-                write(sep)
-            write(arg)
-        write(end)
-
-_add_doc(reraise, """Reraise an exception.""")
-
-
-def with_metaclass(meta, base=object):
-    """Create a base class with a metaclass."""
-    return meta("NewBase", (base,), {})
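
A short sketch of the compatibility shim removed above, using the standalone six package from PyPI instead of the vendored copy; the dictionary contents are illustrative:

    import six

    raw = six.b("boundary")                  # byte string on Python 2 and 3 alike
    assert isinstance(raw, six.binary_type)

    d = {'a': 1, 'b': 2}
    for key, value in six.iteritems(d):      # dict iteration without 2/3 branching
        assert d[key] == value

    if six.PY3:
        assert six.text_type is str          # text_type unifies unicode/str
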
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
deleted file mode 100644
index dd59a75..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-try:
-    # Python 3.2+
-    from ssl import CertificateError, match_hostname
-except ImportError:
-    try:
-        # Backport of the function from a pypi module
-        from backports.ssl_match_hostname import CertificateError, match_hostname
-    except ImportError:
-        # Our vendored copy
-        from ._implementation import CertificateError, match_hostname
-
-# Not needed, but documenting what we provide.
-__all__ = ('CertificateError', 'match_hostname')
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
deleted file mode 100644
index 52f4287..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
+++ /dev/null
@@ -1,105 +0,0 @@
-"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
-
-# Note: This file is under the PSF license as the code comes from the python
-# stdlib.   http://docs.python.org/3/license.html
-
-import re
-
-__version__ = '3.4.0.2'
-
-class CertificateError(ValueError):
-    pass
-
-
-def _dnsname_match(dn, hostname, max_wildcards=1):
-    """Matching according to RFC 6125, section 6.4.3
-
-    http://tools.ietf.org/html/rfc6125#section-6.4.3
-    """
-    pats = []
-    if not dn:
-        return False
-
-    # Ported from python3-syntax:
-    # leftmost, *remainder = dn.split(r'.')
-    parts = dn.split(r'.')
-    leftmost = parts[0]
-    remainder = parts[1:]
-
-    wildcards = leftmost.count('*')
-    if wildcards > max_wildcards:
-        # Issue #17980: avoid denials of service by refusing more
-        # than one wildcard per fragment.  A survey of established
-        # policy among SSL implementations showed it to be a
-        # reasonable choice.
-        raise CertificateError(
-            "too many wildcards in certificate DNS name: " + repr(dn))
-
-    # speed up common case w/o wildcards
-    if not wildcards:
-        return dn.lower() == hostname.lower()
-
-    # RFC 6125, section 6.4.3, subitem 1.
-    # The client SHOULD NOT attempt to match a presented identifier in which
-    # the wildcard character comprises a label other than the left-most label.
-    if leftmost == '*':
-        # When '*' is a fragment by itself, it matches a non-empty dotless
-        # fragment.
-        pats.append('[^.]+')
-    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
-        # RFC 6125, section 6.4.3, subitem 3.
-        # The client SHOULD NOT attempt to match a presented identifier
-        # where the wildcard character is embedded within an A-label or
-        # U-label of an internationalized domain name.
-        pats.append(re.escape(leftmost))
-    else:
-        # Otherwise, '*' matches any dotless string, e.g. www*
-        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
-    # add the remaining fragments, ignore any wildcards
-    for frag in remainder:
-        pats.append(re.escape(frag))
-
-    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
-    return pat.match(hostname)
-
-
-def match_hostname(cert, hostname):
-    """Verify that *cert* (in decoded format as returned by
-    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
-    rules are followed, but IP addresses are not accepted for *hostname*.
-
-    CertificateError is raised on failure. On success, the function
-    returns nothing.
-    """
-    if not cert:
-        raise ValueError("empty or no certificate")
-    dnsnames = []
-    san = cert.get('subjectAltName', ())
-    for key, value in san:
-        if key == 'DNS':
-            if _dnsname_match(value, hostname):
-                return
-            dnsnames.append(value)
-    if not dnsnames:
-        # The subject is only checked when there is no dNSName entry
-        # in subjectAltName
-        for sub in cert.get('subject', ()):
-            for key, value in sub:
-                # XXX according to RFC 2818, the most specific Common Name
-                # must be used.
-                if key == 'commonName':
-                    if _dnsname_match(value, hostname):
-                        return
-                    dnsnames.append(value)
-    if len(dnsnames) > 1:
-        raise CertificateError("hostname %r "
-            "doesn't match either of %s"
-            % (hostname, ', '.join(map(repr, dnsnames))))
-    elif len(dnsnames) == 1:
-        raise CertificateError("hostname %r "
-            "doesn't match %r"
-            % (hostname, dnsnames[0]))
-    else:
-        raise CertificateError("no appropriate commonName or "
-            "subjectAltName fields were found")
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/poolmanager.py b/tools/swarming_client/third_party/requests/packages/urllib3/poolmanager.py
deleted file mode 100644
index f13e673..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/poolmanager.py
+++ /dev/null
@@ -1,281 +0,0 @@
-from __future__ import absolute_import
-import logging
-
-try:  # Python 3
-    from urllib.parse import urljoin
-except ImportError:
-    from urlparse import urljoin
-
-from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-from .connectionpool import port_by_scheme
-from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
-from .request import RequestMethods
-from .util.url import parse_url
-from .util.retry import Retry
-
-
-__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
-
-
-pool_classes_by_scheme = {
-    'http': HTTPConnectionPool,
-    'https': HTTPSConnectionPool,
-}
-
-log = logging.getLogger(__name__)
-
-SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
-                'ssl_version', 'ca_cert_dir')
-
-
-class PoolManager(RequestMethods):
-    """
-    Allows for arbitrary requests while transparently keeping track of
-    necessary connection pools for you.
-
-    :param num_pools:
-        Number of connection pools to cache before discarding the least
-        recently used pool.
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-
-    :param \**connection_pool_kw:
-        Additional parameters are used to create fresh
-        :class:`urllib3.connectionpool.ConnectionPool` instances.
-
-    Example::
-
-        >>> manager = PoolManager(num_pools=2)
-        >>> r = manager.request('GET', 'http://google.com/')
-        >>> r = manager.request('GET', 'http://google.com/mail')
-        >>> r = manager.request('GET', 'http://yahoo.com/')
-        >>> len(manager.pools)
-        2
-
-    """
-
-    proxy = None
-
-    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
-        RequestMethods.__init__(self, headers)
-        self.connection_pool_kw = connection_pool_kw
-        self.pools = RecentlyUsedContainer(num_pools,
-                                           dispose_func=lambda p: p.close())
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.clear()
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def _new_pool(self, scheme, host, port):
-        """
-        Create a new :class:`ConnectionPool` based on host, port and scheme.
-
-        This method is used to actually create the connection pools handed out
-        by :meth:`connection_from_url` and companion methods. It is intended
-        to be overridden for customization.
-        """
-        pool_cls = pool_classes_by_scheme[scheme]
-        kwargs = self.connection_pool_kw
-        if scheme == 'http':
-            kwargs = self.connection_pool_kw.copy()
-            for kw in SSL_KEYWORDS:
-                kwargs.pop(kw, None)
-
-        return pool_cls(host, port, **kwargs)
-
-    def clear(self):
-        """
-        Empty our store of pools and direct them all to close.
-
-        This will not affect in-flight connections, but they will not be
-        re-used after completion.
-        """
-        self.pools.clear()
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        """
-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
-
-        If ``port`` isn't given, it will be derived from the ``scheme`` using
-        ``urllib3.connectionpool.port_by_scheme``.
-        """
-
-        if not host:
-            raise LocationValueError("No host specified.")
-
-        scheme = scheme or 'http'
-        port = port or port_by_scheme.get(scheme, 80)
-        pool_key = (scheme, host, port)
-
-        with self.pools.lock:
-            # If the scheme, host, or port doesn't match existing open
-            # connections, open a new ConnectionPool.
-            pool = self.pools.get(pool_key)
-            if pool:
-                return pool
-
-            # Make a fresh ConnectionPool of the desired type
-            pool = self._new_pool(scheme, host, port)
-            self.pools[pool_key] = pool
-
-        return pool
-
-    def connection_from_url(self, url):
-        """
-        Similar to :func:`urllib3.connectionpool.connection_from_url` but
-        doesn't pass any additional parameters to the
-        :class:`urllib3.connectionpool.ConnectionPool` constructor.
-
-        Additional parameters are taken from the :class:`.PoolManager`
-        constructor.
-        """
-        u = parse_url(url)
-        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        """
-        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
-        with custom cross-host redirect logic and only sends the request-uri
-        portion of the ``url``.
-
-        The given ``url`` parameter must be absolute, such that an appropriate
-        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
-        """
-        u = parse_url(url)
-        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-        kw['assert_same_host'] = False
-        kw['redirect'] = False
-        if 'headers' not in kw:
-            kw['headers'] = self.headers
-
-        if self.proxy is not None and u.scheme == "http":
-            response = conn.urlopen(method, url, **kw)
-        else:
-            response = conn.urlopen(method, u.request_uri, **kw)
-
-        redirect_location = redirect and response.get_redirect_location()
-        if not redirect_location:
-            return response
-
-        # Support relative URLs for redirecting.
-        redirect_location = urljoin(url, redirect_location)
-
-        # RFC 7231, Section 6.4.4
-        if response.status == 303:
-            method = 'GET'
-
-        retries = kw.get('retries')
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(retries, redirect=redirect)
-
-        try:
-            retries = retries.increment(method, url, response=response, _pool=conn)
-        except MaxRetryError:
-            if retries.raise_on_redirect:
-                raise
-            return response
-
-        kw['retries'] = retries
-        kw['redirect'] = redirect
-
-        log.info("Redirecting %s -> %s" % (url, redirect_location))
-        return self.urlopen(method, redirect_location, **kw)
-
-
-class ProxyManager(PoolManager):
-    """
-    Behaves just like :class:`PoolManager`, but sends all requests through
-    the defined proxy, using the CONNECT method for HTTPS URLs.
-
-    :param proxy_url:
-        The URL of the proxy to be used.
-
-    :param proxy_headers:
-        A dictionary contaning headers that will be sent to the proxy. In case
-        of HTTP they are being sent with each request, while in the
-        HTTPS/CONNECT case they are sent only once. Could be used for proxy
-        authentication.
-
-    Example:
-        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
-        >>> r1 = proxy.request('GET', 'http://google.com/')
-        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
-        >>> len(proxy.pools)
-        1
-        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
-        >>> r4 = proxy.request('GET', 'https://twitter.com/')
-        >>> len(proxy.pools)
-        3
-
-    """
-
-    def __init__(self, proxy_url, num_pools=10, headers=None,
-                 proxy_headers=None, **connection_pool_kw):
-
-        if isinstance(proxy_url, HTTPConnectionPool):
-            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
-                                        proxy_url.port)
-        proxy = parse_url(proxy_url)
-        if not proxy.port:
-            port = port_by_scheme.get(proxy.scheme, 80)
-            proxy = proxy._replace(port=port)
-
-        if proxy.scheme not in ("http", "https"):
-            raise ProxySchemeUnknown(proxy.scheme)
-
-        self.proxy = proxy
-        self.proxy_headers = proxy_headers or {}
-
-        connection_pool_kw['_proxy'] = self.proxy
-        connection_pool_kw['_proxy_headers'] = self.proxy_headers
-
-        super(ProxyManager, self).__init__(
-            num_pools, headers, **connection_pool_kw)
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        if scheme == "https":
-            return super(ProxyManager, self).connection_from_host(
-                host, port, scheme)
-
-        return super(ProxyManager, self).connection_from_host(
-            self.proxy.host, self.proxy.port, self.proxy.scheme)
-
-    def _set_proxy_headers(self, url, headers=None):
-        """
-        Sets headers needed by proxies: specifically, the Accept and Host
-        headers. Only sets headers not provided by the user.
-        """
-        headers_ = {'Accept': '*/*'}
-
-        netloc = parse_url(url).netloc
-        if netloc:
-            headers_['Host'] = netloc
-
-        if headers:
-            headers_.update(headers)
-        return headers_
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
-        u = parse_url(url)
-
-        if u.scheme == "http":
-            # For proxied HTTPS requests, httplib sets the necessary headers
-            # on the CONNECT to the proxy. For HTTP, we'll definitely
-            # need to set 'Host' at the very least.
-            headers = kw.get('headers', self.headers)
-            kw['headers'] = self._set_proxy_headers(url, headers)
-
-        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
-
-
-def proxy_from_url(url, **kw):
-    return ProxyManager(proxy_url=url, **kw)
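
The PoolManager and ProxyManager removed above are also exposed by the standalone urllib3 1.x API; a minimal sketch with placeholder URLs and headers:

    import urllib3

    # One manager caches up to num_pools per-host connection pools (LRU).
    http = urllib3.PoolManager(num_pools=4, headers={'User-Agent': 'example/1.0'})
    r = http.request('GET', 'http://httpbin.org/get')
    print(r.status, len(r.data))

    # ProxyManager routes plain-HTTP requests through the proxy and uses
    # CONNECT for HTTPS targets.
    proxy = urllib3.ProxyManager('http://localhost:3128/')
    r = proxy.request('GET', 'http://example.com/')
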
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/request.py b/tools/swarming_client/third_party/requests/packages/urllib3/request.py
deleted file mode 100644
index d5aa62d..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/request.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from __future__ import absolute_import
-try:
-    from urllib.parse import urlencode
-except ImportError:
-    from urllib import urlencode
-
-from .filepost import encode_multipart_formdata
-
-
-__all__ = ['RequestMethods']
-
-
-class RequestMethods(object):
-    """
-    Convenience mixin for classes who implement a :meth:`urlopen` method, such
-    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
-    :class:`~urllib3.poolmanager.PoolManager`.
-
-    Provides behavior for making common types of HTTP request methods and
-    decides which type of request field encoding to use.
-
-    Specifically,
-
-    :meth:`.request_encode_url` is for sending requests whose fields are
-    encoded in the URL (such as GET, HEAD, DELETE).
-
-    :meth:`.request_encode_body` is for sending requests whose fields are
-    encoded in the *body* of the request using multipart or www-form-urlencoded
-    (such as for POST, PUT, PATCH).
-
-    :meth:`.request` is for making any kind of request, it will look up the
-    appropriate encoding format and use one of the above two methods to make
-    the request.
-
-    Initializer parameters:
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-    """
-
-    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
-
-    def __init__(self, headers=None):
-        self.headers = headers or {}
-
-    def urlopen(self, method, url, body=None, headers=None,
-                encode_multipart=True, multipart_boundary=None,
-                **kw):  # Abstract
-        raise NotImplemented("Classes extending RequestMethods must implement "
-                             "their own ``urlopen`` method.")
-
-    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the appropriate encoding of
-        ``fields`` based on the ``method`` used.
-
-        This is a convenience method that requires the least amount of manual
-        effort. It can be used in most situations, while still having the
-        option to drop down to more specific methods when necessary, such as
-        :meth:`request_encode_url`, :meth:`request_encode_body`,
-        or even the lowest level :meth:`urlopen`.
-        """
-        method = method.upper()
-
-        if method in self._encode_url_methods:
-            return self.request_encode_url(method, url, fields=fields,
-                                           headers=headers,
-                                           **urlopen_kw)
-        else:
-            return self.request_encode_body(method, url, fields=fields,
-                                            headers=headers,
-                                            **urlopen_kw)
-
-    def request_encode_url(self, method, url, fields=None, headers=None,
-                           **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': headers}
-        extra_kw.update(urlopen_kw)
-
-        if fields:
-            url += '?' + urlencode(fields)
-
-        return self.urlopen(method, url, **extra_kw)
-
-    def request_encode_body(self, method, url, fields=None, headers=None,
-                            encode_multipart=True, multipart_boundary=None,
-                            **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the body. This is useful for request methods like POST, PUT, PATCH, etc.
-
-        When ``encode_multipart=True`` (default), then
-        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
-        the payload with the appropriate content type. Otherwise
-        :meth:`urllib.urlencode` is used with the
-        'application/x-www-form-urlencoded' content type.
-
-        Multipart encoding must be used when posting files, and it's reasonably
-        safe to use it in other times too. However, it may break request
-        signing, such as with OAuth.
-
-        Supports an optional ``fields`` parameter of key/value strings AND
-        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
-        the MIME type is optional. For example::
-
-            fields = {
-                'foo': 'bar',
-                'fakefile': ('foofile.txt', 'contents of foofile'),
-                'realfile': ('barfile.txt', open('realfile').read()),
-                'typedfile': ('bazfile.bin', open('bazfile').read(),
-                              'image/jpeg'),
-                'nonamefile': 'contents of nonamefile field',
-            }
-
-        When uploading a file, providing a filename (the first parameter of the
-        tuple) is optional but recommended to best mimick behavior of browsers.
-
-        Note that if ``headers`` are supplied, the 'Content-Type' header will
-        be overwritten because it depends on the dynamic random boundary string
-        which is used to compose the body of the request. The random boundary
-        string can be explicitly set with the ``multipart_boundary`` parameter.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': {}}
-
-        if fields:
-            if 'body' in urlopen_kw:
-                raise TypeError(
-                    "request got values for both 'fields' and 'body', can only specify one.")
-
-            if encode_multipart:
-                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
-            else:
-                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
-
-            extra_kw['body'] = body
-            extra_kw['headers'] = {'Content-Type': content_type}
-
-        extra_kw['headers'].update(headers)
-        extra_kw.update(urlopen_kw)
-
-        return self.urlopen(method, url, **extra_kw)
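
The dispatch described in the removed RequestMethods docstrings can be sketched as follows, again via the standalone urllib3 API with placeholder URLs: GET-like verbs carry fields in the query string, POST-like verbs in the request body (multipart by default):

    import urllib3

    http = urllib3.PoolManager()

    # Encoded into the URL as ?q=v8&page=1:
    http.request('GET', 'http://httpbin.org/get', fields={'q': 'v8', 'page': '1'})

    # Encoded as multipart/form-data in the body:
    http.request(
        'POST', 'http://httpbin.org/post',
        fields={
            'note': 'hello',
            'logfile': ('run.log', b'log contents', 'text/plain'),
        },
    )
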
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/response.py b/tools/swarming_client/third_party/requests/packages/urllib3/response.py
deleted file mode 100644
index 8f2a1b5..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/response.py
+++ /dev/null
@@ -1,514 +0,0 @@
-from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
-import io
-from socket import timeout as SocketTimeout
-from socket import error as SocketError
-
-from ._collections import HTTPHeaderDict
-from .exceptions import (
-    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
-)
-from .packages.six import string_types as basestring, binary_type, PY3
-from .packages.six.moves import http_client as httplib
-from .connection import HTTPException, BaseSSLError
-from .util.response import is_fp_closed, is_response_to_head
-
-
-class DeflateDecoder(object):
-
-    def __init__(self):
-        self._first_try = True
-        self._data = binary_type()
-        self._obj = zlib.decompressobj()
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-
-        if not self._first_try:
-            return self._obj.decompress(data)
-
-        self._data += data
-        try:
-            return self._obj.decompress(data)
-        except zlib.error:
-            self._first_try = False
-            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
-            try:
-                return self.decompress(self._data)
-            finally:
-                self._data = None
-
-
-class GzipDecoder(object):
-
-    def __init__(self):
-        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-        return self._obj.decompress(data)
-
-
-def _get_decoder(mode):
-    if mode == 'gzip':
-        return GzipDecoder()
-
-    return DeflateDecoder()
-
-
-class HTTPResponse(io.IOBase):
-    """
-    HTTP Response container.
-
-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
-    loaded and decoded on-demand when the ``data`` property is accessed.  This
-    class is also compatible with the Python standard library's :mod:`io`
-    module, and can hence be treated as a readable object in the context of that
-    framework.
-
-    Extra parameters for behaviour not present in httplib.HTTPResponse:
-
-    :param preload_content:
-        If True, the response's body will be preloaded during construction.
-
-    :param decode_content:
-        If True, attempts to decode specific content-encoding's based on headers
-        (like 'gzip' and 'deflate') will be skipped and raw data will be used
-        instead.
-
-    :param original_response:
-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
-        object, it's convenient to include the original for debug purposes. It's
-        otherwise unused.
-    """
-
-    CONTENT_DECODERS = ['gzip', 'deflate']
-    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
-
-    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
-                 strict=0, preload_content=True, decode_content=True,
-                 original_response=None, pool=None, connection=None):
-
-        if isinstance(headers, HTTPHeaderDict):
-            self.headers = headers
-        else:
-            self.headers = HTTPHeaderDict(headers)
-        self.status = status
-        self.version = version
-        self.reason = reason
-        self.strict = strict
-        self.decode_content = decode_content
-
-        self._decoder = None
-        self._body = None
-        self._fp = None
-        self._original_response = original_response
-        self._fp_bytes_read = 0
-
-        if body and isinstance(body, (basestring, binary_type)):
-            self._body = body
-
-        self._pool = pool
-        self._connection = connection
-
-        if hasattr(body, 'read'):
-            self._fp = body
-
-        # Are we using the chunked-style of transfer encoding?
-        self.chunked = False
-        self.chunk_left = None
-        tr_enc = self.headers.get('transfer-encoding', '').lower()
-        # Don't incur the penalty of creating a list and then discarding it
-        encodings = (enc.strip() for enc in tr_enc.split(","))
-        if "chunked" in encodings:
-            self.chunked = True
-
-        # If requested, preload the body.
-        if preload_content and not self._body:
-            self._body = self.read(decode_content=decode_content)
-
-    def get_redirect_location(self):
-        """
-        Should we redirect and where to?
-
-        :returns: Truthy redirect location string if we got a redirect status
-            code and valid location. ``None`` if redirect status and no
-            location. ``False`` if not a redirect status code.
-        """
-        if self.status in self.REDIRECT_STATUSES:
-            return self.headers.get('location')
-
-        return False
-
-    def release_conn(self):
-        if not self._pool or not self._connection:
-            return
-
-        self._pool._put_conn(self._connection)
-        self._connection = None
-
-    @property
-    def data(self):
-        # For backwords-compat with earlier urllib3 0.4 and earlier.
-        if self._body:
-            return self._body
-
-        if self._fp:
-            return self.read(cache_content=True)
-
-    def tell(self):
-        """
-        Obtain the number of bytes pulled over the wire so far. May differ from
-        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
-        are encoded on the wire (e.g, compressed).
-        """
-        return self._fp_bytes_read
-
-    def _init_decoder(self):
-        """
-        Set-up the _decoder attribute if necessar.
-        """
-        # Note: content-encoding value should be case-insensitive, per RFC 7230
-        # Section 3.2
-        content_encoding = self.headers.get('content-encoding', '').lower()
-        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
-            self._decoder = _get_decoder(content_encoding)
-
-    def _decode(self, data, decode_content, flush_decoder):
-        """
-        Decode the data passed in and potentially flush the decoder.
-        """
-        try:
-            if decode_content and self._decoder:
-                data = self._decoder.decompress(data)
-        except (IOError, zlib.error) as e:
-            content_encoding = self.headers.get('content-encoding', '').lower()
-            raise DecodeError(
-                "Received response with content-encoding: %s, but "
-                "failed to decode it." % content_encoding, e)
-
-        if flush_decoder and decode_content:
-            data += self._flush_decoder()
-
-        return data
-
-    def _flush_decoder(self):
-        """
-        Flushes the decoder. Should only be called if the decoder is actually
-        being used.
-        """
-        if self._decoder:
-            buf = self._decoder.decompress(b'')
-            return buf + self._decoder.flush()
-
-        return b''
-
-    @contextmanager
-    def _error_catcher(self):
-        """
-        Catch low-level python exceptions, instead re-raising urllib3
-        variants, so that low-level exceptions are not leaked in the
-        high-level api.
-
-        On exit, release the connection back to the pool.
-        """
-        try:
-            try:
-                yield
-
-            except SocketTimeout:
-                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
-                # there is yet no clean way to get at it from this context.
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except BaseSSLError as e:
-                # FIXME: Is there a better way to differentiate between SSLErrors?
-                if 'read operation timed out' not in str(e):  # Defensive:
-                    # This shouldn't happen but just in case we're missing an edge
-                    # case, let's avoid swallowing SSL errors.
-                    raise
-
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except (HTTPException, SocketError) as e:
-                # This includes IncompleteRead.
-                raise ProtocolError('Connection broken: %r' % e, e)
-
-        except Exception:
-            # The response may not be closed but we're not going to use it anymore
-            # so close it now to ensure that the connection is released back to the pool.
-            if self._original_response and not self._original_response.isclosed():
-                self._original_response.close()
-
-            # Closing the response may not actually be sufficient to close
-            # everything, so if we have a hold of the connection close that
-            # too.
-            if self._connection is not None:
-                self._connection.close()
-
-            raise
-        finally:
-            if self._original_response and self._original_response.isclosed():
-                self.release_conn()
-
-    def read(self, amt=None, decode_content=None, cache_content=False):
-        """
-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
-        parameters: ``decode_content`` and ``cache_content``.
-
-        :param amt:
-            How much of the content to read. If specified, caching is skipped
-            because it doesn't make sense to cache partial content as the full
-            response.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-
-        :param cache_content:
-            If True, will save the returned data such that the same result is
-            returned despite of the state of the underlying file object. This
-            is useful if you want the ``.data`` property to continue working
-            after having ``.read()`` the file object. (Overridden if ``amt`` is
-            set.)
-        """
-        self._init_decoder()
-        if decode_content is None:
-            decode_content = self.decode_content
-
-        if self._fp is None:
-            return
-
-        flush_decoder = False
-        data = None
-
-        with self._error_catcher():
-            if amt is None:
-                # cStringIO doesn't like amt=None
-                data = self._fp.read()
-                flush_decoder = True
-            else:
-                cache_content = False
-                data = self._fp.read(amt)
-                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
-                    # Close the connection when no data is returned
-                    #
-                    # This is redundant to what httplib/http.client _should_
-                    # already do.  However, versions of python released before
-                    # December 15, 2012 (http://bugs.python.org/issue16298) do
-                    # not properly close the connection in all cases. There is
-                    # no harm in redundantly calling close.
-                    self._fp.close()
-                    flush_decoder = True
-
-        if data:
-            self._fp_bytes_read += len(data)
-
-            data = self._decode(data, decode_content, flush_decoder)
-
-            if cache_content:
-                self._body = data
-
-        return data
-
-    def stream(self, amt=2**16, decode_content=None):
-        """
-        A generator wrapper for the read() method. A call will block until
-        ``amt`` bytes have been read from the connection or until the
-        connection is closed.
-
-        :param amt:
-            How much of the content to read. The generator will return up to
-            much data per iteration, but may return less. This is particularly
-            likely when using compressed data. However, the empty string will
-            never be returned.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        if self.chunked:
-            for line in self.read_chunked(amt, decode_content=decode_content):
-                yield line
-        else:
-            while not is_fp_closed(self._fp):
-                data = self.read(amt=amt, decode_content=decode_content)
-
-                if data:
-                    yield data
-
-    @classmethod
-    def from_httplib(ResponseCls, r, **response_kw):
-        """
-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
-        corresponding :class:`urllib3.response.HTTPResponse` object.
-
-        Remaining parameters are passed to the HTTPResponse constructor, along
-        with ``original_response=r``.
-        """
-        headers = r.msg
-
-        if not isinstance(headers, HTTPHeaderDict):
-            if PY3:  # Python 3
-                headers = HTTPHeaderDict(headers.items())
-            else:  # Python 2
-                headers = HTTPHeaderDict.from_httplib(headers)
-
-        # HTTPResponse objects in Python 3 don't have a .strict attribute
-        strict = getattr(r, 'strict', 0)
-        resp = ResponseCls(body=r,
-                           headers=headers,
-                           status=r.status,
-                           version=r.version,
-                           reason=r.reason,
-                           strict=strict,
-                           original_response=r,
-                           **response_kw)
-        return resp
-
-    # Backwards-compatibility methods for httplib.HTTPResponse
-    def getheaders(self):
-        return self.headers
-
-    def getheader(self, name, default=None):
-        return self.headers.get(name, default)
-
-    # Overrides from io.IOBase
-    def close(self):
-        if not self.closed:
-            self._fp.close()
-
-    @property
-    def closed(self):
-        if self._fp is None:
-            return True
-        elif hasattr(self._fp, 'closed'):
-            return self._fp.closed
-        elif hasattr(self._fp, 'isclosed'):  # Python 2
-            return self._fp.isclosed()
-        else:
-            return True
-
-    def fileno(self):
-        if self._fp is None:
-            raise IOError("HTTPResponse has no file to get a fileno from")
-        elif hasattr(self._fp, "fileno"):
-            return self._fp.fileno()
-        else:
-            raise IOError("The file-like object this HTTPResponse is wrapped "
-                          "around has no file descriptor")
-
-    def flush(self):
-        if self._fp is not None and hasattr(self._fp, 'flush'):
-            return self._fp.flush()
-
-    def readable(self):
-        # This method is required for `io` module compatibility.
-        return True
-
-    def readinto(self, b):
-        # This method is required for `io` module compatibility.
-        temp = self.read(len(b))
-        if len(temp) == 0:
-            return 0
-        else:
-            b[:len(temp)] = temp
-            return len(temp)
-
-    def _update_chunk_length(self):
-        # First, we'll figure out length of a chunk and then
-        # we'll try to read it from socket.
-        if self.chunk_left is not None:
-            return
-        line = self._fp.fp.readline()
-        line = line.split(b';', 1)[0]
-        try:
-            self.chunk_left = int(line, 16)
-        except ValueError:
-            # Invalid chunked protocol response, abort.
-            self.close()
-            raise httplib.IncompleteRead(line)
-
-    def _handle_chunk(self, amt):
-        returned_chunk = None
-        if amt is None:
-            chunk = self._fp._safe_read(self.chunk_left)
-            returned_chunk = chunk
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        elif amt < self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self.chunk_left = self.chunk_left - amt
-            returned_chunk = value
-        elif amt == self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-            returned_chunk = value
-        else:  # amt > self.chunk_left
-            returned_chunk = self._fp._safe_read(self.chunk_left)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        return returned_chunk
-
-    def read_chunked(self, amt=None, decode_content=None):
-        """
-        Similar to :meth:`HTTPResponse.read`, but with an additional
-        parameter: ``decode_content``.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        self._init_decoder()
-        # FIXME: Rewrite this method and make it a class with a better structured logic.
-        if not self.chunked:
-            raise ResponseNotChunked(
-                "Response is not chunked. "
-                "Header 'transfer-encoding: chunked' is missing.")
-
-        # Don't bother reading the body of a HEAD request.
-        if self._original_response and is_response_to_head(self._original_response):
-            self._original_response.close()
-            return
-
-        with self._error_catcher():
-            while True:
-                self._update_chunk_length()
-                if self.chunk_left == 0:
-                    break
-                chunk = self._handle_chunk(amt)
-                decoded = self._decode(chunk, decode_content=decode_content,
-                                       flush_decoder=False)
-                if decoded:
-                    yield decoded
-
-            if decode_content:
-                # On CPython and PyPy, we should never need to flush the
-                # decoder. However, on Jython we *might* need to, so
-                # let's defensively do it anyway.
-                decoded = self._flush_decoder()
-                if decoded:  # Platform-specific: Jython.
-                    yield decoded
-
-            # Chunk content ends with \r\n: discard it.
-            while True:
-                line = self._fp.fp.readline()
-                if not line:
-                    # Some sites may not end with '\r\n'.
-                    break
-                if line == b'\r\n':
-                    break
-
-            # We read everything; close the "file".
-            if self._original_response:
-                self._original_response.close()
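For context, the deleted ``stream()``/``read_chunked()`` generators above were typically consumed as in the sketch below; this is a minimal illustration, assuming the vendored package is importable as ``requests.packages.urllib3`` and that ``preload_content=False`` is passed so the body is not read eagerly.

    from requests.packages.urllib3 import PoolManager

    http = PoolManager()
    # preload_content=False leaves the body on the socket so stream() can
    # drain it lazily via the generators above.
    resp = http.request('GET', 'http://example.com/', preload_content=False)
    total = 0
    for chunk in resp.stream(amt=2**16, decode_content=True):
        # Each iteration yields up to ~64 KiB, decoded per Content-Encoding.
        total += len(chunk)
    resp.release_conn()
    print('read %d bytes' % total)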
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/__init__.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/__init__.py
deleted file mode 100644
index c6c6243..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/__init__.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from __future__ import absolute_import
-# For backwards compatibility, provide imports that used to be here.
-from .connection import is_connection_dropped
-from .request import make_headers
-from .response import is_fp_closed
-from .ssl_ import (
-    SSLContext,
-    HAS_SNI,
-    assert_fingerprint,
-    resolve_cert_reqs,
-    resolve_ssl_version,
-    ssl_wrap_socket,
-)
-from .timeout import (
-    current_time,
-    Timeout,
-)
-
-from .retry import Retry
-from .url import (
-    get_host,
-    parse_url,
-    split_first,
-    Url,
-)
-
-__all__ = (
-    'HAS_SNI',
-    'SSLContext',
-    'Retry',
-    'Timeout',
-    'Url',
-    'assert_fingerprint',
-    'current_time',
-    'is_connection_dropped',
-    'is_fp_closed',
-    'get_host',
-    'parse_url',
-    'make_headers',
-    'resolve_cert_reqs',
-    'resolve_ssl_version',
-    'split_first',
-    'ssl_wrap_socket',
-)
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/connection.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/connection.py
deleted file mode 100644
index 01a4812..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/connection.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from __future__ import absolute_import
-import socket
-try:
-    from select import poll, POLLIN
-except ImportError:  # `poll` doesn't exist on OSX and other platforms
-    poll = False
-    try:
-        from select import select
-    except ImportError:  # `select` doesn't exist on AppEngine.
-        select = False
-
-
-def is_connection_dropped(conn):  # Platform-specific
-    """
-    Returns True if the connection is dropped and should be closed.
-
-    :param conn:
-        :class:`httplib.HTTPConnection` object.
-
-    Note: For platforms like AppEngine, this will always return ``False`` to
-    let the platform handle connection recycling transparently for us.
-    """
-    sock = getattr(conn, 'sock', False)
-    if sock is False:  # Platform-specific: AppEngine
-        return False
-    if sock is None:  # Connection already closed (such as by httplib).
-        return True
-
-    if not poll:
-        if not select:  # Platform-specific: AppEngine
-            return False
-
-        try:
-            return select([sock], [], [], 0.0)[0]
-        except socket.error:
-            return True
-
-    # This version is better on platforms that support it.
-    p = poll()
-    p.register(sock, POLLIN)
-    for (fno, ev) in p.poll(0.0):
-        if fno == sock.fileno():
-            # Either data is buffered (bad), or the connection is dropped.
-            return True
-
-
-# This function is copied from socket.py in the Python 2.7 standard
-# library test suite. Added to its signature is only `socket_options`.
-def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
-                      source_address=None, socket_options=None):
-    """Connect to *address* and return the socket object.
-
-    Convenience function.  Connect to *address* (a 2-tuple ``(host,
-    port)``) and return the socket object.  Passing the optional
-    *timeout* parameter will set the timeout on the socket instance
-    before attempting to connect.  If no *timeout* is supplied, the
-    global default timeout setting returned by :func:`getdefaulttimeout`
-    is used.  If *source_address* is set it must be a tuple of (host, port)
-    for the socket to bind as a source address before making the connection.
-    A host of '' or port 0 tells the OS to use the default.
-    """
-
-    host, port = address
-    if host.startswith('['):
-        host = host.strip('[]')
-    err = None
-    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
-        af, socktype, proto, canonname, sa = res
-        sock = None
-        try:
-            sock = socket.socket(af, socktype, proto)
-
-            # If provided, set socket level options before connecting.
-            # This is the only addition urllib3 makes to this function.
-            _set_socket_options(sock, socket_options)
-
-            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
-                sock.settimeout(timeout)
-            if source_address:
-                sock.bind(source_address)
-            sock.connect(sa)
-            return sock
-
-        except socket.error as e:
-            err = e
-            if sock is not None:
-                sock.close()
-                sock = None
-
-    if err is not None:
-        raise err
-
-    raise socket.error("getaddrinfo returns an empty list")
-
-
-def _set_socket_options(sock, options):
-    if options is None:
-        return
-
-    for opt in options:
-        sock.setsockopt(*opt)
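A minimal sketch of how the removed ``create_connection()`` helper is driven with ``socket_options`` (the one addition urllib3 makes over the stdlib version); the import path assumes the standalone ``urllib3`` layout rather than the vendored copy, and the host is a placeholder.

    import socket
    from urllib3.util.connection import create_connection

    # Disable Nagle's algorithm on the new socket before connect(); this is
    # exactly the kind of option list _set_socket_options() expects.
    opts = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
    sock = create_connection(('example.com', 80), timeout=5.0, socket_options=opts)
    try:
        sock.sendall(b'HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n')
        print(sock.recv(128))
    finally:
        sock.close()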
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/request.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/request.py
deleted file mode 100644
index 7377931..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/request.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from __future__ import absolute_import
-from base64 import b64encode
-
-from ..packages.six import b
-
-ACCEPT_ENCODING = 'gzip,deflate'
-
-
-def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
-                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
-    """
-    Shortcuts for generating request headers.
-
-    :param keep_alive:
-        If ``True``, adds 'connection: keep-alive' header.
-
-    :param accept_encoding:
-        Can be a boolean, list, or string.
-        ``True`` translates to 'gzip,deflate'.
-        List will get joined by comma.
-        String will be used as provided.
-
-    :param user_agent:
-        String representing the user-agent you want, such as
-        "python-urllib3/0.6"
-
-    :param basic_auth:
-        Colon-separated username:password string for 'authorization: basic ...'
-        auth header.
-
-    :param proxy_basic_auth:
-        Colon-separated username:password string for 'proxy-authorization: basic ...'
-        auth header.
-
-    :param disable_cache:
-        If ``True``, adds 'cache-control: no-cache' header.
-
-    Example::
-
-        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
-        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
-        >>> make_headers(accept_encoding=True)
-        {'accept-encoding': 'gzip,deflate'}
-    """
-    headers = {}
-    if accept_encoding:
-        if isinstance(accept_encoding, str):
-            pass
-        elif isinstance(accept_encoding, list):
-            accept_encoding = ','.join(accept_encoding)
-        else:
-            accept_encoding = ACCEPT_ENCODING
-        headers['accept-encoding'] = accept_encoding
-
-    if user_agent:
-        headers['user-agent'] = user_agent
-
-    if keep_alive:
-        headers['connection'] = 'keep-alive'
-
-    if basic_auth:
-        headers['authorization'] = 'Basic ' + \
-            b64encode(b(basic_auth)).decode('utf-8')
-
-    if proxy_basic_auth:
-        headers['proxy-authorization'] = 'Basic ' + \
-            b64encode(b(proxy_basic_auth)).decode('utf-8')
-
-    if disable_cache:
-        headers['cache-control'] = 'no-cache'
-
-    return headers
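To complement the docstring example above, a short sketch of ``make_headers()`` exercising the remaining parameters; the import path again assumes the standalone ``urllib3`` package and the credentials are placeholders.

    from urllib3.util.request import make_headers

    headers = make_headers(
        accept_encoding=['gzip', 'deflate'],   # list form is comma-joined
        basic_auth='user:secret',              # becomes 'authorization: Basic ...'
        disable_cache=True,                    # adds 'cache-control: no-cache'
    )
    # {'accept-encoding': 'gzip,deflate',
    #  'authorization': 'Basic dXNlcjpzZWNyZXQ=',
    #  'cache-control': 'no-cache'}
    print(headers)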
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/response.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/response.py
deleted file mode 100644
index bc72327..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/response.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from __future__ import absolute_import
-from ..packages.six.moves import http_client as httplib
-
-from ..exceptions import HeaderParsingError
-
-
-def is_fp_closed(obj):
-    """
-    Checks whether a given file-like object is closed.
-
-    :param obj:
-        The file-like object to check.
-    """
-
-    try:
-        # Check via the official file-like-object way.
-        return obj.closed
-    except AttributeError:
-        pass
-
-    try:
-        # Check if the object is a container for another file-like object that
-        # gets released on exhaustion (e.g. HTTPResponse).
-        return obj.fp is None
-    except AttributeError:
-        pass
-
-    raise ValueError("Unable to determine whether fp is closed.")
-
-
-def assert_header_parsing(headers):
-    """
-    Asserts that all headers have been successfully parsed.
-    Extracts encountered errors from the result of parsing headers.
-
-    Only works on Python 3.
-
-    :param headers: Headers to verify.
-    :type headers: `httplib.HTTPMessage`.
-
-    :raises urllib3.exceptions.HeaderParsingError:
-        If parsing errors are found.
-    """
-
-    # This will fail silently if we pass in the wrong kind of parameter.
-    # To make debugging easier add an explicit check.
-    if not isinstance(headers, httplib.HTTPMessage):
-        raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
-            type(headers)))
-
-    defects = getattr(headers, 'defects', None)
-    get_payload = getattr(headers, 'get_payload', None)
-
-    unparsed_data = None
-    if get_payload:  # Platform-specific: Python 3.
-        unparsed_data = get_payload()
-
-    if defects or unparsed_data:
-        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
-
-
-def is_response_to_head(response):
-    """
-    Checks whether the request that produced a response was a HEAD request.
-    Handles the quirks of AppEngine.
-
-    :param response: Response to check.
-    :type response: :class:`httplib.HTTPResponse`
-    """
-    # FIXME: Can we do this somehow without accessing private httplib _method?
-    method = response._method
-    if isinstance(method, int):  # Platform-specific: Appengine
-        return method == 3
-    return method.upper() == 'HEAD'
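A tiny sketch of what ``is_fp_closed()`` above reports for an ordinary file-like object; nothing urllib3-specific is needed beyond the helper itself, and the import path assumes the standalone package.

    import io
    from urllib3.util.response import is_fp_closed

    buf = io.BytesIO(b'payload')
    print(is_fp_closed(buf))   # False: the .closed attribute says the buffer is open
    buf.close()
    print(is_fp_closed(buf))   # True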
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/retry.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/retry.py
deleted file mode 100644
index 03a0124..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/retry.py
+++ /dev/null
@@ -1,286 +0,0 @@
-from __future__ import absolute_import
-import time
-import logging
-
-from ..exceptions import (
-    ConnectTimeoutError,
-    MaxRetryError,
-    ProtocolError,
-    ReadTimeoutError,
-    ResponseError,
-)
-from ..packages import six
-
-
-log = logging.getLogger(__name__)
-
-
-class Retry(object):
-    """ Retry configuration.
-
-    Each retry attempt will create a new Retry object with updated values, so
-    they can be safely reused.
-
-    Retries can be defined as a default for a pool::
-
-        retries = Retry(connect=5, read=2, redirect=5)
-        http = PoolManager(retries=retries)
-        response = http.request('GET', 'http://example.com/')
-
-    Or per-request (which overrides the default for the pool)::
-
-        response = http.request('GET', 'http://example.com/', retries=Retry(10))
-
-    Retries can be disabled by passing ``False``::
-
-        response = http.request('GET', 'http://example.com/', retries=False)
-
-    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
-    retries are disabled, in which case the causing exception will be raised.
-
-    :param int total:
-        Total number of retries to allow. Takes precedence over other counts.
-
-        Set to ``None`` to remove this constraint and fall back on other
-        counts. It's a good idea to set this to some sensibly-high value to
-        account for unexpected edge cases and avoid infinite retry loops.
-
-        Set to ``0`` to fail on the first retry.
-
-        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
-
-    :param int connect:
-        How many connection-related errors to retry on.
-
-        These are errors raised before the request is sent to the remote server,
-        so we assume the server has not begun processing the request.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-    :param int read:
-        How many times to retry on read errors.
-
-        These errors are raised after the request was sent to the server, so the
-        request may have side-effects.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-    :param int redirect:
-        How many redirects to perform. Limit this to avoid infinite redirect
-        loops.
-
-        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
-        308.
-
-        Set to ``0`` to fail on the first retry of this type.
-
-        Set to ``False`` to disable and imply ``raise_on_redirect=False``.
-
-    :param iterable method_whitelist:
-        Set of uppercased HTTP method verbs that we should retry on.
-
-        By default, we only retry on methods which are considered to be
-        idempotent (multiple requests with the same parameters end with the
-        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
-
-    :param iterable status_forcelist:
-        A set of HTTP status codes that we should force a retry on.
-
-        By default, this is disabled with ``None``.
-
-    :param float backoff_factor:
-        A backoff factor to apply between attempts. urllib3 will sleep for::
-
-            {backoff factor} * (2 ^ ({number of total retries} - 1))
-
-        seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
-        for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
-        than :attr:`Retry.BACKOFF_MAX`.
-
-        By default, backoff is disabled (set to 0).
-
-    :param bool raise_on_redirect: Whether, if the number of redirects is
-        exhausted, to raise a MaxRetryError, or to return a response with a
-        response code in the 3xx range.
-    """
-
-    DEFAULT_METHOD_WHITELIST = frozenset([
-        'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
-
-    #: Maximum backoff time.
-    BACKOFF_MAX = 120
-
-    def __init__(self, total=10, connect=None, read=None, redirect=None,
-                 method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
-                 backoff_factor=0, raise_on_redirect=True, _observed_errors=0):
-
-        self.total = total
-        self.connect = connect
-        self.read = read
-
-        if redirect is False or total is False:
-            redirect = 0
-            raise_on_redirect = False
-
-        self.redirect = redirect
-        self.status_forcelist = status_forcelist or set()
-        self.method_whitelist = method_whitelist
-        self.backoff_factor = backoff_factor
-        self.raise_on_redirect = raise_on_redirect
-        self._observed_errors = _observed_errors  # TODO: use .history instead?
-
-    def new(self, **kw):
-        params = dict(
-            total=self.total,
-            connect=self.connect, read=self.read, redirect=self.redirect,
-            method_whitelist=self.method_whitelist,
-            status_forcelist=self.status_forcelist,
-            backoff_factor=self.backoff_factor,
-            raise_on_redirect=self.raise_on_redirect,
-            _observed_errors=self._observed_errors,
-        )
-        params.update(kw)
-        return type(self)(**params)
-
-    @classmethod
-    def from_int(cls, retries, redirect=True, default=None):
-        """ Backwards-compatibility for the old retries format."""
-        if retries is None:
-            retries = default if default is not None else cls.DEFAULT
-
-        if isinstance(retries, Retry):
-            return retries
-
-        redirect = bool(redirect) and None
-        new_retries = cls(retries, redirect=redirect)
-        log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
-        return new_retries
-
-    def get_backoff_time(self):
-        """ Formula for computing the current backoff
-
-        :rtype: float
-        """
-        if self._observed_errors <= 1:
-            return 0
-
-        backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
-        return min(self.BACKOFF_MAX, backoff_value)
-
-    def sleep(self):
-        """ Sleep between retry attempts using an exponential backoff.
-
-        By default, the backoff factor is 0 and this method will return
-        immediately.
-        """
-        backoff = self.get_backoff_time()
-        if backoff <= 0:
-            return
-        time.sleep(backoff)
-
-    def _is_connection_error(self, err):
-        """ Errors when we're fairly sure that the server did not receive the
-        request, so it should be safe to retry.
-        """
-        return isinstance(err, ConnectTimeoutError)
-
-    def _is_read_error(self, err):
-        """ Errors that occur after the request has been started, so we should
-        assume that the server began processing it.
-        """
-        return isinstance(err, (ReadTimeoutError, ProtocolError))
-
-    def is_forced_retry(self, method, status_code):
-        """ Is this method/status code retryable? (Based on method/codes whitelists)
-        """
-        if self.method_whitelist and method.upper() not in self.method_whitelist:
-            return False
-
-        return self.status_forcelist and status_code in self.status_forcelist
-
-    def is_exhausted(self):
-        """ Are we out of retries? """
-        retry_counts = (self.total, self.connect, self.read, self.redirect)
-        retry_counts = list(filter(None, retry_counts))
-        if not retry_counts:
-            return False
-
-        return min(retry_counts) < 0
-
-    def increment(self, method=None, url=None, response=None, error=None,
-                  _pool=None, _stacktrace=None):
-        """ Return a new Retry object with incremented retry counters.
-
-        :param response: A response object, or None, if the server did not
-            return a response.
-        :type response: :class:`~urllib3.response.HTTPResponse`
-        :param Exception error: An error encountered during the request, or
-            None if the response was received successfully.
-
-        :return: A new ``Retry`` object.
-        """
-        if self.total is False and error:
-            # Disabled, indicate to re-raise the error.
-            raise six.reraise(type(error), error, _stacktrace)
-
-        total = self.total
-        if total is not None:
-            total -= 1
-
-        _observed_errors = self._observed_errors
-        connect = self.connect
-        read = self.read
-        redirect = self.redirect
-        cause = 'unknown'
-
-        if error and self._is_connection_error(error):
-            # Connect retry?
-            if connect is False:
-                raise six.reraise(type(error), error, _stacktrace)
-            elif connect is not None:
-                connect -= 1
-            _observed_errors += 1
-
-        elif error and self._is_read_error(error):
-            # Read retry?
-            if read is False:
-                raise six.reraise(type(error), error, _stacktrace)
-            elif read is not None:
-                read -= 1
-            _observed_errors += 1
-
-        elif response and response.get_redirect_location():
-            # Redirect retry?
-            if redirect is not None:
-                redirect -= 1
-            cause = 'too many redirects'
-
-        else:
-            # Incrementing because of a server error like a 500 in
-            # status_forcelist and the given method is in the whitelist.
-            _observed_errors += 1
-            cause = ResponseError.GENERIC_ERROR
-            if response and response.status:
-                cause = ResponseError.SPECIFIC_ERROR.format(
-                    status_code=response.status)
-
-        new_retry = self.new(
-            total=total,
-            connect=connect, read=read, redirect=redirect,
-            _observed_errors=_observed_errors)
-
-        if new_retry.is_exhausted():
-            raise MaxRetryError(_pool, url, error or ResponseError(cause))
-
-        log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
-
-        return new_retry
-
-    def __repr__(self):
-        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
-                'read={self.read}, redirect={self.redirect})').format(
-                    cls=type(self), self=self)
-
-
-# For backwards compatibility (equivalent to pre-v1.9):
-Retry.DEFAULT = Retry(3)
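A sketch of how the removed ``Retry`` policy composes with a pool, mirroring the class docstring above, with the backoff arithmetic spelled out; the sleep values assume ``backoff_factor=0.5`` in the formula shown in the docstring.

    from urllib3 import PoolManager
    from urllib3.util.retry import Retry

    # 5 connect retries, 2 read retries, at most 3 redirects.  With
    # backoff_factor=0.5 the sleep between consecutive errors is
    # 0.5 * 2**(n - 1): no sleep after the first error, then 1.0 s, 2.0 s,
    # 4.0 s, ... capped at Retry.BACKOFF_MAX (120 s).
    retries = Retry(total=10, connect=5, read=2, redirect=3, backoff_factor=0.5)
    http = PoolManager(retries=retries)
    response = http.request('GET', 'http://example.com/')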
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/ssl_.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/ssl_.py
deleted file mode 100644
index 67f8344..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/ssl_.py
+++ /dev/null
@@ -1,317 +0,0 @@
-from __future__ import absolute_import
-import errno
-import warnings
-import hmac
-
-from binascii import hexlify, unhexlify
-from hashlib import md5, sha1, sha256
-
-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
-
-
-SSLContext = None
-HAS_SNI = False
-create_default_context = None
-
-# Maps the length of a digest to a possible hash function producing this digest
-HASHFUNC_MAP = {
-    32: md5,
-    40: sha1,
-    64: sha256,
-}
-
-
-def _const_compare_digest_backport(a, b):
-    """
-    Compare two digests of equal length in constant time.
-
-    The digests must be of type str/bytes.
-    Returns True if the digests match, and False otherwise.
-    """
-    result = abs(len(a) - len(b))
-    for l, r in zip(bytearray(a), bytearray(b)):
-        result |= l ^ r
-    return result == 0
-
-
-_const_compare_digest = getattr(hmac, 'compare_digest',
-                                _const_compare_digest_backport)
-
-
-try:  # Test for SSL features
-    import ssl
-    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
-    from ssl import HAS_SNI  # Has SNI?
-except ImportError:
-    pass
-
-
-try:
-    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
-except ImportError:
-    OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
-    OP_NO_COMPRESSION = 0x20000
-
-# A secure default.
-# Sources for more information on TLS ciphers:
-#
-# - https://wiki.mozilla.org/Security/Server_Side_TLS
-# - https://www.ssllabs.com/projects/best-practices/index.html
-# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
-#
-# The general intent is:
-# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
-# - prefer ECDHE over DHE for better performance,
-# - prefer any AES-GCM over any AES-CBC for better performance and security,
-# - use 3DES as fallback which is secure but slow,
-# - disable NULL authentication, MD5 MACs and DSS for security reasons.
-DEFAULT_CIPHERS = (
-    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
-    '!eNULL:!MD5'
-)
-
-try:
-    from ssl import SSLContext  # Modern SSL?
-except ImportError:
-    import sys
-
-    class SSLContext(object):  # Platform-specific: Python 2 & 3.1
-        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
-                                (3, 2) <= sys.version_info)
-
-        def __init__(self, protocol_version):
-            self.protocol = protocol_version
-            # Use default values from a real SSLContext
-            self.check_hostname = False
-            self.verify_mode = ssl.CERT_NONE
-            self.ca_certs = None
-            self.options = 0
-            self.certfile = None
-            self.keyfile = None
-            self.ciphers = None
-
-        def load_cert_chain(self, certfile, keyfile):
-            self.certfile = certfile
-            self.keyfile = keyfile
-
-        def load_verify_locations(self, cafile=None, capath=None):
-            self.ca_certs = cafile
-
-            if capath is not None:
-                raise SSLError("CA directories not supported in older Pythons")
-
-        def set_ciphers(self, cipher_suite):
-            if not self.supports_set_ciphers:
-                raise TypeError(
-                    'Your version of Python does not support setting '
-                    'a custom cipher suite. Please upgrade to Python '
-                    '2.7, 3.2, or later if you need this functionality.'
-                )
-            self.ciphers = cipher_suite
-
-        def wrap_socket(self, socket, server_hostname=None):
-            warnings.warn(
-                'A true SSLContext object is not available. This prevents '
-                'urllib3 from configuring SSL appropriately and may cause '
-                'certain SSL connections to fail. For more information, see '
-                'https://urllib3.readthedocs.org/en/latest/security.html'
-                '#insecureplatformwarning.',
-                InsecurePlatformWarning
-            )
-            kwargs = {
-                'keyfile': self.keyfile,
-                'certfile': self.certfile,
-                'ca_certs': self.ca_certs,
-                'cert_reqs': self.verify_mode,
-                'ssl_version': self.protocol,
-            }
-            if self.supports_set_ciphers:  # Platform-specific: Python 2.7+
-                return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
-            else:  # Platform-specific: Python 2.6
-                return wrap_socket(socket, **kwargs)
-
-
-def assert_fingerprint(cert, fingerprint):
-    """
-    Checks if given fingerprint matches the supplied certificate.
-
-    :param cert:
-        Certificate as bytes object.
-    :param fingerprint:
-        Fingerprint as string of hexdigits, can be interspersed by colons.
-    """
-
-    fingerprint = fingerprint.replace(':', '').lower()
-    digest_length = len(fingerprint)
-    hashfunc = HASHFUNC_MAP.get(digest_length)
-    if not hashfunc:
-        raise SSLError(
-            'Fingerprint of invalid length: {0}'.format(fingerprint))
-
-    # We need encode() here for py32; works on py2 and py33.
-    fingerprint_bytes = unhexlify(fingerprint.encode())
-
-    cert_digest = hashfunc(cert).digest()
-
-    if not _const_compare_digest(cert_digest, fingerprint_bytes):
-        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
-                       .format(fingerprint, hexlify(cert_digest)))
-
-
-def resolve_cert_reqs(candidate):
-    """
-    Resolves the argument to a numeric constant, which can be passed to
-    the wrap_socket function/method from the ssl module.
-    Defaults to :data:`ssl.CERT_NONE`.
-    If given a string it is assumed to be the name of the constant in the
-    :mod:`ssl` module or its abbreviation.
-    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
-    If it's neither `None` nor a string we assume it is already the numeric
-    constant which can directly be passed to wrap_socket.
-    """
-    if candidate is None:
-        return CERT_NONE
-
-    if isinstance(candidate, str):
-        res = getattr(ssl, candidate, None)
-        if res is None:
-            res = getattr(ssl, 'CERT_' + candidate)
-        return res
-
-    return candidate
-
-
-def resolve_ssl_version(candidate):
-    """
-    Like :func:`resolve_cert_reqs`, but resolves SSL protocol version constants.
-    """
-    if candidate is None:
-        return PROTOCOL_SSLv23
-
-    if isinstance(candidate, str):
-        res = getattr(ssl, candidate, None)
-        if res is None:
-            res = getattr(ssl, 'PROTOCOL_' + candidate)
-        return res
-
-    return candidate
-
-
-def create_urllib3_context(ssl_version=None, cert_reqs=None,
-                           options=None, ciphers=None):
-    """All arguments have the same meaning as ``ssl_wrap_socket``.
-
-    By default, this function does a lot of the same work that
-    ``ssl.create_default_context`` does on Python 3.4+. It:
-
-    - Disables SSLv2, SSLv3, and compression
-    - Sets a restricted set of server ciphers
-
-    If you wish to enable SSLv3, you can do::
-
-        from urllib3.util import ssl_
-        context = ssl_.create_urllib3_context()
-        context.options &= ~ssl_.OP_NO_SSLv3
-
-    You can do the same to enable compression (substituting ``COMPRESSION``
-    for ``SSLv3`` in the last line above).
-
-    :param ssl_version:
-        The desired protocol version to use. This will default to
-        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
-        the server and your installation of OpenSSL support.
-    :param cert_reqs:
-        Whether to require the certificate verification. This defaults to
-        ``ssl.CERT_REQUIRED``.
-    :param options:
-        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
-        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
-    :param ciphers:
-        Which cipher suites to allow the server to select.
-    :returns:
-        Constructed SSLContext object with specified options
-    :rtype: SSLContext
-    """
-    context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
-
-    # Setting the default here, as we may have no ssl module on import
-    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
-
-    if options is None:
-        options = 0
-        # SSLv2 is easily broken and is considered harmful and dangerous
-        options |= OP_NO_SSLv2
-        # SSLv3 has several problems and is now dangerous
-        options |= OP_NO_SSLv3
-        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
-        # (issue #309)
-        options |= OP_NO_COMPRESSION
-
-    context.options |= options
-
-    if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
-        context.set_ciphers(ciphers or DEFAULT_CIPHERS)
-
-    context.verify_mode = cert_reqs
-    if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
-        # We do our own verification, including fingerprints and alternative
-        # hostnames. So disable it here
-        context.check_hostname = False
-    return context
-
-
-def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
-                    ca_certs=None, server_hostname=None,
-                    ssl_version=None, ciphers=None, ssl_context=None,
-                    ca_cert_dir=None):
-    """
-    All arguments except for server_hostname, ssl_context, and ca_cert_dir have
-    the same meaning as they do when using :func:`ssl.wrap_socket`.
-
-    :param server_hostname:
-        When SNI is supported, the expected hostname of the certificate
-    :param ssl_context:
-        A pre-made :class:`SSLContext` object. If none is provided, one will
-        be created using :func:`create_urllib3_context`.
-    :param ciphers:
-        A string of ciphers we wish the client to support. This is not
-        supported on Python 2.6 as the ssl module does not support it.
-    :param ca_cert_dir:
-        A directory containing CA certificates in multiple separate files, as
-        supported by OpenSSL's -CApath flag or the capath argument to
-        SSLContext.load_verify_locations().
-    """
-    context = ssl_context
-    if context is None:
-        context = create_urllib3_context(ssl_version, cert_reqs,
-                                         ciphers=ciphers)
-
-    if ca_certs or ca_cert_dir:
-        try:
-            context.load_verify_locations(ca_certs, ca_cert_dir)
-        except IOError as e:  # Platform-specific: Python 2.6, 2.7, 3.2
-            raise SSLError(e)
-        # Py33 raises FileNotFoundError which subclasses OSError
-        # These are not equivalent unless we check the errno attribute
-        except OSError as e:  # Platform-specific: Python 3.3 and beyond
-            if e.errno == errno.ENOENT:
-                raise SSLError(e)
-            raise
-
-    if certfile:
-        context.load_cert_chain(certfile, keyfile)
-    if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
-        return context.wrap_socket(sock, server_hostname=server_hostname)
-
-    warnings.warn(
-        'An HTTPS request has been made, but the SNI (Subject Name '
-        'Indication) extension to TLS is not available on this platform. '
-        'This may cause the server to present an incorrect TLS '
-        'certificate, which can cause validation failures. For more '
-        'information, see '
-        'https://urllib3.readthedocs.org/en/latest/security.html'
-        '#snimissingwarning.',
-        SNIMissingWarning
-    )
-    return context.wrap_socket(sock)
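A hedged sketch of the intended call pattern for the removed helpers: build a hardened context with ``create_urllib3_context()`` and hand it to ``ssl_wrap_socket()``. The hostname and CA bundle path are placeholders, and the import path assumes the standalone ``urllib3`` package.

    import socket
    from urllib3.util.ssl_ import create_urllib3_context, ssl_wrap_socket

    hostname = 'example.com'                 # placeholder host
    context = create_urllib3_context()       # no SSLv2/SSLv3, restricted ciphers
    raw_sock = socket.create_connection((hostname, 443), timeout=5.0)
    tls_sock = ssl_wrap_socket(
        raw_sock,
        ca_certs='/etc/ssl/certs/ca-certificates.crt',  # placeholder CA bundle
        server_hostname=hostname,
        ssl_context=context)
    print(tls_sock.cipher())
    tls_sock.close()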
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/timeout.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/timeout.py
deleted file mode 100644
index ff62f47..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/timeout.py
+++ /dev/null
@@ -1,242 +0,0 @@
-from __future__ import absolute_import
-# The default socket timeout, used by httplib to indicate that no timeout was
-# specified by the user
-from socket import _GLOBAL_DEFAULT_TIMEOUT
-import time
-
-from ..exceptions import TimeoutStateError
-
-# A sentinel value to indicate that no timeout was specified by the user in
-# urllib3
-_Default = object()
-
-
-def current_time():
-    """
-    Retrieve the current time. This function is mocked out in unit testing.
-    """
-    return time.time()
-
-
-class Timeout(object):
-    """ Timeout configuration.
-
-    Timeouts can be defined as a default for a pool::
-
-        timeout = Timeout(connect=2.0, read=7.0)
-        http = PoolManager(timeout=timeout)
-        response = http.request('GET', 'http://example.com/')
-
-    Or per-request (which overrides the default for the pool)::
-
-        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
-
-    Timeouts can be disabled by setting all the parameters to ``None``::
-
-        no_timeout = Timeout(connect=None, read=None)
-        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
-
-
-    :param total:
-        This combines the connect and read timeouts into one; the read timeout
-        will be set to the time leftover from the connect attempt. In the
-        event that both a connect timeout and a total are specified, or a read
-        timeout and a total are specified, the shorter timeout will be applied.
-
-        Defaults to None.
-
-    :type total: integer, float, or None
-
-    :param connect:
-        The maximum amount of time to wait for a connection attempt to a server
-        to succeed. Omitting the parameter will default the connect timeout to
-        the system default, probably `the global default timeout in socket.py
-        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
-        None will set an infinite timeout for connection attempts.
-
-    :type connect: integer, float, or None
-
-    :param read:
-        The maximum amount of time to wait between consecutive
-        read operations for a response from the server. Omitting
-        the parameter will default the read timeout to the system
-        default, probably `the global default timeout in socket.py
-        <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
-        None will set an infinite timeout.
-
-    :type read: integer, float, or None
-
-    .. note::
-
-        Many factors can affect the total amount of time for urllib3 to return
-        an HTTP response.
-
-        For example, Python's DNS resolver does not obey the timeout specified
-        on the socket. Other factors that can affect total request time include
-        high CPU load, high swap, the program running at a low priority level,
-        or other behaviors.
-
-        In addition, the read and total timeouts only measure the time between
-        read operations on the socket connecting the client and the server,
-        not the total amount of time for the request to return a complete
-        response. For most requests, the timeout is raised because the server
-        has not sent the first byte in the specified time. This is not always
-        the case; if a server streams one byte every fifteen seconds, a timeout
-        of 20 seconds will not trigger, even though the request will take
-        several minutes to complete.
-
-        If your goal is to cut off any request after a set amount of wall clock
-        time, consider having a second "watcher" thread to cut off a slow
-        request.
-    """
-
-    #: A sentinel object representing the default timeout value
-    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
-
-    def __init__(self, total=None, connect=_Default, read=_Default):
-        self._connect = self._validate_timeout(connect, 'connect')
-        self._read = self._validate_timeout(read, 'read')
-        self.total = self._validate_timeout(total, 'total')
-        self._start_connect = None
-
-    def __str__(self):
-        return '%s(connect=%r, read=%r, total=%r)' % (
-            type(self).__name__, self._connect, self._read, self.total)
-
-    @classmethod
-    def _validate_timeout(cls, value, name):
-        """ Check that a timeout attribute is valid.
-
-        :param value: The timeout value to validate
-        :param name: The name of the timeout attribute to validate. This is
-            used to specify in error messages.
-        :return: The validated and casted version of the given value.
-        :raises ValueError: If the type is not an integer or a float, or if it
-            is a numeric value less than zero.
-        """
-        if value is _Default:
-            return cls.DEFAULT_TIMEOUT
-
-        if value is None or value is cls.DEFAULT_TIMEOUT:
-            return value
-
-        try:
-            float(value)
-        except (TypeError, ValueError):
-            raise ValueError("Timeout value %s was %s, but it must be an "
-                             "int or float." % (name, value))
-
-        try:
-            if value < 0:
-                raise ValueError("Attempted to set %s timeout to %s, but the "
-                                 "timeout cannot be set to a value less "
-                                 "than 0." % (name, value))
-        except TypeError:  # Python 3
-            raise ValueError("Timeout value %s was %s, but it must be an "
-                             "int or float." % (name, value))
-
-        return value
-
-    @classmethod
-    def from_float(cls, timeout):
-        """ Create a new Timeout from a legacy timeout value.
-
-        The timeout value used by httplib.py sets the same timeout on the
-        connect(), and recv() socket requests. This creates a :class:`Timeout`
-        object that sets the individual timeouts to the ``timeout`` value
-        passed to this function.
-
-        :param timeout: The legacy timeout value.
-        :type timeout: integer, float, sentinel default object, or None
-        :return: Timeout object
-        :rtype: :class:`Timeout`
-        """
-        return Timeout(read=timeout, connect=timeout)
-
-    def clone(self):
-        """ Create a copy of the timeout object
-
-        Timeout properties are stored per-pool but each request needs a fresh
-        Timeout object to ensure each one has its own start/stop configured.
-
-        :return: a copy of the timeout object
-        :rtype: :class:`Timeout`
-        """
-        # We can't use copy.deepcopy because that will also create a new object
-        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
-        # detect the user default.
-        return Timeout(connect=self._connect, read=self._read,
-                       total=self.total)
-
-    def start_connect(self):
-        """ Start the timeout clock, used during a connect() attempt
-
-        :raises urllib3.exceptions.TimeoutStateError: if you attempt
-            to start a timer that has been started already.
-        """
-        if self._start_connect is not None:
-            raise TimeoutStateError("Timeout timer has already been started.")
-        self._start_connect = current_time()
-        return self._start_connect
-
-    def get_connect_duration(self):
-        """ Gets the time elapsed since the call to :meth:`start_connect`.
-
-        :return: Elapsed time.
-        :rtype: float
-        :raises urllib3.exceptions.TimeoutStateError: if you attempt
-            to get duration for a timer that hasn't been started.
-        """
-        if self._start_connect is None:
-            raise TimeoutStateError("Can't get connect duration for timer "
-                                    "that has not started.")
-        return current_time() - self._start_connect
-
-    @property
-    def connect_timeout(self):
-        """ Get the value to use when setting a connection timeout.
-
-        This will be a positive float or integer, the value None
-        (never timeout), or the default system timeout.
-
-        :return: Connect timeout.
-        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
-        """
-        if self.total is None:
-            return self._connect
-
-        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
-            return self.total
-
-        return min(self._connect, self.total)
-
-    @property
-    def read_timeout(self):
-        """ Get the value for the read timeout.
-
-        This assumes some time has elapsed in the connection timeout and
-        computes the read timeout appropriately.
-
-        If self.total is set, the read timeout is dependent on the amount of
-        time taken by the connect timeout. If the connection time has not been
-        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
-        raised.
-
-        :return: Value to use for the read timeout.
-        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
-        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
-            has not yet been called on this object.
-        """
-        if (self.total is not None and
-                self.total is not self.DEFAULT_TIMEOUT and
-                self._read is not None and
-                self._read is not self.DEFAULT_TIMEOUT):
-            # In case the connect timeout has not yet been established.
-            if self._start_connect is None:
-                return self._read
-            return max(0, min(self.total - self.get_connect_duration(),
-                              self._read))
-        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
-            return max(0, self.total - self.get_connect_duration())
-        else:
-            return self._read
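A short sketch showing how the removed ``Timeout`` object splits its budget between the connect and read phases; the values are arbitrary and the import path assumes the standalone package.

    from urllib3.util.timeout import Timeout

    t = Timeout(total=10.0, connect=3.0, read=7.0)
    print(t.connect_timeout)   # 3.0  -> min(connect, total)
    t = t.clone()              # each request gets a fresh copy
    t.start_connect()          # start the clock before connect()
    # ... connect() runs here; suppose it took 2 seconds ...
    print(t.read_timeout)      # min(total - elapsed, read), never below 0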
diff --git a/tools/swarming_client/third_party/requests/packages/urllib3/util/url.py b/tools/swarming_client/third_party/requests/packages/urllib3/util/url.py
deleted file mode 100644
index e996204..0000000
--- a/tools/swarming_client/third_party/requests/packages/urllib3/util/url.py
+++ /dev/null
@@ -1,217 +0,0 @@
-from __future__ import absolute_import
-from collections import namedtuple
-
-from ..exceptions import LocationParseError
-
-
-url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
-
-
-class Url(namedtuple('Url', url_attrs)):
-    """
-    Data structure for representing an HTTP URL. Used as a return value for
-    :func:`parse_url`.
-    """
-    slots = ()
-
-    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
-                query=None, fragment=None):
-        if path and not path.startswith('/'):
-            path = '/' + path
-        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
-                                       query, fragment)
-
-    @property
-    def hostname(self):
-        """For backwards-compatibility with urlparse. We're nice like that."""
-        return self.host
-
-    @property
-    def request_uri(self):
-        """Absolute path including the query string."""
-        uri = self.path or '/'
-
-        if self.query is not None:
-            uri += '?' + self.query
-
-        return uri
-
-    @property
-    def netloc(self):
-        """Network location including host and port"""
-        if self.port:
-            return '%s:%d' % (self.host, self.port)
-        return self.host
-
-    @property
-    def url(self):
-        """
-        Convert self into a url
-
-        This function should more or less round-trip with :func:`.parse_url`. The
-        returned url may not be exactly the same as the url passed to
-        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
-        with a blank port will have : removed).
-
-        Example: ::
-
-            >>> U = parse_url('http://google.com/mail/')
-            >>> U.url
-            'http://google.com/mail/'
-            >>> Url('http', 'username:password', 'host.com', 80,
-            ... '/path', 'query', 'fragment').url
-            'http://username:password@host.com:80/path?query#fragment'
-        """
-        scheme, auth, host, port, path, query, fragment = self
-        url = ''
-
-        # We use "is not None" we want things to happen with empty strings (or 0 port)
-        if scheme is not None:
-            url += scheme + '://'
-        if auth is not None:
-            url += auth + '@'
-        if host is not None:
-            url += host
-        if port is not None:
-            url += ':' + str(port)
-        if path is not None:
-            url += path
-        if query is not None:
-            url += '?' + query
-        if fragment is not None:
-            url += '#' + fragment
-
-        return url
-
-    def __str__(self):
-        return self.url
-
-
-def split_first(s, delims):
-    """
-    Given a string and an iterable of delimiters, split on the first found
-    delimiter. Return two split parts and the matched delimiter.
-
-    If not found, then the first part is the full input string.
-
-    Example::
-
-        >>> split_first('foo/bar?baz', '?/=')
-        ('foo', 'bar?baz', '/')
-        >>> split_first('foo/bar?baz', '123')
-        ('foo/bar?baz', '', None)
-
-    Scales linearly with the number of delims. Not ideal for a large number of delims.
-    """
-    min_idx = None
-    min_delim = None
-    for d in delims:
-        idx = s.find(d)
-        if idx < 0:
-            continue
-
-        if min_idx is None or idx < min_idx:
-            min_idx = idx
-            min_delim = d
-
-    if min_idx is None or min_idx < 0:
-        return s, '', None
-
-    return s[:min_idx], s[min_idx + 1:], min_delim
-
-
-def parse_url(url):
-    """
-    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
-    performed to parse incomplete urls. Fields not provided will be None.
-
-    Partly backwards-compatible with :mod:`urlparse`.
-
-    Example::
-
-        >>> parse_url('http://google.com/mail/')
-        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
-        >>> parse_url('google.com:80')
-        Url(scheme=None, host='google.com', port=80, path=None, ...)
-        >>> parse_url('/foo?bar')
-        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
-    """
-
-    # While this code has overlap with stdlib's urlparse, it is much
-    # simplified for our needs and less annoying.
-    # Additionally, this implementation does silly things to be optimal
-    # on CPython.
-
-    if not url:
-        # Empty
-        return Url()
-
-    scheme = None
-    auth = None
-    host = None
-    port = None
-    path = None
-    fragment = None
-    query = None
-
-    # Scheme
-    if '://' in url:
-        scheme, url = url.split('://', 1)
-
-    # Find the earliest Authority Terminator
-    # (http://tools.ietf.org/html/rfc3986#section-3.2)
-    url, path_, delim = split_first(url, ['/', '?', '#'])
-
-    if delim:
-        # Reassemble the path
-        path = delim + path_
-
-    # Auth
-    if '@' in url:
-        # Last '@' denotes end of auth part
-        auth, url = url.rsplit('@', 1)
-
-    # IPv6
-    if url and url[0] == '[':
-        host, url = url.split(']', 1)
-        host += ']'
-
-    # Port
-    if ':' in url:
-        _host, port = url.split(':', 1)
-
-        if not host:
-            host = _host
-
-        if port:
-            # If given, ports must be integers.
-            if not port.isdigit():
-                raise LocationParseError(url)
-            port = int(port)
-        else:
-            # Blank ports are cool, too. (rfc3986#section-3.2.3)
-            port = None
-
-    elif not host and url:
-        host = url
-
-    if not path:
-        return Url(scheme, auth, host, port, path, query, fragment)
-
-    # Fragment
-    if '#' in path:
-        path, fragment = path.split('#', 1)
-
-    # Query
-    if '?' in path:
-        path, query = path.split('?', 1)
-
-    return Url(scheme, auth, host, port, path, query, fragment)
-
-
-def get_host(url):
-    """
-    Deprecated. Use :func:`.parse_url` instead.
-    """
-    p = parse_url(url)
-    return p.scheme or 'http', p.hostname, p.port
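Rounding out the docstring examples above, a small sketch of the removed URL helpers end to end; the import path assumes the standalone package and the URL is a placeholder.

    from urllib3.util.url import parse_url, get_host

    u = parse_url('https://user:pw@example.com:8443/search?q=v8#frag')
    print(u.scheme, u.auth, u.host, u.port)   # https user:pw example.com 8443
    print(u.request_uri)                      # /search?q=v8
    print(u.netloc)                           # example.com:8443
    print(u.url == str(u))                    # True: __str__ round-trips via .url

    print(get_host('http://example.com/mail/'))   # ('http', 'example.com', None)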
diff --git a/tools/swarming_client/third_party/requests/sessions.py b/tools/swarming_client/third_party/requests/sessions.py
deleted file mode 100644
index 9eaa36a..0000000
--- a/tools/swarming_client/third_party/requests/sessions.py
+++ /dev/null
@@ -1,680 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.session
-~~~~~~~~~~~~~~~~
-
-This module provides a Session object to manage and persist settings across
-requests (cookies, auth, proxies).
-
-"""
-import os
-from collections import Mapping
-from datetime import datetime
-
-from .auth import _basic_auth_str
-from .compat import cookielib, OrderedDict, urljoin, urlparse
-from .cookies import (
-    cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
-from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
-from .hooks import default_hooks, dispatch_hook
-from .utils import to_key_val_list, default_headers, to_native_string
-from .exceptions import (
-    TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
-from .packages.urllib3._collections import RecentlyUsedContainer
-from .structures import CaseInsensitiveDict
-
-from .adapters import HTTPAdapter
-
-from .utils import (
-    requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
-    get_auth_from_url
-)
-
-from .status_codes import codes
-
-# formerly defined here, reexposed here for backward compatibility
-from .models import REDIRECT_STATI
-
-REDIRECT_CACHE_SIZE = 1000
-
-
-def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
-    """
-    Determines the appropriate setting for a given request, taking into account
-    the explicit setting on that request and the setting in the session. If a
-    setting is a dictionary, they will be merged together using `dict_class`.
-    """
-
-    if session_setting is None:
-        return request_setting
-
-    if request_setting is None:
-        return session_setting
-
-    # Bypass if not a dictionary (e.g. verify)
-    if not (
-            isinstance(session_setting, Mapping) and
-            isinstance(request_setting, Mapping)
-    ):
-        return request_setting
-
-    merged_setting = dict_class(to_key_val_list(session_setting))
-    merged_setting.update(to_key_val_list(request_setting))
-
-    # Remove keys that are set to None. Extract keys first to avoid altering
-    # the dictionary during iteration.
-    none_keys = [k for (k, v) in merged_setting.items() if v is None]
-    for key in none_keys:
-        del merged_setting[key]
-
-    return merged_setting
-
-
-def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
-    """
-    Properly merges both requests and session hooks.
-
-    This is necessary because when request_hooks == {'response': []}, the
-    merge breaks Session hooks entirely.
-    """
-    if session_hooks is None or session_hooks.get('response') == []:
-        return request_hooks
-
-    if request_hooks is None or request_hooks.get('response') == []:
-        return session_hooks
-
-    return merge_setting(request_hooks, session_hooks, dict_class)
-
-
-class SessionRedirectMixin(object):
-    def resolve_redirects(self, resp, req, stream=False, timeout=None,
-                          verify=True, cert=None, proxies=None, **adapter_kwargs):
-        """Receives a Response. Returns a generator of Responses."""
-
-        i = 0
-        hist = [] # keep track of history
-
-        while resp.is_redirect:
-            prepared_request = req.copy()
-
-            if i > 0:
-                # Update history and keep track of redirects.
-                hist.append(resp)
-                new_hist = list(hist)
-                resp.history = new_hist
-
-            try:
-                resp.content  # Consume socket so it can be released
-            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
-                resp.raw.read(decode_content=False)
-
-            if i >= self.max_redirects:
-                raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
-
-            # Release the connection back into the pool.
-            resp.close()
-
-            url = resp.headers['location']
-            method = req.method
-
-            # Handle redirection without scheme (see: RFC 1808 Section 4)
-            if url.startswith('//'):
-                parsed_rurl = urlparse(resp.url)
-                url = '%s:%s' % (parsed_rurl.scheme, url)
-
-            # The scheme should be lower case...
-            parsed = urlparse(url)
-            url = parsed.geturl()
-
-            # Facilitate relative 'location' headers, as allowed by RFC 7231.
-            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
-            # Compliant with RFC3986, we percent encode the url.
-            if not parsed.netloc:
-                url = urljoin(resp.url, requote_uri(url))
-            else:
-                url = requote_uri(url)
-
-            prepared_request.url = to_native_string(url)
-            # Cache the url, unless it redirects to itself.
-            if resp.is_permanent_redirect and req.url != prepared_request.url:
-                self.redirect_cache[req.url] = prepared_request.url
-
-            # http://tools.ietf.org/html/rfc7231#section-6.4.4
-            if (resp.status_code == codes.see_other and
-                    method != 'HEAD'):
-                method = 'GET'
-
-            # Do what the browsers do, despite standards...
-            # First, turn 302s into GETs.
-            if resp.status_code == codes.found and method != 'HEAD':
-                method = 'GET'
-
-            # Second, if a POST is responded to with a 301, turn it into a GET.
-            # This bizarre behaviour is explained in Issue 1704.
-            if resp.status_code == codes.moved and method == 'POST':
-                method = 'GET'
-
-            prepared_request.method = method
-
-            # https://github.com/kennethreitz/requests/issues/1084
-            if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
-                if 'Content-Length' in prepared_request.headers:
-                    del prepared_request.headers['Content-Length']
-
-                prepared_request.body = None
-
-            headers = prepared_request.headers
-            try:
-                del headers['Cookie']
-            except KeyError:
-                pass
-
-            # Extract any cookies sent on the response to the cookiejar
-            # in the new request. Because we've mutated our copied prepared
-            # request, use the old one that we haven't yet touched.
-            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
-            prepared_request._cookies.update(self.cookies)
-            prepared_request.prepare_cookies(prepared_request._cookies)
-
-            # Rebuild auth and proxy information.
-            proxies = self.rebuild_proxies(prepared_request, proxies)
-            self.rebuild_auth(prepared_request, resp)
-
-            # Override the original request.
-            req = prepared_request
-
-            resp = self.send(
-                req,
-                stream=stream,
-                timeout=timeout,
-                verify=verify,
-                cert=cert,
-                proxies=proxies,
-                allow_redirects=False,
-                **adapter_kwargs
-            )
-
-            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
-
-            i += 1
-            yield resp
-
-    def rebuild_auth(self, prepared_request, response):
-        """
-        When being redirected we may want to strip authentication from the
-        request to avoid leaking credentials. This method intelligently removes
-        and reapplies authentication where possible to avoid credential loss.
-        """
-        headers = prepared_request.headers
-        url = prepared_request.url
-
-        if 'Authorization' in headers:
-            # If we get redirected to a new host, we should strip out any
-            # authentication headers.
-            original_parsed = urlparse(response.request.url)
-            redirect_parsed = urlparse(url)
-
-            if (original_parsed.hostname != redirect_parsed.hostname):
-                del headers['Authorization']
-
-        # .netrc might have more auth for us on our new host.
-        new_auth = get_netrc_auth(url) if self.trust_env else None
-        if new_auth is not None:
-            prepared_request.prepare_auth(new_auth)
-
-        return
-
-    def rebuild_proxies(self, prepared_request, proxies):
-        """
-        This method re-evaluates the proxy configuration by considering the
-        environment variables. If we are redirected to a URL covered by
-        NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
-        proxy keys for this URL (in case they were stripped by a previous
-        redirect).
-
-        This method also replaces the Proxy-Authorization header where
-        necessary.
-        """
-        headers = prepared_request.headers
-        url = prepared_request.url
-        scheme = urlparse(url).scheme
-        new_proxies = proxies.copy() if proxies is not None else {}
-
-        if self.trust_env and not should_bypass_proxies(url):
-            environ_proxies = get_environ_proxies(url)
-
-            proxy = environ_proxies.get(scheme)
-
-            if proxy:
-                new_proxies.setdefault(scheme, environ_proxies[scheme])
-
-        if 'Proxy-Authorization' in headers:
-            del headers['Proxy-Authorization']
-
-        try:
-            username, password = get_auth_from_url(new_proxies[scheme])
-        except KeyError:
-            username, password = None, None
-
-        if username and password:
-            headers['Proxy-Authorization'] = _basic_auth_str(username, password)
-
-        return new_proxies
-
-
-class Session(SessionRedirectMixin):
-    """A Requests session.
-
-    Provides cookie persistence, connection-pooling, and configuration.
-
-    Basic Usage::
-
-      >>> import requests
-      >>> s = requests.Session()
-      >>> s.get('http://httpbin.org/get')
-      <Response [200]>
-
-    Or as a context manager::
-
-      >>> with requests.Session() as s:
-      ...     s.get('http://httpbin.org/get')
-      <Response [200]>
-    """
-
-    __attrs__ = [
-        'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
-        'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
-        'max_redirects',
-    ]
-
-    def __init__(self):
-
-        #: A case-insensitive dictionary of headers to be sent on each
-        #: :class:`Request <Request>` sent from this
-        #: :class:`Session <Session>`.
-        self.headers = default_headers()
-
-        #: Default Authentication tuple or object to attach to
-        #: :class:`Request <Request>`.
-        self.auth = None
-
-        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
-        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
-        #: be used on each :class:`Request <Request>`.
-        self.proxies = {}
-
-        #: Event-handling hooks.
-        self.hooks = default_hooks()
-
-        #: Dictionary of querystring data to attach to each
-        #: :class:`Request <Request>`. The dictionary values may be lists for
-        #: representing multivalued query parameters.
-        self.params = {}
-
-        #: Stream response content default.
-        self.stream = False
-
-        #: SSL Verification default.
-        self.verify = True
-
-        #: SSL certificate default.
-        self.cert = None
-
-        #: Maximum number of redirects allowed. If the request exceeds this
-        #: limit, a :class:`TooManyRedirects` exception is raised.
-        self.max_redirects = DEFAULT_REDIRECT_LIMIT
-
-        #: Trust environment settings for proxy configuration, default
-        #: authentication and similar.
-        self.trust_env = True
-
-        #: A CookieJar containing all currently outstanding cookies set on this
-        #: session. By default it is a
-        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
-        #: may be any other ``cookielib.CookieJar`` compatible object.
-        self.cookies = cookiejar_from_dict({})
-
-        # Default connection adapters.
-        self.adapters = OrderedDict()
-        self.mount('https://', HTTPAdapter())
-        self.mount('http://', HTTPAdapter())
-
-        # Only store 1000 redirects to prevent using infinite memory
-        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        self.close()
-
-    def prepare_request(self, request):
-        """Constructs a :class:`PreparedRequest <PreparedRequest>` for
-        transmission and returns it. The :class:`PreparedRequest` has settings
-        merged from the :class:`Request <Request>` instance and those of the
-        :class:`Session`.
-
-        :param request: :class:`Request` instance to prepare with this
-            session's settings.
-        """
-        cookies = request.cookies or {}
-
-        # Bootstrap CookieJar.
-        if not isinstance(cookies, cookielib.CookieJar):
-            cookies = cookiejar_from_dict(cookies)
-
-        # Merge with session cookies
-        merged_cookies = merge_cookies(
-            merge_cookies(RequestsCookieJar(), self.cookies), cookies)
-
-
-        # Set environment's basic authentication if not explicitly set.
-        auth = request.auth
-        if self.trust_env and not auth and not self.auth:
-            auth = get_netrc_auth(request.url)
-
-        p = PreparedRequest()
-        p.prepare(
-            method=request.method.upper(),
-            url=request.url,
-            files=request.files,
-            data=request.data,
-            json=request.json,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
-            params=merge_setting(request.params, self.params),
-            auth=merge_setting(auth, self.auth),
-            cookies=merged_cookies,
-            hooks=merge_hooks(request.hooks, self.hooks),
-        )
-        return p
-
-    def request(self, method, url,
-        params=None,
-        data=None,
-        headers=None,
-        cookies=None,
-        files=None,
-        auth=None,
-        timeout=None,
-        allow_redirects=True,
-        proxies=None,
-        hooks=None,
-        stream=None,
-        verify=None,
-        cert=None,
-        json=None):
-        """Constructs a :class:`Request <Request>`, prepares it and sends it.
-        Returns :class:`Response <Response>` object.
-
-        :param method: method for the new :class:`Request` object.
-        :param url: URL for the new :class:`Request` object.
-        :param params: (optional) Dictionary or bytes to be sent in the query
-            string for the :class:`Request`.
-        :param data: (optional) Dictionary, bytes, or file-like object to send
-            in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the
-            :class:`Request`.
-        :param headers: (optional) Dictionary of HTTP Headers to send with the
-            :class:`Request`.
-        :param cookies: (optional) Dict or CookieJar object to send with the
-            :class:`Request`.
-        :param files: (optional) Dictionary of ``'filename': file-like-objects``
-            for multipart encoding upload.
-        :param auth: (optional) Auth tuple or callable to enable
-            Basic/Digest/Custom HTTP Auth.
-        :param timeout: (optional) How long to wait for the server to send
-            data before giving up, as a float, or a :ref:`(connect timeout,
-            read timeout) <timeouts>` tuple.
-        :type timeout: float or tuple
-        :param allow_redirects: (optional) Whether to follow redirects.
-            Defaults to ``True``.
-        :type allow_redirects: bool
-        :param proxies: (optional) Dictionary mapping protocol or protocol and
-            hostname to the URL of the proxy.
-        :param stream: (optional) whether to immediately download the response
-            content. Defaults to ``False``.
-        :param verify: (optional) whether the SSL cert will be verified.
-            A CA_BUNDLE path can also be provided. Defaults to ``True``.
-        :param cert: (optional) if String, path to ssl client cert file (.pem).
-            If Tuple, ('cert', 'key') pair.
-        """
-        # Create the Request.
-        req = Request(
-            method=method.upper(),
-            url=url,
-            headers=headers,
-            files=files,
-            data=data or {},
-            json=json,
-            params=params or {},
-            auth=auth,
-            cookies=cookies,
-            hooks=hooks,
-        )
-        prep = self.prepare_request(req)
-
-        proxies = proxies or {}
-
-        settings = self.merge_environment_settings(
-            prep.url, proxies, stream, verify, cert
-        )
-
-        # Send the request.
-        send_kwargs = {
-            'timeout': timeout,
-            'allow_redirects': allow_redirects,
-        }
-        send_kwargs.update(settings)
-        resp = self.send(prep, **send_kwargs)
-
-        return resp
-
-    def get(self, url, **kwargs):
-        """Sends a GET request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('GET', url, **kwargs)
-
-    def options(self, url, **kwargs):
-        """Sends a OPTIONS request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('OPTIONS', url, **kwargs)
-
-    def head(self, url, **kwargs):
-        """Sends a HEAD request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        kwargs.setdefault('allow_redirects', False)
-        return self.request('HEAD', url, **kwargs)
-
-    def post(self, url, data=None, json=None, **kwargs):
-        """Sends a POST request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param json: (optional) json to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('POST', url, data=data, json=json, **kwargs)
-
-    def put(self, url, data=None, **kwargs):
-        """Sends a PUT request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('PUT', url, data=data, **kwargs)
-
-    def patch(self, url, data=None, **kwargs):
-        """Sends a PATCH request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('PATCH', url, data=data, **kwargs)
-
-    def delete(self, url, **kwargs):
-        """Sends a DELETE request. Returns :class:`Response` object.
-
-        :param url: URL for the new :class:`Request` object.
-        :param \*\*kwargs: Optional arguments that ``request`` takes.
-        """
-
-        return self.request('DELETE', url, **kwargs)
-
-    def send(self, request, **kwargs):
-        """Send a given PreparedRequest."""
-        # Set defaults that the hooks can utilize to ensure they always have
-        # the correct parameters to reproduce the previous request.
-        kwargs.setdefault('stream', self.stream)
-        kwargs.setdefault('verify', self.verify)
-        kwargs.setdefault('cert', self.cert)
-        kwargs.setdefault('proxies', self.proxies)
-
-        # It's possible that users might accidentally send a Request object.
-        # Guard against that specific failure case.
-        if not isinstance(request, PreparedRequest):
-            raise ValueError('You can only send PreparedRequests.')
-
-        checked_urls = set()
-        while request.url in self.redirect_cache:
-            checked_urls.add(request.url)
-            new_url = self.redirect_cache.get(request.url)
-            if new_url in checked_urls:
-                break
-            request.url = new_url
-
-        # Set up variables needed for resolve_redirects and dispatching of hooks
-        allow_redirects = kwargs.pop('allow_redirects', True)
-        stream = kwargs.get('stream')
-        hooks = request.hooks
-
-        # Get the appropriate adapter to use
-        adapter = self.get_adapter(url=request.url)
-
-        # Start time (approximately) of the request
-        start = datetime.utcnow()
-
-        # Send the request
-        r = adapter.send(request, **kwargs)
-
-        # Total elapsed time of the request (approximately)
-        r.elapsed = datetime.utcnow() - start
-
-        # Response manipulation hooks
-        r = dispatch_hook('response', hooks, r, **kwargs)
-
-        # Persist cookies
-        if r.history:
-
-            # If the hooks create history then we want those cookies too
-            for resp in r.history:
-                extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
-
-        extract_cookies_to_jar(self.cookies, request, r.raw)
-
-        # Redirect resolving generator.
-        gen = self.resolve_redirects(r, request, **kwargs)
-
-        # Resolve redirects if allowed.
-        history = [resp for resp in gen] if allow_redirects else []
-
-        # Shuffle things around if there's history.
-        if history:
-            # Insert the first (original) request at the start
-            history.insert(0, r)
-            # Get the last request made
-            r = history.pop()
-            r.history = history
-
-        if not stream:
-            r.content
-
-        return r
-
-    def merge_environment_settings(self, url, proxies, stream, verify, cert):
-        """Check the environment and merge it with some settings."""
-        # Gather clues from the surrounding environment.
-        if self.trust_env:
-            # Set environment's proxies.
-            env_proxies = get_environ_proxies(url) or {}
-            for (k, v) in env_proxies.items():
-                proxies.setdefault(k, v)
-
-            # Look for requests environment configuration and be compatible
-            # with cURL.
-            if verify is True or verify is None:
-                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
-                          os.environ.get('CURL_CA_BUNDLE'))
-
-        # Merge all the kwargs.
-        proxies = merge_setting(proxies, self.proxies)
-        stream = merge_setting(stream, self.stream)
-        verify = merge_setting(verify, self.verify)
-        cert = merge_setting(cert, self.cert)
-
-        return {'verify': verify, 'proxies': proxies, 'stream': stream,
-                'cert': cert}
-
-    def get_adapter(self, url):
-        """Returns the appropriate connection adapter for the given URL."""
-        for (prefix, adapter) in self.adapters.items():
-
-            if url.lower().startswith(prefix):
-                return adapter
-
-        # Nothing matches :-/
-        raise InvalidSchema("No connection adapters were found for '%s'" % url)
-
-    def close(self):
-        """Closes all adapters and as such the session"""
-        for v in self.adapters.values():
-            v.close()
-
-    def mount(self, prefix, adapter):
-        """Registers a connection adapter to a prefix.
-
-        Adapters are sorted in descending order by key length."""
-
-        self.adapters[prefix] = adapter
-        keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
-
-        for key in keys_to_move:
-            self.adapters[key] = self.adapters.pop(key)
-
-    def __getstate__(self):
-        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
-        state['redirect_cache'] = dict(self.redirect_cache)
-        return state
-
-    def __setstate__(self, state):
-        redirect_cache = state.pop('redirect_cache', {})
-        for attr, value in state.items():
-            setattr(self, attr, value)
-
-        self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
-        for redirect, to in redirect_cache.items():
-            self.redirect_cache[redirect] = to
-
-
-def session():
-    """Returns a :class:`Session` for context-management."""
-
-    return Session()
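For reference, mount() keeps self.adapters ordered longest-prefix-first, and get_adapter() returns the first adapter whose prefix matches the request URL. A short illustrative sketch of that resolution; no request is sent, the host is hypothetical, and a standalone requests installation with the same API as the removed file is assumed:

import requests
from requests.adapters import HTTPAdapter

class LoggingAdapter(HTTPAdapter):
    """Hypothetical adapter used only to demonstrate prefix matching."""

s = requests.Session()
s.mount('https://api.example.test/', LoggingAdapter())  # example.test is a placeholder host

# The longest matching prefix wins; other URLs fall back to the default adapter.
assert isinstance(s.get_adapter('https://api.example.test/v1/items'), LoggingAdapter)
assert type(s.get_adapter('https://other.example.test/')) is HTTPAdapter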
diff --git a/tools/swarming_client/third_party/requests/status_codes.py b/tools/swarming_client/third_party/requests/status_codes.py
deleted file mode 100644
index a852574..0000000
--- a/tools/swarming_client/third_party/requests/status_codes.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from .structures import LookupDict
-
-_codes = {
-
-    # Informational.
-    100: ('continue',),
-    101: ('switching_protocols',),
-    102: ('processing',),
-    103: ('checkpoint',),
-    122: ('uri_too_long', 'request_uri_too_long'),
-    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
-    201: ('created',),
-    202: ('accepted',),
-    203: ('non_authoritative_info', 'non_authoritative_information'),
-    204: ('no_content',),
-    205: ('reset_content', 'reset'),
-    206: ('partial_content', 'partial'),
-    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
-    208: ('already_reported',),
-    226: ('im_used',),
-
-    # Redirection.
-    300: ('multiple_choices',),
-    301: ('moved_permanently', 'moved', '\\o-'),
-    302: ('found',),
-    303: ('see_other', 'other'),
-    304: ('not_modified',),
-    305: ('use_proxy',),
-    306: ('switch_proxy',),
-    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
-    308: ('permanent_redirect',
-          'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
-
-    # Client Error.
-    400: ('bad_request', 'bad'),
-    401: ('unauthorized',),
-    402: ('payment_required', 'payment'),
-    403: ('forbidden',),
-    404: ('not_found', '-o-'),
-    405: ('method_not_allowed', 'not_allowed'),
-    406: ('not_acceptable',),
-    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
-    408: ('request_timeout', 'timeout'),
-    409: ('conflict',),
-    410: ('gone',),
-    411: ('length_required',),
-    412: ('precondition_failed', 'precondition'),
-    413: ('request_entity_too_large',),
-    414: ('request_uri_too_large',),
-    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
-    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
-    417: ('expectation_failed',),
-    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
-    422: ('unprocessable_entity', 'unprocessable'),
-    423: ('locked',),
-    424: ('failed_dependency', 'dependency'),
-    425: ('unordered_collection', 'unordered'),
-    426: ('upgrade_required', 'upgrade'),
-    428: ('precondition_required', 'precondition'),
-    429: ('too_many_requests', 'too_many'),
-    431: ('header_fields_too_large', 'fields_too_large'),
-    444: ('no_response', 'none'),
-    449: ('retry_with', 'retry'),
-    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
-    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
-    499: ('client_closed_request',),
-
-    # Server Error.
-    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
-    501: ('not_implemented',),
-    502: ('bad_gateway',),
-    503: ('service_unavailable', 'unavailable'),
-    504: ('gateway_timeout',),
-    505: ('http_version_not_supported', 'http_version'),
-    506: ('variant_also_negotiates',),
-    507: ('insufficient_storage',),
-    509: ('bandwidth_limit_exceeded', 'bandwidth'),
-    510: ('not_extended',),
-    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
-}
-
-codes = LookupDict(name='status_codes')
-
-for code, titles in _codes.items():
-    for title in titles:
-        setattr(codes, title, code)
-        if not title.startswith('\\'):
-            setattr(codes, title.upper(), code)
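The loop above exposes every alias as an attribute of ``codes``, plus an upper-case variant for names that do not start with a backslash. A small usage sketch, assuming a standalone requests installation provides the same module:

from requests.status_codes import codes

assert codes.ok == 200
assert codes.NOT_FOUND == 404
assert codes.temporary_redirect == codes.TEMPORARY_REDIRECT == 307

# LookupDict also allows item access and falls back to None for unknown names.
assert codes['see_other'] == 303
assert codes['no_such_alias'] is None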
diff --git a/tools/swarming_client/third_party/requests/structures.py b/tools/swarming_client/third_party/requests/structures.py
deleted file mode 100644
index 3e5f2fa..0000000
--- a/tools/swarming_client/third_party/requests/structures.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.structures
-~~~~~~~~~~~~~~~~~~~
-
-Data structures that power Requests.
-
-"""
-
-import collections
-
-
-class CaseInsensitiveDict(collections.MutableMapping):
-    """
-    A case-insensitive ``dict``-like object.
-
-    Implements all methods and operations of
-    ``collections.MutableMapping`` as well as dict's ``copy``. Also
-    provides ``lower_items``.
-
-    All keys are expected to be strings. The structure remembers the
-    case of the last key to be set, and ``iter(instance)``,
-    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
-    will contain case-sensitive keys. However, querying and contains
-    testing is case insensitive::
-
-        cid = CaseInsensitiveDict()
-        cid['Accept'] = 'application/json'
-        cid['aCCEPT'] == 'application/json'  # True
-        list(cid) == ['Accept']  # True
-
-    For example, ``headers['content-encoding']`` will return the
-    value of a ``'Content-Encoding'`` response header, regardless
-    of how the header name was originally stored.
-
-    If the constructor, ``.update``, or equality comparison
-    operations are given keys that have equal ``.lower()``s, the
-    behavior is undefined.
-
-    """
-    def __init__(self, data=None, **kwargs):
-        self._store = dict()
-        if data is None:
-            data = {}
-        self.update(data, **kwargs)
-
-    def __setitem__(self, key, value):
-        # Use the lowercased key for lookups, but store the actual
-        # key alongside the value.
-        self._store[key.lower()] = (key, value)
-
-    def __getitem__(self, key):
-        return self._store[key.lower()][1]
-
-    def __delitem__(self, key):
-        del self._store[key.lower()]
-
-    def __iter__(self):
-        return (casedkey for casedkey, mappedvalue in self._store.values())
-
-    def __len__(self):
-        return len(self._store)
-
-    def lower_items(self):
-        """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
-
-    def __eq__(self, other):
-        if isinstance(other, collections.Mapping):
-            other = CaseInsensitiveDict(other)
-        else:
-            return NotImplemented
-        # Compare insensitively
-        return dict(self.lower_items()) == dict(other.lower_items())
-
-    # Copy is required
-    def copy(self):
-        return CaseInsensitiveDict(self._store.values())
-
-    def __repr__(self):
-        return str(dict(self.items()))
-
-class LookupDict(dict):
-    """Dictionary lookup object."""
-
-    def __init__(self, name=None):
-        self.name = name
-        super(LookupDict, self).__init__()
-
-    def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
-
-    def __getitem__(self, key):
-        # We allow fall-through here, so values default to None
-
-        return self.__dict__.get(key, None)
-
-    def get(self, key, default=None):
-        return self.__dict__.get(key, default)
diff --git a/tools/swarming_client/third_party/requests/utils.py b/tools/swarming_client/third_party/requests/utils.py
deleted file mode 100644
index c5c3fd0..0000000
--- a/tools/swarming_client/third_party/requests/utils.py
+++ /dev/null
@@ -1,721 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.utils
-~~~~~~~~~~~~~~
-
-This module provides utility functions that are used within Requests
-that are also useful for external consumption.
-
-"""
-
-import cgi
-import codecs
-import collections
-import io
-import os
-import platform
-import re
-import sys
-import socket
-import struct
-import warnings
-
-from . import __version__
-from . import certs
-from .compat import parse_http_list as _parse_list_header
-from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
-                     builtin_str, getproxies, proxy_bypass, urlunparse,
-                     basestring)
-from .cookies import RequestsCookieJar, cookiejar_from_dict
-from .structures import CaseInsensitiveDict
-from .exceptions import InvalidURL, FileModeWarning
-
-_hush_pyflakes = (RequestsCookieJar,)
-
-NETRC_FILES = ('.netrc', '_netrc')
-
-DEFAULT_CA_BUNDLE_PATH = certs.where()
-
-
-def dict_to_sequence(d):
-    """Returns an internal sequence dictionary update."""
-
-    if hasattr(d, 'items'):
-        d = d.items()
-
-    return d
-
-
-def super_len(o):
-    total_length = 0
-    current_position = 0
-
-    if hasattr(o, '__len__'):
-        total_length = len(o)
-
-    elif hasattr(o, 'len'):
-        total_length = o.len
-
-    elif hasattr(o, 'getvalue'):
-        # e.g. BytesIO, cStringIO.StringIO
-        total_length = len(o.getvalue())
-
-    elif hasattr(o, 'fileno'):
-        try:
-            fileno = o.fileno()
-        except io.UnsupportedOperation:
-            pass
-        else:
-            total_length = os.fstat(fileno).st_size
-
-            # Having used fstat to determine the file length, we need to
-            # confirm that this file was opened up in binary mode.
-            if 'b' not in o.mode:
-                warnings.warn((
-                    "Requests has determined the content-length for this "
-                    "request using the binary size of the file: however, the "
-                    "file has been opened in text mode (i.e. without the 'b' "
-                    "flag in the mode). This may lead to an incorrect "
-                    "content-length. In Requests 3.0, support will be removed "
-                    "for files in text mode."),
-                    FileModeWarning
-                )
-
-    if hasattr(o, 'tell'):
-        current_position = o.tell()
-
-    return max(0, total_length - current_position)
-
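super_len() reports the number of bytes still left to send: the total size (via ``__len__``, ``.len``, ``getvalue()`` or ``fstat``) minus the current ``tell()`` position. A quick sketch with an in-memory buffer, assuming the function is importable as requests.utils.super_len:

from io import BytesIO
from requests.utils import super_len

buf = BytesIO(b'abcdef')
assert super_len(buf) == 6   # nothing consumed yet

buf.read(4)                  # advance the read position
assert super_len(buf) == 2   # only the unread tail is counted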
-
-def get_netrc_auth(url, raise_errors=False):
-    """Returns the Requests tuple auth for a given url from netrc."""
-
-    try:
-        from netrc import netrc, NetrcParseError
-
-        netrc_path = None
-
-        for f in NETRC_FILES:
-            try:
-                loc = os.path.expanduser('~/{0}'.format(f))
-            except KeyError:
-                # os.path.expanduser can fail when $HOME is undefined and
-                # getpwuid fails. See http://bugs.python.org/issue20164 &
-                # https://github.com/kennethreitz/requests/issues/1846
-                return
-
-            if os.path.exists(loc):
-                netrc_path = loc
-                break
-
-        # Abort early if there isn't one.
-        if netrc_path is None:
-            return
-
-        ri = urlparse(url)
-
-        # Strip port numbers from netloc. This weird ``if...encode`` dance is
-        # used for Python 3.2, which doesn't support unicode literals.
-        splitstr = b':'
-        if isinstance(url, str):
-            splitstr = splitstr.decode('ascii')
-        host = ri.netloc.split(splitstr)[0]
-
-        try:
-            _netrc = netrc(netrc_path).authenticators(host)
-            if _netrc:
-                # Return with login / password
-                login_i = (0 if _netrc[0] else 1)
-                return (_netrc[login_i], _netrc[2])
-        except (NetrcParseError, IOError):
-            # If there was a parsing error or a permissions issue reading the file,
-            # we'll just skip netrc auth unless explicitly asked to raise errors.
-            if raise_errors:
-                raise
-
-    # AppEngine hackiness.
-    except (ImportError, AttributeError):
-        pass
-
-
-def guess_filename(obj):
-    """Tries to guess the filename of the given object."""
-    name = getattr(obj, 'name', None)
-    if (name and isinstance(name, basestring) and name[0] != '<' and
-            name[-1] != '>'):
-        return os.path.basename(name)
-
-
-def from_key_val_list(value):
-    """Take an object and test to see if it can be represented as a
-    dictionary. If it can be, return an OrderedDict, e.g.,
-
-    ::
-
-        >>> from_key_val_list([('key', 'val')])
-        OrderedDict([('key', 'val')])
-        >>> from_key_val_list('string')
-        ValueError: need more than 1 value to unpack
-        >>> from_key_val_list({'key': 'val'})
-        OrderedDict([('key', 'val')])
-    """
-    if value is None:
-        return None
-
-    if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
-
-    return OrderedDict(value)
-
-
-def to_key_val_list(value):
-    """Take an object and test to see if it can be represented as a
-    dictionary. If it can be, return a list of tuples, e.g.,
-
-    ::
-
-        >>> to_key_val_list([('key', 'val')])
-        [('key', 'val')]
-        >>> to_key_val_list({'key': 'val'})
-        [('key', 'val')]
-        >>> to_key_val_list('string')
-        ValueError: cannot encode objects that are not 2-tuples.
-    """
-    if value is None:
-        return None
-
-    if isinstance(value, (str, bytes, bool, int)):
-        raise ValueError('cannot encode objects that are not 2-tuples')
-
-    if isinstance(value, collections.Mapping):
-        value = value.items()
-
-    return list(value)
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_list_header(value):
-    """Parse lists as described by RFC 2068 Section 2.
-
-    In particular, parse comma-separated lists where the elements of
-    the list may include quoted-strings.  A quoted-string could
-    contain a comma.  A non-quoted string could have quotes in the
-    middle.  Quotes are removed automatically after parsing.
-
-    It basically works like :func:`parse_set_header` just that items
-    may appear multiple times and case sensitivity is preserved.
-
-    The return value is a standard :class:`list`:
-
-    >>> parse_list_header('token, "quoted value"')
-    ['token', 'quoted value']
-
-    To create a header from the :class:`list` again, use the
-    :func:`dump_header` function.
-
-    :param value: a string with a list header.
-    :return: :class:`list`
-    """
-    result = []
-    for item in _parse_list_header(value):
-        if item[:1] == item[-1:] == '"':
-            item = unquote_header_value(item[1:-1])
-        result.append(item)
-    return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def parse_dict_header(value):
-    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
-    convert them into a python dict:
-
-    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
-    >>> type(d) is dict
-    True
-    >>> sorted(d.items())
-    [('bar', 'as well'), ('foo', 'is a fish')]
-
-    If there is no value for a key it will be `None`:
-
-    >>> parse_dict_header('key_without_value')
-    {'key_without_value': None}
-
-    To create a header from the :class:`dict` again, use the
-    :func:`dump_header` function.
-
-    :param value: a string with a dict header.
-    :return: :class:`dict`
-    """
-    result = {}
-    for item in _parse_list_header(value):
-        if '=' not in item:
-            result[item] = None
-            continue
-        name, value = item.split('=', 1)
-        if value[:1] == value[-1:] == '"':
-            value = unquote_header_value(value[1:-1])
-        result[name] = value
-    return result
-
-
-# From mitsuhiko/werkzeug (used with permission).
-def unquote_header_value(value, is_filename=False):
-    r"""Unquotes a header value.  (Reversal of :func:`quote_header_value`).
-    This does not use the real unquoting but what browsers are actually
-    using for quoting.
-
-    :param value: the header value to unquote.
-    """
-    if value and value[0] == value[-1] == '"':
-        # this is not the real unquoting, but fixing this so that the
-        # RFC is met will result in bugs with internet explorer and
-        # probably some other browsers as well.  IE for example is
-        # uploading files with "C:\foo\bar.txt" as filename
-        value = value[1:-1]
-
-        # if this is a filename and the starting characters look like
-        # a UNC path, then just return the value without quotes.  Using the
-        # replace sequence below on a UNC path has the effect of turning
-        # the leading double slash into a single slash and then
-        # _fix_ie_filename() doesn't work correctly.  See #458.
-        if not is_filename or value[:2] != '\\\\':
-            return value.replace('\\\\', '\\').replace('\\"', '"')
-    return value
-
-
-def dict_from_cookiejar(cj):
-    """Returns a key/value dictionary from a CookieJar.
-
-    :param cj: CookieJar object to extract cookies from.
-    """
-
-    cookie_dict = {}
-
-    for cookie in cj:
-        cookie_dict[cookie.name] = cookie.value
-
-    return cookie_dict
-
-
-def add_dict_to_cookiejar(cj, cookie_dict):
-    """Returns a CookieJar from a key/value dictionary.
-
-    :param cj: CookieJar to insert cookies into.
-    :param cookie_dict: Dict of key/values to insert into CookieJar.
-    """
-
-    cj2 = cookiejar_from_dict(cookie_dict)
-    cj.update(cj2)
-    return cj
-
-
-def get_encodings_from_content(content):
-    """Returns encodings from given content string.
-
-    :param content: bytestring to extract encodings from.
-    """
-    warnings.warn((
-        'In requests 3.0, get_encodings_from_content will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
-
-    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
-    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
-    xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
-
-    return (charset_re.findall(content) +
-            pragma_re.findall(content) +
-            xml_re.findall(content))
-
-
-def get_encoding_from_headers(headers):
-    """Returns encodings from given HTTP Header Dict.
-
-    :param headers: dictionary to extract encoding from.
-    """
-
-    content_type = headers.get('content-type')
-
-    if not content_type:
-        return None
-
-    content_type, params = cgi.parse_header(content_type)
-
-    if 'charset' in params:
-        return params['charset'].strip("'\"")
-
-    if 'text' in content_type:
-        return 'ISO-8859-1'
-
-
-def stream_decode_response_unicode(iterator, r):
-    """Stream decodes a iterator."""
-
-    if r.encoding is None:
-        for item in iterator:
-            yield item
-        return
-
-    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
-    for chunk in iterator:
-        rv = decoder.decode(chunk)
-        if rv:
-            yield rv
-    rv = decoder.decode(b'', final=True)
-    if rv:
-        yield rv
-
-
-def iter_slices(string, slice_length):
-    """Iterate over slices of a string."""
-    pos = 0
-    while pos < len(string):
-        yield string[pos:pos + slice_length]
-        pos += slice_length
-
-
-def get_unicode_from_response(r):
-    """Returns the requested content back in unicode.
-
-    :param r: Response object to get unicode content from.
-
-    Tried:
-
-    1. charset from content-type
-    2. fall back and replace all unicode characters
-
-    """
-    warnings.warn((
-        'In requests 3.0, get_unicode_from_response will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
-
-    tried_encodings = []
-
-    # Try charset from content-type
-    encoding = get_encoding_from_headers(r.headers)
-
-    if encoding:
-        try:
-            return str(r.content, encoding)
-        except UnicodeError:
-            tried_encodings.append(encoding)
-
-    # Fall back:
-    try:
-        return str(r.content, encoding, errors='replace')
-    except TypeError:
-        return r.content
-
-
-# The unreserved URI characters (RFC 3986)
-UNRESERVED_SET = frozenset(
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
-    + "0123456789-._~")
-
-
-def unquote_unreserved(uri):
-    """Un-escape any percent-escape sequences in a URI that are unreserved
-    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
-    """
-    parts = uri.split('%')
-    for i in range(1, len(parts)):
-        h = parts[i][0:2]
-        if len(h) == 2 and h.isalnum():
-            try:
-                c = chr(int(h, 16))
-            except ValueError:
-                raise InvalidURL("Invalid percent-escape sequence: '%s'" % h)
-
-            if c in UNRESERVED_SET:
-                parts[i] = c + parts[i][2:]
-            else:
-                parts[i] = '%' + parts[i]
-        else:
-            parts[i] = '%' + parts[i]
-    return ''.join(parts)
-
-
-def requote_uri(uri):
-    """Re-quote the given URI.
-
-    This function passes the given URI through an unquote/quote cycle to
-    ensure that it is fully and consistently quoted.
-    """
-    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
-    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
-    try:
-        # Unquote only the unreserved characters
-        # Then quote only illegal characters (do not quote reserved,
-        # unreserved, or '%')
-        return quote(unquote_unreserved(uri), safe=safe_with_percent)
-    except InvalidURL:
-        # We couldn't unquote the given URI, so let's try quoting it, but
-        # there may be unquoted '%'s in the URI. We need to make sure they're
-        # properly quoted so they do not cause issues elsewhere.
-        return quote(uri, safe=safe_without_percent)
-
-
-def address_in_network(ip, net):
-    """
-    This function allows you to check if an IP belongs to a network subnet
-    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
-             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24
-    """
-    ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]
-    netaddr, bits = net.split('/')
-    netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
-    network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask
-    return (ipaddr & netmask) == (network & netmask)
-
-
-def dotted_netmask(mask):
-    """
-    Converts mask from /xx format to xxx.xxx.xxx.xxx
-    Example: if mask is 24 function returns 255.255.255.0
-    """
-    bits = 0xffffffff ^ (1 << 32 - mask) - 1
-    return socket.inet_ntoa(struct.pack('>I', bits))
-
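Together, dotted_netmask() and address_in_network() implement the CIDR matching used by the no_proxy handling further below. A short sketch mirroring the docstring examples, assuming requests.utils is importable:

from requests.utils import address_in_network, dotted_netmask

assert dotted_netmask(24) == '255.255.255.0'
assert address_in_network('192.168.1.1', '192.168.1.0/24') is True
assert address_in_network('192.168.1.1', '192.168.100.0/24') is False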
-
-def is_ipv4_address(string_ip):
-    try:
-        socket.inet_aton(string_ip)
-    except socket.error:
-        return False
-    return True
-
-
-def is_valid_cidr(string_network):
-    """Very simple check of the cidr format in no_proxy variable"""
-    if string_network.count('/') == 1:
-        try:
-            mask = int(string_network.split('/')[1])
-        except ValueError:
-            return False
-
-        if mask < 1 or mask > 32:
-            return False
-
-        try:
-            socket.inet_aton(string_network.split('/')[0])
-        except socket.error:
-            return False
-    else:
-        return False
-    return True
-
-
-def should_bypass_proxies(url):
-    """
-    Returns whether we should bypass proxies or not.
-    """
-    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
-
-    # First check whether no_proxy is defined. If it is, check that the URL
-    # we're getting isn't in the no_proxy list.
-    no_proxy = get_proxy('no_proxy')
-    netloc = urlparse(url).netloc
-
-    if no_proxy:
-        # We need to check whether we match here. We need to see if we match
-        # the end of the netloc, both with and without the port.
-        no_proxy = (
-            host for host in no_proxy.replace(' ', '').split(',') if host
-        )
-
-        ip = netloc.split(':')[0]
-        if is_ipv4_address(ip):
-            for proxy_ip in no_proxy:
-                if is_valid_cidr(proxy_ip):
-                    if address_in_network(ip, proxy_ip):
-                        return True
-        else:
-            for host in no_proxy:
-                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
-                    # The URL does match something in no_proxy, so we don't want
-                    # to apply the proxies on this URL.
-                    return True
-
-    # If the system proxy settings indicate that this URL should be bypassed,
-    # don't proxy.
-    # The proxy_bypass function is incredibly buggy on OS X in early versions
-    # of Python 2.6, so allow this call to fail. Only catch the specific
-    # exceptions we've seen, though: this call failing in other ways can reveal
-    # legitimate problems.
-    try:
-        bypass = proxy_bypass(netloc)
-    except (TypeError, socket.gaierror):
-        bypass = False
-
-    if bypass:
-        return True
-
-    return False
-
-def get_environ_proxies(url):
-    """Return a dict of environment proxies."""
-    if should_bypass_proxies(url):
-        return {}
-    else:
-        return getproxies()
-
-def select_proxy(url, proxies):
-    """Select a proxy for the url, if applicable.
-
-    :param url: The URL of the request
-    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
-    """
-    proxies = proxies or {}
-    urlparts = urlparse(url)
-    proxy = proxies.get(urlparts.scheme+'://'+urlparts.hostname)
-    if proxy is None:
-        proxy = proxies.get(urlparts.scheme)
-    return proxy
-
-def default_user_agent(name="python-requests"):
-    """Return a string representing the default user agent."""
-    return '%s/%s' % (name, __version__)
-
-
-def default_headers():
-    return CaseInsensitiveDict({
-        'User-Agent': default_user_agent(),
-        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
-        'Accept': '*/*',
-        'Connection': 'keep-alive',
-    })
-
-
-def parse_header_links(value):
-    """Return a dict of parsed link headers proxies.
-
-    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
-
-    """
-
-    links = []
-
-    replace_chars = " '\""
-
-    for val in re.split(", *<", value):
-        try:
-            url, params = val.split(";", 1)
-        except ValueError:
-            url, params = val, ''
-
-        link = {}
-
-        link["url"] = url.strip("<> '\"")
-
-        for param in params.split(";"):
-            try:
-                key, value = param.split("=")
-            except ValueError:
-                break
-
-            link[key.strip(replace_chars)] = value.strip(replace_chars)
-
-        links.append(link)
-
-    return links
-
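parse_header_links() splits a Link header on ``", <"``, strips the angle brackets and quotes, and returns one dict per link. A small sketch with a hypothetical paginated API host, assuming requests.utils is importable:

from requests.utils import parse_header_links

# api.example.test is a placeholder host used only for illustration.
header = ('<http://api.example.test/items?page=2>; rel="next", '
          '<http://api.example.test/items?page=1>; rel="prev"')
links = parse_header_links(header)

assert links == [
    {'url': 'http://api.example.test/items?page=2', 'rel': 'next'},
    {'url': 'http://api.example.test/items?page=1', 'rel': 'prev'},
]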
-
-# Null bytes; no need to recreate these on each call to guess_json_utf
-_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
-_null2 = _null * 2
-_null3 = _null * 3
-
-
-def guess_json_utf(data):
-    # JSON always starts with two ASCII characters, so detection is as
-    # easy as counting the nulls and from their location and count
-    # determine the encoding. Also detect a BOM, if present.
-    sample = data[:4]
-    if sample in (codecs.BOM_UTF32_LE, codecs.BOM32_BE):
-        return 'utf-32'     # BOM included
-    if sample[:3] == codecs.BOM_UTF8:
-        return 'utf-8-sig'  # BOM included, MS style (discouraged)
-    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
-        return 'utf-16'     # BOM included
-    nullcount = sample.count(_null)
-    if nullcount == 0:
-        return 'utf-8'
-    if nullcount == 2:
-        if sample[::2] == _null2:   # 1st and 3rd are null
-            return 'utf-16-be'
-        if sample[1::2] == _null2:  # 2nd and 4th are null
-            return 'utf-16-le'
-        # Did not detect 2 valid UTF-16 ascii-range characters
-    if nullcount == 3:
-        if sample[:3] == _null3:
-            return 'utf-32-be'
-        if sample[1:] == _null3:
-            return 'utf-32-le'
-        # Did not detect a valid UTF-32 ascii-range character
-    return None
-
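Because the first two characters of a JSON document are ASCII, the position of NUL bytes in the first four bytes identifies the UTF flavour without decoding anything. A couple of illustrative calls, assuming requests.utils is importable:

from requests.utils import guess_json_utf

assert guess_json_utf(b'{"a": 1}') == 'utf-8'
assert guess_json_utf('{"a": 1}'.encode('utf-16-le')) == 'utf-16-le'
assert guess_json_utf('{"a": 1}'.encode('utf-32-be')) == 'utf-32-be'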
-
-def prepend_scheme_if_needed(url, new_scheme):
-    """Given a URL that may or may not have a scheme, prepend the given scheme.
-    Does not replace a present scheme with the one provided as an argument."""
-    scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
-
-    # urlparse is a finicky beast, and sometimes decides that there isn't a
-    # netloc present. Assume that it's being over-cautious, and switch netloc
-    # and path if urlparse decided there was no netloc.
-    if not netloc:
-        netloc, path = path, netloc
-
-    return urlunparse((scheme, netloc, path, params, query, fragment))
-
-
-def get_auth_from_url(url):
-    """Given a url with authentication components, extract them into a tuple of
-    username,password."""
-    parsed = urlparse(url)
-
-    try:
-        auth = (unquote(parsed.username), unquote(parsed.password))
-    except (AttributeError, TypeError):
-        auth = ('', '')
-
-    return auth
-
-
-def to_native_string(string, encoding='ascii'):
-    """
-    Given a string object, regardless of type, returns a representation of that
-    string in the native string type, encoding and decoding where necessary.
-    This assumes ASCII unless told otherwise.
-    """
-    out = None
-
-    if isinstance(string, builtin_str):
-        out = string
-    else:
-        if is_py2:
-            out = string.encode(encoding)
-        else:
-            out = string.decode(encoding)
-
-    return out
-
-
-def urldefragauth(url):
-    """
-    Given a URL, remove the fragment and the authentication part.
-    """
-    scheme, netloc, path, params, query, fragment = urlparse(url)
-
-    # see func:`prepend_scheme_if_needed`
-    if not netloc:
-        netloc, path = path, netloc
-
-    netloc = netloc.rsplit('@', 1)[-1]
-
-    return urlunparse((scheme, netloc, path, params, query, ''))
diff --git a/tools/swarming_client/third_party/rsa/LICENSE b/tools/swarming_client/third_party/rsa/LICENSE
deleted file mode 100644
index da76c9d..0000000
--- a/tools/swarming_client/third_party/rsa/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/tools/swarming_client/third_party/rsa/README.rst b/tools/swarming_client/third_party/rsa/README.rst
deleted file mode 100644
index 9f34863..0000000
--- a/tools/swarming_client/third_party/rsa/README.rst
+++ /dev/null
@@ -1,31 +0,0 @@
-Pure Python RSA implementation
-==============================
-
-`Python-RSA`_ is a pure-Python RSA implementation. It supports
-encryption and decryption, signing and verifying signatures, and key
-generation according to PKCS#1 version 1.5. It can be used as a Python
-library as well as on the command line. The code was mostly written by
-Sybren A. Stüvel.
-
-Documentation can be found at the Python-RSA homepage:
-http://stuvel.eu/rsa
-
-Download and install using::
-
-    pip install rsa
-
-or::
-
-    easy_install rsa
-
-or download it from the `Python Package Index`_.
-
-The source code is maintained in a `Mercurial repository`_ and is
-licensed under the `Apache License, version 2.0`_
-
-
-.. _`Python-RSA`: http://stuvel.eu/rsa
-.. _`Mercurial repository`: https://bitbucket.org/sybren/python-rsa
-.. _`Python Package Index`: http://pypi.python.org/pypi/rsa
-.. _`Apache License, version 2.0`: http://www.apache.org/licenses/LICENSE-2.0
-
diff --git a/tools/swarming_client/third_party/rsa/README.swarming b/tools/swarming_client/third_party/rsa/README.swarming
deleted file mode 100644
index b31172b..0000000
--- a/tools/swarming_client/third_party/rsa/README.swarming
+++ /dev/null
@@ -1,15 +0,0 @@
-Name: python-rsa
-Short Name: rsa
-URL: https://bitbucket.org/sybren/python-rsa/commits/tag/version-3.2.3
-Version: 3.2.3
-Revision: 280:fad443c7ea1a
-License: Apache License
-
-Description:
-Python-RSA is a pure-Python RSA implementation. It supports encryption and
-decryption, signing and verifying signatures, and key generation according to
-PKCS#1 version 1.5.
-
-Local Modifications:
-- Kept rsa/.
-- Kept LICENSE and README.rst.
diff --git a/tools/swarming_client/third_party/rsa/rsa/__init__.py b/tools/swarming_client/third_party/rsa/rsa/__init__.py
deleted file mode 100644
index 99fd668..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/__init__.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-"""RSA module
-
-Module for calculating large primes, and RSA encryption, decryption, signing
-and verification. Includes generating public and private keys.
-
-WARNING: this implementation does not use random padding, compression of the
-cleartext input to prevent repetitions, or other common security improvements.
-Use with care.
-
-If you want to have a more secure implementation, use the functions from the
-``rsa.pkcs1`` module.
-
-"""
-
-__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly"
-__date__ = "2015-11-05"
-__version__ = '3.2.3'
-
-from rsa.key import newkeys, PrivateKey, PublicKey
-from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \
-    VerificationError
-
-# Do doctest if we're run directly
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
-
-__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey',
-    'PrivateKey', 'DecryptionError', 'VerificationError']
-
diff --git a/tools/swarming_client/third_party/rsa/rsa/_compat.py b/tools/swarming_client/third_party/rsa/rsa/_compat.py
deleted file mode 100644
index 3c4eb81..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/_compat.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-"""Python compatibility wrappers."""
-
-
-from __future__ import absolute_import
-
-import sys
-from struct import pack
-
-try:
-    MAX_INT = sys.maxsize
-except AttributeError:
-    MAX_INT = sys.maxint
-
-MAX_INT64 = (1 << 63) - 1
-MAX_INT32 = (1 << 31) - 1
-MAX_INT16 = (1 << 15) - 1
-
-# Determine the word size of the processor.
-if MAX_INT == MAX_INT64:
-    # 64-bit processor.
-    MACHINE_WORD_SIZE = 64
-elif MAX_INT == MAX_INT32:
-    # 32-bit processor.
-    MACHINE_WORD_SIZE = 32
-else:
-    # Else we just assume 64-bit processor keeping up with modern times.
-    MACHINE_WORD_SIZE = 64
-
-
-try:
-    # < Python3
-    unicode_type = unicode
-    have_python3 = False
-except NameError:
-    # Python3.
-    unicode_type = str
-    have_python3 = True
-
-# Fake byte literals.
-if str is unicode_type:
-    def byte_literal(s):
-        return s.encode('latin1')
-else:
-    def byte_literal(s):
-        return s
-
-# ``long`` is no more. Do type detection using this instead.
-try:
-    integer_types = (int, long)
-except NameError:
-    integer_types = (int,)
-
-b = byte_literal
-
-try:
-    # Python 2.6 or higher.
-    bytes_type = bytes
-except NameError:
-    # Python 2.5
-    bytes_type = str
-
-
-# To avoid calling b() multiple times in tight loops.
-ZERO_BYTE = b('\x00')
-EMPTY_BYTE = b('')
-
-
-def is_bytes(obj):
-    """
-    Determines whether the given value is a byte string.
-
-    :param obj:
-        The value to test.
-    :returns:
-        ``True`` if ``value`` is a byte string; ``False`` otherwise.
-    """
-    return isinstance(obj, bytes_type)
-
-
-def is_integer(obj):
-    """
-    Determines whether the given value is an integer.
-
-    :param obj:
-        The value to test.
-    :returns:
-        ``True`` if ``value`` is an integer; ``False`` otherwise.
-    """
-    return isinstance(obj, integer_types)
-
-
-def byte(num):
-    """
-    Converts a number between 0 and 255 (both inclusive) to a base-256 (byte)
-    representation.
-
-    Use it as a replacement for ``chr`` where you are expecting a byte
-    because this will work on all current versions of Python::
-
-    :param num:
-        An unsigned integer between 0 and 255 (both inclusive).
-    :returns:
-        A single byte.
-    """
-    return pack("B", num)
-
-
-def get_word_alignment(num, force_arch=64,
-                       _machine_word_size=MACHINE_WORD_SIZE):
-    """
-    Returns alignment details for the given number based on the platform
-    Python is running on.
-
-    :param num:
-        Unsigned integral number.
-    :param force_arch:
-        If you don't want to use 64-bit unsigned chunks, set this to
-        anything other than 64. 32-bit chunks will be preferred then.
-        Default 64 will be used when on a 64-bit machine.
-    :param _machine_word_size:
-        (Internal) The machine word size used for alignment.
-    :returns:
-        4-tuple::
-
-            (word_bits, word_bytes,
-             max_uint, packing_format_type)
-    """
-    max_uint64 = 0xffffffffffffffff
-    max_uint32 = 0xffffffff
-    max_uint16 = 0xffff
-    max_uint8 = 0xff
-
-    if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
-        # 64-bit unsigned integer.
-        return 64, 8, max_uint64, "Q"
-    elif num > max_uint16:
-        # 32-bit unsigned integer
-        return 32, 4, max_uint32, "L"
-    elif num > max_uint8:
-        # 16-bit unsigned integer.
-        return 16, 2, max_uint16, "H"
-    else:
-        # 8-bit unsigned integer.
-        return 8, 1, max_uint8, "B"
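get_word_alignment() above only reports how a big integer should be chunked; a sketch of how those four returned values would typically be consumed with struct. pack_words is a hypothetical helper, not part of the deleted module:

    from struct import pack

    def pack_words(num, word_bits, word_bytes, max_uint, fmt):
        # Hypothetical helper: split `num` into machine words and pack them
        # big-endian using the struct format character chosen above.
        words = []
        while num > 0:
            words.append(num & max_uint)
            num >>= word_bits
        if not words:
            words.append(0)
        return b''.join(pack('>' + fmt, w) for w in reversed(words))

    # e.g. pack_words(0x100000001, *get_word_alignment(0x100000001)) -> one 8-byte "Q" word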
diff --git a/tools/swarming_client/third_party/rsa/rsa/_version133.py b/tools/swarming_client/third_party/rsa/rsa/_version133.py
deleted file mode 100644
index dff0dda..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/_version133.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-"""RSA module
-pri = k[1]                               	//Private part of keys d,p,q
-
-Module for calculating large primes, and RSA encryption, decryption,
-signing and verification. Includes generating public and private keys.
-
-WARNING: this code implements the mathematics of RSA. It is not suitable for
-real-world secure cryptography purposes. It has not been reviewed by a security
-expert. It does not include padding of data. There are many ways in which the
-output of this module, when used without any modification, can be successfully
-attacked.
-"""
-
-__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer"
-__date__ = "2010-02-05"
-__version__ = '1.3.3'
-
-# NOTE: Python's modulo can return negative numbers. We compensate for
-# this behaviour using the abs() function
-
-from cPickle import dumps, loads
-import base64
-import math
-import os
-import random
-import sys
-import types
-import zlib
-
-from rsa._compat import byte
-
-# Display a warning that this insecure version is imported.
-import warnings
-warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
-        % __name__)
-
-def gcd(p, q):
-    """Returns the greatest common divisor of p and q
-
-
-    >>> gcd(42, 6)
-    6
-    """
-    if p<q: return gcd(q, p)
-    if q == 0: return p
-    return gcd(q, abs(p%q))
-
-def bytes2int(bytes):
-    """Converts a list of bytes or a string to an integer
-
-    >>> (128*256 + 64)*256 + 15
-    8405007
-    >>> l = [128, 64, 15]
-    >>> bytes2int(l)
-    8405007
-    """
-
-    if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
-        raise TypeError("You must pass a string or a list")
-
-    # Convert byte stream to integer
-    integer = 0
-    for byte in bytes:
-        integer *= 256
-        if type(byte) is types.StringType: byte = ord(byte)
-        integer += byte
-
-    return integer
-
-def int2bytes(number):
-    """Converts a number to a string of bytes
-    
-    >>> bytes2int(int2bytes(123456789))
-    123456789
-    """
-
-    if not (type(number) is types.LongType or type(number) is types.IntType):
-        raise TypeError("You must pass a long or an int")
-
-    string = ""
-
-    while number > 0:
-        string = "%s%s" % (byte(number & 0xFF), string)
-        number /= 256
-    
-    return string
-
-def fast_exponentiation(a, p, n):
-    """Calculates r = a^p mod n
-    """
-    result = a % n
-    remainders = []
-    while p != 1:
-        remainders.append(p & 1)
-        p = p >> 1
-    while remainders:
-        rem = remainders.pop()
-        result = ((a ** rem) * result ** 2) % n
-    return result
-
-def read_random_int(nbits):
-    """Reads a random integer of approximately nbits bits rounded up
-    to whole bytes"""
-
-    nbytes = ceil(nbits/8.)
-    randomdata = os.urandom(nbytes)
-    return bytes2int(randomdata)
-
-def ceil(x):
-    """ceil(x) -> int(math.ceil(x))"""
-
-    return int(math.ceil(x))
-    
-def randint(minvalue, maxvalue):
-    """Returns a random integer x with minvalue <= x <= maxvalue"""
-
-    # Safety - get a lot of random data even if the range is fairly
-    # small
-    min_nbits = 32
-
-    # The range of the random numbers we need to generate
-    range = maxvalue - minvalue
-
-    # Which is this number of bytes
-    rangebytes = ceil(math.log(range, 2) / 8.)
-
-    # Convert to bits, but make sure it's always at least min_nbits*2
-    rangebits = max(rangebytes * 8, min_nbits * 2)
-    
-    # Take a random number of bits between min_nbits and rangebits
-    nbits = random.randint(min_nbits, rangebits)
-    
-    return (read_random_int(nbits) % range) + minvalue
-
-def fermat_little_theorem(p):
-    """Returns 1 if p may be prime, and something else if p definitely
-    is not prime"""
-
-    a = randint(1, p-1)
-    return fast_exponentiation(a, p-1, p)
-
-def jacobi(a, b):
-    """Calculates the value of the Jacobi symbol (a/b)
-    """
-
-    if a % b == 0:
-        return 0
-    result = 1
-    while a > 1:
-        if a & 1:
-            if ((a-1)*(b-1) >> 2) & 1:
-                result = -result
-            b, a = a, b % a
-        else:
-            if ((b ** 2 - 1) >> 3) & 1:
-                result = -result
-            a = a >> 1
-    return result
-
-def jacobi_witness(x, n):
-    """Returns False if n is an Euler pseudo-prime with base x, and
-    True otherwise.
-    """
-
-    j = jacobi(x, n) % n
-    f = fast_exponentiation(x, (n-1)/2, n)
-
-    if j == f: return False
-    return True
-
-def randomized_primality_testing(n, k):
-    """Calculates whether n is composite (which is always correct) or
-    prime (which is incorrect with error probability 2**-k)
-
-    Returns False if the number is composite, and True if it's
-    probably prime.
-    """
-
-    q = 0.5     # Property of the jacobi_witness function
-
-    # t = int(math.ceil(k / math.log(1/q, 2)))
-    t = ceil(k / math.log(1/q, 2))
-    for i in range(t+1):
-        x = randint(1, n-1)
-        if jacobi_witness(x, n): return False
-    
-    return True
-
-def is_prime(number):
-    """Returns True if the number is prime, and False otherwise.
-
-    >>> is_prime(42)
-    0
-    >>> is_prime(41)
-    1
-    """
-
-    """
-    if not fermat_little_theorem(number) == 1:
-        # Not prime, according to Fermat's little theorem
-        return False
-    """
-
-    if randomized_primality_testing(number, 5):
-        # Prime, according to Jacobi
-        return True
-    
-    # Not prime
-    return False
-
-    
-def getprime(nbits):
-    """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
-    other words: nbits is rounded up to whole bytes.
-
-    >>> p = getprime(8)
-    >>> is_prime(p-1)
-    0
-    >>> is_prime(p)
-    1
-    >>> is_prime(p+1)
-    0
-    """
-
-    nbytes = int(math.ceil(nbits/8.))
-
-    while True:
-        integer = read_random_int(nbits)
-
-        # Make sure it's odd
-        integer |= 1
-
-        # Test for primeness
-        if is_prime(integer): break
-
-        # Retry if not prime
-
-    return integer
-
-def are_relatively_prime(a, b):
-    """Returns True if a and b are relatively prime, and False if they
-    are not.
-
-    >>> are_relatively_prime(2, 3)
-    1
-    >>> are_relatively_prime(2, 4)
-    0
-    """
-
-    d = gcd(a, b)
-    return (d == 1)
-
-def find_p_q(nbits):
-    """Returns a tuple of two different primes of nbits bits"""
-
-    p = getprime(nbits)
-    while True:
-        q = getprime(nbits)
-        if not q == p: break
-    
-    return (p, q)
-
-def extended_euclid_gcd(a, b):
-    """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb
-    """
-
-    if b == 0:
-        return (a, 1, 0)
-
-    q = abs(a % b)
-    r = long(a / b)
-    (d, k, l) = extended_euclid_gcd(b, q)
-
-    return (d, l, k - l*r)
-
-# Main function: calculate encryption and decryption keys
-def calculate_keys(p, q, nbits):
-    """Calculates an encryption and a decryption key for p and q, and
-    returns them as a tuple (e, d)"""
-
-    n = p * q
-    phi_n = (p-1) * (q-1)
-
-    while True:
-        # Make sure e has enough bits so we ensure "wrapping" through
-        # modulo n
-        e = getprime(max(8, nbits/2))
-        if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
-
-    (d, i, j) = extended_euclid_gcd(e, phi_n)
-
-    if not d == 1:
-        raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
-
-    if not (e * i) % phi_n == 1:
-        raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
-
-    return (e, i)
-
-
-def gen_keys(nbits):
-    """Generate RSA keys of nbits bits. Returns (p, q, e, d).
-
-    Note: this can take a long time, depending on the key size.
-    """
-
-    while True:
-        (p, q) = find_p_q(nbits)
-        (e, d) = calculate_keys(p, q, nbits)
-
-        # For some reason, d is sometimes negative. We don't know how
-        # to fix it (yet), so we keep trying until everything is shiny
-        if d > 0: break
-
-    return (p, q, e, d)
-
-def gen_pubpriv_keys(nbits):
-    """Generates public and private keys, and returns them as (pub,
-    priv).
-
-    The public key consists of a dict {e: ..., n: ...}. The private
-    key consists of a dict {d: ..., p: ..., q: ...}.
-    """
-    
-    (p, q, e, d) = gen_keys(nbits)
-
-    return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
-
-def encrypt_int(message, ekey, n):
-    """Encrypts a message using encryption key 'ekey', working modulo
-    n"""
-
-    if type(message) is types.IntType:
-        return encrypt_int(long(message), ekey, n)
-
-    if not type(message) is types.LongType:
-        raise TypeError("You must pass a long or an int")
-
-    if message > 0 and \
-            math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)):
-        raise OverflowError("The message is too long")
-
-    return fast_exponentiation(message, ekey, n)
-
-def decrypt_int(cyphertext, dkey, n):
-    """Decrypts a cypher text using the decryption key 'dkey', working
-    modulo n"""
-
-    return encrypt_int(cyphertext, dkey, n)
-
-def sign_int(message, dkey, n):
-    """Signs 'message' using key 'dkey', working modulo n"""
-
-    return decrypt_int(message, dkey, n)
-
-def verify_int(signed, ekey, n):
-    """verifies 'signed' using key 'ekey', working modulo n"""
-
-    return encrypt_int(signed, ekey, n)
-
-def picklechops(chops):
-    """Pickles and base64encodes it's argument chops"""
-
-    value = zlib.compress(dumps(chops))
-    encoded = base64.encodestring(value)
-    return encoded.strip()
-
-def unpicklechops(string):
-    """base64decodes and unpickes it's argument string into chops"""
-
-    return loads(zlib.decompress(base64.decodestring(string)))
-
-def chopstring(message, key, n, funcref):
-    """Splits 'message' into chops that are at most as long as n,
-    converts these into integers, and calls funcref(integer, key, n)
-    for each chop.
-
-    Used by 'encrypt' and 'sign'.
-    """
-
-    msglen = len(message)
-    mbits = msglen * 8
-    nbits = int(math.floor(math.log(n, 2)))
-    nbytes = nbits / 8
-    blocks = msglen / nbytes
-
-    if msglen % nbytes > 0:
-        blocks += 1
-
-    cypher = []
-    
-    for bindex in range(blocks):
-        offset = bindex * nbytes
-        block = message[offset:offset+nbytes]
-        value = bytes2int(block)
-        cypher.append(funcref(value, key, n))
-
-    return picklechops(cypher)
-
-def gluechops(chops, key, n, funcref):
-    """Glues chops back together into a string.  calls
-    funcref(integer, key, n) for each chop.
-
-    Used by 'decrypt' and 'verify'.
-    """
-    message = ""
-
-    chops = unpicklechops(chops)
-    
-    for cpart in chops:
-        mpart = funcref(cpart, key, n)
-        message += int2bytes(mpart)
-    
-    return message
-
-def encrypt(message, key):
-    """Encrypts a string 'message' with the public key 'key'"""
-    
-    return chopstring(message, key['e'], key['n'], encrypt_int)
-
-def sign(message, key):
-    """Signs a string 'message' with the private key 'key'"""
-    
-    return chopstring(message, key['d'], key['p']*key['q'], decrypt_int)
-
-def decrypt(cypher, key):
-    """Decrypts a cypher with the private key 'key'"""
-
-    return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
-
-def verify(cypher, key):
-    """Verifies a cypher with the public key 'key'"""
-
-    return gluechops(cypher, key['e'], key['n'], encrypt_int)
-
-# Do doctest if we're not imported
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
-
-__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"]
-
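fast_exponentiation() above is a hand-rolled left-to-right square-and-multiply; a compact sketch of the same idea, checked against Python's built-in pow(), which is what later versions of the package use instead:

    def modexp(a, p, n):
        # Walk the exponent's bits from most to least significant:
        # square each step, multiply in `a` when the bit is set.
        result = 1
        for bit in bin(p)[2:]:
            result = (result * result) % n
            if bit == '1':
                result = (result * a) % n
        return result

    assert modexp(4, 13, 497) == pow(4, 13, 497) == 445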
diff --git a/tools/swarming_client/third_party/rsa/rsa/_version200.py b/tools/swarming_client/third_party/rsa/rsa/_version200.py
deleted file mode 100644
index 28f3601..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/_version200.py
+++ /dev/null
@@ -1,545 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-"""RSA module
-
-Module for calculating large primes, and RSA encryption, decryption,
-signing and verification. Includes generating public and private keys.
-
-WARNING: this implementation does not use random padding, compression of the
-cleartext input to prevent repetitions, or other common security improvements.
-Use with care.
-
-"""
-
-__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead"
-__date__ = "2010-02-08"
-__version__ = '2.0'
-
-import math
-import os
-import random
-import sys
-import types
-from rsa._compat import byte
-
-# Display a warning that this insecure version is imported.
-import warnings
-warnings.warn('Insecure version of the RSA module is imported as %s' % __name__)
-
-
-def bit_size(number):
-    """Returns the number of bits required to hold a specific long number"""
-
-    return int(math.ceil(math.log(number,2)))
-
-def gcd(p, q):
-    """Returns the greatest common divisor of p and q
-    >>> gcd(48, 180)
-    12
-    """
-    # Iterative version is faster and uses much less stack space
-    while q != 0:
-        if p < q: (p,q) = (q,p)
-        (p,q) = (q, p % q)
-    return p
-    
-
-def bytes2int(bytes):
-    """Converts a list of bytes or a string to an integer
-
-    >>> (((128 * 256) + 64) * 256) + 15
-    8405007
-    >>> l = [128, 64, 15]
-    >>> bytes2int(l)              #same as bytes2int('\x80@\x0f')
-    8405007
-    """
-
-    if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
-        raise TypeError("You must pass a string or a list")
-
-    # Convert byte stream to integer
-    integer = 0
-    for byte in bytes:
-        integer *= 256
-        if type(byte) is types.StringType: byte = ord(byte)
-        integer += byte
-
-    return integer
-
-def int2bytes(number):
-    """
-    Converts a number to a string of bytes
-    """
-
-    if not (type(number) is types.LongType or type(number) is types.IntType):
-        raise TypeError("You must pass a long or an int")
-
-    string = ""
-
-    while number > 0:
-        string = "%s%s" % (byte(number & 0xFF), string)
-        number /= 256
-    
-    return string
-
-def to64(number):
-    """Converts a number in the range of 0 to 63 into base 64 digit
-    character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'.
-    
-    >>> to64(10)
-    'A'
-    """
-
-    if not (type(number) is types.LongType or type(number) is types.IntType):
-        raise TypeError("You must pass a long or an int")
-
-    if 0 <= number <= 9:            #00-09 translates to '0' - '9'
-        return byte(number + 48)
-
-    if 10 <= number <= 35:
-        return byte(number + 55)     #10-35 translates to 'A' - 'Z'
-
-    if 36 <= number <= 61:
-        return byte(number + 61)     #36-61 translates to 'a' - 'z'
-
-    if number == 62:                # 62   translates to '-' (minus)
-        return byte(45)
-
-    if number == 63:                # 63   translates to '_' (underscore)
-        return byte(95)
-
-    raise ValueError('Invalid Base64 value: %i' % number)
-
-
-def from64(number):
-    """Converts an ordinal character value in the range of
-    0-9,A-Z,a-z,-,_ to a number in the range of 0-63.
-    
-    >>> from64(49)
-    1
-    """
-
-    if not (type(number) is types.LongType or type(number) is types.IntType):
-        raise TypeError("You must pass a long or an int")
-
-    if 48 <= number <= 57:         #ord('0') - ord('9') translates to 0-9
-        return(number - 48)
-
-    if 65 <= number <= 90:         #ord('A') - ord('Z') translates to 10-35
-        return(number - 55)
-
-    if 97 <= number <= 122:        #ord('a') - ord('z') translates to 36-61
-        return(number - 61)
-
-    if number == 45:               #ord('-') translates to 62
-        return(62)
-
-    if number == 95:               #ord('_') translates to 63
-        return(63)
-
-    raise ValueError('Invalid Base64 value: %i' % number)
-
-
-def int2str64(number):
-    """Converts a number to a string of base64 encoded characters in
-    the range of '0'-'9','A'-'Z,'a'-'z','-','_'.
-    
-    >>> int2str64(123456789)
-    '7MyqL'
-    """
-
-    if not (type(number) is types.LongType or type(number) is types.IntType):
-        raise TypeError("You must pass a long or an int")
-
-    string = ""
-
-    while number > 0:
-        string = "%s%s" % (to64(number & 0x3F), string)
-        number /= 64
-
-    return string
-
-
-def str642int(string):
-    """Converts a base64 encoded string into an integer.
-    The chars of this string are in the range '0'-'9','A'-'Z','a'-'z','-','_'
-    
-    >>> str642int('7MyqL')
-    123456789
-    """
-
-    if not (type(string) is types.ListType or type(string) is types.StringType):
-        raise TypeError("You must pass a string or a list")
-
-    integer = 0
-    for byte in string:
-        integer *= 64
-        if type(byte) is types.StringType: byte = ord(byte)
-        integer += from64(byte)
-
-    return integer
-
-def read_random_int(nbits):
-    """Reads a random integer of approximately nbits bits rounded up
-    to whole bytes"""
-
-    nbytes = int(math.ceil(nbits/8.))
-    randomdata = os.urandom(nbytes)
-    return bytes2int(randomdata)
-
-def randint(minvalue, maxvalue):
-    """Returns a random integer x with minvalue <= x <= maxvalue"""
-
-    # Safety - get a lot of random data even if the range is fairly
-    # small
-    min_nbits = 32
-
-    # The range of the random numbers we need to generate
-    range = (maxvalue - minvalue) + 1
-
-    # Which is this number of bytes
-    rangebytes = ((bit_size(range) + 7) / 8)
-
-    # Convert to bits, but make sure it's always at least min_nbits*2
-    rangebits = max(rangebytes * 8, min_nbits * 2)
-    
-    # Take a random number of bits between min_nbits and rangebits
-    nbits = random.randint(min_nbits, rangebits)
-    
-    return (read_random_int(nbits) % range) + minvalue
-
-def jacobi(a, b):
-    """Calculates the value of the Jacobi symbol (a/b)
-    where both a and b are positive integers, and b is odd
-    """
-
-    if a == 0: return 0
-    result = 1
-    while a > 1:
-        if a & 1:
-            if ((a-1)*(b-1) >> 2) & 1:
-                result = -result
-            a, b = b % a, a
-        else:
-            if (((b * b) - 1) >> 3) & 1:
-                result = -result
-            a >>= 1
-    if a == 0: return 0
-    return result
-
-def jacobi_witness(x, n):
-    """Returns False if n is an Euler pseudo-prime with base x, and
-    True otherwise.
-    """
-
-    j = jacobi(x, n) % n
-    f = pow(x, (n-1)/2, n)
-
-    if j == f: return False
-    return True
-
-def randomized_primality_testing(n, k):
-    """Calculates whether n is composite (which is always correct) or
-    prime (which is incorrect with error probability 2**-k)
-
-    Returns False if the number is composite, and True if it's
-    probably prime.
-    """
-
-    # 50% of Jacobi-witnesses can report compositeness of non-prime numbers
-
-    for i in range(k):
-        x = randint(1, n-1)
-        if jacobi_witness(x, n): return False
-    
-    return True
-
-def is_prime(number):
-    """Returns True if the number is prime, and False otherwise.
-
-    >>> is_prime(42)
-    0
-    >>> is_prime(41)
-    1
-    """
-
-    if randomized_primality_testing(number, 6):
-        # Prime, according to Jacobi
-        return True
-    
-    # Not prime
-    return False
-
-    
-def getprime(nbits):
-    """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
-    other words: nbits is rounded up to whole bytes.
-
-    >>> p = getprime(8)
-    >>> is_prime(p-1)
-    0
-    >>> is_prime(p)
-    1
-    >>> is_prime(p+1)
-    0
-    """
-
-    while True:
-        integer = read_random_int(nbits)
-
-        # Make sure it's odd
-        integer |= 1
-
-        # Test for primeness
-        if is_prime(integer): break
-
-        # Retry if not prime
-
-    return integer
-
-def are_relatively_prime(a, b):
-    """Returns True if a and b are relatively prime, and False if they
-    are not.
-
-    >>> are_relatively_prime(2, 3)
-    1
-    >>> are_relatively_prime(2, 4)
-    0
-    """
-
-    d = gcd(a, b)
-    return (d == 1)
-
-def find_p_q(nbits):
-    """Returns a tuple of two different primes of nbits bits"""
-    pbits = nbits + (nbits/16)  #Make sure that p and q aren't too close
-    qbits = nbits - (nbits/16)  #or the factoring programs can factor n
-    p = getprime(pbits)
-    while True:
-        q = getprime(qbits)
-        #Make sure p and q are different.
-        if not q == p: break
-    return (p, q)
-
-def extended_gcd(a, b):
-    """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
-    """
-    # r = gcd(a,b) i = multiplicative inverse of a mod b
-    #      or      j = multiplicative inverse of b mod a
-    # Neg return values for i or j are made positive mod b or a respectively
-    # Iterative version is faster and uses much less stack space
-    x = 0
-    y = 1
-    lx = 1
-    ly = 0
-    oa = a                             #Remember original a/b to remove 
-    ob = b                             #negative values from return results
-    while b != 0:
-        q = long(a/b)
-        (a, b)  = (b, a % b)
-        (x, lx) = ((lx - (q * x)),x)
-        (y, ly) = ((ly - (q * y)),y)
-    if (lx < 0): lx += ob              #If neg wrap modulo original b
-    if (ly < 0): ly += oa              #If neg wrap modulo original a
-    return (a, lx, ly)                 #Return only positive values
-
-# Main function: calculate encryption and decryption keys
-def calculate_keys(p, q, nbits):
-    """Calculates an encryption and a decryption key for p and q, and
-    returns them as a tuple (e, d)"""
-
-    n = p * q
-    phi_n = (p-1) * (q-1)
-
-    while True:
-        # Make sure e has enough bits so we ensure "wrapping" through
-        # modulo n
-        e = max(65537,getprime(nbits/4))
-        if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
-
-    (d, i, j) = extended_gcd(e, phi_n)
-
-    if not d == 1:
-        raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
-    if (i < 0):
-        raise Exception("New extended_gcd shouldn't return negative values")
-    if not (e * i) % phi_n == 1:
-        raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
-
-    return (e, i)
-
-
-def gen_keys(nbits):
-    """Generate RSA keys of nbits bits. Returns (p, q, e, d).
-
-    Note: this can take a long time, depending on the key size.
-    """
-
-    (p, q) = find_p_q(nbits)
-    (e, d) = calculate_keys(p, q, nbits)
-
-    return (p, q, e, d)
-
-def newkeys(nbits):
-    """Generates public and private keys, and returns them as (pub,
-    priv).
-
-    The public key consists of a dict {e: ..., n: ...}. The private
-    key consists of a dict {d: ..., p: ..., q: ...}.
-    """
-    nbits = max(9,nbits)           # Don't let nbits go below 9 bits
-    (p, q, e, d) = gen_keys(nbits)
-
-    return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
-
-def encrypt_int(message, ekey, n):
-    """Encrypts a message using encryption key 'ekey', working modulo n"""
-
-    if type(message) is types.IntType:
-        message = long(message)
-
-    if not type(message) is types.LongType:
-        raise TypeError("You must pass a long or int")
-
-    if message < 0 or message > n:
-        raise OverflowError("The message is too long")
-
-    #Note: Bit exponents start at zero (bit counts start at 1) this is correct
-    safebit = bit_size(n) - 2                   #compute safe bit (MSB - 1)
-    message += (1 << safebit)                   #add safebit to ensure folding
-
-    return pow(message, ekey, n)
-
-def decrypt_int(cyphertext, dkey, n):
-    """Decrypts a cypher text using the decryption key 'dkey', working
-    modulo n"""
-
-    message = pow(cyphertext, dkey, n)
-
-    safebit = bit_size(n) - 2                   #compute safe bit (MSB - 1)
-    message -= (1 << safebit)                   #remove safebit before decode
-
-    return message
-
-def encode64chops(chops):
-    """base64encodes chops and combines them into a ',' delimited string"""
-
-    chips = []                              #chips are character chops
-
-    for value in chops:
-        chips.append(int2str64(value))
-
-    #delimit chops with comma
-    encoded = ','.join(chips)
-
-    return encoded
-
-def decode64chops(string):
-    """base64decodes and makes a ',' delimited string into chops"""
-
-    chips = string.split(',')               #split chops at commas
-
-    chops = []
-
-    for string in chips:                    #make char chops (chips) into chops
-        chops.append(str642int(string))
-
-    return chops
-
-def chopstring(message, key, n, funcref):
-    """Chops the 'message' into integers that fit into n,
-    leaving room for a safebit to be added to ensure that all
-    messages fold during exponentiation.  The MSB of the number n
-    is not independent modulo n (setting it could cause overflow), so
-    use the next lower bit for the safebit.  Therefore reserve 2-bits
-    in the number n for non-data bits.  Calls specified encryption
-    function for each chop.
-
-    Used by 'encrypt' and 'sign'.
-    """
-
-    msglen = len(message)
-    mbits = msglen * 8
-    #Set aside 2-bits so setting of safebit won't overflow modulo n.
-    nbits = bit_size(n) - 2             # leave room for safebit
-    nbytes = nbits / 8
-    blocks = msglen / nbytes
-
-    if msglen % nbytes > 0:
-        blocks += 1
-
-    cypher = []
-    
-    for bindex in range(blocks):
-        offset = bindex * nbytes
-        block = message[offset:offset+nbytes]
-        value = bytes2int(block)
-        cypher.append(funcref(value, key, n))
-
-    return encode64chops(cypher)   #Encode encrypted ints to base64 strings
-
-def gluechops(string, key, n, funcref):
-    """Glues chops back together into a string.  calls
-    funcref(integer, key, n) for each chop.
-
-    Used by 'decrypt' and 'verify'.
-    """
-    message = ""
-
-    chops = decode64chops(string)  #Decode base64 strings into integer chops
-    
-    for cpart in chops:
-        mpart = funcref(cpart, key, n) #Decrypt each chop
-        message += int2bytes(mpart)    #Combine decrypted strings into a msg
-    
-    return message
-
-def encrypt(message, key):
-    """Encrypts a string 'message' with the public key 'key'"""
-    if 'n' not in key:
-        raise Exception("You must use the public key with encrypt")
-
-    return chopstring(message, key['e'], key['n'], encrypt_int)
-
-def sign(message, key):
-    """Signs a string 'message' with the private key 'key'"""
-    if 'p' not in key:
-        raise Exception("You must use the private key with sign")
-
-    return chopstring(message, key['d'], key['p']*key['q'], encrypt_int)
-
-def decrypt(cypher, key):
-    """Decrypts a string 'cypher' with the private key 'key'"""
-    if 'p' not in key:
-        raise Exception("You must use the private key with decrypt")
-
-    return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
-
-def verify(cypher, key):
-    """Verifies a string 'cypher' with the public key 'key'"""
-    if 'n' not in key:
-        raise Exception("You must use the public key with verify")
-
-    return gluechops(cypher, key['e'], key['n'], decrypt_int)
-
-# Do doctest if we're not imported
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
-
-__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"]
-
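The safebit trick in encrypt_int()/decrypt_int() above reserves two high bits of n and sets bit (bit_size(n) - 2) so every block "wraps" through the modulus. A toy round trip with the textbook 61*53 key; the values are illustrative only, never real-world parameters:

    def fold(message, n):
        return message + (1 << (n.bit_length() - 2))   # set the safebit

    def unfold(message, n):
        return message - (1 << (n.bit_length() - 2))   # strip the safebit

    n, e, d = 3233, 17, 2753          # p=61, q=53 textbook key, toy sized
    m = 42
    c = pow(fold(m, n), e, n)
    assert unfold(pow(c, d, n), n) == m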
diff --git a/tools/swarming_client/third_party/rsa/rsa/asn1.py b/tools/swarming_client/third_party/rsa/rsa/asn1.py
deleted file mode 100644
index 6eb6da5..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/asn1.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''ASN.1 definitions.
-
-Not all ASN.1-handling code uses these definitions, but when it does, they should be here.
-'''
-
-from pyasn1.type import univ, namedtype, tag
-
-class PubKeyHeader(univ.Sequence):
-    componentType = namedtype.NamedTypes(
-        namedtype.NamedType('oid', univ.ObjectIdentifier()),
-        namedtype.NamedType('parameters', univ.Null()),
-    )
-
-class OpenSSLPubKey(univ.Sequence):
-    componentType = namedtype.NamedTypes(
-        namedtype.NamedType('header', PubKeyHeader()),
-        
-        # This little hack (the implicit tag) allows us to get a Bit String as Octet String
-        namedtype.NamedType('key', univ.OctetString().subtype(
-                                          implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))),
-    )
-
-
-class AsnPubKey(univ.Sequence):
-    '''ASN.1 contents of DER encoded public key:
-    
-    RSAPublicKey ::= SEQUENCE {
-         modulus           INTEGER,  -- n
-         publicExponent    INTEGER,  -- e
-    '''
-
-    componentType = namedtype.NamedTypes(
-        namedtype.NamedType('modulus', univ.Integer()),
-        namedtype.NamedType('publicExponent', univ.Integer()),
-    )
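A hedged sketch of how the AsnPubKey sequence above is typically serialized with pyasn1's DER encoder (the same pattern the package's key.py follows when saving a public key); rsa_pubkey_der is a hypothetical helper name:

    from pyasn1.codec.der import encoder

    def rsa_pubkey_der(n, e):
        asn_key = AsnPubKey()                          # sequence class defined above
        asn_key.setComponentByName('modulus', n)
        asn_key.setComponentByName('publicExponent', e)
        return encoder.encode(asn_key)                 # DER bytes of the RSAPublicKey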
diff --git a/tools/swarming_client/third_party/rsa/rsa/bigfile.py b/tools/swarming_client/third_party/rsa/rsa/bigfile.py
deleted file mode 100644
index 516cf56..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/bigfile.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Large file support
-
-    - break a file into smaller blocks, and encrypt them, and store the
-      encrypted blocks in another file.
-
-    - take such an encrypted files, decrypt its blocks, and reconstruct the
-      original file.
-
-The encrypted file format is as follows, where || denotes byte concatenation:
-
-    FILE := VERSION || BLOCK || BLOCK ...
-
-    BLOCK := LENGTH || DATA
-
-    LENGTH := varint-encoded length of the subsequent data. Varint comes from
-    Google Protobuf, and encodes an integer into a variable number of bytes.
-    Each byte uses the 7 lowest bits to encode the value. The highest bit set
-    to 1 indicates the next byte is also part of the varint. The last byte will
-    have this bit set to 0.
-
-This file format is called the VARBLOCK format, in line with the varint format
-used to denote the block sizes.
-
-'''
-
-from rsa import key, common, pkcs1, varblock
-from rsa._compat import byte
-
-def encrypt_bigfile(infile, outfile, pub_key):
-    '''Encrypts a file, writing it to 'outfile' in VARBLOCK format.
-    
-    :param infile: file-like object to read the cleartext from
-    :param outfile: file-like object to write the crypto in VARBLOCK format to
-    :param pub_key: :py:class:`rsa.PublicKey` to encrypt with
-
-    '''
-
-    if not isinstance(pub_key, key.PublicKey):
-        raise TypeError('Public key required, but got %r' % pub_key)
-
-    key_bytes = common.bit_size(pub_key.n) // 8
-    blocksize = key_bytes - 11 # keep space for PKCS#1 padding
-
-    # Write the version number to the VARBLOCK file
-    outfile.write(byte(varblock.VARBLOCK_VERSION))
-
-    # Encrypt and write each block
-    for block in varblock.yield_fixedblocks(infile, blocksize):
-        crypto = pkcs1.encrypt(block, pub_key)
-
-        varblock.write_varint(outfile, len(crypto))
-        outfile.write(crypto)
-
-def decrypt_bigfile(infile, outfile, priv_key):
-    '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile'
-    
-    :param infile: file-like object to read the crypto in VARBLOCK format from
-    :param outfile: file-like object to write the cleartext to
-    :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with
-
-    '''
-
-    if not isinstance(priv_key, key.PrivateKey):
-        raise TypeError('Private key required, but got %r' % priv_key)
-    
-    for block in varblock.yield_varblocks(infile):
-        cleartext = pkcs1.decrypt(block, priv_key)
-        outfile.write(cleartext)
-
-__all__ = ['encrypt_bigfile', 'decrypt_bigfile']
-
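The VARBLOCK docstring above describes protobuf-style varints for block lengths: 7 payload bits per byte, with the high bit set while more bytes follow. A small self-contained sketch of that encoding, separate from the varblock module the file imports:

    def write_varint(value):
        out = bytearray()
        while True:
            bits = value & 0x7F
            value >>= 7
            if value:
                out.append(0x80 | bits)    # continuation bit: more bytes follow
            else:
                out.append(bits)           # last byte: high bit clear
                return bytes(out)

    def read_varint(data):
        result, shift = 0, 0
        for b in data:
            result |= (b & 0x7F) << shift  # low-order 7-bit groups come first
            shift += 7
            if not b & 0x80:
                break
        return result

    assert write_varint(300) == b'\xac\x02' and read_varint(b'\xac\x02') == 300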
diff --git a/tools/swarming_client/third_party/rsa/rsa/cli.py b/tools/swarming_client/third_party/rsa/rsa/cli.py
deleted file mode 100644
index 527cc49..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/cli.py
+++ /dev/null
@@ -1,379 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Commandline scripts.
-
-These scripts are called by the executables defined in setup.py.
-'''
-
-from __future__ import with_statement, print_function
-
-import abc
-import sys
-from optparse import OptionParser
-
-import rsa
-import rsa.bigfile
-import rsa.pkcs1
-
-HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys())
-
-def keygen():
-    '''Key generator.'''
-
-    # Parse the CLI options
-    parser = OptionParser(usage='usage: %prog [options] keysize',
-            description='Generates a new RSA keypair of "keysize" bits.')
-    
-    parser.add_option('--pubout', type='string',
-            help='Output filename for the public key. The public key is '
-            'not saved if this option is not present. You can use '
-            'pyrsa-priv2pub to create the public key file later.')
-    
-    parser.add_option('-o', '--out', type='string',
-            help='Output filename for the private key. The key is '
-            'written to stdout if this option is not present.')
-
-    parser.add_option('--form',
-            help='key format of the private and public keys - default PEM',
-            choices=('PEM', 'DER'), default='PEM')
-
-    (cli, cli_args) = parser.parse_args(sys.argv[1:])
-
-    if len(cli_args) != 1:
-        parser.print_help()
-        raise SystemExit(1)
-    
-    try:
-        keysize = int(cli_args[0])
-    except ValueError:
-        parser.print_help()
-        print('Not a valid number: %s' % cli_args[0], file=sys.stderr)
-        raise SystemExit(1)
-
-    print('Generating %i-bit key' % keysize, file=sys.stderr)
-    (pub_key, priv_key) = rsa.newkeys(keysize)
-
-
-    # Save public key
-    if cli.pubout:
-        print('Writing public key to %s' % cli.pubout, file=sys.stderr)
-        data = pub_key.save_pkcs1(format=cli.form)
-        with open(cli.pubout, 'wb') as outfile:
-            outfile.write(data)
-
-    # Save private key
-    data = priv_key.save_pkcs1(format=cli.form)
-    
-    if cli.out:
-        print('Writing private key to %s' % cli.out, file=sys.stderr)
-        with open(cli.out, 'wb') as outfile:
-            outfile.write(data)
-    else:
-        print('Writing private key to stdout', file=sys.stderr)
-        sys.stdout.write(data)
-
-
-class CryptoOperation(object):
-    '''CLI callable that operates with input, output, and a key.'''
-
-    __metaclass__ = abc.ABCMeta
-
-    keyname = 'public' # or 'private'
-    usage = 'usage: %%prog [options] %(keyname)s_key'
-    description = None
-    operation = 'decrypt'
-    operation_past = 'decrypted'
-    operation_progressive = 'decrypting'
-    input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \
-            'not specified.'
-    output_help = 'Name of the file to write the %(operation_past)s file ' \
-            'to. Written to stdout if this option is not present.'
-    expected_cli_args = 1
-    has_output = True
-
-    key_class = rsa.PublicKey
-
-    def __init__(self):
-        self.usage = self.usage % self.__class__.__dict__
-        self.input_help = self.input_help % self.__class__.__dict__
-        self.output_help = self.output_help % self.__class__.__dict__
-
-    @abc.abstractmethod
-    def perform_operation(self, indata, key, cli_args=None):
-        '''Performs the program's operation.
-
-        Implement in a subclass.
-
-        :returns: the data to write to the output.
-        '''
-
-    def __call__(self):
-        '''Runs the program.'''
-
-        (cli, cli_args) = self.parse_cli()
-
-        key = self.read_key(cli_args[0], cli.keyform)
-
-        indata = self.read_infile(cli.input)
-
-        print(self.operation_progressive.title(), file=sys.stderr)
-        outdata = self.perform_operation(indata, key, cli_args)
-
-        if self.has_output:
-            self.write_outfile(outdata, cli.output)
-
-    def parse_cli(self):
-        '''Parse the CLI options
-        
-        :returns: (cli_opts, cli_args)
-        '''
-
-        parser = OptionParser(usage=self.usage, description=self.description)
-        
-        parser.add_option('-i', '--input', type='string', help=self.input_help)
-
-        if self.has_output:
-            parser.add_option('-o', '--output', type='string', help=self.output_help)
-
-        parser.add_option('--keyform',
-                help='Key format of the %s key - default PEM' % self.keyname,
-                choices=('PEM', 'DER'), default='PEM')
-
-        (cli, cli_args) = parser.parse_args(sys.argv[1:])
-
-        if len(cli_args) != self.expected_cli_args:
-            parser.print_help()
-            raise SystemExit(1)
-
-        return (cli, cli_args)
-
-    def read_key(self, filename, keyform):
-        '''Reads a public or private key.'''
-
-        print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr)
-        with open(filename, 'rb') as keyfile:
-            keydata = keyfile.read()
-
-        return self.key_class.load_pkcs1(keydata, keyform)
-    
-    def read_infile(self, inname):
-        '''Read the input file'''
-
-        if inname:
-            print('Reading input from %s' % inname, file=sys.stderr)
-            with open(inname, 'rb') as infile:
-                return infile.read()
-
-        print('Reading input from stdin', file=sys.stderr)
-        return sys.stdin.read()
-
-    def write_outfile(self, outdata, outname):
-        '''Write the output file'''
-
-        if outname:
-            print('Writing output to %s' % outname, file=sys.stderr)
-            with open(outname, 'wb') as outfile:
-                outfile.write(outdata)
-        else:
-            print('Writing output to stdout', file=sys.stderr)
-            sys.stdout.write(outdata)
-
-class EncryptOperation(CryptoOperation):
-    '''Encrypts a file.'''
-
-    keyname = 'public'
-    description = ('Encrypts a file. The file must be shorter than the key '
-            'length in order to be encrypted. For larger files, use the '
-            'pyrsa-encrypt-bigfile command.')
-    operation = 'encrypt'
-    operation_past = 'encrypted'
-    operation_progressive = 'encrypting'
-
-
-    def perform_operation(self, indata, pub_key, cli_args=None):
-        '''Encrypts files.'''
-
-        return rsa.encrypt(indata, pub_key)
-
-class DecryptOperation(CryptoOperation):
-    '''Decrypts a file.'''
-
-    keyname = 'private'
-    description = ('Decrypts a file. The original file must be shorter than '
-            'the key length in order to have been encrypted. For larger '
-            'files, use the pyrsa-decrypt-bigfile command.')
-    operation = 'decrypt'
-    operation_past = 'decrypted'
-    operation_progressive = 'decrypting'
-    key_class = rsa.PrivateKey
-
-    def perform_operation(self, indata, priv_key, cli_args=None):
-        '''Decrypts files.'''
-
-        return rsa.decrypt(indata, priv_key)
-
-class SignOperation(CryptoOperation):
-    '''Signs a file.'''
-
-    keyname = 'private'
-    usage = 'usage: %%prog [options] private_key hash_method'
-    description = ('Signs a file, outputs the signature. Choose the hash '
-            'method from %s' % ', '.join(HASH_METHODS))
-    operation = 'sign'
-    operation_past = 'signature'
-    operation_progressive = 'Signing'
-    key_class = rsa.PrivateKey
-    expected_cli_args = 2
-
-    output_help = ('Name of the file to write the signature to. Written '
-            'to stdout if this option is not present.')
-
-    def perform_operation(self, indata, priv_key, cli_args):
-        '''Signs files.'''
-
-        hash_method = cli_args[1]
-        if hash_method not in HASH_METHODS:
-            raise SystemExit('Invalid hash method, choose one of %s' % 
-                    ', '.join(HASH_METHODS))
-
-        return rsa.sign(indata, priv_key, hash_method)
-
-class VerifyOperation(CryptoOperation):
-    '''Verify a signature.'''
-
-    keyname = 'public'
-    usage = 'usage: %%prog [options] public_key signature_file'
-    description = ('Verifies a signature, exits with status 0 upon success, '
-        'prints an error message and exits with status 1 upon error.')
-    operation = 'verify'
-    operation_past = 'verified'
-    operation_progressive = 'Verifying'
-    key_class = rsa.PublicKey
-    expected_cli_args = 2
-    has_output = False
-
-    def perform_operation(self, indata, pub_key, cli_args):
-        '''Verifies files.'''
-
-        signature_file = cli_args[1]
-        
-        with open(signature_file, 'rb') as sigfile:
-            signature = sigfile.read()
-
-        try:
-            rsa.verify(indata, signature, pub_key)
-        except rsa.VerificationError:
-            raise SystemExit('Verification failed.')
-
-        print('Verification OK', file=sys.stderr)
-
-
-class BigfileOperation(CryptoOperation):
-    '''CryptoOperation that doesn't read the entire file into memory.'''
-
-    def __init__(self):
-        CryptoOperation.__init__(self)
-
-        self.file_objects = []
-
-    def __del__(self):
-        '''Closes any open file handles.'''
-
-        for fobj in self.file_objects:
-            fobj.close()
-
-    def __call__(self):
-        '''Runs the program.'''
-
-        (cli, cli_args) = self.parse_cli()
-
-        key = self.read_key(cli_args[0], cli.keyform)
-
-        # Get the file handles
-        infile = self.get_infile(cli.input)
-        outfile = self.get_outfile(cli.output)
-
-        # Call the operation
-        print(self.operation_progressive.title(), file=sys.stderr)
-        self.perform_operation(infile, outfile, key, cli_args)
-
-    def get_infile(self, inname):
-        '''Returns the input file object'''
-
-        if inname:
-            print('Reading input from %s' % inname, file=sys.stderr)
-            fobj = open(inname, 'rb')
-            self.file_objects.append(fobj)
-        else:
-            print('Reading input from stdin', file=sys.stderr)
-            fobj = sys.stdin
-
-        return fobj
-
-    def get_outfile(self, outname):
-        '''Returns the output file object'''
-
-        if outname:
-            print('Will write output to %s' % outname, file=sys.stderr)
-            fobj = open(outname, 'wb')
-            self.file_objects.append(fobj)
-        else:
-            print('Will write output to stdout', file=sys.stderr)
-            fobj = sys.stdout
-
-        return fobj
-
-class EncryptBigfileOperation(BigfileOperation):
-    '''Encrypts a file to VARBLOCK format.'''
-
-    keyname = 'public'
-    description = ('Encrypts a file to an encrypted VARBLOCK file. The file '
-            'can be larger than the key length, but the output file is only '
-            'compatible with Python-RSA.')
-    operation = 'encrypt'
-    operation_past = 'encrypted'
-    operation_progressive = 'encrypting'
-
-    def perform_operation(self, infile, outfile, pub_key, cli_args=None):
-        '''Encrypts files to VARBLOCK.'''
-
-        return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key)
-
-class DecryptBigfileOperation(BigfileOperation):
-    '''Decrypts a file in VARBLOCK format.'''
-
-    keyname = 'private'
-    description = ('Decrypts an encrypted VARBLOCK file that was encrypted '
-            'with pyrsa-encrypt-bigfile')
-    operation = 'decrypt'
-    operation_past = 'decrypted'
-    operation_progressive = 'decrypting'
-    key_class = rsa.PrivateKey
-
-    def perform_operation(self, infile, outfile, priv_key, cli_args=None):
-        '''Decrypts a VARBLOCK file.'''
-
-        return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key)
-
-
-encrypt = EncryptOperation()
-decrypt = DecryptOperation()
-sign = SignOperation()
-verify = VerifyOperation()
-encrypt_bigfile = EncryptBigfileOperation()
-decrypt_bigfile = DecryptBigfileOperation()
-
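The CryptoOperation hierarchy above keeps each command to a single perform_operation() override plus a handful of class attributes. A hypothetical extra command, sketched only to show the extension pattern and relying on the CryptoOperation class and imports from the file above; it is not part of the deleted module:

    class InspectOperation(CryptoOperation):
        '''Prints the bit size of a public key's modulus.'''

        keyname = 'public'
        description = 'Reads a public key and reports the size of its modulus.'
        operation = 'inspect'
        operation_past = 'inspected'
        operation_progressive = 'inspecting'
        has_output = False

        def perform_operation(self, indata, pub_key, cli_args=None):
            '''Reports the modulus size; the input data is ignored.'''
            print('Modulus is %d bits' % pub_key.n.bit_length(), file=sys.stderr)

    inspect = InspectOperation()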
diff --git a/tools/swarming_client/third_party/rsa/rsa/common.py b/tools/swarming_client/third_party/rsa/rsa/common.py
deleted file mode 100644
index 39feb8c..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/common.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Common functionality shared by several modules.'''
-
-
-def bit_size(num):
-    '''
-    Number of bits needed to represent an integer excluding any prefix
-    0 bits.
-
-    As per definition from http://wiki.python.org/moin/BitManipulation and
-    to match the behavior of the Python 3 API.
-
-    Usage::
-    
-        >>> bit_size(1023)
-        10
-        >>> bit_size(1024)
-        11
-        >>> bit_size(1025)
-        11
-
-    :param num:
-        Integer value. If num is 0, returns 0. Only the absolute value of the
-        number is considered. Therefore, signed integers are reduced with abs(num)
-        before the number's bit length is determined.
-    :returns:
-        Returns the number of bits in the integer.
-    '''
-    if num == 0:
-        return 0
-    if num < 0:
-        num = -num
-
-    # Make sure this is an int and not a float.
-    num & 1
-
-    hex_num = "%x" % num
-    return ((len(hex_num) - 1) * 4) + {
-        '0':0, '1':1, '2':2, '3':2,
-        '4':3, '5':3, '6':3, '7':3,
-        '8':4, '9':4, 'a':4, 'b':4,
-        'c':4, 'd':4, 'e':4, 'f':4,
-     }[hex_num[0]]
-
-
-def _bit_size(number):
-    '''
-    Returns the number of bits required to hold a specific long number.
-    '''
-    if number < 0:
-        raise ValueError('Only nonnegative numbers possible: %s' % number)
-
-    if number == 0:
-        return 0
-    
-    # This works, even with very large numbers. When using math.log(number, 2),
-    # you'll get rounding errors and it'll fail.
-    bits = 0
-    while number:
-        bits += 1
-        number >>= 1
-
-    return bits
-
-
-def byte_size(number):
-    '''
-    Returns the number of bytes required to hold a specific long number.
-    
-    The number of bytes is rounded up.
-
-    Usage::
-
-        >>> byte_size(1 << 1023)
-        128
-        >>> byte_size((1 << 1024) - 1)
-        128
-        >>> byte_size(1 << 1024)
-        129
-
-    :param number:
-        An unsigned integer
-    :returns:
-        The number of bytes required to hold a specific long number.
-    '''
-    quanta, mod = divmod(bit_size(number), 8)
-    if mod or number == 0:
-        quanta += 1
-    return quanta
-    #return int(math.ceil(bit_size(number) / 8.0))
-
-
-def extended_gcd(a, b):
-    '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb
-    '''
-    # r = gcd(a,b) i = multiplicative inverse of a mod b
-    #      or      j = multiplicative inverse of b mod a
-    # Neg return values for i or j are made positive mod b or a respectively
-    # Iterative version is faster and uses much less stack space
-    x = 0
-    y = 1
-    lx = 1
-    ly = 0
-    oa = a                             #Remember original a/b to remove 
-    ob = b                             #negative values from return results
-    while b != 0:
-        q = a // b
-        (a, b)  = (b, a % b)
-        (x, lx) = ((lx - (q * x)),x)
-        (y, ly) = ((ly - (q * y)),y)
-    if (lx < 0): lx += ob              #If neg wrap modulo original b
-    if (ly < 0): ly += oa              #If neg wrap modulo original a
-    return (a, lx, ly)                 #Return only positive values
-
-
-def inverse(x, n):
-    '''Returns x^-1 (mod n)
-
-    >>> inverse(7, 4)
-    3
-    >>> (inverse(143, 4) * 143) % 4
-    1
-    '''
-
-    (divider, inv, _) = extended_gcd(x, n)
-
-    if divider != 1:
-        raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n))
-
-    return inv
-
-
-def crt(a_values, modulo_values):
-    '''Chinese Remainder Theorem.
-
-    Calculates x such that x = a[i] (mod m[i]) for each i.
-
-    :param a_values: the a-values of the above equation
-    :param modulo_values: the m-values of the above equation
-    :returns: x such that x = a[i] (mod m[i]) for each i
-    
-
-    >>> crt([2, 3], [3, 5])
-    8
-
-    >>> crt([2, 3, 2], [3, 5, 7])
-    23
-
-    >>> crt([2, 3, 0], [7, 11, 15])
-    135
-    '''
-
-    m = 1
-    x = 0 
-
-    for modulo in modulo_values:
-        m *= modulo
-
-    for (m_i, a_i) in zip(modulo_values, a_values):
-        M_i = m // m_i
-        inv = inverse(M_i, m_i)
-
-        x = (x + a_i * M_i * inv) % m
-
-    return x
-
-if __name__ == '__main__':
-    import doctest
-    doctest.testmod()
-
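extended_gcd() and inverse() above are exactly what key generation needs to derive the private exponent. A minimal check using Python's own modular inverse (pow with exponent -1, available in Python 3.8+) on an assumed toy key of p=11, q=13:

    p, q, e = 11, 13, 7
    phi = (p - 1) * (q - 1)          # 120
    d = pow(e, -1, phi)              # same result as inverse(e, phi) above
    assert d == 103 and (e * d) % phi == 1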
diff --git a/tools/swarming_client/third_party/rsa/rsa/core.py b/tools/swarming_client/third_party/rsa/rsa/core.py
deleted file mode 100644
index 90dfee8..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/core.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Core mathematical operations.
-
-This is the actual core RSA implementation, which is only defined
-mathematically on integers.
-'''
-
-
-from rsa._compat import is_integer
-
-def assert_int(var, name):
-
-    if is_integer(var):
-        return
-
-    raise TypeError('%s should be an integer, not %s' % (name, var.__class__))
-
-def encrypt_int(message, ekey, n):
-    '''Encrypts a message using encryption key 'ekey', working modulo n'''
-
-    assert_int(message, 'message')
-    assert_int(ekey, 'ekey')
-    assert_int(n, 'n')
-
-    if message < 0:
-        raise ValueError('Only non-negative numbers are supported')
-         
-    if message > n:
-        raise OverflowError("The message %i is too long for n=%i" % (message, n))
-
-    return pow(message, ekey, n)
-
-def decrypt_int(cyphertext, dkey, n):
-    '''Decrypts a cypher text using the decryption key 'dkey', working
-    modulo n'''
-
-    assert_int(cyphertext, 'cyphertext')
-    assert_int(dkey, 'dkey')
-    assert_int(n, 'n')
-
-    message = pow(cyphertext, dkey, n)
-    return message
-
diff --git a/tools/swarming_client/third_party/rsa/rsa/key.py b/tools/swarming_client/third_party/rsa/rsa/key.py
deleted file mode 100644
index b6de7b3..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/key.py
+++ /dev/null
@@ -1,612 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''RSA key generation code.
-
-Create new keys with the newkeys() function. It will give you a PublicKey and a
-PrivateKey object.
-
-Loading and saving keys requires the pyasn1 module. This module is imported as
-late as possible, so that other functionality keeps working in the absence
-of pyasn1.
-
-'''
-
-import logging
-from rsa._compat import b, bytes_type
-
-import rsa.prime
-import rsa.pem
-import rsa.common
-
-log = logging.getLogger(__name__)
-
-
-
-class AbstractKey(object):
-    '''Abstract superclass for private and public keys.'''
-
-    @classmethod
-    def load_pkcs1(cls, keyfile, format='PEM'):
-        r'''Loads a key in PKCS#1 DER or PEM format.
-
-        :param keyfile: contents of a DER- or PEM-encoded file that contains
-            the public key.
-        :param format: the format of the file to load; 'PEM' or 'DER'
-
-        :return: a PublicKey object
-
-        '''
-
-        methods = {
-            'PEM': cls._load_pkcs1_pem,
-            'DER': cls._load_pkcs1_der,
-        }
-
-        if format not in methods:
-            formats = ', '.join(sorted(methods.keys()))
-            raise ValueError('Unsupported format: %r, try one of %s' % (format,
-                formats))
-
-        method = methods[format]
-        return method(keyfile)
-
-    def save_pkcs1(self, format='PEM'):
-        '''Saves the public key in PKCS#1 DER or PEM format.
-
-        :param format: the format to save; 'PEM' or 'DER'
-        :returns: the DER- or PEM-encoded public key.
-
-        '''
-
-        methods = {
-            'PEM': self._save_pkcs1_pem,
-            'DER': self._save_pkcs1_der,
-        }
-
-        if format not in methods:
-            formats = ', '.join(sorted(methods.keys()))
-            raise ValueError('Unsupported format: %r, try one of %s' % (format,
-                formats))
-
-        method = methods[format]
-        return method()
-
-class PublicKey(AbstractKey):
-    '''Represents a public RSA key.
-
-    This key is also known as the 'encryption key'. It contains the 'n' and 'e'
-    values.
-
-    Supports attributes as well as dictionary-like access. Attribute access is
-    faster, though.
-
-    >>> PublicKey(5, 3)
-    PublicKey(5, 3)
-
-    >>> key = PublicKey(5, 3)
-    >>> key.n
-    5
-    >>> key['n']
-    5
-    >>> key.e
-    3
-    >>> key['e']
-    3
-
-    '''
-
-    __slots__ = ('n', 'e')
-
-    def __init__(self, n, e):
-        self.n = n
-        self.e = e
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __repr__(self):
-        return 'PublicKey(%i, %i)' % (self.n, self.e)
-
-    def __eq__(self, other):
-        if other is None:
-            return False
-
-        if not isinstance(other, PublicKey):
-            return False
-
-        return self.n == other.n and self.e == other.e
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    @classmethod
-    def _load_pkcs1_der(cls, keyfile):
-        r'''Loads a key in PKCS#1 DER format.
-
-        @param keyfile: contents of a DER-encoded file that contains the public
-            key.
-        @return: a PublicKey object
-
-        First let's construct a DER encoded key:
-
-        >>> import base64
-        >>> b64der = 'MAwCBQCNGmYtAgMBAAE='
-        >>> der = base64.decodestring(b64der)
-
-        This loads the file:
-
-        >>> PublicKey._load_pkcs1_der(der)
-        PublicKey(2367317549, 65537)
-
-        '''
-
-        from pyasn1.codec.der import decoder
-        from rsa.asn1 import AsnPubKey
-        
-        (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey())
-        return cls(n=int(priv['modulus']), e=int(priv['publicExponent']))
-
-    def _save_pkcs1_der(self):
-        '''Saves the public key in PKCS#1 DER format.
-
-        @returns: the DER-encoded public key.
-        '''
-
-        from pyasn1.codec.der import encoder
-        from rsa.asn1 import AsnPubKey
-
-        # Create the ASN object
-        asn_key = AsnPubKey()
-        asn_key.setComponentByName('modulus', self.n)
-        asn_key.setComponentByName('publicExponent', self.e)
-
-        return encoder.encode(asn_key)
-
-    @classmethod
-    def _load_pkcs1_pem(cls, keyfile):
-        '''Loads a PKCS#1 PEM-encoded public key file.
-
-        The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
-        after the "-----END RSA PUBLIC KEY-----" lines is ignored.
-
-        @param keyfile: contents of a PEM-encoded file that contains the public
-            key.
-        @return: a PublicKey object
-        '''
-
-        der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY')
-        return cls._load_pkcs1_der(der)
-
-    def _save_pkcs1_pem(self):
-        '''Saves a PKCS#1 PEM-encoded public key file.
-
-        @return: contents of a PEM-encoded file that contains the public key.
-        '''
-
-        der = self._save_pkcs1_der()
-        return rsa.pem.save_pem(der, 'RSA PUBLIC KEY')
-
-    @classmethod
-    def load_pkcs1_openssl_pem(cls, keyfile):
-        '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL.
-        
-        These files can be recognised in that they start with BEGIN PUBLIC KEY
-        rather than BEGIN RSA PUBLIC KEY.
-        
-        The contents of the file before the "-----BEGIN PUBLIC KEY-----" and
-        after the "-----END PUBLIC KEY-----" lines is ignored.
-
-        @param keyfile: contents of a PEM-encoded file that contains the public
-            key, from OpenSSL.
-        @return: a PublicKey object
-        '''
-
-        der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY')
-        return cls.load_pkcs1_openssl_der(der)
-
-    @classmethod
-    def load_pkcs1_openssl_der(cls, keyfile):
-        '''Loads a PKCS#1 DER-encoded public key file from OpenSSL.
-
-        @param keyfile: contents of a DER-encoded file that contains the public
-            key, from OpenSSL.
-        @return: a PublicKey object
-        '''
-    
-        from rsa.asn1 import OpenSSLPubKey
-        from pyasn1.codec.der import decoder
-        from pyasn1.type import univ
-        
-        (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey())
-        
-        if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
-            raise TypeError("This is not a DER-encoded OpenSSL-compatible public key")
-                
-        return cls._load_pkcs1_der(keyinfo['key'][1:])
-        
-        
-
-
-class PrivateKey(AbstractKey):
-    '''Represents a private RSA key.
-
-    This key is also known as the 'decryption key'. It contains the 'n', 'e',
-    'd', 'p', 'q' and other values.
-
-    Supports attributes as well as dictionary-like access. Attribute access is
-    faster, though.
-
-    >>> PrivateKey(3247, 65537, 833, 191, 17)
-    PrivateKey(3247, 65537, 833, 191, 17)
-
-    exp1, exp2 and coef don't have to be given; they will be calculated:
-
-    >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
-    >>> pk.exp1
-    55063
-    >>> pk.exp2
-    10095
-    >>> pk.coef
-    50797
-
-    If you give exp1, exp2 or coef, they will be used as-is:
-
-    >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8)
-    >>> pk.exp1
-    6
-    >>> pk.exp2
-    7
-    >>> pk.coef
-    8
-
-    '''
-
-    __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef')
-
-    def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None):
-        self.n = n
-        self.e = e
-        self.d = d
-        self.p = p
-        self.q = q
-
-        # Calculate the other values if they aren't supplied
-        if exp1 is None:
-            self.exp1 = int(d % (p - 1))
-        else:
-            self.exp1 = exp1
-
-        if exp2 is None:
-            self.exp2 = int(d % (q - 1))
-        else:
-            self.exp2 = exp2
-
-        if coef is None:
-            self.coef = rsa.common.inverse(q, p)
-        else:
-            self.coef = coef
-
-    def __getitem__(self, key):
-        return getattr(self, key)
-
-    def __repr__(self):
-        return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self
-
-    def __eq__(self, other):
-        if other is None:
-            return False
-
-        if not isinstance(other, PrivateKey):
-            return False
-
-        return (self.n == other.n and
-            self.e == other.e and
-            self.d == other.d and
-            self.p == other.p and
-            self.q == other.q and
-            self.exp1 == other.exp1 and
-            self.exp2 == other.exp2 and
-            self.coef == other.coef)
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    @classmethod
-    def _load_pkcs1_der(cls, keyfile):
-        r'''Loads a key in PKCS#1 DER format.
-
-        @param keyfile: contents of a DER-encoded file that contains the private
-            key.
-        @return: a PrivateKey object
-
-        First let's construct a DER encoded key:
-
-        >>> import base64
-        >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt'
-        >>> der = base64.decodestring(b64der)
-
-        This loads the file:
-
-        >>> PrivateKey._load_pkcs1_der(der)
-        PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
-
-        '''
-
-        from pyasn1.codec.der import decoder
-        (priv, _) = decoder.decode(keyfile)
-
-        # ASN.1 contents of DER encoded private key:
-        #
-        # RSAPrivateKey ::= SEQUENCE {
-        #     version           Version, 
-        #     modulus           INTEGER,  -- n
-        #     publicExponent    INTEGER,  -- e
-        #     privateExponent   INTEGER,  -- d
-        #     prime1            INTEGER,  -- p
-        #     prime2            INTEGER,  -- q
-        #     exponent1         INTEGER,  -- d mod (p-1)
-        #     exponent2         INTEGER,  -- d mod (q-1) 
-        #     coefficient       INTEGER,  -- (inverse of q) mod p
-        #     otherPrimeInfos   OtherPrimeInfos OPTIONAL 
-        # }
-
-        if priv[0] != 0:
-            raise ValueError('Unable to read this file, version %s != 0' % priv[0])
-
-        as_ints = tuple(int(x) for x in priv[1:9])
-        return cls(*as_ints)
-
-    def _save_pkcs1_der(self):
-        '''Saves the private key in PKCS#1 DER format.
-
-        @returns: the DER-encoded private key.
-        '''
-
-        from pyasn1.type import univ, namedtype
-        from pyasn1.codec.der import encoder
-
-        class AsnPrivKey(univ.Sequence):
-            componentType = namedtype.NamedTypes(
-                namedtype.NamedType('version', univ.Integer()),
-                namedtype.NamedType('modulus', univ.Integer()),
-                namedtype.NamedType('publicExponent', univ.Integer()),
-                namedtype.NamedType('privateExponent', univ.Integer()),
-                namedtype.NamedType('prime1', univ.Integer()),
-                namedtype.NamedType('prime2', univ.Integer()),
-                namedtype.NamedType('exponent1', univ.Integer()),
-                namedtype.NamedType('exponent2', univ.Integer()),
-                namedtype.NamedType('coefficient', univ.Integer()),
-            )
-
-        # Create the ASN object
-        asn_key = AsnPrivKey()
-        asn_key.setComponentByName('version', 0)
-        asn_key.setComponentByName('modulus', self.n)
-        asn_key.setComponentByName('publicExponent', self.e)
-        asn_key.setComponentByName('privateExponent', self.d)
-        asn_key.setComponentByName('prime1', self.p)
-        asn_key.setComponentByName('prime2', self.q)
-        asn_key.setComponentByName('exponent1', self.exp1)
-        asn_key.setComponentByName('exponent2', self.exp2)
-        asn_key.setComponentByName('coefficient', self.coef)
-
-        return encoder.encode(asn_key)
-
-    @classmethod
-    def _load_pkcs1_pem(cls, keyfile):
-        '''Loads a PKCS#1 PEM-encoded private key file.
-
-        The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and
-        after the "-----END RSA PRIVATE KEY-----" lines is ignored.
-
-        @param keyfile: contents of a PEM-encoded file that contains the private
-            key.
-        @return: a PrivateKey object
-        '''
-
-        der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY'))
-        return cls._load_pkcs1_der(der)
-
-    def _save_pkcs1_pem(self):
-        '''Saves a PKCS#1 PEM-encoded private key file.
-
-        @return: contents of a PEM-encoded file that contains the private key.
-        '''
-
-        der = self._save_pkcs1_der()
-        return rsa.pem.save_pem(der, b('RSA PRIVATE KEY'))
-
-def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True):
-    '''Returns a tuple of two different primes of nbits bits each.
-
-    The resulting p * q has exactly 2 * nbits bits, and the returned p and q
-    will not be equal.
-
-    :param nbits: the number of bits in each of p and q.
-    :param getprime_func: the getprime function, defaults to
-        :py:func:`rsa.prime.getprime`.
-
-        *Introduced in Python-RSA 3.1*
-
-    :param accurate: whether to enable accurate mode or not.
-    :returns: (p, q), where p > q
-
-    >>> (p, q) = find_p_q(128)
-    >>> from rsa import common
-    >>> common.bit_size(p * q)
-    256
-
-    When not in accurate mode, the number of bits can be slightly less
-
-    >>> (p, q) = find_p_q(128, accurate=False)
-    >>> from rsa import common
-    >>> common.bit_size(p * q) <= 256
-    True
-    >>> common.bit_size(p * q) > 240
-    True
-    
-    '''
-    
-    total_bits = nbits * 2
-
-    # Make sure that p and q aren't too close or the factoring programs can
-    # factor n.
-    shift = nbits // 16
-    pbits = nbits + shift
-    qbits = nbits - shift
-    
-    # Choose the two initial primes
-    log.debug('find_p_q(%i): Finding p', nbits)
-    p = getprime_func(pbits)
-    log.debug('find_p_q(%i): Finding q', nbits)
-    q = getprime_func(qbits)
-
-    def is_acceptable(p, q):
-        '''Returns True iff p and q are acceptable:
-            
-            - p and q differ
-            - (p * q) has the right number of bits (when accurate=True)
-        '''
-
-        if p == q:
-            return False
-
-        if not accurate:
-            return True
-
-        # Make sure we have just the right amount of bits
-        found_size = rsa.common.bit_size(p * q)
-        return total_bits == found_size
-
-    # Keep choosing other primes until they match our requirements.
-    change_p = False
-    while not is_acceptable(p, q):
-        # Change p on one iteration and q on the other
-        if change_p:
-            p = getprime_func(pbits)
-        else:
-            q = getprime_func(qbits)
-
-        change_p = not change_p
-
-    # We want p > q as described on
-    # http://www.di-mgt.com.au/rsa_alg.html#crt
-    return (max(p, q), min(p, q))
-
-def calculate_keys(p, q, nbits):
-    '''Calculates an encryption and a decryption key given p and q, and
-    returns them as a tuple (e, d)
-
-    '''
-
-    phi_n = (p - 1) * (q - 1)
-
-    # A very common choice for e is 65537
-    e = 65537
-
-    try:
-        d = rsa.common.inverse(e, phi_n)
-    except ValueError:
-        raise ValueError("e (%d) and phi_n (%d) are not relatively prime" %
-                (e, phi_n))
-
-    if (e * d) % phi_n != 1:
-        raise ValueError("e (%d) and d (%d) are not mult. inv. modulo "
-                "phi_n (%d)" % (e, d, phi_n))
-
-    return (e, d)
-
-def gen_keys(nbits, getprime_func, accurate=True):
-    '''Generate RSA keys of nbits bits. Returns (p, q, e, d).
-
-    Note: this can take a long time, depending on the key size.
-    
-    :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
-        ``q`` will use ``nbits/2`` bits.
-    :param getprime_func: either :py:func:`rsa.prime.getprime` or a function
-        with similar signature.
-    '''
-
-    (p, q) = find_p_q(nbits // 2, getprime_func, accurate)
-    (e, d) = calculate_keys(p, q, nbits // 2)
-
-    return (p, q, e, d)
-
-def newkeys(nbits, accurate=True, poolsize=1):
-    '''Generates public and private keys, and returns them as (pub, priv).
-
-    The public key is also known as the 'encryption key', and is a
-    :py:class:`rsa.PublicKey` object. The private key is also known as the
-    'decryption key' and is a :py:class:`rsa.PrivateKey` object.
-
-    :param nbits: the number of bits required to store ``n = p*q``.
-    :param accurate: when True, ``n`` will have exactly the number of bits you
-        asked for. However, this makes key generation much slower. When False,
-        ``n`` may have slightly fewer bits.
-    :param poolsize: the number of processes to use to generate the prime
-        numbers. If set to a number > 1, a parallel algorithm will be used.
-        This requires Python 2.6 or newer.
-
-    :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)
-
-    The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
-    Python 2.6 or newer.
-    
-    '''
-
-    if nbits < 16:
-        raise ValueError('Key too small')
-
-    if poolsize < 1:
-        raise ValueError('Pool size (%i) should be >= 1' % poolsize)
-
-    # Determine which getprime function to use
-    if poolsize > 1:
-        from rsa import parallel
-        import functools
-
-        getprime_func = functools.partial(parallel.getprime, poolsize=poolsize)
-    else: getprime_func = rsa.prime.getprime
-
-    # Generate the key components
-    (p, q, e, d) = gen_keys(nbits, getprime_func)
-    
-    # Create the key objects
-    n = p * q
-
-    return (
-        PublicKey(n, e),
-        PrivateKey(n, e, d, p, q)
-    )
-
-__all__ = ['PublicKey', 'PrivateKey', 'newkeys']
-
-if __name__ == '__main__':
-    import doctest
-    
-    try:
-        for count in range(100):
-            (failures, tests) = doctest.testmod()
-            if failures:
-                break
-
-            if (count and count % 10 == 0) or count == 1:
-                print('%i times' % count)
-    except KeyboardInterrupt:
-        print('Aborted')
-    else:
-        print('Doctests done')
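The PrivateKey docstring above notes that exp1, exp2 and coef are derived from d, p and q when they are not supplied. A small sketch of that derivation, reusing the toy values from the docstring and assuming Python 3.8+ for pow(base, -1, mod):

# CRT parameters stored alongside (n, e, d, p, q) in a PKCS#1 private key.
n, e, d, p, q = 3727264081, 65537, 3349121513, 65063, 57287

exp1 = d % (p - 1)        # d mod (p-1), used for the CRT decryption shortcut
exp2 = d % (q - 1)        # d mod (q-1)
coef = pow(q, -1, p)      # (inverse of q) mod p

assert (exp1, exp2, coef) == (55063, 10095, 50797)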
diff --git a/tools/swarming_client/third_party/rsa/rsa/parallel.py b/tools/swarming_client/third_party/rsa/rsa/parallel.py
deleted file mode 100644
index e5034ac..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/parallel.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Functions for parallel computation on multiple cores.
-
-Introduced in Python-RSA 3.1.
-
-.. note::
-
-    Requires Python 2.6 or newer.
-
-'''
-
-from __future__ import print_function
-
-import multiprocessing as mp
-
-import rsa.prime
-import rsa.randnum
-
-def _find_prime(nbits, pipe):
-    while True:
-        integer = rsa.randnum.read_random_int(nbits)
-
-        # Make sure it's odd
-        integer |= 1
-
-        # Test for primeness
-        if rsa.prime.is_prime(integer):
-            pipe.send(integer)
-            return
-
-def getprime(nbits, poolsize):
-    '''Returns a prime number that can be stored in 'nbits' bits.
-
-    Works in multiple threads at the same time.
-
-    >>> p = getprime(128, 3)
-    >>> rsa.prime.is_prime(p-1)
-    False
-    >>> rsa.prime.is_prime(p)
-    True
-    >>> rsa.prime.is_prime(p+1)
-    False
-    
-    >>> from rsa import common
-    >>> common.bit_size(p) == 128
-    True
-    
-    '''
-
-    (pipe_recv, pipe_send) = mp.Pipe(duplex=False)
-
-    # Create processes
-    procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send))
-             for _ in range(poolsize)]
-    [p.start() for p in procs]
-
-    result = pipe_recv.recv()
-
-    [p.terminate() for p in procs]
-
-    return result
-
-__all__ = ['getprime']
-
-    
-if __name__ == '__main__':
-    print('Running doctests 1000x or until failure')
-    import doctest
-    
-    for count in range(100):
-        (failures, tests) = doctest.testmod()
-        if failures:
-            break
-        
-        if count and count % 10 == 0:
-            print('%i times' % count)
-    
-    print('Doctests done')
-
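parallel.getprime above races several worker processes over a pipe and keeps whichever prime arrives first. A minimal sketch of that pattern, with a plain trial-division test standing in for the library's probabilistic primality check (adequate only for the small bit sizes used here):

import multiprocessing as mp
import random

def _is_prime(n):
    # Naive trial division; fine for a toy 24-bit search.
    if n < 2 or n % 2 == 0:
        return n == 2
    f = 3
    while f * f <= n:
        if n % f == 0:
            return False
        f += 2
    return True

def _find_prime(nbits, pipe):
    while True:
        # Force the top bit and oddness, then test.
        candidate = random.getrandbits(nbits) | (1 << (nbits - 1)) | 1
        if _is_prime(candidate):
            pipe.send(candidate)
            return

def parallel_getprime(nbits, poolsize):
    recv_end, send_end = mp.Pipe(duplex=False)
    procs = [mp.Process(target=_find_prime, args=(nbits, send_end))
             for _ in range(poolsize)]
    for proc in procs:
        proc.start()
    result = recv_end.recv()          # first answer wins
    for proc in procs:
        proc.terminate()              # the losers are simply stopped
    return result

if __name__ == '__main__':
    print(parallel_getprime(24, 3))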
diff --git a/tools/swarming_client/third_party/rsa/rsa/pem.py b/tools/swarming_client/third_party/rsa/rsa/pem.py
deleted file mode 100644
index b1c3a0e..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/pem.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Functions that load and write PEM-encoded files.'''
-
-import base64
-from rsa._compat import b, is_bytes
-
-def _markers(pem_marker):
-    '''
-    Returns the start and end PEM markers
-    '''
-
-    if is_bytes(pem_marker):
-        pem_marker = pem_marker.decode('utf-8')
-
-    return (b('-----BEGIN %s-----' % pem_marker),
-            b('-----END %s-----' % pem_marker))
-
-def load_pem(contents, pem_marker):
-    '''Loads a PEM file.
-
-    @param contents: the contents of the file to interpret
-    @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
-        when your file has '-----BEGIN RSA PRIVATE KEY-----' and
-        '-----END RSA PRIVATE KEY-----' markers.
-
-    @return the base64-decoded content between the start and end markers.
-
-    @raise ValueError: when the content is invalid, for example when the start
-        marker cannot be found.
-
-    '''
-
-    (pem_start, pem_end) = _markers(pem_marker)
-
-    pem_lines = []
-    in_pem_part = False
-
-    for line in contents.splitlines():
-        line = line.strip()
-
-        # Skip empty lines
-        if not line:
-            continue
-
-        # Handle start marker
-        if line == pem_start:
-            if in_pem_part:
-                raise ValueError('Seen start marker "%s" twice' % pem_start)
-
-            in_pem_part = True
-            continue
-
-        # Skip stuff before first marker
-        if not in_pem_part:
-            continue
-
-        # Handle end marker
-        if in_pem_part and line == pem_end:
-            in_pem_part = False
-            break
-
-        # Load fields
-        if b(':') in line:
-            continue
-
-        pem_lines.append(line)
-
-    # Do some sanity checks
-    if not pem_lines:
-        raise ValueError('No PEM start marker "%s" found' % pem_start)
-
-    if in_pem_part:
-        raise ValueError('No PEM end marker "%s" found' % pem_end)
-
-    # Base64-decode the contents
-    pem = b('').join(pem_lines)
-    return base64.decodestring(pem)
-
-
-def save_pem(contents, pem_marker):
-    '''Saves a PEM file.
-
-    @param contents: the contents to encode in PEM format
-    @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
-        when your file has '-----BEGIN RSA PRIVATE KEY-----' and
-        '-----END RSA PRIVATE KEY-----' markers.
-
-    @return the base64-encoded content between the start and end markers.
-
-    '''
-
-    (pem_start, pem_end) = _markers(pem_marker)
-
-    b64 = base64.encodestring(contents).replace(b('\n'), b(''))
-    pem_lines = [pem_start]
-    
-    for block_start in range(0, len(b64), 64):
-        block = b64[block_start:block_start + 64]
-        pem_lines.append(block)
-
-    pem_lines.append(pem_end)
-    pem_lines.append(b(''))
-
-    return b('\n').join(pem_lines)
-    
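load_pem/save_pem above implement the usual PEM framing: base64-encode the DER payload, wrap it at 64 columns, and bracket it with BEGIN/END markers. A self-contained round-trip sketch (Python 3 spells the deprecated encodestring/decodestring used above as encodebytes/decodebytes):

import base64

def save_pem(der, marker):
    b64 = base64.encodebytes(der).replace(b'\n', b'')
    lines = [b'-----BEGIN %s-----' % marker.encode()]
    lines += [b64[i:i + 64] for i in range(0, len(b64), 64)]   # 64-column wrap
    lines += [b'-----END %s-----' % marker.encode(), b'']
    return b'\n'.join(lines)

def load_pem(pem, marker):
    start = b'-----BEGIN %s-----' % marker.encode()
    end = b'-----END %s-----' % marker.encode()
    body = pem.split(start, 1)[1].split(end, 1)[0]
    return base64.decodebytes(body)          # ignores the embedded newlines

der = bytes(range(20))                       # stand-in for a DER-encoded key
assert load_pem(save_pem(der, 'RSA PUBLIC KEY'), 'RSA PUBLIC KEY') == der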
diff --git a/tools/swarming_client/third_party/rsa/rsa/pkcs1.py b/tools/swarming_client/third_party/rsa/rsa/pkcs1.py
deleted file mode 100644
index 0e51928..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/pkcs1.py
+++ /dev/null
@@ -1,373 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Functions for PKCS#1 version 1.5 encryption and signing
-
-This module implements certain functionality from PKCS#1 version 1.5. For a
-very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes
-
-At least 8 bytes of random padding are used when encrypting a message. This makes
-these methods much more secure than the ones in the ``rsa`` module.
-
-WARNING: this module leaks information when decryption fails. The exceptions
-that are raised contain the Python traceback information, which can be used to
-deduce where in the process the failure occurred. DO NOT PASS SUCH INFORMATION
-to your users.
-'''
-
-import hashlib
-import os
-
-from rsa._compat import b
-from rsa import common, transform, core, varblock
-
-# ASN.1 codes that describe the hash algorithm used.
-HASH_ASN1 = {
-    'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'),
-    'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'),
-    'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'),
-    'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'),
-    'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'),
-}
-
-HASH_METHODS = {
-    'MD5': hashlib.md5,
-    'SHA-1': hashlib.sha1,
-    'SHA-256': hashlib.sha256,
-    'SHA-384': hashlib.sha384,
-    'SHA-512': hashlib.sha512,
-}
-
-class CryptoError(Exception):
-    '''Base class for all exceptions in this module.'''
-
-class DecryptionError(CryptoError):
-    '''Raised when decryption fails.'''
-
-class VerificationError(CryptoError):
-    '''Raised when verification fails.'''
- 
-def _pad_for_encryption(message, target_length):
-    r'''Pads the message for encryption, returning the padded message.
-    
-    :return: 00 02 RANDOM_DATA 00 MESSAGE
-    
-    >>> block = _pad_for_encryption('hello', 16)
-    >>> len(block)
-    16
-    >>> block[0:2]
-    '\x00\x02'
-    >>> block[-6:]
-    '\x00hello'
-
-    '''
-
-    max_msglength = target_length - 11
-    msglength = len(message)
-    
-    if msglength > max_msglength:
-        raise OverflowError('%i bytes needed for message, but there is only'
-            ' space for %i' % (msglength, max_msglength))
-    
-    # Get random padding
-    padding = b('')
-    padding_length = target_length - msglength - 3
-    
-    # We remove 0-bytes, so we'll end up with less padding than we've asked for,
-    # so keep adding data until we're at the correct length.
-    while len(padding) < padding_length:
-        needed_bytes = padding_length - len(padding)
-        
-        # Always read a few bytes more than we need, and trim off the rest
-        # after removing the 0-bytes. This increases the chance of getting
-        # enough bytes, especially when needed_bytes is small
-        new_padding = os.urandom(needed_bytes + 5)
-        new_padding = new_padding.replace(b('\x00'), b(''))
-        padding = padding + new_padding[:needed_bytes]
-    
-    assert len(padding) == padding_length
-    
-    return b('').join([b('\x00\x02'),
-                    padding,
-                    b('\x00'),
-                    message])
-    
-
-def _pad_for_signing(message, target_length):
-    r'''Pads the message for signing, returning the padded message.
-    
-    The padding is always a repetition of FF bytes.
-    
-    :return: 00 01 PADDING 00 MESSAGE
-    
-    >>> block = _pad_for_signing('hello', 16)
-    >>> len(block)
-    16
-    >>> block[0:2]
-    '\x00\x01'
-    >>> block[-6:]
-    '\x00hello'
-    >>> block[2:-6]
-    '\xff\xff\xff\xff\xff\xff\xff\xff'
-    
-    '''
-
-    max_msglength = target_length - 11
-    msglength = len(message)
-    
-    if msglength > max_msglength:
-        raise OverflowError('%i bytes needed for message, but there is only'
-            ' space for %i' % (msglength, max_msglength))
-    
-    padding_length = target_length - msglength - 3
-    
-    return b('').join([b('\x00\x01'),
-                    padding_length * b('\xff'),
-                    b('\x00'),
-                    message])
-    
-    
-def encrypt(message, pub_key):
-    '''Encrypts the given message using PKCS#1 v1.5
-    
-    :param message: the message to encrypt. Must be a byte string no longer than
-        ``k-11`` bytes, where ``k`` is the number of bytes needed to encode
-        the ``n`` component of the public key.
-    :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with.
-    :raise OverflowError: when the message is too large to fit in the padded
-        block.
-        
-    >>> from rsa import key, common
-    >>> (pub_key, priv_key) = key.newkeys(256)
-    >>> message = 'hello'
-    >>> crypto = encrypt(message, pub_key)
-    
-    The crypto text should be just as long as the public key 'n' component:
-
-    >>> len(crypto) == common.byte_size(pub_key.n)
-    True
-    
-    '''
-    
-    keylength = common.byte_size(pub_key.n)
-    padded = _pad_for_encryption(message, keylength)
-    
-    payload = transform.bytes2int(padded)
-    encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n)
-    block = transform.int2bytes(encrypted, keylength)
-    
-    return block
-
-def decrypt(crypto, priv_key):
-    r'''Decrypts the given message using PKCS#1 v1.5
-    
-    The decryption is considered 'failed' when the resulting cleartext doesn't
-    start with the bytes 00 02, or when the 00 byte between the padding and
-    the message cannot be found.
-    
-    :param crypto: the crypto text as returned by :py:func:`rsa.encrypt`
-    :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with.
-    :raise DecryptionError: when the decryption fails. No details are given as
-        to why the code thinks the decryption fails, as this would leak
-        information about the private key.
-
-
-    >>> import rsa
-    >>> (pub_key, priv_key) = rsa.newkeys(256)
-
-    It works with strings:
-
-    >>> crypto = encrypt('hello', pub_key)
-    >>> decrypt(crypto, priv_key)
-    'hello'
-    
-    And with binary data:
-
-    >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key)
-    >>> decrypt(crypto, priv_key)
-    '\x00\x00\x00\x00\x01'
-
-    Altering the encrypted information will *likely* cause a
-    :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use
-    :py:func:`rsa.sign`.
-
-
-    .. warning::
-
-        Never display the stack trace of a
-        :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the
-        code the exception occurred, and thus leaks information about the key.
-        It's only a tiny bit of information, but every bit makes cracking the
-        keys easier.
-
-    >>> crypto = encrypt('hello', pub_key)
-    >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte
-    >>> decrypt(crypto, priv_key)
-    Traceback (most recent call last):
-    ...
-    DecryptionError: Decryption failed
-
-    '''
-    
-    blocksize = common.byte_size(priv_key.n)
-    encrypted = transform.bytes2int(crypto)
-    decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n)
-    cleartext = transform.int2bytes(decrypted, blocksize)
-
-    # If we can't find the cleartext marker, decryption failed.
-    if cleartext[0:2] != b('\x00\x02'):
-        raise DecryptionError('Decryption failed')
-    
-    # Find the 00 separator between the padding and the message
-    try:
-        sep_idx = cleartext.index(b('\x00'), 2)
-    except ValueError:
-        raise DecryptionError('Decryption failed')
-    
-    return cleartext[sep_idx+1:]
-    
-def sign(message, priv_key, hash):
-    '''Signs the message with the private key.
-
-    Hashes the message, then signs the hash with the given key. This is known
-    as a "detached signature", because the message itself isn't altered.
-    
-    :param message: the message to sign. Can be an 8-bit string or a file-like
-        object. If ``message`` has a ``read()`` method, it is assumed to be a
-        file-like object.
-    :param priv_key: the :py:class:`rsa.PrivateKey` to sign with
-    :param hash: the hash method used on the message. Use 'MD5', 'SHA-1',
-        'SHA-256', 'SHA-384' or 'SHA-512'.
-    :return: a message signature block.
-    :raise OverflowError: if the private key is too small to contain the
-        requested hash.
-
-    '''
-
-    # Get the ASN1 code for this hash method
-    if hash not in HASH_ASN1:
-        raise ValueError('Invalid hash method: %s' % hash)
-    asn1code = HASH_ASN1[hash]
-    
-    # Calculate the hash
-    hash = _hash(message, hash)
-
-    # Encrypt the hash with the private key
-    cleartext = asn1code + hash
-    keylength = common.byte_size(priv_key.n)
-    padded = _pad_for_signing(cleartext, keylength)
-    
-    payload = transform.bytes2int(padded)
-    encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n)
-    block = transform.int2bytes(encrypted, keylength)
-    
-    return block
-
-def verify(message, signature, pub_key):
-    '''Verifies that the signature matches the message.
-    
-    The hash method is detected automatically from the signature.
-    
-    :param message: the signed message. Can be an 8-bit string or a file-like
-        object. If ``message`` has a ``read()`` method, it is assumed to be a
-        file-like object.
-    :param signature: the signature block, as created with :py:func:`rsa.sign`.
-    :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message.
-    :raise VerificationError: when the signature doesn't match the message.
-
-    '''
-    
-    keylength = common.byte_size(pub_key.n)
-    encrypted = transform.bytes2int(signature)
-    decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n)
-    clearsig = transform.int2bytes(decrypted, keylength)
-    
-    # Get the hash method
-    method_name = _find_method_hash(clearsig)
-    message_hash = _hash(message, method_name)
-
-    # Reconstruct the expected padded hash
-    cleartext = HASH_ASN1[method_name] + message_hash
-    expected = _pad_for_signing(cleartext, keylength)
-
-    # Compare with the signed one
-    if expected != clearsig:
-        raise VerificationError('Verification failed')
-
-    return True
-
-def _hash(message, method_name):
-    '''Returns the message digest.
-    
-    :param message: the signed message. Can be an 8-bit string or a file-like
-        object. If ``message`` has a ``read()`` method, it is assumed to be a
-        file-like object.
-    :param method_name: the hash method, must be a key of
-        :py:const:`HASH_METHODS`.
-    
-    '''
-
-    if method_name not in HASH_METHODS:
-        raise ValueError('Invalid hash method: %s' % method_name)
-    
-    method = HASH_METHODS[method_name]
-    hasher = method()
-
-    if hasattr(message, 'read') and hasattr(message.read, '__call__'):
-        # read as 1K blocks
-        for block in varblock.yield_fixedblocks(message, 1024):
-            hasher.update(block)
-    else:
-        # hash the message object itself.
-        hasher.update(message)
-
-    return hasher.digest()
-
-
-def _find_method_hash(clearsig):
-    '''Finds the hash method.
-    
-    :param clearsig: full padded ASN1 and hash.
-    
-    :return: the used hash method.
-    
-    :raise VerificationError: when the hash method cannot be found
-
-    '''
-
-    for (hashname, asn1code) in HASH_ASN1.items():
-        if asn1code in clearsig:
-            return hashname
-    
-    raise VerificationError('Verification failed')
-
-
-__all__ = ['encrypt', 'decrypt', 'sign', 'verify',
-           'DecryptionError', 'VerificationError', 'CryptoError']
-
-if __name__ == '__main__':
-    print('Running doctests 1000x or until failure')
-    import doctest
-    
-    for count in range(1000):
-        (failures, tests) = doctest.testmod()
-        if failures:
-            break
-        
-        if count and count % 100 == 0:
-            print('%i times' % count)
-    
-    print('Doctests done')
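sign() above pads an ASN.1 DigestInfo with FF bytes before applying the private exponent. A sketch of just that EMSA-PKCS1-v1_5 layout for SHA-256, using the prefix from the HASH_ASN1 table above:

import hashlib

# ASN.1 DigestInfo prefix for SHA-256 (same value as HASH_ASN1['SHA-256']).
SHA256_ASN1 = bytes.fromhex('3031300d060960864801650304020105000420')

def pad_for_signing(message, key_byte_length):
    digest_info = SHA256_ASN1 + hashlib.sha256(message).digest()
    padding_len = key_byte_length - len(digest_info) - 3
    if padding_len < 8:
        raise OverflowError('key too small for this hash')
    # Layout: 00 01 FF..FF 00 DigestInfo
    return b'\x00\x01' + b'\xff' * padding_len + b'\x00' + digest_info

block = pad_for_signing(b'hello', 128)       # e.g. a 1024-bit modulus
assert len(block) == 128 and block[:2] == b'\x00\x01'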
diff --git a/tools/swarming_client/third_party/rsa/rsa/prime.py b/tools/swarming_client/third_party/rsa/rsa/prime.py
deleted file mode 100644
index 7422eb1..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/prime.py
+++ /dev/null
@@ -1,166 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Numerical functions related to primes.
-
-Implementation based on the book Algorithm Design by Michael T. Goodrich and
-Roberto Tamassia, 2002.
-'''
-
-__all__ = [ 'getprime', 'are_relatively_prime']
-
-import rsa.randnum
-
-def gcd(p, q):
-    '''Returns the greatest common divisor of p and q
-
-    >>> gcd(48, 180)
-    12
-    '''
-
-    while q != 0:
-        if p < q: (p,q) = (q,p)
-        (p,q) = (q, p % q)
-    return p
-    
-
-def jacobi(a, b):
-    '''Calculates the value of the Jacobi symbol (a/b) where both a and b are
-    positive integers, and b is odd
-
-    :returns: -1, 0 or 1
-    '''
-
-    assert a > 0
-    assert b > 0
-
-    if a == 0: return 0
-    result = 1
-    while a > 1:
-        if a & 1:
-            if ((a-1)*(b-1) >> 2) & 1:
-                result = -result
-            a, b = b % a, a
-        else:
-            if (((b * b) - 1) >> 3) & 1:
-                result = -result
-            a >>= 1
-    if a == 0: return 0
-    return result
-
-def jacobi_witness(x, n):
-    '''Returns False if n is an Euler pseudo-prime with base x, and
-    True otherwise.
-    '''
-
-    j = jacobi(x, n) % n
-
-    f = pow(x, n >> 1, n)
-
-    if j == f: return False
-    return True
-
-def randomized_primality_testing(n, k):
-    '''Calculates whether n is composite (which is always correct) or
-    prime (which is incorrect with error probability 2**-k)
-
-    Returns False if the number is composite, and True if it's
-    probably prime.
-    '''
-
-    # 50% of Jacobi-witnesses can report compositeness of non-prime numbers
-
-    # The implemented algorithm using the Jacobi witness function has error
-    # probability q <= 0.5, according to Goodrich et al.
-    #
-    # q = 0.5
-    # t = int(math.ceil(k / log(1 / q, 2)))
-    # So t = k / log(2, 2) = k / 1 = k
-    # this means we can use range(k) rather than range(t)
-
-    for _ in range(k):
-        x = rsa.randnum.randint(n-1)
-        if jacobi_witness(x, n): return False
-    
-    return True
-
-def is_prime(number):
-    '''Returns True if the number is prime, and False otherwise.
-
-    >>> is_prime(42)
-    False
-    >>> is_prime(41)
-    True
-    '''
-
-    return randomized_primality_testing(number, 6)
-
-def getprime(nbits):
-    '''Returns a prime number that can be stored in 'nbits' bits.
-
-    >>> p = getprime(128)
-    >>> is_prime(p-1)
-    False
-    >>> is_prime(p)
-    True
-    >>> is_prime(p+1)
-    False
-    
-    >>> from rsa import common
-    >>> common.bit_size(p) == 128
-    True
-    
-    '''
-
-    while True:
-        integer = rsa.randnum.read_random_int(nbits)
-
-        # Make sure it's odd
-        integer |= 1
-
-        # Test for primeness
-        if is_prime(integer):
-            return integer
-
-        # Retry if not prime
-
-
-def are_relatively_prime(a, b):
-    '''Returns True if a and b are relatively prime, and False if they
-    are not.
-
-    >>> are_relatively_prime(2, 3)
-    1
-    >>> are_relatively_prime(2, 4)
-    0
-    '''
-
-    d = gcd(a, b)
-    return (d == 1)
-    
-if __name__ == '__main__':
-    print('Running doctests 1000x or until failure')
-    import doctest
-    
-    for count in range(1000):
-        (failures, tests) = doctest.testmod()
-        if failures:
-            break
-        
-        if count and count % 100 == 0:
-            print('%i times' % count)
-    
-    print('Doctests done')
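randomized_primality_testing above repeats a witness test k times so that the error probability drops to roughly 2**-k. The sketch below keeps that loop structure but substitutes a plain Fermat witness for the Jacobi/Euler test, purely for illustration:

import random

def probably_prime(n, k=6):
    if n < 4:
        return n in (2, 3)
    for _ in range(k):
        a = random.randrange(2, n - 1)
        if pow(a, n - 1, n) != 1:     # a witnesses that n is composite
            return False
    # Heuristically composite with probability <= 2**-k (Fermat is weaker
    # than the Euler test used above, e.g. against Carmichael numbers).
    return True

assert probably_prime(41) and not probably_prime(42)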
diff --git a/tools/swarming_client/third_party/rsa/rsa/randnum.py b/tools/swarming_client/third_party/rsa/rsa/randnum.py
deleted file mode 100644
index 0e78274..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/randnum.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Functions for generating random numbers.'''
-
-# Source inspired by code by Yesudeep Mangalapilly <yesudeep@gmail.com>
-
-import os
-
-from rsa import common, transform
-from rsa._compat import byte
-
-def read_random_bits(nbits):
-    '''Reads 'nbits' random bits.
-
-    If nbits isn't a whole number of bytes, an extra byte will be prepended with
-    only the lower bits set.
-    '''
-
-    nbytes, rbits = divmod(nbits, 8)
-
-    # Get the random bytes
-    randomdata = os.urandom(nbytes)
-
-    # Add the remaining random bits
-    if rbits > 0:
-        randomvalue = ord(os.urandom(1))
-        randomvalue >>= (8 - rbits)
-        randomdata = byte(randomvalue) + randomdata
-
-    return randomdata
-
-
-def read_random_int(nbits):
-    '''Reads a random integer of approximately nbits bits.
-    '''
-
-    randomdata = read_random_bits(nbits)
-    value = transform.bytes2int(randomdata)
-
-    # Ensure that the number is large enough to just fill out the required
-    # number of bits.
-    value |= 1 << (nbits - 1)
-
-    return value
-
-def randint(maxvalue):
-    '''Returns a random integer x with 1 <= x <= maxvalue
-    
-    May take a very long time in specific situations. If maxvalue needs N bits
-    to store, the closer maxvalue is to (2 ** N) - 1, the faster this function
-    is.
-    '''
-
-    bit_size = common.bit_size(maxvalue)
-
-    tries = 0
-    while True:
-        value = read_random_int(bit_size)
-        if value <= maxvalue:
-            break
-
-        if tries and tries % 10 == 0:
-            # After a lot of tries to get the right number of bits but still
-            # larger than maxvalue, decrease the number of bits by 1. That'll
-            # dramatically increase the chances to get a small enough number.
-            bit_size -= 1
-        tries += 1
-
-    return value
-
-
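read_random_int above assembles the integer from os.urandom output and then pins the top bit so that it really occupies nbits bits. On Python 3 the same idea collapses to a few lines with int.from_bytes (a sketch, not a drop-in replacement):

import os

def read_random_int(nbits):
    nbytes = (nbits + 7) // 8
    value = int.from_bytes(os.urandom(nbytes), 'big')
    value >>= (nbytes * 8 - nbits)     # keep exactly nbits random bits
    value |= 1 << (nbits - 1)          # pin the most significant bit
    return value

assert read_random_int(128).bit_length() == 128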
diff --git a/tools/swarming_client/third_party/rsa/rsa/transform.py b/tools/swarming_client/third_party/rsa/rsa/transform.py
deleted file mode 100644
index c740b2d..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/transform.py
+++ /dev/null
@@ -1,220 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Data transformation functions.
-
-From bytes to a number, number to bytes, etc.
-'''
-
-from __future__ import absolute_import
-
-try:
-    # We'll use psyco if available on 32-bit architectures to speed up code.
-    # Using psyco (if available) cuts down the execution time on Python 2.5
-    # at least by half.
-    import psyco
-    psyco.full()
-except ImportError:
-    pass
-
-import binascii
-from struct import pack
-from rsa import common
-from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE
-
-
-def bytes2int(raw_bytes):
-    r'''Converts a list of bytes or an 8-bit string to an integer.
-
-    When using unicode strings, encode it to some encoding like UTF8 first.
-
-    >>> (((128 * 256) + 64) * 256) + 15
-    8405007
-    >>> bytes2int('\x80@\x0f')
-    8405007
-
-    '''
-
-    return int(binascii.hexlify(raw_bytes), 16)
-
-
-def _int2bytes(number, block_size=None):
-    r'''Converts a number to a string of bytes.
-
-    Usage::
-
-        >>> _int2bytes(123456789)
-        '\x07[\xcd\x15'
-        >>> bytes2int(_int2bytes(123456789))
-        123456789
-
-        >>> _int2bytes(123456789, 6)
-        '\x00\x00\x07[\xcd\x15'
-        >>> bytes2int(_int2bytes(123456789, 128))
-        123456789
-
-        >>> _int2bytes(123456789, 3)
-        Traceback (most recent call last):
-        ...
-        OverflowError: Needed 4 bytes for number, but block size is 3
-
-    @param number: the number to convert
-    @param block_size: the number of bytes to output. If the number encoded to
-        bytes is less than this, the block will be zero-padded. When not given,
-        the returned block is not padded.
-
-    @throws OverflowError when block_size is given and the number takes up more
-        bytes than fit into the block.
-    '''
-    # Type checking
-    if not is_integer(number):
-        raise TypeError("You must pass an integer for 'number', not %s" %
-            number.__class__)
-
-    if number < 0:
-        raise ValueError('Negative numbers cannot be used: %i' % number)
-
-    # Do some bounds checking
-    if number == 0:
-        needed_bytes = 1
-        raw_bytes = [ZERO_BYTE]
-    else:
-        needed_bytes = common.byte_size(number)
-        raw_bytes = []
-
-    # You cannot compare None > 0 in Python 3x. It will fail with a TypeError.
-    if block_size and block_size > 0:
-        if needed_bytes > block_size:
-            raise OverflowError('Needed %i bytes for number, but block size '
-                'is %i' % (needed_bytes, block_size))
-
-    # Convert the number to bytes.
-    while number > 0:
-        raw_bytes.insert(0, byte(number & 0xFF))
-        number >>= 8
-
-    # Pad with zeroes to fill the block
-    if block_size and block_size > 0:
-        padding = (block_size - needed_bytes) * ZERO_BYTE
-    else:
-        padding = EMPTY_BYTE
-
-    return padding + EMPTY_BYTE.join(raw_bytes)
-
-
-def bytes_leading(raw_bytes, needle=ZERO_BYTE):
-    '''
-    Finds the number of prefixed byte occurrences in the haystack.
-
-    Useful when you want to deal with padding.
-
-    :param raw_bytes:
-        Raw bytes.
-    :param needle:
-        The byte to count. Default \000.
-    :returns:
-        The number of leading needle bytes.
-    '''
-    leading = 0
-    # Indexing keeps compatibility between Python 2.x and Python 3.x
-    _byte = needle[0]
-    for x in raw_bytes:
-        if x == _byte:
-            leading += 1
-        else:
-            break
-    return leading
-
-
-def int2bytes(number, fill_size=None, chunk_size=None, overflow=False):
-    '''
-    Convert an unsigned integer to bytes (base-256 representation).
-
-    Does not preserve leading zeros if you don't specify a chunk size or
-    fill size.
-
-    .. NOTE:
-        You must not specify both fill_size and chunk_size. Only one
-        of them is allowed.
-
-    :param number:
-        Integer value
-    :param fill_size:
-        If the optional fill size is given the length of the resulting
-        byte string is expected to be the fill size and will be padded
-        with prefix zero bytes to satisfy that length.
-    :param chunk_size:
-        If optional chunk size is given and greater than zero, pad the front of
-        the byte string with binary zeros so that the length is a multiple of
-        ``chunk_size``.
-    :param overflow:
-        ``False`` (default). If this is ``True``, no ``OverflowError``
-        will be raised when the fill_size is shorter than the length
-        of the generated byte sequence. Instead the byte sequence will
-        be returned as is.
-    :returns:
-        Raw bytes (base-256 representation).
-    :raises:
-        ``OverflowError`` when fill_size is given and the number takes up more
-        bytes than fit into the block. This requires the ``overflow``
-        argument to this function to be set to ``False``; otherwise, no
-        error will be raised.
-    '''
-    if number < 0:
-        raise ValueError("Number must be an unsigned integer: %d" % number)
-
-    if fill_size and chunk_size:
-        raise ValueError("You can either fill or pad chunks, but not both")
-
-    # Ensure these are integers.
-    number & 1
-
-    raw_bytes = b('')
-
-    # Pack the integer one machine word at a time into bytes.
-    num = number
-    word_bits, _, max_uint, pack_type = get_word_alignment(num)
-    pack_format = ">%s" % pack_type
-    while num > 0:
-        raw_bytes = pack(pack_format, num & max_uint) + raw_bytes
-        num >>= word_bits
-    # Obtain the index of the first non-zero byte.
-    zero_leading = bytes_leading(raw_bytes)
-    if number == 0:
-        raw_bytes = ZERO_BYTE
-    # De-padding.
-    raw_bytes = raw_bytes[zero_leading:]
-
-    length = len(raw_bytes)
-    if fill_size and fill_size > 0:
-        if not overflow and length > fill_size:
-            raise OverflowError(
-                "Need %d bytes for number, but fill size is %d" %
-                (length, fill_size)
-            )
-        raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE)
-    elif chunk_size and chunk_size > 0:
-        remainder = length % chunk_size
-        if remainder:
-            padding_size = chunk_size - remainder
-            raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE)
-    return raw_bytes
-
-
-if __name__ == '__main__':
-    import doctest
-    doctest.testmod()
-
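bytes2int and int2bytes above predate int.from_bytes/int.to_bytes; on Python 3 the same conversions reduce to the sketch below (fill-size padding and overflow handling only, no chunking):

def bytes2int(raw):
    # Big-endian, unsigned.
    return int.from_bytes(raw, 'big')

def int2bytes(number, fill_size=None):
    length = max(1, (number.bit_length() + 7) // 8)
    if fill_size is not None:
        if length > fill_size:
            raise OverflowError('Need %d bytes, but fill size is %d'
                                % (length, fill_size))
        length = fill_size             # zero-pad on the left
    return number.to_bytes(length, 'big')

assert bytes2int(b'\x80@\x0f') == 8405007
assert int2bytes(123456789, 6) == b'\x00\x00\x07[\xcd\x15'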
diff --git a/tools/swarming_client/third_party/rsa/rsa/util.py b/tools/swarming_client/third_party/rsa/rsa/util.py
deleted file mode 100644
index 5bbb70b..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/util.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''Utility functions.'''
-
-from __future__ import with_statement, print_function
-
-import sys
-from optparse import OptionParser
-
-import rsa.key
-
-def private_to_public():
-    '''Reads a private key and outputs the corresponding public key.'''
-
-    # Parse the CLI options
-    parser = OptionParser(usage='usage: %prog [options]',
-            description='Reads a private key and outputs the '
-            'corresponding public key. Both private and public keys use '
-            'the format described in PKCS#1 v1.5')
-
-    parser.add_option('-i', '--input', dest='infilename', type='string',
-            help='Input filename. Reads from stdin if not specified')
-    parser.add_option('-o', '--output', dest='outfilename', type='string',
-            help='Output filename. Writes to stdout if not specified')
-
-    parser.add_option('--inform', dest='inform',
-            help='key format of input - default PEM',
-            choices=('PEM', 'DER'), default='PEM')
-
-    parser.add_option('--outform', dest='outform',
-            help='key format of output - default PEM',
-            choices=('PEM', 'DER'), default='PEM')
-
-    (cli, cli_args) = parser.parse_args(sys.argv)
-
-    # Read the input data
-    if cli.infilename:
-        print('Reading private key from %s in %s format' % \
-            (cli.infilename, cli.inform), file=sys.stderr)
-        with open(cli.infilename, 'rb') as infile:
-            in_data = infile.read()
-    else:
-        print('Reading private key from stdin in %s format' % cli.inform,
-              file=sys.stderr)
-        in_data = sys.stdin.read().encode('ascii')
-
-    assert type(in_data) == bytes, type(in_data)
-
-
-    # Take the public fields and create a public key
-    priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
-    pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
-
-    # Save to the output file
-    out_data = pub_key.save_pkcs1(cli.outform)
-
-    if cli.outfilename:
-        print('Writing public key to %s in %s format' % \
-            (cli.outfilename, cli.outform), file=sys.stderr)
-        with open(cli.outfilename, 'wb') as outfile:
-            outfile.write(out_data)
-    else:
-        print('Writing public key to stdout in %s format' % cli.outform,
-              file=sys.stderr)
-        sys.stdout.write(out_data.decode('ascii'))
-
-    
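private_to_public above is a small optparse CLI that re-wraps the (n, e) part of a private key as a public key. A compact argparse sketch of the same flow, assuming the python-rsa package these vendored files come from is importable as 'rsa':

import argparse
import sys

import rsa.key

def main():
    parser = argparse.ArgumentParser(
        description='Read a PKCS#1 private key, write the matching public key.')
    parser.add_argument('-i', '--input', help='read from this file instead of stdin')
    parser.add_argument('-o', '--output', help='write to this file instead of stdout')
    parser.add_argument('--inform', choices=('PEM', 'DER'), default='PEM')
    parser.add_argument('--outform', choices=('PEM', 'DER'), default='PEM')
    args = parser.parse_args()

    in_data = open(args.input, 'rb').read() if args.input else sys.stdin.buffer.read()

    priv = rsa.key.PrivateKey.load_pkcs1(in_data, args.inform)
    pub = rsa.key.PublicKey(priv.n, priv.e)      # public part is just (n, e)
    out_data = pub.save_pkcs1(args.outform)

    if args.output:
        open(args.output, 'wb').write(out_data)
    else:
        sys.stdout.buffer.write(out_data)

if __name__ == '__main__':
    main()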
diff --git a/tools/swarming_client/third_party/rsa/rsa/varblock.py b/tools/swarming_client/third_party/rsa/rsa/varblock.py
deleted file mode 100644
index c7d96ae..0000000
--- a/tools/swarming_client/third_party/rsa/rsa/varblock.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-#  Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-'''VARBLOCK file support
-
-The VARBLOCK file format is as follows, where || denotes byte concatenation:
-
-    FILE := VERSION || BLOCK || BLOCK ...
-
-    BLOCK := LENGTH || DATA
-
-    LENGTH := varint-encoded length of the subsequent data. Varint comes from
-    Google Protobuf, and encodes an integer into a variable number of bytes.
-    Each byte uses the 7 lowest bits to encode the value. The highest bit set
-    to 1 indicates the next byte is also part of the varint. The last byte will
-    have this bit set to 0.
-
-This file format is called the VARBLOCK format, in line with the varint format
-used to denote the block sizes.
-
-'''
-
-from rsa._compat import byte, b
-
-
-ZERO_BYTE = b('\x00')
-VARBLOCK_VERSION = 1
-
-def read_varint(infile):
-    '''Reads a varint from the file.
-
-    When the first byte to be read indicates EOF, (0, 0) is returned. When an
-    EOF occurs when at least one byte has been read, an EOFError exception is
-    raised.
-
-    @param infile: the file-like object to read from. It should have a read()
-        method.
-    @returns (varint, length), the read varint and the number of read bytes.
-    '''
-
-    varint = 0
-    read_bytes = 0
-
-    while True:
-        char = infile.read(1)
-        if len(char) == 0:
-            if read_bytes == 0:
-                return (0, 0)
-            raise EOFError('EOF while reading varint, value is %i so far' %
-                           varint)
-
-        byte = ord(char)
-        varint += (byte & 0x7F) << (7 * read_bytes)
-
-        read_bytes += 1
-
-        if not byte & 0x80:
-            return (varint, read_bytes)
-
-
-def write_varint(outfile, value):
-    '''Writes a varint to a file.
-
-    @param outfile: the file-like object to write to. It should have a write()
-        method.
-    @returns the number of written bytes.
-    '''
-
-    # there is a big difference between 'write the value 0' (this case) and
-    # 'there is nothing left to write' (the false-case of the while loop)
-
-    if value == 0:
-        outfile.write(ZERO_BYTE)
-        return 1
-
-    written_bytes = 0
-    while value > 0:
-        to_write = value & 0x7f
-        value = value >> 7
-
-        if value > 0:
-            to_write |= 0x80
-
-        outfile.write(byte(to_write))
-        written_bytes += 1
-
-    return written_bytes
-
-
-def yield_varblocks(infile):
-    '''Generator, yields each block in the input file.
-
-    @param infile: file to read, is expected to have the VARBLOCK format as
-        described in the module's docstring.
-    @yields the contents of each block.
-    '''
-
-    # Check the version number
-    first_char = infile.read(1)
-    if len(first_char) == 0:
-        raise EOFError('Unable to read VARBLOCK version number')
-
-    version = ord(first_char)
-    if version != VARBLOCK_VERSION:
-        raise ValueError('VARBLOCK version %i not supported' % version)
-
-    while True:
-        (block_size, read_bytes) = read_varint(infile)
-
-        # EOF at block boundary, that's fine.
-        if read_bytes == 0 and block_size == 0:
-            break
-
-        block = infile.read(block_size)
-
-        read_size = len(block)
-        if read_size != block_size:
-            raise EOFError('Block size is %i, but could read only %i bytes' %
-                           (block_size, read_size))
-
-        yield block
-
-
-def yield_fixedblocks(infile, blocksize):
-    '''Generator, yields each block of ``blocksize`` bytes in the input file.
-
-    :param infile: file to read and separate in blocks.
-    :returns: a generator that yields the contents of each block
-    '''
-
-    while True:
-        block = infile.read(blocksize)
-
-        read_bytes = len(block)
-        if read_bytes == 0:
-            break
-
-        yield block
-
-        if read_bytes < blocksize:
-            break
-
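
varblock.py was also the only in-tree description of the varint framing it used: 7 value bits per byte, with the high bit set while more bytes follow. A minimal, self-contained Python 3 sketch of that encoding, independent of the deleted module:

    import io

    def write_varint(outfile, value):
        """Write `value` as a varint; returns the number of bytes written."""
        if value == 0:
            outfile.write(b'\x00')
            return 1
        written = 0
        while value > 0:
            to_write = value & 0x7F
            value >>= 7
            if value > 0:
                to_write |= 0x80         # continuation bit: more bytes follow
            outfile.write(bytes([to_write]))
            written += 1
        return written

    def read_varint(infile):
        """Read one varint; raises EOFError on a truncated stream."""
        value, shift = 0, 0
        while True:
            char = infile.read(1)
            if not char:
                raise EOFError('EOF while reading varint')
            byte = char[0]
            value |= (byte & 0x7F) << shift
            shift += 7
            if not byte & 0x80:          # high bit clear: last byte
                return value

    buf = io.BytesIO()
    write_varint(buf, 300)               # encodes as b'\xac\x02'
    buf.seek(0)
    assert read_varint(buf) == 300
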
diff --git a/tools/swarming_client/third_party/six/LICENSE b/tools/swarming_client/third_party/six/LICENSE
deleted file mode 100644
index e558f9d..0000000
--- a/tools/swarming_client/third_party/six/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-Copyright (c) 2010-2015 Benjamin Peterson
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/tools/swarming_client/third_party/six/README b/tools/swarming_client/third_party/six/README
deleted file mode 100644
index ee628a9..0000000
--- a/tools/swarming_client/third_party/six/README
+++ /dev/null
@@ -1,16 +0,0 @@
-Six is a Python 2 and 3 compatibility library.  It provides utility functions
-for smoothing over the differences between the Python versions with the goal of
-writing Python code that is compatible on both Python versions.  See the
-documentation for more information on what is provided.
-
-Six supports every Python version since 2.6.  It is contained in only one Python
-file, so it can be easily copied into your project. (The copyright and license
-notice must be retained.)
-
-Online documentation is at https://pythonhosted.org/six/.
-
-Bugs can be reported to https://bitbucket.org/gutworth/six.  The code can also
-be found there.
-
-For questions about six or porting in general, email the python-porting mailing
-list: https://mail.python.org/mailman/listinfo/python-porting
diff --git a/tools/swarming_client/third_party/six/README.swarming b/tools/swarming_client/third_party/six/README.swarming
deleted file mode 100644
index 100b24d..0000000
--- a/tools/swarming_client/third_party/six/README.swarming
+++ /dev/null
@@ -1,13 +0,0 @@
-Name: six
-Short Name: six
-URL: https://bitbucket.org/gutworth/six/commits/tag/1.10.0
-Version: 1.10.0
-Revision: 403:e5218c3f66a2
-License: Apache License, Version 2.0
-
-Description:
-Six is a Python 2 and 3 compatibility library.
-
-Local Modifications:
-- Copied six.py as __init__.py.
-- Kept LICENSE and README.
diff --git a/tools/swarming_client/third_party/six/__init__.py b/tools/swarming_client/third_party/six/__init__.py
deleted file mode 100644
index 56e4272..0000000
--- a/tools/swarming_client/third_party/six/__init__.py
+++ /dev/null
@@ -1,868 +0,0 @@
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
-    string_types = str,
-    integer_types = int,
-    class_types = type,
-    text_type = str
-    binary_type = bytes
-
-    MAXSIZE = sys.maxsize
-else:
-    string_types = basestring,
-    integer_types = (int, long)
-    class_types = (type, types.ClassType)
-    text_type = unicode
-    binary_type = str
-
-    if sys.platform.startswith("java"):
-        # Jython always uses 32 bits.
-        MAXSIZE = int((1 << 31) - 1)
-    else:
-        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
-        class X(object):
-
-            def __len__(self):
-                return 1 << 31
-        try:
-            len(X())
-        except OverflowError:
-            # 32-bit
-            MAXSIZE = int((1 << 31) - 1)
-        else:
-            # 64-bit
-            MAXSIZE = int((1 << 63) - 1)
-        del X
-
-
-def _add_doc(func, doc):
-    """Add documentation to a function."""
-    func.__doc__ = doc
-
-
-def _import_module(name):
-    """Import module, returning the module after the last dot."""
-    __import__(name)
-    return sys.modules[name]
-
-
-class _LazyDescr(object):
-
-    def __init__(self, name):
-        self.name = name
-
-    def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)  # Invokes __set__.
-        try:
-            # This is a bit ugly, but it avoids running this again by
-            # removing this descriptor.
-            delattr(obj.__class__, self.name)
-        except AttributeError:
-            pass
-        return result
-
-
-class MovedModule(_LazyDescr):
-
-    def __init__(self, name, old, new=None):
-        super(MovedModule, self).__init__(name)
-        if PY3:
-            if new is None:
-                new = name
-            self.mod = new
-        else:
-            self.mod = old
-
-    def _resolve(self):
-        return _import_module(self.mod)
-
-    def __getattr__(self, attr):
-        _module = self._resolve()
-        value = getattr(_module, attr)
-        setattr(self, attr, value)
-        return value
-
-
-class _LazyModule(types.ModuleType):
-
-    def __init__(self, name):
-        super(_LazyModule, self).__init__(name)
-        self.__doc__ = self.__class__.__doc__
-
-    def __dir__(self):
-        attrs = ["__doc__", "__name__"]
-        attrs += [attr.name for attr in self._moved_attributes]
-        return attrs
-
-    # Subclasses should override this
-    _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
-    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
-        super(MovedAttribute, self).__init__(name)
-        if PY3:
-            if new_mod is None:
-                new_mod = name
-            self.mod = new_mod
-            if new_attr is None:
-                if old_attr is None:
-                    new_attr = name
-                else:
-                    new_attr = old_attr
-            self.attr = new_attr
-        else:
-            self.mod = old_mod
-            if old_attr is None:
-                old_attr = name
-            self.attr = old_attr
-
-    def _resolve(self):
-        module = _import_module(self.mod)
-        return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
-    """
-    A meta path importer to import six.moves and its submodules.
-
-    This class implements a PEP302 finder and loader. It should be compatible
-    with Python 2.5 and all existing versions of Python3
-    """
-
-    def __init__(self, six_module_name):
-        self.name = six_module_name
-        self.known_modules = {}
-
-    def _add_module(self, mod, *fullnames):
-        for fullname in fullnames:
-            self.known_modules[self.name + "." + fullname] = mod
-
-    def _get_module(self, fullname):
-        return self.known_modules[self.name + "." + fullname]
-
-    def find_module(self, fullname, path=None):
-        if fullname in self.known_modules:
-            return self
-        return None
-
-    def __get_module(self, fullname):
-        try:
-            return self.known_modules[fullname]
-        except KeyError:
-            raise ImportError("This loader does not know module " + fullname)
-
-    def load_module(self, fullname):
-        try:
-            # in case of a reload
-            return sys.modules[fullname]
-        except KeyError:
-            pass
-        mod = self.__get_module(fullname)
-        if isinstance(mod, MovedModule):
-            mod = mod._resolve()
-        else:
-            mod.__loader__ = self
-        sys.modules[fullname] = mod
-        return mod
-
-    def is_package(self, fullname):
-        """
-        Return true, if the named module is a package.
-
-        We need this method to get correct spec objects with
-        Python 3.4 (see PEP451)
-        """
-        return hasattr(self.__get_module(fullname), "__path__")
-
-    def get_code(self, fullname):
-        """Return None
-
-        Required, if is_package is implemented"""
-        self.__get_module(fullname)  # eventually raises ImportError
-        return None
-    get_source = get_code  # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
-    """Lazy loading of moved objects"""
-    __path__ = []  # mark as package
-
-
-_moved_attributes = [
-    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
-    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
-    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
-    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
-    MovedAttribute("intern", "__builtin__", "sys"),
-    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
-    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
-    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
-    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
-    MovedAttribute("reduce", "__builtin__", "functools"),
-    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
-    MovedAttribute("StringIO", "StringIO", "io"),
-    MovedAttribute("UserDict", "UserDict", "collections"),
-    MovedAttribute("UserList", "UserList", "collections"),
-    MovedAttribute("UserString", "UserString", "collections"),
-    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
-    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
-    MovedModule("builtins", "__builtin__"),
-    MovedModule("configparser", "ConfigParser"),
-    MovedModule("copyreg", "copy_reg"),
-    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
-    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
-    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
-    MovedModule("http_cookies", "Cookie", "http.cookies"),
-    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
-    MovedModule("html_parser", "HTMLParser", "html.parser"),
-    MovedModule("http_client", "httplib", "http.client"),
-    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
-    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
-    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
-    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
-    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
-    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
-    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
-    MovedModule("cPickle", "cPickle", "pickle"),
-    MovedModule("queue", "Queue"),
-    MovedModule("reprlib", "repr"),
-    MovedModule("socketserver", "SocketServer"),
-    MovedModule("_thread", "thread", "_thread"),
-    MovedModule("tkinter", "Tkinter"),
-    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
-    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
-    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
-    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
-    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
-    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
-    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
-    MovedModule("tkinter_colorchooser", "tkColorChooser",
-                "tkinter.colorchooser"),
-    MovedModule("tkinter_commondialog", "tkCommonDialog",
-                "tkinter.commondialog"),
-    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
-    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
-    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
-                "tkinter.simpledialog"),
-    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
-    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
-    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
-    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
-    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
-    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
-    _moved_attributes += [
-        MovedModule("winreg", "_winreg"),
-    ]
-
-for attr in _moved_attributes:
-    setattr(_MovedItems, attr.name, attr)
-    if isinstance(attr, MovedModule):
-        _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
-    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
-    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
-    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
-    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
-    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
-    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
-    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
-    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
-    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
-    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
-    MovedAttribute("quote", "urllib", "urllib.parse"),
-    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
-    MovedAttribute("unquote", "urllib", "urllib.parse"),
-    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
-    MovedAttribute("urlencode", "urllib", "urllib.parse"),
-    MovedAttribute("splitquery", "urllib", "urllib.parse"),
-    MovedAttribute("splittag", "urllib", "urllib.parse"),
-    MovedAttribute("splituser", "urllib", "urllib.parse"),
-    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
-    setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
-                      "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
-    MovedAttribute("URLError", "urllib2", "urllib.error"),
-    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
-    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
-    setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
-                      "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
-    MovedAttribute("urlopen", "urllib2", "urllib.request"),
-    MovedAttribute("install_opener", "urllib2", "urllib.request"),
-    MovedAttribute("build_opener", "urllib2", "urllib.request"),
-    MovedAttribute("pathname2url", "urllib", "urllib.request"),
-    MovedAttribute("url2pathname", "urllib", "urllib.request"),
-    MovedAttribute("getproxies", "urllib", "urllib.request"),
-    MovedAttribute("Request", "urllib2", "urllib.request"),
-    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
-    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
-    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
-    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
-    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
-    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
-    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
-    MovedAttribute("URLopener", "urllib", "urllib.request"),
-    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
-    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
-    setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
-                      "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
-    MovedAttribute("addbase", "urllib", "urllib.response"),
-    MovedAttribute("addclosehook", "urllib", "urllib.response"),
-    MovedAttribute("addinfo", "urllib", "urllib.response"),
-    MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
-    setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
-                      "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
-    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
-    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
-                      "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
-    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
-    __path__ = []  # mark as package
-    parse = _importer._get_module("moves.urllib_parse")
-    error = _importer._get_module("moves.urllib_error")
-    request = _importer._get_module("moves.urllib_request")
-    response = _importer._get_module("moves.urllib_response")
-    robotparser = _importer._get_module("moves.urllib_robotparser")
-
-    def __dir__(self):
-        return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
-                      "moves.urllib")
-
-
-def add_move(move):
-    """Add an item to six.moves."""
-    setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
-    """Remove item from six.moves."""
-    try:
-        delattr(_MovedItems, name)
-    except AttributeError:
-        try:
-            del moves.__dict__[name]
-        except KeyError:
-            raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
-    _meth_func = "__func__"
-    _meth_self = "__self__"
-
-    _func_closure = "__closure__"
-    _func_code = "__code__"
-    _func_defaults = "__defaults__"
-    _func_globals = "__globals__"
-else:
-    _meth_func = "im_func"
-    _meth_self = "im_self"
-
-    _func_closure = "func_closure"
-    _func_code = "func_code"
-    _func_defaults = "func_defaults"
-    _func_globals = "func_globals"
-
-
-try:
-    advance_iterator = next
-except NameError:
-    def advance_iterator(it):
-        return it.next()
-next = advance_iterator
-
-
-try:
-    callable = callable
-except NameError:
-    def callable(obj):
-        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
-    def get_unbound_function(unbound):
-        return unbound
-
-    create_bound_method = types.MethodType
-
-    def create_unbound_method(func, cls):
-        return func
-
-    Iterator = object
-else:
-    def get_unbound_function(unbound):
-        return unbound.im_func
-
-    def create_bound_method(func, obj):
-        return types.MethodType(func, obj, obj.__class__)
-
-    def create_unbound_method(func, cls):
-        return types.MethodType(func, None, cls)
-
-    class Iterator(object):
-
-        def next(self):
-            return type(self).__next__(self)
-
-    callable = callable
-_add_doc(get_unbound_function,
-         """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
-    def iterkeys(d, **kw):
-        return iter(d.keys(**kw))
-
-    def itervalues(d, **kw):
-        return iter(d.values(**kw))
-
-    def iteritems(d, **kw):
-        return iter(d.items(**kw))
-
-    def iterlists(d, **kw):
-        return iter(d.lists(**kw))
-
-    viewkeys = operator.methodcaller("keys")
-
-    viewvalues = operator.methodcaller("values")
-
-    viewitems = operator.methodcaller("items")
-else:
-    def iterkeys(d, **kw):
-        return d.iterkeys(**kw)
-
-    def itervalues(d, **kw):
-        return d.itervalues(**kw)
-
-    def iteritems(d, **kw):
-        return d.iteritems(**kw)
-
-    def iterlists(d, **kw):
-        return d.iterlists(**kw)
-
-    viewkeys = operator.methodcaller("viewkeys")
-
-    viewvalues = operator.methodcaller("viewvalues")
-
-    viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
-         "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
-         "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
-    def b(s):
-        return s.encode("latin-1")
-
-    def u(s):
-        return s
-    unichr = chr
-    import struct
-    int2byte = struct.Struct(">B").pack
-    del struct
-    byte2int = operator.itemgetter(0)
-    indexbytes = operator.getitem
-    iterbytes = iter
-    import io
-    StringIO = io.StringIO
-    BytesIO = io.BytesIO
-    _assertCountEqual = "assertCountEqual"
-    if sys.version_info[1] <= 1:
-        _assertRaisesRegex = "assertRaisesRegexp"
-        _assertRegex = "assertRegexpMatches"
-    else:
-        _assertRaisesRegex = "assertRaisesRegex"
-        _assertRegex = "assertRegex"
-else:
-    def b(s):
-        return s
-    # Workaround for standalone backslash
-
-    def u(s):
-        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
-    unichr = unichr
-    int2byte = chr
-
-    def byte2int(bs):
-        return ord(bs[0])
-
-    def indexbytes(buf, i):
-        return ord(buf[i])
-    iterbytes = functools.partial(itertools.imap, ord)
-    import StringIO
-    StringIO = BytesIO = StringIO.StringIO
-    _assertCountEqual = "assertItemsEqual"
-    _assertRaisesRegex = "assertRaisesRegexp"
-    _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
-    return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
-    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
-    return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
-    exec_ = getattr(moves.builtins, "exec")
-
-    def reraise(tp, value, tb=None):
-        if value is None:
-            value = tp()
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-
-else:
-    def exec_(_code_, _globs_=None, _locs_=None):
-        """Execute code in a namespace."""
-        if _globs_ is None:
-            frame = sys._getframe(1)
-            _globs_ = frame.f_globals
-            if _locs_ is None:
-                _locs_ = frame.f_locals
-            del frame
-        elif _locs_ is None:
-            _locs_ = _globs_
-        exec("""exec _code_ in _globs_, _locs_""")
-
-    exec_("""def reraise(tp, value, tb=None):
-    raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
-    exec_("""def raise_from(value, from_value):
-    if from_value is None:
-        raise value
-    raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
-    exec_("""def raise_from(value, from_value):
-    raise value from from_value
-""")
-else:
-    def raise_from(value, from_value):
-        raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
-    def print_(*args, **kwargs):
-        """The new-style print function for Python 2.4 and 2.5."""
-        fp = kwargs.pop("file", sys.stdout)
-        if fp is None:
-            return
-
-        def write(data):
-            if not isinstance(data, basestring):
-                data = str(data)
-            # If the file has an encoding, encode unicode with it.
-            if (isinstance(fp, file) and
-                    isinstance(data, unicode) and
-                    fp.encoding is not None):
-                errors = getattr(fp, "errors", None)
-                if errors is None:
-                    errors = "strict"
-                data = data.encode(fp.encoding, errors)
-            fp.write(data)
-        want_unicode = False
-        sep = kwargs.pop("sep", None)
-        if sep is not None:
-            if isinstance(sep, unicode):
-                want_unicode = True
-            elif not isinstance(sep, str):
-                raise TypeError("sep must be None or a string")
-        end = kwargs.pop("end", None)
-        if end is not None:
-            if isinstance(end, unicode):
-                want_unicode = True
-            elif not isinstance(end, str):
-                raise TypeError("end must be None or a string")
-        if kwargs:
-            raise TypeError("invalid keyword arguments to print()")
-        if not want_unicode:
-            for arg in args:
-                if isinstance(arg, unicode):
-                    want_unicode = True
-                    break
-        if want_unicode:
-            newline = unicode("\n")
-            space = unicode(" ")
-        else:
-            newline = "\n"
-            space = " "
-        if sep is None:
-            sep = space
-        if end is None:
-            end = newline
-        for i, arg in enumerate(args):
-            if i:
-                write(sep)
-            write(arg)
-        write(end)
-if sys.version_info[:2] < (3, 3):
-    _print = print_
-
-    def print_(*args, **kwargs):
-        fp = kwargs.get("file", sys.stdout)
-        flush = kwargs.pop("flush", False)
-        _print(*args, **kwargs)
-        if flush and fp is not None:
-            fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
-    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
-              updated=functools.WRAPPER_UPDATES):
-        def wrapper(f):
-            f = functools.wraps(wrapped, assigned, updated)(f)
-            f.__wrapped__ = wrapped
-            return f
-        return wrapper
-else:
-    wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
-    """Create a base class with a metaclass."""
-    # This requires a bit of explanation: the basic idea is to make a dummy
-    # metaclass for one level of class instantiation that replaces itself with
-    # the actual metaclass.
-    class metaclass(meta):
-
-        def __new__(cls, name, this_bases, d):
-            return meta(name, bases, d)
-    return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
-    """Class decorator for creating a class with a metaclass."""
-    def wrapper(cls):
-        orig_vars = cls.__dict__.copy()
-        slots = orig_vars.get('__slots__')
-        if slots is not None:
-            if isinstance(slots, str):
-                slots = [slots]
-            for slots_var in slots:
-                orig_vars.pop(slots_var)
-        orig_vars.pop('__dict__', None)
-        orig_vars.pop('__weakref__', None)
-        return metaclass(cls.__name__, cls.__bases__, orig_vars)
-    return wrapper
-
-
-def python_2_unicode_compatible(klass):
-    """
-    A decorator that defines __unicode__ and __str__ methods under Python 2.
-    Under Python 3 it does nothing.
-
-    To support Python 2 and 3 with a single code base, define a __str__ method
-    returning text and apply this decorator to the class.
-    """
-    if PY2:
-        if '__str__' not in klass.__dict__:
-            raise ValueError("@python_2_unicode_compatible cannot be applied "
-                             "to %s because it doesn't define __str__()." %
-                             klass.__name__)
-        klass.__unicode__ = klass.__str__
-        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
-    return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = []  # required for PEP 302 and PEP 451
-__package__ = __name__  # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
-    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
-    for i, importer in enumerate(sys.meta_path):
-        # Here's some real nastiness: Another "instance" of the six module might
-        # be floating around. Therefore, we can't use isinstance() to check for
-        # the six meta path importer, since the other six instance will have
-        # inserted an importer with different class.
-        if (type(importer).__name__ == "_SixMetaPathImporter" and
-                importer.name == __name__):
-            del sys.meta_path[i]
-            break
-    del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)
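
The removed six/__init__.py is the unmodified upstream six 1.10.0 source. A short sketch of the idioms callers of this copy typically relied on, assuming the upstream `six` package is still importable from elsewhere on the path:

    import six
    from six.moves.urllib.parse import urlencode      # urllib (Py2) / urllib.parse (Py3)

    print(six.PY2, six.PY3)                            # coarse interpreter check
    print(urlencode({'q': 'swarming'}))                # 'q=swarming' on either version

    data = {'a': 1, 'b': 2}
    for key, value in six.iteritems(data):             # dict.iteritems() vs dict.items()
        print(key, value)

    class Meta(type):
        pass

    class Base(six.with_metaclass(Meta, object)):      # metaclass syntax differs in 2 vs 3
        pass
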
diff --git a/tools/swarming_client/third_party/uritemplate/README.swarming b/tools/swarming_client/third_party/uritemplate/README.swarming
deleted file mode 100644
index 86cb273..0000000
--- a/tools/swarming_client/third_party/uritemplate/README.swarming
+++ /dev/null
@@ -1,14 +0,0 @@
-Name: uritemplate
-Short Name: uritemplate
-Version: 0.6
-Revision: 1e780a49412cdbb273e9421974cb91845c124f3f
-Home-page: https://chromium.googlesource.com/external/github.com/uri-templates/uritemplate-py
-License: Apache License, Version 2.0
-
-Description:
-This is a Python implementation of `RFC6570`_, URI Template, and can
-expand templates up to and including Level 4 in that specification.
-
-Local Modifications:
-- Installed using glyco (see ../README.txt for more info)
-- Added README.swarming
diff --git a/tools/swarming_client/third_party/uritemplate/__init__.py b/tools/swarming_client/third_party/uritemplate/__init__.py
deleted file mode 100644
index 712405d..0000000
--- a/tools/swarming_client/third_party/uritemplate/__init__.py
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/usr/bin/env python
-
-"""
-URI Template (RFC6570) Processor
-"""
-
-__copyright__ = """\
-Copyright 2011-2013 Joe Gregorio
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import re
-try:
-   from urllib.parse import quote
-except ImportError:
-   from urllib import quote
-
-
-
-__version__ = "0.6"
-
-RESERVED = ":/?#[]@!$&'()*+,;="
-OPERATOR = "+#./;?&|!@"
-MODIFIER = ":^"
-TEMPLATE = re.compile("{([^\}]+)}")
-
-
-def variables(template):
-    '''Returns the set of keywords in a uri template'''
-    vars = set()
-    for varlist in TEMPLATE.findall(template):
-        if varlist[0] in OPERATOR:
-            varlist = varlist[1:]
-        varspecs = varlist.split(',')
-        for var in varspecs:
-            # handle prefix values
-            var = var.split(':')[0]
-            # handle composite values
-            if var.endswith('*'):
-                var = var[:-1]
-            vars.add(var)
-    return vars
-
-
-def _quote(value, safe, prefix=None):
-    if prefix is not None:
-        return quote(str(value)[:prefix], safe)
-    return quote(str(value), safe)
-
-
-def _tostring(varname, value, explode, prefix, operator, safe=""):
-    if isinstance(value, list):
-        return ",".join([_quote(x, safe) for x in value])
-    if isinstance(value, dict):
-        keys = sorted(value.keys())
-        if explode:
-            return ",".join([_quote(key, safe) + "=" + \
-                             _quote(value[key], safe) for key in keys])
-        else:
-            return ",".join([_quote(key, safe) + "," + \
-                             _quote(value[key], safe) for key in keys])
-    elif value is None:
-        return
-    else:
-        return _quote(value, safe, prefix)
-
-
-def _tostring_path(varname, value, explode, prefix, operator, safe=""):
-    joiner = operator
-    if isinstance(value, list):
-        if explode:
-            out = [_quote(x, safe) for x in value if value is not None]
-        else:
-            joiner = ","
-            out = [_quote(x, safe) for x in value if value is not None]
-        if out:
-            return joiner.join(out)
-        else:
-            return
-    elif isinstance(value, dict):
-        keys = sorted(value.keys())
-        if explode:
-            out = [_quote(key, safe) + "=" + \
-                   _quote(value[key], safe) for key in keys \
-                   if value[key] is not None]
-        else:
-            joiner = ","
-            out = [_quote(key, safe) + "," + \
-                   _quote(value[key], safe) \
-                   for key in keys if value[key] is not None]
-        if out:
-            return joiner.join(out)
-        else:
-            return
-    elif value is None:
-        return
-    else:
-        return _quote(value, safe, prefix)
-
-
-def _tostring_semi(varname, value, explode, prefix, operator, safe=""):
-    joiner = operator
-    if operator == "?":
-        joiner = "&"
-    if isinstance(value, list):
-        if explode:
-            out = [varname + "=" + _quote(x, safe) \
-                   for x in value if x is not None]
-            if out:
-                return joiner.join(out)
-            else:
-                return
-        else:
-            return varname + "=" + ",".join([_quote(x, safe) \
-                                             for x in value])
-    elif isinstance(value, dict):
-        keys = sorted(value.keys())
-        if explode:
-            return joiner.join([_quote(key, safe) + "=" + \
-                                _quote(value[key], safe) \
-                                for key in keys if key is not None])
-        else:
-            return varname + "=" + ",".join([_quote(key, safe) + "," + \
-                             _quote(value[key], safe) for key in keys \
-                             if key is not None])
-    else:
-        if value is None:
-            return
-        elif value:
-            return (varname + "=" + _quote(value, safe, prefix))
-        else:
-            return varname
-
-
-def _tostring_query(varname, value, explode, prefix, operator, safe=""):
-    joiner = operator
-    if operator in ["?", "&"]:
-        joiner = "&"
-    if isinstance(value, list):
-        if 0 == len(value):
-            return None
-        if explode:
-            return joiner.join([varname + "=" + _quote(x, safe) \
-                                for x in value])
-        else:
-            return (varname + "=" + ",".join([_quote(x, safe) \
-                                             for x in value]))
-    elif isinstance(value, dict):
-        if 0 == len(value):
-            return None
-        keys = sorted(value.keys())
-        if explode:
-            return joiner.join([_quote(key, safe) + "=" + \
-                                _quote(value[key], safe) \
-                                for key in keys])
-        else:
-            return varname + "=" + \
-                   ",".join([_quote(key, safe) + "," + \
-                             _quote(value[key], safe) for key in keys])
-    else:
-        if value is None:
-            return
-        elif value:
-            return (varname + "=" + _quote(value, safe, prefix))
-        else:
-            return (varname  + "=")
-
-
-TOSTRING = {
-    "" : _tostring,
-    "+": _tostring,
-    "#": _tostring,
-    ";": _tostring_semi,
-    "?": _tostring_query,
-    "&": _tostring_query,
-    "/": _tostring_path,
-    ".": _tostring_path,
-    }
-
-
-def expand(template, variables):
-    """
-    Expand template as a URI Template using variables.
-    """
-    def _sub(match):
-        expression = match.group(1)
-        operator = ""
-        if expression[0] in OPERATOR:
-            operator = expression[0]
-            varlist = expression[1:]
-        else:
-            varlist = expression
-
-        safe = ""
-        if operator in ["+", "#"]:
-            safe = RESERVED
-        varspecs = varlist.split(",")
-        varnames = []
-        defaults = {}
-        for varspec in varspecs:
-            default = None
-            explode = False
-            prefix = None
-            if "=" in varspec:
-                varname, default = tuple(varspec.split("=", 1))
-            else:
-                varname = varspec
-            if varname[-1] == "*":
-                explode = True
-                varname = varname[:-1]
-            elif ":" in varname:
-                try:
-                    prefix = int(varname[varname.index(":")+1:])
-                except ValueError:
-                    raise ValueError("non-integer prefix '{0}'".format(
-                       varname[varname.index(":")+1:]))
-                varname = varname[:varname.index(":")]
-            if default:
-                defaults[varname] = default
-            varnames.append((varname, explode, prefix))
-
-        retval = []
-        joiner = operator
-        start = operator
-        if operator == "+":
-            start = ""
-            joiner = ","
-        if operator == "#":
-            joiner = ","
-        if operator == "?":
-            joiner = "&"
-        if operator == "&":
-            start = "&"
-        if operator == "":
-            joiner = ","
-        for varname, explode, prefix in varnames:
-            if varname in variables:
-                value = variables[varname]
-                if not value and value != "" and varname in defaults:
-                    value = defaults[varname]
-            elif varname in defaults:
-                value = defaults[varname]
-            else:
-                continue
-            expanded = TOSTRING[operator](
-              varname, value, explode, prefix, operator, safe=safe)
-            if expanded is not None:
-                retval.append(expanded)
-        if len(retval) > 0:
-            return start + joiner.join(retval)
-        else:
-            return ""
-
-    return TEMPLATE.sub(_sub, template)
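
The deleted uritemplate module implemented RFC 6570 URI Template expansion through Level 4 behind two entry points, variables() and expand(). A minimal usage sketch, assuming a compatible uritemplate module remains available on the path; variables that are not supplied simply drop out of the expansion:

    import uritemplate

    tmpl = 'https://example.com/search{?q,lang}{#frag}'

    print(uritemplate.variables(tmpl))                 # e.g. {'q', 'lang', 'frag'}
    print(uritemplate.expand(tmpl, {'q': 'swarming', 'lang': 'en'}))
    # https://example.com/search?q=swarming&lang=en   ({#frag} is unset, so it is omitted)
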
diff --git a/tools/swarming_client/tools/cost.py b/tools/swarming_client/tools/cost.py
deleted file mode 100755
index d6409bb..0000000
--- a/tools/swarming_client/tools/cost.py
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Calculate statistics about tasks.
-
-Saves the data fetched from the server into a json file to enable reprocessing
-the data without having to always fetch from the server.
-"""
-
-import datetime
-import json
-import logging
-import optparse
-import os
-import subprocess
-import sys
-import urllib
-
-
-CLIENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-
-_EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-# Type of bucket to use.
-MAJOR_OS, MAJOR_OS_ASAN, MINOR_OS, MINOR_OS_GPU = range(4)
-
-
-def do_bucket(items, bucket_type):
-  """Categorizes the tasks based on one of the bucket type defined above."""
-  out = {}
-  for task in items:
-    if 'heartbeat:1' in task['tags']:
-      # Skip heartbeats.
-      continue
-
-    is_asan = 'asan:1' in task['tags']
-    os_tag = None
-    gpu_tag = None
-    for t in task['tags']:
-      if t.startswith('os:'):
-        os_tag = t[3:]
-        if os_tag == 'Linux':
-          # GPU tests still specify Linux.
-          # TODO(maruel): Fix the recipe.
-          os_tag = 'Ubuntu'
-      elif t.startswith('gpu:'):
-        gpu_tag = t[4:]
-
-    if bucket_type in (MAJOR_OS, MAJOR_OS_ASAN):
-      os_tag = os_tag.split('-')[0]
-    tag = os_tag
-    if bucket_type == MINOR_OS_GPU and gpu_tag and gpu_tag != 'none':
-      tag += ' gpu:' + gpu_tag
-    if bucket_type == MAJOR_OS_ASAN and is_asan:
-      tag += ' ASan'
-    out.setdefault(tag, []).append(task)
-
-    # Also create global buckets for ASan.
-    if bucket_type == MAJOR_OS_ASAN:
-      tag = '(any OS) ASan' if is_asan else '(any OS) Not ASan'
-      out.setdefault(tag, []).append(task)
-  return out
-
-
-def seconds_to_timedelta(seconds):
-  """Converts seconds in datetime.timedelta, stripping sub-second precision.
-
-  This is for presentation, where subsecond values for summaries is not useful.
-  """
-  return datetime.timedelta(seconds=round(seconds))
-
-
-def parse_time_option(value):
-  """Converts time as an option into a datetime.datetime.
-
-  Returns None if not specified.
-  """
-  if not value:
-    return None
-  try:
-    return _EPOCH + datetime.timedelta(seconds=int(value))
-  except ValueError:
-    pass
-  for fmt in (
-      '%Y-%m-%d',
-      '%Y-%m-%d %H:%M',
-      '%Y-%m-%dT%H:%M',
-      '%Y-%m-%d %H:%M:%S',
-      '%Y-%m-%dT%H:%M:%S',
-      '%Y-%m-%d %H:%M:%S.%f',
-      '%Y-%m-%dT%H:%M:%S.%f'):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
-
-
-def parse_time(value):
-  """Converts serialized time from the API to datetime.datetime."""
-  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
-
-
-def average(items):
-  if not items:
-    return 0.
-  return sum(items) / len(items)
-
-
-def median(items):
-  return percentile(items, 50)
-
-
-def percentile(items, percent):
-  """Uses NIST method."""
-  if not items:
-    return 0.
-  rank = percent * .01 * (len(items) + 1)
-  rank_int = int(rank)
-  rest = rank - rank_int
-  if rest and rank_int <= len(items) - 1:
-    return items[rank_int] + rest * (items[rank_int+1] - items[rank_int])
-  return items[min(rank_int, len(items) - 1)]
-
-
-def sp(dividend, divisor):
-  """Returns the percentage for dividend/divisor, safely."""
-  if not divisor:
-    return 0.
-  return 100. * float(dividend) / float(divisor)
-
-
-def fetch_data(options):
-  """Fetches data from options.swarming and writes it to options.json."""
-  if not options.start:
-    # Defaults to 25 hours ago.
-    options.start = datetime.datetime.utcnow() - datetime.timedelta(
-        seconds=25*60*60)
-  else:
-    options.start = parse_time_option(options.start)
-  if not options.end:
-    options.end = options.start + datetime.timedelta(days=1)
-  else:
-    options.end = parse_time_option(options.end)
-  url = 'tasks/list?' + urllib.urlencode(
-      {
-        'start': int((options.start - _EPOCH).total_seconds()),
-        'end': int((options.end - _EPOCH).total_seconds()),
-      })
-  cmd = [
-    sys.executable, os.path.join(CLIENT_DIR, 'swarming.py'),
-    'query',
-    '-S', options.swarming,
-    '--json', options.json,
-    # Start chocking at 1b tasks. The chromium infrastructure is currently at
-    # around 200k tasks/day.
-    '--limit', '1000000000',
-    '--progress',
-    url,
-  ]
-  if options.verbose:
-    cmd.append('--verbose')
-    cmd.append('--verbose')
-    cmd.append('--verbose')
-  logging.info('%s', ' '.join(cmd))
-  subprocess.check_call(cmd)
-  print('')
-
-
-def stats(tasks, show_cost):
-  """Calculates and prints statistics about the tasks."""
-  # Split tasks into 3 buckets.
-  # - 'rn' means ran, not idempotent
-  # - 'ri' means ran, idempotent
-  # - 'dd' means deduplicated.
-  rn = [
-    i for i in tasks
-    if not i.get('deduped_from') and not i.get('properties_hash')
-  ]
-  ri = [
-    i for i in tasks if not i.get('deduped_from') and i.get('properties_hash')
-  ]
-  dd = [i for i in tasks if i.get('deduped_from')]
-
-  # Note worthy results.
-  failures = [i for i in tasks if i.get('failure')]
-  internal_failures = [i for i in tasks if i.get('internal_failure')]
-  two_tries = [
-    i for i in tasks if i.get('try_number') == '2' and not i.get('deduped_from')
-  ]
-  # TODO(maruel): 'state'
-
-  # Summations.
-  duration_rn = sum(i.get('duration', 0.) for i in rn)
-  duration_ri = sum(i.get('duration', 0.) for i in ri)
-  duration_dd = sum(i.get('duration', 0.) for i in dd)
-  duration_total = duration_rn + duration_ri + duration_dd
-  cost_rn = sum(sum(i.get('costs_usd') or [0.]) for i in rn)
-  cost_ri = sum(sum(i.get('costs_usd') or [0.]) for i in ri)
-  cost_dd = sum(i.get('cost_saved_usd', 0.) for i in dd)
-  cost_total = cost_rn + cost_ri + cost_dd
-  pendings = [
-    (parse_time(i['started_ts']) - parse_time(i['created_ts'])).total_seconds()
-    for i in tasks if i.get('started_ts') and not i.get('deduped_from')
-  ]
-  pending_total = datetime.timedelta(seconds=round(sum(pendings)))
-  pending_avg = datetime.timedelta(seconds=round(average(pendings)))
-  pending_med = datetime.timedelta(seconds=round(median(pendings)))
-  pending_p99 = datetime.timedelta(seconds=round(percentile(pendings, 99)))
-
-  # Calculate percentages to understand load relativeness.
-  percent_rn_nb_total = sp(len(rn), len(tasks))
-  percent_ri_nb_total = sp(len(ri), len(tasks))
-  percent_dd_nb_total = sp(len(dd), len(tasks))
-  percent_dd_nb_rel = sp(len(dd), len(ri) + len(dd))
-  percent_rn_duration_total = sp(duration_rn, duration_total)
-  percent_ri_duration_total = sp(duration_ri, duration_total)
-  percent_dd_duration_total = sp(duration_dd, duration_total)
-  percent_dd_duration_rel = sp(duration_dd, duration_dd + duration_ri)
-  percent_rn_cost_total = sp(cost_rn, cost_total)
-  percent_ri_cost_total = sp(cost_ri, cost_total)
-  percent_dd_cost_total = sp(cost_dd, cost_total)
-  percent_dd_cost_rel = sp(cost_dd, cost_dd + cost_ri)
-  reliability = 100. - sp(len(internal_failures), len(tasks))
-  percent_failures = sp(len(failures), len(tasks))
-  percent_two_tries = sp(len(two_tries), len(tasks))
-
-
-  # Print results as a table.
-  if rn:
-    cost = '  %7.2f$ (%5.1f%%)' % (cost_rn, percent_rn_cost_total)
-    print(
-        '  %6d (%5.1f%%)  %18s (%5.1f%%)%s  '
-        'Real tasks executed, not idempotent' % (
-          len(rn), percent_rn_nb_total,
-          seconds_to_timedelta(duration_rn), percent_rn_duration_total,
-          cost if show_cost else ''))
-  if ri:
-    cost = '  %7.2f$ (%5.1f%%)' % (cost_ri, percent_ri_cost_total)
-    print(
-        '  %6d (%5.1f%%)  %18s (%5.1f%%)%s  '
-        'Real tasks executed, idempotent' % (
-          len(ri), percent_ri_nb_total,
-          seconds_to_timedelta(duration_ri), percent_ri_duration_total,
-          cost if show_cost else ''))
-  if ri and rn:
-    cost = '  %7.2f$      ' % (cost_rn + cost_ri)
-    print(
-        '  %6d           %18s         %s     '
-        'Real tasks executed, all types' % (
-          len(rn) + len(ri),
-          seconds_to_timedelta(duration_rn + duration_ri),
-          cost if show_cost else ''))
-  if dd:
-    cost = '  %7.2f$*(%5.1f%%)' % (cost_dd, percent_dd_cost_total)
-    print(
-        '  %6d*(%5.1f%%)  %18s*(%5.1f%%)%s  *Wasn\'t run, '
-        'previous results reused' % (
-          len(dd), percent_dd_nb_total,
-          seconds_to_timedelta(duration_dd), percent_dd_duration_total,
-          cost if show_cost else ''))
-    cost = '           (%5.1f%%)' % (percent_dd_cost_rel)
-    print(
-        '         (%5.1f%%)                     (%5.1f%%)%s  '
-        '  (relative to idempotent tasks only)' % (
-          percent_dd_nb_rel, percent_dd_duration_rel,
-          cost if show_cost else ''))
-  if int(bool(rn)) + int(bool(ri)) + int(bool(dd)) > 1:
-    cost = '           %7.2f$' % (cost_total)
-    print(
-        '  %6d           %18s%s           '
-        'Total tasks' % (
-          len(tasks), seconds_to_timedelta(duration_total),
-          cost if show_cost else ''))
-  print(
-      '        Reliability:    %7g%%    Internal errors: %-4d' % (
-        reliability, len(internal_failures)))
-  print(
-      '        Tasks failures: %-4d (%5.3f%%)' % (
-        len(failures), percent_failures))
-  print(
-      '        Retried:        %-4d (%5.3f%%)  (Upgraded an internal failure '
-      'to a successful task)' %
-        (len(two_tries), percent_two_tries))
-  print(
-      '        Pending  Total: %13s    Avg: %7s    Median: %7s  P99%%: %7s' % (
-        pending_total, pending_avg, pending_med, pending_p99))
-
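A minimal, self-contained sketch of the rn/ri/dd split computed by stats() above, using hand-written task dicts (the field names follow the deleted script; the sample values are illustrative only):

tasks = [
    {'duration': 10.0, 'costs_usd': [0.02]},                           # ran, not idempotent
    {'duration': 5.0, 'costs_usd': [0.01], 'properties_hash': 'abc'},  # ran, idempotent
    {'duration': 5.0, 'cost_saved_usd': 0.01, 'deduped_from': '123'},  # deduplicated
]
rn = [t for t in tasks if not t.get('deduped_from') and not t.get('properties_hash')]
ri = [t for t in tasks if not t.get('deduped_from') and t.get('properties_hash')]
dd = [t for t in tasks if t.get('deduped_from')]
assert (len(rn), len(ri), len(dd)) == (1, 1, 1)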
-
-def present_task_types(items, bucket_type, show_cost):
-  cost = '    Usage Cost $USD' if show_cost else ''
-  print('      Nb of Tasks              Total Duration%s' % cost)
-  buckets = do_bucket(items, bucket_type)
-  for index, (bucket, tasks) in enumerate(sorted(buckets.iteritems())):
-    if index:
-      print('')
-    print('%s:' % (bucket))
-    stats(tasks, show_cost)
-  if buckets:
-    print('')
-  print('Global:')
-  stats(items, show_cost)
-
-
-def present_users(items):
-  users = {}
-  for task in items:
-    user = ''
-    for tag in task['tags']:
-      if tag.startswith('user:'):
-        if tag[5:]:
-          user = tag[5:]
-          break
-      if tag == 'purpose:CI':
-        user = 'CI'
-        break
-      if tag == 'heartbeat:1':
-        user = 'heartbeat'
-        break
-    if user:
-      users.setdefault(user, 0)
-      users[user] += 1
-  maxlen = max(len(i) for i in users)
-  maxusers = 100
-  for index, (name, tasks) in enumerate(
-      sorted(users.iteritems(), key=lambda x: -x[1])):
-    if index == maxusers:
-      break
-    print('%3d  %-*s: %d' % (index + 1, maxlen, name, tasks))
-
-
-def main():
-  parser = optparse.OptionParser(description=sys.modules['__main__'].__doc__)
-  parser.add_option(
-      '-S', '--swarming',
-      metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-      help='Swarming server to use')
-  parser.add_option(
-      '--start', help='Starting date in UTC; defaults to 25 hours ago')
-  parser.add_option(
-      '--end', help='End date in UTC; defaults to --start+1 day')
-  parser.add_option(
-      '--no-cost', action='store_false', dest='cost', default=True,
-      help='Strip $ from display')
-  parser.add_option(
-      '--users', action='store_true', help='Display top users instead')
-  parser.add_option(
-      '--json', default='tasks.json',
-      help='File containing raw data; default: %default')
-  parser.add_option('-v', '--verbose', action='count', default=0)
-
-  group = optparse.OptionGroup(parser, 'Grouping')
-  group.add_option(
-      '--major-os', action='store_const',
-      dest='bucket', const=MAJOR_OS, default=MAJOR_OS,
-      help='Classify by OS type, independent of OS version (default)')
-  group.add_option(
-      '--minor-os', action='store_const',
-      dest='bucket', const=MINOR_OS,
-      help='Classify by minor OS version')
-  group.add_option(
-      '--gpu', action='store_const',
-      dest='bucket', const=MINOR_OS_GPU,
-      help='Classify by minor OS version and GPU type when requested')
-  group.add_option(
-      '--asan', action='store_const',
-      dest='bucket', const=MAJOR_OS_ASAN,
-      help='Classify by major OS version and ASAN')
-  parser.add_option_group(group)
-
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unsupported argument %s' % args)
-  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
-  if options.swarming:
-    fetch_data(options)
-  elif not os.path.isfile(options.json):
-    parser.error('--swarming is required.')
-
-  with open(options.json, 'rb') as f:
-    items = json.load(f)['items']
-  first = items[-1]
-  last = items[0]
-  print(
-      'From %s to %s  (%s)' % (
-        first['created_ts'].split('.')[0],
-        last['created_ts'].split('.')[0],
-        parse_time(last['created_ts']) - parse_time(first['created_ts'])
-        ))
-  print('')
-
-  if options.users:
-    present_users(items)
-  else:
-    present_task_types(items, options.bucket, options.cost)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/diff_isolates.py b/tools/swarming_client/tools/diff_isolates.py
deleted file mode 100755
index 08279d9..0000000
--- a/tools/swarming_client/tools/diff_isolates.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Perform a recursive diff on two isolated file hashes."""
-
-import argparse
-import ast
-import os
-import json
-import shutil
-import subprocess
-import sys
-import tempfile
-
-CLIENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, CLIENT_DIR)
-
-from utils import net
-
-
-def download_isolated_file(h, workdir, isolate_server, namespace):
-  """Download the isolated file with the given hash and return its contents."""
-  dst = os.path.join(workdir, h)
-  if not os.path.isfile(dst):
-    subprocess.check_call(['python', 'isolateserver.py', 'download',
-                           '-I', isolate_server,
-                           '--namespace', namespace,
-                           '-t', workdir,
-                           '-f', h, h],
-                          cwd=CLIENT_DIR)
-  with open(dst) as f:
-    return f.read()
-
-
-def get_and_parse_isolated_file(h, workdir, isolate_server, namespace):
-  """Download and parse the isolated file with the given hash."""
-  contents = download_isolated_file(h, workdir, isolate_server, namespace)
-  return ast.literal_eval(contents)
-
-
-def build_recursive_isolate_structure(h, workdir, isolate_server, namespace):
-  """Build a recursive structure from possibly nested isolated files.
-
-  Replaces isolated hashes in the 'includes' list with the contents of the
-  associated isolated file.
-  """
-  rv = get_and_parse_isolated_file(h, workdir, isolate_server, namespace)
-  rv['includes'] = sorted(
-    build_recursive_isolate_structure(i, workdir, isolate_server, namespace)
-    for i in rv.get('includes', [])
-  )
-  return rv
-
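The recursion above replaces each hash in 'includes' with the parsed .isolated it points to. A minimal sketch of the same expansion over an in-memory mapping (no server involved; the store contents and hashes are made up):

STORE = {
    'aaa': {'files': {'a.txt': {}}, 'includes': ['bbb']},
    'bbb': {'files': {'b.txt': {}}, 'includes': []},
}

def expand(h):
    # Replace each included hash with the expanded structure it names.
    node = dict(STORE[h])
    node['includes'] = [expand(i) for i in node.get('includes', [])]
    return node

assert expand('aaa')['includes'][0]['files'] == {'b.txt': {}}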
-
-def write_summary_file(h, workdir, isolate_server, namespace):
-  """Write a flattened JSON summary file for the given isolated hash."""
-  s = build_recursive_isolate_structure(h, workdir, isolate_server, namespace)
-  filename = os.path.join(workdir, 'summary_%s.json' % h)
-  with open(filename, 'wb') as f:
-    json.dump(s, f, sort_keys=True, indent=2)
-  return filename
-
-
-def diff_isolates(h1, h2, workdir, difftool, isolate_server, namespace):
-  """Flatten the given isolated hashes and diff them."""
-  f1 = write_summary_file(h1, workdir, isolate_server, namespace)
-  f2 = write_summary_file(h2, workdir, isolate_server, namespace)
-  return subprocess.call([difftool, f1, f2])
-
-
-def main():
-  parser = argparse.ArgumentParser(description=sys.modules[__name__].__doc__)
-  parser.add_argument('hash1')
-  parser.add_argument('hash2')
-  parser.add_argument('--difftool', '-d', default='diff',
-                      help='Diff tool to use.')
-  parser.add_argument('--isolate-server', '-I',
-                      default=os.environ.get('ISOLATE_SERVER', ''),
-                      help='URL of the Isolate Server to use. Defaults to the '
-                           'environment variable ISOLATE_SERVER if set. No '
-                           'need to specify https://, this is assumed.')
-  parser.add_argument('--namespace', default='default-gzip',
-                      help='The namespace to use on the Isolate Server, '
-                           'default: %(default)s')
-  parser.add_argument('--workdir', '-w',
-                      help='Working directory to use. If not specified, a '
-                           'tmp dir is created.')
-  args = parser.parse_args()
-
-  if not args.isolate_server:
-    parser.error('--isolate-server is required.')
-  try:
-    args.isolate_server = net.fix_url(args.isolate_server)
-  except ValueError as e:
-    parser.error('--isolate-server %s' % e)
-
-  using_tmp_dir = False
-  if not args.workdir:
-    using_tmp_dir = True
-    args.workdir = tempfile.mkdtemp(prefix='diff_isolates')
-  else:
-    args.workdir = os.path.abspath(args.workdir)
-  if not os.path.isdir(args.workdir):
-    os.makedirs(args.workdir)
-
-  try:
-    return diff_isolates(args.hash1, args.hash2, args.workdir, args.difftool,
-                         args.isolate_server, args.namespace)
-  finally:
-    if using_tmp_dir:
-      shutil.rmtree(args.workdir)
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/fleet.py b/tools/swarming_client/tools/fleet.py
deleted file mode 100755
index ff8546c..0000000
--- a/tools/swarming_client/tools/fleet.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Calculate statistics about tasks.
-
-Saves the data fetched from the server into a json file to enable reprocessing
-the data without having to always fetch from the server.
-"""
-
-import datetime
-import json
-import logging
-import optparse
-import os
-import subprocess
-import sys
-import urllib
-
-
-CLIENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-
-_EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-# Type of bucket to use.
-MAJOR_OS, MINOR_OS, MINOR_OS_GPU = range(3)
-
-
-def seconds_to_timedelta(seconds):
-  """Converts seconds in datetime.timedelta, stripping sub-second precision.
-
-  This is for presentation, where subsecond values for summaries is not useful.
-  """
-  return datetime.timedelta(seconds=round(seconds))
-
-
-def parse_time_option(value):
-  """Converts time as an option into a datetime.datetime.
-
-  Returns None if not specified.
-  """
-  if not value:
-    return None
-  try:
-    return _EPOCH + datetime.timedelta(seconds=int(value))
-  except ValueError:
-    pass
-  for fmt in (
-      '%Y-%m-%d',
-      '%Y-%m-%d %H:%M',
-      '%Y-%m-%dT%H:%M',
-      '%Y-%m-%d %H:%M:%S',
-      '%Y-%m-%dT%H:%M:%S',
-      '%Y-%m-%d %H:%M:%S.%f',
-      '%Y-%m-%dT%H:%M:%S.%f'):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
-
-
-def parse_time(value):
-  """Converts serialized time from the API to datetime.datetime."""
-  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
-
-
-def average(items):
-  if not items:
-    return 0.
-  return sum(items) / len(items)
-
-
-def median(items):
-  return percentile(items, 50)
-
-
-def percentile(items, percent):
-  """Uses NIST method."""
-  if not items:
-    return 0.
-  rank = percent * .01 * (len(items) + 1)
-  rank_int = int(rank)
-  rest = rank - rank_int
-  if rest and rank_int <= len(items) - 1:
-    return items[rank_int] + rest * (items[rank_int+1] - items[rank_int])
-  return items[min(rank_int, len(items) - 1)]
-
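A worked example of the interpolation performed by percentile() above, assuming the input list is already sorted (the deleted helper does not sort it itself):

items = sorted([3.0, 1.0, 2.0, 4.0])        # [1.0, 2.0, 3.0, 4.0]
rank = 50 * .01 * (len(items) + 1)          # 2.5 for the 50th percentile
rank_int, rest = int(rank), rank - int(rank)
value = items[rank_int] + rest * (items[rank_int + 1] - items[rank_int])
assert abs(value - 3.5) < 1e-9              # halfway between items[2] and items[3]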
-
-def sp(dividend, divisor):
-  """Returns the percentage for dividend/divisor, safely."""
-  if not divisor:
-    return 0.
-  return 100. * float(dividend) / float(divisor)
-
-
-def fetch_data(options):
-  """Fetches data from options.swarming and writes it to options.json."""
-  cmd = [
-    sys.executable, os.path.join(CLIENT_DIR, 'swarming.py'),
-    'query',
-    '-S', options.swarming,
-    '--json', options.json,
-    # Start choking at 1M bots. The chromium infrastructure is currently in
-    # the thousands range.
-    '--limit', '1000000',
-    '--progress',
-    'bots/list',
-  ]
-  if options.verbose:
-    cmd.append('--verbose')
-    cmd.append('--verbose')
-    cmd.append('--verbose')
-  logging.info('%s', ' '.join(cmd))
-  subprocess.check_call(cmd)
-  print('')
-
-
-def present_data(bots, bucket_type, order_count):
-  buckets = do_bucket(bots, bucket_type)
-  maxlen = max(len(i) for i in buckets)
-  print('%-*s  Alive  Dead' % (maxlen, 'Type'))
-  counts = {
-      k: [len(v), sum(1 for i in v if i.get('is_dead'))]
-        for k, v in buckets.iteritems()}
-  key = (lambda x: -x[1][0]) if order_count else (lambda x: x)
-  for bucket, count in sorted(counts.iteritems(), key=key):
-    print('%-*s: %5d %5d' % (maxlen, bucket, count[0], count[1]))
-
-
-def do_bucket(bots, bucket_type):
-  """Categorizes the bots based on one of the bucket type defined above."""
-  out = {}
-  for bot in bots:
-    # Convert dimensions from list of StringPairs to dict of list.
-    bot['dimensions'] = {i['key']: i['value'] for i in bot['dimensions']}
-    os_types = bot['dimensions']['os']
-    try:
-      os_types.remove('Linux')
-    except ValueError:
-      pass
-    if bucket_type == MAJOR_OS:
-      bucket = os_types[0]
-    else:
-      bucket = ' & '.join(os_types[1:])
-    if bucket_type == MINOR_OS_GPU:
-      gpu = bot['dimensions'].get('gpu', ['none'])[-1]
-      if gpu != 'none':
-        bucket += ' ' + gpu
-    out.setdefault(bucket, []).append(bot)
-  return out
-
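A short sketch of how a single bot would be bucketed above, on a made-up dimensions payload (key/value pairs in the shape returned by the bots/list endpoint):

bot = {'dimensions': [
    {'key': 'os', 'value': ['Linux', 'Ubuntu', 'Ubuntu-14.04']},
    {'key': 'gpu', 'value': ['none']},
]}
dims = {d['key']: d['value'] for d in bot['dimensions']}
os_types = list(dims['os'])
if 'Linux' in os_types:
    os_types.remove('Linux')
major_os = os_types[0]               # MAJOR_OS bucket, e.g. 'Ubuntu'
minor_os = ' & '.join(os_types[1:])  # MINOR_OS bucket, e.g. 'Ubuntu-14.04'
assert (major_os, minor_os) == ('Ubuntu', 'Ubuntu-14.04')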
-
-def main():
-  parser = optparse.OptionParser(description=sys.modules['__main__'].__doc__)
-  parser.add_option(
-      '-S', '--swarming',
-      metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-      help='Swarming server to use')
-  parser.add_option(
-      '--json', default='fleet.json',
-      help='File containing raw data; default: %default')
-  parser.add_option('-v', '--verbose', action='count', default=0)
-  parser.add_option('--count', action='store_true', help='Order by count')
-
-  group = optparse.OptionGroup(parser, 'Grouping')
-  group.add_option(
-      '--major-os', action='store_const',
-      dest='bucket', const=MAJOR_OS,
-      help='Classify by OS type, independent of OS version')
-  group.add_option(
-      '--minor-os', action='store_const',
-      dest='bucket', const=MINOR_OS,
-      help='Classify by minor OS version')
-  group.add_option(
-      '--gpu', action='store_const',
-      dest='bucket', const=MINOR_OS_GPU, default=MINOR_OS_GPU,
-      help='Classify by minor OS version and GPU type when requested (default)')
-  parser.add_option_group(group)
-
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unsupported argument %s' % args)
-  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
-  if options.swarming:
-    fetch_data(options)
-  elif not os.path.isfile(options.json):
-    parser.error('--swarming is required.')
-
-  with open(options.json, 'rb') as f:
-    items = json.load(f)['items']
-  present_data(items, options.bucket, options.count)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/gce_load_test_on_startup.sh b/tools/swarming_client/tools/gce_load_test_on_startup.sh
deleted file mode 100755
index 4065523..0000000
--- a/tools/swarming_client/tools/gce_load_test_on_startup.sh
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/bin/bash
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-# Script to be used on GCE slave startup to initiate a load test. Each VM will
-# fire an equivalent number of bots and clients. Fine-tune the value depending
-# on what kind of load test is desired.
-#
-# Please see https://developers.google.com/compute/docs/howtos/startupscript for
-# more details on how to use this script.
-#
-# The script may run as root, which is counter-intuitive. We don't mind much
-# because it is deleted right after the load test, but still keep this in mind!
-
-set -e
-
-## Settings
-
-# Server to load test against. Please set to your server.
-SERVER=https://CHANGE-ME-TO-PROPER-VALUE.appspot.com
-
-# Source code to use.
-REPO=https://code.google.com/p/swarming.client.git
-
-# Once the tasks are completed, one can harvest the logs back:
-#   scp "slave:/var/log/swarming/*.*" .
-LOG=/var/log/swarming
-
-# Delay to wait before starting each client load test. This softens the curve.
-CLIENT_DELAY=60s
-
-
-## Actual work starts here.
-# Installs python and git. Do not bail out if it fails, since we do want the
-# script to be usable as non-root.
-apt-get install -y git-core python || true
-
-# It will end up in /client.
-rm -rf swarming_client
-git clone $REPO swarming_client
-TOOLS_DIR=./swarming_client/tools
-mkdir -p $LOG
-
-# This assumes an 8-core system, so it's worth having 8 different python
-# processes: 4 fake bot load processes and 4 fake client load processes. Each
-# load test process creates 250 instances. This gives us a nice 1k bots and 1k
-# clients per VM, which makes it simple to size the load test: want 20k bots and
-# 20k clients? Fire 20 VMs. It assumes a high network throughput per host since
-# 250 bots + 250 clients generate a fair amount of HTTPS requests. This is not
-# a problem on GCE, as these VMs have pretty high network I/O. This may not hold
-# true on other cloud hosting providers. Tune accordingly!
-
-echo "1. Starting bots."
-BOTS_PID=
-# Each load test bot process creates multiple (default is 250) fake bots.
-for i in {1..4}; do
-  $TOOLS_DIR/swarming_load_test_bot.py -S $SERVER --suffix $i \
-      --slaves 250 \
-      --dump=$LOG/bot$i.json > $LOG/bot$i.log 2>&1 &
-  BOT_PID=$!
-  echo "  Bot $i pid: $BOT_PID"
-  BOTS_PID="$BOTS_PID $BOT_PID"
-done
-
-echo "2. Starting clients."
-# Each client will send by default 16 tasks/sec * 60 sec, so 16*60*4 = 3840
-# tasks per VM.
-CLIENTS_PID=
-for i in {1..4}; do
-  echo "  Sleeping for $CLIENT_DELAY before starting client #$i"
-  sleep $CLIENT_DELAY
-  $TOOLS_DIR/swarming_load_test_client.py -S $SERVER \
-      --send-rate 16 \
-      --duration 60 \
-      --timeout 180 \
-      --concurrent 250 \
-      --dump=$LOG/client$i.json > $LOG/client$i.log 2>&1 &
-  CLIENT_PID=$!
-  echo "  Client $i pid: $CLIENT_PID"
-  CLIENTS_PID="$CLIENTS_PID $CLIENT_PID"
-done
-
-echo "3. Waiting for the clients to complete; $CLIENTS_PID"
-for i in $CLIENTS_PID; do
-  echo "  Waiting for $i"
-  wait $i
-done
-
-echo "4. Sending a Ctrl-C to each bot process so they stop; $BOTS_PID"
-for i in $BOTS_PID; do
-  kill $i
-done
-
-echo "5. Waiting for the bot processes to stop."
-for i in $BOTS_PID; do
-  echo "  Waiting for $i"
-  wait $i || true
-done
-
-echo "6. Load test is complete."
-touch $LOG/done
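The sizing comments in the script reduce to simple per-VM arithmetic; a throwaway Python check using the defaults above (4 bot processes and 4 client processes of 250 fakes each, clients sending 16 tasks/sec for 60s):

bot_procs, client_procs, fakes_per_proc = 4, 4, 250
send_rate, duration_s = 16, 60
bots_per_vm = bot_procs * fakes_per_proc        # 1000
clients_per_vm = client_procs * fakes_per_proc  # 1000
tasks_per_vm = send_rate * duration_s * client_procs
assert (bots_per_vm, clients_per_vm, tasks_per_vm) == (1000, 1000, 3840)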
diff --git a/tools/swarming_client/tools/harvest_buildbot.py b/tools/swarming_client/tools/harvest_buildbot.py
deleted file mode 100755
index 0667a4c..0000000
--- a/tools/swarming_client/tools/harvest_buildbot.py
+++ /dev/null
@@ -1,101 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Harvest data on the Try Server.
-
-Please use sparingly. Large values for horizon will trash the Try Server memory.
-"""
-
-import os
-import optparse
-import sys
-
-
-def get_failed_builds(builder, horizon):
-  """Constructs the list of failed builds."""
-  builder.builds.cache()
-  return [
-      builder.builds[i] for i in xrange(-horizon, 0)
-      if not builder.builds[i].simplified_result
-  ]
-
-
-def get_parent_build(build):
-  """Returns the parent build for a triggered build."""
-  parent_buildername = build.properties_as_dict['parent_buildername']
-  parent_builder = build.builders[parent_buildername]
-  return parent_builder.builds[build.properties_as_dict['parent_buildnumber']]
-
-
-def parse(b, horizon):
-  print('Processing last %d entries' % horizon)
-  failed_builds = get_failed_builds(b.builders['swarm_triggered'], horizon)
-  rate = 100. * (1. - float(len(failed_builds)) / float(horizon))
-  print('Success: %3.1f%%' % rate)
-
-  NO_KEY = 'Warning: Unable to find any tests with the name'
-  HTTP_404 = 'threw HTTP Error 404: Not Found'
-  for build in failed_builds:
-    print('%s/%d on %s' % (build.builder.name, build.number, build.slave.name))
-    swarm_trigger_tests = build.steps['swarm_trigger_tests']
-    base_unittests = build.steps['base_unittests']
-    fail = ''
-    if swarm_trigger_tests.simplified_result:
-      fail = 'buildbot failed to pass arguments'
-    elif not swarm_trigger_tests.stdio:
-      fail = 'old swarm_trigger_step.py version'
-    else:
-      stdio = base_unittests.stdio
-      if NO_KEY in stdio:
-        fail = 'Failed to retrieve keys.'
-      elif HTTP_404 in stdio:
-        fail = 'Failed to retrieve keys with 404.'
-
-    if fail:
-      print('  Triggering failed: %s' % fail)
-    else:
-      # Print the first few lines.
-      lines = base_unittests.stdio.splitlines()[:15]
-      print('\n'.join('  ' + l for l in lines))
-    print('  %s  %s  %s  %s' % (
-      build.properties_as_dict['use_swarm_client_revision'],
-      build.properties_as_dict['swarm_hashes'],
-      build.properties_as_dict.get('use_swarm_client_revision'),
-      build.properties_as_dict.get('testfilter')))
-
-
-def main():
-  parser = optparse.OptionParser()
-  parser.add_option('-b', '--buildbot_json', help='path to buildbot_json.py')
-  parser.add_option(
-      '-u', '--url',
-      default='http://build.chromium.org/p/tryserver.chromium/',
-      help='server url, default: %default')
-  parser.add_option('-H', '--horizon', default=100, type='int')
-  options, args = parser.parse_args(None)
-  if args:
-    parser.error('Unsupported args: %s' % args)
-
-  if options.horizon < 10 or options.horizon > 2000:
-    parser.error('Use reasonable --horizon value')
-
-  if options.buildbot_json:
-    options.buildbot_json = os.path.abspath(options.buildbot_json)
-    if not os.path.isdir(options.buildbot_json):
-      parser.error('Pass a valid directory path to --buildbot_json')
-    sys.path.insert(0, options.buildbot_json)
-
-  try:
-    import buildbot_json  # pylint: disable=F0401
-  except ImportError:
-    parser.error('Pass a directory path to buildbot_json.py with -b')
-
-  b = buildbot_json.Buildbot(options.url)
-  parse(b, options.horizon)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/isolateserver_load_test.py b/tools/swarming_client/tools/isolateserver_load_test.py
deleted file mode 100755
index 06dedb5..0000000
--- a/tools/swarming_client/tools/isolateserver_load_test.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Uploads a ton of stuff to isolateserver to test its handling.
-
-Generates a histogram of the latencies to download a just-uploaded file.
-
-Note that it only looks at uploading and downloading and does not test
-preupload, which is datastore read bound.
-"""
-
-import functools
-import json
-import logging
-import optparse
-import os
-import random
-import sys
-import time
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from third_party import colorama
-
-import isolateserver
-
-from utils import graph
-from utils import threading_utils
-
-
-class Randomness(object):
-  def __init__(self, random_pool_size=1024):
-    """Creates 1mb of random data in a pool in 1kb chunks."""
-    self.pool = [
-      ''.join(chr(random.randrange(256)) for _ in xrange(1024))
-      for _ in xrange(random_pool_size)
-    ]
-
-  def gen(self, size):
-    """Returns a str containing random data from the pool of size |size|."""
-    chunks = int(size / 1024)
-    rest = size - (chunks*1024)
-    data = ''.join(random.choice(self.pool) for _ in xrange(chunks))
-    data += random.choice(self.pool)[:rest]
-    return data
-
-
-class Progress(threading_utils.Progress):
-  def _render_columns(self):
-    """Prints the size data as 'units'."""
-    columns_as_str = [
-        str(self._columns[0]),
-        graph.to_units(self._columns[1]).rjust(6),
-        str(self._columns[2]),
-    ]
-    max_len = max((len(columns_as_str[0]), len(columns_as_str[2])))
-    return '/'.join(i.rjust(max_len) for i in columns_as_str)
-
-
-def print_results(results, columns, buckets):
-  delays = [i[0] for i in results if isinstance(i[0], float)]
-  failures = [i for i in results if not isinstance(i[0], float)]
-  sizes = [i[1] for i in results]
-
-  print('%sSIZES%s (bytes):' % (colorama.Fore.RED, colorama.Fore.RESET))
-  graph.print_histogram(
-      graph.generate_histogram(sizes, buckets), columns, '%d')
-  print('')
-  total_size = sum(sizes)
-  print('Total size  : %s' % graph.to_units(total_size))
-  print('Total items : %d' % len(sizes))
-  print('Average size: %s' % graph.to_units(total_size / len(sizes)))
-  print('Largest item: %s' % graph.to_units(max(sizes)))
-  print('')
-  print('%sDELAYS%s (seconds):' % (colorama.Fore.RED, colorama.Fore.RESET))
-  graph.print_histogram(
-      graph.generate_histogram(delays, buckets), columns, '%.3f')
-
-  if failures:
-    print('')
-    print('%sFAILURES%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
-    print(
-        '\n'.join('  %s (%s)' % (i[0], graph.to_units(i[1])) for i in failures))
-
-
-def gen_size(mid_size):
-  """Interesting non-guassian distribution, to get a few very large files.
-
-  Found via guessing on Wikipedia. Module 'random' says it's threadsafe.
-  """
-  return int(random.gammavariate(3, 2) * mid_size / 4)
-
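gen_size() above draws item sizes from a gamma distribution to get a long right tail; since gammavariate(3, 2) has mean 6, sizes average out to roughly 1.5 * mid_size. A quick empirical check using the same formula and only the standard library:

import random

def gen_size(mid_size):
    return int(random.gammavariate(3, 2) * mid_size / 4)

sizes = [gen_size(100 * 1024) for _ in range(100000)]
print('avg=%d  max=%d' % (sum(sizes) // len(sizes), max(sizes)))  # avg near 150 KiB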
-
-def send_and_receive(random_pool, storage, progress, size):
-  """Sends a random file and gets it back.
-
-  # TODO(maruel): Add a batching argument of value [1, 500] to batch requests.
-
-  Returns (delay, size)
-  """
-  # Create a file out of the pool.
-  start = time.time()
-  batch = 1
-  items = [
-    isolateserver.BufferItem(random_pool.gen(size), False)
-    for _ in xrange(batch)
-  ]
-  try:
-    # len(_uploaded) may be < len(items); this happens if the items are not
-    # random enough or the value of --mid-size is very low compared to --items.
-    _uploaded = storage.upload_items(items)
-
-    start = time.time()
-
-    cache = isolateserver.MemoryCache()
-    queue = isolateserver.FetchQueue(storage, cache)
-    for i in items:
-      queue.add(i.digest, i.size)
-
-    waiting = [i.digest for i in items]
-    while waiting:
-      waiting.remove(queue.wait(waiting))
-
-    expected = {i.digest: ''.join(i.content()) for i in items}
-    for d in cache.cached_set():
-      actual = cache.read(d)
-      assert expected.pop(d) == actual
-    assert not expected, expected
-
-    duration = max(0, time.time() - start)
-  except isolateserver.MappingError as e:
-    duration = str(e)
-  if isinstance(duration, float):
-    progress.update_item('', index=1, data=size)
-  else:
-    progress.update_item('', index=1)
-  return (duration, size)
-
-
-def main():
-  colorama.init()
-
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '-I', '--isolate-server',
-      metavar='URL', default='',
-      help='Isolate server to use')
-  parser.add_option(
-      '--namespace', default='temporary%d-gzip' % time.time(), metavar='XX',
-      help='Namespace to use on the server, default: %default')
-  parser.add_option(
-      '--threads', type='int', default=16, metavar='N',
-      help='Parallel worker threads to use, default:%default')
-
-  data_group = optparse.OptionGroup(parser, 'Amount of data')
-  graph.unit_option(
-      data_group, '--items', default=0, help='Number of items to upload')
-  graph.unit_option(
-      data_group, '--max-size', default=0,
-      help='Loop until this amount of data was transferred')
-  graph.unit_option(
-      data_group, '--mid-size', default=100*1024,
-      help='Rough average size of each item, default:%default')
-  parser.add_option_group(data_group)
-
-  ui_group = optparse.OptionGroup(parser, 'Result histogram')
-  ui_group.add_option(
-      '--columns', type='int', default=graph.get_console_width(), metavar='N',
-      help='Width of histogram, default:%default')
-  ui_group.add_option(
-      '--buckets', type='int', default=20, metavar='N',
-      help='Number of histogram buckets, default:%default')
-  parser.add_option_group(ui_group)
-
-  log_group = optparse.OptionGroup(parser, 'Logging')
-  log_group.add_option(
-      '--dump', metavar='FOO.JSON', help='Dumps to json file')
-  log_group.add_option(
-      '-v', '--verbose', action='store_true', help='Enable logging')
-  parser.add_option_group(log_group)
-
-  options, args = parser.parse_args()
-
-  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
-  if args:
-    parser.error('Unsupported args: %s' % args)
-  if bool(options.max_size) == bool(options.items):
-    parser.error(
-        'Use one of --max-size or --items.\n'
-        '  Use --max-size if you want to run it until NN bytes were '
-        'transferred.\n'
-        '  Otherwise use --items to run it for NN items.')
-  options.isolate_server = options.isolate_server.rstrip('/')
-  if not options.isolate_server:
-    parser.error('--isolate-server is required.')
-
-  print(
-      ' - Using %d threads,  items=%d,  max-size=%d,  mid-size=%d' % (
-      options.threads, options.items, options.max_size, options.mid_size))
-
-  start = time.time()
-
-  random_pool = Randomness()
-  print(' - Generated pool after %.1fs' % (time.time() - start))
-
-  columns = [('index', 0), ('data', 0), ('size', options.items)]
-  progress = Progress(columns)
-  storage = isolateserver.get_storage(options.isolate_server, options.namespace)
-  do_item = functools.partial(
-      send_and_receive,
-      random_pool,
-      storage,
-      progress)
-
-  # TODO(maruel): Handling Ctrl-C should:
-  # - Stop adding tasks.
-  # - Stop scheduling tasks in ThreadPool.
-  # - Wait for the remaining ongoing tasks to complete.
-  # - Still print details and write the json file.
-  with threading_utils.ThreadPoolWithProgress(
-      progress, options.threads, options.threads, 0) as pool:
-    if options.items:
-      for _ in xrange(options.items):
-        pool.add_task(0, do_item, gen_size(options.mid_size))
-        progress.print_update()
-    elif options.max_size:
-      # This one is approximate.
-      total = 0
-      while True:
-        size = gen_size(options.mid_size)
-        progress.update_item('', size=1)
-        progress.print_update()
-        pool.add_task(0, do_item, size)
-        total += size
-        if total >= options.max_size:
-          break
-    results = sorted(pool.join())
-
-  print('')
-  print(' - Took %.1fs.' % (time.time() - start))
-  print('')
-  print_results(results, options.columns, options.buckets)
-  if options.dump:
-    with open(options.dump, 'w') as f:
-      json.dump(results, f, separators=(',',':'))
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/parallel_execution.py b/tools/swarming_client/tools/parallel_execution.py
deleted file mode 100644
index 706d54c..0000000
--- a/tools/swarming_client/tools/parallel_execution.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Toolset to run multiple Swarming tasks in parallel."""
-
-import getpass
-import json
-import os
-import optparse
-import subprocess
-import sys
-import tempfile
-import time
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(BASE_DIR)
-
-sys.path.insert(0, ROOT_DIR)
-
-import auth
-import isolateserver
-from utils import logging_utils
-from utils import threading_utils
-from utils import tools
-
-
-def task_to_name(name, dimensions, isolated_hash):
-  """Returns a task name the same way swarming.py generates them."""
-  return '%s/%s/%s' % (
-      name,
-      '_'.join('%s=%s' % (k, v) for k, v in sorted(dimensions.iteritems())),
-      isolated_hash)
-
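A quick check of the task naming scheme above on made-up inputs (written with Python 3's dict.items(); the deleted module uses iteritems()):

name, isolated_hash = 'base_unittests', 'deadbeef'
dimensions = {'os': 'Linux', 'gpu': 'none'}
task_name = '%s/%s/%s' % (
    name,
    '_'.join('%s=%s' % (k, v) for k, v in sorted(dimensions.items())),
    isolated_hash)
assert task_name == 'base_unittests/gpu=none_os=Linux/deadbeef'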
-
-def capture(cmd):
-  assert all(isinstance(i, basestring) for i in cmd), cmd
-  start = time.time()
-  p = subprocess.Popen(
-      [sys.executable] + cmd, cwd=ROOT_DIR, stdout=subprocess.PIPE)
-  out = p.communicate()[0]
-  return p.returncode, out, time.time() - start
-
-
-def trigger(swarming_server, isolate_server, task_name, isolated_hash, args):
-  """Triggers a specified .isolated file."""
-  fd, jsonfile = tempfile.mkstemp(prefix=u'swarming')
-  os.close(fd)
-  try:
-    cmd = [
-      'swarming.py', 'trigger',
-      '--swarming', swarming_server,
-      '--isolate-server', isolate_server,
-      '--task-name', task_name,
-      '--dump-json', jsonfile,
-      isolated_hash,
-    ]
-    returncode, out, duration = capture(cmd + args)
-    with open(jsonfile) as f:
-      data = json.load(f)
-    task_id = str(data['tasks'][task_name]['task_id'])
-    return returncode, out, duration, task_id
-  finally:
-    os.remove(jsonfile)
-
-
-def collect(swarming_server, task_id):
-  """Collects results of a swarming task."""
-  cmd = ['swarming.py', 'collect', '--swarming', swarming_server, task_id]
-  return capture(cmd)
-
-
-class Runner(object):
-  """Runners runs tasks in parallel on Swarming."""
-  def __init__(
-      self, swarming_server, isolate_server, add_task, progress,
-      extra_trigger_args):
-    self.swarming_server = swarming_server
-    self.isolate_server = isolate_server
-    self.add_task = add_task
-    self.progress = progress
-    self.extra_trigger_args = extra_trigger_args
-
-  def trigger(self, task_name, isolated_hash, dimensions):
-    args = sum((['--dimension', k, v] for k, v in dimensions.iteritems()), [])
-    returncode, stdout, duration, task_id = trigger(
-        self.swarming_server,
-        self.isolate_server,
-        task_name,
-        isolated_hash,
-        self.extra_trigger_args + args)
-    step_name = '%s (%3.2fs)' % (task_name, duration)
-    if returncode:
-      line = 'Failed to trigger %s\n%s' % (step_name, stdout)
-      self.progress.update_item(line, index=1)
-      return
-    self.progress.update_item('Triggered %s' % step_name, index=1)
-    self.add_task(0, self.collect, task_name, task_id, dimensions)
-
-  def collect(self, task_name, task_id, dimensions):
-    returncode, stdout, duration = collect(self.swarming_server, task_id)
-    step_name = '%s (%3.2fs)' % (task_name, duration)
-    if returncode:
-      # Only print the output for failures; successes are unexciting.
-      self.progress.update_item(
-          'Failed %s:\n%s' % (step_name, stdout), index=1)
-      return (task_name, dimensions, stdout)
-    self.progress.update_item('Passed %s' % step_name, index=1)
-
-
-def run_swarming_tasks_parallel(
-    swarming_server, isolate_server, extra_trigger_args, tasks):
-  """Triggers swarming tasks in parallel and gets results.
-
-  This is done by using one thread per task and shelling out swarming.py.
-
-  Arguments:
-    extra_trigger_args: list of additional flags to pass down to
-        'swarming.py trigger'
-    tasks: list of tuple(task_name, isolated_hash, dimensions) where dimension
-        are --dimension flags to provide when triggering the task.
-
-  Yields:
-    tuple(name, dimensions, stdout) for the tasks that failed.
-  """
-  runs = len(tasks)
-  # trigger + collect
-  total = 2 * runs
-  failed_tasks = []
-  progress = threading_utils.Progress([('index', 0), ('size', total)])
-  progress.use_cr_only = False
-  start = time.time()
-  with threading_utils.ThreadPoolWithProgress(
-      progress, runs, runs, total) as pool:
-    runner = Runner(
-        swarming_server, isolate_server, pool.add_task, progress,
-        extra_trigger_args)
-
-    for task_name, isolated_hash, dimensions in tasks:
-      pool.add_task(0, runner.trigger, task_name, isolated_hash, dimensions)
-
-    # Runner.collect() only returns task failures.
-    for failed_task in pool.iter_results():
-      task_name, dimensions, stdout = failed_task
-      yield task_name, dimensions, stdout
-      failed_tasks.append(task_name)
-
-  duration = time.time() - start
-  print('\nCompleted in %3.2fs' % duration)
-  if failed_tasks:
-    print('Detected the following failures:')
-    for task in sorted(failed_tasks):
-      print('  %s' % task)
-
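The deleted helper shells out to swarming.py from a custom thread pool; below is a rough sketch of the same trigger-then-collect fan-out using only the standard library. The trigger() and collect() stubs are placeholders for illustration, not the real swarming.py interface:

from concurrent.futures import ThreadPoolExecutor, as_completed

def trigger(task_name):
    return 'task-id-for-%s' % task_name     # stand-in for 'swarming.py trigger'

def collect(task_id):
    return True, 'output of %s' % task_id   # stand-in for 'swarming.py collect'

task_names = ['t1', 't2', 't3']
with ThreadPoolExecutor(max_workers=len(task_names)) as pool:
    triggered = [pool.submit(trigger, n) for n in task_names]
    collected = [pool.submit(collect, f.result()) for f in as_completed(triggered)]
    for f in as_completed(collected):
        ok, out = f.result()
        if not ok:
            print(out)  # like Runner.collect(), only failures are reported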
-
-class OptionParser(logging_utils.OptionParserWithLogging):
-  def __init__(self, **kwargs):
-    logging_utils.OptionParserWithLogging.__init__(self, **kwargs)
-    self.server_group = optparse.OptionGroup(self, 'Server')
-    self.server_group.add_option(
-        '-S', '--swarming',
-        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-        help='Swarming server to use')
-    isolateserver.add_isolate_server_options(self.server_group)
-    self.add_option_group(self.server_group)
-    auth.add_auth_options(self)
-    self.add_option(
-        '-d', '--dimension', default=[], action='append', nargs=2,
-        dest='dimensions', metavar='FOO bar',
-        help='dimension to filter on')
-    self.add_option(
-        '--priority', type='int',
-        help='The lower the value, the more important the task is. It may be '
-            'important to specify a higher priority since the default value '
-            'will make the task be triggered only when the bots are idle.')
-    self.add_option(
-        '--deadline', type='int', default=6*60*60,
-        help='Seconds to allow the task to be pending for a bot to run before '
-            'this task request expires.')
-
-  def parse_args(self, *args, **kwargs):
-    options, args = logging_utils.OptionParserWithLogging.parse_args(
-        self, *args, **kwargs)
-    options.swarming = options.swarming.rstrip('/')
-    if not options.swarming:
-      self.error('--swarming is required.')
-    auth.process_auth_options(self, options)
-    isolateserver.process_isolate_server_options(self, options, False, True)
-    options.dimensions = dict(options.dimensions)
-    return options, args
-
-  def format_description(self, _):
-    return self.description
diff --git a/tools/swarming_client/tools/run_on_bots.py b/tools/swarming_client/tools/run_on_bots.py
deleted file mode 100755
index 9d4b188..0000000
--- a/tools/swarming_client/tools/run_on_bots.py
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Automated maintenance tool to run a script on bots.
-
-To use this script, write a self-contained python script (use a .zip if
-necessary), specify it on the command line and it will be packaged and triggered
-on all the swarming bots corresponding to the --dimension filters specified, or
-all the bots if no filter is specified.
-"""
-
-__version__ = '0.1'
-
-import os
-import tempfile
-import shutil
-import subprocess
-import sys
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-
-# Must be first import.
-import parallel_execution
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import tools
-
-
-def get_bot_list(swarming_server, dimensions, dead_only):
-  """Returns a list of swarming bots."""
-  cmd = [
-    sys.executable, 'swarming.py', 'bots',
-    '--swarming', swarming_server,
-    '--bare',
-  ]
-  for k, v in sorted(dimensions.iteritems()):
-    cmd.extend(('--dimension', k, v))
-  if dead_only:
-    cmd.append('--dead-only')
-  return subprocess.check_output(cmd, cwd=ROOT_DIR).splitlines()
-
-
-def archive(isolate_server, script):
-  """Archives the tool and return the sha-1."""
-  base_script = os.path.basename(script)
-  isolate = {
-    'variables': {
-      'command': ['python', base_script],
-      'files': [base_script],
-    },
-  }
-  tempdir = tempfile.mkdtemp(prefix=u'run_on_bots')
-  try:
-    isolate_file = os.path.join(tempdir, 'tool.isolate')
-    isolated_file = os.path.join(tempdir, 'tool.isolated')
-    with open(isolate_file, 'wb') as f:
-      f.write(str(isolate))
-    shutil.copyfile(script, os.path.join(tempdir, base_script))
-    cmd = [
-      sys.executable, 'isolate.py', 'archive',
-      '--isolate-server', isolate_server,
-      '-i', isolate_file,
-      '-s', isolated_file,
-    ]
-    return subprocess.check_output(cmd, cwd=ROOT_DIR).split()[0]
-  finally:
-    file_path.rmtree(tempdir)
-
-
-def run_serial(
-    swarming_server, isolate_server, priority, deadline, repeat, isolated_hash,
-    name, bots):
-  """Runs the task one at a time.
-
-  This will be mainly bound by task scheduling latency, especially if the bots
-  are busy and the priority is low.
-  """
-  result = 0
-  for i in xrange(repeat):
-    for bot in bots:
-      suffix = '/%d' % i if repeat > 1 else ''
-      task_name = parallel_execution.task_to_name(
-          name, {'id': bot}, isolated_hash) + suffix
-      cmd = [
-        sys.executable, 'swarming.py', 'run',
-        '--swarming', swarming_server,
-        '--isolate-server', isolate_server,
-        '--priority', priority,
-        '--deadline', deadline,
-        '--dimension', 'id', bot,
-        '--task-name', task_name,
-        isolated_hash,
-      ]
-      r = subprocess.call(cmd, cwd=ROOT_DIR)
-      result = max(r, result)
-  return result
-
-
-def run_parallel(
-    swarming_server, isolate_server, priority, deadline, repeat, isolated_hash,
-    name, bots):
-  tasks = []
-  for i in xrange(repeat):
-    suffix = '/%d' % i if repeat > 1 else ''
-    tasks.extend(
-        (
-          parallel_execution.task_to_name(
-              name, {'id': bot}, isolated_hash) + suffix,
-          isolated_hash,
-          {'id': bot},
-        ) for bot in bots)
-  extra_args = ['--priority', priority, '--deadline', deadline]
-  print('Using priority %s' % priority)
-  for failed_task in parallel_execution.run_swarming_tasks_parallel(
-      swarming_server, isolate_server, extra_args, tasks):
-    _name, dimensions, stdout = failed_task
-    print('%sFailure: %s%s\n%s' % (
-      colorama.Fore.RED, dimensions, colorama.Fore.RESET, stdout))
-
-
-def main():
-  parser = parallel_execution.OptionParser(
-      usage='%prog [options] script.py', version=__version__)
-  parser.add_option(
-      '--serial', action='store_true',
-      help='Runs the task serially, to be used when debugging problems since '
-           'it\'s slow')
-  parser.add_option(
-      '--repeat', type='int', default=1,
-      help='Runs the task multiple times on each bot, meant to be used as a '
-           'load test')
-  options, args = parser.parse_args()
-
-  if len(args) != 1:
-    parser.error(
-        'Must pass one python script to run. Use --help for more details')
-
-  if not options.priority:
-    parser.error(
-        'Please provide the --priority option. Either use a very low number\n'
-        'so the task completes as fast as possible, or a high number so the\n'
-        'task only runs when the bot is idle.')
-
-  # 1. Query the bots list.
-  bots = get_bot_list(options.swarming, options.dimensions, False)
-  print('Found %d bots to process' % len(bots))
-  if not bots:
-    return 1
-
-  dead_bots = get_bot_list(options.swarming, options.dimensions, True)
-  if dead_bots:
-    print('Warning: found %d dead bots' % len(dead_bots))
-
-  # 2. Archive the script to run.
-  isolated_hash = archive(options.isolate_server, args[0])
-  print('Running %s' % isolated_hash)
-
-  # 3. Trigger the tasks.
-  name = os.path.basename(args[0])
-  if options.serial:
-    return run_serial(
-        options.swarming,
-        options.isolate_server,
-        str(options.priority),
-        str(options.deadline),
-        options.repeat,
-        isolated_hash,
-        name,
-        bots)
-
-  return run_parallel(
-      options.swarming,
-      options.isolate_server,
-      str(options.priority),
-      str(options.deadline),
-      options.repeat,
-      isolated_hash,
-      name,
-      bots)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/run_swarming_tests_on_swarming.py b/tools/swarming_client/tools/run_swarming_tests_on_swarming.py
deleted file mode 100755
index 9b6cb31..0000000
--- a/tools/swarming_client/tools/run_swarming_tests_on_swarming.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Runs the whole set of swarming client unit tests on swarming itself.
-
-This is done in a few steps:
-  - Archive the whole directory as a single .isolated file.
-  - Create one test-specific .isolated for each test to run. The file is created
-    directly and archived manually with isolateserver.py.
-  - Trigger each of these test-specific .isolated file per OS.
-  - Get all results out of order.
-"""
-
-__version__ = '0.1'
-
-import glob
-import logging
-import os
-import subprocess
-import sys
-import tempfile
-import time
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-
-# Must be first import.
-import parallel_execution
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import tools
-
-
-def check_output(cmd):
-  return subprocess.check_output([sys.executable] + cmd, cwd=ROOT_DIR)
-
-
-def archive_tree(isolate_server):
-  """Archives a whole tree and return the sha1 of the .isolated file.
-
-  Manually creates a temporary isolated file and archives it.
-  """
-  cmd = [
-      'isolateserver.py', 'archive', '--isolate-server', isolate_server,
-      ROOT_DIR, '--blacklist="\\.git"',
-  ]
-  if logging.getLogger().isEnabledFor(logging.INFO):
-    cmd.append('--verbose')
-  out = check_output(cmd)
-  return out.split()[0]
-
-
-def archive_isolated_triggers(isolate_server, tree_isolated, tests):
-  """Creates and archives all the .isolated files for the tests at once.
-
-  Archiving them in one batch is faster than archiving each file individually.
-  Also the .isolated files can be reused across OSes, reducing the amount of
-  I/O.
-
-  Returns:
-    list of (test, sha1) tuples.
-  """
-  logging.info('archive_isolated_triggers(%s, %s)', tree_isolated, tests)
-  tempdir = tempfile.mkdtemp(prefix=u'run_swarming_tests_on_swarming_')
-  try:
-    isolateds = []
-    for test in tests:
-      test_name = os.path.basename(test)
-      # Creates a manual .isolated file. See
-      # https://code.google.com/p/swarming/wiki/IsolatedDesign for more details.
-      isolated = {
-        'algo': 'sha-1',
-        'command': ['python', test],
-        'includes': [tree_isolated],
-        'read_only': 0,
-        'version': '1.4',
-      }
-      v = os.path.join(tempdir, test_name + '.isolated')
-      tools.write_json(v, isolated, True)
-      isolateds.append(v)
-    cmd = [
-        'isolateserver.py', 'archive', '--isolate-server', isolate_server,
-    ] + isolateds
-    if logging.getLogger().isEnabledFor(logging.INFO):
-      cmd.append('--verbose')
-    items = [i.split() for i in check_output(cmd).splitlines()]
-    assert len(items) == len(tests)
-    assert all(
-        items[i][1].endswith(os.path.basename(tests[i]) + '.isolated')
-        for i in xrange(len(tests)))
-    return zip(tests, [i[0] for i in items])
-  finally:
-    file_path.rmtree(tempdir)
-
-
-
-def run_swarming_tests_on_swarming(
-    swarming_server, isolate_server, priority, oses, tests, logs,
-    no_idempotent):
-  """Archives, triggers swarming jobs and gets results."""
-  print('Archiving the whole tree.')
-  start = time.time()
-  tree_isolated = archive_tree(isolate_server)
-
-  # Create and archive all the .isolated files.
-  isolateds = archive_isolated_triggers(isolate_server, tree_isolated, tests)
-  print('Archival took %3.2fs' % (time.time() - start))
-
-  exploded = []
-  for test_path, isolated_hash in isolateds:
-    logging.debug('%s: %s', test_path, isolated_hash)
-    test_name = os.path.basename(test_path).split('.')[0]
-    for platform in oses:
-      exploded.append((test_name, platform, isolated_hash))
-
-  tasks = [
-    (
-      parallel_execution.task_to_name(name, {'os': platform}, isolated_hash),
-      isolated_hash,
-      {'os': platform},
-    ) for name, platform, isolated_hash in exploded
-  ]
-
-  extra_args = [
-    '--hard-timeout', '180',
-  ]
-  if not no_idempotent:
-    extra_args.append('--idempotent')
-  if priority:
-    extra_args.extend(['--priority', str(priority)])
-    print('Using priority %s' % priority)
-
-  result = 0
-  for failed_task in parallel_execution.run_swarming_tasks_parallel(
-      swarming_server, isolate_server, extra_args, tasks):
-    test_name, dimensions, stdout = failed_task
-    if logs:
-      # Write the logs as they are retrieved.
-      if not os.path.isdir(logs):
-        os.makedirs(logs)
-      name = '%s_%s.log' % (dimensions['os'], test_name.split('/', 1)[0])
-      with open(os.path.join(logs, name), 'wb') as f:
-        f.write(stdout)
-    result = 1
-  return result
-
-
-def main():
-  parser = parallel_execution.OptionParser(
-              usage='%prog [options]', version=__version__)
-  parser.add_option(
-      '--logs',
-      help='Destination where to store the failure logs (recommended!)')
-  parser.add_option('-o', '--os', help='Run tests only on this OS')
-  parser.add_option(
-      '-t', '--test', action='append',
-      help='Run only these tests; can be specified multiple times')
-  parser.add_option(
-      '--no-idempotent', action='store_true',
-      help='Do not use --idempotent to detect flaky tests')
-  options, args = parser.parse_args()
-  if args:
-    parser.error('Unsupported argument %s' % args)
-
-  oses = ['Linux', 'Mac', 'Windows']
-  tests = [
-      os.path.relpath(i, ROOT_DIR)
-      for i in (
-      glob.glob(os.path.join(ROOT_DIR, 'tests', '*_test.py')) +
-      glob.glob(os.path.join(ROOT_DIR, 'googletest', 'tests', '*_test.py')))
-  ]
-  valid_tests = sorted(map(os.path.basename, tests))
-  assert len(valid_tests) == len(set(valid_tests)), (
-      'Can\'t have 2 tests with the same base name')
-
-  if options.test:
-    for t in options.test:
-      if not t in valid_tests:
-        parser.error(
-            '--test %s is unknown. Valid values are:\n%s' % (
-              t, '\n'.join('  ' + i for i in valid_tests)))
-    filters = tuple(os.path.sep + t for t in options.test)
-    tests = [t for t in tests if t.endswith(filters)]
-
-  if options.os:
-    if options.os not in oses:
-      parser.error(
-          '--os %s is unknown. Valid values are %s' % (
-            options.os, ', '.join(sorted(oses))))
-    oses = [options.os]
-
-  if sys.platform in ('win32', 'cygwin'):
-    # If we are on Windows, don't generate the tests for Linux and Mac since
-    # they use symlinks and we can't create symlinks on Windows.
-    oses = ['Windows']
-    if options.os != 'win32':
-      print('Linux and Mac tests skipped since running on Windows.')
-
-  return run_swarming_tests_on_swarming(
-      options.swarming,
-      options.isolate_server,
-      options.priority,
-      oses,
-      tests,
-      options.logs,
-      options.no_idempotent)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/spam.isolate b/tools/swarming_client/tools/spam.isolate
deleted file mode 100644
index 014eb07..0000000
--- a/tools/swarming_client/tools/spam.isolate
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-{
-  'variables': {
-    'command': [
-      'python',
-      'spam.py',
-    ],
-    'files': [
-      'spam.py',
-    ],
-  },
-}
diff --git a/tools/swarming_client/tools/spam.py b/tools/swarming_client/tools/spam.py
deleted file mode 100755
index 8546c38..0000000
--- a/tools/swarming_client/tools/spam.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""spam.py spams stdout for load testing the stdout handler.
-
-To use on the server, use:
-  export ISOLATE=https://your-server.appspot.com
-  export SWARMING=https://your-server.appspot.com
-  ../isolate.py archive -I $ISOLATE -i spam.isolate -s spam.isolated
-  # Where Linux can also be Mac or Windows.
-  ../swarming.py run -I $ISOLATE -S $SWARMING spam.isolated -d os Linux \
---priority 10 -- --duration 600 --sleep 0.5 --size 1024
-
-./run_on_bots.py can also be used to trigger systematically on all bots.
- """
-
-import optparse
-import sys
-import time
-
-
-def main():
-  parser = optparse.OptionParser(
-      description=sys.modules[__name__].__doc__, usage='%prog [options]')
-  parser.format_description = lambda _: parser.description
-  parser.add_option(
-      '--duration', type='float', default=5., help='Duration in seconds')
-  parser.add_option(
-      '--sleep', type='float', default=1.,
-      help='Sleep in seconds between bursts')
-  parser.add_option(
-      '--size', type='int', default=10, help='Data written at each burst')
-  options, args = parser.parse_args()
-  if args:
-    parser.error('Unknown args: %s' % args)
-  if options.duration <= 0:
-    parser.error('Invalid --duration')
-  if options.sleep < 0:
-    parser.error('Invalid --sleep')
-  if options.size < 1:
-    parser.error('Invalid --size')
-
-  print('Duration: %gs' % options.duration)
-  print('Sleep: %gs' % options.sleep)
-  print('Bursts size: %d' % options.size)
-  start = time.time()
-  end = start + options.duration
-  index = 0
-  while True:
-    sys.stdout.write(str(index) * (options.size - 1))
-    sys.stdout.write('\n')
-    if time.time() > end:
-      break
-    time.sleep(options.sleep)
-    index = (index + 1) % 10
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/swarming_count_tasks.py b/tools/swarming_client/tools/swarming_count_tasks.py
deleted file mode 100755
index 2d2e3dc..0000000
--- a/tools/swarming_client/tools/swarming_count_tasks.py
+++ /dev/null
@@ -1,266 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Calculate statistics about tasks.
-
-Saves the data fetched from the server into a json file to enable reprocessing
-the data without having to always fetch from the server.
-"""
-
-import collections
-import datetime
-import json
-import logging
-import optparse
-import os
-import subprocess
-import Queue
-import threading
-import sys
-import urllib
-
-
-CLIENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, CLIENT_DIR)
-
-
-from third_party import colorama
-from utils import graph
-
-
-_EPOCH = datetime.datetime.utcfromtimestamp(0)
-
-
-def parse_time_option(value):
-  """Converts time as an option into a datetime.datetime.
-
-  Returns None if not specified.
-  """
-  if not value:
-    return None
-  try:
-    return _EPOCH + datetime.timedelta(seconds=int(value))
-  except ValueError:
-    pass
-  for fmt in ('%Y-%m-%d',):
-    try:
-      return datetime.datetime.strptime(value, fmt)
-    except ValueError:
-      pass
-  raise ValueError('Failed to parse %s' % value)
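# Illustrative usage, not part of the original file (assumed example values):
# both accepted forms resolve to the same naive UTC datetime, e.g.
#   parse_time_option('1483228800') -> datetime.datetime(2017, 1, 1, 0, 0)
#   parse_time_option('2017-01-01') -> datetime.datetime(2017, 1, 1, 0, 0)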
-
-
-def flatten(dimensions):
-  items = {i['key']: i['value'] for i in dimensions}
-  return ','.join('%s=%s' % (k, v) for k, v in sorted(items.iteritems()))
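# Illustrative usage, not part of the original file (assumed dimension values):
# flatten() builds a stable, sorted grouping key, e.g.
#   flatten([{'key': 'os', 'value': 'Linux'}, {'key': 'cpu', 'value': 'x86'}])
#   -> 'cpu=x86,os=Linux'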
-
-
-def fetch_tasks(swarming, start, end, state, tags):
-  """Fetches the data."""
-  def process(data):
-    return [
-        flatten(t['properties']['dimensions']) for t in data.get('items', [])]
-  return _fetch_internal(
-      swarming, process, 'tasks/requests', start, end, state, tags)
-
-
-def fetch_counts(swarming, start, end, state, tags):
-  """Fetches counts from swarming and returns it."""
-  def process(data):
-    return int(data['count'])
-  return _fetch_internal(
-      swarming, process, 'tasks/count', start, end, state, tags)
-
-
-def _fetch_internal(swarming, process, endpoint, start, end, state, tags):
-  # Split the work in days. That's a lot of requests to do.
-  queue = Queue.Queue()
-  threads = []
-  def run(start, cmd):
-    logging.info('Running %s', ' '.join(cmd))
-    raw = subprocess.check_output(cmd)
-    logging.info('- returned %d', len(raw))
-    queue.put((start, process(json.loads(raw))))
-
-  day = start
-  while day != end:
-    data = [
-      ('start', int((day - _EPOCH).total_seconds())),
-      ('end', int((day + datetime.timedelta(days=1)-_EPOCH).total_seconds())),
-      ('state', state),
-    ]
-    for tag in tags:
-      data.append(('tags', tag))
-    cmd = [
-      sys.executable, os.path.join(CLIENT_DIR, 'swarming.py'),
-      'query', '-S', swarming,
-      endpoint + '?' + urllib.urlencode(data),
-    ]
-    thread = threading.Thread(target=run, args=(day.strftime('%Y-%m-%d'), cmd))
-    thread.daemon = True
-    thread.start()
-    threads.append(thread)
-    while len(threads) > 100:
-      # Throttle a bit.
-      for i, thread in enumerate(threads):
-        if not thread.is_alive():
-          thread.join()
-          threads.pop(i)
-          sys.stdout.write('.')
-          sys.stdout.flush()
-          break
-    day = day + datetime.timedelta(days=1)
-
-  while threads:
-    # Throttle a bit.
-    for i, thread in enumerate(threads):
-      if not thread.is_alive():
-        thread.join()
-        threads.pop(i)
-        sys.stdout.write('.')
-        sys.stdout.flush()
-        break
-  print('')
-  data = []
-  while True:
-    try:
-      data.append(queue.get_nowait())
-    except Queue.Empty:
-      break
-  return dict(data)
-
-
-def present_dimensions(items, daily_count):
-  # Split items per group.
-  per_dimension = collections.defaultdict(lambda: collections.defaultdict(int))
-  for date, dimensions in items.iteritems():
-    for d in dimensions:
-      per_dimension[d][date] += 1
-  for i, (dimension, data) in enumerate(sorted(per_dimension.iteritems())):
-    print(
-        '%s%s%s' % (
-          colorama.Style.BRIGHT + colorama.Fore.MAGENTA,
-          dimension,
-          colorama.Fore.RESET))
-    present_counts(data, daily_count)
-    if i != len(per_dimension) - 1:
-      print('')
-
-
-def present_counts(items, daily_count):
-  months = collections.defaultdict(int)
-  for day, count in sorted(items.iteritems()):
-    month = day.rsplit('-', 1)[0]
-    months[month] += count
-
-  years = collections.defaultdict(int)
-  for month, count in months.iteritems():
-    year = month.rsplit('-', 1)[0]
-    years[year] += count
-  total = sum(months.itervalues())
-  maxlen = len(str(total))
-
-  if daily_count:
-    for day, count in sorted(items.iteritems()):
-      print('%s: %*d' % (day, maxlen, count))
-
-  if len(items) > 1:
-    for month, count in sorted(months.iteritems()):
-      print('%s   : %*d' % (month, maxlen, count))
-  if len(months) > 1:
-    for year, count in sorted(years.iteritems()):
-      print('%s      : %*d' % (year, maxlen, count))
-  if len(years) > 1:
-    print('Total     : %*d' % (maxlen, total))
-  if not daily_count:
-    print('')
-    graph.print_histogram(items)
-
-
-STATES = (
-    'PENDING',
-    'RUNNING',
-    'PENDING_RUNNING',
-    'COMPLETED',
-    'COMPLETED_SUCCESS',
-    'COMPLETED_FAILURE',
-    'EXPIRED',
-    'TIMED_OUT',
-    'BOT_DIED',
-    'CANCELED',
-    'ALL',
-    'DEDUPED')
-
-
-def main():
-  colorama.init()
-  parser = optparse.OptionParser(description=sys.modules['__main__'].__doc__)
-  tomorrow = datetime.datetime.utcnow().date() + datetime.timedelta(days=1)
-  year = datetime.datetime(tomorrow.year, 1, 1)
-  parser.add_option(
-      '-S', '--swarming',
-      metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
-      help='Swarming server to use')
-  group = optparse.OptionGroup(parser, 'Filtering')
-  group.add_option(
-      '--start', default=year.strftime('%Y-%m-%d'),
-      help='Starting date in UTC; defaults to start of year: %default')
-  group.add_option(
-      '--end', default=tomorrow.strftime('%Y-%m-%d'),
-      help='End date in UTC; defaults to tomorrow: %default')
-  group.add_option(
-      '--state', default='ALL', type='choice', choices=STATES,
-      help='State to filter on. Values are: %s' % ', '.join(STATES))
-  group.add_option(
-      '--tags', action='append', default=[], help='Tags to filter on')
-  parser.add_option_group(group)
-  group = optparse.OptionGroup(parser, 'Presentation')
-  group.add_option(
-      '--dimensions', action='store_true', help='Show the dimensions')
-  group.add_option(
-      '--daily-count', action='store_true',
-      help='Show the daily count in raw number instead of histogram')
-  parser.add_option_group(group)
-  parser.add_option(
-      '--json', default='counts.json',
-      help='File containing raw data; default: %default')
-  parser.add_option(
-      '-v', '--verbose', action='count', default=0, help='Log')
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unsupported argument %s' % args)
-  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
-  start = parse_time_option(options.start)
-  end = parse_time_option(options.end)
-  print('From %s (%d) to %s (%d)' % (
-      start, int((start- _EPOCH).total_seconds()),
-      end, int((end - _EPOCH).total_seconds())))
-  if options.swarming:
-    if options.dimensions:
-      data = fetch_tasks(
-          options.swarming, start, end, options.state, options.tags)
-    else:
-      data = fetch_counts(
-          options.swarming, start, end, options.state, options.tags)
-    with open(options.json, 'wb') as f:
-      json.dump(data, f)
-  elif not os.path.isfile(options.json):
-    parser.error('--swarming is required.')
-  else:
-    with open(options.json, 'rb') as f:
-      data = json.load(f)
-
-  print('')
-  if options.dimensions:
-    present_dimensions(data, options.daily_count)
-  else:
-    present_counts(data, options.daily_count)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/swarming_load_test_bot.py b/tools/swarming_client/tools/swarming_load_test_bot.py
deleted file mode 100755
index 8b673aa..0000000
--- a/tools/swarming_client/tools/swarming_load_test_bot.py
+++ /dev/null
@@ -1,334 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Triggers a ton of fake jobs to test its handling under high load.
-
-Generates an histogram with the latencies to process the tasks and number of
-retries.
-"""
-
-import hashlib
-import json
-import logging
-import optparse
-import os
-import Queue
-import socket
-import StringIO
-import sys
-import threading
-import time
-import zipfile
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from third_party import colorama
-
-import swarming
-
-from utils import graph
-from utils import net
-from utils import threading_utils
-
-# Line too long (NN/80)
-# pylint: disable=C0301
-
-OS_NAME = 'Comodore64'
-TASK_OUTPUT = 'This task ran with great success'
-
-
-def print_results(results, columns, buckets):
-  delays = [i for i in results if isinstance(i, float)]
-  failures = [i for i in results if not isinstance(i, float)]
-
-  print('%sDELAYS%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
-  graph.print_histogram(
-      graph.generate_histogram(delays, buckets), columns, ' %.3f')
-  print('')
-  print('Total items  : %d' % len(results))
-  average = 0
-  if delays:
-    average = sum(delays)/ len(delays)
-  print('Average delay: %s' % graph.to_units(average))
-  print('')
-
-  if failures:
-    print('%sEVENTS%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
-    values = {}
-    for f in failures:
-      values.setdefault(f, 0)
-      values[f] += 1
-    graph.print_histogram(values, columns, ' %s')
-    print('')
-
-
-def generate_version(source):
-  """Generates the sha-1 based on the content of this zip.
-
-  Copied from:
-  https://code.google.com/p/swarming/source/browse/services/swarming/swarm_bot/zipped_archive.py
-  """
-  result = hashlib.sha1()
-  with zipfile.ZipFile(source, 'r') as z:
-    for item in sorted(z.namelist()):
-      with z.open(item) as f:
-        result.update(item)
-        result.update('\x00')
-        result.update(f.read())
-        result.update('\x00')
-  return result.hexdigest()
-
-
-def calculate_version(url):
-  """Retrieves the swarm_bot code and returns the SHA-1 for it."""
-  # Cannot use url_open() since zipfile requires .seek().
-  return generate_version(StringIO.StringIO(net.url_read(url)))
-
-
-def get_hostname():
-  return socket.getfqdn().lower().split('.', 1)[0]
-
-
-class FakeSwarmBot(object):
-  """This is a Fake swarm_bot implementation simulating it is running
-  Comodore64.
-
-  It polls for job, acts as if it was processing them and return the fake
-  result.
-  """
-  def __init__(
-      self, swarming_url, dimensions, swarm_bot_version_hash, hostname, index,
-      progress, duration, events, kill_event):
-    self._lock = threading.Lock()
-    self._swarming = swarming_url
-    self._index = index
-    self._progress = progress
-    self._duration = duration
-    self._events = events
-    self._kill_event = kill_event
-    self._bot_id = '%s-%d' % (hostname, index)
-    self._attributes = {
-      'dimensions': dimensions,
-      'id': self._bot_id,
-      # TODO(maruel): Use os_utilities.py.
-      'ip': '127.0.0.1',
-      'try_count': 0,
-      'version': swarm_bot_version_hash,
-    }
-
-    self._thread = threading.Thread(target=self._run, name='bot%d' % index)
-    self._thread.daemon = True
-    self._thread.start()
-
-  def join(self):
-    self._thread.join()
-
-  def is_alive(self):
-    return self._thread.is_alive()
-
-  def _run(self):
-    """Polls the server and fake execution."""
-    try:
-      self._progress.update_item('%d alive' % self._index, bots=1)
-      while True:
-        if self._kill_event.is_set():
-          return
-        data = {'attributes': json.dumps(self._attributes)}
-        request = net.url_read(self._swarming + '/poll_for_test', data=data)
-        if request is None:
-          self._events.put('poll_for_test_empty')
-          continue
-        start = time.time()
-        try:
-          manifest = json.loads(request)
-        except ValueError:
-          self._progress.update_item('Failed to poll')
-          self._events.put('poll_for_test_invalid')
-          continue
-
-        commands = [c['function'] for c in manifest.get('commands', [])]
-        if not commands:
-          # Nothing to run.
-          self._events.put('sleep')
-          time.sleep(manifest['come_back'])
-          continue
-
-        if commands == ['UpdateSlave']:
-          # Calculate the proper SHA-1 and loop again.
-          # This could happen if the Swarming server is upgraded while this
-          # script runs.
-          self._attributes['version'] = calculate_version(
-              manifest['commands'][0]['args'])
-          self._events.put('update_slave')
-          continue
-
-        if commands != ['RunManifest']:
-          self._progress.update_item(
-              'Unexpected RPC call %s\n%s' % (commands, manifest))
-          self._events.put('unknown_rpc')
-          break
-
-        store_cmd = manifest['commands'][0]
-        if not isinstance(store_cmd['args'], unicode):
-          self._progress.update_item('Unexpected RPC manifest\n%s' % manifest)
-          self._events.put('unknown_args')
-          break
-
-        result_url = manifest['result_url']
-        test_run = json.loads(store_cmd['args'])
-        if result_url != test_run['result_url']:
-          self._progress.update_item(
-              'Unexpected result url: %s != %s' %
-              (result_url, test_run['result_url']))
-          self._events.put('invalid_result_url')
-          break
-        ping_url = test_run['ping_url']
-        ping_delay = test_run['ping_delay']
-        self._progress.update_item('%d processing' % self._index, processing=1)
-
-        # Fake activity and send pings as requested.
-        while True:
-          remaining = max(0, (start + self._duration) - time.time())
-          if remaining > ping_delay:
-            # Include empty data to ensure the request is a POST request.
-            result = net.url_read(ping_url, data={})
-            assert result == 'Success.', result
-            remaining = max(0, (start + self._duration) - time.time())
-          if not remaining:
-            break
-          time.sleep(remaining)
-
-        # In the old API, r=<task_id>&id=<bot_id> is passed as the url.
-        data = {
-          'o': TASK_OUTPUT,
-          'x': '0',
-        }
-        result = net.url_read(manifest['result_url'], data=data)
-        self._progress.update_item(
-            '%d processed' % self._index, processing=-1, processed=1)
-        if not result:
-          self._events.put('result_url_fail')
-        else:
-          assert result == 'Successfully update the runner results.', result
-          self._events.put(time.time() - start)
-    finally:
-      try:
-        # Unregister itself. Otherwise the server will have tons of fake slaves
-        # that the admin will have to remove manually.
-        response = net.url_read(
-            self._swarming + '/delete_machine_stats',
-            data=[('r', self._bot_id)])
-        if response is None:
-          self._events.put('failed_unregister')
-      finally:
-        self._progress.update_item('%d quit' % self._index, bots=-1)
-
-
-def main():
-  colorama.init()
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '-S', '--swarming',
-      metavar='URL', default='',
-      help='Swarming server to use')
-  parser.add_option(
-      '--suffix', metavar='NAME', default='', help='Bot suffix name to use')
-  swarming.add_filter_options(parser)
-  # Use improbable values to reduce the chance of interfering with real slaves.
-  parser.set_defaults(
-      dimensions=[
-        ('cpu', ['arm36']),
-        ('hostname', socket.getfqdn()),
-        ('os', OS_NAME),
-      ])
-
-  group = optparse.OptionGroup(parser, 'Load generated')
-  group.add_option(
-      '--slaves', type='int', default=300, metavar='N',
-      help='Number of swarm bot slaves, default: %default')
-  group.add_option(
-      '-c', '--consume', type='float', default=60., metavar='N',
-      help='Duration (s) for consuming a request, default: %default')
-  parser.add_option_group(group)
-
-  group = optparse.OptionGroup(parser, 'Display options')
-  group.add_option(
-      '--columns', type='int', default=graph.get_console_width(), metavar='N',
-      help='For histogram display, default:%default')
-  group.add_option(
-      '--buckets', type='int', default=20, metavar='N',
-      help='Number of buckets for histogram display, default:%default')
-  parser.add_option_group(group)
-
-  parser.add_option(
-      '--dump', metavar='FOO.JSON', help='Dumps to json file')
-  parser.add_option(
-      '-v', '--verbose', action='store_true', help='Enables logging')
-
-  options, args = parser.parse_args()
-  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
-  if args:
-    parser.error('Unsupported args: %s' % args)
-  options.swarming = options.swarming.rstrip('/')
-  if not options.swarming:
-    parser.error('--swarming is required.')
-  if options.consume <= 0:
-    parser.error('Needs --consume > 0. 0.01 is a valid value.')
-  swarming.process_filter_options(parser, options)
-
-  print(
-      'Running %d slaves, each task lasting %.1fs' % (
-        options.slaves, options.consume))
-  print('Ctrl-C to exit.')
-  print('[processing/processed/bots]')
-  columns = [('processing', 0), ('processed', 0), ('bots', 0)]
-  progress = threading_utils.Progress(columns)
-  events = Queue.Queue()
-  start = time.time()
-  kill_event = threading.Event()
-  swarm_bot_version_hash = calculate_version(options.swarming + '/bot_code')
-  hostname = get_hostname()
-  if options.suffix:
-    hostname += '-' + options.suffix
-  slaves = [
-    FakeSwarmBot(
-      options.swarming, options.dimensions, swarm_bot_version_hash, hostname, i,
-      progress, options.consume, events, kill_event)
-    for i in range(options.slaves)
-  ]
-  try:
-    # Wait for all the slaves to come alive.
-    while not all(s.is_alive() for s in slaves):
-      time.sleep(0.01)
-    progress.update_item('Ready to run')
-    while slaves:
-      progress.print_update()
-      time.sleep(0.01)
-      # The slaves could be told to die.
-      slaves = [s for s in slaves if s.is_alive()]
-  except KeyboardInterrupt:
-    kill_event.set()
-
-  progress.update_item('Waiting for slaves to quit.', raw=True)
-  progress.update_item('')
-  while slaves:
-    progress.print_update()
-    slaves = [s for s in slaves if s.is_alive()]
-  # At this point, progress is not used anymore.
-  print('')
-  print('Ran for %.1fs.' % (time.time() - start))
-  print('')
-  results = list(events.queue)
-  print_results(results, options.columns, options.buckets)
-  if options.dump:
-    with open(options.dump, 'w') as f:
-      json.dump(results, f, separators=(',',':'))
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/swarming_load_test_client.py b/tools/swarming_client/tools/swarming_load_test_client.py
deleted file mode 100755
index 24cf8f6..0000000
--- a/tools/swarming_client/tools/swarming_load_test_client.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Triggers a ton of fake jobs to test its handling under high load.
-
-Generates an histogram with the latencies to process the tasks and number of
-retries.
-"""
-
-import json
-import logging
-import optparse
-import os
-import random
-import re
-import string
-import sys
-import time
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-
-sys.path.insert(0, ROOT_DIR)
-
-from third_party import colorama
-
-import swarming
-
-from utils import graph
-from utils import net
-from utils import threading_utils
-
-import swarming_load_test_bot
-
-
-# Amount of time the timer should be reduced on the Swarming side.
-TIMEOUT_OVERHEAD = 10
-
-
-def print_results(results, columns, buckets):
-  delays = [i for i in results if isinstance(i, float)]
-  failures = [i for i in results if not isinstance(i, float)]
-
-  graph.print_histogram(
-      graph.generate_histogram(delays, buckets), columns, '%5.3f')
-  print('')
-  print('Total items : %d' % len(results))
-  average = 0
-  if delays:
-    average = sum(delays)/ len(delays)
-  print('Average delay: %.2fs' % average)
-  #print('Average overhead: %s' % graph.to_units(total_size / len(sizes)))
-  print('')
-  if failures:
-    print('')
-    print('%sFAILURES%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
-    print('\n'.join('  %s' % i for i in failures))
-
-
-def trigger_task(
-    swarming_url, dimensions, sleep_time, output_size, progress,
-    unique, timeout, index):
-  """Triggers a Swarming job and collects results.
-
-  Returns the total amount of time to run a task remotely, including all the
-  overhead.
-  """
-  name = 'load-test-%d-%s' % (index, unique)
-  start = time.time()
-
-  logging.info('trigger')
-  manifest = swarming.Manifest(
-    isolate_server='http://localhost:1',
-    namespace='dummy-isolate',
-    isolated_hash=1,
-    task_name=name,
-    extra_args=[],
-    env={},
-    dimensions=dimensions,
-    deadline=int(timeout-TIMEOUT_OVERHEAD),
-    verbose=False,
-    profile=False,
-    priority=100)
-  cmd = [
-    'python',
-    '-c',
-    'import time; print(\'1\'*%s); time.sleep(%d); print(\'Back\')' %
-    (output_size, sleep_time)
-  ]
-  manifest.add_task('echo stuff', cmd)
-  data = {'request': manifest.to_json()}
-  response = net.url_read(swarming_url + '/test', data=data)
-  if response is None:
-    # Failed to trigger. Return a failure.
-    return 'failed_trigger'
-
-  result = json.loads(response)
-  # The old API uses a hardcoded config name. The new API doesn't have the
-  # concept of a config name so it uses the task name. Ignore this detail.
-  test_keys = []
-  for key in result['test_keys']:
-    key.pop('config_name')
-    test_keys.append(key.pop('test_key'))
-    assert re.match('[0-9a-f]+', test_keys[-1]), test_keys
-  expected = {
-    u'priority': 100,
-    u'test_case_name': unicode(name),
-    u'test_keys': [
-      {
-        u'num_instances': 1,
-        u'instance_index': 0,
-      }
-    ],
-  }
-  assert result == expected, '\n%s\n%s' % (result, expected)
-
-  progress.update_item('%5d' % index, processing=1)
-  try:
-    logging.info('collect')
-    new_test_keys = swarming.get_task_keys(swarming_url, name)
-    if not new_test_keys:
-      return 'no_test_keys'
-    assert test_keys == new_test_keys, (test_keys, new_test_keys)
-    out = [
-      output
-      for _index, output in swarming.yield_results(
-          swarming_url, test_keys, timeout, None, False, None, False)
-    ]
-    if not out:
-      return 'no_result'
-    for item in out:
-      item.pop('machine_tag')
-      item.pop('machine_id')
-      # TODO(maruel): Assert output even when run on a real bot.
-      _out_actual = item.pop('output')
-      # assert out_actual == swarming_load_test_bot.TASK_OUTPUT, out_actual
-    expected = [
-      {
-        u'config_instance_index': 0,
-        u'exit_codes': u'0',
-        u'num_config_instances': 1,
-      }
-    ]
-    assert out == expected, '\n%s\n%s' % (out, expected)
-    return time.time() - start
-  finally:
-    progress.update_item('%5d - done' % index, processing=-1, processed=1)
-
-
-def main():
-  colorama.init()
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '-S', '--swarming',
-      metavar='URL', default='',
-      help='Swarming server to use')
-  swarming.add_filter_options(parser)
-  parser.set_defaults(dimensions=[('os', swarming_load_test_bot.OS_NAME)])
-
-  group = optparse.OptionGroup(parser, 'Load generated')
-  group.add_option(
-      '-s', '--send-rate', type='float', default=16., metavar='RATE',
-      help='Rate (item/s) of sending requests as a float, default: %default')
-  group.add_option(
-      '-D', '--duration', type='float', default=60., metavar='N',
-      help='Duration (s) of the sending phase of the load test, '
-           'default: %default')
-  group.add_option(
-      '-m', '--concurrent', type='int', default=200, metavar='N',
-      help='Maximum concurrent on-going requests, default: %default')
-  group.add_option(
-      '-t', '--timeout', type='float', default=15*60., metavar='N',
-      help='Task expiration and timeout to get results, the task itself will '
-           'have %ds less than the value provided. Default: %%default' %
-               TIMEOUT_OVERHEAD)
-  group.add_option(
-      '-o', '--output-size', type='int', default=100, metavar='N',
-      help='Bytes sent to stdout, default: %default')
-  group.add_option(
-      '--sleep', type='int', default=60, metavar='N',
-      help='Amount of time the bot should sleep, e.g. faking work, '
-           'default: %default')
-  parser.add_option_group(group)
-
-  group = optparse.OptionGroup(parser, 'Display options')
-  group.add_option(
-      '--columns', type='int', default=graph.get_console_width(), metavar='N',
-      help='For histogram display, default:%default')
-  group.add_option(
-      '--buckets', type='int', default=20, metavar='N',
-      help='Number of buckets for histogram display, default:%default')
-  parser.add_option_group(group)
-
-  parser.add_option(
-      '--dump', metavar='FOO.JSON', help='Dumps to json file')
-  parser.add_option(
-      '-v', '--verbose', action='store_true', help='Enables logging')
-
-  options, args = parser.parse_args()
-  logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
-  if args:
-    parser.error('Unsupported args: %s' % args)
-  options.swarming = options.swarming.rstrip('/')
-  if not options.swarming:
-    parser.error('--swarming is required.')
-  if options.duration <= 0:
-    parser.error('Needs --duration > 0. 0.01 is a valid value.')
-  swarming.process_filter_options(parser, options)
-
-  total = int(round(options.send_rate * options.duration))
-  print(
-      'Sending %.1f i/s for %ds with max %d parallel requests; timeout %.1fs; '
-      'total %d' %
-        (options.send_rate, options.duration, options.concurrent,
-        options.timeout, total))
-  print('[processing/processed/todo]')
-
-  # This is used so there's no clash between runs and actual real usage.
-  unique = ''.join(random.choice(string.ascii_letters) for _ in range(8))
-  columns = [('processing', 0), ('processed', 0), ('todo', 0)]
-  progress = threading_utils.Progress(columns)
-  index = 0
-  results = []
-  with threading_utils.ThreadPoolWithProgress(
-      progress, 1, options.concurrent, 0) as pool:
-    try:
-      start = time.time()
-      while True:
-        duration = time.time() - start
-        if duration > options.duration:
-          break
-        should_have_triggered_so_far = int(round(duration * options.send_rate))
-        while index < should_have_triggered_so_far:
-          pool.add_task(
-              0,
-              trigger_task,
-              options.swarming,
-              options.dimensions,
-              options.sleep,
-              options.output_size,
-              progress,
-              unique,
-              options.timeout,
-              index)
-          progress.update_item('', todo=1)
-          index += 1
-          progress.print_update()
-        time.sleep(0.01)
-      progress.update_item('Getting results for on-going tasks.', raw=True)
-      for i in pool.iter_results():
-        results.append(i)
-        # This is a bit excessive but it's useful in the case where some tasks
-        # hang, so at least partial data is available.
-        if options.dump:
-          results.sort()
-          if os.path.exists(options.dump):
-            os.rename(options.dump, options.dump + '.old')
-          with open(options.dump, 'wb') as f:
-            json.dump(results, f, separators=(',',':'))
-      if not options.dump:
-        results.sort()
-    except KeyboardInterrupt:
-      aborted = pool.abort()
-      progress.update_item(
-          'Got Ctrl-C. Aborted %d unsent tasks.' % aborted,
-          raw=True,
-          todo=-aborted)
-      progress.print_update()
-  progress.print_update()
-  # At this point, progress is not used anymore.
-  print('')
-  print(' - Took %.1fs.' % (time.time() - start))
-  print('')
-  print_results(results, options.columns, options.buckets)
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/zip_profiler.py b/tools/swarming_client/tools/zip_profiler.py
deleted file mode 100755
index dc36a17..0000000
--- a/tools/swarming_client/tools/zip_profiler.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Profiler to compare various compression levels with regards to speed
-and final size when compressing the full set of files from a given
-isolated file.
-"""
-
-import bz2
-import optparse
-import os
-import subprocess
-import sys
-import tempfile
-import time
-import zlib
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-from third_party.depot_tools import fix_encoding
-from utils import file_path
-from utils import tools
-
-
-def zip_file(compressor_constructor, compression_level, filename):
-  compressed_size = 0
-  compressor = compressor_constructor(compression_level)
-  with open(filename, 'rb') as f:
-    while True:
-      chunk = f.read(16 * 1024)
-      if not chunk:
-        break
-      compressed_size += len(compressor.compress(chunk))
-    compressed_size += len(compressor.flush())
-
-  return compressed_size
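# Minimal sketch of the same incremental compression interface, not part of the
# original file (assumed input data); zlib and bz2 compressor objects both
# support compress()/flush():
#   import zlib
#   c = zlib.compressobj(6)
#   compressed_size = len(c.compress(b'x' * 1024)) + len(c.flush())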
-
-
-def zip_directory(compressor_constructor, compression_level, root_dir):
-  compressed_size = 0
-  for root, _, files in os.walk(root_dir):
-    compressed_size += sum(zip_file(compressor_constructor, compression_level,
-                                    os.path.join(root, name))
-                           for name in files)
-  return compressed_size
-
-
-def profile_compress(zip_module_name, compressor_constructor,
-                     compression_range, compress_func, compress_target):
-  for i in compression_range:
-    start_time = time.time()
-    compressed_size = compress_func(compressor_constructor, i, compress_target)
-    end_time = time.time()
-
-    print('%4s at compression level %s, total size %11d, time taken %6.3f' %
-          (zip_module_name, i, compressed_size, end_time - start_time))
-
-
-def tree_files(root_dir):
-  file_set = {}
-  for root, _, files in os.walk(root_dir):
-    for name in files:
-      filename = os.path.join(root, name)
-      file_set[filename] = os.stat(filename).st_size
-
-  return file_set
-
-
-def main():
-  tools.disable_buffering()
-  parser = optparse.OptionParser()
-  parser.add_option('-s', '--isolated', help='.isolated file to profile with.')
-  parser.add_option('--largest_files', type='int',
-                    help='If this is set, instead of compressing all the '
-                    'files, only the large n files will be compressed')
-  options, args = parser.parse_args()
-
-  if args:
-    parser.error('Unknown args passed in; %s' % args)
-  if not options.isolated:
-    parser.error('The .isolated file must be given.')
-
-  temp_dir = None
-  try:
-    temp_dir = tempfile.mkdtemp(prefix=u'zip_profiler')
-
-    # Create a directory of the required files
-    subprocess.check_call([os.path.join(ROOT_DIR, 'isolate.py'),
-                           'remap',
-                           '-s', options.isolated,
-                           '--outdir', temp_dir])
-
-    file_set = tree_files(temp_dir)
-
-    if options.largest_files:
-      sorted_by_size = sorted(file_set.iteritems(),  key=lambda x: x[1],
-                              reverse=True)
-      files_to_compress = sorted_by_size[:options.largest_files]
-
-      for filename, size in files_to_compress:
-        print('Compressing %s, uncompressed size %d' % (filename, size))
-
-        profile_compress('zlib', zlib.compressobj, range(10), zip_file,
-                         filename)
-        profile_compress('bz2', bz2.BZ2Compressor, range(1, 10), zip_file,
-                         filename)
-    else:
-      print('Number of files: %s' % len(file_set))
-      print('Total size: %s' % sum(file_set.itervalues()))
-
-      # Profile!
-      profile_compress('zlib', zlib.compressobj, range(10), zip_directory,
-                       temp_dir)
-      profile_compress('bz2', bz2.BZ2Compressor, range(1, 10), zip_directory,
-                       temp_dir)
-  finally:
-    file_path.rmtree(temp_dir)
-
-
-if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  sys.exit(main())
diff --git a/tools/swarming_client/tools/zip_run_isolated.py b/tools/swarming_client/tools/zip_run_isolated.py
deleted file mode 100755
index b4b775e..0000000
--- a/tools/swarming_client/tools/zip_run_isolated.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Converts run_isolated.py with dependencies into run_isolated.zip.
-
-run_isolated.zip will be created in the current directory.
-
-Useful for reproducing swarm-bot environment like this:
-  ./tools/zip_run_isolated.py && python run_isolated.zip ...
-"""
-
-import os
-import sys
-
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, ROOT_DIR)
-
-import run_isolated
-
-
-def main():
-  zip_package = run_isolated.get_as_zip_package()
-  zip_package.zip_into_file('run_isolated.zip')
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main())
diff --git a/tools/swarming_client/trace_inputs.py b/tools/swarming_client/trace_inputs.py
deleted file mode 100755
index 64ffbe2..0000000
--- a/tools/swarming_client/trace_inputs.py
+++ /dev/null
@@ -1,3463 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Copyright 2012 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Traces an executable and its child processes and extract the files accessed
-by them.
-
-The implementation uses OS-specific APIs. The native Kernel logger and the ETL
-interface are used on Windows. Dtrace is used on OSX. Strace is used otherwise.
-The OS-specific implementation is hidden in an 'API' interface.
-
-The results are embedded in a Results instance. The tracing is done in two
-phases, the first is to do the actual trace and generate an
-implementation-specific log file. Then the log file is parsed to extract the
-information, including the individual child processes and the files accessed
-from the log.
-"""
-
-import codecs
-import csv
-import errno
-import getpass
-import glob
-import logging
-import os
-import re
-import stat
-import subprocess
-import sys
-import tempfile
-import threading
-import time
-import weakref
-
-from third_party import colorama
-from third_party.depot_tools import fix_encoding
-from third_party.depot_tools import subcommand
-
-from utils import file_path
-from utils import fs
-from utils import logging_utils
-from utils import subprocess42
-from utils import tools
-
-## OS-specific imports
-
-if sys.platform == 'win32':
-  from ctypes.wintypes import byref, c_int, c_wchar_p
-  from ctypes.wintypes import windll # pylint: disable=E0611
-
-
-__version__ = '0.2'
-
-
-BASE_DIR = os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding())))
-ROOT_DIR = os.path.dirname(os.path.dirname(BASE_DIR))
-
-
-class TracingFailure(Exception):
-  """An exception occured during tracing."""
-  def __init__(self, description, pid, line_number, line, *args):
-    super(TracingFailure, self).__init__(
-        description, pid, line_number, line, *args)
-    self.description = description
-    self.pid = pid
-    self.line_number = line_number
-    self.line = line
-    self.extra = args
-
-  def __str__(self):
-    out = self.description
-    if self.pid:
-      out += '\npid: %d' % self.pid
-    if self.line_number:
-      out += '\nline: %d' % self.line_number
-    if self.line:
-      out += '\n%s' % self.line
-    if self.extra:
-      out += '\n' + ', '.join(map(str, filter(None, self.extra)))
-    return out
-
-
-## OS-specific functions
-
-if sys.platform == 'win32':
-  def get_current_encoding():
-    """Returns the 'ANSI' code page associated to the process."""
-    return 'cp%d' % int(windll.kernel32.GetACP())
-
-
-  def CommandLineToArgvW(command_line):
-    """Splits a commandline into argv using CommandLineToArgvW()."""
-    # http://msdn.microsoft.com/library/windows/desktop/bb776391.aspx
-    size = c_int()
-    assert isinstance(command_line, unicode)
-    ptr = windll.shell32.CommandLineToArgvW(command_line, byref(size))
-    try:
-      return [arg for arg in (c_wchar_p * size.value).from_address(ptr)]
-    finally:
-      windll.kernel32.LocalFree(ptr)
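# Illustrative call, not part of the original file (assumed command line,
# Windows only):
#   CommandLineToArgvW(u'foo.exe "a b" c') -> [u'foo.exe', u'a b', u'c']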
-
-
-def can_trace():
-  """Returns True if the user is an administrator on Windows.
-
-  It is required for tracing to work.
-  """
-  if sys.platform != 'win32':
-    return True
-  return bool(windll.shell32.IsUserAnAdmin())
-
-
-def create_subprocess_thunk():
-  """Creates a small temporary script to start the child process.
-
-  This thunk doesn't block; its unique name is used to identify it as the
-  parent.
-  """
-  handle, name = tempfile.mkstemp(prefix=u'trace_inputs_thunk', suffix='.py')
-  try:
-    os.write(
-        handle,
-        (
-          'import subprocess, sys\n'
-          'sys.exit(subprocess.call(sys.argv[2:]))\n'
-        ))
-  finally:
-    os.close(handle)
-  return name
-
-
-def create_exec_thunk():
-  """Creates a small temporary script to start the child executable.
-
-  Reads from the file handle provided as the first argument to block, then
-  execv() the command to be traced.
-  """
-  handle, name = tempfile.mkstemp(prefix=u'trace_inputs_thunk', suffix='.py')
-  try:
-    os.write(
-        handle,
-        (
-          'import os, sys\n'
-          'fd = int(sys.argv[1])\n'
-          # This will block until the controlling process writes a byte on the
-          # pipe. It will do so once the tracing tool, e.g. strace, is ready to
-          # trace.
-          'os.read(fd, 1)\n'
-          'os.close(fd)\n'
-          'os.execv(sys.argv[2], sys.argv[2:])\n'
-        ))
-  finally:
-    os.close(handle)
-  return name
-
-
-def strace_process_quoted_arguments(text):
-  """Extracts quoted arguments on a string and return the arguments as a list.
-
-  Implemented as an automaton. Supports incomplete strings in the form
-  '"foo"...'.
-
-  Example:
-    With text = '"foo", "bar"', the function will return ['foo', 'bar']
-
-  TODO(maruel): Implement escaping.
-  """
-  # All the possible states of the DFA.
-  ( NEED_QUOTE,         # Beginning of a new argument.
-    INSIDE_STRING,      # Inside an argument.
-    ESCAPED,            # Found a '\' inside a quote. Treat the next char as-is.
-    NEED_COMMA_OR_DOT,  # Right after the closing quote of an argument. Could be
-                        # a series of 3 dots or a comma.
-    NEED_SPACE,         # Right after a comma
-    NEED_DOT_2,         # Found a dot, need a second one.
-    NEED_DOT_3,         # Found second dot, need a third one.
-    NEED_COMMA,         # Found third dot, need a comma.
-    ) = range(8)
-
-  state = NEED_QUOTE
-  out = []
-  for index, char in enumerate(text):
-    if char == '"':
-      if state == NEED_QUOTE:
-        state = INSIDE_STRING
-        # A new argument was found.
-        out.append('')
-      elif state == INSIDE_STRING:
-        # The argument is now closed.
-        state = NEED_COMMA_OR_DOT
-      elif state == ESCAPED:
-        out[-1] += char
-        state = INSIDE_STRING
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-    elif char == ',':
-      if state in (NEED_COMMA_OR_DOT, NEED_COMMA):
-        state = NEED_SPACE
-      elif state == INSIDE_STRING:
-        out[-1] += char
-      elif state == ESCAPED:
-        out[-1] += char
-        state = INSIDE_STRING
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-    elif char == ' ':
-      if state == NEED_SPACE:
-        state = NEED_QUOTE
-      elif state == INSIDE_STRING:
-        out[-1] += char
-      elif state == ESCAPED:
-        out[-1] += char
-        state = INSIDE_STRING
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-    elif char == '.':
-      if state in (NEED_QUOTE, NEED_COMMA_OR_DOT):
-        # The string is incomplete; this means the strace -s flag should be
-        # increased.
-        # For NEED_QUOTE, the input string would look like '"foo", ...'.
-        # For NEED_COMMA_OR_DOT, the input string would look like '"foo"...'
-        state = NEED_DOT_2
-      elif state == NEED_DOT_2:
-        state = NEED_DOT_3
-      elif state == NEED_DOT_3:
-        state = NEED_COMMA
-      elif state == INSIDE_STRING:
-        out[-1] += char
-      elif state == ESCAPED:
-        out[-1] += char
-        state = INSIDE_STRING
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-    elif char == '\\':
-      if state == ESCAPED:
-        out[-1] += char
-        state = INSIDE_STRING
-      elif state == INSIDE_STRING:
-        state = ESCAPED
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-    else:
-      if state == INSIDE_STRING:
-        out[-1] += char
-      else:
-        raise ValueError(
-            'Can\'t process char \'%s\' at column %d for: %r' % (
-              char, index, text),
-            index,
-            text)
-  if state not in (NEED_COMMA, NEED_COMMA_OR_DOT):
-    raise ValueError(
-        'String is incorrectly terminated: %r' % text,
-        text)
-  return out
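# Illustrative parse, not part of the original file (assumed strace output
# fragment); an incomplete trailing string also parses:
#   strace_process_quoted_arguments('"foo", "bar"...') -> ['foo', 'bar']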
-
-
-def assert_is_renderable(pseudo_string):
-  """Asserts the input is a valid object to be processed by render()."""
-  assert (
-      pseudo_string is None or
-      isinstance(pseudo_string, unicode) or
-      hasattr(pseudo_string, 'render')), repr(pseudo_string)
-
-
-def render(pseudo_string):
-  """Converts the pseudo-string to an unicode string."""
-  if pseudo_string is None or isinstance(pseudo_string, unicode):
-    return pseudo_string
-  return pseudo_string.render()
-
-
-class Results(object):
-  """Results of a trace session."""
-
-  class _TouchedObject(object):
-    """Something, a file or a directory, that was accessed."""
-    def __init__(self, root, path, tainted, size, nb_files):
-      logging.debug(
-          '%s(%s, %s, %s, %s, %s)' %
-          (self.__class__.__name__, root, path, tainted, size, nb_files))
-      assert_is_renderable(root)
-      assert_is_renderable(path)
-      self.root = root
-      self.path = path
-      self.tainted = tainted
-      self.nb_files = nb_files
-      # Can be used as a cache or a default value, depending on context. In
-      # particular, once self.tainted is True, because the path was replaced
-      # with a variable, it is not possible to look up the file size.
-      self._size = size
-      # These are cache only.
-      self._real_path = None
-
-      # Check internal consistency.
-      assert path, path
-      assert tainted or bool(root) != bool(file_path.isabs(path)), (root, path)
-      assert tainted or (
-          not fs.exists(self.full_path) or
-          (self.full_path == file_path.get_native_path_case(self.full_path))), (
-              tainted,
-              self.full_path,
-              file_path.get_native_path_case(self.full_path))
-
-    @property
-    def existent(self):
-      return self.size != -1
-
-    @property
-    def full_path(self):
-      if self.root:
-        return os.path.join(self.root, self.path)
-      return self.path
-
-    @property
-    def real_path(self):
-      """Returns the path with symlinks resolved."""
-      if not self._real_path:
-        self._real_path = os.path.realpath(self.full_path)
-      return self._real_path
-
-    @property
-    def size(self):
-      """File's size. -1 is not existent.
-
-      Once tainted, it is not possible the retrieve the file size anymore since
-      the path is composed of variables.
-      """
-      if self._size is None and not self.tainted:
-        try:
-          self._size = fs.stat(self.full_path).st_size
-        except OSError:
-          self._size = -1
-      return self._size
-
-    def flatten(self):
-      """Returns a dict representing this object.
-
-      A 'size' of 0 means the file was only touched and not read.
-      """
-      return {
-        'path': self.path,
-        'size': self.size,
-      }
-
-    def replace_variables(self, variables):
-      """Replaces the root of this File with one of the variables if it matches.
-
-      If a variable replacement occurs, the cloned object becomes tainted.
-      """
-      for variable, root_path in variables.iteritems():
-        if self.path.startswith(root_path):
-          return self._clone(
-              self.root, variable + self.path[len(root_path):], True)
-      # No need to clone, returns ourself.
-      return self
-
-    def strip_root(self, root):
-      """Returns a clone of itself with 'root' stripped off.
-
-      Note that the file is kept if it is either accessible from a symlinked
-      path that was used to access the file or through the real path.
-      """
-      # Check internal consistency.
-      assert (
-          self.tainted or
-          (file_path.isabs(root) and root.endswith(os.path.sep))), root
-      if not self.full_path.startswith(root):
-        # Now try to resolve the symlinks to see if it can be reached this way.
-        # Only try *after* trying without resolving symlink.
-        if not self.real_path.startswith(root):
-          return None
-        path = self.real_path
-      else:
-        path = self.full_path
-      return self._clone(root, path[len(root):], self.tainted)
-
-    def _clone(self, new_root, new_path, tainted):
-      raise NotImplementedError(self.__class__.__name__)
-
-  class File(_TouchedObject):
-    """A file that was accessed. May not be present anymore.
-
-    If tainted is true, it means it is not a real path anymore as a variable
-    replacement occurred.
-
-    |mode| can be one of None, TOUCHED, READ or WRITE.
-    """
-    # Was probed for existence and exists, but was never _opened_.
-    TOUCHED = 't'
-    # Opened for read only and guaranteed to not have been written to.
-    READ = 'r'
-    # Opened for write.
-    WRITE = 'w'
-
-    # They are listed in order of priority. E.g. if a file is traced as TOUCHED
-    # then as WRITE, only keep WRITE. None means no idea, which is a problem on
-    # Windows.
-    ACCEPTABLE_MODES = (None, TOUCHED, READ, WRITE)
-
-    def __init__(self, root, path, tainted, size, mode):
-      assert mode in self.ACCEPTABLE_MODES
-      super(Results.File, self).__init__(root, path, tainted, size, 1)
-      self.mode = mode
-
-    def _clone(self, new_root, new_path, tainted):
-      """Clones itself keeping meta-data."""
-      # Keep the self.size and self._real_path caches for performance reasons. It
-      # is also important when the file becomes tainted (with a variable instead
-      # of the real path) since self.path is not an on-disk path anymore so
-      # out._size cannot be updated.
-      out = self.__class__(new_root, new_path, tainted, self.size, self.mode)
-      out._real_path = self._real_path
-      return out
-
-    def flatten(self):
-      out = super(Results.File, self).flatten()
-      out['mode'] = self.mode
-      return out
-
-  class Directory(_TouchedObject):
-    """A directory of files. Must exist.
-
-    For a Directory instance, self.size is not a cache, it's an actual value
-    that is never modified and represents the total size of the files contained
-    in this directory. It is possible that the directory is empty so that
-    size==0; this happens if there's only an invalid symlink in it.
-    """
-    def __init__(self, root, path, tainted, size, nb_files):
-      """path='.' is a valid value and must be handled appropriately."""
-      assert not path.endswith(os.path.sep), path
-      super(Results.Directory, self).__init__(
-          root, path + os.path.sep, tainted, size, nb_files)
-
-    def flatten(self):
-      out = super(Results.Directory, self).flatten()
-      out['nb_files'] = self.nb_files
-      return out
-
-    def _clone(self, new_root, new_path, tainted):
-      """Clones itself keeping meta-data."""
-      out = self.__class__(
-          new_root,
-          new_path.rstrip(os.path.sep),
-          tainted,
-          self.size,
-          self.nb_files)
-      out._real_path = self._real_path
-      return out
-
-  class Process(object):
-    """A process that was traced.
-
-    Contains references to the files accessed by this process and its children.
-    """
-    def __init__(self, pid, files, executable, command, initial_cwd, children):
-      logging.debug('Process(%s, %d, ...)' % (pid, len(files)))
-      self.pid = pid
-      self.files = sorted(files, key=lambda x: x.path)
-      self.children = children
-      self.executable = executable
-      self.command = command
-      self.initial_cwd = initial_cwd
-
-      # Check internal consistency.
-      assert len(set(f.path for f in self.files)) == len(self.files), sorted(
-          f.path for f in self.files)
-      assert isinstance(self.children, list)
-      assert isinstance(self.files, list)
-
-    @property
-    def all(self):
-      for child in self.children:
-        for i in child.all:
-          yield i
-      yield self
-
-    def flatten(self):
-      return {
-        'children': [c.flatten() for c in self.children],
-        'command': self.command,
-        'executable': self.executable,
-        'files': [f.flatten() for f in self.files],
-        'initial_cwd': self.initial_cwd,
-        'pid': self.pid,
-      }
-
-    def strip_root(self, root):
-      assert file_path.isabs(root) and root.endswith(os.path.sep), root
-      # Loads the files after since they are constructed as objects.
-      out = self.__class__(
-          self.pid,
-          filter(None, (f.strip_root(root) for f in self.files)),
-          self.executable,
-          self.command,
-          self.initial_cwd,
-          [c.strip_root(root) for c in self.children])
-      logging.debug(
-          'strip_root(%s) %d -> %d' % (root, len(self.files), len(out.files)))
-      return out
-
-  def __init__(self, process):
-    self.process = process
-    # Cache.
-    self._files = None
-
-  def flatten(self):
-    return {
-      'root': self.process.flatten(),
-    }
-
-  @property
-  def files(self):
-    if self._files is None:
-      self._files = sorted(
-          sum((p.files for p in self.process.all), []),
-          key=lambda x: x.path)
-    return self._files
-
-  @property
-  def existent(self):
-    return [f for f in self.files if f.existent]
-
-  @property
-  def non_existent(self):
-    return [f for f in self.files if not f.existent]
-
-  def strip_root(self, root):
-    """Returns a clone with all the files outside the directory |root| removed
-    and all the paths converted to relative paths.
-
-    It keeps files accessible through the |root| directory or that have been
-    accessed through any symlink which points to the same directory.
-    """
-    # Resolve any symlink
-    root = os.path.realpath(root)
-    root = (
-        file_path.get_native_path_case(root).rstrip(os.path.sep) + os.path.sep)
-    logging.debug('strip_root(%s)' % root)
-    return Results(self.process.strip_root(root))
-
-
-class ApiBase(object):
-  """OS-agnostic API to trace a process and its children."""
-  class Context(object):
-    """Processes one log line at a time and keeps the list of traced processes.
-
-    The parsing is complicated by the fact that logs are traced out of order for
-    strace but in-order for dtrace and logman. In addition, on Windows it is
-    very frequent that process ids are reused so a flat list cannot be used. But
-    at the same time, it is impossible to faithfully construct a graph when the
-    logs are processed out of order. So both a tree and a flat mapping are used,
-    the tree is the real process tree, while the flat mapping stores the last
-    valid process for the corresponding processid. For the strace case, the
-    tree's head is guessed at the last moment.
-    """
-    class Process(object):
-      """Keeps context for one traced child process.
-
-      Logs all the files this process touched. Ignores directories.
-      """
-      def __init__(self, blacklist, pid, initial_cwd):
-        # Check internal consistency.
-        assert isinstance(pid, int), repr(pid)
-        assert_is_renderable(initial_cwd)
-        self.pid = pid
-        # children are Process instances.
-        self.children = []
-        self.initial_cwd = initial_cwd
-        self.cwd = None
-        self.files = {}
-        self.executable = None
-        self.command = None
-        self._blacklist = blacklist
-
-      def to_results_process(self):
-        """Resolves file case sensitivity and or late-bound strings."""
-        # When resolving files, it's normal to get dupe because a file could be
-        # opened multiple times with different case. Resolve the deduplication
-        # here.
-        def fix_path(x):
-          """Returns the native file path case.
-
-          Converts late-bound strings.
-          """
-          if not x:
-            # Do not convert None instance to 'None'.
-            return x
-          x = render(x)
-          if os.path.isabs(x):
-            # Only convert absolute paths: if the path is not absolute, which
-            # tends to happen occasionally on Windows, it is not possible to
-            # get the native path case, so the value is left as-is. It mostly
-            # happens for the 'executable' value.
-            x = file_path.get_native_path_case(x)
-          return x
-
-        def fix_and_blacklist_path(x, m):
-          """Receives a tuple (filepath, mode) and processes filepath."""
-          x = fix_path(x)
-          if not x:
-            return
-          # The blacklist needs to be reapplied, since path casing could
-          # influence blacklisting.
-          if self._blacklist(x):
-            return
-          # Filters out directories. Some may have passed through.
-          if fs.isdir(x):
-            return
-          return x, m
-
-        # Renders all the files as strings, as some could be RelativePath
-        # instances. It is important to do it first since there could still be
-        # multiple entries with the same path but different modes.
-        rendered = (
-            fix_and_blacklist_path(f, m) for f, m in self.files.iteritems())
-        files = sorted(
-          (f for f in rendered if f),
-          key=lambda x: (x[0], Results.File.ACCEPTABLE_MODES.index(x[1])))
-        # Then converting into a dict will automatically clean up lesser
-        # important values.
-        files = [
-          Results.File(None, f, False, None, m)
-          for f, m in dict(files).iteritems()
-        ]
-        return Results.Process(
-            self.pid,
-            files,
-            fix_path(self.executable),
-            self.command,
-            fix_path(self.initial_cwd),
-            [c.to_results_process() for c in self.children])
-
-      def add_file(self, filepath, mode):
-        """Adds a file if it passes the blacklist."""
-        if self._blacklist(render(filepath)):
-          return
-        logging.debug('add_file(%d, %s, %s)', self.pid, filepath, mode)
-        # Note that filepath and not render(filepath) is added. It is because
-        # filepath could be something else than a string, like a RelativePath
-        # instance for dtrace logs.
-        modes = Results.File.ACCEPTABLE_MODES
-        old_mode = self.files.setdefault(filepath, mode)
-        if old_mode != mode and modes.index(old_mode) < modes.index(mode):
-          # Take the highest value.
-          self.files[filepath] = mode
-
-    def __init__(self, blacklist):
-      self.blacklist = blacklist
-      # Initial process.
-      self.root_process = None
-      # dict to accelerate process lookup, to not have to lookup the whole graph
-      # each time.
-      self._process_lookup = {}
-
-  class Tracer(object):
-    """During it's lifetime, the tracing subsystem is enabled."""
-    def __init__(self, logname):
-      self._logname = logname
-      self._lock = threading.RLock()
-      self._traces = []
-      self._initialized = True
-      self._scripts_to_cleanup = []
-
-    def trace(self, cmd, cwd, tracename, output):
-      """Runs the OS-specific trace program on an executable.
-
-      Arguments:
-      - cmd: The command (a list) to run.
-      - cwd: Current directory to start the child process in.
-      - tracename: Name of the trace in the logname file.
-      - output: If False, redirects output to PIPEs.
-
-      Returns a tuple (resultcode, output) and updates the internal trace
-      entries.
-      """
-      # The implementation adds an item to self._traces.
-      raise NotImplementedError(self.__class__.__name__)
-
-    def close(self, _timeout=None):
-      """Saves the meta-data in the logname file.
-
-      For kernel-based tracing, stops the tracing subsystem.
-
-      Must not be used manually when using 'with' construct.
-      """
-      with self._lock:
-        if not self._initialized:
-          raise TracingFailure(
-              'Called %s.close() on an uninitialized object' %
-                  self.__class__.__name__,
-              None, None, None)
-        try:
-          while self._scripts_to_cleanup:
-            try:
-              fs.remove(self._scripts_to_cleanup.pop())
-            except OSError as e:
-              logging.error('Failed to delete a temporary script: %s', e)
-          tools.write_json(self._logname, self._gen_logdata(), False)
-        finally:
-          self._initialized = False
-
-    def post_process_log(self):
-      """Post-processes the log so it becomes faster to load afterward.
-
-      Must not be used manually when using 'with' construct.
-      """
-      assert not self._initialized, 'Must stop tracing first.'
-
-    def _gen_logdata(self):
-      """Returns the data to be saved in the trace file."""
-      return  {
-        'traces': self._traces,
-      }
-
-    def __enter__(self):
-      """Enables 'with' statement."""
-      return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-      """Enables 'with' statement."""
-      self.close()
-      # If an exception was thrown, do not process logs.
-      if not exc_type:
-        self.post_process_log()
-
-  def get_tracer(self, logname):
-    """Returns an ApiBase.Tracer instance.
-
-    Initializes the tracing subsystem, which is a requirement for kernel-based
-    tracers. Only one tracer instance should be live at a time!
-
-    logname is the filepath to the json file that will contain the meta-data
-    about the logs.
-    """
-    return self.Tracer(logname)
-
-  @staticmethod
-  def clean_trace(logname):
-    """Deletes an old log."""
-    raise NotImplementedError()
-
-  @classmethod
-  def parse_log(cls, logname, blacklist, trace_name):
-    """Processes trace logs and returns the files opened and the files that do
-    not exist.
-
-    It does not track directories.
-
-    Arguments:
-      - logname: must be an absolute path.
-      - blacklist: must be a lambda.
-      - trace_name: optional trace to read, defaults to reading all traces.
-
-    Most of the time, files that do not exist are temporary test files that
-    should be put in /tmp instead. See http://crbug.com/116251.
-
-    Returns a list of dict with keys:
-      - results: A Results instance.
-      - trace: The corresponding tracename parameter provided to
-               get_tracer().trace().
-      - output: Output gathered during execution, if get_tracer().trace(...,
-                output=False) was used.
-    """
-    raise NotImplementedError(cls.__name__)
-
-
-class Strace(ApiBase):
-  """strace implies linux."""
-  @staticmethod
-  def load_filename(filename):
-    """Parses a filename in a log."""
-    # TODO(maruel): Be compatible with strace -x.
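-    # e.g. strace renders non-ASCII bytes as octal escapes, so a path logged
-    # as 'caf\303\251' decodes to u'caf\xe9' here.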
-    assert isinstance(filename, str)
-    out = ''
-    i = 0
-    while i < len(filename):
-      c = filename[i]
-      if c == '\\':
-        out += chr(int(filename[i+1:i+4], 8))
-        i += 4
-      else:
-        out += c
-        i += 1
-    # TODO(maruel): It's not necessarily true that the current code page is
-    # utf-8.
-    return out.decode('utf-8')
-
-  class Context(ApiBase.Context):
-    """Processes a strace log line and keeps the list of existent and non
-    existent files accessed.
-
-    Ignores directories.
-
-    Uses late-binding to processes the cwd of each process. The problem is that
-    strace generates one log file per process it traced but doesn't give any
-    information about which process was started when and by who. So process the
-    logs out of order and use late binding with RelativePath to be able to
-    deduce the initial directory of each process once all the logs are parsed.
-
-    TODO(maruel): Use the script even in the non-sudo case, so log parsing can
-    be done in two phase: first find the root process, then process the child
-    processes in order. With that, it should be possible to not use RelativePath
-    anymore. This would significantly simplify the code!
-    """
-    class Process(ApiBase.Context.Process):
-      """Represents the state of a process.
-
-      Contains all the information retrieved from the pid-specific log.
-      """
-      # Function names match ([a-z_0-9]+).
-      # This is the most common format: function(args) = result
-      RE_HEADER = re.compile(r'^([a-z_0-9]+)\((.*?)\)\s+= (.+)$')
-      # An interrupted function call, only grab the minimal header.
-      RE_UNFINISHED = re.compile(r'^([^\(]+)(.*) \<unfinished \.\.\.\>$')
-      # A resumed function call.
-      RE_RESUMED = re.compile(r'^<\.\.\. ([^ ]+) resumed> (.+)$')
-      # A process received a signal.
-      RE_SIGNAL = re.compile(r'^--- SIG[A-Z]+ .+ ---')
-      # A process didn't handle a signal. Ignore any junk appearing before,
-      # because the process was forcibly killed so it won't open any new file.
-      RE_KILLED = re.compile(
-          r'^.*\+\+\+ killed by ([A-Z]+)( \(core dumped\))? \+\+\+$')
-      # The process has exited.
-      RE_PROCESS_EXITED = re.compile(r'^\+\+\+ exited with (\d+) \+\+\+')
-      # A call was canceled. Ignore any prefix.
-      RE_UNAVAILABLE = re.compile(r'^.*\)\s*= \? <unavailable>$')
-      # The process has exited due to the ptrace sandbox. RE_PROCESS_EXITED will
-      # follow on next line.
-      RE_PTRACE = re.compile(r'^.*<ptrace\(SYSCALL\):No such process>$')
-      # Happens when strace fails to even get the function name.
-      UNNAMED_FUNCTION = '????'
-
-      # Corner case in Python: a decorator applied to member functions of the
-      # same class must not itself be a @staticmethod.
-      def parse_args(regexp, expect_zero):  # pylint: disable=E0213
-        """Automatically convert the str 'args' into a list of processed
-        arguments.
-
-        Arguments:
-        - regexp is used to parse args.
-        - expect_zero: one of True, False or None.
-          - True: will check for result.startswith('0') first and will ignore
-            the trace line completely otherwise. This is important because for
-            many functions, the regexp will not process if the call failed.
-          - False: will check for not result.startswith(('?', '-1')) for the
-            same reason than with True.
-          - None: ignore result.
-        """
-        def meta_hook(function):
-          assert function.__name__.startswith('handle_')
-          def hook(self, args, result):
-            if expect_zero is True and not result.startswith('0'):
-              return
-            if expect_zero is False and result.startswith(('?', '-1')):
-              return
-            match = re.match(regexp, args)
-            if not match:
-              raise TracingFailure(
-                  'Failed to parse %s(%s) = %s' %
-                  (function.__name__[len('handle_'):], args, result),
-                  None, None, None)
-            return function(self, match.groups(), result)
-          return hook
-        return meta_hook
-
-      class RelativePath(object):
-        """A late-bound relative path."""
-        def __init__(self, parent, value):
-          assert_is_renderable(parent)
-          self.parent = parent
-          assert (
-              value is None or
-              (isinstance(value, unicode) and not os.path.isabs(value)))
-          self.value = value
-          if self.value:
-            # TODO(maruel): On POSIX, '\\' is a valid character so remove this
-            # assert.
-            assert '\\' not in self.value, (repr(value), repr(self.value))
-
-        def render(self):
-          """Returns the current directory this instance is representing.
-
-          This function is used to return the late-bound value.
-          """
-          assert self.parent is not None
-          parent = render(self.parent)
-          if self.value:
-            return os.path.normpath(os.path.join(parent, self.value))
-          return parent
-
-      def __init__(self, root, pid):
-        """Keeps enough information to be able to guess the original process
-        root.
-
-        strace doesn't store which process was the initial process. So more
-        information needs to be kept so the graph can be reconstructed from the
-        flat map.
-        """
-        logging.info('%s(%d)' % (self.__class__.__name__, pid))
-        super(Strace.Context.Process, self).__init__(root.blacklist, pid, None)
-        assert isinstance(root, ApiBase.Context)
-        self._root = weakref.ref(root)
-        # The dict key is the function name of the pending call, like 'open'
-        # or 'execve'.
-        self._pending_calls = {}
-        self._line_number = 0
-        # Current directory when the process started.
-        if isinstance(self._root(), unicode):
-          self.initial_cwd = self._root()
-        else:
-          self.initial_cwd = self.RelativePath(self._root(), None)
-        self.parentid = None
-        self._done = False
-
-      def get_cwd(self):
-        """Returns the best known value of cwd."""
-        return self.cwd or self.initial_cwd
-
-      def render(self):
-        """Returns the string value of the RelativePath() object.
-
-        Used by RelativePath. Returns the initial directory and not the
-        current one since the current directory 'cwd' validity is time-limited.
-
-        The validity is only guaranteed once all the logs are processed.
-        """
-        return self.initial_cwd.render()
-
-      def on_line(self, line):
-        assert isinstance(line, str)
-        self._line_number += 1
-        try:
-          if self._done:
-            raise TracingFailure(
-                'Found a trace for a terminated process or corrupted log',
-                None, None, None)
-
-          if self.RE_SIGNAL.match(line):
-            # Ignore signals.
-            return
-
-          match = self.RE_KILLED.match(line)
-          if match:
-            # Converts a '+++ killed by Foo +++' trace into an exit_group().
-            self.handle_exit_group(match.group(1), None)
-            return
-
-          match = self.RE_PROCESS_EXITED.match(line)
-          if match:
-            # Converts a '+++ exited with 1 +++' trace into an exit_group()
-            self.handle_exit_group(match.group(1), None)
-            return
-
-          match = self.RE_UNFINISHED.match(line)
-          if match:
-            if match.group(1) in self._pending_calls:
-              raise TracingFailure(
-                  'Found two unfinished calls for the same function',
-                  None, None, None,
-                  self._pending_calls)
-            self._pending_calls[match.group(1)] = (
-                match.group(1) + match.group(2))
-            return
-
-          match = self.RE_UNAVAILABLE.match(line)
-          if match:
-            # This usually means a process was killed and a pending call was
-            # canceled.
-            # TODO(maruel): Look up the last exit_group() trace just above and
-            # make sure any self._pending_calls[anything] is properly flushed.
-            return
-
-          match = self.RE_PTRACE.match(line)
-          if match:
-            # Not sure what this means. Anyhow, the process died.
-            # TODO(maruel): Add note that only RE_PROCESS_EXITED is valid
-            # afterward.
-            return
-
-          match = self.RE_RESUMED.match(line)
-          if match:
-            if match.group(1) not in self._pending_calls:
-              raise TracingFailure(
-                  'Found a resumed call that was not logged as unfinished',
-                  None, None, None,
-                  self._pending_calls)
-            pending = self._pending_calls.pop(match.group(1))
-            # Reconstruct the line.
-            line = pending + match.group(2)
-
-          match = self.RE_HEADER.match(line)
-          if not match:
-            # The line is corrupted. It happens occasionally when a process is
-            # killed forcibly with activity going on. Assume the process died.
-            # No other line can be processed afterward.
-            logging.debug('%d is done: %s', self.pid, line)
-            self._done = True
-            return
-
-          if match.group(1) == self.UNNAMED_FUNCTION:
-            return
-
-          # It's a valid line, handle it.
-          handler = getattr(self, 'handle_%s' % match.group(1), None)
-          if not handler:
-            # Raises TracingFailure, so execution never goes past this point.
-            return self._handle_unknown(
-                match.group(1), match.group(2), match.group(3))
-          return handler(match.group(2), match.group(3))
-        except TracingFailure as e:
-          # Hack in the values since the handler could be a static function.
-          e.pid = self.pid
-          e.line = line
-          e.line_number = self._line_number
-          # Re-raise the modified exception.
-          raise
-        except (KeyError, NotImplementedError, ValueError) as e:
-          raise TracingFailure(
-              'Trace generated a %s exception: %s' % (
-                  e.__class__.__name__, str(e)),
-              self.pid,
-              self._line_number,
-              line,
-              e)
-
-      @parse_args(r'^\"(.+?)\", [FKORWX_|]+$', True)
-      def handle_access(self, args, _result):
-        self._handle_file(args[0], Results.File.TOUCHED)
-
-      @parse_args(r'^\"(.+?)\"$', True)
-      def handle_chdir(self, args, _result):
-        """Updates cwd."""
-        self.cwd = self._mangle(args[0])
-        logging.debug('handle_chdir(%d, %s)' % (self.pid, self.cwd))
-
-      @parse_args(r'^\"(.+?)\", (\d+), (\d+)$', False)
-      def handle_chown(self, args, _result):
-        # TODO(maruel): Look at result?
-        self._handle_file(args[0], Results.File.WRITE)
-
-      def handle_clone(self, _args, result):
-        self._handling_forking('clone', result)
-
-      def handle_close(self, _args, _result):
-        pass
-
-      @parse_args(r'^\"(.+?)\", (\d+)$', False)
-      def handle_chmod(self, args, _result):
-        self._handle_file(args[0], Results.File.WRITE)
-
-      @parse_args(r'^\"(.+?)\"$', False)
-      def handle_chroot(self, _args, _result):
-        # This is used by Chromium's sandbox. See
-        # https://chromium.googlesource.com/chromium/src/+/master/sandbox/linux/suid/sandbox.c
-        pass
-
-      @parse_args(r'^\"(.+?)\", (\d+)$', False)
-      def handle_creat(self, args, _result):
-        self._handle_file(args[0], Results.File.WRITE)
-
-      @parse_args(r'^\"(.+?)\", \[(.+)\], \[\/\* \d+ vars? \*\/\]$', True)
-      def handle_execve(self, args, _result):
-        # Even though in practice execve() doesn't return when it succeeds,
-        # strace still prints '0' as the result.
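-        # A typical matching strace line looks like:
-        #   execve("/usr/bin/python", ["python", "-V"], [/* 13 vars */]) = 0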
-        filepath = args[0]
-        self._handle_file(filepath, Results.File.READ)
-        self.executable = self._mangle(filepath)
-        try:
-          self.command = strace_process_quoted_arguments(args[1])
-        except ValueError as e:
-          raise TracingFailure(
-              'Failed to process command line argument:\n%s' % e.args[0],
-              None, None, None)
-
-      def handle_exit_group(self, _args, _result):
-        """Removes cwd."""
-        self.cwd = None
-
-      @parse_args(r'^(\d+|AT_FDCWD), \"(.*?)\", ([A-Z\_\|]+)(|, \d+)$', True)
-      def handle_faccessat(self, args, _results):
-        if args[0] == 'AT_FDCWD':
-          self._handle_file(args[1], Results.File.TOUCHED)
-        else:
-          raise NotImplementedError('Relative faccess not implemented.')
-
-      def handle_fallocate(self, _args, result):
-        pass
-
-      def handle_fork(self, args, result):
-        self._handle_unknown('fork', args, result)
-
-      @parse_args(r'^(\d+), \"(.*?)\", ({.+}), (\d+)$', True)
-      def handle_fstatat64(self, _args, _result):
-        # TODO(maruel): Handle.
-        pass
-
-      def handle_futex(self, _args, _result):
-        pass
-
-      @parse_args(r'^(\".+?\"|0x[a-f0-9]+), (\d+)$', False)
-      def handle_getcwd(self, args, _result):
-        # TODO(maruel): Resolve file handle.
-        if not args[0].startswith('0x'):
-          filepath = args[0][1:-1]
-          if os.path.isabs(filepath):
-            logging.debug('handle_getcwd(%d, %s)' % (self.pid, self.cwd))
-            if not isinstance(self.cwd, unicode):
-              # Take the occasion to reset the path.
-              self.cwd = self._mangle(filepath)
-            else:
-              # It should always match.
-              assert self.cwd == Strace.load_filename(filepath), (
-                  self.cwd, filepath)
-
-      @parse_args(r'^\"(.+?)\", \"(.+?)\"$', True)
-      def handle_link(self, args, _result):
-        self._handle_file(args[0], Results.File.READ)
-        self._handle_file(args[1], Results.File.WRITE)
-
-      @parse_args(r'^(\".+?\"|0x[a-f0-9]+), \{(?:.+?, )?\.\.\.\}$', True)
-      def handle_lstat(self, args, _result):
-        # TODO(maruel): Resolve file handle.
-        if not args[0].startswith('0x'):
-          self._handle_file(args[0][1:-1], Results.File.TOUCHED)
-
-      def handle_mkdir(self, _args, _result):
-        # We track content, not directories.
-        pass
-
-      @parse_args(r'^(\d+|AT_FDCWD), \".*?\", ({.+}), (\d+)$', True)
-      def handle_newfstatat(self, _args, _result):
-        # TODO(maruel): Handle
-        pass
-
-      @parse_args(r'^(\".*?\"|0x[a-f0-9]+), ([A-Z\_\|]+)(|, \d+)$', False)
-      def handle_open(self, args, _result):
-        if 'O_DIRECTORY' in args[1]:
-          return
-        # TODO(maruel): Resolve file handle.
-        if not args[0].startswith('0x'):
-          t = Results.File.READ if 'O_RDONLY' in args[1] else Results.File.WRITE
-          self._handle_file(args[0][1:-1], t)
-
-      @parse_args(
-          r'^(\d+|AT_FDCWD), (\".*?\"|0x[a-f0-9]+), ([A-Z\_\|]+)(|, \d+)$',
-          False)
-      def handle_openat(self, args, _result):
-        if 'O_DIRECTORY' in args[2]:
-          return
-        if args[0] == 'AT_FDCWD':
-          # TODO(maruel): Resolve file handle.
-          if not args[1].startswith('0x'):
-            t = (
-                Results.File.READ if 'O_RDONLY' in args[2]
-                else Results.File.WRITE)
-            self._handle_file(args[1][1:-1], t)
-        else:
-          # TODO(maruel): Implement relative open if necessary instead of the
-          # AT_FDCWD flag, let's hope not since this means tracking all active
-          # directory handles.
-          raise NotImplementedError('Relative open via openat not implemented.')
-
-      @parse_args(
-          r'^(\".+?\"|0x[a-f0-9]+), (?:\".+?\"(?:\.\.\.)?|0x[a-f0-9]+), '
-          r'\d+$',
-          False)
-      def handle_readlink(self, args, _result):
-        # TODO(maruel): Resolve file handle.
-        if not args[0].startswith('0x'):
-          self._handle_file(args[0][1:-1], Results.File.READ)
-
-      @parse_args(r'^\"(.+?)\", \"(.+?)\"$', True)
-      def handle_rename(self, args, _result):
-        self._handle_file(args[0], Results.File.READ)
-        self._handle_file(args[1], Results.File.WRITE)
-
-      def handle_rmdir(self, _args, _result):
-        # We track content, not directories.
-        pass
-
-      def handle_setxattr(self, _args, _result):
-        # TODO(maruel): self._handle_file(args[0], Results.File.WRITE)
-        pass
-
-      @parse_args(r'^(\".+?\"|0x[a-f0-9]+), \{.+?\}$', True)
-      def handle_stat(self, args, _result):
-        # TODO(maruel): Resolve file handle.
-        if not args[0].startswith('0x'):
-          self._handle_file(args[0][1:-1], Results.File.TOUCHED)
-
-      @parse_args(r'^(\".+?\"|0x[a-f0-9]+), \{.+?\}$', True)
-      def handle_stat64(self, args, _result):
-        if not args[0].startswith('0x'):
-          self._handle_file(args[0][1:-1], Results.File.TOUCHED)
-
-      @parse_args(r'^\"(.+?)\", \"(.+?)\"$', True)
-      def handle_symlink(self, args, _result):
-        self._handle_file(args[0], Results.File.TOUCHED)
-        self._handle_file(args[1], Results.File.WRITE)
-
-      @parse_args(
-          r'^([0-9xa-f]+), ([0-9xa-f]+), ([0-9xa-f]+), ([0-9xa-f]+), '
-          r'([0-9xa-f]+), ([0-9xa-f]+)$',
-          False)
-      def handle_syscall_317(self, _args, _result):
-        # move_pages()
-        pass
-
-      @parse_args(r'^\"(.+?)\", \d+', True)
-      def handle_truncate(self, args, _result):
-        self._handle_file(args[0], Results.File.WRITE)
-
-      def handle_unlink(self, _args, _result):
-        # In theory, the file had to be created anyway.
-        pass
-
-      def handle_unlinkat(self, _args, _result):
-        # In theory, the file had to be created anyway.
-        pass
-
-      def handle_statfs(self, _args, _result):
-        pass
-
-      def handle_utimensat(self, _args, _result):
-        pass
-
-      def handle_vfork(self, _args, result):
-        self._handling_forking('vfork', result)
-
-      @staticmethod
-      def _handle_unknown(function, args, result):
-        raise TracingFailure(
-            'Unexpected/unimplemented trace %s(%s)= %s' %
-            (function, args, result),
-            None, None, None)
-
-      def _handling_forking(self, name, result):
-        """Transfers cwd."""
-        if result.startswith(('?', '-1')):
-          # The call failed.
-          return
-        # Update the other process right away.
-        childpid = int(result)
-        if childpid < 20:
-          # Ignore, it's probably the Chromium sandbox.
-          return
-        child = self._root().get_or_set_proc(childpid)
-        if child.parentid is not None or childpid in self.children:
-          raise TracingFailure(
-              'Found internal inconsistency in process lifetime detection '
-              'during a %s()=%s call:\n%s' % (
-                name,
-                result,
-                sorted(self._root()._process_lookup)),
-              None, None, None)
-        # Copy the cwd object.
-        child.initial_cwd = self.get_cwd()
-        child.parentid = self.pid
-        # It is necessary because the logs are processed out of order.
-        self.children.append(child)
-
-      def _handle_file(self, filepath, mode):
-        filepath = self._mangle(filepath)
-        self.add_file(filepath, mode)
-
-      def _mangle(self, filepath):
-        """Decodes a filepath found in the log and convert it to a late-bound
-        path if necessary.
-
-        |filepath| is an strace 'encoded' string and the returned value is
-        either an unicode string if the path was absolute or a late bound path
-        otherwise.
-        """
-        filepath = Strace.load_filename(filepath)
-        if os.path.isabs(filepath):
-          return filepath
-        else:
-          if isinstance(self.get_cwd(), unicode):
-            return os.path.normpath(os.path.join(self.get_cwd(), filepath))
-          return self.RelativePath(self.get_cwd(), filepath)
-
-    def __init__(self, blacklist, root_pid, initial_cwd):
-      """|root_pid| may be None when the root process is not known.
-
-      In that case, a search is done after reading all the logs to figure out
-      the root process.
-      """
-      super(Strace.Context, self).__init__(blacklist)
-      assert_is_renderable(initial_cwd)
-      self.root_pid = root_pid
-      self.initial_cwd = initial_cwd
-
-    def render(self):
-      """Returns the string value of the initial cwd of the root process.
-
-      Used by RelativePath.
-      """
-      return self.initial_cwd
-
-    def on_line(self, pid, line):
-      """Transfers control into the Process.on_line() function."""
-      self.get_or_set_proc(pid).on_line(line.strip())
-
-    def to_results(self):
-      """If necessary, finds back the root process and verify consistency."""
-      if not self.root_pid:
-        # The non-sudo case. The traced process was started by strace itself,
-        # so the pid of the traced process is not known.
-        root = [p for p in self._process_lookup.itervalues() if not p.parentid]
-        if len(root) == 1:
-          self.root_process = root[0]
-          # Save it for later.
-          self.root_pid = self.root_process.pid
-        else:
-          raise TracingFailure(
-              'Found internal inconsistency in process lifetime detection '
-              'while finding the root process',
-              None,
-              None,
-              None,
-              self.root_pid,
-              sorted(self._process_lookup))
-      else:
-        # The sudo case. The traced process was started manually so its pid is
-        # known.
-        self.root_process = self._process_lookup.get(self.root_pid)
-      if not self.root_process:
-        raise TracingFailure(
-            'Found internal inconsistency in process lifetime detection '
-            'while finding the root process',
-            None,
-            None,
-            None,
-            self.root_pid,
-            sorted(self._process_lookup))
-      process = self.root_process.to_results_process()
-      if sorted(self._process_lookup) != sorted(p.pid for p in process.all):
-        raise TracingFailure(
-            'Found internal inconsistency in process lifetime detection '
-            'while looking for len(tree) == len(list)',
-            None,
-            None,
-            None,
-            sorted(self._process_lookup),
-            sorted(p.pid for p in process.all))
-      return Results(process)
-
-    def get_or_set_proc(self, pid):
-      """Returns the Context.Process instance for this pid or creates a new one.
-      """
-      if not pid or not isinstance(pid, int):
-        raise TracingFailure(
-            'Unexpected value for pid: %r' % pid,
-            pid,
-            None,
-            None,
-            pid)
-      if pid not in self._process_lookup:
-        self._process_lookup[pid] = self.Process(self, pid)
-      return self._process_lookup[pid]
-
-    @classmethod
-    def traces(cls):
-      """Returns the list of all handled traces to pass this as an argument to
-      strace.
-      """
-      prefix = 'handle_'
-      return [i[len(prefix):] for i in dir(cls.Process) if i.startswith(prefix)]
-
-  class Tracer(ApiBase.Tracer):
-    MAX_LEN = 256
-
-    def __init__(self, logname, use_sudo):
-      super(Strace.Tracer, self).__init__(logname)
-      self.use_sudo = use_sudo
-      if use_sudo:
-        # TODO(maruel): Use the jump script systematically to make it easy to
-        # figure out the root process, so RelativePath is not necessary anymore.
-        self._child_script = create_exec_thunk()
-        self._scripts_to_cleanup.append(self._child_script)
-
-    def trace(self, cmd, cwd, tracename, output):
-      """Runs strace on an executable.
-
-      When use_sudo=True, it is a 3-phase process: start the thunk, start
-      sudo strace with the pid of the thunk, and then have the thunk
-      os.execve() the process to trace.
-      """
-      logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
-      assert os.path.isabs(cmd[0]), cmd[0]
-      assert os.path.isabs(cwd), cwd
-      assert os.path.normpath(cwd) == cwd, cwd
-      with self._lock:
-        if not self._initialized:
-          raise TracingFailure(
-              'Called Tracer.trace() on an uninitialized object',
-              None, None, None, tracename)
-        assert tracename not in (i['trace'] for i in self._traces)
-      stdout = stderr = None
-      if output:
-        stdout = subprocess.PIPE
-        stderr = subprocess.STDOUT
-
-      # Ensure all file related APIs are hooked.
-      traces = ','.join(
-          [i for i in Strace.Context.traces()
-            if not i.startswith('syscall_')] +
-          ['file'])
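-      # This typically expands to something like
-      # '-e trace=access,chdir,chmod,...,file', i.e. every handled syscall
-      # plus strace's 'file' class.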
-      flags = [
-        # Each child process has its own trace file. It is necessary because
-        # strace may generate corrupted log file if multiple processes are
-        # heavily doing syscalls simultaneously.
-        '-ff',
-        # Reduce whitespace usage.
-        '-a1',
-        # hex encode non-ascii strings.
-        # TODO(maruel): '-x',
-        # TODO(maruel): '-ttt',
-        # Signals are unnecessary noise here. The parser can cope with them,
-        # but filtering them out reduces the output.
-        '-esignal=none',
-        # Print as much data as wanted.
-        '-s', '%d' % self.MAX_LEN,
-        '-e', 'trace=%s' % traces,
-        '-o', self._logname + '.' + tracename,
-      ]
-      logging.info('%s', flags)
-
-      if self.use_sudo:
-        pipe_r, pipe_w = os.pipe()
-        # Start the child process paused.
-        target_cmd = [sys.executable, self._child_script, str(pipe_r)] + cmd
-        logging.debug(' '.join(target_cmd))
-        child_proc = subprocess.Popen(
-            target_cmd,
-            stdin=subprocess.PIPE,
-            stdout=stdout,
-            stderr=stderr,
-            cwd=cwd)
-
-        # TODO(maruel): both processes must use the same UID for it to work
-        # without sudo. Look into why -p fails at the moment without sudo.
-        trace_cmd = [
-          'sudo',
-          'strace',
-          '-p', str(child_proc.pid),
-        ] + flags
-        logging.debug(' '.join(trace_cmd))
-        strace_proc = subprocess.Popen(
-            trace_cmd,
-            cwd=cwd,
-            stdin=subprocess.PIPE,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-
-        line = strace_proc.stderr.readline()
-        if not re.match(r'^Process \d+ attached \- interrupt to quit$', line):
-          # TODO(maruel): Raise an exception.
-          assert False, line
-
-        # Now fire the child process.
-        os.write(pipe_w, 'x')
-
-        out = child_proc.communicate()[0]
-        strace_out = strace_proc.communicate()[0]
-
-        # TODO(maruel): if strace_proc.returncode: Add an exception.
-        saved_out = strace_out if strace_proc.returncode else out
-        root_pid = child_proc.pid
-      else:
-        # Non-sudo case.
-        trace_cmd = [
-          'strace',
-        ] + flags + cmd
-        logging.debug(' '.join(trace_cmd))
-        child_proc = subprocess.Popen(
-            trace_cmd,
-            cwd=cwd,
-            stdin=subprocess.PIPE,
-            stdout=stdout,
-            stderr=stderr)
-        out = child_proc.communicate()[0]
-        # TODO(maruel): Walking the logs to figure out the root process would
-        # simplify parsing them a *lot*.
-        saved_out = out
-        # The trace reader will have to figure it out.
-        root_pid = None
-
-      with self._lock:
-        assert tracename not in (i['trace'] for i in self._traces)
-        self._traces.append(
-          {
-            'cmd': cmd,
-            'cwd': cwd,
-            'output': saved_out,
-            'pid': root_pid,
-            'trace': tracename,
-          })
-      return child_proc.returncode, out
-
-  def __init__(self, use_sudo=None):
-    super(Strace, self).__init__()
-    self.use_sudo = use_sudo
-
-  def get_tracer(self, logname):
-    return self.Tracer(logname, self.use_sudo)
-
-  @staticmethod
-  def clean_trace(logname):
-    if fs.isfile(logname):
-      fs.remove(logname)
-    # Also delete any pid specific file from previous traces.
-    for i in glob.iglob(logname + '.*'):
-      if i.rsplit('.', 1)[1].isdigit():
-        fs.remove(i)
-
-  @classmethod
-  def parse_log(cls, logname, blacklist, trace_name):
-    logging.info('parse_log(%s, ..., %s)', logname, trace_name)
-    assert os.path.isabs(logname)
-    data = tools.read_json(logname)
-    out = []
-    for item in data['traces']:
-      if trace_name and item['trace'] != trace_name:
-        continue
-      result = {
-        'output': item['output'],
-        'trace': item['trace'],
-      }
-      try:
-        context = cls.Context(blacklist, item['pid'], item['cwd'])
-        for pidfile in glob.iglob('%s.%s.*' % (logname, item['trace'])):
-          logging.debug('Reading %s', pidfile)
-          pid = pidfile.rsplit('.', 1)[1]
-          if pid.isdigit():
-            pid = int(pid)
-            found_line = False
-            for line in open(pidfile, 'rb'):
-              context.on_line(pid, line)
-              found_line = True
-            if not found_line:
-              # Ensures that a completely empty trace still creates the
-              # corresponding Process instance by logging a dummy line.
-              context.on_line(pid, '')
-          else:
-            logging.warning('Found unexpected file %s', pidfile)
-        result['results'] = context.to_results()
-      except TracingFailure:
-        result['exception'] = sys.exc_info()
-      out.append(result)
-    return out
-
-
-class Dtrace(ApiBase):
-  """Uses DTrace framework through dtrace. Requires root access.
-
-  Implies Mac OSX.
-
-  dtruss can't be used because it has compatibility issues with python.
-
-  Also, the pid->cwd handling needs to be done manually since OSX has no way to
-  get the absolute path of the 'cwd' dtrace variable from the probe.
-
-  Also, OSX doesn't populate curpsinfo->pr_psargs properly, see
-  https://discussions.apple.com/thread/1980539. So resort to handling execve()
-  manually.
-
-  errno is not printed in the log since this implementation currently only cares
-  about files that were successfully opened.
-  """
-  class Context(ApiBase.Context):
-    # Format: index pid function(args)
-    RE_HEADER = re.compile(r'^\d+ (\d+) ([a-zA-Z_\-]+)\((.*?)\)$')
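-    # e.g. '12 345 open("/etc/hosts", 0x0, 0x1b6)' dispatches to
-    # handle_open(345, '"/etc/hosts", 0x0, 0x1b6').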
-
-    # Arguments parsing.
-    RE_DTRACE_BEGIN = re.compile(r'^\"(.+?)\"$')
-    RE_CHDIR = re.compile(r'^\"(.+?)\"$')
-    RE_EXECVE = re.compile(r'^\"(.+?)\", \[(\d+), (.+)\]$')
-    RE_OPEN = re.compile(r'^\"(.+?)\", (0x[0-9a-z]+), (0x[0-9a-z]+)$')
-    RE_PROC_START = re.compile(r'^(\d+), \"(.+?)\", (\d+)$')
-    RE_RENAME = re.compile(r'^\"(.+?)\", \"(.+?)\"$')
-
-    # O_DIRECTORY is not defined on Windows and dtrace doesn't exist on Windows.
-    O_DIRECTORY = os.O_DIRECTORY if hasattr(os, 'O_DIRECTORY') else None
-    O_RDWR = os.O_RDWR
-    O_WRONLY = os.O_WRONLY
-
-    class Process(ApiBase.Context.Process):
-      def __init__(self, *args):
-        super(Dtrace.Context.Process, self).__init__(*args)
-        self.cwd = self.initial_cwd
-
-    def __init__(self, blacklist, thunk_pid, initial_cwd):
-      logging.info(
-          '%s(%d, %s)' % (self.__class__.__name__, thunk_pid, initial_cwd))
-      super(Dtrace.Context, self).__init__(blacklist)
-      assert isinstance(initial_cwd, unicode), initial_cwd
-      # Process ID of the temporary script created by create_subprocess_thunk().
-      self._thunk_pid = thunk_pid
-      self._initial_cwd = initial_cwd
-      self._line_number = 0
-
-    def on_line(self, line):
-      assert isinstance(line, unicode), line
-      self._line_number += 1
-      match = self.RE_HEADER.match(line)
-      if not match:
-        raise TracingFailure(
-            'Found malformed line: %s' % line,
-            None,
-            self._line_number,
-            line)
-      fn = getattr(
-          self,
-          'handle_%s' % match.group(2).replace('-', '_'),
-          self._handle_ignored)
-      # It is guaranteed to succeed because of the regexp. Or at least I thought
-      # it would.
-      pid = int(match.group(1))
-      try:
-        return fn(pid, match.group(3))
-      except TracingFailure as e:
-        # Hack in the values since the handler could be a static function.
-        e.pid = pid
-        e.line = line
-        e.line_number = self._line_number
-        # Re-raise the modified exception.
-        raise
-      except (KeyError, NotImplementedError, ValueError) as e:
-        raise TracingFailure(
-            'Trace generated a %s exception: %s' % (
-                e.__class__.__name__, str(e)),
-            pid,
-            self._line_number,
-            line,
-            e)
-
-    def to_results(self):
-      process = self.root_process.to_results_process()
-      # Internal consistency check.
-      if sorted(self._process_lookup) != sorted(p.pid for p in process.all):
-        raise TracingFailure(
-            'Found internal inconsistency in process lifetime detection '
-            'while looking for len(tree) == len(list)',
-            None,
-            None,
-            None,
-            sorted(self._process_lookup),
-            sorted(p.pid for p in process.all))
-      return Results(process)
-
-    def handle_dtrace_BEGIN(self, _pid, args):
-      if not self.RE_DTRACE_BEGIN.match(args):
-        raise TracingFailure(
-            'Found internal inconsistency in dtrace_BEGIN log line',
-            None, None, None)
-
-    def handle_proc_start(self, pid, args):
-      """Transfers cwd.
-
-      The dtrace script already takes care of only tracing the processes that
-      are child of the traced processes so there is no need to verify the
-      process hierarchy.
-      """
-      if pid in self._process_lookup:
-        raise TracingFailure(
-            'Found internal inconsistency in proc_start: %d started twice' %
-                pid,
-            None, None, None)
-      match = self.RE_PROC_START.match(args)
-      if not match:
-        raise TracingFailure(
-            'Failed to parse arguments: %s' % args,
-            None, None, None)
-      ppid = int(match.group(1))
-      if ppid == self._thunk_pid and not self.root_process:
-        proc = self.root_process = self.Process(
-            self.blacklist, pid, self._initial_cwd)
-      elif ppid in self._process_lookup:
-        proc = self.Process(self.blacklist, pid, self._process_lookup[ppid].cwd)
-        self._process_lookup[ppid].children.append(proc)
-      else:
-        # Another process tree, ignore.
-        return
-      self._process_lookup[pid] = proc
-      logging.debug(
-          'New child: %s -> %d  cwd:%s' %
-          (ppid, pid, render(proc.initial_cwd)))
-
-    def handle_proc_exit(self, pid, _args):
-      """Removes cwd."""
-      if pid in self._process_lookup:
-        # self._thunk_pid itself is not traced, and neither are processes
-        # from other process trees.
-        self._process_lookup[pid].cwd = None
-
-    def handle_execve(self, pid, args):
-      """Sets the process' executable.
-
-      TODO(maruel): Read command line arguments.  See
-      https://discussions.apple.com/thread/1980539 for an example.
-      https://gist.github.com/1242279
-
-      Will have to put the answer at http://stackoverflow.com/questions/7556249.
-      :)
-      """
-      if pid not in self._process_lookup:
-        # Another process tree, ignore.
-        return
-      match = self.RE_EXECVE.match(args)
-      if not match:
-        raise TracingFailure(
-            'Failed to parse arguments: %r' % args,
-            None, None, None)
-      proc = self._process_lookup[pid]
-      proc.executable = match.group(1)
-      self._handle_file(pid, proc.executable, Results.File.READ)
-      proc.command = self.process_escaped_arguments(match.group(3))
-      if int(match.group(2)) != len(proc.command):
-        raise TracingFailure(
-            'Failed to parse execve() arguments: %s' % args,
-            None, None, None)
-
-    def handle_chdir(self, pid, args):
-      """Updates cwd."""
-      if pid not in self._process_lookup:
-        # Another process tree, ignore.
-        return
-      cwd = self.RE_CHDIR.match(args).group(1)
-      if not cwd.startswith('/'):
-        cwd2 = os.path.join(self._process_lookup[pid].cwd, cwd)
-        logging.debug('handle_chdir(%d, %s) -> %s' % (pid, cwd, cwd2))
-      else:
-        logging.debug('handle_chdir(%d, %s)' % (pid, cwd))
-        cwd2 = cwd
-      self._process_lookup[pid].cwd = cwd2
-
-    def handle_open_nocancel(self, pid, args):
-      """Redirects to handle_open()."""
-      return self.handle_open(pid, args)
-
-    def handle_open(self, pid, args):
-      if pid not in self._process_lookup:
-        # Another process tree, ignore.
-        return
-      match = self.RE_OPEN.match(args)
-      if not match:
-        raise TracingFailure(
-            'Failed to parse arguments: %s' % args,
-            None, None, None)
-      flag = int(match.group(2), 16)
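-      # The D script logs the flags in hex; e.g. 0x0 (O_RDONLY) maps to READ
-      # below, while 0x1 (O_WRONLY) or 0x2 (O_RDWR) maps to WRITE.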
-      if self.O_DIRECTORY & flag == self.O_DIRECTORY:
-        # Ignore directories.
-        return
-      self._handle_file(
-          pid,
-          match.group(1),
-          Results.File.READ if not ((self.O_RDWR | self.O_WRONLY) & flag)
-            else Results.File.WRITE)
-
-    def handle_rename(self, pid, args):
-      if pid not in self._process_lookup:
-        # Another process tree, ignore.
-        return
-      match = self.RE_RENAME.match(args)
-      if not match:
-        raise TracingFailure(
-            'Failed to parse arguments: %s' % args,
-            None, None, None)
-      self._handle_file(pid, match.group(1), Results.File.READ)
-      self._handle_file(pid, match.group(2), Results.File.WRITE)
-
-    def _handle_file(self, pid, filepath, mode):
-      if not filepath.startswith('/'):
-        filepath = os.path.join(self._process_lookup[pid].cwd, filepath)
-      # We can get '..' in the path.
-      filepath = os.path.normpath(filepath)
-      # Sadly, still need to filter out directories here;
-      # saw open_nocancel(".", 0, 0) = 0 lines.
-      if fs.isdir(filepath):
-        return
-      self._process_lookup[pid].add_file(filepath, mode)
-
-    def handle_ftruncate(self, pid, args):
-      """Just used as a signal to kill dtrace, ignoring."""
-      pass
-
-    @staticmethod
-    def _handle_ignored(pid, args):
-      """Is called for all the event traces that are not handled."""
-      raise NotImplementedError('Please implement me')
-
-    @staticmethod
-    def process_escaped_arguments(text):
-      """Extracts escaped arguments on a string and return the arguments as a
-      list.
-
-      Implemented as an automaton.
-
-      Example:
-        With text = '\\001python2.7\\001-c\\001print(\\"hi\\")\\0', the
-        function will return ['python2.7', '-c', 'print("hi")']
-      """
-      if not text.endswith('\\0'):
-        raise ValueError('String is not null terminated: %r' % text, text)
-      text = text[:-2]
-
-      def unescape(x):
-        """Replaces '\\' with '\' and '\?' (where ? is anything) with ?."""
-        out = []
-        escaped = False
-        for i in x:
-          if i == '\\' and not escaped:
-            escaped = True
-            continue
-          escaped = False
-          out.append(i)
-        return ''.join(out)
-
-      return [unescape(i) for i in text.split('\\001')]
-
-  class Tracer(ApiBase.Tracer):
-    # pylint: disable=C0301
-    #
-    # To understand the following code, you'll want to take a look at:
-    # http://developers.sun.com/solaris/articles/dtrace_quickref/dtrace_quickref.html
-    # https://wikis.oracle.com/display/DTrace/Variables
-    # http://docs.oracle.com/cd/E19205-01/820-4221/
-    #
-    # 0. Dump all the valid probes into a text file. It is important, you
-    #    want to redirect into a file and you don't want to constantly 'sudo'.
-    # $ sudo dtrace -l > probes.txt
-    #
-    # 1. Count the number of probes:
-    # $ wc -l probes.txt
-    # 81823  # On OSX 10.7, including 1 header line.
-    #
-    # 2. List providers, intentionally skipping all the 'syspolicy10925' and the
-    #    likes and skipping the header with NR>1:
-    # $ awk 'NR>1 { print $2 }' probes.txt | sort | uniq | grep -v '[[:digit:]]'
-    # dtrace
-    # fbt
-    # io
-    # ip
-    # lockstat
-    # mach_trap
-    # proc
-    # profile
-    # sched
-    # syscall
-    # tcp
-    # vminfo
-    #
-    # 3. List of valid probes:
-    # $ grep syscall probes.txt | less
-    #    or use dtrace directly:
-    # $ sudo dtrace -l -P syscall | less
-    #
-    # trackedpid is an associative array where its value can be 0, 1 or 2.
-    # 0 is for untracked processes and is the default value for items not
-    #   in the associative array.
-    # 1 is for tracked processes.
-    # 2 is for the script created by create_subprocess_thunk() only. It is not
-    #   tracked itself but all its descendants are.
-    #
-    # The script will kill itself only once waiting_to_die == 1 and
-    # current_processes == 0, so that both getlogin() was called and that
-    # all traced processes exited.
-    #
-    # TODO(maruel): Use cacheable predicates. See
-    # https://wikis.oracle.com/display/DTrace/Performance+Considerations
-    D_CODE = """
-      dtrace:::BEGIN {
-        waiting_to_die = 0;
-        current_processes = 0;
-        logindex = 0;
-        printf("%d %d %s_%s(\\"%s\\")\\n",
-               logindex, PID, probeprov, probename, SCRIPT);
-        logindex++;
-      }
-
-      proc:::start /trackedpid[ppid]/ {
-        trackedpid[pid] = 1;
-        current_processes += 1;
-        printf("%d %d %s_%s(%d, \\"%s\\", %d)\\n",
-               logindex, pid, probeprov, probename,
-               ppid,
-               execname,
-               current_processes);
-        logindex++;
-      }
-      /* Should use SCRIPT but there is no access to this variable at that
-       * point. */
-      proc:::start /ppid == PID && execname == "Python"/ {
-        trackedpid[pid] = 2;
-        current_processes += 1;
-        printf("%d %d %s_%s(%d, \\"%s\\", %d)\\n",
-               logindex, pid, probeprov, probename,
-               ppid,
-               execname,
-               current_processes);
-        logindex++;
-      }
-      proc:::exit /trackedpid[pid] &&
-                  current_processes == 1 &&
-                  waiting_to_die == 1/ {
-        trackedpid[pid] = 0;
-        current_processes -= 1;
-        printf("%d %d %s_%s(%d)\\n",
-               logindex, pid, probeprov, probename,
-               current_processes);
-        logindex++;
-        exit(0);
-      }
-      proc:::exit /trackedpid[pid]/ {
-        trackedpid[pid] = 0;
-        current_processes -= 1;
-        printf("%d %d %s_%s(%d)\\n",
-               logindex, pid, probeprov, probename,
-               current_processes);
-        logindex++;
-      }
-
-      /* Use an arcane function to detect when we need to die */
-      syscall::ftruncate:entry /pid == PID && arg0 == FILE_ID/ {
-        waiting_to_die = 1;
-        printf("%d %d %s()\\n", logindex, pid, probefunc);
-        logindex++;
-      }
-      syscall::ftruncate:entry /
-          pid == PID && arg0 == FILE_ID && current_processes == 0/ {
-        exit(0);
-      }
-
-      syscall::open*:entry /trackedpid[pid] == 1/ {
-        self->open_arg0 = arg0;
-        self->open_arg1 = arg1;
-        self->open_arg2 = arg2;
-      }
-      syscall::open*:return /trackedpid[pid] == 1 && errno == 0/ {
-        this->open_arg0 = copyinstr(self->open_arg0);
-        printf("%d %d %s(\\"%s\\", 0x%x, 0x%x)\\n",
-               logindex, pid, probefunc,
-               this->open_arg0,
-               self->open_arg1,
-               self->open_arg2);
-        logindex++;
-        this->open_arg0 = 0;
-      }
-      syscall::open*:return /trackedpid[pid] == 1/ {
-        self->open_arg0 = 0;
-        self->open_arg1 = 0;
-        self->open_arg2 = 0;
-      }
-
-      syscall::rename:entry /trackedpid[pid] == 1/ {
-        self->rename_arg0 = arg0;
-        self->rename_arg1 = arg1;
-      }
-      syscall::rename:return /trackedpid[pid] == 1 && errno == 0/ {
-        this->rename_arg0 = copyinstr(self->rename_arg0);
-        this->rename_arg1 = copyinstr(self->rename_arg1);
-        printf("%d %d %s(\\"%s\\", \\"%s\\")\\n",
-               logindex, pid, probefunc,
-               this->rename_arg0,
-               this->rename_arg1);
-        logindex++;
-        this->rename_arg0 = 0;
-        this->rename_arg1 = 0;
-      }
-      syscall::rename:return /trackedpid[pid] == 1/ {
-        self->rename_arg0 = 0;
-        self->rename_arg1 = 0;
-      }
-
-      /* Track chdir, it's painful because it is only receiving relative path.
-       */
-      syscall::chdir:entry /trackedpid[pid] == 1/ {
-        self->chdir_arg0 = arg0;
-      }
-      syscall::chdir:return /trackedpid[pid] == 1 && errno == 0/ {
-        this->chdir_arg0 = copyinstr(self->chdir_arg0);
-        printf("%d %d %s(\\"%s\\")\\n",
-               logindex, pid, probefunc,
-               this->chdir_arg0);
-        logindex++;
-        this->chdir_arg0 = 0;
-      }
-      syscall::chdir:return /trackedpid[pid] == 1/ {
-        self->chdir_arg0 = 0;
-      }
-      """
-
-    # execve-specific code, tends to throw a lot of exceptions.
-    D_CODE_EXECVE = """
-      /* Finally what we care about! */
-      syscall::exec*:entry /trackedpid[pid]/ {
-        self->exec_arg0 = copyinstr(arg0);
-        /* Incrementally probe for a NULL in the argv parameter of execve() to
-         * figure out argc. */
-        /* TODO(maruel): Skip the remaining copyin() when a NULL pointer was
-         * found. */
-        self->exec_argc = 0;
-        /* Probe for argc==1 */
-        this->exec_argv = (user_addr_t*)copyin(
-             arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
-        self->exec_argc = this->exec_argv[self->exec_argc] ?
-            (self->exec_argc + 1) : self->exec_argc;
-
-        /* Probe for argc==2 */
-        this->exec_argv = (user_addr_t*)copyin(
-             arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
-        self->exec_argc = this->exec_argv[self->exec_argc] ?
-            (self->exec_argc + 1) : self->exec_argc;
-
-        /* Probe for argc==3 */
-        this->exec_argv = (user_addr_t*)copyin(
-             arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
-        self->exec_argc = this->exec_argv[self->exec_argc] ?
-            (self->exec_argc + 1) : self->exec_argc;
-
-        /* Probe for argc==4 */
-        this->exec_argv = (user_addr_t*)copyin(
-             arg1, sizeof(user_addr_t) * (self->exec_argc + 1));
-        self->exec_argc = this->exec_argv[self->exec_argc] ?
-            (self->exec_argc + 1) : self->exec_argc;
-
-        /* Copy the inputs strings since there is no guarantee they'll be
-         * present after the call completed. */
-        self->exec_argv0 = (self->exec_argc > 0) ?
-            copyinstr(this->exec_argv[0]) : "";
-        self->exec_argv1 = (self->exec_argc > 1) ?
-            copyinstr(this->exec_argv[1]) : "";
-        self->exec_argv2 = (self->exec_argc > 2) ?
-            copyinstr(this->exec_argv[2]) : "";
-        self->exec_argv3 = (self->exec_argc > 3) ?
-            copyinstr(this->exec_argv[3]) : "";
-        this->exec_argv = 0;
-      }
-      syscall::exec*:return /trackedpid[pid] && errno == 0/ {
-        /* We need to join strings here, as using multiple printf() would
-         * cause tearing when multiple threads/processes are traced.
-         * Since it is impossible to escape a string and join it to another one,
-         * like sprintf("%s%S", previous, more), use hackery.
-         * Each of the elements are split with a \\1. \\0 cannot be used because
-         * it is simply ignored. This will conflict with any program putting a
-         * \\1 in their execve() string but this should be "rare enough" */
-        this->args = "";
-        /* Process exec_argv[0] */
-        this->args = strjoin(
-            this->args, (self->exec_argc > 0) ? self->exec_argv0 : "");
-
-        /* Process exec_argv[1] */
-        this->args = strjoin(
-            this->args, (self->exec_argc > 1) ? "\\1" : "");
-        this->args = strjoin(
-            this->args, (self->exec_argc > 1) ? self->exec_argv1 : "");
-
-        /* Process exec_argv[2] */
-        this->args = strjoin(
-            this->args, (self->exec_argc > 2) ? "\\1" : "");
-        this->args = strjoin(
-            this->args, (self->exec_argc > 2) ? self->exec_argv2 : "");
-
-        /* Process exec_argv[3] */
-        this->args = strjoin(
-            this->args, (self->exec_argc > 3) ? "\\1" : "");
-        this->args = strjoin(
-            this->args, (self->exec_argc > 3) ? self->exec_argv3 : "");
-
-        /* Prints self->exec_argc to permit verifying the internal
-         * consistency since this code is quite fishy. */
-        printf("%d %d %s(\\"%s\\", [%d, %S])\\n",
-               logindex, pid, probefunc,
-               self->exec_arg0,
-               self->exec_argc,
-               this->args);
-        logindex++;
-        this->args = 0;
-      }
-      syscall::exec*:return /trackedpid[pid]/ {
-        self->exec_arg0 = 0;
-        self->exec_argc = 0;
-        self->exec_argv0 = 0;
-        self->exec_argv1 = 0;
-        self->exec_argv2 = 0;
-        self->exec_argv3 = 0;
-      }
-      """
-
-    # Code currently not used.
-    D_EXTRANEOUS = """
-      /* This is a good learning experience, since it traces a lot of things
-       * related to the process and child processes.
-       * Warning: it generates a gigantic log. For example, tracing
-       * "data/trace_inputs/child1.py --child" generates a 2mb log and takes
-       * several minutes to execute.
-       */
-      /*
-      mach_trap::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
-        printf("%d %d %s_%s() = %d\\n",
-               logindex, pid, probeprov, probefunc, errno);
-        logindex++;
-      }
-      proc::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
-        printf("%d %d %s_%s() = %d\\n",
-               logindex, pid, probeprov, probefunc, errno);
-        logindex++;
-      }
-      sched::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
-        printf("%d %d %s_%s() = %d\\n",
-               logindex, pid, probeprov, probefunc, errno);
-        logindex++;
-      }
-      syscall::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
-        printf("%d %d %s_%s() = %d\\n",
-               logindex, pid, probeprov, probefunc, errno);
-        logindex++;
-      }
-      vminfo::: /trackedpid[pid] == 1 || trackedpid[ppid]/ {
-        printf("%d %d %s_%s() = %d\\n",
-               logindex, pid, probeprov, probefunc, errno);
-        logindex++;
-      }
-      */
-      /* TODO(maruel): *stat* functions and friends
-        syscall::access:return,
-        syscall::chdir:return,
-        syscall::chflags:return,
-        syscall::chown:return,
-        syscall::chroot:return,
-        syscall::getattrlist:return,
-        syscall::getxattr:return,
-        syscall::lchown:return,
-        syscall::lstat64:return,
-        syscall::lstat:return,
-        syscall::mkdir:return,
-        syscall::pathconf:return,
-        syscall::readlink:return,
-        syscall::removexattr:return,
-        syscall::setxattr:return,
-        syscall::stat64:return,
-        syscall::stat:return,
-        syscall::truncate:return,
-        syscall::unlink:return,
-        syscall::utimes:return,
-      */
-      """
-
-    def __init__(self, logname, use_sudo):
-      """Starts the log collection with dtrace.
-
-      Requires root access or chmod 4555 on dtrace. dtrace is asynchronous so
-      this needs to wait for dtrace to be "warmed up".
-      """
-      super(Dtrace.Tracer, self).__init__(logname)
-      # This script is used as a signal to figure out the root process.
-      self._signal_script = create_subprocess_thunk()
-      self._scripts_to_cleanup.append(self._signal_script)
-      # This unique dummy temp file is used to signal the dtrace script that it
-      # should stop as soon as all the child processes are done. A bit hackish,
-      # but it works well enough.
-      self._dummy_file_id, self._dummy_file_name = tempfile.mkstemp(
-          prefix=u'trace_signal_file')
-
-      dtrace_path = '/usr/sbin/dtrace'
-      if not fs.isfile(dtrace_path):
-        dtrace_path = 'dtrace'
-      elif use_sudo is None and (fs.stat(dtrace_path).st_mode & stat.S_ISUID):
-        # No need to sudo. For those following at home, don't do that.
-        use_sudo = False
-
-      # Note: do not use the -p flag. It's useless if the initial process quits
-      # too fast, resulting in missing traces from the grand-children. The D
-      # code manages the dtrace lifetime itself.
-      trace_cmd = [
-        dtrace_path,
-        # Use a larger buffer if getting 'out of scratch space' errors.
-        # Ref: https://wikis.oracle.com/display/DTrace/Options+and+Tunables
-        '-b', '10m',
-        '-x', 'dynvarsize=10m',
-        #'-x', 'dtrace_global_maxsize=1m',
-        '-x', 'evaltime=exec',
-        '-o', '/dev/stderr',
-        '-q',
-        '-n', self._get_dtrace_code(),
-      ]
-      if use_sudo is not False:
-        trace_cmd.insert(0, 'sudo')
-
-      with fs.open(self._logname + '.log', 'wb') as logfile:
-        self._dtrace = subprocess.Popen(
-            trace_cmd, stdout=logfile, stderr=subprocess.STDOUT)
-      logging.debug('Started dtrace pid: %d' % self._dtrace.pid)
-
-      # Reads until one line is printed, which signifies dtrace is up and ready.
-      with fs.open(self._logname + '.log', 'rb') as logfile:
-        while 'dtrace_BEGIN' not in logfile.readline():
-          if self._dtrace.poll() is not None:
-            # Do a busy wait. :/
-            break
-      logging.debug('dtrace started')
-
-    def _get_dtrace_code(self):
-      """Sets up the D code to implement child process tracking.
-
-      Injects the cookie in the script so it knows when to stop.
-
-      The script will detect any instance of the script created with
-      create_subprocess_thunk() and will start tracing it.
-      """
-      out = (
-          'inline int PID = %d;\n'
-          'inline string SCRIPT = "%s";\n'
-          'inline int FILE_ID = %d;\n'
-          '\n'
-          '%s') % (
-              os.getpid(),
-              self._signal_script,
-              self._dummy_file_id,
-              self.D_CODE)
-      if os.environ.get('TRACE_INPUTS_DTRACE_ENABLE_EXECVE') == '1':
-        # Do not enable by default since it tends to spew dtrace: error lines
-        # because the execve() parameters are not in valid memory at the time of
-        # logging.
-        # TODO(maruel): Find a way to make this reliable since it's useful but
-        # only works in limited/trivial use cases for now.
-        out += self.D_CODE_EXECVE
-      return out
-
-    def trace(self, cmd, cwd, tracename, output):
-      """Runs dtrace on an executable.
-
-      Since dtruss is broken when it starts the process itself or when tracing
-      child processes, this code starts a wrapper process generated with
-      create_subprocess_thunk(), which in turn starts the executable to trace.
-      """
-      logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
-      assert os.path.isabs(cmd[0]), cmd[0]
-      assert os.path.isabs(cwd), cwd
-      assert os.path.normpath(cwd) == cwd, cwd
-      with self._lock:
-        if not self._initialized:
-          raise TracingFailure(
-              'Called Tracer.trace() on an uninitialized object',
-              None, None, None, tracename)
-        assert tracename not in (i['trace'] for i in self._traces)
-
-      # Starts the script wrapper to start the child process. This signals the
-      # dtrace script that this process is to be traced.
-      stdout = stderr = None
-      if output:
-        stdout = subprocess.PIPE
-        stderr = subprocess.STDOUT
-      child_cmd = [
-        sys.executable,
-        self._signal_script,
-        tracename,
-      ]
-      # Call a dummy function so that dtrace knows I'm about to launch a process
-      # that needs to be traced.
-      # Yummy.
-      child = subprocess.Popen(
-          child_cmd + tools.fix_python_path(cmd),
-          stdin=subprocess.PIPE,
-          stdout=stdout,
-          stderr=stderr,
-          cwd=cwd)
-      logging.debug('Started child pid: %d' % child.pid)
-
-      out = child.communicate()[0]
-      # This doesn't mean tracing is done; one of the grand-child processes may
-      # still be alive. It will be tracked by the dtrace script.
-
-      with self._lock:
-        assert tracename not in (i['trace'] for i in self._traces)
-        self._traces.append(
-          {
-            'cmd': cmd,
-            'cwd': cwd,
-            'pid': child.pid,
-            'output': out,
-            'trace': tracename,
-          })
-      return child.returncode, out
-
-    def close(self, timeout=None):
-      """Terminates dtrace."""
-      logging.debug('close(%s)' % timeout)
-      try:
-        try:
-          super(Dtrace.Tracer, self).close(timeout)
-          # Signal dtrace that it should stop now.
-          # ftruncate doesn't exist on Windows.
-          os.ftruncate(self._dummy_file_id, 0)  # pylint: disable=E1101
-          if timeout:
-            start = time.time()
-            # Use polling. :/
-            while (self._dtrace.poll() is None and
-                   (time.time() - start) < timeout):
-              time.sleep(0.1)
-            self._dtrace.kill()
-          self._dtrace.wait()
-        finally:
-          # Make sure to kill it in any case.
-          if self._dtrace.poll() is None:
-            try:
-              self._dtrace.kill()
-              self._dtrace.wait()
-            except OSError:
-              pass
-
-        if self._dtrace.returncode != 0:
-          # Warn about any dtrace failure but basically ignore it.
-          print 'dtrace failure: %s' % self._dtrace.returncode
-      finally:
-        os.close(self._dummy_file_id)
-        fs.remove(self._dummy_file_name)
-
-    def post_process_log(self):
-      """Sorts the log back into the order in which each call occurred.
-
-      dtrace doesn't save the buffer in strict order since it keeps one buffer
-      per CPU.
-      """
-      super(Dtrace.Tracer, self).post_process_log()
-      logname = self._logname + '.log'
-      with fs.open(logname, 'rb') as logfile:
-        lines = [l for l in logfile if l.strip()]
-      errors = [l for l in lines if l.startswith('dtrace:')]
-      if errors:
-        raise TracingFailure(
-            'Found errors in the trace: %s' % '\n'.join(errors),
-            None, None, None, logname)
-      try:
-        lines = sorted(lines, key=lambda l: int(l.split(' ', 1)[0]))
-      except ValueError:
-        raise TracingFailure(
-            'Found errors in the trace: %s' % '\n'.join(
-                l for l in lines if l.split(' ', 1)[0].isdigit()),
-            None, None, None, logname)
-      with fs.open(logname, 'wb') as logfile:
-        logfile.write(''.join(lines))
-
-  def __init__(self, use_sudo=None):
-    super(Dtrace, self).__init__()
-    self.use_sudo = use_sudo
-
-  def get_tracer(self, logname):
-    return self.Tracer(logname, self.use_sudo)
-
-  @staticmethod
-  def clean_trace(logname):
-    for ext in ('', '.log'):
-      if fs.isfile(logname + ext):
-        fs.remove(logname + ext)
-
-  @classmethod
-  def parse_log(cls, logname, blacklist, trace_name):
-    logging.info('parse_log(%s, ..., %s)', logname, trace_name)
-    assert os.path.isabs(logname)
-
-    def blacklist_more(filepath):
-      # All the HFS metadata is in the form /.vol/...
-      return blacklist(filepath) or re.match(r'^\/\.vol\/.+$', filepath)
-
-    data = tools.read_json(logname)
-    out = []
-    for item in data['traces']:
-      if trace_name and item['trace'] != trace_name:
-        continue
-      result = {
-        'output': item['output'],
-        'trace': item['trace'],
-      }
-      try:
-        context = cls.Context(blacklist_more, item['pid'], item['cwd'])
-        # It's fine to assume the file is UTF-8: OSX enforces file names to be
-        # valid UTF-8 and we control the log output.
-        for line in codecs.open(logname + '.log', 'rb', encoding='utf-8'):
-          context.on_line(line)
-        result['results'] = context.to_results()
-      except TracingFailure:
-        result['exception'] = sys.exc_info()
-      out.append(result)
-    return out
-
-
-class LogmanTrace(ApiBase):
-  """Uses the native Windows ETW based tracing functionality to trace a child
-  process.
-
-  Caveat: this implementation doesn't track cwd or initial_cwd. This is because
-  the Windows Kernel doesn't have a concept of 'current working directory' at
-  all. A Win32 process has a map of current directories, one per drive letter
-  and it is managed by the user mode kernel32.dll. In kernel, a file is always
-  opened relative to another file_object or as an absolute path. All the current
-  working directory logic is done in user mode.
-  """
-  class Context(ApiBase.Context):
-    """Processes an ETW log line and keeps the list of existent and
-    non-existent files accessed.
-
-    Ignores directories.
-    """
-    # These indexes are for the stripped version in json.
-    EVENT_NAME = 0
-    TYPE = 1
-    PID = 2
-    TID = 3
-    PROCESSOR_ID = 4
-    TIMESTAMP = 5
-    USER_DATA = 6
-
-    class Process(ApiBase.Context.Process):
-      def __init__(self, *args):
-        super(LogmanTrace.Context.Process, self).__init__(*args)
-        # Handle file objects that succeeded.
-        self.file_objects = {}
-
-    def __init__(self, blacklist, thunk_pid, trace_name, thunk_cmd):
-      logging.info(
-          '%s(%d, %s, %s)', self.__class__.__name__, thunk_pid, trace_name,
-          thunk_cmd)
-      super(LogmanTrace.Context, self).__init__(blacklist)
-      self._drive_map = file_path.DosDriveMap()
-      # Mapping of thread ids to the corresponding process id.
-      self._threads_active = {}
-      # Process ID of the tracer, e.g. the temporary script created by
-      # create_subprocess_thunk(). This is tricky because the process id may
-      # have been reused.
-      self._thunk_pid = thunk_pid
-      self._thunk_cmd = thunk_cmd
-      self._trace_name = trace_name
-      self._line_number = 0
-      self._thunk_process = None
-
-    def on_line(self, line):
-      """Processes a json Event line."""
-      self._line_number += 1
-      try:
-        # By Opcode
-        handler = getattr(
-            self,
-            'handle_%s_%s' % (line[self.EVENT_NAME], line[self.TYPE]),
-            None)
-        if not handler:
-          raise TracingFailure(
-              'Unexpected event %s_%s' % (
-                  line[self.EVENT_NAME], line[self.TYPE]),
-              None, None, None)
-        handler(line)
-      except TracingFailure, e:
-        # Hack in the values since the handler could be a static function.
-        e.pid = line[self.PID]
-        e.line = line
-        e.line_number = self._line_number
-        # Re-raise the modified exception.
-        raise
-      except (KeyError, NotImplementedError, ValueError), e:
-        raise TracingFailure(
-            'Trace generated a %s exception: %s' % (
-                e.__class__.__name__, str(e)),
-            line[self.PID],
-            self._line_number,
-            line,
-            e)
-
-    def to_results(self):
-      if not self.root_process:
-        raise TracingFailure(
-            'Failed to detect the initial process %d' % self._thunk_pid,
-            None, None, None)
-      process = self.root_process.to_results_process()
-      return Results(process)
-
-    def _thread_to_process(self, tid):
-      """Finds the process from the thread id."""
-      tid = int(tid, 16)
-      pid = self._threads_active.get(tid)
-      if not pid or not self._process_lookup.get(pid):
-        return
-      return self._process_lookup[pid]
-
-    @classmethod
-    def handle_EventTrace_Header(cls, line):
-      """Verifies no event was dropped, e.g. no buffer overrun occurred."""
-      BUFFER_SIZE = cls.USER_DATA
-      #VERSION = cls.USER_DATA + 1
-      #PROVIDER_VERSION = cls.USER_DATA + 2
-      #NUMBER_OF_PROCESSORS = cls.USER_DATA + 3
-      #END_TIME = cls.USER_DATA + 4
-      #TIMER_RESOLUTION = cls.USER_DATA + 5
-      #MAX_FILE_SIZE = cls.USER_DATA + 6
-      #LOG_FILE_MODE = cls.USER_DATA + 7
-      #BUFFERS_WRITTEN = cls.USER_DATA + 8
-      #START_BUFFERS = cls.USER_DATA + 9
-      #POINTER_SIZE = cls.USER_DATA + 10
-      EVENTS_LOST = cls.USER_DATA + 11
-      #CPU_SPEED = cls.USER_DATA + 12
-      #LOGGER_NAME = cls.USER_DATA + 13
-      #LOG_FILE_NAME = cls.USER_DATA + 14
-      #BOOT_TIME = cls.USER_DATA + 15
-      #PERF_FREQ = cls.USER_DATA + 16
-      #START_TIME = cls.USER_DATA + 17
-      #RESERVED_FLAGS = cls.USER_DATA + 18
-      #BUFFERS_LOST = cls.USER_DATA + 19
-      #SESSION_NAME_STRING = cls.USER_DATA + 20
-      #LOG_FILE_NAME_STRING = cls.USER_DATA + 21
-      if line[EVENTS_LOST] != '0':
-        raise TracingFailure(
-            ( '%s events were lost during trace, please increase the buffer '
-              'size from %s') % (line[EVENTS_LOST], line[BUFFER_SIZE]),
-            None, None, None)
-
-    def handle_FileIo_Cleanup(self, line):
-      """General wisdom: if a file is closed, it's because it was opened.
-
-      Note that FileIo_Close is not used since if a file was opened properly but
-      not closed before the process exits, only Cleanup will be logged.
-      """
-      #IRP = self.USER_DATA
-      TTID = self.USER_DATA + 1  # Thread ID, that's what we want.
-      FILE_OBJECT = self.USER_DATA + 2
-      #FILE_KEY = self.USER_DATA + 3
-      proc = self._thread_to_process(line[TTID])
-      if not proc:
-        # Not a process we care about.
-        return
-      file_object = line[FILE_OBJECT]
-      if file_object in proc.file_objects:
-        filepath, access_type = proc.file_objects.pop(file_object)
-        proc.add_file(filepath, access_type)
-
-    def handle_FileIo_Create(self, line):
-      """Handles a file open.
-
-      All FileIo events are described at
-      http://msdn.microsoft.com/library/windows/desktop/aa363884.aspx
-      for some value of 'description'.
-
-      " (..) process and thread id values of the IO events (..) are not valid "
-      http://msdn.microsoft.com/magazine/ee358703.aspx
-
-      The FileIo.Create event doesn't return if the CreateFile() call
-      succeeded, so keep track of the file_object and check that it is
-      eventually closed with FileIo_Cleanup.
-      """
-      #IRP = self.USER_DATA
-      TTID = self.USER_DATA + 1  # Thread ID, that's what we want.
-      FILE_OBJECT = self.USER_DATA + 2
-      #CREATE_OPTIONS = self.USER_DATA + 3
-      #FILE_ATTRIBUTES = self.USER_DATA + 4
-      #SHARE_ACCESS = self.USER_DATA + 5
-      OPEN_PATH = self.USER_DATA + 6
-
-      proc = self._thread_to_process(line[TTID])
-      if not proc:
-        # Not a process we care about.
-        return
-
-      raw_path = line[OPEN_PATH]
-      # Ignore directories and bare drive right away.
-      if raw_path.endswith(os.path.sep):
-        return
-      filepath = self._drive_map.to_win32(raw_path)
-      # Ignore bare drive right away. Some may still fall through with format
-      # like '\\?\X:'
-      if len(filepath) == 2:
-        return
-      file_object = line[FILE_OBJECT]
-      if fs.isdir(filepath):
-        # There is no O_DIRECTORY equivalent on Windows. The closest is
-        # FILE_FLAG_BACKUP_SEMANTICS but it's not exactly right either. So
-        # simply discard directories as they are found.
-        return
-      # Override any stale file object.
-      # TODO(maruel): Figure out a way to detect if the file was opened for
-      # reading or writing. Sadly CREATE_OPTIONS doesn't seem to be of any help
-      # here. For now mark as None to make it clear we have no idea what it is
-      # about.
-      proc.file_objects[file_object] = (filepath, None)
-
-    def handle_FileIo_Rename(self, line):
-      # TODO(maruel): Handle?
-      pass
-
-    def handle_Process_End(self, line):
-      pid = line[self.PID]
-      if self._process_lookup.get(pid):
-        logging.info('Terminated: %d' % pid)
-        self._process_lookup[pid] = None
-      else:
-        logging.debug('Terminated: %d' % pid)
-      if self._thunk_process and self._thunk_process.pid == pid:
-        self._thunk_process = None
-
-    def handle_Process_Start(self, line):
-      """Handles a new child process started by PID."""
-      #UNIQUE_PROCESS_KEY = self.USER_DATA
-      PROCESS_ID = self.USER_DATA + 1
-      #PARENT_PID = self.USER_DATA + 2
-      #SESSION_ID = self.USER_DATA + 3
-      #EXIT_STATUS = self.USER_DATA + 4
-      #DIRECTORY_TABLE_BASE = self.USER_DATA + 5
-      #USER_SID = self.USER_DATA + 6
-      IMAGE_FILE_NAME = self.USER_DATA + 7
-      COMMAND_LINE = self.USER_DATA + 8
-
-      ppid = line[self.PID]
-      pid = int(line[PROCESS_ID], 16)
-      command_line = CommandLineToArgvW(line[COMMAND_LINE])
-      logging.debug(
-          'New process %d->%d (%s) %s' %
-            (ppid, pid, line[IMAGE_FILE_NAME], command_line))
-
-      if pid == self._thunk_pid:
-        # Need to ignore processes we don't know about because the log is
-        # system-wide. self._thunk_pid shall start only one process.
-        # This is tricky though because Windows *loves* to reuse process ids and
-        # it happens often that the process ID of the thunk script created by
-        # create_subprocess_thunk() is reused. So just detecting the pid here is
-        # not sufficient, we must confirm the command line.
-        if command_line[:len(self._thunk_cmd)] != self._thunk_cmd:
-          logging.info(
-              'Ignoring duplicate pid %d for %s: %s while searching for %s',
-              pid, self._trace_name, command_line, self._thunk_cmd)
-          return
-
-        # TODO(maruel): The check is quite weak. Add the thunk path.
-        if self._thunk_process:
-          raise TracingFailure(
-              ( 'Parent process is _thunk_pid(%d) but thunk_process(%d) is '
-                'already set') % (self._thunk_pid, self._thunk_process.pid),
-              None, None, None)
-        proc = self.Process(self.blacklist, pid, None)
-        self._thunk_process = proc
-        return
-      elif ppid == self._thunk_pid and self._thunk_process:
-        proc = self.Process(self.blacklist, pid, None)
-        self.root_process = proc
-        ppid = None
-      elif self._process_lookup.get(ppid):
-        proc = self.Process(self.blacklist, pid, None)
-        self._process_lookup[ppid].children.append(proc)
-      else:
-        # Ignore
-        return
-      self._process_lookup[pid] = proc
-
-      proc.command = command_line
-      proc.executable = line[IMAGE_FILE_NAME]
-      # proc.command[0] may be the absolute path of 'executable' but it may be
-      # anything else too. If it happens that command[0] ends with executable,
-      # use it; otherwise default to the base name.
-      cmd0 = proc.command[0].lower()
-      if not cmd0.endswith('.exe'):
-        # TODO(maruel): That's not strictly true either.
-        cmd0 += '.exe'
-      if cmd0.endswith(proc.executable) and fs.isfile(cmd0):
-        # Fix the path.
-        cmd0 = cmd0.replace('/', os.path.sep)
-        cmd0 = os.path.normpath(cmd0)
-        proc.executable = file_path.get_native_path_case(cmd0)
-      logging.info(
-          'New child: %s -> %d %s' % (ppid, pid, proc.executable))
-
-    def handle_Thread_End(self, line):
-      """Has the same parameters as Thread_Start."""
-      tid = int(line[self.TID], 16)
-      self._threads_active.pop(tid, None)
-
-    def handle_Thread_Start(self, line):
-      """Handles a newly created thread.
-
-      Do not use self.PID here since a process' initial thread is created by
-      the parent process.
-      """
-      PROCESS_ID = self.USER_DATA
-      TTHREAD_ID = self.USER_DATA + 1
-      #STACK_BASE = self.USER_DATA + 2
-      #STACK_LIMIT = self.USER_DATA + 3
-      #USER_STACK_BASE = self.USER_DATA + 4
-      #USER_STACK_LIMIT = self.USER_DATA + 5
-      #AFFINITY = self.USER_DATA + 6
-      #WIN32_START_ADDR = self.USER_DATA + 7
-      #TEB_BASE = self.USER_DATA + 8
-      #SUB_PROCESS_TAG = self.USER_DATA + 9
-      #BASE_PRIORITY = self.USER_DATA + 10
-      #PAGE_PRIORITY = self.USER_DATA + 11
-      #IO_PRIORITY = self.USER_DATA + 12
-      #THREAD_FLAGS = self.USER_DATA + 13
-      # Do not use self.PID here since a process' initial thread is created by
-      # the parent process.
-      pid = int(line[PROCESS_ID], 16)
-      tid = int(line[TTHREAD_ID], 16)
-      logging.debug('New thread pid:%d, tid:%d' % (pid, tid))
-      self._threads_active[tid] = pid
-
-    @classmethod
-    def supported_events(cls):
-      """Returns all the processed events."""
-      out = []
-      for member in dir(cls):
-        match = re.match(r'^handle_([A-Za-z]+)_([A-Za-z]+)$', member)
-        if match:
-          out.append(match.groups())
-      return out
-
-  class Tracer(ApiBase.Tracer):
-    # The basic headers.
-    EXPECTED_HEADER = [
-      u'Event Name',
-      u'Type',
-      u'Event ID',
-      u'Version',
-      u'Channel',
-      u'Level',  # 5
-      u'Opcode',
-      u'Task',
-      u'Keyword',
-      u'PID',
-      u'TID',  # 10
-      u'Processor Number',
-      u'Instance ID',
-      u'Parent Instance ID',
-      u'Activity ID',
-      u'Related Activity ID',  # 15
-      u'Clock-Time',
-      u'Kernel(ms)',  # Both have a resolution of ~15ms which makes them
-      u'User(ms)',    # pretty much useless.
-      u'User Data',   # Extra arguments that are event-specific.
-    ]
-    # Only the useful headers common to all entries are listed there. Any column
-    # at 19 or higher is dependent on the specific event.
-    EVENT_NAME = 0
-    TYPE = 1
-    PID = 9
-    TID = 10
-    PROCESSOR_ID = 11
-    TIMESTAMP = 16
-    NULL_GUID = '{00000000-0000-0000-0000-000000000000}'
-    USER_DATA = 19
-
-    class CsvReader(object):
-      """CSV reader that reads files generated by tracerpt.exe.
-
-      csv.reader() fails to read them properly; it mangles file names quoted
-      with "" that contain a comma.
-      """
-        # 0. Had a ',' or one of the following ' ' after a comma, next should
-        # be ' ', '"' or string or ',' for an empty field.
-      ( HAD_DELIMITER,
-        # 1. Processing an unquoted field up to ','.
-        IN_STR,
-        # 2. Processing a new field starting with '"'.
-        STARTING_STR_QUOTED,
-        # 3. Second quote in a row at the start of a field. It could be either
-        # '""foo""' or '""'. Who the hell thought it was a great idea to use
-        # the same character for delimiting and escaping?
-        STARTING_SECOND_QUOTE,
-        # 4. A quote inside a quoted string where the previous character was
-        # not a quote, so the string is not empty. Can be either: end of a
-        # quoted string (a delimiter) or a quote escape. The next char must be
-        # either '"' or ','.
-        HAD_QUOTE_IN_QUOTED,
-        # 5. Second quote inside a quoted string.
-        HAD_SECOND_QUOTE_IN_A_ROW_IN_QUOTED,
-        # 6. Processing a field that started with '"'.
-        IN_STR_QUOTED) = range(7)
-
-      def __init__(self, f):
-        self.f = f
-
-      def __iter__(self):
-        return self
-
-      def next(self):
-        """Splits the line in fields."""
-        line = self.f.readline()
-        if not line:
-          raise StopIteration()
-        line = line.strip()
-        fields = []
-        state = self.HAD_DELIMITER
-        for i, c in enumerate(line):
-          if state == self.HAD_DELIMITER:
-            if c == ',':
-              # Empty field.
-              fields.append('')
-            elif c == ' ':
-              # Ignore initial whitespace
-              pass
-            elif c == '"':
-              state = self.STARTING_STR_QUOTED
-              fields.append('')
-            else:
-              # Start of a new field.
-              state = self.IN_STR
-              fields.append(c)
-
-          elif state == self.IN_STR:
-            # Do not accept quote inside unquoted field.
-            assert c != '"', (i, c, line, fields)
-            if c == ',':
-              fields[-1] = fields[-1].strip()
-              state = self.HAD_DELIMITER
-            else:
-              fields[-1] = fields[-1] + c
-
-          elif state == self.STARTING_STR_QUOTED:
-            if c == '"':
-              # Do not store the character yet.
-              state = self.STARTING_SECOND_QUOTE
-            else:
-              state = self.IN_STR_QUOTED
-              fields[-1] = fields[-1] + c
-
-          elif state == self.STARTING_SECOND_QUOTE:
-            if c == ',':
-              # It was an empty field. '""' == ''.
-              state = self.HAD_DELIMITER
-            else:
-              fields[-1] = fields[-1] + '"' + c
-              state = self.IN_STR_QUOTED
-
-          elif state == self.HAD_QUOTE_IN_QUOTED:
-            if c == ',':
-              # End of the string.
-              state = self.HAD_DELIMITER
-            elif c == '"':
-              state = self.HAD_SECOND_QUOTE_IN_A_ROW_IN_QUOTED
-            else:
-              # The previous double-quote was just an unescaped quote.
-              fields[-1] = fields[-1] + '"' + c
-              state = self.IN_STR_QUOTED
-
-          elif state == self.HAD_SECOND_QUOTE_IN_A_ROW_IN_QUOTED:
-            if c == ',':
-              # End of the string.
-              state = self.HAD_DELIMITER
-              fields[-1] = fields[-1] + '"'
-            else:
-              # That's just how the logger rolls. Revert back to appending the
-              # char and "guess" it was a quote in a double-quoted string.
-              state = self.IN_STR_QUOTED
-              fields[-1] = fields[-1] + '"' + c
-
-          elif state == self.IN_STR_QUOTED:
-            if c == '"':
-              # Could be a delimiter or an escape.
-              state = self.HAD_QUOTE_IN_QUOTED
-            else:
-              fields[-1] = fields[-1] + c
-
-        if state == self.HAD_SECOND_QUOTE_IN_A_ROW_IN_QUOTED:
-          fields[-1] = fields[-1] + '"'
-        else:
-          assert state in (
-              # Terminated with a normal field.
-              self.IN_STR,
-              # Terminated with an empty field.
-              self.STARTING_SECOND_QUOTE,
-              # Terminated with a normal quoted field.
-              self.HAD_QUOTE_IN_QUOTED), (
-              line, state, fields)
-        return fields
-
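For reference, a minimal illustrative sketch (not part of the original file; the sample record is made up) of how the state machine above splits a tracerpt-style line whose quoted field contains a comma. It assumes the same Python 2 environment as the rest of this module:

# Illustrative sketch only; the record below is hypothetical.
import StringIO

sample = 'FileIo, Create, 0x0000, "C:\\tools, v2\\foo.exe", 42\n'
reader = LogmanTrace.Tracer.CsvReader(StringIO.StringIO(sample))
# Yields: ['FileIo', 'Create', '0x0000', 'C:\\tools, v2\\foo.exe', '42']
print(reader.next())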
-    def __init__(self, logname):
-      """Starts the log collection.
-
-      Requires administrative access. logman.exe is synchronous so no need for a
-      "warmup" call.  'Windows Kernel Trace' is *localized* so use its GUID
-      instead.  The GUID constant name is SystemTraceControlGuid. Lovely.
-
-      One can get the list of potentially interesting providers with:
-      "logman query providers | findstr /i file"
-      """
-      super(LogmanTrace.Tracer, self).__init__(logname)
-      self._signal_script = create_subprocess_thunk()
-      self._scripts_to_cleanup.append(self._signal_script)
-      cmd_start = [
-        'logman.exe',
-        'start',
-        'NT Kernel Logger',
-        '-p', '{9e814aad-3204-11d2-9a82-006008a86939}',
-        # splitio,fileiocompletion,syscall,file,cswitch,img
-        '(process,fileio,thread)',
-        '-o', self._logname + '.etl',
-        '-ets',  # Send directly to kernel
-        # Values extracted out of thin air.
-        # Event Trace Session buffer size in kb.
-        '-bs', '10240',
-        # Number of Event Trace Session buffers.
-        '-nb', '16', '256',
-      ]
-      logging.debug('Running: %s' % cmd_start)
-      try:
-        subprocess.check_call(
-            cmd_start,
-            stdin=subprocess.PIPE,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT)
-      except subprocess.CalledProcessError, e:
-        if e.returncode == -2147024891:
-          print >> sys.stderr, 'Please restart with an elevated admin prompt'
-        elif e.returncode == -2144337737:
-          print >> sys.stderr, (
-              'A kernel trace was already running, stop it and try again')
-        raise
-
-    def trace(self, cmd, cwd, tracename, output):
-      logging.info('trace(%s, %s, %s, %s)' % (cmd, cwd, tracename, output))
-      assert os.path.isabs(cmd[0]), cmd[0]
-      assert os.path.isabs(cwd), cwd
-      assert os.path.normpath(cwd) == cwd, cwd
-      with self._lock:
-        if not self._initialized:
-          raise TracingFailure(
-              'Called Tracer.trace() on an uninitialized object',
-              None, None, None, tracename)
-        assert tracename not in (i['trace'] for i in self._traces)
-
-      # Use "logman -?" for help.
-
-      stdout = stderr = None
-      if output:
-        stdout = subprocess.PIPE
-        stderr = subprocess.STDOUT
-
-      # Run the child process.
-      logging.debug('Running: %s' % cmd)
-      # Use the temporary script generated with create_subprocess_thunk() so we
-      # have a clear pid owner. Since trace_inputs.py can be used as a library
-      # and could trace multiple processes simultaneously, executing the traced
-      # executable directly here would make things more complex. It also
-      # sidesteps issues related to logman.exe, which needs to be executed to
-      # control the kernel trace.
-      child_cmd = [
-        sys.executable,
-        self._signal_script,
-        tracename,
-      ]
-      child = subprocess.Popen(
-          child_cmd + tools.fix_python_path(cmd),
-          cwd=cwd,
-          stdin=subprocess.PIPE,
-          stdout=stdout,
-          stderr=stderr)
-      logging.debug('Started child pid: %d' % child.pid)
-      out = child.communicate()[0]
-      # This doesn't mean all the grand-children are done. Sadly, we don't have
-      # a good way to determine that.
-
-      with self._lock:
-        assert tracename not in (i['trace'] for i in self._traces)
-        self._traces.append({
-          'cmd': cmd,
-          'cwd': cwd,
-          'output': out,
-          'pid': child.pid,
-          # Used to figure out the real process when process ids are reused.
-          'thunk_cmd': child_cmd,
-          'trace': tracename,
-        })
-
-      return child.returncode, out
-
-    def close(self, _timeout=None):
-      """Stops the kernel log collection and converts the traces to text
-      representation.
-      """
-      with self._lock:
-        try:
-          super(LogmanTrace.Tracer, self).close()
-        finally:
-          cmd_stop = [
-            'logman.exe',
-            'stop',
-            'NT Kernel Logger',
-            '-ets',  # Sends the command directly to the kernel.
-          ]
-          logging.debug('Running: %s' % cmd_stop)
-          subprocess.check_call(
-              cmd_stop,
-              stdin=subprocess.PIPE,
-              stdout=subprocess.PIPE,
-              stderr=subprocess.STDOUT)
-
-    def post_process_log(self):
-      """Converts the .etl file into .csv then into .json."""
-      super(LogmanTrace.Tracer, self).post_process_log()
-      logformat = 'csv'
-      self._convert_log(logformat)
-      self._trim_log(logformat)
-
-    def _gen_logdata(self):
-      return  {
-        'format': 'csv',
-        'traces': self._traces,
-      }
-
-    def _trim_log(self, logformat):
-      """Reduces the amount of data in the original log by generating a
-      'reduced' log.
-      """
-      if logformat == 'csv_utf16':
-        file_handle = codecs.open(
-            self._logname + '.' + logformat, 'r', encoding='utf-16')
-
-      elif logformat == 'csv':
-        assert sys.getfilesystemencoding() == 'mbcs'
-        file_handle = codecs.open(
-              self._logname + '.' + logformat, 'r',
-              encoding=get_current_encoding())
-
-      supported_events = LogmanTrace.Context.supported_events()
-
-      def trim(generator):
-        """Loads items from the generator and returns the interesting data.
-
-        It filters out any uninteresting line and reduces the amount of data in
-        the trace.
-        """
-        for index, line in enumerate(generator):
-          if not index:
-            if line != self.EXPECTED_HEADER:
-              raise TracingFailure(
-                  'Found malformed header: %s' % line,
-                  None, None, None)
-            continue
-          # As you can see, the CSV is full of useful non-redundant information:
-          if (line[2] != '0' or  # Event ID
-              line[3] not in ('2', '3') or  # Version
-              line[4] != '0' or  # Channel
-              line[5] != '0' or  # Level
-              line[7] != '0' or  # Task
-              line[8] != '0x0000000000000000' or  # Keyword
-              line[12] != '' or  # Instance ID
-              line[13] != '' or  # Parent Instance ID
-              line[14] != self.NULL_GUID or  # Activity ID
-              line[15] != ''):  # Related Activity ID
-            raise TracingFailure(
-                'Found unexpected values in line: %s' % ' '.join(line),
-                  None, None, None)
-
-          if (line[self.EVENT_NAME], line[self.TYPE]) not in supported_events:
-            continue
-
-          yield [
-              line[self.EVENT_NAME],
-              line[self.TYPE],
-              line[self.PID],
-              line[self.TID],
-              line[self.PROCESSOR_ID],
-              line[self.TIMESTAMP],
-          ] + line[self.USER_DATA:]
-
-      # Must not convert the trim() call into a list, since it would use too
-      # much memory for a large trace. Use a csv file as a workaround since the
-      # json parser requires a complete in-memory file.
-      path = os.path.join(
-          unicode(os.getcwd()), u'%s.preprocessed' % self._logname)
-      with fs.open(path, 'wb') as f:
-        # $ and * can't be used in file names on Windows, reducing the likelihood
-        # of having to escape a string.
-        out = csv.writer(
-            f, delimiter='$', quotechar='*', quoting=csv.QUOTE_MINIMAL)
-        for line in trim(self.CsvReader(file_handle)):
-          out.writerow([s.encode('utf-8') for s in line])
-
-    def _convert_log(self, logformat):
-      """Converts the ETL trace to text representation.
-
-      Normally, 'csv' is sufficient. If complex scripts are used (like eastern
-      languages), use 'csv_utf16'. If localization gets in the way, use 'xml'.
-
-      Arguments:
-        - logformat: Text format to be generated, csv, csv_utf16 or xml.
-
-      Use "tracerpt -?" for help.
-      """
-      LOCALE_INVARIANT = 0x7F
-      windll.kernel32.SetThreadLocale(LOCALE_INVARIANT)
-      cmd_convert = [
-        'tracerpt.exe',
-        '-l', self._logname + '.etl',
-        '-o', self._logname + '.' + logformat,
-        '-gmt',  # Use UTC
-        '-y',  # No prompt
-        # Use -of XML to get the header of each items after column 19, e.g. all
-        # the actual headers of 'User Data'.
-      ]
-
-      if logformat == 'csv':
-        # tracerpt localizes the 'Type' column, for major brainfuck
-        # entertainment. I can't imagine any sane reason to do that.
-        cmd_convert.extend(['-of', 'CSV'])
-      elif logformat == 'csv_utf16':
-        # This causes it to use UTF-16, which doubles the log size but ensures
-        # the log is readable for non-ASCII characters.
-        cmd_convert.extend(['-of', 'CSV', '-en', 'Unicode'])
-      elif logformat == 'xml':
-        cmd_convert.extend(['-of', 'XML'])
-      else:
-        raise ValueError('Unexpected log format \'%s\'' % logformat)
-      logging.debug('Running: %s' % cmd_convert)
-      # This can take tens of minutes for large logs.
-      # Redirects all output to stderr.
-      subprocess.check_call(
-          cmd_convert,
-          stdin=subprocess.PIPE,
-          stdout=sys.stderr,
-          stderr=sys.stderr)
-
-  def __init__(self, use_sudo=False):  # pylint: disable=W0613
-    super(LogmanTrace, self).__init__()
-    # Ignore use_sudo. It's irrelevant on Windows but kept to simplify the API.
-
-  @staticmethod
-  def clean_trace(logname):
-    for ext in ('', '.csv', '.etl', '.json', '.xml', '.preprocessed'):
-      if fs.isfile(logname + ext):
-        fs.remove(logname + ext)
-
-  @classmethod
-  def parse_log(cls, logname, blacklist, trace_name):
-    logging.info('parse_log(%s, ..., %s)', logname, trace_name)
-    assert os.path.isabs(logname)
-
-    def blacklist_more(filepath):
-      # All the NTFS metadata is in the form x:\$EXTEND or stuff like that.
-      return blacklist(filepath) or re.match(r'[A-Z]\:\\\$EXTEND', filepath)
-
-    # Create a list of (Context, result_dict) tuples. This is necessary because
-    # the csv file may be larger than the amount of available memory.
-    contexes = [
-      (
-        cls.Context(
-            blacklist_more, item['pid'], item['trace'], item['thunk_cmd']),
-        {
-          'output': item['output'],
-          'trace': item['trace'],
-        },
-      )
-      for item in tools.read_json(logname)['traces']
-      if not trace_name or item['trace'] == trace_name
-    ]
-
-    # The log may be too large to fit in memory and it is not efficient to read
-    # it multiple times, so multiplex the contexes instead, which is slightly
-    # more awkward.
-    path = os.path.join(unicode(os.getcwd()), u'%s.preprocessed' % logname)
-    with fs.open(path, 'rb') as f:
-      lines = csv.reader(
-          f, delimiter='$', quotechar='*', quoting=csv.QUOTE_MINIMAL)
-      for encoded in lines:
-        line = [s.decode('utf-8') for s in encoded]
-        # Convert the PID in-place from hex.
-        line[cls.Context.PID] = int(line[cls.Context.PID], 16)
-        for context in contexes:
-          if 'exception' in context[1]:
-            continue
-          try:
-            context[0].on_line(line)
-          except TracingFailure:
-            context[1]['exception'] = sys.exc_info()
-
-    for context in contexes:
-      if 'exception' in context[1]:
-        continue
-      context[1]['results'] = context[0].to_results()
-
-    return [context[1] for context in contexes]
-
-
-def get_api(**kwargs):
-  """Returns the correct implementation for the current OS."""
-  if sys.platform == 'cygwin':
-    raise NotImplementedError(
-        'Not implemented for cygwin, start the script from Win32 python')
-  flavors = {
-    'win32': LogmanTrace,
-    'darwin': Dtrace,
-    'sunos5': Dtrace,
-    'freebsd7': Dtrace,
-    'freebsd8': Dtrace,
-  }
-  # Defaults to strace.
-  return flavors.get(sys.platform, Strace)(**kwargs)
-
-
-def extract_directories(root_dir, files, blacklist):
-  """Detects if all the files in a directory are in |files| and, if so,
-  replaces the individual files with a Results.Directory instance.
-
-  Takes a list of Results.File instances and returns a shorter list of
-  Results.File and Results.Directory instances.
-
-  Arguments:
-    - root_dir: Optional base directory that shouldn't be searched further.
-    - files: list of Results.File instances.
-    - blacklist: lambda to reject unneeded files, for example r'.+\.pyc'.
-  """
-  logging.info(
-      'extract_directories(%s, %d files, ...)' % (root_dir, len(files)))
-  assert not (root_dir or '').endswith(os.path.sep), root_dir
-  # It is important for root_dir to not be a symlinked path; make sure to call
-  # os.path.realpath() as needed.
-  assert not root_dir or (
-      os.path.realpath(file_path.get_native_path_case(root_dir)) == root_dir)
-  assert not any(isinstance(f, Results.Directory) for f in files)
-  # Remove non-existent files.
-  files = [f for f in files if f.existent]
-  if not files:
-    return files
-  # All files must share the same root, which can be None.
-  assert len(set(f.root for f in files)) == 1, set(f.root for f in files)
-
-  # Creates a {directory: {filename: File}} mapping, up to root.
-  buckets = {}
-  if root_dir:
-    buckets[root_dir] = {}
-  for fileobj in files:
-    path = fileobj.full_path
-    directory = os.path.dirname(path)
-    assert directory
-    # Do not use os.path.basename() so trailing os.path.sep is kept.
-    basename = path[len(directory)+1:]
-    files_in_directory = buckets.setdefault(directory, {})
-    files_in_directory[basename] = fileobj
-    # Add all the directories recursively up to root.
-    while True:
-      old_d = directory
-      directory = os.path.dirname(directory)
-      if directory + os.path.sep == root_dir or directory == old_d:
-        break
-      buckets.setdefault(directory, {})
-
-  root_prefix = len(root_dir) + 1 if root_dir else 0
-  for directory in sorted(buckets, reverse=True):
-    if not fs.isdir(directory):
-      logging.debug(
-          '%s was a directory but doesn\'t exist anymore; ignoring', directory)
-      continue
-    actual = set(f for f in fs.listdir(directory) if not blacklist(f))
-    expected = set(buckets[directory])
-    if not (actual - expected):
-      parent = os.path.dirname(directory)
-      buckets[parent][os.path.basename(directory)] = Results.Directory(
-        root_dir,
-        directory[root_prefix:],
-        False,
-        sum(f.size for f in buckets[directory].itervalues()),
-        sum(f.nb_files for f in buckets[directory].itervalues()))
-      # Remove the whole bucket.
-      del buckets[directory]
-
-  # Reverse the mapping with what remains. The original instances are returned,
-  # so the cached metadata is kept.
-  files = sum((x.values() for x in buckets.itervalues()), [])
-  return sorted(files, key=lambda x: x.path)
-
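To make the collapsing idea concrete, here is a simplified, self-contained stand-in (not part of the original file) that works on plain paths instead of Results.File instances and skips the recursive walk up to root_dir:

import os

def collapse(traced, on_disk):
  """Replaces a directory's files with the directory itself when every file
  present on disk in that directory was traced."""
  by_dir = {}
  for p in traced:
    by_dir.setdefault(os.path.dirname(p), set()).add(os.path.basename(p))
  out = []
  for d, names in by_dir.items():
    if names == set(on_disk.get(d, ())):
      out.append(d + os.path.sep)  # The whole directory is accounted for.
    else:
      out.extend(os.path.join(d, n) for n in sorted(names))
  return sorted(out)

# Both files under /r/a were traced, so the directory collapses; /r/b keeps
# only the file that was actually traced.
on_disk = {'/r/a': ['x.py', 'y.py'], '/r/b': ['z.py', 'w.py']}
traced = ['/r/a/x.py', '/r/a/y.py', '/r/b/z.py']
print(collapse(traced, on_disk))  # ['/r/a/', '/r/b/z.py']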
-
-def trace(logfile, cmd, cwd, api, output):
-  """Traces an executable. Returns (returncode, output) from api.
-
-  Arguments:
-  - logfile: file to write to.
-  - cmd: command to run.
-  - cwd: current directory to start the process in.
-  - api: a tracing api instance.
-  - output: if True, returns the output; otherwise prints it to the console.
-  """
-  cmd = tools.fix_python_path(cmd)
-  api.clean_trace(logfile)
-  with api.get_tracer(logfile) as tracer:
-    return tracer.trace(cmd, cwd, 'default', output)
-
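As a rough, hypothetical sketch of how these pieces fit together when the module is used as a library (the command and log path below are made up, admin/root rights may be required, and this mirrors what CMDtrace and CMDread do via the CLI):

api = get_api()  # LogmanTrace on win32, Dtrace on darwin/sunos5/freebsd, else Strace.
logfile = os.path.abspath('example.log')
cmd = [os.path.abspath('child1.py'), '--arg']  # Hypothetical executable to trace.
returncode, out = trace(logfile, cmd, os.getcwd(), api, output=True)
blacklist = tools.gen_blacklist([])  # Keep every file for the example.
for item in api.parse_log(logfile, blacklist, None):
  if 'exception' in item:
    continue  # Do not abort the other traces.
  results = item['results']
  print('%s: %d files' % (item['trace'], len(results.files)))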
-
-def CMDclean(parser, args):
-  """Cleans up traces."""
-  options, args = parser.parse_args(args)
-  api = get_api()
-  api.clean_trace(options.log)
-  return 0
-
-
-def CMDtrace(parser, args):
-  """Traces an executable."""
-  parser.allow_interspersed_args = False
-  parser.add_option(
-      '-q', '--quiet', action='store_true',
-      help='Redirects traced executable output to /dev/null')
-  parser.add_option(
-      '-s', '--sudo', action='store_true',
-      help='Use sudo when shelling out the tracer tool (ignored on Windows)')
-  parser.add_option(
-      '-n', '--no-sudo', action='store_false',
-      help='Don\'t use sudo')
-  options, args = parser.parse_args(args)
-
-  if not args:
-    parser.error('Please provide a command to run')
-
-  if not can_trace():
-    parser.error('Please rerun this program with admin privileges')
-
-  if not os.path.isabs(args[0]) and fs.access(args[0], os.X_OK):
-    args[0] = os.path.abspath(args[0])
-
-  # options.sudo default value is None, which is to do whatever tracer defaults
-  # do.
-  api = get_api(use_sudo=options.sudo)
-  return trace(options.log, args, os.getcwd(), api, options.quiet)[0]
-
-
-def CMDread(parser, args):
-  """Reads the logs and prints the result."""
-  parser.add_option(
-      '-V', '--variable',
-      nargs=2,
-      action='append',
-      dest='variables',
-      metavar='VAR_NAME directory',
-      default=[],
-      help=('Variables to replace relative directories against. Example: '
-            '"-V \'$HOME\' \'/home/%s\'" will replace all occurrences of your '
-            'home dir with $HOME') % getpass.getuser())
-  parser.add_option(
-      '--root-dir',
-      help='Root directory to base everything off of. Anything outside of '
-           'this directory will not be reported')
-  parser.add_option(
-      '--trace-name',
-      help='Only reads one of the traces. Defaults to reading all traces')
-  parser.add_option(
-      '-j', '--json', action='store_true',
-      help='Outputs raw result data as json')
-  parser.add_option(
-      '--trace-blacklist', action='append', default=[],
-      help='List of regexps to use as a blacklist filter')
-  options, args = parser.parse_args(args)
-
-  if options.root_dir:
-    options.root_dir = file_path.get_native_path_case(
-        unicode(os.path.abspath(options.root_dir)))
-
-  variables = dict(options.variables)
-  api = get_api()
-  blacklist = tools.gen_blacklist(options.trace_blacklist)
-  data = api.parse_log(options.log, blacklist, options.trace_name)
-  # Process each trace.
-  output_as_json = []
-  try:
-    for item in data:
-      if 'exception' in item:
-        # Do not abort the other traces.
-        print >> sys.stderr, (
-            'Trace %s: Got an exception: %s' % (
-              item['trace'], item['exception'][1]))
-        continue
-      results = item['results']
-      if options.root_dir:
-        results = results.strip_root(options.root_dir)
-
-      if options.json:
-        output_as_json.append(results.flatten())
-      else:
-        simplified = extract_directories(
-            options.root_dir, results.files, blacklist)
-        simplified = [f.replace_variables(variables) for f in simplified]
-        if len(data) > 1:
-          print('Trace: %s' % item['trace'])
-        print('Total: %d' % len(results.files))
-        print('Non existent: %d' % len(results.non_existent))
-        for f in results.non_existent:
-          print('  %s' % f.path)
-        print(
-            'Interesting: %d reduced to %d' % (
-                len(results.existent), len(simplified)))
-        for f in simplified:
-          print('  %s' % f.path)
-
-    if options.json:
-      tools.write_json(sys.stdout, output_as_json, False)
-  except KeyboardInterrupt:
-    return 1
-  except IOError as e:
-    if e.errno == errno.EPIPE:
-      # Do not print a stack trace when the output is piped to less and the user
-      # quits before the whole output is written.
-      return 1
-    raise
-  return 0
-
-
-class OptionParserTraceInputs(logging_utils.OptionParserWithLogging):
-  """Adds automatic --log handling."""
-
-  # Disable the --log-file option since having both --log and --log-file
-  # options is confusing.
-  # TODO(vadimsh): Rename --log-file or --log to something else.
-  enable_log_file = False
-
-  def __init__(self, **kwargs):
-    logging_utils.OptionParserWithLogging.__init__(self, **kwargs)
-    self.add_option(
-        '-l', '--log', help='Log file to generate or read, required')
-
-  def parse_args(self, *args, **kwargs):
-    """Makes sure the paths make sense.
-
-    On Windows, / and \ are often mixed together in a path.
-    """
-    options, args = logging_utils.OptionParserWithLogging.parse_args(
-        self, *args, **kwargs)
-    if not options.log:
-      self.error('Must supply a log file with -l')
-    options.log = unicode(os.path.abspath(options.log))
-    return options, args
-
-
-def main(argv):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  try:
-    return dispatcher.execute(
-        OptionParserTraceInputs(version=__version__), argv)
-  except TracingFailure, e:
-    sys.stderr.write('\nError: ')
-    sys.stderr.write(str(e))
-    sys.stderr.write('\n')
-    return 1
-
-
-if __name__ == '__main__':
-  subprocess42.inhibit_os_error_reporting()
-  fix_encoding.fix_encoding()
-  tools.disable_buffering()
-  colorama.init()
-  sys.exit(main(sys.argv[1:]))
diff --git a/tools/swarming_client/utils/__init__.py b/tools/swarming_client/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/swarming_client/utils/__init__.py
+++ /dev/null
diff --git a/tools/swarming_client/utils/cacert.pem b/tools/swarming_client/utils/cacert.pem
deleted file mode 100644
index 070a366..0000000
--- a/tools/swarming_client/utils/cacert.pem
+++ /dev/null
@@ -1,2142 +0,0 @@
-# Source: https://pki.google.com/roots.pem
-# Retrieved: 2014-12-17
-
-# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
-# Label: "GTE CyberTrust Global Root"
-# Serial: 421
-# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
-# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
-# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
------BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Server CA"
-# Serial: 1
-# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
-# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
-# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
------BEGIN CERTIFICATE-----
-MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
-MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
-DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
-dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
-cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
-DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
-gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
-yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
-L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
-EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
-7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
-QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
-qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
------END CERTIFICATE-----
-
-# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
-# Label: "Thawte Premium Server CA"
-# Serial: 1
-# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
-# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
-# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
------BEGIN CERTIFICATE-----
-MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
-VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
-dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
-MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
-MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
-A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
-bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
-VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
-ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
-uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
-9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
-hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
-pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
------END CERTIFICATE-----
-
-# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
-# Subject: O=Equifax OU=Equifax Secure Certificate Authority
-# Label: "Equifax Secure CA"
-# Serial: 903804111
-# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
-# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
-# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
-dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
-MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
-dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
-BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
-cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
-MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
-aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
-ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
-IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
-7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
-1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
------END CERTIFICATE-----
-
-# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
-# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
-# Serial: 167285380242319648451154478808036881606
-# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
-# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
-# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
------BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
-pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
-13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
-U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
-oJ2daZH9
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Label: "GlobalSign Root CA - R2"
-# Serial: 4835703278459682885658125
-# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
-# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
-# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
-# Label: "ValiCert Class 1 VA"
-# Serial: 1
-# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
-# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
-# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
-# Label: "ValiCert Class 2 VA"
-# Serial: 1
-# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
-# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
-# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
-# Label: "RSA Root Certificate 1"
-# Serial: 1
-# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
-# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
-# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
-# Serial: 206684696279472310254277870180966723415
-# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
-# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
-# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
-# Serial: 314531972711909413743075096039378935511
-# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
-# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
-# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
-GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
-+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
-U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
-NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
-ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
-ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
-CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
-g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
-2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
-bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Secure Server CA"
-# Serial: 927650371
-# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
-# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
-# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
------BEGIN CERTIFICATE-----
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
-VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
-ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
-KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
-ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
-MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
-ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
-b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
-bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
-U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
-A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
-I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
-wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
-AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
-oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
-BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
-MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
-b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
-dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
-MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
-E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
-MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
-hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
-95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
-2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946059622
-# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
-# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
-# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
------BEGIN CERTIFICATE-----
-MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
-MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
-LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
-YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
-A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
-K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
-sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
-MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
-XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
-HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
-4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
-vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
-CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
-WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
-oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
-h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
-f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
-B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
-vUxFnmG6v4SBkgPR0ml8xQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
-RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
-VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
-DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
-ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
-VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
-mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
-IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
-dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
-jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
-BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
-DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
-jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
-Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
-R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure Global eBusiness CA"
-# Serial: 1
-# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
-# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
-# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
-# Label: "Equifax Secure eBusiness CA 1"
-# Serial: 4
-# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
-# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
-# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
-# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
-# Label: "Equifax Secure eBusiness CA 2"
-# Serial: 930140085
-# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
-# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
-# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
-dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
-NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
-VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
-vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
-BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
-IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
-NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
-y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
-0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
-E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Low-Value Services Root"
-# Serial: 1
-# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
-# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
-# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
------BEGIN CERTIFICATE-----
-MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
-QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
-VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
-A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
-CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
-tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
-dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
-PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
-+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
-BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
-MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
-ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
-IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
-7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
-43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
-eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
-pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
-WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Label: "AddTrust External Root"
-# Serial: 1
-# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
-# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
-# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
------BEGIN CERTIFICATE-----
-MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
-IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
-bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
-dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
-H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
-uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
-mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
-a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
-E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
-WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
-IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
-AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
-YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
-6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
-Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
-c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
-mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Public Services Root"
-# Serial: 1
-# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
-# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
-# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
-MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
-ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
-BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
-6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
-GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
-dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
-1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
-62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
-BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
-AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
-MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
-cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
-b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
-IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
-iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
-GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
-4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
-XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
-# Label: "AddTrust Qualified Certificates Root"
-# Serial: 1
-# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
-# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
-# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
------BEGIN CERTIFICATE-----
-MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
-MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
-EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
-BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
-xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
-87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
-2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
-WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
-0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
-A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
-pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
-ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
-aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
-hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
-hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
-dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
-P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
-iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
-xqE=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Label: "Entrust Root Certification Authority"
-# Serial: 1164660820
-# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
-# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
-# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
------BEGIN CERTIFICATE-----
-MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
-Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
-KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
-cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
-NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
-NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
-ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
-BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
-Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
-4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
-KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
-rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
-94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
-sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
-gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
-kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
-vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
-A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
-O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
-AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
-9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
-eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
-0vdXcDazv/wor3ElhVsT/h5/WrQ8
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Label: "GeoTrust Global CA"
-# Serial: 144470
-# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
-# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
-# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
------BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Global CA 2"
-# Serial: 1
-# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
-# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
-# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
------BEGIN CERTIFICATE-----
-MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
-IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
-R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
-PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
-Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
-TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
-5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
-S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
-2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
-FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
-EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
-EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
-/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
-A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
-abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
-I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
-4iIprn2DQKi6bA==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA"
-# Serial: 1
-# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
-# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
-# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
------BEGIN CERTIFICATE-----
-MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
-BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
-IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
-VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
-cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
-QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
-F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
-c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
-mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
-VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
-teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
-f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
-Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
-nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
-/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
-MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
-9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
-aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
-IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
-ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
-uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
-QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
-ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
-DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
-bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA 2"
-# Serial: 1
-# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
-# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
-# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
------BEGIN CERTIFICATE-----
-MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
-VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
-c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
-WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
-FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
-XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
-se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
-KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
-IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
-y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
-hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
-QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
-Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
-HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
-KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
-L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
-Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
-ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
-T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
-GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
-1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
-OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
-6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
-QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
------END CERTIFICATE-----
-
-# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
-# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
-# Label: "America Online Root Certification Authority 1"
-# Serial: 1
-# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
-# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
-# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
------BEGIN CERTIFICATE-----
-MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
-bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
-MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
-ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
-Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
-hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
-1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
-OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
-2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
-O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
-AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
-BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
-Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
-LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
-oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
-MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
-sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
------END CERTIFICATE-----
-
-# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
-# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
-# Label: "America Online Root Certification Authority 2"
-# Serial: 1
-# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
-# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
-# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
------BEGIN CERTIFICATE-----
-MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
-bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
-MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
-ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
-Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
-ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
-206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
-KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
-JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
-BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
-Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
-PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
-Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
-Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
-o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
-+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
-YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
-FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
-AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
-xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
-LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
-obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
-CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
-IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
-DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
-AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
-Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
-AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
-Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
-RY8mkaKO/qk=
------END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
-YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
-BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
-3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
-rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
-oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
-MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
-QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
-AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
-GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
-Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
-G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
-l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
-smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
-# Subject: CN=Secure Certificate Services O=Comodo CA Limited
-# Label: "Comodo Secure Services root"
-# Serial: 1
-# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
-# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
-# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
------BEGIN CERTIFICATE-----
-MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
-ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
-fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
-BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
-cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
-HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
-CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
-3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
-6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
-HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
-EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
-Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
-Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
-DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
-5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
-Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
-gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
-aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
-izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
------END CERTIFICATE-----
-
-# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
-# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
-# Label: "Comodo Trusted Services root"
-# Serial: 1
-# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
-# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
-# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
-
-# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN DATACorp SGC Root CA"
-# Serial: 91374294542884689855167577680241077609
-# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
-# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
-# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
------BEGIN CERTIFICATE-----
-MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
-kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
-IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
-EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
-VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
-dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
-E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
-D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
-4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
-lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
-bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
-o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
-MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
-LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
-BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
-AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
-Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
-j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
-KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
-2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
-mfnGV/TJVTl4uix5yaaIK/QI
------END CERTIFICATE-----
-
-# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
-# Label: "UTN USERFirst Hardware Root CA"
-# Serial: 91374294542884704022267039221184531197
-# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
-# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
-# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
------BEGIN CERTIFICATE-----
-MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
-lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
-dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
-SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
-A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
-MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
-d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
-cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
-0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
-M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
-MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
-oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
-DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
-VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
-dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
-bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
-BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
-//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
-CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
-CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
-3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
-KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
-# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
-# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
------BEGIN CERTIFICATE-----
-MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
-FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
-ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
-LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
-Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
-dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
-cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
-YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
-dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
-bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
-YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
-TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
-9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
-jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
-FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
-ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
-ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
-EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
-L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
-yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
-O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
-um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
-NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root CA"
-# Serial: 17154717934120587862167794914071425081
-# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
-# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
-# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root CA"
-# Serial: 10944719598952040374951832963794454346
-# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
-# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
-# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
------BEGIN CERTIFICATE-----
-MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
-CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
-43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
-T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
-gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
-DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
-hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
-06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
-PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
-YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
-CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert High Assurance EV Root CA"
-# Serial: 3553400076410547919724730734378100087
-# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
-# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
-# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
-ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
-LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
-RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
-+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
-PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
-Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
-hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
-EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
-FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
-eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
-hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
-Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
-vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
-+OkuE6N36B9K
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Label: "GeoTrust Primary Certification Authority"
-# Serial: 32798226551256963324313806436981982369
-# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
-# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
-# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
------BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA"
-# Serial: 69529181992039203566298953787712940909
-# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
-# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
-# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
-# Serial: 33037644167568058970164719475676101450
-# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
-# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
-# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
-# Label: "COMODO Certification Authority"
-# Serial: 104350513648249232941998508985834464573
-# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
-# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
-# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
------BEGIN CERTIFICATE-----
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
-gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
-BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
-MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
-YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
-RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
-UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
-2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
-+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
-nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
-/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
-PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
-SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
-RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
-zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
-BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
-ZQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Label: "Network Solutions Certificate Authority"
-# Serial: 116697915152937497490437556386812487904
-# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
-# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
-# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
------BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Label: "COMODO ECC Certification Authority"
-# Serial: 41578283867086692638256921589707938090
-# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
-# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
-# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
------BEGIN CERTIFICATE-----
-MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
-MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
-ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
-T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
-biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
-FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
-cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
-BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
-fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
-GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
-# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
-# Label: "TC TrustCenter Class 2 CA II"
-# Serial: 941389028203453866782103406992443
-# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
-# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
-# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
------BEGIN CERTIFICATE-----
-MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
-BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
-Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
-OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
-SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
-VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
-tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
-uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
-XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
-8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
-5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
-kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
-dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
-Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
-JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
-Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
-GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
-ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
-au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
-hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
-dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
-# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
-# Label: "TC TrustCenter Class 3 CA II"
-# Serial: 1506523511417715638772220530020799
-# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
-# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
-# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
------BEGIN CERTIFICATE-----
-MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
-BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
-Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
-OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
-SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
-VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
-Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
-Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
-1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
-ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
-Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
-XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
-dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
-Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
-JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
-Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
-irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
-TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
-g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
-95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
-S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Label: "TC TrustCenter Universal CA I"
-# Serial: 601024842042189035295619584734726
-# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
-# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
-# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
-BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
-c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
-MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
-R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
-VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
-JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
-fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
-jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
-wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
-fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
-VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
-CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
-7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
-8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
-ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
-ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
-2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
------END CERTIFICATE-----
-
-# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Label: "Cybertrust Global Root"
-# Serial: 4835703278459682877484360
-# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
-# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
-# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
------BEGIN CERTIFICATE-----
-MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
-A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
-bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
-b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
-7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
-J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
-HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
-t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
-FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
-XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
-MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
-hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Label: "GlobalSign Root CA - R3"
-# Serial: 4835703278459759426209954
-# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
-# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
-# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
------BEGIN CERTIFICATE-----
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
-MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
-RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
-gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
-QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
-XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
-LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
-RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
-jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
-6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
-mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
-Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
-WD9f
------END CERTIFICATE-----
-
-# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
-# Label: "TC TrustCenter Universal CA III"
-# Serial: 2010889993983507346460533407902964
-# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
-# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
-# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
------BEGIN CERTIFICATE-----
-MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
-MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
-BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
-c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
-MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
-ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
-BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
-5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
-DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
-zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
-yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
-dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
-MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
-Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
-4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
-dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
-aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
-DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
-CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
-LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
------END CERTIFICATE-----
-
-# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Label: "Go Daddy Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
-# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
-# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
-# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
-# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
-ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
-MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
-b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
-aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
-Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
-nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
-HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
-Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
-dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
-HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
-CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
-sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
-4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
-8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
-pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
-mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Services Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
-# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
-# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
-ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
-MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
-VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
-ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
-dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
-OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
-8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
-Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
-hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
-6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
-AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
-bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
-ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
-qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
-iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
-0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
-sSi6
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
-# Subject: CN=AffirmTrust Commercial O=AffirmTrust
-# Label: "AffirmTrust Commercial"
-# Serial: 8608355977964138876
-# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
-# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
-# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
-Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
-ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
-MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
-yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
-VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
-nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
-XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
-vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
-Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
-N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
-nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Networking O=AffirmTrust
-# Subject: CN=AffirmTrust Networking O=AffirmTrust
-# Label: "AffirmTrust Networking"
-# Serial: 8957382827206547757
-# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
-# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
-# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
-YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
-kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
-QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
-6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
-yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
-QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
-tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
-QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
-Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
-olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
-x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium O=AffirmTrust
-# Subject: CN=AffirmTrust Premium O=AffirmTrust
-# Label: "AffirmTrust Premium"
-# Serial: 7893706540734352110
-# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
-# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
-# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
-dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
-A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
-qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
-JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
-+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
-s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
-HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
-70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
-V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
-qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
-5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
-C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
-OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
-FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
-KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
-Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
-8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
-MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
-0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
-u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
-YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
-GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
-RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
-KeC2uAloGRwYQw==
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Label: "AffirmTrust Premium ECC"
-# Serial: 8401224907861490260
-# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
-# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
-# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
------BEGIN CERTIFICATE-----
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
-VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
-cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
-BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
-VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
-0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
-ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
-A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
-aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
-flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
-# Label: "StartCom Certification Authority"
-# Serial: 45
-# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
-# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
-# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
------BEGIN CERTIFICATE-----
-MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
-Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
-MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
-U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
-cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
-A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
-pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
-OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
-Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
-Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
-HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
-Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
-+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
-Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
-Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
-26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
-AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
-F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
-ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
-ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
-aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
-c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
-d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
-CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
-dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
-wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
-Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
-0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
-pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
-CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
-P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
-1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
-KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
-JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
-8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
-fyWl8kgAwKQB2j8=
------END CERTIFICATE-----
-
-# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
-# Label: "StartCom Certification Authority G2"
-# Serial: 59
-# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
-# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
-# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
------BEGIN CERTIFICATE-----
-MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
-MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
-OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
-A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
-JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
-vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
-D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
-Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
-RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
-HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
-nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
-0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
-UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
-Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
-TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
-AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
-BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
-2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
-UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
-6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
-9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
-HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
-XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
-IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
-hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
-so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
------END CERTIFICATE-----
diff --git a/tools/swarming_client/utils/file_path.py b/tools/swarming_client/utils/file_path.py
deleted file mode 100644
index 6cb52d1..0000000
--- a/tools/swarming_client/utils/file_path.py
+++ /dev/null
@@ -1,1176 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Provides functions: get_native_path_case(), isabs() and safe_join().
-
-This module assumes that filesystem is not changing while current process
-is running and thus it caches results of functions that depend on FS state.
-"""
-
-import ctypes
-import getpass
-import logging
-import os
-import posixpath
-import re
-import shlex
-import stat
-import sys
-import tempfile
-import time
-import unicodedata
-
-from utils import fs
-from utils import tools
-
-
-# Types of action accepted by link_file().
-HARDLINK, HARDLINK_WITH_FALLBACK, SYMLINK, SYMLINK_WITH_FALLBACK, COPY = range(
-    1, 6)
-
-
-## OS-specific imports
-
-
-if sys.platform == 'win32':
-  import locale
-  from ctypes.wintypes import create_unicode_buffer
-  from ctypes.wintypes import windll  # pylint: disable=E0611
-  from ctypes.wintypes import GetLastError  # pylint: disable=E0611
-elif sys.platform == 'darwin':
-  import Carbon.File  #  pylint: disable=F0401
-  import MacOS  # pylint: disable=F0401
-
-
-if sys.platform == 'win32':
-  class LUID(ctypes.Structure):
-    _fields_ = [
-      ('low_part', ctypes.wintypes.DWORD), ('high_part', ctypes.wintypes.LONG),
-    ]
-
-
-  class LUID_AND_ATTRIBUTES(ctypes.Structure):
-    _fields_ = [('LUID', LUID), ('attributes', ctypes.wintypes.DWORD)]
-
-
-  class TOKEN_PRIVILEGES(ctypes.Structure):
-    _fields_ = [
-      ('count', ctypes.wintypes.DWORD), ('privileges', LUID_AND_ATTRIBUTES*0),
-    ]
-
-    def get_array(self):
-      array_type = LUID_AND_ATTRIBUTES * self.count
-      return ctypes.cast(self.privileges, ctypes.POINTER(array_type)).contents
-
-
-  GetCurrentProcess = ctypes.windll.kernel32.GetCurrentProcess
-  GetCurrentProcess.restype = ctypes.wintypes.HANDLE
-  OpenProcessToken = ctypes.windll.advapi32.OpenProcessToken
-  OpenProcessToken.argtypes = (
-      ctypes.wintypes.HANDLE, ctypes.wintypes.DWORD,
-      ctypes.POINTER(ctypes.wintypes.HANDLE))
-  OpenProcessToken.restype = ctypes.wintypes.BOOL
-  LookupPrivilegeValue = ctypes.windll.advapi32.LookupPrivilegeValueW
-  LookupPrivilegeValue.argtypes = (
-      ctypes.wintypes.LPWSTR, ctypes.wintypes.LPWSTR, ctypes.POINTER(LUID))
-  LookupPrivilegeValue.restype = ctypes.wintypes.BOOL
-  LookupPrivilegeName = ctypes.windll.advapi32.LookupPrivilegeNameW
-  LookupPrivilegeName.argtypes = (
-      ctypes.wintypes.LPWSTR, ctypes.POINTER(LUID), ctypes.wintypes.LPWSTR,
-      ctypes.POINTER(ctypes.wintypes.DWORD))
-  LookupPrivilegeName.restype = ctypes.wintypes.BOOL
-  PTOKEN_PRIVILEGES = ctypes.POINTER(TOKEN_PRIVILEGES)
-  GetTokenInformation = ctypes.windll.advapi32.GetTokenInformation
-  GetTokenInformation.argtypes = (
-      ctypes.wintypes.HANDLE, ctypes.c_uint, ctypes.c_void_p,
-      ctypes.wintypes.DWORD, ctypes.POINTER(ctypes.wintypes.DWORD))
-  GetTokenInformation.restype = ctypes.wintypes.BOOL
-  AdjustTokenPrivileges = ctypes.windll.advapi32.AdjustTokenPrivileges
-  AdjustTokenPrivileges.restype = ctypes.wintypes.BOOL
-  AdjustTokenPrivileges.argtypes = (
-      ctypes.wintypes.HANDLE, ctypes.wintypes.BOOL, PTOKEN_PRIVILEGES,
-      ctypes.wintypes.DWORD, PTOKEN_PRIVILEGES,
-      ctypes.POINTER(ctypes.wintypes.DWORD))
-
-
-  def FormatError(err):
-    """Returns a formatted error on Windows in unicode."""
-    # We need to take in account the current code page.
-    return ctypes.wintypes.FormatError(err).decode(
-        locale.getpreferredencoding(), 'replace')
-
-
-  def QueryDosDevice(drive_letter):
-    """Returns the Windows 'native' path for a DOS drive letter."""
-    assert re.match(r'^[a-zA-Z]:$', drive_letter), drive_letter
-    assert isinstance(drive_letter, unicode)
-    # Guesswork. QueryDosDeviceW never returns the required number of bytes.
-    chars = 1024
-    drive_letter = drive_letter
-    p = create_unicode_buffer(chars)
-    if 0 == windll.kernel32.QueryDosDeviceW(drive_letter, p, chars):
-      err = GetLastError()
-      if err:
-        # pylint: disable=undefined-variable
-        msg = u'QueryDosDevice(%s): %s (%d)' % (
-              drive_letter, FormatError(err), err)
-        raise WindowsError(err, msg.encode('utf-8'))
-    return p.value
-
-
-  def GetShortPathName(long_path):
-    """Returns the Windows short path equivalent for a 'long' path."""
-    path = fs.extend(long_path)
-    chars = windll.kernel32.GetShortPathNameW(path, None, 0)
-    if chars:
-      p = create_unicode_buffer(chars)
-      if windll.kernel32.GetShortPathNameW(path, p, chars):
-        return fs.trim(p.value)
-
-    err = GetLastError()
-    if err:
-      # pylint: disable=undefined-variable
-      msg = u'GetShortPathName(%s): %s (%d)' % (
-            long_path, FormatError(err), err)
-      raise WindowsError(err, msg.encode('utf-8'))
-
-
-  def GetLongPathName(short_path):
-    """Returns the Windows long path equivalent for a 'short' path."""
-    path = fs.extend(short_path)
-    chars = windll.kernel32.GetLongPathNameW(path, None, 0)
-    if chars:
-      p = create_unicode_buffer(chars)
-      if windll.kernel32.GetLongPathNameW(path, p, chars):
-        return fs.trim(p.value)
-
-    err = GetLastError()
-    if err:
-      # pylint: disable=undefined-variable
-      msg = u'GetLongPathName(%s): %s (%d)' % (
-            short_path, FormatError(err), err)
-      raise WindowsError(err, msg.encode('utf-8'))
-
-
-  def MoveFileEx(oldpath, newpath, flags):
-    """Calls MoveFileEx, converting errors to WindowsError exceptions."""
-    old_p = fs.extend(oldpath)
-    new_p = fs.extend(newpath)
-    if not windll.kernel32.MoveFileExW(old_p, new_p, int(flags)):
-      # pylint: disable=undefined-variable
-      err = GetLastError()
-      msg = u'MoveFileEx(%s, %s, %d): %s (%d)' % (
-            oldpath, newpath, flags, FormatError(err), err)
-      raise WindowsError(err, msg.encode('utf-8'))
-
-
-  class DosDriveMap(object):
-    """Maps \Device\HarddiskVolumeN to N: on Windows."""
-    # Keep one global cache.
-    _MAPPING = {}
-
-    def __init__(self):
-      """Lazy loads the cache."""
-      if not self._MAPPING:
-        # This is related to UNC resolver on windows. Ignore that.
-        self._MAPPING[u'\\Device\\Mup'] = None
-        self._MAPPING[u'\\SystemRoot'] = os.environ[u'SystemRoot']
-
-        for letter in (chr(l) for l in xrange(ord('C'), ord('Z')+1)):
-          try:
-            letter = u'%s:' % letter
-            mapped = QueryDosDevice(letter)
-            if mapped in self._MAPPING:
-              logging.warn(
-                  ('Two drives: \'%s\' and \'%s\', are mapped to the same disk'
-                   '. Drive letters are a user-mode concept and the kernel '
-                   'traces only have NT path, so all accesses will be '
-                   'associated with the first drive letter, independent of the '
-                   'actual letter used by the code') % (
-                     self._MAPPING[mapped], letter))
-            else:
-              self._MAPPING[mapped] = letter
-          except WindowsError:  # pylint: disable=undefined-variable
-            pass
-
-    def to_win32(self, path):
-      """Converts a native NT path to Win32/DOS compatible path."""
-      match = re.match(r'(^\\Device\\[a-zA-Z0-9]+)(\\.*)?$', path)
-      if not match:
-        raise ValueError(
-            'Can\'t convert %s into a Win32 compatible path' % path,
-            path)
-      if not match.group(1) in self._MAPPING:
-        # Unmapped partitions may be accessed by windows for the
-        # fun of it while the test is running. Discard these.
-        return None
-      drive = self._MAPPING[match.group(1)]
-      if not drive or not match.group(2):
-        return drive
-      return drive + match.group(2)
-
-
-  def change_acl_for_delete(path):
-    """Zaps the SECURITY_DESCRIPTOR's DACL on a directory entry that is tedious
-    to delete.
-
-    This function is a heavy hammer. It discards the SECURITY_DESCRIPTOR and
-    creates a new one with only one DACL set to user:FILE_ALL_ACCESS.
-
-    Used as last resort.
-    """
-    STANDARD_RIGHTS_REQUIRED = 0xf0000
-    SYNCHRONIZE = 0x100000
-    FILE_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3ff
-
-    import win32security
-    user, _domain, _type = win32security.LookupAccountName(
-        '', getpass.getuser())
-    sd = win32security.SECURITY_DESCRIPTOR()
-    sd.Initialize()
-    sd.SetSecurityDescriptorOwner(user, False)
-    dacl = win32security.ACL()
-    dacl.Initialize()
-    dacl.AddAccessAllowedAce(
-        win32security.ACL_REVISION_DS, FILE_ALL_ACCESS, user)
-    sd.SetSecurityDescriptorDacl(1, dacl, 0)
-    # Note that this assumes the object is either owned by the current user or
-    # its group or that the current ACL permits this. Otherwise it will silently
-    # fail.
-    win32security.SetFileSecurity(
-        fs.extend(path), win32security.DACL_SECURITY_INFORMATION, sd)
-    # It's important to also look for the read only bit after, as it's possible
-    # the set_read_only() call to remove the read only bit had silently failed
-    # because there was no DACL for the user.
-    if not (os.stat(path).st_mode & stat.S_IWUSR):
-      os.chmod(path, 0777)
-
-  def isabs(path):
-    """Accepts X: as an absolute path, unlike python's os.path.isabs()."""
-    return os.path.isabs(path) or len(path) == 2 and path[1] == ':'
-
-
-  def find_item_native_case(root, item):
-    """Gets the native path case of a single item based at root_path."""
-    if item == '..':
-      return item
-
-    root = get_native_path_case(root)
-    return os.path.basename(get_native_path_case(os.path.join(root, item)))
-
-
-  @tools.profile
-  @tools.cached
-  def get_native_path_case(p):
-    """Returns the native path case for an existing file.
-
-    On Windows, removes any leading '\\?\'.
-    """
-    assert isinstance(p, unicode), repr(p)
-    if not isabs(p):
-      raise ValueError(
-          'get_native_path_case(%r): Require an absolute path' % p, p)
-
-    # Make sure it is normalized to os.path.sep. Do not do it here to keep the
-    # function fast
-    assert '/' not in p, p
-    suffix = ''
-    count = p.count(':')
-    if count > 1:
-      # This means it has an alternate-data stream. There could be 3 ':', since
-      # it could be the $DATA datastream of an ADS. Split the whole ADS suffix
-      # off and add it back afterward. There is no way to know the native path
-      # case of an alternate data stream.
-      items = p.split(':')
-      p = ':'.join(items[0:2])
-      suffix = ''.join(':' + i for i in items[2:])
-
-    # TODO(maruel): Use os.path.normpath?
-    if p.endswith('.\\'):
-      p = p[:-2]
-
-    # Windows used to have an option to turn on case sensitivity on non Win32
-    # subsystem but that's out of scope here and isn't supported anymore.
-    # Go figure why GetShortPathName() is needed.
-    try:
-      out = GetLongPathName(GetShortPathName(p))
-    except OSError, e:
-      if e.args[0] in (2, 3, 5):
-        # The path does not exist. Try to recurse and reconstruct the path.
-        base = os.path.dirname(p)
-        rest = os.path.basename(p)
-        return os.path.join(get_native_path_case(base), rest)
-      raise
-    # Always upper case the first letter since GetLongPathName() will return the
-    # drive letter in the case it was given.
-    return out[0].upper() + out[1:] + suffix
-
-
-  def enum_processes_win():
-    """Returns all processes on the system that are accessible to this process.
-
-    Returns:
-      Win32_Process COM objects. See
-      http://msdn.microsoft.com/library/aa394372.aspx for more details.
-    """
-    import win32com.client  # pylint: disable=F0401
-    wmi_service = win32com.client.Dispatch('WbemScripting.SWbemLocator')
-    wbem = wmi_service.ConnectServer('.', 'root\\cimv2')
-    return [proc for proc in wbem.ExecQuery('SELECT * FROM Win32_Process')]
-
-
-  def filter_processes_dir_win(processes, root_dir):
-    """Returns all processes which has their main executable located inside
-    root_dir.
-    """
-    def normalize_path(filename):
-      try:
-        return GetLongPathName(unicode(filename)).lower()
-      except:  # pylint: disable=W0702
-        return unicode(filename).lower()
-
-    root_dir = normalize_path(root_dir)
-
-    def process_name(proc):
-      if proc.ExecutablePath:
-        return normalize_path(proc.ExecutablePath)
-      # proc.ExecutablePath may be empty if the process hasn't finished
-      # initializing, but the command line may be valid.
-      if proc.CommandLine is None:
-        return None
-      parsed_line = shlex.split(proc.CommandLine)
-      if len(parsed_line) >= 1 and os.path.isabs(parsed_line[0]):
-        return normalize_path(parsed_line[0])
-      return None
-
-    long_names = ((process_name(proc), proc) for proc in processes)
-
-    return [
-      proc for name, proc in long_names
-      if name is not None and name.startswith(root_dir)
-    ]
-
-
-  def filter_processes_tree_win(processes):
-    """Returns all the processes under the current process."""
-    # Convert to dict.
-    processes = dict((p.ProcessId, p) for p in processes)
-    root_pid = os.getpid()
-    out = {root_pid: processes[root_pid]}
-    while True:
-      found = set()
-      for pid in out:
-        found.update(
-            p.ProcessId for p in processes.itervalues()
-            if p.ParentProcessId == pid)
-      found -= set(out)
-      if not found:
-        break
-      out.update((p, processes[p]) for p in found)
-    return out.values()
-
-
-  def get_process_token():
-    """Get the current process token."""
-    TOKEN_ALL_ACCESS = 0xF01FF
-    token = ctypes.wintypes.HANDLE()
-    if not OpenProcessToken(
-        GetCurrentProcess(), TOKEN_ALL_ACCESS, ctypes.byref(token)):
-      # pylint: disable=undefined-variable
-      raise WindowsError('Couldn\'t get process token')
-    return token
-
-
-  def get_luid(name):
-    """Returns the LUID for a privilege."""
-    luid = LUID()
-    if not LookupPrivilegeValue(None, unicode(name), ctypes.byref(luid)):
-      # pylint: disable=undefined-variable
-      raise WindowsError('Couldn\'t lookup privilege value')
-    return luid
-
-
-  def enable_privilege(name):
-    """Enables the privilege for the current process token.
-
-    Returns:
-    - True if the assignment is successful.
-    """
-    SE_PRIVILEGE_ENABLED = 2
-    ERROR_NOT_ALL_ASSIGNED = 1300
-
-    size = ctypes.sizeof(TOKEN_PRIVILEGES) + ctypes.sizeof(LUID_AND_ATTRIBUTES)
-    buf = ctypes.create_string_buffer(size)
-    tp = ctypes.cast(buf, ctypes.POINTER(TOKEN_PRIVILEGES)).contents
-    tp.count = 1
-    tp.get_array()[0].LUID = get_luid(name)
-    tp.get_array()[0].Attributes = SE_PRIVILEGE_ENABLED
-    token = get_process_token()
-    try:
-      if not AdjustTokenPrivileges(token, False, tp, 0, None, None):
-        # pylint: disable=undefined-variable
-        raise WindowsError('Error in AdjustTokenPrivileges')
-    finally:
-      ctypes.windll.kernel32.CloseHandle(token)
-    return ctypes.windll.kernel32.GetLastError() != ERROR_NOT_ALL_ASSIGNED
-
-
-  def enable_symlink():
-    """Enable SeCreateSymbolicLinkPrivilege for the current token.
-
-    Returns:
-    - True if symlink support is enabled.
-
-    Thanks Microsoft. This is appreciated.
-    """
-    return enable_privilege(u'SeCreateSymbolicLinkPrivilege')
-
-
-elif sys.platform == 'darwin':
-
-
-  # On non-windows, keep the stdlib behavior.
-  isabs = os.path.isabs
-
-
-  def _native_case(p):
-    """Gets the native path case. Warning: this function resolves symlinks."""
-    try:
-      rel_ref, _ = Carbon.File.FSPathMakeRef(p.encode('utf-8'))
-      # The OSX underlying code uses NFD but python strings are in NFC. This
-      # will cause issues with os.listdir() for example. Since the dtrace log
-      # *is* in NFC, normalize it here.
-      out = unicodedata.normalize(
-          'NFC', rel_ref.FSRefMakePath().decode('utf-8'))
-      if p.endswith(os.path.sep) and not out.endswith(os.path.sep):
-        return out + os.path.sep
-      return out
-    except MacOS.Error, e:
-      if e.args[0] in (-43, -120):
-        # The path does not exist. Try to recurse and reconstruct the path.
-        # -43 means file not found.
-        # -120 means directory not found.
-        base = os.path.dirname(p)
-        rest = os.path.basename(p)
-        return os.path.join(_native_case(base), rest)
-      raise OSError(
-          e.args[0], 'Failed to get native path for %s' % p, p, e.args[1])
-
-
-  def _split_at_symlink_native(base_path, rest):
-    """Returns the native path for a symlink."""
-    base, symlink, rest = split_at_symlink(base_path, rest)
-    if symlink:
-      if not base_path:
-        base_path = base
-      else:
-        base_path = safe_join(base_path, base)
-      symlink = find_item_native_case(base_path, symlink)
-    return base, symlink, rest
-
-
-  def find_item_native_case(root_path, item):
-    """Gets the native path case of a single item based at root_path.
-
-    There is no API to get the native path case of symlinks on OSX. So it
-    needs to be done the slow way.
-    """
-    if item == '..':
-      return item
-
-    item = item.lower()
-    for element in fs.listdir(root_path):
-      if element.lower() == item:
-        return element
-
-
-  @tools.profile
-  @tools.cached
-  def get_native_path_case(path):
-    """Returns the native path case for an existing file.
-
-    Technically, it's only HFS+ on OSX that is case preserving and
-    insensitive. It's the default setting on HFS+ but can be changed.
-    """
-    assert isinstance(path, unicode), repr(path)
-    if not isabs(path):
-      raise ValueError(
-          'get_native_path_case(%r): Require an absolute path' % path, path)
-    if path.startswith('/dev'):
-      # /dev is not visible from Carbon, causing an exception.
-      return path
-
-    # Starts assuming there is no symlink along the path.
-    resolved = _native_case(path)
-    if path.lower() in (resolved.lower(), resolved.lower() + './'):
-      # This code path is incredibly faster.
-      logging.debug('get_native_path_case(%s) = %s' % (path, resolved))
-      return resolved
-
-    # There was a symlink, process it.
-    base, symlink, rest = _split_at_symlink_native(None, path)
-    if not symlink:
-      # TODO(maruel): This can happen on OSX because we use stale APIs on OSX.
-      # Fixing the APIs usage will likely fix this bug. The bug occurs due to
-      # hardlinked files, where the API may return one file path or the other
-      # depending on how it feels.
-      return base
-    prev = base
-    base = safe_join(_native_case(base), symlink)
-    assert len(base) > len(prev)
-    while rest:
-      prev = base
-      relbase, symlink, rest = _split_at_symlink_native(base, rest)
-      base = safe_join(base, relbase)
-      assert len(base) > len(prev), (prev, base, symlink)
-      if symlink:
-        base = safe_join(base, symlink)
-      assert len(base) > len(prev), (prev, base, symlink)
-    # Make sure no symlink was resolved.
-    assert base.lower() == path.lower(), (base, path)
-    logging.debug('get_native_path_case(%s) = %s' % (path, base))
-    return base
-
-
-  def enable_symlink():
-    return True
-
-
-else:  # OSes other than Windows and OSX.
-
-
-  # On non-windows, keep the stdlib behavior.
-  isabs = os.path.isabs
-
-
-  def find_item_native_case(root, item):
-    """Gets the native path case of a single item based at root_path."""
-    if item == '..':
-      return item
-
-    root = get_native_path_case(root)
-    return os.path.basename(get_native_path_case(os.path.join(root, item)))
-
-
-  @tools.profile
-  @tools.cached
-  def get_native_path_case(path):
-    """Returns the native path case for an existing file.
-
-    On OSes other than OSX and Windows, assume the file system is
-    case-sensitive.
-
-    TODO(maruel): This is not strictly true. Implement if necessary.
-    """
-    assert isinstance(path, unicode), repr(path)
-    if not isabs(path):
-      raise ValueError(
-          'get_native_path_case(%r): Require an absolute path' % path, path)
-    # Give up on cygwin, as GetLongPathName() can't be called.
-    # Linux traces tends to not be normalized so use this occasion to normalize
-    # it. This function implementation already normalizes the path on the other
-    # OS so this needs to be done here to be coherent between OSes.
-    out = os.path.normpath(path)
-    if path.endswith(os.path.sep) and not out.endswith(os.path.sep):
-      out = out + os.path.sep
-    # In 99.99% of cases on Linux out == path. Since a return value is cached
-    # forever, reuse (also cached) |path| object. It safes approx 7MB of ram
-    # when isolating Chromium tests. It's important on memory constrained
-    # systems running ARM.
-    return path if out == path else out
-
-
-  def enable_symlink():
-    return True
-
-
-if sys.platform != 'win32':  # All non-Windows OSes.
-
-
-  def safe_join(*args):
-    """Joins path elements like os.path.join() but doesn't abort on absolute
-    path.
-
-    os.path.join('foo', '/bar') == '/bar'
-    but safe_join('foo', '/bar') == 'foo/bar'.
-    """
-    out = ''
-    for element in args:
-      if element.startswith(os.path.sep):
-        if out.endswith(os.path.sep):
-          out += element[1:]
-        else:
-          out += element
-      else:
-        if out.endswith(os.path.sep):
-          out += element
-        else:
-          out += os.path.sep + element
-    return out
-
-
-  @tools.profile
-  def split_at_symlink(base_dir, relfile):
-    """Scans each component of relfile and cut the string at the symlink if
-    there is any.
-
-    Returns a tuple (base_path, symlink, rest), with symlink == rest == None if
-    not symlink was found.
-    """
-    if base_dir:
-      assert relfile
-      assert os.path.isabs(base_dir)
-      index = 0
-    else:
-      assert os.path.isabs(relfile)
-      index = 1
-
-    def at_root(rest):
-      if base_dir:
-        return safe_join(base_dir, rest)
-      return rest
-
-    while True:
-      try:
-        index = relfile.index(os.path.sep, index)
-      except ValueError:
-        index = len(relfile)
-      full = at_root(relfile[:index])
-      if fs.islink(full):
-        # A symlink!
-        base = os.path.dirname(relfile[:index])
-        symlink = os.path.basename(relfile[:index])
-        rest = relfile[index:]
-        logging.debug(
-            'split_at_symlink(%s, %s) -> (%s, %s, %s)' %
-            (base_dir, relfile, base, symlink, rest))
-        return base, symlink, rest
-      if index == len(relfile):
-        break
-      index += 1
-    return relfile, None, None
-
-
-def relpath(path, root):
-  """os.path.relpath() that keeps trailing os.path.sep."""
-  out = os.path.relpath(path, root)
-  if path.endswith(os.path.sep):
-    out += os.path.sep
-  return out
-
-
-def safe_relpath(filepath, basepath):
-  """Do not throw on Windows when filepath and basepath are on different drives.
-
-  Different than relpath() above since this one doesn't keep the trailing
-  os.path.sep and it swallows exceptions on Windows and return the original
-  absolute path in the case of different drives.
-  """
-  try:
-    return os.path.relpath(filepath, basepath)
-  except ValueError:
-    assert sys.platform == 'win32'
-    return filepath
-
-
-def normpath(path):
-  """os.path.normpath() that keeps trailing os.path.sep."""
-  out = os.path.normpath(path)
-  if path.endswith(os.path.sep):
-    out += os.path.sep
-  return out
-
-
-def posix_relpath(path, root):
-  """posix.relpath() that keeps trailing slash.
-
-  It is different from relpath() since it can be used on Windows.
-  """
-  out = posixpath.relpath(path, root)
-  if path.endswith('/'):
-    out += '/'
-  return out
-
-
-def cleanup_path(x):
-  """Cleans up a relative path. Converts any os.path.sep to '/' on Windows."""
-  if x:
-    x = x.rstrip(os.path.sep).replace(os.path.sep, '/')
-  if x == '.':
-    x = ''
-  if x:
-    x += '/'
-  return x
-
-
-def is_url(path):
-  """Returns True if it looks like an HTTP url instead of a file path."""
-  return bool(re.match(r'^https?://.+$', path))
-
-
-def path_starts_with(prefix, path):
-  """Returns true if the components of the path |prefix| are the same as the
-  initial components of |path| (or all of the components of |path|). The paths
-  must be absolute.
-  """
-  assert os.path.isabs(prefix) and os.path.isabs(path)
-  prefix = os.path.normpath(prefix)
-  path = os.path.normpath(path)
-  assert prefix == get_native_path_case(prefix), prefix
-  assert path == get_native_path_case(path), path
-  prefix = prefix.rstrip(os.path.sep) + os.path.sep
-  path = path.rstrip(os.path.sep) + os.path.sep
-  return path.startswith(prefix)
-
-
-@tools.profile
-def fix_native_path_case(root, path):
-  """Ensures that each component of |path| has the proper native case.
-
-  It does so by iterating slowly over the directory elements of |path|. The file
-  must exist.
-  """
-  native_case_path = root
-  for raw_part in path.split(os.sep):
-    if not raw_part or raw_part == '.':
-      break
-
-    part = find_item_native_case(native_case_path, raw_part)
-    if not part:
-      raise OSError(
-          'File %s doesn\'t exist' %
-          os.path.join(native_case_path, raw_part))
-    native_case_path = os.path.join(native_case_path, part)
-
-  return os.path.normpath(native_case_path)
-
-
-def ensure_command_has_abs_path(command, cwd):
-  """Ensures that an isolate command uses absolute path.
-
-  This is needed since isolate can specify a command relative to 'cwd' and
-  subprocess.call doesn't consider 'cwd' when searching for executable.
-  """
-  if not os.path.isabs(command[0]):
-    command[0] = os.path.abspath(os.path.join(cwd, command[0]))
-
-
-def is_same_filesystem(path1, path2):
-  """Returns True if both paths are on the same filesystem.
-
-  This is required to enable the use of hardlinks.
-  """
-  assert os.path.isabs(path1), path1
-  assert os.path.isabs(path2), path2
-  if sys.platform == 'win32':
-    # If the drive letter mismatches, assume it's a separate partition.
-    # TODO(maruel): It should look at the underlying drive, a drive letter could
-    # be a mount point to a directory on another drive.
-    assert re.match(ur'^[a-zA-Z]\:\\.*', path1), path1
-    assert re.match(ur'^[a-zA-Z]\:\\.*', path2), path2
-    if path1[0].lower() != path2[0].lower():
-      return False
-  return fs.stat(path1).st_dev == fs.stat(path2).st_dev
-
-
-def get_free_space(path):
-  """Returns the number of free bytes.
-
-  On POSIX platforms, this returns the free space as visible by the current
-  user. On some systems, there's a percentage of the free space on the partition
-  that is only accessible as the root user.
-  """
-  if sys.platform == 'win32':
-    free_bytes = ctypes.c_ulonglong(0)
-    ctypes.windll.kernel32.GetDiskFreeSpaceExW(
-        ctypes.c_wchar_p(path), None, None, ctypes.pointer(free_bytes))
-    return free_bytes.value
-  # For OSes other than Windows.
-  f = fs.statvfs(path)  # pylint: disable=E1101
-  return f.f_bfree * f.f_frsize
-
-
-### Write file functions.
-
-
-def hardlink(source, link_name):
-  """Hardlinks a file.
-
-  Add support for os.link() on Windows.
-  """
-  assert isinstance(source, unicode), source
-  assert isinstance(link_name, unicode), link_name
-  if sys.platform == 'win32':
-    if not ctypes.windll.kernel32.CreateHardLinkW(
-        fs.extend(link_name), fs.extend(source), 0):
-      raise OSError()
-  else:
-    fs.link(source, link_name)
-
-
-def readable_copy(outfile, infile):
-  """Makes a copy of the file that is readable by everyone."""
-  fs.copy2(infile, outfile)
-  fs.chmod(
-      outfile,
-      fs.stat(outfile).st_mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
-
-
-def set_read_only(path, read_only):
-  """Sets or resets the write bit on a file or directory.
-
-  Zaps out access to 'group' and 'others'.
-  """
-  mode = fs.lstat(path).st_mode
-  # TODO(maruel): Stop removing GO bits.
-  mode = (mode & 0500) if read_only else (mode | 0200)
-  if hasattr(os, 'lchmod'):
-    fs.lchmod(path, mode)  # pylint: disable=E1101
-  else:
-    if stat.S_ISLNK(mode):
-      # Skip symlink without lchmod() support.
-      logging.debug(
-          'Can\'t change %sw bit on symlink %s',
-          '-' if read_only else '+', path)
-      return
-
-    # TODO(maruel): Implement proper DACL modification on Windows.
-    fs.chmod(path, mode)
-
-
-def set_read_only_swallow(path, read_only):
-  """Returns if an OSError exception occured."""
-  try:
-    set_read_only(path, read_only)
-  except OSError as e:
-    return e
-
-
-def remove(filepath):
-  """Removes a file, even if it is read-only."""
-  # TODO(maruel): Not do it unless necessary since it slows this function
-  # down.
-  if sys.platform == 'win32':
-    # Deleting a read-only file will fail if it is read-only.
-    set_read_only(filepath, False)
-  else:
-    # Deleting a read-only file will fail if the directory is read-only.
-    set_read_only(os.path.dirname(filepath), False)
-  fs.remove(filepath)
-
-
-def try_remove(filepath):
-  """Removes a file without crashing even if it doesn't exist."""
-  try:
-    remove(filepath)
-  except OSError:
-    pass
-
-
-def link_file(outfile, infile, action):
-  """Links a file. The type of link depends on |action|.
-
-  Returns:
-    True if the action was carried on, False if fallback was used.
-  """
-  if action < 1 or action > COPY:
-    raise ValueError('Unknown mapping action %s' % action)
-  # TODO(maruel): Skip these checks.
-  if not fs.isfile(infile):
-    raise OSError('%s is missing' % infile)
-  if fs.isfile(outfile):
-    raise OSError(
-        '%s already exist; insize:%d; outsize:%d' %
-        (outfile, fs.stat(infile).st_size, fs.stat(outfile).st_size))
-
-  if action == COPY:
-    readable_copy(outfile, infile)
-    return True
-
-  if action in (SYMLINK, SYMLINK_WITH_FALLBACK):
-    try:
-      fs.symlink(infile, outfile)  # pylint: disable=E1101
-      return True
-    except OSError:
-      if action == SYMLINK:
-        raise
-      logging.warning(
-          'Failed to symlink, falling back to copy %s to %s' % (
-            infile, outfile))
-      # Signal caller that fallback copy was used.
-      readable_copy(outfile, infile)
-      return False
-
-  # HARDLINK or HARDLINK_WITH_FALLBACK.
-  try:
-    hardlink(infile, outfile)
-    return True
-  except OSError as e:
-    if action == HARDLINK:
-      raise OSError('Failed to hardlink %s to %s: %s' % (infile, outfile, e))
-
-  # Probably a different file system.
-  logging.warning(
-      'Failed to hardlink, falling back to copy %s to %s' % (
-        infile, outfile))
-  readable_copy(outfile, infile)
-  # Signal caller that fallback copy was used.
-  return False
-
-
-def atomic_replace(path, body):
-  """Atomically replaces content of the file at given path.
-
-  'body' will be written to the file as is (as in open(..., 'wb') mode).
-
-  Does not preserve file attributes.
-
-  Raises OSError or IOError on errors (e.g. in case the file is locked on
-  Windows). The caller may retry a bunch of times in such cases before giving
-  up.
-  """
-  assert path and path[-1] != os.sep, path
-  path = os.path.abspath(path)
-  dir_name, base_name = os.path.split(path)
-
-  fd, tmp_name = tempfile.mkstemp(dir=dir_name, prefix=base_name+'_')
-  try:
-    with os.fdopen(fd, 'wb') as f:
-      f.write(body)
-      f.flush()
-      os.fsync(fd)
-    if sys.platform != 'win32':
-      os.rename(tmp_name, path)
-    else:
-      # Flags are MOVEFILE_REPLACE_EXISTING|MOVEFILE_WRITE_THROUGH.
-      MoveFileEx(unicode(tmp_name), unicode(path), 0x1|0x8)
-    tmp_name =  None # no need to remove it in 'finally' block anymore
-  finally:
-    if tmp_name:
-      try:
-        os.remove(tmp_name)
-      except OSError as e:
-        logging.warning(
-            'Failed to delete temp file %s in replace_file_content: %s',
-            tmp_name, e)
-
-
-### Write directory functions.
-
-
-def ensure_tree(path, perm=0777):
-  """Ensures a directory exists."""
-  if not fs.isdir(path):
-    fs.makedirs(path, perm)
-
-
-def make_tree_read_only(root):
-  """Makes all the files in the directories read only.
-
-  Also makes the directories read only, only if it makes sense on the platform.
-
-  This means no file can be created or deleted.
-  """
-  err = None
-  logging.debug('make_tree_read_only(%s)', root)
-  for dirpath, dirnames, filenames in fs.walk(root, topdown=True):
-    for filename in filenames:
-      e = set_read_only_swallow(os.path.join(dirpath, filename), True)
-      if not err:
-        err = e
-    if sys.platform != 'win32':
-      # It must not be done on Windows.
-      for dirname in dirnames:
-        e = set_read_only_swallow(os.path.join(dirpath, dirname), True)
-        if not err:
-          err = e
-  if sys.platform != 'win32':
-    e = set_read_only_swallow(root, True)
-    if not err:
-      err = e
-  if err:
-    # pylint: disable=raising-bad-type
-    raise err
-
-
-def make_tree_files_read_only(root):
-  """Makes all the files in the directories read only but not the directories
-  themselves.
-
-  This means files can be created or deleted.
-  """
-  logging.debug('make_tree_files_read_only(%s)', root)
-  if sys.platform != 'win32':
-    set_read_only(root, False)
-  for dirpath, dirnames, filenames in fs.walk(root, topdown=True):
-    for filename in filenames:
-      set_read_only(os.path.join(dirpath, filename), True)
-    if sys.platform != 'win32':
-      # It must not be done on Windows.
-      for dirname in dirnames:
-        set_read_only(os.path.join(dirpath, dirname), False)
-
-
-def make_tree_writeable(root):
-  """Makes all the files in the directories writeable.
-
-  Also makes the directories writeable, only if it makes sense on the platform.
-
-  It is different from make_tree_deleteable() because it unconditionally affects
-  the files.
-  """
-  logging.debug('make_tree_writeable(%s)', root)
-  if sys.platform != 'win32':
-    set_read_only(root, False)
-  for dirpath, dirnames, filenames in fs.walk(root, topdown=True):
-    for filename in filenames:
-      set_read_only(os.path.join(dirpath, filename), False)
-    if sys.platform != 'win32':
-      # It must not be done on Windows.
-      for dirname in dirnames:
-        set_read_only(os.path.join(dirpath, dirname), False)
-
-
-def make_tree_deleteable(root):
-  """Changes the appropriate permissions so the files in the directories can be
-  deleted.
-
-  On Windows, the files are modified. On other platforms, modify the directory.
-  It only does the minimum so the files can be deleted safely.
-
-  Warning on Windows: since file permission is modified, the file node is
-  modified. This means that for hard-linked files, every directory entry for the
-  file node has its file permission modified.
-  """
-  logging.debug('make_tree_deleteable(%s)', root)
-  err = None
-  if sys.platform != 'win32':
-    e = set_read_only_swallow(root, False)
-    if not err:
-      err = e
-  for dirpath, dirnames, filenames in fs.walk(root, topdown=True):
-    if sys.platform == 'win32':
-      for filename in filenames:
-        e = set_read_only_swallow(os.path.join(dirpath, filename), False)
-        if not err:
-          err = e
-    else:
-      for dirname in dirnames:
-        e = set_read_only_swallow(os.path.join(dirpath, dirname), False)
-        if not err:
-          err = e
-  if err:
-    # pylint: disable=raising-bad-type
-    raise err
-
-
-def rmtree(root):
-  """Wrapper around shutil.rmtree() to retry automatically on Windows.
-
-  On Windows, forcibly kills processes that are found to interfere with the
-  deletion.
-
-  Returns:
-    True on normal execution, False if berserk techniques (like killing
-    processes) had to be used.
-  """
-  logging.info('rmtree(%s)', root)
-  assert sys.getdefaultencoding() == 'utf-8', sys.getdefaultencoding()
-  # Do not assert here yet because this would break too much code.
-  root = unicode(root)
-  try:
-    make_tree_deleteable(root)
-  except OSError as e:
-    logging.warning('Swallowing make_tree_deleteable() error: %s', e)
-
-  # First try the soft way: tries 3 times to delete and sleep a bit in between.
-  # Retries help if test subprocesses outlive main process and try to actively
-  # use or write to the directory while it is being deleted.
-  max_tries = 3
-  for i in xrange(max_tries):
-    # errors is a list of tuple(function, path, excinfo).
-    errors = []
-    fs.rmtree(root, onerror=lambda *args: errors.append(args))
-    if not errors:
-      return True
-    if not i and sys.platform == 'win32':
-      for _, path, _ in errors:
-        try:
-          change_acl_for_delete(path)
-        except Exception as e:
-          sys.stderr.write('- %s (failed to update ACL: %s)\n' % (path, e))
-
-    if i == max_tries - 1:
-      sys.stderr.write(
-          'Failed to delete %s. The following files remain:\n' % root)
-      for _, path, _ in errors:
-        sys.stderr.write('- %s\n' % path)
-    else:
-      delay = (i+1)*2
-      sys.stderr.write(
-          'Failed to delete %s (%d files remaining).\n'
-          '  Maybe the test has a subprocess outliving it.\n'
-          '  Sleeping %d seconds.\n' %
-          (root, len(errors), delay))
-      time.sleep(delay)
-
-  # If soft retries fail on Linux, there's nothing better we can do.
-  if sys.platform != 'win32':
-    raise errors[0][2][0], errors[0][2][1], errors[0][2][2]
-
-  # The soft way was not good enough. Try the hard way. Enumerates both:
-  # - all child processes from this process.
-  # - processes where the main executable in inside 'root'. The reason is that
-  #   the ancestry may be broken so stray grand-children processes could be
-  #   undetected by the first technique.
-  # This technique is not fool-proof but gets mostly there.
-  def get_processes():
-    processes = enum_processes_win()
-    tree_processes = filter_processes_tree_win(processes)
-    dir_processes = filter_processes_dir_win(processes, root)
-    # Convert to dict to remove duplicates.
-    processes = dict((p.ProcessId, p) for p in tree_processes)
-    processes.update((p.ProcessId, p) for p in dir_processes)
-    processes.pop(os.getpid())
-    return processes
-
-  for i in xrange(3):
-    sys.stderr.write('Enumerating processes:\n')
-    processes = get_processes()
-    if not processes:
-      break
-    for _, proc in sorted(processes.iteritems()):
-      sys.stderr.write(
-          '- pid %d; Handles: %d; Exe: %s; Cmd: %s\n' % (
-            proc.ProcessId,
-            proc.HandleCount,
-            proc.ExecutablePath,
-            proc.CommandLine))
-    sys.stderr.write('Terminating %d processes.\n' % len(processes))
-    for pid in sorted(processes):
-      try:
-        # Killing is asynchronous.
-        os.kill(pid, 9)
-        sys.stderr.write('- %d killed\n' % pid)
-      except OSError:
-        sys.stderr.write('- failed to kill %s\n' % pid)
-    if i < 2:
-      time.sleep((i+1)*2)
-  else:
-    processes = get_processes()
-    if processes:
-      sys.stderr.write('Failed to terminate processes.\n')
-      raise errors[0][2][0], errors[0][2][1], errors[0][2][2]
-
-  # Now that annoying processes in root are evicted, try again.
-  errors = []
-  fs.rmtree(root, onerror=lambda *args: errors.append(args))
-  if errors:
-    # There's no hope.
-    sys.stderr.write(
-        'Failed to delete %s. The following files remain:\n' % root)
-    for _, path, _ in errors:
-      sys.stderr.write('- %s\n' % path)
-    raise errors[0][2][0], errors[0][2][1], errors[0][2][2]
-  return False
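For reference, atomic_replace() in the removed file_path.py above follows the standard write-to-temp-then-rename pattern. Below is a minimal, POSIX-only sketch of that pattern (illustrative only, not part of the patch; the deleted implementation also covered Windows by renaming through MoveFileEx with MOVEFILE_REPLACE_EXISTING and logged, rather than raised, cleanup failures):

import os
import tempfile


def atomic_replace_sketch(path, body):
    # Write 'body' (bytes) to a temporary file in the destination directory,
    # flush it to disk, then rename it over 'path'. On POSIX the rename is
    # atomic, so readers never observe a partially written file.
    path = os.path.abspath(path)
    dir_name, base_name = os.path.split(path)
    fd, tmp_name = tempfile.mkstemp(dir=dir_name, prefix=base_name + '_')
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(body)
            f.flush()
            os.fsync(f.fileno())
        os.rename(tmp_name, path)
        tmp_name = None  # Nothing left to clean up.
    finally:
        if tmp_name:
            os.remove(tmp_name)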
diff --git a/tools/swarming_client/utils/fs.py b/tools/swarming_client/utils/fs.py
deleted file mode 100644
index 1c422fb..0000000
--- a/tools/swarming_client/utils/fs.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Wraps os, os.path and shutil functions to work around MAX_PATH on Windows."""
-
-import __builtin__
-import inspect
-import os
-import shutil
-import sys
-
-
-if sys.platform == 'win32':
-
-
-  import ctypes
-  GetFileAttributesW = ctypes.windll.kernel32.GetFileAttributesW
-  GetFileAttributesW.argtypes = (ctypes.c_wchar_p,)
-  GetFileAttributesW.restype = ctypes.c_uint
-  CreateSymbolicLinkW = ctypes.windll.kernel32.CreateSymbolicLinkW
-  CreateSymbolicLinkW.argtypes = (
-      ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32)
-  CreateSymbolicLinkW.restype = ctypes.c_ubyte
-
-
-  def extend(path):
-    """Adds '\\\\?\\' when given an absolute path so the MAX_PATH (260) limit is
-    not enforced.
-    """
-    assert os.path.isabs(path), path
-    assert isinstance(path, unicode), path
-    prefix = u'\\\\?\\'
-    return path if path.startswith(prefix) else prefix + path
-
-
-  def trim(path):
-    """Removes '\\\\?\\' when receiving a path."""
-    assert isinstance(path, unicode), path
-    prefix = u'\\\\?\\'
-    if path.startswith(prefix):
-      path = path[len(prefix):]
-    assert os.path.isabs(path), path
-    return path
-
-
-  def islink(path):
-    """Proper implementation of islink() for Windows.
-
-    The stdlib is broken.
-    https://msdn.microsoft.com/library/windows/desktop/aa365682.aspx
-    """
-    FILE_ATTRIBUTE_REPARSE_POINT = 1024
-    return bool(GetFileAttributesW(extend(path)) & FILE_ATTRIBUTE_REPARSE_POINT)
-
-
-  def symlink(source, link_name):
-    """Creates a symlink on Windows 7 and later.
-
-    This function will only work once SeCreateSymbolicLinkPrivilege has been
-    enabled. See file_path.enable_symlink().
-
-    Useful material:
-    CreateSymbolicLinkW:
-      https://msdn.microsoft.com/library/windows/desktop/aa363866.aspx
-    UAC and privilege stripping:
-      https://msdn.microsoft.com/library/bb530410.aspx
-    Privilege constants:
-      https://msdn.microsoft.com/library/windows/desktop/bb530716.aspx
-    """
-    # TODO(maruel): This forces always creating absolute path symlinks.
-    source = extend(source)
-    flags = 1 if os.path.isdir(source) else 0
-    if not CreateSymbolicLinkW(extend(link_name), source, flags):
-      raise WindowsError()  # pylint: disable=undefined-variable
-
-
-  def walk(top, *args, **kwargs):
-    return os.walk(extend(top), *args, **kwargs)
-
-
-else:
-
-
-  def extend(path):
-    """Convert the path back to utf-8.
-
-    In some rare case, concatenating str and unicode may cause a
-    UnicodeEncodeError because the default encoding is 'ascii'.
-    """
-    assert os.path.isabs(path), path
-    assert isinstance(path, unicode), path
-    return path.encode('utf-8')
-
-
-  def trim(path):
-    """Path mangling is not needed on POSIX."""
-    assert os.path.isabs(path), path
-    assert isinstance(path, str), path
-    return path.decode('utf-8')
-
-
-  def islink(path):
-    return os.path.islink(extend(path))
-
-
-  def symlink(source, link_name):
-    return os.symlink(source, extend(link_name))
-
-
-  def walk(top, *args, **kwargs):
-    for root, dirs, files in os.walk(extend(top), *args, **kwargs):
-      yield trim(root), dirs, files
-
-
-## builtin
-
-
-def open(path, *args, **kwargs):  # pylint: disable=redefined-builtin
-  return __builtin__.open(extend(path), *args, **kwargs)
-
-
-## os
-
-
-def link(source, link_name):
-  return os.link(extend(source), extend(link_name))
-
-
-def rename(old, new):
-  return os.rename(extend(old), extend(new))
-
-
-def renames(old, new):
-  return os.renames(extend(old), extend(new))
-
-
-## shutil
-
-
-def copy2(src, dst):
-  return shutil.copy2(extend(src), extend(dst))
-
-
-def rmtree(path, *args, **kwargs):
-  return shutil.rmtree(extend(path), *args, **kwargs)
-
-
-## The rest
-
-
-def _get_lambda(func):
-  return lambda path, *args, **kwargs: func(extend(path), *args, **kwargs)
-
-
-def _is_path_fn(func):
-  return (inspect.getargspec(func)[0] or [None]) == 'path'
-
-
-_os_fns = (
-  'access', 'chdir', 'chflags', 'chroot', 'chmod', 'chown', 'lchflags',
-  'lchmod', 'lchown', 'listdir', 'lstat', 'mknod', 'mkdir', 'makedirs',
-  'remove', 'removedirs', 'rmdir', 'stat', 'statvfs', 'unlink', 'utime')
-
-_os_path_fns = (
-  'exists', 'lexists', 'getatime', 'getmtime', 'getctime', 'getsize', 'isfile',
-  'isdir', 'ismount')
-
-
-for _fn in _os_fns:
-  if hasattr(os, _fn):
-    sys.modules[__name__].__dict__.setdefault(
-        _fn, _get_lambda(getattr(os, _fn)))
-
-
-for _fn in _os_path_fns:
-  if hasattr(os.path, _fn):
-    sys.modules[__name__].__dict__.setdefault(
-        _fn, _get_lambda(getattr(os.path, _fn)))
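# Editor's note: an illustrative sketch (not part of the diff) of the wrapping
# pattern used by the module deleted above -- module-level functions are
# generated that pre-process their path argument before delegating to os /
# os.path. The extend() below is a simplified stand-in for the real one.

import os
import sys

def extend(path):
  # Stand-in: the deleted module returns an absolute, OS-appropriate path here.
  return os.path.abspath(path)

def _get_lambda(func):
  # Wrap func so its first argument goes through extend() first.
  return lambda path, *args, **kwargs: func(extend(path), *args, **kwargs)

for _fn in ('listdir', 'stat', 'remove'):
  if hasattr(os, _fn):
    # setdefault avoids clobbering anything already defined in this module.
    sys.modules[__name__].__dict__.setdefault(_fn, _get_lambda(getattr(os, _fn)))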
diff --git a/tools/swarming_client/utils/graph.py b/tools/swarming_client/utils/graph.py
deleted file mode 100644
index 9779b9f..0000000
--- a/tools/swarming_client/utils/graph.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-import os
-import re
-
-from third_party import colorama
-
-
-UNITS = ('', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y')
-
-
-def get_console_width(default=80):
-  """Returns the console width, if available."""
-  # TODO(maruel): Implement Windows.
-  try:
-    _, columns = os.popen('stty size', 'r').read().split()
-  except (IOError, OSError, ValueError):
-    columns = default
-  return int(columns)
-
-
-def generate_histogram(data, buckets):
-  """Generates an histogram out of a list of floats.
-
-  The data is bucketed into |buckets| buckets.
-
-  Returns:
-    dict of bucket: size
-  """
-  if not data:
-    return {}
-
-  minimum = min(data)
-  maximum = max(data)
-  if minimum == maximum:
-    return {data[0]: len(data)}
-
-  buckets = min(len(data), buckets)
-  bucket_size = (maximum-minimum)/buckets
-  out = dict((i, 0) for i in xrange(buckets))
-  for i in data:
-    out[min(int((i-minimum)/bucket_size), buckets-1)] += 1
-  return dict(((k*bucket_size)+minimum, v) for k, v in out.iteritems())
-
-
-def print_histogram(data, columns=0, key_format=None):
-  """Prints ASCII art representing an histogram.
-
-  Arguments:
-    data: as formatted by generate_histogram().
-    columns: width of the graph.
-    key_format: printf like format for the keys.
-  """
-  # TODO(maruel): Add dots for tens.
-  if not data:
-    # Nothing to print.
-    return
-
-  columns = columns or get_console_width()
-  key_format = key_format or '%s'
-
-  max_key_width = max(len(key_format % k) for k in data)
-  # 3 == 1 for ' ' prefix, 2 for ': ' suffix.
-  width = columns - max_key_width - 3
-  assert width > 1
-
-  maxvalue = max(data.itervalues())
-  if all(isinstance(i, int) for i in data.itervalues()) and maxvalue < width:
-    width = maxvalue
-  norm = float(maxvalue) / width
-
-  form = '%s: %s%s%s'
-  for k in sorted(data):
-    line = '*' * int(data[k] / norm)
-    key = (key_format % k).rjust(max_key_width)
-    print(form % (key, colorama.Fore.GREEN, line, colorama.Fore.RESET))
-
-
-def to_units(number):
-  """Convert a string to numbers."""
-  unit = 0
-  while number >= 1024.:
-    unit += 1
-    number = number / 1024.
-    if unit == len(UNITS) - 1:
-      break
-  if unit:
-    return '%.2f%s' % (number, UNITS[unit])
-  return '%d' % number
-
-
-def from_units(text):
-  """Convert a text to numbers.
-
-  Example: from_unit('0.1k') == 102
-  """
-  match = re.match(r'^([0-9\.]+)(|[' + ''.join(UNITS[1:]) + r'])$', text)
-  if not match:
-    return None
-
-  number = float(match.group(1))
-  unit = match.group(2)
-  return int(number * 1024**UNITS.index(unit))
-
-
-def unit_arg(option, opt, value, parser):
-  """OptionParser callback that supports units like 10.5m or 20k."""
-  actual = from_units(value)
-  if actual is None:
-    parser.error('Invalid value \'%s\' for %s' % (value, opt))
-  setattr(parser.values, option.dest, actual)
-
-
-def unit_option(parser, *args, **kwargs):
-  """Add an option that uses unit_arg()."""
-  parser.add_option(
-      *args, type='str', metavar='N', action='callback', callback=unit_arg,
-      nargs=1, **kwargs)
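# Editor's note: an illustrative sketch (not part of the diff) of the unit
# helpers deleted above, with expected values as assertions. It mirrors the
# behaviour of to_units()/from_units() from graph.py.

import re

UNITS = ('', 'k', 'm', 'g', 't', 'p', 'e', 'z', 'y')

def to_units(number):
  # Repeatedly divide by 1024 and pick the matching suffix.
  unit = 0
  while number >= 1024. and unit < len(UNITS) - 1:
    unit += 1
    number /= 1024.
  return '%.2f%s' % (number, UNITS[unit]) if unit else '%d' % number

def from_units(text):
  # Inverse operation: parse '<float><suffix>' and scale back up.
  match = re.match(r'^([0-9.]+)([%s]?)$' % ''.join(UNITS[1:]), text)
  if not match:
    return None
  return int(float(match.group(1)) * 1024 ** UNITS.index(match.group(2)))

assert to_units(1536) == '1.50k'
assert from_units('0.1k') == 102   # 0.1 * 1024, truncated to int
assert from_units(to_units(512)) == 512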
diff --git a/tools/swarming_client/utils/large.py b/tools/swarming_client/utils/large.py
deleted file mode 100644
index 90bc7f6..0000000
--- a/tools/swarming_client/utils/large.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2016 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Implements an integer set compression algorithm based on delta
-encoded varints, which is then deflate'd.
-
-The algorithm is intentionally simple.
-
-This only works with sorted list of integers. The resulting compression level
-can be very high for monotonically increasing sets.
-"""
-
-import cStringIO
-import zlib
-
-
-def pack(values):
-  """Returns a deflate'd buffer of delta encoded varints.
-
-  Arguments:
-    values: sorted list of int.
-
-  Returns:
-    compressed buffer as a str.
-  """
-  out = ''
-  if not values:
-    return ''
-  last = 0
-  max_value = 2L**63
-  assert 0 <= values[0] < max_value, 'Values must be between 0 and 2**63'
-  assert 0 <= values[-1] < max_value, 'Values must be between 0 and 2**63'
-  for value in values:
-    v = value
-    value -= last
-    assert value >= 0, 'List must be sorted ascending'
-    last = v
-    while value > 127:
-      out += chr((1 << 7) | (value & 0x7F))
-      value >>= 7
-    out += chr(value)
-  return zlib.compress(out)
-
-
-def unpack(data):
-  """Decompresses a deflate'd delta encoded list of varints.
-
-  Arguments:
-    data: compressed buffer as a str. Accepts None to simplify call sites.
-
-  Returns:
-    values: sorted list of int.
-  """
-  out = []
-  if not data:
-    return out
-  value = 0
-  base = 1
-  last = 0
-  for d in zlib.decompress(data):
-    val_byte = ord(d)
-    value += (val_byte & 0x7F) * base
-    if val_byte & 0x80:
-      base <<= 7
-    else:
-      out.append(value + last)
-      last += value
-      value = 0
-      base = 1
-  return out
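# Editor's note: a short usage sketch (not part of the diff) for the pack() /
# unpack() pair deleted above. Deltas between consecutive values are
# varint-encoded, then the whole byte string is deflated. The import assumes
# the module is still available as utils.large.

from utils import large

values = [10, 11, 11, 500, 100000]      # must be sorted ascending
blob = large.pack(values)               # compressed str, typically very small
assert large.unpack(blob) == values     # lossless round-trip
assert large.unpack('') == []           # the empty case is handled explicitly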
diff --git a/tools/swarming_client/utils/logging_utils.py b/tools/swarming_client/utils/logging_utils.py
deleted file mode 100644
index 9b559f5..0000000
--- a/tools/swarming_client/utils/logging_utils.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# Copyright 2015 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Utility relating to logging."""
-
-import argparse
-import codecs
-import ctypes
-import logging
-import logging.handlers
-import optparse
-import os
-import sys
-import tempfile
-import time
-
-from utils import file_path
-
-# This works around file locking issue on Windows specifically in the case of
-# long lived child processes.
-#
-# Python opens files with inheritable handle and without file sharing by
-# default. This causes the RotatingFileHandler file handle to be duplicated in
-# the subprocesses even if the log file is not used in it. Because of this
-# handle in the child process, when the RotatingFileHandler tries to os.rename()
-# the file in the parent process, it fails with:
-#     WindowsError: [Error 32] The process cannot access the file because
-#     it is being used by another process
-if sys.platform == 'win32':
-  import ctypes
-  import msvcrt  # pylint: disable=F0401
-  import _subprocess  # pylint: disable=F0401
-
-  FILE_ATTRIBUTE_NORMAL = 0x00000080
-  FILE_SHARE_READ = 1
-  FILE_SHARE_WRITE = 2
-  FILE_SHARE_DELETE = 4
-  GENERIC_READ = 0x80000000
-  GENERIC_WRITE = 0x40000000
-  OPEN_ALWAYS = 4
-
-  # TODO(maruel): Make it work in cygwin too if necessary. This would have to
-  # use ctypes.cdll.kernel32 instead of _subprocess and msvcrt.
-
-
-  def shared_open(path):
-    """Opens a file with full sharing mode and without inheritance.
-
-    The file is open for both read and write.
-
-    See https://bugs.python.org/issue15244 for inspiration.
-    """
-    path = unicode(path)
-    handle = ctypes.windll.kernel32.CreateFileW(
-        path,
-        GENERIC_READ|GENERIC_WRITE,
-        FILE_SHARE_READ|FILE_SHARE_WRITE|FILE_SHARE_DELETE,
-        None,
-        OPEN_ALWAYS,
-        FILE_ATTRIBUTE_NORMAL,
-        None)
-    ctr_handle = msvcrt.open_osfhandle(handle, os.O_BINARY | os.O_NOINHERIT)
-    return os.fdopen(ctr_handle, 'r+b')
-
-
-  class NoInheritRotatingFileHandler(logging.handlers.RotatingFileHandler):
-    def _open(self):
-      """Opens the log file without handle inheritance but with file sharing.
-
-      Ignores self.mode.
-      """
-      f = shared_open(self.baseFilename)
-      if self.encoding:
-        # Do the equivalent of
-        # codecs.open(self.baseFilename, self.mode, self.encoding)
-        info = codecs.lookup(self.encoding)
-        f = codecs.StreamReaderWriter(
-            f, info.streamreader, info.streamwriter, 'replace')
-        f.encoding = self.encoding
-      return f
-
-
-else:  # Not Windows.
-
-
-  NoInheritRotatingFileHandler = logging.handlers.RotatingFileHandler
-
-
-# Levels used for logging.
-LEVELS = [logging.ERROR, logging.INFO, logging.DEBUG]
-
-
-class CaptureLogs(object):
-  """Captures all the logs in a context."""
-  def __init__(self, prefix, root=None):
-    handle, self._path = tempfile.mkstemp(prefix=prefix, suffix='.log')
-    os.close(handle)
-    self._handler = logging.FileHandler(self._path, 'w')
-    self._handler.setLevel(logging.DEBUG)
-    formatter = UTCFormatter(
-        '%(process)d %(asctime)s: %(levelname)-5s %(message)s')
-    self._handler.setFormatter(formatter)
-    self._root = root or logging.getLogger()
-    self._root.addHandler(self._handler)
-    assert self._root.isEnabledFor(logging.DEBUG)
-
-  def read(self):
-    """Returns the current content of the logs.
-
-    This also closes the log capture so future logs will not be captured.
-    """
-    self._disconnect()
-    assert self._path
-    try:
-      with open(self._path, 'rb') as f:
-        return f.read()
-    except IOError as e:
-      return 'Failed to read %s: %s' % (self._path, e)
-
-  def close(self):
-    """Closes and delete the log."""
-    self._disconnect()
-    if self._path:
-      try:
-        os.remove(self._path)
-      except OSError as e:
-        logging.error('Failed to delete log file %s: %s', self._path, e)
-      self._path = None
-
-  def __enter__(self):
-    return self
-
-  def __exit__(self, _exc_type, _exc_value, _traceback):
-    self.close()
-
-  def _disconnect(self):
-    if self._handler:
-      self._root.removeHandler(self._handler)
-      self._handler.close()
-      self._handler = None
-
-
-class UTCFormatter(logging.Formatter):
-  converter = time.gmtime
-
-  def formatTime(self, record, datefmt=None):
-    """Change is ',' to '.'."""
-    ct = self.converter(record.created)
-    if datefmt:
-      return time.strftime(datefmt, ct)
-    else:
-      t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
-      return "%s.%03d" % (t, record.msecs)
-
-
-class Filter(object):
-  """Adds fields used by the infra-specific formatter.
-
-  Fields added:
-  - 'severity': one-letter indicator of log level (first letter of levelname).
-  """
-
-  def filter(self, record):
-    record.severity = record.levelname[0]
-    return True
-
-
-def find_stderr(root=None):
-  """Returns the logging.handler streaming to stderr, if any."""
-  for log in (root or logging.getLogger()).handlers:
-    if getattr(log, 'stream', None) is sys.stderr:
-      return log
-
-
-def prepare_logging(filename, root=None):
-  """Prepare logging for scripts.
-
-  Makes it log in UTC all the time. Prepares a rotating file based log.
-  """
-  assert not find_stderr(root)
-  formatter = UTCFormatter('%(process)d %(asctime)s %(severity)s: %(message)s')
-
-  # It is a requirement that the root logger is set to DEBUG, so the messages
-  # are not lost. It defaults to WARNING otherwise.
-  logger = root or logging.getLogger()
-  if not logger:
-    # Better print insanity than crash.
-    print >> sys.stderr, 'OMG NO ROOT'
-    return
-  logger.setLevel(logging.DEBUG)
-
-  stderr = logging.StreamHandler()
-  stderr.setFormatter(formatter)
-  stderr.addFilter(Filter())
-  # Default to ERROR.
-  stderr.setLevel(logging.ERROR)
-  logger.addHandler(stderr)
-
-  # Set up logging to a constant file so we can debug issues where
-  # the results aren't properly sent to the result URL.
-  if filename:
-    file_path.ensure_tree(os.path.dirname(os.path.abspath(unicode(filename))))
-    try:
-      rotating_file = NoInheritRotatingFileHandler(
-          filename, maxBytes=10 * 1024 * 1024, backupCount=5,
-          encoding='utf-8')
-      rotating_file.setLevel(logging.DEBUG)
-      rotating_file.setFormatter(formatter)
-      rotating_file.addFilter(Filter())
-      logger.addHandler(rotating_file)
-    except Exception:
-      # May happen on cygwin. Do not crash.
-      logging.exception('Failed to open %s', filename)
-
-
-def set_console_level(level, root=None):
-  """Reset the console (stderr) logging level."""
-  handler = find_stderr(root)
-  if not handler:
-    # Better print insanity than crash.
-    print >> sys.stderr, 'OMG NO STDERR'
-    return
-  handler.setLevel(level)
-
-
-class OptionParserWithLogging(optparse.OptionParser):
-  """Adds --verbose option."""
-
-  # Set to True to enable --log-file options.
-  enable_log_file = True
-
-  # Set in unit tests.
-  logger_root = None
-
-  def __init__(self, verbose=0, log_file=None, **kwargs):
-    kwargs.setdefault('description', sys.modules['__main__'].__doc__)
-    optparse.OptionParser.__init__(self, **kwargs)
-    self.group_logging = optparse.OptionGroup(self, 'Logging')
-    self.group_logging.add_option(
-        '-v', '--verbose',
-        action='count',
-        default=verbose,
-        help='Use multiple times to increase verbosity')
-    if self.enable_log_file:
-      self.group_logging.add_option(
-          '-l', '--log-file',
-          default=log_file,
-          help='The name of the file to store rotating log details')
-      self.group_logging.add_option(
-          '--no-log', action='store_const', const='', dest='log_file',
-          help='Disable log file')
-
-  def parse_args(self, *args, **kwargs):
-    # Make sure this group is always the last one.
-    self.add_option_group(self.group_logging)
-
-    options, args = optparse.OptionParser.parse_args(self, *args, **kwargs)
-    prepare_logging(self.enable_log_file and options.log_file, self.logger_root)
-    set_console_level(
-        LEVELS[min(len(LEVELS) - 1, options.verbose)], self.logger_root)
-    return options, args
-
-
-class ArgumentParserWithLogging(argparse.ArgumentParser):
-  """Adds --verbose option."""
-
-  # Set to True to enable --log-file options.
-  enable_log_file = True
-
-  # Set in unit tests.
-  logger_root = None
-
-  def __init__(self, verbose=0, log_file=None, **kwargs):
-    kwargs.setdefault('description', sys.modules['__main__'].__doc__)
-    kwargs.setdefault('conflict_handler', 'resolve')
-    self.__verbose = verbose
-    self.__log_file = log_file
-    super(ArgumentParserWithLogging, self).__init__(**kwargs)
-
-  def _add_logging_group(self):
-    group = self.add_argument_group('Logging')
-    group.add_argument(
-        '-v', '--verbose',
-        action='count',
-        default=self.__verbose,
-        help='Use multiple times to increase verbosity')
-    if self.enable_log_file:
-      group.add_argument(
-          '-l', '--log-file',
-          default=self.__log_file,
-          help='The name of the file to store rotating log details')
-      group.add_argument(
-          '--no-log', action='store_const', const='', dest='log_file',
-          help='Disable log file')
-
-  def parse_args(self, *args, **kwargs):
-    # Make sure this group is always the last one.
-    self._add_logging_group()
-
-    args = super(ArgumentParserWithLogging, self).parse_args(*args, **kwargs)
-    prepare_logging(self.enable_log_file and args.log_file, self.logger_root)
-    set_console_level(
-        LEVELS[min(len(LEVELS) - 1, args.verbose)], self.logger_root)
-    return args
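# Editor's note: an illustrative sketch (not part of the diff) of how the
# deleted parsers map the repeatable -v flag onto a logging level through the
# LEVELS list.

import logging

LEVELS = [logging.ERROR, logging.INFO, logging.DEBUG]

def console_level_for(verbose_count):
  # 0 -> ERROR, 1 -> INFO, 2 or more -> DEBUG (clamped to the last entry).
  return LEVELS[min(len(LEVELS) - 1, verbose_count)]

assert console_level_for(0) == logging.ERROR
assert console_level_for(1) == logging.INFO
assert console_level_for(5) == logging.DEBUG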
diff --git a/tools/swarming_client/utils/lru.py b/tools/swarming_client/utils/lru.py
deleted file mode 100644
index d4ef5ef..0000000
--- a/tools/swarming_client/utils/lru.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Defines a dictionary that can evict least recently used items."""
-
-import collections
-import json
-
-
-class LRUDict(object):
-  """Dictionary that can evict least recently used items.
-
-  Implemented as a wrapper around OrderedDict object. An OrderedDict stores
-  (key, value) pairs in order they are inserted and can effectively pop oldest
-  items.
-
-  Can also store its state as *.json file on disk.
-  """
-
-  def __init__(self):
-    # Ordered key -> value mapping, newest items at the bottom.
-    self._items = collections.OrderedDict()
-    # True if was modified after loading.
-    self._dirty = True
-
-  def __nonzero__(self):
-    """False if dict is empty."""
-    return bool(self._items)
-
-  def __iter__(self):
-    """Iterate over the keys."""
-    return self._items.__iter__()
-
-  def __len__(self):
-    """Number of items in the dict."""
-    return len(self._items)
-
-  def __contains__(self, key):
-    """True if |key| is in the dict."""
-    return key in self._items
-
-  def __getitem__(self, key):
-    """Returns value for |key| or raises KeyError if not found."""
-    return self._items[key]
-
-  @classmethod
-  def load(cls, state_file):
-    """Loads previously saved state and returns LRUDict in that state.
-
-    Raises ValueError if state file is corrupted.
-    """
-    try:
-      with open(state_file, 'r') as f:
-        state = json.load(f)
-    except (IOError, ValueError) as e:
-      raise ValueError('Broken state file %s: %s' % (state_file, e))
-
-    if not isinstance(state, list):
-      raise ValueError(
-          'Broken state file %s, should be json list' % (state_file,))
-
-    # Items are stored oldest to newest. Put them back in the same order.
-    lru = cls()
-    for pair in state:
-      if not isinstance(pair, (list, tuple)) or len(pair) != 2:
-        raise ValueError(
-          'Broken state file %s, expecting pairs: %s' % (state_file, pair))
-      lru.add(pair[0], pair[1])
-
-    # Check for duplicate keys.
-    if len(lru) != len(state):
-      raise ValueError(
-          'Broken state file %s, found duplicate keys' % (state_file,))
-
-    # Now state from the file corresponds to state in the memory.
-    lru._dirty = False
-    return lru
-
-  def save(self, state_file):
-    """Saves cache state to a file if it was modified."""
-    if not self._dirty:
-      return False
-
-    with open(state_file, 'wb') as f:
-      json.dump(self._items.items(), f, separators=(',', ':'))
-
-    self._dirty = False
-    return True
-
-  def add(self, key, value):
-    """Adds or replaces a |value| for |key|, marks it as most recently used."""
-    self._items.pop(key, None)
-    self._items[key] = value
-    self._dirty = True
-
-  def batch_insert_oldest(self, items):
-    """Prepends list of |items| to the dict, marks them as least recently used.
-
-    |items| is a list of (key, value) pairs to add.
-
-    It's a very slow operation that completely rebuilds the dictionary.
-    """
-    new_items = collections.OrderedDict()
-
-    # Insert |items| first, so they become the oldest.
-    for key, value in items:
-      new_items[key] = value
-
-    # Insert the rest, be careful not to override keys from |items|.
-    for key, value in self._items.iteritems():
-      if key not in new_items:
-        new_items[key] = value
-
-    self._items = new_items
-    self._dirty = True
-
-  def keys_set(self):
-    """Set of keys of items in this dict."""
-    return set(self._items)
-
-  def get(self, key, default=None):
-    """Returns value for |key| or |default| if not found."""
-    return self._items.get(key, default)
-
-  def touch(self, key):
-    """Marks |key| as most recently used.
-
-    Raises KeyError if |key| is not in the dict.
-    """
-    self._items[key] = self._items.pop(key)
-    self._dirty = True
-
-  def pop(self, key):
-    """Removes item from the dict, returns its value.
-
-    Raises KeyError if |key| is not in the dict.
-    """
-    value = self._items.pop(key)
-    self._dirty = True
-    return value
-
-  def get_oldest(self):
-    """Returns oldest item as tuple (key, value).
-
-    Raises KeyError if dict is empty.
-    """
-    for i in self._items.iteritems():
-      return i
-    raise KeyError('dictionary is empty')
-
-  def pop_oldest(self):
-    """Removes oldest item from the dict and returns it as tuple (key, value).
-
-    Raises KeyError if dict is empty.
-    """
-    pair = self._items.popitem(last=False)
-    self._dirty = True
-    return pair
-
-  def itervalues(self):
-    """Iterator over stored values in arbitrary order."""
-    return self._items.itervalues()
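# Editor's note: a brief usage sketch (not part of the diff) for the LRUDict
# class deleted above. The import assumes the module is still available as
# utils.lru.

from utils import lru

cache = lru.LRUDict()
cache.add('a', 1)
cache.add('b', 2)
cache.touch('a')                        # 'a' becomes the most recently used
assert cache.get_oldest() == ('b', 2)
assert cache.pop_oldest() == ('b', 2)
assert cache.keys_set() == set(['a'])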
diff --git a/tools/swarming_client/utils/net.py b/tools/swarming_client/utils/net.py
deleted file mode 100644
index 11bb276..0000000
--- a/tools/swarming_client/utils/net.py
+++ /dev/null
@@ -1,869 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Classes and functions for generic network communication over HTTP."""
-
-import cookielib
-import cStringIO as StringIO
-import datetime
-import httplib
-import itertools
-import json
-import logging
-import math
-import os
-import random
-import re
-import socket
-import ssl
-import threading
-import time
-import urllib
-import urlparse
-
-from third_party import requests
-from third_party.requests import adapters
-from third_party.requests import structures
-
-from utils import oauth
-from utils import tools
-
-
-# TODO(vadimsh): Remove this once we don't have to support python 2.6 anymore.
-def monkey_patch_httplib():
-  """Patch httplib.HTTPConnection to have '_tunnel_host' attribute.
-
-  'requests' library (>= v2) accesses 'HTTPConnection._tunnel_host' attribute
-  added only in python 2.6.3. This function patches HTTPConnection to have it
-  on python 2.6.2 as well.
-  """
-  conn = httplib.HTTPConnection('example.com')
-  if not hasattr(conn, '_tunnel_host'):
-    httplib.HTTPConnection._tunnel_host = None
-monkey_patch_httplib()
-
-
-# Default maximum number of attempts to trying opening a url before aborting.
-URL_OPEN_MAX_ATTEMPTS = 30
-
-# Default timeout when retrying.
-URL_OPEN_TIMEOUT = 6*60.
-
-# Default timeout when reading from open HTTP connection.
-URL_READ_TIMEOUT = 60
-
-# Content type for url encoded POST body.
-URL_ENCODED_FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded'
-# Content type for JSON body.
-JSON_CONTENT_TYPE = 'application/json; charset=UTF-8'
-# Default content type for POST body.
-DEFAULT_CONTENT_TYPE = URL_ENCODED_FORM_CONTENT_TYPE
-
-# Content type -> function that encodes a request body.
-CONTENT_ENCODERS = {
-  URL_ENCODED_FORM_CONTENT_TYPE:
-    urllib.urlencode,
-  JSON_CONTENT_TYPE:
-    lambda x: json.dumps(x, sort_keys=True, separators=(',', ':')),
-}
-
-
-# Google Storage URL regular expression.
-GS_STORAGE_HOST_URL_RE = re.compile(r'https://.*\.storage\.googleapis\.com')
-
-# Global (for now) map: server URL (http://example.com) -> HttpService instance.
-# Used by get_http_service to cache HttpService instances.
-_http_services = {}
-_http_services_lock = threading.Lock()
-
-# This lock ensures that the user won't be confused by multiple concurrent
-# login prompts.
-_auth_lock = threading.Lock()
-
-# Set in 'set_oauth_config'. If 'set_oauth_config' is not called before the
-# first request, will be set to oauth.make_oauth_config().
-_auth_config = None
-
-# A class to use to send HTTP requests. Can be changed by 'set_engine_class'.
-# Default is RequestsLibEngine.
-_request_engine_cls = None
-
-
-class NetError(IOError):
-  """Generic network related error."""
-
-  def __init__(self, inner_exc=None):
-    super(NetError, self).__init__(str(inner_exc or self.__doc__))
-    self.inner_exc = inner_exc
-    self.verbose_info = None
-
-
-class TimeoutError(NetError):
-  """Timeout while reading HTTP response."""
-
-
-class ConnectionError(NetError):
-  """Failed to connect to the server."""
-
-
-class HttpError(NetError):
-  """Server returned HTTP error code."""
-
-  def __init__(self, code, content_type, inner_exc):
-    super(HttpError, self).__init__(inner_exc)
-    self.code = code
-    self.content_type = content_type
-
-
-def set_engine_class(engine_cls):
-  """Globally changes a class to use to execute HTTP requests.
-
-  The default engine is RequestsLibEngine, which uses the 'requests' library.
-  Changing the engine on the fly is not supported; it must be set before the
-  first request.
-
-  A custom engine class should support the same public interface as
-  RequestsLibEngine.
-  """
-  global _request_engine_cls
-  assert _request_engine_cls is None
-  _request_engine_cls = engine_cls
-
-
-def get_engine_class():
-  """Returns a class to use to execute HTTP requests."""
-  return _request_engine_cls or RequestsLibEngine
-
-
-def url_open(url, **kwargs):  # pylint: disable=W0621
-  """Attempts to open the given url multiple times.
-
-  |data| can be either:
-    - None for a GET request
-    - str for pre-encoded data
-    - list for data to be encoded
-    - dict for data to be encoded
-
-  See HttpService.request for a full list of arguments.
-
-  Returns HttpResponse object, where the response may be read from, or None
-  if it was unable to connect.
-  """
-  urlhost, urlpath = split_server_request_url(url)
-  service = get_http_service(urlhost)
-  return service.request(urlpath, **kwargs)
-
-
-def url_read(url, **kwargs):
-  """Attempts to open the given url multiple times and read all data from it.
-
-  Accepts same arguments as url_open function.
-
-  Returns all data read or None if it was unable to connect or read the data.
-  """
-  response = url_open(url, stream=False, **kwargs)
-  if not response:
-    return None
-  try:
-    return response.read()
-  except TimeoutError:
-    return None
-
-
-def url_read_json(url, **kwargs):
-  """Attempts to open the given url multiple times and read all data from it.
-
-  Accepts same arguments as url_open function.
-
-  Returns all data read or None if it was unable to connect or read the data.
-  """
-  urlhost, urlpath = split_server_request_url(url)
-  service = get_http_service(urlhost)
-  try:
-    return service.json_request(urlpath, **kwargs)
-  except TimeoutError:
-    return None
-
-
-def url_retrieve(filepath, url, **kwargs):
-  """Downloads an URL to a file. Returns True on success."""
-  response = url_open(url, stream=False, **kwargs)
-  if not response:
-    return False
-  try:
-    with open(filepath, 'wb') as f:
-      for buf in response.iter_content(65536):
-        f.write(buf)
-    return True
-  except (IOError, OSError, TimeoutError):
-    try:
-      os.remove(filepath)
-    except IOError:
-      pass
-    return False
-
-
-def split_server_request_url(url):
-  """Splits the url into scheme+netloc and path+params+query+fragment."""
-  url_parts = list(urlparse.urlparse(url))
-  urlhost = '%s://%s' % (url_parts[0], url_parts[1])
-  urlpath = urlparse.urlunparse(['', ''] + url_parts[2:])
-  return urlhost, urlpath
-
-
-def fix_url(url):
-  """Fixes an url to https."""
-  parts = urlparse.urlparse(url, 'https')
-  if parts.query:
-    raise ValueError('doesn\'t support query parameter.')
-  if parts.fragment:
-    raise ValueError('doesn\'t support fragment in the url.')
-  # urlparse('foo.com') will result in netloc='', path='foo.com', which is not
-  # what is desired here.
-  new = list(parts)
-  if not new[1] and new[2]:
-    new[1] = new[2].rstrip('/')
-    new[2] = ''
-  new[2] = new[2].rstrip('/')
-  return urlparse.urlunparse(new)
-
-
-def get_http_service(urlhost, allow_cached=True):
-  """Returns existing or creates new instance of HttpService that can send
-  requests to given base urlhost.
-  """
-  def new_service():
-    # Create a separate authenticator only if the engine is not providing
-    # authentication already. Also, we use signed URLs for Google Storage, so
-    # there is no need for special authentication.
-    authenticator = None
-    engine_cls = get_engine_class()
-    is_gs = GS_STORAGE_HOST_URL_RE.match(urlhost)
-    conf = get_oauth_config()
-    if not engine_cls.provides_auth and not is_gs and not conf.disabled:
-      authenticator = OAuthAuthenticator(urlhost, conf)
-    return HttpService(
-        urlhost,
-        engine=engine_cls(),
-        authenticator=authenticator)
-
-  # Ensure consistency in url naming.
-  urlhost = str(urlhost).lower().rstrip('/')
-
-  if not allow_cached:
-    return new_service()
-  with _http_services_lock:
-    service = _http_services.get(urlhost)
-    if not service:
-      service = new_service()
-      _http_services[urlhost] = service
-    return service
-
-
-def set_oauth_config(config):
-  """Defines what OAuth configuration to use for authentication.
-
-  If request engine (see get_engine_class) provides authentication already (as
-  indicated by its 'provides_auth=True' class property) this setting is ignored.
-
-  Arguments:
-    config: oauth.OAuthConfig instance.
-  """
-  global _auth_config
-  _auth_config = config
-
-
-def get_oauth_config():
-  """Returns global OAuthConfig as set by 'set_oauth_config' or default one."""
-  return _auth_config or oauth.make_oauth_config()
-
-
-def get_case_insensitive_dict(original):
-  """Given a dict with string keys returns new CaseInsensitiveDict.
-
-  Raises ValueError if there are duplicate keys.
-  """
-  normalized = structures.CaseInsensitiveDict(original or {})
-  if len(normalized) != len(original):
-    raise ValueError('Duplicate keys in: %s' % repr(original))
-  return normalized
-
-
-class HttpService(object):
-  """Base class for a class that provides an API to HTTP based service:
-    - Provides 'request' method.
-    - Supports automatic request retries.
-    - Thread safe.
-  """
-
-  def __init__(self, urlhost, engine, authenticator=None):
-    self.urlhost = urlhost
-    self.engine = engine
-    self.authenticator = authenticator
-
-  @staticmethod
-  def is_transient_http_error(code, retry_404, retry_50x, suburl, content_type):
-    """Returns True if given HTTP response code is a transient error."""
-    # Google Storage can return this and it should be retried.
-    if code == 408:
-      return True
-    if code == 404:
-      # Retry 404 if allowed by the caller.
-      if retry_404:
-        return retry_404
-      # Transparently retry 404 iff it is a CloudEndpoints API call *and* the
-      # result is not JSON. This assumes that we only use JSON encoding.
-      return (
-          suburl.startswith('/_ah/api/') and
-          not content_type.startswith('application/json'))
-    # All other 4** errors are fatal.
-    if code < 500:
-      return False
-    # Retry >= 500 error only if allowed by the caller.
-    return retry_50x
-
-  @staticmethod
-  def encode_request_body(body, content_type):
-    """Returns request body encoded according to its content type."""
-    # No body or it is already encoded.
-    if body is None or isinstance(body, str):
-      return body
-    # Any body should have content type set.
-    assert content_type, 'Request has body, but no content type'
-    encoder = CONTENT_ENCODERS.get(content_type)
-    assert encoder, ('Unknown content type %s' % content_type)
-    return encoder(body)
-
-  def login(self, allow_user_interaction):
-    """Runs authentication flow to refresh short lived access token.
-
-    Authentication flow may need to interact with the user (read username from
-    stdin, open local browser for OAuth2, etc.). If interaction is required and
-    |allow_user_interaction| is False, the login will silently be considered
-    failed (i.e. this function returns False).
-
-    'request' method always uses non-interactive login, so long-lived
-    authentication tokens (OAuth2 refresh token, etc) have to be set up
-    manually by the developer (by calling 'auth.py login' perhaps) prior to
-    running any swarming or isolate scripts.
-    """
-    # Use global lock to ensure two authentication flows never run in parallel.
-    with _auth_lock:
-      if self.authenticator:
-        return self.authenticator.login(allow_user_interaction)
-      return False
-
-  def logout(self):
-    """Purges access credentials from local cache."""
-    if self.authenticator:
-      self.authenticator.logout()
-
-  def request(
-      self,
-      urlpath,
-      data=None,
-      content_type=None,
-      max_attempts=URL_OPEN_MAX_ATTEMPTS,
-      retry_404=False,
-      retry_50x=True,
-      timeout=URL_OPEN_TIMEOUT,
-      read_timeout=URL_READ_TIMEOUT,
-      stream=True,
-      method=None,
-      headers=None,
-      follow_redirects=True):
-    """Attempts to open the given url multiple times.
-
-    |urlpath| is relative to the server root, i.e. '/some/request?param=1'.
-
-    |data| can be either:
-      - None for a GET request
-      - str for pre-encoded data
-      - list for data to be form-encoded
-      - dict for data to be form-encoded
-
-    - Optionally retries HTTP 404 and 50x.
-    - Retries up to |max_attempts| times. If None or 0, there's no limit on the
-      number of retries.
-    - Retries up to |timeout| duration in seconds. If None or 0, there's no
-      limit on the time taken to do retries.
-    - If both |max_attempts| and |timeout| are None or 0, this function retries
-      indefinitely.
-
-    If |method| is given it can be 'DELETE', 'GET', 'POST' or 'PUT' and it will
-    be used when performing the request. By default it's GET if |data| is None
-    and POST if |data| is not None.
-
-    If |headers| is given, it should be a dict with HTTP headers to append
-    to request. Caller is responsible for providing headers that make sense.
-
-    If |follow_redirects| is True, will transparently follow HTTP redirects,
-    otherwise redirect response will be returned as is. It can be recognized
-    by the presence of 'Location' response header.
-
-    If |read_timeout| is not None, the underlying socket is configured to raise
-    a TimeoutError whenever there's no response from the server for more than
-    |read_timeout| seconds. This can happen during any read operation, so once
-    you pass a non-None |read_timeout|, be prepared to handle these exceptions
-    in subsequent reads from the stream.
-
-    Returns a file-like object, where the response may be read from, or None
-    if it was unable to connect. If |stream| is False will read whole response
-    into memory buffer before returning file-like object that reads from this
-    memory buffer.
-    """
-    assert urlpath and urlpath[0] == '/', urlpath
-
-    if data is not None:
-      assert method in (None, 'DELETE', 'POST', 'PUT')
-      method = method or 'POST'
-      content_type = content_type or DEFAULT_CONTENT_TYPE
-      body = self.encode_request_body(data, content_type)
-    else:
-      assert method in (None, 'DELETE', 'GET')
-      method = method or 'GET'
-      body = None
-      assert not content_type, 'Can\'t use content_type on %s' % method
-
-    # Prepare request info.
-    parsed = urlparse.urlparse('/' + urlpath.lstrip('/'))
-    resource_url = urlparse.urljoin(self.urlhost, parsed.path)
-    query_params = urlparse.parse_qsl(parsed.query)
-
-    # Prepare headers.
-    headers = get_case_insensitive_dict(headers or {})
-    if body is not None:
-      headers['Content-Length'] = len(body)
-      if content_type:
-        headers['Content-Type'] = content_type
-
-    last_error = None
-    auth_attempted = False
-
-    for attempt in retry_loop(max_attempts, timeout):
-      # Log non-first attempt.
-      if attempt.attempt:
-        logging.warning(
-            'Retrying request %s, attempt %d/%d...',
-            resource_url, attempt.attempt, max_attempts)
-
-      try:
-        # Prepare and send a new request.
-        request = HttpRequest(
-            method, resource_url, query_params, body,
-            headers, read_timeout, stream, follow_redirects)
-        if self.authenticator:
-          self.authenticator.authorize(request)
-        response = self.engine.perform_request(request)
-        response._timeout_exc_classes = self.engine.timeout_exception_classes()
-        logging.debug('Request %s succeeded', request.get_full_url())
-        return response
-
-      except (ConnectionError, TimeoutError) as e:
-        last_error = e
-        logging.warning(
-            'Unable to open url %s on attempt %d.\n%s',
-            request.get_full_url(), attempt.attempt, self._format_error(e))
-        continue
-
-      except HttpError as e:
-        last_error = e
-
-        # Access denied -> authenticate.
-        if e.code in (401, 403):
-          logging.warning(
-              'Authentication is required for %s on attempt %d.\n%s',
-              request.get_full_url(), attempt.attempt, self._format_error(e))
-          # Try to forcefully refresh the token. If it doesn't help, then the
-          # server does not support authentication or the user doesn't have
-          # the required access.
-          if not auth_attempted:
-            auth_attempted = True
-            if self.login(allow_user_interaction=False):
-              # Success! Run request again immediately.
-              attempt.skip_sleep = True
-              continue
-          # Authentication attempt was unsuccessful.
-          logging.error(
-              'Unable to authenticate to %s (%s).',
-              self.urlhost, self._format_error(e))
-          if self.authenticator:
-            logging.error(
-                'Use auth.py to login: python auth.py login --service=%s',
-                self.urlhost)
-          return None
-
-        # Hit an error that cannot be retried -> stop the retry loop.
-        if not self.is_transient_http_error(
-            e.code, retry_404, retry_50x, parsed.path, e.content_type):
-          # This HttpError means we reached the server and there was a problem
-          # with the request, so don't retry.
-          logging.warning(
-              'Able to connect to %s but an exception was thrown.\n%s',
-              request.get_full_url(), self._format_error(e, verbose=True))
-          return None
-
-        # Retry all other errors.
-        logging.warning(
-            'Server responded with error on %s on attempt %d.\n%s',
-            request.get_full_url(), attempt.attempt, self._format_error(e))
-        continue
-
-    logging.error(
-        'Unable to open given url, %s, after %d attempts.\n%s',
-        request.get_full_url(), max_attempts,
-        self._format_error(last_error, verbose=True))
-    return None
-
-  def json_request(self, urlpath, data=None, **kwargs):
-    """Sends JSON request to the server and parses JSON response it get back.
-
-    Arguments:
-      urlpath: relative request path (e.g. '/auth/v1/...').
-      data: object to serialize to JSON and sent in the request.
-
-    See self.request() for more details.
-
-    Returns:
-      Deserialized JSON response on success, None on error or timeout.
-    """
-    content_type = JSON_CONTENT_TYPE if data is not None else None
-    response = self.request(
-        urlpath, content_type=content_type, data=data, stream=False, **kwargs)
-    if not response:
-      return None
-    try:
-      text = response.read()
-      if not text:
-        return None
-    except TimeoutError:
-      return None
-    try:
-      return json.loads(text)
-    except ValueError as e:
-      logging.error('Not a JSON response when calling %s: %s; full text: %s',
-                    urlpath, e, text)
-      return None
-
-  def _format_error(self, exc, verbose=False):
-    """Returns readable description of a NetError."""
-    if not isinstance(exc, NetError):
-      return str(exc)
-    if not verbose:
-      return str(exc.inner_exc or exc)
-    # Avoid making multiple calls to parse_request_exception since they may
-    # have side effects on the exception, e.g. urllib2 based exceptions are in
-    # fact file-like objects that can not be read twice.
-    if exc.verbose_info is None:
-      out = [str(exc.inner_exc or exc)]
-      headers, body = self.engine.parse_request_exception(exc.inner_exc)
-      if headers or body:
-        out.append('----------')
-        if headers:
-          for header, value in headers:
-            if not header.startswith('x-'):
-              out.append('%s: %s' % (header.capitalize(), value))
-          out.append('')
-        out.append(body or '<empty body>')
-        out.append('----------')
-      exc.verbose_info = '\n'.join(out)
-    return exc.verbose_info
-
-
-class HttpRequest(object):
-  """Request to HttpService."""
-
-  def __init__(
-      self, method, url, params, body,
-      headers, timeout, stream, follow_redirects):
-    """Arguments:
-      |method| - HTTP method to use
-      |url| - relative URL to the resource, without query parameters
-      |params| - list of (key, value) pairs to put into GET parameters
-      |body| - encoded body of the request (None or str)
-      |headers| - dict with request headers
-      |timeout| - socket read timeout (None to disable)
-      |stream| - True to stream response from socket
-      |follow_redirects| - True to follow HTTP redirects.
-    """
-    self.method = method
-    self.url = url
-    self.params = params[:]
-    self.body = body
-    self.headers = headers.copy()
-    self.timeout = timeout
-    self.stream = stream
-    self.follow_redirects = follow_redirects
-    self._cookies = None
-
-  @property
-  def cookies(self):
-    """CookieJar object that will be used for cookies in this request."""
-    if self._cookies is None:
-      self._cookies = cookielib.CookieJar()
-    return self._cookies
-
-  def get_full_url(self):
-    """Resource URL with url-encoded GET parameters."""
-    if not self.params:
-      return self.url
-    else:
-      return '%s?%s' % (self.url, urllib.urlencode(self.params))
-
-
-class HttpResponse(object):
-  """Response from HttpService."""
-
-  def __init__(self, response, url, headers):
-    self._response = response
-    self._url = url
-    self._headers = get_case_insensitive_dict(headers)
-    self._timeout_exc_classes = ()
-
-  def iter_content(self, chunk_size):
-    assert all(issubclass(e, Exception) for e in self._timeout_exc_classes)
-    try:
-      read = 0
-      if hasattr(self._response, 'iter_content'):
-        # requests.Response.
-        for buf in self._response.iter_content(chunk_size):
-          read += len(buf)
-          yield buf
-      else:
-        # File-like object.
-        while True:
-          buf = self._response.read(chunk_size)
-          if not buf:
-            break
-          read += len(buf)
-          yield buf
-    except self._timeout_exc_classes as e:
-      logging.error('Timeout while reading from %s, read %d of %s: %s',
-          self._url, read, self.get_header('Content-Length'), e)
-      raise TimeoutError(e)
-
-  def read(self):
-    assert all(issubclass(e, Exception) for e in self._timeout_exc_classes)
-    try:
-      if hasattr(self._response, 'content'):
-        # requests.Response.
-        return self._response.content
-      # File-like object.
-      return self._response.read()
-    except self._timeout_exc_classes as e:
-      logging.error('Timeout while reading from %s, expected %s bytes: %s',
-          self._url, self.get_header('Content-Length'), e)
-      raise TimeoutError(e)
-
-  def get_header(self, header):
-    """Returns response header (as str) or None if no such header."""
-    return self._headers.get(header)
-
-
-class Authenticator(object):
-  """Base class for objects that know how to authenticate into http services."""
-
-  def authorize(self, request):
-    """Add authentication information to the request."""
-
-  def login(self, allow_user_interaction):
-    """Run interactive authentication flow refreshing the token."""
-    raise NotImplementedError()
-
-  def logout(self):
-    """Purges access credentials from local cache."""
-
-
-class RequestsLibEngine(object):
-  """Class that knows how to execute HttpRequests via requests library."""
-
-  # This engine doesn't know how to authenticate requests on transport level.
-  provides_auth = False
-
-  @classmethod
-  def parse_request_exception(cls, exc):
-    """Extracts HTTP headers and body from inner exceptions put in HttpError."""
-    if isinstance(exc, requests.HTTPError):
-      return exc.response.headers.items(), exc.response.content
-    return None, None
-
-  @classmethod
-  def timeout_exception_classes(cls):
-    """A tuple of exception classes that represent timeout.
-
-    Will be caught while reading a streaming response in HttpResponse.read and
-    transformed to TimeoutError.
-    """
-    return (
-        socket.timeout, ssl.SSLError,
-        requests.Timeout,
-        requests.ConnectionError,
-        requests.packages.urllib3.exceptions.ProtocolError,
-        requests.packages.urllib3.exceptions.TimeoutError)
-
-  def __init__(self):
-    super(RequestsLibEngine, self).__init__()
-    self.session = requests.Session()
-    # Configure session.
-    self.session.trust_env = False
-    self.session.verify = tools.get_cacerts_bundle()
-    # Configure connection pools.
-    for protocol in ('https://', 'http://'):
-      self.session.mount(protocol, adapters.HTTPAdapter(
-          pool_connections=64,
-          pool_maxsize=64,
-          max_retries=0,
-          pool_block=False))
-
-  def perform_request(self, request):
-    """Sends a HttpRequest to the server and reads back the response.
-
-    Returns HttpResponse.
-
-    Raises:
-      ConnectionError - failed to establish connection to the server.
-      TimeoutError - timeout while connecting or reading response.
-      HttpError - server responded with >= 400 error code.
-    """
-    try:
-      # response is a requests.models.Response.
-      response = self.session.request(
-          method=request.method,
-          url=request.url,
-          params=request.params,
-          data=request.body,
-          headers=request.headers,
-          cookies=request.cookies,
-          timeout=request.timeout,
-          stream=request.stream,
-          allow_redirects=request.follow_redirects)
-      response.raise_for_status()
-      return HttpResponse(response, request.get_full_url(), response.headers)
-    except requests.Timeout as e:
-      raise TimeoutError(e)
-    except requests.HTTPError as e:
-      raise HttpError(
-          e.response.status_code, e.response.headers.get('Content-Type'), e)
-    except (requests.ConnectionError, socket.timeout, ssl.SSLError) as e:
-      raise ConnectionError(e)
-
-
-class OAuthAuthenticator(Authenticator):
-  """Uses OAuth Authorization header to authenticate requests."""
-
-  def __init__(self, urlhost, config):
-    super(OAuthAuthenticator, self).__init__()
-    assert isinstance(config, oauth.OAuthConfig)
-    self.urlhost = urlhost
-    self.config = config
-    self._lock = threading.Lock()
-    self._access_token = None
-
-  def authorize(self, request):
-    with self._lock:
-      # Load from cache on a first access.
-      if not self._access_token:
-        self._access_token = oauth.load_access_token(self.urlhost, self.config)
-      # Refresh if expired.
-      need_refresh = True
-      if self._access_token:
-        if self._access_token.expires_at is not None:
-          # Allow 5 min of clock skew.
-          now = datetime.datetime.utcnow() + datetime.timedelta(seconds=300)
-          need_refresh = now >= self._access_token.expires_at
-        else:
-          # A token without an expiration time never expires.
-          need_refresh = False
-      if need_refresh:
-        self._access_token = oauth.create_access_token(
-            self.urlhost, self.config, False)
-      if self._access_token:
-        request.headers['Authorization'] = (
-            'Bearer %s' % self._access_token.token)
-
-  def login(self, allow_user_interaction):
-    with self._lock:
-      # Forcefully refresh the token.
-      self._access_token = oauth.create_access_token(
-          self.urlhost, self.config, allow_user_interaction)
-      return self._access_token is not None
-
-  def logout(self):
-    with self._lock:
-      self._access_token = None
-      oauth.purge_access_token(self.urlhost, self.config)
-
-
-class RetryAttempt(object):
-  """Contains information about current retry attempt.
-
-  Yielded from retry_loop.
-  """
-
-  def __init__(self, attempt, remaining):
-    """Information about current attempt in retry loop:
-      |attempt| - zero based index of attempt.
-      |remaining| - how much time is left before retry loop finishes retries.
-    """
-    self.attempt = attempt
-    self.remaining = remaining
-    self.skip_sleep = False
-
-
-def calculate_sleep_before_retry(attempt, max_duration):
-  """How long to sleep before retrying an attempt in retry_loop."""
-  # Maximum sleeping time. We're hammering a cloud-distributed service, it'll
-  # survive.
-  MAX_SLEEP = 10.
-  # random.random() returns [0.0, 1.0). The first sleep is relatively short,
-  # with a median of 1.5/2 + 1.5^-1 seconds, growing exponentially afterwards.
-  duration = (random.random() * 1.5) + math.pow(1.5, (attempt - 1))
-  assert duration > 0.1
-  duration = min(MAX_SLEEP, duration)
-  if max_duration:
-    duration = min(max_duration, duration)
-  return duration
-
-
-def sleep_before_retry(attempt, max_duration):
-  """Sleeps for some amount of time when retrying the attempt in retry_loop.
-
-  To be mocked in tests.
-  """
-  time.sleep(calculate_sleep_before_retry(attempt, max_duration))
-
-
-def current_time():
-  """Used by retry loop to get current time.
-
-  To be mocked in tests.
-  """
-  return time.time()
-
-
-def retry_loop(max_attempts=None, timeout=None):
-  """Yields whenever new attempt to perform some action is needed.
-
-  Yields instances of the RetryAttempt class that describe the current attempt.
-  Setting the |skip_sleep| attribute of a RetryAttempt to True causes the retry
-  loop to run the next attempt immediately.
-  """
-  start = current_time()
-  for attempt in itertools.count():
-    # Too many attempts?
-    if max_attempts and attempt == max_attempts:
-      break
-    # Retried for too long?
-    remaining = (timeout - (current_time() - start)) if timeout else None
-    if remaining is not None and remaining < 0:
-      break
-    # Kick next iteration.
-    attempt_obj = RetryAttempt(attempt, remaining)
-    yield attempt_obj
-    if attempt_obj.skip_sleep:
-      continue
-    # Only sleep if we are going to try again.
-    if max_attempts and attempt != max_attempts - 1:
-      remaining = (timeout - (current_time() - start)) if timeout else None
-      if remaining is not None and remaining < 0:
-        break
-      sleep_before_retry(attempt, remaining)
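# Editor's note: an illustrative sketch (not part of the diff) of how callers
# can drive retry_loop() from the deleted net.py; fetch() is a hypothetical
# callable that returns None on failure. The import assumes the module is
# still available as utils.net.

import logging
from utils import net

def call_with_retries(fetch):
  # Up to 5 attempts or 30 seconds in total, whichever limit is hit first.
  # Setting skip_sleep on the yielded RetryAttempt would rerun immediately.
  for attempt in net.retry_loop(max_attempts=5, timeout=30):
    if attempt.attempt:
      logging.warning('Retrying, attempt %d', attempt.attempt)
    result = fetch()
    if result is not None:
      return result
  return None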
diff --git a/tools/swarming_client/utils/oauth.py b/tools/swarming_client/utils/oauth.py
deleted file mode 100644
index 8e10da1..0000000
--- a/tools/swarming_client/utils/oauth.py
+++ /dev/null
@@ -1,602 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""OAuth2 related utilities and implementation of browser based login flow."""
-
-# pylint: disable=W0613
-
-import base64
-import BaseHTTPServer
-import collections
-import datetime
-import json
-import logging
-import optparse
-import os
-import socket
-import sys
-import threading
-import time
-import urllib
-import urlparse
-import webbrowser
-
-# All libraries here expect to find themselves in sys.path.
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(
-    __file__.decode(sys.getfilesystemencoding()))))
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party'))
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'pyasn1'))
-sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'rsa'))
-
-import httplib2
-import rsa
-
-from pyasn1.codec.der import decoder
-from pyasn1.type import univ
-
-from oauth2client import client
-from oauth2client import multistore_file
-
-from third_party import requests
-from utils import tools
-
-
-# Path to a file with cached OAuth2 credentials used by default. Can be
-# overridden by command line option or env variable.
-DEFAULT_OAUTH_TOKENS_CACHE = os.path.join(
-    os.path.expanduser('~'), '.isolated_oauth')
-
-# List of space separated OAuth scopes for generated tokens. GAE apps usually
-# use userinfo.email scope for authentication.
-OAUTH_SCOPES = 'https://www.googleapis.com/auth/userinfo.email'
-
-# Endpoint to generate access tokens.
-OAUTH_TOKEN_ENDPOINT = 'https://www.googleapis.com/oauth2/v3/token'
-
-
-# OAuth authentication method configuration, used by utils/net.py.
-# See doc string for 'make_oauth_config' for meaning of fields.
-OAuthConfig = collections.namedtuple('OAuthConfig', [
-  'disabled',
-  'tokens_cache',
-  'no_local_webserver',
-  'webserver_port',
-  'service_account_json',
-])
-
-
-# Access token with its expiration time (UTC datetime, or None if not known).
-AccessToken = collections.namedtuple('AccessToken', [
-  'token',
-  'expires_at',
-])
-
-
-# Service account credentials as loaded from JSON file.
-ServiceAccountCredentials = collections.namedtuple(
-    'ServiceAccountCredentials', [
-  'client_email',
-  'client_id',
-  'private_key',  # PEM encoded.
-  'private_key_id',
-])
-
-
-# Configuration fetched from a service, returned by _fetch_service_config.
-_ServiceConfig = collections.namedtuple('_ServiceConfig', [
-  'client_id',
-  'client_secret',
-  'primary_url',
-])
-
-# Process cache of _fetch_service_config results.
-_service_config_cache = {}
-_service_config_cache_lock = threading.Lock()
-
-
-class BadServiceAccountCredentials(Exception):
-  """Service account JSON is missing or not valid."""
-
-
-def make_oauth_config(
-    disabled=None,
-    tokens_cache=None,
-    no_local_webserver=None,
-    webserver_port=None,
-    service_account_json=None):
-  """Returns new instance of OAuthConfig.
-
-  If some config option is not provided or None, it will be set to a reasonable
-  default value. This function also acts as an authoritative place for default
-  values of corresponding command line options.
-
-  Args:
-    disabled: True to completely turn off OAuth authentication.
-    tokens_cache: path to a file with cached OAuth2 credentials.
-    no_local_webserver: if True, do not try to run local web server that
-        handles redirects. Use copy-pasted verification code instead.
-    webserver_port: port to run local webserver on.
-    service_account_json: path to JSON file with service account credentials.
-  """
-  if tokens_cache is None:
-    tokens_cache = os.environ.get(
-        'SWARMING_AUTH_TOKENS_CACHE', DEFAULT_OAUTH_TOKENS_CACHE)
-  if no_local_webserver is None:
-    no_local_webserver = tools.get_bool_env_var(
-        'SWARMING_AUTH_NO_LOCAL_WEBSERVER')
-  if webserver_port is None:
-    webserver_port = 8090
-  if service_account_json is None:
-    service_account_json = os.environ.get('SWARMING_AUTH_SERVICE_ACCOUNT_JSON')
-  if disabled is None:
-    disabled = tools.is_headless() and not service_account_json
-  return OAuthConfig(
-      disabled,
-      tokens_cache,
-      no_local_webserver,
-      webserver_port,
-      service_account_json)
-
-
-def add_oauth_options(parser):
-  """Appends OAuth related options to OptionParser."""
-  default_config = make_oauth_config()
-  parser.oauth_group = optparse.OptionGroup(parser, 'OAuth options')
-  parser.oauth_group.add_option(
-      '--auth-disabled',
-      type=int,
-      default=int(default_config.disabled),
-      help='Set to 1 to disable OAuth and rely only on IP whitelist for '
-           'authentication. Currently used from bots. [default: %default]')
-  parser.oauth_group.add_option(
-      '--auth-tokens-cache',
-      default=default_config.tokens_cache,
-      help='Path to a file with oauth2client tokens cache. It should be a safe '
-          'location accessible only to a current user: knowing content of this '
-          'file is roughly equivalent to knowing account password. Can also be '
-          'set with SWARMING_AUTH_TOKENS_CACHE environment variable. '
-          '[default: %default]')
-  parser.oauth_group.add_option(
-      '--auth-no-local-webserver',
-      action='store_true',
-      default=default_config.no_local_webserver,
-      help='Do not run a local web server when performing OAuth2 login flow. '
-          'Can also be set with SWARMING_AUTH_NO_LOCAL_WEBSERVER=1 '
-          'environment variable. [default: %default]')
-  parser.oauth_group.add_option(
-      '--auth-host-port',
-      type=int,
-      default=default_config.webserver_port,
-      help='Port a local web server should listen on. Used only if '
-          '--auth-no-local-webserver is not set. [default: %default]')
-  parser.oauth_group.add_option(
-      '--auth-service-account-json',
-      default=default_config.service_account_json,
-      help='Path to a JSON file with service account credentials to use. '
-          'Can be generated by "Generate new JSON key" button in "Credentials" '
-          'section of any Cloud Console project. The value can also be set '
-          'with SWARMING_AUTH_SERVICE_ACCOUNT_JSON environment variable. '
-          '[default: %default]')
-  parser.add_option_group(parser.oauth_group)
-
-
-def extract_oauth_config_from_options(options):
-  """Given OptionParser with oauth options, extracts OAuthConfig from it.
-
-  OptionParser should be populated with oauth options by 'add_oauth_options'.
-  """
-  # Validate service account JSON is correct by trying to load it.
-  try:
-    if options.auth_service_account_json:
-      acc = _load_service_account_json(options.auth_service_account_json)
-      _parse_private_key(acc.private_key)
-  except BadServiceAccountCredentials as exc:
-    raise ValueError('Bad service account credentials: %s' % exc)
-  return make_oauth_config(
-      disabled=(
-          bool(options.auth_disabled) and
-          not options.auth_service_account_json),
-      tokens_cache=options.auth_tokens_cache,
-      no_local_webserver=options.auth_no_local_webserver,
-      webserver_port=options.auth_host_port,
-      service_account_json=options.auth_service_account_json)
-
-
-def load_access_token(urlhost, config):
-  """Returns cached AccessToken if it is not expired yet."""
-  assert isinstance(config, OAuthConfig)
-  if config.disabled:
-    return None
-  auth_service_url = _fetch_auth_service_url(urlhost)
-  if not auth_service_url:
-    return None
-  storage = _get_storage(auth_service_url, config)
-  credentials = storage.get()
-  # Missing?
-  if not credentials or credentials.invalid:
-    return None
-  # Expired?
-  if not credentials.access_token or credentials.access_token_expired:
-    return None
-  return AccessToken(credentials.access_token, credentials.token_expiry)
-
-
-def create_access_token(urlhost, config, allow_user_interaction):
-  """Mints and caches new access_token, launching OAuth2 dance if necessary.
-
-  Args:
-    urlhost: base URL of a host to make OAuth2 token for.
-    config: OAuthConfig instance.
-    allow_user_interaction: if False, do not use interactive browser based
-        flow (return None instead if it is required).
-
-  Returns:
-    AccessToken on success.
-    None on error or if OAuth2 flow was interrupted.
-  """
-  assert isinstance(config, OAuthConfig)
-  if config.disabled:
-    return None
-  auth_service_url = _fetch_auth_service_url(urlhost)
-  if not auth_service_url:
-    return None
-
-  storage = _get_storage(auth_service_url, config)
-  credentials = None
-
-  if config.service_account_json:
-    # 2-legged flow that uses service account credentials.
-    try:
-      service_account = _load_service_account_json(config.service_account_json)
-    except BadServiceAccountCredentials as e:
-      logging.error('Bad service account credentials: %s', e)
-      return None
-
-    # Body of token refresh request (with JWT assertion signed with secret key).
-    body = urllib.urlencode({
-      'assertion': _make_assertion_jwt(service_account),
-      'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer',
-    })
-
-    # Exchange it for access_token.
-    http = httplib2.Http(ca_certs=tools.get_cacerts_bundle())
-    resp, content = http.request(
-        uri=OAUTH_TOKEN_ENDPOINT,
-        method='POST',
-        body=body,
-        headers={'Content-Type': 'application/x-www-form-urlencoded'})
-    if resp.status != 200:
-      logging.error(
-          'Failed to grab access token for service account: %r', content)
-      return None
-
-    try:
-      token = json.loads(content)
-      access_token = token['access_token']
-      expires_at = None
-      if 'expires_in' in token:
-        expires_at = datetime.datetime.utcnow()
-        expires_at += datetime.timedelta(seconds=int(token['expires_in']))
-    except (KeyError, ValueError) as e:
-      logging.error('Unexpected access token response format: %s', e)
-      return None
-
-    credentials = client.OAuth2Credentials(
-        access_token=access_token,
-        client_id=service_account.client_id,
-        client_secret=None,
-        refresh_token=None,
-        token_expiry=expires_at,
-        token_uri=None,
-        user_agent=None)
-  else:
-    # 3-legged flow with (perhaps cached) refresh token.
-    credentials = storage.get()
-    refreshed = False
-    if credentials and not credentials.invalid:
-      try:
-        credentials.refresh(httplib2.Http(ca_certs=tools.get_cacerts_bundle()))
-        refreshed = True
-      except client.Error as err:
-        logging.error('OAuth error: %s', err)
-
-    # Refresh token is missing or invalid, go through full flow.
-    if not refreshed:
-      if not allow_user_interaction:
-        return None
-      credentials = _run_oauth_dance(auth_service_url, config)
-      if not credentials:
-        return None
-
-  # Success.
-  logging.info('OAuth access_token refreshed. Expires in %s.',
-      credentials.token_expiry - datetime.datetime.utcnow())
-  credentials.set_store(storage)
-  storage.put(credentials)
-  return AccessToken(credentials.access_token, credentials.token_expiry)
-
-
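A rough cache-then-mint sketch combining load_access_token() and create_access_token() above (the urlhost value is a placeholder):

    config = make_oauth_config()
    urlhost = 'https://example.appspot.com'
    token = load_access_token(urlhost, config)
    if not token:
      # No cached credentials, or they expired: run the full flow, which may
      # open a browser unless a service account JSON is configured.
      token = create_access_token(urlhost, config, allow_user_interaction=True)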
-def purge_access_token(urlhost, config):
-  """Deletes OAuth tokens that can be used to access |urlhost|."""
-  assert isinstance(config, OAuthConfig)
-  auth_service_url = _fetch_auth_service_url(urlhost)
-  if auth_service_url:
-    _get_storage(auth_service_url, config).delete()
-
-
-def _get_storage(urlhost, config):
-  """Returns oauth2client.Storage with tokens to access |urlhost|."""
-  # Do not mix access_token caches for different service accounts.
-  if config.service_account_json:
-    creds = _load_service_account_json(config.service_account_json)
-    key = 'sa:%s:%s' % (creds.client_id, urlhost.rstrip('/'))
-  else:
-    key = urlhost.rstrip('/')
-  return multistore_file.get_credential_storage_custom_string_key(
-      config.tokens_cache, key)
-
-
-def _fetch_auth_service_url(urlhost):
-  """Fetches URL of a main authentication service used by |urlhost|.
-
-  Returns:
-    * If |urlhost| is using an authentication service, returns its URL.
-    * If |urlhost| is not using an authentication service, returns |urlhost|.
-    * If there was an error communicating with |urlhost|, returns None.
-  """
-  service_config = _fetch_service_config(urlhost)
-  if not service_config:
-    return None
-  url = (service_config.primary_url or urlhost).rstrip('/')
-  assert url.startswith(
-      ('https://', 'http://127.0.0.1:', 'http://localhost:')), url
-  return url
-
-
-def _fetch_service_config(urlhost):
-  """Fetches OAuth related configuration from a service.
-
-  The configuration includes OAuth client_id and client_secret, as well as
-  URL of a primary authentication service (or None if not used).
-
-  Returns:
-    Instance of _ServiceConfig on success, None on failure.
-  """
-  def do_fetch():
-    # client_secret is not really a secret in this case, so an attacker can
-    # impersonate the service's identity in the OAuth2 flow. But that's
-    # generally fine as long as the list of allowed redirect_uri's associated
-    # with client_id is limited to 'localhost' or 'urn:ietf:wg:oauth:2.0:oob'.
-    # In that case an attacker needs some process running on the user's machine
-    # to successfully complete the flow and grab the access_token. When you have
-    # malicious code running on your machine you're screwed anyway.
-    response = requests.get(
-        '%s/auth/api/v1/server/oauth_config' % urlhost.rstrip('/'),
-        verify=tools.get_cacerts_bundle())
-    if response.status_code == 200:
-      try:
-        config = response.json()
-        if not isinstance(config, dict):
-          raise ValueError()
-        return _ServiceConfig(
-            config['client_id'],
-            config['client_not_so_secret'],
-            config.get('primary_url'))
-      except (KeyError, ValueError) as err:
-        logging.error('Invalid response from the service: %s', err)
-    else:
-      logging.warning(
-          'Error when fetching oauth_config, HTTP status code %d',
-          response.status_code)
-    return None
-
-  # Use local cache to avoid unnecessary network calls.
-  with _service_config_cache_lock:
-    if urlhost not in _service_config_cache:
-      config = do_fetch()
-      if config:
-        _service_config_cache[urlhost] = config
-    return _service_config_cache.get(urlhost)
-
-
-# Service account related code.
-
-
-def _load_service_account_json(path):
-  """Loads ServiceAccountCredentials from a JSON file.
-
-  Raises BadServiceAccountCredentials if the file is missing or not valid.
-  """
-  try:
-    with open(path, 'r') as f:
-      data = json.load(f)
-    return ServiceAccountCredentials(
-        client_email=str(data['client_email']),
-        client_id=str(data['client_id']),
-        private_key=str(data['private_key']),
-        private_key_id=str(data['private_key_id']))
-  except IOError as e:
-    raise BadServiceAccountCredentials('Can\'t open %s: %s' % (path, e))
-  except ValueError as e:
-    raise BadServiceAccountCredentials('Not a JSON file %s: %s' % (path, e))
-  except KeyError as e:
-    raise BadServiceAccountCredentials('Missing key in %s: %s' % (path, e))
-
-
-def _parse_private_key(pem):
-  """PEM encoded OpenSSL private RSA key -> rsa.PrivateKey."""
-  # Cloud Console generates OpenSSL-compatible private RSA keys. The 'rsa'
-  # library doesn't support them natively. Do some ASN.1 unwrapping to extract
-  # the naked RSA key (in DER-encoded form). See
-  # https://www.ietf.org/rfc/rfc2313.txt.
-  try:
-    der = rsa.pem.load_pem(pem, 'PRIVATE KEY')
-    keyinfo, _ = decoder.decode(der)
-    if keyinfo[1][0] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'):
-        raise BadServiceAccountCredentials(
-            'Not a DER-encoded OpenSSL private RSA key')
-    private_key_der = keyinfo[2].asOctets()
-  except IndexError:
-    raise BadServiceAccountCredentials(
-        'Not a DER-encoded OpenSSL private RSA key')
-  return rsa.PrivateKey.load_pkcs1(private_key_der, format='DER')
-
-
-def _make_assertion_jwt(service_account):
-  """Generates signed assertion JWT for 2-legged OAuth flow."""
-  # For more info see:
-  # https://developers.google.com/accounts/docs/OAuth2ServiceAccount.
-  now = long(time.time())
-  payload = {
-    'aud': OAUTH_TOKEN_ENDPOINT,
-    'scope': OAUTH_SCOPES,
-    'iat': now,
-    'exp': now + 3600,
-    'iss': service_account.client_email,
-  }
-  # oauth2client knows how to use PyCrypto or PyOpenSSL for signing. Both are
-  # heavy libraries that require compiled extensions. Use the pure-python 'rsa'
-  # lib instead. It is slower, but we do not care, since this code path is
-  # exercised only when the access token expires (once an hour).
-  pkey = _parse_private_key(service_account.private_key)
-  return _make_signed_jwt(payload, pkey)
-
-
-def _make_signed_jwt(payload, pkey):
-  """Wraps |payload| dict into signed JSON Web Token."""
-  # See http://self-issued.info/docs/draft-jones-json-web-token.html.
-  as_json = lambda d: json.dumps(d, sort_keys=True, separators=(',', ':'))
-  b64encode = lambda d: base64.urlsafe_b64encode(d).rstrip('=')
-  to_sign = '%s.%s' % (
-      b64encode(as_json({'typ': 'JWT', 'alg': 'RS256'})),
-      b64encode(as_json(payload)))
-  signature = rsa.sign(to_sign, pkey, 'SHA-256')
-  return '%s.%s' % (to_sign, b64encode(signature))
-
-
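For orientation, _make_signed_jwt() above emits a standard three-segment JWT (base64url header, payload, and RS256 signature, with '=' padding stripped). A small illustrative helper, not part of the module, that decodes the unsigned segments:

    import base64
    import json

    def peek_jwt(jwt):
      header_b64, payload_b64, _sig_b64 = jwt.split('.')
      pad = lambda s: s + '=' * (-len(s) % 4)  # restore the stripped padding
      header = json.loads(base64.urlsafe_b64decode(pad(header_b64)))
      payload = json.loads(base64.urlsafe_b64decode(pad(payload_b64)))
      return header, payload  # e.g. {'typ': 'JWT', 'alg': 'RS256'}, {'aud': ...}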
-# The chunk of code below is based on oauth2client.tools module, but adapted for
-# usage of _fetch_service_config, our command line arguments, and so on.
-
-
-def _run_oauth_dance(urlhost, config):
-  """Perform full OAuth2 dance with the browser."""
-  def out(s):
-    print s
-  def err(s):
-    print >> sys.stderr, s
-
-  # Fetch client_id and client_secret from the service itself.
-  service_config = _fetch_service_config(urlhost)
-  if not service_config:
-    err('Couldn\'t fetch OAuth configuration')
-    return None
-  if not service_config.client_id or not service_config.client_secret:
-    err('OAuth is not configured on the service')
-    return None
-
-  flow = client.OAuth2WebServerFlow(
-      service_config.client_id,
-      service_config.client_secret,
-      OAUTH_SCOPES,
-      approval_prompt='force')
-
-  use_local_webserver = not config.no_local_webserver
-  port = config.webserver_port
-  if use_local_webserver:
-    success = False
-    try:
-      httpd = ClientRedirectServer(('localhost', port), ClientRedirectHandler)
-    except socket.error:
-      pass
-    else:
-      success = True
-    use_local_webserver = success
-    if not success:
-      out(
-        'Failed to start a local webserver listening on port %d.\n'
-        'Please check your firewall settings and locally running programs that '
-        'may be blocking or using those ports.\n\n'
-        'Falling back to --auth-no-local-webserver and continuing with '
-        'authentication.\n' % port)
-
-  if use_local_webserver:
-    oauth_callback = 'http://localhost:%s/' % port
-  else:
-    oauth_callback = client.OOB_CALLBACK_URN
-  flow.redirect_uri = oauth_callback
-  authorize_url = flow.step1_get_authorize_url()
-
-  if use_local_webserver:
-    webbrowser.open(authorize_url, new=1, autoraise=True)
-    out(
-      'Your browser has been opened to visit:\n\n'
-      '    %s\n\n'
-      'If your browser is on a different machine then exit and re-run this '
-      'application with the command-line parameter\n\n'
-      '  --auth-no-local-webserver\n' % authorize_url)
-  else:
-    out(
-      'Go to the following link in your browser:\n\n'
-      '    %s\n' % authorize_url)
-
-  try:
-    code = None
-    if use_local_webserver:
-      httpd.handle_request()
-      if 'error' in httpd.query_params:
-        err('Authentication request was rejected.')
-        return None
-      if 'code' not in httpd.query_params:
-        err(
-          'Failed to find "code" in the query parameters of the redirect.\n'
-          'Try running with --auth-no-local-webserver.')
-        return None
-      code = httpd.query_params['code']
-    else:
-      code = raw_input('Enter verification code: ').strip()
-  except KeyboardInterrupt:
-    err('Canceled.')
-    return None
-
-  try:
-    return flow.step2_exchange(code)
-  except client.FlowExchangeError as e:
-    err('Authentication has failed: %s' % e)
-    return None
-
-
-class ClientRedirectServer(BaseHTTPServer.HTTPServer):
-  """A server to handle OAuth 2.0 redirects back to localhost.
-
-  Waits for a single request and parses the query parameters
-  into query_params and then stops serving.
-  """
-  query_params = {}
-
-
-class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-  """A handler for OAuth 2.0 redirects back to localhost.
-
-  Waits for a single request and parses the query parameters
-  into the servers query_params and then stops serving.
-  """
-
-  def do_GET(self):
-    """Handle a GET request.
-
-    Parses the query parameters and prints a message
-    if the flow has completed. Note that we can't detect
-    if an error occurred.
-    """
-    self.send_response(200)
-    self.send_header('Content-type', 'text/html')
-    self.end_headers()
-    query = self.path.split('?', 1)[-1]
-    query = dict(urlparse.parse_qsl(query))
-    self.server.query_params = query
-    self.wfile.write('<html><head><title>Authentication Status</title></head>')
-    self.wfile.write('<body><p>The authentication flow has completed.</p>')
-    self.wfile.write('</body></html>')
-
-  def log_message(self, _format, *args):
-    """Do not log messages to stdout while running as command line program."""
diff --git a/tools/swarming_client/utils/on_error.py b/tools/swarming_client/utils/on_error.py
deleted file mode 100644
index fc1dbd7..0000000
--- a/tools/swarming_client/utils/on_error.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# coding=utf-8
-# Copyright 2014 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Declares a single function to report errors to a server.
-
-By running the script, you accept that errors will be reported to the server you
-connect to.
-"""
-
-import atexit
-import getpass
-import os
-import platform
-import re
-import socket
-import sys
-import time
-import traceback
-
-from . import net
-from . import tools
-from . import zip_package
-
-
-# It is very important to not get reports from non-Chromium infrastructure. We
-# *really* do not want to know anything about you, dear non-Google employee.
-_ENABLED_DOMAINS = (
-  '.chromium.org',
-  '.google.com',
-  '.google.com.internal',
-)
-
-# If this envvar is '1' then disable reports. Useful when developing the client.
-_DISABLE_ENVVAR = 'SWARMING_DISABLE_ON_ERROR'
-
-
-# Set this variable to the net.HttpService server to be used to report errors.
-# It must be done early at process startup. Once this value is set, it is
-# considered that the atexit handler is enabled.
-_SERVER = None
-
-# This is tricky because it is looked at during import time. At atexit time,
-# __file__ is not defined anymore so it has to be saved first. Also make it work
-# when executed directly from a .zip file. Also handle interactive mode where
-# this is always set to None.
-_SOURCE = zip_package.get_main_script_path()
-if _SOURCE:
-  _SOURCE = os.path.basename(_SOURCE)
-
-_TIME_STARTED = time.time()
-
-_HOSTNAME = None
-
-
-# Paths that can be stripped from the stack traces by _relative_path().
-_PATHS_TO_STRIP = (
-  os.getcwd() + os.path.sep,
-  os.path.dirname(os.__file__) + os.path.sep,
-  '.' + os.path.sep,
-)
-
-
-# Used to simplify the stack trace, by removing path information when possible.
-_RE_STACK_TRACE_FILE = (
-    r'^(?P<prefix>  File \")(?P<file>[^\"]+)(?P<suffix>\"\, line )'
-    r'(?P<line_no>\d+)(?P<rest>|\, in .+)$')
-
-
-### Private stuff.
-
-
-def _relative_path(path):
-  """Strips the current working directory or common library prefix.
-
-  Used by Formatter.
-  """
-  for i in _PATHS_TO_STRIP:
-    if path.startswith(i):
-      return path[len(i):]
-  return path
-
-
-def _reformat_stack(stack):
-  """Post processes the stack trace through _relative_path()."""
-  def replace(l):
-    m = re.match(_RE_STACK_TRACE_FILE, l, re.DOTALL)
-    if m:
-      groups = list(m.groups())
-      groups[1] = _relative_path(groups[1])
-      return ''.join(groups)
-    return l
-
-  # Trim paths.
-  out = map(replace, stack.splitlines(True))
-
-  # Trim indentation.
-  while all(l.startswith(' ') for l in out):
-    out = [l[1:] for l in out]
-  return ''.join(out)
-
-
-def _format_exception(e):
-  """Returns a human readable form of an exception.
-
-  Adds as much interesting information as possible in the safest way."""
-  try:
-    out = repr(e)
-  except Exception:
-    out = ''
-  try:
-    out = str(e)
-  except Exception:
-    pass
-  return out
-
-
-def _post(params):
-  """Executes the HTTP Post to the server."""
-  if not _SERVER:
-    return None
-  return _SERVER.json_request(
-      '/ereporter2/api/v1/on_error', data=params, max_attempts=1, timeout=20)
-
-
-def _serialize_env():
-  """Makes os.environ json serializable.
-
-  It happens that environment variables may contain non-ASCII characters like
-  ANSI escape codes.
-  """
-  return dict(
-      (k, v.encode('ascii', 'replace')) for k, v in os.environ.iteritems())
-
-
-def _report_exception(message, e, stack):
-  """Sends the stack trace to the breakpad server."""
-  name = 'crash report' if e else 'report'
-  sys.stderr.write('Sending the %s ...' % name)
-  message = (message or '').rstrip()
-  if e:
-    if message:
-      message += '\n'
-    message += (_format_exception(e)).rstrip()
-
-  params = {
-    'args': sys.argv,
-    'cwd': os.getcwd(),
-    'duration': time.time() - _TIME_STARTED,
-    'env': _serialize_env(),
-    'hostname': _HOSTNAME,
-    'message': message,
-    'os': sys.platform,
-    'python_version': platform.python_version(),
-    'source': _SOURCE,
-    'user': getpass.getuser(),
-  }
-  if e:
-    params['category'] = 'exception'
-    params['exception_type'] = e.__class__.__name__
-  else:
-    params['category'] = 'report'
-
-  if stack:
-    params['stack'] = _reformat_stack(stack).rstrip()
-    if len(params['stack']) > 4096:
-      params['stack'] = params['stack'][:4095] + '…'
-
-  version = getattr(sys.modules['__main__'], '__version__', None)
-  if version:
-    params['version'] = version
-
-  data = {
-    'r': params,
-    # Bump the version when changing the packet format.
-    'v': '1',
-  }
-  response = _post(data)
-  if response and response.get('url'):
-    sys.stderr.write(' done.\nReport URL: %s\n' % response['url'])
-  else:
-    sys.stderr.write(' failed!\n')
-  sys.stderr.write(message + '\n')
-
-
-def _check_for_exception_on_exit():
-  """Runs at exit. Looks for an active exception and reports it if so.
-
-  Since atexit() may not be called from the frame itself, use sys.last_value.
-  """
-  # Sadly, sys.exc_info() cannot be used here, since atexit calls are called
-  # outside the exception handler.
-  exception = getattr(sys, 'last_value', None)
-  if not exception or isinstance(exception, KeyboardInterrupt):
-    return
-
-  last_tb = getattr(sys, 'last_traceback', None)
-  if not last_tb:
-    return
-
-  _report_exception(
-      'Process exited due to exception',
-      exception,
-      ''.join(traceback.format_tb(last_tb)))
-
-
-def _is_in_test():
-  """Returns True if the filename of the __main__ module ends with _test.py(c)."""
-  main_file = os.path.basename(getattr(sys.modules['__main__'], '__file__', ''))
-  return os.path.splitext(main_file)[0].endswith('_test')
-
-
-### Public API.
-
-
-def report_on_exception_exit(server):
-  """Registers the callback at exit to report an error if the process exits due
-  to an exception.
-  """
-  global _HOSTNAME
-  global _SERVER
-  if _SERVER:
-    raise ValueError('on_error.report_on_exception_exit() was called twice')
-
-  if tools.get_bool_env_var(_DISABLE_ENVVAR):
-    return False
-
-  if _is_in_test():
-    # Disable when running inside unit tests process.
-    return False
-
-  if not server.startswith('https://'):
-    # Only allow report over HTTPS. Silently drop it.
-    return False
-
-  _HOSTNAME = socket.getfqdn()
-  if not _HOSTNAME.endswith(_ENABLED_DOMAINS):
-    # Silently skip non-Google infrastructure. Technically, it reports to the
-    # server the client code is talking to, so in practice it would be safe for
-    # a non-Googler to manually enable this, assuming their client code talks to
-    # a server they also own. Please send a CL if you desire this functionality.
-    return False
-
-  _SERVER = net.get_http_service(server, allow_cached=False)
-  atexit.register(_check_for_exception_on_exit)
-  return True
-
-
-def report(error):
-  """Either reports an error to the server or prints an error to stderr.
-
-  It's intended to be used only for non-recoverable unexpected errors that must
-  be monitored at the server level, like API request failures. It should NOT be
-  used for input validation, command line argument errors, etc.
-
-  Arguments:
-    error: error message string (possibly multiple lines) or None. If an
-        exception frame is active, it will be logged.
-  """
-  exc_info = sys.exc_info()
-  if _SERVER:
-    _report_exception(
-        error, exc_info[1], ''.join(traceback.format_tb(exc_info[2])))
-    return
-
-  if error:
-    sys.stderr.write(error + '\n')
-  if exc_info[1]:
-    sys.stderr.write(_format_exception(exc_info[1]) + '\n')
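Typical wiring of the on_error module above, per its docstrings (the server URL and do_work() are placeholders):

    # Opt in to exit-time crash reports; only HTTPS servers on the allowed
    # domains are accepted, everything else is silently ignored.
    on_error.report_on_exception_exit('https://example.appspot.com')
    try:
      do_work()  # hypothetical payload
    except IOError:
      # Report a non-recoverable error; the active exception frame and stack
      # are attached automatically.
      on_error.report('Failed to talk to the server')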
diff --git a/tools/swarming_client/utils/subprocess42.py b/tools/swarming_client/utils/subprocess42.py
deleted file mode 100644
index e6f1dfe..0000000
--- a/tools/swarming_client/utils/subprocess42.py
+++ /dev/null
@@ -1,689 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""subprocess42 is the answer to life the universe and everything.
-
-It has the particularity of having a Popen implementation that can yield output
-as it is produced while implementing a timeout and NOT requiring the use of
-worker threads.
-
-Example:
-  Wait for a child process with a timeout, send SIGTERM, wait a grace period
-  then send SIGKILL:
-
-    def wait_terminate_then_kill(proc, timeout, grace):
-      try:
-        return proc.wait(timeout)
-      except subprocess42.TimeoutExpired:
-        proc.terminate()
-        try:
-          return proc.wait(grace)
-        except subprocess42.TimeoutExpired:
-          proc.kill()
-        return proc.wait()
-
-
-TODO(maruel): Add VOID support like subprocess2.
-"""
-
-import collections
-import contextlib
-import errno
-import os
-import signal
-import sys
-import threading
-import time
-
-import subprocess
-
-from subprocess import CalledProcessError, PIPE, STDOUT  # pylint: disable=W0611
-from subprocess import list2cmdline
-
-
-# Default maxsize argument.
-MAX_SIZE = 16384
-
-
-# Set to True when inhibit_crash_dump() has been called.
-_OS_ERROR_REPORTING_INHIBITED = False
-
-
-if subprocess.mswindows:
-  import ctypes
-  import msvcrt  # pylint: disable=F0401
-  from ctypes import wintypes
-  from ctypes import windll
-
-
-  # Which one is received depends on how this process was called and is outside
-  # the control of this script. See the Popen docstring for more details.
-  STOP_SIGNALS = (signal.SIGBREAK, signal.SIGTERM)
-
-
-  def ReadFile(handle, desired_bytes):
-    """Calls kernel32.ReadFile()."""
-    c_read = wintypes.DWORD()
-    buff = wintypes.create_string_buffer(desired_bytes+1)
-    windll.kernel32.ReadFile(
-        handle, buff, desired_bytes, wintypes.byref(c_read), None)
-    # NULL terminate it.
-    buff[c_read.value] = '\x00'
-    return wintypes.GetLastError(), buff.value
-
-  def PeekNamedPipe(handle):
-    """Calls kernel32.PeekNamedPipe(). Simplified version."""
-    c_avail = wintypes.DWORD()
-    c_message = wintypes.DWORD()
-    success = windll.kernel32.PeekNamedPipe(
-        handle, None, 0, None, wintypes.byref(c_avail),
-        wintypes.byref(c_message))
-    if not success:
-      raise OSError(wintypes.GetLastError())
-    return c_avail.value
-
-  def recv_multi_impl(conns, maxsize, timeout):
-    """Reads from the first available pipe.
-
-    It will immediately return on a closed connection, independent of timeout.
-
-    Arguments:
-    - maxsize: Maximum number of bytes to return. Defaults to MAX_SIZE.
-    - timeout: If None, it is blocking. If 0 or above, will return None if no
-          data is available within |timeout| seconds.
-
-    Returns:
-      tuple(int(index), str(data), bool(closed)).
-    """
-    assert conns
-    assert timeout is None or isinstance(timeout, (int, float)), timeout
-    maxsize = max(maxsize or MAX_SIZE, 1)
-
-    # TODO(maruel): Use WaitForMultipleObjects(). Python creates anonymous pipes
-    # for proc.stdout and proc.stderr but they are implemented as named pipes on
-    # Windows. Since named pipes are not waitable objects, they can't be passed
-    # as-is to WFMO(). So this means N times CreateEvent(), N times ReadFile()
-    # and finally WFMO(). This requires caching the events handles in the Popen
-    # object and remembering the pending ReadFile() calls. This will require
-    # some re-architecture to store the relevant event handle and OVERLAPPEDIO
-    # object in Popen or the file object.
-    start = time.time()
-    handles = [
-      (i, msvcrt.get_osfhandle(c.fileno())) for i, c in enumerate(conns)
-    ]
-    while True:
-      for index, handle in handles:
-        try:
-          avail = min(PeekNamedPipe(handle), maxsize)
-          if avail:
-            return index, ReadFile(handle, avail)[1], False
-        except OSError:
-          # The pipe closed.
-          return index, None, True
-
-      if timeout is not None and (time.time() - start) >= timeout:
-        return None, None, False
-      # Polling rocks.
-      time.sleep(0.001)
-
-else:
-  import fcntl  # pylint: disable=F0401
-  import select
-
-
-  # Signals that mean this process should exit quickly.
-  STOP_SIGNALS = (signal.SIGINT, signal.SIGTERM)
-
-
-  def recv_multi_impl(conns, maxsize, timeout):
-    """Reads from the first available pipe.
-
-    It will immediately return on a closed connection, independent of timeout.
-
-    Arguments:
-    - maxsize: Maximum number of bytes to return. Defaults to MAX_SIZE.
-    - timeout: If None, it is blocking. If 0 or above, will return None if no
-          data is available within |timeout| seconds.
-
-    Returns:
-      tuple(int(index), str(data), bool(closed)).
-    """
-    assert conns
-    assert timeout is None or isinstance(timeout, (int, float)), timeout
-    maxsize = max(maxsize or MAX_SIZE, 1)
-
-    # select(timeout=0) will block, it has to be a value > 0.
-    if timeout == 0:
-      timeout = 0.001
-    try:
-      r, _, _ = select.select(conns, [], [], timeout)
-    except select.error:
-      r = None
-    if not r:
-      return None, None, False
-
-    conn = r[0]
-    # Temporarily make it non-blocking.
-    # TODO(maruel): This is not very efficient when the caller is doing this in
-    # a loop. Add a mechanism to have the caller handle this.
-    flags = fcntl.fcntl(conn, fcntl.F_GETFL)
-    if not conn.closed:
-      # pylint: disable=E1101
-      fcntl.fcntl(conn, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-    try:
-      try:
-        data = conn.read(maxsize)
-      except IOError as e:
-        # On posix, this means the read would block.
-        if e.errno == errno.EAGAIN:
-          return conns.index(conn), None, False
-        raise e
-
-      if not data:
-        # On posix, this means the channel closed.
-        return conns.index(conn), None, True
-
-      return conns.index(conn), data, False
-    finally:
-      if not conn.closed:
-        fcntl.fcntl(conn, fcntl.F_SETFL, flags)
-
-
-class TimeoutExpired(Exception):
-  """Compatible with python3 subprocess."""
-  def __init__(self, cmd, timeout, output=None, stderr=None):
-    self.cmd = cmd
-    self.timeout = timeout
-    self.output = output
-    # Non-standard:
-    self.stderr = stderr
-    super(TimeoutExpired, self).__init__(str(self))
-
-  def __str__(self):
-    return "Command '%s' timed out after %s seconds" % (self.cmd, self.timeout)
-
-
-class Popen(subprocess.Popen):
-  """Adds timeout support on stdout and stderr.
-
-  Inspired by
-  http://code.activestate.com/recipes/440554-module-to-allow-asynchronous-subprocess-use-on-win/
-
-  Unlike subprocess, yield_any(), recv_*(), communicate() will close stdout and
-  stderr once the child process closes them, after all the data is read.
-
-  Arguments:
-  - detached: If True, the process is created in a new process group. On
-    Windows, use CREATE_NEW_PROCESS_GROUP. On posix, use os.setpgid(0, 0).
-
-  Additional members:
-  - start: timestamp when this process started.
-  - end: timestamp when this process exited, as seen by this process.
-  - detached: If True, the child process was started as a detached process.
-  - gid: process group id, if any.
-  - duration: time in seconds the process lasted.
-
-  Additional methods:
-  - yield_any(): yields output until the process terminates.
-  - recv_any(): reads from stdout and/or stderr with optional timeout.
-  - recv_out() & recv_err(): specialized version of recv_any().
-  """
-  # subprocess.Popen.__init__() is not threadsafe; there is a race between
-  # creating the exec-error pipe for the child and setting it to CLOEXEC during
-  # which another thread can fork and cause the pipe to be inherited by its
-  # descendants, which will cause the current Popen to hang until all those
-  # descendants exit. Protect this with a lock so that only one fork/exec can
-  # happen at a time.
-  popen_lock = threading.Lock()
-
-  def __init__(self, args, **kwargs):
-    assert 'creationflags' not in kwargs
-    assert 'preexec_fn' not in kwargs, 'Use detached=True instead'
-    self.start = time.time()
-    self.end = None
-    self.gid = None
-    self.detached = kwargs.pop('detached', False)
-    if self.detached:
-      if subprocess.mswindows:
-        kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
-      else:
-        kwargs['preexec_fn'] = lambda: os.setpgid(0, 0)
-    with self.popen_lock:
-      super(Popen, self).__init__(args, **kwargs)
-    self.args = args
-    if self.detached and not subprocess.mswindows:
-      try:
-        self.gid = os.getpgid(self.pid)
-      except OSError:
-        # sometimes the process can run+finish before we collect its pgid. fun.
-        pass
-
-  def duration(self):
-    """Duration of the child process.
-
-    It is greater or equal to the actual time the child process ran. It can be
-    significantly higher than the real value if neither .wait() nor .poll() was
-    used.
-    """
-    return (self.end or time.time()) - self.start
-
-  # pylint: disable=arguments-differ,redefined-builtin
-  def communicate(self, input=None, timeout=None):
-    """Implements python3's timeout support.
-
-    Unlike wait(), timeout=0 is considered the same as None.
-
-    Raises:
-    - TimeoutExpired when more than timeout seconds were spent waiting for the
-      process.
-    """
-    if not timeout:
-      return super(Popen, self).communicate(input=input)
-
-    assert isinstance(timeout, (int, float)), timeout
-
-    if self.stdin or self.stdout or self.stderr:
-      stdout = '' if self.stdout else None
-      stderr = '' if self.stderr else None
-      t = None
-      if input is not None:
-        assert self.stdin, (
-            'Can\'t use communicate(input) if not using '
-            'Popen(stdin=subprocess42.PIPE)')
-        # TODO(maruel): Switch back to non-threading.
-        def write():
-          try:
-            self.stdin.write(input)
-          except IOError:
-            pass
-        t = threading.Thread(name='Popen.communicate', target=write)
-        t.daemon = True
-        t.start()
-
-      try:
-        if self.stdout or self.stderr:
-          start = time.time()
-          end = start + timeout
-          def remaining():
-            return max(end - time.time(), 0)
-          for pipe, data in self.yield_any(timeout=remaining):
-            if pipe is None:
-              raise TimeoutExpired(self.args, timeout, stdout, stderr)
-            assert pipe in ('stdout', 'stderr'), pipe
-            if pipe == 'stdout':
-              stdout += data
-            else:
-              stderr += data
-        else:
-          # Only stdin is piped.
-          self.wait(timeout=timeout)
-      finally:
-        if t:
-          try:
-            self.stdin.close()
-          except IOError:
-            pass
-          t.join()
-    else:
-      # No pipe. The user wanted to use wait().
-      self.wait(timeout=timeout)
-      return None, None
-
-    # Indirectly initialize self.end.
-    self.wait()
-    return stdout, stderr
-
-  def wait(self, timeout=None):  # pylint: disable=arguments-differ
-    """Implements python3's timeout support.
-
-    Raises:
-    - TimeoutExpired when more than timeout seconds were spent waiting for the
-      process.
-    """
-    assert timeout is None or isinstance(timeout, (int, float)), timeout
-    if timeout is None:
-      super(Popen, self).wait()
-    elif self.returncode is None:
-      if subprocess.mswindows:
-        WAIT_TIMEOUT = 258
-        result = subprocess._subprocess.WaitForSingleObject(
-            self._handle, int(timeout * 1000))
-        if result == WAIT_TIMEOUT:
-          raise TimeoutExpired(self.args, timeout)
-        self.returncode = subprocess._subprocess.GetExitCodeProcess(
-            self._handle)
-      else:
-        # If you think the following code is horrible, it's because it is
-        # inspired by python3's stdlib.
-        end = time.time() + timeout
-        delay = 0.001
-        while True:
-          try:
-            pid, sts = subprocess._eintr_retry_call(
-                os.waitpid, self.pid, os.WNOHANG)
-          except OSError as e:
-            if e.errno != errno.ECHILD:
-              raise
-            pid = self.pid
-            sts = 0
-          if pid == self.pid:
-            # This sets self.returncode.
-            self._handle_exitstatus(sts)
-            break
-          remaining = end - time.time()
-          if remaining <= 0:
-            raise TimeoutExpired(self.args, timeout)
-          delay = min(delay * 2, remaining, .05)
-          time.sleep(delay)
-
-    if not self.end:
-      # communicate() uses wait() internally.
-      self.end = time.time()
-    return self.returncode
-
-  def poll(self):
-    ret = super(Popen, self).poll()
-    if ret is not None and not self.end:
-      self.end = time.time()
-    return ret
-
-  def yield_any_line(self, **kwargs):
-    """Yields lines until the process terminates.
-
-    Like yield_any, but yields lines.
-    """
-    return split(self.yield_any(**kwargs))
-
-  def yield_any(self, maxsize=None, timeout=None):
-    """Yields output until the process terminates.
-
-    Unlike wait(), does not raise TimeoutExpired.
-
-    Yields:
-      (pipename, data) where pipename is either 'stdout', 'stderr' or None in
-      case of timeout or when the child process closed one of the pipe(s) and
-      all pending data on the pipe was read.
-
-    Arguments:
-    - maxsize: See recv_any(). Can be a callable function.
-    - timeout: If None, the call is blocking. If set, yields None, None if no
-          data is available within |timeout| seconds. It resets itself after
-          each yield. Can be a callable function.
-    """
-    assert self.stdout or self.stderr
-    if timeout is not None:
-      # timeout=0 effectively means that the pipe is continuously polled.
-      if isinstance(timeout, (int, float)):
-        assert timeout >= 0, timeout
-        old_timeout = timeout
-        timeout = lambda: old_timeout
-      else:
-        assert callable(timeout), timeout
-
-    if maxsize is not None and not callable(maxsize):
-      assert isinstance(maxsize, (int, float)), maxsize
-
-    last_yield = time.time()
-    while self.poll() is None:
-      to = timeout() if timeout else None
-      if to is not None:
-        to = max(to - (time.time() - last_yield), 0)
-      t, data = self.recv_any(
-          maxsize=maxsize() if callable(maxsize) else maxsize, timeout=to)
-      if data or to == 0:
-        yield t, data
-        last_yield = time.time()
-
-    # Read all remaining output in the pipes.
-    # There are 3 cases:
-    # - pipes get closed automatically by the calling process before it exits
-    # - pipes are closed automatically by the OS
-    # - pipes are kept open due to grandchild processes outliving the
-    #   child process.
-    while True:
-      ms = maxsize
-      if callable(maxsize):
-        ms = maxsize()
-      # timeout=0 is mainly to handle the case where a grandchild process
-      # outlives the process started.
-      t, data = self.recv_any(maxsize=ms, timeout=0)
-      if not data:
-        break
-      yield t, data
-
-  def recv_any(self, maxsize=None, timeout=None):
-    """Reads from the first pipe available from stdout and stderr.
-
-    Unlike wait(), does not throw TimeoutExpired.
-
-    Arguments:
-    - maxsize: Maximum number of bytes to return. Defaults to MAX_SIZE.
-    - timeout: If None, it is blocking. If 0 or above, will return None if no
-          data is available within |timeout| seconds.
-
-    Returns:
-      tuple(pipename or None, str(data)). pipename is one of 'stdout' or
-      'stderr'.
-    """
-    # recv_multi_impl will early exit on a closed connection. Loop accordingly
-    # to simplify call sites.
-    while True:
-      pipes = [
-        x for x in ((self.stderr, 'stderr'), (self.stdout, 'stdout')) if x[0]
-      ]
-      # If both stdout and stderr have the exact same file handle, they are
-      # effectively the same pipe. Deduplicate it since otherwise it confuses
-      # recv_multi_impl().
-      if len(pipes) == 2 and self.stderr.fileno() == self.stdout.fileno():
-        pipes.pop(0)
-
-      if not pipes:
-        return None, None
-      start = time.time()
-      conns, names = zip(*pipes)
-      index, data, closed = recv_multi_impl(conns, maxsize, timeout)
-      if index is None:
-        return index, data
-      if closed:
-        self._close(names[index])
-        if not data:
-          # Loop again. The other pipe may still be open.
-          if timeout:
-            timeout -= (time.time() - start)
-          continue
-
-      if self.universal_newlines and data:
-        data = self._translate_newlines(data)
-      return names[index], data
-
-  def recv_out(self, maxsize=None, timeout=None):
-    """Reads from stdout synchronously with timeout."""
-    return self._recv('stdout', maxsize, timeout)
-
-  def recv_err(self, maxsize=None, timeout=None):
-    """Reads from stderr synchronously with timeout."""
-    return self._recv('stderr', maxsize, timeout)
-
-  def terminate(self):
-    """Tries to do something saner on Windows than the stdlib.
-
-    Windows:
-      self.detached/CREATE_NEW_PROCESS_GROUP determines what can be used:
-      - If set, only SIGBREAK can be sent and it is sent to a single process.
-      - If not set, in theory only SIGINT can be used and *all processes* in
-         the process group receive it. In practice, we just kill the process.
-      See http://msdn.microsoft.com/library/windows/desktop/ms683155.aspx
-      The default on Windows is to call TerminateProcess() always, which is not
-      useful.
-
-    On Posix, always send SIGTERM.
-    """
-    try:
-      if subprocess.mswindows and self.detached:
-        return self.send_signal(signal.CTRL_BREAK_EVENT)
-      super(Popen, self).terminate()
-    except OSError:
-      # The function will throw if the process terminated in-between. Swallow
-      # this.
-      pass
-
-  def kill(self):
-    """Kills the process and its children if possible.
-
-    Swallows exceptions and returns True on success.
-    """
-    if self.gid:
-      try:
-        os.killpg(self.gid, signal.SIGKILL)
-      except OSError:
-        return False
-    else:
-      try:
-        super(Popen, self).kill()
-      except OSError:
-        return False
-    return True
-
-  def _close(self, which):
-    """Closes either stdout or stderr."""
-    getattr(self, which).close()
-    setattr(self, which, None)
-
-  def _recv(self, which, maxsize, timeout):
-    """Reads from one of stdout or stderr synchronously with timeout."""
-    conn = getattr(self, which)
-    if conn is None:
-      return None
-    _, data, closed = recv_multi_impl([conn], maxsize, timeout)
-    if closed:
-      self._close(which)
-    if self.universal_newlines and data:
-      data = self._translate_newlines(data)
-    return data
-
-
-@contextlib.contextmanager
-def set_signal_handler(signals, handler):
-  """Temporarily overrides signal handlers.
-
-  Useful when waiting for a child process to handle signals like SIGTERM, so the
-  signal can be propagated to the child process.
-  """
-  previous = {s: signal.signal(s, handler) for s in signals}
-  try:
-    yield
-  finally:
-    for sig, h in previous.iteritems():
-      signal.signal(sig, h)
-
-
-def call(*args, **kwargs):
-  """Adds support for timeout."""
-  timeout = kwargs.pop('timeout', None)
-  return Popen(*args, **kwargs).wait(timeout)
-
-
-def check_call(*args, **kwargs):
-  """Adds support for timeout."""
-  retcode = call(*args, **kwargs)
-  if retcode:
-    raise CalledProcessError(retcode, kwargs.get('args') or args[0])
-  return 0
-
-
-def check_output(*args, **kwargs):
-  """Adds support for timeout."""
-  timeout = kwargs.pop('timeout', None)
-  if 'stdout' in kwargs:
-    raise ValueError('stdout argument not allowed, it will be overridden.')
-  process = Popen(stdout=PIPE, *args, **kwargs)
-  output, _ = process.communicate(timeout=timeout)
-  retcode = process.poll()
-  if retcode:
-    raise CalledProcessError(retcode, kwargs.get('args') or args[0], output)
-  return output
-
-
-def call_with_timeout(args, timeout, **kwargs):
-  """Runs an executable; kill it in case of timeout."""
-  proc = Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, **kwargs)
-  try:
-    out, err = proc.communicate(timeout=timeout)
-  except TimeoutExpired as e:
-    out = e.output
-    err = e.stderr
-    proc.kill()
-    proc.wait()
-  return out, err, proc.returncode, proc.duration()
-
-
-def inhibit_os_error_reporting():
-  """Inhibits error reporting UI and core files.
-
-  This function should be called as early as possible in the process lifetime.
-  """
-  global _OS_ERROR_REPORTING_INHIBITED
-  if not _OS_ERROR_REPORTING_INHIBITED:
-    _OS_ERROR_REPORTING_INHIBITED = True
-    if sys.platform == 'win32':
-      # Windows has a bad habit of opening a dialog when a console program
-      # crashes, rather than just letting it crash. Therefore, when a program
-      # crashes on Windows, we don't find out until the build step times out.
-      # This code prevents the dialog from appearing, so that we find out
-      # immediately and don't waste time waiting for a user to close the dialog.
-      # https://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
-      SEM_FAILCRITICALERRORS = 1
-      SEM_NOGPFAULTERRORBOX = 2
-      SEM_NOALIGNMENTFAULTEXCEPT = 0x8000
-      ctypes.windll.kernel32.SetErrorMode(
-          SEM_FAILCRITICALERRORS|SEM_NOGPFAULTERRORBOX|
-            SEM_NOALIGNMENTFAULTEXCEPT)
-  # TODO(maruel): Other OSes.
-  # - OSX, need to figure out a way to make the following process tree local:
-  #     defaults write com.apple.CrashReporter UseUNC 1
-  #     defaults write com.apple.CrashReporter DialogType none
-  # - Ubuntu, disable apport if needed.
-
-
-def split(data, sep='\n'):
-  """Splits pipe data by |sep|. Does some buffering.
-
-  For example, [('stdout', 'a\nb'), ('stdout', '\n'), ('stderr', 'c\n')] ->
-  [('stdout', 'a'), ('stdout', 'b'), ('stderr', 'c')].
-
-  Args:
-    data: iterable of tuples (pipe_name, bytes).
-
-  Returns:
-    An iterator of tuples (pipe_name, bytes) where bytes is the input data
-    but split by sep into separate tuples.
-  """
-  # A dict {pipe_name -> list of pending chunks without separators}
-  pending_chunks = collections.defaultdict(list)
-  for pipe_name, chunk in data:
-    if chunk is None:
-      # Happens if a pipe is closed.
-      continue
-
-    pending = pending_chunks[pipe_name]
-    start = 0  # offset in chunk to start |sep| search from
-    while start < len(chunk):
-      j = chunk.find(sep, start)
-      if j == -1:
-        pending_chunks[pipe_name].append(chunk[start:])
-        break
-
-      to_emit = chunk[start:j]
-      start = j + 1
-      if pending:
-        # prepend and forget
-        to_emit = ''.join(pending) + to_emit
-        pending = []
-        pending_chunks[pipe_name] = pending
-      yield pipe_name, to_emit
-
-  # Emit remaining chunks that don't end with separators as is.
-  for pipe_name, chunks in sorted(pending_chunks.iteritems()):
-    if chunks:
-      yield pipe_name, ''.join(chunks)
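A short sketch of the streaming API of the subprocess42 module above (the child command is illustrative; per the docstrings, yield_any() yields (None, None) when the timeout elapses with no output):

    import sys

    proc = subprocess42.Popen(
        ['python', 'worker.py'],  # hypothetical child command
        stdout=subprocess42.PIPE, stderr=subprocess42.PIPE)
    for pipe_name, data in proc.yield_any(timeout=1):
      if pipe_name is not None:
        sys.stdout.write('%s: %s' % (pipe_name, data))
    proc.wait()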
diff --git a/tools/swarming_client/utils/threading_utils.py b/tools/swarming_client/utils/threading_utils.py
deleted file mode 100644
index eef6ac8..0000000
--- a/tools/swarming_client/utils/threading_utils.py
+++ /dev/null
@@ -1,812 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Classes and functions related to threading."""
-
-import functools
-import inspect
-import logging
-import os
-import Queue
-import sys
-import threading
-import time
-import traceback
-
-
-# Priorities for tasks in AutoRetryThreadPool, particular values are important.
-PRIORITY_HIGH = 1 << 8
-PRIORITY_MED = 2 << 8
-PRIORITY_LOW = 3 << 8
-
-
-class LockWithAssert(object):
-  """Wrapper around (non recursive) Lock that tracks its owner."""
-
-  def __init__(self):
-    self._lock = threading.Lock()
-    self._owner = None
-
-  def __enter__(self):
-    self._lock.acquire()
-    assert self._owner is None
-    self._owner = threading.current_thread()
-
-  def __exit__(self, _exc_type, _exec_value, _traceback):
-    self.assert_locked('Releasing unowned lock')
-    self._owner = None
-    self._lock.release()
-    return False
-
-  def assert_locked(self, msg=None):
-    """Asserts the lock is owned by the running thread."""
-    assert self._owner == threading.current_thread(), msg
-
-
-class ThreadPoolError(Exception):
-  """Base class for exceptions raised by ThreadPool."""
-  pass
-
-
-class ThreadPoolEmpty(ThreadPoolError):
-  """Trying to get task result from a thread pool with no pending tasks."""
-  pass
-
-
-class ThreadPoolClosed(ThreadPoolError):
-  """Trying to do something with a closed thread pool."""
-  pass
-
-
-class ThreadPool(object):
-  """Multithreaded worker pool with priority support.
-
-  When the priority of tasks match, it works in strict FIFO mode.
-  """
-  QUEUE_CLASS = Queue.PriorityQueue
-
-  def __init__(self, initial_threads, max_threads, queue_size, prefix=None):
-    """Immediately starts |initial_threads| threads.
-
-    Arguments:
-      initial_threads: Number of threads to start immediately. Can be 0 if it is
-                       uncertain that threads will be needed.
-      max_threads: Maximum number of threads that will be started when all the
-                   threads are busy working. Often the number of CPU cores.
-      queue_size: Maximum number of tasks to buffer in the queue. 0 for
-                  unlimited queue. A non-zero value may make add_task()
-                  blocking.
-      prefix: Prefix to use for thread names. Pool's threads will be
-              named '<prefix>-<thread index>'.
-    """
-    prefix = prefix or 'tp-0x%0x' % id(self)
-    logging.debug(
-        'New ThreadPool(%d, %d, %d): %s', initial_threads, max_threads,
-        queue_size, prefix)
-    assert initial_threads <= max_threads
-    assert max_threads <= 1024
-
-    self.tasks = self.QUEUE_CLASS(queue_size)
-    self._max_threads = max_threads
-    self._prefix = prefix
-
-    # Used to assign indexes to tasks.
-    self._num_of_added_tasks_lock = threading.Lock()
-    self._num_of_added_tasks = 0
-
-    # Lock that protects everything below (including the condition variable).
-    self._lock = threading.Lock()
-
-    # Condition 'bool(_outputs) or bool(_exceptions) or _pending_count == 0'.
-    self._outputs_exceptions_cond = threading.Condition(self._lock)
-    self._outputs = []
-    self._exceptions = []
-
-    # Number of pending tasks (queued or being processed now).
-    self._pending_count = 0
-
-    # List of threads.
-    self._workers = []
-    # Number of threads that are waiting for new tasks.
-    self._ready = 0
-    # Number of threads already added to _workers, but not yet running the loop.
-    self._starting = 0
-    # True if close was called. Forbids adding new tasks.
-    self._is_closed = False
-
-    for _ in range(initial_threads):
-      self._add_worker()
-
-  def _add_worker(self):
-    """Adds one worker thread if there aren't too many already. Thread-safe."""
-    with self._lock:
-      if len(self._workers) >= self._max_threads or self._is_closed:
-        return False
-      worker = threading.Thread(
-        name='%s-%d' % (self._prefix, len(self._workers)), target=self._run)
-      self._workers.append(worker)
-      self._starting += 1
-    logging.debug('Starting worker thread %s', worker.name)
-    worker.daemon = True
-    worker.start()
-    return True
-
-  def add_task(self, priority, func, *args, **kwargs):
-    """Adds a task, a function to be executed by a worker.
-
-    Arguments:
-    - priority: priority of the task versus others. Lower priority takes
-                precedence.
-    - func: function to run. Can either return a return value to be added to the
-            output list or be a generator which can emit multiple values.
-    - args and kwargs: arguments to |func|. Note that if func mutates |args| or
-                       |kwargs| and that the task is retried, see
-                       AutoRetryThreadPool, the retry will use the mutated
-                       values.
-
-    Returns:
-      Index of the item added, e.g. the total number of enqueued items up to
-      now.
-    """
-    assert isinstance(priority, int)
-    assert callable(func)
-    with self._lock:
-      if self._is_closed:
-        raise ThreadPoolClosed('Can not add a task to a closed ThreadPool')
-      start_new_worker = (
-        # Pending task count plus new task > number of available workers.
-        self.tasks.qsize() + 1 > self._ready + self._starting and
-        # Enough slots.
-        len(self._workers) < self._max_threads
-      )
-      self._pending_count += 1
-    with self._num_of_added_tasks_lock:
-      self._num_of_added_tasks += 1
-      index = self._num_of_added_tasks
-    self.tasks.put((priority, index, func, args, kwargs))
-    if start_new_worker:
-      self._add_worker()
-    return index
-
-  def _run(self):
-    """Worker thread loop. Runs until a None task is queued."""
-    # Thread has started, adjust counters.
-    with self._lock:
-      self._starting -= 1
-      self._ready += 1
-    while True:
-      try:
-        task = self.tasks.get()
-      finally:
-        with self._lock:
-          self._ready -= 1
-      try:
-        if task is None:
-          # We're done.
-          return
-        _priority, _index, func, args, kwargs = task
-        if inspect.isgeneratorfunction(func):
-          for out in func(*args, **kwargs):
-            self._output_append(out)
-        else:
-          out = func(*args, **kwargs)
-          self._output_append(out)
-      except Exception as e:
-        logging.warning('Caught exception: %s', e)
-        exc_info = sys.exc_info()
-        logging.info(''.join(traceback.format_tb(exc_info[2])))
-        with self._outputs_exceptions_cond:
-          self._exceptions.append(exc_info)
-          self._outputs_exceptions_cond.notifyAll()
-      finally:
-        try:
-          # Mark thread as ready again, mark task as processed. Do it before
-          # waking up threads waiting on self.tasks.join(). Otherwise they might
-          # find ThreadPool still 'busy' and perform unnecessary wait on CV.
-          with self._outputs_exceptions_cond:
-            self._ready += 1
-            self._pending_count -= 1
-            if self._pending_count == 0:
-              self._outputs_exceptions_cond.notifyAll()
-          self.tasks.task_done()
-        except Exception as e:
-          # We need to catch and log this error here because this is the root
-          # function for the thread, nothing higher will catch the error.
-          logging.exception('Caught exception while marking task as done: %s',
-                            e)
-
-  def _output_append(self, out):
-    if out is not None:
-      with self._outputs_exceptions_cond:
-        self._outputs.append(out)
-        self._outputs_exceptions_cond.notifyAll()
-
-  def join(self):
-    """Extracts all the results from each thread, unordered.
-
-    Call repeatedly to extract all the exceptions if desired.
-
-    Note: will wait for all work items to be done before returning an exception.
-    To get an exception early, use get_one_result().
-    """
-    # TODO(maruel): Stop waiting as soon as an exception is caught.
-    self.tasks.join()
-    with self._outputs_exceptions_cond:
-      if self._exceptions:
-        e = self._exceptions.pop(0)
-        raise e[0], e[1], e[2]
-      out = self._outputs
-      self._outputs = []
-    return out
-
-  def get_one_result(self):
-    """Returns the next item that was generated or raises an exception if one
-    occurred.
-
-    Raises:
-      ThreadPoolEmpty - no results available.
-    """
-    # Get first available result.
-    for result in self.iter_results():
-      return result
-    # No results -> tasks queue is empty.
-    raise ThreadPoolEmpty('Task queue is empty')
-
-  def iter_results(self):
-    """Yields results as they appear until all tasks are processed."""
-    while True:
-      # Check for pending results.
-      result = None
-      self._on_iter_results_step()
-      with self._outputs_exceptions_cond:
-        if self._exceptions:
-          e = self._exceptions.pop(0)
-          raise e[0], e[1], e[2]
-        if self._outputs:
-          # Remember the result to yield it outside of the lock.
-          result = self._outputs.pop(0)
-        else:
-          # No pending tasks -> all tasks are done.
-          if not self._pending_count:
-            return
-          # Some task is queued, wait for its result to appear.
-          # Use non-None timeout so that process reacts to Ctrl+C and other
-          # signals, see http://bugs.python.org/issue8844.
-          self._outputs_exceptions_cond.wait(timeout=0.1)
-          continue
-      yield result
-
-  def close(self):
-    """Closes all the threads."""
-    # Ensure no new threads can be started, self._workers is effectively
-    # a constant after that and can be accessed outside the lock.
-    with self._lock:
-      if self._is_closed:
-        raise ThreadPoolClosed('Can not close already closed ThreadPool')
-      self._is_closed = True
-    for _ in range(len(self._workers)):
-      # Enqueueing None causes the worker to stop.
-      self.tasks.put(None)
-    for t in self._workers:
-      # 'join' without timeout blocks signal handlers, spin with timeout.
-      while t.is_alive():
-        t.join(30)
-    logging.debug(
-      'Thread pool \'%s\' closed: spawned %d threads total',
-      self._prefix, len(self._workers))
-
-  def abort(self):
-    """Empties the queue.
-
-    To be used when the pool should stop early, like when Ctrl-C was detected.
-
-    Returns:
-      Number of tasks cancelled.
-    """
-    index = 0
-    while True:
-      try:
-        self.tasks.get_nowait()
-        self.tasks.task_done()
-        index += 1
-      except Queue.Empty:
-        return index
-
-  def _on_iter_results_step(self):
-    pass
-
-  def __enter__(self):
-    """Enables 'with' statement."""
-    return self
-
-  def __exit__(self, _exc_type, _exc_value, _traceback):
-    """Enables 'with' statement."""
-    self.close()
-
-
-class AutoRetryThreadPool(ThreadPool):
-  """Automatically retries enqueued operations on exception."""
-  # See also PRIORITY_* module-level constants.
-  INTERNAL_PRIORITY_BITS = (1<<8) - 1
-
-  def __init__(self, exceptions, retries, *args, **kwargs):
-    """
-    Arguments:
-      exceptions: list of exception classes that can be retried on.
-      retries: maximum number of retries to do.
-    """
-    assert exceptions and all(issubclass(e, Exception) for e in exceptions), (
-        exceptions)
-    assert 1 <= retries <= self.INTERNAL_PRIORITY_BITS
-    super(AutoRetryThreadPool, self).__init__(*args, **kwargs)
-    self._swallowed_exceptions = tuple(exceptions)
-    self._retries = retries
-
-  def add_task(self, priority, func, *args, **kwargs):
-    """Tasks added must not use the lower priority bits since they are reserved
-    for retries.
-    """
-    assert (priority & self.INTERNAL_PRIORITY_BITS) == 0
-    return super(AutoRetryThreadPool, self).add_task(
-        priority,
-        self._task_executer,
-        priority,
-        None,
-        func,
-        *args,
-        **kwargs)
-
-  def add_task_with_channel(self, channel, priority, func, *args, **kwargs):
-    """Tasks added must not use the lower priority bits since they are reserved
-    for retries.
-    """
-    assert (priority & self.INTERNAL_PRIORITY_BITS) == 0
-    return super(AutoRetryThreadPool, self).add_task(
-        priority,
-        self._task_executer,
-        priority,
-        channel,
-        func,
-        *args,
-        **kwargs)
-
-  def _task_executer(self, priority, channel, func, *args, **kwargs):
-    """Wraps the function and automatically retry on exceptions."""
-    try:
-      result = func(*args, **kwargs)
-      if channel is None:
-        return result
-      channel.send_result(result)
-    # pylint: disable=catching-non-exception
-    except self._swallowed_exceptions as e:
-      # Retry a few times, lowering the priority.
-      actual_retries = priority & self.INTERNAL_PRIORITY_BITS
-      if actual_retries < self._retries:
-        priority += 1
-        logging.debug(
-            'Swallowed exception \'%s\'. Retrying at lower priority %X',
-            e, priority)
-        super(AutoRetryThreadPool, self).add_task(
-            priority,
-            self._task_executer,
-            priority,
-            channel,
-            func,
-            *args,
-            **kwargs)
-        return
-      if channel is None:
-        raise
-      channel.send_exception()
-    except Exception:
-      if channel is None:
-        raise
-      channel.send_exception()
-
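# A minimal, self-contained sketch (illustrative names only) of the retry
# encoding described above: the low INTERNAL_PRIORITY_BITS of a task's
# priority hold its retry count, so each retry re-enqueues the task with a
# numerically higher priority value (i.e. a lower effective priority) while
# the caller-visible priority stays in the high bits.
_EXAMPLE_PRIORITY_BITS = (1 << 8) - 1

def _example_encode_priority(user_priority, retries_done):
  assert user_priority & _EXAMPLE_PRIORITY_BITS == 0
  return user_priority + retries_done

assert _example_encode_priority(0x100, 2) & _EXAMPLE_PRIORITY_BITS == 2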
-
-class IOAutoRetryThreadPool(AutoRetryThreadPool):
-  """Thread pool that automatically retries on IOError.
-
-  Intended for IO-bound tasks, so the default maximum number of worker threads
-  is independent of the number of CPU cores.
-  """
-  # Initial and maximum number of worker threads.
-  INITIAL_WORKERS = 2
-  MAX_WORKERS = 16 if sys.maxsize > 2L**32 else 8
-  RETRIES = 5
-
-  def __init__(self):
-    super(IOAutoRetryThreadPool, self).__init__(
-        [IOError],
-        self.RETRIES,
-        self.INITIAL_WORKERS,
-        self.MAX_WORKERS,
-        0,
-        'io')
-
-
-class Progress(object):
-  """Prints progress and accepts updates thread-safely."""
-  def __init__(self, columns):
-    """Creates a Progress bar that will updates asynchronously from the worker
-    threads.
-
-    Arguments:
-      columns: list of tuple(name, initialvalue), defines both the number of
-               columns and their initial values.
-    """
-    assert all(
-        len(c) == 2 and isinstance(c[0], str) and isinstance(c[1], int)
-        for c in columns), columns
-    # Members to be used exclusively in the primary thread.
-    self.use_cr_only = True
-    self.unfinished_commands = set()
-    self.start = time.time()
-    self._last_printed_line = ''
-    self._columns = [c[1] for c in columns]
-    self._columns_lookup = dict((c[0], i) for i, c in enumerate(columns))
-    # Setting it to True forces a print on the first print_update() call.
-    self._value_changed = True
-
-    # To be used in all threads.
-    self._queued_updates = Queue.Queue()
-
-  def update_item(self, name, raw=False, **kwargs):
-    """Queue information to print out.
-
-    Arguments:
-      name: string to print out to describe something that was completed.
-      raw: if True, prints the data without the header.
-      <kwargs>: each argument name is the name of a column; its value is the
-                increment to apply to that column, usually 0 or 1.
-    """
-    assert isinstance(name, str)
-    assert isinstance(raw, bool)
-    assert all(isinstance(v, int) for v in kwargs.itervalues())
-    args = [(self._columns_lookup[k], v) for k, v in kwargs.iteritems() if v]
-    self._queued_updates.put((name, raw, args))
-
-  def print_update(self):
-    """Prints the current status."""
-    # Flush all the logging output so it doesn't appear within this output.
-    for handler in logging.root.handlers:
-      handler.flush()
-
-    got_one = False
-    while True:
-      try:
-        name, raw, args = self._queued_updates.get_nowait()
-      except Queue.Empty:
-        break
-
-      for k, v in args:
-        self._columns[k] += v
-      self._value_changed = bool(args)
-      if not name:
-        # Even if raw=True, there's nothing to print.
-        continue
-
-      got_one = True
-      if raw:
-        # Prints the data as-is.
-        self._last_printed_line = ''
-        sys.stdout.write('\n%s\n' % name.strip('\n'))
-      else:
-        line, self._last_printed_line = self._gen_line(name)
-        sys.stdout.write(line)
-
-    if not got_one and self._value_changed:
-      # Make sure a line is printed in the case where the statistics changed.
-      line, self._last_printed_line = self._gen_line('')
-      sys.stdout.write(line)
-      got_one = True
-    self._value_changed = False
-    if got_one:
-      # Ensure that all the output is flushed to prevent it from getting mixed
-      # with other output streams (like the logging streams).
-      sys.stdout.flush()
-
-    if self.unfinished_commands:
-      logging.debug('Waiting for the following commands to finish:\n%s',
-                    '\n'.join(self.unfinished_commands))
-
-  def _gen_line(self, name):
-    """Generates the line to be printed."""
-    next_line = ('[%s] %6.2fs %s') % (
-        self._render_columns(), time.time() - self.start, name)
-    # Fill it with whitespace only if self.use_cr_only is set.
-    prefix = ''
-    if self.use_cr_only and self._last_printed_line:
-      prefix = '\r'
-    if self.use_cr_only:
-      suffix = ' ' * max(0, len(self._last_printed_line) - len(next_line))
-    else:
-      suffix = '\n'
-    return '%s%s%s' % (prefix, next_line, suffix), next_line
-
-  def _render_columns(self):
-    """Renders the columns."""
-    columns_as_str = map(str, self._columns)
-    max_len = max(map(len, columns_as_str))
-    return '/'.join(i.rjust(max_len) for i in columns_as_str)
-
-
-class QueueWithProgress(Queue.PriorityQueue):
-  """Implements progress support in join()."""
-  def __init__(self, progress, *args, **kwargs):
-    Queue.PriorityQueue.__init__(self, *args, **kwargs)
-    self.progress = progress
-
-  def task_done(self):
-    """Contrary to Queue.task_done(), it wakes self.all_tasks_done at each task
-    done.
-    """
-    with self.all_tasks_done:
-      try:
-        unfinished = self.unfinished_tasks - 1
-        if unfinished < 0:
-          raise ValueError('task_done() called too many times')
-        self.unfinished_tasks = unfinished
-        # This is less efficient, because we want the Progress to be updated.
-        self.all_tasks_done.notify_all()
-      except Exception as e:
-        logging.exception('task_done threw an exception.\n%s', e)
-
-  def wake_up(self):
-    """Wakes up all_tasks_done.
-
-    Unlike task_done(), it does not subtract one from self.unfinished_tasks.
-    """
-    # TODO(maruel): This is highly inefficient, since the listener is awakened
-    # twice: once per output, once per task. There should be no relationship
-    # between the number of outputs and the number of input tasks.
-    with self.all_tasks_done:
-      self.all_tasks_done.notify_all()
-
-  def join(self):
-    """Calls print_update() whenever possible."""
-    self.progress.print_update()
-    with self.all_tasks_done:
-      while self.unfinished_tasks:
-        self.progress.print_update()
-        # Use a short wait timeout so updates are printed in a timely manner.
-        # TODO(maruel): Find a way so Progress.queue and self.all_tasks_done
-        # share the same underlying event so no polling is necessary.
-        self.all_tasks_done.wait(0.1)
-      self.progress.print_update()
-
-
-class ThreadPoolWithProgress(ThreadPool):
-  QUEUE_CLASS = QueueWithProgress
-
-  def __init__(self, progress, *args, **kwargs):
-    self.QUEUE_CLASS = functools.partial(self.QUEUE_CLASS, progress)
-    super(ThreadPoolWithProgress, self).__init__(*args, **kwargs)
-
-  def _output_append(self, out):
-    """Also wakes up the listener on new completed test_case."""
-    super(ThreadPoolWithProgress, self)._output_append(out)
-    self.tasks.wake_up()
-
-  def _on_iter_results_step(self):
-    self.tasks.progress.print_update()
-
-
-class DeadlockDetector(object):
-  """Context manager that can detect deadlocks.
-
-  It will dump stack frames of all running threads if its 'ping' method isn't
-  called in time.
-
-  Usage:
-    with DeadlockDetector(timeout=60) as detector:
-      for item in some_work():
-        ...
-        detector.ping()
-        ...
-
-  Arguments:
-    timeout - maximum allowed time between calls to 'ping'.
-  """
-
-  def __init__(self, timeout):
-    self.timeout = timeout
-    self._thread = None
-    # Thread stop condition. Also lock for shared variables below.
-    self._stop_cv = threading.Condition()
-    self._stop_flag = False
-    # Time when 'ping' was called last time.
-    self._last_ping = None
-    # True if pings are coming on time.
-    self._alive = True
-
-  def __enter__(self):
-    """Starts internal watcher thread."""
-    assert self._thread is None
-    self.ping()
-    self._thread = threading.Thread(name='deadlock-detector', target=self._run)
-    self._thread.daemon = True
-    self._thread.start()
-    return self
-
-  def __exit__(self, *_args):
-    """Stops internal watcher thread."""
-    assert self._thread is not None
-    with self._stop_cv:
-      self._stop_flag = True
-      self._stop_cv.notify()
-    self._thread.join()
-    self._thread = None
-    self._stop_flag = False
-
-  def ping(self):
-    """Notify detector that main thread is still running.
-
-    Should be called periodically to inform the detector that everything is
-    running as it should.
-    """
-    with self._stop_cv:
-      self._last_ping = time.time()
-      self._alive = True
-
-  def _run(self):
-    """Loop that watches for pings and dumps threads state if ping is late."""
-    with self._stop_cv:
-      while not self._stop_flag:
-        # Skipped deadline? Dump threads and switch to 'not alive' state.
-        if self._alive and time.time() > self._last_ping + self.timeout:
-          self.dump_threads(time.time() - self._last_ping, True)
-          self._alive = False
-
-        # Pings are on time?
-        if self._alive:
-          # Wait until the moment we need to dump stack traces.
-          # Most probably some other thread will call 'ping' to move the
-          # deadline further in time. We don't bother to wake up after each
-          # 'ping', only right before the initially expected deadline.
-          self._stop_cv.wait(self._last_ping + self.timeout - time.time())
-        else:
-          # Skipped some pings previously. Just periodically silently check
-          # for new pings with some arbitrary frequency.
-          self._stop_cv.wait(self.timeout * 0.1)
-
-  @staticmethod
-  def dump_threads(timeout=None, skip_current_thread=False):
-    """Dumps stack frames of all running threads."""
-    all_threads = threading.enumerate()
-    current_thread_id = threading.current_thread().ident
-
-    # Collect tracebacks: thread name -> traceback string.
-    tracebacks = {}
-
-    # pylint: disable=W0212
-    for thread_id, frame in sys._current_frames().iteritems():
-      # Don't dump deadlock detector's own thread, it's boring.
-      if thread_id == current_thread_id and skip_current_thread:
-        continue
-
-      # Try to get more informative symbolic thread name.
-      name = 'untitled'
-      for thread in all_threads:
-        if thread.ident == thread_id:
-          name = thread.name
-          break
-      name += ' #%d' % (thread_id,)
-      tracebacks[name] = ''.join(traceback.format_stack(frame))
-
-    # Function to print a message. Makes it easier to change output destination.
-    def output(msg):
-      logging.warning(msg.rstrip())
-
-    # Print tracebacks, sorting them by thread name. That way a thread pool's
-    # threads will be printed as one group.
-    output('=============== Potential deadlock detected ===============')
-    if timeout is not None:
-      output('No pings in last %d sec.' % (timeout,))
-    output('Dumping stack frames for all threads:')
-    for name in sorted(tracebacks):
-      output('Traceback for \'%s\':\n%s' % (name, tracebacks[name]))
-    output('===========================================================')
-
-
-class TaskChannel(object):
-  """Queue of results of async task execution."""
-
-  class Timeout(Exception):
-    """Raised by 'pull' in case of timeout."""
-
-  _ITEM_RESULT = 0
-  _ITEM_EXCEPTION = 1
-
-  def __init__(self):
-    self._queue = Queue.Queue()
-
-  def send_result(self, result):
-    """Enqueues a result of task execution."""
-    self._queue.put((self._ITEM_RESULT, result))
-
-  def send_exception(self, exc_info=None):
-    """Enqueue an exception raised by a task.
-
-    Arguments:
-      exc_info: If given, should be the 3-tuple returned by sys.exc_info();
-                the default is the current value of sys.exc_info(). Use the
-                default in 'except' blocks to capture the exception currently
-                being processed.
-    """
-    exc_info = exc_info or sys.exc_info()
-    assert isinstance(exc_info, tuple) and len(exc_info) == 3
-    # Transparently passing Timeout would break the 'pull' contract, since a
-    # caller has no way to tell whether the exception came from the task or
-    # from 'pull' itself. Transform Timeout into a generic RuntimeError with an
-    # explanation.
-    if isinstance(exc_info[1], TaskChannel.Timeout):
-      exc_info = (
-          RuntimeError,
-          RuntimeError('Task raised Timeout exception'),
-          exc_info[2])
-    self._queue.put((self._ITEM_EXCEPTION, exc_info))
-
-  def pull(self, timeout=None):
-    """Dequeues available result or exception.
-
-    Args:
-      timeout: if not None will block no longer than |timeout| seconds and will
-          raise TaskChannel.Timeout exception if no results are available.
-
-    Returns:
-      Whatever task pushes to the queue by calling 'send_result'.
-
-    Raises:
-      TaskChannel.Timeout: waiting longer than |timeout|.
-      Whatever exception task raises.
-    """
-    # Never use timeout == None: in that case signal handlers are not called
-    # (at least on Python 2.7, http://bugs.python.org/issue8844).
-    while True:
-      try:
-        item_type, value = self._queue.get(
-            timeout=timeout if timeout is not None else 30.0)
-        break
-      except Queue.Empty:
-        if timeout is None:
-          continue
-        raise TaskChannel.Timeout()
-    if item_type == self._ITEM_RESULT:
-      return value
-    if item_type == self._ITEM_EXCEPTION:
-      # 'value' is the captured sys.exc_info() 3-tuple. Use the extended raise
-      # syntax to preserve the stack frame of the original exception (that was
-      # raised in another thread).
-      assert isinstance(value, tuple) and len(value) == 3
-      raise value[0], value[1], value[2]
-    assert False, 'Impossible queue item type: %r' % item_type
-
-  def wrap_task(self, task):
-    """Decorator that makes a function push results into this channel."""
-    @functools.wraps(task)
-    def wrapped(*args, **kwargs):
-      try:
-        self.send_result(task(*args, **kwargs))
-      except Exception:
-        self.send_exception()
-    return wrapped
-
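# A brief usage sketch of TaskChannel above: a worker thread pushes either the
# result or the captured exception, and pull() returns the former or re-raises
# the latter in the calling thread. The workload is a stand-in for
# illustration only.
def _example_workload():
  return 6 * 7

_example_channel = TaskChannel()
_example_thread = threading.Thread(
    target=_example_channel.wrap_task(_example_workload))
_example_thread.start()
assert _example_channel.pull(timeout=60) == 42
_example_thread.join()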
-
-def num_processors():
-  """Returns the number of processors.
-
-  Python on OSX 10.6 raises a NotImplementedError exception.
-  """
-  try:
-    # Multiprocessing
-    import multiprocessing
-    return multiprocessing.cpu_count()
-  except:  # pylint: disable=W0702
-    try:
-      # Mac OS 10.6
-      return int(os.sysconf('SC_NPROCESSORS_ONLN'))  # pylint: disable=E1101
-    except:
-      # Some of the windows builders seem to get here.
-      return 4
diff --git a/tools/swarming_client/utils/tools.py b/tools/swarming_client/utils/tools.py
deleted file mode 100644
index 823ae51..0000000
--- a/tools/swarming_client/utils/tools.py
+++ /dev/null
@@ -1,329 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Various utility functions and classes not specific to any single area."""
-
-import atexit
-import cStringIO
-import functools
-import json
-import logging
-import os
-import re
-import sys
-import threading
-import time
-
-import utils
-from utils import zip_package
-
-
-# Path to (possibly extracted from zip) cacert.pem bundle file.
-# See get_cacerts_bundle().
-_ca_certs = None
-_ca_certs_lock = threading.Lock()
-
-
-# @cached decorators registered by report_cache_stats_at_exit.
-_caches = []
-_caches_lock = threading.Lock()
-
-
-class Profiler(object):
-  """Context manager that records time spend inside its body."""
-  def __init__(self, name):
-    self.name = name
-    self.start_time = None
-
-  def __enter__(self):
-    self.start_time = time.time()
-    return self
-
-  def __exit__(self, _exc_type, _exec_value, _traceback):
-    time_taken = time.time() - self.start_time
-    logging.info('Profiling: Section %s took %3.3f seconds',
-                 self.name, time_taken)
-
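# A short usage sketch of the Profiler context manager above: the time spent
# in the body is logged at INFO level when the block exits.
with Profiler('example-section'):
  time.sleep(0.01)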
-
-class ProfileCounter(object):
-  """Records total time spent in a chunk of code during lifetime of a process.
-
-  Recursive calls count as a single call (i.e. only the time spent in the outer
-  call is recorded).
-
-  Auto-registers itself in a global list when instantiated. All counters are
-  reported at process exit (in an atexit hook). Best used via the @profile
-  decorator.
-  """
-
-  _instances_lock = threading.Lock()
-  _instances = []
-
-  @staticmethod
-  def summarize_all():
-    print('\nProfiling report:')
-    print('-' * 80)
-    print(
-        '{:<38}{:<10}{:<16}{:<16}'.format(
-            'Name', 'Count', 'Total ms', 'Average ms'))
-    print('-' * 80)
-    with ProfileCounter._instances_lock:
-      for i in sorted(ProfileCounter._instances, key=lambda x: -x.total_time):
-        print(
-            '{:<38}{:<10}{:<16.1f}{:<16.1f}'.format(
-                i.name,
-                i.call_count,
-                i.total_time * 1000,
-                i.average_time * 1000))
-    print('-' * 80)
-
-  def __init__(self, name):
-    self._lock = threading.Lock()
-    self._call_count = 0
-    self._name = name
-    self._total_time = 0
-    self._active = threading.local()
-    with self._instances_lock:
-      self._instances.append(self)
-      if len(self._instances) == 1:
-        atexit.register(ProfileCounter.summarize_all)
-
-  @property
-  def name(self):
-    return self._name
-
-  @property
-  def call_count(self):
-    return self._call_count
-
-  @property
-  def total_time(self):
-    return self._total_time
-
-  @property
-  def average_time(self):
-    with self._lock:
-      if self._call_count:
-        return self._total_time / self._call_count
-      return 0
-
-  def __enter__(self):
-    recursion = getattr(self._active, 'recursion', 0)
-    if not recursion:
-      self._active.started = time.time()
-    self._active.recursion = recursion + 1
-
-  def __exit__(self, _exc_type, _exec_value, _traceback):
-    self._active.recursion -= 1
-    if not self._active.recursion:
-      time_inside = time.time() - self._active.started
-      with self._lock:
-        self._total_time += time_inside
-        self._call_count += 1
-
-
-def profile(func):
-  """Decorator that profiles a function if SWARMING_PROFILE env var is set.
-
-  Gathers the number of calls to the function and the total time spent inside.
-  The final report is emitted to stdout at process exit.
-  """
-  # No performance impact whatsoever if SWARMING_PROFILE is not set.
-  if os.environ.get('SWARMING_PROFILE') != '1':
-    return func
-  timer = ProfileCounter(func.__name__)
-  @functools.wraps(func)
-  def wrapper(*args, **kwargs):
-    with timer:
-      return func(*args, **kwargs)
-  return wrapper
-
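# A short usage sketch of @profile: when SWARMING_PROFILE=1 is set in the
# environment, the call count and cumulative time of the decorated function
# are reported at process exit; otherwise the decorator is a no-op.
@profile
def _example_busy_work():
  return sum(range(1000))

_example_busy_work()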
-
-def report_cache_stats_at_exit(func, cache):
-  """Registers a hook that reports state of the cache on the process exit."""
-  # Very dumb. Tries to account for object reuse though.
-  def get_size(obj, seen):
-    # Use id(...) to avoid triggering __hash__ and comparing by value instead.
-    if id(obj) in seen:
-      return 0
-    seen.add(id(obj))
-    size = sys.getsizeof(obj)
-    if isinstance(obj, (list, tuple)):
-      return size + sum(get_size(x, seen) for x in obj)
-    elif isinstance(obj, dict):
-      return size + sum(
-          get_size(k, seen) + get_size(v, seen) for k, v in obj.iteritems())
-    return size
-
-  def report_caches_state():
-    print('\nFunction cache report:')
-    print('-' * 80)
-    print('{:<40}{:<16}{:<26}'.format('Name', 'Items', 'Approx size, KB'))
-    print('-' * 80)
-    with _caches_lock:
-      total = 0
-      seen_objects = set()
-      for func, cache in sorted(_caches, key=lambda x: -len(x[1])):
-        size = get_size(cache, seen_objects)
-        total += size
-        print(
-            '{:<40}{:<16}{:<26}'.format(func.__name__, len(cache), size / 1024))
-    print('-' * 80)
-    print('Total: %.1f MB' % (total / 1024 / 1024,))
-    print('-' * 80)
-
-  with _caches_lock:
-    _caches.append((func, cache))
-    if len(_caches) == 1:
-      atexit.register(report_caches_state)
-
-
-def cached(func):
-  """Decorator that permanently caches a result of function invocation.
-
-  It tries to be super fast and because of that is somewhat limited:
-    * The function being cached can accept only positional arguments.
-    * All arguments should be hashable.
-    * The function may be called multiple times with the same arguments in a
-      multithreaded environment.
-    * The cache is never cleared.
-
-  If SWARMING_PROFILE env var is set, will produce a report about the state of
-  the cache at the process exit (number of items and approximate size).
-  """
-  empty = object()
-  cache = {}
-
-  if os.environ.get('SWARMING_PROFILE') == '1':
-    report_cache_stats_at_exit(func, cache)
-
-  @functools.wraps(func)
-  def wrapper(*args):
-    v = cache.get(args, empty)
-    if v is empty:
-      v = func(*args)
-      cache[args] = v
-    return v
-
-  return wrapper
-
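# A short usage sketch of @cached (hypothetical function): the first call per
# distinct argument tuple does the work, later calls return the stored value.
@cached
def _example_square(x):
  return x * x

assert _example_square(4) == 16  # computed
assert _example_square(4) == 16  # served from the permanent cache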
-
-class Unbuffered(object):
-  """Disable buffering on a file object."""
-  def __init__(self, stream):
-    self.stream = stream
-
-  def write(self, data):
-    self.stream.write(data)
-    if '\n' in data:
-      self.stream.flush()
-
-  def __getattr__(self, attr):
-    return getattr(self.stream, attr)
-
-
-def disable_buffering():
-  """Makes this process and child processes stdout unbuffered."""
-  if not os.environ.get('PYTHONUNBUFFERED'):
-    # Since sys.stdout is a C++ object, it's impossible to do
-    # sys.stdout.write = lambda...
-    sys.stdout = Unbuffered(sys.stdout)
-    os.environ['PYTHONUNBUFFERED'] = 'x'
-
-
-def fix_python_path(cmd):
-  """Returns the fixed command line to call the right python executable."""
-  out = cmd[:]
-  if out[0] == 'python':
-    out[0] = sys.executable
-  elif out[0].endswith('.py'):
-    out.insert(0, sys.executable)
-  return out
-
-
-def read_json(filepath):
-  with open(filepath, 'r') as f:
-    return json.load(f)
-
-
-def write_json(filepath_or_handle, data, dense):
-  """Writes data into filepath or file handle encoded as json.
-
-  If dense is True, the json is packed. Otherwise, it is human readable.
-  """
-  if dense:
-    kwargs = {'sort_keys': True, 'separators': (',',':')}
-  else:
-    kwargs = {'sort_keys': True, 'indent': 2}
-
-  if hasattr(filepath_or_handle, 'write'):
-    json.dump(data, filepath_or_handle, **kwargs)
-  else:
-    with open(filepath_or_handle, 'wb') as f:
-      json.dump(data, f, **kwargs)
-
-
-def format_json(data, dense):
-  """Returns a string with json encoded data.
-
-  If dense is True, the json is packed. Otherwise, it is human readable.
-  """
-  buf = cStringIO.StringIO()
-  write_json(buf, data, dense)
-  return buf.getvalue()
-
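# A small standalone illustration of the 'dense' flag above: these mirror the
# options write_json() passes to json.dump for the two modes.
_example_packed = json.dumps(
    {'a': [1, 2]}, sort_keys=True, separators=(',', ':'))  # '{"a":[1,2]}'
_example_readable = json.dumps(
    {'a': [1, 2]}, sort_keys=True, indent=2)  # multi-line, two-space indent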
-
-def gen_blacklist(regexes):
-  """Returns a lambda to be used as a blacklist."""
-  compiled = [re.compile(i) for i in regexes or []]
-  return lambda f: any(j.match(f) for j in compiled)
-
-
-def get_bool_env_var(name):
-  """Return True if integer environment variable |name| value is non zero.
-
-  If environment variable is missing or is set to '0', returns False.
-  """
-  return bool(int(os.environ.get(name, '0')))
-
-
-def is_headless():
-  """True if running in non-interactive mode on some bot machine.
-
-  Examines os.environ for presence of SWARMING_HEADLESS var.
-  """
-  headless_env_keys = (
-    # This is Chromium specific. Set when running under buildbot slave.
-    'CHROME_HEADLESS',
-    # Set when running under swarm bot.
-    'SWARMING_HEADLESS',
-  )
-  return any(get_bool_env_var(key) for key in headless_env_keys)
-
-
-def get_cacerts_bundle():
-  """Returns path to a file with CA root certificates bundle.
-
-  Python's ssl module needs a real file on disk, so if code is running from
-  a zip archive, we need to extract the file first.
-  """
-  global _ca_certs
-  with _ca_certs_lock:
-    if _ca_certs is not None and os.path.exists(_ca_certs):
-      return _ca_certs
-    # Some rogue process clears /tmp and causes cacert.pem to disappear. Extract
-    # to current directory instead. We use our own bundled copy of cacert.pem.
-    _ca_certs = zip_package.extract_resource(utils, 'cacert.pem', temp_dir='.')
-    return _ca_certs
-
-
-def sliding_timeout(timeout):
-  """Returns a function that returns how much time left till (now+timeout).
-
-  If timeout is None, the returned function always returns None.
-  """
-  if timeout is None:
-    return lambda: None
-  deadline = time.time() + timeout
-  return lambda: deadline - time.time()
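# A minimal usage sketch of sliding_timeout(): one overall deadline shared
# across several blocking steps; the short sleeps stand in for real work.
_example_time_left = sliding_timeout(5.0)
for _ in range(3):
  _example_remaining = _example_time_left()
  if _example_remaining <= 0:
    raise IOError('overall deadline exceeded')
  time.sleep(min(0.01, _example_remaining))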
diff --git a/tools/swarming_client/utils/zip_package.py b/tools/swarming_client/utils/zip_package.py
deleted file mode 100755
index 539709b..0000000
--- a/tools/swarming_client/utils/zip_package.py
+++ /dev/null
@@ -1,364 +0,0 @@
-# Copyright 2013 The LUCI Authors. All rights reserved.
-# Use of this source code is governed under the Apache License, Version 2.0
-# that can be found in the LICENSE file.
-
-"""Utilities to work with importable python zip packages."""
-
-import atexit
-import collections
-import cStringIO as StringIO
-import hashlib
-import os
-import pkgutil
-import re
-import sys
-import tempfile
-import threading
-import zipfile
-import zipimport
-
-
-# Regular expressions for files to exclude from a package by default.
-EXCLUDE_LIST = (
-  # Ignore hidden files (including .svn and .git).
-  r'\..*',
-
-  # Ignore precompiled python files since they depend on the python version and
-  # we don't want the zip package to be version-dependent.
-  r'.*\.pyc$',
-  r'.*\.pyo$',
-)
-
-
-# Temporary files extracted by extract_resource. Removed in atexit hook.
-_extracted_files = []
-_extracted_files_lock = threading.Lock()
-
-
-# Patch zipimport.zipimporter hook to accept unicode strings
-def zipimporter_unicode(archivepath):
-  if isinstance(archivepath, unicode):
-    archivepath = archivepath.encode(sys.getfilesystemencoding())
-  return zipimport.zipimporter(archivepath)
-
-
-for i, hook in enumerate(sys.path_hooks):
-  if hook is zipimport.zipimporter:
-    sys.path_hooks[i] = zipimporter_unicode
-
-
-class ZipPackageError(RuntimeError):
-  """Failed to create a zip package."""
-
-
-class ZipPackage(object):
-  """A set of files that can be zipped to file on disk or into memory buffer.
-
-  Usage:
-    package = ZipPackage(root)
-    package.add_file('some_file.py', '__main__.py')
-    package.add_directory('some_directory')
-    package.add_buffer('generated.py', 'any string here')
-
-    buf = package.zip_into_buffer()
-    package.zip_into_file('my_zip.zip')
-  """
-
-  _FileRef = collections.namedtuple('_FileRef', ['abs_path'])
-  _BufferRef = collections.namedtuple('_BufferRef', ['buffer'])
-
-  def __init__(self, root):
-    """Initializes new empty ZipPackage.
-
-    All files added to the package should live under the |root|. It will also
-    be used when calculating relative paths of files in the package.
-
-    |root| must be an absolute path.
-    """
-    assert os.path.isabs(root), root
-    self.root = root.rstrip(os.sep) + os.sep
-    self._items = {}
-
-  @property
-  def files(self):
-    """Files added to the package as a list of relative paths in zip."""
-    return self._items.keys()
-
-  def add_file(self, absolute_path, archive_path=None):
-    """Adds a single file to the package.
-
-    |archive_path| is a relative path in archive for this file, by default it's
-    equal to |absolute_path| taken relative to |root|. In that case
-    |absolute_path| must be in a |root| subtree.
-
-    If |archive_path| is given, |absolute_path| can point to any file.
-    """
-    assert os.path.isabs(absolute_path), absolute_path
-    absolute_path = os.path.normpath(absolute_path)
-    # If |archive_path| is not given, ensure that |absolute_path| is under root.
-    if not archive_path and not absolute_path.startswith(self.root):
-      raise ZipPackageError(
-          'Path %s is not inside root %s' % (absolute_path, self.root))
-    if not os.path.exists(absolute_path):
-      raise ZipPackageError('No such file: %s' % absolute_path)
-    if not os.path.isfile(absolute_path):
-      raise ZipPackageError('Object %s is not a regular file' % absolute_path)
-    archive_path = archive_path or absolute_path[len(self.root):]
-    self._add_entry(archive_path, ZipPackage._FileRef(absolute_path))
-
-  def add_python_file(self, absolute_path, archive_path=None):
-    """Adds a single python file to the package.
-
-    Recognizes *.pyc files and adds corresponding *.py file instead.
-    """
-    base, ext = os.path.splitext(absolute_path)
-    if ext in ('.pyc', '.pyo'):
-      absolute_path = base + '.py'
-    elif ext != '.py':
-      raise ZipPackageError('Not a python file: %s' % absolute_path)
-    self.add_file(absolute_path, archive_path)
-
-  def add_directory(self, absolute_path, archive_path=None,
-                    exclude=EXCLUDE_LIST):
-    """Recursively adds all files from given directory to the package.
-
-    |archive_path| is a relative path in archive for this directory, by default
-    it's equal to |absolute_path| taken relative to |root|. In that case
-    |absolute_path| must be in |root| subtree.
-
-    If |archive_path| is given, |absolute_path| can point to any directory.
-
-    |exclude| defines a list of regular expressions for file names to exclude
-    from the package.
-
-    Only non-empty directories will be actually added to the package.
-    """
-    assert os.path.isabs(absolute_path), absolute_path
-    absolute_path = os.path.normpath(absolute_path).rstrip(os.sep) + os.sep
-    # If |archive_path| is not given, ensure that |path| is under root.
-    if not archive_path and not absolute_path.startswith(self.root):
-      raise ZipPackageError(
-          'Path %s is not inside root %s' % (absolute_path, self.root))
-    if not os.path.exists(absolute_path):
-      raise ZipPackageError('No such directory: %s' % absolute_path)
-    if not os.path.isdir(absolute_path):
-      raise ZipPackageError('Object %s is not a directory' % absolute_path)
-
-    # Precompile regular expressions.
-    exclude_regexps = [re.compile(r) for r in exclude]
-    # Returns True if |name| should be excluded from the package.
-    should_exclude = lambda name: any(r.match(name) for r in exclude_regexps)
-
-    archive_path = archive_path or absolute_path[len(self.root):]
-    for cur_dir, dirs, files in os.walk(absolute_path):
-      # Add all non-excluded files.
-      for name in files:
-        if not should_exclude(name):
-          absolute = os.path.join(cur_dir, name)
-          relative = absolute[len(absolute_path):]
-          assert absolute.startswith(absolute_path)
-          self.add_file(absolute, os.path.join(archive_path, relative))
-      # Remove excluded directories from enumeration.
-      for name in [d for d in dirs if should_exclude(d)]:
-        dirs.remove(name)
-
-  def add_buffer(self, archive_path, buf):
-    """Adds a contents of the given string |buf| to the package as a file.
-
-    |archive_path| is a path in archive for this file.
-    """
-    # Only 'str' is allowed here, not 'unicode'.
-    assert isinstance(buf, str)
-    self._add_entry(archive_path, ZipPackage._BufferRef(buf))
-
-  def zip_into_buffer(self, compress=True):
-    """Zips added files into in-memory zip file and returns it as str."""
-    stream = StringIO.StringIO()
-    try:
-      self._zip_into_stream(stream, compress)
-      return stream.getvalue()
-    finally:
-      stream.close()
-
-  def zip_into_file(self, path, compress=True):
-    """Zips added files into a file on disk."""
-    with open(path, 'wb') as stream:
-      self._zip_into_stream(stream, compress)
-
-  def _add_entry(self, archive_path, ref):
-    """Adds new zip package entry."""
-    # Always use forward slashes in zip.
-    archive_path = archive_path.replace(os.sep, '/')
-    # Ensure there are no suspicious components in the path.
-    assert not any(p in ('', '.', '..') for p in archive_path.split('/'))
-    # Ensure there are no file overwrites.
-    if archive_path in self._items:
-      raise ZipPackageError('Duplicated entry: %s' % archive_path)
-    self._items[archive_path] = ref
-
-  def _zip_into_stream(self, stream, compress):
-    """Zips files added so far into some output stream.
-
-    Some measures are taken to guarantee that the final zip depends only on the
-    content of added files:
-      * File modification time is not stored.
-      * Entries are sorted by file name in archive.
-    """
-    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
-    zip_file = zipfile.ZipFile(stream, 'w', compression)
-    try:
-      for archive_path in sorted(self._items):
-        ref = self._items[archive_path]
-        info = zipfile.ZipInfo(filename=archive_path)
-        info.compress_type = compression
-        info.create_system = 3
-        if isinstance(ref, ZipPackage._FileRef):
-          info.external_attr = (os.stat(ref.abs_path)[0] & 0xFFFF) << 16L
-          with open(ref.abs_path, 'rb') as f:
-            buf = f.read()
-        elif isinstance(ref, ZipPackage._BufferRef):
-          buf = ref.buffer
-        else:
-          assert False, 'Unexpected type %s' % ref
-        zip_file.writestr(info, buf)
-    finally:
-      zip_file.close()
-
-
-def get_module_zip_archive(module):
-  """Given a module, returns path to a zip package that contains it or None."""
-  loader = pkgutil.get_loader(module)
-  if not isinstance(loader, zipimport.zipimporter):
-    return None
-  # 'archive' property is documented only for python 2.7, but it appears to be
-  # there at least since python 2.5.2.
-  return loader.archive
-
-
-def is_zipped_module(module):
-  """True if given module was loaded from a zip package."""
-  return bool(get_module_zip_archive(module))
-
-
-def get_main_script_path():
-  """If running from zip returns path to a zip file, else path to __main__.
-
-  Basically returns path to a file passed to python for execution
-  as in 'python <main_script>' considering a case of executable zip package.
-
-  Returns path relative to a current directory of when process was started.
-  """
-  # If running from interactive console __file__ is not defined.
-  main = sys.modules['__main__']
-  path = get_module_zip_archive(main)
-  if path:
-    return path
-
-  path = getattr(main, '__file__', None)
-  if path:
-    return path.decode(sys.getfilesystemencoding())
-
-
-def _write_temp_data(name, data, temp_dir):
-  """Writes content-addressed file in `temp_dir` if relevant."""
-  filename = '%s-%s' % (hashlib.sha1(data).hexdigest(), name)
-  filepath = os.path.join(temp_dir, filename)
-  if os.path.isfile(filepath):
-    with open(filepath, 'rb') as f:
-      if f.read() == data:
-        # It already exists.
-        return filepath
-    # It's different, can't use it.
-    return None
-
-  try:
-    fd = os.open(filepath, os.O_WRONLY|os.O_CREAT|os.O_EXCL, 0600)
-    with os.fdopen(fd, 'wb') as f:
-      f.write(data)
-    return filepath
-  except (IOError, OSError):
-    return None
-
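# The content-addressed naming used above, spelled out: identical payload bytes
# always map to the same file name, so a previous extraction can be reused.
_example_filename = '%s-%s' % (
    hashlib.sha1(b'payload bytes').hexdigest(), 'cacert.pem')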
-
-def extract_resource(package, resource, temp_dir=None):
-  """Returns real file system path to a |resource| file from a |package|.
-
-  If it's inside a zip package, extracts it first into a file. Such a file is
-  readable and writable only by the creating user ID.
-
-  Arguments:
-    package: is a python module object that represents a package.
-    resource: should be a relative filename, using '/' as the path separator.
-    temp_dir: if set, the file is extracted into this directory with a filename
-        derived from the hash of the content. Otherwise, tempfile.mkstemp() is
-        used.
-
-  Raises ValueError if no such resource.
-  """
-  # For regular non-zip packages just construct an absolute path.
-  if not is_zipped_module(package):
-    # Package's __file__ attribute is always an absolute path.
-    ppath = package.__file__.decode(sys.getfilesystemencoding())
-    path = os.path.join(os.path.dirname(ppath),
-        resource.replace('/', os.sep))
-    if not os.path.exists(path):
-      raise ValueError('No such resource in %s: %s' % (package, resource))
-    return path
-
-  # For zipped packages extract the resource into a temp file.
-  data = pkgutil.get_data(package.__name__, resource)
-  if data is None:
-    raise ValueError('No such resource in zipped %s: %s' % (package, resource))
-
-  if temp_dir:
-    filepath = _write_temp_data(os.path.basename(resource), data, temp_dir)
-    if filepath:
-      return filepath
-
-  fd, filepath = tempfile.mkstemp(
-      prefix=u'.zip_pkg-',
-      suffix=u'-' + os.path.basename(resource),
-      dir=temp_dir)
-  with os.fdopen(fd, 'wb') as stream:
-    stream.write(data)
-
-  # Register it for removal when process dies.
-  with _extracted_files_lock:
-    _extracted_files.append(filepath)
-    # First extracted file -> register atexit hook that cleans them all.
-    if len(_extracted_files) == 1:
-      atexit.register(cleanup_extracted_resources)
-
-  return filepath
-
-
-def cleanup_extracted_resources():
-  """Removes all temporary files created by extract_resource.
-
-  Executed as atexit hook.
-  """
-  with _extracted_files_lock:
-    while _extracted_files:
-      try:
-        os.remove(_extracted_files.pop())
-      except OSError:
-        pass
-
-
-def generate_version():
-  """Generates the sha-1 based on the content of this zip.
-
-  It hashes the content of the zip, not the compressed bits. Compression has
-  other side effects that kick in, like zlib's library version, the compression
-  level, the order in which the files were specified, etc.
-  """
-  assert is_zipped_module(sys.modules['__main__'])
-  h = hashlib.sha1()
-  with zipfile.ZipFile(get_main_script_path(), 'r') as z:
-    for name in sorted(z.namelist()):
-      with z.open(name) as f:
-        h.update(str(len(name)))
-        h.update(name)
-        content = f.read()
-        h.update(str(len(content)))
-        h.update(content)
-  return h.hexdigest()
diff --git a/tools/testrunner/local/testsuite.py b/tools/testrunner/local/testsuite.py
index 11d2207..f7fa19b 100644
--- a/tools/testrunner/local/testsuite.py
+++ b/tools/testrunner/local/testsuite.py
@@ -325,13 +325,22 @@
     shell = os.path.abspath(os.path.join(context.shell_dir, self.shell()))
     if utils.IsWindows():
       shell += ".exe"
-    output = commands.Execute(context.command_prefix +
-                              [shell, "--gtest_list_tests"] +
-                              context.extra_flags)
-    if output.exit_code != 0:
+
+    output = None
+    for i in xrange(3): # Try 3 times in case of errors.
+      output = commands.Execute(context.command_prefix +
+                                [shell, "--gtest_list_tests"] +
+                                context.extra_flags)
+      if output.exit_code == 0:
+        break
+      print "Test executable failed to list the tests (try %d).\n\nStdout:" % i
       print output.stdout
+      print "\nStderr:"
       print output.stderr
+      print "\nExit code: %d" % output.exit_code
+    else:
       raise Exception("Test executable failed to list the tests.")
+
     tests = []
     test_case = ''
     for line in output.stdout.splitlines():
diff --git a/tools/testrunner/local/variants.py b/tools/testrunner/local/variants.py
index b224e41..ea42bf5 100644
--- a/tools/testrunner/local/variants.py
+++ b/tools/testrunner/local/variants.py
@@ -13,6 +13,7 @@
   "ignition_staging": [["--ignition-staging"]],
   "ignition_turbofan": [["--ignition-staging", "--turbo"]],
   "preparser": [["--min-preparse-length=0"]],
+  "asm_wasm": [["--validate-asm"]],
 }
 
 # FAST_VARIANTS implies no --always-opt.
@@ -25,8 +26,9 @@
   "ignition_staging": [["--ignition-staging"]],
   "ignition_turbofan": [["--ignition-staging", "--turbo"]],
   "preparser": [["--min-preparse-length=0"]],
+  "asm_wasm": [["--validate-asm"]],
 }
 
 ALL_VARIANTS = set(["default", "stress", "turbofan", "turbofan_opt",
                     "nocrankshaft", "ignition", "ignition_staging",
-                    "ignition_turbofan", "preparser"])
+                    "ignition_turbofan", "preparser", "asm_wasm"])
diff --git a/tools/try_perf.py b/tools/try_perf.py
index e022ab4..05e240e 100755
--- a/tools/try_perf.py
+++ b/tools/try_perf.py
@@ -49,6 +49,7 @@
   'simdjs',
   'sunspider',
   'sunspider-ignition',
+  'unity',
   'wasm',
 ]
 
diff --git a/tools/turbolizer/constants.js b/tools/turbolizer/constants.js
index f062fa2..da92c45 100644
--- a/tools/turbolizer/constants.js
+++ b/tools/turbolizer/constants.js
@@ -20,5 +20,11 @@
 var DISASSEMBLY_EXPAND_ID = 'disassembly-expand';
 var COLLAPSE_PANE_BUTTON_VISIBLE = 'button-input';
 var COLLAPSE_PANE_BUTTON_INVISIBLE = 'button-input-invisible';
-var PROF_HIGH = 5;
-var PROF_MED = 0.5;
+var UNICODE_BLOCK = '&#9611;';
+var PROF_COLS = [
+  { perc:   0, col: { r: 255, g: 255, b: 255 } },
+  { perc: 0.5, col: { r: 255, g: 255, b: 128 } },
+  { perc:   5, col: { r: 255, g: 128, b:   0 } },
+  { perc:  15, col: { r: 255, g:   0, b:   0 } },
+  { perc: 100, col: { r:   0, g:   0, b:   0 } }
+];
diff --git a/tools/turbolizer/disassembly-view.js b/tools/turbolizer/disassembly-view.js
index b704c77..a2a534c 100644
--- a/tools/turbolizer/disassembly-view.js
+++ b/tools/turbolizer/disassembly-view.js
@@ -159,6 +159,7 @@
     view.pos_start = -1;
     view.addr_event_counts = null;
     view.total_event_counts = null;
+    view.max_event_counts = null;
     view.pos_lines = new Array();
     // Comment lines for line 0 include sourcePosition already, only need to
     // add sourcePosition for lines > 0.
@@ -181,15 +182,18 @@
       view.addr_event_counts = eventCounts;
 
       view.total_event_counts = {};
-      for (var ev_name in view.addr_event_counts) {
+      view.max_event_counts = {};
+      for (let ev_name in view.addr_event_counts) {
         let keys = Object.keys(view.addr_event_counts[ev_name]);
         let values = keys.map(key => view.addr_event_counts[ev_name][key]);
         view.total_event_counts[ev_name] = values.reduce((a, b) => a + b);
+        view.max_event_counts[ev_name] = values.reduce((a, b) => Math.max(a, b));
       }
     }
     else {
       view.addr_event_counts = null;
       view.total_event_counts = null;
+      view.max_event_counts = null;
     }
   }
 
@@ -198,6 +202,11 @@
     return num.toFixed(3).replace(/\.?0+$/, "") + "%";
   }
 
+  // Interpolate between the given start and end values by a fraction of val/max.
+  interpolate(val, max, start, end) {
+    return start + (end - start) * (val / max);
+  }
+
   processLine(line) {
     let view = this;
     let func = function(match, p1, p2, p3) {
@@ -214,30 +223,49 @@
 
     // Add profiling data per instruction if available.
     if (view.total_event_counts) {
-      let event_selector = document.getElementById('event-selector');
-      if (event_selector.length !== 0) {
-        let event = event_selector.value;
-        let matches = /^(0x[0-9a-fA-F]+)\s+\d+\s+[0-9a-fA-F]+/.exec(line);
-        if (matches) {
+      let matches = /^(0x[0-9a-fA-F]+)\s+\d+\s+[0-9a-fA-F]+/.exec(line);
+      if (matches) {
+        let newFragments = [];
+        for (let event in view.addr_event_counts) {
           let count = view.addr_event_counts[event][matches[1]];
-          let str = "";
-          let css_cls = undefined;
+          let str = " ";
+          let css_cls = "prof";
           if(count !== undefined) {
             let perc = count / view.total_event_counts[event] * 100;
 
-            str = "(" + view.humanize(perc) + ") ";
+            let col = { r: 255, g: 255, b: 255 };
+            for (let i = 0; i < PROF_COLS.length; i++) {
+              if (perc === PROF_COLS[i].perc) {
+                col = PROF_COLS[i].col;
+                break;
+              }
+              else if (perc > PROF_COLS[i].perc && perc < PROF_COLS[i + 1].perc) {
+                let col1 = PROF_COLS[i].col;
+                let col2 = PROF_COLS[i + 1].col;
 
-            css_cls = "prof-low";
-            if(perc > PROF_HIGH)
-              css_cls = "prof-high";
-            else if(perc > PROF_MED)
-              css_cls = "prof-med";
+                let val = perc - PROF_COLS[i].perc;
+                let max = PROF_COLS[i + 1].perc - PROF_COLS[i].perc;
+
+                col.r = Math.round(view.interpolate(val, max, col1.r, col2.r));
+                col.g = Math.round(view.interpolate(val, max, col1.g, col2.g));
+                col.b = Math.round(view.interpolate(val, max, col1.b, col2.b));
+                break;
+              }
+            }
+
+            str = UNICODE_BLOCK;
+
+            let fragment = view.createFragment(str, css_cls);
+            fragment.title = event + ": " + view.humanize(perc) + " (" + count + ")";
+            fragment.style.color = "rgb(" + col.r + ", " + col.g + ", " + col.b + ")";
+
+            newFragments.push(fragment);
           }
-          // Pad extra spaces to keep alignment for all instructions.
-          str = (" ".repeat(10) + str).slice(-10);
+          else
+            newFragments.push(view.createFragment(str, css_cls));
 
-          fragments.splice(0, 0, view.createFragment(str, css_cls));
         }
+        fragments = newFragments.concat(fragments);
       }
     }
     return fragments;
diff --git a/tools/turbolizer/index.html b/tools/turbolizer/index.html
index 8dc21b7..4066fd8 100644
--- a/tools/turbolizer/index.html
+++ b/tools/turbolizer/index.html
@@ -1,6 +1,7 @@
 <!DOCTYPE HTML>
 <html>
   <head>
+    <title>Turbolizer</title>
     <link rel="stylesheet" href="turbo-visualizer.css" />
   </head>
   <body width="100%">
@@ -53,12 +54,9 @@
       </text></svg></div>
     </div>
     <div id="right">
-      <span id="disassembly-toolbox">
-        <select id="event-selector"></select>
-      </span>
       <div id='disassembly'>
         <pre id='disassembly-text-pre' class='prettyprint prettyprinted'>
-          <ul id='disassembly-list' class='nolinenums noindent'>            
+          <ul id='disassembly-list' class='nolinenums noindent'>
           </ul>
         </pre>
       </div>
diff --git a/tools/turbolizer/text-view.js b/tools/turbolizer/text-view.js
index 70d2a25..6822500 100644
--- a/tools/turbolizer/text-view.js
+++ b/tools/turbolizer/text-view.js
@@ -120,7 +120,7 @@
     if (style != undefined) {
       span.classList.add(style);
     }
-    span.innerText = text;
+    span.innerHTML = text;
     return span;
   }
 
diff --git a/tools/turbolizer/turbo-visualizer.css b/tools/turbolizer/turbo-visualizer.css
index 8e2bab2..69a6cca 100644
--- a/tools/turbolizer/turbo-visualizer.css
+++ b/tools/turbolizer/turbo-visualizer.css
@@ -326,16 +326,8 @@
     display: none;
 }
 
-.prof-low {
-    color: #888;
-}
-
-.prof-med {
-    color: #080;
-}
-
-.prof-high {
-    color: #800;
+.prof {
+    cursor: default;
 }
 
 tspan {
diff --git a/tools/turbolizer/turbo-visualizer.js b/tools/turbolizer/turbo-visualizer.js
index b8d7762..280caf0 100644
--- a/tools/turbolizer/turbo-visualizer.js
+++ b/tools/turbolizer/turbo-visualizer.js
@@ -188,13 +188,6 @@
               }
             }
 
-            var eventMenu = document.getElementById('event-selector');
-            eventMenu.innerHTML = '';
-            for (var event in jsonObj.eventCounts) {
-              var optionElement = document.createElement("option");
-              optionElement.text = event;
-              eventMenu.add(optionElement, null);
-            }
             disassemblyView.initializePerfProfile(jsonObj.eventCounts);
             disassemblyView.show(disassemblyPhase.data, null);
 
@@ -216,10 +209,6 @@
               displayPhase(jsonObj.phases[selectMenu.selectedIndex]);
             }
 
-            eventMenu.onchange = function(item) {
-              disassemblyView.show(disassemblyView.data, null);
-            }
-
             fitPanesToParents();
 
             d3.select("#search-input").attr("value", window.sessionStorage.getItem("lastSearch") || "");
diff --git a/tools/update-wasm-fuzzers.sh b/tools/update-wasm-fuzzers.sh
new file mode 100755
index 0000000..3652829
--- /dev/null
+++ b/tools/update-wasm-fuzzers.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+# Copyright 2016 the V8 project authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+TOOLS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+cd ${TOOLS_DIR}/..
+
+rm -rf test/fuzzer/wasm
+rm -rf test/fuzzer/wasm_asmjs
+
+make x64.debug -j
+
+mkdir -p test/fuzzer/wasm
+mkdir -p test/fuzzer/wasm_asmjs
+
+# asm.js
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm_asmjs/" mjsunit/wasm/asm*
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm_asmjs/" mjsunit/asm/*
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm_asmjs/" mjsunit/regress/asm/*
+# WASM
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm/" unittests
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm/" mjsunit/wasm/*
+./tools/run-tests.py -j8 --variants=default --timeout=10 --arch=x64 \
+  --mode=debug --no-presubmit --extra-flags="--dump-wasm-module \
+  --dump-wasm-module-path=./test/fuzzer/wasm/" \
+  $(cd test/; ls cctest/wasm/test-*.cc | \
+  sed -es/wasm\\///g | sed -es/[.]cc/\\/\\*/g)
+
+# Delete items over 20k.
+for x in $(find ./test/fuzzer/wasm/ -type f -size +20k)
+do
+  rm $x
+done
+for x in $(find ./test/fuzzer/wasm_asmjs/ -type f -size +20k)
+do
+  rm $x
+done
+
+# Upload changes.
+cd test/fuzzer
+upload_to_google_storage.py -a -b v8-wasm-fuzzer wasm
+upload_to_google_storage.py -a -b v8-wasm-asmjs-fuzzer wasm_asmjs
diff --git a/tools/v8heapconst.py b/tools/v8heapconst.py
index 69d73c2..0ff0cf3 100644
--- a/tools/v8heapconst.py
+++ b/tools/v8heapconst.py
@@ -89,7 +89,7 @@
   163: "ALIASED_ARGUMENTS_ENTRY_TYPE",
   164: "BOX_TYPE",
   173: "PROTOTYPE_INFO_TYPE",
-  174: "SLOPPY_BLOCK_WITH_EVAL_CONTEXT_EXTENSION_TYPE",
+  174: "CONTEXT_EXTENSION_TYPE",
   167: "FIXED_ARRAY_TYPE",
   148: "FIXED_DOUBLE_ARRAY_TYPE",
   168: "SHARED_FUNCTION_INFO_TYPE",
@@ -232,7 +232,7 @@
   0x09231: (165, "DebugInfoMap"),
   0x0925d: (166, "BreakPointInfoMap"),
   0x09289: (173, "PrototypeInfoMap"),
-  0x092b5: (174, "SloppyBlockWithEvalContextExtensionMap"),
+  0x092b5: (174, "ContextExtensionMap"),
 }
 
 # List of known V8 objects.
diff --git a/tools/verify_source_deps.py b/tools/verify_source_deps.py
index 56e3156..a3fdb2e 100755
--- a/tools/verify_source_deps.py
+++ b/tools/verify_source_deps.py
@@ -8,45 +8,103 @@
 .h and .cc files in the source tree and which files are included in the gyp
 and gn files. The latter inclusion is overapproximated.
 
-TODO(machenbach): Gyp files in src will point to source files in src without a
-src/ prefix. For simplicity, all paths relative to src are stripped. But this
-tool won't be accurate for other sources in other directories (e.g. cctest).
+TODO(machenbach): If two source files with the same name exist, but only one
+is referenced from a gyp/gn file, we won't necessarily detect it.
 """
 
 import itertools
 import re
 import os
+import subprocess
+import sys
 
 
 V8_BASE = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-V8_SRC_BASE = os.path.join(V8_BASE, 'src')
-V8_INCLUDE_BASE = os.path.join(V8_BASE, 'include')
 
 GYP_FILES = [
   os.path.join(V8_BASE, 'src', 'd8.gyp'),
   os.path.join(V8_BASE, 'src', 'v8.gyp'),
+  os.path.join(V8_BASE, 'src', 'inspector', 'inspector.gypi'),
   os.path.join(V8_BASE, 'src', 'third_party', 'vtune', 'v8vtune.gyp'),
+  os.path.join(V8_BASE, 'samples', 'samples.gyp'),
   os.path.join(V8_BASE, 'test', 'cctest', 'cctest.gyp'),
+  os.path.join(V8_BASE, 'test', 'fuzzer', 'fuzzer.gyp'),
   os.path.join(V8_BASE, 'test', 'unittests', 'unittests.gyp'),
+  os.path.join(V8_BASE, 'test', 'inspector', 'inspector.gyp'),
+  os.path.join(V8_BASE, 'testing', 'gmock.gyp'),
+  os.path.join(V8_BASE, 'testing', 'gtest.gyp'),
   os.path.join(V8_BASE, 'tools', 'parser-shell.gyp'),
 ]
 
+ALL_GYP_PREFIXES = [
+  '..',
+  'common',
+  os.path.join('src', 'third_party', 'vtune'),
+  'src',
+  'samples',
+  'testing',
+  'tools',
+  os.path.join('test', 'cctest'),
+  os.path.join('test', 'common'),
+  os.path.join('test', 'fuzzer'),
+  os.path.join('test', 'unittests'),
+  os.path.join('test', 'inspector'),
+]
 
-def path_no_prefix(path):
-  if path.startswith('../'):
-    return path_no_prefix(path[3:])
-  elif path.startswith('src/'):
-    return path_no_prefix(path[4:])
-  else:
-    return path
+GYP_UNSUPPORTED_FEATURES = [
+  'gcmole',
+]
+
+GN_FILES = [
+  os.path.join(V8_BASE, 'BUILD.gn'),
+  os.path.join(V8_BASE, 'build', 'secondary', 'testing', 'gmock', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'build', 'secondary', 'testing', 'gtest', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'src', 'inspector', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'test', 'cctest', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'test', 'unittests', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'test', 'inspector', 'BUILD.gn'),
+  os.path.join(V8_BASE, 'tools', 'BUILD.gn'),
+]
+
+GN_UNSUPPORTED_FEATURES = [
+  'aix',
+  'cygwin',
+  'freebsd',
+  'gcmole',
+  'openbsd',
+  'ppc',
+  'qnx',
+  'solaris',
+  'vtune',
+  'x87',
+]
+
+ALL_GN_PREFIXES = [
+  '..',
+  os.path.join('src', 'inspector'),
+  'src',
+  'testing',
+  os.path.join('test', 'cctest'),
+  os.path.join('test', 'unittests'),
+  os.path.join('test', 'inspector'),
+]
+
+def pathsplit(path):
+  return re.split('[/\\\\]', path)
+
+def path_no_prefix(path, prefixes):
+  for prefix in prefixes:
+    if path.startswith(prefix + os.sep):
+      return path_no_prefix(path[len(prefix) + 1:], prefixes)
+  return path
 
 
-def isources(directory):
-  for root, dirs, files in os.walk(directory):
-    for f in files:
-      if not (f.endswith('.h') or f.endswith('.cc')):
-        continue
-      yield path_no_prefix(os.path.relpath(os.path.join(root, f), V8_BASE))
+def isources(prefixes):
+  cmd = ['git', 'ls-tree', '-r', 'HEAD', '--full-name', '--name-only']
+  for f in subprocess.check_output(cmd, universal_newlines=True).split('\n'):
+    if not (f.endswith('.h') or f.endswith('.cc')):
+      continue
+    yield path_no_prefix(os.path.join(*pathsplit(f)), prefixes)
 
 
 def iflatten(obj):
@@ -59,7 +117,7 @@
       for i in iflatten(value):
         yield i
   elif isinstance(obj, basestring):
-    yield path_no_prefix(obj)
+    yield path_no_prefix(os.path.join(*pathsplit(obj)), ALL_GYP_PREFIXES)
 
 
 def iflatten_gyp_file(gyp_file):
@@ -80,27 +138,44 @@
     for line in f.read().splitlines():
       match = re.match(r'.*"([^"]*)".*', line)
       if match:
-        yield path_no_prefix(match.group(1))
+        yield path_no_prefix(
+            os.path.join(*pathsplit(match.group(1))), ALL_GN_PREFIXES)
 
 
-def icheck_values(values, *source_dirs):
-  for source_file in itertools.chain(
-      *[isources(source_dir) for source_dir in source_dirs]
-    ):
+def icheck_values(values, prefixes):
+  for source_file in isources(prefixes):
     if source_file not in values:
       yield source_file
 
 
-gyp_values = set(itertools.chain(
-  *[iflatten_gyp_file(gyp_file) for gyp_file in GYP_FILES]
-  ))
+def missing_gyp_files():
+  gyp_values = set(itertools.chain(
+    *[iflatten_gyp_file(gyp_file) for gyp_file in GYP_FILES]
+    ))
+  gyp_files = sorted(icheck_values(gyp_values, ALL_GYP_PREFIXES))
+  return filter(
+      lambda x: not any(i in x for i in GYP_UNSUPPORTED_FEATURES), gyp_files)
 
-print "----------- Files not in gyp: ------------"
-for i in sorted(icheck_values(gyp_values, V8_SRC_BASE, V8_INCLUDE_BASE)):
-  print i
 
-gn_values = set(iflatten_gn_file(os.path.join(V8_BASE, 'BUILD.gn')))
+def missing_gn_files():
+  gn_values = set(itertools.chain(
+    *[iflatten_gn_file(gn_file) for gn_file in GN_FILES]
+    ))
 
-print "\n----------- Files not in gn: -------------"
-for i in sorted(icheck_values(gn_values, V8_SRC_BASE, V8_INCLUDE_BASE)):
-  print i
+  gn_files = sorted(icheck_values(gn_values, ALL_GN_PREFIXES))
+  return filter(
+      lambda x: not any(i in x for i in GN_UNSUPPORTED_FEATURES), gn_files)
+
+
+def main():
+  print "----------- Files not in gyp: ------------"
+  for i in missing_gyp_files():
+    print i
+
+  print "\n----------- Files not in gn: -------------"
+  for i in missing_gn_files():
+    print i
+  return 0
+
+if '__main__' == __name__:
+  sys.exit(main())
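
For context on the rewritten verify_source_deps.py above: source paths now come from git ls-tree and are repo-relative, while gyp/gn references are relative to their own build files, so both sides are normalized by recursively stripping the known directory prefixes before comparison. Below is a minimal standalone sketch of that normalization, reusing the ALL_GN_PREFIXES list from the diff; the example file names are illustrative only, not taken from a real BUILD.gn.

    # Sketch of the prefix normalization in verify_source_deps.py:
    # known prefixes are stripped repeatedly so repo-relative paths and
    # gn-relative references compare equal.
    import os
    import re

    ALL_GN_PREFIXES = [
        '..',
        os.path.join('src', 'inspector'),
        'src',
        'testing',
        os.path.join('test', 'cctest'),
        os.path.join('test', 'unittests'),
        os.path.join('test', 'inspector'),
    ]

    def pathsplit(path):
        return re.split(r'[/\\]', path)

    def path_no_prefix(path, prefixes):
        for prefix in prefixes:
            if path.startswith(prefix + os.sep):
                return path_no_prefix(path[len(prefix) + 1:], prefixes)
        return path

    if __name__ == '__main__':
        # A repo-relative path, as produced by `git ls-tree` ...
        repo_path = os.path.join(*pathsplit('test/cctest/wasm/test-run-wasm.cc'))
        # ... and a reference as it might appear in a file under test/cctest.
        gn_ref = os.path.join(*pathsplit('wasm/test-run-wasm.cc'))
        assert path_no_prefix(repo_path, ALL_GN_PREFIXES) == \
            path_no_prefix(gn_ref, ALL_GN_PREFIXES)

Source files that still have no match after this normalization are what the script reports under "Files not in gyp" and "Files not in gn".
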